# Copyright (c) 2022-2025, NVIDIA CORPORATION & AFFILIATES. All rights reserved.
#
# See LICENSE for license information.
import datetime
import os
import pathlib
import subprocess

# Basic project info
project = "Transformer Engine"
author = "NVIDIA CORPORATION & AFFILIATES"

# Copyright statement
release_year = 2022
current_year = datetime.date.today().year
if current_year == release_year:
    copyright_year = release_year
else:
    copyright_year = f"{release_year}-{current_year}"
copyright = f"{copyright_year}, NVIDIA CORPORATION & AFFILIATES. All rights reserved."

# Transformer Engine root directory
root_path = pathlib.Path(__file__).resolve().parent.parent

# Git hash
git_sha = os.getenv("GIT_SHA")
if not git_sha:
    try:
        git_sha = (
            subprocess.check_output(["git", "log", "--pretty=format:'%h'", "-n1"])
            .decode("ascii")
            .replace("'", "")
            .strip()
        )
    except (subprocess.CalledProcessError, OSError):
        git_sha = "0000000"
git_sha = git_sha[:7] if len(git_sha) > 7 else git_sha

# Version
with open(root_path / "build_tools" / "VERSION.txt", "r") as f:
    _raw_version = f.readline().strip()
if "dev" in _raw_version:
    version = _raw_version + "-" + git_sha
else:
    version = _raw_version
release = _raw_version

# -- General configuration ---------------------------------------------------
# https://www.sphinx-doc.org/en/master/usage/configuration.html#general-configuration
extensions = [
    "sphinx.ext.autodoc",
    "sphinx.ext.mathjax",
    "sphinx.ext.napoleon",
    "sphinx.ext.ifconfig",
    "nbsphinx",
    "breathe",
    "autoapi.extension",
]

templates_path = ["_templates"]
exclude_patterns = ["_build", "Thumbs.db", ".DS_Store"]

source_suffix = ".rst"
master_doc = "index"
pygments_style = "sphinx"

# -- Options for HTML output -------------------------------------------------
# https://www.sphinx-doc.org/en/master/usage/configuration.html#options-for-html-output

html_theme = "sphinx_rtd_theme"
html_static_path = ["_static"]
html_show_sphinx = False
html_css_files = [
    "css/nvidia_font.css",
    "css/nvidia_footer.css",
]
html_theme_options = {
    "collapse_navigation": False,
    "logo_only": False,
    "version_selector": False,
    "language_selector": False,
}

napoleon_custom_sections = [
    ("Parallelism parameters", "params_style"),
    ("Optimization parameters", "params_style"),
    ("Values", "params_style"),
    ("Graphing parameters", "params_style"),
    ("FP8-related parameters", "params_style"),
]

breathe_projects = {"TransformerEngine": root_path / "docs" / "doxygen" / "xml"}
breathe_default_project = "TransformerEngine"

autoapi_generate_api_docs = False
autoapi_dirs = [root_path / "transformer_engine"]
{
"cells": [
{
"cell_type": "markdown",
"id": "24184f3f",
"metadata": {},
"source": [
"# Performance Optimizations"
]
},
{
"cell_type": "markdown",
"id": "6dcbf25a",
"metadata": {},
"source": [
"This guide is a follow-up to the discussion in the [quickstart guide](quickstart.ipynb). We will focus on techniques to achieve maximum performance when training a basic GPT encoder layer. For convenience, we use some helper functions defined in [quickstart_utils.py](quickstart_utils.py). "
]
},
{
"cell_type": "code",
"execution_count": 1,
"id": "2b53dfa7",
"metadata": {},
"outputs": [],
"source": [
"import torch\n",
"import transformer_engine.pytorch as te\n",
"from transformer_engine.common.recipe import Format, DelayedScaling\n",
"import quickstart_utils as utils\n",
"\n",
"# Layer configuration\n",
"hidden_size = 4096\n",
"sequence_length = 2048\n",
"batch_size = 4\n",
"ffn_hidden_size = 16384\n",
"num_attention_heads = 32\n",
"dtype = torch.float16\n",
"\n",
"# Synthetic data\n",
"x = torch.rand(sequence_length, batch_size, hidden_size).cuda().to(dtype=dtype)\n",
"dy = torch.rand(sequence_length, batch_size, hidden_size).cuda().to(dtype=dtype)"
]
},
{
"cell_type": "code",
"execution_count": 2,
"id": "b96a9ef6",
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"Mean time: 27.82952880859375 ms\n"
]
}
],
"source": [
"# Construct layer\n",
"basic_transformer = te.TransformerLayer(\n",
" hidden_size,\n",
" ffn_hidden_size,\n",
" num_attention_heads,\n",
")\n",
"basic_transformer.to(dtype=dtype).cuda()\n",
"\n",
"fp8_format = Format.HYBRID\n",
"fp8_recipe = DelayedScaling(\n",
" fp8_format=fp8_format,\n",
" amax_history_len=16,\n",
" amax_compute_algo=\"max\",\n",
")\n",
"# Training step\n",
"with te.fp8_autocast(enabled=True, fp8_recipe=fp8_recipe):\n",
" y = basic_transformer(x, attention_mask=None)\n",
"y.backward(dy)\n",
"\n",
"# Measure step time\n",
"utils.speedometer(\n",
" basic_transformer,\n",
" x,\n",
" dy,\n",
" forward_kwargs = { \"attention_mask\": None },\n",
" fp8_autocast_kwargs = { \"enabled\": True, \"fp8_recipe\": fp8_recipe },\n",
")"
]
},
{
"cell_type": "markdown",
"id": "11367f5b",
"metadata": {},
"source": [
"## Multi-GPU training\n",
"\n",
"<div class=\"alert alert-info\">\n",
"\n",
"<b>Summary</b>\n",
" \n",
"We parallelize a Transformer layer with data, tensor, and sequence parallelism.\n",
"\n",
"</div>\n",
"\n",
"A variety of parallelism strategies can be used to enable multi-GPU training of Transformer models, often based on different approaches to distribute their $\\text{sequence_length} \\times \\text{batch_size} \\times \\text{hidden_size}$ activation tensors. The most common approach is data parallelism, which distributes along the $\\text{batch_size}$ dimension. By storing duplicate copies of the model on each GPU, the forward and backward passes of the training step can be done independently, followed by a gradient synchronization. A more advanced strategy is tensor parallelism, a type of model parallelism that distributes along the $\\text{hidden_size}$ dimension. This allows us to scale past the limits of data parallelism (typically $\\text{hidden_size} > \\text{batch_size}$) and to reduce the per-GPU memory usage (since model parameters are also distributed), but it also incurs the overhead of communicating activation tensors between GPUs at every step. For a more detailed explanation, please see the [Megatron-LM paper](https://arxiv.org/pdf/1909.08053.pdf). Finally, sequence parallelism distributes along the $\\text{sequence_length}$ dimension. This can be used when tensor parallelism is enabled in order to parallelize operations that run outside the tensor-parallel region (e.g. layer norm). For more details, please see [this paper](https://arxiv.org/pdf/2205.05198.pdf).\n",
"\n",
"To show this in action, let's first initialize NCCL with a trivial process group:"
]
},
{
"cell_type": "code",
"execution_count": 3,
"id": "fca06ec3",
"metadata": {},
"outputs": [],
"source": [
"# Configure parallel groups\n",
"import os\n",
"import torch\n",
"torch.distributed.init_process_group(\n",
" \"nccl\",\n",
" init_method=\"file:///tmp/rdzv\",\n",
" world_size=1,\n",
" rank=0,\n",
")\n",
"world_group = torch.distributed.new_group(ranks=[0], backend=\"nccl\")\n",
"data_parallel_group = torch.distributed.new_group(ranks=[0], backend=\"nccl\")\n",
"tensor_parallel_group = torch.distributed.new_group(ranks=[0], backend=\"nccl\")"
]
},
{
"cell_type": "markdown",
"id": "1f2b80d0",
"metadata": {},
"source": [
"We only initialize with one GPU to keep this example simple. Please consult the documentation [torch.distributed](https://pytorch.org/docs/stable/distributed.html) for guidance on running with multiple GPUs. Note that we require that each distributed process corresponds to exactly one GPU, so we treat them interchangeably. In practice, there are multiple factors that can affect the optimal parallel layout: the system hardware, the network topology, usage of other parallelism schemes like pipeline parallelism. A rough rule-of-thumb is to interpret the GPUs as a 2D grid with dimensions of $\\text{num_nodes} \\times \\text{gpus_per_node}$. The rows are tensor-parallel groups and the columns are data-parallel groups.\n",
"\n",
"Enabling data parallelism with Transformer Engine is similar to enabling data parallelism with standard PyTorch models: simply wrap the modules with [torch.nn.parallel.DistributedDataParallel](https://pytorch.org/docs/stable/generated/torch.nn.parallel.DistributedDataParallel.html). Transformer Engine modules also have native support for tensor and sequence parallelism. If the user provides a process group for tensor parallelism, the modules will distribute the data and perform communication internally. If sequence parallelism is enabled, it will be applied for operations that are not amenable to tensor parallelism and it will use the tensor-parallel process group.\n",
"\n",
"One important consideration for multi-GPU FP8 training is how to synchronize the FP8 scaling factors between GPUs. If tensor parallelism is enabled, the scales must be synchronized over the tensor-parallel group. However, synchronizing over both the data-parallel and tensor-parallel groups is recommended for the best convergence. This can be configured with the **fp8_group** argument in the [fp8_autocast](../api/pytorch.rst#transformer_engine.pytorch.fp8_autocast) context manager."
]
},
{
"cell_type": "code",
"execution_count": 4,
"id": "1892cc9d",
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"Mean time: 29.09606689453125 ms\n"
]
}
],
"source": [
"# Construct layer\n",
"parallel_transformer = te.TransformerLayer(\n",
" hidden_size,\n",
" ffn_hidden_size,\n",
" num_attention_heads,\n",
" set_parallel_mode=True,\n",
" tp_group=tensor_parallel_group,\n",
" sequence_parallel=True,\n",
")\n",
"parallel_transformer.to(dtype=dtype).cuda()\n",
"parallel_transformer = torch.nn.parallel.DistributedDataParallel(\n",
" parallel_transformer,\n",
" process_group=data_parallel_group,\n",
")\n",
"\n",
"# Training step\n",
"with te.fp8_autocast(enabled=True, fp8_recipe=fp8_recipe, fp8_group=world_group):\n",
" y = parallel_transformer(x, attention_mask=None)\n",
"y.backward(dy)\n",
"\n",
"# Measure step time\n",
"utils.speedometer(\n",
" parallel_transformer,\n",
" x,\n",
" dy,\n",
" forward_kwargs = { \"attention_mask\": None },\n",
" fp8_autocast_kwargs = {\n",
" \"enabled\": True,\n",
" \"fp8_recipe\": fp8_recipe,\n",
" \"fp8_group\": world_group,\n",
" },\n",
")"
]
},
{
"cell_type": "markdown",
"id": "5f03f6d8",
"metadata": {},
"source": [
"## Gradient accumulation fusion\n",
"\n",
"<div class=\"alert alert-info\">\n",
"\n",
"<b>Summary</b>\n",
" \n",
"We take advantage of the ability of Tensor Cores to accumulate outputs directly into FP32.\n",
"\n",
"</div>\n",
"\n",
"PyTorch's autograd functionality assumes that a model parameter and its corresponding gradient have the same data type. However, while low-precision data types like FP8 are sufficient for evaluating a neural network's forward and backward passes, the optimization step typically requires full FP32 precision to avoid significant learning degradation. In addition, Tensor Cores on Hopper GPUs have the option to accumulate matrix products directly into FP32, resulting in better numerical accuracy and avoiding the need for a separate casting kernel. Thus, Transformer Engine provides an option to directly generate FP32 gradients for weight tensors. The FP32 gradients are not output to the parameter's `grad` tensor, but rather to a `main_grad` tensor that must be initialized before the backward pass."
]
},
{
"cell_type": "code",
"execution_count": 5,
"id": "a7f612ec",
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"Mean time: 27.510029296875 ms\n"
]
}
],
"source": [
"# Construct layer\n",
"wgrad_transformer = te.TransformerLayer(\n",
" hidden_size,\n",
" ffn_hidden_size,\n",
" num_attention_heads,\n",
" fuse_wgrad_accumulation=True,\n",
" fuse_qkv_params=True, # Required for fuse_wgrad_accumulation\n",
")\n",
"wgrad_transformer.to(dtype=dtype).cuda()\n",
"for param in wgrad_transformer.parameters():\n",
" param.grad = None\n",
" param.main_grad = torch.zeros_like(param, dtype=torch.float32)\n",
"\n",
"# Training step\n",
"with te.fp8_autocast(enabled=True, fp8_recipe=fp8_recipe):\n",
" y = wgrad_transformer(x, attention_mask=None)\n",
"y.backward(dy)\n",
"for param in wgrad_transformer.parameters():\n",
" if param.grad is not None:\n",
" param.main_grad.copy_(param.grad)\n",
" param.grad = None\n",
"\n",
"# Measure step time\n",
"utils.speedometer(\n",
" wgrad_transformer,\n",
" x,\n",
" dy,\n",
" forward_kwargs = { \"attention_mask\": None },\n",
" fp8_autocast_kwargs = { \"enabled\": True, \"fp8_recipe\": fp8_recipe },\n",
")"
]
},
{
"attachments": {},
"cell_type": "markdown",
"id": "add64bd5",
"metadata": {},
"source": [
"## FP8 weight caching\n",
"\n",
"<div class=\"alert alert-info\">\n",
"\n",
"<b>Summary</b>\n",
" \n",
"We avoid redundant FP8 casting when training with multiple gradient accumulation steps.\n",
"\n",
"</div>\n",
"\n",
"Since weights are typically trained in FP32, a type conversion is required before we can perform compute in FP8. By default, the [fp8_autocast](../api/pytorch.rst#transformer_engine.pytorch.fp8_autocast) context manager will handle this internally by casting non-FP8 tensors to FP8 as they are encountered. However, we can improve upon this in some cases. In particular, if our training iteration is split into multiple gradient accumulation steps, each micro-batch will encounter the same weight tensors. Thus, we only need to cast the weights to FP8 in the first gradient accumulation step and we can cache the resulting FP8 weights for the remaining gradient accumulation steps.\n",
"\n",
"<div class=\"alert alert-warning\">\n",
"\n",
"<b>Warning!</b> \n",
"\n",
"The precise numerical outputs with and without the FP8 weight caching optimization may not be bitwise identical. This is because while the weights remain frozen across a gradient accumulation cycle, the scaling factors and amaxes for the FP8 weights can change as they are updated at the end of every iteration. These changes in amax tensors are incorporated into the amax history, which is not frozen.\n",
"\n",
"</div>"
]
},
{
"cell_type": "code",
"execution_count": 6,
"id": "abbc218e",
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"Mean time: 27.262666015625 ms\n"
]
}
],
"source": [
"# Construct layer\n",
"weight_caching_transformer = te.TransformerLayer(\n",
" hidden_size,\n",
" ffn_hidden_size,\n",
" num_attention_heads,\n",
")\n",
"weight_caching_transformer.to(dtype=dtype).cuda()\n",
"\n",
"# Cast weights in first gradient accumulation step\n",
"with te.fp8_autocast(enabled=True, fp8_recipe=fp8_recipe):\n",
" y = weight_caching_transformer(x, attention_mask=None, is_first_microbatch=True)\n",
"y.backward(dy)\n",
"\n",
"# Reuse FP8 weights in subsequent gradient accumulation steps\n",
"with te.fp8_autocast(enabled=True, fp8_recipe=fp8_recipe):\n",
" y = weight_caching_transformer(x, attention_mask=None, is_first_microbatch=False)\n",
"y.backward(dy)\n",
"\n",
"# Measure step time\n",
"utils.speedometer(\n",
" weight_caching_transformer,\n",
" x,\n",
" dy,\n",
" forward_kwargs = { \"attention_mask\": None, \"is_first_microbatch\": False },\n",
" fp8_autocast_kwargs = { \"enabled\": True, \"fp8_recipe\": fp8_recipe },\n",
")"
]
}
],
"metadata": {
"language_info": {
"name": "python"
}
},
"nbformat": 4,
"nbformat_minor": 5
}
# Copyright (c) 2022-2025, NVIDIA CORPORATION & AFFILIATES. All rights reserved.
#
# See LICENSE for license information.
import os
import torch
from typing import Tuple
from tests.pytorch.fused_attn.test_fused_attn import ModelConfig
from transformer_engine.pytorch.attention import DotProductAttention
# Initialize RNG state
seed = 1234
torch.manual_seed(seed)
torch.cuda.manual_seed(seed)
_cpu_rng_state = torch.get_rng_state()
_cuda_rng_state = torch.cuda.get_rng_state()
_NVTE_DEBUG = int(os.getenv("NVTE_DEBUG", "0"))
def reset_rng_states() -> None:
    """Revert back to initial RNG state"""
    torch.set_rng_state(_cpu_rng_state)
    torch.cuda.set_rng_state(_cuda_rng_state)

def _run_dot_product_attention(
    dtype: torch.dtype,
    config: ModelConfig,
    qkv_layout: str,
) -> Tuple[torch.Tensor, Tuple[torch.Tensor, torch.Tensor, torch.Tensor]]:
    """Run DotProductAttention module with one forward pass and one backward pass"""
    reset_rng_states()

    seqlens_q = torch.full(
        [config.batch_size], config.max_seqlen_q, dtype=torch.int32, device="cuda"
    )
    seqlens_kv = torch.full(
        [config.batch_size], config.max_seqlen_kv, dtype=torch.int32, device="cuda"
    )
    inp = torch.randn(
        [config.batch_size, config.max_seqlen_q, 3, config.num_heads, config.head_dim_qk],
        dtype=dtype,
        device="cuda",
    )
    q = inp[:, :, 0, :, :]
    k = inp[:, :, 1, :, :]
    v = inp[:, :, 2, :, :]
    q.requires_grad = True
    k.requires_grad = True
    v.requires_grad = True
    out_grad = torch.randn(
        [config.batch_size, config.max_seqlen_q, config.num_heads * config.head_dim_v],
        dtype=dtype,
        device="cuda",
    )

    # Create attention mask / bias
    attention_mask = None
    bias = None
    if config.attn_mask_type == "arbitrary":
        attention_mask = torch.randint(
            -10,
            10,
            [config.batch_size, config.num_heads, config.max_seqlen_q, config.max_seqlen_kv],
        ).to(dtype=torch.bool, device="cuda")
    if config.attn_bias_type == "post_scale_bias":
        # convert mask to bias
        attention_mask = torch.randint(
            -10,
            10,
            [config.batch_size, config.num_heads, config.max_seqlen_q, config.max_seqlen_kv],
        ).to(dtype=torch.bool, device="cuda")
        bias = attention_mask.clone()
        neginf = -(2**50) if dtype == torch.bfloat16 else -(2**15)
        bias = torch.where(bias == 0, 0, neginf).to(dtype=dtype, device="cuda")
        bias.requires_grad = False
        attention_mask = None

    block = DotProductAttention(
        config.num_heads,
        config.head_dim_qk,
        num_gqa_groups=config.num_gqa_groups,
        qkv_format="bshd",
        attention_dropout=config.dropout_p,
        sequence_parallel=False,
        tp_size=1,
        get_rng_state_tracker=None,
        tp_group=None,
        layer_number=1,
        attn_mask_type="no_mask",
        window_size=(-1, -1),
    ).to(dtype=dtype, device="cuda")

    # Run a forward and backward pass
    out = None
    if config.attn_mask_type == "arbitrary":
        out = block(
            q,
            k,
            v,
            attention_mask=attention_mask,  # attention_mask
            qkv_format="bshd",
            attn_mask_type=config.attn_mask_type,  # 'arbitrary'
            core_attention_bias_type=config.attn_bias_type,  # 'no_bias'
            core_attention_bias=bias,  # None
            window_size=(-1, -1),
        )
        out.backward(out_grad)
    if config.attn_bias_type == "post_scale_bias":
        out = block(
            q,
            k,
            v,
            attention_mask=attention_mask,  # None
            qkv_format="bshd",
            attn_mask_type=config.attn_mask_type,  # 'no_mask'
            core_attention_bias_type=config.attn_bias_type,  # 'post_scale_bias'
            core_attention_bias=bias,  # bias
            window_size=(-1, -1),
        )
        out.backward(out_grad)
    return out, (q.grad, k.grad, v.grad)

dtype = torch.bfloat16
model_configs = {
    # test: b, h, hg, d, sq, skv, p, mask, bias
    "test_mask": ModelConfig(4, 16, 16, 64, 2048, 2048, 0.0, "arbitrary", "no_bias"),
    "test_bias": ModelConfig(4, 16, 16, 64, 2048, 2048, 0.0, "no_mask", "post_scale_bias"),
}
print("Run with post_scale_bias:")
config = model_configs["test_bias"]
fused_attn_fwd, fused_attn_bwd = _run_dot_product_attention(dtype, config, "bs3hd")
print()
print("Run with arbitrary mask:")
config = model_configs["test_mask"]
unfused_attn_fwd, unfused_attn_bwd = _run_dot_product_attention(dtype, config, "bs3hd")
torch.testing.assert_close(unfused_attn_fwd, fused_attn_fwd, atol=2.5e-2, rtol=2.5e-2)
for i in range(3):
    torch.testing.assert_close(unfused_attn_bwd[i], fused_attn_bwd[i], atol=2.5e-2, rtol=2.5e-2)
print()
print("Test passed!")
{
"cells": [
{
"cell_type": "markdown",
"id": "040f466a",
"metadata": {},
"source": [
"# Attention Is All You Need!\n",
"\n",
"The core idea behind Transformer models is the attention mechanism [[1]](https://arxiv.org/abs/1706.03762). It identifies the correlation between words, selects the most important parts of the sentence to focus on, and captures meaningful patterns and dependencies in the data. Figure 1 shows a typical attention mechanism, where pre-softmax operations can be a combination of scaling, bias and masking while the post-softmax operation is often just dropout.\n",
"\n",
"<figure align=\"center\">\n",
"<img src=\"dot_product_attention.png\" width=\"70%\">\n",
"<figcaption> Figure 1: Dot product attention. </figcaption>\n",
"</figure>\n",
"\n",
"[Transformer Engine](https://github.com/NVIDIA/TransformerEngine.git) supports the calculation of dot product attention in two frameworks, [PyTorch](https://github.com/pytorch/pytorch) and [JAX](https://github.com/google/jax). The API for each framework is\n",
"\n",
"- [transformer_engine.pytorch.DotProductAttention](../../api/pytorch.rst#transformer_engine.pytorch.DotProductAttention)\n",
"- [transformer_engine.jax.flax.DotProductAttention](../../api/jax.rst#transformer_engine.jax.flax.DotProductAttention)"
]
},
{
"cell_type": "markdown",
"id": "89a7d849",
"metadata": {},
"source": [
"## 1. Attention Backends\n",
"\n",
"Transformer Engine provides multiple attention backends for each supported framework. The framework-native backends provide a robust baseline, while the fused, GPU-optimized implementations offer more performance. For example, the flash-attention and cuDNN attention backends in PyTorch. The framework-native backends are often named with \"unfused\", while the more optimized backends are \"fused\" or \"flash\".\n",
"<table class=\"docutils align-default\">\n",
" <tr>\n",
" <th>Framework</th>\n",
" <th>Backend (Module Name)</th>\n",
" <th>Module Location</th>\n",
" </tr>\n",
" <tr>\n",
" <td rowspan=\"3\">PyTorch</td>\n",
" <td>cuDNN attention (`FusedAttention`)</td>\n",
" <td rowspan=\"3\"> [transformer_engine.pytorch.attention](https://github.com/NVIDIA/TransformerEngine/blob/main/transformer_engine/pytorch/attention.py)</td>\n",
" </tr>\n",
" <tr>\n",
" <td> flash-attention (`FlashAttention`)</td>\n",
" </tr>\n",
" <tr>\n",
" <td>\n",
" PyTorch-native attention (`UnfusedDotProductAttention`)\n",
" </td> \n",
" </tr>\n",
" <tr>\n",
" <td rowspan=\"2\">JAX</td>\n",
" <td>cuDNN attention (`_FusedDotProductAttention`)</td>\n",
" <td rowspan=\"2\">[transformer_engine.jax.flax.transformer](https://github.com/NVIDIA/TransformerEngine/blob/main/transformer_engine/jax/flax/transformer.py)</td>\n",
" </tr>\n",
" <tr>\n",
" <td>JAX-native attention (`_UnfusedDotProductAttention`)</td>\n",
" </tr>\n",
" \n",
"</table>"
]
},
{
"cell_type": "markdown",
"id": "c90a2573",
"metadata": {},
"source": [
"### 1.1 Flash vs. Non-Flash\n",
"\n",
"The attention calculation has quadratic computational and memory complexities to the sequence length. Its runtime and memory requirements quadruple, when the sequence length doubles. This presents a significant challenge to scale Transformer models up for longer contexts, in order to achieve higher model quality.\n",
"\n",
"Compared to the standard, non-flash algorithm, the flash algorithm [[2]](https://arxiv.org/abs/2205.14135) was proposed to reduce the memory scaling to linear and improve the computational efficiency through optimized memory accesses. It employs the following two distinctive techniques.\n",
"\n",
"- **Tiling:** The non-flash algorithm tries to process the query, key, value tensors in one single step, requiring large amounts of global memory and incurring high volumes of reads/writes between global memory and shared memory. The flash algorithm decomposes the input into several tiles, based on the available shared memory and register size, and it computes the softmax one tile at a time.\n",
"\n",
"- **Recomputation:** The non-flash algorithm stores the softmax matrix (quadratic to sequence length) to global memory for the backward pass, while the flash algorithm only saves the softmax normalization factors (linear to sequence length). This reduces the amount of memory required as well as the bandwidth utilization between global memory and shared memory. Even though there is extra computation incurred in order to recalculate the attention in the backward pass, the bandwidth savings still provide significant improvement in efficiency.\n",
"\n",
"<div class=\"alert alert-info\">\n",
"<b>Note:</b> \n",
" \n",
"Transformer Engine's flash-attention backend, available in PyTorch, and cuDNN attention backend (sub-backends 1 and 2), available in PyTorch and JAX, are both based on the flash algorithm.\n",
"</div>\n"
]
},
{
"cell_type": "markdown",
"id": "b5ce567d",
"metadata": {},
"source": [
"### 1.2 flash-attention\n",
"\n",
"The flash-attention backend, available only in PyTorch, is a module wrapped around the public `flash-attn` package [[3]](https://github.com/Dao-AILab/flash-attention). \n",
"\n",
"The flash-attention backend supports `flash-attn`'s features as well as a few extra functionalities to facilitate the use of `flash-attn`, such as converting the `attention_mask` to cumulative sequence lengths `cu_seqlens` for `padding` mask use cases. Please see `transformer_engine.pytorch.attention.FlashAttention` for details.\n",
"\n",
"The `flash-attn` dependency is regularly updated in Transformer Engine. As of v2.0, Transformer Engine supports `flash-attn` 2.0.6+ (see [setup.py](https://github.com/NVIDIA/TransformerEngine/blob/main/setup.py)).\n",
"\n",
"To understand `flash-attn`'s performance, please refer to their benchmarks [here](https://github.com/Dao-AILab/flash-attention?tab=readme-ov-file#performance).\n",
"\n",
"### 1.3 cuDNN Attention\n",
"\n",
"The cuDNN attention backend, available in PyTorch and JAX, offers another high-performance solution to the attention calculation. It requires [cuDNN](https://developer.nvidia.com/cudnn) to run, and has several sub-backends to support the different precisions and sequence lengths.\n",
"\n",
"<table class=\"docutils align-default\">\n",
" <tr>\n",
" <th>Sub-Backend</th>\n",
" <th>Algorithm</th>\n",
" <th>Precision</th>\n",
" <th>Sequence Length</th>\n",
" <th>Architecture</th>\n",
" <th>Additional info</th>\n",
" </tr>\n",
" <tr>\n",
" <td>0</td>\n",
" <td>Non-Flash</td>\n",
" <td>BF16/FP16</td>\n",
" <td> &le;512 </td>\n",
" <td> sm80, 90 </td>\n",
" <td> [cuDNN](https://docs.nvidia.com/deeplearning/cudnn/latest/developer/graph-api.html#fused-attention-fprop)</td> \n",
" </tr>\n",
" <tr>\n",
" <td>1</td>\n",
" <td>Flash</td>\n",
" <td>BF16/FP16</td>\n",
" <td> Any </td>\n",
" <td> sm80+ </td>\n",
" <td> [cuDNN](https://docs.nvidia.com/deeplearning/cudnn/latest/developer/graph-api.html#fused-flash-attention-fprop),\n",
" [cudnn-frontend](https://github.com/NVIDIA/cudnn-frontend/blob/main/docs/operations/Attention.md#scaled-dot-product-attention)\n",
" </td>\n",
" </tr>\n",
" <tr>\n",
" <td rowspan=\"2\">2</td>\n",
" <td rowspan=\"2\">Flash</td>\n",
" <td rowspan=\"2\">FP8</td>\n",
" <td> cuDNN pre-9.0: &le;512 </td>\n",
" <td>cuDNN pre-9.0: sm90</td>\n",
" <td></td>\n",
" </tr>\n",
" <tr>\n",
" <td> cuDNN 9.0+: Any</td>\n",
" <td> cuDNN 9.0+: sm90+ </td>\n",
" <td> cuDNN 9.0+: [cudnn-frontend](https://github.com/NVIDIA/cudnn-frontend/blob/main/docs/operations/Attention.md#scaled-dot-product-attention-fp8)\n",
" </td> \n",
" </tr>\n",
"</table>\n",
"\n",
"The cuDNN attention backend and flash-attention backend have several notable differences. As of Transformer Engine 2.0, cuDNN 9.3 and `flash-attn` 2.4.2,\n",
"\n",
"- flash-attention only supports the PyTorch framework while cuDNN attention supports PyTorch and JAX.\n",
"- flash-attention supports BF16, FP16 precisions while cuDNN attention also supports FP8 (through its sub-backend 2).\n",
"- flash-attention supports `bshd`, `thd` input formats, without any transposes, and `sbhd` format, with transposes, while cuDNN attention supports all three formats without transposes (see Section 3.1 for more details).\n",
"- flash-attention does not support `post_scale_bias`, and cuDNN attention does.\n",
"- flash-attention supports KV-caching and paged attention, and cuDNN attention does not.\n",
"- flash-attention uses bottom right diagonal for `causal` mask in cross attention (see [change log](https://github.com/Dao-AILab/flash-attention?tab=readme-ov-file#21-change-behavior-of-causal-flag)), and cuDNN attention supports both top left and bottom right.\n",
"- flash-attention outperforms cuDNN attention on Ampere architectures, and cuDNN attention has 20-50% advantages on Hopper architectures, based on our benchmarks for a number of commonly-used model configurations.\n",
"\n",
"To compare cuDNN attention and flash-attention, users can modify the `model_configs` dictionary in [benchmarks/attention/benchmark_attention.py](https://github.com/NVIDIA/TransformerEngine/blob/main/benchmarks/attention/benchmark_attention.py) to collect performance numbers. The script runs each entry in `model_configs` for `num_iters` times, each time with one forward pass and one backward pass. Both backends are tried, and if one backend does not have support for the specific user input, the runtimes and speedups in the final table would be 0."
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "c5b8e3d7",
"metadata": {},
"outputs": [],
"source": [
"model_configs = {\n",
" # test: b, h, hg, d, sq, skv, p, mask, bias\n",
" \"test_0\": ModelConfig(2, 16, 16, 64, 512, 512, 0.0, \"no_mask\", \"no_bias\"), # short seq\n",
" \"test_1\": ModelConfig(2, 16, 16, 128, 2048, 2048, 0.0, \"causal\", \"no_bias\"), # longer seq, mask\n",
" \"test_2\": ModelConfig(2, 16, 16, 128, 2048, 2048, 0.0, \"causal\", \"post_scale_bias\"), # bias\n",
" \"test_3\": ModelConfig(2, 32, 4, 128, 8192, 8192, 0.0, \"causal\", \"no_bias\"), # GQA\n",
"}"
]
},
{
"cell_type": "code",
"execution_count": 1,
"id": "50852cb5",
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"Device 0: NVIDIA H100 80GB HBM3 GPU, sm90 compute capability, 79.1GB memory\n",
"Running test_0 with cuDNN attention and flash-attention...\n",
"Running test_1 with cuDNN attention and flash-attention...\n",
"Running test_2 with cuDNN attention...\n",
"Running test_3 with cuDNN attention and flash-attention...\n",
"\n",
" cuDNN fwd+bwd (ms) flash-attn fwd+bwd (ms) cuDNN vs flash speedup\n",
"test_0 0.0340 0.0468 1.3786\n",
"test_1 0.3664 0.5850 1.5968\n",
"test_2 0.9332 0.0000 0.0000\n",
"test_3 7.4875 11.8879 1.5877\n"
]
}
],
"source": [
"!cd ../../../benchmarks/attention/ && python benchmark_attention.py"
]
},
{
"cell_type": "markdown",
"id": "9a615119",
"metadata": {},
"source": [
"## 2. Backend Selection\n",
"\n",
"Given the various attention backends, Transformer Engine has a selection logic in place to choose the most appropriate backend for a particular set of user inputs and runtime environment. The selection logic is based on both backend availability and backend performance.\n",
"\n",
"Backend availability is determined by factors such as model configuration, training hyper-parameters, software versions, and the GPU architecture in question. For example, some considerations are the sequence length, number of attention heads, head size, attention mask type, attention bias type, training or inference mode, self or cross attention, MHA or MQA/GQA, `flash-attn`/cuDNN library versions, and the compute capability of the GPU.\n",
"\n",
"When there are multiple backends available, Transformer Engine makes backend selection based on performance. In general, there are a few rules being followed in our selection logic (see table below). As we monitor the performance of different backends, the selection logic may change.\n",
"\n",
"<table class=\"docutils align-default\">\n",
" <tr>\n",
" <th>Framework</th>\n",
" <th>Selection Order</th>\n",
" </tr>\n",
" <tr>\n",
" <td rowspan=\"3\">PyTorch</td>\n",
" <td>sm90: cuDNN attention > flash-attention > PyTorch-native attention</td>\n",
" </tr>\n",
" <tr>\n",
" <td> sm80: flash-attention > cuDNN attention > PyTorch-native attention</td>\n",
" </tr>\n",
" <tr>\n",
" <td>\n",
" cuDNN attention: sub-backend 1 > sub-backend 0\n",
" </td> \n",
" </tr>\n",
" <tr>\n",
" <td>JAX</td>\n",
" <td>cuDNN attention > JAX-native attention</td>\n",
" </tr>\n",
"</table>"
]
},
{
"cell_type": "markdown",
"id": "e6c0f3f0",
"metadata": {},
"source": [
"### 2.1 Debug Information\n",
"\n",
"To find out which backend is being used during runtime, we have the following two debugging flags. Logging is done by using the `logging` package.\n",
"```\n",
"NVTE_DEBUG = 0/1 # disables/enables debugging\n",
"NVTE_DEBUG_LEVEL = 0/1/2 # enables logging.WARNING/INFO/DEBUG-level messages\n",
"```\n",
"<div class=\"alert alert-info\">\n",
"<b>Note:</b>\n",
" \n",
"These flags are supported in PyTorch only as of Transformer Engine 2.0. JAX support is expected to be added in the future.\n",
"</div>"
]
},
{
"cell_type": "markdown",
"id": "16660323",
"metadata": {},
"source": [
"The example script [example_attention.py](https://raw.githubusercontent.com/NVIDIA/TransformerEngine/main/docs/examples/attention/example_attention.py) runs a very basic model with two attention backends, cuDNN attention and flash-attention. Here `NVTE_DEBUG_LEVEL=1` allows us to find out which backend/sub-backend is used in runtime."
]
},
{
"cell_type": "code",
"execution_count": 24,
"id": "906b8cf1",
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"\n",
"Run cuDNN attention...\n",
"[INFO | DotProductAttention]: Running with FusedAttention backend (sub-backend 1)\n",
"\n",
"Run flash-attention...\n",
"[INFO | DotProductAttention]: Running with FlashAttention backend\n",
"\n",
"Test passed.\n"
]
}
],
"source": [
"!NVTE_DEBUG=1 NVTE_DEBUG_LEVEL=1 python example_attention.py"
]
},
{
"cell_type": "markdown",
"id": "8ca99461",
"metadata": {},
"source": [
"`NVTE_DEBUG_LEVEL=2` allows us to find out more about the backend selection logic. Users are encouraged to double check the `config` and provide it to the Transformer Engine team if they would like to file a bug. "
]
},
{
"cell_type": "code",
"execution_count": 23,
"id": "d3637094",
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"\n",
"Run cuDNN attention...\n",
"[DEBUG | DotProductAttention]: Running with config={'transformer_engine_version': '1.10.0.dev0+ee85a91', 'compute_capability': 'sm90', 'flash_attn_version': <Version('2.4.2')>, 'cudnn_version': '9.3.0', 'qkv_type': <class 'torch.Tensor'>, 'qkv_dtype': torch.bfloat16, 'qkv_layout': 'bshd_bshd_bshd', 'batch_size': 2, 'num_heads': 16, 'num_gqa_groups': 16, 'max_seqlen_q': 512, 'max_seqlen_kv': 512, 'head_dim_qk': 64, 'head_dim_v': 64, 'attn_mask_type': 'no_mask', 'window_size': (-1, -1), 'alibi_slopes_shape': None, 'core_attention_bias_type': 'no_bias', 'core_attention_bias_shape': None, 'core_attention_bias_requires_grad': False, 'pad_between_seqs': False, 'attention_dropout': 0.0, 'context_parallel': False, 'deterministic': False, 'is_training': True, 'fp8': False, 'fp8_meta': {'fp8_checkpoint': False, 'fp8_group': None, 'recipe': margin=0, format=HYBRID, amax_history_len=1024, wgrad_override=False, fp8_dpa=False, fp8_mha=False}}\n",
"[DEBUG | DotProductAttention]: Disabling FlashAttention due to NVTE_FLASH_ATTN=0\n",
"[DEBUG | DotProductAttention]: Available backends = {FlashAttention=False, FusedAttention=True (sub-backend 1), UnfusedDotProductAttention=True}\n",
"[DEBUG | DotProductAttention]: Selected backend = FusedAttention (sub-backend 1)\n",
"[INFO | DotProductAttention]: Running with FusedAttention backend (sub-backend 1)\n",
"\n",
"Run flash-attention...\n",
"[DEBUG | DotProductAttention]: Running with config={'transformer_engine_version': '1.10.0.dev0+ee85a91', 'compute_capability': 'sm90', 'flash_attn_version': <Version('2.4.2')>, 'cudnn_version': '9.3.0', 'qkv_type': <class 'torch.Tensor'>, 'qkv_dtype': torch.bfloat16, 'qkv_layout': 'bshd_bshd_bshd', 'batch_size': 2, 'num_heads': 16, 'num_gqa_groups': 16, 'max_seqlen_q': 512, 'max_seqlen_kv': 512, 'head_dim_qk': 64, 'head_dim_v': 64, 'attn_mask_type': 'no_mask', 'window_size': (-1, -1), 'alibi_slopes_shape': None, 'core_attention_bias_type': 'no_bias', 'core_attention_bias_shape': None, 'core_attention_bias_requires_grad': False, 'pad_between_seqs': False, 'attention_dropout': 0.0, 'context_parallel': False, 'deterministic': False, 'is_training': True, 'fp8': False, 'fp8_meta': {'fp8_checkpoint': False, 'fp8_group': None, 'recipe': margin=0, format=HYBRID, amax_history_len=1024, wgrad_override=False, fp8_dpa=False, fp8_mha=False}}\n",
"[DEBUG | DotProductAttention]: Disabling FusedAttention due to NVTE_FUSED_ATTN=0\n",
"[DEBUG | DotProductAttention]: Available backends = {FlashAttention=True, FusedAttention=False, UnfusedDotProductAttention=True}\n",
"[DEBUG | DotProductAttention]: Selected backend = FlashAttention\n",
"[INFO | DotProductAttention]: Running with FlashAttention backend\n",
"\n",
"Test passed.\n"
]
}
],
"source": [
"!NVTE_DEBUG=1 NVTE_DEBUG_LEVEL=2 python example_attention.py"
]
},
{
"cell_type": "markdown",
"id": "611d8fdb",
"metadata": {},
"source": [
"### 2.2 User Control\n",
"\n",
"Users usually do not need to worry about the backend selection. However, if there is a convergence or performance issue encountered, Transformer Engine provides a few other environment variables for users to experiment with different backends.\n",
"\n",
"**flash-attention or cuDNN attention:**\n",
"Users can enable/disable the flash-attention backend or cuDNN attention backend via the following two environment variables in PyTorch.\n",
"```\n",
"NVTE_FLASH_ATTN = 0 # disables flash-attention; default = 1\n",
"NVTE_FUSED_ATTN = 0 # disables cuDNN attention; default = 1\n",
"```\n",
"\n",
"**cuDNN attention sub-backends:**\n",
"This environment variable allows users to express their preference of cuDNN attention sub-backends. However, the elected sub-backend will only be used *if* it is eligible, i.e. if it has support for the provided inputs and runtime environment.\n",
"```\n",
"NVTE_FUSED_ATTN_BACKEND = 0/1/2 # user preference of cuDNN sub-backend\n",
"```\n",
"\n",
"**Execution paths of cuDNN sub-backend 1:**\n",
"cuDNN attention sub-backend 1 also offers two execution paths: workspace optimization path and non-workspace optimization path. The workspace optimization path requires a larger amount of global memory, provides determinism, and offers bias gradient support. Before cuDNN 9.0, it also has 20-30% performance advantage over the non-workspace optimization path. But after cuDNN 9.0, it is 20-30% slower than the non-workspace optimization path.\n",
"\n",
"Users can experiment with these two paths through the following environment variable. However, please be aware of the possible Out-Of-Memory risks.\n",
"```\n",
"Before cuDNN 9.0:\n",
" NVTE_FUSED_ATTN_FORCE_WORKSPACE_OPT = 0 # disables workspace optimization path\n",
" NVTE_FUSED_ATTN_FORCE_WORKSPACE_OPT = 1 # enables workspace optimization path\n",
"\n",
"After cuDNN 9.0:\n",
" NVTE_ALLOW_NONDETERMINISTIC_ALGO = 1 # disables workspace optimization path\n",
" NVTE_ALLOW_NONDETERMINISTIC_ALGO = 0 # enables workspace optimization path\n",
"```\n",
"<div class=\"alert alert-info\">\n",
"<b>Note</b>\n",
" \n",
"Environment variables <code>NVTE_FLASH_ATTN</code>, <code>NVTE_FUSED_ATTN</code>, <code>NVTE_FUSED_ATTN_FORCE_WORKSPACE_OPT</code> and <code>NVTE_ALLOW_NONDETERMINISTIC_ALGO</code> are only supported in PyTorch, and will be added to JAX in the future.\n",
"</div>\n",
"\n",
"### 2.3 Example Tests\n",
"\n",
"Our [unit tests](https://github.com/NVIDIA/TransformerEngine/tree/main/tests) demonstrate the use of Transformer Engine dot product attention APIs. Users are encouraged to use them as a template when integrating Transformer Engine to their ML workflows.\n",
"\n",
"For example, in PyTorch, [test_dot_product_attention](https://github.com/NVIDIA/TransformerEngine/blob/main/tests/pytorch/fused_attn/test_fused_attn.py) offers a variety of use cases of `pytorch.DotProductAttention`, from data types, model configs, checkpointing, to QKV layouts."
]
},
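{
"cell_type": "markdown",
"id": "c3a9f1d2",
"metadata": {},
"source": [
"As a minimal sketch of the user controls described in Section 2.2 (and not the only way to apply them), the environment variables can also be set programmatically. Depending on the Transformer Engine version they may be read at import time, so the safest options are setting them in the shell, as in the `NVTE_DEBUG` cells above, or before importing `transformer_engine`:"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "c3a9f1d3",
"metadata": {},
"outputs": [],
"source": [
"import os\n",
"\n",
"# Hypothetical choice: force cuDNN attention and express a sub-backend preference\n",
"os.environ[\"NVTE_FLASH_ATTN\"] = \"0\"          # disable flash-attention\n",
"os.environ[\"NVTE_FUSED_ATTN\"] = \"1\"          # keep cuDNN attention enabled\n",
"os.environ[\"NVTE_FUSED_ATTN_BACKEND\"] = \"1\"  # prefer cuDNN sub-backend 1 when eligible\n",
"\n",
"import transformer_engine.pytorch as te  # imported after the variables are set"
]
},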
{
"cell_type": "markdown",
"id": "e60a2a3e",
"metadata": {},
"source": [
"## 3. Backend Support\n",
"\n",
"Transformer Engine supports commonly-used features such as self and cross attention, FP16/BF16 precisions, dropout, and checkpointing. But it also offers a range of other features. As of v2.0, Transformer Engine's attention backends have the following support matrix.\n",
"\n",
"| Attention Backend | Precision | Architecture | Sliding Window Attention | MQA/GQA | Multi-Latent Attention | Context Parallelism | Determinism Possible |\n",
"| :---------------- | :-------- | :----------- | :----------------------- | :------ | :--------------------- | :------------------ | :------------ |\n",
"| cuDNN attention (all frameworks) | BF16, FP16, FP8 (PyTorch only) | sm80+ | No | Yes | Yes | Yes (`bshd`,`sbhd`, `thd`) | Yes |\n",
"| flash-attention (PyTorch) | BF16, FP16 | sm80+ | Yes | Yes | No | Yes (`bshd`,`thd`) | Yes |\n",
"| Framework-native attention | BF16, FP16, FP32 | Any | No, unless used as a mask | Yes | Yes (PyTorch only) | No | Yes |\n",
"\n",
"Some unit tests are provided to serve as a starting point for integrating such features into users' models. For example,\n",
"- sliding window attention: [test_dpa_swa](https://github.com/NVIDIA/TransformerEngine/blob/main/tests/pytorch/fused_attn/test_fused_attn.py)\n",
"- MQA/GQA: [test_te_layer_mqa_gqa](https://github.com/NVIDIA/TransformerEngine/blob/main/tests/pytorch/fused_attn/test_fused_attn.py)\n",
"- Multi-Latent Attention: [test_dpa_mla](https://github.com/NVIDIA/TransformerEngine/blob/main/tests/pytorch/fused_attn/test_fused_attn.py)\n",
"- context parallelism: [test_cp_with_fused_attention](https://github.com/NVIDIA/TransformerEngine/blob/main/tests/pytorch/fused_attn/test_fused_attn_with_cp.py), [test_cp_with_flash_attention](https://github.com/NVIDIA/TransformerEngine/blob/main/tests/pytorch/fused_attn/test_fused_attn_with_cp.py)"
]
},
{
"cell_type": "markdown",
"id": "fbdcb327",
"metadata": {},
"source": [
"### 3.1 QKV Layout\n",
"\n",
"Transformer Engine supports various layouts of the query `q`, key `k`, value `v` tensors. It has defined 15 QKV layouts, which are grouped into 3 QKV formats and 5 QKV layout groups to help with similar memory/computational operations across different layouts. The mapping relationships of these layouts and groups are,\n",
"\n",
"| `qkv_layout` &nbsp; &nbsp; &nbsp; &nbsp; | `qkv_layout_group`=`3hd` | `h3d` | `hd_2hd` | `hd_h2d` | `hd_hd_hd` |\n",
"| ----------: | -----------: | -----: | ----------: | ----------: | -------------: |\n",
"| `qkv_format`=`sbhd` | `sb3hd` | `sbh3d` | `sbhd_sb2hd` | `sbhd_sbh2d` | `sbhd_sbhd_sbhd` |\n",
"| `bshd` | `bs3hd` | `bsh3d` | `bshd_bs2hd` | `bshd_bsh2d` | `bshd_bshd_bshd` |\n",
"| `thd` | `t3hd` | `th3d` | `thd_t2hd` | `thd_th2d` | `thd_thd_thd` |\n",
"\n",
"The notation system is that `b` stands for the batch size, `s` sequence length, `h` number of attention heads, `d` head dimension, and `t` the total number of tokens in the batch, i.e. `t = sum(s_i) for i in 0,...,b-1`. Here are a few examples of the layouts and their explanations to help clarify the definition.\n",
"\n",
"**qkv_layout=sb3hd:**\n",
"`q`, `k`, `v` are sequence first, i.e. `s` is the leading dimension in each tensor. They are different slices of one tensor `qkv`: `q, k, v = [qkv[:,:,i,:,:] for i in range(3)]`. They are interleaved at the `h * d` dimension.\n",
"\n",
"**qkv_layout=bshd_bsh2d:**\n",
"`q`, `k`, `v` are batch first, i.e. `b` is the leading dimension in each tensor. `q` is contiguous, and `k`, `v` are different slices of tensor `kv`: `k, v = [kv[:,:,:,i,:] for i in range(2)]`. `k`, `v` are interleaved at the `d` dimension.\n",
"\n",
"The `s` and `h` in `bsh2d` are the max sequence length and number of heads for `k`, `v`, which can be different from the `s` and `h` in `bshd` for `q`. We denoted them as the same for brevity reasons. Transformer Engine does differentiate their values for actual execution.\n",
"\n",
"**qkv_layout=thd_thd_thd:**\n",
"`q`, `k`, `v` have variable sequence lengths in a batch. They are all contiguous and have no interleaving.\n",
"\n",
"As of v2.0, Transformer Engine has the following support matrix.\n",
"\n",
"<table class=\"docutils align-default\">\n",
" <tr>\n",
" <th>Backend</th>\n",
" <th>Supported QKV Formats</th>\n",
" <th>Notes</th>\n",
" </tr>\n",
" <tr>\n",
" <td>flash-attention</td>\n",
" <td>`bshd`, `sbhd`, `thd`</td>\n",
" <td>PyTorch: 3 formats, i.e. 15 layouts</td>\n",
" </tr>\n",
" <tr>\n",
" <td rowspan=\"2\">cuDNN attention</td>\n",
" <td rowspan=\"2\">`bshd`, `sbhd`, `thd`</td>\n",
" <td>PyTorch: 3 formats, i.e. 15 layouts</td>\n",
" </tr>\n",
" <tr>\n",
" <td>\n",
" JAX: `bs3hd`, `bshd_bs2hd`, `bshd_bshd_bshd` layouts\n",
" </td> \n",
" </tr>\n",
" <tr>\n",
" <td>Framework-native attention</td>\n",
" <td>`bshd`, `sbhd`</td>\n",
" <td>PyTorch, JAX: 2 formats, i.e. 10 layouts</td>\n",
" </tr>\n",
"</table>\n",
"\n",
"Some example usage of the different layouts can be found at [test_dpa_qkv_layout](https://github.com/NVIDIA/TransformerEngine/blob/main/tests/pytorch/fused_attn/test_fused_attn.py) and [test_dpa_qkv_layout_thd](https://github.com/NVIDIA/TransformerEngine/blob/main/tests/pytorch/fused_attn/test_fused_attn.py). Transformer Engine also provides a utility function [transformer_engine.pytorch.dot_product_attention.utils.get_qkv_layout](https://github.com/NVIDIA/TransformerEngine/blob/main/transformer_engine/pytorch/attention.py) to help determine which layout a set of `q`, `k`, `v` tensors have (PyTorch only).\n",
"\n",
"<div class=\"alert alert-info\">\n",
"<b>Note</b>\n",
" \n",
"When RoPE is employed, the <code>qkv_layout</code> may change in Transformer Engine PyTorch through [get_qkv_layout](https://github.com/NVIDIA/TransformerEngine/blob/main/transformer_engine/pytorch/attention.py). This is due to the in-place nature of our RoPE implementations. We convert `q`, `k`, `v` tensors from their initial layout to the corresponding <code>hd_hd_hd</code> layout. For example, from <code>sbh3d</code> in <code>pytorch.MultiHeadAttention</code> before RoPE, to <code>sbhd_sbhd_sbhd</code> in <code>pytorch.DotProductAttention</code> after RoPE.\n",
"</div>\n"
]
},
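{
"cell_type": "markdown",
"id": "d4b8e2a1",
"metadata": {},
"source": [
"As a minimal illustration of this notation (plain PyTorch slicing with arbitrary shapes, not a Transformer Engine API), an `sb3hd` tensor can be viewed as three `sbhd` slices:"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "d4b8e2a2",
"metadata": {},
"outputs": [],
"source": [
"import torch\n",
"\n",
"# Hypothetical shapes, chosen only for illustration\n",
"s, b, h, d = 2048, 4, 16, 64\n",
"\n",
"# qkv_layout=sb3hd: q, k, v are interleaved slices of one packed tensor\n",
"qkv = torch.randn(s, b, 3, h, d, dtype=torch.float16)\n",
"q, k, v = [qkv[:, :, i, :, :] for i in range(3)]\n",
"print(q.shape, k.shape, v.shape)  # three sbhd tensors of shape [2048, 4, 16, 64]"
]
},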
{
"cell_type": "markdown",
"id": "855d9616",
"metadata": {},
"source": [
"### 3.2 Attention Mask\n",
"\n",
"Transformer Engine supports 7 mask types, and all the masks are defined as `True` masking out the corresponding element and `False` including the corresponding element in attention calculation.\n",
"\n",
"- `no_mask`, `padding`, `causal`, `causal_bottom_right`, `padding_causal`, `padding_causal_bottom_right`, `arbitrary`\n",
"\n",
"Different backends offer different support for attention mask. As of Transformer Engine 2.0,\n",
"\n",
"<table class=\"docutils align-default\">\n",
" <tr>\n",
" <th>Backend</th>\n",
" <th>Supported Mask Types</th>\n",
" <th>Requires `attention_mask`</th>\n",
" </tr>\n",
" <tr>\n",
" <td>flash-attention</td>\n",
" <td><li>`no_mask`, `causal` (self-attention),</li><li>`padding`, `padding_causal` (self-attention),</li><li>`causal_bottom_right`, `padding_causal_bottom_right`</li></td>\n",
" <td rowspan=\"3\"><li>`no_mask`, `causal` `causal_bottom_right`: No</li><li>`padding`, `padding_causal`, `padding_causal_bottom_right`: Yes if `cu_seqlens` not provided</li><li>`arbitrary`: Yes</li></td>\n",
" </tr>\n",
" <tr>\n",
" <td>cuDNN attention</td>\n",
" <td><li>`no_mask`, `causal`,</li><li>`padding`, `padding_causal`,</li><li>`causal_bottom_right`, `padding_causal_bottom_right`</li></td>\n",
" <td></td>\n",
" </tr>\n",
" <tr>\n",
" <td>Framework-native attention</td>\n",
" <td><li>All (PyTorch)</li><li>`no_mask`, `causal`, `padding` (Jax)</li></td>\n",
" </tr>\n",
" <tr>\n",
" <td></td>\n",
" </tr>\n",
"</table>\n",
"\n",
"**Padding masks:** For `padding`, `padding_causal`, `padding_causal_bottom_right` mask types, users need to provide sequence length information to help Transformer Engine figure out where each sequence ends in a batch. As of Transformer Engine 2.0, there are two options to do so in PyTorch and one in JAX.\n",
"\n",
"* PyTorch: When both options are provided by the user, `cu_seqlens` is preferred as there is no extra conversion needed.\n",
" - `cu_seqlens`: Users can provide cumulative sequence length tensors `cu_seqlens_q` and `cu_seqlens_kv` for `q` and `k`/`v` to the flash-attention or cuDNN attention backend. An example of `cu_seqlens` is `[0, 2, 6, 7]` for a batch of 3 `[aa000, bbbb0, c0000]`.\n",
" - `attention_mask`: Users can also provide `attention_mask` as an alternative, which will then be converted to `cu_seqlens`. For self-attention, `attention_mask` should be one single tensor in shape `[batch_size, 1, 1, seqlen_q]`, and for cross-attention, `attention_mask` should be a list of two tensors in shapes `[batch_size, 1, 1, seqlen_q]` and `[batch_size, 1, 1, seqlen_kv]`, respectively.\n",
"\n",
"\n",
"* JAX: Users should provide the `attention_mask` tensor in shape `[batch_size, 1, seqlen_q, seqlen_kv]`.\n",
"\n",
"**qkv_format=thd:** Transformer Engine extracts the max sequence length information from `q`, `k`, `v` if `max_seqlen_q` and `max_seqlen_kv` are not provided. This requires GPU-CPU copy and synchronization operations. For performance reasons, please set `max_seqlen_q` and `max_seqlen_kv` to their appropriate values for `thd` QKV format.\n",
"\n",
"**Arbitrary mask:** cuDNN does not support `Arbitrary` mask type as of v9.3. However, users can convert the mask to a regular `post_scale_bias` bias and achieve the same functionality. An example script for this conversion is [arbitrary_mask_to_post_scale_bias.py](https://raw.githubusercontent.com/NVIDIA/TransformerEngine/main/docs/examples/attention/arbitrary_mask_to_post_scale_bias.py).\n"
]
},
{
"cell_type": "code",
"execution_count": 33,
"id": "a1f25a9b",
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"Run with post_scale_bias:\n",
"[INFO | DotProductAttention]: Running with FusedAttention backend (sub-backend 1)\n",
"\n",
"Run with arbitrary mask:\n",
"[INFO | DotProductAttention]: Running with UnfusedDotProductAttention backend\n",
"\n",
"Test passed!\n"
]
}
],
"source": [
"!NVTE_DEBUG=1 NVTE_DEBUG_LEVEL=1 python arbitrary_mask_to_post_scale_bias.py"
]
},
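{
"cell_type": "markdown",
"id": "e5c7f3b1",
"metadata": {},
"source": [
"The `cu_seqlens` tensors mentioned in the padding-mask discussion above are just cumulative sums of the per-sequence lengths. A minimal sketch (plain PyTorch, not a Transformer Engine API) that reproduces the `[0, 2, 6, 7]` example for a batch of 3 sequences `[aa000, bbbb0, c0000]`:"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "e5c7f3b2",
"metadata": {},
"outputs": [],
"source": [
"import torch\n",
"\n",
"# Hypothetical per-sequence lengths for a batch of 3: [aa000, bbbb0, c0000]\n",
"seqlens_q = torch.tensor([2, 4, 1], dtype=torch.int32)\n",
"\n",
"# cu_seqlens is the prefix sum with a leading 0\n",
"cu_seqlens_q = torch.zeros(seqlens_q.numel() + 1, dtype=torch.int32)\n",
"cu_seqlens_q[1:] = torch.cumsum(seqlens_q, dim=0)\n",
"print(cu_seqlens_q)  # tensor([0, 2, 6, 7], dtype=torch.int32)"
]
},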
{
"cell_type": "markdown",
"id": "dda4a589",
"metadata": {},
"source": [
"Some more examples of running Transformer Engine with different attention masks can be found at [test_dpa_mask](https://github.com/NVIDIA/TransformerEngine/blob/main/tests/pytorch/fused_attn/test_fused_attn.py).\n",
"\n",
"### 3.3 Attention Bias\n",
"\n",
"Transformer Engine supports 4 attention bias types, `no_bias`, `pre_scale_bias`, `post_scale_bias`, and `ALiBi` (with/without custom slopes). As of Transformer Engine 2.0, their support matrix is as follows.\n",
"\n",
"<table class=\"docutils align-default\">\n",
" <tr>\n",
" <th>Backend</th>\n",
" <th>Bias Type</th>\n",
" <th>Bias Shape</th>\n",
" <th>Bias Data Type</th>\n",
" <th>Architecture</th>\n",
" </tr>\n",
" <tr>\n",
" <td>flash-attention</td>\n",
" <td>`no_bias`, `ALiBi` (with slopes)</td>\n",
" <td>N/A</td>\n",
" <td>ALiBi slopes: FP32</td>\n",
" <td>sm80+</td>\n",
" </tr>\n",
" <tr>\n",
" <td rowspan=\"2\">cuDNN attention</td>\n",
" <td>PyTorch: `no_bias`, `post_scale_bias`, `ALiBi` (without slopes)</td>\n",
" <td rowspan=\"2\">`post_scale_bias`: BHSS, 1HSS, B1SS, 11SS for forward, 1HSS for backward</td>\n",
" <td>`post_scale_bias`: same as QKV type</td>\n",
" <td>cuDNN 8.9.6+: sm90</td>\n",
" </tr>\n",
" <tr>\n",
" <td>JAX: `no_bias`, `post_scale_bias`</td> \n",
" <td>ALiBi slopes: FP32</td>\n",
" <td>cuDNN 9.0+: sm80+</td>\n",
" </tr>\n",
" <tr>\n",
" <td>Framework-native attention</td>\n",
" <td>`no_bias`, `pre_scale_bias`, `post_scale_bias`</td>\n",
" <td>`post_scale_bias`: BHSS, 1HSS, B1SS, 11SS </td>\n",
" <td>`post_scale_bias`: same as QKV type</td>\n",
" <td>sm80+</td>\n",
" </tr>\n",
"</table>\n",
"\n",
"The flash-attention backend enables `ALiBi` by asking user to pass in an `alibi_slopes` tensor, which can be the default slopes of vanilla ALiBi, or user-defined slopes. On the other hand, cuDNN attention supports `ALiBi` by taking in a `Boolean` flag, and it only supports vanilla ALiBi as of cuDNN 9.0.\n",
"\n",
"The framework-native backends do not explicitly support `ALiBi`, but users can convert `ALiBi` to a regular `post_scale_bias` bias to achieve the same effect. In PyTorch, this utility function, `transformer_engine.pytorch.attention.get_alibi`, can be used to help with the conversion.\n",
"\n",
"More examples of how to use the various attention biases are at [test_dpa_bias](https://github.com/NVIDIA/TransformerEngine/blob/main/tests/pytorch/fused_attn/test_fused_attn.py)."
]
},
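{
"cell_type": "markdown",
"id": "f6d1a4c2",
"metadata": {},
"source": [
"For reference, here is a hedged sketch (plain PyTorch, not Transformer Engine's `get_alibi` utility) of the vanilla ALiBi slopes for a power-of-two head count, following the geometric sequence `2^(-8*i/n)` for heads `i = 1..n` from the ALiBi paper. A tensor like this is the kind of input the flash-attention backend expects for its `alibi_slopes` argument:"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "f6d1a4c3",
"metadata": {},
"outputs": [],
"source": [
"import torch\n",
"\n",
"# Hypothetical head count; vanilla ALiBi defines its default slopes for power-of-two head counts\n",
"num_heads = 16\n",
"alibi_slopes = torch.tensor(\n",
"    [2 ** (-8 * i / num_heads) for i in range(1, num_heads + 1)], dtype=torch.float32\n",
")\n",
"print(alibi_slopes[:4])  # tensor([0.7071, 0.5000, 0.3536, 0.2500])"
]
},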
{
"cell_type": "markdown",
"id": "a0702339",
"metadata": {},
"source": [
"### 3.4 FP8 Attention\n",
"\n",
"A unique feature of Transformer Engine is its FP8 support, not only for the `Linear` layers but also for dot product attention. Transformer Engine's FP8 attention support is through its cuDNN attention sub-backend 2. Recall Figure 1: the two `MatMul` operations are performed in FP8 for computational efficiency, and the `SoftMax` operation is performed in FP32 for numerical accuracy.\n",
"\n",
"Transformer Engine supports FP8 attention through its [C APIs](../../api/c/fused_attn.rst), and [PyTorch API](../../api/pytorch.rst#transformer_engine.pytorch.DotProductAttention), as of v2.0. Its PyTorch API offers two options, both controlled through the FP8 recipe definition, `transformer_engine.common.recipe.DelayedScaling`.\n",
"\n",
"- `DelayedScaling.fp8_dpa=True (default=False)`: This enables the use of cuDNN attention sub-backend 2, when it does support the provided user inputs. The `FusedAttention` module for cuDNN attention takes FP16 or BF16 tensors as inputs, performs dot product attention in FP8, and returns attention logits in FP16 or BF16 (same as the input type). Casting operations are required to cast tensors to FP8 at the beginning, and back to FP16/BF16 at the end of the module.\n",
"\n",
"- `DelayedScaling.fp8_mha=True (default=False)`: This option, on top of `fp8_dpa=True`, removes the casting operations at the beginning and end of the `FusedAttention` module. This feature is experimental. \n",
"\n",
"Examples of using the two features are available at [test_dpa_fp8_vs_f16](https://github.com/NVIDIA/TransformerEngine/blob/main/tests/pytorch/fused_attn/test_fused_attn.py) and [test_mha_fp8_vs_f16](https://github.com/NVIDIA/TransformerEngine/blob/main/tests/pytorch/fused_attn/test_fused_attn.py). To disable FP8 attention for backward and only use it for forward, users can also set `NVTE_FP8_DPA_BWD=0 (default=1)`."
]
}
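,
{
"cell_type": "markdown",
"id": "a7e2b5d3",
"metadata": {},
"source": [
"Below is a minimal sketch of enabling FP8 attention through the recipe. It assumes a `DotProductAttention` module `dpa` and BF16 tensors `q`, `k`, `v` already exist, and that the runtime supports cuDNN attention sub-backend 2:"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "a7e2b5d4",
"metadata": {},
"outputs": [],
"source": [
"import transformer_engine.pytorch as te\n",
"from transformer_engine.common.recipe import DelayedScaling, Format\n",
"\n",
"# fp8_dpa=True requests FP8 dot product attention; fp8_mha=True would additionally\n",
"# remove the casts at the module boundaries (experimental)\n",
"fp8_recipe = DelayedScaling(fp8_format=Format.HYBRID, fp8_dpa=True)\n",
"\n",
"with te.fp8_autocast(enabled=True, fp8_recipe=fp8_recipe):\n",
"    out = dpa(q, k, v)"
]
}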
],
"metadata": {
"kernelspec": {
"display_name": "Python 3 (ipykernel)",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.10.12"
}
},
"nbformat": 4,
"nbformat_minor": 5
}
# Copyright (c) 2022-2025, NVIDIA CORPORATION & AFFILIATES. All rights reserved.
#
# See LICENSE for license information.
import os, sys, time
import subprocess
import pandas as pd
import numpy as np
import torch
import nvtx
import transformer_engine
from tests.pytorch.fused_attn.test_fused_attn import (
    ModelConfig,
    _get_attention_backends,
    _run_dot_product_attention,
)
# data type
dtype = torch.bfloat16
# number of iterations after 3 warmup iterations
num_iters = 3
# checkpointing
ckpt_attn = False
# workspace optimization path for cuDNN attention
workspace_opt = True
# QKV memory layout
qkv_layout = "bshd_bshd_bshd"
# sliding window attention
swa = False
# padding between sequences for qkv_format=thd
pad_between_seqs = False
# training mode
is_training = True
model_configs = {
    # test: b, h, hg, d, sq, skv, p, mask, bias
    "test_0": ModelConfig(2, 16, 16, 64, 512, 512, 0.0, "no_mask", "no_bias"),  # short seq
    "test_1": ModelConfig(2, 16, 16, 128, 2048, 2048, 0.0, "causal", "no_bias"),  # longer seq, mask
    "test_2": ModelConfig(2, 16, 16, 128, 2048, 2048, 0.0, "causal", "post_scale_bias"),  # bias
    "test_3": ModelConfig(2, 32, 4, 128, 8192, 8192, 0.0, "causal", "no_bias"),  # GQA
}
def example_attention(model, fused_attn_supported, flash_attn_supported):
    config = model_configs[model]
    if dtype == torch.bfloat16:
        tols = dict(atol=2.5e-2, rtol=2.5e-2)
    else:
        tols = dict(atol=5e-3, rtol=5e-3)

    if fused_attn_supported:
        print()
        print("Run cuDNN attention...")
        fused_attn_fwd, fused_attn_bwd = _run_dot_product_attention(
            dtype,
            config,
            "FusedAttention",
            ckpt_attn,
            qkv_layout,
            workspace_opt,
            pad_between_seqs,
            is_training,
        )

    if flash_attn_supported:
        print()
        print("Run flash-attention...")
        flash_attn_fwd, flash_attn_bwd = _run_dot_product_attention(
            dtype,
            config,
            "FlashAttention",
            ckpt_attn,
            qkv_layout,
            workspace_opt,
            pad_between_seqs,
            is_training,
        )

    if fused_attn_supported and flash_attn_supported:
        torch.testing.assert_close(fused_attn_fwd, flash_attn_fwd, **tols)
        for i, _ in enumerate(flash_attn_bwd):
            torch.testing.assert_close(fused_attn_bwd[i], flash_attn_bwd[i], **tols)
        print()
        print("Test passed.")


def main():
    models = ["test_0"]
    for model in models:
        config = model_configs[model]
        available_backends, fused_attn_backends = _get_attention_backends(
            config,
            qkv_dtype=dtype,
            qkv_layout=qkv_layout,
            window_size=config.window_size,
            pad_between_seqs=pad_between_seqs,
        )
        flash_attn_supported, fused_attn_supported, unfused_attn_supported = available_backends
        example_attention(model, fused_attn_supported, flash_attn_supported)


if __name__ == "__main__":
    main()
{
"cells": [
{
"cell_type": "markdown",
"id": "7b3e6954",
"metadata": {},
"source": [
"# Using FP8 with Transformer Engine\n",
"\n",
"H100 GPU introduced support for a new datatype, FP8 (8-bit floating point), enabling higher throughput of matrix multiplies and convolutions. In this example we will introduce the FP8 datatype and show how to use it with Transformer Engine.\n",
"\n",
"## Introduction to FP8\n",
"\n",
"### Structure\n",
"\n",
"The FP8 datatype supported by H100 is actually 2 distinct datatypes, useful in different parts of the training of neural networks:\n",
"\n",
"* E4M3 - it consists of 1 sign bit, 4 exponent bits and 3 bits of mantissa. It can store values up to +/-448 and `nan`.\n",
"* E5M2 - it consists of 1 sign bit, 5 exponent bits and 2 bits of mantissa. It can store values up to +/-57344, +/- `inf` and `nan`. The tradeoff of the increased dynamic range is lower precision of the stored values.\n",
"\n",
"<figure align=\"center\" id=\"fig_1\">\n",
"<img src=\"fp8_formats.png\" width=\"60%\">\n",
"<figcaption> Figure 1: Structure of the floating point datatypes. All of the values shown (in FP16, BF16, FP8 E4M3 and FP8 E5M2) are the closest representations of value 0.3952.</figcaption>\n",
"</figure>\n",
"\n",
"During training neural networks both of these types may be utilized. Typically forward activations and weights require more precision, so E4M3 datatype is best used during forward pass. In the backward pass, however, gradients flowing through the network typically are less susceptible to the loss of precision, but require higher dynamic range. Therefore they are best stored using E5M2 data format. H100 TensorCores provide support for any combination of these types as the inputs, enabling us to store each tensor using its preferred precision.\n",
"\n",
"### Mixed precision training - a quick introduction\n",
"\n",
"In order to understand how FP8 can be used for training Deep Learning models, it is useful to first remind ourselves how mixed precision works with other datatypes, especially FP16.\n",
"\n",
"Mixed precision recipe for FP16 training has 2 components: choosing which operations should be performed in FP16 and dynamic loss scaling.\n",
"\n",
"* Choosing the operations to be performed in FP16 precision requires analysis of the numerical behavior of the outputs with respect to inputs of the operation as well as the expected performance benefit. This enables marking operations like matrix multiplies, convolutions and normalization layers as safe, while leaving `norm` or `exp` operations as requiring high precision.\n",
"* Dynamic loss scaling enables avoiding both over- and underflows of the gradients during training. Those may happen since, while the dynamic range of FP16 is enough to store the distribution of the gradient values, this distribution may be centered around values too high or too low for FP16 to handle. Scaling the loss shifts those distributions (without affecting numerics by using only powers of 2) into the range representable in FP16. \n",
"\n",
"<figure align=\"center\">\n",
"<img src=\"loss_scaling.png\" width=\"50%\">\n",
"<figcaption> Figure 2: Scaling the loss enables shifting the gradient distribution into the representable range of FP16 datatype. </figcaption>\n",
"</figure>\n",
"\n",
"### Mixed precision training with FP8\n",
"\n",
"While the dynamic range provided by the FP8 types is sufficient to store any particular activation or gradient, it is not sufficient for all of them at the same time. This makes the single loss scaling factor strategy, which worked for FP16, infeasible for FP8 training and instead requires using distinct scaling factors for each FP8 tensor.\n",
"\n",
"There are multiple strategies for choosing a scaling factor that is appropriate for a given FP8 tensor:\n",
"\n",
"* just-in-time scaling. This strategy chooses the scaling factor based on the maximum of absolute values (amax) of the tensor being produced. In practice it is infeasible, as it requires multiple passes through data - the operator produces and writes out the output in higher precision, then the maximum absolute value of the output is found and applied to all values in order to obtain the final FP8 output. This results in a lot of overhead, severely diminishing gains from using FP8.\n",
"* delayed scaling. This strategy chooses the scaling factor based on the maximums of absolute values seen in some number of previous iterations. This enables full performance of FP8 computation, but requires storing the history of maximums as additional parameters of the FP8 operators. \n",
"\n",
"<figure align=\"center\">\n",
"<img src=\"delayed_scaling.png\" width=\"80%\">\n",
"<figcaption> Figure 3: Delayed scaling strategy. The FP8 operator uses scaling factor obtained using the history of amaxes (maximums of absolute values) seen in some number of previous iterations and produces both the FP8 output and the current amax, which gets stored in the history.</figcaption>\n",
"</figure>\n",
"\n",
"As one can see in Figure 3, delayed scaling strategy requires both storing the history of amaxes, but also choosing a recipe for converting that history into the scaling factor used in the next iteration."
]
},
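{
"cell_type": "markdown",
"id": "fp8-format-ranges-note",
"metadata": {},
"source": [
"To make the two formats and the delayed scaling idea concrete, the short sketch below (an illustration added to this primer, not part of any training recipe) prints the dynamic ranges of E4M3 and E5M2 as exposed by PyTorch and derives a toy scaling factor from an amax history; the exact conversion used by Transformer Engine is configured through the recipe objects described later."
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "fp8-format-ranges-demo",
"metadata": {},
"outputs": [],
"source": [
"import torch\n",
"\n",
"# Dynamic ranges of the two FP8 formats: 448 for E4M3, 57344 for E5M2\n",
"print(torch.finfo(torch.float8_e4m3fn).max, torch.finfo(torch.float8_e5m2).max)\n",
"\n",
"# Toy delayed-scaling computation: take the maximum over a short amax history\n",
"# (the \"max\" amax_compute_algo) and map it to the top of the E4M3 range.\n",
"amax_history = torch.tensor([0.9, 2.3, 1.7, 4.1])\n",
"scale = torch.finfo(torch.float8_e4m3fn).max / amax_history.max()\n",
"print(scale)  # ~109.3"
]
},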
{
"cell_type": "markdown",
"id": "f03b58ed-71e8-422a-95be-35c1cc60c4e2",
"metadata": {},
"source": [
"## MXFP8 and block scaling\n",
"\n",
"NVIDIA Blackwell architecture introduced support for a new variant of the FP8 format: [MXFP8](https://www.opencompute.org/documents/ocp-microscaling-formats-mx-v1-0-spec-final-pdf). \n",
"\n",
"### MXFP8 vs FP8\n",
"\n",
"The main difference between \"regular\" FP8 and MXFP8 lies in the granularity of the scaling. In FP8, each tensor has a single FP32 scaling factor, so all values in the tensor need to \"fit\" within the dynamic range of the FP8 datatype. This requires using the less precise E5M2 format to represent some tensors in the network (like gradients).\n",
"\n",
"MXFP8 addresses this by assigning a different scaling factor to each block of 32 [consecutive](#handling-transposes) values. This allows all values to be represented with the E4M3 datatype.\n",
"\n",
"<figure align=\"center\" id=\"fig_4\">\n",
"<img src=\"MXFP8_FP8_comparison_1.png\" width=\"100%\">\n",
"<figcaption> Figure 4: MXFP8 uses multiple scaling factors for a single tensor. The picture shows only 4 values per block for simplicity, but real MXFP8 has 32 values per block.</figcaption>\n",
"</figure>\n",
"\n",
"<figure align=\"center\" id=\"fig_5\">\n",
"<img src=\"MXFP8_FP8_comparison_2.png\" width=\"100%\">\n",
"<figcaption> Figure 5: Due to multiple scaling factors, tensor's dynamic range requirements are reduced and so E4M3 format can be used as far fewer elements get saturated to 0.</figcaption>\n",
"</figure>\n",
"\n",
"The second difference is the datatype used to store the scaling factors. FP8 uses FP32 (E8M23) while MXFP8 uses an 8-bit representation of a power of 2 (E8M0).\n",
"\n",
"<figure align=\"center\" id=\"fig_6\">\n",
"<img src=\"E8M0.png\" width=\"100%\">\n",
"<figcaption> Figure 6: Structure of the E8M0 datatype used for storing scaling factors in MXFP8.</figcaption>\n",
"</figure>\n",
"\n",
"### Handling transposes\n",
"\n",
"The forward and backward passes of linear layers involve multiple matrix multiplications with different reduction dimensions. Blackwell Tensor Cores require MXFP8 data to be \"consecutive\" over the reduction dimension, so MXFP8 training uses non-transposed and transposed MXFP8 tensors at different points. However, while transposing FP8 data is numerically trivial, transposing MXFP8 data requires requantization.\n",
"\n",
"To avoid loss of precision connected with this double quantization, Transformer Engine creates both regular and transposed copies of the tensor from the original high precision input.\n",
"\n",
"<figure align=\"center\" id=\"fig_7\">\n",
"<img src=\"linear_mxfp8.png\" width=\"80%\">\n",
"<figcaption> Figure 7: Linear layer in MXFP8. Calculating both forward and backward pass requires tensors quantized in both directions.</figcaption>\n",
"</figure>"
]
},
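{
"cell_type": "markdown",
"id": "mxfp8-block-scaling-note",
"metadata": {},
"source": [
"The toy sketch below (illustrative only - it is not Transformer Engine's MXFP8 implementation) quantizes blocks of 32 consecutive values with power-of-two scales, mirroring the E8M0 + E4M3 layout described above, and measures the resulting quantization error."
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "mxfp8-block-scaling-demo",
"metadata": {},
"outputs": [],
"source": [
"import torch\n",
"\n",
"x = torch.randn(4, 128)\n",
"blocks = x.view(4, -1, 32)                           # 32 consecutive values per block\n",
"amax = blocks.abs().amax(dim=-1, keepdim=True).clamp(min=2**-126)\n",
"e4m3_max = torch.finfo(torch.float8_e4m3fn).max      # 448\n",
"\n",
"# E8M0 scales are powers of two: pick the smallest one that keeps every\n",
"# value of the block inside the E4M3 range.\n",
"scales = torch.exp2(torch.ceil(torch.log2(amax / e4m3_max)))\n",
"x_mxfp8 = (blocks / scales).to(torch.float8_e4m3fn)  # quantized data\n",
"x_back = x_mxfp8.to(torch.float32) * scales          # dequantize to inspect the error\n",
"print((blocks - x_back).abs().max())"
]
},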
{
"cell_type": "markdown",
"id": "cf5e0b0d",
"metadata": {},
"source": [
"## Using FP8 with Transformer Engine\n",
"\n",
"Transformer Engine library provides tools enabling easy to use training with FP8 datatype using FP8 delayed scaling and MXFP8 strategies.\n",
"\n",
"### FP8 recipe\n",
"\n",
"The [DelayedScaling](../api/common.rst#transformer_engine.common.recipe.DelayedScaling) recipe from the `transformer_engine.common.recipe` module stores all of the required options for training with FP8 delayed scaling: length of the amax history to use for scaling factor computation, FP8 data format, etc.\n",
"Similarly, [MXFP8BlockScaling](../api/common.rst#transformer_engine.common.recipe.MXFP8BlockScaling) from the same module may be used to enable MXFP8 training."
]
},
{
"cell_type": "code",
"execution_count": 1,
"id": "0c8fd0ef",
"metadata": {},
"outputs": [],
"source": [
"from transformer_engine.common.recipe import Format, DelayedScaling, MXFP8BlockScaling\n",
"\n",
"fp8_format = Format.HYBRID # E4M3 during forward pass, E5M2 during backward pass\n",
"fp8_recipe = DelayedScaling(fp8_format=fp8_format, amax_history_len=16, amax_compute_algo=\"max\")\n",
"mxfp8_format = Format.E4M3 # E4M3 used everywhere\n",
"mxfp8_recipe = MXFP8BlockScaling(fp8_format=mxfp8_format)"
]
},
{
"cell_type": "markdown",
"id": "f9591eb5",
"metadata": {},
"source": [
"This recipe is then used to configure the FP8 training."
]
},
{
"cell_type": "markdown",
"id": "734d3934",
"metadata": {},
"source": [
"### FP8 autocasting\n",
"\n",
"Not every operation is safe to be performed using FP8. All of the modules provided by Transformer Engine library were designed to provide maximum performance benefit from FP8 datatype while maintaining accuracy. In order to enable FP8 operations, TE modules need to be wrapped inside the [fp8_autocast](../api/pytorch.rst#transformer_engine.pytorch.fp8_autocast) context manager."
]
},
{
"cell_type": "code",
"execution_count": 2,
"id": "f8b1ff7f",
"metadata": {},
"outputs": [],
"source": [
"import transformer_engine.pytorch as te\n",
"import torch\n",
"\n",
"torch.manual_seed(12345)\n",
"\n",
"my_linear = te.Linear(768, 768, bias=True)\n",
"\n",
"inp = torch.rand((1024, 768)).cuda()\n",
"\n",
"with te.fp8_autocast(enabled=True, fp8_recipe=fp8_recipe):\n",
" out_fp8 = my_linear(inp)"
]
},
{
"cell_type": "markdown",
"id": "e41161f1",
"metadata": {},
"source": [
"The `fp8_autocast` context manager hides the complexity of handling FP8:\n",
"\n",
"- All FP8-safe operations have their inputs cast to FP8\n",
"- Amax history is updated\n",
"- New scaling factors are computed and ready for the next iteration\n",
"\n",
"<div class=\"alert alert-info\">\n",
"\n",
"<b>Note</b>\n",
"\n",
"Support for FP8 in the Linear layer of Transformer Engine is currently limited to tensors with shapes where both dimensions are divisible by 16. In terms of the input to the full Transformer network, this typically requires padding sequence length to be multiple of 16.\n",
"\n",
"</div>"
]
},
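{
"cell_type": "markdown",
"id": "fp8-padding-note",
"metadata": {},
"source": [
"As an illustration of the padding mentioned in the note (a minimal sketch, not part of the original tutorial), `seq` below is a hypothetical `[seq_len, batch, hidden]` activation whose sequence length is rounded up to the next multiple of 16:"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "fp8-padding-demo",
"metadata": {},
"outputs": [],
"source": [
"import torch\n",
"import torch.nn.functional as F\n",
"\n",
"seq = torch.rand(1000, 4, 768).cuda()                        # hypothetical [seq_len, batch, hidden] input\n",
"padded_len = (seq.size(0) + 15) // 16 * 16                   # round the sequence length up to a multiple of 16\n",
"seq = F.pad(seq, (0, 0, 0, 0, 0, padded_len - seq.size(0)))  # zero-pad the sequence dimension\n",
"seq.shape  # torch.Size([1008, 4, 768])"
]
},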
{
"cell_type": "markdown",
"id": "f7bb2de9",
"metadata": {},
"source": [
"### Handling backward pass\n",
"\n",
"When a model is run inside the `fp8_autocast` region, especially in multi-GPU training, some communication is required in order to synchronize the scaling factors and amax history. In order to perform that communication without introducing much overhead, `fp8_autocast` context manager aggregates the tensors before performing the communication.\n",
"\n",
"Due to this aggregation the backward call needs to happen outside of the `fp8_autocast` context manager. It has no impact on the computation precision - the precision of the backward pass is determined by the precision of the forward pass."
]
},
{
"cell_type": "code",
"execution_count": 3,
"id": "e012bc8d",
"metadata": {},
"outputs": [],
"source": [
"loss_fp8 = out_fp8.mean()\n",
"\n",
"loss_fp8.backward() # This backward pass uses FP8, since out_fp8 was calculated inside fp8_autocast\n",
"\n",
"out_fp32 = my_linear(inp)\n",
"loss_fp32 = out_fp32.mean()\n",
"loss_fp32.backward() # This backward pass does not use FP8, since out_fp32 was calculated outside fp8_autocast"
]
},
{
"cell_type": "markdown",
"id": "1a6723ca",
"metadata": {},
"source": [
"### Precision\n",
"\n",
"If we compare the results of the FP32 and FP8 execution, we will see that they are relatively close, but different:"
]
},
{
"cell_type": "code",
"execution_count": 4,
"id": "41e9a37b",
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"tensor([[ 0.2276, 0.2627, 0.3001, ..., 0.0346, 0.2211, 0.1188],\n",
" [-0.0963, -0.3725, 0.1717, ..., 0.0901, 0.0522, -0.3472],\n",
" [ 0.4526, 0.3482, 0.5976, ..., -0.0687, -0.0382, 0.1566],\n",
" ...,\n",
" [ 0.1698, 0.6061, 0.0385, ..., -0.2875, -0.1152, -0.0260],\n",
" [ 0.0679, 0.2946, 0.2751, ..., -0.2284, 0.0517, -0.1441],\n",
" [ 0.1865, 0.2353, 0.9172, ..., 0.1085, 0.1135, 0.1438]],\n",
" device='cuda:0', grad_fn=<_LinearBackward>)"
]
},
"execution_count": 4,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"out_fp8"
]
},
{
"cell_type": "code",
"execution_count": 5,
"id": "b328ae0e",
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"tensor([[ 0.2373, 0.2674, 0.2980, ..., 0.0233, 0.2498, 0.1131],\n",
" [-0.0767, -0.3778, 0.1862, ..., 0.0858, 0.0676, -0.3369],\n",
" [ 0.4615, 0.3593, 0.5813, ..., -0.0779, -0.0349, 0.1422],\n",
" ...,\n",
" [ 0.1914, 0.6038, 0.0382, ..., -0.2847, -0.0991, -0.0423],\n",
" [ 0.0864, 0.2895, 0.2719, ..., -0.2388, 0.0772, -0.1541],\n",
" [ 0.2019, 0.2275, 0.9027, ..., 0.1022, 0.1300, 0.1444]],\n",
" device='cuda:0', grad_fn=<_LinearBackward>)"
]
},
"execution_count": 5,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"out_fp32"
]
},
{
"cell_type": "markdown",
"id": "a9413c0a",
"metadata": {},
"source": [
"That happens because in the FP8 case both the input and weights are cast to FP8 before the computation. We can see this if instead of the original inputs we use the inputs representable in FP8 (using a function defined in [quickstart_utils.py](quickstart_utils.py)):"
]
},
{
"cell_type": "code",
"execution_count": 6,
"id": "ea939581",
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"tensor([[ 0.2276, 0.2629, 0.3000, ..., 0.0346, 0.2211, 0.1188],\n",
" [-0.0963, -0.3724, 0.1717, ..., 0.0901, 0.0522, -0.3470],\n",
" [ 0.4526, 0.3479, 0.5976, ..., -0.0686, -0.0382, 0.1566],\n",
" ...,\n",
" [ 0.1698, 0.6062, 0.0385, ..., -0.2876, -0.1152, -0.0260],\n",
" [ 0.0679, 0.2947, 0.2750, ..., -0.2284, 0.0516, -0.1441],\n",
" [ 0.1865, 0.2353, 0.9170, ..., 0.1085, 0.1135, 0.1438]],\n",
" device='cuda:0', grad_fn=<_LinearBackward>)\n"
]
}
],
"source": [
"from quickstart_utils import cast_to_representable\n",
"\n",
"inp_representable = cast_to_representable(inp)\n",
"my_linear.weight.data = cast_to_representable(my_linear.weight.data)\n",
"\n",
"out_fp32_representable = my_linear(inp_representable)\n",
"\n",
"print(out_fp32_representable)"
]
},
{
"cell_type": "markdown",
"id": "03e703bd",
"metadata": {},
"source": [
"This time the difference is really small:"
]
},
{
"cell_type": "code",
"execution_count": 7,
"id": "78f1c2eb",
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"tensor([[ 4.9591e-05, -1.9073e-04, 9.5367e-05, ..., -3.8147e-06,\n",
" 4.1962e-05, 2.2888e-05],\n",
" [ 2.2888e-05, -3.4332e-05, 2.2888e-05, ..., 2.6703e-05,\n",
" 5.3406e-05, -1.4114e-04],\n",
" [-3.8147e-05, 2.6703e-04, -3.8147e-06, ..., -5.7220e-05,\n",
" 4.1962e-05, -1.9073e-05],\n",
" ...,\n",
" [ 1.1444e-05, -7.2479e-05, -3.8147e-06, ..., 5.3406e-05,\n",
" -1.5259e-05, 2.2888e-05],\n",
" [ 4.9591e-05, -9.5367e-05, 6.8665e-05, ..., -1.5259e-05,\n",
" 7.6294e-05, 4.5776e-05],\n",
" [-1.5259e-05, -7.6294e-06, 1.8692e-04, ..., -3.0518e-05,\n",
" -4.5776e-05, 7.6294e-06]], device='cuda:0', grad_fn=<SubBackward0>)"
]
},
"execution_count": 7,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"out_fp8 - out_fp32_representable"
]
},
{
"cell_type": "markdown",
"id": "63ff9b8c",
"metadata": {},
"source": [
"The differences in result coming from FP8 execution do not matter during the training process, but it is good to understand them, e.g. during debugging the model."
]
}
],
"metadata": {
"kernelspec": {
"display_name": "Python 3 (ipykernel)",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.12.3"
}
},
"nbformat": 4,
"nbformat_minor": 5
}
{
"cells": [
{
"cell_type": "markdown",
"id": "da9fd6a8",
"metadata": {},
"source": [
"# Getting Started\n",
"\n",
"## Overview\n",
"\n",
"Transformer Engine (TE) is a library for accelerating Transformer models on NVIDIA GPUs, providing better performance with lower memory utilization in both training and inference. It provides support for 8-bit floating point (FP8) precision on Hopper GPUs, implements a collection of highly optimized building blocks for popular Transformer architectures, and exposes an automatic-mixed-precision-like API that can be used seamlessly with your PyTorch code. It also includes a framework-agnostic C++ API that can be integrated with other deep learning libraries to enable FP8 support for Transformers.\n",
"\n",
"## Let's build a Transformer layer!\n",
"\n",
"<div class=\"alert alert-info\">\n",
"\n",
"<b>Summary</b>\n",
" \n",
"We build a basic Transformer layer using regular PyTorch modules. This will be our baseline for later comparisons with Transformer Engine.\n",
"\n",
"</div>\n",
"\n",
"Let's start with creating a GPT encoder layer using plain PyTorch. Figure 1 shows the overall structure.\n",
"\n",
"<figure align=\"center\">\n",
"<img src=\"transformer_layer.png\" width=\"20%\">\n",
"<figcaption> Figure 1: Structure of a GPT encoder layer.</figcaption>\n",
"</figure>\n",
"\n",
"We construct the components as follows:\n",
"\n",
"- `LayerNorm`: `torch.nn.LayerNorm`\n",
"- `QKV Projection`: `torch.nn.Linear` (conceptually three `Linear` layers for Q, K, and V separately, but we fuse into a single `Linear` layer that is three times larger)\n",
"- `DotProductAttention`: `DotProductAttention` from [quickstart_utils.py](quickstart_utils.py)\n",
"- `Projection`: `torch.nn.Linear`\n",
"- `Dropout`: `torch.nn.Dropout`\n",
"- `MLP`: `BasicMLP` from [quickstart_utils.py](quickstart_utils.py)\n",
"\n",
"Over the course of this tutorial we will use a few modules and helper functions defined in [quickstart_utils.py](quickstart_utils.py). Putting it all together:"
]
},
{
"cell_type": "code",
"execution_count": 1,
"id": "2be43d64",
"metadata": {},
"outputs": [],
"source": [
"import torch\n",
"import quickstart_utils as utils\n",
"\n",
"class BasicTransformerLayer(torch.nn.Module):\n",
" def __init__(\n",
" self,\n",
" hidden_size: int,\n",
" ffn_hidden_size: int,\n",
" num_attention_heads: int,\n",
" layernorm_eps: int = 1e-5,\n",
" attention_dropout: float = 0.1,\n",
" hidden_dropout: float = 0.1,\n",
" ):\n",
" super().__init__()\n",
" self.num_attention_heads = num_attention_heads\n",
" self.kv_channels = hidden_size // num_attention_heads\n",
" self.ln1 = torch.nn.LayerNorm(hidden_size, eps=layernorm_eps)\n",
" self.qkv_projection = torch.nn.Linear(hidden_size, 3 * hidden_size, bias=True)\n",
" self.attention = utils.DotProductAttention(\n",
" num_attention_heads=num_attention_heads,\n",
" kv_channels=self.kv_channels,\n",
" attention_dropout=attention_dropout,\n",
" )\n",
" self.projection = torch.nn.Linear(hidden_size, hidden_size, bias=True)\n",
" self.dropout = torch.nn.Dropout(hidden_dropout)\n",
" self.ln2 = torch.nn.LayerNorm(hidden_size, eps=layernorm_eps)\n",
" self.mlp = utils.BasicMLP(\n",
" hidden_size=hidden_size,\n",
" ffn_hidden_size=ffn_hidden_size,\n",
" ) \n",
" \n",
" def forward(\n",
" self, \n",
" x: torch.Tensor, \n",
" attention_mask: torch.Tensor\n",
" ) -> torch.Tensor:\n",
" res = x\n",
" x = self.ln1(x)\n",
" \n",
" # Fused QKV projection\n",
" qkv = self.qkv_projection(x)\n",
" qkv = qkv.view(qkv.size(0), qkv.size(1), self.num_attention_heads, 3 * self.kv_channels)\n",
" q, k, v = torch.split(qkv, qkv.size(3) // 3, dim=3)\n",
" \n",
" x = self.attention(q, k, v, attention_mask)\n",
" x = self.projection(x)\n",
" x = self.dropout(x)\n",
" x = res + x\n",
" res = x\n",
" x = self.ln2(x)\n",
" x = self.mlp(x)\n",
" \n",
" return x + res"
]
},
{
"cell_type": "markdown",
"id": "40724d1d",
"metadata": {},
"source": [
"That's it! We now have a simple Transformer layer. We can test it:"
]
},
{
"cell_type": "code",
"execution_count": 2,
"id": "a786f0ea",
"metadata": {},
"outputs": [],
"source": [
"# Layer configuration\n",
"hidden_size = 4096\n",
"sequence_length = 2048\n",
"batch_size = 4\n",
"ffn_hidden_size = 16384\n",
"num_attention_heads = 32\n",
"dtype = torch.float16\n",
"\n",
"# Synthetic data\n",
"x = torch.rand(sequence_length, batch_size, hidden_size).cuda().to(dtype=dtype)\n",
"dy = torch.rand(sequence_length, batch_size, hidden_size).cuda().to(dtype=dtype)"
]
},
{
"cell_type": "code",
"execution_count": 3,
"id": "ffdbfb7a",
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"BasicTransformerLayer(\n",
" (ln1): LayerNorm((4096,), eps=1e-05, elementwise_affine=True)\n",
" (qkv_projection): Linear(in_features=4096, out_features=12288, bias=True)\n",
" (attention): DotProductAttention(\n",
" (dropout): Dropout(p=0.1, inplace=False)\n",
" )\n",
" (projection): Linear(in_features=4096, out_features=4096, bias=True)\n",
" (dropout): Dropout(p=0.1, inplace=False)\n",
" (ln2): LayerNorm((4096,), eps=1e-05, elementwise_affine=True)\n",
" (mlp): BasicMLP(\n",
" (linear1): Linear(in_features=4096, out_features=16384, bias=True)\n",
" (linear2): Linear(in_features=16384, out_features=4096, bias=True)\n",
" )\n",
")"
]
},
"execution_count": 3,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"basic_transformer = BasicTransformerLayer(\n",
" hidden_size,\n",
" ffn_hidden_size,\n",
" num_attention_heads,\n",
")\n",
"basic_transformer.to(dtype=dtype).cuda()"
]
},
{
"cell_type": "code",
"execution_count": 4,
"id": "0162ad40",
"metadata": {},
"outputs": [],
"source": [
"torch.manual_seed(1234)\n",
"y = basic_transformer(x, attention_mask=None)"
]
},
{
"cell_type": "code",
"execution_count": 5,
"id": "65ae6dd6",
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"Mean time: 43.0663916015625 ms\n"
]
}
],
"source": [
"utils.speedometer(\n",
" basic_transformer,\n",
" x,\n",
" dy,\n",
" forward_kwargs = { \"attention_mask\": None },\n",
")"
]
},
{
"cell_type": "markdown",
"id": "43717e36",
"metadata": {},
"source": [
"## Meet Transformer Engine\n",
"\n",
"<div class=\"alert alert-info\">\n",
"\n",
"<b>Summary</b>\n",
" \n",
"We modify the example Transformer layer to include the simplest TE modules: `Linear` and `LayerNorm`.\n",
"\n",
"</div>\n",
"\n",
"Now that we have a basic Transformer layer, let's use Transformer Engine to speed up the training. "
]
},
{
"cell_type": "code",
"execution_count": 6,
"id": "004d3c92",
"metadata": {},
"outputs": [],
"source": [
"import transformer_engine.pytorch as te"
]
},
{
"cell_type": "markdown",
"id": "1931f911",
"metadata": {},
"source": [
"TE provides a set of PyTorch modules that can be used to build Transformer layers. The simplest of the provided modules are the `Linear` and `LayerNorm` layers, which we can use instead of `torch.nn.Linear` and `torch.nn.LayerNorm`. Let's modify `BasicTransformerLayer`:"
]
},
{
"cell_type": "code",
"execution_count": 7,
"id": "1f44db50",
"metadata": {},
"outputs": [],
"source": [
"class BasicTEMLP(torch.nn.Module):\n",
" def __init__(self,\n",
" hidden_size: int,\n",
" ffn_hidden_size: int) -> None:\n",
" super().__init__()\n",
" self.linear1 = te.Linear(hidden_size, ffn_hidden_size, bias=True)\n",
" self.linear2 = te.Linear(ffn_hidden_size, hidden_size, bias=True)\n",
"\n",
" def forward(self, x):\n",
" x = self.linear1(x)\n",
" x = torch.nn.functional.gelu(x, approximate='tanh')\n",
" x = self.linear2(x)\n",
" return x \n",
" \n",
"class BasicTETransformerLayer(torch.nn.Module):\n",
" def __init__(self,\n",
" hidden_size: int,\n",
" ffn_hidden_size: int,\n",
" num_attention_heads: int,\n",
" layernorm_eps: int = 1e-5,\n",
" attention_dropout: float = 0.1,\n",
" hidden_dropout: float = 0.1):\n",
" super().__init__()\n",
" self.num_attention_heads = num_attention_heads\n",
" self.kv_channels = hidden_size // num_attention_heads\n",
" self.ln1 = te.LayerNorm(hidden_size, eps=layernorm_eps)\n",
" self.qkv_projection = te.Linear(hidden_size, 3 * hidden_size, bias=True)\n",
" self.attention = utils.DotProductAttention(\n",
" num_attention_heads=num_attention_heads,\n",
" kv_channels=self.kv_channels,\n",
" attention_dropout=attention_dropout,\n",
" )\n",
" self.projection = te.Linear(hidden_size, hidden_size, bias=True)\n",
" self.dropout = torch.nn.Dropout(hidden_dropout)\n",
" self.ln2 = te.LayerNorm(hidden_size, eps=layernorm_eps)\n",
" self.mlp = BasicTEMLP(\n",
" hidden_size=hidden_size,\n",
" ffn_hidden_size=ffn_hidden_size,\n",
" )\n",
" \n",
" def forward(self, \n",
" x: torch.Tensor, \n",
" attention_mask: torch.Tensor):\n",
" res = x\n",
" x = self.ln1(x)\n",
" \n",
" # Fused QKV projection\n",
" qkv = self.qkv_projection(x)\n",
" qkv = qkv.view(qkv.size(0), qkv.size(1), self.num_attention_heads, 3 * self.kv_channels)\n",
" q, k, v = torch.split(qkv, qkv.size(3) // 3, dim=3)\n",
" \n",
" x = self.attention(q, k, v, attention_mask)\n",
" x = self.projection(x)\n",
" x = self.dropout(x)\n",
" x = res + x\n",
" res = x\n",
" x = self.ln2(x)\n",
" x = self.mlp(x)\n",
" \n",
" return x + res"
]
},
{
"cell_type": "code",
"execution_count": 8,
"id": "916531e8",
"metadata": {},
"outputs": [],
"source": [
"basic_te_transformer = BasicTETransformerLayer(\n",
" hidden_size, \n",
" ffn_hidden_size, \n",
" num_attention_heads,\n",
")\n",
"basic_te_transformer.to(dtype=dtype).cuda()\n",
"utils.share_parameters_with_basic_te_model(basic_te_transformer, basic_transformer)"
]
},
{
"cell_type": "code",
"execution_count": 9,
"id": "3643fa54",
"metadata": {},
"outputs": [],
"source": [
"torch.manual_seed(1234)\n",
"y = basic_te_transformer(x, attention_mask=None)"
]
},
{
"cell_type": "code",
"execution_count": 10,
"id": "10b92894",
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"Mean time: 43.1413232421875 ms\n"
]
}
],
"source": [
"utils.speedometer(\n",
" basic_te_transformer,\n",
" x,\n",
" dy,\n",
" forward_kwargs = { \"attention_mask\": None },\n",
")"
]
},
{
"cell_type": "markdown",
"id": "3f990226",
"metadata": {},
"source": [
"## Fused TE Modules\n",
"\n",
"<div class=\"alert alert-info\">\n",
"\n",
"<b>Summary</b>\n",
" \n",
"We optimize the example Transformer layer with TE modules for fused operations.\n",
"\n",
"</div>\n",
"\n",
"The `Linear` layer is enough to build any Transformer model and it enables usage of Transformer Engine even for very custom Transformers. However, having more knowledge about the model allows for additional optimizations like kernel fusion, increasing the achievable speedup.\n",
"\n",
"Transformer Engine therefore provides coarser modules that span multiple layers:\n",
"\n",
"* `LayerNormLinear`\n",
"* `LayerNormMLP`\n",
"* `TransformerLayer`\n",
"\n",
"Building a third iteration of our Transformer layer with `LayerNormLinear` and `LayerNormMLP`:"
]
},
{
"cell_type": "code",
"execution_count": 11,
"id": "c55eae1f",
"metadata": {},
"outputs": [],
"source": [
"class FusedTETransformerLayer(torch.nn.Module):\n",
" def __init__(self,\n",
" hidden_size: int,\n",
" ffn_hidden_size: int,\n",
" num_attention_heads: int,\n",
" layernorm_eps: int = 1e-5,\n",
" attention_dropout: float = 0.1,\n",
" hidden_dropout: float = 0.1):\n",
" super().__init__()\n",
" self.num_attention_heads = num_attention_heads\n",
" self.kv_channels = hidden_size // num_attention_heads\n",
" self.ln_qkv = te.LayerNormLinear(hidden_size, 3 * hidden_size, eps=layernorm_eps, bias=True)\n",
" self.attention = utils.DotProductAttention(\n",
" num_attention_heads=num_attention_heads,\n",
" kv_channels=self.kv_channels,\n",
" attention_dropout=attention_dropout,\n",
" )\n",
" self.projection = te.Linear(hidden_size, hidden_size, bias=True)\n",
" self.dropout = torch.nn.Dropout(hidden_dropout)\n",
" self.ln_mlp = te.LayerNormMLP(hidden_size, ffn_hidden_size, eps=layernorm_eps, bias=True)\n",
" \n",
" \n",
" def forward(self, \n",
" x: torch.Tensor, \n",
" attention_mask: torch.Tensor):\n",
" res = x\n",
" qkv = self.ln_qkv(x)\n",
" \n",
" # Split qkv into query, key and value\n",
" qkv = qkv.view(qkv.size(0), qkv.size(1), self.num_attention_heads, 3 * self.kv_channels)\n",
" q, k, v = torch.split(qkv, qkv.size(3) // 3, dim=3)\n",
" \n",
" x = self.attention(q, k, v, attention_mask)\n",
" x = self.projection(x)\n",
" x = self.dropout(x)\n",
" x = res + x\n",
" res = x\n",
" x = self.ln_mlp(x)\n",
" \n",
" return x + res"
]
},
{
"cell_type": "code",
"execution_count": 12,
"id": "85949421",
"metadata": {},
"outputs": [],
"source": [
"fused_te_transformer = FusedTETransformerLayer(hidden_size, ffn_hidden_size, num_attention_heads)\n",
"fused_te_transformer.to(dtype=dtype).cuda()\n",
"utils.share_parameters_with_fused_te_model(fused_te_transformer, basic_transformer)"
]
},
{
"cell_type": "code",
"execution_count": 13,
"id": "2c263e71",
"metadata": {},
"outputs": [],
"source": [
"torch.manual_seed(1234)\n",
"y = fused_te_transformer(x, attention_mask=None)"
]
},
{
"cell_type": "code",
"execution_count": 14,
"id": "24e101bc",
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"Mean time: 43.1981201171875 ms\n"
]
}
],
"source": [
"utils.speedometer(\n",
" fused_te_transformer,\n",
" x,\n",
" dy,\n",
" forward_kwargs = { \"attention_mask\": None },\n",
")"
]
},
{
"cell_type": "markdown",
"id": "33f13c26",
"metadata": {},
"source": [
"Finally, the `TransformerLayer` module is convenient for creating standard Transformer architectures and it provides the highest degree of performance optimization:"
]
},
{
"cell_type": "code",
"execution_count": 15,
"id": "ec8c3685",
"metadata": {},
"outputs": [],
"source": [
"te_transformer = te.TransformerLayer(hidden_size, ffn_hidden_size, num_attention_heads)\n",
"te_transformer.to(dtype=dtype).cuda()\n",
"utils.share_parameters_with_transformerlayer_te_model(te_transformer, basic_transformer)"
]
},
{
"cell_type": "code",
"execution_count": 16,
"id": "e48cd590",
"metadata": {},
"outputs": [],
"source": [
"torch.manual_seed(1234)\n",
"y = te_transformer(x, attention_mask=None)"
]
},
{
"cell_type": "code",
"execution_count": 17,
"id": "3ec3707d-e63f-4899-8308-b11c55b5caa4",
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"Mean time: 39.99169921875 ms\n"
]
}
],
"source": [
"utils.speedometer(\n",
" te_transformer,\n",
" x,\n",
" dy,\n",
" forward_kwargs = { \"attention_mask\": None },\n",
")"
]
},
{
"cell_type": "markdown",
"id": "4034c3eb-8958-49f2-85f6-30c94977d884",
"metadata": {},
"source": [
"## Enabling FP8\n",
"\n",
"<div class=\"alert alert-info\">\n",
"\n",
"<b>Summary</b>\n",
" \n",
"We configure a TE module to perform compute in FP8.\n",
"\n",
"</div>\n",
"\n",
"Enabling FP8 support is very simple in Transformer Engine. We just need to wrap the modules within an [fp8_autocast](../api/pytorch.rst#transformer_engine.pytorch.fp8_autocast) context manager. Note that fp8_autocast should only be used to wrap the forward pass and must exit before starting a backward pass. See the [FP8 tutorial](fp8_primer.ipynb) for a detailed explanation of FP8 recipes and the supported options."
]
},
{
"cell_type": "code",
"execution_count": 18,
"id": "31256aa7-3d5e-425c-91ab-502b1326a748",
"metadata": {},
"outputs": [],
"source": [
"from transformer_engine.common.recipe import Format, DelayedScaling\n",
"\n",
"te_transformer = te.TransformerLayer(hidden_size, ffn_hidden_size, num_attention_heads)\n",
"te_transformer.to(dtype=dtype).cuda()\n",
"utils.share_parameters_with_transformerlayer_te_model(te_transformer, basic_transformer)\n",
"\n",
"fp8_format = Format.HYBRID\n",
"fp8_recipe = DelayedScaling(fp8_format=fp8_format, amax_history_len=16, amax_compute_algo=\"max\")\n",
"torch.manual_seed(1234)\n",
"with te.fp8_autocast(enabled=True, fp8_recipe=fp8_recipe):\n",
" y = te_transformer(x, attention_mask=None)"
]
},
{
"cell_type": "code",
"execution_count": 19,
"id": "793ebd2d-b84b-47bc-811a-7991df8500aa",
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"Mean time: 28.61394775390625 ms\n"
]
}
],
"source": [
"utils.speedometer(\n",
" te_transformer,\n",
" x,\n",
" dy,\n",
" forward_kwargs = { \"attention_mask\": None },\n",
" fp8_autocast_kwargs = { \"enabled\": True, \"fp8_recipe\": fp8_recipe },\n",
")"
]
}
],
"metadata": {
"language_info": {
"name": "python"
}
},
"nbformat": 4,
"nbformat_minor": 5
}
# Copyright (c) 2022-2025, NVIDIA CORPORATION & AFFILIATES. All rights reserved.
#
# See LICENSE for license information.
import math
from typing import Optional
import torch
import transformer_engine.pytorch as te
def speedometer(
module: torch.nn.Module,
input: torch.Tensor,
output_grad: torch.Tensor,
forward_kwargs: dict = {},
fp8_autocast_kwargs: Optional[dict] = None,
timing_iters: int = 50,
warmup_iters: int = 50,
) -> None:
"""Measure average run time for a PyTorch module
Performs forward and backward passes.
"""
start = torch.cuda.Event(enable_timing=True)
end = torch.cuda.Event(enable_timing=True)
if fp8_autocast_kwargs is None:
fp8_autocast_kwargs = {"enabled": False}
# Warmup runs
torch.cuda.synchronize()
for _ in range(warmup_iters):
with te.fp8_autocast(**fp8_autocast_kwargs):
output = module(input, **forward_kwargs)
output.backward(output_grad)
# Timing runs
start.record()
for _ in range(timing_iters):
with te.fp8_autocast(**fp8_autocast_kwargs):
output = module(input, **forward_kwargs)
output.backward(output_grad)
end.record()
torch.cuda.synchronize()
print(f"Mean time: {start.elapsed_time(end)/timing_iters} ms")
class DotProductAttention(torch.nn.Module):
"""Attention operation in Transformer layer
Built with plain PyTorch modules.
"""
def __init__(
self,
num_attention_heads: int,
kv_channels: int,
attention_dropout: float,
) -> None:
super().__init__()
self.projection_size = kv_channels * num_attention_heads
self.hidden_size_per_attention_head = kv_channels
self.norm_factor = math.sqrt(self.hidden_size_per_attention_head)
self.dropout = torch.nn.Dropout(attention_dropout)
def masked_softmax(self, inp: torch.Tensor, mask: Optional[torch.Tensor]) -> torch.Tensor:
if mask is not None:
inp.masked_fill_(mask, -10000.0)
return torch.nn.Softmax(dim=-1)(inp)
def forward(
self,
query: torch.Tensor,
key: torch.Tensor,
value: torch.Tensor,
attention_mask: Optional[torch.Tensor] = None,
) -> torch.Tensor:
b = query.size(1)
np = query.size(2)
sq = query.size(0)
sk = key.size(0)
hn = value.size(3)
# [sq, b, np, hn] -> [sq, b * np, hn]
query = query.view(sq, b * np, -1)
# [sk, b, np, hn] -> [sk, b * np, hn]
key = key.view(sk, b * np, -1)
bmm1 = (
torch.bmm(query.transpose(0, 1), key.transpose(0, 1).transpose(1, 2)) / self.norm_factor
)
# change view to [b, np, sq, sk]
attention_scores = bmm1.view(b, np, sq, sk)
attention_probs = self.masked_softmax(attention_scores, attention_mask)
attention_probs = self.dropout(attention_probs)
# change view [sk, b * np, hn]
value = value.view(sk, b * np, -1)
# change view [b * np, sq, sk]
attention_probs = attention_probs.view(b * np, sq, -1)
# matmul: [b * np, sq, hn]
context = torch.bmm(attention_probs, value.transpose(0, 1))
# change view [b, np, sq, hn]
context = context.view(b, np, sq, hn)
# [b, np, sq, hn] --> [sq, b, np, hn]
context = context.permute(2, 0, 1, 3).contiguous()
# [sq, b, np, hn] --> [sq, b, hp]
context = context.view(sq, b, self.projection_size)
return context
class BasicMLP(torch.nn.Module):
"""Feed-forward network in Transformer layer
Built with plain PyTorch modules.
"""
def __init__(
self,
hidden_size: int,
ffn_hidden_size: int,
) -> None:
super().__init__()
self.linear1 = torch.nn.Linear(hidden_size, ffn_hidden_size, bias=True)
self.linear2 = torch.nn.Linear(ffn_hidden_size, hidden_size, bias=True)
def forward(self, x: torch.Tensor) -> torch.Tensor:
x = self.linear1(x)
x = torch.nn.functional.gelu(x, approximate="tanh")
x = self.linear2(x)
return x
def share_parameters_with_basic_te_model(te_model, basic_model):
"""Initialize parameters for TE Transformer layer with basic modules
Parameter values are copied from pure PyTorch implementation.
"""
te_model.ln1.weight = basic_model.ln1.weight
te_model.ln1.bias = basic_model.ln1.bias
te_model.qkv_projection.weight = basic_model.qkv_projection.weight
te_model.qkv_projection.bias = basic_model.qkv_projection.bias
te_model.projection.weight = basic_model.projection.weight
te_model.projection.bias = basic_model.projection.bias
te_model.ln2.weight = basic_model.ln2.weight
te_model.ln2.bias = basic_model.ln2.bias
te_model.mlp.linear1.weight = basic_model.mlp.linear1.weight
te_model.mlp.linear1.bias = basic_model.mlp.linear1.bias
te_model.mlp.linear2.weight = basic_model.mlp.linear2.weight
te_model.mlp.linear2.bias = basic_model.mlp.linear2.bias
def share_parameters_with_fused_te_model(te_model, basic_model):
"""Initialize parameters for TE Transformer layer with fused modules
Parameter values are copied from pure PyTorch implementation.
"""
te_model.ln_qkv.layer_norm_weight = basic_model.ln1.weight
te_model.ln_qkv.layer_norm_bias = basic_model.ln1.bias
te_model.ln_qkv.weight = basic_model.qkv_projection.weight
te_model.ln_qkv.bias = basic_model.qkv_projection.bias
te_model.projection.weight = basic_model.projection.weight
te_model.projection.bias = basic_model.projection.bias
te_model.ln_mlp.layer_norm_weight = basic_model.ln2.weight
te_model.ln_mlp.layer_norm_bias = basic_model.ln2.bias
te_model.ln_mlp.fc1_weight = basic_model.mlp.linear1.weight
te_model.ln_mlp.fc1_bias = basic_model.mlp.linear1.bias
te_model.ln_mlp.fc2_weight = basic_model.mlp.linear2.weight
te_model.ln_mlp.fc2_bias = basic_model.mlp.linear2.bias
def share_parameters_with_transformerlayer_te_model(te_model, basic_model):
"""Initialize parameters for monolithic TE Transformer layer
Parameter values are copied from pure PyTorch implementation.
"""
te_model.self_attention.layernorm_qkv.layer_norm_weight = basic_model.ln1.weight
te_model.self_attention.layernorm_qkv.layer_norm_bias = basic_model.ln1.bias
te_model.self_attention.layernorm_qkv.weight = basic_model.qkv_projection.weight
te_model.self_attention.layernorm_qkv.bias = basic_model.qkv_projection.bias
te_model.self_attention.proj.weight = basic_model.projection.weight
te_model.self_attention.proj.bias = basic_model.projection.bias
te_model.layernorm_mlp.layer_norm_weight = basic_model.ln2.weight
te_model.layernorm_mlp.layer_norm_bias = basic_model.ln2.bias
te_model.layernorm_mlp.fc1_weight = basic_model.mlp.linear1.weight
te_model.layernorm_mlp.fc1_bias = basic_model.mlp.linear1.bias
te_model.layernorm_mlp.fc2_weight = basic_model.mlp.linear2.weight
te_model.layernorm_mlp.fc2_bias = basic_model.mlp.linear2.bias
def cast_to_representable(inp, scale=1.0, fp8_format="e4m3"):
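    """Round a tensor to values exactly representable in FP8.

    The tensor is quantized to the requested FP8 format and immediately
    dequantized back to its original precision.
    """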
from transformer_engine.pytorch.tensor.float8_tensor import Float8Quantizer
import transformer_engine_torch as tex
fp8_type = tex.DType.kFloat8E4M3 if fp8_format == "e4m3" else tex.DType.kFloat8E5M2
scale = torch.ones(1, dtype=torch.float32, device="cuda") * scale
amax_history = torch.zeros(1, 1, dtype=torch.float32, device="cuda")
quantizer = Float8Quantizer(scale=scale, amax=amax_history, fp8_dtype=fp8_type)
ret = quantizer(inp)
ret = ret.dequantize()
return ret
<svg version="1.1" viewBox="0.0 0.0 960.0 540.0" fill="none" stroke="none" stroke-linecap="square" stroke-miterlimit="10" xmlns:xlink="http://www.w3.org/1999/xlink" xmlns="http://www.w3.org/2000/svg"><clipPath id="g2be4f0f543d_0_206.0"><path d="m0 0l960.0 0l0 540.0l-960.0 0l0 -540.0z" clip-rule="nonzero"/></clipPath><g clip-path="url(#g2be4f0f543d_0_206.0)"><path fill="#ffffff" d="m0 0l960.0 0l0 540.0l-960.0 0z" fill-rule="evenodd"/><path fill="#000000" fill-opacity="0.0" d="m331.9357 55.79134l296.126 0l0 448.85037l-296.126 0z" fill-rule="evenodd"/><path stroke="#595959" stroke-width="1.0" stroke-linejoin="round" stroke-linecap="butt" d="m331.9357 55.79134l296.126 0l0 448.85037l-296.126 0z" fill-rule="evenodd"/><path fill="#000000" fill-opacity="0.0" d="m352.72308 90.92782l254.55121 0l0 350.2362l-254.55121 0z" fill-rule="evenodd"/><path stroke="#595959" stroke-width="1.0" stroke-linejoin="round" stroke-linecap="butt" d="m352.72308 90.92782l254.55121 0l0 350.2362l-254.55121 0z" fill-rule="evenodd"/><path fill="#ffe599" d="m383.47375 153.8622l82.92914 0l0 105.98427l-82.92914 0z" fill-rule="evenodd"/><path stroke="#595959" stroke-width="1.0" stroke-linejoin="round" stroke-linecap="butt" d="m383.47375 153.8622l82.92914 0l0 105.98427l-82.92914 0z" fill-rule="evenodd"/><path fill="#c9daf8" d="m392.29932 185.41951l65.25986 0l0 16.661407l-65.25986 0z" fill-rule="evenodd"/><path stroke="#595959" stroke-width="1.0" stroke-linejoin="round" stroke-linecap="butt" d="m392.29932 185.41951l65.25986 0l0 16.661407l-65.25986 0z" fill-rule="evenodd"/><path fill="#000000" d="m403.88208 197.49022l2.921875 -7.625l1.09375 0l3.125 7.625l-1.15625 0l-0.890625 -2.3125l-3.1875 0l-0.828125 2.3125l-1.078125 0zm2.203125 -3.125l2.578125 0l-0.796875 -2.125q-0.359375 -0.953125 -0.53125 -1.578125q-0.15625 0.734375 -0.421875 1.453125l-0.828125 2.25zm7.6701355 2.28125l0.125 0.828125q-0.390625 0.09375 -0.703125 0.09375q-0.5 0 -0.78125 -0.15625q-0.28125 -0.171875 -0.40625 -0.4375q-0.109375 -0.265625 -0.109375 -1.109375l0 -3.171875l-0.6875 0l0 -0.734375l0.6875 0l0 -1.359375l0.9375 -0.5625l0 1.921875l0.9375 0l0 0.734375l-0.9375 0l0 3.234375q0 0.390625 0.046875 0.515625q0.046875 0.109375 0.15625 0.1875q0.109375 0.0625 0.328125 0.0625q0.15625 0 0.40625 -0.046875zm2.9606628 0l0.125 0.828125q-0.390625 0.09375 -0.703125 0.09375q-0.5 0 -0.78125 -0.15625q-0.28125 -0.171875 -0.40625 -0.4375q-0.109375 -0.265625 -0.109375 -1.109375l0 -3.171875l-0.6875 0l0 -0.734375l0.6875 0l0 -1.359375l0.9375 -0.5625l0 1.921875l0.9375 0l0 0.734375l-0.9375 0l0 3.234375q0 0.390625 0.046875 0.515625q0.046875 0.109375 0.15625 0.1875q0.109375 0.0625 0.328125 0.0625q0.15625 0 0.40625 -0.046875zm4.6950073 -0.9375l0.96875 0.125q-0.234375 0.84375 -0.859375 1.3125q-0.609375 0.46875 -1.578125 0.46875q-1.203125 0 -1.921875 -0.75q-0.703125 -0.75 -0.703125 -2.09375q0 -1.390625 0.71875 -2.15625q0.71875 -0.78125 1.859375 -0.78125q1.109375 0 1.8125 0.765625q0.703125 0.75 0.703125 2.125q0 0.078125 0 0.234375l-4.125 0q0.046875 0.921875 0.515625 1.40625q0.46875 0.484375 1.15625 0.484375q0.515625 0 0.875 -0.265625q0.359375 -0.28125 0.578125 -0.875zm-3.078125 -1.515625l3.09375 0q-0.0625 -0.6875 -0.359375 -1.046875q-0.453125 -0.53125 -1.15625 -0.53125q-0.640625 0 -1.09375 0.4375q-0.4375 0.421875 -0.484375 1.140625zm5.2233887 3.296875l0 -5.53125l0.84375 0l0 0.796875q0.609375 -0.921875 1.75 -0.921875q0.5 0 0.921875 0.1875q0.421875 0.171875 0.625 0.46875q0.21875 0.296875 0.296875 0.6875q0.046875 0.265625 0.046875 0.921875l0 3.390625l-0.9375 0l0 -3.359375q0 -0.578125 -0.109375 
-0.859375q-0.109375 -0.28125 -0.390625 -0.453125q-0.265625 -0.171875 -0.640625 -0.171875q-0.59375 0 -1.03125 0.390625q-0.4375 0.375 -0.4375 1.4375l0 3.015625l-0.9375 0zm7.973358 -0.84375l0.125 0.828125q-0.390625 0.09375 -0.703125 0.09375q-0.5 0 -0.78125 -0.15625q-0.28125 -0.171875 -0.40625 -0.4375q-0.109375 -0.265625 -0.109375 -1.109375l0 -3.171875l-0.6875 0l0 -0.734375l0.6875 0l0 -1.359375l0.9375 -0.5625l0 1.921875l0.9375 0l0 0.734375l-0.9375 0l0 3.234375q0 0.390625 0.046875 0.515625q0.046875 0.109375 0.15625 0.1875q0.109375 0.0625 0.328125 0.0625q0.15625 0 0.40625 -0.046875zm0.91378784 -5.703125l0 -1.078125l0.9375 0l0 1.078125l-0.9375 0zm0 6.546875l0 -5.53125l0.9375 0l0 5.53125l-0.9375 0zm2.0237122 -2.765625q0 -1.53125 0.84375 -2.265625q0.71875 -0.625 1.734375 -0.625q1.140625 0 1.859375 0.75q0.734375 0.75 0.734375 2.0625q0 1.0625 -0.328125 1.6875q-0.3125 0.609375 -0.921875 0.953125q-0.609375 0.328125 -1.34375 0.328125q-1.15625 0 -1.875 -0.734375q-0.703125 -0.75 -0.703125 -2.15625zm0.953125 0q0 1.0625 0.46875 1.59375q0.46875 0.53125 1.15625 0.53125q0.703125 0 1.15625 -0.53125q0.46875 -0.53125 0.46875 -1.625q0 -1.015625 -0.46875 -1.546875q-0.453125 -0.53125 -1.15625 -0.53125q-0.6875 0 -1.15625 0.53125q-0.46875 0.515625 -0.46875 1.578125zm5.3171387 2.765625l0 -5.53125l0.84375 0l0 0.796875q0.609375 -0.921875 1.75 -0.921875q0.5 0 0.921875 0.1875q0.421875 0.171875 0.625 0.46875q0.21875 0.296875 0.296875 0.6875q0.046875 0.265625 0.046875 0.921875l0 3.390625l-0.9375 0l0 -3.359375q0 -0.578125 -0.109375 -0.859375q-0.109375 -0.28125 -0.390625 -0.453125q-0.265625 -0.171875 -0.640625 -0.171875q-0.59375 0 -1.03125 0.390625q-0.4375 0.375 -0.4375 1.4375l0 3.015625l-0.9375 0z" fill-rule="nonzero"/><path fill="#ead1dc" d="m392.29932 231.87492l65.25986 0l0 16.661407l-65.25986 0z" fill-rule="evenodd"/><path stroke="#595959" stroke-width="1.0" stroke-linejoin="round" stroke-linecap="butt" d="m392.29932 231.87492l65.25986 0l0 16.661407l-65.25986 0z" fill-rule="evenodd"/><path fill="#000000" d="m414.7707 243.94563l0 -7.625l1.515625 0l1.796875 5.390625q0.25 0.765625 0.375 1.140625q0.125 -0.421875 0.40625 -1.234375l1.828125 -5.296875l1.34375 0l0 7.625l-0.96875 0l0 -6.390625l-2.21875 6.390625l-0.90625 0l-2.203125 -6.5l0 6.5l-0.96875 0zm8.8611145 0l0 -7.625l1.015625 0l0 6.71875l3.75 0l0 0.90625l-4.765625 0zm5.973358 0l0 -7.625l2.875 0q0.75 0 1.15625 0.0625q0.5625 0.09375 0.9375 0.359375q0.390625 0.265625 0.609375 0.75q0.234375 0.46875 0.234375 1.03125q0 0.96875 -0.625 1.65625q-0.609375 0.671875 -2.234375 0.671875l-1.953125 0l0 3.09375l-1.0 0zm1.0 -4.0l1.96875 0q0.984375 0 1.390625 -0.359375q0.421875 -0.375 0.421875 -1.03125q0 -0.484375 -0.25 -0.8125q-0.234375 -0.34375 -0.640625 -0.453125q-0.25 -0.078125 -0.9375 -0.078125l-1.953125 0l0 2.734375z" fill-rule="nonzero"/><path fill="#f6b26b" d="m406.1472 164.56071l37.5748 0l0 11.937012l-37.5748 0z" fill-rule="evenodd"/><path stroke="#595959" stroke-width="1.0" stroke-linejoin="round" stroke-linecap="butt" d="m406.1472 164.56071l37.5748 0l0 11.937012l-37.5748 0z" fill-rule="evenodd"/><path fill="#000000" d="m417.29953 172.92921l0 -4.765625l0.65625 0l2.5 3.734375l0 -3.734375l0.609375 0l0 4.765625l-0.65625 0l-2.5 -3.75l0 3.75l-0.609375 0zm4.5256653 -1.71875q0 -0.96875 0.53125 -1.421875q0.453125 -0.390625 1.09375 -0.390625q0.703125 0 1.15625 0.46875q0.453125 0.46875 0.453125 1.28125q0 0.671875 -0.203125 1.0625q-0.1875 0.375 -0.578125 0.59375q-0.375 0.203125 -0.828125 0.203125q-0.734375 0 -1.1875 -0.453125q-0.4375 -0.46875 -0.4375 -1.34375zm0.609375 0q0 0.65625 0.28125 
0.984375q0.296875 0.328125 0.734375 0.328125q0.4375 0 0.71875 -0.328125q0.296875 -0.328125 0.296875 -1.015625q0 -0.640625 -0.296875 -0.96875q-0.296875 -0.328125 -0.71875 -0.328125q-0.4375 0 -0.734375 0.328125q-0.28125 0.328125 -0.28125 1.0zm3.3112793 1.71875l0 -3.453125l0.515625 0l0 0.53125q0.203125 -0.375 0.375 -0.484375q0.171875 -0.125 0.375 -0.125q0.296875 0 0.609375 0.1875l-0.203125 0.546875q-0.21875 -0.125 -0.4375 -0.125q-0.1875 0 -0.34375 0.125q-0.140625 0.109375 -0.21875 0.3125q-0.09375 0.3125 -0.09375 0.671875l0 1.8125l-0.578125 0zm2.2165833 0l0 -3.453125l0.53125 0l0 0.484375q0.15625 -0.25 0.421875 -0.40625q0.28125 -0.15625 0.625 -0.15625q0.375 0 0.625 0.15625q0.25 0.15625 0.34375 0.453125q0.40625 -0.609375 1.0625 -0.609375q0.515625 0 0.78125 0.296875q0.28125 0.28125 0.28125 0.859375l0 2.375l-0.578125 0l0 -2.171875q0 -0.359375 -0.0625 -0.5q-0.046875 -0.15625 -0.203125 -0.25q-0.140625 -0.09375 -0.34375 -0.09375q-0.359375 0 -0.609375 0.25q-0.234375 0.234375 -0.234375 0.765625l0 2.0l-0.59375 0l0 -2.25q0 -0.375 -0.140625 -0.5625q-0.140625 -0.203125 -0.46875 -0.203125q-0.25 0 -0.453125 0.125q-0.203125 0.125 -0.296875 0.375q-0.09375 0.25 -0.09375 0.71875l0 1.796875l-0.59375 0z" fill-rule="nonzero"/><path fill="#b6d7a8" d="m406.1472 211.01541l37.5748 0l0 11.937012l-37.5748 0z" fill-rule="evenodd"/><path stroke="#595959" stroke-width="1.0" stroke-linejoin="round" stroke-linecap="butt" d="m406.1472 211.01541l37.5748 0l0 11.937012l-37.5748 0z" fill-rule="evenodd"/><path fill="#000000" d="m417.29953 219.38391l0 -4.765625l0.65625 0l2.5 3.734375l0 -3.734375l0.609375 0l0 4.765625l-0.65625 0l-2.5 -3.75l0 3.75l-0.609375 0zm4.5256653 -1.71875q0 -0.96875 0.53125 -1.421875q0.453125 -0.390625 1.09375 -0.390625q0.703125 0 1.15625 0.46875q0.453125 0.46875 0.453125 1.28125q0 0.671875 -0.203125 1.0625q-0.1875 0.375 -0.578125 0.59375q-0.375 0.203125 -0.828125 0.203125q-0.734375 0 -1.1875 -0.453125q-0.4375 -0.46875 -0.4375 -1.34375zm0.609375 0q0 0.65625 0.28125 0.984375q0.296875 0.328125 0.734375 0.328125q0.4375 0 0.71875 -0.328125q0.296875 -0.328125 0.296875 -1.015625q0 -0.640625 -0.296875 -0.96875q-0.296875 -0.328125 -0.71875 -0.328125q-0.4375 0 -0.734375 0.328125q-0.28125 0.328125 -0.28125 1.0zm3.3112793 1.71875l0 -3.453125l0.515625 0l0 0.53125q0.203125 -0.375 0.375 -0.484375q0.171875 -0.125 0.375 -0.125q0.296875 0 0.609375 0.1875l-0.203125 0.546875q-0.21875 -0.125 -0.4375 -0.125q-0.1875 0 -0.34375 0.125q-0.140625 0.109375 -0.21875 0.3125q-0.09375 0.3125 -0.09375 0.671875l0 1.8125l-0.578125 0zm2.2165833 0l0 -3.453125l0.53125 0l0 0.484375q0.15625 -0.25 0.421875 -0.40625q0.28125 -0.15625 0.625 -0.15625q0.375 0 0.625 0.15625q0.25 0.15625 0.34375 0.453125q0.40625 -0.609375 1.0625 -0.609375q0.515625 0 0.78125 0.296875q0.28125 0.28125 0.28125 0.859375l0 2.375l-0.578125 0l0 -2.171875q0 -0.359375 -0.0625 -0.5q-0.046875 -0.15625 -0.203125 -0.25q-0.140625 -0.09375 -0.34375 -0.09375q-0.359375 0 -0.609375 0.25q-0.234375 0.234375 -0.234375 0.765625l0 2.0l-0.59375 0l0 -2.25q0 -0.375 -0.140625 -0.5625q-0.140625 -0.203125 -0.46875 -0.203125q-0.25 0 -0.453125 0.125q-0.203125 0.125 -0.296875 0.375q-0.09375 0.25 -0.09375 0.71875l0 1.796875l-0.59375 0z" fill-rule="nonzero"/><path fill="#fff2cc" d="m378.0643 263.19815l93.7323 0l0 11.937012l-93.7323 0z" fill-rule="evenodd"/><path fill="#595959" d="m389.50467 272.04666l0 -5.734375l0.75 0l0 5.0625l2.828125 0l0 0.671875l-3.578125 0zm4.3710938 0l0 -5.734375l0.703125 0l0 5.734375l-0.703125 0zm4.4960938 -0.515625q-0.390625 0.328125 -0.75 0.46875q-0.359375 0.140625 -0.78125 
0.140625q-0.671875 0 -1.046875 -0.328125q-0.359375 -0.34375 -0.359375 -0.859375q0 -0.3125 0.125 -0.5625q0.140625 -0.25 0.359375 -0.390625q0.234375 -0.15625 0.515625 -0.234375q0.203125 -0.0625 0.625 -0.109375q0.859375 -0.109375 1.25 -0.25q0.015625 -0.140625 0.015625 -0.171875q0 -0.4375 -0.203125 -0.609375q-0.265625 -0.234375 -0.796875 -0.234375q-0.5 0 -0.734375 0.171875q-0.234375 0.171875 -0.359375 0.609375l-0.6875 -0.09375q0.09375 -0.4375 0.3125 -0.703125q0.21875 -0.28125 0.625 -0.421875q0.40625 -0.15625 0.9375 -0.15625q0.53125 0 0.859375 0.125q0.34375 0.125 0.5 0.328125q0.15625 0.1875 0.21875 0.46875q0.03125 0.1875 0.03125 0.65625l0 0.9375q0 0.96875 0.046875 1.234375q0.046875 0.265625 0.171875 0.5l-0.734375 0q-0.109375 -0.21875 -0.140625 -0.515625zm-0.0625 -1.5625q-0.375 0.15625 -1.140625 0.265625q-0.4375 0.0625 -0.625 0.140625q-0.171875 0.078125 -0.265625 0.234375q-0.09375 0.140625 -0.09375 0.328125q0 0.28125 0.203125 0.46875q0.21875 0.1875 0.625 0.1875q0.40625 0 0.71875 -0.171875q0.328125 -0.1875 0.46875 -0.5q0.109375 -0.234375 0.109375 -0.703125l0 -0.25zm1.8085938 2.078125l0 -4.15625l0.625 0l0 0.59375q0.203125 -0.3125 0.515625 -0.5q0.328125 -0.1875 0.75 -0.1875q0.453125 0 0.75 0.203125q0.296875 0.1875 0.421875 0.53125q0.484375 -0.734375 1.28125 -0.734375q0.609375 0 0.9375 0.34375q0.34375 0.34375 0.34375 1.0625l0 2.84375l-0.703125 0l0 -2.609375q0 -0.421875 -0.078125 -0.609375q-0.0625 -0.1875 -0.25 -0.296875q-0.171875 -0.125 -0.40625 -0.125q-0.4375 0 -0.734375 0.296875q-0.28125 0.296875 -0.28125 0.9375l0 2.40625l-0.703125 0l0 -2.703125q0 -0.46875 -0.171875 -0.703125q-0.171875 -0.234375 -0.5625 -0.234375q-0.296875 0 -0.5625 0.15625q-0.25 0.15625 -0.359375 0.46875q-0.109375 0.296875 -0.109375 0.859375l0 2.15625l-0.703125 0zm9.3671875 -0.515625q-0.390625 0.328125 -0.75 0.46875q-0.359375 0.140625 -0.78125 0.140625q-0.671875 0 -1.046875 -0.328125q-0.359375 -0.34375 -0.359375 -0.859375q0 -0.3125 0.125 -0.5625q0.140625 -0.25 0.359375 -0.390625q0.234375 -0.15625 0.515625 -0.234375q0.203125 -0.0625 0.625 -0.109375q0.859375 -0.109375 1.25 -0.25q0.015625 -0.140625 0.015625 -0.171875q0 -0.4375 -0.203125 -0.609375q-0.265625 -0.234375 -0.796875 -0.234375q-0.5 0 -0.734375 0.171875q-0.234375 0.171875 -0.359375 0.609375l-0.6875 -0.09375q0.09375 -0.4375 0.3125 -0.703125q0.21875 -0.28125 0.625 -0.421875q0.40625 -0.15625 0.9375 -0.15625q0.53125 0 0.859375 0.125q0.34375 0.125 0.5 0.328125q0.15625 0.1875 0.21875 0.46875q0.03125 0.1875 0.03125 0.65625l0 0.9375q0 0.96875 0.046875 1.234375q0.046875 0.265625 0.171875 0.5l-0.734375 0q-0.109375 -0.21875 -0.140625 -0.515625zm-0.0625 -1.5625q-0.375 0.15625 -1.140625 0.265625q-0.4375 0.0625 -0.625 0.140625q-0.171875 0.078125 -0.265625 0.234375q-0.09375 0.140625 -0.09375 0.328125q0 0.28125 0.203125 0.46875q0.21875 0.1875 0.625 0.1875q0.40625 0 0.71875 -0.171875q0.328125 -0.1875 0.46875 -0.5q0.109375 -0.234375 0.109375 -0.703125l0 -0.25zm1.9023438 2.078125l0 -5.734375l1.96875 0q0.671875 0 1.015625 0.09375q0.5 0.109375 0.84375 0.40625q0.453125 0.375 0.671875 0.984375q0.234375 0.59375 0.234375 1.359375q0 0.640625 -0.15625 1.15625q-0.15625 0.5 -0.390625 0.828125q-0.234375 0.328125 -0.53125 0.515625q-0.28125 0.1875 -0.6875 0.296875q-0.390625 0.09375 -0.90625 0.09375l-2.0625 0zm0.75 -0.671875l1.21875 0q0.578125 0 0.890625 -0.109375q0.328125 -0.109375 0.515625 -0.296875q0.265625 -0.265625 0.421875 -0.71875q0.15625 -0.46875 0.15625 -1.109375q0 -0.90625 -0.296875 -1.375q-0.296875 -0.484375 -0.71875 -0.65625q-0.3125 -0.109375 -0.984375 -0.109375l-1.203125 0l0 4.375zm7.7773438 
-0.671875l0.71875 0.09375q-0.171875 0.640625 -0.640625 1.0q-0.453125 0.34375 -1.1875 0.34375q-0.90625 0 -1.4375 -0.5625q-0.53125 -0.5625 -0.53125 -1.578125q0 -1.046875 0.53125 -1.625q0.546875 -0.578125 1.40625 -0.578125q0.828125 0 1.359375 0.578125q0.53125 0.5625 0.53125 1.59375q0 0.0625 -0.015625 0.1875l-3.09375 0q0.046875 0.671875 0.390625 1.046875q0.34375 0.359375 0.875 0.359375q0.375 0 0.640625 -0.203125q0.28125 -0.203125 0.453125 -0.65625zm-2.3125 -1.125l2.3125 0q-0.046875 -0.53125 -0.265625 -0.796875q-0.328125 -0.40625 -0.875 -0.40625q-0.484375 0 -0.8125 0.328125q-0.328125 0.328125 -0.359375 0.875zm6.6210938 0.953125l0.6875 0.078125q-0.109375 0.71875 -0.578125 1.125q-0.46875 0.40625 -1.140625 0.40625q-0.859375 0 -1.375 -0.546875q-0.515625 -0.5625 -0.515625 -1.609375q0 -0.671875 0.21875 -1.171875q0.234375 -0.5 0.6875 -0.75q0.453125 -0.265625 0.984375 -0.265625q0.671875 0 1.09375 0.34375q0.4375 0.34375 0.5625 0.96875l-0.6875 0.109375q-0.09375 -0.421875 -0.34375 -0.625q-0.25 -0.21875 -0.59375 -0.21875q-0.53125 0 -0.875 0.390625q-0.328125 0.375 -0.328125 1.203125q0 0.828125 0.3125 1.21875q0.328125 0.375 0.84375 0.375q0.421875 0 0.6875 -0.25q0.28125 -0.265625 0.359375 -0.78125zm1.03125 -0.5625q0 -1.15625 0.640625 -1.703125q0.53125 -0.46875 1.3125 -0.46875q0.84375 0 1.390625 0.5625q0.546875 0.5625 0.546875 1.546875q0 0.8125 -0.25 1.265625q-0.234375 0.453125 -0.703125 0.71875q-0.453125 0.25 -0.984375 0.25q-0.875 0 -1.421875 -0.5625q-0.53125 -0.5625 -0.53125 -1.609375zm0.71875 0q0 0.796875 0.34375 1.203125q0.359375 0.390625 0.890625 0.390625q0.515625 0 0.859375 -0.390625q0.359375 -0.40625 0.359375 -1.21875q0 -0.78125 -0.359375 -1.171875q-0.34375 -0.390625 -0.859375 -0.390625q-0.53125 0 -0.890625 0.390625q-0.34375 0.390625 -0.34375 1.1875zm6.6835938 2.078125l0 -0.53125q-0.390625 0.625 -1.15625 0.625q-0.5 0 -0.921875 -0.265625q-0.40625 -0.28125 -0.640625 -0.765625q-0.21875 -0.5 -0.21875 -1.140625q0 -0.609375 0.203125 -1.109375q0.203125 -0.515625 0.609375 -0.78125q0.421875 -0.28125 0.9375 -0.28125q0.375 0 0.65625 0.171875q0.296875 0.15625 0.484375 0.40625l0 -2.0625l0.703125 0l0 5.734375l-0.65625 0zm-2.21875 -2.078125q0 0.796875 0.328125 1.203125q0.34375 0.390625 0.796875 0.390625q0.46875 0 0.78125 -0.375q0.328125 -0.375 0.328125 -1.15625q0 -0.84375 -0.328125 -1.234375q-0.328125 -0.40625 -0.8125 -0.40625q-0.46875 0 -0.78125 0.390625q-0.3125 0.375 -0.3125 1.1875zm6.8242188 0.734375l0.71875 0.09375q-0.171875 0.640625 -0.640625 1.0q-0.453125 0.34375 -1.1875 0.34375q-0.90625 0 -1.4375 -0.5625q-0.53125 -0.5625 -0.53125 -1.578125q0 -1.046875 0.53125 -1.625q0.546875 -0.578125 1.40625 -0.578125q0.828125 0 1.359375 0.578125q0.53125 0.5625 0.53125 1.59375q0 0.0625 -0.015625 0.1875l-3.09375 0q0.046875 0.671875 0.390625 1.046875q0.34375 0.359375 0.875 0.359375q0.375 0 0.640625 -0.203125q0.28125 -0.203125 0.453125 -0.65625zm-2.3125 -1.125l2.3125 0q-0.046875 -0.53125 -0.265625 -0.796875q-0.328125 -0.40625 -0.875 -0.40625q-0.484375 0 -0.8125 0.328125q-0.328125 0.328125 -0.359375 0.875zm3.9023438 2.46875l0 -4.15625l0.640625 0l0 0.640625q0.234375 -0.453125 0.4375 -0.59375q0.21875 -0.140625 0.453125 -0.140625q0.359375 0 0.734375 0.234375l-0.25 0.65625q-0.25 -0.15625 -0.515625 -0.15625q-0.234375 0 -0.421875 0.140625q-0.171875 0.140625 -0.25 0.375q-0.125 0.375 -0.125 0.828125l0 2.171875l-0.703125 0zm2.7421875 0l0 -5.734375l0.75 0l0 5.0625l2.828125 0l0 0.671875l-3.578125 0zm7.0898438 -0.515625q-0.390625 0.328125 -0.75 0.46875q-0.359375 0.140625 -0.78125 0.140625q-0.671875 0 -1.046875 -0.328125q-0.359375 -0.34375 
-0.359375 -0.859375q0 -0.3125 0.125 -0.5625q0.140625 -0.25 0.359375 -0.390625q0.234375 -0.15625 0.515625 -0.234375q0.203125 -0.0625 0.625 -0.109375q0.859375 -0.109375 1.25 -0.25q0.015625 -0.140625 0.015625 -0.171875q0 -0.4375 -0.203125 -0.609375q-0.265625 -0.234375 -0.796875 -0.234375q-0.5 0 -0.734375 0.171875q-0.234375 0.171875 -0.359375 0.609375l-0.6875 -0.09375q0.09375 -0.4375 0.3125 -0.703125q0.21875 -0.28125 0.625 -0.421875q0.40625 -0.15625 0.9375 -0.15625q0.53125 0 0.859375 0.125q0.34375 0.125 0.5 0.328125q0.15625 0.1875 0.21875 0.46875q0.03125 0.1875 0.03125 0.65625l0 0.9375q0 0.96875 0.046875 1.234375q0.046875 0.265625 0.171875 0.5l-0.734375 0q-0.109375 -0.21875 -0.140625 -0.515625zm-0.0625 -1.5625q-0.375 0.15625 -1.140625 0.265625q-0.4375 0.0625 -0.625 0.140625q-0.171875 0.078125 -0.265625 0.234375q-0.09375 0.140625 -0.09375 0.328125q0 0.28125 0.203125 0.46875q0.21875 0.1875 0.625 0.1875q0.40625 0 0.71875 -0.171875q0.328125 -0.1875 0.46875 -0.5q0.109375 -0.234375 0.109375 -0.703125l0 -0.25zm1.7773438 3.671875l-0.078125 -0.65625q0.234375 0.0625 0.40625 0.0625q0.234375 0 0.375 -0.078125q0.140625 -0.078125 0.21875 -0.21875q0.078125 -0.109375 0.21875 -0.515625q0.015625 -0.0625 0.0625 -0.171875l-1.578125 -4.171875l0.765625 0l0.859375 2.40625q0.171875 0.453125 0.296875 0.96875q0.125 -0.484375 0.296875 -0.953125l0.890625 -2.421875l0.703125 0l-1.578125 4.234375q-0.265625 0.671875 -0.40625 0.9375q-0.1875 0.34375 -0.4375 0.5q-0.234375 0.171875 -0.5625 0.171875q-0.203125 0 -0.453125 -0.09375zm6.875 -2.9375l0.71875 0.09375q-0.171875 0.640625 -0.640625 1.0q-0.453125 0.34375 -1.1875 0.34375q-0.90625 0 -1.4375 -0.5625q-0.53125 -0.5625 -0.53125 -1.578125q0 -1.046875 0.53125 -1.625q0.546875 -0.578125 1.40625 -0.578125q0.828125 0 1.359375 0.578125q0.53125 0.5625 0.53125 1.59375q0 0.0625 -0.015625 0.1875l-3.09375 0q0.046875 0.671875 0.390625 1.046875q0.34375 0.359375 0.875 0.359375q0.375 0 0.640625 -0.203125q0.28125 -0.203125 0.453125 -0.65625zm-2.3125 -1.125l2.3125 0q-0.046875 -0.53125 -0.265625 -0.796875q-0.328125 -0.40625 -0.875 -0.40625q-0.484375 0 -0.8125 0.328125q-0.328125 0.328125 -0.359375 0.875zm3.9023438 2.46875l0 -4.15625l0.640625 0l0 0.640625q0.234375 -0.453125 0.4375 -0.59375q0.21875 -0.140625 0.453125 -0.140625q0.359375 0 0.734375 0.234375l-0.25 0.65625q-0.25 -0.15625 -0.515625 -0.15625q-0.234375 0 -0.421875 0.140625q-0.171875 0.140625 -0.25 0.375q-0.125 0.375 -0.125 0.828125l0 2.171875l-0.703125 0z" fill-rule="nonzero"/><path fill="#ffe599" d="m399.47375 169.8622l82.92914 0l0 105.98427l-82.92914 0z" fill-rule="evenodd"/><path stroke="#595959" stroke-width="1.0" stroke-linejoin="round" stroke-linecap="butt" d="m399.47375 169.8622l82.92914 0l0 105.98427l-82.92914 0z" fill-rule="evenodd"/><path fill="#c9daf8" d="m408.29932 201.41951l65.25986 0l0 16.661407l-65.25986 0z" fill-rule="evenodd"/><path stroke="#595959" stroke-width="1.0" stroke-linejoin="round" stroke-linecap="butt" d="m408.29932 201.41951l65.25986 0l0 16.661407l-65.25986 0z" fill-rule="evenodd"/><path fill="#000000" d="m419.88208 213.49022l2.921875 -7.625l1.09375 0l3.125 7.625l-1.15625 0l-0.890625 -2.3125l-3.1875 0l-0.828125 2.3125l-1.078125 0zm2.203125 -3.125l2.578125 0l-0.796875 -2.125q-0.359375 -0.953125 -0.53125 -1.578125q-0.15625 0.734375 -0.421875 1.453125l-0.828125 2.25zm7.6701355 2.28125l0.125 0.828125q-0.390625 0.09375 -0.703125 0.09375q-0.5 0 -0.78125 -0.15625q-0.28125 -0.171875 -0.40625 -0.4375q-0.109375 -0.265625 -0.109375 -1.109375l0 -3.171875l-0.6875 0l0 -0.734375l0.6875 0l0 -1.359375l0.9375 -0.5625l0 
1.921875l0.9375 0l0 0.734375l-0.9375 0l0 3.234375q0 0.390625 0.046875 0.515625q0.046875 0.109375 0.15625 0.1875q0.109375 0.0625 0.328125 0.0625q0.15625 0 0.40625 -0.046875zm2.9606628 0l0.125 0.828125q-0.390625 0.09375 -0.703125 0.09375q-0.5 0 -0.78125 -0.15625q-0.28125 -0.171875 -0.40625 -0.4375q-0.109375 -0.265625 -0.109375 -1.109375l0 -3.171875l-0.6875 0l0 -0.734375l0.6875 0l0 -1.359375l0.9375 -0.5625l0 1.921875l0.9375 0l0 0.734375l-0.9375 0l0 3.234375q0 0.390625 0.046875 0.515625q0.046875 0.109375 0.15625 0.1875q0.109375 0.0625 0.328125 0.0625q0.15625 0 0.40625 -0.046875zm4.6950073 -0.9375l0.96875 0.125q-0.234375 0.84375 -0.859375 1.3125q-0.609375 0.46875 -1.578125 0.46875q-1.203125 0 -1.921875 -0.75q-0.703125 -0.75 -0.703125 -2.09375q0 -1.390625 0.71875 -2.15625q0.71875 -0.78125 1.859375 -0.78125q1.109375 0 1.8125 0.765625q0.703125 0.75 0.703125 2.125q0 0.078125 0 0.234375l-4.125 0q0.046875 0.921875 0.515625 1.40625q0.46875 0.484375 1.15625 0.484375q0.515625 0 0.875 -0.265625q0.359375 -0.28125 0.578125 -0.875zm-3.078125 -1.515625l3.09375 0q-0.0625 -0.6875 -0.359375 -1.046875q-0.453125 -0.53125 -1.15625 -0.53125q-0.640625 0 -1.09375 0.4375q-0.4375 0.421875 -0.484375 1.140625zm5.2233887 3.296875l0 -5.53125l0.84375 0l0 0.796875q0.609375 -0.921875 1.75 -0.921875q0.5 0 0.921875 0.1875q0.421875 0.171875 0.625 0.46875q0.21875 0.296875 0.296875 0.6875q0.046875 0.265625 0.046875 0.921875l0 3.390625l-0.9375 0l0 -3.359375q0 -0.578125 -0.109375 -0.859375q-0.109375 -0.28125 -0.390625 -0.453125q-0.265625 -0.171875 -0.640625 -0.171875q-0.59375 0 -1.03125 0.390625q-0.4375 0.375 -0.4375 1.4375l0 3.015625l-0.9375 0zm7.973358 -0.84375l0.125 0.828125q-0.390625 0.09375 -0.703125 0.09375q-0.5 0 -0.78125 -0.15625q-0.28125 -0.171875 -0.40625 -0.4375q-0.109375 -0.265625 -0.109375 -1.109375l0 -3.171875l-0.6875 0l0 -0.734375l0.6875 0l0 -1.359375l0.9375 -0.5625l0 1.921875l0.9375 0l0 0.734375l-0.9375 0l0 3.234375q0 0.390625 0.046875 0.515625q0.046875 0.109375 0.15625 0.1875q0.109375 0.0625 0.328125 0.0625q0.15625 0 0.40625 -0.046875zm0.91378784 -5.703125l0 -1.078125l0.9375 0l0 1.078125l-0.9375 0zm0 6.546875l0 -5.53125l0.9375 0l0 5.53125l-0.9375 0zm2.0237122 -2.765625q0 -1.53125 0.84375 -2.265625q0.71875 -0.625 1.734375 -0.625q1.140625 0 1.859375 0.75q0.734375 0.75 0.734375 2.0625q0 1.0625 -0.328125 1.6875q-0.3125 0.609375 -0.921875 0.953125q-0.609375 0.328125 -1.34375 0.328125q-1.15625 0 -1.875 -0.734375q-0.703125 -0.75 -0.703125 -2.15625zm0.953125 0q0 1.0625 0.46875 1.59375q0.46875 0.53125 1.15625 0.53125q0.703125 0 1.15625 -0.53125q0.46875 -0.53125 0.46875 -1.625q0 -1.015625 -0.46875 -1.546875q-0.453125 -0.53125 -1.15625 -0.53125q-0.6875 0 -1.15625 0.53125q-0.46875 0.515625 -0.46875 1.578125zm5.3171387 2.765625l0 -5.53125l0.84375 0l0 0.796875q0.609375 -0.921875 1.75 -0.921875q0.5 0 0.921875 0.1875q0.421875 0.171875 0.625 0.46875q0.21875 0.296875 0.296875 0.6875q0.046875 0.265625 0.046875 0.921875l0 3.390625l-0.9375 0l0 -3.359375q0 -0.578125 -0.109375 -0.859375q-0.109375 -0.28125 -0.390625 -0.453125q-0.265625 -0.171875 -0.640625 -0.171875q-0.59375 0 -1.03125 0.390625q-0.4375 0.375 -0.4375 1.4375l0 3.015625l-0.9375 0z" fill-rule="nonzero"/><path fill="#ead1dc" d="m408.29932 247.87492l65.25986 0l0 16.661423l-65.25986 0z" fill-rule="evenodd"/><path stroke="#595959" stroke-width="1.0" stroke-linejoin="round" stroke-linecap="butt" d="m408.29932 247.87492l65.25986 0l0 16.661423l-65.25986 0z" fill-rule="evenodd"/><path fill="#000000" d="m430.7707 259.94562l0 -7.6249847l1.515625 0l1.796875 5.3906097q0.25 0.765625 0.375 
1.140625q0.125 -0.421875 0.40625 -1.234375l1.828125 -5.2968597l1.34375 0l0 7.6249847l-0.96875 0l0 -6.3906097l-2.21875 6.3906097l-0.90625 0l-2.203125 -6.4999847l0 6.4999847l-0.96875 0zm8.8611145 0l0 -7.6249847l1.015625 0l0 6.7187347l3.75 0l0 0.90625l-4.765625 0zm5.973358 0l0 -7.6249847l2.875 0q0.75 0 1.15625 0.0625q0.5625 0.09375 0.9375 0.359375q0.390625 0.265625 0.609375 0.75q0.234375 0.46875 0.234375 1.03125q0 0.96875 -0.625 1.6562347q-0.609375 0.671875 -2.234375 0.671875l-1.953125 0l0 3.09375l-1.0 0zm1.0 -3.9999847l1.96875 0q0.984375 0 1.390625 -0.359375q0.421875 -0.375 0.421875 -1.03125q0 -0.484375 -0.25 -0.8125q-0.234375 -0.34375 -0.640625 -0.453125q-0.25 -0.078125 -0.9375 -0.078125l-1.953125 0l0 2.734375z" fill-rule="nonzero"/><path fill="#f6b26b" d="m422.1472 180.56071l37.5748 0l0 11.937012l-37.5748 0z" fill-rule="evenodd"/><path stroke="#595959" stroke-width="1.0" stroke-linejoin="round" stroke-linecap="butt" d="m422.1472 180.56071l37.5748 0l0 11.937012l-37.5748 0z" fill-rule="evenodd"/><path fill="#000000" d="m433.29953 188.92921l0 -4.765625l0.65625 0l2.5 3.734375l0 -3.734375l0.609375 0l0 4.765625l-0.65625 0l-2.5 -3.75l0 3.75l-0.609375 0zm4.5256653 -1.71875q0 -0.96875 0.53125 -1.421875q0.453125 -0.390625 1.09375 -0.390625q0.703125 0 1.15625 0.46875q0.453125 0.46875 0.453125 1.28125q0 0.671875 -0.203125 1.0625q-0.1875 0.375 -0.578125 0.59375q-0.375 0.203125 -0.828125 0.203125q-0.734375 0 -1.1875 -0.453125q-0.4375 -0.46875 -0.4375 -1.34375zm0.609375 0q0 0.65625 0.28125 0.984375q0.296875 0.328125 0.734375 0.328125q0.4375 0 0.71875 -0.328125q0.296875 -0.328125 0.296875 -1.015625q0 -0.640625 -0.296875 -0.96875q-0.296875 -0.328125 -0.71875 -0.328125q-0.4375 0 -0.734375 0.328125q-0.28125 0.328125 -0.28125 1.0zm3.3112793 1.71875l0 -3.453125l0.515625 0l0 0.53125q0.203125 -0.375 0.375 -0.484375q0.171875 -0.125 0.375 -0.125q0.296875 0 0.609375 0.1875l-0.203125 0.546875q-0.21875 -0.125 -0.4375 -0.125q-0.1875 0 -0.34375 0.125q-0.140625 0.109375 -0.21875 0.3125q-0.09375 0.3125 -0.09375 0.671875l0 1.8125l-0.578125 0zm2.2165833 0l0 -3.453125l0.53125 0l0 0.484375q0.15625 -0.25 0.421875 -0.40625q0.28125 -0.15625 0.625 -0.15625q0.375 0 0.625 0.15625q0.25 0.15625 0.34375 0.453125q0.40625 -0.609375 1.0625 -0.609375q0.515625 0 0.78125 0.296875q0.28125 0.28125 0.28125 0.859375l0 2.375l-0.578125 0l0 -2.171875q0 -0.359375 -0.0625 -0.5q-0.046875 -0.15625 -0.203125 -0.25q-0.140625 -0.09375 -0.34375 -0.09375q-0.359375 0 -0.609375 0.25q-0.234375 0.234375 -0.234375 0.765625l0 2.0l-0.59375 0l0 -2.25q0 -0.375 -0.140625 -0.5625q-0.140625 -0.203125 -0.46875 -0.203125q-0.25 0 -0.453125 0.125q-0.203125 0.125 -0.296875 0.375q-0.09375 0.25 -0.09375 0.71875l0 1.796875l-0.59375 0z" fill-rule="nonzero"/><path fill="#b6d7a8" d="m422.1472 227.01541l37.5748 0l0 11.937012l-37.5748 0z" fill-rule="evenodd"/><path stroke="#595959" stroke-width="1.0" stroke-linejoin="round" stroke-linecap="butt" d="m422.1472 227.01541l37.5748 0l0 11.937012l-37.5748 0z" fill-rule="evenodd"/><path fill="#000000" d="m433.29953 235.38391l0 -4.765625l0.65625 0l2.5 3.734375l0 -3.734375l0.609375 0l0 4.765625l-0.65625 0l-2.5 -3.75l0 3.75l-0.609375 0zm4.5256653 -1.71875q0 -0.96875 0.53125 -1.421875q0.453125 -0.390625 1.09375 -0.390625q0.703125 0 1.15625 0.46875q0.453125 0.46875 0.453125 1.28125q0 0.671875 -0.203125 1.0625q-0.1875 0.375 -0.578125 0.59375q-0.375 0.203125 -0.828125 0.203125q-0.734375 0 -1.1875 -0.453125q-0.4375 -0.46875 -0.4375 -1.34375zm0.609375 0q0 0.65625 0.28125 0.984375q0.296875 0.328125 0.734375 0.328125q0.4375 0 0.71875 
-0.328125q0.296875 -0.328125 0.296875 -1.015625q0 -0.640625 -0.296875 -0.96875q-0.296875 -0.328125 -0.71875 -0.328125q-0.4375 0 -0.734375 0.328125q-0.28125 0.328125 -0.28125 1.0zm3.3112793 1.71875l0 -3.453125l0.515625 0l0 0.53125q0.203125 -0.375 0.375 -0.484375q0.171875 -0.125 0.375 -0.125q0.296875 0 0.609375 0.1875l-0.203125 0.546875q-0.21875 -0.125 -0.4375 -0.125q-0.1875 0 -0.34375 0.125q-0.140625 0.109375 -0.21875 0.3125q-0.09375 0.3125 -0.09375 0.671875l0 1.8125l-0.578125 0zm2.2165833 0l0 -3.453125l0.53125 0l0 0.484375q0.15625 -0.25 0.421875 -0.40625q0.28125 -0.15625 0.625 -0.15625q0.375 0 0.625 0.15625q0.25 0.15625 0.34375 0.453125q0.40625 -0.609375 1.0625 -0.609375q0.515625 0 0.78125 0.296875q0.28125 0.28125 0.28125 0.859375l0 2.375l-0.578125 0l0 -2.171875q0 -0.359375 -0.0625 -0.5q-0.046875 -0.15625 -0.203125 -0.25q-0.140625 -0.09375 -0.34375 -0.09375q-0.359375 0 -0.609375 0.25q-0.234375 0.234375 -0.234375 0.765625l0 2.0l-0.59375 0l0 -2.25q0 -0.375 -0.140625 -0.5625q-0.140625 -0.203125 -0.46875 -0.203125q-0.25 0 -0.453125 0.125q-0.203125 0.125 -0.296875 0.375q-0.09375 0.25 -0.09375 0.71875l0 1.796875l-0.59375 0z" fill-rule="nonzero"/><path fill="#fff2cc" d="m394.0643 279.19815l93.7323 0l0 11.937012l-93.7323 0z" fill-rule="evenodd"/><path fill="#595959" d="m405.50467 288.04666l0 -5.734375l0.75 0l0 5.0625l2.828125 0l0 0.671875l-3.578125 0zm4.3710938 0l0 -5.734375l0.703125 0l0 5.734375l-0.703125 0zm4.4960938 -0.515625q-0.390625 0.328125 -0.75 0.46875q-0.359375 0.140625 -0.78125 0.140625q-0.671875 0 -1.046875 -0.328125q-0.359375 -0.34375 -0.359375 -0.859375q0 -0.3125 0.125 -0.5625q0.140625 -0.25 0.359375 -0.390625q0.234375 -0.15625 0.515625 -0.234375q0.203125 -0.0625 0.625 -0.109375q0.859375 -0.109375 1.25 -0.25q0.015625 -0.140625 0.015625 -0.171875q0 -0.4375 -0.203125 -0.609375q-0.265625 -0.234375 -0.796875 -0.234375q-0.5 0 -0.734375 0.171875q-0.234375 0.171875 -0.359375 0.609375l-0.6875 -0.09375q0.09375 -0.4375 0.3125 -0.703125q0.21875 -0.28125 0.625 -0.421875q0.40625 -0.15625 0.9375 -0.15625q0.53125 0 0.859375 0.125q0.34375 0.125 0.5 0.328125q0.15625 0.1875 0.21875 0.46875q0.03125 0.1875 0.03125 0.65625l0 0.9375q0 0.96875 0.046875 1.234375q0.046875 0.265625 0.171875 0.5l-0.734375 0q-0.109375 -0.21875 -0.140625 -0.515625zm-0.0625 -1.5625q-0.375 0.15625 -1.140625 0.265625q-0.4375 0.0625 -0.625 0.140625q-0.171875 0.078125 -0.265625 0.234375q-0.09375 0.140625 -0.09375 0.328125q0 0.28125 0.203125 0.46875q0.21875 0.1875 0.625 0.1875q0.40625 0 0.71875 -0.171875q0.328125 -0.1875 0.46875 -0.5q0.109375 -0.234375 0.109375 -0.703125l0 -0.25zm1.8085938 2.078125l0 -4.15625l0.625 0l0 0.59375q0.203125 -0.3125 0.515625 -0.5q0.328125 -0.1875 0.75 -0.1875q0.453125 0 0.75 0.203125q0.296875 0.1875 0.421875 0.53125q0.484375 -0.734375 1.28125 -0.734375q0.609375 0 0.9375 0.34375q0.34375 0.34375 0.34375 1.0625l0 2.84375l-0.703125 0l0 -2.609375q0 -0.421875 -0.078125 -0.609375q-0.0625 -0.1875 -0.25 -0.296875q-0.171875 -0.125 -0.40625 -0.125q-0.4375 0 -0.734375 0.296875q-0.28125 0.296875 -0.28125 0.9375l0 2.40625l-0.703125 0l0 -2.703125q0 -0.46875 -0.171875 -0.703125q-0.171875 -0.234375 -0.5625 -0.234375q-0.296875 0 -0.5625 0.15625q-0.25 0.15625 -0.359375 0.46875q-0.109375 0.296875 -0.109375 0.859375l0 2.15625l-0.703125 0zm9.3671875 -0.515625q-0.390625 0.328125 -0.75 0.46875q-0.359375 0.140625 -0.78125 0.140625q-0.671875 0 -1.046875 -0.328125q-0.359375 -0.34375 -0.359375 -0.859375q0 -0.3125 0.125 -0.5625q0.140625 -0.25 0.359375 -0.390625q0.234375 -0.15625 0.515625 -0.234375q0.203125 -0.0625 0.625 
-0.109375q0.859375 -0.109375 1.25 -0.25q0.015625 -0.140625 0.015625 -0.171875q0 -0.4375 -0.203125 -0.609375q-0.265625 -0.234375 -0.796875 -0.234375q-0.5 0 -0.734375 0.171875q-0.234375 0.171875 -0.359375 0.609375l-0.6875 -0.09375q0.09375 -0.4375 0.3125 -0.703125q0.21875 -0.28125 0.625 -0.421875q0.40625 -0.15625 0.9375 -0.15625q0.53125 0 0.859375 0.125q0.34375 0.125 0.5 0.328125q0.15625 0.1875 0.21875 0.46875q0.03125 0.1875 0.03125 0.65625l0 0.9375q0 0.96875 0.046875 1.234375q0.046875 0.265625 0.171875 0.5l-0.734375 0q-0.109375 -0.21875 -0.140625 -0.515625zm-0.0625 -1.5625q-0.375 0.15625 -1.140625 0.265625q-0.4375 0.0625 -0.625 0.140625q-0.171875 0.078125 -0.265625 0.234375q-0.09375 0.140625 -0.09375 0.328125q0 0.28125 0.203125 0.46875q0.21875 0.1875 0.625 0.1875q0.40625 0 0.71875 -0.171875q0.328125 -0.1875 0.46875 -0.5q0.109375 -0.234375 0.109375 -0.703125l0 -0.25zm1.9023438 2.078125l0 -5.734375l1.96875 0q0.671875 0 1.015625 0.09375q0.5 0.109375 0.84375 0.40625q0.453125 0.375 0.671875 0.984375q0.234375 0.59375 0.234375 1.359375q0 0.640625 -0.15625 1.15625q-0.15625 0.5 -0.390625 0.828125q-0.234375 0.328125 -0.53125 0.515625q-0.28125 0.1875 -0.6875 0.296875q-0.390625 0.09375 -0.90625 0.09375l-2.0625 0zm0.75 -0.671875l1.21875 0q0.578125 0 0.890625 -0.109375q0.328125 -0.109375 0.515625 -0.296875q0.265625 -0.265625 0.421875 -0.71875q0.15625 -0.46875 0.15625 -1.109375q0 -0.90625 -0.296875 -1.375q-0.296875 -0.484375 -0.71875 -0.65625q-0.3125 -0.109375 -0.984375 -0.109375l-1.203125 0l0 4.375zm7.7773438 -0.671875l0.71875 0.09375q-0.171875 0.640625 -0.640625 1.0q-0.453125 0.34375 -1.1875 0.34375q-0.90625 0 -1.4375 -0.5625q-0.53125 -0.5625 -0.53125 -1.578125q0 -1.046875 0.53125 -1.625q0.546875 -0.578125 1.40625 -0.578125q0.828125 0 1.359375 0.578125q0.53125 0.5625 0.53125 1.59375q0 0.0625 -0.015625 0.1875l-3.09375 0q0.046875 0.671875 0.390625 1.046875q0.34375 0.359375 0.875 0.359375q0.375 0 0.640625 -0.203125q0.28125 -0.203125 0.453125 -0.65625zm-2.3125 -1.125l2.3125 0q-0.046875 -0.53125 -0.265625 -0.796875q-0.328125 -0.40625 -0.875 -0.40625q-0.484375 0 -0.8125 0.328125q-0.328125 0.328125 -0.359375 0.875zm6.6210938 0.953125l0.6875 0.078125q-0.109375 0.71875 -0.578125 1.125q-0.46875 0.40625 -1.140625 0.40625q-0.859375 0 -1.375 -0.546875q-0.515625 -0.5625 -0.515625 -1.609375q0 -0.671875 0.21875 -1.171875q0.234375 -0.5 0.6875 -0.75q0.453125 -0.265625 0.984375 -0.265625q0.671875 0 1.09375 0.34375q0.4375 0.34375 0.5625 0.96875l-0.6875 0.109375q-0.09375 -0.421875 -0.34375 -0.625q-0.25 -0.21875 -0.59375 -0.21875q-0.53125 0 -0.875 0.390625q-0.328125 0.375 -0.328125 1.203125q0 0.828125 0.3125 1.21875q0.328125 0.375 0.84375 0.375q0.421875 0 0.6875 -0.25q0.28125 -0.265625 0.359375 -0.78125zm1.03125 -0.5625q0 -1.15625 0.640625 -1.703125q0.53125 -0.46875 1.3125 -0.46875q0.84375 0 1.390625 0.5625q0.546875 0.5625 0.546875 1.546875q0 0.8125 -0.25 1.265625q-0.234375 0.453125 -0.703125 0.71875q-0.453125 0.25 -0.984375 0.25q-0.875 0 -1.421875 -0.5625q-0.53125 -0.5625 -0.53125 -1.609375zm0.71875 0q0 0.796875 0.34375 1.203125q0.359375 0.390625 0.890625 0.390625q0.515625 0 0.859375 -0.390625q0.359375 -0.40625 0.359375 -1.21875q0 -0.78125 -0.359375 -1.171875q-0.34375 -0.390625 -0.859375 -0.390625q-0.53125 0 -0.890625 0.390625q-0.34375 0.390625 -0.34375 1.1875zm6.6835938 2.078125l0 -0.53125q-0.390625 0.625 -1.15625 0.625q-0.5 0 -0.921875 -0.265625q-0.40625 -0.28125 -0.640625 -0.765625q-0.21875 -0.5 -0.21875 -1.140625q0 -0.609375 0.203125 -1.109375q0.203125 -0.515625 0.609375 -0.78125q0.421875 -0.28125 0.9375 -0.28125q0.375 0 
0.65625 0.171875q0.296875 0.15625 0.484375 0.40625l0 -2.0625l0.703125 0l0 5.734375l-0.65625 0zm-2.21875 -2.078125q0 0.796875 0.328125 1.203125q0.34375 0.390625 0.796875 0.390625q0.46875 0 0.78125 -0.375q0.328125 -0.375 0.328125 -1.15625q0 -0.84375 -0.328125 -1.234375q-0.328125 -0.40625 -0.8125 -0.40625q-0.46875 0 -0.78125 0.390625q-0.3125 0.375 -0.3125 1.1875zm6.8242188 0.734375l0.71875 0.09375q-0.171875 0.640625 -0.640625 1.0q-0.453125 0.34375 -1.1875 0.34375q-0.90625 0 -1.4375 -0.5625q-0.53125 -0.5625 -0.53125 -1.578125q0 -1.046875 0.53125 -1.625q0.546875 -0.578125 1.40625 -0.578125q0.828125 0 1.359375 0.578125q0.53125 0.5625 0.53125 1.59375q0 0.0625 -0.015625 0.1875l-3.09375 0q0.046875 0.671875 0.390625 1.046875q0.34375 0.359375 0.875 0.359375q0.375 0 0.640625 -0.203125q0.28125 -0.203125 0.453125 -0.65625zm-2.3125 -1.125l2.3125 0q-0.046875 -0.53125 -0.265625 -0.796875q-0.328125 -0.40625 -0.875 -0.40625q-0.484375 0 -0.8125 0.328125q-0.328125 0.328125 -0.359375 0.875zm3.9023438 2.46875l0 -4.15625l0.640625 0l0 0.640625q0.234375 -0.453125 0.4375 -0.59375q0.21875 -0.140625 0.453125 -0.140625q0.359375 0 0.734375 0.234375l-0.25 0.65625q-0.25 -0.15625 -0.515625 -0.15625q-0.234375 0 -0.421875 0.140625q-0.171875 0.140625 -0.25 0.375q-0.125 0.375 -0.125 0.828125l0 2.171875l-0.703125 0zm2.7421875 0l0 -5.734375l0.75 0l0 5.0625l2.828125 0l0 0.671875l-3.578125 0zm7.0898438 -0.515625q-0.390625 0.328125 -0.75 0.46875q-0.359375 0.140625 -0.78125 0.140625q-0.671875 0 -1.046875 -0.328125q-0.359375 -0.34375 -0.359375 -0.859375q0 -0.3125 0.125 -0.5625q0.140625 -0.25 0.359375 -0.390625q0.234375 -0.15625 0.515625 -0.234375q0.203125 -0.0625 0.625 -0.109375q0.859375 -0.109375 1.25 -0.25q0.015625 -0.140625 0.015625 -0.171875q0 -0.4375 -0.203125 -0.609375q-0.265625 -0.234375 -0.796875 -0.234375q-0.5 0 -0.734375 0.171875q-0.234375 0.171875 -0.359375 0.609375l-0.6875 -0.09375q0.09375 -0.4375 0.3125 -0.703125q0.21875 -0.28125 0.625 -0.421875q0.40625 -0.15625 0.9375 -0.15625q0.53125 0 0.859375 0.125q0.34375 0.125 0.5 0.328125q0.15625 0.1875 0.21875 0.46875q0.03125 0.1875 0.03125 0.65625l0 0.9375q0 0.96875 0.046875 1.234375q0.046875 0.265625 0.171875 0.5l-0.734375 0q-0.109375 -0.21875 -0.140625 -0.515625zm-0.0625 -1.5625q-0.375 0.15625 -1.140625 0.265625q-0.4375 0.0625 -0.625 0.140625q-0.171875 0.078125 -0.265625 0.234375q-0.09375 0.140625 -0.09375 0.328125q0 0.28125 0.203125 0.46875q0.21875 0.1875 0.625 0.1875q0.40625 0 0.71875 -0.171875q0.328125 -0.1875 0.46875 -0.5q0.109375 -0.234375 0.109375 -0.703125l0 -0.25zm1.7773438 3.671875l-0.078125 -0.65625q0.234375 0.0625 0.40625 0.0625q0.234375 0 0.375 -0.078125q0.140625 -0.078125 0.21875 -0.21875q0.078125 -0.109375 0.21875 -0.515625q0.015625 -0.0625 0.0625 -0.171875l-1.578125 -4.171875l0.765625 0l0.859375 2.40625q0.171875 0.453125 0.296875 0.96875q0.125 -0.484375 0.296875 -0.953125l0.890625 -2.421875l0.703125 0l-1.578125 4.234375q-0.265625 0.671875 -0.40625 0.9375q-0.1875 0.34375 -0.4375 0.5q-0.234375 0.171875 -0.5625 0.171875q-0.203125 0 -0.453125 -0.09375zm6.875 -2.9375l0.71875 0.09375q-0.171875 0.640625 -0.640625 1.0q-0.453125 0.34375 -1.1875 0.34375q-0.90625 0 -1.4375 -0.5625q-0.53125 -0.5625 -0.53125 -1.578125q0 -1.046875 0.53125 -1.625q0.546875 -0.578125 1.40625 -0.578125q0.828125 0 1.359375 0.578125q0.53125 0.5625 0.53125 1.59375q0 0.0625 -0.015625 0.1875l-3.09375 0q0.046875 0.671875 0.390625 1.046875q0.34375 0.359375 0.875 0.359375q0.375 0 0.640625 -0.203125q0.28125 -0.203125 0.453125 -0.65625zm-2.3125 -1.125l2.3125 0q-0.046875 -0.53125 -0.265625 
-0.796875q-0.328125 -0.40625 -0.875 -0.40625q-0.484375 0 -0.8125 0.328125q-0.328125 0.328125 -0.359375 0.875zm3.9023438 2.46875l0 -4.15625l0.640625 0l0 0.640625q0.234375 -0.453125 0.4375 -0.59375q0.21875 -0.140625 0.453125 -0.140625q0.359375 0 0.734375 0.234375l-0.25 0.65625q-0.25 -0.15625 -0.515625 -0.15625q-0.234375 0 -0.421875 0.140625q-0.171875 0.140625 -0.25 0.375q-0.125 0.375 -0.125 0.828125l0 2.171875l-0.703125 0z" fill-rule="nonzero"/><path fill="#ffe599" d="m415.47375 185.8622l82.92914 0l0 105.98427l-82.92914 0z" fill-rule="evenodd"/><path stroke="#595959" stroke-width="1.0" stroke-linejoin="round" stroke-linecap="butt" d="m415.47375 185.8622l82.92914 0l0 105.98427l-82.92914 0z" fill-rule="evenodd"/><path fill="#c9daf8" d="m424.29932 217.41951l65.25986 0l0 16.661407l-65.25986 0z" fill-rule="evenodd"/><path stroke="#595959" stroke-width="1.0" stroke-linejoin="round" stroke-linecap="butt" d="m424.29932 217.41951l65.25986 0l0 16.661407l-65.25986 0z" fill-rule="evenodd"/><path fill="#000000" d="m435.88208 229.49022l2.921875 -7.625l1.09375 0l3.125 7.625l-1.15625 0l-0.890625 -2.3125l-3.1875 0l-0.828125 2.3125l-1.078125 0zm2.203125 -3.125l2.578125 0l-0.796875 -2.125q-0.359375 -0.953125 -0.53125 -1.578125q-0.15625 0.734375 -0.421875 1.453125l-0.828125 2.25zm7.6701355 2.28125l0.125 0.828125q-0.390625 0.09375 -0.703125 0.09375q-0.5 0 -0.78125 -0.15625q-0.28125 -0.171875 -0.40625 -0.4375q-0.109375 -0.265625 -0.109375 -1.109375l0 -3.171875l-0.6875 0l0 -0.734375l0.6875 0l0 -1.359375l0.9375 -0.5625l0 1.921875l0.9375 0l0 0.734375l-0.9375 0l0 3.234375q0 0.390625 0.046875 0.515625q0.046875 0.109375 0.15625 0.1875q0.109375 0.0625 0.328125 0.0625q0.15625 0 0.40625 -0.046875zm2.9606628 0l0.125 0.828125q-0.390625 0.09375 -0.703125 0.09375q-0.5 0 -0.78125 -0.15625q-0.28125 -0.171875 -0.40625 -0.4375q-0.109375 -0.265625 -0.109375 -1.109375l0 -3.171875l-0.6875 0l0 -0.734375l0.6875 0l0 -1.359375l0.9375 -0.5625l0 1.921875l0.9375 0l0 0.734375l-0.9375 0l0 3.234375q0 0.390625 0.046875 0.515625q0.046875 0.109375 0.15625 0.1875q0.109375 0.0625 0.328125 0.0625q0.15625 0 0.40625 -0.046875zm4.6950073 -0.9375l0.96875 0.125q-0.234375 0.84375 -0.859375 1.3125q-0.609375 0.46875 -1.578125 0.46875q-1.203125 0 -1.921875 -0.75q-0.703125 -0.75 -0.703125 -2.09375q0 -1.390625 0.71875 -2.15625q0.71875 -0.78125 1.859375 -0.78125q1.109375 0 1.8125 0.765625q0.703125 0.75 0.703125 2.125q0 0.078125 0 0.234375l-4.125 0q0.046875 0.921875 0.515625 1.40625q0.46875 0.484375 1.15625 0.484375q0.515625 0 0.875 -0.265625q0.359375 -0.28125 0.578125 -0.875zm-3.078125 -1.515625l3.09375 0q-0.0625 -0.6875 -0.359375 -1.046875q-0.453125 -0.53125 -1.15625 -0.53125q-0.640625 0 -1.09375 0.4375q-0.4375 0.421875 -0.484375 1.140625zm5.2233887 3.296875l0 -5.53125l0.84375 0l0 0.796875q0.609375 -0.921875 1.75 -0.921875q0.5 0 0.921875 0.1875q0.421875 0.171875 0.625 0.46875q0.21875 0.296875 0.296875 0.6875q0.046875 0.265625 0.046875 0.921875l0 3.390625l-0.9375 0l0 -3.359375q0 -0.578125 -0.109375 -0.859375q-0.109375 -0.28125 -0.390625 -0.453125q-0.265625 -0.171875 -0.640625 -0.171875q-0.59375 0 -1.03125 0.390625q-0.4375 0.375 -0.4375 1.4375l0 3.015625l-0.9375 0zm7.973358 -0.84375l0.125 0.828125q-0.390625 0.09375 -0.703125 0.09375q-0.5 0 -0.78125 -0.15625q-0.28125 -0.171875 -0.40625 -0.4375q-0.109375 -0.265625 -0.109375 -1.109375l0 -3.171875l-0.6875 0l0 -0.734375l0.6875 0l0 -1.359375l0.9375 -0.5625l0 1.921875l0.9375 0l0 0.734375l-0.9375 0l0 3.234375q0 0.390625 0.046875 0.515625q0.046875 0.109375 0.15625 0.1875q0.109375 0.0625 0.328125 0.0625q0.15625 0 0.40625 
-0.046875zm0.91378784 -5.703125l0 -1.078125l0.9375 0l0 1.078125l-0.9375 0zm0 6.546875l0 -5.53125l0.9375 0l0 5.53125l-0.9375 0zm2.0237122 -2.765625q0 -1.53125 0.84375 -2.265625q0.71875 -0.625 1.734375 -0.625q1.140625 0 1.859375 0.75q0.734375 0.75 0.734375 2.0625q0 1.0625 -0.328125 1.6875q-0.3125 0.609375 -0.921875 0.953125q-0.609375 0.328125 -1.34375 0.328125q-1.15625 0 -1.875 -0.734375q-0.703125 -0.75 -0.703125 -2.15625zm0.953125 0q0 1.0625 0.46875 1.59375q0.46875 0.53125 1.15625 0.53125q0.703125 0 1.15625 -0.53125q0.46875 -0.53125 0.46875 -1.625q0 -1.015625 -0.46875 -1.546875q-0.453125 -0.53125 -1.15625 -0.53125q-0.6875 0 -1.15625 0.53125q-0.46875 0.515625 -0.46875 1.578125zm5.3171387 2.765625l0 -5.53125l0.84375 0l0 0.796875q0.609375 -0.921875 1.75 -0.921875q0.5 0 0.921875 0.1875q0.421875 0.171875 0.625 0.46875q0.21875 0.296875 0.296875 0.6875q0.046875 0.265625 0.046875 0.921875l0 3.390625l-0.9375 0l0 -3.359375q0 -0.578125 -0.109375 -0.859375q-0.109375 -0.28125 -0.390625 -0.453125q-0.265625 -0.171875 -0.640625 -0.171875q-0.59375 0 -1.03125 0.390625q-0.4375 0.375 -0.4375 1.4375l0 3.015625l-0.9375 0z" fill-rule="nonzero"/><path fill="#ead1dc" d="m424.29932 263.8749l65.25986 0l0 16.661438l-65.25986 0z" fill-rule="evenodd"/><path stroke="#595959" stroke-width="1.0" stroke-linejoin="round" stroke-linecap="butt" d="m424.29932 263.8749l65.25986 0l0 16.661438l-65.25986 0z" fill-rule="evenodd"/><path fill="#000000" d="m446.7707 275.94562l0 -7.625l1.515625 0l1.796875 5.390625q0.25 0.765625 0.375 1.140625q0.125 -0.421875 0.40625 -1.234375l1.828125 -5.296875l1.34375 0l0 7.625l-0.96875 0l0 -6.390625l-2.21875 6.390625l-0.90625 0l-2.203125 -6.5l0 6.5l-0.96875 0zm8.8611145 0l0 -7.625l1.015625 0l0 6.71875l3.75 0l0 0.90625l-4.765625 0zm5.973358 0l0 -7.625l2.875 0q0.75 0 1.15625 0.0625q0.5625 0.09375 0.9375 0.359375q0.390625 0.265625 0.609375 0.75q0.234375 0.46875 0.234375 1.03125q0 0.96875 -0.625 1.65625q-0.609375 0.671875 -2.234375 0.671875l-1.953125 0l0 3.09375l-1.0 0zm1.0 -4.0l1.96875 0q0.984375 0 1.390625 -0.359375q0.421875 -0.375 0.421875 -1.03125q0 -0.484375 -0.25 -0.8125q-0.234375 -0.34375 -0.640625 -0.453125q-0.25 -0.078125 -0.9375 -0.078125l-1.953125 0l0 2.734375z" fill-rule="nonzero"/><path fill="#f6b26b" d="m438.1472 196.56071l37.5748 0l0 11.937012l-37.5748 0z" fill-rule="evenodd"/><path stroke="#595959" stroke-width="1.0" stroke-linejoin="round" stroke-linecap="butt" d="m438.1472 196.56071l37.5748 0l0 11.937012l-37.5748 0z" fill-rule="evenodd"/><path fill="#000000" d="m449.29953 204.92921l0 -4.765625l0.65625 0l2.5 3.734375l0 -3.734375l0.609375 0l0 4.765625l-0.65625 0l-2.5 -3.75l0 3.75l-0.609375 0zm4.5256653 -1.71875q0 -0.96875 0.53125 -1.421875q0.453125 -0.390625 1.09375 -0.390625q0.703125 0 1.15625 0.46875q0.453125 0.46875 0.453125 1.28125q0 0.671875 -0.203125 1.0625q-0.1875 0.375 -0.578125 0.59375q-0.375 0.203125 -0.828125 0.203125q-0.734375 0 -1.1875 -0.453125q-0.4375 -0.46875 -0.4375 -1.34375zm0.609375 0q0 0.65625 0.28125 0.984375q0.296875 0.328125 0.734375 0.328125q0.4375 0 0.71875 -0.328125q0.296875 -0.328125 0.296875 -1.015625q0 -0.640625 -0.296875 -0.96875q-0.296875 -0.328125 -0.71875 -0.328125q-0.4375 0 -0.734375 0.328125q-0.28125 0.328125 -0.28125 1.0zm3.3112793 1.71875l0 -3.453125l0.515625 0l0 0.53125q0.203125 -0.375 0.375 -0.484375q0.171875 -0.125 0.375 -0.125q0.296875 0 0.609375 0.1875l-0.203125 0.546875q-0.21875 -0.125 -0.4375 -0.125q-0.1875 0 -0.34375 0.125q-0.140625 0.109375 -0.21875 0.3125q-0.09375 0.3125 -0.09375 0.671875l0 1.8125l-0.578125 0zm2.2165833 0l0 -3.453125l0.53125 
0l0 0.484375q0.15625 -0.25 0.421875 -0.40625q0.28125 -0.15625 0.625 -0.15625q0.375 0 0.625 0.15625q0.25 0.15625 0.34375 0.453125q0.40625 -0.609375 1.0625 -0.609375q0.515625 0 0.78125 0.296875q0.28125 0.28125 0.28125 0.859375l0 2.375l-0.578125 0l0 -2.171875q0 -0.359375 -0.0625 -0.5q-0.046875 -0.15625 -0.203125 -0.25q-0.140625 -0.09375 -0.34375 -0.09375q-0.359375 0 -0.609375 0.25q-0.234375 0.234375 -0.234375 0.765625l0 2.0l-0.59375 0l0 -2.25q0 -0.375 -0.140625 -0.5625q-0.140625 -0.203125 -0.46875 -0.203125q-0.25 0 -0.453125 0.125q-0.203125 0.125 -0.296875 0.375q-0.09375 0.25 -0.09375 0.71875l0 1.796875l-0.59375 0z" fill-rule="nonzero"/><path fill="#b6d7a8" d="m438.1472 243.01541l37.5748 0l0 11.937012l-37.5748 0z" fill-rule="evenodd"/><path stroke="#595959" stroke-width="1.0" stroke-linejoin="round" stroke-linecap="butt" d="m438.1472 243.01541l37.5748 0l0 11.937012l-37.5748 0z" fill-rule="evenodd"/><path fill="#000000" d="m449.29953 251.38391l0 -4.765625l0.65625 0l2.5 3.734375l0 -3.734375l0.609375 0l0 4.765625l-0.65625 0l-2.5 -3.75l0 3.75l-0.609375 0zm4.5256653 -1.71875q0 -0.96875 0.53125 -1.421875q0.453125 -0.390625 1.09375 -0.390625q0.703125 0 1.15625 0.46875q0.453125 0.46875 0.453125 1.28125q0 0.671875 -0.203125 1.0625q-0.1875 0.375 -0.578125 0.59375q-0.375 0.203125 -0.828125 0.203125q-0.734375 0 -1.1875 -0.453125q-0.4375 -0.46875 -0.4375 -1.34375zm0.609375 0q0 0.65625 0.28125 0.984375q0.296875 0.328125 0.734375 0.328125q0.4375 0 0.71875 -0.328125q0.296875 -0.328125 0.296875 -1.015625q0 -0.640625 -0.296875 -0.96875q-0.296875 -0.328125 -0.71875 -0.328125q-0.4375 0 -0.734375 0.328125q-0.28125 0.328125 -0.28125 1.0zm3.3112793 1.71875l0 -3.453125l0.515625 0l0 0.53125q0.203125 -0.375 0.375 -0.484375q0.171875 -0.125 0.375 -0.125q0.296875 0 0.609375 0.1875l-0.203125 0.546875q-0.21875 -0.125 -0.4375 -0.125q-0.1875 0 -0.34375 0.125q-0.140625 0.109375 -0.21875 0.3125q-0.09375 0.3125 -0.09375 0.671875l0 1.8125l-0.578125 0zm2.2165833 0l0 -3.453125l0.53125 0l0 0.484375q0.15625 -0.25 0.421875 -0.40625q0.28125 -0.15625 0.625 -0.15625q0.375 0 0.625 0.15625q0.25 0.15625 0.34375 0.453125q0.40625 -0.609375 1.0625 -0.609375q0.515625 0 0.78125 0.296875q0.28125 0.28125 0.28125 0.859375l0 2.375l-0.578125 0l0 -2.171875q0 -0.359375 -0.0625 -0.5q-0.046875 -0.15625 -0.203125 -0.25q-0.140625 -0.09375 -0.34375 -0.09375q-0.359375 0 -0.609375 0.25q-0.234375 0.234375 -0.234375 0.765625l0 2.0l-0.59375 0l0 -2.25q0 -0.375 -0.140625 -0.5625q-0.140625 -0.203125 -0.46875 -0.203125q-0.25 0 -0.453125 0.125q-0.203125 0.125 -0.296875 0.375q-0.09375 0.25 -0.09375 0.71875l0 1.796875l-0.59375 0z" fill-rule="nonzero"/><path fill="#fff2cc" d="m410.0643 295.19815l93.7323 0l0 11.937012l-93.7323 0z" fill-rule="evenodd"/><path fill="#595959" d="m421.50467 304.04666l0 -5.734375l0.75 0l0 5.0625l2.828125 0l0 0.671875l-3.578125 0zm4.3710938 0l0 -5.734375l0.703125 0l0 5.734375l-0.703125 0zm4.4960938 -0.515625q-0.390625 0.328125 -0.75 0.46875q-0.359375 0.140625 -0.78125 0.140625q-0.671875 0 -1.046875 -0.328125q-0.359375 -0.34375 -0.359375 -0.859375q0 -0.3125 0.125 -0.5625q0.140625 -0.25 0.359375 -0.390625q0.234375 -0.15625 0.515625 -0.234375q0.203125 -0.0625 0.625 -0.109375q0.859375 -0.109375 1.25 -0.25q0.015625 -0.140625 0.015625 -0.171875q0 -0.4375 -0.203125 -0.609375q-0.265625 -0.234375 -0.796875 -0.234375q-0.5 0 -0.734375 0.171875q-0.234375 0.171875 -0.359375 0.609375l-0.6875 -0.09375q0.09375 -0.4375 0.3125 -0.703125q0.21875 -0.28125 0.625 -0.421875q0.40625 -0.15625 0.9375 -0.15625q0.53125 0 0.859375 0.125q0.34375 0.125 0.5 0.328125q0.15625 
0.1875 0.21875 0.46875q0.03125 0.1875 0.03125 0.65625l0 0.9375q0 0.96875 0.046875 1.234375q0.046875 0.265625 0.171875 0.5l-0.734375 0q-0.109375 -0.21875 -0.140625 -0.515625zm-0.0625 -1.5625q-0.375 0.15625 -1.140625 0.265625q-0.4375 0.0625 -0.625 0.140625q-0.171875 0.078125 -0.265625 0.234375q-0.09375 0.140625 -0.09375 0.328125q0 0.28125 0.203125 0.46875q0.21875 0.1875 0.625 0.1875q0.40625 0 0.71875 -0.171875q0.328125 -0.1875 0.46875 -0.5q0.109375 -0.234375 0.109375 -0.703125l0 -0.25zm1.8085938 2.078125l0 -4.15625l0.625 0l0 0.59375q0.203125 -0.3125 0.515625 -0.5q0.328125 -0.1875 0.75 -0.1875q0.453125 0 0.75 0.203125q0.296875 0.1875 0.421875 0.53125q0.484375 -0.734375 1.28125 -0.734375q0.609375 0 0.9375 0.34375q0.34375 0.34375 0.34375 1.0625l0 2.84375l-0.703125 0l0 -2.609375q0 -0.421875 -0.078125 -0.609375q-0.0625 -0.1875 -0.25 -0.296875q-0.171875 -0.125 -0.40625 -0.125q-0.4375 0 -0.734375 0.296875q-0.28125 0.296875 -0.28125 0.9375l0 2.40625l-0.703125 0l0 -2.703125q0 -0.46875 -0.171875 -0.703125q-0.171875 -0.234375 -0.5625 -0.234375q-0.296875 0 -0.5625 0.15625q-0.25 0.15625 -0.359375 0.46875q-0.109375 0.296875 -0.109375 0.859375l0 2.15625l-0.703125 0zm9.3671875 -0.515625q-0.390625 0.328125 -0.75 0.46875q-0.359375 0.140625 -0.78125 0.140625q-0.671875 0 -1.046875 -0.328125q-0.359375 -0.34375 -0.359375 -0.859375q0 -0.3125 0.125 -0.5625q0.140625 -0.25 0.359375 -0.390625q0.234375 -0.15625 0.515625 -0.234375q0.203125 -0.0625 0.625 -0.109375q0.859375 -0.109375 1.25 -0.25q0.015625 -0.140625 0.015625 -0.171875q0 -0.4375 -0.203125 -0.609375q-0.265625 -0.234375 -0.796875 -0.234375q-0.5 0 -0.734375 0.171875q-0.234375 0.171875 -0.359375 0.609375l-0.6875 -0.09375q0.09375 -0.4375 0.3125 -0.703125q0.21875 -0.28125 0.625 -0.421875q0.40625 -0.15625 0.9375 -0.15625q0.53125 0 0.859375 0.125q0.34375 0.125 0.5 0.328125q0.15625 0.1875 0.21875 0.46875q0.03125 0.1875 0.03125 0.65625l0 0.9375q0 0.96875 0.046875 1.234375q0.046875 0.265625 0.171875 0.5l-0.734375 0q-0.109375 -0.21875 -0.140625 -0.515625zm-0.0625 -1.5625q-0.375 0.15625 -1.140625 0.265625q-0.4375 0.0625 -0.625 0.140625q-0.171875 0.078125 -0.265625 0.234375q-0.09375 0.140625 -0.09375 0.328125q0 0.28125 0.203125 0.46875q0.21875 0.1875 0.625 0.1875q0.40625 0 0.71875 -0.171875q0.328125 -0.1875 0.46875 -0.5q0.109375 -0.234375 0.109375 -0.703125l0 -0.25zm1.9023438 2.078125l0 -5.734375l1.96875 0q0.671875 0 1.015625 0.09375q0.5 0.109375 0.84375 0.40625q0.453125 0.375 0.671875 0.984375q0.234375 0.59375 0.234375 1.359375q0 0.640625 -0.15625 1.15625q-0.15625 0.5 -0.390625 0.828125q-0.234375 0.328125 -0.53125 0.515625q-0.28125 0.1875 -0.6875 0.296875q-0.390625 0.09375 -0.90625 0.09375l-2.0625 0zm0.75 -0.671875l1.21875 0q0.578125 0 0.890625 -0.109375q0.328125 -0.109375 0.515625 -0.296875q0.265625 -0.265625 0.421875 -0.71875q0.15625 -0.46875 0.15625 -1.109375q0 -0.90625 -0.296875 -1.375q-0.296875 -0.484375 -0.71875 -0.65625q-0.3125 -0.109375 -0.984375 -0.109375l-1.203125 0l0 4.375zm7.7773438 -0.671875l0.71875 0.09375q-0.171875 0.640625 -0.640625 1.0q-0.453125 0.34375 -1.1875 0.34375q-0.90625 0 -1.4375 -0.5625q-0.53125 -0.5625 -0.53125 -1.578125q0 -1.046875 0.53125 -1.625q0.546875 -0.578125 1.40625 -0.578125q0.828125 0 1.359375 0.578125q0.53125 0.5625 0.53125 1.59375q0 0.0625 -0.015625 0.1875l-3.09375 0q0.046875 0.671875 0.390625 1.046875q0.34375 0.359375 0.875 0.359375q0.375 0 0.640625 -0.203125q0.28125 -0.203125 0.453125 -0.65625zm-2.3125 -1.125l2.3125 0q-0.046875 -0.53125 -0.265625 -0.796875q-0.328125 -0.40625 -0.875 -0.40625q-0.484375 0 -0.8125 0.328125q-0.328125 
0.328125 -0.359375 0.875zm6.6210938 0.953125l0.6875 0.078125q-0.109375 0.71875 -0.578125 1.125q-0.46875 0.40625 -1.140625 0.40625q-0.859375 0 -1.375 -0.546875q-0.515625 -0.5625 -0.515625 -1.609375q0 -0.671875 0.21875 -1.171875q0.234375 -0.5 0.6875 -0.75q0.453125 -0.265625 0.984375 -0.265625q0.671875 0 1.09375 0.34375q0.4375 0.34375 0.5625 0.96875l-0.6875 0.109375q-0.09375 -0.421875 -0.34375 -0.625q-0.25 -0.21875 -0.59375 -0.21875q-0.53125 0 -0.875 0.390625q-0.328125 0.375 -0.328125 1.203125q0 0.828125 0.3125 1.21875q0.328125 0.375 0.84375 0.375q0.421875 0 0.6875 -0.25q0.28125 -0.265625 0.359375 -0.78125zm1.03125 -0.5625q0 -1.15625 0.640625 -1.703125q0.53125 -0.46875 1.3125 -0.46875q0.84375 0 1.390625 0.5625q0.546875 0.5625 0.546875 1.546875q0 0.8125 -0.25 1.265625q-0.234375 0.453125 -0.703125 0.71875q-0.453125 0.25 -0.984375 0.25q-0.875 0 -1.421875 -0.5625q-0.53125 -0.5625 -0.53125 -1.609375zm0.71875 0q0 0.796875 0.34375 1.203125q0.359375 0.390625 0.890625 0.390625q0.515625 0 0.859375 -0.390625q0.359375 -0.40625 0.359375 -1.21875q0 -0.78125 -0.359375 -1.171875q-0.34375 -0.390625 -0.859375 -0.390625q-0.53125 0 -0.890625 0.390625q-0.34375 0.390625 -0.34375 1.1875zm6.6835938 2.078125l0 -0.53125q-0.390625 0.625 -1.15625 0.625q-0.5 0 -0.921875 -0.265625q-0.40625 -0.28125 -0.640625 -0.765625q-0.21875 -0.5 -0.21875 -1.140625q0 -0.609375 0.203125 -1.109375q0.203125 -0.515625 0.609375 -0.78125q0.421875 -0.28125 0.9375 -0.28125q0.375 0 0.65625 0.171875q0.296875 0.15625 0.484375 0.40625l0 -2.0625l0.703125 0l0 5.734375l-0.65625 0zm-2.21875 -2.078125q0 0.796875 0.328125 1.203125q0.34375 0.390625 0.796875 0.390625q0.46875 0 0.78125 -0.375q0.328125 -0.375 0.328125 -1.15625q0 -0.84375 -0.328125 -1.234375q-0.328125 -0.40625 -0.8125 -0.40625q-0.46875 0 -0.78125 0.390625q-0.3125 0.375 -0.3125 1.1875zm6.8242188 0.734375l0.71875 0.09375q-0.171875 0.640625 -0.640625 1.0q-0.453125 0.34375 -1.1875 0.34375q-0.90625 0 -1.4375 -0.5625q-0.53125 -0.5625 -0.53125 -1.578125q0 -1.046875 0.53125 -1.625q0.546875 -0.578125 1.40625 -0.578125q0.828125 0 1.359375 0.578125q0.53125 0.5625 0.53125 1.59375q0 0.0625 -0.015625 0.1875l-3.09375 0q0.046875 0.671875 0.390625 1.046875q0.34375 0.359375 0.875 0.359375q0.375 0 0.640625 -0.203125q0.28125 -0.203125 0.453125 -0.65625zm-2.3125 -1.125l2.3125 0q-0.046875 -0.53125 -0.265625 -0.796875q-0.328125 -0.40625 -0.875 -0.40625q-0.484375 0 -0.8125 0.328125q-0.328125 0.328125 -0.359375 0.875zm3.9023438 2.46875l0 -4.15625l0.640625 0l0 0.640625q0.234375 -0.453125 0.4375 -0.59375q0.21875 -0.140625 0.453125 -0.140625q0.359375 0 0.734375 0.234375l-0.25 0.65625q-0.25 -0.15625 -0.515625 -0.15625q-0.234375 0 -0.421875 0.140625q-0.171875 0.140625 -0.25 0.375q-0.125 0.375 -0.125 0.828125l0 2.171875l-0.703125 0zm2.7421875 0l0 -5.734375l0.75 0l0 5.0625l2.828125 0l0 0.671875l-3.578125 0zm7.0898438 -0.515625q-0.390625 0.328125 -0.75 0.46875q-0.359375 0.140625 -0.78125 0.140625q-0.671875 0 -1.046875 -0.328125q-0.359375 -0.34375 -0.359375 -0.859375q0 -0.3125 0.125 -0.5625q0.140625 -0.25 0.359375 -0.390625q0.234375 -0.15625 0.515625 -0.234375q0.203125 -0.0625 0.625 -0.109375q0.859375 -0.109375 1.25 -0.25q0.015625 -0.140625 0.015625 -0.171875q0 -0.4375 -0.203125 -0.609375q-0.265625 -0.234375 -0.796875 -0.234375q-0.5 0 -0.734375 0.171875q-0.234375 0.171875 -0.359375 0.609375l-0.6875 -0.09375q0.09375 -0.4375 0.3125 -0.703125q0.21875 -0.28125 0.625 -0.421875q0.40625 -0.15625 0.9375 -0.15625q0.53125 0 0.859375 0.125q0.34375 0.125 0.5 0.328125q0.15625 0.1875 0.21875 0.46875q0.03125 0.1875 0.03125 0.65625l0 
0.9375q0 0.96875 0.046875 1.234375q0.046875 0.265625 0.171875 0.5l-0.734375 0q-0.109375 -0.21875 -0.140625 -0.515625zm-0.0625 -1.5625q-0.375 0.15625 -1.140625 0.265625q-0.4375 0.0625 -0.625 0.140625q-0.171875 0.078125 -0.265625 0.234375q-0.09375 0.140625 -0.09375 0.328125q0 0.28125 0.203125 0.46875q0.21875 0.1875 0.625 0.1875q0.40625 0 0.71875 -0.171875q0.328125 -0.1875 0.46875 -0.5q0.109375 -0.234375 0.109375 -0.703125l0 -0.25zm1.7773438 3.671875l-0.078125 -0.65625q0.234375 0.0625 0.40625 0.0625q0.234375 0 0.375 -0.078125q0.140625 -0.078125 0.21875 -0.21875q0.078125 -0.109375 0.21875 -0.515625q0.015625 -0.0625 0.0625 -0.171875l-1.578125 -4.171875l0.765625 0l0.859375 2.40625q0.171875 0.453125 0.296875 0.96875q0.125 -0.484375 0.296875 -0.953125l0.890625 -2.421875l0.703125 0l-1.578125 4.234375q-0.265625 0.671875 -0.40625 0.9375q-0.1875 0.34375 -0.4375 0.5q-0.234375 0.171875 -0.5625 0.171875q-0.203125 0 -0.453125 -0.09375zm6.875 -2.9375l0.71875 0.09375q-0.171875 0.640625 -0.640625 1.0q-0.453125 0.34375 -1.1875 0.34375q-0.90625 0 -1.4375 -0.5625q-0.53125 -0.5625 -0.53125 -1.578125q0 -1.046875 0.53125 -1.625q0.546875 -0.578125 1.40625 -0.578125q0.828125 0 1.359375 0.578125q0.53125 0.5625 0.53125 1.59375q0 0.0625 -0.015625 0.1875l-3.09375 0q0.046875 0.671875 0.390625 1.046875q0.34375 0.359375 0.875 0.359375q0.375 0 0.640625 -0.203125q0.28125 -0.203125 0.453125 -0.65625zm-2.3125 -1.125l2.3125 0q-0.046875 -0.53125 -0.265625 -0.796875q-0.328125 -0.40625 -0.875 -0.40625q-0.484375 0 -0.8125 0.328125q-0.328125 0.328125 -0.359375 0.875zm3.9023438 2.46875l0 -4.15625l0.640625 0l0 0.640625q0.234375 -0.453125 0.4375 -0.59375q0.21875 -0.140625 0.453125 -0.140625q0.359375 0 0.734375 0.234375l-0.25 0.65625q-0.25 -0.15625 -0.515625 -0.15625q-0.234375 0 -0.421875 0.140625q-0.171875 0.140625 -0.25 0.375q-0.125 0.375 -0.125 0.828125l0 2.171875l-0.703125 0z" fill-rule="nonzero"/><path fill="#ffe599" d="m431.47375 201.8622l82.92914 0l0 105.98427l-82.92914 0z" fill-rule="evenodd"/><path stroke="#595959" stroke-width="1.0" stroke-linejoin="round" stroke-linecap="butt" d="m431.47375 201.8622l82.92914 0l0 105.98427l-82.92914 0z" fill-rule="evenodd"/><path fill="#c9daf8" d="m440.29932 233.41951l65.25986 0l0 16.661407l-65.25986 0z" fill-rule="evenodd"/><path stroke="#595959" stroke-width="1.0" stroke-linejoin="round" stroke-linecap="butt" d="m440.29932 233.41951l65.25986 0l0 16.661407l-65.25986 0z" fill-rule="evenodd"/><path fill="#000000" d="m451.88208 245.49022l2.921875 -7.625l1.09375 0l3.125 7.625l-1.15625 0l-0.890625 -2.3125l-3.1875 0l-0.828125 2.3125l-1.078125 0zm2.203125 -3.125l2.578125 0l-0.796875 -2.125q-0.359375 -0.953125 -0.53125 -1.578125q-0.15625 0.734375 -0.421875 1.453125l-0.828125 2.25zm7.6701355 2.28125l0.125 0.828125q-0.390625 0.09375 -0.703125 0.09375q-0.5 0 -0.78125 -0.15625q-0.28125 -0.171875 -0.40625 -0.4375q-0.109375 -0.265625 -0.109375 -1.109375l0 -3.171875l-0.6875 0l0 -0.734375l0.6875 0l0 -1.359375l0.9375 -0.5625l0 1.921875l0.9375 0l0 0.734375l-0.9375 0l0 3.234375q0 0.390625 0.046875 0.515625q0.046875 0.109375 0.15625 0.1875q0.109375 0.0625 0.328125 0.0625q0.15625 0 0.40625 -0.046875zm2.9606628 0l0.125 0.828125q-0.390625 0.09375 -0.703125 0.09375q-0.5 0 -0.78125 -0.15625q-0.28125 -0.171875 -0.40625 -0.4375q-0.109375 -0.265625 -0.109375 -1.109375l0 -3.171875l-0.6875 0l0 -0.734375l0.6875 0l0 -1.359375l0.9375 -0.5625l0 1.921875l0.9375 0l0 0.734375l-0.9375 0l0 3.234375q0 0.390625 0.046875 0.515625q0.046875 0.109375 0.15625 0.1875q0.109375 0.0625 0.328125 0.0625q0.15625 0 0.40625 -0.046875zm4.6950073 
-0.9375l0.96875 0.125q-0.234375 0.84375 -0.859375 1.3125q-0.609375 0.46875 -1.578125 0.46875q-1.203125 0 -1.921875 -0.75q-0.703125 -0.75 -0.703125 -2.09375q0 -1.390625 0.71875 -2.15625q0.71875 -0.78125 1.859375 -0.78125q1.109375 0 1.8125 0.765625q0.703125 0.75 0.703125 2.125q0 0.078125 0 0.234375l-4.125 0q0.046875 0.921875 0.515625 1.40625q0.46875 0.484375 1.15625 0.484375q0.515625 0 0.875 -0.265625q0.359375 -0.28125 0.578125 -0.875zm-3.078125 -1.515625l3.09375 0q-0.0625 -0.6875 -0.359375 -1.046875q-0.453125 -0.53125 -1.15625 -0.53125q-0.640625 0 -1.09375 0.4375q-0.4375 0.421875 -0.484375 1.140625zm5.2233887 3.296875l0 -5.53125l0.84375 0l0 0.796875q0.609375 -0.921875 1.75 -0.921875q0.5 0 0.921875 0.1875q0.421875 0.171875 0.625 0.46875q0.21875 0.296875 0.296875 0.6875q0.046875 0.265625 0.046875 0.921875l0 3.390625l-0.9375 0l0 -3.359375q0 -0.578125 -0.109375 -0.859375q-0.109375 -0.28125 -0.390625 -0.453125q-0.265625 -0.171875 -0.640625 -0.171875q-0.59375 0 -1.03125 0.390625q-0.4375 0.375 -0.4375 1.4375l0 3.015625l-0.9375 0zm7.973358 -0.84375l0.125 0.828125q-0.390625 0.09375 -0.703125 0.09375q-0.5 0 -0.78125 -0.15625q-0.28125 -0.171875 -0.40625 -0.4375q-0.109375 -0.265625 -0.109375 -1.109375l0 -3.171875l-0.6875 0l0 -0.734375l0.6875 0l0 -1.359375l0.9375 -0.5625l0 1.921875l0.9375 0l0 0.734375l-0.9375 0l0 3.234375q0 0.390625 0.046875 0.515625q0.046875 0.109375 0.15625 0.1875q0.109375 0.0625 0.328125 0.0625q0.15625 0 0.40625 -0.046875zm0.91378784 -5.703125l0 -1.078125l0.9375 0l0 1.078125l-0.9375 0zm0 6.546875l0 -5.53125l0.9375 0l0 5.53125l-0.9375 0zm2.0237122 -2.765625q0 -1.53125 0.84375 -2.265625q0.71875 -0.625 1.734375 -0.625q1.140625 0 1.859375 0.75q0.734375 0.75 0.734375 2.0625q0 1.0625 -0.328125 1.6875q-0.3125 0.609375 -0.921875 0.953125q-0.609375 0.328125 -1.34375 0.328125q-1.15625 0 -1.875 -0.734375q-0.703125 -0.75 -0.703125 -2.15625zm0.953125 0q0 1.0625 0.46875 1.59375q0.46875 0.53125 1.15625 0.53125q0.703125 0 1.15625 -0.53125q0.46875 -0.53125 0.46875 -1.625q0 -1.015625 -0.46875 -1.546875q-0.453125 -0.53125 -1.15625 -0.53125q-0.6875 0 -1.15625 0.53125q-0.46875 0.515625 -0.46875 1.578125zm5.3171387 2.765625l0 -5.53125l0.84375 0l0 0.796875q0.609375 -0.921875 1.75 -0.921875q0.5 0 0.921875 0.1875q0.421875 0.171875 0.625 0.46875q0.21875 0.296875 0.296875 0.6875q0.046875 0.265625 0.046875 0.921875l0 3.390625l-0.9375 0l0 -3.359375q0 -0.578125 -0.109375 -0.859375q-0.109375 -0.28125 -0.390625 -0.453125q-0.265625 -0.171875 -0.640625 -0.171875q-0.59375 0 -1.03125 0.390625q-0.4375 0.375 -0.4375 1.4375l0 3.015625l-0.9375 0z" fill-rule="nonzero"/><path fill="#ead1dc" d="m440.29932 279.8749l65.25986 0l0 16.661438l-65.25986 0z" fill-rule="evenodd"/><path stroke="#595959" stroke-width="1.0" stroke-linejoin="round" stroke-linecap="butt" d="m440.29932 279.8749l65.25986 0l0 16.661438l-65.25986 0z" fill-rule="evenodd"/><path fill="#000000" d="m462.7707 291.94562l0 -7.625l1.515625 0l1.796875 5.390625q0.25 0.765625 0.375 1.140625q0.125 -0.421875 0.40625 -1.234375l1.828125 -5.296875l1.34375 0l0 7.625l-0.96875 0l0 -6.390625l-2.21875 6.390625l-0.90625 0l-2.203125 -6.5l0 6.5l-0.96875 0zm8.8611145 0l0 -7.625l1.015625 0l0 6.71875l3.75 0l0 0.90625l-4.765625 0zm5.973358 0l0 -7.625l2.875 0q0.75 0 1.15625 0.0625q0.5625 0.09375 0.9375 0.359375q0.390625 0.265625 0.609375 0.75q0.234375 0.46875 0.234375 1.03125q0 0.96875 -0.625 1.65625q-0.609375 0.671875 -2.234375 0.671875l-1.953125 0l0 3.09375l-1.0 0zm1.0 -4.0l1.96875 0q0.984375 0 1.390625 -0.359375q0.421875 -0.375 0.421875 -1.03125q0 -0.484375 -0.25 -0.8125q-0.234375 
-0.34375 -0.640625 -0.453125q-0.25 -0.078125 -0.9375 -0.078125l-1.953125 0l0 2.734375z" fill-rule="nonzero"/><path fill="#f6b26b" d="m454.1472 212.56071l37.5748 0l0 11.937012l-37.5748 0z" fill-rule="evenodd"/><path stroke="#595959" stroke-width="1.0" stroke-linejoin="round" stroke-linecap="butt" d="m454.1472 212.56071l37.5748 0l0 11.937012l-37.5748 0z" fill-rule="evenodd"/><path fill="#000000" d="m465.29953 220.92921l0 -4.765625l0.65625 0l2.5 3.734375l0 -3.734375l0.609375 0l0 4.765625l-0.65625 0l-2.5 -3.75l0 3.75l-0.609375 0zm4.5256653 -1.71875q0 -0.96875 0.53125 -1.421875q0.453125 -0.390625 1.09375 -0.390625q0.703125 0 1.15625 0.46875q0.453125 0.46875 0.453125 1.28125q0 0.671875 -0.203125 1.0625q-0.1875 0.375 -0.578125 0.59375q-0.375 0.203125 -0.828125 0.203125q-0.734375 0 -1.1875 -0.453125q-0.4375 -0.46875 -0.4375 -1.34375zm0.609375 0q0 0.65625 0.28125 0.984375q0.296875 0.328125 0.734375 0.328125q0.4375 0 0.71875 -0.328125q0.296875 -0.328125 0.296875 -1.015625q0 -0.640625 -0.296875 -0.96875q-0.296875 -0.328125 -0.71875 -0.328125q-0.4375 0 -0.734375 0.328125q-0.28125 0.328125 -0.28125 1.0zm3.3112793 1.71875l0 -3.453125l0.515625 0l0 0.53125q0.203125 -0.375 0.375 -0.484375q0.171875 -0.125 0.375 -0.125q0.296875 0 0.609375 0.1875l-0.203125 0.546875q-0.21875 -0.125 -0.4375 -0.125q-0.1875 0 -0.34375 0.125q-0.140625 0.109375 -0.21875 0.3125q-0.09375 0.3125 -0.09375 0.671875l0 1.8125l-0.578125 0zm2.2165833 0l0 -3.453125l0.53125 0l0 0.484375q0.15625 -0.25 0.421875 -0.40625q0.28125 -0.15625 0.625 -0.15625q0.375 0 0.625 0.15625q0.25 0.15625 0.34375 0.453125q0.40625 -0.609375 1.0625 -0.609375q0.515625 0 0.78125 0.296875q0.28125 0.28125 0.28125 0.859375l0 2.375l-0.578125 0l0 -2.171875q0 -0.359375 -0.0625 -0.5q-0.046875 -0.15625 -0.203125 -0.25q-0.140625 -0.09375 -0.34375 -0.09375q-0.359375 0 -0.609375 0.25q-0.234375 0.234375 -0.234375 0.765625l0 2.0l-0.59375 0l0 -2.25q0 -0.375 -0.140625 -0.5625q-0.140625 -0.203125 -0.46875 -0.203125q-0.25 0 -0.453125 0.125q-0.203125 0.125 -0.296875 0.375q-0.09375 0.25 -0.09375 0.71875l0 1.796875l-0.59375 0z" fill-rule="nonzero"/><path fill="#b6d7a8" d="m454.1472 259.0154l37.5748 0l0 11.937012l-37.5748 0z" fill-rule="evenodd"/><path stroke="#595959" stroke-width="1.0" stroke-linejoin="round" stroke-linecap="butt" d="m454.1472 259.0154l37.5748 0l0 11.937012l-37.5748 0z" fill-rule="evenodd"/><path fill="#000000" d="m465.29953 267.3839l0 -4.765625l0.65625 0l2.5 3.734375l0 -3.734375l0.609375 0l0 4.765625l-0.65625 0l-2.5 -3.75l0 3.75l-0.609375 0zm4.5256653 -1.71875q0 -0.96875 0.53125 -1.421875q0.453125 -0.390625 1.09375 -0.390625q0.703125 0 1.15625 0.46875q0.453125 0.46875 0.453125 1.28125q0 0.671875 -0.203125 1.0625q-0.1875 0.375 -0.578125 0.59375q-0.375 0.203125 -0.828125 0.203125q-0.734375 0 -1.1875 -0.453125q-0.4375 -0.46875 -0.4375 -1.34375zm0.609375 0q0 0.65625 0.28125 0.984375q0.296875 0.328125 0.734375 0.328125q0.4375 0 0.71875 -0.328125q0.296875 -0.328125 0.296875 -1.015625q0 -0.640625 -0.296875 -0.96875q-0.296875 -0.328125 -0.71875 -0.328125q-0.4375 0 -0.734375 0.328125q-0.28125 0.328125 -0.28125 1.0zm3.3112793 1.71875l0 -3.453125l0.515625 0l0 0.53125q0.203125 -0.375 0.375 -0.484375q0.171875 -0.125 0.375 -0.125q0.296875 0 0.609375 0.1875l-0.203125 0.546875q-0.21875 -0.125 -0.4375 -0.125q-0.1875 0 -0.34375 0.125q-0.140625 0.109375 -0.21875 0.3125q-0.09375 0.3125 -0.09375 0.671875l0 1.8125l-0.578125 0zm2.2165833 0l0 -3.453125l0.53125 0l0 0.484375q0.15625 -0.25 0.421875 -0.40625q0.28125 -0.15625 0.625 -0.15625q0.375 0 0.625 0.15625q0.25 0.15625 0.34375 
0.453125q0.40625 -0.609375 1.0625 -0.609375q0.515625 0 0.78125 0.296875q0.28125 0.28125 0.28125 0.859375l0 2.375l-0.578125 0l0 -2.171875q0 -0.359375 -0.0625 -0.5q-0.046875 -0.15625 -0.203125 -0.25q-0.140625 -0.09375 -0.34375 -0.09375q-0.359375 0 -0.609375 0.25q-0.234375 0.234375 -0.234375 0.765625l0 2.0l-0.59375 0l0 -2.25q0 -0.375 -0.140625 -0.5625q-0.140625 -0.203125 -0.46875 -0.203125q-0.25 0 -0.453125 0.125q-0.203125 0.125 -0.296875 0.375q-0.09375 0.25 -0.09375 0.71875l0 1.796875l-0.59375 0z" fill-rule="nonzero"/><path fill="#fff2cc" d="m426.0643 311.19815l93.73227 0l0 11.937012l-93.73227 0z" fill-rule="evenodd"/><path fill="#595959" d="m437.50467 320.04666l0 -5.734375l0.75 0l0 5.0625l2.828125 0l0 0.671875l-3.578125 0zm4.3710938 0l0 -5.734375l0.703125 0l0 5.734375l-0.703125 0zm4.4960938 -0.515625q-0.390625 0.328125 -0.75 0.46875q-0.359375 0.140625 -0.78125 0.140625q-0.671875 0 -1.046875 -0.328125q-0.359375 -0.34375 -0.359375 -0.859375q0 -0.3125 0.125 -0.5625q0.140625 -0.25 0.359375 -0.390625q0.234375 -0.15625 0.515625 -0.234375q0.203125 -0.0625 0.625 -0.109375q0.859375 -0.109375 1.25 -0.25q0.015625 -0.140625 0.015625 -0.171875q0 -0.4375 -0.203125 -0.609375q-0.265625 -0.234375 -0.796875 -0.234375q-0.5 0 -0.734375 0.171875q-0.234375 0.171875 -0.359375 0.609375l-0.6875 -0.09375q0.09375 -0.4375 0.3125 -0.703125q0.21875 -0.28125 0.625 -0.421875q0.40625 -0.15625 0.9375 -0.15625q0.53125 0 0.859375 0.125q0.34375 0.125 0.5 0.328125q0.15625 0.1875 0.21875 0.46875q0.03125 0.1875 0.03125 0.65625l0 0.9375q0 0.96875 0.046875 1.234375q0.046875 0.265625 0.171875 0.5l-0.734375 0q-0.109375 -0.21875 -0.140625 -0.515625zm-0.0625 -1.5625q-0.375 0.15625 -1.140625 0.265625q-0.4375 0.0625 -0.625 0.140625q-0.171875 0.078125 -0.265625 0.234375q-0.09375 0.140625 -0.09375 0.328125q0 0.28125 0.203125 0.46875q0.21875 0.1875 0.625 0.1875q0.40625 0 0.71875 -0.171875q0.328125 -0.1875 0.46875 -0.5q0.109375 -0.234375 0.109375 -0.703125l0 -0.25zm1.8085938 2.078125l0 -4.15625l0.625 0l0 0.59375q0.203125 -0.3125 0.515625 -0.5q0.328125 -0.1875 0.75 -0.1875q0.453125 0 0.75 0.203125q0.296875 0.1875 0.421875 0.53125q0.484375 -0.734375 1.28125 -0.734375q0.609375 0 0.9375 0.34375q0.34375 0.34375 0.34375 1.0625l0 2.84375l-0.703125 0l0 -2.609375q0 -0.421875 -0.078125 -0.609375q-0.0625 -0.1875 -0.25 -0.296875q-0.171875 -0.125 -0.40625 -0.125q-0.4375 0 -0.734375 0.296875q-0.28125 0.296875 -0.28125 0.9375l0 2.40625l-0.703125 0l0 -2.703125q0 -0.46875 -0.171875 -0.703125q-0.171875 -0.234375 -0.5625 -0.234375q-0.296875 0 -0.5625 0.15625q-0.25 0.15625 -0.359375 0.46875q-0.109375 0.296875 -0.109375 0.859375l0 2.15625l-0.703125 0zm9.3671875 -0.515625q-0.390625 0.328125 -0.75 0.46875q-0.359375 0.140625 -0.78125 0.140625q-0.671875 0 -1.046875 -0.328125q-0.359375 -0.34375 -0.359375 -0.859375q0 -0.3125 0.125 -0.5625q0.140625 -0.25 0.359375 -0.390625q0.234375 -0.15625 0.515625 -0.234375q0.203125 -0.0625 0.625 -0.109375q0.859375 -0.109375 1.25 -0.25q0.015625 -0.140625 0.015625 -0.171875q0 -0.4375 -0.203125 -0.609375q-0.265625 -0.234375 -0.796875 -0.234375q-0.5 0 -0.734375 0.171875q-0.234375 0.171875 -0.359375 0.609375l-0.6875 -0.09375q0.09375 -0.4375 0.3125 -0.703125q0.21875 -0.28125 0.625 -0.421875q0.40625 -0.15625 0.9375 -0.15625q0.53125 0 0.859375 0.125q0.34375 0.125 0.5 0.328125q0.15625 0.1875 0.21875 0.46875q0.03125 0.1875 0.03125 0.65625l0 0.9375q0 0.96875 0.046875 1.234375q0.046875 0.265625 0.171875 0.5l-0.734375 0q-0.109375 -0.21875 -0.140625 -0.515625zm-0.0625 -1.5625q-0.375 0.15625 -1.140625 0.265625q-0.4375 0.0625 -0.625 
1.03125 0.5 1.546875q0.1875 -0.5625 0.5625 -1.671875l2.515625 -7.296875l1.859375 0l0 10.484375l-1.328125 0l0 -8.78125l-3.046875 8.78125l-1.265625 0l-3.03125 -8.9375l0 8.9375l-1.328125 0z" fill-rule="nonzero"/></g></svg>
\ No newline at end of file
1.984375 0.984375q1.15625 0 1.953125 -0.9375q0.8125 -0.9375 0.8125 -2.875q0 -2.125 -0.828125 -3.125q-0.8125 -1.0 -2.015625 -1.0q-1.171875 0 -1.96875 0.96875q-0.78125 0.953125 -0.78125 3.015625zm17.060547 1.828125l1.8125 0.234375q-0.421875 1.578125 -1.59375 2.46875q-1.15625 0.875 -2.96875 0.875q-2.265625 0 -3.609375 -1.390625q-1.328125 -1.40625 -1.328125 -3.9375q0 -2.625 1.34375 -4.0625q1.34375 -1.453125 3.5 -1.453125q2.078125 0 3.390625 1.421875q1.328125 1.40625 1.328125 3.984375q0 0.15625 -0.015625 0.46875l-7.734375 0q0.09375 1.703125 0.96875 2.609375q0.875 0.90625 2.171875 0.90625q0.96875 0 1.640625 -0.5q0.6875 -0.515625 1.09375 -1.625zm-5.78125 -2.84375l5.796875 0q-0.109375 -1.296875 -0.65625 -1.953125q-0.84375 -1.015625 -2.1875 -1.015625q-1.203125 0 -2.03125 0.8125q-0.828125 0.796875 -0.921875 2.15625zm9.779297 6.1875l0 -10.375l1.578125 0l0 1.578125q0.609375 -1.109375 1.125 -1.453125q0.515625 -0.359375 1.125 -0.359375q0.890625 0 1.8125 0.5625l-0.609375 1.640625q-0.640625 -0.390625 -1.28125 -0.390625q-0.578125 0 -1.046875 0.359375q-0.453125 0.34375 -0.65625 0.953125q-0.28125 0.9375 -0.28125 2.046875l0 5.4375l-1.765625 0zm6.8320312 0l0 -14.3125l1.890625 0l0 12.625l7.046875 0l0 1.6875l-8.9375 0zm17.748047 -1.28125q-0.984375 0.828125 -1.890625 1.171875q-0.90625 0.34375 -1.9375 0.34375q-1.703125 0 -2.625 -0.828125q-0.921875 -0.84375 -0.921875 -2.140625q0 -0.765625 0.34375 -1.390625q0.359375 -0.625 0.921875 -1.0q0.5625 -0.390625 1.265625 -0.59375q0.515625 -0.125 1.5625 -0.265625q2.125 -0.25 3.125 -0.59375q0.015625 -0.359375 0.015625 -0.46875q0 -1.0625 -0.5 -1.515625q-0.671875 -0.59375 -2.0 -0.59375q-1.25 0 -1.84375 0.4375q-0.578125 0.4375 -0.859375 1.546875l-1.71875 -0.234375q0.234375 -1.109375 0.765625 -1.78125q0.53125 -0.6875 1.546875 -1.046875q1.015625 -0.375 2.359375 -0.375q1.328125 0 2.15625 0.3125q0.828125 0.3125 1.21875 0.796875q0.390625 0.46875 0.546875 1.1875q0.09375 0.453125 0.09375 1.625l0 2.34375q0 2.453125 0.109375 3.109375q0.109375 0.640625 0.453125 1.234375l-1.84375 0q-0.265625 -0.546875 -0.34375 -1.28125zm-0.15625 -3.921875q-0.953125 0.390625 -2.875 0.65625q-1.078125 0.15625 -1.53125 0.359375q-0.4375 0.1875 -0.6875 0.5625q-0.25 0.375 -0.25 0.84375q0 0.703125 0.53125 1.171875q0.53125 0.46875 1.5625 0.46875q1.015625 0 1.796875 -0.4375q0.796875 -0.453125 1.171875 -1.21875q0.28125 -0.609375 0.28125 -1.765625l0 -0.640625zm4.419922 9.203125l-0.1875 -1.65625q0.578125 0.15625 1.0 0.15625q0.59375 0 0.9375 -0.203125q0.359375 -0.1875 0.578125 -0.53125q0.171875 -0.265625 0.546875 -1.3125q0.046875 -0.15625 0.15625 -0.4375l-3.9375 -10.390625l1.890625 0l2.15625 6.015625q0.421875 1.140625 0.75 2.390625q0.3125 -1.203125 0.71875 -2.359375l2.21875 -6.046875l1.765625 0l-3.953125 10.546875q-0.625 1.71875 -0.984375 2.359375q-0.46875 0.875 -1.078125 1.265625q-0.59375 0.40625 -1.4375 0.40625q-0.515625 0 -1.140625 -0.203125zm17.1875 -7.34375l1.8125 0.234375q-0.421875 1.578125 -1.59375 2.46875q-1.15625 0.875 -2.96875 0.875q-2.265625 0 -3.609375 -1.390625q-1.328125 -1.40625 -1.328125 -3.9375q0 -2.625 1.34375 -4.0625q1.34375 -1.453125 3.5 -1.453125q2.078125 0 3.390625 1.421875q1.328125 1.40625 1.328125 3.984375q0 0.15625 -0.015625 0.46875l-7.734375 0q0.09375 1.703125 0.96875 2.609375q0.875 0.90625 2.171875 0.90625q0.96875 0 1.640625 -0.5q0.6875 -0.515625 1.09375 -1.625zm-5.78125 -2.84375l5.796875 0q-0.109375 -1.296875 -0.65625 -1.953125q-0.84375 -1.015625 -2.1875 -1.015625q-1.203125 0 -2.03125 0.8125q-0.828125 0.796875 -0.921875 2.15625zm9.779297 6.1875l0 -10.375l1.578125 0l0 1.578125q0.609375 -1.109375 
1.125 -1.453125q0.515625 -0.359375 1.125 -0.359375q0.890625 0 1.8125 0.5625l-0.609375 1.640625q-0.640625 -0.390625 -1.28125 -0.390625q-0.578125 0 -1.046875 0.359375q-0.453125 0.34375 -0.65625 0.953125q-0.28125 0.9375 -0.28125 2.046875l0 5.4375l-1.765625 0z" fill-rule="nonzero"/></g></svg>
\ No newline at end of file