From d3c075614d4de07c21c5c386d9f0642adf1ed642 Mon Sep 17 00:00:00 2001 From: nroope Date: Fri, 19 Dec 2025 16:42:06 +0100 Subject: [PATCH 01/20] Update README.md --- README.md | 13 +++++++++++-- 1 file changed, 11 insertions(+), 2 deletions(-) diff --git a/README.md b/README.md index c3c6eaf..d5d245b 100644 --- a/README.md +++ b/README.md @@ -3,8 +3,11 @@ ## Prune and Quantize ML models PQuant is a library for training compressed machine learning models, developed at CERN as part of the [Next Generation Triggers](https://nextgentriggers.web.cern.ch/t13/) project. -Installation via pip: ```pip install pquant-ml```. -To run the code, [HGQ2](https://github.com/calad0i/HGQ2) is also needed. +Installation via pip: ```pip install pquant-ml```. + +With TensorFlow ```pip install pquant-ml[tensorflow]```. + +With PyTorch ```pip install pquant-ml[torch]```. PQuant replaces the layers and activations it finds with a Compressed (in the case of layers) or Quantized (in the case of activations) variant. These automatically handle the quantization of the weights, biases and activations, and the pruning of the weights. Both PyTorch and TensorFlow models are supported. @@ -47,6 +50,12 @@ For detailed documentation check this page: [PQuantML documentation](https://pqu ### Authors - Roope Niemi (CERN) - Anastasiia Petrovych (CERN) + - Arghya Das (Purdue University) + - Enrico Lupi (CERN) - Chang Sun (Caltech) + - Dimitrios Danopoulos (CERN) + - Marlon Joshua Helbing + - Mia Liu (Purdue University) - Michael Kagan (SLAC National Accelerator Laboratory) - Vladimir Loncar (CERN) + - Maurizio Pierini (CERN) From b31bf2c99e023c759b47f5712fed57b49bbbfc58 Mon Sep 17 00:00:00 2001 From: Anastasiia Date: Mon, 12 Jan 2026 11:47:48 +0100 Subject: [PATCH 02/20] Add removed property at training model (#22) --- src/pquant/data_models/training_model.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/src/pquant/data_models/training_model.py b/src/pquant/data_models/training_model.py index 78d0c37..228c1f6 100644 --- a/src/pquant/data_models/training_model.py +++ b/src/pquant/data_models/training_model.py @@ -11,3 +11,5 @@ class BaseTrainingModel(BaseModel): rewind: str = Field(default="never") rounds: int = Field(default=1) save_weights_epoch: int = Field(default=-1) + pruning_first: bool = Field(default=False) + \ No newline at end of file From b37020941b96f58be5089531a5e17cb731242fbe Mon Sep 17 00:00:00 2001 From: Anastasiia Date: Mon, 12 Jan 2026 15:44:08 +0100 Subject: [PATCH 03/20] Modified 'post_round' function condition (#23) --- src/pquant/core/keras/layers.py | 5 +++-- src/pquant/core/torch/layers.py | 5 +++-- 2 files changed, 6 insertions(+), 4 deletions(-) diff --git a/src/pquant/core/keras/layers.py b/src/pquant/core/keras/layers.py index b615cbe..823ff49 100644 --- a/src/pquant/core/keras/layers.py +++ b/src/pquant/core/keras/layers.py @@ -1347,11 +1347,12 @@ def call(self, x, training=None): def call_post_round_functions(model, rewind, rounds, r): + last_round = (r == rounds - 1) if rewind == "round": rewind_weights_functions(model) - elif rewind == "post-ticket-search" and r == rounds - 1: + elif rewind == "post-ticket-search" and last_round: rewind_weights_functions(model) - else: + elif not last_round: post_round_functions(model) diff --git a/src/pquant/core/torch/layers.py b/src/pquant/core/torch/layers.py index 6e67655..c13228f 100644 --- a/src/pquant/core/torch/layers.py +++ b/src/pquant/core/torch/layers.py @@ -1404,11 +1404,12 @@ def apply_final_compression(module): def 
call_post_round_functions(model, rewind, rounds, r): + last_round = (r == rounds - 1) if rewind == "round": rewind_weights_functions(model) - elif rewind == "post-ticket-search" and r == rounds - 1: + elif rewind == "post-ticket-search" and last_round: rewind_weights_functions(model) - elif r != rounds - 1: + elif not last_round: post_round_functions(model) From fae6df9dca8e1563cf570df328273e36446a9205 Mon Sep 17 00:00:00 2001 From: Anastasiia Petrovych Date: Tue, 3 Feb 2026 18:16:48 +0100 Subject: [PATCH 04/20] Add minor modifications to the core functionality --- examples/example_finetuning.ipynb | 2 +- examples/example_jet_tagging.ipynb | 612 +++++++++++++++++-- src/pquant/configs/config_ap.yaml | 2 +- src/pquant/configs/config_autosparse.yaml | 2 +- src/pquant/configs/config_cs.yaml | 2 +- src/pquant/configs/config_fitcompress.yaml | 2 +- src/pquant/configs/config_mdmm.yaml | 2 +- src/pquant/configs/config_pdp.yaml | 2 +- src/pquant/configs/config_wanda.yaml | 2 +- src/pquant/configs/finetuning.yaml | 5 +- src/pquant/core/torch/train.py | 2 +- src/pquant/data_models/quantization_model.py | 4 +- src/pquant/pruning_methods/cs.py | 6 +- src/pquant/pruning_methods/pdp.py | 8 +- tests/test_keras_compression_layers.py | 8 +- tests/test_torch_compression_layers.py | 8 +- 16 files changed, 599 insertions(+), 70 deletions(-) diff --git a/examples/example_finetuning.ipynb b/examples/example_finetuning.ipynb index e713e80..2961ae3 100644 --- a/examples/example_finetuning.ipynb +++ b/examples/example_finetuning.ipynb @@ -150,7 +150,7 @@ " enable_quantization: true\n", " hgq_gamma: 0.0003\n", " hgq_heterogeneous: true\n", - " layer_specific: []\n", + " layer_specific: {}\n", " use_high_granularity_quantization: false\n", " use_real_tanh: false\n", " use_symmetric_quantization: false\n", diff --git a/examples/example_jet_tagging.ipynb b/examples/example_jet_tagging.ipynb index 4d6396f..54e917f 100644 --- a/examples/example_jet_tagging.ipynb +++ b/examples/example_jet_tagging.ipynb @@ -2,40 +2,307 @@ "cells": [ { "cell_type": "code", - "execution_count": null, + "execution_count": 1, "id": "2ac0177a-0354-437b-b13a-947144dba15e", "metadata": {}, - "outputs": [], + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Defaulting to user installation because normal site-packages is not writeable\n", + "Requirement already satisfied: scikit-learn in /eos/user/a/apetrovy/.local/lib/python3.11/site-packages (1.7.2)\n", + "Requirement already satisfied: numpy>=1.22.0 in /eos/user/a/apetrovy/.local/lib/python3.11/site-packages (from scikit-learn) (1.26.4)\n", + "Requirement already satisfied: scipy>=1.8.0 in /opt/conda/lib/python3.11/site-packages (from scikit-learn) (1.11.3)\n", + "Requirement already satisfied: joblib>=1.2.0 in /opt/conda/lib/python3.11/site-packages (from scikit-learn) (1.4.2)\n", + "Requirement already satisfied: threadpoolctl>=3.1.0 in /opt/conda/lib/python3.11/site-packages (from scikit-learn) (3.5.0)\n", + "Defaulting to user installation because normal site-packages is not writeable\n", + "Requirement already satisfied: pandas in /eos/user/a/apetrovy/.local/lib/python3.11/site-packages (2.3.3)\n", + "Requirement already satisfied: numpy>=1.23.2 in /eos/user/a/apetrovy/.local/lib/python3.11/site-packages (from pandas) (1.26.4)\n", + "Requirement already satisfied: python-dateutil>=2.8.2 in /opt/conda/lib/python3.11/site-packages (from pandas) (2.9.0)\n", + "Requirement already satisfied: pytz>=2020.1 in /opt/conda/lib/python3.11/site-packages (from pandas) 
(2024.2)\n", + "Requirement already satisfied: tzdata>=2022.7 in /opt/conda/lib/python3.11/site-packages (from pandas) (2024.2)\n", + "Requirement already satisfied: six>=1.5 in /opt/conda/lib/python3.11/site-packages (from python-dateutil>=2.8.2->pandas) (1.16.0)\n" + ] + } + ], "source": [ "!pip install scikit-learn\n", "!pip install pandas\n", - "!pip install da4ml\n", + "# !pip install da4ml\n", "# For da4ml, also required: !conda install conda-forge::verilator -y" ] }, + { + "cell_type": "code", + "execution_count": 2, + "id": "42b74c37-9a48-4ffb-8981-f27ad8ca52fc", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Defaulting to user installation because normal site-packages is not writeable\n", + "Requirement already satisfied: keras in /eos/user/a/apetrovy/.local/lib/python3.11/site-packages (3.13.0)\n", + "Requirement already satisfied: absl-py in /eos/user/a/apetrovy/.local/lib/python3.11/site-packages (from keras) (2.3.1)\n", + "Requirement already satisfied: numpy in /opt/conda/lib/python3.11/site-packages (from keras) (1.24.4)\n", + "Requirement already satisfied: rich in /eos/user/a/apetrovy/.local/lib/python3.11/site-packages (from keras) (14.2.0)\n", + "Requirement already satisfied: namex in /eos/user/a/apetrovy/.local/lib/python3.11/site-packages (from keras) (0.1.0)\n", + "Requirement already satisfied: h5py in /eos/user/a/apetrovy/.local/lib/python3.11/site-packages (from keras) (3.15.1)\n", + "Requirement already satisfied: optree in /eos/user/a/apetrovy/.local/lib/python3.11/site-packages (from keras) (0.18.0)\n", + "Requirement already satisfied: ml-dtypes in /eos/user/a/apetrovy/.local/lib/python3.11/site-packages (from keras) (0.5.4)\n", + "Requirement already satisfied: packaging in /opt/conda/lib/python3.11/site-packages (from keras) (24.1)\n", + "Requirement already satisfied: typing-extensions>=4.6.0 in /eos/user/a/apetrovy/.local/lib/python3.11/site-packages (from optree->keras) (4.15.0)\n", + "Requirement already satisfied: markdown-it-py>=2.2.0 in /eos/user/a/apetrovy/.local/lib/python3.11/site-packages (from rich->keras) (4.0.0)\n", + "Requirement already satisfied: pygments<3.0.0,>=2.13.0 in /opt/conda/lib/python3.11/site-packages (from rich->keras) (2.18.0)\n", + "Requirement already satisfied: mdurl~=0.1 in /eos/user/a/apetrovy/.local/lib/python3.11/site-packages (from markdown-it-py>=2.2.0->rich->keras) (0.1.2)\n" + ] + } + ], + "source": [ + "!pip install keras" + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "id": "d2507537-4a38-49fd-bec5-0a1d1f04d96c", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Defaulting to user installation because normal site-packages is not writeable\n", + "Requirement already satisfied: pquant-ml[torch] in /eos/user/a/apetrovy/.local/lib/python3.11/site-packages (0.0.3)\n", + "Requirement already satisfied: hgq2 in /eos/user/a/apetrovy/.local/lib/python3.11/site-packages (from pquant-ml[torch]) (0.1.6)\n", + "Requirement already satisfied: keras>=3 in /eos/user/a/apetrovy/.local/lib/python3.11/site-packages (from pquant-ml[torch]) (3.13.0)\n", + "Requirement already satisfied: optuna in /eos/user/a/apetrovy/.local/lib/python3.11/site-packages (from pquant-ml[torch]) (4.6.0)\n", + "Requirement already satisfied: pydantic>=2 in /eos/user/a/apetrovy/.local/lib/python3.11/site-packages (from pquant-ml[torch]) (2.12.5)\n", + "Requirement already satisfied: pyyaml>=6.0.1 in /opt/conda/lib/python3.11/site-packages (from 
pquant-ml[torch]) (6.0.2)\n", + "Requirement already satisfied: quantizers>=1.1 in /eos/user/a/apetrovy/.local/lib/python3.11/site-packages (from pquant-ml[torch]) (1.2.2)\n", + "Requirement already satisfied: torch>=2.1 in /opt/conda/lib/python3.11/site-packages (from pquant-ml[torch]) (2.3.1+cu121)\n", + "Requirement already satisfied: absl-py in /eos/user/a/apetrovy/.local/lib/python3.11/site-packages (from keras>=3->pquant-ml[torch]) (2.3.1)\n", + "Requirement already satisfied: numpy in /eos/user/a/apetrovy/.local/lib/python3.11/site-packages (from keras>=3->pquant-ml[torch]) (1.26.4)\n", + "Requirement already satisfied: rich in /eos/user/a/apetrovy/.local/lib/python3.11/site-packages (from keras>=3->pquant-ml[torch]) (14.2.0)\n", + "Requirement already satisfied: namex in /eos/user/a/apetrovy/.local/lib/python3.11/site-packages (from keras>=3->pquant-ml[torch]) (0.1.0)\n", + "Requirement already satisfied: h5py in /eos/user/a/apetrovy/.local/lib/python3.11/site-packages (from keras>=3->pquant-ml[torch]) (3.15.1)\n", + "Requirement already satisfied: optree in /eos/user/a/apetrovy/.local/lib/python3.11/site-packages (from keras>=3->pquant-ml[torch]) (0.18.0)\n", + "Requirement already satisfied: ml-dtypes in /eos/user/a/apetrovy/.local/lib/python3.11/site-packages (from keras>=3->pquant-ml[torch]) (0.5.4)\n", + "Requirement already satisfied: packaging in /opt/conda/lib/python3.11/site-packages (from keras>=3->pquant-ml[torch]) (24.1)\n", + "Requirement already satisfied: annotated-types>=0.6.0 in /eos/user/a/apetrovy/.local/lib/python3.11/site-packages (from pydantic>=2->pquant-ml[torch]) (0.7.0)\n", + "Requirement already satisfied: pydantic-core==2.41.5 in /eos/user/a/apetrovy/.local/lib/python3.11/site-packages (from pydantic>=2->pquant-ml[torch]) (2.41.5)\n", + "Requirement already satisfied: typing-extensions>=4.14.1 in /eos/user/a/apetrovy/.local/lib/python3.11/site-packages (from pydantic>=2->pquant-ml[torch]) (4.15.0)\n", + "Requirement already satisfied: typing-inspection>=0.4.2 in /eos/user/a/apetrovy/.local/lib/python3.11/site-packages (from pydantic>=2->pquant-ml[torch]) (0.4.2)\n", + "Requirement already satisfied: filelock in /opt/conda/lib/python3.11/site-packages (from torch>=2.1->pquant-ml[torch]) (3.13.1)\n", + "Requirement already satisfied: sympy in /opt/conda/lib/python3.11/site-packages (from torch>=2.1->pquant-ml[torch]) (1.12)\n", + "Requirement already satisfied: networkx in /opt/conda/lib/python3.11/site-packages (from torch>=2.1->pquant-ml[torch]) (3.3)\n", + "Requirement already satisfied: jinja2 in /opt/conda/lib/python3.11/site-packages (from torch>=2.1->pquant-ml[torch]) (3.1.4)\n", + "Requirement already satisfied: fsspec in /opt/conda/lib/python3.11/site-packages (from torch>=2.1->pquant-ml[torch]) (2024.2.0)\n", + "Requirement already satisfied: nvidia-cuda-nvrtc-cu12==12.1.105 in /opt/conda/lib/python3.11/site-packages (from torch>=2.1->pquant-ml[torch]) (12.1.105)\n", + "Requirement already satisfied: nvidia-cuda-runtime-cu12==12.1.105 in /opt/conda/lib/python3.11/site-packages (from torch>=2.1->pquant-ml[torch]) (12.1.105)\n", + "Requirement already satisfied: nvidia-cuda-cupti-cu12==12.1.105 in /opt/conda/lib/python3.11/site-packages (from torch>=2.1->pquant-ml[torch]) (12.1.105)\n", + "Requirement already satisfied: nvidia-cudnn-cu12==8.9.2.26 in /opt/conda/lib/python3.11/site-packages (from torch>=2.1->pquant-ml[torch]) (8.9.2.26)\n", + "Requirement already satisfied: nvidia-cublas-cu12==12.1.3.1 in /opt/conda/lib/python3.11/site-packages 
(from torch>=2.1->pquant-ml[torch]) (12.1.3.1)\n", + "Requirement already satisfied: nvidia-cufft-cu12==11.0.2.54 in /opt/conda/lib/python3.11/site-packages (from torch>=2.1->pquant-ml[torch]) (11.0.2.54)\n", + "Requirement already satisfied: nvidia-curand-cu12==10.3.2.106 in /opt/conda/lib/python3.11/site-packages (from torch>=2.1->pquant-ml[torch]) (10.3.2.106)\n", + "Requirement already satisfied: nvidia-cusolver-cu12==11.4.5.107 in /opt/conda/lib/python3.11/site-packages (from torch>=2.1->pquant-ml[torch]) (11.4.5.107)\n", + "Requirement already satisfied: nvidia-cusparse-cu12==12.1.0.106 in /opt/conda/lib/python3.11/site-packages (from torch>=2.1->pquant-ml[torch]) (12.1.0.106)\n", + "Requirement already satisfied: nvidia-nccl-cu12==2.20.5 in /opt/conda/lib/python3.11/site-packages (from torch>=2.1->pquant-ml[torch]) (2.20.5)\n", + "Requirement already satisfied: nvidia-nvtx-cu12==12.1.105 in /opt/conda/lib/python3.11/site-packages (from torch>=2.1->pquant-ml[torch]) (12.1.105)\n", + "Requirement already satisfied: triton==2.3.1 in /opt/conda/lib/python3.11/site-packages (from torch>=2.1->pquant-ml[torch]) (2.3.1)\n", + "Requirement already satisfied: nvidia-nvjitlink-cu12 in /opt/conda/lib/python3.11/site-packages (from nvidia-cusolver-cu12==11.4.5.107->torch>=2.1->pquant-ml[torch]) (12.1.105)\n", + "Requirement already satisfied: tqdm in /opt/conda/lib/python3.11/site-packages (from hgq2->pquant-ml[torch]) (4.66.5)\n", + "Requirement already satisfied: alembic>=1.5.0 in /eos/user/a/apetrovy/.local/lib/python3.11/site-packages (from optuna->pquant-ml[torch]) (1.18.0)\n", + "Requirement already satisfied: colorlog in /eos/user/a/apetrovy/.local/lib/python3.11/site-packages (from optuna->pquant-ml[torch]) (6.10.1)\n", + "Requirement already satisfied: sqlalchemy>=1.4.2 in /eos/user/a/apetrovy/.local/lib/python3.11/site-packages (from optuna->pquant-ml[torch]) (2.0.45)\n", + "Requirement already satisfied: Mako in /eos/user/a/apetrovy/.local/lib/python3.11/site-packages (from alembic>=1.5.0->optuna->pquant-ml[torch]) (1.3.10)\n", + "Requirement already satisfied: greenlet>=1 in /eos/user/a/apetrovy/.local/lib/python3.11/site-packages (from sqlalchemy>=1.4.2->optuna->pquant-ml[torch]) (3.3.0)\n", + "Requirement already satisfied: MarkupSafe>=2.0 in /opt/conda/lib/python3.11/site-packages (from jinja2->torch>=2.1->pquant-ml[torch]) (2.1.5)\n", + "Requirement already satisfied: markdown-it-py>=2.2.0 in /eos/user/a/apetrovy/.local/lib/python3.11/site-packages (from rich->keras>=3->pquant-ml[torch]) (4.0.0)\n", + "Requirement already satisfied: pygments<3.0.0,>=2.13.0 in /opt/conda/lib/python3.11/site-packages (from rich->keras>=3->pquant-ml[torch]) (2.18.0)\n", + "Requirement already satisfied: mpmath>=0.19 in /opt/conda/lib/python3.11/site-packages (from sympy->torch>=2.1->pquant-ml[torch]) (1.3.0)\n", + "Requirement already satisfied: mdurl~=0.1 in /eos/user/a/apetrovy/.local/lib/python3.11/site-packages (from markdown-it-py>=2.2.0->rich->keras>=3->pquant-ml[torch]) (0.1.2)\n" + ] + } + ], + "source": [ + "!pip install pquant-ml[torch]" + ] + }, { "cell_type": "code", "execution_count": null, - "id": "94033d11-b0c5-4ca6-9664-cd6dd0193c14", + "id": "8a519b2a-ec80-4995-9bd7-86b24193d96c", + "metadata": { + "scrolled": true + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Defaulting to user installation because normal site-packages is not writeable\n", + "Collecting tensorflow\n", + " Using cached 
tensorflow-2.20.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl.metadata (4.5 kB)\n", + "Requirement already satisfied: absl-py>=1.0.0 in /eos/user/a/apetrovy/.local/lib/python3.11/site-packages (from tensorflow) (2.3.1)\n", + "Requirement already satisfied: astunparse>=1.6.0 in /eos/user/a/apetrovy/.local/lib/python3.11/site-packages (from tensorflow) (1.6.3)\n", + "Requirement already satisfied: flatbuffers>=24.3.25 in /eos/user/a/apetrovy/.local/lib/python3.11/site-packages (from tensorflow) (25.12.19)\n", + "Requirement already satisfied: gast!=0.5.0,!=0.5.1,!=0.5.2,>=0.2.1 in /eos/user/a/apetrovy/.local/lib/python3.11/site-packages (from tensorflow) (0.7.0)\n", + "Requirement already satisfied: google_pasta>=0.1.1 in /eos/user/a/apetrovy/.local/lib/python3.11/site-packages (from tensorflow) (0.2.0)\n", + "Requirement already satisfied: libclang>=13.0.0 in /eos/user/a/apetrovy/.local/lib/python3.11/site-packages (from tensorflow) (18.1.1)\n", + "Requirement already satisfied: opt_einsum>=2.3.2 in /eos/user/a/apetrovy/.local/lib/python3.11/site-packages (from tensorflow) (3.4.0)\n", + "Requirement already satisfied: packaging in /opt/conda/lib/python3.11/site-packages (from tensorflow) (24.1)\n", + "Requirement already satisfied: protobuf>=5.28.0 in /eos/user/a/apetrovy/.local/lib/python3.11/site-packages (from tensorflow) (6.33.4)\n", + "Requirement already satisfied: requests<3,>=2.21.0 in /opt/conda/lib/python3.11/site-packages (from tensorflow) (2.32.3)\n", + "Requirement already satisfied: setuptools in /opt/conda/lib/python3.11/site-packages (from tensorflow) (75.1.0)\n", + "Requirement already satisfied: six>=1.12.0 in /opt/conda/lib/python3.11/site-packages (from tensorflow) (1.16.0)\n", + "Requirement already satisfied: termcolor>=1.1.0 in /eos/user/a/apetrovy/.local/lib/python3.11/site-packages (from tensorflow) (3.3.0)\n", + "Requirement already satisfied: typing_extensions>=3.6.6 in /eos/user/a/apetrovy/.local/lib/python3.11/site-packages (from tensorflow) (4.15.0)\n", + "Requirement already satisfied: wrapt>=1.11.0 in /eos/user/a/apetrovy/.local/lib/python3.11/site-packages (from tensorflow) (2.0.1)\n", + "Requirement already satisfied: grpcio<2.0,>=1.24.3 in /eos/user/a/apetrovy/.local/lib/python3.11/site-packages (from tensorflow) (1.76.0)\n", + "Requirement already satisfied: tensorboard~=2.20.0 in /eos/user/a/apetrovy/.local/lib/python3.11/site-packages (from tensorflow) (2.20.0)\n", + "Requirement already satisfied: keras>=3.10.0 in /eos/user/a/apetrovy/.local/lib/python3.11/site-packages (from tensorflow) (3.13.0)\n", + "Collecting numpy>=1.26.0 (from tensorflow)\n", + " Using cached numpy-2.4.1-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl.metadata (6.6 kB)\n", + "Requirement already satisfied: h5py>=3.11.0 in /eos/user/a/apetrovy/.local/lib/python3.11/site-packages (from tensorflow) (3.15.1)\n", + "Requirement already satisfied: ml_dtypes<1.0.0,>=0.5.1 in /eos/user/a/apetrovy/.local/lib/python3.11/site-packages (from tensorflow) (0.5.4)\n", + "Requirement already satisfied: wheel<1.0,>=0.23.0 in /opt/conda/lib/python3.11/site-packages (from astunparse>=1.6.0->tensorflow) (0.44.0)\n", + "Requirement already satisfied: rich in /eos/user/a/apetrovy/.local/lib/python3.11/site-packages (from keras>=3.10.0->tensorflow) (14.2.0)\n", + "Requirement already satisfied: namex in /eos/user/a/apetrovy/.local/lib/python3.11/site-packages (from keras>=3.10.0->tensorflow) (0.1.0)\n", + "Requirement already satisfied: optree in 
/eos/user/a/apetrovy/.local/lib/python3.11/site-packages (from keras>=3.10.0->tensorflow) (0.18.0)\n", + "Requirement already satisfied: charset-normalizer<4,>=2 in /opt/conda/lib/python3.11/site-packages (from requests<3,>=2.21.0->tensorflow) (3.3.2)\n", + "Requirement already satisfied: idna<4,>=2.5 in /opt/conda/lib/python3.11/site-packages (from requests<3,>=2.21.0->tensorflow) (3.10)\n", + "Requirement already satisfied: urllib3<3,>=1.21.1 in /opt/conda/lib/python3.11/site-packages (from requests<3,>=2.21.0->tensorflow) (1.26.20)\n", + "Requirement already satisfied: certifi>=2017.4.17 in /opt/conda/lib/python3.11/site-packages (from requests<3,>=2.21.0->tensorflow) (2024.8.30)\n", + "Requirement already satisfied: markdown>=2.6.8 in /eos/user/a/apetrovy/.local/lib/python3.11/site-packages (from tensorboard~=2.20.0->tensorflow) (3.10)\n", + "Requirement already satisfied: pillow in /opt/conda/lib/python3.11/site-packages (from tensorboard~=2.20.0->tensorflow) (10.4.0)\n", + "Requirement already satisfied: tensorboard-data-server<0.8.0,>=0.7.0 in /eos/user/a/apetrovy/.local/lib/python3.11/site-packages (from tensorboard~=2.20.0->tensorflow) (0.7.2)\n", + "Requirement already satisfied: werkzeug>=1.0.1 in /eos/user/a/apetrovy/.local/lib/python3.11/site-packages (from tensorboard~=2.20.0->tensorflow) (3.1.5)\n", + "Requirement already satisfied: markupsafe>=2.1.1 in /opt/conda/lib/python3.11/site-packages (from werkzeug>=1.0.1->tensorboard~=2.20.0->tensorflow) (2.1.5)\n", + "Requirement already satisfied: markdown-it-py>=2.2.0 in /eos/user/a/apetrovy/.local/lib/python3.11/site-packages (from rich->keras>=3.10.0->tensorflow) (4.0.0)\n", + "Requirement already satisfied: pygments<3.0.0,>=2.13.0 in /opt/conda/lib/python3.11/site-packages (from rich->keras>=3.10.0->tensorflow) (2.18.0)\n", + "Requirement already satisfied: mdurl~=0.1 in /eos/user/a/apetrovy/.local/lib/python3.11/site-packages (from markdown-it-py>=2.2.0->rich->keras>=3.10.0->tensorflow) (0.1.2)\n", + "Using cached tensorflow-2.20.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (620.6 MB)\n", + "Using cached numpy-2.4.1-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl (16.7 MB)\n", + "Installing collected packages: numpy, tensorflow\n", + "\u001b[33m WARNING: The scripts f2py and numpy-config are installed in '/eos/user/a/apetrovy/.local/bin' which is not on PATH.\n", + " Consider adding this directory to PATH or, if you prefer to suppress this warning, use --no-warn-script-location.\u001b[0m\u001b[33m\n", + "\u001b[0m" + ] + } + ], + "source": [ + "!pip install tensorflow\n" + ] + }, + { + "cell_type": "code", + "execution_count": 11, + "id": "212a2d48-668d-49ba-b8ad-22870ae0e79b", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Found existing installation: tensorflow 2.20.0\n", + "Uninstalling tensorflow-2.20.0:\n", + " Successfully uninstalled tensorflow-2.20.0\n", + "\u001b[33mWARNING: Skipping tensorflow-cpu as it is not installed.\u001b[0m\u001b[33m\n", + "\u001b[0m\u001b[33mWARNING: Skipping tensorflow-intel as it is not installed.\u001b[0m\u001b[33m\n", + "\u001b[0m\u001b[33mWARNING: Skipping tf-nightly as it is not installed.\u001b[0m\u001b[33m\n", + "\u001b[0mFound existing installation: keras 3.13.0\n", + "Uninstalling keras-3.13.0:\n", + " Successfully uninstalled keras-3.13.0\n", + "\u001b[33mWARNING: Skipping keras-nightly as it is not installed.\u001b[0m\u001b[33m\n", + "\u001b[0mFound existing installation: optree 0.18.0\n", + 
"Uninstalling optree-0.18.0:\n", + " Successfully uninstalled optree-0.18.0\n", + "\u001b[33mWARNING: Skipping dmtree as it is not installed.\u001b[0m\u001b[33m\n", + "\u001b[0mFound existing installation: numpy 2.4.1\n", + "Uninstalling numpy-2.4.1:\n", + " Successfully uninstalled numpy-2.4.1\n" + ] + } + ], + "source": [ + "!pip uninstall -y \\\n", + " tensorflow tensorflow-cpu tensorflow-intel \\\n", + " tf-nightly keras keras-nightly \\\n", + " optree dmtree \\\n", + " numpy\n" + ] + }, + { + "cell_type": "code", + "execution_count": 15, + "id": "dd0f307f-a8c5-4678-a6df-3b04cae845d4", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\u001b[31mERROR: pip's dependency resolver does not currently take into account all the packages that are installed. This behaviour is the source of the following dependency conflicts.\n", + "hgq2 0.1.6 requires keras>=3.11, but you have keras 3.1.1 which is incompatible.\u001b[0m\u001b[31m\n", + "\u001b[0m" + ] + } + ], + "source": [ + "!pip install \\\n", + " numpy>=2.0 \\\n", + " keras==3.1.1 \\\n", + " torch \\\n", + " optree\n" + ] + }, + { + "cell_type": "code", + "execution_count": 9, + "id": "d297db89-ae62-4b49-9f3c-60aa2fc4548c", "metadata": {}, "outputs": [], + "source": [ + "!pip install -U numpy>=2.0\n" + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "id": "94033d11-b0c5-4ca6-9664-cd6dd0193c14", + "metadata": {}, + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "/eos/user/a/apetrovy/.local/lib/python3.11/site-packages/keras/src/export/tf2onnx_lib.py:8: FutureWarning: In the future `np.object` will be defined as the corresponding NumPy scalar.\n", + " if not hasattr(np, \"object\"):\n" + ] + } + ], "source": [ "import os\n", + "os.environ[\"KERAS_BACKEND\"] = \"torch\"\n", "import random\n", "import numpy as np\n", "import torch.nn.functional as F\n", "import numpy as np\n", "from torch.utils.data import TensorDataset, DataLoader\n", + "import pquant\n", + "import torch\n", "\n", - "os.environ[\"KERAS_BACKEND\"] = \"torch\"\n", "import keras\n", "keras.backend.set_image_data_format(\"channels_first\")\n", "from pquant.layers import PQDense\n", "from pquant.activations import PQActivation\n", "from pquant import get_ebops\n", - "from da4ml.trace.ops import quantize, relu\n", - "from da4ml.trace import comb_trace, FixedVariableArrayInput, FixedVariableArray\n", - "from da4ml.codegen import VerilogModel\n", + "# from da4ml.trace.ops import quantize, relu\n", + "# from da4ml.trace import comb_trace, FixedVariableArrayInput, FixedVariableArray\n", + "# from da4ml.codegen import VerilogModel\n", "import random\n", "\n", "def set_seed(seed):\n", @@ -49,7 +316,7 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 4, "id": "1473a61e-955d-430a-9737-7fd2fc7ba59b", "metadata": {}, "outputs": [], @@ -110,7 +377,40 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 5, + "id": "3912af56-072b-49b7-b5fa-7ebdcb1eb7f6", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "array([[-0.5112194 , 0.4709012 , 0.3854093 , ..., -0.8356734 ,\n", + " 0.1872339 , 0.4656152 ],\n", + " [ 0.74837166, -0.49861702, 0.12397022, ..., -0.4347707 ,\n", + " 0.2630706 , -0.8720775 ],\n", + " [ 0.8549553 , -0.9071467 , -1.2185662 , ..., 1.3785621 ,\n", + " -1.2894171 , -0.7336955 ],\n", + " ...,\n", + " [ 1.7038691 , -2.5647378 , -1.2492837 , ..., -0.40030617,\n", + " -1.3103083 , -1.0565869 ],\n", + " [-0.00470833, 
0.27720675, 1.2619908 , ..., 1.5371776 ,\n", + " 1.7332124 , 0.00434187],\n", + " [-1.4528095 , 0.83477974, 0.59786695, ..., -0.57906324,\n", + " 0.89104605, 2.080072 ]], dtype=float32)" + ] + }, + "execution_count": 5, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "X_train" + ] + }, + { + "cell_type": "code", + "execution_count": 6, "id": "f2659f88-9f0c-45f4-a09d-8ef346915187", "metadata": {}, "outputs": [], @@ -139,10 +439,34 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 7, "id": "ea03f950-15d5-47df-86bd-921956e98d83", "metadata": {}, - "outputs": [], + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "/eos/user/a/apetrovy/.local/lib/python3.11/site-packages/pquant/core/torch/quantizer.py:10: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n", + " self.k = torch.nn.Parameter(torch.tensor(k), requires_grad=False)\n", + "/eos/user/a/apetrovy/.local/lib/python3.11/site-packages/pquant/core/torch/quantizer.py:11: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n", + " self.i = torch.nn.Parameter(torch.tensor(i), requires_grad=False)\n", + "/eos/user/a/apetrovy/.local/lib/python3.11/site-packages/pquant/core/torch/quantizer.py:12: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n", + " self.f = torch.nn.Parameter(torch.tensor(f), requires_grad=False)\n" + ] + }, + { + "data": { + "text/plain": [ + "tensor([[0.1250, 0.0000, 0.0000, 0.0000, 0.0000]], device='cuda:0',\n", + " grad_fn=)" + ] + }, + "execution_count": 7, + "metadata": {}, + "output_type": "execute_result" + } + ], "source": [ "from pquant import cs_config, dst_config\n", "\n", @@ -173,6 +497,8 @@ "config.quantization_parameters.default_data_fractional_bits = 2.\n", "config.quantization_parameters.default_weight_fractional_bits = 3.\n", "config.quantization_parameters.use_relu_multiplier = False\n", + "config.quantization_parameters.overflow = \"WRAP\"\n", + "config.quantization_parameters\n", "model = build_model(config)\n", "\n", "model.to(\"cuda\")\n", @@ -181,57 +507,140 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 8, "id": "10cc72e0-a4c5-4e19-a31d-ed9de6bee9f7", "metadata": {}, - "outputs": [], + "outputs": [ + { + "data": { + "text/plain": [ + "Model(\n", + " (dense1): PQDense(\n", + " in_features=16 out_features=64 bias=True quantize_input=True quantize_output=False \n", + " (pruning_layer): \n", + " (input_quantizer): Quantizer()\n", + " (weight_quantizer): Quantizer()\n", + " (bias_quantizer): Quantizer()\n", + " (output_quantizer): Quantizer()\n", + " )\n", + " (relu): PQActivation(\n", + " quantize_input = True, quantize_output = False\n", + " (output_quantizer): Quantizer()\n", + " (input_quantizer): Quantizer()\n", + " )\n", + " (dense2): PQDense(\n", + " in_features=64 out_features=32 bias=True quantize_input=True quantize_output=False \n", + " (pruning_layer): \n", + " (input_quantizer): Quantizer()\n", + " (weight_quantizer): Quantizer()\n", + " (bias_quantizer): Quantizer()\n", + " (output_quantizer): Quantizer()\n", + " )\n", + " (dense3): PQDense(\n", + " 
in_features=32 out_features=32 bias=True quantize_input=True quantize_output=False \n", + " (pruning_layer): \n", + " (input_quantizer): Quantizer()\n", + " (weight_quantizer): Quantizer()\n", + " (bias_quantizer): Quantizer()\n", + " (output_quantizer): Quantizer()\n", + " )\n", + " (dense4): PQDense(\n", + " in_features=32 out_features=5 bias=True quantize_input=True quantize_output=True \n", + " (pruning_layer): \n", + " (input_quantizer): Quantizer()\n", + " (weight_quantizer): Quantizer()\n", + " (bias_quantizer): Quantizer()\n", + " (output_quantizer): Quantizer()\n", + " )\n", + ")" + ] + }, + "execution_count": 8, + "metadata": {}, + "output_type": "execute_result" + } + ], "source": [ "model" ] }, { "cell_type": "code", - "execution_count": null, + "execution_count": 9, "id": "7c2f58dd-d4ab-4f2b-a1d1-5ac220569d81", "metadata": {}, "outputs": [], "source": [ - "loss_func = torch.nn.CrossEntropyLoss()\n", + "loss_function = torch.nn.CrossEntropyLoss()\n", "optimizer = torch.optim.Adam(lr=1e-2, params=model.parameters())\n", "scheduler = torch.optim.lr_scheduler.MultiStepLR(optimizer, milestones=[600, 800], gamma=0.1)" ] }, { "cell_type": "code", - "execution_count": null, + "execution_count": 10, "id": "983c6bad-310e-4d7c-a6a6-f0164f01808c", "metadata": {}, "outputs": [], "source": [ "from pquant import get_layer_keep_ratio, get_model_losses\n", "train_accuracies = []\n", - "\n", - "def training_loop(model, trainloader, device, loss_func, optimizer, epoch, scheduler=None, *args, **kwargs):\n", - " for data in trainloader:\n", - " inputs, labels = data\n", + "train_task_losses = []\n", + "train_model_losses = []\n", + "train_total_losses = []\n", + "epoch_task_loss = 0\n", + "epoch_model_loss = 0\n", + "epoch_total_loss = 0\n", + "epoch_acc = 0\n", + "n_batches = 0\n", + "\n", + "def training_loop(model, trainloader, device, loss_function, optimizer, epoch, scheduler=None, *args, **kwargs):\n", + " for inputs, labels in trainloader:\n", " inputs, labels = inputs.to(device), labels.to(device)\n", + "\n", " optimizer.zero_grad()\n", + " \n", + " epoch_task_loss = 0.0\n", + " epoch_model_loss = 0.0\n", + " epoch_total_loss = 0.0\n", + " epoch_acc = 0.0\n", + " n_batches = 0\n", + "\n", " outputs = model(inputs)\n", - " loss = loss_func(outputs, labels)\n", - " losses = get_model_losses(model, torch.tensor(0.).to(device))\n", - " loss += losses\n", - " loss.backward()\n", + "\n", + " task_loss = loss_function(outputs, labels)\n", + " model_loss = get_model_losses(\n", + " model, torch.tensor(0.).to(device)\n", + " )\n", + "\n", + " total_loss = task_loss + model_loss\n", + " total_loss.backward()\n", " optimizer.step()\n", - " epoch += 1\n", - " accuracy = torch.mean((torch.argmax(outputs, dim=1) == torch.argmax(labels, dim=1)).float())\n", + "\n", + " acc = torch.mean(\n", + " (torch.argmax(outputs, dim=1)\n", + " == torch.argmax(labels, dim=1)).float()\n", + " )\n", + "\n", + " epoch_task_loss += task_loss.item()\n", + " epoch_model_loss += model_loss.item()\n", + " epoch_total_loss += total_loss.item()\n", + " epoch_acc += acc.item()\n", + " n_batches += 1\n", + "\n", " if scheduler is not None:\n", " scheduler.step()\n", - " train_accuracies.append(accuracy.cpu().numpy())\n", + "\n", + " train_task_losses.append(epoch_task_loss / n_batches)\n", + " train_model_losses.append(epoch_model_loss / n_batches)\n", + " train_total_losses.append(epoch_total_loss / n_batches)\n", + " train_accuracies.append(epoch_acc / n_batches)\n", "\n", "val_accuracies = []\n", "remaining_weights = []\n", 
"ebops = []\n", - "def validate_loop(model, testloader, device, loss_func, epoch, *args, **kwargs):\n", + "val_losses = []\n", + "def validate_loop(model, testloader, device, loss_function, epoch, *args, **kwargs):\n", " correct = 0\n", " total = 0\n", " model.eval()\n", @@ -240,7 +649,8 @@ " inputs, labels = data\n", " inputs, labels = inputs.to(device), labels.to(device)\n", " outputs = model(inputs)\n", - " loss = loss_func(outputs, labels)\n", + " loss = loss_function(outputs, labels)\n", + " val_losses.append(loss)\n", " accuracy = torch.mean((torch.argmax(outputs, dim=1) == torch.argmax(labels, dim=1)).float())\n", " val_accuracies.append(accuracy.cpu().numpy())\n", " ratio = get_layer_keep_ratio(model)\n", @@ -250,10 +660,26 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 11, "id": "e67b9c3b-00eb-4945-822f-ae9e0864d5d5", "metadata": {}, - "outputs": [], + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "/eos/user/a/apetrovy/.local/lib/python3.11/site-packages/keras/src/backend/torch/numpy.py:649: UserWarning: Tensor.T is deprecated on 0-D tensors. This function is the identity in these cases. (Triggered internally at ../aten/src/ATen/native/TensorShape.cpp:3683.)\n", + " return cast(torch.count_nonzero(x, dim=axis).T, \"int32\")\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Remaining weights=5.26% EBOPs=5493 Accuracy=18.59:%\n" + ] + } + ], "source": [ "from pquant import train_model\n", "model.to(\"cuda\")\n", @@ -264,7 +690,7 @@ " trainloader = train_loader, \n", " device=\"cuda\",\n", " testloader = test_loader, \n", - " loss_func = loss_func,\n", + " loss_function = loss_function,\n", " optimizer = optimizer,\n", " scheduler=scheduler\n", " )\n", @@ -273,21 +699,119 @@ }, { "cell_type": "code", - "execution_count": null, - "id": "a3bd9d0a-476b-4530-93aa-41ea2578a521", + "execution_count": 15, + "id": "3d6cb97b-6c01-4217-a0f9-4979f7d638b7", "metadata": {}, - "outputs": [], - "source": [] + "outputs": [ + { + "data": { + "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAA04AAAIhCAYAAAB5deq6AAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjguNCwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8fJSN1AAAACXBIWXMAAA9hAAAPYQGoP6dpAACdYUlEQVR4nOzdd3QUVQPG4d9m0zuE3nvvIFKUIh2k2ukKCiIooqhYkKaCiiAWPhtNAREFRKRKlyI1SK8htEAgQHrZ7M73x5oNSwJJENyg73NODtkpd+5uLsm8c+/cMRmGYSAiIiIiIiI35ObqCoiIiIiIiOR2Ck4iIiIiIiJZUHASERERERHJgoKTiIiIiIhIFhScREREREREsqDgJCIiIiIikgUFJxERERERkSwoOImIiIiIiGRBwUlERERERCQLCk4iIjlgMpmy9bVu3bq/dZxRo0ZhMpluad9169bdljr8HVu2bKFp06YEBgaSL18+HnjgAdauXZutfT/++GNMJhPLly+/4TZfffUVJpOJBQsWZLtOzZo1o1mzZk7LTCYTo0aNynLfGTNmYDKZOHnyZLaPl2bp0qU3PEapUqXo27dvjsv8u9LayI8//viPH1tE5G7l7uoKiIjcTbZs2eL0euzYsaxdu5Y1a9Y4La9SpcrfOk7//v1p27btLe1bp04dtmzZ8rfrcKvCw8Np06YNVatWZc6cOVitVlatWsWOHTto3rx5lvv37NmTV199lWnTpt3wM5g+fTr58+enY8eOf6uuW7ZsoVixYn+rjKwsXbqUzz77LNPwtHDhQgIDA+/o8UVE5PZQcBIRyYEGDRo4vc6fPz9ubm4Zll8vISEBX1/fbB+nWLFit3xCHxgYmGV97qSlS5cSGxvL9OnTqVSpEgCdO3fO9v4hISF07tyZRYsWERUVRUhIiNP6Q4cOsWXLFl566SU8PDz+Vl1d+TkB1K5d26XHFxGR7NNQPRGR26xZs2ZUq1aNDRs20KhRI3x9fXnqqacAmDdvHq1bt6Zw4cL4+PhQuXJlXnvtNeLj453KyGyoXqlSpXjwwQdZvnw5derUwcfHh0qVKjFt2jSn7TIbqte3b1/8/f05duwY7du3x9/fn+LFi/PSSy+RnJzstP+ZM2d4+OGHCQgIIDg4mB49erB9+3ZMJhMzZszI8v2bzWYADh8+nN2PLIN+/fqRkpLCnDlzMqybPn06gOMzHT16NPfeey958+YlMDCQOnXq8M0332AYRpbHyWyo3tatW2ncuDHe3t4UKVKEESNGYLFYMuybnZ9l3759+eyzzxzHSvtKG/KX2VC9U6dO0bNnTwoUKICXlxeVK1dm4sSJ2Gw2xzYnT57EZDLx4Ycf8tFHH1G6dGn8/f1p2LAhW7duzfJ9Z9e+ffvo3LkzefLkwdvbm1q1ajFz5kynbWw2G+PGjaNixYr4+PgQHBxMjRo1+Pjjjx3bXLx4kWeeeYbixYvj5eVF/vz5ady4Mb/99ptTWb/99hstWrQgMDAQX19fGjduzOrVq522yW5ZIiK3m3qcRETugIiICHr27Mkrr7zCu+++i5ub/TrV0aNHad++PUOHDsXPz49Dhw4xYcIEtm3blmG4X2b27NnDSy+9xGuvvUbBggX5+uuv6devH+XKlaNJkyY33ddisdCpUyf69evHSy+9xIYNGxg7dixBQUGMHDkSgPj4eJo3b87ly5eZMGEC5cqVY/ny5Tz22GPZfu8PPfQQI0aMYODAgVStWpVy5cple980LVu2pGTJkkybNo0hQ4Y4llutVr799lsaNGjgGIp48uRJBgwYQIkSJQB78BkyZAhnz551vK/sOnDgAC1atKBUqVLMmDEDX19fPv/880wDXHZ+lm+99Rbx8fH8+OOPTsM8CxcunOnxL168SKNGjUhJSWHs2LGUKlWKJUuW8PLLL3P8+HE+//xzp+0/++wzKlWqxOTJkx3Ha9++PWFhYQQFBeXovV/v8OHDNGrUiAIFCjBlyhRCQkL47rvv6Nu3LxcuXOCVV14B4P3332fUqFG8+eabNGnSBIvFwqFDh7h69aqjrF69erFr1y7eeecdKlSowNWrV9m1axdRUVGObb777jt69+5N586dmTlzJh4eHnzxxRe0adOGFStW0KJFi2yXJSJyRxgiInLL+vTpY/j5+Tkta9q0qQEYq1evvum+NpvNsFgsxvr16w3A2LNnj2Pd22+/bVz/K7pkyZKGt7e3ER4e7liWmJho5M2b1xgwYIBj2dq1aw3AWLt2rVM9AeOHH35wKrN9+/ZGxYoVHa8/++wzAzCWLVvmtN2AAQMMwJg+ffpN35NhGMbixYuNggULGsWLFzeKFy9uHD9+PMt9MpP2Gezatcux7JdffjEA46uvvsp0H6vValgsFmPMmDFGSEiIYbPZHOuaNm1qNG3a1Gl7wHj77bcdrx977DHDx8fHOH/+vGNZamqqUalSJQMwwsLCMj3uzX6Wzz33XIafZZqSJUsaffr0cbx+7bXXDMD4448/nLZ79tlnDZPJZBw+fNgwDMMICwszAKN69epGamqqY7tt27YZgDF37txMj5cmrY3Mnz//hts8/vjjhpeXl3Hq1Cmn5e3atTN8fX2Nq1evGoZhGA8++KBRq1atmx7P39/fGDp06A3Xx8fHG3nz5jU6duzotNxqtRo1a9Y06tevn+2yRETuFA3VExG5A/LkycMDDzyQYfmJEyfo3r07hQoVwmw24+HhQdOmTQE4ePBgluXWqlXL0bMC4O3tTYUKFQgPD89yX5PJlGEyhRo1ajjtu379egICAjJMyvDEE09kWT7A5s2beeihh/j888/ZtGkTHh4eNG/enLCwMMc2/fv3p2TJklmW9eSTT+Lm5uY0FHH69On4+fk59YCtWbOGli1bEhQU5PhMR44cSVRUFJGRkdmqd5q1a9fSokULChYs6FhmNpsz7XH7uz/LzKxZs4YqVapQv359p+V9+/bFMIwMvZIdOnRwDI0E+88TyFZ7yE5dWrRoQfHixTPUJSEhwdGDVr9+ffbs2cOgQYNYsWIFMTExGcqqX78+M2bMYNy4cWzdujXD0MfNmzdz+fJl+vTpQ2pqquPLZrPRtm1btm/f7hgCmVVZIiJ3ioKTiMgdkNlQrLi4OO6//37++OMPxo0bx7p169i+fbtjSu3ExMQsy71+ogQALy+vbO3r6+uLt7d3hn2TkpIcr6OiopxCQ5rMlmXmnXfeoWLFinTr1o3ixYuzfv16R3gKDw/HZrOxceNGOnTokGVZJUuWpEWLFsyZM4fk5GQuXbrEkiVLeOSRRwgICABg27ZttG7dGrBPUb5p0ya2b9/OG2+8AWTvM71WVFQUhQoVyrD8+mW342d5o+Nn1naKFCniWH+t69uDl5fX3zr+rdRlxIgRfPjhh2zdupV27doREhJCixYt2LFjh2OfefPm0adPH77++msaNmxI3rx56d27N+fPnwfgwoULADz88MN4eHg4fU2YMAHDMLh8+XK2yhIRuVN0j5OIyB2Q2TOY1qxZw7lz51i3bp2jZwJwuhfE1UJCQti2bVuG5d
k9KT1+/LjTyXyxYsVYv349zZo1o3nz5vTt25fw8HBefvnlbJXXr18/Vq1axc8//8y5c+dISUmhX79+jvXff/89Hh4eLFmyxCkULlq0KFvlXy8kJCTT93r9sjv1swwJCSEiIiLD8nPnzgGQL1++v1X+naiLu7s7w4YNY9iwYVy9epXffvuN119/nTZt2nD69Gl8fX3Jly8fkydPZvLkyZw6dYrFixfz2muvERkZyfLlyx1lffLJJzec6TAtvGdVlojInaIeJxGRf0hamErrFUjzxRdfuKI6mWratCmxsbEsW7bMafn333+frf2rVavGzp07OXDggGNZ0aJFWb9+PYZh8Pbbb/Paa69RpkyZbJXXpUsXQkJCmDZtGtOnT6dChQrcd999jvUmkwl3d3en4WqJiYl8++232Sr/es2bN2f16tWOHhCwT0gxb948p+1y8rPMSS9QixYtOHDgALt27XJaPmvWLEwmU7aeg3W7tGjRwhEQr6+Lr69vpgEnODiYhx9+mOeee47Lly9n+sDgEiVKMHjwYFq1auV4n40bNyY4OJgDBw5Qr169TL88PT2zVZaIyJ2iHicRkX9Io0aNyJMnDwMHDuTtt9/Gw8OD2bNns2fPHldXzaFPnz5MmjSJnj17Mm7cOMqVK8eyZctYsWIFgGN2wBsZN24ca9asoVmzZgwfPpw6depw+fJlfv31V86cOUOxYsWYOnUqjz32GJUrV86yPl5eXvTo0YNPPvkEwzAYP3680/oOHTrw0Ucf0b17d5555hmioqL48MMPMwSa7HrzzTdZvHgxDzzwACNHjsTX15fPPvssw3TxOflZVq9eHYAJEybQrl07zGYzNWrUyDQIvPjii8yaNYsOHTowZswYSpYsya+//srnn3/Os88+S4UKFW7pfd3IjaYub9q0KW+//TZLliyhefPmjBw5krx58zJ79mx+/fVX3n//fcesfR07dqRatWrUq1eP/PnzEx4ezuTJkylZsiTly5cnOjqa5s2b0717dypVqkRAQADbt29n+fLldOvWDQB/f38++eQT+vTpw+XLl3n44YcpUKAAFy9eZM+ePVy8eJGpU6dmqywRkTvGtXNTiIjc3W40q17VqlUz3X7z5s1Gw4YNDV9fXyN//vxG//79jV27dmWYse5Gs+p16NAhQ5nXzxZ3o1n1rq/njY5z6tQpo1u3boa/v78REBBgPPTQQ8bSpUsNwPj5559v9FE4hIWFGX379jWKFCliuLu7GwUKFDAeeeQRY8uWLcaFCxeMsmXLGoUKFXLMEJeVPXv2GIBhNpuNc+fOZVg/bdo0o2LFioaXl5dRpkwZ47333jO++eabDLPgZWdWPcMwjE2bNhkNGjQwvLy8jEKFChnDhw83vvzyywzlZfdnmZycbPTv39/Inz+/YTKZnMq5flY9wzCM8PBwo3v37kZISIjh4eFhVKxY0fjggw8Mq9Xq9BkDxgcffJDh88jsPV0vrY3c6Cut7ezdu9fo2LGjERQUZHh6eho1a9bMMLPixIkTjUaNGhn58uUzPD09jRIlShj9+vUzTp48aRiGYSQlJRkDBw40atSoYQQGBho+Pj5GxYoVjbffftuIj493Kmv9+vVGhw4djLx58xoeHh5G0aJFjQ4dOjhm/8tJWSIit5vJMLLxhEAREflPe/fdd3nzzTc5deoUxYoVc3V1RERE/nEaqiciIk4+/fRTACpVqoTFYmHNmjVMmTKFnj17KjSJiMh/loKTiIg48fX1ZdKkSZw8eZLk5GRKlCjBq6++yptvvunqqomIiLiMhuqJiIiIiIhkQdORi4iIiIiIZEHBSUREREREJAsKTiIiIiIiIln4z00OYbPZOHfuHAEBAY4nv4uIiIiIyH+PYRjExsZSpEiRLB/y/p8LTufOnaN48eKuroaIiIiIiOQSp0+fzvKRG/+54BQQEADYP5zAwECX1sVisbBy5Upat26Nh4eHS+sidwe1GckptRnJKbUZySm1Gcmp3NRmYmJiKF68uCMj3Mx/LjilDc8LDAzMFcHJ19eXwMBAlzcauTuozUhOqc1ITqnNSE6pzUhO5cY2k51beFw6OcSGDRvo2LEjRYoUwWQysWjRoptuv2DBAlq1akX+/PkJDAykYcOGrFix4p+prIiIiIiI/Ge5NDjFx8dTs2ZNPv3002xtv2HDBlq1asXSpUvZuXMnzZs3p2PHjuzevfsO11RERERERP7LXDpUr127drRr1y7b20+ePNnp9bvvvsvPP//ML7/8Qu3atW9z7UREREREROzu6nucbDYbsbGx5M2b94bbJCcnk5yc7HgdExMD2MdWWiyWO17Hm0k7vqvrIXcPtRnJKbUZySm1Gcmpm7UZwzCwWq1YrVYMw/inqya5VGpqKu7u7sTFxeHufmfjiMlkwt3dHbPZnOn6nPyuMxm5pBWbTCYWLlxIly5dsr3PBx98wPjx4zl48CAFChTIdJtRo0YxevToDMvnzJmDr6/vrVZXRERERG7Czc2N4OBgfHx89OxMcanU1FQuX75MSkpKhnUJCQl0796d6OjoLCeOu2uD09y5c+nfvz8///wzLVu2vOF2mfU4FS9enEuXLuWKWfVWrVpFq1atcs2MIpK7qc1ITqnNSE6pzUhOZdZmbDYbYWFhmM1m8ufPj4eHh8KTOBiGQXx8PH5+fne8XRiGQVRUFPHx8ZQuXTpDz1NMTAz58uXLVnC6K4fqzZs3j379+jF//vybhiYALy8vvLy8Miz38PDINX8QclNd5O6gNiM5pTYjOaU2Izl1bZtJSkrCMAyKFi2qET6Sgc1mw2Kx4OPjg5vbnZ+rzs3Njfj4eIAMv9dy8nvOpbPq3Yq5c+fSt29f5syZQ4cOHVxdHRERERG5gX/ipFgkK7erV8ulPU5xcXEcO3bM8TosLIzQ0FDy5s1LiRIlGDFiBGfPnmXWrFmAPTT17t2bjz/+mAYNGnD+/HkAfHx8CAoKcsl7EBERERGRfz+XXgbYsWMHtWvXdkwlPmzYMGrXrs3IkSMBiIiI4NSpU47tv/jiC1JTU3nuuecoXLiw4+uFF15wSf1FREREROS/waU9Ts2aNbvp1JQzZsxwer1u3bo7WyERERERkdusWbNm1KpVK8MzSXODvn37cvXqVRYtWuTqquR6GngqIiIiIoL9XpibffXt2/eWyl2wYAFjx47NdfX6O/X5Lwatu3JWPRERERGR2y0iIsLx/bx58xg5ciSHDx92LPPx8XHa3mKxZGtWtrx58/6j9ZI7Qz1OIiIiInLHGYZBQkrqP/6Vk0eWFipUyPEVFBSEyWRyvE5KSiI4OJgffviBZs2a4e3tzXfffUdUVBRPPPEExYoVw9fXl+rVqzN37lyncps1a8bQoUMdr0uVKsW7777LU089RUBAACVKlODLL7+8pXp5eHgwcODAmx7/xx9/pHr16vj4+BASEkLLli0d03Nfb+fOnRQoUIB33nkn25/btWw2G2PGjKFYsWJ4eXlRq1Ytli9f7lifkpLCkCFDqFSpEr6+vpQqVYr33nvPsX7UqFGUKFECLy8vihQpw
vPPP39L9bgT1OMkIiIiIndcosVKlZEr/vHjHhjTBl/P23fK++qrrzJx4kSmT5+Ol5cXSUlJ1K1bl1dffZXAwEB+/fVXevXqRZkyZbj33ntvWM7EiRMZO3Ysr7/+Oj/++CPPPvssTZo0oVKlSjmqT1bHj4iI4IknnuD999+na9euxMbGsnHjxkwD5bp16+jSpQvvvfcezz77bI4/G4CPP/6YiRMn8sUXX1C7dm2mTZtGp06d2L9/P+XLl2fKlCn88ssvTJs2jcqVK3P27FlOnz4N2APepEmT+P7776latSrnz59nz549t1SPO0HBSUREREQkm4YOHUq3bt2clr388suO74cMGcLy5cuZP3/+TYNT+/btGTRoEGAPY5MmTWLdunU5Dk5Fixa96fEjIiJITU2lW7dulCxZEoDq1atnKOfnn3+mV69efPHFFzzxxBM5qsO1PvzwQ1599VUef/xxACZMmMDatWuZPHkyn332GadOnaJ8+fI0bNiQoKAgSpcu7dj31KlTFCpUiJYtW+Lh4UGJEiWoX7/+LdfldlNwcqEzcWfYn7KfYpeKUbdwXVdXR0REROSO8fEwc2BMG5cc93aqV6+e02ur1cr48eOZN28eZ8+eJTk5meTkZPz8/G5aTo0aNRzfpw29i4yMzHF9sjp+zZo1adGiBdWrV6dNmza0bt2ahx9+mDx58jjK+OOPP1iyZAnz58+na9euOa5DmpiYGM6dO0fjxo2dljdu3NjRc9S3b19atWrFPffcQ7t27ejYsSOtW7cG4JFHHmHy5MmUKVOGtm3b0r59ezp27Ii7e+6ILLrHyYU2ndvE3IS5fHfwO1dXRUREROSOMplM+Hq6/+NfJpPptr6P6wPRxIkTmTRpEq+88gpr1qwhNDSUNm3akJKSctNyrp9UwmQyYbPZclyfrI5vNptZtWoVy5Yto0qVKnzyySdUrFiRsLAwRxlly5alUqVKTJs2Lct6Z8f1n7lhGI5lderU4fjx47z++uskJiby6KOP8vDDDwNQvHhxDh8+zGeffYaPjw+DBg2iSZMmWCyWv12n20HByYXMJvsVEKthdXFNRERERORWbNy4kc6dO9OzZ09q1qxJmTJlOHr0aK46vslkonHjxowePZrdu3fj6enJwoULHevz5cvHmjVrOH78OI899tgtB5XAwECKFCnC77//7rR88+bNVK5c2Wm7bt268eWXXzJv3jx++uknLl++DNhnCOzUqRNTpkxh3bp1bNmyhb17995SfW633NHv9R+VFpxsRs6vLoiIiIiI65UrV46ffvqJzZs3kydPHj766CPOnz/vFBRcefw//viD1atX07p1awoUKMAff/zBxYsXM9SvQIECrFmzhubNm/PEE0/w/fff33SIXFhYGKGhoRnqMnz4cN5++23Kli1LrVq1mD59OqGhocyePRuASZMmUbBgQcqVK0dgYCDz58+nUKFCBAcHM2PGDKxWK/feey++vr58++23+Pj4OO7NcjUFJxdyM9k7/NTjJCIiInJ3euuttwgLC6NNmzb4+vryzDPP0KVLF6Kjo3PF8QMDA9mwYQOTJ08mJiaGkiVLMnHiRNq1a5ehrEKFCrFmzRqaNWtGjx49mDNnDmZz5veIDRs2LMOytWvX8vzzzxMTE8NLL71EZGQkVapUYfHixZQvXx4Af39/PvjgA44ePYrZbOaee+5h6dKluLm5ERwczPjx4xk2bBhWq5Xq1avzyy+/EBISchs/sVtnMnIyuf2/QExMDEFBQURHRxMYGOjSuiw8spCRW0bSsHBDvmx947n7RdJYLBaWLl1K+/bts/XAPRG1GckptRnJqczaTFJSEmFhYZQuXRpvb28X11ByG5vNRkxMDIGBgbi53fk7h27WHnOSDXSPkws57nGyqcdJRERERCQ3U3ByoYQU21//5o6ZQkREREREJHMKTi6053QsAKeuxLm4JiIiIiIicjMKTi7kmFUPzaonIiIiIpKbKTi5kLubfVJDQ9ORi4iIiIjkagpOLuT+1ywiNjQ5hIiIiIhIbqbg5ELupr96nDRUT0REREQkV1NwciGzm/0eJw3VExERERHJ3RScXMg9LTipx0lEREREJFdTcHIhD7OCk4iIiMi/XbNmzRg6dKirq5GldevWYTKZuHr1arb3KVWqFJMnT75jdcpNFJxcKG06ckOTQ4iIiIi4nMlkuulX3759b6ncBQsWMHbs2L9Vt759+2IymRg4cGCGdYMGDfpb9buTRo0aRa1atVxdjdtCwcmFPMxpk0MYLq6JiIiIiERERDi+Jk+eTGBgoNOyjz/+2Gl7i8WSrXLz5s1LQEDA365f8eLF+f7770lMTHQsS0pKYu7cuZQoUeJvly83p+DkQmnTkaMeJxEREfm3MwxIif/nv4zsX6AuVKiQ4ysoKAiTyeR4nZSURHBwMD/88APNmjXD29ub7777jqioKJ544gmKFSuGr68v1atXZ+7cuU7lXj9Ur1SpUrz77rs89dRTBAQEUKJECb788sss61enTh1KlCjBggULHMsWLFhA8eLFqV27ttO2ycnJPP/88xQoUABvb2/uu+8+tm/f7rTN0qVLqVChAj4+PjRv3pyTJ09mOObmzZtp0qQJPj4+FC9enOeff574+PhsfJrZs3fvXh544AF8fHwICQnhmWeeIS4uzrF+3bp11K9fHz8/P4KDg2ncuDHh4eEA7Nmzh+bNmxMQEEBgYCB169Zlx44dt61u13O/YyVLlhwPwNU9TiIiIvJvZ0mAd4v888d9/Rx4+t224l599VUmTpzI9OnT8fLyIikpibp16/Lqq68SGBjIr7/+Sq9evShTpgz33nvvDcuZOHEiY8eO5fXXX+fHH3/k2WefpUmTJlSqVOmmx3/yySeZPn06PXr0AGDatGk89dRTrFu3zmm7V155hZ9++omZM2dSsmRJ3n//fdq0acOxY8fImzcvp0+fplu3bgwcOJBnn32WHTt28NJLLzmVsXfvXtq0acPYsWP55ptvuHjxIoMHD2bw4MFMnz791j7AayQkJNC2bVsaNGjA9u3biYyMpH///gwePJgZM2aQmppKly5dePrpp5k7dy4pKSls27YNk8kEQI8ePahduzZTp07FbDYTGhqKh4fH367XjSg4uZCHZtUTERERuasMHTqUbt26OS17+eWXHd8PGTKE5cuXM3/+/JsGp/bt2zNo0CDAHsYmTZrEunXrsgxOvXr1YsSIEZw8eRKTycSmTZv4/vvvnYJTfHw8U6dOZcaMGbRr1w6Ar776ilWrVvHNN98wfPhwpk6dSpkyZZg0aRImk4mKFSuyd+9eJkyY4Cjngw8+oHv37o7esvLlyzNlyhSaNm3K1KlT8fb2ztZndiOzZ88mMTGRWbNm4ednD7effvopHTt2ZMKECXh4eBAdHc2DDz5I2bJlAahcubJj/1OnTjF8+HDHZ1a+fPm/VZ+sKDi5UNpznFBwEhERkX87D197748rjnsb1atXz+m11Wpl/PjxzJs3j7Nnz5KcnExycrIjCNxIjRo1HN+nDQmMjIzM8vj58uWjQ4cO
zJw5E8Mw6NChA/ny5XPa5vjx41gsFho3buxY5uHhQf369Tl48CAABw8epEGDBo7eG4CGDRs6lbNz506OHTvG7NmzHcsMw8BmsxEWFuYUYm7FoUOHqFmzptNn1bhxY2w2G4cPH6ZJkyb07duXNm3a0KpVK1q2bMmjjz5K4cKFARg2bBj9+/fn22+/pWXLljzyyCOOgHUn6B4nF/LUUD0RERH5rzCZ7EPm/umva4LB7XB9IJo4cSKTJk3ilVdeYc2aNYSGhtKmTRtSUlJuWs71Q8pMJhM2W/bOCZ966ilmzJjBzJkzeeqppzKsN/66r8t03Xs3DMOxzMjGvV82m40BAwYQGhrq+NqzZw9Hjx69LQHl2vpcL2359OnT2bJlC40aNWLevHlUqFCBrVu3AvYZ+/bv30+HDh1Ys2YNVapUYeHChX+7Xjei4ORC7n89xwmTgpOIiIjI3Wjjxo107tyZnj17UrNmTcqUKcPRo0fv6DHbtm1LSkoKKSkptGnTJsP6cuXK4enpye+//+5YZrFY2LFjh6OXqEqVKo4Akub613Xq1GH//v2UK1cuw5enp+fffh+VK1cmNDTUabKJTZs24ebmRoUKFRzLateuzYgRI9i8eTPVqlVjzpw5jnUVKlTgxRdfZOXKlXTr1u223Ht1IwpOLuShoXoiIiIid7Vy5cqxatUqNm/ezMGDBxkwYADnz5+/o8c0m80cPHiQgwcPYk67EH8NPz8/nn32WYYPH87y5cs5cOAATz/9NAkJCfTr1w+AgQMHcvz4cYYNG8bhw4eZM2cOM2bMcCrn1VdfZcuWLTz33HOEhoZy9OhRFi9ezJAhQ3JU38TERKdeq9DQUE6cOEGPHj3w9vamT58+7Nu3j7Vr1zJkyBB69epFwYIFCQsLY8SIEWzZsoXw8HBWrlzJkSNHqFy5MomJiQwePJh169YRHh7Opk2b2L59+98ePngzusfJhTzc0j5+BScRERGRu9Fbb71FWFgYbdq0wdfXl2eeeYYuXboQHR19R48bGBh40/Xjx4/HZrPRq1cvYmNjqVevHitWrCBPnjwAlChRgp9++okXX3yRzz//nPr16zumSE9To0YN1q9fzxtvvMH999+PYRiULVuWxx57LEd1PXLkSIbp0hs3bsyGDRtYsWIFL7zwAvfccw++vr489NBDfPTRRwD4+vpy6NAhZs6cSVRUFIULF2bw4MEMGDCA1NRUoqKi6N27NxcuXCBfvnx069aN0aNH56huOWEysjPA8V8kJiaGoKAgoqOjs2xwd9qm48cZ+HsXAP7s/ecNx3iKpLFYLCxdupT27dvf0ek25d9DbUZySm1GciqzNpOUlERYWBilS5f+2zOvyb+PzWYjJiaGwMBA3Nzu/AC4m7XHnGQDDdVzIQ9zeoefzVCvk4iIiIhIbqXg5ELp9ziB1bC6sCYiIiIiInIzCk4udG2Pk4KTiIiIiEjupeDkQk49TjYFJxERERGR3ErByYXU4yQiIiIicndQcHIhj2vm3dfkECIiIiIiuZeCkwt5mM0Yhn0K8lRbqotrIyIiIiIiN6Lg5EJmk4m0H4HFquAkIiIiIpJbKTi5kNnNBH/1OKUoOImIiIiI5FoKTi5kdgNHj5Nm1RMRERH5V2rWrBlDhw51dTVyrFSpUkyePNnV1cg1FJxcyOzmBob9R6AeJxERERHXMplMN/3q27fvLZW7YMECxo4d67J6mUwmFi1adMvHv5FRo0ZRq1at215ubuWe9SZyp5hNOIKTxaoeJxERERFXioiIcHw/b948Ro4cyeHDhx3LfHx8nLa3WCx4eHhkWW7evHn/0XrJnaEeJxcyu5kwNDmEiIiI/AcYhkGCJeEf/zIMI9t1LFSokOMrKCgIk8nkeJ2UlERwcDA//PADzZo1w9vbm++++46oqCieeOIJihUrhq+vL9WrV2fu3LlO5V4/VK9UqVK8++67PPXUUwQEBFCiRAm+/PLLW6pXoUKFmDNnDmXLlsXT05OKFSvy7bffOh0LoGvXrphMJsfr48eP07lzZwoWLIi/vz/33HMPv/32W7Y/q+zYu3cvDzzwAD4+PoSEhPDMM88QFxfnWL9u3Trq16+Pn58fwcHBNG7cmPDwcAD27NlD8+bNCQgIIDAwkLp167Jjx47bWr+cUo+TC5lM6ZNDKDiJiIjIv1liaiL3zrn3Hz/uH93/wNfD97aV9+qrrzJx4kSmT5+Ol5cXSUlJ1K1bl1dffZXAwEB+/fVXevXqRZkyZbj33hu/34kTJzJ27Fhef/11fvzxR5599lmaNGlCpUqVclSfhQsX8sILLzB58mRatmzJkiVLePLJJylWrBjNmzdn+/btFChQgOnTp9O2bVvMfz1HNC4ujvbt2zNu3Di8vb2ZOXMmHTt25PDhw5QoUeJvfUYACQkJtG3blgYNGrB9+3YiIyPp378/gwcPZtq0aaSmptKtWzeefvpp5s6dS0pKCtu2bbOfHwM9evSgdu3aTJ06FbPZTGhoaLZ69+4kBSeX+6vHydBQPREREZHcbujQoXTr1s1p2csvv+z4fsiQISxfvpz58+ffNDi1b9+eQYMGAfYwNmnSJNatW5fj4PThhx/St29fR1nDhg1j69atfPjhhzRv3pz8+fMDEBwcTKFChRz71axZk5o1azpejxs3joULF7J48WIGDx6cozpkZvbs2SQmJjJr1iz8/PwA+PTTT+nYsSPvvfceSUlJREdH8+CDD1K2bFkAKleu7Nj/1KlTDB8+3PF5lC9f/m/X6e9ScHI1Q0P1RERE5N/Px92HP7r/4ZLj3k716tVzem21Whk/fjzz5s3j7NmzJCcnk5yc7AgLN1KjRg3H92lD7yIjI3Ncn4MHD/LMM884LWvcuDEff/zxTfeLj49n9OjRLFmyhHPnzpGamkpiYiKnTp3KcR1uVK+aNWs6fQ6NGzfGZrNx+PBhatWqRZ8+fWjTpg2tWrWiZcuWPProoxQuXBiwB8D+/fvz7bff0rJlSx555BFHwHIV3ePkYibs3ZGpmhxCRERE/sVMJhO+Hr7/+Ffa0K/b5fpANHHiRCZNmsQrr7zCmjVrCA0NpU2bNqSkpNy0nOuHnZlMJmw22y3V6fr3aBhGlu97+PDh/PTTT7zzzjts3LiR0NBQqlevnmW9s+tmdUhbPm3aNLZs2UKjRo2YN28eFSpUYOvWrYB9xr79+/fToUMH1qxZQ5UqVVi4cOFtqdutUnByOfs4Uz3HSUREROTus3HjRjp37kzPnj2pWbMmZcqU4ejRo//Y8StXrszvv//utGzz5s1Ow948PDywXneRfuPGjfTt25euXbtSvXp1ChUqxMmTJ29bvapUqUJoaCjx8fGOZZs2bcLNzY0KFSo4ltWuXZsRI0awefNmqlWrxpw5cxzrKlSowIsvvsjKlSvp1q0b06dPv231uxUKTq6mySFERERE7lrlypVj1apVbN68mYMHDzJgwADOnz//jx1/+PDhzJgxg//9738cPXqUjz76iAULFjjdd1WqVClWr17N+fPnuXLliqPeCxYsIDQ0lD179tC9e/db6vF
KTEwkNDTU6evYsWP06NEDb29v+vTpw759+1i7di1DhgyhV69eFCxYkPDwcF5//XW2bNlCeHg4K1eu5MiRI1SuXJnExEQGDx7MunXrCA8PZ9OmTWzfvt0pDLqC7nFyMdNf2TVVk0OIiIiI3HXeeustwsLCaNOmDb6+vjzzzDN06dKF6Ojof+T4Xbp04eOPP+aDDz7g+eefp3Tp0kyfPp1mzZo5tpk4cSLDhg3jq6++omjRopw8eZJJkybx1FNP0ahRI/Lly8err75KTExMjo9/5MgRateu7bSsadOmrFu3jhUrVvDCCy9wzz334Ovry0MPPcRHH30E2J89dejQIWbNmkVUVBSFCxdm8ODBDBgwgNTUVKKioujduzcXLlwgX758dOvWjdGjR/+tz+rvMhk5mdz+XyAmJoagoCCio6MJDAx0aV0sFgt1pj0I3ud4tdaH9KzZxqX1kdzPYrGwdOlS2rdv7/IpOeXuoDYjOaU2IzmVWZtJSkoiLCyM0qVL4+3t7eIaSm5js9mIiYkhMDAQN7c7PwDuZu0xJ9lAQ/Vczj5Uz6oeJxERERGRXEvBycXShurpHicRERERkdxLwcnl/rrH6RannxQRERERkTtPwcnF0nqcrJqOXEREREQk11JwcjXjr6F6Ng3VExERkX+X/9gcZJJL3a52qODkYqa/JofQdOQiIiLyb5E2u15CQoKLayICKSkpAJjN5r9Vjp7j5GKO5zhpcggRERH5lzCbzQQHBxMZGQmAr68vJpPJxbWS3MJms5GSkkJSUtIdn47cZrNx8eJFfH19cXf/e9FHwcnFTNiTb6qG6omIiMi/SKFChQAc4UkkjWEYJCYm4uPj848Eajc3N0qUKPG3j+XS4LRhwwY++OADdu7cSUREBAsXLqRLly433Wf9+vUMGzaM/fv3U6RIEV555RUGDhz4z1T4DjAZ9uCUYrO4uCYiIiIit4/JZKJw4cIUKFAAi0XnOZLOYrGwYcMGmjRp8o88aNvT0/O29Gy5NDjFx8dTs2ZNnnzySR566KEstw8LC6N9+/Y8/fTTfPfdd2zatIlBgwaRP3/+bO2fG6nHSURERP7NzGbz3763RP5dzGYzqampeHt7/yPB6XZxaXBq164d7dq1y/b2//vf/yhRogSTJ08GoHLlyuzYsYMPP/zw7g1Of/U4Way6EiMiIiIiklvdVfc4bdmyhdatWzsta9OmDd988w0WiyXTxJqcnExycrLjdUxMDGDvInR1t7HFYnH0OKVYU1xeH8n90tqI2opkl9qM5JTajOSU2ozkVG5qMzmpw10VnM6fP0/BggWdlhUsWJDU1FQuXbpE4cKFM+zz3nvvMXr06AzLV65cia+v7x2ra3a5/TWr3pmIMyxdutTFtZG7xapVq1xdBbnLqM1ITqnNSE6pzUhO5YY2k5Mp8++q4ARkmA0j7YFWN5olY8SIEQwbNszxOiYmhuLFi9O6dWsCAwPvXEWzwWKxMH76MgBC8uejfdv2Lq2P5H4Wi4VVq1bRqlWru2pMsLiO2ozklNqM5JTajORUbmozaaPRsuOuCk6FChXi/PnzTssiIyNxd3cnJCQk0328vLzw8vLKsNzDw8PlPygAs8ne42QlNVfUR+4OuaX9yt1DbUZySm1GckptRnIqN7SZnBz/zj5x6jZr2LBhhi69lStXUq9ePZd/6LfKDU0OISIiIiKS27k0OMXFxREaGkpoaChgn248NDSUU6dOAfZhdr1793ZsP3DgQMLDwxk2bBgHDx5k2rRpfPPNN7z88suuqP5t4abpyEVEREREcj2XDtXbsWMHzZs3d7xOuxepT58+zJgxg4iICEeIAihdujRLly7lxRdf5LPPPqNIkSJMmTLlrp2KHMCc1uOkB+CKiIiIiORaLg1OzZo1c0zukJkZM2ZkWNa0aVN27dp1B2v1z3IzqcdJRERERCS3u6vucfo3SutxSjXU4yQiIiIiklspOLlYWnCyGupxEhERERHJrRScXMysySFERERERHI9BScXM5vU4yQiIiIiktspOLmY4wG4Ck4iIiIiIrmWgpOLeegeJxERERGRXE/BycXSepxsCk4iIiIiIrmWgpOLuZvsj9JSj5OIiIiISO6l4ORi5r9+BDasLq6JiIiIiIjciIKTi7n/NauehuqJiIiIiOReCk4u5pEWnFBwEhERERHJrRScXCytx8nQUD0RERERkVxLwcnF1OMkIiIiIpL7KTi5mHqcRERERERyPwUnF0vrcUI9TiIiIiIiuZaCk4u5//UAXPU4iYiIiIjkXgpOLuaZ1uNksmEzbK6tjIiIiIiIZErBycU8TOk/glSbhuuJiIiIiORGCk4uln6Pk4KTiIiIiEhupeDkYp5u6cHJYrO4sCYiIiIiInIjCk4u5m5yw7DZw1OcJc7FtRERERERkcwoOLmYh5sJwxoAQFRilItrIyIiIiIimVFwcjGzGxip9uB0KeGSi2sjIiIiIiKZUXByMbMJjFR/ACITFZxERERERHIjBScXM5vA9lePU2TCRRfXRkREREREMqPg5GLX9jhdTNA9TiIiIiIiuZGCk4uZTWCy6R4nEREREZHcTMHJxUwm8HfPA8AFDdUTEREREcmVFJxygWCvQABiU/QcJxERERGR3EjBKRfI62O/xykxNcHFNRERERERkcwoOOUC+fzsPU5JCk4iIiIiIrmSglMuUNDfHpxSjEQX10RERERERDKj4JQLFPorOBlYSbGmuLg2IiIiIiJyPQWnXKBIULDj+3hLvOsqIiIiIiIimVJwygWCfbwwbO4AJOg+JxERERGRXEfBKRfw83THsHkB6nESEREREcmNFJxyAT8vd/grOCVY1OMkIiIiIpLbKDjlAn5eZkePU5wegisiIiIikusoOOUC9qF6ngDEJGuonoiIiIhIbqPglAv4eZodQ/UuJ8a6uDYiIiIiInI9BadcwM3NhBveAFxRcBIRERERyXUUnHIJD5MPANHJusdJRERERCS3UXDKJTxMvgBcTY5xcU1EREREROR6Ck65hJebPwDRydEuromIiIiIiFxPwSmX8DEHABCboh4nEREREZHcRsEpl/Bz/ys4WRScRERERERyGwWnXMLfIxCAhFTNqiciIiIiktsoOOUSgV724JSo4CQiIiIikusoOOUSeb2DAUiyaTpyEREREZHcRsEplyjglwcAK8mkWFNcXBsREREREbmWglMuUdAvGMMwARCjmfVERERERHIVBadcItjPC6w+gJ7lJCIiIiKS2yg45RLBPh4YNntwUo+TiIiIiEjuouCUSwT7emKox0lEREREJFdScMolgn09MKy+AFxNuurayoiIiIiIiBMFp1wiyMfD0eMUGX/FxbUREREREZFrKTjlEt4eZsyGHwAXExScRERERERyEwWnXMTL7A9AVKLucRIRERERyU0UnHIRX3MAAFcSr7q2IiIiIiIi4kTBKRfx97AHp+gU9TiJiIiIiOQmCk65SKBXEACxeo6TiIiIiEiuouCUi+T5KzidTz6qZzmJiIiIiOQiLg9On3/+OaVLl8bb25u6de
uycePGm24/e/Zsatasia+vL4ULF+bJJ58kKirqH6rtnVXItxiGYQbgwx0furg2IiIiIiKSxqXBad68eQwdOpQ33niD3bt3c//999OuXTtOnTqV6fa///47vXv3pl+/fuzfv5/58+ezfft2+vfv/w/X/M4oHFCApHOPALA6fDUWq8XFNRIREREREXBxcProo4/o168f/fv3p3LlykyePJnixYszderUTLffunUrpUqV4vnnn6d06dLcd999DBgwgB07dvzDNb8zgn08SI2pgYcRRKwllu0Xtru6SiIiIiIiAri76sApKSns3LmT1157zWl569at2bx5c6b7NGrUiDfeeIOlS5fSrl07IiMj+fHHH+nQocMNj5OcnExycrLjdUyMfeIFi8WCxeLaHp2046f9G+DlBrjhba2AxX07ey7s4Z7897iwhpLbXN9mRLKiNiM5pTYjOaU2IzmVm9pMTurgsuB06dIlrFYrBQsWdFpesGBBzp8/n+k+jRo1Yvbs2Tz22GMkJSWRmppKp06d+OSTT254nPfee4/Ro0dnWL5y5Up8fX3/3pu4TVatWgXAsWgAdy5GFcC7IKw9uJYip4q4tG6SO6W1GZHsUpuRnFKbkZxSm5Gcyg1tJiEhIdvbuiw4pTGZTE6vDcPIsCzNgQMHeP755xk5ciRt2rQhIiKC4cOHM3DgQL755ptM9xkxYgTDhg1zvI6JiaF48eK0bt2awMDA2/dGboHFYmHVqlW0atUKDw8PTl1O4JMDv2NLLArAZY/LtG3XFjeTy+fwkFzi+jYjkhW1GckptRnJKbUZyanc1GbSRqNlh8uCU758+TCbzRl6lyIjIzP0QqV57733aNy4McOHDwegRo0a+Pn5cf/99zNu3DgKFy6cYR8vLy+8vLwyLPfw8HD5DypNWl3KFgzi5dYV+HBVMobNgwsJF5h/bD49q/R0dRUll8lN7VfuDmozklNqM5JTajOSU7mhzeTk+C7ryvD09KRu3boZuuhWrVpFo0aNMt0nISEBNzfnKpvN9um7DcO4MxX9hz3XvBzFgoNJvtgKgFXhru/CFBERERH5r3PpGLBhw4bx9ddfM23aNA4ePMiLL77IqVOnGDhwIGAfZte7d2/H9h07dmTBggVMnTqVEydOsGnTJp5//nnq169PkSL/jnuBTCYTjcqGYEssDsClxEsurpGIiIiIiLj0HqfHHnuMqKgoxowZQ0REBNWqVWPp0qWULFkSgIiICKdnOvXt25fY2Fg+/fRTXnrpJYKDg3nggQeYMGGCq97CHVGjWDDz9wQACk4iIiIiIrmByyeHGDRoEIMGDcp03YwZMzIsGzJkCEOGDLnDtXKtmsWCMVLtwSkhNYEESwK+HrljBkARERERkf8iTdeWC1UqHECx4GAMmycA5+LOubhGIiIiIiL/bQpOuZCH2Y2XWlfASPUHoOvirsSmxLq4ViIiIiIi/10KTrlU2fz+uHledrzeHbnbhbUREREREflvU3DKpQoEeJOaUMrxeu+lva6rjIiIiIjIf5yCUy6Vz9+T5IiHsP01XG/vRQUnERERERFXUXDKpdzNbuT1LEriqf4AbDu/jStJV1xcKxERERGR/yYFp1wsOdWGLbkQ1sSiWGwWfj72s6urJCIiIiLyn6TglIvFJqUCYLl6LwATd05k/pH5rqySiIiIiMh/koJTLvZmh8oAWKJrYsYLgDFbxjBkzRBe2fAKNsPmyuqJiIiIiPxnKDjlYv3vL8P0vveA4YXpYg/H8nWn17EsbBmnY0+7rnIiIiIiIv8hCk653H3l81Eo0JsrlyrQomAfp3WnYk4B9hn3vvrzK6w2qyuqKCIiIiLyr6fglMt5mN3ocW8JAH7+vbDTuvCYcAC6L+3OlN1TWHJiyT9ePxERERGR/wIFp7vAE/eWwN3NhJEajCW6lmN5aOQetp/f7ngdER+R7TJ3XtjJ/kv7b2c1RURERET+tRSc7gL5/L1468EqACRFPEzypeYArAhfzlMrnnJs91noZ0zdM5UES8JNy4tKjKLv8r48/uvjJKUm3bmKi4iIiIj8Syg43SX6NCrFsFYVwHDHcuXeG273eejnTNk9xWnZRzs/Yvy28Y7XJ6JPOL7fH6VeJxERERGRrCg43UUerlsMACM1mITTvW+43eyDs6k+szrPrX6O8Jhwpu+bzuyDszkffx6AsOgwx7abzm66s5W+zTQBhoiIiIi4goLTXaRIsA9P1C8OgDWuCsEXPqa21/MkXejg2CbYK9jx/YYzG5x6ms7FnQPgwKVjjmVf7f2K6jOrM2nnpGzXwxXhJSYlhoVHF3L/vPsZtXnUP358EREREflvU3C6y4zrUp2Rf93vdPpyIhtCi5AaW9mxPuLAAF6oMtnx+vezvzu+/+noT/x64ldWhi/PUO60fdMwDMNp2enY00zbN409F/eklx8XQdMfmjJu6zg+D/2cZWHLcvweIhMiuZR4KUf7DFs7jJGbRxKbEstPR3/K8TFFRERulz/O7aD3rwMJjz7l6qqIyD/I3dUVkJwxu5l4qG4xvt54gnPR9okdDEserIlFMWxepKYEM3lJKo/Un8WCS09hI9Wx7+Lji1l8fLF9H5sZS3RdPPNsc6w/F3+OvRf38vXer4mzxHE27iwAId4hrHx4JbMOzOKPiD+ITo5m3uF5jv3alW6X7fonpSbRYn4LTJjY2WsnHm4eWe6Takvlj/N/OC2zWC14mLPeVyQnLFYLr258lToF6tCzSs9MtzEMA5PJlOm6iLgINp/bTJdyXTC7mW97/ZaFLeO7A7OZ2OxDCvkVuu3l5yYHog6Q1zvvv/59utLZuLMU8Cmg36W3oP+qJwF4etnLrHz8BxfXRkT+KepxugsF+Xjwbf976VA97blOZhJODibx1NOAibjkVKZvPIclKX+GfQM8Ayjp2YSE8IFY4yo6rfvl+C8M3zCcw1cOO0ITQFRSFC+ue5GPd33M1oitGcqMSozKMDvf6ZjTjnuqnJbHngbAwOB83Hn2R+3nyeVPOvVqXU26yvwj80m2JpOUmkS3xd0ylHMx8SIAcSlxDF07lKUnlmb+YeVQgiWBtafW/uOzDabaUrPe6DqnY08zbus4x2eam+26sIuuP3fNtP3kJkuOL2dV+CombJ+Q6frQyFDu/74JC48uzHT9gJVDGLVlFO9vm3hH6vfKhlf489Ie3t6Yef3+LcKjw3lsyWO0+rGVq6vyr/XHuZ20/akt3X95xtVVuatFJJxxdRVE5B+k4HSXKpvfn8961GHf6DYsGNQIMAEmPu9Rh54N7A/MTY2rkGG/sbUWcDGsK7ak4thSnIPVZ6Gf3fB4G85suOG6Zj80o8vPXYhOjmbm/pksP7mc9gvb025BO2yGDcAxRfrpmPQ/MqdiTjNs7TB2XNhBz6U9iU6OBuCtzW8xZssY3vz9TdadWec0mUWa0zH2+7W+Pfgtq0+t5tWNrxKbEsuXf35JhwUdGLZumNPQw+n7prP8ZMYhivayTtN/ZX9WnlzJ+9vf5/m1z/PJ7k84fPkwX+/9GovV4rT9mdgzRCZEEpUY5
Vj205GfaP1j61saujj74GwazmnIjvM7AByf2bVsho3N5zYTmxLrWDZ07YvMOzyPF9YMvWHZ8ZZ4Jmyb4BRMwR66LFYLV5KuMO/QPJKtyTet49pTaxm6dqjjZ5RT/Vc+zbGrxxj023M33W7DmQ30WNojR2Fw5cmVzD442+nnbRgGy8OWcyY2Zyc1m8PSh91kFmZfWfcW0SlXGbl5ZKb7h8UeBmDO4W9zdNyc2nzhtxy/t7vJj/s3O77P7P+D/H3v/T4NgEPROzKsS7Gm/NPVuWuZjNvfs3w7GYbB6vDVRMRl/zmPInJjCk53OX8vd0rm9XW8blYxP+O6VOf3V5uTEtWUlMuNiA8bQnJkW+JPDKHfzJ1ciEnGw2zikVo1b1s9zsad5eFfHubDHR8yfP1wwH7ieTb2LDP2zeDeOffyW/hv/BmZPhX6gUsnOBd/zvG697LeGIbButPrAFh+crmjrOsdjbKfNJ6OTu/VajS3EZ/s/oRTsadYFb6KmJQYAI5cOcJHOz9i+PrhWKwWDMNwnGTHpMTQd0Vf/oj4g5fWv+S4f2rWgVn0W9mPj3d9TJefu7DkxBJSrCnEpMTQbkE7WsxvQbMfmrH9/HbOxZ1j1JZRRMRHsODIzzn+7MZvG0+SNYmxW99h7JaxNP+heYaT4sk7JzNg1QB6LO9BkpH01/uyn6QfvXqEafumcS7uHJcSLzF933RHUB29eTTfHfyOZ1YOcHwWw9YNo/2C9kzdM5WBq55l3B/j+Hjnx07HMwyDZWHLHKH1+bXPs/rUat79Yzy3wmJLcfr3Rp5b/Rx/XvyTMZuzd5wESwIvrX+J8dvGs/lc+sn2srBlDN8wnEd+eSxH9UxKTQ/JF+Iy3od3PjY+22VZbJabro+3xDN1z1SnxwOkSbWlsujYIqee3+vvQWy34OZDZK8mXWXCtgkcuXIk23W+Fdf+f7pd4pPTJ6CJSrzq+D7Zmsz+S/udwtSZ2DN8svsTpwsZ19bNVWyGLUfH33p2B23md2bj6S13sFbp4pIzb58fbP0fjefdz56kQ/9IPW63S4mX/uGwffPg5Ko2eOTKEc7EnuGHg78wdN1QOi58KNPtLFYLq0+tJjE1McO62JRY5hycm+N7knOD6KRoWs7ryNsbP3R1Vf4VopOj6bqgJzP+nJf1xv9yCk7/AiH+Xkx5ojZf966Hr6f9trVieXwpG5Kf5AudsCUVJSWqGbbkogB0qF6Y759pSKtKRUk635mUy41uWr5h9cpWPTIbmrfm9Bom7rQPW3px3Yt8c2CyY93hqHCnbU9En+BMXPauok/Y9RYf7fiIrWf23XCbtPBxIe6CY9mDCx+k7nd1afZDMz7Z/Qlv/P4GkQmRme6f1rtyKvYUIzaOoO53dRm3ZZzTNhN3TOJwVPqJ6aGL57hVF2IS+OHID1xOusz4belDsS4mXGTmgVn29xR3hoMph5i0y3kWxEk7J9FraS9eXvey47ld2yK2seykvQcsITWe8JhwHlr8EKvCVwH2GRUPXLY/x+vbg9/y1qa3OHbFPuPiutPreGXDK3Ra1MnpOKtPrs+07tvPb2f9afu6X47/wvbz27M8YUi1pXL48mHHdvGW9FBy8Hzmf6g3nNnAzP0zHftsPbfTse7L3d87ln/7p/19x1liMu0lWxW+irDoMA5fPuyYbRIg1nLF8f3xK/YrtFablS/2fMFXf36F4X4xQ1kJlgRmH5xNdHI0hpH+K3V/5BFiU2KZsmuKUwBK8/4fH/N56Oc8uvhxwP4Zzj44G6vNyncH5vHWprd4+Of04BeXEpehjENRh1ketpzLSZczrBu7eSLfHfyO7st6ZFj3dxiG4aiLzbDx6C89eXRxD6w2K4mpifxw+Idb7plMk2RNf4j3oYunSLAkEJ0czYM/dePxXx/nkx1fO9Y/8UtfvvzzS15b965TGZeTLtNsXitGbnRenibVlso3e6cxYsNbWT40PDs+3zWN5t+34XTMGS4lXqLh7Pt5ZvnQG25//TEH/PYM5xJO8Pya59kduTvTE9ns2nNxj1O7TnPo8iEWHVv01/+T9P+f1waN2YenYcXC/KTvst3zdH2v/D8h7eLOtX935h1YTPMfHmDUhuzPEvt3ud0kOG0+u4N7Z9/PzL3z/7H6ABy/Yv9d32Xh40wPXQJAsi02w3ZPLX2BOt/VYejaoUzY/E2G9c+vfJP3tr3LU0uH3ra6RSdHOy5q3knv/v41F5JOsuDEzDt+rJtJSk1i0o4pHLl81KX1WBa2jKNXbq0ONsPGm2sncyx2DxN3p58DfRk6g+8PZpysa3X4Gt7/Y9INf4elWFMYsW40q09lfj6R22lyiH+JTjWLZFhWvWgQxy86XyE/9k473M32k7sTF+OwXGkIgM0SjNn3JEnnHsMjcA/ehRfYl6f6kXimN+6+xzC5x+GR5w9MpoxX8ywx1fEI3Jth+Yc7bny15/jV4xmW5eQemOn7p990/eGoU1TNV5UdZ086lqX1cF1OusyXf37pWG7YzJjcsp5mPS2IpDkUdYg1J9NP3mNTs39lzjAMpxPMxNREx8XL9WfW0WtpL/pW7cvQdUOd9vsp8UfI5GJwZGIkkYn2ELjw2ELWnXb+pdTj18wnO0iz6NgiUlJttC71gNMx+yzr4/g+2YjNMDlCUmoST614CoAifkUcn3GZoDJExEfQokSL9Pds9Qbsv4g/2/0ZX+/7mjfufYPHKz3OH9eEoGgOMGXXFJ6v83z6suRonlttH+p3JvYMJpOJc1fT70XbFbWOtj+1ZWTDkZyMSe/FWRW2kXqFq3Eh4QJ1CtZhR8QOhq0bBoC7yYNUw8IHTT6gTak2XE5K//m9ueUlfiu2nD/O7eDT0E8zfF7J1mS8zF68uXEMq07/yi/Hljv931gdtpOfjy/hx2Pf8dXer6iVvxbdynejS7kuRCZEsuy4/eeTbEvEMAwG//YiCdZoTsdEsOaEfWhlXGp6+zh2OeOJ8CNLHgYgr1cB5neaSwHfAoB9Ns2VpxfZP2tu7+MDZh+Yz4QdYxnZYDSNijTm0JU/AThyOYzJOz9h8/k1LDu+juntP3faLzo5Gg83D7zdvdl+fjuV8lbiStIVvtk7i60RW3n3/tHcU+geAC4npgfYQet6kc+rCM2KtuN8on0o5Y+HlvPCPfZ7c65a7BdGtkVsdzre5D++43LyBRaemMuY+1/P8D7Gb/mEecfsw9VCPEvycoP+gP3/5ce7PqFUYAm6lO+S7c9l6l77yfrbGz6hiF8xEqwxbI1cQ2xyLAFeAY7tvvlzFv/781OSrIk8VeU5XrxnIAA27OEjlSR6L+tN9TwNmdPpy4wH+kt0cjTfH/qeBkUaUDN/+uiBI5dP0HNpT0y48Wcf5yG6j/zyCADepmCMa4LTC6tfoXmJ++lWoTOWlADcPO0nPH9eOEHDktVvWId4Szz9lg3m0JU9/K/l1zQoWidbn9Xt8N6WKcw9+jXFfKqy7NHvsdgsjNv+BgALT85gTNOXAFh/ej2F/ApRMW/FmxV3UxabhR3nd1CvYD08zB7EpaSHXtM1wWlt+EZm7JvD
+81GUdCvIC+teZ1EWzQf7hpDn+qP3PLxl51YxZaz23ir8StZTqh0OuY0XRY/CECyEc2llDD7SH6cJ7Y5efUM2y+ucey3+PgCRjUZ7FTWjij7+rC4PTT8rindK/VmSL1+t/w+EiwJtJnfCTfMrO++IluTQ2UmKTWJL/d8w4Nl2lEmT5lMtzl0Kf1ilc2w4WZy40L8BayGlSL+Gc+X7oTPd81g6l77heNp+79iXON36Vyu420p+0T0CfJ65SXYOzjLbdeEbeGVDa8A8GfvP284uVGaM7FniLfEO/7PDP1tJOsupI+mOX75FJEJUXyyx/7eupZ/EC93+wX2ufuW8O7OEQDEJRm81mggvh6+xKTEYBgGYdFhTN01i80XVrEk/EfGBY/jbqPg9C82pEV5jl2M46nGpdl+8jKtqxZyhCaA4tcM8bNcboLlchP791frO4ITGNgSS5KSWBKwkXyxFSZzIp55N2P2Po3Z134ik3qD4HQzx+K2Z1j23h/vOb5PjS+Nu1/G+5uuZxhmUi41xSu//Zd8amwl3AMO8fbWV3hn25ukZDE8zLB6EXfsNXxLfI3ZJ2PPQOKZnvgU+y7Tfa1YWHRyRvprUyxD1gwhn08+aheozZnYM4RGhjKp+ST8PPwc2+26sIuPdn7kdO+RzXzl2qIJvRiaITTlxJVk516I6JSrWe6z5fQ+lp5c7LRsV+Qup9c/Hv2Rh8s/jMlkIik1iedWp/+xvXboZdoQtCUnljiWmcxJPLHkCfZFpfcUvvPHO0QnR2cIJ1/t/Yrnaj3nmJ1u3sEFjnXfH/4+0/qfiz/HwN8GOi0b/ccIx/fF/IuTzyP9D22qYT9ZHb5hODHJsURf0+N0Jfkidb+rSyGfkpkeq8GcBgytM5RVp38FYP/lUKf1M4584PQ69GIooRdDsRkwaovzPVIvrB1KgtUekuYcmo2/UcFxsrP34l4K+hXkWNSNezMvJ0cy5veP+LS1fYjjs78967R+asz/SDqaxGNVsh66aBgGv5/9nfjUeFoUb+GYce3Pi3/y87Gf+eGIfQaxMVvfJtAj2LHfltMH2Xze/n9wx8WNnIw+SamgUoB92GCr+W0p6FuMgn752Xbhd0r4ledUfPoV0KdWPMW37b4lxZrClWTn/wuXks/x44n0K+JXbUfptLAbfaqmXwxwswU57XMyKv2qdoIlgZ+O/kTrkq0p6FeQfRf38cPRbx2f8czDU+hQvjFf7pmOr4cfi0/8CEC9QvUwMCgeUJwL8ReYuucL+lTtRXjMGeYc+J7XG9gn02lWvJnjWGevJHAx7qDj9fJjW3mkavokF5N3p7eLaQc+Y2DtPniZM/bq772yhVn7Z9G5XGeCvIIyrB+xZiIbIxfyaeinfN/+R6rmt5/k/LjP/lBzAxtXEqMJ8g7AalgxkX6ytOjADlJs6Sf/686uYN3ZFXSr0Bl3czJp8f/45fM3DU7jN33N/iv2e6S+27P6bwWnb/fPIdWWSt9qvTKc2IVGhpJkTaJB4QaOZXOP2nsdzyTae823nzngWJc2SmLzmR0MXmP//bTxsY3ZOsm83oIjP/P2ljcBeKLcs7zeeBCno6/peTbZh7QFeAbw/LpBAPT9dTBNStYnzpZ+X9Hp2NMsOb6UhkXuZdiaN7DYklnY5XusNhvdfn6cFsXbMvr+4ZhMJj744xM83Nx5od5AFh1bzMjN9uNXy1uPR6u2cZQZkxLDF6Hf8HilhykeaH/G41vrpjrVP9mU3iN3OTGa7w8uZPHxRZxLdB4i7EXGyaSuFWe9zJf7JzOoTl/MbmZOXD3BO1sm0a9GL/L6BFLAq8BN9z8cdYyHl3R1vN574QR1Cuc8zIZFh9Hj197EWq4y++APLOn2IwuPLqJ75Sfw9/R3bJeYmt5DHxEbRUH/PHT4qSvJRiwbHt1IHp9gx/ptETuZ9uds3m36Jnm98zodb9OZbUze8TkTmr1NmeDSnIk9g7ubu2O2z6TUJOIt8YT4hDjtl2pLdYSmNG9uep3qITUpk6dEtt/vrguhhEWf4KEK6ZNk7b94hCeWPkohr7KsfDzrx7OsOJJ+ntFv+bN81moSPu4+gL232GKz4OvhS2xKLN/sncU3+/4HwMpuvxFjiWPtOedbEBYd2sjJ6PRzpWOXI6haoJR9n2OhjuULT07nyJXDPFqpK6P+eBWDjBfdtyZtpW5CXYoFFcv6w8glFJz+xcrm92fJkPsB6FYnY6P0MLvxyRO1OR+dxDtL7X/o7ymVh+0n009azJai1C4RzO5TVwE3sPli2HxJvtARk/sVvAsvIOVSc2ypgY59rMkFMLklYUsuiMnjKskRD+Fbyv4fMfnSA6RcbIFv6U8xe9v/qNhS8pKaUAbP4B2O+0IsV+uSFPEwbt5n8S642BHQ4k8Mxa/MZKf3YViCsFxpiEeerdgSS2CzpP8Cu1FoMgwTJpP9imtqfAW61izHwtABeIasxyN4B24e6Vf6rQmlsvqonaTdo/XjkR8dy1afWk2nsvZhbyeiT9BvZb8cz6SXGl8Gd7+M98NkxZpYHLOPfbKF5Iut8My7HpM588/lijXr8sdsGYO32ZuOZTsy//CPbLtuqvisXBua0mTWowMwftsEGhVpyPmE83yy56NMt7GlhODm6Xx/i80SiDW+HO7+hzC5p58gnok7zRkyn3jiw+0fY7V6w3UX484nhme6faot9aY9qjfyv11zMyxbezr9yq9BKvGk/xy6L+1O2YAq1A5pkWE/AGtyfsxeF1l/bjnJ1tHEJGccBnPWdoZx28dRMV9FFh1bRFRiFBXzVmRwbecrzGtPreXNTW86htIMqfEKz9TuRUxKDD2WZhzyF2O56vh+9/kDTus6LurIjp47cDe5M23PjyTZ4gmPO0x4nP3evGtDU5pey3oBEOh24xMLw+qJyZxCWMxRRm1527Hc3WzDarPyyvo38HX3dQoKw1aPZNOFFUz7czazOnxF/xVDMEwWrMn5cfO8hMlk8OiSRzMcq92CdribvFncZQGvrZ3An1fX8/OxxaQayX+9R/ukOQ0KpAcjL3d3TiftdrSj+UcWseL0QnpV7U6DwvdkOMbMPYuITM78WUAf7PiAhYfWsPChGYD9IoSHmwdtSrVhZ2T6pA6/HN5GscBCBHkFOd0TNv3P+cw58jVWw8JLdd5wKjvJFp1hsP7gVcOwmdN/952JzTg0ddXJ1Zy4GsbTNZ9i9alV12ybcZh1giUBs5uZj7Z/Qs0CNWhfpjWGYfDE4v7EpFxlUbe5rDu9jtjkBN7fYb9olpJqYkDtXo4yYpNjHe3it4fW8Mmu/xHg5e90nAvxF9hyZr/jtcmczMFLhxm0ZoBj2f3z7mdu+x+olr+y077RydEcvXKUeoXqZaj/suNrHKEJYN6RWbzeeBCnrqb3TKeaI2g0txGDqr2S/lkkHmLOIedhAY8v7ktMaiSf70n/XffDvg2ERYcRk3qRhWHfsjDsWwp4FyEyyX6R5Iejc52GDy888is7Lm3gSNQJOpftxM/Hf+V47B5WntjIqsftF5aO/nXva2b2R57kf/ucf4/aUv1xc48
jhas33O9aPxz8lbMJx5l5wN5bu+23dQBUCWhMd/ON77t8b9M0p9dTdn5O1QJlebHeINzd7KeiMSkxxKbE8uEfnxEec5o5Hb/C292bK0lXWB2+nrOxEcw9OJ94m72uCdbLPL30FY7H72DtyZ3M7fw/R/nRqecd/wePRJ0lLiWBZMM+XLH3r89SLKAo1fJV5bfw1RyLtQeLAcuimdXxU0eoABi6ZhhJRjSP/9KHpQ8tdNxb2qhQS8beP4JBK0ZwLGYP8zrOo2Leso791oSlP+rlWhtOHqBMnhIkpiYSb4knn08+AGb8OZev933DK/cMp1P5Nuy6sIuryVd5Ye0LABTyLknjEnXtn90f8zGwEpF8hKNRJykfUsr+eVgS8Hb3xs1k/4+d1sN49ZqLptsjN1F/dn1K+1fjy7aTeHbFq5yOO8Kv3X6m569Pcz4p/W/Pw4u7E5Oa8VaGTWe3EZmY/nf0o50fMvb+18njnYer11302h+9ma92WzMNTQBLkpZQ7khz+t1z9wQnk+HKu2ddICYmhqCgIKKjowkMDMx6hzvIYrGwdOlS2rdvj4eHa5+jsXzfefacucpLrSpw6Hwsnb6ai2fe3/GN68Tm4Q9xMTaZs1cTmbf9NEWDffh07TGn/Ud1rMzEY50Be7ixJTs/e8Uz32+YfcJJPNMTDC/cvM7hkWcr1vhypMZXBJsHXoUX4Bm0m5Sr95B8oQOl8gZxMioB96Ad+BT5EWtSERLCBhNQ2XnYjeX8Ewxt8Bjjl+8Fw43qVfZw0sj4XI2kiK54hqwHk5X44y/j5hWBZ8jvBCe3ZstLvZmy+hiTfrPfr2T2O4pP8WkkR7bnxyde44lfuzvCx60wm8y81eAt/rz0J39e3Muxq0expQSTcqURbl6ReAannwTZLEGY3GMcwQ7AsHkQENuTuKCbD08ESDjdG9/isxyvfePbk+C3FFtKCPd6jmXt0dP4lf0Ak1sqlugamMwJuPsfu0mJGRX1LUvzkg1Ze3I7ZxNv/Ic6TeyhMQRUynwmupyyWQJx80gPBz7xbUj0W+G0jc/VnrQv/SALz47H5hsKQNL5TngXcu5Nsy/vjGe+Nbi5Z7wH4FYYVm8Mqy9unuk9fpardfEI3nmTvbJmxhMrGQNvWZ7mmG0mJrcU+lcbyNf7/pfJ3pkb02gc3x+ax31FG3Nf0cb0Xt7LaX0Z7/uZ1eU9xm76iBWnF9yglBsb32gKJ2NP8L+9k3O8b2YMSxCBRk1iPW88y2dO9CjyJQv2byExz83/X1ULup990Rv/9vE+a/YNz61zHuoU4FaUWGsEZDIEOk1ez8I8U/Mpxm9/B4AqeWtw4PKfTtuYcOOLll8zZcsC9sUvyawYB1+3EHsPp+nmF2983IIp4leMXlUfo12Z1niYPajzrb1X6cnKg5l+MD0E+FGcIfV70LlsZ/w8/Nh36SA9lnbHuGao6PB6w7m/aHM6/dwegPr5m7LtovOQYjfDi1WPLKWAn70H49s9S3g/1N5r3KLwY6yOuPWb0yv6N+GN+/vja/bjva2TeL3hMN7aMJED0Zt4vsbr1CxUljOxZ2hSvAkh3iG0nNOdyNT0Cz1elkp82Pp53tg4kpgcDMu+kQBzQWKtF7LeMBs6l36UYoGF+Cz0czCl4hffiXg/59939fJ2YMflX52WtcozklVXxoDNl8kt3qV4QHE83TyZf/gnZh2ckaM6jA0aS4cOHRznM4ZhEBEfQYhPCK1mP8UV488M+zQs2IKuFdozZsu7xKU6XwCrHXIf4bFhXE7JOBIkM/m9StCj8hO4u1udLmo9UnoglfNWZczOIVmWUda/Dosest8X9VXoTKbsSS+nap572H8lfaRMPs8SXEpJv+hRLrAGZ+LC6F25P/vPR7ApKvOREb7mQFJsyVgNC1+1momvuw/dl9mHXZvxZe2jy2nyQxOnfRrl70KZvHmonK8Sb2x61bG8Q7F+tK9Yh893zWD/le00yN+aj1uN5blVr7IvahffP/gtw1ZM4UTy6pu+7+pBzdkbvTbLz+dO+PL+hTQsU84lx06Tk2yg4ORCuSk4Xa/Ua/Zfrnl8Pdg9srXTOsMwuBiXzJbjUbzwfShdahWhV8NSPDrzO0weV0iNruu0/UutKjBxlT2QlC/gz0eP1mLgdzs5ezX9xsGqRQLZfy4GsAFutK9eiE+fqMOWE1FMWH6A/Vd2YU0qwooh7eg05xXcAw6ScLovbu4xFPWuzuLBjen82SbqlsyDt2cKC8I/JzWuItbkwviXtXeXJ4Q/jTXJPrb5wWplWHsokvgUK93vLcG7Xe3DUXadusK2sMuMX3YITBYw3Dk0th2V3l6ET9E5uPvfeIay1LjyuPvbr6LHnxiKYfPEv9z7N9w+8exjpMbUBsC78HzMfsdJjatEyqVmGKlBmH1P4FN8OqmxVUg6343+TYoy78LTjv1tlgDij7+CZ97fMay+eIZsAJOVV6p+w/t7B+DmdQnD6kX7wG+Yv/sQhtWXTa+15MC5GAb8sAD3gP0EJLXF292Ls9HR+FcY4xTWMpNZjx/Yg138sdfwrzDW/t7OPYybZxRe+ey/iLc8vpuG39e+adlprAklHD2MmfGNfJWr5i329wu0zjuSlZfHOG0zqPznPNvofjp9N5owq73n74Xys/ho38uYvdKvoBmGiQFlpzJ930wsvumzmdlS8uDm6XzlzJpUELN35ic5lqimpCYWw7vIfPwS2lIvz0OsjX0Vs/d5DKs3r1T6gfdCh+Hud+OAmnShA+62vLgXvvFU5tbk/FjjKgM2rMmFMHuf46sHx/Dcup7gmXFylpwo7F2OiKScBeh/ukyTNYgHC7/IL5GjALBE18SWXACvAqtuvmMmDKsPO3tvwYSJOl93w+ST8Z7Lm+5v8wST5Yb/Z3ziHiTeaz1uHrcWyB8tOJUfLjyb9YbXyONekiupmfeQ3g5PVxnKV9dM8AP2odImU3o4yu9VnEvJ9hPdzK4yF/GpwLnEm8/02LZoXxqXLEu8JZ6FB7ZwOP7O3ETuafIlxch8YpBeFQbx3aFpGG7p91H6GqVJMGU9fPx2SolqQv1iFQhN/Drrjf9iGG58ft9intv04A23sVmCmdR4DkWCzTyxss0Nt8uJQb5D6d7hYWKtsRTxL0KPxc+w7+ofVAlqwMGruzFMyVgTi2H2+ecfpZDfqzgXk7N/8bNCUE2ORO/JesMsNAh8ht9PHsY9b+Zt2NstgJp5mvBHVHqgLR9Qj6OxGR8TkF218jYh9LL972Od4A6ciD7JVWM/lcxPcyI6jBT/32657Dth8yPbCfD1dmkdcpINNFRPMlWvZB52hF/h4boZu09NJhMFArzpVLMIJUP8qFI4kFSbDWuC/d6Rjx+vRakQP8oW8Ofc1UQqFAxg56kr7Ay/wsyn6lMk2Ie3O1bhkzXHaFIhHw/VKUbRPD70n7mDjUcv0bJyAd7pUh03NxONy+XjifqlGLEglvIF/ClfwJ/kyAdJjrT/QbCm5OfNhyoT7OvJ+uHNAfhuazhztqbfiJtwcgBu3md5t20Xft13nlSrjY8fr01MooUlf5
6jneNBwlCnRB7qlMhDx5pFeHPhXgY2LYu3h5kXmlfns231Mfsdw3L5PpIj2xBQ+bqhL+e6413kewybJ5Xzlad8/gB+OdYH3+KZz+rTq1Zr3m5fn4SUVOqNMxMfYT/5+OSJ2iSmWFl/tAi/7isKNk/ef6gWdUrmYeaM9ngXtD/sNzmyPbOebMyE5SEYBhw4URt3NzfaPlqCUUv74l1oMckXWzJieGUalc1H5cKBFA32oWiwD+937MicP2rw8ZO1MZngfHQSfX6bjOmvIYqJZx/H5B6Nd0HnyTA+7taWV/+Y4xQ+7J/xc6wf9iAtp2/C7BvGG80eJ+xiAvOOJ5EaWx1/L3csV+viHrCPxLPdcfO8jM0ShHvAAafeNoCi1t4cPeyOZ54teOTZ4tS7BDCtZzsW7K7MDxftfxg6VGjAsrVFne5Pu6+UfThOjXx1CLtgD06dqldm2t66xPLXTIOne+OWmo/OHWuz90IYW+LtwckaX4kKHo9znFFOxw0wlyABe3BKG96SpmXZ2jxZqxuvL7yXZ5tWoEvtovSZOYItF38h0K0EvRqWYsGRBzhmC8eweWO5WhevfOsc+6cmlOK7h4fj5+XOIwtCcb/B/YLF3drxUpteJKfaKBXix+krCTQpXxCf9flIJGNwMqw+mMzZm50tLeCkxlXAlhKCZ17nabGtyfkxmZMwmeOwXK2PZ56MQzRTE0rh7nsyQ5npZRTAsPpickt2DNVNuVKfEO+8xPpk/py1a3km1WVA/Tb8OGMtXqZg5j3yMnn93Wm/OOfByQ1PvNzt98+1LzSIxae+xSMoNMN2htULkzn5r+/TP88HC4zG21qCb/9cgU/RuZjcnGeWG3JvV+b+mZfTzMpQZpprTyJ9LbVJ8NhtP47hxvAWDflhzo3rb4mujVtqAcwh6b2tmYWmtKGNt8P1oQkgv7U1l9zTf09kdXKaWWhKiboPN7woGpSXC+7zWX52Bstv0slgWL0xmTN/SPm1bTAlqgmGzQOv/Jlfbb9RaAL49sjn4GYPyIUSB3DB75MchSZT7L2kup/92yFh84AP+f7PDYTm4KkCbinFub9sCVJXVMhwsc+welGFkTxQsTStKxcnyZKaIfxmhzWpIL1KjeHb8Dcxe9mHdO5NOEf/5S9zKHYrnUp1Z99V+++IA9FbHcPmauVpwd6k7M90l9mFtNSEUmDzwt0/69EOaXISmoAMoSltSHRONStZj+fqdefRH0Y7LiSmRDXBMEx45VtPki3WEZrSLs5lJzQlX2iHR/B23Lwy9nymhSaAXVfTA1nTsuV5o1gPHv3Bgle+nF2MCIzpg7vZINo4itU/Z49MMKxeWJOK4e5nvzjVr8Q0vjn1lGO9t0fufhba9RScJFNf9q7H+iORtKtW+IbbmEwmahUPBsATN7rfW4JDETG0rVbIcUJSoaB9NqkvetUlOdVGoLe9Z6111UK0ruo8nG/mk/W5EJtE4SAfp+WP1StOgQAvahYPxs3NxICmZVhzMJJpfe/haoKF6sWcb5x+pF4xrsSn0LJKQUqG+FJl5AqsiaWpUTyYx+qn3zuRx8+TXg1LZfreigb7MP3J+o7XL7aqQEJKR776vRIY7rSsXIANkW3xKpB+sje+S31e/ckHT7Mbs99oQLCvJ0VXJzLrjP2PRMrlhk4now/VtN8Y6+vpzuLBjbmaYKFuyTyOG6O71ilKgzIh1CuZh8qFA7FYbZTz7MDBQw1x87rAo9Ub0qRCfppUsN/Uu+PkZSxWg4KB3kzr0Y4nZ+RjwkPVyePnSZfaRZ3e38N1izmF4mJ5fDFbSmB42E/Y+9Vty+FzVtYdbGzvifrrxKt99SKMXjaQWO/luLnH4B5wCMMws6B/Z0qE+NKtdD82H4+ia83SmEwmIqMH0LW1/dgPhAxh1aHTjO9Sl4joJAoHebNy/3nWnl6Lm88Zxx+V97s0pUxICNN+r86MLa2IM47hkXeTY/KRcvmDeeGBGkx/73kwzDR8oghl1gzk6JUlmMzxWOPLUbGA/QbfR6rez/c7emGz5KFAgDfdyj7GzLP2k7xPu3WmbrESFAj05snabVn/yxo882ynsl8LJrRrxYPfL8LkcdVxElbIpygnUu3DNErEj+NY/Ea8i/yEYZhoWKw2dUvmYcXQ5o7P9OuezfjtQGWK5/XFZDLxetMejFtaiyAfTzo1K8TILRdwDziIzRLIvYHPcG8Z+715HYr3YUX0ywBYYqtgSyqCV/7fsMTU4NVWPXmgUvr/m7S2H+RZgMS/Oj5SLjfEPXAvyZHteKD4A2yL2EmKJZlU98t45NkKWLEmliY1riI+RZ2HkhhWL0bV/5Ciwf48u/kBp3V5zGUoZurKuSvRjGvfkv6LJuNd6BcArElFSI2rSL9qfdhwai9Ho/cAJrzyOwea+0N6cF/RFsw98j9OG/bhZG3K3UNR3/LMCvsdy9W6hHiUIcbvB1Jjq2DEV8Pkd5A6QZ3ZEbmJKV2GUDLEn0VPvIufl5mSIX4YhuE4qbEmFaJUysuEuX+Cu69ziLBZgnHzuJr+Xk3pYWJ8p1Y8droe49cu5YD1U9zc7bOR2lL9yGOrT7TZ3jareDzJ/qS5GDZvht73AIWCfHmpdWXGLL2PnecOcjFg0l/HCqRT1Vo0KVWVttOv4l4g4xBRa2JxyvjVIdxmP7EuH1iT09EluGjbRou8w7I8mdjS7yt+O76HMbtXZLremlgUN+9z1PUZxq4U+6QhqXHpJ9KJ5x6mmG8lzlt24JH/xqHVmlQIs/eNezP71nyID/5ce+MgE1+a1JjaeOZf4fhcr/V08fk827MCVpvBD3u28cH+m0/d7ZZYmbfumcCqI4fYnPJKhvVPVXyZ6UfGg8nCvEffplz+vHyx/jjTzz12w3BgS8mDyRyfacD0SC3Bi83v4bXrblkxbJ6Y3G4cSFuUvpcu5bsQl2Rh+Ob+TkO93VPKkep5455YS3RtPIJ2Y7lahxA/H+oWqQjZCE4pke0I8QmhR41WmEwmBtcYwdw96yjgH8QRPsVkslHa1IMfnkx/zIS3hzsmqz+42y+atQr4mF1nTxIV6Dylu2Fzx+RmH9ppWL35ru0P1CmZl2p/zmT4mtF45PmDjfwAf3WwLj6ZMfXbUv15smYXhv1h/5volVqObkXGMOvoBDyCMu/daVGsI+suf+a0rFOpx2lfrhmDl47HSgql/Ctz2t15OvXAmL4UzOPGUavzvVWZ8Y19CDe8uGI7AIYZk3tchlA2tdk8vti0jb1ur2XY/0azCgO0rViLED8fvug0lOfWHMKIr8bBoRNZf/wYz292Di/dy77E94fmYgrYfdP69i/9KYN63s/ABdPZljA5vR6xlXH3O4bJzYI1sTgmj8tO/+eq5i9DreIh7B48ifpf98bN3z4MNeVKfTyMvPi5B5EQ6DwU1hJbhc4l+zG+dysMA05GxdNu9vN45sk4ude1UuMqYk0qRMtinWhcsiLlCnrz5MqeGKn+PNekLt9kPt/WXUFD9VwoNw/V+zc5fTmBk1Hx3F/+5rMGZSU8Kp7Jvx2lZ4OS1CkRTOkRSzH7H8K70
E8knXuC4yOH8PvRS7ibTTT46yR4Z/gVHpszBaw+pMZVJaCy/ZeuYfVkT98dmN1uPi3o9SwWC1/NX8ol/7I837ICwb6eN9w21WpzmkUxK93+t4oDKd9iswSxvv/75PP34sC5GHqs6IzJw36/zt4+e/nzzFXe+fUgB87FkOi5C8Pqw/4RQ7I80UtOtRKdaKFAQHqXvM1m8POes+w+dZW5B+djGG7sfvF1ArzTx8j/EXaZqb9vZaf1TSzRdTk81D5k5dD5GNzd3ChXwJ8zVxKYvukk0YkW6pfOy6P1ijuO8fvRSxQK8qZcAX+iEy3UHj8Dd49ktr74DHn90j+/D5Yf4svNf/JNz+bcXz4f45cf4mq8hUVnpmAyJ9C7wnPMPT8Qw2Zmb9/dbDx6iafnLQK3eDYMGUjBwOwPNTAMg0m/HeXAuWjAxHvdqpM/wD4T2MXYZJp/Mp2EFINXm7emfEF/xvy6g173VqHffaUzLa/D3MGcSrH/ER5V/RfWHb7Ciy3LUya/P2eiYlm9eg0V6zTk132RFM3jQ8FAL/IHwrO/O9/M7RHfiJ3P/g+TyUSrz6cRFn0anyL2HrsyHh35ufu7jhuOT1yMo93sF/AI3s4DAe8woGETqhQOJDrRwvmYJJIsNh79fhxeBewn9pardZjdZSJ1S+bjjRU/sPi8fVhnz1Kjefbejkz57QgdahQhf4AXMzefpEXlglQtEsixyDhqFgsmNjmVIJ/Mf0/2nb2IrWf2M+TezjzXrBprDl7g+S0dMZkTsaXkxeQRTTX359lvveZkMLYuewfPcConJsnCF+uPsuFYBIcTltO/9kNUyl+QV36bjMnNypKe77Js3zkCfTzo3aCs075bw0/w9Dr7fZ5Blmb83v8TR5mN5znPNmeJrsUb9d8mJvUCUw8PIzW+AjM7fUiIry/7z8XwYI0imN1MVJ7aEXffk1hiqvJSzXeYtX03UQGf45VwH7uHjuFibCJNpz9HqsWf/H4BxPjYA1p98yTaVy3DykPHeL9Lc4YvXsa56MsMf6AZgzd1xLCZ2fDoVvL6eWMYBi8tXszyCx+D1RfzdYHz5UrzeGfD1zccDrn+4e0MXfQL52OvUDZvETbEvuu4X9Aw3JjR4lcqFyjEgl1n2BN5gGUX38WweWJLLkDt4HbM7ZX+uIMr8cnc931z3NxjsVmCSLncmABPX0qG+HPMZv9ZPVToA0a1aQvA/VPHcsV7AWDC5GbBMMzs6LGTTccuYbHaaHvNhb9uM//HUdJPwG2pAeRNfJT41GgG39uVsEuxzDu0AK8CK53eX0WPx3m7eR+6r7T/XzFS8lHZ/AIFAzxZn/AqN/JcxQ8Z2MA+BG5B6GFeX/+e4z7HOj6D2JXoPF1/SXNHwq32CxEru25l/Mo/6FqjIs0qFCXVaqPTtK85kbiRlkW74u3hwdLzkzCZbE4T47x/71zaVaqWoS6xSRZe+XkV5+Oj+Oaxx5x+7wG0+eY9Tqf8TpDtXjYPegurzaD6/x7FzeucY6ipT0odEj3tM6x6Jtdi5zPpQ4qfXjSZrdEZnwN1vefLzqV/46pUmjwAz7xbqOvzPDMefZrwqHgeXGKfMTE1rgI18zZkf4o9XD1b8UM+2fmFYxg8wJxWq6hepBDRiRZSrTaSUlNpO/NVUmwWPPPaH4Q+of4cqhUsTctvRjtd4LyWNbEozQv0ZWJH+wgVi9WGxWpw+upFev7W1rFdyuUGHH7xKwDqT3uYRLNzqKrm3YP8npVYdfI3ivqWJcJsn+kuv601655Jv7d3/ZGLlC/gT5FgH1KtNtp8/QmnLl/B0zMZb3d3Vvd/i0vxybyweC6nPD7JtM6Bcd1Z/cwreHuYuRibRLvp7xBjuYrZZGZWtxF8sXUTB6OO8up9j/Dxzq+JNNvblDnqMbYOGeH4O73vbDQXYpI4HhlLjwal8POy96P0/2E2myIX4ZvYklL5fBjTpiNVi+RxqsOuU5d4+dfZFAsows6kj8Fkw7AEOfWuftTgR+oULUWIf/qMob8dOI+/lzsNyuaj6id9cAvchRF9L7ufneryc2Dd43QTCk5yu4RdiufhqZuJik/h/vL5+LbfvRm2SbJYafnRevL5e/Fl77o0mvIxXoUWknS2B8dHZn2j6vXuZJs5fD6WQbN3UjjIh2/71Xf0fNUc/xWp+aeRHNmOYyPS/wjYbAZT1x8nf4CXU1C5VdN+D8PDbMq0F/D4xTjaT1lDt9qleK9bjb91nJOX4olPSaVqkYxTPGcWNj9dc5SrCRYGP1COeybMpXz+/CwdbL+5PTI2ifhkK6Xz+WUo6++4FJeM2WQij9+Ng/G1hi78hd+i38BypRGHhk51ms75Rm0m1Wqj0vgJmNzjMZkTMPse47mqYxnc1P5MoOgEC/EpqTT7aiTuAXt5vtoEBt5X0+m4i3afYe7Ow3z2+H3ku+YPZJotJyLp89P7FPSowph2D9K84l/PmTpximc3dgDg5epT6FOneYZ9c8pmM3C75kLE6F+3c/pKNHWKFWXbqdOM73w/zWb1w80zktS4qoxtPpDH6lS+YXnxyan4epr/el5YIkkWK2Xy+99w+6i4BJp83wqTOYnXqs2g5z3p03hX/fo+3DyiSblaD1NKMaY8+DStK5cgOdXKVxtO0LhcPmqXyJOhzBlb/+TdDd8x5cHnaFulJDabwY+7ztCwTIjjcRKHzsfg5+lOVHwC3edPoFGRBnz9+EOZ1tEwDF5csJqiQUEMb5k+w19iipWVB85TOsSPYUu+p3bB8iw5/wlGfCUODR+PzWbQYvrrXHJfgmG4OT2vbG+f9CvtNpvB9M3H8HL34Kudi/Fy92DZ0wMdF4hSrTYW7DpLgLc795YJwd/LHU935/9vQxcsY234ZtqVbcqods3xMLux+shxhq4fSGpsdf584UPHyd/pywn8succ8bZzfHPgC3pU7M9brTNvSxdjkxm+eAXl8hRlwZEl3Fe8NlMecn7Ad3KqlSHzl3IgKozoAPuJ8tu1ZtOmYkUazbPPuudnrc7Wp+y9KS/8tISDl07j5pbKKesyTOZ43DwvY00qyC9df6RsgWBH2WevJvL9zn2EX7nCQ7VLMfh355/Ry1W/4r0tn+BpLcafQydwPcMwOHMlkUJB3phNJlYeOE+qzWDE7vT7k7Y+vgs/r5z/XbgSn8LhC7HUKBaEr6f9BHrzsUucuhLH+M2fYvH9g5drTGHK9mkkcoEPm47nwerpj3WY9PsSph0fcaPiHdLayp7Tl/n0902806EVBf664NTos3FEey2nT+l3SXY7yw+n7A+u/qrZz0TFwotLZuOeZyOpcRU4POyTDFPWX4lPITY5mTZz+2AY7ux6eg6+Xu7MC93GuD32CVkKxb9ItPUcCf4/Us/nRZ6p14HaJfLg4+l80c9qM6j1rf3vTGp8aeY8OJ3aJewXQ99cuoxFF17HcvVezD6nMKUU47uu46hZND8Wm42UVBt/nLhMyRBfQvy9MoTU6xmGgWGAzTCc/vZ0mfsKx1Och8q7pRZgZ99VTttZrDbMJhMmExk+k12nIunz0wd0LN+Sdzq0
zvL5TWnlhUfFUza/f5bbW6w2lu87T/mC/vh7ufPK8m/Ym2J/9tzqblsoEHDj35frj5xm7JpFdPAvzODHOrv8HFjB6SYUnOR2ioxN4oftp+ndqJRjGOL1bDbD8Uux/8wd/HbwAo/VK86Eh3MeAO50m0n7dXDtL8zl+yIY+N1OhraswNCWFW77MbMrMcWKt4dbtn753ymxSRa83M0ZTvZcLSI6kadmbqBPg4o8Xr+U07qbtZlTUQkkWFK5HJfCn2ej6X9f6QzBcfvJy2w+FsXgB8rluIcU7BcY8vl7OnoRwf4Ht9qn/TB7XWB+52+pWjjkJiXcPmsPRZJitVG7eLDjhO12GvXrNpIsqbzbqaFTiBu7fD2z9y7j20deol7JfDlqw9cHwptJSEnFy918Sz+na1ksFqbMXcqjHR6geIh9uPXu0xfpPv89WpRowtoL83Dz34c1sSgHBmZ+NT+z3yW3yjAMVuw/T/ViwRQN9smw3mYzOHEpnrL5/bJ1vOsf4n29q/EpNPvmTTxNPvz+7Nt4urtRfaY9CBczt2JZT+cpvWOTLFyMTebIhTiSU624u5noUOPGD1lNslhpNPVtfM3+RCadxeQex7ZnviAyJplgX09H73N21Pz8Say+OyltHcQv/QZmvUMOhUfFs+dMNB1rFObMlUQSUqxULBTgtM3GE8cYtLFrpvtbrzbAHLwV99hm7B6ceS8KQGRMEltORNGxRhGmbP6Vb/4KYtue2I3PX2Fuw5GLlAzxpWTIjS9U7Qy/jNnNzXEbQUJKKg98MYH8vvn4pd8AIqITWRR6iu71y9ywBxug8uddcfc7RmP/V/jfQ+mzjSZZrEzffJTWVYoRGZNMqXy+GW4vuB26fj+cY8nO/7f8LHXZ2n/GbT/W7fLxpuV8fWw4kL0H7eamc2AFp5tQcBJXik60sGxvBF1qF72lGyJd1Wai4pLJ6+fp0tAitya3/p5ZffACpy8n0Ldx5sMP/00Mw8BiNXJd4L6RG7WZJIsVT7Mbyw6GMWzZp7zV5Cl61s84NOzf4GpCCm5uJscFsSr/a4/Z5zS9in/MKw88kMXeWUuy2O+3upKQQrLFRqlb7LVec/gsSw8cZWS7xje8eHenJaakUn9u5rOmftVkFaNX/cyHDz5C1SJ5M93mehdiEmkybTB53EuydfDteYxFTv267wRzdu3kk26dyJtJb/qdNmDhV2yOmQJA2+AP+CX8Ox6r0IPRbW/8rCxXu5qQwn1fDqewbwlWPfNSltvnpr9NmlVPJJcK8vHg8WsmqLhbhLjgD4f8u7WoXNDVVfjHmEwmPN3v/osOaRd7OlQtQ4eqmT+Q+t/i+vtH36gziY0nTjC0SbPbUn7aZ/l3eyseqFiUByoWzXrDO8jH052Sqc9y5HIYlYNqUyokhJWRn1A/b2calC7EsmcGZF3INQoG+rDhqc/w93LdKWqHamXoUK1M1hveIaMf6EHHmedoU7YB4zq0YVh0UwrdgV7y2ynY15Otz07E6y65QHSrFJxEREREbqJH/Yr0qF/R1dXIteb36MfSZcvp9KC992DHyXuocN2QvpzIyWQ7/0aFgnzZ/vzbjtd3YjjgneDKsPtP+fe/QxERERG5Y9zNblzb0VCvVPaG5Yncbf7d/WkiIiIiIiK3gYKTiIiIiIhIFhScREREREREsqDgJCIiIiIikgUFJxERERERkSwoOImIiIiIiGRBwUlERERERCQLCk4iIiIiIiJZUHASERERERHJgoKTiIiIiIhIFhScREREREREsqDgJCIiIiIikgUFJxERERERkSwoOImIiIiIiGRBwUlERERERCQLCk4iIiIiIiJZUHASERERERHJgoKTiIiIiIhIFhScREREREREsqDgJCIiIiIikgUFJxERERERkSwoOImIiIiIiGRBwUlERERERCQLtxScTp8+zZkzZxyvt23bxtChQ/nyyy9vW8VERERERERyi1sKTt27d2ft2rUAnD9/nlatWrFt2zZef/11xowZk6OyPv/8c0qXLo23tzd169Zl48aNN90+OTmZN954g5IlS+Ll5UXZsmWZNm3arbwNERERERGRbLml4LRv3z7q168PwA8//EC1atXYvHkzc+bMYcaMGdkuZ968eQwdOpQ33niD3bt3c//999OuXTtOnTp1w30effRRVq9ezTfffMPhw4eZO3culSpVupW3ISIiIiIiki3ut7KTxWLBy8sLgN9++41OnToBUKlSJSIiIrJdzkcffUS/fv3o378/AJMnT2bFihVMnTqV9957L8P2y5cvZ/369Zw4cYK8efMCUKpUqVt5CyIiIiIiItl2S8GpatWq/O9//6NDhw6sWrWKsWPHAnDu3DlCQkKyVUZKSgo7d+7ktddec1reunVrNm/enOk+ixcvpl69erz//vt8++23+Pn50alTJ8aOHYuPj0+m+yQnJ5OcnOx4HRMTA9jDn8ViyVZd75S047u6HnL3UJuRnFKbkZxSm5GcUpuRnMpNbSYndbil4DRhwgS6du3KBx98QJ8+fahZsyZgDzZpQ/iycunSJaxWKwULFnRaXrBgQc6fP5/pPidOnOD333/H29ubhQsXcunSJQYNGsTly5dveJ/Te++9x+jRozMsX7lyJb6+vtmq6522atUqV1dB7jJqM5JTajOSU2ozklNqM5JTuaHNJCQkZHvbWwpOzZo149KlS8TExJAnTx7H8meeeSbHYcRkMjm9Ngwjw7I0NpsNk8nE7NmzCQoKAuzD/R5++GE+++yzTHudRowYwbBhwxyvY2JiKF68OK1btyYwMDBHdb3dLBYLq1atolWrVnh4eLi0LnJ3UJuRnFKbkZxSm5GcUpuRnMpNbSZtNFp23FJwSkxMxDAMR2gKDw9n4cKFVK5cmTZt2mSrjHz58mE2mzP0LkVGRmbohUpTuHBhihYt6ghNAJUrV8YwDM6cOUP58uUz7OPl5eW4H+taHh4eLv9BpclNdZG7g9qM5JTajOSU2ozklNqM5FRuaDM5Of4tzarXuXNnZs2aBcDVq1e59957mThxIl26dGHq1KnZKsPT05O6detm6KJbtWoVjRo1ynSfxo0bc+7cOeLi4hzLjhw5gpubG8WKFbuVtyIiIiIiIpKlWwpOu3bt4v777wfgxx9/pGDBgoSHhzNr1iymTJmS7XKGDRvG119/zbRp0zh48CAvvvgip06dYuDAgYB9mF3v3r0d23fv3p2QkBCefPJJDhw4wIYNGxg+fDhPPfXUDSeHEBERERER+btuaaheQkICAQEBgH2ShW7duuHm5kaDBg0IDw/PdjmPPfYYUVFRjBkzhoiICKpVq8bSpUspWbIkABEREU7PdPL392fVqlUMGTKEevXqERISwqOPPsq4ceNu5W2IiIiIiIhkyy0Fp3LlyrFo0SK6du3KihUrePHFFwH7/Uk5nXBh0KBBDBo0KNN1mT1Mt1KlSrliBg4REREREfnvuKWheiNHjuTll1+mVKlS1K9fn4YNGwL23qfatWvf1gqKiIiIiIi42i31OD388MPcd999REREOJ7hBNCiRQu6du1
62yonIiIiIiKSG9xScAIoVKgQhQoV4syZM5hMJooWLZrth9+KiIiIiIjcTW5pqJ7NZmPMmDEEBQVRsmRJSpQoQXBwMGPHjsVms93uOoqIiIiIiLjULfU4vfHGG3zzzTeMHz+exo0bYxgGmzZtYtSoUSQlJfHOO+/c7nqKiIiIiIi4zC0Fp5kzZ/L111/TqVMnx7KaNWtStGhRBg0apOAkIiIiIiL/Krc0VO/y5ctUqlQpw/JKlSpx+fLlv10pERERERGR3OSWglPNmjX59NNPMyz/9NNPqVGjxt+ulIiIiIiISG5yS0P13n//fTp06MBvv/1Gw4YNMZlMbN68mdOnT7N06dLbXUcRERERERGXuqUep6ZNm3LkyBG6du3K1atXuXz5Mt26dWP//v1Mnz79dtdRRERERETEpW75OU5FihTJMAnEnj17mDlzJtOmTfvbFRMREREREcktbqnHSURERERE5L9EwUlERERERCQLCk4iIiIiIiJZyNE9Tt26dbvp+qtXr/6duoiIiIiIiORKOQpOQUFBWa7v3bv336qQiIiIiIhIbpOj4KSpxkVERERE5L9I9ziJiIiIiIhkQcFJREREREQkCwpOIiIiIiIiWVBwEhERERERyYKCk4iIiIiISBYUnERERERERLKg4CQiIiIiIpIFBScREREREZEsKDiJiIiIiIhkQcFJREREREQkCwpOIiIiIiIiWVBwEhERERERyYKCk4iIiIiISBYUnERERERERLKg4CQiIiIiIpIFBScREREREZEsKDiJiIiIiIhkQcFJREREREQkCwpOIiIiIiIiWVBwEhERERERyYKCk4iIiIiISBYUnERERERERLKg4CQiIiIiIpIFBScREREREZEsKDiJiIiIiIhkQcFJREREREQkCwpOIiIiIiIiWVBwEhERERERyYKCk4iIiIiISBYUnERERERERLKg4CQiIiIiIpIFBScREREREZEsKDiJiIiIiIhkQcFJREREREQkCwpOIiIiIiIiWVBwEhERERERyYKCk4iIiIiISBYUnERERERERLKg4CQiIiIiIpIFBScREREREZEsKDiJiIiIiIhkQcFJREREREQkCwpOIiIiIiIiWVBwEhERERERyYKCk4iIiIiISBZcHpw+//xzSpcujbe3N3Xr1mXjxo3Z2m/Tpk24u7tTq1atO1tBERERERH5z3NpcJo3bx5Dhw7ljTfeYPfu3dx///20a9eOU6dO3XS/6OhoevfuTYsWLf6hmoqIiIiIyH+ZuysP/tFHH9GvXz/69+8PwOTJk1mxYgVTp07lvffeu+F+AwYMoHv37pjNZhYtWnTTYyQnJ5OcnOx4HRMTA4DFYsFisfz9N/E3pB3f1fWQu4fajOSU2ozklNqM5JTajORUbmozOamDyTAM4w7W5YZSUlLw9fVl/vz5dO3a1bH8hRdeIDQ0lPXr12e63/Tp0/n888/ZsmUL48aNY9GiRYSGht7wOKNGjWL06NEZls+ZMwdfX9+//T5EREREROTulJCQQPfu3YmOjiYwMPCm27qsx+nSpUtYrVYKFizotLxgwYKcP38+032OHj3Ka6+9xsaNG3F3z17VR4wYwbBhwxyvY2JiKF68OK1bt87yw7nTLBYLq1atolWrVnh4eLi0LnJ3UJuRnFKbkZxSm5GcUpuRnMpNbSZtNFp2uHSoHoDJZHJ6bRhGhmUAVquV7t27M3r0aCpUqJDt8r28vPDy8sqw3MPDw+U/qDS5qS5yd1CbkZxSm5GcUpuRnFKbkZzKDW0mJ8d3WXDKly8fZrM5Q+9SZGRkhl4ogNjYWHbs2MHu3bsZPHgwADabDcMwcHd3Z+XKlTzwwAP/SN1FREREROS/xWWz6nl6elK3bl1WrVrltHzVqlU0atQow/aBgYHs3buX0NBQx9fAgQOpWLEioaGh3Hvvvf9U1UVERERE5D/GpUP1hg0bRq9evahXrx4NGzbkyy+/5NSpUwwcOBCw35909uxZZs2ahZubG9WqVXPav0CBAnh7e2dYLiIiIiIicju5NDg99thjREVFMWbMGCIiIqhWrRpLly6lZMmSAERERGT5TCcREREREZE7zeWTQwwaNIhBgwZlum7GjBk33XfUqFGMGjXq9ldKRERERETkGi67x0lEREREROT/7d17cFTl/cfxz+a2JDFkgJiEQMAwpUUMKCRUbpaiErloh9ZWyyCX1tZJuRTIWC5Fh0tVmOmUojOSKgWcDigMI1rsZJRoFRWi+AsEI6C2UwSEpFzEJJKS2z6/P5Ys2eyGJytJTi7v18zOnn3O95zznJMvDB9296SjIDgBAAAAgAXBCQAAAAAsCE4AAAAAYEFwAgAAAAALghMAAAAAWBCcAAAAAMCC4AQAAAAAFgQnAAAAALAgOAEAAACABcEJAAAAACwITgAAAABgQXACAAAAAAuCEwAAAABYEJwAAAAAwILgBAAAAAAWBCcAAAAAsCA4AQAAAIAFwQkAAAAALAhOAAAAAGBBcAIAAAAAC4ITAAAAAFgQnAAAAADAguAEAAAAABYEJwAAAACwIDgBAAAAgAXBCQAAAAAsCE4AAAAAYEFwAgAAAAALghMAAAAAWBCcAAAAAMCC4AQAAAAAFgQnAAAAALAgOAEAAACABcEJAAAAACwITgAAAABgQXACAAAAAAuCEwAAAABYEJwAAAAAwILgBAAAAAAWBCcAAAAAsCA4AQAAAIAFwQkAAAAALAhOAAAAAGBBcAIAAAAAC4ITAAAAAFgQnAAAAADAguAEAAAAABYEJwAAAACwIDgBAAAAgAXBCQAAAAAsCE4AAAAAYEFwAgAAAAALghMAAAAAWBCcAAAAAMCC4AQAAAAAFgQnAAAAALAgOAEAAACABcEJAAAAACwITgAAAABgQXACAAAAAAvHg9OGDRuUlpambt26KSMjQ++9916Ttbt27dKECRN04403qnv37ho1apTeeOONNpwtAAAAgK7I0eC0Y8cOLVy4UMuXL9ehQ4d0xx13aNKkSTp58mTQ+nfffVcTJkxQXl6eCgsLNX78eN133306dOhQG88cAAAAQFfiaHBat26dHn74Yf3qV7/SzTffrPXr1ys1NVW5ublB69evX6/FixdrxIgRGjhwoJ566ikNHDhQr732WhvPHAAAAEBXEuHUgaurq1VYWKilS5f6jWdlZWn//v3N2ofH41FFRYV69uzZZE1VVZWqqqp8r8vLyyVJNTU1qqmp+RYzbzn1x3d6Hug46BmEip5BqOgZhIqeQajaU8+EMgfHgtP58+dVV1enpKQkv/GkpCSVlpY2ax9/+tOfdOnSJT3wwANN1qxZs0arVq0KGN+zZ49iYmJCm3Qryc/Pd3oK6GDoGYSKnkGo6BmEip5BqNpDz1RWVja71rHgVM/lcvm9NsYEjAXz0ksvaeXKlfr73/+uxMTEJuuWLVumnJwc3+vy8nKlpqYqKytL3bt3//YTbwE1NTXKz8/XhAkTFBkZ6ehc0DHQMwgVPYNQ0TMIFT2DULWnnqn/NFpzOBacEhISFB4eHvDu0tmzZwPehW
psx44devjhh7Vz507dfffd16x1u91yu90B45GRkY7/oOq1p7mgY6BnECp6BqGiZxAqegahag89E8rxHbs5RFRUlDIyMgLeosvPz9fo0aOb3O6ll17S7Nmz9eKLL2rKlCmtPU0AAAAAcPajejk5OZoxY4YyMzM1atQoPf/88zp58qSys7MleT9md/r0af3tb3+T5A1NM2fO1NNPP62RI0f63q2Kjo5WfHy8Y+cBAAAAoHNzNDg9+OCDunDhglavXq2SkhKlp6crLy9P/fv3lySVlJT4/U6n5557TrW1tZo7d67mzp3rG581a5ZeeOGFtp4+AAAAgC7C8ZtDzJkzR3PmzAm6rnEYeuedd1p/QgAAAADQiKO/ABcAAAAAOgKCEwAAAABYEJwAAAAAwILgBAAAAAAWBCcAAAAAsCA4AQAAAIAFwQkAAAAALAhOAAAAAGBBcAIAAAAAC4ITAAAAAFgQnAAAAADAguAEAAAAABYEJwAAAACwIDgBAAAAgAXBCQAAAAAsCE4AAAAAYEFwAgAAAAALghMAAAAAWBCcAAAAAMCC4AQAAAAAFgQnAAAAALAgOAEAAACABcEJAAAAACwITgAAAABgQXACAAAAAAuCEwAAAABYEJwAAAAAwILgBAAAAAAWBCcAAAAAsCA4AQAAAIAFwQkAAAAALAhOAAAAAGBBcAIAAAAAC4ITAAAAAFgQnAAAAADAguAEAAAAABYEJwAAAACwIDgBAAAAgAXBCQAAAAAsCE4AAAAAYEFwAgAAAAALghMAAAAAWBCcAAAAAMCC4AQAAAAAFgQnAAAAALAgOAEAAACABcEJAAAAACwITgAAAABgQXACAAAAAAuCEwAAAABYEJwAAAAAwILgBAAAAAAWBCcAAAAAsCA4AQAAAIAFwQkAAAAALAhOAAAAAGBBcHLYDZfPOD0FAAAAABYEJwe5vnhPdx1bqvDX5kuVXzk9HQAAAABNIDg5yHW6UEYuhX38kvT0bdLry6QTBVJdrdNTAwAAANBAhNMT6Mo8YxZq32mX7vh6p1znPpU+2OB9uOOllNuk5CFS4mCpR38pPlXqniKFRzo9bQAAAKDLITg57OINA1X7s3cVeeJd6fBL0r/fki5/LR3f63005AqTbkiWYhOkmJ5SdE8pptfV5W7xkvsGKSpWioq78hwrua8sR7gdOUcAAACgo3M8OG3YsEF//OMfVVJSoltuuUXr16/XHXfc0WT93r17lZOToyNHjiglJUWLFy9WdnZ2G864FbjCpIETvA9PnVT6sVRaLJV+Ip37VCo7JZV9KdVVSxVnvI9vIyxSioqRIrp5Q1RENyncfXU56HOj9WGRUniUFB5xZTnyynOEd7x+uX6db31TdVcernApLNx7LVyulr2+AAAAwHVyNDjt2LFDCxcu1IYNGzRmzBg999xzmjRpko4ePap+/foF1B8/flyTJ0/Wr3/9a23dulX79u3TnDlzdOONN+r+++934AxaQVi4lDLM+2jI45EunfMGqMoL0v++8t5QouHz5XKp+hup+pJUVeF9rv5Gqr18ZR810uUySWVtflohqQ9RvkAV5h+uwiK8AcuvJsJb51sOb1Af3mg8rFFNw23rw1vDhyvIWCjrW6rGJVedUULFEbm+iJMio0Lfh1zXMRfXle0bLjd4JvACAIBOzGWMMU4d/Pbbb9fw4cOVm5vrG7v55ps1depUrVmzJqB+yZIl2r17t44dO+Yby87O1uHDh1VQUNCsY5aXlys+Pl5lZWXq3r379Z/EdaipqVFeXp4mT56syMhW/O5SXe3VQFV9SaqrkmqrvIGqtvGy5dlT433nq65G8tReea7xHsNT433tGwtSU1ftXfZwA4zOqXGYCgscCwhfCjJm2Uewuib3EexYTe234Vij85Gujgesb6o2lPUKvv5b7evqeo/Ho1NffqnU1FSFhYc7PG9dXee3bZB11zXeeF1T4y15jBY6j5COH2p98+ZVW1en4uJiDRkyVBH1PdOS820xrbz/NvnPoM5wDlJtXZ2Kiop02223Xe2ZlsDPoDkHaOXdt87+a+vqdPDgQQ2bOl+R8UmtcozmCiUbOPaOU3V1tQoLC7V06VK/8aysLO3fvz/oNgUFBcrKyvIbu+eee7Rp0ybV1NQEDR9VVVWqqqryvS4vL5fkDS01NTXXexrXpf74bTKPiFjvI6b1D9UsxnjDk6fG+/FE4/E+e2olU/+69sq6uivrvMsuT21gvcfTYLku8Lnhth7PlfEmtpXxzs94gjwCx10N16l521x7vOnjGONRRXm54m6IkUvmOo5hrpxnkHO5vh/slX17F+G8MEn9JYnfeIBmipA0TJJOOjwRdBgRkjIl6Qtn54GOI0LS9yVdPneP97v6Dgrl3+GOBafz58+rrq5OSUn+KTMpKUmlpaVBtyktLQ1aX1tbq/Pnz6t3794B26xZs0arVq0KGN+zZ49iYtpHisjPz3d6Cp1Q+JVHG2jwH+xtok8r7/9KqHLJI1f9svF4g5q3wDfurfNu41L9mK6Euvr1xrdf72Xy+LbxbS+PXEZSkGMEO65L9bUN51J/XMklT6NtGh9Xvvm6GoS9hudwdb4Njh9wjXwD3u2N8XsdsE+/a3V122vv11y5Nldf++27wbqA/ZomjlO/HxN8jg2vydX1Te/36vVXg+0aX79ArkbXxVrT1L4abev/xzH49n41Aedj20+jOnOtumD7NcGGr13XZE3D4aavVXOuScPFZl33xnXNug7Xp+lr0p610rVw7gND31JHm29r9lsr7beVdtuaf+4O/9/Hqjji7P/sVVZWNrvW8ZtDuBq9BWiMCRiz1Qcbr7ds2TLl5OT4XpeXlys1NVVZWVnt4qN6+fn5mjBhQut+VA+dBj2DUNEzCBU9g1DRMwhVe+qZ+k+jNYdjwSkhIUHh4eEB7y6dPXs24F2lesnJyUHrIyIi1KtXr6DbuN1uud2Bt+GOjIx0/AdVrz3NBR0DPYNQ0TMIFT2DUNEzCFV76JlQjh/WivO4pqioKGVkZAR8TC0/P1+jR48Ous2oUaMC6vfs2aPMzEzHLzoAAACAzsux4CRJOTk5+utf/6rNmzfr2LFjWrRokU6ePOn7vUzLli3TzJkzffXZ2dk6ceKEcnJydOzYMW3evFmbNm3So48+6tQpAAAAAOgCHP2O04MPPqgLFy5o9erVKikpUXp6uvLy8tS/f39JUklJiU6evHpbn7S0NOXl5WnRokV69tlnlZKSomeeeabz/A4nAAAAAO2S4zeHmDNnjubMmRN03QsvvBAwNm7cOB08eLCVZwUAAAAAVzn6UT0AAAAA6AgITgAAAABgQXACAAAAAAuCEwAAAABYEJwAAAAAwILgBAAAAAAWBCcAAAAAsCA4AQAAAIAFwQkAAAAALAhOAAAAAGBBcAIAAAAAC4ITAAAAAFgQnAAAAADAIsLpCbQ1Y4wkqby83OGZSDU1NaqsrFR5ebkiIyOdng46AHoGoaJnECp6BqGiZxCq9tQz9ZmgPiNcS5cLThUVFZKk1NRUh2cCAAAAoD2oqKhQfHz8NWtcp
jnxqhPxeDw6c+aM4uLi5HK5HJ1LeXm5UlNTderUKXXv3t3RuaBjoGcQKnoGoaJnECp6BqFqTz1jjFFFRYVSUlIUFnbtbzF1uXecwsLC1LdvX6en4ad79+6ONw06FnoGoaJnECp6BqGiZxCq9tIztnea6nFzCAAAAACwIDgBAAAAgAXByUFut1srVqyQ2+12eiroIOgZhIqeQajoGYSKnkGoOmrPdLmbQwAAAABAqHjHCQAAAAAsCE4AAAAAYEFwAgAAAAALghMAAAAAWBCcHLJhwwalpaWpW7duysjI0Hvvvef0lOCQNWvWaMSIEYqLi1NiYqKmTp2qzz77zK/GGKOVK1cqJSVF0dHR+uEPf6gjR4741VRVVWn+/PlKSEhQbGysfvSjH+nLL79sy1OBA9asWSOXy6WFCxf6xugXBHP69Gk99NBD6tWrl2JiYnTbbbepsLDQt56+QUO1tbV67LHHlJaWpujoaA0YMECrV6+Wx+Px1dAzXdu7776r++67TykpKXK5XHr11Vf91rdUf1y8eFEzZsxQfHy84uPjNWPGDH399detfHZNMGhz27dvN5GRkWbjxo3m6NGjZsGCBSY2NtacOHHC6anBAffcc4/ZsmWL+eSTT0xRUZGZMmWK6devn/nmm298NWvXrjVxcXHm5ZdfNsXFxebBBx80vXv3NuXl5b6a7Oxs06dPH5Ofn28OHjxoxo8fb2699VZTW1vrxGmhDRw4cMDcdNNNZujQoWbBggW+cfoFjX311Vemf//+Zvbs2ebDDz80x48fN2+++ab597//7auhb9DQE088YXr16mX+8Y9/mOPHj5udO3eaG264waxfv95XQ890bXl5eWb58uXm5ZdfNpLMK6+84re+pfpj4sSJJj093ezfv9/s37/fpKenm3vvvbetTtMPwckB3//+9012drbf2KBBg8zSpUsdmhHak7NnzxpJZu/evcYYYzwej0lOTjZr16711Vy+fNnEx8ebv/zlL8YYY77++msTGRlptm/f7qs5ffq0CQsLM6+//nrbngDaREVFhRk4cKDJz88348aN8wUn+gXBLFmyxIwdO7bJ9fQNGpsyZYr55S9/6Tf2k5/8xDz00EPGGHoG/hoHp5bqj6NHjxpJ5oMPPvDVFBQUGEnm008/beWzCsRH9dpYdXW1CgsLlZWV5TeelZWl/fv3OzQrtCdlZWWSpJ49e0qSjh8/rtLSUr+ecbvdGjdunK9nCgsLVVNT41eTkpKi9PR0+qqTmjt3rqZMmaK7777bb5x+QTC7d+9WZmamfvaznykxMVHDhg3Txo0bfevpGzQ2duxYvfXWW/r8888lSYcPH9b777+vyZMnS6JncG0t1R8FBQWKj4/X7bff7qsZOXKk4uPjHemhiDY/Yhd3/vx51dXVKSkpyW88KSlJpaWlDs0K7YUxRjk5ORo7dqzS09MlydcXwXrmxIkTvpqoqCj16NEjoIa+6ny2b9+ugwcP6qOPPgpYR78gmP/85z/Kzc1VTk6Ofv/73+vAgQP67W9/K7fbrZkzZ9I3CLBkyRKVlZVp0KBBCg8PV11dnZ588klNmzZNEn/X4Npaqj9KS0uVmJgYsP/ExERHeojg5BCXy+X32hgTMIauZ968efr444/1/vvvB6z7Nj1DX3U+p06d0oIFC7Rnzx5169atyTr6BQ15PB5lZmbqqaeekiQNGzZMR44cUW5urmbOnOmro29Qb8eOHdq6datefPFF3XLLLSoqKtLChQuVkpKiWbNm+eroGVxLS/RHsHqneoiP6rWxhIQEhYeHB6Tks2fPBqRydC3z58/X7t279fbbb6tv376+8eTkZEm6Zs8kJyerurpaFy9ebLIGnUNhYaHOnj2rjIwMRUREKCIiQnv37tUzzzyjiIgI38+bfkFDvXv31uDBg/3Gbr75Zp08eVISf88g0O9+9zstXbpUP//5zzVkyBDNmDFDixYt0po1ayTRM7i2luqP5ORk/fe//w3Y/7lz5xzpIYJTG4uKilJGRoby8/P9xvPz8zV69GiHZgUnGWM0b9487dq1S//85z+Vlpbmtz4tLU3Jycl+PVNdXa29e/f6eiYjI0ORkZF+NSUlJfrkk0/oq07mrrvuUnFxsYqKinyPzMxMTZ8+XUVFRRowYAD9ggBjxowJ+DUHn3/+ufr37y+Jv2cQqLKyUmFh/v9MDA8P992OnJ7BtbRUf4waNUplZWU6cOCAr+bDDz9UWVmZMz3U5rejgO925Js2bTJHjx41CxcuNLGxseaLL75wempwwG9+8xsTHx9v3nnnHVNSUuJ7VFZW+mrWrl1r4uPjza5du0xxcbGZNm1a0Ft69u3b17z55pvm4MGD5s477+SWr11Ew7vqGUO/INCBAwdMRESEefLJJ82//vUvs23bNhMTE2O2bt3qq6Fv0NCsWbNMnz59fLcj37Vrl0lISDCLFy/21dAzXVtFRYU5dOiQOXTokJFk1q1bZw4dOuT79Tot1R8TJ040Q4cONQUFBaagoMAMGTKE25F3Nc8++6zp37+/iYqKMsOHD/fdehpdj6Sgjy1btvhqPB6PWbFihUlOTjZut9v84Ac/MMXFxX77+d///mfmzZtnevbsaaKjo829995rTp482cZnAyc0Dk70C4J57bXXTHp6unG73WbQoEHm+eef91tP36Ch8vJys2DBAtOvXz/TrVs3M2DAALN8+XJTVVXlq6Fnura333476L9fZs2aZYxpuf64cOGCmT59uomLizNxcXFm+vTp5uLFi210lv5cxhjT9u9zAQAAAEDHwXecAAAAAMCC4AQAAAAAFgQnAAAAALAgOAEAAACABcEJAAAAACwITgAAAABgQXACAAAAAAuCEwAAAABYEJwAAAiBy+XSq6++6vQ0AABtjOAEAOgwZs+eLZfLFfCYOHGi01MDAHRyEU5PAACAUEycOFFbtmzxG3O73Q7NBgDQVfCOEwCgQ3G73UpOTvZ79OjRQ5L3Y3S5ubmaNGmSoqOjlZaWpp07d/ptX1xcrDvvvFPR0dHq1auXHnnkEX3zzTd+NZs3b9Ytt9wit9ut3r17a968eX7rz58/rx//+MeKiYnRwIEDtXv37tY9aQCA4whOAIBO5fHHH9f999+vw4cP66GHHtK0adN07NgxSVJlZaUmTpyoHj166KOPPtLOnTv15ptv+gWj3NxczZ07V4888oiKi4u1e/dufec73/E7xqpVq/TAAw/o448/1uTJkzV9+nR99dVXbXqeAIC25TLGGKcnAQBAc8yePVtbt25Vt27d/MaXLFmixx9/XC6XS9nZ2crNzfWtGzlypIYPH64NGzZo48aNWrJkiU6dOqXY2FhJUl5enu677z6dOXNGSUlJ6tOnj37xi1/oiSeeCDoHl8ulxx57TH/4wx8kSZcuXVJcXJzy8vL4rhUAdGJ8xwkA0KGMHz/eLxhJUs+ePX3Lo0aN8ls3atQoFRUVSZKOHTumW2+91ReaJGnMmDHyeDz67LPP5HK5dObMGd11113XnMPQoUN9y7GxsYqLi9PZs2e/7SkBADoAghMAoEOJ
jY0N+OicjcvlkiQZY3zLwWqio6Obtb/IyMiAbT0eT0hzAgB0LHzHCQDQqXzwwQcBrwcNGiRJGjx4sIqKinTp0iXf+n379iksLEzf/e53FRcXp5tuuklvvfVWm84ZAND+8Y4TAKBDqaqqUmlpqd9YRESEEhISJEk7d+5UZmamxo4dq23btunAgQPatGmTJGn69OlasWKFZs2apZUrV+rcuXOaP3++ZsyYoaSkJEnSypUrlZ2drcTERE2aNEkVFRXat2+f5s+f37YnCgBoVwhOAIAO5fXXX1fv3r39xr73ve/p008/leS949327ds1Z84cJScna9u2bRo8eLAkKSYmRm+88YYWLFigESNGKCYmRvfff7/WrVvn29esWbN0+fJl/fnPf9ajjz6qhIQE/fSnP227EwQAtEvcVQ8A0Gm4XC698sormjp1qtNTAQB0MnzHCQAAAAAsCE4AAAAAYMF3nAAAnQafPgcAtBbecQIAAAAAC4ITAAAAAFgQnAAAAADAguAEAAAAABYEJwAAAACwIDgBAAAAgAXBCQAAAAAsCE4AAAAAYPH/FgLOhMveBEIAAAAASUVORK5CYII=", + "text/plain": [ + "
" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "epochs = range(1, len(train_total_losses) + 1)\n", + "\n", + "plt.figure(figsize=(10, 6))\n", + "\n", + "plt.plot(epochs, train_task_losses, label=\"Train Task Loss\")\n", + "plt.plot(epochs, train_model_losses, label=\"Train Model Loss\")\n", + "plt.plot(epochs, train_total_losses, label=\"Train Total Loss\")\n", + "\n", + "plt.xlabel(\"Epoch\")\n", + "plt.ylabel(\"Loss\")\n", + "plt.title(\"Training & Validation Losses\")\n", + "plt.legend()\n", + "plt.grid(True)\n", + "\n", + "plt.show()" + ] }, { "cell_type": "code", - "execution_count": null, + "execution_count": 18, + "id": "7c8b2ad1-1faf-4084-b686-8e152ec30a98", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "0.019663196057081223" + ] + }, + "execution_count": 18, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "max(train_model_losses)\n", + "#min(train_task_losses)" + ] + }, + { + "cell_type": "code", + "execution_count": 19, "id": "c94d83bf-d7e6-4900-91ec-92cfc80132f9", "metadata": {}, - "outputs": [], + "outputs": [ + { + "data": { + "image/png": "iVBORw0KGgoAAAANSUhEUgAAAkAAAAGwCAYAAABB4NqyAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjguNCwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8fJSN1AAAACXBIWXMAAA9hAAAPYQGoP6dpAAB8h0lEQVR4nO3deVxU5f4H8M/MAAOooIACIiK5BIorlLmVppG2WtdcwyX9lUtel9ui1xa1hbKbV7ulpWJey9JK7VqpiZYrprlm7oYKKoi4gIpsM+f3B85wzsyZmTPDDDM4n/frxesFZ86cOefMMOd7vs/3eR6VIAgCiIiIiLyI2t07QERERFTdGAARERGR12EARERERF6HARARERF5HQZARERE5HUYABEREZHXYQBEREREXsfH3TvgifR6PS5cuIA6depApVK5e3eIiIhIAUEQcP36dTRs2BBqtfUcDwMgGRcuXEB0dLS7d4OIiIgckJ2djUaNGlldhwGQjDp16gCoOIFBQUEufS1BEND1/V9QcKscz3WNxeSHWrj09YiIiO5UhYWFiI6ONl7HrWEAJMPQ7BUUFOTyAAgAnr0/HvM3/wWNNrBaXo+IiOhOpqR8hUXQHsBXU/E2lOn0bt4TIiIi78AAyAP4qisiVQZARERE1YMBkAfw9al4G0rLBTfvCRERkXdgDZAHYBMYEZF30Ov1KC0tdfdu1Gh+fn42u7grwQDIA/hpKprAyvUMgIiI7lSlpaU4ffo09PyurxK1Wo3Y2Fj4+flVaTsMgDyAIQPEJjAiojuTIAjIycmBRqNBdHS0UzIY3sgwUHFOTg4aN25cpcGKGQB5AB82gRER3dHKy8tRVFSEhg0bIjAw0N27U6PVr18fFy5cQHl5OXx9fR3eDkNQD6C9XQRdUq5z854QEZEr6HQV3+9VbbahynNoOKeOYgDkAQL9NACAW6UMgIiI7mScX7LqnHUOGQB5gIDbAVARAyAiIqJqwQDIAwT6VZRiMQAiIiKqHgyAPECgMQNU7uY9ISIicq3u3btj4sSJ7t4N9gLzBAG+bAIjIiLPYqvWZtiwYViyZInd2121alWVem85CwMgD2DIAJWU66HTC9CoWSRHRETulZOTY/x9xYoVeOONN3D8+HHjsoCAAMn6ZWVligKbkJAQ5+1kFbAJzAMYaoAA4FYZs0BERHc6QRBQVFrulh9BUDbobkREhPEnODgYKpXK+HdxcTHq1q2Lb775Bt27d4e/vz++/PJLXL58GYMGDUKjRo0QGBiI1q1b4+uvv5Zs17QJrEmTJnj33Xfx3HPPoU6dOmjcuDEWLFjgzNMty+0ZoHnz5uGDDz5ATk4OWrVqhTlz5qBbt26y6w4fPhz//e9/zZa3bNkShw8fNv597do1TJs2DatWrcLVq1cRGxuLDz/8EI888ojLjqMq/H3VUKkAQaioA6qtdfvbQkRELnSrTIeWb/zsltc+MvNhyY13Vbz66qv48MMP8fnnn0Or1aK4uBiJiYl49dVXERQUhJ9++gkpKSm466670LFjR4vb+fDDD/HWW2/hn//8J7777juMGTMG999/P+Li4pyyn3LcmgFasWIFJk6ciGnTpmH//v3o1q0b+vTpg6ysLNn1586di5ycHONPdnY2QkJC8MwzzxjXKS0txUMPPYQzZ87gu+++w/Hjx7Fw4UJERUVV12HZTaVSIdCXYwEREVHNMnHiRDz99NOIjY1Fw4YNERUVhZdeegnt2rXDXXfdhfHjx+Phhx/Gt99+a3U7jzzyCMaOHYtmzZrh1VdfRVhYGDZv3uzSfXdrqmH27NkYOXIkRo0aBQCYM2cOfv75Z8yfPx+pqalm6wcHByM4ONj49/fff4+rV69ixIgRxmWLFy/GlStXkJGRYWyLjImJcfGRVF2Anw9ulupYCE1E5AUCfDU4MvNht722syQlJUn+1ul0eO+997BixQqcP38eJSUlKCkpQa1ataxup02bNsbfDU1teXl5TttPOW4LgEpLS7F3715MmTJFsjw5ORkZGRmKtpGWloZevXpJApw1a9agU6dOGDduHP73v/+hfv36GDx4MF599VVoNPJvuuENMigsLHTgiKomkIMhEhF5DZVK5bRmKHcyDWw+/PBD/Pvf/8acOXPQunVr1KpVCxMnTkRpaanV7ZgWT6tUKuj1rp0f021nPz8/HzqdDuHh4ZLl4eHhyM3Ntfn8nJwcrFu3Dl999ZVkeWZmJn755RcMGTIEa9euxcmTJzFu3DiUl5fjjTfekN1WamoqZsyY4fjBOAGnwyAioppu27ZtePLJJ/Hss88CqJi9/eTJk4iPj3fznplzey8w03EGBEFQNM/HkiVLULduXfTt21eyXK/Xo0GDBliwYAESExMxcOBATJs2DfPnz7
e4ralTp6KgoMD4k52d7dCxVEUAB0MkIqIarlmzZkhPT0dGRgaOHj2KF154QVFSwx3clgEKCwuDRqMxOzF5eXlmWSFTgiBg8eLFSElJMZtZNzIyEr6+vpLmrvj4eOTm5qK0tFR2Jl6tVgutVluFo6k6YwaI3eCJiKiGev3113H69Gk8/PDDCAwMxPPPP4++ffuioKDA3btmxm0BkJ+fHxITE5Geno6nnnrKuDw9PR1PPvmk1edu2bIFp06dwsiRI80e69KlC7766ivo9Xqo1RUJrhMnTiAyMlI2+PEUAb6cD4yIiDzT8OHDMXz4cOPfTZo0kR1PKCQkBN9//73VbZn27jpz5ozZOgcOHLB/J+3k1iawyZMnY9GiRVi8eDGOHj2KSZMmISsrC6NHjwZQ0TQ1dOhQs+elpaWhY8eOSEhIMHtszJgxuHz5MiZMmIATJ07gp59+wrvvvotx48a5/HiqgkXQRERE1cetJegDBgzA5cuXMXPmTOTk5CAhIQFr16419urKyckxGxOooKAAK1euxNy5c2W3GR0djQ0bNmDSpElo06YNoqKiMGHCBLz66qsuP56qMARAN4pZA0RERORqKkHpmNhepLCwEMHBwSgoKEBQUFC1vOaMHw7j8x1nAACn3ukDH43b69OJiMhJiouLcfr0acTGxsLf39/du1OjWTuX9ly/eZX1EJeuV45DdFH0OxER3TmYc6g6Z51DBkAeIk8U9JSWu3bwJyIiql6Gnsm2BgQk2wzn0NLgxkrV/GEo7xD/eKgFBiz4DQAHQyQiutP4+PggMDAQly5dgq+vr7GXMtlHr9fj0qVLCAwMhI9P1UIYBkAeouNdoYiqG4Dz127h4LlriI+so2hASCIi8nwqlQqRkZE4ffo0zp496+7dqdHUajUaN25c5WskAyAPUltb8XZMXXUIZTo9hnZq4t4dIiIip/Hz80Pz5s3ZDFZFfn5+TsmgMQDyIP5+le2Zn23JZABERHSHUavV7AXmIdgI6UECfCvfDjYPExERuQ4vsx7E37cyA6Rm/Q8REZHLMADyIAGiAEjDAIiIiMhlGAB5EHEAxPiHiIjIdRgAeRBxETSbwIiIiFyHAZAHCWANEBERUbVgAORB2ARGRERUPRgAeZAAUROYRs0IiIiIyFUYAHkQrY9oHCCmgIiIiFyGAZAHCZAUQbtxR4iIiO5wDIA8iLQGiBEQERGRqzAA8iCSgRDVKvxx7ho2Hrnoxj0iIiK6M3EyVA/ib9IE9sTHOwAAGyc/gGYNartrt4iIiO44zAB5EHERtLgJ7NzVInfsDhER0R2LAZAH8dWIe4HJLyciIqKq45XVg4jH/tEL8suJiIio6hgAeRDxDPBlOr3xdx8GQERERE7FAMiDRNULMP5eXFYZADEDRERE5FwMgDxIWG0tnusSCwC4VVru5r0hIiK6czEA8jD3xtYDANws1RmX6QXB0upERETkAAZAHkajrnhLbokCIFE5EBERETkBAyAPYyh4LhI1gen0zAARERE5EwMgD2MoeBbHPGwCIyIici4GQB5Grss7M0BERETOxQDIw8h1eddZyAAdyy3E8t1ZEJghIiIisgsnQ/UwPhrzAEhvIQPUe842AECAnwZPtoty6X4RERHdSZgB8jCGXmBitprA/jhX4KrdISIiuiO5PQCaN28eYmNj4e/vj8TERGzbts3iusOHD4dKpTL7adWqlez6y5cvh0qlQt++fV20987nK5cBstHExRYwIiIi+7g1AFqxYgUmTpyIadOmYf/+/ejWrRv69OmDrKws2fXnzp2LnJwc4092djZCQkLwzDPPmK179uxZvPTSS+jWrZurD8Op6gb6mS2zNQ4Qe4kRERHZx60B0OzZszFy5EiMGjUK8fHxmDNnDqKjozF//nzZ9YODgxEREWH82bNnD65evYoRI0ZI1tPpdBgyZAhmzJiBu+66y+Z+lJSUoLCwUPLjLqG1zAOgC9duWX0Oi6CJiIjs47YAqLS0FHv37kVycrJkeXJyMjIyMhRtIy0tDb169UJMTIxk+cyZM1G/fn2MHDlS0XZSU1MRHBxs/ImOjlZ2EC7g76sxW/bO2qM4kH3N4nPYS56IiMg+bguA8vPzodPpEB4eLlkeHh6O3Nxcm8/PycnBunXrMGrUKMnyHTt2IC0tDQsXLlS8L1OnTkVBQYHxJzs7W/Fzq8sPBy9YfIxNYERERPZxezd4lUpa9CsIgtkyOUuWLEHdunUlBc7Xr1/Hs88+i4ULFyIsLEzxPmi1Wmi1WsXru0Nxmc7iYwyAiIiI7OO2ACgsLAwajcYs25OXl2eWFTIlCAIWL16MlJQU+PlV1sz89ddfOHPmDB5//HHjMr2+ooLYx8cHx48fR9OmTZ14FK7xau84vL/+mGRZUamVAIiTpRIREdnFbU1gfn5+SExMRHp6umR5eno6OnfubPW5W7ZswalTp8xqfOLi4nDo0CEcOHDA+PPEE0+gR48eOHDggFtre+wxprt5kHa9uGJy1IxT+Vh7KEfymABmgIiIiOzh1iawyZMnIyUlBUlJSejUqRMWLFiArKwsjB49GkBFbc758+exdOlSyfPS0tLQsWNHJCQkSJb7+/ubLatbty4AmC2vaW6UlAEABi/aBQDY9koP42MsgiYiIrKPWwOgAQMG4PLly5g5cyZycnKQkJCAtWvXGnt15eTkmI0JVFBQgJUrV2Lu3Lnu2GW3uVFSLvk773qJ8XfWABEREdnH7UXQY8eOxdixY2UfW7Jkidmy4OBgFBUVKd6+3DZqAtM6oOvF5ZI5wcRj/zD+ISIiso/bp8IgeWO6N0XT+rWMf98oLkeZqNpZ3OzFDBAREZF9GAB5MHGQc/lmKS5cKzb+XW4hGCIiIiLbGAB5MNPMTo9/ba58TG95PSIiIrKOAZAH01lJ7UgyQEwBERER2YUBkAezltgpLa8MgMoZABEREdmFAZAHs5YBKhEFQOJgiIiIiGxjAOTBrNX2iIOeM5dvVsfuGJXrGHAREVHNxgDIg4kDoPtb1Jc8Js4Anb1chBe/2lct+7T9ZD5avvEzVvyeZXtlIiIiD8UAyIOJW8DuDq8teaykXDo56o9/5CDrsvIBIh31whd7UKrT49WVh1z+WkRERK7CAMiDiTNAAX7SQbtLZOp+SnWWZ4x3FpVK5fLXICIicjUGQB5MXAQd6KeRPFZSZh4AVcdwQALHHCIiojsAAyAPprcWAJWbZ3tcHZqcyruBm6WuzzIRERG5GgMgD6YTN4H5mgZA5hkgS73GnJW1eeN/fzplO0RERO7GAMiDiYugA81qgMwzMXLjBmVfKcJ9qZvw6Za/qrw/HHCRiIjuFAyAPNjHg9oDAKY/3hJ+PtK3qkimKapMZx6gvL/+GC4WluC9dceqvD8sfyYie53Ku4GMU/nu3g0iMwyAPFhyqwgce6s3hneJhdYkADor0+VdboDCMtGyySsOVKk5jB3AiMhevWZvweBFu3Di4nV37wqRBAMgD+d/u/bHNAN0NKfQbF1DBmjv2St48F+b8evxPEkz2qr953Ey74bD+6JiDoiIH
HQ8lwEQeRYGQDWEsiawimzP3+bvRGb+Tbzxvz/NusarraRxCm6VIbeg2OLjzAARkaP4/UGehgFQDeGnMX+ramulhdHler3J475mTV5y2zFoO2MD7kvdhMs3Sqqwp0RE5phBJk/DAKiGMK0BAoD5z3ZAaC0/49+mRdBaH7VZ13idghqgYxZS1byDIyKiOwUDoBrCtAkMALo1l06QuuHwRTSZ8pPxb51egGnPdbmu8krxDs5z5BUWY87GE8gpuOWy17hZUo6/zc/Agq1VH0KBiDdQ5GkYANUQcgEQIB39eeW+c5LHSsv1OHdV2lvM0mCJSvALzHM8/8VezNl4EsMX/+6y11i68yz2nr2Kd9dWfQgFIiJPwwCohrBWu2PJ8YvX8delm5JlljJAeg5yWKMcyL4GoOI9tmZ/1lWM+u/vyLxkf++/W6XljuwaEVGNwACohrCUAbKXpQBISW0Q1TxPzcvAxqN5GPXfPe7eFaf55dhFTF31B4rL7tx56QRBqFJztSdiApk8jY/tVcgTOCsAstQEJv6ytRQL7c+65pR9oOqXmX/T9ko1xHNLKoK5qLoBePHB5m7eG9cY/eVeHMi+hl9f6m42DU5NxSZ08jTMANUQjjSBybF0V2lrnq+tJy7hRgmbRGwpKCpz22sfzSnEmC/34lSedww4d8HKmFU13c+HL+JiYQl+PXbJ3bviRIyAyLMwAKohVE66fVKSAfrj/DVMX3MYBbcqL+bpRy6aPae0XI85G09gf9ZVp+xbTffv9BNoO3MD/nfgvFte/+l5GVj3Zy6eXbTbKdu7sxpgyB2qMvUOkasxAKrh7A2LZKYLu7288otq1vrjWJJxBu/+dNS4zHSQRQBYknEaczaexFPzMuzci+ohCAJmrT+Gb37PrpbXm7vpJABg2uo/q+X1TN26XROTW3jnZkbEeG31fOLEMpvAyNPcGY3LXszea4DlJjDzAOf4xes4k38T8zafwimZOcROXHR8XrHqcCD7GuZtrhjDpv890dX2unfK9/ydchzkPlUZdoPI1ZgBqqHiIuo49DzDF9LVm6WYveE4ztwujpWJfyAAGPb5bnyz5xz2yRRAqx24Qq74PQv3vbsJx3LNJ3N1tmu33FOPc6fc6Xr6petOOc93MgZA5MkYANUgY7s3rej50qMZ/vvcvQ5tY9musygqLccrK//AR7+cQt95OwDIZ4AgCDh7uch8+W3WJla15NWVh5BbWIxXvvvD7ueaKtfpPbLGQO1IZEh288C3nkyIv1b4X0Gehk1gNcgrvePwSu+4Km1j7aFc1A30w67MywCAa7d7Lck1jdm6vogLs/OuF+PnwxfRt11D1PH3tbkfpeUWipEAXL5Rgh1/XcbDrcKh9dHIrnOzpBzd/7UZ7aLrYuHQJPn9s7kXriH3uoIgOK2QnaimEGeA+PknT+P2DNC8efMQGxsLf39/JCYmYtu2bRbXHT58OFQqldlPq1atjOssXLgQ3bp1Q7169VCvXj306tULu3c7p1fMneLHgxfMltnqBi9HnOgYvHAXXv/+T8z84Yjsuuev3cI/vjlo/Nval+Ezn+3E37/ejwVbMi2us+lYHi5dL5HtnSanKpmiC9du4bQd4+iYZsbyCovR9f1fMWfjCYf3geTc+Smgmh4zSAIgN+4HkRy3BkArVqzAxIkTMW3aNOzfvx/dunVDnz59kJWVJbv+3LlzkZOTY/zJzs5GSEgInnnmGeM6mzdvxqBBg/Drr79i586daNy4MZKTk3H+vHu6JnuiwuJyFBZLx/SRmwrDVswgvtAbiqR/PZ6HW6U6fLMnG3nXK3sjjVu2TzJX2a3ScrN5ygwyb0/fseWE5TFQ7A1oxIdXVFqO/1u6B9/tPWf5CSKd3/sFPf61WfEYP6YXrU9+PYXz125hzsaTFp+z5uAF7D3rWcMJsImJquoOG8ya7jBuDYBmz56NkSNHYtSoUYiPj8ecOXMQHR2N+fPny64fHByMiIgI48+ePXtw9epVjBgxwrjOsmXLMHbsWLRr1w5xcXFYuHAh9Ho9Nm3aVF2HVSNVNQNkEOjng/fXH8Mr3/2BAZ/9ZlxumLvK4MzlInR9/1f8ZWWOqkb1AuzeJ0vENU5p204j/chFvPRtZUbqZkk5/v71fvx8ONfiNrKuWK6HEjPNbpVaGnvgtsMXCvD3r/fjb/MrhhPIu16MT7f8hcs3ShS9njXOKkc6cweNJO3pPLGuzVGCpAnM9a9XUq5DTsEt178Q3RHcFgCVlpZi7969SE5OlixPTk5GRoaycWXS0tLQq1cvxMTEWFynqKgIZWVlCAkJsbhOSUkJCgsLJT81hbO+LOVqgK7cLLX6HLlmrEA/jTGIUNJs9MvRPADAzr8uI7egGIXFlVmWKCsBkKXDvnS9BH3mbsOibdLmM3Ex5iWTwOJmSTnmbjqJNQcv4IUv9pq8TuULlckVisvQmJyXcp319yjbJLD6v6V78d66Yxi7bJ+i17PGx0kjiHf/12anbIdsu5OyJtV9LH3mbEOn1F+qpZcp1XxuC4Dy8/Oh0+kQHh4uWR4eHo7cXMt34QY5OTlYt24dRo0aZXW9KVOmICoqCr169bK4TmpqKoKDg40/0dHVN2ZMVY3r0azK21j/Zw6OXDD/wjh/zfqdlFwvsNpaH7t6h5Xq9NhxKh+DFv6G+1I34ddjecbH/C0UQJu6XlxmDFS+2HkGR3MK8bZoEEdAmgEqE2VkbpaUo9WbP2PBVvl6I/EXuNLJKU0P39ZEsxp15b+hXi/g4O1s2a7TVxS9njW+7JFW48h1HT9x8Tpu1sCpaMTHUh2JLcOcd2sP2b6GELm9CNo0i6C0t8ySJUtQt25d9O3b1+I6s2bNwtdff41Vq1bB39/f4npTp05FQUGB8Sc7u3pGDnaGkV1j8VJyiyptY/SX+/DKSvu7pcs2gWl9oLbjU1VSrkfGX/nGvycsP2D83VrgIIgKYFtP34Bp35uPvixYCF7KRBmZIznW7xTFX+BlNpqyDEwDQFuBk4/oRN5y8gznmioEQIKHFxl7YkuRXi9Y7eGohOnn5bfMy0j+91Y8/p/tVdquO4hrC6vz7fLUsP/P8wV3ZHNyTW22dVsAFBYWBo1GY5btycvLM8sKmRIEAYsXL0ZKSgr8/Pxk1/nXv/6Fd999Fxs2bECbNm2sbk+r1SIoKEjyU1OoVCq0i67nlteWG++mtlZj1gRkjbWLhbXAwbQ16qtdFYXzgdrKkR1yRJNlSgOgyifLFX9LXkf0j600A2TKVn2V+HQVlTo3APJ1UhMYKfPEJ9txX+omFFchkBVfS1QA1v9Z8R2ZWQMvnOKPfnUOiuiJvecuXS/BY//Zfsc1Jx/PvY77UjcZv4NrErd9O/r5+SExMRHp6emS5enp6ejcubPV527ZsgWnTp3CyJEjZR//4IMP8NZbb2H9+vVISpIfI+ZO4u/rORe5AF8fuwYCdDQAsvSIuJng/LXK2hpxYCGuybEV04i/s23V8hiYZsBsBVnijNQtJwdAtjJAl2+UOHz39suxi9hgpWjc1eR2++rNUuQ7oXjcUX+eL8SV
m6U4LNOkrJSk2QhAHf+aO1xbdTeBebKzl2teAKvEP1cfwsXCEvxz9SF374rd3PqfNXnyZKSkpCApKQmdOnXCggULkJWVhdGjRwOoaJo6f/48li5dKnleWloaOnbsiISEBLNtzpo1C6+//jq++uorNGnSxJhhql27NmrXru36g3IDZxW62kuuSSjAT22WAbIWAFy4dgvNGsi/L7ZqZ+RcF3XvLymr3L9us35F20bB0PpqECS6oFgbR2jjkYuSQmylPeVMm8BsPU8cBBaVObfOw1oAtO3kJaSk7cbT7aMwe0A7AMDRnEKcuHgdT7aLsrrd4jIdnluyBwDwx/RkBCkY/NLVdHoB7d+quKE69lZv+PsqqyFzFnEgWZXSK9PP/Z0TAFVjBkjUCCYIAorL9Ajwq/g83Cgpx/vrjuHxtg1xb6zlzjHOdqfGf470IPYUbv3PGjBgAC5fvoyZM2ciJycHCQkJWLt2rbFXV05OjtmYQAUFBVi5ciXmzp0ru8158+ahtLQU/fr1kyx/8803MX36dJcch7u5K9srFwDp9OYBwBkrdz7rD+ei7mn5i6e1wMnSl2mhaP4v0+7nB88VAADq19Ealy3ecVp2O9tOXsKopXsky8oV1gCZvh+2MkClusqsj7ObwKwVpP/nl1MAgFX7zxsDoD5zKwYiDauttfQ0ANL3/kZxuVsDoB//uIBG9QLRXBRI598oQaN6gdW6H+ILQZVqr0ymjxCPrF5cpqv2wK4qxP+mrrxMbjt5CR/f/jwD0iawoYt3Y8epfOye1gthtbWYveEEvvjtLL747SzOvPeoC/dKSlIPdQeNDK+pwYfh9luLsWPHYuzYsbKPLVmyxGxZcHAwioosj8dy5swZJ+1ZzWHpi8VXo5I0rzibOMNioNPrJV8+peV6HM25bnU71ywMMGgt3rB0VOIBHi01rykpUt1zxnxQwurIADm7CUzcHKfTC4ovzEdzCq02WYgfcueEl7tPX8GLX+0HUJGJMnBknjolcguKofVRo14t89pDcRNpVV7f9HwGiAKegltl1RoApa49Cq2vBpMfcqyjhfhYXPk5SUmTjvYvPvvbTlZ0slj3Zy5S7ouxOvaYK0nroWp24CBWlWDf3TyneIQcFhdRB0H+PmjWoDbuDq+cJf6fj8S79HXlBvjT6aX/EHe/vg5rD+U4tH2rX5gWHrouGkfoerF8c5LS3lymZCeMlWPaDd6eJjBn1wCJLsSOHrdBuYXicUevaz/9kYMxX+7FDQe7dwsQsOVE5bAJ4gDE8Blcte8cPvn1lNlz7aHTC/jpjxycyqso9jQ0s5lSOk6UzdczqQES98azdLPgCuev3cJnWzPx0aaTDvdsM73oi33521ks23W2CntomeFj/+f5AuMyf5+Ky527wnXx++hohwpP5Kqbjerg9gwQVZ2/rwa7p/WCj1qFJz/ZYVzu6n+yi4XFZst0er0kABIE4CcHAyDrRdDyj4kv8pZeV0kPHbmtKy6CtrMbfIkkAHJuDZC4IL2qbfXF5XrUvl1vJj4mR+/sx31VMdDjXfVr4eWHHZvkVzzYpvi9N+zS5Nvzz3W/uz5aNQwGUBG8ZV8tQuOQQEXNEF/+dhZvrjksWabXC2bF/joXZIAEQdrj8UZJ9QVA14oqB0J19LtEZ9LsY1BYXIbXbg9d8WS7KNTWuuZS9Jho6ABD5sxtXbbd1CPO1ZgBIrfz99WYFUM/kxSNxiGuq4P4LdN8oD6d4Lw7AmsXbLmHBEFQ1ORn67t82OLdsnU7cvsjd2Ew/T6QW0cQBGw7eQn5N0okmbQSB+60BaFy8ETzfancmdyCYhzPlW+OLC7T2axxEgeO4mOqamCVV+h4ry1xRkScpTAtJBbP4/bxr6fwwAebsSTjjKLX+EU0OKfxtWTOlbMyQJLxqwTBZCgGp7yEIuLmWMXZTxOCSTBnIG4+L6viuEly5ALbAGMA5PSXU8RVzcZlOj22nLjkcCa1qsQBUMapfLz41T5cul7xP11UWo6Mv/Il3xel5XqM/mIvlliov6xODIDuYMEBvtjycvdqfU29HXUmSrZliVxQUVKur3IzD1AxCevhCwVmy8XbTl13FJO/OYCO726SzHIPSHugAPK92X78IwcpabvRZ+42yYXb0jF/v/88Vu+Xn7x17aFcSeYPqJhPbOyyvcZJagGg1+wteHjOVskyg06pm/DIR9sky0z35EDWNeN5Fx+T0syYJZn5NzFx+X6cuGi9VsyUIEgvJOJBJK19dmannwAAzPjhiLLXkVkmFwBJh1ew/5wIgoDZG45jzYELkmXOGIvKHt/vP4/DFwokzbGOxnbi3bWUta2ueETra2gCc08E5Kr38T+/nMKwxbvxf//dY3tlhcp1ekxbfQg/HLxgc13x9/3gRbvw4x85eOvHiv+tF77Yi8ELd+GLnWeM6/zvwHmsP5yL6Qr//1yJAdAdrrp7GpTr9U6bgNNaN3i5L5DiMp3NiUeVMnSZFTMEKrdKdfhsSyZW7TuP/BslklnugYr6g3KdHttOXsLNknLZfV1/e/ycS9dLrGYugIqxjSauOIBJKw5i6OLdxrsrg/8dOG/2nDe+P2xxOoCdmZfNll0tKsOJi5WBkWlwAQCjlu7B3E0VM9pbGljSEXvPXsX3By5gwGc77X6u+MIsrvky3Xe5T1JksOXR4SXPlXlP5GpixOfkte//RJ5ME7FYSbkO45btw/LdFT1dd/51GR/9cgrvrK2cxkUvCNIgwsXpi60nLmHiigN49KPt0rGzHIyAJEXQok2Ij6O6moMM59FJiTqHX9/Z+/DV7Toquf9rR605eAHLdmVh/Nf7ba4rl/E3TEhrKEBfvb/yO6rgVvU149rCAOgOIxfvVHWqDHvIdYN3lLW7eLlml1tlOqdkgAD5NLkhuJK7GIibj9QqFT765RRS0nZj1H/3yNZBiC8AkgBI5rjEx7T1xCW89r10wLF6geY9ks5ambleSUGrAEH2/H90OwASnwJ7m8B+PZaHTUfNx1+6ameB77d7z+GQqMhV3ASg0wuywYJ4WVRdy5PtSp9jvkx8DjcdvYj9WVcl79OB7GuY9M0Bq9v9bu85/HQoB1NWVbyfedfNmwL1epPMgYVgwVm9B62dT0eYDupo3J6Ls1pyX0GGfXEkA3TlZinun/UrPtxw3OF9EhS8j45wxTg8tibCNtDrBdkmYl+NWvK+NmtQ2TnHk8qfGAB5gRcfbO6yIkNTpkXQVdqW1QyQ+UW8uEyPsnLn/HfJfakYLnpyAUTHdzcZf1erK6fm2Jl5WfJFcPF2vYv40MRZqw9+tv0Fe/KitAmrbi37xuBR2qPHWiwpDgKVjo8EVGTpRiz5HSPtSNfr9AJ+PZaHa0WlZkGNuNlLPAq4aebEuI4oUAhU+D8hl6EwnMOsy0UY+d89eGpehtlnZt/Za1a3a3onLHdh1gvSQFTumOZtPoWE6T9j+8l88wftJK5BE/eodPQiKz514vMobi50xQXctBkaqLyhEu+T0h6CadszkXWlyDh2liNcNSSAKwJIpVPomGa/xc8XF9GLB5T1pAJwBkB
ewlmZEVucWQRt7R9b7ktz1H9/R66NZgel5EaINpxDuULry6I7JtPjF69tqLPRW8gAyXXdNz0Ppl3l5TJA1ppK3l9/zOoI2AbWvqikk8Qq/0JzpMh7+e9ZGLHkd/T/bKfVAnZpxkJ+GgbxRV3pp1TuPBg+C1/8dsa4zDSwtNVsZPY5kTk200BOLis3a/1x6PQCZv18zOrrKSE+hhvFTs4ACQKu3izFzr8uS/5/dS4cq0xMJxMAffDzcWRbyZYaFMuMeWYvSY2Yk4KWnIJbFof7qAofhYMUZfwl3+zmq1FJ/1clwV9V9sy5GAB5ieoarlyvFyxORGhrdGFTVgMgmS/Nvy65dq4dQ88VW8Gkta8OQ2pZfGjLf8+2uL4gCGaZMNOu8nIpaFv+b6ntDIylAEinF7BT9MVnT32IIxfSDYcrgrUTF29YDcpumNQAyb2W+GJh6NX2+Y7TmLrqkMWgUW5xSbkexWU6LNx2uvL1TXrhyAWGRaXlxtcx/ZzIvc6rKw9JzrW1429UT1mTnjWSQTnLxDVAjn1/SJt/gYfnbMWghb/he1FNiJLPz8q95zBowW+yTTN5181veuSbwG7vh0mmrbDYdtOrPVlOoGKKH9MBF8XnwllNYB+sl2aMnVUj5isaQdXaNi0NKeKjVltsumUGiFxGLvULVD1N2joqWNF65VaawBrUcWIA5IbbiOLyin92W4XWFYXn1vdP6XfAzVKd2Xm4ZfKls/u0+XAEzmDp/N8sLcfr/6scF8eeXmD2XkgAabGytc+Eac2K3BetOANUfPtiP+OHI/h6d5Yk0BCzVA9mOhK6rTvx0/k30X5mOv7xbUWvQWuZQrH1oglnTY9f3L2/SWgtq6+vhHhaFvFr3Swpx7zNpywOo2CJ6UCIhjon8eCoSr6b/vHtQezMvIz5m82boP65ynwSThXML9zGGiCTl1MSv5fZ+X3T+b1f0PPDLbgqCtgkWS8nfX+ZZoMNPRyrSpwBsvZdaymj66NRSZs8XXDszsAAiBSZM7CdovX0ephNhmoQE2rfmETW7hTkaoBczfBlYzMDpFIS4Cj7EnjiP9uN9UQGrpzeRMzS+Te90NvTvGpPL729Z69iwvL9koultZnerdUAGe76pZPlSi8elsZRsVgDZPIxt9W7Zd2fOSgp12PVvvOymVIld8am147rooERldZtWCPOAIkvWv/55SRmrT+Oh+dstbkNQRDwyncHMeOHw9JxgMQjITtYECx3wd1qofbJ9LNmuPCanmclGSh7munEx5x9tbJ5TXzhLy7TYfnuLFy4dkvxduWYNlXZU6N0LLcQvxyTbwoXjyknl+VZsuM0Pvn1FErK5TNAvhq15LO698xVCIKAa0Wlxl6PnoAjQZMi4pSoNeV6vcWu912ahWHdn/Jds+VYu1PIv66sl4IzGXra2Cq0VkFaEyRH6V1QZv7NKhVeWmMtc2Q6ArHYdZMmA3sCMiXZoqM5hfjHNwdxJKfQ7DFrQYZpBsi0+QWQBkCmmTRLn1u5PS7T6c0yDIU2AqCGwZVNVH9dumH+egpOo+lrltjoQWgvSz0S5ebGsyT7yi18s6eiOPahluHG5eLdkwykacfnR64zR1TdAMmI4EDFTYhpTVZlLzApJdlkewa5LDXpESr3OnM3ncIPBy+gXqAv9r+RDEf5VSHo7T2noh7xx/FdkWCS4fcRZfFLyvWoI3qsXKc3juFjqdnVR62S1DntOXsVSzLOKB57q7owA+Qlvhh5L+rX0SI8yHIzlK+VwjeNwqK4gltlOJ0vP9mg1keNNx5rqWg7QEVBtZyVe89hxR7LdTOuYrhg2spi7Mu6Jvn7qMyF3FnjFVlyzM6mCjmW7sxvmGSA7KkBUrLui1/tkw1+AOs92G5IMkDyhaZyNUC2WMoAmQYctmpJxOtfvllqXgOkIAIyfU8ko3M7obaiRJIBEo3WbMfntajMQvG0hV5gOr2AS9dL8PK3B/HHuWtYtC0Tm49X1rWJ38fa/uYBUPvGdc2WCQJws0T6/lpqAlPyObAnuLT0GRU3//56u27P3qEfTCktVrZGbgBS8WfeNOsm/pxZmpvOR6M2O8+eFvwADIC8Rrfm9bH7nz3RM77yjqxRvQCM7Bpr/NtaV3kfhV3b/7p002Ixsp+P2q6pOa7cLIFeL2D6msN4439/Gpcbaiiqm9ImMCVsjdtS3856KVew1FPFtAnsH98cVDxuiJJskbVaGmsB0P9EIyjrTYrHDb9dFXXNvVWqw/o/K5vXxJ/wMp0ev5+5gpJyncVxgEwDjsJb1muAxJ8bnV6QDBiq1wuK6sJM3xJx7yRXZoDsyfKJM6TirIfFDJBeQL9PM/Dt3nN44uMdePunoxj++e8QBAGn8m5IAku57yi5Xqff7j2H+1I3SZYZTr9pFq24TI9ynV7SbVusXKeXfLZsEZ8r8a6Jz4Xct2lhcZlk37aeuIT31x+z+r6aTn/kCLnNmzbXWXrMUhOYn0bl1LGOXIUBkBdRqVSSL93trz6I10UZGbm7q4rnAXUDLY8180jrCKvZIwM/jRo94xso3t8/zxfib59mYEnGGSzdeVZS8OkOhqDlqsKLvTW20u729phzNgGWMwpXTS4U5XrBbHBG2efdLDVOTmqNtVoWpRdi0yJoQzBnGKEWAAqLyzH6y32S5xWX6TB9zWH0+3Qnnvl0J17//k/5kaB19meAxFm/cr0gaQIr0+sVVYWZBqUlFuZns6S4TIdPt/yFY7kWMmzifZSM1aM86BcPpqizMD2IeLiKMp0eZy+bd0X/fMcZ9Jq9xThpLlA57ULe9WL8+McFlOn0soX1ctO9WGoCKy7TYfCiXWg3M122S7y9IyxbCiLFv5vGbAeyr6HN9A145bs/jMuGLt6N+Zv/wjdWst32NoHJ3dTIZTjF+/qXybkUZ30s/T/6aNQe1dvLEgZAXsZSLzEAqKOtDHJiwyp7lEQG+UPrYz41hMG7T7XGkhH32nxtARVBWO9WEcp2FsB+UXOSAEHxIH6uYGgCG7Nsn401bbNV9xAc4P7yPEvXU7m095/n5S+oYtN/OCzbHGjKWlrf2sVATK8XJDVMz3+xB7PTT+DCNevjRC3aloklGWeMk8t+s+ecxUExzQIgGzVApnO+iW9GynVKM0Am2Yty+wKgJRln8N66Y8b6DwDYlXkZ8zafgl4vSGuKHBjrSRAE/HN1ZTAsDpwsHd/ABb/JLjeMurzjVGUAYjjGR+Zux4tf7Ufa9tOKe4QaLv5m57BMZ6yH+/GPHLPn2Tuw64HsynqpJz7egVe+qwj6JRkgkwjo49t1ft/uNR9Y8Ey+5eE97Akyfj2eh7YzNmDtoRxJICQX4Is/S89/sVfyWOf3frH5Wj4alaIu+b85cfoORzAA8jLWxigUp5f/3rOZ8fdGNpqt/H01ir4kLt/uwaOwntpMuV7AAQuznleHLAUDpill64sr0M+9AdB7645hzxn5Imm5TIelOorpaw6j7YwN+PVYHg5fsB38/Hm+wGoGaI2CyRmBiuDN9AL+0aaTxl43ch/X5b9nyQZycvtdqtObFYnLnRdxjx
fTDJD4n7FcJyirAdKbXrztawKTO5YBC37DrPXH8eOhHEk2xZFJbm+aNO2WVWGCWLm1Dftk6A34y7E8xftp+DxYqwFauvOMWc8swyzySplmFQ0F4dLpcqTPsXZucguLsXj7adkmOtOhGKwZ8fnvuF5SjrHL9kk+i3IfG0v7o3ScIUFQNuBh6rqqD95ZFQyA7jC9btf4hNU2Hx0YsD5Kc0gtP0TVDUC9QF90aRpmXN7IxpxJWh+1ogCoye2skqMTtOr0guws7dWltFwvO4eVI2zdtcpNxlrdcgrksyVyWTi5AKhcp8eSjDMouFWGjUcvKpqv6oUv9jqlO7dOkJ/LzNB8Jzeu1cajeZIxd6yRqwEqkKkBWiYawkBcG6PT66U1R3q9oguGtYu3kkxIba3lz1XW5ZuKJrndn3VVdjkAXLkhvUiLt/f5jjM2909M7iJsusxXo1LcPGccCNFks+LmuJyCYjy7aJfkcdPA0tHBBq1lgKy9d/87cAEzfzyCf3xzEHM2nsBT83YY/5cs1eDYIg2A5DJA0r8FQcD8zX9h5T7ziZflWBqHy1T1TtVtzv15dnKqMd2bIiY0EJ2bhso+bi326NI8DB/2bwu1SgV/38qLUEMbAZBKpbIaAP3wYlf8dekGujarCKocnSqjTKc3FiI/k9hINl1sL1+Nyq4Cz6U7z1b5NQHbd+uBdt51Vie5sViKbSwTYN7tXE5hcRlCLQTv9tBb+AI2XDjq+Ns3f5rZdsrMB6iUawITH7N4kMFyk/0bmrbbYs83MfNeYNJmNTn7s65i6qpDeP2xlpLMol4vQC36v9Wo1ZILsaXm5qfmZeDMe4/KPnZFpj7M4LydY97ITT9hGiho1GrF/7+WmsDOXZXuV6ZJk5Pp9nV6we7eV+UmNWNmGSAFweumY3nYdLv32Dd7sjGscxOz/0XT7f70Rw5Cavmhk8n1QNIcK1sELd3uL8fy8P565dkanUkTtCVOmjXJYcwA3WH8fNTo2z4KDYL8ZR+XqwHa+nIP/OuZthh8b2PU0vogwE8DlUqFWrezEA8rqNmxNPghALRsGIS+7aOMdz11Axy7+Oj0grHo01kZkriIIDvXr2N7JQVs3bUGekAGyBK5u87Scj3OX7uFk6IuteKMj1qlbMZyQVDe49Ca/x04j6GLd5stN3SVr+rkwEszzpp1H75803yQRks9qXR6QdJ0oyT4AeTrVwwsZRFS0nbjWO51DFm0y/g/DQDXTAI2jVqa9XFkqIYrJufA2QOWHsi6hic/3m7821dtOwNkGIHe0k3H+avWAzPT54nP81+XbqDHvzbjOxs3Y7fKdCbvT+VnvN/8DGw/Zd9EtjdvT4djGgCJb0RP5d3AuK/2YdBC8xorcQBkOiAoYH7M9kxeDBgGIvX8DBADIC8jF6c0Dg1Ev8RGZlmc9RPvx4rn70PrRubNBaasfdhNtzvpoRa4NzYEbz6ufEwgoOKLx5BV8HdChqT73fWRNjzJruc4o2fDb5mXkX3F+pdugJtrgKyxlBno8t4veOjfW3Hp9nQH09dUTpeh0wuKMkB6QXBKE9j3By7I9iwyfK/XqmIAlFtYjLEmxfDyGQs9ynR67Mu6KpnDTWkTgSnD/pfp9Phi5xlJUbl0glo9XvxqH7787axkfCTxRfiyyaja7649hhMXK3v8ONLh4MpNxwfJVGL94VwcPFfZDK5R287gGoJdS+MAiUdrlmM6COLMH48Ym8Fe/Go/TuffxEvfHoQgCFi4NVN2GxUZw8rtiL+H95y13KRoiSF4Nr0ZEWfXT+VVBuimzZmSAEjmfa7q26b08+2sibMd5bnfsuQS9nzcokMCES0qgG4cEmhWCBxxO9Mk90/01pOtJOMOGYTU8sM3L3QCAOQWFOMzC18apsp1gvGO199HepEMqeVnHIvm7vA6OC4zuJfYXWG1zHquRQb7o1mD2thmYWh9wLzI0xGWer2IaX08997E1ozup/JuoH4dLX4STWGhvBeRc6Z0sKWOhSEfnE2nE/DmmsNm05mU6wWH5rMzNJX8N+MM3v7pqNk2Db7ffx4//pFj1qtp3ua/jL8X3CqzWs/iSAbItBnQ1fM++WhUNueXq2USAJlemC8WWp5eBTAvBv9qVxYeig9Hj7gGkizg+j9z8c7ao6ZPB1CR/RRvp6pJTsMxmxZBiwOKQtF4Wr+bjPouDojkMkBVna1+2a4sdL/b9pAnbAKjavV0h0YAgDYKsjqmNky6H9te6SFZ9utL3QHIF8HeVb+2zfqhe5qEKH79cr3e2IyiNckAhdaqrBvx9bH9XyV3d1Jb62OzmPumhfminM3PgwMgW5mBMp3ebF4tpZOg6gXBKQNN2lLLSjGwM5XrBbPgBzCfqkMpw+dWbmoK8UVLycCUhcVlVoMwJRmgT7f8hVe+O2gMpEzfO1e/lz4mdUtyDBkg40CIdr6G3Gf33O2skfg9NK0dEjNtArM2HIkShslZrd2MiOfNGywq7PbVqCTPk88AVT1w/fiXkzbXqep5qCrP/ZYll2jZMAi7/9kTK8d0tvu5/r4aSUYIqKzFaVq/ttn6StKbtqbY6CXKIJXrLDeBvfBA08ptmvSzjw4xD8LE35mz/tYGDepo8e8B7azWMgHVGABVQxbEUb/bmBeqXK9HXqG0B5nS2bRLyvXY5aLZ7cVqa6tWBK2UpSDn419OmRXf2rM9uf8bQy3MpeslWLjttM1tFd4qt3oBVTJswXvrjuGbPeeMgwWaBjzVkwGy/hqmGSB7e3HJfXYLi8vx0abKC7yfRo0Pfj5ucRtFpTpJc5W1r5mfZMYiMmU4r2bnWxCwaFsmUtJ2WRw/yFejlmT3ist0uF5cZhz7Srz9qhA3VVrk5gwQm8C8kKUC6aqIDgnEyjGdoVGr0PeTHQCUzW1kK+CIj6yDExevI+tKUUUNkKEJTNRLrVvzMEm3f9Mi2q0v90Ds1LWSZeIMUP97ovFMUiObvdkA8/mFXMUZc/zY49HWkZImK7HH2kQiPjIIx3Kv44eDF6zOyA5UXFhzTbrQV9cAll2ahUoGzrPE0qjnzmZpUtzz127hawdmxTZ8bH1lPqeGa9rzX+yx+R4BFRkgZ70vhmlASk2CEUea+eT4+ahl99VXrbZZBG1o7jRkyOxNbsgVcu/PuoqNRyvnK7PVXFhcqpPMoWft5nDcV/vwaBv5XnYGFwuLcTr/ptnrlpbrjU2jdSzUuflq1CgzyQA9+ckOZF66iY8Gtce2E5dQpHCevKpyQn+HKmEARHb7x0Mt8GH6Ccx4opVkeWJMPUlTmJIvV1s9fvw0auM65Tq9cRqKAF8NZj7ZCp9tycTMJxMkg4SZblFu3CHTL0HDOjYDoNuFrPZ2n7dXdRcHWuoVFVbbDx8P7gAA+OBnZd1gJ644YLbMGdOHKKH0vFkbD8eTvbP2KHq1DJd0XzcwXKj3m0zGa0lBUZnD48gU3CqT1L8YMhGmGQlHBlOUE+inkf0+0SjIABmy1MaBEO18bbn/c3Hwo8TgRbtwr6i531YWytbj/ztwweb8ZJay674atWS+xuyrRci8/fffv95vd
    [Notebook output cells elided: the patched jet-tagging example notebook adds several matplotlib (version 3.8.4) figures as "display_data" outputs, each carried as a base64-encoded "image/png" payload with a short "text/plain" fallback; the encoded image data is omitted here.]
YiLi8OaNWuQnZ2NGTNmAKi7NJWXl4evvvoKALBixQq0b98eERERAOrWBfrggw8we/Zs0zHnzJmDIUOG4N1338W4ceOwZcsW7Nq1C2lpafb/gg3gCBAREZH4RA1ACQkJKCkpwZIlS1BQUICoqChs27YNYWFhAICCggKzNYGMRiMWLFiArKwsyOVydOrUCe+88w6ee+45U59BgwZh3bp1ePXVV/Haa6+hU6dOWL9+PQYMGGD379cQBiAiIiLxiboOUEvVnOsAfbDzLP7v5wt4elAHLH64h02PTURE5MhaxTpAjoojQEREROJjALIz7gZPREQkPgYgOzPtBs8AREREJBoGIDvjCBAREZH4GIDsjLvBExERiY8ByM6kEo4AERERiY0ByM7kMs4BIiIiEhsDkJ39NgJkFLkSIiIix8UAZGe/7QYvciFEREQOjAHIzqSmAMQEREREJBYGIDszjQBxChAREZFoGIDsTMYRICIiItExANkZ9wIjIiISHwOQnd26BFbLa2BERESiYQCyM4W87pTX8DYwIiIi0TAA2ZlSLgMAVNUwABEREYmFAcjOlDdHgPS1BpErISIiclwMQHamcuIIEBERkdgYgOyMI0BERETiYwCys1sjQPpajgARERGJhQHIzm6NAFXVGCAIvBWeiIhIDAxAdqa8OQJkFIBaLoZIREQkCgYgO7s1AgTUjQIRERGR/TEA2dnvAxDnAREREYmDAcjOJBLJ7+4EYwAiIiISAwOQCH4/EZqIiIjsjwFIBKZb4bkYIhERkSgYgESgdLo5AsTFEImIiETBACQClZwjQERERGJiABIBR4CIiIjExQAkAo4AERERiYsBSAS3RoC4ISoREZE4GIBEoOQIEBERkagYgESg4ggQERGRqBiARHBrBKiKI0BERESiYAASAUeAiIiIxMUAJALTHCDuBUZERCQKBiARmNYB4l5gREREorA6AOXk5CA3N9f0/PDhw5g7dy7WrFlj08LaMo4AERERicvqAPTkk0/i559/BgBoNBqMHDkShw8fxiuvvIIlS5bYvMC2iLvBExERicvqAHTixAn0798fAPDNN98gKioK+/fvx9dff43k5GRb19cmmXaD5wgQERGRKKwOQDU1NVAqlQCAXbt24eGHHwYAREREoKCgwLbVtVG37gKrrOYIEBERkRisDkA9evTAp59+in379iE1NRWjR48GAOTn58PHx8fmBbZFbko5AKBCXytyJURERI7J6gD07rvvYvXq1Rg2bBgmTpyI3r17AwC2bt1qujRGd8cAREREJC65tW8YNmwYiouLodPp4OXlZWqfPn06XF1dbVpcW+V6MwCVMwARERGJwuoRoBEjRqCsrMws/ACAt7c3EhISrC5g5cqVCA8Ph0qlQkxMDPbt23fHvps2bcLIkSPh5+cHDw8PxMXFYefOnWZ9kpOTIZFI6j2qqqqsrq25uDEAERERicrqALR7925UV1fXa6+qqrpreGnI+vXrMXfuXCxcuBAZGRkYPHgwxowZg+zs7Ab77927FyNHjsS2bduQnp6O4cOH46GHHkJGRoZZPw8PDxQUFJg9VCqVVbU1p98ugXESNBERkRgsvgR27Ngx09+nTp2CRqMxPTcYDNixYwfatWtn1Yd/9NFHmDZtGp555hkAwLJly7Bz506sWrUKSUlJ9fovW7bM7Pk//vEPbNmyBd999x2io6NN7RKJBIGBgVbVYk9uqt9GgIxGAVKpROSKiIiIHIvFAahPnz6my0kjRoyo97qzszM++eQTiz+4uroa6enpePnll83aR40ahf3791t0DKPRiLKyMnh7e5u1l5eXIywsDAaDAX369MGbb75pFpBup9frodfrTc91Op3F36Mxbo0AAUBljcHsORERETU/i395s7KyIAgCOnbsiMOHD8PPz8/0mkKhgL+/P2QymcUfXFxcDIPBgICAALP2gIAAs9Glu/nwww9RUVGBCRMmmNoiIiKQnJyMnj17QqfT4eOPP0Z8fDyOHj2KLl26NHicpKQkvPHGGxbX3lRKuRQyqQQGo4AKfS0DEBERkZ1Z/MsbFhYGoG7UxZYkEvPLP4Ig1GtrSEpKChYvXowtW7bA39/f1D5w4EAMHDjQ9Dw+Ph59+/bFJ598guXLlzd4rAULFmD+/Pmm5zqdDqGhodZ+FYtJJBK4KeXQ3qhBWVUtAjya7aOIiIioAY0aejh37hx2796NwsLCeoFo0aJFFh3D19cXMpms3mhPYWFhvVGh261fvx7Tpk3Dhg0bcN999921r1QqRb9+/XD+/Pk79lEqlabVre3lVgDiWkBERET2Z3UAWrt2LZ5//nn4+voiMDDQbLRGIpFYHIAUCgViYmKQmpqKRx55xNSempqKcePG3fF9KSkpmDp1KlJSUjB27Ng//BxBEJCZmYmePXtaVJe9uCrrLhfyVngiIiL7szoAvfXWW3j77bfx0ksvNfnD58+fj8TERMTGxiIuLg5r1qxBdnY2ZsyYAaDu0lReXh6++uorAHXh56mnnsLHH3+MgQMHmkaPnJ2doVarAQBvvPEGBg4ciC5dukCn02H58uXIzMzEihUrmlyvLXEtICIiIvFYHYCuX7+Oxx9/3CYfnpCQgJKSEixZsgQFBQWIiorCtm3bTPONCgoKzNYEWr16NWprazFz5kzMnDnT1D5lyhTTTvSlpaWYPn06NBoN1Go1oqOjsXfv3ha3TYcrt8MgIiISjUQQBMGaN0ybNg39+vUzjdK0RTqdDmq1GlqtFh4ezTND+fl/pWP7CQ2WjOuBp+I6NMtnEBERORJrfr8tGgH6/d1TnTt3xmuvvYaDBw+iZ8+ecHJyMuv7wgsvNKJkx8NLYEREROKxKAAtXbrU7Lmbmxv27NmDPXv2mLVLJBIGIAuZNkStYgAiIiKyN4sCUFZWVnPX4XDcVRwBIiIiEovVm6GSbaid6y4dam/UiFwJERGR47H6LrDfr5j8exKJBCqVCp07d8a4cePq7c9F5jxdFACA0koGICIiInuzOgBlZGTgyJEjMBgM6NatGwRBwPnz5yGTyRAREYGVK1fixRdfRFpaGiIjI5uj5jbB8+YIUClHgIiIiOzO6ktg48aNw3333Yf8/Hykp6fjyJEjyMvLw8iRIzFx4kTk5eVhyJAhmDdvXnPU22Z4uty8BFZZLXIlREREjsfqAPT+++/jzTffNLu/3sPDA4sXL8Z7770HFxcXLFq0COnp6TYttK25FYA4AkRERGR/VgcgrVaLwsLCeu1FRUXQ6XQAAE9PT1RXc2TjbtTOdXOAtDdqYDRatRYlERERNVGjLoFNnToVmzdvRm5uLvLy8rB582ZMmzYN48ePBwAcPnwYXbt2tXWtbcqtu8AEASjjWkBERER2ZfUk6NWrV2PevHl44oknUFtb98Mtl8sxZcoU04KJERER+Oyzz2xbaRujkEvhqpChotqA0hvVULs4/fGbiIiIyCasDkBubm5Yu3Ytli5dikuXLkEQBHTq1Alubm6mPn369LFljW2Wp4sCFdU3UFpZgzAfsashIiJyHFYHoFvc3NzQq1cv
W9bicNTOTsgrvcGJ0ERERHZmUQB69NFHkZycDA8PDzz66KN37btp0yabFOYITHeC8VZ4IiIiu7IoAKnVakgkEtPfZBumtYA4AkRERGRXFgWgL7/8ssG/qWm8Xetuhb+qqxK5EiIiIsfSqM1Qa2trsWvXLqxevRplZWUAgPz8fJSXl9u0uLauo2/dxPFLRRUiV0JERORYrJ4EfeXKFYwePRrZ2dnQ6/UYOXIk3N3d8d5776Gqqgqffvppc9TZJnX0cwUAXChkcCQiIrInq0eA5syZg9jYWFy/fh3Ozs6m9kceeQQ//vijTYtr69p7uwAA8kpviFwJERGRY7F6BCgtLQ2//PILFAqFWXtYWBjy8vJsVpgj8HKpO4eV1QbUGIxwkjXqiiQRERFZyepfXKPRCIPBUK89NzcX7u7uNinKUXg4/7b6s453ghEREdmN1QFo5MiRWLZsmem5RCJBeXk5Xn/9dTzwwAO2rK3Nk0klcFfWDcLxVngiIiL7sfoS2NKlSzF8+HBERkaiqqoKTz75JM6fPw9fX1+kpKQ0R41tmoezE8r0tQxAREREdmR1AAoODkZmZiZSUlJw5MgRGI1GTJs2DZMmTTKbFE2WubUdBgMQERGR/TRqLzBnZ2dMnToVU6dOtXU9DkftzNWgiYiI7M3qOUDBwcF48sknsWbNGpw7d645anIovu5KAEChTi9yJURERI7D6gD04YcfwsPDAx999BEiIiIQFBSEJ554Ap9++ilOnz7dHDW2aSFedZcNuRYQERGR/Vh9CWzixImYOHEiAODq1av4+eef8d///hezZ8++4y3ydGftPOsCUO71SpErISIichyNmgNUXl6OtLQ07NmzB7t370ZGRgZ69uyJoUOH2rq+Nu/WCFDudY4AERER2YvVAWjAgAE4duwYoqKiMGzYMLzyyisYPHgwPD09m6G8ts90CYwBiIiIyG6sngN0/vx5uLi4oGPHjujYsSM6d+7M8NME7Tzr9gPjWkBERET2Y3UAunbtGn7++WfEx8dj165dGDp0KAIDA5GQkMCd4BvBWSGDj2vdnmCcB0RERGQfjdp9s1evXnjhhRewceNGbN++HWPGjMGmTZswc+ZMW9fnEIJvToTOL60SuRIiIiLHYPUcoIyMDOzevRu7d+/Gvn37UFZWht69e2POnDkYPnx4c9TY5gV4KHE8DygsYwAiIiKyB6sDUL9+/RAdHY2hQ4fi2WefxZAhQ+Dh4dEctTkMfw8VAC6GSEREZC9WB6Br164x8NiY/63VoMsYgIiIiOzB6jlADD+25+9eNwJUxEtgREREdtGoSdBkW7dGgK7yEhgREZFdMAC1AAG35gBxBIiIiMguGIBaAH+PuhGg4vJqGIyCyNUQERG1fQxALYCPqwISCWAwCrhWUS12OURERG2e1XeBzZ8/v8F2iUQClUqFzp07Y9y4cfD29m5ycY5CLpPCz02JwjI9NNoq+N2cE0RERETNo1ELIR45cgQGgwHdunWDIAg4f/48ZDIZIiIisHLlSrz44otIS0tDZGRkc9TcJrX3dkFhmR6XSyrQM0QtdjlERERtmtWXwMaNG4f77rsP+fn5SE9Px5EjR5CXl4eRI0di4sSJyMvLw5AhQzBv3rzmqLfN6uDrCgC4XFwhciVERERtn9UB6P3338ebb75pth6Qh4cHFi9ejPfeew8uLi5YtGgR0tPTbVpoWxd+MwBllTAAERERNTerA5BWq0VhYWG99qKiIuh0OgCAp6cnqqstm8y7cuVKhIeHQ6VSISYmBvv27btj302bNmHkyJHw8/ODh4cH4uLisHPnznr9Nm7ciMjISCiVSkRGRmLz5s0WfjvxhPm4AOAIEBERkT006hLY1KlTsXnzZuTm5iIvLw+bN2/GtGnTMH78eADA4cOH0bVr1z881vr16zF37lwsXLgQGRkZGDx4MMaMGYPs7OwG++/duxcjR47Etm3bkJ6ejuHDh+Ohhx5CRkaGqc+BAweQkJCAxMREHD16FImJiZgwYQIOHTpk7Ve1qw4+dSNAV0oqRa6EiIio7ZMIgmDVwjPl5eWYN28evvrqK9TW1gIA5HI5pkyZgqVLl8LV1RWZmZkAgD59+tz1WAMGDEDfvn2xatUqU1v37t0xfvx4JCUlWVRPjx49kJCQgEWLFgEAEhISoNPpsH37dlOf0aNHw8vLCykpKQ0eQ6/XQ6//bRVmnU6H0NBQaLVau239Ua6vRdTrdaNZxxaPgofKyS6fS0RE1FbodDqo1WqLfr+tHgFyc3PD2rVrUVJSYrojrKSkBGvWrIGra90oRp8+ff4w/FRXVyM9PR2jRo0yax81ahT2799vUS1GoxFlZWVmt9wfOHCg3jHvv//+ux4zKSkJarXa9AgNDbXo823JTSk33f7Oy2BERETNq9ELIbq5uaFXr17o3bs33NzcrH5/cXExDAYDAgICzNoDAgKg0WgsOsaHH36IiooKTJgwwdSm0WisPuaCBQug1WpNj5ycHCu+ie10uDkPKIsBiIiIqFlZvQ5QRUUF3nnnHfz4448oLCyE0Wg0e/3SpUtWHU8ikZg9FwShXltDUlJSsHjxYmzZsgX+/v5NOqZSqYRSKf7igx18XPHr5eu4VMQARERE1JysDkDPPPMM9uzZg8TERAQFBVkUVhri6+sLmUxWb2SmsLCw3gjO7davX49p06Zhw4YNuO+++8xeCwwMbNQxW4JeIWpsSM/FwUslYpdCRETUplkdgLZv347vv/8e8fHxTfpghUKBmJgYpKam4pFHHjG1p6amYty4cXd8X0pKCqZOnYqUlBSMHTu23utxcXFITU01W4jxhx9+wKBBg5pUrz0M6eoHAEi/ch21BiPkMm7VRkRE1BysDkBeXl422+dr/vz5SExMRGxsLOLi4rBmzRpkZ2djxowZAOrm5uTl5eGrr74CUBd+nnrqKXz88ccYOHCgaaTH2dkZanXd9hFz5szBkCFD8O6772LcuHHYsmULdu3ahbS0NJvU3JxCvVygkElRbTBCo6tCiJeL2CURERG1SVYPMbz55ptYtGgRKiubvl5NQkICli1bhiVLlqBPnz7Yu3cvtm3bhrCwMABAQUGB2ZpAq1evRm1tLWbOnImgoCDTY86cOaY+gwYNwrp16/Dll1+iV69eSE5Oxvr16zFgwIAm19vcpFIJ2nk5AwByr98QuRoiIqK2y+p1gKKjo3Hx4kUIgoAOHTrAycl8vZojR47YtEAxWLOOgK1N/uwQ0i4U48PHe+OxmBC7fjYREVFrZs3vt9WXwG6t9kzNI0itAgAUaDkCRERE1FysDkCvv/56c9RBN/l71N2OX1Sm/4OeRERE1Fi8zaiF8XevGwEqZAAiIiJqNhaNAHl7e+PcuXPw9fWFl5fXXdf+uXbtms2Kc0S3tsPgCBAREVHzsSgALV26FO7u7gCAZcuWNWc9Ds//ZgDKvX7D4lWxiYiIyDoWBaApU6Y0+DfZXkSQB1wVMmh0VTicdQ0DOvqIXRIREVGbY/UkaKBuF/YLFy40uBfYkCFDbFKYo3JTyhHf2Rc/nLqKs1f
LGICIiIiagdUB6ODBg3jyySdx5coV3L6EkEQigcFgsFlxjurWYoh5pbwVnoiIqDlYHYBmzJiB2NhYfP/9903aDJXuLFhdF4AKSqtEroSIiKhtsjoAnT9/Ht9++y06d+7cHPUQgGBPjgARERE1J6vXARowYAAuXLjQHLXQTWE+dZugXi6uELkSIiKitsnqEaDZs2fjxRdfhEajQc+ePevtBdarVy+bFeeown1dAQAlFdW4XlENL1eFyBURERG1LVYHoMceewwAMHXqVFObRCIxrVnDSdBN56qUo52nM/JKb2BLZh6ejg8XuyQiIqI2xeoAlJWV1Rx10G0S+oXio9Rz+PZILgMQERGRjVkdgMLCwpqjDrrNhNi6AHQqXwddVQ08VE5//CYiIiKyiEUBaOvWrRgzZgycnJywdevWu/Z9+OGHbVKYowtUq9DBxwWXSyrxv8vXMCIiQOySiIiI2gyLAtD48eOh0Wjg7++P8ePH37Ef5wDZ1oBwH1wuqUTa+RIGICIiIhuy6DZ4o9EIf39/0993ejD82NZ9kXWhZ1NGLvS1PLdERES2YvU6QGQ/IyL84eumRGllDf53+brY5RAREbUZjdoMtaKiAnv27EF2djaqq6vNXnvhhRdsUhgBMqkEw7r54dv0XHx3NB/xnX3FLomIiKhNsDoAZWRk4IEHHkBlZSUqKirg7e2N4uJiuLi4wN/fnwHIxh6PCcG36bnYkpmPxQ/3gMpJJnZJRERErZ7Vl8DmzZuHhx56CNeuXYOzszMOHjyIK1euICYmBh988EFz1OjQ+od7I8BDiRs1BhzOuiZ2OURERG2C1QEoMzMTL774ImQyGWQyGfR6PUJDQ/Hee+/hlVdeaY4aHZpEIsHQrn4AgN1ni0SuhoiIqG2wOgA5OTlBIpEAAAICApCdnQ0AUKvVpr/JtoZ3q7sD74dTGhiMgsjVEBERtX5WzwGKjo7G//73P3Tt2hXDhw/HokWLUFxcjH/+85/o2bNnc9To8IZ184fa2Qm512/gpzOFGBnJNYGIiIiawuoRoH/84x8ICgoCALz55pvw8fHB888/j8LCQqxZs8bmBRLgrJDhif6hAICUwxxlIyIiaiqrR4BiY2NNf/v5+WHbtm02LYga9lCvYKzecwm/Xr4Go1GAVCoRuyQiIqJWiwshthIRge5QOUlRVlWLvec5GZqIiKgprA5AJSUlmDlzJiIjI+Hr6wtvb2+zBzUPuUyKh3oFAwBm/CsdVTXcGoOIiKixrL4ENnnyZFy8eBHTpk1DQECA6Y4wan7zR3XFhvRcVNUY8clP5/G3+yPELomIiKhVsjoApaWlIS0tDb17926OeugugtTOuDfCHz+eKcTOk1cZgIiIiBrJ6ktgERERuHHjRnPUQhb4KKEPZFIJLhSWI+dapdjlEBERtUpWB6CVK1di4cKF2LNnD0pKSqDT6cwe1LzUzk6Iae8FAHhxw1HUGIwiV0RERNT6WB2APD09odVqMWLECPj7+8PLywteXl7w9PSEl5dXc9RIt3l+eCfIpRIczrqGHSc0YpdDRETU6lg9B2jSpElQKBT4+uuvOQlaJMO7+eOJ/qH418FsZGSX4qHewWKXRERE1KpYHYBOnDiBjIwMdOvWrTnqIQv1DvHEv5CNb/6Xgzn3dYHa2UnskoiIiFoNqy+BxcbGIicnpzlqISsM6+YPd5Uc5fpafPjDWbHLISIialWsDkCzZ8/GnDlzkJycjPT0dBw7dszsQfbh567EW+OjAABfHbiCMxpOQCciIrKURBAEwZo3SKX1M5NEIoEgCJBIJDAYWv8KxTqdDmq1GlqtFh4eHmKXc0eCIGDcil9wLFeLiEB37Jg7ROySiIiIRGPN77fVc4CysrIaXRjZlkQiwfg+7XAsV4szmjKcytchMrjlBjYiIqKWwuoAFBYW1hx1UCNNGtgeS3edQ1lVLVbvvYhlCX14Zx4REdEfaNRu8P/85z8RHx+P4OBgXLlyBQCwbNkybNmyxabF0R9TymVY+1QsAGBLZj62Hs0XuSIiIqKWz+oAtGrVKsyfPx8PPPAASktLTXN+PD09sWzZMlvXRxYY2NEH4/rUrQX0n4w8kashIiJq+awOQJ988gnWrl2LhQsXQiaTmdpjY2Nx/PhxmxZHlnuyf3sAwM9ni5CRfV3kaoiIiFo2qwNQVlYWoqOj67UrlUpUVFTYpCiyXv9wb8SE1W1FsvPkVZGrISIiatmsDkDh4eHIzMys1759+3ZERkZaXcDKlSsRHh4OlUqFmJgY7Nu37459CwoK8OSTT6Jbt26QSqWYO3duvT7JycmQSCT1HlVVVVbX1ppIJBJMvDkKdCirRORqiIiIWjar7wL729/+hpkzZ6KqqgqCIODw4cNISUlBUlISPvvsM6uOtX79esydOxcrV65EfHw8Vq9ejTFjxuDUqVNo3759vf56vR5+fn5YuHAhli5desfjenh44OxZ89WRVSqVVbW1RgPCvQEAGdml2HGiAPf3COQdYURERA2weiFEAFi7di3eeust05YY7dq1w+LFizFt2jSrjjNgwAD07dsXq1atMrV1794d48ePR1JS0l3fO2zYMPTp06fexOvk5GTMnTsXpaWlFteh1+uh1+tNz3U6HUJDQ1v8QogN6bFoByqq6yam9w71RPLT/eDlqhC5KiIiouZnzUKIjboN/tlnn8WVK1dQWFgIjUaDnJwcTJs2DXl5lt+BVF1djfT0dIwaNcqsfdSoUdi/f39jyjIpLy9HWFgYQkJC8OCDDyIjI+Ou/ZOSkqBWq02P0NDQJn2+mJKn9jf9fTSnFF/+woUriYiIbteoAHSLr68v/P39odFoMHv2bHTu3Nni9xYXF8NgMCAgIMCsPSAgABqNptE1RUREIDk5GVu3bkVKSgpUKhXi4+Nx/vz5O75nwYIF0Gq1pkdr3uy1XwdvfPF0rGl3+AOXOB+IiIjodhYHoNLSUkyaNAl+fn4IDg7G8uXLYTQasWjRInTs2BEHDx7EF198YXUBt89RubWnWGMNHDgQkydPRu/evTF48GB888036Nq1Kz755JM7vkepVMLDw8Ps0ZqNiAjAf2bGAwCO5mhRYzCKXBEREVHLYvEk6FdeeQV79+7FlClTsGPHDsybNw87duxAVVUVtm/fjqFDh1r1wb6+vpDJZPVGewoLC+uNCjWFVCpFv3797joC1BaFebvAXSlHmb4Why5dwz1dfMUuiYiIqMWweATo+++/x5dffokPPvgAW7duhSAI6Nq1K3766Serww8AKBQKxMTEIDU11aw9NTUVgwYNsvp4dyIIAjIzMxEUFGSzY7YGUqnEtDHq5M8PYUsmV4gmIiK6xeIRoPz8fNM6Px07doRKpcIzzzzTpA+fP38+EhMTERsbi7i4OKxZswbZ2dmYMWMGgLq5OXl5efjqq69M77m1BlF5eTmKioqQmZkJhUJhqu2NN97AwIED0aVLF+h0OixfvhyZmZlYsWJFk2ptjWYM64RDWdcAAHPWZeLe7gFwU1q98gEREVGbY/GvodFohJOTk+m5TCaDq6trkz48IS
EBJSUlWLJkCQoKChAVFYVt27aZdpwvKChAdna22Xt+vwp1eno6vv76a4SFheHy5csA6uYqTZ8+HRqNBmq1GtHR0di7dy/69+8PRzO8mz/efawnXtpYt0VJyqFsPDuko8hVERERic/idYCkUinGjBkDpVIJAPjuu+8wYsSIeiFo06ZNtq/SzqxZR6A1+ObXHPx94zEEeCix52/DoXKS/fGbiIiIWhlrfr8tHgGaMmWK2fPJkyc3rjqyu/HR7bBs1znka6vw//ZfxnNDO4ldEhERkagatRJ0W9fWRoAA4Jv/5eDv3x5DsFqFX14ewS0yiIiozWn2laCp9Xm4dzAUcinytVU4lqsVuxwiIiJRMQA5CJWTDD1u3hY/bsUvqKoxiFwRERGReBiAHMiDvYJNf6//tfVu90FERNRUDEAOZNo94Xh1bHcADEBEROTYGIAczMN96kaBTmt0KK2sFrkaIiIicTAAORh/dxW6+LtBEIAl350CbwIkIiJHxADkgBY9FAmZVIJNGXk4ma8TuxwiIiK7YwByQIO7+GFoVz8AwIOfpPGOMCIicjgMQA7q3u7+pr/X7L0kYiVERET2xwDkoJ7s3x6jIgMAAB+lnsOec0UiV0RERGQ/DEAOSiKR4LUHI03PV+2+wAnRRETkMBiAHFiotwveeLgHAODgpWvotfgH/PdYvshVERERNT8GIAc3ZVAHTB/SEQBQpq/FrK8zcLm4QuSqiIiImhcDEOH5oZ3Q0dfV9Px/V66LWA0REVHzYwAieLkq8NNfh+HP8R0AAP88cJnzgYiIqE1jACKTqfHhkEslOJqrxcUiXgYjIqK2iwGITEK9XRDbwQsA8OKGo9wrjIiI2iwGIDLz5IAwAMDRnFJMTf5V5GqIiIiaBwMQmXm4dzDee6wXAOBIdim+/CVL5IqIiIhsjwGI6pnQLxSxYXWXwt747hTOaspEroiIiMi2GICoQf94tKfp78wc3hZPRERtCwMQNahrgLtpgcRjuVqRqyEiIrItBiC6o/4dvAEA/z6UjSPZHAUiIqK2gwGI7mhwV194qOQAgL9+cxRlVTUiV0RERGQbDEB0R0q5DN8+PwgKmRSXiivwzvYzYpdERERkEwxAdFddA9yR/Od+AOouhY37vzRcKeEq0URE1LoxANEfGtTZF38d1RUAcDRXy5EgIiJq9RiAyCKzRnTBJxOjAQAZ2aXiFkNERNREDEBkseER/pBIAI2uCj1f34kFm44jq7gCN6oNYpdGRERkFQYgspibUo5RkQEAgDJ9LVIOZ2P4B7sx/5tMcQsjIiKyEgMQWeX/nuyLRQ9GmrVtP6HBnHUZOJHHBROJiKh1YAAiqzjJpJh6TzguvD0GQ7v6mdq3ZObjwU/SUFhWJWJ1RERElmEAokaRy6T48ul+yFw0Er1C1Kb2fx/MRlGZHhcKy0WsjoiI6O4kgiAIYhfR0uh0OqjVami1Wnh4eIhdTotXoa/FX/59BHvOFZm175o/FJ393USqioiIHI01v98cAaImc1XKsfap2Hrt9320B1/+kiVCRURERHfHAEQ2oZBL8UDPwHrtb3x3Cifztag1GEWoioiIqGEMQGQz7/2pNx6NbgcACPNxMbWPXZ6Gzgu349M9F8ErrkRE1BJwDlADOAeoaW5UG6BykmL13kv1ts3Y9JdB6NveS6TKiIioLeMcIBKVs0IGiUSC54Z0xN6/DTd7beHmE/hs3yVU1/KSGBERiUcudgHUdkkkErT3ccGKJ/ti5tdHAACnC3R463sdisr08HVT4p4uvugexFE2IiKyL14CawAvgdne5eIKDPtgd4Ov9evghXXT4yCTSuxbFBERtSm8BEYtTpiPC54b0hFjogKhdnYye+3Xy9fx2pYTMBqZxYmIyD5ED0ArV65EeHg4VCoVYmJisG/fvjv2LSgowJNPPolu3bpBKpVi7ty5DfbbuHEjIiMjoVQqERkZic2bNzdT9WQpiUSCBQ90x6rJMTi88F4ceW0kLrw9Bp38XAEAXx/Kxqo9F0WukoiIHIWoAWj9+vWYO3cuFi5ciIyMDAwePBhjxoxBdnZ2g/31ej38/PywcOFC9O7du8E+Bw4cQEJCAhITE3H06FEkJiZiwoQJOHToUHN+FbKCUi6Dt6sCcpkUHz8RbWp/f+dZnMrXiVgZERE5ClHnAA0YMAB9+/bFqlWrTG3du3fH+PHjkZSUdNf3Dhs2DH369MGyZcvM2hMSEqDT6bB9+3ZT2+jRo+Hl5YWUlBSL6uIcIPv65UIxFm4+jssllQCApQm9Mb5PO0gknBNERESWaxVzgKqrq5Geno5Ro0aZtY8aNQr79+9v9HEPHDhQ75j333//XY+p1+uh0+nMHmQ/8Z19sfapWNzKO/PWH0X4gm3YcaJA3MKIiKjNEi0AFRcXw2AwICAgwKw9ICAAGo2m0cfVaDRWHzMpKQlqtdr0CA0NbfTnU+N0CXDHwge6m7XN+NcRvLzxGPadL8Kvl6+JVBkREbVFok+Cvv0yhyAITb70Ye0xFyxYAK1Wa3rk5OQ06fOpcZ4Z3BHn3hqDVx6IMLWt+zUHiZ8fxuOfHkCHl7/nqBAREdmEaAHI19cXMpms3shMYWFhvREcawQGBlp9TKVSCQ8PD7MHiUMhl2L6kE4Y1s2vwdfnrMtEXukNO1dFRERtjWgBSKFQICYmBqmpqWbtqampGDRoUKOPGxcXV++YP/zwQ5OOSfb33p964cs/98PCB7qjZzu1qV1fa0T8Oz9hwD92YczH+3AiTytilURE1FqJuhXG/PnzkZiYiNjYWMTFxWHNmjXIzs7GjBkzANRdmsrLy8NXX31lek9mZiYAoLy8HEVFRcjMzIRCoUBkZCQAYM6cORgyZAjeffddjBs3Dlu2bMGuXbuQlpZm9+9HjefvroJ/NxWGd/PHs0M6AgD2nCvClC8OAwCu6vS4qtPjwU/SsO2FwYgM5qgdERFZTtQAlJCQgJKSEixZsgQFBQWIiorCtm3bEBYWBqBu4cPb1wSKjv5t3Zj09HR8/fXXCAsLw+XLlwEAgwYNwrp16/Dqq6/itddeQ6dOnbB+/XoMGDDAbt+LmsfQrn5YMq4Hfjh5FR7Ocmw7Xnepc+dJDQMQERFZhXuBNYDrALUO/2//Zby+9SQAwN9didcf6oGxvYJEroqIiMTSKtYBImqqERH+cHaSAQAKy/SY+fURzP8mE7UGo8iVERFRS8cARK1WqLcLlj3Rx6xt05E8DPtgN07mc3I0ERHdGQMQtWr39wjEf2ffgycHtDe15V6/gbHL07AxPRcZ2deR/EsWR4WIiMgM5wA1gHOAWiejUUD8uz+hQFvV4OsdfV0xsJMP3hwXBZmU+4wREbU11vx+MwA1gAGo9bpSUoHvjubjgx/O3bXfsG5+qDUIUDlJ8cK9XdArxNM+BRIRUbOx5vdb1NvgiWwtzMcVs0Z0QZm+Fqv3XDJ7zV0pR5m+FgCw+2yRqX3X6UIcXTQKahcnu9ZKRETi4QhQAzgC1DZU1Rhw5Mp1KORStPdxgZ+bEn/79hi+Tc+t19fLxQlLE/rgns6+A
ACZVNLkPemIiMi+eAmsiRiA2rZfL1+Dt6sCwWpnvPqfE9h45LdANLZXEM5qyiCTSPBYTDv0CfVC/3BvEaslIiJLMQA1EQOQY3l/5xms+PniHV//++hueH5oJ44IERG1cFwIkcgKj/UNgUJ+5/8U3ttxFkPf340TeVoUaLkTPRFRW8ARoAZwBMjx5FyrhLtKjotF5dh58irW7ruEIA8V8hu4pf7tR6IwaUBYg8cxGAUUlekRqFY1d8lERHQbXgJrIgYgqqyuhbOTDIIArPs1B69sPm56zUUhw5rEWCz/6Tx0N2qw9qlY+HsoUV1rxAspGdh9rgjjegejnZczerZTY1g3f6hubtlBRETNhwGoiRiA6HabjuTi/+2/jKO5jdtiY9bwzvjr/d1Mz2/9Z8d5RUREtsM5QEQ29mjfEGyZdQ92zR+K4EZc3vq/ny/gSPZ1AEB+6Q3EvLULs1MybF0mERFZiCNADeAIEN3NhcJy/N9P5xGodsax3FLsv1iCUG9nvD2+J4LUKijkUuw8qcHmjHzobtQgr/S3idNOMglqDL/9J7fx+TjEhPE2eyIiW+AlsCZiACJbOpx1DRNWH7jj61383dDB1xWvjY1Eex8XAID2Rg0UMimcFZw7RERkKQagJmIAIls7kn0dyb9cxtaj+XBRyLD4oR54fetJ3KgxmPVTyqXQ1/62c/34PsH4cEIfbt5KRGQBBqAmYgCi5mI0CjAKAuQyKX4+W4itmfn477F8s8tiDXl5TASeHdyRQYiI6C4YgJqIAYjs7eClEuw5V4RVu+tWpO4a4AZPFwUOZ10z9RkVGYBVk2Ow73wR9p4rxqN92yEyyAMC6u4qk8t4TwMROTYGoCZiACKxbMnMw8WiCsy9twskEmDbcQ2+O5qPHSc1Fr3/1bHdAQAHL11DlwA3TBrQHjnXbqBfBy8GJCJq8xiAmogBiFqaOesysCUzv9Hv93VTQioBott7YmhXf7ir5BgZGYAzmjJotFUYHRVow2qJiMRhze+33E41EVETLBzbHR4qJ3g4y9ErxBPZJZX46uBl5FyzbG+y4nI9AGDnyavYefJqg308XZzw2thIPBYTYrO6iYhaKo4ANYAjQNQa5JXeQMqhbMR18kEXfzeU6WtRWlljmjf08Y/nMHtEFzweE4KVuy8ief9li447594ueHpQB3i5Kkxt1yuq4enixJWriahF4yWwJmIAoraqutaIGoMRu05fxcFL13AiT4uIQHf4uSux8uYEbADwcVUgMtgDmdmlKNPXAqi7JX/J+CgoZFLIpBIcuFiC2A5ecFFwIJmIWgYGoCZiACJHlHu9Eq9vOYkfzxRa/J4n+oXincd6NWNVRESW4xwgIrJaiJcLPn+6H5J/ycLi704BANp5OsNdJUdZVa3Zlh63rPs1B1KpBEeuXMcZTRnu6eyLGUM7oUewh9klNCKiloYjQA3gCBA5usvFFWjn5Qyn3906v/14AS6XVCLneiW+PpT9h8d4sFcQPn4imos3EpHd8BJYEzEAEd3dqXwdqmoNeHTl/rv2iwnzwtCufrhcXIEwH1c80DMQXQLc7VQlETkaBqAmYgAissz6X7PhrnLCmJvrCH2eloW3vj991/dMH9IR3QLcMbCTDw5cLMGQLr5wUcrhpuQVeSJqGgagJmIAImocQRCgrzVC5STDoi0nsOvUVXQP8rBoYvWs4Z3x7JCOKKuqwZHsUnT0dUVUO7UdqiaitoIBqIkYgIhsS6OtwvKfzls0d+j35t7XBS+M6ILicj0qqw0I9nSGk0yCE3k6dPRzhStHjYjodxiAmogBiKh5FOqq4OWqwH8y8vB/P1+Am1KOk/m6Rh/vTzEhePuRKCjlMmhv1EBfY4C/h8qGFRNRa8IA1EQMQET2c62iGtW1RtyoMaC0shqrdl/EYzEhyL1+A2/+95TVx3t1bHec0ZQhI/s6wn3d8OzgcAR7OkMqlaCdp3MzfAMiaikYgJqIAYioZTh/tQybM/Jw7moZ5FIpJBJgz7kiuKvkuKrTW328ZweHY+HYSLO2WoMR1QYjV7QmagMYgJqIAYio5dPXGpBz7QZ+PlOIH05pcL6wHKWVNX/4vt4hatQaBSx8oDs6+LpiyheHUVimx5d/7gcvFwXyS28gvrOvHb4BEdkaA1ATMQARtT61BiMmfXYIh7KuwcvFCR883hud/Nzw8qZjyC6phEIuxeWSSouO1c7TGXmlN9AtwB0juvtj9ojOHCEiagUYgJqIAYiodRIEAfvOF8PPXYnuQeb/7RaV6fHujjNIPXUV2ht/PFL0e8O7+WHWiC7oHaLGlWuV+PlMITJySvHMPeGIbu9ly69ARE3AANREDEBEbVt1rREn87WQSCQI9FChxmDE7nNF8HNTYNbXGag1Wv4/i0mP9oSHygk/nrmKxQ/3gIfKqRkrJ6K7YQBqIgYgIsd1raIapZXVqKw2oHuQB9b9mo2Fm09Y9F61sxMejwmBi0KGM5oyjI9uhyFd/QCAK10T2QEDUBMxABHR79UajLh/2V5kFVcgoV8ocq/fwMjIABRoq7Bq90WLjuHnroRMIsGEfqGYd18XSCTcJJbI1qz5/eb/S0JE9AfkMil2zh2Ccn0tPF0UpnZBEODrpoSTTAIfVyV+PluIozml6OTnhuN5WuSV3jD1LSqru21/+Y/noa2sxqsPRsJJJsUHO8/i++MFSHq0JwZ29LH7dyNyVBwBagBHgIioqbSVNZizPgO7zxZZ9b6e7dQYHuGPE3lanC7QoWc7NZY90QcSSKCQSyGVABKJBIIgQBCAaoMR566WoWc7NUeVyOHxElgTMQARka2U62vx/bF8xIR5I8TLGfd+uMdsZKgxott7oqhMj9zrvx3nqbgwLBkXBaNRwLE8Lf7f/ssoq6rF0K6+GNbNH3//9hhOFejwRL9QBHs6Y83eS4jt4IW/juqGXy4UY/sJDfacK4JMKkG/Dl4o0FZhdFQgBnXyhUImRfqVazAYAS9XJ9QYBDzUKwgZOaU4XaCDk0yKI1euY3AXX+w9X4z7ugegsroWIV7O+DY9D6Hezjh06RpOFejQztMZwyP88HhMKGoMRoT5uEJ7owaFZVVIOZwDD5Uc93UPgNJJCpWTDL1DPJFfegMlFdXoFuCOk/larNx9EV0C3NDF3x0P9Q6CUi5r6j8maiMYgJqIAYiImosgCPgo9RyuVVTDXeUEjfYGHosJwZLvTuF8YTmcZBJ0DXDHtYpqFGirxC63RZFJJTDcdoees5MMod7OmDwwDHvPFWH/xRK8OjYSH/5wFiUV1Xj3sZ7o5OeGMn0thnfzh77WgJLyapzR6HC6oAyRwR4IVjvjjEYHf3cVCsuq0CvEExIAHXxdkXu9Est/PI+rOj3+MqwT8rU3MKSLH3zclDirKcOULw7j+WGdkNAvFO9sPwN/DyUmDQjDP74/jQJdFRaMicCJPC3iO/uixmCEBBKoXZyw86QGrgo5/D2UcHaS4XJJBcb2DDKN4tUajDijKYOTTAovFyeoFLIG7zAUBAHnC8tNrwWq/3gvvALtDTg7yVBrrLuE25YwADUR
AxARiUEQBLPLWFU1Bvx1w1EYBQGvPNAd566WYd3hHCjkUkS392rUXmkN8XVTQimXWj0y5ewkQ2f/uvlOtuaqkKGi2mDTY47rE4yT+TpcKCxv8rF83RQoLq+2QVX1uShkqGzguz/atx3OXS0DAJzI++NNhIPVKuTfDNFBahWm3ROO/x4rQGZOqanPxP7tsf9iMXqHeOLKtUr0DlHjWkU1fjpTiBs1BgwI90Znfzd08HFFhd6AP8WGoLhMj7NXy/DZvkuo0BsQqFbBRSHD6YIydPBxQZcAN/i5KaF0kiGruALfpufi8ZgQLHooEq4KOS4Vl+OHU1fRI1iNoTfvkrSVVhWAVq5ciffffx8FBQXo0aMHli1bhsGDB9+x/549ezB//nycPHkSwcHB+Pvf/44ZM2aYXk9OTsaf//zneu+7ceMGVCrLdolmACKi1iC7pBJnNDp08nfD98cK4OniBKVcik5+bgCA6PZe2HFCg7QLxZh2TweE+bhCJpHAcHP+UGV1LU4V6DAg3Acyad28In2tEQAglUgglQAn83VIPXUVM4Z1glIuhb7WCFeFDKWVNXBWyKBykqGqpu7HWhAAhVyKiupaODvJUKk3QOkkRa1RgFIuhZNMCgBYufsC3ttxFhP7t8dzQzqipEKPAA8VUg5no297L0QEeSDIQ4ULReWorDagvKoWu05fRY9gD7ir5JBKJDirKcNDvYORtP00dp68avW5U8ql6Brg3izhjSzT0dcVO+YOgUIutdkxW00AWr9+PRITE7Fy5UrEx8dj9erV+Oyzz3Dq1Cm0b9++Xv+srCxERUXh2WefxXPPPYdffvkFf/nLX5CSkoLHHnsMQF0AmjNnDs6ePWv23sDAQIvrYgAiImo+tQYj9pwrQnxnX6icmj5/53pFNf518ArcVHJ0DXBHrVFAr3ZqKORSLNt1Dv7uKmTmlmL78QJ08HXF+3/qja4BbnB2kuHjH8+jQFuFYE9npJ66ij8P6oAgTxX83VXYeCQXg7v4wkPlhE/3XMTpAh2mDOqAcF9XnC4ow8l8LbxcFOgf7g3pzZG7jn6uSPz8ECICPbD44R64UFiOYE8VNqbnokc7NYLVztDoquDnrsTnaVk4U6DDPZ19cW/3AKz/Xw72nqubNN87RI03xkVBAuD/7b8Mb1cFzmjKkHahGI9Et0OwpwrHcrWQSiR4sFcQDlwswf6LJdDoquCmlCPU2wUP9grCpiO5uFhUAQDwdlWgW4A7QrycsSE91+wc9uvghRqDYBodujUC5+umhJtSBh83JdKvXG/yPysAkEiAcF9X/GVYZzwa3Q5Sqe0m77eaADRgwAD07dsXq1atMrV1794d48ePR1JSUr3+L730ErZu3YrTp0+b2mbMmIGjR4/iwIEDAOoC0Ny5c1FaWtrouhiAiIjanrKqGqicZKaRqJZGEAQUlenh72HZ1YqmfM7Ok1fRNcAN63/NQZiPK54cUDfoUFVjgEZbhTAfl3p3FeZcq8TR3FJEBnmgo58b8kpv4MfTVzG8mz+8XBWQoG4h0SlfHMaYnoEI9nRGuI8rYjt4Q3ujBjtPanA0pxTPDe2Izv7uzfLdWkUAqq6uhouLCzZs2IBHHnnE1D5nzhxkZmZiz5499d4zZMgQREdH4+OPPza1bd68GRMmTEBlZSWcnJyQnJyMZ555Bu3atYPBYECfPn3w5ptvIjo6+o616PV66PV603OdTofQ0FAGICIiolbEmgAkWgwuLi6GwWBAQECAWXtAQAA0Gk2D79FoNA32r62tRXFxMQAgIiICycnJ2Lp1K1JSUqBSqRAfH4/z58/fsZakpCSo1WrTIzQ0tInfjoiIiFoy0ccBbx9iu/0uCEv6/7594MCBmDx5Mnr37o3Bgwfjm2++QdeuXfHJJ5/c8ZgLFiyAVqs1PXJychr7dYiIiKgVEG0rDF9fX8hksnqjPYWFhfVGeW4JDAxssL9cLoePT8NLyEulUvTr1++uI0BKpRJKZdtaC4GIiIjuTLQRIIVCgZiYGKSmppq1p6amYtCgQQ2+Jy4url7/H374AbGxsXByqr9AFFA3QpSZmYmgoCDbFE5EREStnqiXwObPn4/PPvsMX3zxBU6fPo158+YhOzvbtK7PggUL8NRTT5n6z5gxA1euXMH8+fNx+vRpfPHFF/j888/x17/+1dTnjTfewM6dO3Hp0iVkZmZi2rRpyMzMNFsriIiIiBybqLvBJyQkoKSkBEuWLEFBQQGioqKwbds2hIWFAQAKCgqQnZ1t6h8eHo5t27Zh3rx5WLFiBYKDg7F8+XLTGkAAUFpaiunTp0Oj0UCtViM6Ohp79+5F//797f79iIiIqGUSfSXolojrABEREbU+reI2eCIiIiKxMAARERGRw2EAIiIiIofDAEREREQOhwGIiIiIHA4DEBERETkcBiAiIiJyOKIuhNhS3VoaSafTiVwJERERWerW77YlSxwyADWgrKwMABAaGipyJURERGStsrIyqNXqu/bhStANMBqNyM/Ph7u7OyQSiU2PrdPpEBoaipycHK4y3Yx4nu2D59l+eK7tg+fZPprrPAuCgLKyMgQHB0MqvfssH44ANUAqlSIkJKRZP8PDw4P/cdkBz7N98DzbD8+1ffA820dznOc/Gvm5hZOgiYiIyOEwABEREZHDYQCyM6VSiddffx1KpVLsUto0nmf74Hm2H55r++B5to+WcJ45CZqIiIgcDkeAiIiIyOEwABEREZHDYQAiIiIih8MARERERA6HAciOVq5cifDwcKhUKsTExGDfvn1il9SqJCUloV+/fnB3d4e/vz/Gjx+Ps2fPmvURBAGLFy9GcHAwnJ2dMWzYMJw8edKsj16vx+zZs+Hr6wtXV1c8/PDDyM3NtedXaVWSkpIgkUgwd+5cUxvPs23k5eVh8uTJ8PHxgYuLC/r06YP09HTT6zzPtlFbW4tXX30V4eHhcHZ2RseOHbFkyRIYjUZTH55r6+3duxcPPfQQgoODIZFI8J///MfsdVud0+vXryMxMRFqtRpqtRqJiYkoLS1t+hcQyC7WrVsnODk5CWvXrhVOnTolzJkzR3B1dRWuXLkidmmtxv333y98+eWXwokTJ4TMzExh7NixQvv27YXy8nJTn3feeUdwd3cXNm7cKBw/flxISEgQgoKCBJ1OZ+ozY8YMoV27dkJqaqpw5MgRYfjw4ULv3r2F2tpaMb5Wi3b48GGhQ4cOQq9evYQ5c+aY2nmem+7atWtCWFiY8PTTTwuHDh0SsrKyhF27dgkXLlww9eF5to233npL8PHxEf773/8KWVlZwoYNGwQ3Nzdh2bJlpj4819bbtm2bsHDhQmHjxo0CAGHz5s1mr9vqnI4ePVqIiooS9u/fL+zfv1+IiooSHnzwwSbXzwBkJ/379xdmzJhh1hYRESG8/PLLIlXU+hUWFgoAhD179giCIAhGo1EIDAwU3nnnHVOfqqoqQa1WC59++qkgCIJQWloqODk5CevWrTP1ycvLE6RSqbBjxw77foEWrqysTOjSpYuQmpoqDB061BSAeJ5t46WXXhLuueeeO77
O82w7Y8eOFaZOnWrW9uijjwqTJ08WBIHn2hZuD0C2OqenTp0SAAgHDx409Tlw4IAAQDhz5kyTauYlMDuorq5Geno6Ro0aZdY+atQo7N+/X6SqWj+tVgsA8Pb2BgBkZWVBo9GYnWelUomhQ4eaznN6ejpqamrM+gQHByMqKor/LG4zc+ZMjB07Fvfdd59ZO8+zbWzduhWxsbF4/PHH4e/vj+joaKxdu9b0Os+z7dxzzz348ccfce7cOQDA0aNHkZaWhgceeAAAz3VzsNU5PXDgANRqNQYMGGDqM3DgQKjV6iafd26GagfFxcUwGAwICAgwaw8ICIBGoxGpqtZNEATMnz8f99xzD6KiogDAdC4bOs9Xrlwx9VEoFPDy8qrXh/8sfrNu3TocOXIEv/76a73XeJ5t49KlS1i1ahXmz5+PV155BYcPH8YLL7wApVKJp556iufZhl566SVotVpERERAJpPBYDDg7bffxsSJEwHw3+nmYKtzqtFo4O/vX+/4/v7+TT7vDEB2JJFIzJ4LglCvjSwza9YsHDt2DGlpafVea8x55j+L3+Tk5GDOnDn44YcfoFKp7tiP57lpjEYjYmNj8Y9//AMAEB0djZMnT2LVqlV46qmnTP14nptu/fr1+Ne//oWvv/4aPXr0QGZmJubOnYvg4GBMmTLF1I/n2vZscU4b6m+L885LYHbg6+sLmUxWL60WFhbWS8f0x2bPno2tW7fi559/RkhIiKk9MDAQAO56ngMDA1FdXY3r16/fsY+jS09PR2FhIWJiYiCXyyGXy7Fnzx4sX74ccrncdJ54npsmKCgIkZGRZm3du3dHdnY2AP77bEt/+9vf8PLLL+OJJ55Az549kZiYiHnz5iEpKQkAz3VzsNU5DQwMxNWrV+sdv6ioqMnnnQHIDhQKBWJiYpCammrWnpqaikGDBolUVesjCAJmzZqFTZs24aeffkJ4eLjZ6+Hh4QgMDDQ7z9XV1dizZ4/pPMfExMDJycmsT0FBAU6cOMF/Fjfde++9OH78ODIzM02P2NhYTJo0CZmZmejYsSPPsw3Ex8fXW8bh3LlzCAsLA8B/n22psrISUqn5z51MJjPdBs9zbXu2OqdxcXHQarU4fPiwqc+hQ4eg1Wqbft6bNIWaLHbrNvjPP/9cOHXqlDB37lzB1dVVuHz5stiltRrPP/+8oFarhd27dwsFBQWmR2VlpanPO++8I6jVamHTpk3C8ePHhYkTJzZ422VISIiwa9cu4ciRI8KIESMc+lZWS/z+LjBB4Hm2hcOHDwtyuVx4++23hfPnzwv//ve/BRcXF+Ff//qXqQ/Ps21MmTJFaNeunek2+E2bNgm+vr7C3//+d1MfnmvrlZWVCRkZGUJGRoYAQPjoo4+EjIwM0/Iutjqno0ePFnr16iUcOHBAOHDggNCzZ0/eBt/arFixQggLCxMUCoXQt29f0+3bZBkADT6+/PJLUx+j0Si8/vrrQmBgoKBUKoUhQ4YIx48fNzvOjRs3hFmzZgne3t6Cs7Oz8OCDDwrZ2dl2/jaty+0BiOfZNr777jshKipKUCqVQkREhLBmzRqz13mebUOn0wlz5swR2rdvL6hUKqFjx47CwoULBb1eb+rDc229n3/+ucH/TZ4yZYogCLY7pyUlJcKkSZMEd3d3wd3dXZg0aZJw/fr1JtcvEQRBaNoYEhEREVHrwjlARERE5HAYgIiIiMjhMAARERGRw2EAIiIiIofDAEREREQOhwGIiIiIHA4DEBERETkcBiAiIiJyOAxAREQWkEgk+M9//iN2GURkIwxARNTiPf3005BIJPUeo0ePFrs0Imql5GIXQERkidGjR+PLL780a1MqlSJVQ0StHUeAiKhVUCqVCAwMNHt4eXkBqLs8tWrVKowZMwbOzs4IDw/Hhg0bzN5//PhxjBgxAs7OzvDx8cH06dNRXl5u1ueLL75Ajx49oFQqERQUhFmzZpm9XlxcjEceeQQuLi7o0qULtm7d2rxfmoiaDQMQEbUJr732Gh577DEcPXoUkydPxsSJE3H69GkAQGVlJUaPHg0vLy/8+uuv2LBhA3bt2mUWcFatWoWZM2di+vTpOH78OLZu3YrOnTubfcYbb7yBCRMm4NixY3jggQcwadIkXLt2za7fk4hspMn7yRMRNbMpU6YIMplMcHV1NXssWbJEEARBACDMmDHD7D0DBgwQnn/+eUEQBGHNmjWCl5eXUF5ebnr9+++/F6RSqaDRaARBEITg4GBh4cKFd6wBgPDqq6+anpeXlwsSiUTYvn27zb4nEdkP5wARUaswfPhwrFq1yqzN29vb9HdcXJzZa3FxccjMzAQAnD59Gr1794arq6vp9fj4eBiNRpw9exYSiQT5+fm4995771pDr169TH+7urrC3d0dhYWFjf1KRCQiBiAiahVcXV3rXZL6IxKJBAAgCILp74b6ODs7W3Q8Jyeneu81Go1W1URELQPnABFRm3Dw4MF6zyMiIgAAkZGRyMzMREVFhen1X375BVKpFF27doW7uzs6dOiAH3/80a41E5F4OAJERK2CXq+HRqMxa5PL5fD19QUAbNiwAbGxsbjnnnvw73//G4cPH8bnn38OAJg0aRJef/11TJkyBYsXL0ZRURFmz56NxMREBAQEAAAWL16MGTNmwN/fH2PGjEFZWRl++eUXzJ49275flIjsggGIiFqFHTt2ICgoyKytW7duOHPmDIC6O7TWrVuHv/zlLwgMDMS///1vREZGAgBcXFywc+dOzJkzB/369YOLiwsee+wxfPTRR6ZjTZkyBVVVVVi6dCn++te/wtfXF3/605/s9wWJyK4kgiAIYhdBRNQUEokEmzdvxvjx48UuhYhaCc4BIiIiIofDAEREREQOh3OAiKjV45V8IrIWR4CIiIjI4TAAERERkcNhACIiIiKHwwBEREREDocBiIiIiBwOAxARERE5HAYgIiIicjgMQERERORw/j8NR9vFBg+IhwAAAABJRU5ErkJggg==", + "text/plain": [ + "
" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "data": { + "image/png": "iVBORw0KGgoAAAANSUhEUgAAAk0AAAGwCAYAAAC0HlECAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjguNCwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8fJSN1AAAACXBIWXMAAA9hAAAPYQGoP6dpAABTM0lEQVR4nO3de1yUZd4/8M/MwAwwwHAchlFENEUJD4WFqKVpoQaa1bMdUFZ3ezQzNTfdStv9ZW2JnZ/ddTM7rO6aRltqWSqpa2okeMBI8GyeUDkJw3AeYOb6/UHcOeJh0IGbgc/79ZrXy7nv78x856bnmc9e93Vft0IIIUBERERE16SUuwEiIiIiV8DQREREROQAhiYiIiIiBzA0ERERETmAoYmIiIjIAQxNRERERA5gaCIiIiJygJvcDXQkNpsNFy5cgI+PDxQKhdztEBERkQOEEKioqIDRaIRSefXxJIYmJ7pw4QLCwsLkboOIiIhuQF5eHrp27XrV/QxNTuTj4wOg8aD7+vrK3A0RERE5ory8HGFhYdLv+NUwNDlR0yk5X19fhiYiIiIXc72pNZwITkREROQAhiYiIiIiBzA0ERERETmAoYmIiIjIAQxNRERERA5gaCIiIiJyAEMTERERkQMYmoiIiIgcwNBERERE5ACGJiIiIiIHMDQREREROYChiYiIiMgBsoampUuXon///tINbuPi4rBp0yZp/5QpU6BQKOwegwcPtnsPi8WCWbNmISgoCFqtFuPHj8e5c+fsakwmE5KTk6HT6aDT6ZCcnIyysjK7mrNnz2LcuHHQarUICgrC7NmzUVdX12rfvSUKy2uRV1qN2nqr3K0QERF1WrKGpq5du2Lx4sXYt28f9u3bh5EjR+KBBx7AwYMHpZoxY8YgPz9femzcuNHuPebMmYN169YhNTUV6enpqKysRGJiIqzWXwNGUlISsrOzkZaWhrS0NGRnZyM5OVnab7VakZCQgKqqKqSnpyM1NRVr1qzB3LlzW/8gOOCxDzJx1xvfIee8We5WiIiIOi/Rzvj7+4uPPvpICCHE5MmTxQMPPHDV2rKyMuHu7i5SU1OlbefPnxdKpVKkpaUJIYQ4dOiQACAyMzOlmoyMDAFAHDlyRAghxMaNG4VSqRTnz5+Xaj799FOh0WiE2Wx2uHez2SwAtOg1jhj51nci/PlvRMbPF536vkREROT473e7mdNktVqRmpqKqqoqxMXFSdu3b98OvV6P3r17Y+rUqSgqKpL2ZWVlob6+HvHx8dI2o9GI6Oho7Nq1CwCQkZEBnU6H2NhYqWbw4MHQ6XR2NdHR0TAajVLN6NGjYbFYkJWVddWeLRYLysvL7R6tQaVUAABsQrTK+xMREdH1yR6acnJy4O3tDY1Gg+nTp2PdunWIiooCAIwdOxarVq3Ctm3b8Pbbb2Pv3r0YOXIkLBYLAKCgoABqtRr+/v527xkSEoKCggKpRq/XN/tcvV5vVxMSEmK339/fH2q1Wqq5kpSUFGmelE6nQ1hY2I0fiGtQKn4JTbZWeXsiIiJygJvcDURGRiI7OxtlZWVYs2YNJk+ejB07diAqKgqPPvqoVBcdHY1BgwYhPDwcGzZswEMPPXTV9xRCQPFL0ABg9++bqbnc/Pnz8eyzz0rPy8vLWyU4NYUmK0eaiIiIZCP7SJNarcYtt9yCQYMGISUlBQMGDMBf//rXK9aGhoYiPDwcx48fBwAYDAbU1dXBZDLZ1RUVFUkjRwaDAYWFhc3eq7i42K7m8hElk8mE+vr6ZiNQl9JoNNKVf02P1iCdnrMxNBEREclF9tB0OSGEdPrtciUlJcjLy0NoaCgAICYmBu7u7tiyZYtUk5+fj9zcXAwZMgQAEBcXB7PZjD179kg1u3fvhtlstqvJzc1Ffn6+VLN582ZoNBrExMQ4/Tu2lPKX0GRlaCIiIpKNrKfnFixYgLFjxyIsLAwVFRVITU3F9u3bkZaWhsrKSixcuBAPP/wwQkNDcfr0aSxYsABBQUF48MEHAQA6nQ5PPPEE5s6di8DAQAQEBGDevHno168f7r33XgBA3759MWbMGEydOhXLli0DAEybNg2JiYmIjIwEAMTHxyMqKgrJycl48803UVpainnz5mHq1KmtNnrUEqpfzhDy9BwREZF8ZA1NhYWFSE5ORn5+PnQ6Hfr374+0tDTcd999qKmpQU5ODv7973+jrKwMoaGhuOeee/DZZ5/Bx8dHeo93330Xbm5ueOSRR1BTU4NRo0ZhxYoVUKlUUs2qVaswe/Zs6Sq78ePHY8mSJdJ+lUqFDRs2YMaMGRg6dCg8PT2RlJSEt956q+0OxjXw9BwREZH8FEJw+MJZysvLodPpYDabnTpC9eiyDOw+VYolSbchsb/x+i8gIiIihzn6+93u5jRRc9KSA4y3REREsmFocgE8PUdERCQ/hiYXwKvniIiI5MfQ5AJ49RwREZH8GJpcAE/PERERyY+hyQXwNipERETyY2hyARxpIiIikh9DkwvgRHAiIiL5MTS5AK7TREREJD+GJhfQdPWcjXOaiIiIZMPQ5AJ4eo6IiEh+DE0uQMWr54iIiGTH0OQCePUcERGR/BiaXMCvp+dkboSIiKgTY2hyATw9R0REJD+GJhfQdHpOMDQRERHJhqHJBSiabtjLOU1ERESyYWhyATw9R0REJD+GJhfAq+eIiIjkx9DkAnj1HBERkfwYmlyASrr3HEeaiIiI5MLQ5AJ4GxUiIiL5MTS5AE4EJyIikh9Dkwv4ZaCJ6zQRERHJiKHJBfD0HBERkfwYmlyAilfPERERyY6hyQXw6jkiIiL5MTS5AJ6eIyIikh9DkwtQNd17jiNNREREsmFocgG8jQoREZH8GJpcQNPpOc5pIiIikg9DkwtQKnj1HBERkdwYmlwAr54jIiKSH0OTC+DVc0RERPJjaHIBql/+ShxpIiIikg9Dkwv4dU4TQxMREZFcGJpcgIqn54iIiGTH0OQCOBGciIhIfgxNLkAhhSaZGyEiIurEGJpcAE/PERERyY+hyQXw6jkiIiL5MTS5AF49R0REJD+GJhfA03NERETyY2hyAbx6joiISH4MTS6At1EhIiKSH0OTC2g6PcfMREREJB9ZQ9PSpUvRv39/+Pr6wtfXF3Fxcdi0aZO0XwiBhQsXwmg0wtPTEyNGjMDBgwft3sNisWDWrFkICgqCVqvF+PHjce7cObsak8mE5ORk6HQ66HQ6JCcno6yszK7m7NmzGDduHLRaLYKCgjB79mzU1dW12ndviV8yE0/PERERyUjW0NS1a1csXrwY+/btw759+zBy5Eg88MADUjB644038M4772DJkiXYu3cvDAYD7rvvPlRUVEjvMWfOHKxbtw6pqalIT09HZWUlEhMTYbVapZqkpCRkZ2cjLS0NaWlpyM7ORnJysrTfarUiISEBVVVVSE9PR2pqKtasWYO5c+e23cG4Bl49R0RE1
A6Idsbf31989NFHwmazCYPBIBYvXiztq62tFTqdTrz//vtCCCHKysqEu7u7SE1NlWrOnz8vlEqlSEtLE0IIcejQIQFAZGZmSjUZGRkCgDhy5IgQQoiNGzcKpVIpzp8/L9V8+umnQqPRCLPZfNVea2trhdlslh55eXkCwDVfcyN+yjOJ8Oe/EXGLtjr1fYmIiEgIs9ns0O93u5nTZLVakZqaiqqqKsTFxeHUqVMoKChAfHy8VKPRaDB8+HDs2rULAJCVlYX6+nq7GqPRiOjoaKkmIyMDOp0OsbGxUs3gwYOh0+nsaqKjo2E0GqWa0aNHw2KxICsr66o9p6SkSKf8dDodwsLCnHMwLiONNPH0HBERkWxkD005OTnw9vaGRqPB9OnTsW7dOkRFRaGgoAAAEBISYlcfEhIi7SsoKIBarYa/v/81a/R6fbPP1ev1djWXf46/vz/UarVUcyXz58+H2WyWHnl5eS389o75dZ2mVnl7IiIicoCb3A1ERkYiOzsbZWVlWLNmDSZPnowdO3ZI+5tuVttECNFs2+Uur7lS/Y3UXE6j0UCj0VyzF2f49eo5jjQRERHJRfaRJrVajVtuuQWDBg1CSkoKBgwYgL/+9a8wGAwA0Gykp6ioSBoVMhgMqKurg8lkumZNYWFhs88tLi62q7n8c0wmE+rr65uNQMmBE8GJiIjkJ3toupwQAhaLBRERETAYDNiyZYu0r66uDjt27MCQIUMAADExMXB3d7eryc/PR25urlQTFxcHs9mMPXv2SDW7d++G2Wy2q8nNzUV+fr5Us3nzZmg0GsTExLTq93UElxwgIiKSn6yn5xYsWICxY8ciLCwMFRUVSE1Nxfbt25GWlgaFQoE5c+Zg0aJF6NWrF3r16oVFixbBy8sLSUlJAACdTocnnngCc+fORWBgIAICAjBv3jz069cP9957LwCgb9++GDNmDKZOnYply5YBAKZNm4bExERERkYCAOLj4xEVFYXk5GS8+eabKC0txbx58zB16lT4+vrKc3AuIZ2e40gTERGRbGQNTYWFhUhOTkZ+fj50Oh369++PtLQ03HfffQCA5557DjU1NZgxYwZMJhNiY2OxefNm+Pj4SO/x7rvvws3NDY888ghqamowatQorFixAiqVSqpZtWoVZs+eLV1lN378eCxZskTar1KpsGHDBsyYMQNDhw6Fp6cnkpKS8NZbb7XRkbg2Xj1HREQkP4UQ/CV2lvLycuh0OpjNZqeOUF0oq8GQxdugVilx7LWxTntfIiIicvz3u93NaaLmpCUHmG+JiIhkw9DkAnj1HBERkfwYmlxA00gTwMngREREcmFocgGqSxbY5Ck6IiIieTA0uQDFJX8lrtVEREQkD4YmF3DpSJON958jIiKSBUOTC7h0ThNPzxEREcmDockFKC+d08SJ4ERERLJgaHIBvHqOiIhIfgxNLuCSzMTTc0RERDJhaHIBCoVCCk4caSIiIpIHQ5OLaJrXxMxEREQkD4YmF6Hk/eeIiIhkxdDkIprWauLpOSIiInkwNLkIt19GmhoYmoiIiGTB0OQiNO6NfypLg1XmToiIiDonhiYXoXFTAQBq63kfFSIiIjkwNLkIj19GmmrrOdJEREQkB4YmF9E00mRp4EgTERGRHBiaXARHmoiIiOTF0OQiPNyb5jQxNBEREcmBoclFNIUmCyeCExERyYKhyUVo3LjkABERkZwYmlzEr6fnONJEREQkB4YmF8GJ4ERERPJiaHIR0uKWPD1HREQkC4YmF8HTc0RERPJiaHIRnAhOREQkL4YmF8GRJiIiInkxNLkITgQnIiKSF0OTi+BIExERkbwYmlxE00gT5zQRERHJg6HJRTQtOcDbqBAREcmDoclFSHOaONJEREQkC4YmF+HRtLglJ4ITERHJgqHJRWh+mQhuaeDpOSIiIjkwNLkILjlAREQkL4YmFyHde44TwYmIiGTB0OQiONJEREQkL4YmF+FxyZwmIYTM3RAREXU+DE0uoik0AZwMTkREJAeGJhfheUloqq7jKToiIqK2xtDkIlRKBbTqxuBUWdsgczdERESdD0OTC/H2cAMAVFjqZe6EiIio82FociHemsbQxJEmIiKitidraEpJScEdd9wBHx8f6PV6TJgwAUePHrWrmTJlChQKhd1j8ODBdjUWiwWzZs1CUFAQtFotxo8fj3PnztnVmEwmJCcnQ6fTQafTITk5GWVlZXY1Z8+exbhx46DVahEUFITZs2ejrq6uVb77jfD2cAcAVFoYmoiIiNqarKFpx44dePrpp5GZmYktW7agoaEB8fHxqKqqsqsbM2YM8vPzpcfGjRvt9s+ZMwfr1q1Damoq0tPTUVlZicTERFitv06YTkpKQnZ2NtLS0pCWlobs7GwkJydL+61WKxISElBVVYX09HSkpqZizZo1mDt3busehBbwaRppYmgiIiJqc25yfnhaWprd8+XLl0Ov1yMrKwt33323tF2j0cBgMFzxPcxmMz7++GOsXLkS9957LwDgk08+QVhYGLZu3YrRo0fj8OHDSEtLQ2ZmJmJjYwEAH374IeLi4nD06FFERkZi8+bNOHToEPLy8mA0GgEAb7/9NqZMmYLXXnsNvr6+rXEIWqTp9FwFT88RERG1uXY1p8lsNgMAAgIC7LZv374der0evXv3xtSpU1FUVCTty8rKQn19PeLj46VtRqMR0dHR2LVrFwAgIyMDOp1OCkwAMHjwYOh0Orua6OhoKTABwOjRo2GxWJCVlXXFfi0WC8rLy+0eralpIjhHmoiIiNpeuwlNQgg8++yzGDZsGKKjo6XtY8eOxapVq7Bt2za8/fbb2Lt3L0aOHAmLxQIAKCgogFqthr+/v937hYSEoKCgQKrR6/XNPlOv19vVhISE2O339/eHWq2Wai6XkpIizZHS6XQICwu78QPgAE4EJyIiko+sp+cuNXPmTBw4cADp6el22x999FHp39HR0Rg0aBDCw8OxYcMGPPTQQ1d9PyEEFAqF9PzSf99MzaXmz5+PZ599VnpeXl7eqsHJp2nJgVouOUBERNTW2sVI06xZs7B+/Xp899136Nq16zVrQ0NDER4ejuPHjwMADAYD6urqYDKZ7OqKioqkkSODwYDCwsJm71VcXGxXc/mIkslkQn19fbMRqCYajQa+vr52j9YkzWni6TkiIqI2J2toEkJg5syZWLt2LbZt24aIiIjrvqakpAR5eXkIDQ0FAMTExMDd3R1btmyRavLz85Gbm4shQ4YAAOLi4mA2m7Fnzx6pZvfu3TCbzXY1ubm5yM/Pl2o2b94MjUaDmJgYp3zfmyXNaeLpOSIiojYn6+m5p59+GqtXr8ZXX30FHx8faaRHp9PB09MTlZWVWLhwIR5++GGEhobi9OnTWLBgAYKCgvDggw9KtU888QTmzp2LwMBABAQEYN68eejXr590NV3fvn0xZswYTJ06FcuWLQMATJs2DYmJiYiMjAQAxMfHIyoqCsnJyXjzzTdRWlqKefPmYerUqe3iyjngkjlNHGkiIiJqc7KONC1duhRmsxkjRoxAaGio9Pjss88A
ACqVCjk5OXjggQfQu3dvTJ48Gb1790ZGRgZ8fHyk93n33XcxYcIEPPLIIxg6dCi8vLzw9ddfQ6X69Sa3q1atQr9+/RAfH4/4+Hj0798fK1eulParVCps2LABHh4eGDp0KB555BFMmDABb731VtsdkOvw4dVzREREslEIIYTcTXQU5eXl0Ol0MJvNrTI6tedUKR5ZloEeQVpsmzfC6e9PRETUGTn6+90uJoKTYzgRnIiISD4MTS6ESw4QERHJh6HJhfh6Nt6wt7beBkuD9TrVRERE5EwMTS7ER+MG5S/rbJprONpERETUlhiaXIhSqZBGm8zVDE1ERERtiaHJxeiaQhNHmoiIiNoUQ5OL8fslNJVxpImIiKhNMTS5GF+ONBEREcmCocnF8PQcERGRPBiaXIyf1y+n5xiaiIiI2hRDk4tpGmkqZ2giIiJqUwxNLoan54iIiOTB0ORi/DzVAICy6jqZOyEiIupcGJpcDK+eIyIikgdDk4tpmghu4jpNREREbYqhycWEB3oBAPJKq1FvtcncDRERUefB0ORiDL4e0KpVaLAJnCmpkrsdIiKiToOhycUoFApEBGsBAKcvVsvcDRERUefB0OSCgrw1AAATr6AjIiJqMwxNLsiPV9ARERG1OYYmF+Tn1bhWE0eaiIiI2g5DkwtqWhW8jMsOEBERtRmGJhfkz5v2EhERtTmGJhfUdHqOt1IhIiJqOwxNLqjp6rmicovMnRAREXUeDE0uyKDzAAAUlNfK3AkREVHnwdDkgppCU0VtAyotDTJ3Q0RE1DkwNLkgb40bfDRuAIACM0ebiIiI2sINhab9+/cjJydHev7VV19hwoQJWLBgAerqODm5LTSNNhXyFB0REVGbuKHQ9OSTT+LYsWMAgJMnT+Kxxx6Dl5cXPv/8czz33HNObZCurCk05XOkiYiIqE3cUGg6duwYBg4cCAD4/PPPcffdd2P16tVYsWIF1qxZ48z+6CoMvr9MBjfXyNwJERFR53BDoUkIAZvNBgDYunUr7r//fgBAWFgYLl686Lzu6KpCeQUdERFRm7qh0DRo0CC8+uqrWLlyJXbs2IGEhAQAwKlTpxASEuLUBunKQppCE0/PERERtYkbCk3vvvsusrKyMHPmTLz44ou45ZZbAABffPEFhgwZ4tQG6cpCOaeJiIioTbndyIsGDBiA3NzcZtvffPNNqFSqm26Krs/g6wmAI01ERERtpUUjTdXV1Xj66afRpUsX6PV6JCUl2c1h8vDwgLu7u9ObpOaarp4rqaqDpcEqczdEREQdX4tC00svvYQVK1YgISEBjz32GLZs2YKnnnqqtXqja/D3cofarfHPx3vQERERtb4WnZ5bu3YtPv74Yzz22GMAgEmTJmHo0KGwWq08LdfGFAoFQnUeOFNSjXxzLcICvORuiYiIqENr0UhTXl4e7rrrLun5nXfeCTc3N1y4cMHpjdH1hfhy2QEiIqK20qLQZLVaoVar7ba5ubmhoYE3jZWDtFYTF7gkIiJqdS06PSeEwJQpU6DRaKRttbW1mD59OrRarbRt7dq1zuuQroq3UiEiImo7LQpNkydPbrZt0qRJTmuGWqbpViq8aS8REVHra1FoWr58eWv1QTeAC1wSERG1nRta3BIAysrKcOLECSgUCvTs2RN+fn5ObIscIU0EZ2giIiJqdS2+jcrp06eRkJCAoKAgxMbG4s4770RQUBASExNx+vTpVmiRriZU17gqeFGFBQ1Wm8zdEBERdWwtXnJg8ODBOHDgAP7yl79gzZo1+OKLL/DKK6/gp59+QlxcHM6dO+fw+6WkpOCOO+6Aj48P9Ho9JkyYgKNHj9rVCCGwcOFCGI1GeHp6YsSIETh48KBdjcViwaxZsxAUFAStVovx48c368NkMiE5ORk6nQ46nQ7JyckoKyuzqzl79izGjRsHrVaLoKAgzJ49G3V1dS05RG1K76OB2k0Jq03gQhlHm4iIiFpTi1cEj4yMxPHjxzF//nxMmDABDz74IBYsWIBjx46hd+/eeOmllxx+vx07duDpp59GZmYmtmzZgoaGBsTHx6OqqkqqeeONN/DOO+9gyZIl2Lt3LwwGA+677z5UVFRINXPmzMG6deuQmpqK9PR0VFZWIjExEVbrr7cXSUpKQnZ2NtLS0pCWlobs7GwkJydL+61WKxISElBVVYX09HSkpqZizZo1mDt3bksOUZtSKhXo9suilmdKq65TTURERDdFtEBoaKj4/vvvr7p/x44dIjQ0tCVvaaeoqEgAEDt27BBCCGGz2YTBYBCLFy+Wampra4VOpxPvv/++EEKIsrIy4e7uLlJTU6Wa8+fPC6VSKdLS0oQQQhw6dEgAEJmZmVJNRkaGACCOHDkihBBi48aNQqlUivPnz0s1n376qdBoNMJsNjvUv9lsFgAcrneGJ1bsEeHPfyP+nXG6zT6TiIioI3H097tFI00lJSXo3r37Vff36NEDJSUlNxzgzGYzACAgIAAAcOrUKRQUFCA+Pl6q0Wg0GD58OHbt2gUAyMrKQn19vV2N0WhEdHS0VJORkQGdTofY2FipZvDgwdDpdHY10dHRMBqNUs3o0aNhsViQlZV1xX4tFgvKy8vtHm0tPLBxfawzFznSRERE1JpaFJqMRmOz+USXys3NRWho6A01IoTAs88+i2HDhiE6OhoAUFBQAAAICQmxqw0JCZH2FRQUQK1Ww9/f/5o1er2+2Wfq9Xq7mss/x9/fH2q1Wqq5XEpKijRHSqfTISwsrKVf+6Z1D2w8PXe6pLrNP5uIiKgzaVFoeuCBB/DHP/4RxcXFzfYVFRXh+eefx4QJE26okZkzZ+LAgQP49NNPm+1TKBR2z4UQzbZd7vKaK9XfSM2l5s+fD7PZLD3y8vKu2VNraBppOl3CkSYiIqLW1KJ1ml566SVs3LgRPXv2xKRJk9CnTx8AwKFDh7B69WoYDAb8v//3/1rcxKxZs7B+/Xrs3LkTXbt2lbYbDAYAjaNAl45gFRUVSaNCBoMBdXV1MJlMdqNNRUVFGDJkiFRTWFjY7HOLi4vt3mf37t12+00mE+rr65uNQDXRaDR2t5SRQ6TBBwBwsrgSFystCPKWtx8iIqKOqkUjTf7+/ti9ezcmTpyI1NRUzJkzB3PmzMF//vMfJCUlISMjQ5qP5AghBGbOnIm1a9di27ZtiIiIsNsfEREBg8GALVu2SNvq6uqwY8cOKRDFxMTA3d3driY/Px+5ublSTVxcHMxmM/bs2SPV7N69G2az2a4mNzcX+fn5Us3mzZuh0WgQExPTgqPUtkJ8PXCr0Rc2Afxw4qLc7RAREXVYCiGEuJEXCiGk03TBwcHXPV12JTNmzMDq1avx1VdfITIyUtqu0+ng6dm4cOPrr7+OlJQULF++HL169cKiRYuwfft2HD16FD4+jaMsTz31FL755husWLECAQEBmDdvHkpKSpCVlQWVSgUAGDt2LC5cuIBly5YBAKZNm4bw8HB8/fXXABqXHBg4cCBCQkLw5ptvorS0FFOmTMGECRPw97//3aHvU15eDp1OB7PZDF9f3xYfjxv13Bc
/4T/7zuEP9/bGM/f2arPPJSIi6ggc/f2+4duoKBQK6PV6CCHw3XffoaamBkOGDGk2Iftali5dCgAYMWKE3fbly5djypQpAIDnnnsONTU1mDFjBkwmE2JjY7F582YpMAHAu+++Czc3NzzyyCOoqanBqFGjsGLFCikwAcCqVaswe/Zs6Sq78ePHY8mSJdJ+lUqFDRs2YMaMGRg6dCg8PT2RlJSEt956q6WHps01rdWUZ+JkcCIiotbSopGmsrIyPPPMM9i/fz8GDx6Mt99+G/fff7902X5wcDC2bNmC/v37t1rD7ZlcI01fZZ/HM6nZuLN7AP4zPa7NPpeIiKgjcPT3u0VzmubNm4eMjAw8+uijyMnJwZgxY2C1WpGRkYHdu3cjKioKL7744k03Ty3Txa/xVGZ+eY3MnRAREXVcLTo9t2nTJqxevRrDhw/H7373O4SFhWHbtm3SopGvv/46xo8f3yqN0tXpfTwAAEXlFoeWYyAiIqKWa9FIU2FhIXr37g0A6NKlCzw8POwWdOzWrdsV13Ci1qX3bVxmwNJgQ3ltg8zdEBERdUwtCk02m81ucrVKpbru4pDU+jzcVfDxaBw0LCqvlbkbIiKijqnFV8999NFH8Pb2BgA0NDRgxYoVCAoKAgBUVFQ4tztymMHXAxW1lfi5uBK9Qnyu/wIiIiJqkRaFpm7duuHDDz+UnhsMBqxcubJZDbW9u3sH43hRJTbkFGBM9I3d/4+IiIiurkWh6fTp063UBt2sIT0D8XH6KZwsrpS7FSIiog6pRXOa7r//fpjNZun5a6+9hrKyMul5SUkJoqKinNYcOS5U98uyA2bOaSIiImoNLQpNaWlpsFgs0vPXX38dpaWl0vOGhgYcPXrUed2Rw5rWaiqtqkNtvVXmboiIiDqeFoWmy93gbeuoFfh6usFL3Xhl4/kyLnJJRETkbDcVmqj9UCgU0j3ozpRUydwNERFRx9Oi0KRQKJqtxcS1mdqPnsGNS0GcLGZoIiIicrYWXT0nhMCUKVOg0TSuQF1bW4vp06dDq9UCgN18J2p7PYMb/w5r95/H74dGQKlkoCUiInKWFoWmyZMn2z2fNGlSs5rf/va3N9cR3bAHb++K93ecxKH8chw4b8bAMD+5WyIiIuowWhSali9f3lp9kBNEBGkxqq8em3ILsP1oEUMTERGRE3EieAcz5JbGW9rsO22SuRMiIqKOhaGpg7mjuz8AYN+ZUpir62XuhoiIqONgaOpgIkN80Mfgg9p6G9b9eE7udoiIiDoMhqYORqFQYPxAIwAg82TpdaqJiIjIUQxNHVBsRCAAIP3ERVRZGmTuhoiIqGNgaOqAbgvzQ0SQFpWWBmw9XCh3O0RERB0CQ1MHpFQqcF9UCABg14kSmbshIiLqGBiaOqjBPQIAABknGZqIiIicgaGpg7qjewBUSgXOllYjr7Ra7naIiIhcHkNTB+Xj4Y5B4Y1rNn2SeUbmboiIiFwfQ1MH9uTwHgAaQ1Mlr6IjIiK6KQxNHdg9kXp09fdEVZ0VWWd4WxUiIqKbwdDUgSkUCtzZvXFC+PIfTsncDRERkWtjaOrgHrq9KwBg+9Fi7DvNFcKJiIhuFENTBzesVxDGRhsAAC+szYEQQuaOiIiIXBNDUyfwYkJfAMCJokocK6yUuRsiIiLXxNDUCXT198JdvYIAABk/X5S5GyIiItfE0NRJxPyyZtOBc2aZOyEiInJNDE2dxMAwPwDAhpx87DhWLG8zRERELoihqZO4q1cw7u2rh6XBhne2HJO7HSIiIpfD0NRJqJQK/DkxCgDwU14ZzNX1MndERETkWhiaOpFuAV7w9XADAPz+X3thtXH5ASIiIkcxNHUiCoUCfxwdCQDIOmPCR9+flLkjIiIi18HQ1Mkkx3XH/w6LAACkbDrCG/kSERE5iKGpE/rdL6EJAEebiIiIHMTQ1Al18fNEQv9QAMC6H8+jwWqTuSMiIqL2j6Gpk0p5qB98PNxwpqQaC78+KHc7RERE7R5DUyfl6+EuTQr/JPMsauutMndERETUvjE0dWKTYsPh7+UOAFiwNkfmboiIiNo3hqZOTKlU4NE7ugEAdh4vhhBct4mIiOhqGJo6uWfv6w1PdxUuVtbhUH653O0QERG1W7KGpp07d2LcuHEwGo1QKBT48ssv7fZPmTIFCoXC7jF48GC7GovFglmzZiEoKAharRbjx4/HuXPn7GpMJhOSk5Oh0+mg0+mQnJyMsrIyu5qzZ89i3Lhx0Gq1CAoKwuzZs1FXV9caX7tdUbspcWdEAAAg4W/pWJlxmvObiIiIrkDW0FRVVYUBAwZgyZIlV60ZM2YM8vPzpcfGjRvt9s+ZMwfr1q1Damoq0tPTUVlZicTERFitv/7wJyUlITs7G2lpaUhLS0N2djaSk5Ol/VarFQkJCaiqqkJ6ejpSU1OxZs0azJ071/lfuh2KvzVE+vefvzqIl77i1XRERESXU4h2MpFFoVBg3bp1mDBhgrRtypQpKCsrazYC1cRsNiM4OBgrV67Eo48+CgC4cOECwsLCsHHjRowePRqHDx9GVFQUMjMzERsbCwDIzMxEXFwcjhw5gsjISGzatAmJiYnIy8uD0WgEAKSmpmLKlCkoKiqCr6/vFT/fYrHAYrFIz8vLyxEWFgaz2XzV17RHVpvA06v2I+1gAYDGm/seemU0NG4qmTsjIiJqfeXl5dDpdNf9/W73c5q2b98OvV6P3r17Y+rUqSgqKpL2ZWVlob6+HvHx8dI2o9GI6Oho7Nq1CwCQkZEBnU4nBSYAGDx4MHQ6nV1NdHS0FJgAYPTo0bBYLMjKyrpqbykpKdIpP51Oh7CwMKd977akUirw3sTbsX3eCPh5ucNqE/jxbJncbREREbUr7To0jR07FqtWrcK2bdvw9ttvY+/evRg5cqQ0ulNQUAC1Wg1/f3+714WEhKCgoECq0ev1zd5br9fb1YSEhNjt9/f3h1qtlmquZP78+TCbzdIjLy/vpr6vnJRKBboHaRH7y/ym3PNmmTsiIiJqX9zkbuBamk65AUB0dDQGDRqE8PBwbNiwAQ899NBVXyeEgEKhkJ5f+u+bqbmcRqOBRqO57vdwJZEGX3x7sBCvbjiMcQOMCPH1kLslIiKidqFdjzRdLjQ0FOHh4Th+/DgAwGAwoK6uDiaTya6uqKhIGjkyGAwoLCxs9l7FxcV2NZePKJlMJtTX1zcbgeroBnTVSf+OXfRfFJbXytgNERFR++FSoamkpAR5eXkIDW282WxMTAzc3d2xZcsWqSY/Px+5ubkYMmQIACAuLg5msxl79uyRanbv3g2z2WxXk5ubi/z8fKlm8+bN0Gg0iImJaYuv1m6M7KPHgDA/6flH35+UrxkiIqJ2RNar5yorK3HixAkAwG233YZ33nkH99xzDwICAhAQEICFCxfi4YcfRmhoKE6fPo0FCxbg7NmzOHz4MHx8fAAATz31FL755husWLECAQEBmDdvHkpKSpCVlQWVqvHqr7Fjx+LChQtYtmwZAGDatGkIDw/H119/DaBxyYGBAwciJCQEb775JkpLSzFlyhRMmD
ABf//73x3+Po7Ovm/vhBBI2XQEH+w8CS+1Cj88PxL+WrXcbREREbUKh3+/hYy+++47AaDZY/LkyaK6ulrEx8eL4OBg4e7uLrp16yYmT54szp49a/ceNTU1YubMmSIgIEB4enqKxMTEZjUlJSVi4sSJwsfHR/j4+IiJEycKk8lkV3PmzBmRkJAgPD09RUBAgJg5c6aora1t0fcxm80CgDCbzTd0PNoTm80m7v/rThH+/Dfipa9y5W6HiIio1Tj6+91u1mnqCDrKSFOT748XI/njPVApFdg2dzjCA7Vyt0REROR0HWadJpLPXb2CcVevIFhtAhty8q//AiIiog6MoYmuaURk4xpXb6QdxfmyGpm7ISIikg9DE13TyD6/Lgz61rdHZeyEiIhIXgxNdE0RQVosn3IHAODL7PM4W1Itc0dERETyYGii67qnjx53dg+AEMAPP1+Uux0iIiJZMDSRQwb3aLwn3T/TT8FcXS9zN0RERG2PoYkckhQbDr2PBseLKjH385/kboeIiKjNMTSRQww6Dyy4vy8AYOvhQqzafUbmjoiIiNoWQxM5bMgtgdK/3/z2KLguKhERdSYMTeQwvY8HUqcNBgCUVddj5uofZe6IiIio7TA0UYsM7hGIcQOMAIC0gwU4eMEsc0dERERtg6GJWuyt3/RHjyAtrDaBhL+lY9HGw7yijoiIOjyGJmoxjZsKK/83FkpF4/MPdp7Esp0/y9sUERFRK2NoohvSxc8Tz43pIz3/KvuCjN0QERG1PoYmumHTh/fE2hlDAADny2qw93SpzB0RERG1HoYmuim3d/PH/f0MAIANB/Jl7oaIiKj1MDTRTRsTHQoAWLHrND5OP8X1m4iIqENiaKKbNqqPHlGhvgCAv3xzCHtO8TQdERF1PAxNdNO0Gjd88VQcNG6N/zk9+kEmTl2skrkrIiIi52JoIqfwUrthWXKM9HzZDi5BQEREHQtDEznNiEg9bu/mBwDI5UrhRETUwTA0kVO9/chAAMDxwkpYGqzyNkNEROREDE3kVOEBXgjx1cDSYMPYv36PKkuD3C0RERE5BUMTOZVSqcBjd3QDAJwsrsIrXx+SuSMiIiLnYGgip5s9qhfe+s0AAMBn+/Lw2AcZKK6wyNwVERHRzWFoIqdTKRX4n5iuSIptHHHKPFmKaSv3cdFLIiJyaQxN1GpemxCNJ4f3AAD8eLYMJ7l2ExERuTCGJmo1CoUC88f2lZYhyD5bxtEmIiJyWQxN1Opu7+YPAJj7+U+I/FMa/pl+CscLK2TuioiIqGUYmqjVPTm8J3qHeAMA6qw2vPLNIdz37k4czi+XuTMiIiLHMTRRqwv20eDLp4fi1QnRdtvH/vV7vL35KGrruQgmERG1fwxN1Ca81G6YNDgcnzwRa7f979tO4K1vj8rUFRERkeMYmqhNDesVhNOLE7B2xhBp278yTnPlcCIiavcYmkgWt3fzx9FXxyAswBP1VoHxS9LR989p+ObABblbIyIiuiKGJpKNxk2FwRGBAICfi6tQU2/FzNU/4qWvcmG1cWkCIiJqXxiaSFbDegU12/avjDP46PuTMnRDRER0dQxNJKvE/kYk9Atttj1l0xF0f2ED3t1yDObqehk6IyIisqcQXKLZacrLy6HT6WA2m+Hr6yt3Oy6lps4KlVKBfHMNhr+53W7flCHdsXD8rfI0RkREHZ6jv98caaJ2wVOtgtpNifBALTLnj7Lbt2LXafzjuxPIPW+WqTsiIiKGJmqHDDoPbPnD3ehj8JG2vfntUcz7/CdU1zWg3mqTsTsiIuqseHrOiXh6zvn6/HkTauubh6QRkcFYknQ7vDVuMnRFREQdCU/PUYfwQfIg3NUrCD2CtXbbtx8txsvrD8LSwFuwEBFR2+D/TKd27e7ewbi7dzDqrTYcya9AsI8GSR9l4mRxFT7POoeSqjr8c8odcrdJRESdAEeayCW4q5To11UHg84DG2bdJW3fdqQIX2Wfl7EzIiLqLBiayOV4qlX4z5Nx0vP5a3NwrLBCxo6IiKgzkDU07dy5E+PGjYPRaIRCocCXX35pt18IgYULF8JoNMLT0xMjRozAwYMH7WosFgtmzZqFoKAgaLVajB8/HufOnbOrMZlMSE5Ohk6ng06nQ3JyMsrKyuxqzp49i3HjxkGr1SIoKAizZ89GXV1da3xtcoI7IwJw8OXR8PVwQ3WdFfHv7sQtCzZi3Y/nrv9iIiKiGyBraKqqqsKAAQOwZMmSK+5/44038M4772DJkiXYu3cvDAYD7rvvPlRU/DqqMGfOHKxbtw6pqalIT09HZWUlEhMTYbX+OkE4KSkJ2dnZSEtLQ1paGrKzs5GcnCztt1qtSEhIQFVVFdLT05Gamoo1a9Zg7ty5rffl6aZpNW74y4Ro6XmDTeAPn/2EBetyYKpi4CUiIudqN0sOKBQKrFu3DhMmTADQOMpkNBoxZ84cPP/88wAaR5VCQkLw+uuv48knn4TZbEZwcDBWrlyJRx99FABw4cIFhIWFYePGjRg9ejQOHz6MqKgoZGZmIjY2FgCQmZmJuLg4HDlyBJGRkdi0aRMSExORl5cHo9EIAEhNTcWUKVNQVFTk8PIBXHJAHkIILNt5Eos3HWm2LzYiAM/e1xuxPQJl6IyIiFyByy85cOrUKRQUFCA+Pl7aptFoMHz4cOzatQsAkJWVhfr6ersao9GI6OhoqSYjIwM6nU4KTAAwePBg6HQ6u5ro6GgpMAHA6NGjYbFYkJWVddUeLRYLysvL7R7U9hQKBaYP74lXLxl1arL7VCke/SAT3x0pkqEzIiLqSNptaCooKAAAhISE2G0PCQmR9hUUFECtVsPf3/+aNXq9vtn76/V6u5rLP8ff3x9qtVqquZKUlBRpnpROp0NYWFgLvyU502N3hOGlcVFI6N/8BsC//9deJH+8G/0XfottRwpl6I6IiFxduw1NTRQKhd1zIUSzbZe7vOZK9TdSc7n58+fDbDZLj7y8vGv2Ra3LTaXE74ZG4B9Jt+NUyv346aV47HphJNQqJYQAvj9+EeW1Dfj9in34IosTxomIqGXabWgyGAwA0Gykp6ioSBoVMhgMqKurg8lkumZNYWHzkYXi4mK7mss/x2Qyob6+vtkI1KU0Gg18fX3tHtQ+KBQK6DzdYfTzxPNj+zTb//6On2XoioiIXFm7DU0REREwGAzYsmWLtK2urg47duzAkCFDAAAxMTFwd3e3q8nPz0dubq5UExcXB7PZjD179kg1u3fvhtlstqvJzc1Ffn6+VLN582ZoNBrExMS06vek1vfEsAgcWBiPUyn3Y+uzwwEAJ4oqkbLpMDYfvPrpVyIiokvJehuVyspKnDhxQnp+6tQpZGdnIyAgAN26dcOcOXOwaNEi9OrVC7169cKiRYvg5eWFpKQkAIBOp8MTTzyBuXPnIjAwEAEBAZg3bx769euHe++9FwDQt29fjBkzBlOnTsWyZcsAANOmTUNiYiIiIyMBAPHx8YiKikJycjLefPNNlJaWYt68eZg6dSpHjzoIXw93AMAtem/c3s0P+8+WYdmOkwCA1x6MRtKd3
a572peIiDo3WZcc2L59O+65555m2ydPnowVK1ZACIGXX34Zy5Ytg8lkQmxsLP7xj38gOvrXq6Rqa2vxxz/+EatXr0ZNTQ1GjRqF9957z25SdmlpKWbPno3169cDAMaPH48lS5bAz89Pqjl79ixmzJiBbdu2wdPTE0lJSXjrrbeg0Wgc/j5ccsA15JVWI/7dnaipt7/Zb+7Lo+Gt4e0YiYg6G0d/v9vNOk0dAUOT6zhSUI4/rcvFvjP28+Fm3nMLnhrRE1sOFSIm3B9hAV4ydUhERG2FoUkGDE2uZ8uhQkz9976r7te4KXFXr2D8ObEvwgO1bdgZERG1FYYmGTA0uabSqjr8+atcbDiQf826CQONOF5UCbWbEmOjDZh2d8826pCIiFoTQ5MMGJpc2w8nLmLiR7sdrv965jD066prxY6IiKgtMDTJgKHJ9VkarNh57CICvdXQebqjZ7A30nILMP2TK99O56kRPZF0ZzfOfSIicmEMTTJgaOq4dp8swe5TpZgytDu2HS7CnM+ypX0+Hm54ZlQvfJF1Dn9OjIKHuxIx4QHyNUtERC3C0CQDhqbO40JZDYYs3nbV/bNH9cIzo3pBpeTaT0RE7Z2jv9/tdkVwovbM6OeJHsFXv5rub/89jp4LNmL3yZI27IqIiFoTR5qciCNNnUtheS3OmWpwoqgC+8+U4bN9V75hc4BWjaQ7u2He6Mg27pCIiBzB03MyYGjq3E5drEJplQUx4QHYcCAfT6/eb7d/3Ywh0GrcYKqqQ3QXHbS/rD5+oqgScz77ERMGdsG4AUbofTS8pQsRURtiaJIBQxNdasexYjy5ch9q620AAHeVAvXWX//PLSbcH2qVEhlXOIU3YaARDwzsghGRwQxQREStjKFJBgxNdLny2noUmGvx6LIMmKrrW/z6xQ/1w2N3dgMArPvxHPafKcOLCX3h4a5ydqtERJ2Wo7/fvDspUSvy9XCHr4c71jw1BI9/mInCcovd/h7BWsy85xbofTzw923HsftUqd3+F9bmoKfeG2v3n8OnexrnTIX4ajBzZK82+w5ERNSII01OxJEmuhZLgxVuSiVsQsBqE/j2YAFG9Q2Bt+bX/+1yzlSNbw8W4v5+Bgx7/TtYbVf+P8/H7+wGXw83jIk24LZu/hBCoLbehgvmGvQM9m6rr0RE1CHw9JwMGJrImd7bfgIrfjiNoorG0amIIC2q6xqajVZFhfriUH659NxdpUDanLsZnoiIHMTQJAOGJnI2m02goLwWAkAXP0+cKKrAS+sPIvNk6VVHoZr8fmgEnrm3F3Se7m3TLBGRi2JokgFDE7WVBqsN+86Y8J99eVi7/7y0fdGD/bBgXY70vHugFz7531ho1W44XVKFEF8PGP085WiZiKjdYmiSAUMTySHfXINlO05i2t09YPTzxJ5TpXh789Fmk8qb+Hu5o3uQFj+eLUOIrwYvj78Vd/cOxqacAniqVbi/XygqLQ3QqlVc7oCIOgWGJhkwNFF7knXGhIeX7rrh1/fSe2NibDd4adwwIjIY9VaBLn6eEEIgr7QGYQGeDFVE1CEwNMmAoYnam5/yyvB5Vh56BHlj7+lSbD9ajJp6q1PeW+2mxKODwjD0liCMiTY45T2JiOTA0CQDhiZq7yotDXh61X6Ya+qx6MF+2Hm8GLcafZG6Jw9+Xu5YtfssAOCNh/vjVEkV/rM3DyVVddd937d+MwAJ/ULhqeaim0TkehiaZMDQRB1RvdWGwvJabMopwL8zTyOvtAYAEBsRYDdvKshbg2dG3YJ9Z0zwUqvwzU/5uC3cH+9Puh1e6sa1qKosDXBTKaBxY7giovaDoUkGDE3U2VTU1uOZ1GxsO1Lk8Gt66b3x7Zy7oVRyPhQRtQ8MTTJgaKLOSAiB744W4fcr9jn8mvEDjLi7dzC+OXABwd4aTB7SHdFddK3YJRHR1TE0yYChiTqz9OMXAQBDbwmUrqrbd7oUb6QdxcELZlTVXX8C+u3d/LB8yp3QeXFBTiJqOwxNMmBoIrq6rDMmlFRaMG1l1jXr7osKwasTonGssAKBWg2CvNXQ+3q0UZdE1BkxNMmAoYno+jYfLMAXWecwe1QvRHfRIetMKR5emnHN1zw/pg8GdffHLcHecFMpoFIqpMnlREQ3i6FJBgxNRDfmeGEFgrw1+OD7k1i6/edr1ioUgBDA8N7BiOsZiCfv7sFFNonopjA0yYChiejmWG0C2XkmhAV4obbOhikr9uBkcdU1X6NSKtBL742zpdWINurw1D09cU+kvo06JqKOgKFJBgxNRM6XnVeGytoGnC+rxp+/PIg6q+26r0keHI4+oT74fN85BHlroHFTIv3ERdzWzQ8Duvrh98MioPPkZHMiasTQJAOGJqLW9XNxJc6WVKNvqC/W7D8HL7UKu34uwa4TF1FVZ8X9/QzYmFNw3fcJD/TCKw9E47ypBlsPF+KFsX3grXHDrp9LEOitRpi/J27R+7TBNyKi9oChSQYMTUTyKKqolW4o/M/0U1jy3QmUOnD7F0e8MLYP500RdXAMTTJgaCJqH4QQUCgUqK5rQHWdFWXVdQjVeWLr4UKk5Rbg5+JKHCusdPj93pt4O4K8NQgL8ESozhNF5bX4Mvs8HhjYBSFcDoHI5TE0yYChich1ZJ4swZmSKryedlQalQr20WBgmB+2HCq84mt8PNwwMTYc7+/49Qq/IT0DUWlpwAtj+6C4wgKFQoHbwvyg99XwHntELoKhSQYMTUSux2YTSNl0GHmlNXj94f7QebnDVFWHXT+XYEjPQEz8aDcO5Ze3+H0T+ofiH0m32237ubgSXfw84eHOMEXUnjA0yYChiajjsdkE0k9cxMVKCzYcyMfh/HKYquuRHBeOH8+asPe06Zqvf/a+3qi32rBs50nUNdgQ4qvB6w/3x4hIPY4XViAswIshikhmDE0yYGgi6pyW/3AKf/3vcZRV1yM+KgSbr3J6r4lKqcCd3QOQcbIEY6MNWDopBgBQaWmAp7sKKiUnnRO1JYYmGTA0EREAWBqsWP7DaSzedKTZvj4GHxwpqLDb1iNYC5tN4HRJNcIDvfD2bwagwtKAuB6BuFBWA28PN+h9OOGcqLUwNMmAoYmILnWmpAqfZJ7BiEg9NG5KXDDXYlz/UEx4bxd+yitz+H3Ubkqs/P2diO0RKG0rq66DztOdSyEQOQFDkwwYmojIESWVFhw4b8bQnkEoq67Dxpx8BPloEKrzuObNi/+U0Bf/e1cPfLb3LJ5fk4MBXXVY8bs74a9Vt2H3RB0PQ5MMGJqI6Gb993Ah3tp8DId/uWJv9shbsGLXaZTXNlzzdcN7ByMswBNrss6jpt6KPyX0xfiBRiiggMZdCV+PxtvGlNfWw9fDHduOFKKX3gdhAV433XNheS10nu6c0E4ui6FJBgxNROQstfVWKBUKqN2UyPi5BI9/mHlT7+elViE8UCuFsSZH/jIGapUSeaZqfJV9Ad8fL8bdvYIx4bYu2JSbj3e3HEdXf088NaInvsq+gOIKC15M6AulQoG/fHMIRwsrYLUJ+Hu5o3uQFgZfDzww0Ig+Bl98
kXUO50zV6BnsDX+tGv276qDzdMe/M84AAE4WV+Lu3sHIOmOCSqlAeKAW7koFSqvrUFxhQXWdFduOFKF7oBf0Ph64LyoEUUZfnDfVoE+oD7YdKcLxokqcvliFfl10GN47GBer6nBbmB+Mfp44eMEMP081uvh74qlPslBaVYdAbzV+NzQCo281XPGYl1TVoYuf500d6+upt9pgE4LreLUjDE0yYGgiotZysrgSa/afQ7cAL+w9bcL4AUacLa3Gn77MRajOA+GBXhACUCoUyDhZIne7LuG2bn5QANh/tgz3RYXgQlkNDl5oDJVfTI/D9qPF+P2wCARo1cgrrcbPxZX49mABwgK8MKK3Hv/adRqj+upx6mIVBoT5wSYE+nf1g8ZNidc3HcFH6acwY0RPGP08EeStxpjoUFhtAmP/uhOWBhvSnrkb7+/4GduPFeNPCX3xc1El3vj2KKYP74FArQZ+Xu6ICfdHVZ0Vwd4apJ8oxnlTDUqr6vHQ7V3wn315GDfAiFuCvaH85YrLY4UVOFpQAaOfJ7zUKvQNvfJv0amLVSgqr4VCoUD3IC8Ee2ugUChgtQnp6s2mlfUBoLjCgvLaetTUWREV6it93qUufe3V1FttqLfa4KV2AwBU1NbD010FN5Xyhv6GzsLQJAOGJiJqD9Jy8/HtwUI8NaInPNxUWLXnDHadKME9kcHYcawYP50z3/RnqJQKeLqrUGm59mnDK/HxcEOlpQHX+/VRKRUID/CCpcGG82U1131fpQKwtcIvWpC3BhcrLTf9Pq3V37VEhfrCVF2HBpuA1SYcuiejStkYngDgNzFdodW44V8Zp6W/V0K/UBzKL0eDzQajzhMKBeCtccfOY8Wos9oANF4lOiJSDzelAkN6BmJAmB/2nTHhnc1H8dM5M0J8Nejq74WsMyZ4uCtxR/cA+HmpYbMJDAjTYdHGI+gRrMU7jwzEgK46HC9q/B8NlbUNeHVCtNMvgGBokgFDExG1dxW19ViZeQZe7ipEGnyRdaYUvp7uMPh6IECrRliAFypq65Gy8QgG9wjEI4PCoHZTQqVUoKbeCjelArnnzbhF741Abw2EEKiz2lBlsUKraTzdVFtnw9+2Hceovnrc3s0fdVYbyqrqYRUCNXVW9A31+eW9lDhfVoMwf09UWazQuCtRXluPKosVRj8PWG1CGpHIK63GXW98BwDIfXk0dp24iME9A7EpJx9+Xmr0DvFBRJAWlZYGnL5YBZ2nOz7POgdLgxW3hfkhVOeJr7Iv4L6oEHhr3PDw0l3SD3xLDAjza9GVj3TzgrzVuFj5a9hblhxzxdOrN4OhSQYMTURErSfrjAk+Hm7oHeLjlPf7+qcL+OHERUwaHA5LgxU+Hu7oHeKDf6afwtGCCoToPPC3/x4HALzxcH+MH2iEh7sK3x0twhf7zuH+fqFY9+N5hOo88JtBXRERpEXqnjyolAr8z6CuWLr9Zxy8UI7wAC88MNCIH06UYN+ZUhy6UI5H7ghD7xBvVFmsiAn3x0vrD2LPqVK8PykGvh5uCPbR4J8/nEalpQHRRl/oPN2h1bjhWGEF0k9cRJC3Bg/d1gUHzpuxdHvjvRDDAjzxuyERmDykOz7JPIOc82Y0WG34MvsCDL4e+O2QcGzKKUBheS2m3tUDXhoVDl0ox55TpThTUg0BgT/c1xsH8sxIO1ggHSdfDzfMHtULr2443OwY9g31RXFFLS5W1sFH44Yu/p7SOmQ9grU4WVzllL8V0BieHrujG2aP6gW1m3NP53WI0LRw4UK8/PLLdttCQkJQUND4xxRC4OWXX8YHH3wAk8mE2NhY/OMf/8Ctt94q1VssFsybNw+ffvopampqMGrUKLz33nvo2rWrVGMymTB79mysX78eADB+/Hj8/e9/h5+fX4v6ZWgiIupYKmrrkW+udVpQu5ZL5xC1xOmLVfDxcEOgt8bpPdVbbVAqFFApFThSUI4qSwMO51fg9MUqzI2PhKe6cXSxtKoOfp7uzeY6WRqs+M/ePARoNRjVt/F03dofz0OrdsPYaAMKK2oRqvPE3P/8hHOmajx2ZxiUCgVG32qAEMCPZ0349mABorvo8JtBYU7/fk06TGj64osvsHXrVmmbSqVCcHAwAOD111/Ha6+9hhUrVqB379549dVXsXPnThw9ehQ+Po3/gT/11FP4+uuvsWLFCgQGBmLu3LkoLS1FVlYWVKrGP/bYsWNx7tw5fPDBBwCAadOmoXv37vj6669b1C9DExERketx9PfbrQ17uiFubm4wGJqfuxRC4P/+7//w4osv4qGHHgIA/Otf/0JISAhWr16NJ598EmazGR9//DFWrlyJe++9FwDwySefICwsDFu3bsXo0aNx+PBhpKWlITMzE7GxsQCADz/8EHFxcTh69CgiIyPb7ssSERFRuyXvNX4OOH78OIxGIyIiIvDYY4/h5MmTAIBTp06hoKAA8fHxUq1Go8Hw4cOxa9cuAEBWVhbq6+vtaoxGI6Kjo6WajIwM6HQ6KTABwODBg6HT6aSaq7FYLCgvL7d7EBERUcfUrkNTbGws/v3vf+Pbb7/Fhx9+iIKCAgwZMgQlJSXSvKaQkBC711w656mgoABqtRr+/v7XrNHr9c0+W6/XSzVXk5KSAp1OJz3CwlrvfCsRERHJq12HprFjx+Lhhx9Gv379cO+992LDhg0AGk/DNbl80pwjE+kur7lSvSPvM3/+fJjNZumRl5d33e9ERERErqldh6bLabVa9OvXD8ePH5fmOV0+GlRUVCSNPhkMBtTV1cFkMl2zprCwsNlnFRcXNxvFupxGo4Gvr6/dg4iIiDomlwpNFosFhw8fRmhoKCIiImAwGLBlyxZpf11dHXbs2IEhQ4YAAGJiYuDu7m5Xk5+fj9zcXKkmLi4OZrMZe/bskWp2794Ns9ks1RARERG166vn5s2bh3HjxqFbt24oKirCq6++ivLyckyePBkKhQJz5szBokWL0KtXL/Tq1QuLFi2Cl5cXkpKSAAA6nQ5PPPEE5s6di8DAQAQEBGDevHnS6T4A6Nu3L8aMGYOpU6di2bJlABqXHEhMTOSVc0RERCRp16Hp3LlzePzxx3Hx4kUEBwdj8ODByMzMRHh4OADgueeeQ01NDWbMmCEtbrl582ZpjSYAePfdd+Hm5oZHHnlEWtxyxYoV0hpNALBq1SrMnj1buspu/PjxWLJkSdt+WSIiImrX2vXilq6Gi1sSERG5Hkd/v11qThMRERGRXBiaiIiIiBzA0ERERETkAIYmIiIiIgcwNBERERE5oF0vOeBqmi5E5I17iYiIXEfT7/b1FhRgaHKiiooKAOCNe4mIiFxQRUUFdDrdVfdznSYnstlsuHDhAnx8fK57s9+WKC8vR1hYGPLy8rj+UyvicW47PNZtg8e5bfA4t53WOtZCCFRUVMBoNEKpvPrMJY40OZFSqUTXrl1b7f15U+C2wePcdnis2waPc9vgcW47rXGsrzXC1IQTwYmIiIgcwNBERERE5ACGJheg0Wjw0ksvQaPRyN1Kh8bj3HZ4rNsGj3Pb4HFuO3Ifa04EJyIiInI
AR5qIiIiIHMDQREREROQAhiYiIiIiBzA0ERERETmAockFvPfee4iIiICHhwdiYmLw/fffy92Sy0hJScEdd9wBHx8f6PV6TJgwAUePHrWrEUJg4cKFMBqN8PT0xIgRI3Dw4EG7GovFglmzZiEoKAharRbjx4/HuXPn2vKruJSUlBQoFArMmTNH2sbj7Dznz5/HpEmTEBgYCC8vLwwcOBBZWVnSfh7rm9fQ0IA//elPiIiIgKenJ3r06IFXXnkFNptNquFxbrmdO3di3LhxMBqNUCgU+PLLL+32O+uYmkwmJCcnQ6fTQafTITk5GWVlZTf/BQS1a6mpqcLd3V18+OGH4tChQ+KZZ54RWq1WnDlzRu7WXMLo0aPF8uXLRW5ursjOzhYJCQmiW7duorKyUqpZvHix8PHxEWvWrBE5OTni0UcfFaGhoaK8vFyqmT59uujSpYvYsmWL2L9/v7jnnnvEgAEDRENDgxxfq13bs2eP6N69u+jfv7945plnpO08zs5RWloqwsPDxZQpU8Tu3bvFqVOnxNatW8WJEyekGh7rm/fqq6+KwMBA8c0334hTp06Jzz//XHh7e4v/+7//k2p4nFtu48aN4sUXXxRr1qwRAMS6devs9jvrmI4ZM0ZER0eLXbt2iV27dono6GiRmJh40/0zNLVzd955p5g+fbrdtj59+ogXXnhBpo5cW1FRkQAgduzYIYQQwmazCYPBIBYvXizV1NbWCp1OJ95//30hhBBlZWXC3d1dpKamSjXnz58XSqVSpKWlte0XaOcqKipEr169xJYtW8Tw4cOl0MTj7DzPP/+8GDZs2FX381g7R0JCgvj9739vt+2hhx4SkyZNEkLwODvD5aHJWcf00KFDAoDIzMyUajIyMgQAceTIkZvqmafn2rG6ujpkZWUhPj7ebnt8fDx27dolU1euzWw2AwACAgIAAKdOnUJBQYHdMdZoNBg+fLh0jLOyslBfX29XYzQaER0dzb/DZZ5++mkkJCTg3nvvtdvO4+w869evx6BBg/Cb3/wGer0et912Gz788ENpP4+1cwwbNgz//e9/cezYMQDATz/9hPT0dNx///0AeJxbg7OOaUZGBnQ6HWJjY6WawYMHQ6fT3fRx5w1727GLFy/CarUiJCTEbntISAgKCgpk6sp1CSHw7LPPYtiwYYiOjgYA6The6RifOXNGqlGr1fD3929Ww7/Dr1JTU7F//37s3bu32T4eZ+c5efIkli5dimeffRYLFizAnj17MHv2bGg0Gvz2t7/lsXaS559/HmazGX369IFKpYLVasVrr72Gxx9/HAD/m24NzjqmBQUF0Ov1zd5fr9ff9HFnaHIBCoXC7rkQotk2ur6ZM2fiwIEDSE9Pb7bvRo4x/w6/ysvLwzPPPIPNmzfDw8PjqnU8zjfPZrNh0KBBWLRoEQDgtttuw8GDB7F06VL89re/lep4rG/OZ599hk8++QSrV6/GrbfeiuzsbMyZMwdGoxGTJ0+W6nicnc8Zx/RK9c447jw9144FBQVBpVI1S8ZFRUXNkjhd26xZs7B+/Xp899136Nq1q7TdYDAAwDWPscFgQF1dHUwm01VrOrusrCwUFRUhJiYGbm5ucHNzw44dO/C3v/0Nbm5u0nHicb55oaGhiIqKstvWt29fnD17FgD/m3aWP/7xj3jhhRfw2GOPoV+/fkhOTsYf/vAHpKSkAOBxbg3OOqYGgwGFhYXN3r+4uPimjztDUzumVqsRExODLVu22G3fsmULhgwZIlNXrkUIgZkzZ2Lt2rXYtm0bIiIi7PZHRETAYDDYHeO6ujrs2LFDOsYxMTFwd3e3q8nPz0dubi7/Dr8YNWoUcnJykJ2dLT0GDRqEiRMnIjs7Gz169OBxdpKhQ4c2Wzbj2LFjCA8PB8D/pp2luroaSqX9T6RKpZKWHOBxdj5nHdO4uDiYzWbs2bNHqtm9ezfMZvPNH/ebmkZOra5pyYGPP/5YHDp0SMyZM0dotVpx+vRpuVtzCU899ZTQ6XRi+/btIj8/X3pUV1dLNYsXLxY6nU6sXbtW5OTkiMcff/yKl7h27dpVbN26Vezfv1+MHDmyU1827IhLr54TgsfZWfbs2SPc3NzEa6+9Jo4fPy5WrVolvLy8xCeffCLV8FjfvMmTJ4suXbpISw6sXbtWBAUFieeee06q4XFuuYqKCvHjjz+KH3/8UQAQ77zzjvjxxx+lZXScdUzHjBkj+vfvLzIyMkRGRobo168flxzoLP7xj3+I8PBwoVarxe233y5dLk/XB+CKj+XLl0s1NptNvPTSS8JgMAiNRiPuvvtukZOTY/c+NTU1YubMmSIgIEB4enqKxMREcfbs2Tb+Nq7l8tDE4+w8X3/9tYiOjhYajUb06dNHfPDBB3b7eaxvXnl5uXjmmWdEt27dhIeHh+jRo4d48cUXhcVikWp4nFvuu+++u+L/T548ebIQwnnHtKSkREycOFH4+PgIHx8fMXHiRGEymW66f4UQQtzcWBURERFRx8c5TUREREQOYGgiIiIicgBDExEREZEDGJqIiIiIHMDQREREROQAhiYiIiIiBzA0ERERETmAoYmIiIjIAQxNREStSKFQ4Msvv5S7DSJyAoYmIuqwpkyZAoVC0ewxZswYuVsjIhfkJncDREStacyYMVi+fLndNo1GI1M3ROTKONJERB2aRqOBwWCwe/j7+wNoPHW2dOlSjB07Fp6enoiIiMDnn39u9/qcnByMHDkSnp6eCAwMxLRp01BZWWlX889//hO33norNBoNQkNDMXPmTLv9Fy9exIMPPggvLy/06tUL69evb90vTUStgqGJiDq1P//5z3j44Yfx008/YdKkSXj88cdx+PBhAEB1dTXGjBkDf39/7N27F59//jm2bt1qF4qWLl2Kp59+GtOmTUNOTg7Wr1+PW265xe4zXn75ZTzyyCM4cOAA7r//fkycOBGlpaVt+j2JyAkEEVEHNXnyZKFSqYRWq7V7vPLKK0IIIQCI6dOn270mNjZWPPXUU0IIIT744APh7+8vKisrpf0bNmwQSqVSFBQUCCGEMBqN4sUXX7xqDwDEn/70J+l5ZWWlUCgUYtOmTU77nkTUNjiniYg6tHvuuQdLly612xYQECD9Oy4uzm5fXFwcsrOzAQCHDx/GgAEDoNVqpf1Dhw6FzWbD0aNHoVAocOHCBYwaNeqaPfTv31/6t1arhY+PD4qKim70KxGRTBiaiKhD02q1zU6XXY9CoQAACCGkf1+pxtPT06H3c3d3b/Zam83Wop6ISH6c00REnVpmZmaz53369AEAREVFITs7G1VVVdL+H374AUqlEr1794aPjw+6d++O//73v23aMxHJgyNNRNShWSwWFBQU2G1zc3NDUFAQAODzzz/HoEGDMGzYMKxatQp79uzBxx9/DACYOHEiXnrpJUyePBkLFy5EcXExZs2aheTkZISEhAAAFi5ciOnTp0Ov12Ps2LGoqKjADz/8gFmzZrXtFyWiVsfQREQdWlpaGkJDQ+22RUZG4siRIwAar2xLTU3FjBkzYDAYsGrVKkRFRQEAvL
y88O233+KZZ57BHXfcAS8vLzz88MN45513pPeaPHkyamtr8e6772LevHkICgrC//zP/7TdFySiNqMQQgi5myAikoNCocC6deswYcIEuVshIhfAOU1EREREDmBoIiIiInIA5zQRUafF2QlE1BIcaSIiIiJyAEMTERERkQMYmoiIiIgcwNBERERE5ACGJiIiIiIHMDQREREROYChiYiIiMgBDE1EREREDvj/JaDEVxn3QuwAAAAASUVORK5CYII=", + "text/plain": [ + "
" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], "source": [ "import matplotlib.pyplot as plt\n", "\n", + "plt.plot(train_total_losses, label=\"Train\")\n", + "plt.ylabel(\"Losses\")\n", + "plt.xlabel('Epoch')\n", + "plt.ylim(0.6, 0.77)\n", + "plt.legend()\n", + "plt.show()\n", + "\n", + "\n", "plt.plot(train_accuracies, label=\"Train\")\n", "plt.plot(val_accuracies, label=\"Valid\")\n", "plt.ylabel(\"Accuracy\")\n", @@ -296,6 +820,7 @@ "plt.legend()\n", "plt.show()\n", "\n", + "\n", "plt.plot(remaining_weights)\n", "plt.ylabel('Remaining weights')\n", "plt.xlabel('Epoch')\n", @@ -342,10 +867,13 @@ "import yaml\n", "if True:\n", " inp = FixedVariableArrayInput((16))\n", + " print(\"pre-quant min/max:\", inp.min(), inp.max())\n", " x = quantize(inp, k=1, i=data_i, f=data_f, overflow_mode=\"WRAP\", round_mode=\"RND\")\n", "\n", " x = w0 @ x\n", " x = x + b0\n", + " print(\"pre-quant min/max:\", x.min(), x.max())\n", + "\n", " x = quantize(x, k=0, i=data_i, f=data_f, overflow_mode=\"SAT\", round_mode=\"RND\") \n", " x = w1 @ x\n", " x = x + b1\n", @@ -401,9 +929,9 @@ ], "metadata": { "kernelspec": { - "display_name": "pquantml-dev-kernel", + "display_name": "Python 3 (ipykernel)", "language": "python", - "name": "pquantml-dev-kernel" + "name": "python3" }, "language_info": { "codemirror_mode": { @@ -415,7 +943,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.12.2" + "version": "3.11.10" } }, "nbformat": 4, diff --git a/src/pquant/configs/config_ap.yaml b/src/pquant/configs/config_ap.yaml index a03cc73..c2df5fd 100644 --- a/src/pquant/configs/config_ap.yaml +++ b/src/pquant/configs/config_ap.yaml @@ -19,7 +19,7 @@ quantization_parameters: hgq_beta: 1e-5 hgq_gamma: 0.0003 hgq_heterogeneous: True - layer_specific: [] + layer_specific: {} use_high_granularity_quantization: false use_real_tanh: false use_relu_multiplier: true diff --git a/src/pquant/configs/config_autosparse.yaml b/src/pquant/configs/config_autosparse.yaml index 57ad178..a17cd92 100644 --- a/src/pquant/configs/config_autosparse.yaml +++ b/src/pquant/configs/config_autosparse.yaml @@ -22,7 +22,7 @@ quantization_parameters: hgq_beta: 1e-5 hgq_gamma: 0.0003 hgq_heterogeneous: True - layer_specific: [] + layer_specific: {} use_high_granularity_quantization: false use_real_tanh: false use_relu_multiplier: true diff --git a/src/pquant/configs/config_cs.yaml b/src/pquant/configs/config_cs.yaml index e36c4d4..6c2a825 100644 --- a/src/pquant/configs/config_cs.yaml +++ b/src/pquant/configs/config_cs.yaml @@ -18,7 +18,7 @@ quantization_parameters: hgq_beta: 1e-5 hgq_gamma: 0.0003 hgq_heterogeneous: True - layer_specific: [] + layer_specific: {} use_high_granularity_quantization: false use_real_tanh: false use_relu_multiplier: true diff --git a/src/pquant/configs/config_fitcompress.yaml b/src/pquant/configs/config_fitcompress.yaml index fd9489c..c4f7511 100644 --- a/src/pquant/configs/config_fitcompress.yaml +++ b/src/pquant/configs/config_fitcompress.yaml @@ -17,7 +17,7 @@ quantization_parameters: hgq_beta: 1e-5 hgq_gamma: 0.0003 hgq_heterogeneous: True - layer_specific: [] + layer_specific: {} use_high_granularity_quantization: false use_real_tanh: false use_relu_multiplier: true diff --git a/src/pquant/configs/config_mdmm.yaml b/src/pquant/configs/config_mdmm.yaml index 2ed0ac6..3f82de1 100644 --- a/src/pquant/configs/config_mdmm.yaml +++ b/src/pquant/configs/config_mdmm.yaml @@ -28,7 +28,7 @@ quantization_parameters: hgq_beta: 1e-5 hgq_gamma: 0.0003 
hgq_heterogeneous: True - layer_specific: [] + layer_specific: {} use_high_granularity_quantization: false use_real_tanh: false use_relu_multiplier: true diff --git a/src/pquant/configs/config_pdp.yaml b/src/pquant/configs/config_pdp.yaml index 197d60a..b80f382 100644 --- a/src/pquant/configs/config_pdp.yaml +++ b/src/pquant/configs/config_pdp.yaml @@ -20,7 +20,7 @@ quantization_parameters: hgq_beta: 1e-5 hgq_gamma: 0.0003 hgq_heterogeneous: True - layer_specific: [] + layer_specific: {} use_high_granularity_quantization: false use_real_tanh: false use_relu_multiplier: false diff --git a/src/pquant/configs/config_wanda.yaml b/src/pquant/configs/config_wanda.yaml index 4755ed2..4871787 100644 --- a/src/pquant/configs/config_wanda.yaml +++ b/src/pquant/configs/config_wanda.yaml @@ -22,7 +22,7 @@ quantization_parameters: hgq_beta: 1e-5 hgq_gamma: 0.0003 hgq_heterogeneous: True - layer_specific: [] + layer_specific: {} use_high_granularity_quantization: false use_real_tanh: false use_relu_multiplier: true diff --git a/src/pquant/configs/finetuning.yaml b/src/pquant/configs/finetuning.yaml index 7ccf9a3..e1453a1 100644 --- a/src/pquant/configs/finetuning.yaml +++ b/src/pquant/configs/finetuning.yaml @@ -8,7 +8,10 @@ quantization_parameters: enable_quantization: true hgq_gamma: 0.0003 hgq_heterogeneous: True - layer_specific: [] + layer_specific: + layer3.0.conv1: + integer_bits: 4 + fractional_bits: 6 use_high_granularity_quantization: false use_real_tanh: false use_symmetric_quantization: false diff --git a/src/pquant/core/torch/train.py b/src/pquant/core/torch/train.py index a4e940a..bf0f803 100644 --- a/src/pquant/core/torch/train.py +++ b/src/pquant/core/torch/train.py @@ -25,7 +25,7 @@ def train_model(model, config, train_func, valid_func, input_shape=None, **kwarg valid_func(model, epoch=epoch, **kwargs) post_epoch_functions(model, e, training_config.pretraining_epochs) epoch += 1 - post_pretrain_functions(model, config, kwargs['trainloader'], kwargs['loss_function'], input_shape=input_shape) + post_pretrain_functions(model, config, kwargs['trainloader'], kwargs['loss_func'], input_shape=input_shape) for r in range(training_config.rounds): for e in range(training_config.epochs): model.train() diff --git a/src/pquant/data_models/quantization_model.py b/src/pquant/data_models/quantization_model.py index 86c47f1..0edfff1 100644 --- a/src/pquant/data_models/quantization_model.py +++ b/src/pquant/data_models/quantization_model.py @@ -1,5 +1,3 @@ -from typing import List - from pydantic import BaseModel, Field @@ -16,7 +14,7 @@ class BaseQuantizationModel(BaseModel): hgq_gamma: float = Field(default=0.0003) hgq_beta: float = Field(default=1e-5) hgq_heterogeneous: bool = Field(default=True) - layer_specific: List = Field(default_factory=list) + layer_specific: dict[str, dict] = Field(default_factory=dict) use_high_granularity_quantization: bool = Field(default=False) use_real_tanh: bool = Field(default=False) overflow: str = Field(default="SAT") diff --git a/src/pquant/pruning_methods/cs.py b/src/pquant/pruning_methods/cs.py index 9a2de5c..2b013cf 100644 --- a/src/pquant/pruning_methods/cs.py +++ b/src/pquant/pruning_methods/cs.py @@ -13,7 +13,7 @@ def __init__(self, config, layer_type, *args, **kwargs): config = TuningConfig.load_from_config(config) self.config = config self.final_temp = config.pruning_parameters.final_temp - self.do_hard_mask = False + self.is_finetuning = False self.layer_type = layer_type self.is_pretraining = True @@ -33,10 +33,10 @@ def call(self, weight): return mask * 
weight def pre_finetune_function(self): - self.do_hard_mask = True + self.is_finetuning = True def get_mask(self): - if self.do_hard_mask: + if self.is_finetuning: mask = self.get_hard_mask() return mask else: diff --git a/src/pquant/pruning_methods/pdp.py b/src/pquant/pruning_methods/pdp.py index 0b5272d..aa188ee 100644 --- a/src/pquant/pruning_methods/pdp.py +++ b/src/pquant/pruning_methods/pdp.py @@ -16,7 +16,7 @@ def __init__(self, config, layer_type, *args, **kwargs): self.temp = config.pruning_parameters.temperature self.is_pretraining = True self.config = config - self.fine_tuning = False + self.is_finetuning = False self.layer_type = layer_type def build(self, input_shape): @@ -38,7 +38,7 @@ def post_round_function(self): pass def get_hard_mask(self, weight=None): - if self.fine_tuning: + if self.is_finetuning: return self.mask if weight is None: return ops.cast((self.mask >= 0.5), self.mask.dtype) @@ -53,7 +53,7 @@ def get_hard_mask(self, weight=None): return self.mask def pre_finetune_function(self): - self.fine_tuning = True + self.is_finetuning = True self.mask = ops.cast((self.mask >= 0.5), self.mask.dtype) def get_mask_structured_linear(self, weight): @@ -134,7 +134,7 @@ def get_mask(self, weight): return mask def call(self, weight): - if self.fine_tuning: + if self.is_finetuning: mask = self.mask else: if self.config.pruning_parameters.structured_pruning: diff --git a/tests/test_keras_compression_layers.py b/tests/test_keras_compression_layers.py index 87dfa57..d99f520 100644 --- a/tests/test_keras_compression_layers.py +++ b/tests/test_keras_compression_layers.py @@ -81,7 +81,7 @@ def config_pdp(): "hgq_gamma": 0.0003, "hgq_beta": 1e-5, "hgq_heterogeneous": True, - "layer_specific": [], + "layer_specific": {}, "use_high_granularity_quantization": False, "use_real_tanh": False, "use_relu_multiplier": True, @@ -120,7 +120,7 @@ def config_ap(): "hgq_gamma": 0.0003, "hgq_beta": 1e-5, "hgq_heterogeneous": True, - "layer_specific": [], + "layer_specific": {}, "use_high_granularity_quantization": False, "use_real_tanh": False, "use_relu_multiplier": True, @@ -162,7 +162,7 @@ def config_wanda(): "hgq_gamma": 0.0003, "hgq_beta": 1e-5, "hgq_heterogeneous": True, - "layer_specific": [], + "layer_specific": {}, "use_high_granularity_quantization": False, "use_real_tanh": False, "use_relu_multiplier": True, @@ -200,7 +200,7 @@ def config_cs(): "hgq_gamma": 0.0003, "hgq_beta": 1e-5, "hgq_heterogeneous": True, - "layer_specific": [], + "layer_specific": {}, "use_high_granularity_quantization": False, "use_real_tanh": False, "use_relu_multiplier": True, diff --git a/tests/test_torch_compression_layers.py b/tests/test_torch_compression_layers.py index bb6c26c..b2ec570 100644 --- a/tests/test_torch_compression_layers.py +++ b/tests/test_torch_compression_layers.py @@ -76,7 +76,7 @@ def config_pdp(): "hgq_gamma": 0.0003, "hgq_beta": 1e-5, "hgq_heterogeneous": True, - "layer_specific": [], + "layer_specific": {}, "use_high_granularity_quantization": False, "use_real_tanh": False, "use_relu_multiplier": True, @@ -115,7 +115,7 @@ def config_ap(): "hgq_gamma": 0.0003, "hgq_beta": 1e-5, "hgq_heterogeneous": True, - "layer_specific": [], + "layer_specific": {}, "use_high_granularity_quantization": False, "use_real_tanh": False, "use_relu_multiplier": True, @@ -157,7 +157,7 @@ def config_wanda(): "hgq_gamma": 0.0003, "hgq_beta": 1e-5, "hgq_heterogeneous": True, - "layer_specific": [], + "layer_specific": {}, "use_high_granularity_quantization": False, "use_real_tanh": False, "use_relu_multiplier": 
True, @@ -195,7 +195,7 @@ def config_cs(): "hgq_gamma": 0.0003, "hgq_beta": 1e-5, "hgq_heterogeneous": True, - "layer_specific": [], + "layer_specific": {}, "use_high_granularity_quantization": False, "use_real_tanh": False, "use_relu_multiplier": True, From 30ff9efc67fe3014d50966058857b514a8818842 Mon Sep 17 00:00:00 2001 From: Anastasiia Petrovych Date: Tue, 3 Feb 2026 18:17:58 +0100 Subject: [PATCH 05/20] Add minor modifications to the core functionality --- src/pquant/core/finetuning.py | 1 + src/pquant/data_models/training_model.py | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/src/pquant/core/finetuning.py b/src/pquant/core/finetuning.py index 4d4fb94..118dbf5 100644 --- a/src/pquant/core/finetuning.py +++ b/src/pquant/core/finetuning.py @@ -10,6 +10,7 @@ import yaml from pydantic import BaseModel, Field, field_validator + from pquant.core import constants from pquant.data_models.finetuning_model import BaseFinetuningModel from pquant.data_models.fitcompress_model import BaseFitCompressModel diff --git a/src/pquant/data_models/training_model.py b/src/pquant/data_models/training_model.py index 228c1f6..481f371 100644 --- a/src/pquant/data_models/training_model.py +++ b/src/pquant/data_models/training_model.py @@ -12,4 +12,4 @@ class BaseTrainingModel(BaseModel): rounds: int = Field(default=1) save_weights_epoch: int = Field(default=-1) pruning_first: bool = Field(default=False) - \ No newline at end of file + From 3c93c793c5324daf07ec2ff34c48d89e058e32ee Mon Sep 17 00:00:00 2001 From: Anastasiia Petrovych Date: Tue, 3 Feb 2026 18:21:19 +0100 Subject: [PATCH 06/20] Add parameter --- src/pquant/data_models/training_model.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/src/pquant/data_models/training_model.py b/src/pquant/data_models/training_model.py index 481f371..f03319e 100644 --- a/src/pquant/data_models/training_model.py +++ b/src/pquant/data_models/training_model.py @@ -11,5 +11,4 @@ class BaseTrainingModel(BaseModel): rewind: str = Field(default="never") rounds: int = Field(default=1) save_weights_epoch: int = Field(default=-1) - pruning_first: bool = Field(default=False) - + pruning_first: bool = Field(default=False) \ No newline at end of file From 577cd47b5e0e256e0eef1b87e33b65b1951c046d Mon Sep 17 00:00:00 2001 From: Anastasiia Petrovych Date: Tue, 3 Feb 2026 18:26:20 +0100 Subject: [PATCH 07/20] Add loss function modifications --- src/pquant/core/keras/layers.py | 14 ++++++++++---- src/pquant/core/torch/layers.py | 8 +++++--- src/pquant/pruning_methods/activation_pruning.py | 3 ++- src/pquant/pruning_methods/mdmm.py | 4 +++- src/pquant/pruning_methods/pdp.py | 2 ++ src/pquant/pruning_methods/wanda.py | 3 ++- 6 files changed, 24 insertions(+), 10 deletions(-) diff --git a/src/pquant/core/keras/layers.py b/src/pquant/core/keras/layers.py index 823ff49..860bcc4 100644 --- a/src/pquant/core/keras/layers.py +++ b/src/pquant/core/keras/layers.py @@ -1628,9 +1628,13 @@ def get_layer_keep_ratio(model): return remaining_weights / total_w return 0.0 +def is_training_stage(layer): + return False if layer.pruning_layer.is_finetune and layer.pruning_layer.is_pretraining else True + def get_model_losses(model, losses): for layer in model.layers: + loss = 0 if isinstance( layer, ( @@ -1639,14 +1643,16 @@ def get_model_losses(model, losses): PQConv1d, PQDense, ), - ): - loss = layer.pruning_layer.calculate_additional_loss() + ): + if layer.enable_pruning and is_training_stage(layer): + loss += layer.pruning_layer.calculate_additional_loss() if 
layer.enable_quantization and layer.use_hgq: loss += layer.hgq_loss() losses += loss elif isinstance(layer, PQSeparableConv2d): - loss = layer.depthwise_conv.pruning_layer.calculate_additional_loss() - loss += layer.pointwise_conv.pruning_layer.calculate_additional_loss() + if layer.enable_pruning and is_training_stage(layer): + loss += layer.depthwise_conv.pruning_layer.calculate_additional_loss() + loss += layer.pointwise_conv.pruning_layer.calculate_additional_loss() if layer.enable_quantization and layer.use_hgq: loss += layer.depthwise_conv.hgq_loss() loss += layer.pointwise_conv.hgq_loss() diff --git a/src/pquant/core/torch/layers.py b/src/pquant/core/torch/layers.py index c13228f..e30fae7 100644 --- a/src/pquant/core/torch/layers.py +++ b/src/pquant/core/torch/layers.py @@ -1533,13 +1533,15 @@ def get_layer_keep_ratio(model): return 0.0 -def get_model_losses(model, losses): +def is_training_stage(layer): + return False if layer.pruning_layer.is_finetune and layer.pruning_layer.is_pretraining else True + +def get_model_losses(model, losses): for layer in model.modules(): loss = 0.0 if isinstance(layer, (PQConv2d, PQConv1d, PQDense)): - - if layer.enable_pruning and not layer.use_fitcompress: + if layer.enable_pruning and is_training_stage(layer) and not layer.use_fitcompress: loss += layer.pruning_layer.calculate_additional_loss() if layer.use_hgq: loss += layer.hgq_loss() diff --git a/src/pquant/pruning_methods/activation_pruning.py b/src/pquant/pruning_methods/activation_pruning.py index 2cd4f2c..aacb27e 100644 --- a/src/pquant/pruning_methods/activation_pruning.py +++ b/src/pquant/pruning_methods/activation_pruning.py @@ -18,6 +18,7 @@ def __init__(self, config, layer_type, *args, **kwargs): self.activations = None self.total = 0.0 self.is_pretraining = True + self.is_finetuning = False self.threshold = ops.convert_to_tensor(config.pruning_parameters.threshold) self.t_start_collecting_batch = self.config.pruning_parameters.t_start_collecting_batch @@ -85,7 +86,7 @@ def post_round_function(self): pass def pre_finetune_function(self): - pass + self.is_finetuning = True def calculate_additional_loss(self): return 0 diff --git a/src/pquant/pruning_methods/mdmm.py b/src/pquant/pruning_methods/mdmm.py index 8140335..87ed7e6 100644 --- a/src/pquant/pruning_methods/mdmm.py +++ b/src/pquant/pruning_methods/mdmm.py @@ -29,6 +29,8 @@ def __init__(self, config, layer_type, *args, **kwargs): self.penalty_loss = None self.built = False self.is_finetuning = False + self.is_pretraining = True + def build(self, input_shape): pruning_parameters = self.config.pruning_parameters @@ -121,7 +123,7 @@ def post_epoch_function(self, epoch, total_epochs): pass def post_pre_train_function(self): - pass + self.is_pretraining = False def post_round_function(self): pass diff --git a/src/pquant/pruning_methods/pdp.py b/src/pquant/pruning_methods/pdp.py index aa188ee..99fadda 100644 --- a/src/pquant/pruning_methods/pdp.py +++ b/src/pquant/pruning_methods/pdp.py @@ -18,6 +18,8 @@ def __init__(self, config, layer_type, *args, **kwargs): self.config = config self.is_finetuning = False self.layer_type = layer_type + + def build(self, input_shape): input_shape_concatenated = list(input_shape) + [1] diff --git a/src/pquant/pruning_methods/wanda.py b/src/pquant/pruning_methods/wanda.py index 2477a87..c637c1c 100644 --- a/src/pquant/pruning_methods/wanda.py +++ b/src/pquant/pruning_methods/wanda.py @@ -20,6 +20,7 @@ def __init__(self, config, layer_type, *args, **kwargs): self.done = False self.sparsity = 
self.config.pruning_parameters.sparsity self.is_pretraining = True + self.is_finetuning = False self.N = self.config.pruning_parameters.N self.M = self.config.pruning_parameters.M self.t_start_collecting_batch = self.config.pruning_parameters.t_start_collecting_batch @@ -124,7 +125,7 @@ def post_round_function(self): pass def pre_finetune_function(self): - pass + self.is_finetuning = True def calculate_additional_loss(self): return 0 From 11b38890a0e3c4de6f78f98fdcc9c50a25856647 Mon Sep 17 00:00:00 2001 From: Anastasiia Petrovych Date: Tue, 3 Feb 2026 18:41:14 +0100 Subject: [PATCH 08/20] Add support for different fixed granularities --- src/pquant/configs/config_ap.yaml | 1 + src/pquant/configs/config_autosparse.yaml | 1 + src/pquant/configs/config_cs.yaml | 1 + src/pquant/configs/config_dst.yaml | 1 + src/pquant/configs/config_fitcompress.yaml | 1 + src/pquant/configs/config_mdmm.yaml | 1 + src/pquant/configs/config_pdp.yaml | 1 + src/pquant/configs/config_wanda.yaml | 1 + src/pquant/configs/finetuning.yaml | 1 + src/pquant/core/keras/quantizer.py | 41 +++++++++++++++++--- src/pquant/core/torch/quantizer.py | 34 ++++++++++++++-- src/pquant/data_models/quantization_model.py | 9 +++++ 12 files changed, 83 insertions(+), 10 deletions(-) diff --git a/src/pquant/configs/config_ap.yaml b/src/pquant/configs/config_ap.yaml index c2df5fd..1573d49 100644 --- a/src/pquant/configs/config_ap.yaml +++ b/src/pquant/configs/config_ap.yaml @@ -13,6 +13,7 @@ quantization_parameters: default_data_keep_negatives: 0. default_data_integer_bits: 0. default_data_fractional_bits: 7. + granularity: "per_tensor" quantize_input: true quantize_output: false enable_quantization: true diff --git a/src/pquant/configs/config_autosparse.yaml b/src/pquant/configs/config_autosparse.yaml index a17cd92..04f281b 100644 --- a/src/pquant/configs/config_autosparse.yaml +++ b/src/pquant/configs/config_autosparse.yaml @@ -16,6 +16,7 @@ quantization_parameters: default_data_keep_negatives: 0. default_data_integer_bits: 0. default_data_fractional_bits: 7. + granularity: "per_tensor" quantize_input: true quantize_output: false enable_quantization: true diff --git a/src/pquant/configs/config_cs.yaml b/src/pquant/configs/config_cs.yaml index 6c2a825..d66dbe8 100644 --- a/src/pquant/configs/config_cs.yaml +++ b/src/pquant/configs/config_cs.yaml @@ -12,6 +12,7 @@ quantization_parameters: default_data_keep_negatives: 0. default_data_integer_bits: 0. default_data_fractional_bits: 7. + granularity: "per_tensor" quantize_input: true quantize_output: false enable_quantization: true diff --git a/src/pquant/configs/config_dst.yaml b/src/pquant/configs/config_dst.yaml index 060ab6c..7adc89c 100644 --- a/src/pquant/configs/config_dst.yaml +++ b/src/pquant/configs/config_dst.yaml @@ -14,6 +14,7 @@ quantization_parameters: default_data_keep_negatives: 0. default_data_integer_bits: 0. default_data_fractional_bits: 7. + granularity: "per_tensor" quantize_input: true quantize_output: false enable_quantization: true diff --git a/src/pquant/configs/config_fitcompress.yaml b/src/pquant/configs/config_fitcompress.yaml index c4f7511..165a5c3 100644 --- a/src/pquant/configs/config_fitcompress.yaml +++ b/src/pquant/configs/config_fitcompress.yaml @@ -11,6 +11,7 @@ quantization_parameters: default_data_keep_negatives: 0. default_data_integer_bits: 0. default_data_fractional_bits: 7. 
+ granularity: "per_tensor" quantize_input: true quantize_output: false enable_quantization: true diff --git a/src/pquant/configs/config_mdmm.yaml b/src/pquant/configs/config_mdmm.yaml index 3f82de1..14602ee 100644 --- a/src/pquant/configs/config_mdmm.yaml +++ b/src/pquant/configs/config_mdmm.yaml @@ -23,6 +23,7 @@ quantization_parameters: default_data_keep_negatives: 0. default_data_integer_bits: 0. default_data_fractional_bits: 7. + granularity: "per_tensor" quantize_input: true quantize_output: false hgq_beta: 1e-5 diff --git a/src/pquant/configs/config_pdp.yaml b/src/pquant/configs/config_pdp.yaml index b80f382..5bbaf0e 100644 --- a/src/pquant/configs/config_pdp.yaml +++ b/src/pquant/configs/config_pdp.yaml @@ -14,6 +14,7 @@ quantization_parameters: default_data_keep_negatives: 0. default_data_integer_bits: 0. default_data_fractional_bits: 7. + granularity: "per_tensor" quantize_input: true quantize_output: false enable_quantization: true diff --git a/src/pquant/configs/config_wanda.yaml b/src/pquant/configs/config_wanda.yaml index 4871787..39e57f9 100644 --- a/src/pquant/configs/config_wanda.yaml +++ b/src/pquant/configs/config_wanda.yaml @@ -16,6 +16,7 @@ quantization_parameters: default_data_keep_negatives: 0. default_data_integer_bits: 0. default_data_fractional_bits: 7. + granularity: "per_tensor" quantize_input: true quantize_output: false enable_quantization: true diff --git a/src/pquant/configs/finetuning.yaml b/src/pquant/configs/finetuning.yaml index e1453a1..e5c2657 100644 --- a/src/pquant/configs/finetuning.yaml +++ b/src/pquant/configs/finetuning.yaml @@ -13,6 +13,7 @@ quantization_parameters: integer_bits: 4 fractional_bits: 6 use_high_granularity_quantization: false + granularity: "per_tensor" use_real_tanh: false use_symmetric_quantization: false training_parameters: diff --git a/src/pquant/core/keras/quantizer.py b/src/pquant/core/keras/quantizer.py index f292cb1..84e8e6d 100644 --- a/src/pquant/core/keras/quantizer.py +++ b/src/pquant/core/keras/quantizer.py @@ -1,12 +1,13 @@ import keras from keras.initializers import Constant +from keras import ops from pquant.core.quantizer_functions import create_quantizer class Quantizer(keras.layers.Layer): # HGQ quantizer wrapper - def __init__(self, k, i, f, overflow, round_mode, is_heterogeneous, is_data, hgq_gamma=0): + def __init__(self, k, i, f, overflow, round_mode, is_heterogeneous, is_data, granularity, hgq_gamma=0): super().__init__() self.k = k self.i = i @@ -17,7 +18,28 @@ def __init__(self, k, i, f, overflow, round_mode, is_heterogeneous, is_data, hgq self.quantizer = create_quantizer(self.k, self.i, self.f, overflow, round_mode, is_heterogeneous, is_data) self.is_pretraining = False self.hgq_gamma = hgq_gamma - + self.is_data = is_data + self.granularity = granularity + + def compute_dynamic_bits(self, x): + if self.granularity == "per_channel": + if ops.ndim(x) == 2: + abs_x = ops.max(ops.abs(x), axis=0, keepdims=True) + elif ops.ndim(x) == 3: + abs_x = ops.max(ops.abs(x), axis=(0, 1), keepdims=True) + elif ops.ndim(x) == 4: + abs_x = ops.max(ops.abs(x), axis=(0, 1, 2), keepdims=True) + else: + raise ValueError("Unsupported tensor rank") + elif self.granularity == "per_weight": + abs_x = ops.abs(x) + else: + raise ValueError(f"compute_dynamic_bits called for granularity={self.granularity}") + m = ops.ceil(ops.log(abs_x + 1e-6) / ops.log(2.0)) + int_bits = ops.maximum(m, 0.0) + frac_bits = ops.maximum(self.b - int_bits - self.k, 0.0) + return int_bits, frac_bits + def build(self, input_shape): 
super().build(input_shape) self.i = self.add_variable((), Constant(self.i), dtype="float32", trainable=False) @@ -52,13 +74,20 @@ def call(self, x, training=None): if not self.built: self.build(x.shape) if self.use_hgq: - x = self.quantizer(x, training=training) + return self.quantizer(x, training=training) + elif self.is_data or ops.ndim(x) == 1 or self.granularity == "per_tensor": + i, f = self.i, self.f else: - x = self.quantizer(x, k=self.k, i=self.i, f=self.f, training=training) - return x + i, f = self.compute_dynamic_bits(x) + self.i.assign(i) + self.f.assign(f) + + return self.quantizer(x, k=self.k, i=i, f=f, training=training) def hgq_loss(self): if self.is_pretraining or not self.use_hgq: return 0.0 - loss = (keras.ops.sum(self.quantizer.quantizer.i) + keras.ops.sum(self.quantizer.quantizer.f)) * self.hgq_gamma + loss = 0 + for layer_loss in self.quantizer.quantizer.losses: + loss += layer_loss return loss diff --git a/src/pquant/core/torch/quantizer.py b/src/pquant/core/torch/quantizer.py index eb530f7..330a2cc 100644 --- a/src/pquant/core/torch/quantizer.py +++ b/src/pquant/core/torch/quantizer.py @@ -5,7 +5,7 @@ class Quantizer(nn.Module): - def __init__(self, k, i, f, overflow, round_mode, is_heterogeneous, is_data, hgq_gamma=0): + def __init__(self, k, i, f, overflow, round_mode, is_heterogeneous, is_data, granularity, hgq_gamma=0): super().__init__() self.k = torch.nn.Parameter(torch.tensor(k), requires_grad=False) self.i = torch.nn.Parameter(torch.tensor(i), requires_grad=False) @@ -16,6 +16,8 @@ def __init__(self, k, i, f, overflow, round_mode, is_heterogeneous, is_data, hgq self.quantizer = create_quantizer(self.k, self.i, self.f, overflow, round_mode, is_heterogeneous, is_data) self.is_pretraining = False self.hgq_gamma = hgq_gamma + self.is_data = is_data + self.granularity = granularity def get_quantization_bits(self): if self.use_hgq: @@ -40,12 +42,36 @@ def set_quantization_bits(self, i, f): def post_pre_train_function(self): self.is_pretraining = False + def compute_dynamic_bits(self, x): + if self.granularity == "per_channel": + if torch.ndim(x) == 2: + abs_x = torch.amax(torch.abs(x), dim=1, keepdim=True) + elif torch.ndim(x) == 3: + abs_x = torch.amax(torch.abs(x), dim=(1, 2), keepdim=True) + elif torch.ndim(x) == 4: + abs_x = torch.amax(torch.abs(x), dim=(1, 2, 3), keepdim=True) + elif self.granularity == "per_weight": + abs_x = torch.abs(x) + else: + raise ValueError("The selected granularity is not supported.") + + m = torch.ceil(torch.log2(abs_x + 1e-6)) + int_bits = torch.clamp(m, min=0) + frac_bits = torch.clamp(self.b - int_bits - self.k, min=0) + return int_bits, frac_bits + + def forward(self, x): if self.use_hgq: - x = self.quantizer(x, training=self.training) + return self.quantizer(x, training=self.training) else: - x = self.quantizer(x, k=self.k, i=self.i, f=self.f, training=self.training) - return x + if self.is_data or x.ndim == 1 or self.granularity == 'per_tensor': + i, f = self.i, self.f + else: + i, f = self.compute_dynamic_bits(x) + self.i.data = i + self.f.data = f + return self.quantizer(x, k=self.k, i=i, f=f, training=self.training) def hgq_loss(self): if self.is_pretraining or not self.use_hgq: diff --git a/src/pquant/data_models/quantization_model.py b/src/pquant/data_models/quantization_model.py index 0edfff1..7cde730 100644 --- a/src/pquant/data_models/quantization_model.py +++ b/src/pquant/data_models/quantization_model.py @@ -1,6 +1,14 @@ +from typing import List +from enum import Enum from pydantic import BaseModel, Field +class 
QuantizationGranularity(str, Enum): + PER_TENSOR = "per_tensor" + PER_CHANNEL = "per_channel" + PER_WEIGHT = "per_weight" + + class BaseQuantizationModel(BaseModel): default_weight_keep_negatives: float = Field(default=1.0) default_weight_integer_bits: float = Field(default=0.0) @@ -10,6 +18,7 @@ class BaseQuantizationModel(BaseModel): default_data_fractional_bits: float = Field(default=7.0) quantize_input: bool = Field(default=True) quantize_output: bool = Field(default=False) + granularity: QuantizationGranularity = Field(default=QuantizationGranularity.PER_TENSOR) enable_quantization: bool = Field(default=True) hgq_gamma: float = Field(default=0.0003) hgq_beta: float = Field(default=1e-5) From 6867e73747150c2b240427236febd7f5a9dfc52c Mon Sep 17 00:00:00 2001 From: Anastasiia Petrovych Date: Thu, 12 Feb 2026 17:40:42 +0100 Subject: [PATCH 09/20] Add weight and biasses parameters registration and hgq quantization limitation --- src/pquant/core/keras/layers.py | 69 +++++++++-------- src/pquant/core/keras/quantizer.py | 10 ++- src/pquant/core/torch/activations.py | 3 +- src/pquant/core/torch/layers.py | 108 ++++++++++++++------------- src/pquant/core/torch/quantizer.py | 25 +++++-- 5 files changed, 119 insertions(+), 96 deletions(-) diff --git a/src/pquant/core/keras/layers.py b/src/pquant/core/keras/layers.py index 860bcc4..543c89c 100644 --- a/src/pquant/core/keras/layers.py +++ b/src/pquant/core/keras/layers.py @@ -81,6 +81,7 @@ def __init__( self.enable_pruning = config.pruning_parameters.enable_pruning self.use_fitcompress = config.fitcompress_parameters.enable_fitcompress self.hgq_gamma = config.quantization_parameters.hgq_gamma + self.granularity = config.quantization_parameters.granularity self.final_compression_done = False self.built = False self.parallelization_factor = -1 @@ -106,46 +107,47 @@ def get_output_quantization_bits(self): def build(self, input_shape): super().build(input_shape) self.weight_quantizer = Quantizer( - ops.convert_to_tensor(self.k_weight), - ops.convert_to_tensor(self.i_weight), - ops.convert_to_tensor(self.f_weight), - self.overflow, - self.round_mode, - self.use_hgq, - False, - self.hgq_gamma, + k=ops.convert_to_tensor(self.k_weight), + i=ops.convert_to_tensor(self.i_weight), + f=ops.convert_to_tensor(self.f_weight), + overflow=self.overflow, + round_mode=self.round_mode, + is_heterogeneous=self.use_hgq, + is_data=False, + granularity=self.granularity, + hgq_gamma=self.hgq_gamma, ) # if self.use_bias: self.bias_quantizer = Quantizer( - ops.convert_to_tensor(self.k_bias), - ops.convert_to_tensor(self.i_bias), - ops.convert_to_tensor(self.f_bias), - self.overflow, - self.round_mode, - self.use_hgq, - False, - self.hgq_gamma, + k=ops.convert_to_tensor(self.k_bias), + i=ops.convert_to_tensor(self.i_bias), + f=ops.convert_to_tensor(self.f_bias), + overflow=self.overflow, + round_mode=self.round_mode, + is_heterogeneous=self.use_hgq, + is_data=False, + hgq_gamma=self.hgq_gamma, ) self.input_quantizer = Quantizer( - ops.convert_to_tensor(self.k_input), - ops.convert_to_tensor(self.i_input), - ops.convert_to_tensor(self.f_input), - self.overflow, - self.round_mode, - self.use_hgq, - True, - self.hgq_gamma, + k=ops.convert_to_tensor(self.k_input), + i=ops.convert_to_tensor(self.i_input), + f=ops.convert_to_tensor(self.f_input), + overflow=self.overflow, + round_mode=self.round_mode, + is_heterogeneous=self.use_hgq, + is_data=True, + hgq_gamma=self.hgq_gamma, ) self.output_quantizer = Quantizer( - ops.convert_to_tensor(self.k_output), - 
ops.convert_to_tensor(self.i_output), - ops.convert_to_tensor(self.f_output), - self.overflow, - self.round_mode, - self.use_hgq, - True, - self.hgq_gamma, + k=ops.convert_to_tensor(self.k_output), + i=ops.convert_to_tensor(self.i_output), + f=ops.convert_to_tensor(self.f_output), + overflow=self.overflow, + round_mode=self.round_mode, + is_heterogeneous=self.use_hgq, + is_data=True, + hgq_gamma=self.hgq_gamma, ) self.input_shape = (1,) + input_shape[1:] self.n_parallel = ops.prod(input_shape[1:-1]) @@ -997,6 +999,7 @@ def __init__( self.use_hgq = config.quantization_parameters.use_high_granularity_quantization self.hgq_beta = config.quantization_parameters.hgq_beta self.quantize_input = quantize_input + self.granularity= config.quantization_parameters.granularity self.config = config self.f_weight = self.f_bias = ops.convert_to_tensor(config.quantization_parameters.default_weight_fractional_bits) self.i_weight = self.i_bias = ops.convert_to_tensor(config.quantization_parameters.default_weight_integer_bits) @@ -1629,7 +1632,7 @@ def get_layer_keep_ratio(model): return 0.0 def is_training_stage(layer): - return False if layer.pruning_layer.is_finetune and layer.pruning_layer.is_pretraining else True + return False if layer.pruning_layer.is_finetuning and layer.pruning_layer.is_pretraining else True def get_model_losses(model, losses): diff --git a/src/pquant/core/keras/quantizer.py b/src/pquant/core/keras/quantizer.py index 84e8e6d..45d2c60 100644 --- a/src/pquant/core/keras/quantizer.py +++ b/src/pquant/core/keras/quantizer.py @@ -1,13 +1,14 @@ import keras from keras.initializers import Constant from keras import ops +from enum import Enum from pquant.core.quantizer_functions import create_quantizer class Quantizer(keras.layers.Layer): # HGQ quantizer wrapper - def __init__(self, k, i, f, overflow, round_mode, is_heterogeneous, is_data, granularity, hgq_gamma=0): + def __init__(self, k, i, f, overflow, round_mode, is_heterogeneous, is_data=False, granularity="per_tensor", hgq_gamma=0): super().__init__() self.k = k self.i = i @@ -19,7 +20,10 @@ def __init__(self, k, i, f, overflow, round_mode, is_heterogeneous, is_data, gra self.is_pretraining = False self.hgq_gamma = hgq_gamma self.is_data = is_data - self.granularity = granularity + if isinstance(granularity, Enum): + self.granularity = granularity.value + else: + self.granularity = granularity def compute_dynamic_bits(self, x): if self.granularity == "per_channel": @@ -75,7 +79,7 @@ def call(self, x, training=None): self.build(x.shape) if self.use_hgq: return self.quantizer(x, training=training) - elif self.is_data or ops.ndim(x) == 1 or self.granularity == "per_tensor": + elif self.granularity == "per_tensor": i, f = self.i, self.f else: i, f = self.compute_dynamic_bits(x) diff --git a/src/pquant/core/torch/activations.py b/src/pquant/core/torch/activations.py index d9e9157..4a76f55 100644 --- a/src/pquant/core/torch/activations.py +++ b/src/pquant/core/torch/activations.py @@ -68,6 +68,7 @@ def __init__( self.hgq_gamma = config.quantization_parameters.hgq_gamma self.hgq_heterogeneous = config.quantization_parameters.hgq_heterogeneous self.use_fitcompress = config.fitcompress_parameters.enable_fitcompress + self.granularity = config.quantization_parameters.granularity self.post_fitcompress_calibration = False self.saved_inputs = [] @@ -98,7 +99,7 @@ def check_is_built(self, input_shape): round_mode=self.round_mode, is_data=True, is_heterogeneous=self.use_hgq, - hgq_gamma=self.hgq_gamma, + hgq_gamma=self.hgq_gamma, ) if self.use_hgq: 
self.input_quantizer.quantizer.build(input_shape) diff --git a/src/pquant/core/torch/layers.py b/src/pquant/core/torch/layers.py index e30fae7..680b8ad 100644 --- a/src/pquant/core/torch/layers.py +++ b/src/pquant/core/torch/layers.py @@ -76,6 +76,7 @@ def __init__( self.enable_pruning = enable_pruning if enable_pruning is not None else config.pruning_parameters.enable_pruning self.use_fitcompress = config.fitcompress_parameters.enable_fitcompress self.hgq_gamma = config.quantization_parameters.hgq_gamma + self.granularity = config.quantization_parameters.granularity self.final_compression_done = False self.built = False self.parallelization_factor = -1 @@ -91,46 +92,47 @@ def check_is_built(self, input_shape): return # Build function to delay quantizer creation until after custom i,f bits have been set self.input_quantizer = Quantizer( - torch.tensor(self.k_input), - torch.tensor(self.i_input), - torch.tensor(self.f_input), - self.overflow, - self.round_mode, - self.use_hgq, - True, - self.hgq_gamma, + k=torch.tensor(self.k_input), + i=torch.tensor(self.i_input), + f=torch.tensor(self.f_input), + overflow=self.overflow, + round_mode=self.round_mode, + is_heterogeneous=self.use_hgq, + is_data=True, + hgq_gamma=self.hgq_gamma, ) self.weight_quantizer = Quantizer( - torch.tensor(self.k_weight), - torch.tensor(self.i_weight), - torch.tensor(self.f_weight), - self.overflow, - self.round_mode, - self.use_hgq, - False, - self.hgq_gamma, + k=torch.tensor(self.k_weight), + i=torch.tensor(self.i_weight), + f=torch.tensor(self.f_weight), + overflow=self.overflow, + round_mode=self.round_mode, + is_heterogeneous=self.use_hgq, + is_data=False, + hgq_gamma=self.hgq_gamma, + granularity=self.granularity ) self.bias_quantizer = Quantizer( - torch.tensor(self.k_bias), - torch.tensor(self.i_bias), - torch.tensor(self.f_bias), - self.overflow, - self.round_mode, - self.use_hgq, - False, - self.hgq_gamma, + k=torch.tensor(self.k_bias), + i=torch.tensor(self.i_bias), + f=torch.tensor(self.f_bias), + overflow=self.overflow, + round_mode=self.round_mode, + is_heterogeneous=self.use_hgq, + is_data=False, + hgq_gamma=self.hgq_gamma, ) self.output_quantizer = Quantizer( - torch.tensor(self.k_output), - torch.tensor(self.i_output), - torch.tensor(self.f_output), - self.overflow, - self.round_mode, - self.use_hgq, - True, - self.hgq_gamma, + k=torch.tensor(self.k_output), + i=torch.tensor(self.i_output), + f=torch.tensor(self.f_output), + overflow=self.overflow, + round_mode=self.round_mode, + is_heterogeneous=self.use_hgq, + is_data=True, + hgq_gamma=self.hgq_gamma, ) self.n_parallel = ops.prod(tuple(input_shape)[1:-1]) @@ -254,12 +256,12 @@ def __init__( self.out_features = out_features self.use_fitcompress = config.fitcompress_parameters.enable_fitcompress self._weight = nn.Parameter(self.weight.clone()).to(self.weight.device) + self.register_parameter("_weight", self._weight) if bias: self._bias = nn.Parameter(self.bias.clone()).to(self.bias.device) + self.register_parameter("_bias", self._bias) else: self.register_parameter("_bias", None) - del self._parameters["weight"] - del self._parameters["bias"] self.pruning_layer.build(self._weight.shape) def ebops(self, include_mask=False): @@ -371,12 +373,12 @@ def __init__( ) self.use_fitcompress = config.fitcompress_parameters.enable_fitcompress self._weight = nn.Parameter(self.weight.clone()).to(self.weight.device) + self.register_parameter("_weight", self._weight) if bias: self._bias = nn.Parameter(self.bias.clone()).to(self.bias.device) + 
self.register_parameter("_bias", self._bias) else: self.register_parameter("_bias", None) - del self._parameters["weight"] - del self._parameters["bias"] self.pruning_layer.build(self._weight.shape) def ebops(self, include_mask=False): @@ -429,7 +431,17 @@ def apply_final_compression(self): def forward(self, x): x = self.pre_forward(x) - x = super().forward(x) + weight = self.weight + bias = self.bias + x = F.conv2d( + x, + weight, + bias, + self.stride, + self.padding, + self.dilation, + self.groups, + ) x = self.post_forward(x) return x @@ -501,13 +513,12 @@ def __init__( **kwargs, ) self.use_fitcompress = config.fitcompress_parameters.enable_fitcompress - self._weight = nn.Parameter(self.weight.clone()).to(self.weight.device) + self.register_parameter("_weight", self._weight) if bias: self._bias = nn.Parameter(self.bias.clone()).to(self.bias.device) + self.register_parameter("_bias", self._bias) else: self.register_parameter("_bias", None) - del self._parameters["weight"] - del self._parameters["bias"] self.pruning_layer.build(self._weight.shape) def ebops(self, include_mask=False): @@ -592,7 +603,6 @@ def add_compression_layers(model, config, input_shape, device="cuda"): class PQAvgPoolBase(nn.Module): - def __init__( self, config, @@ -702,7 +712,6 @@ def extra_repr(self) -> str: class PQAvgPool1d(PQAvgPoolBase, nn.AvgPool1d): - def __init__( self, config, @@ -739,7 +748,6 @@ def forward(self, x): class PQAvgPool2d(PQAvgPoolBase, nn.AvgPool2d): - def __init__( self, config, @@ -778,7 +786,6 @@ def forward(self, x): class PQBatchNorm2d(nn.BatchNorm2d): - def __init__( self, config, @@ -825,8 +832,6 @@ def __init__( self.quantize_input = quantize_input self._weight = nn.Parameter(self.weight.clone()) self._bias = nn.Parameter(self.bias.clone()) - del self._parameters["weight"] - del self._parameters["bias"] self.built = False self.final_compression_done = False self.is_pretraining = True @@ -934,7 +939,6 @@ def forward(self, input: torch.Tensor) -> torch.Tensor: class PQBatchNorm1d(nn.BatchNorm1d): - def __init__( self, config, @@ -980,9 +984,7 @@ def __init__( self.config = config self.quantize_input = quantize_input self._weight = nn.Parameter(self.weight.clone()) - self._bias = nn.Parameter(self.bias.clone()) - del self._parameters["weight"] - del self._parameters["bias"] + self.register_parameter("_weight", self._weight) self.built = False self.final_compression_done = False self.is_pretraining = True @@ -1136,7 +1138,7 @@ def add_layer_specific_quantization_to_model(name, layer, config): i = torch.tensor(layer_config["bias"]["integer_bits"]) f = torch.tensor(layer_config["bias"]["fractional_bits"]) layer.i_bias = i - layer.f_biast = f + layer.f_bias = f if "input" in layer_config: if "integer_bits" in layer_config["input"]: input_int_bits = torch.tensor(layer_config["input"]["integer_bits"]) @@ -1417,6 +1419,8 @@ def post_epoch_functions(model, epoch, total_epochs, **kwargs): for layer in model.modules(): if isinstance(layer, (PQConv2d, PQConv1d, PQDense)): layer.pruning_layer.post_epoch_function(epoch, total_epochs, **kwargs) + # if isinstance(layer, Quantizer): + # layer.post_epoch_function() def pre_epoch_functions(model, epoch, total_epochs): @@ -1534,7 +1538,7 @@ def get_layer_keep_ratio(model): def is_training_stage(layer): - return False if layer.pruning_layer.is_finetune and layer.pruning_layer.is_pretraining else True + return False if layer.pruning_layer.is_finetuning and layer.pruning_layer.is_pretraining else True def get_model_losses(model, losses): diff --git 
a/src/pquant/core/torch/quantizer.py b/src/pquant/core/torch/quantizer.py index 330a2cc..bd860e3 100644 --- a/src/pquant/core/torch/quantizer.py +++ b/src/pquant/core/torch/quantizer.py @@ -1,11 +1,11 @@ import torch import torch.nn as nn +from enum import Enum from pquant.core.quantizer_functions import create_quantizer - class Quantizer(nn.Module): - def __init__(self, k, i, f, overflow, round_mode, is_heterogeneous, is_data, granularity, hgq_gamma=0): + def __init__(self, k, i, f, overflow, round_mode, is_heterogeneous, is_data=False, granularity='per_tensor', hgq_gamma=0): super().__init__() self.k = torch.nn.Parameter(torch.tensor(k), requires_grad=False) self.i = torch.nn.Parameter(torch.tensor(i), requires_grad=False) @@ -17,7 +17,11 @@ def __init__(self, k, i, f, overflow, round_mode, is_heterogeneous, is_data, gra self.is_pretraining = False self.hgq_gamma = hgq_gamma self.is_data = is_data - self.granularity = granularity + if isinstance(granularity, Enum): + self.granularity = granularity.value + else: + self.granularity = granularity + self.b = torch.nn.Parameter(torch.tensor(self.k + self.i + self.f), requires_grad=False) def get_quantization_bits(self): if self.use_hgq: @@ -44,11 +48,11 @@ def post_pre_train_function(self): def compute_dynamic_bits(self, x): if self.granularity == "per_channel": - if torch.ndim(x) == 2: + if x.ndim == 2: abs_x = torch.amax(torch.abs(x), dim=1, keepdim=True) - elif torch.ndim(x) == 3: + elif x.ndim == 3: abs_x = torch.amax(torch.abs(x), dim=(1, 2), keepdim=True) - elif torch.ndim(x) == 4: + elif x.ndim == 4: abs_x = torch.amax(torch.abs(x), dim=(1, 2, 3), keepdim=True) elif self.granularity == "per_weight": abs_x = torch.abs(x) @@ -65,7 +69,7 @@ def forward(self, x): if self.use_hgq: return self.quantizer(x, training=self.training) else: - if self.is_data or x.ndim == 1 or self.granularity == 'per_tensor': + if self.granularity == 'per_tensor': i, f = self.i, self.f else: i, f = self.compute_dynamic_bits(x) @@ -80,3 +84,10 @@ def hgq_loss(self): for layer_loss in self.quantizer.quantizer.losses: loss += layer_loss return loss + + def post_epoch_function(self): + if self.use_hgq: + constrained_i = self.quantizer.quantizer._i.constraint(self.quantizer.quantizer._i) + self.quantizer.quantizer._i.assign(constrained_i) + constrained_f = self.quantizer.quantizer._f.constraint(self.quantizer.quantizer._f) + self.quantizer.quantizer._f.assign(constrained_f) From 084e62d0139c50e1d45e7f391e6dac3a0d093efa Mon Sep 17 00:00:00 2001 From: Anastasiia Petrovych Date: Thu, 12 Feb 2026 18:02:54 +0100 Subject: [PATCH 10/20] Modified hgq constraint comment --- src/pquant/core/torch/layers.py | 22 +++++++++++++++++----- 1 file changed, 17 insertions(+), 5 deletions(-) diff --git a/src/pquant/core/torch/layers.py b/src/pquant/core/torch/layers.py index 680b8ad..eea6221 100644 --- a/src/pquant/core/torch/layers.py +++ b/src/pquant/core/torch/layers.py @@ -513,6 +513,7 @@ def __init__( **kwargs, ) self.use_fitcompress = config.fitcompress_parameters.enable_fitcompress + self._weight = nn.Parameter(self.weight.clone()).to(self.weight.device) self.register_parameter("_weight", self._weight) if bias: self._bias = nn.Parameter(self.bias.clone()).to(self.bias.device) @@ -830,8 +831,13 @@ def __init__( self.use_fitcompress = config.fitcompress_parameters.enable_fitcompress self.config = config self.quantize_input = quantize_input - self._weight = nn.Parameter(self.weight.clone()) - self._bias = nn.Parameter(self.bias.clone()) + self._weight = 
nn.Parameter(self.weight.clone()).to(self.weight.device) + self.register_parameter("_weight", self._weight) + if self.bias: + self._bias = nn.Parameter(self.bias.clone()).to(self.bias.device) + self.register_parameter("_bias", self._bias) + else: + self.register_parameter("_bias", None) self.built = False self.final_compression_done = False self.is_pretraining = True @@ -983,7 +989,13 @@ def __init__( self.use_fitcompress = config.fitcompress_parameters.enable_fitcompress self.config = config self.quantize_input = quantize_input - self._weight = nn.Parameter(self.weight.clone()) + self._weight = nn.Parameter(self.weight.clone()).to(self.weight.device) + self.register_parameter("_weight", self._weight) + if self.bias: + self._bias = nn.Parameter(self.bias.clone()).to(self.bias.device) + self.register_parameter("_bias", self._bias) + else: + self.register_parameter("_bias", None) self.register_parameter("_weight", self._weight) self.built = False self.final_compression_done = False @@ -1419,8 +1431,8 @@ def post_epoch_functions(model, epoch, total_epochs, **kwargs): for layer in model.modules(): if isinstance(layer, (PQConv2d, PQConv1d, PQDense)): layer.pruning_layer.post_epoch_function(epoch, total_epochs, **kwargs) - # if isinstance(layer, Quantizer): - # layer.post_epoch_function() + elif isinstance(layer, Quantizer): + layer.post_epoch_function() def pre_epoch_functions(model, epoch, total_epochs): From f639d11c4bff747dcf02bd1e3341ae7570c1afbb Mon Sep 17 00:00:00 2001 From: Anastasiia Petrovych Date: Fri, 13 Feb 2026 15:30:17 +0100 Subject: [PATCH 11/20] Modified default values for pruning_first and use_relu_multiplier --- src/pquant/configs/config_ap.yaml | 4 ++-- src/pquant/configs/config_autosparse.yaml | 4 ++-- src/pquant/configs/config_cs.yaml | 4 ++-- src/pquant/configs/config_dst.yaml | 4 ++-- src/pquant/configs/config_fitcompress.yaml | 4 ++-- src/pquant/configs/config_mdmm.yaml | 4 ++-- src/pquant/configs/config_pdp.yaml | 2 +- src/pquant/configs/config_wanda.yaml | 4 ++-- src/pquant/configs/finetuning.yaml | 4 ++-- 9 files changed, 17 insertions(+), 17 deletions(-) diff --git a/src/pquant/configs/config_ap.yaml b/src/pquant/configs/config_ap.yaml index 1573d49..4655c97 100644 --- a/src/pquant/configs/config_ap.yaml +++ b/src/pquant/configs/config_ap.yaml @@ -23,7 +23,7 @@ quantization_parameters: layer_specific: {} use_high_granularity_quantization: false use_real_tanh: false - use_relu_multiplier: true + use_relu_multiplier: false use_symmetric_quantization: false overflow: SAT round_mode: RND @@ -41,7 +41,7 @@ training_parameters: epochs: 200 fine_tuning_epochs: 0 pretraining_epochs: 0 - pruning_first: false + pruning_first: true rewind: never rounds: 1 save_weights_epoch: -1 diff --git a/src/pquant/configs/config_autosparse.yaml b/src/pquant/configs/config_autosparse.yaml index 04f281b..07f59d8 100644 --- a/src/pquant/configs/config_autosparse.yaml +++ b/src/pquant/configs/config_autosparse.yaml @@ -26,7 +26,7 @@ quantization_parameters: layer_specific: {} use_high_granularity_quantization: false use_real_tanh: false - use_relu_multiplier: true + use_relu_multiplier: false use_symmetric_quantization: false overflow: SAT round_mode: RND @@ -44,7 +44,7 @@ training_parameters: epochs: 100 fine_tuning_epochs: 0 pretraining_epochs: 0 - pruning_first: false + pruning_first: true rewind: never rounds: 1 save_weights_epoch: -1.0 diff --git a/src/pquant/configs/config_cs.yaml b/src/pquant/configs/config_cs.yaml index d66dbe8..0c953df 100644 --- a/src/pquant/configs/config_cs.yaml +++ 
b/src/pquant/configs/config_cs.yaml @@ -22,7 +22,7 @@ quantization_parameters: layer_specific: {} use_high_granularity_quantization: false use_real_tanh: false - use_relu_multiplier: true + use_relu_multiplier: false use_symmetric_quantization: false overflow: SAT round_mode: RND @@ -40,7 +40,7 @@ training_parameters: epochs: 85 fine_tuning_epochs: 85 pretraining_epochs: 0 - pruning_first: false + pruning_first: true rewind: post-ticket-search rounds: 3 save_weights_epoch: 2 diff --git a/src/pquant/configs/config_dst.yaml b/src/pquant/configs/config_dst.yaml index 7adc89c..fcfdd5f 100644 --- a/src/pquant/configs/config_dst.yaml +++ b/src/pquant/configs/config_dst.yaml @@ -24,7 +24,7 @@ quantization_parameters: layer_specific: [] use_high_granularity_quantization: false use_real_tanh: false - use_relu_multiplier: true + use_relu_multiplier: false use_symmetric_quantization: false overflow: SAT round_mode: RND @@ -42,7 +42,7 @@ training_parameters: epochs: 160 fine_tuning_epochs: 0 pretraining_epochs: 0 - pruning_first: false + pruning_first: true rewind: never rounds: 1 save_weights_epoch: -1 diff --git a/src/pquant/configs/config_fitcompress.yaml b/src/pquant/configs/config_fitcompress.yaml index 165a5c3..60d4465 100644 --- a/src/pquant/configs/config_fitcompress.yaml +++ b/src/pquant/configs/config_fitcompress.yaml @@ -21,7 +21,7 @@ quantization_parameters: layer_specific: {} use_high_granularity_quantization: false use_real_tanh: false - use_relu_multiplier: true + use_relu_multiplier: false use_symmetric_quantization: false overflow: SAT round_mode: RND @@ -39,7 +39,7 @@ training_parameters: epochs: 200 fine_tuning_epochs: 0 pretraining_epochs: 100 - pruning_first: false + pruning_first: true rewind: never rounds: 1 save_weights_epoch: -1 diff --git a/src/pquant/configs/config_mdmm.yaml b/src/pquant/configs/config_mdmm.yaml index 14602ee..5dbf3e6 100644 --- a/src/pquant/configs/config_mdmm.yaml +++ b/src/pquant/configs/config_mdmm.yaml @@ -32,7 +32,7 @@ quantization_parameters: layer_specific: {} use_high_granularity_quantization: false use_real_tanh: false - use_relu_multiplier: true + use_relu_multiplier: false use_symmetric_quantization: false overflow: SAT round_mode: RND @@ -40,7 +40,7 @@ training_parameters: epochs: 200 fine_tuning_epochs: 30 pretraining_epochs: 0 - pruning_first: false + pruning_first: true rewind: never rounds: 1 save_weights_epoch: -1 diff --git a/src/pquant/configs/config_pdp.yaml b/src/pquant/configs/config_pdp.yaml index 5bbaf0e..ca3745c 100644 --- a/src/pquant/configs/config_pdp.yaml +++ b/src/pquant/configs/config_pdp.yaml @@ -41,7 +41,7 @@ training_parameters: epochs: 100 fine_tuning_epochs: 20 pretraining_epochs: 1 - pruning_first: false + pruning_first: true rewind: never rounds: 1 save_weights_epoch: -1 diff --git a/src/pquant/configs/config_wanda.yaml b/src/pquant/configs/config_wanda.yaml index 39e57f9..9b23239 100644 --- a/src/pquant/configs/config_wanda.yaml +++ b/src/pquant/configs/config_wanda.yaml @@ -26,7 +26,7 @@ quantization_parameters: layer_specific: {} use_high_granularity_quantization: false use_real_tanh: false - use_relu_multiplier: true + use_relu_multiplier: false use_symmetric_quantization: false overflow: SAT round_mode: RND @@ -44,7 +44,7 @@ training_parameters: epochs: 200 fine_tuning_epochs: 0 pretraining_epochs: 50 - pruning_first: false + pruning_first: true rewind: never rounds: 1 save_weights_epoch: -1 diff --git a/src/pquant/configs/finetuning.yaml b/src/pquant/configs/finetuning.yaml index e5c2657..3be9128 100644 --- 
a/src/pquant/configs/finetuning.yaml
+++ b/src/pquant/configs/finetuning.yaml
@@ -7,7 +7,7 @@ quantization_parameters:
   default_fractional_bits: 7.
   enable_quantization: true
   hgq_gamma: 0.0003
-  hgq_heterogeneous: True
+  hgq_heterogeneous: true
   layer_specific:
     layer3.0.conv1:
       integer_bits: 4
@@ -34,7 +34,7 @@ training_parameters:
   epochs: 2
   fine_tuning_epochs: 2
   pretraining_epochs: 0
-  pruning_first: false
+  pruning_first: true
   rewind: never
   rounds: 2
   save_weights_epoch: 2

From 9495b47bb626795bc496df14351bb41da1f5bfa4 Mon Sep 17 00:00:00 2001
From: Anastasiia Petrovych
Date: Fri, 13 Feb 2026 18:40:49 +0100
Subject: [PATCH 12/20] Add overflow modes for data and parameters separately; fix `if self.bias` condition error; fix fine-tuning module when MLflow is disabled; allow fine-tuning of parameters in the quantization/pruning/fitcompress sections

---
 docs/source/reference.md                     |  3 +-
 examples/example_jet_tagging.ipynb           |  5 ++-
 src/pquant/configs/config_ap.yaml            |  3 +-
 src/pquant/configs/config_autosparse.yaml    |  3 +-
 src/pquant/configs/config_cs.yaml            |  3 +-
 src/pquant/configs/config_dst.yaml           |  3 +-
 src/pquant/configs/config_fitcompress.yaml   |  3 +-
 src/pquant/configs/config_mdmm.yaml          |  3 +-
 src/pquant/configs/config_pdp.yaml           |  3 +-
 src/pquant/configs/config_wanda.yaml         |  3 +-
 src/pquant/configs/finetuning.yaml           |  9 +++--
 src/pquant/core/finetuning.py                | 18 ++++++---
 src/pquant/core/keras/activations.py         |  6 +--
 src/pquant/core/keras/layers.py              | 29 ++++++++-------
 src/pquant/core/keras/quantizer.py           |  4 +-
 src/pquant/core/torch/activations.py         |  7 ++--
 src/pquant/core/torch/layers.py              | 39 +++++++++++---------
 src/pquant/core/torch/quantizer.py           |  4 +-
 src/pquant/data_models/quantization_model.py |  3 +-
 tests/test_keras_compression_layers.py       | 12 ++++--
 tests/test_torch_compression_layers.py       | 12 ++++--
 21 files changed, 104 insertions(+), 71 deletions(-)

diff --git a/docs/source/reference.md b/docs/source/reference.md
index 6692474..10802c7 100644
--- a/docs/source/reference.md
+++ b/docs/source/reference.md
@@ -38,7 +38,8 @@ If you require additional parameters for the training or optimization loops, ple
 | `layer_specific` | dict | `{}` | Dictionary for per-layer quantization overrides. |
 | `use_hgq` | bool | `false` | Enable or disable High Granularity Quantization (HGQ). |
 | `use_real_tanh` | bool | `false` | Use a real `tanh` instead of hard/approximate `tanh`. |
-| `overflow` | str | `"SAT"` | Overflow handling mode (`SAT`, `SAT_SYM`, `WRAP`, `WRAP_SM`). |
+| `overflow_mode_data` | str | `"SAT"` | Overflow handling mode for the input and output quantizers (`SAT`, `SAT_SYM`, `WRAP`, `WRAP_SM`). |
+| `overflow_mode_parameters` | str | `"SAT"` | Overflow handling mode for the weight and bias quantizers (`SAT`, `SAT_SYM`, `WRAP`, `WRAP_SM`). |
 | `round_mode` | str | `"RND"` | Rounding mode (`TRN`, `RND`, `RND_CONV`, `RND_ZERO`, etc.). |
 | `use_relu_multiplier` | bool | `true` | Enable a learned bit-shift multiplier inside ReLU layers.
| diff --git a/examples/example_jet_tagging.ipynb b/examples/example_jet_tagging.ipynb index 54e917f..eb5a914 100644 --- a/examples/example_jet_tagging.ipynb +++ b/examples/example_jet_tagging.ipynb @@ -439,7 +439,7 @@ }, { "cell_type": "code", - "execution_count": 7, + "execution_count": null, "id": "ea03f950-15d5-47df-86bd-921956e98d83", "metadata": {}, "outputs": [ @@ -497,7 +497,8 @@ "config.quantization_parameters.default_data_fractional_bits = 2.\n", "config.quantization_parameters.default_weight_fractional_bits = 3.\n", "config.quantization_parameters.use_relu_multiplier = False\n", - "config.quantization_parameters.overflow = \"WRAP\"\n", + "config.quantization_parameters.overflow_mode_data = \"WRAP\"\n", + "config.quantization_parameters.overflow_mode_parameters = \"SAT\"\n", "config.quantization_parameters\n", "model = build_model(config)\n", "\n", diff --git a/src/pquant/configs/config_ap.yaml b/src/pquant/configs/config_ap.yaml index 4655c97..35085d8 100644 --- a/src/pquant/configs/config_ap.yaml +++ b/src/pquant/configs/config_ap.yaml @@ -25,7 +25,8 @@ quantization_parameters: use_real_tanh: false use_relu_multiplier: false use_symmetric_quantization: false - overflow: SAT + overflow_mode_parameters: SAT + overflow_mode_data: SAT round_mode: RND fitcompress_parameters: enable_fitcompress : false diff --git a/src/pquant/configs/config_autosparse.yaml b/src/pquant/configs/config_autosparse.yaml index 07f59d8..8372aa4 100644 --- a/src/pquant/configs/config_autosparse.yaml +++ b/src/pquant/configs/config_autosparse.yaml @@ -28,7 +28,8 @@ quantization_parameters: use_real_tanh: false use_relu_multiplier: false use_symmetric_quantization: false - overflow: SAT + overflow_mode_parameters: SAT + overflow_mode_data: SAT round_mode: RND fitcompress_parameters: enable_fitcompress : false diff --git a/src/pquant/configs/config_cs.yaml b/src/pquant/configs/config_cs.yaml index 0c953df..c27277c 100644 --- a/src/pquant/configs/config_cs.yaml +++ b/src/pquant/configs/config_cs.yaml @@ -24,7 +24,8 @@ quantization_parameters: use_real_tanh: false use_relu_multiplier: false use_symmetric_quantization: false - overflow: SAT + overflow_mode_parameters: SAT + overflow_mode_data: SAT round_mode: RND fitcompress_parameters: enable_fitcompress : false diff --git a/src/pquant/configs/config_dst.yaml b/src/pquant/configs/config_dst.yaml index fcfdd5f..5175783 100644 --- a/src/pquant/configs/config_dst.yaml +++ b/src/pquant/configs/config_dst.yaml @@ -26,7 +26,8 @@ quantization_parameters: use_real_tanh: false use_relu_multiplier: false use_symmetric_quantization: false - overflow: SAT + overflow_mode_parameters: SAT + overflow_mode_data: SAT round_mode: RND fitcompress_parameters: enable_fitcompress : false diff --git a/src/pquant/configs/config_fitcompress.yaml b/src/pquant/configs/config_fitcompress.yaml index 60d4465..583ab50 100644 --- a/src/pquant/configs/config_fitcompress.yaml +++ b/src/pquant/configs/config_fitcompress.yaml @@ -23,7 +23,8 @@ quantization_parameters: use_real_tanh: false use_relu_multiplier: false use_symmetric_quantization: false - overflow: SAT + overflow_mode_parameters: SAT + overflow_mode_data: SAT round_mode: RND fitcompress_parameters: enable_fitcompress : true diff --git a/src/pquant/configs/config_mdmm.yaml b/src/pquant/configs/config_mdmm.yaml index 5dbf3e6..4899e8f 100644 --- a/src/pquant/configs/config_mdmm.yaml +++ b/src/pquant/configs/config_mdmm.yaml @@ -34,7 +34,8 @@ quantization_parameters: use_real_tanh: false use_relu_multiplier: false 
use_symmetric_quantization: false - overflow: SAT + overflow_mode_parameters: SAT + overflow_mode_data: SAT round_mode: RND training_parameters: epochs: 200 diff --git a/src/pquant/configs/config_pdp.yaml b/src/pquant/configs/config_pdp.yaml index ca3745c..fd68fcf 100644 --- a/src/pquant/configs/config_pdp.yaml +++ b/src/pquant/configs/config_pdp.yaml @@ -25,7 +25,8 @@ quantization_parameters: use_high_granularity_quantization: false use_real_tanh: false use_relu_multiplier: false - overflow: SAT + overflow_mode_parameters: SAT + overflow_mode_data: SAT round_mode: RND fitcompress_parameters: enable_fitcompress : false diff --git a/src/pquant/configs/config_wanda.yaml b/src/pquant/configs/config_wanda.yaml index 9b23239..c47385f 100644 --- a/src/pquant/configs/config_wanda.yaml +++ b/src/pquant/configs/config_wanda.yaml @@ -28,7 +28,8 @@ quantization_parameters: use_real_tanh: false use_relu_multiplier: false use_symmetric_quantization: false - overflow: SAT + overflow_mode_parameters: SAT + overflow_mode_data: SAT round_mode: RND fitcompress_parameters: enable_fitcompress : false diff --git a/src/pquant/configs/finetuning.yaml b/src/pquant/configs/finetuning.yaml index 3be9128..58c4ca0 100644 --- a/src/pquant/configs/finetuning.yaml +++ b/src/pquant/configs/finetuning.yaml @@ -16,6 +16,8 @@ quantization_parameters: granularity: "per_tensor" use_real_tanh: false use_symmetric_quantization: false + overflow_mode_parameters: WRAP + overflow_mode_data: SAT training_parameters: batch_size: 128 optimizer: sgd @@ -49,15 +51,16 @@ fitcompress_parameters: approximate : true f_lambda : 0.5 finetuning_parameters: - experiment_name: resnet_18_experiment_2 + experiment_name: resnet_18_experiment_4 + epochs: 2 model_name: resnet18 - num_trials: 10 + num_trials: 1 sampler: type: RandomSampler hyperparameter_search: numerical: learning_rate: [1e-5, 1e-3, 0.2] - epochs: [20, 100, 20] + # epochs: [20, 100, 20] batch_size: [16, 256, 32] default_integer_bits: [0, 8, 2] categorical: diff --git a/src/pquant/core/finetuning.py b/src/pquant/core/finetuning.py index 118dbf5..cbedacf 100644 --- a/src/pquant/core/finetuning.py +++ b/src/pquant/core/finetuning.py @@ -10,7 +10,6 @@ import yaml from pydantic import BaseModel, Field, field_validator - from pquant.core import constants from pquant.data_models.finetuning_model import BaseFinetuningModel from pquant.data_models.fitcompress_model import BaseFitCompressModel @@ -38,6 +37,8 @@ def get_sampler(sampler_type, **kwargs): def log_model_by_backend(model, name, signature=None, registered_model_name=None): backend = keras.backend.backend() + print("Backend:", backend) + print("Registry keys:", constants.LOG_FUNCTIONS_REGISTRY.keys()) kwargs = { "artifact_path": name, @@ -112,7 +113,7 @@ def load_from_config(cls, config): ) def get_dict(self): - return self.model_dump() + return self.model_dump(mode="json") class TuningTask: @@ -146,7 +147,7 @@ def set_enable_mlflow(self): self.enable_mlflow = True def get_dict(self): - return self.config.model_dump() + return self.config.model_dump(mode="json") def set_objective_function(self, name: str, fn: Callable, direction: str): if not callable(fn): @@ -250,7 +251,12 @@ def objective(self, trial, model, train_func, valid_func, **kwargs): logging.info(f"Suggested {param_name} = {new_value}") applied = False - for sub_config in [self.config.training_parameters, self.config.finetuning_parameters]: + for sub_config in [ + self.config.training_parameters, + self.config.pruning_parameters, + self.config.quantization_parameters, + 
self.config.fitcompress_parameters, + ]: if hasattr(sub_config, param_name): setattr(sub_config, param_name, new_value) applied = True @@ -308,13 +314,13 @@ def objective(self, trial, model, train_func, valid_func, **kwargs): return objectives if len(objectives) > 1 else objectives[0] def run_optimization(self, model, **kwargs): + finetuning_parameters = self.config.finetuning_parameters if self.enable_mlflow: import mlflow - + if not self.tracking_uri: raise ValueError("Tracking URI must be set when MLflow logging is enabled.") mlflow.set_tracking_uri(self.tracking_uri) - finetuning_parameters = self.config.finetuning_parameters mlflow.set_experiment(finetuning_parameters.experiment_name) sampler = get_sampler(finetuning_parameters.sampler.type, **finetuning_parameters.sampler.params) diff --git a/src/pquant/core/keras/activations.py b/src/pquant/core/keras/activations.py index 7cd81cc..ec59568 100644 --- a/src/pquant/core/keras/activations.py +++ b/src/pquant/core/keras/activations.py @@ -59,7 +59,7 @@ def __init__( self.use_hgq = config.quantization_parameters.use_high_granularity_quantization self.is_pretraining = True self.round_mode = config.quantization_parameters.round_mode - self.overflow = config.quantization_parameters.overflow + self.overflow_mode_data = config.quantization_parameters.overflow_mode_data self.use_multiplier = config.quantization_parameters.use_relu_multiplier self.hgq_beta = config.quantization_parameters.hgq_beta self.hgq_gamma = config.quantization_parameters.hgq_gamma @@ -79,7 +79,7 @@ def build(self, input_shape): k=self.k_output, i=self.i_output, f=self.f_output, - overflow=self.overflow, + overflow=self.overflow_mode_data, round_mode=self.round_mode, is_data=True, is_heterogeneous=self.use_hgq, @@ -89,7 +89,7 @@ def build(self, input_shape): k=self.k_input, i=self.i_input, f=self.f_input, - overflow=self.overflow, + overflow=self.overflow_mode_data, round_mode=self.round_mode, is_data=True, is_heterogeneous=self.use_hgq, diff --git a/src/pquant/core/keras/layers.py b/src/pquant/core/keras/layers.py index 543c89c..6e1d9ae 100644 --- a/src/pquant/core/keras/layers.py +++ b/src/pquant/core/keras/layers.py @@ -76,7 +76,8 @@ def __init__( self.pruning_first = config.training_parameters.pruning_first self.enable_quantization = config.quantization_parameters.enable_quantization self.round_mode = config.quantization_parameters.round_mode - self.overflow = config.quantization_parameters.overflow + self.overflow_mode_parameters = config.quantization_parameters.overflow_mode_parameters + self.overflow_mode_data = config.quantization_parameters.overflow_mode_data self.use_hgq = config.quantization_parameters.use_high_granularity_quantization self.enable_pruning = config.pruning_parameters.enable_pruning self.use_fitcompress = config.fitcompress_parameters.enable_fitcompress @@ -110,7 +111,7 @@ def build(self, input_shape): k=ops.convert_to_tensor(self.k_weight), i=ops.convert_to_tensor(self.i_weight), f=ops.convert_to_tensor(self.f_weight), - overflow=self.overflow, + overflow=self.overflow_mode_parameters, round_mode=self.round_mode, is_heterogeneous=self.use_hgq, is_data=False, @@ -123,7 +124,7 @@ def build(self, input_shape): k=ops.convert_to_tensor(self.k_bias), i=ops.convert_to_tensor(self.i_bias), f=ops.convert_to_tensor(self.f_bias), - overflow=self.overflow, + overflow=self.overflow_mode_parameters, round_mode=self.round_mode, is_heterogeneous=self.use_hgq, is_data=False, @@ -133,7 +134,7 @@ def build(self, input_shape): k=ops.convert_to_tensor(self.k_input), 
i=ops.convert_to_tensor(self.i_input), f=ops.convert_to_tensor(self.f_input), - overflow=self.overflow, + overflow=self.overflow_mode_data, round_mode=self.round_mode, is_heterogeneous=self.use_hgq, is_data=True, @@ -143,7 +144,7 @@ def build(self, input_shape): k=ops.convert_to_tensor(self.k_output), i=ops.convert_to_tensor(self.i_output), f=ops.convert_to_tensor(self.f_output), - overflow=self.overflow, + overflow=self.overflow_mode_data, round_mode=self.round_mode, is_heterogeneous=self.use_hgq, is_data=True, @@ -990,7 +991,8 @@ def __init__( synchronized, **kwargs, ) - self.overflow = config.quantization_parameters.overflow + self.overflow_mode_parameters = config.quantization_parameters.overflow_mode_parameters + self.overflow_mode_data = config.quantization_parameters.overflow_mode_data self.round_mode = config.quantization_parameters.round_mode self.hgq_gamma = config.quantization_parameters.hgq_gamma self.data_k = config.quantization_parameters.default_data_keep_negatives @@ -1014,7 +1016,7 @@ def build(self, input_shape): k=1.0, i=self.i_input, f=self.f_input, - overflow=self.overflow, + overflow=self.overflow_mode_data, round_mode=self.round_mode, is_heterogeneous=self.use_hgq, is_data=True, @@ -1025,7 +1027,7 @@ def build(self, input_shape): i=self.i_weight, f=self.f_weight, round_mode=self.round_mode, - overflow=self.overflow, + overflow=self.overflow_mode_parameters, is_data=False, is_heterogeneous=self.use_hgq, ) @@ -1034,7 +1036,7 @@ def build(self, input_shape): i=self.i_bias, f=self.f_bias, round_mode=self.round_mode, - overflow=self.overflow, + overflow=self.overflow_mode_parameters, is_data=False, is_heterogeneous=self.use_hgq, ) @@ -1171,8 +1173,7 @@ def __init__( self.k_output = config.quantization_parameters.default_data_keep_negatives self.i_output = config.quantization_parameters.default_data_integer_bits self.f_output = config.quantization_parameters.default_data_fractional_bits - - self.overflow = config.quantization_parameters.overflow + self.overflow_mode_data = config.quantization_parameters.overflow_mode_data self.config = config self.is_pretraining = True self.round_mode = config.quantization_parameters.round_mode @@ -1194,7 +1195,7 @@ def build(self, input_shape): k=1.0, i=self.i_input, f=self.f_input, - overflow=self.overflow, + overflow=self.overflow_mode_data, round_mode=self.round_mode, is_heterogeneous=self.use_hgq, is_data=True, @@ -1204,7 +1205,7 @@ def build(self, input_shape): k=1.0, i=self.i_output, f=self.f_output, - overflow=self.overflow, + overflow=self.overflow_mode_data, round_mode=self.round_mode, is_heterogeneous=self.use_hgq, is_data=True, @@ -1265,7 +1266,7 @@ def get_config(self): "i_output": self.i_output, "f_output": self.f_output, "is_pretraining": self.is_pretraining, - "overflow": self.overflow, + "overflow": self.overflow_mode_data, "hgq_gamma": self.hgq_gamma, "hgq_heterogeneous": self.hgq_heterogeneous, "pooling": self.pooling, diff --git a/src/pquant/core/keras/quantizer.py b/src/pquant/core/keras/quantizer.py index 45d2c60..1375b16 100644 --- a/src/pquant/core/keras/quantizer.py +++ b/src/pquant/core/keras/quantizer.py @@ -16,10 +16,10 @@ def __init__(self, k, i, f, overflow, round_mode, is_heterogeneous, is_data=Fals self.overflow = overflow self.round_mode = round_mode self.use_hgq = is_heterogeneous - self.quantizer = create_quantizer(self.k, self.i, self.f, overflow, round_mode, is_heterogeneous, is_data) + self.is_data = is_data + self.quantizer = create_quantizer(self.k, self.i, self.f, self.overflow, self.round_mode, 
self.use_hgq, self.is_data) self.is_pretraining = False self.hgq_gamma = hgq_gamma - self.is_data = is_data if isinstance(granularity, Enum): self.granularity = granularity.value else: diff --git a/src/pquant/core/torch/activations.py b/src/pquant/core/torch/activations.py index 4a76f55..749fdeb 100644 --- a/src/pquant/core/torch/activations.py +++ b/src/pquant/core/torch/activations.py @@ -62,7 +62,8 @@ def __init__( self.use_hgq = config.quantization_parameters.use_high_granularity_quantization self.is_pretraining = True self.round_mode = config.quantization_parameters.round_mode - self.overflow = config.quantization_parameters.overflow + self.overflow_mode_parameters = config.quantization_parameters.overflow_mode_parameters + self.overflow_mode_data = config.quantization_parameters.overflow_mode_data self.use_multiplier = config.quantization_parameters.use_relu_multiplier self.hgq_beta = config.quantization_parameters.hgq_beta self.hgq_gamma = config.quantization_parameters.hgq_gamma @@ -85,7 +86,7 @@ def check_is_built(self, input_shape): k=self.k_output, i=self.i_output, f=self.f_output, - overflow=self.overflow, + overflow=self.overflow_mode_data, round_mode=self.round_mode, is_data=True, is_heterogeneous=self.use_hgq, @@ -95,7 +96,7 @@ def check_is_built(self, input_shape): k=self.k_input, i=self.i_input, f=self.f_input, - overflow=self.overflow, + overflow=self.overflow_mode_data, round_mode=self.round_mode, is_data=True, is_heterogeneous=self.use_hgq, diff --git a/src/pquant/core/torch/layers.py b/src/pquant/core/torch/layers.py index eea6221..3ce6058 100644 --- a/src/pquant/core/torch/layers.py +++ b/src/pquant/core/torch/layers.py @@ -71,7 +71,8 @@ def __init__( self.pruning_first = config.training_parameters.pruning_first self.enable_quantization = config.quantization_parameters.enable_quantization self.round_mode = config.quantization_parameters.round_mode - self.overflow = config.quantization_parameters.overflow + self.overflow_mode_parameters = config.quantization_parameters.overflow_mode_parameters + self.overflow_mode_data = config.quantization_parameters.overflow_mode_data self.use_hgq = config.quantization_parameters.use_high_granularity_quantization self.enable_pruning = enable_pruning if enable_pruning is not None else config.pruning_parameters.enable_pruning self.use_fitcompress = config.fitcompress_parameters.enable_fitcompress @@ -95,7 +96,7 @@ def check_is_built(self, input_shape): k=torch.tensor(self.k_input), i=torch.tensor(self.i_input), f=torch.tensor(self.f_input), - overflow=self.overflow, + overflow=self.overflow_mode_data, round_mode=self.round_mode, is_heterogeneous=self.use_hgq, is_data=True, @@ -105,7 +106,7 @@ def check_is_built(self, input_shape): k=torch.tensor(self.k_weight), i=torch.tensor(self.i_weight), f=torch.tensor(self.f_weight), - overflow=self.overflow, + overflow=self.overflow_mode_parameters, round_mode=self.round_mode, is_heterogeneous=self.use_hgq, is_data=False, @@ -117,7 +118,7 @@ def check_is_built(self, input_shape): k=torch.tensor(self.k_bias), i=torch.tensor(self.i_bias), f=torch.tensor(self.f_bias), - overflow=self.overflow, + overflow=self.overflow_mode_parameters, round_mode=self.round_mode, is_heterogeneous=self.use_hgq, is_data=False, @@ -128,7 +129,7 @@ def check_is_built(self, input_shape): k=torch.tensor(self.k_output), i=torch.tensor(self.i_output), f=torch.tensor(self.f_output), - overflow=self.overflow, + overflow=self.overflow_mode_data, round_mode=self.round_mode, is_heterogeneous=self.use_hgq, is_data=True, @@ -627,7 
+628,7 @@ def __init__( self.k_output = config.quantization_parameters.default_data_keep_negatives self.i_output = config.quantization_parameters.default_data_integer_bits self.f_output = config.quantization_parameters.default_data_fractional_bits - self.overflow = config.quantization_parameters.overflow + self.overflow_mode_data = config.quantization_parameters.overflow_mode_data self.config = config self.is_pretraining = True self.round_mode = config.quantization_parameters.round_mode @@ -646,7 +647,7 @@ def build(self, input_shape): k=torch.tensor(self.k_input), i=torch.tensor(self.i_input), f=torch.tensor(self.f_input), - overflow=self.overflow, + overflow=self.overflow_mode_data, round_mode=self.round_mode, is_heterogeneous=self.use_hgq, is_data=True, @@ -656,7 +657,7 @@ def build(self, input_shape): k=torch.tensor(self.k_output), i=torch.tensor(self.i_output), f=torch.tensor(self.f_output), - overflow=self.overflow, + overflow=self.overflow_mode_data, round_mode=self.round_mode, is_heterogeneous=self.use_hgq, is_data=True, @@ -822,7 +823,8 @@ def __init__( self.k_bias = config.quantization_parameters.default_weight_keep_negatives self.i_bias = config.quantization_parameters.default_weight_integer_bits self.f_bias = config.quantization_parameters.default_weight_fractional_bits - self.overflow = config.quantization_parameters.overflow + self.overflow_mode_parameters = config.quantization_parameters.overflow_mode_parameters + self.overflow_mode_data = config.quantization_parameters.overflow_mode_data self.round_mode = config.quantization_parameters.round_mode self.use_hgq = config.quantization_parameters.use_high_granularity_quantization self.hgq_gamma = config.quantization_parameters.hgq_gamma @@ -833,7 +835,7 @@ def __init__( self.quantize_input = quantize_input self._weight = nn.Parameter(self.weight.clone()).to(self.weight.device) self.register_parameter("_weight", self._weight) - if self.bias: + if self.bias is not None: self._bias = nn.Parameter(self.bias.clone()).to(self.bias.device) self.register_parameter("_bias", self._bias) else: @@ -852,7 +854,7 @@ def check_is_built(self, input_shape): k=torch.tensor(self.k_input), i=torch.tensor(self.i_input), f=torch.tensor(self.f_input), - overflow=self.overflow, + overflow=self.overflow_mode_data, round_mode=self.round_mode, is_heterogeneous=self.use_hgq, is_data=True, @@ -863,7 +865,7 @@ def check_is_built(self, input_shape): i=torch.tensor(self.i_weight), f=torch.tensor(self.f_weight), round_mode=self.round_mode, - overflow=self.overflow, + overflow=self.overflow_mode_parameters, is_data=False, is_heterogeneous=self.use_hgq, ) @@ -872,7 +874,7 @@ def check_is_built(self, input_shape): i=torch.tensor(self.i_bias), f=torch.tensor(self.f_bias), round_mode=self.round_mode, - overflow=self.overflow, + overflow=self.overflow_mode_parameters, is_data=False, is_heterogeneous=self.use_hgq, ) @@ -980,7 +982,8 @@ def __init__( self.k_bias = config.quantization_parameters.default_weight_keep_negatives self.i_bias = config.quantization_parameters.default_weight_integer_bits self.f_bias = config.quantization_parameters.default_weight_fractional_bits - self.overflow = config.quantization_parameters.overflow + self.overflow_mode_parameters = config.quantization_parameters.overflow_mode_parameters + self.overflow_mode_data = config.quantization_parameters.overflow_mode_data self.round_mode = config.quantization_parameters.round_mode self.use_hgq = config.quantization_parameters.use_high_granularity_quantization self.hgq_gamma = 
config.quantization_parameters.hgq_gamma @@ -991,7 +994,7 @@ def __init__( self.quantize_input = quantize_input self._weight = nn.Parameter(self.weight.clone()).to(self.weight.device) self.register_parameter("_weight", self._weight) - if self.bias: + if self.bias is not None: self._bias = nn.Parameter(self.bias.clone()).to(self.bias.device) self.register_parameter("_bias", self._bias) else: @@ -1011,7 +1014,7 @@ def check_is_built(self, input_shape): k=torch.tensor(self.k_input), i=torch.tensor(self.i_input), f=torch.tensor(self.f_input), - overflow=self.overflow, + overflow=self.overflow_mode_data, round_mode=self.round_mode, is_heterogeneous=self.use_hgq, is_data=True, @@ -1022,7 +1025,7 @@ def check_is_built(self, input_shape): i=torch.tensor(self.i_weight), f=torch.tensor(self.f_weight), round_mode=self.round_mode, - overflow=self.overflow, + overflow=self.overflow_mode_parameters, is_data=False, is_heterogeneous=self.use_hgq, ) @@ -1031,7 +1034,7 @@ def check_is_built(self, input_shape): i=torch.tensor(self.i_bias), f=torch.tensor(self.f_bias), round_mode=self.round_mode, - overflow=self.overflow, + overflow=self.overflow_mode_parameters, is_data=False, is_heterogeneous=self.use_hgq, ) diff --git a/src/pquant/core/torch/quantizer.py b/src/pquant/core/torch/quantizer.py index bd860e3..e04d17e 100644 --- a/src/pquant/core/torch/quantizer.py +++ b/src/pquant/core/torch/quantizer.py @@ -13,10 +13,10 @@ def __init__(self, k, i, f, overflow, round_mode, is_heterogeneous, is_data=Fals self.overflow = overflow self.round_mode = round_mode self.use_hgq = is_heterogeneous - self.quantizer = create_quantizer(self.k, self.i, self.f, overflow, round_mode, is_heterogeneous, is_data) + self.is_data = is_data + self.quantizer = create_quantizer(self.k, self.i, self.f, self.overflow, self.round_mode, self.use_hgq, self.is_data) self.is_pretraining = False self.hgq_gamma = hgq_gamma - self.is_data = is_data if isinstance(granularity, Enum): self.granularity = granularity.value else: diff --git a/src/pquant/data_models/quantization_model.py b/src/pquant/data_models/quantization_model.py index 7cde730..31cefd8 100644 --- a/src/pquant/data_models/quantization_model.py +++ b/src/pquant/data_models/quantization_model.py @@ -26,6 +26,7 @@ class BaseQuantizationModel(BaseModel): layer_specific: dict[str, dict] = Field(default_factory=dict) use_high_granularity_quantization: bool = Field(default=False) use_real_tanh: bool = Field(default=False) - overflow: str = Field(default="SAT") + overflow_mode_parameters: str = Field(default="SAT") + overflow_mode_data: str = Field(default="SAT") round_mode: str = Field(default="RND") use_relu_multiplier: bool = Field(default=True) diff --git a/tests/test_keras_compression_layers.py b/tests/test_keras_compression_layers.py index d99f520..8a2fabd 100644 --- a/tests/test_keras_compression_layers.py +++ b/tests/test_keras_compression_layers.py @@ -87,7 +87,8 @@ def config_pdp(): "use_relu_multiplier": True, "use_symmetric_quantization": False, "round_mode": "RND", - "overflow": "SAT", + "overflow_mode_parameters": "SAT", + "overflow_mode_data": "SAT", }, "training_parameters": {"pruning_first": False}, "fitcompress_parameters": {"enable_fitcompress": False}, @@ -126,7 +127,8 @@ def config_ap(): "use_relu_multiplier": True, "use_symmetric_quantization": False, "round_mode": "RND", - "overflow": "SAT", + "overflow_mode_parameters": "SAT", + "overflow_mode_data": "SAT", }, "training_parameters": {"pruning_first": False}, "fitcompress_parameters": {"enable_fitcompress": False}, 
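
The test fixtures above build their configs from plain nested dictionaries that now carry the two overflow keys. A sketch of loading such a dictionary through the public helper, assuming `load_from_dictionary` accepts the same nesting as the fixtures and that omitted sections fall back to their defaults:

```python
from pquant import load_from_dictionary

config = load_from_dictionary({
    "quantization_parameters": {
        "enable_quantization": True,
        "round_mode": "RND",
        "overflow_mode_parameters": "SAT",
        "overflow_mode_data": "SAT",
    },
    "training_parameters": {"pruning_first": False},
    "fitcompress_parameters": {"enable_fitcompress": False},
})
```
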
@@ -168,7 +170,8 @@ def config_wanda(): "use_relu_multiplier": True, "use_symmetric_quantization": False, "round_mode": "RND", - "overflow": "SAT", + "overflow_mode_parameters": "SAT", + "overflow_mode_data": "SAT", }, "training_parameters": {"pruning_first": False}, "fitcompress_parameters": {"enable_fitcompress": False}, @@ -206,7 +209,8 @@ def config_cs(): "use_relu_multiplier": True, "use_symmetric_quantization": False, "round_mode": "RND", - "overflow": "SAT", + "overflow_mode_parameters": "SAT", + "overflow_mode_data": "SAT", }, "training_parameters": {"pruning_first": False}, "fitcompress_parameters": {"enable_fitcompress": False}, diff --git a/tests/test_torch_compression_layers.py b/tests/test_torch_compression_layers.py index b2ec570..ccf90a3 100644 --- a/tests/test_torch_compression_layers.py +++ b/tests/test_torch_compression_layers.py @@ -82,7 +82,8 @@ def config_pdp(): "use_relu_multiplier": True, "use_symmetric_quantization": False, "round_mode": "RND", - "overflow": "SAT", + "overflow_mode_parameters": "SAT", + "overflow_mode_data": "SAT", }, "training_parameters": {"pruning_first": False}, "fitcompress_parameters": {"enable_fitcompress": False}, @@ -121,7 +122,8 @@ def config_ap(): "use_relu_multiplier": True, "use_symmetric_quantization": False, "round_mode": "RND", - "overflow": "SAT", + "overflow_mode_parameters": "SAT", + "overflow_mode_data": "SAT", }, "training_parameters": {"pruning_first": False}, "fitcompress_parameters": {"enable_fitcompress": False}, @@ -163,7 +165,8 @@ def config_wanda(): "use_relu_multiplier": True, "use_symmetric_quantization": False, "round_mode": "RND", - "overflow": "SAT", + "overflow_mode_parameters": "SAT", + "overflow_mode_data": "SAT", }, "training_parameters": {"pruning_first": False}, "fitcompress_parameters": {"enable_fitcompress": False}, @@ -201,7 +204,8 @@ def config_cs(): "use_relu_multiplier": True, "use_symmetric_quantization": False, "round_mode": "RND", - "overflow": "SAT", + "overflow_mode_parameters": "SAT", + "overflow_mode_data": "SAT", }, "training_parameters": {"pruning_first": False}, "fitcompress_parameters": {"enable_fitcompress": False}, From 10a3dd9f7b68cbc9bbef513c8ff1f208d7d54455 Mon Sep 17 00:00:00 2001 From: Anastasiia Petrovych Date: Mon, 16 Feb 2026 11:19:26 +0100 Subject: [PATCH 13/20] Fixed model logging error --- src/pquant/configs/finetuning.yaml | 2 +- src/pquant/core/constants.py | 17 ++--------------- src/pquant/core/finetuning.py | 17 +++++++---------- 3 files changed, 10 insertions(+), 26 deletions(-) diff --git a/src/pquant/configs/finetuning.yaml b/src/pquant/configs/finetuning.yaml index 58c4ca0..ad68e50 100644 --- a/src/pquant/configs/finetuning.yaml +++ b/src/pquant/configs/finetuning.yaml @@ -16,7 +16,7 @@ quantization_parameters: granularity: "per_tensor" use_real_tanh: false use_symmetric_quantization: false - overflow_mode_parameters: WRAP + overflow_mode_parameters: SAT overflow_mode_data: SAT training_parameters: batch_size: 128 diff --git a/src/pquant/core/constants.py b/src/pquant/core/constants.py index 7fa5482..993042b 100644 --- a/src/pquant/core/constants.py +++ b/src/pquant/core/constants.py @@ -43,29 +43,16 @@ "BruteForceSampler": optuna.samplers.BruteForceSampler, } - -try: - import mlflow - - LOG_FUNCTIONS_REGISTRY = { - "torch": mlflow.pytorch.log_model, - "tensorflow": mlflow.tensorflow.log_model, - } -except ModuleNotFoundError: - LOG_FUNCTIONS_REGISTRY = {} - - TRACKING_URI = "http://0.0.0.0:5000/" DB_STORAGE = "sqlite:///optuna_study.db" -JAX_BACKEND = "jax" +TORCH_BACKEND 
= "torch" +TF_BACKEND = 'tensorflow' FINETUNING_DIRECTION = {"maximize", "minimize"} CONFIG_FILE = "config.yaml" N_JOBS = 1 -TORCH_BACKEND = "torch" -TF_BACKEND = 'tensorflow' METRIC_REGISTRY = { diff --git a/src/pquant/core/finetuning.py b/src/pquant/core/finetuning.py index cbedacf..f0a6ea0 100644 --- a/src/pquant/core/finetuning.py +++ b/src/pquant/core/finetuning.py @@ -36,24 +36,21 @@ def get_sampler(sampler_type, **kwargs): def log_model_by_backend(model, name, signature=None, registered_model_name=None): + import mlflow + backend = keras.backend.backend() - print("Backend:", backend) - print("Registry keys:", constants.LOG_FUNCTIONS_REGISTRY.keys()) - kwargs = { "artifact_path": name, "signature": signature, "registered_model_name": registered_model_name, } - - if backend == constants.JAX_BACKEND: - raise NotImplementedError("JAX is not supported yet.") - - if backend not in constants.LOG_FUNCTIONS_REGISTRY: + if backend == constants.TORCH_BACKEND: + return mlflow.pytorch.log_model(model, **kwargs) + elif backend == constants.TF_BACKEND: + return mlflow.tensorflow.log_model(model, **kwargs) + else: raise ValueError(f"Unsupported backend: {backend}") - return constants.LOG_FUNCTIONS_REGISTRY[backend](model, **kwargs) - class MetricFunction(BaseModel): function_name: Callable From 2b328443efb74c5eb5a539ec5c80610a2dbefae7 Mon Sep 17 00:00:00 2001 From: Roope Niemi Date: Mon, 16 Feb 2026 21:54:08 +0100 Subject: [PATCH 14/20] Add Torch HGQ serialization. Add Keras serialization for dense --- src/pquant/__init__.py | 2 + src/pquant/configs/config_dst.yaml | 2 +- src/pquant/core/keras/activations.py | 72 ++++++------ src/pquant/core/keras/layers.py | 165 ++++++++++++++++++--------- src/pquant/core/keras/quantizer.py | 70 ++++++++++-- src/pquant/core/torch/layers.py | 67 ++++++----- src/pquant/core/torch/quantizer.py | 57 ++++++--- src/pquant/core/torch/train.py | 4 +- 8 files changed, 296 insertions(+), 143 deletions(-) diff --git a/src/pquant/__init__.py b/src/pquant/__init__.py index 1285546..cb15a47 100644 --- a/src/pquant/__init__.py +++ b/src/pquant/__init__.py @@ -24,6 +24,7 @@ get_ebops, get_layer_keep_ratio, get_model_losses, + load_torch_hgq_model, post_training_prune, ) from .core.torch.train import train_model @@ -52,6 +53,7 @@ _forwards.append("load_from_file") _forwards.append("load_from_dictionary") _forwards.append("get_ebops") + _forwards.append("load_torch_hgq_model") __all__ = _forwards else: diff --git a/src/pquant/configs/config_dst.yaml b/src/pquant/configs/config_dst.yaml index 5175783..89536ec 100644 --- a/src/pquant/configs/config_dst.yaml +++ b/src/pquant/configs/config_dst.yaml @@ -21,7 +21,7 @@ quantization_parameters: hgq_beta: 1e-5 hgq_gamma: 0.0003 hgq_heterogeneous: True - layer_specific: [] + layer_specific: {} use_high_granularity_quantization: false use_real_tanh: false use_relu_multiplier: false diff --git a/src/pquant/core/keras/activations.py b/src/pquant/core/keras/activations.py index ec59568..8ff7dd6 100644 --- a/src/pquant/core/keras/activations.py +++ b/src/pquant/core/keras/activations.py @@ -23,6 +23,7 @@ def hard_tanh(x): activation_registry = {"relu": relu, "tanh": tanh, "hard_tanh": hard_tanh} +@keras.saving.register_keras_serializable(package="PQuant") class PQActivation(keras.layers.Layer): def __init__( self, @@ -32,8 +33,9 @@ def __init__( out_quant_bits: Tuple[T, T, T] = None, quantize_input=True, quantize_output=False, + **kwargs, ): - super().__init__() + super().__init__(**kwargs) if isinstance(config, dict): from 
pquant.core.finetuning import TuningConfig @@ -51,7 +53,8 @@ def __init__( self.f_output = config.quantization_parameters.default_data_fractional_bits else: self.k_output, self.i_output, self.f_output = out_quant_bits - + self.in_quant_bits = in_quant_bits + self.out_quant_bits = out_quant_bits self.activation_name = activation.lower() self.activation_function = activation_registry.get(self.activation_name) self.config = config @@ -73,34 +76,34 @@ def __init__( self.built = False def build(self, input_shape): - super().build(input_shape) self.input_shape = (1,) + input_shape[1:] - self.output_quantizer = Quantizer( - k=self.k_output, - i=self.i_output, - f=self.f_output, - overflow=self.overflow_mode_data, - round_mode=self.round_mode, - is_data=True, - is_heterogeneous=self.use_hgq, - hgq_gamma=self.hgq_gamma, - ) - self.input_quantizer = Quantizer( - k=self.k_input, - i=self.i_input, - f=self.f_input, - overflow=self.overflow_mode_data, - round_mode=self.round_mode, - is_data=True, - is_heterogeneous=self.use_hgq, - hgq_gamma=self.hgq_gamma, - ) - if self.use_hgq: - self.input_quantizer.build(input_shape) - self.output_quantizer.build(input_shape) + + if self.quantize_input: + self.input_quantizer = Quantizer( + k=self.k_input, + i=self.i_input, + f=self.f_input, + overflow=self.overflow_mode_data, + round_mode=self.round_mode, + is_data=True, + is_heterogeneous=self.use_hgq, + hgq_gamma=self.hgq_gamma, + ) + if self.quantize_output: + self.output_quantizer = Quantizer( + k=self.k_output, + i=self.i_output, + f=self.f_output, + overflow=self.overflow_mode_data, + round_mode=self.round_mode, + is_data=True, + is_heterogeneous=self.use_hgq, + hgq_gamma=self.hgq_gamma, + ) if self.use_multiplier: self.multiplier = self.add_weight(shape=(1,), trainable=True, initializer=keras.initializers.Constant(-1.0)) + super().build(input_shape) def get_input_quantization_bits(self): return self.input_quantizer.get_quantization_bits() @@ -118,9 +121,11 @@ def post_pre_train_function(self): self.is_pretraining = False def ebops(self): - bw_inp = self.input_quantizer.quantizer.bits_(self.input_shape) - bw_out = self.output_quantizer.quantizer.bits_(self.input_shape) - return keras.ops.sum((2.0**bw_inp) * bw_out) * 1e-4 # type: ignore + if self.quantize_input and self.quantize_output: + bw_inp = self.input_quantizer.quantizer.bits_(self.input_shape) + bw_out = self.output_quantizer.quantizer.bits_(self.input_shape) + return keras.ops.sum((2.0**bw_inp) * bw_out) * 1e-4 # type: ignore + return 0.0 def hgq_loss(self): if self.is_pretraining or not self.use_hgq: @@ -162,10 +167,11 @@ def get_config(self): config.update( { "config": self.config.get_dict(), - "i_input": float(self.i_input), - "f_input": float(self.f_input), - "i_output": float(self.i_output), - "f_output": float(self.f_output), + "quantize_input": self.quantize_input, + "quantize_output": self.quantize_output, + "activation": self.activation_name, + "in_quant_bits": self.in_quant_bits, + "out_quant_bits": self.out_quant_bits, } ) return config diff --git a/src/pquant/core/keras/layers.py b/src/pquant/core/keras/layers.py index 6e1d9ae..3894e96 100644 --- a/src/pquant/core/keras/layers.py +++ b/src/pquant/core/keras/layers.py @@ -1,7 +1,7 @@ from typing import Tuple, TypeVar import keras -from keras import ops +from keras import constraints, initializers, ops, regularizers from keras.layers import ( Activation, AveragePooling1D, @@ -19,6 +19,7 @@ from keras.src.layers.input_spec import InputSpec from keras.src.ops.operation_utils import 
compute_pooling_output_shape +from pquant.core.finetuning import TuningConfig from pquant.core.keras.activations import PQActivation from pquant.core.keras.quantizer import Quantizer from pquant.core.utils import get_pruning_layer @@ -26,6 +27,7 @@ T = TypeVar("T") +@keras.saving.register_keras_serializable(package="PQuant") class PQWeightBiasBase(keras.layers.Layer): def __init__( self, @@ -41,6 +43,8 @@ def __init__( **kwargs, ): super().__init__(**kwargs) + if isinstance(config, dict): + config = TuningConfig.load_from_config(config) if in_quant_bits is not None: self.k_input, self.i_input, self.f_input = in_quant_bits else: @@ -73,6 +77,10 @@ def __init__( self.quantize_input = quantize_input self.quantize_output = quantize_output + self.in_quant_bits = in_quant_bits + self.weight_quant_bits = weight_quant_bits + self.bias_quant_bits = bias_quant_bits + self.out_quant_bits = out_quant_bits self.pruning_first = config.training_parameters.pruning_first self.enable_quantization = config.quantization_parameters.enable_quantization self.round_mode = config.quantization_parameters.round_mode @@ -89,24 +97,8 @@ def __init__( self.hgq_beta = config.quantization_parameters.hgq_beta self.input_shape = None self.is_pretraining = True + self.config = config - def set_enable_pruning(self, enable_pruning): - self.enable_pruning = enable_pruning - - def get_weight_quantization_bits(self): - return self.weight_quantizer.get_quantization_bits() - - def get_bias_quantization_bits(self): - return self.bias_quantizer.get_quantization_bits() - - def get_input_quantization_bits(self): - return self.input_quantizer.get_quantization_bits() - - def get_output_quantization_bits(self): - return self.output_quantizer.get_quantization_bits() - - def build(self, input_shape): - super().build(input_shape) self.weight_quantizer = Quantizer( k=ops.convert_to_tensor(self.k_weight), i=ops.convert_to_tensor(self.i_weight), @@ -150,9 +142,27 @@ def build(self, input_shape): is_data=True, hgq_gamma=self.hgq_gamma, ) - self.input_shape = (1,) + input_shape[1:] + + def set_enable_pruning(self, enable_pruning): + self.enable_pruning = enable_pruning + + def get_weight_quantization_bits(self): + return self.weight_quantizer.get_quantization_bits() + + def get_bias_quantization_bits(self): + return self.bias_quantizer.get_quantization_bits() + + def get_input_quantization_bits(self): + return self.input_quantizer.get_quantization_bits() + + def get_output_quantization_bits(self): + return self.output_quantizer.get_quantization_bits() + + def build(self, input_shape): + self.input_shape = (1,) + tuple(input_shape[1:]) self.n_parallel = ops.prod(input_shape[1:-1]) self.parallelization_factor = self.parallelization_factor if self.parallelization_factor > 0 else self.n_parallel + super().build(input_shape=input_shape) def apply_final_compression(self): pass @@ -219,7 +229,44 @@ def collect_output(self, x, training): collect_x = self.handle_transpose(x, self.data_transpose, self.do_transpose_data) self.pruning_layer.collect_output(collect_x, training) + @classmethod + def from_config(cls, config): + # Deserialize all sublayers first + input_quantizer = keras.saving.deserialize_keras_object(config.pop("input_quantizer")) + weight_quantizer = keras.saving.deserialize_keras_object(config.pop("weight_quantizer")) + bias_quantizer = keras.saving.deserialize_keras_object(config.pop("bias_quantizer")) + output_quantizer = keras.saving.deserialize_keras_object(config.pop("output_quantizer")) + + instance = cls(**config) + 
instance.input_quantizer = input_quantizer + instance.weight_quantizer = weight_quantizer + instance.bias_quantizer = bias_quantizer + + if True: + instance.output_quantizer = output_quantizer + return instance + + def get_config(self): + config = super().get_config() + config.update( + { + "config": self.config, + "input_quantizer": keras.saving.serialize_keras_object(self.input_quantizer), + "weight_quantizer": keras.saving.serialize_keras_object(self.weight_quantizer), + "bias_quantizer": keras.saving.serialize_keras_object(self.bias_quantizer), + "quantize_input": self.quantize_input, + "quantize_output": self.quantize_output, + "in_quant_bits": self.in_quant_bits, + "weight_quant_bits": self.weight_quant_bits, + "bias_quant_bits": self.bias_quant_bits, + "out_quant_bits": self.out_quant_bits, + } + ) + config.update({"output_quantizer": keras.saving.serialize_keras_object(self.output_quantizer)}) + return config + +@keras.saving.register_keras_serializable(package="PQuant") class PQDepthwiseConv2d(PQWeightBiasBase, keras.layers.DepthwiseConv2D): def __init__( self, @@ -426,6 +473,7 @@ def extra_repr(self) -> str: ) +@keras.saving.register_keras_serializable(package="PQuant") class PQConv2d(PQWeightBiasBase, keras.layers.Conv2D): def __init__( self, @@ -584,6 +632,7 @@ def call(self, x, training=None): return x +@keras.saving.register_keras_serializable(package="PQuant") class PQSeparableConv2d(Layer): def __init__( self, @@ -663,6 +712,7 @@ def call(self, x, training=None): return x +@keras.saving.register_keras_serializable(package="PQuant") class PQConv1d(PQWeightBiasBase, keras.layers.Conv1D): def __init__( self, @@ -821,45 +871,28 @@ def call(self, x, training=None): return x -class PQDense(PQWeightBiasBase, keras.layers.Dense): +@keras.saving.register_keras_serializable(package="PQuant") +class PQDense(PQWeightBiasBase): def __init__( self, config, units, - device=None, - dtype=None, quantize_input=True, quantize_output=False, in_quant_bits: Tuple[T, T, T] = None, weight_quant_bits: Tuple[T, T, T] = None, bias_quant_bits: Tuple[T, T, T] = None, out_quant_bits: Tuple[T, T, T] = None, - activation=None, use_bias=True, kernel_initializer="glorot_uniform", bias_initializer="zeros", kernel_regularizer=None, bias_regularizer=None, - activity_regularizer=None, kernel_constraint=None, bias_constraint=None, - lora_rank=None, - lora_alpha=None, **kwargs, ): super().__init__( - units=units, - activation=None, - use_bias=use_bias, - kernel_initializer=kernel_initializer, - bias_initializer=bias_initializer, - kernel_regularizer=kernel_regularizer, - bias_regularizer=bias_regularizer, - activity_regularizer=activity_regularizer, - kernel_constraint=kernel_constraint, - bias_constraint=bias_constraint, - lora_rank=lora_rank, - lora_alpha=lora_alpha, config=config, layer_type="linear", quantize_input=quantize_input, @@ -875,9 +908,24 @@ def __init__( self.data_transpose = (0, 1) # Always (BATCH_SIZE, OUT_FEATURES) self.do_transpose_data = False self.use_bias = use_bias + self.units = units + self.kernel_initializer = initializers.get(kernel_initializer) + self.bias_initializer = initializers.get(bias_initializer) + self.kernel_regularizer = regularizers.get(kernel_regularizer) + self.bias_regularizer = regularizers.get(bias_regularizer) + self.kernel_constraint = constraints.get(kernel_constraint) + self.bias_constraint = constraints.get(bias_constraint) + self.input_spec = InputSpec(min_ndim=2) def build(self, input_shape): - super().build(input_shape) + input_dim = input_shape[-1] + 
self._kernel = self.add_weight( + name="kernel", + shape=(input_dim, self.units), + initializer=self.kernel_initializer, + regularizer=self.kernel_regularizer, + constraint=self.kernel_constraint, + ) if self.use_bias: self._bias = self.add_weight( name="bias", @@ -888,12 +936,7 @@ def build(self, input_shape): ) else: self._bias = None - if self.use_hgq: - self.input_quantizer.build(input_shape) - self.weight_quantizer.build(self._kernel.shape) - if self.use_bias: - self.bias_quantizer.build(self._bias.shape) - self.output_quantizer.build(self.compute_output_shape(input_shape)) + super().build(input_shape) @property def kernel(self): @@ -919,10 +962,6 @@ def bias(self): bias = self.bias_quantizer(self._bias) return bias - @bias.setter - def bias(self, bias): - self._bias = bias - def ebops(self, include_mask=False): bw_inp = self.input_quantizer.get_total_bits(self.input_shape) bw_ker = self.weight_quantizer.get_total_bits(ops.shape(self._kernel)) @@ -934,13 +973,19 @@ def ebops(self, include_mask=False): step_size_mask = ops.cast((ops.abs(self._kernel) > quantization_step_size), self._kernel.dtype) bw_ker = bw_ker * step_size_mask ebops = ops.sum(ops.matmul(bw_inp, bw_ker)) - ebops = ebops * self.n_parallel / self.parallelization_factor + ebops = ebops * self.parallelization_factor / self.n_parallel if self.use_bias: bw_bias = self.bias_quantizer.get_total_bits(ops.shape(self._bias)) size = ops.cast(ops.prod(self.input_shape), self.dtype) ebops += ops.mean(bw_bias) * size return ebops + def apply_final_compression(self): + self._kernel.assign(self.kernel) + if self._bias is not None: + self._bias.assign = self.bias + self.final_compression_done = True + def call(self, x, training=None): x = self.pre_forward(x, training) x = ops.matmul(x, self.kernel) @@ -952,6 +997,17 @@ def call(self, x, training=None): self.add_loss(self.hgq_loss()) return x + def get_config(self): + config = super().get_config() + config.update( + { + "config": self.config.model_dump(), + "units": self.units, + "use_bias": self.use_bias, + } + ) + return config + class PQBatchNormalization(keras.layers.BatchNormalization): def __init__( @@ -1001,7 +1057,7 @@ def __init__( self.use_hgq = config.quantization_parameters.use_high_granularity_quantization self.hgq_beta = config.quantization_parameters.hgq_beta self.quantize_input = quantize_input - self.granularity= config.quantization_parameters.granularity + self.granularity = config.quantization_parameters.granularity self.config = config self.f_weight = self.f_bias = ops.convert_to_tensor(config.quantization_parameters.default_weight_fractional_bits) self.i_weight = self.i_bias = ops.convert_to_tensor(config.quantization_parameters.default_weight_integer_bits) @@ -1351,7 +1407,7 @@ def call(self, x, training=None): def call_post_round_functions(model, rewind, rounds, r): - last_round = (r == rounds - 1) + last_round = r == rounds - 1 if rewind == "round": rewind_weights_functions(model) elif rewind == "post-ticket-search" and last_round: @@ -1632,9 +1688,10 @@ def get_layer_keep_ratio(model): return remaining_weights / total_w return 0.0 + def is_training_stage(layer): return False if layer.pruning_layer.is_finetuning and layer.pruning_layer.is_pretraining else True - + def get_model_losses(model, losses): for layer in model.layers: @@ -1647,7 +1704,7 @@ def get_model_losses(model, losses): PQConv1d, PQDense, ), - ): + ): if layer.enable_pruning and is_training_stage(layer): loss += layer.pruning_layer.calculate_additional_loss() if layer.enable_quantization and 
layer.use_hgq: diff --git a/src/pquant/core/keras/quantizer.py b/src/pquant/core/keras/quantizer.py index 1375b16..a72f309 100644 --- a/src/pquant/core/keras/quantizer.py +++ b/src/pquant/core/keras/quantizer.py @@ -1,18 +1,30 @@ +from enum import Enum + import keras -from keras.initializers import Constant from keras import ops -from enum import Enum from pquant.core.quantizer_functions import create_quantizer +@keras.saving.register_keras_serializable(package="PQuant") class Quantizer(keras.layers.Layer): # HGQ quantizer wrapper - def __init__(self, k, i, f, overflow, round_mode, is_heterogeneous, is_data=False, granularity="per_tensor", hgq_gamma=0): + def __init__( + self, + k=0.0, + i=0.0, + f=7.0, + overflow="SAT", + round_mode="RND", + is_heterogeneous=False, + is_data=False, + granularity="per_tensor", + hgq_gamma=0, + ): super().__init__() - self.k = k - self.i = i - self.f = f + self.k = float(k) + self.i = float(i) + self.f = float(f) self.overflow = overflow self.round_mode = round_mode self.use_hgq = is_heterogeneous @@ -24,7 +36,7 @@ def __init__(self, k, i, f, overflow, round_mode, is_heterogeneous, is_data=Fals self.granularity = granularity.value else: self.granularity = granularity - + def compute_dynamic_bits(self, x): if self.granularity == "per_channel": if ops.ndim(x) == 2: @@ -43,13 +55,9 @@ def compute_dynamic_bits(self, x): int_bits = ops.maximum(m, 0.0) frac_bits = ops.maximum(self.b - int_bits - self.k, 0.0) return int_bits, frac_bits - + def build(self, input_shape): super().build(input_shape) - self.i = self.add_variable((), Constant(self.i), dtype="float32", trainable=False) - self.f = self.add_variable((), Constant(self.f), dtype="float32", trainable=False) - if self.use_hgq: - self.quantizer.build(input_shape) def get_total_bits(self, shape): if self.use_hgq: @@ -95,3 +103,41 @@ def hgq_loss(self): for layer_loss in self.quantizer.quantizer.losses: loss += layer_loss return loss + + @classmethod + def from_config(cls, config): + use_hgq = config["is_heterogeneous"] + instance = cls( + k=config.pop("k"), + i=config.pop("i"), + f=config.pop("f"), + round_mode=config.pop("round_mode"), + overflow=config.pop("overflow"), + is_heterogeneous=config.pop("is_heterogeneous"), + is_data=config.pop("is_data"), + granularity=config.pop("granularity"), + ) + + if use_hgq: + quantizer_config = config.pop("quantizer") + instance.quantizer = keras.saving.deserialize_keras_object(quantizer_config) + return instance + + def get_config(self): + config = super().get_config() + config.update( + { + "k": self.k, + "i": self.i, + "f": self.f, + "overflow": self.overflow, + "round_mode": self.round_mode, + "is_data": self.is_data, + "hgq_gamma": self.hgq_gamma, + "is_heterogeneous": self.use_hgq, + "granularity": self.granularity, + } + ) + if self.use_hgq: + config.update({"quantizer": keras.saving.serialize_keras_object(self.quantizer)}) + return config diff --git a/src/pquant/core/torch/layers.py b/src/pquant/core/torch/layers.py index 3ce6058..446dc52 100644 --- a/src/pquant/core/torch/layers.py +++ b/src/pquant/core/torch/layers.py @@ -92,16 +92,17 @@ def check_is_built(self, input_shape): if self.built: return # Build function to delay quantizer creation until after custom i,f bits have been set - self.input_quantizer = Quantizer( - k=torch.tensor(self.k_input), - i=torch.tensor(self.i_input), - f=torch.tensor(self.f_input), - overflow=self.overflow_mode_data, - round_mode=self.round_mode, - is_heterogeneous=self.use_hgq, - is_data=True, - hgq_gamma=self.hgq_gamma, - ) + if 
self.quantize_input: + self.input_quantizer = Quantizer( + k=torch.tensor(self.k_input), + i=torch.tensor(self.i_input), + f=torch.tensor(self.f_input), + overflow=self.overflow_mode_data, + round_mode=self.round_mode, + is_heterogeneous=self.use_hgq, + is_data=True, + hgq_gamma=self.hgq_gamma, + ) self.weight_quantizer = Quantizer( k=torch.tensor(self.k_weight), i=torch.tensor(self.i_weight), @@ -111,7 +112,7 @@ def check_is_built(self, input_shape): is_heterogeneous=self.use_hgq, is_data=False, hgq_gamma=self.hgq_gamma, - granularity=self.granularity + granularity=self.granularity, ) self.bias_quantizer = Quantizer( @@ -124,17 +125,17 @@ def check_is_built(self, input_shape): is_data=False, hgq_gamma=self.hgq_gamma, ) - - self.output_quantizer = Quantizer( - k=torch.tensor(self.k_output), - i=torch.tensor(self.i_output), - f=torch.tensor(self.f_output), - overflow=self.overflow_mode_data, - round_mode=self.round_mode, - is_heterogeneous=self.use_hgq, - is_data=True, - hgq_gamma=self.hgq_gamma, - ) + if self.quantize_output: + self.output_quantizer = Quantizer( + k=torch.tensor(self.k_output), + i=torch.tensor(self.i_output), + f=torch.tensor(self.f_output), + overflow=self.overflow_mode_data, + round_mode=self.round_mode, + is_heterogeneous=self.use_hgq, + is_data=True, + hgq_gamma=self.hgq_gamma, + ) self.n_parallel = ops.prod(tuple(input_shape)[1:-1]) self.parallelization_factor = self.parallelization_factor if self.parallelization_factor > 0 else self.n_parallel @@ -264,6 +265,7 @@ def __init__( else: self.register_parameter("_bias", None) self.pruning_layer.build(self._weight.shape) + self.final_compression_done = nn.Parameter(torch.tensor(False), requires_grad=False) def ebops(self, include_mask=False): bw_inp = self.input_quantizer.get_total_bits(self.input_shape) @@ -279,7 +281,7 @@ def ebops(self, include_mask=False): bw_bias = self.bias_quantizer.get_total_bits(ops.shape(self._bias)) size = ops.cast(ops.prod(self.input_shape[:-1]) * self.out_features, self._weight.dtype) ebops += ops.mean(bw_bias) * size - ebops = ebops * self.n_parallel / self.parallelization_factor + ebops = ebops * self.parallelization_factor / self.n_parallel return ebops @property @@ -304,7 +306,7 @@ def apply_final_compression(self): self._weight.data = self.weight if self._bias is not None: self._bias.data = self.bias - self.final_compression_done = True + self.final_compression_done.data = torch.tensor(True) def forward(self, x): x = self.pre_forward(x) @@ -1415,13 +1417,13 @@ def add_pruning_to_model(module, config, prefix=""): def apply_final_compression(module): for layer in module.modules(): - if isinstance(layer, (PQWeightBiasBase, PQBatchNorm2d, PQBatchNorm1d)): + if isinstance(layer, (PQWeightBiasBase, PQBatchNorm2d, PQBatchNorm1d, Quantizer)): layer.apply_final_compression() return module def call_post_round_functions(model, rewind, rounds, r): - last_round = (r == rounds - 1) + last_round = r == rounds - 1 if rewind == "round": rewind_weights_functions(model) elif rewind == "post-ticket-search" and last_round: @@ -1468,13 +1470,13 @@ def pre_finetune_functions(model): layer.pruning_layer.pre_finetune_function() -def post_pretrain_functions(model, config, train_loader=None, loss_func=None, input_shape=None): +def post_pretrain_functions(model, config, train_loader=None, loss_function=None, input_shape=None): if config.fitcompress_parameters.enable_fitcompress: from pquant.core.torch.fit_compress import call_fitcompress # noqa: 811 config, pruning_mask_importance_scores = call_fitcompress( - 
config, model, train_loader, loss_func, input_shape=input_shape + config, model, train_loader, loss_function, input_shape=input_shape ) idx = 0 for layer in model.modules(): @@ -1667,3 +1669,12 @@ def get_ebops(model): elif isinstance(m, (PQAvgPoolBase, PQBatchNorm1d, PQBatchNorm2d, PQActivation)): ebops += m.ebops() return ebops + + +def load_torch_hgq_model(model, path_to_checkpoint): + model.load_state_dict(torch.load(path_to_checkpoint), strict=False) + for m in model.modules(): + if isinstance(m, Quantizer) and m.quantizer.built: + # Populate HGQ quantizer bit values from PQuantML quantizer + m.reload_from_local() + return model diff --git a/src/pquant/core/torch/quantizer.py b/src/pquant/core/torch/quantizer.py index e04d17e..a3fd576 100644 --- a/src/pquant/core/torch/quantizer.py +++ b/src/pquant/core/torch/quantizer.py @@ -1,27 +1,29 @@ +from enum import Enum + import torch import torch.nn as nn -from enum import Enum from pquant.core.quantizer_functions import create_quantizer + class Quantizer(nn.Module): - def __init__(self, k, i, f, overflow, round_mode, is_heterogeneous, is_data=False, granularity='per_tensor', hgq_gamma=0): + def __init__( + self, k, i, f, overflow, round_mode, is_heterogeneous, is_data=False, granularity='per_tensor', hgq_gamma=0 + ): super().__init__() self.k = torch.nn.Parameter(torch.tensor(k), requires_grad=False) - self.i = torch.nn.Parameter(torch.tensor(i), requires_grad=False) - self.f = torch.nn.Parameter(torch.tensor(f), requires_grad=False) self.overflow = overflow self.round_mode = round_mode self.use_hgq = is_heterogeneous self.is_data = is_data - self.quantizer = create_quantizer(self.k, self.i, self.f, self.overflow, self.round_mode, self.use_hgq, self.is_data) + self.quantizer = create_quantizer(self.k, i, f, self.overflow, self.round_mode, self.use_hgq, self.is_data) self.is_pretraining = False self.hgq_gamma = hgq_gamma if isinstance(granularity, Enum): self.granularity = granularity.value else: self.granularity = granularity - self.b = torch.nn.Parameter(torch.tensor(self.k + self.i + self.f), requires_grad=False) + self.final_compression_done = nn.Parameter(torch.tensor(False), requires_grad=False) def get_quantization_bits(self): if self.use_hgq: @@ -51,12 +53,12 @@ def compute_dynamic_bits(self, x): if x.ndim == 2: abs_x = torch.amax(torch.abs(x), dim=1, keepdim=True) elif x.ndim == 3: - abs_x = torch.amax(torch.abs(x), dim=(1, 2), keepdim=True) + abs_x = torch.amax(torch.abs(x), dim=(1, 2), keepdim=True) elif x.ndim == 4: - abs_x = torch.amax(torch.abs(x), dim=(1, 2, 3), keepdim=True) + abs_x = torch.amax(torch.abs(x), dim=(1, 2, 3), keepdim=True) elif self.granularity == "per_weight": abs_x = torch.abs(x) - else: + else: raise ValueError("The selected granularity is not supported.") m = torch.ceil(torch.log2(abs_x + 1e-6)) @@ -64,10 +66,13 @@ def compute_dynamic_bits(self, x): frac_bits = torch.clamp(self.b - int_bits - self.k, min=0) return int_bits, frac_bits - def forward(self, x): if self.use_hgq: - return self.quantizer(x, training=self.training) + x = self.quantizer(x, training=self.training) + if not hasattr(self, "f"): + _, i, f = self.get_quantization_bits() + self.initialize_quantization_parameters(i, f) + return x else: if self.granularity == 'per_tensor': i, f = self.i, self.f @@ -75,7 +80,10 @@ def forward(self, x): i, f = self.compute_dynamic_bits(x) self.i.data = i self.f.data = f - return self.quantizer(x, k=self.k, i=i, f=f, training=self.training) + if not hasattr(self, "f"): + _, i, f = self.get_quantization_bits() 
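
`load_torch_hgq_model` above restores a checkpoint and then copies the learned bit widths back into the HGQ quantizers. A short usage sketch, assuming the checkpoint was produced with `torch.save(model.state_dict(), ...)` and that the model is rebuilt with the same architecture and config; `build_model` is a hypothetical stand-in for whatever constructor the project uses:

```python
from pquant import load_torch_hgq_model

model = build_model(config)  # hypothetical: same architecture/config as the trained model
model = load_torch_hgq_model(model, "hgq_checkpoint.pt")
model.eval()
```
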
+ self.initialize_quantization_parameters(i, f) + x = self.quantizer(x, k=self.k, i=i, f=f, training=self.training) def hgq_loss(self): if self.is_pretraining or not self.use_hgq: @@ -84,10 +92,31 @@ def hgq_loss(self): for layer_loss in self.quantizer.quantizer.losses: loss += layer_loss return loss - + def post_epoch_function(self): - if self.use_hgq: + if self.use_hgq and self.quantizer.quantizer.built: constrained_i = self.quantizer.quantizer._i.constraint(self.quantizer.quantizer._i) self.quantizer.quantizer._i.assign(constrained_i) constrained_f = self.quantizer.quantizer._f.constraint(self.quantizer.quantizer._f) self.quantizer.quantizer._f.assign(constrained_f) + + def apply_final_compression(self): + if self.use_hgq and not self.quantizer.built: + return + _, i, f = self.get_quantization_bits() + self.i.data = i + self.f.data = f + self.b.data = i + f + self.final_compression_done.data = torch.tensor(True) + + def initialize_quantization_parameters(self, i, f): + # Lazy initialization + self.i = torch.nn.Parameter(torch.tensor(i), requires_grad=False) + self.f = torch.nn.Parameter(torch.tensor(f), requires_grad=False) + self.b = torch.nn.Parameter(torch.tensor(self.k + i + f), requires_grad=False) + + def reload_from_local(self): + if not self.use_hgq: + return + self.quantizer.quantizer._i.assign(self.i) + self.quantizer.quantizer._f.assign(self.f) diff --git a/src/pquant/core/torch/train.py b/src/pquant/core/torch/train.py index bf0f803..12a790b 100644 --- a/src/pquant/core/torch/train.py +++ b/src/pquant/core/torch/train.py @@ -25,7 +25,9 @@ def train_model(model, config, train_func, valid_func, input_shape=None, **kwarg valid_func(model, epoch=epoch, **kwargs) post_epoch_functions(model, e, training_config.pretraining_epochs) epoch += 1 - post_pretrain_functions(model, config, kwargs['trainloader'], kwargs['loss_func'], input_shape=input_shape) + post_pretrain_functions( + model, config, kwargs.get("trainloader", None), kwargs.get("loss_function", None), input_shape=input_shape + ) for r in range(training_config.rounds): for e in range(training_config.epochs): model.train() From 4ba7f4227d6bbdf1e0006b85cd8d6ff0bfab349b Mon Sep 17 00:00:00 2001 From: Roope Niemi Date: Tue, 17 Feb 2026 13:27:14 +0100 Subject: [PATCH 15/20] fixed dynamic bit calculation bug, added torch fixed point quantizer --- src/pquant/core/keras/activations.py | 2 +- src/pquant/core/keras/quantizer.py | 34 ++- .../core/torch/fixed_point_quantizer.py | 212 ++++++++++++++++++ src/pquant/core/torch/quantizer.py | 26 ++- 4 files changed, 253 insertions(+), 21 deletions(-) create mode 100644 src/pquant/core/torch/fixed_point_quantizer.py diff --git a/src/pquant/core/keras/activations.py b/src/pquant/core/keras/activations.py index 8ff7dd6..d2af1a0 100644 --- a/src/pquant/core/keras/activations.py +++ b/src/pquant/core/keras/activations.py @@ -76,7 +76,7 @@ def __init__( self.built = False def build(self, input_shape): - self.input_shape = (1,) + input_shape[1:] + self.input_shape = (1,) + tuple(input_shape[1:]) if self.quantize_input: self.input_quantizer = Quantizer( diff --git a/src/pquant/core/keras/quantizer.py b/src/pquant/core/keras/quantizer.py index a72f309..890f34e 100644 --- a/src/pquant/core/keras/quantizer.py +++ b/src/pquant/core/keras/quantizer.py @@ -22,14 +22,17 @@ def __init__( hgq_gamma=0, ): super().__init__() - self.k = float(k) - self.i = float(i) - self.f = float(f) + self.k_init = float(k) + self.i_init = float(i) + self.f_init = float(f) + self.b_init = self.k_init + self.i_init + 
self.f_init self.overflow = overflow self.round_mode = round_mode self.use_hgq = is_heterogeneous self.is_data = is_data - self.quantizer = create_quantizer(self.k, self.i, self.f, self.overflow, self.round_mode, self.use_hgq, self.is_data) + self.quantizer = create_quantizer( + self.k_init, self.i_init, self.f_init, self.overflow, self.round_mode, self.use_hgq, self.is_data + ) self.is_pretraining = False self.hgq_gamma = hgq_gamma if isinstance(granularity, Enum): @@ -53,10 +56,20 @@ def compute_dynamic_bits(self, x): raise ValueError(f"compute_dynamic_bits called for granularity={self.granularity}") m = ops.ceil(ops.log(abs_x + 1e-6) / ops.log(2.0)) int_bits = ops.maximum(m, 0.0) - frac_bits = ops.maximum(self.b - int_bits - self.k, 0.0) + b = self.b if hasattr(self, "b") else self.b_init + frac_bits = ops.maximum(b - int_bits - self.k_init, 0.0) return int_bits, frac_bits def build(self, input_shape): + if self.granularity == "per_tensor": + self.k = self.add_weight(shape=(), initializer=keras.initializers.Constant(self.k_init), trainable=False) + self.i = self.add_weight(shape=(), initializer=keras.initializers.Constant(self.i_init), trainable=False) + self.f = self.add_weight(shape=(), initializer=keras.initializers.Constant(self.f_init), trainable=False) + else: + i, _ = self.compute_dynamic_bits(keras.ops.ones(input_shape)) + self.k = self.add_weight(shape=i.shape, initializer=keras.initializers.Constant(self.k_init), trainable=False) + self.i = self.add_weight(shape=i.shape, initializer=keras.initializers.Constant(self.i_init), trainable=False) + self.f = self.add_weight(shape=i.shape, initializer=keras.initializers.Constant(self.f_init), trainable=False) super().build(input_shape) def get_total_bits(self, shape): @@ -83,17 +96,16 @@ def post_pretrain(self): self.is_pretraining = True def call(self, x, training=None): - if not self.built: - self.build(x.shape) if self.use_hgq: return self.quantizer(x, training=training) + if not training: + return self.quantizer(x, k=self.k, i=self.i, f=self.f, training=training) elif self.granularity == "per_tensor": i, f = self.i, self.f else: i, f = self.compute_dynamic_bits(x) self.i.assign(i) self.f.assign(f) - return self.quantizer(x, k=self.k, i=i, f=f, training=training) def hgq_loss(self): @@ -127,9 +139,9 @@ def get_config(self): config = super().get_config() config.update( { - "k": self.k, - "i": self.i, - "f": self.f, + "k": self.k_init, + "i": self.i_init, + "f": self.f_init, "overflow": self.overflow, "round_mode": self.round_mode, "is_data": self.is_data, diff --git a/src/pquant/core/torch/fixed_point_quantizer.py b/src/pquant/core/torch/fixed_point_quantizer.py new file mode 100644 index 0000000..e3ecc4e --- /dev/null +++ b/src/pquant/core/torch/fixed_point_quantizer.py @@ -0,0 +1,212 @@ +# PyTorch version of https://github.com/calad0i/quantizers/blob/master/src/quantizers/fixed_point/_fixed_point_ops.py + +from collections.abc import Callable +from functools import wraps +from typing import Any, TypeVar + +import torch +from numpy.typing import ArrayLike + +round_mode_registry: dict[str, Callable[[Any], Any]] = {} +saturation_mode_registry: dict[str, Callable[[Any, Any, Any, Any], Any]] = {} + +T = TypeVar('T', bound=ArrayLike) + + +def _clip(x, min_value, max_value): + mask_overflow = x > max_value + mask_underflow = x < min_value + return torch.where(mask_overflow, max_value, torch.where(mask_underflow, min_value, x)) + + +def rnd_mode(name: str): + def inner(func): + assert name not in round_mode_registry, f"Round mode '{name}' 
already exists." + + @wraps(func) + def wrapper(x): + xq = func(x) + return xq.detach() + (x - x.detach()) + + round_mode_registry[name] = wrapper + return wrapper + + return inner + + +@rnd_mode('TRN') +def floor(x): + return torch.floor(x) + + +@rnd_mode('RND') +def round(x): + # Round to nearest, ties positive infinity. + return torch.floor(x + 0.5) + + +@rnd_mode('RND_CONV') +def round_conv(x): + # Round to nearest, ties to even. + return torch.round(x) + + +@rnd_mode('TRN_ZERO') +def floor_zero(x): + # Truncate towards zero. + sign = torch.sign(x) + return torch.floor(torch.abs(x)) * sign # type: ignore + + +@rnd_mode('RND_ZERO') +def round_zero(x): + # Round to nearest, ties towards zero. + sign = torch.sign(x) + return -torch.floor(-torch.abs(x) + 0.5) * sign # type:ignore + + +@rnd_mode('RND_MIN_INF') +def round_min_inf(x): + # Round to nearest, ties towards negative infinity. + return -torch.floor(-x + 0.5) # type:ignore + + +@rnd_mode('RND_INF') +def round_inf(x): + # Round to nearest, ties away from zero. + sign = torch.sign(x) + return torch.floor(torch.abs(x) + 0.5) * sign # type: ignore + + +def sat_mode(name: str | list | tuple): + names = (name,) if isinstance(name, str) else name + + def inner(func): + for name in names: + assert name not in saturation_mode_registry, f"Saturation mode '{name}' already exists." + saturation_mode_registry[name] = func + saturation_mode_registry[func.__name__.upper()] = func + return func + + return inner + + +@sat_mode('WRAP') +def wrap(x, k, i, f): + xs = x + bk = i + k + bias = k * 2.0 ** (bk - 1) + return (xs + bias) % (2.0**bk) - bias + + +@sat_mode('SAT') +def sat(x, k, i, f): + f_eps = 2.0 ** (-f) + __max = 2.0**i + _max = __max - f_eps + _min = -__max * k + r = _clip(x, _min, _max) + return r + + +@sat_mode('SAT_SYM') +def sat_sym(x, k, i, f): + f_eps = 2.0 ** (-f) + _max = 2.0**i - f_eps + _min = -_max * k + r = _clip(x, _min, _max) + return r + + +@sat_mode('WRAP_SM') +def wrap_sm_fn(x, k, i, f, training=None, quant_fn: Callable = lambda x: x): + # x=ops.round(x*2.**f) + # High and low bounds are reflective. When overflows, can be less trash than WARP but still more trash than SAT. + eps = 2.0**-f + high = 2.0**i - eps + low = -(high + eps) * k + interval = 2.0 ** (i + k) + c1 = ((x) / interval) % 2 >= 1 # type: ignore + c1 = c1 & (torch.abs(x) > eps / 2) + c2 = ((x + eps / 2) / (interval / 2)) % 2 >= 1 # type: ignore + qx = quant_fn(x) + mapped = ((qx - high - eps) % interval) + low + + mapped = torch.where(c2, -mapped - eps, mapped) # type: ignore + mapped = torch.where(c1, -mapped - eps, mapped) # type: ignore + + return mapped + + +class FixedPointQuantizer: + def round(self, x, f: Any = 1.0): + scale = 2.0**f + x = x * scale + xq = self.round_fn(x) + xq = xq / scale + return xq + + def saturate(self, x, k, i, f): + return self.sat_fn(x, k, i, f) + + def __init__(self, round_mode: str = 'TRN', overflow_mode: str = 'WRAP'): + round_mode = round_mode.upper() + overflow_mode = overflow_mode.upper() + self.stochastic = False + + if round_mode.startswith('S_'): + round_mode = round_mode[2:] + self.stochastic = True + + if overflow_mode == 'WRAP_SM': + assert round_mode in ( + 'RND', + 'RND_CONV', + ), 'WRAP_SM only supports RND and RND_CONV rounding modes in this implementation.' 
+ + self.round_mode = round_mode + self.overflow_mode = overflow_mode + round_fn = round_mode_registry[round_mode] + sat_fn = saturation_mode_registry[overflow_mode] + self.sat_fn = sat_fn + self.round_fn = round_fn + + def forward(self, x, k, i, f, training=False): + # Workaround for gradient computation around 0. + # When have values outside boundary rounded to boundary, grad on f presents despite the value + # will be clipped off anyway. Thus have saturation before rounding, except for + # wrap mode, which doesn't round during training. + + if self.overflow_mode != 'WRAP': + x = self.saturate(x, k, i, f) + x = self.round(x, f) + if self.overflow_mode == 'WRAP' and not training: + x = self.saturate(x, k, i, f) + return x + + def forward_wrap_sm(self, x, k, i, f, training=False): + def quant_fn(x): + return self.round(x, f, training and self.stochastic) + + x = wrap_sm_fn(x, k, i, f, training, quant_fn) + return x + + def __call__(self, x, k, i, f, training=False, seed_gen=None): + i = torch.maximum(i, -f).detach() + (i - i.detach()) # type: ignore + if self.stochastic and training: + assert seed_gen is not None, 'Seed generator must be provided for stochastic rounding.' + if self.overflow_mode != 'WRAP_SM': + return self.forward(x, k, i, f, training) + else: + return self.forward_wrap_sm(x, k, i, f, training) + + +def get_fixed_quantizer(round_mode: str = 'TRN', overflow_mode: str = 'WRAP'): + """Get a stateless fixed-point quantizer given the round and overflow mode. + The quantizer is differentiable w.r.t. to the input and f, also i if using saturation overflow mode. + + Args: + round_mode: round mode, one of + """ + quantizer = FixedPointQuantizer(round_mode, overflow_mode) + return quantizer # type: ignore diff --git a/src/pquant/core/torch/quantizer.py b/src/pquant/core/torch/quantizer.py index a3fd576..75079c8 100644 --- a/src/pquant/core/torch/quantizer.py +++ b/src/pquant/core/torch/quantizer.py @@ -13,9 +13,12 @@ def __init__( super().__init__() self.k = torch.nn.Parameter(torch.tensor(k), requires_grad=False) self.overflow = overflow + self.b_init = k + i + f self.round_mode = round_mode self.use_hgq = is_heterogeneous self.is_data = is_data + self.i_init = i + self.f_init = f self.quantizer = create_quantizer(self.k, i, f, self.overflow, self.round_mode, self.use_hgq, self.is_data) self.is_pretraining = False self.hgq_gamma = hgq_gamma @@ -63,27 +66,30 @@ def compute_dynamic_bits(self, x): m = torch.ceil(torch.log2(abs_x + 1e-6)) int_bits = torch.clamp(m, min=0) - frac_bits = torch.clamp(self.b - int_bits - self.k, min=0) + b = self.b if hasattr(self, "b") else self.k + self.i_init + self.f_init + frac_bits = torch.clamp(b - int_bits - self.k, min=0) return int_bits, frac_bits def forward(self, x): if self.use_hgq: x = self.quantizer(x, training=self.training) - if not hasattr(self, "f"): - _, i, f = self.get_quantization_bits() - self.initialize_quantization_parameters(i, f) + _, i, f = self.get_quantization_bits() + self.initialize_quantization_parameters(i, f) return x else: + if not self.training or self.final_compression_done: + self.quantizer(x, k=self.k, i=self.i, f=self.f, training=self.training) if self.granularity == 'per_tensor': - i, f = self.i, self.f + self.initialize_quantization_parameters(self.i_init, self.f_init) + _, i, f = self.get_quantization_bits() else: i, f = self.compute_dynamic_bits(x) + self.initialize_quantization_parameters(i, f) self.i.data = i self.f.data = f - if not hasattr(self, "f"): - _, i, f = self.get_quantization_bits() - 
self.initialize_quantization_parameters(i, f) + _, i, f = self.get_quantization_bits() x = self.quantizer(x, k=self.k, i=i, f=f, training=self.training) + return x def hgq_loss(self): if self.is_pretraining or not self.use_hgq: @@ -110,10 +116,12 @@ def apply_final_compression(self): self.final_compression_done.data = torch.tensor(True) def initialize_quantization_parameters(self, i, f): + if hasattr(self, "f"): + return # Lazy initialization self.i = torch.nn.Parameter(torch.tensor(i), requires_grad=False) self.f = torch.nn.Parameter(torch.tensor(f), requires_grad=False) - self.b = torch.nn.Parameter(torch.tensor(self.k + i + f), requires_grad=False) + self.b = torch.nn.Parameter(torch.tensor(self.k.detach().clone() + i + f), requires_grad=False) def reload_from_local(self): if not self.use_hgq: From 21a3011a4bc10d5cdbe59635b7d677188d0044c6 Mon Sep 17 00:00:00 2001 From: Roope Niemi Date: Sun, 22 Feb 2026 16:30:00 +0100 Subject: [PATCH 16/20] fix fitcompress, make pdp structured do output channel pruning, rename tuningconfig to PQConfig, finetuning parameters to hpo_parameters, fix layer replacement --- src/pquant/__init__.py | 10 +- src/pquant/configs/finetuning.yaml | 2 +- ...ning.py => hyperparameter_optimization.py} | 50 +++--- src/pquant/core/keras/activations.py | 6 +- src/pquant/core/keras/layers.py | 47 +++--- src/pquant/core/keras/quantizer.py | 6 +- src/pquant/core/quantizer_functions.py | 39 +++-- src/pquant/core/torch/activations.py | 8 +- src/pquant/core/torch/fit_compress.py | 2 +- src/pquant/core/torch/layers.py | 145 +++++++++++++----- src/pquant/core/torch/quantizer.py | 19 ++- ...y => hyperparameter_optimization_model.py} | 2 +- src/pquant/data_models/pruning_model.py | 2 +- .../pruning_methods/activation_pruning.py | 20 ++- src/pquant/pruning_methods/autosparse.py | 4 +- src/pquant/pruning_methods/cs.py | 4 +- src/pquant/pruning_methods/dst.py | 4 +- src/pquant/pruning_methods/fitcompress.py | 4 +- src/pquant/pruning_methods/mdmm.py | 5 +- src/pquant/pruning_methods/pdp.py | 21 ++- src/pquant/pruning_methods/wanda.py | 4 +- 21 files changed, 257 insertions(+), 147 deletions(-) rename src/pquant/core/{finetuning.py => hyperparameter_optimization.py} (91%) rename src/pquant/data_models/{finetuning_model.py => hyperparameter_optimization_model.py} (93%) diff --git a/src/pquant/__init__.py b/src/pquant/__init__.py index cb15a47..299fa45 100644 --- a/src/pquant/__init__.py +++ b/src/pquant/__init__.py @@ -6,11 +6,13 @@ backend = os.getenv("KERAS_BACKEND", "tensorflow") if backend == "torch": from . import configs, pruning_methods - from .core.finetuning import ( + from .core.hyperparameter_optimization import ( + PQConfig, ap_config, autosparse_config, cs_config, dst_config, + fitcompress_config, load_from_dictionary, load_from_file, mdmm_config, @@ -50,15 +52,18 @@ _forwards.append("mdmm_config") _forwards.append("pdp_config") _forwards.append("wanda_config") + _forwards.append("fitcompress_config") _forwards.append("load_from_file") _forwards.append("load_from_dictionary") _forwards.append("get_ebops") _forwards.append("load_torch_hgq_model") + _forwards.append("PQConfig") __all__ = _forwards else: from . 
import configs, pruning_methods - from .core.finetuning import ( + from .core.hyperparameter_optimization import ( + PQConfig, ap_config, autosparse_config, cs_config, @@ -103,4 +108,5 @@ _forwards.append("wanda_config") _forwards.append("load_from_file") _forwards.append("load_from_dictionary") + _forwards.append("PQConfig") __all__ = _forwards diff --git a/src/pquant/configs/finetuning.yaml b/src/pquant/configs/finetuning.yaml index ad68e50..fb48ead 100644 --- a/src/pquant/configs/finetuning.yaml +++ b/src/pquant/configs/finetuning.yaml @@ -50,7 +50,7 @@ fitcompress_parameters: greedy_astar : true approximate : true f_lambda : 0.5 -finetuning_parameters: +hpo_parameters: experiment_name: resnet_18_experiment_4 epochs: 2 model_name: resnet18 diff --git a/src/pquant/core/finetuning.py b/src/pquant/core/hyperparameter_optimization.py similarity index 91% rename from src/pquant/core/finetuning.py rename to src/pquant/core/hyperparameter_optimization.py index f0a6ea0..682ff15 100644 --- a/src/pquant/core/finetuning.py +++ b/src/pquant/core/hyperparameter_optimization.py @@ -11,8 +11,10 @@ from pydantic import BaseModel, Field, field_validator from pquant.core import constants -from pquant.data_models.finetuning_model import BaseFinetuningModel from pquant.data_models.fitcompress_model import BaseFitCompressModel +from pquant.data_models.hyperparameter_optimization_model import ( + BaseHyperparameterOptimizationModel, +) from pquant.data_models.pruning_model import ( ActivationPruningModel, AutoSparsePruningModel, @@ -37,7 +39,7 @@ def get_sampler(sampler_type, **kwargs): def log_model_by_backend(model, name, signature=None, registered_model_name=None): import mlflow - + backend = keras.backend.backend() kwargs = { "artifact_path": name, @@ -63,8 +65,8 @@ def validate_direction(cls, direction): return direction -class TuningConfig(BaseModel): - finetuning_parameters: BaseFinetuningModel +class PQConfig(BaseModel): + hpo_parameters: BaseHyperparameterOptimizationModel pruning_parameters: Annotated[ Union[ CSPruningModel, @@ -102,7 +104,7 @@ def load_from_config(cls, config): pruning_model_cls = constants.PRUNING_MODEL_REGISTRY.get(pruning_method, BasePruningModel) return cls( - finetuning_parameters=BaseFinetuningModel(**config.get("finetuning_parameters", {})), + hpo_parameters=BaseHyperparameterOptimizationModel(**config.get("hpo_parameters", {})), pruning_parameters=pruning_model_cls(**config.get("pruning_parameters", {})), quantization_parameters=BaseQuantizationModel(**config.get("quantization_parameters", {})), training_parameters=BaseTrainingModel(**config.get("training_parameters", {})), @@ -114,7 +116,7 @@ def get_dict(self): class TuningTask: - def __init__(self, config: TuningConfig): + def __init__(self, config: PQConfig): self.config = config self.hyperparameters = {} self.objectives: Dict[str, MetricFunction] = {} @@ -192,7 +194,7 @@ def get_scheduler_function(self) -> Callable: return self._scheduler_function def set_hyperparameters(self): - hp_config = self.config.finetuning_parameters.hyperparameter_search + hp_config = self.config.hpo_parameters.hyperparameter_search numerical_params = hp_config.numerical categorical_params = hp_config.categorical @@ -300,7 +302,7 @@ def objective(self, trial, model, train_func, valid_func, **kwargs): signature = infer_signature(sample_input.cpu().numpy(), sample_output.detach().cpu().numpy()) mlflow.log_text(yaml.safe_dump(self.get_dict()), "config.yaml") - model_name = self.config.finetuning_parameters.model_name + model_name = 
self.config.hpo_parameters.model_name log_model_by_backend( model=trained_model, name=model_name, @@ -311,25 +313,25 @@ def objective(self, trial, model, train_func, valid_func, **kwargs): return objectives if len(objectives) > 1 else objectives[0] def run_optimization(self, model, **kwargs): - finetuning_parameters = self.config.finetuning_parameters + hpo_parameters = self.config.hpo_parameters if self.enable_mlflow: import mlflow - + if not self.tracking_uri: raise ValueError("Tracking URI must be set when MLflow logging is enabled.") mlflow.set_tracking_uri(self.tracking_uri) - mlflow.set_experiment(finetuning_parameters.experiment_name) + mlflow.set_experiment(hpo_parameters.experiment_name) - sampler = get_sampler(finetuning_parameters.sampler.type, **finetuning_parameters.sampler.params) + sampler = get_sampler(hpo_parameters.sampler.type, **hpo_parameters.sampler.params) study = optuna.create_study( - study_name=finetuning_parameters.experiment_name, + study_name=hpo_parameters.experiment_name, storage=self.storage_db, sampler=sampler, load_if_exists=True, directions=[metric_object.direction for _, metric_object in self.objectives.items()], ) - num_trials = finetuning_parameters.num_trials + num_trials = hpo_parameters.num_trials study.optimize( lambda trial: self.objective( trial, @@ -349,61 +351,61 @@ def ap_config(): yaml_name = "config_ap.yaml" parent = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) path = os.path.join(parent, "configs", yaml_name) - return TuningConfig.load_from_file(path) + return PQConfig.load_from_file(path) def autosparse_config(): yaml_name = "config_autosparse.yaml" parent = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) path = os.path.join(parent, "configs", yaml_name) - return TuningConfig.load_from_file(path) + return PQConfig.load_from_file(path) def cs_config(): yaml_name = "config_cs.yaml" parent = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) path = os.path.join(parent, "configs", yaml_name) - return TuningConfig.load_from_file(path) + return PQConfig.load_from_file(path) def dst_config(): yaml_name = "config_dst.yaml" parent = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) path = os.path.join(parent, "configs", yaml_name) - return TuningConfig.load_from_file(path) + return PQConfig.load_from_file(path) def fitcompress_config(): yaml_name = "config_fitcompress.yaml" parent = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) path = os.path.join(parent, "configs", yaml_name) - return TuningConfig.load_from_file(path) + return PQConfig.load_from_file(path) def mdmm_config(): yaml_name = "config_mdmm.yaml" parent = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) path = os.path.join(parent, "configs", yaml_name) - return TuningConfig.load_from_file(path) + return PQConfig.load_from_file(path) def pdp_config(): yaml_name = "config_pdp.yaml" parent = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) path = os.path.join(parent, "configs", yaml_name) - return TuningConfig.load_from_file(path) + return PQConfig.load_from_file(path) def wanda_config(): yaml_name = "config_wanda.yaml" parent = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) path = os.path.join(parent, "configs", yaml_name) - return TuningConfig.load_from_file(path) + return PQConfig.load_from_file(path) def load_from_file(path): - return TuningConfig.load_from_file(path) + return PQConfig.load_from_file(path) def load_from_dictionary(config): - return TuningConfig.load_from_config(config) + return 
PQConfig.load_from_config(config) diff --git a/src/pquant/core/keras/activations.py b/src/pquant/core/keras/activations.py index d2af1a0..ddc3a27 100644 --- a/src/pquant/core/keras/activations.py +++ b/src/pquant/core/keras/activations.py @@ -37,9 +37,9 @@ def __init__( ): super().__init__(**kwargs) if isinstance(config, dict): - from pquant.core.finetuning import TuningConfig + from pquant.core.hyperparameter_optimization import PQConfig - config = TuningConfig.load_from_config(config) + config = PQConfig.load_from_config(config) if in_quant_bits is None: self.k_input = config.quantization_parameters.default_data_keep_negatives self.i_input = config.quantization_parameters.default_data_integer_bits @@ -88,6 +88,7 @@ def build(self, input_shape): is_data=True, is_heterogeneous=self.use_hgq, hgq_gamma=self.hgq_gamma, + place="datalane", ) if self.quantize_output: self.output_quantizer = Quantizer( @@ -99,6 +100,7 @@ def build(self, input_shape): is_data=True, is_heterogeneous=self.use_hgq, hgq_gamma=self.hgq_gamma, + place="datalane", ) if self.use_multiplier: diff --git a/src/pquant/core/keras/layers.py b/src/pquant/core/keras/layers.py index 3894e96..09622d9 100644 --- a/src/pquant/core/keras/layers.py +++ b/src/pquant/core/keras/layers.py @@ -19,7 +19,7 @@ from keras.src.layers.input_spec import InputSpec from keras.src.ops.operation_utils import compute_pooling_output_shape -from pquant.core.finetuning import TuningConfig +from pquant.core.hyperparameter_optimization import PQConfig from pquant.core.keras.activations import PQActivation from pquant.core.keras.quantizer import Quantizer from pquant.core.utils import get_pruning_layer @@ -39,12 +39,13 @@ def __init__( weight_quant_bits: Tuple[T, T, T] = None, bias_quant_bits: Tuple[T, T, T] = None, out_quant_bits: Tuple[T, T, T] = None, + enable_pruning=None, *args, **kwargs, ): super().__init__(**kwargs) if isinstance(config, dict): - config = TuningConfig.load_from_config(config) + config = PQConfig.load_from_config(config) if in_quant_bits is not None: self.k_input, self.i_input, self.f_input = in_quant_bits else: @@ -87,7 +88,7 @@ def __init__( self.overflow_mode_parameters = config.quantization_parameters.overflow_mode_parameters self.overflow_mode_data = config.quantization_parameters.overflow_mode_data self.use_hgq = config.quantization_parameters.use_high_granularity_quantization - self.enable_pruning = config.pruning_parameters.enable_pruning + self.enable_pruning = enable_pruning if enable_pruning is not None else config.pruning_parameters.enable_pruning self.use_fitcompress = config.fitcompress_parameters.enable_fitcompress self.hgq_gamma = config.quantization_parameters.hgq_gamma self.granularity = config.quantization_parameters.granularity @@ -109,6 +110,7 @@ def __init__( is_data=False, granularity=self.granularity, hgq_gamma=self.hgq_gamma, + place="weight", ) # if self.use_bias: @@ -121,6 +123,7 @@ def __init__( is_heterogeneous=self.use_hgq, is_data=False, hgq_gamma=self.hgq_gamma, + place="bias", ) self.input_quantizer = Quantizer( k=ops.convert_to_tensor(self.k_input), @@ -131,6 +134,7 @@ def __init__( is_heterogeneous=self.use_hgq, is_data=True, hgq_gamma=self.hgq_gamma, + place="datalane", ) self.output_quantizer = Quantizer( k=ops.convert_to_tensor(self.k_output), @@ -141,6 +145,7 @@ def __init__( is_heterogeneous=self.use_hgq, is_data=True, hgq_gamma=self.hgq_gamma, + place="datalane", ) def set_enable_pruning(self, enable_pruning): @@ -295,6 +300,7 @@ def __init__( weight_quant_bits: Tuple[T, T, T] = None, 
bias_quant_bits: Tuple[T, T, T] = None, out_quant_bits: Tuple[T, T, T] = None, + enable_pruning=None, **kwargs, ): super().__init__( @@ -321,6 +327,7 @@ def __init__( weight_quant_bits=weight_quant_bits, bias_quant_bits=bias_quant_bits, out_quant_bits=out_quant_bits, + enable_pruning=enable_pruning, **kwargs, ) self.depthwise_regularizer = depthwise_regularizer @@ -500,6 +507,7 @@ def __init__( weight_quant_bits: Tuple[T, T, T] = None, bias_quant_bits: Tuple[T, T, T] = None, out_quant_bits: Tuple[T, T, T] = None, + enable_pruning=None, **kwargs, ): super().__init__( @@ -527,6 +535,7 @@ def __init__( weight_quant_bits=weight_quant_bits, bias_quant_bits=bias_quant_bits, out_quant_bits=out_quant_bits, + enable_pruning=enable_pruning, **kwargs, ) @@ -725,6 +734,7 @@ def __init__( weight_quant_bits: Tuple[T, T, T] = None, bias_quant_bits: Tuple[T, T, T] = None, out_quant_bits: Tuple[T, T, T] = None, + enable_pruning=None, strides=1, padding="valid", data_format=None, @@ -767,6 +777,7 @@ def __init__( weight_quant_bits=weight_quant_bits, bias_quant_bits=bias_quant_bits, out_quant_bits=out_quant_bits, + enable_pruning=enable_pruning, **kwargs, ) @@ -883,6 +894,7 @@ def __init__( weight_quant_bits: Tuple[T, T, T] = None, bias_quant_bits: Tuple[T, T, T] = None, out_quant_bits: Tuple[T, T, T] = None, + enable_pruning=None, use_bias=True, kernel_initializer="glorot_uniform", bias_initializer="zeros", @@ -901,6 +913,7 @@ def __init__( weight_quant_bits=weight_quant_bits, bias_quant_bits=bias_quant_bits, out_quant_bits=out_quant_bits, + enable_pruning=enable_pruning, **kwargs, ) self.weight_transpose = (1, 0) @@ -993,8 +1006,6 @@ def call(self, x, training=None): if bias is not None: x = ops.add(x, bias) x = self.post_forward(x, training) - if self.use_hgq and self.enable_quantization: - self.add_loss(self.hgq_loss()) return x def get_config(self): @@ -1077,6 +1088,7 @@ def build(self, input_shape): is_heterogeneous=self.use_hgq, is_data=True, hgq_gamma=self.hgq_gamma, + place="datalane", ) self.weight_quantizer = Quantizer( k=1.0, @@ -1086,6 +1098,7 @@ def build(self, input_shape): overflow=self.overflow_mode_parameters, is_data=False, is_heterogeneous=self.use_hgq, + place="weight", ) self.bias_quantizer = Quantizer( k=1.0, @@ -1095,6 +1108,7 @@ def build(self, input_shape): overflow=self.overflow_mode_parameters, is_data=False, is_heterogeneous=self.use_hgq, + place="bias", ) self.input_quantizer.build(input_shape) self.weight_quantizer.build(self.moving_variance.shape) @@ -1256,6 +1270,7 @@ def build(self, input_shape): is_heterogeneous=self.use_hgq, is_data=True, hgq_gamma=self.hgq_gamma, + place="datalane", ) self.output_quantizer = Quantizer( k=1.0, @@ -1266,6 +1281,7 @@ def build(self, input_shape): is_heterogeneous=self.use_hgq, is_data=True, hgq_gamma=self.hgq_gamma, + place="datalane", ) if self.use_hgq: self.input_quantizer.build(input_shape) @@ -1636,19 +1652,10 @@ def get_layer_keep_ratio(model): PQDense, ), ): - if layer.pruning_first: - weight = ops.transpose(layer.pruning_layer.get_hard_mask(), layer.weight_transpose_back) * layer._kernel - if layer.enable_quantization: - weight = layer.weight_quantizer(weight) - weight = weight - else: - weight = layer._kernel - if layer.enable_quantization: - weight = layer.weight_quantizer(weight) - weight = ops.transpose(layer.pruning_layer.get_hard_mask(), layer.weight_transpose_back) * weight - total_w += ops.size(weight) - rem = ops.count_nonzero(weight) - remaining_weights += rem + weight = layer.kernel + total_w += ops.size(weight) + rem = 
ops.count_nonzero(weight) + remaining_weights += rem elif isinstance(layer, PQSeparableConv2d): depthwise_weight = ops.cast(layer.depthwise_conv.kernel, layer.depthwise_conv.kernel.dtype) pointwise_weight = ops.cast(layer.pointwise_conv.kernel, layer.pointwise_conv.kernel.dtype) @@ -1690,12 +1697,12 @@ def get_layer_keep_ratio(model): def is_training_stage(layer): - return False if layer.pruning_layer.is_finetuning and layer.pruning_layer.is_pretraining else True + return False if layer.pruning_layer.is_finetuning or layer.pruning_layer.is_pretraining else True def get_model_losses(model, losses): for layer in model.layers: - loss = 0 + loss = 0.0 if isinstance( layer, ( diff --git a/src/pquant/core/keras/quantizer.py b/src/pquant/core/keras/quantizer.py index 890f34e..d4398a0 100644 --- a/src/pquant/core/keras/quantizer.py +++ b/src/pquant/core/keras/quantizer.py @@ -20,6 +20,7 @@ def __init__( is_data=False, granularity="per_tensor", hgq_gamma=0, + place="datalane", ): super().__init__() self.k_init = float(k) @@ -30,8 +31,9 @@ def __init__( self.round_mode = round_mode self.use_hgq = is_heterogeneous self.is_data = is_data + self.place = place self.quantizer = create_quantizer( - self.k_init, self.i_init, self.f_init, self.overflow, self.round_mode, self.use_hgq, self.is_data + self.k_init, self.i_init, self.f_init, self.overflow, self.round_mode, self.use_hgq, self.is_data, place ) self.is_pretraining = False self.hgq_gamma = hgq_gamma @@ -128,6 +130,7 @@ def from_config(cls, config): is_heterogeneous=config.pop("is_heterogeneous"), is_data=config.pop("is_data"), granularity=config.pop("granularity"), + place=config.pop("place"), ) if use_hgq: @@ -148,6 +151,7 @@ def get_config(self): "hgq_gamma": self.hgq_gamma, "is_heterogeneous": self.use_hgq, "granularity": self.granularity, + "place": self.place, } ) if self.use_hgq: diff --git a/src/pquant/core/quantizer_functions.py b/src/pquant/core/quantizer_functions.py index 1776e3e..81922d7 100644 --- a/src/pquant/core/quantizer_functions.py +++ b/src/pquant/core/quantizer_functions.py @@ -1,43 +1,48 @@ +import keras + + def create_fixed_quantizer(k, i, f, overflow, round_mode): - from quantizers import get_fixed_quantizer + if keras.backend.backend() == "torch": + from pquant.core.torch.fixed_point_quantizer import get_fixed_quantizer + else: + from quantizers import get_fixed_quantizer quantizer = get_fixed_quantizer(round_mode=round_mode, overflow_mode=overflow) return quantizer -def create_hgq_parameters_quantizer(k, i, f, overflow, round_mode): - from hgq.quantizer import Quantizer +def create_hgq_parameters_quantizer(k, i, f, overflow, round_mode, place): + from hgq.quantizer import Quantizer, QuantizerConfig - return Quantizer( - k0=k, - i0=i, - f0=f, - round_mode=round_mode, - overflow_mode=overflow, - q_type="kif", - homogeneous_axis=(), + quantizer_config = QuantizerConfig( + q_type="kif", place=place, k0=k, i0=i, f0=f, overflow_mode=overflow, round_mode=round_mode, homogeneous_axis=() ) + return Quantizer(config=quantizer_config) + def create_hgq_data_quantizer(k, i, f, overflow, round_mode): - from hgq.quantizer import Quantizer + from hgq.quantizer import Quantizer, QuantizerConfig - return Quantizer( + quantizer_config = QuantizerConfig( + q_type="kif", + place="datalane", k0=k, i0=i, f0=f, - round_mode=round_mode, overflow_mode=overflow, - q_type="kif", + round_mode=round_mode, homogeneous_axis=(0,), ) + return Quantizer(config=quantizer_config) + -def create_quantizer(k, i, f, overflow, round_mode, is_heterogeneous, 
is_data): +def create_quantizer(k, i, f, overflow, round_mode, is_heterogeneous, is_data, place="datalane"): if is_heterogeneous: if is_data: return create_hgq_data_quantizer(k, i, f, overflow, round_mode) else: - return create_hgq_parameters_quantizer(k, i, f, overflow, round_mode) + return create_hgq_parameters_quantizer(k, i, f, overflow, round_mode, place) else: return create_fixed_quantizer(k, i, f, overflow, round_mode) diff --git a/src/pquant/core/torch/activations.py b/src/pquant/core/torch/activations.py index 749fdeb..4630a04 100644 --- a/src/pquant/core/torch/activations.py +++ b/src/pquant/core/torch/activations.py @@ -37,9 +37,9 @@ def __init__( ): super().__init__() if isinstance(config, dict): - from pquant.core.finetuning import TuningConfig + from pquant.core.hyperparameter_optimization import PQConfig - config = TuningConfig.load_from_config(config) + config = PQConfig.load_from_config(config) self.config = config if in_quant_bits is None: self.k_input = config.quantization_parameters.default_data_keep_negatives @@ -91,6 +91,7 @@ def check_is_built(self, input_shape): is_data=True, is_heterogeneous=self.use_hgq, hgq_gamma=self.hgq_gamma, + place="datalane", ) self.input_quantizer = Quantizer( k=self.k_input, @@ -100,7 +101,8 @@ def check_is_built(self, input_shape): round_mode=self.round_mode, is_data=True, is_heterogeneous=self.use_hgq, - hgq_gamma=self.hgq_gamma, + hgq_gamma=self.hgq_gamma, + place="datalane", ) if self.use_hgq: self.input_quantizer.quantizer.build(input_shape) diff --git a/src/pquant/core/torch/fit_compress.py b/src/pquant/core/torch/fit_compress.py index 9794931..d99e4fb 100644 --- a/src/pquant/core/torch/fit_compress.py +++ b/src/pquant/core/torch/fit_compress.py @@ -452,7 +452,7 @@ def assign_parameters(self, model, params): for _, module in model.named_modules(): if isinstance(module, (PQDense, PQConv2d)): for name_param, matrix_param in list(module.named_parameters()): - if name_param.endswith('weight'): + if name_param.endswith('_weight'): matrix_param.data = nn.parameter.Parameter(params[i].to(self.device)) matrix_param.collect = True i += 1 diff --git a/src/pquant/core/torch/layers.py b/src/pquant/core/torch/layers.py index 446dc52..fc62be8 100644 --- a/src/pquant/core/torch/layers.py +++ b/src/pquant/core/torch/layers.py @@ -102,6 +102,7 @@ def check_is_built(self, input_shape): is_heterogeneous=self.use_hgq, is_data=True, hgq_gamma=self.hgq_gamma, + place="datalane", ) self.weight_quantizer = Quantizer( k=torch.tensor(self.k_weight), @@ -113,6 +114,7 @@ def check_is_built(self, input_shape): is_data=False, hgq_gamma=self.hgq_gamma, granularity=self.granularity, + place="weight", ) self.bias_quantizer = Quantizer( @@ -124,6 +126,7 @@ def check_is_built(self, input_shape): is_heterogeneous=self.use_hgq, is_data=False, hgq_gamma=self.hgq_gamma, + place="bias", ) if self.quantize_output: self.output_quantizer = Quantizer( @@ -135,6 +138,7 @@ def check_is_built(self, input_shape): is_heterogeneous=self.use_hgq, is_data=True, hgq_gamma=self.hgq_gamma, + place="datalane", ) self.n_parallel = ops.prod(tuple(input_shape)[1:-1]) @@ -598,11 +602,9 @@ def extra_repr(self): return s.format(**self.__dict__) -def add_compression_layers(model, config, input_shape, device="cuda"): +def add_compression_layers(model, config): model = add_quantized_activations_to_model_layer(model, config) model = add_pruning_to_model(model, config) - model.to(device) - model(torch.rand(input_shape, device=next(model.parameters()).device)) return model @@ -654,6 +656,7 @@ def 
build(self, input_shape): is_heterogeneous=self.use_hgq, is_data=True, hgq_gamma=self.hgq_gamma, + place="datalane", ) self.output_quantizer = Quantizer( k=torch.tensor(self.k_output), @@ -664,6 +667,7 @@ def build(self, input_shape): is_heterogeneous=self.use_hgq, is_data=True, hgq_gamma=self.hgq_gamma, + place="datalane", ) self.input_shape = (1,) + input_shape[1:] @@ -861,6 +865,7 @@ def check_is_built(self, input_shape): is_heterogeneous=self.use_hgq, is_data=True, hgq_gamma=self.hgq_gamma, + place="datalane", ) self.weight_quantizer = Quantizer( k=torch.tensor(self.k_weight), @@ -870,6 +875,7 @@ def check_is_built(self, input_shape): overflow=self.overflow_mode_parameters, is_data=False, is_heterogeneous=self.use_hgq, + place="weight", ) self.bias_quantizer = Quantizer( k=torch.tensor(self.k_bias), @@ -879,6 +885,7 @@ def check_is_built(self, input_shape): overflow=self.overflow_mode_parameters, is_data=False, is_heterogeneous=self.use_hgq, + place="bias", ) if self.use_hgq: self.input_quantizer.quantizer.build(input_shape) @@ -1021,6 +1028,7 @@ def check_is_built(self, input_shape): is_heterogeneous=self.use_hgq, is_data=True, hgq_gamma=self.hgq_gamma, + place="datalane", ) self.weight_quantizer = Quantizer( k=torch.tensor(self.k_weight), @@ -1030,6 +1038,7 @@ def check_is_built(self, input_shape): overflow=self.overflow_mode_parameters, is_data=False, is_heterogeneous=self.use_hgq, + place="weight", ) self.bias_quantizer = Quantizer( k=torch.tensor(self.k_bias), @@ -1039,6 +1048,7 @@ def check_is_built(self, input_shape): overflow=self.overflow_mode_parameters, is_data=False, is_heterogeneous=self.use_hgq, + place="bias", ) if self.use_hgq: self.input_quantizer.quantizer.build(input_shape) @@ -1113,16 +1123,23 @@ def add_layer_specific_quantization_to_model(name, layer, config): if name in config.quantization_parameters.layer_specific: layer_config = config.quantization_parameters.layer_specific[name] if "weight" in layer_config: + weight_k_bits = layer_config["weight"]["keep_negatives"] weight_int_bits = layer_config["weight"]["integer_bits"] weight_fractional_bits = layer_config["weight"]["fractional_bits"] + layer.k_weight = torch.tensor(weight_k_bits) layer.i_weight = torch.tensor(weight_int_bits) layer.f_weight = torch.tensor(weight_fractional_bits) if "bias" in layer_config: + bias_k_bits = layer_config["bias"]["keep_negatives"] bias_int_bits = layer_config["bias"]["integer_bits"] bias_fractional_bits = layer_config["bias"]["fractional_bits"] + layer.k_bias = torch.tensor(bias_k_bits) layer.i_bias = torch.tensor(bias_int_bits) layer.f_bias = torch.tensor(bias_fractional_bits) if "input" in layer_config: + if "keep_negatives" in layer_config["input"]: + input_keep_negatives = torch.tensor(layer_config["input"]["keep_negatives"]) + layer.k_input = input_keep_negatives if "integer_bits" in layer_config["input"]: input_int_bits = torch.tensor(layer_config["input"]["integer_bits"]) layer.i_input = input_int_bits @@ -1133,6 +1150,9 @@ def add_layer_specific_quantization_to_model(name, layer, config): quantize = layer_config["input"]["quantize"] layer.quantize_input = quantize if "output" in layer_config: + if "keep_negatives" in layer_config["input"]: + output_keep_negatives = torch.tensor(layer_config["output"]["keep_negatives"]) + layer.k_output = output_keep_negatives if "integer_bits" in layer_config["output"]: output_int_bits = torch.tensor(layer_config["output"]["integer_bits"]) layer.i_output = input_int_bits @@ -1475,13 +1495,19 @@ def post_pretrain_functions(model, config, 
train_loader=None, loss_function=None if config.fitcompress_parameters.enable_fitcompress: from pquant.core.torch.fit_compress import call_fitcompress # noqa: 811 + for layer in model.modules(): + if isinstance( + layer, (PQConv2d, PQConv1d, PQDense, PQActivation, PQBatchNorm2d, PQBatchNorm1d, PQAvgPoolBase, Quantizer) + ): + # Trigger it here to enable quantization before FITCompress + layer.post_pre_train_function() config, pruning_mask_importance_scores = call_fitcompress( config, model, train_loader, loss_function, input_shape=input_shape ) idx = 0 for layer in model.modules(): if isinstance(layer, (PQConv2d, PQConv1d, PQDense)): - layer.post_pre_train_function() + # layer.post_pre_train_function() # set_data_quantization_bits(model) layer.pruning_layer.mask.assign(pruning_mask_importance_scores[idx]) layer.pruning_layer.pre_finetune_function() # So mask is not updated during training anymore @@ -1533,16 +1559,7 @@ def get_layer_keep_ratio(model): remaining_weights = 0 for layer in model.modules(): if isinstance(layer, (PQConv2d, PQConv1d, PQDense)): - if layer.pruning_first: - weight = layer.pruning_layer.get_hard_mask() * layer._weight - if layer.enable_quantization: - weight = layer.weight_quantizer(weight) - weight = weight - else: - weight = layer._weight - if layer.enable_quantization: - weight = layer.weight_quantizer(weight) - weight = layer.pruning_layer.get_hard_mask() * weight + weight = layer.weight total_w += ops.size(weight) rem = ops.count_nonzero(weight) remaining_weights += rem @@ -1555,7 +1572,7 @@ def get_layer_keep_ratio(model): def is_training_stage(layer): - return False if layer.pruning_layer.is_finetuning and layer.pruning_layer.is_pretraining else True + return False if layer.pruning_layer.is_finetuning or layer.pruning_layer.is_pretraining else True def get_model_losses(model, losses): @@ -1573,43 +1590,95 @@ def get_model_losses(model, losses): return losses -def create_default_layer_quantization_pruning_config(model): - config = {"layer_specific": {}, "disable_pruning_for_layers": []} +def create_default_layer_quantization_pruning_config(model, config): + # subconfig = {"layer_specific": {}, "disable_pruning_for_layers": []} for name, layer in model.named_modules(): if layer.__class__ in [nn.Linear, nn.Conv1d, nn.Conv2d]: if layer.bias is None: - config.layer_specific[name] = { - "input": {"integer_bits": 0, "fractional_bits": 7, "quantize": True}, - "weight": {"integer_bits": 0, "fractional_bits": 7}, - "output": {"integer_bits": 0, "fractional_bits": 7, "quantize": True}, + config.quantization_parameters.layer_specific[name] = { + "input": { + "keep_negatives": config.quantization_parameters.default_data_keep_negatives, + "integer_bits": 0, + "fractional_bits": 7, + "quantize": config.quantization_parameters.quantize_input, + }, + "weight": { + "keep_negatives": config.quantization_parameters.default_weight_keep_negatives, + "integer_bits": 0, + "fractional_bits": 7, + }, + "output": { + "keep_negatives": config.quantization_parameters.default_data_keep_negatives, + "integer_bits": 0, + "fractional_bits": 7, + "quantize": config.quantization_parameters.quantize_output, + }, } else: - config.layer_specific[name] = { - "input": {"integer_bits": 0, "fractional_bits": 7, "quantize": True}, - "weight": {"integer_bits": 0, "fractional_bits": 7}, - "bias": {"integer_bits": 0, "fractional_bits": 7}, - "output": {"integer_bits": 0, "fractional_bits": 7, "quantize": True}, + config.quantization_parameters.layer_specific[name] = { + "input": { + "keep_negatives": 
config.quantization_parameters.default_data_keep_negatives, + "integer_bits": 0, + "fractional_bits": 7, + "quantize": config.quantization_parameters.quantize_input, + }, + "weight": { + "keep_negatives": config.quantization_parameters.default_weight_keep_negatives, + "integer_bits": 0, + "fractional_bits": 7, + }, + "bias": { + "keep_negatives": config.quantization_parameters.default_weight_keep_negatives, + "integer_bits": 0, + "fractional_bits": 7, + }, + "output": { + "keep_negatives": config.quantization_parameters.default_data_keep_negatives, + "integer_bits": 0, + "fractional_bits": 7, + "quantize": config.quantization_parameters.quantize_output, + }, } - config.disable_pruning_for_layers.append(name) + config.pruning_parameters.disable_pruning_for_layers.append(name) elif layer.__class__ in [nn.Tanh, nn.ReLU, nn.AvgPool1d, nn.AvgPool2d, nn.AvgPool3d]: - config.layer_specific[name] = { - "input": {"quantize": True, "integer_bits": 0.0, "fractional_bits": 7.0}, - "output": {"quantize": True, "integer_bits": 0.0, "fractional_bits": 7.0}, + config.quantization_parameters.layer_specific[name] = { + "input": { + "quantize": config.quantization_parameters.quantize_input, + "keep_negatives": config.quantization_parameters.default_data_keep_negatives, + "integer_bits": 0.0, + "fractional_bits": 7.0, + }, + "output": { + "quantize": config.quantization_parameters.quantize_output, + "keep_negatives": config.quantization_parameters.default_data_keep_negatives, + "integer_bits": 0.0, + "fractional_bits": 7.0, + }, } elif layer.__class__ in [nn.BatchNorm2d]: - config.layer_specific[name] = { - "input": {"quantize": True, "integer_bits": 0.0, "fractional_bits": 7.0}, - "weight": {"integer_bits": 0, "fractional_bits": 7.0}, - "bias": {"integer_bits": 0, "fractional_bits": 7.0}, + config.quantization_parameters.layer_specific[name] = { + "input": { + "quantize": config.quantization_parameters.quantize_input, + "keep_negatives": config.quantization_parameters.default_data_keep_negatives, + "integer_bits": 0.0, + "fractional_bits": 7.0, + }, + "weight": { + "keep_negatives": config.quantization_parameters.default_weight_keep_negatives, + "integer_bits": 0, + "fractional_bits": 7.0, + }, + "bias": { + "keep_negatives": config.quantization_parameters.default_weight_keep_negatives, + "integer_bits": 0, + "fractional_bits": 7.0, + }, } return config def populate_config_with_all_layers(model, config): - custom_scheme = create_default_layer_quantization_pruning_config(model) - config.quantization_parameters.layer_specific = custom_scheme["layer_specific"] - config.pruning_parameters.disable_pruning_for_layers = custom_scheme["disable_pruning_for_layers"] - return config + return create_default_layer_quantization_pruning_config(model, config) def remove_compression_layers(module, config): diff --git a/src/pquant/core/torch/quantizer.py b/src/pquant/core/torch/quantizer.py index 75079c8..078de57 100644 --- a/src/pquant/core/torch/quantizer.py +++ b/src/pquant/core/torch/quantizer.py @@ -8,7 +8,17 @@ class Quantizer(nn.Module): def __init__( - self, k, i, f, overflow, round_mode, is_heterogeneous, is_data=False, granularity='per_tensor', hgq_gamma=0 + self, + k, + i, + f, + overflow, + round_mode, + is_heterogeneous, + is_data=False, + granularity='per_tensor', + hgq_gamma=0, + place="datalane", ): super().__init__() self.k = torch.nn.Parameter(torch.tensor(k), requires_grad=False) @@ -19,7 +29,7 @@ def __init__( self.is_data = is_data self.i_init = i self.f_init = f - self.quantizer = 
create_quantizer(self.k, i, f, self.overflow, self.round_mode, self.use_hgq, self.is_data) + self.quantizer = create_quantizer(self.k, i, f, self.overflow, self.round_mode, self.use_hgq, self.is_data, place) self.is_pretraining = False self.hgq_gamma = hgq_gamma if isinstance(granularity, Enum): @@ -27,6 +37,8 @@ def __init__( else: self.granularity = granularity self.final_compression_done = nn.Parameter(torch.tensor(False), requires_grad=False) + if self.granularity == 'per_tensor': + self.initialize_quantization_parameters(self.i_init, self.f_init) def get_quantization_bits(self): if self.use_hgq: @@ -77,10 +89,7 @@ def forward(self, x): self.initialize_quantization_parameters(i, f) return x else: - if not self.training or self.final_compression_done: - self.quantizer(x, k=self.k, i=self.i, f=self.f, training=self.training) if self.granularity == 'per_tensor': - self.initialize_quantization_parameters(self.i_init, self.f_init) _, i, f = self.get_quantization_bits() else: i, f = self.compute_dynamic_bits(x) diff --git a/src/pquant/data_models/finetuning_model.py b/src/pquant/data_models/hyperparameter_optimization_model.py similarity index 93% rename from src/pquant/data_models/finetuning_model.py rename to src/pquant/data_models/hyperparameter_optimization_model.py index c51be21..1c53ca7 100644 --- a/src/pquant/data_models/finetuning_model.py +++ b/src/pquant/data_models/hyperparameter_optimization_model.py @@ -13,7 +13,7 @@ class Sampler(BaseModel): params: Dict[str, Any] = Field(default_factory=dict) -class BaseFinetuningModel(BaseModel): +class BaseHyperparameterOptimizationModel(BaseModel): experiment_name: str = Field(default="experiment_1") model_name: str = Field(default="example_model") sampler: Sampler = Field(default_factory=Sampler) diff --git a/src/pquant/data_models/pruning_model.py b/src/pquant/data_models/pruning_model.py index 3c89828..8b44f49 100644 --- a/src/pquant/data_models/pruning_model.py +++ b/src/pquant/data_models/pruning_model.py @@ -26,7 +26,7 @@ class DSTPruningModel(BasePruningModel): class FITCompressPruningModel(BasePruningModel): pruning_method: Literal["fitcompress"] = "fitcompress" - min_frac_bit: float = Field(default=2.0) + min_frac_bits: float = Field(default=2.0) class PDPPruningModel(BasePruningModel): diff --git a/src/pquant/pruning_methods/activation_pruning.py b/src/pquant/pruning_methods/activation_pruning.py index aacb27e..0940f88 100644 --- a/src/pquant/pruning_methods/activation_pruning.py +++ b/src/pquant/pruning_methods/activation_pruning.py @@ -6,9 +6,9 @@ class ActivationPruning(keras.layers.Layer): def __init__(self, config, layer_type, *args, **kwargs): if isinstance(config, dict): - from pquant.core.finetuning import TuningConfig + from pquant.core.hyperparameter_optimization import PQConfig - config = TuningConfig.load_from_config(config) + config = PQConfig.load_from_config(config) super().__init__(*args, **kwargs) self.config = config self.act_type = "relu" @@ -29,7 +29,8 @@ def build(self, input_shape): self.shape = (input_shape[0], 1, 1) else: self.shape = (input_shape[0], 1, 1, 1) - self.mask = ops.ones(self.shape) + self.mask = self.add_weight(shape=self.shape, initializer="ones", trainable=False) + self.mask_placeholder = ops.ones(self.shape) def collect_output(self, output, training): """ @@ -37,7 +38,7 @@ def collect_output(self, output, training): linear/convolution layer are over 0. 
Every t_delta steps, uses these values to update the mask to prune those channels and neurons that are active less than a given threshold """ - if not training or self.is_pretraining: + if not training or self.is_pretraining or self.is_finetuning: # Don't collect during validation return if self.activations is None: @@ -56,15 +57,19 @@ def collect_output(self, output, training): self.total = 0 self.batches_collected = 0 if self.layer_type == "linear": - self.mask = ops.expand_dims(ops.cast((pct_active > self.threshold), pct_active.dtype), 1) + self.mask_placeholder = ops.expand_dims(ops.cast((pct_active > self.threshold), pct_active.dtype), 1) else: pct_active = ops.reshape(pct_active, (pct_active.shape[0], -1)) pct_active_avg = ops.mean(pct_active, axis=-1) pct_active_above_threshold = ops.cast((pct_active_avg > self.threshold), pct_active_avg.dtype) if len(output.shape) == 3: - self.mask = ops.reshape(pct_active_above_threshold, list(pct_active_above_threshold.shape) + [1, 1]) + self.mask_placeholder = ops.reshape( + pct_active_above_threshold, list(pct_active_above_threshold.shape) + [1, 1] + ) else: - self.mask = ops.reshape(pct_active_above_threshold, list(pct_active_above_threshold.shape) + [1, 1, 1]) + self.mask_placeholder = ops.reshape( + pct_active_above_threshold, list(pct_active_above_threshold.shape) + [1, 1, 1] + ) self.activations *= 0.0 def call(self, weight): # Mask is only updated every t_delta step, using collect_output @@ -97,6 +102,7 @@ def get_layer_sparsity(self, weight): def post_epoch_function(self, epoch, total_epochs): if self.is_pretraining is False: self.t += 1 + self.mask.assign(self.mask_placeholder) pass def get_config(self): diff --git a/src/pquant/pruning_methods/autosparse.py b/src/pquant/pruning_methods/autosparse.py index a8b7cca..552b2c0 100644 --- a/src/pquant/pruning_methods/autosparse.py +++ b/src/pquant/pruning_methods/autosparse.py @@ -57,9 +57,9 @@ class AutoSparse(keras.layers.Layer): def __init__(self, config, layer_type, *args, **kwargs): super().__init__(*args, **kwargs) if isinstance(config, dict): - from pquant.core.finetuning import TuningConfig + from pquant.core.hyperparameter_optimization import PQConfig - config = TuningConfig.load_from_config(config) + config = PQConfig.load_from_config(config) self.g = ops.sigmoid self.config = config self.layer_type = layer_type diff --git a/src/pquant/pruning_methods/cs.py b/src/pquant/pruning_methods/cs.py index 2b013cf..6d5c69a 100644 --- a/src/pquant/pruning_methods/cs.py +++ b/src/pquant/pruning_methods/cs.py @@ -8,9 +8,9 @@ class ContinuousSparsification(keras.layers.Layer): def __init__(self, config, layer_type, *args, **kwargs): super().__init__(*args, **kwargs) if isinstance(config, dict): - from pquant.core.finetuning import TuningConfig + from pquant.core.hyperparameter_optimization import PQConfig - config = TuningConfig.load_from_config(config) + config = PQConfig.load_from_config(config) self.config = config self.final_temp = config.pruning_parameters.final_temp self.is_finetuning = False diff --git a/src/pquant/pruning_methods/dst.py b/src/pquant/pruning_methods/dst.py index f7e92bd..e8aac5f 100644 --- a/src/pquant/pruning_methods/dst.py +++ b/src/pquant/pruning_methods/dst.py @@ -34,9 +34,9 @@ class DST(keras.layers.Layer): def __init__(self, config, layer_type, *args, **kwargs): super().__init__(*args, **kwargs) if isinstance(config, dict): - from pquant.core.finetuning import TuningConfig + from pquant.core.hyperparameter_optimization import PQConfig - config = 
TuningConfig.load_from_config(config) + config = PQConfig.load_from_config(config) self.config = config self.is_pretraining = True self.layer_type = layer_type diff --git a/src/pquant/pruning_methods/fitcompress.py b/src/pquant/pruning_methods/fitcompress.py index 07fa30a..f2b3b5c 100644 --- a/src/pquant/pruning_methods/fitcompress.py +++ b/src/pquant/pruning_methods/fitcompress.py @@ -6,9 +6,9 @@ class FITCompress(keras.layers.Layer): def __init__(self, config, *args, **kwargs): super().__init__(*args, **kwargs) if isinstance(config, dict): - from pquant.core.finetuning import TuningConfig + from pquant.core.hyperparameter_optimization import PQConfig - config = TuningConfig.load_from_config(config) + config = PQConfig.load_from_config(config) self.config = config self.is_pretraining = True self.is_finetuning = False diff --git a/src/pquant/pruning_methods/mdmm.py b/src/pquant/pruning_methods/mdmm.py index 87ed7e6..ae88fce 100644 --- a/src/pquant/pruning_methods/mdmm.py +++ b/src/pquant/pruning_methods/mdmm.py @@ -20,9 +20,9 @@ class MDMM(keras.layers.Layer): def __init__(self, config, layer_type, *args, **kwargs): super().__init__(*args, **kwargs) if isinstance(config, dict): - from pquant.core.finetuning import TuningConfig + from pquant.core.hyperparameter_optimization import PQConfig - config = TuningConfig.load_from_config(config) + config = PQConfig.load_from_config(config) self.config = config self.layer_type = layer_type self.constraint_layer = None @@ -30,7 +30,6 @@ def __init__(self, config, layer_type, *args, **kwargs): self.built = False self.is_finetuning = False self.is_pretraining = True - def build(self, input_shape): pruning_parameters = self.config.pruning_parameters diff --git a/src/pquant/pruning_methods/pdp.py b/src/pquant/pruning_methods/pdp.py index 99fadda..5405935 100644 --- a/src/pquant/pruning_methods/pdp.py +++ b/src/pquant/pruning_methods/pdp.py @@ -7,9 +7,9 @@ class PDP(keras.layers.Layer): def __init__(self, config, layer_type, *args, **kwargs): super().__init__(*args, **kwargs) if isinstance(config, dict): - from pquant.core.finetuning import TuningConfig + from pquant.core.hyperparameter_optimization import PQConfig - config = TuningConfig.load_from_config(config) + config = PQConfig.load_from_config(config) self.init_r = ops.convert_to_tensor(config.pruning_parameters.sparsity) self.epsilon = ops.convert_to_tensor(config.pruning_parameters.epsilon) self.r = config.pruning_parameters.sparsity @@ -18,8 +18,6 @@ def __init__(self, config, layer_type, *args, **kwargs): self.config = config self.is_finetuning = False self.layer_type = layer_type - - def build(self, input_shape): input_shape_concatenated = list(input_shape) + [1] @@ -65,7 +63,7 @@ def get_mask_structured_linear(self, weight): """ if self.is_pretraining: return self.mask - norm = ops.norm(weight, axis=0, ord=2, keepdims=True) + norm = ops.norm(weight, axis=1, ord=2, keepdims=True) norm_flat = ops.ravel(norm) """ Do top_k for all neuron norms. 
Returns sorted array, just use the values on both sides of the threshold (sparsity * size(norm)) to calculate t directly """ @@ -77,10 +75,10 @@ def get_mask_structured_linear(self, weight): Wt = W_all[lim + 1] # norm = ops.expand_dims(norm, -1) t = ops.ones(norm.shape) * 0.5 * (Wh + Wt) - soft_input = ops.concatenate((t**2, norm**2), axis=0) / self.temp - softmax_result = ops.softmax(soft_input, axis=0) - _, mw = ops.unstack(softmax_result, axis=0) - mw = ops.expand_dims(mw, 0) + soft_input = ops.concatenate((t**2, norm**2), axis=1) / self.temp + softmax_result = ops.softmax(soft_input, axis=1) + _, mw = ops.unstack(softmax_result, axis=1) + mw = ops.expand_dims(mw, -1) self.mask = mw return mw @@ -91,8 +89,9 @@ def get_mask_structured_channel(self, weight): """ if self.is_pretraining: return self.mask - weight_reshaped = ops.reshape(weight, (weight.shape[0], weight.shape[1], -1)) - norm = ops.norm(weight_reshaped, axis=2, ord=2) + weight_reshaped = ops.reshape(weight, (weight.shape[0], -1)) + norm = ops.norm(weight_reshaped, axis=1, ord=2) + norm_flat = ops.ravel(norm) """ Do top_k for all channel norms. Returns sorted array, just use the values on both sides of the threshold (sparsity * size(norm)) to calculate t directly """ diff --git a/src/pquant/pruning_methods/wanda.py b/src/pquant/pruning_methods/wanda.py index c637c1c..55b202e 100644 --- a/src/pquant/pruning_methods/wanda.py +++ b/src/pquant/pruning_methods/wanda.py @@ -7,9 +7,9 @@ class Wanda(keras.layers.Layer): def __init__(self, config, layer_type, *args, **kwargs): super().__init__(*args, **kwargs) if isinstance(config, dict): - from pquant.core.finetuning import TuningConfig + from pquant.core.hyperparameter_optimization import PQConfig - config = TuningConfig.load_from_config(config) + config = PQConfig.load_from_config(config) self.config = config self.act_type = "relu" self.t = 0 From b515217a776093299bd828bd6723fab08eaf38f4 Mon Sep 17 00:00:00 2001 From: Roope Niemi Date: Sun, 22 Feb 2026 16:42:17 +0100 Subject: [PATCH 17/20] rename rewind options to match paper --- src/pquant/configs/config_cs.yaml | 2 +- src/pquant/core/keras/layers.py | 4 ++-- src/pquant/core/torch/layers.py | 4 ++-- 3 files changed, 5 insertions(+), 5 deletions(-) diff --git a/src/pquant/configs/config_cs.yaml b/src/pquant/configs/config_cs.yaml index c27277c..521d22f 100644 --- a/src/pquant/configs/config_cs.yaml +++ b/src/pquant/configs/config_cs.yaml @@ -42,7 +42,7 @@ training_parameters: fine_tuning_epochs: 85 pretraining_epochs: 0 pruning_first: true - rewind: post-ticket-search + rewind: post-training-stage rounds: 3 save_weights_epoch: 2 batch_size: 256 diff --git a/src/pquant/core/keras/layers.py b/src/pquant/core/keras/layers.py index 09622d9..5be2cb9 100644 --- a/src/pquant/core/keras/layers.py +++ b/src/pquant/core/keras/layers.py @@ -1424,9 +1424,9 @@ def call(self, x, training=None): def call_post_round_functions(model, rewind, rounds, r): last_round = r == rounds - 1 - if rewind == "round": + if rewind == "every-round": rewind_weights_functions(model) - elif rewind == "post-ticket-search" and last_round: + elif rewind == "post-training-stage" and last_round: rewind_weights_functions(model) elif not last_round: post_round_functions(model) diff --git a/src/pquant/core/torch/layers.py b/src/pquant/core/torch/layers.py index fc62be8..47f90ea 100644 --- a/src/pquant/core/torch/layers.py +++ b/src/pquant/core/torch/layers.py @@ -1444,9 +1444,9 @@ def apply_final_compression(module): def call_post_round_functions(model, rewind, rounds, r): 
last_round = r == rounds - 1 - if rewind == "round": + if rewind == "every-round": rewind_weights_functions(model) - elif rewind == "post-ticket-search" and last_round: + elif rewind == "post-training-stage" and last_round: rewind_weights_functions(model) elif not last_round: post_round_functions(model) From c571b91e53d9638d3bd642e599c22e3985814cdb Mon Sep 17 00:00:00 2001 From: Roope Niemi Date: Sun, 22 Feb 2026 19:48:01 +0100 Subject: [PATCH 18/20] remove extras from configs, fix pdp bug with Keras, add input_shape back to add_compression_layers as an option --- src/pquant/configs/config_ap.yaml | 13 ---------- src/pquant/configs/config_autosparse.yaml | 13 ---------- src/pquant/configs/config_cs.yaml | 13 ---------- src/pquant/configs/config_dst.yaml | 22 +++++++--------- src/pquant/configs/config_fitcompress.yaml | 13 ---------- src/pquant/configs/config_mdmm.yaml | 13 ---------- src/pquant/configs/config_pdp.yaml | 13 ---------- src/pquant/configs/config_wanda.yaml | 13 ---------- src/pquant/configs/finetuning.yaml | 2 +- src/pquant/core/torch/layers.py | 5 +++- src/pquant/pruning_methods/pdp.py | 29 ++++++++++++---------- 11 files changed, 30 insertions(+), 119 deletions(-) diff --git a/src/pquant/configs/config_ap.yaml b/src/pquant/configs/config_ap.yaml index 35085d8..00ffe9e 100644 --- a/src/pquant/configs/config_ap.yaml +++ b/src/pquant/configs/config_ap.yaml @@ -46,16 +46,3 @@ training_parameters: rewind: never rounds: 1 save_weights_epoch: -1 -batch_size: 256 -cosine_tmax: 200 -gamma: 0.1 -l2_decay: 0.0001 -label_smoothing: 0.0 -lr: 0.01 -lr_schedule: cosine -milestones: -- -1 -- -1 -momentum: 0.9 -optimizer: sgd -plot_frequency: 100 diff --git a/src/pquant/configs/config_autosparse.yaml b/src/pquant/configs/config_autosparse.yaml index 8372aa4..b19e342 100644 --- a/src/pquant/configs/config_autosparse.yaml +++ b/src/pquant/configs/config_autosparse.yaml @@ -49,16 +49,3 @@ training_parameters: rewind: never rounds: 1 save_weights_epoch: -1.0 -batch_size: 256 -cosine_tmax: 200 -gamma: 0.1 -l2_decay: 3.0517578125e-05 -label_smoothing: 0.1 -lr: 0.01 -lr_schedule: cosine -milestones: -- -1 -- -1 -momentum: 0.875 -optimizer: sgd -plot_frequency: 100 diff --git a/src/pquant/configs/config_cs.yaml b/src/pquant/configs/config_cs.yaml index 521d22f..cde3178 100644 --- a/src/pquant/configs/config_cs.yaml +++ b/src/pquant/configs/config_cs.yaml @@ -45,16 +45,3 @@ training_parameters: rewind: post-training-stage rounds: 3 save_weights_epoch: 2 -batch_size: 256 -cosine_tmax: 200 -gamma: 0.1 -l2_decay: 0.0001 -label_smoothing: 0.0 -lr: 0.1 -lr_schedule: multistep -milestones: -- 56 -- 71 -momentum: 0.9 -optimizer: sgd -plot_frequency: 100 diff --git a/src/pquant/configs/config_dst.yaml b/src/pquant/configs/config_dst.yaml index 89536ec..9a54073 100644 --- a/src/pquant/configs/config_dst.yaml +++ b/src/pquant/configs/config_dst.yaml @@ -47,16 +47,12 @@ training_parameters: rewind: never rounds: 1 save_weights_epoch: -1 -batch_size: 64 -cosine_tmax: 200 -gamma: 0.1 -l2_decay: 0.0001 -label_smoothing: 0.0 -lr: 0.01 -lr_schedule: multistep -milestones: -- 80 -- 120 -momentum: 0.9 -optimizer: sgd -plot_frequency: 100 +hpo_parameters: + experiment_name: experiment_name + model_name: jet_tagger + num_trials: 1 + sampler: + type: RandomSampler + hyperparameter_search: + numerical: {} + categorical: {} diff --git a/src/pquant/configs/config_fitcompress.yaml b/src/pquant/configs/config_fitcompress.yaml index 583ab50..38fd5ba 100644 --- a/src/pquant/configs/config_fitcompress.yaml +++ 
b/src/pquant/configs/config_fitcompress.yaml @@ -44,16 +44,3 @@ training_parameters: rewind: never rounds: 1 save_weights_epoch: -1 -batch_size: 256 -cosine_tmax: 200 -gamma: 0.1 -l2_decay: 0.0001 -label_smoothing: 0.0 -lr: 0.1 -lr_schedule: cosine -milestones: -- -1 -- -1 -momentum: 0.9 -optimizer: adam -plot_frequency: 100 diff --git a/src/pquant/configs/config_mdmm.yaml b/src/pquant/configs/config_mdmm.yaml index 4899e8f..d793829 100644 --- a/src/pquant/configs/config_mdmm.yaml +++ b/src/pquant/configs/config_mdmm.yaml @@ -55,19 +55,6 @@ fitcompress_parameters: greedy_astar : true approximate : true f_lambda : 1 -batch_size: 64 -cosine_tmax: 200 -gamma: 0.1 -l2_decay: 0.0001 -label_smoothing: 0.0 -lr: 0.01 -lr_schedule: multistep -milestones: -- 75 -- 120 -momentum: 0.9 -optimizer: sgd -plot_frequency: 100 # Note: # use_grad: true is having some bug... flip gradient impl not working as intended diff --git a/src/pquant/configs/config_pdp.yaml b/src/pquant/configs/config_pdp.yaml index fd68fcf..9bf7c8e 100644 --- a/src/pquant/configs/config_pdp.yaml +++ b/src/pquant/configs/config_pdp.yaml @@ -46,16 +46,3 @@ training_parameters: rewind: never rounds: 1 save_weights_epoch: -1 -batch_size: 256 -cosine_tmax: 200 -gamma: 0.1 -l2_decay: 0.0001 -label_smoothing: 0.0 -lr: 0.01 -lr_schedule: cosine -milestones: -- -1 -- -1 -momentum: 0.9 -optimizer: sgd -plot_frequency: 100 diff --git a/src/pquant/configs/config_wanda.yaml b/src/pquant/configs/config_wanda.yaml index c47385f..6db406a 100644 --- a/src/pquant/configs/config_wanda.yaml +++ b/src/pquant/configs/config_wanda.yaml @@ -49,16 +49,3 @@ training_parameters: rewind: never rounds: 1 save_weights_epoch: -1 -batch_size: 64 -cosine_tmax: 200 -gamma: 0.1 -l2_decay: 0.0001 -label_smoothing: 0.0 -lr: 0.01 -lr_schedule: cosine -milestones: -- -1 -- -1 -momentum: 0.9 -optimizer: sgd -plot_frequency: 100 diff --git a/src/pquant/configs/finetuning.yaml b/src/pquant/configs/finetuning.yaml index fb48ead..70a6237 100644 --- a/src/pquant/configs/finetuning.yaml +++ b/src/pquant/configs/finetuning.yaml @@ -51,7 +51,7 @@ fitcompress_parameters: approximate : true f_lambda : 0.5 hpo_parameters: - experiment_name: resnet_18_experiment_4 + experiment_name: experiment_name epochs: 2 model_name: resnet18 num_trials: 1 diff --git a/src/pquant/core/torch/layers.py b/src/pquant/core/torch/layers.py index 47f90ea..07ed7fc 100644 --- a/src/pquant/core/torch/layers.py +++ b/src/pquant/core/torch/layers.py @@ -602,9 +602,12 @@ def extra_repr(self): return s.format(**self.__dict__) -def add_compression_layers(model, config): +def add_compression_layers(model, config, input_shape=None): model = add_quantized_activations_to_model_layer(model, config) model = add_pruning_to_model(model, config) + model.to("cuda") + if input_shape is not None: + model(torch.rand(input_shape).to("cuda")) return model diff --git a/src/pquant/pruning_methods/pdp.py b/src/pquant/pruning_methods/pdp.py index 5405935..dfe7b92 100644 --- a/src/pquant/pruning_methods/pdp.py +++ b/src/pquant/pruning_methods/pdp.py @@ -23,7 +23,15 @@ def build(self, input_shape): input_shape_concatenated = list(input_shape) + [1] self.softmax_shape = input_shape_concatenated self.t = ops.ones(input_shape_concatenated) * 0.5 - self.mask = ops.ones(input_shape) + if self.config.pruning_parameters.structured_pruning: + if self.layer_type == "linear": + shape = (input_shape[0], 1) + else: + if len(input_shape) == 3: + shape = (input_shape[0], 1, 1) + else: + shape = (input_shape[0], 1, 1, 1) + self.mask = 
         self.flat_weight_size = ops.cast(ops.size(self.mask), self.mask.dtype)
         super().build(input_shape)
@@ -49,12 +57,13 @@ def get_hard_mask(self, weight=None):
             mask = self.get_mask_structured_linear(weight)
         else:
             mask = self.get_mask(weight)
-        self.mask = ops.cast((mask >= 0.5), mask.dtype)
+        self.mask.assign(ops.cast((mask >= 0.5), mask.dtype))
         return self.mask

     def pre_finetune_function(self):
         self.is_finetuning = True
-        self.mask = ops.cast((self.mask >= 0.5), self.mask.dtype)
+        if hasattr(self, "mask"):
+            self.mask.assign(ops.cast((self.mask >= 0.5), self.mask.dtype))

     def get_mask_structured_linear(self, weight):
         """
@@ -79,7 +88,7 @@ def get_mask_structured_linear(self, weight):
         softmax_result = ops.softmax(soft_input, axis=1)
         _, mw = ops.unstack(softmax_result, axis=1)
         mw = ops.expand_dims(mw, -1)
-        self.mask = mw
+        self.mask.assign(mw)
         return mw

     def get_mask_structured_channel(self, weight):
@@ -110,12 +119,11 @@ def get_mask_structured_channel(self, weight):
         diff = len(weight.shape) - len(mw.shape)
         for _ in range(diff):
             mw = ops.expand_dims(mw, -1)
-        self.mask = mw
+        self.mask.assign(mw)
         return mw

     def get_mask(self, weight):
         if self.is_pretraining:
-            self.mask = ops.ones(weight.shape)
             return self.mask
         weight_reshaped = ops.reshape(weight, self.softmax_shape)
         abs_weight_flat = ops.ravel(ops.abs(weight))
@@ -131,7 +139,7 @@ def get_mask(self, weight):
         softmax_result = ops.softmax(soft_input, axis=-1)
         _, mw = ops.unstack(softmax_result, axis=-1)
         mask = ops.reshape(mw, weight.shape)
-        self.mask = mask
+        self.mask.assign(mask)
         return mask

     def call(self, weight):
@@ -160,10 +168,5 @@ def post_epoch_function(self, epoch, total_epochs):

     def get_config(self):
         config = super().get_config()
-        config.update(
-            {
-                "config": self.config.get_dict(),
-                "layer_type": self.layer_type,
-            }
-        )
+        config.update({"config": self.config.get_dict(), "layer_type": self.layer_type, "mask": self.mask})
         return config

From f0e7da307d30eadc039bfca3a8a001aaa3900e32 Mon Sep 17 00:00:00 2001
From: Roope Niemi
Date: Sun, 22 Feb 2026 22:01:04 +0100
Subject: [PATCH 19/20] add hpo_params to all configs, make hpo return best_trials in case of multiobjective optimization

---
 src/pquant/configs/config_ap.yaml              | 11 ++++++++++-
 src/pquant/configs/config_autosparse.yaml      |  9 +++++++++
 src/pquant/configs/config_cs.yaml              |  9 +++++++++
 src/pquant/configs/config_fitcompress.yaml     |  9 +++++++++
 src/pquant/configs/config_mdmm.yaml            |  9 +++++++++
 src/pquant/configs/config_pdp.yaml             |  9 +++++++++
 src/pquant/configs/config_wanda.yaml           |  9 +++++++++
 src/pquant/core/hyperparameter_optimization.py |  6 ++++--
 src/pquant/core/keras/layers.py                |  2 +-
 src/pquant/core/torch/layers.py                |  2 +-
 10 files changed, 70 insertions(+), 5 deletions(-)

diff --git a/src/pquant/configs/config_ap.yaml b/src/pquant/configs/config_ap.yaml
index 00ffe9e..0c3cb22 100644
--- a/src/pquant/configs/config_ap.yaml
+++ b/src/pquant/configs/config_ap.yaml
@@ -1,5 +1,5 @@
 pruning_parameters:
-  disable_pruning_for_layers: [] # Disable pruning for these layers, even if enable_pruning is true
+  disable_pruning_for_layers: []
   enable_pruning: true
   pruning_method: activation_pruning
   threshold: 0.2
@@ -46,3 +46,12 @@ training_parameters:
   rewind: never
   rounds: 1
   save_weights_epoch: -1
+hpo_parameters:
+  experiment_name: experiment_name
+  model_name: jet_tagger
+  num_trials: 1
+  sampler:
+    type: RandomSampler
+  hyperparameter_search:
+    numerical: {}
+    categorical: {}
diff --git a/src/pquant/configs/config_autosparse.yaml b/src/pquant/configs/config_autosparse.yaml
index b19e342..e5ce8ed 100644
--- a/src/pquant/configs/config_autosparse.yaml
+++ b/src/pquant/configs/config_autosparse.yaml
@@ -49,3 +49,12 @@ training_parameters:
   rewind: never
   rounds: 1
   save_weights_epoch: -1.0
+hpo_parameters:
+  experiment_name: experiment_name
+  model_name: jet_tagger
+  num_trials: 1
+  sampler:
+    type: RandomSampler
+  hyperparameter_search:
+    numerical: {}
+    categorical: {}
diff --git a/src/pquant/configs/config_cs.yaml b/src/pquant/configs/config_cs.yaml
index cde3178..88594c8 100644
--- a/src/pquant/configs/config_cs.yaml
+++ b/src/pquant/configs/config_cs.yaml
@@ -45,3 +45,12 @@ training_parameters:
   rewind: post-training-stage
   rounds: 3
   save_weights_epoch: 2
+hpo_parameters:
+  experiment_name: experiment_name
+  model_name: jet_tagger
+  num_trials: 1
+  sampler:
+    type: RandomSampler
+  hyperparameter_search:
+    numerical: {}
+    categorical: {}
diff --git a/src/pquant/configs/config_fitcompress.yaml b/src/pquant/configs/config_fitcompress.yaml
index 38fd5ba..cd8e502 100644
--- a/src/pquant/configs/config_fitcompress.yaml
+++ b/src/pquant/configs/config_fitcompress.yaml
@@ -44,3 +44,12 @@ training_parameters:
   rewind: never
   rounds: 1
   save_weights_epoch: -1
+hpo_parameters:
+  experiment_name: experiment_name
+  model_name: jet_tagger
+  num_trials: 1
+  sampler:
+    type: RandomSampler
+  hyperparameter_search:
+    numerical: {}
+    categorical: {}
diff --git a/src/pquant/configs/config_mdmm.yaml b/src/pquant/configs/config_mdmm.yaml
index d793829..33e3587 100644
--- a/src/pquant/configs/config_mdmm.yaml
+++ b/src/pquant/configs/config_mdmm.yaml
@@ -55,6 +55,15 @@ fitcompress_parameters:
   greedy_astar : true
   approximate : true
   f_lambda : 1
+hpo_parameters:
+  experiment_name: experiment_name
+  model_name: jet_tagger
+  num_trials: 1
+  sampler:
+    type: RandomSampler
+  hyperparameter_search:
+    numerical: {}
+    categorical: {}

 # Note:
 # use_grad: true is having some bug... flip gradient impl not working as intended
diff --git a/src/pquant/configs/config_pdp.yaml b/src/pquant/configs/config_pdp.yaml
index 9bf7c8e..937b5b9 100644
--- a/src/pquant/configs/config_pdp.yaml
+++ b/src/pquant/configs/config_pdp.yaml
@@ -46,3 +46,12 @@ training_parameters:
   rewind: never
   rounds: 1
   save_weights_epoch: -1
+hpo_parameters:
+  experiment_name: experiment_name
+  model_name: jet_tagger
+  num_trials: 1
+  sampler:
+    type: RandomSampler
+  hyperparameter_search:
+    numerical: {}
+    categorical: {}
diff --git a/src/pquant/configs/config_wanda.yaml b/src/pquant/configs/config_wanda.yaml
index 6db406a..f128994 100644
--- a/src/pquant/configs/config_wanda.yaml
+++ b/src/pquant/configs/config_wanda.yaml
@@ -49,3 +49,12 @@ training_parameters:
   rewind: never
   rounds: 1
   save_weights_epoch: -1
+hpo_parameters:
+  experiment_name: experiment_name
+  model_name: jet_tagger
+  num_trials: 1
+  sampler:
+    type: RandomSampler
+  hyperparameter_search:
+    numerical: {}
+    categorical: {}
diff --git a/src/pquant/core/hyperparameter_optimization.py b/src/pquant/core/hyperparameter_optimization.py
index 682ff15..55a6077 100644
--- a/src/pquant/core/hyperparameter_optimization.py
+++ b/src/pquant/core/hyperparameter_optimization.py
@@ -343,8 +343,10 @@ def run_optimization(self, model, **kwargs):
             n_trials=num_trials,
             n_jobs=1,
         )
-
-        return study.best_params
+        if len(self.objectives.keys()) == 1:
+            return study.best_params
+        else:
+            return study.best_trials


 def ap_config():
diff --git a/src/pquant/core/keras/layers.py b/src/pquant/core/keras/layers.py
index 5be2cb9..6a30fc5 100644
--- a/src/pquant/core/keras/layers.py
+++ b/src/pquant/core/keras/layers.py
@@ -2251,7 +2251,7 @@ def post_training_prune(model, config, calibration_data):
     return apply_final_compression(model, config)


-def get_ebops(model):
+def get_ebops(model, **kwargs):
     ebops = 0
     for m in model.layers:
         if isinstance(m, (PQWeightBiasBase)):
diff --git a/src/pquant/core/torch/layers.py b/src/pquant/core/torch/layers.py
index 07ed7fc..70305c7 100644
--- a/src/pquant/core/torch/layers.py
+++ b/src/pquant/core/torch/layers.py
@@ -1733,7 +1733,7 @@ def post_training_prune(model, config, calibration_data):
     return remove_compression_layers(model, config)


-def get_ebops(model):
+def get_ebops(model, **kwargs):
     ebops = 0
     for m in model.modules():
         if isinstance(m, (PQWeightBiasBase)):

From f623e3e1c3e20633cd5189a9de89b26759eccfc9 Mon Sep 17 00:00:00 2001
From: Roope Niemi
Date: Mon, 23 Feb 2026 08:03:30 +0100
Subject: [PATCH 20/20] add missing add to cuda

---
 src/pquant/core/torch/layers.py | 1 +
 1 file changed, 1 insertion(+)

diff --git a/src/pquant/core/torch/layers.py b/src/pquant/core/torch/layers.py
index 70305c7..114a912 100644
--- a/src/pquant/core/torch/layers.py
+++ b/src/pquant/core/torch/layers.py
@@ -608,6 +608,7 @@ def add_compression_layers(model, config, input_shape=None):
     model.to("cuda")
     if input_shape is not None:
         model(torch.rand(input_shape).to("cuda"))
+    model.to("cuda")
     return model
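
The behavioural changes in PATCH 18 through 20 (the optional `input_shape` argument to `add_compression_layers`, the move of the model to `"cuda"`, and `get_ebops` accepting `**kwargs`) are easiest to see in a short usage sketch. This is a minimal, hypothetical example and not part of the patch series: the import path mirrors the file modified above (`src/pquant/core/torch/layers.py`, so the top-level re-export may differ), the toy model and batch shape are assumptions, the `config` object is left as a placeholder because its loading helper is not shown in these patches, and a CUDA device is required since `add_compression_layers` now calls `model.to("cuda")` unconditionally.

```python
# Hypothetical usage sketch; assumes pquant is installed and a CUDA device is available.
import torch
from pquant.core.torch.layers import add_compression_layers, get_ebops

# Toy model standing in for a real network (assumption, not from the patches).
model = torch.nn.Sequential(
    torch.nn.Linear(16, 64),
    torch.nn.ReLU(),
    torch.nn.Linear(64, 5),
)

# Placeholder for a parsed PQuant configuration (e.g. one of the config_*.yaml
# files above); how it is loaded is not shown in this patch series.
config = ...

# PATCH 18 adds the optional input_shape argument: when given, a dummy forward
# pass is run on "cuda" so that shape-dependent compression layers get built.
model = add_compression_layers(model, config, input_shape=(1, 16))

# PATCH 19 adds **kwargs to get_ebops in both backends, so extra keyword
# arguments are accepted and ignored.
print(get_ebops(model))
```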