diff --git a/notebooks/analysis.ipynb b/notebooks/analysis.ipynb index 18527fa..cfa5c8d 100644 --- a/notebooks/analysis.ipynb +++ b/notebooks/analysis.ipynb @@ -2,159 +2,172 @@ "cells": [ { "cell_type": "code", - "execution_count": null, + "execution_count": 14, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ - "Requirement already satisfied: matplotlib in /home/andrew/miniconda3/lib/python3.12/site-packages (3.9.2)\n", - "Requirement already satisfied: seaborn in /home/andrew/miniconda3/lib/python3.12/site-packages (0.13.2)\n", - "Requirement already satisfied: plotly in /home/andrew/miniconda3/lib/python3.12/site-packages (5.24.1)\n", - "Requirement already satisfied: streamlit in /home/andrew/miniconda3/lib/python3.12/site-packages (1.39.0)\n", - "Requirement already satisfied: contourpy>=1.0.1 in /home/andrew/miniconda3/lib/python3.12/site-packages (from matplotlib) (1.3.0)\n", - "Requirement already satisfied: cycler>=0.10 in /home/andrew/miniconda3/lib/python3.12/site-packages (from matplotlib) (0.12.1)\n", - "Requirement already satisfied: fonttools>=4.22.0 in /home/andrew/miniconda3/lib/python3.12/site-packages (from matplotlib) (4.53.1)\n", - "Requirement already satisfied: kiwisolver>=1.3.1 in /home/andrew/miniconda3/lib/python3.12/site-packages (from matplotlib) (1.4.7)\n", - "Requirement already satisfied: numpy>=1.23 in /home/andrew/miniconda3/lib/python3.12/site-packages (from matplotlib) (1.26.4)\n", - "Requirement already satisfied: packaging>=20.0 in /home/andrew/miniconda3/lib/python3.12/site-packages (from matplotlib) (23.2)\n", - "Requirement already satisfied: pillow>=8 in /home/andrew/miniconda3/lib/python3.12/site-packages (from matplotlib) (10.3.0)\n", - "Requirement already satisfied: pyparsing>=2.3.1 in /home/andrew/miniconda3/lib/python3.12/site-packages (from matplotlib) (3.1.4)\n", - "Requirement already satisfied: python-dateutil>=2.7 in /home/andrew/miniconda3/lib/python3.12/site-packages (from 
matplotlib) (2.9.0)\n", - "Requirement already satisfied: pandas>=1.2 in /home/andrew/miniconda3/lib/python3.12/site-packages (from seaborn) (2.2.2)\n", - "Requirement already satisfied: tenacity>=6.2.0 in /home/andrew/miniconda3/lib/python3.12/site-packages (from plotly) (9.0.0)\n", - "Requirement already satisfied: altair<6,>=4.0 in /home/andrew/miniconda3/lib/python3.12/site-packages (from streamlit) (5.4.1)\n", - "Requirement already satisfied: blinker<2,>=1.0.0 in /home/andrew/miniconda3/lib/python3.12/site-packages (from streamlit) (1.8.2)\n", - "Requirement already satisfied: cachetools<6,>=4.0 in /home/andrew/miniconda3/lib/python3.12/site-packages (from streamlit) (5.5.0)\n", - "Requirement already satisfied: click<9,>=7.0 in /home/andrew/miniconda3/lib/python3.12/site-packages (from streamlit) (8.1.7)\n", - "Requirement already satisfied: protobuf<6,>=3.20 in /home/andrew/miniconda3/lib/python3.12/site-packages (from streamlit) (5.28.3)\n", - "Requirement already satisfied: pyarrow>=7.0 in /home/andrew/miniconda3/lib/python3.12/site-packages (from streamlit) (18.0.0)\n", - "Requirement already satisfied: requests<3,>=2.27 in /home/andrew/miniconda3/lib/python3.12/site-packages (from streamlit) (2.32.3)\n", - "Requirement already satisfied: rich<14,>=10.14.0 in /home/andrew/miniconda3/lib/python3.12/site-packages (from streamlit) (13.7.1)\n", - "Requirement already satisfied: toml<2,>=0.10.1 in /home/andrew/miniconda3/lib/python3.12/site-packages (from streamlit) (0.10.2)\n", - "Requirement already satisfied: typing-extensions<5,>=4.3.0 in /home/andrew/miniconda3/lib/python3.12/site-packages (from streamlit) (4.11.0)\n", - "Requirement already satisfied: gitpython!=3.1.19,<4,>=3.0.7 in /home/andrew/miniconda3/lib/python3.12/site-packages (from streamlit) (3.1.43)\n", - "Requirement already satisfied: pydeck<1,>=0.8.0b4 in /home/andrew/miniconda3/lib/python3.12/site-packages (from streamlit) (0.9.1)\n", - "Requirement already satisfied: tornado<7,>=6.0.3 in 
/home/andrew/miniconda3/lib/python3.12/site-packages (from streamlit) (6.4)\n", - "Requirement already satisfied: watchdog<6,>=2.1.5 in /home/andrew/miniconda3/lib/python3.12/site-packages (from streamlit) (5.0.3)\n", - "Requirement already satisfied: jinja2 in /home/andrew/miniconda3/lib/python3.12/site-packages (from altair<6,>=4.0->streamlit) (3.1.3)\n", - "Requirement already satisfied: jsonschema>=3.0 in /home/andrew/miniconda3/lib/python3.12/site-packages (from altair<6,>=4.0->streamlit) (4.22.0)\n", - "Requirement already satisfied: narwhals>=1.5.2 in /home/andrew/miniconda3/lib/python3.12/site-packages (from altair<6,>=4.0->streamlit) (1.11.1)\n", - "Requirement already satisfied: gitdb<5,>=4.0.1 in /home/andrew/miniconda3/lib/python3.12/site-packages (from gitpython!=3.1.19,<4,>=3.0.7->streamlit) (4.0.11)\n", - "Requirement already satisfied: pytz>=2020.1 in /home/andrew/miniconda3/lib/python3.12/site-packages (from pandas>=1.2->seaborn) (2024.1)\n", - "Requirement already satisfied: tzdata>=2022.7 in /home/andrew/miniconda3/lib/python3.12/site-packages (from pandas>=1.2->seaborn) (2024.1)\n", - "Requirement already satisfied: six>=1.5 in /home/andrew/miniconda3/lib/python3.12/site-packages (from python-dateutil>=2.7->matplotlib) (1.16.0)\n", - "Requirement already satisfied: charset-normalizer<4,>=2 in /home/andrew/miniconda3/lib/python3.12/site-packages (from requests<3,>=2.27->streamlit) (2.0.4)\n", - "Requirement already satisfied: idna<4,>=2.5 in /home/andrew/miniconda3/lib/python3.12/site-packages (from requests<3,>=2.27->streamlit) (3.4)\n", - "Requirement already satisfied: urllib3<3,>=1.21.1 in /home/andrew/miniconda3/lib/python3.12/site-packages (from requests<3,>=2.27->streamlit) (2.1.0)\n", - "Requirement already satisfied: certifi>=2017.4.17 in /home/andrew/miniconda3/lib/python3.12/site-packages (from requests<3,>=2.27->streamlit) (2024.2.2)\n", - "Requirement already satisfied: markdown-it-py>=2.2.0 in 
/home/andrew/miniconda3/lib/python3.12/site-packages (from rich<14,>=10.14.0->streamlit) (3.0.0)\n", - "Requirement already satisfied: pygments<3.0.0,>=2.13.0 in /home/andrew/miniconda3/lib/python3.12/site-packages (from rich<14,>=10.14.0->streamlit) (2.18.0)\n", - "Requirement already satisfied: smmap<6,>=3.0.1 in /home/andrew/miniconda3/lib/python3.12/site-packages (from gitdb<5,>=4.0.1->gitpython!=3.1.19,<4,>=3.0.7->streamlit) (5.0.1)\n", - "Requirement already satisfied: MarkupSafe>=2.0 in /home/andrew/miniconda3/lib/python3.12/site-packages (from jinja2->altair<6,>=4.0->streamlit) (2.1.5)\n", - "Requirement already satisfied: attrs>=22.2.0 in /home/andrew/miniconda3/lib/python3.12/site-packages (from jsonschema>=3.0->altair<6,>=4.0->streamlit) (23.2.0)\n", - "Requirement already satisfied: jsonschema-specifications>=2023.03.6 in /home/andrew/miniconda3/lib/python3.12/site-packages (from jsonschema>=3.0->altair<6,>=4.0->streamlit) (2023.12.1)\n", - "Requirement already satisfied: referencing>=0.28.4 in /home/andrew/miniconda3/lib/python3.12/site-packages (from jsonschema>=3.0->altair<6,>=4.0->streamlit) (0.35.1)\n", - "Requirement already satisfied: rpds-py>=0.7.1 in /home/andrew/miniconda3/lib/python3.12/site-packages (from jsonschema>=3.0->altair<6,>=4.0->streamlit) (0.18.1)\n", - "Requirement already satisfied: mdurl~=0.1 in /home/andrew/miniconda3/lib/python3.12/site-packages (from markdown-it-py>=2.2.0->rich<14,>=10.14.0->streamlit) (0.1.2)\n", + "Requirement already satisfied: matplotlib in /opt/anaconda3/envs/mechir/lib/python3.11/site-packages (3.10.0)\n", + "Requirement already satisfied: seaborn in /opt/anaconda3/envs/mechir/lib/python3.11/site-packages (0.13.2)\n", + "Requirement already satisfied: plotly in /opt/anaconda3/envs/mechir/lib/python3.11/site-packages (5.24.1)\n", + "Requirement already satisfied: streamlit in /opt/anaconda3/envs/mechir/lib/python3.11/site-packages (1.42.0)\n", + "Requirement already satisfied: contourpy>=1.0.1 in 
/opt/anaconda3/envs/mechir/lib/python3.11/site-packages (from matplotlib) (1.3.1)\n", + "Requirement already satisfied: cycler>=0.10 in /opt/anaconda3/envs/mechir/lib/python3.11/site-packages (from matplotlib) (0.12.1)\n", + "Requirement already satisfied: fonttools>=4.22.0 in /opt/anaconda3/envs/mechir/lib/python3.11/site-packages (from matplotlib) (4.56.0)\n", + "Requirement already satisfied: kiwisolver>=1.3.1 in /opt/anaconda3/envs/mechir/lib/python3.11/site-packages (from matplotlib) (1.4.8)\n", + "Requirement already satisfied: numpy>=1.23 in /opt/anaconda3/envs/mechir/lib/python3.11/site-packages (from matplotlib) (1.26.4)\n", + "Requirement already satisfied: packaging>=20.0 in /opt/anaconda3/envs/mechir/lib/python3.11/site-packages (from matplotlib) (24.2)\n", + "Requirement already satisfied: pillow>=8 in /opt/anaconda3/envs/mechir/lib/python3.11/site-packages (from matplotlib) (11.1.0)\n", + "Requirement already satisfied: pyparsing>=2.3.1 in /opt/anaconda3/envs/mechir/lib/python3.11/site-packages (from matplotlib) (3.2.1)\n", + "Requirement already satisfied: python-dateutil>=2.7 in /opt/anaconda3/envs/mechir/lib/python3.11/site-packages (from matplotlib) (2.9.0.post0)\n", + "Requirement already satisfied: pandas>=1.2 in /opt/anaconda3/envs/mechir/lib/python3.11/site-packages (from seaborn) (2.2.3)\n", + "Requirement already satisfied: tenacity>=6.2.0 in /opt/anaconda3/envs/mechir/lib/python3.11/site-packages (from plotly) (9.0.0)\n", + "Requirement already satisfied: altair<6,>=4.0 in /opt/anaconda3/envs/mechir/lib/python3.11/site-packages (from streamlit) (5.5.0)\n", + "Requirement already satisfied: blinker<2,>=1.0.0 in /opt/anaconda3/envs/mechir/lib/python3.11/site-packages (from streamlit) (1.9.0)\n", + "Requirement already satisfied: cachetools<6,>=4.0 in /opt/anaconda3/envs/mechir/lib/python3.11/site-packages (from streamlit) (5.5.1)\n", + "Requirement already satisfied: click<9,>=7.0 in /opt/anaconda3/envs/mechir/lib/python3.11/site-packages 
(from streamlit) (8.1.8)\n", + "Requirement already satisfied: protobuf<6,>=3.20 in /opt/anaconda3/envs/mechir/lib/python3.11/site-packages (from streamlit) (5.29.3)\n", + "Requirement already satisfied: pyarrow>=7.0 in /opt/anaconda3/envs/mechir/lib/python3.11/site-packages (from streamlit) (19.0.0)\n", + "Requirement already satisfied: requests<3,>=2.27 in /opt/anaconda3/envs/mechir/lib/python3.11/site-packages (from streamlit) (2.32.3)\n", + "Requirement already satisfied: rich<14,>=10.14.0 in /opt/anaconda3/envs/mechir/lib/python3.11/site-packages (from streamlit) (13.9.4)\n", + "Requirement already satisfied: toml<2,>=0.10.1 in /opt/anaconda3/envs/mechir/lib/python3.11/site-packages (from streamlit) (0.10.2)\n", + "Requirement already satisfied: typing-extensions<5,>=4.4.0 in /opt/anaconda3/envs/mechir/lib/python3.11/site-packages (from streamlit) (4.12.2)\n", + "Requirement already satisfied: gitpython!=3.1.19,<4,>=3.0.7 in /opt/anaconda3/envs/mechir/lib/python3.11/site-packages (from streamlit) (3.1.44)\n", + "Requirement already satisfied: pydeck<1,>=0.8.0b4 in /opt/anaconda3/envs/mechir/lib/python3.11/site-packages (from streamlit) (0.9.1)\n", + "Requirement already satisfied: tornado<7,>=6.0.3 in /opt/anaconda3/envs/mechir/lib/python3.11/site-packages (from streamlit) (6.4.2)\n", + "Requirement already satisfied: jinja2 in /opt/anaconda3/envs/mechir/lib/python3.11/site-packages (from altair<6,>=4.0->streamlit) (3.1.5)\n", + "Requirement already satisfied: jsonschema>=3.0 in /opt/anaconda3/envs/mechir/lib/python3.11/site-packages (from altair<6,>=4.0->streamlit) (4.23.0)\n", + "Requirement already satisfied: narwhals>=1.14.2 in /opt/anaconda3/envs/mechir/lib/python3.11/site-packages (from altair<6,>=4.0->streamlit) (1.27.0)\n", + "Requirement already satisfied: gitdb<5,>=4.0.1 in /opt/anaconda3/envs/mechir/lib/python3.11/site-packages (from gitpython!=3.1.19,<4,>=3.0.7->streamlit) (4.0.12)\n", + "Requirement already satisfied: pytz>=2020.1 in 
/opt/anaconda3/envs/mechir/lib/python3.11/site-packages (from pandas>=1.2->seaborn) (2025.1)\n", + "Requirement already satisfied: tzdata>=2022.7 in /opt/anaconda3/envs/mechir/lib/python3.11/site-packages (from pandas>=1.2->seaborn) (2025.1)\n", + "Requirement already satisfied: six>=1.5 in /opt/anaconda3/envs/mechir/lib/python3.11/site-packages (from python-dateutil>=2.7->matplotlib) (1.17.0)\n", + "Requirement already satisfied: charset-normalizer<4,>=2 in /opt/anaconda3/envs/mechir/lib/python3.11/site-packages (from requests<3,>=2.27->streamlit) (3.4.1)\n", + "Requirement already satisfied: idna<4,>=2.5 in /opt/anaconda3/envs/mechir/lib/python3.11/site-packages (from requests<3,>=2.27->streamlit) (3.10)\n", + "Requirement already satisfied: urllib3<3,>=1.21.1 in /opt/anaconda3/envs/mechir/lib/python3.11/site-packages (from requests<3,>=2.27->streamlit) (2.3.0)\n", + "Requirement already satisfied: certifi>=2017.4.17 in /opt/anaconda3/envs/mechir/lib/python3.11/site-packages (from requests<3,>=2.27->streamlit) (2025.1.31)\n", + "Requirement already satisfied: markdown-it-py>=2.2.0 in /opt/anaconda3/envs/mechir/lib/python3.11/site-packages (from rich<14,>=10.14.0->streamlit) (3.0.0)\n", + "Requirement already satisfied: pygments<3.0.0,>=2.13.0 in /opt/anaconda3/envs/mechir/lib/python3.11/site-packages (from rich<14,>=10.14.0->streamlit) (2.19.1)\n", + "Requirement already satisfied: smmap<6,>=3.0.1 in /opt/anaconda3/envs/mechir/lib/python3.11/site-packages (from gitdb<5,>=4.0.1->gitpython!=3.1.19,<4,>=3.0.7->streamlit) (5.0.2)\n", + "Requirement already satisfied: MarkupSafe>=2.0 in /opt/anaconda3/envs/mechir/lib/python3.11/site-packages (from jinja2->altair<6,>=4.0->streamlit) (3.0.2)\n", + "Requirement already satisfied: attrs>=22.2.0 in /opt/anaconda3/envs/mechir/lib/python3.11/site-packages (from jsonschema>=3.0->altair<6,>=4.0->streamlit) (25.1.0)\n", + "Requirement already satisfied: jsonschema-specifications>=2023.03.6 in 
/opt/anaconda3/envs/mechir/lib/python3.11/site-packages (from jsonschema>=3.0->altair<6,>=4.0->streamlit) (2024.10.1)\n", + "Requirement already satisfied: referencing>=0.28.4 in /opt/anaconda3/envs/mechir/lib/python3.11/site-packages (from jsonschema>=3.0->altair<6,>=4.0->streamlit) (0.36.2)\n", + "Requirement already satisfied: rpds-py>=0.7.1 in /opt/anaconda3/envs/mechir/lib/python3.11/site-packages (from jsonschema>=3.0->altair<6,>=4.0->streamlit) (0.22.3)\n", + "Requirement already satisfied: mdurl~=0.1 in /opt/anaconda3/envs/mechir/lib/python3.11/site-packages (from markdown-it-py>=2.2.0->rich<14,>=10.14.0->streamlit) (0.1.2)\n", "Note: you may need to restart the kernel to use updated packages.\n", "Looking in indexes: https://download.pytorch.org/whl/cpu\n", - "Requirement already satisfied: torch in /home/andrew/miniconda3/lib/python3.12/site-packages (2.5.0+cpu)\n", - "Requirement already satisfied: torchvision in /home/andrew/miniconda3/lib/python3.12/site-packages (0.20.0+cpu)\n", - "Requirement already satisfied: torchaudio in /home/andrew/miniconda3/lib/python3.12/site-packages (2.5.0+cpu)\n", - "Requirement already satisfied: filelock in /home/andrew/miniconda3/lib/python3.12/site-packages (from torch) (3.13.1)\n", - "Requirement already satisfied: typing-extensions>=4.8.0 in /home/andrew/miniconda3/lib/python3.12/site-packages (from torch) (4.11.0)\n", - "Requirement already satisfied: networkx in /home/andrew/miniconda3/lib/python3.12/site-packages (from torch) (3.2.1)\n", - "Requirement already satisfied: jinja2 in /home/andrew/miniconda3/lib/python3.12/site-packages (from torch) (3.1.3)\n", - "Requirement already satisfied: fsspec in /home/andrew/miniconda3/lib/python3.12/site-packages (from torch) (2024.2.0)\n", - "Requirement already satisfied: setuptools in /home/andrew/miniconda3/lib/python3.12/site-packages (from torch) (68.2.2)\n", - "Requirement already satisfied: sympy==1.13.1 in /home/andrew/miniconda3/lib/python3.12/site-packages (from 
torch) (1.13.1)\n", - "Requirement already satisfied: mpmath<1.4,>=1.1.0 in /home/andrew/miniconda3/lib/python3.12/site-packages (from sympy==1.13.1->torch) (1.3.0)\n", - "Requirement already satisfied: numpy in /home/andrew/miniconda3/lib/python3.12/site-packages (from torchvision) (1.26.4)\n", - "Requirement already satisfied: pillow!=8.3.*,>=5.3.0 in /home/andrew/miniconda3/lib/python3.12/site-packages (from torchvision) (10.3.0)\n", - "Requirement already satisfied: MarkupSafe>=2.0 in /home/andrew/miniconda3/lib/python3.12/site-packages (from jinja2->torch) (2.1.5)\n", + "Requirement already satisfied: torch in /opt/anaconda3/envs/mechir/lib/python3.11/site-packages (2.4.1)\n", + "Requirement already satisfied: torchvision in /opt/anaconda3/envs/mechir/lib/python3.11/site-packages (0.21.0)\n", + "Requirement already satisfied: torchaudio in /opt/anaconda3/envs/mechir/lib/python3.11/site-packages (2.6.0)\n", + "Requirement already satisfied: filelock in /opt/anaconda3/envs/mechir/lib/python3.11/site-packages (from torch) (3.17.0)\n", + "Requirement already satisfied: typing-extensions>=4.8.0 in /opt/anaconda3/envs/mechir/lib/python3.11/site-packages (from torch) (4.12.2)\n", + "Requirement already satisfied: sympy in /opt/anaconda3/envs/mechir/lib/python3.11/site-packages (from torch) (1.13.1)\n", + "Requirement already satisfied: networkx in /opt/anaconda3/envs/mechir/lib/python3.11/site-packages (from torch) (3.4.2)\n", + "Requirement already satisfied: jinja2 in /opt/anaconda3/envs/mechir/lib/python3.11/site-packages (from torch) (3.1.5)\n", + "Requirement already satisfied: fsspec in /opt/anaconda3/envs/mechir/lib/python3.11/site-packages (from torch) (2024.6.1)\n", + "Requirement already satisfied: numpy in /opt/anaconda3/envs/mechir/lib/python3.11/site-packages (from torchvision) (1.26.4)\n", + "\u001b[33mWARNING: Retrying (Retry(total=4, connect=None, read=None, redirect=None, status=None)) after connection broken by 
'SSLError(SSLCertVerificationError('“pytorch.org” certificate is expired'))': /whl/cpu/torch/\u001b[0m\u001b[33m\n", + "\u001b[0mCollecting torch\n", + " Using cached https://download.pytorch.org/whl/cpu/torch-2.6.0-cp311-none-macosx_11_0_arm64.whl.metadata (28 kB)\n", + "Requirement already satisfied: pillow!=8.3.*,>=5.3.0 in /opt/anaconda3/envs/mechir/lib/python3.11/site-packages (from torchvision) (11.1.0)\n", + "Requirement already satisfied: mpmath<1.4,>=1.1.0 in /opt/anaconda3/envs/mechir/lib/python3.11/site-packages (from sympy->torch) (1.3.0)\n", + "Requirement already satisfied: MarkupSafe>=2.0 in /opt/anaconda3/envs/mechir/lib/python3.11/site-packages (from jinja2->torch) (3.0.2)\n", + "Using cached https://download.pytorch.org/whl/cpu/torch-2.6.0-cp311-none-macosx_11_0_arm64.whl (66.5 MB)\n", + "Installing collected packages: torch\n", + " Attempting uninstall: torch\n", + " Found existing installation: torch 2.4.1\n", + " Uninstalling torch-2.4.1:\n", + " Successfully uninstalled torch-2.4.1\n", + "\u001b[31mERROR: pip's dependency resolver does not currently take into account all the packages that are installed. 
This behaviour is the source of the following dependency conflicts.\n", + "transformer-lens 2.14.0 requires torch<2.5,>=2.2, but you have torch 2.6.0 which is incompatible.\u001b[0m\u001b[31m\n", + "\u001b[0mSuccessfully installed torch-2.6.0\n", "Note: you may need to restart the kernel to use updated packages.\n", - "Requirement already satisfied: jaxtyping in /home/andrew/miniconda3/lib/python3.12/site-packages (0.2.34)\n", - "Requirement already satisfied: transformer_lens in /home/andrew/miniconda3/lib/python3.12/site-packages (2.8.1)\n", - "Requirement already satisfied: typeguard==2.13.3 in /home/andrew/miniconda3/lib/python3.12/site-packages (from jaxtyping) (2.13.3)\n", - "Requirement already satisfied: accelerate>=0.23.0 in /home/andrew/miniconda3/lib/python3.12/site-packages (from transformer_lens) (1.0.1)\n", - "Requirement already satisfied: beartype<0.15.0,>=0.14.1 in /home/andrew/miniconda3/lib/python3.12/site-packages (from transformer_lens) (0.14.1)\n", - "Requirement already satisfied: better-abc<0.0.4,>=0.0.3 in /home/andrew/miniconda3/lib/python3.12/site-packages (from transformer_lens) (0.0.3)\n", - "Requirement already satisfied: datasets>=2.7.1 in /home/andrew/miniconda3/lib/python3.12/site-packages (from transformer_lens) (3.0.2)\n", - "Requirement already satisfied: einops>=0.6.0 in /home/andrew/miniconda3/lib/python3.12/site-packages (from transformer_lens) (0.8.0)\n", - "Requirement already satisfied: fancy-einsum>=0.0.3 in /home/andrew/miniconda3/lib/python3.12/site-packages (from transformer_lens) (0.0.3)\n", - "Requirement already satisfied: numpy>=1.26 in /home/andrew/miniconda3/lib/python3.12/site-packages (from transformer_lens) (1.26.4)\n", - "Requirement already satisfied: pandas>=1.1.5 in /home/andrew/miniconda3/lib/python3.12/site-packages (from transformer_lens) (2.2.2)\n", - "Requirement already satisfied: rich>=12.6.0 in /home/andrew/miniconda3/lib/python3.12/site-packages (from transformer_lens) (13.7.1)\n", - "Requirement 
already satisfied: sentencepiece in /home/andrew/miniconda3/lib/python3.12/site-packages (from transformer_lens) (0.2.0)\n", - "Requirement already satisfied: torch>=1.10 in /home/andrew/miniconda3/lib/python3.12/site-packages (from transformer_lens) (2.5.0+cpu)\n", - "Requirement already satisfied: tqdm>=4.64.1 in /home/andrew/miniconda3/lib/python3.12/site-packages (from transformer_lens) (4.66.6)\n", - "Requirement already satisfied: transformers>=4.37.2 in /home/andrew/miniconda3/lib/python3.12/site-packages (from transformer_lens) (4.46.0)\n", - "Requirement already satisfied: typing-extensions in /home/andrew/miniconda3/lib/python3.12/site-packages (from transformer_lens) (4.11.0)\n", - "Requirement already satisfied: wandb>=0.13.5 in /home/andrew/miniconda3/lib/python3.12/site-packages (from transformer_lens) (0.18.5)\n", - "Requirement already satisfied: packaging>=20.0 in /home/andrew/miniconda3/lib/python3.12/site-packages (from accelerate>=0.23.0->transformer_lens) (23.2)\n", - "Requirement already satisfied: psutil in /home/andrew/miniconda3/lib/python3.12/site-packages (from accelerate>=0.23.0->transformer_lens) (5.9.8)\n", - "Requirement already satisfied: pyyaml in /home/andrew/miniconda3/lib/python3.12/site-packages (from accelerate>=0.23.0->transformer_lens) (6.0.1)\n", - "Requirement already satisfied: huggingface-hub>=0.21.0 in /home/andrew/miniconda3/lib/python3.12/site-packages (from accelerate>=0.23.0->transformer_lens) (0.26.2)\n", - "Requirement already satisfied: safetensors>=0.4.3 in /home/andrew/miniconda3/lib/python3.12/site-packages (from accelerate>=0.23.0->transformer_lens) (0.4.5)\n", - "Requirement already satisfied: filelock in /home/andrew/miniconda3/lib/python3.12/site-packages (from datasets>=2.7.1->transformer_lens) (3.13.1)\n", - "Requirement already satisfied: pyarrow>=15.0.0 in /home/andrew/miniconda3/lib/python3.12/site-packages (from datasets>=2.7.1->transformer_lens) (18.0.0)\n", - "Requirement already satisfied: 
dill<0.3.9,>=0.3.0 in /home/andrew/miniconda3/lib/python3.12/site-packages (from datasets>=2.7.1->transformer_lens) (0.3.8)\n", - "Requirement already satisfied: requests>=2.32.2 in /home/andrew/miniconda3/lib/python3.12/site-packages (from datasets>=2.7.1->transformer_lens) (2.32.3)\n", - "Requirement already satisfied: xxhash in /home/andrew/miniconda3/lib/python3.12/site-packages (from datasets>=2.7.1->transformer_lens) (3.5.0)\n", - "Requirement already satisfied: multiprocess<0.70.17 in /home/andrew/miniconda3/lib/python3.12/site-packages (from datasets>=2.7.1->transformer_lens) (0.70.16)\n", - "Requirement already satisfied: fsspec<=2024.9.0,>=2023.1.0 in /home/andrew/miniconda3/lib/python3.12/site-packages (from fsspec[http]<=2024.9.0,>=2023.1.0->datasets>=2.7.1->transformer_lens) (2024.2.0)\n", - "Requirement already satisfied: aiohttp in /home/andrew/miniconda3/lib/python3.12/site-packages (from datasets>=2.7.1->transformer_lens) (3.10.10)\n", - "Requirement already satisfied: python-dateutil>=2.8.2 in /home/andrew/miniconda3/lib/python3.12/site-packages (from pandas>=1.1.5->transformer_lens) (2.9.0)\n", - "Requirement already satisfied: pytz>=2020.1 in /home/andrew/miniconda3/lib/python3.12/site-packages (from pandas>=1.1.5->transformer_lens) (2024.1)\n", - "Requirement already satisfied: tzdata>=2022.7 in /home/andrew/miniconda3/lib/python3.12/site-packages (from pandas>=1.1.5->transformer_lens) (2024.1)\n", - "Requirement already satisfied: markdown-it-py>=2.2.0 in /home/andrew/miniconda3/lib/python3.12/site-packages (from rich>=12.6.0->transformer_lens) (3.0.0)\n", - "Requirement already satisfied: pygments<3.0.0,>=2.13.0 in /home/andrew/miniconda3/lib/python3.12/site-packages (from rich>=12.6.0->transformer_lens) (2.18.0)\n", - "Requirement already satisfied: networkx in /home/andrew/miniconda3/lib/python3.12/site-packages (from torch>=1.10->transformer_lens) (3.2.1)\n", - "Requirement already satisfied: jinja2 in 
/home/andrew/miniconda3/lib/python3.12/site-packages (from torch>=1.10->transformer_lens) (3.1.3)\n", - "Requirement already satisfied: setuptools in /home/andrew/miniconda3/lib/python3.12/site-packages (from torch>=1.10->transformer_lens) (68.2.2)\n", - "Requirement already satisfied: sympy==1.13.1 in /home/andrew/miniconda3/lib/python3.12/site-packages (from torch>=1.10->transformer_lens) (1.13.1)\n", - "Requirement already satisfied: mpmath<1.4,>=1.1.0 in /home/andrew/miniconda3/lib/python3.12/site-packages (from sympy==1.13.1->torch>=1.10->transformer_lens) (1.3.0)\n", - "Requirement already satisfied: regex!=2019.12.17 in /home/andrew/miniconda3/lib/python3.12/site-packages (from transformers>=4.37.2->transformer_lens) (2024.4.28)\n", - "Requirement already satisfied: tokenizers<0.21,>=0.20 in /home/andrew/miniconda3/lib/python3.12/site-packages (from transformers>=4.37.2->transformer_lens) (0.20.1)\n", - "Requirement already satisfied: click!=8.0.0,>=7.1 in /home/andrew/miniconda3/lib/python3.12/site-packages (from wandb>=0.13.5->transformer_lens) (8.1.7)\n", - "Requirement already satisfied: docker-pycreds>=0.4.0 in /home/andrew/miniconda3/lib/python3.12/site-packages (from wandb>=0.13.5->transformer_lens) (0.4.0)\n", - "Requirement already satisfied: gitpython!=3.1.29,>=1.0.0 in /home/andrew/miniconda3/lib/python3.12/site-packages (from wandb>=0.13.5->transformer_lens) (3.1.43)\n", - "Requirement already satisfied: platformdirs in /home/andrew/miniconda3/lib/python3.12/site-packages (from wandb>=0.13.5->transformer_lens) (3.10.0)\n", - "Requirement already satisfied: protobuf!=4.21.0,!=5.28.0,<6,>=3.19.0 in /home/andrew/miniconda3/lib/python3.12/site-packages (from wandb>=0.13.5->transformer_lens) (5.28.3)\n", - "Requirement already satisfied: sentry-sdk>=2.0.0 in /home/andrew/miniconda3/lib/python3.12/site-packages (from wandb>=0.13.5->transformer_lens) (2.17.0)\n", - "Requirement already satisfied: setproctitle in 
/home/andrew/miniconda3/lib/python3.12/site-packages (from wandb>=0.13.5->transformer_lens) (1.3.3)\n", - "Requirement already satisfied: six>=1.4.0 in /home/andrew/miniconda3/lib/python3.12/site-packages (from docker-pycreds>=0.4.0->wandb>=0.13.5->transformer_lens) (1.16.0)\n", - "Requirement already satisfied: aiohappyeyeballs>=2.3.0 in /home/andrew/miniconda3/lib/python3.12/site-packages (from aiohttp->datasets>=2.7.1->transformer_lens) (2.4.3)\n", - "Requirement already satisfied: aiosignal>=1.1.2 in /home/andrew/miniconda3/lib/python3.12/site-packages (from aiohttp->datasets>=2.7.1->transformer_lens) (1.3.1)\n", - "Requirement already satisfied: attrs>=17.3.0 in /home/andrew/miniconda3/lib/python3.12/site-packages (from aiohttp->datasets>=2.7.1->transformer_lens) (23.2.0)\n", - "Requirement already satisfied: frozenlist>=1.1.1 in /home/andrew/miniconda3/lib/python3.12/site-packages (from aiohttp->datasets>=2.7.1->transformer_lens) (1.5.0)\n", - "Requirement already satisfied: multidict<7.0,>=4.5 in /home/andrew/miniconda3/lib/python3.12/site-packages (from aiohttp->datasets>=2.7.1->transformer_lens) (6.1.0)\n", - "Requirement already satisfied: yarl<2.0,>=1.12.0 in /home/andrew/miniconda3/lib/python3.12/site-packages (from aiohttp->datasets>=2.7.1->transformer_lens) (1.16.0)\n", - "Requirement already satisfied: gitdb<5,>=4.0.1 in /home/andrew/miniconda3/lib/python3.12/site-packages (from gitpython!=3.1.29,>=1.0.0->wandb>=0.13.5->transformer_lens) (4.0.11)\n", - "Requirement already satisfied: mdurl~=0.1 in /home/andrew/miniconda3/lib/python3.12/site-packages (from markdown-it-py>=2.2.0->rich>=12.6.0->transformer_lens) (0.1.2)\n", - "Requirement already satisfied: charset-normalizer<4,>=2 in /home/andrew/miniconda3/lib/python3.12/site-packages (from requests>=2.32.2->datasets>=2.7.1->transformer_lens) (2.0.4)\n", - "Requirement already satisfied: idna<4,>=2.5 in /home/andrew/miniconda3/lib/python3.12/site-packages (from 
requests>=2.32.2->datasets>=2.7.1->transformer_lens) (3.4)\n", - "Requirement already satisfied: urllib3<3,>=1.21.1 in /home/andrew/miniconda3/lib/python3.12/site-packages (from requests>=2.32.2->datasets>=2.7.1->transformer_lens) (2.1.0)\n", - "Requirement already satisfied: certifi>=2017.4.17 in /home/andrew/miniconda3/lib/python3.12/site-packages (from requests>=2.32.2->datasets>=2.7.1->transformer_lens) (2024.2.2)\n", - "Requirement already satisfied: MarkupSafe>=2.0 in /home/andrew/miniconda3/lib/python3.12/site-packages (from jinja2->torch>=1.10->transformer_lens) (2.1.5)\n", - "Requirement already satisfied: smmap<6,>=3.0.1 in /home/andrew/miniconda3/lib/python3.12/site-packages (from gitdb<5,>=4.0.1->gitpython!=3.1.29,>=1.0.0->wandb>=0.13.5->transformer_lens) (5.0.1)\n", - "Requirement already satisfied: propcache>=0.2.0 in /home/andrew/miniconda3/lib/python3.12/site-packages (from yarl<2.0,>=1.12.0->aiohttp->datasets>=2.7.1->transformer_lens) (0.2.0)\n", + "Requirement already satisfied: jaxtyping in /opt/anaconda3/envs/mechir/lib/python3.11/site-packages (0.2.38)\n", + "Requirement already satisfied: transformer_lens in /opt/anaconda3/envs/mechir/lib/python3.11/site-packages (2.14.0)\n", + "Requirement already satisfied: wadler-lindig>=0.1.3 in /opt/anaconda3/envs/mechir/lib/python3.11/site-packages (from jaxtyping) (0.1.3)\n", + "Requirement already satisfied: accelerate>=0.23.0 in /opt/anaconda3/envs/mechir/lib/python3.11/site-packages (from transformer_lens) (1.3.0)\n", + "Requirement already satisfied: beartype<0.15.0,>=0.14.1 in /opt/anaconda3/envs/mechir/lib/python3.11/site-packages (from transformer_lens) (0.14.1)\n", + "Requirement already satisfied: better-abc<0.0.4,>=0.0.3 in /opt/anaconda3/envs/mechir/lib/python3.11/site-packages (from transformer_lens) (0.0.3)\n", + "Requirement already satisfied: datasets>=2.7.1 in /opt/anaconda3/envs/mechir/lib/python3.11/site-packages (from transformer_lens) (2.21.0)\n", + "Requirement already satisfied: 
einops>=0.6.0 in /opt/anaconda3/envs/mechir/lib/python3.11/site-packages (from transformer_lens) (0.8.1)\n", + "Requirement already satisfied: fancy-einsum>=0.0.3 in /opt/anaconda3/envs/mechir/lib/python3.11/site-packages (from transformer_lens) (0.0.3)\n", + "Requirement already satisfied: numpy>=1.24 in /opt/anaconda3/envs/mechir/lib/python3.11/site-packages (from transformer_lens) (1.26.4)\n", + "Requirement already satisfied: pandas>=1.1.5 in /opt/anaconda3/envs/mechir/lib/python3.11/site-packages (from transformer_lens) (2.2.3)\n", + "Requirement already satisfied: rich>=12.6.0 in /opt/anaconda3/envs/mechir/lib/python3.11/site-packages (from transformer_lens) (13.9.4)\n", + "Requirement already satisfied: sentencepiece in /opt/anaconda3/envs/mechir/lib/python3.11/site-packages (from transformer_lens) (0.2.0)\n", + "Collecting torch<2.5,>=2.2 (from transformer_lens)\n", + " Using cached torch-2.4.1-cp311-none-macosx_11_0_arm64.whl.metadata (26 kB)\n", + "Requirement already satisfied: tqdm>=4.64.1 in /opt/anaconda3/envs/mechir/lib/python3.11/site-packages (from transformer_lens) (4.67.1)\n", + "Requirement already satisfied: transformers>=4.43 in /opt/anaconda3/envs/mechir/lib/python3.11/site-packages (from transformer_lens) (4.48.3)\n", + "Requirement already satisfied: transformers-stream-generator<0.0.6,>=0.0.5 in /opt/anaconda3/envs/mechir/lib/python3.11/site-packages (from transformer_lens) (0.0.5)\n", + "Requirement already satisfied: typeguard<5.0,>=4.2 in /opt/anaconda3/envs/mechir/lib/python3.11/site-packages (from transformer_lens) (4.4.2)\n", + "Requirement already satisfied: typing-extensions in /opt/anaconda3/envs/mechir/lib/python3.11/site-packages (from transformer_lens) (4.12.2)\n", + "Requirement already satisfied: wandb>=0.13.5 in /opt/anaconda3/envs/mechir/lib/python3.11/site-packages (from transformer_lens) (0.19.6)\n", + "Requirement already satisfied: packaging>=20.0 in /opt/anaconda3/envs/mechir/lib/python3.11/site-packages (from 
accelerate>=0.23.0->transformer_lens) (24.2)\n", + "Requirement already satisfied: psutil in /opt/anaconda3/envs/mechir/lib/python3.11/site-packages (from accelerate>=0.23.0->transformer_lens) (6.1.1)\n", + "Requirement already satisfied: pyyaml in /opt/anaconda3/envs/mechir/lib/python3.11/site-packages (from accelerate>=0.23.0->transformer_lens) (6.0.2)\n", + "Requirement already satisfied: huggingface-hub>=0.21.0 in /opt/anaconda3/envs/mechir/lib/python3.11/site-packages (from accelerate>=0.23.0->transformer_lens) (0.28.1)\n", + "Requirement already satisfied: safetensors>=0.4.3 in /opt/anaconda3/envs/mechir/lib/python3.11/site-packages (from accelerate>=0.23.0->transformer_lens) (0.4.5)\n", + "Requirement already satisfied: filelock in /opt/anaconda3/envs/mechir/lib/python3.11/site-packages (from datasets>=2.7.1->transformer_lens) (3.17.0)\n", + "Requirement already satisfied: pyarrow>=15.0.0 in /opt/anaconda3/envs/mechir/lib/python3.11/site-packages (from datasets>=2.7.1->transformer_lens) (19.0.0)\n", + "Requirement already satisfied: dill<0.3.9,>=0.3.0 in /opt/anaconda3/envs/mechir/lib/python3.11/site-packages (from datasets>=2.7.1->transformer_lens) (0.3.8)\n", + "Requirement already satisfied: requests>=2.32.2 in /opt/anaconda3/envs/mechir/lib/python3.11/site-packages (from datasets>=2.7.1->transformer_lens) (2.32.3)\n", + "Requirement already satisfied: xxhash in /opt/anaconda3/envs/mechir/lib/python3.11/site-packages (from datasets>=2.7.1->transformer_lens) (3.5.0)\n", + "Requirement already satisfied: multiprocess in /opt/anaconda3/envs/mechir/lib/python3.11/site-packages (from datasets>=2.7.1->transformer_lens) (0.70.16)\n", + "Requirement already satisfied: fsspec<=2024.6.1,>=2023.1.0 in /opt/anaconda3/envs/mechir/lib/python3.11/site-packages (from fsspec[http]<=2024.6.1,>=2023.1.0->datasets>=2.7.1->transformer_lens) (2024.6.1)\n", + "Requirement already satisfied: aiohttp in /opt/anaconda3/envs/mechir/lib/python3.11/site-packages (from 
datasets>=2.7.1->transformer_lens) (3.11.12)\n", + "Requirement already satisfied: python-dateutil>=2.8.2 in /opt/anaconda3/envs/mechir/lib/python3.11/site-packages (from pandas>=1.1.5->transformer_lens) (2.9.0.post0)\n", + "Requirement already satisfied: pytz>=2020.1 in /opt/anaconda3/envs/mechir/lib/python3.11/site-packages (from pandas>=1.1.5->transformer_lens) (2025.1)\n", + "Requirement already satisfied: tzdata>=2022.7 in /opt/anaconda3/envs/mechir/lib/python3.11/site-packages (from pandas>=1.1.5->transformer_lens) (2025.1)\n", + "Requirement already satisfied: markdown-it-py>=2.2.0 in /opt/anaconda3/envs/mechir/lib/python3.11/site-packages (from rich>=12.6.0->transformer_lens) (3.0.0)\n", + "Requirement already satisfied: pygments<3.0.0,>=2.13.0 in /opt/anaconda3/envs/mechir/lib/python3.11/site-packages (from rich>=12.6.0->transformer_lens) (2.19.1)\n", + "Requirement already satisfied: sympy in /opt/anaconda3/envs/mechir/lib/python3.11/site-packages (from torch<2.5,>=2.2->transformer_lens) (1.13.1)\n", + "Requirement already satisfied: networkx in /opt/anaconda3/envs/mechir/lib/python3.11/site-packages (from torch<2.5,>=2.2->transformer_lens) (3.4.2)\n", + "Requirement already satisfied: jinja2 in /opt/anaconda3/envs/mechir/lib/python3.11/site-packages (from torch<2.5,>=2.2->transformer_lens) (3.1.5)\n", + "Requirement already satisfied: regex!=2019.12.17 in /opt/anaconda3/envs/mechir/lib/python3.11/site-packages (from transformers>=4.43->transformer_lens) (2024.11.6)\n", + "Requirement already satisfied: tokenizers<0.22,>=0.21 in /opt/anaconda3/envs/mechir/lib/python3.11/site-packages (from transformers>=4.43->transformer_lens) (0.21.0)\n", + "Requirement already satisfied: click!=8.0.0,>=7.1 in /opt/anaconda3/envs/mechir/lib/python3.11/site-packages (from wandb>=0.13.5->transformer_lens) (8.1.8)\n", + "Requirement already satisfied: docker-pycreds>=0.4.0 in /opt/anaconda3/envs/mechir/lib/python3.11/site-packages (from wandb>=0.13.5->transformer_lens) 
(0.4.0)\n", + "Requirement already satisfied: gitpython!=3.1.29,>=1.0.0 in /opt/anaconda3/envs/mechir/lib/python3.11/site-packages (from wandb>=0.13.5->transformer_lens) (3.1.44)\n", + "Requirement already satisfied: platformdirs in /opt/anaconda3/envs/mechir/lib/python3.11/site-packages (from wandb>=0.13.5->transformer_lens) (4.3.6)\n", + "Requirement already satisfied: protobuf!=4.21.0,!=5.28.0,<6,>=3.19.0 in /opt/anaconda3/envs/mechir/lib/python3.11/site-packages (from wandb>=0.13.5->transformer_lens) (5.29.3)\n", + "Requirement already satisfied: pydantic<3,>=2.6 in /opt/anaconda3/envs/mechir/lib/python3.11/site-packages (from wandb>=0.13.5->transformer_lens) (2.10.6)\n", + "Requirement already satisfied: sentry-sdk>=2.0.0 in /opt/anaconda3/envs/mechir/lib/python3.11/site-packages (from wandb>=0.13.5->transformer_lens) (2.21.0)\n", + "Requirement already satisfied: setproctitle in /opt/anaconda3/envs/mechir/lib/python3.11/site-packages (from wandb>=0.13.5->transformer_lens) (1.3.4)\n", + "Requirement already satisfied: setuptools in /opt/anaconda3/envs/mechir/lib/python3.11/site-packages (from wandb>=0.13.5->transformer_lens) (75.8.0)\n", + "Requirement already satisfied: six>=1.4.0 in /opt/anaconda3/envs/mechir/lib/python3.11/site-packages (from docker-pycreds>=0.4.0->wandb>=0.13.5->transformer_lens) (1.17.0)\n", + "Requirement already satisfied: aiohappyeyeballs>=2.3.0 in /opt/anaconda3/envs/mechir/lib/python3.11/site-packages (from aiohttp->datasets>=2.7.1->transformer_lens) (2.4.6)\n", + "Requirement already satisfied: aiosignal>=1.1.2 in /opt/anaconda3/envs/mechir/lib/python3.11/site-packages (from aiohttp->datasets>=2.7.1->transformer_lens) (1.3.2)\n", + "Requirement already satisfied: attrs>=17.3.0 in /opt/anaconda3/envs/mechir/lib/python3.11/site-packages (from aiohttp->datasets>=2.7.1->transformer_lens) (25.1.0)\n", + "Requirement already satisfied: frozenlist>=1.1.1 in /opt/anaconda3/envs/mechir/lib/python3.11/site-packages (from 
aiohttp->datasets>=2.7.1->transformer_lens) (1.5.0)\n", + "Requirement already satisfied: multidict<7.0,>=4.5 in /opt/anaconda3/envs/mechir/lib/python3.11/site-packages (from aiohttp->datasets>=2.7.1->transformer_lens) (6.1.0)\n", + "Requirement already satisfied: propcache>=0.2.0 in /opt/anaconda3/envs/mechir/lib/python3.11/site-packages (from aiohttp->datasets>=2.7.1->transformer_lens) (0.2.1)\n", + "Requirement already satisfied: yarl<2.0,>=1.17.0 in /opt/anaconda3/envs/mechir/lib/python3.11/site-packages (from aiohttp->datasets>=2.7.1->transformer_lens) (1.18.3)\n", + "Requirement already satisfied: gitdb<5,>=4.0.1 in /opt/anaconda3/envs/mechir/lib/python3.11/site-packages (from gitpython!=3.1.29,>=1.0.0->wandb>=0.13.5->transformer_lens) (4.0.12)\n", + "Requirement already satisfied: mdurl~=0.1 in /opt/anaconda3/envs/mechir/lib/python3.11/site-packages (from markdown-it-py>=2.2.0->rich>=12.6.0->transformer_lens) (0.1.2)\n", + "Requirement already satisfied: annotated-types>=0.6.0 in /opt/anaconda3/envs/mechir/lib/python3.11/site-packages (from pydantic<3,>=2.6->wandb>=0.13.5->transformer_lens) (0.7.0)\n", + "Requirement already satisfied: pydantic-core==2.27.2 in /opt/anaconda3/envs/mechir/lib/python3.11/site-packages (from pydantic<3,>=2.6->wandb>=0.13.5->transformer_lens) (2.27.2)\n", + "Requirement already satisfied: charset-normalizer<4,>=2 in /opt/anaconda3/envs/mechir/lib/python3.11/site-packages (from requests>=2.32.2->datasets>=2.7.1->transformer_lens) (3.4.1)\n", + "Requirement already satisfied: idna<4,>=2.5 in /opt/anaconda3/envs/mechir/lib/python3.11/site-packages (from requests>=2.32.2->datasets>=2.7.1->transformer_lens) (3.10)\n", + "Requirement already satisfied: urllib3<3,>=1.21.1 in /opt/anaconda3/envs/mechir/lib/python3.11/site-packages (from requests>=2.32.2->datasets>=2.7.1->transformer_lens) (2.3.0)\n", + "Requirement already satisfied: certifi>=2017.4.17 in /opt/anaconda3/envs/mechir/lib/python3.11/site-packages (from 
requests>=2.32.2->datasets>=2.7.1->transformer_lens) (2025.1.31)\n", + "Requirement already satisfied: MarkupSafe>=2.0 in /opt/anaconda3/envs/mechir/lib/python3.11/site-packages (from jinja2->torch<2.5,>=2.2->transformer_lens) (3.0.2)\n", + "Requirement already satisfied: mpmath<1.4,>=1.1.0 in /opt/anaconda3/envs/mechir/lib/python3.11/site-packages (from sympy->torch<2.5,>=2.2->transformer_lens) (1.3.0)\n", + "Requirement already satisfied: smmap<6,>=3.0.1 in /opt/anaconda3/envs/mechir/lib/python3.11/site-packages (from gitdb<5,>=4.0.1->gitpython!=3.1.29,>=1.0.0->wandb>=0.13.5->transformer_lens) (5.0.2)\n", + "Using cached torch-2.4.1-cp311-none-macosx_11_0_arm64.whl (62.1 MB)\n", + "Installing collected packages: torch\n", + " Attempting uninstall: torch\n", + " Found existing installation: torch 2.6.0\n", + " Uninstalling torch-2.6.0:\n", + " Successfully uninstalled torch-2.6.0\n", + "\u001b[31mERROR: pip's dependency resolver does not currently take into account all the packages that are installed. This behaviour is the source of the following dependency conflicts.\n", + "torchaudio 2.6.0 requires torch==2.6.0, but you have torch 2.4.1 which is incompatible.\n", + "torchvision 0.21.0 requires torch==2.6.0, but you have torch 2.4.1 which is incompatible.\u001b[0m\u001b[31m\n", + "\u001b[0mSuccessfully installed torch-2.4.1\n", "Note: you may need to restart the kernel to use updated packages.\n", - "Processing /home/andrew/Documents/Code/MechIR\n", - " Preparing metadata (setup.py) ... \u001b[?25ldone\n", - "\u001b[?25hBuilding wheels for collected packages: mechir\n", - " Building wheel for mechir (setup.py) ... 
\u001b[?25ldone\n", - "\u001b[?25h Created wheel for mechir: filename=mechir-0.0.1-py3-none-any.whl size=76527 sha256=549ec71593e7476a5bcecf264dc4a238176babaf5b1c3472e53fab84a2df16e9\n", - " Stored in directory: /tmp/pip-ephem-wheel-cache-tq1ty5ep/wheels/16/4d/fd/e2f041bb0629a1af518d10cab4601f84986b0213a8a30041cf\n", - "Successfully built mechir\n", - "Installing collected packages: mechir\n", - " Attempting uninstall: mechir\n", - " Found existing installation: mechir 0.0.1\n", - " Uninstalling mechir-0.0.1:\n", - " Successfully uninstalled mechir-0.0.1\n", - "Successfully installed mechir-0.0.1\n", "Note: you may need to restart the kernel to use updated packages.\n" ] } @@ -168,7 +181,7 @@ }, { "cell_type": "code", - "execution_count": 2, + "execution_count": 15, "metadata": {}, "outputs": [], "source": [ @@ -181,7 +194,7 @@ }, { "cell_type": "code", - "execution_count": 3, + "execution_count": 16, "metadata": {}, "outputs": [], "source": [ @@ -190,7 +203,7 @@ }, { "cell_type": "code", - "execution_count": 4, + "execution_count": 17, "metadata": {}, "outputs": [], "source": [ @@ -201,7 +214,7 @@ }, { "cell_type": "code", - "execution_count": 5, + "execution_count": 18, "metadata": {}, "outputs": [], "source": [ @@ -252,7 +265,7 @@ }, { "cell_type": "code", - "execution_count": 24, + "execution_count": 19, "metadata": {}, "outputs": [], "source": [ @@ -331,7 +344,7 @@ }, { "cell_type": "code", - "execution_count": 7, + "execution_count": 20, "metadata": {}, "outputs": [], "source": [ @@ -354,17 +367,17 @@ }, { "cell_type": "code", - "execution_count": 25, + "execution_count": 21, "metadata": {}, "outputs": [ { "data": { - "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAB64AAAI2CAYAAADgnaZqAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjkuMiwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8hTgPZAAAACXBIWXMAAA9hAAAPYQGoP6dpAADNp0lEQVR4nOzdeXxU1f3/8fdM9pAECIuBAGEHpQgEQZF9kUUpIoK7giiJWwW0KlARFbSVCtXSbzURF6QVy6oobqyyVEQCiIqsAgEMICQEsi9zf3/wy5SQBGYyE84kvJ6Pxzwgc8+5n89MMnPnM+eec22WZVkCAAAAAAAAAAAAAMAQu+kEAAAAAAAAAAAAAACXNwauAQAAAAAAAAAAAABGMXANAAAAAAAAAAAAADCKgWsAAAAAAAAAAAAAgFEMXAMAAAAAAAAAAAAAjGLgGgAAAAAAAAAAAABgFAPXAAAAAAAAAAAAAACjGLgGAAAAAAAAAAAAABjFwDUAAAAAAAAAAAAAwCgGrlGpvPfee7LZbGrcuLHpVKqU559/XjabTb169TKdCgAAlZLNZpPNZtOaNWtMpwIAVQK1X8Wg9gMAwDPUfgBQsRi4xiVTVCCXdgsNDVWLFi00cuRI/fe//62wHA4cOFBmDqXd3nvvvQrLBRe3Zs0at35f598OHDgg6X9fernTpzTHjx/XX/7yF91www1q0KCBQkJCVK1aNTVu3FhDhw5VYmKiTp06VaJfVlaWPv/8c02bNk3Dhg1TTEyMM97zzz9fIc8dAPcUFhZq/vz5uu+++9SyZUvVqFFDgYGBqlu3rrp166aJEyfqxx9/NJ2mTxg1apTL76kMNpjXq1evch9HR40a5dxP48aN3e5Tms8//1xxcXFq06aNIiMjFRAQoFq1aqlz584aN26cvv3221L77dy5U++8844effRRdenSRaGhoc6YAHwPtR/cVdG1X1hYmOrXr69rrrlGDz74oObMmaPMzEy3cty/f7+mTJmi7t27q379+goKClJ4eLhatGih22+/Xf/+97+VlZVVAc8OAG+i9nMdtV/lUpG1X1Hd1qxZMw0ePFjPPfectm3b5lZ+vPYAuMrfdAK4PF1xxRXO/zscDqWmpmrv3r3au3ev3n//fU2ZMqXUAb3q1aurVatWio6O9jiHiIgIhYSEXLDNxbajYgUGBhb7WzlXamqq8vPzFRAQoMjIyFLb+Pn5lbivdu3apd5/oT6WZenPf/6zXnrppWJfRISFhclms+ngwYM6ePCgPv74Yz399NOaOXOmRo8e7Wy3adMm3XjjjWXGBGDWxo0bNXLkSO3evdt5X0BAgMLDw3Xy5Elt2LBBGzZs0F/+8hcNGzZM8+bNU2BgoMGMfYPdbledOnUu2OZi21HxIiMjSz2W5uXlKS0tTZJUs2bNUv+mq1evXuK+4ODgUu+/UB9J2r17t+6++25t3rzZeZ+fn5+qV6+u9PR0fffdd/ruu+/0+uuvq3fv3po/f75q167tbPvQQw/p66+/LvuBAvBZ1H5wRUXXfnl5eTp27JhSUlKUlJSkt99+W4899pieeuopTZo0Sf7+ZX89lp+fr6eeekr/93//p4KCAuf91atXV35+vvPvef78+YqKitLs2bN10003ufPwAVwi1H7lQ+1XOVRk7WdZlk6fPq3U1FT98ssvWrZsmaZOnarrrrtOCQkJuvrqqy+YG689AG6xgEtkypQpliSrtD+7goICa/369VbHjh2dbTZs2OD1HPbv3+/c/7vvvuv1/VdWRb+bnj17mk7FZT179nQ553fffdf5e9+/f79bcRwOh3X33Xc7+1977bXWokWLrLS0NGeb9PR0a8mSJdbvf/97S5J18803F9vH6tWrrZo1a1p9+/a1nnrqKWvevHlWVFSUJcmaMmWKW/kA8K6lS5daQUFBliSrVq1a1p///Gdr9+7dzu0FBQXWd999Z02YMMGKiIiwJBV7/V+ORo4caUmyYmJiTKfiU4qOE6t
XrzadiktWr17tVs4xMTGWJGvkyJFux9q0aZNVo0YNS5JVrVo1a+LEidb3339vORwOy7Isq7Cw0Prpp5+sl156ybriiissSdbWrVuL7aNv377WVVddZd1zzz3WzJkzrSeeeKLMz5UAzKP2812Xc+3ncDisnTt3Wm+++abVtm1bZ7tevXpZOTk5pe4vNzfX6tWrl7PtwIEDrc8//9zKzMx0tvntt9+sDz74wOrevbslyRo7dqwHjxZARaH2cx+1X+ku59ovPT3dWrVqlTVmzBjn6ykwMND65JNPytwfrz0A7mLGNXyCn5+funbtqo8++kgNGzaUJH388ce6/vrrDWeGy9306dP173//W5I0btw4zZw5s8SypBERERo6dKiGDh2qtWvXav78+cW2d+/eXampqcXumzBhQsUmDuCi9uzZo3vuuUe5ubm66qqr9OWXX6pBgwbF2vj5+emaa67RNddco6eeeqrYagoALu7kyZMaNmyYTp06pfr16+urr75SmzZtirWx2+266qqrdNVVV2ncuHEaP358iWPtl19+WWw2HUv6ApUXtR9MsdlsatWqlVq1aqUHH3zQuVrWmjVr9PjjjyshIaFEn7FjxzqvYTpz5kyNHz++RJvatWvrzjvv1J133qlFixaxzCngg6j9AO+IiIhQ79691bt3bz322GMaPHiwDh06pDvuuENbt25VixYtirXntQegPLjGNXxKgwYNVKtWLUlSRkZGie1F16sydd2Uomu19erVS5K0cuVK3XTTTapTp46Cg4N15ZVX6oUXXlBOTs4F93Py5Em9+OKLuvbaaxUZGang4GA1btxY/fv31xtvvKH09PRS+y1evFiDBw/WFVdc4VxKbfDgwVqyZMlFc//88891ww03qEaNGgoLC1O7du00ffp05efnu/TYDxw4oHHjxqlNmzYKCwtTaGioWrdurbFjxyo5ObnUPuf/vlavXq2hQ4eqXr168vPzu+h1ME07ceKEpk6dKknq27dvqYPW5+vRo4f+/ve/F7vvQkuTAzDn2Wef1enTpxUcHKwlS5aUKJ7OFxkZqY8++qjYElruvs/t27dPDz/8sFq0aKGQkBBFREQoNjZWL774ok6fPl1m7MOHD2v8+PFq06aNqlWrpqCgINWvX18dO3bU+PHj9d1335Xok5aWpueee06xsbGKiIhQYGCgoqKidPXVV+uhhx7SypUrXX+yvKjoGllr1qzRmTNn9Oyzz6p169YKCQlRrVq1NHjw4DKvc3yur776SnfccYdiYmIUEhKiyMhIXX311frDH/6gb775ptQ+R48e1VNPPeV8HqtVq6Y2bdro6aef1rFjxy4YLy0tTU899ZSaNWum4OBg1atXTyNGjFBSUpJLj9vhcOjf//63brzxRudxvE6dOurfv7/mzZsny7JK7Vd0fbH33ntPGRkZeu6559S2bVuFh4cXu6anr5o+fboOHz4sSZo3b16JQevzhYaGKiEhQW3bti12P8dSoOqh9qP2M8nPz08zZsxwLun99ttvF1u+VJJ27NjhHMwePXp0qYPW57v11ls1efJk7ycMwCPUftR+1H7ed/XVV2vhwoWy2WzKzMzUiy++WKKNN157AC5Dpqd84/JxoeXiihw+fNjZ5vXXXy+xvWjZr/IuUePpcnHnLqs2ffp0y2azWTabzapRo4Zls9mc++7du7dVUFBQ6j6+/PJLq2bNms62/v7+Vq1atayAgADnfUuWLCnWJzc317r99tud2+12u1WzZk3Lbrc777vzzjutvLy8C+ZddKtRo4bl7+9vSbJ69OhhTZw48YJLr/3rX/9yLukiyQoKCrJCQkKcP4eHh1tffvlliX7n/r5ee+0153NUvXp1KyAgoFzLjRa5FEuFT58+3dlv3bp15c61NEXL7rBUOGDG0aNHne+hDzzwQLn348773H/+859i76Xh4eHFfm7YsKG1Y8eOEjG2bdtW7Ljh5+dn1axZs9hx5/z300OHDlmNGjUqcdz
w8/Nz3lfeJUI9XS6uKP4HH3xgNW/e3JJkBQcHW6Ghoc5tgYGBpR5XLMuyMjMzrREjRhQ7roWHh1vVq1d3/tyuXbsS/dasWeNcrlr/f8nqatWqOX+uWbNmme/1+/fvd75vF+VXtIRZYGCg9fHHH19w6bWTJ09aPXr0KJbzuflKsoYMGWLl5uaW6FsU99VXX7VatmzpjFn0WNy9BEaRS7FUeH5+vvNx9u3bt1x5luXcYzsA30Ptdxa1n+/Xft99952z/eTJk4tte+SRR5yfvQ4cOFDO7AGYRu1H7UftV7G130033eT8zJCVleW831uvPQCXH2ZcwycUFhbqm2++0S233CJJqlu3ru677z7DWZXt+++/14QJEzRhwgQdP35caWlpOnXqlJ577jlJZ8+6nDNnTol+W7du1c0336y0tDS1adNGn332mbKysnTixAllZ2dr8+bNevLJJxUeHl6s36RJk/Sf//xHNptNkydP1smTJ5WamqoTJ05o0qRJks7OYirtzO6lS5fqhRdekCSNGDFCycnJSktL0+nTp/V///d/2rhxo954440yH+vy5ct13333qbCwUE8//bT279+v7OxsZWZmaufOnRoxYoTOnDnj3Hdpjh07pieffFIjR45UcnKyTp06pezsbJ8/E73ojNQ6deqoW7duhrMB4E2rV6+Ww+GQJOexxxMXe5/bsmWLc3msrl27avv27Tp9+rSysrK0dOlS1atXT4cOHdLvf//7ErPOnnzySaWlpSk2NlbffPON8vPzlZqaqpycHO3evVuvvvpqiVmszz//vJKTk9W4cWOtWLFCeXl5Sk1NVW5urg4cOKA33nhD1113nceP2xOPPvqoAgMDtWrVKmVmZiojI0ObNm1Sq1atlJeXp7i4OOfv6Fz333+/FixYILvdrmeeeUaHDh3S6dOnderUKf3222/697//rS5duhTrc+jQIQ0dOlSnTp3SVVddpfXr1ysjI0MZGRlau3atWrVqpbS0NN188806cuRIsb6FhYUaMWKEDh48qJo1a2r+/PnKzMxUenq6fvrpJ1177bUaOXJkmY+zsLBQw4YN09q1a9W+fXt98sknyszM1KlTp5SRkaE5c+aobt26Wrp0qZ555pky9/P888/r9OnTWrJkiTIyMpSWlqZDhw6pbt26bj7zl87mzZudMwm98ToDUDVQ+1H7+ZJrrrnGeSz9+uuvi20rqgc7dOigmJiYS54bAO+g9qP2o/arWEWrl+Tm5mrjxo3O+7392gNwGTE9co7Lx7lnfl9xxRXOW506dZxnAUZERFh33313mWcze/Os+4iIiGJ5lHa70GMoa6bssGHDLElWv379Smzr1q2bJclq0aKFderUKZdyPnz4sPMM+YkTJ5ba5oknnrAkWQEBAdavv/5abNtVV13lPLuysLCwRN8333yzzDMwCwsLrRYtWliSrISEhDJzHDJkiCXJGjt2bLH7zz3bfdiwYS49XleV96z72rVrl/n7/vzzz4v1a9CggSXJuuGGG7yau2Ux4xow7dlnn3W+Lxw5cqTc+3H1fW7gwIGWJKt58+ZWZmZmie1btmxxvtf/9a9/LbataJbTf//7X5fzuvLKK51ntntb0Vn3drv9osfR8x+LZf3vrPs6depYx44dK7F9+/btzjbr168vtm3FihXObf/85z9dzvmhhx5ynlmfkpJSYvuhQ4ecZ9E/+uijxbb95z//ccZcsWJFib6ZmZlWs2bNyjyD/f3337ckWa1bty7z2L9582bLZrNZgYGBJZ6TouOFn5+ftWXLFpcf88WU96z74ODgMn/f58+qnj17tjPGhg0bvJa7ZTHjGvB11H7Uft5U0att3XDDDZYkKzo62nlffn6+c4bjmDFjypk5AF9A7Vd+1H7FXW61n6szrjds2ODc/1tvveW831uvPQCXH2Zcw4hjx445b7/99psKCwslSVlZWUpPT7/otUa84fTp08XyKO1WlqCgIP3xj38sddvNN98sSdq
+fXux+/fs2aP169dLkl5++WWXr9WxaNEiFRQUKDg4WBMmTCi1zbPPPqugoCDl5+dr4cKFzvu3b9+uHTt2ONvY7SVf8mPGjFF0dHSp+127dq327Nmj2rVr68EHHywzx6IZEl9++WWZbSZOnFjmtkvpxIkTZf6+z78+3cmTJyWdvb4KgKql6PUtee81Xtb73KlTp5zvj0899ZRCQ0NLtOnQoYOGDRsm6ewsqnPVqFFDkpSSkuJyLuXp4y6Hw3HR42hp1ywtEhcXV+oZ423btlWTJk0klTyWvvPOO5Kk3/3ud3r44YddytOyLM2fP1+S9NBDDykqKqpEmwYNGuihhx6SJH344YfFthX93LVrV/Xt27dE39DQUD399NNlxn/77bclSQ8//HCZx/6OHTuqTZs2ysvL0+rVq0ttM3DgQHXo0KHMOJdKTk5Omb/vEydOFGtbEa8zAJUPtR+1n68rOkalpqY670tNTXVeg5RjGFC5Uft5jtrvrMut9nPVua+rc4+l1IMAyouBaxhhWVaxW3Z2trZu3aqRI0fq008/VY8ePfTRRx9VaA7vvvtuiTzOv5WlTZs2CgsLK3Vb/fr1JRU/UEvSf//7X0mSn5+fBg0a5HKemzdvliR16tRJERERpbapWbOmrrnmmmLtz/2/v7+/unfvXmpfu92uXr16lbptw4YNkqT09HTVr19fUVFRpd7GjBkjSTp48GCp+wkJCVFsbOxFHumlsX///jJ/30OHDjWdHoBK6kLvc1u2bHEeU/r161fmPm644QZJZwv2/Px85/2DBw+WJI0cOVJPPvmkvv76a2VlZV0wn6I+EyZMUFxcnL744gudPn3a9QfkgpiYmIseR59//vky+1977bVlbrvYsbTo8bli//79zv248vyfPHlS+/fvd95fdCzt06dPmX3L2lZYWOhcKu35558v8zgaFRWlXbt2SSr7WNq1a9cy419KI0eOLPP3vW3bNtPpAfBB1H7UfgBQlVD7UfuVpirWfgBgCgPX8AnBwcFq3769Zs+erVtuuUW5ubkaNWqUyx+0Xn311TI/DBw6dMjr+Z5/HbJz+fv7S5IKCgqK3X/06FFJUu3atVWtWjWXYx0/flySyjwzvkiDBg2KtT/3/7Vr11ZQUNBF+57v119/lSTl5+df8KzKtLQ0SVJ2dnap+6lVq1apZ/z7ulq1akkq+eEZQOVX9PqWvPMav9D73Lnvyxd6Ly96Ly4oKCiW0/Tp09W7d29lZGRo5syZ6tWrlyIiInTNNddoypQpJa7LJZ09u/+2225Tfn6+3nrrLQ0aNEg1atRQ27Zt9dRTTzkL5XOVdRwdO3asy8+DO1w5lp77JY70v2OpO9eZdPf5P7+PK8fhso6jRdeWk6S0tLQLHkuLHmtZX0z56vXMLsTbrzMAVQO1X9mo/cwoOkade9yKjIyUzWYrth1A5UTtR+13Pmo/7zr3b/jc1xv1IIDyqlzVBC4LRWdwp6en67PPPnOpT0ZGRpkfBoqWojOtqOitTIqeu2uvvfaiZ1ZeaKaCn5/fpUzba9q0aSNJzCADqqCi17ckbd261eP9VeT7XI0aNbRq1SqtW7dOTz/9tLp27Sp/f38lJSXpxRdfVIsWLUosMRcQEKD//Oc/2rZtm5577jn16dNHoaGh+vHHH/Xqq6+qTZs2mjFjRrE+ZR1H09PTK+yxuauyHUvP/Qzy+eefu3QcLWumQmU8lnr7dQag6qH28x2Xc+1XtDxts2bNnPf5+/urZcuWkjiGAZUdtR+136VwOdd+33//vfP/5x5LqQcBlBcD1/A5555Jd+5yLRfy/PPPl/khoHHjxhWUqXuKrqty4sQJZWZmutyv6Cy7w4cPX7Bd0fZzz8or+v+JEyeUl5dXZt/Sztg8N+eylq6p6oquZ/Pbb785r1EHoGro3bu38yz5JUuWVGisc9+XL/ReXrTN39+/1Os/devWTa+88orWr1+vU6dO6eOPP1bbtm2VnZ2t0aNHl3p9znb
t2umFF17QypUrderUKa1YsUI9evRQYWGhnnrqqWIFZlnH0ffee8+DR+9d5Tkuufv8n9+n6P9lHSsvtK1WrVrOGQSX47H0mmuucV7braJfZwAqJ2q/4qj9Lr3Nmzc7P0Odv4x6UT24devWy+55AaoSaj9qv/NR+3nXsmXLJElBQUG67rrrnPdfytcegKqFgWv4nHM/PLizrJqvu/766yWdPQPv888/d7nfudcvK+vMx1OnThW7Htr5fQsKCrRu3bpS+zocDq1Zs6bUbUXXVDl69Gix66ddLu6//36FhoZK+t8XZK5wOBwVmRYAL7jiiit06623SpI++OAD7d692+W+rr4XFImNjXUWaytXriyz3YoVKySd/cIhICDggvsMDg7WkCFDtHjxYklSTk7ORU+w8ff3V9++fbVs2TIFBQXJsixnzMqi6Fj6ySefuNynSZMmzi+DXHn+a9WqpSZNmjjvLzqWrl69usy+q1atKvX+gIAAde7c2e2cqwp/f3/FxcVJOvvcr1271uW+HEuBywO1X3HUfpfeCy+8IOns7LZ77rmn2LZHHnlENptNhYWFevHFF13eJ8cwwLdQ+1H7nY/az3s2bdrkXDXnjjvuUHBwsHPbpXztAahaGLiGz/nggw+c/y/6wFAVNG/eXD169JAkTZo0yeVruN16663y9/dXTk6OXnnllVLbvPzyy8rNzVVAQIDzA4EkXX311bryyislSS+99FKpBfQ777xT5lmIvXv3VvPmzSVJ48ePv+CZ+1LVu15J7dq19eyzz0o6+4H3ySefvOgHpw0bNlTYNYEAeNe0adMUFham7OxsDRs27IJnVUtnr1N16623ur18Wo0aNTRgwABJ0l//+tdSr2P1/fffa9GiRZKkO++803l/QUHBBb/8DAkJcf7/3OusFV1bqzRBQUHOpccq2zUoH3jgAUnSTz/9pDfeeMOlPjabTbfffrskKSEhwXmttHP9+uuvSkhIkFT8+Zfk7Lt+/fpSv+zPzs7WX//61zLjFw3cfvbZZxddBreqHUcl6emnn1b9+vUlnX1uf/rppwu2z87O1iOPPKIffvjhUqQHwDBqv+Ko/S6dwsJCPfnkk/r0008lnV22vujxF2nTpo1zOft33nlHr7322kX3+9FHH2natGlezxeAZ6j9qP2KUPt5zw8//KDhw4fLsixVq1ZNkydPLtHmUr32AFQtleuIhSrt6NGjevbZZzVnzhxJ0nXXXacuXboYzsq7Xn/9dQUHB2vPnj3q2rWrvvjiC+Xn50s6Wzh/9913euihh4qdBRkdHe0cCP3LX/6iKVOm6NSpU5LOnm0/efJk54emJ554QvXq1SsW86WXXpJ09mzBu+66y/lFRU5Ojt5880099thjqlGjRqn5+vv7680335S/v7/Wr1+vHj16aOXKlc6cJemXX37Rm2++qU6dOumf//yn50+Sj5kwYYLzg+vf/vY3de3aVUuWLCn25dOZM2f06aefatiwYerevbsOHTpUYj9paWk6ceKE81ZUjGRlZRW7PyMj49I8MABq2bKl5s6dq8DAQP30009q3769XnnlFe3du9fZprCwUFu3btVzzz2npk2bOs9yd9e0adMUEBCgvXv3asCAAc5BOYfDoc8++0w33nijCgoK1KxZM8XHxzv7HT58WC1atNC0adO0detWFRQUOLdt377dOTOoWrVq6tmzp3NbTEyMJk6cqI0bNxb7ImPv3r26++67lZWVJbvd7vxSpbLo3bu37rjjDknSY489pokTJxb7Av7EiROaPXu280uOIpMmTVKNGjWUmpqqfv366b///a9z24YNG9SvXz+dOnVKkZGRmjBhQrG+t956q2JjY53/X7RokfP6ZT///LMGDRqk3377rcyc77nnHvXr10+WZemWW27RtGnT9Ouvvzq3Z2ZmavXq1Xr00UfVtGnTcj4zvqt27dpatGiRIiIi9Ouvv+raa6/VpEmT9OOPPzpPBrMsSzt37tT06dPVrFk
zvfHGGyVOFMvNzS3zeHnu/eceYwH4Lmo/aj8TLMvSnj17lJiYqNjYWM2cOVPS2SXBX3/99VL7/P3vf1f37t0lnR3Qv/HGG/Xll18qOzvb2SY1NVULFixQnz59dMstt1SJwQigqqH2o/aTqP284cyZM1qzZo3i4+PVuXNnHTp0SIGBgVqwYEGx61sXuZSvPQBViAVcIlOmTLEkWZKsK664otitevXqzm2SrLZt21pHjhwpsY93333XkmTFxMSUK4f9+/c7Y0RERJTI4/zb448/Xupj6NmzZ5kxVq9e7YxRmi+//LLY4w0ICLBq1aplBQQEOO9bsmRJsT65ubnWbbfd5txut9utmjVrWna73XnfnXfeaeXl5ZUa809/+lOx57dmzZqWv7+/Jcnq3r27NXHixAs+riVLlljh4eElcg4KCiq232nTphXr5+nv60J69ux50d/F+XlIsvbv3+92LIfDYb3wwgtWSEhIsccbHh5e7HmRZEVGRlrvv/9+iX3ExMQUa1fWbeTIkW7nB8Az69evt5o3b17stRgYGGhFRkYWe5+12Wwl3mvdeZ/78MMPrcDAwGLHoeDgYOfPDRs2tHbs2FGsz7nHLUmWn5+fFRkZWWw/gYGB1oIFC4r1O7dP0THj3Fg2m83629/+Vq7na+TIkc79Xuw4esUVV1jJycml5rZ69eoyYxS9x0+ZMqXEtszMTGvYsGHFHmNERESxY2u7du1K9FuzZk2xNtWqVbOqVavm/LlGjRrW2rVrS81n3759VsOGDZ1tg4KCnPsKDAy0Pv744ws+rvT0dGvw4MElcq5Ro4Zls9mc9/n7+5foW3T8ePfdd8t8vsrj3M8rF/pdnJ9HeY9TO3bssGJjY4s9B/7+/lZkZKTzM0nRbcCAAdaJEyeK9T/3WH6xW3mO9QC8i9rvLGo/7yhv7Ve7dm3n7zcyMtLy8/MrUc9NmzbNys/Pv+A+c3NzrUcffbTE8ap69erFPktIsho0aGB98cUXXnrkALyN2s891H6Xd+0XHBzs/N3WrVvXCg0NLVF7XX/99dYPP/xw0X168toDcPnxF2DAsWPHiv0cEBCgqKgotWvXTsOHD9d9992nwMDACs3h9OnTF12yrSKWJenfv7/27Nmj119/XZ999pn27dunzMxMRUdHq1WrVho2bJj69OlTrE9gYKD+85//6LbbbtPbb7+tzZs3Ky0tTbVq1dI111yjMWPG6JZbbikz5rRp09SlSxfNnDlTmzdvVm5urq688krdfffdeuKJJ5xn5pdl6NCh2rt3r/75z3/q888/1549e3Tq1ClVq1ZNrVu3VqdOnXTTTTfpxhtv9Mpz5GtsNpuee+45xcXF6d1339WKFSu0c+dOnTx5Una7XTExMerQoYMGDx6s2267TeHh4aZTBuCGrl27aufOnVqwYIE+/fRTffvttzp+/LjOnDmjyMhItW7dWj179tS9996rVq1alTvO7bffro4dO+rVV1/VihUrdPjwYQUEBKh9+/a65ZZbNG7cOEVERBTrEx0draVLl2r16tX65ptvdPjwYR0/flz+/v5q3ry5evfurbFjx6pFixbF+n311VdavXq11q9fr+TkZOdxt3nz5urevbseffRRdezYsdyPRTo7Y+D843lpis5Q95bQ0FAtWrRIy5Yt09tvv61vv/1WJ06cUHh4uK6++mr16tVLd999d4l+PXv21M8//6wZM2bos88+04EDB2Sz2XTllVfqpptu0pNPPqmoqKhSYzZt2lTbtm3TSy+9pCVLlujIkSMKDg7WDTfcoAkTJlz0uYyIiNAnn3yizz//XHPmzNE333yjY8eOybIsRUdH66qrrlLv3r112223eeU58kVXXnmlkpKStGzZMi1ZskQbNmzQ0aNHdfr0aUVERKhZs2bq1q2b7rnnHucsBwBVA7UftZ8pJ06ccP4/NDRUderUUXR0tNq3b68ePXro1ltvdena6oGBgfrHP/6hJ554Qu+++65WrVqlffv
2KTU1VYGBgWrevLmuueYaDR06VEOHDlVQUFBFPiwAHqD2Kx9qv8uz9svJyVFOTo6ksyuzhIeHKyoqSldeeaViY2N16623ql27di7t61K99gBUDTbL4kr3AAAAAAAAAAAAAOANR48e1fLly/Xdd9/pu+++07Zt25STk6OePXtqzZo1Hu179erVmjFjhr799ltlZGQoJiZGI0aM0IQJEy54cmZGRob+8pe/aOHChTp48KDCwsJ07bXX6o9//KN69erlUU7ewsA1AAAAAAAAAAAAAHjJa6+9pvHjx5e439OB61mzZmns2LGyLEsNGjRQnTp1tGPHDudqS+vXr1dkZGSJfidOnFC3bt20a9cuBQUF6aqrrtJvv/2mw4cPy2az6R//+IceeeSRcuflLXbTCQAAAAAAAAAAAABAVREREaF+/fpp4sSJWrx4sSZPnuzxPpOSkjRu3DhJUkJCgpKTk7Vlyxb98ssv6tixo37++WeNGTOm1L4PPPCAdu3apY4dO+qXX37Rli1blJycrISEBFmWpccff1zbtm3zOEdPMeMaAAAAAAAAAAAAACrIP/7xD/3hD3/waMb10KFD9fHHH+u+++7TnDlzim3bs2ePWrduLYfDoe+//15XX321c9vWrVsVGxsru92uXbt2qXnz5sX63nfffZo7d66GDRumRYsWlSs3b2HGNQAAAAAAAAAAAAD4qIyMDH3xxReSpLi4uBLbW7RooT59+kiSFixYUGzbwoULJUl9+vQpMWgtSfHx8ZKkzz77TJmZmV7N210MXAMAAAAAAAAAAACAj9q6datyc3MVFBSkzp07l9qme/fukqSNGzcWu7/o5x49epTar3PnzgoKClJOTo7x5cIZuAYAAAAAAAAAAAAAH7V7925JUqNGjRQQEFBqm2bNmkmSdu3aVWrfou3nCwgIUMOGDUvte6n5G40OAAAAAAAAAAAAAAYlJCQoMTHRrT5xcXHOZbYrWmpqqiQpMjKyzDZF29LS0rzW91Jj4PoisnNyTKdwydksy3QKRlg2m+kUjDD1+zb5fBc6zP2NB59KNhY7r2aMsdgmhYYEm06hUsjIyjYW218OY7GzCs29F4U5sozEtezmPv7Z8s08ZknyyzxpLHZhRD1jsR0BIcZi//dXc7/vbw6aKbIeuCbaSFxJiqpezVjsyuaMwWNegKHDjkMGP3sbrC9zCszFDgs0t8CdzVFoLLZl9zMS1+RjtueeMRbbCjT33m/yM2VwiLnPN5WJyRqv0OBXi3kGg5t878/ON1dXhwSYedwmv8O2FeYZi235BRqLbXMUGIv975/NDaQNa13bSNwaYaFG4lZGgR1Ge2U/k25upC1btrjVJyUlxSuxXZHz/8crAwPLfh8ICgqSJGVnF/8c4EnfS42BawAAAAAAAAAAAACXrXr16ik2NtbtPpdKcPDZCVp5eWWfPJObmytJCjnvRL/g4GBlZWWVq++lxsA1AAAAAAAAAAAAgErH5qVVf+Lj4y/Zst/lUbNmTUn/W/a7NEXbitqe2zcrK6tcfS81c+uXAAAAAAAAAAAAAAAuqGXLlpKk5ORk5efnl9pm3759xdqe33fv3r2l9svPz1dycnKpfS81Bq4BAAAAAAAAAAAAVDo2u59Xbr6uQ4cOCgwMVG5urjZt2lRqm3Xr1kmSunTpUuz+6667rtj2823atEl5eXkKDg5W+/btvZd0OTBwDQAAAAAAAAAAAAA+Kjw8XAMGDJAkJSYmlti+Z88erVq1SpI0fPjwYtuKfl69enWps64TEhIkSYMGDVJYWJhX83YXA9cAAAAAAAAAAAAAKp2qNuO6W7duaty4sV577bUS2yZPniybzaa5c+cqMTFRlmVJklJSUnTnnXfK4XBo6NChateuXbF+sbGxGjx4sAoLC3XHHXcoJSVFkmRZlhITEzV37lzZ7XY9++yzFf74LsbfdAIAAAAAAAAAAAAA4C5fGnQ+16FDh9ShQwfnzzk5OZKkDRs2qHbt2s7
7n376aT399NPOnw8fPqyDBw/q1KlTJfbZqVMnzZw5U0888YTi4+M1bdo01a5dWzt27FBubq5atWqlt956q9R83nnnHXXt2lVJSUlq0qSJrrrqKp04cUKHDh2SzWbTa6+9ptjYWC89+vJjxjUAAAAAAAAAAAAAeElhYaFOnjzpvGVmZkqSCgoKit2flZXl1n7HjRun5cuXa9CgQcrMzNSOHTsUExOjSZMmafPmzcUGxc9Vp04dJSUladKkSYqJidGOHTuUmZmpQYMGaeXKlfrDH/7g8WP2BmZcAwDgo1avXq0ZM2bo22+/VUZGhmJiYjRixAhNmDBB1apVM50eAAAAAMAN1HgAAHifzc83Z1w3btzYuZS3Ow4cOHDRNn379lXfvn3d3nd4eLheeuklvfTSS273vVSYcQ0AgA+aNWuW+vbtq2XLlik4OFhXXnmlDhw4oGnTpqlTp05KTU01nSIAAAAAwEXUeAAAABfHwDUAAD4mKSlJ48aNkyQlJCQoOTlZW7Zs0S+//KKOHTvq559/1pgxY8wmCQAAAABwCTUeAAAVx27388oNvoGBawAAfMzUqVPlcDh07733Ki4uTjabTZJUv359zZs3T3a7XYsXL9b27dsNZwoAAAAAuBhqPAAAANcwcA0AgA/JyMjQF198IUmKi4srsb1Fixbq06ePJGnBggWXNDcAAAAAgHuo8QAAqFg2u59XbvANDFwDAOBDtm7dqtzcXAUFBalz586ltunevbskaePGjZcyNQAAAACAm6jxAACoWAxcVy0MXAMA4EN2794tSWrUqJECAgJKbdOsWTNJ0q5duy5ZXgAAAAAA91HjAQAAuM7fdAIAAOB/UlNTJUmRkZFltinalpaWdklyAgAAAACUDzUeAAAVy2Znjm5Vwm8TAAAfkpOTI0kKDAwss01QUJAkKTs7+5LkBAAAAAAoH2o8AAAA1zHjGgAAHxIcHCxJysvLK7NNbm6uJCkkJKTMNgkJCUpMTHQ57shRozT6gQddbg8AAAAAuDhqPAAAKhbXp65aGLgGAMCH1KxZU9L/lpMrTdG2oralSUlJ0ZYtW1yOO2DgQJfbAgAAAABcQ40HAADgOgauAQDwIS1btpQkJScnKz8/XwEBASXa7Nu3r1jb0tSrV0+xsbEux42KinIzUwAAAADAxVDjAQBQsZhxXbUwcA0AgA/p0KGDAgMDlZubq02bNqlr164l2qxbt06S1KVLlzL3Ex8fr/j4eJfjZmRxLTUAAAAA8DZqPAAAKhYD11WL3XQCAADgf8LDwzVgwABJKvX6ZXv27NGqVaskScOHD7+kuQEAAAAA3EONBwAA4DoGrgEA8DGTJ0+WzWbT3LlzlZiYKMuyJJ29ptmdd94ph8OhoUOHql27doYzBQAAAABcDDUeAAAVx+bn55UbfAMD1wAA+JhOnTpp5syZks4uBxcTE6PY2Fg1adJESUlJatWqld566y3DWQIAAAAAXEGNBwAA4BoGrgEA8EHjxo3T8uXLNWjQIGVmZmrHjh2KiYnRpEmTtHnzZtWuXdt0igAAAAAAF1HjAQBQMWx2P6/c4Bv8TScAAABK17dvX/Xt29d0GgAAAAAAL6DGAwAAuLBKN+N69erVGjx4sOrUqaOQkBC1bt1akydPVmZmpunUAAAAAABuosYDAAAAAJQXM66rlko1cD1r1iz17dtXy5YtU3BwsK688kodOHBA06ZNU6dOnZSammo6RQAAAACAi6jxAAAAAACesNv9vHKDb6g0A9dJSUkaN26cJCkhIUHJycnasmWLfvnlF3Xs2FE///yzxowZYzZJAAAAAIBLqPEAAAAAAMC5Ks3A9dSpU+VwOHTvvfcqLi5ONptNklS/fn3NmzdPdrtdixcv1vbt2w1nCgAAAAC4GGo8AAAAAICnWCq8aqkUA9cZGRn64osvJElxcXEltrdo0UJ9+vSRJC1YsOCS5gYAAAAAcA81HgAAAAAAOF+lGLjeunWrcnNzFRQUpM6
dO5fapnv37pKkjRs3XsrUAAAAAABuosYDAAAAAHgDM66rlkoxcL17925JUqNGjRQQEFBqm2bNmkmSdu3adcnyAgAAAAC4jxoPAAAAAACcz990Aq5ITU2VJEVGRpbZpmhbWlraJckJAAAAAFA+1HgAAAAAAG9gtnTVUikGrnNyciRJgYGBZbYJCgqSJGVnZ1+SnAAAAAAA5UONBwAAAADwBgauq5ZKsVR4cHCwJCkvL6/MNrm5uZKkkJCQS5ITAAAAAKB8qPEAAAAAAMD5KsWM65o1a0r633JypSnaVtS2LAkJCUpMTHQ59qj779eDDz7ocnsAAAAAwIWZrPHuGzVKox+gxgMAAACAqoAZ11VLpRi4btmypSQpOTlZ+fn5CggIKNFm3759xdqWJSUlRVu2bHE59sBBg9zIFACAyin4VLKx2Dk1GhmLHWrwk1C2o5qRuMEqMBJXkuRwGAudX7u5sdhn8i1jsSMc+cZix0aZ+RuXpG41co3EdQRWigWtfILJGq//wIFuZAoAQOVk7hOoFGQz97k/2G6w3ik091nQz26uuLVZZv7aDJZZCnQUmgtuN/f6KrSX/Mx+qdzRpq6x2H6FZupL4HJVKQauO3TooMDAQOXm5mrTpk3q2rVriTbr1q2TJHXp0uWC+6pXr55iY2Ndjh0VFeVesgAAAACAC6LGAwAAAAB4g82PGddVSaUYuA4PD9eAAQP0ySefKDExscSXGnv27NGqVaskScOHD7/gvuLj4xUfH+9y7OycHPcTBgAAAACUyWSNdyYr2/2EAQAAAABAhas0a9lNnjxZNptNc+fOVWJioqz/vwRJSkqK7rzzTjkcDg0dOlTt2rUznCkAAAAA4GKo8QAAAAAAnrLZ/bxyg2+oNAPXnTp10syZMyWdPaM+JiZGsbGxatKkiZKSktSqVSu99dZbhrMEAAAAALiCGg8AAAAA4CkGrquWSjNwLUnjxo3T8uXLNWjQIGVmZmrHjh2KiYnRpEmTtHnzZtWuXdt0igAAAAAAF1HjAQAAAACAIpXiGtfn6tu3r/r27Ws6DQAAAACAF1DjAQAAAADKi9nSVUulmnENAAAAAAAAAAAAAKh6GLgGAMDHHD16VHPnztXjjz+uLl26KCQkRDabTb169TKdGgAAAADATdR4AABUHLvd5pUbfEOlWyocAICq7sMPP9T48eNNpwEAAAAA8AJqPAAAANcw4xoAAB8TERGhfv36aeLEiVq8eLEmT55sOiUAAAAAQDlR4wEAUHFsdptXbhVl9erVGjx4sOrUqaOQkBC1bt1akydPVmZmplv7WbNmjWw2m0u3F154oUT/i/WJiory1kP2CDOuAQDwMaNHj9bo0aOdPx85csRgNgAAAAAAT1DjAQBQcWw2313me9asWRo7dqwsy1KDBg3UsGFD7dixQ9OmTdOiRYu0fv16RUZGurSv6tWrq2vXrmVuT09P148//ihJuv7668tsd8011ygoKKjE/bVq1XIpj4rGwDUAAAAAAAAAAAAAeElSUpLGjRsnSUpISNCYMWNks9n066+/asiQIUpKStKYMWO0aNEil/bXoUMHrV+/vsztL7zwgn788Uc1bNhQffv2LbPdggUL1LhxY3ceyiXFUuEAAAAAAAAAAAAAKh273eaVm7dNnTpVDodD9957r+Li4pwzw+vXr6958+bJbrdr8eLF2r59u8exLMvS+++/L0m67777ZLdX3uHfyps5AAAAAAAAAAAAAPiQjIwMffHFF5KkuLi4EttbtGihPn36SDo7A9pTa9eu1S+//CJJGjVqlMf7M4mlwgEAAAAAAAAAAABUOrYKmC3tqa1btyo3N1dBQUHq3LlzqW26d++uFStWaOPGjR7He++99yRJ3bp1U/PmzS/YdurUqfr1119VUFCg6Oho9enTR7fffnup1702gYFrAAAAAAAAAAAAAPCC3bt3S5IaNWqkgICAUts0a9ZMkrRr1y6PYmVmZmrhwoWSXJtt/c477xT
7ec6cOZoyZYoWLVqk2NhYj3LxBgauAQCoghISEpSYmOhy+wduG6K4e++owIwAAAAAAOXlbo1336hRGv3AgxWYEQAAvsFbM67dPdZKZ5cBj4+PL3F/amqqJCkyMrLMvkXb0tLS3Ip5vgULFigjI0OhoaG67bbbymx38803695771W7du3UoEEDZWRkaMWKFfrTn/6kX375Rf3799fWrVvVsGFDj/LxFAPXAABUQSkpKdqyZYvL7W/q1aUCswEAAAAAeMLdGq//wIEVmA0AAL7DbvPOwLW7x9qiPqXJycmRJAUGBpbZt2hp7uzsbLdinq9omfBbb71V4eHhZbb76KOPiv0cHBysO+64Q/369VPHjh2VnJysF154QbNnz/YoH08xcA0AQBVUr149t5Z2iapbpwKzAQAAAAB4wu0aLyqqArMBAKDqcfdYW9SnNMHBwZKkvLy8Mvvm5uZKkkJCQtyKea79+/dr7dq1klxbJrw0tWvX1sSJE/Xwww9ryZIleuutt2Tz0skA5cHANQAAVVB8fHypy9SUpeBXz66lAgAAAACoOO7WeGeyPJu9BQBAZeGtpcLdPdZeSM2aNSX9b8nw0hRtK2pbHnPmzJFlWYqJiVHv3r3LvZ/rr7/emVNqaqpq1apV7n15ym4sMgAAAAAAAAAAAABUIS1btpQkJScnKz8/v9Q2+/btK9bWXZZl6f3335ckjRw50qNZ0ucuaV5QUFDu/XgDA9cAAAAAAAAAAAAAKh2b3eaVmzd16NBBgYGBys3N1aZNm0pts27dOklSly5dyhXj66+/1v79+2Wz2TRy5Mhy5ypJP/74o6SzS5ybnG0tMXANAIDPOXTokGrXru28TZgwQZK0YcOGYvdPnz7dcKYAAAAAgIuhxgMA4PISHh6uAQMGSJISExNLbN+zZ49WrVolSRo+fHi5Yrz33nuSpO7du6tp06blS1RnZ1jPmDFDktSnTx/5+5u9yjQD1wAA+JjCwkKdPHnSecvMzJR09kPEufdnZWUZzhQAAAAAcDHUeAAAVBy73eaVm7dNnjxZNptNc+fOVWJioizLkiSlpKTozjvvlMPh0NChQ9WuXbti/Ro3bqzGjRtr4cKFZe47IyPDuf3++++/aC4TJkzQnDlzdObMmWL3Hzp0SMOHD9fGjRvl7++v5557zt2H6XVmh80BAEAJjRs3dn6QAQAAAABUbtR4AABUHJuPTtHt1KmTZs6cqSeeeELx8fGaNm2aateurR07dig3N1etWrXSW2+9VaLfwYMHJZ0dnC7LwoULlZmZqWrVqrk0Y3vnzp165ZVX9MADD6hp06aKjIxUenq6du3aJcuyFBwcrNmzZ+vaa68t/wP2EgauAQAAAAAAAAAAAMCLxo0bp7Zt22rGjBn69ttvdfz4ccXExGj48OGaOHGiwsLCyrXfomXChw8f7tI+Hn74YUVFRWnz5s06cuSIDhw4oKCgILVp00b9+vXTY489pmbNmpUrF29j4BoAAAAAAAAAAABApWOzeX+Zb2/q27ev+vbt63J7V1ZpWbNmjVs5DBgwwHnNbV/noxPoAQAAAAAAAAAAAACXC2ZcAwAAAAAAAAAAAKh07HbfnnEN9zDjGgAAAAAAAAAAAABgFDOuAQAAAAAAAAAAAFQ6NmZcVykMXAMAAAAAAAAAAACodBi4rlpYKhwAAAAAAAAAAAAAYBQzrgEAAAAAAAAAAABUOnYbM66rEmZcAwAAAAAAAAAAAACMYsY1AAAAAAAAAAAAgEqHa1xXLQxcX4S9MN9IXMt2mU6Gv0wftz0/x1hs/yM/GImb3yjWSFxJ8jf4d1ZQo4Gx2MCFOEJrGovtZ/DDpcMyFlohualG4uYFm/tdB9rNvf9aBpeNqpH7m7HYjpDqxmL7G3xtnwow83ceavA9Ba4z+ZVGgaG/kQKHw0xgmT3WhgWaO+745WYYi23f+62x2AVX9TES12HzMxJXkhzBNYzFNrkqZqHJFzd8n8HvXQr9gozFNvm
a9DP4mjRVa5l755esgBBjsW2FecZiy8/cs+5XmGsstsn3FeByxMA1AAAAAAAAAAAAgEqHGddVCwPXAAAAAAAAAAAAACodOwPXVcrluS4zAAAAAAAAAAAAAMBnMOMaAAAAAAAAAAAAQKVjszHjuiphxjUAAAAAAAAAAAAAwChmXAMAAAAAAAAAAACodGxM0a1S+HUCAAAAAAAAAAAAAIxi4BoAAB9iWZb++9//asKECerWrZtq1aqlgIAA1alTR/3799e///1vWZZlOk0AAAAAgAuo8QAAqFh2u80rN/gGlgoHAMCHrFq1Sv369XP+3LRpUzVp0kT79+/X8uXLtXz5cs2bN0+LFi1SUFCQwUwBAAAAABdDjQcAQMWyMehcpTDjGgAAH2JZlpo0aaLXX39dx44d0759+7R582adPHlS77//voKCgrRs2TI999xzplMFAAAAAFwENR4AAIDrGLgGAMCHdO7cWbt27dLjjz+uunXrFtt27733Or/MmD17thwOh4kUAQAAAAAuosYDAKBi2Ww2r9zgGxi4BgDAh0RERCggIKDM7YMGDZIkpaam6rfffrtUaQEAAAAAyoEaDwAAwHVc4xoAgEokOzvb+f+QkBCDmQAAAAAAPEWNBwCAZ+xc47pKYcY1AACVyLx58yRJ7dq1U0REhOFsAAAAAACeoMYDAAD4H2ZcAwBQSSQlJenNN9+UJE2YMMFwNgAAAAAAT1DjAQDgORszrqsUBq4BAKgEjh07pmHDhqmgoEC33HKL7rjjDtMpAQAAAADKiRoPAADv8GPgukphqXAAAHxcenq6Bg0apOTkZHXs2FHvvfee6ZQAAAAAAOVEjQcAAFA6ZlwDAODDMjIyNHDgQG3dulVt2rTRl19+6dJ1zxISEpSYmOhynNH33Kkx99/nSaoAAAAAgIu4VDXefaNGafQDD3qSKgAAlQIzrqsWBq4BAPBRWVlZuummm7Rx40a1aNFCK1asUK1atVzqm5KSoi1btrgc68Yb+pQ3TQAAAACACy5ljdd/4MDypgkAAGAMA9cAAPignJwcDRkyRGvXrlVMTIxWrlypqKgol/vXq1dPsbGxLrePuuKK8qQJAAAAAHDBJa/x3Ng3AACVGTOuqxYGrgEA8DH5+fm69dZbtXLlSkVHR2vVqlVq2LChW/uIj49XfHy8y+3zTh13N00AAAAAgAtM1HhnsrLdTRMAAMA4Bq4BAPAhhYWFuuuuu/TZZ58pKipKq1atUtOmTU2nBQAAAAAoB2o8AAAqFjOuqxYGrgEA8CHz58/XwoULJUnBwcEaPXp0mW1nzZqlDh06XKrUAAAAAABuosYDAKBiMXBdtTBwDQCAD8nNzXX+/8CBAzpw4ECZbdPT0y9BRgAAAACA8qLGAwAAcJ3ddAIAAOB/Ro0aJcuyXLr16tXLdLoAAAAAgAugxgMAoGL5221eucE3MHANAAAAAAAAAAAAADCq0gxcHz16VHPnztXjjz+uLl26KCQkRDabjTMRAQAAAKASosYDAAAAAHjKz27zyg2+odJc4/rDDz/U+PHjTacBAAAAAPACajwAAAAAAHCuSjNwHRERoX79+qlTp07q1KmTtm7dqqlTp5pOCwAAAABQDtR4AAAAAABPMVu6aqk0S4WPHj1ay5cv18svv6xbbrlFdevWNZ0SAAAAAKCcqPEAAAAAAFXd6tWrNXjwYNWpU0chISFq3bq1Jk+erMzMTLf3NWrUKNlstgvevvjiizL7Z2Rk6Nlnn1Xr1q0VEhKiOnXqaPDgwVqzZo0Hj9C7Ks2MawAAAAAAAAAAAAAo4mf33Tm6s2bN0tixY2VZlho0aKCGDRtqx44dmjZtmhYtWqT169crMjLS7f02bNhQjRo1KnVbzZo1S73/xIkT6tatm3bt2qWgoCBdddVV+u2337Rs2TJ99tln+sc//qFHHnnE7Vy8jYFrAAAAAAAAAAAAAJWOry4VnpSUpHHjxkmSEhISNGbMGNlsNv36668aMmS
IkpKSNGbMGC1atMjtfY8ePVrPP/+8W30eeOAB7dq1Sx07dtTSpUtVv359WZalt956S/Hx8Xr88cd1/fXXq3379m7n402+exoCAAAAAAAAAAAAAFQyU6dOlcPh0L333qu4uDjZbGcH2OvXr6958+bJbrdr8eLF2r59e4XnsnXrVi1dulR2u10ffvih6tevL0my2WyKi4vTvffeq8LCQk2dOrXCc7kYBq4BAAAAAAAAAAAAVDp+dptXbt6UkZHhvNZ0XFxcie0tWrRQnz59JEkLFizwauzSLFy4UJLUp08fNW/evMT2+Ph4SdJnn31WrmtvexNLhQMAAAAAAAAAAACAF2zdulW5ubkKCgpS586dS23TvXt3rVixQhs3bnR7/6tXr9ZPP/2kkydPqkaNGurYsaPuuecexcTElNq+KEaPHj1K3d65c2cFBQUpJydH27ZtU9euXd3OyVuYcQ0AAAAAAAAAAACg0vHFGde7d++WJDVq1EgBAQGltmnWrJkkadeuXW7vf+3atVq4cKFWr16tJUuW6Nlnn1WLFi00ffr0C+ZTFPN8AQEBatiwYbnz8abLbsZ1QkKCEhMTXW5//8j7NOaB0RWYEQAAAACgvNyt8UaOGqXRDzxYgRkBAAAAACobd2tL6ewy4EXLbJ8rNTVVkhQZGVlm36JtaWlpLsdr0aKFZsyYoT59+qhx48YKCgrS9u3bNWPGDC1YsEDPPPOMwsLC9Mgjj1ySfCrCZTdwnZKSoi1btrjcftCA/hWYDQAAviHDL8xY7NycQmOxqwf5GYtt+QcbiRvy224jcSXpOzU0Frtj4XFjsQur1TIWO7LvRGOx07560Vjs8CAzr6/sfIeRuJc7d2u8AQMHVmA2AAD4Bn8vzxxzR77DMhbbz9zDNio919zn0NqBZmJbNnOLydoK843FdgSYqXUkSeZe2sqzBxqLHViQYyiywd91JeNn886bv7u1ZVGf0uTknP27CQws+283KChIkpSdne1yvD/96U8l7rv22ms1f/58Pfroo/rnP/+pP/3pT7rvvvsUFva/73srKp+KcNkNXNerV0+xsbEut4+KiqrAbAAAAAAAnqDGAwAAAIDLl7eW+Xa3tizqU5rg4LMnHuTl5ZXZNzc3V5IUEhLiVsyyvPzyy5o9e7ZOnTqlVatWaciQIcXyycrKuqT5lNdlN3AdHx9f6rT9suRmnqnAbAAAAAAAnnC3xsvIMnv2OAAAAADA97hbW15IzZo1Jf1vie7SFG0rauup6tWrq02bNtq6dav27NlTIp+srKxLmk95XXYD1wAAAAAAAAAAAAAqP2/NuPamli1bSpKSk5OVn5+vgICAEm327dtXrK03FC0FXlBQUCKfI0eOaO/evaX2y8/PV3JystfzKQ9zF4Jw06FDh1S7dm3nbcKECZKkDRs2FLt/+vTphjMFAAAAAFwMNR4AAAAAoCrq0KGDAgMDlZubq02bNpXaZt26dZKkLl26eCVmQUGBdu7cKUlq0KBBsW3XXXddsZjn27Rpk/Ly8hQcHKz27dt7JZ/yqjQD14WFhTp58qTzlpmZKensL+Lc+7OysgxnCgAAAAC4GGo8AAAAAICn/O02r9y8KTw8XAMGDJAkJSYmlti+Z88erVq1SpI0fPhwr8RMSEhQenq6/P391adPn2LbimKsXr261FnXCQkJkqRBgwYpLCzMK/mUV6UZuG7cuLEsy7ro7fnnnzedKgAAAADgIqjxAAAAAABV1eTJk2Wz2TR37lwlJibKsixJUkpKiu688045HA4NHTpU7dq1K9avcePGaty4sRYuXFjs/uXLl+uZZ54pcf3qvLw8zZo1S0888YQk6aGHHlK9evWKtYmNjdXgwYNVWFioO+64QykpKZIky7KUmJiouXPnym6369lnn/Xqc1AeXOMaAAAAAAAAAAAAQKXji9e4lqROnTpp5syZeuKJJxQfH69p06apdu3a2rFjh3Jzc9WqVSu99dZbJfodPHhQkpSRkVH
s/szMTE2fPl3Tp0/XFVdc4VwOfNeuXc62t956q2bMmFFqPu+88466du2qpKQkNWnSRFdddZVOnDihQ4cOyWaz6bXXXlNsbKw3n4JyqTQzrgEAuFwsWLBAcXFxuuaaa1S/fn0FBQUpPDxcsbGxmjx5sk6ePGk6RQAAAACAi6jxAACoOH52m1duFWHcuHFavny5Bg0apMzMTO3YsUMxMTGaNGmSNm/erNq1a7u8r44dO2ry5Mnq16+fgoODtXPnTv3www+qXr26hg0bpqVLl2rhwoUKDAwstX+dOnWUlJSkSZMmKSYmRjt27FBmZqYGDRqklStX6g9/+IO3HrZHbFbR3HSUKjfzjJG4lu0yPafA4OO2bObOyrHn5xiL7X/kByNx8xsZPHPH5OvLchgL7bD5GYttUmhIsOkU3Na+fXt9//33CgoKUr169VS7dm0dP35cycnJkqS6devqq6++KrGMjCdSz5i7fmhuobmPItWDzL0u/PLNPOf+aYeMxJWk79TQWOyOoRkXb1RBCqvVMhY7su9EY7HTvnrRWGxHgJn3/ux8c8f5yPBQY7Erm4ysbNMpXHIFDnPHWoOhFRJg7nO/X665445977fGYhdc1efijSqAQ745y6aiGfwaQ4UGX9xhoSHGYpeXiRovO8fcd00mjzt+Bl8YJl+TqdmFxmLXDjTzGdjk9+e2wnxjsU3VOpJkciSp0GDwwMJcI3GDwmsYiVsZPfv5z17Zz7RBV3plP/DMZTo6CgCA73r00Uf19ddf68yZM9q/f7++++47HTx4UNu3b9fvfvc7HT9+XHfddZfpNAEAAAAALqDGAwCg4vjyjGu4j4FrAAB8zJgxY9SjRw8FBAQUu79t27Z6++23JUk7duzQzz9752xCAAAAAEDFocYDAABwjb/pBAAAgOuuvPJ/S9ZkZZlb3hsAAAAA4DlqPAAAPMNs6aqFGdcAAFQi69evlySFhYWpVatWhrMBAAAAAHiCGg8AAOB/mHENAICPczgcOnr0qL766is988wzkqS//OUvCgsLM5wZAAAAAMBd1HgAAHgPM66rFgauAQDwUa+99prGjx9f7L7OnTtrzpw5GjhwoKGsAAAAAADlQY0HAID3MXBdtbBUOAAAPio6Olpdu3bVtddeq3r16slms2nbtm16//33derUKdPpAQAAAADcQI0HAABwYcy4BgDAR40YMUIjRoxw/rx9+3Y99thjmjdvnn7++Wdt3rxZfn5+pfZNSEhQYmKiy7Huvm+URo1+wOOcAQAAAAClu5Q13qj779eDDz7occ4AAPg6ZlxXLQxcAwBQSVx99dVatmyZmjZtqm3btunDDz/U3XffXWrblJQUbdmyxeV99+vPsnQAAAAAcClVZI03cNAgb6UJAABwyTBwDQBAJRIeHq6ePXtq0aJFSkpKKvNLjXr16ik2Ntbl/V4RFeWtFAEAAAAALqqoGi+KGg8AcJlgxnXVwsA1AACVTEFBQbF/SxMfH6/4+HiX95l6JsvjvAAAAAAA7quIGi87J8fjvAAAAC41Bq4BAKhEUlNTtWbNGklShw4dzCYDAAAAAPAINR4AAJ5hxnXVYjedAAAA+J+vv/5a06ZN04EDB0ps27JliwYMGKD09HRFR0drxIgRlz5BAAAAAIDLqPEAAKhYfnabV27wDcy4BgDAh6SlpWny5MmaPHmyoqKiFB0dLT8/Px06dEgpKSmSpOjoaH366acKCwsznC0AAAAA4EKo8QAAAFzHwDUAAD7k+uuv18yZM7VmzRr99NNP2r17t3JyclSzZk317t1bv//97/Xggw8qPDzcdKoAAAAAgIugxgMAoGL52ZgtXZUwcA0AgA+pW7euxo8fr/Hjx5tOBQAAAADgIWo8AAAA1zFwDQAAAAAAAAAAAKDSsTPjukqxm04AAAAAAAAAAAAAAHB5Y8Y1AAAAAAAAAAAAgErHjwnXVQoD1wAAAAAAAAAAAAAqHbudkeuqhKXCAQAAAAAAAAAAAAB
GMeMaAAAAAAAAAAAAQKXjZ2PGdVXCjGsAAAAAAAAAAAAAgFHMuAYAAAAAAAAAAABQ6diZcV2lMOMaAAAAAAAAAAAAAGAUM64BAAAAAAAAAAAAVDp+TLiuUhi4BgAAAAAAAAAAAFDp2O2MXFclDFxfhGU38xTZ8rONxJUk2f2MhQ44vsdY7Jx6vzMWu9A/2FhsR+NORuLaLMtIXEmS5TAWOt/gFRoCZO45LzD464ZrwgpOG4sdnmfwmJdv7phnP3nQTODgcDNxJTWuGWQs9rrfCo3Fvr6asdA69dkkY7EzFGgsdvipI0bi1sj4zUhcSVL4deZiw2X+MvM51O5n7ngXdHyXsdhZdVoai20LCDEWu7BNX3OxHWY++AcUZBmJK0mWv7nPN5bN4GdZriXp80x+7RKUm24sthVo7sO3wy/AWOzagea+6yq0m3ncJ7ILjMSVpLAAc7VOtfwcY7FNfodt8qhz2jLz+65jJCpgHgPXAAAAAAAAAAAAACodTqirWsxN/QMAAAAAAAAAAAAAQMy4BgAAAAAAAAAAAFAJ+THhukphxjUAAAAAAAAAAAAAwChmXAMAAAAAAAAAAACodLjGddXCwDUAAAAAAAAAAACASsfPzsB1VcJS4QAAAAAAAAAAAAAAoxi4BgDAx3322Wey2Wyy2Wxq3Lix6XQAAAAAAB6izgMAwDvsNptXbvANDFwDAODDMjIy9PDDD5tOAwAAAADgJdR5AAAApWPgGgAAHzZp0iQlJyfr5ptvNp0KAAAAAMALqPMAAPAeP5t3bvANDFwDAOCjNm7cqP/7v//TzTffrKFDh5pOBwAAAADgIeo8AAAuL6tXr9bgwYNVp04dhYSEqHXr1po8ebIyMzPd2k9hYaGWL1+ucePGqXPnzqpRo4YCAwNVr1493XzzzVq2bFmZfQ8cOOC8RElZt+uuu87Th+oV/qYTAAAAJeXn52vMmDEKDQ3VP/7xD61YscJ0SgAAAAAAD1DnAQDgfb58fepZs2Zp7NixsixLDRo0UMOGDbVjxw5NmzZNixYt0vr16xUZGenSvt577z09+OCDkiS73a7mzZsrLCxMe/fu1dKlS7V06VLFxcXpzTfflO0Cz0nXrl1Lvb9NmzbuP8AKwMA1AAA+6M9//rN+/PFH/e1vf1ODBg1MpwMAAAAA8BB1HgAA3udn982B66SkJI0bN06SlJCQoDFjxshms+nXX3/VkCFDlJSUpDFjxmjRokUu7c+yLF199dV6/PHHNXz4cFWvXl2SVFBQoNdee01PP/20EhMT1b59ez388MNl7mf9+vUeP7aKxFLhAAD4mJ9//lkvv/yyYmNj9Yc//MF0OgAAAAAAD1HnAQBweZk6daocDofuvfdexcXFOWdB169fX/PmzZPdbtfixYu1fft2l/Y3bNgwbdu2TQ888IBz0FqS/P399cc//tE5GzshIcH7D+YSYuAaAAAfYlmWxowZo/z8fCUkJMjPz890SgAAAAAAD1DnAQBQcew279y8KSMjQ1988YUkKS4ursT2Fi1aqE+fPpKkBQsWuLTPyMjICy4BPmjQIEnSrl273E3Xp7BUOAAAPuSNN97Qhg0b9Pjjj+uaa64xnQ4AAAAAwEPUeQAAXF62bt2q3NxcBQUFqXPnzqW26d69u1asWKGNGzd6JWZ2drYkKTQ09ILtHn/8ce3cuVM2m02NGzfWgAEDNHToUNntvjHXmYFrAAB8xJEjRzRx4kRFR0dr2rRpptMBAAAAAHiIOg8AgIrld4FZyKbs3r1bktSoUSMFBASU2qZZs2aSvDdDet68eZLODohfyKxZs4r9XHRd7MWLF6tJkyZeycUTDFwDAOAj/vCHP+j06dN69913FR4e7tG+EhISlJiY6HL70ffcrjGj7vMoJgAAAACgOG/Vee7WeKNG3a8H/v+1LgEAwMW5e6yVzi4DHh8fX+L+1NRUSWeX9y5L0ba0tDS3Ypbm448/1qeffiqbzaann366xHZ/f3/dc88
9uuOOO9SmTRvVr19fJ06c0LJly/Tss89q27Zt6t+/v5KSkhQREeFxPp5g4BoAAB+xZcsWSdIjjzyiRx55pNi2oqVeDh06pKioKEnS4sWLdf3115e6r5SUFOf+XHFjv97lSRkAAAAAcAHeqvPcrfEGDhxU3pQBAKhU7F6ace3usbaoT2lycnIkSYGBgWX2DQoKkvS/zwPltXPnTo0cOVKSNG7cuFI/RzRo0EBz584tdl/9+vU1ZswY9e7dWx07dtTevXv197//Xc8++6xH+XiKgWsAAHzMsWPHytzmcDic2/Py8spsV69ePcXGxrocMyqqrusJAgAAAADc4mmd536NF+VeggAAVFJ+Xro0s7vH2qI+pQkODpZ04e9vc3NzJUkhISFuxTzXoUOHNGDAAKWnp+vGG2/UK6+84vY+mjdvrocfflivvPKKFi9ezMA1AAA468CBA2Vue++993T//fcrJibmgu2KxMfHl7pMTVny0o663BYAAAAA4Bpv1Xnu1nhZ2TkutwUAAO4fay+kZs2akv63ZHhpirYVtXXX0aNH1bdvXyUnJ6tXr15atGhRmdfTvpiiWdp79uwpV39vYuAaAAAAAAAAAAAAQKXjraXCvally5aSpOTkZOXn55c6oLxv375ibd1x/Phx9enTR3v27FGXLl30ySefOGd5l0fRkuYFBQXl3oe3eGkCPQAAAAAAAAAAAABc3jp06KDAwEDl5uZq06ZNpbZZt26dJKlLly5u7Ts1NVU33HCDfv75Z8XGxurzzz9XWFiYR/n++OOPks5eC9s0Bq4BAAAAAAAAAAAAVDp+NptXbt4UHh6uAQMGSJISExNLbN+zZ49WrVolSRo+fLjL+z19+rT69++v7du363e/+52++uorVa9e3aNcMzIy9M9//lOS1L9/f4/25Q0MXAMAUAmMGjVKlmW5dH1rAAAAAIDvo84DAKDqmjx5smw2m+bOnavExERZliVJSklJ0Z133imHw6GhQ4eqXbt2xfo1btxYjRs31sKFC4vdn5WVpZtuuklJSUlq3bq1Vq5cqVq1armUS1xcnBYvXqzc3Nxi9+/cuVMDBw7U/v37FRYWpqeeesqDR+wdXOMaAAAAAAAAAAAAQKXji9e4lqROnTpp5syZeuKJJxQfH69p06apdu3a2rFjh3Jzc9WqVSu99dZbJfodPHhQ0tmZ0Od6/fXXtX79eufPw4YNKzP2woULFRUV5fx506ZNeuuttxQQEKDmzZsrIiJCJ06ccF5nu2bNmpo/f74aN27syUP2ikoxcG1Zlr755hstXbpU69ev188//6zTp0+rRo0a6tChg0aOHKm77rpLNh/94wQAAAAA/A81HgAAAADAG/x8eG3pcePGqW3btpoxY4a+/fZbHT9+XDExMRo+fLgmTpzo1rWpz50tvXPnzgu2zcnJKfbzxIkT9cUXXygpKUlHjx7Vnj17FBoaqo4dO2rQoEF69NFHiw10m2Sziuam+7CVK1eqX79+zp+bNm2qmjVrav/+/UpNTZUk3XTTTVq0aJGCgoK8GjsnO9ur+3OVLd9MXEmS3c9Y6IDje4zFzqn3O2OxTTL1XaDN5FuP5TAWOt/gFRoCDH7vW2Dw1x0WGmIueCWSl3bUWGxb3uV5zLOfPGgmcHC4mbiSfqvZwljsHb9lGYt9ff1QY7HtuWeMxc7wjzAWOzwzxUhcv4zfjMSVJHvz64zFLg+TNV5Glrnjjr/MfA512Mwd74KO7zIWO6tOS2OxAwz9riXJMvj5ptBh5oN/QIG517Xl7933KLdiG/xdmyzpQ0OCzQWvRLKycy7eqIL455wyFtsKrGYstsMvwFhse2G+sdiFdjOP+0R2gZG4khQWYO77vWrKMxa70N/c+6/D4IEn29AXm3UizH2XUNlsO3LKK/tpH13DK/uBZ3z4PIT/sSxLTZo00euvv65jx45p37592rx5s06ePKn3339fQUFBWrZsmZ577jnTqQIAAAAALoIaDwAAAAD
gDXabzSs3+IZKMXDduXNn7dq1S48//rjq1q1bbNu9997r/DJj9uzZcjjMndUMAAAAALg4ajwAAAAAAHC+SjFwHRERoYCAspccGTRokCQpNTVVv/1mbnk+AAAAAMDFUeMBAAAAALzBZvPODb6hUgxcX0z2OdehDgnhWqYAAAAAUJlR4wEAAAAAcPnxN52AN8ybN0+S1K5dO0VERBjOBgAAAADgCWo8AAAAAIAr7GK6dFVS6Qeuk5KS9Oabb0qSJkyYYDgbAAAAAIAnqPEAAAAAAK5ime+qpVIvFX7s2DENGzZMBQUFuuWWW3THHXeYTgkAAAAAUE7UeAAAAAAAXL4q7Yzr9PR0DRo0SMnJyerYsaPee+89l/olJCQoMTHR5Tj3jxqlBx98sJxZAgAAAABccalqvJGjRmn0A9R4AAAAAFAV2JlxXaVUyoHrjIwMDRw4UFu3blWbNm305Zdfunzds5SUFG3ZssXlWIMGDixvmgAAVBpWYDVjsdP9zF27NNjf3CfbID8zH8PivjhqJK4k3X9ttrHYXQ98aiy2Vf82Y7EfW27u9z1tQLix2PacM0biWgX5RuJWBZeyxhtAjQcAuAzkFTqMxc6wm/sc6CgwFlo1HTnGYm88bu6Bd6llps6LcuQZiStJuX61jMXed9rcIrq1QwqNxQ4P8jMWO8Ju7m8NuBxVuoHrrKws3XTTTdq4caNatGihFStWqFYt1w8U9erVU2xsrMvto6KiypMmAAAAAMAF1HgAAAAAgPLiGtdVS6UauM7JydGQIUO0du1axcTEaOXKlW5/6RAfH6/4+HjXY2abmxkEAAAAAFWZiRovI4saDwAAAAAAX1RpBq7z8/N16623auXKlYqOjtaqVavUsGFD02kBAAAAAMqBGg8AAAAA4Cm7mHJdlVSKgevCwkLddddd+uyzzxQVFaVVq1apadOmptMCAAAAAJQDNR4AAAAAwBtYKrxqqRQD1/Pnz9fChQslScHBwRo9enSZbWfNmqUOHTpcqtQAAAAAAG6ixgMAAAAAAOerFAPXubm5zv8fOHBABw4cKLNtenr6JcgIAAAAAFBe1HgAAAAAAG+wM+O6SrF7uoPRo0dr9OjR2r9/vzfyKdWoUaNkWZZLt169elVYHgAAAAAAz1HjAQAAAACA83k84/r999+Xv7+/3n77bW/kAwAAAAAAAAAAAAAXxYTrqsXjGdd169ZVaGiobFz9HAAAr3j++edls9kueHvzzTdNpwkAAAAAcAE1HgAAgGs8nnHduXNnffLJJzpy5Iiio6O9kRMAANDZk8NatGhR6rZ69epd4mwAAAAAAJ6gxgMAwPvsTKytUjweuB47dqw++eQTTZkyRbNnz/ZGTgAAQNKgQYP03nvvmU4DAAAAAOAF1HgAAHgf49ZVi8dLhffu3Vt/+9vfNGfOHN12223asmWLN/ICAAAAAAAAAAAAAFwmPJ5x3bRpU0lSQECAFi1apEWLFikkJES1atWSn59fqX1sNpv27dvnaWgAAAAAAAAAAAAAlymPZ+jCp3g8cH3gwIES92VlZSkrK6vMPjbm7QMAcFHff/+97rrrLh09elTh4eG6+uqrdccdd6hNmzamUwMAAAAAuIkaDwAA4MI8Hrh+9913vZEHAAA4z7Zt27Rt2zbnz0uXLtVLL72ksWPH6tVXXy1zZRMAAAAAgO+hxgMAwPuYLFu1eDxwPXLkSG/kAQAA/r/69evrxRdf1IABA9S0aVOFh4dr9+7d+uc//6k333xTr732mgICAjR9+nTTqQIAAAAALoIaDwAAwDUeD1wDAADviouLK3Ff27Zt9cYbb6hJkyZ65pln9Le//U2PPPKIGjdufOkTBAAAAAC4jBoPAICKY2fCdZXCwDUAAJXIk08+qddff12//vqrli5dqscff7zUdgkJCUpMTHR5v/ePvE9jHhjtrTQBAAAAAC6oqBrvnpGjdP/oB7yVJgAAPouVwqsWrw1cHz58WDNnztSXX36pgwc
PKicnRwUFBc7taWlpeuONN2Sz2fTUU0/J358xcwAA3OXn56drr71WS5Ys0Z49e8psl5KSoi1btri830ED+nsjPQAAAACAGyqqxrthwEBvpAcAAHBJeWX0ePny5brtttt0+vRpWZYlqeTF0GvWrKmPPvpISUlJatOmjYYMGeKN0AAAXHYCAwMlqdgJYuerV6+eYmNjXd5nVFSUx3kBAAAAANxXETXeFdR4AIDLhN10AvAqjweuDx06pOHDh+vMmTMaMmSI7rvvPo0ZM0anTp0q0Xb06NHavHmzli1bxsA1AADl9OOPP0qSGjRoUGab+Ph4xcfHu7zP3MwzHucFAAAAAHBfRdR4pzKyPM4LAADgUvP4RIQZM2bozJkzuu222/TRRx9p2LBhzrMEzzdgwABJ0nfffedpWAAALkvLli3TTz/9JEnq35/lvQEAAACgMqPGAwDAMzabzSs3+AaPB66//PJL2Ww2TZ069aJtmzRpoqCgIO3fv9/TsAAAVEk//fST4uPj9f333xe73+FwaN68ebrrrrskSYMHD1anTp1MpAgAAAAAcBE1HgAAgOs8Xio8OTlZISEhatGihUvtw8LClJ6e7mlYAACqpPz8fCUmJioxMVGRkZGKiYmRv7+/9u7dq7S0NElS9+7dNXfuXMOZAgAAAAAuhhoPAICKZWeydJXi8cC13W5XYWGhS20LCgp0+vRpRUREeBoWAIAqqXHjxpo2bZq++eYb/fzzz9q7d69ycnIUGRmpQYMG6a677tKdd94pPz8/06kCAAAAAC6CGg8AgIrFuHXV4vHAdUxMjH7++WclJyerUaNGF2y7du1a5efnuzw7GwCAy02NGjX0pz/9yXQaAAAAAAAvoMYDAABwncfXuO7Xr58k6c0337xgu/z8fP3pT3+SzWbToEGDPA0LAAAAAAAAAAAA4DJmt3nnBt/g8cD1+PHjFRgYqBkzZujtt98utc2WLVvUr18/ffvttwoPD9cjjzziaVgAAAAAAAAAAAAAQBXh8cB1TEyMZs+ercLCQsXFxemKK65QWlqaJOn6669XdHS0OnXqpHXr1snf31/vv/++ateu7XHiAAAAAAAAAAAAAC5fNpvNKzf4Bo8HriXp7rvv1ueff65mzZrpt99+U15enizL0saNG5WSkiLLstS8eXN98cUXGjJkiDdCAgAAAAAAAAAAAIDPWr16tQYPHqw6deooJCRErVu31uTJk5WZmVnufS5atEi9e/dWzZo1Va1aNbVv316vvvqq8vPzL9jv+PHjGjt2rJo2barg4GBFRUXp9ttv17Zt28qdi7f5e2tHN9xwg3bt2qW1a9dqw4YN+vXXX1VYWKioqCh17dpVvXv3lp+fn7fCAQAAAAAAAAAAALiM+fL1qWfNmqWxY8fKsiw1aNBADRs21I4dOzRt2jQtWrRI69evV2RkpFv7/OMf/6gZM2ZIkpo1a6Zq1arpxx9/1FNPPaVPPvlEX331lYKCgkr027t3r7p166Zjx46pWrVqatOmjQ4fPqz58+fro48+0oIFC3xi8rHXBq6ls9Pxe/bsqZ49e3pztwAAAAAAAAAAAABQjK+OWyclJWncuHGSpISEBI0ZM0Y2m02//vqrhgwZoqSkJI0ZM0aLFi1yeZ9LlizRjBkzFBQUpPnz5zsHmnfu3Kkbb7xRa9eu1aRJk5wD20Usy9KIESN07NgxDRw4UB9++KGqV6+ugoICvfjii5o6daruvvtu7d69W/Xq1fPac1AeHi8VfuDAAS+kAQAAAAAAAAAAAACV39SpU+VwOHTvvfcqLi7OeR3t+vXra968ebLb7Vq8eLG2b9/u8j5feOEFSdIzzzxTbHZ069atNXv2bEnS//3f/+m3334r1u/jjz/Wtm3bVL16dX3wwQeqXr26JMnf318vvviievTooYyMDL366qsePWZv8Hjgunnz5ho0aJA++ugjFRYWeiMnAAAAAAAAAAAAALggu83mlZs3ZWRk6IsvvpAkxcX
FldjeokUL9enTR5K0YMECl/a5Z88eff/992Xus0+fPmrevLlyc3O1dOnSYtuKYowYMUI1a9Ys0bdof/Pnz3cpl4rk8cC1w+HQV199pVtvvVUNGzbU5MmTdfDgQW/kBgAAAAAAAAAAAACVxtatW5Wbm6ugoCB17ty51Dbdu3eXJG3cuNGlfRa1a9q0qaKjo93aZ9HPPXr0uGC/w4cP68iRIy7lU1E8HrhesWKFRowYoYCAAB09elQvv/yymjVrphtvvJFZ2AAAAAAAAAAAAAAqhM3mnZs37d69W5LUqFEjBQQElNqmWbNmkqRdu3a5tc+ifq7uMy8vz3nZ57L6NmzYUIGBgW7lU1H8Pd1Bnz591KdPH508eVLvvfee3n77be3cuVNffPGFvvzyS0VFRWn06NF68MEHFRMT442cAQAAAAAAAAAAAMArEhISlJiY6FafuLg4xcfHl7g/NTVVkhQZGVlm36JtaWlpLsUq7z7T09PlcDgu2Ndms6lGjRo6fvy4y/lUFI8HrovUqlVLTz75pJ588kmtX79eCQkJWrRokVJSUvTyyy/rz3/+s2644QbFx8fr97//vfz8/LwVumJZDiNhCwNCjcSVvH9miTsyo9oYix0gy1jsAnOhZRmK7TD4mAMNvv/4GXzcOYVm3s8kyc9u8I0FLnH4lX7m36Xw11X7jMWe2qv0ZXUuCYeZ1+TEfs2NxJWkJkF5xmJbdW8xFvtwprn33xf6tzAWO9/cw1ZeVGsjce15WUbiSlIlqa58QoBVYCx2vs1rJbhbAgpzjcSVpOw6LY3F9jf4GTSnwFxsf4O1baGh0BlWkJnAksI8X8yw3PIMHmyp8Xyfyd9RdUeOsdjpBt8PbA5znzFi61UzFjvd0Beb1aqZe/81+T1y8xBzdbUj0NzfWYHBL5NzZeY7M3PvZpWPzUuDHCkpKdqyZYvbfUqTk3P2WFg0i7k0QUFnf8vZ2dkuxSrvPov6eTufilIhVXO3bt3UrVs3zZo1S3PnztXs2bP1ww8/6KuvvtJXX33lnIU9ZswYNWrUqCJSAAAAAAAAAAAAAFCVeWkCar169RQbG+t2n9IEBwdLOrtMd1lyc8+eYBwSEuJSrPLus6ift/OpKBV6uneNGjX0hz/8QTfccIMeeughrV27VpKcs7D/8pe/aMSIEfrzn//MMuIAAAAAAAAAAAAALrn4+PhSl/0uj5o1a0r63/LepSnaVtS2ovZZvXp12e12ORyOMvtalqVTp065lU9FqbD1NPLy8vSvf/1LPXv2VJs2bbRu3TpJUkxMjMaPH682bdqosLBQ//nPf9S+fXt9//33FZUKAAAAAAAAAAAAgCrGZjm8cvOmli3PXjIpOTlZ+fn5pbbZt29fsbau7nPv3r1ltiltn4GBgc7Jw2X1PXTokHM2tqv5VBSvD1z/9NNPGjdunOrXr6+RI0dq3bp1stlsuvHGG/XJJ5/ol19+0YwZM7R9+3atWrVKbdu2VXp6up555hlvpwIAAAAAAAAAAAAAl0yHDh0UGBio3Nxcbdq0qdQ2RRN+u3Tp4tI+r7vuOknS/v37deTIEbf2WdS3aHtZ/Ro0aKAGDRq4lE9F8crAdU5OjubMmaOuXbvq6quv1qxZs5Samqq6detq0qRJ+uWXX/TJJ5/opptuks1mc/br1auXvvzyS/n7+5f5iwMAAAAAAAAAAACAEiyHd25eFB4ergEDBkiSEhMTS2zfs2ePVq1aJUkaPny4S/ts2bKl2rZtW+Y+V61apb179yowMFBDhgwptq0oxoIFC5SWllaib9H+RowY4VIuFcnjgevHHntM9erV0+jRo/XNN9/Isiz16tVL//nPf3To0CFNmzZNjRo1KrP/FVdcoaioKKWnp3uaCgAAAAAAAAAAAAAYNXnyZNlsNs2dO1eJiYmyLEuSlJKSojvvvFMOh0NDhw5Vu3btivVr3LixGjdurIULF5bY55QpUyRJr7zyij755BPn/bt27dKDDz4oSXr
kkUdUp06dYv2GDh2qq6++Wunp6br77rudY7KFhYV67rnntHbtWoWGhuqPf/yj956AcvL3dAf//Oc/JZ29WPfIkSP10EMPub3++fXXX69jx455mgoAAAAAAAAAAACAy8X/HxD2NZ06ddLMmTP1xBNPKD4+XtOmTVPt2rW1Y8cO5ebmqlWrVnrrrbdK9Dt48KAkKSMjo8S2W2+9VePGjdNrr72mIUOGqFmzZgoLC9OPP/6owsJCdevWTX/+859L9LPb7VqwYIG6d++uzz//XNHR0WrdurUOHTqk48ePKyAgQP/6179Uv3597z8RbvJ4xvW1116rd999V0eOHNHMmTPLddHuDz/8UKtXr/Y0FQAAqpzPPvtMw4YNU/369RUUFKSoqCh17dpVzz77rAoKCkynBwAAAABwAzUeAABe5oNLhRcZN26cli9frkGDBikzM1M7duxQTEyMJk2apM2bN6t27dpu7/Nvf/ub5s+fr549e+rEiRPavXu3rrrqKr3yyitatWqVgoODS+3XsmVLbd++XY899pjq1KmjH374QdLZZcS//fZb3XLLLR49Vm+xWZaPnorgI3KyMo3Eddj8jMSVpHMuQ37JFTjM/TkGmHzcBl+FpkIb/FUr0M/cL9vkO25eYcUcfF3hZzf3nIeHhhiL7YmCggLdf//9+te//iVJatiwoaKionTy5EkdPnxYeXl5OnPmjMLCwrwSLzsnxyv7KY8py/cZiz21V7Sx2LbckmdNXgr7HRFG4kpSk6A8Y7Etu8cLDZXboRxzn+uqBXh8nmq5mTzWR4aYec7teVlG4kpSUESksdiVTW7mGWOx821m3osCHObef/PtgcZim/wMmltg7rO3v8HHXWjovT/b4PMdZvBYm2fqCZfZ11f1atR4rjiTle2V/ZRHYIG52OlWkLHYNWTucecGVDMWO8fQF5smax27sW9UJXuemfEKSXIEmvs7M/n9uakxixphoUbiVka5p1O9sh/qat9g7hs8AABQpocfflj/+te/1KlTJyUkJKhDhw7ObVlZWVqxYoWCgswV5AAAAAAA11HjAQBQMWwVNFsaZjBwDQCAj1m9erVmz56txo0ba+XKlQoPDy+2PTQ0VEOGDDGUHQAAAADAHdR4AAAArvHaehrff/+94uLidNVVVykiIkJ+fn5l3vz9GS8HAKAsM2bMkCQ9+eSTJb7QAAAAAABULtR4AABUIB++xjXc55UR5H/84x964oknVFhYKC6ZDQBA+eXk5Oirr76SJPXr1087duxQYmKiduzYoaCgIHXo0EEPPPCAYmJiDGcKAAAAALgYajwAAADXeTxw/e2332rs2LGSpEceeUQ33XSTbrzxRkVGRmr+/Pk6evSoVqxYoQ8++EARERH6+9//rnr16nmcOAAAVdH333+v/Px8SdK6dev02GOPKS8vz7n9008/1fTp0/Xuu+/qzjvvNJUmAAAAAMAF1HgAAFQwZktXKR4vFf73v/9dlmVp7NixmjVrlgYOHChJCgwMVJ8+fXTXXXfpnXfe0caNG2Wz2TR58mTFxsZ6nDgAAFVRSkqK8/+PPvqoOnTooE2bNik3N1d79uzRbbfdptzcXI0cOVJbt241mCkAAAAA4GKo8QAAqGAsFV6leDxwvWHDBtlsNues6yLnLxnevn17zZo1S/v27dNf//pXT8MCAFAlZWRkOP8fGhqqzz//XJ06dVJgYKCaN2+uefPmqX379srPz9dLL71kMFMAAAAAwMVQ4wEAALjO46XCjx07pqCgoGLXYbHb7crJySnR9pZbblFAQIAWL16sF1980dPQAABUOcHBwc7/jxo1SjVr1iy23W63a/z48Ro5cqS++uorORwO2e0lz0NLSEhQYmKiy3FH3X+/HnzwwfInDgAAAAAowVSNd9+oURr9ADUeAOAy4GC2dFXi8cB1aGiobDZbsfvCw8N1+vRp5ebmKigoyHl/QECAQkNDdfDgQU/DAgBQJZ37JcaVV15Zapui+8+
cOaOTJ0+qTp06JdqkpKRoy5YtLscdOGiQm5kCAAAAAC7GVI3X//9fzhEAAKAy8XjgOjo6Wjt37lRBQYH8/c/urlmzZtq6dau+++47devWzdn2119/VXp6ukJDQz0NCwBAldS6dWvn/wMDA0ttc+4Z+44yziisV6+eYmNjXY4bFRXlclsAAAAAgGuo8QAAqFg2rk9dpXg8cH3llVfqp59+0g8//KAOHTpIknr16qUtW7boxRdf1NKlSxUcHKy8vDw9/vjjkqS2bdt6GhYAgCopOjpaMTExOnjwoH755ZdS2+zbt0/S2S83atWqVWqb+Ph4xcfHuxw3u5RLfAAAAAAAPGOqxjuTle1+sgAAAIaVvGCKm/r37y/LsvTJJ58473v00UcVFBSklStXqkGDBuratauio6O1ZMkS2Ww2PfbYY56GBQCgyrr99tslSf/+979VUFBQYvs777wjSerZs6dztRMAAAAAgG+ixgMAoAJZDu/c4BM8Hri+9dZbNWXKFNWvX995X5MmTfTBBx8oPDxcqamp+uabb3Ty5EnZbDY9/fTTuvvuuz0NCwBAlfXHP/5R1atX1/79+/XYY48p5//PhrYsS3//+9/1ySefyGazacKECYYzBQAAAABcDDUeAAAVyLK8c4NPsFlWxf02UlNT9dlnn+nQoUOqXr26+vfvr+bNm1dUuAqRk5VpJK7D5mckriTZbMZCq8Bh7s0hwOTjNvieaCq0wV+1Av3M/bJNHv/yCs2dNeZnN/ech4eGGIvtiRUrVmjIkCHKzs5W9erV1bJlSx0+fFgpKSmy2WyaPn26/vjHP3otnsmlwqcs32cs9tRe0cZi23IzjMTd74gwEleSmgTlGYtt2c3NXDmUY+5zXbUAj89TLTeTx/rIEDPPuT0vy0hcSQqKiDQWu7LJzTxjLHa+zcx7UYDD3Ptvvr3067leCiY/g+YWmPvs7W/wcRcaeu/PNvh8hxk81uaZesJl9vVVvRo1nitMLhUeWGAudroVZCx2DZl73LkB1YzFzjH0xabJWsdu7BtVyZ5nZrxCkhyB5v7OTH5/bmrMokZYqJG4lVH+sf1e2U/AFU28sh94pkKr5sjISN1zzz3On9PT0xUbGyubzaakpKSKDA0AQKXWr18/ff/993r55Ze1YsUKbdu2TdWrV9eQIUP0xBNPqGfPnqZTBAAAAAC4iBoPAIAKwjLfVcolPd27oKBA27Ztk83klF4AACqJFi1a6N133zWdBgAAAADAC6jxAAAALszcmokAAAAAAAAAAAAAUE42ZlxXKeYuBOGmBQsWKC4uTtdcc43q16+voKAghYeHKzY2VpMnT9bJkydNpwgAAAAAcAN1HgAAAAAAKFJpZly/9NJL+v777xUUFKR69erp6quv1vHjx7V161Zt3bpViYmJ+uqrr9SuXTvTqQIAAAAAXECdBwAAAADwCDOuq5RKM+P60Ucf1ddff60zZ85o//79+u6773Tw4EFt375dv/vd73T8+HHdddddptMEAAAAALiIOg8AAAAAABSpNAPXY8aMUY8ePRQQEFDs/rZt2+rtt9+WJO3YsUM///yzifQAAAAAAG6izgMAAAAAeMRyeOcGn1Bplgq/kCuvvNL5/6ysLIOZAAAAAAC8gToPAAAAAHBRDDpXKZVmxvWFrF+/XpIUFhamVq1aGc4GAAAAAOAp6jwAAAAAAC4vbs+49vPzq4g83OZwOHT06FF99dVXeuaZZyRJf/nLXxQWFmY4MwAAAABAeVDnAQAAAADcYWPGdZXi9sC1ZVkVkYfLXnvtNY0fP77YfZ07d9acOXM0cOBAQ1kBAAAAAMqLOg8AAAAAALg9cD1lypSKyMNl0dHR6tq1qwoKCpScnKyjR49q27Ztev/993XdddepRo0aRvMDAAAAALiHOg8AAAAAUC4OZlxXJTbL9BRqD23fvl2PPfaY1q1bp/bt22vz5s0XXM48ISFBiYmJLu///lEj9eADD3gjVbc4bOaWZLf
ZjIVWgcPcn2OAycdt8FVoKrTBX7UC/cz9sk2+4+YVmjuA+9nNPefhoSHGYlcm2Tk5xmJPWb7PWOypvaKNxbblZhiJu98RYSSuJDUJyjMW27K7fb6m1xzKMfe5rlqA3Vhsk8f6yBAzz7k9L8tIXEkKiog0FrsiuFPnuV3jjbxPYx4Y7a1U3ZJvM/NeFOAw9/6bbw80FtvkZ9DcAnOfvf0NPu5CQ+/92Qaf7zCDx9o8U0+4zL6+qlejxnPFmaxsY7EDC8zFTreCjMWuIXOPOzegmrHYOYa+2DRZ69iNfaMq2fMyjcV2BJr7OzP5/bmpMYsaYaFG4lZGhQe/98p+/GLaeWU/8Iy5b/C85Oqrr9ayZcvUtGlTbdu2TR9++KHuvvvuMtunpKRoy5YtLu9/0MAB3kgTAACfFnjgO2Oxp/a+2ljsPINfpmf4BxiJe4W/uS8ZM+e+bCx2wEhzqwZlF5gbMKoTau7jfnpuobHYNkNnih3NN/eeEmMscsVwp85zu8Yb0N9baQIA4LNMDh7n+5s7ucDP4Akd2XZzg3p5Bkf1QgzVmCYnaIRY5mq8bH9zf2fZeeae8wCTk2IKThuKzMC1yyr3/Fycp9IPXEtSeHi4evbsqUWLFikpKemCA9f16tVTbGysy/uOioryRooAAAAAADe4WudR4wEAAADAZcxiqfCqpEoMXEtSQUFBsX/LEh8fr/j4eJf3m5NlbukNAAAAALicuVLnuVvj5Wae8TgvAAAAAADgfVVi4Do1NVVr1qyRJHXo0MFsMgAAAAAAj1HnAQAAAAAuxsaM6yrFbjoBV3z99deaNm2aDhw4UGLbli1bNGDAAKWnpys6OlojRoy49AkCAAAAANxCnQcAAAAAAM5VKWZcp6WlafLkyZo8ebKioqIUHR0tPz8/HTp0SCkpKZKk6OhoffrppwoLCzOcLQAAAADgYqjzAAAAAAAeY8Z1lVIpBq6vv/56zZw5U2vWrNFPP/2k3bt3KycnRzVr1lTv3r31+9//Xg8++KDCw8NNpwoAAAAAcAF1HgAAAAAAOFelGLiuW7euxo8fr/Hjx5tOBQAAAADgBdR5AAAAAACPMeO6SqkU17gGAOByceDAAdlsNpdu999/v+l0AQAAAAAXQI0HAEAFcxR65wafUClmXAMAcLkIDg5W165dy9yek5OjpKQkSWeXWAUAAAAA+C5qPAAAANcxcA0AgA+JiorS+vXry9w+Z84cjRo1SiEhIbr99tsvYWYAAAAAAHdR4wEAULEsB0uFVyUsFQ4AQCXy3nvvSZKGDRumiIgIs8kAAAAAADxCjQcAAFyxdetW3X777YqKilJwcLCaNm2qsWPH6rfffnN7X5Zl6b///a8mTJigbt26qVatWgoICFCdOnXUv39//fvf/5ZlWWX2v9jlT6Kiosr9OJlxDQBAJXHgwAF9/fXXkqRRo0aZTQYAAAAA4BFqPAAAvOAyuD714sWLdccddyg/P19169ZVmzZttGvXLv3973/XggULtH79ejVt2tTl/a1atUr9+vVz/ty0aVM1adJE+/fv1/Lly7V8+XLNmzdPixYtUlBQUJn7ueaaa0rdXqtWLfce4DmYcQ0AQCUxZ84cWZalRo0aqU+fPqbTAQAAAAB4gBoPAABczJEjR3TvvfcqPz9fkydP1pEjR5SUlKQjR45o4MCBSklJ0e23337BGdLnsyxLTZo00euvv65jx45p37592rx5s06ePKn3339fQUFBWrZsmZ577rkL7qdo0Pz828cff1zux8vANQAAlYBlWZozZ44k6b777pPdziEcAAAAACorajwAALzEUeidm4/661//qqysLPXo0UMvvvii/P3PLqZdvXp1ffDBB6pevbo2b96sTz/91OV9du7cWbt27dLjjz+uunXrFtt27733OgesZ8+eLcclvoY4n4gAAKgEvv76a+3fv18SS8gBAAAAQGVHjQcAgHdYhYVeufmqhQsXSpLi4uJKbKtZs6ZGjBghSZo
/f77L+4yIiFBAQECZ2wcNGiRJSk1NLdc1tD3BNa4BAKgE3nvvPUlS9+7d1axZM7PJAAAAAAA8Qo0HAAAu5tChQzpy5IgkqUePHqW26d69u2bPnq2NGzd6LW52drbz/yEhIWW2mzp1qn799VcVFBQoOjpaffr00e23337B62JfDAPXAAD4uIyMDOeZdZyJDwAAAACVGzUeAABedImXsr6Udu/eLUkKDAxUgwYNSm1TdALcL7/8ovz8/AvOpHbVvHnzJEnt2rVTREREme3eeeedYj/PmTNHU6ZM0aJFixQbG1uu2AxcAwDg4xYuXKjMzEyFhoY6l365mISEBCUmJroc48EhvRV3+83lTREAAAAA4KJLUePdf+/dGjN6VDkzBADg8uPusVY6u3x3fHx8BWV0dqlu6eyS4DabrdQ2kZGRkiSHw6HTp0+rVq1aHsVMSkrSm2++KUmaMGFCqW1uvvlm3XvvvWrXrp0aNGigjIwMrVixQn/605/0yy+/qH///tq6dasaNmzodnwGrgEA8HFFS8gNHz5c4eHhLvVJSUnRli1bXI6R0uXq8qQGAAAAAHDTpajxBvXvV57UAACofBzeuT61u8faoj4VKScnR9LZGddlOXdZ7nOX+C6PY8eOadiwYSooKNAtt9yiO+64o9R2H330UbGfg4ODdccdd6hfv37q2LGjkpOT9cILL2j27Nlu58DANQAAPmz//v1au3atJPeWkKtXr55by7HUq+PZmXgAAAAAgIu7VDVe1BVXuJsaAACXNXePtUV9yjJu3Di9/vrrbufRs2dPrVmzRtLZAWFJysvLK7N9bm6u8/8Xuh71xaSnp2vQoEFKTk5Wx44dnSfauaN27dqaOHGiHn74YS1ZskRvvfVWmTPFy8LANQAAPmzOnDmyLEuNGzdWr169XO4XHx/v1jI1hTvXlSM7AAAAAIA7LlWNl3s6tRzZAQBQ+VhemnHt7rH2YsLCwsq1bHf16tWd/69Zs6YkKS0tTZZllToIXLScuN1uv+D1qC8kIyNDAwcO1NatW9WmTRt9+eWX5d7X9ddf78wrNTXV7eeAgWsAAHyUZVl6//33JUkjR450++w0AAAAAIDvoMYDAKACOBymMyjVtGnTNG3aNI/20bJlS0lnZ1wfOnRIjRo1KtFm3759kqQmTZooICDA7RhZWVm66aabtHHjRrVo0UIrVqzw6DrZ5y5rXlBQ4HZ/e7kjAwCACvX1119r//79stlsGjlypOl0AAAAAAAeoMYDAADuaNSokerXry9JWreu9BUzi+7v0qWL2/vPycnRkCFDtHbtWsXExGjlypWKiooqf8KSfvzxR/2/9u49rqoy7///e4OAoIiaKQgqoKZmaZJ2sjxlmb9Kp0nT7KDWJM1kWeOoU3NrfsvGU44dZm6FX01W92Sjfp07c6azmmFpo3geRTQpJTyGKCCyYV+/P/yxiwDdG/bm2sDr+XisxwP2Onw+Gzbrsy6udV1LOj/NeXU6wOm4BgAgQJU9R6Rfv35KSEiwmwwAAAAAoEZo4wEA4HvGVeqTJVDdfffdkqTU1NQK63Jzc7V8+XJJ0siRI706rtPp1N13363PPvtMsbGxWrNmjdq1a1ejXEtKSrRgwQJJ0qBBg9SokfcTf9NxDQBAgFqyZImMMVq3bp3tVAAAAAAANUQbDwAAeGvKlCkKDw/X+vXrNWPGDJWWnu9kz8vL05gxY5SXl6devXrpzjvvrLDvjTfeqPj4eL300kvlXi8tLdWYMWP0r3/9S9HR0VqzZo0SExM9yuf3v/+93nzzTZ05c6bc64cOHdKIESO0ceNGNWrUSDNmzKjW++UZ1wAAAAAAAAAAAADqngAeLe0L7dq101tvvaV7771Xzz//vFJSUtSuXTvt3btXBQUFatOmjZYtWyaHw1Fh38OHD+vbb7/VqVOnyr2+bNkyrVixQtL5Kb0feuihKuO/+uqr6tWrl/v7vXv3au7cuXr44YeVmJioli1bKi8vTxk
ZGTLGqHHjxnrttdd07bXXVuv90nENAAAAAAAAAAAAAAFoxIgRSkxM1OzZs7V+/Xrt3LlTbdu21fjx4zV9+nS1bt3aq+OdO3fO/XVWVpaysrKq3DYvL6/c97/+9a8VHR2tzZs3Kzs7W1lZWQoLC1P37t01ePBgTZw4UR07dvQqn5+i4xoAAAAAAAAAAABA3eNy2c6gViQlJbmfZ+2pqjqkx40bp3HjxlUrjyFDhmjIkCHV2tcTdFwDAAAAAAAAAAAAqHNMaf2eKryhCbKdAAAAAAAAAAAAAACgYWPENQAAAAAAAAAAAIC6x8WI6/qEEdcAAAAAAAAAAAAAAKsYcQ0AAAAAAAAAAACg7mHEdb3CiGsAAAAAAAAAAAAAgFWMuAYAAAAAAAAAAABQ5xiXy3YK8CE6rgEAAAAAAAAAAADUPUwVXq/QcX0RRS6HlbgOh7ESV5KMsRfbZS+0FGzndy1JpRbfuNPSzUihFn/eQc4ia7GdwWHWYttUXGrzjxue2NT4cmuxo4vsPbkkLqTAWuzG585YiesKirISV5IOjfiDtdiJh9Otxe5uLbJU2qidtdgljVtZi73229NW4t4cbe/6RmpqMXbdkldir+5Ehtn5jOQ57TX9i4rt/ROpSYi937XN9s7ZEnvX3rZ+5C2NvWu6c4q0FttmK6uxKbYYPdxi7LrjaEmotdgxZ49Yix3SpKW12E6HvZ95s2B79dYVFGIlbljBcStxJelosL3PWWiwvZGlzUKDrcUOdhZai10a3txabKAhouMaAAAAAAAAAAAAQN3DiOt6xd7txwAAAAAAAAAAAAAAiBHXAAAAAAAAAAAAAOog47I3hT58jxHXAAAAAAAAAAAAAACrGHENAAAAAAAAAAAAoO7hGdf1Ch3XAAAAAAAAAAAAAOoeOq7rFaYKBwAAAAAAAAAAAABYxYhrAAAAAAAAAAAAAHWOKWXEdX3CiGsAAALQyZMn9cwzz6hHjx5q2rSpQkNDFRcXp3vuuUdpaWm20wMAAAAAeIE2HgAAwMXRcQ0AQIDJzMzUlVdeqdmzZ2v37t1q06aNunfvrtOnT2v58uXq16+fFi5caDtNAAAAAIAHaOMBAOBHLpdvFgQEOq4BAAgwjz76qHJyctS5c2ft3LlTBw4c0NatW3Xs2DFNnjxZxhhNnTpVmZmZtlMFAAAAAFwEbTwAAADP0HENAEAAOXPmjNauXStJmj9/vi6//HL3usaNG2v+/Pnq1KmTSkpK9NFHH9lKEwAAAADgAdp4AAD4mavUNwsCAh3XAAAEkHPnzskYI0nq2LFjhfUOh8P9utPprNXcAAAAAADeoY0HAIB/GVepTxYEBjquAQAIIK1atVJcXJwk6csvv6ywvqCgQNu2bZMkXXPNNbWZGgAAAADAS7TxAAAAPEfHNQAAAWbOnDlyOByaMmWKXnvtNR05ckSFhYX6+uuvNWzYMB09elT333+/+vbtaztVAAAAAMBF0MYDAMB/jMvlkwWBoZHtBAAAQHn33XefoqKiNGvWLD3yyCPl1sXExGjRokVKTk62lB0AAAAAwBu08QAAADzDiGsAAALQ/v37dezYMQUFBSk+Pl49evRQRESEcnJytGTJEu3evdt2igAAAAAAD9HGAwDAP0ypyycLAgMjrgEACDCPPfaY/vu//1t9+vTRhx9+qMsuu0ySdPbsWT377LOaP3+++vbtqx07dqhDhw6VHiMlJUWpqakex7z57jH6xZhxvkgfAAAAAPATNtp4I+4bq/vGPeST/AEAAGoLHdcAAASQHTt2aNGiRQoJCdHy5cvL/dMiPDxc8+bNU3p6uj777DPNnj1bixcvrvQ4OTk5Sk9P9zjuVTfdXOPcAQAAAADl2Wrj9Rs8pMa5AwBQFzBaun6h4xoAgACSlpYmY4w6d+5c5Z32t956qz777DNt3ry5yuPExMQoKSnJ47iXtG7jda4AAAAAgAuz1cZr3Sba61wBAKiLjIuO6/qEjmsAAALImTNnPN6
2qKioynXJyclKTk72+FhfZp30eFsAAAAAgGdstfG++yHf420BAAACRZDtBAAAwI/KnnWWmZmpb7/9ttJtPv74Y0lSly5dai0vAAAAAID3aOMBAOBfptTlkwWBgY5rAAACyK233qrWrVvL6XRq5MiR2rdvn3vd2bNnNXXqVH322WeSpAcffNBWmgAAAAAAD9DGAwAA8BxThQMAEECaNGmiv/3tb/rFL36hf//73+rWrZs6dOigyMhI7d+/X4WFhZKkxx57TMOHD7ecLQAAAADgQmjjAQDgX4yWrl8YcQ0AQIAZPHiwduzYoYkTJ+qyyy7TkSNHtGfPHkVFRWn48OFavXq1/vznP9tOEwAAAADgAdp4AAAAnmHENQAAASgxMVGvvvqq7TQAAAAAAD5AGw8AAP9wlZbaTgE+VKdHXP/rX/+Sw+GQw+FQfHy87XQAAAAAADVAGw8AAAAA4A3jcvlkQWCosx3X+fn5+vWvf207DQAAAACAD9DGAwAAAACgYauzHdfPPPOMvvvuOw0fPtx2KgAAAACAGqKNBwAAAADwlil1+WRBYKiTHdcbN27UX/7yFw0fPly/+MUvbKcDAAAAAKgB2ngAAAAAAFRt69atGjVqlKKjo9W4cWMlJiZq0qRJOn78eLWON3PmTPejuqpaFi9eXOX+TqdT8+fPV8+ePdWkSRO1aNFCAwcO1MqVK6v7FiVJjWq0twVOp1OPPPKIIiIi9Oc//1mffvqp7ZQAAAAAANVEGw8AAAAAUF0NYbT0ypUrNXr0aDmdTrVu3Vrdu3dXRkaGXnnlFS1fvlxpaWlKTEys1rFbt26tzp07V7ouJiam0teLiop0yy23KC0tTcHBwerevbsKCgq0bt06rVu3TtOmTdOcOXOqlU+d67iePXu2du3apYULFyouLs52OgAAAACAGqCNBwAAAABA5bKzs/XAAw/I6XRq+vTpmjFjhho1aqS8vDyNHj1aH374oUaNGqWvv/5aDofD6+MPHTpUS5Ys8WqfadOmKS0tTQkJCfrggw/UpUsXSdKqVat0zz33aO7cuerbt6/uvPNOr/OpU1OF79mzR3/84x+VlJSkxx9/3HY6AAAAAIAaoI0HAAAAAKgJ43L5ZAlU8+fPV2Fhofr166fnnntOjRqdH5McFRWld955R1FRUdq8ebNWr15dK/kcPXrUPYX466+/7u60lqRhw4Zp6tSpks5PRV4ddabj2hijRx55RE6nUykpKQoODradEgAAAACgmmjjAQAAAABqylXq8skSqFasWCFJmjBhQoV1LVq00MiRIyVJy5Ytq5V8Vq1apeLiYnXu3FkDBw6ssD45OVmSlJ6ergMHDnh9/DrTcb1o0SJt2LBBEydOVO/evW2nAwAAAACoAdp4AAAAAABU7dChQ8rOzpYk9evXr9JtbrrpJknSxo0bqxVj+/btGjNmjAYNGqThw4dr+vTp2r17d5Xbl8Upi/tzsbGxSkhIqHZOdeIZ19nZ2Xr66acVGxurWbNm2U4HAAAAAFADtPEAAAAAAL5gAni0dE3t27dPkhQaGqq4uLhKt+nYsaMk6ZtvvpHT6VRISIhXMbZt26Zt27a5v1+1apVeeOEFTZo0SS+++GKF2dHKciqLW1VOBw8eVEZGhle5SHWk4/rxxx/X6dOn9cYbbygyMrJGx0pJSVFqaqrH298/dpzGP/RwjWICAAAAAH5ks4137wNj9eB42ngAAAAAgB9527aUzk/fXTY1tj/88MMPks5PCe5wOCrdpmXLlpIkl8ul06dP65JLLvHo2G3bttVzzz2nIUOGKDExUZGRkdq3b5/++7//W4sXL9ZLL72kkJAQzZs3r9KcyuJeKKfc3FyPcvmpOtFxnZ6eLkn6zW9+o9/85jfl1p09e1bS+eHy0dHRkqSVK1fqhhtuqPRYOTk57uN54pYht1UnZQAA6pSrW9m7JDgre880zXNFWIvdNKqplbghJ7x/toyvNIuMtxb7XPurrcXOPuO0Frt
lY3t/XxEh9p5KNKTRQStxnUGXWYlbF9ls4w26ZUh1UgYAoE6JCrN3HegKa2UtttNhr227MTvfWuwbY+21bYOLTluJu7ckykpcSTp66qy12N1a2ftdNyo6ZS22K6xmN7vWRKHTzmje8MZWwtZJvhpx7W3bsmwffyoqKpJ0fsR1VcLCwtxfl7WnPVHZM7OvvPJKLVq0SAkJCZo2bZoWLlyo3/zmN4qPj69WTt7kU6ZOdFyXOXr0aJXrXC6Xe31xcXGV28XExCgpKcnjmG3+/3+UAAAAAAB8izYeAAAAACAQeNu2LNunKk8++aRefvllr/Po37+/1q1bJ0lq3Pj8HQwXahOfO3fO/XV4eLjX8SozefJkvfzyy/r++++1atUqPfHEE+513uRUnXzqRMd1VlZWleuWLFmi8ePHq0OHDhfcrkxycrJXw/ZP5Rd6vC0AAAAA4OJstvGO5RV4vC0AAAAAILAZl29GXHvbtryYpk2bejxt909FRf04o0SLFi0knZ9y2xhT6XThZVN3BwUFqVmzZtXMtrzg4GBde+21+sc//qHMzMxy68pyKotbmZ9Oce6tOtFxDQAAAAAAAAAAAAA/5aupwn1t1qxZmjVrVo2Ocdll5x9JVlxcrEOHDql9+/YVtjlw4PxjARMSEhQSElKjeD9VNhV4SUlJhZw2bNig/fv3V7lvWU5l+XvD3oPnAAAAAAAAAAAAAAAVtG/fXm3btpUkffHFF5VuU/b69ddf79PYu3btkiTFxcWVe/26666TJKWlpVW6X3Z2tg4ePFhuW2/QcQ0AAAAAAAAAAACgzjGlLp8sgeruu++WJKWmplZYl5ubq+XLl0uSRo4c6bOY//znP7V7925J0q233lpu3fDhwxUSEqLMzEytXbu2wr4pKSmSpF69eqlTp05ex67zHdfjxo2TMcajZ58BAAAAAAIbbTwAAAAAAM6bMmWKwsPDtX79es2YMUOlpaWSpLy8PI0ZM0Z5eXnq1auX7rzzzgr73njjjYqPj9dLL71U7vXdu3crOTlZ27dvL/e6y+XS0qVLNWbMGEnSHXfcoT59+pTbpk2bNu5ngT/88MPKyMhwr3v//fc1b948SdKzzz5brffLM64BAAAAAAAAAAAA1DkuV+COlvaFdu3a6a233tK9996r559/XikpKWrXrp327t2rgoICtWnTRsuWLZPD4aiw7+HDh/Xtt9/q1KlT5V53Op1KTU1VamqqWrZsqQ4dOqhRo0bav3+/cnNzJUk33XST3n777UpzmjdvnrZs2aKvvvpK3bt31xVXXKH8/Hz3s60nT56s4cOHV+v91vkR1wAA1EcFBQWaM2eOkpKSFBkZqaZNm+qqq67SvHnzVFxcbDs9AAAAAIAXaOMBAIDqGjFihDZt2qQRI0ZIknbu3KlLL71UEydO1I4dO7yekjs+Pl6zZs3S7bffrubNm2v//v3atm2bQkNDNXToUL399ttau3atmjdvXun+4eHhWrdunebMmaPLL79c+/bt04kTJ9S/f3+tWLFCL774YrXfq8MYY6q9dwNwKr/QStzK7oyoLTY/Ei6Ln8aQYHs/81KLb9xp6WakUIs/78auc9ZiO4PDrMUusfg5K7X4t31JZIS94NV07Ngx3Xzzzdq1a5eCgoJ0+eWXKzg4WLt27VJpaal69+6tNWvWKDIy0mcxz+Xn+exY3jqrEGuxbZ0DJalpqJ37B0NOHLASV5J+iIy3FjsyLNha7OwzTmuxWza2977DQ+zdIxt2eJuVuM7Wl1mJK0lhzVpai13XHMsrsBbb1rnozLlSK3ElqcjihWATi+chm+2dohJ7P3NbP/KIknw7gSWdC/XdNbm3ii3+fTV12Lu+CWsaZS12ddlo4+UVnPXZsbzVWCXWYjsd9iYY3Zht71x0Y6y9/30EFdu5tso4a+9/bEfz7d1s0q2Vvd/1pTpjLbYrzF69LSi1c11XF/+nacvB3z3gk+MkvFj56GLULkZcAwAQYB588EHt2rV
LXbt2VUZGhnbu3Klt27bpwIED6tmzpzZv3qzHHnvMdpoAAAAAAA/QxgMAwH9MaalPFgQGOq4BAAggO3fu1EcffSRJev3118tN89KhQwe9+eabCgoK0v/8z/9o7969ttIEAAAAAHiANh4AAIDn6LgGACCApKWlSZJiY2N1ww03VFjfs2dPde3aVcYY/f3vf6/t9AAAAAAAXqCNBwCAfxmXyycLAgMd1wAABJAffvhB0vl/alQlLi5OkvTll1/WSk4AAAAAgOqhjQcAAOC5RrYTAAAAP2revLkkKTs7u8ptDh8+LElMIwcAAAAAAY42HgAA/mVKGS1dnzDiGgCAANKnTx9J5/+psXHjxgrrd+7cqYyMDElSbm5ureYGAAAAAPAObTwAAADP0XENAEAAueaaa9z/2Bg3bpx27drlXpeZman77rtPpaWlkqTCwkIrOQIAAAAAPEMbDwAA/zKlLp8sCAx0XAMAEGD+9re/qW3btsrIyFCPHj3UqVMndenSRV27dlVmZqbuv/9+SVJkZKTlTAEAAAAAF0MbDwAA/3GVunyyIDDwjGsAAAJM586dtXXrVs2dO1erVq3SoUOH1KRJE9111136P//n/2j16tWSpOjo6CqPkZKSotTUVI9jjh/7gB55aHyNcwcAAAAAlGejjffA2HEa//Cvapw7AABAbaLjGgCAANS6dWstWLBACxYsqLBu7ty5kn58VlplcnJylJ6e7nG8oUNu8T5JAAAAAIBHaruNd8uQ27xPEgCAOsi4GC1dn9BxDQBAHeJ0OvXhhx9KkoYPH17ldjExMUpKSvL4uBe6sx8AAAAA4B+08QAAAH5ExzUAAHXIggULdPz4cSUmJmrYsGFVbpecnKzk5GSPj3suP88X6QEAAAAAvOCvNl5ewVlfpAcAQMAzPJ+6XgmynQAAACgvLS1NH330kUpLS92vnT17VrNnz9Yf/vAHBQcH67XXXlNISIjFLAEAAAAAnqCNBwAA4BlGXAMAEGA2b96sp556ShEREUpISFBoaKgyMjJUWFioiIgILVmyRAMHDrSdJgAAAADAA7TxAADwH1NqbKcAH6LjGgCAADNgwACNHz9eX375pb777juVlJSoXbt2Gjp0qJ566il16NDBdooAAAAAAA/RxgMAwH9cTBVer9BxDQBAgLnqqqv017/+1XYaAAAAAAAfoI0HAADgGTquAQAAAAAAAAAAANQ5xsVU4fVJkO0EAAAAAAAAAAAAAAANGyOuAQAAAAAAAAAAANQ5rlJGXNcnjLgGAAAAAAAAAAAAAFjFiGsAAAAAAAAAAAAAdY4pddlOAT5ExzUAAAAAAAAAAACAOscwVXi9wlThAAAAAAAAAAAAAACrGHENAAAAAAAAAAAAoM5xMeK6XmHENQAAAAAAAAAAAADAKkZcAwAAAAAAAAAAAKhzTKnLdgrwIUZcAwAAAAAAAAAAAACsYsT1RTgcDitxS1z25uQPtvOWJUlBFmMXW3wOgsW3rRBLt6/Y/IznukKsxW5u8axbYi806oAj54KtxW7rzLEW2xkVay329qOFVuLGNO1gJa4klTrt3QF7pMDeWbBbo1xrsR35Tmuxs0NjrMWOadneStzgM0etxJUkNWtpL3Yd08zepaCOFdo5F0WF2avzDoe96/5Qi41bm208m+1qW/9DyQ9uaiWuJFk8pchpsV1dHBJqLXaYtch1S5jF4UpBecesxd51roW12P2OrbUW+5tmt1mLHRocbiVu5yh7H/Imtv6hKmnPCTv/S5CkqNgoa7FDjL02fX6xneubS6xErZtcFq+J4Ht0XAMAAAAAAAAAAACoc4zFQYnwPaYKBwAAAAAAAAAAAABYxYhrAAAAAAAAAAAAAHWOq9Teo+nge4y4BgAAAAAAAAAAAABYxYhrAAAAAAAAAAAAAHUOz7iuXxhxDQAAAAAAAAAAAACwihHXAAAAAAAAAAAAAOocRlzXL4y4BgDAD44cOaK3335bTzzxhK6//nqFh4f
L4XBowIABF93X6XRq/vz56tmzp5o0aaIWLVpo4MCBWrlypf8TBwAAAABUQBsPAIDA5Cp1+WRBYGDENQAAfvDuu+/qqaee8nq/oqIi3XLLLUpLS1NwcLC6d++ugoICrVu3TuvWrdO0adM0Z84cP2QMAAAAAKgKbTwAAAD/Y8Q1AAB+0KxZMw0ePFhPP/20Vq5cqenTp3u037Rp05SWlqaEhATt3r1b27dv1/79+/Xee+8pLCxMc+fO1fvvv+/n7AEAAAAAP0UbDwCAwGRcxicLAgMd1wAA+MFDDz2kTz75RH/84x911113qXXr1hfd5+jRo1q8eLEk6fXXX1eXLl3c64YNG6apU6dKkmbOnOmXnAEAAAAAlaONBwAAbNq6datGjRql6OhoNW7cWImJiZo0aZKOHz/u9bGysrLkcDg8WsaPH19h//j4+IvuV1RUVK33yVThAAAEiFWrVqm4uFidO3fWwIEDK6xPTk7W888/r/T0dB04cEAdO3a0kCUAAAAAwBO08QAA8D9Xaf0fLb1y5UqNHj1aTqdTrVu3Vvfu3ZWRkaFXXnlFy5cvV1pamhITEz0+XuPGjdW3b98q1xcVFWnLli2SpBtuuKHK7a644gpFRUVVui4oqHpjp+m4BgAgQGzcuFGSdNNNN1W6PjY2VgkJCTp48KA2btzIPzUAAAAAIIDRxgMAADWVnZ2tBx54QE6nU9OnT9eMGTPUqFEj5eXlafTo0frwww81atQoff3113I4HB4dMzo6WmlpaVWuf/PNNzVu3DiFh4dr1KhRVW736quvasCAAd6+pQtiqnAAAALEvn37JOmC/6woW5eRkVErOQEAAAAAqoc2HgAA/mdKXT5ZAtX8+fNVWFiofv366bnnnlOjRufHJEdFRemdd95RVFSUNm/erNWrV/ss5pIlSyRJv/zlL9WsWTOfHdcTdFwDABAgfvjhB0lSy5Ytq9ymbF1ubm6t5AQAAAAAqB7aeAAA+J8pNT5ZAtWKFSskSRMmTKiwrkWLFho5cqQkadmyZT6Jl5WVpc8//1ySNG7cOJ8c0xt0XAMAECCKiookSaGhoVVuExYWJkk6e/ZsreQEAAAAAKge2ngAAKAmDh06pOzsbElSv379Kt2m7JEkZY8oqak333xTxhi1b99egwYNuuC2ixcv1h133KGbb75Z9913nxYvXqwzZ87UKD7PuAYAIEA0btxYklRcXFzlNufOnZMkhYeH10pOAAAAAIDqoY0HAID/uQJ4tHRNlT12JDQ0VHFxcZVuU/bYkW+++UZOp1MhISHVjmeM0ZtvvilJevDBBxUUdOHxz3//+9/Lff/OO+9o+vTpeuedd3TLLbdUKwc6rgEACBAtWrSQ9ON0cpUpW1e2bVVSUlKUmprqcey7x4zVfeMe8nh7AAAAAMCF2WzjjR83Tr/61a883h4AgIbO21ornZ++Ozk52U8Zlb9OcDgclW5T9tgRl8ul06dP65JLLql2vM8//1wHDx6UdOFpwgcMGKCbb75Zffr0Ufv27VVcXKy0tDTNmDFDW7du1bBhw7RhwwYlJSV5nQMd1wAABIjLLrtMGzZs0P79+6vc5sCBA+5tLyQnJ0fp6ekex77p5iEebwsAAAAAuDibbbyht93m8bYAANRlxuXyyXG8rbVl+/iTN48dkWr+6JElS5ZIOj/9eNlI7gttVyYiIsI9ZfiNN96o9PR0TZ06VZ9++qnXOdBxDQBAgLjuuuv0xhtvKC0trdL12dnZ7jverrvuugseKyYmxqs72lq3ifY8UQAAAADARdls40VH08YDAMAb3tbasn2q8uSTT+rll1/2Oo/+/ftr3bp1krx77IhUs0eP5Ofna8WKFZIuPNr6QsLDw/XCCy9o6NChWrt2rXJzcy86q8zP0XENAECAGD58uCZOnKjMzEytXbtWAwcOLLc+JSVFktSrVy916tTpgsdKTk72apqab0/me58wAAAAAKBKNtt4RTUccQUAQF3hq2dce1trL6Z
p06bVmrY7KirK/XVZp29ubq6MMZVOF142nXhQUJCaNWtWzWylFStWqKCgQBERERo5cmS1j3PDDTdIOj91+TfffKOrr77aq/0v/FRtAABQa9q0aeO+OHr44YeVkZHhXvf+++9r3rx5kqRnn33WSn4AAAAAAM/RxgMAwP9MqfHJ4muzZs3SiRMnvF7ee+899zHKHiVSXFysQ4cOVRqn7LEjCQkJCgkJqXa+ZdN/jxgxQpGRkdU+zk+nNS8pKfF6f0ZcAwDgB4cOHVKvXr3c35c9j2TDhg1q1aqV+/WpU6dq6tSp7u/nzZunLVu26KuvvlL37t11xRVXKD8/330BMnnyZA0fPryW3gUAAAAAQKKNBwAAal/79u3Vtm1bff/99/riiy903333Vdjmiy++kCRdf/311Y5z8OBBrV+/XlL1pwkvs2vXLvfXcXFxXu/PiGsAAPygtLRUJ0+edC8FBQWSzt9l9tPXCwsLy+0XHh6udevWac6cObr88su1b98+nThxQv3799eKFSv04osv2ng7AAAAANCg0cYDACAwmVKXT5ZAdffdd0uSUlNTK6zLzc3V8uXLJalG03u/+eabMsYoPj5eAwYMqPZxJGnu3LmSpMsvv1yxsbFe78+IawAA/CA+Pl7GVG+KmdDQUE2bNk3Tpk3zcVYAAAAAgOqgjQcAAGyYMmWKXnvtNa1fv14zZszQs88+q+DgYOXl5WnMmDHKy8tTr169dOedd1bY98Ybb9Thw4f15JNP6sknn6z0+MYYvfXWW5KksWPHVvoc7Z968cUXFRYWpjFjxpR7hvfJkyf1zDPPaMWKFZKk5557rlrvl45rAAAAAAAAAAAAAHWOyw/Ppw4k7dq101tvvaV7771Xzz//vFJSUtSuXTvt3btXBQUFatOmjZYtW1Zph/Phw4f17bff6tSpU1Ue//PPP9fBgwflcDg0duzYi+Zz+PBhvfzyy5o0aZLi4+N16aWX6uzZs9qzZ49KSkoUFBSk2bNnu0eKe6vOTBU+c+ZMORyOCy6LFy+2nSYAAAAAwEO08wAAAAAAuLARI0Zo06ZNGjFihCRp586duvTSSzVx4kTt2LFDnTp1qvaxlyxZIknq16+fEhISLrr96NGjNWnSJF177bU6d+6ctm/frgMHDigxMVGPPPKItmzZoqlTp1Y7nzo34rp169bq3LlzpetiYmJqORsAAAAAQE3RzgMAAAAAVIep5yOuyyQlJbmfZ+2prKysi26zZMkSd+e1J6677jpdd911XuXhjTrXcT106FCvfoAAAAAAgMBGOw8AAAAAANS5jmsAAAAAAAAAAAAAcJmGMeK6oaDjGgAAAAAAAAAAAECdU0rHdb1S5zqut2/frjFjxujIkSOKjIxUjx49NHr0aHXv3t12agAAAACAaqCdBwAAAAAA6lzH9bZt27Rt2zb396tWrdILL7ygSZMm6cUXX1RwcLC95AAAAAAAXqOdBwAAAACojlIGXNcrQbYT8FTbtm313HPPadOmTTp+/LiKioq0Y8cOPfroozLG6KWXXtLTTz9tO00AAAAAgIdo5wEAAAAAgDIOY+r+5O/z5s3TtGnT1KhRI2VmZio+Pt5nx84rOOuzY3mjxGXv1xLssBbaKpt35dj8kQdZCm7z5+20+PfVPNTe/UJFFn/oNn/fl0RG2Ateh3x7Mt9a7LbOo9ZiO6NircXecazQStyYpqFW4kp2nzmUX+yyFrtbo1xrsR2lTmuxs0NjrMWOcZy2EjfobJ6VuJLUKLabtdj+4q92XlFhgU+OUx3HiuycB6PC7I1YL7Z4Idi4kb2Wls023jmLP/NGlhqYpRbbeCEW/4lS4LR3fdMkxF7bNjIi3FrsuqTorJ3/aUpScN731mJvOdfCWuykw59ai/1Np9usxQ61dB5sG2HvPPR9ob3z7ze5RdZiXxfb1FrsEFNiLfaRIjuf8Q6X2Pt51zV/b325T44z6th/fHIc1EydGXF9IZMnT1bbtm1VUlKiVat
W2U4HAAAAAFBDtPMAAAAAAGhY6twzrisTHBysa6+9Vv/4xz+UmZl5wW1TUlKUmprq8bEfGDtO4x/+VU1TBAAAAAB4wdN2nrdtvPHjxupXDz/sixQBAAAAAJbxjOv6pV50XEtSaOj5aS9LSi48ZUROTo7S09M9Pu4tQ+xNswIAQG2JsDjV36kQe9MJB5XYm97LltjiHGuxS6LaWovtahJiLfaB0y2txb75t+9ai33g9fusxS50NbcSN/LIPitxJUn1cKpwybN2nrdtvKG3DalxXgAABLp3dh+3FvvOy+y18f5nw35rsXtf291a7I7frbMW+7XSHlbiJrfMthJXko4/9l/WYv8/JxKsxV78/860FnvYly9bi72u75NW4o5lqnCP2Xw0HXyv3nRc79q1S5IUFxd3we1iYmKUlJTk8XGjo6NrlBcAAAAAoHo8aefRxgMAAAAAoH6oFx3X//znP7V7925J0q233nrBbZOTk5WcnOzxsfMKztYoNwAAAACA9zxt53nbxisqLKhxbgAAAACAwMBU4fWLvXlBvbB7924lJydr+/bt5V53uVxaunSpxowZI0m644471KdPHxspAgAAAAC8QDsPAAAAAAD8VJ0Yce10OpWamqrU1FS1bNlSHTp0UKNGjbR//37l5uZKkm666Sa9/fbbljMFAAAAAHiCdh4AAAAAoKZ4xnX9Uic6ruPj4zVr1ix99dVX2rNnj/bv36+ioiK1bNlSQ4cO1ZgxY3TvvfcqODjYdqoAAAAAAA/QzgMAAAAAAD9VJzqumzdvrj/84Q+20wAAAAAA+AjtPAAAAABATfGM6/qlTnRcAwAAAAAAAAAAAMBP0XFdvwTZTgAAgProyJEjevvtt/XEE0/o+uuvV3h4uBwOhwYMGHDB/f79739r4cKFGjNmjDp37iyHwyGHw6ElS5bUSt4AAAAAgIpo4wEAAPgfI64BAPCDd999V0899ZTX+z3yyCPavn27HzICAAAAAFQXbTwAAAJTqWHIdX1CxzUAAH7QrFkzDR48WH369FGfPn20detWPf/88xfdLzExUd26dXPvN3HiRO3YsaMWMgYAAAAAVIU2HgAAgP/RcQ0AgB889NBDeuihh9zfZ2dne7TfypUry30fEhLi07wAAAAAAN6jjQcAQGDiGdf1C8+4BgAAAAAAAAAAAABYxYhrAAAAAAAAAAAAAHUOz7iuX+i4BgAAAAAAAAAAAFDnMFV4/cJU4QAAAAAAAAAAAAAAqxhxDQAAAAAAAAAAAKDOYarw+oWOawAA6qGUlBSlpqZ6vP29D4zT2Ice9mNGAAAAAIDq8raN133ICA345X1+zAgAAMD36LgGAKAeysnJUXp6usfbD7r1Nj9mAwAAAACoCW/beHFX9/NjNgAABA6ecV2/0HENAEA9FBMTo6SkJI+3b9Mm2o/ZAAAAAABqwts2XlSr1n7MBgAAwD/ouAYAoB5KTk5WcnKyx9sfP13ox2wAAAAAADXhbRvvr5u/82M2AAAEDp5xXb/QcQ0AAAAAAAAAAACgznHZTgA+FWQ7AQAAAAAAAAAAAABAw0bHNQAAfnDo0CG1atXKvfz+97+XJG3YsKHc6/PmzSu337x588qt3759uyTp8ccfL/f6oUOHav09AQAAAEBDRRsPAIDAVGqMTxYEBqYKBwDAD0pLS3Xy5MkKr5eUlJR7vbCw/LOlCwsLK90vPz9f+fn55Y4PAAAAAKgdtPEAAAD8j45rAAD8ID4+XqYad+rNnDlTM2fO9H1CAAAAAIBqo40HAEBgKmWwdL3CVOEAAAAAAAAAAAAAAKsYcQ0AAAAAAAAAAACgzuH51PULHdcAAAAAAAAAAAAA6hymCq9fmCocAAAAAAAAAAAAAALMqVOntGzZMk2ZMkUDBgxQZGSkHA6H4uPjfXL8rVu3atSoUYqOjlbjxo2VmJioSZMm6fjx4xfcz+l0av78+erZs6eaNGmiFi1aaODAgVq5cmWN8mHENQAAAAAAAAA
AAIA6p75PFb5u3TqNGjXKL8deuXKlRo8eLafTqdatW6t79+7KyMjQK6+8ouXLlystLU2JiYkV9isqKtItt9yitLQ0BQcHq3v37iooKNC6deu0bt06TZs2TXPmzKlWToy4BgAAAAAAAAAAAIAAEx4ern79+mny5MlaunSp/vKXv/jkuNnZ2XrggQfkdDo1ffp0ZWdna8uWLcrOztZtt92mnJwcjRo1SqaSGwOmTZumtLQ0JSQkaPfu3dq+fbv279+v9957T2FhYZo7d67ef//9auVFxzUAAAAAAAAAAACAOqfU+GYJVEOGDNHnn3+uF198UaNHj1b79u19ctz58+ersLBQ/fr103PPPadGjc5P0h0VFaV33nlHUVFR2rx5s1avXl1uv6NHj2rx4sWSpNdff11dunRxrxs2bJimTp0qSZo5c2a18qLjGgAAAAAAAAAAAAAaiBUrVkiSJkyYUGFdixYtNHLkSEnSsmXLyq1btWqViouL1blzZw0cOLDCvsnJyZKk9PR0HThwwOu86LgGAAAAAAAAAAAAUOeUGuOTpSE5dOiQsrOzJUn9+vWrdJubbrpJkrRx48Zyr5d9X7b+52JjY5WQkFDpvp6g4xoAAAAAAAAAAABAnVPfpwr3h3379kmSQkNDFRcXV+k2HTt2lCR98803cjqdFfYtW3+hfTMyMrzOrZHXewAAAAAAAAAAAABAPZGSkqLU1FSv9pkwYYJ7auy65IcffpB0fkpwh8NR6TYtW7aUJLlcLp0+fVqXXHJJuX3L1l9o39zcXK9zo+P6IqKahFdrv5SUFOXk5CgmJqbWP7QNMXZDfM/EJra3GluKWxM2Yzc0lzaLqPa+nPurF/uSyOr9zGseu+qLSn/HrsmFZ13+fXcNr94Z2BfvOXtpxecU1Vbs6vJFbGs17/IB1YxMzatNjSOaVGs/X/yO2lez3Nb1v8m6Fruuv+emFmNXF7GrFzvSYuy6Frcheqh3+2rvW5f/Lv58dw8rcWvCJ7HjuluL/etq7eWL2PHVjFzz2Nd+stZa7GJLcWvCJ7F7L7AWe2y19qLm1abFJssnx5k5c6bS09O92icnJ8cnsWtbUVGRpPMjrqsSFhbm/vrs2bPV2ven+3nMwC+SkpKMJJOUlETsehyX2MRuCLEb4nuGd/hsEpvY9SsusRtebHimoX4+GmLshvieiU3s+h4X3uGzWbuITez6HJfY1Ly6ZPHixSYpKcmrZfHixVUeb9KkSUaS10v//v0vmOf7779vJJkOHTpU+70uW7bMSDJt2rSpcpv//Oc/7pxOnDjhfv3yyy83ksyiRYuq3Peee+4xkszEiRO9zo0R1wAAAAAAAAAAAAAarOTkZJ+Ojm/atKl7em1vREVF+SyHqrRo0ULS+am8jTGVThdeNiV4UFCQmjVrVmHfsvWV+elU5N6i4xoAAAAAAAAAAAAAfGTWrFmaNWuW7TQqddlll0mSiouLdejQIbVvX/ERIwcOHJAkJSQkKCQkpNy+GzZs0P79+6s8ftm+ZXG8EeT1HgAAAAAAAAAAAACAOqd9+/Zq27atJOmLL76odJuy16+//vpyr1933XWSpLS0tEr3y87O1sGDB8tt6w06rgEAAAAAAAAAAACggbj77rslSampqRXW5ebmavny5ZKkkSNHlls3fPhwhYSEKDMzU2vXrq2wb0pKiiSpV69e6tSpk9d50XENAAAAAAAAAAAAAPXI6NGjFR8fr9/97ncV1k2ZMkXh4eFav369ZsyYodLSUklSXl6exowZo7y8PPXq1Ut33nlnuf3atGnjfhb4ww8/rIyMDPe6999/X/PmzZMkPfvss9XKmWdcAwAAAAAAAAAAAEAAatWqlftrp9MpSTp06FC51++99169+uqr5fY7cuSIvv32W504caLCMdu1a6e33npL9957r55//nmlpKSoXbt22rt3rwoKCtSmTRstW7ZMDoejwr7z5s3Tli1b9NVXX6l79+664oorlJ+f73629eT
JkzV8+PBqvVdGXAMAAAAAAAAAAABAADp58qR7OX36tCTJ5XKVe/3MmTNeH3fEiBHatGmTRowYIUnauXOnLr30Uk2cOFE7duyocqrv8PBwrVu3TnPmzNHll1+uffv26cSJE+rfv79WrFihF198sdrvlRHXAAAAAAAAAAAAABCAjDHV2m/dunUX3SYpKcn9PGtvhIaGatq0aZo2bVo1MqsaI64BAAAAAAAAAAAAAFbRcQ0AAAAAAAAAAAAAsIqOawAAAAAAAAAAAACAVTzj2k8mTJignJwcxcTEELsexyU2sRtC7Ib4nuEdPpvEJnb9ikvshhcbnmmon4+GGLshvmdiE7u+x4V3+GzWLmITuz7HJTY1D/CWw1T3id4AAAAAAAAAAAAAAPgAU4UDAAAAAAAAAAAAAKyi4xoAAAAAAAAAAAAAYBUd1wAAAAAAAAAAAAAAq+i49rG1a9fqjjvu0KWXXqrw8HB17dpV06dPV0FBgd9iHjlyRG+//baeeOIJXX/99QoPD5fD4dCAAQP8FlOSjDH68ssv9fvf/1433nijLrnkEoWEhOjSSy/Vrbfeqr/97W/y5yPUly9frgkTJqh3795q27atwsLCFBkZqaSkJE2fPl0nT570W+yf+9e//iWHwyGHw6H4+Hi/xpo5c6Y7VlXL4sWL/ZrDv/71L/3yl790/9yjo6PVt29f/dd//ZdKSkp8GisrK+ui77dsGT9+vE9jlzl58qSeeeYZ9ejRQ02bNlVoaKji4uJ0zz33KC0tzS8xyxQUFGjOnDlKSkpSZGSkmjZtqquuukrz5s1TcXFxtY9bk/OG0+nU/Pnz1bNnTzVp0kQtWrTQwIEDtXLlSr/G/ve//62FCxdqzJgx6ty5s/v3vmTJEo/iwveoedQ8f9a8QKh3EjWvtmqev+qdRM1DzVHvqHe08Xxb7yT7Na8+tvEkezWPeld/UPOoebTxaOP5Sn2sdzWJTc0DvGDgM6+88opxOBxGkomLizO9evUyYWFhRpLp1q2bOXnypF/iLly40EiqsPTv398v8cp8+umn5eIlJiaaq6++2rRs2dL92u23326Kior8Er9nz55GkgkLCzPx8fGmd+/epn379u7YrVu3Ntu2bfNL7J86c+ZMubgdOnTwa7xnn33W/f769u1b6fK///u/fontdDrN/fff736v7dq1M3369DGJiYkmNDTUSDJnzpzxacycnJwq32ffvn3N1Vdf7c4nNTXVp7GNMWbfvn0mJibGSDJBQUEmMTHRXHXVVSYyMtJIMg6Hw/zpT3/yeVxjjDl69Ki54oor3LGvuOIK07NnTxMcHGwkmd69e5vTp09X69jVPW+cPXvW3HjjjUaSCQ4ONj169DAdO3Z07z9t2jS/xS77m//58sYbb3j+xuEz1Dxqnr9rns16Zww1rzZrnj/rnTHUPNQM9Y56RxvP9/XOGLs1r7628YyxV/Ood/UDNY+aRxuPNp6v1Nd6V5PY1DzAc3Rc+8jmzZtNUFCQcTgcJiUlxbhcLmOMMdnZ2e4C9Mtf/tIvsV9//XUzePBg8/TTT5uVK1ea6dOn18oF3ieffGISEhLMyy+/bI4ePVpu3VtvveW+uJ06dapf4qempprPP//cFBcXl3t9x44d7sJ4+eWX+yX2Tz3++ONGkhk+fHit/lNj7Nixfo1TmV/96ldGkunTp49JT08vt66goMC89957FX4f/rZkyRIjyYSHh5u8vDyfH3/QoEFGkuncubPZvXu3+/WzZ8+ayZMnG0mmUaNGZt++fT6PPWTIECPJdO3a1WRmZrpfz8rKcl/sPPDAA9U6dnXPG0888YSRZBISEszevXvdr7/33nvuv/lVq1b5JfZdd91lRo8ebRYsWGDWr19vevTowQWeJdQ8al5t1Dyb9c4Yal5t1jx/1jtjqHmoPuod9Y42np16Z4x/a159beMZY6/mUe/qPmoeNY82Hm08X6qv9a4msal5gOfouPaRsuL+4IMPVli3b98+ExQ
UZCSZ7du3+z2XV199tVYu8PLy8i5YzF944QUjybRs2dKUlpb6NZef27Rpk/uupf/85z9+i/PVV1+ZoKAgM3z4cPPGG2/U639qrFmzxkgy8fHxNbojztcGDBhgJJn77rvP58c+ffq0+27jyu74dLlcplOnTkaSefXVV30ae8eOHe7P8IYNGyqs37Ztm7tRuWfPnhrH8+S8ceTIEfcdqGvWrKmwvuxCLSkpyeexK1PWeOYCr/ZR8yqi5vmezX9qUPNqr+bVdr0zhpoHz1HvKqLe+QdtvIr8VfMaUhvPGHs1j3pX91DzKqLm+R5tvIpo49Xteudp7MpQ84Cq8YxrH8jPz9eHH34oSZowYUKF9Z07d9agQYMknX9+SX3RrFkzhYSEVLl+6NChkqQffvhBx48fr620JEndunVzf11YWOiXGE6nU4888ogiIiL05z//2S8xAsmCBQskSZMnT1ZkZKTlbM7LysrS559/LkkaN26cz49/7tw59/OMOnbsWGG9w+Fwv+50On0au+wZM7GxsbrhhhsqrO/Zs6e6du0qY4z+/ve/+zR2VVatWqXi4mJ17txZAwcOrLA+OTlZkpSenq4DBw7USk6ofdS8ylHz6hdqXu3VvECsdxI1D9S7qlDv6pdArHeSf2sebbyKqHmg5lWOmle/BGLNo41HvQNQOTqufWDr1q06d+6cwsLCdM0111S6zU033SRJ2rhxY22mZtXZs2fdX4eHh9dq7LIC2bRpU3Xp0sUvMWbPnq1du3bp+eefV1xcnF9iXMj27ds1ZswYDRo0SMOHD9f06dO1e/duv8QqKirSxx9/LEkaPHiw/vOf/+jJJ5/UrbfeqjvvvFMzZszQt99+65fYF/Lmm2/KGKP27du7G1G+1KpVK/fv9ssvv6ywvqCgQNu2bZOkKv/2q+uHH36QdP4iryoXys0fys5fZeezn4uNjVVCQkK5bVH/UPMqR83zn9qsdxI1T6rdmheI9U6i5oF6VxXqnX/RxjvPnzWPNl5F1DxQ8ypHzfMf2njn0caj3gGoHB3XPrBv3z5JUvv27au8U6/s7qWMjIxay8u2pUuXSjp/F1WzZs38Hs/lcun777/XkiVL3HepzZkzR02bNvV5rD179uiPf/yjkpKS9Pjjj/v8+J7Ytm2bli5dqrVr12rVqlWaNWuWrrzySj311FMqLS31aazt27e777z74osv1KtXL7388sv65JNPtHr1aj3//PPq0qWL+3deG4wxevPNNyVJDz74oIKC/HM6mzNnjhwOh6ZMmaLXXntNR44cUWFhob7++msNGzZMR48e1f3336++ffv6NG7z5s0lSdnZ2VVuc/jwYUnS3r17fRq7KmXnusru0izTEM91DQ01r3LUPP+pzXonUfNqu+YFYr2TqHmg3lWFeudfDb2NJ9VOzaONVx41D9S8ylHz/Ic2Hm086h2AC6Hj2gfK7iJq2bJllduUrcvNza2VnGzbsmWLFi9eLEn6/e9/79dYL730khwOh4KDgxUbG6vx48crPj5eH3zwgR577DGfxzPG6JFHHpHT6VRKSoqCg4N9HuNC2rZtq+eee06bNm3S8ePHVVRUpB07dujRRx+VMUYvvfSSnn76aZ/GzMnJcX/92GOPqVevXvr666917tw5ZWZm6p577tG5c+c0duxYbd261aexq/L555/r4MGDkvwznU6Z++67T6tWrVK3bt30yCOPKCYmRk2aNNG1116rPXv2aNGiRXrrrbd8HrdPnz6Szl/kVXaX386dO90XUbV1XuFcB4nPQWWoef5ho95J1LzarnmBWO8kznXgM1AZ6p3/0Mb7UW3UPNp45XG+A5+Biqh5/kEb70e08ah3AKpGx7UPFBUVSZJCQ0Or3CYsLExS+Wlm6qujR4/ql7/8pUpKSnTXXXdp9OjRfo0XGxurvn376tprr1VMTIwcDoe2bdumt956S6dOnfJ5vEWLFmnDhg2aOHGievfu7fPjX8yECRM0ffp0XXPNNWr
VqpXCwsJ05ZVXatGiRZo7d64kaeHChcrKyvJZzPz8fPfXERER+uCDD9SnTx+FhoaqU6dOWrp0qa666io5nU698MILPot7IUuWLJF0fnqXC90p5wv79+/XsWPHFBQUpPj4ePXo0UMRERHKycnRkiVL/DKd0TXXXOO+0Bs3bpx27drlXpeZman77rvPfReqv5539HOc6yDxOfg5ap7/2Kh3EjWvtmteINY7iXMd+Az8HPXOv2jj/ai2ah5tvB9xvgOfgfKoef5DG+9HtPGodwCqRse1DzRu3FiSVFxcXOU2586dk1T7z0SpbXl5eRo6dKi+++47XX311e4i7E8jR45UWlqaNm7cqO+//17btm3Ttddeq6VLl2rgwIE+nWImOztbTz/9tGJjYzVr1iyfHddXJk+erLZt26qkpESrVq3y2XHLPuPS+QuOFi1alFsfFBSkp556SpL08ccfy+Vy+Sx2ZfLz87VixQp3Pv702GOP6amnnlKrVq20Z88eHTx4UNu3b9eJEyc0ZcoUbdq0SX379vXLs3D+9re/qW3btsrIyFCPHj3UqVMndenSRV27dlVmZqbuv/9+SVJkZKTPY1eGcx0kPgc/Rc2zx1/1TqLm2ah5gVbvJM514DPwU9Q7uxpKG0+qvZpHG688znfgM/Ajap49tPH8gzbejzjXAXUHHdc+UFbsyqabqEzZup8XxvokPz9ft912m7Zu3aru3bvro48+qpVnwPxcjx499M9//lOtWrXStm3b9O677/rs2I8//rhOnz6tV155pVYLq6eCg4N17bXXSjp/B5uv/PRz261bt0q3KXv9zJkzOnnypM9iV2bFihUqKChQRESERo4c6bc4O3bs0KJFixQSEqLly5frsssuc68LDw/XvHnzdPPNN+v06dOaPXu2z+N37txZW7du1W9/+1t17NhRhw8f1okTJ3TXXXdp8+bNuuKKKyRJ0dHRPo9dGc51kPgclKHm2eWveidR82zUvECrdxLnOvAZKEO9s6+htPGk2ql5tPEq4nwHPgPnUfPsoo3ne7TxyuNcB9QddFz7QNlJ/7vvvpPT6ax0mwMHDpTbtr4pLCzU7bffro0bN6pz58769NNPdckll1jLJzIyUv3795d0/rk0vpKeni5J+s1vfqPo6Ohyy6RJkyRJhw4dcr/25Zdf+iy2p8qmOykpKfHZMbt27Vrh+D/307sX/X1nYtkdryNGjPDrhXZaWpqMMercubM6dOhQ6Ta33nqrJGnz5s1+yaF169ZasGCBMjMzVVRUpJMnT2rFihXq3r27eyqfsul3/K3s/LV///4qt6nv5zpQ8yRqXqDUPH/UO4maZ6vmBVK9k6h5oN5J1LtAqXdSw2jjSbVT82zXO4mah8BDzaPmBUrNo43nW7ZrHvUOQHXRce0DvXr1UmhoqM6dO6evv/660m2++OILSdL1119fm6nViqKiIg0bNkzr169Xhw4d9Nlnn9Xq3VJVKbvI8fXFjnT+eTc/X06fPi3p/MVN2WsXmnrEX8qeGxIXF+ezY8bGxrovcL755ptKtykr7I0bN/brxf3Bgwe1fv16Sf6fTufMmTMeb1v2nJTa4nQ69eGHH0qShg8fXisxr7vuOknnL3wrk52drYMHD5bbFvUPNY+aFyg1zx/1TqLmeaI2a56NeidR80C9o94FTr2T6n8bT6q9mheo9U6i5sEeah41L1BqHm083wrUmke9A3AxdFz7QGRkpIYMGSJJSk1NrbA+MzNTa9askXT+Tqr6xOl06u6779Znn32m2NhYrVmzRu3atbOdln744QetW7dO0vkLcF/JysqSMabS5Y033pAkdejQwf3agAEDfBbbE//85z/dd6yV3THnK6NGjZJ0/hkllV00//Wvf5Uk9e/fX40aNfJp7J968803ZYxRfHy833++ZXfXZWZmVvmsl48//liS1KVLF7/m8nMLFizQ8ePHlZiYqGHDhtVKzOHDhyskJESZmZlau3ZthfUpKSmSzv/
NderUqVZyQu2j5lHzAqHm+bPeSdS8QKp5NuqdRM0D9Y56Fxj1TmoYbTyp9mpeoNY7iZoHe6h51LxAqHm08XwvUGse9Q7ARRn4xNdff20cDodxOBwmJSXFuFwuY4wx33//vbn66quNJPOLX/yiVnJ59dVXjSTTv39/v8YpKSkxI0aMMJJMdHS0ycjI8Gu8n1q3bp15/vnnzcGDByus27Jli+ndu7eRZGJjY82ZM2dqJac33njDSDIdOnTwW4xdu3aZCRMmmG3btpV7vbS01LzzzjumWbNmRpK54447fB772LFjJioqykgyycnJ5uzZs8YYY1wul3n55ZeNJONwOMzatWt9HruMy+UyCQkJRpJ59tln/RanTH5+vmndurWRZPr06VPuM15YWGimTJliJBlJ5n//9399Hv+LL74wH374oSkpKSkX949//KMJCgoywcHBZs2aNT6J5el5Y+LEiUaSSUhIMHv37nW/vmrVKhMWFlatn0V1z1ll59Y33njDq/1Qc9Q8ap6/a57NemcMNa+2a15t1jtjqHnwHPWOekcbz//1rixebdW8htTGM8ZezaPe1T3UPGoebTzaeL7UUOqdN7F/jpoHVI2Oax9auHChcTgcRpJp166d6dWrl/uE16VLF3P8+HG/xP3uu+/MJZdc4l6aNGliJJlGjRqVe33u3Lk+jfvOO++4i1t8fLzp27dvlUt6erpPY//jH/9wx46OjjZXX321ueaaa0xMTIz79djYWLN161afxr2Q2vinxtatW93vr2XLlqZXr16mT58+pkWLFu7Xb7rpJpObm+uX+J988okJDw83kkxUVJTp06eP+2fucDjM/Pnz/RK3zNq1a92xvvnmG7/GKvPJJ5+4/6aCgoJMQkKC6dGjh4mIiHD/zB977DG/xF64cKGRZCIiIkz37t1Nr1693HEjIiLMsmXLqn3s6p43CgsLzfXXX28kmeDgYNOzZ0/TsWNH989i8uTJfos9d+7ccusbNWpkJJmmTZuWe/27776r9s8FnqPmUfP8WfNs1ztjqHm1WfP8We+MoeahZqh31DvaeP6td8bUfs2rr208Y+zVPOpd/UDNo+bRxqON5yv1td7VJDY1D/AcHdc+9umnn5qhQ4eali1bmrCwMHPZZZeZZ555xq93xx08eNB9cr3Q4us7ucouaDxZfH232tGjR82f/vQnM2zYMNOxY0cTGRlpQkJCTOvWrc3AgQPNn/70J3P69GmfxryY2vinRm5urpk1a5a5/fbbTWJiovt9t2nTxgwdOtS8/fbb5e5k84d9+/aZcePGmbi4OBMSEmJatWplhg0bZtatW+fXuMYYM3bsWCP5/67bnztw4ICZOHGi6dq1qwkPDzchISEmJibGDB8+3Kxevdpvcbdu3WrGjx9vunTpYiIjI014eLi57LLLzKRJk0xWVlaNjl2T88a5c+fMnDlzzJVXXmnCw8NNVFSU6d+/v1mxYoVfYz/77LMe7VfZHcvwD2oeNc9fNS8Q6p0x1Lzaqnn+rHfGUPNQc9Q76h1tPP+yUfPqYxvPGHs1j3pXf1DzqHm08fyHNl7dr3c1iU3NAzznMMYYAQAAAAAAAAAAAABgSZDtBAAAAAAAAAAAAAAADRsd1wAAAAAAAAAAAAAAq+i4BgAAAAAAAAAAAABYRcc1AAAAAAAAAAAAAMAqOq4BAAAAAAAAAAAAAFbRcQ0AAAAAAAAAAAAAsIqOawAAAAAAAAAAAACAVXRcAwAAAAAAAAAAAACsouMaAAAAAAAAAAAAAGAVHdcAAAAAAAAAAAAAAKvouAZQK7KysuRwOORwOJSVlWU7HQAA/IaaBwBoKKh5AICGgHoHALWHjmsgAMycOdN98XMxP71QWrJkif+TAwDAh6h5AICGgpoHAGgIqHcAAF+i4xoAAAAAAAAAAAAAYBUd1wAAAAAAAAAAAAAAq+i4BgAAAAAAAAAAAABYRcc1UA/t2rVLEyZMUOfOnRUREaGmTZuqR48e+sM
f/qATJ05Uuo/T6dSqVas0YcIE9e7dWzExMQoNDVXr1q01ZMgQLV26VMaYC8bNzs5WcnKy2rVrp7CwMMXFxWn8+PHav3+/P94mAADUPABAg0HNAwA0BNQ7AGjgDADrnn32WSPJePInefDgQfe2b7zxRoX1c+fONUFBQe5tIiIiTGhoqPv7mJgYk56eXmG/tWvXureRZJo1a2YiIyPLvTZy5EhTWlpaaV5btmwxLVq0cG8bHh5umjZt6j7W3//+d/e6gwcPevsjAgDUE9Q8AEBDQc0DADQE1DsAgC8x4hqoR15//XVNmzZNEREReuGFF5STk6OCggIVFhZq8+bNGjRokHJycjRs2DDl5+eX2zciIkLJycn65JNPlJeXp7y8PJ0+fVonT57Uyy+/rGbNmmn58uX685//XCHumTNndNdddyk3N1ft27fXxx9/rIKCAp05c0Zffvml2rVrp+Tk5Nr6MQAAGgBqHgCgoaDmAQAaAuodAEASI66BQPDTOxPbtGlzwaVVq1aV3pl4+vRp07x5cyPJfPjhh5XGcTqd5uqrrzaSzMKFC73Kcfny5UaS6dixY4V1c+fONZJMaGio+c9//lNhfU5OTrm7FrkzEQAaLmoeAKChoOYBABoC6h0AwJcYcQ0EmKNHj15wqepZLv/3//5fnTp1Sr169dKQIUMq3aZRo0a69957JUkfffSRV3ndfvvtkqQDBw7oyJEj5da9++67kqSRI0eqW7duFfaNjo7Wo48+6lU8AED9R80DADQU1DwAQENAvQMA1FQj2wkAKM8Yc8H1WVlZSkhIqPD6hg0bJEl79uxRdHR0lfufPXtWkvTtt99WWHfmzBktXrxYq1ev1p49e3Tq1Ck5nc4K2x0+fNgdo7i4WDt37pQkDRo0qMq4gwYN0uzZsy/wzgAADQ01DwDQUFDzAAANAfUOAFBTdFwD9cT3338vSSoqKlJRUdFFty8sLCz3/b59+3TzzTfr8OHD7tciIiLUvHlzBQWdn5zh6NGjkqSCggL3Nj/88INKSkokSbGxsVXGi4uL8/CdAABwYdQ8AEBDQc0DADQE1DsAQBmmCgfqidLSUknSqFGjZIy56JKVlVVu//Hjx+vw4cOKj4/X8uXLdfLkSRUUFOjYsWM6cuSIsrOz3dte7O5JAAD8iZoHAGgoqHkAgIaAegcAKMOIa6CeKJviprKpci7m0KFD+vLLLyVJS5cu1XXXXVdhm58//6VMy5YtFRwcrNLS0nIXgT93oXUAAHiDmgcAaCioeQCAhoB6BwAow4hroJ7o27evJGnLli3Kycnxat9Dhw65v+7Vq1el23z66aeVvh4aGqoePXpIktauXVtljDVr1niVEwAAVaHmAQAaCmoeAKAhoN4BAMrQcQ3UEyNHjlTz5s3ldDr129/+9oLT3rhcLp06dcr9fVRUlPvr7du3V9j+zJkzmjVrVpXHGzVqlCRp+fLlysjIqLD+2LFjWrx4sSdvAwCAi6LmAQAaCmoeAKAhoN4BAMrQcQ3UE82bN9dLL70kSXr33Xd1++23a9OmTXK5XJLOX9Tt2bNHCxYsUPfu3bV69Wr3vt26dVP79u0lSQ899JC2bNniXvfVV19pwIABys3NrTL2r3/9a8XFxencuXO67bbb9Nlnn7kvMDdt2qTBgwe78wAAoKaoeQCAhoKaBwBoCKh3AIAyPOMaqEfGjh2rs2fPatKkSfrggw/0wQcfKCwsTE2bNtXp06fldDrd2zocDvfXQUFB+stf/qK77rpLu3fvVu/evRURESFJKiwsVJMmTfTee+9p8ODBlcZt1qyZ/vGPf+iWW25RVlaWBg8erIiICAUFBSk/P1+RkZF67bXX3HcwAgBQU9Q8AEBDQc0DADQE1DsAgMSIa6DeefTRR5WRkaHf/e536tmzp8LCwnTq1Ck1bdpUvXv31uOPP65PPvlE9957b7n97rjjDq1fv1633367mjdvrpKSErVq1Urjx4/Xli1bdPPNN18wbu/
evbVjxw796le/UmxsrEpKShQVFaWxY8cqPT1d11xzjT/fNgCgAaLmAQAaCmoeAKAhoN4BABzmQg+MAAAAAAAAAAAAAADAzxhxDQAAAAAAAAAAAACwio5rAAAAAAAAAAAAAIBVdFwDAAAAAAAAAAAAAKyi4xoAAAAAAAAAAAAAYBUd1wAAAAAAAAAAAAAAq+i4BgAAAAAAAAAAAABYRcc1AAAAAAAAAAAAAMAqOq4BAAAAAAAAAAAAAFbRcQ0AAAAAAAAAAAAAsIqOawAAAAAAAAAAAACAVXRcAwAAAAAAAAAAAACsouMaAAAAAAAAAAAAAGAVHdcAAAAAAAAAAAAAAKvouAYAAAAAAAAAAAAAWPX/Abqv/fyq+2jHAAAAAElFTkSuQmCC", + "image/png": "iVBORw0KGgoAAAANSUhEUgAAB64AAAI2CAYAAADgnaZqAAAAOnRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjEwLjAsIGh0dHBzOi8vbWF0cGxvdGxpYi5vcmcvlHJYcgAAAAlwSFlzAAAPYQAAD2EBqD+naQAAvY1JREFUeJzs3QeYU1XawPE3mQ4z9DJD76CsAoOgNEVEigURsWChFxWl6FpwRVbBLljYdZ0RFeFbcaVYURAEBFREuoBUaeJQBByZXpLvOcdNdkoyk8xk5iSZ/+957pNMcu89J8kk9577nvMei91utwsAAAAAAAAAAAAAAIZYTRUMAAAAAAAAAAAAAIBC4BoAAAAAAAAAAAAAYBSBawAAAAAAAAAAAACAUQSuAQAAAAAAAAAAAABGEbgGAAAAAAAAAAAAABhF4BoAAAAAAAAAAAAAYBSBawAAAAAAAAAAAACAUQSuAQAAAAAAAAAAAABGEbgGAAAAAAAAAAAAABhF4BpBYe7cuWKxWKRJkyamqxJU/v73v+v3tWfPnqarAgBAQFLHUbWsWbPGdFUAwC/RlisbtOUAACg92nMAUP4IXMNvGtSulkqVKknLli1l2LBh8u2335ZZHQ4fPuy2Dq4WdXEF5qiTRW8+r4KL+rzzXiQruERHR0u9evXkkksukdGjR8u7774rqampXtXx0KFDMm3aNOnRo4feV0REhMTExOj/51tvvVX+/e9/S1paWhm9QwB8JTc3Vz744AMZOnSotGrVSqpVqybh4eFSp04d6d69u0yZMkV27txpupp+Yfjw4R7/DhOcME8FMkp6HFWftYP6LAs+HxYWJjVr1pTmzZvLddddJ0888YRs27bNq/rx3QMCA205+FtbrqhtXDl16pQ899xzcvXVV0uDBg0kKipKKleurI9vAwcOlMTERPn9998Lbafacl988YXMmDFDBg0aJI0bN3aWp74XAPwD55Seoz0XWMqyPVfcNq6oY+LYsWOlbdu2UqNGDWebsHPnzjJp0iT5/vvvXW63Z88eefvtt2X8+PHSpUsXff7oKBNAxRVqugJAXnXr1nXet9lscvbsWTlw4IBe5s2bpwOBrhqBVatWldatW0v9+vVLXYcqVaroxmpRinseZUs1MvL+r+Sl/meys7P1CZI6UXIlJCSk0GO1atVyPp6VlSUnT56UpKQk2bx5s7z11lty3333yUMPPSSPPfaYhIa6/+lUZav1/vnPf0pOTk6+/1H1nOP/WTWcYmNjZc6cOXLttdeW4F0AUNY2bNigL7bv27fP+Zj6bVGdUM6cOSPffPONXtTFTnXBcsGCBfr3qaKzWq1Su3btItcp7nmUPXWMdHUsVcfAc+fO6fvVq1d3+T+tjmkFRUZGOh+32+3yxx9/6GPyzz//LEuXLpXp06fLZZddJgkJCXLxxRcXWTe+e0Bgoi0Hf2jLebqNOlY9++yz8vTTT+frUKw6MauL5UeOHNHLxx9/LA8//LDMmjVLRo4c6Vxv48aNcs011xT7egGYwzllydCeCwxl2Z5zxd1z6vt1xx13yKZNm/Idd9X6ycnJ8sMPP+jl1VdflSuv
vFJfD1XHbYe7775bvv76aw9eMYAKxQ4YNm3aNLv6V3T175iTk2Nfv369vWPHjs51vvnmG5/X4dChQ879v/POOz7ff6B/NldccYU9UKi6elpn9Vk7Pnf1P5CXzWaz79mzx/7GG2/YL7roIud6PXv2tGdkZLjcX2Zmpn7esW6/fv3sX3zxhT01NdW5zunTp+3vvfeevUePHnqdiRMn+uBVA/C1Tz75xB4REaG/pzVr1rQ/++yz9n379uU7Pv3www/2Rx991F6lShW93rlz5+wV2bBhw/T70LhxY9NV8SuOY8Lq1avtgUDV05s6q89bras+/4KSk5Ptq1atso8ZM8b5fQoPD7d/+umnbvfHdw8ILLTl/FdFbssVR7X17rjjDuf2l156qX3x4sX5jifqGPbhhx/ar7/+er3ODTfckG8f6hhZvXp1+1VXXWV/6KGH7AsWLLDHxsbqddV7D8Aszim9R3vOtYrcnivOxo0b7dWqVdPbV65c2T5lyhT79u3b9XFWyc3Nte/atcv+9NNP2+vWravX27p1a759qOPohRdeaL/zzjvts2bNsj/wwANuzy0BVByMuIZfUz20unXrJh999JE0bNhQP6Z6PHft2tV01RDkVC97NfJDLSpduKOXvUptN2HCBD1irKCJEyc657xR606ePLnQOqpX4ZAhQ/SyePFiUlIBfmj//v1y5513SmZmplx44YWyfPlynTqy4PFJTSegFpVlIe8IHAD/G/moetWrRWUuUWnDjx07Jrfddpts3bpVpxDOi+8eEFxoy8FfvfDCC3rqJkWlL1Vtt4IpSdUxTKUKV8vatWv1CLG81JRQaoR4Xo8++mg51B5AcTinBMqeylqgMhWo6TTUFIlffvmlThNecPS++g6qRR1v1XXSgsdb9f3MmxmFKV0AKMxxjYCgTjDVvBhKSkpKoecd81uZmmPFMbebml9E+eqrr3T6Z5U6R6VaueCCC+TJJ5+UjIyMYg/6Tz31lFx66aU65YvaVr2mPn36yL/+9S+dYsWVJUuW6IvBKkWMI/Wa+vvDDz8stu5qDhI1n5ea50elRWvXrp1uyKsUbZ5Q84Wpkw91cqK2V3ORtGnTRgdxjx496nKbgp/X6tWr9QWBuLg4fbJS3Lwp5U3VaebMmc6U3ip1eN5UU8ru3budwWzV4HEVtC7opptukqlTp5ZRrQGU1OOPP67THKvfYPU7WvAiR0Hq91pdlM+bOsvb37mDBw/KPffcowN5KoWpulgaHx+vjwmqLu788ssv+vdG/Qar+RgjIiJ0o7Fjx476cZWSqyCVNkzNOaz2r8pRxw01dYFK36zSdKljmAmOeaxUB6Dz58/rz0EdT9T7oc4B1HHN3bxYeakGswqMqrkm1bbq81Gv7f7775fvvvvO5TYnTpzQF6wc76Na1H3VaUlNHVEU9X6qbdV8zup/Rn3GN998s55qwhMqna66eK7SjTqO4+r8QR37VbrCPzv5F+aYi0z9r6lzI/WZXnTRRTr1YXHzeZqgPoNFixbpuqWmpur/7bL47gHwP7TlaMv5k99++01PX6FcddVVLoPWBV1++eXy2muv5XusqNTkAMyiPUd7jvZc2VPnG+r/V1Gvs2DQuiB1jqGum6rXmBfHUwAumR7yDRSVXs7hl19+ca7z6quvuk0TVtJ0NqVNL5c3DdsLL7xgt1gselHpUtStY99XXnmlTkfkyvLly3WqMce6oaGhOp1RWFiY8zGVqqxgaupbb73V+bzVatX7ULeOx4YMGWLPysoqst6ORdVXlavuX3755TrFS1Gp2v7v//7PmXpJLep+VFSU8++YmBj9uor6vF555RXne1S1alX9ekuSnqY80supNFKO9adOnZrvuXvvvVc/HhISYj98+HCJ6w/ArBMnTjh/Q0eNGlXi/XjzO/ef//wn32+p+u3M+3fDhg3tu3fvLlTGtm3b8h031O+P+jvvcafg7+mxY8fsjRo1KnTcUNs6HitpStHSppZzlK+mU2jR
ooW+HxkZaa9UqZLzOZVm2tVxRVHTMtx88835jmvqvVTvuePvdu3aFdpuzZo1zvRmjhRnanH8rd6fdevWuSxTHTscqdUc9XOkGlT3P/744yLTtJ05c0Yfb/PWOW991TJgwAB9vC/IUe5LL71kb9WqlbNMx2vxNm1qeaWWu/baa53nDGlpaT7/7gEoX7Tl/kRbzv/bcg7qM3Zs5+74XlKOYyKpwgFzaM/RnqM9V/apwrOzs52vU6X69qW8x3cAFRcjruHXcnNzdW+6G2+8Uf9dp04dGTp0qPir7du36/Rgajl16pTutadSpqhec44emu+++26h7VS6zBtuuEGvr3qoff7555KWlqZ7g6enp8umTZvkwQcf1L3u8nrsscfkP//5j+6Jp0buql7+Kl2Z2k495+j15mpU7yeffKJHDiiqJ6HqUa/KVz1B//nPf8qGDRv0yAB3VqxYoT8L9RmpHoyHDh3SdVWjqPbs2aP3qXpYOvbtiur1qF7XsGHD9DrqvVL78NdRyCqFlPofVL7++ut8zzl6tHbo0EH3CgUQmNTvtOoxrTiOPaVR3O/cli1bnGnsVDrVHTt26N9hdQxQv9Oqt7dKrXz99dcXGqWm9qt+t1VPe3WsVKOr1DFAjQhTWSFeeumlQr2e1agyVQ/Vu3vlypWSlZWlt1Hlqx7d6nf/sssuE5PGjx+ve6mvWrVKH1PU6964caOeukHVd+zYsc7PKK8RI0bIwoULdTqyRx55RL9v6r1U7/np06d1L/guXbrk20ato0ZOqHVU+rL169fr8tSi0oKqMtV7rI7Rx48fz7etOv6pY9yRI0ekevXqOoWoqq8aUbdr1y494k597u6o7VVqNVVO+/bt5dNPP9Xbq7qo8tX5gjrmqP8D9XrcUZ+pep1qNInaTtVXvS7H8crfOLKXqP85da5RVt89AObRlqMt548c7TY1Gq579+6mqwPAx2jP0Z6jPVf21LmNI5MMbTcAZcJ05BzI21O8bt26zqV27drOHoOqt9sdd9zhdiSrL3vpq7Ly1sPVUtRrcNe7etCgQfr53r17F3que/fu+rmWLVvaf//9d4/qrEYuOHrUq970rjzwwAP6edUb9Ndff8333IUXXujsiZmbm1to2zfeeMNtb021vqqrei4hIcFtHVWvQrXOxIkT3faeU++LL5V1L/2rr75ar1+/fv18PQ0dPWLHjBlTqvoDMOvxxx93/i4cP368xPvx9HeuX79+eh3VI131MC9oy5Ytzt/6F198Md9zjlFR3377rcf1uuCCC5y94H3N0UNf9fov7jha8LUojvdLHf9PnjxZ6PkdO3Y411m/fn2+51auXOl87vXXX/e4znfffbezF35SUlKh59WIBkeP+/Hjx+d7To2scJSpyi9IfZ7Nmzd329t93rx5+vE2bdq4PfZv2rRJH19Uz/uC74mjZ7w6V1L/J75S1j30v/nmG+f+33zzTZ9/9wCUL9pytOX8oS1Xq1Ytt5/3F198kW+7Bg0a6G1Uu87XGHENmEd7ruRoz1Xs9pwaHe/u8y44qnrOnDnOMlT7zpcYcQ1AYcQ1/IrqyehYVG861XtNUT0VVU+u4uYl8QXVyy1vPVwt7qi5aP7617+6fE717lNU78u89u/fr3sEKs8884zH8zQuXrxYcnJy9PwralSAK2o+GVUn1WtTzSvpoOqg5mR2rKN6MxY0ZswYqV+/vsv9qt6Eqt61atWS0aNHu62jY0TF8uXL3a4zZcoUCSRqbh1F9Wh1UPcdc9Y4ngcQmNRoJwdffZ/d/c6pntiO30c1p5aa86kglcVB9eJ2jLrKS81nqSQlJXlcl5Js4y3Ve76446irOU4dVA98V73L1VxYTZs2dXksffvtt/XtX/7yFz23nCfU77bqVa+oueDUvHAFqfnw1HPK+++/n+85x99qZIWaI7Mg9XmqUWzuvPXWW/pW1dfdsV/NbadGWaiRCWr0iCv9+vXT/yeBIu/3Ku+xtCy+ewDKF2052nKm
qFHq7j7vgnOTO443HGuA4ER7rvRoz1XM9pw6Xrr7vNVxNi/abgDKGoFr+BV10pF3Uel3VOo1lZrls88+k8svv1w++uijMq3DO++8U6geBRd31MlIdHS0y+fq1atX6CKt8u233+rbkJAQ6d+/v1dpWZROnTpJlSpVXK6jUt2o9NZ51897PzQ0VHr06OFyW3UBpGfPni6f++abb/StugClXpc6OXS1qAsmikq740pUVJROiQQAwaqo3zmVVs5xTOndu7fbfVx99dXOxr26eO1w3XXX6Vt1jFRp5tQUBio4UBTHNuoiubqgsGzZMn2R35fUdAnFHUdVOjR3VEo2d4o7ljpenydUWlTHfjx5/1XjXG1T8Fjaq1cvt9u6e04FcxxpstV74e44qpa9e/cWeSxVF1oAwB/QlqMtZ4o6Prv7vFUKWQAoKdpztOcqSntO/R+6+7y3bdtmunoAKhgC1/Brqge6midkzpw5es4MNWfL8OHDPT4pU/PBuDtxUPOF+FrBecvyUhcWFNWzPq8TJ07oW9XjvXLlyh6XpeZdU9z1pM/buzDv+nnvqzJVL/7iti3o119/1bfqhLuoHphqXhZFXbRypWbNmi5HCPgzx0mxqnve3oVqbrq8zwMITHm/2774Phf1O5f3d7mo33LHb7E6fuSt0wsvvCBXXnml7u0+a9YsfYFaXfxWF7mnTZtWaA4vx0iAW265Rf9+v/nmm/oiu+q1r3q/q+ccjeq83B1HJ06cKGXBk2Np3gs+eY+l6iKLp7x9/wtu48lx2N1x1DEPnaKOlUUdSx2v1d1FLH+e+8yVvP/Deb9vvv7uATCPtpx7tOXMcRxvONYAwYn2HO25gmjP+R5tNwBlLfBaGaiwHD2+Vc/wzz//3KNt1MmfuxMHR+o60xwBz0DieO9UL8riemEWNbJBjUwINI50Rs2bN8934t2qVSt9X40qARC41GgrB198n8vyd05doFi1apWsW7dOpzBTPbXV79HmzZvlqaeekpYtWxZKRxcWFib/+c9/dI/pJ554QvcgVynQdu7cqQME6vXPnDkz3zbujqPqeOwvAu1Ymvcc5IsvvvDoOOpuVEOgHUu3b9/uvJ/3WOrr7x4A/0Jbzn9U5LZc3uMNo8eA4ER7jvZceajI7TmFthuAskbgGgEjb6+7vKldiqJOCtydMDRp0kT8gWMOFjVfSGpqqsfbOXrk/fLLL0Wu53g+bw8+x31VpppnxR1XvTvz1tldmptgpdIIOebFK5h6zzEfjjphq2jvCxBMVI93R4/6Dz/8sEzLyvu7XNRvueM5dRHD1fxR3bt3l+eff17PsanmWfv44491j3s1QmrkyJEu5/Ns166dPPnkk/LVV1/pbVauXKlTuKoGuOqpnze46O44OnfuXPEXJTkuefv+F9zGcd/dsbKo51QPdcdog4p2zFi6dKm+VaMEL7vsMiPfPQDlj7ZcfrTlzHG029Q87I75yQEED9pztOcKoj3neyorgGNeb9puAMoCgWsEjLwnGt6kYfN3Xbt21bfq5FL10vNU3vnO3PWSVCeveedPK7itSlOkena6YrPZZM2aNUXOv6JS+eSdby3YqUaBozfknXfeme+5e++9V/cQVZ+j6hnrKfU+A/AfdevWlZtuuknff++992Tfvn0eb1vUvJmuqLnSHBdV1AUHd9RFCMfFCdXDvri0rAMGDJAlS5bovzMyMoq9KKsa3OoirgooqmCieh2OMgPtWPrpp596vE3Tpk2dF448ef/VxQm1TcFj6erVq91uq0ZQuKI+x86dO3td50C3ceNG50jL2267Tf+/mvjuASh/tOXyoy1nzogRI/ToxLydIzxBuw0IDLTnaM8VRHvO99T/nJpj3fHer1271uNtOZ4C8ASBawQMdcJZ8OQiGLRo0UL3ilQee+wxj+d8Uyfi6kRBncSqnpmuPPPMM3rOFXVC5ThxVy6++GK54IIL9P2nn37a5UnD22+/7bbHourBquqtTJ48ucie/sEw
34m6EPXggw/KZ5995kx16Hj9edPkOFIgqvfulVdeKXa/H330kcyYMaOMag2gpNT3Mjo6WvdwHzRoUJE9sB1zWqnfWG9TranUcH379tX3X3zxRZdzXqme8osXL9b3hwwZ4nxcXawuqsEXFRXlvJ93TjbHPFyuqIscjjRlgTZn5ahRo/Ttrl275F//+pdH26jORrfeequ+n5CQ4JxXreA8oOq5gu+/4thWXUhyFRxQ/z/qc3XH0dBXgdzi0uYG+nFU+fHHH2Xw4MH6QpoKWk2dOtXYdw9A+aMtlx9tOXPU3OCPP/6482K7aucVF6z65ptvymwuWAC+R3uO9pwD7bmyo9Lb16tXz/neqs+uKOr9VIN+VLsQAIoTWEcxVEjqxEM1LN999139t0or2aVLFwkmr776qu5VuX//ft0DftmyZZKdne0Mmv7www9y99135+sxWb9+fWfj+bnnnpNp06bpXvmKulUXhB0nWA888IDExcXlK1Nd5HD0LLz99tudFzbUxZM33nhD7rvvPn0S7oq6yKLWUbfqBE9drFGNfkedlZ9//lmvo0YHvP766xJo1MUL9XkkJibqXrSzZs3Sj6terOrzcuW1116THj16OC8CXXPNNbJ8+XJ9cpb3ZHXhwoV6HqIbb7wxKE9egUCn5qyfP3++hIeH68ZX+/bt9UXlAwcOONdRv81qagA1r1izZs2cPeJLclFFXZBW+1YXPRyNOHURQzV+1e+Iuqih5gIeN26cczv1m63mPFPbq3qodRx27NjhzAqhAoRXXHFFvlStU6ZMkQ0bNuS76KHKv+OOO/TFFnWRw3EBJlCoi/BqBK+ijl/qNea9YK/Sqc6ZM8d5QcRBBRnUsU79Fvfu3Vu+/fbbfBep1WPqmKp68j/66KP5tlUXt9TxwXFfXZByzHX2008/Sf/+/XUaUnfUZ6T2r4436nigPkt1YcVBpZxVx+jx48fr/7FAdP78eX0RSP3vqhEJx44d098rdRzMO7+1ie8egPJBW462nD9Sx3RHwOLll1/Wn5tKdZq344E6hqmOyyropdp46hjmKtilzjEciyMIpc6n8j6u5msHUH5oz9GeU2jPlX1HMPWeValSRb/uSy+9VH8ear51R4cwdbtnzx554YUX9HdAdUoo2FlM/R+7O2bmfTzvcRZABWAHDJs2bZo6Yumlbt26+ZaqVas6n1PLRRddZD9+/Hihfbzzzjv6+caNG5eoDocOHXKWUaVKlUL1KLhMmDDB5Wu44oor3JaxevVqZxmuLF++PN/rDQsLs9esWVPfOh778MMP822TmZlpv+WWW5zPW61We/Xq1fWt47EhQ4bYs7KyXJb5t7/9Ld/7q7YNDQ3V93v06GGfMmVKka9L1ScmJqZQnSMiIvLtd8aMGT79vIqi6lrcZ1GwHmqpVauW8/OtUaOGPSQkJN9rUK9TvY7s7Owi96k+k/HjxzvfR8eiPtvKlSvne6xBgwb2ZcuW+fDVA/Cl9evX21u0aJHvexseHq5/I/L+zloslkK/td78zr3//vt6v3mPQ5GRkc6/GzZsaN+9e7fb45Za1G+Wqlfe/aj7CxcuzLdd3m0cx4y8ZanX8vLLL5fo/Ro2bJhzv8UdR9Vy9OhRl3VTx8vifuPVcbeg1NRU+6BBg/K9RvVe5j22tmvXrtB2a9asybeO+q3O+3tdrVo1+9q1a13W5+DBg/rzcayrjn+Ofan3/+OPPy7ydSUnJ9uvu+66QnVWZarPwvGYOqYUpP631HPqf82X8p6vFPVZFKyH+j9yfLZ16tSxV6pUKd/rUkvXrl3tP/74Y5l+9wCUL9pyf6ItZ7Ytp/4HvGWz2exPPvmkPSoqqlC7L+/7ohZ1/Jk3b57bY2BxizpHAlD+aM95h/ZcxW7PlfRYpf634+Pj870H6vWq/+eC10b79u1r/+2339wez4tbSnK8BxCYQk0HzoG8Tp48me9v1WsxNjZWzwOj0ksOHTpU95gsS6qXdXEp3soiJWWfPn10
L33VY1/1yjx48KDunad647du3Vr39FajdPNS78V//vMfueWWW+Stt97Sc5SpXt9q3haVgk+lrla9/txRvQHViAc1mlhtq3q5qbRzqpem6tnv6MnvzsCBA3WvTtULX83ppuqvejKqHqFt2rTRPfSvvfZa3cPU36meew5qzrPatWvr9171zFWjEFTvS0/m41OfyT/+8Q/9/r3zzjt6Phz1Waren+o5lZZPfTbqvVOLSuUEwD+p0Teqd7AaHapG3Hz//fdy6tQpPQJH9dhWv3Oq9/tdd92lf6dLSo346dixo7z00kt6NJbqWa6Of+r3R/2GT5o0Sfdizkv9Pn3yySe6B/d3332nt1F1U6On1O+M6rGuRnKpXvx5ffnll3obNcLq6NGjzuOu2kaNJlK9wVVdSkP1gi54PHfF0ZvdV9Rvt+rxreZ2U8dE9Xmp3/aYmBidVrVnz576+FaQ+gxVj/qZM2fq4+/hw4d12jl1PFTHMJVCVJ2LuKJ6zm/btk0fL9VILZWGUI26u/rqq3WP/uLeS/W5qjnR1DFUjUZUn6V679R1H/UZX3jhhfqzVMd5f6dG+alFUf+H6n1X75t6H9VIBnUcVedz/vTdA+BbtOVoywUSdaxXIy1VqlfVblPnYOrYc+bMGT1aUY1q7NChg1x33XX6M1LHNQCBhfZcydCeq5jtuZJS7/PmzZv156beQzXSXWXcUedj6v1RI627d++uR6g7RrgDQHEsKnpd7FoAAAAAAAAAAAAAgGKpjhwrVqzQ0wepRXWWUZ3+VacbNbVaaaxevVp32lEdfVSafdXx8uabb9Ydb4oagKfWVdMVLVq0SI4cOSLR0dE63f9f//pX3UnIHxC4BgAAAAAAAAAAAAAfeeWVV2Ty5MmFHi9t4Hr27Nk6M4cK7zZo0EBnj929e7czC5PKzKGyixSkMlmoLAh79+7VmWBVZojTp0/rzB8qW4XKJHvvvfeKaVbTFQAAAAAAAAAAAACAYKFS5vfu3VumTJkiS5YskalTp5Z6n5s3b9bTUCgJCQl66ogtW7bIzz//rKc4UNMnqGmHXBk1apQOWqv11PpqO7W92o8Kgk+YMEGPCjeNEdcAAAAAAAAAAAAAUEbUiOb777+/VCOuBw4cKB9//LEMHTpU3n333XzP7d+/X9q0aSM2m022b98uF198sfO5rVu36rnmrVarDl63aNEi37Zqf/Pnz5dBgwbJ4sWLxSRGXAMAAAAAAAAAAACAn0pJSZFly5bp+2PHji30fMuWLaVXr176/sKFC/M9p+a0VtTzBYPWyrhx4/Tt559/LqmpqWISgWsAAAAAAAAAAAAA8FNbt27V81ir+ak7d+7scp0ePXro2w0bNuR73PH35Zdf7nI7tT+134yMDOPpwglcAwAAAAAAAAAAAICf2rdvn75t1KiRhIWFuVynefPm+lalA3e1reP5gtT+GjZs6HLb8hZqtHQAAAAAAAAAAAAAMCghIUESExO92kal7Hak2S5rZ8+e1bc1atRwu47juXPnzvls2/JG4LoY59PSjZQbZhFjbGKu8Fy73VjZGTnmyo4ON5f8wGLLNVKu3RoiFe01K9bM88bKtodXNle21dzhJjIqyljZgSTF0PFOyTX38ytZuRXvtz892yamRIUZPN4ZPMew5GYZK9seEm6sbIstx1jZ//7JTCNrUJtaYkq16ErGyg406RkZUtGY/A00yW4x2LCuoJ+3qfc812buNUf+ftRY2VnVG0tFVCkq0nQVAoLJNl6omGtzpOWa++2PtqVVyOsulmwzrzsk9YyYklslzljZtjBz17m+/dXc//h3R8wF0kZdUt9IubFVzV3LDTThHUb6ZD+P3dBItmzZ4tU2SUlJUl4y/tuWDQ93f61HpftW0tPTfbZteSNwDQAAAAAAAAAAAKDCiouLk/j4eK+3KS+RkX923svKcj9AQs2BrUQVGMyltk1LSyvRtuWNwDUAAAAAAAAAAACAgGPxUXZXlfK7vNJ+l0T1
6tXzpf12xfGcY92826rAdUm2LW/m8jUCAAAAAAAAAAAAAIrUqlUrfXv06FHJzs52uc7BgwfzrVtw2wMHDrjcTu1P7dfVtuWNwDUAAAAAAAAAAACAgBxx7YvF33Xo0EHPUa1Sem/cuNHlOuvWrdO3Xbp0yff4ZZddlu/5gtT+VBpxlVK8ffv2YhKBawAAAAAAAAAAAADwUzExMdK3b199PzExsdDz+/fvl1WrVun7gwcPzvec4+/Vq1e7HHWdkJCgb/v37y/R0dFiEoFrAAAAAAAAAAAAAAEn2EZcd+/eXZo0aSKvvPJKoeemTp0qFotF5s+fr4PXdrtdP56UlCRDhgwRm80mAwcOlHbt2uXbLj4+Xq677jrJzc2V2267Ta+vqO3VftT+rFarPP7442JaqOkKAAAAAAAAAAAAAIC3/CnonNexY8d0em+HjIwMffvNN99IrVq1nI8//PDDenH45Zdf5MiRI/L7778X2menTp1k1qxZ8sADD8i4ceNkxowZel+7d+/WKcRbt24tb775psv6vP3229KtWzfZvHmzNG3aVC688EL57bffdD1VMFwFylWA2zRGXAMAAAAAAAAAAACAj6jRzWfOnHEuqamp+vGcnJx8j6elpXm130mTJsmKFSt0Wm+1TxW0bty4sTz22GOyadOmfEHxvGrXrq2D1mo9tb7aTm2v9vPVV1/J/fffL/6AEdcAAPgpNefIzJkz5fvvv5eUlBR9QnHzzTfLo48+KpUrVzZdPQAAAACAF2jjAQDge5YQ/xxxrdJ9O1J5e+Pw4cPFrnPVVVfppSTzZD/99NN68VeMuAYAwA/Nnj1bn3wsXbpUIiMj5YILLtAnLSr9i0oJc/bsWdNVBAAAAAB4iDYeAABA8QhcAwDgZ1TKFpXyRUlISJCjR4/Kli1b5Oeff5aOHTvKTz/9JGPGjDFdTQAAAACAB2jjAQBQdqzWEJ8s8A8ErgEA8DPTp08Xm80md911l4wdO1YsFot+vF69erJgwQKxWq2yZMkS2bFjh+mqAgAAAACKQRsPAADAMwSuAQDwI2qes2XLlun76oJGQS1btpRevXrp+wsXLiz3+gEAAAAAPEcbDwCAsmWxhvhkgX8gcA0AgB/ZunWrZGZmSkREhHTu3NnlOj169NC3GzZsKOfaAQAAAAC8QRsPAICyReA6uBC4BgDAj+zbt0/fNmrUSMLCwlyu07x5c327d+/ecq0bAAAAAMA7tPEAAAA8F+rFugAAoIydPXtW39aoUcPtOo7nzp07V271AgAAAAB4jzYeAABly2JljG4w4dMEAMCPZGRk6Nvw8HC366gUc0p6enq51QsAAAAA4D3aeAAAAJ5jxDUAAH4kMjJS32ZlZbldR82PpkRFRbldJyEhQRITEz0ud9jw4TJy1Giv6goAAAAAKBptPAAAyhbzUwcXAtcAAPiR6tWr50sn54rjOce6riQlJcmWLVs8Lrdvv35e1RMAAAAAUDzaeAAAAJ4jcA0AgB9p1aqVvj169KhkZ2dLWFhYoXUOHjyYb11X4uLiJD4+3uNyY2NjS1RfAAAAAIB7tPEAAChbjLgOLgSuAQDwIx06dNBzn6lUcRs3bpRu3boVWmfdunX6tkuXLm73M27cOL14KiWNudQAAAAAwNdo4wEAULYIXAcXq+kKAACA/4mJiZG+ffvq+67mL9u/f7+sWrVK3x88eHC51w8AAAAA4DnaeAAAAJ4jcA0AgJ+ZOnWqWCwWmT9/vr6wYbfbnXOaDRkyRGw2mwwcOFDatWtnuqoAAAAAgGLQxgMAoOxYQkJ8ssA/ELgGAMDPdOrUSWbNmqXvq1RwjRs31nOZNW3aVDZv3iytW7eWN99803Q1AQAAAAAeoI0HAADgGQLXAAD4oUmTJsmKFSukf//+kpqaKrt379YXNx577DHZtGmT1KpVy3QVAQAAAAAeoo0HAEDZzXHtiwX+IdR0BQAAgGtXXXWVXgAAAAAAgY82HgAAQJCNuF69erVc
d911Urt2bYmKipI2bdroeWJUT0UAAAAAQGChjQcAAAAAKClGXAeXgApcz549W/dKXLp0qURGRsoFF1wghw8flhkzZui5Ys6ePWu6igAAAAAAD9HGAwAAAACUhtUa4pMF/iFgAtebN2/Wc8EoCQkJcvToUdmyZYv8/PPP0rFjR/npp59kzJgxpqsJAAAAAPAAbTwAAAAAABCQgevp06eLzWaTu+66S8aOHSsWi0U/Xq9ePVmwYIFYrVZZsmSJ7Nixw3RVAQAAAADFoI0HAAAAACgtUoUHl4AIXKekpMiyZcv0fXVBo6CWLVtKr1699P2FCxeWe/0AAAAAAJ6jjQcAAAAAAAIycL1161bJzMyUiIgI6dy5s8t1evTooW83bNhQzrUDAAAAAHiDNh4AAAAAwBcYcR1cAiJwvW/fPn3bqFEjCQsLc7lO8+bN9e3evXvLtW4AAAAAAO/QxgMAAAAAAAWFSgA4e/asvq1Ro4bbdRzPnTt3rtzqBQAAAADwHm08AAAAAIAvMFo6uARE4DojI0PfhoeHu11HpZhT0tPTy61eAAAAAADv0cYDAAAAAPgCgevgEhCpwiMjI/VtVlaW23XU/GhKVFRUudULAAAAAOA92ngAAAAAACAgR1xXr149Xzo5VxzPOdZ1JyEhQRITEz0ue+jw4TJy1GiP1wcAAAAA+G8bb/iIETJ6NG08AAAAAAgGjLgOLgERuG7VqpW+PXr0qGRnZ0tYWFihdQ4ePJhvXXeSkpJky5YtHpfdp18/r+sLAECgsRssO8JiM1Z2pDXHWNmSaybxTYjV3OmfxW7uPy3b4D95uC3XXOFWc9+vXGvhc/byclvbOkbKDcn9c4Qw/LuN169/f6/rCwBAoIn8/aixsjOqNTJWdiWDV7vTbZWNlR0pBtu2NjNtjuxaLcSU8wYbmFVs2cbKjo819z/evZq5tpYtPCASFwNBIyAC1x06dNBzn6lUcRs3bpRu3boVWmfdunX6tkuXLkXuKy4uTuLj4z0uOzY2tgQ1BgAAAAC4QxsPAAAAAOALlhBGXAeTgAhcx8TESN++feXTTz/VKeAKXtTYv3+/rFq1St8fPHhwkfsaN26cXjx1Pi29hLUGAAAAAPhbGy89I6OEtQYAAAAAAGUpYHIcTJ06VSwWi8yfP19f2LD/N9WkSgs3ZMgQsdlsMnDgQGnXrp3pqgIAAAAAikEbDwAAAADgizmufbHAPwRM4LpTp04ya9YsfV/1pm/cuLFOB9e0aVPZvHmztG7dWt58803T1QQAAAAAeIA2HgAAAACgtAhcB5eACVwrkyZNkhUrVkj//v0lNTVVdu/erS9uPPbYY7Jp0yapVauW6SoCAAAAADxEGw8AAAAAAATUHNd5XXXVVXoBAAAAAAQ+2ngAAAAAgJJitHRwCagR1wAAAAAAAAAAAACA4EPgGgAAP3PixAmZP3++TJgwQbp06SJRUVFisVikZ8+epqsGAAAAAPASbTwAAMqO1WrxyQL/EHCpwgEACHbvv/++TJ482XQ1AAAAAAA+QBsPAADAM4y4BgDAz1SpUkV69+4tU6ZMkSVLlsjUqVNNVwkAAAAAUEK08QAAKDsWq8UnS1lZvXq1XHfddVK7dm2ddaVNmzb6XCA1NdWr/axZs0ZnbPFkefLJJwttX9w2sbGx4g8YcQ0AgJ8ZOXKkXhyOHz9utD4AAAAAgJKjjQcAQNlRQVd/NXv2bJk4caLY7XZp0KCBNGzYUHbv3i0zZsyQxYsXy/r166VGjRoe7atq1arSrVs3t88nJyfLzp079f2uXbu6Xe+SSy6RiIiIQo/XrFlT/AGBawAAAAAAAAAAAADwkc2bN8ukSZP0/YSEBBkzZowOsv/6668yYMAA/bx6TAWwPdGhQwcd6HZHjbJWgWsVHL/qqqvcrrdw4UJp0qSJ+CtShQMAAAAAAAAAAAAIOFarxSeLr02fPl1sNpvcddddMnbsWOfI8Hr16smCBQvEarXqKUR27NhR6rLs
drvMmzdP3x86dKjed6AK3JoDAAAAAAAAAAAAgB9JSUmRZcuW6fsqaF1Qy5YtpVevXs4R0KW1du1a+fnnn/X94cOHSyAjVTgAAAAAAAAAAACAgGMpg9HSpbV161bJzMzUc0l37tzZ5To9evSQlStXyoYNG0pd3ty5c/Vt9+7dpUWLFsWOBFfpynNycqR+/fo6gH7rrbe6nPfaBALXAAAAAAAAAAAAAOAD+/bt07eNGjWSsLAwl+s0b95c3+7du7dUZaWmpsqiRYs8Hm399ttv5/v73XfflWnTpum5tuPj48U0AtcAAAShhIQESUxM9Hj9ocOHy8hRo8u0TgAAAACA8mnjjbplgIy967YyrRMAAME04trbY60jDfi4ceMKPX727Fl9W6NGDbfbOp47d+6clMbChQt1avJKlSrJLbfc4na9G264Qc+33a5dO2nQoIHeRo34/tvf/qbTjPfp00ePFG/YsKGYROAaAIAglJSUJFu2bPF4/T79+pVpfQAAAAAA5dfGu7ZnlzKtDwAA/sJqsRg51jq2cSUjI0PfhoeHu93WkZo7PT1dfJEm/KabbpKYmBi363300Uf5/o6MjJTbbrtNevfuLR07dpSjR4/Kk08+KXPmzBGTCFwDABCE4uLivErtEhsbW6b1AQAAAACUYxuvTu0yrQ8AABX9WOvYxhUVFFaysrLcbqvmwFaioqKkpA4dOiRr1671OE24K7Vq1ZIpU6bIPffcIx9++KG8+eabYvFRZ4CSIHANAEAQUilqXKWpced8Wul69gEAAAAA/KeNl/Nr6ebLBACgoqUK9/ZYW5Tq1avnSxnuiuM5x7ol8e6774rdbpfGjRvLlVdeWeL9dO3a1VkntdSsWVNMsRorGQAAAAAAAAAAAACCSKtWrfStSr+dnZ3tcp2DBw/mW9dbdrtd5s2bp+8PGzasVKOk86Y0z8nJEZMIXAMAAAAAAAAAAAAIyBHXvlh8qUOHDjoYrNKBb9y40eU669at07ddunQpURlff/21ThWuAtYqcF0aO3fudKY4NznaWiFwDQCAnzl27JieW8SxPProo/rxb775Jt/jL7zwgumqAgAAAACKQRsPAICKJSYmRvr27avvJyYmFnp+//79smrVKn1/8ODBJSpj7ty5+rZHjx7SrFmzEtdVjbCeOXOmvt+rVy8JDTU7yzSBawAA/Exubq6cOXPGuaSmpjpPIvI+npaWZrqqAAAAAIBi0MYDAKDsWK0Wnyy+NnXqVD0aev78+Tp4rVJ7K0lJSTJkyBCx2WwycOBAadeuXb7tmjRpopdFixa53XdKSorz+REjRhRbF9VpTs2Hff78+UKd61TgfMOGDTpg/cQTT4hpZsPmAACgEHVi4jiRAQAAAAAENtp4AACUHYufDtHt1KmTzJo1Sx544AEZN26czJgxQ2dY2b17t04h3rp1a3nzzTcLbXfkyBFncNodFbRWHeEqV67s0YjtPXv2yPPPPy+jRo3So7Nr1KghycnJsnfvXn2OolKEz5kzRy699FIxjcA1AAAAAAAAAAAAAPjQpEmT5KKLLtKpuL///ns5deqUNG7cWAebp0yZItHR0aVKEz548GCP9nHPPfdIbGysbNq0SY4fPy6HDx+WiIgIadu2rfTu3Vvuu+8+ad68ufgDAtcAAAAAAAAAAAAAAo5Kx+3PrrrqKr14yu5BlpY1a9Z4VQc137Zjzm1/56cD6AEAAAAAAAAAAAAAFQUjrgEAAAAAAAAAAAAEHKvVv0dcwzuMuAYAAAAAAAAAAAAAGMWIawAAAAAAAAAAAAABx8KI66BC4BoAAAAAAAAAAABAwCFwHVxIFQ4AAAAAAAAAAAAAMIoR1wAAAAAAAAAAAAACjtXCiOtgwohrAAAAAAAAAAAAAIBRjLgGAAAAAAAAAAAAEHCY4zq4ELguhql/9xy7oYJV2TabsbJtBl93dLi5BAQhmSnGyrYe+N5IuTkX9hJTbJYQc2VHVjNWtsmMKbkmv9zwfxZz
v7+5IREV7jsZYvD7aDf4Q2Tul1/EHhZlrGxLbpaxsiXE3LsekptZ4X5T4Dlrbraxsu0Gj3nGVMTXrP7PsjOMlR16/EdjZWc3ijdSbqjB/7Ocag2MlQ0UxVapurGyQwwGEExefojKPGus7KxIc593uNVa4dqX1TJPGyvbFlXVWNmhBr/bv4eZ+x+vxGVNoFwRuAYAAAAAAAAAAAAQcBhxHVwIXAMAAAAAAAAAAAAIOFYC10GlYubsAgAAAAAAAAAAAAD4DUZcAwAAAAAAAAAAAAg4FoNz3sP3GHENAAAAAAAAAAAAADCKEdcAAAAAAAAAAAAAAo6FIbpBhY8TAAAAAAAAAAAAAGAUgWsAAPyI3W6Xb7/9Vh599FHp3r271KxZU8LCwqR27drSp08f+fe//63XAQAAAAD4P9p4AACULavV4pMF/oFU4QAA+JFVq1ZJ7969nX83a9ZMmjZtKocOHZIVK1boZcGCBbJ48WKJiIgwWlcAAAAAQNFo4wEAULYsBJ2DCiOuAQDwI6qnvbqI8eqrr8rJkyfl4MGDsmnTJjlz5ozMmzdPX8hYunSpPPHEE6arCgAAAAAoBm08AAAAzxG4BgDAj3Tu3Fn27t0rEyZMkDp16uR77q677nJezJgzZ47YbDZDtQQAAAAAeII2HgAAZctisfhkgX8gcA0AgB+pUqWKnu/Mnf79++vbs2fPyunTp8uxZgAAAAAAb9HGAwAA8BxzXAMAEEDS09Od96OioozWBQAAAABQOrTxAAAoHStzXAcVRlwDABBAFixYoG/btWune+4DAAAAAAIXbTwAAID/YcQ1AAABYvPmzfLGG2/o+48++qjp6gAAAAAASoE2HgAApWdhxHVQIXANAEAAOHnypAwaNEhycnLkxhtvlNtuu810lQAAAAAAJUQbDwAA3wghcB1USBUOAICfS05Olv79+8vRo0elY8eOMnfuXNNVAgAAAACUEG08AAAA1xhxDQCAH0tJSZF+/frJ1q1bpW3btrJ8+XKP5j1LSEiQxMREj8sZOny4jBw1upS1BQAAAAD4Qxtv5J1DZMyIoaWsLQAA/o8R18GFwDUAAH4qLS1Nrr32WtmwYYO0bNlSVq5cKTVr1vRo26SkJNmyZYvHZfXp168UNQUAAAAA+FMb75qre5WipgAAAGYQuAYAwA9lZGTIgAEDZO3atdK4cWP56quvJDY21uPt4+LiJD4+3uP1vdk3AAAAAMDP23h165awpgAABBZGXAcXAtcAAPiZ7Oxsuemmm/SFjPr168uqVaukYcOGXu1j3LhxevHU+bT0EtQUAAAAAOCPbbys30+VoKYAAABmEbgGAMCP5Obmyu233y6ff/657n2vLmg0a9bMdLUAAAAAACVAGw8AgLLFiOvgQuAaAAA/8sEHH8iiRYv0/cjISBk5cqTbdWfPni0dOnQox9oBAAAAALxBGw8AgLJF4Dq4ELgGAMCPZGZmOu8fPnxYL+4kJyeXU60AAAAAACVBGw8AAMBzVi/WBQAAZWz48OFit9s9Wnr27Gm6ugAAAACAItDGAwCgbIVaLT5Z4B8IXAMAAAAAAAAAAAAAjAqYwPWJEydk/vz5MmHCBOnSpYtERUWJxWKhJyIAAAAABCDaeAAAAAAAX8xx7YsF/iFg5rh+//33ZfLkyaarAQAAAADwAdp4AAAAAAAgIAPXVapUkd69e0unTp30snXrVpk+fbrpagEAAAAASoA2HgAAAACgtBgtHVwCJlX4yJEjZcWKFfLMM8/IjTfeKHXq1DFdJQAAAABACdHGAwAAAAAEu9WrV8t1110ntWvX1lNktWnTRqZOnSqpqale72v48OF6iq2ilmXLlrndPiUlRR5//HFdB1UXVSdVtzVr1oi/CJgR1wAAAAAAAAAAAADgEGL13zG6s2fPlokTJ4rdbpcGDRpIw4YNZffu3TJjxgxZvHixrF+/XmrUqOH1fhs2bCiNGjVy+Vz16tVdPv7bb79J9+7dZe/evRIR
ESEXXnihnD59WpYuXSqff/65/OMf/5B7771XTCNwDQAAAAAAAAAAACDg+Guq8M2bN8ukSZP0/YSEBBkzZoweEf3rr7/KgAED9PPqMRXALkkGs7///e9ebTNq1CgdtO7YsaN88sknUq9ePR1Qf/PNN2XcuHEyYcIE6dq1q7Rv315M8t9uCAAAAAAAAAAAAAAQYKZPny42m03uuusuGTt2rA5aKypgvGDBArFarbJkyRLZsWNHmddl69atOlitynz//fd1HRRVJ1U3Vcfc3FxdZ9MIXAMAAAAAAAAAAAAIyBHXvlh8Sc0l7ZhrWgWGC2rZsqX06tVL31+4cKGUtUWLFulbVWaLFi0KPa9GXCsqZXhJ5t72JVKFAwAAAAAAAAAAAICPRjhnZmbquaQ7d+7scp0ePXrIypUrZcOGDV7vf/Xq1bJr1y45c+aMVKtWTaf/vvPOO6Vx48Yu13eUcfnll7t8XtVR1TUjI0O2bdsm3bp1E1MYcQ0AAAAAAAAAAAAg4PjjiOt9+/bp20aNGklYWJjLdZo3b65v1bzT3lq7dq0eRa0C2B9++KE8/vjjehT3Cy+8UGR9HGUWpOrYsGHDEtfHlyrciGs1AXpiYqLH6w8bPlxGjhpdpnUCAAAAAJRPG2/EsKEyZtTIMq0TAAAAACC425aONOCONNt5nT17Vt/WqFHD7baO586dO+dxeS1btpSZM2fqlN9NmjTRo6TVHNnqMZVy/JFHHpHo6Gi59957y6U+ZaHCBa6TkpJky5YtHq/ft1+/Mq0PAAD+INTHvQq9kW2zGys7xNzLNiY502as7Frh5sq2W8wlGrLkZhsr2xYWaaxsMffVlixruJFyw3MyxByDn3WAtfH69+1TpvUBAMAfpIREGys7MyPXWNlVI0KMlW0PNXc+FnX6z5F0Jvwgf47QK28dc0+JKbmVaxoru8ZVU4yVfe7Lp4yVHRNh7vuVnm3uWgY8E2KxGGlbOrZxRaXcVsLD3V+fUEFnJT093ePy/va3vxV67NJLL5UPPvhAxo8fL6+//rpeZ+jQoTqAXdb1KQsVLnAdFxcn8fHxHq8fGxtbpvUBAAAAAJQcbTwAAAAAqLh8lebb27alYxtXIiP/7GyRlZXldls1B7YSFRUlvvDMM8/InDlz5Pfff5dVq1bJgAED8tUnLS2tXOtTUhUucK2G7Lsatu9OSprZngUAAAAAAN+18TJTz5dpfQAAAAAAwd+2LEr16tXzpeh2xfGcY93Sqlq1qrRt21a2bt0q+/fvL1QfFbguz/qUVIULXAMAAAAAAAAAAAAIfL4ace1LrVq10rdHjx6V7OxsCQsLK7TOwYMH863rC+H/TQWek5NTqD7Hjx+XAwcOuNxO1VHV1df1KQlzk/156dixY1KrVi3n8uijj+rHv/nmm3yPv/DCC6arCgAAAAAoBm08AAAAAEAw6tChgw4iq/TbGzdudLnOunXr9G2XLl18UmZOTo7s2bNH32/QoEG+5y677LJ8ZRak6qjSiKuU4u3btxeTAiZwnZubK2fOnHEuqampzg8i7+NqqDsAAAAAwL/RxgMAAAAAlFao1eKTxZdiYmKkb9+++n5iYmKh51UqbzUPtTJ48GCflJmQkCDJyckSGhoqvXr1yveco4zVq1e7HHWttlX69+8v0dHRYlLABK6bNGkidru92OXvf/+76aoCAAAAAIpBGw8AAAAAEKymTp0qFotF5s+fr4PXqn2rJCUlyZAhQ8Rms8nAgQOlXbt2hdrKTZo0kUWLFuV7fMWKFfLII48Umr9ajZSePXu2PPDAA/rvu+++W+Li4vKtEx8fL9ddd53uQH7bbbfpOiiqTqpuqo5Wq1Uef/xxMY05rgEAAAAAAAAAAAAEHH+c41rp1KmTzJo1SweUx40bJzNmzNDTYe3evVunEG/durW8+eabhbY7cuSIvk1JScn3uMpSpqbSUkvdunWd6cD37t3rXPemm26SmTNnuqzP22+/Ld26dZPNmzdL06ZN
5cILL5TffvtNT+OlAuyvvPKKDnCbFjAjrgEAqCgWLlwoY8eOlUsuuUTq1asnEREROr2MOnFQPfVU2lQAAAAAQGCgjQcAQNkGrn2xlIVJkybpkdIqBbcKPKugdePGjeWxxx6TTZs26UC2pzp27KjPG3r37q3nolbzWf/4449StWpVGTRokHzyySd6lLaaW9uV2rVr66C1KlvVQdVF1UnV7auvvpL7779f/IHF7hibDpdS0tKlosmxmfuXMFi0RIWZ68cRkpm/50x5sh743ki5ORfmn2OhPNnEP3tglTWLwZeda/DLHV0pSgJN+/btZfv27fpihkrrok5gTp06JUePHtXP16lTR7788stCaWRKIz0jQyricSfE4BfDVNFn03PNFCwitcJtxsq2W8wd5y252cbKtoVFGivbZCsj11Dh4bmZYkpETDVjZQeazNTzFfK3yBiDr9lu8DhvzTZ3bhV6/EdjZWc3MjRCw+R3y27u/MZmCZGKqFKUufObQGrjnT2fJqZk5po7EawaYe57EZJt7j0PPXfMWNk/SEMj5XasZO56am7lmsbKrnHVFGNln/vyqQrZtk3PNnOsrxFTyUi5gejxL37yyX5m9L/AJ/tB6VTAVjMAAP5t/Pjx8vXXX8v58+fl0KFD8sMPP+gUMTt27JC//OUv+gLH7bffbrqaAAAAAAAP0MYDAKBijriG9whcAwDgZ8aMGSOXX365hIWF5Xv8oosukrfeekvfV6lcfvrJN70JAQAAAABlhzYeAACAZ0I9XA8AAPiBCy74X8qatDRzacgAAAAAAKVHGw8AgNJhtHRwYcQ1AAABZP369fo2OjpaWrdubbo6AAAAAIBSoI0HAADwP4y4BgDAz9lsNjlx4oR8+eWX8sgjj+jHnnvuOX1hAwAAAAAQWGjjAQDgO4y4Di4ErgEA8FOvvPKKTJ48Od9jnTt3lnfffVf69etnrF4AAAAAAO/RxgMAwPcIXAcXUoUDAOCn6tevL926dZNLL71U4uLixGKxyLZt22TevHny+++/m64eAAAAAMALtPEAAACKxohrAAD81M0336wXhx07dsh9990nCxYskJ9++kk2bdokISEhLrdNSEiQxMREj8saPmKEjB492if1BgAAAACYbePdMXS4DB85yif1BgDAnzHiOrgQuAYAIEBcfPHFsnTpUmnWrJnulf/+++/LHXfc4XLdpKQk2bJli8f77te/vw9rCgAAAAAw2cbr3YfU4wAAIPAQuAYAIIDExMTIFVdcIYsXL5bNmze7vaih0s7Fx8d7vN/Y2Fgf1hIAAAAAYLKNV5c2HgCggmDEdXAhcA0AQIDJycnJd+vKuHHj9OKp9IwMn9QNAAAAAGC+jXf2fJpP6gYAAFCeCFwDABBAzp49K2vWrNH3O3ToYLo6AAAAAIBSoI0HAEDpMOI6uFhNVwAAAPzP119/LTNmzJDDhw8Xek7NZ9a3b19JTk6W+vXry80332ykjgAAAAAAz9DGAwCg7APXvljgHxhxDQCAHzl37pxMnTpVL2reaXXxIiQkRI4dOyZJSUl6HfXYZ599JtHR0aarCwAAAAAoAm08AAAAzxG4BgDAj3Tt2lVmzZqlU8Xt2rVL9u3bJxkZGVK9enW58sor5frrr5fRo0dLTEyM6aoCAAAAAIpBGw8AgLIVYmG0dDAhcA0AgB+pU6eOTJ48WS8AAAAAgMBGGw8AAMBzBK4BAAAAAAAAAAAABBwrI66DitV0BQAAAAAAAAAAAAAAFRsjrgEAAAAAAAAAAAAEnBAGXAcVAtcAAAAAAAAAAAAAAo7VSuQ6mJAqHAAAAAAAAAAAAABgFCOuAQAAAAAAAAAAAAScEAsjroMJI64BAAAAAAAAAAAAAEYx4hoAAAAAAAAAAABAwLEy4jqoMOIaAAAAAAAAAAAAAGAUI64BAAAAAAAAAAAABJwQBlwHFQLXAAAAAAAAAAAAAAKO1UrkOpgQuPZToWIzVrY1JMRY2RGn9horO612
K2NlW8KijJWd2/YqM+Xa7GJKWE6asbLtoRHmyraY+24zz4j/s5v7SkpEZrKxsu3hlY2VbQsJM1JurXBz5xi5VjOvWfktPcdY2dFh4cbKrpydYazs3NBIY2WbOur8YTf3Wdc2VnLgsVvNNYMt2elmCraaOw8MO7XfWNkZcX+pkL+BtiadjJVtMXVSaTd3fpNtcBa+MDF3Ep9jsP0Az0Tn/GGs7JgsQ8c7JdvgtY8zR4yVLZExxopuUt3Mta51p3PFlK7mLiXI758/ZqzsFDHX3on5/bixsqulnDZTcMxlZsoFDCNwDQAAAAAAAAAAACDgMGgquJjrFgoAAAAAAAAAAAAAACOuAQAAAAAAAAAAAASiEAZcBxVGXAMAAAAAAAAAAAAAjGLENQAAAAAAAAAAAICAwxzXwYXANQAAAAAAAAAAAICAE2IlcB1MSBUOAAAAAAAAAAAAADCKwDUAAH7u888/F4vFopcmTZqYrg4AAAAAoJRo5wEA4LtU4b5Y4B8IXAMA4MdSUlLknnvuMV0NAAAAAICP0M4DAABwjcA1AAB+7LHHHpOjR4/KDTfcYLoqAAAAAAAfoJ0HAIDvhFh8s8A/ELgGAMBPbdiwQf75z3/qixkDBw40XR0AAAAAQCnRzgMAoGJZvXq1XHfddVK7dm2JioqSNm3ayNSpUyU1NdWr/eTm5sqKFStk0qRJ0rlzZ6lWrZqEh4dLXFycPq9YunSp220PHz7snKLE3XLZZZeJPwg1XQEAAFBYdna2jBkzRipVqiT/+Mc/ZOXKlaarBAAAAAAoBdp5AAD4nj/PTz179myZOHGi2O12adCggTRs2FB2794tM2bMkMWLF8v69eulRo0aHu1r7ty5Mnr0aH3farVKixYtJDo6Wg4cOCCffPKJXsaOHStvvPGGDkS7061bN5ePt23bVvwBgWsAAPzQs88+Kzt37pSXX35Zn9QAAAAAAAIb7TwAAHwvxOqfgevNmzfr0dFKQkKC7rymAsq//vqrDBgwQD+vHlMBbE/Y7Xa5+OKLZcKECTJ48GCpWrWqfjwnJ0deeeUVefjhhyUxMVHat28v99xzj9v9qGC5PyNVOAAAfuann36SZ555RuLj4+X+++83XR0AAAAAQCnRzgMAoGKZPn262Gw2ueuuu/RIaMco6Hr16smCBQv0qOklS5bIjh07PNrfoEGDZNu2bTJq1Chn0FoJDQ2Vv/71r87R2CpIHsgIXAMA4EdUzznV006lkFMnGSEhIaarBAAAAAAoBdp5AACUHTXg2heLL6WkpMiyZcv0fRW0Lqhly5bSq1cvfX/hwoUe7bNGjRpFpgDv37+/vt27d68EMlKFAwDgR/71r3/JN998o1O+XHLJJaarAwAAAAAoJdp5AABULFu3bpXMzEyJiIiQzp07u1ynR48esnLlStmwYYNPykxPT9e3lSpVKnI9dT6yZ88eHQRv0qSJ9O3bVwYOHKhHgPsDAtcAAPiJ48ePy5QpU6R+/foyY8YM09UBAAAAAJQS7TwAAMpWSBGjkE3Zt2+fvm3UqJGEhYW5XKd58+Y+HSG9YMECZ0C8KLNnz873t2NebJW2vGnTpmIagWsAAPyEmufsjz/+kHfeeUdiYmJKtS+Vfk6ddHhq+PARMuq/86AAAAAAAPyrnedtG2/knbfKmOFDS1weAAAVjbfHWkca8HHjxhV6/OzZs8703u44njt37pyU1scffyyfffaZHkX98MMPF3pezYN95513ym233SZt27bV82z/9ttvsnTpUnn88cf13Nl9+vSRzZs3S5UqVcQkAtcAAPiJLVu26Nt7771XL65SvRw7dkxiY2P1fdULrmvXri73lZSU5NyfJ/r1+3MOFAAAAACA/7XzvG3jXdP7ylLWHACAwGD10Yhrb4+1jm1cycjI0Lfh4eFut1VpxPOeD5TUnj17ZNiwYfr+pEmTXJ5HNGjQQObPn5/vMRW8HjNmjFx55ZXSsWNHOXDggLz22ms6kG0SgWsA
APzMyZMn3T5ns9mcz2dlZbldLy4uTuLj4z0u03GRBAAAAADgf+0879t4dUpQSwAAAk+Ij6Zm9vZY69jGlcjIyGKv36o5sJWoqCgpqWPHjuk5qpOTk+Waa66R559/3ut9tGjRQu655x69repAR+AaAABohw8fdvvc3LlzZcSIEdK4ceMi13NQKWpcpalxJy39z16AAAAAAAD/a+d528bLOnfCq3oCAFDReXusLUr16tXzpQx3xfGcY11vnThxQq666io5evSo9OzZUxYvXux2Pu3iOEZp79+/X0wjcA0AAAAAAAAAAACgwqYK96VWrVrpWxVUzs7OdhlQPnjwYL51vXHq1Cnp1auXDjR36dJFPv30U+co75JwpDTPyckR03w0gB4AAAAAAAAAAAAAKrYOHTroYLBKB75x40aX66xbt07fqsCzN86ePStXX321/PTTTzq1+RdffCHR0dGlqu/OnTudc2GbRuAaAAAAAAAAAAAAQMAJsVh8svhSTEyMnntaSUxMLPS8Gim9atUqfX/w4MEe7/ePP/6QPn36yI4dO+Qvf/mLfPnll1K1atVS1TUlJUVef/11fV/t2zQC1wAABIDhw4eL3W73aH5rAAAAAID/o50HAEDwmjp1qlgsFpk/f74OXqtjvpKUlCRDhgwRm80mAwcOlHbt2uXbrkmTJnpZtGhRvsfT0tLk2muvlc2bN0ubNm3kq6++kpo1a3pUl7Fjx8qSJUv0CPC89uzZI/369ZNDhw7pUdsPPfSQmMYc1wAAAAAAAAAAAAACjj/Oca106tRJZs2aJQ888ICMGzdOZsyYIbVq1ZLdu3frAHLr1q3lzTffLLTdkSNHnCOh83r11Vdl/fr1zr8HDRrktmwV9I6NjXX+rdKVq7LUXNstWrSQKlWqyG+//eacZ7t69erywQcf6IC5aQERuFa9EL777jv55JNP9Iei8rar4fDVqlXTeeKHDRsmt99+u+65AAAAAADwb7TxAAAAAAC+EOLHuaUnTZokF110kcycOVO+//57OXXqlDRu3FinB58yZYpXc1Nn5hktrUZKFyUjIyPf36qsZcuW6dHaJ06c0KnKK1WqJB07dpT+/fvL+PHj8wW6TbLYHWPT/Zga7t67d2/n382aNdPRfzV0XU1Crqjh8YsXL5aIiAiflp2Sli4mhIpNTLFZQoyVHXFqr7Gy02q3MlZ2mMHP224183nn2sz99ITlmPleK/bQiAr3WeuyDR5pKkVFmis8gKSl5z+ZKU+hGb8bK9seXtlY2baQMCPlWnOzxZRcq5nXrPyWnmOs7Ogwcy2oypJlrOzcUHO/vzZDB570HHMHvNpVKkkgMdnGy0g3dy5oyTZUtsHzwLBT+42VnRH3F6mITPb3sJg68beba1NnG5yFL8zgZ23wkCfRlaLMFR5Ass6dMFa2JcvcsdbkMc965s8RckZExhgr+nT1lkbK3X06TUzpWs/cubc187yxslNCqxgrOyY1yVjZISmnjZRrbXGZkXID0bbjvrm22L5+NZ/sB6Xjx/0Q/kfF1ps2baqHwZ88eVIPXd+0aZOcOXNG5s2bpy9kLF26VJ544gnTVQUAAAAAFIM2HgAAAADAV6nCfbHAPwRE4Lpz586yd+9emTBhgtSpUyffc3fddZfzYsacOXP0ZOYAAAAAAP9FGw8AAAAAAARk4FpNEq4mDHdH5V9XVEq506fNpG0AAAAAAHiGNh4AAAAAwBfUYGlfLPAPARG4Lk56njnKoqKY5wYAAAAAAhltPAAAAAAAKp5QCQILFizQt+3atdM99wEAAAAAgYs2HgAAAADAE1ZhuHQwCfjA9ebNm+WNN97Q9x999FHT1QEAAAAAlAJtPAAAAACAp0jzHVwCOlX4yZMnZdCgQZKTkyM33nij3HbbbaarBAAAAAAoIdp4AAAAAABUXAE74jo5OVn69+8vR48elY4dO8rcuXM92i4hIUESExM9LmfY8OEyctToUtQUAAAAAOAvbbwRw4fL
6NG08QAAAAAgGFgZcR1UAjJwnZKSIv369ZOtW7dK27ZtZfny5R7Pe5aUlCRbtmzxuKy+/fqVoqYAAASGrFybsbJTrDHGyrblGCtaqtsyjJS74ZS5F92lZrqxsmNtWcbKzgypaazsg3+YS7BUKyrXWNkxESFGyq1iNfd/FujKs43XnzYeAKACsIdXNlZ2cohnx/CyEBlqLnoREWLuUvvYZSeMlT3iUjPtvG6HPxNT7PVuMVb2fSvMfdYz+pq7fmPNOG+sbHtOtrGygYoo4ALXaWlpcu2118qGDRukZcuWsnLlSqlZ0/OLgXFxcRIfH+/x+rGxsSWsKQAAAACgOLTxAAAAAAAlxRzXwSWgAtcZGRkyYMAAWbt2rTRu3Fi++uorry86jBs3Ti+eSkkzNzIIAAAAAIKZiTZeRjptPAAAAAAA/FHABK6zs7Plpptu0hcy6tevL6tWrZKGDRuarhYAAAAAoARo4wEAAAAASssqDLkOJgERuM7NzZXbb79dPv/8c937Xl3QaNasmelqAQAAAABKgDYeAAAAAMAXSBUeXAIicP3BBx/IokWL9P3IyEgZOXKk23Vnz54tHTp0KMfaAQAAAAC8QRsPAAAAAAAEZOA6MzPTef/w4cN6cSc5ObmcagUAAAAAKAnaeAAAAAAAX7Ay4jqoWEu7A9UzXi2HDh2SsjJ8+HCx2+0eLT179iyzegAAAAAASo82HgAAAAAA8PmI63nz5kloaKi89dZbpd0VAAAAAAAAAAAAAHiEAdfBpdQjruvUqSOVKlUSC7OfAwDgE3//+9/1cbWo5Y033jBdTQAAAACAB2jjAQAAlNOI686dO8unn34qx48fl/r165d2dwAAIE/nsJYtW7p8Li4urtzrAwAAAAAoOdp4AAD4npWBtUGl1IHriRMn6sD1tGnTZM6cOb6pFQAAkP79+8vcuXNNVwMAAAAA4AO08QAA8D3i1sGl1KnCr7zySnn55Zfl3XfflVtuuUW2bNnim5oBAAAAAAAAAAAAACqEUo+4btasmb4NCwuTxYsX6yUqKkpq1qwpISEhLrdR87YcPHiwtEUDAAAAAAAAAAAAqKBKPUIXwRW4Pnz4cKHH0tLS9OKOClwDAICibd++XW6//XY5ceKExMTEyMUXXyy33XabtG3b1nTVAAAAAABeoo0HAABQxoHrd955p7S7AAAALmzbtk0vDp988ok8/fTTMnHiRHnppZfcZjYBAAAAAPgf2ngAAPgeg2WDS6kD18OGDfNNTQAAgFavXj156qmnpG/fvnpKDtUTf9++ffL666/LG2+8Ia+88oqeouOFF14wXVUAAAAAQDFo4wEAAJRT4BoAAPjW2LFjCz120UUXyb/+9S9p2rSpPPLII/Lyyy/LvffeK02aNDFSRwAAAACAZ2jjAQBQdqwMuA4qBK4BAAggDz74oLz66qvy66+/6rRyEyZMcLleQkKCJCYmerzfO4cNlxEjR/mwpgAAAAAAU228EcOGyphRI31YUwAA/BOZwoOLzwLXv/zyi8yaNUuWL18uR44ckYyMDMnJyXE+f+7cOd2LUOWaf+ihhyQ0lJg5AADeUnOeXXrppfLhhx/K/v373a6XlJQkW7Zs8Xi/V/ft56MaAgAAAABMt/H69+3joxoCAACUH59Ej1esWCG33HKL/PHHH2K3211Ohl69enX56KOPZPPmzdK2bVsZMGCAL4oGAKDCCQ8P17d5O4gVFBcXJ/Hx8R7vs25srE/qBgAAAAAw38aLpY0HAKggrKYrAP8KXB87dkwGDx4s58+f18HooUOHypgxY+T3338vtO7IkSNl06ZNsnTpUgLXAACU0M6dO/VtgwYN3K4zbtw4vXjq95Q0n9QNAAAAAGC+jZeZet4ndQMAAAiojggzZ87UQWs14lqNqB40aJCzl2BBffv21bc//PBDaYsFAKBCUp2/du3ape/36UPqNwAAAAAIZLTxAAAoHZUB2hcLgiRwrea0Vh/o9OnTi123adOm
EhERIYcOHSptsQAABCV1wUL1ot++fXu+x202myxYsEBuv/12/fd1110nnTp1MlRLAAAAAIAnaOMBAACUY6rwo0ePSlRUlLRs2dKj9aOjoyU5Obm0xQIAEJSys7MlMTFRLzVq1JDGjRtLaGioHDhwQM6dO6fX6dGjh8yfP990VQEAAAAAxaCNBwBA2bIyWDqolDpwbbVaJTc316N1c3Jy5I8//pAqVaqUtlgAAIJSkyZNZMaMGfLdd9/JTz/9pC9mZGRk6Asc/fv3173xhwwZIiEhIaarCgAAAAAoBm08AADKFnHr4FLqwLXqJahOutTI60aNGhW57tq1a3UvQ09HZwMAUNFUq1ZN/va3v5muBgAAAADAB2jjAQAAlOMc171799a3b7zxRpHrqYC1OklT82Gr3oQAAAAAAAAAAAAAUJpU4b5YECSB68mTJ0t4eLjMnDlT3nrrLZfrbNmyRQe4v//+e4mJiZF77723tMUCAAAAAAAAAAAAAIKE1RepwufMmaPnuR47dqzUrVtXzp07p5/r2rWr1K9fXzp16iTr1q2T0NBQmTdvntSqVcsXdQcAAAAAAAAAAABQQalMz75YECSBa+WOO+6QL774Qpo3by6nT5+WrKwssdvtsmHDBklKStL3W7RoIcuWLZMBAwb4okgAAAAAAAAAAAAA8FurV6+W6667TmrXri1RUVHSpk0bmTp1qqSmppZ4n4sXL5Yrr7xSqlevLpUrV5b27dvLSy+9pKdtLsqpU6dk4sSJ0qxZM4mMjJTY2Fi59dZbZdu2beIvQn21o6uvvlr27t0ra9eulW+++UZ+/fVXPQpbvehu3brpNzAkJMRXxQEAAAAAAAAAAACowPx5furZs2frQLEa4NugQQNp2LCh7N69W2bMmKGDz+vXr5caNWp4tc+//vWvevpmRQ0oVoHrnTt3ykMPPSSffvqpfPnllxIREVFouwMHDkj37t3l5MmTepu2bdvKL7/8Ih988IF89NFHsnDhQr8YfOyzwLWihtJfccUVegEAAAAAAAAAAACAsuKvcevNmzfLpEmT9P2EhAQZM2aMjqOqgb8qQKyeV4+pALanPvzwQx20VoFpFXB2BJr37Nkj11xzjR5c/NhjjzkD2w4qcH7zzTfroHW/fv3k/fffl6pVq0pOTo489dRTMn36dJ1de9++fRIXFycBnSr88OHDvqkJAAAAAAAAAAAAAAQ4FQy22Wxy1113ydixY53zaNerV08WLFggVqtVlixZIjt27PB4n08++aS+feSRR/KNjlbpx+fMmaPv//Of/9TTOuf18ccf63TgKlj93nvv6VslNDRUB64vv/xySUlJ0enGTSt14FrNXd2/f389jFylBgcAAAAAAAAAAACAsma1WHyy+JIKAi9btkzfV0Hrglq2bCm9evXS91WKbk/s379ftm/f7nafan8qZpuZmSmffPJJvuccZahR12pe7IIc+1OjuAM+cK16C6h86TfddJPOza4mFD9y5IhvagcAAAAAAAAAAAAAAWLr1q06gKxSenfu3NnlOj169NC3GzZs8GifG/67XrNmzaR+/fpe7dPxtxpZXdR2as7r48ePS0AHrleuXKkj9GFhYXLixAl55pln9GTgKpc6o7ABAAAAAAAAAAAAlAU1WNoXiy+puaKVRo0a6fipKyqWquzdu9erfTb/73ae7jMrK8s57bO7bdXA5PDwcK/qU1ZCS7sDNfRcLWfOnJG5c+fKW2+9pScBV0Pgly9fLrGxsTJy5EgZPXq0NG7c2De1BgAAAAAAAAAAAAAfSEhIkMTERK+2USm2x40bV+jxs2fP6tsaNWq43dbx3Llz5zwq62wJ95mcnKyzZxe1rZp/u1q1anLq1CmP6+O3gWuHmjVryoMPPqiX9evX6w948eLFkpSUpEdhP/vss3L11VfrD/D666+XkJAQCQRh9hwj5WZbfPbReC0sN9NY2em1WxkrO9Tq4y41XsjIMVd2qNiNlJtrplgtxR5h
rOzo0ie6KLGs7D8PTiaEGPx+wf8/o6q2DGNlJxv8PbDYzJxjxMdVFlOSc8z9+FeubO73N8zQsVZpEZVlrGxbuLn/tRybmfc8U1z3oi4P5n7NApDd3DlRblglI+X6evSAN1Jj21bI31+DhzyxGyzb0M+vhBu8xhRi8P3OyKWNB/dsIebOS15cddBY2dN7uk6dWi7+GxQwYUrvFsbKbhphps1hr3OjmPJLqrnP+sk+LY2VbfDSomTFtjFWtjUrzUi5gRFB8w8WH50Aq3jmli1bvN7GlYyMP693OkYxu6LSiCvp6ekelZVRwn06tvN1fcpKmURHu3fvrpfZs2fL/PnzZc6cOfLjjz/qubDV4hiFPWbMGD1MHgAAAAAAAAAAAABMdE6Oi4uT+Ph4r7dxJTIy0pmm2x01B7YSFRXlUVmRJdynYztf16eslOmwXjWs/P7779cjre+++25Zu3atftwxCvu5557T82Or0dikEQcAAAAAAAAAAABQ3lTGaFdpv0uievXq+dJ7u+J4zrFuWe2zatWqYrVadbpwd9va7Xb5/fffvapPWSmznIkqav9///d/csUVV0jbtm1l3bp1+nEVoJ48ebJ+LDc3V/7zn/9I+/btZfv27WVVFQAAAAAAAAAAAABBxmK3+WTxpVat/pwW9+jRo5Kdne1ynYMHD+Zb19N9HjhwwO06rvap0oM7Bg+72/bYsWPO0die1idgAte7du2SSZMmSb169WTYsGE6YK0m9b7mmmvk008/lZ9//llmzpwpO3bskFWrVslFF12kJwZ/5JFHfF0VAAAAAAAAAAAAACg3HTp00AFjlX5748aNLtdxDPjt0qWLR/u87LLL9O2hQ4fk+PHjXu3Tsa3jeXfbNWjQQC8BH7hWE3u/++670q1bN7n44ov13NZquHmdOnXkscce08FqFbS+9tprdRDboWfPnrJ8+XIJDQ11+8EBAAAAAAAAAAAAQCFqtLQvFh+KiYmRvn376vuJiYmFnt+/f78e3KsMHjzYo322atVKDwZ2t0+1PzWiWgXMBwwYkO85RxkLFy6Uc+fOFdrWsT81vbNppQ5c33fffXry8ZEjR8p3332n86CrgLRKAa6Gls+YMUMaNWrkdvu6detKbGysHnUNAAAAAAAAAAAAAIFs6tSpejDv/PnzdWBYxU+VpKQkGTJkiJ5zeuDAgdKuXbt82zVp0kQvixYtKrTPadOm6dvnn39eDxh22Lt3r4wePVrfv/fee6V27dr5tlPlqIHHKhZ7xx13OGOyakrnJ554QtauXSuVKlWSv/71r2JaaGl38Prrrzsn61apwe+++26v85937dpVTp48WdqqAAAAAAAAAAAAAKgo/hsQ9jedOnWSWbNmyQMPPCDjxo3TA31r1aolu3fv1inEW7duLW+++Wah7Y4cOaJvU1JSCj1300036emaX3nlFT2qunnz5hIdHS07d+7UQeju3bvLs88+W2g7q9WqR1v36NFDvvjiC6lfv760adNGD0A+deqUhIWFyf/93//paaADfsT1pZdeKu+8847Op64+gJJM2v3+++/L6tWrS1sVAACCzueffy6DBg3SJw0RERE6S4mamuPxxx+XnJwc09UDAAAAAHiBNh4AAMGfKtxBBZlXrFgh/fv3l9TUVB20bty4sZ5medOmTTqQ7a2XX35ZPvjgA7niiivkt99+k3379smFF16oR2GrdOGRkZEut1Px2x07duhM2mpE9o8//uhMI/7999/LjTfeKP7AYneMTYdLmannjZSbbSn1YPgSC7NlGSs72xpurOwQ6//mXy9vmTll86PoiVBDrzvX4C9PusH3Ozqs1P2FSizL4Jtu8vtVtXKUBCJ1wWLEiBG6p5vSsGFDfUHjzJkz8ssvv0hWVpacP39e96jzhfNp6WJKeI65spPtEcbKriZmXndmWGUxJSPH3O9QZYO/v1Yx97qtWanGyraFm/tfM/WvlmMz91lXi65krOxAk5Fm8HthCTFSrsXc
qZjR70WYyddtsL1j8iKPqY87PMTch23yqlpWrq1CtvFiKtHG80R6RoaYMm3FQWNlT+9Z31jZlszCI+PKyyFbFWNlN40wcy3ZbjV3/fxYhplzOtNtW4OndVIjytx7bs1KM1JuRJUaRsoNRJl/nPXJfnjP/YO5X3cAAODWPffcoy9oqJQyCQkJ0qFDB+dzaWlpsnLlSt07HwAAAADg/2jjAQBQNixlNFoaZhC4BgDAz6jpM+bMmSNNmjSRr776SmJiYvI9X6lSJT2HCQAAAADA/9HGAwAA8IzP8kps375dxo4dq/OoV6lSRUJCQtwuoaHEywEAcGfmzJn69sEHHyx0QQMAAAAAEFho4wEAUDHnuIb3fBJB/sc//iEPPPCA5ObmClNmAwBQchkZGfLll1/q+71795bdu3dLYmKivlVp41Q6uVGjRknjxo1NVxUAAAAAUAzaeAAAAOUYuP7+++9l4sSJ+v69994r1157rVxzzTVSo0YN+eCDD+TEiRN6jpb33ntPj8R+7bXXJC4urrTFAgAQlFQGk+zsbH1/3bp1ct9990lWVpbz+c8++0xeeOEFeeedd2TIkCEGawoAAAAAKA5tPAAAyhijpYNKqVOFq0C0GmWtgtezZ8+Wfv366cfDw8OlV69ecvvtt8vbb78tGzZsEIvFIlOnTpX4+Hhf1B0AgKCTlJTkvD9+/Hjd+37jxo2SmZkp+/fvl1tuuUXfHzZsmGzdutVoXQEAAAAARaONBwBAGSNVeFApdeD6m2++0QFpx6hrh4Ipw9u3b68D2wcPHpQXX3yxtMUCABCUUlJSnPcrVaokX3zxhXTq1El3CGvRooUsWLBAH1NVj/2nn37aaF0BAAAAAEWjjQcAAFCOqcJPnjyp52PJOw+L1WrV87cUdOONN0pYWJgsWbJEnnrqqdIWDQBA0ImMjHTeHz58uFSvXj3f8+oYO3nyZN0bX82TZrPZ9GMFJSQk6HnTPDV0+HAZOWp0KWsPAAAAAPCHNt7wESNk9GjaeACACsDGaOlgUurAteopqEZc5xUTEyN//PGHTnOjgtoOKmit1j9y5EhpiwUAICjlvYhxwQUXuFzH8fj58+flzJkzUrt2bZfp6LZs2eJxuX3+O9UHAAAAACDw23j9+vcvUX0BAAACOnBdv3592bNnj+Tk5Eho6J+7a968uZ6T5YcffpDu3bs71/31118lOTlZB68BAEBhbdq0cd5XqeOK67GveuO7EhcXJ/Hx8R6XGxsb61U9AQAAAADFo40HAEDZsjA/dVApdeBa9QjctWuX/Pjjj9KhQwf9WM+ePXUPQJUO/JNPPtEnX1lZWTJhwgT9/EUXXVT6mgMAEIRUhzA1/YbKTvLzzz+7XOfgwYP6Vh1fa9as6XKdcePG6cVT59PSS1hjAAAAAIC/tfHSXUzjCAAA4O8KT5jipT59+ojdbpdPP/3U+dj48eN1ivCvvvpKGjRoIN26ddMnaR9++KFOK37fffeVtlgAAILWrbfeqm///e9/64wmBb399tv69oorrnBmOwEAAAAA+CfaeAAAlCE14toXC4IjcH3TTTfJtGnTpF69es7HmjZtKu+9956e6/rs2bPy3Xff6flZVND64YcfljvuuKO0xQIAELT++te/StWqVeXQoUO6s1fGf3vKq45ir732mu4spo6pjz76qOmqAgAAAACKQRsPAIAyZLf7ZoFfsNjVGVIZUUHrzz//XI4dO6ZPztTo7BYtWkggyUw9b6TcbIu53pVhtixjZWdbXc/1Ux5CrBZjZWfmmOvNE2rodecaPA6kG3y/o8NK3V+oxLIMvukmv19VK0dJIFq5cqUMGDBA0tPT9TG0VatW8ssvv0hSUpK+oPHCCy/oix++YjJVeHiOubKT7RHGyq4mZl53ZlhlMSUjx9zvUGWDv79WMfe6rVmpxsq2hZv7XzP1r5ZjM/dZV4uuZKzsQJORZvB7YQkxUq7F3KmY0e9FmMnXbbC9Y/KSm6mPOzzE3Idt
8hpnVq6tQrbxYirRxvP3VOHTVvyZ+tyE6T3rGyvbkplirOxDtirGym4aYeZast1q7vr5sQwz53Sm27YGT+ukRpS599yalWak3IgqNYyUG4iyTx7yyX7C6jb1yX5QOmX6616jRg258847nX8nJydLfHy8PhnbvHlzWRYNAEBA6927t2zfvl2eeeYZfYFj27Zt+uKGutDxwAMP6BRyAAAAAIDAQBsPAIAyQprvoFKu3ZLUHC7qpEwFrgEAQNFatmwp77zzjulqAAAAAAB8gDYeAABA0czl0wAAAAAAAAAAAACAErIw4jqomJsQwUsLFy6UsWPHyiWXXCL16tWTiIgIiYmJ0anHp06dKmfOnDFdRQAAAACAF2jnAQAAAACAgBtx/fTTT+t5YNSFjLi4OLn44ovl1KlTsnXrVr0kJibKl19+Ke3atTNdVQAAAACAB2jnAQAAAABKhRHXQSVgRlyPHz9evv76azl//rwcOnRIfvjhBzly5Ijs2LFD/vKXv+iLG7fffrvpagIAAAAAPEQ7DwAAAAAABFzgesyYMXL55ZdLWFhYvscvuugieeutt/T93bt3y08//WSohgAAAAAAb9DOAwAAAACUesS1Lxb4hYBJFV6UCy64wHk/LS3NaF0AAAAAAKVHOw8AAAAAUCyCzkElYEZcF2X9+vX6Njo6Wlq3bm26OgAAAACAUqKdBwAAAABAxeL1iOuQkBDxBzabTU6cOCFffvmlPPLII/qx5557Tl/UAAAAAAAEHtp5AAAAAABvWBhxXbED13a7XUx65ZVXZPLkyfke69y5s7z77rvSr18/Y/UCAAAAAJQM7TwAAAAAAOB14HratGliUv369aVbt26Sk5MjR48e1b3xt23bJvPmzZPLLrtMqlWrZrR+AAAAAADv0M4DAAAAAJSIjRHXwcRiNz2EupR27Ngh9913n6xbt07at28vmzZtKjKdeUJCgiQmJnq8/xHDhsqYUSOlvGVbvO5T4DNhtixjZWdbw42VHWK1GCs7M8fcD2uoodeda/CXJ93g+x0dZjVWdpbBN93k96tq5ShjZQeS82npxsoOzzFXdrI9wljZ1cTM684MqyymZOSY+x2qbPD31yrmXrc1K9VY2bZwc/9rpv7VcmzmPutq0ZUkmHjTzvO6jTd8mIweNUpMsFnMTL1lMXcqZvR7EWbydRts75i8yGPq4w4PMfdhm7yqlpVrq5BtvJhKtPE8kZ6RYazsaSsOGit7es/6xsq2ZKYYK/uQrYqxsptGmLmWbLeau35+LCOkQrZtDZ7WSY0oc++5NSvNSLkRVWoYKTcQ5R7Z7pP9hDRu55P9oHTM/br7yMUXXyxLly6VZs2a6R7577//vtxxxx1u109KSpItW7Z4vP/+ffv4qKYAAPgvk8Hj7FBzF55CDHboSLeaCeplGbySHhVqqZAXdqPs5joFpoeaCx6nZ5l7z8MMXUyPyflDzAmuwLU37Tyv23j9+vqwpgAA+Kfwwz8YK3v6lRcbKzvL4KCYlNAwY2XXNdjWSp3/jJFyw4aZywybnmOujVe7krmQTnJmrrGyLQZ7ip3INvO70thIqQEqsMfnItgC10pMTIxcccUVsnjxYtm8eXORgeu4uDiJj4/3eN+xsbE+qiUAAAAAwNftPNp4AAAAAFCB2UkVHkyCInCtqLnQ8t66M27cOL14KjP1fKnrBgAAAAAom3aet228jDRzKfQBAAAAAECQB67Pnj0ra9as0fc7dOhgujoAAAAAgFKinQcAAAAAKI6FEddBxSoB4Ouvv5YZM2bI4cOHCz2n5jLr27evJCcnS/369eXmm282UkcAAAAAgOdo5wEAAAAAgIAbcX3u3DmZOnWqXtR8ZOrCRUhIiBw7dkySkpL0Ouqxzz77TKKjo01XFwAAAABQDNp5AAAAAIBSY8R1UAmIwHXXrl1l1qxZOk3crl27ZN++fZKRkSHVq1eXK6+8Uq6//noZPXq0xMTEmK4qAAAAAMADtPMAAAAA
AEDABa7r1KkjkydP1gsAAAAAIPDRzgMAAAAAlBojroNKQMxxDQBARaHm+bRYLB4tI0aMMF1dAAAAAEARaOMBAFDGbLm+WeAXAmLENQAAFUVkZKR069bN7fMqhermzZudKVYBAAAAAP6LNh4AAIDnCFwDAOBHYmNjZf369W6ff/fdd2X48OESFRUlt956a7nWDQAAAADgHdp4AACULbuNVOHBhFThAAAEkLlz5+rbQYMGSZUqVUxXBwAAAABQCrTxAACAJ7Zu3ao7ualOcSqjS7NmzWTixIly+vRpr/dlt9vl22+/lUcffVS6d+8uNWvWlLCwMKldu7b06dNH/v3vf+t13Clu+hNVx5JixDUAAAE0N9rXX3+t76se+QAAAACAwEUbDwAAH6gA81MvWbJEbrvtNsnOzpY6depI27ZtZe/evfLaa6/JwoULdXYXFcj21KpVq6R3797Ov9W2TZs2lUOHDsmKFSv0smDBAlm8eLFERES43c8ll1zi8nkVCC8pRlwDABAgVAo51dOtUaNG0qtXL9PVAQAAAACUAm08AABQnOPHj8tdd92lg9ZTp07Vf2/evFnf9uvXT5KSkvRI7KJGSBek1lWB6ldffVVOnjwpBw8elE2bNsmZM2dk3rx5Ohi9dOlSeeKJJ4rcjyNoXnD5+OOPS/x6CVwDABAA1MmEuqihDB06VKxWDuEAAAAAEKho4wEA4MMR175Y/NSLL74oaWlpcvnll8tTTz0loaF/JtOuWrWqvPfee/pWBZ0/++wzj/fZuXNnPWJ7woQJegR3XipI7ghYz5kzR2zlPIc4Z0QAAAQAlT5OpWpRSCEHAAAAAIGNNh4AAL5hz831yeKvFi1apG/Hjh1b6Lnq1avLzTffrO9/8MEHHu+zSpUqek5rd/r3769vz549W6I5tEuDOa4BAAgAc+fO1bc9evSQ5s2bm64OAAAAAKAUaOMBAIDiHDt2TKcEV9SIa1fUuYQaGb1hwwaflZuenu68HxUV5Xa96dOny6+//io5OTlSv359PfWJSlte1LzYxSFwDQCAn0tJSXH2rKMnPgAAAAAENtp4AAD4UDmnsi5P+/bt07fh4eHSoEEDl+s4OsD9/PPPeh7sokZSe2rBggX6tl27dnp0tjtvv/12vr/VNCjTpk2TxYsXS3x8fInKJnANAICfUxc0UlNTpVKlSs7UL8VJSEiQxMREj8sYcdcdMmYkF0wAAAAAIBjaeKMHXCljb72hFLUEAKBi8fZY60jfPW7cuDKr09mzZ50pwS0Wi8t1atSooW/VXNR//PGH1KxZs1Rlbt68Wd544w19/9FHH3W5zg033KDnwlaBbRVQV53yVq5cKX/72990AL1Pnz6ydetWadiwodflE7gGACBAUsgNHjxYYmJiPNomKSlJtmzZ4nEZ/fv0LnH9AAAAAAD+1cZL6nJxiesHAEBAsflmfmpvj7WObcpSRkaGc8S1O3nTcudN8V0SJ0+elEGDBunU3zfeeKPcdtttLtf76KOP8v0dGRmp1+3du7d07NhRjh49Kk8++aROYe4tAtcAAPixQ4cOydq1a71OIRcXF+dVOpbYunVLVD8AAAAAgP+18eJql260FQAAFY23x1rHNu5MmjRJXn31Va/rccUVV8iaNWucAWElKyvL7fqZmZkezUddnOTkZOnfv78OOqvgs6OjnTdq1aolU6ZMkXvuuUc+/PBDefPNN92OFHeHwDUAAH5MzQtit9ulSZMm0rNnT4+3UylqvElTk/nHn2lnAAAAAACB38bL3bOuhDUEACCw2H004trbY21xoqOjS5S2u2rVqs77KkW4cu7cOX3+4CoI7EgnbrVai5yPuigq1Xe/fv10eu+2bdvK8uXLS7yvrl27OuulFm/fAwLXAAD4KXUyMm/ePH1/2LBhXvdOAwAAAAD4D9p4AACUAZtN/NGMGTP0UhqtWrVyjrg+duyYNGrUqNA6Bw8e1LdNmzaVsLAwr8tIS0uTa6+9VjZs2CAt
W7bUc1WXZp7svGnNVcpxb1lLXDIAAChTX3/9tU4jpy5mqIsaAAAAAIDARRsPAAB4o1GjRlKvXj19f90619lUHI936dKlRHNoDxgwQE9j0rhxY/nqq68kNja2VHXeuXOnM815SQLgBK4BAPBTjnlELr/8ct1jDgAAAAAQuGjjAQBQNqnCfbH4q5tuuknfJiYmFnpOpRBfuHChvn/zzTd7td/s7Gy9bxWsrl+/vqxatUoaNmxYqrqqEdYzZ87U93v16iWhod4n/iZwDQCAH1/UUKnk1qxZY7oqAAAAAIBSoo0HAAC89dBDD0lUVJQeFf3EE09Ibu6fQfbk5GS5/fbb9W2HDh3k+uuvL7Rt9+7dpUmTJvLKK6/ke1ztQ237+eef6xHWKmjdrFkzj+rz6KOPyrvvvivnz5/P97hKZT548GCdclwFrFVdS4I5rgEAAAAAAAAAAAAEHj8eLe0LDRs2lHnz5smQIUNk+vTpkpCQoB/bs2ePpKamSt26deWDDz7QU5EU9Msvv8iRI0fk999/z/e4Wn/RokXOlN4jR450W/7s2bN1YNxBlfv888/LqFGjdLC7Ro0aOni+d+9e3UFP7W/OnDly6aWXluj1ErgGAAAAAAAAAAAAAD80ePBgHSR+9tln9cjrH3/8Uc99PWLECJk6darUqVPHq/1lZmY67x8+fFgv7qigdF733HOPHqW9adMmOX78uN42IiJC2rZtK71795b77rtPmjdvLiVF4BoAAAAAAAAAAABA4LHZpCKIj493zmftKXcB6eHDh+ulJPr27auXskLgGgAAAAAAAAAAAEDAsf93zmcEB6vpCgAAAAAAAAAAAAAAKjZGXAMAAAAAAAAAAAAIPDZGXAcTRlwDAAAAAAAAAAAAAIxixDUAAAAAAAAAAACAwMOI66DCiGsAAAAAAAAAAAAAgFGMuAYAAAAAAAAAAAAQcOw2m+kqwIcIXAMAAAAAAAAAAAAIPKQKDyoErouRnGMmm3pMhEVMSc4292+RkWXuB6ZymLnM+eEh5j7v9By7kXINvt1Sw55qrOxMiTFWtplP+k+R9iyDpUcZLDtwnMwJN1Z2XPoJY2WHVa5hrOxsi5n3vEqIuWOtzRpmrOyI1NPGyj4ZYu7/LDzEXK/jKuEhxsoOyU4zUm5uVDUj5cI7GTZz594Wi5kzMrvd3JmgzeRJqMF2Vq7BF55tcMCJqbatNTtDTMkOiZCKKCvX5Jcbnvg+8kJjZcdmmLvo0yDM3DWfyMzzxsq2WasaK/vY4L8ZKbfZL1vElLbGShbJDW1orOycyFrGyl595A9jZV8Va+qcMtpQuYBZBK4BAAAAAAAAAAAABB5GXAcVg2MeAQAAAAAAAAAAAABgxDUAAAAAAAAAAACAAGS3GZwrBz7HiGsAAAAAAAAAAAAAgFGMuAYAAAAAAAAAAAAQeJjjOqgQuAYAAAAAAAAAAAAQeAhcBxVShQMAAAAAAAAAAAAAjGLENQAAAAAAAAAAAICAY89lxHUwYcQ1AAB+6MyZM/LYY4/JxRdfLNHR0RIeHi4NGjSQW265RdavX2+6egAAAAAAL9DGAwAAKB6BawAA/Mz+/fvloosukmeffVZ27doldevWlbZt28off/whCxculMsvv1xefvll09UEAAAAAHiANh4AAGXIZvPNAr9A4BoAAD9z9913S1JSkrRs2VJ+/PFHOXjwoGzdulVOnTolDz74oNjtdnn44Yf1xQ8AAAAAgH+jjQcAAOAZAtcAAPiR8+fPy+rVq/X9F198US688ELnc5GRkfqxFi1aSE5OjixfvtxgTQEAAAAAxaGNBwBAGbPl+maBXyBwDQCAH8nMzNS97ZXmzZsXet5isTgfz87OLvf6AQAAAAA8RxsPAICyZbfl+mSBfyBwDQCAH6lVq5Y0aNBA3//2228LPZ+amirbtm3T9zt37lzu9QMAAAAAeI42HgAAgOcIXAMA4Geee+453ev+oYcekjlz5siJEyckLS1NNm7c
KAMGDJCTJ0/KnXfeKd26dTNdVQAAAABAMWjjAQBQduw2m08W+IdQ0xUAAAD53XHHHVK1alWZMWOGjBkzJt9zcXFx8q9//UvGjRtnrH4AAAAAAM/RxgMAAPAMI64BAPBDBw4ckFOnTonVapUmTZrIxRdfLJUqVZKkpCSZO3eu7Nq1y3QVAQAAAAAeoo0HAEDZsOfafLLAPzDiGgAAPzN+/Hh5/fXXpVOnTrJs2TJp1aqVfjw9PV2mTZsmL774ok4ht2PHDmncuLHLfSQkJEhiYqLHZQ6+Y5jcMXykz14DAAAAAMBcG++qm26XgbcP99lrAAAAKA8ErgEA8CPqQoVKExcWFiYLFy7Md9EiKipKXnjhBdmyZYt89dVX8uyzz8obb7zhcj+q175az1OX9+7rk/oDAAAAAMy38dr3uMon9QcAwN8xWjq4ELgGAMCPrF+/Xux2u7Rs2dJtT/s+ffroixqbNm1yux81T1p8fLzH5dapG1ui+gIAAAAA/K+NV7NO3RLVFwCAQGO3EbgOJgSuAQDwI+fPn/d43YyMDLfPjRs3Ti+eOno2xeN1AQAAAAD+3cb79vAZj9cFAADwF1bTFQAAAP/jmOts//79cuTIEZfrfPnll/q2devW5Vo3AAAAAIB3aOMBAFD2qcJ9scA/ELgGAMCPqBRxderUkezsbLn55ptl3759zufS09Pl4Ycf1inklKFDhxqsKQAAAACgOLTxAAAAPEeqcAAA/EjlypXl3//+twwcOFB++OEHueCCC/Q8aDExMXLgwAFJS0vT640fP15uuOEG09UFAAAAABSBNh4AAGWL0dLBhRHXAAD4md69e8uOHTvkvvvu02nlTpw4IT/99JNUrVpVX8j47LPP5B//+IfpagIAAAAAPEAbDwAAwDOMuAYAwA81a9ZMZs+ebboaAAAAAAAfoI0HAEDZsOXmmq4CfCigR1x//vnnYrFY9NKkSRPT1QEAAAAAlAJtPAAAAACAN+w2m08W+IeADVynpKTIPffcY7oaAAAAAAAfoI0HAAAAAEDFFrCB68cee0yOHj2q54EBAAAAAAQ22ngAAAAAAG/Zc20+WeAfAjJwvWHDBvnnP/+pL2gMHDjQdHUAAAAAAKVAGw8AAAAAAPe2bt0qt956q8TGxkpkZKQ0a9ZMJk6cKKdPn5aS+Pvf/+6cqsvd8sYbb7jdPjs7W1588UVp166dVK5cWapXry5XXnmlLFmyREojVAKMeiPGjBkjlSpVkn/84x+ycuVK01UCAAAAAJQQbTwAAAAAQElVhNHSS5Yskdtuu023n+vUqSNt27aVvXv3ymuvvSYLFy6U9evX60B2Saj9tWzZ0uVzcXFxLh/PyMiQq6++WpcbEhKi65Oamipr1qzRyyOPPCLPPfdcxQhcP/vss7Jz5055+eWXpUGDBqarAwAAAAAoBdp4AAAAAAC4dvz4cbnrrrt00Hrq1KnyxBNPSGhoqCQnJ+tg9rJly/RI7I0bN+pR0t7q37+/zJ0716ttVGBaBa2bNm0qX3zxhbRu3Vo//sknn8gtt9wizz//vHTr1k2uv/764E4V/tNPP8kzzzwj8fHxcv/995uuDgAAAACgFGjjAQAAAABKw26z+WTxVy+++KKkpaXJ5ZdfLk899ZQOWitVq1aV9957T99u2rRJPvvss3Kpz8mTJ50pxN966y1n0FoZMGCAPPzww85U5CURMIFru92u08epHgUJCQl66DkAAAAAIDDRxgMAAAAAlJYt1+aTxV8tWrRI344dO7bQc2pe6Ztvvlnf/+CDD8qlPmpUdVZWlk4vrua0LmjcuHH6dsuWLXLw4MHgDVz/61//km+++Ubuu+8+ueSSS0xXBwAAAABQCrTxAAAAAABw79ixYzpVuKJGXLvSo0cPfbthw4YSlbF9+3a5/fbbpVevXnLDDTfodOS7du1yu76jHEe5BdWvX1+nEC9pnQJijmv1oUyZMkW/2BkzZpiuDgAAAACgFGjjAQAAAAB8
we7Ho6VLa9++ffo2PDxcGjRo4HKd5s2b69uff/5ZZzQLCwvzqoxt27bpJe+I6qefflomTpwoL730UqHsaI46Ocp1V6dDhw7J3r17JSgD12qusz/++EPeeecdiYmJKdW+VAq6xMREj9cfctcwGTpiVKnKBAAAAAD4RxvvzmHDZcRI2ngAAAAAgJK3LR3pux2pscvC2bNnnSnBLRaLy3Vq1Kihb202m25n16xZ06N916tXT8+Z3bdvX2nWrJlum6ug9Ouvv67nsH7llVd0EPyFF15wWSdHuUXV6dy5cxKUgWuVB12599579ZJXenq6c7h8bGysvr9kyRLp2rWry30lJSU59+eJXlf3LUXNAQAIDFUjzM0raouoZazsbIu5U6ENx1OMlNu9fiUxJSTjD2Nl78mpaqzsk7//eb5qwgW1zH3eoRm/GyvbFlG6QGhJpWWb6+UdFSkBxWQb7+q+/UpRcwAAAkPHWubaOulirn2ZbDN3/htdNdpY2WG/eT+HqK9UiWlipNzMRh3FlOPns42VXSPS3PerUpi5mWf7hh4yVna2tZWxslG+I669bVs6tilLGRkZzhHX7kRERBRqT3vC1ZzZF110kZ7WS6X6fuSRR+Tll1/WbfYmTZqUqE7e1CegAtcOJ0+edPuc6kngeF5NCu5OXFycxMfHe1xm3f9eKAEAAAAA+BZtPAAAAACAP/C2benYxp1JkybJq6++6nU9rrjiClmzZo2+HxkZWWybODMz03k/KipKfOHBBx/Udf/111916vAJEyY4n/OmTiWpT0AErg8fPuz2ublz58qIESOkcePGRa7noIbsezNs/1RyqsfrAgAAAAD8u433e0qax+sCAAAAAPyb3eabEdfeti2LEx0d7XHa7ryqVv1f1kCVItyRcttut7tMF+5I3W21WqVKlSriC2pe60svvVQ+/PBD2b9/f77nHHVylFtcivOgDFwDAAAAAAAAAAAAQFmkCve1GTNm6KU0WrVq5RzdrKbTatSoUaF1Dh78c7oIld5bzUntK45U4Dk5OYXq9M0338iBAwfcbuuok6P+3jA3KQEAAAAAAAAAAAAAoBAVqK5Xr56+v27dusIr5Hm8S5cu4ks7d+7Utw0aNMj3+GWXXaZv169f73K748ePy6FDh/Kt6w0C1wAAAAAAAAAAAAACcsS1LxZ/ddNNN+nbxMTEQs+pFOILFy7U92+++Waflbl06VLZtWuXvt+nT598z91www16ZLdKIb569epC2yYkJOjbDh06SIsWLSpe4Hr48OE6r7snc58BAAAAAPwbbTwAAAAAAP700EMPSVRUlKxdu1aeeOIJyc3N1Y8nJyfL7bffrm9VkPj666+Xgrp37y5NmjSRV155Jd/jKiit5vPevn17vsdtNpssWLBA71e57rrrpFOnTvnWqVu3rnMu8FGjRsnevXudz3366afywgsv6PvTpk2TkmCOawAAAAAAAAAAAAABRwVbg1nDhg1l3rx5MmTIEJk+fboe0awe27Nnj6SmpupA8gcffCAWi6XQtr/88oscOXJEfv/993yPZ2dn6xHcaqlRo4Y0btxYQkND9bzVahS30qNHD5k/f77LOqng9ObNm+W7776Ttm3byl/+8hdJSUlxzm394IMP6pHZJRHwI64BAAhG6qTjueeek/j4eImJiZHo6Ghp3769PinIysoyXT0AAAAAgBdo4wEAgJIaPHiwfP/99/pW+fHHH6V27dpy3333yY4dO7xOya1GYc+YMUOuvfZaqVatmg5Yb9u2TcLDw6V///46YK3SgKvnXFEjwNesWaPPbS688ELZt2+f/Pbbb3LFFVfIokWL5KWXXirxa7XYVQ42uHUqOdVIuTERIWLK+cw/0wyYkJFr7t+xcpi5fhzhIYV7wpSXjBwz77nBt1sq5aQYKzszPMZY2VkGv1/RlmxjZUdEV5VAc+rUKbnqqqtk586dYrVa9cE/JCRE/61SwVxyySWyatUqfbHDV5JT0322L29F
So6xsrMt5pLPbDhu5reoe/1KYoo1y8x5lbI3PcJY2SdTzF2IvKCWuc+7tpw3VrYtwszxNjXX3DldzRhzn3Wg+T0lzVjZrnrAlweTzX6bwSsOYQbbWbkGX3i2wQEnptq2kbZMMSU7xNw5Ro7B/zODzcuAPOaZaONlpiSLKekSViF/A6PDzV3sCvvtzxFuJpyNaVLhrp8fP2/uOleNSHOvO8rgBd2IX7YZKzu7Tisj5UZUqWGk3EB06K93+WQ/TV9yPboY5YsR1wAA+JmhQ4fqCxht2rTRc4SoHnSqx5tKtdKuXTvZtGmTjB8/3nQ1AQAAAAAeoI0HAEDZsefm+mSBfyBwDQCAH1EXMJYvX67vv/XWW/nSvKi5Rt59913dQ////u//9DwmAAAAAAD/RRsPAADAcwSuAQDwI+vXr9e39evXl65duxZ6XvXGV730VcrP//znPwZqCAAAAADwFG08AADKlt1m88kC/0DgGgAAP3L27FnnRQ13GjRooG+//fbbcqsXAAAAAMB7tPEAAAA8F+rFugAAoIxVq1ZN3x4/ftztOr/88ou+JY0cAAAAAPg32ngAAJQtey6jpYMJI64BAPAjnTp1cl7U2LBhg8v50fbu3avvnzt3rtzrBwAAAADwHG08AAAAzxG4BgDAj3Tu3Nl5YWP48OGyc+dO53P79++XO+64Q3Jzc/XfaWlpxuoJAAAAACgebTwAAMp+xLUvFvgHAtcAAPiZf//731KvXj3d6/7iiy+WFi1aSOvWraVNmzb6wsadd96p14uJiTFdVQAAAABAMWjjAQBQdmy5Np8s8A/McQ0AgJ9p2bKlbN26VZ5//nn55JNP5NixY1K5cmW58cYb5cknn5TPPvtMrxcbG+t2HwkJCZKYmOhxmXcNGy4jRo32Sf0BAAAAAGbbeCOG3SVjRo7wSf0BAADKC4FrAAD8UJ06dWTmzJl6KUhd7FAc6eZcSUpKki1btnhc3tV9+5WwpgAAAAAAf2vj9e97dQlrCgBAYLHbGC0dTAhcAwAQQLKzs2XZsmX6/g033OB2vbi4OImPj/d4v0X17AcAAAAAlA3aeAAAAP9D4BoAgACieuefPn1amjVrJgMGDHC73rhx4/TiqeTUdB/VEAAAAABguo2XmZLsoxoCAODf7MxPHVSspisAAADyW79+vSxfvlxyc3Odj6Wnp8uzzz4rf/vb3yQkJETmzJkjYWFhRusJAAAAACgebTwAAADPMOIaAAA/s2nTJpk8ebJUqlRJmjZtKuHh4bJ3715JS0vTj82dO1euvPJK09UEAAAAAHiANh4AAGXHnms3XQX4EIFrAAD8TM+ePWXEiBHy7bffytGjRyUnJ0caNmwo/fv31xc7GjdubLqKAAAAAAAP0cYDAKDs2EgVHlQIXAMA4Gfat28vb7/9tulqAAAAAAB8gDYeAACAZwhcAwAAAAAAAAAAAAg4dhupwoOJ1XQFAAAAAAAAAAAAAAAVGyOuAQAAAAAAAAAAAAQcWy4jroMJI64BAAAAAAAAAAAAAEYx4hoAAAAAAAAAAABAwLHn2kxXAT5E4BoAAAAAAAAAAABAwLGTKjyokCocAAAAAAAAAAAAAGAUI64BAAAAAAAAAAAABBwbI66DCiOuAQAAAAAAAAAAAABGMeIaAAAAAAAAAAAAQMCx59pMVwE+xIhrAAAAAAAAAAAAAIBRjLguRpUwM+WeSssxU7CIVI0IMVa2xWJuLoLwEIuxss2VLGI1VLjFYu5Vp4REGyvb0E+Klm0z9/3KCgs3VnaEsZIDS4TBrmzW5FPGyt6ZWd1Y2ZefWm2k3J+r9BNTwkOijJXdsqq5f/LKYebK/um3NGNlV61f1VjZYXYz59IpWebOb2oaKznwmDwPzTF0PmawqWOsvaFkGZzrzmQbz+Bhx9j/+DmbuZZWNYNX1sxdOUIgOJFp7vpevewkY2VnV61vrOztJ82de8dFNzZWdm62mZGOJ1LN/QpeEHrOWNmWlGxjZR8P
jzNWdlyNRsbKDjl/0kzBVWqYKTcA2Qxe94bvEbgGAAAAAAAAAAAAEHDsBjuswvdIFQ4AAAAAAAAAAAAAMIoR1wAAAAAAAAAAAAACji3XzJQFKBuMuAYAAAAAAAAAAAAAGMWIawAAAAAAAAAAAAABhzmugwsjrgEAAAAAAAAAAAAARjHiGgAAAAAAAAAAAEDAYcR1cGHENQAAZeDEiRMyf/58mTBhgnTp0kWioqLEYrFIz549i902OztbXnzxRWnXrp1UrlxZqlevLldeeaUsWbKkXOoOAAAAAMiPNh4AAP7JlmvzyQL/wIhrAADKwPvvvy+TJ0/2eruMjAy5+uqrZf369RISEiJt27aV1NRUWbNmjV4eeeQRee6558qkzgAAAAAA12jjAQAAlD1GXAMAUAaqVKkivXv3lilTpuhe9FOnTvVoO3XRQl3QaNq0qezatUu2b98uBw4ckI8//lgiIiLk+eefl08//bTM6w8AAAAA+B/aeAAA+Ce7ze6TBf6BwDUAAGVg5MiRsmLFCnnmmWfkxhtvlDp16hS7zcmTJ+WNN97Q99966y1p3bq187kBAwbIww8/rO///e9/L8OaAwAAAAAKoo0HAABM2rp1q9x6660SGxsrkZGR0qxZM5k4caKcPn3a630dPnxYT3niyTJixIhC2zdp0qTY7VTWmZIgVTgAAH7ik08+kaysLGnZsqWe76ygcePGyfTp/9/enUBHUaUNH38SQkJCElaBQAhhlW1AAiiISABl+UB4RVCWUcAFnBHFkUHUGYHDMgLKIOqcIfl0QP1GkPDiiMwMMzCyCYhDgiA7hEUMASEsgSyQpb5zr6dbQjohnVR1dbr/v3OKarqq+qnuVPq5N/fWvbMlJSVFUlNTpXnz5racJwAAAADg9qjjAQBgvcIC379bevXq1TJy5EjJy8vTnefU1COHDx+Wd955R5KSkvToLqohu6xUw3ePHj1K3K4anZOTk/Xje++9t8T92rdvLzVq1HC5LTCwfPdO03ANAICX+Prrr/W6Z8+eLrc3atRIDy934sQJvS9/1AAAAAAA70UdDwAAVFRaWpo8/vjjutFaTVUyffp0CQoKkitXrujG7HXr1uk7sb/55ht9p3NZqLu2VWN3ST788EMZN26chIaG6tcuybvvvivx8fFiJoYKBwDASxw5ckSvS/tjhWOb6lEHAAAAAPBe1PEAALCeUVBoyuKt3nzzTcnOzpb7779fZs2apRutFXWn8yeffKLXu3btkrVr15oWc9myZXo9bNgwiYyMFE+i4RoAAC9x8eJFva5du3aJ+zi2Xbp0yWPnBQAAAABwH3U8AACsZxQYpizeatWqVXo9YcKEYttq1aolI0aM0I9XrlxpSjw1//XmzZv1Y3XXtafRcA0AgJdQc4cowcHBJe4TEhKi1zk5OR47LwAAAACA+6jjAQCAijh9+rQeKlxRd1y74piSxDFFSUWpYcINw5CYmBjp06dPqfsuWbJEBg8eLH379pUxY8bo/1+9erVC8ZnjGgAAL1GtWjW9vnHjRon7XL9+Xa/V/CIAAAAAAO9FHQ8AAOsVevHd0mZNOxIcHCzR0dGlTjty/PhxPQ921apVyx1PNVirhmvliSeekMDA0u9//vTTT4v8Xw1drubhVusHH3ywXOdAwzUAAF5CDe1y83Byrji2OfYtSUJCgiQmJpY59vhx4+Tpp58u8/4AAAAAAO+t4z0yeqyMGfdkmfcHAMDfuZtrHcN3T5w40bJzunhTOSEgIKDUaUcKCwslMzNT6tSpU+54aojwEydO3HaY8Pj4eH2XddeuXfWd2aqT3ldffSXTp0+X3bt3y5AhQ2Tbtm0SFxfn9jnQcA0AgJdo1aqVTujHjh0rcZ/U1FTnvqVJT0+XlJSUMsceOGCAG2cKAAAAAPDmOl7Pvv3dOFMAACovo7DQlNdxN9c6jvGWaUfMmHpk2bJlzuHHHXdyl7afQ1hYmHPI8Pvuu09/ji+//LJs2LDB7XOg
4RoAAC/RrVs3Wbp0qe6d5oqaz8TR403tW5qoqCi3erQ1aNDAzbMFAAAAAHhrHa9efep4AAC4w91c6zimJC+++KIsXrzY7fPo1auXbNq0ye1pRyo69ci1a9dk1apVt73bujQq/ty5c2XgwIGyceNGuXTp0m1HlbkVDdcAAHiJoUOHyqRJk+To0aM6sffu3bvYcDVKp06dpEWLFqW+lhqixp1hanIr2BsPAAAAAOA9dbxTGdfKedYAAPjnHNfu5trbCQ8PL9ew3TVq1HA+djT6qgZgNf+0q+HCHcOJq/moIyMjy32+qtE6KytL3z09YsSIcr/Ovffe6xy6XM273blzZ7eOL31WbQAA4DH169d3Fo6eeuopOXz4sHPbF198IQsWLNCPZ8yYYds5AgAAAADKhjoeAADWMwoMUxazzZkzRy5cuOD28vnnnztfwzGViLrj+vTp06VOO9K0aVOpWrVquc/XMfz38OHDJSIiotyvc/Ow5vn5+W4fzx3XAABYQBUkVK/5W+cjUfOb1a1b1/m8mutDLQ7qDxfJycmyY8cOadeunbRv314P0+IogEyZMkX32gcAAAAAeA51PAAA4GkxMTHSsGFDOXPmjGzdulXGjBlTbB/1vNK9e/dyx1FTl2zZsqVCw4Q77Nu3z/k4Ojra7eO54xoAAAsUFBRIRkaGc1HDrDh6md38fHZ2drF5QNQcJvPmzZO2bdvKkSNHdE87NbeJGq7lrbfesukdAQAAAID/oo4HAIB3MgoKTVm81SOPPKLXiYmJxbapIcSTkpL044oM7/3hhx/qochjY2MlPj6+AmcrMn/+fL1W5Z5GjRq5fTx3XAMAYAGV5FWyL+9wKtOmTdMLAAAAAMB+1PEAAIAdpk6dKu+//76+I3r69Ol6ipEqVarIlStXZPTo0XqtRoV56KGHih173333yQ8//CAvvviiXlxR5ZuPPvpIPx47dqzLebRvpjrdhYSE6Ng3z+GtOvC99tprumOeMmvWrHK9XxquAQAAAAAAAAAAAFQ6hRbMT+1NGjdurBuWR40aJbNnz5aEhAT93KFDh/QIMPXr15eVK1e6bHBWjdanTp2Sy5cvl/j6mzdv1kOFq+NVw/XtqNdcvHixTJ48WXfsu+OOOyQnJ0cOHjyoR6IJDAyUN954w3mnuM8OFT5z5kz9oZW2LFmyxO7TBAAAAACUEfU8AAAAAABKN3z4cNm5c6deK999951uMJ40aZLs3btXWrRoUe7XXrZsmV7ff//90rRp09vuP3LkSN1ofc8998j169dlz549kpqaKs2aNZNnnnlGkpOT5eWXXy73+VS6O67r1asnLVu2dLktKirK4+cDAAAAAKgY6nkAAAAAgPIwfPyOa4e4uDjnfNZldfLkyTI1XDsar8uiW7duerFKpWu4HjhwoFsfIAAAAADAu1HPAwAAAAAAla7hGgAAAAAAAAAAAAAKDf+449pf0HANAAAAAAAAAAAAoNIpoOHap1S6hms1yffo0aPl7NmzEhERIR06dNATgbdr187uUwMAAAAAlAP1PAAAAAAAUOkarr/99lu9OKxZs0bmzp0rkydPlrfeekuqVKli6/kBAAAAANxDPQ8AAAAAUB4F3HDtUwKlkmjYsKHMmjVLdu7cKefPn5fc3FzZu3evPPvss2IYhrz99tvy6quv2n2aAAAAAIAyop4HAAAAAAAcAgz114BKbsGCBTJt2jQJCgqSo0ePSmxsrGmvnZudJXb4Mde+H0uNEPvuZrhhY9eYakEBtsW2L7LIdZs+86BA+951QaF911nVKva976y8QttiV69qXz+piLBQ22JXJrk5ObbFrnLljG2xk6/Xsi123A8bbIl7vMUAsUuwjd+BDcPs+x46k23f9+/xS7m2xe7WKNy22FWNfFvins217xpvUse+z7uy1fOuZNmX8/JtKofa+PXrt3de2PmR21jVsu0zz7Oxjlcz2L4yRq6NF7mdv191IsLsC16JnMq4ZlvshnnnbIud
V6ORbbH3/phtW+yo8GC/m1v22g376lltgi7ZFjugIM+22GnBUbbFjgrItC12YM4VW+IGNWpjS9zK6NN6bU15ncd+PGDK68BP7rguzZQpU3RP/fz8fD2kHAAAAACgcqOeBwAAAACAf6l0c1y7ouY7u+eee+Szzz7TPfFLk5CQIImJiWV+7fHjxsrTTz1lwlkCAAAAAMyu57lbx3t87DgZ/9TTJp0lAAAAAMBOzHHtW3yi4VoJDv5pOBTVG7806enpkpKSUubXHTigf4XPDQAAb/fJ/vO2xX6olX1DTf2/bcdsi93lnna2xG3+/Saxy/sFHWyLPbF2mm2xzz/3e9ti/58LTW2LveT/zrQt9pDti22Ju6nHi2KXsT44VHhZ63nu1vEe7G/flAkAAHhKmI1Tdl2ual8dLzDfvuGj7dToRrptsfNrNLQlbmH1qmKX1MzatsXu+9IK22KnfjDGttjZhTVtix1x9og9gRkq3OunLIA1fKbhet++fXodHR1d6n5RUVESFxdX5tdt0KBBhc8NAAAAAGBNPY86HgAAAAAAvsEnGq7//ve/y/79+/Xjfv36lbrvxIkT9VJWudlZFT4/AAAAAIA19Tx363hXsnJMOT8AAAAAgP0YKty32DdmjBvUHyvUHyL27NlT5PnCwkJZvny5jB49Wv9/8ODB0rVrV5vOEgAAAABQVtTzAAAAAABApbvjOi8vTxITE/VSu3ZtadKkiQQFBcmxY8fk0qVLep+ePXvKxx9/bPepAgAAAADKgHoeAAAAAKCimOPat1SKhuvY2FiZM2eO7NixQw4ePKj/kJGbm6v/uDFw4EDdE3/UqFFSpUoVu08VAAAAAFAG1PMAAAAAAECla7iuWbOm/O53v7P7NAAAAAAAJqGeBwAAAACoKOa49i2VouEaAAAAAAAAAAAAAG5Gw7VvCbT7BAAA8EVnz57Vc3K+8MIL0r17dwkNDZWAgACJj48v9bj//ve/smjRIj08asuWLfUxalm2bJnHzh0AAAAAUBR1PAAAAOtxxzUAABZYsWKF/OY3v3H7uGeeeUb27NljyTkBAAAAAMqHOh4AAN6pwOCWa19CwzUAABaIjIyUBx54QLp27aqX3bt3y+zZs297XLNmzaRNmzbO4yZNmiR79+71yDkDAAAAAFyjjgcAAGA9Gq4BALDAk08+qReHtLS0Mh23evXqIv+vWrWq6ecGAAAAAHAPdTwAALwTc1z7Fua4BgAAAAAAAAAAAADYijuuAQAAAAAAAAAAAFQ6zHHtW2i4BgAAAAAAAAAAAFDpMFS4b2GocAAAAAAAAAAAAACArbjjGgAAAAAAAAAAAEClw1DhvoWGawAAfFBCQoIkJiaWef92/YdL/LAxlp4TAAAAAMAzdbxRj4+TsU8+Zek5AQAAmI2GawAAfFB6erqkpKSUef/ozvdbej4AAAAAAM/V8fr0G2Dp+QAA4C2Y49q30HANAIAPioqKkri4uDLvX6NuPUvPBwAAAADguTpe/foNLD0fAAAAK9BwDQCAD5o4caJeyuovu7639HwAAAAAAJ6r453PzLb0fAAA8BbMce1baLgGAAAAAAAAAAAAUOkU2n0CMFWguS8HAAAAAAAAAAAAAIB7aLgGAMACp0+flrp16zqXV155RT+/bdu2Is8vWLCgyHHq/zdv37Nnj37++eefL/K8en0AAAAAgGdQxwMAwHuHCjdjgXdgqHAAACxQUFAgGRkZxZ7Pz88v8nx2dtF5x9T/XR137do1vdz8+gAAAAAAz6COBwAAYD0argEAsEBsbKwY5eipN3PmTL0AAAAAALwHdTwAALxTATdL+xSGCgcAAAAAAAAAAAAA2Io7rgEAAAAAAAAAAABUOsxP7VtouAYAAAAAAAAAAABQ6TBUuG9hqHAAAAAAAAAAAAAA8DKXL1+WlStXytSpUyU+Pl4iIiIkICBAYmNjTXn93bt3y2OPPSYNGjSQatWqSbNmzWTy5Mly/vz5Uo/Ly8uTN998
Uzp27CjVq1eXWrVqSe/evWX16tUVOh/uuAYAAAAAAAAAAABQ6fj6UOGbNm3SDctWUI3MI0eO1I3Q9erVk3bt2snhw4flnXfekaSkJPnqq690Q/atcnNz5cEHH9Tbq1Spoo/LysrS56qWadOmybx588p1TtxxDQAAAAAAAAAAAABeJjQ0VO6//36ZMmWKLF++XP70pz+Z8rppaWny+OOP60br119/Xf8/OTlZrwcMGCDp6em6wdxw0TFANUyrRuumTZvK/v37Zc+ePXLs2DH5/PPPJSQkRObPny9ffPFFuc6LhmsAAAAAAAAAAAAAlXKOazMWb9W/f3/ZvHmzvPXWW/ru6JiYGFNeVw3znZ2drRvFZ82aJUFBPw3SXaNGDfnkk0/0eteuXbJ27doix507d06WLFmiH3/wwQdy5513OrcNGTJEXn75Zf145syZ5TovGq4BAAAAAAAAAAAAwE+sWrVKrydMmFBsm5qvesSIEfqxml/7ZmvWrJEbN25Iy5Yt9ZzWt5o4caJep6SkSGpqqtvnRcM1AAAAAAAAAAAAgEo5x7UZiz85ffq0HhJcUXdcu9KzZ0+9/vrrr4s87/i/Y/utGjVqpIcQd3VsWdBwDQAAAAAAAAAAAKDS8fWhwq1w5MgRvQ4ODpbo6GiX+zRv3lyvjx8/rufBvvVYx/bSjj18+LDb5/bTgOUAAAAAAAAAAAAA4IcSEhIkMTHRrWPUMNuOobErk4sXLzqHBA8ICHC5T+3atfW6sLBQMjMzpU6dOkWOdWwv7dhLly65fW40XN9GtbDq5b7A09PTJSoqqlwXbUxYucKaErsi7Irtj+/ZrNjhNsYuL2KXL3aETXErws7Y/ubJLjF++d3/3iMdbItdXhWOHd3Otti/KndkMz7zWNti37N+oy1xlRvlOqqSX+NKl4W2xB5brqjmxEbZ1age6p+/F8T2+rjEJra7qtkYu7LF9Ud3RIb55e+FnXHrRITZ+J5r2xY7yA+vs9ah1WyLnbZ8gi1xK8KM2OX9xE15323j7YuNMllinDTldWbOnKnnZXaH+hlXRrm5uc47rksSEhLifJyTk1OuY28+rswMWCIuLk4NLKDXxPbduMQmtj/E9sf3DPdwbRKb2L4Vl9j+Fxtl46/Xhz/G9sf3TGxi+3pcuIdr07OITWxfjktscl5lsmTJEv3zcmdRx5Rk8uTJ+hpwd+nVq1ep5/nFF1/o/Zo0aVLu97py5Ur9GvXr1y9xnwMHDjjP6cKFC87n27Ztq5/785//XOKxjz76qN5n0qRJbp8bd1wDAAAAAAAAAAAA8Fvqzngz744PDw93Dq/tjho1aojV1BDhjqG8DcNwOVy4Y0jwwMBAiYyMLHasY/vthiJ3Fw3XAAAAAAAAAAAAAGCSOXPm6MUbtWrVSq9v3Lghp0+flpiY4tNIpqam6nXTpk2latWqRY7dtm2bHDt2rMTXdxzriOOOQLePAAAAAAAAAAAAAABUOjExMdKwYUP9eOvWrS73cTzfvXv3Is9369ZNr7/66iuXx6WlpcmJEyeK7OsOGq4BAAAAAAAAAAAAwE888sgjep2YmFhsmxpCPCkpST8eMWJEkW1Dhw7Vd2AfPXpUNm7cWOzYhIQEve7UqZO0aNHC7fOi4RoAAAAAAAAAAAAAfMjIkSMlNjZWfvvb3xbbNnXqVAkNDZUtW7bI9OnTpaCgQD9/5coVGT16tF6rxueHHnqoyHH169d3zgX+1FNPyeHDh53bvvjiC1mwYIF+PGPGjHKdM3NcAwAAAAAAAAAAAIAXqlu3rvNxXl6eXqu5qW9+ftSoUfLuu+8WOe7s2bNy6tQpuXDhQrHXbNy4sXz00Uf6uNmzZ+s7pdVzhw4dkqysLN1AvXLlSgkICCh2rGqcTk5Olh07dki7du2kffv2cu3aNefc1lOmTNF3ZpcHd1wDAAAAAAAAAAAAgBfKyMhwLpmZmfq5wsLCIs9fvXrV7dcdPny47Ny5U6+V7777Tu644w6Z
NGmS7N27t8ShvtWd2ps2bZJ58+ZJ27Zt5ciRI7pxvFevXrJq1Sp56623yv1eueMaAAAAAAAAAAAAALyQYRjlOk41Lt9OXFyccz5rdwQHB8u0adP0YibuuAYAAAAAAAAAAAAA2IqGawAAAAAAAAAAAACArWi4BgAAAAAAAAAAAADYijmuLTJhwgRJT0+XqKgoYvtwXGIT2x9i++N7hnu4NolNbN+KS2z/i42y8dfrwx9j++N7JjaxfT0u3MO16VnEJrYvxyU2OQ9wV4BR3hm9AQAAAAAAAAAAAAAwAUOFAwAAAAAAAAAAAABsRcM1AAAAAAAAAAAAAMBWNFwDAAAAAAAAAAAAAGxFw7XJNm7cKIMHD5Y77rhDQkNDpXXr1vL6669LVlaWZTHPnj0rH3/8sbzwwgvSvXt3HTcgIEDi4+PFSmp69O3bt8srr7wi9913n9SpU0eqVq2q33u/fv3kr3/9q97HKklJSTJhwgTp0qWLNGzYUEJCQiQiIkLi4uL0Z56RkSGe8o9//EN/5mqJjY21NNbMmTOdsUpalixZYvn7HTZsmPNzb9CggfTo0UN+//vfS35+vqmxTp48edv361jGjx8vVlDX0muvvSYdOnSQ8PBwCQ4OlujoaHn00Uflq6++Eiup74558+bp61pd3yr+XXfdJQsWLJAbN27Y8r2Rl5cnb775pnTs2FGqV68utWrVkt69e8vq1astjf3f//5XFi1aJKNHj5aWLVs6f+7Lli0r8/uGuch55Dwrc5435DuFnOeZnGdVvlPIeago8h35jjqeufnOG3KeL9bx7Mx55DvfQc4j51HHo45nFl/MdxWJTc4D3GDANO+8844REBCgSjRGdHS00alTJyMkJET/v02bNkZGRoYlcRctWqRj3Lr06tXLsNKGDRuKxGvWrJnRuXNno3bt2s7nBg0aZOTm5loSv2PHjjqG+oxjY2ONLl26GDExMc7Y9erVM7799lvDalevXi0St0mTJpbGmzFjhvP99ejRw+Xyt7/9zZLYeXl5xi9/+Uvne23cuLHRtWtX/bMPDg7Wz6nPw0zp6eklvk+1qGvOcT6JiYmG2Y4cOWJERUXp1w8MDNTv9a677jIiIiL0c+p3/o9//KNhhXPnzhnt27d3xlaP1XVfpUoV/Zy65jMzMz36vZGTk2Pcd999el91Hh06dDCaN2/uPH7atGmWxXb8zt+6LF261O33j4oj55HzrM55duY7hZznuZxnZb5TyHmoCPId+Y46nvn5zu6c56t1PDtzHvnON5DzyHnU8ajjmcVX811FYpPzgLKj4doku3bt0l/C6ss+ISHBKCws1M+npaU5E9CwYcMsif3BBx8YDzzwgPHqq68aq1evNl5//XWPFPDWr19vNG3a1Fi8eLFORjf76KOPnIXbl19+2ZL4Kplv3rzZuHHjRpHn9+7d60yMbdu2Naz2/PPP61hDhw716B81xo4da3ja008/rWOrQl1KSkqRbVlZWcbnn39e7OdhtWXLlulzCg0NNa5cuWL66/fp00e/fsuWLY39+/cXKehMmTJFbwsKCtIFQbP1799fv37r1q2No0ePOp8/efKks7Dz+OOPe/R744UXXtD7qd/9Q4cOOZ9XP3vH7/yaNWssif3www8bI0eONBYuXGhs2bJFFy4p4NmDnEfO80TOszPfKeQ8z+U8K/OdQs5DeZHvyHfU8ezJd1bnPF+t49mZ88h3lR85j5xHHY86npl8Nd9VJDY5Dyg7Gq5N4kjuTzzxRLFt6otfFf7U9j179lh+Lu+++65HCngqmZaWzOfOnavPQ/VULCgoMDxp586dzl5LBw4csCzOjh079M9W/fxVkvHlP2p8+eWXOq7qBVqRHnFmi4+P1+c1ZswY019bvU9Hb2NXPT5VRa5FixZ6u/q9M5OqqDiu4W3bthXbrnrdOiqVBw8e9Mj3xtmzZ509UNX1cCtHQS0uLs702K44Ks8U8DyPnFccOc+3/qhBzvNczvN0vlPI
eSgr8l1x5DtrUMfzXM7zpzqenTmPfFf5kPOKI+eZjzpecdTxKne+K2tsV8h5QMmY49oE165dk3Xr1unHam6SW6k5C/r06eOcv8RXREZG6rlfSjJw4EC9vnjxopw/f96DZybSpk0b5+Ps7GxLYqj5MJ555hkJCwuT9957T3zdwoUL9XrKlCl6XhJvoOaJ2bx5s348btw401//+vXrzvmMmjdvXmy7mofE8by6HszkmGOmUaNGcu+99xbbruZhUXNNqfP79NNPxRPWrFmj56BR32lq7pdbTZw4Ua9TUlIkNTXVI+cEzyPnuUbO8y3kPM/lPG/Mdwo5D+Q718h3vsUb853VOY86XnHkPJDzXCPn+RZvzHnU8ch3AFyj4doEu3fv1okgJCRE7r77bpf79OzZU6+//vpr8Rc5OTnOx6GhoR6N7UiQ4eHhcuedd1oS44033pB9+/bJ7NmzJTo6Wjxtz549Mnr0aF15GDp0qLz++uuyf/9+S2Ll5ubKv//9b/34gQcekAMHDsiLL74o/fr1k4ceekimT58up06dEk/78MMPdSEnJibGWYkyU926dZ0/2+3btxfbnpWVJd9++61+XNLvfnmpipGjkFeS0s7NCo7vL8f32a3UuTZt2rTIvvA95DzXyHm+ke8Ucp5nc5435juFnAfynWvkO2tRx7M+51HHK46cB3Kea+Q861DH+wl1PPIdANdouDbBkSNH9FolmZJ66jl6Lx0+fFj8xfLly529qFQvRqsVFhbKmTNnZNmyZc5eavPmzdOFPLMdPHhQ/vCHP0hcXJw8//zzYgdVsFCf8caNG3WPsTlz5sgvfvEL+c1vfiMFBQWmFygdPe+2bt0qnTp1ksWLF8v69etl7dq1upCrCtKOn7knqIKdKuApTzzxhAQGWvN1pq4h1QNx6tSp8v7778vZs2d1b9dvvvlGhgwZIufOnZNf/vKX0qNHD1Pj1qxZU6/T0tJK3OeHH37Q60OHDoknv+tc9dL05+86f0POc42c5xv5TiHneTbneWO+U8h5IN+5Rr6zlr/X8TyV86jjFUXOAznPNXKedajjUccj3wEoDQ3XJvYiql27don7OLZdunRJ/EFycrIsWbJEP37llVcsjfX222/rBFylShXdM2r8+PESGxsr//znP+W5556zpGChhtJRBZ6EhAQd15MaNmwos2bNkp07d+qhilSvwb1798qzzz6rz019Hq+++qqpMdPT052P1WeqCniqgKN65B49elQeffRR/Xjs2LG6p64nqKF0Tpw4YdlwOg5jxozRhWg1TJP6uUdFRUn16tXlnnvu0QX9P//5z/LRRx+ZHrdr167OQp6rXn7fffedsxDlqe8VvuugcB0UR87znXynkPM8m/O8Md8pfNeBa6A48p11qON5NudRxyuK7ztwDRRHzrMGdbyfUccj3wEoGQ3XJlBJVgkODi5xHzXczq3DzPgq1VNr2LBhkp+fLw8//LCMHDnS0niqUKd6hamEq5KvKuypnnsq6V6+fNn0eCqhb9u2TSZNmiRdunQRT1PzDakhdNQQLmrIF3VtqV6J6rzmz5+v91m0aJGeJ8XM+Y4c1Nw3qvCsCiHqmm/RooXukXjXXXfpQu/cuXPFE1QPVMfwLqX1lDPDsWPH5Mcff9S9H1XloUOHDvpzUAVfdR5WDGekfr6Ogp4qwKrhmxxUoVoVPB29UK2a7+hWfNdB4TooipznW/lOIed5Nud5Y75T+K4D10BR5DtrUcfzfM6jjvczvu/ANVAUOc861PF+Rh2PfAegZDRcm6BatWp6fePGjRL3Ub227JgTxdOuXLkiAwcOlO+//146d+7sTMJWGjFihJ77RfXgUkPqqMKdKuypQkfv3r1NHWJG9RRTPf9UoVINY+NtpkyZonsvqsK16k1n9jXuKHDUqlWryHZV8FHD+Shqzhg1vJGVVIFz1apVzvOxkuqJqd6bKlCrnoiqN6QaYujChQt6mB3VS1RVMKyYC+evf/2r/nmq
XoiqUKkK02rootatW+uCnhrKR4mIiBBP4LsOCtfBz8h5vpfvFHKe53Oet+U7he86cA38jHxnL3+p43ky51HHK4rvO3AN/IycZx/qeNagjvczvuuAyoOGaxM4kp1juAlXHNtuTYy+RCXdAQMG6CFV2rVrJ//61788MgfMrVQy/Pvf/64TsirsrVixwrTXVvO+ZGZmyjvvvOPRxFpWangfVbhVVCHALDdft2poGVccz1+9elUyMjLESqpwl5WVpXsHqgK+VdRwRarXp5rjKSkpSVq1auXcpgowCxYskL59++pr4o033jA9fsuWLfXv00svvaR7X6r5X1TBUvX43bVrl7Rv317v16BBA/EEvuugcB38hJznm/lOIed5Pud5W75T+K4D18BPyHf285c6nqdyHnW84vi+A9fAT8h59qKOZz7qeEXxXQdUHjRcm8Dxpa9646khRVxJTU0tsq+vUcN6DBo0SPcOVIlpw4YNUqdOHdvORxW+evXq5ZyXxiwpKSl6/etf/1on1puXyZMn622nT592Prd9+3bxNMdwJ6qHollUb7hbX7+03otW90x09HgdPny4pQVt1eNVzbGjrukmTZq43Kdfv356rQpdVqhXr54sXLhQF9rVkDaq8KwKuKoS5RjKxzH8jtUc319qiKGS+Pp3Hch5CjnPO3KeFflOIefZk/O8Kd8p5DyQ78h33pLv/KWO56mcZ3e+U8h58DbkPHKet+Q86ni+lfPIdwDKi4ZrE3Tq1EknPTWUxDfffONyn61bt+p19+7dxdeoxDNkyBDZsmWLToL/+c9/PNpbqiSOQo7ZhR3HfDe3Lqp3mqNw43iutKFHrOKYNyQ6Otq011RDCDkKOMePHy81sauCnpWFezWkjbrWPDGcjupl6e48KZ6iKpPr1q3Tj4cOHeqRmN26dXMWfEsackr9fG7eF76HnEfO85acZ0W+U8h53pXz7Mh3CjkP5DvynbfkO3+o43ky53lrvlPIebALOY+c5y05jzqef+Q88h2A26Hh2gSqZ1T//v3148TExGLbVa+iL7/80tmTypeoRPPII4/oQp0qBKj32bhxY7tPSw/rsWnTJmcB3CwnT57UPdVcLUuXLtX7qIKQ47n4+HjxJDWUkKPHmqPHnFkee+wx5xwlrgrNf/nLX/Ra9QgNCgoSq3z44Yf6s42NjbX883X0rlO/wyXN9aLmvlHUPC2epHosnj9/Xpo1a6YrWJ6gCpNqeCH1eWzcuLHY9oSEBOfvnJq7Br6JnEfO84acZ2W+U8h53pPz7Mh3CjkP5DvynTfkO3+p43ky53lrvlPIebALOY+c5w05jzqe/+Q88h2A2zJgim+++cYICAjQS0JCglFYWKifP3PmjNG5c2dDfdT/8z//45Fzeffdd3W8Xr16WRonPz/fGD58uI7VoEED4/Dhw4anbNq0yZg9e7Zx4sSJYtuSk5ONLl266PNq1KiRcfXqVY+c09KlS3XMJk2aWBZj3759xoQJE4xvv/22yPMFBQXGJ598YkRGRupzGDx4sOmxf/zxR6NGjRr69SdOnGjk5OTo59W1vnjxYv28uv43btxoWEXFatq0qY41Y8YMw2rXrl0z6tWrp+N17dq1yDWenZ1tTJ06VW9Ty9/+9jfT42/dutVYt26d/l27Oe4f/vAHIzAw0KhSpYrx5ZdfevR7Y9KkSXo/9XM4dOiQ8/k1a9YYISEh5fosyvud5fhuVb978CxyHjnP6pxnZ75TyHmezXmezHcKOQ9lRb4j31HHsz7feTrn+VMdz86cR76rfMh55DzqeNTxzOQv+c6d2Lci5wElo+HaRIsWLdIJTn3hNG7c2OjUqZPzC+/OO+80zp8/b0nc77//3qhTp45zqV69uo4ZFBRU5Pn58+ebGlcVKhzJLTY21ujRo0eJS0pKiqmxP/vsM2dsVbhUX/R33323ERUV5XxeFe52795teIon/qih3o/j/dWuXVtfY6rgUatWLefzPXv2NC5dumRJ/PXr
1xuhoaE6jirsqdiOz1xd+2+++aZhJVV4dMQ6fvy44QnqPTt+p1TBShVsOnToYISFhTk/8+eee86y7xT1+ipWu3bt9M/bEVetV65c6fHvDVXI7N69u95XFTI7duxoNG/e3PlZTJkyxbLY6v83b1f7q+PCw8OLPK9eH9Yj55HzrMx5duc7hZznuZxnZb5TyHmoCPId+Y46nrX5zo6c56t1PDtzHvnON5DzyHnU8ajjmcVX811FYpPzgLKj4dpkGzZsMAYOHKgTsCrctWrVynjttdcs7R2neuc5vlxLW8zuyeUo0JRlMbu32rlz54w//vGPxpAhQ3RyiYiIMKpWrap7kfXu3Vtvy8zMNDzJE3/UUIW3OXPmGIMGDTKaNWvmfN/169fX193HH39cpCebFY4cOWKMGzfOiI6O1rHr1q2rfw6qt6jVxo4d65Fet7dKTU3VPfJat26tC7jqfauC7dChQ421a9daFlcV6sePH68riOpnrWKr75TJkycbJ0+etO174/r168a8efOMX/ziF/qcVGFf/UxWrVplaWz1/7Ic56rHMqxBziPnWZXzvCHfKeQ8z+Q8K/OdQs5DRZHvyHfU8Xwv5/liHc/OnEe+8x3kPHIedTzrUMer/PmuIrHJeUDZBah/bj+gOAAAAAAAAAAAAAAA1gi06HUBAAAAAAAAAAAAACgTGq4BAAAAAAAAAAAAALai4RoAAAAAAAAAAAAAYCsargEAAAAAAAAAAAAAtqLhGgAAAAAAAAAAAABgKxquAQAAAAAAAAAAAAC2ouEaAAAAAAAAAAAAAGArGq4BAAAAAAAAAAAAALai4RoAAAAAAAAAAAAAYCsargEAAAAAAAAAAAAAtqLhGoBHnDx5UgICAvSiHgMA4KvIeQAAf0HOAwD4A/IdAHgODdeAF5g5c6az8ONOQWnZsmUeOT8AAMxCzgMA+AtyHgDAH5DvAABmouEaAAAAAAAAAAAAAGArGq4BAAAAAAAAAAAAALai4RoAAAAAAAAAAAAAYCsargEftG/fPpkwYYK0bNlSwsLCJDw8XDp06CC/+93v5MKFCy6PycvLkzVr1ujjunTpIlFRURIcHCz16tWT/v37y/Lly8UwjFLjpqWlycSJE6Vx48YSEhIi0dHRMn78eDl27JhF7xQA4O/IeQAAf0HOAwD4A/IdAPg5A4DtZsyYoUpOermdEydOOPddunRpse3z5883AgMDnfuEhYUZwcHBzv9HRUUZKSkpxY7buHGjcx+1REZGGhEREUWeGzFihFFQUODyvJKTk41atWo59w0NDTXCw8Odr/Xpp586t6n3AADwT+Q8AIC/IOcBAPwB+Q4AYCbuuAZ8yAcffCDTpk3TvRHnzp0r6enpkpWVJdnZ2bJr1y7p06ePfm7IkCFy7dq1IseqY1SvwvXr18uVK1f0kpmZKRkZGbJ48WKJjIyUpKQkee+994rFvXr1qjz88MNy6dIliYmJkX//+986rnp++/btuqeiem0AAMxCzgMA+AtyHgDAH5DvAACaqc3gACrcM7F+/fqlLnXr1nXZMzEzM9OoWbOmfn7dunUu4+Tl5RmdO3fW+yxatMitc0xKStLHNW/e3GVvSLVN9YA8cOBAse3p6elFei3SMxEA/Bc5DwDgL8h5AAB/QL4DAJiJO64BL3Pu3LlSl5Lmcvnf//1fuXz5snTq1EnP3eJKUFCQjBo1Sj/+17/+5dZ5DRo0SK9TU1Pl7NmzRbatWLFCr0eMGCFt2rQpdmyDBg3k2WefdSseAMD3kfMAAP6CnAcA8AfkOwBARQVV+BUAmMowVOe9kp08eVKaNm1a7Plt27bp9cGDB3WBqiQ5OTl6ferUqWLb1BA4S5YskbVr1+rXUQXGvLy8Yvv98MMPzhg3btyQ7777Tj9WQ/aURG174403Sn1vAAD/Qs4DAPgLch4AwB+Q7wAAFUXDNeAjzpw5o9e5ubl6uR01P8zNjhw5In379tWFt5vnh6lZs6YEBv40OIPq
GamoeV4cLl68KPn5+fpxo0aNSowXHR3t9nsCAMAVch4AwF+Q8wAA/oB8BwBwYKhwwEcUFBTo9WOPPaZ7N95uUT0cbzZ+/HhduIuNjZWkpCTJyMjQBbkff/xRD6GTlpZW5t6TAABYiZwHAPAX5DwAgD8g3wEAHLjjGvARjiFuXA2VczunT5+W7du368fLly+Xbt26Fdvn1vlfHGrXri1VqlTRBcybC4G3Km0bAADuIOcBAPwFOQ8A4A/IdwAAB+64BnxEjx499Do5OVnS09PdLuA5dOrUyeU+GzZscPl8cHCwdOjQQT/euHFjiTG+/PJLt84JAICSkPMAAP6CnAcA8AfkOwCAAw3XgI8YMWKEnrclLy9PXnrppVKHvSksLJTLly87/1+jRg3n4z179hTb/+rVqzJnzpwSX08N46OooXgOHz5cbLsalmfJkiVuvR8AAEpCzgMA+AtyHgDAH5DvAAAONFwDPkIV7t5++239eMWKFTJo0CDZuXOnLswpan3w4EFZuHChtGvXTtauXes8tk2bNhITE6MfP/nkk7p3o8OOHTskPj5eLl26VGLsX/3qVxIdHS3Xr1+XAQMGyH/+8x9nAVOdwwMPPOA8DwAAKoqcBwDwF+Q8AIA/IN8BAByY4xrwIWPHjpWcnByZPHmy/POf/9RLSEiIhIeHS2Zmpu616BAQEOB8HBgYKH/605/k4Ycflv3790uXLl0kLCxMb8vOzpbq1avL559/rgtqrkRGRspnn30mDz74oJw8eVLvp45Xr3vt2jWJiIiQ999/39mDEQCAiiLnAQD8BTkPAOAPyHcAAIU7rgEf8+yzz+phbX77299Kx44ddQFPDZ+jCnmq4Pb888/L+vXrZdSoUUWOGzx4sGzZskX3aFS9HPPz86Vu3boyfvx43VOxb9++pcZVr7137155+umnpVGjRvp4NVSPKnSmpKTI3XffbfE7BwD4G3IeAMBfkPMAAP6AfAcACDBKmzACAAAAAAAAAAAAAACLccc1AAAAAAAAAAAAAMBWNFwDAAAAAAAAAAAAAGxFwzUAAAAAAAAAAAAAwFY0XAMAAAAAAAAAAAAAbEXDNQAAAAAAAAAAAADAVjRcAwAAAAAAAAAAAABsRcM1AAAAAAAAAAAAAMBWNFwDAAAAAAAAAAAAAGxFwzUAAAAAAAAAAAAAwFY0XAMAAAAAAAAAAAAAbEXDNQAAAAAAAAAAAADAVjRcAwAAAAAAAAAAAABsRcM1AAAAAAAAAAAAAMBWNFwDAAAAAAAAAAAAAMRO/x8QLv381PSPzQAAAABJRU5ErkJggg==", "text/plain": [ "
" ] }, - "execution_count": 25, + "execution_count": 21, "metadata": {}, "output_type": "execute_result" } @@ -372,11 +385,18 @@ "source": [ "plot_multiple_components(components[::-1], names[::-1], save_path='plot/patch.pdf')" ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [] } ], "metadata": { "kernelspec": { - "display_name": "base", + "display_name": "mechir", "language": "python", "name": "python3" }, @@ -390,7 +410,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.12.2" + "version": "3.11.8" } }, "nbformat": 4, diff --git a/notebooks/exploration.ipynb b/notebooks/exploration.ipynb new file mode 100644 index 0000000..a690fb8 --- /dev/null +++ b/notebooks/exploration.ipynb @@ -0,0 +1,809 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": { + "id": "uxgwIW6br_8c" + }, + "source": [ + "# MechIR Activation Patching\n", + "\n", + "This notebook serves to walk you through a simple example of activation patching in `MechIR`. 
For more details on the specifics of this process check out our other notebook!\n", + "\n", + "NOTE: Our initial implementation is flexible enough to support loading a wide variety of Transformer-based IR models, but we have mainly tested the following models:\n", + "- TAS-B (bi-encoder) [[Hofstätter et al.]](https://arxiv.org/abs/2104.06967) [[HF model card]](https://huggingface.co/sebastian-hofstaetter/distilbert-dot-tas_b-b256-msmarco)\n", + "- monoELECTRA (cross-encoder) [[Pradeep et al.]](https://link.springer.com/chapter/10.1007/978-3-030-99736-6_44) [[HF model card]](https://huggingface.co/crystina-z/monoELECTRA_LCE_nneg31)" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "_F6EgFvTr_8d" + }, + "source": [ + "## Setup" + ] + }, + { + "cell_type": "code", + "execution_count": 16, + "metadata": { + "id": "6mLCPXacr_8d", + "outputId": "83496403-e207-4d18-e378-2ad133bf57dc" + }, + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...\n", + "To disable this warning, you can either:\n", + "\t- Avoid using `tokenizers` before the fork if possible\n", + "\t- Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "^C\n", + "\u001b[31mERROR: Operation cancelled by user\u001b[0m\u001b[31m\n", + "\u001b[0mNote: you may need to restart the kernel to use updated packages.\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "huggingface/tokenizers: The current process just got forked, after parallelism has already been used. 
Disabling parallelism to avoid deadlocks...\n", + "To disable this warning, you can either:\n", + "\t- Avoid using `tokenizers` before the fork if possible\n", + "\t- Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Note: you may need to restart the kernel to use updated packages.\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...\n", + "To disable this warning, you can either:\n", + "\t- Avoid using `tokenizers` before the fork if possible\n", + "\t- Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Note: you may need to restart the kernel to use updated packages.\n" + ] + } + ], + "source": [ + "%pip install -U -q git+https://github.com/Parry-Parry/MechIR.git@sae\n", + "%pip install -q transformer_lens\n", + "%pip install -q matplotlib seaborn plotly" + ] + }, + { + "cell_type": "code", + "execution_count": 1, + "metadata": { + "id": "m1zmJUINr_8e", + "outputId": "8cb4185e-d3e7-4864-800d-0d59454b22dd" + }, + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "/opt/anaconda3/envs/mechir/lib/python3.10/site-packages/tqdm/auto.py:21: TqdmWarning: IProgress not found. Please update jupyter and ipywidgets. See https://ipywidgets.readthedocs.io/en/stable/user_install.html\n", + " from .autonotebook import tqdm as notebook_tqdm\n", + "2025-04-08 12:01:05.170 WARNING streamlit.runtime.scriptrunner_utils.script_run_context: Thread 'MainThread': missing ScriptRunContext! 
This warning can be ignored when running in bare mode.\n" + ] + } + ], + "source": [ + "from mechir import Dot, Cat\n", + "from mechir.data import MechIRDataset, DotDataCollator, CatDataCollator\n", + "from mechir.perturb import perturbation\n", + "from mechir.plotting import plot_components\n", + "\n", + "import torch\n", + "from torch.utils.data import DataLoader" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "RxPp15vmr_8e" + }, + "source": [ + "## Load Model\n", + "\n", + "* `Dot` : A bi-encoder architecture with flexibility for different BERT architectures and pooling forms.\n", + "* `Cat` : A cross-encoder architecture with flexibility for different BERT architectures. Check out our `monoT5` class if you want to work with sequence-to-sequence models!" + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "metadata": { + "id": "5JNgLgOpr_8e", + "outputId": "a654dcb5-0f12-4204-c2d6-33d604fa45b1" + }, + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "WARNING:root:Support for BERT in TransformerLens is currently experimental, until such a time when it has feature parity with HookedTransformer and has been tested on real research tasks. Until then, backward compatibility is not guaranteed. Please see the docs for information on the limitations of the current implementation.\n", + "If using BERT for interpretability research, keep in mind that BERT has some significant architectural differences to GPT. 
For example, LayerNorms are applied *after* the attention and MLP components, meaning that the last LayerNorm in a block cannot be folded.\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Moving model to device: mps\n", + "Loaded pretrained model sebastian-hofstaetter/distilbert-dot-tas_b-b256-msmarco into HookedEncoder\n" + ] + } + ], + "source": [ + "model_name = \"sebastian-hofstaetter/distilbert-dot-tas_b-b256-msmarco\"\n", + "model = Dot(model_name)" + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "metadata": { + "id": "2SvOoLZUr_8f" + }, + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "WARNING:root:Support for BERT in TransformerLens is currently experimental, until such a time when it has feature parity with HookedTransformer and has been tested on real research tasks. Until then, backward compatibility is not guaranteed. Please see the docs for information on the limitations of the current implementation.\n", + "If using BERT for interpretability research, keep in mind that BERT has some significant architectural differences to GPT. For example, LayerNorms are applied *after* the attention and MLP components, meaning that the last LayerNorm in a block cannot be folded.\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Moving model to device: mps\n", + "Loaded pretrained model crystina-z/monoELECTRA_LCE_nneg31 into HookedEncoder\n" + ] + } + ], + "source": [ + "cat_model_name = \"crystina-z/monoELECTRA_LCE_nneg31\"\n", + "cat_model = Cat(cat_model_name)" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "gugeJ5hbr_8f" + }, + "source": [ + "## Load Dataset\n", + "\n", + "We recommend the use of `ir-datasets` as it is the easiest way to get started with MechIR. By default `MechIR` will load relevance judgements from these datasets however you can change this or even use your own documents and queries using the `MechDataset` class." 
+ ] + }, + { + "cell_type": "code", + "execution_count": 4, + "metadata": { + "id": "6s20bffar_8g", + "outputId": "2ea72393-f32a-4577-e66a-fa3063801bed" + }, + "outputs": [ + { + "data": { + "text/html": [ + "
\n", + "\n", + "\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + "
query_iddoc_idrelevanceiteration
01123910
11150210
21446210
31456910
41547210
\n", + "
" + ], + "text/plain": [ + " query_id doc_id relevance iteration\n", + "0 1 1239 1 0\n", + "1 1 1502 1 0\n", + "2 1 4462 1 0\n", + "3 1 4569 1 0\n", + "4 1 5472 1 0" + ] + }, + "execution_count": 4, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "# Load smallest dataset for quick testing\n", + "dataset = MechIRDataset(\"vaswani\")\n", + "dataset.pairs.head()" + ] + }, + { + "cell_type": "code", + "execution_count": 5, + "metadata": { + "id": "KDhYqYk4r_8g", + "outputId": "71a93c7d-9de3-462c-f4ba-028b2a0719b6" + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Total queries in dataset: 93\n", + "\n", + "----------- Examples of queries: -----------\n", + "\n", + "MEASUREMENT OF DIELECTRIC CONSTANT OF LIQUIDS BY THE USE OF MICROWAVE TECHNIQUES\n", + "\n", + "MATHEMATICAL ANALYSIS AND DESIGN DETAILS OF WAVEGUIDE FED MICROWAVE RADIATIONS\n", + "\n", + "USE OF DIGITAL COMPUTERS IN THE DESIGN OF BAND PASS FILTERS HAVING GIVEN PHASE AND ATTENUATION CHARACTERISTICS\n", + "\n" + ] + } + ], + "source": [ + "# Print examples of queries\n", + "print(\"Total queries in dataset:\", len(dataset.queries.items()))\n", + "print(\"\\n----------- Examples of queries: -----------\\n\")\n", + "example_queries = list(dataset.queries.values())[:3]\n", + "for query in example_queries:\n", + " print(query)" + ] + }, + { + "cell_type": "code", + "execution_count": 6, + "metadata": { + "id": "DPaJz6i7r_8g", + "outputId": "bf0f8323-8973-48c3-d409-cba9943cd24c" + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Total documents in dataset: 11429\n", + "Minimum Length (in words): 2\n", + "Maximum Length (in words): 269\n", + "Average Length (in words): 41.93\n", + "\n", + "----------- Examples of documents: -----------\n", + "\n", + "compact memories have flexible capacities a digital data storage\n", + "system with capacity up to bits and random and or sequential access\n", + "is described\n", + 
"\n", + "an electronic analogue computer for solving systems of linear equations\n", + "mathematical derivation of the operating principle and stability\n", + "conditions for a computer consisting of amplifiers\n", + "\n", + "electronic coordinate transformer circuit details are given for\n", + "the construction of an electronic calculating unit which enables\n", + "the polar coordinates of a vector modulus and cosine or sine of the\n", + "argument to be derived from those of a rectangular system of axes\n", + "\n" + ] + } + ], + "source": [ + "# Calculate document stats\n", + "doc_lengths = [len(doc.split()) for doc in dataset.docs.values()]\n", + "\n", + "# Print examples of documents\n", + "print(\"Total documents in dataset:\", len(dataset.docs.items()))\n", + "print(f\"Minimum Length (in words): {min(doc_lengths)}\")\n", + "print(f\"Maximum Length (in words): {max(doc_lengths)}\")\n", + "print(f\"Average Length (in words): {(sum(doc_lengths) / len(doc_lengths) if doc_lengths else 0):.2f}\")\n", + "print(\"\\n----------- Examples of documents: -----------\\n\")\n", + "example_docs = list(dataset.docs.values())[:3]\n", + "for doc in example_docs:\n", + " print(doc)" + ] + }, + { + "cell_type": "code", + "execution_count": 7, + "metadata": { + "id": "5mePEZwlr_8h", + "outputId": "c86a1970-a6d3-4356-ae45-f6641ba06c3e" + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Query: MEASUREMENT OF DIELECTRIC CONSTANT OF LIQUIDS BY THE USE OF MICROWAVE TECHNIQUES\n", + "\n", + "-------------------------------\n", + "Document:\n", + "broadband millimetre wave paramagnetic resonance spectrometer the\n", + "specimen and waveguide which can be cooled by means of a cryostat\n", + "are placed between close pole pieces giving high uniform magnetic\n", + "fields design details and some measurements on zero field splittings\n", + "are given\n", + "\n", + "Document:\n", + "microwave measurements of dielectric absorption in dilute solutions\n", + 
"\n", + "Document:\n", + "dielectric properties of ice at very low frequencies and the influence\n", + "of a polarizing field measurements at frequencies down to are reported\n", + "the loss factor passes through a low frequency maximum which is distinguishable\n", + "from that associated with the dipole dispersion by its different\n", + "temperature dependence the effect of impurities is to shift the\n", + "maximum towards higher frequencies application of a unidirectional\n", + "field does not affect the permittivity of the pure crystals but eliminates\n", + "the low frequency dispersion when impurities are present the\n", + "observations are consistent with macdonalds theory\n", + "\n" + ] + } + ], + "source": [ + "# Print example of one query and relevant documents\n", + "query_rel_doc_ex_df = dataset.pairs.head(3)[[\"query_id\", \"doc_id\"]]\n", + "query_id = query_rel_doc_ex_df[\"query_id\"].unique()[0]\n", + "doc_ids = query_rel_doc_ex_df[\"doc_id\"]\n", + "print(f\"Query: {dataset.queries[query_id]}\")\n", + "print(\"-------------------------------\")\n", + "for doc_id in doc_ids:\n", + " print(f\"Document:\\n{dataset.docs[doc_id]}\")" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "yvY9hwtRr_8h" + }, + "source": [ + "### Paired Dataset Creation\n", + "\n", + "Activation patching relies on pairs of inputs, consisting of one *perturbed* input and one *baseline* input, where the *perturbed* input is constructed by applying some function to modify an *original* input (e.g., inserting a query term to the end of a document) and the *baseline* input is a padded variant of the *original* input to maintain token lengths between the pairs.\n", + "\n", + "In the main demo, we show one possible type of perturbation (appending a query term to the end of a document), but there are several other possible types of functions that could be use to generate the activation patching input pairs depending on what behavior you are trying to investigate. 
In this section, we discuss possible general perturbation methods, and describe some specific perturbations that we define aligning with IR axioms." + ] + }, + { + "cell_type": "code", + "execution_count": 8, + "metadata": { + "id": "FlzOPo-4r_8h" + }, + "outputs": [], + "source": [ + "# Helper function just to print\n", + "def pretty_print_triplets(batch, tokenizer):\n", + " \"\"\"\n", + " Pretty prints triplets of queries, documents, and their corresponding perturbed documents from a batch.\n", + "\n", + " Args:\n", + " batch (dict): A dictionary containing 'queries', 'documents', and 'perturbed_documents' from a DataLoader.\n", + " tokenizer: The tokenizer used to decode the input IDs.\n", + " \"\"\"\n", + " # Get the queries, documents, and perturbed documents from the batch\n", + " queries = batch[\"queries\"]\n", + " documents = batch[\"documents\"]\n", + " perturbed_documents = batch[\"perturbed_documents\"]\n", + "\n", + " # Loop through the batch size\n", + " for i in range(len(documents[\"input_ids\"])):\n", + " # Get the input IDs\n", + " query_ids = queries[\"input_ids\"][i]\n", + " original_ids = documents[\"input_ids\"][i]\n", + " perturbed_ids = perturbed_documents[\"input_ids\"][i]\n", + "\n", + " # Decode the input IDs to text\n", + " query_decoded = tokenizer.decode(query_ids.tolist(), skip_special_tokens=False).replace(\"[PAD]\", \"\").strip()\n", + " original_doc_decoded = tokenizer.decode(original_ids.tolist(), skip_special_tokens=False).replace(\"[PAD]\", \"\").strip()\n", + " perturbed_doc_decoded = tokenizer.decode(perturbed_ids.tolist(), skip_special_tokens=False).replace(\"[PAD]\", \"\").strip()\n", + "\n", + " # Pretty print\n", + " print(f\"Triplet {i + 1}:\")\n", + " print(\"Query:\", query_decoded)\n", + " print(\"Original Document:\", original_doc_decoded)\n", + " print(\"Perturbed Document:\", perturbed_doc_decoded)\n", + " print(\"=\" * 50) # Separator for clarity" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": 
"6h1rtEMatoXR" + }, + "source": [ + "## Make your own" + ] + }, + { + "cell_type": "code", + "execution_count": 9, + "metadata": { + "id": "M7Ht999zr_8h" + }, + "outputs": [], + "source": [ + "@perturbation(perturb_type=\"replace\")\n", + "def my_perturbation(text: str, query: str = None) -> str:\n", + " \"\"\"\n", + " A simple perturbation function that replaces the first word of the text with 'REPLACED'.\n", + "\n", + " Args:\n", + " text (str): The input text to be perturbed.\n", + "\n", + " Returns:\n", + " str: The perturbed text.\n", + " \"\"\"\n", + " words = text.split()\n", + " if words:\n", + " words[0] = \"REPLACED\"\n", + " return \" \".join(words)" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "1KEsmNqcuKuk" + }, + "source": [ + "# Using Perturbations\n", + "\n", + "Once you have a perturbation and a dataset, we provide collate functions which automatically apply your perturbation to your dataset and allow batching for more efficient experiments. A standard torch dataloader is all you need but remember that different architectures need different input formats so make sure to use the correct collate function for your model." 
+ ] + }, + { + "cell_type": "code", + "execution_count": 10, + "metadata": { + "id": "DjASsJm5r_8i" + }, + "outputs": [], + "source": [ + "data_collator = DotDataCollator(model.tokenizer, my_perturbation, perturb_type=\"replace\")\n", + "dataloader = DataLoader(dataset, batch_size=1, collate_fn=data_collator)" + ] + }, + { + "cell_type": "code", + "execution_count": 11, + "metadata": { + "id": "I1xQgcD4r_8i" + }, + "outputs": [], + "source": [ + "cat_data_collator = CatDataCollator(cat_model.tokenizer, my_perturbation, perturb_type=\"replace\")\n", + "cat_dataloader = DataLoader(dataset, batch_size=1, collate_fn=cat_data_collator)" + ] + }, + { + "cell_type": "code", + "execution_count": 12, + "metadata": { + "id": "cbOf8RGBr_8i", + "outputId": "6fc2f9ca-cd05-4a73-e9dd-4009b0b1e014" + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Triplet 1:\n", + "Query: [CLS] measurement of dielectric constant of liquids by the use of microwave techniques [SEP]\n", + "Original Document: [CLS] broadband millimetre wave paramagnetic resonance spectrometer the specimen and waveguide which can be cooled by means of a cryostat are placed between close pole pieces giving high uniform magnetic fields design details and some measurements on zero field splittings are given [SEP]\n", + "Perturbed Document: [CLS] replaced millimetre wave paramagnetic resonance spectrometer the specimen and waveguide which can be cooled by means of a cryostat are placed between close pole pieces giving high uniform magnetic fields design details and some measurements on zero field splittings are given [SEP]\n", + "==================================================\n", + "Triplet 1:\n", + "Query: [CLS] measurement of dielectric constant of liquids by the use of microwave techniques [SEP]\n", + "Original Document: [CLS] microwave measurements of dielectric absorption in dilute solutions [SEP]\n", + "Perturbed Document: [CLS] replaced measurements of dielectric absorption in 
dilute solutions [SEP]\n", + "==================================================\n" + ] + } + ], + "source": [ + "for i, batch in enumerate(dataloader):\n", + " pretty_print_triplets(batch, model.tokenizer)\n", + "\n", + " # stop after 2 batches\n", + " if i == 1:\n", + " break" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "IuPZDLP4r_8i" + }, + "source": [ + "### Verify Difference in Performance on Perturbed Pairs\n", + "\n", + "Before we can finalize our paired dataset and proceed to the patching experiments, there are a couple of things to check:\n", + "- Does the chosen perturbation even have an effect on model behavior?\n", + "- If yes, what is that effect? (i.e., Do the *baseline* or *perturbed* inputs have a higher relevance score on average?)\n", + "\n", + "So first, let's calculate the performances of the three toy perturbations we defined earlier (prepend, append, replace) and plot their distributions." + ] + }, + { + "cell_type": "code", + "execution_count": 13, + "metadata": { + "id": "GgPaKaKmr_8i" + }, + "outputs": [], + "source": [ + "# Helper function for plotting difference in performance\n", + "import matplotlib.pyplot as plt\n", + "from matplotlib.ticker import MaxNLocator\n", + "def plot_score_dists(baseline_scores, perturbed_scores, type=\"hist\"):\n", + " plt.figure(figsize=(8, 6))\n", + "\n", + " if type == \"hist\":\n", + " plt.hist(baseline_scores, label='Baseline', color='blue')\n", + " plt.hist(perturbed_scores, label='Perturbed', color='orange')\n", + " plt.gca().yaxis.set_major_locator(MaxNLocator(integer=True))\n", + " plt.ylabel('Frequency')\n", + " elif type == \"box\":\n", + " plt.boxplot([baseline_scores, perturbed_scores], labels=['Baseline', 'Perturbed'])\n", + " plt.ylabel('Scores')\n", + "\n", + " plt.xlabel('Scores')\n", + " plt.title('Distribution of Baseline vs Perturbed Scores')\n", + " plt.legend()\n", + "\n", + " plt.show()\n", + "\n", + " return" + ] + }, + { + "cell_type": "code", + "execution_count": 14, + 
"metadata": { + "id": "UYvnhDmqr_8j" + }, + "outputs": [], + "source": [ + "baseline_performance, perturbed_performance = [], []\n", + "for i, batch in enumerate(cat_dataloader):\n", + " # Get the queries, documents, and perturbed documents from the batch\n", + " sequences = batch[\"sequences\"]\n", + " perturbed_sequences = batch[\"perturbed_sequences\"]\n", + "\n", + " baseline_scores = model.forward(**sequences) # [batch_size x 1]\n", + " perturbed_scores = model.forward(**perturbed_sequences) # [batch_size x 1]\n", + "\n", + " baseline_performance += baseline_scores.flatten().tolist()\n", + " perturbed_performance += perturbed_scores.flatten().tolist()\n", + "\n", + " # stop after 2 batches\n", + " if i == 1:\n", + " break" + ] + }, + { + "cell_type": "code", + "execution_count": 15, + "metadata": { + "id": "UE3pic6kr_8j" + }, + "outputs": [ + { + "data": { + "image/png": "iVBORw0KGgoAAAANSUhEUgAAAsAAAAIjCAYAAAAN/63DAAAAOnRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjEwLjEsIGh0dHBzOi8vbWF0cGxvdGxpYi5vcmcvc2/+5QAAAAlwSFlzAAAPYQAAD2EBqD+naQAAS09JREFUeJzt3QucjPX////XsqwzIaySM5EQSkoHkWN9VDpSKFE+6UAiJSUVUUgnHT6hD6X6JJU+KdE5n5xyCClSlGMKOZ/md3u+//9rvrNrrbVmd2b3/bjfbmPNzLXXdc1cMzvPeV2v630lhEKhkAEAAACeyBPrFQAAAACyEwEYAAAAXiEAAwAAwCsEYAAAAHiFAAwAAACvEIABAADgFQIwAAAAvEIABgAAgFcIwAAAAPAKARjIQg899JAlJCRky7IuvPBCdwl89tlnbtn/+c9/smX5Xbt2tUqVKlk827Fjh918881Wrlw599zcddddllONHz/ePYZffvnliK8BxB+9Ry655JIsX45eF3p96HUC4HAEYOAYA0dwKVCggJUvX95atWplY8aMsb///jsqy1m3bp0LzgsXLrR4E8/rlhGPPfaY2449e/a0f//733bDDTekG1RSb+/q1avbPffcY3/++We2rrfvgi9zwSVfvnxWpUoV69y5s/38889RXdZrr71mo0ePNp8oLN94441WtWpV9zrXF8Tzzz/fHnzwwVivGpBlErNu1kDu9PDDD1vlypVt//79tmHDBvfhrEriyJEj7b333rO6deuGpx04cKDde++9xxwyBw8e7AJY/fr1M/x7H3/8sWW19NbtpZdeskOHDlk8mzVrlp199tkZ/mDXY7z77rvd//fs2WPz58934ejzzz+3OXPmWLzJjtdALN1xxx125plnuvfeggUL7MUXX7QPPvjAlixZ4r6MRisAf//99zl678CxWLlypXtOCxYsaDfddJN7b69fv949v48//rh7vwO5EQEYOEZt2rSxRo0aha8PGDDABSvt1vzHP/5hy5cvdx8mkpiY6C5ZadeuXVaoUCHLnz+/xZKqcvFu06ZNVrt27QxPf9JJJ9n1118fvq72iSJFitgTTzxhP/30k6
sIx5NYvway2nnnnWdXXnml+78qljVq1HCheMKECe59eDx27txphQsXtqx+n8abUaNGudYg7dWpWLHiYe+X7JTV2wCIRAsEEAUXXXSRPfDAA/brr7/axIkT0+0BnjFjhjVt2tRKlCjhwlTNmjXtvvvuc/epmqxqTPABH+zyDfr41N9Zp04dV4nULkp9oAa/e6T+z4MHD7pptFtTHy4K6WvXrk0xjao+6uFNLXKeR1u3tHqA9YGmCmqFChUsKSnJPVaFx1AolGI6zadXr142depU9/g07WmnnWbTp0/P0POvD+pu3bpZ2bJl3S7cevXquVCUehf66tWrXcUwWPfI/tmM0vMokV9sFi9e7B6/dssHu5BVTduyZUuK31WbjCqLep70GMuUKWMXX3yxq7ZF+vbbb61169ZWvHhxt40vuOAC+/rrrzPdB/7mm2/ao48+aieffLJbv+bNm7vKX2qZWe7GjRvdc5FWpXDFihVu+c8884y7rsqtptMXB61HqVKl3HtB74nMvu9E2zXw4YcfuqCs13rRokWtXbt2tnTp0hS/p22l996qVausbdu2brpOnTq5506vD72Pg9dI8JpOq+c68jnWz0B679PIar32MOh50JeyKVOmHPb4tm7d6l4vwfunWrVqriqbek+LptNj0nbT35UuXbq42zJCz4FeF6nDr+j1mZqeX70u9JwVK1bM/U1Q1TzSW2+9ZQ0bNnSFgNKlS7svkb///nuGtoHo8WlPi/4G6PnR+/qWW26xv/76K8U85s2b51rQtAwtS3vm9L4DMoIKMBAl6ifVh5w+2Lp3757mNPogVqVYbRJqpdCHmoJIEDJq1arlbh80aJD16NHDfZDLOeecE56HQpWq0Ndee637YNGHQ3oUfPQB3b9/fxcU9cHSokULV/EJKtUZkZF1i6SQq7D96aefunCqD/uPPvrI9dDqw1CVp0hfffWVCwH//Oc/3Yeh+qo7dOhga9ascUHpSHbv3u0Ch55HhWh9COoDWB+wCgF33nmnW3f1/Pbu3dt92AdtDSeeeGK6j1mB7Y8//gi3QHz33Xeu1UWhRssJKMCpF1VfDBR+tZ21e14///e//4W/BN16663uoEStp0KPtqUet/YaNGjQwE2jvQnavgoQatXIkyePjRs3zoW9L7/80s466yw7VsOGDXPz6du3r23bts2GDx/uwoYCbyCzy9XrT4FIITt1a8kbb7xhefPmtauuuir8hXDo0KGukq75bd++3YUYfQHQF4FjpfAkwetD21jhT6FIQVFV1+eff96FbG27yC9oBw4ccNPpPn0pU0jVttPz89tvv4VfnwppmZHe+1R7D6655hr3etD66nnWc6QvfMHzoHXX86r3isLfKaecYt98842rdKtFIehT1vusffv27nWk+em1/s4777j5ZoSC7yeffOK2f/CF4kj0JUABU8FU66GwredV692xY8fwNHofKBhrW+sL0lNPPeX+xmla/U5620D0eIP5qMKvLzj6EqXf13y0t0l/y1q2bOnew2oz03z15SStLxJAmkIAMmTcuHEqW4bmzp17xGmKFy8eOuOMM8LXH3zwQfc7gVGjRrnrmzdvPuI8NH9No+WldsEFF7j7xo4dm+Z9ugQ+/fRTN+1JJ50U2r59e/j2N998093+1FNPhW+rWLFiqEuXLkedZ3rrpt/XfAJTp0510z7yyCMpprvyyitDCQkJoZUrV4Zv03T58+dPcduiRYvc7U8//XQoPaNHj3bTTZw4MXzbvn37Qk2aNAkVKVIkxWPX+rVr1y7d+UVOq/mmvpx77rmhP/74I8W0u3btOuz3X3/9dTf9F198keL1cdtttx1xmYcOHQpVr1491KpVK/f/yPlXrlw5dPHFFx/2ely9evVRXwO1atUK7d27N3y7tr1uX7JkyTEvNy0vvPBCivkFateuHbrooovC1+vVq5fh5z9S8DheeeUV995Zt25d6IMPPghVqlTJvZb0uvz7779DJUqUCHXv3j
3F727YsME975G367Wq+d17772HLUvrF/k6Tu/5jlw3/czI+zR4Xb399tvh27Zt2xZKTk5O8bdjyJAhocKFC4d+/PHHFL+vdc6bN29ozZo1Kd5nw4cPD09z4MCB0HnnnXfE92qk77//PlSwYEE3bf369UN33nmnm+fOnTtTTLd169ZQ0aJFQ40bNw7t3r07xX3Ba0bvuzJlyoTq1KmTYppp06a5+Q8aNOio2+DLL790t0+aNCnF7dOnT09x+zvvvHPUv8dAemiBAKJI1aL0RoMIqh/vvvtupg8YU9VYlZGM0pHyqqgG1EOZnJxs//3vfy0raf6q/qmCE0nVV2Ve7UqNpKq0jkIPqEquXaxHO8pfy1Hl7rrrrgvfpgqRlqveRh2wllmNGzd21V1dpk2b5qrpquqqsq3KcyCykq5KsarGOthOItsbtP1VddXBhGlRVV7VQVXTVEHUfHRRK4naFr744otMvW70eonsDw6q98Fze7zLveKKK1wbhCq+AR1ItmzZMlfpjHz8ev60rMxQ9VEVPx3wptYGrZ9aXdSTr22kir9eB8H666LXoLaj9kSkptFAskp671Ot/+WXXx6+rte53qeqcOrAWtFeDG2nE044IcXj0ftEbU3aJsHrX8995GPRY7799tsztJ6q5mr7q0qtCqqqtZdddpmrWOvA1oCeX/1tU7VVbQmRgj0cquarMqu9OJHTaFudeuqprr3kaNtAj1utHKqERz5u7ZnQ39dgOwZ/S/W+1J4a4FjRAgFEkQJXWn1zAYWBl19+2e0C1geJwoXCg0Kpdjln9MCsYznYKfWBWvqwUi9hZvpfj4X6KPVBHxm+Rbtog/sjaRdvavrwT933l9Zy9BhTP39HWs6xUG+hAkfkB7n6mLW9tB2DkKFh0dTbOnny5MMOHNIu9YBaD7RrWj2d+kBX76OCj3qHJQiG6e2+1vz0vByL1M9t8PvBc3u8y9XzpNey2iCGDBniblMYVjDT6zugFhrtrtfBa+qRVb+xWociR05Jj9pvFAoV8LRMbeOgFzt4DEfaja+QGUm/p3aYrJLe+1Tvv9THBug5Eb0v9YVOj0e95Udq0wleZ3p96wtt6lYNvU4zSstW+4iCtb60KFTqtapWJ7X66D0QtJtoux1J8F5La9kKwGrTONo20OPWa+1If0eDx632ELVI6X2ndhW1QSm460ucvnwAR0MABqJEfYP6w60PtyNRpVCVG1UxVA1R75yCgj601TusD/ajOZa+3Yw60sk69IGYkXWKhiMtJ/UBc7GmoCfajkEAvvrqq11/pvqb1eusMKKKqQJeZOVU0ynAqUdT23vEiBGuV1V9i+oXDabV7UcaAi8zPalHe26jsVz1uqriqWqi5qEwrOdKQTWg3mkFKe0B0ePXlwiFl7Fjx7ovhUdz+umnp/hCEil4DApywYGKkVKPxqKQlNEvnUd7j2TF+1SPR1XQfv36pXl/EJijSa8TPce6NGnSxJo1a2aTJk064nN+vNLaBnrcCr9ablqCLwTBSX7UY//++++74wu0h+DJJ590t2W2dxv+IAADUaIPXtFBHenRH3wFA110QJVOznD//fe7UKwPmmifOS717maFHh0wFll1U2UvraPGVdEJqpNyLOsWHFyj3aaRVeAffvghfH80aD6qlOmDM/LDNNrLiTxwJ6j2B1XUmTNnukqUKpSBI+3mV7VOu4h1UTVLB7+ptUIBOGgBUbUyq0JHWqKxXFXfdPBS0Abx448/pjk0WcmSJV1Q1kXPoUKxDo7LSADOyGNQeDqe5+5Ir/Gg+p36fZKZPQx6/+l9GLksPV8SHKinx6Pn52iPRa9vvf40bWTo0wgcxyMY6lEH3AXrE7S2HOlLfvBe07JTV+J1W0bei1qO/m6ce+65GfoSoVYjXfQe0mgUOrhTe2KO9/WE3I8eYCAKdAS1dv1qd2EwlE9a0jqDWFBx27t3r/sZjIOZ0WGMjubVV19N0Zesqok+1BS4Ij90VDXZt29f+D
btBk09XNqxrJt276s6FgyBFVDFTx/8kcs/HlqO+iYj+08VUp9++mkXCLSrNJpUbRINtRZZXU1dqU59NjE9F5HtEEFYU5tIsO3VFqFtoSPig4AdafPmzZYVorFc9WTqy58qvwog2v2vUBwp9bBw2j4KU8HjPx5atgK8vlCm1ROa0edOr/HU2ykyAAa9t8E21Wgfx0o94NoLENBoGHqf6m9BUL3W3oLZs2e7ymZqev8FX8T0+tf/NdpF5Hrp9Z8RGuEjrecrOEYgaGfQiAv6IquRHdTnHil47Ss06zWtin7kNlW/v0Y6UQvR0ehxa/2DVppIepzB3x598Uz9nkv9txRIDxVg4Bjpj7mqi/pjrCF+FH51gIiqGzoTXOoDRCKpB1IfoPog0PSqAD733HOuD05DAQUftAoT+hDRB44+kHUQT+SwW8dCFTfNWxU3ra+CmUJH5FBtqpYoGGuXvT6AtJta4xlHHpR2rOt26aWXul2oqm6rr1GBUbu9tftbY5umnndmqU/xhRdecMOeadxVVdD0WDRckh5r6h7kY6EhqIJxnfXlYNGiRW5Z2q0ftD8odKmKqZ5JBQn1fupxRo5NK/oSou2s/mE9Fwp/qnTNnTvX7bYVVbDVFqAvBzo4SdtM89N6aA+BlhUE8GiK1nLV466DqfSaViCNHPJKNPSbejUVuPW61EFTwbBwx0vrqBConmJV1dWSod3lGkZP7UaqKKb+MpYWrZu+TPXp08cN5aXtpNeynhdVGlXV1hdZrb+CfhBEj4XaFzQ0oLa9DjZ75ZVX3HtTw6EF1E6jvycaNlGvba2XDvrTWe/0nOk9pdeh1k2PTccU6LZgTOG0Qnxa1IKj9416tYO9QjpwU4FcjzE4I56eX3151d8KPS/qtVVVXO8JDdmmgxF18Knmp9ePvnjqgMRgGDS9LzUM4dHo97QnQUFb7TQK3pqv9qjoADnNS+8hLU+vMx1MqL8len/poD2tp74UAEeV7hgRAA4bBim4aNiucuXKuSGiNKxU5HBbRxoGbebMmaH27duHypcv735fP6+77rrDhjp699133RBSiYmJKYYy0vBKp512Wprrd6QhsDQc14ABA9zwRBruSMM8/frrr4f9/pNPPumGTEtKSnJDfc2bN++weaa3bqmHQRMNTdW7d2/3OPPly+eG2hoxYkSKobZE80lreLAjDc+W2saNG0M33nhjqHTp0u55Pf3009Mc/ul4hkHLkyePew61vSKHa5PffvstdPnll7thuDTk1lVXXeWG6tLv6TUgGobsnnvucUOBaTgpDXGl/z/33HOHLfu7774LXXHFFaFSpUq57aF1ufrqq93rJzPDoL311lsp5q/fSWuIrIwsNz16DwRDakUOSxfQkHhnnXWWe5403amnnhp69NFH3fBZ6TnS4zjStBrOTduhQIECoapVq4a6du3qXs8Bvab0/Kdlx44doY4dO7p11DIjX9OrVq0KtWjRwj03ZcuWDd13332hGTNmpDkM2pHep8Fr8KOPPgrVrVvXzUvPQ1qPTe8fvXerVavmXtd6fZ9zzjmhJ554IsVztmXLltANN9wQKlasmHvc+r+2ZUaGQfv666/de09Dl+l39T495ZRT3HOmx5vae++959ZB20/L0/bU35hIb7zxhhvSTY+tZMmSoU6dOrn3SKT0toG8+OKLoYYNG7rl6P2i93S/fv3c+0oWLFjg3otaVy1H781LLrkkxXYG0pOgf44ekwEAAIDcgR5gAAAAeIUADAAAAK8QgAEAAOAVAjAAAAC8QgAGAACAVwjAAAAA8AonwsgAnWJVZ+7RgPrRPk0tAAAAjp9G9tVJUXSGTZ3gJz0E4AxQ+K1QoUKsVwMAAABHsXbtWnfmzfQQgDMgOJWqnlCdZhEAAADxZfv27a5gGeS29BCAMyBoe1D4JQADAADEr4y0q3IQHAAAALxCAAYAAIBXCMAAAADwCj3AURx648CBA3bw4MFYrwqiIG/evJaYmMiwdwAA5E
IE4CjYt2+frV+/3nbt2hXrVUEUFSpUyJKTky1//vyxXhUAABBFBOAonCRj9erVrmKogZcVlqga5vxqvr7UbN682W3b6tWrH3VAbQAAkHMQgI+TgpJCsMadU8UQuUPBggUtX7589uuvv7ptXKBAgVivEgAAiBLKWlFChTD3YZsCAJA78QkPAAAArxCAAQAA4BUCcBbSsXDZecmJKlWqZKNHjw5f1wGEU6dOjek6AQCA3I0A7LGuXbu6wBlcSpUqZa1bt7bFixfHbJ00nFybNm1itnwAAJD7EYA9p8Cr0KnLzJkz3ckfLrnkkpitT7ly5SwpKSlmywcAALkfAdhzCpsKnbrUr1/f7r33Xlu7dq0bA1f69+9vNWrUcEO8ValSxR544AHbv39/+PcXLVpkzZo1s6JFi1qxYsWsYcOGNm/evPD9X331lZ133nluWDENFXfHHXfYzp07j7g+kS0Qv/zyi7s+ZcoUtwytQ7169Wz27NkpfudYlwEAAPxGAEbYjh07bOLEiVatWjXXDiEKtuPHj7dly5bZU089ZS+99JKNGjUq/DudOnWyk08+2ebOnWvz5893AVrj58qqVatchblDhw6ureKNN95wYbVXr17HtF7333+/9e3b1xYuXOjC+HXXXedOOx3NZQAAAI+EcFTbtm0L6anSz9R2794dWrZsmfuZmp7d7Lwcqy5duoTy5s0bKly4sLvoMSYnJ4fmz59/xN8ZMWJEqGHDhuHrRYsWDY0fPz7Nabt16xbq0aNHitu+/PLLUJ48ecLPV8WKFUOjRo2KeM4s9M4777j/r1692l1/+eWXw/cvXbrU3bZ8+fIMLyOz0tu2AAAg5+S11KgAe06tBaqs6jJnzhxr1aqVOwhNZ0ATVVTPPfdc1yJRpEgRGzhwoK1Zsyb8+3369LGbb77ZWrRoYcOGDXMV2cj2CFWP9XvBRfMPTh+dUXXr1g3/Pzk52f3ctGlTVJcBAAD8QQD2XOHChV3Lgy5nnnmmvfzyy65/Vq0O6rVVi0Pbtm1t2rRp9t1337l2BJ0aOPDQQw/Z0qVLrV27djZr1iyrXbu2vfPOO+GWiltuuSUcsHVRYP3pp5+satWqGV7HoKVC1BMsCrjRXAYAAPBHYqxXAPFFAVOnAN69e7d98803VrFiRRd6A0FlOJL6cnXp3bu3688dN26cXX755dagQQPXO6xwnVWyYxkAACB3oQLsub1799qGDRvcZfny5Xb77be7quqll15q1atXd+0OkydPdq0NY8aMCVd3RSFZB5t99tlnLhh//fXX7mC4WrVqhUeQUIjWNKrMqir77rvvRvUAtexYBgAAueGEWZyg6/9QAc5C7pCuODd9+vRwX61GfDj11FPtrbfesgsvvNDdpqquwqSCstocNAya2h4kb968tmXLFuvcubNt3LjRSpcubVdccYUNHjw43Lv7+eefuwqyhinTMW5qS7jmmmuitv7ZsQwAALJKaFKcJ8Xj8ZqZdYzPMJSgI+FivRLxbvv27Va8eHHbtm2bG+s20p49e9zBVpUrV7YCBQrEbB0RfWxbAEBWUpU0Vwdgy94AnF5eS40WCAAAAHiFAAwAAACvEIABAADgFQIwAAAAvEIABgAAgFcIwAAAAPAKARgAAABeIQADAADAKwRgAAAAeIVTIWel17L57C5xerrB49G1a1fbunWrTZ06Narz/eyzz6xZs2b2119/WYkSJaI6bwAAEN+oAHtM4TIhIcFd8ufPb9WqVbOHH37YDhw4cFzzvOyyy6K6ngAAANFEBdhzrVu3tnHjxtnevXvtv//9r912222WL18+GzBgwDHN5+DBgy5IR0u05wcAABCgAuy5pKQkK1eunFWsWNF69uxpLVq0sPfee88F4r59+9pJJ51khQsXtsaNG7u2gcD48eNd64CmrV27tpvPTTfdZBMmTLB33303XFnW7+ii/6uVIbBw4UJ32y+//HLE+a1ZsyY8/eDBg+3EE0+0YsWK2a233mr79u
0L33fo0CEbOnSoVa5c2QoWLGj16tWz//znPykep8J9jRo13P1qfQiWCwAA/EMFGCkoIG7ZssV69eply5Yts8mTJ1v58uXtnXfecdXiJUuWWPXq1d20u3btsscff9xefvllK1WqlCUnJ9vu3btt+/btrqosJUuWtG+++SZDy049vzJlyrjbZ86caQUKFHBBWsH1xhtvdPc/+uij7n6F34kTJ9rYsWPdun3xxRd2/fXXu8B8wQUX2Nq1a+2KK65w1e0ePXrYvHnz7O67786y5xAAAMQ3AjCcUCjkguZHH31k1113nQuwqsAq/IqqwdOnT3e3P/bYY+62/fv323PPPecqrpEBWtVjVZWPVVrzE/Unv/LKK1aoUCE77bTTXJ/yPffcY0OGDHG/o/X55JNPrEmTJm76KlWq2FdffWUvvPCCC8DPP/+8Va1a1Z588kl3f82aNV2QV9gGAAD+IQB7btq0aVakSBEXJNVK0LFjR7vyyitdS4JaBiIp2KryGhlM69atG7V1OdL8FIgVfgMKujt27HCVXf1U5fjiiy9O8TtqkTjjjDPc/5cvX+5aOCIFYRkAAPiHAOw59cOqQqrwqWpvYmKivfHGG5Y3b16bP3+++xlJYTmy2puRA9Xy5MkTrjIHFLhTy+j8IikAywcffOD6lSOpjxgAACA1ArDndICbhj+LpMqpRmHYtGmTnXfeecc0PwVp/W4k9eLK+vXr7YQTTggfBJdRixYtcr3FCsjyv//9zwXxChUquB7j4IA5tTukpVatWu7gukiaBwAA8BOjQOAwan3o1KmTde7c2aZMmWKrV6+2OXPmuIPNVGlNT6VKlWzx4sW2YsUK++OPP1ylVwFbYfWhhx6yn376yc0j6MfNCLUzdOvWzR2Up9EcHnzwQXeQnirLRYsWdf3JvXv3diNQrFq1yhYsWGBPP/20uy4aNULLVd+w1uu1115zLR4AAMBPVICzUg4+M5sOdnvkkUfcaAm///67lS5d2s4++2y75JJL0v297t27u9EaGjVq5NoTPv30U7vwwgvt9ddfd8Osqcf3zDPPdPO+6qqrMrQuzZs3d6M7nH/++a4PWQfpKUwHdDCcqswK6D///LMbTq1BgwZ23333uftPOeUUe/vtt11IVjA+66yz3IFzGrYNAAD4JyEU2ZiZzTRc1YgRI1yvqXaPa6ityLOIadVU7XvppZfcGLLnnnuu61cNhuEKKo6//vprivkqCN17773h66pIagisuXPnuqB0++23W79+/TK8nhrWq3jx4rZt2zY3Dm2kPXv2uAqpxqDVUF3IPdi2AICspMNeQpNy+UmfOmZfzEwvr8VVC8TOnTvdEf7PPvtsmvcPHz7cxowZ48Z3/fbbb12/aqtWrVwwiaRhsRSgg4sCbuST0bJlS3eiBwVtBW5VD1988cUsf3wAAACIPzFtgWjTpo27pEXV39GjR9vAgQOtffv27rZXX33VypYta1OnTrVrr702PK36QI807uykSZNcD6nGkdUBWhpHVgdgjRw50p0UAQAAAH6J24PgtOt5w4YN7tS8AZW1NZ7r7NmzU0w7bNgwNz6tRi9QhffAgQPh+zStekcVfgOqIutgqL/++ivNZavPVJXjyAsAAAByh7g9CE7hV1TxjaTrwX1yxx13uAOeglPuDhgwwLVBqMIbzEc9nKnnEdwXDMuVuod48ODBWfK4AAAAEFtxG4Azqk+fPuH/a4QBVXpvueUWF2IzeyIEhejI+aoCrGG80hPDYwmRRdimAADkTnHbAhH09G7cuDHF7bp+pH5fUYuEWiB++eWX8HzSmkfkMlJTcNbRg5GXI8mXL5/7qdPxIncJtmmwjQEAQO4QtxVgtS0ooM6cOdPq168frsRqNAiNJ3skOsBNJ0goU6aMu96kSRO7//773QkZgiAzY8YMq1mzZprtD8dKpwrWuLM6a5oUKlTomE/ni/ir/Cr8aptq26Y+HTQAAMjZYhqAdaKElStXpjjwTQFW/bw6ec
Fdd93lTpigcX8ViB944AErX758eKxgHeCmQNysWTM3EoSu62QH119/fTjcduzY0fXz6kxi/fv3t++//96eeuopGzVqVNQeR1BJDkIwcgeF3/T2NgAAgJwppgF43rx5LrwGgr7bLl26uFPV6mQVGitYw5XpRBhNmza16dOnh09KoFaFyZMnu3F9NXKDQrICcGT/rkaO+Pjjj92JMBo2bOjOaDZo0KCoDoGmim9ycrKrOqvSjJxPewuo/AIAkDvF9ExwOcWxnFkEAAAgIzgTnKdnggMAAACyGwEYAAAAXiEAAwAAwCsEYAAAAHiFAAwAAACvEIABAADgFQIwAAAAvEIABgAAgFcIwAAAAPAKARgAAABeIQADAADAKwRgAAAAeIUADAAAAK8QgAEAAOAVAjAAAAC8QgAGAACAVwjAAAAA8AoBGAAAAF4hAAMAAMArBGAAAAB4hQAMAAAArxCAAQAA4BUCMAAAALxCAAYAAIBXCMAAAADwCgEYAAAAXiEAAwAAwCsEYAAAAHiFAAwAAACvEIABAADgFQIwAAAAvEIABgAAgFcIwAAAAPAKARgAAABeIQADAADAKwRgAAAAeIUADAAAAK8QgAEAAOAVAjAAAAC8QgAGAACAVwjAAAAA8AoBGAAAAF4hAAMAAMArBGAAAAB4hQAMAAAArxCAAQAA4BUCMAAAALxCAAYAAIBX4j4A//3333bXXXdZxYoVrWDBgnbOOefY3Llzw/eHQiEbNGiQJScnu/tbtGhhP/30U4p5/Pnnn9apUycrVqyYlShRwrp162Y7duyIwaMBAABArMV9AL755pttxowZ9u9//9uWLFliLVu2dCH3999/d/cPHz7cxowZY2PHjrVvv/3WChcubK1atbI9e/aE56Hwu3TpUjefadOm2RdffGE9evSI4aMCAABArCSEVEKNU7t377aiRYvau+++a+3atQvf3rBhQ2vTpo0NGTLEypcvb3fffbf17dvX3bdt2zYrW7asjR8/3q699lpbvny51a5d21WNGzVq5KaZPn26tW3b1n777Tf3+0ezfft2K168uJu3qsgAAADHKyHBLDQpwXK1jtkXM48lr8V1BfjAgQN28OBBK1CgQIrb1erw1Vdf2erVq23Dhg2uIhzQA2/cuLHNnj3bXddPtT0E4Vc0fZ48eVzFOC179+51T2LkBQAAALlDXAdgVX+bNGniKr3r1q1zYXjixIku1K5fv96FX1HFN5KuB/fpZ5kyZVLcn5iYaCVLlgxPk9rQoUNdkA4uFSpUyLLHCAAAgOwV1wFY1PurLo2TTjrJkpKSXL/vdddd5yq4WWXAgAGufB5c1q5dm2XLAgAAQPaK+wBctWpV+/zzz92oDQqic+bMsf3791uVKlWsXLlybpqNGzem+B1dD+7Tz02bNh3WWqGRIYJpUlPQVu9I5AUAAAC5Q9wH4IBGd9BQZ3/99Zd99NFH1r59e6tcubILsTNnzgxPp35d9faqdUL0c+vWrTZ//vzwNLNmzbJDhw65XmEAAAD4JdHinMKuWiBq1qxpK1eutHvuucdOPfVUu/HGGy0hIcGNEfzII49Y9erVXSB+4IEH3MgOl112mfv9WrVqWevWra179+5uqDRVj3v16uVGiMjICBAAAADIXeI+AKsHVz25GrJMB6516NDBHn30UcuXL5+7v1+/frZz5043rq8qvU2bNnXDnEWOHDFp0iQXeps3b+56hzUP9RIDAADAP3E9DnC8YBxgAAAQbYwDHF25ZhxgAAAAINoIwAAAAPAKARgAAABeIQADAADAKwRgAAAAeIUADAAAAK8QgAEAAOAVAjAAAAC8QgAGAACAVwjAAAAA8AoBGAAAAF4hAAMAAMArBGAAAAB4hQAMAAAArxCAAQAA4BUCMAAAALxCAAYAAIBXCMAAAADwCgEYAAAAXiEAAwAAwCsEYAAAAHiFAAwAAACvEIABAADgFQIwAAAAvEIABgAAgFcIwAAAAPAKARgAAABeIQ
ADAADAKwRgAAAAeIUADAAAAK8QgAEAAOAVAjAAAAC8QgAGAACAVwjAAAAA8AoBGAAAAF4hAAMAAMArBGAAAAB4hQAMAAAArxCAAQAA4BUCMAAAALxCAAYAAIBXCMAAAADwCgEYAAAAXiEAAwAAwCsEYAAAAHiFAAwAAACvEIABAADgFQIwAAAAvBL3AbhSpUqWkJBw2OW2225z91944YWH3XfrrbemmMeaNWusXbt2VqhQIStTpozdc889duDAgRg9IgAAAMRSosW5uXPn2sGDB8PXv//+e7v44ovtqquuCt/WvXt3e/jhh8PXFXQD+l2F33Llytk333xj69evt86dO1u+fPnssccey8ZHAgAAgHgQ9wH4xBNPTHF92LBhVrVqVbvgggtSBF4F3LR8/PHHtmzZMvvkk0+sbNmyVr9+fRsyZIj179/fHnroIcufP3+WPwYAAADEj7hvgYi0b98+mzhxot10002u1SEwadIkK126tNWpU8cGDBhgu3btCt83e/ZsO/300134DbRq1cq2b99uS5cuTXM5e/fudfdHXgAAAJA7xH0FONLUqVNt69at1rVr1/BtHTt2tIoVK1r58uVt8eLFrrK7YsUKmzJlirt/w4YNKcKvBNd1X1qGDh1qgwcPztLHAgAAgNjIUQH4X//6l7Vp08aF3UCPHj3C/1elNzk52Zo3b26rVq1yrRKZoSpynz59wtdVAa5QocJxrj0AAADiQY4JwL/++qvr4w0qu0fSuHFj93PlypUuAKs3eM6cOSmm2bhxo/t5pL7hpKQkdwEAAEDuk2N6gMeNG+eGMNOIDulZuHCh+6lKsDRp0sSWLFlimzZtCk8zY8YMK1asmNWuXTuL1xoAAADxJkdUgA8dOuQCcJcuXSwx8f9WWW0Or732mrVt29ZKlSrleoB79+5t559/vtWtW9dN07JlSxd0b7jhBhs+fLjr+x04cKAbR5gqLwAAgH9yRABW64NOZqHRHyJpCDPdN3r0aNu5c6fr0+3QoYMLuIG8efPatGnTrGfPnq4aXLhwYRekI8cNBgAAgD8SQqFQKNYrEe90EFzx4sVt27ZtrnUCAADgeGlE19Ck/xvWNVfqGIrLvJZjeoABAACAaCAAAwAAwCsEYAAAAHiFAAwAAACvEIABAADgFQIwAAAAvEIABgAAgFcIwAAAAPAKARgAAABeIQADAADAKwRgAAAAeIUADAAAAK8QgAEAAOAVAjAAAAC8QgAGAACAVwjAAAAA8AoBGAAAAF4hAAMAAMArBGAAAAB4hQAMAAAArxCAAQAA4BUCMAAAALxCAAYAAIBXCMAAAADwSqYC8M8//xz9NQEAAADiNQBXq1bNmjVrZhMnTrQ9e/ZEf60AAACAeArACxYssLp161qfPn2sXLlydsstt9icOXOiv3YAAABAPATg+vXr21NPPWXr1q2zV155xdavX29Nmza1OnXq2MiRI23z5s3RXk8AAAAg9gfBJSYm2hVXXGFvvfWWPf7447Zy5Urr27evVahQwTp37uyCMQAAAJBrAvC8efPsn//8pyUnJ7vKr8LvqlWrbMaMGa463L59++itKQAAABAFiZn5JYXdcePG2YoVK6xt27b26quvup958vx/ebpy5co2fvx4q1SpUjTWEQAAAIhtAH7++eftpptusq5du7rqb1rKlClj//rXv453/QAAAIDYB+CffvrpqNPkz5/funTpkpnZAwAAAPHVA6z2Bx34lppumzBhQjTWCwAAAIifADx06FArXbp0mm0Pjz32WDTWCwAAAIifALxmzRp3oFtqFStWdPcBAAAAuSoAq9K7ePHiw25ftGiRlSpVKhrrBQAAAMRPAL7uuuvsjjvusE8//dQOHjzoLrNmzbI777zTrr322uivJQAAABDLUSCGDBliv/zyizVv3tydDU4OHTrkzv5GDzAAAADiWUIoFApl9pd//PFH1/ZQsGBBO/30010PcG60fft2K168uG3bts2KFSsW69
UBAAC5QEKCWWhSguVqHTMdM7M0r2WqAhyoUaOGuwAAAAA5RaYCsHp+darjmTNn2qZNm1z7QyT1AwMAAAC5JgDrYDcF4Hbt2lmdOnUsQTV8AAAAILcG4MmTJ9ubb75pbdu2jf4aAQAAAPE2DFr+/PmtWrVq0V8bAAAAIB4D8N13321PPfWUHccAEgAAAEDOaYH46quv3EkwPvzwQzvttNMsX758Ke6fMmVKtNYPAAAAiH0ALlGihF1++eXRXRMAAAAgXgPwuHHjLLv8/vvv1r9/f1dt3rVrl+s91vIbNWrk7lcbxoMPPmgvvfSSbd261c4991x7/vnnrXr16uF5/Pnnn3b77bfb+++/b3ny5LEOHTq4Fo4iRYpk2+MAAABADu4BlgMHDtgnn3xiL7zwgv3999/utnXr1tmOHTuitnJ//fWXC7RqsVAAXrZsmT355JN2wgknhKcZPny4jRkzxsaOHWvffvutFS5c2Fq1amV79uwJT9OpUydbunSpzZgxw6ZNm2ZffPGF9ejRI2rrCQAAgFx+KuRff/3VWrdubWvWrLG9e/e6UyJXqVLFjQ+s6wqj0XDvvffa119/bV9++WWa92vVy5cv7w7K69u3r7tNp78rW7asG6f42muvteXLl1vt2rVt7ty54arx9OnT3RBuv/32m/v9o+FUyAAAINo4FXJ0HUtey1QFWEFXYVIV2oIFC4ZvV1+wzg4XLe+9955bzlVXXWVlypSxM844w7U6BFavXm0bNmywFi1ahG/TA2/cuLHNnj3bXddP9SwH4Vc0vVohVDFOi0K8nsTICwAAAHKHTAVgVWQHDhzoxgOOVKlSJdezGy0///xzuJ/3o48+sp49e9odd9xhEyZMcPcr/IoqvpF0PbhPPxWeIyUmJlrJkiXD06Q2dOhQF6SDS4UKFaL2mAAAAJADA/ChQ4fs4MGDh92uloKiRYtGY73Cy2nQoIE99thjrvqrvt3u3btHrcXiSAYMGODK58Fl7dq1Wbo8AAAAxHkAbtmypY0ePTp8PSEhwR38ptEYonl65OTkZNe/G6lWrVqu91jKlSvnfm7cuDHFNLoe3KefmzZtOuwAPo0MEUyTWlJSkusdibwAAADA4wCskRh0cJrCqUZb6NixY7j94fHHH4/aymkEiBUrVqS4TQfcVaxY0f2/cuXKLsRG9h2rX1e9vU2aNHHX9VPDo82fPz88zaxZs1x1Wb3CAAAA8EumxgE++eSTbdGiRTZ58mRbvHixq/5269bNDTcWeVDc8erdu7edc845rgXi6quvtjlz5tiLL77oLkHl+a677rJHHnnE9QkrED/wwANuZIfLLrssXDHWiBVB68T+/futV69eboSIjIwAAQAAgNwlU8OgZSeN26ue3J9++skF3D59+rgwGwhOhKFQrEpv06ZN7bnnnrMaNWqEp1G7g0Jv5IkwNHZwRk+EwTBoAAAg2hgGLbqOJa9lKgC/+uqr6d7fuXNny00IwAAAINoIwLHLa4mZHQc4ktoKdJpiDYtWqFChXBeAAQAA4PlBcDoBRuRFPcA6WE3tB6+//nr01xIAAACIZQBOiw5CGzZs2GHVYQAAACBXBuDgDGvr1q2L5iwBAACAqMpUD/B7772X4rqOo1u/fr0988wzbuxeAAAAIFcF4GCM3YDG4z3xxBPtoosucifJAAAAAHJVANZZ1AAAAADzvQcYAAAAyJUVYJ2NLaNGjhyZmUUAAAAA8ROAv/vuO3fRCTBq1qzpbvvxxx8tb9681qBBgxS9wQAAAECOD8CXXnqpFS1a1CZMmGAnnHCCu00nxLjxxhvtvPPOs7vvvjva6wkAAABERUJIY5gdo5NOOsk+/vhjO+2001Lc/v3331vLli1z3VjAx3JuaQAAgIzQjvLQpFy+t7zjMcfMbMlreTK7gM2bNx92u277+++/MzNLAAAAIFtkKgBffvnlrt1hypQp9ttvv7nL22+/bd26dbMrrrgi+msJAAAAxLIHeOzYsda3b1
/r2LGjOxDOzSgx0QXgESNGRGvdAAAAgPjoAQ7s3LnTVq1a5f5ftWpVK1y4sOVG9AADAIBoowc4h/UAB9avX+8u1atXd+H3OLI0AAAAkC0yFYC3bNlizZs3txo1aljbtm1dCBa1QDAEGgAAAHJdAO7du7fly5fP1qxZY4UKFQrffs0119j06dOjuX4AAABA7A+C0xjAH330kZ188skpblcrxK+//hqtdQMAAADiowKsg98iK7+BP//805KSkqKxXgAAAED8BGCd7vjVV18NX09ISLBDhw7Z8OHDrVmzZtFcPwAAACD2LRAKujoIbt68ebZv3z7r16+fLV261FWAv/766+iuIQAAABDrCnCdOnXsxx9/tKZNm1r79u1dS4TOAPfdd9+58YABAACAXFMB1pnfWrdu7c4Gd//992fNWgEAAADxUgHW8GeLFy/OmrUBAAAA4rEF4vrrr7d//etf0V8bAAAAIB4Pgjtw4IC98sor9sknn1jDhg3daZAjjRw5MlrrBwAAAMQuAP/8889WqVIl+/77761BgwbuNh0MF0lDogEAAAC5IgDrTG/r16+3Tz/9NHzq4zFjxljZsmWzav0AAACA2PUAh0KhFNc//PBDNwQaAAAAkKsPgjtSIAYAAAByVQBWf2/qHl96fgEAAJBre4BV8e3ataslJSW563v27LFbb731sFEgpkyZEt21BAAAAGIRgLt06XLYeMAAAABArg3A48aNy7o1AQAAAOL9IDgAAAAgpyEAAwAAwCsEYAAAAHiFAAwAAACvEIABAADgFQIwAAAAvEIABgAAgFcIwAAAAPAKARgAAABeIQADAADAKwRgAAAAeIUADAAAAK/kqAA8bNgwS0hIsLvuuit824UXXuhui7zceuutKX5vzZo11q5dOytUqJCVKVPG7rnnHjtw4EAMHgEAAABiLdFyiLlz59oLL7xgdevWPey+7t2728MPPxy+rqAbOHjwoAu/5cqVs2+++cbWr19vnTt3tnz58tljjz2WbesPAACA+JAjKsA7duywTp062UsvvWQnnHDCYfcr8CrgBpdixYqF7/v4449t2bJlNnHiRKtfv761adPGhgwZYs8++6zt27cvmx8JAAAAYi1HBODbbrvNVXFbtGiR5v2TJk2y0qVLW506dWzAgAG2a9eu8H2zZ8+2008/3cqWLRu+rVWrVrZ9+3ZbunRpmvPbu3evuz/yAgAAgNwh7lsgJk+ebAsWLHAtEGnp2LGjVaxY0cqXL2+LFy+2/v3724oVK2zKlCnu/g0bNqQIvxJc131pGTp0qA0ePDjqjwUAAACxF9cBeO3atXbnnXfajBkzrECBAmlO06NHj/D/VelNTk625s2b26pVq6xq1aqZWq6qyH369AlfVwW4QoUKmZoXAAAA4ktct0DMnz/fNm3aZA0aNLDExER3+fzzz23MmDHu/zrALbXGjRu7nytXrnQ/1RO8cePGFNME13VfWpKSklwfceQFAAAAuUNcB2BVcpcsWWILFy4MXxo1auQOiNP/8+bNe9jv6HZRJViaNGni5qEgHVBFWaG2du3a2fhoAAAAEA/iugWiaNGi7sC2SIULF7ZSpUq529Xm8Nprr1nbtm3dbeoB7t27t51//vnh4dJatmzpgu4NN9xgw4cPd32/AwcOdAfWqdILAAAAv8R1AD6a/Pnz2yeffGKjR4+2nTt3uj7dDh06uIAbUJV42rRp1rNnT1cNVoDu0qVLinGDAQAA4I+EUCgUivVKxDsdBFe8eHHbtm0b/cAAACAqEhLMQpMSLFfrGIrLvBbXPcAAAABAtBGAAQAA4BUCMAAAALxCAAYAAIBXCMAAAADwCgEYAAAAXsnR4wADAIBc7LXcPURYaFKs18BfVIABAADgFQIwAAAAvEIABgAAgFcIwAAAAPAKARgAAABeIQADAADAKwRgAAAAeIUADAAAAK8QgAEAAOAVAjAAAAC8QgAGAACAVwjAAAAA8AoBGAAAAF4hAAMAAM
ArBGAAAAB4hQAMAAAArxCAAQAA4BUCMAAAALxCAAYAAIBXCMAAAADwCgEYAAAAXiEAAwAAwCsEYAAAAHiFAAwAAACvEIABAADgFQIwAAAAvEIABgAAgFcIwAAAAPAKARgAAABeIQADAADAKwRgAAAAeIUADAAAAK8QgAEAAOAVAjAAAAC8QgAGAACAVwjAAAAA8AoBGAAAAF4hAAMAAMArBGAAAAB4hQAMAAAArxCAAQAA4JW4DsDPP/+81a1b14oVK+YuTZo0sQ8//DB8/549e+y2226zUqVKWZEiRaxDhw62cePGFPNYs2aNtWvXzgoVKmRlypSxe+65xw4cOBCDRwMAAIB4ENcB+OSTT7Zhw4bZ/Pnzbd68eXbRRRdZ+/btbenSpe7+3r172/vvv29vvfWWff7557Zu3Tq74oorwr9/8OBBF3737dtn33zzjU2YMMHGjx9vgwYNiuGjAgAAQCwlhEKhkOUgJUuWtBEjRtiVV15pJ554or322mvu//LDDz9YrVq1bPbs2Xb22We7avEll1zignHZsmXdNGPHjrX+/fvb5s2bLX/+/Bla5vbt26148eK2bds2V4kGAADZ4LWEWK8BjlfH7IuZx5LX4roCHEnV3MmTJ9vOnTtdK4Sqwvv377cWLVqEpzn11FPtlFNOcQFY9PP0008Ph19p1aqVe4KCKnJa9u7d66aJvAAAACB3iPsAvGTJEtffm5SUZLfeequ98847Vrt2bduwYYOr4JYoUSLF9Aq7uk/0MzL8BvcH9x3J0KFD3TeI4FKhQoUseWwAAADIfnEfgGvWrGkLFy60b7/91nr27GldunSxZcuWZekyBwwY4MrnwWXt2rVZujwAAABkn0SLc6ryVqtWzf2/YcOGNnfuXHvqqafsmmuucQe3bd26NUUVWKNAlCtXzv1fP+fMmZNifsEoEcE0aVG1WRcAAADkPnFfAU7t0KFDrkdXYThfvnw2c+bM8H0rVqxww56pR1j0Uy0UmzZtCk8zY8YM1xitNgoAAAD4J64rwGpFaNOmjTuw7e+//3YjPnz22Wf20Ucfud7cbt26WZ8+fdzIEAq1t99+uwu9GgFCWrZs6YLuDTfcYMOHD3d9vwMHDnRjB1PhBQAA8FNcB2BVbjt37mzr1693gVcnxVD4vfjii939o0aNsjx58rgTYKgqrBEennvuufDv582b16ZNm+Z6hxWMCxcu7HqIH3744Rg+KgAAAMRSjhsHOBYYBxgAgBhgHOCcryPjAAMAAAAxRwAGAACAVwjAAAAA8AoBGAAAAF4hAAMAAMArBGAAAAB4hQAMAAAArxCAAQAA4BUCMAAAALxCAAYAAIBXCMAAAADwCgEYAAAAXiEAAwAAwCsEYAAAAHiFAAwAAACvEIABAADgFQIwAAAAvEIABgAAgFcIwAAAAPAKARgAAABeIQADAADAKwRgAAAAeIUADAAAAK8QgAEAAOAVAjAAAAC8QgAGAACAVwjAAAAA8AoBGAAAAF4hAAMAAMArBGAAAAB4hQAMAAAArxCAAQAA4BUCMAAAALxCAAYAAIBXCMAAAADwCgEYAAAAXiEAAwAAwCsEYAAAAHiFAAwAAACvEIABAADgFQIwAAAAvEIABgAAgFcIwAAAAPAKARgAAABeIQADAADAKwRgAAAAeIUADAAAAK8QgAEAAOCVuA/AX3zxhV166aVWvnx5S0hIsKlTp6a4v2vXru72yEvr1q1TTPPnn39ap06drFixYlaiRAnr1q2b7dixI5sfCQAAAOJB3AfgnTt3Wr169ezZZ5894jQKvOvXrw9fXn/99RT3K/wuXbrUZsyYYdOmTXOhukePHtmw9gAAAIg3iRbn2rRp4y7pSUpKsnLlyqV53/Lly2369Ok2d+5ca9Sokbvt6aeftrZt29oTTzzhKssAAADwR9xXgDPis88+szJlyljNmjWtZ8+etmXLlvB9s2fPdm0PQfiVFi1aWJ48eezbb79Nc3579+617du3p7
gAAAAgd8jxAVjtD6+++qrNnDnTHn/8cfv8889dxfjgwYPu/g0bNrhwHCkxMdFKlizp7kvL0KFDrXjx4uFLhQoVsuWxAAAAIOvFfQvE0Vx77bXh/59++ulWt25dq1q1qqsKN2/ePFPzHDBggPXp0yd8XRVgQjAAAEDukOMrwKlVqVLFSpcubStXrnTX1Ru8adOmFNMcOHDAjQxxpL5h9RRrxIjICwAAAHKHXBeAf/vtN9cDnJyc7K43adLEtm7davPnzw9PM2vWLDt06JA1btw4hmsKAACAWIj7FgiN1xtUc2X16tW2cOFC18Ory+DBg61Dhw6umrtq1Srr16+fVatWzVq1auWmr1WrlusT7t69u40dO9b2799vvXr1cq0TjAABAADgn7ivAM+bN8/OOOMMdxH15ur/gwYNsrx589rixYvtH//4h9WoUcOd4KJhw4b25ZdfujaGwKRJk+zUU091PcEa/qxp06b24osvxvBRAQAAIFYSQqFQKGZLzyF0EJxGg9i2bRv9wAAAZJfXEmK9BjheHUNxmdfivgIMAAAARBMBGAAAAF4hAAMAAMArBGAAAAB4hQAMAAAArxCAAQAA4BUCMAAAALxCAAYAAIBXCMAAAADwCgEYAAAAXiEAAwAAwCsEYAAAAHiFAAwAAACvEIABAADgFQIwAAAAvEIABgAAgFcIwAAAAPAKARgAAABeIQADAADAKwRgAAAAeIUADAAAAK8QgAEAAOAVAjAAAAC8QgAGAACAVwjAAAAA8AoBGAAAAF4hAAMAAMArBGAAAAB4hQAMAAAArxCAAQAA4BUCMAAAALxCAAYAAIBXCMAAAADwCgEYAAAAXiEAAwAAwCsEYAAAAHiFAAwAAACvEIABAADgFQIwAAAAvEIABgAAgFcIwAAAAPAKARgAAABeIQADAADAKwRgAAAAeIUADAAAAK8QgAEAAOAVAjAAAAC8QgAGAACAV7wKwM8++6xVqlTJChQoYI0bN7Y5c+bEepUAAACQzbwJwG+88Yb16dPHHnzwQVuwYIHVq1fPWrVqZZs2bYr1qgEAACAbeROAR44cad27d7cbb7zRateubWPHjrVChQrZK6+8EutVAwAAQDZKNA/s27fP5s+fbwMGDAjflidPHmvRooXNnj37sOn37t3rLoFt27a5n9u3b7ds9WZxy82Kd99m//9TCwDA4XbFegVw3LIxOwU5LRQKHXVaLwLwH3/8YQcPHrSyZcumuF3Xf/jhh8OmHzp0qA0ePPiw2ytUqJCl6+mf4lY8d2d8AAD81j37P+j//vtvK36UgOFFAD5WqhSrXzhw6NAh+/PPP61UqVKWkJBgPtO3K30RWLt2rRUrVizWq4MoYtvmbmzf3Ittm7uxfTNOlV+F3/Llyx91Wi8CcOnSpS1v3ry2cePGFLfrerly5Q6bPikpyV0ilShRIsvXMyfRm5A3Yu7Ets3d2L65F9s2d2P7ZszRKr9eHQSXP39+a9iwoc2cOTNFVVfXmzRpEtN1AwAAQPbyogIsamno0qWLNWrUyM466ywbPXq07dy5040KAQAAAH94E4CvueYa27x5sw0aNMg2bNhg9evXt+nTpx92YBzSp9YQjaWcukUEOR/bNndj++ZebNvcje2bNRJCGRkrAgAAAMglvOgBBgAAAAIEYAAAAHiFAAwAAACvEIABAADgFQIwMu3HH3+09u3buxONaHDupk2b2qeffhrr1UKUfPDBB9a4cWMrWLCgnXDCCXbZZZfFepUQZXv37nUj4ugMlwsXLoz16uA4/fLLL9atWzerXLmye99WrVrVjR6wb9++WK8aMunZZ5+1SpUqWYECBdzf4zlz5sR6lXINAjAy7ZJLLrEDBw7YrFmzbP78+VavXj13m4aZQ8729ttv2w033ODGyV60aJF9/fXX1rFjx1ivFqKsX79+GTplKHKGH374wZ3k6YUXXrClS5faqFGjbOzYsXbffffFetWQCW+88YY7h4G+xCxYsMB9xr
Zq1co2bdoU61XLFRgGDZnyxx9/2IknnmhffPGFnXfeee42nX9bleAZM2ZYixYtYr2KyCR9qVHFYfDgwa6ahNzpww8/dB+u+rJz2mmn2XfffeeqwchdRowYYc8//7z9/PPPsV4VHCNVfM8880x75pln3HV9ualQoYLdfvvtdu+998Z69XI8KsDIlFKlSlnNmjXt1VdfdWfUU2hS1aFMmTLutNPIuVRp+P333y1Pnjx2xhlnWHJysrVp08a+//77WK8aomTjxo3WvXt3+/e//22FChWK9eogC23bts1KliwZ69XAMVLbivasRhaT9DdZ12fPnh3TdcstCMDIFPUMfvLJJ65qVLRoUdefNHLkSHd2PfWLIucKKkUPPfSQDRw40KZNm+a26YUXXmh//vlnrFcPx0k7/bp27Wq33nqrOzU8cq+VK1fa008/bbfcckusVwWZ2Mt68ODBw85Wq+u0GUYHARgpaLeKwm16F/WZ6UP0tttucxXfL7/80jXm6yCpSy+91NavXx/rh4Hj2LbazSb333+/dejQwVX0x40b5+5/6623Yv0wcJzbV4FI7UoDBgyI9Sojyts2kvbitG7d2q666ipX7QeQEj3ASGHz5s22ZcuWdKepUqWKC70tW7a0v/76y/X9BqpXr+76RulPyrnbVge8XXTRRW4ba2SPyH407X579NFHs2FtkVXb9+qrr7b333/fhaaAKk158+a1Tp062YQJE7JhbZEV2zZ//vzu/+vWrXN7bM4++2wbP36823WOnNcCofak//znPylG4OnSpYtt3brV3n333ZiuX26QGOsVQHzRgW26HM2uXbvcz9R/WHU9qCAiZ25bVXyTkpJsxYoV4QC8f/9+N8RSxYoVs2FNkZXbd8yYMfbII4+Eryss6chyHXGuLznIuds2qPw2a9YsvOeG8Jsz6cuMtuHMmTPDAVifrbreq1evWK9erkAARqY0adLE9YXq2+igQYPcmJMvvfSSrV692tq1axfr1cNxUEVf/aEaekdHHCv06khy0e5U5GynnHJKiutFihRxPzVm7MknnxyjtUI0KPyq8qv37BNPPOEqx4Fy5crFdN1w7DRKiz5j1at/1lln2ejRo91B5xqeEsePAIxM0ckvdMCb+kS1u1wVQg2lpN0yGqsQOZsCb2JiohsLePfu3a4yqPGeOcARiF8aglIHvumS+ssM3Y45zzXXXOO+xKjIpAPfNEyhPndTHxiHzKEHGAAAAF6hOQgAAABeIQADAADAKwRgAAAAeIUADAAAAK8QgAEAAOAVAjAAAAC8QgAGAACAVwjAAAAA8AoBGAAAAF4hAANADqBTovbs2dNOOeUUS0pKsnLlylmrVq3s66+/jvWqAUCOkxjrFQAAHF2HDh1s3759NmHCBKtSpYpt3LjRZs6caVu2bMmS5WlZ+fPnz5J5A0CsUQEGgDi3detW+/LLL+3xxx+3Zs2aWcWKFe2ss86yAQMG2D/+8Y/wNLfccouVLVvWChQoYHXq1LFp06aF5/H222/baaed5qrHlSpVsieffDLFMnTbkCFDrHPnzlasWDHr0aOHu/2rr76y8847zwoWLGgVKlSwO+64w3bu3Bn+veeee86qV6/ulqllX3nlldn2vABAZhGAASDOFSlSxF2mTp1qe/fuPez+Q4cOWZs2bVw7xMSJE23ZsmU2bNgwy5s3r7t//vz5dvXVV9u1115rS5YssYceesgeeOABGz9+fIr5PPHEE1avXj377rvv3P2rVq2y1q1bu+rz4sWL7Y033nCBuFevXm76efPmuUD88MMP24oVK2z69Ol2/vnnZ9OzAgCZlxAKhULH8fsAgGygCm737t1t9+7d1qBBA7vgggtcoK1bt659/PHHLgAvX77catSocdjvdurUyfUQa7pAv3797IMPPrClS5eGK8BnnHGGvfPOO+Fpbr75ZheiX3jhhfBtCsBatqrA//3vf+3GG2+03377zYoWLZrlzwEARAsVYADIAVSFXb
dunb333nuuKvvZZ5+5IKwq7sKFC+3kk09OM/yKgvG5556b4jZd/+mnn+zgwYPh2xo1apRimkWLFrn5BxVoXXTgnSrOq1evtosvvti1Y6gn+YYbbrBJkybZrl27sugZAIDoIQADQA6hPluFTrUnfPPNN9a1a1d78MEHXX9uNBQuXDjF9R07dri+YgXs4KJQrOBctWpVV/VdsGCBvf7665acnGyDBg1yLRTqRwaAeEYABoAcqnbt2q4VQW0QakP48ccf05yuVq1ahw2XpuuqGAd9wmlRhVn9xNWqVTvsEowQkZiYaC1atLDhw4e7PuFffvnFZs2aFeVHCgDRxTBoABDnNNTZVVddZTfddJMLu6q86gA0hc727du7nlwdfKY2iZEjR7qA+sMPP1hCQoJrl7j77rvtzDPPdKM8XHPNNTZ79mx75pln3AgO6enfv7+dffbZ7qA39QOrQqxAPGPGDPf7GmXi559/dss+4YQTXE+w2iNq1qyZbc8NAGQGARgA4px6bxs3bmyjRo1yIzPs37/fDUmmg+Luu+++8EFyffv2teuuu85VhRWCNRJEUMl98803XYuCQrDaFTRyg1oo0qOw/fnnn9v999/vhkLTMdNqfVCIlhIlStiUKVPcqBJ79uxxw6GpHULDrQFAPGMUCAAAAHiFHmAAAAB4hQAMAAAArxCAAQAA4BUCMAAAALxCAAYAAIBXCMAAAADwCgEYAAAAXiEAAwAAwCsEYAAAAHiFAAwAAACvEIABAABgPvl/ch/bWP+j78wAAAAASUVORK5CYII=", + "text/plain": [ + "
" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "plot_score_dists(baseline_performance, perturbed_performance)" + ] + }, + { + "cell_type": "code", + "execution_count": 16, + "metadata": { + "id": "jF7N9Kder_8j" + }, + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "/var/folders/8_/slwlpfyn0w34d9zzt9s9qk_c0000gn/T/ipykernel_82715/956876871.py:13: MatplotlibDeprecationWarning: The 'labels' parameter of boxplot() has been renamed 'tick_labels' since Matplotlib 3.9; support for the old name will be dropped in 3.11.\n", + " plt.boxplot([baseline_scores, perturbed_scores], labels=['Baseline', 'Perturbed'])\n", + "/var/folders/8_/slwlpfyn0w34d9zzt9s9qk_c0000gn/T/ipykernel_82715/956876871.py:18: UserWarning: No artists with labels found to put in legend. Note that artists whose label start with an underscore are ignored when legend() is called with no argument.\n", + " plt.legend()\n" + ] + }, + { + "data": { + "image/png": "iVBORw0KGgoAAAANSUhEUgAAArEAAAIjCAYAAAAUdENlAAAAOnRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjEwLjEsIGh0dHBzOi8vbWF0cGxvdGxpYi5vcmcvc2/+5QAAAAlwSFlzAAAPYQAAD2EBqD+naQAAQgdJREFUeJzt3QmcTfX/x/HP2McyirJF9kKFkDWlIokiS36lRUnq156SUdGuQtr+Lb96hNJCtopSKkrxa0GiyDaiLCEZIev9P97f///cx50xM2bGnbnznXk9H487d+65555z7nbu+3zP53xPXCgUChkAAADgkUKxXgAAAAAgqwixAAAA8A4hFgAAAN4hxAIAAMA7hFgAAAB4hxALAAAA7xBiAQAA4B1CLAAAALxDiAUAAIB3CLHAETzwwAMWFxeXK/Nq166duwTmzJnj5j1p0qRcmX/fvn2tRo0alpf9/fffdt1111mlSpXca3P77bebr8aOHeuew9q1a9P9DCDv0XekS5cuOT4ffS70+dDnBMDhCLEoUILQEFxKlChhVapUsY4dO9qzzz5rO3fujMp8NmzY4MLvDz/8YHlNXl62zHjsscfc+3jjjTfaG2+8YVdeeWWGYSP1+123bl27++677c8//8zV5S7ogg2y4FK0aFGrVauWXXXVVbZmzZqozuutt96yp59+2goSBd5rrrnGateu7T7n2sg766yzbNiwYbFeNCDHFMm5SQN510MPPWQ1a9a0/fv326ZNm9wPrFr0nnrqKXv//fetYcOG4XHvu+8+Gzx4cJaD4oMPPuhCVOPGjTP9uE8++cRyWkbL9sorr9ihQ4csL/v888+tZcuWmf5x1nMcOHCg+/+ff/6xBQsWuIDzxRdf2Lfffmt5TW58BmLp1ltvtTPOOMN99xYuXGj/+c9/bMaMGbZkyRK3QRmtELt06VKvW+mzYtWqVe41jY+Pt2uvvdZ9tzdu3Ohe3yeeeMJ934H8iBCLAqlTp07
WrFmz8O3ExEQXjrSL8OKLL7Zly5a5HwQpUqSIu+Sk3bt3W8mSJa1YsWIWS2ody+v++OMPa9CgQabHP+GEE+yKK64I31YpQunSpW3kyJG2cuVK1zKbl8T6M5DT2rZtaz179nT/q+XwpJNOcsF23Lhx7nt4NHbt2mWlSpWynP6e5jWjR492ZTbau1K9evXDvi+5KaffAyAS5QTA/zv33HPt/vvvt19//dXGjx+fYU3srFmz7Mwzz7RjjjnGBaKTTz7ZhgwZ4u5Tq65aRYIf6WD3aVDXpnrHU0891bUIaneffhSDx6ZXD3nw4EE3jnYR6gdCQXv9+vUpxlHri2paU4uc5pGWLa2aWP0oqSWzWrVqVrx4cfdcFQBDoVCK8TSdm2++2aZNm+aen8Y95ZRTbObMmZl6/fVj269fP6tYsaLbHdqoUSMXbFLvjk5KSnItd8GyR9aTZpZeR4ncOPnxxx/d89cu7mB3rFq1tm3bluKxKjlRC59eJz3HChUqWIcOHVyrV6RvvvnGLrjgAitbtqx7j88++2z7+uuvs10XPXHiRHv00UetatWqbvnOO+881wKXWnbmu3nzZvdapNVi98svv7j5P//88+62WlA1nsK/lqN8+fLuu6DvRHa/d6L3NfDRRx+5sKvPepkyZaxz5872008/pXic3it991avXm0XXnihG69Pnz7utdPnQ9/j4DMSfKbTqkGOfI11HcjoexrZaq6Wfr0O2rCaMmXKYc/vr7/+cp+X4PtTp04d1zqaeo+HxtNz0vum9crVV1/thmWGXgN9LlIHWNHnMzW9vvpc6DVLSEhw6wS1Xkd69913rWnTpm5j/rjjjnMbgr///num3gPR89MeD60D9Proez1gwADbvn17iml8//33rpxL89C8tIdM3zsgM2iJBSKovlI/VPpx6t+/f5rj6MdULbYqOVBZgn6YFCaCoFC/fn03fOjQoXb99de7H2Np3bp1eBoKRmoN/te//uV+HLSCz4jCi35k77nnHhf29OPQvn171/IStBhnRmaWLZKCqgLz7NmzXcDUD/bHH3/sakr1g6YWoEhfffWV+yH/97//7X7QVGfco0cPW7dunQs76dmzZ48LDXodFYT1Q6YfUf1I6of8tttuc8uuGtg77rjD/WAHJQLHH398hs9ZoWvr1q3hcoJFixa5shEFE80noBCm2kyFewVYvc/a1a3r//73v+ENmRtuuMEdaKflVHDRe6nnrdb7Jk2auHHUqq/3VyFAZQ+FChWyMWPGuMA2d+5ca968uWXV448/7qZz11132Y4dO+zJJ590gUGhNZDd+erzp1CjoJy6TGPChAlWuHBh69WrV3ijbvjw4a5FW9NLTk52QUQhXmE+qxSAJPh86D1WgFOwUdhT6+eLL77ogrLeu8iNrAMHDrjxdJ82rBQ09d7p9fntt9/Cn08FrezI6HuqVvzevXu7z4OWV6+zXiNttAWvg5Zdr6u+KwpwJ554os2bN8+1OGt3f1C3q+9Z165d3edI09NnferUqW66maHw+umnn7r3P9goSI+CvEKiwqWWQ4FZr6uW+/LLLw+Po++Bwq3ea23kPPPMM24dp3H1mIzeA9HzDaajlnZtpGhDSI/XdLTXR+uy888/332HVbKl6WoDI62NASBNIaAAGTNmjJoPQ999912645QtWzZ0+umnh28PGzbMPSYwevRod3vLli3pTkPT1ziaX2pnn322u++ll15K8z5dArNnz3bjnnDCCaHk5OTw8IkTJ7rhzzzzTHhY9erVQ1dfffURp5nRsunxmk5g2rRpbtxHHnkkxXg9e/YMxcXFhVatWhUepvGKFSuWYtjixYvd8Oeeey6UkaefftqNN378+PCwffv2hVq1ahUqXbp0iueu5evcuXOG04scV9NNfWnTpk1o69atKcbdvXv3YY9/++233fhffvllis/HTTfdlO48Dx06FKpbt26oY8eO7v/I6desWTPUoUOHwz6PSUlJR/wM1K9fP7R
3797wcL33Gr5kyZIszzctL7/8corpBRo0aBA699xzw7cbNWqU6dc/UvA8XnvtNffd2bBhQ2jGjBmhGjVquM+SPpc7d+4MHXPMMaH+/funeOymTZvc6x45XJ9VTW/w4MGHzUvLF/k5zuj1jlw2XWfmexp8riZPnhwetmPHjlDlypVTrDsefvjhUKlSpUIrVqxI8Xgtc+HChUPr1q1L8T178sknw+McOHAg1LZt23S/q5GWLl0aio+Pd+M2btw4dNttt7lp7tq1K8V4f/31V6hMmTKhFi1ahPbs2ZPivuAzo+9dhQoVQqeeemqKcaZPn+6mP3To0CO+B3PnznXD33zzzRTDZ86cmWL41KlTj7g+BjJCOQGQilptMuqlIGiFeO+997J9EJRab9VCkVk6glstmwHVFFauXNk+/PBDy0mavlrh1JISSa2gyq3aLRlJrcM6Ojqg1mrtrjzS0eeaj1rQLrvssvAwtdRovqr100FY2dWiRQvXyqrL9OnTXau2WlfVwqwW4EBki7ZabNV6qwPIJLJUQO+/Wj91gFxa1DquVjq1aqklT9PRRWUZKgH48ssvs/W50eclsl42aEUPXtujnW/37t1dSYFaXgM6OOrnn392LY6Rz1+vn+aVHWoFVMubDuJSmYCWT2UjqlHXe6SWd30OguXXRZ9BvY/aI5CaeqnIKRl9T7X8l1xySfi2Puf6nqqlUQeLivYm6H069thjUzwffU9UIqT3JPj867WPfC56zrfcckumllOtqnr/1Vqslky1mnbr1s21HOtgzYBeX63b1OqpXfyRgj0NalVXC6n2pkSOo/eqXr16rlTjSO+BnrfKItQiHfm8tYdA69fgfQzWpfpeao8JkFWUEwCpKDSlVUcW0A/6q6++6nan6sdAAUEBQMFSu28ze7BRVg7gSX3wkX5wVFuXnXrQrFBdoX6sIwO0aHdncH8k7S5NTT/gqevg0pqPnmPq1y+9+WSFau0UGiJ/jFXXq/dL72MQFNTllmo933nnncMOhtHu6YB242s3r2oc9aOsWkCFF9XSShDuMtoVrOnpdcmK1K9t8PjgtT3a+ep10mdZJQUPP/ywG6ZAq3Clz3dA5Sja9a0DslQzqvpbleFE9uiREZWyKNgppGmeeo+D2uTgOaS3S1xBMZIep9KSnJLR91Tfv9S18npNRN9LbZTp+ajWOr2Sl+Bzps+3NkpTlz3oc5pZmrdKMRSOteGhYKjPqsqGVDaj70BQuqH3LT3Bdy2teSvEquThSO+Bnrc+a+mtR4PnrVILlRvpe6fSD5UUKXxrQ0wbEMCREGKBCKqj08pXP1DpUYudWlDUmqBWCdWS6cdeP7yqpdWP85FkpY41s9I7IYN+1DKzTNGQ3nxSHwQWawprovcxCLGXXnqpq1dUva9qfxUo1HKpkBbZgqnxFMJUs6j3e8SIEa52U3V8qp8MxtXw9LpXy06N5pFe22jMV7WfanlUq56moUCr10phM6BaYoUh7YnQ89eGgALISy+95DbsjuS0005LsVERKXgOCmPBwXeRUvcSoqCT2Q3HI31HcuJ7quej1shBgwaleX8QeqNJnxO9xrq0atXKzjnnHHvzzTfTfc2PVlrvgZ63Aqzmm5Yg1AcnclHN+QcffODq7dVSP2rUKDcsu7XMKDgIsUAE/XiKDlTIiFba+nHXRQcJqQP+e++91wVb/VhE+wxfqXfdKrjoIKjI1i+1sKV1NLNaVoJWQsnKsgUHjGgXZGRr7PLly8P3R4OmoxYr/fhF/iBGez6RB6MEre5Ba+Znn33mWoTUUhhIb5e5Ws20u1UXtSrpgC6VKSjEBuUUajXMqeCQlmjMV61gOiAnKClYsWJFmt1elStXzoVdXfQaKtjqgK/MhNjMPAcFoKN57dL7jAet0Km/J9lp6df3T9/DyHnp9ZLg4DM9H70+R3ou+nzr86dxI4ObeoY4GkE3gjqILFieoEwkvQ314LumeaduEdewzHwXNR+tN9q0aZOpDQG
V7eii75B6SdABi9ojcrSfJ+R/1MQC/09H9mo3qna9Bd3EpCWtMz0FLV979+5110E/iZntIudIXn/99RR1umq90A+TQlPkD4daL/bt2xcepl2KqbviysqyaVe5WqmC7pUCannTj3fk/I+G5qM6wsh6TAXN5557zv2oa7djNKnVR9SNV2QrZ+oW49RnfdJrEVlaEAQulVwE771KDPRe6EjtICRH2rJli+WEaMxXNYragFMLrEKEdqUr2EZK3eWY3h8FouD5Hw3NWyFcG4Vp1Uhm9rXTZzz1+xQZ4oJa1OA9VS8UWaWaaLXGB9RLg76nWhcErchqtZ8/f75rYUxN379gY0qff/2vXhgil0uf/8xQzxNpvV5BzXxQGqCeALQxqh4HVPcdKfjsK/jqM62W9cj3VPXv6oFD5ThHouet5Q/KUiLpeQbrHm08pv7OpV6XAhmhJRYFklbIauXTClXdxyjA6qAHtTLojF2pD3qIpJpA/QhqZa7x1RL3wgsvuLowdTMT/FgqEOiHQD8a+lHVgSmRXTplhVq+NG21fGl5Fa4UHCK7AVOrhcKtdn/rR0S7fNXfbeSBVlldtosuusjtjlQrs+r8FPq0C1m7ktX3ZeppZ5fq9l5++WXXpZb65VRLlp6LuuLRc01dk5sV6t4o6PdXAX/x4sVuXtpFHpQSKDipNVE1hAoDqoXU84zsu1S0IaH3WfW0ei0U4NTi9N1337ldoKKWZO1iV8DXATd6zzQ9LYda6jWvIERHU7Tmq5pvHSCkz7RCZWR3SqJuxVS7qNCsz6UOBAq6HDtaWkYFOdXYqnVb5Q3a9awu2lS6o5a91BtUadGyaYPozjvvdN1E6X3SZ1mvi1r81LqsjVEtv8J6ECazQqUA6nZO770OoHrttdfcd1NdbQVUmqL1ibrk02dby6UD2XR2Mr1m+k7pc6hl03NTjb2GBX3OphXE06JyFn1vVLsc7J3RwYgK1XqOwZnL9PpqA1TrCr0uqj1V67S+E+oOTAfY6YBKTU+fH2086iC7oIstfS/Vxd2R6HFq0VdYVmmKwrOmqz0bOuhL09J3SPPT50wHyGldou+XDkTTcirYA0eUYd8FQD4TdLETXNQlVKVKlVz3Q+qyKLIrp/S62Prss89CXbt2DVWpUsU9XteXXXbZYd3ovPfee657oiJFiqToJkdd95xyyilpLl963Supq6fExETX9Y260lEXQr/++uthjx81apTrjqt48eKuG6nvv//+sGlmtGypu9gSdXt0xx13uOdZtGhR143TiBEjUnTjJJpOWl1Ppdf1V2qbN28OXXPNNaHjjjvOva6nnXZaml0LHU0XW4UKFXKvod6vyK7A5LfffgtdcsklrosndefUq1cv1w2UHqfPgKiLq7vvvtt1M6WuitR9kv5/4YUXDpv3okWLQt27dw+VL1/evR9alksvvdR9frLTxda7776bYvp6TFrdL2VmvhnRdyDorimyy7OAultr3ry5e500Xr169UKPPvqo65opI+k9j/TGVVdheh9KlCgRql27dqhv377u8xzQZ0qvf1r+/vvv0OWXX+6WUfOM/EyvXr061L59e/faVKxYMTRkyJDQrFmz0uxiK73vafAZ/Pjjj0MNGzZ009LrkNZz0/dH3906deq4z7U+361btw6NHDkyxWu2bdu20JVXXhlKSEhwz1v/673MTBdbX3/9tfvuqVssPVbf0xNPPNG9Znq+qb3//vtuGfT+aX56P7WOiTRhwgTXXZieW7ly5UJ9+vRx35FIGb0H8p///CfUtGlTNx99X/SdHjRokPteycKFC913Ucuq+ei72aVLlxTvM5CROP05ctQFAAAA8g5qYgEAAOAdQiwAAAC8Q4gFAACAdwixAAAA8A4hFgAAAN4hxAIAAMA7BepkBzqlpc6yoo7To31aUAAAABw99f6qk1/obIiRpyIv0CFWAbZatWqxXgwAAAAcgU6brrMkpqdAhdjg1JV6UXRaOwAAAOQtycnJrtH
xSKccL1AhNighUIAlxAIAAORdRyr95MAuAAAAeIcQCwAAAO8QYgEAAOCdAlUTCwAAgJzvIuvAgQN28ODBNO8vXLiwFSlS5Ki7OyXEAgAAICr27dtnGzdutN27d2c4XsmSJa1y5cpWrFixbM+LEAsAAIConFQqKSnJtbTqRAUKqKlbW9VKq6C7ZcsWN27dunUzPKFBRgixAAAAOGoKpwqy6uNVLa3piY+Pt6JFi9qvv/7qHlOiRIlszY8DuwAAABA1mWlZzW7ra4ppHPUUAAAAgFxGiAUAAIB3CLEAAADwDiEWAAAA3iHEAgAAIGrUjVY0xjkSutgCokxnKJk7d67r7FkdObdt29b1mQcAvmK9hsxQt1miEx2oG62MBCdDCB6THYRYIIqmTJliAwcOtLVr14aH1ahRw0aNGmXdu3eP6bIBQHawXkNmacPmmGOOsT/++MPdVl+xaZ3sQAFW42jco9kYIsQCUVzR9+zZ0y688ELr2rWr7dmzx22Jrlq1yg2fNGkSK3wAXq7XOnfubHfffbdbp2nd9tFHH7FeQ5oqVarkroMgmx4F2GDc7IoLRaMowRPJyclWtmxZ27FjhyUkJMR6cZDPdrXVqVPHbVGqtUK3AxqmVgudxWTlypXsggPg1XrtuOOOs61btx7WEqvh27ZtY72GdD8/+/fvT/M+lRBk9JnJbF6jJRaIAtWKBSv4ChUqWLt27axUqVK2a9cumzNnjq1evTo8nu4DAF/Wazo1aFotsTNmzHC7hlmvIS0KqTm9cUOIBaJg/fr14foftVhMnDgxxan1NFw1QMF4AJDX/f777+66cePGtnjxYps+fXr4vmrVqrnhixYtCo8H5Da62AKi4JtvvnHXCqrHH3+8vfLKK+4oXl3rdnAUZjAeAOR1W7ZscdcKqqk3wHVbwyPHA3IbIRaIgqAGVnU+69ats+uuu84VrOtat4MuRCJrZQEgLytfvnz4/2LFitngwYPdgaq61u20xgNyEyEWiIJNmza5axWx64jd+fPn286dO921bgfF7cF4AJDXaW9SoEOHDnbxxRe7mn9d63Za4wG5iZpYIBNUDrB8+fJ07y9S5P++SjqY6/vvv7fWrVuH71PH4MFBXhpv4cKFaU6jXr16rnYWAPKCWbNmueuKFSva0qVLU6zX1DuBAq26UdJ4gwYNiuGSoqAixAKZoADbtGnTI46noKpLpMhWCvWpqEtaFixYYE2aNInC0gLA0W+cBwdsbd682Z2hS/3Bag9TmTJl3Ma6eiUIxmPjHLFAP7FAFFb2+/btszPPPNPVvqp0IHU/scHwr776KkUtWSRW9gByk4JnZjbOjwYb58gO+okFokjh8kgr4jvvvNNGjBjhdrE1bNjQPv30U2vfvr39+OOPbpeb+lhs2bJlri0zAGREG84KmelRf7DaOJc2bdq4Ex+MGzfOrr76aneA19dff+3u08a5+o9Nbx5ATqElFogi1YWNHj3aDhw4EB6mOtg77rjDnnzyyZguGwBkVbdu3ey9995L936dYnvatGm5ukzI/5IzmdfonQCIIgVV1cSqVVZ0rdsEWAA+UkBVUE0LARaxRogFokw1r3369HH/6zq9GlgA8IGCqo4L6NWrl7uta90mwCLWCLEAACBDqnnVSQ5E1+nVwAK5iRALAAAA7xBiAQAA4B1CLAAAALxDiAUAAIB3CLEAAADwDiEWAAAA3iHEAgAAwDuEWAAAAHiHEAsAAADvEGIBAADgHUIsAAAAvEOIBQAAgHcIsQAAAPAOIRYAAADeIcQCAADAO0VivQBALK1cudJ27twZ9ekuW7YsxXW0lSlTxurWrZsj0wbgN9ZrKCjiQqFQyAqI5ORkK1u2rO3YscMSEhJivTjIAyv6k046yXy1YsUKVvgAUmC9hoKU12iJRYEVtFSMHz/e6tevH9Vp79mzx9auXWs1atSw+Pj4qE5brSBXXHFFjrS0APAb6zUUJIRYFHha0Tdp0iTq023Tpk3UpwkAmcF
6DQWBdwd2/c///I/bCixRooS1aNHCvv3221gvEgAAAHKZVyF2woQJduedd9qwYcNs4cKF1qhRI+vYsaP98ccfsV40AAAA5CKvQuxTTz1l/fv3t2uuucYaNGhgL730kpUsWdJee+21WC8aAAAAcpE3NbH79u2zBQsWWGJiYnhYoUKFrH379jZ//vw0H7N37153iTzaDYhUqXScxf+1wmyDP9tzWl4tNwCkhfUaCgpvQuzWrVvt4MGDVrFixRTDdXv58uVpPmb48OH24IMP5tISwkcDmhaz+l8OMPvSvFH//5cbANLCeg0FhTchNjvUaqsa2siW2GrVqsV0mZC3vLxgn/UeOtbq16tnvli2fLm9POpyuzjWCwIgT2K9hoLCmxB73HHHWeHChW3z5s0phut2pUqV0nxM8eLF3QVIz6a/Q7bnmJPMqjQ2X+zZdMgtNwCkhfUaCgpvCmaKFStmTZs2tc8++yw87NChQ+52q1atYrpsAAAAyF3etMSKSgOuvvpqa9asmTVv3tyefvpp27Vrl+utAAAAAAWHVyG2d+/etmXLFhs6dKht2rTJGjdubDNnzjzsYC8AAADkb16FWLn55pvdBQAAAAWXdyEWiJbdu3e7a539Ldr27Nlja9eudadIjo+Pj+q0ly1bFtXpAcg/WK+hICHEosAK+hfWWeB8VKZMmVgvAoA8hvUaChJCLAqsbt26uet69eq50xdHu1XhiiuusPHjx1v9+urGO/or+rp160Z9ugD8xnoNBQkhFgWW+h6+7rrrcnQeWtE3adIkR+cBAAHWayhIvOknFgAAAAgQYgEAAOAdQiwAAAC8Q4gFAACAdwixAAAA8A4hFgAAAN4hxAIAAMA7hFgAAAB4hxALAAAA7xBiAQAA4B1CLAAAALxDiAUAAIB3CLEAAADwDiEWAAAA3iHEAgAAwDuEWAAAkKF9+/bZm2++6f7XtW4DsUaIBaLs4MGD9v3337v/da3bAOCrQYMGWYkSJeypp55yt3Wt2xoOxBIhFoiiKVOmWJ06dWzAgAHutq51W8MBwDcKqiNGjLBQKJRiuG5rOEEWsRQXSv3JzMeSk5OtbNmytmPHDktISIj14iCfUVDt2bOndenSxXr06GF9+/a1sWPH2uTJk2369Ok2adIk6969e6wXEwCc3bt32/Lly9O9XyUDrVq1cv8XLVrULrjgAvvggw/soosuspkzZ9r+/fvdffPnz7dixYqlOY169epZyZIlc+gZoKDntSK5ulRAPl3Zq2TglltusbZt29rQoUPtl19+ccOLFCnibuuLeOutt1q1atWscOHCaU6DlT2A3KR1WtOmTTM1rgKrAqwE14Eg6KZlwYIF1qRJk6NcUiBttMQCmbBw4cJMr+yzi5U9gLy0cX7ppZfa6tWrrXfv3q5sYM+ePbZ27VqrUaOGxcfH2+OPP27vvvuu1a5d2yZOnJjmNNg4R3bQEgtEkVbECpnp0a61e++91x555BF74YUXbMOGDeH7qlSpYjfeeKPdf//99uijj7pdcunNAwByi8JlRhvOwV6jhg0bhsdr06ZN+P7TTjvNhViNxwY4YoEQC0RhZa+tRrnvvvtcvZjqYE899VRbunSpPfbYYy7ASuvWrVnZA/DCOeecYytWrHAb3wMHDnS1rxs3brTKlSu7EgK1xAbjAbFAOQEQBToAolSpUla+fHn77bffXC1s4MCBA1a1alXbtm2b7dq1K90DIAAgL1H5QGZKAVSWoPICILfzGl1sAVEwb948F1b/+OMP1wOBWix27tzprnVbw3W/xgMAHyiYnnHGGRmOo/sJsIgVQiwQBdrFJm+88YYtWbLElQ1o61HXKinQ8MjxACCvU68rW7ZssYoVK6Z5v4Zv3bqVE7ogZgixQBSoRkx0lO6qVats9uzZ9tZbb7nrlStXWq1atVKMBwB53dy5c11vBNOmTXMlAzfddJOdf/757lq3p06daklJSW48IBY4sAuIAvUPq25ndBCXVvjt2rU
L33fo0CEbPny41axZ040HAD4I9hzpIFWVDDz//PMp7tfwyPGA3EZLLBAF6mJm1KhR7sxc3bp1S1ETq9saPnLkyHRPdAAAeU2w50glUWkJhrOHCbFC7wRAlE89q65otAsuoBZYBVhOOQvAJ6p1rVOnjusPVnuYChUqlGIPkzbQFWRVMsUGOmKR1wixQA6s+FUjFvSnqBICVvAAfN0w79mzp3Xp0sUSExPD/V+rREp7mCZNmsQGOqKOEJsGQiwAAFnDHibkNkJsGgixAABkHXuYkBfzGr0TAACADCmwRva6AuQF9E4AAAAA7xBiAQAA4B1CLAAAALxDiAUAAIB3CLEAAADwDiEWAAAA3iHEAgAAwDuEWAAAAHiHEAsAAADvEGIBAADgHUIsAAAAvEOIBQAAgHcIsQAAAPAOIRYAAADeIcQCAADAO4RYAAAAeIcQCwAAAO8QYgEAAOAdQiwAAAC8Q4gFAACAdwixAAAA8A4hFgAAAN4hxAIAAMA7hFgAAAB4hxALAAAA7xBiAQAA4B1CLAAAALxDiAUAAIB3CLEAAADwDiEWAAAA3iHEAgAAwDuEWAAAAHiHEAsAAADvEGIBAADgHUIsAAAAvEOIBQAAgHcIsQAAAPAOIRYAAADeIcQCAADAO4RYAAAAeMeLELt27Vrr16+f1axZ0+Lj46127do2bNgw27dvX6wXDQAAADFQxDywfPlyO3TokL388stWp04dW7p0qfXv39927dplI0eOjPXiAQAAIJfFhUKhkHloxIgR9uKLL9qaNWsy/Zjk5GQrW7as7dixwxISEnJ0+QAAAJB1mc1rXrTEpkVPrFy5chmOs3fvXneJfFEAAADgPy9qYlNbtWqVPffcczZgwIAMxxs+fLhL8sGlWrVqubaMAAAAyKchdvDgwRYXF5fhRfWwkX7//Xe74IILrFevXq4uNiOJiYmuxTa4rF+/PoefEQAAAPJ9TeyWLVts27ZtGY5Tq1YtK1asmPt/w4YN1q5dO2vZsqWNHTvWChXKWganJhYAACBv86Im9vjjj3eXzFAL7DnnnGNNmza1MWPGZDnAAgAAIP/w4sAuBVi1wFavXt11qaUW3EClSpViumwAAADIfV6E2FmzZrmDuXSpWrVqivs87SEMAAAAR8GLffJ9+/Z1YTWtCwAAAAoeL0IsAAAAEIkQCwAAAO8QYgEAAOAdQiwAAAC8Q4gFAACAdwixAAAA8A4hFgAAAN4hxAIAAMA7hFgAAAB4hxALAAAA7xBiAQAA4B1CLAAAALxDiAUAAIB3CLEAAADwDiEWAAAA3iHEAgAAwDuEWAAAAHiHEAsAAADvEGIBAADgHUIsAAAAvEOIBQAAgHcIsQAAAPAOIRYAAADeIcQCAADAO4RYAAAAeIcQCwAAAO8QYgEAAOAdQiwAAAC8Q4gFAACAdwixAAAA8A4hFgAAAN4hxAIAAMA7hFgAAAB4hxALAAAA7xBiAQAA4B1CLAAAALxDiAUAAIB3CLEAAADwDiEWAAAA3iHEAgAAwDuEWAAAAHiHEAsAAADvEGIBAADgHUIsAAAAvEOIBQAAgHcIsQAAAPAOIRYAAADeIcQCAADAO4RYAAAAeIcQCwAAAO8QYgEAAOAdQiwAAAC8Q4gFAACAdwixAAAA8A4hFgAAAN4hxAIAAMA7hFgAAAB4hxALAACAghlik5OTbdq0abZs2bJoTA4AAACIfoi99NJL7fnnn3f/79mzx5o1a+aGNWzY0CZPnpydSQIAAAA5G2K//PJLa9u2rft/6tSpFgqF7K+//rJnn33WHnnkkexMEgAAAMjZELtjxw4rV66c+3/mzJnWo0cPK1mypHXu3NlWrlyZnUkCAAAAORtiq1WrZvPnz7ddu3a5EHv++ee74du3b7cSJUpkZ5IAAABAphWxbLj99tutT58+Vrp0aTvxxBOtXbt24TKD0047LTuTBAAAAHI2xP773/+25s2b2/r1661Dhw5WqND/NejWqlWLmlgAAAD
kuLiQjsrKpn379llSUpLVrl3bihTJVh7OVeoKrGzZsq6mNyEhIdaLAwAAgGzmtWzVxO7evdv69evnDuY65ZRTbN26dW74LbfcYo8//nh2JgkAAABkWrZCbGJioi1evNjmzJmT4kCu9u3b24QJE7IzSQAAACDTslUDoLNzKay2bNnS4uLiwsPVKrt69ersTBIAAADI2ZbYLVu2WIUKFQ4bri63IkMtAAAAkGdCrE4zO2PGjPDtILi++uqr1qpVq+gtHQAAABCtcoLHHnvMOnXqZD///LMdOHDAnnnmGff/vHnz7IsvvsjOJAEAAICcbYk988wz3YFdCrA6ucEnn3ziygt0Fq+mTZtmZ5IAAABAzrXE7t+/3wYMGGD333+/vfLKK1l9OAAAAJD7LbFFixa1yZMnH/2cAQAAgNwsJ+jWrZvrZgsAAADw5sCuunXr2kMPPWRff/21q4EtVapUivtvvfXWaC0fAAAAcJi4UCgUsiyqWbNmuvepu601a9aYz+fiBQAAQN7Oa9lqiU1KSrJY2bt3r7Vo0cL1jrBo0SJr3LhxzJYFAAAAHtXERlJDbjYac7Nt0KBBVqVKlVybHwAAAPJRiH399dddH7Hx8fHu0rBhQ3vjjTcsJ3300UeuT9qRI0fm6HwAAACQt2WrnOCpp55y/cTefPPN1qZNGzfsq6++shtuuMG2bt1qd9xxR7SX0zZv3mz9+/d3vSKULFky06UHukTWWAAAAKCAhtjnnnvOXnzxRbvqqqvCwy6++GI75ZRT7IEHHoh6iFW5Qt++fV1Ibtasma1duzZTjxs+fLg9+OCDUV0WAAAAeFpOsHHjRmvduvVhwzVM92XW4MGDXW8GGV2WL1/uQvPOnTstMTExS8up8XVkW3BZv359lh4PAACAfNQSW6dOHZs4caINGTIkxfAJEya4PmQza+DAga6FNSO1atWyzz//3ObPn2/FixdPcZ9aZfv06WPjxo1L87EaP/VjAAAAUED7idVpZ3v37m3t27cP18TqxAefffaZC7eXXHJJVBdy3bp1KepZN2zYYB07drRJkya57raqVq2aqenQTywAAEAB7ie2R48e9s0339jo0aPDp5+tX7++ffvtt3b66adbtJ144okpbpcuXdpd165dO9MBFgAAAPlHtkKs6HSz48ePj+7SAAAAADkVYj/88EMrXLiw26Uf6eOPP7ZDhw5Zp06dLCfVqFEjV0+wAAAAgHzQO4F6FTh48OBhwxUsdR8AAACQ50LsypUrrUGDBocNr1evnq1atSoaywUAAABEN8TqiLE1a9YcNlwBtlSpUtmZJAAAAJCzIbZr1652++232+rVq1MEWPX7qjN3AQAAAHkuxD755JOuxVXlAzVr1nQX/V++fHkbOXJk9JcSAAAAONreCVROMG/ePJs1a5YtXrzY4uPjrVGjRta2bdvsTA4AAADIuZZYnfp1+vTp7v+4uDg7//zzrUKFCq71VSdAuP76623v3r1ZWwIAAAAgJ0PsQw89ZD/99FP49pIlS6x///7WoUMH17XWBx98YMOHD8/qMgAAAAA5F2J/+OEHO++888K333nnHWvevLm98sorduedd9qzzz5rEydOzNoSAAAAADkZYrdv324VK1YM3/7iiy9SnJ3rjDPOsPXr12d1GQAAAICcC7EKsElJSe7/ffv22cKFC61ly5bh+3fu3GlFixbN2hIAAAAAORliL7zwQlf7OnfuXEtMTLSSJUum6JHgxx9/tNq1a2d1GQAAAICc62Lr4Ycftu7du9vZZ59tpUuXtnHjxlmxYsXC97/22muuxwIAAAAgJ8WFQqFQVh+0Y8cOF2ILFy6cYviff/7phkcG27wkOTnZ9XGr5U9ISIj14gAAACCbeS3bJztIS7ly5bIzOQAAACDnTzsLAAAAxBIhFgAAAN4hxAIAAMA7hFgAAAB4hxALAAAA7xBiAQAA4B1CLAAAALxDiAUAAIB3CLEAAADwDiEWAAAA3iHEAgAAwDu
EWAAAAHiHEAsAAADvEGIBAADgHUIsAAAAvEOIBQAAgHcIsQAAAPAOIRYAAADeIcQCAADAO4RYAAAAeIcQCwAAAO8QYgEAAOAdQiwAAAC8Q4gFAACAdwixAAAA8A4hFgAAAN4hxAIAAMA7hFgAAAB4hxALAAAA7xBiAQAA4B1CLAAAALxDiAUAAIB3CLEAAADwDiEWAAAA3iHEAgAAwDuEWAAAAHiHEAsAAADvEGIBAADgHUIsAAAAvEOIBQAAgHcIsQAAAPAOIRYAAADeIcQCAADAO4RYAAAAeIcQCwAAAO8QYgEAAOAdQiwAAAC8Q4gFAACAdwixAAAA8A4hFgAAAN4hxAIAAMA7hFgAAAB4hxALAAAA7xBiAQAA4B1CLAAAALxDiAUAAIB3CLEAAADwDiEWAAAA3iHEAgAAwDuEWAAAAHiHEAsAAADvEGIBAADgHUIsAAAAvONViJ0xY4a1aNHC4uPj7dhjj7Vu3brFepEAAAAQA0XME5MnT7b+/fvbY489Zueee64dOHDAli5dGuvFAgAAQAx4EWIVWG+77TYbMWKE9evXLzy8QYMGMV0uAAAAxIYX5QQLFy6033//3QoVKmSnn366Va5c2Tp16nTElti9e/dacnJyigsAAAD850WIXbNmjbt+4IEH7L777rPp06e7mth27drZn3/+me7jhg8fbmXLlg1fqlWrlotLDQAAgHwZYgcPHmxxcXEZXpYvX26HDh1y4997773Wo0cPa9q0qY0ZM8bd/+6776Y7/cTERNuxY0f4sn79+lx8dgAAAMiXNbEDBw60vn37ZjhOrVq1bOPGjYfVwBYvXtzdt27dunQfq3F0AQAAQP4S0xB7/PHHu8uRqOVVYfSXX36xM8880w3bv3+/rV271qpXr54LSwoAAIC8xIveCRISEuyGG26wYcOGubpWBVf1VCC9evWK9eIBAAAgl3kRYkWhtUiRInbllVfanj173EkPPv/8c3eAFwAAAAqWuFAoFLICQl1sqZcCHeSl1l0AAAD4mde86GILAAAAiESIBQAAgHcIsQAAAPAOIRYAAADeIcQCAADAO4RYAAAAeIcQCwAAAO8QYgEAAOAdQiwAAAC8Q4gFAACAdwixAAAA8A4hFgAAAN4hxAIAAMA7hFgAAAB4hxALAAAA7xBiAQAA4B1CLAAAALxDiAUAAIB3CLEAAADwDiEWAAAA3iHEAgAAwDuEWAAAAHiHEAsAAADvEGIBAADgHUIsAAAAvEOIBQAAgHcIsQAAAPAOIRYAAADeIcQCAADAO4RYAAAAeIcQCwAAAO8QYgEAAOAdQiwAAAC8Q4gFAACAdwixAAAA8A4hFgAAAN4hxAIAAMA7hFgAAAB4hxALAAAA7xBiAQAA4B1CLAAAALxDiAUAAIB3CLEAAADwDiEWAAAA3iHEAgAAwDuEWAAAAHiHEAsAAADvEGIBAADgHUIsAAAAvEOIBQAAgHcIsQAAAPAOIRYAAADeIcQCAADAO4RYAAAAeIcQCwAAAO8QYgEAAOAdQiwAAAC8Q4gFAACAdwixAAAA8A4hFgAAAN4hxAIAAMA7hFgAAAB4hxALAAAA7xBiAQAA4B1CLAAAALxDiAUAAIB3CLEAAADwDiEWAAAA3iHEAgAAwDuEWAAAAHiHEAsAAADvEGIBAADgHUIsAAAAvEOIBQAAgHcIsQAAAPAOIRYAAADeIcQCAADAO4RYAAAAeMebELtixQrr2rWrHXfccZaQkGBnnnmmzZ49O9aLBQAAgBjwJsR26dLFDhw4YJ9//rktWLDAGjVq5IZt2rQp1osGAACAXOZFiN26dautXLnSBg8ebA0bNrS6deva448/brt377alS5fGevEAAACQy7wIseXLl7eTTz7ZXn/9ddu1a5drkX355ZetQoUK1rRp03Qft3fvXktOTk5xAQAAgP+KmAfi4uLs008/tW7dulmZMmWsUKFCLsDOnDnTjj322HQfN3z4cHvwwQdzdVkBAACQz1t
iVR6ggJrRZfny5RYKheymm25ywXXu3Ln27bffukB70UUX2caNG9OdfmJiou3YsSN8Wb9+fa4+PwAAAOSMuJASYoxs2bLFtm3bluE4tWrVcsH1/PPPt+3bt7ueCQKqje3Xr58Lw5mhcoKyZcu6QBs5HQAAAOQNmc1rMS0nOP74493lSHQAl6iMIJJuHzp0KMeWDwAAAHmTFwd2tWrVytW+Xn311bZ48WLXZ+zdd99tSUlJ1rlz51gvHgAAAHKZFyFWJzjQQVx///23nXvuudasWTP76quv7L333nP9xQIAAKBgiWlNbG6jJhYAACB/5DUvWmIBAACASIRYAAAAeIcQCwAAAO94ccYuwCcHDx50fRvrRByVK1e2tm3bWuHChWO9WAAA5CuEWCCKpkyZYgMHDrS1a9eGh9WoUcNGjRpl3bt3j+myAUB2sXGOvIhyAiCKAbZnz5522mmn2fz5823nzp3uWrc1XPcDgG+07qpTp46dc845dvnll7tr3WadhlgjxAJRaqVQC2yXLl1s2rRp1rJlSytdurS71m0Nv+uuu9x4AOALNs6Rl9FPLBAFc+bMca0TWrkruKam4a1bt7bZs2dbu3btYrKMAJAV2uhWi6sCqzbGI0/9rlO+d+vWzZYuXWorV66ktABRRT+xQC5SnZiceuqpad4fDA/GA4C8TjWwqu8fMmRIigArup2YmOhO/67xgFggxAJRoAMdRK0SaQmGB+MBQF7HxjnyOkIsEAU6Ule9EDz22GNuN1sk3R4+fLjVrFnTjQcAPmDjHHkdIRaIAtWDqRut6dOnuzqxyAMgdFvDR44cSd0YAG+wcY68jhALRIn6gZ00aZItWbLEHcSlYnRdq7VCw+knFoBP2DhHXkfvBECU0Sk4gPx+Ehe1wCrAsnGOWOY1QiwAAMgQG+fIi3mN084CAIAMKbDSxzXyGkIsEGW0WAAAkPMIsUAO147p6F4dHEHtGABfsXGOvIjeCYAo4RzjAPIjrbt0+lmdWvvyyy9317rNOg2xRogFotRKoRbYLl26uHOMt2zZ0kqXLu2udVvD77rrLjceAPiCjXPkZfROAETBnDlzXOuEVu4KrqlpuPqMnT17NgdHAPCCNrrV4qrAqo3xQoUKpTjZgfqKVT/YK1eupLQAMclrtMQCUcA5xgHkN6qBVX3/kCFDUgRY0e3ExERLSkpy4wGxQIgFooBzjAPIb9g4R15HiAWigHOMA8hv2DhHXkeIBaKAc4wDyG/YOEdeR4gFokT9wE6aNMmWLFniDuJSMbqu1Vqh4fQTC8AnbJwjr6N3AiDK6BQcQH4/iYtaYBVg2ThHLPMaIRYAAGSIjXPkxbzGaWcBAECGFFjp4xp5DTWxAAAA8A4hFgAAAN4hxAIAAMA7hFgAAAB4hxALAAAA7xBiAQAA4B1CLAAAALxDiAUAAIB3CLEAAADwDiEWAAAA3iHEAgAAwDuEWAAAAHiHEAsAAADvFLECJBQKuevk5ORYLwoAAADSEOS0ILelp0CF2J07d7rratWqxXpRAAAAcITcVrZs2XTvjwsdKebmI4cOHbINGzZYmTJlLC4uLtaLg3y+FamNpfXr11tCQkKsFwcAjhrrNeQWRVMF2CpVqlihQulXvhaolli9EFWrVo31YqAA0YqelT2A/IT1GnJDRi2wAQ7sAgAAgHcIsQAAAPAOIRbIAcWLF7dhw4a5awDID1ivIa8pUAd2AQAAIH+gJRYAAADeIcQCAADAO4RYAAAAeIcQC+SiGjVq2NNPPx2+rZNuTJs2LabLBADR0rdvX+vWrVvUpztnzhy3vvzrr7+iPm34ixCLArVy1UowuJQvX94uuOAC+/HHH2O2TBs3brROnTrFbP4A/FyHFStWzOrUqWMPPfSQHThwIM+FTiA3EGJRoCi0Kjjq8tlnn1mRIkWsS5cuMVueSpUq0V0NgCyvw1auXGkDBw60Bx54wEaMGJHl6Rw8eNCdij1aoj09IDMIsShQFBgVHHVp3Li
xDR482J0HfMuWLe7+e+65x0466SQrWbKk1apVy+6//37bv39/+PGLFy+2c845x8qUKeNOu9i0aVP7/vvvw/d/9dVX1rZtW4uPj3fnGL/11ltt165d6S5PZDnB2rVr3e0pU6a4eWgZGjVqZPPnz0/xmKzOA0D+W4dVr17dbrzxRmvfvr29//77tnfvXrvrrrvshBNOsFKlSlmLFi3cLvjA2LFj7ZhjjnHjNmjQwE3n2muvtXHjxtl7770XbuHVY9Ladf/DDz+4YVpPpTe9devWhcd/8MEH7fjjj3fryRtuuMH27dsXvk9hd/jw4VazZk23HtN6btKkSSme54cffujWxbpf68NgvkAkQiwKrL///tvGjx/vdsmptEAUTrVy/vnnn+2ZZ56xV155xUaPHh1+TJ8+faxq1ar23Xff2YIFC1wILlq0qLtv9erVrpWkR48erkRhwoQJLnDefPPNWVque++91/0Y6UdDK/HLLrssvLswWvMAkD8o5Ckgah2gDd533nnHrRt69erl1hVqsQ3s3r3bnnjiCXv11Vftp59+smeffdYuvfTSFHuoWrdunel5p55ehQoV3HDt5Vq2bJkLw2+//bbbMFeoDSjAvv766/bSSy+5x91xxx12xRVX2BdffOHuV8NC9+7d7aKLLnLrweuuu86ta4HD6GQHQEFw9dVXhwoXLhwqVaqUu+jjX7ly5dCCBQvSfcyIESNCTZs2Dd8uU6ZMaOzYsWmO269fv9D111+fYtjcuXNDhQoVCu3Zs8fdrl69emj06NHh+7UMU6dOdf8nJSW526+++mr4/p9++skNW7ZsWabnASD/rsO6du3q/j906FBo1qxZoeLFi4f69u3r1m2///57ivHPO++8UGJiovt/zJgxbl3yww8/pDvNwOzZs92427dvDw9btGiRG6b11JGmV65cudCuXbvCw1588cVQ6dKlQwcPHgz9888/oZIlS4bmzZuX4nFat1122WXufy1zgwYNUtx/zz33HLZMQJHDYy2Qf2m31Isvvuj+3759u73wwgvuwKpvv/3W7Z5Ty6ZaJ9TiqZZatYBqd1jgzjvvdK0Cb7zxhtuNp9aO2rVrh0sN1ALy5ptvhsdXTtWus6SkJKtfv36mlrFhw4bh/ytXruyu//jjD6tXr17U5gHAT9OnT7fSpUu7Mid97y+//HLr2bOn24OkPTeRVGIQ7GUSHQwWuX45WulNT+UBKocKtGrVyq1P1cKqa7XgdujQIcVj1Jp8+umnu//ViqtyiEiaBpAaIRYFimrFVD4Q0G6wsmXLurKBzp07u3IB7fbq2LGjG65dc6NGjQqPr4Mo9KMxY8YM++ijj9x5xDXOJZdc4lbOAwYMcDWqqZ144omZXsagPEFUgybBARPRmgcAvzfEFSCrVKniDk7VxnfhwoVdiZOuIynwRpYeBOuUjBQq9H+VhpFnpY88NiCr04ukdZhoHar63Ugc5IqsIsSiQNMKWCvsPXv22Lx581xrrGpSA7/++uthj1Frhy6q41K96pgxY1yIbdKkiauljQzJ0ZYb8wDgz4a4qAVTvQNoj40O+swKhWE9NpIOyBLVyB577LHuf9WmZpb2GGmdqpAr//3vf12Y1oGo5cqVCx8EdvbZZ6f5eO1R0gFjkTQNIDUO7EKBot1rmzZtchftsrrllltcy4AOIKhbt65bsaplVeUEKiuYOnVq+LFaKevgCR2soHD79ddfuwO8gl346tlAQVjjaIWvAyp01G80D7rKjXkA8Is2qrUX6aqrrnIHUam0SCVSOoBKLZ5HOgGLSpR++eUX27p1q2txVUhW4NSeJ61jNI3IPVJHotKAfv36uQ1u9TKgPVZaR6nBQAfP6sBVNQKoZwStaxcuXGjPPfecuy3qzUDzvfvuu91yvfXWW65cAkiNEIsCZebMma7OVBfVXCmEvvvuu9auXTu7+OKL3YpVK1t1v6WwqC62AtpNt23bNvdDoR8NHdWretrgqFvVhuno2hUrVrj
WELWODB061O3yi5bcmAcA/2iPkNZN6jv25JNPdicw0PrtSGVG/fv3d+M3a9bMtcBq41wlTepVYPny5W6dox4IHnnkkUwvy3nnnecaBc466yzr3bu3W7cqEAcefvhht25VyFYjgHpHUFBWl1uiZZ48ebLrflD1terF4LHHHjuKVwf5VZyO7or1QgAAAABZQUssAAAAvEOIBQAAgHcIsQAAAPAOIRYAAADeIcQCAADAO4RYAAAAeIcQCwAAAO8QYgEAAOAdQiwAAAC8Q4gFgFy0ZcsWu/HGG92pNYsXL26VKlWyjh07utN9AgAyr0gWxgUAHKUePXrYvn37bNy4cVarVi3bvHmzffbZZ7Zt27YcmZ/mVaxYsRyZNgDEEi2xAJBL/vrrL5s7d6498cQTds4551j16tWtefPmlpiYaBdffHF4nAEDBljFihWtRIkSduqpp9r06dPD05g8ebKdcsoprhW3Ro0aNmrUqBTz0LCHH37YrrrqKktISLDrr7/eDf/qq6+sbdu2Fh8fb9WqVbNbb73Vdu3aFX7cCy+8YHXr1nXz1Lx79uyZa68LAGQHIRYAcknp0qXdZdq0abZ3797D7j906JB16tTJlRaMHz/efv75Z3v88cetcOHC7v4FCxbYpZdeav/6179syZIl9sADD9j9999vY8eOTTGdkSNHWqNGjWzRokXu/tWrV9sFF1zgWoF//PFHmzBhggu1N998sxv/+++/d6H2oYcesl9++cVmzpxpZ511Vi69KgCQPXGhUCiUzccCALJILan9+/e3PXv2WJMmTezss892obRhw4b2ySefuBC7bNkyO+mkkw57bJ8+fVxNrcYLDBo0yGbMmGE//fRTuCX29NNPt6lTp4bHue6661wQfvnll8PDFGI1b7XGfvjhh3bNNdfYb7/9ZmXKlMnx1wAAooGWWADIRWoN3bBhg73//vuudXTOnDkuzKo19YcffrCqVaumGWBF4bZNmzYphun2ypUr7eDBg+FhzZo1SzHO4sWL3fSDlmBddDCZWn6TkpKsQ4cOrrRBNbpXXnmlvfnmm7Z79+4cegUAIDoIsQCQy1R3quCoXf3z5s2zvn372rBhw1y9ajSUKlUqxe2///7b1dkqJAcXBVuF39q1a7vW14ULF9rbb79tlStXtqFDh7pyBNXnAkBeRYgFgBhr0KCB262vkgLt0l+xYkWa49WvX/+wrrh0Wy23Qd1sWtTSq/raOnXqHHYJei4oUqSItW/f3p588klXN7t27Vr7/PPPo/xMASB66GILAHKJutHq1auXXXvttS6wqgVUB1UpOHbt2tXVqOqAKpUcPPXUUy5kLl++3OLi4lzpwcCBA+2MM85wvQ/07t3b5s+fb88//7zrWSAj99xzj7Vs2dIdyKX6WLXUKtTOmjXLPV69H6xZs8bN+9hjj3U1sio1OPnkk3PttQGArCLEAkAuUS1qixYtbPTo0a7HgP3797vurnSg15AhQ8IHft1111122WWXudZZBVn1UBC0qE6cONHt7leQ1a5/9SigcoSMKDB/8cUXdu+997putnQ8r8oIFITlmGOOsSlTprjeDv755x/X1ZZKC9SVFwDkVfROAAAAAO9QEwsAAADvEGIBAADgHUIsAAAAvEOIBQAAgHcIsQAAAPAOIRYAAADeIcQCAADAO4RYAAAAeIcQCwAAAO8QYgEAAOAdQiwAAADMN/8LSaqnHTCV9gsAAAAASUVORK5CYII=", + "text/plain": [ + "
" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "plot_score_dists(baseline_performance, perturbed_performance, type=\"box\")" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "dict_keys(['input_ids', 'token_type_ids', 'attention_mask'])\n" + ] + }, + { + "ename": "RuntimeError", + "evalue": "The size of tensor a (330) must match the size of tensor b (2) at non-singleton dimension 1", + "output_type": "error", + "traceback": [ + "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m", + "\u001b[0;31mRuntimeError\u001b[0m Traceback (most recent call last)", + "Cell \u001b[0;32mIn[17], line 8\u001b[0m\n\u001b[1;32m 6\u001b[0m perturbed_sequences \u001b[38;5;241m=\u001b[39m batch[\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mperturbed_sequences\u001b[39m\u001b[38;5;124m\"\u001b[39m]\n\u001b[1;32m 7\u001b[0m \u001b[38;5;28mprint\u001b[39m(sequences\u001b[38;5;241m.\u001b[39mkeys())\n\u001b[0;32m----> 8\u001b[0m patch_head_out \u001b[38;5;241m=\u001b[39m \u001b[43mcat_model\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mpatch\u001b[49m\u001b[43m(\u001b[49m\u001b[43msequences\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mperturbed_sequences\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mpatch_type\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43mhead_all\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m)\u001b[49m\n\u001b[1;32m 9\u001b[0m patching_head_outputs\u001b[38;5;241m.\u001b[39mappend(patch_head_out)\n\u001b[1;32m 11\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m i \u001b[38;5;241m==\u001b[39m ITERS:\n", + "File \u001b[0;32m/opt/anaconda3/envs/mechir/lib/python3.10/site-packages/mechir/modelling/cat.py:184\u001b[0m, in \u001b[0;36mCat.patch\u001b[0;34m(self, sequences, sequences_p, patch_type, 
layer_head_list, patching_metric)\u001b[0m\n\u001b[1;32m 173\u001b[0m \u001b[38;5;28;01mdef\u001b[39;00m\u001b[38;5;250m \u001b[39m\u001b[38;5;21mpatch\u001b[39m(\n\u001b[1;32m 174\u001b[0m \u001b[38;5;28mself\u001b[39m,\n\u001b[1;32m 175\u001b[0m sequences: \u001b[38;5;28mdict\u001b[39m,\n\u001b[0;32m (...)\u001b[0m\n\u001b[1;32m 179\u001b[0m patching_metric: Callable \u001b[38;5;241m=\u001b[39m linear_rank_function,\n\u001b[1;32m 180\u001b[0m ):\n\u001b[1;32m 181\u001b[0m \u001b[38;5;28;01massert\u001b[39;00m (\n\u001b[1;32m 182\u001b[0m patch_type \u001b[38;5;129;01min\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_patch_funcs\n\u001b[1;32m 183\u001b[0m ), \u001b[38;5;124mf\u001b[39m\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mPatch type \u001b[39m\u001b[38;5;132;01m{\u001b[39;00mpatch_type\u001b[38;5;132;01m}\u001b[39;00m\u001b[38;5;124m not recognized. Choose from \u001b[39m\u001b[38;5;132;01m{\u001b[39;00m\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_patch_funcs\u001b[38;5;241m.\u001b[39mkeys()\u001b[38;5;132;01m}\u001b[39;00m\u001b[38;5;124m\"\u001b[39m\n\u001b[0;32m--> 184\u001b[0m scores, _ \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mscore\u001b[49m\u001b[43m(\u001b[49m\u001b[43msequences\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 185\u001b[0m scores_p, cache \u001b[38;5;241m=\u001b[39m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mscore(sequences_p, cache\u001b[38;5;241m=\u001b[39m\u001b[38;5;28;01mTrue\u001b[39;00m)\n\u001b[1;32m 187\u001b[0m patching_kwargs \u001b[38;5;241m=\u001b[39m {\n\u001b[1;32m 188\u001b[0m \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mcorrupted_tokens\u001b[39m\u001b[38;5;124m\"\u001b[39m: sequences,\n\u001b[1;32m 189\u001b[0m \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mclean_cache\u001b[39m\u001b[38;5;124m\"\u001b[39m: cache,\n\u001b[0;32m (...)\u001b[0m\n\u001b[1;32m 193\u001b[0m 
\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mscores_p\u001b[39m\u001b[38;5;124m\"\u001b[39m: scores_p,\n\u001b[1;32m 194\u001b[0m }\n", + "File \u001b[0;32m/opt/anaconda3/envs/mechir/lib/python3.10/site-packages/mechir/modelling/cat.py:170\u001b[0m, in \u001b[0;36mCat.score\u001b[0;34m(self, sequences, cache)\u001b[0m\n\u001b[1;32m 165\u001b[0m logits, cache \u001b[38;5;241m=\u001b[39m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mrun_with_cache(\n\u001b[1;32m 166\u001b[0m sequences[\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124minput_ids\u001b[39m\u001b[38;5;124m\"\u001b[39m], sequences[\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mattention_mask\u001b[39m\u001b[38;5;124m\"\u001b[39m]\n\u001b[1;32m 167\u001b[0m )\n\u001b[1;32m 168\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m logits, cache\n\u001b[0;32m--> 170\u001b[0m logits \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mforward\u001b[49m\u001b[43m(\u001b[49m\u001b[43msequences\u001b[49m\u001b[43m[\u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43minput_ids\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m]\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43msequences\u001b[49m\u001b[43m[\u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43mattention_mask\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m]\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 171\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m logits, \u001b[38;5;28;01mNone\u001b[39;00m\n", + "File \u001b[0;32m/opt/anaconda3/envs/mechir/lib/python3.10/site-packages/mechir/modelling/cat.py:62\u001b[0m, in \u001b[0;36mCat.forward\u001b[0;34m(self, input_ids, attention_mask, token_type_ids)\u001b[0m\n\u001b[1;32m 56\u001b[0m \u001b[38;5;28;01mdef\u001b[39;00m\u001b[38;5;250m \u001b[39m\u001b[38;5;21mforward\u001b[39m(\n\u001b[1;32m 57\u001b[0m \u001b[38;5;28mself\u001b[39m,\n\u001b[1;32m 58\u001b[0m input_ids: 
Float[torch\u001b[38;5;241m.\u001b[39mTensor, \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mbatch seq\u001b[39m\u001b[38;5;124m\"\u001b[39m],\n\u001b[1;32m 59\u001b[0m attention_mask: Float[torch\u001b[38;5;241m.\u001b[39mTensor, \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mbatch seq\u001b[39m\u001b[38;5;124m\"\u001b[39m],\n\u001b[1;32m 60\u001b[0m token_type_ids: Float[torch\u001b[38;5;241m.\u001b[39mTensor, \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mbatch seq\u001b[39m\u001b[38;5;124m\"\u001b[39m] \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;01mNone\u001b[39;00m,\n\u001b[1;32m 61\u001b[0m ):\n\u001b[0;32m---> 62\u001b[0m model_output \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_model\u001b[49m\u001b[43m(\u001b[49m\n\u001b[1;32m 63\u001b[0m \u001b[43m \u001b[49m\u001b[38;5;28;43minput\u001b[39;49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43minput_ids\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 64\u001b[0m \u001b[43m \u001b[49m\u001b[43mattention_mask\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mattention_mask\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 65\u001b[0m \u001b[43m \u001b[49m\u001b[43mtoken_type_ids\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mtoken_type_ids\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 66\u001b[0m \u001b[43m \u001b[49m\u001b[43mreturn_type\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43mlogits\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m,\u001b[49m\n\u001b[1;32m 67\u001b[0m \u001b[43m \u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 68\u001b[0m model_output \u001b[38;5;241m=\u001b[39m (\n\u001b[1;32m 69\u001b[0m F\u001b[38;5;241m.\u001b[39mlog_softmax(model_output, dim\u001b[38;5;241m=\u001b[39m\u001b[38;5;241m-\u001b[39m\u001b[38;5;241m1\u001b[39m)[:, \u001b[38;5;241m0\u001b[39m]\n\u001b[1;32m 70\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m 
\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39msoftmax_output\n\u001b[1;32m 71\u001b[0m \u001b[38;5;28;01melse\u001b[39;00m model_output[:, \u001b[38;5;241m0\u001b[39m]\n\u001b[1;32m 72\u001b[0m )\n\u001b[1;32m 73\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m model_output\n", + "File \u001b[0;32m/opt/anaconda3/envs/mechir/lib/python3.10/site-packages/torch/nn/modules/module.py:1739\u001b[0m, in \u001b[0;36mModule._wrapped_call_impl\u001b[0;34m(self, *args, **kwargs)\u001b[0m\n\u001b[1;32m 1737\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_compiled_call_impl(\u001b[38;5;241m*\u001b[39margs, \u001b[38;5;241m*\u001b[39m\u001b[38;5;241m*\u001b[39mkwargs) \u001b[38;5;66;03m# type: ignore[misc]\u001b[39;00m\n\u001b[1;32m 1738\u001b[0m \u001b[38;5;28;01melse\u001b[39;00m:\n\u001b[0;32m-> 1739\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_call_impl\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n", + "File \u001b[0;32m/opt/anaconda3/envs/mechir/lib/python3.10/site-packages/torch/nn/modules/module.py:1750\u001b[0m, in \u001b[0;36mModule._call_impl\u001b[0;34m(self, *args, **kwargs)\u001b[0m\n\u001b[1;32m 1745\u001b[0m \u001b[38;5;66;03m# If we don't have any hooks, we want to skip the rest of the logic in\u001b[39;00m\n\u001b[1;32m 1746\u001b[0m \u001b[38;5;66;03m# this function, and just call forward.\u001b[39;00m\n\u001b[1;32m 1747\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;129;01mnot\u001b[39;00m (\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_backward_hooks \u001b[38;5;129;01mor\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_backward_pre_hooks \u001b[38;5;129;01mor\u001b[39;00m 
\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_forward_hooks \u001b[38;5;129;01mor\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_forward_pre_hooks\n\u001b[1;32m 1748\u001b[0m \u001b[38;5;129;01mor\u001b[39;00m _global_backward_pre_hooks \u001b[38;5;129;01mor\u001b[39;00m _global_backward_hooks\n\u001b[1;32m 1749\u001b[0m \u001b[38;5;129;01mor\u001b[39;00m _global_forward_hooks \u001b[38;5;129;01mor\u001b[39;00m _global_forward_pre_hooks):\n\u001b[0;32m-> 1750\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[43mforward_call\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 1752\u001b[0m result \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;01mNone\u001b[39;00m\n\u001b[1;32m 1753\u001b[0m called_always_called_hooks \u001b[38;5;241m=\u001b[39m \u001b[38;5;28mset\u001b[39m()\n", + "File \u001b[0;32m/opt/anaconda3/envs/mechir/lib/python3.10/site-packages/mechir/modelling/hooked/HookedEncoderForSequenceClassification.py:53\u001b[0m, in \u001b[0;36mHookedEncoderForSequenceClassification.forward\u001b[0;34m(self, input, return_type, token_type_ids, attention_mask, start_at_layer, stop_at_layer)\u001b[0m\n\u001b[1;32m 35\u001b[0m \u001b[38;5;28;01mdef\u001b[39;00m\u001b[38;5;250m \u001b[39m\u001b[38;5;21mforward\u001b[39m(\n\u001b[1;32m 36\u001b[0m \u001b[38;5;28mself\u001b[39m,\n\u001b[1;32m 37\u001b[0m \u001b[38;5;28minput\u001b[39m: Int[torch\u001b[38;5;241m.\u001b[39mTensor, \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mbatch pos\u001b[39m\u001b[38;5;124m\"\u001b[39m],\n\u001b[0;32m (...)\u001b[0m\n\u001b[1;32m 42\u001b[0m stop_at_layer: Optional[\u001b[38;5;28mint\u001b[39m] \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;01mNone\u001b[39;00m,\n\u001b[1;32m 43\u001b[0m ) \u001b[38;5;241m-\u001b[39m\u001b[38;5;241m>\u001b[39m 
Optional[Float[torch\u001b[38;5;241m.\u001b[39mTensor, \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mbatch pos d_vocab\u001b[39m\u001b[38;5;124m\"\u001b[39m]]:\n\u001b[1;32m 44\u001b[0m \u001b[38;5;250m \u001b[39m\u001b[38;5;124;03m\"\"\"Input must be a batch of tokens. Strings and lists of strings are not yet supported.\u001b[39;00m\n\u001b[1;32m 45\u001b[0m \n\u001b[1;32m 46\u001b[0m \u001b[38;5;124;03m return_type Optional[str]: The type of output to return. Can be one of: None (return nothing, don't calculate logits), or 'logits' (return logits).\u001b[39;00m\n\u001b[0;32m (...)\u001b[0m\n\u001b[1;32m 50\u001b[0m \u001b[38;5;124;03m attention_mask: Optional[torch.Tensor]: A binary mask which indicates which tokens should be attended to (1) and which should be ignored (0). Primarily used for padding variable-length sentences in a batch. For instance, in a batch with sentences of differing lengths, shorter sentences are padded with 0s on the right. If not provided, the model assumes all tokens should be attended to.\u001b[39;00m\n\u001b[1;32m 51\u001b[0m \u001b[38;5;124;03m \"\"\"\u001b[39;00m\n\u001b[0;32m---> 53\u001b[0m hidden \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;43msuper\u001b[39;49m\u001b[43m(\u001b[49m\u001b[43m)\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mforward\u001b[49m\u001b[43m(\u001b[49m\n\u001b[1;32m 54\u001b[0m \u001b[43m \u001b[49m\u001b[38;5;28;43minput\u001b[39;49m\u001b[43m,\u001b[49m\n\u001b[1;32m 55\u001b[0m \u001b[43m \u001b[49m\u001b[43mtoken_type_ids\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mtoken_type_ids\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 56\u001b[0m \u001b[43m \u001b[49m\u001b[43mstart_at_layer\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mstart_at_layer\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 57\u001b[0m \u001b[43m \u001b[49m\u001b[43mstop_at_layer\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mstop_at_layer\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 58\u001b[0m \u001b[43m 
\u001b[49m\u001b[43mreturn_type\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43membeddings\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m,\u001b[49m\n\u001b[1;32m 59\u001b[0m \u001b[43m \u001b[49m\u001b[43mattention_mask\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mattention_mask\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 60\u001b[0m \u001b[43m \u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 61\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m return_type \u001b[38;5;241m==\u001b[39m \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124membeddings\u001b[39m\u001b[38;5;124m\"\u001b[39m \u001b[38;5;129;01mor\u001b[39;00m stop_at_layer \u001b[38;5;129;01mis\u001b[39;00m \u001b[38;5;129;01mnot\u001b[39;00m \u001b[38;5;28;01mNone\u001b[39;00m:\n\u001b[1;32m 62\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m hidden\n", + "File \u001b[0;32m/opt/anaconda3/envs/mechir/lib/python3.10/site-packages/mechir/modelling/hooked/HookedEncoder.py:240\u001b[0m, in \u001b[0;36mHookedEncoder.forward\u001b[0;34m(self, input, return_type, token_type_ids, attention_mask, start_at_layer, stop_at_layer)\u001b[0m\n\u001b[1;32m 237\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m start_at_layer \u001b[38;5;129;01mis\u001b[39;00m \u001b[38;5;28;01mNone\u001b[39;00m:\n\u001b[1;32m 238\u001b[0m start_at_layer \u001b[38;5;241m=\u001b[39m \u001b[38;5;241m0\u001b[39m\n\u001b[0;32m--> 240\u001b[0m resid \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mencoder_output\u001b[49m\u001b[43m(\u001b[49m\n\u001b[1;32m 241\u001b[0m \u001b[43m \u001b[49m\u001b[43mresidual\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 242\u001b[0m \u001b[43m \u001b[49m\u001b[43mtoken_type_ids\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mtoken_type_ids\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 243\u001b[0m \u001b[43m 
\u001b[49m\u001b[43mstart_at_layer\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mstart_at_layer\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 244\u001b[0m \u001b[43m \u001b[49m\u001b[43mstop_at_layer\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mstop_at_layer\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 245\u001b[0m \u001b[43m \u001b[49m\u001b[43mone_zero_attention_mask\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mattention_mask\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 246\u001b[0m \u001b[43m\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 248\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m stop_at_layer \u001b[38;5;129;01mis\u001b[39;00m \u001b[38;5;129;01mnot\u001b[39;00m \u001b[38;5;28;01mNone\u001b[39;00m \u001b[38;5;129;01mor\u001b[39;00m return_type \u001b[38;5;241m==\u001b[39m \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124membeddings\u001b[39m\u001b[38;5;124m\"\u001b[39m:\n\u001b[1;32m 249\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m resid\n", + "File \u001b[0;32m/opt/anaconda3/envs/mechir/lib/python3.10/site-packages/mechir/modelling/hooked/HookedEncoder.py:172\u001b[0m, in \u001b[0;36mHookedEncoder.encoder_output\u001b[0;34m(self, tokens, token_type_ids, start_at_layer, stop_at_layer, one_zero_attention_mask)\u001b[0m\n\u001b[1;32m 169\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m one_zero_attention_mask \u001b[38;5;129;01mis\u001b[39;00m \u001b[38;5;129;01mnot\u001b[39;00m \u001b[38;5;28;01mNone\u001b[39;00m:\n\u001b[1;32m 170\u001b[0m one_zero_attention_mask \u001b[38;5;241m=\u001b[39m one_zero_attention_mask\u001b[38;5;241m.\u001b[39mto(\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mcfg\u001b[38;5;241m.\u001b[39mdevice)\n\u001b[0;32m--> 172\u001b[0m resid \u001b[38;5;241m=\u001b[39m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mhook_full_embed(\u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43membed\u001b[49m\u001b[43m(\u001b[49m\u001b[43mtokens\u001b[49m\u001b[43m,\u001b[49m\u001b[43m 
\u001b[49m\u001b[43mtoken_type_ids\u001b[49m\u001b[43m)\u001b[49m)\n\u001b[1;32m 174\u001b[0m large_negative_number \u001b[38;5;241m=\u001b[39m \u001b[38;5;241m-\u001b[39mtorch\u001b[38;5;241m.\u001b[39minf\n\u001b[1;32m 175\u001b[0m mask \u001b[38;5;241m=\u001b[39m (\n\u001b[1;32m 176\u001b[0m repeat(\u001b[38;5;241m1\u001b[39m \u001b[38;5;241m-\u001b[39m one_zero_attention_mask, \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mbatch pos -> batch 1 1 pos\u001b[39m\u001b[38;5;124m\"\u001b[39m)\n\u001b[1;32m 177\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m one_zero_attention_mask \u001b[38;5;129;01mis\u001b[39;00m \u001b[38;5;129;01mnot\u001b[39;00m \u001b[38;5;28;01mNone\u001b[39;00m\n\u001b[1;32m 178\u001b[0m \u001b[38;5;28;01melse\u001b[39;00m \u001b[38;5;28;01mNone\u001b[39;00m\n\u001b[1;32m 179\u001b[0m )\n", + "File \u001b[0;32m/opt/anaconda3/envs/mechir/lib/python3.10/site-packages/torch/nn/modules/module.py:1739\u001b[0m, in \u001b[0;36mModule._wrapped_call_impl\u001b[0;34m(self, *args, **kwargs)\u001b[0m\n\u001b[1;32m 1737\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_compiled_call_impl(\u001b[38;5;241m*\u001b[39margs, \u001b[38;5;241m*\u001b[39m\u001b[38;5;241m*\u001b[39mkwargs) \u001b[38;5;66;03m# type: ignore[misc]\u001b[39;00m\n\u001b[1;32m 1738\u001b[0m \u001b[38;5;28;01melse\u001b[39;00m:\n\u001b[0;32m-> 1739\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_call_impl\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n", + "File \u001b[0;32m/opt/anaconda3/envs/mechir/lib/python3.10/site-packages/torch/nn/modules/module.py:1750\u001b[0m, in \u001b[0;36mModule._call_impl\u001b[0;34m(self, *args, **kwargs)\u001b[0m\n\u001b[1;32m 
1745\u001b[0m \u001b[38;5;66;03m# If we don't have any hooks, we want to skip the rest of the logic in\u001b[39;00m\n\u001b[1;32m 1746\u001b[0m \u001b[38;5;66;03m# this function, and just call forward.\u001b[39;00m\n\u001b[1;32m 1747\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;129;01mnot\u001b[39;00m (\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_backward_hooks \u001b[38;5;129;01mor\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_backward_pre_hooks \u001b[38;5;129;01mor\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_forward_hooks \u001b[38;5;129;01mor\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_forward_pre_hooks\n\u001b[1;32m 1748\u001b[0m \u001b[38;5;129;01mor\u001b[39;00m _global_backward_pre_hooks \u001b[38;5;129;01mor\u001b[39;00m _global_backward_hooks\n\u001b[1;32m 1749\u001b[0m \u001b[38;5;129;01mor\u001b[39;00m _global_forward_hooks \u001b[38;5;129;01mor\u001b[39;00m _global_forward_pre_hooks):\n\u001b[0;32m-> 1750\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[43mforward_call\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 1752\u001b[0m result \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;01mNone\u001b[39;00m\n\u001b[1;32m 1753\u001b[0m called_always_called_hooks \u001b[38;5;241m=\u001b[39m \u001b[38;5;28mset\u001b[39m()\n", + "File \u001b[0;32m/opt/anaconda3/envs/mechir/lib/python3.10/site-packages/mechir/modelling/hooked/hooked_components.py:46\u001b[0m, in \u001b[0;36mBertEmbed.forward\u001b[0;34m(self, input_ids, token_type_ids)\u001b[0m\n\u001b[1;32m 42\u001b[0m position_embeddings_out \u001b[38;5;241m=\u001b[39m 
\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mhook_pos_embed(\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mpos_embed(index_ids))\n\u001b[1;32m 43\u001b[0m token_type_embeddings_out \u001b[38;5;241m=\u001b[39m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mhook_token_type_embed(\n\u001b[1;32m 44\u001b[0m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mtoken_type_embed(token_type_ids)\n\u001b[1;32m 45\u001b[0m ) \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mcfg\u001b[38;5;241m.\u001b[39muse_token_type_ids \u001b[38;5;28;01melse\u001b[39;00m torch\u001b[38;5;241m.\u001b[39mzeros_like(word_embeddings_out)\n\u001b[0;32m---> 46\u001b[0m embeddings_out \u001b[38;5;241m=\u001b[39m \u001b[43mword_embeddings_out\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m+\u001b[39;49m\u001b[43m \u001b[49m\u001b[43mposition_embeddings_out\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m+\u001b[39;49m\u001b[43m \u001b[49m\u001b[43mtoken_type_embeddings_out\u001b[49m\n\u001b[1;32m 47\u001b[0m layer_norm_out \u001b[38;5;241m=\u001b[39m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mln(embeddings_out)\n\u001b[1;32m 48\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m layer_norm_out\n", + "\u001b[0;31mRuntimeError\u001b[0m: The size of tensor a (330) must match the size of tensor b (2) at non-singleton dimension 1" + ] + } + ], + "source": [ + "ITERS=10\n", + "patching_head_outputs = []\n", + "for i, batch in enumerate(cat_dataloader):\n", + " # Get the queries, documents, and perturbed documents from the batch\n", + " sequences = batch[\"sequences\"]\n", + " perturbed_sequences = batch[\"perturbed_sequences\"]\n", + " patch_head_out = cat_model.patch(sequences, perturbed_sequences, patch_type=\"head_all\")\n", + " patching_head_outputs.append(patch_head_out)\n", + " \n", + " if i == ITERS:\n", + " break\n", + "mean_head_outputs = torch.mean(torch.stack(patching_head_outputs), axis=0)" + ] + }, + { + 
"cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "plot_components(mean_head_outputs)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [] + } + ], + "metadata": { + "colab": { + "provenance": [] + }, + "kernelspec": { + "display_name": "mechir", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.10.16" + } + }, + "nbformat": 4, + "nbformat_minor": 0 +} diff --git a/notebooks/plot/patch.cross.svg b/notebooks/plot/patch.cross.svg new file mode 100644 index 0000000..6eed501 --- /dev/null +++ b/notebooks/plot/patch.cross.svg @@ -0,0 +1,3293 @@ + + + + + + + + 2025-04-02T10:48:51.174872 + image/svg+xml + + + Matplotlib v3.10.0, https://matplotlib.org/ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/notebooks/plot/patch.pdf b/notebooks/plot/patch.pdf index e641065..01947a4 100644 Binary files a/notebooks/plot/patch.pdf and b/notebooks/plot/patch.pdf differ diff --git a/pyproject.toml b/pyproject.toml index 2a33bf5..b1abc6e 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta" [project] name = "mechir" -version = "0.0.3" +version = "0.0.4" dependencies = [ "torch", "transformers", @@ -12,6 
+12,7 @@ dependencies = [ "sae_lens", "ir_datasets", "streamlit", + "seaborn" ] requires-python = ">=3.10" authors = [ diff --git a/requirements.dev.txt b/requirements.dev.txt new file mode 100644 index 0000000..fbe9f98 --- /dev/null +++ b/requirements.dev.txt @@ -0,0 +1,3 @@ +pytest +ruff +black \ No newline at end of file diff --git a/setup.py b/setup.py index 3e56a70..c0f3468 100644 --- a/setup.py +++ b/setup.py @@ -4,7 +4,7 @@ setup( name=package_name, # The name of your package - version='0.0.2', # Your package version + version='0.0.4', # Your package version packages=find_packages(where='src'), # Look for packages in the 'src' directory package_dir={'': 'src'}, # Map the package name to the 'src' directory author='Anon A. Mous', @@ -21,4 +21,4 @@ 'mechir': ['perturb/data/stopwords.txt'], }, python_requires='>=3.10', -) \ No newline at end of file +) diff --git a/src/mechir/__init__.py b/src/mechir/__init__.py index 78eabc1..6473353 100644 --- a/src/mechir/__init__.py +++ b/src/mechir/__init__.py @@ -1,4 +1,4 @@ -__version__ = "0.0.3" +__version__ = "0.0.4" class MechirConfig: @@ -6,7 +6,7 @@ class MechirConfig: _instance = None _config = { - "ignore-official": False, # default value + "ignore-official": True, # default value # Add other default config options here } diff --git a/src/mechir/data/loader/__init__.py b/src/mechir/data/loader/__init__.py index 556e2f1..cfea2f8 100644 --- a/src/mechir/data/loader/__init__.py +++ b/src/mechir/data/loader/__init__.py @@ -1,230 +1,7 @@ import torch - -def pad(a: list, b: list, tok: str): - assert type(a) == type(b) == list, "Both a and b must be lists" - - padded = [] - i, j = 0, 0 - while i < len(a) and j < len(b): - if a[i] == b[j]: - padded.append(a[i]) - i += 1 - j += 1 - else: - padded.append(tok) - j += 1 - - while j < len(b): - padded.append(tok) - j += 1 - - return padded - - -class BaseCollator(object): - tokenizer = None - transformation_func: callable = None - special_mask: bool = False - q_max_length: int 
= 30 - d_max_length: int = 300 - special_token: int = "a" - perturb_type: str = "append" - pre_perturbed: bool = False - - def __init__( - self, - tokenizer, - transformation_func=None, - special_mask=False, - q_max_length=30, - d_max_length=200, - special_token="a", - perturb_type="append", - pre_perturbed=False, - ) -> None: - assert ( - transformation_func is not None or pre_perturbed - ), "Either a transformation function or pre-perturbed data must be provided." - self.tokenizer = tokenizer - # self.tokenizer.add_special_tokens({"additional_special_tokens": [special_token]}) - # self.special_token_id = self.tokenizer.convert_tokens_to_ids(special_token) - - self.transformation_func = transformation_func - self.special_mask = special_mask - self.perturb_type = perturb_type - self.q_max_length = q_max_length - self.d_max_length = d_max_length - self.special_token = special_token - self.special_token_id = self.tokenizer.convert_tokens_to_ids(self.special_token) - self.perturb_type = perturb_type - self.pre_perturbed = pre_perturbed - - def get_data(self, batch): - if self.pre_perturbed: - queries, docs, perturbed = zip(*batch) - else: - queries, docs = zip(*batch) - perturbed = [ - self.transformation_func(doc, query=query) for query, doc in batch - ] - - batch_padded_docs, batch_padded_perturbed_docs = [], [] - - for doc_a, doc_b in zip(docs, perturbed): - padded_a, padded_b = self.pad_by_perturb_type(doc_a, doc_b) - batch_padded_docs.append(padded_a) - batch_padded_perturbed_docs.append(padded_b) - - return queries, batch_padded_docs, batch_padded_perturbed_docs - - def pad(self, a: str, b: str): - # turn both sequences into list of tokenized elements - a = self.tokenizer.tokenize(a) - b = self.tokenizer.tokenize(b) - - return self.tokenizer.decode( - self.tokenizer.tokens_to_ids(pad(a, b, self.special_token)) - ) - - def pad_by_perturb_type(self, doc_a: str, doc_b: str): - accepted_perturb_types = ["append", "prepend", "replace", "inject"] - assert ( - 
self.perturb_type in accepted_perturb_types - ), f"Perturbation type must be one of the following: {accepted_perturb_types}" - - doc_a = self.tokenizer.tokenize(doc_a) - doc_b = self.tokenizer.tokenize(doc_b) - - if self.perturb_type == "append": - assert len(doc_a) < len( - doc_b - ), "Perturbed document should be longer than original for append perturbation." - doc_a = doc_a + [self.special_token] * (len(doc_b) - len(doc_a)) - elif self.perturb_type == "prepend": - assert len(doc_a) < len( - doc_b - ), "Perturbed document should be longer than original for prepend perturbation." - doc_a = [self.special_token] * (len(doc_b) - len(doc_a)) + doc_a - elif self.perturb_type == "replace": - if len(doc_a) == len(doc_b): - pass # no padding needed - else: - padded_a, padded_b = [], [] - idx_a, idx_b = 0, 0 - while idx_a < len(doc_a) and idx_b < len(doc_b): - if doc_a[idx_a] == doc_b[idx_b]: - padded_a.append(doc_a[idx_a]) - padded_b.append(doc_b[idx_b]) - idx_a += 1 - idx_b += 1 - else: - padded_a.append(doc_a[idx_a]) - padded_b.append(doc_b[idx_b]) - idx_a += 1 - idx_b += 1 - - if len(doc_a) < len(doc_b): - # Replaced term is shorter in length than the term it was replaced with - while idx_b < len(doc_b) and ( - idx_a >= len(doc_a) or doc_b[idx_b] != doc_a[idx_a] - ): - padded_a.append(self.special_token) - padded_b.append(doc_b[idx_b]) - idx_b += 1 - if len(doc_a) > len(doc_b): - # Replaced term is longer than the term it was replaced with - while idx_a < len(doc_a) and ( - idx_b >= len(doc_b) or doc_b[idx_b] != doc_a[idx_a] - ): - padded_a.append(doc_a[idx_a]) - padded_b.append(self.special_token) - idx_a += 1 - - doc_a, doc_b = padded_a, padded_b - - elif self.perturb_type == "inject": - pass - - assert len(doc_a) == len( - doc_b - ), "Failed to pad input pairs, mismatch in document lengths post-padding." 
- return self.tokenizer.convert_tokens_to_string( - doc_a - ), self.tokenizer.convert_tokens_to_string(doc_b) - - -def pad_tokenized( - a_batch: torch.Tensor, - b_batch: torch.Tensor, - pad_tok: int, -): - - a_batch_input_ids, b_batch_input_ids = a_batch["input_ids"], b_batch["input_ids"] - a_batch_attn_mask, b_batch_attn_mask = ( - a_batch["attention_mask"], - b_batch["attention_mask"], - ) - - a_batch_final, b_batch_final = [], [] - a_batch_attn_final, b_batch_attn_final = [], [] - - for a_tokens, b_tokens, a_mask, b_mask in zip( - a_batch_input_ids, b_batch_input_ids, a_batch_attn_mask, b_batch_attn_mask - ): - a_padded_tokens, b_padded_tokens = [], [] - a_padded_attn_mask, b_padded_attn_mask = [], [] - - if len(a_tokens) == len(b_tokens): - # No padding needed - a_padded_tokens.append(a_tokens) - b_padded_tokens.append(b_tokens) - a_padded_attn_mask.append(a_mask) - b_padded_attn_mask.append(b_mask) - else: - # Determine where to pad - idx_a, idx_b = 0, 0 - while idx_a < len(a_tokens) and idx_b < len(b_tokens): - if a_tokens[idx_a] == b_tokens[idx_b]: - a_padded_tokens.append(a_tokens[idx_a]) - b_padded_tokens.append(b_tokens[idx_b]) - a_padded_attn_mask.append(a_mask[idx_a]) - b_padded_attn_mask.append(b_mask[idx_b]) - idx_a += 1 - idx_b += 1 - elif len(a_tokens) < len(b_tokens): - # Accounts for the following perturbations: append, prepend, insert - # Also for replacement where the replaced term is equal to or shorter in length than the term is was replaced with - a_padded_tokens.append(torch.tensor([pad_tok], dtype=torch.int32)) - b_padded_tokens.append(b_tokens[idx_b]) - a_padded_attn_mask.append(a_mask[idx_a]) - b_padded_attn_mask.append(b_mask[idx_b]) - idx_b += 1 - elif len(a_tokens) > len(b_tokens): - # Account for replacement perturbation where the replaced term is longer than the term is was replaced with - a_padded_tokens.append(a_tokens[idx_a]) - b_padded_tokens.append(torch.tensor([pad_tok], dtype=torch.int32)) - 
a_padded_attn_mask.append(a_mask[idx_a]) - b_padded_attn_mask.append(b_mask[idx_b]) - idx_a += 1 - - a_batch_final.append(torch.tensor(a_padded_tokens)) - b_batch_final.append(torch.tensor(b_padded_tokens)) - a_batch_attn_final.append(torch.tensor(a_padded_attn_mask)) - b_batch_attn_final.append(torch.tensor(b_padded_attn_mask)) - - finalized_tokenized_a_batch = { - "input_ids": torch.stack(a_batch_final), - "attention_mask": torch.stack(a_batch_attn_final), - } - finalized_tokenized_b_batch = { - "input_ids": torch.stack(b_batch_final), - "attention_mask": torch.stack(b_batch_attn_final), - } - - return finalized_tokenized_a_batch, finalized_tokenized_b_batch - - +from .base import __all__ as base_all +from .base import * from .cat import __all__ as cat_all from .cat import * from .dot import __all__ as dot_all @@ -232,4 +9,4 @@ def pad_tokenized( from .t5 import __all__ as t5_all from .t5 import * -__all__ = cat_all + dot_all + t5_all + ["pad", "pad_tokenized", "BaseCollator"] +__all__ = base_all + cat_all + dot_all + t5_all diff --git a/src/mechir/data/loader/base.py b/src/mechir/data/loader/base.py new file mode 100644 index 0000000..687ac97 --- /dev/null +++ b/src/mechir/data/loader/base.py @@ -0,0 +1,232 @@ +import torch + + +def pad(a: list, b: list, tok: str): + assert type(a) == type(b) == list, "Both a and b must be lists" + + padded = [] + i, j = 0, 0 + while i < len(a) and j < len(b): + if a[i] == b[j]: + padded.append(a[i]) + i += 1 + j += 1 + else: + padded.append(tok) + j += 1 + + while j < len(b): + padded.append(tok) + j += 1 + + return padded + + +class BaseCollator(object): + tokenizer = None + transformation_func: callable = None + special_mask: bool = False + q_max_length: int = 30 + d_max_length: int = 300 + special_token: int = "a" + perturb_type: str = None + pre_perturbed: bool = False + + def __init__( + self, + tokenizer, + transformation_func=None, + special_mask=False, + q_max_length=30, + d_max_length=200, + special_token="a", + 
perturb_type=None, + pre_perturbed=False, + ) -> None: + assert ( + transformation_func is not None or pre_perturbed + ), "Either a transformation function or pre-perturbed data must be provided." + self.tokenizer = tokenizer + # self.tokenizer.add_special_tokens({"additional_special_tokens": [special_token]}) + # self.special_token_id = self.tokenizer.convert_tokens_to_ids(special_token) + + self.transformation_func = transformation_func + self.special_mask = special_mask + self.perturb_type = perturb_type + self.q_max_length = q_max_length + self.d_max_length = d_max_length + self.special_token = special_token + self.special_token_id = self.tokenizer.convert_tokens_to_ids(self.special_token) + self.perturb_type = ( + perturb_type + if perturb_type is not None + else transformation_func.perturb_type + ) + self.pre_perturbed = pre_perturbed + + def get_data(self, batch): + if self.pre_perturbed: + queries, docs, perturbed = zip(*batch) + else: + queries, docs = zip(*batch) + perturbed = [ + self.transformation_func(doc, query=query) for query, doc in batch + ] + + batch_padded_docs, batch_padded_perturbed_docs = [], [] + + for doc_a, doc_b in zip(docs, perturbed): + padded_a, padded_b = self.pad_by_perturb_type(doc_a, doc_b) + batch_padded_docs.append(padded_a) + batch_padded_perturbed_docs.append(padded_b) + + return queries, batch_padded_docs, batch_padded_perturbed_docs + + def pad(self, a: str, b: str): + # turn both sequences into list of tokenized elements + a = self.tokenizer.tokenize(a) + b = self.tokenizer.tokenize(b) + + return self.tokenizer.decode( + self.tokenizer.tokens_to_ids(pad(a, b, self.special_token)) + ) + + def pad_by_perturb_type(self, doc_a: str, doc_b: str): + accepted_perturb_types = ["append", "prepend", "replace", "inject"] + assert ( + self.perturb_type in accepted_perturb_types + ), f"Perturbation type must be one of the following: {accepted_perturb_types}" + + doc_a = self.tokenizer.tokenize(doc_a) + doc_b = 
self.tokenizer.tokenize(doc_b) + + if self.perturb_type == "append": + assert len(doc_a) < len( + doc_b + ), "Perturbed document should be longer than original for append perturbation." + doc_a = doc_a + [self.special_token] * (len(doc_b) - len(doc_a)) + elif self.perturb_type == "prepend": + assert len(doc_a) < len( + doc_b + ), "Perturbed document should be longer than original for prepend perturbation." + doc_a = [self.special_token] * (len(doc_b) - len(doc_a)) + doc_a + elif self.perturb_type == "replace": + if len(doc_a) == len(doc_b): + pass # no padding needed + else: + padded_a, padded_b = [], [] + idx_a, idx_b = 0, 0 + while idx_a < len(doc_a) and idx_b < len(doc_b): + if doc_a[idx_a] == doc_b[idx_b]: + padded_a.append(doc_a[idx_a]) + padded_b.append(doc_b[idx_b]) + idx_a += 1 + idx_b += 1 + else: + padded_a.append(doc_a[idx_a]) + padded_b.append(doc_b[idx_b]) + idx_a += 1 + idx_b += 1 + + if len(doc_a) < len(doc_b): + # Replaced term is shorter in length than the term it was replaced with + while idx_b < len(doc_b) and ( + idx_a >= len(doc_a) or doc_b[idx_b] != doc_a[idx_a] + ): + padded_a.append(self.special_token) + padded_b.append(doc_b[idx_b]) + idx_b += 1 + if len(doc_a) > len(doc_b): + # Replaced term is longer than the term it was replaced with + while idx_a < len(doc_a) and ( + idx_b >= len(doc_b) or doc_b[idx_b] != doc_a[idx_a] + ): + padded_a.append(doc_a[idx_a]) + padded_b.append(self.special_token) + idx_a += 1 + + doc_a, doc_b = padded_a, padded_b + + elif self.perturb_type == "inject": + pass + + assert len(doc_a) == len( + doc_b + ), "Failed to pad input pairs, mismatch in document lengths post-padding." 
+ return self.tokenizer.convert_tokens_to_string( + doc_a + ), self.tokenizer.convert_tokens_to_string(doc_b) + + +def pad_tokenized( + a_batch: torch.Tensor, + b_batch: torch.Tensor, + pad_tok: int, +): + + a_batch_input_ids, b_batch_input_ids = a_batch["input_ids"], b_batch["input_ids"] + a_batch_attn_mask, b_batch_attn_mask = ( + a_batch["attention_mask"], + b_batch["attention_mask"], + ) + + a_batch_final, b_batch_final = [], [] + a_batch_attn_final, b_batch_attn_final = [], [] + + for a_tokens, b_tokens, a_mask, b_mask in zip( + a_batch_input_ids, b_batch_input_ids, a_batch_attn_mask, b_batch_attn_mask + ): + a_padded_tokens, b_padded_tokens = [], [] + a_padded_attn_mask, b_padded_attn_mask = [], [] + + if len(a_tokens) == len(b_tokens): + # No padding needed + a_padded_tokens.append(a_tokens) + b_padded_tokens.append(b_tokens) + a_padded_attn_mask.append(a_mask) + b_padded_attn_mask.append(b_mask) + else: + # Determine where to pad + idx_a, idx_b = 0, 0 + while idx_a < len(a_tokens) and idx_b < len(b_tokens): + if a_tokens[idx_a] == b_tokens[idx_b]: + a_padded_tokens.append(a_tokens[idx_a]) + b_padded_tokens.append(b_tokens[idx_b]) + a_padded_attn_mask.append(a_mask[idx_a]) + b_padded_attn_mask.append(b_mask[idx_b]) + idx_a += 1 + idx_b += 1 + elif len(a_tokens) < len(b_tokens): + # Accounts for the following perturbations: append, prepend, insert + # Also for replacement where the replaced term is equal to or shorter in length than the term is was replaced with + a_padded_tokens.append(torch.tensor([pad_tok], dtype=torch.int32)) + b_padded_tokens.append(b_tokens[idx_b]) + a_padded_attn_mask.append(a_mask[idx_a]) + b_padded_attn_mask.append(b_mask[idx_b]) + idx_b += 1 + elif len(a_tokens) > len(b_tokens): + # Account for replacement perturbation where the replaced term is longer than the term is was replaced with + a_padded_tokens.append(a_tokens[idx_a]) + b_padded_tokens.append(torch.tensor([pad_tok], dtype=torch.int32)) + 
a_padded_attn_mask.append(a_mask[idx_a]) + b_padded_attn_mask.append(b_mask[idx_b]) + idx_a += 1 + + a_batch_final.append(torch.tensor(a_padded_tokens)) + b_batch_final.append(torch.tensor(b_padded_tokens)) + a_batch_attn_final.append(torch.tensor(a_padded_attn_mask)) + b_batch_attn_final.append(torch.tensor(b_padded_attn_mask)) + + finalized_tokenized_a_batch = { + "input_ids": torch.stack(a_batch_final), + "attention_mask": torch.stack(a_batch_attn_final), + } + finalized_tokenized_b_batch = { + "input_ids": torch.stack(b_batch_final), + "attention_mask": torch.stack(b_batch_attn_final), + } + + return finalized_tokenized_a_batch, finalized_tokenized_b_batch + + +__all__ = ["BaseCollator", "pad_tokenized", "pad"] diff --git a/src/mechir/data/loader/cat.py b/src/mechir/data/loader/cat.py index 3be1c52..d957acf 100644 --- a/src/mechir/data/loader/cat.py +++ b/src/mechir/data/loader/cat.py @@ -1,4 +1,4 @@ -from . import BaseCollator +from .base import BaseCollator class CatDataCollator(BaseCollator): diff --git a/src/mechir/data/loader/dot.py b/src/mechir/data/loader/dot.py index 4fdb9f9..5f4f703 100644 --- a/src/mechir/data/loader/dot.py +++ b/src/mechir/data/loader/dot.py @@ -1,4 +1,4 @@ -from . import BaseCollator +from .base import BaseCollator class DotDataCollator(BaseCollator): diff --git a/src/mechir/data/loader/t5.py b/src/mechir/data/loader/t5.py index 29a7b09..2236418 100644 --- a/src/mechir/data/loader/t5.py +++ b/src/mechir/data/loader/t5.py @@ -1,4 +1,4 @@ -from . import BaseCollator +from .base import BaseCollator class MonoT5DataCollator(BaseCollator): diff --git a/src/mechir/modelling/__init__.py b/src/mechir/modelling/__init__.py index 1bc85eb..0cdaa7f 100644 --- a/src/mechir/modelling/__init__.py +++ b/src/mechir/modelling/__init__.py @@ -1,5 +1,18 @@ +from . import architectures as architectures +from . 
import hooked as hooked + from .patched import PatchedMixin as PatchedMixin from .sae import SAEMixin as SAEMixin from .cat import Cat as Cat from .dot import Dot as Dot from .t5 import MonoT5 as MonoT5 + +__all__ = [ + "architectures", + "hooked", + "PatchedMixin", + "SAEMixin", + "Cat", + "Dot", + "MonoT5", +] diff --git a/src/mechir/modelling/architectures/__init__.py b/src/mechir/modelling/architectures/__init__.py new file mode 100644 index 0000000..3c372fe --- /dev/null +++ b/src/mechir/modelling/architectures/__init__.py @@ -0,0 +1,12 @@ +from .base import HookedEncoder, HookedEncoderForSequenceClassification +from .distilbert import HookedDistilBert, HookedDistilBertForSequenceClassification +from .electra import HookedElectra, HookedElectraForSequenceClassification + +__all__ = [ + "HookedEncoder", + "HookedEncoderForSequenceClassification", + "HookedDistilBert", + "HookedDistilBertForSequenceClassification", + "HookedElectra", + "HookedElectraForSequenceClassification", +] diff --git a/src/mechir/modelling/hooked/HookedDistilBert.py b/src/mechir/modelling/architectures/base.py similarity index 61% rename from src/mechir/modelling/hooked/HookedDistilBert.py rename to src/mechir/modelling/architectures/base.py index ef928cf..63d66cb 100644 --- a/src/mechir/modelling/hooked/HookedDistilBert.py +++ b/src/mechir/modelling/architectures/base.py @@ -7,6 +7,7 @@ from __future__ import annotations import logging +import os from typing import Dict, List, Optional, Tuple, Union, cast, overload import torch @@ -16,15 +17,35 @@ from transformers import AutoTokenizer from typing_extensions import Literal -from transformer_lens import ActivationCache, FactoredMatrix, HookedTransformerConfig -from transformer_lens.components import BertBlock, BertMLMHead, Unembed +from mechir.modelling.hooked import loading_from_pretrained as loading +from transformer_lens.ActivationCache import ActivationCache +from transformer_lens.components import ( + BertBlock, + BertMLMHead, + 
Unembed, + BertNSPHead, + BertPooler, +) +from transformer_lens.FactoredMatrix import FactoredMatrix from transformer_lens.hook_points import HookedRootModule, HookPoint from transformer_lens.utilities import devices -from . import loading_from_pretrained as loading -from .hooked_components import DistilBertEmbed +from mechir.modelling.hooked.config import HookedTransformerConfig +from mechir.modelling.hooked.components import BertEmbed +from mechir.modelling.hooked.linear import ClassificationHead, MLPClassificationHead + + +class HookedEncoder(HookedRootModule): + """ + This class implements a BERT-style encoder using the components in ./components.py, with HookPoints on every interesting activation. It inherits from HookedRootModule. + + Limitations: + - The model does not include dropouts, which may lead to inconsistent results from training or fine-tuning. + + Like HookedTransformer, it can have a pretrained Transformer's weights loaded via `.from_pretrained`. There are a few features you might know from HookedTransformer which are not yet supported: + - There is no preprocessing (e.g. 
LayerNorm folding) when loading a pretrained model + """ -class HookedDistilBert(HookedRootModule): def __init__(self, cfg, tokenizer=None, move_to_device=True, **kwargs): super().__init__() if isinstance(cfg, Dict): @@ -41,7 +62,11 @@ def __init__(self, cfg, tokenizer=None, move_to_device=True, **kwargs): if tokenizer is not None: self.tokenizer = tokenizer elif self.cfg.tokenizer_name is not None: - self.tokenizer = AutoTokenizer.from_pretrained(self.cfg.tokenizer_name) + huggingface_token = os.environ.get("HF_TOKEN", "") + self.tokenizer = AutoTokenizer.from_pretrained( + self.cfg.tokenizer_name, + token=huggingface_token if len(huggingface_token) > 0 else None, + ) else: self.tokenizer = None @@ -54,37 +79,124 @@ def __init__(self, cfg, tokenizer=None, move_to_device=True, **kwargs): if self.cfg.d_vocab_out == -1: self.cfg.d_vocab_out = self.cfg.d_vocab - self.embed = DistilBertEmbed(self.cfg) + self.embed = BertEmbed(self.cfg) self.blocks = nn.ModuleList( [BertBlock(self.cfg) for _ in range(self.cfg.n_layers)] ) - self.mlm_head = BertMLMHead(cfg) + self.mlm_head = BertMLMHead(self.cfg) self.unembed = Unembed(self.cfg) + self.nsp_head = BertNSPHead(self.cfg) + self.pooler = BertPooler(self.cfg) self.hook_full_embed = HookPoint() + self.use_token_type_ids = self.cfg.use_token_type_ids + if move_to_device: self.to(self.cfg.device) self.setup() - @overload - def forward( + def to_tokens( self, - input: Int[torch.Tensor, "batch pos"], - return_type: Literal["embeddings"], - token_type_ids: Optional[Int[torch.Tensor, "batch pos"]] = None, - attention_mask: Optional[Int[torch.Tensor, "batch pos"]] = None, - ) -> Float[torch.Tensor, "batch pos d_vocab"]: ... + input: Union[str, List[str]], + move_to_device: bool = True, + truncate: bool = True, + ) -> Tuple[ + Int[torch.Tensor, "batch pos"], + Int[torch.Tensor, "batch pos"], + Int[torch.Tensor, "batch pos"], + ]: + """Converts a string to a tensor of tokens. 
+ Taken mostly from the HookedTransformer implementation, but does not support default padding + sides or prepend_bos. + Args: + input (Union[str, List[str]]): The input to tokenize. + move_to_device (bool): Whether to move the output tensor of tokens to the device the model lives on. Defaults to True + truncate (bool): If the output tokens are too long, whether to truncate the output + tokens to the model's max context window. Does nothing for shorter inputs. Defaults to + True. + """ - @overload - def forward( + assert self.tokenizer is not None, "Cannot use to_tokens without a tokenizer" + + encodings = self.tokenizer( + input, + return_tensors="pt", + padding=True, + truncation=truncate, + max_length=self.cfg.n_ctx if truncate else None, + ) + + tokens = encodings.input_ids + + if move_to_device: + tokens = tokens.to(self.cfg.device) + token_type_ids = ( + encodings.token_type_ids.to(self.cfg.device) + if self.use_token_type_ids + else None + ) + attention_mask = encodings.attention_mask.to(self.cfg.device) + + return tokens, token_type_ids, attention_mask + + def encoder_output( self, - input: Int[torch.Tensor, "batch pos"], - return_type: Literal[None], + tokens: Int[torch.Tensor, "batch pos"], token_type_ids: Optional[Int[torch.Tensor, "batch pos"]] = None, - attention_mask: Optional[Int[torch.Tensor, "batch pos"]] = None, - ) -> Optional[Float[torch.Tensor, "batch pos d_vocab"]]: ... + start_at_layer: Optional[int] = None, + stop_at_layer: Optional[int] = None, + one_zero_attention_mask: Optional[Int[torch.Tensor, "batch pos"]] = None, + ) -> Float[torch.Tensor, "batch pos d_vocab"]: + """Processes input through the encoder layers and returns the resulting residual stream. + + Args: + input: Input tokens as integers with shape (batch, position) + token_type_ids: Optional binary ids indicating segment membership. + Shape (batch_size, sequence_length). 
For example, with input + "[CLS] Sentence A [SEP] Sentence B [SEP]", token_type_ids would be + [0, 0, ..., 0, 1, ..., 1, 1] where 0 marks tokens from sentence A + and 1 marks tokens from sentence B. + one_zero_attention_mask: Optional binary mask of shape (batch_size, sequence_length) + where 1 indicates tokens to attend to and 0 indicates tokens to ignore. + Used primarily for handling padding in batched inputs. + + Returns: + resid: Final residual stream tensor of shape (batch, position, d_model) + + Raises: + AssertionError: If using string input without a tokenizer + """ + + if tokens.device.type != self.cfg.device: + tokens = tokens.to(self.cfg.device) + if one_zero_attention_mask is not None: + one_zero_attention_mask = one_zero_attention_mask.to(self.cfg.device) + + resid = self.hook_full_embed(self.embed(tokens, token_type_ids)) + + large_negative_number = -torch.inf + mask = ( + repeat(1 - one_zero_attention_mask, "batch pos -> batch 1 1 pos") + if one_zero_attention_mask is not None + else None + ) + additive_attention_mask = ( + torch.where(mask == 1, large_negative_number, 0) + if mask is not None + else None + ) + + if start_at_layer is None: + start_at_layer = 0 + + idx_and_block = list(zip(range(self.cfg.n_layers), self.blocks)) + + for _, block in idx_and_block[start_at_layer:stop_at_layer]: + resid = block(resid, additive_attention_mask) + + return resid def forward( self, @@ -92,7 +204,9 @@ def forward( return_type: Optional[str] = "embeddings", token_type_ids: Optional[Int[torch.Tensor, "batch pos"]] = None, attention_mask: Optional[Int[torch.Tensor, "batch pos"]] = None, - ) -> Optional[Float[torch.Tensor, "batch pos d_vocab"]]: + start_at_layer: Optional[int] = None, + stop_at_layer: Optional[int] = None, + ) -> Union[Float[torch.Tensor, "batch pos d_vocab"], None]: """Input must be a batch of tokens. Strings and lists of strings are not yet supported. return_type Optional[str]: The type of output to return. 
Can be one of: None (return nothing, don't calculate logits), or 'logits' (return logits). @@ -102,55 +216,88 @@ def forward( attention_mask: Optional[torch.Tensor]: A binary mask which indicates which tokens should be attended to (1) and which should be ignored (0). Primarily used for padding variable-length sentences in a batch. For instance, in a batch with sentences of differing lengths, shorter sentences are padded with 0s on the right. If not provided, the model assumes all tokens should be attended to. """ - tokens = input + if start_at_layer is None: + if isinstance(input, str) or isinstance(input, list): + assert ( + self.tokenizer is not None + ), "Must provide a tokenizer if input is a string" + input, token_type_ids_from_tokenizer, attention_mask = self.to_tokens( + input + ) + + # If token_type_ids or attention mask are not provided, use the ones from the tokenizer + token_type_ids = ( + token_type_ids_from_tokenizer + if token_type_ids is None + else token_type_ids + ) + else: + assert type(input) is torch.Tensor + residual = input - if tokens.device.type != self.cfg.device: - tokens = tokens.to(self.cfg.device) + if residual.device.type != self.cfg.device: + residual = residual.to(self.cfg.device) if attention_mask is not None: attention_mask = attention_mask.to(self.cfg.device) - - resid = self.hook_full_embed(self.embed(tokens)) - - large_negative_number = -torch.inf - mask = ( - repeat(1 - attention_mask, "batch pos -> batch 1 1 pos") - if attention_mask is not None - else None - ) - additive_attention_mask = ( - torch.where(mask == 1, large_negative_number, 0) - if mask is not None - else None + if start_at_layer is None: + start_at_layer = 0 + + resid = self.encoder_output( + residual, + token_type_ids=token_type_ids, + start_at_layer=start_at_layer, + stop_at_layer=stop_at_layer, + one_zero_attention_mask=attention_mask, ) - for block in self.blocks: - resid = block(resid, additive_attention_mask) - - if return_type == "embeddings": + if 
stop_at_layer is not None or return_type == "embeddings": return resid resid = self.mlm_head(resid) + logits = self.unembed(resid) - if return_type is None: - return + if return_type == "predictions": + # Get predictions for masked tokens + logprobs = logits[logits == self.tokenizer.mask_token_id].log_softmax( + dim=-1 + ) + predictions = self.tokenizer.decode(logprobs.argmax(dim=-1)) + + # If input was a list of strings, split predictions into a list + if " " in predictions: + # Split along space + predictions = predictions.split(" ") + predictions = [ + f"Prediction {i}: {p}" for i, p in enumerate(predictions) + ] + return predictions + + elif return_type is None: + return None - logits = self.unembed(resid) return logits @overload def run_with_cache( self, *model_args, return_cache_object: Literal[True] = True, **kwargs - ) -> Tuple[Float[torch.Tensor, "batch pos d_vocab"], ActivationCache]: ... + ) -> Tuple[ + Float[torch.Tensor, "batch pos d_vocab"], + ActivationCache, + ]: ... @overload def run_with_cache( - self, *model_args, return_cache_object: Literal[False] = False, **kwargs - ) -> Tuple[Float[torch.Tensor, "batch pos d_vocab"], Dict[str, torch.Tensor]]: ... + self, *model_args, return_cache_object: Literal[False], **kwargs + ) -> Tuple[ + Float[torch.Tensor, "batch pos d_vocab"], + Dict[str, torch.Tensor], + ]: ... 
def run_with_cache( self, *model_args, return_cache_object: bool = True, + cache_as_dict: bool = False, remove_batch_dim: bool = False, **kwargs, ) -> Tuple[ @@ -164,14 +311,15 @@ def run_with_cache( *model_args, remove_batch_dim=remove_batch_dim, **kwargs ) if return_cache_object: - cache = ActivationCache( - cache_dict, self, has_batch_dim=not remove_batch_dim - ) + if not cache_as_dict: + cache = ActivationCache( + cache_dict, self, has_batch_dim=not remove_batch_dim + ) return out, cache else: - return out, cache_dict + return out, None - def to( + def to( # type: ignore self, device_or_dtype: Union[torch.device, str, torch.dtype], print_details: bool = True, @@ -202,7 +350,7 @@ def from_pretrained( move_to_device=True, dtype=torch.float32, **from_pretrained_kwargs, - ) -> HookedDistilBert: + ) -> HookedEncoder: """Loads in the pretrained weights from huggingface. Currently supports loading weight from HuggingFace BertForMaskedLM. Unlike HookedTransformer, this does not yet do any preprocessing on the model.""" logging.warning( "Support for BERT in TransformerLens is currently experimental, until such a time when it has feature " @@ -260,6 +408,9 @@ def W_U(self) -> Float[torch.Tensor, "d_model d_vocab"]: @property def b_U(self) -> Float[torch.Tensor, "d_vocab"]: + """ + Convenience to get the unembedding bias + """ return self.unembed.b_U @property @@ -369,13 +520,17 @@ def b_out(self) -> Float[torch.Tensor, "n_layers d_model"]: @property def QK(self) -> FactoredMatrix: # [n_layers, n_heads, d_model, d_model] + """Returns a FactoredMatrix object with the product of the Q and K matrices for each layer and head. 
+ Useful for visualizing attention patterns.""" return FactoredMatrix(self.W_Q, self.W_K.transpose(-2, -1)) @property def OV(self) -> FactoredMatrix: # [n_layers, n_heads, d_model, d_model] + """Returns a FactoredMatrix object with the product of the O and V matrices for each layer and head.""" return FactoredMatrix(self.W_V, self.W_O) def all_head_labels(self) -> List[str]: + """Returns a list of strings with the format "L{l}H{h}", where l is the layer index and h is the head index.""" return [ f"L{l}H{h}" for l in range(self.cfg.n_layers) @@ -383,9 +538,9 @@ def all_head_labels(self) -> List[str]: ] -class HookedDistilBertForSequenceClassification(HookedDistilBert): +class HookedEncoderForSequenceClassification(HookedEncoder): """ - This class implements a BERT-style encoder for sequence classification using the components in ./components.py, with HookPoints on every interesting activation. It inherits from HookedDistilBert. + This class implements a BERT-style encoder using the components in ./components.py, with HookPoints on every interesting activation. It inherits from HookedRootModule. Limitations: - The current MVP implementation supports only the masked language modelling (MLM) task. Next sentence prediction (NSP), causal language modelling, and other tasks are not yet supported. 
@@ -397,31 +552,22 @@ class HookedDistilBertForSequenceClassification(HookedDistilBert): """ def __init__(self, cfg, tokenizer=None, move_to_device=True, **kwargs): - super().__init__(cfg, tokenizer, move_to_device=move_to_device, **kwargs) - self.classifier = nn.Linear(cfg.d_model, cfg.n_labels) + super().__init__(cfg, tokenizer, move_to_device, **kwargs) + self.classifier = ( + ClassificationHead(cfg) + if not self.cfg.use_mlp_head + else MLPClassificationHead(cfg) + ) self.setup() - @overload - def forward( - self, - input: Int[torch.Tensor, "batch pos"], - return_type: Literal["logits"], - attention_mask: Optional[Int[torch.Tensor, "batch pos"]] = None, - ) -> Float[torch.Tensor, "batch pos d_vocab"]: ... - - @overload def forward( self, input: Int[torch.Tensor, "batch pos"], - return_type: Literal[None], - attention_mask: Optional[Int[torch.Tensor, "batch pos"]] = None, - ) -> Optional[Float[torch.Tensor, "batch pos d_vocab"]]: ... - - def forward( - self, - input: Int[torch.Tensor, "batch pos"], - return_type: Optional[str] = "logits", + return_type: Optional[str] = "embeddings", + token_type_ids: Optional[Int[torch.Tensor, "batch pos"]] = None, attention_mask: Optional[Int[torch.Tensor, "batch pos"]] = None, + start_at_layer: Optional[int] = None, + stop_at_layer: Optional[int] = None, ) -> Optional[Float[torch.Tensor, "batch pos d_vocab"]]: """Input must be a batch of tokens. Strings and lists of strings are not yet supported. @@ -432,35 +578,21 @@ def forward( attention_mask: Optional[torch.Tensor]: A binary mask which indicates which tokens should be attended to (1) and which should be ignored (0). Primarily used for padding variable-length sentences in a batch. For instance, in a batch with sentences of differing lengths, shorter sentences are padded with 0s on the right. If not provided, the model assumes all tokens should be attended to. 
""" - tokens = input - - if tokens.device.type != self.cfg.device: - tokens = tokens.to(self.cfg.device) - if attention_mask is not None: - attention_mask = attention_mask.to(self.cfg.device) - - resid = self.hook_full_embed(self.embed(tokens)) - - large_negative_number = -torch.inf - mask = ( - repeat(1 - attention_mask, "batch pos -> batch 1 1 pos") - if attention_mask is not None - else None - ) - additive_attention_mask = ( - torch.where(mask == 1, large_negative_number, 0) - if mask is not None - else None + hidden = super().forward( + input, + token_type_ids=token_type_ids, + start_at_layer=start_at_layer, + stop_at_layer=stop_at_layer, + return_type="embeddings", + attention_mask=attention_mask, ) - - for block in self.blocks: - resid = block(resid, additive_attention_mask) - - if return_type == "embeddings": - return resid + if return_type == "embeddings" or stop_at_layer is not None: + return hidden + logits = self.classifier(hidden[:, 0, :]) if return_type is None: - return - - logits = self.classifier(resid[:, 0, :]) + return None return logits + + +__all__ = ["HookedEncoder", "HookedEncoderForSequenceClassification"] diff --git a/src/mechir/modelling/architectures/distilbert.py b/src/mechir/modelling/architectures/distilbert.py new file mode 100644 index 0000000..e245629 --- /dev/null +++ b/src/mechir/modelling/architectures/distilbert.py @@ -0,0 +1,112 @@ +"""Hooked Encoder. + +Contains a BERT style model. This is separate from :class:`transformer_lens.HookedTransformer` +because it has a significantly different architecture to e.g. GPT style transformers. 
+""" + +from __future__ import annotations + +from typing import Dict, Optional, overload + +import torch +from einops import repeat +from jaxtyping import Float, Int +from torch import nn +from transformers import AutoTokenizer +from typing_extensions import Literal + +from transformer_lens.components import BertBlock, BertMLMHead, Unembed +from transformer_lens.hook_points import HookPoint +from mechir.modelling.hooked.components import BertEmbed +from mechir.modelling.hooked.linear import MLPClassificationHead +from mechir.modelling.architectures.base import HookedEncoder +from mechir.modelling.hooked.config import HookedTransformerConfig + + +HookedDistilBert = HookedEncoder + +class HookedDistilBertForSequenceClassification(HookedDistilBert): + """ + This class implements a BERT-style encoder for sequence classification using the components in ./components.py, with HookPoints on every interesting activation. It inherits from HookedDistilBert. + + Limitations: + - The current MVP implementation supports only the masked language modelling (MLM) task. Next sentence prediction (NSP), causal language modelling, and other tasks are not yet supported. + - Also note that model does not include dropouts, which may lead to inconsistent results from training or fine-tuning. + + Like HookedTransformer, it can have a pretrained Transformer's weights loaded via `.from_pretrained`. There are a few features you might know from HookedTransformer which are not yet supported: + - There is no preprocessing (e.g. 
LayerNorm folding) when loading a pretrained model + - The model only accepts tokens as inputs, and not strings, or lists of strings + """ + + def __init__(self, cfg, tokenizer=None, move_to_device=True, **kwargs): + super().__init__(cfg, tokenizer, move_to_device=move_to_device, **kwargs) + self.classifier = MLPClassificationHead(self.cfg) + self.setup() + + @overload + def forward( + self, + input: Int[torch.Tensor, "batch pos"], + return_type: Literal["logits"], + attention_mask: Optional[Int[torch.Tensor, "batch pos"]] = None, + ) -> Float[torch.Tensor, "batch pos d_vocab"]: ... + + @overload + def forward( + self, + input: Int[torch.Tensor, "batch pos"], + return_type: Literal[None], + attention_mask: Optional[Int[torch.Tensor, "batch pos"]] = None, + ) -> Optional[Float[torch.Tensor, "batch pos d_vocab"]]: ... + + def forward( + self, + input: Int[torch.Tensor, "batch pos"], + return_type: Optional[str] = "logits", + attention_mask: Optional[Int[torch.Tensor, "batch pos"]] = None, + ) -> Optional[Float[torch.Tensor, "batch pos d_vocab"]]: + """Input must be a batch of tokens. Strings and lists of strings are not yet supported. + + return_type Optional[str]: The type of output to return. Can be one of: None (return nothing, don't calculate logits), or 'logits' (return logits). + + token_type_ids Optional[torch.Tensor]: Binary ids indicating whether a token belongs to sequence A or B. For example, for two sentences: "[CLS] Sentence A [SEP] Sentence B [SEP]", token_type_ids would be [0, 0, ..., 0, 1, ..., 1, 1]. `0` represents tokens from Sentence A, `1` from Sentence B. If not provided, BERT assumes a single sequence input. Typically, shape is (batch_size, sequence_length). + + attention_mask: Optional[torch.Tensor]: A binary mask which indicates which tokens should be attended to (1) and which should be ignored (0). Primarily used for padding variable-length sentences in a batch. 
For instance, in a batch with sentences of differing lengths, shorter sentences are padded with 0s on the right. If not provided, the model assumes all tokens should be attended to. + """ + + tokens = input + + if tokens.device.type != self.cfg.device: + tokens = tokens.to(self.cfg.device) + if attention_mask is not None: + attention_mask = attention_mask.to(self.cfg.device) + + resid = self.hook_full_embed(self.embed(tokens)) + + large_negative_number = -torch.inf + mask = ( + repeat(1 - attention_mask, "batch pos -> batch 1 1 pos") + if attention_mask is not None + else None + ) + additive_attention_mask = ( + torch.where(mask == 1, large_negative_number, 0) + if mask is not None + else None + ) + + for block in self.blocks: + resid = block(resid, additive_attention_mask) + + if return_type == "embeddings": + return resid + + if return_type is None: + return + + logits = self.mlp(resid[:, 0, :]) + logits = self.out_proj(logits) + return logits + + +__all__ = ["HookedDistilBert", "HookedDistilBertForSequenceClassification"] diff --git a/src/mechir/modelling/architectures/electra.py b/src/mechir/modelling/architectures/electra.py new file mode 100644 index 0000000..51ed8d7 --- /dev/null +++ b/src/mechir/modelling/architectures/electra.py @@ -0,0 +1,93 @@ +"""Hooked ELECTRA. + +Contains a ELECTRA style model. This is separate from :class:`transformer_lens.HookedTransformer` +because it has a significantly different architecture to e.g. GPT style transformers. 
+""" + +from __future__ import annotations + +import logging +from typing import Dict, Optional, Union + +import torch +from jaxtyping import Float, Int +from torch import nn +from transformer_lens.hook_points import HookPoint +from mechir.modelling.hooked.linear import ClassificationHead, HiddenLinear +from mechir.modelling.architectures.base import HookedEncoder +from mechir.modelling.hooked.config import HookedTransformerConfig + + +class ElectraClassificationHead(nn.Module): + """ + Transforms ELECTRA embeddings into logits. The purpose of this module is to predict masked tokens in a sentence. + """ + + def __init__(self, cfg: Union[Dict, HookedTransformerConfig]): + super().__init__() + self.cfg = HookedTransformerConfig.unwrap(cfg) + self.dense = HiddenLinear(cfg) + self.out_proj = ClassificationHead(cfg) + self.activation = nn.GELU() + + self.hook_pre = HookPoint() # [batch, pos, d_mlp] + self.hook_post = HookPoint() # [batch, pos, d_mlp] + + def forward(self, resid: Float[torch.Tensor, "batch d_model"]) -> torch.Tensor: + pre_act = self.hook_pre(self.dense(resid)) + post_act = self.hook_post(self.activation(pre_act)) + return self.out_proj(post_act) + +HookedElectra = HookedEncoder + +class HookedElectraForSequenceClassification(HookedEncoder): + """ + This class implements a ELECTRA-style encoder using the components in ./components.py, with HookPoints on every interesting activation. It inherits from HookedRootModule. + + + Like HookedTransformer, it can have a pretrained Transformer's weights loaded via `.from_pretrained`. There are a few features you might know from HookedTransformer which are not yet supported: + - There is no preprocessing (e.g. 
LayerNorm folding) when loading a pretrained model + - The model only accepts tokens as inputs, and not strings, or lists of strings + """ + + def __init__(self, cfg, tokenizer=None, move_to_device=True, **kwargs): + super().__init__(cfg, tokenizer, move_to_device, **kwargs) + self.classifier = ElectraClassificationHead(cfg) + self.setup() + + def forward( + self, + input: Int[torch.Tensor, "batch pos"], + return_type: Optional[str] = "embeddings", + token_type_ids: Optional[Int[torch.Tensor, "batch pos"]] = None, + attention_mask: Optional[Int[torch.Tensor, "batch pos"]] = None, + start_at_layer: Optional[int] = None, + stop_at_layer: Optional[int] = None, + ) -> Optional[Float[torch.Tensor, "batch pos d_vocab"]]: + """Input must be a batch of tokens. Strings and lists of strings are not yet supported. + + return_type Optional[str]: The type of output to return. Can be one of: None (return nothing, don't calculate logits), or 'logits' (return logits). + + token_type_ids Optional[torch.Tensor]: Binary ids indicating whether a token belongs to sequence A or B. For example, for two sentences: "[CLS] Sentence A [SEP] Sentence B [SEP]", token_type_ids would be [0, 0, ..., 0, 1, ..., 1, 1]. `0` represents tokens from Sentence A, `1` from Sentence B. If not provided, BERT assumes a single sequence input. Typically, shape is (batch_size, sequence_length). + + attention_mask: Optional[torch.Tensor]: A binary mask which indicates which tokens should be attended to (1) and which should be ignored (0). Primarily used for padding variable-length sentences in a batch. For instance, in a batch with sentences of differing lengths, shorter sentences are padded with 0s on the right. If not provided, the model assumes all tokens should be attended to. 
+ """ + + hidden = super().forward( + input, + token_type_ids=token_type_ids, + start_at_layer=start_at_layer, + stop_at_layer=stop_at_layer, + return_type="embeddings", + attention_mask=attention_mask, + ) + if return_type == "embeddings" or stop_at_layer is not None: + return hidden + logits = self.classifier(hidden[:, 0, :]) + + if return_type is None: + return None + return logits + + +__all__ = ["HookedElectra", "HookedElectraForSequenceClassification"] diff --git a/src/mechir/modelling/cat.py b/src/mechir/modelling/cat.py index f3636fd..fe4aeaf 100644 --- a/src/mechir/modelling/cat.py +++ b/src/mechir/modelling/cat.py @@ -1,44 +1,28 @@ -from typing import Callable +from typing import Callable, Dict, Tuple, Union import logging -import os import torch from jaxtyping import Float -from transformers import AutoModelForSequenceClassification, AutoTokenizer, AutoConfig +from transformers import AutoModelForSequenceClassification, AutoTokenizer from transformer_lens.ActivationCache import ActivationCache from transformer_lens.hook_points import HookedRootModule import transformer_lens.utils as utils import torch.nn.functional as F -from .patched import PatchedMixin -from .sae import SAEMixin -from .hooked.loading_from_pretrained import get_official_model_name -from .hooked.HookedDistilBert import HookedDistilBertForSequenceClassification -from ..util import linear_rank_function -from ..modelling.hooked.HookedEncoderForSequenceClassification import HookedEncoderForSequenceClassification -from ..modelling.hooked.HookedElectra import HookedElectraForSequenceClassification +from mechir.modelling.patched import PatchedMixin +from mechir.modelling.sae import SAEMixin +from mechir.modelling.hooked.loading_from_pretrained import get_official_model_name +from mechir.util import linear_rank_function +from mechir.modelling.architectures import HookedEncoderForSequenceClassification logger = logging.getLogger(__name__) -def get_hooked(architecture): - huggingface_token = 
os.environ.get("HF_TOKEN", None) - hf_config = AutoConfig.from_pretrained( - get_official_model_name(architecture), token=huggingface_token - ) - architecture = hf_config.architectures[0] - if "distilbert" in architecture.lower(): - return HookedDistilBertForSequenceClassification - if "electra" in architecture.lower(): - return HookedElectraForSequenceClassification - return HookedEncoderForSequenceClassification - - class Cat(HookedRootModule, PatchedMixin, SAEMixin): def __init__( self, model_name_or_path: str, num_labels: int = 2, tokenizer=None, - special_token: str = "X", + special_token: str = "a", softmax_output: bool = False, return_cache: bool = False, ) -> None: @@ -59,7 +43,7 @@ def __init__( .to(self._device) ) - self._model = get_hooked(model_name_or_path).from_pretrained( + self._model = HookedEncoderForSequenceClassification.from_pretrained( self.model_name_or_path, device=self._device, hf_model=self.__hf_model ) @@ -74,7 +58,12 @@ def forward( attention_mask: Float[torch.Tensor, "batch seq"], token_type_ids: Float[torch.Tensor, "batch seq"] = None, ): - model_output = self._model(input=input_ids, attention_mask=attention_mask, token_type_ids=token_type_ids, return_type="logits") + model_output = self._model( + input=input_ids, + attention_mask=attention_mask, + token_type_ids=token_type_ids, + return_type="logits", + ) model_output = ( F.log_softmax(model_output, dim=-1)[:, 0] if self.softmax_output @@ -169,16 +158,42 @@ def get_act_patch_attn_head_by_pos( results[index] = patching_metric(output, scores, scores_p).mean() return results - - def score(self, sequences: dict, cache=False): + + def run_with_cache( + self, + *model_args, + return_cache_object: bool = True, + cache_as_dict: bool = False, + remove_batch_dim: bool = False, + **kwargs, + ) -> Tuple[ + Float[torch.Tensor, "batch pos d_vocab"], + Union[ActivationCache, Dict[str, torch.Tensor]], + ]: + """ + Wrapper around run_with_cache in HookedRootModule. 
If return_cache_object is True, this will return an ActivationCache object, with a bunch of useful HookedTransformer specific methods, otherwise it will return a dictionary of activations as in HookedRootModule. This function was copied directly from HookedTransformer. + """ + out, cache_dict = super().run_with_cache( + *model_args, remove_batch_dim=remove_batch_dim, **kwargs + ) + if return_cache_object: + if not cache_as_dict: + cache = ActivationCache( + cache_dict, self, has_batch_dim=not remove_batch_dim + ) + return out, cache + else: + return out, None + + def score(self, sequences: dict, cache=False, cache_as_dict=False): if cache: logits, cache = self.run_with_cache( - sequences["input_ids"], sequences["attention_mask"] + sequences["input_ids"], sequences["attention_mask"], cache_as_dict=cache_as_dict ) return logits, cache logits = self.forward(sequences["input_ids"], sequences["attention_mask"]) - return logits, logits + return logits, None def patch( self, @@ -205,4 +220,4 @@ def patch( patched_output = self._patch_funcs[patch_type](**patching_kwargs) if self._return_cache: return patched_output, cache - return patched_output + return patched_output, None diff --git a/src/mechir/modelling/dot.py b/src/mechir/modelling/dot.py index 41b5d8a..76b7275 100644 --- a/src/mechir/modelling/dot.py +++ b/src/mechir/modelling/dot.py @@ -1,18 +1,17 @@ -from typing import Callable +from typing import Callable, Dict, Tuple, Union import logging import os import torch from jaxtyping import Float -from transformers import AutoModel, AutoTokenizer, AutoConfig +from transformers import AutoModel, AutoTokenizer from transformer_lens.ActivationCache import ActivationCache from transformer_lens.hook_points import HookedRootModule import transformer_lens.utils as utils -from .patched import PatchedMixin -from .sae import SAEMixin -from .hooked.HookedDistilBert import HookedDistilBert -from .hooked.HookedEncoder import HookedEncoder -from .hooked.loading_from_pretrained import 
get_official_model_name -from ..util import batched_dot_product, linear_rank_function +from mechir.modelling.patched import PatchedMixin +from mechir.modelling.sae import SAEMixin +from mechir.modelling.hooked.loading_from_pretrained import get_official_model_name +from mechir.util import batched_dot_product, linear_rank_function +from mechir.modelling.architectures import HookedEncoder logger = logging.getLogger(__name__) @@ -22,24 +21,13 @@ } -def get_hooked(architecture): - huggingface_token = os.environ.get("HF_TOKEN", None) - hf_config = AutoConfig.from_pretrained( - get_official_model_name(architecture), token=huggingface_token - ) - architecture = hf_config.architectures[0] - if "distilbert" in architecture.lower(): - return HookedDistilBert - return HookedEncoder - - class Dot(HookedRootModule, PatchedMixin, SAEMixin): def __init__( self, model_name_or_path: str, pooling_type: str = "cls", tokenizer=None, - special_token: str = "X", + special_token: str = "a", return_cache: bool = False, ) -> None: super().__init__() @@ -55,7 +43,7 @@ def __init__( self.__hf_model = ( AutoModel.from_pretrained(model_name_or_path).eval().to(self._device) ) - self._model = get_hooked(model_name_or_path).from_pretrained( + self._model = HookedEncoder.from_pretrained( self.model_name_or_path, device=self._device, hf_model=self.__hf_model ) @@ -71,7 +59,12 @@ def forward( attention_mask: Float[torch.Tensor, "batch seq"], token_type_ids: Float[torch.Tensor, "batch seq"] = None, ): - model_output = self._model(input=input_ids, attention_mask=attention_mask, token_type_ids=token_type_ids, return_type="embeddings") + model_output = self._model( + input=input_ids, + attention_mask=attention_mask, + token_type_ids=token_type_ids, + return_type="embeddings", + ) return self._pooling(model_output) def get_act_patch_block_every( @@ -168,17 +161,43 @@ def get_act_patch_attn_head_by_pos( results[index] = patching_metric(output, scores, scores_p).mean() return results - - def score(self, 
queries: dict, documents: dict, reps_q=None, cache=False): + + def run_with_cache( + self, + *model_args, + return_cache_object: bool = True, + cache_as_dict: bool = False, + remove_batch_dim: bool = False, + **kwargs, + ) -> Tuple[ + Float[torch.Tensor, "batch pos d_vocab"], + Union[ActivationCache, Dict[str, torch.Tensor]], + ]: + """ + Wrapper around run_with_cache in HookedRootModule. If return_cache_object is True, this will return an ActivationCache object, with a bunch of useful HookedTransformer specific methods, otherwise it will return a dictionary of activations as in HookedRootModule. This function was copied directly from HookedTransformer. + """ + out, cache_dict = super().run_with_cache( + *model_args, remove_batch_dim=remove_batch_dim, **kwargs + ) + if return_cache_object: + if not cache_as_dict: + cache = ActivationCache( + cache_dict, self, has_batch_dim=not remove_batch_dim + ) + return out, cache + else: + return out, None + + def score(self, queries: dict, documents: dict, reps_q=None, cache=False, cache_as_dict=False): if reps_q is None: reps_q = self.forward(queries["input_ids"], queries["attention_mask"]) if cache: reps_d, cache_d = self.run_with_cache( - documents["input_ids"], documents["attention_mask"] + documents["input_ids"], documents["attention_mask"], cache_as_dict=cache_as_dict ) return batched_dot_product(reps_q, reps_d), reps_q, reps_d, cache_d reps_d = self.forward(documents["input_ids"], documents["attention_mask"]) - return batched_dot_product(reps_q, reps_d), reps_q, reps_d + return batched_dot_product(reps_q, reps_d), reps_q, reps_d, None def patch( self, @@ -192,7 +211,7 @@ def patch( assert ( patch_type in self._patch_funcs ), f"Patch type {patch_type} not recognized. 
Choose from {self._patch_funcs.keys()}" - scores, reps_q, _ = self.score(queries, documents) + scores, reps_q, _, _ = self.score(queries, documents) scores_p, _, _, cache_d = self.score( queries, documents_p, cache=True, reps_q=reps_q ) @@ -210,4 +229,4 @@ def patch( patched_output = self._patch_funcs[patch_type](**patching_kwargs) if self._return_cache: return patched_output, cache_d - return patched_output # PatchingOutput(output, scores, scores_p) + return patched_output, None # PatchingOutput(output, scores, scores_p) diff --git a/src/mechir/modelling/hooked/HookedElectra.py b/src/mechir/modelling/hooked/HookedElectra.py deleted file mode 100644 index 7e6e18f..0000000 --- a/src/mechir/modelling/hooked/HookedElectra.py +++ /dev/null @@ -1,204 +0,0 @@ -"""Hooked Encoder. - -Contains a BERT style model. This is separate from :class:`transformer_lens.HookedTransformer` -because it has a significantly different architecture to e.g. GPT style transformers. -""" - -from __future__ import annotations - -import logging -from typing import Dict, Optional, Tuple, Union, overload - -import torch -from jaxtyping import Float, Int -from torch import nn -from typing_extensions import Literal -from .HookedTransformerConfig import HookedTransformerConfig -from transformer_lens.ActivationCache import ActivationCache -from .HookedEncoder import HookedEncoder -from transformer_lens.hook_points import HookPoint -from .linear import ClassificationHead, HiddenLinear -from . import loading_from_pretrained as loading - - -class ElectraClassificationHead(nn.Module): - """ - Transforms ELECTRA embeddings into logits. The purpose of this module is to predict masked tokens in a sentence. 
- """ - - def __init__(self, cfg: Union[Dict, HookedTransformerConfig]): - super().__init__() - self.cfg = HookedTransformerConfig.unwrap(cfg) - self.dense = HiddenLinear(cfg) - self.out_proj = ClassificationHead(cfg) - self.activation = nn.GELU() - - self.hook_pre = HookPoint() # [batch, pos, d_mlp] - self.hook_post = HookPoint() # [batch, pos, d_mlp] - - def forward(self, resid: Float[torch.Tensor, "batch d_model"]) -> torch.Tensor: - pre_act = self.hook_pre(self.dense(resid)) - post_act = self.hook_post(self.activation(pre_act)) - return self.out_proj(post_act) - - -class HookedElectraForSequenceClassification(HookedEncoder): - """ - This class implements a BERT-style encoder for ELECTRA using the components in ./components.py, with HookPoints on every interesting activation. It inherits from HookedRootModule. - - Limitations: - - The current MVP implementation supports only the masked language modelling (MLM) task. Next sentence prediction (NSP), causal language modelling, and other tasks are not yet supported. - - Also note that model does not include dropouts, which may lead to inconsistent results from training or fine-tuning. - - Like HookedTransformer, it can have a pretrained Transformer's weights loaded via `.from_pretrained`. There are a few features you might know from HookedTransformer which are not yet supported: - - There is no preprocessing (e.g. 
LayerNorm folding) when loading a pretrained model - - The model only accepts tokens as inputs, and not strings, or lists of strings - """ - - def __init__(self, cfg, tokenizer=None, move_to_device=True, **kwargs): - super().__init__(cfg, tokenizer, move_to_device, **kwargs) - self.classifier = ElectraClassificationHead(cfg) - self.setup() - - @overload - def forward( - self, - input: Int[torch.Tensor, "batch pos"], - return_type: Literal["logits"], - token_type_ids: Optional[Int[torch.Tensor, "batch pos"]] = None, - attention_mask: Optional[Int[torch.Tensor, "batch pos"]] = None, - ) -> Float[torch.Tensor, "batch n_labels"]: ... - - @overload - def forward( - self, - input: Int[torch.Tensor, "batch pos"], - return_type: Literal[None], - token_type_ids: Optional[Int[torch.Tensor, "batch pos"]] = None, - attention_mask: Optional[Int[torch.Tensor, "batch pos"]] = None, - ) -> Optional[Float[torch.Tensor, "batch n_labels"]]: ... - - def forward( - self, - input: Int[torch.Tensor, "batch pos"], - return_type: Optional[str] = "logits", - token_type_ids: Optional[Int[torch.Tensor, "batch pos"]] = None, - attention_mask: Optional[Int[torch.Tensor, "batch pos"]] = None, - ) -> Optional[Float[torch.Tensor, "batch n_labels"]]: - """Input must be a batch of tokens. Strings and lists of strings are not yet supported. - - return_type Optional[str]: The type of output to return. Can be one of: None (return nothing, don't calculate logits), or 'logits' (return logits). - - token_type_ids Optional[torch.Tensor]: Binary ids indicating whether a token belongs to sequence A or B. For example, for two sentences: "[CLS] Sentence A [SEP] Sentence B [SEP]", token_type_ids would be [0, 0, ..., 0, 1, ..., 1, 1]. `0` represents tokens from Sentence A, `1` from Sentence B. If not provided, BERT assumes a single sequence input. Typically, shape is (batch_size, sequence_length). 
- - attention_mask: Optional[torch.Tensor]: A binary mask which indicates which tokens should be attended to (1) and which should be ignored (0). Primarily used for padding variable-length sentences in a batch. For instance, in a batch with sentences of differing lengths, shorter sentences are padded with 0s on the right. If not provided, the model assumes all tokens should be attended to. - """ - - hidden = super().forward( - input, - token_type_ids=token_type_ids, - return_type="embeddings", - attention_mask=attention_mask, - ) - if return_type == "embeddings": - return hidden - logits = self.classifier(hidden[:, 0, :]) - - if return_type is None: - return None - return logits - - @overload - def run_with_cache( - self, *model_args, return_cache_object: Literal[True] = True, **kwargs - ) -> Tuple[Float[torch.Tensor, "batch pos d_vocab"], ActivationCache]: ... - - @overload - def run_with_cache( - self, *model_args, return_cache_object: Literal[False], **kwargs - ) -> Tuple[Float[torch.Tensor, "batch pos d_vocab"], Dict[str, torch.Tensor]]: ... - - def run_with_cache( - self, - *model_args, - return_cache_object: bool = True, - remove_batch_dim: bool = False, - **kwargs, - ) -> Tuple[ - Float[torch.Tensor, "batch pos d_vocab"], - Union[ActivationCache, Dict[str, torch.Tensor]], - ]: - """ - Wrapper around run_with_cache in HookedRootModule. If return_cache_object is True, this will return an ActivationCache object, with a bunch of useful HookedTransformer specific methods, otherwise it will return a dictionary of activations as in HookedRootModule. This function was copied directly from HookedTransformer. 
- """ - out, cache_dict = super().run_with_cache( - *model_args, remove_batch_dim=remove_batch_dim, **kwargs - ) - if return_cache_object: - cache = ActivationCache( - cache_dict, self, has_batch_dim=not remove_batch_dim - ) - return out, cache - else: - return out, cache_dict - - @classmethod - def from_pretrained( - cls, - model_name: str, - checkpoint_index: Optional[int] = None, - checkpoint_value: Optional[int] = None, - hf_model=None, - device: Optional[str] = None, - tokenizer=None, - move_to_device=True, - dtype=torch.float32, - **from_pretrained_kwargs, - ) -> HookedElectraForSequenceClassification: - """Loads in the pretrained weights from huggingface. Currently supports loading weight from HuggingFace BertForMaskedLM. Unlike HookedTransformer, this does not yet do any preprocessing on the model.""" - logging.warning( - "Support for BERT in TransformerLens is currently experimental, until such a time when it has feature " - "parity with HookedTransformer and has been tested on real research tasks. Until then, backward " - "compatibility is not guaranteed. Please see the docs for information on the limitations of the current " - "implementation." - "\n" - "If using BERT for interpretability research, keep in mind that BERT has some significant architectural " - "differences to GPT. For example, LayerNorms are applied *after* the attention and MLP components, meaning " - "that the last LayerNorm in a block cannot be folded." 
- ) - - assert not ( - from_pretrained_kwargs.get("load_in_8bit", False) - or from_pretrained_kwargs.get("load_in_4bit", False) - ), "Quantization not supported" - - if "torch_dtype" in from_pretrained_kwargs: - dtype = from_pretrained_kwargs["torch_dtype"] - - official_model_name = loading.get_official_model_name(model_name) - - cfg = loading.get_pretrained_model_config( - official_model_name, - checkpoint_index=checkpoint_index, - checkpoint_value=checkpoint_value, - fold_ln=False, - device=device, - n_devices=1, - dtype=dtype, - **from_pretrained_kwargs, - ) - - state_dict = loading.get_pretrained_state_dict( - official_model_name, cfg, hf_model, dtype=dtype, **from_pretrained_kwargs - ) - - model = cls(cfg, tokenizer, move_to_device=False) - - model.load_state_dict(state_dict, strict=False) - - if move_to_device: - model.to(cfg.device) - - print(f"Loaded pretrained model {model_name} into HookedTransformer") - - return model diff --git a/src/mechir/modelling/hooked/HookedEncoder.py b/src/mechir/modelling/hooked/HookedEncoder.py deleted file mode 100644 index 38619cf..0000000 --- a/src/mechir/modelling/hooked/HookedEncoder.py +++ /dev/null @@ -1,408 +0,0 @@ -"""Hooked Encoder. - -Contains a BERT style model. This is separate from :class:`transformer_lens.HookedTransformer` -because it has a significantly different architecture to e.g. GPT style transformers. -""" - -from __future__ import annotations - -import logging -import os -from typing import Dict, List, Optional, Tuple, Union, cast, overload - -import torch -from einops import repeat -from jaxtyping import Float, Int -from torch import nn -from transformers import AutoTokenizer -from typing_extensions import Literal - -from . 
import loading_from_pretrained as loading -from transformer_lens.ActivationCache import ActivationCache -from transformer_lens.components import BertBlock, BertEmbed, BertMLMHead, Unembed -from transformer_lens.FactoredMatrix import FactoredMatrix -from transformer_lens.hook_points import HookedRootModule, HookPoint -from .HookedTransformerConfig import HookedTransformerConfig -from transformer_lens.utilities import devices - - -class HookedEncoder(HookedRootModule): - """ - This class implements a BERT-style encoder using the components in ./components.py, with HookPoints on every interesting activation. It inherits from HookedRootModule. - - Limitations: - - The current MVP implementation supports only the masked language modelling (MLM) task. Next sentence prediction (NSP), causal language modelling, and other tasks are not yet supported. - - Also note that model does not include dropouts, which may lead to inconsistent results from training or fine-tuning. - - Like HookedTransformer, it can have a pretrained Transformer's weights loaded via `.from_pretrained`. There are a few features you might know from HookedTransformer which are not yet supported: - - There is no preprocessing (e.g. LayerNorm folding) when loading a pretrained model - - The model only accepts tokens as inputs, and not strings, or lists of strings - """ - - def __init__(self, cfg, tokenizer=None, move_to_device=True, **kwargs): - super().__init__() - if isinstance(cfg, Dict): - cfg = HookedTransformerConfig(**cfg) - elif isinstance(cfg, str): - raise ValueError( - "Please pass in a config dictionary or HookedTransformerConfig object. If you want to load a pretrained model, use HookedEncoder.from_pretrained() instead." 
- ) - self.cfg = cfg - - assert ( - self.cfg.n_devices == 1 - ), "Multiple devices not supported for HookedEncoder" - if tokenizer is not None: - self.tokenizer = tokenizer - elif self.cfg.tokenizer_name is not None: - huggingface_token = os.environ.get("HF_TOKEN", None) - self.tokenizer = AutoTokenizer.from_pretrained( - self.cfg.tokenizer_name, - token=huggingface_token, - ) - else: - self.tokenizer = None - - if self.cfg.d_vocab == -1: - # If we have a tokenizer, vocab size can be inferred from it. - assert ( - self.tokenizer is not None - ), "Must provide a tokenizer if d_vocab is not provided" - self.cfg.d_vocab = max(self.tokenizer.vocab.values()) + 1 - if self.cfg.d_vocab_out == -1: - self.cfg.d_vocab_out = self.cfg.d_vocab - - self.embed = BertEmbed(self.cfg) - self.blocks = nn.ModuleList( - [BertBlock(self.cfg) for _ in range(self.cfg.n_layers)] - ) - self.mlm_head = BertMLMHead(cfg) - self.unembed = Unembed(self.cfg) - - self.hook_full_embed = HookPoint() - - if move_to_device: - self.to(self.cfg.device) - - self.setup() - - @overload - def forward( - self, - input: Int[torch.Tensor, "batch pos"], - return_type: Literal["logits"], - token_type_ids: Optional[Int[torch.Tensor, "batch pos"]] = None, - attention_mask: Optional[Int[torch.Tensor, "batch pos"]] = None, - ) -> Float[torch.Tensor, "batch pos d_vocab"]: ... - - @overload - def forward( - self, - input: Int[torch.Tensor, "batch pos"], - return_type: Literal[None], - token_type_ids: Optional[Int[torch.Tensor, "batch pos"]] = None, - attention_mask: Optional[Int[torch.Tensor, "batch pos"]] = None, - ) -> Optional[Float[torch.Tensor, "batch pos d_vocab"]]: ... - - def forward( - self, - input: Int[torch.Tensor, "batch pos"], - return_type: Optional[str] = "logits", - token_type_ids: Optional[Int[torch.Tensor, "batch pos"]] = None, - attention_mask: Optional[Int[torch.Tensor, "batch pos"]] = None, - ) -> Optional[Float[torch.Tensor, "batch pos d_vocab"]]: - """Input must be a batch of tokens. 
Strings and lists of strings are not yet supported. - - return_type Optional[str]: The type of output to return. Can be one of: None (return nothing, don't calculate logits), or 'logits' (return logits). - - token_type_ids Optional[torch.Tensor]: Binary ids indicating whether a token belongs to sequence A or B. For example, for two sentences: "[CLS] Sentence A [SEP] Sentence B [SEP]", token_type_ids would be [0, 0, ..., 0, 1, ..., 1, 1]. `0` represents tokens from Sentence A, `1` from Sentence B. If not provided, BERT assumes a single sequence input. Typically, shape is (batch_size, sequence_length). - - attention_mask: Optional[torch.Tensor]: A binary mask which indicates which tokens should be attended to (1) and which should be ignored (0). Primarily used for padding variable-length sentences in a batch. For instance, in a batch with sentences of differing lengths, shorter sentences are padded with 0s on the right. If not provided, the model assumes all tokens should be attended to. - """ - - tokens = input - - if tokens.device.type != self.cfg.device: - tokens = tokens.to(self.cfg.device) - if attention_mask is not None: - attention_mask = attention_mask.to(self.cfg.device) - - resid = self.hook_full_embed(self.embed(tokens, token_type_ids)) - - large_negative_number = -torch.inf - mask = ( - repeat(1 - attention_mask, "batch pos -> batch 1 1 pos") - if attention_mask is not None - else None - ) - additive_attention_mask = ( - torch.where(mask == 1, large_negative_number, 0) - if mask is not None - else None - ) - - for block in self.blocks: - resid = block(resid, additive_attention_mask) - - if return_type == "embeddings": - return resid - - resid = self.mlm_head(resid) - - if return_type is None: - return None - - logits = self.unembed(resid) - return logits - - @overload - def run_with_cache( - self, *model_args, return_cache_object: Literal[True] = True, **kwargs - ) -> Tuple[Float[torch.Tensor, "batch pos d_vocab"], ActivationCache]: ... 
- - @overload - def run_with_cache( - self, *model_args, return_cache_object: Literal[False], **kwargs - ) -> Tuple[Float[torch.Tensor, "batch pos d_vocab"], Dict[str, torch.Tensor]]: ... - - def run_with_cache( - self, - *model_args, - return_cache_object: bool = True, - remove_batch_dim: bool = False, - **kwargs, - ) -> Tuple[ - Float[torch.Tensor, "batch pos d_vocab"], - Union[ActivationCache, Dict[str, torch.Tensor]], - ]: - """ - Wrapper around run_with_cache in HookedRootModule. If return_cache_object is True, this will return an ActivationCache object, with a bunch of useful HookedTransformer specific methods, otherwise it will return a dictionary of activations as in HookedRootModule. This function was copied directly from HookedTransformer. - """ - out, cache_dict = super().run_with_cache( - *model_args, remove_batch_dim=remove_batch_dim, **kwargs - ) - if return_cache_object: - cache = ActivationCache( - cache_dict, self, has_batch_dim=not remove_batch_dim - ) - return out, cache - else: - return out, cache_dict - - def to( # type: ignore - self, - device_or_dtype: Union[torch.device, str, torch.dtype], - print_details: bool = True, - ): - return devices.move_to_and_update_config(self, device_or_dtype, print_details) - - def cuda(self): - # Wrapper around cuda that also changes self.cfg.device - return self.to("cuda") - - def cpu(self): - # Wrapper around cuda that also changes self.cfg.device - return self.to("cpu") - - def mps(self): - # Wrapper around cuda that also changes self.cfg.device - return self.to("mps") - - @classmethod - def from_pretrained( - cls, - model_name: str, - checkpoint_index: Optional[int] = None, - checkpoint_value: Optional[int] = None, - hf_model=None, - device: Optional[str] = None, - tokenizer=None, - move_to_device=True, - dtype=torch.float32, - **from_pretrained_kwargs, - ) -> HookedEncoder: - """Loads in the pretrained weights from huggingface. Currently supports loading weight from HuggingFace BertForMaskedLM. 
Unlike HookedTransformer, this does not yet do any preprocessing on the model.""" - logging.warning( - "Support for BERT in TransformerLens is currently experimental, until such a time when it has feature " - "parity with HookedTransformer and has been tested on real research tasks. Until then, backward " - "compatibility is not guaranteed. Please see the docs for information on the limitations of the current " - "implementation." - "\n" - "If using BERT for interpretability research, keep in mind that BERT has some significant architectural " - "differences to GPT. For example, LayerNorms are applied *after* the attention and MLP components, meaning " - "that the last LayerNorm in a block cannot be folded." - ) - - assert not ( - from_pretrained_kwargs.get("load_in_8bit", False) - or from_pretrained_kwargs.get("load_in_4bit", False) - ), "Quantization not supported" - - if "torch_dtype" in from_pretrained_kwargs: - dtype = from_pretrained_kwargs["torch_dtype"] - - official_model_name = loading.get_official_model_name(model_name) - - cfg = loading.get_pretrained_model_config( - official_model_name, - checkpoint_index=checkpoint_index, - checkpoint_value=checkpoint_value, - fold_ln=False, - device=device, - n_devices=1, - dtype=dtype, - **from_pretrained_kwargs, - ) - - state_dict = loading.get_pretrained_state_dict( - official_model_name, cfg, hf_model, dtype=dtype, **from_pretrained_kwargs - ) - - model = cls(cfg, tokenizer, move_to_device=False) - - model.load_state_dict(state_dict, strict=False) - - if move_to_device: - model.to(cfg.device) - - print(f"Loaded pretrained model {model_name} into HookedEncoder") - - return model - - @property - def W_U(self) -> Float[torch.Tensor, "d_model d_vocab"]: - """ - Convenience to get the unembedding matrix (ie the linear map from the final residual stream to the output logits) - """ - return self.unembed.W_U - - @property - def b_U(self) -> Float[torch.Tensor, "d_vocab"]: - """ - Convenience to get the unembedding bias - 
""" - return self.unembed.b_U - - @property - def W_E(self) -> Float[torch.Tensor, "d_vocab d_model"]: - """ - Convenience to get the embedding matrix - """ - return self.embed.embed.W_E - - @property - def W_pos(self) -> Float[torch.Tensor, "n_ctx d_model"]: - """ - Convenience function to get the positional embedding. Only works on models with absolute positional embeddings! - """ - return self.embed.pos_embed.W_pos - - @property - def W_E_pos(self) -> Float[torch.Tensor, "d_vocab+n_ctx d_model"]: - """ - Concatenated W_E and W_pos. Used as a full (overcomplete) basis of the input space, useful for full QK and full OV circuits. - """ - return torch.cat([self.W_E, self.W_pos], dim=0) - - @property - def W_K(self) -> Float[torch.Tensor, "n_layers n_heads d_model d_head"]: - """Stacks the key weights across all layers""" - return torch.stack( - [cast(BertBlock, block).attn.W_K for block in self.blocks], dim=0 - ) - - @property - def W_Q(self) -> Float[torch.Tensor, "n_layers n_heads d_model d_head"]: - """Stacks the query weights across all layers""" - return torch.stack( - [cast(BertBlock, block).attn.W_Q for block in self.blocks], dim=0 - ) - - @property - def W_V(self) -> Float[torch.Tensor, "n_layers n_heads d_model d_head"]: - """Stacks the value weights across all layers""" - return torch.stack( - [cast(BertBlock, block).attn.W_V for block in self.blocks], dim=0 - ) - - @property - def W_O(self) -> Float[torch.Tensor, "n_layers n_heads d_head d_model"]: - """Stacks the attn output weights across all layers""" - return torch.stack( - [cast(BertBlock, block).attn.W_O for block in self.blocks], dim=0 - ) - - @property - def W_in(self) -> Float[torch.Tensor, "n_layers d_model d_mlp"]: - """Stacks the MLP input weights across all layers""" - return torch.stack( - [cast(BertBlock, block).mlp.W_in for block in self.blocks], dim=0 - ) - - @property - def W_out(self) -> Float[torch.Tensor, "n_layers d_mlp d_model"]: - """Stacks the MLP output weights across all 
layers""" - return torch.stack( - [cast(BertBlock, block).mlp.W_out for block in self.blocks], dim=0 - ) - - @property - def b_K(self) -> Float[torch.Tensor, "n_layers n_heads d_head"]: - """Stacks the key biases across all layers""" - return torch.stack( - [cast(BertBlock, block).attn.b_K for block in self.blocks], dim=0 - ) - - @property - def b_Q(self) -> Float[torch.Tensor, "n_layers n_heads d_head"]: - """Stacks the query biases across all layers""" - return torch.stack( - [cast(BertBlock, block).attn.b_Q for block in self.blocks], dim=0 - ) - - @property - def b_V(self) -> Float[torch.Tensor, "n_layers n_heads d_head"]: - """Stacks the value biases across all layers""" - return torch.stack( - [cast(BertBlock, block).attn.b_V for block in self.blocks], dim=0 - ) - - @property - def b_O(self) -> Float[torch.Tensor, "n_layers d_model"]: - """Stacks the attn output biases across all layers""" - return torch.stack( - [cast(BertBlock, block).attn.b_O for block in self.blocks], dim=0 - ) - - @property - def b_in(self) -> Float[torch.Tensor, "n_layers d_mlp"]: - """Stacks the MLP input biases across all layers""" - return torch.stack( - [cast(BertBlock, block).mlp.b_in for block in self.blocks], dim=0 - ) - - @property - def b_out(self) -> Float[torch.Tensor, "n_layers d_model"]: - """Stacks the MLP output biases across all layers""" - return torch.stack( - [cast(BertBlock, block).mlp.b_out for block in self.blocks], dim=0 - ) - - @property - def QK(self) -> FactoredMatrix: # [n_layers, n_heads, d_model, d_model] - """Returns a FactoredMatrix object with the product of the Q and K matrices for each layer and head. 
- Useful for visualizing attention patterns.""" - return FactoredMatrix(self.W_Q, self.W_K.transpose(-2, -1)) - - @property - def OV(self) -> FactoredMatrix: # [n_layers, n_heads, d_model, d_model] - """Returns a FactoredMatrix object with the product of the O and V matrices for each layer and head.""" - return FactoredMatrix(self.W_V, self.W_O) - - def all_head_labels(self) -> List[str]: - """Returns a list of strings with the format "L{l}H{h}", where l is the layer index and h is the head index.""" - return [ - f"L{l}H{h}" - for l in range(self.cfg.n_layers) - for h in range(self.cfg.n_heads) - ] diff --git a/src/mechir/modelling/hooked/HookedEncoderForSequenceClassification.py b/src/mechir/modelling/hooked/HookedEncoderForSequenceClassification.py deleted file mode 100644 index 181c504..0000000 --- a/src/mechir/modelling/hooked/HookedEncoderForSequenceClassification.py +++ /dev/null @@ -1,182 +0,0 @@ -"""Hooked Encoder. - -Contains a BERT style model. This is separate from :class:`transformer_lens.HookedTransformer` -because it has a significantly different architecture to e.g. GPT style transformers. -""" - -from __future__ import annotations - -import logging -from typing import Dict, Optional, Tuple, Union, overload - -import torch -from jaxtyping import Float, Int -from torch import nn -from typing_extensions import Literal - -from transformer_lens.ActivationCache import ActivationCache -from .HookedEncoder import HookedEncoder -from .linear import ClassificationHead -from . import loading_from_pretrained as loading - - -class HookedEncoderForSequenceClassification(HookedEncoder): - """ - This class implements a BERT-style encoder using the components in ./components.py, with HookPoints on every interesting activation. It inherits from HookedRootModule. - - Limitations: - - The current MVP implementation supports only the masked language modelling (MLM) task. Next sentence prediction (NSP), causal language modelling, and other tasks are not yet supported. 
- - Also note that model does not include dropouts, which may lead to inconsistent results from training or fine-tuning. - - Like HookedTransformer, it can have a pretrained Transformer's weights loaded via `.from_pretrained`. There are a few features you might know from HookedTransformer which are not yet supported: - - There is no preprocessing (e.g. LayerNorm folding) when loading a pretrained model - - The model only accepts tokens as inputs, and not strings, or lists of strings - """ - - def __init__(self, cfg, tokenizer=None, move_to_device=True, **kwargs): - super().__init__(cfg, tokenizer, move_to_device, **kwargs) - self.classifier = ClassificationHead(cfg) - self.setup() - - @overload - def forward( - self, - input: Int[torch.Tensor, "batch pos"], - return_type: Literal["logits"], - token_type_ids: Optional[Int[torch.Tensor, "batch pos"]] = None, - attention_mask: Optional[Int[torch.Tensor, "batch pos"]] = None, - ) -> Float[torch.Tensor, "batch pos d_vocab"]: ... - - @overload - def forward( - self, - input: Int[torch.Tensor, "batch pos"], - return_type: Literal[None], - token_type_ids: Optional[Int[torch.Tensor, "batch pos"]] = None, - attention_mask: Optional[Int[torch.Tensor, "batch pos"]] = None, - ) -> Optional[Float[torch.Tensor, "batch pos d_vocab"]]: ... - - def forward( - self, - input: Int[torch.Tensor, "batch pos"], - return_type: Optional[str] = "logits", - token_type_ids: Optional[Int[torch.Tensor, "batch pos"]] = None, - attention_mask: Optional[Int[torch.Tensor, "batch pos"]] = None, - ) -> Optional[Float[torch.Tensor, "batch pos d_vocab"]]: - """Input must be a batch of tokens. Strings and lists of strings are not yet supported. - - return_type Optional[str]: The type of output to return. Can be one of: None (return nothing, don't calculate logits), or 'logits' (return logits). - - token_type_ids Optional[torch.Tensor]: Binary ids indicating whether a token belongs to sequence A or B. 
For example, for two sentences: "[CLS] Sentence A [SEP] Sentence B [SEP]", token_type_ids would be [0, 0, ..., 0, 1, ..., 1, 1]. `0` represents tokens from Sentence A, `1` from Sentence B. If not provided, BERT assumes a single sequence input. Typically, shape is (batch_size, sequence_length). - - attention_mask: Optional[torch.Tensor]: A binary mask which indicates which tokens should be attended to (1) and which should be ignored (0). Primarily used for padding variable-length sentences in a batch. For instance, in a batch with sentences of differing lengths, shorter sentences are padded with 0s on the right. If not provided, the model assumes all tokens should be attended to. - """ - - hidden = super().forward( - input, - token_type_ids=token_type_ids, - return_type="embeddings", - attention_mask=attention_mask, - ) - if return_type == "embeddings": - return hidden - logits = self.classifier(hidden[:, 0, :]) - - if return_type is None: - return None - return logits - - @overload - def run_with_cache( - self, *model_args, return_cache_object: Literal[True] = True, **kwargs - ) -> Tuple[Float[torch.Tensor, "batch n_labels"], ActivationCache]: ... - - @overload - def run_with_cache( - self, *model_args, return_cache_object: Literal[False], **kwargs - ) -> Tuple[Float[torch.Tensor, "batch n_labels"], Dict[str, torch.Tensor]]: ... - - def run_with_cache( - self, - *model_args, - return_cache_object: bool = True, - remove_batch_dim: bool = False, - **kwargs, - ) -> Tuple[ - Float[torch.Tensor, "batch n_labels"], - Union[ActivationCache, Dict[str, torch.Tensor]], - ]: - """ - Wrapper around run_with_cache in HookedRootModule. If return_cache_object is True, this will return an ActivationCache object, with a bunch of useful HookedTransformer specific methods, otherwise it will return a dictionary of activations as in HookedRootModule. This function was copied directly from HookedTransformer. 
- """ - out, cache_dict = super().run_with_cache( - *model_args, remove_batch_dim=remove_batch_dim, **kwargs - ) - if return_cache_object: - cache = ActivationCache( - cache_dict, self, has_batch_dim=not remove_batch_dim - ) - return out, cache - else: - return out, cache_dict - - @classmethod - def from_pretrained( - cls, - model_name: str, - checkpoint_index: Optional[int] = None, - checkpoint_value: Optional[int] = None, - hf_model=None, - device: Optional[str] = None, - tokenizer=None, - move_to_device=True, - dtype=torch.float32, - **from_pretrained_kwargs, - ) -> HookedEncoderForSequenceClassification: - """Loads in the pretrained weights from huggingface. Currently supports loading weight from HuggingFace BertForMaskedLM. Unlike HookedTransformer, this does not yet do any preprocessing on the model.""" - logging.warning( - "Support for BERT in TransformerLens is currently experimental, until such a time when it has feature " - "parity with HookedTransformer and has been tested on real research tasks. Until then, backward " - "compatibility is not guaranteed. Please see the docs for information on the limitations of the current " - "implementation." - "\n" - "If using BERT for interpretability research, keep in mind that BERT has some significant architectural " - "differences to GPT. For example, LayerNorms are applied *after* the attention and MLP components, meaning " - "that the last LayerNorm in a block cannot be folded." 
- ) - - assert not ( - from_pretrained_kwargs.get("load_in_8bit", False) - or from_pretrained_kwargs.get("load_in_4bit", False) - ), "Quantization not supported" - - if "torch_dtype" in from_pretrained_kwargs: - dtype = from_pretrained_kwargs["torch_dtype"] - - official_model_name = loading.get_official_model_name(model_name) - - cfg = loading.get_pretrained_model_config( - official_model_name, - checkpoint_index=checkpoint_index, - checkpoint_value=checkpoint_value, - fold_ln=False, - device=device, - n_devices=1, - dtype=dtype, - **from_pretrained_kwargs, - ) - - state_dict = loading.get_pretrained_state_dict( - official_model_name, cfg, hf_model, dtype=dtype, **from_pretrained_kwargs - ) - - model = cls(cfg, tokenizer, move_to_device=False) - - model.load_state_dict(state_dict, strict=False) - - if move_to_device: - model.to(cfg.device) - - print(f"Loaded pretrained model {model_name} into HookedTransformer") - - return model diff --git a/src/mechir/modelling/hooked/hooked_components.py b/src/mechir/modelling/hooked/components.py similarity index 60% rename from src/mechir/modelling/hooked/hooked_components.py rename to src/mechir/modelling/hooked/components.py index 90d83ec..f9c35a6 100644 --- a/src/mechir/modelling/hooked/hooked_components.py +++ b/src/mechir/modelling/hooked/components.py @@ -6,14 +6,14 @@ import einops import torch import torch.nn as nn -from jaxtyping import Int +from jaxtyping import Int, Float from transformer_lens.components import Embed, LayerNorm, PosEmbed, TokenTypeEmbed from transformer_lens.hook_points import HookPoint -from transformer_lens.HookedTransformerConfig import HookedTransformerConfig +from mechir.modelling.hooked.config import HookedTransformerConfig -class DistilBertEmbed(nn.Module): +class BertEmbed(nn.Module): """ Custom embedding layer for a BERT-like model. This module computes the sum of the token, positional and token-type embeddings and takes the layer norm of the result. 
""" @@ -23,27 +23,31 @@ def __init__(self, cfg: Union[Dict, HookedTransformerConfig]): self.cfg = HookedTransformerConfig.unwrap(cfg) self.embed = Embed(self.cfg) self.pos_embed = PosEmbed(self.cfg) - # self.token_type_embed = TokenTypeEmbed(self.cfg) + self.token_type_embed = TokenTypeEmbed(self.cfg) self.ln = LayerNorm(self.cfg) self.hook_embed = HookPoint() self.hook_pos_embed = HookPoint() - # self.hook_token_type_embed = HookPoint() + self.hook_token_type_embed = HookPoint() + self.use_token_type_ids = self.cfg.use_token_type_ids + def forward( self, input_ids: Int[torch.Tensor, "batch pos"], token_type_ids: Optional[Int[torch.Tensor, "batch pos"]] = None, - ): + ) -> Float[torch.Tensor, "batch pos d_model"]: base_index_id = torch.arange(input_ids.shape[1], device=input_ids.device) - index_ids = einops.repeat( - base_index_id, "pos -> batch pos", batch=input_ids.shape[0] - ) + index_ids = einops.repeat(base_index_id, "pos -> batch pos", batch=input_ids.shape[0]) + if token_type_ids is None: + token_type_ids = torch.zeros_like(input_ids) + word_embeddings_out = self.hook_embed(self.embed(input_ids)) position_embeddings_out = self.hook_pos_embed(self.pos_embed(index_ids)) + token_type_embeddings_out = self.hook_token_type_embed( + self.token_type_embed(token_type_ids) + ) if self.use_token_type_ids else torch.zeros_like(word_embeddings_out) - embeddings_out = ( - word_embeddings_out + position_embeddings_out - ) + embeddings_out = word_embeddings_out + position_embeddings_out + token_type_embeddings_out layer_norm_out = self.ln(embeddings_out) return layer_norm_out diff --git a/src/mechir/modelling/hooked/HookedTransformerConfig.py b/src/mechir/modelling/hooked/config.py similarity index 99% rename from src/mechir/modelling/hooked/HookedTransformerConfig.py rename to src/mechir/modelling/hooked/config.py index d3d085c..7ede4d6 100644 --- a/src/mechir/modelling/hooked/HookedTransformerConfig.py +++ b/src/mechir/modelling/hooked/config.py @@ -180,6 +180,7 @@ class 
HookedTransformerConfig: in Gemma-2 (see attn_scores_soft_cap for details). Defaults to -1.0, which means not set. num_labels (int): The number of labels for the classification task. Defaults to 1. + use_token_type_ids (bool): Whether to use token type ids. Defaults to True. """ n_layers: int @@ -244,6 +245,8 @@ class HookedTransformerConfig: attn_scores_soft_cap: float = -1.0 output_logits_soft_cap: float = -1.0 num_labels: int = 1 + use_token_type_ids: bool = True + use_mlp_head: bool = False def __post_init__(self): if self.n_heads == -1: diff --git a/src/mechir/modelling/hooked/conversion.py b/src/mechir/modelling/hooked/conversion.py index 8e2d626..1602324 100644 --- a/src/mechir/modelling/hooked/conversion.py +++ b/src/mechir/modelling/hooked/conversion.py @@ -1,13 +1,15 @@ import einops from functools import partial -from .loading_from_pretrained import register_with_transformer_lens -from .HookedTransformerConfig import HookedTransformerConfig +from mechir.modelling.hooked.loading_from_pretrained import ( + register_with_transformer_lens, +) +from mechir.modelling.hooked.config import HookedTransformerConfig def convert_distilbert_weights( distilbert, cfg: HookedTransformerConfig, sequence_classification=False, raw=False ): - embeddings = distilbert.embeddings + embeddings = distilbert.embeddings if not raw else distilbert.embeddings state_dict = { "embed.embed.W_E": embeddings.word_embeddings.weight, "embed.pos_embed.W_pos": embeddings.position_embeddings.weight, @@ -17,7 +19,7 @@ def convert_distilbert_weights( } for l in range(cfg.n_layers): - block = distilbert.transformer.layer[l] + block = distilbert.transformer.layer[l] if not raw else distilbert.transformer.layer[l] state_dict[f"blocks.{l}.attn.W_Q"] = einops.rearrange( block.attention.q_lin.weight, "(i h) m -> i m h", i=cfg.n_heads ) @@ -59,9 +61,13 @@ def convert_distilbert_weights( if not raw: if sequence_classification: - classification_head = distilbert.pre_classifier - 
state_dict["classifier.W"] = classification_head.weight - state_dict["classifier.b"] = classification_head.bias + if hasattr(distilbert, "pre_classifier") and hasattr(distilbert, "classifier"): + pre_classification_head = distilbert.pre_classifier + classification_head = distilbert.classifier + state_dict["classifier.dense.W"] = classification_head.weight + state_dict["classifier.dense.b"] = classification_head.bias + state_dict["classifier.out_proj.W"] = pre_classification_head.weight + state_dict["classifier.out_proj.b"] = pre_classification_head.bias return state_dict @@ -85,7 +91,11 @@ def convert_bert_based_weights( raw=False, model_name: str = "bert", ): - embeddings = getattr(bert, model_name).embeddings if not raw else bert.embeddings + if not hasattr(bert, "embeddings"): + bert = getattr(bert, model_name) + embeddings = bert.embeddings + else: + embeddings = bert.embeddings state_dict = { "embed.embed.W_E": embeddings.word_embeddings.weight, "embed.pos_embed.W_pos": embeddings.position_embeddings.weight, @@ -94,9 +104,14 @@ def convert_bert_based_weights( "embed.ln.b": embeddings.LayerNorm.bias, } + if not hasattr(bert, "encoder"): + encoder = getattr(bert, model_name).encoder + else: + encoder = bert.encoder + for l in range(cfg.n_layers): block = ( - getattr(bert, model_name).encoder.layer[l] + encoder.layer[l] if not raw else bert.encoder.layer[l] ) @@ -150,84 +165,15 @@ def convert_bert_based_weights( state_dict["classifier.W"] = classification_head.weight state_dict["classifier.b"] = classification_head.bias else: - if not "electra" in model_name: - mlm_head = bert.cls.predictions - state_dict["mlm_head.W"] = mlm_head.transform.dense.weight - state_dict["mlm_head.b"] = mlm_head.transform.dense.bias - state_dict["mlm_head.ln.w"] = mlm_head.transform.LayerNorm.weight - state_dict["mlm_head.ln.b"] = mlm_head.transform.LayerNorm.bias - # "unembed.W_U": mlm_head.decoder.weight.T, - state_dict["unembed.b_U"] = mlm_head.bias - # Note: BERT uses tied 
embeddings - state_dict["unembed.W_U"] = embeddings.word_embeddings.weight.T - - return state_dict - - -def convert_bert_weights( - bert, cfg: HookedTransformerConfig, sequence_classification=False, raw=False -): - print(dir(bert)) - embeddings = bert.bert.embeddings if not raw else bert.embeddings - state_dict = { - "embed.embed.W_E": embeddings.word_embeddings.weight, - "embed.pos_embed.W_pos": embeddings.position_embeddings.weight, - "embed.token_type_embed.W_token_type": embeddings.token_type_embeddings.weight, - "embed.ln.w": embeddings.LayerNorm.weight, - "embed.ln.b": embeddings.LayerNorm.bias, - } - - for l in range(cfg.n_layers): - block = bert.bert.encoder.layer[l] if not raw else bert.encoder.layer[l] - state_dict[f"blocks.{l}.attn.W_Q"] = einops.rearrange( - block.attention.self.query.weight, "(i h) m -> i m h", i=cfg.n_heads - ) - state_dict[f"blocks.{l}.attn.b_Q"] = einops.rearrange( - block.attention.self.query.bias, "(i h) -> i h", i=cfg.n_heads - ) - state_dict[f"blocks.{l}.attn.W_K"] = einops.rearrange( - block.attention.self.key.weight, "(i h) m -> i m h", i=cfg.n_heads - ) - state_dict[f"blocks.{l}.attn.b_K"] = einops.rearrange( - block.attention.self.key.bias, "(i h) -> i h", i=cfg.n_heads - ) - state_dict[f"blocks.{l}.attn.W_V"] = einops.rearrange( - block.attention.self.value.weight, "(i h) m -> i m h", i=cfg.n_heads - ) - state_dict[f"blocks.{l}.attn.b_V"] = einops.rearrange( - block.attention.self.value.bias, "(i h) -> i h", i=cfg.n_heads - ) - state_dict[f"blocks.{l}.attn.W_O"] = einops.rearrange( - block.attention.output.dense.weight, - "m (i h) -> i h m", - i=cfg.n_heads, - ) - state_dict[f"blocks.{l}.attn.b_O"] = block.attention.output.dense.bias - state_dict[f"blocks.{l}.ln1.w"] = block.attention.output.LayerNorm.weight - state_dict[f"blocks.{l}.ln1.b"] = block.attention.output.LayerNorm.bias - state_dict[f"blocks.{l}.mlp.W_in"] = einops.rearrange( - block.intermediate.dense.weight, "mlp model -> model mlp" - ) - 
state_dict[f"blocks.{l}.mlp.b_in"] = block.intermediate.dense.bias - state_dict[f"blocks.{l}.mlp.W_out"] = einops.rearrange( - block.output.dense.weight, "model mlp -> mlp model" - ) - state_dict[f"blocks.{l}.mlp.b_out"] = block.output.dense.bias - state_dict[f"blocks.{l}.ln2.w"] = block.output.LayerNorm.weight - state_dict[f"blocks.{l}.ln2.b"] = block.output.LayerNorm.bias - if not raw: - if sequence_classification: - classification_head = bert.classifier - state_dict["classifier.W"] = classification_head.weight - state_dict["classifier.b"] = classification_head.bias - else: - mlm_head = bert.cls.predictions - state_dict["mlm_head.W"] = mlm_head.transform.dense.weight - state_dict["mlm_head.b"] = mlm_head.transform.dense.bias - state_dict["mlm_head.ln.w"] = mlm_head.transform.LayerNorm.weight - state_dict["mlm_head.ln.b"] = mlm_head.transform.LayerNorm.bias - # "unembed.W_U": mlm_head.decoder.weight.T, - state_dict["unembed.b_U"] = mlm_head.bias + if "electra" not in model_name: + if hasattr(bert, "cls"): + mlm_head = bert.cls.predictions + state_dict["mlm_head.W"] = mlm_head.transform.dense.weight + state_dict["mlm_head.b"] = mlm_head.transform.dense.bias + state_dict["mlm_head.ln.w"] = mlm_head.transform.LayerNorm.weight + state_dict["mlm_head.ln.b"] = mlm_head.transform.LayerNorm.bias + # "unembed.W_U": mlm_head.decoder.weight.T, + state_dict["unembed.b_U"] = mlm_head.bias # Note: BERT uses tied embeddings state_dict["unembed.W_U"] = embeddings.word_embeddings.weight.T @@ -235,18 +181,28 @@ def convert_bert_weights( register_with_transformer_lens( - partial(convert_bert_weights, raw=True), - ["BertModel", "BertForMaskedLM"], + partial(convert_bert_based_weights, model_name='encoder', raw=True), + ["BertModel"], function_type="conversion", ) register_with_transformer_lens( - partial(convert_bert_weights, sequence_classification=True), + partial(convert_bert_based_weights, model_name='bert', raw=False), + ["BERTForPreTraining", "BertForMaskedLM"], + 
function_type="conversion", +) +register_with_transformer_lens( + partial(convert_bert_based_weights, sequence_classification=True), "BertForSequenceClassification", function_type="conversion", ) +register_with_transformer_lens( + partial(convert_bert_based_weights, model_name="roberta", raw=False), + ["RobertaForMaskedLM", "RobertaForPreTraining"], + function_type="conversion", +) register_with_transformer_lens( partial(convert_bert_based_weights, model_name="roberta", raw=True), - ["RobertaModel", "RobertaForMaskedLM"], + ["RobertaModel"], function_type="conversion", ) register_with_transformer_lens( @@ -256,6 +212,11 @@ def convert_bert_weights( "RobertaForSequenceClassification", function_type="conversion", ) +register_with_transformer_lens( + partial(convert_bert_based_weights, model_name="electra", raw=False), + "ElectraForPreTraining", + function_type="conversion", +) register_with_transformer_lens( partial(convert_bert_based_weights, model_name="electra", raw=True), ["ElectraModel"], diff --git a/src/mechir/modelling/hooked/linear.py b/src/mechir/modelling/hooked/linear.py index f891f67..cabf611 100644 --- a/src/mechir/modelling/hooked/linear.py +++ b/src/mechir/modelling/hooked/linear.py @@ -8,7 +8,11 @@ import torch.nn as nn from jaxtyping import Float from transformer_lens.utilities.addmm import batch_addmm -from .HookedTransformerConfig import HookedTransformerConfig +from transformer_lens.hook_points import HookPoint +from transformer_lens.factories.activation_function_factory import ( + ActivationFunctionFactory, +) +from mechir.modelling.hooked.config import HookedTransformerConfig class ClassificationHead(nn.Module): @@ -39,3 +43,24 @@ def forward( self, x: Float[torch.Tensor, "batch pos d_model"] ) -> Float[torch.Tensor, "batch pos d_model"]: return batch_addmm(self.b, self.W.T, x) + + +class MLPClassificationHead(nn.Module): + """ + Transforms ELECTRA embeddings into logits. The purpose of this module is to predict masked tokens in a sentence. 
+ """ + + def __init__(self, cfg: Union[Dict, HookedTransformerConfig]): + super().__init__() + self.cfg = HookedTransformerConfig.unwrap(cfg) + self.dense = HiddenLinear(cfg) + self.out_proj = ClassificationHead(cfg) + self.activation = ActivationFunctionFactory.pick_activation_function(self.cfg) + + self.hook_pre = HookPoint() # [batch, pos, d_mlp] + self.hook_post = HookPoint() # [batch, pos, d_mlp] + + def forward(self, resid: Float[torch.Tensor, "batch d_model"]) -> torch.Tensor: + pre_act = self.hook_pre(self.dense(resid)) + post_act = self.hook_post(self.activation(pre_act)) + return self.out_proj(post_act) diff --git a/src/mechir/modelling/hooked/loading_from_pretrained.py b/src/mechir/modelling/hooked/loading_from_pretrained.py index d61950a..a58bbac 100644 --- a/src/mechir/modelling/hooked/loading_from_pretrained.py +++ b/src/mechir/modelling/hooked/loading_from_pretrained.py @@ -22,8 +22,8 @@ ) import transformer_lens.utils as utils -from .HookedTransformerConfig import HookedTransformerConfig -from ... import config +from mechir.modelling.hooked.config import HookedTransformerConfig +from mechir import config from transformer_lens.pretrained.weight_conversions import ( convert_bloom_weights, convert_coder_weights, diff --git a/src/mechir/modelling/hooked/states.py b/src/mechir/modelling/hooked/states.py index 32bc228..199ef7a 100644 --- a/src/mechir/modelling/hooked/states.py +++ b/src/mechir/modelling/hooked/states.py @@ -1,6 +1,8 @@ # Description: This file contains the state dictionary for the models in the hooked library. 
import torch -from .loading_from_pretrained import extend_transformer_lens_registry +from mechir.modelling.hooked.loading_from_pretrained import ( + extend_transformer_lens_registry, +) @extend_transformer_lens_registry("GPTNeoForCausalLM") @@ -85,31 +87,6 @@ def GPTJForCausalLM_state_dict(hf_config): } -@extend_transformer_lens_registry("GPTNeoForCausalLM") -def GPTNeoForCausalLM_state_dict(hf_config): - state = { - "d_model": hf_config.hidden_size, - "d_head": hf_config.hidden_size // hf_config.num_heads, - "n_heads": hf_config.num_heads, - "d_mlp": hf_config.hidden_size * 4, - "n_layers": hf_config.num_layers, - "n_ctx": hf_config.max_position_embeddings, - "eps": hf_config.layer_norm_epsilon, - "d_vocab": hf_config.vocab_size, - "act_fn": hf_config.hidden_act, - "use_attn_scale": False, - "use_local_attn": True, - "scale_attn_by_inverse_layer_idx": False, - "parallel_attn_mlp": True, - "positional_embedding_type": "rotary", - "rotary_adjacent_pairs": False, - "normalization_type": "LN", - } - rotary_pct = hf_config.rotary_pct - state["rotary_dim"] = round(rotary_pct * state["d_head"]) - return state - - @extend_transformer_lens_registry( ["BertModel", "BertForMaskedLM", "ElectraForPreTraining"] ) @@ -128,22 +105,20 @@ def BertModel_state_dict(hf_config): } -@extend_transformer_lens_registry( - ["BertForSequenceClassification", "ElectraForSequenceClassification"] -) +@extend_transformer_lens_registry("BertForSequenceClassification") def BertForSequenceClassification_state_dict(hf_config): return { - "d_model": hf_config.hidden_size, - "d_head": hf_config.hidden_size // hf_config.num_attention_heads, - "n_heads": hf_config.num_attention_heads, - "d_mlp": hf_config.intermediate_size, - "n_layers": hf_config.num_hidden_layers, - "n_ctx": hf_config.max_position_embeddings, - "eps": hf_config.layer_norm_eps, - "d_vocab": hf_config.vocab_size, - "act_fn": "gelu", - "attention_dir": "bidirectional", + **BertModel_state_dict(hf_config), + "num_labels": 
hf_config.num_labels, + } + + +@extend_transformer_lens_registry("ElectraForSequenceClassification") +def ElectraForSequenceClassification_state_dict(hf_config): + return { + **BertModel_state_dict(hf_config), "num_labels": hf_config.num_labels, + "use_mlp_head": True, } @@ -157,10 +132,10 @@ def DistilBert_state_dict(hf_config): "n_layers": hf_config.n_layers, "n_ctx": hf_config.max_position_embeddings, "eps": 1e-12, - "d_vocab": hf_config.vocab_size, # hacky fix for special pad token + "d_vocab": hf_config.vocab_size, "act_fn": hf_config.activation, "attention_dir": "birectional", - # dropout, initializer_range, pad_token_id, qa_dropout, seq_classif_dropout, sinusoidal_pos_embds, tie_weights + "use_token_type_ids": False, } diff --git a/src/mechir/modelling/patched.py b/src/mechir/modelling/patched.py index f229606..de8e094 100644 --- a/src/mechir/modelling/patched.py +++ b/src/mechir/modelling/patched.py @@ -16,9 +16,9 @@ def __init__(self) -> None: @property def _patch_funcs(self): return { - 'block_all' : self.get_act_patch_block_every, - 'head_all' : self.get_act_patch_attn_head_out_all_pos, - 'head_by_pos' : self.get_act_patch_attn_head_by_pos, + "block_all": self.get_act_patch_block_every, + "head_all": self.get_act_patch_attn_head_out_all_pos, + "head_by_pos": self.get_act_patch_attn_head_by_pos, } def _patch_residual_component( @@ -114,7 +114,9 @@ def _get_act_patch_block_every( patched_outputs = self.run_with_hooks( corrupted_tokens["input_ids"], attention_mask=corrupted_tokens["attention_mask"], - fwd_hooks=[('_model.'+utils.get_act_name(component, layer), hook_fn)], + fwd_hooks=[ + ("_model." 
+ utils.get_act_name(component, layer), hook_fn) + ], ) yield (component_idx, layer, position), patched_outputs @@ -141,7 +143,7 @@ def _get_act_patch_attn_head_out_all_pos( patched_outputs = self.run_with_hooks( corrupted_tokens["input_ids"], attention_mask=corrupted_tokens["attention_mask"], - fwd_hooks=[('_model.'+utils.get_act_name("z", layer), hook_fn)], + fwd_hooks=[("_model." + utils.get_act_name("z", layer), hook_fn)], ) yield (layer, head), patched_outputs @@ -171,7 +173,9 @@ def _get_act_patch_attn_head_by_pos( patched_outputs = self.run_with_hooks( corrupted_tokens["input_ids"], attention_mask=corrupted_tokens["attention_mask"], - fwd_hooks=[('_model.'+utils.get_act_name(component, layer), hook_fn)], + fwd_hooks=[ + ("_model." + utils.get_act_name(component, layer), hook_fn) + ], ) yield (component_idx, i, position), patched_outputs diff --git a/src/mechir/modelling/steering.py b/src/mechir/modelling/steering.py new file mode 100644 index 0000000..35f768f --- /dev/null +++ b/src/mechir/modelling/steering.py @@ -0,0 +1,65 @@ +import torch +from torch import nn +from dataclasses import dataclass +from typing import Optional, List + + +@dataclass +class EmbeddingSteerConfig: + idx: List[int] + mode: Optional[str] = "increase" + scale: Optional[int] = 10 + out_hook_point: Optional[str] = "model.embedding.W" + + +class EmbeddingSteerWrapper(nn.Module): + def __init__(self, steer): + super().__init__() + self.steer = steer + + def forward(self, x): + return self.steer(x) + + +class EmbeddingSteer(HookedRootModule): + def __init__(self, model, config): + super().__init__() + self.config = config + self.layer = model[self.config.out_hook_point] + self.scale = self.config.scale + self.idx = self.config.idx + if self.config.mode == "increase": + self.scaling = self.increase + elif self.config.mode == "decrease": + self.scaling = self.decrease + else: + raise ValueError(f"Invalid mode {self.config.mode}") + + def increase(self, x): + return x * -self.scale + + def 
decrease(self, x): + return x * self.scale + + def __post_init__(self): + U, S, V = torch.linalg.svd(self.layer, full_matrices=False) + self.U, self.S, self.V = U, S, V + + self.U_0 = U[:, 0].clone().detach() + + def forward(self, x): + self.U_0[x] = self.scaling(self.U_0[x]) + + +class EmbeddingSteeringContext: + def __init__(self, model, steer): + self.embedding = steer.config.out_hook_point + self.original = model[self.embedding] + self.steer = steer + self.model = model + + def __enter__(self): + self.model[self.embedding] = self.steer + + def __exit__(self, exc_type, exc_val, exc_tb): + self.model[self.embedding] = self.original diff --git a/src/mechir/modelling/t5.py b/src/mechir/modelling/t5.py index 47ae557..76ed518 100644 --- a/src/mechir/modelling/t5.py +++ b/src/mechir/modelling/t5.py @@ -7,10 +7,10 @@ from transformer_lens.ActivationCache import ActivationCache from transformer_lens.hook_points import HookedRootModule import transformer_lens.utils as utils -from .patched import PatchedMixin -from .sae import SAEMixin -from ..util import linear_rank_function -from .hooked.loading_from_pretrained import get_official_model_name +from mechir.modelling.patched import PatchedMixin +from mechir.modelling.sae import SAEMixin +from mechir.modelling.hooked.loading_from_pretrained import get_official_model_name +from mechir.util import linear_rank_function logger = logging.getLogger(__name__) @@ -56,7 +56,9 @@ def forward( input_ids: Float[torch.Tensor, "batch seq"], attention_mask: Float[torch.Tensor, "batch seq"], ): - model_output = self._model(input=input_ids, one_hot_attention_mask=attention_mask, return_type="logits") + model_output = self._model( + input=input_ids, one_hot_attention_mask=attention_mask, return_type="logits" + ) model_output = ( model_output[:, 0, (self.pos_token, self.neg_token)].softmax(dim=-1)[:, 0] if self.softmax_output @@ -166,7 +168,7 @@ def score(self, sequences: dict, cache=False): return logits, cache logits = 
self.forward(sequences["input_ids"], sequences["attention_mask"]) - return logits + return logits, None def patch( self, diff --git a/src/mechir/perturb/__init__.py b/src/mechir/perturb/__init__.py index 7987896..47a390a 100644 --- a/src/mechir/perturb/__init__.py +++ b/src/mechir/perturb/__init__.py @@ -3,6 +3,7 @@ from abc import ABC, abstractmethod from typing import TYPE_CHECKING from ..util import is_ir_axioms_availible +from functools import wraps from transformers.utils import _LazyModule, OptionalDependencyNotAvailable @@ -28,17 +29,23 @@ def apply(self, document: str, query: str = None) -> str: return document -def perturbation(f): - """ - An alternative decorator for subclassing AbstractPerturbation. - """ - argcount = f.__code__.co_argcount +def perturbation(f=None, *, perturb_type: str = "append"): + def decorator(func): + argcount = func.__code__.co_argcount + + class CustomPerturbation(AbstractPerturbation): + def __init__(self): + self.perturb_type = perturb_type - class CustomPerturbation(AbstractPerturbation): - def apply(self, document: str, query: str = None) -> str: - return f(document, query) if argcount > 1 else f(document) + def apply(self, document: str, query: str = None) -> str: + return func(document, query) if argcount > 1 else func(document) - return CustomPerturbation() + return CustomPerturbation() + + if f is None: + return decorator # used as @perturbation(...) 
+ else: + return decorator(f) # used as @perturbation # Explicitly define what should be importable from this module diff --git a/src/mechir/perturb/axiom/frequency.py b/src/mechir/perturb/axiom/frequency.py index 9338576..eb362bb 100644 --- a/src/mechir/perturb/axiom/frequency.py +++ b/src/mechir/perturb/axiom/frequency.py @@ -22,6 +22,8 @@ class FrequencyPerturbation(IndexPerturbation): exact_match: Forces returned terms to be present in both texts """ + perturb_type = "append" + def __init__( self, index_location: Any | Path | str, @@ -57,6 +59,13 @@ def __init__( self.num_additions = num_additions self.loc = loc + if self.loc == "end": + self.perturb_type = "append" + elif self.loc == "start": + self.perturb_type = "prepend" + else: + raise ValueError("loc must be either 'start' or 'end'") + def _get_random_terms(self, text: str, terms: list) -> list: return random.choices( list(self.get_freq_text(text, terms).keys()), k=self.num_additions diff --git a/src/mechir/perturb/axiom/proximity.py b/src/mechir/perturb/axiom/proximity.py index d584108..1a93067 100644 --- a/src/mechir/perturb/axiom/proximity.py +++ b/src/mechir/perturb/axiom/proximity.py @@ -57,6 +57,13 @@ def __init__( self.num_additions = num_additions self.loc = loc + if self.loc == "end": + self.perturb_type = "append" + elif self.loc == "start": + self.perturb_type = "prepend" + else: + raise ValueError("loc must be either 'start' or 'end'") + def _get_random_terms(self, text: str, terms: list) -> list: return random.choices( list(self.get_freq_text(text, terms).keys()), k=self.num_additions diff --git a/test/acceptance/test_cat.py b/test/acceptance/test_cat.py new file mode 100644 index 0000000..6f4861c --- /dev/null +++ b/test/acceptance/test_cat.py @@ -0,0 +1,57 @@ +import pytest +import torch +from mechir import Cat +from transformers import AutoTokenizer + + +@pytest.fixture(scope="module") +def cat_model(): + tokenizer = AutoTokenizer.from_pretrained("bert-base-uncased") + return Cat( + 
model_name_or_path="bert-base-uncased", + tokenizer=tokenizer, + softmax_output=True, + return_cache=True, + ) + + +@pytest.fixture(scope="module") +def tokenizer(): + return AutoTokenizer.from_pretrained("bert-base-uncased") + + +@pytest.mark.parametrize("text,label", [("Good day", 0), ("Bad day", 1)]) +def test_forward_and_softmax(cat_model, tokenizer, text, label): + enc = tokenizer(text, return_tensors="pt", padding=True) + logits = cat_model.forward(enc.input_ids, enc.attention_mask) + # softmax_output=False returns raw logits + assert logits.dim() == 1 + + +@pytest.mark.parametrize("patch_type", ["block_all", "head_all", "head_by_pos"]) +def test_patch_methods_shapes(cat_model, tokenizer, patch_type): + text = "Patch test" + enc = tokenizer(text, return_tensors="pt", padding=True) + seqs = {"input_ids": enc.input_ids, "attention_mask": enc.attention_mask} + # Prepare positive and perturbed + seqs_p = seqs + scores, cache = cat_model.score(seqs, cache=True) + # call patch + if patch_type == "head_by_pos": + layer_head_list = [(0, 0)] + out, _ = cat_model.patch( + seqs, seqs_p, patch_type=patch_type, layer_head_list=layer_head_list + ) + assert out.shape[0] == 2 # components + else: + out, _ = cat_model.patch(seqs, seqs_p, patch_type=patch_type) + assert out.ndim >= 2 + + +def test_score_without_cache(cat_model, tokenizer): + text = "Simple test" + enc = tokenizer(text, return_tensors="pt", padding=True) + logits, cache = cat_model.score( + {"input_ids": enc.input_ids, "attention_mask": enc.attention_mask}, cache=False + ) + assert cache is None diff --git a/test/acceptance/test_dot.py b/test/acceptance/test_dot.py new file mode 100644 index 0000000..dcf93bb --- /dev/null +++ b/test/acceptance/test_dot.py @@ -0,0 +1,57 @@ +import pytest +import torch +from mechir import Dot +from transformers import AutoTokenizer +from transformer_lens.ActivationCache import ActivationCache + + +@pytest.fixture(scope="module") +def dot_model(): + tokenizer = 
@pytest.fixture(scope="module")
def tokenizer():
    """Module-scoped BERT tokenizer shared by the Dot tests."""
    return AutoTokenizer.from_pretrained("bert-base-uncased")


@pytest.mark.parametrize("pooling", ["cls", "mean"])
def test_forward_pooling(pooling):
    """Both pooling strategies yield a rank-2 (batch, d_model) representation."""
    model = Dot("bert-base-uncased", pooling_type=pooling, return_cache=False)
    tok = AutoTokenizer.from_pretrained("bert-base-uncased")
    enc = tok("Forward test", return_tensors="pt", padding=True)
    pooled = model.forward(enc.input_ids, enc.attention_mask)
    assert pooled.dim() == 2  # (batch, d_model)


@pytest.mark.parametrize("cache_flag", [False, True])
def test_score_cache_flags(dot_model, tokenizer, cache_flag):
    """score() hands back an ActivationCache only when cache=True."""
    enc = tokenizer(["A quick test"], return_tensors="pt", padding=True)
    queries = {"input_ids": enc.input_ids, "attention_mask": enc.attention_mask}
    docs = queries
    scores, reps_q, reps_d, cache = dot_model.score(queries, docs, cache=cache_flag)
    assert isinstance(scores, torch.Tensor)
    if cache_flag:
        assert isinstance(cache, ActivationCache)
    else:
        assert cache is None


@pytest.mark.parametrize("patch_type", ["block_all", "head_all", "head_by_pos"])
def test_patch_methods_shapes(dot_model, tokenizer, patch_type):
    """Each supported patch_type runs and yields output of the expected rank."""
    enc = tokenizer(["Patch"], return_tensors="pt", padding=True)
    queries = {"input_ids": enc.input_ids, "attention_mask": enc.attention_mask}
    docs = queries
    docs_perturbed = queries
    if patch_type == "head_by_pos":
        out, _ = dot_model.patch(
            queries, docs, docs_perturbed, patch_type=patch_type, layer_head_list=[(0, 0)]
        )
        assert out.shape[0] == 2
    else:
        out, _ = dot_model.patch(queries, docs, docs_perturbed, patch_type=patch_type)
        assert out.dim() >= 2
MODEL_NAME = "distilbert-base-uncased-finetuned-sst-2-english"


def get_embeddings(model):
    """Return the embedding module whether or not the HF model is wrapped
    in a task head (``model.distilbert.embeddings`` vs ``model.embeddings``)."""
    try:
        return model.distilbert.embeddings
    except AttributeError:
        return model.embeddings


@pytest.fixture(scope="module")
def our_distilbert():
    """HookedDistilBert initialised from the matching HF checkpoint."""
    return HookedDistilBert.from_pretrained(
        MODEL_NAME, device="cpu", hf_model=DistilBertModel.from_pretrained(MODEL_NAME)
    )


@pytest.fixture(scope="module")
def huggingface_distilbert():
    return DistilBertModel.from_pretrained(MODEL_NAME)


@pytest.fixture(scope="module")
def tokenizer():
    return AutoTokenizer.from_pretrained(MODEL_NAME)


@pytest.fixture
def tokens(tokenizer):
    return tokenizer("The [MASK] sat on the mat", return_tensors="pt")["input_ids"]


def test_full_model(our_distilbert, huggingface_distilbert, tokenizer):
    """Full forward pass matches the HF final hidden states within tolerance."""
    sequences = [
        "Hello, my [MASK] is distilbert.",
        "I went to the [MASK] to buy some groceries.",
    ]
    batch = tokenizer(sequences, return_tensors="pt", padding=True)
    input_ids = batch["input_ids"]
    attention_mask = batch["attention_mask"]

    hf_hidden = huggingface_distilbert(
        input_ids, attention_mask=attention_mask, output_hidden_states=True
    ).hidden_states[-1]
    our_hidden = our_distilbert(input_ids, attention_mask=attention_mask)
    assert_close(hf_hidden, our_hidden, rtol=1.3e-6, atol=4e-5)


def test_embed_one_prediction(our_distilbert, huggingface_distilbert, tokens):
    """Embedding layer output matches HF for a single sequence."""
    hf_embed_out = get_embeddings(huggingface_distilbert)(tokens)[0]
    our_embed_out = our_distilbert.embed(tokens).squeeze(0)
    assert_close(hf_embed_out, our_embed_out)
def test_embed_two_predictions(our_distilbert, huggingface_distilbert, tokenizer):
    """Embedding output matches HF for a pair-encoded input.

    NOTE(review): the two strings are passed positionally as (text, text_pair),
    so this encodes a SINGLE pair sequence, not a batch of two — the name is
    slightly misleading, and the ``[0]`` / ``squeeze(0)`` below rely on the
    batch dimension being 1. Confirm intent before renaming.
    """
    encoding = tokenizer(
        "Hello, my [MASK] is distilbert.",
        "I went to the [MASK] to buy some groceries.",
        return_tensors="pt",
    )
    input_ids = encoding["input_ids"]

    hf_embed_out = get_embeddings(huggingface_distilbert)(input_ids)[0]
    our_embed_out = our_distilbert.embed(input_ids).squeeze(0)
    assert_close(hf_embed_out, our_embed_out)


def test_distilbert_block(our_distilbert, huggingface_distilbert, tokens):
    """First transformer block matches the HF block applied to the same embeddings."""
    embed_out = get_embeddings(huggingface_distilbert)(tokens)

    our_block_out = our_distilbert.blocks[0](embed_out)
    hf_block_out = huggingface_distilbert.transformer.layer[0](embed_out)[0]
    assert_close(our_block_out, hf_block_out)


def test_run_with_cache(our_distilbert, tokens):
    """run_with_cache exposes the expected hook names."""
    _, cache = our_distilbert.run_with_cache(tokens)

    # Spot-check an arbitrary subset of the cached activations.
    assert "embed.hook_embed" in cache
    assert "blocks.0.attn.hook_q" in cache
    assert "blocks.3.attn.hook_attn_scores" in cache
    assert "blocks.5.hook_resid_post" in cache
@pytest.fixture(scope="module")
def huggingface_electra():
    return ElectraModel.from_pretrained(MODEL_NAME)


@pytest.fixture(scope="module")
def tokenizer():
    return AutoTokenizer.from_pretrained(MODEL_NAME)


@pytest.fixture
def tokens(tokenizer):
    return tokenizer("The [MASK] sat on the mat", return_tensors="pt")["input_ids"]


def test_full_model(our_electra, huggingface_electra, tokenizer):
    """Full forward pass matches the HF final hidden states within tolerance."""
    sequences = [
        "Hello, my [MASK] is electra.",
        "I went to the [MASK] to buy some groceries.",
    ]
    batch = tokenizer(sequences, return_tensors="pt", padding=True)
    input_ids = batch["input_ids"]
    attention_mask = batch["attention_mask"]

    hf_hidden = huggingface_electra(
        input_ids, attention_mask=attention_mask, output_hidden_states=True
    ).hidden_states[-1]
    our_hidden = our_electra(input_ids, attention_mask=attention_mask)
    assert_close(hf_hidden, our_hidden, rtol=1.3e-6, atol=4e-5)


def test_embed_one_prediction(our_electra, huggingface_electra, tokens):
    """Embedding layer output matches HF for a single sequence."""
    hf_embed_out = get_embeddings(huggingface_electra)(tokens)[0]
    our_embed_out = our_electra.embed(tokens).squeeze(0)
    assert_close(hf_embed_out, our_embed_out)


def test_embed_two_predictions(our_electra, huggingface_electra, tokenizer):
    """Embedding output matches HF for a pair-encoded input with token types.

    NOTE(review): as in the distilbert variant, this encodes one (text,
    text_pair) sequence rather than a batch of two.
    """
    encoding = tokenizer(
        "Hello, my [MASK] is electra.",
        "I went to the [MASK] to buy some groceries.",
        return_tensors="pt",
    )
    input_ids = encoding["input_ids"]
    token_type_ids = encoding["token_type_ids"]

    hf_embed_out = get_embeddings(huggingface_electra)(
        input_ids, token_type_ids=token_type_ids
    )[0]
    our_embed_out = our_electra.embed(input_ids, token_type_ids=token_type_ids).squeeze(0)
    assert_close(hf_embed_out, our_embed_out)
def test_attention(our_electra, huggingface_electra, tokens):
    """First-layer attention matches HF's self-attention followed by its
    output projection (the HF output LayerNorm/residual is intentionally
    not applied on either side)."""
    hf_attn = huggingface_electra.encoder.layer[0].attention
    embed_out = get_embeddings(huggingface_electra)(tokens)

    our_attn_out = our_electra.blocks[0].attn(embed_out, embed_out, embed_out)
    hf_self_out = hf_attn.self(embed_out)[0]
    hf_attn_out = hf_attn.output.dense(hf_self_out)
    assert_close(our_attn_out, hf_attn_out)


def test_electra_block(our_electra, huggingface_electra, tokens):
    """First transformer block matches the HF block applied to the same embeddings."""
    embed_out = get_embeddings(huggingface_electra)(tokens)

    our_block_out = our_electra.blocks[0](embed_out)
    hf_block_out = huggingface_electra.encoder.layer[0](embed_out)[0]
    assert_close(our_block_out, hf_block_out)


def test_run_with_cache(our_electra, tokens):
    """run_with_cache exposes the expected hook names."""
    _, cache = our_electra.run_with_cache(tokens)

    # Spot-check an arbitrary subset of the cached activations.
    assert "embed.hook_embed" in cache
    assert "blocks.0.attn.hook_q" in cache
    assert "blocks.3.attn.hook_attn_scores" in cache
    assert "blocks.7.hook_resid_post" in cache
@pytest.fixture(scope="module")
def huggingface_bert():
    return BertForPreTraining.from_pretrained(MODEL_NAME)


@pytest.fixture(scope="module")
def tokenizer():
    return AutoTokenizer.from_pretrained(MODEL_NAME)


@pytest.fixture
def tokens(tokenizer):
    return tokenizer("The [MASK] sat on the mat", return_tensors="pt")["input_ids"]


def test_full_model(our_bert, huggingface_bert, tokenizer):
    """Full forward pass matches the HF final hidden states within tolerance."""
    sequences = [
        "Hello, my [MASK] is Bert.",
        "I went to the [MASK] to buy some groceries.",
    ]
    batch = tokenizer(sequences, return_tensors="pt", padding=True)
    input_ids = batch["input_ids"]
    attention_mask = batch["attention_mask"]

    hf_hidden = huggingface_bert(
        input_ids, attention_mask=attention_mask, output_hidden_states=True
    ).hidden_states[-1]
    our_hidden = our_bert(input_ids, attention_mask=attention_mask)
    assert_close(hf_hidden, our_hidden, rtol=1.3e-6, atol=4e-5)


def test_embed_one_prediction(our_bert, huggingface_bert, tokens):
    """Embedding layer output matches HF for a single sequence."""
    hf_embed_out = get_embeddings(huggingface_bert)(tokens)[0]
    our_embed_out = our_bert.embed(tokens).squeeze(0)
    assert_close(hf_embed_out, our_embed_out)


def test_embed_two_predictions(our_bert, huggingface_bert, tokenizer):
    """Embedding output matches HF for a pair-encoded input with token types.

    NOTE(review): this encodes one (text, text_pair) sequence, not a batch
    of two — see the sibling distilbert/electra tests.
    """
    encoding = tokenizer(
        "Hello, my [MASK] is Bert.",
        "I went to the [MASK] to buy some groceries.",
        return_tensors="pt",
    )
    input_ids = encoding["input_ids"]
    token_type_ids = encoding["token_type_ids"]

    hf_embed_out = get_embeddings(huggingface_bert)(
        input_ids, token_type_ids=token_type_ids
    )[0]
    our_embed_out = our_bert.embed(input_ids, token_type_ids=token_type_ids).squeeze(0)
    assert_close(hf_embed_out, our_embed_out)
def test_bert_block(our_bert, huggingface_bert, tokens):
    """First transformer block matches the HF block applied to the same embeddings."""
    embed_out = get_embeddings(huggingface_bert)(tokens)

    our_block_out = our_bert.blocks[0](embed_out)
    hf_block_out = huggingface_bert.bert.encoder.layer[0](embed_out)[0]
    assert_close(our_block_out, hf_block_out)


def test_run_with_cache(our_bert, tokens):
    """run_with_cache exposes the expected hook names."""
    _, cache = our_bert.run_with_cache(tokens)

    # Spot-check an arbitrary subset of the cached activations.
    assert "embed.hook_embed" in cache
    assert "blocks.0.attn.hook_q" in cache
    assert "blocks.3.attn.hook_attn_scores" in cache
    assert "blocks.7.hook_resid_post" in cache


def test_from_pretrained_revision():
    """
    Check that the from_pretrained parameter `revision` (= git version) works
    """
    _ = HookedEncoder.from_pretrained(MODEL_NAME, revision="main")

    # FIX: the original used a bare `except:` with an `else: raise
    # AssertionError` — a bare except also swallows KeyboardInterrupt and
    # SystemExit. pytest.raises expresses the expectation directly.
    with pytest.raises(Exception):
        HookedEncoder.from_pretrained(MODEL_NAME, revision="inexistent_branch_name")
@pytest.mark.skipif(not torch.cuda.is_available(), reason="Requires a CUDA device")
def test_cuda(tokens):
    """Smoke test: the model loads and runs when a CUDA device is present.

    FIX: the original declared the parameter as ``mlm_tokens``, a fixture that
    is not defined anywhere in this file (the fixture is named ``tokens``), so
    the test always errored at fixture resolution instead of running.
    """
    model = HookedEncoder.from_pretrained(MODEL_NAME)
    model(tokens)