From bb63dab4b0dae74aef604425a0f43d8c85e3f0df Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E2=80=9CAndrew?= Date: Sat, 22 Feb 2025 15:34:13 +0000 Subject: [PATCH 01/21] update hooked encoder --- src/mechir/modelling/hooked/HookedEncoder.py | 269 ++++++++++++------- 1 file changed, 168 insertions(+), 101 deletions(-) diff --git a/src/mechir/modelling/hooked/HookedEncoder.py b/src/mechir/modelling/hooked/HookedEncoder.py index 38619cf..e12f691 100644 --- a/src/mechir/modelling/hooked/HookedEncoder.py +++ b/src/mechir/modelling/hooked/HookedEncoder.py @@ -19,24 +19,23 @@ from . import loading_from_pretrained as loading from transformer_lens.ActivationCache import ActivationCache -from transformer_lens.components import BertBlock, BertEmbed, BertMLMHead, Unembed +from transformer_lens.components import BertBlock, BertEmbed, BertMLMHead, Unembed, BertNSPHead, BertPooler from transformer_lens.FactoredMatrix import FactoredMatrix from transformer_lens.hook_points import HookedRootModule, HookPoint -from .HookedTransformerConfig import HookedTransformerConfig from transformer_lens.utilities import devices +from .HookedTransformerConfig import HookedTransformerConfig + class HookedEncoder(HookedRootModule): """ This class implements a BERT-style encoder using the components in ./components.py, with HookPoints on every interesting activation. It inherits from HookedRootModule. Limitations: - - The current MVP implementation supports only the masked language modelling (MLM) task. Next sentence prediction (NSP), causal language modelling, and other tasks are not yet supported. - - Also note that model does not include dropouts, which may lead to inconsistent results from training or fine-tuning. + - The model does not include dropouts, which may lead to inconsistent results from training or fine-tuning. Like HookedTransformer, it can have a pretrained Transformer's weights loaded via `.from_pretrained`. 
There are a few features you might know from HookedTransformer which are not yet supported: - There is no preprocessing (e.g. LayerNorm folding) when loading a pretrained model - - The model only accepts tokens as inputs, and not strings, or lists of strings """ def __init__(self, cfg, tokenizer=None, move_to_device=True, **kwargs): @@ -49,35 +48,31 @@ def __init__(self, cfg, tokenizer=None, move_to_device=True, **kwargs): ) self.cfg = cfg - assert ( - self.cfg.n_devices == 1 - ), "Multiple devices not supported for HookedEncoder" + assert self.cfg.n_devices == 1, "Multiple devices not supported for HookedEncoder" if tokenizer is not None: self.tokenizer = tokenizer elif self.cfg.tokenizer_name is not None: - huggingface_token = os.environ.get("HF_TOKEN", None) + huggingface_token = os.environ.get("HF_TOKEN", "") self.tokenizer = AutoTokenizer.from_pretrained( self.cfg.tokenizer_name, - token=huggingface_token, + token=huggingface_token if len(huggingface_token) > 0 else None, ) else: self.tokenizer = None if self.cfg.d_vocab == -1: # If we have a tokenizer, vocab size can be inferred from it. 
- assert ( - self.tokenizer is not None - ), "Must provide a tokenizer if d_vocab is not provided" + assert self.tokenizer is not None, "Must provide a tokenizer if d_vocab is not provided" self.cfg.d_vocab = max(self.tokenizer.vocab.values()) + 1 if self.cfg.d_vocab_out == -1: self.cfg.d_vocab_out = self.cfg.d_vocab self.embed = BertEmbed(self.cfg) - self.blocks = nn.ModuleList( - [BertBlock(self.cfg) for _ in range(self.cfg.n_layers)] - ) - self.mlm_head = BertMLMHead(cfg) + self.blocks = nn.ModuleList([BertBlock(self.cfg) for _ in range(self.cfg.n_layers)]) + self.mlm_head = BertMLMHead(self.cfg) self.unembed = Unembed(self.cfg) + self.nsp_head = BertNSPHead(self.cfg) + self.pooler = BertPooler(self.cfg) self.hook_full_embed = HookPoint() @@ -86,31 +81,112 @@ def __init__(self, cfg, tokenizer=None, move_to_device=True, **kwargs): self.setup() - @overload - def forward( + def to_tokens( self, - input: Int[torch.Tensor, "batch pos"], - return_type: Literal["logits"], - token_type_ids: Optional[Int[torch.Tensor, "batch pos"]] = None, - attention_mask: Optional[Int[torch.Tensor, "batch pos"]] = None, - ) -> Float[torch.Tensor, "batch pos d_vocab"]: ... + input: Union[str, List[str]], + move_to_device: bool = True, + truncate: bool = True, + ) -> Tuple[ + Int[torch.Tensor, "batch pos"], + Int[torch.Tensor, "batch pos"], + Int[torch.Tensor, "batch pos"], + ]: + """Converts a string to a tensor of tokens. + Taken mostly from the HookedTransformer implementation, but does not support default padding + sides or prepend_bos. + Args: + input (Union[str, List[str]]): The input to tokenize. + move_to_device (bool): Whether to move the output tensor of tokens to the device the model lives on. Defaults to True + truncate (bool): If the output tokens are too long, whether to truncate the output + tokens to the model's max context window. Does nothing for shorter inputs. Defaults to + True. 
+ """ - @overload - def forward( + assert self.tokenizer is not None, "Cannot use to_tokens without a tokenizer" + + encodings = self.tokenizer( + input, + return_tensors="pt", + padding=True, + truncation=truncate, + max_length=self.cfg.n_ctx if truncate else None, + ) + + tokens = encodings.input_ids + + if move_to_device: + tokens = tokens.to(self.cfg.device) + token_type_ids = encodings.token_type_ids.to(self.cfg.device) + attention_mask = encodings.attention_mask.to(self.cfg.device) + + return tokens, token_type_ids, attention_mask + + def encoder_output( self, - input: Int[torch.Tensor, "batch pos"], - return_type: Literal[None], + tokens: Int[torch.Tensor, "batch pos"], token_type_ids: Optional[Int[torch.Tensor, "batch pos"]] = None, - attention_mask: Optional[Int[torch.Tensor, "batch pos"]] = None, - ) -> Optional[Float[torch.Tensor, "batch pos d_vocab"]]: ... + start_at_layer: Optional[int] = None, + stop_at_layer: Optional[int] = None, + one_zero_attention_mask: Optional[Int[torch.Tensor, "batch pos"]] = None, + ) -> Float[torch.Tensor, "batch pos d_vocab"]: + """Processes input through the encoder layers and returns the resulting residual stream. + + Args: + input: Input tokens as integers with shape (batch, position) + token_type_ids: Optional binary ids indicating segment membership. + Shape (batch_size, sequence_length). For example, with input + "[CLS] Sentence A [SEP] Sentence B [SEP]", token_type_ids would be + [0, 0, ..., 0, 1, ..., 1, 1] where 0 marks tokens from sentence A + and 1 marks tokens from sentence B. + one_zero_attention_mask: Optional binary mask of shape (batch_size, sequence_length) + where 1 indicates tokens to attend to and 0 indicates tokens to ignore. + Used primarily for handling padding in batched inputs. 
+ + Returns: + resid: Final residual stream tensor of shape (batch, position, d_model) + + Raises: + AssertionError: If using string input without a tokenizer + """ + + if tokens.device.type != self.cfg.device: + tokens = tokens.to(self.cfg.device) + if one_zero_attention_mask is not None: + one_zero_attention_mask = one_zero_attention_mask.to(self.cfg.device) + + resid = self.hook_full_embed(self.embed(tokens, token_type_ids)) + + large_negative_number = -torch.inf + mask = ( + repeat(1 - one_zero_attention_mask, "batch pos -> batch 1 1 pos") + if one_zero_attention_mask is not None + else None + ) + additive_attention_mask = ( + torch.where(mask == 1, large_negative_number, 0) if mask is not None else None + ) + + if start_at_layer is None: + start_at_layer = 0 + + idx_and_block = list(zip(range(self.cfg.n_layers), self.blocks)) + for _, block in idx_and_block[start_at_layer:stop_at_layer]: + resid = block(resid, additive_attention_mask) + + return resid + + @overload def forward( self, input: Int[torch.Tensor, "batch pos"], - return_type: Optional[str] = "logits", + return_type: Optional[str], token_type_ids: Optional[Int[torch.Tensor, "batch pos"]] = None, attention_mask: Optional[Int[torch.Tensor, "batch pos"]] = None, - ) -> Optional[Float[torch.Tensor, "batch pos d_vocab"]]: + start_at_layer: Optional[int] = None, + stop_at_layer: Optional[int] = None, + tokens: Optional[Int[torch.Tensor, "batch pos"]] = None, + ) -> Union[Float[torch.Tensor, "batch pos d_vocab"], None]: """Input must be a batch of tokens. Strings and lists of strings are not yet supported. return_type Optional[str]: The type of output to return. Can be one of: None (return nothing, don't calculate logits), or 'logits' (return logits). @@ -119,51 +195,72 @@ def forward( attention_mask: Optional[torch.Tensor]: A binary mask which indicates which tokens should be attended to (1) and which should be ignored (0). Primarily used for padding variable-length sentences in a batch. 
For instance, in a batch with sentences of differing lengths, shorter sentences are padded with 0s on the right. If not provided, the model assumes all tokens should be attended to. """ + + if start_at_layer is None: + if isinstance(input, str) or isinstance(input, list): + assert self.tokenizer is not None, "Must provide a tokenizer if input is a string" + residual, token_type_ids_from_tokenizer, attention_mask = self.to_tokens(input) + + # If token_type_ids or attention mask are not provided, use the ones from the tokenizer + token_type_ids = ( + token_type_ids_from_tokenizer if token_type_ids is None else token_type_ids + ) + one_zero_attention_mask = ( + attention_mask if one_zero_attention_mask is None else one_zero_attention_mask + ) + else: + assert type(input) is torch.Tensor + residual = input - tokens = input - - if tokens.device.type != self.cfg.device: - tokens = tokens.to(self.cfg.device) + if residual.device.type != self.cfg.device: + residual = residual.to(self.cfg.device) if attention_mask is not None: attention_mask = attention_mask.to(self.cfg.device) - - resid = self.hook_full_embed(self.embed(tokens, token_type_ids)) - - large_negative_number = -torch.inf - mask = ( - repeat(1 - attention_mask, "batch pos -> batch 1 1 pos") - if attention_mask is not None - else None - ) - additive_attention_mask = ( - torch.where(mask == 1, large_negative_number, 0) - if mask is not None - else None + if start_at_layer is None: + start_at_layer = 0 + + resid = self.encoder_output( + residual, + token_type_ids=token_type_ids, + start_at_layer=start_at_layer, + stop_at_layer=stop_at_layer, + one_zero_attention_mask=one_zero_attention_mask, ) - for block in self.blocks: - resid = block(resid, additive_attention_mask) - - if return_type == "embeddings": + if stop_at_layer is not None or return_type == 'embeddings': return resid resid = self.mlm_head(resid) + logits = self.unembed(resid) + + if return_type == "predictions": + # Get predictions for masked tokens + 
logprobs = logits[tokens == self.tokenizer.mask_token_id].log_softmax(dim=-1) + predictions = self.tokenizer.decode(logprobs.argmax(dim=-1)) + + # If input was a list of strings, split predictions into a list + if " " in predictions: + # Split along space + predictions = predictions.split(" ") + predictions = [f"Prediction {i}: {p}" for i, p in enumerate(predictions)] + return predictions - if return_type is None: + elif return_type is None: return None - logits = self.unembed(resid) return logits @overload def run_with_cache( self, *model_args, return_cache_object: Literal[True] = True, **kwargs - ) -> Tuple[Float[torch.Tensor, "batch pos d_vocab"], ActivationCache]: ... + ) -> Tuple[Float[torch.Tensor, "batch pos d_vocab"], ActivationCache,]: + ... @overload def run_with_cache( self, *model_args, return_cache_object: Literal[False], **kwargs - ) -> Tuple[Float[torch.Tensor, "batch pos d_vocab"], Dict[str, torch.Tensor]]: ... + ) -> Tuple[Float[torch.Tensor, "batch pos d_vocab"], Dict[str, torch.Tensor],]: + ... 
def run_with_cache( self, @@ -182,9 +279,7 @@ def run_with_cache( *model_args, remove_batch_dim=remove_batch_dim, **kwargs ) if return_cache_object: - cache = ActivationCache( - cache_dict, self, has_batch_dim=not remove_batch_dim - ) + cache = ActivationCache(cache_dict, self, has_batch_dim=not remove_batch_dim) return out, cache else: return out, cache_dict @@ -307,86 +402,62 @@ def W_E_pos(self) -> Float[torch.Tensor, "d_vocab+n_ctx d_model"]: @property def W_K(self) -> Float[torch.Tensor, "n_layers n_heads d_model d_head"]: """Stacks the key weights across all layers""" - return torch.stack( - [cast(BertBlock, block).attn.W_K for block in self.blocks], dim=0 - ) + return torch.stack([cast(BertBlock, block).attn.W_K for block in self.blocks], dim=0) @property def W_Q(self) -> Float[torch.Tensor, "n_layers n_heads d_model d_head"]: """Stacks the query weights across all layers""" - return torch.stack( - [cast(BertBlock, block).attn.W_Q for block in self.blocks], dim=0 - ) + return torch.stack([cast(BertBlock, block).attn.W_Q for block in self.blocks], dim=0) @property def W_V(self) -> Float[torch.Tensor, "n_layers n_heads d_model d_head"]: """Stacks the value weights across all layers""" - return torch.stack( - [cast(BertBlock, block).attn.W_V for block in self.blocks], dim=0 - ) + return torch.stack([cast(BertBlock, block).attn.W_V for block in self.blocks], dim=0) @property def W_O(self) -> Float[torch.Tensor, "n_layers n_heads d_head d_model"]: """Stacks the attn output weights across all layers""" - return torch.stack( - [cast(BertBlock, block).attn.W_O for block in self.blocks], dim=0 - ) + return torch.stack([cast(BertBlock, block).attn.W_O for block in self.blocks], dim=0) @property def W_in(self) -> Float[torch.Tensor, "n_layers d_model d_mlp"]: """Stacks the MLP input weights across all layers""" - return torch.stack( - [cast(BertBlock, block).mlp.W_in for block in self.blocks], dim=0 - ) + return torch.stack([cast(BertBlock, block).mlp.W_in for block in 
self.blocks], dim=0) @property def W_out(self) -> Float[torch.Tensor, "n_layers d_mlp d_model"]: """Stacks the MLP output weights across all layers""" - return torch.stack( - [cast(BertBlock, block).mlp.W_out for block in self.blocks], dim=0 - ) + return torch.stack([cast(BertBlock, block).mlp.W_out for block in self.blocks], dim=0) @property def b_K(self) -> Float[torch.Tensor, "n_layers n_heads d_head"]: """Stacks the key biases across all layers""" - return torch.stack( - [cast(BertBlock, block).attn.b_K for block in self.blocks], dim=0 - ) + return torch.stack([cast(BertBlock, block).attn.b_K for block in self.blocks], dim=0) @property def b_Q(self) -> Float[torch.Tensor, "n_layers n_heads d_head"]: """Stacks the query biases across all layers""" - return torch.stack( - [cast(BertBlock, block).attn.b_Q for block in self.blocks], dim=0 - ) + return torch.stack([cast(BertBlock, block).attn.b_Q for block in self.blocks], dim=0) @property def b_V(self) -> Float[torch.Tensor, "n_layers n_heads d_head"]: """Stacks the value biases across all layers""" - return torch.stack( - [cast(BertBlock, block).attn.b_V for block in self.blocks], dim=0 - ) + return torch.stack([cast(BertBlock, block).attn.b_V for block in self.blocks], dim=0) @property def b_O(self) -> Float[torch.Tensor, "n_layers d_model"]: """Stacks the attn output biases across all layers""" - return torch.stack( - [cast(BertBlock, block).attn.b_O for block in self.blocks], dim=0 - ) + return torch.stack([cast(BertBlock, block).attn.b_O for block in self.blocks], dim=0) @property def b_in(self) -> Float[torch.Tensor, "n_layers d_mlp"]: """Stacks the MLP input biases across all layers""" - return torch.stack( - [cast(BertBlock, block).mlp.b_in for block in self.blocks], dim=0 - ) + return torch.stack([cast(BertBlock, block).mlp.b_in for block in self.blocks], dim=0) @property def b_out(self) -> Float[torch.Tensor, "n_layers d_model"]: """Stacks the MLP output biases across all layers""" - return torch.stack( - 
[cast(BertBlock, block).mlp.b_out for block in self.blocks], dim=0 - ) + return torch.stack([cast(BertBlock, block).mlp.b_out for block in self.blocks], dim=0) @property def QK(self) -> FactoredMatrix: # [n_layers, n_heads, d_model, d_model] @@ -401,8 +472,4 @@ def OV(self) -> FactoredMatrix: # [n_layers, n_heads, d_model, d_model] def all_head_labels(self) -> List[str]: """Returns a list of strings with the format "L{l}H{h}", where l is the layer index and h is the head index.""" - return [ - f"L{l}H{h}" - for l in range(self.cfg.n_layers) - for h in range(self.cfg.n_heads) - ] + return [f"L{l}H{h}" for l in range(self.cfg.n_layers) for h in range(self.cfg.n_heads)] From 28bc8f61c04402878a3b8c7d4864fc3059001e20 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E2=80=9CAndrew?= Date: Sat, 22 Feb 2025 15:41:16 +0000 Subject: [PATCH 02/21] update the sequence classification components --- src/mechir/modelling/hooked/HookedEncoder.py | 4 +- .../HookedEncoderForSequenceClassification.py | 121 +----------------- 2 files changed, 7 insertions(+), 118 deletions(-) diff --git a/src/mechir/modelling/hooked/HookedEncoder.py b/src/mechir/modelling/hooked/HookedEncoder.py index e12f691..698d3a2 100644 --- a/src/mechir/modelling/hooked/HookedEncoder.py +++ b/src/mechir/modelling/hooked/HookedEncoder.py @@ -176,16 +176,14 @@ def encoder_output( return resid - @overload def forward( self, input: Int[torch.Tensor, "batch pos"], - return_type: Optional[str], + return_type: Optional[str] = 'embeddings', token_type_ids: Optional[Int[torch.Tensor, "batch pos"]] = None, attention_mask: Optional[Int[torch.Tensor, "batch pos"]] = None, start_at_layer: Optional[int] = None, stop_at_layer: Optional[int] = None, - tokens: Optional[Int[torch.Tensor, "batch pos"]] = None, ) -> Union[Float[torch.Tensor, "batch pos d_vocab"], None]: """Input must be a batch of tokens. Strings and lists of strings are not yet supported. 
diff --git a/src/mechir/modelling/hooked/HookedEncoderForSequenceClassification.py b/src/mechir/modelling/hooked/HookedEncoderForSequenceClassification.py index 181c504..03de1af 100644 --- a/src/mechir/modelling/hooked/HookedEncoderForSequenceClassification.py +++ b/src/mechir/modelling/hooked/HookedEncoderForSequenceClassification.py @@ -38,30 +38,14 @@ def __init__(self, cfg, tokenizer=None, move_to_device=True, **kwargs): self.classifier = ClassificationHead(cfg) self.setup() - @overload def forward( self, input: Int[torch.Tensor, "batch pos"], - return_type: Literal["logits"], - token_type_ids: Optional[Int[torch.Tensor, "batch pos"]] = None, - attention_mask: Optional[Int[torch.Tensor, "batch pos"]] = None, - ) -> Float[torch.Tensor, "batch pos d_vocab"]: ... - - @overload - def forward( - self, - input: Int[torch.Tensor, "batch pos"], - return_type: Literal[None], - token_type_ids: Optional[Int[torch.Tensor, "batch pos"]] = None, - attention_mask: Optional[Int[torch.Tensor, "batch pos"]] = None, - ) -> Optional[Float[torch.Tensor, "batch pos d_vocab"]]: ... - - def forward( - self, - input: Int[torch.Tensor, "batch pos"], - return_type: Optional[str] = "logits", + return_type: Optional[str] = 'embeddings', token_type_ids: Optional[Int[torch.Tensor, "batch pos"]] = None, attention_mask: Optional[Int[torch.Tensor, "batch pos"]] = None, + start_at_layer: Optional[int] = None, + stop_at_layer: Optional[int] = None, ) -> Optional[Float[torch.Tensor, "batch pos d_vocab"]]: """Input must be a batch of tokens. Strings and lists of strings are not yet supported. 
@@ -75,108 +59,15 @@ def forward( hidden = super().forward( input, token_type_ids=token_type_ids, + start_at_layer=start_at_layer, + stop_at_layer=stop_at_layer, return_type="embeddings", attention_mask=attention_mask, ) - if return_type == "embeddings": + if return_type == "embeddings" or stop_at_layer is not None: return hidden logits = self.classifier(hidden[:, 0, :]) if return_type is None: return None return logits - - @overload - def run_with_cache( - self, *model_args, return_cache_object: Literal[True] = True, **kwargs - ) -> Tuple[Float[torch.Tensor, "batch n_labels"], ActivationCache]: ... - - @overload - def run_with_cache( - self, *model_args, return_cache_object: Literal[False], **kwargs - ) -> Tuple[Float[torch.Tensor, "batch n_labels"], Dict[str, torch.Tensor]]: ... - - def run_with_cache( - self, - *model_args, - return_cache_object: bool = True, - remove_batch_dim: bool = False, - **kwargs, - ) -> Tuple[ - Float[torch.Tensor, "batch n_labels"], - Union[ActivationCache, Dict[str, torch.Tensor]], - ]: - """ - Wrapper around run_with_cache in HookedRootModule. If return_cache_object is True, this will return an ActivationCache object, with a bunch of useful HookedTransformer specific methods, otherwise it will return a dictionary of activations as in HookedRootModule. This function was copied directly from HookedTransformer. 
- """ - out, cache_dict = super().run_with_cache( - *model_args, remove_batch_dim=remove_batch_dim, **kwargs - ) - if return_cache_object: - cache = ActivationCache( - cache_dict, self, has_batch_dim=not remove_batch_dim - ) - return out, cache - else: - return out, cache_dict - - @classmethod - def from_pretrained( - cls, - model_name: str, - checkpoint_index: Optional[int] = None, - checkpoint_value: Optional[int] = None, - hf_model=None, - device: Optional[str] = None, - tokenizer=None, - move_to_device=True, - dtype=torch.float32, - **from_pretrained_kwargs, - ) -> HookedEncoderForSequenceClassification: - """Loads in the pretrained weights from huggingface. Currently supports loading weight from HuggingFace BertForMaskedLM. Unlike HookedTransformer, this does not yet do any preprocessing on the model.""" - logging.warning( - "Support for BERT in TransformerLens is currently experimental, until such a time when it has feature " - "parity with HookedTransformer and has been tested on real research tasks. Until then, backward " - "compatibility is not guaranteed. Please see the docs for information on the limitations of the current " - "implementation." - "\n" - "If using BERT for interpretability research, keep in mind that BERT has some significant architectural " - "differences to GPT. For example, LayerNorms are applied *after* the attention and MLP components, meaning " - "that the last LayerNorm in a block cannot be folded." 
- ) - - assert not ( - from_pretrained_kwargs.get("load_in_8bit", False) - or from_pretrained_kwargs.get("load_in_4bit", False) - ), "Quantization not supported" - - if "torch_dtype" in from_pretrained_kwargs: - dtype = from_pretrained_kwargs["torch_dtype"] - - official_model_name = loading.get_official_model_name(model_name) - - cfg = loading.get_pretrained_model_config( - official_model_name, - checkpoint_index=checkpoint_index, - checkpoint_value=checkpoint_value, - fold_ln=False, - device=device, - n_devices=1, - dtype=dtype, - **from_pretrained_kwargs, - ) - - state_dict = loading.get_pretrained_state_dict( - official_model_name, cfg, hf_model, dtype=dtype, **from_pretrained_kwargs - ) - - model = cls(cfg, tokenizer, move_to_device=False) - - model.load_state_dict(state_dict, strict=False) - - if move_to_device: - model.to(cfg.device) - - print(f"Loaded pretrained model {model_name} into HookedTransformer") - - return model From 6247609a5b5dc995aed8b72a67f30d125b09481d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E2=80=9CAndrew?= Date: Sat, 22 Feb 2025 15:45:05 +0000 Subject: [PATCH 03/21] update electra --- src/mechir/modelling/hooked/HookedElectra.py | 134 ++---------------- .../HookedEncoderForSequenceClassification.py | 5 +- 2 files changed, 11 insertions(+), 128 deletions(-) diff --git a/src/mechir/modelling/hooked/HookedElectra.py b/src/mechir/modelling/hooked/HookedElectra.py index 7e6e18f..979d5f9 100644 --- a/src/mechir/modelling/hooked/HookedElectra.py +++ b/src/mechir/modelling/hooked/HookedElectra.py @@ -7,19 +7,15 @@ from __future__ import annotations import logging -from typing import Dict, Optional, Tuple, Union, overload +from typing import Dict, Optional, Union import torch from jaxtyping import Float, Int from torch import nn -from typing_extensions import Literal from .HookedTransformerConfig import HookedTransformerConfig -from transformer_lens.ActivationCache import ActivationCache from .HookedEncoder import HookedEncoder from 
transformer_lens.hook_points import HookPoint from .linear import ClassificationHead, HiddenLinear -from . import loading_from_pretrained as loading - class ElectraClassificationHead(nn.Module): """ @@ -44,7 +40,7 @@ def forward(self, resid: Float[torch.Tensor, "batch d_model"]) -> torch.Tensor: class HookedElectraForSequenceClassification(HookedEncoder): """ - This class implements a BERT-style encoder for ELECTRA using the components in ./components.py, with HookPoints on every interesting activation. It inherits from HookedRootModule. + This class implements a BERT-style encoder using the components in ./components.py, with HookPoints on every interesting activation. It inherits from HookedRootModule. Limitations: - The current MVP implementation supports only the masked language modelling (MLM) task. Next sentence prediction (NSP), causal language modelling, and other tasks are not yet supported. @@ -54,37 +50,20 @@ class HookedElectraForSequenceClassification(HookedEncoder): - There is no preprocessing (e.g. LayerNorm folding) when loading a pretrained model - The model only accepts tokens as inputs, and not strings, or lists of strings """ - def __init__(self, cfg, tokenizer=None, move_to_device=True, **kwargs): super().__init__(cfg, tokenizer, move_to_device, **kwargs) self.classifier = ElectraClassificationHead(cfg) self.setup() - @overload - def forward( - self, - input: Int[torch.Tensor, "batch pos"], - return_type: Literal["logits"], - token_type_ids: Optional[Int[torch.Tensor, "batch pos"]] = None, - attention_mask: Optional[Int[torch.Tensor, "batch pos"]] = None, - ) -> Float[torch.Tensor, "batch n_labels"]: ... - - @overload def forward( self, input: Int[torch.Tensor, "batch pos"], - return_type: Literal[None], + return_type: Optional[str] = 'embeddings', token_type_ids: Optional[Int[torch.Tensor, "batch pos"]] = None, attention_mask: Optional[Int[torch.Tensor, "batch pos"]] = None, - ) -> Optional[Float[torch.Tensor, "batch n_labels"]]: ... 
- - def forward( - self, - input: Int[torch.Tensor, "batch pos"], - return_type: Optional[str] = "logits", - token_type_ids: Optional[Int[torch.Tensor, "batch pos"]] = None, - attention_mask: Optional[Int[torch.Tensor, "batch pos"]] = None, - ) -> Optional[Float[torch.Tensor, "batch n_labels"]]: + start_at_layer: Optional[int] = None, + stop_at_layer: Optional[int] = None, + ) -> Optional[Float[torch.Tensor, "batch pos d_vocab"]]: """Input must be a batch of tokens. Strings and lists of strings are not yet supported. return_type Optional[str]: The type of output to return. Can be one of: None (return nothing, don't calculate logits), or 'logits' (return logits). @@ -97,108 +76,15 @@ def forward( hidden = super().forward( input, token_type_ids=token_type_ids, + start_at_layer=start_at_layer, + stop_at_layer=stop_at_layer, return_type="embeddings", attention_mask=attention_mask, ) - if return_type == "embeddings": + if return_type == "embeddings" or stop_at_layer is not None: return hidden logits = self.classifier(hidden[:, 0, :]) if return_type is None: return None - return logits - - @overload - def run_with_cache( - self, *model_args, return_cache_object: Literal[True] = True, **kwargs - ) -> Tuple[Float[torch.Tensor, "batch pos d_vocab"], ActivationCache]: ... - - @overload - def run_with_cache( - self, *model_args, return_cache_object: Literal[False], **kwargs - ) -> Tuple[Float[torch.Tensor, "batch pos d_vocab"], Dict[str, torch.Tensor]]: ... - - def run_with_cache( - self, - *model_args, - return_cache_object: bool = True, - remove_batch_dim: bool = False, - **kwargs, - ) -> Tuple[ - Float[torch.Tensor, "batch pos d_vocab"], - Union[ActivationCache, Dict[str, torch.Tensor]], - ]: - """ - Wrapper around run_with_cache in HookedRootModule. If return_cache_object is True, this will return an ActivationCache object, with a bunch of useful HookedTransformer specific methods, otherwise it will return a dictionary of activations as in HookedRootModule. 
This function was copied directly from HookedTransformer. - """ - out, cache_dict = super().run_with_cache( - *model_args, remove_batch_dim=remove_batch_dim, **kwargs - ) - if return_cache_object: - cache = ActivationCache( - cache_dict, self, has_batch_dim=not remove_batch_dim - ) - return out, cache - else: - return out, cache_dict - - @classmethod - def from_pretrained( - cls, - model_name: str, - checkpoint_index: Optional[int] = None, - checkpoint_value: Optional[int] = None, - hf_model=None, - device: Optional[str] = None, - tokenizer=None, - move_to_device=True, - dtype=torch.float32, - **from_pretrained_kwargs, - ) -> HookedElectraForSequenceClassification: - """Loads in the pretrained weights from huggingface. Currently supports loading weight from HuggingFace BertForMaskedLM. Unlike HookedTransformer, this does not yet do any preprocessing on the model.""" - logging.warning( - "Support for BERT in TransformerLens is currently experimental, until such a time when it has feature " - "parity with HookedTransformer and has been tested on real research tasks. Until then, backward " - "compatibility is not guaranteed. Please see the docs for information on the limitations of the current " - "implementation." - "\n" - "If using BERT for interpretability research, keep in mind that BERT has some significant architectural " - "differences to GPT. For example, LayerNorms are applied *after* the attention and MLP components, meaning " - "that the last LayerNorm in a block cannot be folded." 
- ) - - assert not ( - from_pretrained_kwargs.get("load_in_8bit", False) - or from_pretrained_kwargs.get("load_in_4bit", False) - ), "Quantization not supported" - - if "torch_dtype" in from_pretrained_kwargs: - dtype = from_pretrained_kwargs["torch_dtype"] - - official_model_name = loading.get_official_model_name(model_name) - - cfg = loading.get_pretrained_model_config( - official_model_name, - checkpoint_index=checkpoint_index, - checkpoint_value=checkpoint_value, - fold_ln=False, - device=device, - n_devices=1, - dtype=dtype, - **from_pretrained_kwargs, - ) - - state_dict = loading.get_pretrained_state_dict( - official_model_name, cfg, hf_model, dtype=dtype, **from_pretrained_kwargs - ) - - model = cls(cfg, tokenizer, move_to_device=False) - - model.load_state_dict(state_dict, strict=False) - - if move_to_device: - model.to(cfg.device) - - print(f"Loaded pretrained model {model_name} into HookedTransformer") - - return model + return logits \ No newline at end of file diff --git a/src/mechir/modelling/hooked/HookedEncoderForSequenceClassification.py b/src/mechir/modelling/hooked/HookedEncoderForSequenceClassification.py index 03de1af..f385dc8 100644 --- a/src/mechir/modelling/hooked/HookedEncoderForSequenceClassification.py +++ b/src/mechir/modelling/hooked/HookedEncoderForSequenceClassification.py @@ -6,15 +6,12 @@ from __future__ import annotations -import logging -from typing import Dict, Optional, Tuple, Union, overload - +from typing import Optional import torch from jaxtyping import Float, Int from torch import nn from typing_extensions import Literal -from transformer_lens.ActivationCache import ActivationCache from .HookedEncoder import HookedEncoder from .linear import ClassificationHead from . 
import loading_from_pretrained as loading From 3ad3b4859a32bd3584b671a5cf742311c3b46f45 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E2=80=9CAndrew?= Date: Sat, 22 Feb 2025 16:26:40 +0000 Subject: [PATCH 04/21] mo updates --- .../modelling/hooked/HookedDistilBert.py | 317 +----------------- src/mechir/modelling/hooked/HookedElectra.py | 5 +- 2 files changed, 2 insertions(+), 320 deletions(-) diff --git a/src/mechir/modelling/hooked/HookedDistilBert.py b/src/mechir/modelling/hooked/HookedDistilBert.py index ef928cf..ba7e71e 100644 --- a/src/mechir/modelling/hooked/HookedDistilBert.py +++ b/src/mechir/modelling/hooked/HookedDistilBert.py @@ -24,9 +24,8 @@ from .hooked_components import DistilBertEmbed -class HookedDistilBert(HookedRootModule): +class HookedDistilBert(HookedEncoder): def __init__(self, cfg, tokenizer=None, move_to_device=True, **kwargs): - super().__init__() if isinstance(cfg, Dict): cfg = HookedTransformerConfig(**cfg) elif isinstance(cfg, str): @@ -68,320 +67,6 @@ def __init__(self, cfg, tokenizer=None, move_to_device=True, **kwargs): self.setup() - @overload - def forward( - self, - input: Int[torch.Tensor, "batch pos"], - return_type: Literal["embeddings"], - token_type_ids: Optional[Int[torch.Tensor, "batch pos"]] = None, - attention_mask: Optional[Int[torch.Tensor, "batch pos"]] = None, - ) -> Float[torch.Tensor, "batch pos d_vocab"]: ... - - @overload - def forward( - self, - input: Int[torch.Tensor, "batch pos"], - return_type: Literal[None], - token_type_ids: Optional[Int[torch.Tensor, "batch pos"]] = None, - attention_mask: Optional[Int[torch.Tensor, "batch pos"]] = None, - ) -> Optional[Float[torch.Tensor, "batch pos d_vocab"]]: ... 
- - def forward( - self, - input: Int[torch.Tensor, "batch pos"], - return_type: Optional[str] = "embeddings", - token_type_ids: Optional[Int[torch.Tensor, "batch pos"]] = None, - attention_mask: Optional[Int[torch.Tensor, "batch pos"]] = None, - ) -> Optional[Float[torch.Tensor, "batch pos d_vocab"]]: - """Input must be a batch of tokens. Strings and lists of strings are not yet supported. - - return_type Optional[str]: The type of output to return. Can be one of: None (return nothing, don't calculate logits), or 'logits' (return logits). - - token_type_ids Optional[torch.Tensor]: Binary ids indicating whether a token belongs to sequence A or B. For example, for two sentences: "[CLS] Sentence A [SEP] Sentence B [SEP]", token_type_ids would be [0, 0, ..., 0, 1, ..., 1, 1]. `0` represents tokens from Sentence A, `1` from Sentence B. If not provided, BERT assumes a single sequence input. Typically, shape is (batch_size, sequence_length). - - attention_mask: Optional[torch.Tensor]: A binary mask which indicates which tokens should be attended to (1) and which should be ignored (0). Primarily used for padding variable-length sentences in a batch. For instance, in a batch with sentences of differing lengths, shorter sentences are padded with 0s on the right. If not provided, the model assumes all tokens should be attended to. 
- """ - - tokens = input - - if tokens.device.type != self.cfg.device: - tokens = tokens.to(self.cfg.device) - if attention_mask is not None: - attention_mask = attention_mask.to(self.cfg.device) - - resid = self.hook_full_embed(self.embed(tokens)) - - large_negative_number = -torch.inf - mask = ( - repeat(1 - attention_mask, "batch pos -> batch 1 1 pos") - if attention_mask is not None - else None - ) - additive_attention_mask = ( - torch.where(mask == 1, large_negative_number, 0) - if mask is not None - else None - ) - - for block in self.blocks: - resid = block(resid, additive_attention_mask) - - if return_type == "embeddings": - return resid - - resid = self.mlm_head(resid) - - if return_type is None: - return - - logits = self.unembed(resid) - return logits - - @overload - def run_with_cache( - self, *model_args, return_cache_object: Literal[True] = True, **kwargs - ) -> Tuple[Float[torch.Tensor, "batch pos d_vocab"], ActivationCache]: ... - - @overload - def run_with_cache( - self, *model_args, return_cache_object: Literal[False] = False, **kwargs - ) -> Tuple[Float[torch.Tensor, "batch pos d_vocab"], Dict[str, torch.Tensor]]: ... - - def run_with_cache( - self, - *model_args, - return_cache_object: bool = True, - remove_batch_dim: bool = False, - **kwargs, - ) -> Tuple[ - Float[torch.Tensor, "batch pos d_vocab"], - Union[ActivationCache, Dict[str, torch.Tensor]], - ]: - """ - Wrapper around run_with_cache in HookedRootModule. If return_cache_object is True, this will return an ActivationCache object, with a bunch of useful HookedTransformer specific methods, otherwise it will return a dictionary of activations as in HookedRootModule. This function was copied directly from HookedTransformer. 
- """ - out, cache_dict = super().run_with_cache( - *model_args, remove_batch_dim=remove_batch_dim, **kwargs - ) - if return_cache_object: - cache = ActivationCache( - cache_dict, self, has_batch_dim=not remove_batch_dim - ) - return out, cache - else: - return out, cache_dict - - def to( - self, - device_or_dtype: Union[torch.device, str, torch.dtype], - print_details: bool = True, - ): - return devices.move_to_and_update_config(self, device_or_dtype, print_details) - - def cuda(self): - # Wrapper around cuda that also changes self.cfg.device - return self.to("cuda") - - def cpu(self): - # Wrapper around cuda that also changes self.cfg.device - return self.to("cpu") - - def mps(self): - # Wrapper around cuda that also changes self.cfg.device - return self.to("mps") - - @classmethod - def from_pretrained( - cls, - model_name: str, - checkpoint_index: Optional[int] = None, - checkpoint_value: Optional[int] = None, - hf_model=None, - device: Optional[str] = None, - tokenizer=None, - move_to_device=True, - dtype=torch.float32, - **from_pretrained_kwargs, - ) -> HookedDistilBert: - """Loads in the pretrained weights from huggingface. Currently supports loading weight from HuggingFace BertForMaskedLM. Unlike HookedTransformer, this does not yet do any preprocessing on the model.""" - logging.warning( - "Support for BERT in TransformerLens is currently experimental, until such a time when it has feature " - "parity with HookedTransformer and has been tested on real research tasks. Until then, backward " - "compatibility is not guaranteed. Please see the docs for information on the limitations of the current " - "implementation." - "\n" - "If using BERT for interpretability research, keep in mind that BERT has some significant architectural " - "differences to GPT. For example, LayerNorms are applied *after* the attention and MLP components, meaning " - "that the last LayerNorm in a block cannot be folded." 
- ) - - assert not ( - from_pretrained_kwargs.get("load_in_8bit", False) - or from_pretrained_kwargs.get("load_in_4bit", False) - ), "Quantization not supported" - - if "torch_dtype" in from_pretrained_kwargs: - dtype = from_pretrained_kwargs["torch_dtype"] - - official_model_name = loading.get_official_model_name(model_name) - - cfg = loading.get_pretrained_model_config( - official_model_name, - checkpoint_index=checkpoint_index, - checkpoint_value=checkpoint_value, - fold_ln=False, - device=device, - n_devices=1, - dtype=dtype, - **from_pretrained_kwargs, - ) - - state_dict = loading.get_pretrained_state_dict( - official_model_name, cfg, hf_model, dtype=dtype, **from_pretrained_kwargs - ) - - model = cls(cfg, tokenizer, move_to_device=False) - - model.load_state_dict(state_dict, strict=False) - - if move_to_device: - model.to(cfg.device) - - print(f"Loaded pretrained model {model_name} into HookedEncoder") - - return model - - @property - def W_U(self) -> Float[torch.Tensor, "d_model d_vocab"]: - """ - Convenience to get the unembedding matrix (ie the linear map from the final residual stream to the output logits) - """ - return self.unembed.W_U - - @property - def b_U(self) -> Float[torch.Tensor, "d_vocab"]: - return self.unembed.b_U - - @property - def W_E(self) -> Float[torch.Tensor, "d_vocab d_model"]: - """ - Convenience to get the embedding matrix - """ - return self.embed.embed.W_E - - @property - def W_pos(self) -> Float[torch.Tensor, "n_ctx d_model"]: - """ - Convenience function to get the positional embedding. Only works on models with absolute positional embeddings! - """ - return self.embed.pos_embed.W_pos - - @property - def W_E_pos(self) -> Float[torch.Tensor, "d_vocab+n_ctx d_model"]: - """ - Concatenated W_E and W_pos. Used as a full (overcomplete) basis of the input space, useful for full QK and full OV circuits. 
- """ - return torch.cat([self.W_E, self.W_pos], dim=0) - - @property - def W_K(self) -> Float[torch.Tensor, "n_layers n_heads d_model d_head"]: - """Stacks the key weights across all layers""" - return torch.stack( - [cast(BertBlock, block).attn.W_K for block in self.blocks], dim=0 - ) - - @property - def W_Q(self) -> Float[torch.Tensor, "n_layers n_heads d_model d_head"]: - """Stacks the query weights across all layers""" - return torch.stack( - [cast(BertBlock, block).attn.W_Q for block in self.blocks], dim=0 - ) - - @property - def W_V(self) -> Float[torch.Tensor, "n_layers n_heads d_model d_head"]: - """Stacks the value weights across all layers""" - return torch.stack( - [cast(BertBlock, block).attn.W_V for block in self.blocks], dim=0 - ) - - @property - def W_O(self) -> Float[torch.Tensor, "n_layers n_heads d_head d_model"]: - """Stacks the attn output weights across all layers""" - return torch.stack( - [cast(BertBlock, block).attn.W_O for block in self.blocks], dim=0 - ) - - @property - def W_in(self) -> Float[torch.Tensor, "n_layers d_model d_mlp"]: - """Stacks the MLP input weights across all layers""" - return torch.stack( - [cast(BertBlock, block).mlp.W_in for block in self.blocks], dim=0 - ) - - @property - def W_out(self) -> Float[torch.Tensor, "n_layers d_mlp d_model"]: - """Stacks the MLP output weights across all layers""" - return torch.stack( - [cast(BertBlock, block).mlp.W_out for block in self.blocks], dim=0 - ) - - @property - def b_K(self) -> Float[torch.Tensor, "n_layers n_heads d_head"]: - """Stacks the key biases across all layers""" - return torch.stack( - [cast(BertBlock, block).attn.b_K for block in self.blocks], dim=0 - ) - - @property - def b_Q(self) -> Float[torch.Tensor, "n_layers n_heads d_head"]: - """Stacks the query biases across all layers""" - return torch.stack( - [cast(BertBlock, block).attn.b_Q for block in self.blocks], dim=0 - ) - - @property - def b_V(self) -> Float[torch.Tensor, "n_layers n_heads d_head"]: - """Stacks 
the value biases across all layers""" - return torch.stack( - [cast(BertBlock, block).attn.b_V for block in self.blocks], dim=0 - ) - - @property - def b_O(self) -> Float[torch.Tensor, "n_layers d_model"]: - """Stacks the attn output biases across all layers""" - return torch.stack( - [cast(BertBlock, block).attn.b_O for block in self.blocks], dim=0 - ) - - @property - def b_in(self) -> Float[torch.Tensor, "n_layers d_mlp"]: - """Stacks the MLP input biases across all layers""" - return torch.stack( - [cast(BertBlock, block).mlp.b_in for block in self.blocks], dim=0 - ) - - @property - def b_out(self) -> Float[torch.Tensor, "n_layers d_model"]: - """Stacks the MLP output biases across all layers""" - return torch.stack( - [cast(BertBlock, block).mlp.b_out for block in self.blocks], dim=0 - ) - - @property - def QK(self) -> FactoredMatrix: # [n_layers, n_heads, d_model, d_model] - return FactoredMatrix(self.W_Q, self.W_K.transpose(-2, -1)) - - @property - def OV(self) -> FactoredMatrix: # [n_layers, n_heads, d_model, d_model] - return FactoredMatrix(self.W_V, self.W_O) - - def all_head_labels(self) -> List[str]: - return [ - f"L{l}H{h}" - for l in range(self.cfg.n_layers) - for h in range(self.cfg.n_heads) - ] - class HookedDistilBertForSequenceClassification(HookedDistilBert): """ diff --git a/src/mechir/modelling/hooked/HookedElectra.py b/src/mechir/modelling/hooked/HookedElectra.py index 979d5f9..fa4aee1 100644 --- a/src/mechir/modelling/hooked/HookedElectra.py +++ b/src/mechir/modelling/hooked/HookedElectra.py @@ -42,9 +42,6 @@ class HookedElectraForSequenceClassification(HookedEncoder): """ This class implements a BERT-style encoder using the components in ./components.py, with HookPoints on every interesting activation. It inherits from HookedRootModule. - Limitations: - - The current MVP implementation supports only the masked language modelling (MLM) task. Next sentence prediction (NSP), causal language modelling, and other tasks are not yet supported. 
- - Also note that model does not include dropouts, which may lead to inconsistent results from training or fine-tuning. Like HookedTransformer, it can have a pretrained Transformer's weights loaded via `.from_pretrained`. There are a few features you might know from HookedTransformer which are not yet supported: - There is no preprocessing (e.g. LayerNorm folding) when loading a pretrained model @@ -87,4 +84,4 @@ def forward( if return_type is None: return None - return logits \ No newline at end of file + return logits From c9b49c8b8ad1624cd371bf358cd440187f1e0cc1 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E2=80=9CAndrew?= Date: Mon, 24 Feb 2025 09:29:10 +0000 Subject: [PATCH 05/21] Genuinely no idea what I did --- src/mechir/modelling/cat.py | 11 +- src/mechir/modelling/dot.py | 7 +- .../modelling/hooked/HookedDistilBert.py | 8 +- src/mechir/modelling/hooked/HookedElectra.py | 4 +- src/mechir/modelling/hooked/HookedEncoder.py | 131 +++++++++++++----- .../HookedEncoderForSequenceClassification.py | 3 +- .../hooked/HookedTransformerConfig.py | 2 + .../modelling/hooked/hooked_components.py | 4 +- src/mechir/modelling/hooked/states.py | 3 +- src/mechir/modelling/patched.py | 16 ++- src/mechir/modelling/t5.py | 4 +- 11 files changed, 137 insertions(+), 56 deletions(-) diff --git a/src/mechir/modelling/cat.py b/src/mechir/modelling/cat.py index f3636fd..75ab391 100644 --- a/src/mechir/modelling/cat.py +++ b/src/mechir/modelling/cat.py @@ -13,7 +13,9 @@ from .hooked.loading_from_pretrained import get_official_model_name from .hooked.HookedDistilBert import HookedDistilBertForSequenceClassification from ..util import linear_rank_function -from ..modelling.hooked.HookedEncoderForSequenceClassification import HookedEncoderForSequenceClassification +from ..modelling.hooked.HookedEncoderForSequenceClassification import ( + HookedEncoderForSequenceClassification, +) from ..modelling.hooked.HookedElectra import HookedElectraForSequenceClassification logger = 
logging.getLogger(__name__) @@ -74,7 +76,12 @@ def forward( attention_mask: Float[torch.Tensor, "batch seq"], token_type_ids: Float[torch.Tensor, "batch seq"] = None, ): - model_output = self._model(input=input_ids, attention_mask=attention_mask, token_type_ids=token_type_ids, return_type="logits") + model_output = self._model( + input=input_ids, + attention_mask=attention_mask, + token_type_ids=token_type_ids, + return_type="logits", + ) model_output = ( F.log_softmax(model_output, dim=-1)[:, 0] if self.softmax_output diff --git a/src/mechir/modelling/dot.py b/src/mechir/modelling/dot.py index 41b5d8a..8ceefc4 100644 --- a/src/mechir/modelling/dot.py +++ b/src/mechir/modelling/dot.py @@ -71,7 +71,12 @@ def forward( attention_mask: Float[torch.Tensor, "batch seq"], token_type_ids: Float[torch.Tensor, "batch seq"] = None, ): - model_output = self._model(input=input_ids, attention_mask=attention_mask, token_type_ids=token_type_ids, return_type="embeddings") + model_output = self._model( + input=input_ids, + attention_mask=attention_mask, + token_type_ids=token_type_ids, + return_type="embeddings", + ) return self._pooling(model_output) def get_act_patch_block_every( diff --git a/src/mechir/modelling/hooked/HookedDistilBert.py b/src/mechir/modelling/hooked/HookedDistilBert.py index ba7e71e..0a18fd7 100644 --- a/src/mechir/modelling/hooked/HookedDistilBert.py +++ b/src/mechir/modelling/hooked/HookedDistilBert.py @@ -6,7 +6,6 @@ from __future__ import annotations -import logging from typing import Dict, List, Optional, Tuple, Union, cast, overload import torch @@ -16,12 +15,11 @@ from transformers import AutoTokenizer from typing_extensions import Literal -from transformer_lens import ActivationCache, FactoredMatrix, HookedTransformerConfig +from transformer_lens import HookedTransformerConfig from transformer_lens.components import BertBlock, BertMLMHead, Unembed -from transformer_lens.hook_points import HookedRootModule, HookPoint -from transformer_lens.utilities 
import devices -from . import loading_from_pretrained as loading +from transformer_lens.hook_points import HookPoint from .hooked_components import DistilBertEmbed +from .HookedEncoder import HookedEncoder class HookedDistilBert(HookedEncoder): diff --git a/src/mechir/modelling/hooked/HookedElectra.py b/src/mechir/modelling/hooked/HookedElectra.py index fa4aee1..11688dc 100644 --- a/src/mechir/modelling/hooked/HookedElectra.py +++ b/src/mechir/modelling/hooked/HookedElectra.py @@ -17,6 +17,7 @@ from transformer_lens.hook_points import HookPoint from .linear import ClassificationHead, HiddenLinear + class ElectraClassificationHead(nn.Module): """ Transforms ELECTRA embeddings into logits. The purpose of this module is to predict masked tokens in a sentence. @@ -47,6 +48,7 @@ class HookedElectraForSequenceClassification(HookedEncoder): - There is no preprocessing (e.g. LayerNorm folding) when loading a pretrained model - The model only accepts tokens as inputs, and not strings, or lists of strings """ + def __init__(self, cfg, tokenizer=None, move_to_device=True, **kwargs): super().__init__(cfg, tokenizer, move_to_device, **kwargs) self.classifier = ElectraClassificationHead(cfg) @@ -55,7 +57,7 @@ def __init__(self, cfg, tokenizer=None, move_to_device=True, **kwargs): def forward( self, input: Int[torch.Tensor, "batch pos"], - return_type: Optional[str] = 'embeddings', + return_type: Optional[str] = "embeddings", token_type_ids: Optional[Int[torch.Tensor, "batch pos"]] = None, attention_mask: Optional[Int[torch.Tensor, "batch pos"]] = None, start_at_layer: Optional[int] = None, diff --git a/src/mechir/modelling/hooked/HookedEncoder.py b/src/mechir/modelling/hooked/HookedEncoder.py index 698d3a2..9430774 100644 --- a/src/mechir/modelling/hooked/HookedEncoder.py +++ b/src/mechir/modelling/hooked/HookedEncoder.py @@ -19,7 +19,14 @@ from . 
import loading_from_pretrained as loading from transformer_lens.ActivationCache import ActivationCache -from transformer_lens.components import BertBlock, BertEmbed, BertMLMHead, Unembed, BertNSPHead, BertPooler +from transformer_lens.components import ( + BertBlock, + BertEmbed, + BertMLMHead, + Unembed, + BertNSPHead, + BertPooler, +) from transformer_lens.FactoredMatrix import FactoredMatrix from transformer_lens.hook_points import HookedRootModule, HookPoint from transformer_lens.utilities import devices @@ -48,7 +55,9 @@ def __init__(self, cfg, tokenizer=None, move_to_device=True, **kwargs): ) self.cfg = cfg - assert self.cfg.n_devices == 1, "Multiple devices not supported for HookedEncoder" + assert ( + self.cfg.n_devices == 1 + ), "Multiple devices not supported for HookedEncoder" if tokenizer is not None: self.tokenizer = tokenizer elif self.cfg.tokenizer_name is not None: @@ -62,13 +71,17 @@ def __init__(self, cfg, tokenizer=None, move_to_device=True, **kwargs): if self.cfg.d_vocab == -1: # If we have a tokenizer, vocab size can be inferred from it. 
- assert self.tokenizer is not None, "Must provide a tokenizer if d_vocab is not provided" + assert ( + self.tokenizer is not None + ), "Must provide a tokenizer if d_vocab is not provided" self.cfg.d_vocab = max(self.tokenizer.vocab.values()) + 1 if self.cfg.d_vocab_out == -1: self.cfg.d_vocab_out = self.cfg.d_vocab self.embed = BertEmbed(self.cfg) - self.blocks = nn.ModuleList([BertBlock(self.cfg) for _ in range(self.cfg.n_layers)]) + self.blocks = nn.ModuleList( + [BertBlock(self.cfg) for _ in range(self.cfg.n_layers)] + ) self.mlm_head = BertMLMHead(self.cfg) self.unembed = Unembed(self.cfg) self.nsp_head = BertNSPHead(self.cfg) @@ -76,6 +89,8 @@ def __init__(self, cfg, tokenizer=None, move_to_device=True, **kwargs): self.hook_full_embed = HookPoint() + self.use_token_type_ids = self.cfg.use_token_type_ids + if move_to_device: self.to(self.cfg.device) @@ -116,7 +131,7 @@ def to_tokens( if move_to_device: tokens = tokens.to(self.cfg.device) - token_type_ids = encodings.token_type_ids.to(self.cfg.device) + token_type_ids = encodings.token_type_ids.to(self.cfg.device) if self.use_token_type_ids else None attention_mask = encodings.attention_mask.to(self.cfg.device) return tokens, token_type_ids, attention_mask @@ -163,12 +178,14 @@ def encoder_output( else None ) additive_attention_mask = ( - torch.where(mask == 1, large_negative_number, 0) if mask is not None else None + torch.where(mask == 1, large_negative_number, 0) + if mask is not None + else None ) if start_at_layer is None: start_at_layer = 0 - + idx_and_block = list(zip(range(self.cfg.n_layers), self.blocks)) for _, block in idx_and_block[start_at_layer:stop_at_layer]: @@ -179,7 +196,7 @@ def encoder_output( def forward( self, input: Int[torch.Tensor, "batch pos"], - return_type: Optional[str] = 'embeddings', + return_type: Optional[str] = "embeddings", token_type_ids: Optional[Int[torch.Tensor, "batch pos"]] = None, attention_mask: Optional[Int[torch.Tensor, "batch pos"]] = None, start_at_layer: 
Optional[int] = None, @@ -193,18 +210,26 @@ def forward( attention_mask: Optional[torch.Tensor]: A binary mask which indicates which tokens should be attended to (1) and which should be ignored (0). Primarily used for padding variable-length sentences in a batch. For instance, in a batch with sentences of differing lengths, shorter sentences are padded with 0s on the right. If not provided, the model assumes all tokens should be attended to. """ - + if start_at_layer is None: if isinstance(input, str) or isinstance(input, list): - assert self.tokenizer is not None, "Must provide a tokenizer if input is a string" - residual, token_type_ids_from_tokenizer, attention_mask = self.to_tokens(input) + assert ( + self.tokenizer is not None + ), "Must provide a tokenizer if input is a string" + residual, token_type_ids_from_tokenizer, attention_mask = ( + self.to_tokens(input) + ) # If token_type_ids or attention mask are not provided, use the ones from the tokenizer token_type_ids = ( - token_type_ids_from_tokenizer if token_type_ids is None else token_type_ids + token_type_ids_from_tokenizer + if token_type_ids is None + else token_type_ids ) one_zero_attention_mask = ( - attention_mask if one_zero_attention_mask is None else one_zero_attention_mask + attention_mask + if one_zero_attention_mask is None + else one_zero_attention_mask ) else: assert type(input) is torch.Tensor @@ -225,7 +250,7 @@ def forward( one_zero_attention_mask=one_zero_attention_mask, ) - if stop_at_layer is not None or return_type == 'embeddings': + if stop_at_layer is not None or return_type == "embeddings": return resid resid = self.mlm_head(resid) @@ -233,14 +258,18 @@ def forward( if return_type == "predictions": # Get predictions for masked tokens - logprobs = logits[tokens == self.tokenizer.mask_token_id].log_softmax(dim=-1) + logprobs = logits[tokens == self.tokenizer.mask_token_id].log_softmax( + dim=-1 + ) predictions = self.tokenizer.decode(logprobs.argmax(dim=-1)) # If input was a list of 
strings, split predictions into a list if " " in predictions: # Split along space predictions = predictions.split(" ") - predictions = [f"Prediction {i}: {p}" for i, p in enumerate(predictions)] + predictions = [ + f"Prediction {i}: {p}" for i, p in enumerate(predictions) + ] return predictions elif return_type is None: @@ -251,14 +280,18 @@ def forward( @overload def run_with_cache( self, *model_args, return_cache_object: Literal[True] = True, **kwargs - ) -> Tuple[Float[torch.Tensor, "batch pos d_vocab"], ActivationCache,]: - ... + ) -> Tuple[ + Float[torch.Tensor, "batch pos d_vocab"], + ActivationCache, + ]: ... @overload def run_with_cache( self, *model_args, return_cache_object: Literal[False], **kwargs - ) -> Tuple[Float[torch.Tensor, "batch pos d_vocab"], Dict[str, torch.Tensor],]: - ... + ) -> Tuple[ + Float[torch.Tensor, "batch pos d_vocab"], + Dict[str, torch.Tensor], + ]: ... def run_with_cache( self, @@ -277,7 +310,9 @@ def run_with_cache( *model_args, remove_batch_dim=remove_batch_dim, **kwargs ) if return_cache_object: - cache = ActivationCache(cache_dict, self, has_batch_dim=not remove_batch_dim) + cache = ActivationCache( + cache_dict, self, has_batch_dim=not remove_batch_dim + ) return out, cache else: return out, cache_dict @@ -400,62 +435,86 @@ def W_E_pos(self) -> Float[torch.Tensor, "d_vocab+n_ctx d_model"]: @property def W_K(self) -> Float[torch.Tensor, "n_layers n_heads d_model d_head"]: """Stacks the key weights across all layers""" - return torch.stack([cast(BertBlock, block).attn.W_K for block in self.blocks], dim=0) + return torch.stack( + [cast(BertBlock, block).attn.W_K for block in self.blocks], dim=0 + ) @property def W_Q(self) -> Float[torch.Tensor, "n_layers n_heads d_model d_head"]: """Stacks the query weights across all layers""" - return torch.stack([cast(BertBlock, block).attn.W_Q for block in self.blocks], dim=0) + return torch.stack( + [cast(BertBlock, block).attn.W_Q for block in self.blocks], dim=0 + ) @property def 
W_V(self) -> Float[torch.Tensor, "n_layers n_heads d_model d_head"]: """Stacks the value weights across all layers""" - return torch.stack([cast(BertBlock, block).attn.W_V for block in self.blocks], dim=0) + return torch.stack( + [cast(BertBlock, block).attn.W_V for block in self.blocks], dim=0 + ) @property def W_O(self) -> Float[torch.Tensor, "n_layers n_heads d_head d_model"]: """Stacks the attn output weights across all layers""" - return torch.stack([cast(BertBlock, block).attn.W_O for block in self.blocks], dim=0) + return torch.stack( + [cast(BertBlock, block).attn.W_O for block in self.blocks], dim=0 + ) @property def W_in(self) -> Float[torch.Tensor, "n_layers d_model d_mlp"]: """Stacks the MLP input weights across all layers""" - return torch.stack([cast(BertBlock, block).mlp.W_in for block in self.blocks], dim=0) + return torch.stack( + [cast(BertBlock, block).mlp.W_in for block in self.blocks], dim=0 + ) @property def W_out(self) -> Float[torch.Tensor, "n_layers d_mlp d_model"]: """Stacks the MLP output weights across all layers""" - return torch.stack([cast(BertBlock, block).mlp.W_out for block in self.blocks], dim=0) + return torch.stack( + [cast(BertBlock, block).mlp.W_out for block in self.blocks], dim=0 + ) @property def b_K(self) -> Float[torch.Tensor, "n_layers n_heads d_head"]: """Stacks the key biases across all layers""" - return torch.stack([cast(BertBlock, block).attn.b_K for block in self.blocks], dim=0) + return torch.stack( + [cast(BertBlock, block).attn.b_K for block in self.blocks], dim=0 + ) @property def b_Q(self) -> Float[torch.Tensor, "n_layers n_heads d_head"]: """Stacks the query biases across all layers""" - return torch.stack([cast(BertBlock, block).attn.b_Q for block in self.blocks], dim=0) + return torch.stack( + [cast(BertBlock, block).attn.b_Q for block in self.blocks], dim=0 + ) @property def b_V(self) -> Float[torch.Tensor, "n_layers n_heads d_head"]: """Stacks the value biases across all layers""" - return 
torch.stack([cast(BertBlock, block).attn.b_V for block in self.blocks], dim=0) + return torch.stack( + [cast(BertBlock, block).attn.b_V for block in self.blocks], dim=0 + ) @property def b_O(self) -> Float[torch.Tensor, "n_layers d_model"]: """Stacks the attn output biases across all layers""" - return torch.stack([cast(BertBlock, block).attn.b_O for block in self.blocks], dim=0) + return torch.stack( + [cast(BertBlock, block).attn.b_O for block in self.blocks], dim=0 + ) @property def b_in(self) -> Float[torch.Tensor, "n_layers d_mlp"]: """Stacks the MLP input biases across all layers""" - return torch.stack([cast(BertBlock, block).mlp.b_in for block in self.blocks], dim=0) + return torch.stack( + [cast(BertBlock, block).mlp.b_in for block in self.blocks], dim=0 + ) @property def b_out(self) -> Float[torch.Tensor, "n_layers d_model"]: """Stacks the MLP output biases across all layers""" - return torch.stack([cast(BertBlock, block).mlp.b_out for block in self.blocks], dim=0) + return torch.stack( + [cast(BertBlock, block).mlp.b_out for block in self.blocks], dim=0 + ) @property def QK(self) -> FactoredMatrix: # [n_layers, n_heads, d_model, d_model] @@ -470,4 +529,8 @@ def OV(self) -> FactoredMatrix: # [n_layers, n_heads, d_model, d_model] def all_head_labels(self) -> List[str]: """Returns a list of strings with the format "L{l}H{h}", where l is the layer index and h is the head index.""" - return [f"L{l}H{h}" for l in range(self.cfg.n_layers) for h in range(self.cfg.n_heads)] + return [ + f"L{l}H{h}" + for l in range(self.cfg.n_layers) + for h in range(self.cfg.n_heads) + ] diff --git a/src/mechir/modelling/hooked/HookedEncoderForSequenceClassification.py b/src/mechir/modelling/hooked/HookedEncoderForSequenceClassification.py index f385dc8..243c901 100644 --- a/src/mechir/modelling/hooked/HookedEncoderForSequenceClassification.py +++ b/src/mechir/modelling/hooked/HookedEncoderForSequenceClassification.py @@ -14,7 +14,6 @@ from .HookedEncoder import HookedEncoder 
from .linear import ClassificationHead -from . import loading_from_pretrained as loading class HookedEncoderForSequenceClassification(HookedEncoder): @@ -38,7 +37,7 @@ def __init__(self, cfg, tokenizer=None, move_to_device=True, **kwargs): def forward( self, input: Int[torch.Tensor, "batch pos"], - return_type: Optional[str] = 'embeddings', + return_type: Optional[str] = "embeddings", token_type_ids: Optional[Int[torch.Tensor, "batch pos"]] = None, attention_mask: Optional[Int[torch.Tensor, "batch pos"]] = None, start_at_layer: Optional[int] = None, diff --git a/src/mechir/modelling/hooked/HookedTransformerConfig.py b/src/mechir/modelling/hooked/HookedTransformerConfig.py index d3d085c..9cc5947 100644 --- a/src/mechir/modelling/hooked/HookedTransformerConfig.py +++ b/src/mechir/modelling/hooked/HookedTransformerConfig.py @@ -180,6 +180,7 @@ class HookedTransformerConfig: in Gemma-2 (see attn_scores_soft_cap for details). Defaults to -1.0, which means not set. num_labels (int): The number of labels for the classification task. Defaults to 1. + use_token_type_ids (bool): Whether to use token type ids. Defaults to True. 
""" n_layers: int @@ -244,6 +245,7 @@ class HookedTransformerConfig: attn_scores_soft_cap: float = -1.0 output_logits_soft_cap: float = -1.0 num_labels: int = 1 + use_token_type_ids: bool = True def __post_init__(self): if self.n_heads == -1: diff --git a/src/mechir/modelling/hooked/hooked_components.py b/src/mechir/modelling/hooked/hooked_components.py index 90d83ec..f5fd342 100644 --- a/src/mechir/modelling/hooked/hooked_components.py +++ b/src/mechir/modelling/hooked/hooked_components.py @@ -42,8 +42,6 @@ def forward( word_embeddings_out = self.hook_embed(self.embed(input_ids)) position_embeddings_out = self.hook_pos_embed(self.pos_embed(index_ids)) - embeddings_out = ( - word_embeddings_out + position_embeddings_out - ) + embeddings_out = word_embeddings_out + position_embeddings_out layer_norm_out = self.ln(embeddings_out) return layer_norm_out diff --git a/src/mechir/modelling/hooked/states.py b/src/mechir/modelling/hooked/states.py index 32bc228..a09b1e1 100644 --- a/src/mechir/modelling/hooked/states.py +++ b/src/mechir/modelling/hooked/states.py @@ -157,9 +157,10 @@ def DistilBert_state_dict(hf_config): "n_layers": hf_config.n_layers, "n_ctx": hf_config.max_position_embeddings, "eps": 1e-12, - "d_vocab": hf_config.vocab_size, # hacky fix for special pad token + "d_vocab": hf_config.vocab_size, "act_fn": hf_config.activation, "attention_dir": "birectional", + "use_token_type_ids": False, # dropout, initializer_range, pad_token_id, qa_dropout, seq_classif_dropout, sinusoidal_pos_embds, tie_weights } diff --git a/src/mechir/modelling/patched.py b/src/mechir/modelling/patched.py index f229606..de8e094 100644 --- a/src/mechir/modelling/patched.py +++ b/src/mechir/modelling/patched.py @@ -16,9 +16,9 @@ def __init__(self) -> None: @property def _patch_funcs(self): return { - 'block_all' : self.get_act_patch_block_every, - 'head_all' : self.get_act_patch_attn_head_out_all_pos, - 'head_by_pos' : self.get_act_patch_attn_head_by_pos, + "block_all": 
self.get_act_patch_block_every, + "head_all": self.get_act_patch_attn_head_out_all_pos, + "head_by_pos": self.get_act_patch_attn_head_by_pos, } def _patch_residual_component( @@ -114,7 +114,9 @@ def _get_act_patch_block_every( patched_outputs = self.run_with_hooks( corrupted_tokens["input_ids"], attention_mask=corrupted_tokens["attention_mask"], - fwd_hooks=[('_model.'+utils.get_act_name(component, layer), hook_fn)], + fwd_hooks=[ + ("_model." + utils.get_act_name(component, layer), hook_fn) + ], ) yield (component_idx, layer, position), patched_outputs @@ -141,7 +143,7 @@ def _get_act_patch_attn_head_out_all_pos( patched_outputs = self.run_with_hooks( corrupted_tokens["input_ids"], attention_mask=corrupted_tokens["attention_mask"], - fwd_hooks=[('_model.'+utils.get_act_name("z", layer), hook_fn)], + fwd_hooks=[("_model." + utils.get_act_name("z", layer), hook_fn)], ) yield (layer, head), patched_outputs @@ -171,7 +173,9 @@ def _get_act_patch_attn_head_by_pos( patched_outputs = self.run_with_hooks( corrupted_tokens["input_ids"], attention_mask=corrupted_tokens["attention_mask"], - fwd_hooks=[('_model.'+utils.get_act_name(component, layer), hook_fn)], + fwd_hooks=[ + ("_model." 
+ utils.get_act_name(component, layer), hook_fn) + ], ) yield (component_idx, i, position), patched_outputs diff --git a/src/mechir/modelling/t5.py b/src/mechir/modelling/t5.py index 47ae557..70e55b1 100644 --- a/src/mechir/modelling/t5.py +++ b/src/mechir/modelling/t5.py @@ -56,7 +56,9 @@ def forward( input_ids: Float[torch.Tensor, "batch seq"], attention_mask: Float[torch.Tensor, "batch seq"], ): - model_output = self._model(input=input_ids, one_hot_attention_mask=attention_mask, return_type="logits") + model_output = self._model( + input=input_ids, one_hot_attention_mask=attention_mask, return_type="logits" + ) model_output = ( model_output[:, 0, (self.pos_token, self.neg_token)].softmax(dim=-1)[:, 0] if self.softmax_output From b06545a9732d2dcf39c1e3ef183dd075a3dced0f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E2=80=9CAndrew?= Date: Mon, 24 Feb 2025 14:09:57 +0000 Subject: [PATCH 06/21] Doing away with Electra and DistilBERT classes --- src/mechir/modelling/cat.py | 21 ++-------------- src/mechir/modelling/dot.py | 14 +---------- src/mechir/modelling/hooked/HookedEncoder.py | 2 +- .../HookedEncoderForSequenceClassification.py | 6 ++--- .../hooked/HookedTransformerConfig.py | 1 + .../modelling/hooked/hooked_components.py | 22 +++++++++------- src/mechir/modelling/hooked/linear.py | 23 +++++++++++++++++ src/mechir/modelling/hooked/states.py | 25 ++++++++----------- test/test_cat.py | 0 test/test_dot.py | 0 10 files changed, 54 insertions(+), 60 deletions(-) create mode 100644 test/test_cat.py create mode 100644 test/test_dot.py diff --git a/src/mechir/modelling/cat.py b/src/mechir/modelling/cat.py index 75ab391..4730124 100644 --- a/src/mechir/modelling/cat.py +++ b/src/mechir/modelling/cat.py @@ -11,29 +11,12 @@ from .patched import PatchedMixin from .sae import SAEMixin from .hooked.loading_from_pretrained import get_official_model_name -from .hooked.HookedDistilBert import HookedDistilBertForSequenceClassification from ..util import linear_rank_function -from 
..modelling.hooked.HookedEncoderForSequenceClassification import ( - HookedEncoderForSequenceClassification, -) -from ..modelling.hooked.HookedElectra import HookedElectraForSequenceClassification +from ..modelling.hooked.HookedEncoderForSequenceClassification import HookedEncoderForSequenceClassification logger = logging.getLogger(__name__) -def get_hooked(architecture): - huggingface_token = os.environ.get("HF_TOKEN", None) - hf_config = AutoConfig.from_pretrained( - get_official_model_name(architecture), token=huggingface_token - ) - architecture = hf_config.architectures[0] - if "distilbert" in architecture.lower(): - return HookedDistilBertForSequenceClassification - if "electra" in architecture.lower(): - return HookedElectraForSequenceClassification - return HookedEncoderForSequenceClassification - - class Cat(HookedRootModule, PatchedMixin, SAEMixin): def __init__( self, @@ -61,7 +44,7 @@ def __init__( .to(self._device) ) - self._model = get_hooked(model_name_or_path).from_pretrained( + self._model = HookedEncoderForSequenceClassification.from_pretrained( self.model_name_or_path, device=self._device, hf_model=self.__hf_model ) diff --git a/src/mechir/modelling/dot.py b/src/mechir/modelling/dot.py index 8ceefc4..fb33a77 100644 --- a/src/mechir/modelling/dot.py +++ b/src/mechir/modelling/dot.py @@ -9,7 +9,6 @@ import transformer_lens.utils as utils from .patched import PatchedMixin from .sae import SAEMixin -from .hooked.HookedDistilBert import HookedDistilBert from .hooked.HookedEncoder import HookedEncoder from .hooked.loading_from_pretrained import get_official_model_name from ..util import batched_dot_product, linear_rank_function @@ -22,17 +21,6 @@ } -def get_hooked(architecture): - huggingface_token = os.environ.get("HF_TOKEN", None) - hf_config = AutoConfig.from_pretrained( - get_official_model_name(architecture), token=huggingface_token - ) - architecture = hf_config.architectures[0] - if "distilbert" in architecture.lower(): - return HookedDistilBert 
- return HookedEncoder - - class Dot(HookedRootModule, PatchedMixin, SAEMixin): def __init__( self, @@ -55,7 +43,7 @@ def __init__( self.__hf_model = ( AutoModel.from_pretrained(model_name_or_path).eval().to(self._device) ) - self._model = get_hooked(model_name_or_path).from_pretrained( + self._model = HookedEncoder.from_pretrained( self.model_name_or_path, device=self._device, hf_model=self.__hf_model ) diff --git a/src/mechir/modelling/hooked/HookedEncoder.py b/src/mechir/modelling/hooked/HookedEncoder.py index 9430774..34c4f0d 100644 --- a/src/mechir/modelling/hooked/HookedEncoder.py +++ b/src/mechir/modelling/hooked/HookedEncoder.py @@ -21,7 +21,6 @@ from transformer_lens.ActivationCache import ActivationCache from transformer_lens.components import ( BertBlock, - BertEmbed, BertMLMHead, Unembed, BertNSPHead, @@ -32,6 +31,7 @@ from transformer_lens.utilities import devices from .HookedTransformerConfig import HookedTransformerConfig +from .hooked_components import BertEmbed class HookedEncoder(HookedRootModule): diff --git a/src/mechir/modelling/hooked/HookedEncoderForSequenceClassification.py b/src/mechir/modelling/hooked/HookedEncoderForSequenceClassification.py index 243c901..d1109cf 100644 --- a/src/mechir/modelling/hooked/HookedEncoderForSequenceClassification.py +++ b/src/mechir/modelling/hooked/HookedEncoderForSequenceClassification.py @@ -9,11 +9,9 @@ from typing import Optional import torch from jaxtyping import Float, Int -from torch import nn -from typing_extensions import Literal from .HookedEncoder import HookedEncoder -from .linear import ClassificationHead +from .linear import ClassificationHead, MLPClassificationHead class HookedEncoderForSequenceClassification(HookedEncoder): @@ -31,7 +29,7 @@ class HookedEncoderForSequenceClassification(HookedEncoder): def __init__(self, cfg, tokenizer=None, move_to_device=True, **kwargs): super().__init__(cfg, tokenizer, move_to_device, **kwargs) - self.classifier = ClassificationHead(cfg) + self.classifier = 
ClassificationHead(cfg) if not self.cfg.use_mlp_head else MLPClassificationHead(cfg) self.setup() def forward( diff --git a/src/mechir/modelling/hooked/HookedTransformerConfig.py b/src/mechir/modelling/hooked/HookedTransformerConfig.py index 9cc5947..7ede4d6 100644 --- a/src/mechir/modelling/hooked/HookedTransformerConfig.py +++ b/src/mechir/modelling/hooked/HookedTransformerConfig.py @@ -246,6 +246,7 @@ class HookedTransformerConfig: output_logits_soft_cap: float = -1.0 num_labels: int = 1 use_token_type_ids: bool = True + use_mlp_head: bool = False def __post_init__(self): if self.n_heads == -1: diff --git a/src/mechir/modelling/hooked/hooked_components.py b/src/mechir/modelling/hooked/hooked_components.py index f5fd342..b977eef 100644 --- a/src/mechir/modelling/hooked/hooked_components.py +++ b/src/mechir/modelling/hooked/hooked_components.py @@ -6,14 +6,14 @@ import einops import torch import torch.nn as nn -from jaxtyping import Int +from jaxtyping import Int, Float from transformer_lens.components import Embed, LayerNorm, PosEmbed, TokenTypeEmbed from transformer_lens.hook_points import HookPoint from transformer_lens.HookedTransformerConfig import HookedTransformerConfig -class DistilBertEmbed(nn.Module): +class BertEmbed(nn.Module): """ Custom embedding layer for a BERT-like model. This module computes the sum of the token, positional and token-type embeddings and takes the layer norm of the result. 
""" @@ -23,25 +23,29 @@ def __init__(self, cfg: Union[Dict, HookedTransformerConfig]): self.cfg = HookedTransformerConfig.unwrap(cfg) self.embed = Embed(self.cfg) self.pos_embed = PosEmbed(self.cfg) - # self.token_type_embed = TokenTypeEmbed(self.cfg) + self.token_type_embed = TokenTypeEmbed(self.cfg) if self.cfg.use_token_type_ids else nn.Identity() self.ln = LayerNorm(self.cfg) self.hook_embed = HookPoint() self.hook_pos_embed = HookPoint() - # self.hook_token_type_embed = HookPoint() + self.hook_token_type_embed = HookPoint() if self.cfg.use_token_type_ids else nn.Identity() def forward( self, input_ids: Int[torch.Tensor, "batch pos"], token_type_ids: Optional[Int[torch.Tensor, "batch pos"]] = None, - ): + ) -> Float[torch.Tensor, "batch pos d_model"]: base_index_id = torch.arange(input_ids.shape[1], device=input_ids.device) - index_ids = einops.repeat( - base_index_id, "pos -> batch pos", batch=input_ids.shape[0] - ) + index_ids = einops.repeat(base_index_id, "pos -> batch pos", batch=input_ids.shape[0]) + if token_type_ids is None: + token_type_ids = torch.zeros_like(input_ids) + word_embeddings_out = self.hook_embed(self.embed(input_ids)) position_embeddings_out = self.hook_pos_embed(self.pos_embed(index_ids)) + token_type_embeddings_out = self.hook_token_type_embed( + self.token_type_embed(token_type_ids) + ) - embeddings_out = word_embeddings_out + position_embeddings_out + embeddings_out = word_embeddings_out + position_embeddings_out + token_type_embeddings_out layer_norm_out = self.ln(embeddings_out) return layer_norm_out diff --git a/src/mechir/modelling/hooked/linear.py b/src/mechir/modelling/hooked/linear.py index f891f67..889cc1d 100644 --- a/src/mechir/modelling/hooked/linear.py +++ b/src/mechir/modelling/hooked/linear.py @@ -8,6 +8,8 @@ import torch.nn as nn from jaxtyping import Float from transformer_lens.utilities.addmm import batch_addmm +from transformer_lens.hook_points import HookPoint +from 
transformer_lens.factories.activation_function_factory import ActivationFunctionFactory from .HookedTransformerConfig import HookedTransformerConfig @@ -39,3 +41,24 @@ def forward( self, x: Float[torch.Tensor, "batch pos d_model"] ) -> Float[torch.Tensor, "batch pos d_model"]: return batch_addmm(self.b, self.W.T, x) + + +class MLPClassificationHead(nn.Module): + """ + Transforms ELECTRA embeddings into logits. The purpose of this module is to predict masked tokens in a sentence. + """ + + def __init__(self, cfg: Union[Dict, HookedTransformerConfig]): + super().__init__() + self.cfg = HookedTransformerConfig.unwrap(cfg) + self.dense = HiddenLinear(cfg) + self.out_proj = ClassificationHead(cfg) + self.activation = ActivationFunctionFactory.pick_activation_function(self.cfg) + + self.hook_pre = HookPoint() # [batch, pos, d_mlp] + self.hook_post = HookPoint() # [batch, pos, d_mlp] + + def forward(self, resid: Float[torch.Tensor, "batch d_model"]) -> torch.Tensor: + pre_act = self.hook_pre(self.dense(resid)) + post_act = self.hook_post(self.activation(pre_act)) + return self.out_proj(post_act) diff --git a/src/mechir/modelling/hooked/states.py b/src/mechir/modelling/hooked/states.py index a09b1e1..b626433 100644 --- a/src/mechir/modelling/hooked/states.py +++ b/src/mechir/modelling/hooked/states.py @@ -128,22 +128,20 @@ def BertModel_state_dict(hf_config): } -@extend_transformer_lens_registry( - ["BertForSequenceClassification", "ElectraForSequenceClassification"] -) +@extend_transformer_lens_registry("BertForSequenceClassification") def BertForSequenceClassification_state_dict(hf_config): return { - "d_model": hf_config.hidden_size, - "d_head": hf_config.hidden_size // hf_config.num_attention_heads, - "n_heads": hf_config.num_attention_heads, - "d_mlp": hf_config.intermediate_size, - "n_layers": hf_config.num_hidden_layers, - "n_ctx": hf_config.max_position_embeddings, - "eps": hf_config.layer_norm_eps, - "d_vocab": hf_config.vocab_size, - "act_fn": "gelu", - 
"attention_dir": "bidirectional", + **BertModel_state_dict(hf_config), + "num_labels": hf_config.num_labels, + } + + +@extend_transformer_lens_registry("ElectraForSequenceClassification") +def ElectraForSequenceClassification_state_dict(hf_config): + return { + **BertModel_state_dict(hf_config), "num_labels": hf_config.num_labels, + "use_mlp_head": True } @@ -161,7 +159,6 @@ def DistilBert_state_dict(hf_config): "act_fn": hf_config.activation, "attention_dir": "birectional", "use_token_type_ids": False, - # dropout, initializer_range, pad_token_id, qa_dropout, seq_classif_dropout, sinusoidal_pos_embds, tie_weights } diff --git a/test/test_cat.py b/test/test_cat.py new file mode 100644 index 0000000..e69de29 diff --git a/test/test_dot.py b/test/test_dot.py new file mode 100644 index 0000000..e69de29 From d3273b9a20efda963e1901b744a449a02bf690eb Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E2=80=9CAndrew?= Date: Mon, 24 Feb 2025 14:18:12 +0000 Subject: [PATCH 07/21] better practise --- src/mechir/data/loader/__init__.py | 229 +---------------------------- src/mechir/data/loader/base.py | 225 ++++++++++++++++++++++++++++ src/mechir/data/loader/cat.py | 2 +- src/mechir/data/loader/dot.py | 2 +- src/mechir/data/loader/t5.py | 2 +- test/test_cat.py | 0 test/test_dot.py | 0 test/test_output_parity.py | 53 +++++++ 8 files changed, 284 insertions(+), 229 deletions(-) create mode 100644 src/mechir/data/loader/base.py delete mode 100644 test/test_cat.py delete mode 100644 test/test_dot.py create mode 100644 test/test_output_parity.py diff --git a/src/mechir/data/loader/__init__.py b/src/mechir/data/loader/__init__.py index 556e2f1..cfea2f8 100644 --- a/src/mechir/data/loader/__init__.py +++ b/src/mechir/data/loader/__init__.py @@ -1,230 +1,7 @@ import torch - -def pad(a: list, b: list, tok: str): - assert type(a) == type(b) == list, "Both a and b must be lists" - - padded = [] - i, j = 0, 0 - while i < len(a) and j < len(b): - if a[i] == b[j]: - padded.append(a[i]) - i += 1 - j 
+= 1 - else: - padded.append(tok) - j += 1 - - while j < len(b): - padded.append(tok) - j += 1 - - return padded - - -class BaseCollator(object): - tokenizer = None - transformation_func: callable = None - special_mask: bool = False - q_max_length: int = 30 - d_max_length: int = 300 - special_token: int = "a" - perturb_type: str = "append" - pre_perturbed: bool = False - - def __init__( - self, - tokenizer, - transformation_func=None, - special_mask=False, - q_max_length=30, - d_max_length=200, - special_token="a", - perturb_type="append", - pre_perturbed=False, - ) -> None: - assert ( - transformation_func is not None or pre_perturbed - ), "Either a transformation function or pre-perturbed data must be provided." - self.tokenizer = tokenizer - # self.tokenizer.add_special_tokens({"additional_special_tokens": [special_token]}) - # self.special_token_id = self.tokenizer.convert_tokens_to_ids(special_token) - - self.transformation_func = transformation_func - self.special_mask = special_mask - self.perturb_type = perturb_type - self.q_max_length = q_max_length - self.d_max_length = d_max_length - self.special_token = special_token - self.special_token_id = self.tokenizer.convert_tokens_to_ids(self.special_token) - self.perturb_type = perturb_type - self.pre_perturbed = pre_perturbed - - def get_data(self, batch): - if self.pre_perturbed: - queries, docs, perturbed = zip(*batch) - else: - queries, docs = zip(*batch) - perturbed = [ - self.transformation_func(doc, query=query) for query, doc in batch - ] - - batch_padded_docs, batch_padded_perturbed_docs = [], [] - - for doc_a, doc_b in zip(docs, perturbed): - padded_a, padded_b = self.pad_by_perturb_type(doc_a, doc_b) - batch_padded_docs.append(padded_a) - batch_padded_perturbed_docs.append(padded_b) - - return queries, batch_padded_docs, batch_padded_perturbed_docs - - def pad(self, a: str, b: str): - # turn both sequences into list of tokenized elements - a = self.tokenizer.tokenize(a) - b = 
self.tokenizer.tokenize(b) - - return self.tokenizer.decode( - self.tokenizer.tokens_to_ids(pad(a, b, self.special_token)) - ) - - def pad_by_perturb_type(self, doc_a: str, doc_b: str): - accepted_perturb_types = ["append", "prepend", "replace", "inject"] - assert ( - self.perturb_type in accepted_perturb_types - ), f"Perturbation type must be one of the following: {accepted_perturb_types}" - - doc_a = self.tokenizer.tokenize(doc_a) - doc_b = self.tokenizer.tokenize(doc_b) - - if self.perturb_type == "append": - assert len(doc_a) < len( - doc_b - ), "Perturbed document should be longer than original for append perturbation." - doc_a = doc_a + [self.special_token] * (len(doc_b) - len(doc_a)) - elif self.perturb_type == "prepend": - assert len(doc_a) < len( - doc_b - ), "Perturbed document should be longer than original for prepend perturbation." - doc_a = [self.special_token] * (len(doc_b) - len(doc_a)) + doc_a - elif self.perturb_type == "replace": - if len(doc_a) == len(doc_b): - pass # no padding needed - else: - padded_a, padded_b = [], [] - idx_a, idx_b = 0, 0 - while idx_a < len(doc_a) and idx_b < len(doc_b): - if doc_a[idx_a] == doc_b[idx_b]: - padded_a.append(doc_a[idx_a]) - padded_b.append(doc_b[idx_b]) - idx_a += 1 - idx_b += 1 - else: - padded_a.append(doc_a[idx_a]) - padded_b.append(doc_b[idx_b]) - idx_a += 1 - idx_b += 1 - - if len(doc_a) < len(doc_b): - # Replaced term is shorter in length than the term it was replaced with - while idx_b < len(doc_b) and ( - idx_a >= len(doc_a) or doc_b[idx_b] != doc_a[idx_a] - ): - padded_a.append(self.special_token) - padded_b.append(doc_b[idx_b]) - idx_b += 1 - if len(doc_a) > len(doc_b): - # Replaced term is longer than the term it was replaced with - while idx_a < len(doc_a) and ( - idx_b >= len(doc_b) or doc_b[idx_b] != doc_a[idx_a] - ): - padded_a.append(doc_a[idx_a]) - padded_b.append(self.special_token) - idx_a += 1 - - doc_a, doc_b = padded_a, padded_b - - elif self.perturb_type == "inject": - pass - - assert 
len(doc_a) == len( - doc_b - ), "Failed to pad input pairs, mismatch in document lengths post-padding." - return self.tokenizer.convert_tokens_to_string( - doc_a - ), self.tokenizer.convert_tokens_to_string(doc_b) - - -def pad_tokenized( - a_batch: torch.Tensor, - b_batch: torch.Tensor, - pad_tok: int, -): - - a_batch_input_ids, b_batch_input_ids = a_batch["input_ids"], b_batch["input_ids"] - a_batch_attn_mask, b_batch_attn_mask = ( - a_batch["attention_mask"], - b_batch["attention_mask"], - ) - - a_batch_final, b_batch_final = [], [] - a_batch_attn_final, b_batch_attn_final = [], [] - - for a_tokens, b_tokens, a_mask, b_mask in zip( - a_batch_input_ids, b_batch_input_ids, a_batch_attn_mask, b_batch_attn_mask - ): - a_padded_tokens, b_padded_tokens = [], [] - a_padded_attn_mask, b_padded_attn_mask = [], [] - - if len(a_tokens) == len(b_tokens): - # No padding needed - a_padded_tokens.append(a_tokens) - b_padded_tokens.append(b_tokens) - a_padded_attn_mask.append(a_mask) - b_padded_attn_mask.append(b_mask) - else: - # Determine where to pad - idx_a, idx_b = 0, 0 - while idx_a < len(a_tokens) and idx_b < len(b_tokens): - if a_tokens[idx_a] == b_tokens[idx_b]: - a_padded_tokens.append(a_tokens[idx_a]) - b_padded_tokens.append(b_tokens[idx_b]) - a_padded_attn_mask.append(a_mask[idx_a]) - b_padded_attn_mask.append(b_mask[idx_b]) - idx_a += 1 - idx_b += 1 - elif len(a_tokens) < len(b_tokens): - # Accounts for the following perturbations: append, prepend, insert - # Also for replacement where the replaced term is equal to or shorter in length than the term is was replaced with - a_padded_tokens.append(torch.tensor([pad_tok], dtype=torch.int32)) - b_padded_tokens.append(b_tokens[idx_b]) - a_padded_attn_mask.append(a_mask[idx_a]) - b_padded_attn_mask.append(b_mask[idx_b]) - idx_b += 1 - elif len(a_tokens) > len(b_tokens): - # Account for replacement perturbation where the replaced term is longer than the term is was replaced with - a_padded_tokens.append(a_tokens[idx_a]) - 
b_padded_tokens.append(torch.tensor([pad_tok], dtype=torch.int32)) - a_padded_attn_mask.append(a_mask[idx_a]) - b_padded_attn_mask.append(b_mask[idx_b]) - idx_a += 1 - - a_batch_final.append(torch.tensor(a_padded_tokens)) - b_batch_final.append(torch.tensor(b_padded_tokens)) - a_batch_attn_final.append(torch.tensor(a_padded_attn_mask)) - b_batch_attn_final.append(torch.tensor(b_padded_attn_mask)) - - finalized_tokenized_a_batch = { - "input_ids": torch.stack(a_batch_final), - "attention_mask": torch.stack(a_batch_attn_final), - } - finalized_tokenized_b_batch = { - "input_ids": torch.stack(b_batch_final), - "attention_mask": torch.stack(b_batch_attn_final), - } - - return finalized_tokenized_a_batch, finalized_tokenized_b_batch - - +from .base import __all__ as base_all +from .base import * from .cat import __all__ as cat_all from .cat import * from .dot import __all__ as dot_all @@ -232,4 +9,4 @@ def pad_tokenized( from .t5 import __all__ as t5_all from .t5 import * -__all__ = cat_all + dot_all + t5_all + ["pad", "pad_tokenized", "BaseCollator"] +__all__ = base_all + cat_all + dot_all + t5_all diff --git a/src/mechir/data/loader/base.py b/src/mechir/data/loader/base.py new file mode 100644 index 0000000..e7b7c6e --- /dev/null +++ b/src/mechir/data/loader/base.py @@ -0,0 +1,225 @@ + +def pad(a: list, b: list, tok: str): + assert type(a) == type(b) == list, "Both a and b must be lists" + + padded = [] + i, j = 0, 0 + while i < len(a) and j < len(b): + if a[i] == b[j]: + padded.append(a[i]) + i += 1 + j += 1 + else: + padded.append(tok) + j += 1 + + while j < len(b): + padded.append(tok) + j += 1 + + return padded + + +class BaseCollator(object): + tokenizer = None + transformation_func: callable = None + special_mask: bool = False + q_max_length: int = 30 + d_max_length: int = 300 + special_token: int = "a" + perturb_type: str = "append" + pre_perturbed: bool = False + + def __init__( + self, + tokenizer, + transformation_func=None, + special_mask=False, + 
q_max_length=30, + d_max_length=200, + special_token="a", + perturb_type="append", + pre_perturbed=False, + ) -> None: + assert ( + transformation_func is not None or pre_perturbed + ), "Either a transformation function or pre-perturbed data must be provided." + self.tokenizer = tokenizer + # self.tokenizer.add_special_tokens({"additional_special_tokens": [special_token]}) + # self.special_token_id = self.tokenizer.convert_tokens_to_ids(special_token) + + self.transformation_func = transformation_func + self.special_mask = special_mask + self.perturb_type = perturb_type + self.q_max_length = q_max_length + self.d_max_length = d_max_length + self.special_token = special_token + self.special_token_id = self.tokenizer.convert_tokens_to_ids(self.special_token) + self.perturb_type = perturb_type + self.pre_perturbed = pre_perturbed + + def get_data(self, batch): + if self.pre_perturbed: + queries, docs, perturbed = zip(*batch) + else: + queries, docs = zip(*batch) + perturbed = [ + self.transformation_func(doc, query=query) for query, doc in batch + ] + + batch_padded_docs, batch_padded_perturbed_docs = [], [] + + for doc_a, doc_b in zip(docs, perturbed): + padded_a, padded_b = self.pad_by_perturb_type(doc_a, doc_b) + batch_padded_docs.append(padded_a) + batch_padded_perturbed_docs.append(padded_b) + + return queries, batch_padded_docs, batch_padded_perturbed_docs + + def pad(self, a: str, b: str): + # turn both sequences into list of tokenized elements + a = self.tokenizer.tokenize(a) + b = self.tokenizer.tokenize(b) + + return self.tokenizer.decode( + self.tokenizer.tokens_to_ids(pad(a, b, self.special_token)) + ) + + def pad_by_perturb_type(self, doc_a: str, doc_b: str): + accepted_perturb_types = ["append", "prepend", "replace", "inject"] + assert ( + self.perturb_type in accepted_perturb_types + ), f"Perturbation type must be one of the following: {accepted_perturb_types}" + + doc_a = self.tokenizer.tokenize(doc_a) + doc_b = self.tokenizer.tokenize(doc_b) + + if 
self.perturb_type == "append": + assert len(doc_a) < len( + doc_b + ), "Perturbed document should be longer than original for append perturbation." + doc_a = doc_a + [self.special_token] * (len(doc_b) - len(doc_a)) + elif self.perturb_type == "prepend": + assert len(doc_a) < len( + doc_b + ), "Perturbed document should be longer than original for prepend perturbation." + doc_a = [self.special_token] * (len(doc_b) - len(doc_a)) + doc_a + elif self.perturb_type == "replace": + if len(doc_a) == len(doc_b): + pass # no padding needed + else: + padded_a, padded_b = [], [] + idx_a, idx_b = 0, 0 + while idx_a < len(doc_a) and idx_b < len(doc_b): + if doc_a[idx_a] == doc_b[idx_b]: + padded_a.append(doc_a[idx_a]) + padded_b.append(doc_b[idx_b]) + idx_a += 1 + idx_b += 1 + else: + padded_a.append(doc_a[idx_a]) + padded_b.append(doc_b[idx_b]) + idx_a += 1 + idx_b += 1 + + if len(doc_a) < len(doc_b): + # Replaced term is shorter in length than the term it was replaced with + while idx_b < len(doc_b) and ( + idx_a >= len(doc_a) or doc_b[idx_b] != doc_a[idx_a] + ): + padded_a.append(self.special_token) + padded_b.append(doc_b[idx_b]) + idx_b += 1 + if len(doc_a) > len(doc_b): + # Replaced term is longer than the term it was replaced with + while idx_a < len(doc_a) and ( + idx_b >= len(doc_b) or doc_b[idx_b] != doc_a[idx_a] + ): + padded_a.append(doc_a[idx_a]) + padded_b.append(self.special_token) + idx_a += 1 + + doc_a, doc_b = padded_a, padded_b + + elif self.perturb_type == "inject": + pass + + assert len(doc_a) == len( + doc_b + ), "Failed to pad input pairs, mismatch in document lengths post-padding." 
+ return self.tokenizer.convert_tokens_to_string( + doc_a + ), self.tokenizer.convert_tokens_to_string(doc_b) + + +def pad_tokenized( + a_batch: torch.Tensor, + b_batch: torch.Tensor, + pad_tok: int, +): + + a_batch_input_ids, b_batch_input_ids = a_batch["input_ids"], b_batch["input_ids"] + a_batch_attn_mask, b_batch_attn_mask = ( + a_batch["attention_mask"], + b_batch["attention_mask"], + ) + + a_batch_final, b_batch_final = [], [] + a_batch_attn_final, b_batch_attn_final = [], [] + + for a_tokens, b_tokens, a_mask, b_mask in zip( + a_batch_input_ids, b_batch_input_ids, a_batch_attn_mask, b_batch_attn_mask + ): + a_padded_tokens, b_padded_tokens = [], [] + a_padded_attn_mask, b_padded_attn_mask = [], [] + + if len(a_tokens) == len(b_tokens): + # No padding needed + a_padded_tokens.append(a_tokens) + b_padded_tokens.append(b_tokens) + a_padded_attn_mask.append(a_mask) + b_padded_attn_mask.append(b_mask) + else: + # Determine where to pad + idx_a, idx_b = 0, 0 + while idx_a < len(a_tokens) and idx_b < len(b_tokens): + if a_tokens[idx_a] == b_tokens[idx_b]: + a_padded_tokens.append(a_tokens[idx_a]) + b_padded_tokens.append(b_tokens[idx_b]) + a_padded_attn_mask.append(a_mask[idx_a]) + b_padded_attn_mask.append(b_mask[idx_b]) + idx_a += 1 + idx_b += 1 + elif len(a_tokens) < len(b_tokens): + # Accounts for the following perturbations: append, prepend, insert + # Also for replacement where the replaced term is equal to or shorter in length than the term is was replaced with + a_padded_tokens.append(torch.tensor([pad_tok], dtype=torch.int32)) + b_padded_tokens.append(b_tokens[idx_b]) + a_padded_attn_mask.append(a_mask[idx_a]) + b_padded_attn_mask.append(b_mask[idx_b]) + idx_b += 1 + elif len(a_tokens) > len(b_tokens): + # Account for replacement perturbation where the replaced term is longer than the term is was replaced with + a_padded_tokens.append(a_tokens[idx_a]) + b_padded_tokens.append(torch.tensor([pad_tok], dtype=torch.int32)) + 
a_padded_attn_mask.append(a_mask[idx_a]) + b_padded_attn_mask.append(b_mask[idx_b]) + idx_a += 1 + + a_batch_final.append(torch.tensor(a_padded_tokens)) + b_batch_final.append(torch.tensor(b_padded_tokens)) + a_batch_attn_final.append(torch.tensor(a_padded_attn_mask)) + b_batch_attn_final.append(torch.tensor(b_padded_attn_mask)) + + finalized_tokenized_a_batch = { + "input_ids": torch.stack(a_batch_final), + "attention_mask": torch.stack(a_batch_attn_final), + } + finalized_tokenized_b_batch = { + "input_ids": torch.stack(b_batch_final), + "attention_mask": torch.stack(b_batch_attn_final), + } + + return finalized_tokenized_a_batch, finalized_tokenized_b_batch + +__all__ = ["BaseCollator", "pad_tokenized", "pad"] \ No newline at end of file diff --git a/src/mechir/data/loader/cat.py b/src/mechir/data/loader/cat.py index 3be1c52..d957acf 100644 --- a/src/mechir/data/loader/cat.py +++ b/src/mechir/data/loader/cat.py @@ -1,4 +1,4 @@ -from . import BaseCollator +from .base import BaseCollator class CatDataCollator(BaseCollator): diff --git a/src/mechir/data/loader/dot.py b/src/mechir/data/loader/dot.py index 4fdb9f9..5f4f703 100644 --- a/src/mechir/data/loader/dot.py +++ b/src/mechir/data/loader/dot.py @@ -1,4 +1,4 @@ -from . import BaseCollator +from .base import BaseCollator class DotDataCollator(BaseCollator): diff --git a/src/mechir/data/loader/t5.py b/src/mechir/data/loader/t5.py index 29a7b09..2236418 100644 --- a/src/mechir/data/loader/t5.py +++ b/src/mechir/data/loader/t5.py @@ -1,4 +1,4 @@ -from . 
import BaseCollator +from .base import BaseCollator class MonoT5DataCollator(BaseCollator): diff --git a/test/test_cat.py b/test/test_cat.py deleted file mode 100644 index e69de29..0000000 diff --git a/test/test_dot.py b/test/test_dot.py deleted file mode 100644 index e69de29..0000000 diff --git a/test/test_output_parity.py b/test/test_output_parity.py new file mode 100644 index 0000000..6fb6e31 --- /dev/null +++ b/test/test_output_parity.py @@ -0,0 +1,53 @@ +from mechir import Dot, Cat +import pandas as pd +try: + from pyterrier_dr import ElectraScorer, HgfBiEncoder +except ImportError: + return + +test_dataframe = pd.DataFrame([ + { + 'qid': "1", + "query": "What is the capital of France?", + "docno": "100", + "text": "Paris is the capital of France." + }, + { + "qid": "1", + "query" : "What is the capital of France?", + "docno" : "101", + "text" : "The capital of China is Beijing." + }, + { + "qid" : "2", + "query" : "What is the capital of China?", + "docno" : "100", + "text" : "Paris is the capital of France." + }, + { + "qid" : "2", + "query" : "What is the capital of China?", + "docno" : "101", + "text" : "The capital of China is Beijing." 
+ }, +]) + +CROSS_ENCODER_CHECKPOINT = "" +BI_ENCODER_CHECKPOINT = "" + +def score_cat(model, df): + + + +def test_electra_equivelance(): + hgf_cat = ElectraScorer() + mechir_cat = Cat(CROSS_ENCODER_CHECKPOINT) + + hgf_scores = hgf_cat.transform(test_dataframe) + + +def test_bi_equivelance(): + hgf_dot = HgfBiEncoder(BI_ENCODER_CHECKPOINT) + mechir_dot = Dot(BI_ENCODER_CHECKPOINT) + + hgf_scores = hgf_dot.transform(test_dataframe) \ No newline at end of file From ea9e15d19ace7d7f38ee305b8ed120b1c0d50d21 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E2=80=9CAndrew?= Date: Fri, 28 Feb 2025 11:50:08 +0000 Subject: [PATCH 08/21] further updates to tests and standardization of output --- src/mechir/modelling/cat.py | 2 +- src/mechir/modelling/dot.py | 2 +- src/mechir/modelling/t5.py | 2 +- test/test_output_parity.py | 42 +++++++++++++++++++++++++++++++++---- 4 files changed, 41 insertions(+), 7 deletions(-) diff --git a/src/mechir/modelling/cat.py b/src/mechir/modelling/cat.py index 4730124..c36b81b 100644 --- a/src/mechir/modelling/cat.py +++ b/src/mechir/modelling/cat.py @@ -168,7 +168,7 @@ def score(self, sequences: dict, cache=False): return logits, cache logits = self.forward(sequences["input_ids"], sequences["attention_mask"]) - return logits, logits + return logits, None def patch( self, diff --git a/src/mechir/modelling/dot.py b/src/mechir/modelling/dot.py index fb33a77..1c20161 100644 --- a/src/mechir/modelling/dot.py +++ b/src/mechir/modelling/dot.py @@ -171,7 +171,7 @@ def score(self, queries: dict, documents: dict, reps_q=None, cache=False): ) return batched_dot_product(reps_q, reps_d), reps_q, reps_d, cache_d reps_d = self.forward(documents["input_ids"], documents["attention_mask"]) - return batched_dot_product(reps_q, reps_d), reps_q, reps_d + return batched_dot_product(reps_q, reps_d), reps_q, reps_d, None def patch( self, diff --git a/src/mechir/modelling/t5.py b/src/mechir/modelling/t5.py index 70e55b1..be165e4 100644 --- a/src/mechir/modelling/t5.py +++ 
b/src/mechir/modelling/t5.py @@ -168,7 +168,7 @@ def score(self, sequences: dict, cache=False): return logits, cache logits = self.forward(sequences["input_ids"], sequences["attention_mask"]) - return logits + return logits, None def patch( self, diff --git a/test/test_output_parity.py b/test/test_output_parity.py index 6fb6e31..6c5e48f 100644 --- a/test/test_output_parity.py +++ b/test/test_output_parity.py @@ -35,19 +35,53 @@ CROSS_ENCODER_CHECKPOINT = "" BI_ENCODER_CHECKPOINT = "" + def score_cat(model, df): - + queries = df.query.to_list() + docs = df.text.to_list() + + tokenizer = model.tokenizer + + sequences = tokenizer(queries, docs, return_tensors="pt", padding=True, truncation=True) + + scores, _ = model.score(dict(sequences)) + return scores.cpu().numpy().tolist() + + +def score_dot(model, df): + queries = df.query.to_list() + docs = df.text.to_list() + + tokenizer = model.tokenizer + + queries = tokenizer(queries, return_tensors="pt", padding=True, truncation=True) + docs = tokenizer(docs, return_tensors="pt", padding=True, truncation=True) + + scores, _, _, _ = model.score(dict(queries), dict(docs)) + return scores.cpu().numpy().tolist() def test_electra_equivelance(): hgf_cat = ElectraScorer() - mechir_cat = Cat(CROSS_ENCODER_CHECKPOINT) + mechir_cat = Cat(CROSS_ENCODER_CHECKPOINT, softmax_output=True) - hgf_scores = hgf_cat.transform(test_dataframe) + hgf_scores = hgf_cat.transform(test_dataframe).score.to_list() + mechir_scores = score_cat(mechir_cat, test_dataframe) + query_id_doc_id_pairs = zip(test_dataframe.qid.to_list(), test_dataframe.docno.to_list()) + + # check they are close + for hgf, mechir, pair in zip(hgf_scores, mechir_scores, query_id_doc_id_pairs): + assert abs(hgf - mechir) < 0.01, f"Pair {pair} is not close, {hgf} != {mechir}" def test_bi_equivelance(): hgf_dot = HgfBiEncoder(BI_ENCODER_CHECKPOINT) mechir_dot = Dot(BI_ENCODER_CHECKPOINT) - hgf_scores = hgf_dot.transform(test_dataframe) \ No newline at end of file + hgf_scores = 
hgf_dot.transform(test_dataframe) + mechir_scores = score_dot(mechir_dot, test_dataframe) + query_id_doc_id_pairs = zip(test_dataframe.qid.to_list(), test_dataframe.docno.to_list()) + + # check they are close + for hgf, mechir, pair in zip(hgf_scores, mechir_scores, query_id_doc_id_pairs): + assert abs(hgf - mechir) < 0.01, f"Pair {pair} is not close, {hgf} != {mechir}" From 356b0e1d64ce8929f2989aa95b49ed8462dd1092 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E2=80=9CAndrew?= Date: Fri, 28 Feb 2025 12:27:44 +0000 Subject: [PATCH 09/21] add hardcoded checkpoints --- test/test_output_parity.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/test/test_output_parity.py b/test/test_output_parity.py index 6c5e48f..6ccfd9c 100644 --- a/test/test_output_parity.py +++ b/test/test_output_parity.py @@ -32,8 +32,8 @@ }, ]) -CROSS_ENCODER_CHECKPOINT = "" -BI_ENCODER_CHECKPOINT = "" +CROSS_ENCODER_CHECKPOINT = "crystina-z/monoELECTRA_LCE_nneg31" +BI_ENCODER_CHECKPOINT = "sebastian-hofstaetter/distilbert-dot-tas_b-b256-msmarco" def score_cat(model, df): From b61b0861b9f8f17c9d41d401a2b0fa716e4c3b31 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E2=80=9CAndrew?= Date: Fri, 28 Feb 2025 12:28:09 +0000 Subject: [PATCH 10/21] run the test --- test/test_output_parity.py | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/test/test_output_parity.py b/test/test_output_parity.py index 6ccfd9c..805da60 100644 --- a/test/test_output_parity.py +++ b/test/test_output_parity.py @@ -85,3 +85,9 @@ def test_bi_equivelance(): # check they are close for hgf, mechir, pair in zip(hgf_scores, mechir_scores, query_id_doc_id_pairs): assert abs(hgf - mechir) < 0.01, f"Pair {pair} is not close, {hgf} != {mechir}" + + +test_electra_equivelance() +test_bi_equivelance() + +print("All tests passed!") From c7f8fa6ba0816600aadba3514c1fa5f6f404c7d7 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E2=80=9CAndrew?= Date: Wed, 2 Apr 2025 10:49:16 +0100 Subject: [PATCH 11/21] minor things --- 
notebooks/analysis.ipynb | 335 +-- notebooks/plot/patch.cross.svg | 3293 ++++++++++++++++++++++++++++++ notebooks/plot/patch.pdf | Bin 25378 -> 25454 bytes src/mechir/modelling/steering.py | 65 + test/evaluate_models.py | 51 + 5 files changed, 3593 insertions(+), 151 deletions(-) create mode 100644 notebooks/plot/patch.cross.svg create mode 100644 src/mechir/modelling/steering.py create mode 100644 test/evaluate_models.py diff --git a/notebooks/analysis.ipynb b/notebooks/analysis.ipynb index 18527fa..f091bf6 100644 --- a/notebooks/analysis.ipynb +++ b/notebooks/analysis.ipynb @@ -2,159 +2,185 @@ "cells": [ { "cell_type": "code", - "execution_count": null, + "execution_count": 1, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ - "Requirement already satisfied: matplotlib in /home/andrew/miniconda3/lib/python3.12/site-packages (3.9.2)\n", - "Requirement already satisfied: seaborn in /home/andrew/miniconda3/lib/python3.12/site-packages (0.13.2)\n", - "Requirement already satisfied: plotly in /home/andrew/miniconda3/lib/python3.12/site-packages (5.24.1)\n", - "Requirement already satisfied: streamlit in /home/andrew/miniconda3/lib/python3.12/site-packages (1.39.0)\n", - "Requirement already satisfied: contourpy>=1.0.1 in /home/andrew/miniconda3/lib/python3.12/site-packages (from matplotlib) (1.3.0)\n", - "Requirement already satisfied: cycler>=0.10 in /home/andrew/miniconda3/lib/python3.12/site-packages (from matplotlib) (0.12.1)\n", - "Requirement already satisfied: fonttools>=4.22.0 in /home/andrew/miniconda3/lib/python3.12/site-packages (from matplotlib) (4.53.1)\n", - "Requirement already satisfied: kiwisolver>=1.3.1 in /home/andrew/miniconda3/lib/python3.12/site-packages (from matplotlib) (1.4.7)\n", - "Requirement already satisfied: numpy>=1.23 in /home/andrew/miniconda3/lib/python3.12/site-packages (from matplotlib) (1.26.4)\n", - "Requirement already satisfied: packaging>=20.0 in 
/home/andrew/miniconda3/lib/python3.12/site-packages (from matplotlib) (23.2)\n", - "Requirement already satisfied: pillow>=8 in /home/andrew/miniconda3/lib/python3.12/site-packages (from matplotlib) (10.3.0)\n", - "Requirement already satisfied: pyparsing>=2.3.1 in /home/andrew/miniconda3/lib/python3.12/site-packages (from matplotlib) (3.1.4)\n", - "Requirement already satisfied: python-dateutil>=2.7 in /home/andrew/miniconda3/lib/python3.12/site-packages (from matplotlib) (2.9.0)\n", - "Requirement already satisfied: pandas>=1.2 in /home/andrew/miniconda3/lib/python3.12/site-packages (from seaborn) (2.2.2)\n", - "Requirement already satisfied: tenacity>=6.2.0 in /home/andrew/miniconda3/lib/python3.12/site-packages (from plotly) (9.0.0)\n", - "Requirement already satisfied: altair<6,>=4.0 in /home/andrew/miniconda3/lib/python3.12/site-packages (from streamlit) (5.4.1)\n", - "Requirement already satisfied: blinker<2,>=1.0.0 in /home/andrew/miniconda3/lib/python3.12/site-packages (from streamlit) (1.8.2)\n", - "Requirement already satisfied: cachetools<6,>=4.0 in /home/andrew/miniconda3/lib/python3.12/site-packages (from streamlit) (5.5.0)\n", - "Requirement already satisfied: click<9,>=7.0 in /home/andrew/miniconda3/lib/python3.12/site-packages (from streamlit) (8.1.7)\n", - "Requirement already satisfied: protobuf<6,>=3.20 in /home/andrew/miniconda3/lib/python3.12/site-packages (from streamlit) (5.28.3)\n", - "Requirement already satisfied: pyarrow>=7.0 in /home/andrew/miniconda3/lib/python3.12/site-packages (from streamlit) (18.0.0)\n", - "Requirement already satisfied: requests<3,>=2.27 in /home/andrew/miniconda3/lib/python3.12/site-packages (from streamlit) (2.32.3)\n", - "Requirement already satisfied: rich<14,>=10.14.0 in /home/andrew/miniconda3/lib/python3.12/site-packages (from streamlit) (13.7.1)\n", - "Requirement already satisfied: toml<2,>=0.10.1 in /home/andrew/miniconda3/lib/python3.12/site-packages (from streamlit) (0.10.2)\n", - "Requirement already 
satisfied: typing-extensions<5,>=4.3.0 in /home/andrew/miniconda3/lib/python3.12/site-packages (from streamlit) (4.11.0)\n", - "Requirement already satisfied: gitpython!=3.1.19,<4,>=3.0.7 in /home/andrew/miniconda3/lib/python3.12/site-packages (from streamlit) (3.1.43)\n", - "Requirement already satisfied: pydeck<1,>=0.8.0b4 in /home/andrew/miniconda3/lib/python3.12/site-packages (from streamlit) (0.9.1)\n", - "Requirement already satisfied: tornado<7,>=6.0.3 in /home/andrew/miniconda3/lib/python3.12/site-packages (from streamlit) (6.4)\n", - "Requirement already satisfied: watchdog<6,>=2.1.5 in /home/andrew/miniconda3/lib/python3.12/site-packages (from streamlit) (5.0.3)\n", - "Requirement already satisfied: jinja2 in /home/andrew/miniconda3/lib/python3.12/site-packages (from altair<6,>=4.0->streamlit) (3.1.3)\n", - "Requirement already satisfied: jsonschema>=3.0 in /home/andrew/miniconda3/lib/python3.12/site-packages (from altair<6,>=4.0->streamlit) (4.22.0)\n", - "Requirement already satisfied: narwhals>=1.5.2 in /home/andrew/miniconda3/lib/python3.12/site-packages (from altair<6,>=4.0->streamlit) (1.11.1)\n", - "Requirement already satisfied: gitdb<5,>=4.0.1 in /home/andrew/miniconda3/lib/python3.12/site-packages (from gitpython!=3.1.19,<4,>=3.0.7->streamlit) (4.0.11)\n", - "Requirement already satisfied: pytz>=2020.1 in /home/andrew/miniconda3/lib/python3.12/site-packages (from pandas>=1.2->seaborn) (2024.1)\n", - "Requirement already satisfied: tzdata>=2022.7 in /home/andrew/miniconda3/lib/python3.12/site-packages (from pandas>=1.2->seaborn) (2024.1)\n", - "Requirement already satisfied: six>=1.5 in /home/andrew/miniconda3/lib/python3.12/site-packages (from python-dateutil>=2.7->matplotlib) (1.16.0)\n", - "Requirement already satisfied: charset-normalizer<4,>=2 in /home/andrew/miniconda3/lib/python3.12/site-packages (from requests<3,>=2.27->streamlit) (2.0.4)\n", - "Requirement already satisfied: idna<4,>=2.5 in 
/home/andrew/miniconda3/lib/python3.12/site-packages (from requests<3,>=2.27->streamlit) (3.4)\n", - "Requirement already satisfied: urllib3<3,>=1.21.1 in /home/andrew/miniconda3/lib/python3.12/site-packages (from requests<3,>=2.27->streamlit) (2.1.0)\n", - "Requirement already satisfied: certifi>=2017.4.17 in /home/andrew/miniconda3/lib/python3.12/site-packages (from requests<3,>=2.27->streamlit) (2024.2.2)\n", - "Requirement already satisfied: markdown-it-py>=2.2.0 in /home/andrew/miniconda3/lib/python3.12/site-packages (from rich<14,>=10.14.0->streamlit) (3.0.0)\n", - "Requirement already satisfied: pygments<3.0.0,>=2.13.0 in /home/andrew/miniconda3/lib/python3.12/site-packages (from rich<14,>=10.14.0->streamlit) (2.18.0)\n", - "Requirement already satisfied: smmap<6,>=3.0.1 in /home/andrew/miniconda3/lib/python3.12/site-packages (from gitdb<5,>=4.0.1->gitpython!=3.1.19,<4,>=3.0.7->streamlit) (5.0.1)\n", - "Requirement already satisfied: MarkupSafe>=2.0 in /home/andrew/miniconda3/lib/python3.12/site-packages (from jinja2->altair<6,>=4.0->streamlit) (2.1.5)\n", - "Requirement already satisfied: attrs>=22.2.0 in /home/andrew/miniconda3/lib/python3.12/site-packages (from jsonschema>=3.0->altair<6,>=4.0->streamlit) (23.2.0)\n", - "Requirement already satisfied: jsonschema-specifications>=2023.03.6 in /home/andrew/miniconda3/lib/python3.12/site-packages (from jsonschema>=3.0->altair<6,>=4.0->streamlit) (2023.12.1)\n", - "Requirement already satisfied: referencing>=0.28.4 in /home/andrew/miniconda3/lib/python3.12/site-packages (from jsonschema>=3.0->altair<6,>=4.0->streamlit) (0.35.1)\n", - "Requirement already satisfied: rpds-py>=0.7.1 in /home/andrew/miniconda3/lib/python3.12/site-packages (from jsonschema>=3.0->altair<6,>=4.0->streamlit) (0.18.1)\n", - "Requirement already satisfied: mdurl~=0.1 in /home/andrew/miniconda3/lib/python3.12/site-packages (from markdown-it-py>=2.2.0->rich<14,>=10.14.0->streamlit) (0.1.2)\n", + "Requirement already satisfied: matplotlib 
in /opt/anaconda3/envs/mechir/lib/python3.11/site-packages (3.10.0)\n", + "Requirement already satisfied: seaborn in /opt/anaconda3/envs/mechir/lib/python3.11/site-packages (0.13.2)\n", + "Requirement already satisfied: plotly in /opt/anaconda3/envs/mechir/lib/python3.11/site-packages (5.24.1)\n", + "Requirement already satisfied: streamlit in /opt/anaconda3/envs/mechir/lib/python3.11/site-packages (1.42.0)\n", + "Requirement already satisfied: contourpy>=1.0.1 in /opt/anaconda3/envs/mechir/lib/python3.11/site-packages (from matplotlib) (1.3.1)\n", + "Requirement already satisfied: cycler>=0.10 in /opt/anaconda3/envs/mechir/lib/python3.11/site-packages (from matplotlib) (0.12.1)\n", + "Requirement already satisfied: fonttools>=4.22.0 in /opt/anaconda3/envs/mechir/lib/python3.11/site-packages (from matplotlib) (4.56.0)\n", + "Requirement already satisfied: kiwisolver>=1.3.1 in /opt/anaconda3/envs/mechir/lib/python3.11/site-packages (from matplotlib) (1.4.8)\n", + "Requirement already satisfied: numpy>=1.23 in /opt/anaconda3/envs/mechir/lib/python3.11/site-packages (from matplotlib) (1.26.4)\n", + "Requirement already satisfied: packaging>=20.0 in /opt/anaconda3/envs/mechir/lib/python3.11/site-packages (from matplotlib) (24.2)\n", + "Requirement already satisfied: pillow>=8 in /opt/anaconda3/envs/mechir/lib/python3.11/site-packages (from matplotlib) (11.1.0)\n", + "Requirement already satisfied: pyparsing>=2.3.1 in /opt/anaconda3/envs/mechir/lib/python3.11/site-packages (from matplotlib) (3.2.1)\n", + "Requirement already satisfied: python-dateutil>=2.7 in /opt/anaconda3/envs/mechir/lib/python3.11/site-packages (from matplotlib) (2.9.0.post0)\n", + "Requirement already satisfied: pandas>=1.2 in /opt/anaconda3/envs/mechir/lib/python3.11/site-packages (from seaborn) (2.2.3)\n", + "Requirement already satisfied: tenacity>=6.2.0 in /opt/anaconda3/envs/mechir/lib/python3.11/site-packages (from plotly) (9.0.0)\n", + "Requirement already satisfied: altair<6,>=4.0 in 
/opt/anaconda3/envs/mechir/lib/python3.11/site-packages (from streamlit) (5.5.0)\n", + "Requirement already satisfied: blinker<2,>=1.0.0 in /opt/anaconda3/envs/mechir/lib/python3.11/site-packages (from streamlit) (1.9.0)\n", + "Requirement already satisfied: cachetools<6,>=4.0 in /opt/anaconda3/envs/mechir/lib/python3.11/site-packages (from streamlit) (5.5.1)\n", + "Requirement already satisfied: click<9,>=7.0 in /opt/anaconda3/envs/mechir/lib/python3.11/site-packages (from streamlit) (8.1.8)\n", + "Requirement already satisfied: protobuf<6,>=3.20 in /opt/anaconda3/envs/mechir/lib/python3.11/site-packages (from streamlit) (5.29.3)\n", + "Requirement already satisfied: pyarrow>=7.0 in /opt/anaconda3/envs/mechir/lib/python3.11/site-packages (from streamlit) (19.0.0)\n", + "Requirement already satisfied: requests<3,>=2.27 in /opt/anaconda3/envs/mechir/lib/python3.11/site-packages (from streamlit) (2.32.3)\n", + "Requirement already satisfied: rich<14,>=10.14.0 in /opt/anaconda3/envs/mechir/lib/python3.11/site-packages (from streamlit) (13.9.4)\n", + "Requirement already satisfied: toml<2,>=0.10.1 in /opt/anaconda3/envs/mechir/lib/python3.11/site-packages (from streamlit) (0.10.2)\n", + "Requirement already satisfied: typing-extensions<5,>=4.4.0 in /opt/anaconda3/envs/mechir/lib/python3.11/site-packages (from streamlit) (4.12.2)\n", + "Requirement already satisfied: gitpython!=3.1.19,<4,>=3.0.7 in /opt/anaconda3/envs/mechir/lib/python3.11/site-packages (from streamlit) (3.1.44)\n", + "Requirement already satisfied: pydeck<1,>=0.8.0b4 in /opt/anaconda3/envs/mechir/lib/python3.11/site-packages (from streamlit) (0.9.1)\n", + "Requirement already satisfied: tornado<7,>=6.0.3 in /opt/anaconda3/envs/mechir/lib/python3.11/site-packages (from streamlit) (6.4.2)\n", + "Requirement already satisfied: jinja2 in /opt/anaconda3/envs/mechir/lib/python3.11/site-packages (from altair<6,>=4.0->streamlit) (3.1.5)\n", + "Requirement already satisfied: jsonschema>=3.0 in 
/opt/anaconda3/envs/mechir/lib/python3.11/site-packages (from altair<6,>=4.0->streamlit) (4.23.0)\n", + "Requirement already satisfied: narwhals>=1.14.2 in /opt/anaconda3/envs/mechir/lib/python3.11/site-packages (from altair<6,>=4.0->streamlit) (1.27.0)\n", + "Requirement already satisfied: gitdb<5,>=4.0.1 in /opt/anaconda3/envs/mechir/lib/python3.11/site-packages (from gitpython!=3.1.19,<4,>=3.0.7->streamlit) (4.0.12)\n", + "Requirement already satisfied: pytz>=2020.1 in /opt/anaconda3/envs/mechir/lib/python3.11/site-packages (from pandas>=1.2->seaborn) (2025.1)\n", + "Requirement already satisfied: tzdata>=2022.7 in /opt/anaconda3/envs/mechir/lib/python3.11/site-packages (from pandas>=1.2->seaborn) (2025.1)\n", + "Requirement already satisfied: six>=1.5 in /opt/anaconda3/envs/mechir/lib/python3.11/site-packages (from python-dateutil>=2.7->matplotlib) (1.17.0)\n", + "Requirement already satisfied: charset-normalizer<4,>=2 in /opt/anaconda3/envs/mechir/lib/python3.11/site-packages (from requests<3,>=2.27->streamlit) (3.4.1)\n", + "Requirement already satisfied: idna<4,>=2.5 in /opt/anaconda3/envs/mechir/lib/python3.11/site-packages (from requests<3,>=2.27->streamlit) (3.10)\n", + "Requirement already satisfied: urllib3<3,>=1.21.1 in /opt/anaconda3/envs/mechir/lib/python3.11/site-packages (from requests<3,>=2.27->streamlit) (2.3.0)\n", + "Requirement already satisfied: certifi>=2017.4.17 in /opt/anaconda3/envs/mechir/lib/python3.11/site-packages (from requests<3,>=2.27->streamlit) (2025.1.31)\n", + "Requirement already satisfied: markdown-it-py>=2.2.0 in /opt/anaconda3/envs/mechir/lib/python3.11/site-packages (from rich<14,>=10.14.0->streamlit) (3.0.0)\n", + "Requirement already satisfied: pygments<3.0.0,>=2.13.0 in /opt/anaconda3/envs/mechir/lib/python3.11/site-packages (from rich<14,>=10.14.0->streamlit) (2.19.1)\n", + "Requirement already satisfied: smmap<6,>=3.0.1 in /opt/anaconda3/envs/mechir/lib/python3.11/site-packages (from 
gitdb<5,>=4.0.1->gitpython!=3.1.19,<4,>=3.0.7->streamlit) (5.0.2)\n", + "Requirement already satisfied: MarkupSafe>=2.0 in /opt/anaconda3/envs/mechir/lib/python3.11/site-packages (from jinja2->altair<6,>=4.0->streamlit) (3.0.2)\n", + "Requirement already satisfied: attrs>=22.2.0 in /opt/anaconda3/envs/mechir/lib/python3.11/site-packages (from jsonschema>=3.0->altair<6,>=4.0->streamlit) (25.1.0)\n", + "Requirement already satisfied: jsonschema-specifications>=2023.03.6 in /opt/anaconda3/envs/mechir/lib/python3.11/site-packages (from jsonschema>=3.0->altair<6,>=4.0->streamlit) (2024.10.1)\n", + "Requirement already satisfied: referencing>=0.28.4 in /opt/anaconda3/envs/mechir/lib/python3.11/site-packages (from jsonschema>=3.0->altair<6,>=4.0->streamlit) (0.36.2)\n", + "Requirement already satisfied: rpds-py>=0.7.1 in /opt/anaconda3/envs/mechir/lib/python3.11/site-packages (from jsonschema>=3.0->altair<6,>=4.0->streamlit) (0.22.3)\n", + "Requirement already satisfied: mdurl~=0.1 in /opt/anaconda3/envs/mechir/lib/python3.11/site-packages (from markdown-it-py>=2.2.0->rich<14,>=10.14.0->streamlit) (0.1.2)\n", "Note: you may need to restart the kernel to use updated packages.\n", "Looking in indexes: https://download.pytorch.org/whl/cpu\n", - "Requirement already satisfied: torch in /home/andrew/miniconda3/lib/python3.12/site-packages (2.5.0+cpu)\n", - "Requirement already satisfied: torchvision in /home/andrew/miniconda3/lib/python3.12/site-packages (0.20.0+cpu)\n", - "Requirement already satisfied: torchaudio in /home/andrew/miniconda3/lib/python3.12/site-packages (2.5.0+cpu)\n", - "Requirement already satisfied: filelock in /home/andrew/miniconda3/lib/python3.12/site-packages (from torch) (3.13.1)\n", - "Requirement already satisfied: typing-extensions>=4.8.0 in /home/andrew/miniconda3/lib/python3.12/site-packages (from torch) (4.11.0)\n", - "Requirement already satisfied: networkx in /home/andrew/miniconda3/lib/python3.12/site-packages (from torch) (3.2.1)\n", - 
"Requirement already satisfied: jinja2 in /home/andrew/miniconda3/lib/python3.12/site-packages (from torch) (3.1.3)\n", - "Requirement already satisfied: fsspec in /home/andrew/miniconda3/lib/python3.12/site-packages (from torch) (2024.2.0)\n", - "Requirement already satisfied: setuptools in /home/andrew/miniconda3/lib/python3.12/site-packages (from torch) (68.2.2)\n", - "Requirement already satisfied: sympy==1.13.1 in /home/andrew/miniconda3/lib/python3.12/site-packages (from torch) (1.13.1)\n", - "Requirement already satisfied: mpmath<1.4,>=1.1.0 in /home/andrew/miniconda3/lib/python3.12/site-packages (from sympy==1.13.1->torch) (1.3.0)\n", - "Requirement already satisfied: numpy in /home/andrew/miniconda3/lib/python3.12/site-packages (from torchvision) (1.26.4)\n", - "Requirement already satisfied: pillow!=8.3.*,>=5.3.0 in /home/andrew/miniconda3/lib/python3.12/site-packages (from torchvision) (10.3.0)\n", - "Requirement already satisfied: MarkupSafe>=2.0 in /home/andrew/miniconda3/lib/python3.12/site-packages (from jinja2->torch) (2.1.5)\n", + "Requirement already satisfied: torch in /opt/anaconda3/envs/mechir/lib/python3.11/site-packages (2.4.1)\n", + "Collecting torchvision\n", + " Downloading https://download.pytorch.org/whl/cpu/torchvision-0.21.0-cp311-cp311-macosx_11_0_arm64.whl.metadata (6.1 kB)\n", + "Collecting torchaudio\n", + " Downloading https://download.pytorch.org/whl/cpu/torchaudio-2.6.0-cp311-cp311-macosx_11_0_arm64.whl.metadata (6.6 kB)\n", + "Requirement already satisfied: filelock in /opt/anaconda3/envs/mechir/lib/python3.11/site-packages (from torch) (3.17.0)\n", + "Requirement already satisfied: typing-extensions>=4.8.0 in /opt/anaconda3/envs/mechir/lib/python3.11/site-packages (from torch) (4.12.2)\n", + "Requirement already satisfied: sympy in /opt/anaconda3/envs/mechir/lib/python3.11/site-packages (from torch) (1.13.3)\n", + "Requirement already satisfied: networkx in /opt/anaconda3/envs/mechir/lib/python3.11/site-packages (from torch) 
(3.4.2)\n", + "Requirement already satisfied: jinja2 in /opt/anaconda3/envs/mechir/lib/python3.11/site-packages (from torch) (3.1.5)\n", + "Requirement already satisfied: fsspec in /opt/anaconda3/envs/mechir/lib/python3.11/site-packages (from torch) (2024.6.1)\n", + "Requirement already satisfied: numpy in /opt/anaconda3/envs/mechir/lib/python3.11/site-packages (from torchvision) (1.26.4)\n", + "Collecting torch\n", + " Downloading https://download.pytorch.org/whl/cpu/torch-2.6.0-cp311-none-macosx_11_0_arm64.whl.metadata (28 kB)\n", + "Requirement already satisfied: pillow!=8.3.*,>=5.3.0 in /opt/anaconda3/envs/mechir/lib/python3.11/site-packages (from torchvision) (11.1.0)\n", + "Collecting sympy==1.13.1 (from torch)\n", + " Downloading https://download.pytorch.org/whl/sympy-1.13.1-py3-none-any.whl (6.2 MB)\n", + "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m6.2/6.2 MB\u001b[0m \u001b[31m9.6 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0ma \u001b[36m0:00:01\u001b[0m\n", + "\u001b[?25hRequirement already satisfied: mpmath<1.4,>=1.1.0 in /opt/anaconda3/envs/mechir/lib/python3.11/site-packages (from sympy==1.13.1->torch) (1.3.0)\n", + "Requirement already satisfied: MarkupSafe>=2.0 in /opt/anaconda3/envs/mechir/lib/python3.11/site-packages (from jinja2->torch) (3.0.2)\n", + "Downloading https://download.pytorch.org/whl/cpu/torchvision-0.21.0-cp311-cp311-macosx_11_0_arm64.whl (1.8 MB)\n", + "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m1.8/1.8 MB\u001b[0m \u001b[31m6.6 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0ma \u001b[36m0:00:01\u001b[0m\n", + "\u001b[?25hDownloading https://download.pytorch.org/whl/cpu/torch-2.6.0-cp311-none-macosx_11_0_arm64.whl (66.5 MB)\n", + "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m66.5/66.5 MB\u001b[0m \u001b[31m8.7 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m00:01\u001b[0m00:01\u001b[0m\n", + "\u001b[?25hDownloading 
https://download.pytorch.org/whl/cpu/torchaudio-2.6.0-cp311-cp311-macosx_11_0_arm64.whl (1.8 MB)\n", + "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m1.8/1.8 MB\u001b[0m \u001b[31m9.5 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n", + "\u001b[?25hInstalling collected packages: sympy, torch, torchvision, torchaudio\n", + " Attempting uninstall: sympy\n", + " Found existing installation: sympy 1.13.3\n", + " Uninstalling sympy-1.13.3:\n", + " Successfully uninstalled sympy-1.13.3\n", + " Attempting uninstall: torch\n", + " Found existing installation: torch 2.4.1\n", + " Uninstalling torch-2.4.1:\n", + " Successfully uninstalled torch-2.4.1\n", + "\u001b[31mERROR: pip's dependency resolver does not currently take into account all the packages that are installed. This behaviour is the source of the following dependency conflicts.\n", + "transformer-lens 2.14.0 requires torch<2.5,>=2.2, but you have torch 2.6.0 which is incompatible.\u001b[0m\u001b[31m\n", + "\u001b[0mSuccessfully installed sympy-1.13.1 torch-2.6.0 torchaudio-2.6.0 torchvision-0.21.0\n", "Note: you may need to restart the kernel to use updated packages.\n", - "Requirement already satisfied: jaxtyping in /home/andrew/miniconda3/lib/python3.12/site-packages (0.2.34)\n", - "Requirement already satisfied: transformer_lens in /home/andrew/miniconda3/lib/python3.12/site-packages (2.8.1)\n", - "Requirement already satisfied: typeguard==2.13.3 in /home/andrew/miniconda3/lib/python3.12/site-packages (from jaxtyping) (2.13.3)\n", - "Requirement already satisfied: accelerate>=0.23.0 in /home/andrew/miniconda3/lib/python3.12/site-packages (from transformer_lens) (1.0.1)\n", - "Requirement already satisfied: beartype<0.15.0,>=0.14.1 in /home/andrew/miniconda3/lib/python3.12/site-packages (from transformer_lens) (0.14.1)\n", - "Requirement already satisfied: better-abc<0.0.4,>=0.0.3 in /home/andrew/miniconda3/lib/python3.12/site-packages (from transformer_lens) (0.0.3)\n", - 
"Requirement already satisfied: datasets>=2.7.1 in /home/andrew/miniconda3/lib/python3.12/site-packages (from transformer_lens) (3.0.2)\n", - "Requirement already satisfied: einops>=0.6.0 in /home/andrew/miniconda3/lib/python3.12/site-packages (from transformer_lens) (0.8.0)\n", - "Requirement already satisfied: fancy-einsum>=0.0.3 in /home/andrew/miniconda3/lib/python3.12/site-packages (from transformer_lens) (0.0.3)\n", - "Requirement already satisfied: numpy>=1.26 in /home/andrew/miniconda3/lib/python3.12/site-packages (from transformer_lens) (1.26.4)\n", - "Requirement already satisfied: pandas>=1.1.5 in /home/andrew/miniconda3/lib/python3.12/site-packages (from transformer_lens) (2.2.2)\n", - "Requirement already satisfied: rich>=12.6.0 in /home/andrew/miniconda3/lib/python3.12/site-packages (from transformer_lens) (13.7.1)\n", - "Requirement already satisfied: sentencepiece in /home/andrew/miniconda3/lib/python3.12/site-packages (from transformer_lens) (0.2.0)\n", - "Requirement already satisfied: torch>=1.10 in /home/andrew/miniconda3/lib/python3.12/site-packages (from transformer_lens) (2.5.0+cpu)\n", - "Requirement already satisfied: tqdm>=4.64.1 in /home/andrew/miniconda3/lib/python3.12/site-packages (from transformer_lens) (4.66.6)\n", - "Requirement already satisfied: transformers>=4.37.2 in /home/andrew/miniconda3/lib/python3.12/site-packages (from transformer_lens) (4.46.0)\n", - "Requirement already satisfied: typing-extensions in /home/andrew/miniconda3/lib/python3.12/site-packages (from transformer_lens) (4.11.0)\n", - "Requirement already satisfied: wandb>=0.13.5 in /home/andrew/miniconda3/lib/python3.12/site-packages (from transformer_lens) (0.18.5)\n", - "Requirement already satisfied: packaging>=20.0 in /home/andrew/miniconda3/lib/python3.12/site-packages (from accelerate>=0.23.0->transformer_lens) (23.2)\n", - "Requirement already satisfied: psutil in /home/andrew/miniconda3/lib/python3.12/site-packages (from 
accelerate>=0.23.0->transformer_lens) (5.9.8)\n", - "Requirement already satisfied: pyyaml in /home/andrew/miniconda3/lib/python3.12/site-packages (from accelerate>=0.23.0->transformer_lens) (6.0.1)\n", - "Requirement already satisfied: huggingface-hub>=0.21.0 in /home/andrew/miniconda3/lib/python3.12/site-packages (from accelerate>=0.23.0->transformer_lens) (0.26.2)\n", - "Requirement already satisfied: safetensors>=0.4.3 in /home/andrew/miniconda3/lib/python3.12/site-packages (from accelerate>=0.23.0->transformer_lens) (0.4.5)\n", - "Requirement already satisfied: filelock in /home/andrew/miniconda3/lib/python3.12/site-packages (from datasets>=2.7.1->transformer_lens) (3.13.1)\n", - "Requirement already satisfied: pyarrow>=15.0.0 in /home/andrew/miniconda3/lib/python3.12/site-packages (from datasets>=2.7.1->transformer_lens) (18.0.0)\n", - "Requirement already satisfied: dill<0.3.9,>=0.3.0 in /home/andrew/miniconda3/lib/python3.12/site-packages (from datasets>=2.7.1->transformer_lens) (0.3.8)\n", - "Requirement already satisfied: requests>=2.32.2 in /home/andrew/miniconda3/lib/python3.12/site-packages (from datasets>=2.7.1->transformer_lens) (2.32.3)\n", - "Requirement already satisfied: xxhash in /home/andrew/miniconda3/lib/python3.12/site-packages (from datasets>=2.7.1->transformer_lens) (3.5.0)\n", - "Requirement already satisfied: multiprocess<0.70.17 in /home/andrew/miniconda3/lib/python3.12/site-packages (from datasets>=2.7.1->transformer_lens) (0.70.16)\n", - "Requirement already satisfied: fsspec<=2024.9.0,>=2023.1.0 in /home/andrew/miniconda3/lib/python3.12/site-packages (from fsspec[http]<=2024.9.0,>=2023.1.0->datasets>=2.7.1->transformer_lens) (2024.2.0)\n", - "Requirement already satisfied: aiohttp in /home/andrew/miniconda3/lib/python3.12/site-packages (from datasets>=2.7.1->transformer_lens) (3.10.10)\n", - "Requirement already satisfied: python-dateutil>=2.8.2 in /home/andrew/miniconda3/lib/python3.12/site-packages (from 
pandas>=1.1.5->transformer_lens) (2.9.0)\n", - "Requirement already satisfied: pytz>=2020.1 in /home/andrew/miniconda3/lib/python3.12/site-packages (from pandas>=1.1.5->transformer_lens) (2024.1)\n", - "Requirement already satisfied: tzdata>=2022.7 in /home/andrew/miniconda3/lib/python3.12/site-packages (from pandas>=1.1.5->transformer_lens) (2024.1)\n", - "Requirement already satisfied: markdown-it-py>=2.2.0 in /home/andrew/miniconda3/lib/python3.12/site-packages (from rich>=12.6.0->transformer_lens) (3.0.0)\n", - "Requirement already satisfied: pygments<3.0.0,>=2.13.0 in /home/andrew/miniconda3/lib/python3.12/site-packages (from rich>=12.6.0->transformer_lens) (2.18.0)\n", - "Requirement already satisfied: networkx in /home/andrew/miniconda3/lib/python3.12/site-packages (from torch>=1.10->transformer_lens) (3.2.1)\n", - "Requirement already satisfied: jinja2 in /home/andrew/miniconda3/lib/python3.12/site-packages (from torch>=1.10->transformer_lens) (3.1.3)\n", - "Requirement already satisfied: setuptools in /home/andrew/miniconda3/lib/python3.12/site-packages (from torch>=1.10->transformer_lens) (68.2.2)\n", - "Requirement already satisfied: sympy==1.13.1 in /home/andrew/miniconda3/lib/python3.12/site-packages (from torch>=1.10->transformer_lens) (1.13.1)\n", - "Requirement already satisfied: mpmath<1.4,>=1.1.0 in /home/andrew/miniconda3/lib/python3.12/site-packages (from sympy==1.13.1->torch>=1.10->transformer_lens) (1.3.0)\n", - "Requirement already satisfied: regex!=2019.12.17 in /home/andrew/miniconda3/lib/python3.12/site-packages (from transformers>=4.37.2->transformer_lens) (2024.4.28)\n", - "Requirement already satisfied: tokenizers<0.21,>=0.20 in /home/andrew/miniconda3/lib/python3.12/site-packages (from transformers>=4.37.2->transformer_lens) (0.20.1)\n", - "Requirement already satisfied: click!=8.0.0,>=7.1 in /home/andrew/miniconda3/lib/python3.12/site-packages (from wandb>=0.13.5->transformer_lens) (8.1.7)\n", - "Requirement already satisfied: 
docker-pycreds>=0.4.0 in /home/andrew/miniconda3/lib/python3.12/site-packages (from wandb>=0.13.5->transformer_lens) (0.4.0)\n", - "Requirement already satisfied: gitpython!=3.1.29,>=1.0.0 in /home/andrew/miniconda3/lib/python3.12/site-packages (from wandb>=0.13.5->transformer_lens) (3.1.43)\n", - "Requirement already satisfied: platformdirs in /home/andrew/miniconda3/lib/python3.12/site-packages (from wandb>=0.13.5->transformer_lens) (3.10.0)\n", - "Requirement already satisfied: protobuf!=4.21.0,!=5.28.0,<6,>=3.19.0 in /home/andrew/miniconda3/lib/python3.12/site-packages (from wandb>=0.13.5->transformer_lens) (5.28.3)\n", - "Requirement already satisfied: sentry-sdk>=2.0.0 in /home/andrew/miniconda3/lib/python3.12/site-packages (from wandb>=0.13.5->transformer_lens) (2.17.0)\n", - "Requirement already satisfied: setproctitle in /home/andrew/miniconda3/lib/python3.12/site-packages (from wandb>=0.13.5->transformer_lens) (1.3.3)\n", - "Requirement already satisfied: six>=1.4.0 in /home/andrew/miniconda3/lib/python3.12/site-packages (from docker-pycreds>=0.4.0->wandb>=0.13.5->transformer_lens) (1.16.0)\n", - "Requirement already satisfied: aiohappyeyeballs>=2.3.0 in /home/andrew/miniconda3/lib/python3.12/site-packages (from aiohttp->datasets>=2.7.1->transformer_lens) (2.4.3)\n", - "Requirement already satisfied: aiosignal>=1.1.2 in /home/andrew/miniconda3/lib/python3.12/site-packages (from aiohttp->datasets>=2.7.1->transformer_lens) (1.3.1)\n", - "Requirement already satisfied: attrs>=17.3.0 in /home/andrew/miniconda3/lib/python3.12/site-packages (from aiohttp->datasets>=2.7.1->transformer_lens) (23.2.0)\n", - "Requirement already satisfied: frozenlist>=1.1.1 in /home/andrew/miniconda3/lib/python3.12/site-packages (from aiohttp->datasets>=2.7.1->transformer_lens) (1.5.0)\n", - "Requirement already satisfied: multidict<7.0,>=4.5 in /home/andrew/miniconda3/lib/python3.12/site-packages (from aiohttp->datasets>=2.7.1->transformer_lens) (6.1.0)\n", - "Requirement already 
satisfied: yarl<2.0,>=1.12.0 in /home/andrew/miniconda3/lib/python3.12/site-packages (from aiohttp->datasets>=2.7.1->transformer_lens) (1.16.0)\n", - "Requirement already satisfied: gitdb<5,>=4.0.1 in /home/andrew/miniconda3/lib/python3.12/site-packages (from gitpython!=3.1.29,>=1.0.0->wandb>=0.13.5->transformer_lens) (4.0.11)\n", - "Requirement already satisfied: mdurl~=0.1 in /home/andrew/miniconda3/lib/python3.12/site-packages (from markdown-it-py>=2.2.0->rich>=12.6.0->transformer_lens) (0.1.2)\n", - "Requirement already satisfied: charset-normalizer<4,>=2 in /home/andrew/miniconda3/lib/python3.12/site-packages (from requests>=2.32.2->datasets>=2.7.1->transformer_lens) (2.0.4)\n", - "Requirement already satisfied: idna<4,>=2.5 in /home/andrew/miniconda3/lib/python3.12/site-packages (from requests>=2.32.2->datasets>=2.7.1->transformer_lens) (3.4)\n", - "Requirement already satisfied: urllib3<3,>=1.21.1 in /home/andrew/miniconda3/lib/python3.12/site-packages (from requests>=2.32.2->datasets>=2.7.1->transformer_lens) (2.1.0)\n", - "Requirement already satisfied: certifi>=2017.4.17 in /home/andrew/miniconda3/lib/python3.12/site-packages (from requests>=2.32.2->datasets>=2.7.1->transformer_lens) (2024.2.2)\n", - "Requirement already satisfied: MarkupSafe>=2.0 in /home/andrew/miniconda3/lib/python3.12/site-packages (from jinja2->torch>=1.10->transformer_lens) (2.1.5)\n", - "Requirement already satisfied: smmap<6,>=3.0.1 in /home/andrew/miniconda3/lib/python3.12/site-packages (from gitdb<5,>=4.0.1->gitpython!=3.1.29,>=1.0.0->wandb>=0.13.5->transformer_lens) (5.0.1)\n", - "Requirement already satisfied: propcache>=0.2.0 in /home/andrew/miniconda3/lib/python3.12/site-packages (from yarl<2.0,>=1.12.0->aiohttp->datasets>=2.7.1->transformer_lens) (0.2.0)\n", + "Requirement already satisfied: jaxtyping in /opt/anaconda3/envs/mechir/lib/python3.11/site-packages (0.2.38)\n", + "Requirement already satisfied: transformer_lens in 
/opt/anaconda3/envs/mechir/lib/python3.11/site-packages (2.14.0)\n", + "Requirement already satisfied: wadler-lindig>=0.1.3 in /opt/anaconda3/envs/mechir/lib/python3.11/site-packages (from jaxtyping) (0.1.3)\n", + "Requirement already satisfied: accelerate>=0.23.0 in /opt/anaconda3/envs/mechir/lib/python3.11/site-packages (from transformer_lens) (1.3.0)\n", + "Requirement already satisfied: beartype<0.15.0,>=0.14.1 in /opt/anaconda3/envs/mechir/lib/python3.11/site-packages (from transformer_lens) (0.14.1)\n", + "Requirement already satisfied: better-abc<0.0.4,>=0.0.3 in /opt/anaconda3/envs/mechir/lib/python3.11/site-packages (from transformer_lens) (0.0.3)\n", + "Requirement already satisfied: datasets>=2.7.1 in /opt/anaconda3/envs/mechir/lib/python3.11/site-packages (from transformer_lens) (2.21.0)\n", + "Requirement already satisfied: einops>=0.6.0 in /opt/anaconda3/envs/mechir/lib/python3.11/site-packages (from transformer_lens) (0.8.1)\n", + "Requirement already satisfied: fancy-einsum>=0.0.3 in /opt/anaconda3/envs/mechir/lib/python3.11/site-packages (from transformer_lens) (0.0.3)\n", + "Requirement already satisfied: numpy>=1.24 in /opt/anaconda3/envs/mechir/lib/python3.11/site-packages (from transformer_lens) (1.26.4)\n", + "Requirement already satisfied: pandas>=1.1.5 in /opt/anaconda3/envs/mechir/lib/python3.11/site-packages (from transformer_lens) (2.2.3)\n", + "Requirement already satisfied: rich>=12.6.0 in /opt/anaconda3/envs/mechir/lib/python3.11/site-packages (from transformer_lens) (13.9.4)\n", + "Requirement already satisfied: sentencepiece in /opt/anaconda3/envs/mechir/lib/python3.11/site-packages (from transformer_lens) (0.2.0)\n", + "Collecting torch<2.5,>=2.2 (from transformer_lens)\n", + " Using cached torch-2.4.1-cp311-none-macosx_11_0_arm64.whl.metadata (26 kB)\n", + "Requirement already satisfied: tqdm>=4.64.1 in /opt/anaconda3/envs/mechir/lib/python3.11/site-packages (from transformer_lens) (4.67.1)\n", + "Requirement already satisfied: 
transformers>=4.43 in /opt/anaconda3/envs/mechir/lib/python3.11/site-packages (from transformer_lens) (4.48.3)\n", + "Requirement already satisfied: transformers-stream-generator<0.0.6,>=0.0.5 in /opt/anaconda3/envs/mechir/lib/python3.11/site-packages (from transformer_lens) (0.0.5)\n", + "Requirement already satisfied: typeguard<5.0,>=4.2 in /opt/anaconda3/envs/mechir/lib/python3.11/site-packages (from transformer_lens) (4.4.2)\n", + "Requirement already satisfied: typing-extensions in /opt/anaconda3/envs/mechir/lib/python3.11/site-packages (from transformer_lens) (4.12.2)\n", + "Requirement already satisfied: wandb>=0.13.5 in /opt/anaconda3/envs/mechir/lib/python3.11/site-packages (from transformer_lens) (0.19.6)\n", + "Requirement already satisfied: packaging>=20.0 in /opt/anaconda3/envs/mechir/lib/python3.11/site-packages (from accelerate>=0.23.0->transformer_lens) (24.2)\n", + "Requirement already satisfied: psutil in /opt/anaconda3/envs/mechir/lib/python3.11/site-packages (from accelerate>=0.23.0->transformer_lens) (6.1.1)\n", + "Requirement already satisfied: pyyaml in /opt/anaconda3/envs/mechir/lib/python3.11/site-packages (from accelerate>=0.23.0->transformer_lens) (6.0.2)\n", + "Requirement already satisfied: huggingface-hub>=0.21.0 in /opt/anaconda3/envs/mechir/lib/python3.11/site-packages (from accelerate>=0.23.0->transformer_lens) (0.28.1)\n", + "Requirement already satisfied: safetensors>=0.4.3 in /opt/anaconda3/envs/mechir/lib/python3.11/site-packages (from accelerate>=0.23.0->transformer_lens) (0.4.5)\n", + "Requirement already satisfied: filelock in /opt/anaconda3/envs/mechir/lib/python3.11/site-packages (from datasets>=2.7.1->transformer_lens) (3.17.0)\n", + "Requirement already satisfied: pyarrow>=15.0.0 in /opt/anaconda3/envs/mechir/lib/python3.11/site-packages (from datasets>=2.7.1->transformer_lens) (19.0.0)\n", + "Requirement already satisfied: dill<0.3.9,>=0.3.0 in /opt/anaconda3/envs/mechir/lib/python3.11/site-packages (from 
datasets>=2.7.1->transformer_lens) (0.3.8)\n", + "Requirement already satisfied: requests>=2.32.2 in /opt/anaconda3/envs/mechir/lib/python3.11/site-packages (from datasets>=2.7.1->transformer_lens) (2.32.3)\n", + "Requirement already satisfied: xxhash in /opt/anaconda3/envs/mechir/lib/python3.11/site-packages (from datasets>=2.7.1->transformer_lens) (3.5.0)\n", + "Requirement already satisfied: multiprocess in /opt/anaconda3/envs/mechir/lib/python3.11/site-packages (from datasets>=2.7.1->transformer_lens) (0.70.16)\n", + "Requirement already satisfied: fsspec<=2024.6.1,>=2023.1.0 in /opt/anaconda3/envs/mechir/lib/python3.11/site-packages (from fsspec[http]<=2024.6.1,>=2023.1.0->datasets>=2.7.1->transformer_lens) (2024.6.1)\n", + "Requirement already satisfied: aiohttp in /opt/anaconda3/envs/mechir/lib/python3.11/site-packages (from datasets>=2.7.1->transformer_lens) (3.11.12)\n", + "Requirement already satisfied: python-dateutil>=2.8.2 in /opt/anaconda3/envs/mechir/lib/python3.11/site-packages (from pandas>=1.1.5->transformer_lens) (2.9.0.post0)\n", + "Requirement already satisfied: pytz>=2020.1 in /opt/anaconda3/envs/mechir/lib/python3.11/site-packages (from pandas>=1.1.5->transformer_lens) (2025.1)\n", + "Requirement already satisfied: tzdata>=2022.7 in /opt/anaconda3/envs/mechir/lib/python3.11/site-packages (from pandas>=1.1.5->transformer_lens) (2025.1)\n", + "Requirement already satisfied: markdown-it-py>=2.2.0 in /opt/anaconda3/envs/mechir/lib/python3.11/site-packages (from rich>=12.6.0->transformer_lens) (3.0.0)\n", + "Requirement already satisfied: pygments<3.0.0,>=2.13.0 in /opt/anaconda3/envs/mechir/lib/python3.11/site-packages (from rich>=12.6.0->transformer_lens) (2.19.1)\n", + "Requirement already satisfied: sympy in /opt/anaconda3/envs/mechir/lib/python3.11/site-packages (from torch<2.5,>=2.2->transformer_lens) (1.13.1)\n", + "Requirement already satisfied: networkx in /opt/anaconda3/envs/mechir/lib/python3.11/site-packages (from 
torch<2.5,>=2.2->transformer_lens) (3.4.2)\n", + "Requirement already satisfied: jinja2 in /opt/anaconda3/envs/mechir/lib/python3.11/site-packages (from torch<2.5,>=2.2->transformer_lens) (3.1.5)\n", + "Requirement already satisfied: regex!=2019.12.17 in /opt/anaconda3/envs/mechir/lib/python3.11/site-packages (from transformers>=4.43->transformer_lens) (2024.11.6)\n", + "Requirement already satisfied: tokenizers<0.22,>=0.21 in /opt/anaconda3/envs/mechir/lib/python3.11/site-packages (from transformers>=4.43->transformer_lens) (0.21.0)\n", + "Requirement already satisfied: click!=8.0.0,>=7.1 in /opt/anaconda3/envs/mechir/lib/python3.11/site-packages (from wandb>=0.13.5->transformer_lens) (8.1.8)\n", + "Requirement already satisfied: docker-pycreds>=0.4.0 in /opt/anaconda3/envs/mechir/lib/python3.11/site-packages (from wandb>=0.13.5->transformer_lens) (0.4.0)\n", + "Requirement already satisfied: gitpython!=3.1.29,>=1.0.0 in /opt/anaconda3/envs/mechir/lib/python3.11/site-packages (from wandb>=0.13.5->transformer_lens) (3.1.44)\n", + "Requirement already satisfied: platformdirs in /opt/anaconda3/envs/mechir/lib/python3.11/site-packages (from wandb>=0.13.5->transformer_lens) (4.3.6)\n", + "Requirement already satisfied: protobuf!=4.21.0,!=5.28.0,<6,>=3.19.0 in /opt/anaconda3/envs/mechir/lib/python3.11/site-packages (from wandb>=0.13.5->transformer_lens) (5.29.3)\n", + "Requirement already satisfied: pydantic<3,>=2.6 in /opt/anaconda3/envs/mechir/lib/python3.11/site-packages (from wandb>=0.13.5->transformer_lens) (2.10.6)\n", + "Requirement already satisfied: sentry-sdk>=2.0.0 in /opt/anaconda3/envs/mechir/lib/python3.11/site-packages (from wandb>=0.13.5->transformer_lens) (2.21.0)\n", + "Requirement already satisfied: setproctitle in /opt/anaconda3/envs/mechir/lib/python3.11/site-packages (from wandb>=0.13.5->transformer_lens) (1.3.4)\n", + "Requirement already satisfied: setuptools in /opt/anaconda3/envs/mechir/lib/python3.11/site-packages (from 
wandb>=0.13.5->transformer_lens) (75.8.0)\n", + "Requirement already satisfied: six>=1.4.0 in /opt/anaconda3/envs/mechir/lib/python3.11/site-packages (from docker-pycreds>=0.4.0->wandb>=0.13.5->transformer_lens) (1.17.0)\n", + "Requirement already satisfied: aiohappyeyeballs>=2.3.0 in /opt/anaconda3/envs/mechir/lib/python3.11/site-packages (from aiohttp->datasets>=2.7.1->transformer_lens) (2.4.6)\n", + "Requirement already satisfied: aiosignal>=1.1.2 in /opt/anaconda3/envs/mechir/lib/python3.11/site-packages (from aiohttp->datasets>=2.7.1->transformer_lens) (1.3.2)\n", + "Requirement already satisfied: attrs>=17.3.0 in /opt/anaconda3/envs/mechir/lib/python3.11/site-packages (from aiohttp->datasets>=2.7.1->transformer_lens) (25.1.0)\n", + "Requirement already satisfied: frozenlist>=1.1.1 in /opt/anaconda3/envs/mechir/lib/python3.11/site-packages (from aiohttp->datasets>=2.7.1->transformer_lens) (1.5.0)\n", + "Requirement already satisfied: multidict<7.0,>=4.5 in /opt/anaconda3/envs/mechir/lib/python3.11/site-packages (from aiohttp->datasets>=2.7.1->transformer_lens) (6.1.0)\n", + "Requirement already satisfied: propcache>=0.2.0 in /opt/anaconda3/envs/mechir/lib/python3.11/site-packages (from aiohttp->datasets>=2.7.1->transformer_lens) (0.2.1)\n", + "Requirement already satisfied: yarl<2.0,>=1.17.0 in /opt/anaconda3/envs/mechir/lib/python3.11/site-packages (from aiohttp->datasets>=2.7.1->transformer_lens) (1.18.3)\n", + "Requirement already satisfied: gitdb<5,>=4.0.1 in /opt/anaconda3/envs/mechir/lib/python3.11/site-packages (from gitpython!=3.1.29,>=1.0.0->wandb>=0.13.5->transformer_lens) (4.0.12)\n", + "Requirement already satisfied: mdurl~=0.1 in /opt/anaconda3/envs/mechir/lib/python3.11/site-packages (from markdown-it-py>=2.2.0->rich>=12.6.0->transformer_lens) (0.1.2)\n", + "Requirement already satisfied: annotated-types>=0.6.0 in /opt/anaconda3/envs/mechir/lib/python3.11/site-packages (from pydantic<3,>=2.6->wandb>=0.13.5->transformer_lens) (0.7.0)\n", + 
"Requirement already satisfied: pydantic-core==2.27.2 in /opt/anaconda3/envs/mechir/lib/python3.11/site-packages (from pydantic<3,>=2.6->wandb>=0.13.5->transformer_lens) (2.27.2)\n", + "Requirement already satisfied: charset-normalizer<4,>=2 in /opt/anaconda3/envs/mechir/lib/python3.11/site-packages (from requests>=2.32.2->datasets>=2.7.1->transformer_lens) (3.4.1)\n", + "Requirement already satisfied: idna<4,>=2.5 in /opt/anaconda3/envs/mechir/lib/python3.11/site-packages (from requests>=2.32.2->datasets>=2.7.1->transformer_lens) (3.10)\n", + "Requirement already satisfied: urllib3<3,>=1.21.1 in /opt/anaconda3/envs/mechir/lib/python3.11/site-packages (from requests>=2.32.2->datasets>=2.7.1->transformer_lens) (2.3.0)\n", + "Requirement already satisfied: certifi>=2017.4.17 in /opt/anaconda3/envs/mechir/lib/python3.11/site-packages (from requests>=2.32.2->datasets>=2.7.1->transformer_lens) (2025.1.31)\n", + "Requirement already satisfied: MarkupSafe>=2.0 in /opt/anaconda3/envs/mechir/lib/python3.11/site-packages (from jinja2->torch<2.5,>=2.2->transformer_lens) (3.0.2)\n", + "Requirement already satisfied: mpmath<1.4,>=1.1.0 in /opt/anaconda3/envs/mechir/lib/python3.11/site-packages (from sympy->torch<2.5,>=2.2->transformer_lens) (1.3.0)\n", + "Requirement already satisfied: smmap<6,>=3.0.1 in /opt/anaconda3/envs/mechir/lib/python3.11/site-packages (from gitdb<5,>=4.0.1->gitpython!=3.1.29,>=1.0.0->wandb>=0.13.5->transformer_lens) (5.0.2)\n", + "Using cached torch-2.4.1-cp311-none-macosx_11_0_arm64.whl (62.1 MB)\n", + "Installing collected packages: torch\n", + " Attempting uninstall: torch\n", + " Found existing installation: torch 2.6.0\n", + " Uninstalling torch-2.6.0:\n", + " Successfully uninstalled torch-2.6.0\n", + "\u001b[31mERROR: pip's dependency resolver does not currently take into account all the packages that are installed. 
This behaviour is the source of the following dependency conflicts.\n", + "torchaudio 2.6.0 requires torch==2.6.0, but you have torch 2.4.1 which is incompatible.\n", + "torchvision 0.21.0 requires torch==2.6.0, but you have torch 2.4.1 which is incompatible.\u001b[0m\u001b[31m\n", + "\u001b[0mSuccessfully installed torch-2.4.1\n", "Note: you may need to restart the kernel to use updated packages.\n", - "Processing /home/andrew/Documents/Code/MechIR\n", - " Preparing metadata (setup.py) ... \u001b[?25ldone\n", - "\u001b[?25hBuilding wheels for collected packages: mechir\n", - " Building wheel for mechir (setup.py) ... \u001b[?25ldone\n", - "\u001b[?25h Created wheel for mechir: filename=mechir-0.0.1-py3-none-any.whl size=76527 sha256=549ec71593e7476a5bcecf264dc4a238176babaf5b1c3472e53fab84a2df16e9\n", - " Stored in directory: /tmp/pip-ephem-wheel-cache-tq1ty5ep/wheels/16/4d/fd/e2f041bb0629a1af518d10cab4601f84986b0213a8a30041cf\n", - "Successfully built mechir\n", - "Installing collected packages: mechir\n", - " Attempting uninstall: mechir\n", - " Found existing installation: mechir 0.0.1\n", - " Uninstalling mechir-0.0.1:\n", - " Successfully uninstalled mechir-0.0.1\n", - "Successfully installed mechir-0.0.1\n", "Note: you may need to restart the kernel to use updated packages.\n" ] } @@ -252,7 +278,7 @@ }, { "cell_type": "code", - "execution_count": 24, + "execution_count": 6, "metadata": {}, "outputs": [], "source": [ @@ -331,7 +357,7 @@ }, { "cell_type": "code", - "execution_count": 7, + "execution_count": null, "metadata": {}, "outputs": [], "source": [ @@ -354,17 +380,17 @@ }, { "cell_type": "code", - "execution_count": 25, + "execution_count": null, "metadata": {}, "outputs": [ { "data": { - "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAB64AAAI2CAYAAADgnaZqAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjkuMiwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8hTgPZAAAACXBIWXMAAA9hAAAPYQGoP6dpAADNp0lEQVR4nOzdeXxU1f3/8fdM9pAECIuBAGEHpQgEQZF9kUUpIoK7giiJWwW0KlARFbSVCtXSbzURF6QVy6oobqyyVEQCiIqsAgEMICQEsi9zf3/wy5SQBGYyE84kvJ6Pxzwgc8+5n89MMnPnM+eec22WZVkCAAAAAAAAAAAAAMAQu+kEAAAAAAAAAAAAAACXNwauAQAAAAAAAAAAAABGMXANAAAAAAAAAAAAADCKgWsAAAAAAAAAAAAAgFEMXAMAAAAAAAAAAAAAjGLgGgAAAAAAAAAAAABgFAPXAAAAAAAAAAAAAACjGLgGAAAAAAAAAAAAABjFwDUAAAAAAAAAAAAAwCgGrlGpvPfee7LZbGrcuLHpVKqU559/XjabTb169TKdCgAAlZLNZpPNZtOaNWtMpwIAVQK1X8Wg9gMAwDPUfgBQsRi4xiVTVCCXdgsNDVWLFi00cuRI/fe//62wHA4cOFBmDqXd3nvvvQrLBRe3Zs0at35f598OHDgg6X9fernTpzTHjx/XX/7yF91www1q0KCBQkJCVK1aNTVu3FhDhw5VYmKiTp06VaJfVlaWPv/8c02bNk3Dhg1TTEyMM97zzz9fIc8dAPcUFhZq/vz5uu+++9SyZUvVqFFDgYGBqlu3rrp166aJEyfqxx9/NJ2mTxg1apTL76kMNpjXq1evch9HR40a5dxP48aN3e5Tms8//1xxcXFq06aNIiMjFRAQoFq1aqlz584aN26cvv3221L77dy5U++8844effRRdenSRaGhoc6YAHwPtR/cVdG1X1hYmOrXr69rrrlGDz74oObMmaPMzEy3cty/f7+mTJmi7t27q379+goKClJ4eLhatGih22+/Xf/+97+VlZVVAc8OAG+i9nMdtV/lUpG1X1Hd1qxZMw0ePFjPPfectm3b5lZ+vPYAuMrfdAK4PF1xxRXO/zscDqWmpmrv3r3au3ev3n//fU2ZMqXUAb3q1aurVatWio6O9jiHiIgIhYSEXLDNxbajYgUGBhb7WzlXamqq8vPzFRAQoMjIyFLb+Pn5lbivdu3apd5/oT6WZenPf/6zXnrppWJfRISFhclms+ngwYM6ePCgPv74Yz399NOaOXOmRo8e7Wy3adMm3XjjjWXGBGDWxo0bNXLkSO3evdt5X0BAgMLDw3Xy5Elt2LBBGzZs0F/+8hcNGzZM8+bNU2BgoMGMfYPdbledOnUu2OZi21HxIiMjSz2W5uXlKS0tTZJUs2bNUv+mq1evXuK+4ODgUu+/UB9J2r17t+6++25t3rzZeZ+fn5+qV6+u9PR0fffdd/ruu+/0+uuvq3fv3po/f75q167tbPvQQw/p66+/LvuBAvBZ1H5wRUXXfnl5eTp27JhSUlKUlJSkt99+W4899pieeuopTZo0Sf7+ZX89lp+fr6eeekr/93//p4KCAuf91atXV35+vvPvef78+YqKitLs2bN10003ufPwAVwi1H7lQ+1XOVRk7WdZlk6fPq3U1FT98ssvWrZsmaZOnarrrrtOCQkJuvrqqy+YG689AG6xgEtkypQpliSrtD+7goICa/369VbHjh2dbTZs2OD1HPbv3+/c/7vvvuv1/VdWRb+bnj17mk7FZT179nQ553fffdf5e9+/f79bcRwOh3X33Xc7+1977bXWokWLrLS0NGeb9PR0a8mSJdbvf/97S5J18803F9vH6tWrrZo1a1p9+/a1nnrqKWvevHlWVFSUJcmaMmWKW/kA8K6lS5daQUFBliSrVq1a1p///Gdr9+7dzu0FBQXWd999Z02YMMGKiIiwJBV7/V+ORo4caUmyYmJiTKfiU4qOE6t
XrzadiktWr17tVs4xMTGWJGvkyJFux9q0aZNVo0YNS5JVrVo1a+LEidb3339vORwOy7Isq7Cw0Prpp5+sl156ybriiissSdbWrVuL7aNv377WVVddZd1zzz3WzJkzrSeeeKLMz5UAzKP2812Xc+3ncDisnTt3Wm+++abVtm1bZ7tevXpZOTk5pe4vNzfX6tWrl7PtwIEDrc8//9zKzMx0tvntt9+sDz74wOrevbslyRo7dqwHjxZARaH2cx+1X+ku59ovPT3dWrVqlTVmzBjn6ykwMND65JNPytwfrz0A7mLGNXyCn5+funbtqo8++kgNGzaUJH388ce6/vrrDWeGy9306dP173//W5I0btw4zZw5s8SypBERERo6dKiGDh2qtWvXav78+cW2d+/eXampqcXumzBhQsUmDuCi9uzZo3vuuUe5ubm66qqr9OWXX6pBgwbF2vj5+emaa67RNddco6eeeqrYagoALu7kyZMaNmyYTp06pfr16+urr75SmzZtirWx2+266qqrdNVVV2ncuHEaP358iWPtl19+WWw2HUv6ApUXtR9MsdlsatWqlVq1aqUHH3zQuVrWmjVr9PjjjyshIaFEn7FjxzqvYTpz5kyNHz++RJvatWvrzjvv1J133qlFixaxzCngg6j9AO+IiIhQ79691bt3bz322GMaPHiwDh06pDvuuENbt25VixYtirXntQegPLjGNXxKgwYNVKtWLUlSRkZGie1F16sydd2Uomu19erVS5K0cuVK3XTTTapTp46Cg4N15ZVX6oUXXlBOTs4F93Py5Em9+OKLuvbaaxUZGang4GA1btxY/fv31xtvvKH09PRS+y1evFiDBw/WFVdc4VxKbfDgwVqyZMlFc//88891ww03qEaNGgoLC1O7du00ffp05efnu/TYDxw4oHHjxqlNmzYKCwtTaGioWrdurbFjxyo5ObnUPuf/vlavXq2hQ4eqXr168vPzu+h1ME07ceKEpk6dKknq27dvqYPW5+vRo4f+/ve/F7vvQkuTAzDn2Wef1enTpxUcHKwlS5aUKJ7OFxkZqY8++qjYElruvs/t27dPDz/8sFq0aKGQkBBFREQoNjZWL774ok6fPl1m7MOHD2v8+PFq06aNqlWrpqCgINWvX18dO3bU+PHj9d1335Xok5aWpueee06xsbGKiIhQYGCgoqKidPXVV+uhhx7SypUrXX+yvKjoGllr1qzRmTNn9Oyzz6p169YKCQlRrVq1NHjw4DKvc3yur776SnfccYdiYmIUEhKiyMhIXX311frDH/6gb775ptQ+R48e1VNPPeV8HqtVq6Y2bdro6aef1rFjxy4YLy0tTU899ZSaNWum4OBg1atXTyNGjFBSUpJLj9vhcOjf//63brzxRudxvE6dOurfv7/mzZsny7JK7Vd0fbH33ntPGRkZeu6559S2bVuFh4cXu6anr5o+fboOHz4sSZo3b16JQevzhYaGKiEhQW3bti12P8dSoOqh9qP2M8nPz08zZsxwLun99ttvF1u+VJJ27NjhHMwePXp0qYPW57v11ls1efJk7ycMwCPUftR+1H7ed/XVV2vhwoWy2WzKzMzUiy++WKKNN157AC5Dpqd84/JxoeXiihw+fNjZ5vXXXy+xvWjZr/IuUePpcnHnLqs2ffp0y2azWTabzapRo4Zls9mc++7du7dVUFBQ6j6+/PJLq2bNms62/v7+Vq1atayAgADnfUuWLCnWJzc317r99tud2+12u1WzZk3Lbrc777vzzjutvLy8C+ZddKtRo4bl7+9vSbJ69OhhTZw48YJLr/3rX/9yLukiyQoKCrJCQkKcP4eHh1tffvlliX7n/r5ee+0153NUvXp1KyAgoFzLjRa5FEuFT58+3dlv3bp15c61NEXL7rBUOGDG0aNHne+hDzzwQLn348773H/+859i76Xh4eHFfm7YsKG1Y8eOEjG2bdtW7Ljh5+dn1axZs9hx5/z300OHDlmNGjUqcdz
w8/Nz3lfeJUI9XS6uKP4HH3xgNW/e3JJkBQcHW6Ghoc5tgYGBpR5XLMuyMjMzrREjRhQ7roWHh1vVq1d3/tyuXbsS/dasWeNcrlr/f8nqatWqOX+uWbNmme/1+/fvd75vF+VXtIRZYGCg9fHHH19w6bWTJ09aPXr0KJbzuflKsoYMGWLl5uaW6FsU99VXX7VatmzpjFn0WNy9BEaRS7FUeH5+vvNx9u3bt1x5luXcYzsA30Ptdxa1n+/Xft99952z/eTJk4tte+SRR5yfvQ4cOFDO7AGYRu1H7UftV7G130033eT8zJCVleW831uvPQCXH2ZcwycUFhbqm2++0S233CJJqlu3ru677z7DWZXt+++/14QJEzRhwgQdP35caWlpOnXqlJ577jlJZ8+6nDNnTol+W7du1c0336y0tDS1adNGn332mbKysnTixAllZ2dr8+bNevLJJxUeHl6s36RJk/Sf//xHNptNkydP1smTJ5WamqoTJ05o0qRJks7OYirtzO6lS5fqhRdekCSNGDFCycnJSktL0+nTp/V///d/2rhxo954440yH+vy5ct13333qbCwUE8//bT279+v7OxsZWZmaufOnRoxYoTOnDnj3Hdpjh07pieffFIjR45UcnKyTp06pezsbJ8/E73ojNQ6deqoW7duhrMB4E2rV6+Ww+GQJOexxxMXe5/bsmWLc3msrl27avv27Tp9+rSysrK0dOlS1atXT4cOHdLvf//7ErPOnnzySaWlpSk2NlbffPON8vPzlZqaqpycHO3evVuvvvpqiVmszz//vJKTk9W4cWOtWLFCeXl5Sk1NVW5urg4cOKA33nhD1113nceP2xOPPvqoAgMDtWrVKmVmZiojI0ObNm1Sq1atlJeXp7i4OOfv6Fz333+/FixYILvdrmeeeUaHDh3S6dOnderUKf3222/697//rS5duhTrc+jQIQ0dOlSnTp3SVVddpfXr1ysjI0MZGRlau3atWrVqpbS0NN188806cuRIsb6FhYUaMWKEDh48qJo1a2r+/PnKzMxUenq6fvrpJ1177bUaOXJkmY+zsLBQw4YN09q1a9W+fXt98sknyszM1KlTp5SRkaE5c+aobt26Wrp0qZ555pky9/P888/r9OnTWrJkiTIyMpSWlqZDhw6pbt26bj7zl87mzZudMwm98ToDUDVQ+1H7+ZJrrrnGeSz9+uuvi20rqgc7dOigmJiYS54bAO+g9qP2o/arWEWrl+Tm5mrjxo3O+7392gNwGTE9co7Lx7lnfl9xxRXOW506dZxnAUZERFh33313mWcze/Os+4iIiGJ5lHa70GMoa6bssGHDLElWv379Smzr1q2bJclq0aKFderUKZdyPnz4sPMM+YkTJ5ba5oknnrAkWQEBAdavv/5abNtVV13lPLuysLCwRN8333yzzDMwCwsLrRYtWliSrISEhDJzHDJkiCXJGjt2bLH7zz3bfdiwYS49XleV96z72rVrl/n7/vzzz4v1a9CggSXJuuGGG7yau2Ux4xow7dlnn3W+Lxw5cqTc+3H1fW7gwIGWJKt58+ZWZmZmie1btmxxvtf/9a9/LbataJbTf//7X5fzuvLKK51ntntb0Vn3drv9osfR8x+LZf3vrPs6depYx44dK7F9+/btzjbr168vtm3FihXObf/85z9dzvmhhx5ynlmfkpJSYvuhQ4ecZ9E/+uijxbb95z//ccZcsWJFib6ZmZlWs2bNyjyD/f3337ckWa1bty7z2L9582bLZrNZgYGBJZ6TouOFn5+ftWXLFpcf88WU96z74ODgMn/f58+qnj17tjPGhg0bvJa7ZTHjGvB11H7Uft5U0att3XDDDZYkKzo62nlffn6+c4bjmDFjypk5AF9A7Vd+1H7FXW61n6szrjds2ODc/1tvveW831uvPQCXH2Zcw4hjx445b7/99psKCwslSVlZWUpPT7/otUa84fTp08XyKO1WlqCgIP3xj38sddvNN98sSdq
+fXux+/fs2aP169dLkl5++WWXr9WxaNEiFRQUKDg4WBMmTCi1zbPPPqugoCDl5+dr4cKFzvu3b9+uHTt2ONvY7SVf8mPGjFF0dHSp+127dq327Nmj2rVr68EHHywzx6IZEl9++WWZbSZOnFjmtkvpxIkTZf6+z78+3cmTJyWdvb4KgKql6PUtee81Xtb73KlTp5zvj0899ZRCQ0NLtOnQoYOGDRsm6ewsqnPVqFFDkpSSkuJyLuXp4y6Hw3HR42hp1ywtEhcXV+oZ423btlWTJk0klTyWvvPOO5Kk3/3ud3r44YddytOyLM2fP1+S9NBDDykqKqpEmwYNGuihhx6SJH344YfFthX93LVrV/Xt27dE39DQUD399NNlxn/77bclSQ8//HCZx/6OHTuqTZs2ysvL0+rVq0ttM3DgQHXo0KHMOJdKTk5Omb/vEydOFGtbEa8zAJUPtR+1n68rOkalpqY670tNTXVeg5RjGFC5Uft5jtrvrMut9nPVua+rc4+l1IMAyouBaxhhWVaxW3Z2trZu3aqRI0fq008/VY8ePfTRRx9VaA7vvvtuiTzOv5WlTZs2CgsLK3Vb/fr1JRU/UEvSf//7X0mSn5+fBg0a5HKemzdvliR16tRJERERpbapWbOmrrnmmmLtz/2/v7+/unfvXmpfu92uXr16lbptw4YNkqT09HTVr19fUVFRpd7GjBkjSTp48GCp+wkJCVFsbOxFHumlsX///jJ/30OHDjWdHoBK6kLvc1u2bHEeU/r161fmPm644QZJZwv2/Px85/2DBw+WJI0cOVJPPvmkvv76a2VlZV0wn6I+EyZMUFxcnL744gudPn3a9QfkgpiYmIseR59//vky+1977bVlbrvYsbTo8bli//79zv248vyfPHlS+/fvd95fdCzt06dPmX3L2lZYWOhcKu35558v8zgaFRWlXbt2SSr7WNq1a9cy419KI0eOLPP3vW3bNtPpAfBB1H7UfgBQlVD7UfuVpirWfgBgCgPX8AnBwcFq3769Zs+erVtuuUW5ubkaNWqUyx+0Xn311TI/DBw6dMjr+Z5/HbJz+fv7S5IKCgqK3X/06FFJUu3atVWtWjWXYx0/flySyjwzvkiDBg2KtT/3/7Vr11ZQUNBF+57v119/lSTl5+df8KzKtLQ0SVJ2dnap+6lVq1apZ/z7ulq1akkq+eEZQOVX9PqWvPMav9D73Lnvyxd6Ly96Ly4oKCiW0/Tp09W7d29lZGRo5syZ6tWrlyIiInTNNddoypQpJa7LJZ09u/+2225Tfn6+3nrrLQ0aNEg1atRQ27Zt9dRTTzkL5XOVdRwdO3asy8+DO1w5lp77JY70v2OpO9eZdPf5P7+PK8fhso6jRdeWk6S0tLQLHkuLHmtZX0z56vXMLsTbrzMAVQO1X9mo/cwoOkade9yKjIyUzWYrth1A5UTtR+13Pmo/7zr3b/jc1xv1IIDyqlzVBC4LRWdwp6en67PPPnOpT0ZGRpkfBoqWojOtqOitTIqeu2uvvfaiZ1ZeaKaCn5/fpUzba9q0aSNJzCADqqCi17ckbd261eP9VeT7XI0aNbRq1SqtW7dOTz/9tLp27Sp/f38lJSXpxRdfVIsWLUosMRcQEKD//Oc/2rZtm5577jn16dNHoaGh+vHHH/Xqq6+qTZs2mjFjRrE+ZR1H09PTK+yxuauyHUvP/Qzy+eefu3QcLWumQmU8lnr7dQag6qH28x2Xc+1XtDxts2bNnPf5+/urZcuWkjiGAZUdtR+136VwOdd+33//vfP/5x5LqQcBlBcD1/A5555Jd+5yLRfy/PPPl/khoHHjxhWUqXuKrqty4sQJZWZmutyv6Cy7w4cPX7Bd0fZzz8or+v+JEyeUl5dXZt/Sztg8N+eylq6p6oquZ/Pbb785r1EHoGro3bu38yz5JUuWVGisc9+XL/ReXrTN39+/1Os/devWTa+88orWr1+vU6dO6eOPP1bbtm2VnZ2t0aNHl3p9znb
t2umFF17QypUrderUKa1YsUI9evRQYWGhnnrqqWIFZlnH0ffee8+DR+9d5Tkuufv8n9+n6P9lHSsvtK1WrVrOGQSX47H0mmuucV7braJfZwAqJ2q/4qj9Lr3Nmzc7P0Odv4x6UT24devWy+55AaoSaj9qv/NR+3nXsmXLJElBQUG67rrrnPdfytcegKqFgWv4nHM/PLizrJqvu/766yWdPQPv888/d7nfudcvK+vMx1OnThW7Htr5fQsKCrRu3bpS+zocDq1Zs6bUbUXXVDl69Gix66ddLu6//36FhoZK+t8XZK5wOBwVmRYAL7jiiit06623SpI++OAD7d692+W+rr4XFImNjXUWaytXriyz3YoVKySd/cIhICDggvsMDg7WkCFDtHjxYklSTk7ORU+w8ff3V9++fbVs2TIFBQXJsixnzMqi6Fj6ySefuNynSZMmzi+DXHn+a9WqpSZNmjjvLzqWrl69usy+q1atKvX+gIAAde7c2e2cqwp/f3/FxcVJOvvcr1271uW+HEuBywO1X3HUfpfeCy+8IOns7LZ77rmn2LZHHnlENptNhYWFevHFF13eJ8cwwLdQ+1H7nY/az3s2bdrkXDXnjjvuUHBwsHPbpXztAahaGLiGz/nggw+c/y/6wFAVNG/eXD169JAkTZo0yeVruN16663y9/dXTk6OXnnllVLbvPzyy8rNzVVAQIDzA4EkXX311bryyislSS+99FKpBfQ777xT5lmIvXv3VvPmzSVJ48ePv+CZ+1LVu15J7dq19eyzz0o6+4H3ySefvOgHpw0bNlTYNYEAeNe0adMUFham7OxsDRs27IJnVUtnr1N16623ur18Wo0aNTRgwABJ0l//+tdSr2P1/fffa9GiRZKkO++803l/QUHBBb/8DAkJcf7/3OusFV1bqzRBQUHOpccq2zUoH3jgAUnSTz/9pDfeeMOlPjabTbfffrskKSEhwXmttHP9+uuvSkhIkFT8+Zfk7Lt+/fpSv+zPzs7WX//61zLjFw3cfvbZZxddBreqHUcl6emnn1b9+vUlnX1uf/rppwu2z87O1iOPPKIffvjhUqQHwDBqv+Ko/S6dwsJCPfnkk/r0008lnV22vujxF2nTpo1zOft33nlHr7322kX3+9FHH2natGlezxeAZ6j9qP2KUPt5zw8//KDhw4fLsixVq1ZNkydPLtHmUr32AFQtleuIhSrt6NGjevbZZzVnzhxJ0nXXXacuXboYzsq7Xn/9dQUHB2vPnj3q2rWrvvjiC+Xn50s6Wzh/9913euihh4qdBRkdHe0cCP3LX/6iKVOm6NSpU5LOnm0/efJk54emJ554QvXq1SsW86WXXpJ09mzBu+66y/lFRU5Ojt5880099thjqlGjRqn5+vv7680335S/v7/Wr1+vHj16aOXKlc6cJemXX37Rm2++qU6dOumf//yn50+Sj5kwYYLzg+vf/vY3de3aVUuWLCn25dOZM2f06aefatiwYerevbsOHTpUYj9paWk6ceKE81ZUjGRlZRW7PyMj49I8MABq2bKl5s6dq8DAQP30009q3769XnnlFe3du9fZprCwUFu3btVzzz2npk2bOs9yd9e0adMUEBCgvXv3asCAAc5BOYfDoc8++0w33nijCgoK1KxZM8XHxzv7HT58WC1atNC0adO0detWFRQUOLdt377dOTOoWrVq6tmzp3NbTEyMJk6cqI0bNxb7ImPv3r26++67lZWVJbvd7vxSpbLo3bu37rjjDknSY489pokTJxb7Av7EiROaPXu280uOIpMmTVKNGjWUmpqqfv366b///a9z24YNG9SvXz+dOnVKkZGRmjBhQrG+t956q2JjY53/X7RokfP6ZT///LMGDRqk3377rcyc77nnHvXr10+WZemWW27RtGnT9Ouvvzq3Z2ZmavXq1Xr00UfVtGnTcj4zvqt27dpatGiRIiIi9Ouvv+raa6/VpEmT9OOPPzpPBrMsSzt37tT06dPVrFk
zvfHGGyVOFMvNzS3zeHnu/eceYwH4Lmo/aj8TLMvSnj17lJiYqNjYWM2cOVPS2SXBX3/99VL7/P3vf1f37t0lnR3Qv/HGG/Xll18qOzvb2SY1NVULFixQnz59dMstt1SJwQigqqH2o/aTqP284cyZM1qzZo3i4+PVuXNnHTp0SIGBgVqwYEGx61sXuZSvPQBViAVcIlOmTLEkWZKsK664otitevXqzm2SrLZt21pHjhwpsY93333XkmTFxMSUK4f9+/c7Y0RERJTI4/zb448/Xupj6NmzZ5kxVq9e7YxRmi+//LLY4w0ICLBq1aplBQQEOO9bsmRJsT65ubnWbbfd5txut9utmjVrWna73XnfnXfeaeXl5ZUa809/+lOx57dmzZqWv7+/Jcnq3r27NXHixAs+riVLlljh4eElcg4KCiq232nTphXr5+nv60J69ux50d/F+XlIsvbv3+92LIfDYb3wwgtWSEhIsccbHh5e7HmRZEVGRlrvv/9+iX3ExMQUa1fWbeTIkW7nB8Az69evt5o3b17stRgYGGhFRkYWe5+12Wwl3mvdeZ/78MMPrcDAwGLHoeDgYOfPDRs2tHbs2FGsz7nHLUmWn5+fFRkZWWw/gYGB1oIFC4r1O7dP0THj3Fg2m83629/+Vq7na+TIkc79Xuw4esUVV1jJycml5rZ69eoyYxS9x0+ZMqXEtszMTGvYsGHFHmNERESxY2u7du1K9FuzZk2xNtWqVbOqVavm/LlGjRrW2rVrS81n3759VsOGDZ1tg4KCnPsKDAy0Pv744ws+rvT0dGvw4MElcq5Ro4Zls9mc9/n7+5foW3T8ePfdd8t8vsrj3M8rF/pdnJ9HeY9TO3bssGJjY4s9B/7+/lZkZKTzM0nRbcCAAdaJEyeK9T/3WH6xW3mO9QC8i9rvLGo/7yhv7Ve7dm3n7zcyMtLy8/MrUc9NmzbNys/Pv+A+c3NzrUcffbTE8ap69erFPktIsho0aGB98cUXXnrkALyN2s891H6Xd+0XHBzs/N3WrVvXCg0NLVF7XX/99dYPP/xw0X168toDcPnxF2DAsWPHiv0cEBCgqKgotWvXTsOHD9d9992nwMDACs3h9OnTF12yrSKWJenfv7/27Nmj119/XZ999pn27dunzMxMRUdHq1WrVho2bJj69OlTrE9gYKD+85//6LbbbtPbb7+tzZs3Ky0tTbVq1dI111yjMWPG6JZbbikz5rRp09SlSxfNnDlTmzdvVm5urq688krdfffdeuKJJ5xn5pdl6NCh2rt3r/75z3/q888/1549e3Tq1ClVq1ZNrVu3VqdOnXTTTTfpxhtv9Mpz5GtsNpuee+45xcXF6d1339WKFSu0c+dOnTx5Una7XTExMerQoYMGDx6s2267TeHh4aZTBuCGrl27aufOnVqwYIE+/fRTffvttzp+/LjOnDmjyMhItW7dWj179tS9996rVq1alTvO7bffro4dO+rVV1/VihUrdPjwYQUEBKh9+/a65ZZbNG7cOEVERBTrEx0draVLl2r16tX65ptvdPjwYR0/flz+/v5q3ry5evfurbFjx6pFixbF+n311VdavXq11q9fr+TkZOdxt3nz5urevbseffRRdezYsdyPRTo7Y+D843lpis5Q95bQ0FAtWrRIy5Yt09tvv61vv/1WJ06cUHh4uK6++mr16tVLd999d4l+PXv21M8//6wZM2bos88+04EDB2Sz2XTllVfqpptu0pNPPqmoqKhSYzZt2lTbtm3TSy+9pCVLlujIkSMKDg7WDTfcoAkTJlz0uYyIiNAnn3yizz//XHPmzNE333yjY8eOybIsRUdH66qrrlLv3r112223eeU58kVXXnmlkpKStGzZMi1ZskQbNmzQ0aNHdfr0aUVERKhZs2bq1q2b7rnnHucsBwBVA7UftZ8pJ06ccP4/NDRUderUUXR0tNq3b68ePXro1ltvdena6oGBgfrHP/6hJ554Qu+++65WrVqlffv
2KTU1VYGBgWrevLmuueYaDR06VEOHDlVQUFBFPiwAHqD2Kx9qv8uz9svJyVFOTo6ksyuzhIeHKyoqSldeeaViY2N16623ql27di7t61K99gBUDTbL4kr3AAAAAAAAAAAAAOANR48e1fLly/Xdd9/pu+++07Zt25STk6OePXtqzZo1Hu179erVmjFjhr799ltlZGQoJiZGI0aM0IQJEy54cmZGRob+8pe/aOHChTp48KDCwsJ07bXX6o9//KN69erlUU7ewsA1AAAAAAAAAAAAAHjJa6+9pvHjx5e439OB61mzZmns2LGyLEsNGjRQnTp1tGPHDudqS+vXr1dkZGSJfidOnFC3bt20a9cuBQUF6aqrrtJvv/2mw4cPy2az6R//+IceeeSRcuflLXbTCQAAAAAAAAAAAABAVREREaF+/fpp4sSJWrx4sSZPnuzxPpOSkjRu3DhJUkJCgpKTk7Vlyxb98ssv6tixo37++WeNGTOm1L4PPPCAdu3apY4dO+qXX37Rli1blJycrISEBFmWpccff1zbtm3zOEdPMeMaAAAAAAAAAAAAACrIP/7xD/3hD3/waMb10KFD9fHHH+u+++7TnDlzim3bs2ePWrduLYfDoe+//15XX321c9vWrVsVGxsru92uXbt2qXnz5sX63nfffZo7d66GDRumRYsWlSs3b2HGNQAAAAAAAAAAAAD4qIyMDH3xxReSpLi4uBLbW7RooT59+kiSFixYUGzbwoULJUl9+vQpMWgtSfHx8ZKkzz77TJmZmV7N210MXAMAAAAAAAAAAACAj9q6datyc3MVFBSkzp07l9qme/fukqSNGzcWu7/o5x49epTar3PnzgoKClJOTo7x5cIZuAYAAAAAAAAAAAAAH7V7925JUqNGjRQQEFBqm2bNmkmSdu3aVWrfou3nCwgIUMOGDUvte6n5G40OAAAAAAAAAAAAAAYlJCQoMTHRrT5xcXHOZbYrWmpqqiQpMjKyzDZF29LS0rzW91Jj4PoisnNyTKdwydksy3QKRlg2m+kUjDD1+zb5fBc6zP2NB59KNhY7r2aMsdgmhYYEm06hUsjIyjYW218OY7GzCs29F4U5sozEtezmPv7Z8s08ZknyyzxpLHZhRD1jsR0BIcZi//dXc7/vbw6aKbIeuCbaSFxJiqpezVjsyuaMwWNegKHDjkMGP3sbrC9zCszFDgs0t8CdzVFoLLZl9zMS1+RjtueeMRbbCjT33m/yM2VwiLnPN5WJyRqv0OBXi3kGg5t878/ON1dXhwSYedwmv8O2FeYZi235BRqLbXMUGIv975/NDaQNa13bSNwaYaFG4lZGgR1Ge2U/k25upC1btrjVJyUlxSuxXZHz/8crAwPLfh8ICgqSJGVnF/8c4EnfS42BawAAAAAAAAAAAACXrXr16ik2NtbtPpdKcPDZCVp5eWWfPJObmytJCjnvRL/g4GBlZWWVq++lxsA1AAAAAAAAAAAAgErH5qVVf+Lj4y/Zst/lUbNmTUn/W/a7NEXbitqe2zcrK6tcfS81c+uXAAAAAAAAAAAAAAAuqGXLlpKk5ORk5efnl9pm3759xdqe33fv3r2l9svPz1dycnKpfS81Bq4BAAAAAAAAAAAAVDo2u59Xbr6uQ4cOCgwMVG5urjZt2lRqm3Xr1kmSunTpUuz+6667rtj2823atEl5eXkKDg5W+/btvZd0OTBwDQAAAAAAAAAAAAA+Kjw8XAMGDJAkJSYmlti+Z88erVq1SpI0fPjwYtuKfl69enWps64TEhIkSYMGDVJYWJhX83YXA9cAAAAAAAAAAAAAKp2qNuO6W7duaty4sV577bUS2yZPniybzaa5c+cqMTFRlmVJklJSUnTnnXfK4XBo6NChateuXbF+sbGxGjx4sAoLC3XHHXcoJSVFkmRZlhITEzV37lzZ7XY9++yzFf74LsbfdAIAAAAAAAAAAAAA4C5fGnQ+16FDh9ShQwfnzzk5OZKkDRs2qHbt2s7
7n376aT399NPOnw8fPqyDBw/q1KlTJfbZqVMnzZw5U0888YTi4+M1bdo01a5dWzt27FBubq5atWqlt956q9R83nnnHXXt2lVJSUlq0qSJrrrqKp04cUKHDh2SzWbTa6+9ptjYWC89+vJjxjUAAAAAAAAAAAAAeElhYaFOnjzpvGVmZkqSCgoKit2flZXl1n7HjRun5cuXa9CgQcrMzNSOHTsUExOjSZMmafPmzcUGxc9Vp04dJSUladKkSYqJidGOHTuUmZmpQYMGaeXKlfrDH/7g8WP2BmZcAwDgo1avXq0ZM2bo22+/VUZGhmJiYjRixAhNmDBB1apVM50eAAAAAMAN1HgAAHifzc83Z1w3btzYuZS3Ow4cOHDRNn379lXfvn3d3nd4eLheeuklvfTSS273vVSYcQ0AgA+aNWuW+vbtq2XLlik4OFhXXnmlDhw4oGnTpqlTp05KTU01nSIAAAAAwEXUeAAAABfHwDUAAD4mKSlJ48aNkyQlJCQoOTlZW7Zs0S+//KKOHTvq559/1pgxY8wmCQAAAABwCTUeAAAVx27388oNvoGBawAAfMzUqVPlcDh07733Ki4uTjabTZJUv359zZs3T3a7XYsXL9b27dsNZwoAAAAAuBhqPAAAANcwcA0AgA/JyMjQF198IUmKi4srsb1Fixbq06ePJGnBggWXNDcAAAAAgHuo8QAAqFg2u59XbvANDFwDAOBDtm7dqtzcXAUFBalz586ltunevbskaePGjZcyNQAAAACAm6jxAACoWAxcVy0MXAMA4EN2794tSWrUqJECAgJKbdOsWTNJ0q5duy5ZXgAAAAAA91HjAQAAuM7fdAIAAOB/UlNTJUmRkZFltinalpaWdklyAgAAAACUDzUeAAAVy2Znjm5Vwm8TAAAfkpOTI0kKDAwss01QUJAkKTs7+5LkBAAAAAAoH2o8AAAA1zHjGgAAHxIcHCxJysvLK7NNbm6uJCkkJKTMNgkJCUpMTHQ57shRozT6gQddbg8AAAAAuDhqPAAAKhbXp65aGLgGAMCH1KxZU9L/lpMrTdG2oralSUlJ0ZYtW1yOO2DgQJfbAgAAAABcQ40HAADgOgauAQDwIS1btpQkJScnKz8/XwEBASXa7Nu3r1jb0tSrV0+xsbEux42KinIzUwAAAADAxVDjAQBQsZhxXbUwcA0AgA/p0KGDAgMDlZubq02bNqlr164l2qxbt06S1KVLlzL3Ex8fr/j4eJfjZmRxLTUAAAAA8DZqPAAAKhYD11WL3XQCAADgf8LDwzVgwABJKvX6ZXv27NGqVaskScOHD7+kuQEAAAAA3EONBwAA4DoGrgEA8DGTJ0+WzWbT3LlzlZiYKMuyJJ29ptmdd94ph8OhoUOHql27doYzBQAAAABcDDUeAAAVx+bn55UbfAMD1wAA+JhOnTpp5syZks4uBxcTE6PY2Fg1adJESUlJatWqld566y3DWQIAAAAAXEGNBwAA4BoGrgEA8EHjxo3T8uXLNWjQIGVmZmrHjh2KiYnRpEmTtHnzZtWuXdt0igAAAAAAF1HjAQBQMWx2P6/c4Bv8TScAAABK17dvX/Xt29d0GgAAAAAAL6DGAwAAuLBKN+N69erVGjx4sOrUqaOQkBC1bt1akydPVmZmpunUAAAAAABuosYDAAAAAJQXM66rlko1cD1r1iz17dtXy5YtU3BwsK688kodOHBA06ZNU6dOnZSammo6RQAAAACAi6jxAAAAAACesNv9vHKDb6g0A9dJSUkaN26cJCkhIUHJycnasmWLfvnlF3Xs2FE///yzxowZYzZJAAAAAIBLqPEAAAAAAMC5Ks3A9dSpU+VwOHTvvfcqLi5ONptNklS/fn3NmzdPdrtdixcv1vbt2w1nCgAAAAC4GGo8AAAAAICnWCq8aqkUA9cZGRn64osvJElxcXEltrdo0UJ9+vSRJC1YsOCS5gYAAAAAcA81HgAAAAAAOF+lGLjeunWrcnNzFRQUpM6
dO5fapnv37pKkjRs3XsrUAAAAAABuosYDAAAAAHgDM66rlkoxcL17925JUqNGjRQQEFBqm2bNmkmSdu3adcnyAgAAAAC4jxoPAAAAAACcz990Aq5ITU2VJEVGRpbZpmhbWlraJckJAAAAAFA+1HgAAAAAAG9gtnTVUikGrnNyciRJgYGBZbYJCgqSJGVnZ1+SnAAAAAAA5UONBwAAAADwBgauq5ZKsVR4cHCwJCkvL6/MNrm5uZKkkJCQS5ITAAAAAKB8qPEAAAAAAMD5KsWM65o1a0r633JypSnaVtS2LAkJCUpMTHQ59qj779eDDz7ocnsAAAAAwIWZrPHuGzVKox+gxgMAAACAqoAZ11VLpRi4btmypSQpOTlZ+fn5CggIKNFm3759xdqWJSUlRVu2bHE59sBBg9zIFACAyin4VLKx2Dk1GhmLHWrwk1C2o5qRuMEqMBJXkuRwGAudX7u5sdhn8i1jsSMc+cZix0aZ+RuXpG41co3EdQRWigWtfILJGq//wIFuZAoAQOVk7hOoFGQz97k/2G6w3ik091nQz26uuLVZZv7aDJZZCnQUmgtuN/f6KrSX/Mx+qdzRpq6x2H6FZupL4HJVKQauO3TooMDAQOXm5mrTpk3q2rVriTbr1q2TJHXp0uWC+6pXr55iY2Ndjh0VFeVesgAAAACAC6LGAwAAAAB4g82PGddVSaUYuA4PD9eAAQP0ySefKDExscSXGnv27NGqVaskScOHD7/gvuLj4xUfH+9y7OycHPcTBgAAAACUyWSNdyYr2/2EAQAAAABAhas0a9lNnjxZNptNc+fOVWJioqz/vwRJSkqK7rzzTjkcDg0dOlTt2rUznCkAAAAA4GKo8QAAAAAAnrLZ/bxyg2+oNAPXnTp10syZMyWdPaM+JiZGsbGxatKkiZKSktSqVSu99dZbhrMEAAAAALiCGg8AAAAA4CkGrquWSjNwLUnjxo3T8uXLNWjQIGVmZmrHjh2KiYnRpEmTtHnzZtWuXdt0igAAAAAAF1HjAQAAAACAIpXiGtfn6tu3r/r27Ws6DQAAAACAF1DjAQAAAADKi9nSVUulmnENAAAAAAAAAAAAAKh6GLgGAMDHHD16VHPnztXjjz+uLl26KCQkRDabTb169TKdGgAAAADATdR4AABUHLvd5pUbfEOlWyocAICq7sMPP9T48eNNpwEAAAAA8AJqPAAAANcw4xoAAB8TERGhfv36aeLEiVq8eLEmT55sOiUAAAAAQDlR4wEAUHFsdptXbhVl9erVGjx4sOrUqaOQkBC1bt1akydPVmZmplv7WbNmjWw2m0u3F154oUT/i/WJiory1kP2CDOuAQDwMaNHj9bo0aOdPx85csRgNgAAAAAAT1DjAQBQcWw2313me9asWRo7dqwsy1KDBg3UsGFD7dixQ9OmTdOiRYu0fv16RUZGurSv6tWrq2vXrmVuT09P148//ihJuv7668tsd8011ygoKKjE/bVq1XIpj4rGwDUAAAAAAAAAAAAAeElSUpLGjRsnSUpISNCYMWNks9n066+/asiQIUpKStKYMWO0aNEil/bXoUMHrV+/vsztL7zwgn788Uc1bNhQffv2LbPdggUL1LhxY3ceyiXFUuEAAAAAAAAAAAAAKh273eaVm7dNnTpVDodD9957r+Li4pwzw+vXr6958+bJbrdr8eLF2r59u8exLMvS+++/L0m67777ZLdX3uHfyps5AAAAAAAAAAAAAPiQjIwMffHFF5KkuLi4EttbtGihPn36SDo7A9pTa9eu1S+//CJJGjVqlMf7M4mlwgEAAAAAAAAAAABUOrYKmC3tqa1btyo3N1dBQUHq3LlzqW26d++uFStWaOPGjR7He++99yRJ3bp1U/PmzS/YdurUqfr1119VUFCg6Oho9enTR7fffnup1702gYFrAAAAAAAAAAAAAPCC3bt3S5IaNWqkgICAUts0a9ZMkrRr1y6PYmVmZmrhwoWSXJtt/c477xT
7ec6cOZoyZYoWLVqk2NhYj3LxBgauAQCoghISEpSYmOhy+wduG6K4e++owIwAAAAAAOXlbo1336hRGv3AgxWYEQAAvsFbM67dPdZKZ5cBj4+PL3F/amqqJCkyMrLMvkXb0tLS3Ip5vgULFigjI0OhoaG67bbbymx38803695771W7du3UoEEDZWRkaMWKFfrTn/6kX375Rf3799fWrVvVsGFDj/LxFAPXAABUQSkpKdqyZYvL7W/q1aUCswEAAAAAeMLdGq//wIEVmA0AAL7DbvPOwLW7x9qiPqXJycmRJAUGBpbZt2hp7uzsbLdinq9omfBbb71V4eHhZbb76KOPiv0cHBysO+64Q/369VPHjh2VnJysF154QbNnz/YoH08xcA0AQBVUr149t5Z2iapbpwKzAQAAAAB4wu0aLyqqArMBAKDqcfdYW9SnNMHBwZKkvLy8Mvvm5uZKkkJCQtyKea79+/dr7dq1klxbJrw0tWvX1sSJE/Xwww9ryZIleuutt2Tz0skA5cHANQAAVVB8fHypy9SUpeBXz66lAgAAAACoOO7WeGeyPJu9BQBAZeGtpcLdPdZeSM2aNSX9b8nw0hRtK2pbHnPmzJFlWYqJiVHv3r3LvZ/rr7/emVNqaqpq1apV7n15ym4sMgAAAAAAAAAAAABUIS1btpQkJScnKz8/v9Q2+/btK9bWXZZl6f3335ckjRw50qNZ0ucuaV5QUFDu/XgDA9cAAAAAAAAAAAAAKh2b3eaVmzd16NBBgYGBys3N1aZNm0pts27dOklSly5dyhXj66+/1v79+2Wz2TRy5Mhy5ypJP/74o6SzS5ybnG0tMXANAIDPOXTokGrXru28TZgwQZK0YcOGYvdPnz7dcKYAAAAAgIuhxgMA4PISHh6uAQMGSJISExNLbN+zZ49WrVolSRo+fHi5Yrz33nuSpO7du6tp06blS1RnZ1jPmDFDktSnTx/5+5u9yjQD1wAA+JjCwkKdPHnSecvMzJR09kPEufdnZWUZzhQAAAAAcDHUeAAAVBy73eaVm7dNnjxZNptNc+fOVWJioizLkiSlpKTozjvvlMPh0NChQ9WuXbti/Ro3bqzGjRtr4cKFZe47IyPDuf3++++/aC4TJkzQnDlzdObMmWL3Hzp0SMOHD9fGjRvl7++v5557zt2H6XVmh80BAEAJjRs3dn6QAQAAAABUbtR4AABUHJuPTtHt1KmTZs6cqSeeeELx8fGaNm2aateurR07dig3N1etWrXSW2+9VaLfwYMHJZ0dnC7LwoULlZmZqWrVqrk0Y3vnzp165ZVX9MADD6hp06aKjIxUenq6du3aJcuyFBwcrNmzZ+vaa68t/wP2EgauAQAAAAAAAAAAAMCLxo0bp7Zt22rGjBn69ttvdfz4ccXExGj48OGaOHGiwsLCyrXfomXChw8f7tI+Hn74YUVFRWnz5s06cuSIDhw4oKCgILVp00b9+vXTY489pmbNmpUrF29j4BoAAAAAAAAAAABApWOzeX+Zb2/q27ev+vbt63J7V1ZpWbNmjVs5DBgwwHnNbV/noxPoAQAAAAAAAAAAAACXC2ZcAwAAAAAAAAAAAKh07HbfnnEN9zDjGgAAAAAAAAAAAABgFDOuAQAAAAAAAAAAAFQ6NmZcVykMXAMAAAAAAAAAAACodBi4rlpYKhwAAAAAAAAAAAAAYBQzrgEAAAAAAAAAAABUOnYbM66rEmZcAwAAAAAAAAAAAACMYsY1AAAAAAAAAAAAgEqHa1xXLQxcX4S9MN9IXMt2mU6Gv0wftz0/x1hs/yM/GImb3yjWSFxJ8jf4d1ZQo4Gx2MCFOEJrGovtZ/DDpcMyFlohualG4uYFm/tdB9rNvf9aBpeNqpH7m7HYjpDqxmL7G3xtnwow83ceavA9Ba4z+ZVGgaG/kQKHw0xgmT3WhgWaO+745WYYi23f+62x2AVX9TES12HzMxJXkhzBNYzFNrkqZqHJFzd8n8HvXQr9gozFNvm
a9DP4mjRVa5l755esgBBjsW2FecZiy8/cs+5XmGsstsn3FeByxMA1AAAAAAAAAAAAgEqHGddVCwPXAAAAAAAAAAAAACodOwPXVcrluS4zAAAAAAAAAAAAAMBnMOMaAAAAAAAAAAAAQKVjszHjuiphxjUAAAAAAAAAAAAAwChmXAMAAAAAAAAAAACodGxM0a1S+HUCAAAAAAAAAAAAAIxi4BoAAB9iWZb++9//asKECerWrZtq1aqlgIAA1alTR/3799e///1vWZZlOk0AAAAAgAuo8QAAqFh2u80rN/gGlgoHAMCHrFq1Sv369XP+3LRpUzVp0kT79+/X8uXLtXz5cs2bN0+LFi1SUFCQwUwBAAAAABdDjQcAQMWyMehcpTDjGgAAH2JZlpo0aaLXX39dx44d0759+7R582adPHlS77//voKCgrRs2TI999xzplMFAAAAAFwENR4AAIDrGLgGAMCHdO7cWbt27dLjjz+uunXrFtt27733Or/MmD17thwOh4kUAQAAAAAuosYDAKBi2Ww2r9zgGxi4BgDAh0RERCggIKDM7YMGDZIkpaam6rfffrtUaQEAAAAAyoEaDwAAwHVc4xoAgEokOzvb+f+QkBCDmQAAAAAAPEWNBwCAZ+xc47pKYcY1AACVyLx58yRJ7dq1U0REhOFsAAAAAACeoMYDAAD4H2ZcAwBQSSQlJenNN9+UJE2YMMFwNgAAAAAAT1DjAQDgORszrqsUBq4BAKgEjh07pmHDhqmgoEC33HKL7rjjDtMpAQAAAADKiRoPAADv8GPgukphqXAAAHxcenq6Bg0apOTkZHXs2FHvvfee6ZQAAAAAAOVEjQcAAFA6ZlwDAODDMjIyNHDgQG3dulVt2rTRl19+6dJ1zxISEpSYmOhynNH33Kkx99/nSaoAAAAAgIu4VDXefaNGafQDD3qSKgAAlQIzrqsWBq4BAPBRWVlZuummm7Rx40a1aNFCK1asUK1atVzqm5KSoi1btrgc68Yb+pQ3TQAAAACACy5ljdd/4MDypgkAAGAMA9cAAPignJwcDRkyRGvXrlVMTIxWrlypqKgol/vXq1dPsbGxLrePuuKK8qQJAAAAAHDBJa/x3Ng3AACVGTOuqxYGrgEA8DH5+fm69dZbtXLlSkVHR2vVqlVq2LChW/uIj49XfHy8y+3zTh13N00AAAAAgAtM1HhnsrLdTRMAAMA4Bq4BAPAhhYWFuuuuu/TZZ58pKipKq1atUtOmTU2nBQAAAAAoB2o8AAAqFjOuqxYGrgEA8CHz58/XwoULJUnBwcEaPXp0mW1nzZqlDh06XKrUAAAAAABuosYDAKBiMXBdtTBwDQCAD8nNzXX+/8CBAzpw4ECZbdPT0y9BRgAAAACA8qLGAwAAcJ3ddAIAAOB/Ro0aJcuyXLr16tXLdLoAAAAAgAugxgMAoGL5221eucE3MHANAAAAAAAAAAAAADCq0gxcHz16VHPnztXjjz+uLl26KCQkRDabjTMRAQAAAKASosYDAAAAAHjKz27zyg2+odJc4/rDDz/U+PHjTacBAAAAAPACajwAAAAAAHCuSjNwHRERoX79+qlTp07q1KmTtm7dqqlTp5pOCwAAAABQDtR4AAAAAABPMVu6aqk0S4WPHj1ay5cv18svv6xbbrlFdevWNZ0SAAAAAKCcqPEAAAAAAFXd6tWrNXjwYNWpU0chISFq3bq1Jk+erMzMTLf3NWrUKNlstgvevvjiizL7Z2Rk6Nlnn1Xr1q0VEhKiOnXqaPDgwVqzZo0Hj9C7Ks2MawAAAAAAAAAAAAAo4mf33Tm6s2bN0tixY2VZlho0aKCGDRtqx44dmjZtmhYtWqT169crMjLS7f02bNhQjRo1KnVbzZo1S73/xIkT6tatm3bt2qWgoCBdddVV+u2337Rs2TJ99tln+sc//qFHHnnE7Vy8jYFrAAAAAAAAAAAAAJWOry4VnpSUpHHjxkmSEhISNGbMGNlsNv36668aMmS
IkpKSNGbMGC1atMjtfY8ePVrPP/+8W30eeOAB7dq1Sx07dtTSpUtVv359WZalt956S/Hx8Xr88cd1/fXXq3379m7n402+exoCAAAAAAAAAAAAAFQyU6dOlcPh0L333qu4uDjZbGcH2OvXr6958+bJbrdr8eLF2r59e4XnsnXrVi1dulR2u10ffvih6tevL0my2WyKi4vTvffeq8LCQk2dOrXCc7kYBq4BAAAAAAAAAAAAVDp+dptXbt6UkZHhvNZ0XFxcie0tWrRQnz59JEkLFizwauzSLFy4UJLUp08fNW/evMT2+Ph4SdJnn31WrmtvexNLhQMAAAAAAAAAAACAF2zdulW5ubkKCgpS586dS23TvXt3rVixQhs3bnR7/6tXr9ZPP/2kkydPqkaNGurYsaPuuecexcTElNq+KEaPHj1K3d65c2cFBQUpJydH27ZtU9euXd3OyVuYcQ0AAAAAAAAAAACg0vHFGde7d++WJDVq1EgBAQGltmnWrJkkadeuXW7vf+3atVq4cKFWr16tJUuW6Nlnn1WLFi00ffr0C+ZTFPN8AQEBatiwYbnz8abLbsZ1QkKCEhMTXW5//8j7NOaB0RWYEQAAAACgvNyt8UaOGqXRDzxYgRkBAAAAACobd2tL6ewy4EXLbJ8rNTVVkhQZGVlm36JtaWlpLsdr0aKFZsyYoT59+qhx48YKCgrS9u3bNWPGDC1YsEDPPPOMwsLC9Mgjj1ySfCrCZTdwnZKSoi1btrjcftCA/hWYDQAAviHDL8xY7NycQmOxqwf5GYtt+QcbiRvy224jcSXpOzU0Frtj4XFjsQur1TIWO7LvRGOx07560Vjs8CAzr6/sfIeRuJc7d2u8AQMHVmA2AAD4Bn8vzxxzR77DMhbbz9zDNio919zn0NqBZmJbNnOLydoK843FdgSYqXUkSeZe2sqzBxqLHViQYyiywd91JeNn886bv7u1ZVGf0uTknP27CQws+283KChIkpSdne1yvD/96U8l7rv22ms1f/58Pfroo/rnP/+pP/3pT7rvvvsUFva/73srKp+KcNkNXNerV0+xsbEut4+KiqrAbAAAAAAAnqDGAwAAAIDLl7eW+Xa3tizqU5rg4LMnHuTl5ZXZNzc3V5IUEhLiVsyyvPzyy5o9e7ZOnTqlVatWaciQIcXyycrKuqT5lNdlN3AdHx9f6rT9suRmnqnAbAAAAAAAnnC3xsvIMnv2OAAAAADA97hbW15IzZo1Jf1vie7SFG0rauup6tWrq02bNtq6dav27NlTIp+srKxLmk95XXYD1wAAAAAAAAAAAAAqP2/NuPamli1bSpKSk5OVn5+vgICAEm327dtXrK03FC0FXlBQUCKfI0eOaO/evaX2y8/PV3JystfzKQ9zF4Jw06FDh1S7dm3nbcKECZKkDRs2FLt/+vTphjMFAAAAAFwMNR4AAAAAoCrq0KGDAgMDlZubq02bNpXaZt26dZKkLl26eCVmQUGBdu7cKUlq0KBBsW3XXXddsZjn27Rpk/Ly8hQcHKz27dt7JZ/yqjQD14WFhTp58qTzlpmZKensL+Lc+7OysgxnCgAAAAC4GGo8AAAAAICn/O02r9y8KTw8XAMGDJAkJSYmlti+Z88erVq1SpI0fPhwr8RMSEhQenq6/P391adPn2LbimKsXr261FnXCQkJkqRBgwYpLCzMK/mUV6UZuG7cuLEsy7ro7fnnnzedKgAAAADgIqjxAAAAAABV1eTJk2Wz2TR37lwlJibKsixJUkpKiu688045HA4NHTpU7dq1K9avcePGaty4sRYuXFjs/uXLl+uZZ54pcf3qvLw8zZo1S0888YQk6aGHHlK9evWKtYmNjdXgwYNVWFioO+64QykpKZIky7KUmJiouXPnym6369lnn/Xqc1AeXOMaAAAAAAAAAAAAQKXji9e4lqROnTpp5syZeuKJJxQfH69p06apdu3a2rFjh3Jzc9WqVSu99dZbJfodPHhQkpSRkVH
s/szMTE2fPl3Tp0/XFVdc4VwOfNeuXc62t956q2bMmFFqPu+88466du2qpKQkNWnSRFdddZVOnDihQ4cOyWaz6bXXXlNsbKw3n4JyqTQzrgEAuFwsWLBAcXFxuuaaa1S/fn0FBQUpPDxcsbGxmjx5sk6ePGk6RQAAAACAi6jxAACoOH52m1duFWHcuHFavny5Bg0apMzMTO3YsUMxMTGaNGmSNm/erNq1a7u8r44dO2ry5Mnq16+fgoODtXPnTv3www+qXr26hg0bpqVLl2rhwoUKDAwstX+dOnWUlJSkSZMmKSYmRjt27FBmZqYGDRqklStX6g9/+IO3HrZHbFbR3HSUKjfzjJG4lu0yPafA4OO2bObOyrHn5xiL7X/kByNx8xsZPHPH5OvLchgL7bD5GYttUmhIsOkU3Na+fXt9//33CgoKUr169VS7dm0dP35cycnJkqS6devqq6++KrGMjCdSz5i7fmhuobmPItWDzL0u/PLNPOf+aYeMxJWk79TQWOyOoRkXb1RBCqvVMhY7su9EY7HTvnrRWGxHgJn3/ux8c8f5yPBQY7Erm4ysbNMpXHIFDnPHWoOhFRJg7nO/X665445977fGYhdc1efijSqAQ745y6aiGfwaQ4UGX9xhoSHGYpeXiRovO8fcd00mjzt+Bl8YJl+TqdmFxmLXDjTzGdjk9+e2wnxjsU3VOpJkciSp0GDwwMJcI3GDwmsYiVsZPfv5z17Zz7RBV3plP/DMZTo6CgCA73r00Uf19ddf68yZM9q/f7++++47HTx4UNu3b9fvfvc7HT9+XHfddZfpNAEAAAAALqDGAwCg4vjyjGu4j4FrAAB8zJgxY9SjRw8FBAQUu79t27Z6++23JUk7duzQzz9752xCAAAAAEDFocYDAABwjb/pBAAAgOuuvPJ/S9ZkZZlb3hsAAAAA4DlqPAAAPMNs6aqFGdcAAFQi69evlySFhYWpVatWhrMBAAAAAHiCGg8AAOB/mHENAICPczgcOnr0qL766is988wzkqS//OUvCgsLM5wZAAAAAMBd1HgAAHgPM66rFgauAQDwUa+99prGjx9f7L7OnTtrzpw5GjhwoKGsAAAAAADlQY0HAID3MXBdtbBUOAAAPio6Olpdu3bVtddeq3r16slms2nbtm16//33derUKdPpAQAAAADcQI0HAABwYcy4BgDAR40YMUIjRoxw/rx9+3Y99thjmjdvnn7++Wdt3rxZfn5+pfZNSEhQYmKiy7Huvm+URo1+wOOcAQAAAAClu5Q13qj779eDDz7occ4AAPg6ZlxXLQxcAwBQSVx99dVatmyZmjZtqm3btunDDz/U3XffXWrblJQUbdmyxeV99+vPsnQAAAAAcClVZI03cNAgb6UJAABwyTBwDQBAJRIeHq6ePXtq0aJFSkpKKvNLjXr16ik2Ntbl/V4RFeWtFAEAAAAALqqoGi+KGg8AcJlgxnXVwsA1AACVTEFBQbF/SxMfH6/4+HiX95l6JsvjvAAAAAAA7quIGi87J8fjvAAAAC41Bq4BAKhEUlNTtWbNGklShw4dzCYDAAAAAPAINR4AAJ5hxnXVYjedAAAA+J+vv/5a06ZN04EDB0ps27JliwYMGKD09HRFR0drxIgRlz5BAAAAAIDLqPEAAKhYfnabV27wDcy4BgDAh6SlpWny5MmaPHmyoqKiFB0dLT8/Px06dEgpKSmSpOjoaH366acKCwsznC0AAAAA4EKo8QAAAFzHwDUAAD7k+uuv18yZM7VmzRr99NNP2r17t3JyclSzZk317t1bv//97/Xggw8qPDzcdKoAAAAAgIugxgMAoGL52ZgtXZUwcA0AgA+pW7euxo8fr/Hjx5tOBQAAAADgIWo8AAAA1zFwDQAAAAAAAAAAAKDSsTPjukqxm04AAAAAAAAAAAAAAHB5Y8Y1AAAAAAAAAAAAgErHjwnXVQoD1wAAAAAAAAAAAAAqHbudkeuqhKXCAQAAAAAAAAAAAAB
GMeMaAAAAAAAAAAAAQKXjZ2PGdVXCjGsAAAAAAAAAAAAAgFHMuAYAAAAAAAAAAABQ6diZcV2lMOMaAAAAAAAAAAAAAGAUM64BAAAAAAAAAAAAVDp+TLiuUhi4BgAAAAAAAAAAAFDp2O2MXFclDFxfhGU38xTZ8rONxJUk2f2MhQ44vsdY7Jx6vzMWu9A/2FhsR+NORuLaLMtIXEmS5TAWOt/gFRoCZO45LzD464ZrwgpOG4sdnmfwmJdv7phnP3nQTODgcDNxJTWuGWQs9rrfCo3Fvr6asdA69dkkY7EzFGgsdvipI0bi1sj4zUhcSVL4deZiw2X+MvM51O5n7ngXdHyXsdhZdVoai20LCDEWu7BNX3OxHWY++AcUZBmJK0mWv7nPN5bN4GdZriXp80x+7RKUm24sthVo7sO3wy/AWOzagea+6yq0m3ncJ7ILjMSVpLAAc7VOtfwcY7FNfodt8qhz2jLz+65jJCpgHgPXAAAAAAAAAAAAACodTqirWsxN/QMAAAAAAAAAAAAAQMy4BgAAAAAAAAAAAFAJ+THhukphxjUAAAAAAAAAAAAAwChmXAMAAAAAAAAAAACodLjGddXCwDUAAAAAAAAAAACASsfPzsB1VcJS4QAAAAAAAAAAAAAAoxi4BgDAx3322Wey2Wyy2Wxq3Lix6XQAAAAAAB6izgMAwDvsNptXbvANDFwDAODDMjIy9PDDD5tOAwAAAADgJdR5AAAApWPgGgAAHzZp0iQlJyfr5ptvNp0KAAAAAMALqPMAAPAeP5t3bvANDFwDAOCjNm7cqP/7v//TzTffrKFDh5pOBwAAAADgIeo8AAAuL6tXr9bgwYNVp04dhYSEqHXr1po8ebIyMzPd2k9hYaGWL1+ucePGqXPnzqpRo4YCAwNVr1493XzzzVq2bFmZfQ8cOOC8RElZt+uuu87Th+oV/qYTAAAAJeXn52vMmDEKDQ3VP/7xD61YscJ0SgAAAAAAD1DnAQDgfb58fepZs2Zp7NixsixLDRo0UMOGDbVjxw5NmzZNixYt0vr16xUZGenSvt577z09+OCDkiS73a7mzZsrLCxMe/fu1dKlS7V06VLFxcXpzTfflO0Cz0nXrl1Lvb9NmzbuP8AKwMA1AAA+6M9//rN+/PFH/e1vf1ODBg1MpwMAAAAA8BB1HgAA3udn982B66SkJI0bN06SlJCQoDFjxshms+nXX3/VkCFDlJSUpDFjxmjRokUu7c+yLF199dV6/PHHNXz4cFWvXl2SVFBQoNdee01PP/20EhMT1b59ez388MNl7mf9+vUeP7aKxFLhAAD4mJ9//lkvv/yyYmNj9Yc//MF0OgAAAAAAD1HnAQBweZk6daocDofuvfdexcXFOWdB169fX/PmzZPdbtfixYu1fft2l/Y3bNgwbdu2TQ888IBz0FqS/P399cc//tE5GzshIcH7D+YSYuAaAAAfYlmWxowZo/z8fCUkJMjPz890SgAAAAAAD1DnAQBQcew279y8KSMjQ1988YUkKS4ursT2Fi1aqE+fPpKkBQsWuLTPyMjICy4BPmjQIEnSrl273E3Xp7BUOAAAPuSNN97Qhg0b9Pjjj+uaa64xnQ4AAAAAwEPUeQAAXF62bt2q3NxcBQUFqXPnzqW26d69u1asWKGNGzd6JWZ2drYkKTQ09ILtHn/8ce3cuVM2m02NGzfWgAEDNHToUNntvjHXmYFrAAB8xJEjRzRx4kRFR0dr2rRpptMBAAAAAHiIOg8AgIrld4FZyKbs3r1bktSoUSMFBASU2qZZs2aSvDdDet68eZLODohfyKxZs4r9XHRd7MWLF6tJkyZeycUTDFwDAOAj/vCHP+j06dN69913FR4e7tG+EhISlJiY6HL70ffcrjGj7vMoJgAAAACgOG/Vee7WeKNG3a8H/v+1LgEAwMW5e6yVzi4DHh8fX+L+1NRUSWeX9y5L0ba0tDS3Ypbm448/1qeffiqbzaann366xHZ/f3/dc88
9uuOOO9SmTRvVr19fJ06c0LJly/Tss89q27Zt6t+/v5KSkhQREeFxPp5g4BoAAB+xZcsWSdIjjzyiRx55pNi2oqVeDh06pKioKEnS4sWLdf3115e6r5SUFOf+XHFjv97lSRkAAAAAcAHeqvPcrfEGDhxU3pQBAKhU7F6ace3usbaoT2lycnIkSYGBgWX2DQoKkvS/zwPltXPnTo0cOVKSNG7cuFI/RzRo0EBz584tdl/9+vU1ZswY9e7dWx07dtTevXv197//Xc8++6xH+XiKgWsAAHzMsWPHytzmcDic2/Py8spsV69ePcXGxrocMyqqrusJAgAAAADc4mmd536NF+VeggAAVFJ+Xro0s7vH2qI+pQkODpZ04e9vc3NzJUkhISFuxTzXoUOHNGDAAKWnp+vGG2/UK6+84vY+mjdvrocfflivvPKKFi9ezMA1AAA468CBA2Vue++993T//fcrJibmgu2KxMfHl7pMTVny0o663BYAAAAA4Bpv1Xnu1nhZ2TkutwUAAO4fay+kZs2akv63ZHhpirYVtXXX0aNH1bdvXyUnJ6tXr15atGhRmdfTvpiiWdp79uwpV39vYuAaAAAAAAAAAAAAQKXjraXCvally5aSpOTkZOXn55c6oLxv375ibd1x/Phx9enTR3v27FGXLl30ySefOGd5l0fRkuYFBQXl3oe3eGkCPQAAAAAAAAAAAABc3jp06KDAwEDl5uZq06ZNpbZZt26dJKlLly5u7Ts1NVU33HCDfv75Z8XGxurzzz9XWFiYR/n++OOPks5eC9s0Bq4BAAAAAAAAAAAAVDp+NptXbt4UHh6uAQMGSJISExNLbN+zZ49WrVolSRo+fLjL+z19+rT69++v7du363e/+52++uorVa9e3aNcMzIy9M9//lOS1L9/f4/25Q0MXAMAUAmMGjVKlmW5dH1rAAAAAIDvo84DAKDqmjx5smw2m+bOnavExERZliVJSklJ0Z133imHw6GhQ4eqXbt2xfo1btxYjRs31sKFC4vdn5WVpZtuuklJSUlq3bq1Vq5cqVq1armUS1xcnBYvXqzc3Nxi9+/cuVMDBw7U/v37FRYWpqeeesqDR+wdXOMaAAAAAAAAAAAAQKXji9e4lqROnTpp5syZeuKJJxQfH69p06apdu3a2rFjh3Jzc9WqVSu99dZbJfodPHhQ0tmZ0Od6/fXXtX79eufPw4YNKzP2woULFRUV5fx506ZNeuuttxQQEKDmzZsrIiJCJ06ccF5nu2bNmpo/f74aN27syUP2ikoxcG1Zlr755hstXbpU69ev188//6zTp0+rRo0a6tChg0aOHKm77rpLNh/94wQAAAAA/A81HgAAAADAG/x8eG3pcePGqW3btpoxY4a+/fZbHT9+XDExMRo+fLgmTpzo1rWpz50tvXPnzgu2zcnJKfbzxIkT9cUXXygpKUlHjx7Vnj17FBoaqo4dO2rQoEF69NFHiw10m2Sziuam+7CVK1eqX79+zp+bNm2qmjVrav/+/UpNTZUk3XTTTVq0aJGCgoK8GjsnO9ur+3OVLd9MXEmS3c9Y6IDje4zFzqn3O2OxTTL1XaDN5FuP5TAWOt/gFRoCDH7vW2Dw1x0WGmIueCWSl3bUWGxb3uV5zLOfPGgmcHC4mbiSfqvZwljsHb9lGYt9ff1QY7HtuWeMxc7wjzAWOzwzxUhcv4zfjMSVJHvz64zFLg+TNV5Glrnjjr/MfA512Mwd74KO7zIWO6tOS2OxAwz9riXJMvj5ptBh5oN/QIG517Xl7933KLdiG/xdmyzpQ0OCzQWvRLKycy7eqIL455wyFtsKrGYstsMvwFhse2G+sdiFdjOP+0R2gZG4khQWYO77vWrKMxa70N/c+6/D4IEn29AXm3UizH2XUNlsO3LKK/tpH13DK/uBZ3z4PIT/sSxLTZo00euvv65jx45p37592rx5s06ePKn3339fQUFBWrZsmZ577jnTqQIAAAAALoIaDwAAAAD
gDXabzSs3+IZKMXDduXNn7dq1S48//rjq1q1bbNu9997r/DJj9uzZcjjMndUMAAAAALg4ajwAAAAAAHC+SjFwHRERoYCAspccGTRokCQpNTVVv/1mbnk+AAAAAMDFUeMBAAAAALzBZvPODb6hUgxcX0z2OdehDgnhWqYAAAAAUJlR4wEAAAAAcPnxN52AN8ybN0+S1K5dO0VERBjOBgAAAADgCWo8AAAAAIAr7GK6dFVS6Qeuk5KS9Oabb0qSJkyYYDgbAAAAAIAnqPEAAAAAAK5ime+qpVIvFX7s2DENGzZMBQUFuuWWW3THHXeYTgkAAAAAUE7UeAAAAAAAXL4q7Yzr9PR0DRo0SMnJyerYsaPee+89l/olJCQoMTHR5Tj3jxqlBx98sJxZAgAAAABccalqvJGjRmn0A9R4AAAAAFAV2JlxXaVUyoHrjIwMDRw4UFu3blWbNm305Zdfunzds5SUFG3ZssXlWIMGDixvmgAAVBpWYDVjsdP9zF27NNjf3CfbID8zH8PivjhqJK4k3X9ttrHYXQ98aiy2Vf82Y7EfW27u9z1tQLix2PacM0biWgX5RuJWBZeyxhtAjQcAuAzkFTqMxc6wm/sc6CgwFlo1HTnGYm88bu6Bd6llps6LcuQZiStJuX61jMXed9rcIrq1QwqNxQ4P8jMWO8Ju7m8NuBxVuoHrrKws3XTTTdq4caNatGihFStWqFYt1w8U9erVU2xsrMvto6KiypMmAAAAAMAF1HgAAAAAgPLiGtdVS6UauM7JydGQIUO0du1axcTEaOXKlW5/6RAfH6/4+HjXY2abmxkEAAAAAFWZiRovI4saDwAAAAAAX1RpBq7z8/N16623auXKlYqOjtaqVavUsGFD02kBAAAAAMqBGg8AAAAA4Cm7mHJdlVSKgevCwkLddddd+uyzzxQVFaVVq1apadOmptMCAAAAAJQDNR4AAAAAwBtYKrxqqRQD1/Pnz9fChQslScHBwRo9enSZbWfNmqUOHTpcqtQAAAAAAG6ixgMAAAAAAOerFAPXubm5zv8fOHBABw4cKLNtenr6JcgIAAAAAFBe1HgAAAAAAG+wM+O6SrF7uoPRo0dr9OjR2r9/vzfyKdWoUaNkWZZLt169elVYHgAAAAAAz1HjAQAAAACA83k84/r999+Xv7+/3n77bW/kAwAAAAAAAAAAAAAXxYTrqsXjGdd169ZVaGiobFz9HAAAr3j++edls9kueHvzzTdNpwkAAAAAcAE1HgAAgGs8nnHduXNnffLJJzpy5Iiio6O9kRMAANDZk8NatGhR6rZ69epd4mwAAAAAAJ6gxgMAwPvsTKytUjweuB47dqw++eQTTZkyRbNnz/ZGTgAAQNKgQYP03nvvmU4DAAAAAOAF1HgAAHgf49ZVi8dLhffu3Vt/+9vfNGfOHN12223asmWLN/ICAAAAAAAAAAAAAFwmPJ5x3bRpU0lSQECAFi1apEWLFikkJES1atWSn59fqX1sNpv27dvnaWgAAAAAAAAAAAAAlymPZ+jCp3g8cH3gwIES92VlZSkrK6vMPjbm7QMAcFHff/+97rrrLh09elTh4eG6+uqrdccdd6hNmzamUwMAAAAAuIkaDwAA4MI8Hrh+9913vZEHAAA4z7Zt27Rt2zbnz0uXLtVLL72ksWPH6tVXXy1zZRMAAAAAgO+hxgMAwPuYLFu1eDxwPXLkSG/kAQAA/r/69evrxRdf1IABA9S0aVOFh4dr9+7d+uc//6k333xTr732mgICAjR9+nTTqQIAAAAALoIaDwAAwDUeD1wDAADviouLK3Ff27Zt9cYbb6hJkyZ65pln9Le//U2PPPKIGjdufOkTBAAAAAC4jBoPAICKY2fCdZXCwDUAAJXIk08+qddff12//vqrli5dqscff7zUdgkJCUpMTHR5v/ePvE9jHhjtrTQBAAAAAC6oqBrvnpGjdP/oB7yVJgAAPouVwqsWrw1cHz58WDNnztSXX36pgwc
PKicnRwUFBc7taWlpeuONN2Sz2fTUU0/J358xcwAA3OXn56drr71WS5Ys0Z49e8psl5KSoi1btri830ED+nsjPQAAAACAGyqqxrthwEBvpAcAAHBJeWX0ePny5brtttt0+vRpWZYlqeTF0GvWrKmPPvpISUlJatOmjYYMGeKN0AAAXHYCAwMlqdgJYuerV6+eYmNjXd5nVFSUx3kBAAAAANxXETXeFdR4AIDLhN10AvAqjweuDx06pOHDh+vMmTMaMmSI7rvvPo0ZM0anTp0q0Xb06NHavHmzli1bxsA1AADl9OOPP0qSGjRoUGab+Ph4xcfHu7zP3MwzHucFAAAAAHBfRdR4pzKyPM4LAADgUvP4RIQZM2bozJkzuu222/TRRx9p2LBhzrMEzzdgwABJ0nfffedpWAAALkvLli3TTz/9JEnq35/lvQEAAACgMqPGAwDAMzabzSs3+AaPB66//PJL2Ww2TZ069aJtmzRpoqCgIO3fv9/TsAAAVEk//fST4uPj9f333xe73+FwaN68ebrrrrskSYMHD1anTp1MpAgAAAAAcBE1HgAAgOs8Xio8OTlZISEhatGihUvtw8LClJ6e7mlYAACqpPz8fCUmJioxMVGRkZGKiYmRv7+/9u7dq7S0NElS9+7dNXfuXMOZAgAAAAAuhhoPAICKZWeydJXi8cC13W5XYWGhS20LCgp0+vRpRUREeBoWAIAqqXHjxpo2bZq++eYb/fzzz9q7d69ycnIUGRmpQYMG6a677tKdd94pPz8/06kCAAAAAC6CGg8AgIrFuHXV4vHAdUxMjH7++WclJyerUaNGF2y7du1a5efnuzw7GwCAy02NGjX0pz/9yXQaAAAAAAAvoMYDAABwncfXuO7Xr58k6c0337xgu/z8fP3pT3+SzWbToEGDPA0LAAAAAAAAAAAA4DJmt3nnBt/g8cD1+PHjFRgYqBkzZujtt98utc2WLVvUr18/ffvttwoPD9cjjzziaVgAAAAAAAAAAAAAQBXh8cB1TEyMZs+ercLCQsXFxemKK65QWlqaJOn6669XdHS0OnXqpHXr1snf31/vv/++ateu7XHiAAAAAAAAAAAAAC5fNpvNKzf4Bo8HriXp7rvv1ueff65mzZrpt99+U15enizL0saNG5WSkiLLstS8eXN98cUXGjJkiDdCAgAAAAAAAAAAAIDPWr16tQYPHqw6deooJCRErVu31uTJk5WZmVnufS5atEi9e/dWzZo1Va1aNbVv316vvvqq8vPzL9jv+PHjGjt2rJo2barg4GBFRUXp9ttv17Zt28qdi7f5e2tHN9xwg3bt2qW1a9dqw4YN+vXXX1VYWKioqCh17dpVvXv3lp+fn7fCAQAAAAAAAAAAALiM+fL1qWfNmqWxY8fKsiw1aNBADRs21I4dOzRt2jQtWrRI69evV2RkpFv7/OMf/6gZM2ZIkpo1a6Zq1arpxx9/1FNPPaVPPvlEX331lYKCgkr027t3r7p166Zjx46pWrVqatOmjQ4fPqz58+fro48+0oIFC3xi8rHXBq6ls9Pxe/bsqZ49e3pztwAAAAAAAAAAAABQjK+OWyclJWncuHGSpISEBI0ZM0Y2m02//vqrhgwZoqSkJI0ZM0aLFi1yeZ9LlizRjBkzFBQUpPnz5zsHmnfu3Kkbb7xRa9eu1aRJk5wD20Usy9KIESN07NgxDRw4UB9++KGqV6+ugoICvfjii5o6daruvvtu7d69W/Xq1fPac1AeHi8VfuDAAS+kAQAAAAAAAAAAAACV39SpU+VwOHTvvfcqLi7OeR3t+vXra968ebLb7Vq8eLG2b9/u8j5feOEFSdIzzzxTbHZ069atNXv2bEnS//3f/+m3334r1u/jjz/Wtm3bVL16dX3wwQeqXr26JMnf318vvviievTooYyMDL366qsePWZv8Hjgunnz5ho0aJA++ugjFRYWeiMnAAAAAAAAAAAAALggu83mlZs3ZWRk6IsvvpAkxcX
FldjeokUL9enTR5K0YMECl/a5Z88eff/992Xus0+fPmrevLlyc3O1dOnSYtuKYowYMUI1a9Ys0bdof/Pnz3cpl4rk8cC1w+HQV199pVtvvVUNGzbU5MmTdfDgQW/kBgAAAAAAAAAAAACVxtatW5Wbm6ugoCB17ty51Dbdu3eXJG3cuNGlfRa1a9q0qaKjo93aZ9HPPXr0uGC/w4cP68iRIy7lU1E8HrhesWKFRowYoYCAAB09elQvv/yymjVrphtvvJFZ2AAAAAAAAAAAAAAqhM3mnZs37d69W5LUqFEjBQQElNqmWbNmkqRdu3a5tc+ifq7uMy8vz3nZ57L6NmzYUIGBgW7lU1H8Pd1Bnz591KdPH508eVLvvfee3n77be3cuVNffPGFvvzyS0VFRWn06NF68MEHFRMT442cAQAAAAAAAAAAAMArEhISlJiY6FafuLg4xcfHl7g/NTVVkhQZGVlm36JtaWlpLsUq7z7T09PlcDgu2Ndms6lGjRo6fvy4y/lUFI8HrovUqlVLTz75pJ588kmtX79eCQkJWrRokVJSUvTyyy/rz3/+s2644QbFx8fr97//vfz8/LwVumJZDiNhCwNCjcSVvH9miTsyo9oYix0gy1jsAnOhZRmK7TD4mAMNvv/4GXzcOYVm3s8kyc9u8I0FLnH4lX7m36Xw11X7jMWe2qv0ZXUuCYeZ1+TEfs2NxJWkJkF5xmJbdW8xFvtwprn33xf6tzAWO9/cw1ZeVGsjce15WUbiSlIlqa58QoBVYCx2vs1rJbhbAgpzjcSVpOw6LY3F9jf4GTSnwFxsf4O1baGh0BlWkJnAksI8X8yw3PIMHmyp8Xyfyd9RdUeOsdjpBt8PbA5znzFi61UzFjvd0Beb1aqZe/81+T1y8xBzdbUj0NzfWYHBL5NzZeY7M3PvZpWPzUuDHCkpKdqyZYvbfUqTk3P2WFg0i7k0QUFnf8vZ2dkuxSrvPov6eTufilIhVXO3bt3UrVs3zZo1S3PnztXs2bP1ww8/6KuvvtJXX33lnIU9ZswYNWrUqCJSAAAAAAAAAAAAAFCVeWkCar169RQbG+t2n9IEBwdLOrtMd1lyc8+eYBwSEuJSrPLus6ift/OpKBV6uneNGjX0hz/8QTfccIMeeughrV27VpKcs7D/8pe/aMSIEfrzn//MMuIAAAAAAAAAAAAALrn4+PhSl/0uj5o1a0r63/LepSnaVtS2ovZZvXp12e12ORyOMvtalqVTp065lU9FqbD1NPLy8vSvf/1LPXv2VJs2bbRu3TpJUkxMjMaPH682bdqosLBQ//nPf9S+fXt9//33FZUKAAAAAAAAAAAAgCrGZjm8cvOmli3PXjIpOTlZ+fn5pbbZt29fsbau7nPv3r1ltiltn4GBgc7Jw2X1PXTokHM2tqv5VBSvD1z/9NNPGjdunOrXr6+RI0dq3bp1stlsuvHGG/XJJ5/ol19+0YwZM7R9+3atWrVKbdu2VXp6up555hlvpwIAAAAAAAAAAAAAl0yHDh0UGBio3Nxcbdq0qdQ2RRN+u3Tp4tI+r7vuOknS/v37deTIEbf2WdS3aHtZ/Ro0aKAGDRq4lE9F8crAdU5OjubMmaOuXbvq6quv1qxZs5Samqq6detq0qRJ+uWXX/TJJ5/opptuks1mc/br1auXvvzyS/n7+5f5iwMAAAAAAAAAAACAEiyHd25eFB4ergEDBkiSEhMTS2zfs2ePVq1aJUkaPny4S/ts2bKl2rZtW+Y+V61apb179yowMFBDhgwptq0oxoIFC5SWllaib9H+RowY4VIuFcnjgevHHntM9erV0+jRo/XNN9/Isiz16tVL//nPf3To0CFNmzZNjRo1KrP/FVdcoaioKKWnp3uaCgAAAAAAAAAAAAAYNXnyZNlsNs2dO1eJiYmyLEuSlJKSojvvvFMOh0NDhw5Vu3btivVr3LixGjdurIULF5bY55QpUyRJr7zyij755BPn/bt27dKDDz4oSXr
kkUdUp06dYv2GDh2qq6++Wunp6br77rudY7KFhYV67rnntHbtWoWGhuqPf/yj956AcvL3dAf//Oc/JZ29WPfIkSP10EMPub3++fXXX69jx455mgoAAAAAAAAAAACAy8X/HxD2NZ06ddLMmTP1xBNPKD4+XtOmTVPt2rW1Y8cO5ebmqlWrVnrrrbdK9Dt48KAkKSMjo8S2W2+9VePGjdNrr72mIUOGqFmzZgoLC9OPP/6owsJCdevWTX/+859L9LPb7VqwYIG6d++uzz//XNHR0WrdurUOHTqk48ePKyAgQP/6179Uv3597z8RbvJ4xvW1116rd999V0eOHNHMmTPLddHuDz/8UKtXr/Y0FQAAqpzPPvtMw4YNU/369RUUFKSoqCh17dpVzz77rAoKCkynBwAAAABwAzUeAABe5oNLhRcZN26cli9frkGDBikzM1M7duxQTEyMJk2apM2bN6t27dpu7/Nvf/ub5s+fr549e+rEiRPavXu3rrrqKr3yyitatWqVgoODS+3XsmVLbd++XY899pjq1KmjH374QdLZZcS//fZb3XLLLR49Vm+xWZaPnorgI3KyMo3Eddj8jMSVpHMuQ37JFTjM/TkGmHzcBl+FpkIb/FUr0M/cL9vkO25eYcUcfF3hZzf3nIeHhhiL7YmCggLdf//9+te//iVJatiwoaKionTy5EkdPnxYeXl5OnPmjMLCwrwSLzsnxyv7KY8py/cZiz21V7Sx2LbckmdNXgr7HRFG4kpSk6A8Y7Etu8cLDZXboRxzn+uqBXh8nmq5mTzWR4aYec7teVlG4kpSUESksdiVTW7mGWOx821m3osCHObef/PtgcZim/wMmltg7rO3v8HHXWjovT/b4PMdZvBYm2fqCZfZ11f1atR4rjiTle2V/ZRHYIG52OlWkLHYNWTucecGVDMWO8fQF5smax27sW9UJXuemfEKSXIEmvs7M/n9uakxixphoUbiVka5p1O9sh/qat9g7hs8AABQpocfflj/+te/1KlTJyUkJKhDhw7ObVlZWVqxYoWCgswV5AAAAAAA11HjAQBQMWwVNFsaZjBwDQCAj1m9erVmz56txo0ba+XKlQoPDy+2PTQ0VEOGDDGUHQAAAADAHdR4AAAArvHaehrff/+94uLidNVVVykiIkJ+fn5l3vz9GS8HAKAsM2bMkCQ9+eSTJb7QAAAAAABULtR4AABUIB++xjXc55UR5H/84x964oknVFhYKC6ZDQBA+eXk5Oirr76SJPXr1087duxQYmKiduzYoaCgIHXo0EEPPPCAYmJiDGcKAAAAALgYajwAAADXeTxw/e2332rs2LGSpEceeUQ33XSTbrzxRkVGRmr+/Pk6evSoVqxYoQ8++EARERH6+9//rnr16nmcOAAAVdH333+v/Px8SdK6dev02GOPKS8vz7n9008/1fTp0/Xuu+/qzjvvNJUmAAAAAMAF1HgAAFQwZktXKR4vFf73v/9dlmVp7NixmjVrlgYOHChJCgwMVJ8+fXTXXXfpnXfe0caNG2Wz2TR58mTFxsZ6nDgAAFVRSkqK8/+PPvqoOnTooE2bNik3N1d79uzRbbfdptzcXI0cOVJbt241mCkAAAAA4GKo8QAAqGAsFV6leDxwvWHDBtlsNues6yLnLxnevn17zZo1S/v27dNf//pXT8MCAFAlZWRkOP8fGhqqzz//XJ06dVJgYKCaN2+uefPmqX379srPz9dLL71kMFMAAAAAwMVQ4wEAALjO46XCjx07pqCgoGLXYbHb7crJySnR9pZbblFAQIAWL16sF1980dPQAABUOcHBwc7/jxo1SjVr1iy23W63a/z48Ro5cqS++uorORwO2e0lz0NLSEhQYmKiy3FH3X+/HnzwwfInDgAAAAAowVSNd9+oURr9ADUeAOAy4GC2dFXi8cB1aGiobDZbsfvCw8N1+vRp5ebmKigoyHl/QECAQkNDdfDgQU/DAgBQJZ37JcaVV15Zapui+8+
cOaOTJ0+qTp06JdqkpKRoy5YtLscdOGiQm5kCAAAAAC7GVI3X//9fzhEAAKAy8XjgOjo6Wjt37lRBQYH8/c/urlmzZtq6dau+++47devWzdn2119/VXp6ukJDQz0NCwBAldS6dWvn/wMDA0ttc+4Z+44yziisV6+eYmNjXY4bFRXlclsAAAAAgGuo8QAAqFg2rk9dpXg8cH3llVfqp59+0g8//KAOHTpIknr16qUtW7boxRdf1NKlSxUcHKy8vDw9/vjjkqS2bdt6GhYAgCopOjpaMTExOnjwoH755ZdS2+zbt0/S2S83atWqVWqb+Ph4xcfHuxw3u5RLfAAAAAAAPGOqxjuTle1+sgAAAIaVvGCKm/r37y/LsvTJJ58473v00UcVFBSklStXqkGDBuratauio6O1ZMkS2Ww2PfbYY56GBQCgyrr99tslSf/+979VUFBQYvs777wjSerZs6dztRMAAAAAgG+ixgMAoAJZDu/c4BM8Hri+9dZbNWXKFNWvX995X5MmTfTBBx8oPDxcqamp+uabb3Ty5EnZbDY9/fTTuvvuuz0NCwBAlfXHP/5R1atX1/79+/XYY48p5//PhrYsS3//+9/1ySefyGazacKECYYzBQAAAABcDDUeAAAVyLK8c4NPsFlWxf02UlNT9dlnn+nQoUOqXr26+vfvr+bNm1dUuAqRk5VpJK7D5mckriTZbMZCq8Bh7s0hwOTjNvieaCq0wV+1Av3M/bJNHv/yCs2dNeZnN/ech4eGGIvtiRUrVmjIkCHKzs5W9erV1bJlSx0+fFgpKSmy2WyaPn26/vjHP3otnsmlwqcs32cs9tRe0cZi23IzjMTd74gwEleSmgTlGYtt2c3NXDmUY+5zXbUAj89TLTeTx/rIEDPPuT0vy0hcSQqKiDQWu7LJzTxjLHa+zcx7UYDD3Ptvvr3067leCiY/g+YWmPvs7W/wcRcaeu/PNvh8hxk81uaZesJl9vVVvRo1nitMLhUeWGAudroVZCx2DZl73LkB1YzFzjH0xabJWsdu7BtVyZ5nZrxCkhyB5v7OTH5/bmrMokZYqJG4lVH+sf1e2U/AFU28sh94pkKr5sjISN1zzz3On9PT0xUbGyubzaakpKSKDA0AQKXWr18/ff/993r55Ze1YsUKbdu2TdWrV9eQIUP0xBNPqGfPnqZTBAAAAAC4iBoPAIAKwjLfVcolPd27oKBA27Ztk83klF4AACqJFi1a6N133zWdBgAAAADAC6jxAAAALszcmokAAAAAAAAAAAAAUE42ZlxXKeYuBOGmBQsWKC4uTtdcc43q16+voKAghYeHKzY2VpMnT9bJkydNpwgAAAAAcAN1HgAAAAAAKFJpZly/9NJL+v777xUUFKR69erp6quv1vHjx7V161Zt3bpViYmJ+uqrr9SuXTvTqQIAAAAAXECdBwAAAADwCDOuq5RKM+P60Ucf1ddff60zZ85o//79+u6773Tw4EFt375dv/vd73T8+HHdddddptMEAAAAALiIOg8AAAAAABSpNAPXY8aMUY8ePRQQEFDs/rZt2+rtt9+WJO3YsUM///yzifQAAAAAAG6izgMAAAAAeMRyeOcGn1Bplgq/kCuvvNL5/6ysLIOZAAAAAAC8gToPAAAAAHBRDDpXKZVmxvWFrF+/XpIUFhamVq1aGc4GAAAAAOAp6jwAAAAAAC4vbs+49vPzq4g83OZwOHT06FF99dVXeuaZZyRJf/nLXxQWFmY4MwAAAABAeVDnAQAAAADcYWPGdZXi9sC1ZVkVkYfLXnvtNY0fP77YfZ07d9acOXM0cOBAQ1kBAAAAAMqLOg8AAAAAALg9cD1lypSKyMNl0dHR6tq1qwoKCpScnKyjR49q27Ztev/993XdddepRo0aRvMDAAAAALiHOg8AAAAAUC4OZlxXJTbL9BRqD23fvl2PPfaY1q1bp/bt22vz5s0XXM48ISFBiYmJLu///lEj9eADD3gjVbc4bOaWZLf
ZjIVWgcPcn2OAycdt8FVoKrTBX7UC/cz9sk2+4+YVmjuA+9nNPefhoSHGYlcm2Tk5xmJPWb7PWOypvaKNxbblZhiJu98RYSSuJDUJyjMW27K7fb6m1xzKMfe5rlqA3Vhsk8f6yBAzz7k9L8tIXEkKiog0FrsiuFPnuV3jjbxPYx4Y7a1U3ZJvM/NeFOAw9/6bbw80FtvkZ9DcAnOfvf0NPu5CQ+/92Qaf7zCDx9o8U0+4zL6+qlejxnPFmaxsY7EDC8zFTreCjMWuIXOPOzegmrHYOYa+2DRZ69iNfaMq2fMyjcV2BJr7OzP5/bmpMYsaYaFG4lZGhQe/98p+/GLaeWU/8Iy5b/C85Oqrr9ayZcvUtGlTbdu2TR9++KHuvvvuMtunpKRoy5YtLu9/0MAB3kgTAACfFnjgO2Oxp/a+2ljsPINfpmf4BxiJe4W/uS8ZM+e+bCx2wEhzqwZlF5gbMKoTau7jfnpuobHYNkNnih3NN/eeEmMscsVwp85zu8Yb0N9baQIA4LNMDh7n+5s7ucDP4Akd2XZzg3p5Bkf1QgzVmCYnaIRY5mq8bH9zf2fZeeae8wCTk2IKThuKzMC1yyr3/Fycp9IPXEtSeHi4evbsqUWLFikpKemCA9f16tVTbGysy/uOioryRooAAAAAADe4WudR4wEAAADAZcxiqfCqpEoMXEtSQUFBsX/LEh8fr/j4eJf3m5NlbukNAAAAALicuVLnuVvj5Wae8TgvAAAAAADgfVVi4Do1NVVr1qyRJHXo0MFsMgAAAAAAj1HnAQAAAAAuxsaM6yrFbjoBV3z99deaNm2aDhw4UGLbli1bNGDAAKWnpys6OlojRoy49AkCAAAAANxCnQcAAAAAAM5VKWZcp6WlafLkyZo8ebKioqIUHR0tPz8/HTp0SCkpKZKk6OhoffrppwoLCzOcLQAAAADgYqjzAAAAAAAeY8Z1lVIpBq6vv/56zZw5U2vWrNFPP/2k3bt3KycnRzVr1lTv3r31+9//Xg8++KDCw8NNpwoAAAAAcAF1HgAAAAAAOFelGLiuW7euxo8fr/Hjx5tOBQAAAADgBdR5AAAAAACPMeO6SqkU17gGAOByceDAAdlsNpdu999/v+l0AQAAAAAXQI0HAEAFcxR65wafUClmXAMAcLkIDg5W165dy9yek5OjpKQkSWeXWAUAAAAA+C5qPAAAANcxcA0AgA+JiorS+vXry9w+Z84cjRo1SiEhIbr99tsvYWYAAAAAAHdR4wEAULEsB0uFVyUsFQ4AQCXy3nvvSZKGDRumiIgIs8kAAAAAADxCjQcAAFyxdetW3X777YqKilJwcLCaNm2qsWPH6rfffnN7X5Zl6b///a8mTJigbt26qVatWgoICFCdOnXUv39//fvf/5ZlWWX2v9jlT6Kiosr9OJlxDQBAJXHgwAF9/fXXkqRRo0aZTQYAAAAA4BFqPAAAvOAyuD714sWLdccddyg/P19169ZVmzZttGvXLv3973/XggULtH79ejVt2tTl/a1atUr9+vVz/ty0aVM1adJE+/fv1/Lly7V8+XLNmzdPixYtUlBQUJn7ueaaa0rdXqtWLfce4DmYcQ0AQCUxZ84cWZalRo0aqU+fPqbTAQAAAAB4gBoPAABczJEjR3TvvfcqPz9fkydP1pEjR5SUlKQjR45o4MCBSklJ0e23337BGdLnsyxLTZo00euvv65jx45p37592rx5s06ePKn3339fQUFBWrZsmZ577rkL7qdo0Pz828cff1zux8vANQAAlYBlWZozZ44k6b777pPdziEcAAAAACorajwAALzEUeidm4/661//qqysLPXo0UMvvvii/P3PLqZdvXp1ffDBB6pevbo2b96sTz/91OV9du7cWbt27dLjjz+uunXrFtt27733OgesZ8+eLcclvoY4n4gAAKgEvv76a+3fv18SS8gBAAAAQGVHjQcAgHdYhYVeufmqhQsXSpLi4uJKbKtZs6ZGjBghSZo
/f77L+4yIiFBAQECZ2wcNGiRJSk1NLdc1tD3BNa4BAKgE3nvvPUlS9+7d1axZM7PJAAAAAAA8Qo0HAAAu5tChQzpy5IgkqUePHqW26d69u2bPnq2NGzd6LW52drbz/yEhIWW2mzp1qn799VcVFBQoOjpaffr00e23337B62JfDAPXAAD4uIyMDOeZdZyJDwAAAACVGzUeAABedImXsr6Udu/eLUkKDAxUgwYNSm1TdALcL7/8ovz8/AvOpHbVvHnzJEnt2rVTREREme3eeeedYj/PmTNHU6ZM0aJFixQbG1uu2AxcAwDg4xYuXKjMzEyFhoY6l365mISEBCUmJroc48EhvRV3+83lTREAAAAA4KJLUePdf+/dGjN6VDkzBADg8uPusVY6u3x3fHx8BWV0dqlu6eyS4DabrdQ2kZGRkiSHw6HTp0+rVq1aHsVMSkrSm2++KUmaMGFCqW1uvvlm3XvvvWrXrp0aNGigjIwMrVixQn/605/0yy+/qH///tq6dasaNmzodnwGrgEA8HFFS8gNHz5c4eHhLvVJSUnRli1bXI6R0uXq8qQGAAAAAHDTpajxBvXvV57UAACofBzeuT61u8faoj4VKScnR9LZGddlOXdZ7nOX+C6PY8eOadiwYSooKNAtt9yiO+64o9R2H330UbGfg4ODdccdd6hfv37q2LGjkpOT9cILL2j27Nlu58DANQAAPmz//v1au3atJPeWkKtXr55by7HUq+PZmXgAAAAAgIu7VDVe1BVXuJsaAACXNXePtUV9yjJu3Di9/vrrbufRs2dPrVmzRtLZAWFJysvLK7N9bm6u8/8Xuh71xaSnp2vQoEFKTk5Wx44dnSfauaN27dqaOHGiHn74YS1ZskRvvfVWmTPFy8LANQAAPmzOnDmyLEuNGzdWr169XO4XHx/v1jI1hTvXlSM7AAAAAIA7LlWNl3s6tRzZAQBQ+VhemnHt7rH2YsLCwsq1bHf16tWd/69Zs6YkKS0tTZZllToIXLScuN1uv+D1qC8kIyNDAwcO1NatW9WmTRt9+eWX5d7X9ddf78wrNTXV7eeAgWsAAHyUZVl6//33JUkjR450++w0AAAAAIDvoMYDAKACOBymMyjVtGnTNG3aNI/20bJlS0lnZ1wfOnRIjRo1KtFm3759kqQmTZooICDA7RhZWVm66aabtHHjRrVo0UIrVqzw6DrZ5y5rXlBQ4HZ/e7kjAwCACvX1119r//79stlsGjlypOl0AAAAAAAeoMYDAADuaNSokerXry9JWreu9BUzi+7v0qWL2/vPycnRkCFDtHbtWsXExGjlypWKiooqf8KSfvzxR/2/9u49rqoy7///e4OAoIiaKQgqoKZmaZJ2sjxlmb9Kp0nT7KDWJM1kWeOoU3NrfsvGU44dZm6FX01W92Sjfp07c6azmmFpo3geRTQpJTyGKCCyYV+/P/yxiwDdG/bm2sDr+XisxwP2Onw+Gzbrsy6udV1LOj/NeXU6wOm4BgAgQJU9R6Rfv35KSEiwmwwAAAAAoEZo4wEA4HvGVeqTJVDdfffdkqTU1NQK63Jzc7V8+XJJ0siRI706rtPp1N13363PPvtMsbGxWrNmjdq1a1ejXEtKSrRgwQJJ0qBBg9SokfcTf9NxDQBAgFqyZImMMVq3bp3tVAAAAAAANUQbDwAAeGvKlCkKDw/X+vXrNWPGDJWWnu9kz8vL05gxY5SXl6devXrpzjvvrLDvjTfeqPj4eL300kvlXi8tLdWYMWP0r3/9S9HR0VqzZo0SExM9yuf3v/+93nzzTZ05c6bc64cOHdKIESO0ceNGNWrUSDNmzKjW++UZ1wAAAAAAAAAAAADqngAeLe0L7dq101tvvaV7771Xzz//vFJSUtSuXTvt3btXBQUFatOmjZYtWyaHw1Fh38OHD+vbb7/VqVOnyr2+bNkyrVixQtL5Kb0feuihKuO/+uqr6tWrl/v7vXv3au7cuXr44YeVmJioli1bKi8vTxk
ZGTLGqHHjxnrttdd07bXXVuv90nENAAAAAAAAAAAAAAFoxIgRSkxM1OzZs7V+/Xrt3LlTbdu21fjx4zV9+nS1bt3aq+OdO3fO/XVWVpaysrKq3DYvL6/c97/+9a8VHR2tzZs3Kzs7W1lZWQoLC1P37t01ePBgTZw4UR07dvQqn5+i4xoAAAAAAAAAAABA3eNy2c6gViQlJbmfZ+2pqjqkx40bp3HjxlUrjyFDhmjIkCHV2tcTdFwDAAAAAAAAAAAAqHNMaf2eKryhCbKdAAAAAAAAAAAAAACgYWPENQAAAAAAAAAAAIC6x8WI6/qEEdcAAAAAAAAAAAAAAKsYcQ0AAAAAAAAAAACg7mHEdb3CiGsAAAAAAAAAAAAAgFWMuAYAAAAAAAAAAABQ5xiXy3YK8CE6rgEAAAAAAAAAAADUPUwVXq/QcX0RRS6HlbgOh7ESV5KMsRfbZS+0FGzndy1JpRbfuNPSzUihFn/eQc4ia7GdwWHWYttUXGrzjxue2NT4cmuxo4vsPbkkLqTAWuzG585YiesKirISV5IOjfiDtdiJh9Otxe5uLbJU2qidtdgljVtZi73229NW4t4cbe/6RmpqMXbdkldir+5Ehtn5jOQ57TX9i4rt/ROpSYi937XN9s7ZEnvX3rZ+5C2NvWu6c4q0FttmK6uxKbYYPdxi7LrjaEmotdgxZ49Yix3SpKW12E6HvZ95s2B79dYVFGIlbljBcStxJelosL3PWWiwvZGlzUKDrcUOdhZai10a3txabKAhouMaAAAAAAAAAAAAQN3DiOt6xd7txwAAAAAAAAAAAAAAiBHXAAAAAAAAAAAAAOog47I3hT58jxHXAAAAAAAAAAAAAACrGHENAAAAAAAAAAAAoO7hGdf1Ch3XAAAAAAAAAAAAAOoeOq7rFaYKBwAAAAAAAAAAAABYxYhrAAAAAAAAAAAAAHWOKWXEdX3CiGsAAALQyZMn9cwzz6hHjx5q2rSpQkNDFRcXp3vuuUdpaWm20wMAAAAAeIE2HgAAwMXRcQ0AQIDJzMzUlVdeqdmzZ2v37t1q06aNunfvrtOnT2v58uXq16+fFi5caDtNAAAAAIAHaOMBAOBHLpdvFgQEOq4BAAgwjz76qHJyctS5c2ft3LlTBw4c0NatW3Xs2DFNnjxZxhhNnTpVmZmZtlMFAAAAAFwEbTwAAADP0HENAEAAOXPmjNauXStJmj9/vi6//HL3usaNG2v+/Pnq1KmTSkpK9NFHH9lKEwAAAADgAdp4AAD4mavUNwsCAh3XAAAEkHPnzskYI0nq2LFjhfUOh8P9utPprNXcAAAAAADeoY0HAIB/GVepTxYEBjquAQAIIK1atVJcXJwk6csvv6ywvqCgQNu2bZMkXXPNNbWZGgAAAADAS7TxAAAAPEfHNQAAAWbOnDlyOByaMmWKXnvtNR05ckSFhYX6+uuvNWzYMB09elT333+/+vbtaztVAAAAAMBF0MYDAMB/jMvlkwWBoZHtBAAAQHn33XefoqKiNGvWLD3yyCPl1sXExGjRokVKTk62lB0AAAAAwBu08QAAADzDiGsAAALQ/v37dezYMQUFBSk+Pl49evRQRESEcnJytGTJEu3evdt2igAAAAAAD9HGAwDAP0ypyycLAgMjrgEACDCPPfaY/vu//1t9+vTRhx9+qMsuu0ySdPbsWT377LOaP3+++vbtqx07dqhDhw6VHiMlJUWpqakex7z57jH6xZhxvkgfAAAAAPATNtp4I+4bq/vGPeST/AEAAGoLHdcAAASQHTt2aNGiRQoJCdHy5cvL/dMiPDxc8+bNU3p6uj777DPNnj1bixcvrvQ4OTk5Sk9P9zjuVTfdXOPcAQAAAADl2Wrj9Rs8pMa5AwBQFzBaun6h4xoAgACSlpYmY4w6d+5c5Z32t956qz777DNt3ry5yuPExMQoKSnJ47iXtG7jda4AAAAAgAuz1cZr3Sba61wBAKiLjIuO6/qEjmsAAALImTNnPN6
2qKioynXJyclKTk72+FhfZp30eFsAAAAAgGdstfG++yHf420BAAACRZDtBAAAwI/KnnWWmZmpb7/9ttJtPv74Y0lSly5dai0vAAAAAID3aOMBAOBfptTlkwWBgY5rAAACyK233qrWrVvL6XRq5MiR2rdvn3vd2bNnNXXqVH322WeSpAcffNBWmgAAAAAAD9DGAwAA8BxThQMAEECaNGmiv/3tb/rFL36hf//73+rWrZs6dOigyMhI7d+/X4WFhZKkxx57TMOHD7ecLQAAAADgQmjjAQDgX4yWrl8YcQ0AQIAZPHiwduzYoYkTJ+qyyy7TkSNHtGfPHkVFRWn48OFavXq1/vznP9tOEwAAAADgAdp4AAAAnmHENQAAASgxMVGvvvqq7TQAAAAAAD5AGw8AAP9wlZbaTgE+VKdHXP/rX/+Sw+GQw+FQfHy87XQAAAAAADVAGw8AAAAA4A3jcvlkQWCosx3X+fn5+vWvf207DQAAAACAD9DGAwAAAACgYauzHdfPPPOMvvvuOw0fPtx2KgAAAACAGqKNBwAAAADwlil1+WRBYKiTHdcbN27UX/7yFw0fPly/+MUvbKcDAAAAAKgB2ngAAAAAAFRt69atGjVqlKKjo9W4cWMlJiZq0qRJOn78eLWON3PmTPejuqpaFi9eXOX+TqdT8+fPV8+ePdWkSRO1aNFCAwcO1MqVK6v7FiVJjWq0twVOp1OPPPKIIiIi9Oc//1mffvqp7ZQAAAAAANVEGw8AAAAAUF0NYbT0ypUrNXr0aDmdTrVu3Vrdu3dXRkaGXnnlFS1fvlxpaWlKTEys1rFbt26tzp07V7ouJiam0teLiop0yy23KC0tTcHBwerevbsKCgq0bt06rVu3TtOmTdOcOXOqlU+d67iePXu2du3apYULFyouLs52OgAAAACAGqCNBwAAAABA5bKzs/XAAw/I6XRq+vTpmjFjhho1aqS8vDyNHj1aH374oUaNGqWvv/5aDofD6+MPHTpUS5Ys8WqfadOmKS0tTQkJCfrggw/UpUsXSdKqVat0zz33aO7cuerbt6/uvPNOr/OpU1OF79mzR3/84x+VlJSkxx9/3HY6AAAAAIAaoI0HAAAAAKgJ43L5ZAlU8+fPV2Fhofr166fnnntOjRqdH5McFRWld955R1FRUdq8ebNWr15dK/kcPXrUPYX466+/7u60lqRhw4Zp6tSpks5PRV4ddabj2hijRx55RE6nUykpKQoODradEgAAAACgmmjjAQAAAABqylXq8skSqFasWCFJmjBhQoV1LVq00MiRIyVJy5Ytq5V8Vq1apeLiYnXu3FkDBw6ssD45OVmSlJ6ergMHDnh9/DrTcb1o0SJt2LBBEydOVO/evW2nAwAAAACoAdp4AAAAAABU7dChQ8rOzpYk9evXr9JtbrrpJknSxo0bqxVj+/btGjNmjAYNGqThw4dr+vTp2r17d5Xbl8Upi/tzsbGxSkhIqHZOdeIZ19nZ2Xr66acVGxurWbNm2U4HAAAAAFADtPEAAAAAAL5gAni0dE3t27dPkhQaGqq4uLhKt+nYsaMk6ZtvvpHT6VRISIhXMbZt26Zt27a5v1+1apVeeOEFTZo0SS+++GKF2dHKciqLW1VOBw8eVEZGhle5SHWk4/rxxx/X6dOn9cYbbygyMrJGx0pJSVFqaqrH298/dpzGP/RwjWICAAAAAH5ks4137wNj9eB42ngAAAAAgB9527aUzk/fXTY1tj/88MMPks5PCe5wOCrdpmXLlpIkl8ul06dP65JLLvHo2G3bttVzzz2nIUOGKDExUZGRkdq3b5/++7//W4sXL9ZLL72kkJAQzZs3r9KcyuJeKKfc3FyPcvmpOtFxnZ6eLkn6zW9+o9/85jfl1p09e1bS+eHy0dHRkqSVK1fqhhtuqPRYOTk57uN54pYht1UnZQAA6pSrW9m7JDgre880zXNFWIvdNKqplbghJ7x/toyvNIuMtxb7XPurrcXOPuO0Frt
lY3t/XxEh9p5KNKTRQStxnUGXWYlbF9ls4w26ZUh1UgYAoE6JCrN3HegKa2UtttNhr227MTvfWuwbY+21bYOLTluJu7ckykpcSTp66qy12N1a2ftdNyo6ZS22K6xmN7vWRKHTzmje8MZWwtZJvhpx7W3bsmwffyoqKpJ0fsR1VcLCwtxfl7WnPVHZM7OvvPJKLVq0SAkJCZo2bZoWLlyo3/zmN4qPj69WTt7kU6ZOdFyXOXr0aJXrXC6Xe31xcXGV28XExCgpKcnjmG3+/3+UAAAAAAB8izYeAAAAACAQeNu2LNunKk8++aRefvllr/Po37+/1q1bJ0lq3Pj8HQwXahOfO3fO/XV4eLjX8SozefJkvfzyy/r++++1atUqPfHEE+513uRUnXzqRMd1VlZWleuWLFmi8ePHq0OHDhfcrkxycrJXw/ZP5Rd6vC0AAAAA4OJstvGO5RV4vC0AAAAAILAZl29GXHvbtryYpk2bejxt909FRf04o0SLFi0knZ9y2xhT6XThZVN3BwUFqVmzZtXMtrzg4GBde+21+sc//qHMzMxy68pyKotbmZ9Oce6tOtFxDQAAAAAAAAAAAAA/5aupwn1t1qxZmjVrVo2Ocdll5x9JVlxcrEOHDql9+/YVtjlw4PxjARMSEhQSElKjeD9VNhV4SUlJhZw2bNig/fv3V7lvWU5l+XvD3oPnAAAAAAAAAAAAAAAVtG/fXm3btpUkffHFF5VuU/b69ddf79PYu3btkiTFxcWVe/26666TJKWlpVW6X3Z2tg4ePFhuW2/QcQ0AAAAAAAAAAACgzjGlLp8sgeruu++WJKWmplZYl5ubq+XLl0uSRo4c6bOY//znP7V7925J0q233lpu3fDhwxUSEqLMzEytXbu2wr4pKSmSpF69eqlTp05ex67zHdfjxo2TMcajZ58BAAAAAAIbbTwAAAAAAM6bMmWKwsPDtX79es2YMUOlpaWSpLy8PI0ZM0Z5eXnq1auX7rzzzgr73njjjYqPj9dLL71U7vXdu3crOTlZ27dvL/e6y+XS0qVLNWbMGEnSHXfcoT59+pTbpk2bNu5ngT/88MPKyMhwr3v//fc1b948SdKzzz5brffLM64BAAAAAAAAAAAA1DkuV+COlvaFdu3a6a233tK9996r559/XikpKWrXrp327t2rgoICtWnTRsuWLZPD4aiw7+HDh/Xtt9/q1KlT5V53Op1KTU1VamqqWrZsqQ4dOqhRo0bav3+/cnNzJUk33XST3n777UpzmjdvnrZs2aKvvvpK3bt31xVXXKH8/Hz3s60nT56s4cOHV+v91vkR1wAA1EcFBQWaM2eOkpKSFBkZqaZNm+qqq67SvHnzVFxcbDs9AAAAAIAXaOMBAIDqGjFihDZt2qQRI0ZIknbu3KlLL71UEydO1I4dO7yekjs+Pl6zZs3S7bffrubNm2v//v3atm2bQkNDNXToUL399ttau3atmjdvXun+4eHhWrdunebMmaPLL79c+/bt04kTJ9S/f3+tWLFCL774YrXfq8MYY6q9dwNwKr/QStzK7oyoLTY/Ei6Ln8aQYHs/81KLb9xp6WakUIs/78auc9ZiO4PDrMUusfg5K7X4t31JZIS94NV07Ngx3Xzzzdq1a5eCgoJ0+eWXKzg4WLt27VJpaal69+6tNWvWKDIy0mcxz+Xn+exY3jqrEGuxbZ0DJalpqJ37B0NOHLASV5J+iIy3FjsyLNha7OwzTmuxWza2977DQ+zdIxt2eJuVuM7Wl1mJK0lhzVpai13XHMsrsBbb1rnozLlSK3ElqcjihWATi+chm+2dohJ7P3NbP/KIknw7gSWdC/XdNbm3ii3+fTV12Lu+CWsaZS12ddlo4+UVnPXZsbzVWCXWYjsd9iYY3Zht71x0Y6y9/30EFdu5tso4a+9/bEfz7d1s0q2Vvd/1pTpjLbYrzF69LSi1c11XF/+nacvB3z3gk+MkvFj56GLULkZcAwAQYB588EHt2rV
LXbt2VUZGhnbu3Klt27bpwIED6tmzpzZv3qzHHnvMdpoAAAAAAA/QxgMAwH9MaalPFgQGOq4BAAggO3fu1EcffSRJev3118tN89KhQwe9+eabCgoK0v/8z/9o7969ttIEAAAAAHiANh4AAIDn6LgGACCApKWlSZJiY2N1ww03VFjfs2dPde3aVcYY/f3vf6/t9AAAAAAAXqCNBwCAfxmXyycLAgMd1wAABJAffvhB0vl/alQlLi5OkvTll1/WSk4AAAAAgOqhjQcAAOC5RrYTAAAAP2revLkkKTs7u8ptDh8+LElMIwcAAAAAAY42HgAA/mVKGS1dnzDiGgCAANKnTx9J5/+psXHjxgrrd+7cqYyMDElSbm5ureYGAAAAAPAObTwAAADP0XENAEAAueaaa9z/2Bg3bpx27drlXpeZman77rtPpaWlkqTCwkIrOQIAAAAAPEMbDwAA/zKlLp8sCAx0XAMAEGD+9re/qW3btsrIyFCPHj3UqVMndenSRV27dlVmZqbuv/9+SVJkZKTlTAEAAAAAF0MbDwAA/3GVunyyIDDwjGsAAAJM586dtXXrVs2dO1erVq3SoUOH1KRJE9111136P//n/2j16tWSpOjo6CqPkZKSotTUVI9jjh/7gB55aHyNcwcAAAAAlGejjffA2HEa//Cvapw7AABAbaLjGgCAANS6dWstWLBACxYsqLBu7ty5kn58VlplcnJylJ6e7nG8oUNu8T5JAAAAAIBHaruNd8uQ27xPEgCAOsi4GC1dn9BxDQBAHeJ0OvXhhx9KkoYPH17ldjExMUpKSvL4uBe6sx8AAAAA4B+08QAAAH5ExzUAAHXIggULdPz4cSUmJmrYsGFVbpecnKzk5GSPj3suP88X6QEAAAAAvOCvNl5ewVlfpAcAQMAzPJ+6XgmynQAAACgvLS1NH330kUpLS92vnT17VrNnz9Yf/vAHBQcH67XXXlNISIjFLAEAAAAAnqCNBwAA4BlGXAMAEGA2b96sp556ShEREUpISFBoaKgyMjJUWFioiIgILVmyRAMHDrSdJgAAAADAA7TxAADwH1NqbKcAH6LjGgCAADNgwACNHz9eX375pb777juVlJSoXbt2Gjp0qJ566il16NDBdooAAAAAAA/RxgMAwH9cTBVer9BxDQBAgLnqqqv017/+1XYaAAAAAAAfoI0HAADgGTquAQAAAAAAAAAAANQ5xsVU4fVJkO0EAAAAAAAAAAAAAAANGyOuAQAAAAAAAAAAANQ5rlJGXNcnjLgGAAAAAAAAAAAAAFjFiGsAAAAAAAAAAAAAdY4pddlOAT5ExzUAAAAAAAAAAACAOscwVXi9wlThAAAAAAAAAAAAAACrGHENAAAAAAAAAAAAoM5xMeK6XmHENQAAAAAAAAAAAADAKkZcAwAAAAAAAAAAAKhzTKnLdgrwIUZcAwAAAAAAAAAAAACsYsT1RTgcDitxS1z25uQPtvOWJUlBFmMXW3wOgsW3rRBLt6/Y/IznukKsxW5u8axbYi806oAj54KtxW7rzLEW2xkVay329qOFVuLGNO1gJa4klTrt3QF7pMDeWbBbo1xrsR35Tmuxs0NjrMWOadneStzgM0etxJUkNWtpL3Yd08zepaCOFdo5F0WF2avzDoe96/5Qi41bm208m+1qW/9DyQ9uaiWuJFk8pchpsV1dHBJqLXaYtch1S5jF4UpBecesxd51roW12P2OrbUW+5tmt1mLHRocbiVu5yh7H/Imtv6hKmnPCTv/S5CkqNgoa7FDjL02fX6xneubS6xErZtcFq+J4Ht0XAMAAAAAAAAAAACoc4zFQYnwPaYKBwAAAAAAAAAAAABYxYhrAAAAAAAAAAAAAHWOq9Teo+nge4y4BgAAAAAAAAAAAABYxYhrAAAAAAAAAAAAAHUOz7iuXxhxDQAAAAAAAAAAAACwihHXAAAAAAAAAAAAAOocRlzXL4y4BgDAD44cOaK3335bTzzxhK6//nqFh4f
L4XBowIABF93X6XRq/vz56tmzp5o0aaIWLVpo4MCBWrlypf8TBwAAAABUQBsPAIDA5Cp1+WRBYGDENQAAfvDuu+/qqaee8nq/oqIi3XLLLUpLS1NwcLC6d++ugoICrVu3TuvWrdO0adM0Z84cP2QMAAAAAKgKbTwAAAD/Y8Q1AAB+0KxZMw0ePFhPP/20Vq5cqenTp3u037Rp05SWlqaEhATt3r1b27dv1/79+/Xee+8pLCxMc+fO1fvvv+/n7AEAAAAAP0UbDwCAwGRcxicLAgMd1wAA+MFDDz2kTz75RH/84x911113qXXr1hfd5+jRo1q8eLEk6fXXX1eXLl3c64YNG6apU6dKkmbOnOmXnAEAAAAAlaONBwAAbNq6datGjRql6OhoNW7cWImJiZo0aZKOHz/u9bGysrLkcDg8WsaPH19h//j4+IvuV1RUVK33yVThAAAEiFWrVqm4uFidO3fWwIEDK6xPTk7W888/r/T0dB04cEAdO3a0kCUAAAAAwBO08QAA8D9Xaf0fLb1y5UqNHj1aTqdTrVu3Vvfu3ZWRkaFXXnlFy5cvV1pamhITEz0+XuPGjdW3b98q1xcVFWnLli2SpBtuuKHK7a644gpFRUVVui4oqHpjp+m4BgAgQGzcuFGSdNNNN1W6PjY2VgkJCTp48KA2btzIPzUAAAAAIIDRxgMAADWVnZ2tBx54QE6nU9OnT9eMGTPUqFEj5eXlafTo0frwww81atQoff3113I4HB4dMzo6WmlpaVWuf/PNNzVu3DiFh4dr1KhRVW736quvasCAAd6+pQtiqnAAAALEvn37JOmC/6woW5eRkVErOQEAAAAAqoc2HgAA/mdKXT5ZAtX8+fNVWFiofv366bnnnlOjRufHJEdFRemdd95RVFSUNm/erNWrV/ss5pIlSyRJv/zlL9WsWTOfHdcTdFwDABAgfvjhB0lSy5Ytq9ymbF1ubm6t5AQAAAAAqB7aeAAA+J8pNT5ZAtWKFSskSRMmTKiwrkWLFho5cqQkadmyZT6Jl5WVpc8//1ySNG7cOJ8c0xt0XAMAECCKiookSaGhoVVuExYWJkk6e/ZsreQEAAAAAKge2ngAAKAmDh06pOzsbElSv379Kt2m7JEkZY8oqak333xTxhi1b99egwYNuuC2ixcv1h133KGbb75Z9913nxYvXqwzZ87UKD7PuAYAIEA0btxYklRcXFzlNufOnZMkhYeH10pOAAAAAIDqoY0HAID/uQJ4tHRNlT12JDQ0VHFxcZVuU/bYkW+++UZOp1MhISHVjmeM0ZtvvilJevDBBxUUdOHxz3//+9/Lff/OO+9o+vTpeuedd3TLLbdUKwc6rgEACBAtWrSQ9ON0cpUpW1e2bVVSUlKUmprqcey7x4zVfeMe8nh7AAAAAMCF2WzjjR83Tr/61a883h4AgIbO21ornZ++Ozk52U8Zlb9OcDgclW5T9tgRl8ul06dP65JLLql2vM8//1wHDx6UdOFpwgcMGKCbb75Zffr0Ufv27VVcXKy0tDTNmDFDW7du1bBhw7RhwwYlJSV5nQMd1wAABIjLLrtMGzZs0P79+6vc5sCBA+5tLyQnJ0fp6ekex77p5iEebwsAAAAAuDibbbyht93m8bYAANRlxuXyyXG8rbVl+/iTN48dkWr+6JElS5ZIOj/9eNlI7gttVyYiIsI9ZfiNN96o9PR0TZ06VZ9++qnXOdBxDQBAgLjuuuv0xhtvKC0trdL12dnZ7jverrvuugseKyYmxqs72lq3ifY8UQAAAADARdls40VH08YDAMAb3tbasn2q8uSTT+rll1/2Oo/+/ftr3bp1krx77IhUs0eP5Ofna8WKFZIuPNr6QsLDw/XCCy9o6NChWrt2rXJzcy86q8zP0XENAECAGD58uCZOnKjMzEytXbtWAwcOLLc+JSVFktSrVy916tTpgsdKTk72apqab0/me58wAAAAAKBKNtt4RTUccQUAQF3hq2dce1trL6Z
p06bVmrY7KirK/XVZp29ubq6MMZVOF142nXhQUJCaNWtWzWylFStWqKCgQBERERo5cmS1j3PDDTdIOj91+TfffKOrr77aq/0v/FRtAABQa9q0aeO+OHr44YeVkZHhXvf+++9r3rx5kqRnn33WSn4AAAAAAM/RxgMAwP9MqfHJ4muzZs3SiRMnvF7ee+899zHKHiVSXFysQ4cOVRqn7LEjCQkJCgkJqXa+ZdN/jxgxQpGRkdU+zk+nNS8pKfF6f0ZcAwDgB4cOHVKvXr3c35c9j2TDhg1q1aqV+/WpU6dq6tSp7u/nzZunLVu26KuvvlL37t11xRVXKD8/330BMnnyZA0fPryW3gUAAAAAQKKNBwAAal/79u3Vtm1bff/99/riiy903333Vdjmiy++kCRdf/311Y5z8OBBrV+/XlL1pwkvs2vXLvfXcXFxXu/PiGsAAPygtLRUJ0+edC8FBQWSzt9l9tPXCwsLy+0XHh6udevWac6cObr88su1b98+nThxQv3799eKFSv04osv2ng7AAAAANCg0cYDACAwmVKXT5ZAdffdd0uSUlNTK6zLzc3V8uXLJalG03u/+eabMsYoPj5eAwYMqPZxJGnu3LmSpMsvv1yxsbFe78+IawAA/CA+Pl7GVG+KmdDQUE2bNk3Tpk3zcVYAAAAAgOqgjQcAAGyYMmWKXnvtNa1fv14zZszQs88+q+DgYOXl5WnMmDHKy8tTr169dOedd1bY98Ybb9Thw4f15JNP6sknn6z0+MYYvfXWW5KksWPHVvoc7Z968cUXFRYWpjFjxpR7hvfJkyf1zDPPaMWKFZKk5557rlrvl45rAAAAAAAAAAAAAHWOyw/Ppw4k7dq101tvvaV7771Xzz//vFJSUtSuXTvt3btXBQUFatOmjZYtW1Zph/Phw4f17bff6tSpU1Ue//PPP9fBgwflcDg0duzYi+Zz+PBhvfzyy5o0aZLi4+N16aWX6uzZs9qzZ49KSkoUFBSk2bNnu0eKe6vOTBU+c+ZMORyOCy6LFy+2nSYAAAAAwEO08wAAAAAAuLARI0Zo06ZNGjFihCRp586duvTSSzVx4kTt2LFDnTp1qvaxlyxZIknq16+fEhISLrr96NGjNWnSJF177bU6d+6ctm/frgMHDigxMVGPPPKItmzZoqlTp1Y7nzo34rp169bq3LlzpetiYmJqORsAAAAAQE3RzgMAAAAAVIep5yOuyyQlJbmfZ+2prKysi26zZMkSd+e1J6677jpdd911XuXhjTrXcT106FCvfoAAAAAAgMBGOw8AAAAAANS5jmsAAAAAAAAAAAAAcJmGMeK6oaDjGgAAAAAAAAAAAECdU0rHdb1S5zqut2/frjFjxujIkSOKjIxUjx49NHr0aHXv3t12agAAAACAaqCdBwAAAAAA6lzH9bZt27Rt2zb396tWrdILL7ygSZMm6cUXX1RwcLC95AAAAAAAXqOdBwAAAACojlIGXNcrQbYT8FTbtm313HPPadOmTTp+/LiKioq0Y8cOPfroozLG6KWXXtLTTz9tO00AAAAAgIdo5wEAAAAAgDIOY+r+5O/z5s3TtGnT1KhRI2VmZio+Pt5nx84rOOuzY3mjxGXv1xLssBbaKpt35dj8kQdZCm7z5+20+PfVPNTe/UJFFn/oNn/fl0RG2Ateh3x7Mt9a7LbOo9ZiO6NircXecazQStyYpqFW4kp2nzmUX+yyFrtbo1xrsR2lTmuxs0NjrMWOcZy2EjfobJ6VuJLUKLabtdj+4q92XlFhgU+OUx3HiuycB6PC7I1YL7Z4Idi4kb2Wls023jmLP/NGlhqYpRbbeCEW/4lS4LR3fdMkxF7bNjIi3FrsuqTorJ3/aUpScN731mJvOdfCWuykw59ai/1Np9usxQ61dB5sG2HvPPR9ob3z7ze5RdZiXxfb1FrsEFNiLfaRIjuf8Q6X2Pt51zV/b325T44z6th/fHIc1EydGXF9IZMnT1bbtm1VUlKiVat
W2U4HAAAAAFBDtPMAAAAAAGhY6twzrisTHBysa6+9Vv/4xz+UmZl5wW1TUlKUmprq8bEfGDtO4x/+VU1TBAAAAAB4wdN2nrdtvPHjxupXDz/sixQBAAAAAJbxjOv6pV50XEtSaOj5aS9LSi48ZUROTo7S09M9Pu4tQ+xNswIAQG2JsDjV36kQe9MJB5XYm97LltjiHGuxS6LaWovtahJiLfaB0y2txb75t+9ai33g9fusxS50NbcSN/LIPitxJUn1cKpwybN2nrdtvKG3DalxXgAABLp3dh+3FvvOy+y18f5nw35rsXtf291a7I7frbMW+7XSHlbiJrfMthJXko4/9l/WYv8/JxKsxV78/860FnvYly9bi72u75NW4o5lqnCP2Xw0HXyv3nRc79q1S5IUFxd3we1iYmKUlJTk8XGjo6NrlBcAAAAAoHo8aefRxgMAAAAAoH6oFx3X//znP7V7925J0q233nrBbZOTk5WcnOzxsfMKztYoNwAAAACA9zxt53nbxisqLKhxbgAAAACAwMBU4fWLvXlBvbB7924lJydr+/bt5V53uVxaunSpxowZI0m644471KdPHxspAgAAAAC8QDsPAAAAAAD8VJ0Yce10OpWamqrU1FS1bNlSHTp0UKNGjbR//37l5uZKkm666Sa9/fbbljMFAAAAAHiCdh4AAAAAoKZ4xnX9Uic6ruPj4zVr1ix99dVX2rNnj/bv36+ioiK1bNlSQ4cO1ZgxY3TvvfcqODjYdqoAAAAAAA/QzgMAAAAAAD9VJzqumzdvrj/84Q+20wAAAAAA+AjtPAAAAABATfGM6/qlTnRcAwAAAAAAAAAAAMBP0XFdvwTZTgAAgProyJEjevvtt/XEE0/o+uuvV3h4uBwOhwYMGHDB/f79739r4cKFGjNmjDp37iyHwyGHw6ElS5bUSt4AAAAAgIpo4wEAAPgfI64BAPCDd999V0899ZTX+z3yyCPavn27HzICAAAAAFQXbTwAAAJTqWHIdX1CxzUAAH7QrFkzDR48WH369FGfPn20detWPf/88xfdLzExUd26dXPvN3HiRO3YsaMWMgYAAAAAVIU2HgAAgP/RcQ0AgB889NBDeuihh9zfZ2dne7TfypUry30fEhLi07wAAAAAAN6jjQcAQGDiGdf1C8+4BgAAAAAAAAAAAABYxYhrAAAAAAAAAAAAAHUOz7iuX+i4BgAAAAAAAAAAAFDnMFV4/cJU4QAAAAAAAAAAAAAAqxhxDQAAAAAAAAAAAKDOYarw+oWOawAA6qGUlBSlpqZ6vP29D4zT2Ice9mNGAAAAAIDq8raN133ICA345X1+zAgAAMD36LgGAKAeysnJUXp6usfbD7r1Nj9mAwAAAACoCW/beHFX9/NjNgAABA6ecV2/0HENAEA9FBMTo6SkJI+3b9Mm2o/ZAAAAAABqwts2XlSr1n7MBgAAwD/ouAYAoB5KTk5WcnKyx9sfP13ox2wAAAAAADXhbRvvr5u/82M2AAAEDp5xXb/QcQ0AAAAAAAAAAACgznHZTgA+FWQ7AQAAAAAAAAAAAABAw0bHNQAAfnDo0CG1atXKvfz+97+XJG3YsKHc6/PmzSu337x588qt3759uyTp8ccfL/f6oUOHav09AQAAAEBDRRsPAIDAVGqMTxYEBqYKBwDAD0pLS3Xy5MkKr5eUlJR7vbCw/LOlCwsLK90vPz9f+fn55Y4PAAAAAKgdtPEAAAD8j45rAAD8ID4+XqYad+rNnDlTM2fO9H1CAAAAAIBqo40HAEBgKmWwdL3CVOEAAAAAAAAAAAAAAKsYcQ0AAAAAAAAAAACgzuH51PULHdcAAAAAAAAAAAAA6hymCq9fmCocAAAAAAAAAAAAAALMqVOntGzZMk2ZMkUDBgxQZGSkHA6H4uPjfXL8rVu3atSoUYqOjlbjxo2VmJioSZMm6fjx4xfcz+l0av78+erZs6eaNGmiFi1aaODAgVq5cmWN8mHENQAAAAAAAAA
AAIA6p75PFb5u3TqNGjXKL8deuXKlRo8eLafTqdatW6t79+7KyMjQK6+8ouXLlystLU2JiYkV9isqKtItt9yitLQ0BQcHq3v37iooKNC6deu0bt06TZs2TXPmzKlWToy4BgAAAAAAAAAAAIAAEx4ern79+mny5MlaunSp/vKXv/jkuNnZ2XrggQfkdDo1ffp0ZWdna8uWLcrOztZtt92mnJwcjRo1SqaSGwOmTZumtLQ0JSQkaPfu3dq+fbv279+v9957T2FhYZo7d67ef//9auVFxzUAAAAAAAAAAACAOqfU+GYJVEOGDNHnn3+uF198UaNHj1b79u19ctz58+ersLBQ/fr103PPPadGjc5P0h0VFaV33nlHUVFR2rx5s1avXl1uv6NHj2rx4sWSpNdff11dunRxrxs2bJimTp0qSZo5c2a18qLjGgAAAAAAAAAAAAAaiBUrVkiSJkyYUGFdixYtNHLkSEnSsmXLyq1btWqViouL1blzZw0cOLDCvsnJyZKk9PR0HThwwOu86LgGAAAAAAAAAAAAUOeUGuOTpSE5dOiQsrOzJUn9+vWrdJubbrpJkrRx48Zyr5d9X7b+52JjY5WQkFDpvp6g4xoAAAAAAAAAAABAnVPfpwr3h3379kmSQkNDFRcXV+k2HTt2lCR98803cjqdFfYtW3+hfTMyMrzOrZHXewAAAAAAAAAAAABAPZGSkqLU1FSv9pkwYYJ7auy65IcffpB0fkpwh8NR6TYtW7aUJLlcLp0+fVqXXHJJuX3L1l9o39zcXK9zo+P6IqKahFdrv5SUFOXk5CgmJqbWP7QNMXZDfM/EJra3GluKWxM2Yzc0lzaLqPa+nPurF/uSyOr9zGseu+qLSn/HrsmFZ13+fXcNr94Z2BfvOXtpxecU1Vbs6vJFbGs17/IB1YxMzatNjSOaVGs/X/yO2lez3Nb1v8m6Fruuv+emFmNXF7GrFzvSYuy6Frcheqh3+2rvW5f/Lv58dw8rcWvCJ7HjuluL/etq7eWL2PHVjFzz2Nd+stZa7GJLcWvCJ7F7L7AWe2y19qLm1abFJssnx5k5c6bS09O92icnJ8cnsWtbUVGRpPMjrqsSFhbm/vrs2bPV2ven+3nMwC+SkpKMJJOUlETsehyX2MRuCLEb4nuGd/hsEpvY9SsusRtebHimoX4+GmLshvieiU3s+h4X3uGzWbuITez6HJfY1Ly6ZPHixSYpKcmrZfHixVUeb9KkSUaS10v//v0vmOf7779vJJkOHTpU+70uW7bMSDJt2rSpcpv//Oc/7pxOnDjhfv3yyy83ksyiRYuq3Peee+4xkszEiRO9zo0R1wAAAAAAAAAAAAAarOTkZJ+Ojm/atKl7em1vREVF+SyHqrRo0ULS+am8jTGVThdeNiV4UFCQmjVrVmHfsvWV+elU5N6i4xoAAAAAAAAAAAAAfGTWrFmaNWuW7TQqddlll0mSiouLdejQIbVvX/ERIwcOHJAkJSQkKCQkpNy+GzZs0P79+6s8ftm+ZXG8EeT1HgAAAAAAAAAAAACAOqd9+/Zq27atJOmLL76odJuy16+//vpyr1933XWSpLS0tEr3y87O1sGDB8tt6w06rgEAAAAAAAAAAACggbj77rslSampqRXW5ebmavny5ZKkkSNHlls3fPhwhYSEKDMzU2vXrq2wb0pKiiSpV69e6tSpk9d50XENAAAAAAAAAAAAAPXI6NGjFR8fr9/97ncV1k2ZMkXh4eFav369ZsyYodLSUklSXl6exowZo7y8PPXq1Ut33nlnuf3atGnjfhb4ww8/rIyMDPe6999/X/PmzZMkPfvss9XKmWdcAwAAAAAAAAAAAEAAatWqlftrp9MpSTp06FC51++99169+uqr5fY7cuSIvv32W504caLCMdu1a6e33npL9957r55//nmlpKSoXbt22rt3rwoKCtSmTRstW7ZMDoejwr7z5s3Tli1b9NVXX6l79+664oorlJ+f73629eT
JkzV8+PBqvVdGXAMAAAAAAAAAAABAADp58qR7OX36tCTJ5XKVe/3MmTNeH3fEiBHatGmTRowYIUnauXOnLr30Uk2cOFE7duyocqrv8PBwrVu3TnPmzNHll1+uffv26cSJE+rfv79WrFihF198sdrvlRHXAAAAAAAAAAAAABCAjDHV2m/dunUX3SYpKcn9PGtvhIaGatq0aZo2bVo1MqsaI64BAAAAAAAAAAAAAFbRcQ0AAAAAAAAAAAAAsIqOawAAAAAAAAAAAACAVTzj2k8mTJignJwcxcTEELsexyU2sRtC7Ib4nuEdPpvEJnb9ikvshhcbnmmon4+GGLshvmdiE7u+x4V3+GzWLmITuz7HJTY1D/CWw1T3id4AAAAAAAAAAAAAAPgAU4UDAAAAAAAAAAAAAKyi4xoAAAAAAAAAAAAAYBUd1wAAAAAAAAAAAAAAq+i49rG1a9fqjjvu0KWXXqrw8HB17dpV06dPV0FBgd9iHjlyRG+//baeeOIJXX/99QoPD5fD4dCAAQP8FlOSjDH68ssv9fvf/1433nijLrnkEoWEhOjSSy/Vrbfeqr/97W/y5yPUly9frgkTJqh3795q27atwsLCFBkZqaSkJE2fPl0nT570W+yf+9e//iWHwyGHw6H4+Hi/xpo5c6Y7VlXL4sWL/ZrDv/71L/3yl790/9yjo6PVt29f/dd//ZdKSkp8GisrK+ui77dsGT9+vE9jlzl58qSeeeYZ9ejRQ02bNlVoaKji4uJ0zz33KC0tzS8xyxQUFGjOnDlKSkpSZGSkmjZtqquuukrz5s1TcXFxtY9bk/OG0+nU/Pnz1bNnTzVp0kQtWrTQwIEDtXLlSr/G/ve//62FCxdqzJgx6ty5s/v3vmTJEo/iwveoedQ8f9a8QKh3EjWvtmqev+qdRM1DzVHvqHe08Xxb7yT7Na8+tvEkezWPeld/UPOoebTxaOP5Sn2sdzWJTc0DvGDgM6+88opxOBxGkomLizO9evUyYWFhRpLp1q2bOXnypF/iLly40EiqsPTv398v8cp8+umn5eIlJiaaq6++2rRs2dL92u23326Kior8Er9nz55GkgkLCzPx8fGmd+/epn379u7YrVu3Ntu2bfNL7J86c+ZMubgdOnTwa7xnn33W/f769u1b6fK///u/fontdDrN/fff736v7dq1M3369DGJiYkmNDTUSDJnzpzxacycnJwq32ffvn3N1Vdf7c4nNTXVp7GNMWbfvn0mJibGSDJBQUEmMTHRXHXVVSYyMtJIMg6Hw/zpT3/yeVxjjDl69Ki54oor3LGvuOIK07NnTxMcHGwkmd69e5vTp09X69jVPW+cPXvW3HjjjUaSCQ4ONj169DAdO3Z07z9t2jS/xS77m//58sYbb3j+xuEz1Dxqnr9rns16Zww1rzZrnj/rnTHUPNQM9Y56RxvP9/XOGLs1r7628YyxV/Ood/UDNY+aRxuPNp6v1Nd6V5PY1DzAc3Rc+8jmzZtNUFCQcTgcJiUlxbhcLmOMMdnZ2e4C9Mtf/tIvsV9//XUzePBg8/TTT5uVK1ea6dOn18oF3ieffGISEhLMyy+/bI4ePVpu3VtvveW+uJ06dapf4qempprPP//cFBcXl3t9x44d7sJ4+eWX+yX2Tz3++ONGkhk+fHit/lNj7Nixfo1TmV/96ldGkunTp49JT08vt66goMC89957FX4f/rZkyRIjyYSHh5u8vDyfH3/QoEFGkuncubPZvXu3+/WzZ8+ayZMnG0mmUaNGZt++fT6PPWTIECPJdO3a1WRmZrpfz8rKcl/sPPDAA9U6dnXPG0888YSRZBISEszevXvdr7/33nvuv/lVq1b5JfZdd91lRo8ebRYsWGDWr19vevTowQWeJdQ8al5t1Dyb9c4Yal5t1jx/1jtjqHmoPuod9Y42np16Z4x/a159beMZY6/mUe/qPmoeNY82Hm08X6qv9a4msal5gOfouPaRsuL+4IMPVli3b98+ExQ
UZCSZ7du3+z2XV199tVYu8PLy8i5YzF944QUjybRs2dKUlpb6NZef27Rpk/uupf/85z9+i/PVV1+ZoKAgM3z4cPPGG2/U639qrFmzxkgy8fHxNbojztcGDBhgJJn77rvP58c+ffq0+27jyu74dLlcplOnTkaSefXVV30ae8eOHe7P8IYNGyqs37Ztm7tRuWfPnhrH8+S8ceTIEfcdqGvWrKmwvuxCLSkpyeexK1PWeOYCr/ZR8yqi5vmezX9qUPNqr+bVdr0zhpoHz1HvKqLe+QdtvIr8VfMaUhvPGHs1j3pX91DzKqLm+R5tvIpo49Xteudp7MpQ84Cq8YxrH8jPz9eHH34oSZowYUKF9Z07d9agQYMknX9+SX3RrFkzhYSEVLl+6NChkqQffvhBx48fr620JEndunVzf11YWOiXGE6nU4888ogiIiL05z//2S8xAsmCBQskSZMnT1ZkZKTlbM7LysrS559/LkkaN26cz49/7tw59/OMOnbsWGG9w+Fwv+50On0au+wZM7GxsbrhhhsqrO/Zs6e6du0qY4z+/ve/+zR2VVatWqXi4mJ17txZAwcOrLA+OTlZkpSenq4DBw7USk6ofdS8ylHz6hdqXu3VvECsdxI1D9S7qlDv6pdArHeSf2sebbyKqHmg5lWOmle/BGLNo41HvQNQOTqufWDr1q06d+6cwsLCdM0111S6zU033SRJ2rhxY22mZtXZs2fdX4eHh9dq7LIC2bRpU3Xp0sUvMWbPnq1du3bp+eefV1xcnF9iXMj27ds1ZswYDRo0SMOHD9f06dO1e/duv8QqKirSxx9/LEkaPHiw/vOf/+jJJ5/UrbfeqjvvvFMzZszQt99+65fYF/Lmm2/KGKP27du7G1G+1KpVK/fv9ssvv6ywvqCgQNu2bZOkKv/2q+uHH36QdP4iryoXys0fys5fZeezn4uNjVVCQkK5bVH/UPMqR83zn9qsdxI1T6rdmheI9U6i5oF6VxXqnX/RxjvPnzWPNl5F1DxQ8ypHzfMf2njn0caj3gGoHB3XPrBv3z5JUvv27au8U6/s7qWMjIxay8u2pUuXSjp/F1WzZs38Hs/lcun777/XkiVL3HepzZkzR02bNvV5rD179uiPf/yjkpKS9Pjjj/v8+J7Ytm2bli5dqrVr12rVqlWaNWuWrrzySj311FMqLS31aazt27e777z74osv1KtXL7388sv65JNPtHr1aj3//PPq0qWL+3deG4wxevPNNyVJDz74oIKC/HM6mzNnjhwOh6ZMmaLXXntNR44cUWFhob7++msNGzZMR48e1f3336++ffv6NG7z5s0lSdnZ2VVuc/jwYUnS3r17fRq7KmXnusru0izTEM91DQ01r3LUPP+pzXonUfNqu+YFYr2TqHmg3lWFeudfDb2NJ9VOzaONVx41D9S8ylHz/Ic2Hm086h2AC6Hj2gfK7iJq2bJllduUrcvNza2VnGzbsmWLFi9eLEn6/e9/79dYL730khwOh4KDgxUbG6vx48crPj5eH3zwgR577DGfxzPG6JFHHpHT6VRKSoqCg4N9HuNC2rZtq+eee06bNm3S8ePHVVRUpB07dujRRx+VMUYvvfSSnn76aZ/GzMnJcX/92GOPqVevXvr666917tw5ZWZm6p577tG5c+c0duxYbd261aexq/L555/r4MGDkvwznU6Z++67T6tWrVK3bt30yCOPKCYmRk2aNNG1116rPXv2aNGiRXrrrbd8HrdPnz6Szl/kVXaX386dO90XUbV1XuFcB4nPQWWoef5ho95J1LzarnmBWO8kznXgM1AZ6p3/0Mb7UW3UPNp45XG+A5+Biqh5/kEb70e08ah3AKpGx7UPFBUVSZJCQ0Or3CYsLExS+Wlm6qujR4/ql7/8pUpKSnTXXXdp9OjRfo0XGxurvn376tprr1VMTIwcDoe2bdumt956S6dOnfJ5vEWLFmnDhg2aOHGievfu7fPjX8yECRM0ffp0XXPNNWr
VqpXCwsJ05ZVXatGiRZo7d64kaeHChcrKyvJZzPz8fPfXERER+uCDD9SnTx+FhoaqU6dOWrp0qa666io5nU698MILPot7IUuWLJF0fnqXC90p5wv79+/XsWPHFBQUpPj4ePXo0UMRERHKycnRkiVL/DKd0TXXXOO+0Bs3bpx27drlXpeZman77rvPfReqv5539HOc6yDxOfg5ap7/2Kh3EjWvtmteINY7iXMd+Az8HPXOv2jj/ai2ah5tvB9xvgOfgfKoef5DG+9HtPGodwCqRse1DzRu3FiSVFxcXOU2586dk1T7z0SpbXl5eRo6dKi+++47XX311e4i7E8jR45UWlqaNm7cqO+//17btm3Ttddeq6VLl2rgwIE+nWImOztbTz/9tGJjYzVr1iyfHddXJk+erLZt26qkpESrVq3y2XHLPuPS+QuOFi1alFsfFBSkp556SpL08ccfy+Vy+Sx2ZfLz87VixQp3Pv702GOP6amnnlKrVq20Z88eHTx4UNu3b9eJEyc0ZcoUbdq0SX379vXLs3D+9re/qW3btsrIyFCPHj3UqVMndenSRV27dlVmZqbuv/9+SVJkZKTPY1eGcx0kPgc/Rc2zx1/1TqLm2ah5gVbvJM514DPwU9Q7uxpKG0+qvZpHG688znfgM/Ajap49tPH8gzbejzjXAXUHHdc+UFbsyqabqEzZup8XxvokPz9ft912m7Zu3aru3bvro48+qpVnwPxcjx499M9//lOtWrXStm3b9O677/rs2I8//rhOnz6tV155pVYLq6eCg4N17bXXSjp/B5uv/PRz261bt0q3KXv9zJkzOnnypM9iV2bFihUqKChQRESERo4c6bc4O3bs0KJFixQSEqLly5frsssuc68LDw/XvHnzdPPNN+v06dOaPXu2z+N37txZW7du1W9/+1t17NhRhw8f1okTJ3TXXXdp8+bNuuKKKyRJ0dHRPo9dGc51kPgclKHm2eWveidR82zUvECrdxLnOvAZKEO9s6+htPGk2ql5tPEq4nwHPgPnUfPsoo3ne7TxyuNcB9QddFz7QNlJ/7vvvpPT6ax0mwMHDpTbtr4pLCzU7bffro0bN6pz58769NNPdckll1jLJzIyUv3795d0/rk0vpKeni5J+s1vfqPo6Ohyy6RJkyRJhw4dcr/25Zdf+iy2p8qmOykpKfHZMbt27Vrh+D/307sX/X1nYtkdryNGjPDrhXZaWpqMMercubM6dOhQ6Ta33nqrJGnz5s1+yaF169ZasGCBMjMzVVRUpJMnT2rFihXq3r27eyqfsul3/K3s/LV///4qt6nv5zpQ8yRqXqDUPH/UO4maZ6vmBVK9k6h5oN5J1LtAqXdSw2jjSbVT82zXO4mah8BDzaPmBUrNo43nW7ZrHvUOQHXRce0DvXr1UmhoqM6dO6evv/660m2++OILSdL1119fm6nViqKiIg0bNkzr169Xhw4d9Nlnn9Xq3VJVKbvI8fXFjnT+eTc/X06fPi3p/MVN2WsXmnrEX8qeGxIXF+ezY8bGxrovcL755ptKtykr7I0bN/brxf3Bgwe1fv16Sf6fTufMmTMeb1v2nJTa4nQ69eGHH0qShg8fXisxr7vuOknnL3wrk52drYMHD5bbFvUPNY+aFyg1zx/1TqLmeaI2a56NeidR80C9o94FTr2T6n8bT6q9mheo9U6i5sEeah41L1BqHm083wrUmke9A3AxdFz7QGRkpIYMGSJJSk1NrbA+MzNTa9askXT+Tqr6xOl06u6779Znn32m2NhYrVmzRu3atbOdln744QetW7dO0vkLcF/JysqSMabS5Y033pAkdejQwf3agAEDfBbbE//85z/dd6yV3THnK6NGjZJ0/hkllV00//Wvf5Uk9e/fX40aNfJp7J968803ZYxRfHy833++ZXfXZWZmVvmsl48//liS1KVLF7/m8nMLFizQ8ePHlZiYqGHDhtVKzOHDhyskJESZmZlau3ZthfUpKSmSzv/
NderUqVZyQu2j5lHzAqHm+bPeSdS8QKp5NuqdRM0D9Y56Fxj1TmoYbTyp9mpeoNY7iZoHe6h51LxAqHm08XwvUGse9Q7ARRn4xNdff20cDodxOBwmJSXFuFwuY4wx33//vbn66quNJPOLX/yiVnJ59dVXjSTTv39/v8YpKSkxI0aMMJJMdHS0ycjI8Gu8n1q3bp15/vnnzcGDByus27Jli+ndu7eRZGJjY82ZM2dqJac33njDSDIdOnTwW4xdu3aZCRMmmG3btpV7vbS01LzzzjumWbNmRpK54447fB772LFjJioqykgyycnJ5uzZs8YYY1wul3n55ZeNJONwOMzatWt9HruMy+UyCQkJRpJ59tln/RanTH5+vmndurWRZPr06VPuM15YWGimTJliJBlJ5n//9399Hv+LL74wH374oSkpKSkX949//KMJCgoywcHBZs2aNT6J5el5Y+LEiUaSSUhIMHv37nW/vmrVKhMWFlatn0V1z1ll59Y33njDq/1Qc9Q8ap6/a57NemcMNa+2a15t1jtjqHnwHPWOekcbz//1rixebdW8htTGM8ZezaPe1T3UPGoebTzaeL7UUOqdN7F/jpoHVI2Oax9auHChcTgcRpJp166d6dWrl/uE16VLF3P8+HG/xP3uu+/MJZdc4l6aNGliJJlGjRqVe33u3Lk+jfvOO++4i1t8fLzp27dvlUt6erpPY//jH/9wx46OjjZXX321ueaaa0xMTIz79djYWLN161afxr2Q2vinxtatW93vr2XLlqZXr16mT58+pkWLFu7Xb7rpJpObm+uX+J988okJDw83kkxUVJTp06eP+2fucDjM/Pnz/RK3zNq1a92xvvnmG7/GKvPJJ5+4/6aCgoJMQkKC6dGjh4mIiHD/zB977DG/xF64cKGRZCIiIkz37t1Nr1693HEjIiLMsmXLqn3s6p43CgsLzfXXX28kmeDgYNOzZ0/TsWNH989i8uTJfos9d+7ccusbNWpkJJmmTZuWe/27776r9s8FnqPmUfP8WfNs1ztjqHm1WfP8We+MoeahZqh31DvaeP6td8bUfs2rr208Y+zVPOpd/UDNo+bRxqON5yv1td7VJDY1D/AcHdc+9umnn5qhQ4eali1bmrCwMHPZZZeZZ555xq93xx08eNB9cr3Q4us7ucouaDxZfH232tGjR82f/vQnM2zYMNOxY0cTGRlpQkJCTOvWrc3AgQPNn/70J3P69GmfxryY2vinRm5urpk1a5a5/fbbTWJiovt9t2nTxgwdOtS8/fbb5e5k84d9+/aZcePGmbi4OBMSEmJatWplhg0bZtatW+fXuMYYM3bsWCP5/67bnztw4ICZOHGi6dq1qwkPDzchISEmJibGDB8+3Kxevdpvcbdu3WrGjx9vunTpYiIjI014eLi57LLLzKRJk0xWVlaNjl2T88a5c+fMnDlzzJVXXmnCw8NNVFSU6d+/v1mxYoVfYz/77LMe7VfZHcvwD2oeNc9fNS8Q6p0x1Lzaqnn+rHfGUPNQc9Q76h1tPP+yUfPqYxvPGHs1j3pXf1DzqHm08fyHNl7dr3c1iU3NAzznMMYYAQAAAAAAAAAAAABgSZDtBAAAAAAAAAAAAAAADRsd1wAAAAAAAAAAAAAAq+i4BgAAAAAAAAAAAABYRcc1AAAAAAAAAAAAAMAqOq4BAAAAAAAAAAAAAFbRcQ0AAAAAAAAAAAAAsIqOawAAAAAAAAAAAACAVXRcAwAAAAAAAAAAAACsouMaAAAAAAAAAAAAAGAVHdcAAAAAAAAAAAAAAKvouAZQK7KysuRwOORwOJSVlWU7HQAA/IaaBwBoKKh5AICGgHoHALWHjmsgAMycOdN98XMxP71QWrJkif+TAwDAh6h5AICGgpoHAGgIqHcAAF+i4xoAAAAAAAAAAAAAYBUd1wAAAAAAAAAAAAAAq+i4BgAAAAAAAAAAAABYRcc1UA/t2rVLEyZMUOfOnRUREaGmTZuqR48e+sM
f/qATJ05Uuo/T6dSqVas0YcIE9e7dWzExMQoNDVXr1q01ZMgQLV26VMaYC8bNzs5WcnKy2rVrp7CwMMXFxWn8+PHav3+/P94mAADUPABAg0HNAwA0BNQ7AGjgDADrnn32WSPJePInefDgQfe2b7zxRoX1c+fONUFBQe5tIiIiTGhoqPv7mJgYk56eXmG/tWvXureRZJo1a2YiIyPLvTZy5EhTWlpaaV5btmwxLVq0cG8bHh5umjZt6j7W3//+d/e6gwcPevsjAgDUE9Q8AEBDQc0DADQE1DsAgC8x4hqoR15//XVNmzZNEREReuGFF5STk6OCggIVFhZq8+bNGjRokHJycjRs2DDl5+eX2zciIkLJycn65JNPlJeXp7y8PJ0+fVonT57Uyy+/rGbNmmn58uX685//XCHumTNndNdddyk3N1ft27fXxx9/rIKCAp05c0Zffvml2rVrp+Tk5Nr6MQAAGgBqHgCgoaDmAQAaAuodAEASI66BQPDTOxPbtGlzwaVVq1aV3pl4+vRp07x5cyPJfPjhh5XGcTqd5uqrrzaSzMKFC73Kcfny5UaS6dixY4V1c+fONZJMaGio+c9//lNhfU5OTrm7FrkzEQAaLmoeAKChoOYBABoC6h0AwJcYcQ0EmKNHj15wqepZLv/3//5fnTp1Sr169dKQIUMq3aZRo0a69957JUkfffSRV3ndfvvtkqQDBw7oyJEj5da9++67kqSRI0eqW7duFfaNjo7Wo48+6lU8AED9R80DADQU1DwAQENAvQMA1FQj2wkAKM8Yc8H1WVlZSkhIqPD6hg0bJEl79uxRdHR0lfufPXtWkvTtt99WWHfmzBktXrxYq1ev1p49e3Tq1Ck5nc4K2x0+fNgdo7i4WDt37pQkDRo0qMq4gwYN0uzZsy/wzgAADQ01DwDQUFDzAAANAfUOAFBTdFwD9cT3338vSSoqKlJRUdFFty8sLCz3/b59+3TzzTfr8OHD7tciIiLUvHlzBQWdn5zh6NGjkqSCggL3Nj/88INKSkokSbGxsVXGi4uL8/CdAABwYdQ8AEBDQc0DADQE1DsAQBmmCgfqidLSUknSqFGjZIy56JKVlVVu//Hjx+vw4cOKj4/X8uXLdfLkSRUUFOjYsWM6cuSIsrOz3dte7O5JAAD8iZoHAGgoqHkAgIaAegcAKMOIa6CeKJviprKpci7m0KFD+vLLLyVJS5cu1XXXXVdhm58//6VMy5YtFRwcrNLS0nIXgT93oXUAAHiDmgcAaCioeQCAhoB6BwAow4hroJ7o27evJGnLli3Kycnxat9Dhw65v+7Vq1el23z66aeVvh4aGqoePXpIktauXVtljDVr1niVEwAAVaHmAQAaCmoeAKAhoN4BAMrQcQ3UEyNHjlTz5s3ldDr129/+9oLT3rhcLp06dcr9fVRUlPvr7du3V9j+zJkzmjVrVpXHGzVqlCRp+fLlysjIqLD+2LFjWrx4sSdvAwCAi6LmAQAaCmoeAKAhoN4BAMrQcQ3UE82bN9dLL70kSXr33Xd1++23a9OmTXK5XJLOX9Tt2bNHCxYsUPfu3bV69Wr3vt26dVP79u0lSQ899JC2bNniXvfVV19pwIABys3NrTL2r3/9a8XFxencuXO67bbb9Nlnn7kvMDdt2qTBgwe78wAAoKaoeQCAhoKaBwBoCKh3AIAyPOMaqEfGjh2rs2fPatKkSfrggw/0wQcfKCwsTE2bNtXp06fldDrd2zocDvfXQUFB+stf/qK77rpLu3fvVu/evRURESFJKiwsVJMmTfTee+9p8ODBlcZt1qyZ/vGPf+iWW25RVlaWBg8erIiICAUFBSk/P1+RkZF67bXX3HcwAgBQU9Q8AEBDQc0DADQE1DsAgMSIa6DeefTRR5WRkaHf/e536tmzp8LCwnTq1Ck1bdpUvXv31uOPP65PPvlE9957b7n97rjjDq1fv1633367mjdvrpKSErVq1Urjx4/Xli1bdPPNN18wbu/
evbVjxw796le/UmxsrEpKShQVFaWxY8cqPT1d11xzjT/fNgCgAaLmAQAaCmoeAKAhoN4BABzmQg+MAAAAAAAAAAAAAADAzxhxDQAAAAAAAAAAAACwio5rAAAAAAAAAAAAAIBVdFwDAAAAAAAAAAAAAKyi4xoAAAAAAAAAAAAAYBUd1wAAAAAAAAAAAAAAq+i4BgAAAAAAAAAAAABYRcc1AAAAAAAAAAAAAMAqOq4BAAAAAAAAAAAAAFbRcQ0AAAAAAAAAAAAAsIqOawAAAAAAAAAAAACAVXRcAwAAAAAAAAAAAACsouMaAAAAAAAAAAAAAGAVHdcAAAAAAAAAAAAAAKvouAYAAAAAAAAAAAAAWPX/Abqv/fyq+2jHAAAAAElFTkSuQmCC", + "image/png": "iVBORw0KGgoAAAANSUhEUgAAA8YAAAI2CAYAAACSdJFsAAAAOnRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjEwLjAsIGh0dHBzOi8vbWF0cGxvdGxpYi5vcmcvlHJYcgAAAAlwSFlzAAAPYQAAD2EBqD+naQAAjt5JREFUeJzt3QeYFFX28OEzmQGGnHMGRcmgJAmiwKqoCAomkoJrAnNEXMGEWdxVMKF8igLiiqgoCEhQRLKAREkiIBkmwYT+nnPd6v8M0z1Mp+r0e5+n6KYr3NthbtWpm2IcDodDAAAAAACIUrHBzgAAAAAAAMFEYAwAAAAAiGoExgAAAACAqEZgDAAAAACIagTGAAAAAICoRmAMAAAAAIhqBMYAAAAAgKhGYAwAAAAAiGoExgAAAACAqEZgDKBQMTExZlm4cGGwswIACDLOCQAiFYFxhMvJyZFp06bJzTffLI0aNZIyZcpIYmKiVKpUSTp16iSPPPKIrF+/PtjZDAmDBw92nvDPttSpUyfY2Y16Xbt2LfL3deai37VFv0tP93Hlm2++keHDh0vTpk2lXLlykpCQIOXLl5d27drJqFGj5Oeff3a536ZNm+S9996TO+64Q9q3by/Fixd3pgn4G+eEouOcEF4CeU6wyvP69evL5ZdfLk888YSsWbPGo/zxtweEvvhgZwCBs2zZMhk0aJBs2bLF+ZoW7ikpKXL48GFZunSpWZ577jnp27evTJ061RTS0S42NlYqVqxY6DZnW4/A0+CzcuXKBV4/ffq0HD161DwvW7asy9906dKlC7xWrFgxl68Xto/Sv68bbrhBVqxY4XwtLi7ObH/8+HH55ZdfzPLaa69Jt27dzIVRhQoVnNvedttt8sMPPxThHQO+4ZzgHc4J4SGQ5wSHwyEnTpyQI0eOyO+//y5fffWVjB07Vi688EKZOHGiNGvWrNC88bcHhAkHItKsWbMcSUlJDv2Ky5cv73j22WcdW7Zsca7Pzs52/PLLL46HH37YUapUKbPd0aNHHdFs0KBB5nOoXbt2sLMSUvQz0WXBggWOcKD59CTP+n3rtvr9e2r58uWOMmXKmP1LlCjheOSRRxxr16515ObmmvU5OTmODRs2OJ5++mlH5cqVzXarV6/Od4yLL77Yce655zpuvPFGx8svv+y49957nfkH/IVzguc4J7gWzeeE48ePO+bPn++49dZbnX9PiYmJji+//NLt8fjbA8IHNcYRaOvWrXLjjTfKqVOn5Nxzz5Vvv/1WatSokW8brdFq06aNWR544AEZOnRo0PILhCO9y6939o8dOybVqlWT7777zjSjPrOmSf8GddHm1Pfcc0+BJtL696l/j5bJkyfb9h4QHTgnAP5RqlQp0/JHlzvvvNM0q96zZ48MGDBAVq9eLQ0bNsy3PX97QHihj3EEevzxx02TH20G9PnnnxcohF01P/rvf/+brymRXpzn7Te1YMECueqqq6Rq1aqmED+zv+X27dvln//8pzkpJCcnm5NHq1at5KmnnjJ5ceePP/4wwYIGFCVKlJCkpCQTZLRu3dq8rk1Qz6RNorR/jx5f09HmRlWqVDFN
mbRZ6vfffy/BHpDk5MmT5nto0qSJ+Ty0b5KeQN31M81LAyw9ydauXdvsq9+Pvre77rpLfvrpJ5f77N+/35xQrc9RF33+4IMPyoEDBwpNTz9P3Vf7TulvRr/j/v37y8qVK4v0vnNzc+Wjjz6Sf/zjH6YZm34f2qzw0ksvNc3B/q5gKMjqx6W/tdTUVPOdnn/++aZpmb6+c+dOCWXjx483v1+l7/PMoPhM2ndYm9zpe8wrb1AMBALnBM4JnBP8T7+DGTNmmLylpaWZ33Yg/vYA2CjYVdbwr/379ztiY2NNU5xhw4Z5fZz333/f2YTs1VdfdcTExJj/ly5d2pGQkJCvidGnn37qbCakS0pKSr7/16xZ07Fx48YCaaxZs8ZRtmxZ53ZxcXHm/1Zarpoy7dmzx1GrVi3nen2vuo/ua73WpUuXoDSbs9L/+OOPHQ0aNDDPixUr5ihevLhznTa5+vbbb13un5aW5ujfv79zW+uz1M/c+n/z5s0L7Ldw4UJnc16rSa8u1v/181m8eLHLNHfs2OFsNmblz2rKpc+/+OKLQpugHT582HHRRRfly3Pe/OrSp08fx6lTpwrsa6X74osvOho1auRM03ovmrdQbUqdlZXlfJ/aFNqfrL89imf4A+cEzgmcEwJ7TrjsssvM9vobT09P9/vfHgD7cOUVYaZOneo8AcyePdvniyA9iesFxuDBgx27d+929ofZtm2beb5y5UpzUaTbduzY0bFu3Tpn30rtV1O1alWzrn79+o6TJ0/mS0MDCl3XqlUrx08//eTsl6knTO1/oyfH8ePH59tHTy66T506dRzz5s0zebHytHPnTsebb77peOihh4J6EaQXHdpnVPsh6eeg70v7ojZu3Nh5fH39TNdee63zwk7fg17wWQ4ePOj46KOPHLfddlu+ffQ7sS4aNM0lS5Y41y1atMiZZrly5Rx//PFHvn31M2vTpo0zz9OmTTMBn9J+sZ07d853cXXmBYXurxecuq5Fixamj5VeyKnU1FTHBx984KhUqZJZP2rUKLcXHyVLlnRUqVLF8fnnnztOnz5t1ul7t44VioGx/l6tNN544w2HPxEYw584J3BOsHBOCMw54T//+Y/z+Pod+/tvD4B9uPKKMI8//rizIN67d69fLs779u3rdrtevXqZbfRuuKuT1qpVqxzx8fFmmxdeeCHfuuTkZPP6jz/+WOR8nXPOOc478P5mXQTpRYgOlFTYcuZ7UdbnVbFiRceBAwcKrNcLRGubvBcrSi/orHV6ki0qvSiyLmL27dtXYL1eTFh3+++4445867RWx0pT0z+Tfp968eruguLDDz80rzdp0sRx7Ngxl/lbsWKFqe3Ru/5nfibWxYdeZOvvxF+8vQjSC3533/eZtcLvvPOOM42lS5c6/InAGP7EOcF7nBOi+5xQ1MBYzwHW8d9++22//+0BsA99jCNwQKC8fVX8QefWc0UHHdKBJJT2R9I+lGdq2bKlGaBIad+ivHQOP7Vv374i58WbfTyl/aO0D1Zhi/Z/ckfnstV5Cc+kfaXq1q1rnq9bty7fOp3HVp133nmmX15R6HWXTv2jtB+d9qk7k/Zn0nXqk08+ybfO+n/Hjh3l4osvLrCvfp/aH82dd9991zxqft31h9J+gdqvTafL0D6JrvTq1cv8ToItMzPT7fd96NChgP+dAYHAOcF3nBOi85xQVHn/rnQ6JwvnCSD8EBijUDrQhw5o4sqqVaucg2j06NHD7TEuueQS54k/KyvL+boOPKJ0br/77rvPzOWanp5eaH6sfR5++GFzsTFnzpxCB3Lxhg5w8r/WFG6XJ5980u3+F1xwgdt1OojMmSdP9eOPP+Z7f0WxY8cO53GK8vnrSVr3sVjz7nbv3t3tvu7W5eTkmHkZlX4WegHmbtm8ebPZbteuXS6PpRdhoUB/h+6+7zVr1gQ7e0BI4JzAOSFazgkAog+BcYTRkS4tZ55ovT2eTjnjyl9//eV8Xr16dbfHsEZhzM7O
zpcnHdVXpzzQO+0vv/yydO3a1YwoqlMWjBkzRvbu3VvgWFoLce2115qLqbffflt69+5tagz0zruus064ebk7OY8cOVICQUfQdCc+/u8Z0vJeDFojiFoXYEXl6ed/5j7W86Lum5d+jzr9hDWCaWE1KdZ7dXeB66omJdr+zoBA4ZzAOeFMnBP8K+9vOO/fG+cJIPwQGEeYvFPG6Jx6vgrkVDJ68TJ//nxZvHixaZ6ld4n1IkGnhNBpD3SajzOb2iUkJMinn35qavB0Kge9e63Nu9avXy8vvviief8vvfRSvn3cnZyPHz8uoeLMuW1DndYOWL755puz1qYUVqMSjtMV+fvvDAgUzgmcE+wQzeeEtWvXOp/rFFcWzhNA+CEwjjB6t926m69z5gVS3ru61nyurljr9ALHVT+bTp06yfPPPy9LliwxfdS++OILc7c/IyPDTHTvas7F5s2by7/+9S8zP6XuM2/ePLnooovMyVlrCfKeqNydlHWuxFBh9QVz17TMH5//mftYz13VwljcrdM74VZNhyd5jhRag2X1oQv03xngC84JnBPOxDnBv7766ivzqHNuX3jhhUH52wPgHwTGEaZy5cpyzTXXmOcff/yxbNmypcj7Wn3Dikr7mVmFvl6MuKMXKNaFi97dL0yxYsWkT58+MnPmTOeASHpxVBg9GetAIXpy0hOTvg8rzXDRoUMH8/jll18WeR8dtMW6qCzK568XLtZAL1Zwp9wNgKK09sYV/R7btWvncZ4jhf7mtD+j9dkvWrTIo4F8ALtwTuCccCbOCf6zfPly+frrr83zAQMGmN9rMP72APgHgXEEGjdunJQsWdLcXdfRPwu7+2v1B9LC29NmZNrsrWfPnub5Cy+84LK/kN6l/+yzz8zzgQMHOl/XvmWFBQg6wIslb382qw+TK3oBZDXBctcHLlQNGzbMPG7YsEHefPPNIje1u+6668zziRMnOvuk5fXnn3+adWd+/sraVy8yFy5cWGBf/f3o9+qOFRjqRYF1YeBOJPav0qae1sA5+tnqd1cY/Txvv/12+fXXX23KIfA3zgmcEyycE/xHy/J+/fqZILZEiRIyevTooP3tAfATG6eGgo0+//xzM0+gfsUVKlRwPPfcc46tW7c612dnZ5t5AkePHu0oU6aM2e7o0aMF5qzU+fwKs3LlSkdCQoLZtlOnTmZeRpWTk+P46quvHNWqVTPrdO7DkydPOvfbsWOHo169eo6xY8eafGRlZTnXrV271tG1a1ezX4kSJRyHDx92rtP5Ih9++GHHTz/95MjMzHS+ru+tf//+zjknN2zY4PWclWd7z+4UZZ7ELl26mG3GjBlTYN2AAQOc+df3qPNNWg4ePGjmRxw6dGi+fXQb6/tr2rRpvjl1dV5Ma47PcuXKOf744498++pn3qpVK+f6GTNmmN+F2rhxo8mrdWxX70u37dGjh1mnvzX9LvPO1ZiamuqYP3++4/bbb3eULl3a7VyR+lvzp0DPWZmX/g6tOUH1t/rII484fv31V0dubq5Zr4+//fab4/nnn3dUrVrVbLd69ep8x9DfsX6/1jJhwgRn/vO+rov+XQHe4JzAOYFzgu/nhBMnTphjDB8+3FGsWDHne/36668D9rcHwD4ExhFMT4INGjRwnhCsAlxPeHqitV6LiYlxDBw40HH69GmPL4LUJ5984iz0ddFAwTph6FKzZk1zUs1LL4Ly5isuLs7kK+9x9Pn06dPz7Zd3H30PZcuWzZeWvpdXXnnFq8/LugjS4+rF1tmW3bt3+/UiKC0tzdG3b99871E/S72AsP7fvHnzAvstXLgw3zZ64aiL9X890S5atMhlfrZv326+H2vbpKQk57H08//iiy8KfV/Hjx93XH755QXyrGnqd2G9Fh8fH5YXQUWhv23rYjLv+9Xfsz7mfb1nz56OQ4cO5dvf+lsryqJ/N4C3OCd4hnNCdJ8T9HdkfbeVKlVyFC9evECZ3KFDB3MzNJB/ewDs8/dICYhI
OqLnpk2bZPr06TJ79mz5+eefzXQMJ0+eNP2QmjRpIl26dJGbbrpJGjdu7HU62vyqdevWZgRQ7bukA3tof6MWLVrI1VdfLaNGjTJTbuSl00HMmjXL9GX66aefzD6aN+0b1qBBAzNohU6doaOQ5vXdd9+ZfbSp1+7du52DsOg+nTt3ljvuuMPkxRfanM/V4C6FjcLpDzqSqjYx1H5x7777rvm+Dh06ZKb6aNasmZm65IYbbiiwn36Hv/32mxl5VZuv7dy50zSpO+ecc+Syyy4z84FaA7mcqV69emY016efftoMDqLNvLSPlM5zqfOCnu2z1O9V+5PpKKQffPCB+S71s9NrQv2Ozz33XPNd6nQqkUo/Zx01V783/QyXLl1qmjDqXKr6+egopTqY0I033uh2/lfADpwTvMM5ITrPCdqfXRelv0P93PVz089Ry3Jt8qz95EPpbw+Ab2I0OvbxGAAAAACAEKA36OfOnSu//PKLWfSGl97o0RswrsYQ8MSCBQvMjTe9waPzzut86/379zc3z7S/vTu67XPPPSczZswwo9dr//sLLrhA7r//fnOjLxQQGAMAAABAhHj11VflnnvuKfC6r4HxhAkTTOsdDR9r1KghFStWlI0bN5qBELU1hbbecTUNn7Z20dZzmzdvNgMjauuRgwcPmtZB2qLljTfeMAOUBlt4DdMIAAAAACi0W0OPHj3kkUceMdPduRo13VMrV640XWGUjm6v3VdWrVolv//+u+lmoV04br31Vrcj7WtQrNvp9rqf7q/H0SD77rvvNrXawUaNMQAAAABEKK2Rveuuu3yqMb7qqqvkiy++kJtvvtmMIZDX1q1bTV95HZNBp+XTcRAsq1evds5zr8GxjgGRlx5vypQpZkozazq/YKHGGAAAAADgtn/wnDlz8s1ZnpcOjNi9e3fzXAeZy0v7FCtdf2ZQrEaMGGEedbDAtLQ0CSYCYwAAAACAS1rrq/2ItX9wu3btXG6jMwGoZcuW5Xvd+v9FF13kcj89nh5XBwcLdnNqAmMAAAAAgEtbtmwxj7Vq1TLT77mi01MqbS7tal9r/Zn0eDVr1nS5r92YxxgAAAAAgkQHoZo0aZJH+2iTZqsZcqAdOXLEPLoacdpirTt69Kjf9rUbgXGApKZn2JJOvOTakk56TkzA0yiZmy52cMTa87OPybLn/cSlHQ54GjmlqoodchOSbUnnxz/t+W5+2hX4An5Ym+pihyql3c9NCPvL/xybhs08bVNCJRPtacCWkRX4c2Zygj3vJcamsVNjck7bko4jLjHgacTkZosdPvrNnov7vk0q2JJOmZLFbUkn3CW2HOr1vo9eWcuM1OyJffv2iV0yMzPNY2Ki+79TbQ6tMjIy/Lav3QiMAQAAACBIqlatakZu9nQfuxQrVsw8nj7t/kaZ9kFWycnJBfZNT0/3al+7ERgDAAAAgA9iYuO83lebRNvVLNobZcuWzdcs2hVrnbVt3n01MPZmX7sx+BYAAAAAwKVGjRqZx927d0tWVpbLbbZv355v2zP33bZtm8v99Hh6XFf72o3AGAAAAAB8rDH2dgl1LVu2NH2Etcnz8uXLXW6zePFi89i+fft8r1944YX51p9Jj6fNrLXJdYsWLSSYCIwBAAAAAC6lpKRIz549zXNXo2dv3bpV5s+fb57369cv3zrr/wsWLHBZa6wjcqvevXtLyZIlJZgIjAEAAAAgymuMO3XqJHXq1JFXX321wLrRo0dLTEyMTJkyxQTHjv+NjK+jYw8cOFByc3PlqquukubNm+fbTwcVu/zyyyUnJ0cGDBjgHE1b99fj6PFiY2Pl8ccfl2Bj8C0AAAAA8EEoBbh79uwxzZ/PnDJp6dKlUqHC/03z9eCDD5rF8scff8iuXbvk2LFjBY7Ztm1befnll+Xee+81A4WNGzfOHGvjxo2miXXjxo3l7bffdpmf9957Tzp27CgrV66UunXryrnnniuHDh0y+dRgWwNxT0flDgRqjAEAAAAg
Qmjt7OHDh51LWlqaeT07Ozvf6zpatCdGjRolc+fONc2e9ZgaFNeuXVseffRRWbFiRb6gO6+KFSuaoFi30+11P91fj/P999/LXXfdJaEgqmqMtW37Sy+9JD///LOkpqaaL6Z///7y8MMPS4kSJYKdPQBAAFD2AwACLSYudGqMtTm01dTZEzt37jzrNhdffLFZvOmn/PTTT5slVEVNjfGECRPMl/jVV1+ZUc/OOecc8+VrMwBtGlDY3FoAgPBE2Q8AAIoiKgJjrbrXqn9r5DOdK2vVqlXy+++/S+vWreW3336TW2+9NdjZBAD4EWU/AMAusbFxXi8IDVERGI8dO9aMlHbTTTfJ8OHDTSdvVa1aNZk6daoZCW3mzJmybt26YGcVAOAnlP0AAKCoIj4w1v5kc+bMMc/1wuhMDRs2lO7du5vn06dPtz1/AAD/o+wHANgpEqZrinYRHxivXr3aDCGelJQk7dq1c7lN586dzeOyZctszh0AIBAo+wEAdiIwDn8RHxhv2bLFPNaqVUsSEhJcblO/fn3zuHnzZlvzBgAIDMp+AADgiYifrskacbRcuXJut7HWHT161LZ8AQACh7IfAGCnmNiIr2+MeBH/DWZmZprHxMREt9toUzuVkZFhW74AAIFD2Q8AADwR8TXGOm+lOn36tNtttB+aSk5OdruNTvUxadKkIqc7aPBgGTrsFo/yCgAIrbJfUf4DAM6GvsLhL+ID47Jly+ZrVueKtc7a1pV9+/aZ+S+LqmevXh7lEwAQemW/ovwHACDyRXxg3KhRI/O4e/duycrKcjkIy/bt2/Nt60rVqlWlVatWRU63SpUqXuUXABA6Zb+i/AcAnA01xuEv4gPjli1bmj5m2mRu+fLl0rFjxwLbLF682Dy2b9/e7XFGjBhhlqJKTafPGgCEe9mvKP8BAGdDYBz+In7wrZSUFOnZs6d57qqP2NatW2X+/Pnmeb9+/WzPHwDA/yj7AQCAJyI+MFajR4+WmJgYmTJlirlAcjgczn5jAwcOlNzcXLnqqqukefPmwc4qAMBPKPsBAHaJiYvzekFoiIrAuG3btvLyyy+b59ocrnbt2qa/WN26dWXlypXSuHFjefvtt4OdTQCAH1H2AwCAooqKwFiNGjVK5s6dK71795a0tDTZuHGjuUh69NFHZcWKFVKhQoVgZxEA4GeU/QAAu/oYe7sgNET84Ft5XXzxxWYBAEQPyn4AAHA2URUYAwAAAIC/UfMb/giMAQAAAMAHsQTGYS9q+hgDAAAAAOAKNcYAAAAA4AOaUoc/aowBAAAAAFGNGmMAAAAA8AE1xuGPGmMAAAAAQFSjxhgAAAAAfECNcfgjMAYAAAAAHxAYhz+aUgMAAAAAoho1xgAAAADgA2qMwx+BcYAUO7bblnQyy9SyJZ3iNvxSMnJL2PPdSLYt6Uhuri3JZFVoEPA0TmY5xA6lcrNsSadVFXt+a53KnAp4GrmJNPwJNXb8tSTF2FO+FIu1qbzMsed3HBcb+JNZjMOe8tKmYlkSc3PsSSg28L/pnNgEscOAppVsSScuJ/DnGCCaEBgDAAAAgA9i4qgxDndUNQAAAAAAoho1xgAAAADgA/oYhz8CYwAAAADwAYFx+KMpNQAAAAAgqlFjDAAAAAA+oMY4/FFjDAAAAACIalERGO/fv1+mTJkid999t7Rv316Sk5MlJiZGunbtGuysAQAChLIfAGCX2NgYrxeEhqhoSv3JJ5/IPffcE+xsAABsRNkPAACKKipqjEuVKiU9evSQRx55RGbOnCmjR48OdpYAAAFG2Q8AsEtMbIzXS6AsWLBALr/8cqlYsaJpNdWkSRNzLkxLS/PoOAsXLjQtroqy/Otf/yqw/9n2qVKlioSCqKgxHjp0qFkse/fuDWp+AACBR9kPALCLBnihZMKECTJy5EhxOBxSo0YNqVmzpmzcuFHGjRsnn332mSxZskTKlStXpGOVLl1aOnbs6Hb98ePHZf369eZ5hw4d3G7Xpk0b
SUpKKvB6+fLlJRRERWAMAAAAANFg5cqVMmrUKPN84sSJcuutt5rA/c8//5Q+ffqY9fqaBshF0bJlSxNIu6O1xBoYa/B98cUXu91u+vTpUqdOHQlVUdGUGgAAAACiYfCtsWPHSm5urtx0000yfPhwZ212tWrVZOrUqRIbG2u6GK1bt87ntBwOh3z44Yfm+c0332yOHa7CN+cAAAAAAKfU1FSZM2eOea5B8ZkaNmwo3bt3d9bg+mrRokXy+++/m+eDBw+WcEZTagAAAADwQSAH0fLE6tWr5dSpU6Yvb7t27Vxu07lzZ5k3b54sW7bM5/QmT55sHjt16iQNGjQ4a022NufOzs6W6tWrmwD9uuuuc9nvOBgIjAEAAAAgAmzZssU81qpVSxISElxuU79+ffO4efNmn9JKS0uTGTNmFLm2+L333sv3/w8++EDGjBlj+jq3atVKgo3AuIi04/qkSZOKvP2wa/vI8JsGBDRPAIDQK/9vHjxYhg67JaB5AgBETo2xp+cZq5n0iBEjCrx+5MgR81jYiNPWuqNHj4ovpk+fbppuFy9eXK699lq321155ZWmv3Pz5s3NCNm6j9ZYP/bYY6YZ9qWXXmpqunXwrmAiMC6iffv2yapVq4q8/WVd2wc0PwCA0Cz/L+3VK6D5AQCEnlgfpmvy9Dxj7eNKZmameUxMTHS7r9V0OSMjQ/zRjPqaa66RlJQUt9v997//zff/YsWKyYABA6RHjx7SunVr2b17txnZ+p133pFgIjAuoqpVq3pUxV+lUsWA5gcAEKLlf5UqAc0PACC6zzPWPq5o0KlOnz7tdl/tg6ySk5PFWzt27DADb/ky6FaFChXkkUcekX/+85/y+eefy9tvvx3U+aAJjItImyq4aq7gTvafvrXZBwCEZ/l/Mt23O/AAgOhqSu3peaYwZcuWzdek2hVrnbWtNz744AMzVVPt2rWlW7duXh+nQ4cOzjzpUr58eQkWpmsCAAAAgAjQqFEj86jNk7Oyslxus3379nzb+jJ38aBBg3yq5c3b5FtHqw4mAmMAAAAA8LHG2NvFn1q2bGmCTW0uvXz5cpfbLF682Dy2b+/dmEg//PCDaUqtAbEGxr5Yv369swl4MGuLoyYw3rNnj2nDbi0PP/yweX3p0qX5Xh8/fnywswoA8BPKfgBAtNFBsHr27GmeuxrpeuvWrTJ//nzzvF+/fj4NutW5c2epV6+e13nVGuKXXnrJPNc5jePjg9vLNyoC45ycHDl8+LBz0Tm3rC8j7+vp6enBzioAwE8o+wEAdomNjfF68bfRo0eb2twpU6aY4FibPlsjWQ8cOFByc3PlqquuMtMn5VWnTh2zWHMTu6JTLVnrhwwZcta86E1p7Y988uTJAjevNTBftmyZCYifeOIJCbaoGHxLv2DrBwEAiA6U/QAAu8SEUHVj27Zt5eWXX5Z7773XDOo1btw400Jq48aNpol148aNzQjQZ9q1a5cz+HVHg2K90VyiRIki1Thv2rRJnn/+eRk2bJipXdY5lI8fPy6bN28252htQq3TNF1wwQUSbFERGAMAAABAtBg1apScf/75pqnyzz//LH/99ZcZQVqDWZ0iqWTJkj41o+7Xr1+RjqFTMek0hitWrJC9e/fKzp07zTzKTZs2NfMY33nnnVK/fn0JBTEObqcHhF3TNWWWqWVLOvE2TCl2KldsUUzsGfEuJuO4Lenklgj8QAUns+wpJkrFuh490d8yJMGWdIpnup8qIZy+f1XMh7kOo40d0zUliD0FZkxudkRVtWTaUB+QZFOtkU3FsiRm2zP9mCM+KeBp5MbESSSJy/l7LtpASypZ2pZ0wl3Lx77xet/VT/f2a17gnRCq9AcAAAAAwH40pQYAAAAAHwRiEC3YixpjAAAAAEBUo8YYAAAAAHwQQ41x2CMwBgAAAAAfEBiHP5pSAwAAAACiGjXGAAAAAOCD2BhqjMMdNcYAAAAAgKhGjTEAAAAA+IA+xuGPwDhAcouXtSWd
OJv+CHMdgU8j+dSRwCciIqeL2fPdJMba0yDDYUPTnTKnDoodcpNL25JOvE1/N8cSAv9bK27D3yZCUIw95UtOXJIt6djVAjHOhpOZHWWyirMlFRFHQrIt6cTknA58InH2fGpxOaci6u8TiBYExgAAAADgA2qMwx+BMQAAAAD4IJbAOOwx+BYAAAAAIKpRYwwAAAAAPohhuqawR40xAAAAACCqUWMMAAAAAKE/WQACiK8QAAAAABDVIj4wdjgc8uOPP8rDDz8snTp1kvLly0tCQoJUrFhRLr30Uvnoo4/MNgCAyEHZDwCwe1RqbxeEhohvSj1//nzp0aOH8//16tWTunXryo4dO2Tu3LlmmTp1qnz22WeSlMRE6QAQCSj7AQB2Yh7j8BcVNcZ6MfTaa6/JgQMHZPv27bJixQo5fPiwfPjhh+aC6KuvvpInnngi2FkFAPgJZT8AAPBEjCPC25KdOHFCkpOTTRM6V5555hl57LHHpFy5cnLw4EGJjfXPvYLTx/4SO+QUK2VLOnb8SuIzjgQ+Ef1uipW1JZ3EzKO2pJNTvFzA04g/ac/vOTe5tC3pZMUm2pJOelZuwNMonmDP/c2SxZMlnASr7Fcn0zMk0BJsqpjIFXsSsmuWk5zcwJ/M4myqNbLr6i1W7EkoJud0wNPIibOndUhczilb0rHr/RRPLmZLOuGu26uLvN53waiL/JoXeCfia4xLlSrl9sJI9e7d2zweOXLEXBwBAMIfZT8AAPBExPcxPpuMjP+7s6+1CwCAyEfZDwDwJwbRCn8RX2N8Njr4imrevLmpYQAARD7KfgAAkFdU1xivXLlS3nrrLfNcp/QAAEQ+yn4AgL8xKnX4i9rAWEcp7du3r2RnZ8vVV18tAwYMCHaWAAABRtkPAAjngfcQOFHZlPr48eNm4JXdu3dL69atZfLkycHOEgAgwCj7AQCAO1FXY5yamiq9evWS1atXS9OmTeXbb78tUv+yiRMnyqRJk4qcztAbB8qtQ272MbcAgGCW/d6U/zcPHixDh93iQ24BAOGGGuPwF1WBcXp6ulx22WWybNkyadiwocybN0/Kly9fpH337dsnq1atKnJa/7ikuw85BQCEQtnvTfl/aa9eXuYUAAAES9QExpmZmdKnTx9ZtGiR1K5dW77//nupUqVKkfevWrWqtGrVqsjbV6lc2cucAgBCpez3qvz38PgAgPBHjXH4i3E4HA6JcFlZWXLVVVfJ119/LdWrVzcXSPXq1QtomqeP/SV2yClmzzQjdvxK4jOOBD4R/W6KlbUlncTMo7akk1O8XMDTiD9pz+85N7m0LelkxSbakk56Vm7A0yieYM9QESWLh99cv8Eo+9XJ9P+bIzlQEmy6/soVexKKsen95OQ6Iubi2K6rt1ixJ6GYnNMBTyMnLknsEJdzypZ07Ho/xZOL2ZJOuLti0k9e7/vl8PZ+zQu8E/E1xjk5OXL99debCyO9iz9//nxbLowAAMFD2Q8AsBM1xuEv4gPjadOmyYwZM8zzYsWKydChQ91uO2HCBGnZsqWNuQMABAJlPwDATgTG4S/iA+NTp/6vOcvOnTvNUthUHgCA8EfZDwAAPBEVfYyDgT7GnqOPsXfoY+w5+hhHRx/jYKGPsefoY+w5+hh7jj7G3qGPcdFcN3m51/t+OridX/MC79hzRQUAAAAAQIiK+KbUAAAAABBI9DEOf9QYAwAAAACiGjXGAAAAAOADaozDHzXGAAAAABBhFixYIJdffrlUrFhRkpOTpUmTJjJ69GhJS0vz+FiDBw+WmJiYQpc5c+a43T81NVUef/xxkwfNi+ZJ87Zw4UIJFdQYAwAAAIAP4mJDq75xwoQJMnLkSNEJiGrUqCE1a9aUjRs3yrhx4+Szzz6TJUuWSLlyns9sUrNmTalVq5bLdWXLup755dChQ9KpUyfZvHmzJCUlybnnnisHDx6Ur776Sr7++mt544035Pbbb5dgIzAGAAAAgAhpSr1y5UoZNWqUeT5x
4kS59dZbTY3un3/+KX369DHr9TUNkD01dOhQefLJJz3aZ9iwYSYobt26tcyaNUuqVatmAva3335bRowYIXfffbd06NBBWrRoIcEUWrc2AAAAAABeGzt2rOTm5spNN90kw4cPN0Gx0oB06tSpEhsbKzNnzpR169YFPC+rV682wbCm+cknn5g8KM2T5k3zmJOTY/IcbATGAAAAAOBjjbG3iz9pX16rr68Gnmdq2LChdO/e3TyfPn26BNqMGTPMo6bZoEGDAuu1xlhpk2pv+j77E02pAQAAACACaA3tqVOnTF/edu3audymc+fOMm/ePFm2bJlXA3pt2LBBDh8+LGXKlDHNo2+88UapXbu2y+2tNC666CKX6zWPmtfMzExZs2aNdOzYUYKFGmMAAAAAiIAa4y1btphHHSArISHB5Tb169c3j9rv11OLFi0ytcAaIH/++edmpGmthR4/fnyh+bHSPJPmUQf08jY//kSNMQAAAAAEiQ6QNWnSJI/20WbSVjPkvI4cOWIeCxtx2lp39OjRIqfXsGFDeemll0yT6Dp16phaXu2jrK9pk+yHHnpISpYsWWB06UDlJxAIjAMkNa6kLemcysyxJZ3SSXEBT8MRX0zskHzw7ztXgfaL/H33K9Ba5/wV8DRySpQXO5S7+BFb0jn63VO2pJOSFPjfdEZWbsDTgGfibRiZNCvXIXaIC51BVv3i+KnA/71USLTnb9IRY0+jv5icLFvSyU2w4RrAnj8bOR2baEs6idmZtqQjYs/1WbiL+98AV97Yt2+frFq1yuN9XNEmySox0f3vUINalZGRUeT0HnvssQKvXXDBBTJt2jS544475D//+Y/Z5uabbzYBcqDzEwgExgAAAADgA1+aRFetWlVatWrl8T6uFCv2942M06dPu91X+yCr5ORk8YdnnnlG3nnnHTl27JjMnz/fTAmVNz/p6em25sdbBMYAAAAAECTaJNpVs2hvlC1bNl8TZlesdda2vipdurQ0bdrUDPy1devWAvnRwNjO/HiLwbcAAAAAIAIG32rUqJF53L17t2Rlue4KsX379nzb+kPi/5pKZ2dnu8zPtm3bXO6nedS8+js/3iAwBgAAAIAI0LJlSxOkavPk5cuXu9xm8eLF5rF9+/Z+STM7O1s2bdpknteoUSPfugsvvDBfmmfSPGoza21y3aJFCwkmAmMAAAAA8HHgRW8Xf0pJSZGePXua565GutamztoPWPXr189vo2ofP35c4uPjzajVeVlp6PROrmqNdV/Vu3fvfIN2BQOBMQAAAABEiNGjR0tMTIxMmTLFBMcOh8M5kvXAgQMlNzdXrrrqKmnevHm+/erUqWMWnac4r7lz55rpmM7sP6w1vRMmTJB7773X/P+2224rMCiYDip2+eWXS05OjgwYMMA5mrbmSfOmeYyNjTXzIQcbg28BAAAAgA/83VfYF23btpWXX37ZBKw6qNe4ceOkQoUKsnHjRtPEunHjxvL2228X2G/Xrl3mMTU1Nd/raWlpMn78eLNUrlzZ2Vx68+bNzm2vueYaM6exK++995507NhRVq5cKXXr1pVzzz1XDh06JHv27DEB/KuvvurxqNyBEBU1xjrptE6C3aZNG6lWrZqZK0ubGegXoHdUDh8+HOwsAgD8jLIfABBtg29ZRo0aZWp6tYmyBrYaFNeuXVseffRRWbFihQmUi6p169bmvNmjRw/TF1j7E//6669mNOq+ffvKrFmzTC2zu7mKK1asaIJiTVvzoHnRPGnevv/+e7nrrrskFMQ4rLr1CKYdudeuXWsuirR6X38If/31l3MEtEqVKsl3331XoDmBL46cTBc7nMqx5+srnRQX8DTisuz5zOKP7rElnV+kpi3ptC6e/65eIOSUKC92KHfxI7akc/S7p2xJJzfh77kEAykjK1fsUC6luISbYJT9KiMzUwItO9eesj8uxp4aEJuSkSMZOQFPo0KiPX+Tjhh76jZiclyPahuO5aVdV7w5NiWUmPP3
3K+BlpRSxpZ0wt3j3/zm9b7jep/j17zAO1FRY3zHHXfIDz/8ICdPnpQdO3bIL7/8YpoKrFu3Ts477zxzoXT99dcHO5sAAD+i7AcARGuNMTwXFYHxrbfeKhdddJEkJCTke/3888+Xd9991zzXKv3ffvP+Tg8AILRQ9gMAgKKK+sG3zjnn/5oupKfb05QXABBclP0AAH+i5jf8RUWNcWGWLFliHnXeLB2hDQAQ+Sj7AQCARHuNsc7dtX//fjPois7JpZ577rmgTyoNAAgcyn4AQKBQYxz+oiow1jmy7rnnnnyvtWvXTj744APp1atX0PIFAAgcyn4AQKARGIe/qGpKXb16dTO59AUXXGCm7tAJpdesWSMffvihHDt2LNjZAwAEAGU/AAA4m6iqMe7fv79ZLDplx5133ilTp041o5LqZNdxca7n6504caJMmjSpyGndcPNgGTx0mF/yDQAITtnvTfk/eMgQueWWW3zONwAgfFBjHP6iKjA+U7NmzeSrr76SevXqmdqDTz75RG644QaX2+7bt09WrVpV5GP3uJTmeQAQ7mW/N+V/r969/ZRTAABgl6gOjFVKSop06dJFPvvsM1m5cqXbiyNtfteqVasiH7dylSp+zCUAIBhlvzflfxXKfwCIOtQYh7+oD4xVdnZ2vkdXRowYYZaiOnKSeTEBINzLfm/K/4zMTJ/zBgAA7BX1gfGRI0dk4cKF5nnLli2DnR0AgA0o+wEA/kSNcfiL+FGpf/jhBxk3bpzs3LmzwDrtM9azZ085fvy4GbU07+AsAIDwRdkPALA7MPZ2QWiI+Brjo0ePyujRo82i/b70IkhHH92zZ48ZUEXpa7Nnz5aSJUsGO7sAAD+g7AcAAJ6I+MC4Q4cO8vLLL5smcxs2bJAtW7ZIZmamlC1bVrp16yZXXHGFmVZDB2IBAEQGyn4AgJ3iYqj5DXcxDofDEexMRCK7Bt86lWPP11c6yf0cn/4Sl2XPZxZ/dI8t6fwiNW1Jp3Xx1ICnkVOivNih3MWP2JLO0e+esiWd3IRiAU8jIytX7FAupbgt6UQCOwbfys51RNSFnl3Xk0cycgKeRoVEe/4mHTH29IaLycmKmPLSriveHJsSSsw5ZUs6SSllbEkn3L25rGDXnaL654V1/JoXeCfia4wBAAAAIJBiqTEOexE/+BYAAAAAAIWhxhgAAAAAfBBHhXHYIzAGAAAAAB/EMu1S2KMpNQAAAAAgqlFjDAAAAAA+YLqm8EeNMQAAAAAgqlFjDAAAAAA+YLqm8EeNMQAAAAAgqlFjDAAAAAA+YLqm8EdgDAAAAAA+YLqm8EdgHCAls0/Ykk7K6Qxb0pGsuIAnEXt4l9iiWIotydQpm2RLOosP5gQ8jQ4lxBbHvn7UlnRSJdGWdFKO7Q14GmVSD4otUi60J50I4HAEPo2kU8fteS+J9vzx58Yl2JJOhcTcgKeRE2vPezmUkW1LOiUT7CkvS2RlBjyNnPhiYge7wqMTDnu+m4q2pAIEH4ExAAAAAPiAwbfCH4NvAQAAAACiGjXGAAAAAOADBt8Kf9QYAwAAAACiGjXGAAAAAOAD+hiHPwJjAAAAAPBBHNM1hT2aUgMAAAAAolpUBsZff/21xMTEmKVOnTrBzg4AwCaU/wCAQDWl9nZBaIi6wDg1NVX++c9/BjsbAACbUf4DAAB3oi4wfvTRR2X37t1y5ZVXBjsrAAAbUf4DAAI5XZO3C0JDVAXGy5Ytk3//+9/mouiqq64KdnYAADah/AcARJsFCxbI5ZdfLhUrVpTk5GRp0qSJjB49WtLS0jw6Tk5OjsydO1dGjRol7dq1kzJlykhiYqJUrVrVnFe/+uort/vu3LnT2YXJ3XLhhRdKKIiaUamzsrLk1ltvleLFi8sbb7wh8+bNC3aWAAA2oPwHAARaqPUVnjBhgowcOVIcDofUqFFDatasKRs3bpRx48bJZ599JkuWLJFy5coV6ViTJ0+WW265xTyPjY2VBg0aSMmSJWXb
tm0ya9YsswwfPlzeeustE+i607FjR5evN23aVEJB1ATGzz77rKxfv15eeeUV8+MAAEQHyn8AQDRN17Ry5UpTu6smTpxobg5rwPrnn39Knz59zHp9TQPkonA4HNKsWTO5++67pV+/flK6dGnzenZ2trz66qvy4IMPyqRJk6RFixaFjuWhwXgoi4qm1L/99ps888wz0qpVK7nrrruCnR0AgE0o/wEA0Wbs2LGSm5srN910k6nJtWpxq1WrJlOnTjW1vjNnzpR169YV6Xh9+/aVNWvWyLBhw5xBsYqPj5f777/fWZusQXg4i/jAWO9w6B0RbUqnX1ZcXFywswQAsAHlPwDALlph7O3i7xkY5syZY55rUHymhg0bSvfu3c3z6dOnF+mY5cqVK7SJdO/evc3j5s2bJZxFfFPqN998U5YuXWqq/tu0aRPs7AAAbEL5DwCINqtXr5ZTp05JUlKSGSjLlc6dO5vxNnRgSn/IyMgwjzqWR2H0fLxp0yYTZNepU0d69uxpBsTUGuxQENGB8d69e+WRRx6R6tWrm47mAIDoQPkPALBTXIgMvrVlyxbzWKtWLUlISHC5Tf369f1awzt16lRnwH22AcHysvola7PuunXrSrBFdGCs/clOnDgh77//vqSkpPh0LG2Gp19eUQ298Tq5dfDNPqUJAAi/8n/w4CEy7H/9rQAA8Pd5xmomPWLEiAKvHzlyxDwWNuK0te7o0aPiqy+++EJmz55taoF1EK4zaT/kG2+8UQYMGGBGn9Z+zocOHTJTPD3++OOm7/Kll15qBgQrVaqUBFNEB8arVq0yj7fffrtZXFX579mzR6pUqWKe692KDh06uDzWvn37nMcrin/06OZDzgEA4Vr+9+r1d18rAED08GW6Jk/PM9Y+rmRmZppHnWfYHW1mnfd86K1NmzbJoEGDzHMdBdvVeVRng5gyZUq+1zQ41jFAunXrJq1btzbTPr3++usmUA6miA6MLQcOHHC7Tkdss9afPn3a7XY6gbWOalpUVapU8jCXAIDIKP//DrYBANEjzodusp6eZ6x9XClWrNhZz2vaB1klJyeLt/bs2WP6CB8/flz+8Y9/yPPPP+/xMXQ+ZJ3eSffVG9QExgG0c+fOQieqHjJkiNSuXbvQ7SzaVMFVcwV3Th/dX+RtAQCRU/6nZ/x9tx4AgKLw9DxTmLJly+ZrUu2Ktc7a1lP79++Xiy++WHbv3i1du3Y18yG76898NlYt89atWyXYIjowBgAAAIBQbkrtT40aNTKPGrTqdIWuAtbt27fn29YTf/31l5nuSQPZ9u3by5dffumspfaG1eQ7Oztbgi00xsYGAAAAAPikZcuWJtjU5tLLly93uc3ixYvNowa2njhy5Ihccskl8ttvv5mm3998842ULFnSp/yuX7/e2Rc52AiMAQAAAMDH6Zq8XfxJZ2LQvr/K1UjXWtM7f/5887xfv35FPu6JEyfM6NHr1q2T8847T7777jspXbq0T3lNTU2V//znP+a5HjvYojYwHjx4sDgcjiL1LwMARA7KfwBAJBs9erSZPklHg9bgWM951kjWAwcONINPXnXVVdK8efN8+9WpU8csM2bMyPd6enq6XHbZZWZKpSZNmsj3338v5cuXL/K0UjqwljXgV94RrXv16iU7duwwtc4PPPCABBt9jAEAAAAgAvoYq7Zt28rLL78s9957rxnUa9y4cVKhQgXZuHGjCVAbN24sb7/9doH9du3a5azJzeu1116TJUuWOP/ft29ft2lrUJ13dgZtzq1paV9nHYVa5yrWeYytfs46ANi0adNMQB5sBMYAAAAAEKTpmgJB5xU+//zz5aWXXpKff/7ZDJqlszFo8+lHHnnEo77Bp/LU9mpNb2GseZQtmtacOXNMbbOOZq1NuYsXL27mL+7du7fccccdITPNYYzDqluHX9k1XVPMad8m5i6y2LjAJ3H477tUAVcsxZZkDpZtaEs6Gw+mBzyNDtWKix1iT520JZ3U+FK2
pJOSti/gacSlHhQ7xDa40JZ0IoEd0zXFZx4TOzgSS9iSTm6cd9N8eCo2JyvgaeTE2vNeDmXYM4JryQR7rvZLiPs5V/0lJ977kXM9kWvTpXVGtj3pVCxlzzVAuFuz1/tyuUX1Mn7NC7xDjTEAAAAAREhTangnxCr9AQAAAACwFzXGAAAAAOADKozDHzXGAAAAAICoRo0xAAAAAPggVqgyDncExgAAAADgA5pShz+aUgMAAAAAoho1xgAAAADgg1hqjMMegXGAOBJL2JLO8bhStqRTLD7wf+1Jcfb8HIfP2W9LOkMuyLAlnY47Zwc8DUe1a8UOd86157sZ1zPFlnRiM08GPA1HdlbA04BnTufkBjyN1Fh7fsO52bYkI2VzM21JZ9lfgX9D7cvbU/ZXyT1tSzqn4srbks72E4FvxFghOUfskJIUZ0s6pWLt+Q0A0YLAGAAAAAB8QB/j8EcfYwAAAABAVKPGGAAAAAB8wHRN4Y/AGAAAAAB8QFPq8EdTagAAAABAVKPGGAAAAAB8wHRN4c/nGuOhQ4eaZceOHf7JEQAAAAAA4VRj/OGHH0p8fLy8++67/skRAAAAAIQRKozDn881xpUqVZLixYtLTAj3OH/yySdN/gpb3nrrrWBnEwDgR5T9AADAthrjdu3ayZdffil79+6V6tWrSyjTIL5hw4Yu11WtWtX2/AAAAo+yHwAQaLEhXEkImwLjkSNHmsB4zJgx8s4770go6927t0yePDnY2QAA2IiyHwAQaMTF4c/nptTdunWTV155RT744AO59tprZdWqVf7JGQAAAAAA4VBjXK9ePfOYkJAgn332mVmSk5OlfPnyEhcX53If7de1fft2X5MGAAAAgPCvbUT4B8Y7d+4s8Fp6erpZ3AnWQF1r166V66+/Xvbv3y8pKSnSrFkzGTBggDRt2jQo+QEABB5lPwAACHhg/P7770u4WLNmjVkss2bNkqefftr0k37xxRfd1nADAMIXZT8AINBCeYYe2BQYDxo0SEJdtWrV5KmnnpKePXuapt9aY7Blyxb5z3/+Y6bqePXVV01T8PHjxwc7qwAAP6HsBwAARRXjcDgcEsX0guihhx6S+Ph42bp1q9SpU8cvxz2VdlLscCLHnpqOYvGBvwuWlHlU7DB8zn5b0hlyQS1b0rlwx5cBT8PR4Vqxw92zt9qSzrierqfu8bfyR214P5n2lDVxTTpLJAlU2a+OpbrvSuQvp3PsOXXn2pKKSNnYLFvSWfZXdsDTaF/epk8t+7QtyZwqXt6WdPacCPxvoEKyPddMKUn2pBObfcqWdJJKlrYlnXC371ia1/tWLVPCr3lBkGqMw919990nr732mvz555+med3dd9/tcruJEyfKpEmTinzcIYNulluHDfVjTgEAdpf93pT/Nw4aLEOGDvNTTgEA4YCW1OHPb4HxH3/8IS+//LJ8++23smvXLsnMzJTs7P+7M3v06FF58803Tfv7Bx54wNylDwXat+yCCy6Qzz//3NQauLNv3z6PpqLq3fNSP+UQABCsst+b8v+Snr38kEMAAGAnv0Snc+fONXMYnzhxQqyW2Wd2QC9btqz897//lZUrV5qRQPv06SOhIjEx0TzmDeTPVLVqVWnVqlWRj1mlShW/5A0AELyy35vyvzLlPwBEHaZrCn8+B8Z79uyRfv36ycmTJ02we/PNN8utt94qx44dK7Dt0KFDZcWKFfLVV1+FVGC8fv1681ijRg2324wYMcIsodbHGAAQuLLfm/Lfjj7GAAAgxG5uvPTSSyYo1hpjrRHu27ev8y78mXRkUPXLL79IqNAgfcOGDeb5pZfS/BkAogFlPwDAn7S1rLcLIiQw1j7F+oWOHTv2rNvWrVtXkpKSZMeOHWIXvfDRO/1r167N93pubq5MnTpVrr/+evP/yy+/XNq2bWtbvgAAgUPZDwAAbG1KvXv3bklOTpaGDYs2/UnJkiXl+PHjYpesrCwzmqgu5cqVk9q1a5uBv7Zt22YG
BFOdO3eWKVOm2JYnAEBgUfYDAOwUS8Vv2PM5MI6NjZWcnJwibasDnOgAXaVKlRK76NyU48aNk59++kl+++03c1GkI2brhVLv3r1NrcHAgQPNCKUAgMhA2Q8AsBNxcfjzOTDWu/B60aE1x7Vq1Sp020WLFpm7+EWtXfaHMmXKyGOPPWZbegCA4KPsBwAAtvYx7tGjh3l86623Ct1OA2K9SNH+yHq3HgAAAAAipSm1twsiJDC+5557zCjUOjr1u+++63KbVatWmQD6559/lpSUFLn99tt9TRYAAAAAgNAIjLUp9TvvvGP6GQ8fPlwqV67sHNikQ4cOUr16dTPi5+LFi83AJx9++KFUqFDBH3kHAAAAgKBjuqbw53NgrG644Qb55ptvpH79+nLw4EE5ffq0OBwOWbZsmezbt888b9CggcyZM0f69OnjjyQBAAAAAG4sWLDATEtYsWJFM4tQkyZNZPTo0ZKWlub1MT/77DPp1q2blC1bVkqUKCEtWrSQF1980XSbLcxff/0lI0eOlHr16kmxYsWkSpUqct1118maNWskYgbfslxyySWyefNmM8DW0qVL5c8//zS1yPqmO3bsaD5ARv8EAAAAEGlCra/whAkTTCCqFZQ1atSQmjVrysaNG82MDRrcLlmyxMzU4In777/fdJ9VWiGqgfH69evlgQcekC+//FK+++47SUpKKrCfzgzRqVMnOXDggNmnadOm8scff8i0adPkv//9r0yfPj0kKk/9FhgrbQrQpUsXswAAAABANAiluHjlypUyatQo83zixIly6623mjhNKy41ANX1+poGyEX1+eefm6BYA18NaK1AdtOmTfKPf/zDVI4++uijzsDZooF5//79TVDcq1cv+eSTT6R06dJmGt+nnnpKxo4da1ofb9myRapWrSph3ZR6586d/skJAAAAAMAnGmzm5ubKTTfdZMaAsvoxV6tWTaZOnSqxsbEyc+ZMWbduXZGP+a9//cs8PvTQQ/lqd7V5to43pf7973+bbrV5ffHFF6a5tAbDH3/8sXlUOvaUBsYXXXSRpKammubYweZzYKx9h3X6Ja0G16bTAAAAABBNYmNivF78SYNMHddJaVB8poYNG0r37t3Nc23CXBRbt26VtWvXuj2mHk9jwlOnTsmsWbPyrbPS0Fpj7Zd8Jut4Wgsd9oGx3o3Q9uTXXHONabuuHbp37drln9wBAAAAAIpk9erVJkDVJs/t2rVzuU3nzp3Now6UXBTL/redDpylMw55ckzr/1ozXNh+2ud47969EtaB8bx588wdgISEBNm/f78888wzpjO2tjWnFhkAAABApNOKX28Xf9K+uqpWrVomPnNFYzWlAyd7csz6/9uvqMfUmYqsbrfu9tWK1cTERI/yE7KDb2nVuS6HDx+WyZMny7vvvms6YWsV/rfffmtGpR46dKjccsstZs5jAAAAAIA4B8iaNGmSR/toE+QRI0YUeP3IkSPmsbARp611R48eLVJaR7w85vHjx03r4sL21f7PZcqUMdM5FTU/IT8qdfny5eW+++4ziw7/rV+wjnSm8xhrLfKzzz5rpnTSL/CKK66I+KmbcuNc36Hxtxfmb7clnbFdXTeb8Kv//eEE2iM9GtiSTt2k07ak46h0dcDT+CPNnu/mX5c2tCWdLHvejpyu0iTgacSeThc7RHaJ7V9xNszZUTo3U+xw3FFw2o1AiMnNtiWdVlVLBDyN49kOsUOJEj43+iuSBLHn/TRIDvw5Mzcx8N+/ys615zM7JfZca9pTCoS/GIf337vGS6tWrfJ4H1cyM/8+P1i1sK5YUyplZGQUKa1ML49p7efv/ITFdE0WnadKF50/a8qUKWaksl9//dX0RdbFqkXWYcK1mh8AAAAAwpbD+7vuOk1Rq1atPN7HlWLFijmbMbujfZBVcnJykdIq5uUxrf38nZ+wCowtWi1+1113mZri2267zcxvpaxa5Oeee870T9baZJpZAwAAAIg22qLWVbNob1gj
P1vNn12x1rkaJdqfx9SpmXRqKG1O7W5fnef42LFjBfYNhoC1w9G7Av/v//0/6dKlizRt2lQWL15sXtcA+J577jGv6cBcn376qbRo0cI5BDgAAAAAhJMYR67Xiz81atTIPO7evVuysrJcbrN9+/Z82xb1mNu2bXO7jatjavNpq/LT3b579uxx1iYXNT9hExhv2LBBRo0aZSaQHjRokAmItVO1jlL95Zdfyu+//y4vvfSSmVB6/vz5cv7555uO2TpZNAAAAADAOy1btjQBqTZPXr58ucttrArL9u3bF+mYF154oXncsWOH2ymV3B3T2tda726/GjVqmCXsA2PtWP3BBx9Ix44dpVmzZqZvsVaXV6pUSR599FETDGtQfNlll5kg2dK1a1czcnV8fLzbLw4AAAAAQprW/Hq7+FFKSor07NnTPHc10vXWrVtN5aTq169fkY7ZqFEjU5np7ph6PK0R1oC8T58++dZZaUyfPt3lqNPW8bR7bbD5HBjfeeedpvO3Dqb1008/mXbiGvBqE2mtGh83blyhA2xVrlzZDMaltcYAAAAAAO+NHj3aVEbqIMgaeGp8Zo3zNHDgQNPn96qrrpLmzZvn269OnTpmmTFjRoFjjhkzxjw+//zzpsLTonMP67S86vbbb5eKFSvm20/T0YpTjfVuuOEGZ8ynXWqfeOIJMwZV8eLF5f7775dgi3FYn5SXtEO11Vlam07rIFuetg8fMGCAHDhwQBYsWCCRIiPP8OSBNGZu5EzXFHMqVeywI7dUZE3XFBvQMfSMPZn2TNZTIsGe6UdsmklDyiXHRcx0TUml3M9diPxOpgd+uonE7IyImq6pjNjzfk4lBH66nky7pmuyqbyMtWm6ptjTaZEzXZNN5xi7poUqU7K4LemEu1Mn/x5AyhtJKWXE31599VW59957TVBcs2ZNqVChgmzcuNE0sW7cuLGZXldfyyvmfy1733//fRk8eHCBY+o4UXpcVb9+fSlZsqSsX7/eBLk6I9HcuXPzjURt2bJli3Tu3NnMVVyiRAlp0qSJqUDV/yckJJgK1auvDvz0o2fjc6l6wQUXmA9P25u//PLLXnWa/uSTT2wLir/++mvp27ev6QOtc2ZpbbU2AX/88cclO9ueeRQBAPai7AcARENTaouO+aSBau/evSUtLc0ExToQlnZzXbFiRYGguCheeeUVmTZtmhlc+dChQybgPffcc00tsjandhUUK40PdXwpbWmsNco6ja/VzPrnn38OiaDYLzXG4UIvfIYMGWJGylZ650QvjA4fPix//PGHGQ3t5MmT5s6HP1Bj7DlqjL1DjbHnqDGOnhpju8t+RY2x56gx9hw1xp6jxtg71BgXzakT7qcyitRzbKQJ/BV1iPjnP/9pLozatm0rEydONCO2WdLT02XevHmmFgEAEDko+wEAdvD3tEuwX1QExtpM+5133jGdyb///nszWlte2uH7zBHUAADhjbIfAAAUld/a4axdu1aGDx9u2pmXKlVK4uLi3C46PZOddN5kdd999xW4MAIARCbKfgBAtPYxhuf8EqG+8cYbZtQzHZEs1Los6xzL3333nXneo0cP0/Fchy3XR20+p83qhg0bZjqjAwAiA2U/AACwNTDWkcRGjhzpnLvqsssuk3/84x9Srlw5M2rZ/v37TR+ujz/+2NQkv/7662beY7toTXZWVpZ5vnjxYjMamg62Ypk9e7aMHz/ejKyt83oBAMIfZT8AwFbU/IY9n5tSa6CrtcQaHE+YMEF69eplXk9MTJTu3bvL9ddfL++9954sW7bMzI2lE063atVK7KITWVvuuOMOU0uwfPlyM4fX1q1b5dprrzXPdQ7m1atX25YvAEDgUPYDAGxFU+qw53NgvHTpUhPwWrXGljObVLdo0cIEztu3b5cXXnhB7JKamppvoJVvvvnGjE6qgXuDBg1k6tSpJm9as/D000/bli8AQOBQ9gMAAFubUh84cMD018rbTys2Ntb07zqTTt6ckJAg
M2fOlKeeekrskHei6cGDB0vZsmXzrde83nPPPabWQPuj5ebmmtfOpNN8aP+0oho8ZIjccsstPuYeABDMst+b8v/mwYNl6DDKfwCIKrnU/Eq0B8Z6J15rjPPS0T9PnDhhmqnlnR9Sg2LdfteuXWKXvBdD55xzjsttrNdPnjwphw8flooVK7pslrdq1aoip9urd2+v8gsACJ2y35vy/9L/dSkCAABRFBhXr15dNm3aJNnZ2c5pmOrXr2/6bP3yyy/SqVMn57Z//vmnHD9+3ATHdmnSpInzuTahO1vNgtYauKIDhnnSN7pKlSoe5RMAEHplv6L8BwCcTQx9hcOez4Gx3nHfsGGD/Prrr2ZwE9W1a1dzd12bS8+aNctcfOhooHfffbdZf/7554tdNHDXZt5aS/3777+73Eb7PSvNZ/ny5V1uM2LECLMUVYaLpuQAgPAq+70p/0+mZ3iRYwAAENaDb1166aVmoK0vv/wy3wig2oT6+++/lxo1akjHjh3NRcrnn39uml3rtBl2uu6668zjRx99ZGq2z6SjZqsuXbo4a70BAOGNsh8AYBtGpQ57PgfG11xzjYwZM0aqVavmfK1u3bpm3mLta3zkyBH56aefTP8tDYoffPBBueGGG8RO999/v5QuXVp27NhhgnJrYDAN6HW6KQ3qNW8PP/ywrfkCAAQOZT8AwDY6I4+3C0JCjOPMeZX8SIPir7/+Wvbs2WMuTrR2WafJCIZ58+ZJnz59JCMjw+SlUaNG8scff5hBVfTCaPz48eYiyl/sako9Zu7fTQEDbWzX6gFPI+bU/02vEkg7ckvZkk7dpNO2pOOIDXxN157MOLFDiQSf79UVSa5N56ByyYH/3GJPp4sdkkqVk3Bkd9lvV1PqxGx7mmsfd/zfAJqBVEbseT+nEkoEPI3MbEdElZexYs/7iT2dFvA0chMD//0rm34Ckm3TyaxMSfvGBgpnWQd2eL1vQuW6fs0LvBPQK+py5crJjTfe6Py/DrylA5joxcjKlSvFTj169JC1a9fKM888Yy6U1qxZYy6S9ILp3nvvNU3pAACRhbIfAGALmkSHPVs7VWkfL70oOXN6J7s0bNhQ3n///aCkDQAIDsp+AABwNow2AgAAAAA+YLqm8GdPBxUAAAAAAEIUNcYAAAAA4AtqjMMeNcYAAAAAgKhGjTEAAAAA+IIa47BHYAwAAAAAviAwDns0pQYAAAAARDWPa4zj4uICkxMAAAAACENM1xSFgbHD4QhMTgAAAAAACIfAeMyYMYHJCQAAAACEo1xqjMMdgTEAAAAAIKoxKnWAJO78xZZ0xnZrZks6p2MTA55GanyC2KFyfIwt6aRNecaWdBIGBf5mVUb2abFDxeL2FEnHT+XYkk6MDV1P9mcF/m9T1bYllciQmJ0R8DSy4pPFDnE59nSfyogtYUs6p7MD/36SbTrHnM6xp3Yq2WFP+Z8RH/jfQMZpez6zhFh7fgMp2SdsSUekuE3phDm6m4Y9AmMAAAAA8AWDb4U9pmsCAAAAAEQ1aowBAAAAwAdM1xT+qDEGAAAAAEQ1aowBAAAAwBfUGIc9aowBAAAAAFGNGmMAAAAA8AU1xmEv4muMd+7cKTExMUVahgwZEuzsAgD8gLIfAGCr3BzvF4SEiK8xLlasmHTs2NHt+szMTFm5cqV53qFDBxtzBgAIFMp+AADgiYgPjKtUqSJLlixxu/6DDz6QwYMHS3Jyslx33XW25g0AEBiU/QAAOzlyaUod7iK+KfXZTJ482Tz27dtXSpUqFezsAABsQNkPAEDRrF692txE1pvO2iKrXr16MnLkSDl48KDHx3I4HPLjjz/Kww8/LJ06dZLy5ctLQkKCVKxYUS699FL56KOPzDbunK17lObRWxFfY3y2Pmg//PCDea41BwCAyEfZDwDwuwjtKzxz5kwZMGCAZGVlSaVKlaRp06ayefNmef3112X69OmmdZYGykU1f/586dGjh/P/um/dunVlx44dMnfuXLNM
nTpVPvvsM0lKSnJ7nDZt2rhcr4G2t6K6xlib0ukdiVq1akn37t2DnR0AgA0o+wEAOLu9e/fKTTfdZILi0aNHm//r+Bz62KtXL9m3b5+pSS6shvdMuq0Gwq+99pocOHBAtm/fLitWrJDDhw/Lhx9+aILdr776Sp544olCj2MF5WcuX3zxhdfvN2oDY/1S9OJI3XzzzRIbG7UfBQBEDcp+AEBAROCo1C+88IKkp6fLRRddJE899ZTEx//d2Lh06dLy8ccfm0cNamfPnl3kY7Zr187UON99992mBjovDcKtgPidd96RXJv7bUftFYE2o9Mqe0VTOgCIDpT9AIBAcOTkeL2EqhkzZpjH4cOHF1hXtmxZ6d+/v3k+bdq0Ih9Tx/XQPsXu9O7d2zweOXLEqz7MvoiP9oFXOnfuLPXr1w92dgAANqDsBwDg7Pbs2WOaTCutMXZFz6Vas7ts2TK/pZuRkeF8rjNHuDN27Fj5888/JTs7W6pXr266Rmmz7sL6JZ9NVAbGqampzjsg1BgAQHSg7AcABEyETde0ZcsW85iYmCg1atRwuY11g/n33383/ZALqwkuKh14SzVv3rzQWSPee++9fP/XblJjxowxg3a1atXKq7SjMjDWC6O0tDQpXry4swnA2UycOFEmTZpU5DRu6dNNhl93pQ+5BAAEu+z3pvwfctMNcutQAm8AQGDOM1bz5hEjRgQsT0eOHHE2mdZpkFwpV66cedS+wCdOnPBpRGilA3u99dZb5rlO5+TKlVdeafoia+CsAbve9J43b5489thjJkDXKZ90eqmaNWt6nH58NDel69evn6SkpBRpHx11bdWqVUVOY1/7Zl7nDwAQGmW/N+V/70v/bxoKAECU8GEQLU/PM9Y+gZSZmemsMXYnb7PlvE2gvaEjVPft29c0jb766qvNFFGu/Pe//833f51XWbfVKaBat24tu3fvln/961+mibenoi4w1kFXFi1a5HFTuqpVq3pULV+1om93TAAAwS/7vSn/q1Su7HH+AADRy9PzjLWPO6NGjTLTIXmqS5cusnDhQmfAqU6fPu12+1OnThWpP/DZHD9+3Ay6pUGtBrfWjWxPVKhQQR555BH55z//KZ9//rm8/fbbbmu63YmP1vkr69SpI127di3yftpUwZPmCjmbFnuZQwBAqJT93pT/p0783fwMABA9HD7UGHt6njmbkiVLetWsuXTp0s7n2oRaHT161Jw/XQWZVnNrnfqwsP7AhdGm0DonsjZ/btq0qXz77bdeH6tDhw7OfOni6WcQVYGxfqk6cbQaNGiQx3cRAADhh7IfABBNg2+NGzfOLL5o1KiRs8ZYR6iuVatWgW22b99uHuvWrevVwFs6R/Jll11mRrVu2LCh6SvsSz/lvM2+tUm2p2Kjcf5KvSjSiyMAQOSj7AcAwDO1atWSatWqmeeLF7tuCWu93r59e6/6MPfp08d0c6pdu7Z8//33UqVKFZ/yvH79emczcG8C7KgKjK326joXl97ZAABEPsp+AIAdTam9XULVNddcYx5djZitTaynT59unnsy04PSqZ302BoM6xzE8+fP92oU6by0hvill14yz3VO4/h4zxtGR11grE3qrE7lAIDIR9kPAIDnHnjgATOoltbqPvHEE5KTk+McLOv66683jy1btpQrrriiwL6dOnUy43q8+uqr+V7XY+i+X3/9takh1qC4Xr16RcqPTuGkY4acPHky3+va1FtnnNAm2RoQa169EVV9jAEAAADA70K45tdbNWvWNGN0DBw4UMaOHWvmW9bXNm3aJGlpaVK5cmWZNm2ay7E7/vjjD9m1a5ccO3Ys3+u6/YwZM5xNnocOHeo2/QkTJpjA26LpPv/88zJs2DATTOs8yhqcb9682dwA1+PpNE0XXHCBV++XwBgAAAAAUIDWxGoQ+uyzz5qa419//dX0PR4yZIiMHj1aKlWqJJ7IO8XTzp07zeKOBr156VRMWsu8YsUK2bt3r9lX51LW0ax1
HuM777xT6tevL96KcWh4Db+za7qm7BrNbEnndKz7yb39JfW0PaP5JcfbMyKtY8pTtqSTMGhMwNPYcdz9HHb+VKtU4H9n6vgpe+7qVkqOC3gaB9LteS+1y5e0JZ1IYMd0TVnx3s8X6YlTOfZcIsTH2lMun7bh/dh1jsnOtee7SXbYU/5nxiYFPI2MbHuuMxJs+j2nZJ+wJZ3Esr4NiBQtTi/+xOt9EzsP8Gte4B1qjAEAAADAB47/9b9F+IqqwbcAAAAAADgTNcYAAAAA4IsIHHwr2lBjDAAAAACIatQYAwAAAIAvqDEOe9QYAwAAAACiGjXGAAAAAOADR64904EhcAiMAQAAAMAXNKUOewTGAfJzsXNtSadKpj2t4WskpAU8jWKnToodcmNL25LOnn6P2ZJOvT9WBTyNpmKPnPiatqSTXayCLeks2HUi4GlcXCVG7FHSpnTC34HsxICnUTVjv9ghoUQ5W9LJign8Z6ZKxQX+wjU3NkHskJR20JZ0DsTZ8xtIjAt8bVupxDixQ1xWui3p5CSXsSUdIFoQGAMAAACAL6gxDnsMvgUAAAAAiGrUGAMAAACADxh8K/xRYwwAAAAAiGrUGAMAAACAL+hjHPYIjAEAAADAFwTGYY+m1AAAAACAqEaNMQAAAAD4wJFDjXG4i5oa48OHD8ujjz4qzZo1k5IlS0piYqLUqFFDrr32WlmyZEmwswcACADKfgAAUBRRERhv3bpVzj//fHn22Wdlw4YNUrlyZWnatKmcOHFCpk+fLhdddJG88sorwc4mAMCPKPsBALbR6Zq8XRASoiIwvu2222Tfvn3SsGFD+fXXX2X79u2yevVq+euvv+S+++4Th8MhDz74oLmIAgBEBsp+AABQVBEfGJ88eVIWLFhgnr/wwgty7rnnOtcVK1bMvNagQQPJzs6Wb7/9Nog5BQD4C2U/AMD2Uam9XRASIj4wPnXqlKkVUPXr1y+wPiYmxvl6VlaW7fkDAPgfZT8AwE6O3ByvF4SGiA+MK1SoYAZaUT/++GOB9WlpabJmzRrzvF27drbnDwDgf5T9AADAExEfGKvnnnvO1A488MAD8s4778j+/fslPT1dli9fLn369JEDBw7IjTfeKB07dgx2VgEAfkLZDwCwiyM31+sFoSEq5jG+4YYbpHTp0jJu3Di59dZb862rWrWqvPnmmzJixIig5Q8A4H+U/QAAoKiiosZYbdu2zYxEGhsbK3Xq1DFzWhYvXtyMWDp58mQzlQcAILJQ9gMA7ODIyfV6QWiIihrjO+64Q/7zn/9I27ZtZc6cOdKoUSPzekZGhowZM8aMTqpN6datWye1a9d2eYyJEyfKpEmTipzmxddcL1ddP9hv7wEAYH/Z70353++GQXLD4KF+eQ8AAMAeER8Y6wWPNpdLSEiQ6dOn57v4SU5OlvHjx8uqVavk+++/l2effVbeeustl8fR2gXdrqhadL7YL/kHAASv7Pem/L+oR0+f8w8ACC/U/Ia/iA+MlyxZYqbsaNiwodsagUsvvdRcHK1YscLtcbQ/WqtWrYqcbvlKlb3KLwAgdMp+b8r/SpWreJxfAEB4YxCt8BfxgfHJkyeLvG1mZqbbdTpAiyeDtPy483CRtwUAhGbZ7035v/tIapG3BQAAoSHiB9+y+pRt3bpVdu3a5XKb7777zjw2btzY1rwBAAKDsh8AYCcG3wp/ER8Ya1O5SpUqSVZWlvTv31+2bNniXKcDsDz44IOmKZ26+eabg5hTAIC/UPYDAABPRHxT6hIlSshHH30kV111lfzyyy9yzjnnmP5mKSkpZhqP9PR05+ilV155ZbCzCwDwA8p+AICdqPkNfxFfY6x69OhhRii98847TfO6/fv3y2+//SalS5c2F0SzZ8+WN954I9jZBAD4EWU/AAAoqoivMbbUq1dPJkyYEOxsAABsRNkPALBDbk5OsLMAH0VNYAwAAAAAgcB0TeEvKppSAwAAAADgDjXGAAAAAOADBt8Kf9QY
AwAAAABcWr16tVx33XVSpUoVKVasmBm/Y+TIkXLw4EHxxpNPPikxMTGFLm+99Zbb/XUqxhdeeEGaN29uZqEoW7asdOvWTWbOnCm+oMYYAAAAAHwQqTXGM2fOlAEDBphgtFKlStK0aVPZvHmzvP766zJ9+nRZsmSJCZS9ocdr2LChy3VVq1Z1+XpmZqZccsklJt24uDiTn7S0NFm4cKFZHnroIXnuuee8yg+BMQAAAAAgn71798pNN91kguLRo0fLE088IfHx8XL8+HETLM+ZM8fUJC9fvtzU8nqqd+/eMnnyZI/20cBXg+K6devKN998I40bNzavz5o1S6699lp5/vnnpWPHjnLFFVd4nB+aUgMAAACAj6NSe7uEqhdeeEHS09PloosukqeeesoExap06dLy8ccfm8cVK1bI7NmzbcnPgQMHnE2s3333XWdQrPr06SMPPvigs6m2NwiMAQAAAMAHuTm5Xi+hasaMGeZx+PDhBdZpv97+/fub59OmTbMlP1orfPr0adP8WvsUn2nEiBHmcdWqVbJ9+3aPj09gDAAAAABw2rNnj2lKrbTG2JXOnTubx2XLlok31q5dK9dff710795drrzyStNce8OGDW63t9Kx0j1T9erVTRNrb/NEH2MAAAAA8EGkDb61ZcsW85iYmCg1atRwuU39+vXN4++//276ISckJHiUxpo1a8ySt0b46aefNiNev/jii2ZwLVd5stJ1l6cdO3aYAcI8RWAMAAAAAEEyceJEmTRpkkf7aPNmq+lwIBw5csTZZNrdwFrlypUzj7m5uXLixAkpX758kY5drVo102e5Z8+eZkTrlJQUE/T+5z//MX2IX331VRNkjx8/3mWerHQLy9PRo0fFUwTGAdK6gj0fbYbkv5MSKMdziwc8jZKlS4odEg553ufAG6VS6tiSzqlarQOext6TWWKHcsXs+T0XT7CnF0nP+B0BTyMrtlHA04BnSicF/necm1RB7JAVY8+5bNneVFvS6VQ98OeyuMwTYodN2aVtSefAsQxb0jmnQuC/m/jMY2KH3KQUW9JJz7KnhjK5mC3JRHWN8b59+0y/WE/3CaTMzExnjbE7SUlJzucZGUUvK1z1WT7//PPlzTffNE2hdeTpV155RW6//XapU6eOV3nyJD8WAmMAAAAACBKds7dVq1Ye7+POqFGj5LXXXvM4H126dDFzAatixf6+I6KDXblz6tQp5/Pk5GTxh/vuu8/k/c8//zRNq++++27nOk/y5E1+CIwBAAAAwAe+TLukTaL92Sy6ZMmSRW7WnFfp0v/XEkWbUFtNkh0Oh8vm1FbT5tjYWClVqpT4g/YrvuCCC+Tzzz+XrVu35ltn5clK92xNwD1FYAwAAAAAETL41rhx48zii0aNGjlrZ3WE6lq1ahXYxpoSSZs/ezrwVmGsptLZ2dkF8rR06VLZtm2b232tPFn59wTTNQEAAAAAnDQQ1kGy1OLFi8UV6/X27duLP61fv948njka9oUXXmgelyxZ4nI/nV5KR6TOu60nCIwBAAAAwMcaY2+XUHXNNdeYR1cjZmsT6+nTp5vn/fv391uaX331lXMu40svvTTfOp3rWGumtYn1ggULXI7urVq2bCkNGjTwOG0CYwAAAABAPg888IAZxGrRokXyxBNPSE5Ojnn9+PHjcv3115tHDUKvuOKKgrMAdOpkRpTWqZfy0qBX+1OvXbs23+s65dPUqVPNcdXll18ubdu2zbdN5cqVnX2xhw0blm+u4i+//NI5vdOYMWPEG/QxBgAAAAAfaGAXaWrWrCkffvihDBw4UMaOHWtqZPW1TZs2SVpamglUp02b5nJgrj/++EN27dolx47lnyYtKyvL1EDronMO165dW+Lj402/YWvu4c6dO8uUKVNc5kmD35UrV8pPP/0kTZs2lfPOO09SU1OdfYt1VGutWfZG1NQY65f33HPPmaHQdRJpHa2tRYsW5sMtbMhvAED4ouwHAMB7/fr1k59//tk8
ql9//VUqVqwod955p6xbt87jJstai6wDg1122WVSpkwZExCvWbPGDLjVu3dvExBrM2ld54rWYOuUUnpuP/fcc2XLli1y6NAhM9XUjBkz5MUXX/T6vcY4dPztCPfXX3/JxRdfbDpy63Di+iHqUOD6f20S0KZNG5k/f765aPKXU6nHxQ4Z4r8R4ApjxxzyJRPtuU+TcOjvO0qBdiTl/yYkD6SUpLiAp7H3ZJbYoVyxwL8XlZxgz28t6Y81AU8jq5Lnoy56I6lUOQk3wSj71fG0DAm0YpJ/pM5AyYqxp2HZsr2ptqTTqXrxgKcRezpN7LA5I8mWdA6k2nMD6ZwKgf9uKspJsUNukn/LFHfScgrW0gVC+ZTAfzeRYMf9N3m9b90XXdeOwl5RUWN88803mwuhJk2amLboeqdD70xolXvz5s1lxYoVcscddwQ7mwAAP6LsBwDYxZGT4/WC0BDxgbFeCH377bfm+bvvvpuvul/btH/wwQemJuH//b//Z9rLAwDCH2U/AADwRMQHxtY8V9WrV5cOHToUWK+1BlqboC3KP/300yDkEADgb5T9AAA7OXJzvV4QGiI+MD5y5Ijz4sgda/LoH3/80bZ8AQACh7IfAAB4IuKna7JGNNu7d6/bbXQ4cUVzOgCIDJT9AAA7OXKo+Q13EV9jbE0MrRdHy5Ytc9kPzZoc2po7CwAQ3ij7AQCAJyI+MG7Xrp3zAmnw4MFmhFLL1q1b5YYbbjDTdqj09PSg5RMA4D+U/QAAu2uMvV0QGiI+MFYfffSRVKtWzdQONGvWzIxO2rhxYzPwil4g3XjjjWY7f89lCQAIHsp+AIBdcnNyvV4QGiK+j7Fq2LChrF69Wp5//nmZNWuW7NmzR0qUKCFXX321/Otf/5LZs2eb7apUqeL2GBMnTpRJkyYVOc0hg26SW4cO8Uv+AQDBKfu9Kf9vGjRYhgy7xef8AwAA+0RFYKwqVaokL730klnOpBdNymp258q+fftk1apVRU6vd89LvMwpACBUyn5vyv9LevbyIqcAgHDGtEvhL2oCY3eysrJkzpw55vmVV17pdruqVatKq1atinzcs9VAAABCv+xXlP8AAES+qA+MtRbh4MGDUq9ePenTp4/b7UaMGGGWojqVetxPOQQABKvs96b8P56W4YccAgDCCYNohb+oGHxryZIl8u233zpHIFUZGRny7LPPymOPPSZxcXHyzjvvSEJCQlDzCQDwH8p+AABQVFFRY7xixQq55557pHjx4lK3bl1JTEw0o5TqFB362uTJk6Vbt27BziYAwI8o+wEAdnHkOIKdBfgoKgLjrl27ypAhQ+THH3+U3bt3S3Z2ttSsWVN69+5tLppq164d7CwCAPyMsh8AYBemXQp/UREYt2jRQt57771gZwMAYCPKfgAAUFRRERgDAAAAQKA4cmlKHe6iYvAtAAAAAADcocYYAAAAAHyQy+BbYY8aYwAAAABAVKPGGAAAAAB84GBU6rBHYAwAAAAAPmAe4/BHU2oAAAAAQFSjxhgAAAAAfMDgW+GPGmMAAAAAQFSjxhgAAAAAfMDgW+GPGmMAAAAAQFSjxjhA9p+KsyWdaln7bEknq3T1gKex9kC62KFqydq2pJOTZc+dw/1p2QFP45z4o2KHmNQsW9LZm1jVlnSqlqsV8DTiTh4QW5QqZ086ESDJhlvOscf/CnwiIrL+VFlb0rnorwW2pPN7qV4BTyMxLlns0LC0PXUbJRLsSee3Q4G/BihdvbTYIcER+POySj0dY0s65W1JJfzl5tLHONwRGAMAAACAD5iuKfzRlBoAAAAAENWoMQYAAAAAH+Qy+FbYo8YYAAAAABDVqDEGAAAAAB/Qxzj8UWMMAAAAAIhq1BgDAAAAgA+oMQ5/YVNjvH//fpkyZYrcfffd0r59e0lOTpaYmBjp2rXrWffNysqSF154QZo3by4lSpSQsmXLSrdu3WTmzJm25B0A4D3KfwBAOAy+5e2C0BA2NcaffPKJ3HPP
PR7vl5mZKZdccoksWbJE4uLipGnTppKWliYLFy40y0MPPSTPPfdcQPIMAPAd5T8AAAi0sKkxLlWqlPTo0UMeeeQRc6d/9OjRRdpPL3z0oqhu3bqyYcMGWbt2rWzbtk2++OILSUpKkueff16+/PLLgOcfAOAdyn8AQKhz5Dq8XhAawiYwHjp0qMydO1eeeeYZufrqq6VSpUpn3efAgQPy1ltvmefvvvuuNG7c2LmuT58+8uCDD5rnTz75ZABzDgDwBeU/AADBs3r1arnuuuukSpUqUqxYMalXr56MHDlSDh486PGxdu7cabpDFWUZMmRIgf3r1Klz1v20xVhEN6X2xqxZs+T06dPSsGFD06fsTCNGjJCxY8fKqlWrZPv27VK/fv2g5BMA4F+U/wAAO+VG6OBbM2fOlAEDBpgxO/TGtHZL2rx5s7z++usyffp00zJLA+Wi0sC6Y8eObtdrULty5UrzvEOHDm63O++886R06dIu18XGelf3G9GB8bJly8xj586dXa6vXr26aWK3Y8cOsy0XRgAQGSj/AQDwzd69e+Wmm24yQbF2Y3riiSckPj5ejh8/boLlOXPmmJrk5cuXm5raotBaZw2m3fnggw9k8ODBZqBNPbY7EyZMKNIgnBHZlNobW7ZsMY+FXfBY6/TOBwAgMlD+AwDs5MjJ9XoJVS+88IKkp6fLRRddJE899ZQJipXW1H788cfmccWKFTJ79my/pTl58mTz2LdvXzPGiJ0iOjA+cuSIeSxXrpzbbax1R48etS1fAIDAovwHANg9j7G3S6iaMWOGeRw+fHiBdTr9Yf/+/c3zadOm+SU97X/8ww8/mOdaa2y3iA6MrY7XiYmJbrfRkUlVRkaGbfkCAAQW5T8AAN7bs2ePaUqttMbYFau7ktV9yVfajNrhcEitWrWke/fuhW6rA2xefvnlcvHFF8sNN9xg/n/y5Emf0o/oPsbauVvpACzunDp1yjxqO3YAQGSg/AcA2CnSBt/a8r8uSXqDuUaNGoV2Sfr9999NP+SEhASv09OAWANjdfPNN591AK1PP/003/+1abf2g9bHSy65xKs8RHRgrFX8eZvUuWKts7Z1Z+LEiTJp0qQip33N9YPkhsFDi7w9ACAyyv8hgwfLLbfcUuTtAQDRzdPzjNW8WWdYCJQjec6R7gbWsrok5ebmyokTJ6R8+fJep6dNqHVAzLM1o9YBt7SWuG3btqZmWW+A62BeOjCYTiulUzIuXbpUWrVq5XEeIjowbtSokflgtm3b5nYbnabD2rYw+/btM9N6FFXni3t6kFMAQKSU/7179fIgpwCASODI9X4QLU/PM9Y+odIlyR/dkqxBt7R5dmEDZ1rbWYoXL+5sUt2pUyfzOT744IMyb948j/MQ0YHxhRdeKO+//77bIcG13bx1Z0K3LUzVqlU9uvNQqXIVD3MLAIiE8l+nogAAoKg8Pc9Y+7gzatQoee211zzOR5cuXWThwoUed0nytVtSamqqc6Avbwfd0vSffvpp6d27tyxYsMAMrHm2FmFRFRhfeeWVcuedd8rWrVvNB9StW7cCzRZUy5YtpUGDBoUeS5sqeNJcYdfhVC9zDQAI5/I/k8G8ACDq+NLH2NPzzNmULFnSq2bNpUuXdj63gkoNMLX/r6vm1FZza+0P7MvUShoUp6Wlmdpfa6Rrb3To0MHZtFv7Pbdu3dqj/SN6VOrKlSs7f2TDhg3LN1fll19+KePHjzfPx4wZE7Q8AgD8j/IfABCt0zWNGzdODh065PHyxRdfOI9hdTPSGmMdobqwLkl169b1aeAtq3l0v379JCUlxevj5G32nZ2d7fH+YVNjrF+I3tk/s9279iGrUKGC83VtU66LRS9+Vq5cKT/99JM0bdpUzjvvPFNdb32R9913n6lZAACEJsp/AADsVatWLalWrZr8+eefsnjxYjMl0pn0ddW+fXuv09FuTYsWLfLL3MXr1693Pnc3knZE1Bjn5OTI4cOHnYtWt1t3A/K+
np6eXqC9ubaVf+655+Tcc881Q4/rHRFtQ6/V9i+++GKQ3hEAoCgo/wEAoc6Rk+v1EqquueYa8+hqxGxtYj19+nTz3Jfmz9bcxXXq1DEjTvvi+eefN496zq9evXrk1hjrh6UfmrfV6g899JBZAADhhfIfAAD7PfDAA/LOO++YGl2dDkm7H8XFxcnx48fl+uuvN4/aouuKK64osK+OEP3HH3+YgcB0cUXP7R9++KF5PmjQILfTQln0hraOhK1p5+1DrTfHH330UecAXk899ZRX7zdsaowBAAAAIFQH3/J2CVU1a9Y0gWt8fLyMHTvWNK1u06aNqY2dM2eOGc9j2rRpLgNaDYp37dolx44dO+vcxbq/BsZno8e8++67pWLFilKvXj254IILpFmzZmY2CK3V1kHAtNbYqumO2BpjAAAAAIB9+vXrZ4LQZ5991tQc//rrryZAHjJkiIwePVoqVark86BbF110kRnA62wGDBhgHn/++WfZvXu3rF271tRga/60m9Ttt98uLVq08Do/BMYAAAAA4INAjC4dKlq1auXsT1xUO3fuLFJgbAXHRXHhhReaJVBoSg0AAAAAiGrUGAMAAACAD3K9HCQSoYPAGAAAAAB8kENgHPZoSg0AAAAAiGrUGAMAAACADyJ47K2oQY0xAAAAACCqUWMMAAAAAD6gj3H4o8YYAAAAABDVqDEGAAAAAB/Qxzj8ERgHSPEEeyrjjyVUtSWd2OxciRTVT++zJZ3s0tVsSSe3RELA09h+opzY4eJ7P7Elne3v3mBLOum5ZQKeRsr+LWKL6ufYk04E+HjDwYCncUUje8r+/7d0my3ptLmgqS3p1N+9MOBpvJPTTOwwotxeW9I5eMfjtqTzj0N1A57GW28/KXbo8+NrtqSzsOMoW9IZVL6kLemEO5pShz+aUgMAAAAAoho1xgAAAADgA5pShz9qjAEAAAAAUY0aYwAAAADwAX2Mwx81xgAAAACAqEaNMQAAAAD4gD7G4Y/AGAAAAAB8QGAc/sKmKfX+/ftlypQpcvfdd0v79u0lOTlZYmJipGvXroXu98svv8grr7wi119/vTRs2NDso8vkyZNtyzsAwHuU/wAAINDCpsb4k08+kXvuucfj/W699VZZu3ZtQPIEAAg8yn8AQKhj8K3wFzaBcalSpaRHjx7Stm1bs6xevVrGjh171v3q1asn55xzjnO/O++8U9atW2dLngEAvqP8BwAAgRY2gfHQoUPNYtm7d2+R9ps5c2a+/yckJPg9bwCAwKH8BwCEOvoYh7+w6WMMAAAAAEBU1xgDAAAAQCiij3H4IzAGAAAAAB/QlDr80ZQaAAAAABDVqDEGAAAAAB/QlDr8ERgX0cSJE2XSpElF3n7gTYNl0NBhAc0TACD0yv+mPftJ1743BDRPAADAvwiMi2jfvn2yatWqIm/f/dJeAc0PACA0y/8arS8KaH4AAKGHPsbhj8C4iKpWrSqtWrUq8vaVK1cJaH4AAKFZ/peuUCmg+QEAAP5HYFxEI0aMMEtRHTyRHtD8AABCs/x/b8XugOYHABB66GMc/giMAQAAAMAHucHOAHzGdE0AAAAAgKgWNoHxnj17pEKFCs7l4YcfNq8vXbo03+vjx4/Pt5/+P+/6tWvXmtfvuuuufK/r8QEAoYfyHwAQDk2pvV0QGsKmKXVOTo4cPny4wOvZ2dn5Xk9Pz9+3V//var/U1FSz5D0+ACD0UP4DAIBAC5vAuE6dOuLw4o7Kk08+aRYAQHii/AcAhDqmawp/YdOUGgAAAACAqK4xBgAAAIBQRF/h8EdgDAAAAAA+oCl1+KMpNQAAAAAgn2PHjsm0adPkgQcekK5du0pKSorExMSYsT/8YfXq1XLddddJlSpVpFixYlKvXj0ZOXKkHDx4sND9srKy5IUXXpDmzZtLiRIlpGzZstKtWzeZOXOmT/mhxhgAAAAAfBCJTakXLlxoAtdA0CB2wIABJsitVKmSNG3aVDZv3iyvv/66TJ8+
XZYsWWIC5TNlZmbKJZdcYtbHxcWZ/dLS0kxedXnooYfkueee8ypP1BgDAAAAAPJJTk6Wiy66SO677z6ZOnWq/Pvf/xZ/2Lt3r9x0000mKB49erT5/8qVK81jr169ZN++fSYgdzUjhQa+GhTXrVtXNmzYIGvXrpVt27bJF198IUlJSfL888/Ll19+6VW+CIwBAAAAwMc+xt4uoapnz57yww8/yIsvvmhqd2vVquWX42oz6PT0dBN0P/XUUxIf/3cj5tKlS8vHH39sHlesWCGzZ8/Ot9+BAwfkrbfeMs/fffddady4sXNdnz595MEHHzTPvZ2qkcAYAAAAAGCLGTNmmMfhw4cXWKf9hfv372+ea//mvGbNmiWnT5+Whg0bmj7FZxoxYoR5XLVqlWzfvt3jfBEYAwAAAICPfYy9XaLJnj17TJNppTXGrnTu3Nk8Llu2LN/r1v+t9WeqXr26aWLtat+iIDAGAAAAAB9EYlPqQNiyZYt5TExMlBo1arjcpn79+ubx999/N/2Qz9zXWl/YvjqQl6cYlRoAAAAAgmTixIkyadIkj/bRZshW0+FwcuTIEWeTaZ36yZVy5cqZx9zcXDlx4oSUL18+377W+sL2PXr0qMd5IzAOkIqlinv8B6EjsFWtWjWgP/JQTqd8il2fWTlb0omPoO+mSXKxgKeh9k4dbks6nvI2nWJ2pHNu18CnAY8MbVMrYn7Db1zTzJZ0POV1OjWaBjyNfxY9Nz6lI1LHlnQumLvAlnRO25CGp7xOp81LtqQzyKNUKP8D7S3HTq/31cGitF+sJ/S7DEeZmZnOGmN3dHRpS0ZGhlf75t2vyBwICa1atdKGFOaRdEInDdIJ3TRIJ3TTQHT+tiItnUh6L5GWTiS9l0hMB5576623zPfiyaL7uDNy5EjzXXu6dOnSpdB8fvnll2a72rVre/1ep02bZo5RuXJlt9ts3LjRmadDhw45Xz/33HPNa2+++abbfa+99lqzzZ133ulx3qgxBgAAAIAg0Rp8f9bilyxZ0tn82BOlS5eWQNMm1FZTZ52n2FVzaqvJdGxsrJQqVarAvtb6szXV9hSBMQAAAABEiHHjxpklFDVq1Mg86rRLOkK1q7mRramWdITphISEfPsuXbpUtm3b5vb41r5WOp5gVGoAAAAAQMBpIFytWjXzfPHixS63sV5v3759vtcvvPBC87hkyRKX++k0UDt27Mi3rScIjAEAAAAAtrjmmmvMo6uRuLWJ9fTp083z/v3751t35ZVXmhrkrVu3yoIFC1wOMKdatmwpDRo08DhfBMYAAAAAAL8ZMGCA1KlTR+6///4C6x544AFJTk6WRYsWyRNPPCE5OTnm9ePHj8v1119vHjW4veKKK/LtV7lyZWdf7GHDhuWbq/jLL7+U8ePHm+djxozxKs/0MQYAAAAAFFChQgXn86ysLPOofYPzvj5w4ECZMGFCvv32798vu3btkkOHDhU4Zs2aNeXDDz80+40dO9bU9OprmzZtkrS0NBMAT5s2zeXAXBr8rly5Un766Sdp2rSpnHfeeZKamursW3zfffeZmmVvUGMMAAAAACjg8OHDzuXEiRPmtdzc3Hyvnzx5UjzVr18/+fnnn82j+vXXX6VixYpy5513yrp169w2hdaa5oULF8pzzz0n5557rmzZssUE3126dJEZM2bIiy++KN6ixhgAAAAAUIBOqeQNDV7PplWrVs7+xJ5ITEyUhx56yCz+RI0xAAAAACCqERgDAAAAAKIagTEAAAAAIKrRxzhEDB8+XPbt2ydVq1YlnRBKg3RCNw3SCd00EJ2/rUhLJ5LeS6SlE0nvJRLTAcJVjMPbHtUAAAAAAEQAmlIDAAAAAKIagTEAAAAAIKoRGAMAAAAAohqBcZAtWLBALr/8cqlYsaIkJydLkyZNZPTo0ZKWluaX4+/fv1+mTJkid999t7Rv396kERMTI127dhV/0W7qP/74ozz88MPSqVMnKV++vCQkJJj3dOmll8pH
H33k9eTgZ9JJwHXwiDZt2ki1atUkKSlJUlJSzATh+rkdPnxYAuHrr782n5suderU8csxn3zySecx3S1vvfWW+Pt99O3b1/nZValSRTp27CiPP/64ZGdne33cnTt3nvW9WMuQIUN8eg/6HT/66KPSrFkzKVmypJnkvUaNGnLttdfKkiVLxF/0b/C5554zvy39jWlaLVq0kPHjx8vp06dt+RvMysqSF154QZo3by4lSpSQsmXLSrdu3WTmzJl+SeOXX36RV155Ra6//npp2LCh8zuaPHlykd8fQrP8p+wP3bI/GOV/JJT9dpX/4VT2+5IO5T/ggg6+heB4/fXXHTExMXrV4KhRo4ajZcuWjqSkJPP/c845x3H48GGf03jllVfM8c5cunTp4vCXefPm5Tt2vXr1HK1bt3aUK1fO+dpll13myMzM9Dmt5s2bm+Pp51SnTh1HmzZtHLVq1XKmU6lSJceaNWsc/nTy5Ml8adSuXdsvxx0zZowzzx07dnS5/Pe///VLWllZWY4bb7zR+R5q1qzpaNu2rfmuEhMTzWv6Pr21b98+t+9BF/09WGlPmjTJ63S2bNniqFq1qjlObGysyX+LFi0cKSkp5jX9e3r55Zcdvjpw4IDjvPPOc6ajz/W3FxcXZ17T392JEycC+jeYkZHh6NSpk9lW023WrJmjfv36zv0feughn9Ow/p7OXN5///0iflII1fKfsj90y347y/9IKfvtKv/Drez3JR3Kf6AgAuMgWbFihSl0tSCfOHGiIzc317y+d+9e54mkb9++Pqfz7rvvOnr06OF45JFHHDNnznSMHj3a7xdHc+fOddStW9fx2muvmZNKXh9++KHzYu/BBx/0OS09sf7www+O06dP53t93bp1zpPZueee6/Cnu+66yxz3yiuvDEhgPGjQIEeg3XLLLSYtvSBatWpVvnVpaWmOL774osBn6k+TJ0826ScnJzuOHz/u9XG6d+9ujtOwYUPHhg0b8l1I3HfffWZdfHy8uYDyRc+ePc2xmjRp4ti6davz9Z07dzovJm666aaA/g3efffdZjv929q0aZPzdf2urL+pWbNm+ZTG1Vdf7RgwYIDjpZdecixatMhcgHFhFBnlP2V/6Jb9dpb/kVL221X+h1vZ70s6lP9AQQTGQWKdaG+++eYC67RQ14smXb927Vq/pjthwgS/Xxzpya6wE+vTTz9t0tRahJycHEeg/Pzzz847nhs3bvTLMX/66SfzXej3pSeLcAyM58+fb9LRWpai3un2t65du5o83HDDDV4fQ/Nu1bC5qknR4KJBgwZmvf7OvaUX2tbvaOnSpQXWa62UFdT89ttvAfkb3L9/v7M2R7+/M1kXPq1atfI6DVesoIwLo8gr/yn7Q6fst6v8j5Sy367yPxLK/qKm4wrlP+Bw0Mc4CFJTU2XOnDnmufaZOpP29ejevbuzX1WoK1WqlOlX5k7v3r3N45EjR+TgwYMBy8c555zjfJ6enu7z8bSPz6233irFixeXN954Q8LVSy+9ZB7vu+8+01/KbtoH7YcffjDPBw8e7PVxTp065eyvWL9+/QLrtW+U9bp+d96y+qlVr15dOnToUGC99vnSvqCal08//VQCYdasWaYvm5YF2q/sTCNGjDCPq1atku3btwckDwiMSCr/KftDW6SU/XaV/5T9AAiMg2D16tWmkNcBMNq1a+dym86dO5vHZcuWSbjLyMhwPtdBIQLFOqnpQBmNGzf2+XjPPvusrF+/XsaOHWsG9wiUtWvXmsEv9GL4yiuvNAPJbNiwwS/HzszMlO+++84879Gjh2zcuFFGjRplBsa54oor5IknnpBdu3ZJIH3wwQfmQqJWrVrOC35vVKhQwfk96IA/rgZMWbNmjXnu7u+qKPQi3ro4cqewfPiD9XdvlQNn0rzVrVs337YID9FU/lP2B6/8j6Sy367yn7IfAIFxEGzZssU86snC3d12687n5s2bJdxNnTrVebdVaxj8KTc3V/78808z
iqJ1R1pHk9QLJF/89ttv8swzz5hRKe+66y4JJD2Z62ekI9Tq3eJx48bJ+eefL/fcc4/k5OT4fNFl3T1fvHixtGzZUl577TWZO3euzJ4921z46YWk9R35m14U6cWRuvnmmyU21rciR79brRl44IEH5J133jGjcWoN0fLly6VPnz5y4MABufHGG81oq94qU6aMedy7d6/bbf744w/zuGnTJglkGeGqZiQSy4hoEk3lP2V/8Mr/SCv77Sj/KfsBEBgHgXVXsly5cm63sdYdPXpUwtnKlSudU07olB7+8uqrr5oTZFxcnLmDqtNA6FQa33zzjdxxxx0+n9C1GZ1eVEycONGkEQg6bcZTTz0lP//8s2lmqHf4161bJ7fddpvJg77HRx55xKc09u3b53yun4teHOlFhNZYbd261Uxxoc8HDRpkarL8TZvR7dixwy9N6dQNN9xgLh616aR+R1WrVjVTWVxwwQXmgvbNN9+UDz/80Kc02rZt67w4cnVH/tdff3VekATq7zOayohoEy3fLWV/cMv/SCv77Sj/KfsBEBgHgZ4Alc6/5442szuzKVq40bu3Om+izpF49dVXy4ABA/x2bL0g0rvCekLUk6NeKOmddz0pHjt2zKdj68l16dKlcuedd5o5MwNF+xdqszlt9qXNxPQ715oCTf/555832+gcg9pPy5f+jBbtL6cXj3ry199egwYNTG2Bzs+oF4JPP/20+Js1H6I2CyvsDrgntm3bJn/99ZepgdALYp3PUt+bXghqer42Q9Tvw7pA0gs6bVJp0QtKvTizanL80Z8xmsuIaBQN3y1lf/DL/0gs+wNd/lP2AyAwDoJixYqZx8Imitc7uYHulxVIx48fNwOv7N69W1q3bu33CeP79+9v+pXpXV1tTqcXRnqhpCd7HbDC2yZoeqdY79LrxZc2aQsWHSxFaxT0wlLvkPv6W7NO9GXLls23Xi8utMme0v5o2jzRX/TCbMaMGc60/UFrPjS/eiGpNQRaI6FNBg8dOmSa12nti140+9p37qOPPjKfv9YO6IWXXkhqs0MdeEUvkLS5ngrUgDbRUEZEq0j/bin7Q6P8j7Sy367yn7IfiG4ExkFgnaCsJjOuWOvOPJmFAz0p9urVyzTPatq0qXz77bd+7192Jj2BffXVV+aEqRdKn3zyiVfH0T5lJ06ckNdffz0oo3hatAmfXuwpPRl7K+/vJ+/IrXlZr588eVIOHz4s/qIXRjogit7N14tZX2kzQ61N0X6ZOlpvo0aNnOv0AmH8+PFy8cUXm+9PB8/xhY4Iqr/fe++919R2aL8yvfjS2q8VK1bIeeedZ7arUqWKBEKklxHRLJK/W8r+0Cn/I6nst7P8p+wHohuBcRBYBbreUXc3rYA1DH/ewj8caPOiyy67zNzN1xPMvHnzpHz58rakrRczXbp0cfZv84ZOgaBuv/12c+LLu4wcOdKs27Nnj/O1QI1MmbcpldYaeEvvcp95vMJqFvxZa2DVFPXr188vF5paS6R97/R3Vbt2bZfb6IirSi9gfFWpUiUz3YlemGrzNr1w1As+veC3mutZze78zfq712aD7oRrGRHtIrX8p+wPrfI/ksp+u8t/yn4gehEYB4EOgqEnKm0Oo4NhuKKjSKr27dtLuNATiI4MuWjRInPi+v777wN2V9Ud6yLCl2DS6iN35qJ3oq0LCOu1wpo7+crq3+TLdCHaLNC6iPj9998LPcnqRZK/LmS1iZv+DvzZlE5rNTztpxUIGsxY89Dq9CqBcOGFF+abhsZVs09rYBtrW4SHSCz/KftDr/yPpLI/VMp/yn4g8hEYB4HeQe3Zs6d5PmnSpALr9S7l/PnznXdcw4GeMK655hpzQaQnZM1/zZo1bc2DNi9auHCh8+LTGzrQid6VdrW8//77Zhu92LBe69q1qwSCNg207kxbd8G9dd111zn7Trm6aHzvvffMo9a4xMfHiz/nr9TBUfz1GVl3x/Xvw10fMmveTn/MZeqO1iTo
KLL16tUzwUAg6EWXNhnU96rTuJxJR8y1fufaBw7hI9LKf8r+0C3/I6XsD5Xyn7IfiAIOBMXy5csdMTExZpk4caIjNzfXvP7nn386Wrdu7dCv5qqrrvJ7uhMmTDDH7tKli9+OmZ2d7ejXr585bpUqVRybN292BMLChQsdY8eOdezYsaPAupUrVzratGlj8lC9enXHyZMn/Z7++++/b45fu3Ztn4+1fv16x/Dhwx1r1qzJ93pOTo7j448/dpQqVcqkdfnll/uc1l9//eUoXbq0Od6IESMcGRkZ5nX9zb322mvmdf0dLliwwOe0rOPWrVvXHHfMmDEOf0lNTXVUqlTJHLdt27b5fmfp6emOBx54wKzT5b///a9PaS1evNgxZ84c89vOm8YzzzzjiI2NdcTFxTnmz58f0L/BO++802ynn+WmTZucr8+aNcuRlJRU6Pv09u/cKnv0t47IKv8p+0Oj7Lez/I+Ust/O8j/cy35P0jkT5T/w951PBMkrr7xiTkpaENWsWdPRsmVLZ6HXuHFjx8GDB31OY/fu3Y7y5cs7lxIlSpjjx8fH53v9+eef9zoNPZFbJ6Q6deo4Onbs6HZZtWqV1+l8/vnnznT0IkwL8Xbt2jmqVq3qfF0vjFavXu0IBH9eHGkerTyXK1fOfPd6si9btqzz9c6dOzuOHj3ql7zPnTvXkZycbI6rF0qalvW56W/whRdecPiLXmRZx/39998d/qTvw/oN60WKXjg0a9bMUbx4cefndscdd/jlb1OPpcdt2rSp+X6sNPRx2rRpAf8b1Iux9u3bm231Yqx58+aO+vXrO9/nfffd53Ma+v+863V73a9kyZL5XtfjI7zKf8r+0A2M7Sz/I6Xst6v8D7ey35d0KP+BggiMg2zevHmO3r17m5OjXhQ1atTI8eijj/rtrrfeYbcK08IWX+7uWhcNRVl8uTN94MABx8svv+zo06ePOUmkpKQ4EhISzF3kbt26mXUnTpxwBIo/L470gmfcuHGOyy67zFGvXj3ne6lcubL5PUyZMiXfHWt/2LJli2Pw4MGOGjVqmLQqVKhgPkutjfGnQYMG+b1mKq/t27ebO+pNmjQxF3z6XvRC78orr3TMnj3bbxeuQ4YMMQGKfjeajv5tjhw50rFz507b/gZPnTrleO655xznn3++yYNe2OrnOmPGDL+kof8vyn6uauoQ2uU/ZX/oBsZ2l/+RUvbbUf6HW9nvSzqU/0BBMfpPsJtzAwAAAAAQLAy+BQAAAACIagTGAAAAAICoRmAMAAAAAIhqBMYAAAAAgKhGYAwAAAAAiGoExgAAAACAqEZgDAAAAACIagTGAAAAAICoRmAMAAAAAIhqBMYAAAAAgKhGYAygyHbu3CkxMTFm0ecAgMhH2Q8gGhAYA37y5JNPOi8cPLnImDx5si35AwD4H2U/AEQGAmMAAAAAQFQjMAYAAAAARDUCYwAAAABAVCMwBkLU+vXrZfjw4dKwYUMpXry4lCxZUpo1ayaPPfaYHDp0yOU+WVlZMmvWLLNfmzZtpGrVqpKYmCiVKlWSnj17ytSpU8XhcBSa7t69e2XEiBFSs2ZNSUpKkho1asiQIUNk27ZtAXqnAAALZT8ABIkDgF+MGTNGrzrMcjY7duxwbvv+++8XWP/88887YmNjndsUL17ckZiY6Px/1apVHatWrSqw34IFC5zb6FKqVClHSkpKvtf69+/vyMnJcZmvlStXOsqWLevcNjk52VGyZEnnsT799FPnOn0PABDtKPsBIDJQYwyEmHfffVceeughU1Pw9NNPy759+yQtLU3S09NlxYoV0r17d/Nanz59JDU1Nd++uo/e8Z87d64cP37cLCdOnJDDhw/La6+9JqVKlZLp06fLG2+8USDdkydPytVXXy1Hjx6VWrVqyXfffWfS1dd//PFHU4ugxwYA+B9lPwAEWbAjcyASaw0qV65c6FKhQgWXtQYnTpxwlClTxrw+Z84cl+lkZWU5WrdubbZ55ZVX
PMrj9OnTzX7169d3WVOh67R2YuPGjQXW79u3L1+NArUGAEDZDwCRghpjIAAOHDhQ6OKun9hnn30mx44dk5YtW5p+Ya7Ex8fLwIEDzfNvv/3Wo3xddtll5nH79u2yf//+fOs++eQT89i/f38555xzCuxbpUoVue222zxKDwCiCWU/AISv+GBnAIhEZxvkZOfOnVK3bt0Cry9dutQ8/vbbb+ZixJ2MjAzzuGvXrgLrtPnbW2+9JbNnzzbH0YstHZjlTH/88YczjdOnT8uvv/5qnmtzPXd03bPPPlvoewOAaEXZDwDhi8AYCCF//vmneczMzDTL2Wjfs7y2bNkiF198sbnwydv3rEyZMhIb+3cDEa21UNqHzHLkyBHJzs42z6tXr+42PR2lFADgX5T9ABB8NKUGQkhOTo55vO6660zNw9kWrX3IS6fW0AujOnXqmIFWdOAVvQj666+/TPM5nY6jqDUbAAB7UPYDQPBRYwyEEKt5m6tmcmezZ88eM4Ko0jkrL7zwwgLbnNm3zFKuXDmJi4szF2d5L6DOVNg6AIB3KPsBIPioMQZCSMeOHc3jypUrzbQcnl4cWXQAF1fmzZvn8vXExERp1qyZeb5gwQK3acyfP9+jPAEAzo6yHwCCj8AYCCE6Kqj2CdMBU+69995Cm7zl5uaawVUspUuXdj5fu3aty4FZxo0b5/Z42oRPaTO8zZs3F1ivTfJ0YBcAgH9R9gNA8BEYAyFEL4xeffVV5xQaOsXGzz//bC6ElD7qaKMvvfSSNG3a1Iw+atFpNmrVqmWeDx061NQ8WH766Sfp2rWrHD161G3a//znP80AK6dOnZJevXrJ999/77w40zz06NHDmQ8AgP9Q9gNA8NHHGAgxgwYNMlNyjBw5Ur755huzJCUlScmSJeXEiRP5pt+IiYlxPteRR//973/L1VdfLRs2bJA2bdqYUUmtEUxLlCghX3zxhbnIcaVUqVLy+eefyyWXXGIGdtHtdH89bmpqqqSkpMg777zjrF0AAPgPZT8ABBc1xkAIuu2220yTtvvvv1+aN29uLo606ZxeIOlFz1133SVz586VgQMH5tvv8ssvl0WLFpnaBq2B0Gk4KlSoYEYs1VoEnc6jMHrsdevWyS233GKm7tD9tZmeXrCtWrVK2rVrF+B3DgDRi7IfAIInxsG4/QAAAACAKEaNMQAAAAAgqhEYAwAAAACiGoExAAAAACCqERgDAAAAAKIagTEAAAAAIKoRGAMAAAAAohqBMQAAAAAgqhEYAwAAAACiGoExAAAAACCqERgDAAAAAKIagTEAAAAAIKoRGAMAAAAAohqBMQAAAAAgqhEYAwAAAAAkmv1/tiyGWAYuweQAAAAASUVORK5CYII=", "text/plain": [ - "
" + "
" ] }, - "execution_count": 25, + "execution_count": 13, "metadata": {}, "output_type": "execute_result" } @@ -372,11 +398,18 @@ "source": [ "plot_multiple_components(components[::-1], names[::-1], save_path='plot/patch.pdf')" ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [] } ], "metadata": { "kernelspec": { - "display_name": "base", + "display_name": "mechir", "language": "python", "name": "python3" }, @@ -390,7 +423,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.12.2" + "version": "3.11.8" } }, "nbformat": 4, diff --git a/notebooks/plot/patch.cross.svg b/notebooks/plot/patch.cross.svg new file mode 100644 index 0000000..6eed501 --- /dev/null +++ b/notebooks/plot/patch.cross.svg @@ -0,0 +1,3293 @@ + + + + + + + + 2025-04-02T10:48:51.174872 + image/svg+xml + + + Matplotlib v3.10.0, https://matplotlib.org/ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/notebooks/plot/patch.pdf b/notebooks/plot/patch.pdf index e641065acce3c8bdeda09f77fae86a5449ddfec7..da20b2c9c3fcd119d13c2d2847d3ae23a12e48bd 100644 GIT binary patch delta 13310 zcmZvC2RxQv_;>bB_A@A=kiGXVdu8vD5X!#IxQ!69S4Fn$O^QSmMMm~|%use@ulIf) zeg5zJ|MmLZp9kkY=Umsh&h@>n>wM3X+kv&wfmM}=1@_c%HUI_7K2ZY6_?+`1!SpZl z3!EcZYv8ER*sE-8FJHHr5+_=7oa{}1xn^Z}Pak`}nav`hk-*+?xIz2Up1d9Q!FKxQ z`D*9phx4=+G8WPiEp;9zQ-L_ge5RlYa5@ zp4qGUYw{-Ms@;ek7uu}0#x3OIlM|fL)~tq?@nGeF&mr;d4mojEV$?>$L$kNNw_4b0 
zNzQk3sZvr#Yqn3`m1?rXoA@}T{WjMcamt;ENX}a+E$~Xzvayr%M;3El?R<>VqO%R7 zES4+r-mZH6`N=T-@Ul(SME-h2QOx4aALO3&#oP(RuI}lJukWKAn-3n}H4sg>n(uFa z?EwKuAQ7vVri>@*0S=OHhAveqwnn$fM^Q5^N;IoDZH=(DVBt64>~ZtgIEKaS?qzo- ztWXwTVPD|ctH$l2Vf=l^`cBx(_@lh<5k;Yv3)B61yOKKeGAyqQ+Oh0QBQTvgNR-bK z+S(MJQZo{BaOj2o*~HEr>A-E!=kQ*;0g5e+e+Ytw#5ovW4ulnP2NNeG)<{yXhNTBR z#Hy)El-E!`EjuD8<7}-U-$32`dYO%cghLN+x33tK?K+TjJpO5LxEo#5MMT52P9PaK zoiRKxV)JU)i*NeTAJbwI+iw_!FR>5qeCe^&C#^aD+7=Zhh_!A%T-iHW^}HEuqyptK z3L35nlBY~Ouw1!%r$jAZKL1%uxRUCjMB+hiRW4eEjAtauL=ud~Mk+Zs^UsuWxXr%4P23Byyj-UUV~eypyNa zwea_ws&-`{QZFc}5#E$|{f_cs-M8sP9T7_sy&|GwnQ)Pot+D0PkyR-}$N94A7%Q`3 z>8)1Na1j`K+pBd%Y)A}>h=0xCv2@ppq`$eDRA=!#ywL9iN5tO740Hw}E@AxRe>!Ff zN;l*W^^B)bJBF;-YeZk-021FYqIX&HcEhGB>g3imZGiM_hMl~r&dQQuFtHHRZY@8b z)r~d8?HsGHlK8U|^M>MdafgwYyVl&J0#?Sp>+}uNIy$Uh4OH{-i!;HPyqcN9G0bv7 zrRz_F2|iiXifBo0+S}N8_TG08nc;pRK+*bl$%pk9OXd*dVh3tVWzmYLq!F4xvJK?W zvIn1;b*rM*ZW-m=$_FQYPSxm$v=~4uUXek>M^E@UiR}E6I*)7mY3KEDK+|PwJmyG#)dCU zy5$)JtWvkC(l`)BWgomoz6`qSbdu_6<`evqUO{cSHg?pFz9zAD8*%ZO?ie;&)*1}@ zyTL}mm=)KqG%w~?kQe1EQ|O$9gB+~bEeklfw}ze!^)>#)H!5Dm!9{~a%Sncd0|*Gh z7_?o$)PDS1Q?uGtv&rmtZ=Yuv|2ZU^MH}d8N~30Wjl910Ik&7*6K9~37s04>+0NL# zKkl(oY?|it)bshV6jze9$#tZAoQ3m;cCoN&9Iz+s%{}q+hX5=Po5oRorBLWJHKnS7 zA}oZaME)qw|EPEC_T(o1`3pYZ9u4t~m}^Q9DzIONiQBcpoUf%hiB*-o_cB;<6f3sD zzr{Kb>e*MapPD3YknMh5N_oE{Gf|Dhp5H^ASnB-Kw3AlJ!}(o4na31`ne)_E`4t33 z$;xCp&7+OGZW=eigYpPx56RMYFRX|i6W3^uWhzclG?1A;cO3{{kS!ZlTpF_q6VRzu5eb7E=O;y6U&uG!!S zGXHw>WQY$7kAhIn&;D>BHBtO#wiPy?@=o3DVQn3eb2@ECg5tn-4Nk0?ucs;0MDksQ z=2sk4iqj7{ZY4gc(-L?AdGAi(ojZx~dm?5%6bG-~TWTk;6oCN~H^Ner#T{BguFEMO zBEM2l}RKO5@VO`(jGD=F0XS5qBp$p+N5DF=EFz=MNgPK_f`S_Y|_ z-~&?&@sw_Aa{Yvem$v1lg^meYodiXd$%hB&N0H~HG9UU+H0jrEF7ux(w~rG{<~&~i zDWPK-Q2OEIl{M(GOL%R@onc$;!)ekHc&+7o%jv=SwVAV|qfVFMYeQJMK2>wmv_7Of zm+F7wGgidiaMS(<;$x9w^GaI4u4H8Ky|euwvq6%&rP|8lvT1@BOe z1pex~$R9-~_VFL(@Hd*LLJU@M6}Xygwz}IZ=xv$L;rF+|_~2vfZ*fPkkJ{ZEFxsexnPuyS`tz-RN^BFu`K@ zr8V!ax))I{X!^89BK+qyujqj_m|_H*sB#U1ZyCEU&ruj#Q|PSmvzgrYZh1KMz6iD~ 
zzZY|kJyc!|FLpA1{*WkG-aoXQIsdJt9BULq;l6AqG;I-;{ai2Pc2(m1r1SjjY}pXi&$~ex+SfO*IDEhR4gKnFdcf`i-T6V<=GjrBf5rYTS2>vF?P#ZyS&D znP}Q+Nm7uG_`kEzr=`_lB>b6a@SBU3z4xv!rSq#bpPH(8G-NVTtb&r}xU^)_9_wni zMYmB=6NPlyqq4E`pY<($<8re2%(`{QS`H076=24lUovhiBbt`0=@lnuw(=EFM|%C& zTx#-f-%m@GJ>`n?(QP47EKKW2={VYs&d-n71uMihFa@Txos;xrOQ6+}4-b%!+?`tF+e@U|<9@IFX^&Uo>%087mS;EB% z41%AZgamCHE7++)iPTay2V}_)7|9+?)L}h0RA(b(%UX;>YOdIwcSYjXH!ic3q$V~N zx9G@=$?lisyuMN3yj0ng%!C$)=LiK9T@WuUF=$VvFH10EsOOl5iWs}go@>()iaUE8 zY{|HFF5SK9-UZI#6)wawS6zEm*|^xP`RgT`Pa#)Z?*=^2a zu@$H1kg#fp>h-OdL*)kh5euGQ_J+5aQ}I>XN-yJ%Ilxl6D3$LXWU*D zWbbc#VBtB&uQ+_oU(9#*lU517TI4@q8wKe;{th#yj^sA9_#_V|Jiqu2Z_#+-nXmg{ z)Ia4K3&G*X-6|+gOXXmwAT{X0(e$&na-35k{v(U3#qa7!(Po{Djv@Y}WJ!+qf>Ev z-tVsb;v+<}vb*#}2*s8AzQIrH1fvSg+Z&;h1Z*c!E_$sm* zz7$27IQnWTsBf0+2^j{vWg7QQG*@1qb%8{9Sc?RwXRYz5rNYB|jiYW7r6DmDeC!SZ z6ZbI5J6=a*XOv+LqqXFFrUK{r%4}`Q{tu>VS7rCzY>*mHq;{rPt15o~N#)fx$EKOh zcOI#Hu6;#Y`cdus@Z?67mJoRr+2guWJ2G~_BM<`^5lLO$UkT9^ws;`5s{F*vmU_$> zYfaLhzF~0wb9T{q#$m0DD}iWn`aTJJKUCeU)Iwm0JjZ|`Z%O93SUV_5ytw58541Bn zIwHU6`VGkXB_9o&LW?Z3lnqAYACYMd6CP*|$YAhrq$`BooV4>7T*YW`wZvmffy(>& zelG0A>6@50EelZ^!J7rg^2E?lz{RMVWi}_cIEtCywXi3=b#J03s&2JBJ`g}I1q-E=u@>Ts2Q8G?=`AePRGPf!#Z1sZJXAs8h&M=eLahuREkAdHoO>4nEr^@k z*LXCPNV&(CTNWe_WtRMM3lnd7R6h9d7MI0js#zRTC6$$`bl6mPHi4yl)oa(GVUuM4 z(+=fjV!brkFc{>zTHg$ab>7D%JiH%DVz=`)(%gDhrNdxh-1=R9THd=^S%66IX+q=I zv(|?NvY(f)wAX$Iv_#z^r*QuSf=PD9Q#H3MmfgpR6wycT}SSvM`%;ZeQY_lREoB zt<8t0XfJ?No6+ipRFe$gYq+$TzlT$sf=$y`;voygp}>$g_u`ivz>ZQVmYK-NnWAg? 
z2rVeJ+3ix*(S!_Pa@J)yDYt+4K4Ymr^C*b(TUp_?if5i#-#D0b27aQ7cG`M>paP5C=6rFq z;!i(-pW0bAB063&FSMX_9_AegxXJTGS*3K}HGGnbM_9`=Px&zSUd?0xwzo9BkBI{M zP+wx5-=uDF7$2+n$X}^us`*Q~`0-%ClS|$#PWg?uOE{QN%Ui^Ro@VvVUzqOnSaRFP zkk)WzMXd@u7Y=SQgw8~xPmW|vsKQNR@TsSu<9?I{FqH)xAIMsgQwRaSagf|HqkrBs*y;a&%WV{N$O0vZ*d#iVMo|6X-h--{QU0{93Ra<_%CUF%zyr7 zVO6PQpZ6{q#(M~CQi1DJczwTd@FAYdk?@zanxS-l-rc6rIv)t z+hb&S-Zo#Dj?x`@nf@DYa-H9Bzg$kNQs(`45?`2qI*#{HcycfK#CF+z59l(^v51IR zqR;p={w#Mb)*3Y5i434$R7Xfhlk;yCwc^0zQbKZ4QM^7*?md-!ic)?oOp@&}$BYe5 zbaZZld2CGzTbW3e*!keVKTlPClGB$jH~IAT%zx6k>(yDsNq#0S8WoZHo%zwl&B_8gaT!Uahp2ztc=(6S`mw>!w$KEN`q*5< zr!`3M=?(A`?dRV6p4oxE21Q!F;oX`nj@PCtr8)FAmOpIq`g7W$E*A%Khb>5V(=3xN zr7fBox2%k5tIUu4zV^8xrlzE9Hrx2dobq`nOQhSw{g>kCCc(HcwH8gQTZtT@gWF@h zb&s#J7vQj@nv$Lus9{qjvJi2A-J{~{1? zSfyd|(o08GN%_y-mQEV(Yc}6bj_pa|_59?^@y~x=^UHqKYIZ{fn-1L)8}E~O{c;`E z^JpH?28H?i=#5G4vNrPI>I*IseIUwBt+EO0znaeMyxYS%{)9MoVI zBOws2`AYxbdSz|e*Sy?0@|haL^QWjv@NAs`c~V5y^z`xY`ho~}x|n9^vBr(8If0S3 zXlcwkSplWwrO&@c^1F?h1@MV_HJqi|cP^H9#G<&!`8Kp-KKM=DrEikh3i{!-#%*XcrMWWP!Up8j90H-;vkuIb6b_!mNYgtg6Nwf8FHH003v%2W1|^5O2!f6 zh8<9@qrlmYdNOzCrm1-)Xumc{-DKp6h=-yOSM9oK9YA!`U=Hdp}2VK zwkql&2ZrqFmBmsnPItm%9|E5YY1IHw()1|DP@F3DGZ3>nNQ`ESz=+}^Z1M3X481n2 zH$)zKmO~r0ABOuB&@{!%3~GE~ux50_;xb94=`V1vpfD(<@o(p{9!iKc-D$v(cRP_c z?4OA^SWaDOCW)T*(=!VtVlw@{{fROP3ix1{QE~c5JT&9ihv0z*>Ac10v0O?spCms1 z<^$K!<4g`XrJ(t})aP_=`Lp%U#MQQ<%&Ch-M{S(^p1 zb>_hsI~WsPg4jAbLNpnjRd3BQ;2{Zzh zDvFdT2c#D=E<@EyhBYD%r8S# zVBDkkSA6Tb`d*4wXBsA>%9qV;EY8YanU7rtyE;e`I_Vd-9iCv4-0fP>*}lMNBhAmV zUdHSX!gtoyxkti7KitHtbC%06Fm~H_p-F4ZmuZ{u%4643qn9_jAd;?+EG+L{jRShf zPsBUsPAhdv64ae>28DU8ezC5XcX8jg3Ax#u?I_&2_o|7lNlsZ@$|u{1+55#D8Qbi{ z$S0|;t)kke&P>o;X5fc!t#^;@2C}Mt%g;}5Zr|4RB!M#Nm8sAc!r1w{_MXi$W_>Tm zA{S5NKS|Tq|BPh}kLYd?J!pEG7kxS>K9WMfeAV_@$|{Z1I0Fv3_F)}Mx=0fo6^vbB zwU{Hd>7VKnpa-p{y=wc(4OXE{j+b)`Y+KvILY4SSaf7+0Pr(`qLg%+?vD*#|(;@SV z6Z#eVN>vVg?5|2uY3YzOntk*`N2pIVNp zx$)0YqjsJ`Z|9!$Sj3h4Gw6x>x=Gb8d$rx`YHP$*e@|aVP}yQ@V=nRxZhFOnyS8MK zq~@O7LFAC!;1iE3 
z2M0A2iW-+f9@OGH+kEt2XiLJsYvPu}t4Gk;ur;owqEuJ*@xjKsK=X1lqa(eGH?__+ zj1y%aCm!EY65LY>W#f9Q4J!*(Q1C4En3{M~68GKF;JuW8pPlOJm9!c`Y;>U?QfC~x zO!Nc1=cBltFPCg!#zs2)u{!Zl*0@Wl**gPOb237tqjl$qQAv z-0cosY^-URm{6MCH)&Y6-6q{ON^}47lMX~*Q8GRnX$^nSL$>9hmso&DM;r=Nxi~zm zQTQ4lWYcJ)Hu~t!dG+z8@6PF+zp8Bg>~(v0LzOJ|La3b?QZfHem)zHZDB?RzrygEr z54bLqjfK}d)%rkBoB9=%@4p$8f-f0r^|?lt?AtqeawCUUUP3)ik|D6QEPXU675^k8 z=yRJa!?%e@?EZlu^P(yvXoU)G7NFO@WG}LEA<(Neuqu|jX=?#<>ywHqC(B_6_fcHtvB0m^-=q1z4q+iO;#-6y5qM9;O;NocZ zl&!DBDE0nWjLuNOJWqT7Zaj2SUp7BD)p<3p<*3!96@APA#+;fwu^p^RRY=Fw2(qf? zNsMyUuMJKmNbfGTG!Q0ImpbnDO-z>97F5W^=Nsvlp1;+5NktX9)^Pu=f_wDzMpD6P zSDe~nzbxjy6-see33bYdf68dqAAe)Dh8To;{6s1z$KgH!3PXbx1dd9Se*KX>%DoX{ z)Tlva&9Y+smMpHHu=X+skKWEVFAueQ9#j9K#~-R@+H@=A+Q(Hr+z{i6+$oQ{aX625viKB5b-9AD z8f`LlHLRdi#b(uOykvr?CZeGuRsgo}6+jkh&lvfz3n|2qlyCAE zQq;b2amB#GLrB6tii>tIm+sb>G+TQVsykUMy`oAL3-9ufln-8^EMnw{F$yOpvCTn< z3N!TDI~Up(N}E2JS-z*GFn?;1&=c9A|`C! z%>X;b`-4nH)9yv3h30l&eU%wuXsY+9+EF;MRSXD z04kZA4oD!Rv+F@f|CO~T%ZPy|CLrj>OKRzlxj7SAq2x}av1Tp$-h6yw!{SGQ{xU+% zK#(q5oibGMxuA5rRdEz^zzdA(2SHkjUZCBd%lCaOH57&X^RBQhJ*-0s7tYr$OGHi1q;gcpOkxW`x>%m zUgDHAs#&${GVZtdeVDQ4F9k)ITpHdQN|B;5tNbghnZh;% z9jrF#hMB!1&3un<2zNRUR5ZCQ9afN#kPCO}0~l8RkQPymv+QiNy<4>Ao9^zcag2BN zR?AOgn`zVZod(6bYCP~wCI%P~Kw%-OXn;O%NY_$RV}6~D$<#|y-NOR|;8VVfK;qy5 zlT~A0%AS?CuDZNr9`nEf3Ltuzs^IqztmTIFds-c&M`M~d$mM@!qXUo9fIe%^!rG$iCmXddJ1Vf#sbcW{R?&VhXO9`~yrl8KWw3n{s;dzKNqp$(F?Kcf;3vizqQGGlIw;x@;JpG9$Il*UMA`G%C5cn z1M=D*z8^8UcYjMqK@G>j0uwBjP&XevaA52vG2$d0`e^;FErGbfES~?t#76i{Z@-Sg z=O6#3aWL_5*k>rJs{1eH6chy0^I%f%b$m#n3~}If^=GZzX#a`X4gN?r>5eo zU%dL(q&ozKEBlLb!Izv?q)yNZj>}1lzcS0<-Shvg+<5T?c|x*otJK`=#P>98JU) z0qdqz&D)`{bdm-R?LzTbH+cZ zyT%;1Wa)Eu&@=9H=4Rf&P9}0WQ*4`hiQd;L+wU@{7oUW$E+>Zv-T8VA1Y((=^QB>Zo!k;J%ii-QIUmvj)W*m$(c@)L0* z?E=5Ey$!Pr?DJIYvN*)OgrZGT2M#XkM^&Cv4KA)KS?El)>HewKAt#fjXgN+0BuHT- zsWVsk@PT=EyWU6P;49f&U38_P=E@BxIig*`#$|~_suO7~^i9fLDHquft~xw-4AB>m zQcgGe!xH`88S}~u#jH5&9e1fTf8vC0OBDBizi%{u_xsvlK(mZQ3;A8?aqZN&kD@@N 
zE*B)E8oEP4^dR3PXuanfsRp0LgM_OHGn1)=HSqBksq@E(x;qKe%d9*49dT!)# zUTehT?2W<AoO=G#@% zjm7rD-L_C{jW;Ecr9)43!k=JpKD_n#8 zjUjz1!$cuSZ_v&BYr87i+bJGazeUUZoS={*)Be-ro?WWmQlW4HTxZ;yUtcp3yYS`u zkFC-A(9$WAtA;6!AW9qB-L`G!=GEIFy{p3|@#cB6M!x6i%)P7k9Y17tV@}e@OGU{U zU42Ju|6KU<+=Bny$NH(cc7el>H) zvBVMxFK79EWUc!?F#{qoxvPBZ%oFL@OA=w7BX2Nt5HsYr3{kvWRar zObvZC7MIU$IJ8Z}UFLM_dq$=Ia6*NhfsrqP>ReXhqk~$!tE%P&nR;lp73>uvkj7X& zHGM&*a%$-kOb}^WrreM54FLfxF%l-FJ%$t;TYtWOWsCgEE{bt3p_m_>^s`2^nMLHJ zl{|xKy$YB5eu#!G{u!2mAFXDA#~<0_xSp#WX6^pvMZHbx7AQhq326DpfNqJnY#&-(B#YIq%+VIXc*&+XV*+mSZ3D1)u+5&U++H zKJ|z`TB>*a>|h&Ak8?RY`b}_yHPu$<5a(VG_FL6T>>zTtlWxx^@C^$Kp8hAi_R@rp zQ5vd;iq}l)%CmfkD>Ob#XPpnV*pameSU%o4%{o5-&vq=KFTGfP$i8;Ioz=3Idpnd` z{C3|J`HjYKy_%PxQop4Bfkg5T{U+Vyp+eCo@#zXP1(z2N7zU5L18%%5=%cu<6PfIu zrk45X_HmWlp+pOf#Ih*`$Ja30YT6S6VhO9>AC-+!u5YwZ3wV)RT9k{uvQpK!`(MZ> zc8xjTa3wBIm6?QOq-)IRy55?&YO$&F#5qQ}g-6zztmXck%1xSv0z)#5mY>@^Mk^`} zlm(f3{#tF*Uq8_6>_%G)HjJNR1$KB4B=K-x#TMbO)#5JW1QoC^;bXV94!wCaDck^k zyg`*iAJB6c^rK(w@;M9ukpKt>Km-T_j$wd567YxnBLIIG(1Zk<;7tgi1rD@8pD-CD zAOi9x%Wg?u`Iy5kLbR5I_pR0TJjo3>Za@!hk_IV2A()VY&z)4F)7306I7w0-%Bc zbTD8Uz5oZ7VZZ_muz}md0UH>QhXe}Yd2k>P2C%{bW;iPXV1)tgNT3tmi2xMffD#f= zfQ2G~2*|Rba3BI6iU1-IKqw4wfoDN~!T=m3uz(am0BUf6014p0n_z%F5+HyBI0$kW z&vC77!35zH#G@mLl>00oyBAxBvhE delta 13235 zcmZu%bwE?!+ozFMQbbBxx=XqSNKK?uL_|_48F5J^ln#}aAvHokkQgY63W&fcX^;*X zJ?Zyu{Jw8|efe9={AP_@D=Kwl^0r zeb$X`JN$EKKa{y=G=1{=``5gKHkIJzM3o)jY~N8iwD;^pEu`Qe5&lee_7dya5s(I8 zU_c-d`vC3xq`BPFW3=8$*5Nk$AFj~Z_|Pg-vBdiZr`;bMkIx|TscbOmA04&pDtBM% zA6{x`hj|E(x6TwNE;@NUqhU8^_Z#F4ky~S1oLfSxoT9(lE4Ln#HgDY+zRbva+=RG( z=J+B|7%dz7gLC`HJU2 z124fe6B2DVLvn=E)lZVA7w2V!=OdqZef7bRy z=T}~j$`JBgl{`iA&BiF7dLcv}1=++y`$8X1BK@*m=5x{*MH{FRKV0fUunz-ni>Ffq$?yky+gpQ5n z&lv)Hj`D8oI%WC*TpcmkQxU;Xrhb?xRNa(zZDe_P%i1mYXBG{{inlUcyjKd3YlsKR zX|}p(_L2NH=ZTNP1y#Mt2`k+K-jdKClN6BBR2yPG)2KS5vZE)dO)qbUUDHxMP!!Va zK+iu2Czs~jGp8_VrEDTL;a6s& zn8Htj0VhJ%aAK`dGl@Ug55{vi>Zavl5BT0>qUIdo4rW@Fvacpn2A>8z=MThd{esVg?XelxCd~3z|mn8o4QfVzY+f#MTH~ 
z@K0iFNTg4Qo8*lwv`&#U$!om|esSyv_rp(TD2q5OR+0rCSe7~w5(x}O4H{(y+=rB| z5fWWT5!Io0H{6?#VU%;9yZ&AkDtbIzGp4_a2 zd?jy>qOLw#!T-Z$Pxj0}CJVF5oFNRT&RfEBb%uhvby+2i<&)XqM-C(X)~tKOKOz%; z0L>lihCelnsYdoYssHn}Vc~jl^qdO=P)~h5TRsls0$zBi*#JH$HUZ901Mqp@vet$I z)g4zx;0KvwK56!hPA3OR7yi-ioNU?w78RDjRDv*O7*tu4=W-}YV7>@`MW~9$s{+oV zN~Vh?5IUgmlw^QwQ_)a0U)=z5BJ$zT*?{*?MTeIUtJ%|v)cGu)_sbK3XTPwn6O&%u zmK+UQPW{kjvaRA_K>{bg=!lGH_Z=3g@>OfS)t})JWS=>Dyu@xqA5W3f-{c^14Gnd? z9jwpilu1Ku|NMf>Nj>O)qoDt(K@OzMf2M%`XKrrFP3D@nSY`!Xy|)?{$IbbB;BY=8 zt?6Y`{xx0Wql79Zmcr>HhXCKYqq^qWspZGYFSH-7(iUIj@k{ZI*1VIns!QX3MQ?R+ zsf%nLZwOt@$>k}&)#)Gg9lnQB4Z_qC$GuN$Y3OWn(FX~G=F?@O zfXh!xrS`84Hcx{)8vy7TdQSGRf&J`kZXA60Q*junph_PgJz+vxKSZ3*IEg2;#>Go* zwXG6~M}mKOLzwbRLD`JJ#6QwO3eX0Am3;X0S?rqwC*ky;AJ%g%4kgY3l*2xBohAo) zY9yr0f2bE3>#X=LKNw|=Or&Xkl(t}!&3bn;DIEPN+6(4Hq#@kw6muM)ptA*#Tq4k% z2@*&=G=HcQQ==GlTZJ*F@OX8ALqn5ty_5Y$#>^#e=Zns=l*2#2K9GAb3;g2v1Q0O) z41egisx`s*dOhJ|^wYQUUD}Fc+;n$mD)~3772^CN!S~^Fti`D1gS>v;Ms<>n=NWgM zE;dB-H#=DzfATbW)w6h?iC9tBza~+d2i+0E*|ov(xMjA1-)rK&jz_LSS8?-q?{|w# zdlUD1d~!4L(sEUnfRMAVDxC6nCIQWFva;n{`%t0|IPLt%Q{~~T(@6oHmWk@h#J!bq zOv;gH@5=D%Y33(iuGZ{kgA>FF|7tLDiQQ3E@w?QWZ*5~wLk{R4wqU{cXAfkqx~e{Y-0HXO5}d5k^8k1wSN8H& zeNSY6bI?-W*`Mj@v*T5nc@|N`xX@l}wxlOTu^sT!TqSfX5jfbW0uE1?61@P--mT^v z8Lr(6(}xzP74NHD1eYW1q7>)zVp;U;j7^S<`RUv~tafBZkB??#v^|fn8enu0`WJnW_7*bwlxaS52eV{G}@-(vfL=`h0Us0AH{Z|M1JLY)c-Nuf}4 zcEs0;NZPin6$?w-@+qG2vzk{2$zNk*!vwQV9fHgC(4@+PnuJ1qC*!*w=pStDV$Srn{vC(Ri}I6dPUJcDjs9 zjS{U3TQ1+}IB4zZb4wt!gUaQjgwQW80gJ!7uJE+-3xV%Y$}1J48L6|4Ul*%O7M=s( zq5R;XUYpsAyG@TWCa!2YMEP|GQ#vOUaeuWvx(2nq#&FF!c%wG+v)wc#*u=s(nHwJR zi6)yX^LNJgC{IhDSN`K;hu+4lI}W1Pu!S00&-CQ!n0p#yAiA?X0-H}Hc|-Ae9GSQ& zjBaw1uIARxskz-M$vRo9x$zJX7ZNIL&^79_hOY9Sk(2Y_|MuaXv^HE_JdGeD=3J=F zRRSws=LrKHIYF1p@8dpisO*lpA$Q48NIsh3Uckt&mMxh05tZvR-Pq^dvHzs-Hz!?W z#P|!T!`(93gFOd+JA&L71oHOk&Gq#r;r40;-1#BHuSUTn2gtWYsOzd8)ingMKv&-! 
z<0Povz%E&yuVe?+FL1Bl(X{*+1-e1uB8{vk=FM#OLO#{h=E9V>nX=n??{ym0b1pThvuxP$26;x@>jyQYJ;E+$CkI;`@y0?^9_+xWO&4>H@f*-wh^frE;%pn4C zpZpoFI58-*;21XCarGGq+p8=!!|-}JW_eet=z zMCnv7l=s+_>HQ_d5!Fi*-y0IwsZBFgha{kUUnw+8iiU3VP}sBCB;KZf@iYXM7DAK*VBw?`a+~$EH_Hr8e6hN)c1QY zwJjXktg62>Y)@XLEloPh551>iAC+oeSkv9A7uzh~8!&Tfb34dz@8M310Pl~SP=Q!_ zoUP%5y&J>~y6R&tdmCYChw`VIS}FPamd)D$0d$q;kRXnTbAj~uAmvNOO5wh~O?{aP z+n3t(8$V6*n~1Kf>Ymux%Y|RfhkdE__y9S%Y0sa084erMza%WMw{sF3$F25zN7x`5 z?R<Z1b$<7I_lw zeES2ObW&Kr?-oEDof5WhIsU5-KG<_YOs}UtHoZAtikiN67Ke>UmS!iSdckakm;8qx z-Z*c;fS~Uc6?WGeo^V+sW5A?2^av_@hKq0Hc+|Gx?66u>%YJypceC09@#=x6vO$+h zatRso;hw8iX~xujA&B=bu>ZA>$^p^OvB(u8>$|L|TM+xV0& zoUs`HsM+wyghHHk^E@|#S>YI$6$Z=}z(inh2;f58D9Gd80^ATKnhgd&vVgBQl6*hb zy0>+MC=|0c@va(QYx)QHki0{tV+=GL8(_Xo}X64iYkbsepBDoNw=;^oS>;uvhNKfz&8J7MR3CerX(<< zz{V7F%?(%YK7P1CjUx0#cMg*DHuj-0Im5c5BEP*_em{rFyPar((@Z3;@n@R-H@E`A{PoW%y!`K zLO|i}`hC1GR9A*MwA-#%T$8!g6Ztg669!l~nR!{P;3p7*0rW17h!sp4&y1;zk-b); zU)*ok8+pq6hx<<=MQAi^XU2bhQ$KeCwAu+G)Dyv7rS04cG@BPc{+=g#6f?-p-ZIV3 z^VUH3W1jFLR`TnbdMDrQa8W$A+{9B98meNF%v};PoOLrS4W`=~hVA0(FfUoa&^=Hm zC&HhsT_nNd+mj68#C9xm{VjI;WSKWF@4j8o-nhFxb0;Y4AXth98ylR^RhJ_=2Hgl? 
zY=?Jw=2mH5PWRW>RXW)Nc151LrFlgZ*Rf$U_Tq99RK=VLJ0VP`7CsDQ32tN0Tc6yz z*F6=TgD(BudFh|@_VYZwRoIZgciW?5+Oda^6&vVFEk-mkH+%XDW7CA!H{C+)XhGJD zE4m0w0Dzr3)^VX!dru8)0`9Kk^2F0(a-DnQ3I}Oq1N#@m8wXZ$a>=n{HkiV*ss#CH z2f-AstP0MdcWvbhIP#CFx~l|3_|8TOPm{ReCsao9*F*SD%#HcU?&t02x@&*~@p;v^ z>UM>cW*rOirxDfl!i?$xaJJs4$~UI%JALA!ozB+CY+|Nb0CqS(>Mm{a4;q|&ZAJYW ztDlGc`nD7*c~uN5yO#H&f*$^dilUkr#<#+rUvar45CxmbNLC;_i*k?VK>Cb5)`v`* z0#F(CurW)z_^{+A%hP%HB90D41(!?6O%C6z80*B!z`~s`?%lxMAABNm*e2^W+%rx^ z)mq$mr@!Jp-SZ`{sg~F!mC7}l^_e#xJ}I!>%bp~sH0=#17l+C=pUbdLQE>-jR-8v#0i&J~GG?9npfASg2zjjz2PjB_jE?pf%q!fEkw9c0a z;Op0u-&!tAso#|oeAe|}$G?^G#WfZtD2jL}1x=qB<=_nu=iZLTeIeSSda7O$@Er2tLqRPr9wYO|;(vyZx~GEBpAMQl2`1fg$BmC%w6zHkxzC@jmNqGl za8tHOEW8@@)<}M6z$fp6?d3#TVsIdI+#H^N<>J)5+@DG~YvMM6R%r>)r|093t5GCE zHzv>X`;!#%>9*62+?WPx`l|vzx;kU0PI2WwOEdgzFP`OclO@{j1QTwDq_Et^EeR;y zv3u@%>$36$eeNo=oS}|W_IqK?(Mh!iUYbQ8dfA-*eLSPR`J3C-6Bx^I&%!GNfXY@Jl>{RcVWfeWjBOU0`0p^w2z}h9%}j z+nsm`&IJEK(Zt6zA?nU>IrdNA0fM`^{B@qF7fwrCtnIw zLG?~!$7Y+au7i3Cx8s8SffZkoZxr{RmXQ0IL~nR>+cC!X$8-f)IcF7Rsp{xnWOI{^ z!++MLLKW0sH}pG#H2epq4|ET9D=9!K4)J(k83^|$@JZ(;vpFaG6&NPhXXx-HgE>1FK@?I(vz)1lQL?du$wlBd@X{`A-b2Mgvo3Ec@oqKH|VGG-u{O2})^ zkwHV=g!e~uIaB1o!?o!{QcE=6B5`Tv?o)XuM=l0*` ztf}@a7ZG_Ho#}&C#zZivcKs3Aej&ElLrnw@dN{DqErX`I2G7&>z>h-MC_C*x$Wg@^ z5kiRLEUTBU3mV1`AV7EurLy*r+<_;(u4iD%s@}oG41`wUp~K}y!U6;1n|+PMT~oS@ zjHYl9CX!4hlVi@MYhF;i@on6Q(agByf@_}Hfv_M&j371@}-LbwCeNv~jpRy(PEBcew1HS3$JWohaoVjMPlr4u{IHZaf&sCN|`aACXa2AqH{enVmD zO$TPx>bD_h2rYN;LEZ-U0AHL=fyzcwVDBk_NXttE3d}7v>bH&05XZz8E778tunqx$ zSYnSRKB{awf(%jHW~uN;2tp!q;>gSk8e@BMnQywc#nv!%vKc$amMv} zVOvVD(G$)Q{VM6ua=-`d|B-51TCbA!UD;mq6je%I#NMW;xYg=?(6e>numDl#v!67W z+T4p5K}T2L-=f6PKL0O1K4gQvGw{X zlCvZqauKTRF*EKB9igI!a$H!+9hxOHTvY(!XmC9Ada-rs4=X#k2kcIUpAm8tRZ@U@ z(4tPMn{RCfvE_q^6y)gHsEn#E*z7Q|Ej~15r^DyqmQNP6*ThDf0BX0}k(SZy7J{q$nJfl)gJcKsSzlYw)WA;k< z`CJvOlia5yIfV#jt+4JSy6IBbYchJ8lUV?z9%PV#Z^bu>0{%Fyko#3J# zB>Jur@oS>YqTR$@{mn!kFA;i=*3QH7KW&F^D}SCXuJsciJ=u}A zR`UP-&S-X`W2i-YE$UNRw4d$E_g95RtAOs-CeKyL`g?89riXq*>=C2vFIavKuM-0I 
z87p#J+FUg(@^)iUwHhfSg*9uhwtAI4wGozTG3#cjJKd=SCRVNmLZ*bZZ(1&T41cH) zdhBKM7{#F{C_?{a)nRz&j%N3jXFtX{D$COa9rFHp%}w&U=Za7wo`%J<5RjNcgq~#0 zVGNCTzm}wz>)ZXukZrhH8enF_PdW54`<-))v|!AQ)4Q`PrA8@542)gs(qe0~RNxV` ziS|szd9W|Vs5pw~0)-cQf^`Ua)<+|)LINram&otBMn~5eQ+-bN4H{=ACg9<3Qz}Bk9pOlfiiGjY^UX;5O4biBJ$tO~0b) z+7aDu%=#Ucop^sGJsuQ-iCn`^%q_P@T0zQzwW~=rP+mxgt3|-b315KfJ&9VJ7XHaV z(_lsh$<4^Hj&@bnn`?lf<`j4bF*;$Ab!DAh+^!zG%}wE8dG_zcH@os} zEuFDc{cu{M_ZEZi0IWz0yjrvrl?OPnOVNy_5#Z8SF8W>6`I0LJ-=QcRVdt6*UKM7< zyX(4gl$x8t!CN<3jiDNps$o?fgaLKNcy^6`*Im6p3%vFY41Mifgr8}Q+cM?VSA){W z+|m~mhJppY+l5I-!#2xU);NjjQ~OTxfs||jn25EKfP9Nv<)^IvCEL?1M7nXe-yKdn zJ|PFbvO%DTvZ1!BN8d&Rv#iG!?J%QeZN13F0b{v`tld|=^I_DcU|}9s^}b`EdQ!Ji z_awdPi)zZ9+ZCslk{8UOt78P6vC{8WX<*%xy1e5}nL{KV1E)4)Z@uh7EV@EFvB)KZ zLs2ClkOa55T@K{Nw$?yLXmMQ6sE=+PpsB(+I?BRMT^q|Ciz->qsjQB`aX-4_l5+%o z`GdXQZshZTf=l||tzmpsI?f}uZp za_}RsBN=a%wv~>Q;@uZ@%FktR@Y8+Q4`S!1x0q)FSHOUw!1-{swQ0R;&`6D+ zT>ESw+RtKSVYfug*p`6dP7!`Bhw)n>W5R)LHGO?TyMp#Rpf%o5$y-}psU|-UfP{e9 zD5Cq}ngzH>Y?-xv-K6U>Eu3A>*RhaaGRk$d(-wIF0!Egp1=fp~)~(F0cvGYU*q}(9 zo~dR36%7Wch^ZAEw0dSgZWR|6#n1aeTfK5Tfl5+HP3~s5;6FW z1oT6K!zCnRsB=-J_P|(jKoV%mU>g3kqPY^e{ z0uZRhGT?(ZzNcUS`JZ!gt*Vhi?6&`kkx;`1Wbuu9uMmpoKf`^Ft_LbzZ(hHZ`^<0( z5oGUFw!_<~6n8rx^NBci^h3&*gs}I8$4^$%B3GC~-AG z%)^eL4;ABq3rip}F!9*R3g#N~Px@#1WnV!}_2OyG)5eyoj2WzI#mI_@YE+odMT7-o zNq8V{VL=i895#0wlOC1JeHH6=&k9@cv-ti#k@RL=p!K?UYkVXQV*@>#>bw-VE!_H~ zNBE}T^&QI+lBq43noBn-L-F=p<6{&E8R3^1QQA^s;=q|Sq- zPO$(i8olR?Yt0;F5K<18iLPf5KX8~D)Y8x=3_TAE-aTz$&|ite$2p_zm9fDk7Dh`jvdZPwsBQw4v1sOK-l^VZW@SF&)BWbg$3h1?=ys zBGRNB=*R_T1Lw>R5}J z#YZ<4zu^&?xVn34BcRPR*Dh_hh;+aLnh-rgl7bHeV4pg566AmIgPjmyMz&=)Prmx- zGd8r9EvJS3VF3pZm0Tnp{kL%Nr!}6*PDomKa zS{3YTfdeEQEgum<4tB{q588N&@HLDb!oe;%H^5ET^#c{X1qV2}VpGy*4VSpaYIbaCxPUz*^v5o26EbV5*Y^o%nU55Hf z8?>!4eKJOUF_FERvpueH?n)UG@sBPSNA{;0&aOnR-~1HmCBmNlxg@&bvS-i1;L~6z zy7+2HcVuQX3$r2{m%xKrVXe9YfJl$Jdur~MVcWuPlkztG`YqeE8tV(ETdawMLgQz% zPGPfTQM@dU8}&P>PxLs+(PHl<*i3vbyNt}HhRt4zy76L{h84=Tb>W%S<9Nq`NJ*}z 
z&Q<#Y#WPnnmgW5({vxL>WSqJ~^eL8`56Poabru!rO+}-|K5^k@ih&0G)Ez)*!*K@@ z;dG<`BM`hnF=ByaZLAg9R=xy^?e>jOh(SRP4S!t|@aG*u7@L0P zHwN9CjUP5OZCYnPX}C}bq3KK>$AA1S=k9o8iglyOo2E;{2X8=iU;C5W8YYsTI(C7 z>4kRj)AjyA+dC&`B5a^2P(;XmZ}ziFpP}~{Eq8XxE!<9BW$)npoSE-D5TS;KH#un_ zSlF#G5bhr_V9sNidE7To@Q6`8VO{b1*RK6L`N=I1`*bJAhC!*6TJJEfFHc$SU#l8m zQtZ}=3*6fQGFlIH41tp!FBQ3kx5PdIXD%oILFrgEe!q`&m|K6xU^($N%iHmmsZQ`~ zsjVBu(vfSx>`(Q$Qn4`bv9pH;-Ty^;c>N} zZ7Bcq5uwdENOr9|QDU33X}xvv{O(wsm&A)ONZh+7_&)a=M@bjB3U1^ZUlIXk2J) zmV~Da-H2FBZl5mT_WB{Mr5mEAd7)AMrroO8k9nq_XO(%HwY&1_n!L%-4QOZTiw@;= zTHx%5eaDZ3+n*x>3bceQ+tS-fUeFzsTWeh&FCL#Gj;%JLUHGA*^p@!B+_w31+qRM} zCZVO)G1~bFrlZ^l$xDg#qz|}#-89ES@E@&VcqkG}-wPuj+&}CPK8q+~%M&zN-D<#a zyXUNTi`;Ho*7nIdVV7$3VJ3I>yzArp$k{Q~MTCJsLb^6o1R(^N6J8|1)9z1%NFn;5 z+9l8GyC4F3$!{Rqw{CsnmlURotEst8#%#iQ*DWSv-%#oqqQ?9HAEPXn$alo=AEb-6 zM;$%gmR*_Dg#{!{{Z1Q)pv_Rt(Yqv2lBB6TIEHlP#PHzxZYU4Z9)fo1VndA^z+_XV zsM1d1BZ<3*DS#b&S0WrkxDsLnn>MNBLr%d*;&)+CSkja%9K*2k8@|qg^n?enuJ{%C zaw0w87@n07wNX7-4y1}}#V_ARA7-9v7ldj~K_%>p@FDBr0n9~IEAM!bX7G`SnuOiq zq^SoeYqf)CyBR4{v?y!MgS1^`PGmG3Bfip8{2kyz!r+)oD?>%u8ZdOS9V;p*bDMf4 zM;GRmY)7T%hYHHsElZga>Jo&TzudLEh8z$`hoI$AiZ6F_3vTit=XBAosGvA#!Y*&} z)LXdut8MO;bZ#Uq9Q3ElD8;y4wrfZ(0h*+#$0)^TyBoz5Io4orZdFnK+5#Er!`y^$G zw<{EG{$%&GiZksCeB|XW8z=H*nlrp1dbcZODi9t(T%^2Wz=%f>vqutLR))Q6yL zx-3vZskr*@)!2d3;UEZddBm5!^g&n{&mF54i#lP+qu#vQ~w8B~1~-8^UW|>|RNpa)o1LoEcW6 zxsdPRBiS{%>!*aNWOf_`;xeg%cJ~S8MI=%)?NqqIzXIf<8lcbnx_WtPI{G?u@M|ha zh)GC^i;IbiOGrsc%B3FJ-E~|NTz(|DltY zO|7(-CzO+sOkK6-1^yqF64J8&K1xhl{;%W3B<1A(MJFyVFZP!T;xbZy(TU5+N#gbb z|D&t81gP!550sFW_-nI-tjxcK5ton`|BFsiOy=MEi%Uv~{~zT5(9GhJlG1-MmX!Ki zJ4rd2znDmg%bsiUzZ8*@lKnqSusoz><>mjaoV0`t@E4)9wA|l>vVVPDacMcZf16rd zMoi)_%gBh!|HVT_O7cHK1OM%6;xcl7n_5;}{4Zf-C1n5YlAsO#W+E#i`IlAY#3cW( zFrfX#<-}$GVj?FkEAel0$;rt4^-f+)_Aia)CI2?Hyo~t2Ka+%%n1nd}znFkADJGuE S=A=z7AuUJF!=q)SP5xiXSGwl_ diff --git a/src/mechir/modelling/steering.py b/src/mechir/modelling/steering.py new file mode 100644 index 0000000..35f768f --- /dev/null +++ 
import torch
from torch import nn
from dataclasses import dataclass
from typing import List, Optional


@dataclass
class EmbeddingSteerConfig:
    """Configuration for steering an embedding matrix along its top singular direction.

    Consumed by :class:`EmbeddingSteer`.
    """

    # Indices into the first left-singular vector whose entries get rescaled.
    idx: List[int]
    # "increase" or "decrease": selects which scaling function is applied.
    mode: Optional[str] = "increase"
    # Magnitude of the rescaling applied to the selected entries.
    scale: Optional[int] = 10
    # Key under which the embedding weight is looked up on the model
    # (the model object must support ``model[key]``).
    out_hook_point: Optional[str] = "model.embedding.W"


class EmbeddingSteerWrapper(nn.Module):
    """Thin ``nn.Module`` adapter so a steer object can sit in a module tree."""

    def __init__(self, steer):
        super().__init__()
        self.steer = steer

    def forward(self, x):
        # Delegate straight to the wrapped steer's __call__/forward.
        return self.steer(x)


class EmbeddingSteer(nn.Module):
    """Rescales entries of the embedding weight's top left-singular vector.

    The embedding weight is decomposed with an SVD at construction time and
    the first left-singular direction (``U_0``) is cached.  Calling the
    module with a set of indices rescales those entries of ``U_0`` in place.

    BUG FIX: the original subclassed ``HookedRootModule`` without importing
    it anywhere in this module, so importing the file raised ``NameError``;
    ``nn.Module`` is the minimal working base class.
    """

    def __init__(self, model, config):
        """
        Args:
            model: Mapping-like object; ``model[config.out_hook_point]`` must
                yield the embedding weight matrix (a 2-D tensor).
            config: An :class:`EmbeddingSteerConfig`.

        Raises:
            ValueError: If ``config.mode`` is neither "increase" nor "decrease".
        """
        super().__init__()
        self.config = config
        self.layer = model[self.config.out_hook_point]
        self.scale = self.config.scale
        self.idx = self.config.idx
        if self.config.mode == "increase":
            self.scaling = self.increase
        elif self.config.mode == "decrease":
            self.scaling = self.decrease
        else:
            raise ValueError(f"Invalid mode {self.config.mode}")
        # BUG FIX: ``__post_init__`` is a dataclass hook and was never invoked
        # on this plain class, so ``U_0`` did not exist by the time ``forward``
        # ran.  Compute the decomposition eagerly at construction instead.
        self.__post_init__()

    def increase(self, x):
        # NOTE(review): multiplies by the *negative* scale, i.e. flips the
        # sign while amplifying — kept exactly as written, but confirm this
        # sign convention against the steering experiment.
        return x * -self.scale

    def decrease(self, x):
        # NOTE(review): multiplies by the *positive* scale, which grows the
        # magnitude rather than shrinking it — confirm intent.
        return x * self.scale

    def __post_init__(self):
        """Compute and cache the SVD of the embedding weight.

        Kept under its original name for backward compatibility; now called
        from ``__init__``.
        """
        U, S, V = torch.linalg.svd(self.layer, full_matrices=False)
        self.U, self.S, self.V = U, S, V
        # Detached copy of the first left-singular direction so the in-place
        # steering below does not touch the cached decomposition itself.
        self.U_0 = U[:, 0].clone().detach()

    def forward(self, x):
        """Rescale the ``x``-indexed entries of ``U_0`` in place.

        Args:
            x: Index (int, list, or tensor of indices) into ``U_0``.

        Returns:
            The updated entries ``U_0[x]``.  The original returned ``None``;
            returning the new values is backward compatible for callers that
            ignored the result.
        """
        self.U_0[x] = self.scaling(self.U_0[x])
        return self.U_0[x]


class EmbeddingSteeringContext:
    """Context manager that temporarily swaps the model's embedding for a steer.

    On entry, ``model[out_hook_point]`` is replaced by the steer object; on
    exit (including on exceptions) the original weight is restored.
    """

    def __init__(self, model, steer):
        self.embedding = steer.config.out_hook_point
        # Keep a handle on the original weight so __exit__ can restore it.
        self.original = model[self.embedding]
        self.steer = steer
        self.model = model

    def __enter__(self):
        self.model[self.embedding] = self.steer
        # Return the steer so ``with ... as s`` is usable; callers that
        # ignored the (previously None) return value are unaffected.
        return self.steer

    def __exit__(self, exc_type, exc_val, exc_tb):
        self.model[self.embedding] = self.original
from functools import partial

import pandas as pd
import pyterrier as pt

from mechir import Cat, Dot


class DummyTransformer(pt.Transformer):
    """PyTerrier transformer that scores a frame with an arbitrary callable."""

    def __init__(self, transform_func):
        super().__init__()
        self.transform_func = transform_func

    def transform(self, inps: pd.DataFrame) -> pd.DataFrame:
        # Copy so the caller's frame is not mutated; the callable returns one
        # score per row.
        outs = inps.copy()
        outs["score"] = self.transform_func(inps)
        return outs


CROSS_ENCODER_CHECKPOINT = "crystina-z/monoELECTRA_LCE_nneg31"
BI_ENCODER_CHECKPOINT = "sebastian-hofstaetter/distilbert-dot-tas_b-b256-msmarco"


def score_cat(model, df):
    """Score (query, document) pairs jointly with a cross-encoder.

    Bug fix vs. the original: ``df.query`` resolves to the
    ``DataFrame.query`` *method* (existing methods shadow attribute-style
    column access in pandas), so ``df.query.to_list()`` raised
    ``AttributeError``. Columns are now read with bracket indexing.
    """
    queries = df["query"].to_list()
    docs = df["text"].to_list()

    tokenizer = model.tokenizer
    sequences = tokenizer(
        queries, docs, return_tensors="pt", padding=True, truncation=True
    )

    scores, _ = model.score(dict(sequences))
    return scores.cpu().numpy().tolist()


def score_dot(model, df):
    """Score queries and documents independently with a bi-encoder (dot product)."""
    queries = df["query"].to_list()  # bracket access: .query is a DataFrame method
    docs = df["text"].to_list()

    tokenizer = model.tokenizer
    queries = tokenizer(queries, return_tensors="pt", padding=True, truncation=True)
    docs = tokenizer(docs, return_tensors="pt", padding=True, truncation=True)

    scores, _, _, _ = model.score(dict(queries), dict(docs))
    return scores.cpu().numpy().tolist()


# NOTE(review): these load checkpoints (network access) at import time;
# consider moving behind a main() guard.
cat_score = partial(score_cat, Cat.from_pretrained(CROSS_ENCODER_CHECKPOINT))
CatTransformer = DummyTransformer(cat_score)

dot_score = partial(score_dot, Dot.from_pretrained(BI_ENCODER_CHECKPOINT))
DotTransformer = DummyTransformer(dot_score)
-60,43 +60,30 @@ "Note: you may need to restart the kernel to use updated packages.\n", "Looking in indexes: https://download.pytorch.org/whl/cpu\n", "Requirement already satisfied: torch in /opt/anaconda3/envs/mechir/lib/python3.11/site-packages (2.4.1)\n", - "Collecting torchvision\n", - " Downloading https://download.pytorch.org/whl/cpu/torchvision-0.21.0-cp311-cp311-macosx_11_0_arm64.whl.metadata (6.1 kB)\n", - "Collecting torchaudio\n", - " Downloading https://download.pytorch.org/whl/cpu/torchaudio-2.6.0-cp311-cp311-macosx_11_0_arm64.whl.metadata (6.6 kB)\n", + "Requirement already satisfied: torchvision in /opt/anaconda3/envs/mechir/lib/python3.11/site-packages (0.21.0)\n", + "Requirement already satisfied: torchaudio in /opt/anaconda3/envs/mechir/lib/python3.11/site-packages (2.6.0)\n", "Requirement already satisfied: filelock in /opt/anaconda3/envs/mechir/lib/python3.11/site-packages (from torch) (3.17.0)\n", "Requirement already satisfied: typing-extensions>=4.8.0 in /opt/anaconda3/envs/mechir/lib/python3.11/site-packages (from torch) (4.12.2)\n", - "Requirement already satisfied: sympy in /opt/anaconda3/envs/mechir/lib/python3.11/site-packages (from torch) (1.13.3)\n", + "Requirement already satisfied: sympy in /opt/anaconda3/envs/mechir/lib/python3.11/site-packages (from torch) (1.13.1)\n", "Requirement already satisfied: networkx in /opt/anaconda3/envs/mechir/lib/python3.11/site-packages (from torch) (3.4.2)\n", "Requirement already satisfied: jinja2 in /opt/anaconda3/envs/mechir/lib/python3.11/site-packages (from torch) (3.1.5)\n", "Requirement already satisfied: fsspec in /opt/anaconda3/envs/mechir/lib/python3.11/site-packages (from torch) (2024.6.1)\n", "Requirement already satisfied: numpy in /opt/anaconda3/envs/mechir/lib/python3.11/site-packages (from torchvision) (1.26.4)\n", - "Collecting torch\n", - " Downloading https://download.pytorch.org/whl/cpu/torch-2.6.0-cp311-none-macosx_11_0_arm64.whl.metadata (28 kB)\n", + "\u001b[33mWARNING: 
Retrying (Retry(total=4, connect=None, read=None, redirect=None, status=None)) after connection broken by 'SSLError(SSLCertVerificationError('“pytorch.org” certificate is expired'))': /whl/cpu/torch/\u001b[0m\u001b[33m\n", + "\u001b[0mCollecting torch\n", + " Using cached https://download.pytorch.org/whl/cpu/torch-2.6.0-cp311-none-macosx_11_0_arm64.whl.metadata (28 kB)\n", "Requirement already satisfied: pillow!=8.3.*,>=5.3.0 in /opt/anaconda3/envs/mechir/lib/python3.11/site-packages (from torchvision) (11.1.0)\n", - "Collecting sympy==1.13.1 (from torch)\n", - " Downloading https://download.pytorch.org/whl/sympy-1.13.1-py3-none-any.whl (6.2 MB)\n", - "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m6.2/6.2 MB\u001b[0m \u001b[31m9.6 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0ma \u001b[36m0:00:01\u001b[0m\n", - "\u001b[?25hRequirement already satisfied: mpmath<1.4,>=1.1.0 in /opt/anaconda3/envs/mechir/lib/python3.11/site-packages (from sympy==1.13.1->torch) (1.3.0)\n", + "Requirement already satisfied: mpmath<1.4,>=1.1.0 in /opt/anaconda3/envs/mechir/lib/python3.11/site-packages (from sympy->torch) (1.3.0)\n", "Requirement already satisfied: MarkupSafe>=2.0 in /opt/anaconda3/envs/mechir/lib/python3.11/site-packages (from jinja2->torch) (3.0.2)\n", - "Downloading https://download.pytorch.org/whl/cpu/torchvision-0.21.0-cp311-cp311-macosx_11_0_arm64.whl (1.8 MB)\n", - "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m1.8/1.8 MB\u001b[0m \u001b[31m6.6 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0ma \u001b[36m0:00:01\u001b[0m\n", - "\u001b[?25hDownloading https://download.pytorch.org/whl/cpu/torch-2.6.0-cp311-none-macosx_11_0_arm64.whl (66.5 MB)\n", - "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m66.5/66.5 MB\u001b[0m \u001b[31m8.7 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m00:01\u001b[0m00:01\u001b[0m\n", - "\u001b[?25hDownloading 
https://download.pytorch.org/whl/cpu/torchaudio-2.6.0-cp311-cp311-macosx_11_0_arm64.whl (1.8 MB)\n", - "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m1.8/1.8 MB\u001b[0m \u001b[31m9.5 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n", - "\u001b[?25hInstalling collected packages: sympy, torch, torchvision, torchaudio\n", - " Attempting uninstall: sympy\n", - " Found existing installation: sympy 1.13.3\n", - " Uninstalling sympy-1.13.3:\n", - " Successfully uninstalled sympy-1.13.3\n", + "Using cached https://download.pytorch.org/whl/cpu/torch-2.6.0-cp311-none-macosx_11_0_arm64.whl (66.5 MB)\n", + "Installing collected packages: torch\n", " Attempting uninstall: torch\n", " Found existing installation: torch 2.4.1\n", " Uninstalling torch-2.4.1:\n", " Successfully uninstalled torch-2.4.1\n", "\u001b[31mERROR: pip's dependency resolver does not currently take into account all the packages that are installed. This behaviour is the source of the following dependency conflicts.\n", "transformer-lens 2.14.0 requires torch<2.5,>=2.2, but you have torch 2.6.0 which is incompatible.\u001b[0m\u001b[31m\n", - "\u001b[0mSuccessfully installed sympy-1.13.1 torch-2.6.0 torchaudio-2.6.0 torchvision-0.21.0\n", + "\u001b[0mSuccessfully installed torch-2.6.0\n", "Note: you may need to restart the kernel to use updated packages.\n", "Requirement already satisfied: jaxtyping in /opt/anaconda3/envs/mechir/lib/python3.11/site-packages (0.2.38)\n", "Requirement already satisfied: transformer_lens in /opt/anaconda3/envs/mechir/lib/python3.11/site-packages (2.14.0)\n", @@ -194,7 +181,7 @@ }, { "cell_type": "code", - "execution_count": 2, + "execution_count": 15, "metadata": {}, "outputs": [], "source": [ @@ -207,7 +194,7 @@ }, { "cell_type": "code", - "execution_count": 3, + "execution_count": 16, "metadata": {}, "outputs": [], "source": [ @@ -216,7 +203,7 @@ }, { "cell_type": "code", - "execution_count": 4, + "execution_count": 17, "metadata": {}, "outputs": 
[], "source": [ @@ -227,7 +214,7 @@ }, { "cell_type": "code", - "execution_count": 5, + "execution_count": 18, "metadata": {}, "outputs": [], "source": [ @@ -278,7 +265,7 @@ }, { "cell_type": "code", - "execution_count": 6, + "execution_count": 19, "metadata": {}, "outputs": [], "source": [ @@ -357,7 +344,7 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 20, "metadata": {}, "outputs": [], "source": [ @@ -380,17 +367,17 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 21, "metadata": {}, "outputs": [ { "data": { - "image/png": "iVBORw0KGgoAAAANSUhEUgAAA8YAAAI2CAYAAACSdJFsAAAAOnRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjEwLjAsIGh0dHBzOi8vbWF0cGxvdGxpYi5vcmcvlHJYcgAAAAlwSFlzAAAPYQAAD2EBqD+naQAAjt5JREFUeJzt3QeYFFX28OEzmQGGnHMGRcmgJAmiwKqoCAomkoJrAnNEXMGEWdxVMKF8igLiiqgoCEhQRLKAREkiIBkmwYT+nnPd6v8M0z1Mp+r0e5+n6KYr3NthbtWpm2IcDodDAAAAAACIUrHBzgAAAAAAAMFEYAwAAAAAiGoExgAAAACAqEZgDAAAAACIagTGAAAAAICoRmAMAAAAAIhqBMYAAAAAgKhGYAwAAAAAiGoExgAAAACAqEZgDKBQMTExZlm4cGGwswIACDLOCQAiFYFxhMvJyZFp06bJzTffLI0aNZIyZcpIYmKiVKpUSTp16iSPPPKIrF+/PtjZDAmDBw92nvDPttSpUyfY2Y16Xbt2LfL3deai37VFv0tP93Hlm2++keHDh0vTpk2lXLlykpCQIOXLl5d27drJqFGj5Oeff3a536ZNm+S9996TO+64Q9q3by/Fixd3pgn4G+eEouOcEF4CeU6wyvP69evL5ZdfLk888YSsWbPGo/zxtweEvvhgZwCBs2zZMhk0aJBs2bLF+ZoW7ikpKXL48GFZunSpWZ577jnp27evTJ061RTS0S42NlYqVqxY6DZnW4/A0+CzcuXKBV4/ffq0HD161DwvW7asy9906dKlC7xWrFgxl68Xto/Sv68bbrhBVqxY4XwtLi7ObH/8+HH55ZdfzPLaa69Jt27dzIVRhQoVnNvedttt8sMPPxThHQO+4ZzgHc4J4SGQ5wSHwyEnTpyQI0eOyO+//y5fffWVjB07Vi688EKZOHGiNGvWrNC88bcHhAkHItKsWbMcSUlJDv2Ky5cv73j22WcdW7Zsca7Pzs52/PLLL46HH37YUapUKbPd0aNHHdFs0KBB5nOoXbt2sLMSUvQz0WXBggWOcKD59CTP+n3rtvr9e2r58uWOMmXKmP1LlCjheOSRRxxr16515ObmmvU5OTmODRs2OJ5++mlH5cqVzXarV6/Od4yLL77Yce655zpuvPFGx8svv+y49957nfkH/IVzguc4J7gWzeeE48ePO+bPn++49dZbnX9PiYmJji+//NLt8fjbA8IHNcYRaOvWrXLjjTfKqVOn5Nxzz5Vvv/1WatSokW8brdFq06aNWR544AEZOnRo0PILhCO9y6939o8dOybVqlWT7777zjSjPrOmSf8GddHm1Pfcc0+BJtL696l/j5bJkyfb9h4QHTgnAP5RqlQp0/JHlzvvvNM0q96zZ48MGDBAVq9eLQ
0bNsy3PX97QHihj3EEevzxx02TH20G9PnnnxcohF01P/rvf/+brymRXpzn7Te1YMECueqqq6Rq1aqmED+zv+X27dvln//8pzkpJCcnm5NHq1at5KmnnjJ5ceePP/4wwYIGFCVKlJCkpCQTZLRu3dq8rk1Qz6RNorR/jx5f09HmRlWqVDFNmbRZ6vfffy/BHpDk5MmT5nto0qSJ+Ty0b5KeQN31M81LAyw9ydauXdvsq9+Pvre77rpLfvrpJ5f77N+/35xQrc9RF33+4IMPyoEDBwpNTz9P3Vf7TulvRr/j/v37y8qVK4v0vnNzc+Wjjz6Sf/zjH6YZm34f2qzw0ksvNc3B/q5gKMjqx6W/tdTUVPOdnn/++aZpmb6+c+dOCWXjx483v1+l7/PMoPhM2ndYm9zpe8wrb1AMBALnBM4JnBP8T7+DGTNmmLylpaWZ33Yg/vYA2CjYVdbwr/379ztiY2NNU5xhw4Z5fZz333/f2YTs1VdfdcTExJj/ly5d2pGQkJCvidGnn37qbCakS0pKSr7/16xZ07Fx48YCaaxZs8ZRtmxZ53ZxcXHm/1Zarpoy7dmzx1GrVi3nen2vuo/ua73WpUuXoDSbs9L/+OOPHQ0aNDDPixUr5ihevLhznTa5+vbbb13un5aW5ujfv79zW+uz1M/c+n/z5s0L7Ldw4UJnc16rSa8u1v/181m8eLHLNHfs2OFsNmblz2rKpc+/+OKLQpugHT582HHRRRfly3Pe/OrSp08fx6lTpwrsa6X74osvOho1auRM03ovmrdQbUqdlZXlfJ/aFNqfrL89imf4A+cEzgmcEwJ7TrjsssvM9vobT09P9/vfHgD7cOUVYaZOneo8AcyePdvniyA9iesFxuDBgx27d+929ofZtm2beb5y5UpzUaTbduzY0bFu3Tpn30rtV1O1alWzrn79+o6TJ0/mS0MDCl3XqlUrx08//eTsl6knTO1/oyfH8ePH59tHTy66T506dRzz5s0zebHytHPnTsebb77peOihh4J6EaQXHdpnVPsh6eeg70v7ojZu3Nh5fH39TNdee63zwk7fg17wWQ4ePOj46KOPHLfddlu+ffQ7sS4aNM0lS5Y41y1atMiZZrly5Rx//PFHvn31M2vTpo0zz9OmTTMBn9J+sZ07d853cXXmBYXurxecuq5Fixamj5VeyKnU1FTHBx984KhUqZJZP2rUKLcXHyVLlnRUqVLF8fnnnztOnz5t1ul7t44VioGx/l6tNN544w2HPxEYw584J3BOsHBOCMw54T//+Y/z+Pod+/tvD4B9uPKKMI8//rizIN67d69fLs779u3rdrtevXqZbfRuuKuT1qpVqxzx8fFmmxdeeCHfuuTkZPP6jz/+WOR8nXPOOc478P5mXQTpRYgOlFTYcuZ7UdbnVbFiRceBAwcKrNcLRGubvBcrSi/orHV6ki0qvSiyLmL27dtXYL1eTFh3+++4445867RWx0pT0z+Tfp968eruguLDDz80rzdp0sRx7Ngxl/lbsWKFqe3Ru/5nfibWxYdeZOvvxF+8vQjSC3533/eZtcLvvPOOM42lS5c6/InAGP7EOcF7nBOi+5xQ1MBYzwHW8d9++22//+0BsA99jCNwQKC8fVX8QefWc0UHHdKBJJT2R9I+lGdq2bKlGaBIad+ivHQOP7Vv374i58WbfTyl/aO0D1Zhi/Z/ckfnstV5Cc+kfaXq1q1rnq9bty7fOp3HVp133nmmX15R6HWXTv2jtB+d9qk7k/Zn0nXqk08+ybfO+n/Hjh3l4osvLrCvfp/aH82dd9991zxqft31h9J+gdqvTafL0D6JrvTq1cv8ToItMzPT7fd96NChgP+dAYHAOcF3nBOi85xQVHn/rnQ6JwvnCSD8EBijUDrQhw5o4sqqVaucg2j06NHD7TEuueQS54k/KyvL+boOPKJ0br/77rvPzOWanp5eaH6sfR5++GFzsTFnzpxCB3Lxhg5w8r/WFG6XJ5980u
3+F1xwgdt1OojMmSdP9eOPP+Z7f0WxY8cO53GK8vnrSVr3sVjz7nbv3t3tvu7W5eTkmHkZlX4WegHmbtm8ebPZbteuXS6PpRdhoUB/h+6+7zVr1gQ7e0BI4JzAOSFazgkAog+BcYTRkS4tZ55ovT2eTjnjyl9//eV8Xr16dbfHsEZhzM7OzpcnHdVXpzzQO+0vv/yydO3a1YwoqlMWjBkzRvbu3VvgWFoLce2115qLqbffflt69+5tagz0zruus064ebk7OY8cOVICQUfQdCc+/u8Z0vJeDFojiFoXYEXl6ed/5j7W86Lum5d+jzr9hDWCaWE1KdZ7dXeB66omJdr+zoBA4ZzAOeFMnBP8K+9vOO/fG+cJIPwQGEeYvFPG6Jx6vgrkVDJ68TJ//nxZvHixaZ6ld4n1IkGnhNBpD3SajzOb2iUkJMinn35qavB0Kge9e63Nu9avXy8vvviief8vvfRSvn3cnZyPHz8uoeLMuW1DndYOWL755puz1qYUVqMSjtMV+fvvDAgUzgmcE+wQzeeEtWvXOp/rFFcWzhNA+CEwjjB6t926m69z5gVS3ru61nyurljr9ALHVT+bTp06yfPPPy9LliwxfdS++OILc7c/IyPDTHTvas7F5s2by7/+9S8zP6XuM2/ePLnooovMyVlrCfKeqNydlHWuxFBh9QVz17TMH5//mftYz13VwljcrdM74VZNhyd5jhRag2X1oQv03xngC84JnBPOxDnBv7766ivzqHNuX3jhhUH52wPgHwTGEaZy5cpyzTXXmOcff/yxbNmypcj7Wn3Dikr7mVmFvl6MuKMXKNaFi97dL0yxYsWkT58+MnPmTOeASHpxVBg9GetAIXpy0hOTvg8rzXDRoUMH8/jll18WeR8dtMW6qCzK568XLtZAL1Zwp9wNgKK09sYV/R7btWvncZ4jhf7mtD+j9dkvWrTIo4F8ALtwTuCccCbOCf6zfPly+frrr83zAQMGmN9rMP72APgHgXEEGjdunJQsWdLcXdfRPwu7+2v1B9LC29NmZNrsrWfPnub5Cy+84LK/kN6l/+yzz8zzgQMHOl/XvmWFBQg6wIslb382qw+TK3oBZDXBctcHLlQNGzbMPG7YsEHefPPNIje1u+6668zziRMnOvuk5fXnn3+adWd+/sraVy8yFy5cWGBf/f3o9+qOFRjqRYF1YeBOJPav0qae1sA5+tnqd1cY/Txvv/12+fXXX23KIfA3zgmcEyycE/xHy/J+/fqZILZEiRIyevTooP3tAfATG6eGgo0+//xzM0+gfsUVKlRwPPfcc46tW7c612dnZ5t5AkePHu0oU6aM2e7o0aMF5qzU+fwKs3LlSkdCQoLZtlOnTmZeRpWTk+P46quvHNWqVTPrdO7DkydPOvfbsWOHo169eo6xY8eafGRlZTnXrV271tG1a1ezX4kSJRyHDx92rtP5Ih9++GHHTz/95MjMzHS+ru+tf//+zjknN2zY4PWclWd7z+4UZZ7ELl26mG3GjBlTYN2AAQOc+df3qPNNWg4ePGjmRxw6dGi+fXQb6/tr2rRpvjl1dV5Ma47PcuXKOf744498++pn3qpVK+f6GTNmmN+F2rhxo8mrdWxX70u37dGjh1mnvzX9LvPO1ZiamuqYP3++4/bbb3eULl3a7VyR+lvzp0DPWZmX/g6tOUH1t/rII484fv31V0dubq5Zr4+//fab4/nnn3dUrVrVbLd69ep8x9DfsX6/1jJhwgRn/vO+rov+XQHe4JzAOYFzgu/nhBMnTphjDB8+3FGsWDHne/36668D9rcHwD4ExhFMT4INGjRwnhCsAlxPeHqitV6LiYlxDBw40HH69GmPL4LUJ5984iz0ddFAwTph6FKzZk1zUs1LL4Ly5isuLs7kK+9x9Pn06dPz7Zd3H30PZcuWzZeWvpdXXnnFq8/LugjS4+rF1tmW3bt3+/UiKC0tzdG3b99871E/S72AsP7fvHnzAvstXLgw3zZ64a
iL9X890S5atMhlfrZv326+H2vbpKQk57H08//iiy8KfV/Hjx93XH755QXyrGnqd2G9Fh8fH5YXQUWhv23rYjLv+9Xfsz7mfb1nz56OQ4cO5dvf+lsryqJ/N4C3OCd4hnNCdJ8T9HdkfbeVKlVyFC9evECZ3KFDB3MzNJB/ewDs8/dICYhIOqLnpk2bZPr06TJ79mz5+eefzXQMJ0+eNP2QmjRpIl26dJGbbrpJGjdu7HU62vyqdevWZgRQ7bukA3tof6MWLVrI1VdfLaNGjTJTbuSl00HMmjXL9GX66aefzD6aN+0b1qBBAzNohU6doaOQ5vXdd9+ZfbSp1+7du52DsOg+nTt3ljvuuMPkxRfanM/V4C6FjcLpDzqSqjYx1H5x7777rvm+Dh06ZKb6aNasmZm65IYbbiiwn36Hv/32mxl5VZuv7dy50zSpO+ecc+Syyy4z84FaA7mcqV69emY016efftoMDqLNvLSPlM5zqfOCnu2z1O9V+5PpKKQffPCB+S71s9NrQv2Ozz33XPNd6nQqkUo/Zx01V783/QyXLl1qmjDqXKr6+egopTqY0I033uh2/lfADpwTvMM5ITrPCdqfXRelv0P93PVz089Ry3Jt8qz95EPpbw+Ab2I0OvbxGAAAAACAEKA36OfOnSu//PKLWfSGl97o0RswrsYQ8MSCBQvMjTe9waPzzut86/379zc3z7S/vTu67XPPPSczZswwo9dr//sLLrhA7r//fnOjLxQQGAMAAABAhHj11VflnnvuKfC6r4HxhAkTTOsdDR9r1KghFStWlI0bN5qBELU1hbbecTUNn7Z20dZzmzdvNgMjauuRgwcPmtZB2qLljTfeMAOUBlt4DdMIAAAAACi0W0OPHj3kkUceMdPduRo13VMrV640XWGUjm6v3VdWrVolv//+u+lmoV04br31Vrcj7WtQrNvp9rqf7q/H0SD77rvvNrXawUaNMQAAAABEKK2Rveuuu3yqMb7qqqvkiy++kJtvvtmMIZDX1q1bTV95HZNBp+XTcRAsq1evds5zr8GxjgGRlx5vypQpZkozazq/YKHGGAAAAADgtn/wnDlz8s1ZnpcOjNi9e3fzXAeZy0v7FCtdf2ZQrEaMGGEedbDAtLQ0CSYCYwAAAACAS1rrq/2ItX9wu3btXG6jMwGoZcuW5Xvd+v9FF13kcj89nh5XBwcLdnNqAmMAAAAAgEtbtmwxj7Vq1TLT77mi01MqbS7tal9r/Zn0eDVr1nS5r92YxxgAAAAAgkQHoZo0aZJH+2iTZqsZcqAdOXLEPLoacdpirTt69Kjf9rUbgXGApKZn2JJOvOTakk56TkzA0yiZmy52cMTa87OPybLn/cSlHQ54GjmlqoodchOSbUnnxz/t+W5+2hX4An5Ym+pihyql3c9NCPvL/xybhs08bVNCJRPtacCWkRX4c2Zygj3vJcamsVNjck7bko4jLjHgacTkZosdPvrNnov7vk0q2JJOmZLFbUkn3CW2HOr1vo9eWcuM1OyJffv2iV0yMzPNY2Ki+79TbQ6tMjIy/Lav3QiMAQAAACBIqlatakZu9nQfuxQrVsw8nj7t/kaZ9kFWycnJBfZNT0/3al+7ERgDAAAAgA9iYuO83lebRNvVLNobZcuWzdcs2hVrnbVt3n01MPZmX7sx+BYAAAAAwKVGjRqZx927d0tWVpbLbbZv355v2zP33bZtm8v99Hh6XFf72o3AGAAAAAB8rDH2dgl1LVu2NH2Etcnz8uXLXW6zePFi89i+fft8r1944YX51p9Jj6fNrLXJdYsWLSSYCIwBAAAAAC6lpKRIz549zXNXo2dv3bpV5s+fb57369cv3zrr/wsWLHBZa6wjcqvevXtLyZIlJZgIjAEAAAAgymuMO3XqJHXq1JFXX321wLrRo0dLTEyMTJkyxQTHjv+NjK+jYw8cOFByc3PlqquukubNm+fbTwcVu/zyyyUnJ0cGDBjgHE1b99fj6PFiY2Pl8c
cfl2Bj8C0AAAAA8EEoBbh79uwxzZ/PnDJp6dKlUqHC/03z9eCDD5rF8scff8iuXbvk2LFjBY7Ztm1befnll+Xee+81A4WNGzfOHGvjxo2miXXjxo3l7bffdpmf9957Tzp27CgrV66UunXryrnnniuHDh0y+dRgWwNxT0flDgRqjAEAAAAgQmjt7OHDh51LWlqaeT07Ozvf6zpatCdGjRolc+fONc2e9ZgaFNeuXVseffRRWbFiRb6gO6+KFSuaoFi30+11P91fj/P999/LXXfdJaEgqmqMtW37Sy+9JD///LOkpqaaL6Z///7y8MMPS4kSJYKdPQBAAFD2AwACLSYudGqMtTm01dTZEzt37jzrNhdffLFZvOmn/PTTT5slVEVNjfGECRPMl/jVV1+ZUc/OOecc8+VrMwBtGlDY3FoAgPBE2Q8AAIoiKgJjrbrXqn9r5DOdK2vVqlXy+++/S+vWreW3336TW2+9NdjZBAD4EWU/AMAusbFxXi8IDVERGI8dO9aMlHbTTTfJ8OHDTSdvVa1aNZk6daoZCW3mzJmybt26YGcVAOAnlP0AAKCoIj4w1v5kc+bMMc/1wuhMDRs2lO7du5vn06dPtz1/AAD/o+wHANgpEqZrinYRHxivXr3aDCGelJQk7dq1c7lN586dzeOyZctszh0AIBAo+wEAdiIwDn8RHxhv2bLFPNaqVUsSEhJcblO/fn3zuHnzZlvzBgAIDMp+AADgiYifrskacbRcuXJut7HWHT161LZ8AQACh7IfAGCnmNiIr2+MeBH/DWZmZprHxMREt9toUzuVkZFhW74AAIFD2Q8AADwR8TXGOm+lOn36tNtttB+aSk5OdruNTvUxadKkIqc7aPBgGTrsFo/yCgAIrbJfUf4DAM6GvsLhL+ID47Jly+ZrVueKtc7a1pV9+/aZ+S+LqmevXh7lEwAQemW/ovwHACDyRXxg3KhRI/O4e/duycrKcjkIy/bt2/Nt60rVqlWlVatWRU63SpUqXuUXABA6Zb+i/AcAnA01xuEv4gPjli1bmj5m2mRu+fLl0rFjxwLbLF682Dy2b9/e7XFGjBhhlqJKTafPGgCEe9mvKP8BAGdDYBz+In7wrZSUFOnZs6d57qqP2NatW2X+/Pnmeb9+/WzPHwDA/yj7AQCAJyI+MFajR4+WmJgYmTJlirlAcjgczn5jAwcOlNzcXLnqqqukefPmwc4qAMBPKPsBAHaJiYvzekFoiIrAuG3btvLyyy+b59ocrnbt2qa/WN26dWXlypXSuHFjefvtt4OdTQCAH1H2AwCAooqKwFiNGjVK5s6dK71795a0tDTZuHGjuUh69NFHZcWKFVKhQoVgZxEA4GeU/QAAu/oYe7sgNET84Ft5XXzxxWYBAEQPyn4AAHA2URUYAwAAAIC/UfMb/giMAQAAAMAHsQTGYS9q+hgDAAAAAOAKNcYAAAAA4AOaUoc/aowBAAAAAFGNGmMAAAAA8AE1xuGPGmMAAAAAQFSjxhgAAAAAfECNcfgjMAYAAAAAHxAYhz+aUgMAAAAAoho1xgAAAADgA2qMwx+BcYAUO7bblnQyy9SyJZ3iNvxSMnJL2PPdSLYt6Uhuri3JZFVoEPA0TmY5xA6lcrNsSadVFXt+a53KnAp4GrmJNPwJNXb8tSTF2FO+FIu1qbzMsed3HBcb+JNZjMOe8tKmYlkSc3PsSSg28L/pnNgEscOAppVsSScuJ/DnGCCaEBgDAAAAgA9i4qgxDndUNQAAAAAAoho1xgAAAADgA/oYhz8CYwAAAADwAYFx+KMpNQAAAAAgqlFjDAAAAAA+oMY4/FFjDAAAAACIalERGO/fv1+mTJkid999t7Rv316Sk5MlJiZGunbtGuysAQAChLIfAGCX2NgYrxeEhqhoSv3JJ5/IPffcE+xsAABsRNkPAACKKipqjEuVKiU9evSQRx55RGbOnCmjR48OdpYAAAFG2Q8AsEtMbIzXS6AsWLBALr/8cqlYsa
JpNdWkSRNzLkxLS/PoOAsXLjQtroqy/Otf/yqw/9n2qVKlioSCqKgxHjp0qFkse/fuDWp+AACBR9kPALCLBnihZMKECTJy5EhxOBxSo0YNqVmzpmzcuFHGjRsnn332mSxZskTKlStXpGOVLl1aOnbs6Hb98ePHZf369eZ5hw4d3G7Xpk0bSUpKKvB6+fLlJRRERWAMAAAAANFg5cqVMmrUKPN84sSJcuutt5rA/c8//5Q+ffqY9fqaBshF0bJlSxNIu6O1xBoYa/B98cUXu91u+vTpUqdOHQlVUdGUGgAAAACiYfCtsWPHSm5urtx0000yfPhwZ212tWrVZOrUqRIbG2u6GK1bt87ntBwOh3z44Yfm+c0332yOHa7CN+cAAAAAAKfU1FSZM2eOea5B8ZkaNmwo3bt3d9bg+mrRokXy+++/m+eDBw+WcEZTagAAAADwQSAH0fLE6tWr5dSpU6Yvb7t27Vxu07lzZ5k3b54sW7bM5/QmT55sHjt16iQNGjQ4a022NufOzs6W6tWrmwD9uuuuc9nvOBgIjAEAAAAgAmzZssU81qpVSxISElxuU79+ffO4efNmn9JKS0uTGTNmFLm2+L333sv3/w8++EDGjBlj+jq3atVKgo3AuIi04/qkSZOKvP2wa/vI8JsGBDRPAIDQK/9vHjxYhg67JaB5AgBETo2xp+cZq5n0iBEjCrx+5MgR81jYiNPWuqNHj4ovpk+fbppuFy9eXK699lq321155ZWmv3Pz5s3NCNm6j9ZYP/bYY6YZ9qWXXmpqunXwrmAiMC6iffv2yapVq4q8/WVd2wc0PwCA0Cz/L+3VK6D5AQCEnlgfpmvy9Dxj7eNKZmameUxMTHS7r9V0OSMjQ/zRjPqaa66RlJQUt9v997//zff/YsWKyYABA6RHjx7SunVr2b17txnZ+p133pFgIjAuoqpVq3pUxV+lUsWA5gcAEKLlf5UqAc0PACC6zzPWPq5o0KlOnz7tdl/tg6ySk5PFWzt27DADb/ky6FaFChXkkUcekX/+85/y+eefy9tvvx3U+aAJjItImyq4aq7gTvafvrXZBwCEZ/l/Mt23O/AAgOhqSu3peaYwZcuWzdek2hVrnbWtNz744AMzVVPt2rWlW7duXh+nQ4cOzjzpUr58eQkWpmsCAAAAgAjQqFEj86jNk7Oyslxus3379nzb+jJ38aBBg3yq5c3b5FtHqw4mAmMAAAAA8LHG2NvFn1q2bGmCTW0uvXz5cpfbLF682Dy2b+/dmEg//PCDaUqtAbEGxr5Yv369swl4MGuLoyYw3rNnj2nDbi0PP/yweX3p0qX5Xh8/fnywswoA8BPKfgBAtNFBsHr27GmeuxrpeuvWrTJ//nzzvF+/fj4NutW5c2epV6+e13nVGuKXXnrJPNc5jePjg9vLNyoC45ycHDl8+LBz0Tm3rC8j7+vp6enBzioAwE8o+wEAdomNjfF68bfRo0eb2twpU6aY4FibPlsjWQ8cOFByc3PlqquuMtMn5VWnTh2zWHMTu6JTLVnrhwwZcta86E1p7Y988uTJAjevNTBftmyZCYifeOIJCbaoGHxLv2DrBwEAiA6U/QAAu8SEUHVj27Zt5eWXX5Z7773XDOo1btw400Jq48aNpol148aNzQjQZ9q1a5cz+HVHg2K90VyiRIki1Thv2rRJnn/+eRk2bJipXdY5lI8fPy6bN28252htQq3TNF1wwQUSbFERGAMAAABAtBg1apScf/75pqnyzz//LH/99ZcZQVqDWZ0iqWTJkj41o+7Xr1+RjqFTMek0hitWrJC9e/fKzp07zTzKTZs2NfMY33nnnVK/fn0JBTEObqcHhF3TNWWWqWVLOvE2TCl2KldsUUzsGfEuJuO4Lenklgj8QAUns+wpJkrFuh490d8yJMGWdIpnup8qIZy+f1XMh7kOo40d0zUliD0FZkxudkRVtWTaUB+QZFOtkU3FsiRm2zP9mCM+KeBp5MbESSSJy/l7Lt
pASypZ2pZ0wl3Lx77xet/VT/f2a17gnRCq9AcAAAAAwH40pQYAAAAAHwRiEC3YixpjAAAAAEBUo8YYAAAAAHwQQ41x2CMwBgAAAAAfEBiHP5pSAwAAAACiGjXGAAAAAOCD2BhqjMMdNcYAAAAAgKhGjTEAAAAA+IA+xuGPwDhAcouXtSWdOJv+CHMdgU8j+dSRwCciIqeL2fPdJMba0yDDYUPTnTKnDoodcpNL25JOvE1/N8cSAv9bK27D3yZCUIw95UtOXJIt6djVAjHOhpOZHWWyirMlFRFHQrIt6cTknA58InH2fGpxOaci6u8TiBYExgAAAADgA2qMwx+BMQAAAAD4IJbAOOwx+BYAAAAAIKpRYwwAAAAAPohhuqawR40xAAAAACCqUWMMAAAAAKE/WQACiK8QAAAAABDVIj4wdjgc8uOPP8rDDz8snTp1kvLly0tCQoJUrFhRLr30Uvnoo4/MNgCAyEHZDwCwe1RqbxeEhohvSj1//nzp0aOH8//16tWTunXryo4dO2Tu3LlmmTp1qnz22WeSlMRE6QAQCSj7AQB2Yh7j8BcVNcZ6MfTaa6/JgQMHZPv27bJixQo5fPiwfPjhh+aC6KuvvpInnngi2FkFAPgJZT8AAPBEjCPC25KdOHFCkpOTTRM6V5555hl57LHHpFy5cnLw4EGJjfXPvYLTx/4SO+QUK2VLOnb8SuIzjgQ+Ef1uipW1JZ3EzKO2pJNTvFzA04g/ac/vOTe5tC3pZMUm2pJOelZuwNMonmDP/c2SxZMlnASr7Fcn0zMk0BJsqpjIFXsSsmuWk5zcwJ/M4myqNbLr6i1W7EkoJud0wNPIibOndUhczilb0rHr/RRPLmZLOuGu26uLvN53waiL/JoXeCfia4xLlSrl9sJI9e7d2zweOXLEXBwBAMIfZT8AAPBExPcxPpuMjP+7s6+1CwCAyEfZDwDwJwbRCn8RX2N8Njr4imrevLmpYQAARD7KfgAAkFdU1xivXLlS3nrrLfNcp/QAAEQ+yn4AgL8xKnX4i9rAWEcp7du3r2RnZ8vVV18tAwYMCHaWAAABRtkPAAjngfcQOFHZlPr48eNm4JXdu3dL69atZfLkycHOEgAgwCj7AQCAO1FXY5yamiq9evWS1atXS9OmTeXbb78tUv+yiRMnyqRJk4qcztAbB8qtQ272MbcAgGCW/d6U/zcPHixDh93iQ24BAOGGGuPwF1WBcXp6ulx22WWybNkyadiwocybN0/Kly9fpH337dsnq1atKnJa/7ikuw85BQCEQtnvTfl/aa9eXuYUAAAES9QExpmZmdKnTx9ZtGiR1K5dW77//nupUqVKkfevWrWqtGrVqsjbV6lc2cucAgBCpez3qvz38PgAgPBHjXH4i3E4HA6JcFlZWXLVVVfJ119/LdWrVzcXSPXq1QtomqeP/SV2yClmzzQjdvxK4jOOBD4R/W6KlbUlncTMo7akk1O8XMDTiD9pz+85N7m0LelkxSbakk56Vm7A0yieYM9QESWLh99cv8Eo+9XJ9P+bIzlQEmy6/soVexKKsen95OQ6Iubi2K6rt1ixJ6GYnNMBTyMnLknsEJdzypZ07Ho/xZOL2ZJOuLti0k9e7/vl8PZ+zQu8E/E1xjk5OXL99debCyO9iz9//nxbLowAAMFD2Q8AsBM1xuEv4gPjadOmyYwZM8zzYsWKydChQ91uO2HCBGnZsqWNuQMABAJlPwDATgTG4S/iA+NTp/6vOcvOnTvNUthUHgCA8EfZDwAAPBEVfYyDgT7GnqOPsXfoY+w5+hhHRx/jYKGPsefoY+w5+hh7jj7G3qGPcdFcN3m51/t+OridX/MC79hzRQUAAAAAQIiK+KbUAAAAABBI9DEOf9QYAwAAAACiGjXGAAAAAOADaozDHzXGAAAAABBhFixYIJdffrlUrFhRkpOTpUmTJjJ69GhJS0vz+FiDBw+WmJiYQpc5c+a43T81NVUef/xxkwfNi+ZJ87Zw4U
IJFdQYAwAAAIAP4mJDq75xwoQJMnLkSNEJiGrUqCE1a9aUjRs3yrhx4+Szzz6TJUuWSLlyns9sUrNmTalVq5bLdWXLup755dChQ9KpUyfZvHmzJCUlybnnnisHDx6Ur776Sr7++mt544035Pbbb5dgIzAGAAAAgAhpSr1y5UoZNWqUeT5x4kS59dZbTY3un3/+KX369DHr9TUNkD01dOhQefLJJz3aZ9iwYSYobt26tcyaNUuqVatmAva3335bRowYIXfffbd06NBBWrRoIcEUWrc2AAAAAABeGzt2rOTm5spNN90kw4cPN0Gx0oB06tSpEhsbKzNnzpR169YFPC+rV682wbCm+cknn5g8KM2T5k3zmJOTY/IcbATGAAAAAOBjjbG3iz9pX16rr68Gnmdq2LChdO/e3TyfPn26BNqMGTPMo6bZoEGDAuu1xlhpk2pv+j77E02pAQAAACACaA3tqVOnTF/edu3audymc+fOMm/ePFm2bJlXA3pt2LBBDh8+LGXKlDHNo2+88UapXbu2y+2tNC666CKX6zWPmtfMzExZs2aNdOzYUYKFGmMAAAAAiIAa4y1btphHHSArISHB5Tb169c3j9rv11OLFi0ytcAaIH/++edmpGmthR4/fnyh+bHSPJPmUQf08jY//kSNMQAAAAAEiQ6QNWnSJI/20WbSVjPkvI4cOWIeCxtx2lp39OjRIqfXsGFDeemll0yT6Dp16phaXu2jrK9pk+yHHnpISpYsWWB06UDlJxAIjAMkNa6kLemcysyxJZ3SSXEBT8MRX0zskHzw7ztXgfaL/H33K9Ba5/wV8DRySpQXO5S7+BFb0jn63VO2pJOSFPjfdEZWbsDTgGfibRiZNCvXIXaIC51BVv3i+KnA/71USLTnb9IRY0+jv5icLFvSyU2w4RrAnj8bOR2baEs6idmZtqQjYs/1WbiL+98AV97Yt2+frFq1yuN9XNEmySox0f3vUINalZGRUeT0HnvssQKvXXDBBTJt2jS544475D//+Y/Z5uabbzYBcqDzEwgExgAAAADgA1+aRFetWlVatWrl8T6uFCv2942M06dPu91X+yCr5ORk8YdnnnlG3nnnHTl27JjMnz/fTAmVNz/p6em25sdbBMYAAAAAECTaJNpVs2hvlC1bNl8TZlesdda2vipdurQ0bdrUDPy1devWAvnRwNjO/HiLwbcAAAAAIAIG32rUqJF53L17t2Rlue4KsX379nzb+kPi/5pKZ2dnu8zPtm3bXO6nedS8+js/3iAwBgAAAIAI0LJlSxOkavPk5cuXu9xm8eLF5rF9+/Z+STM7O1s2bdpknteoUSPfugsvvDBfmmfSPGoza21y3aJFCwkmAmMAAAAA8HHgRW8Xf0pJSZGePXua565GutamztoPWPXr189vo2ofP35c4uPjzajVeVlp6PROrmqNdV/Vu3fvfIN2BQOBMQAAAABEiNGjR0tMTIxMmTLFBMcOh8M5kvXAgQMlNzdXrrrqKmnevHm+/erUqWMWnac4r7lz55rpmM7sP6w1vRMmTJB7773X/P+2224rMCiYDip2+eWXS05OjgwYMMA5mrbmSfOmeYyNjTXzIQcbg28BAAAAgA/83VfYF23btpWXX37ZBKw6qNe4ceOkQoUKsnHjRtPEunHjxvL2228X2G/Xrl3mMTU1Nd/raWlpMn78eLNUrlzZ2Vx68+bNzm2vueYaM6exK++995507NhRVq5cKXXr1pVzzz1XDh06JHv27DEB/KuvvurxqNyBEBU1xjrptE6C3aZNG6lWrZqZK0ubGegXoHdUDh8+HOwsAgD8jLIfABBtg29ZRo0aZWp6tYmyBrYaFNeuXVseffRRWbFihQmUi6p169bmvNmjRw/TF1j7E//6669mNOq+ffvKrFmzTC2zu7mKK1asaIJiTVvzoHnRPGnevv/+e7nrrrskFMQ4rLr1CKYdudeuXWsuirR6X38If/31l3MEtEqVKsl3331XoD
mBL46cTBc7nMqx5+srnRQX8DTisuz5zOKP7rElnV+kpi3ptC6e/65eIOSUKC92KHfxI7akc/S7p2xJJzfh77kEAykjK1fsUC6luISbYJT9KiMzUwItO9eesj8uxp4aEJuSkSMZOQFPo0KiPX+Tjhh76jZiclyPahuO5aVdV7w5NiWUmPP33K+BlpRSxpZ0wt3j3/zm9b7jep/j17zAO1FRY3zHHXfIDz/8ICdPnpQdO3bIL7/8YpoKrFu3Ts477zxzoXT99dcHO5sAAD+i7AcARGuNMTwXFYHxrbfeKhdddJEkJCTke/3888+Xd9991zzXKv3ffvP+Tg8AILRQ9gMAgKKK+sG3zjnn/5oupKfb05QXABBclP0AAH+i5jf8RUWNcWGWLFliHnXeLB2hDQAQ+Sj7AQCARHuNsc7dtX//fjPois7JpZ577rmgTyoNAAgcyn4AQKBQYxz+oiow1jmy7rnnnnyvtWvXTj744APp1atX0PIFAAgcyn4AQKARGIe/qGpKXb16dTO59AUXXGCm7tAJpdesWSMffvihHDt2LNjZAwAEAGU/AAA4m6iqMe7fv79ZLDplx5133ilTp041o5LqZNdxca7n6504caJMmjSpyGndcPNgGTx0mF/yDQAITtnvTfk/eMgQueWWW3zONwAgfFBjHP6iKjA+U7NmzeSrr76SevXqmdqDTz75RG644QaX2+7bt09WrVpV5GP3uJTmeQAQ7mW/N+V/r969/ZRTAABgl6gOjFVKSop06dJFPvvsM1m5cqXbiyNtfteqVasiH7dylSp+zCUAIBhlvzflfxXKfwCIOtQYh7+oD4xVdnZ2vkdXRowYYZaiOnKSeTEBINzLfm/K/4zMTJ/zBgAA7BX1gfGRI0dk4cKF5nnLli2DnR0AgA0o+wEA/kSNcfiL+FGpf/jhBxk3bpzs3LmzwDrtM9azZ085fvy4GbU07+AsAIDwRdkPALA7MPZ2QWiI+Brjo0ePyujRo82i/b70IkhHH92zZ48ZUEXpa7Nnz5aSJUsGO7sAAD+g7AcAAJ6I+MC4Q4cO8vLLL5smcxs2bJAtW7ZIZmamlC1bVrp16yZXXHGFmVZDB2IBAEQGyn4AgJ3iYqj5DXcxDofDEexMRCK7Bt86lWPP11c6yf0cn/4Sl2XPZxZ/dI8t6fwiNW1Jp3Xx1ICnkVOivNih3MWP2JLO0e+esiWd3IRiAU8jIytX7FAupbgt6UQCOwbfys51RNSFnl3Xk0cycgKeRoVEe/4mHTH29IaLycmKmPLSriveHJsSSsw5ZUs6SSllbEkn3L25rGDXnaL654V1/JoXeCfia4wBAAAAIJBiqTEOexE/+BYAAAAAAIWhxhgAAAAAfBBHhXHYIzAGAAAAAB/EMu1S2KMpNQAAAAAgqlFjDAAAAAA+YLqm8EeNMQAAAAAgqlFjDAAAAAA+YLqm8EeNMQAAAAAgqlFjDAAAAAA+YLqm8EdgDAAAAAA+YLqm8EdgHCAls0/Ykk7K6Qxb0pGsuIAnEXt4l9iiWIotydQpm2RLOosP5gQ8jQ4lxBbHvn7UlnRSJdGWdFKO7Q14GmVSD4otUi60J50I4HAEPo2kU8fteS+J9vzx58Yl2JJOhcTcgKeRE2vPezmUkW1LOiUT7CkvS2RlBjyNnPhiYge7wqMTDnu+m4q2pAIEH4ExAAAAAPiAwbfCH4NvAQAAAACiGjXGAAAAAOADBt8Kf9QYAwAAAACiGjXGAAAAAOAD+hiHPwJjAAAAAPBBHNM1hT2aUgMAAAAAolpUBsZff/21xMTEmKVOnTrBzg4AwCaU/wCAQDWl9nZBaIi6wDg1NVX++c9/BjsbAACbUf4DAAB3oi4wfvTRR2X37t1y5ZVXBjsrAAAbUf4DAAI5XZO3C0JDVAXGy5Ytk3//+9/mouiqq64KdnYAADah/AcARJsFCxbI5ZdfLhUrVpTk5GRp0qSJjB49WtLS0jw6Tk5OjsydO1dGjRol7dq1kz
JlykhiYqJUrVrVnFe/+uort/vu3LnT2YXJ3XLhhRdKKIiaUamzsrLk1ltvleLFi8sbb7wh8+bNC3aWAAA2oPwHAARaqPUVnjBhgowcOVIcDofUqFFDatasKRs3bpRx48bJZ599JkuWLJFy5coV6ViTJ0+WW265xTyPjY2VBg0aSMmSJWXbtm0ya9YsswwfPlzeeustE+i607FjR5evN23aVEJB1ATGzz77rKxfv15eeeUV8+MAAEQHyn8AQDRN17Ry5UpTu6smTpxobg5rwPrnn39Knz59zHp9TQPkonA4HNKsWTO5++67pV+/flK6dGnzenZ2trz66qvy4IMPyqRJk6RFixaFjuWhwXgoi4qm1L/99ps888wz0qpVK7nrrruCnR0AgE0o/wEA0Wbs2LGSm5srN910k6nJtWpxq1WrJlOnTjW1vjNnzpR169YV6Xh9+/aVNWvWyLBhw5xBsYqPj5f777/fWZusQXg4i/jAWO9w6B0RbUqnX1ZcXFywswQAsAHlPwDALlph7O3i7xkY5syZY55rUHymhg0bSvfu3c3z6dOnF+mY5cqVK7SJdO/evc3j5s2bJZxFfFPqN998U5YuXWqq/tu0aRPs7AAAbEL5DwCINqtXr5ZTp05JUlKSGSjLlc6dO5vxNnRgSn/IyMgwjzqWR2H0fLxp0yYTZNepU0d69uxpBsTUGuxQENGB8d69e+WRRx6R6tWrm47mAIDoQPkPALBTXIgMvrVlyxbzWKtWLUlISHC5Tf369f1awzt16lRnwH22AcHysvola7PuunXrSrBFdGCs/clOnDgh77//vqSkpPh0LG2Gp19eUQ298Tq5dfDNPqUJAAi/8n/w4CEy7H/9rQAA8Pd5xmomPWLEiAKvHzlyxDwWNuK0te7o0aPiqy+++EJmz55taoF1EK4zaT/kG2+8UQYMGGBGn9Z+zocOHTJTPD3++OOm7/Kll15qBgQrVaqUBFNEB8arVq0yj7fffrtZXFX579mzR6pUqWKe692KDh06uDzWvn37nMcrin/06OZDzgEA4Vr+9+r1d18rAED08GW6Jk/PM9Y+rmRmZppHnWfYHW1mnfd86K1NmzbJoEGDzHMdBdvVeVRng5gyZUq+1zQ41jFAunXrJq1btzbTPr3++usmUA6miA6MLQcOHHC7Tkdss9afPn3a7XY6gbWOalpUVapU8jCXAIDIKP//DrYBANEjzodusp6eZ6x9XClWrNhZz2vaB1klJyeLt/bs2WP6CB8/flz+8Y9/yPPPP+/xMXQ+ZJ3eSffVG9QExgG0c+fOQieqHjJkiNSuXbvQ7SzaVMFVcwV3Th/dX+RtAQCRU/6nZ/x9tx4AgKLw9DxTmLJly+ZrUu2Ktc7a1lP79++Xiy++WHbv3i1du3Y18yG76898NlYt89atWyXYIjowBgAAAIBQbkrtT40aNTKPGrTqdIWuAtbt27fn29YTf/31l5nuSQPZ9u3by5dffumspfaG1eQ7Oztbgi00xsYGAAAAAPikZcuWJtjU5tLLly93uc3ixYvNowa2njhy5Ihccskl8ttvv5mm3998842ULFnSp/yuX7/e2Rc52AiMAQAAAMDH6Zq8XfxJZ2LQvr/K1UjXWtM7f/5887xfv35FPu6JEyfM6NHr1q2T8847T7777jspXbq0T3lNTU2V//znP+a5HjvYojYwHjx4sDgcjiL1LwMARA7KfwBAJBs9erSZPklHg9bgWM951kjWAwcONINPXnXVVdK8efN8+9WpU8csM2bMyPd6enq6XHbZZWZKpSZNmsj3338v5cuXL/K0UjqwljXgV94RrXv16iU7duwwtc4PPPCABBt9jAEAAAAgAvoYq7Zt28rLL78s9957rxnUa9y4cVKhQgXZuHGjCVAbN24sb7/9doH9du3a5azJzeu1116TJUuWOP/ft29ft2lrUJ13dgZtzq1paV9nHYVa5yrWeYytfs46ANi0adNMQB5sBMYAAAAAEKTpmg
JB5xU+//zz5aWXXpKff/7ZDJqlszFo8+lHHnnEo77Bp/LU9mpNb2GseZQtmtacOXNMbbOOZq1NuYsXL27mL+7du7fccccdITPNYYzDqluHX9k1XVPMad8m5i6y2LjAJ3H477tUAVcsxZZkDpZtaEs6Gw+mBzyNDtWKix1iT520JZ3U+FK2pJOSti/gacSlHhQ7xDa40JZ0IoEd0zXFZx4TOzgSS9iSTm6cd9N8eCo2JyvgaeTE2vNeDmXYM4JryQR7rvZLiPs5V/0lJ977kXM9kWvTpXVGtj3pVCxlzzVAuFuz1/tyuUX1Mn7NC7xDjTEAAAAAREhTangnxCr9AQAAAACwFzXGAAAAAOADKozDHzXGAAAAAICoRo0xAAAAAPggVqgyDncExgAAAADgA5pShz+aUgMAAAAAoho1xgAAAADgg1hqjMMegXGAOBJL2JLO8bhStqRTLD7wf+1Jcfb8HIfP2W9LOkMuyLAlnY47Zwc8DUe1a8UOd86157sZ1zPFlnRiM08GPA1HdlbA04BnTufkBjyN1Fh7fsO52bYkI2VzM21JZ9lfgX9D7cvbU/ZXyT1tSzqn4srbks72E4FvxFghOUfskJIUZ0s6pWLt+Q0A0YLAGAAAAAB8QB/j8EcfYwAAAABAVKPGGAAAAAB8wHRN4Y/AGAAAAAB8QFPq8EdTagAAAABAVKPGGAAAAAB8wHRN4c/nGuOhQ4eaZceOHf7JEQAAAAAA4VRj/OGHH0p8fLy8++67/skRAAAAAIQRKozDn881xpUqVZLixYtLTAj3OH/yySdN/gpb3nrrrWBnEwDgR5T9AADAthrjdu3ayZdffil79+6V6tWrSyjTIL5hw4Yu11WtWtX2/AAAAo+yHwAQaLEhXEkImwLjkSNHmsB4zJgx8s4770go6927t0yePDnY2QAA2IiyHwAQaMTF4c/nptTdunWTV155RT744AO59tprZdWqVf7JGQAAAAAA4VBjXK9ePfOYkJAgn332mVmSk5OlfPnyEhcX53If7de1fft2X5MGAAAAgPCvbUT4B8Y7d+4s8Fp6erpZ3AnWQF1r166V66+/Xvbv3y8pKSnSrFkzGTBggDRt2jQo+QEABB5lPwAACHhg/P7770u4WLNmjVkss2bNkqefftr0k37xxRfd1nADAMIXZT8AINBCeYYe2BQYDxo0SEJdtWrV5KmnnpKePXuapt9aY7Blyxb5z3/+Y6bqePXVV01T8PHjxwc7qwAAP6HsBwAARRXjcDgcEsX0guihhx6S+Ph42bp1q9SpU8cvxz2VdlLscCLHnpqOYvGBvwuWlHlU7DB8zn5b0hlyQS1b0rlwx5cBT8PR4Vqxw92zt9qSzrierqfu8bfyR214P5n2lDVxTTpLJAlU2a+OpbrvSuQvp3PsOXXn2pKKSNnYLFvSWfZXdsDTaF/epk8t+7QtyZwqXt6WdPacCPxvoEKyPddMKUn2pBObfcqWdJJKlrYlnXC371ia1/tWLVPCr3lBkGqMw919990nr732mvz555+med3dd9/tcruJEyfKpEmTinzcIYNulluHDfVjTgEAdpf93pT/Nw4aLEOGDvNTTgEA4YCW1OHPb4HxH3/8IS+//LJ8++23smvXLsnMzJTs7P+7M3v06FF58803Tfv7Bx54wNylDwXat+yCCy6Qzz//3NQauLNv3z6PpqLq3fNSP+UQABCsst+b8v+Snr38kEMAAGAnv0Snc+fONXMYnzhxQqyW2Wd2QC9btqz897//lZUrV5qRQPv06SOhIjEx0TzmDeTPVLVqVWnVqlWRj1mlShW/5A0AELyy35vyvzLlPwBEHaZrCn8+B8Z79uyRfv36ycmTJ02we/PNN8utt94qx44dK7Dt0KFDZcWKFfLVV1+FVGC8fv1681ijRg2324wYMcIsodbHGAAQuLLfm/Lfjj7GAAAgxG5uvPTSSyYo1hpjrRHu27ev8y78mXRkUPXLL79IqNAgfcOGDeb5pZ
fS/BkAogFlPwDAn7S1rLcLIiQw1j7F+oWOHTv2rNvWrVtXkpKSZMeOHWIXvfDRO/1r167N93pubq5MnTpVrr/+evP/yy+/XNq2bWtbvgAAgUPZDwAAbG1KvXv3bklOTpaGDYs2/UnJkiXl+PHjYpesrCwzmqgu5cqVk9q1a5uBv7Zt22YGBFOdO3eWKVOm2JYnAEBgUfYDAOwUS8Vv2PM5MI6NjZWcnJwibasDnOgAXaVKlRK76NyU48aNk59++kl+++03c1GkI2brhVLv3r1NrcHAgQPNCKUAgMhA2Q8AsBNxcfjzOTDWu/B60aE1x7Vq1Sp020WLFpm7+EWtXfaHMmXKyGOPPWZbegCA4KPsBwAAtvYx7tGjh3l86623Ct1OA2K9SNH+yHq3HgAAAAAipSm1twsiJDC+5557zCjUOjr1u+++63KbVatWmQD6559/lpSUFLn99tt9TRYAAAAAgNAIjLUp9TvvvGP6GQ8fPlwqV67sHNikQ4cOUr16dTPi5+LFi83AJx9++KFUqFDBH3kHAAAAgKBjuqbw53NgrG644Qb55ptvpH79+nLw4EE5ffq0OBwOWbZsmezbt888b9CggcyZM0f69OnjjyQBAAAAAG4sWLDATEtYsWJFM4tQkyZNZPTo0ZKWlub1MT/77DPp1q2blC1bVkqUKCEtWrSQF1980XSbLcxff/0lI0eOlHr16kmxYsWkSpUqct1118maNWskYgbfslxyySWyefNmM8DW0qVL5c8//zS1yPqmO3bsaD5ARv8EAAAAEGlCra/whAkTTCCqFZQ1atSQmjVrysaNG82MDRrcLlmyxMzU4In777/fdJ9VWiGqgfH69evlgQcekC+//FK+++47SUpKKrCfzgzRqVMnOXDggNmnadOm8scff8i0adPkv//9r0yfPj0kKk/9FhgrbQrQpUsXswAAAABANAiluHjlypUyatQo83zixIly6623mjhNKy41ANX1+poGyEX1+eefm6BYA18NaK1AdtOmTfKPf/zDVI4++uijzsDZooF5//79TVDcq1cv+eSTT6R06dJmGt+nnnpKxo4da1ofb9myRapWrSph3ZR6586d/skJAAAAAMAnGmzm5ubKTTfdZMaAsvoxV6tWTaZOnSqxsbEyc+ZMWbduXZGP+a9//cs8PvTQQ/lqd7V5to43pf7973+bbrV5ffHFF6a5tAbDH3/8sXlUOvaUBsYXXXSRpKammubYweZzYKx9h3X6Ja0G16bTAAAAABBNYmNivF78SYNMHddJaVB8poYNG0r37t3Nc23CXBRbt26VtWvXuj2mHk9jwlOnTsmsWbPyrbPS0Fpj7Zd8Jut4Wgsd9oGx3o3Q9uTXXHONabuuHbp37drln9wBAAAAAIpk9erVJkDVJs/t2rVzuU3nzp3Now6UXBTL/redDpylMw55ckzr/1ozXNh+2ud47969EtaB8bx588wdgISEBNm/f78888wzpjO2tjWnFhkAAABApNOKX28Xf9K+uqpWrVomPnNFYzWlAyd7csz6/9uvqMfUmYqsbrfu9tWK1cTERI/yE7KDb2nVuS6HDx+WyZMny7vvvms6YWsV/rfffmtGpR46dKjccsstZs5jAAAAAIA4B8iaNGmSR/toE+QRI0YUeP3IkSPmsbARp611R48eLVJaR7w85vHjx03r4sL21f7PZcqUMdM5FTU/IT8qdfny5eW+++4ziw7/rV+wjnSm8xhrLfKzzz5rpnTSL/CKK66I+KmbcuNc36Hxtxfmb7clnbFdXTeb8Kv//eEE2iM9GtiSTt2k07ak46h0dcDT+CPNnu/mX5c2tCWdLHvejpyu0iTgacSeThc7RHaJ7V9xNszZUTo3U+xw3FFw2o1AiMnNtiWdVlVLBDyN49kOsUOJEj43+iuSBLHn/TRIDvw5Mzcx8N+/ys615zM7JfZca9pTCoS/GIf337vGS6tWrfJ4H1cyM/8+P1i1sK5YUyplZGQUKa1ML4
9p7efv/ITFdE0WnadKF50/a8qUKWaksl9//dX0RdbFqkXWYcK1mh8AAAAAwpbD+7vuOk1Rq1atPN7HlWLFijmbMbujfZBVcnJykdIq5uUxrf38nZ+wCowtWi1+1113mZri2267zcxvpaxa5Oeee870T9baZJpZAwAAAIg22qLWVbNob1gjP1vNn12x1rkaJdqfx9SpmXRqKG1O7W5fnef42LFjBfYNhoC1w9G7Av/v//0/6dKlizRt2lQWL15sXtcA+J577jGv6cBcn376qbRo0cI5BDgAAAAAhJMYR67Xiz81atTIPO7evVuysrJcbrN9+/Z82xb1mNu2bXO7jatjavNpq/LT3b579uxx1iYXNT9hExhv2LBBRo0aZSaQHjRokAmItVO1jlL95Zdfyu+//y4vvfSSmVB6/vz5cv7555uO2TpZNAAAAADAOy1btjQBqTZPXr58ucttrArL9u3bF+mYF154oXncsWOH2ymV3B3T2tda726/GjVqmCXsA2PtWP3BBx9Ix44dpVmzZqZvsVaXV6pUSR599FETDGtQfNlll5kg2dK1a1czcnV8fLzbLw4AAAAAQprW/Hq7+FFKSor07NnTPHc10vXWrVtN5aTq169fkY7ZqFEjU5np7ph6PK0R1oC8T58++dZZaUyfPt3lqNPW8bR7bbD5HBjfeeedpvO3Dqb1008/mXbiGvBqE2mtGh83blyhA2xVrlzZDMaltcYAAAAAAO+NHj3aVEbqIMgaeGp8Zo3zNHDgQNPn96qrrpLmzZvn269OnTpmmTFjRoFjjhkzxjw+//zzpsLTonMP67S86vbbb5eKFSvm20/T0YpTjfVuuOEGZ8ynXWqfeOIJMwZV8eLF5f7775dgi3FYn5SXtEO11Vlam07rIFuetg8fMGCAHDhwQBYsWCCRIiPP8OSBNGZu5EzXFHMqVeywI7dUZE3XFBvQMfSMPZn2TNZTIsGe6UdsmklDyiXHRcx0TUml3M9diPxOpgd+uonE7IyImq6pjNjzfk4lBH66nky7pmuyqbyMtWm6ptjTaZEzXZNN5xi7poUqU7K4LemEu1Mn/x5AyhtJKWXE31599VW59957TVBcs2ZNqVChgmzcuNE0sW7cuLGZXldfyyvmfy1733//fRk8eHCBY+o4UXpcVb9+fSlZsqSsX7/eBLk6I9HcuXPzjURt2bJli3Tu3NnMVVyiRAlp0qSJqUDV/yckJJgK1auvDvz0o2fjc6l6wQUXmA9P25u//PLLXnWa/uSTT2wLir/++mvp27ev6QOtc2ZpbbU2AX/88cclO9ueeRQBAPai7AcARENTaouO+aSBau/evSUtLc0ExToQlnZzXbFiRYGguCheeeUVmTZtmhlc+dChQybgPffcc00tsjandhUUK40PdXwpbWmsNco6ja/VzPrnn38OiaDYLzXG4UIvfIYMGWJGylZ650QvjA4fPix//PGHGQ3t5MmT5s6HP1Bj7DlqjL1DjbHnqDGOnhpju8t+RY2x56gx9hw1xp6jxtg71BgXzakT7qcyitRzbKQJ/BV1iPjnP/9pLozatm0rEydONCO2WdLT02XevHmmFgEAEDko+wEAdvD3tEuwX1QExtpM+5133jGdyb///nszWlte2uH7zBHUAADhjbIfAAAUld/a4axdu1aGDx9u2pmXKlVK4uLi3C46PZOddN5kdd999xW4MAIARCbKfgBAtPYxhuf8EqG+8cYbZtQzHZEs1Los6xzL3333nXneo0cP0/Fchy3XR20+p83qhg0bZjqjAwAiA2U/AACwNTDWkcRGjhzpnLvqsssuk3/84x9Srlw5M2rZ/v37TR+ujz/+2NQkv/7662beY7toTXZWVpZ5vnjxYjMamg62Ypk9e7aMHz/ejKyt83oBAMIfZT8AwFbU/IY9n5tSa6CrtcQaHE+YMEF69eplXk9MTJTu3bvL9ddfL++9954sW7bMzI2lE063atVK7KITWVvuuOMOU0uwfP
lyM4fX1q1b5dprrzXPdQ7m1atX25YvAEDgUPYDAGxFU+qw53NgvHTpUhPwWrXGljObVLdo0cIEztu3b5cXXnhB7JKamppvoJVvvvnGjE6qgXuDBg1k6tSpJm9as/D000/bli8AQOBQ9gMAAFubUh84cMD018rbTys2Ntb07zqTTt6ckJAgM2fOlKeeekrskHei6cGDB0vZsmXzrde83nPPPabWQPuj5ebmmtfOpNN8aP+0oho8ZIjccsstPuYeABDMst+b8v/mwYNl6DDKfwCIKrnU/Eq0B8Z6J15rjPPS0T9PnDhhmqnlnR9Sg2LdfteuXWKXvBdD55xzjsttrNdPnjwphw8flooVK7pslrdq1aoip9urd2+v8gsACJ2y35vy/9L/dSkCAABRFBhXr15dNm3aJNnZ2c5pmOrXr2/6bP3yyy/SqVMn57Z//vmnHD9+3ATHdmnSpInzuTahO1vNgtYauKIDhnnSN7pKlSoe5RMAEHplv6L8BwCcTQx9hcOez4Gx3nHfsGGD/Prrr2ZwE9W1a1dzd12bS8+aNctcfOhooHfffbdZf/7554tdNHDXZt5aS/3777+73Eb7PSvNZ/ny5V1uM2LECLMUVYaLpuQAgPAq+70p/0+mZ3iRYwAAENaDb1166aVmoK0vv/wy3wig2oT6+++/lxo1akjHjh3NRcrnn39uml3rtBl2uu6668zjRx99ZGq2z6SjZqsuXbo4a70BAOGNsh8AYBtGpQ57PgfG11xzjYwZM0aqVavmfK1u3bpm3mLta3zkyBH56aefTP8tDYoffPBBueGGG8RO999/v5QuXVp27NhhgnJrYDAN6HW6KQ3qNW8PP/ywrfkCAAQOZT8AwDY6I4+3C0JCjOPMeZX8SIPir7/+Wvbs2WMuTrR2WafJCIZ58+ZJnz59JCMjw+SlUaNG8scff5hBVfTCaPz48eYiyl/sako9Zu7fTQEDbWzX6gFPI+bU/02vEkg7ckvZkk7dpNO2pOOIDXxN157MOLFDiQSf79UVSa5N56ByyYH/3GJPp4sdkkqVk3Bkd9lvV1PqxGx7mmsfd/zfAJqBVEbseT+nEkoEPI3MbEdElZexYs/7iT2dFvA0chMD//0rm34Ckm3TyaxMSfvGBgpnWQd2eL1vQuW6fs0LvBPQK+py5crJjTfe6Py/DrylA5joxcjKlSvFTj169JC1a9fKM888Yy6U1qxZYy6S9ILp3nvvNU3pAACRhbIfAGALmkSHPVs7VWkfL70oOXN6J7s0bNhQ3n///aCkDQAIDsp+AABwNow2AgAAAAA+YLqm8GdPBxUAAAAAAEIUNcYAAAAA4AtqjMMeNcYAAAAAgKhGjTEAAAAA+IIa47BHYAwAAAAAviAwDns0pQYAAAAARDWPa4zj4uICkxMAAAAACENM1xSFgbHD4QhMTgAAAAAACIfAeMyYMYHJCQAAAACEo1xqjMMdgTEAAAAAIKoxKnWAJO78xZZ0xnZrZks6p2MTA55GanyC2KFyfIwt6aRNecaWdBIGBf5mVUb2abFDxeL2FEnHT+XYkk6MDV1P9mcF/m9T1bYllciQmJ0R8DSy4pPFDnE59nSfyogtYUs6p7MD/36SbTrHnM6xp3Yq2WFP+Z8RH/jfQMZpez6zhFh7fgMp2SdsSUekuE3phDm6m4Y9AmMAAAAA8AWDb4U9pmsCAAAAAEQ1aowBAAAAwAdM1xT+qDEGAAAAAEQ1aowBAAAAwBfUGIc9aowBAAAAAFGNGmMAAAAA8AU1xmEv4muMd+7cKTExMUVahgwZEuzsAgD8gLIfAGCr3BzvF4SEiK8xLlasmHTs2NHt+szMTFm5cqV53qFDBxtzBgAIFMp+AADgiYgPjKtUqSJLlixxu/6DDz6QwYMHS3Jyslx33XW25g0AEBiU/QAAOzlyaUod7iK+KfXZTJ482Tz27dtXSpUqFezsAABsQNkPAEDRrF692txE1pvO2iKrXr16MnLkSDl48KDHx3
I4HPLjjz/Kww8/LJ06dZLy5ctLQkKCVKxYUS699FL56KOPzDbunK17lObRWxFfY3y2Pmg//PCDea41BwCAyEfZDwDwuwjtKzxz5kwZMGCAZGVlSaVKlaRp06ayefNmef3112X69OmmdZYGykU1f/586dGjh/P/um/dunVlx44dMnfuXLNMnTpVPvvsM0lKSnJ7nDZt2rhcr4G2t6K6xlib0ukdiVq1akn37t2DnR0AgA0o+wEAOLu9e/fKTTfdZILi0aNHm//r+Bz62KtXL9m3b5+pSS6shvdMuq0Gwq+99pocOHBAtm/fLitWrJDDhw/Lhx9+aILdr776Sp544olCj2MF5WcuX3zxhdfvN2oDY/1S9OJI3XzzzRIbG7UfBQBEDcp+AEBAROCo1C+88IKkp6fLRRddJE899ZTEx//d2Lh06dLy8ccfm0cNamfPnl3kY7Zr187UON99992mBjovDcKtgPidd96RXJv7bUftFYE2o9Mqe0VTOgCIDpT9AIBAcOTkeL2EqhkzZpjH4cOHF1hXtmxZ6d+/v3k+bdq0Ih9Tx/XQPsXu9O7d2zweOXLEqz7MvoiP9oFXOnfuLPXr1w92dgAANqDsBwDg7Pbs2WOaTCutMXZFz6Vas7ts2TK/pZuRkeF8rjNHuDN27Fj5888/JTs7W6pXr266Rmmz7sL6JZ9NVAbGqampzjsg1BgAQHSg7AcABEyETde0ZcsW85iYmCg1atRwuY11g/n33383/ZALqwkuKh14SzVv3rzQWSPee++9fP/XblJjxowxg3a1atXKq7SjMjDWC6O0tDQpXry4swnA2UycOFEmTZpU5DRu6dNNhl93pQ+5BAAEu+z3pvwfctMNcutQAm8AQGDOM1bz5hEjRgQsT0eOHHE2mdZpkFwpV66cedS+wCdOnPBpRGilA3u99dZb5rlO5+TKlVdeafoia+CsAbve9J43b5489thjJkDXKZ90eqmaNWt6nH58NDel69evn6SkpBRpHx11bdWqVUVOY1/7Zl7nDwAQGmW/N+V/70v/bxoKAECU8GEQLU/PM9Y+gZSZmemsMXYnb7PlvE2gvaEjVPft29c0jb766qvNFFGu/Pe//833f51XWbfVKaBat24tu3fvln/961+mibenoi4w1kFXFi1a5HFTuqpVq3pULV+1om93TAAAwS/7vSn/q1Su7HH+AADRy9PzjLWPO6NGjTLTIXmqS5cusnDhQmfAqU6fPu12+1OnThWpP/DZHD9+3Ay6pUGtBrfWjWxPVKhQQR555BH55z//KZ9//rm8/fbbbmu63YmP1vkr69SpI127di3yftpUwZPmCjmbFnuZQwBAqJT93pT/p0783fwMABA9HD7UGHt6njmbkiVLetWsuXTp0s7n2oRaHT161Jw/XQWZVnNrnfqwsP7AhdGm0DonsjZ/btq0qXz77bdeH6tDhw7OfOni6WcQVYGxfqk6cbQaNGiQx3cRAADhh7IfABBNg2+NGzfOLL5o1KiRs8ZYR6iuVatWgW22b99uHuvWrevVwFs6R/Jll11mRrVu2LCh6SvsSz/lvM2+tUm2p2Kjcf5KvSjSiyMAQOSj7AcAwDO1atWSatWqmeeLF7tuCWu93r59e6/6MPfp08d0c6pdu7Z8//33UqVKFZ/yvH79emczcG8C7KgKjK326joXl97ZAABEPsp+AIAdTam9XULVNddcYx5djZitTaynT59unnsy04PSqZ302BoM6xzE8+fP92oU6by0hvill14yz3VO4/h4zxtGR11grE3qrE7lAIDIR9kPAIDnHnjgATOoltbqPvHEE5KTk+McLOv66683jy1btpQrrriiwL6dOnUy43q8+uqr+V7XY+i+X3/9takh1qC4Xr16RcqPTuGkY4acPHky3+va1FtnnNAm2RoQa169EVV9jAEAAADA70K45tdbNWvWNGN0DBw4UMaOHWvmW9bXNm3aJGlpaVK5cmWZNm2ay7E7/vjjD9m1a5
ccO3Ys3+u6/YwZM5xNnocOHeo2/QkTJpjA26LpPv/88zJs2DATTOs8yhqcb9682dwA1+PpNE0XXHCBV++XwBgAAAAAUIDWxGoQ+uyzz5qa419//dX0PR4yZIiMHj1aKlWqJJ7IO8XTzp07zeKOBr156VRMWsu8YsUK2bt3r9lX51LW0ax1HuM777xT6tevL96KcWh4Db+za7qm7BrNbEnndKz7yb39JfW0PaP5JcfbMyKtY8pTtqSTMGhMwNPYcdz9HHb+VKtU4H9n6vgpe+7qVkqOC3gaB9LteS+1y5e0JZ1IYMd0TVnx3s8X6YlTOfZcIsTH2lMun7bh/dh1jsnOtee7SXbYU/5nxiYFPI2MbHuuMxJs+j2nZJ+wJZ3Esr4NiBQtTi/+xOt9EzsP8Gte4B1qjAEAAADAB47/9b9F+IqqwbcAAAAAADgTNcYAAAAA4IsIHHwr2lBjDAAAAACIatQYAwAAAIAvqDEOe9QYAwAAAACiGjXGAAAAAOADR64904EhcAiMAQAAAMAXNKUOewTGAfJzsXNtSadKpj2t4WskpAU8jWKnToodcmNL25LOnn6P2ZJOvT9WBTyNpmKPnPiatqSTXayCLeks2HUi4GlcXCVG7FHSpnTC34HsxICnUTVjv9ghoUQ5W9LJign8Z6ZKxQX+wjU3NkHskJR20JZ0DsTZ8xtIjAt8bVupxDixQ1xWui3p5CSXsSUdIFoQGAMAAACAL6gxDnsMvgUAAAAAiGrUGAMAAACADxh8K/xRYwwAAAAAiGrUGAMAAACAL+hjHPYIjAEAAADAFwTGYY+m1AAAAACAqEaNMQAAAAD4wJFDjXG4i5oa48OHD8ujjz4qzZo1k5IlS0piYqLUqFFDrr32WlmyZEmwswcACADKfgAAUBRRERhv3bpVzj//fHn22Wdlw4YNUrlyZWnatKmcOHFCpk+fLhdddJG88sorwc4mAMCPKPsBALbR6Zq8XRASoiIwvu2222Tfvn3SsGFD+fXXX2X79u2yevVq+euvv+S+++4Th8MhDz74oLmIAgBEBsp+AABQVBEfGJ88eVIWLFhgnr/wwgty7rnnOtcVK1bMvNagQQPJzs6Wb7/9Nog5BQD4C2U/AMD2Uam9XRASIj4wPnXqlKkVUPXr1y+wPiYmxvl6VlaW7fkDAPgfZT8AwE6O3ByvF4SGiA+MK1SoYAZaUT/++GOB9WlpabJmzRrzvF27drbnDwDgf5T9AADAExEfGKvnnnvO1A488MAD8s4778j+/fslPT1dli9fLn369JEDBw7IjTfeKB07dgx2VgEAfkLZDwCwiyM31+sFoSEq5jG+4YYbpHTp0jJu3Di59dZb862rWrWqvPnmmzJixIig5Q8A4H+U/QAAoKiiosZYbdu2zYxEGhsbK3Xq1DFzWhYvXtyMWDp58mQzlQcAILJQ9gMA7ODIyfV6QWiIihrjO+64Q/7zn/9I27ZtZc6cOdKoUSPzekZGhowZM8aMTqpN6datWye1a9d2eYyJEyfKpEmTipzmxddcL1ddP9hv7wEAYH/Z70353++GQXLD4KF+eQ8AAMAeER8Y6wWPNpdLSEiQ6dOn57v4SU5OlvHjx8uqVavk+++/l2effVbeeustl8fR2gXdrqhadL7YL/kHAASv7Pem/L+oR0+f8w8ACC/U/Ia/iA+MlyxZYqbsaNiwodsagUsvvdRcHK1YscLtcbQ/WqtWrYqcbvlKlb3KLwAgdMp+b8r/SpWreJxfAEB4YxCt8BfxgfHJkyeLvG1mZqbbdTpAiyeDtPy483CRtwUAhGbZ7035v/tIapG3BQAAoSHiB9+y+pRt3bpVdu3a5XKb7777zjw2btzY1rwBAAKDsh8AYCcG3wp/ER8Ya1O5SpUqSVZWlvTv31+2bNniXKcDsDz44IOmKZ26+eabg5hTAIC/UPYDAABPRHxT6hIlSshHH30kV111lfzyyy9yzjnnmP5mKSkpZhqP9PR05+ilV155Zb
CzCwDwA8p+AICdqPkNfxFfY6x69OhhRii98847TfO6/fv3y2+//SalS5c2F0SzZ8+WN954I9jZBAD4EWU/AAAoqoivMbbUq1dPJkyYEOxsAABsRNkPALBDbk5OsLMAH0VNYAwAAAAAgcB0TeEvKppSAwAAAADgDjXGAAAAAOADBt8Kf9QYAwAAAABcWr16tVx33XVSpUoVKVasmBm/Y+TIkXLw4EHxxpNPPikxMTGFLm+99Zbb/XUqxhdeeEGaN29uZqEoW7asdOvWTWbOnCm+oMYYAAAAAHwQqTXGM2fOlAEDBphgtFKlStK0aVPZvHmzvP766zJ9+nRZsmSJCZS9ocdr2LChy3VVq1Z1+XpmZqZccsklJt24uDiTn7S0NFm4cKFZHnroIXnuuee8yg+BMQAAAAAgn71798pNN91kguLRo0fLE088IfHx8XL8+HETLM+ZM8fUJC9fvtzU8nqqd+/eMnnyZI/20cBXg+K6devKN998I40bNzavz5o1S6699lp5/vnnpWPHjnLFFVd4nB+aUgMAAACAj6NSe7uEqhdeeEHS09PloosukqeeesoExap06dLy8ccfm8cVK1bI7NmzbcnPgQMHnE2s3333XWdQrPr06SMPPvigs6m2NwiMAQAAAMAHuTm5Xi+hasaMGeZx+PDhBdZpv97+/fub59OmTbMlP1orfPr0adP8WvsUn2nEiBHmcdWqVbJ9+3aPj09gDAAAAABw2rNnj2lKrbTG2JXOnTubx2XLlok31q5dK9dff710795drrzyStNce8OGDW63t9Kx0j1T9erVTRNrb/NEH2MAAAAA8EGkDb61ZcsW85iYmCg1atRwuU39+vXN4++//276ISckJHiUxpo1a8ySt0b46aefNiNev/jii2ZwLVd5stJ1l6cdO3aYAcI8RWAMAAAAAEEyceJEmTRpkkf7aPNmq+lwIBw5csTZZNrdwFrlypUzj7m5uXLixAkpX758kY5drVo102e5Z8+eZkTrlJQUE/T+5z//MX2IX331VRNkjx8/3mWerHQLy9PRo0fFUwTGAdK6gj0fbYbkv5MSKMdziwc8jZKlS4odEg553ufAG6VS6tiSzqlarQOext6TWWKHcsXs+T0XT7CnF0nP+B0BTyMrtlHA04BnSicF/necm1RB7JAVY8+5bNneVFvS6VQ98OeyuMwTYodN2aVtSefAsQxb0jmnQuC/m/jMY2KH3KQUW9JJz7KnhjK5mC3JRHWN8b59+0y/WE/3CaTMzExnjbE7SUlJzucZGUUvK1z1WT7//PPlzTffNE2hdeTpV155RW6//XapU6eOV3nyJD8WAmMAAAAACBKds7dVq1Ye7+POqFGj5LXXXvM4H126dDFzAatixf6+I6KDXblz6tQp5/Pk5GTxh/vuu8/k/c8//zRNq++++27nOk/y5E1+CIwBAAAAwAe+TLukTaL92Sy6ZMmSRW7WnFfp0v/XEkWbUFtNkh0Oh8vm1FbT5tjYWClVqpT4g/YrvuCCC+Tzzz+XrVu35ltn5clK92xNwD1FYAwAAAAAETL41rhx48zii0aNGjlrZ3WE6lq1ahXYxpoSSZs/ezrwVmGsptLZ2dkF8rR06VLZtm2b232tPFn59wTTNQEAAAAAnDQQ1kGy1OLFi8UV6/X27duLP61fv948njka9oUXXmgelyxZ4nI/nV5KR6TOu60nCIwBAAAAwMcaY2+XUHXNNdeYR1cjZmsT6+nTp5vn/fv391uaX331lXMu40svvTTfOp3rWGumtYn1ggULXI7urVq2bCkNGjTwOG0CYwAAAABAPg888IAZxGrRokXyxBNPSE5Ojnn9+PHjcv3115tHDUKvuOKKgrMAdOpkRpTWqZfy0qBX+1OvXbs23+s65dPUqVPNcdXll18ubdu2zbdN5cqVnX2xhw0blm+u4i+//NI5vdOYMWPEG/QxBgAAAAAfaGAXaWrWrCkffvihDBw4UMaOHWtqZPW1TZs2SV
pamglUp02b5nJgrj/++EN27dolx47lnyYtKyvL1EDronMO165dW+Lj402/YWvu4c6dO8uUKVNc5kmD35UrV8pPP/0kTZs2lfPOO09SU1OdfYt1VGutWfZG1NQY65f33HPPmaHQdRJpHa2tRYsW5sMtbMhvAED4ouwHAMB7/fr1k59//tk8ql9//VUqVqwod955p6xbt87jJstai6wDg1122WVSpkwZExCvWbPGDLjVu3dvExBrM2ld54rWYOuUUnpuP/fcc2XLli1y6NAhM9XUjBkz5MUXX/T6vcY4dPztCPfXX3/JxRdfbDpy63Di+iHqUOD6f20S0KZNG5k/f765aPKXU6nHxQ4Z4r8R4ApjxxzyJRPtuU+TcOjvO0qBdiTl/yYkD6SUpLiAp7H3ZJbYoVyxwL8XlZxgz28t6Y81AU8jq5Lnoy56I6lUOQk3wSj71fG0DAm0YpJ/pM5AyYqxp2HZsr2ptqTTqXrxgKcRezpN7LA5I8mWdA6k2nMD6ZwKgf9uKspJsUNukn/LFHfScgrW0gVC+ZTAfzeRYMf9N3m9b90XXdeOwl5RUWN88803mwuhJk2amLboeqdD70xolXvz5s1lxYoVcscddwQ7mwAAP6LsBwDYxZGT4/WC0BDxgbFeCH377bfm+bvvvpuvul/btH/wwQemJuH//b//Z9rLAwDCH2U/AADwRMQHxtY8V9WrV5cOHToUWK+1BlqboC3KP/300yDkEADgb5T9AAA7OXJzvV4QGiI+MD5y5Ijz4sgda/LoH3/80bZ8AQACh7IfAAB4IuKna7JGNNu7d6/bbXQ4cUVzOgCIDJT9AAA7OXKo+Q13EV9jbE0MrRdHy5Ytc9kPzZoc2po7CwAQ3ij7AQCAJyI+MG7Xrp3zAmnw4MFmhFLL1q1b5YYbbjDTdqj09PSg5RMA4D+U/QAAu2uMvV0QGiI+MFYfffSRVKtWzdQONGvWzIxO2rhxYzPwil4g3XjjjWY7f89lCQAIHsp+AIBdcnNyvV4QGiK+j7Fq2LChrF69Wp5//nmZNWuW7NmzR0qUKCFXX321/Otf/5LZs2eb7apUqeL2GBMnTpRJkyYVOc0hg26SW4cO8Uv+AQDBKfu9Kf9vGjRYhgy7xef8AwAA+0RFYKwqVaokL730klnOpBdNymp258q+fftk1apVRU6vd89LvMwpACBUyn5vyv9LevbyIqcAgHDGtEvhL2oCY3eysrJkzpw55vmVV17pdruqVatKq1atinzcs9VAAABCv+xXlP8AAES+qA+MtRbh4MGDUq9ePenTp4/b7UaMGGGWojqVetxPOQQABKvs96b8P56W4YccAgDCCYNohb+oGHxryZIl8u233zpHIFUZGRny7LPPymOPPSZxcXHyzjvvSEJCQlDzCQDwH8p+AABQVFFRY7xixQq55557pHjx4lK3bl1JTEw0o5TqFB362uTJk6Vbt27BziYAwI8o+wEAdnHkOIKdBfgoKgLjrl27ypAhQ+THH3+U3bt3S3Z2ttSsWVN69+5tLppq164d7CwCAPyMsh8AYBemXQp/UREYt2jRQt57771gZwMAYCPKfgAAUFRRERgDAAAAQKA4cmlKHe6iYvAtAAAAAADcocYYAAAAAHyQy+BbYY8aYwAAAABAVKPGGAAAAAB84GBU6rBHYAwAAAAAPmAe4/BHU2oAAAAAQFSjxhgAAAAAfMDgW+GPGmMAAAAAQFSjxhgAAAAAfMDgW+GPGmMAAAAAQFSjxjhA9p+KsyWdaln7bEknq3T1gKex9kC62KFqydq2pJOTZc+dw/1p2QFP45z4o2KHmNQsW9LZm1jVlnSqlqsV8DTiTh4QW5QqZ086ESDJhlvOscf/CnwiIrL+VFlb0rnorwW2pPN7qV4BTyMxLlns0LC0PXUbJRLsSee3Q4G/BihdvbTYIcER+POySj0dY0s65W1JJfzl5tLHONwRGAMAAACAD5iuKfzRlBoAAAAAENWoMQYAAAAAH+
Qy+FbYo8YYAAAAABDVqDEGAAAAAB/Qxzj8UWMMAAAAAIhq1BgDAAAAgA+oMQ5/YVNjvH//fpkyZYrcfffd0r59e0lOTpaYmBjp2rXrWffNysqSF154QZo3by4lSpSQsmXLSrdu3WTmzJm25B0A4D3KfwBAOAy+5e2C0BA2NcaffPKJ3HPPPR7vl5mZKZdccoksWbJE4uLipGnTppKWliYLFy40y0MPPSTPPfdcQPIMAPAd5T8AAAi0sKkxLlWqlPTo0UMeeeQRc6d/9OjRRdpPL3z0oqhu3bqyYcMGWbt2rWzbtk2++OILSUpKkueff16+/PLLgOcfAOAdyn8AQKhz5Dq8XhAawiYwHjp0qMydO1eeeeYZufrqq6VSpUpn3efAgQPy1ltvmefvvvuuNG7c2LmuT58+8uCDD5rnTz75ZABzDgDwBeU/AADBs3r1arnuuuukSpUqUqxYMalXr56MHDlSDh486PGxdu7cabpDFWUZMmRIgf3r1Klz1v20xVhEN6X2xqxZs+T06dPSsGFD06fsTCNGjJCxY8fKqlWrZPv27VK/fv2g5BMA4F+U/wAAO+VG6OBbM2fOlAEDBpgxO/TGtHZL2rx5s7z++usyffp00zJLA+Wi0sC6Y8eObtdrULty5UrzvEOHDm63O++886R06dIu18XGelf3G9GB8bJly8xj586dXa6vXr26aWK3Y8cOsy0XRgAQGSj/AQDwzd69e+Wmm24yQbF2Y3riiSckPj5ejh8/boLlOXPmmJrk5cuXm5raotBaZw2m3fnggw9k8ODBZqBNPbY7EyZMKNIgnBHZlNobW7ZsMY+FXfBY6/TOBwAgMlD+AwDs5MjJ9XoJVS+88IKkp6fLRRddJE899ZQJipXW1H788cfmccWKFTJ79my/pTl58mTz2LdvXzPGiJ0iOjA+cuSIeSxXrpzbbax1R48etS1fAIDAovwHANg9j7G3S6iaMWOGeRw+fHiBdTr9Yf/+/c3zadOm+SU97X/8ww8/mOdaa2y3iA6MrY7XiYmJbrfRkUlVRkaGbfkCAAQW5T8AAN7bs2ePaUqttMbYFau7ktV9yVfajNrhcEitWrWke/fuhW6rA2xefvnlcvHFF8sNN9xg/n/y5Emf0o/oPsbauVvpACzunDp1yjxqO3YAQGSg/AcA2CnSBt/a8r8uSXqDuUaNGoV2Sfr9999NP+SEhASv09OAWANjdfPNN591AK1PP/003/+1abf2g9bHSy65xKs8RHRgrFX8eZvUuWKts7Z1Z+LEiTJp0qQip33N9YPkhsFDi7w9ACAyyv8hgwfLLbfcUuTtAQDRzdPzjNW8WWdYCJQjec6R7gbWsrok5ebmyokTJ6R8+fJep6dNqHVAzLM1o9YBt7SWuG3btqZmWW+A62BeOjCYTiulUzIuXbpUWrVq5XEeIjowbtSokflgtm3b5nYbnabD2rYw+/btM9N6FFXni3t6kFMAQKSU/7179fIgpwCASODI9X4QLU/PM9Y+odIlyR/dkqxBt7R5dmEDZ1rbWYoXL+5sUt2pUyfzOT744IMyb948j/MQ0YHxhRdeKO+//77bIcG13bx1Z0K3LUzVqlU9uvNQqXIVD3MLAIiE8l+nogAAoKg8Pc9Y+7gzatQoee211zzOR5cuXWThwoUed0nytVtSamqqc6Avbwfd0vSffvpp6d27tyxYsMAMrHm2FmFRFRhfeeWVcuedd8rWrVvNB9StW7cCzRZUy5YtpUGDBoUeS5sqeNJcYdfhVC9zDQAI5/I/k8G8ACDq+NLH2NPzzNmULFnSq2bNpUuXdj63gkoNMLX/r6vm1FZza+0P7MvUShoUp6Wlmdpfa6Rrb3To0MHZtFv7Pbdu3dqj/SN6VOrKlSs7f2TDhg3LN1fll19+KePHjzfPx4wZE7Q8AgD8j/IfABCt0zWNGzdODh065PHyxRdfOI9hdTPSGmMdobqwLkl169b1aeAtq3l0v379JCUlxevj5G32nZ
2d7fH+YVNjrF+I3tk/s9279iGrUKGC83VtU66LRS9+Vq5cKT/99JM0bdpUzjvvPFNdb32R9913n6lZAACEJsp/AADsVatWLalWrZr8+eefsnjxYjMl0pn0ddW+fXuv09FuTYsWLfLL3MXr1693Pnc3knZE1Bjn5OTI4cOHnYtWt1t3A/K+np6eXqC9ubaVf+655+Tcc881Q4/rHRFtQ6/V9i+++GKQ3hEAoCgo/wEAoc6Rk+v1EqquueYa8+hqxGxtYj19+nTz3Jfmz9bcxXXq1DEjTvvi+eefN496zq9evXrk1hjrh6UfmrfV6g899JBZAADhhfIfAAD7PfDAA/LOO++YGl2dDkm7H8XFxcnx48fl+uuvN4/aouuKK64osK+OEP3HH3+YgcB0cUXP7R9++KF5PmjQILfTQln0hraOhK1p5+1DrTfHH330UecAXk899ZRX7zdsaowBAAAAIFQH3/J2CVU1a9Y0gWt8fLyMHTvWNK1u06aNqY2dM2eOGc9j2rRpLgNaDYp37dolx44dO+vcxbq/BsZno8e8++67pWLFilKvXj254IILpFmzZmY2CK3V1kHAtNbYqumO2BpjAAAAAIB9+vXrZ4LQZ5991tQc//rrryZAHjJkiIwePVoqVark86BbF110kRnA62wGDBhgHn/++WfZvXu3rF271tRga/60m9Ttt98uLVq08Do/BMYAAAAA4INAjC4dKlq1auXsT1xUO3fuLFJgbAXHRXHhhReaJVBoSg0AAAAAiGrUGAMAAACAD3K9HCQSoYPAGAAAAAB8kENgHPZoSg0AAAAAiGrUGAMAAACADyJ47K2oQY0xAAAAACCqUWMMAAAAAD6gj3H4o8YYAAAAABDVqDEGAAAAAB/Qxzj8ERgHSPEEeyrjjyVUtSWd2OxciRTVT++zJZ3s0tVsSSe3RELA09h+opzY4eJ7P7Elne3v3mBLOum5ZQKeRsr+LWKL6ufYk04E+HjDwYCncUUje8r+/7d0my3ptLmgqS3p1N+9MOBpvJPTTOwwotxeW9I5eMfjtqTzj0N1A57GW28/KXbo8+NrtqSzsOMoW9IZVL6kLemEO5pShz+aUgMAAAAAoho1xgAAAADgA5pShz9qjAEAAAAAUY0aYwAAAADwAX2Mwx81xgAAAACAqEaNMQAAAAD4gD7G4Y/AGAAAAAB8QGAc/sKmKfX+/ftlypQpcvfdd0v79u0lOTlZYmJipGvXroXu98svv8grr7wi119/vTRs2NDso8vkyZNtyzsAwHuU/wAAINDCpsb4k08+kXvuucfj/W699VZZu3ZtQPIEAAg8yn8AQKhj8K3wFzaBcalSpaRHjx7Stm1bs6xevVrGjh171v3q1asn55xzjnO/O++8U9atW2dLngEAvqP8BwAAgRY2gfHQoUPNYtm7d2+R9ps5c2a+/yckJPg9bwCAwKH8BwCEOvoYh7+w6WMMAAAAAEBU1xgDAAAAQCiij3H4IzAGAAAAAB/QlDr80ZQaAAAAABDVqDEGAAAAAB/QlDr8ERgX0cSJE2XSpElF3n7gTYNl0NBhAc0TACD0yv+mPftJ1743BDRPAADAvwiMi2jfvn2yatWqIm/f/dJeAc0PACA0y/8arS8KaH4AAKGHPsbhj8C4iKpWrSqtWrUq8vaVK1cJaH4AAKFZ/peuUCmg+QEAAP5HYFxEI0aMMEtRHTyRHtD8AABCs/x/b8XugOYHABB66GMc/giMAQAAAMAHucHOAHzGdE0AAAAAgKgWNoHxnj17pEKFCs7l4YcfNq8vXbo03+vjx4/Pt5/+P+/6tWvXmtfvuuuufK/r8QEAoYfyHwAQDk2pvV0QGsKmKXVOTo4cPny4wOvZ2dn5Xk9Pz9+3V//var/U1FSz5D0+ACD0UP4DAIBAC5vAuE6dOuLw4o7Kk08+aRYAQHii/AcAhDqmawp/YdOUGgAAAACAqK4xBgAAAIBQRF/h8EdgDAAAAAA+oCl1+KMpNQAAAA
Agn2PHjsm0adPkgQcekK5du0pKSorExMSYsT/8YfXq1XLddddJlSpVpFixYlKvXj0ZOXKkHDx4sND9srKy5IUXXpDmzZtLiRIlpGzZstKtWzeZOXOmT/mhxhgAAAAAfBCJTakXLlxoAtdA0CB2wIABJsitVKmSNG3aVDZv3iyvv/66TJ8+XZYsWWIC5TNlZmbKJZdcYtbHxcWZ/dLS0kxedXnooYfkueee8ypP1BgDAAAAAPJJTk6Wiy66SO677z6ZOnWq/Pvf/xZ/2Lt3r9x0000mKB49erT5/8qVK81jr169ZN++fSYgdzUjhQa+GhTXrVtXNmzYIGvXrpVt27bJF198IUlJSfL888/Ll19+6VW+CIwBAAAAwMc+xt4uoapnz57yww8/yIsvvmhqd2vVquWX42oz6PT0dBN0P/XUUxIf/3cj5tKlS8vHH39sHlesWCGzZ8/Ot9+BAwfkrbfeMs/fffddady4sXNdnz595MEHHzTPvZ2qkcAYAAAAAGCLGTNmmMfhw4cXWKf9hfv372+ea//mvGbNmiWnT5+Whg0bmj7FZxoxYoR5XLVqlWzfvt3jfBEYAwAAAICPfYy9XaLJnj17TJNppTXGrnTu3Nk8Llu2LN/r1v+t9WeqXr26aWLtat+iIDAGAAAAAB9EYlPqQNiyZYt5TExMlBo1arjcpn79+ubx999/N/2Qz9zXWl/YvjqQl6cYlRoAAAAAgmTixIkyadIkj/bRZshW0+FwcuTIEWeTaZ36yZVy5cqZx9zcXDlx4oSUL18+377W+sL2PXr0qMd5IzAOkIqlinv8B6EjsFWtWjWgP/JQTqd8il2fWTlb0omPoO+mSXKxgKeh9k4dbks6nvI2nWJ2pHNu18CnAY8MbVMrYn7Db1zTzJZ0POV1OjWaBjyNfxY9Nz6lI1LHlnQumLvAlnRO25CGp7xOp81LtqQzyKNUKP8D7S3HTq/31cGitF+sJ/S7DEeZmZnOGmN3dHRpS0ZGhlf75t2vyBwICa1atdKGFOaRdEInDdIJ3TRIJ3TTQHT+tiItnUh6L5GWTiS9l0hMB5576623zPfiyaL7uDNy5EjzXXu6dOnSpdB8fvnll2a72rVre/1ep02bZo5RuXJlt9ts3LjRmadDhw45Xz/33HPNa2+++abbfa+99lqzzZ133ulx3qgxBgAAAIAg0Rp8f9bilyxZ0tn82BOlS5eWQNMm1FZTZ52n2FVzaqvJdGxsrJQqVarAvtb6szXV9hSBMQAAAABEiHHjxpklFDVq1Mg86rRLOkK1q7mRramWdITphISEfPsuXbpUtm3b5vb41r5WOp5gVGoAAAAAQMBpIFytWjXzfPHixS63sV5v3759vtcvvPBC87hkyRKX++k0UDt27Mi3rScIjAEAAAAAtrjmmmvMo6uRuLWJ9fTp083z/v3751t35ZVXmhrkrVu3yoIFC1wOMKdatmwpDRo08DhfBMYAAAAAAL8ZMGCA1KlTR+6///4C6x544AFJTk6WRYsWyRNPPCE5OTnm9ePHj8v1119vHjW4veKKK/LtV7lyZWdf7GHDhuWbq/jLL7+U8ePHm+djxozxKs/0MQYAAAAAFFChQgXn86ysLPOofYPzvj5w4ECZMGFCvv32798vu3btkkOHDhU4Zs2aNeXDDz80+40dO9bU9OprmzZtkrS0NBMAT5s2zeXAXBr8rly5Un766Sdp2rSpnHfeeZKamursW3zfffeZmmVvUGMMAAAAACjg8OHDzuXEiRPmtdzc3Hyvnzx5UjzVr18/+fnnn82j+vXXX6VixYpy5513yrp169w2hdaa5oULF8pzzz0n5557rmzZssUE3126dJEZM2bIiy++KN6ixhgAAAAAUIBOqeQNDV7PplWrVs7+xJ5ITEyUhx56yCz+RI0xAAAAACCqERgDAAAAAKIagTEAAAAAIKrRxzhEDB8+XPbt2ydVq1YlnRBKg3RCNw3SCd00EJ2/rUhLJ5LeS6SlE0
nvJRLTAcJVjMPbHtUAAAAAAEQAmlIDAAAAAKIagTEAAAAAIKoRGAMAAAAAohqBcZAtWLBALr/8cqlYsaIkJydLkyZNZPTo0ZKWluaX4+/fv1+mTJkid999t7Rv396kERMTI127dhV/0W7qP/74ozz88MPSqVMnKV++vCQkJJj3dOmll8pHH33k9eTgZ9JJwHXwiDZt2ki1atUkKSlJUlJSzATh+rkdPnxYAuHrr782n5suderU8csxn3zySecx3S1vvfWW+Pt99O3b1/nZValSRTp27CiPP/64ZGdne33cnTt3nvW9WMuQIUN8eg/6HT/66KPSrFkzKVmypJnkvUaNGnLttdfKkiVLxF/0b/C5554zvy39jWlaLVq0kPHjx8vp06dt+RvMysqSF154QZo3by4lSpSQsmXLSrdu3WTmzJl+SeOXX36RV155Ra6//npp2LCh8zuaPHlykd8fQrP8p+wP3bI/GOV/JJT9dpX/4VT2+5IO5T/ggg6+heB4/fXXHTExMXrV4KhRo4ajZcuWjqSkJPP/c845x3H48GGf03jllVfM8c5cunTp4vCXefPm5Tt2vXr1HK1bt3aUK1fO+dpll13myMzM9Dmt5s2bm+Pp51SnTh1HmzZtHLVq1XKmU6lSJceaNWsc/nTy5Ml8adSuXdsvxx0zZowzzx07dnS5/Pe///VLWllZWY4bb7zR+R5q1qzpaNu2rfmuEhMTzWv6Pr21b98+t+9BF/09WGlPmjTJ63S2bNniqFq1qjlObGysyX+LFi0cKSkp5jX9e3r55Zcdvjpw4IDjvPPOc6ajz/W3FxcXZ17T392JEycC+jeYkZHh6NSpk9lW023WrJmjfv36zv0feughn9Ow/p7OXN5///0iflII1fKfsj90y347y/9IKfvtKv/Drez3JR3Kf6AgAuMgWbFihSl0tSCfOHGiIzc317y+d+9e54mkb9++Pqfz7rvvOnr06OF45JFHHDNnznSMHj3a7xdHc+fOddStW9fx2muvmZNKXh9++KHzYu/BBx/0OS09sf7www+O06dP53t93bp1zpPZueee6/Cnu+66yxz3yiuvDEhgPGjQIEeg3XLLLSYtvSBatWpVvnVpaWmOL774osBn6k+TJ0826ScnJzuOHz/u9XG6d+9ujtOwYUPHhg0b8l1I3HfffWZdfHy8uYDyRc+ePc2xmjRp4ti6davz9Z07dzovJm666aaA/g3efffdZjv929q0aZPzdf2urL+pWbNm+ZTG1Vdf7RgwYIDjpZdecixatMhcgHFhFBnlP2V/6Jb9dpb/kVL221X+h1vZ70s6lP9AQQTGQWKdaG+++eYC67RQ14smXb927Vq/pjthwgS/Xxzpya6wE+vTTz9t0tRahJycHEeg/Pzzz847nhs3bvTLMX/66SfzXej3pSeLcAyM58+fb9LRWpai3un2t65du5o83HDDDV4fQ/Nu1bC5qknR4KJBgwZmvf7OvaUX2tbvaOnSpQXWa62UFdT89ttvAfkb3L9/v7M2R7+/M1kXPq1atfI6DVesoIwLo8gr/yn7Q6fst6v8j5Sy367yPxLK/qKm4wrlP+Bw0Mc4CFJTU2XOnDnmufaZOpP29ejevbuzX1WoK1WqlOlX5k7v3r3N45EjR+TgwYMBy8c555zjfJ6enu7z8bSPz6233irFixeXN954Q8LVSy+9ZB7vu+8+01/KbtoH7YcffjDPBw8e7PVxTp065eyvWL9+/QLrtW+U9bp+d96y+qlVr15dOnToUGC99vnSvqCal08//VQCYdasWaYvm5YF2q/sTCNGjDCPq1atku3btwckDwiMSCr/KftDW6SU/XaV/5T9AAiMg2D16tWmkNcBMNq1a+dym86dO5vHZcuWSbjLyMhwPtdBIQLFOqnpQBmNGzf2+XjPPvusrF+/XsaOHWsG9wiUtWvXmsEv9GL4yiuvNAPJbNiwwS/HzszMlO+++84879Gjh2zcuFFGjRplBsa54o
or5IknnpBdu3ZJIH3wwQfmQqJWrVrOC35vVKhQwfk96IA/rgZMWbNmjXnu7u+qKPQi3ro4cqewfPiD9XdvlQNn0rzVrVs337YID9FU/lP2B6/8j6Sy367yn7IfAIFxEGzZssU86snC3d12687n5s2bJdxNnTrVebdVaxj8KTc3V/78808ziqJ1R1pHk9QLJF/89ttv8swzz5hRKe+66y4JJD2Z62ekI9Tq3eJx48bJ+eefL/fcc4/k5OT4fNFl3T1fvHixtGzZUl577TWZO3euzJ4921z46YWk9R35m14U6cWRuvnmmyU21rciR79brRl44IEH5J133jGjcWoN0fLly6VPnz5y4MABufHGG81oq94qU6aMedy7d6/bbf744w/zuGnTJglkGeGqZiQSy4hoEk3lP2V/8Mr/SCv77Sj/KfsBEBgHgXVXsly5cm63sdYdPXpUwtnKlSudU07olB7+8uqrr5oTZFxcnLmDqtNA6FQa33zzjdxxxx0+n9C1GZ1eVEycONGkEQg6bcZTTz0lP//8s2lmqHf4161bJ7fddpvJg77HRx55xKc09u3b53yun4teHOlFhNZYbd261Uxxoc8HDRpkarL8TZvR7dixwy9N6dQNN9xgLh616aR+R1WrVjVTWVxwwQXmgvbNN9+UDz/80Kc02rZt67w4cnVH/tdff3VekATq7zOayohoEy3fLWV/cMv/SCv77Sj/KfsBEBgHgZ4Alc6/5442szuzKVq40bu3Om+izpF49dVXy4ABA/x2bL0g0rvCekLUk6NeKOmddz0pHjt2zKdj68l16dKlcuedd5o5MwNF+xdqszlt9qXNxPQ715oCTf/555832+gcg9pPy5f+jBbtL6cXj3ry199egwYNTG2Bzs+oF4JPP/20+Js1H6I2CyvsDrgntm3bJn/99ZepgdALYp3PUt+bXghqer42Q9Tvw7pA0gs6bVJp0QtKvTizanL80Z8xmsuIaBQN3y1lf/DL/0gs+wNd/lP2AyAwDoJixYqZx8Imitc7uYHulxVIx48fNwOv7N69W1q3bu33CeP79+9v+pXpXV1tTqcXRnqhpCd7HbDC2yZoeqdY79LrxZc2aQsWHSxFaxT0wlLvkPv6W7NO9GXLls23Xi8utMme0v5o2jzRX/TCbMaMGc60/UFrPjS/eiGpNQRaI6FNBg8dOmSa12nti140+9p37qOPPjKfv9YO6IWXXkhqs0MdeEUvkLS5ngrUgDbRUEZEq0j/bin7Q6P8j7Sy367yn7IfiG4ExkFgnaCsJjOuWOvOPJmFAz0p9urVyzTPatq0qXz77bd+7192Jj2BffXVV+aEqRdKn3zyiVfH0T5lJ06ckNdffz0oo3hatAmfXuwpPRl7K+/vJ+/IrXlZr588eVIOHz4s/qIXRjogit7N14tZX2kzQ61N0X6ZOlpvo0aNnOv0AmH8+PFy8cUXm+9PB8/xhY4Iqr/fe++919R2aL8yvfjS2q8VK1bIeeedZ7arUqWKBEKklxHRLJK/W8r+0Cn/I6nst7P8p+wHohuBcRBYBbreUXc3rYA1DH/ewj8caPOiyy67zNzN1xPMvHnzpHz58rakrRczXbp0cfZv84ZOgaBuv/12c+LLu4wcOdKs27Nnj/O1QI1MmbcpldYaeEvvcp95vMJqFvxZa2DVFPXr188vF5paS6R97/R3Vbt2bZfb6IirSi9gfFWpUiUz3YlemGrzNr1w1As+veC3mutZze78zfq712aD7oRrGRHtIrX8p+wPrfI/ksp+u8t/yn4gehEYB4EOgqEnKm0Oo4NhuKKjSKr27dtLuNATiI4MuWjRInPi+v777wN2V9Ud6yLCl2DS6iN35qJ3oq0LCOu1wpo7+crq3+TLdCHaLNC6iPj9998LPcnqRZK/LmS1iZv+DvzZlE5rNTztpxUIGsxY89Dq9CqBcOGFF+abhsZVs09rYBtrW4SHSCz/KftDr/yPpLI/VMp/yn4g8hEYB4HeQe
3Zs6d5PmnSpALr9S7l/PnznXdcw4GeMK655hpzQaQnZM1/zZo1bc2DNi9auHCh8+LTGzrQid6VdrW8//77Zhu92LBe69q1qwSCNg207kxbd8G9dd111zn7Trm6aHzvvffMo9a4xMfHiz/nr9TBUfz1GVl3x/Xvw10fMmveTn/MZeqO1iToKLL16tUzwUAg6EWXNhnU96rTuJxJR8y1fufaBw7hI9LKf8r+0C3/I6XsD5Xyn7IfiAIOBMXy5csdMTExZpk4caIjNzfXvP7nn386Wrdu7dCv5qqrrvJ7uhMmTDDH7tKli9+OmZ2d7ejXr585bpUqVRybN292BMLChQsdY8eOdezYsaPAupUrVzratGlj8lC9enXHyZMn/Z7++++/b45fu3Ztn4+1fv16x/Dhwx1r1qzJ93pOTo7j448/dpQqVcqkdfnll/uc1l9//eUoXbq0Od6IESMcGRkZ5nX9zb322mvmdf0dLliwwOe0rOPWrVvXHHfMmDEOf0lNTXVUqlTJHLdt27b5fmfp6emOBx54wKzT5b///a9PaS1evNgxZ84c89vOm8YzzzzjiI2NdcTFxTnmz58f0L/BO++802ynn+WmTZucr8+aNcuRlJRU6Pv09u/cKnv0t47IKv8p+0Oj7Lez/I+Ust/O8j/cy35P0jkT5T/w951PBMkrr7xiTkpaENWsWdPRsmVLZ6HXuHFjx8GDB31OY/fu3Y7y5cs7lxIlSpjjx8fH53v9+eef9zoNPZFbJ6Q6deo4Onbs6HZZtWqV1+l8/vnnznT0IkwL8Xbt2jmqVq3qfF0vjFavXu0IBH9eHGkerTyXK1fOfPd6si9btqzz9c6dOzuOHj3ql7zPnTvXkZycbI6rF0qalvW56W/whRdecPiLXmRZx/39998d/qTvw/oN60WKXjg0a9bMUbx4cefndscdd/jlb1OPpcdt2rSp+X6sNPRx2rRpAf8b1Iux9u3bm231Yqx58+aO+vXrO9/nfffd53Ma+v+863V73a9kyZL5XtfjI7zKf8r+0A2M7Sz/I6Xst6v8D7ey35d0KP+BggiMg2zevHmO3r17m5OjXhQ1atTI8eijj/rtrrfeYbcK08IWX+7uWhcNRVl8uTN94MABx8svv+zo06ePOUmkpKQ4EhISzF3kbt26mXUnTpxwBIo/L470gmfcuHGOyy67zFGvXj3ne6lcubL5PUyZMiXfHWt/2LJli2Pw4MGOGjVqmLQqVKhgPkutjfGnQYMG+b1mKq/t27ebO+pNmjQxF3z6XvRC78orr3TMnj3bbxeuQ4YMMQGKfjeajv5tjhw50rFz507b/gZPnTrleO655xznn3++yYNe2OrnOmPGDL+kof8vyn6uauoQ2uU/ZX/oBsZ2l/+RUvbbUf6HW9nvSzqU/0BBMfpPsJtzAwAAAAAQLAy+BQAAAACIagTGAAAAAICoRmAMAAAAAIhqBMYAAAAAgKhGYAwAAAAAiGoExgAAAACAqEZgDAAAAACIagTGAAAAAICoRmAMAAAAAIhqBMYAAAAAgKhGYAygyHbu3CkxMTFm0ecAgMhH2Q8gGhAYA37y5JNPOi8cPLnImDx5si35AwD4H2U/AEQGAmMAAAAAQFQjMAYAAAAARDUCYwAAAABAVCMwBkLU+vXrZfjw4dKwYUMpXry4lCxZUpo1ayaPPfaYHDp0yOU+WVlZMmvWLLNfmzZtpGrVqpKYmCiVKlWSnj17ytSpU8XhcBSa7t69e2XEiBFSs2ZNSUpKkho1asiQIUNk27ZtAXqnAAALZT8ABIkDgF+MGTNGrzrMcjY7duxwbvv+++8XWP/88887YmNjndsUL17ckZiY6Px/1apVHatWrSqw34IFC5zb6FKqVClHSkpKvtf69+/vyMnJcZmvlStXOsqWLevcNjk52VGyZEnnsT799FPnOn0PABDtKPsBIDJQYwyEmHfffVceeughU1Pw9NNPy759+yQtLU3S09NlxYoV0r17d/Nanz
59JDU1Nd++uo/e8Z87d64cP37cLCdOnJDDhw/La6+9JqVKlZLp06fLG2+8USDdkydPytVXXy1Hjx6VWrVqyXfffWfS1dd//PFHU4ugxwYA+B9lPwAEWbAjcyASaw0qV65c6FKhQgWXtQYnTpxwlClTxrw+Z84cl+lkZWU5WrdubbZ55ZVXPMrj9OnTzX7169d3WVOh67R2YuPGjQXW79u3L1+NArUGAEDZDwCRghpjIAAOHDhQ6OKun9hnn30mx44dk5YtW5p+Ya7Ex8fLwIEDzfNvv/3Wo3xddtll5nH79u2yf//+fOs++eQT89i/f38555xzCuxbpUoVue222zxKDwCiCWU/AISv+GBnAIhEZxvkZOfOnVK3bt0Cry9dutQ8/vbbb+ZixJ2MjAzzuGvXrgLrtPnbW2+9JbNnzzbH0YstHZjlTH/88YczjdOnT8uvv/5qnmtzPXd03bPPPlvoewOAaEXZDwDhi8AYCCF//vmneczMzDTL2Wjfs7y2bNkiF198sbnwydv3rEyZMhIb+3cDEa21UNqHzHLkyBHJzs42z6tXr+42PR2lFADgX5T9ABB8NKUGQkhOTo55vO6660zNw9kWrX3IS6fW0AujOnXqmIFWdOAVvQj666+/TPM5nY6jqDUbAAB7UPYDQPBRYwyEEKt5m6tmcmezZ88eM4Ko0jkrL7zwwgLbnNm3zFKuXDmJi4szF2d5L6DOVNg6AIB3KPsBIPioMQZCSMeOHc3jypUrzbQcnl4cWXQAF1fmzZvn8vXExERp1qyZeb5gwQK3acyfP9+jPAEAzo6yHwCCj8AYCCE6Kqj2CdMBU+69995Cm7zl5uaawVUspUuXdj5fu3aty4FZxo0b5/Z42oRPaTO8zZs3F1ivTfJ0YBcAgH9R9gNA8BEYAyFEL4xeffVV5xQaOsXGzz//bC6ElD7qaKMvvfSSNG3a1Iw+atFpNmrVqmWeDx061NQ8WH766Sfp2rWrHD161G3a//znP80AK6dOnZJevXrJ999/77w40zz06NHDmQ8AgP9Q9gNA8NHHGAgxgwYNMlNyjBw5Ur755huzJCUlScmSJeXEiRP5pt+IiYlxPteRR//973/L1VdfLRs2bJA2bdqYUUmtEUxLlCghX3zxhbnIcaVUqVLy+eefyyWXXGIGdtHtdH89bmpqqqSkpMg777zjrF0AAPgPZT8ABBc1xkAIuu2220yTtvvvv1+aN29uLo606ZxeIOlFz1133SVz586VgQMH5tvv8ssvl0WLFpnaBq2B0Gk4KlSoYEYs1VoEnc6jMHrsdevWyS233GKm7tD9tZmeXrCtWrVK2rVrF+B3DgDRi7IfAIInxsG4/QAAAACAKEaNMQAAAAAgqhEYAwAAAACiGoExAAAAACCqERgDAAAAAKIagTEAAAAAIKoRGAMAAAAAohqBMQAAAAAgqhEYAwAAAACiGoExAAAAACCqERgDAAAAAKIagTEAAAAAIKoRGAMAAAAAohqBMQAAAAAgqhEYAwAAAAAkmv1/tiyGWAYuweQAAAAASUVORK5CYII=", + "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAB64AAAI2CAYAAADgnaZqAAAAOnRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjEwLjAsIGh0dHBzOi8vbWF0cGxvdGxpYi5vcmcvlHJYcgAAAAlwSFlzAAAPYQAAD2EBqD+naQAAvY1JREFUeJzs3QeYU1XawPE3mQ4z9DJD76CsAoOgNEVEigURsWChFxWl6FpwRVbBLljYdZ0RFeFbcaVYURAEBFREuoBUaeJQBByZXpLvOcdNdkoyk8xk5iSZ/+957pNMcu89J8kk9577nvMei91utwsAAAAAAAAAAAAAAIZYTRUMAAAAAAAAAAAAAIBC4BoAAAAAAAAAAAAAYBSBawAAAAAAAAAAAACAUQSuAQAAAAAAAAAAAABGEbgGAAAAAAAAAAAAABhF4BoAAAAAAAAAAAAAYBSBawAAAAAAAAAAAACAUQSuAQAAAAAAAAAAAABGEbgGAAAAAAAAAAAAABhF4BpBYe7cuWKxWKRJkyamqxJU/v73v+v3tWfPnqarAgBAQFLHUbWsWbPGdFUAwC/RlisbtOUAACg92nMAUP4IXMNvGtSulkqVKknLli1l2LBh8u2335ZZHQ4fPuy2Dq4WdXEF5qiTRW8+r4KL+rzzXiQruERHR0u9evXkkksukdGjR8u7774rqampXtXx0KFDMm3aNOnRo4feV0REhMTExOj/51tvvVX+/e9/S1paWhm9QwB8JTc3Vz744AMZOnSotGrVSqpVqybh4eFSp04d6d69u0yZMkV27txpupp+Yfjw4R7/DhOcME8FMkp6HFWftYP6LAs+HxYWJjVr1pTmzZvLddddJ0888YRs27bNq/rx3QMCA205+FtbrqhtXDl16pQ899xzcvXVV0uDBg0kKipKKleurI9vAwcOlMTERPn9998Lbafacl988YXMmDFDBg0aJI0bN3aWp74XAPwD55Seoz0XWMqyPVfcNq6oY+LYsWOlbdu2UqNGDWebsHPnzjJp0iT5/vvvXW63Z88eefvtt2X8+PHSpUsXff7oKBNAxRVqugJAXnXr1nXet9lscvbsWTlw4IBe5s2bpwOBrhqBVatWldatW0v9+vVLXYcqVaroxmpRinseZUs1MvL+r+Sl/meys7P1CZI6UXIlJCSk0GO1atVyPp6VlSUnT56UpKQk2bx5s7z11lty3333yUMPPSSPPfaYhIa6/+lUZav1/vnPf0pOTk6+/1H1nOP/WTWcYmNjZc6cOXLttdeW4F0AUNY2bNigL7bv27fP+Zj6bVGdUM6cOSPffPONXtTFTnXBcsGCBfr3qaKzWq1Su3btItcp7nmUPXWMdHUsVcfAc+fO6fvVq1d3+T+tjmkFRUZGOh+32+3yxx9/6GPyzz//LEuXLpXp06fLZZddJgkJCXLxxRcXWTe+e0Bgoi0Hf2jLebqNOlY9++yz8vTTT+frUKw6MauL5UeOHNHLxx9/LA8//LDMmjVLRo4c6Vxv48aNcs011xT7egGYwzllydCeCwxl2Z5zxd1z6vt1xx13yKZNm/Idd9X6ycnJ8sMPP+jl1VdflSuvvFJfD1XHbYe7775bvv76aw9eMYAKxQ4YNm3aNLv6V3T175iTk2Nfv369vWPHjs51vvnmG5/X4dChQ879v/POOz7ff6B/NldccYU9UKi6elpn9Vk7Pnf1P5CXzWaz79mzx/7GG2/YL7roIud6PXv2tGdkZLjcX2Zmpn7esW6/fv3sX3zxhT01NdW5zunTp+3vvfeevUePHnqdiRMn+uBVA/C1Tz75xB4REaG/pzVr1rQ/++yz9n379uU7Pv3www/2Rx991F6lShW93rlz5+wV2bBhw/T70LhxY9NV8SuOY8Lq1avtgUDV05s6q89bras+/4KSk5Ptq1atso8ZM8b5fQoPD7d/+umnbvfHdw8ILLTl/FdFbssVR7X17rjjDuf2l156qX3x4sX5jifqGPbhhx/ar7/
+er3ODTfckG8f6hhZvXp1+1VXXWV/6KGH7AsWLLDHxsbqddV7D8Aszim9R3vOtYrcnivOxo0b7dWqVdPbV65c2T5lyhT79u3b9XFWyc3Nte/atcv+9NNP2+vWravX27p1a759qOPohRdeaL/zzjvts2bNsj/wwANuzy0BVByMuIZfUz20unXrJh999JE0bNhQP6Z6PHft2tV01RDkVC97NfJDLSpduKOXvUptN2HCBD1irKCJEyc657xR606ePLnQOqpX4ZAhQ/SyePFiUlIBfmj//v1y5513SmZmplx44YWyfPlynTqy4PFJTSegFpVlIe8IHAD/G/moetWrRWUuUWnDjx07Jrfddpts3bpVpxDOi+8eEFxoy8FfvfDCC3rqJkWlL1Vtt4IpSdUxTKUKV8vatWv1CLG81JRQaoR4Xo8++mg51B5AcTinBMqeylqgMhWo6TTUFIlffvmlThNecPS++g6qRR1v1XXSgsdb9f3MmxmFKV0AKMxxjYCgTjDVvBhKSkpKoecd81uZmmPFMbebml9E+eqrr3T6Z5U6R6VaueCCC+TJJ5+UjIyMYg/6Tz31lFx66aU65YvaVr2mPn36yL/+9S+dYsWVJUuW6IvBKkWMI/Wa+vvDDz8stu5qDhI1n5ea50elRWvXrp1uyKsUbZ5Q84Wpkw91cqK2V3ORtGnTRgdxjx496nKbgp/X6tWr9QWBuLg4fbJS3Lwp5U3VaebMmc6U3ip1eN5UU8ru3budwWzV4HEVtC7opptukqlTp5ZRrQGU1OOPP67THKvfYPU7WvAiR0Hq91pdlM+bOsvb37mDBw/KPffcowN5KoWpulgaHx+vjwmqLu788ssv+vdG/Qar+RgjIiJ0o7Fjx476cZWSqyCVNkzNOaz2r8pRxw01dYFK36zSdKljmAmOeaxUB6Dz58/rz0EdT9T7oc4B1HHN3bxYeakGswqMqrkm1bbq81Gv7f7775fvvvvO5TYnTpzQF6wc76Na1H3VaUlNHVEU9X6qbdV8zup/Rn3GN998s55qwhMqna66eK7SjTqO4+r8QR37VbrCPzv5F+aYi0z9r6lzI/WZXnTRRTr1YXHzeZqgPoNFixbpuqWmpur/7bL47gHwP7TlaMv5k99++01PX6FcddVVLoPWBV1++eXy2muv5XusqNTkAMyiPUd7jvZc2VPnG+r/V1Gvs2DQuiB1jqGum6rXmBfHUwAumR7yDRSVXs7hl19+ca7z6quvuk0TVtJ0NqVNL5c3DdsLL7xgt1gselHpUtStY99XXnmlTkfkyvLly3WqMce6oaGhOp1RWFiY8zGVqqxgaupbb73V+bzVatX7ULeOx4YMGWLPysoqst6ORdVXlavuX3755TrFS1Gp2v7v//7PmXpJLep+VFSU8++YmBj9uor6vF555RXne1S1alX9ekuSnqY80supNFKO9adOnZrvuXvvvVc/HhISYj98+HCJ6w/ArBMnTjh/Q0eNGlXi/XjzO/ef//wn32+p+u3M+3fDhg3tu3fvLlTGtm3b8h031O+P+jvvcafg7+mxY8fsjRo1KnTcUNs6HitpStHSppZzlK+mU2jRooW+HxkZaa9UqZLzOZVm2tVxRVHTMtx88835jmvqvVTvuePvdu3aFdpuzZo1zvRmjhRnanH8rd6fdevWuSxTHTscqdUc9XOkGlT3P/744yLTtJ05c0Yfb/PWOW991TJgwAB9vC/IUe5LL71kb9WqlbNMx2vxNm1qeaWWu/baa53nDGlpaT7/7gEoX7Tl/kRbzv/bcg7qM3Zs5+74XlKOYyKpwgFzaM/RnqM9V/apwrOzs52vU6X69qW8x3cAFRcjruHXcnNzdW+6G2+8Uf9dp04dGTp0qPir7du36/Rgajl16pTutadSpqhec44emu+++26h7VS6zBtuuEGvr3qoff7555KWlqZ7g6enp8umTZvkwQcf1L3u8nrsscfkP//5j+6Jp0buql7+Kl2Z2k495+j15mp
U7yeffKJHDiiqJ6HqUa/KVz1B//nPf8qGDRv0yAB3VqxYoT8L9RmpHoyHDh3SdVWjqPbs2aP3qXpYOvbtiur1qF7XsGHD9DrqvVL78NdRyCqFlPofVL7++ut8zzl6tHbo0EH3CgUQmNTvtOoxrTiOPaVR3O/cli1bnGnsVDrVHTt26N9hdQxQv9Oqt7dKrXz99dcXGqWm9qt+t1VPe3WsVKOr1DFAjQhTWSFeeumlQr2e1agyVQ/Vu3vlypWSlZWlt1Hlqx7d6nf/sssuE5PGjx+ve6mvWrVKH1PU6964caOeukHVd+zYsc7PKK8RI0bIwoULdTqyRx55RL9v6r1U7/np06d1L/guXbrk20ato0ZOqHVU+rL169fr8tSi0oKqMtV7rI7Rx48fz7etOv6pY9yRI0ekevXqOoWoqq8aUbdr1y494k597u6o7VVqNVVO+/bt5dNPP9Xbq7qo8tX5gjrmqP8D9XrcUZ+pep1qNInaTtVXvS7H8crfOLKXqP85da5RVt89AObRlqMt548c7TY1Gq579+6mqwPAx2jP0Z6jPVf21LmNI5MMbTcAZcJ05BzI21O8bt26zqV27drOHoOqt9sdd9zhdiSrL3vpq7Ly1sPVUtRrcNe7etCgQfr53r17F3que/fu+rmWLVvaf//9d4/qrEYuOHrUq970rjzwwAP6edUb9Ndff8333IUXXujsiZmbm1to2zfeeMNtb021vqqrei4hIcFtHVWvQrXOxIkT3faeU++LL5V1L/2rr75ar1+/fv18PQ0dPWLHjBlTqvoDMOvxxx93/i4cP368xPvx9HeuX79+eh3VI131MC9oy5Ytzt/6F198Md9zjlFR3377rcf1uuCCC5y94H3N0UNf9fov7jha8LUojvdLHf9PnjxZ6PkdO3Y411m/fn2+51auXOl87vXXX/e4znfffbezF35SUlKh59WIBkeP+/Hjx+d7To2scJSpyi9IfZ7Nmzd329t93rx5+vE2bdq4PfZv2rRJH19Uz/uC74mjZ7w6V1L/J75S1j30v/nmG+f+33zzTZ9/9wCUL9pytOX8oS1Xq1Ytt5/3F198kW+7Bg0a6G1Uu87XGHENmEd7ruRoz1Xs9pwaHe/u8y44qnrOnDnOMlT7zpcYcQ1AYcQ1/IrqyehYVG861XtNUT0VVU+u4uYl8QXVyy1vPVwt7qi5aP7617+6fE717lNU78u89u/fr3sEKs8884zH8zQuXrxYcnJy9PwralSAK2o+GVUn1WtTzSvpoOqg5mR2rKN6MxY0ZswYqV+/vsv9qt6Eqt61atWS0aNHu62jY0TF8uXL3a4zZcoUCSRqbh1F9Wh1UPcdc9Y4ngcQmNRoJwdffZ/d/c6pntiO30c1p5aa86kglcVB9eJ2jLrKS81nqSQlJXlcl5Js4y3Ve76446irOU4dVA98V73L1VxYTZs2dXksffvtt/XtX/7yFz23nCfU77bqVa+oueDUvHAFqfnw1HPK+++/n+85x99qZIWaI7Mg9XmqUWzuvPXWW/pW1dfdsV/NbadGWaiRCWr0iCv9+vXT/yeBIu/3Ku+xtCy+ewDKF2052nKmqFHq7j7vgnOTO443HGuA4ER7rvRoz1XM9pw6Xrr7vNVxNi/abgDKGoFr+BV10pF3Uel3VOo1lZrls88+k8svv1w++uijMq3DO++8U6geBRd31MlIdHS0y+fq1atX6CKt8u233+rbkJAQ6d+/v1dpWZROnTpJlSpVXK6jUt2o9NZ51897PzQ0VHr06OFyW3UBpGfPni6f++abb/StugClXpc6OXS1qAsmikq740pUVJROiQQAwaqo3zmVVs5xTOndu7fbfVx99dXOxr26eO1w3XXX6Vt1jFRp5tQUBio4UBTHNuoiubqgsGzZMn2R35fUdAnFHUdVOjR3VEo2d4o7ljpenydUWlTHfjx5/1XjXG1T8Fjaq1cvt9u6e04FcxxpstV74e44qpa9e/cWeSxVF1oAwB/
QlqMtZ4o6Prv7vFUKWQAoKdpztOcqSntO/R+6+7y3bdtmunoAKhgC1/Brqge6midkzpw5es4MNWfL8OHDPT4pU/PBuDtxUPOF+FrBecvyUhcWFNWzPq8TJ07oW9XjvXLlyh6XpeZdU9z1pM/buzDv+nnvqzJVL/7iti3o119/1bfqhLuoHphqXhZFXbRypWbNmi5HCPgzx0mxqnve3oVqbrq8zwMITHm/2774Phf1O5f3d7mo33LHb7E6fuSt0wsvvCBXXnml7u0+a9YsfYFaXfxWF7mnTZtWaA4vx0iAW265Rf9+v/nmm/oiu+q1r3q/q+ccjeq83B1HJ06cKGXBk2Np3gs+eY+l6iKLp7x9/wtu48lx2N1x1DEPnaKOlUUdSx2v1d1FLH+e+8yVvP/Deb9vvv7uATCPtpx7tOXMcRxvONYAwYn2HO25gmjP+R5tNwBlLfBaGaiwHD2+Vc/wzz//3KNt1MmfuxMHR+o60xwBz0DieO9UL8riemEWNbJBjUwINI50Rs2bN8934t2qVSt9X40qARC41GgrB198n8vyd05doFi1apWsW7dOpzBTPbXV79HmzZvlqaeekpYtWxZKRxcWFib/+c9/dI/pJ554QvcgVynQdu7cqQME6vXPnDkz3zbujqPqeOwvAu1Ymvcc5IsvvvDoOOpuVEOgHUu3b9/uvJ/3WOrr7x4A/0Jbzn9U5LZc3uMNo8eA4ER7jvZceajI7TmFthuAskbgGgEjb6+7vKldiqJOCtydMDRp0kT8gWMOFjVfSGpqqsfbOXrk/fLLL0Wu53g+bw8+x31VpppnxR1XvTvz1tldmptgpdIIOebFK5h6zzEfjjphq2jvCxBMVI93R4/6Dz/8sEzLyvu7XNRvueM5dRHD1fxR3bt3l+eff17PsanmWfv44491j3s1QmrkyJEu5/Ns166dPPnkk/LVV1/pbVauXKlTuKoGuOqpnze46O44OnfuXPEXJTkuefv+F9zGcd/dsbKo51QPdcdog4p2zFi6dKm+VaMEL7vsMiPfPQDlj7ZcfrTlzHG029Q87I75yQEED9pztOcKoj3neyorgGNeb9puAMoCgWsEjLwnGt6kYfN3Xbt21bfq5FL10vNU3vnO3PWSVCeveedPK7itSlOkena6YrPZZM2aNUXOv6JS+eSdby3YqUaBozfknXfeme+5e++9V/cQVZ+j6hnrKfU+A/AfdevWlZtuuknff++992Tfvn0eb1vUvJmuqLnSHBdV1AUHd9RFCMfFCdXDvri0rAMGDJAlS5bovzMyMoq9KKsa3OoirgooqmCieh2OMgPtWPrpp596vE3Tpk2dF448ef/VxQm1TcFj6erVq91uq0ZQuKI+x86dO3td50C3ceNG50jL2267Tf+/mvjuASh/tOXyoy1nzogRI/ToxLydIzxBuw0IDLTnaM8VRHvO99T/nJpj3fHer1271uNtOZ4C8ASBawQMdcJZ8OQiGLRo0UL3ilQee+wxj+d8Uyfi6kRBncSqnpmuPPPMM3rOFXVC5ThxVy6++GK54IIL9P2nn37a5UnD22+/7bbHourBquqtTJ48ucie/sEw34m6EPXggw/KZ5995kx16Hj9edPkOFIgqvfulVdeKXa/H330kcyYMaOMag2gpNT3Mjo6WvdwHzRoUJE9sB1zWqnfWG9TranUcH379tX3X3zxRZdzXqme8osXL9b3hwwZ4nxcXawuqsEXFRXlvJ93TjbHPFyuqIscjjRlgTZn5ahRo/Ttrl275F//+pdH26jORrfeequ+n5CQ4JxXreA8oOq5gu+/4thWXUhyFRxQ/z/qc3XH0dBXgdzi0uYG+nFU+fHHH2Xw4MH6QpoKWk2dOtXYdw9A+aMtlx9tOXPU3OCPP/6482K7aucVF6z65ptvymwuWAC+R3uO9pwD7bmyo9Lb16tXz/neqs+uKOr9VIN+VLsQAIoTWEcxVEjqxEM1LN999139t0or2aVLFwkmr776qu5
VuX//ft0DftmyZZKdne0Mmv7www9y99135+sxWb9+fWfj+bnnnpNp06bpXvmKulUXhB0nWA888IDExcXlK1Nd5HD0LLz99tudFzbUxZM33nhD7rvvPn0S7oq6yKLWUbfqBE9drFGNfkedlZ9//lmvo0YHvP766xJo1MUL9XkkJibqXrSzZs3Sj6terOrzcuW1116THj16OC8CXXPNNbJ8+XJ9cpb3ZHXhwoV6HqIbb7wxKE9egUCn5qyfP3++hIeH68ZX+/bt9UXlAwcOONdRv81qagA1r1izZs2cPeJLclFFXZBW+1YXPRyNOHURQzV+1e+Iuqih5gIeN26cczv1m63mPFPbq3qodRx27NjhzAqhAoRXXHFFvlStU6ZMkQ0bNuS76KHKv+OOO/TFFnWRw3EBJlCoi/BqBK+ijl/qNea9YK/Sqc6ZM8d5QcRBBRnUsU79Fvfu3Vu+/fbbfBep1WPqmKp68j/66KP5tlUXt9TxwXFfXZByzHX2008/Sf/+/XUaUnfUZ6T2r4436nigPkt1YcVBpZxVx+jx48fr/7FAdP78eX0RSP3vqhEJx44d098rdRzMO7+1ie8egPJBW462nD9Sx3RHwOLll1/Wn5tKdZq344E6hqmOyyropdp46hjmKtilzjEciyMIpc6n8j6u5msHUH5oz9GeU2jPlX1HMPWeValSRb/uSy+9VH8ear51R4cwdbtnzx554YUX9HdAdUoo2FlM/R+7O2bmfTzvcRZABWAHDJs2bZo6Yumlbt26+ZaqVas6n1PLRRddZD9+/Hihfbzzzjv6+caNG5eoDocOHXKWUaVKlUL1KLhMmDDB5Wu44oor3JaxevVqZxmuLF++PN/rDQsLs9esWVPfOh778MMP822TmZlpv+WWW5zPW61We/Xq1fWt47EhQ4bYs7KyXJb5t7/9Ld/7q7YNDQ3V93v06GGfMmVKka9L1ScmJqZQnSMiIvLtd8aMGT79vIqi6lrcZ1GwHmqpVauW8/OtUaOGPSQkJN9rUK9TvY7s7Owi96k+k/HjxzvfR8eiPtvKlSvne6xBgwb2ZcuW+fDVA/Cl9evX21u0aJHvexseHq5/I/L+zloslkK/td78zr3//vt6v3mPQ5GRkc6/GzZsaN+9e7fb45Za1G+Wqlfe/aj7CxcuzLdd3m0cx4y8ZanX8vLLL5fo/Ro2bJhzv8UdR9Vy9OhRl3VTx8vifuPVcbeg1NRU+6BBg/K9RvVe5j22tmvXrtB2a9asybeO+q3O+3tdrVo1+9q1a13W5+DBg/rzcayrjn+Ofan3/+OPPy7ydSUnJ9uvu+66QnVWZarPwvGYOqYUpP631HPqf82X8p6vFPVZFKyH+j9yfLZ16tSxV6pUKd/rUkvXrl3tP/74Y5l+9wCUL9pyf6ItZ7Ytp/4HvGWz2exPPvmkPSoqqlC7L+/7ohZ1/Jk3b57bY2BxizpHAlD+aM95h/ZcxW7PlfRYpf634+Pj870H6vWq/+eC10b79u1r/+2339wez4tbSnK8BxCYQk0HzoG8Tp48me9v1WsxNjZWzwOj0ksOHTpU95gsS6qXdXEp3soiJWWfPn10L33VY1/1yjx48KDunad647du3Vr39FajdPNS78V//vMfueWWW+Stt97Sc5SpXt9q3haVgk+lrla9/txRvQHViAc1mlhtq3q5qbRzqpem6tnv6MnvzsCBA3WvTtULX83ppuqvejKqHqFt2rTRPfSvvfZa3cPU36meew5qzrPatWvr9171zFWjEFTvS0/m41OfyT/+8Q/9/r3zzjt6Phz1Waren+o5lZZPfTbqvVOLSuUEwD+p0Teqd7AaHapG3Hz//fdy6tQpPQJH9dhWv3Oq9/tdd92lf6dLSo346dixo7z00kt6NJbqWa6Of+r3R/2GT5o0Sfdizkv9Pn3yySe6B/d3332nt1F1U6On1O+M6rGuRnKpXvx5ffnll3obNcLq6NGjzuOu2kaNJlK9wVVdSkP1gi5
4PHfF0ZvdV9Rvt+rxreZ2U8dE9Xmp3/aYmBidVrVnz576+FaQ+gxVj/qZM2fq4+/hw4d12jl1PFTHMJVCVJ2LuKJ6zm/btk0fL9VILZWGUI26u/rqq3WP/uLeS/W5qjnR1DFUjUZUn6V679R1H/UZX3jhhfqzVMd5f6dG+alFUf+H6n1X75t6H9VIBnUcVedz/vTdA+BbtOVoywUSdaxXIy1VqlfVblPnYOrYc+bMGT1aUY1q7NChg1x33XX6M1LHNQCBhfZcydCeq5jtuZJS7/PmzZv156beQzXSXWXcUedj6v1RI627d++uR6g7RrgDQHEsKnpd7FoAAAAAAAAAAAAAgGKpjhwrVqzQ0wepRXWWUZ3+VacbNbVaaaxevVp32lEdfVSafdXx8uabb9Ydb4oagKfWVdMVLVq0SI4cOSLR0dE63f9f//pX3UnIHxC4BgAAAAAAAAAAAAAfeeWVV2Ty5MmFHi9t4Hr27Nk6M4cK7zZo0EBnj929e7czC5PKzKGyixSkMlmoLAh79+7VmWBVZojTp0/rzB8qW4XKJHvvvfeKaVbTFQAAAAAAAAAAAACAYKFS5vfu3VumTJkiS5YskalTp5Z6n5s3b9bTUCgJCQl66ogtW7bIzz//rKc4UNMnqGmHXBk1apQOWqv11PpqO7W92o8Kgk+YMEGPCjeNEdcAAAAAAAAAAAAAUEbUiOb777+/VCOuBw4cKB9//LEMHTpU3n333XzP7d+/X9q0aSM2m022b98uF198sfO5rVu36rnmrVarDl63aNEi37Zqf/Pnz5dBgwbJ4sWLxSRGXAMAAAAAAAAAAACAn0pJSZFly5bp+2PHji30fMuWLaVXr176/sKFC/M9p+a0VtTzBYPWyrhx4/Tt559/LqmpqWISgWsAAAAAAAAAAAAA8FNbt27V81ir+ak7d+7scp0ePXro2w0bNuR73PH35Zdf7nI7tT+134yMDOPpwglcAwAAAAAAAAAAAICf2rdvn75t1KiRhIWFuVynefPm+lalA3e1reP5gtT+GjZs6HLb8hZqtHQAAAAAAAAAAAAAMCghIUESExO92kal7Hak2S5rZ8+e1bc1atRwu47juXPnzvls2/JG4LoY59PSjZQbZhFjbGKu8Fy73VjZGTnmyo4ON5f8wGLLNVKu3RoiFe01K9bM88bKtodXNle21dzhJjIqyljZgSTF0PFOyTX38ytZuRXvtz892yamRIUZPN4ZPMew5GYZK9seEm6sbIstx1jZ//7JTCNrUJtaYkq16ErGyg406RkZUtGY/A00yW4x2LCuoJ+3qfc812buNUf+ftRY2VnVG0tFVCkq0nQVAoLJNl6omGtzpOWa++2PtqVVyOsulmwzrzsk9YyYklslzljZtjBz17m+/dXc//h3R8wF0kZdUt9IubFVzV3LDTThHUb6ZD+P3dBItmzZ4tU2SUlJUl4y/tuWDQ93f61HpftW0tPTfbZteSNwDQAAAAAAAAAAAKDCiouLk/j4eK+3KS+RkX923svKcj9AQs2BrUQVGMyltk1LSyvRtuWNwDUAAAAAAAAAAACAgGPxUXZXlfK7vNJ+l0T16tXzpf12xfGcY92826rAdUm2LW/m8jUCAAAAAAAAAAAAAIrUqlUrfXv06FHJzs52uc7BgwfzrVtw2wMHDrjcTu1P7dfVtuWNwDUAAAAAAAAAAACAgBxx7YvF33Xo0EHPUa1Sem/cuNHlOuvWrdO3Xbp0yff4ZZddlu/5gtT+VBpxlVK8ffv2YhKBawAAAAAAAAAAAADwUzExMdK3b199PzExsdDz+/fvl1WrVun7gwcPzvec4+/Vq1e7HHWdkJCgb/v37y/R0dFiEoFrAAAAAAAAAAAAAAEn2EZcd+/eXZo0aSKvvPJKoeemTp0qFotF5s+fr4PXdrtdP56UlCRDhgwRm80mAwcOlHbt2uXbLj4+Xq677jrJzc2V2267Ta+vqO3VftT+rFarPP7442JaqOkKAAA
AAAAAAAAAAIC3/CnonNexY8d0em+HjIwMffvNN99IrVq1nI8//PDDenH45Zdf5MiRI/L7778X2menTp1k1qxZ8sADD8i4ceNkxowZel+7d+/WKcRbt24tb775psv6vP3229KtWzfZvHmzNG3aVC688EL57bffdD1VMFwFylWA2zRGXAMAAAAAAAAAAACAj6jRzWfOnHEuqamp+vGcnJx8j6elpXm130mTJsmKFSt0Wm+1TxW0bty4sTz22GOyadOmfEHxvGrXrq2D1mo9tb7aTm2v9vPVV1/J/fffL/6AEdcAAPgpNefIzJkz5fvvv5eUlBR9QnHzzTfLo48+KpUrVzZdPQAAAACAF2jjAQDge5YQ/xxxrdJ9O1J5e+Pw4cPFrnPVVVfppSTzZD/99NN68VeMuAYAwA/Nnj1bn3wsXbpUIiMj5YILLtAnLSr9i0oJc/bsWdNVBAAAAAB4iDYeAABA8QhcAwDgZ1TKFpXyRUlISJCjR4/Kli1b5Oeff5aOHTvKTz/9JGPGjDFdTQAAAACAB2jjAQBQdqzWEJ8s8A8ErgEA8DPTp08Xm80md911l4wdO1YsFot+vF69erJgwQKxWq2yZMkS2bFjh+mqAgAAAACKQRsPAADAMwSuAQDwI2qes2XLlun76oJGQS1btpRevXrp+wsXLiz3+gEAAAAAPEcbDwCAsmWxhvhkgX8gcA0AgB/ZunWrZGZmSkREhHTu3NnlOj169NC3GzZsKOfaAQAAAAC8QRsPAICyReA6uBC4BgDAj+zbt0/fNmrUSMLCwlyu07x5c327d+/ecq0bAAAAAMA7tPEAAAA8F+rFugAAoIydPXtW39aoUcPtOo7nzp07V271AgAAAAB4jzYeAABly2JljG4w4dMEAMCPZGRk6Nvw8HC366gUc0p6enq51QsAAAAA4D3aeAAAAJ5jxDUAAH4kMjJS32ZlZbldR82PpkRFRbldJyEhQRITEz0ud9jw4TJy1Giv6goAAAAAKBptPAAAyhbzUwcXAtcAAPiR6tWr50sn54rjOce6riQlJcmWLVs8Lrdvv35e1RMAAAAAUDzaeAAAAJ4jcA0AgB9p1aqVvj169KhkZ2dLWFhYoXUOHjyYb11X4uLiJD4+3uNyY2NjS1RfAAAAAIB7tPEAAChbjLgOLgSuAQDwIx06dNBzn6lUcRs3bpRu3boVWmfdunX6tkuXLm73M27cOL14KiWNudQAAAAAwNdo4wEAULYIXAcXq+kKAACA/4mJiZG+ffvq+67mL9u/f7+sWrVK3x88eHC51w8AAAAA4DnaeAAAAJ4jcA0AgJ+ZOnWqWCwWmT9/vr6wYbfbnXOaDRkyRGw2mwwcOFDatWtnuqoAAAAAgGLQxgMAoOxYQkJ8ssA/ELgGAMDPdOrUSWbNmqXvq1RwjRs31nOZNW3aVDZv3iytW7eWN99803Q1AQAAAAAeoI0HAADgGQLXAAD4oUmTJsmKFSukf//+kpqaKrt379YXNx577DHZtGmT1KpVy3QVAQAAAAAeoo0HAEDZzXHtiwX+IdR0BQAAgGtXXXWVXgAAAAAAgY82HgAAQJCNuF69erVcd911Urt2bYmKipI2bdroeWJUT0UAAAAAQGChjQcAAAAAKClGXAeXgApcz549W/dKXLp0qURGRsoFF1wghw8flhkzZui5Ys6ePWu6igAAAAAAD9HGAwAAAACUhtUa4pMF/iFgAtebN2/Wc8EoCQkJcvToUdmyZYv8/PPP0rFjR/npp59kzJgxpqsJAAAAAPAAbTwAAAAAABCQgevp06eLzWaTu+66S8aOHSsWi0U/Xq9ePVmwYIFYrVZZsmSJ7Nixw3RVAQAAAADFoI0HAAAAACgtUoUHl4AIXKekpMiyZcv0fXVBo6CWLVtKr1699P2FCxeWe/0AAAAAAJ6jjQcAAAAAAAIycL1161bJzMyUiIgI6dy5s8t1evTooW83bNhQzrUDAAAAAHiDNh4AAAAAwBcYcR1
cAiJwvW/fPn3bqFEjCQsLc7lO8+bN9e3evXvLtW4AAAAAAO/QxgMAAAAAAAWFSgA4e/asvq1Ro4bbdRzPnTt3rtzqBQAAAADwHm08AAAAAIAvMFo6uARE4DojI0PfhoeHu11HpZhT0tPTy61eAAAAAADv0cYDAAAAAPgCgevgEhCpwiMjI/VtVlaW23XU/GhKVFRUudULAAAAAOA92ngAAAAAACAgR1xXr149Xzo5VxzPOdZ1JyEhQRITEz0ue+jw4TJy1GiP1wcAAAAA+G8bb/iIETJ6NG08AAAAAAgGjLgOLgERuG7VqpW+PXr0qGRnZ0tYWFihdQ4ePJhvXXeSkpJky5YtHpfdp18/r+sLAECgsRssO8JiM1Z2pDXHWNmSaybxTYjV3OmfxW7uPy3b4D95uC3XXOFWc9+vXGvhc/byclvbOkbKDcn9c4Qw/LuN169/f6/rCwBAoIn8/aixsjOqNTJWdiWDV7vTbZWNlR0pBtu2NjNtjuxaLcSU8wYbmFVs2cbKjo819z/evZq5tpYtPCASFwNBIyAC1x06dNBzn6lUcRs3bpRu3boVWmfdunX6tkuXLkXuKy4uTuLj4z0uOzY2tgQ1BgAAAAC4QxsPAAAAAOALlhBGXAeTgAhcx8TESN++feXTTz/VKeAKXtTYv3+/rFq1St8fPHhwkfsaN26cXjx1Pi29hLUGAAAAAPhbGy89I6OEtQYAAAAAAGUpYHIcTJ06VSwWi8yfP19f2LD/N9WkSgs3ZMgQsdlsMnDgQGnXrp3pqgIAAAAAikEbDwAAAADgizmufbHAPwRM4LpTp04ya9YsfV/1pm/cuLFOB9e0aVPZvHmztG7dWt58803T1QQAAAAAeIA2HgAAAACgtAhcB5eACVwrkyZNkhUrVkj//v0lNTVVdu/erS9uPPbYY7Jp0yapVauW6SoCAAAAADxEGw8AAAAAAATUHNd5XXXVVXoBAAAAAAQ+2ngAAAAAgJJitHRwCagR1wAAAAAAAAAAAACA4EPgGgAAP3PixAmZP3++TJgwQbp06SJRUVFisVikZ8+epqsGAAAAAPASbTwAAMqO1WrxyQL/EHCpwgEACHbvv/++TJ482XQ1AAAAAAA+QBsPAADAM4y4BgDAz1SpUkV69+4tU6ZMkSVLlsjUqVNNVwkAAAAAUEK08QAAKDsWq8UnS1lZvXq1XHfddVK7dm2ddaVNmzb6XCA1NdWr/axZs0ZnbPFkefLJJwttX9w2sbGx4g8YcQ0AgJ8ZOXKkXhyOHz9utD4AAAAAgJKjjQcAQNlRQVd/NXv2bJk4caLY7XZp0KCBNGzYUHbv3i0zZsyQxYsXy/r166VGjRoe7atq1arSrVs3t88nJyfLzp079f2uXbu6Xe+SSy6RiIiIQo/XrFlT/AGBawAAAAAAAAAAAADwkc2bN8ukSZP0/YSEBBkzZowOsv/6668yYMAA/bx6TAWwPdGhQwcd6HZHjbJWgWsVHL/qqqvcrrdw4UJp0qSJ+CtShQMAAAAAAAAAAAAIOFarxSeLr02fPl1sNpvcddddMnbsWOfI8Hr16smCBQvEarXqKUR27NhR6rLsdrvMmzdP3x86dKjed6AK3JoDAAAAAAAAAAAAgB9JSUmRZcuW6fsqaF1Qy5YtpVevXs4R0KW1du1a+fnnn/X94cOHSyAjVTgAAAAAAAAAAACAgGMpg9HSpbV161bJzMzUc0l37tzZ5To9evSQlStXyoYNG0pd3ty5c/Vt9+7dpUWLFsWOBFfpynNycqR+/fo6gH7rrbe6nPfaBALXAAAAAAAAAAAAAOAD+/bt07eNGjWSsLAwl+s0b95c3+7du7dUZaWmpsqiRYs8Hm399ttv5/v73XfflWnTpum5tuPj48U0AtcAAAShhIQESUxM9Hj9ocOHy8hRo8u0TgAAAACA8mnjjbplgIy967YyrRMAAME04trbY60jDfi4ceMKPX727Fl9W6NGDbfbOp47d+6clMbChQt
1avJKlSrJLbfc4na9G264Qc+33a5dO2nQoIHeRo34/tvf/qbTjPfp00ePFG/YsKGYROAaAIAglJSUJFu2bPF4/T79+pVpfQAAAAAA5dfGu7ZnlzKtDwAA/sJqsRg51jq2cSUjI0PfhoeHu93WkZo7PT1dfJEm/KabbpKYmBi363300Uf5/o6MjJTbbrtNevfuLR07dpSjR4/Kk08+KXPmzBGTCFwDABCE4uLivErtEhsbW6b1AQAAAACUYxuvTu0yrQ8AABX9WOvYxhUVFFaysrLcbqvmwFaioqKkpA4dOiRr1671OE24K7Vq1ZIpU6bIPffcIx9++KG8+eabYvFRZ4CSIHANAEAQUilqXKWpced8Wul69gEAAAAA/KeNl/Nr6ebLBACgoqUK9/ZYW5Tq1avnSxnuiuM5x7ol8e6774rdbpfGjRvLlVdeWeL9dO3a1VkntdSsWVNMsRorGQAAAAAAAAAAAACCSKtWrfStSr+dnZ3tcp2DBw/mW9dbdrtd5s2bp+8PGzasVKOk86Y0z8nJEZMIXAMAAAAAAAAAAAAIyBHXvlh8qUOHDjoYrNKBb9y40eU669at07ddunQpURlff/21ThWuAtYqcF0aO3fudKY4NznaWiFwDQCAnzl27JieW8SxPProo/rxb775Jt/jL7zwgumqAgAAAACKQRsPAICKJSYmRvr27avvJyYmFnp+//79smrVKn1/8ODBJSpj7ty5+rZHjx7SrFmzEtdVjbCeOXOmvt+rVy8JDTU7yzSBawAA/Exubq6cOXPGuaSmpjpPIvI+npaWZrqqAAAAAIBi0MYDAKDsWK0Wnyy+NnXqVD0aev78+Tp4rVJ7K0lJSTJkyBCx2WwycOBAadeuXb7tmjRpopdFixa53XdKSorz+REjRhRbF9VpTs2Hff78+UKd61TgfMOGDTpg/cQTT4hpZsPmAACgEHVi4jiRAQAAAAAENtp4AACUHYufDtHt1KmTzJo1Sx544AEZN26czJgxQ2dY2b17t04h3rp1a3nzzTcLbXfkyBFncNodFbRWHeEqV67s0YjtPXv2yPPPPy+jRo3So7Nr1KghycnJsnfvXn2OolKEz5kzRy699FIxjcA1AAAAAAAAAAAAAPjQpEmT5KKLLtKpuL///ns5deqUNG7cWAebp0yZItHR0aVKEz548GCP9nHPPfdIbGysbNq0SY4fPy6HDx+WiIgIadu2rfTu3Vvuu+8+ad68ufgDAtcAAAAAAAAAAAAAAo5Kx+3PrrrqKr14yu5BlpY1a9Z4VQc137Zjzm1/56cD6AEAAAAAAAAAAAAAFQUjrgEAAAAAAAAAAAAEHKvVv0dcwzuMuAYAAAAAAAAAAAAAGMWIawAAAAAAAAAAAAABx8KI66BC4BoAAAAAAAAAAABAwCFwHVxIFQ4AAAAAAAAAAAAAMIoR1wAAAAAAAAAAAAACjtXCiOtgwohrAAAAAAAAAAAAAIBRjLgGAAAAAAAAAAAAEHCY4zq4ELguhql/9xy7oYJV2TabsbJtBl93dLi5BAQhmSnGyrYe+N5IuTkX9hJTbJYQc2VHVjNWtsmMKbkmv9zwfxZzv7+5IREV7jsZYvD7aDf4Q2Tul1/EHhZlrGxLbpaxsiXE3LsekptZ4X5T4Dlrbraxsu0Gj3nGVMTXrP7PsjOMlR16/EdjZWc3ijdSbqjB/7Ocag2MlQ0UxVapurGyQwwGEExefojKPGus7KxIc593uNVa4dqX1TJPGyvbFlXVWNmhBr/bv4eZ+x+vxGVNoFwRuAYAAAAAAAAAAAAQcBhxHVwIXAMAAAAAAAAAAAAIOFYC10GlYubsAgAAAAAAAAAAAAD4DUZcAwAAAAAAAAAAAAg4FoNz3sP3GHENAAAAAAAAAAAAADCKEdcAAAAAAAAAAAAAAo6FIbpBhY8TAAAAAAAAAAAAAGAUgWsAAPyI3W6Xb7/9Vh599FHp3r271KxZU8LCwqR27drSp08f+fe//63XAQA
AAAD4P9p4AACULavV4pMF/oFU4QAA+JFVq1ZJ7969nX83a9ZMmjZtKocOHZIVK1boZcGCBbJ48WKJiIgwWlcAAAAAQNFo4wEAULYsBJ2DCiOuAQDwI6qnvbqI8eqrr8rJkyfl4MGDsmnTJjlz5ozMmzdPX8hYunSpPPHEE6arCgAAAAAoBm08AAAAzxG4BgDAj3Tu3Fn27t0rEyZMkDp16uR77q677nJezJgzZ47YbDZDtQQAAAAAeII2HgAAZctisfhkgX8gcA0AgB+pUqWKnu/Mnf79++vbs2fPyunTp8uxZgAAAAAAb9HGAwAA8BxzXAMAEEDS09Od96OioozWBQAAAABQOrTxAAAoHStzXAcVRlwDABBAFixYoG/btWune+4DAAAAAAIXbTwAAID/YcQ1AAABYvPmzfLGG2/o+48++qjp6gAAAAAASoE2HgAApWdhxHVQIXANAEAAOHnypAwaNEhycnLkxhtvlNtuu810lQAAAAAAJUQbDwAA3wghcB1USBUOAICfS05Olv79+8vRo0elY8eOMnfuXNNVAgAAAACUEG08AAAA1xhxDQCAH0tJSZF+/frJ1q1bpW3btrJ8+XKP5j1LSEiQxMREj8sZOny4jBw1upS1BQAAAAD4Qxtv5J1DZMyIoaWsLQAA/o8R18GFwDUAAH4qLS1Nrr32WtmwYYO0bNlSVq5cKTVr1vRo26SkJNmyZYvHZfXp168UNQUAAAAA+FMb75qre5WipgAAAGYQuAYAwA9lZGTIgAEDZO3atdK4cWP56quvJDY21uPt4+LiJD4+3uP1vdk3AAAAAMDP23h165awpgAABBZGXAcXAtcAAPiZ7Oxsuemmm/SFjPr168uqVaukYcOGXu1j3LhxevHU+bT0EtQUAAAAAOCPbbys30+VoKYAAABmEbgGAMCP5Obmyu233y6ff/657n2vLmg0a9bMdLUAAAAAACVAGw8AgLLFiOvgQuAaAAA/8sEHH8iiRYv0/cjISBk5cqTbdWfPni0dOnQox9oBAAAAALxBGw8AgLJF4Dq4ELgGAMCPZGZmOu8fPnxYL+4kJyeXU60AAAAAACVBGw8AAMBzVi/WBQAAZWz48OFit9s9Wnr27Gm6ugAAAACAItDGAwCgbIVaLT5Z4B8IXAMAAAAAAAAAAAAAjAqYwPWJEydk/vz5MmHCBOnSpYtERUWJxWKhJyIAAAAABCDaeAAAAAAAX8xx7YsF/iFg5rh+//33ZfLkyaarAQAAAADwAdp4AAAAAAAgIAPXVapUkd69e0unTp30snXrVpk+fbrpagEAAAAASoA2HgAAAACgtBgtHVwCJlX4yJEjZcWKFfLMM8/IjTfeKHXq1DFdJQAAAABACdHGAwAAAAAEu9WrV8t1110ntWvX1lNktWnTRqZOnSqpqale72v48OF6iq2ilmXLlrndPiUlRR5//HFdB1UXVSdVtzVr1oi/CJgR1wAAAAAAAAAAAADgEGL13zG6s2fPlokTJ4rdbpcGDRpIw4YNZffu3TJjxgxZvHixrF+/XmrUqOH1fhs2bCiNGjVy+Vz16tVdPv7bb79J9+7dZe/evRIRESEXXnihnD59WpYuXSqff/65/OMf/5B7771XTCNwDQAAAAAAAAAAACDg+Guq8M2bN8ukSZP0/YSEBBkzZoweEf3rr7/KgAED9PPqMRXALkkGs7///e9ebTNq1CgdtO7YsaN88sknUq9ePR1Qf/PNN2XcuHEyYcIE6dq1q7Rv315M8t9uCAAAAAAAAAAAAAAQYKZPny42m03uuusuGTt2rA5aKypgvGDBArFarbJkyRLZsWNHmddl69atOlitynz//fd1HRRVJ1U3Vcfc3FxdZ9MIXAMAAAAAAAAAAAAIyBHXvlh8Sc0l7ZhrWgWGC2rZsqX06tVL31+4cKGUtUWLFulbVWaLFi0KPa9GXCsqZXhJ5t72JVKFAwAAAAAAAAAAAICPRjhnZmbquaQ7d+7scp0ePXr
IypUrZcOGDV7vf/Xq1bJr1y45c+aMVKtWTaf/vvPOO6Vx48Yu13eUcfnll7t8XtVR1TUjI0O2bdsm3bp1E1MYcQ0AAAAAAAAAAAAg4PjjiOt9+/bp20aNGklYWJjLdZo3b65v1bzT3lq7dq0eRa0C2B9++KE8/vjjehT3Cy+8UGR9HGUWpOrYsGHDEtfHlyrciGs1AXpiYqLH6w8bPlxGjhpdpnUCAAAAAJRPG2/EsKEyZtTIMq0TAAAAACC425aONOCONNt5nT17Vt/WqFHD7baO586dO+dxeS1btpSZM2fqlN9NmjTRo6TVHNnqMZVy/JFHHpHo6Gi59957y6U+ZaHCBa6TkpJky5YtHq/ft1+/Mq0PAAD+INTHvQq9kW2zGys7xNzLNiY502as7Frh5sq2W8wlGrLkZhsr2xYWaaxsMffVlixruJFyw3MyxByDn3WAtfH69+1TpvUBAMAfpIREGys7MyPXWNlVI0KMlW0PNXc+FnX6z5F0Jvwgf47QK28dc0+JKbmVaxoru8ZVU4yVfe7Lp4yVHRNh7vuVnm3uWgY8E2KxGGlbOrZxRaXcVsLD3V+fUEFnJT093ePy/va3vxV67NJLL5UPPvhAxo8fL6+//rpeZ+jQoTqAXdb1KQsVLnAdFxcn8fHxHq8fGxtbpvUBAAAAAJQcbTwAAAAAqLh8lebb27alYxtXIiP/7GyRlZXldls1B7YSFRUlvvDMM8/InDlz5Pfff5dVq1bJgAED8tUnLS2tXOtTUhUucK2G7Lsatu9OSprZngUAAAAAAN+18TJTz5dpfQAAAAAAwd+2LEr16tXzpeh2xfGcY93Sqlq1qrRt21a2bt0q+/fvL1QfFbguz/qUVIULXAMAAAAAAAAAAAAIfL4ace1LrVq10rdHjx6V7OxsCQsLK7TOwYMH863rC+H/TQWek5NTqD7Hjx+XAwcOuNxO1VHV1df1KQlzk/156dixY1KrVi3n8uijj+rHv/nmm3yPv/DCC6arCgAAAAAoBm08AAAAAEAw6tChgw4iq/TbGzdudLnOunXr9G2XLl18UmZOTo7s2bNH32/QoEG+5y677LJ8ZRak6qjSiKuU4u3btxeTAiZwnZubK2fOnHEuqampzg8i7+NqqDsAAAAAwL/RxgMAAAAAlFao1eKTxZdiYmKkb9+++n5iYmKh51UqbzUPtTJ48GCflJmQkCDJyckSGhoqvXr1yveco4zVq1e7HHWttlX69+8v0dHRYlLABK6bNGkidru92OXvf/+76aoCAAAAAIpBGw8AAAAAEKymTp0qFotF5s+fr4PXqn2rJCUlyZAhQ8Rms8nAgQOlXbt2hdrKTZo0kUWLFuV7fMWKFfLII48Umr9ajZSePXu2PPDAA/rvu+++W+Li4vKtEx8fL9ddd53uQH7bbbfpOiiqTqpuqo5Wq1Uef/xxMY05rgEAAAAAAAAAAAAEHH+c41rp1KmTzJo1SweUx40bJzNmzNDTYe3evVunEG/durW8+eabhbY7cuSIvk1JScn3uMpSpqbSUkvdunWd6cD37t3rXPemm26SmTNnuqzP22+/Ld26dZPNmzdL06ZN5cILL5TffvtNT+OlAuyvvPKKDnCbFjAjrgEAqCgWLlwoY8eOlUsuuUTq1asnEREROr2MOnFQPfVU2lQAAAAAQGCgjQcAQNkGrn2xlIVJkybpkdIqBbcKPKugdePGjeWxxx6TTZs26UC2pzp27KjPG3r37q3nolbzWf/4449StWpVGTRokHzyySd6lLaaW9uV2rVr66C1KlvVQdVF1UnV7auvvpL7779f/IHF7hibDpdS0tKlosmxmfuXMFi0RIWZ68cRkpm/50x5sh743ki5ORfmn2OhPNnEP3tglTWLwZeda/DLHV0pSgJN+/btZfv27fpihkrrok5gTp06JUePHtXP16lTR7788stCaWRKIz0jQyricSfE4BfDVNFn03PNFCwitcJtxsq2W8wd5y252cbKtoV
FGivbZCsj11Dh4bmZYkpETDVjZQeazNTzFfK3yBiDr9lu8DhvzTZ3bhV6/EdjZWc3MjRCw+R3y27u/MZmCZGKqFKUufObQGrjnT2fJqZk5po7EawaYe57EZJt7j0PPXfMWNk/SEMj5XasZO56am7lmsbKrnHVFGNln/vyqQrZtk3PNnOsrxFTyUi5gejxL37yyX5m9L/AJ/tB6VTAVjMAAP5t/Pjx8vXXX8v58+fl0KFD8sMPP+gUMTt27JC//OUv+gLH7bffbrqaAAAAAAAP0MYDAKBijriG9whcAwDgZ8aMGSOXX365hIWF5Xv8oosukrfeekvfV6lcfvrJN70JAQAAAABlhzYeAACAZ0I9XA8AAPiBCy74X8qatDRzacgAAAAAAKVHGw8AgNJhtHRwYcQ1AAABZP369fo2OjpaWrdubbo6AAAAAIBSoI0HAADwP4y4BgDAz9lsNjlx4oR8+eWX8sgjj+jHnnvuOX1hAwAAAAAQWGjjAQDgO4y4Di4ErgEA8FOvvPKKTJ48Od9jnTt3lnfffVf69etnrF4AAAAAAO/RxgMAwPcIXAcXUoUDAOCn6tevL926dZNLL71U4uLixGKxyLZt22TevHny+++/m64eAAAAAMALtPEAAACKxohrAAD81M0336wXhx07dsh9990nCxYskJ9++kk2bdokISEhLrdNSEiQxMREj8saPmKEjB492if1BgAAAACYbePdMXS4DB85yif1BgDAnzHiOrgQuAYAIEBcfPHFsnTpUmnWrJnulf/+++/LHXfc4XLdpKQk2bJli8f77te/vw9rCgAAAAAw2cbr3YfU4wAAIPAQuAYAIIDExMTIFVdcIYsXL5bNmze7vaih0s7Fx8d7vN/Y2Fgf1hIAAAAAYLKNV5c2HgCggmDEdXAhcA0AQIDJycnJd+vKuHHj9OKp9IwMn9QNAAAAAGC+jXf2fJpP6gYAAFCeCFwDABBAzp49K2vWrNH3O3ToYLo6AAAAAIBSoI0HAEDpMOI6uFhNVwAAAPzP119/LTNmzJDDhw8Xek7NZ9a3b19JTk6W+vXry80332ykjgAAAAAAz9DGAwCg7APXvljgHxhxDQCAHzl37pxMnTpVL2reaXXxIiQkRI4dOyZJSUl6HfXYZ599JtHR0aarCwAAAAAoAm08AAAAzxG4BgDAj3Tt2lVmzZqlU8Xt2rVL9u3bJxkZGVK9enW58sor5frrr5fRo0dLTEyM6aoCAAAAAIpBGw8AgLIVYmG0dDAhcA0AgB+pU6eOTJ48WS8AAAAAgMBGGw8AAMBzBK4BAAAAAAAAAAAABBwrI66DitV0BQAAAAAAAAAAAAAAFRsjrgEAAAAAAAAAAAAEnBAGXAcVAtcAAAAAAAAAAAAAAo7VSuQ6mJAqHAAAAAAAAAAAAABgFCOuAQAAAAAAAAAAAAScEAsjroMJI64BAAAAAAAAAAAAAEYx4hoAAAAAAAAAAABAwLEy4jqoMOIaAAAAAAAAAAAAAGAUI64BAAAAAAAAAAAABJwQBlwHFQLXAAAAAAAAAAAAAAKO1UrkOpgQuPZToWIzVrY1JMRY2RGn9horO612K2NlW8KijJWd2/YqM+Xa7GJKWE6asbLtoRHmyraY+24zz4j/s5v7SkpEZrKxsu3hlY2VbQsJM1JurXBz5xi5VjOvWfktPcdY2dFh4cbKrpydYazs3NBIY2WbOur8YTf3Wdc2VnLgsVvNNYMt2elmCraaOw8MO7XfWNkZcX+pkL+BtiadjJVtMXVSaTd3fpNtcBa+MDF3Ep9jsP0Az0Tn/GGs7JgsQ8c7JdvgtY8zR4yVLZExxopuUt3Mta51p3PFlK7mLiXI758/ZqzsFDHX3on5/bixsqulnDZTcMxlZsoFDCNwDQAAAAAAAAAAACDgMGgquJjrFgoAAAAAAAAAAAAAACOuAQAAAAAAAAAAAASiEAZcBxVGXAMAAAAAAAAAAAAAjGLENQAAAAAAAAAAAICAwxz
XwYXANQAAAAAAAAAAAICAE2IlcB1MSBUOAAAAAAAAAAAAADCKwDUAAH7u888/F4vFopcmTZqYrg4AAAAAoJRo5wEA4LtU4b5Y4B8IXAMA4MdSUlLknnvuMV0NAAAAAICP0M4DAABwjcA1AAB+7LHHHpOjR4/KDTfcYLoqAAAAAAAfoJ0HAIDvhFh8s8A/ELgGAMBPbdiwQf75z3/qixkDBw40XR0AAAAAQCnRzgMAoGJZvXq1XHfddVK7dm2JioqSNm3ayNSpUyU1NdWr/eTm5sqKFStk0qRJ0rlzZ6lWrZqEh4dLXFycPq9YunSp220PHz7snKLE3XLZZZeJPwg1XQEAAFBYdna2jBkzRipVqiT/+Mc/ZOXKlaarBAAAAAAoBdp5AAD4nj/PTz179myZOHGi2O12adCggTRs2FB2794tM2bMkMWLF8v69eulRo0aHu1r7ty5Mnr0aH3farVKixYtJDo6Wg4cOCCffPKJXsaOHStvvPGGDkS7061bN5ePt23bVvwBgWsAAPzQs88+Kzt37pSXX35Zn9QAAAAAAAIb7TwAAHwvxOqfgevNmzfr0dFKQkKC7rymAsq//vqrDBgwQD+vHlMBbE/Y7Xa5+OKLZcKECTJ48GCpWrWqfjwnJ0deeeUVefjhhyUxMVHat28v99xzj9v9qGC5PyNVOAAAfuann36SZ555RuLj4+X+++83XR0AAAAAQCnRzgMAoGKZPn262Gw2ueuuu/RIaMco6Hr16smCBQv0qOklS5bIjh07PNrfoEGDZNu2bTJq1Chn0FoJDQ2Vv/71r87R2CpIHsgIXAMA4EdUzznV006lkFMnGSEhIaarBAAAAAAoBdp5AACUHTXg2heLL6WkpMiyZcv0fRW0Lqhly5bSq1cvfX/hwoUe7bNGjRpFpgDv37+/vt27d68EMlKFAwDgR/71r3/JN998o1O+XHLJJaarAwAAAAAoJdp5AABULFu3bpXMzEyJiIiQzp07u1ynR48esnLlStmwYYNPykxPT9e3lSpVKnI9dT6yZ88eHQRv0qSJ9O3bVwYOHKhHgPsDAtcAAPiJ48ePy5QpU6R+/foyY8YM09UBAAAAAJQS7TwAAMpWSBGjkE3Zt2+fvm3UqJGEhYW5XKd58+Y+HSG9YMECZ0C8KLNnz873t2NebJW2vGnTpmIagWsAAPyEmufsjz/+kHfeeUdiYmJKtS+Vfk6ddHhq+PARMuq/86AAAAAAAPyrnedtG2/knbfKmOFDS1weAAAVjbfHWkca8HHjxhV6/OzZs8703u44njt37pyU1scffyyfffaZHkX98MMPF3pezYN95513ym233SZt27bV82z/9ttvsnTpUnn88cf13Nl9+vSRzZs3S5UqVcQkAtcAAPiJLVu26Nt7771XL65SvRw7dkxiY2P1fdULrmvXri73lZSU5NyfJ/r1+3MOFAAAAACA/7XzvG3jXdP7ylLWHACAwGD10Yhrb4+1jm1cycjI0Lfh4eFut1VpxPOeD5TUnj17ZNiwYfr+pEmTXJ5HNGjQQObPn5/vMRW8HjNmjFx55ZXSsWNHOXDggLz22ms6kG0SgWsAAPzMyZMn3T5ns9mcz2dlZbldLy4uTuLj4z0u03GRBAAAAADgf+0879t4dUpQSwAAAk+Ij6Zm9vZY69jGlcjIyGKv36o5sJWoqCgpqWPHjuk5qpOTk+Waa66R559/3ut9tGjRQu655x69repAR+AaAABohw8fdvvc3LlzZcSIEdK4ceMi13NQKWpcpalxJy39z16AAAAAAAD/a+d528bLOnfCq3oCAFDReXusLUr16tXzpQx3xfGcY11vnThxQq666io5evSo9OzZUxYvXux2Pu3iOEZp79+/X0wjcA0AAAAAAAAAAACgwqYK96VWrVrpWxVUzs7OdhlQPnjwYL51vXHq1Cnp1auXDjR36dJFPv30U+co75JwpDTPyckR03w0gB4AAAAAAAAAAAAAKrYOHTroYLB
KB75x40aX66xbt07fqsCzN86ePStXX321/PTTTzq1+RdffCHR0dGlqu/OnTudc2GbRuAaAAAAAAAAAAAAQMAJsVh8svhSTEyMnntaSUxMLPS8Gim9atUqfX/w4MEe7/ePP/6QPn36yI4dO+Qvf/mLfPnll1K1atVS1TUlJUVef/11fV/t2zQC1wAABIDhw4eL3W73aH5rAAAAAID/o50HAEDwmjp1qlgsFpk/f74OXqtjvpKUlCRDhgwRm80mAwcOlHbt2uXbrkmTJnpZtGhRvsfT0tLk2muvlc2bN0ubNm3kq6++kpo1a3pUl7Fjx8qSJUv0CPC89uzZI/369ZNDhw7pUdsPPfSQmMYc1wAAAAAAAAAAAAACjj/Oca106tRJZs2aJQ888ICMGzdOZsyYIbVq1ZLdu3frAHLr1q3lzTffLLTdkSNHnCOh83r11Vdl/fr1zr8HDRrktmwV9I6NjXX+rdKVq7LUXNstWrSQKlWqyG+//eacZ7t69erywQcf6IC5aQERuFa9EL777jv55JNP9Iei8rar4fDVqlXTeeKHDRsmt99+u+65AAAAAADwb7TxAAAAAAC+EOLHuaUnTZokF110kcycOVO+//57OXXqlDRu3FinB58yZYpXc1Nn5hktrUZKFyUjIyPf36qsZcuW6dHaJ06c0KnKK1WqJB07dpT+/fvL+PHj8wW6TbLYHWPT/Zga7t67d2/n382aNdPRfzV0XU1Crqjh8YsXL5aIiAiflp2Sli4mhIpNTLFZQoyVHXFqr7Gy02q3MlZ2mMHP224183nn2sz99ITlmPleK/bQiAr3WeuyDR5pKkVFmis8gKSl5z+ZKU+hGb8bK9seXtlY2baQMCPlWnOzxZRcq5nXrPyWnmOs7Ogwcy2oypJlrOzcUHO/vzZDB570HHMHvNpVKkkgMdnGy0g3dy5oyTZUtsHzwLBT+42VnRH3F6mITPb3sJg68beba1NnG5yFL8zgZ23wkCfRlaLMFR5Ass6dMFa2JcvcsdbkMc965s8RckZExhgr+nT1lkbK3X06TUzpWs/cubc187yxslNCqxgrOyY1yVjZISmnjZRrbXGZkXID0bbjvrm22L5+NZ/sB6Xjx/0Q/kfF1ps2baqHwZ88eVIPXd+0aZOcOXNG5s2bpy9kLF26VJ544gnTVQUAAAAAFIM2HgAAAADAV6nCfbHAPwRE4Lpz586yd+9emTBhgtSpUyffc3fddZfzYsacOXP0ZOYAAAAAAP9FGw8AAAAAAARk4FpNEq4mDHdH5V9XVEq506fNpG0AAAAAAHiGNh4AAAAAwBfUYGlfLPAPARG4Lk56njnKoqKY5wYAAAAAAhltPAAAAAAAKp5QCQILFizQt+3atdM99wEAAAAAgYs2HgAAAADAE1ZhuHQwCfjA9ebNm+WNN97Q9x999FHT1QEAAAAAlAJtPAAAAACAp0jzHVwCOlX4yZMnZdCgQZKTkyM33nij3HbbbaarBAAAAAAoIdp4AAAAAABUXAE74jo5OVn69+8vR48elY4dO8rcuXM92i4hIUESExM9LmfY8OEyctToUtQUAAAAAOAvbbwRw4fL6NG08QAAAAAgGFgZcR1UAjJwnZKSIv369ZOtW7dK27ZtZfny5R7Pe5aUlCRbtmzxuKy+/fqVoqYAAASGrFybsbJTrDHGyrblGCtaqtsyjJS74ZS5F92lZrqxsmNtWcbKzgypaazsg3+YS7BUKyrXWNkxESFGyq1iNfd/FujKs43XnzYeAKACsIdXNlZ2cohnx/CyEBlqLnoREWLuUvvYZSeMlT3iUjPtvG6HPxNT7PVuMVb2fSvMfdYz+pq7fmPNOG+sbHtOtrGygYoo4ALXaWlpcu2118qGDRukZcuWsnLlSqlZ0/OLgXFxcRIfH+/x+rGxsSWsKQAAAACgOLTxAAAAAAAlxRzXwSWgAtcZGRkyYMAAWbt2rTRu3Fi++uorry86jBs3Ti+eSkkzNzIIAAAAAIK
ZiTZeRjptPAAAAAAA/FHABK6zs7Plpptu0hcy6tevL6tWrZKGDRuarhYAAAAAoARo4wEAAAAASssqDLkOJgERuM7NzZXbb79dPv/8c937Xl3QaNasmelqAQAAAABKgDYeAAAAAMAXSBUeXAIicP3BBx/IokWL9P3IyEgZOXKk23Vnz54tHTp0KMfaAQAAAAC8QRsPAAAAAAAEZOA6MzPTef/w4cN6cSc5ObmcagUAAAAAKAnaeAAAAAAAX7Ay4jqoWEu7A9UzXi2HDh2SsjJ8+HCx2+0eLT179iyzegAAAAAASo82HgAAAAAA8PmI63nz5kloaKi89dZbpd0VAAAAAAAAAAAAAHiEAdfBpdQjruvUqSOVKlUSC7OfAwDgE3//+9/1cbWo5Y033jBdTQAAAACAB2jjAQAAlNOI686dO8unn34qx48fl/r165d2dwAAIE/nsJYtW7p8Li4urtzrAwAAAAAoOdp4AAD4npWBtUGl1IHriRMn6sD1tGnTZM6cOb6pFQAAkP79+8vcuXNNVwMAAAAA4AO08QAA8D3i1sGl1KnCr7zySnn55Zfl3XfflVtuuUW2bNnim5oBAAAAAAAAAAAAACqEUo+4btasmb4NCwuTxYsX6yUqKkpq1qwpISEhLrdR87YcPHiwtEUDAAAAAAAAAAAAqKBKPUIXwRW4Pnz4cKHH0tLS9OKOClwDAICibd++XW6//XY5ceKExMTEyMUXXyy33XabtG3b1nTVAAAAAABeoo0HAABQxoHrd955p7S7AAAALmzbtk0vDp988ok8/fTTMnHiRHnppZfcZjYBAAAAAPgf2ngAAPgeg2WDS6kD18OGDfNNTQAAgFavXj156qmnpG/fvnpKDtUTf9++ffL666/LG2+8Ia+88oqeouOFF14wXVUAAAAAQDFo4wEAAJRT4BoAAPjW2LFjCz120UUXyb/+9S9p2rSpPPLII/Lyyy/LvffeK02aNDFSRwAAAACAZ2jjAQBQdqwMuA4qBK4BAAggDz74oLz66qvy66+/6rRyEyZMcLleQkKCJCYmerzfO4cNlxEjR/mwpgAAAAAAU228EcOGyphRI31YUwAA/BOZwoOLzwLXv/zyi8yaNUuWL18uR44ckYyMDMnJyXE+f+7cOd2LUOWaf+ihhyQ0lJg5AADeUnOeXXrppfLhhx/K/v373a6XlJQkW7Zs8Xi/V/ft56MaAgAAAABMt/H69+3joxoCAACUH59Ej1esWCG33HKL/PHHH2K3211Ohl69enX56KOPZPPmzdK2bVsZMGCAL4oGAKDCCQ8P17d5O4gVFBcXJ/Hx8R7vs25srE/qBgAAAAAw38aLpY0HAKggrKYrAP8KXB87dkwGDx4s58+f18HooUOHypgxY+T3338vtO7IkSNl06ZNsnTpUgLXAACU0M6dO/VtgwYN3K4zbtw4vXjq95Q0n9QNAAAAAGC+jZeZet4ndQMAAAiojggzZ87UQWs14lqNqB40aJCzl2BBffv21bc//PBDaYsFAKBCUp2/du3ape/36UPqNwAAAAAIZLTxAAAoHZUB2hcLgiRwrea0Vh/o9OnTi123adOmEhERIYcOHSptsQAABCV1wUL1ot++fXu+x202myxYsEBuv/12/fd1110nnTp1MlRLAAAAAIAnaOMBAACUY6rwo0ePSlRUlLRs2dKj9aOjoyU5Obm0xQIAEJSys7MlMTFRLzVq1JDGjRtLaGioHDhwQM6dO6fX6dGjh8yfP990VQEAAAAAxaCNBwBA2bIyWDqolDpwbbVaJTc316N1c3Jy5I8//pAqVaqUtlgAAIJSkyZNZMaMGfLdd9/JTz/9pC9mZGRk6Asc/fv3173xhwwZIiEhIaarCgAAAAAoBm08AADKFnHr4FLqwLXqJahOutTI60aNGhW57tq1a3UvQ09HZwMAUNFUq1ZN/va3v5muBgAAAADAB2jjAQAAlOMc171799a3b7zxRpHrqYC1OklT82Gr3oQ
AAAAAAAAAAAAAUJpU4b5YECSB68mTJ0t4eLjMnDlT3nrrLZfrbNmyRQe4v//+e4mJiZF77723tMUCAAAAAAAAAAAAAIKE1RepwufMmaPnuR47dqzUrVtXzp07p5/r2rWr1K9fXzp16iTr1q2T0NBQmTdvntSqVcsXdQcAAAAAAAAAAABQQalMz75YECSBa+WOO+6QL774Qpo3by6nT5+WrKwssdvtsmHDBklKStL3W7RoIcuWLZMBAwb4okgAAAAAAAAAAAAA8FurV6+W6667TmrXri1RUVHSpk0bmTp1qqSmppZ4n4sXL5Yrr7xSqlevLpUrV5b27dvLSy+9pKdtLsqpU6dk4sSJ0qxZM4mMjJTY2Fi59dZbZdu2beIvQn21o6uvvlr27t0ra9eulW+++UZ+/fVXPQpbvehu3brpNzAkJMRXxQEAAAAAAAAAAACowPx5furZs2frQLEa4NugQQNp2LCh7N69W2bMmKGDz+vXr5caNWp4tc+//vWvevpmRQ0oVoHrnTt3ykMPPSSffvqpfPnllxIREVFouwMHDkj37t3l5MmTepu2bdvKL7/8Ih988IF89NFHsnDhQr8YfOyzwLWihtJfccUVegEAAAAAAAAAAACAsuKvcevNmzfLpEmT9P2EhAQZM2aMjqOqgb8qQKyeV4+pALanPvzwQx20VoFpFXB2BJr37Nkj11xzjR5c/NhjjzkD2w4qcH7zzTfroHW/fv3k/fffl6pVq0pOTo489dRTMn36dJ1de9++fRIXFycBnSr88OHDvqkJAAAAAAAAAAAAAAQ4FQy22Wxy1113ydixY53zaNerV08WLFggVqtVlixZIjt27PB4n08++aS+feSRR/KNjlbpx+fMmaPv//Of/9TTOuf18ccf63TgKlj93nvv6VslNDRUB64vv/xySUlJ0enGTSt14FrNXd2/f389jFylBgcAAAAAAAAAAACAsma1WHyy+JIKAi9btkzfV0Hrglq2bCm9evXS91WKbk/s379ftm/f7nafan8qZpuZmSmffPJJvuccZahR12pe7IIc+1OjuAM+cK16C6h86TfddJPOza4mFD9y5IhvagcAAAAAAAAAAAAAAWLr1q06gKxSenfu3NnlOj169NC3GzZs8GifG/67XrNmzaR+/fpe7dPxtxpZXdR2as7r48ePS0AHrleuXKkj9GFhYXLixAl55pln9GTgKpc6o7ABAAAAAAAAAAAAlAU1WNoXiy+puaKVRo0a6fipKyqWquzdu9erfTb/73ae7jMrK8s57bO7bdXA5PDwcK/qU1ZCS7sDNfRcLWfOnJG5c+fKW2+9pScBV0Pgly9fLrGxsTJy5EgZPXq0NG7c2De1BgAAAAAAAAAAAAAfSEhIkMTERK+2USm2x40bV+jxs2fP6tsaNWq43dbx3Llz5zwq62wJ95mcnKyzZxe1rZp/u1q1anLq1CmP6+O3gWuHmjVryoMPPqiX9evX6w948eLFkpSUpEdhP/vss3L11VfrD/D666+XkJAQCQRh9hwj5WZbfPbReC0sN9NY2em1WxkrO9Tq4y41XsjIMVd2qNiNlJtrplgtxR5hrOzo0ie6KLGs7D8PTiaEGPx+wf8/o6q2DGNlJxv8PbDYzJxjxMdVFlOSc8z9+FeubO73N8zQsVZpEZVlrGxbuLn/tRybmfc8U1z3oi4P5n7NApDd3DlRblglI+X6evSAN1Jj21bI31+DhzyxGyzb0M+vhBu8xhRi8P3OyKWNB/dsIebOS15cddBY2dN7uk6dWi7+GxQwYUrvFsbKbhphps1hr3OjmPJLqrnP+sk+LY2VbfDSomTFtjFWtjUrzUi5gRFB8w8WH50Aq3jmli1bvN7GlYyMP693OkYxu6LSiCvp6ekelZVRwn06tvN1fcpKmURHu3fvrpfZs2fL/PnzZc6cOfLjjz/qubDV4hiFPWbMGD1MHgAAAAAAAAAAAABMdE6Oi4uT+Ph4r7dxJTIy0pmm2x0
1B7YSFRXlUVmRJdynYztf16eslOmwXjWs/P7779cjre+++25Zu3atftwxCvu5557T82Or0dikEQcAAAAAAAAAAABQ3lTGaFdpv0uievXq+dJ7u+J4zrFuWe2zatWqYrVadbpwd9va7Xb5/fffvapPWSmznIkqav9///d/csUVV0jbtm1l3bp1+nEVoJ48ebJ+LDc3V/7zn/9I+/btZfv27WVVFQAAAAAAAAAAAABBxmK3+WTxpVat/pwW9+jRo5Kdne1ynYMHD+Zb19N9HjhwwO06rvap0oM7Bg+72/bYsWPO0die1idgAte7du2SSZMmSb169WTYsGE6YK0m9b7mmmvk008/lZ9//llmzpwpO3bskFWrVslFF12kJwZ/5JFHfF0VAAAAAAAAAAAAACg3HTp00AFjlX5748aNLtdxDPjt0qWLR/u87LLL9O2hQ4fk+PHjXu3Tsa3jeXfbNWjQQC8BH7hWE3u/++670q1bN7n44ov13NZquHmdOnXkscce08FqFbS+9tprdRDboWfPnrJ8+XIJDQ11+8EBAAAAAAAAAAAAQCFqtLQvFh+KiYmRvn376vuJiYmFnt+/f78e3KsMHjzYo322atVKDwZ2t0+1PzWiWgXMBwwYkO85RxkLFy6Uc+fOFdrWsT81vbNppQ5c33fffXry8ZEjR8p3332n86CrgLRKAa6Gls+YMUMaNWrkdvu6detKbGysHnUNAAAAAAAAAAAAAIFs6tSpejDv/PnzdWBYxU+VpKQkGTJkiJ5zeuDAgdKuXbt82zVp0kQvixYtKrTPadOm6dvnn39eDxh22Lt3r4wePVrfv/fee6V27dr5tlPlqIHHKhZ7xx13OGOyakrnJ554QtauXSuVKlWSv/71r2JaaGl38Prrrzsn61apwe+++26v85937dpVTp48WdqqAAAAAAAAAAAAAKgo/hsQ9jedOnWSWbNmyQMPPCDjxo3TA31r1aolu3fv1inEW7duLW+++Wah7Y4cOaJvU1JSCj1300036emaX3nlFT2qunnz5hIdHS07d+7UQeju3bvLs88+W2g7q9WqR1v36NFDvvjiC6lfv760adNGD0A+deqUhIWFyf/93//paaADfsT1pZdeKu+8847Op64+gJJM2v3+++/L6tWrS1sVAACCzueffy6DBg3SJw0RERE6S4mamuPxxx+XnJwc09UDAAAAAHiBNh4AAMGfKtxBBZlXrFgh/fv3l9TUVB20bty4sZ5medOmTTqQ7a2XX35ZPvjgA7niiivkt99+k3379smFF16oR2GrdOGRkZEut1Px2x07duhM2mpE9o8//uhMI/7999/LjTfeKP7AYneMTYdLmannjZSbbSn1YPgSC7NlGSs72xpurOwQ6//mXy9vmTll86PoiVBDrzvX4C9PusH3Ozqs1P2FSizL4Jtu8vtVtXKUBCJ1wWLEiBG6p5vSsGFDfUHjzJkz8ssvv0hWVpacP39e96jzhfNp6WJKeI65spPtEcbKriZmXndmWGUxJSPH3O9QZYO/v1Yx97qtWanGyraFm/tfM/WvlmMz91lXi65krOxAk5Fm8HthCTFSrsXcqZjR70WYyddtsL1j8iKPqY87PMTch23yqlpWrq1CtvFiKtHG80R6RoaYMm3FQWNlT+9Z31jZlszCI+PKyyFbFWNlN40wcy3ZbjV3/fxYhplzOtNtW4OndVIjytx7bs1KM1JuRJUaRsoNRJl/nPXJfnjP/YO5X3cAAODWPffcoy9oqJQyCQkJ0qFDB+dzaWlpsnLlSt07HwAAAADg/2jjAQBQNixlNFoaZhC4BgDAz6jpM+bMmSNNmjSRr776SmJiYvI9X6lSJT2HCQAAAADA/9HGAwAA8IzP8kps375dxo4dq/OoV6lSRUJCQtwuoaHEywEAcGfmzJn69sEHHyx0QQMAAAAAEFho4wEAUDHnuIb3fBJB/sc//iEPPPCA5ObmClNmAwBQchkZGfLll1/q+71795b
du3dLYmKivlVp41Q6uVGjRknjxo1NVxUAAAAAUAzaeAAAAOUYuP7+++9l4sSJ+v69994r1157rVxzzTVSo0YN+eCDD+TEiRN6jpb33ntPj8R+7bXXJC4urrTFAgAQlFQGk+zsbH1/3bp1ct9990lWVpbz+c8++0xeeOEFeeedd2TIkCEGawoAAAAAKA5tPAAAyhijpYNKqVOFq0C0GmWtgtezZ8+Wfv366cfDw8OlV69ecvvtt8vbb78tGzZsEIvFIlOnTpX4+Hhf1B0AgKCTlJTkvD9+/Hjd+37jxo2SmZkp+/fvl1tuuUXfHzZsmGzdutVoXQEAAAAARaONBwBAGSNVeFApdeD6m2++0QFpx6hrh4Ipw9u3b68D2wcPHpQXX3yxtMUCABCUUlJSnPcrVaokX3zxhXTq1El3CGvRooUsWLBAH1NVj/2nn37aaF0BAAAAAEWjjQcAAFCOqcJPnjyp52PJOw+L1WrV87cUdOONN0pYWJgsWbJEnnrqqdIWDQBA0ImMjHTeHz58uFSvXj3f8+oYO3nyZN0bX82TZrPZ9GMFJSQk6HnTPDV0+HAZOWp0KWsPAAAAAPCHNt7wESNk9GjaeACACsDGaOlgUurAteopqEZc5xUTEyN//PGHTnOjgtoOKmit1j9y5EhpiwUAICjlvYhxwQUXuFzH8fj58+flzJkzUrt2bZfp6LZs2eJxuX3+O9UHAAAAACDw23j9+vcvUX0BAAACOnBdv3592bNnj+Tk5Eho6J+7a968uZ6T5YcffpDu3bs71/31118lOTlZB68BAEBhbdq0cd5XqeOK67GveuO7EhcXJ/Hx8R6XGxsb61U9AQAAAADFo40HAEDZsjA/dVApdeBa9QjctWuX/Pjjj9KhQwf9WM+ePXUPQJUO/JNPPtEnX1lZWTJhwgT9/EUXXVT6mgMAEIRUhzA1/YbKTvLzzz+7XOfgwYP6Vh1fa9as6XKdcePG6cVT59PSS1hjAAAAAIC/tfHSXUzjCAAA4O8KT5jipT59+ojdbpdPP/3U+dj48eN1ivCvvvpKGjRoIN26ddMnaR9++KFOK37fffeVtlgAAILWrbfeqm///e9/64wmBb399tv69oorrnBmOwEAAAAA+CfaeAAAlCE14toXC4IjcH3TTTfJtGnTpF69es7HmjZtKu+9956e6/rs2bPy3Xff6flZVND64YcfljvuuKO0xQIAELT++te/StWqVeXQoUO6s1fGf3vKq45ir732mu4spo6pjz76qOmqAgAAAACKQRsPAIAyZLf7ZoFfsNjVGVIZUUHrzz//XI4dO6ZPztTo7BYtWkggyUw9b6TcbIu53pVhtixjZWdbXc/1Ux5CrBZjZWfmmOvNE2rodecaPA6kG3y/o8NK3V+oxLIMvukmv19VK0dJIFq5cqUMGDBA0tPT9TG0VatW8ssvv0hSUpK+oPHCCy/oix++YjJVeHiOubKT7RHGyq4mZl53ZlhlMSUjx9zvUGWDv79WMfe6rVmpxsq2hZv7XzP1r5ZjM/dZV4uuZKzsQJORZvB7YQkxUq7F3KmY0e9FmMnXbbC9Y/KSm6mPOzzE3Idt8hpnVq6tQrbxYirRxvP3VOHTVvyZ+tyE6T3rGyvbkplirOxDtirGym4aYeZast1q7vr5sQwz53Sm27YGT+ukRpS599yalWak3IgqNYyUG4iyTx7yyX7C6jb1yX5QOmX6616jRg258847nX8nJydLfHy8PhnbvHlzWRYNAEBA6927t2zfvl2eeeYZfYFj27Zt+uKGutDxwAMP6BRyAAAAAIDAQBsPAIAyQprvoFKu3ZLUHC7qpEwFrgEAQNFatmwp77zzjulqAAAAAAB8gDYeAABA0czl0wAAAAAAAAAAAACAErIw4jqomJsQwUsLFy6UsWPHyiWXXCL16tWTiIgIiYmJ0anHp06dKmfOnDFdRQAAAACAF2jnAQAAAACAgBtx/fTTT+t5YNSFjLi4OLn44ovl1Kl
TsnXrVr0kJibKl19+Ke3atTNdVQAAAACAB2jnAQAAAABKhRHXQSVgRlyPHz9evv76azl//rwcOnRIfvjhBzly5Ijs2LFD/vKXv+iLG7fffrvpagIAAAAAPEQ7DwAAAAAABFzgesyYMXL55ZdLWFhYvscvuugieeutt/T93bt3y08//WSohgAAAAAAb9DOAwAAAACUesS1Lxb4hYBJFV6UCy64wHk/LS3NaF0AAAAAAKVHOw8AAAAAUCyCzkElYEZcF2X9+vX6Njo6Wlq3bm26OgAAAACAUqKdBwAAAABAxeL1iOuQkBDxBzabTU6cOCFffvmlPPLII/qx5557Tl/UAAAAAAAEHtp5AAAAAABvWBhxXbED13a7XUx65ZVXZPLkyfke69y5s7z77rvSr18/Y/UCAAAAAJQM7TwAAAAAAOB14HratGliUv369aVbt26Sk5MjR48e1b3xt23bJvPmzZPLLrtMqlWrZrR+AAAAAADv0M4DAAAAAJSIjRHXwcRiNz2EupR27Ngh9913n6xbt07at28vmzZtKjKdeUJCgiQmJnq8/xHDhsqYUSOlvGVbvO5T4DNhtixjZWdbw42VHWK1GCs7M8fcD2uoodeda/CXJ93g+x0dZjVWdpbBN93k96tq5ShjZQeS82npxsoOzzFXdrI9wljZ1cTM684MqyymZOSY+x2qbPD31yrmXrc1K9VY2bZwc/9rpv7VcmzmPutq0ZUkmHjTzvO6jTd8mIweNUpMsFnMTL1lMXcqZvR7EWbydRts75i8yGPq4w4PMfdhm7yqlpVrq5BtvJhKtPE8kZ6RYazsaSsOGit7es/6xsq2ZKYYK/uQrYqxsptGmLmWbLeau35+LCOkQrZtDZ7WSY0oc++5NSvNSLkRVWoYKTcQ5R7Z7pP9hDRu55P9oHTM/br7yMUXXyxLly6VZs2a6R7577//vtxxxx1u109KSpItW7Z4vP/+ffv4qKYAAPgvk8Hj7FBzF55CDHboSLeaCeplGbySHhVqqZAXdqPs5joFpoeaCx6nZ5l7z8MMXUyPyflDzAmuwLU37Tyv23j9+vqwpgAA+Kfwwz8YK3v6lRcbKzvL4KCYlNAwY2XXNdjWSp3/jJFyw4aZywybnmOujVe7krmQTnJmrrGyLQZ7ip3INvO70thIqQEqsMfnItgC10pMTIxcccUVsnjxYtm8eXORgeu4uDiJj4/3eN+xsbE+qiUAAAAAwNftPNp4AAAAAFCB2UkVHkyCInCtqLnQ8t66M27cOL14KjP1fKnrBgAAAAAom3aet228jDRzKfQBAAAAAECQB67Pnj0ra9as0fc7dOhgujoAAAAAgFKinQcAAAAAKI6FEddBxSoB4Ouvv5YZM2bI4cOHCz2n5jLr27evJCcnS/369eXmm282UkcAAAAAgOdo5wEAAAAAgIAbcX3u3DmZOnWqXtR8ZOrCRUhIiBw7dkySkpL0Ouqxzz77TKKjo01XFwAAAABQDNp5AAAAAIBSY8R1UAmIwHXXrl1l1qxZOk3crl27ZN++fZKRkSHVq1eXK6+8Uq6//noZPXq0xMTEmK4qAAAAAMADtPMAAAAAAEDABa7r1KkjkydP1gsAAAAAIPDRzgMAAAAAlBojroNKQMxxDQBARaHm+bRYLB4tI0aMMF1dAAAAAEARaOMBAFDGbLm+WeAXAmLENQAAFUVkZKR069bN7fMqhermzZudKVYBAAAAAP6LNh4AAIDnCFwDAOBHYmNjZf369W6ff/fdd2X48OESFRUlt956a7nWDQAAAADgHdp4AACULbuNVOHBhFThAAAEkLlz5+rbQYMGSZUqVUxXBwAAAABQCrTxAACAJ7Zu3ao7ualOcSqjS7NmzWTixIly+vRpr/dlt9vl22+/lUcffVS6d+8uNWvWlLCwMKldu7b06dNH/v3vf+t13Clu+hNVx5JixDUAAAE0N9rXX3+t76se+QAAAACAwEUbDwAAH6gA81MvWbJEbrvtNsn
OzpY6depI27ZtZe/evfLaa6/JwoULdXYXFcj21KpVq6R3797Ov9W2TZs2lUOHDsmKFSv0smDBAlm8eLFERES43c8ll1zi8nkVCC8pRlwDABAgVAo51dOtUaNG0qtXL9PVAQAAAACUAm08AABQnOPHj8tdd92lg9ZTp07Vf2/evFnf9uvXT5KSkvRI7KJGSBek1lWB6ldffVVOnjwpBw8elE2bNsmZM2dk3rx5Ohi9dOlSeeKJJ4rcjyNoXnD5+OOPS/x6CVwDABAA1MmEuqihDB06VKxWDuEAAAAAEKho4wEA4MMR175Y/NSLL74oaWlpcvnll8tTTz0loaF/JtOuWrWqvPfee/pWBZ0/++wzj/fZuXNnPWJ7woQJegR3XipI7ghYz5kzR2zlPIc4Z0QAAAQAlT5OpWpRSCEHAAAAAIGNNh4AAL5hz831yeKvFi1apG/Hjh1b6Lnq1avLzTffrO9/8MEHHu+zSpUqek5rd/r3769vz549W6I5tEuDOa4BAAgAc+fO1bc9evSQ5s2bm64OAAAAAKAUaOMBAIDiHDt2TKcEV9SIa1fUuYQaGb1hwwaflZuenu68HxUV5Xa96dOny6+//io5OTlSv359PfWJSlte1LzYxSFwDQCAn0tJSXH2rKMnPgAAAAAENtp4AAD4UDmnsi5P+/bt07fh4eHSoEEDl+s4OsD9/PPPeh7sokZSe2rBggX6tl27dnp0tjtvv/12vr/VNCjTpk2TxYsXS3x8fInKJnANAICfUxc0UlNTpVKlSs7UL8VJSEiQxMREj8sYcdcdMmYkF0wAAAAAIBjaeKMHXCljb72hFLUEAKBi8fZY60jfPW7cuDKr09mzZ50pwS0Wi8t1atSooW/VXNR//PGH1KxZs1Rlbt68Wd544w19/9FHH3W5zg033KDnwlaBbRVQV53yVq5cKX/72990AL1Pnz6ydetWadiwodflE7gGACBAUsgNHjxYYmJiPNomKSlJtmzZ4nEZ/fv0LnH9AAAAAAD+1cZL6nJxiesHAEBAsflmfmpvj7WObcpSRkaGc8S1O3nTcudN8V0SJ0+elEGDBunU3zfeeKPcdtttLtf76KOP8v0dGRmp1+3du7d07NhRjh49Kk8++aROYe4tAtcAAPixQ4cOydq1a71OIRcXF+dVOpbYunVLVD8AAAAAgP+18eJql260FQAAFY23x1rHNu5MmjRJXn31Va/rccUVV8iaNWucAWElKyvL7fqZmZkezUddnOTkZOnfv78OOqvgs6OjnTdq1aolU6ZMkXvuuUc+/PBDefPNN92OFHeHwDUAAH5MzQtit9ulSZMm0rNnT4+3UylqvElTk/nHn2lnAAAAAACB38bL3bOuhDUEACCw2H004trbY21xoqOjS5S2u2rVqs77KkW4cu7cOX3+4CoI7EgnbrVai5yPuigq1Xe/fv10eu+2bdvK8uXLS7yvrl27OuulFm/fAwLXAAD4KXUyMm/ePH1/2LBhXvdOAwAAAAD4D9p4AACUAZtN/NGMGTP0UhqtWrVyjrg+duyYNGrUqNA6Bw8e1LdNmzaVsLAwr8tIS0uTa6+9VjZs2CAtW7bUc1WXZp7svGnNVcpxb1lLXDIAAChTX3/9tU4jpy5mqIsaAAAAAIDARRsPAAB4o1GjRlKvXj19f90619lUHI936dKlRHNoDxgwQE9j0rhxY/nqq68kNja2VHXeuXOnM815SQLgBK4BAPBTjnlELr/8ct1jDgAAAAAQuGjjAQBQNqnCfbH4q5tuuknfJiYmFnpOpRBfuHChvn/zzTd7td/s7Gy9bxWsrl+/vqxatUoaNmxYqrqqEdYzZ87U93v16iWhod4n/iZwDQCAH1/UUKnk1qxZY7oqAAAAAIBSoo0HAAC89dBDD0lUVJQeFf3EE09Ibu6fQfbk5GS5/fbb9W2HDh3k+uuvL7Rt9+7dpUmTJvLKK6/ke1ztQ237+eef6xHWKmjdrFkzj+rz6KOPyrvvviv
nz5/P97hKZT548GCdclwFrFVdS4I5rgEAAAAAAAAAAAAEHj8eLe0LDRs2lHnz5smQIUNk+vTpkpCQoB/bs2ePpKamSt26deWDDz7QU5EU9Msvv8iRI0fk999/z/e4Wn/RokXOlN4jR450W/7s2bN1YNxBlfv888/LqFGjdLC7Ro0aOni+d+9e3UFP7W/OnDly6aWXluj1ErgGAAAAAAAAAAAAAD80ePBgHSR+9tln9cjrH3/8Uc99PWLECJk6darUqVPHq/1lZmY67x8+fFgv7qigdF733HOPHqW9adMmOX78uN42IiJC2rZtK71795b77rtPmjdvLiVF4BoAAAAAAAAAAABA4LHZpCKIj493zmftKXcB6eHDh+ulJPr27auXskLgGgAAAAAAAAAAAEDAsf93zmcEB6vpCgAAAAAAAAAAAAAAKjZGXAMAAAAAAAAAAAAIPDZGXAcTRlwDAAAAAAAAAAAAAIxixDUAAAAAAAAAAACAwMOI66DCiGsAAAAAAAAAAAAAgFGMuAYAAAAAAAAAAAAQcOw2m+kqwIcIXAMAAAAAAAAAAAAIPKQKDyoErouRnGMmm3pMhEVMSc4292+RkWXuB6ZymLnM+eEh5j7v9By7kXINvt1Sw55qrOxMiTFWtplP+k+R9iyDpUcZLDtwnMwJN1Z2XPoJY2WHVa5hrOxsi5n3vEqIuWOtzRpmrOyI1NPGyj4ZYu7/LDzEXK/jKuEhxsoOyU4zUm5uVDUj5cI7GTZz594Wi5kzMrvd3JmgzeRJqMF2Vq7BF55tcMCJqbatNTtDTMkOiZCKKCvX5Jcbnvg+8kJjZcdmmLvo0yDM3DWfyMzzxsq2WasaK/vY4L8ZKbfZL1vElLbGShbJDW1orOycyFrGyl595A9jZV8Va+qcMtpQuYBZBK4BAAAAAAAAAAAABB5GXAcVg2MeAQAAAAAAAAAAAABgxDUAAAAAAAAAAACAAGS3GZwrBz7HiGsAAAAAAAAAAAAAgFGMuAYAAAAAAAAAAAAQeJjjOqgQuAYAAAAAAAAAAAAQeAhcBxVShQMAAAAAAAAAAAAAjGLENQAAAAAAAAAAAICAY89lxHUwYcQ1AAB+6MyZM/LYY4/JxRdfLNHR0RIeHi4NGjSQW265RdavX2+6egAAAAAAL9DGAwAAKB6BawAA/Mz+/fvloosukmeffVZ27doldevWlbZt28off/whCxculMsvv1xefvll09UEAAAAAHiANh4AAGXIZvPNAr9A4BoAAD9z9913S1JSkrRs2VJ+/PFHOXjwoGzdulVOnTolDz74oNjtdnn44Yf1xQ8AAAAAgH+jjQcAAOAZAtcAAPiR8+fPy+rVq/X9F198US688ELnc5GRkfqxFi1aSE5OjixfvtxgTQEAAAAAxaGNBwBAGbPl+maBXyBwDQCAH8nMzNS97ZXmzZsXet5isTgfz87OLvf6AQAAAAA8RxsPAICyZbfl+mSBfyBwDQCAH6lVq5Y0aNBA3//2228LPZ+amirbtm3T9zt37lzu9QMAAAAAeI42HgAAgOcIXAMA4Geee+453ev+oYcekjlz5siJEyckLS1NNm7cKAMGDJCTJ0/KnXfeKd26dTNdVQAAAABAMWjjAQBQduw2m08W+IdQ0xUAAAD53XHHHVK1alWZMWOGjBkzJt9zcXFx8q9//UvGjRtnrH4AAAAAAM/RxgMAAPAMI64BAPBDBw4ckFOnTonVapUmTZrIxRdfLJUqVZKkpCSZO3eu7Nq1y3QVAQAAAAAeoo0HAEDZsOfafLLAPzDiGgAAPzN+/Hh5/fXXpVOnTrJs2TJp1aqVfjw9PV2mTZsmL774ok4ht2PHDmncuLHLfSQkJEhiYqLHZQ6+Y5jcMXykz14DAAAAAMBcG++qm26XgbcP99lrAAAAKA8ErgEA8CPqQoVKExcWFiYLFy7Md9EiKipKXnjhBdmyZYt89dVX8uyzz8obb7zhcj+q175az1OX9+7rk/oDAAA
AAMy38dr3uMon9QcAwN8xWjq4ELgGAMCPrF+/Xux2u7Rs2dJtT/s+ffroixqbNm1yux81T1p8fLzH5dapG1ui+gIAAAAA/K+NV7NO3RLVFwCAQGO3EbgOJgSuAQDwI+fPn/d43YyMDLfPjRs3Ti+eOno2xeN1AQAAAAD+3cb79vAZj9cFAADwF1bTFQAAAP/jmOts//79cuTIEZfrfPnll/q2devW5Vo3AAAAAIB3aOMBAFD2qcJ9scA/ELgGAMCPqBRxderUkezsbLn55ptl3759zufS09Pl4Ycf1inklKFDhxqsKQAAAACgOLTxAAAAPEeqcAAA/EjlypXl3//+twwcOFB++OEHueCCC/Q8aDExMXLgwAFJS0vT640fP15uuOEG09UFAAAAABSBNh4AAGWL0dLBhRHXAAD4md69e8uOHTvkvvvu02nlTpw4IT/99JNUrVpVX8j47LPP5B//+IfpagIAAAAAPEAbDwAAwDOMuAYAwA81a9ZMZs+ebboaAAAAAAAfoI0HAEDZsOXmmq4CfCigR1x//vnnYrFY9NKkSRPT1QEAAAAAlAJtPAAAAACAN+w2m08W+IeADVynpKTIPffcY7oaAAAAAAAfoI0HAAAAAEDFFrCB68cee0yOHj2q54EBAAAAAAQ22ngAAAAAAG/Zc20+WeAfAjJwvWHDBvnnP/+pL2gMHDjQdHUAAAAAAKVAGw8AAAAAAPe2bt0qt956q8TGxkpkZKQ0a9ZMJk6cKKdPn5aS+Pvf/+6cqsvd8sYbb7jdPjs7W1588UVp166dVK5cWapXry5XXnmlLFmyREojVAKMeiPGjBkjlSpVkn/84x+ycuVK01UCAAAAAJQQbTwAAAAAQElVhNHSS5Yskdtuu023n+vUqSNt27aVvXv3ymuvvSYLFy6U9evX60B2Saj9tWzZ0uVzcXFxLh/PyMiQq6++WpcbEhKi65Oamipr1qzRyyOPPCLPPfdcxQhcP/vss7Jz5055+eWXpUGDBqarAwAAAAAoBdp4AAAAAAC4dvz4cbnrrrt00Hrq1KnyxBNPSGhoqCQnJ+tg9rJly/RI7I0bN+pR0t7q37+/zJ0716ttVGBaBa2bNm0qX3zxhbRu3Vo//sknn8gtt9wizz//vHTr1k2uv/764E4V/tNPP8kzzzwj8fHxcv/995uuDgAAAACgFGjjAQAAAABKw26z+WTxVy+++KKkpaXJ5ZdfLk899ZQOWitVq1aV9957T99u2rRJPvvss3Kpz8mTJ50pxN966y1n0FoZMGCAPPzww85U5CURMIFru92u08epHgUJCQl66DkAAAAAIDDRxgMAAAAAlJYt1+aTxV8tWrRI344dO7bQc2pe6Ztvvlnf/+CDD8qlPmpUdVZWlk4vrua0LmjcuHH6dsuWLXLw4MHgDVz/61//km+++Ubuu+8+ueSSS0xXBwAAAABQCrTxAAAAAABw79ixYzpVuKJGXLvSo0cPfbthw4YSlbF9+3a5/fbbpVevXnLDDTfodOS7du1yu76jHEe5BdWvX1+nEC9pnQJijmv1oUyZMkW/2BkzZpiuDgAAAACgFGjjAQAAAAB8we7Ho6VLa9++ffo2PDxcGjRo4HKd5s2b69uff/5ZZzQLCwvzqoxt27bpJe+I6qefflomTpwoL730UqHsaI46Ocp1V6dDhw7J3r17JSgD12qusz/++EPeeecdiYmJKdW+VAq6xMREj9cfctcwGTpiVKnKBAAAAAD4RxvvzmHDZcRI2ngAAAAAgJK3LR3pux2pscvC2bNnnSnBLRaLy3Vq1Kihb202m25n16xZ06N916tXT8+Z3bdvX2nWrJlum6ug9Ouvv67nsH7llVd0EPyFF15wWSdHuUXV6dy5cxKUgWuVB12599579ZJXenq6c7h8bGysvr9kyRLp2rWry30lJSU59+eJXlf3LUXNAQAIDFUjzM0raouoZazsbIu5U6ENx1OMlNu9fiUxJSTjD2Nl78mpaqz
sk7//eb5qwgW1zH3eoRm/GyvbFlG6QGhJpWWb6+UdFSkBxWQb7+q+/UpRcwAAAkPHWubaOulirn2ZbDN3/htdNdpY2WG/eT+HqK9UiWlipNzMRh3FlOPns42VXSPS3PerUpi5mWf7hh4yVna2tZWxslG+I669bVs6tilLGRkZzhHX7kRERBRqT3vC1ZzZF110kZ7WS6X6fuSRR+Tll1/WbfYmTZqUqE7e1CegAtcOJ0+edPuc6kngeF5NCu5OXFycxMfHe1xm3f9eKAEAAAAA+BZtPAAAAACAP/C2benYxp1JkybJq6++6nU9rrjiClmzZo2+HxkZWWybODMz03k/KipKfOHBBx/Udf/111916vAJEyY4n/OmTiWpT0AErg8fPuz2ublz58qIESOkcePGRa7noIbsezNs/1RyqsfrAgAAAAD8u433e0qax+sCAAAAAPyb3eabEdfeti2LEx0d7XHa7ryqVv1f1kCVItyRcttut7tMF+5I3W21WqVKlSriC2pe60svvVQ+/PBD2b9/f77nHHVylFtcivOgDFwDAAAAAAAAAAAAQFmkCve1GTNm6KU0WrVq5RzdrKbTatSoUaF1Dh78c7oIld5bzUntK45U4Dk5OYXq9M0338iBAwfcbuuok6P+3jA3KQEAAAAAAAAAAAAAoBAVqK5Xr56+v27dusIr5Hm8S5cu4ks7d+7Utw0aNMj3+GWXXaZv169f73K748ePy6FDh/Kt6w0C1wAAAAAAAAAAAAACcsS1LxZ/ddNNN+nbxMTEQs+pFOILFy7U92+++Waflbl06VLZtWuXvt+nT598z91www16ZLdKIb569epC2yYkJOjbDh06SIsWLSpe4Hr48OE6r7snc58BAAAAAPwbbTwAAAAAAP700EMPSVRUlKxdu1aeeOIJyc3N1Y8nJyfL7bffrm9VkPj666+Xgrp37y5NmjSRV155Jd/jKiit5vPevn17vsdtNpssWLBA71e57rrrpFOnTvnWqVu3rnMu8FGjRsnevXudz3366afywgsv6PvTpk2TkmCOawAAAAAAAAAAAAABRwVbg1nDhg1l3rx5MmTIEJk+fboe0awe27Nnj6SmpupA8gcffCAWi6XQtr/88oscOXJEfv/993yPZ2dn6xHcaqlRo4Y0btxYQkND9bzVahS30qNHD5k/f77LOqng9ObNm+W7776Ttm3byl/+8hdJSUlxzm394IMP6pHZJRHwI64BAAhG6qTjueeek/j4eImJiZHo6Ghp3769PinIysoyXT0AAAAAgBdo4wEAgJIaPHiwfP/99/pW+fHHH6V27dpy3333yY4dO7xOya1GYc+YMUOuvfZaqVatmg5Yb9u2TcLDw6V///46YK3SgKvnXFEjwNesWaPPbS688ELZt2+f/Pbbb3LFFVfIokWL5KWXXirxa7XYVQ42uHUqOdVIuTERIWLK+cw/0wyYkJFr7t+xcpi5fhzhIYV7wpSXjBwz77nBt1sq5aQYKzszPMZY2VkGv1/RlmxjZUdEV5VAc+rUKbnqqqtk586dYrVa9cE/JCRE/61SwVxyySWyatUqfbHDV5JT0322L29FSo6xsrMt5pLPbDhu5reoe/1KYoo1y8x5lbI3PcJY2SdTzF2IvKCWuc+7tpw3VrYtwszxNjXX3DldzRhzn3Wg+T0lzVjZrnrAlweTzX6bwSsOYQbbWbkGX3i2wQEnptq2kbZMMSU7xNw5Ro7B/zODzcuAPOaZaONlpiSLKekSViF/A6PDzV3sCvvtzxFuJpyNaVLhrp8fP2/uOleNSHOvO8rgBd2IX7YZKzu7Tisj5UZUqWGk3EB06K93+WQ/TV9yPboY5YsR1wAA+JmhQ4fqCxht2rTRc4SoHnSqx5tKtdKuXTvZtGmTjB8/3nQ1AQAAAAAeoI0HAEDZsefm+mSBfyBwDQCAH1EXMJYvX67vv/XWW/nSvKi5Rt59913dQ////u//9DwmAAAAAAD/RRsPAADAcwS
uAQDwI+vXr9e39evXl65duxZ6XvXGV730VcrP//znPwZqCAAAAADwFG08AADKlt1m88kC/0DgGgAAP3L27FnnRQ13GjRooG+//fbbcqsXAAAAAMB7tPEAAAA8F+rFugAAoIxVq1ZN3x4/ftztOr/88ou+JY0cAAAAAPg32ngAAJQtey6jpYMJI64BAPAjnTp1cl7U2LBhg8v50fbu3avvnzt3rtzrBwAAAADwHG08AAAAzxG4BgDAj3Tu3Nl5YWP48OGyc+dO53P79++XO+64Q3Jzc/XfaWlpxuoJAAAAACgebTwAAMp+xLUvFvgHAtcAAPiZf//731KvXj3d6/7iiy+WFi1aSOvWraVNmzb6wsadd96p14uJiTFdVQAAAABAMWjjAQBQdmy5Np8s8A/McQ0AgJ9p2bKlbN26VZ5//nn55JNP5NixY1K5cmW58cYb5cknn5TPPvtMrxcbG+t2HwkJCZKYmOhxmXcNGy4jRo32Sf0BAAAAAGbbeCOG3SVjRo7wSf0BAADKC4FrAAD8UJ06dWTmzJl6KUhd7FAc6eZcSUpKki1btnhc3tV9+5WwpgAAAAAAf2vj9e97dQlrCgBAYLHbGC0dTAhcAwAQQLKzs2XZsmX6/g033OB2vbi4OImPj/d4v0X17AcAAAAAlA3aeAAAAP9D4BoAgACieuefPn1amjVrJgMGDHC73rhx4/TiqeTUdB/VEAAAAABguo2XmZLsoxoCAODf7MxPHVSspisAAADyW79+vSxfvlxyc3Odj6Wnp8uzzz4rf/vb3yQkJETmzJkjYWFhRusJAAAAACgebTwAAADPMOIaAAA/s2nTJpk8ebJUqlRJmjZtKuHh4bJ3715JS0vTj82dO1euvPJK09UEAAAAAHiANh4AAGXHnms3XQX4EIFrAAD8TM+ePWXEiBHy7bffytGjRyUnJ0caNmwo/fv31xc7GjdubLqKAAAAAAAP0cYDAKDs2EgVHlQIXAMA4Gfat28vb7/9tulqAAAAAAB8gDYeAACAZwhcAwAAAAAAAAAAAAg4dhupwoOJ1XQFAAAAAAAAAAAAAAAVGyOuAQAAAAAAAAAAAAQcWy4jroMJI64BAAAAAAAAAAAAAEYx4hoAAAAAAAAAAABAwLHn2kxXAT5E4BoAAAAAAAAAAABAwLGTKjyokCocAAAAAAAAAAAAAGAUI64BAAAAAAAAAAAABBwbI66DCiOuAQAAAAAAAAAAAABGMeIaAAAAAAAAAAAAQMCx59pMVwE+xIhrAAAAAAAAAAAAAIBRjLguRpUwM+WeSssxU7CIVI0IMVa2xWJuLoLwEIuxss2VLGI1VLjFYu5Vp4REGyvb0E+Klm0z9/3KCgs3VnaEsZIDS4TBrmzW5FPGyt6ZWd1Y2ZefWm2k3J+r9BNTwkOijJXdsqq5f/LKYebK/um3NGNlV61f1VjZYXYz59IpWebOb2oaKznwmDwPzTF0PmawqWOsvaFkGZzrzmQbz+Bhx9j/+DmbuZZWNYNX1sxdOUIgOJFp7vpevewkY2VnV61vrOztJ82de8dFNzZWdm62mZGOJ1LN/QpeEHrOWNmWlGxjZR8PjzNWdlyNRsbKDjl/0kzBVWqYKTcA2Qxe94bvEbgGAAAAAAAAAAAAEHDsBjuswvdIFQ4AAAAAAAAAAAAAMIoR1wAAAAAAAAAAAAACji3XzJQFKBuMuAYAAAAAAAAAAAAAGMWIawAAAAAAAAAAAAABhzmugwsjrgEAAAAAAAAAAAAARjHiGgAAAAAAAAAAAEDAYcR1cGHENQAAZeDEiRMyf/58mTBhgnTp0kWioqLEYrFIz549i902OztbXnzxRWnXrp1UrlxZqlevLldeeaUsWbKkXOoOAAAAAMiPNh4AAP7JlmvzyQL/wIhrAADKwPvvvy+TJ0/2eruMjAy5+uqrZf369RISEiJt27aV1NRUWbNmjV4eeeQRee6558qkzgAAAAAA12jjAQAAlD1GXAMAUAaqVKk
ivXv3lilTpuhe9FOnTvVoO3XRQl3QaNq0qezatUu2b98uBw4ckI8//lgiIiLk+eefl08//bTM6w8AAAAA+B/aeAAA+Ce7ze6TBf6BwDUAAGVg5MiRsmLFCnnmmWfkxhtvlDp16hS7zcmTJ+WNN97Q99966y1p3bq187kBAwbIww8/rO///e9/L8OaAwAAAAAKoo0HAABM2rp1q9x6660SGxsrkZGR0qxZM5k4caKcPn3a630dPnxYT3niyTJixIhC2zdp0qTY7VTWmZIgVTgAAH7ik08+kaysLGnZsqWe76ygcePGyfTp/9/enUBHUaUNH38SQkJCElaBQAhhlW1AAiiISABl+UB4RVCWUcAFnBHFkUHUGYHDMgLKIOqcIfl0QP1GkPDiiMwMMzCyCYhDgiA7hEUMASEsgSyQpb5zr6dbQjohnVR1dbr/v3OKarqq+qnuVPq5N/fWvbMlJSVFUlNTpXnz5racJwAAAADg9qjjAQBgvcIC379bevXq1TJy5EjJy8vTnefU1COHDx+Wd955R5KSkvToLqohu6xUw3ePHj1K3K4anZOTk/Xje++9t8T92rdvLzVq1HC5LTCwfPdO03ANAICX+Prrr/W6Z8+eLrc3atRIDy934sQJvS9/1AAAAAAA70UdDwAAVFRaWpo8/vjjutFaTVUyffp0CQoKkitXrujG7HXr1uk7sb/55ht9p3NZqLu2VWN3ST788EMZN26chIaG6tcuybvvvivx8fFiJoYKBwDASxw5ckSvS/tjhWOb6lEHAAAAAPBe1PEAALCeUVBoyuKt3nzzTcnOzpb7779fZs2apRutFXWn8yeffKLXu3btkrVr15oWc9myZXo9bNgwiYyMFE+i4RoAAC9x8eJFva5du3aJ+zi2Xbp0yWPnBQAAAABwH3U8AACsZxQYpizeatWqVXo9YcKEYttq1aolI0aM0I9XrlxpSjw1//XmzZv1Y3XXtafRcA0AgJdQc4cowcHBJe4TEhKi1zk5OR47LwAAAACA+6jjAQCAijh9+rQeKlxRd1y74piSxDFFSUWpYcINw5CYmBjp06dPqfsuWbJEBg8eLH379pUxY8bo/1+9erVC8ZnjGgAAL1GtWjW9vnHjRon7XL9+Xa/V/CIAAAAAAO9FHQ8AAOsVevHd0mZNOxIcHCzR0dGlTjty/PhxPQ921apVyx1PNVirhmvliSeekMDA0u9//vTTT4v8Xw1drubhVusHH3ywXOdAwzUAAF5CDe1y83Byrji2OfYtSUJCgiQmJpY59vhx4+Tpp58u8/4AAAAAAO+t4z0yeqyMGfdkmfcHAMDfuZtrHcN3T5w40bJzunhTOSEgIKDUaUcKCwslMzNT6tSpU+54aojwEydO3HaY8Pj4eH2XddeuXfWd2aqT3ldffSXTp0+X3bt3y5AhQ2Tbtm0SFxfn9jnQcA0AgJdo1aqVTujHjh0rcZ/U1FTnvqVJT0+XlJSUMsceOGCAG2cKAAAAAPDmOl7Pvv3dOFMAACovo7DQlNdxN9c6jvGWaUfMmHpk2bJlzuHHHXdyl7afQ1hYmHPI8Pvuu09/ji+//LJs2LDB7XOg4RoAAC/RrVs3Wbp0qe6d5oqaz8TR403tW5qoqCi3erQ1aNDAzbMFAAAAAHhrHa9efep4AAC4w91c6zimJC+++KIsXrzY7fPo1auXbNq0ye1pRyo69ci1a9dk1apVt73bujQq/ty5c2XgwIGyceNGuXTp0m1HlbkVDdcAAHiJoUOHyqRJk+To0aM6sffu3bvYcDVKp06dpEWLFqW+lhqixp1hanIr2BsPAAAAAOA9dbxTGdfKedYAAPjnHNfu5trbCQ8PL9ew3TVq1HA+djT6qgZgNf+0q+HCHcOJq/moIyMjy32+qtE6KytL3z09YsSIcr/Ovffe6xy6XM273blzZ7eOL31WbQAA4DH169d3Fo6eeuopOXz4sHPbF198IQsWLNCPZ8yYYds5AgAAAADKhjoeAAD
WMwoMUxazzZkzRy5cuOD28vnnnztfwzGViLrj+vTp06VOO9K0aVOpWrVquc/XMfz38OHDJSIiotyvc/Ow5vn5+W4fzx3XAABYQBUkVK/5W+cjUfOb1a1b1/m8mutDLQ7qDxfJycmyY8cOadeunbRv314P0+IogEyZMkX32gcAAAAAeA51PAAA4GkxMTHSsGFDOXPmjGzdulXGjBlTbB/1vNK9e/dyx1FTl2zZsqVCw4Q77Nu3z/k4Ojra7eO54xoAAAsUFBRIRkaGc1HDrDh6md38fHZ2drF5QNQcJvPmzZO2bdvKkSNHdE87NbeJGq7lrbfesukdAQAAAID/oo4HAIB3MgoKTVm81SOPPKLXiYmJxbapIcSTkpL044oM7/3hhx/qochjY2MlPj6+AmcrMn/+fL1W5Z5GjRq5fTx3XAMAYAGV5FWyL+9wKtOmTdMLAAAAAMB+1PEAAIAdpk6dKu+//76+I3r69Ol6ipEqVarIlStXZPTo0XqtRoV56KGHih173333yQ8//CAvvviiXlxR5ZuPPvpIPx47dqzLebRvpjrdhYSE6Ng3z+GtOvC99tprumOeMmvWrHK9XxquAQAAAAAAAAAAAFQ6hRbMT+1NGjdurBuWR40aJbNnz5aEhAT93KFDh/QIMPXr15eVK1e6bHBWjdanTp2Sy5cvl/j6mzdv1kOFq+NVw/XtqNdcvHixTJ48WXfsu+OOOyQnJ0cOHjyoR6IJDAyUN954w3mnuM8OFT5z5kz9oZW2LFmyxO7TBAAAAACUEfU8AAAAAABKN3z4cNm5c6deK999951uMJ40aZLs3btXWrRoUe7XXrZsmV7ff//90rRp09vuP3LkSN1ofc8998j169dlz549kpqaKs2aNZNnnnlGkpOT5eWXXy73+VS6O67r1asnLVu2dLktKirK4+cDAAAAAKgY6nkAAAAAgPIwfPyOa4e4uDjnfNZldfLkyTI1XDsar8uiW7duerFKpWu4HjhwoFsfIAAAAADAu1HPAwAAAAAAla7hGgAAAAAAAAAAAAAKDf+449pf0HANAAAAAAAAAAAAoNIpoOHap1S6hms1yffo0aPl7NmzEhERIR06dNATgbdr187uUwMAAAAAlAP1PAAAAAAAUOkarr/99lu9OKxZs0bmzp0rkydPlrfeekuqVKli6/kBAAAAANxDPQ8AAAAAUB4F3HDtUwKlkmjYsKHMmjVLdu7cKefPn5fc3FzZu3evPPvss2IYhrz99tvy6quv2n2aAAAAAIAyop4HAAAAAAAcAgz114BKbsGCBTJt2jQJCgqSo0ePSmxsrGmvnZudJXb4Mde+H0uNEPvuZrhhY9eYakEBtsW2L7LIdZs+86BA+951QaF911nVKva976y8QttiV69qXz+piLBQ22JXJrk5ObbFrnLljG2xk6/Xsi123A8bbIl7vMUAsUuwjd+BDcPs+x46k23f9+/xS7m2xe7WKNy22FWNfFvins217xpvUse+z7uy1fOuZNmX8/JtKofa+PXrt3de2PmR21jVsu0zz7Oxjlcz2L4yRq6NF7mdv191IsLsC16JnMq4ZlvshnnnbIudV6ORbbH3/phtW+yo8GC/m1v22g376lltgi7ZFjugIM+22GnBUbbFjgrItC12YM4VW+IGNWpjS9zK6NN6bU15ncd+PGDK68BP7rguzZQpU3RP/fz8fD2kHAAAAACgcqOeBwAAAACAf6l0c1y7ouY7u+eee+Szzz7TPfFLk5CQIImJiWV+7fHjxsrTTz1lwlkCAAAAAMyu57lbx3t87DgZ/9TTJp0lAAAAAMBOzHHtW3yi4VoJDv5pOBTVG7806enpkpKSUubXHTigf4XPDQAAb/fJ/vO2xX6olX1DTf2/bcdsi93lnna2xG3+/Saxy/sFHWyLPbF2mm2xzz/3e9ti/58LTW2LveT/zrQt9pDti22Ju6nHi2KXsT44VHhZ63nu1vEe7G/flAkAAHhKmI1Tdl2ual8
dLzDfvuGj7dToRrptsfNrNLQlbmH1qmKX1MzatsXu+9IK22KnfjDGttjZhTVtix1x9og9gRkq3OunLIA1fKbhet++fXodHR1d6n5RUVESFxdX5tdt0KBBhc8NAAAAAGBNPY86HgAAAAAAvsEnGq7//ve/y/79+/Xjfv36lbrvxIkT9VJWudlZFT4/AAAAAIA19Tx363hXsnJMOT8AAAAAgP0YKty32DdmjBvUHyvUHyL27NlT5PnCwkJZvny5jB49Wv9/8ODB0rVrV5vOEgAAAABQVtTzAAAAAABApbvjOi8vTxITE/VSu3ZtadKkiQQFBcmxY8fk0qVLep+ePXvKxx9/bPepAgAAAADKgHoeAAAAAKCimOPat1SKhuvY2FiZM2eO7NixQw4ePKj/kJGbm6v/uDFw4EDdE3/UqFFSpUoVu08VAAAAAFAG1PMAAAAAAECla7iuWbOm/O53v7P7NAAAAAAAJqGeBwAAAACoKOa49i2VouEaAAAAAAAAAAAAAG5Gw7VvCbT7BAAA8EVnz57Vc3K+8MIL0r17dwkNDZWAgACJj48v9bj//ve/smjRIj08asuWLfUxalm2bJnHzh0AAAAAUBR1PAAAAOtxxzUAABZYsWKF/OY3v3H7uGeeeUb27NljyTkBAAAAAMqHOh4AAN6pwOCWa19CwzUAABaIjIyUBx54QLp27aqX3bt3y+zZs297XLNmzaRNmzbO4yZNmiR79+71yDkDAAAAAFyjjgcAAGA9Gq4BALDAk08+qReHtLS0Mh23evXqIv+vWrWq6ecGAAAAAHAPdTwAALwTc1z7Fua4BgAAAAAAAAAAAADYijuuAQAAAAAAAAAAAFQ6zHHtW2i4BgAAAAAAAAAAAFDpMFS4b2GocAAAAAAAAAAAAACArbjjGgAAAAAAAAAAAEClw1DhvoWGawAAfFBCQoIkJiaWef92/YdL/LAxlp4TAAAAAMAzdbxRj4+TsU8+Zek5AQAAmI2GawAAfFB6erqkpKSUef/ozvdbej4AAAAAAM/V8fr0G2Dp+QAA4C2Y49q30HANAIAPioqKkri4uDLvX6NuPUvPBwAAAADguTpe/foNLD0fAAAAK9BwDQCAD5o4caJeyuovu7639HwAAAAAAJ6r453PzLb0fAAA8BbMce1baLgGAAAAAAAAAAAAUOkU2n0CMFWguS8HAAAAAAAAAAAAAIB7aLgGAMACp0+flrp16zqXV155RT+/bdu2Is8vWLCgyHHq/zdv37Nnj37++eefL/K8en0AAAAAgGdQxwMAwHuHCjdjgXdgqHAAACxQUFAgGRkZxZ7Pz88v8nx2dtF5x9T/XR137do1vdz8+gAAAAAAz6COBwAAYD0argEAsEBsbKwY5eipN3PmTL0AAAAAALwHdTwAALxTATdL+xSGCgcAAAAAAAAAAAAA2Io7rgEAAAAAAAAAAABUOsxP7VtouAYAAAAAAAAAAABQ6TBUuG9hqHAAAAAAAAAAAAAA8DKXL1+WlStXytSpUyU+Pl4iIiIkICBAYmNjTXn93bt3y2OPPSYNGjSQatWqSbNmzWTy5Mly/vz5Uo/Ly8uTN998Uzp27CjVq1eXWrVqSe/evWX16tUVOh/uuAYAAAAAAAAAAABQ6fj6UOGbNm3SDctWUI3MI0eO1I3Q9erVk3bt2snhw4flnXfekaSkJPnqq690Q/atcnNz5cEHH9Tbq1Spoo/LysrS56qWadOmybx588p1TtxxDQAAAAAAAAAAAABeJjQ0VO6//36ZMmWKLF++XP70pz+Z8rppaWny+OOP60br119/Xf8/OTlZrwcMGCDp6em6wdxw0TFANUyrRuumTZvK/v37Zc+ePXLs2DH5/PPPJSQkRObPny9ffPFFuc6LhmsAAAAAAAAAAAAAlXKOazMWb9W/f3/ZvHmzvPXWW/ru6JiYGFNeVw3znZ2drRvFZ82aJUFBPw3SXaNGDfnkk0/0eteuXbJ27doix507d06WLFm
iH3/wwQdy5513OrcNGTJEXn75Zf145syZ5TovGq4BAAAAAAAAAAAAwE+sWrVKrydMmFBsm5qvesSIEfqxml/7ZmvWrJEbN25Iy5Yt9ZzWt5o4caJep6SkSGpqqtvnRcM1AAAAAAAAAAAAgEo5x7UZiz85ffq0HhJcUXdcu9KzZ0+9/vrrr4s87/i/Y/utGjVqpIcQd3VsWdBwDQAAAAAAAAAAAKDS8fWhwq1w5MgRvQ4ODpbo6GiX+zRv3lyvjx8/rufBvvVYx/bSjj18+LDb5/bTgOUAAAAAAAAAAAAA4IcSEhIkMTHRrWPUMNuOobErk4sXLzqHBA8ICHC5T+3atfW6sLBQMjMzpU6dOkWOdWwv7dhLly65fW40XN9GtbDq5b7A09PTJSoqqlwXbUxYucKaErsi7Irtj+/ZrNjhNsYuL2KXL3aETXErws7Y/ubJLjF++d3/3iMdbItdXhWOHd3Otti/KndkMz7zWNti37N+oy1xlRvlOqqSX+NKl4W2xB5brqjmxEbZ1age6p+/F8T2+rjEJra7qtkYu7LF9Ud3RIb55e+FnXHrRITZ+J5r2xY7yA+vs9ah1WyLnbZ8gi1xK8KM2OX9xE15323j7YuNMllinDTldWbOnKnnZXaH+hlXRrm5uc47rksSEhLifJyTk1OuY28+rswMWCIuLk4NLKDXxPbduMQmtj/E9sf3DPdwbRKb2L4Vl9j+Fxtl46/Xhz/G9sf3TGxi+3pcuIdr07OITWxfjktscl5lsmTJEv3zcmdRx5Rk8uTJ+hpwd+nVq1ep5/nFF1/o/Zo0aVLu97py5Ur9GvXr1y9xnwMHDjjP6cKFC87n27Ztq5/785//XOKxjz76qN5n0qRJbp8bd1wDAAAAAAAAAAAA8Fvqzngz744PDw93Dq/tjho1aojV1BDhjqG8DcNwOVy4Y0jwwMBAiYyMLHasY/vthiJ3Fw3XAAAAAAAAAAAAAGCSOXPm6MUbtWrVSq9v3Lghp0+flpiY4tNIpqam6nXTpk2latWqRY7dtm2bHDt2rMTXdxzriOOOQLePAAAAAAAAAAAAAABUOjExMdKwYUP9eOvWrS73cTzfvXv3Is9369ZNr7/66iuXx6WlpcmJEyeK7OsOGq4BAAAAAAAAAAAAwE888sgjep2YmFhsmxpCPCkpST8eMWJEkW1Dhw7Vd2AfPXpUNm7cWOzYhIQEve7UqZO0aNHC7fOi4RoAAAAAAAAAAAAAfMjIkSMlNjZWfvvb3xbbNnXqVAkNDZUtW7bI9OnTpaCgQD9/5coVGT16tF6rxueHHnqoyHH169d3zgX+1FNPyeHDh53bvvjiC1mwYIF+PGPGjHKdM3NcAwAAAAAAAAAAAIAXqlu3rvNxXl6eXqu5qW9+ftSoUfLuu+8WOe7s2bNy6tQpuXDhQrHXbNy4sXz00Uf6uNmzZ+s7pdVzhw4dkqysLN1AvXLlSgkICCh2rGqcTk5Olh07dki7du2kffv2cu3aNefc1lOmTNF3ZpcHd1wDAAAAAAAAAAAAgBfKyMhwLpmZmfq5wsLCIs9fvXrV7dcdPny47Ny5U6+V7777Tu644w6ZNGmS7N27t8ShvtWd2ps2bZJ58+ZJ27Zt5ciRI7pxvFevXrJq1Sp56623yv1eueMaAAAAAAAAAAAAALyQYRjlOk41Lt9OXFyccz5rdwQHB8u0adP0YibuuAYAAAAAAAAAAAAA2IqGawAAAAAAAAAAAACArWi4BgAAAAAAAAAAAADYijmuLTJhwgRJT0+XqKgoYvtwXGIT2x9i++N7hnu4NolNbN+KS2z/i42y8dfrwx9j++N7JjaxfT0u3MO16VnEJrYvxyU2OQ9wV4BR3hm9AQAAAAAAAAAAAAAwAUOFAwAAAAAAAAAAAABsRcM1AAAAAAAAAAAAAMBWNFwDAAAAAAAAAAAAAGxFw7XJNm7cKIMHD5Y77rhDQkNDpXXr1vL6669LVlaWZTHPnj0rH3/8sbzwwgv
SvXt3HTcgIEDi4+PFSmp69O3bt8srr7wi9913n9SpU0eqVq2q33u/fv3kr3/9q97HKklJSTJhwgTp0qWLNGzYUEJCQiQiIkLi4uL0Z56RkSGe8o9//EN/5mqJjY21NNbMmTOdsUpalixZYvn7HTZsmPNzb9CggfTo0UN+//vfS35+vqmxTp48edv361jGjx8vVlDX0muvvSYdOnSQ8PBwCQ4OlujoaHn00Uflq6++Eiup74558+bp61pd3yr+XXfdJQsWLJAbN27Y8r2Rl5cnb775pnTs2FGqV68utWrVkt69e8vq1astjf3f//5XFi1aJKNHj5aWLVs6f+7Lli0r8/uGuch55Dwrc5435DuFnOeZnGdVvlPIeago8h35jjqeufnOG3KeL9bx7Mx55DvfQc4j51HHo45nFl/MdxWJTc4D3GDANO+8844REBCgSjRGdHS00alTJyMkJET/v02bNkZGRoYlcRctWqRj3Lr06tXLsNKGDRuKxGvWrJnRuXNno3bt2s7nBg0aZOTm5loSv2PHjjqG+oxjY2ONLl26GDExMc7Y9erVM7799lvDalevXi0St0mTJpbGmzFjhvP99ejRw+Xyt7/9zZLYeXl5xi9/+Uvne23cuLHRtWtX/bMPDg7Wz6nPw0zp6eklvk+1qGvOcT6JiYmG2Y4cOWJERUXp1w8MDNTv9a677jIiIiL0c+p3/o9//KNhhXPnzhnt27d3xlaP1XVfpUoV/Zy65jMzMz36vZGTk2Pcd999el91Hh06dDCaN2/uPH7atGmWxXb8zt+6LF261O33j4oj55HzrM55duY7hZznuZxnZb5TyHmoCPId+Y46nvn5zu6c56t1PDtzHvnON5DzyHnU8ajjmcVX811FYpPzgLKj4doku3bt0l/C6ss+ISHBKCws1M+npaU5E9CwYcMsif3BBx8YDzzwgPHqq68aq1evNl5//XWPFPDWr19vNG3a1Fi8eLFORjf76KOPnIXbl19+2ZL4Kplv3rzZuHHjRpHn9+7d60yMbdu2Naz2/PPP61hDhw716B81xo4da3ja008/rWOrQl1KSkqRbVlZWcbnn39e7OdhtWXLlulzCg0NNa5cuWL66/fp00e/fsuWLY39+/cXKehMmTJFbwsKCtIFQbP1799fv37r1q2No0ePOp8/efKks7Dz+OOPe/R744UXXtD7qd/9Q4cOOZ9XP3vH7/yaNWssif3www8bI0eONBYuXGhs2bJFFy4p4NmDnEfO80TOszPfKeQ8z+U8K/OdQs5DeZHvyHfU8ezJd1bnPF+t49mZ88h3lR85j5xHHY86npl8Nd9VJDY5Dyg7Gq5N4kjuTzzxRLFt6otfFf7U9j179lh+Lu+++65HCngqmZaWzOfOnavPQ/VULCgoMDxp586dzl5LBw4csCzOjh079M9W/fxVkvHlP2p8+eWXOq7qBVqRHnFmi4+P1+c1ZswY019bvU9Hb2NXPT5VRa5FixZ6u/q9M5OqqDiu4W3bthXbrnrdOiqVBw8e9Mj3xtmzZ509UNX1cCtHQS0uLs702K44Ks8U8DyPnFccOc+3/qhBzvNczvN0vlPIeSgr8l1x5DtrUMfzXM7zpzqenTmPfFf5kPOKI+eZjzpecdTxKne+K2tsV8h5QMmY49oE165dk3Xr1unHam6SW6k5C/r06eOcv8RXREZG6rlfSjJw4EC9vnjxopw/f96DZybSpk0b5+Ps7GxLYqj5MJ555hkJCwuT9957T3zdwoUL9XrKlCl6XhJvoOaJ2bx5s348btw401//+vXrzvmMmjdvXmy7mofE8by6HszkmGOmUaNGcu+99xbbruZhUXNNqfP79NNPxRPWrFmj56BR32lq7pdbTZw4Ua9TUlIkNTXVI+cEzyPnuUbO8y3kPM/lPG/Mdwo5D+Q718h3vsUb853VOY86XnHkPJDzXCPn+RZvzHnU8ch3AFyj4doEu3fv1okgJCRE7r77bpf79OzZU6+//vp
r8Rc5OTnOx6GhoR6N7UiQ4eHhcuedd1oS44033pB9+/bJ7NmzJTo6Wjxtz549Mnr0aF15GDp0qLz++uuyf/9+S2Ll5ubKv//9b/34gQcekAMHDsiLL74o/fr1k4ceekimT58up06dEk/78MMPdSEnJibGWYkyU926dZ0/2+3btxfbnpWVJd9++61+XNLvfnmpipGjkFeS0s7NCo7vL8f32a3UuTZt2rTIvvA95DzXyHm+ke8Ucp5nc5435juFnAfynWvkO2tRx7M+51HHK46cB3Kea+Q861DH+wl1PPIdANdouDbBkSNH9FolmZJ66jl6Lx0+fFj8xfLly529qFQvRqsVFhbKmTNnZNmyZc5eavPmzdOFPLMdPHhQ/vCHP0hcXJw8//zzYgdVsFCf8caNG3WPsTlz5sgvfvEL+c1vfiMFBQWmFygdPe+2bt0qnTp1ksWLF8v69etl7dq1upCrCtKOn7knqIKdKuApTzzxhAQGWvN1pq4h1QNx6tSp8v7778vZs2d1b9dvvvlGhgwZIufOnZNf/vKX0qNHD1Pj1qxZU6/T0tJK3OeHH37Q60OHDoknv+tc9dL05+86f0POc42c5xv5TiHneTbneWO+U8h5IN+5Rr6zlr/X8TyV86jjFUXOAznPNXKedajjUccj3wEoDQ3XJvYiql27don7OLZdunRJ/EFycrIsWbJEP37llVcsjfX222/rBFylShXdM2r8+PESGxsr//znP+W5556zpGChhtJRBZ6EhAQd15MaNmwos2bNkp07d+qhilSvwb1798qzzz6rz019Hq+++qqpMdPT052P1WeqCniqgKN65B49elQeffRR/Xjs2LG6p64nqKF0Tpw4YdlwOg5jxozRhWg1TJP6uUdFRUn16tXlnnvu0QX9P//5z/LRRx+ZHrdr167OQp6rXn7fffedsxDlqe8VvuugcB0UR87znXynkPM8m/O8Md8pfNeBa6A48p11qON5NudRxyuK7ztwDRRHzrMGdbyfUccj3wEoGQ3XJlBJVgkODi5xHzXczq3DzPgq1VNr2LBhkp+fLw8//LCMHDnS0niqUKd6hamEq5KvKuypnnsq6V6+fNn0eCqhb9u2TSZNmiRdunQRT1PzDakhdNQQLmrIF3VtqV6J6rzmz5+v91m0aJGeJ8XM+Y4c1Nw3qvCsCiHqmm/RooXukXjXXXfpQu/cuXPFE1QPVMfwLqX1lDPDsWPH5Mcff9S9H1XloUOHDvpzUAVfdR5WDGekfr6Ogp4qwKrhmxxUoVoVPB29UK2a7+hWfNdB4TooipznW/lOIed5Nud5Y75T+K4D10BR5DtrUcfzfM6jjvczvu/ANVAUOc861PF+Rh2PfAegZDRcm6BatWp6fePGjRL3Ub227JgTxdOuXLkiAwcOlO+//146d+7sTMJWGjFihJ77RfXgUkPqqMKdKuypQkfv3r1NHWJG9RRTPf9UoVINY+NtpkyZonsvqsK16k1n9jXuKHDUqlWryHZV8FHD+Shqzhg1vJGVVIFz1apVzvOxkuqJqd6bKlCrnoiqN6QaYujChQt6mB3VS1RVMKyYC+evf/2r/nmqXoiqUKkK02rootatW+uCnhrKR4mIiBBP4LsOCtfBz8h5vpfvFHKe53Oet+U7he86cA38jHxnL3+p43ky51HHK4rvO3AN/IycZx/qeNagjvczvuuAyoOGaxM4kp1juAlXHNtuTYy+RCXdAQMG6CFV2rVrJ//61788MgfMrVQy/Pvf/64TsirsrVixwrTXVvO+ZGZmyjvvvOPRxFpWangfVbhVVCHALDdft2poGVccz1+9elUyMjLESqpwl5WVpXsHqgK+VdRwRarXp5rjKSkpSVq1auXcpgowCxYskL59++pr4o033jA9fsuWLfXv00svvaR7X6r5X1TBUvX43bVrl7Rv317v16BBA/EEvuugcB38hJznm/lOIed5Pud5W75T+K4D18BPyHf285c6nqdyHnW84vi
+A9fAT8h59qKOZz7qeEXxXQdUHjRcm8Dxpa9646khRVxJTU0tsq+vUcN6DBo0SPcOVIlpw4YNUqdOHdvORxW+evXq5ZyXxiwpKSl6/etf/1on1puXyZMn622nT592Prd9+3bxNMdwJ6qHollUb7hbX7+03otW90x09HgdPny4pQVt1eNVzbGjrukmTZq43Kdfv356rQpdVqhXr54sXLhQF9rVkDaq8KwKuKoS5RjKxzH8jtUc319qiKGS+Pp3Hch5CjnPO3KeFflOIefZk/O8Kd8p5DyQ78h33pLv/KWO56mcZ3e+U8h58DbkPHKet+Q86ni+lfPIdwDKi4ZrE3Tq1EknPTWUxDfffONyn61bt+p19+7dxdeoxDNkyBDZsmWLToL/+c9/PNpbqiSOQo7ZhR3HfDe3Lqp3mqNw43iutKFHrOKYNyQ6Otq011RDCDkKOMePHy81sauCnpWFezWkjbrWPDGcjupl6e48KZ6iKpPr1q3Tj4cOHeqRmN26dXMWfEsackr9fG7eF76HnEfO85acZ0W+U8h53pXz7Mh3CjkP5DvynbfkO3+o43ky53lrvlPIebALOY+c5y05jzqef+Q88h2A26Hh2gSqZ1T//v3148TExGLbVa+iL7/80tmTypeoRPPII4/oQp0qBKj32bhxY7tPSw/rsWnTJmcB3CwnT57UPdVcLUuXLtX7qIKQ47n4+HjxJDWUkKPHmqPHnFkee+wx5xwlrgrNf/nLX/Ra9QgNCgoSq3z44Yf6s42NjbX883X0rlO/wyXN9aLmvlHUPC2epHosnj9/Xpo1a6YrWJ6gCpNqeCH1eWzcuLHY9oSEBOfvnJq7Br6JnEfO84acZ2W+U8h53pPz7Mh3CjkP5DvynTfkO3+p43ky53lrvlPIebALOY+c5w05jzqe/+Q88h2A2zJgim+++cYICAjQS0JCglFYWKifP3PmjNG5c2dDfdT/8z//45Fzeffdd3W8Xr16WRonPz/fGD58uI7VoEED4/Dhw4anbNq0yZg9e7Zx4sSJYtuSk5ONLl266PNq1KiRcfXqVY+c09KlS3XMJk2aWBZj3759xoQJE4xvv/22yPMFBQXGJ598YkRGRupzGDx4sOmxf/zxR6NGjRr69SdOnGjk5OTo59W1vnjxYv28uv43btxoWEXFatq0qY41Y8YMw2rXrl0z6tWrp+N17dq1yDWenZ1tTJ06VW9Ty9/+9jfT42/dutVYt26d/l27Oe4f/vAHIzAw0KhSpYrx5ZdfevR7Y9KkSXo/9XM4dOiQ8/k1a9YYISEh5fosyvud5fhuVb978CxyHjnP6pxnZ75TyHmezXmezHcKOQ9lRb4j31HHsz7feTrn+VMdz86cR76rfMh55DzqeNTxzOQv+c6d2Lci5wElo+HaRIsWLdIJTn3hNG7c2OjUqZPzC+/OO+80zp8/b0nc77//3qhTp45zqV69uo4ZFBRU5Pn58+ebGlcVKhzJLTY21ujRo0eJS0pKiqmxP/vsM2dsVbhUX/R33323ERUV5XxeFe52795teIon/qih3o/j/dWuXVtfY6rgUatWLefzPXv2NC5dumRJ/PXr1xuhoaE6jirsqdiOz1xd+2+++aZhJVV4dMQ6fvy44QnqPTt+p1TBShVsOnToYISFhTk/8+eee86y7xT1+ipWu3bt9M/bEVetV65c6fHvDVXI7N69u95XFTI7duxoNG/e3PlZTJkyxbLY6v83b1f7q+PCw8OLPK9eH9Yj55HzrMx5duc7hZznuZxnZb5TyHmoCPId+Y46nrX5zo6c56t1PDtzHvnON5DzyHnU8ajjmcVX811FYpPzgLKj4dpkGzZsMAYOHKgTsCrctWrVynjttdcs7R2neuc5vlxLW8zuyeUo0JRlMbu32rlz54w//vGPxpAhQ3RyiYiIMKpWrap7kfXu3Vtvy8zMNDzJE3/UUIW3OXPmGIMGDTKaNWvmfN/169fX193HH39cpCebFY4cOWKMGzf
OiI6O1rHr1q2rfw6qt6jVxo4d65Fet7dKTU3VPfJat26tC7jqfauC7dChQ421a9daFlcV6sePH68riOpnrWKr75TJkycbJ0+etO174/r168a8efOMX/ziF/qcVGFf/UxWrVplaWz1/7Ic56rHMqxBziPnWZXzvCHfKeQ8z+Q8K/OdQs5DRZHvyHfU8Xwv5/liHc/OnEe+8x3kPHIedTzrUMer/PmuIrHJeUDZBah/bj+gOAAAAAAAAAAAAAAA1gi06HUBAAAAAAAAAAAAACgTGq4BAAAAAAAAAAAAALai4RoAAAAAAAAAAAAAYCsargEAAAAAAAAAAAAAtqLhGgAAAAAAAAAAAABgKxquAQAAAAAAAAAAAAC2ouEaAAAAAAAAAAAAAGArGq4BAAAAAAAAAAAAALai4RoAAAAAAAAAAAAAYCsargEAAAAAAAAAAAAAtqLhGoBHnDx5UgICAvSiHgMA4KvIeQAAf0HOAwD4A/IdAHgODdeAF5g5c6az8ONOQWnZsmUeOT8AAMxCzgMA+AtyHgDAH5DvAABmouEaAAAAAAAAAAAAAGArGq4BAAAAAAAAAAAAALai4RoAAAAAAAAAAAAAYCsargEftG/fPpkwYYK0bNlSwsLCJDw8XDp06CC/+93v5MKFCy6PycvLkzVr1ujjunTpIlFRURIcHCz16tWT/v37y/Lly8UwjFLjpqWlycSJE6Vx48YSEhIi0dHRMn78eDl27JhF7xQA4O/IeQAAf0HOAwD4A/IdAPg5A4DtZsyYoUpOermdEydOOPddunRpse3z5883AgMDnfuEhYUZwcHBzv9HRUUZKSkpxY7buHGjcx+1REZGGhEREUWeGzFihFFQUODyvJKTk41atWo59w0NDTXCw8Odr/Xpp586t6n3AADwT+Q8AIC/IOcBAPwB+Q4AYCbuuAZ8yAcffCDTpk3TvRHnzp0r6enpkpWVJdnZ2bJr1y7p06ePfm7IkCFy7dq1IseqY1SvwvXr18uVK1f0kpmZKRkZGbJ48WKJjIyUpKQkee+994rFvXr1qjz88MNy6dIliYmJkX//+986rnp++/btuqeiem0AAMxCzgMA+AtyHgDAH5DvAACaqc3gACrcM7F+/fqlLnXr1nXZMzEzM9OoWbOmfn7dunUu4+Tl5RmdO3fW+yxatMitc0xKStLHNW/e3GVvSLVN9YA8cOBAse3p6elFei3SMxEA/Bc5DwDgL8h5AAB/QL4DAJiJO64BL3Pu3LlSl5Lmcvnf//1fuXz5snTq1EnP3eJKUFCQjBo1Sj/+17/+5dZ5DRo0SK9TU1Pl7NmzRbatWLFCr0eMGCFt2rQpdmyDBg3k2WefdSseAMD3kfMAAP6CnAcA8AfkOwBARQVV+BUAmMowVOe9kp08eVKaNm1a7Plt27bp9cGDB3WBqiQ5OTl6ferUqWLb1BA4S5YskbVr1+rXUQXGvLy8Yvv98MMPzhg3btyQ7777Tj9WQ/aURG174403Sn1vAAD/Qs4DAPgLch4AwB+Q7wAAFUXDNeAjzpw5o9e5ubl6uR01P8zNjhw5In379tWFt5vnh6lZs6YEBv40OIPqGamoeV4cLl68KPn5+fpxo0aNSowXHR3t9nsCAMAVch4AwF+Q8wAA/oB8BwBwYKhwwEcUFBTo9WOPPaZ7N95uUT0cbzZ+/HhduIuNjZWkpCTJyMjQBbkff/xRD6GTlpZW5t6TAABYiZwHAPAX5DwAgD8g3wEAHLjjGvARjiFuXA2VczunT5+W7du368fLly+Xbt26Fdvn1vlfHGrXri1VqlTRBcybC4G3Km0bAADuIOcBAPwFOQ8A4A/IdwAAB+64BnxEjx499Do5OVnS09PdLuA5dOrUyeU+GzZscPl8cHCwdOjQQT/euHFjiTG+/PJLt84JAICSkPMAAP6CnAcA8AfkOwCAAw3XgI8YMWKEnrclLy9PXnrppVKHvSksLJTLly87/1+jRg3n4z179hTb/+rVqzJ
nzpwSX08N46OooXgOHz5cbLsalmfJkiVuvR8AAEpCzgMA+AtyHgDAH5DvAAAONFwDPkIV7t5++239eMWKFTJo0CDZuXOnLswpan3w4EFZuHChtGvXTtauXes8tk2bNhITE6MfP/nkk7p3o8OOHTskPj5eLl26VGLsX/3qVxIdHS3Xr1+XAQMGyH/+8x9nAVOdwwMPPOA8DwAAKoqcBwDwF+Q8AIA/IN8BAByY4xrwIWPHjpWcnByZPHmy/POf/9RLSEiIhIeHS2Zmpu616BAQEOB8HBgYKH/605/k4Ycflv3790uXLl0kLCxMb8vOzpbq1avL559/rgtqrkRGRspnn30mDz74oJw8eVLvp45Xr3vt2jWJiIiQ999/39mDEQCAiiLnAQD8BTkPAOAPyHcAAIU7rgEf8+yzz+phbX77299Kx44ddQFPDZ+jCnmq4Pb888/L+vXrZdSoUUWOGzx4sGzZskX3aFS9HPPz86Vu3boyfvx43VOxb9++pcZVr7137155+umnpVGjRvp4NVSPKnSmpKTI3XffbfE7BwD4G3IeAMBfkPMAAP6AfAcACDBKmzACAAAAAAAAAAAAAACLccc1AAAAAAAAAAAAAMBWNFwDAAAAAAAAAAAAAGxFwzUAAAAAAAAAAAAAwFY0XAMAAAAAAAAAAAAAbEXDNQAAAAAAAAAAAADAVjRcAwAAAAAAAAAAAABsRcM1AAAAAAAAAAAAAMBWNFwDAAAAAAAAAAAAAGxFwzUAAAAAAAAAAAAAwFY0XAMAAAAAAAAAAAAAbEXDNQAAAAAAAAAAAADAVjRcAwAAAAAAAAAAAABsRcM1AAAAAAAAAAAAAMBWNFwDAAAAAAAAAAAAAMRO/x8QLv381PSPzQAAAABJRU5ErkJggg==", "text/plain": [ - "
" + "
" ] }, - "execution_count": 13, + "execution_count": 21, "metadata": {}, "output_type": "execute_result" } diff --git a/notebooks/exploration.ipynb b/notebooks/exploration.ipynb new file mode 100644 index 0000000..386db82 --- /dev/null +++ b/notebooks/exploration.ipynb @@ -0,0 +1,824 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": { + "id": "uxgwIW6br_8c" + }, + "source": [ + "# MechIR Activation Patching\n", + "\n", + "This notebook serves to walk you through a simple example of activation patching in `MechIR`. For more details on the specifics of this process check out our other notebook!.\n", + "\n", + "NOTE: Our initial implementation is flexible enough to support loading a wide variety of Transformer-based IR models, but we have mainly tested the following models:\n", + "- TAS-B (bi-encoder) [[Hofstätter et al.]](https://arxiv.org/abs/2104.06967) [[HF model card]](https://huggingface.co/sebastian-hofstaetter/distilbert-dot-tas_b-b256-msmarco)\n", + "- monoELECTRA (cross-encoder) [[Pradeep et al.]](https://link.springer.com/chapter/10.1007/978-3-030-99736-6_44) [[HF model card]](https://huggingface.co/crystina-z/monoELECTRA_LCE_nneg31)" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "_F6EgFvTr_8d" + }, + "source": [ + "## Setup" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "id": "6mLCPXacr_8d", + "outputId": "83496403-e207-4d18-e378-2ad133bf57dc" + }, + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "huggingface/tokenizers: The current process just got forked, after parallelism has already been used. 
Disabling parallelism to avoid deadlocks...\n", + "To disable this warning, you can either:\n", + "\t- Avoid using `tokenizers` before the fork if possible\n", + "\t- Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Processing /Users/cchen207/git/brown/research/MechIR\n", + " Preparing metadata (setup.py) ... \u001b[?25ldone\n", + "\u001b[?25hBuilding wheels for collected packages: mechir\n", + " Building wheel for mechir (setup.py) ... \u001b[?25ldone\n", + "\u001b[?25h Created wheel for mechir: filename=mechir-0.0.1-py3-none-any.whl size=66580 sha256=4241b7eb8845a4555551762d163cda108fb4b63b267c325fdd617e1d76d6602b\n", + " Stored in directory: /private/var/folders/w8/j9c1qwbx3cn8hf10x5nz_xtr0000gp/T/pip-ephem-wheel-cache-ugqeu0x0/wheels/39/37/7c/9f04c1e8f880bc1e666f79cde17d9e585bcff18fdf2b5a9b0d\n", + "Successfully built mechir\n", + "Installing collected packages: mechir\n", + " Attempting uninstall: mechir\n", + " Found existing installation: mechir 0.0.1\n", + " Uninstalling mechir-0.0.1:\n", + " Successfully uninstalled mechir-0.0.1\n", + "Successfully installed mechir-0.0.1\n", + "Note: you may need to restart the kernel to use updated packages.\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "huggingface/tokenizers: The current process just got forked, after parallelism has already been used. 
Disabling parallelism to avoid deadlocks...\n", + "To disable this warning, you can either:\n", + "\t- Avoid using `tokenizers` before the fork if possible\n", + "\t- Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Requirement already satisfied: transformer_lens in /opt/anaconda3/envs/mechir/lib/python3.12/site-packages (2.7.0)\n", + "Requirement already satisfied: accelerate>=0.23.0 in /opt/anaconda3/envs/mechir/lib/python3.12/site-packages (from transformer_lens) (0.34.2)\n", + "Requirement already satisfied: beartype<0.15.0,>=0.14.1 in /opt/anaconda3/envs/mechir/lib/python3.12/site-packages (from transformer_lens) (0.14.1)\n", + "Requirement already satisfied: better-abc<0.0.4,>=0.0.3 in /opt/anaconda3/envs/mechir/lib/python3.12/site-packages (from transformer_lens) (0.0.3)\n", + "Requirement already satisfied: datasets>=2.7.1 in /opt/anaconda3/envs/mechir/lib/python3.12/site-packages (from transformer_lens) (3.0.1)\n", + "Requirement already satisfied: einops>=0.6.0 in /opt/anaconda3/envs/mechir/lib/python3.12/site-packages (from transformer_lens) (0.8.0)\n", + "Requirement already satisfied: fancy-einsum>=0.0.3 in /opt/anaconda3/envs/mechir/lib/python3.12/site-packages (from transformer_lens) (0.0.3)\n", + "Requirement already satisfied: jaxtyping>=0.2.11 in /opt/anaconda3/envs/mechir/lib/python3.12/site-packages (from transformer_lens) (0.2.34)\n", + "Requirement already satisfied: numpy>=1.26 in /opt/anaconda3/envs/mechir/lib/python3.12/site-packages (from transformer_lens) (2.1.1)\n", + "Requirement already satisfied: pandas>=1.1.5 in /opt/anaconda3/envs/mechir/lib/python3.12/site-packages (from transformer_lens) (2.2.3)\n", + "Requirement already satisfied: rich>=12.6.0 in /opt/anaconda3/envs/mechir/lib/python3.12/site-packages (from transformer_lens) (13.9.1)\n", + "Requirement already satisfied: sentencepiece in 
/opt/anaconda3/envs/mechir/lib/python3.12/site-packages (from transformer_lens) (0.2.0)\n", + "Requirement already satisfied: torch!=2.0,!=2.1.0,>=1.10 in /opt/anaconda3/envs/mechir/lib/python3.12/site-packages (from transformer_lens) (2.4.1)\n", + "Requirement already satisfied: tqdm>=4.64.1 in /opt/anaconda3/envs/mechir/lib/python3.12/site-packages (from transformer_lens) (4.66.5)\n", + "Requirement already satisfied: transformers>=4.37.2 in /opt/anaconda3/envs/mechir/lib/python3.12/site-packages (from transformer_lens) (4.45.1)\n", + "Requirement already satisfied: typing-extensions in /opt/anaconda3/envs/mechir/lib/python3.12/site-packages (from transformer_lens) (4.12.2)\n", + "Requirement already satisfied: wandb>=0.13.5 in /opt/anaconda3/envs/mechir/lib/python3.12/site-packages (from transformer_lens) (0.18.2)\n", + "Requirement already satisfied: packaging>=20.0 in /opt/anaconda3/envs/mechir/lib/python3.12/site-packages (from accelerate>=0.23.0->transformer_lens) (24.1)\n", + "Requirement already satisfied: psutil in /opt/anaconda3/envs/mechir/lib/python3.12/site-packages (from accelerate>=0.23.0->transformer_lens) (6.0.0)\n", + "Requirement already satisfied: pyyaml in /opt/anaconda3/envs/mechir/lib/python3.12/site-packages (from accelerate>=0.23.0->transformer_lens) (6.0.2)\n", + "Requirement already satisfied: huggingface-hub>=0.21.0 in /opt/anaconda3/envs/mechir/lib/python3.12/site-packages (from accelerate>=0.23.0->transformer_lens) (0.25.1)\n", + "Requirement already satisfied: safetensors>=0.4.3 in /opt/anaconda3/envs/mechir/lib/python3.12/site-packages (from accelerate>=0.23.0->transformer_lens) (0.4.5)\n", + "Requirement already satisfied: filelock in /opt/anaconda3/envs/mechir/lib/python3.12/site-packages (from datasets>=2.7.1->transformer_lens) (3.16.1)\n", + "Requirement already satisfied: pyarrow>=15.0.0 in /opt/anaconda3/envs/mechir/lib/python3.12/site-packages (from datasets>=2.7.1->transformer_lens) (17.0.0)\n", + "Requirement already 
satisfied: dill<0.3.9,>=0.3.0 in /opt/anaconda3/envs/mechir/lib/python3.12/site-packages (from datasets>=2.7.1->transformer_lens) (0.3.8)\n", + "Requirement already satisfied: requests>=2.32.2 in /opt/anaconda3/envs/mechir/lib/python3.12/site-packages (from datasets>=2.7.1->transformer_lens) (2.32.3)\n", + "Requirement already satisfied: xxhash in /opt/anaconda3/envs/mechir/lib/python3.12/site-packages (from datasets>=2.7.1->transformer_lens) (3.5.0)\n", + "Requirement already satisfied: multiprocess in /opt/anaconda3/envs/mechir/lib/python3.12/site-packages (from datasets>=2.7.1->transformer_lens) (0.70.16)\n", + "Requirement already satisfied: fsspec<=2024.6.1,>=2023.1.0 in /opt/anaconda3/envs/mechir/lib/python3.12/site-packages (from fsspec[http]<=2024.6.1,>=2023.1.0->datasets>=2.7.1->transformer_lens) (2024.6.1)\n", + "Requirement already satisfied: aiohttp in /opt/anaconda3/envs/mechir/lib/python3.12/site-packages (from datasets>=2.7.1->transformer_lens) (3.10.8)\n", + "Requirement already satisfied: typeguard==2.13.3 in /opt/anaconda3/envs/mechir/lib/python3.12/site-packages (from jaxtyping>=0.2.11->transformer_lens) (2.13.3)\n", + "Requirement already satisfied: python-dateutil>=2.8.2 in /opt/anaconda3/envs/mechir/lib/python3.12/site-packages (from pandas>=1.1.5->transformer_lens) (2.9.0)\n", + "Requirement already satisfied: pytz>=2020.1 in /opt/anaconda3/envs/mechir/lib/python3.12/site-packages (from pandas>=1.1.5->transformer_lens) (2024.2)\n", + "Requirement already satisfied: tzdata>=2022.7 in /opt/anaconda3/envs/mechir/lib/python3.12/site-packages (from pandas>=1.1.5->transformer_lens) (2024.2)\n", + "Requirement already satisfied: markdown-it-py>=2.2.0 in /opt/anaconda3/envs/mechir/lib/python3.12/site-packages (from rich>=12.6.0->transformer_lens) (3.0.0)\n", + "Requirement already satisfied: pygments<3.0.0,>=2.13.0 in /opt/anaconda3/envs/mechir/lib/python3.12/site-packages (from rich>=12.6.0->transformer_lens) (2.18.0)\n", + "Requirement already 
satisfied: sympy in /opt/anaconda3/envs/mechir/lib/python3.12/site-packages (from torch!=2.0,!=2.1.0,>=1.10->transformer_lens) (1.13.3)\n", + "Requirement already satisfied: networkx in /opt/anaconda3/envs/mechir/lib/python3.12/site-packages (from torch!=2.0,!=2.1.0,>=1.10->transformer_lens) (3.3)\n", + "Requirement already satisfied: jinja2 in /opt/anaconda3/envs/mechir/lib/python3.12/site-packages (from torch!=2.0,!=2.1.0,>=1.10->transformer_lens) (3.1.4)\n", + "Requirement already satisfied: setuptools in /opt/anaconda3/envs/mechir/lib/python3.12/site-packages (from torch!=2.0,!=2.1.0,>=1.10->transformer_lens) (75.1.0)\n", + "Requirement already satisfied: regex!=2019.12.17 in /opt/anaconda3/envs/mechir/lib/python3.12/site-packages (from transformers>=4.37.2->transformer_lens) (2024.9.11)\n", + "Requirement already satisfied: tokenizers<0.21,>=0.20 in /opt/anaconda3/envs/mechir/lib/python3.12/site-packages (from transformers>=4.37.2->transformer_lens) (0.20.0)\n", + "Requirement already satisfied: click!=8.0.0,>=7.1 in /opt/anaconda3/envs/mechir/lib/python3.12/site-packages (from wandb>=0.13.5->transformer_lens) (8.1.7)\n", + "Requirement already satisfied: docker-pycreds>=0.4.0 in /opt/anaconda3/envs/mechir/lib/python3.12/site-packages (from wandb>=0.13.5->transformer_lens) (0.4.0)\n", + "Requirement already satisfied: gitpython!=3.1.29,>=1.0.0 in /opt/anaconda3/envs/mechir/lib/python3.12/site-packages (from wandb>=0.13.5->transformer_lens) (3.1.43)\n", + "Requirement already satisfied: platformdirs in /opt/anaconda3/envs/mechir/lib/python3.12/site-packages (from wandb>=0.13.5->transformer_lens) (4.3.6)\n", + "Requirement already satisfied: protobuf!=4.21.0,!=5.28.0,<6,>=3.19.0 in /opt/anaconda3/envs/mechir/lib/python3.12/site-packages (from wandb>=0.13.5->transformer_lens) (5.28.2)\n", + "Requirement already satisfied: sentry-sdk>=1.0.0 in /opt/anaconda3/envs/mechir/lib/python3.12/site-packages (from wandb>=0.13.5->transformer_lens) (2.15.0)\n", + "Requirement 
already satisfied: setproctitle in /opt/anaconda3/envs/mechir/lib/python3.12/site-packages (from wandb>=0.13.5->transformer_lens) (1.3.3)\n", + "Requirement already satisfied: six>=1.4.0 in /opt/anaconda3/envs/mechir/lib/python3.12/site-packages (from docker-pycreds>=0.4.0->wandb>=0.13.5->transformer_lens) (1.16.0)\n", + "Requirement already satisfied: aiohappyeyeballs>=2.3.0 in /opt/anaconda3/envs/mechir/lib/python3.12/site-packages (from aiohttp->datasets>=2.7.1->transformer_lens) (2.4.3)\n", + "Requirement already satisfied: aiosignal>=1.1.2 in /opt/anaconda3/envs/mechir/lib/python3.12/site-packages (from aiohttp->datasets>=2.7.1->transformer_lens) (1.3.1)\n", + "Requirement already satisfied: attrs>=17.3.0 in /opt/anaconda3/envs/mechir/lib/python3.12/site-packages (from aiohttp->datasets>=2.7.1->transformer_lens) (24.2.0)\n", + "Requirement already satisfied: frozenlist>=1.1.1 in /opt/anaconda3/envs/mechir/lib/python3.12/site-packages (from aiohttp->datasets>=2.7.1->transformer_lens) (1.4.1)\n", + "Requirement already satisfied: multidict<7.0,>=4.5 in /opt/anaconda3/envs/mechir/lib/python3.12/site-packages (from aiohttp->datasets>=2.7.1->transformer_lens) (6.1.0)\n", + "Requirement already satisfied: yarl<2.0,>=1.12.0 in /opt/anaconda3/envs/mechir/lib/python3.12/site-packages (from aiohttp->datasets>=2.7.1->transformer_lens) (1.13.1)\n", + "Requirement already satisfied: gitdb<5,>=4.0.1 in /opt/anaconda3/envs/mechir/lib/python3.12/site-packages (from gitpython!=3.1.29,>=1.0.0->wandb>=0.13.5->transformer_lens) (4.0.11)\n", + "Requirement already satisfied: mdurl~=0.1 in /opt/anaconda3/envs/mechir/lib/python3.12/site-packages (from markdown-it-py>=2.2.0->rich>=12.6.0->transformer_lens) (0.1.2)\n", + "Requirement already satisfied: charset-normalizer<4,>=2 in /opt/anaconda3/envs/mechir/lib/python3.12/site-packages (from requests>=2.32.2->datasets>=2.7.1->transformer_lens) (3.3.2)\n", + "Requirement already satisfied: idna<4,>=2.5 in 
/opt/anaconda3/envs/mechir/lib/python3.12/site-packages (from requests>=2.32.2->datasets>=2.7.1->transformer_lens) (3.10)\n", + "Requirement already satisfied: urllib3<3,>=1.21.1 in /opt/anaconda3/envs/mechir/lib/python3.12/site-packages (from requests>=2.32.2->datasets>=2.7.1->transformer_lens) (2.2.3)\n", + "Requirement already satisfied: certifi>=2017.4.17 in /opt/anaconda3/envs/mechir/lib/python3.12/site-packages (from requests>=2.32.2->datasets>=2.7.1->transformer_lens) (2024.8.30)\n", + "Requirement already satisfied: MarkupSafe>=2.0 in /opt/anaconda3/envs/mechir/lib/python3.12/site-packages (from jinja2->torch!=2.0,!=2.1.0,>=1.10->transformer_lens) (2.1.5)\n", + "Requirement already satisfied: mpmath<1.4,>=1.1.0 in /opt/anaconda3/envs/mechir/lib/python3.12/site-packages (from sympy->torch!=2.0,!=2.1.0,>=1.10->transformer_lens) (1.3.0)\n", + "Requirement already satisfied: smmap<6,>=3.0.1 in /opt/anaconda3/envs/mechir/lib/python3.12/site-packages (from gitdb<5,>=4.0.1->gitpython!=3.1.29,>=1.0.0->wandb>=0.13.5->transformer_lens) (5.0.1)\n", + "Note: you may need to restart the kernel to use updated packages.\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "huggingface/tokenizers: The current process just got forked, after parallelism has already been used. 
Disabling parallelism to avoid deadlocks...\n", + "To disable this warning, you can either:\n", + "\t- Avoid using `tokenizers` before the fork if possible\n", + "\t- Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Requirement already satisfied: matplotlib in /opt/anaconda3/envs/mechir/lib/python3.12/site-packages (3.9.2)\n", + "Requirement already satisfied: seaborn in /opt/anaconda3/envs/mechir/lib/python3.12/site-packages (0.13.2)\n", + "Requirement already satisfied: plotly in /opt/anaconda3/envs/mechir/lib/python3.12/site-packages (5.24.1)\n", + "Requirement already satisfied: contourpy>=1.0.1 in /opt/anaconda3/envs/mechir/lib/python3.12/site-packages (from matplotlib) (1.3.0)\n", + "Requirement already satisfied: cycler>=0.10 in /opt/anaconda3/envs/mechir/lib/python3.12/site-packages (from matplotlib) (0.12.1)\n", + "Requirement already satisfied: fonttools>=4.22.0 in /opt/anaconda3/envs/mechir/lib/python3.12/site-packages (from matplotlib) (4.54.1)\n", + "Requirement already satisfied: kiwisolver>=1.3.1 in /opt/anaconda3/envs/mechir/lib/python3.12/site-packages (from matplotlib) (1.4.7)\n", + "Requirement already satisfied: numpy>=1.23 in /opt/anaconda3/envs/mechir/lib/python3.12/site-packages (from matplotlib) (2.1.1)\n", + "Requirement already satisfied: packaging>=20.0 in /opt/anaconda3/envs/mechir/lib/python3.12/site-packages (from matplotlib) (24.1)\n", + "Requirement already satisfied: pillow>=8 in /opt/anaconda3/envs/mechir/lib/python3.12/site-packages (from matplotlib) (11.0.0)\n", + "Requirement already satisfied: pyparsing>=2.3.1 in /opt/anaconda3/envs/mechir/lib/python3.12/site-packages (from matplotlib) (3.2.0)\n", + "Requirement already satisfied: python-dateutil>=2.7 in /opt/anaconda3/envs/mechir/lib/python3.12/site-packages (from matplotlib) (2.9.0)\n", + "Requirement already satisfied: pandas>=1.2 in 
/opt/anaconda3/envs/mechir/lib/python3.12/site-packages (from seaborn) (2.2.3)\n", + "Requirement already satisfied: tenacity>=6.2.0 in /opt/anaconda3/envs/mechir/lib/python3.12/site-packages (from plotly) (9.0.0)\n", + "Requirement already satisfied: pytz>=2020.1 in /opt/anaconda3/envs/mechir/lib/python3.12/site-packages (from pandas>=1.2->seaborn) (2024.2)\n", + "Requirement already satisfied: tzdata>=2022.7 in /opt/anaconda3/envs/mechir/lib/python3.12/site-packages (from pandas>=1.2->seaborn) (2024.2)\n", + "Requirement already satisfied: six>=1.5 in /opt/anaconda3/envs/mechir/lib/python3.12/site-packages (from python-dateutil>=2.7->matplotlib) (1.16.0)\n", + "Note: you may need to restart the kernel to use updated packages.\n" + ] + } + ], + "source": [ + "%pip install -U -q git+https://github.com/Parry-Parry/MechIR.git\n", + "%pip install -q transformer_lens\n", + "%pip install -q matplotlib seaborn plotly" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "id": "m1zmJUINr_8e", + "outputId": "8cb4185e-d3e7-4864-800d-0d59454b22dd" + }, + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "/opt/anaconda3/envs/mechir/lib/python3.12/site-packages/tqdm/auto.py:21: TqdmWarning: IProgress not found. Please update jupyter and ipywidgets. 
See https://ipywidgets.readthedocs.io/en/stable/user_install.html\n", + " from .autonotebook import tqdm as notebook_tqdm\n" + ] + } + ], + "source": [ + "from mechir import Dot, Cat\n", + "from mechir.data import MechIRDataset, DotDataCollator, CatDataCollator\n", + "from mechir.perturb import perturbation\n", + "from mechir.plotting import plot_components\n", + "\n", + "import torch\n", + "from torch.utils.data import DataLoader" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "RxPp15vmr_8e" + }, + "source": [ + "## Load Model\n", + "\n", + "* `Dot` : A bi-encoder architecture with flexibility for different BERT architectures and pooling forms.\n", + "* `Cat` : A cross-encoder architecture with with flexibility for different BERT architectures. Checkout our `monoT5` class if you want to work with sequence-to-sequence models!" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "id": "5JNgLgOpr_8e", + "outputId": "a654dcb5-0f12-4204-c2d6-33d604fa45b1" + }, + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "WARNING:root:Support for BERT in TransformerLens is currently experimental, until such a time when it has feature parity with HookedTransformer and has been tested on real research tasks. Until then, backward compatibility is not guaranteed. Please see the docs for information on the limitations of the current implementation.\n", + "If using BERT for interpretability research, keep in mind that BERT has some significant architectural differences to GPT. 
For example, LayerNorms are applied *after* the attention and MLP components, meaning that the last LayerNorm in a block cannot be folded.\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Moving model to device: mps\n", + "Loaded pretrained model sebastian-hofstaetter/distilbert-dot-tas_b-b256-msmarco into HookedEncoder\n" + ] + } + ], + "source": [ + "model_name = \"sebastian-hofstaetter/distilbert-dot-tas_b-b256-msmarco\"\n", + "model = Dot(model_name)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "id": "2SvOoLZUr_8f" + }, + "outputs": [], + "source": [ + "cat_model_name = \"crystina-z/monoELECTRA_LCE_nneg31\"\n", + "cat_model = Cat(cat_model_name)" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "gugeJ5hbr_8f" + }, + "source": [ + "## Load Dataset\n", + "\n", + "We recommend the use of `ir-datasets` as it is the easiest way to get started with MechIR. By default `MechIR` will load relevance judgements from these datasets however you can change this or even use your own documents and queries using the `MechDataset` class." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "id": "6s20bffar_8g", + "outputId": "2ea72393-f32a-4577-e66a-fa3063801bed" + }, + "outputs": [ + { + "data": { + "text/html": [ + "
\n", + "\n", + "\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + "
query_iddoc_idrelevanceiteration
01123910
11150210
21446210
31456910
41547210
\n", + "
" + ], + "text/plain": [ + " query_id doc_id relevance iteration\n", + "0 1 1239 1 0\n", + "1 1 1502 1 0\n", + "2 1 4462 1 0\n", + "3 1 4569 1 0\n", + "4 1 5472 1 0" + ] + }, + "execution_count": 3, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "# Load smallest dataset for quick testing\n", + "dataset = MechIRDataset(\"vaswani\")\n", + "dataset.pairs.head()" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "id": "KDhYqYk4r_8g", + "outputId": "71a93c7d-9de3-462c-f4ba-028b2a0719b6" + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Total queries in dataset: 93\n", + "\n", + "----------- Examples of queries: -----------\n", + "\n", + "MEASUREMENT OF DIELECTRIC CONSTANT OF LIQUIDS BY THE USE OF MICROWAVE TECHNIQUES\n", + "\n", + "MATHEMATICAL ANALYSIS AND DESIGN DETAILS OF WAVEGUIDE FED MICROWAVE RADIATIONS\n", + "\n", + "USE OF DIGITAL COMPUTERS IN THE DESIGN OF BAND PASS FILTERS HAVING GIVEN PHASE AND ATTENUATION CHARACTERISTICS\n", + "\n" + ] + } + ], + "source": [ + "# Print examples of queries\n", + "print(\"Total queries in dataset:\", len(dataset.queries.items()))\n", + "print(\"\\n----------- Examples of queries: -----------\\n\")\n", + "example_queries = list(dataset.queries.values())[:3]\n", + "for query in example_queries:\n", + " print(query)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "id": "DPaJz6i7r_8g", + "outputId": "bf0f8323-8973-48c3-d409-cba9943cd24c" + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Total documents in dataset: 11429\n", + "Minimum Length (in words): 2\n", + "Maximum Length (in words): 269\n", + "Average Length (in words): 41.93\n", + "\n", + "----------- Examples of documents: -----------\n", + "\n", + "compact memories have flexible capacities a digital data storage\n", + "system with capacity up to bits and random and or sequential access\n", + "is 
described\n", + "\n", + "an electronic analogue computer for solving systems of linear equations\n", + "mathematical derivation of the operating principle and stability\n", + "conditions for a computer consisting of amplifiers\n", + "\n", + "electronic coordinate transformer circuit details are given for\n", + "the construction of an electronic calculating unit which enables\n", + "the polar coordinates of a vector modulus and cosine or sine of the\n", + "argument to be derived from those of a rectangular system of axes\n", + "\n" + ] + } + ], + "source": [ + "# Calculate document stats\n", + "doc_lengths = [len(doc.split()) for doc in dataset.docs.values()]\n", + "\n", + "# Print examples of documents\n", + "print(\"Total documents in dataset:\", len(dataset.docs.items()))\n", + "print(f\"Minimum Length (in words): {min(doc_lengths)}\")\n", + "print(f\"Maximum Length (in words): {max(doc_lengths)}\")\n", + "print(f\"Average Length (in words): {(sum(doc_lengths) / len(doc_lengths) if doc_lengths else 0):.2f}\")\n", + "print(\"\\n----------- Examples of documents: -----------\\n\")\n", + "example_docs = list(dataset.docs.values())[:3]\n", + "for doc in example_docs:\n", + " print(doc)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "id": "5mePEZwlr_8h", + "outputId": "c86a1970-a6d3-4356-ae45-f6641ba06c3e" + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Query: MEASUREMENT OF DIELECTRIC CONSTANT OF LIQUIDS BY THE USE OF MICROWAVE TECHNIQUES\n", + "\n", + "-------------------------------\n", + "Document:\n", + "broadband millimetre wave paramagnetic resonance spectrometer the\n", + "specimen and waveguide which can be cooled by means of a cryostat\n", + "are placed between close pole pieces giving high uniform magnetic\n", + "fields design details and some measurements on zero field splittings\n", + "are given\n", + "\n", + "Document:\n", + "microwave measurements of dielectric absorption in dilute 
solutions\n", + "\n", + "Document:\n", + "dielectric properties of ice at very low frequencies and the influence\n", + "of a polarizing field measurements at frequencies down to are reported\n", + "the loss factor passes through a low frequency maximum which is distinguishable\n", + "from that associated with the dipole dispersion by its different\n", + "temperature dependence the effect of impurities is to shift the\n", + "maximum towards higher frequencies application of a unidirectional\n", + "field does not affect the permittivity of the pure crystals but eliminates\n", + "the low frequency dispersion when impurities are present the\n", + "observations are consistent with macdonalds theory\n", + "\n" + ] + } + ], + "source": [ + "# Print example of one query and relevant documents\n", + "query_rel_doc_ex_df = dataset.pairs.head(3)[[\"query_id\", \"doc_id\"]]\n", + "query_id = query_rel_doc_ex_df[\"query_id\"].unique()[0]\n", + "doc_ids = query_rel_doc_ex_df[\"doc_id\"]\n", + "print(f\"Query: {dataset.queries[query_id]}\")\n", + "print(\"-------------------------------\")\n", + "for doc_id in doc_ids:\n", + " print(f\"Document:\\n{dataset.docs[doc_id]}\")" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "yvY9hwtRr_8h" + }, + "source": [ + "### Paired Dataset Creation\n", + "\n", + "Activation patching relies on pairs of inputs, consisting of one *perturbed* input and one *baseline* input, where the *perturbed* input is constructed by applying some function to modify an *original* input (e.g., inserting a query term to the end of a document) and the *baseline* input is a padded variant of the *original* input to maintain token lengths between the pairs.\n", + "\n", + "In the main demo, we show one possible type of perturbation (appending a query term to the end of a document), but there are several other possible types of functions that could be use to generate the activation patching input pairs depending on what behavior you are trying to 
investigate. In this section, we discuss possible general perturbation methods, and describe some specific perturbations that we define aligning with IR axioms." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "id": "FlzOPo-4r_8h" + }, + "outputs": [], + "source": [ + "# Helper function just to print\n", + "def pretty_print_triplets(batch, tokenizer):\n", + " \"\"\"\n", + " Pretty prints triplets of queries, documents, and their corresponding perturbed documents from a batch.\n", + "\n", + " Args:\n", + " batch (dict): A dictionary containing 'queries', 'documents', and 'perturbed_documents' from a DataLoader.\n", + " tokenizer: The tokenizer used to decode the input IDs.\n", + " \"\"\"\n", + " # Get the queries, documents, and perturbed documents from the batch\n", + " queries = batch[\"queries\"]\n", + " documents = batch[\"documents\"]\n", + " perturbed_documents = batch[\"perturbed_documents\"]\n", + "\n", + " # Loop through the batch size\n", + " for i in range(len(documents[\"input_ids\"])):\n", + " # Get the input IDs\n", + " query_ids = queries[\"input_ids\"][i]\n", + " original_ids = documents[\"input_ids\"][i]\n", + " perturbed_ids = perturbed_documents[\"input_ids\"][i]\n", + "\n", + " # Decode the input IDs to text\n", + " query_decoded = tokenizer.decode(query_ids.tolist(), skip_special_tokens=False).replace(\"[PAD]\", \"\").strip()\n", + " original_doc_decoded = tokenizer.decode(original_ids.tolist(), skip_special_tokens=False).replace(\"[PAD]\", \"\").strip()\n", + " perturbed_doc_decoded = tokenizer.decode(perturbed_ids.tolist(), skip_special_tokens=False).replace(\"[PAD]\", \"\").strip()\n", + "\n", + " # Pretty print\n", + " print(f\"Triplet {i + 1}:\")\n", + " print(\"Query:\", query_decoded)\n", + " print(\"Original Document:\", original_doc_decoded)\n", + " print(\"Perturbed Document:\", perturbed_doc_decoded)\n", + " print(\"=\" * 50) # Separator for clarity" + ] + }, + { + "cell_type": "markdown", + 
"metadata": { + "id": "6h1rtEMatoXR" + }, + "source": [ + "## Make your own" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "id": "M7Ht999zr_8h" + }, + "outputs": [], + "source": [ + "@perturbation\n", + "def my_perturbation(text: str, query: str = None) -> str:\n", + " \"\"\"\n", + " A simple perturbation function that replaces the first word of the text with 'REPLACED'.\n", + "\n", + " Args:\n", + " text (str): The input text to be perturbed.\n", + "\n", + " Returns:\n", + " str: The perturbed text.\n", + " \"\"\"\n", + " words = text.split()\n", + " if words:\n", + " words[0] = \"REPLACED\"\n", + " return \" \".join(words)" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "1KEsmNqcuKuk" + }, + "source": [ + "# Using Perturbations\n", + "\n", + "Once you have a perturbation and a dataset, we provide collate functions which automatically apply your perturbation to your dataset and allow batching for more efficient experiments. A standard torch dataloader is all you need but remember that different architectures need different input formats so make sure to use the correct collate functon for your model." 
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "id": "DjASsJm5r_8i" + }, + "outputs": [], + "source": [ + "data_collator = DotDataCollator(model.tokenizer, my_perturbation)\n", + "dataloader = DataLoader(dataset, batch_size=1, collate_fn=data_collator)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "id": "I1xQgcD4r_8i" + }, + "outputs": [], + "source": [ + "cat_data_collator = CatDataCollator(cat_model.tokenizer, my_perturbation)\n", + "cat_dataloader = DataLoader(dataset, batch_size=1, collate_fn=cat_data_collator)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "id": "cbOf8RGBr_8i", + "outputId": "6fc2f9ca-cd05-4a73-e9dd-4009b0b1e014" + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Triplet 1:\n", + "Query: [CLS] measurement of dielectric constant of liquids by the use of microwave techniques [SEP]\n", + "Original Document: [CLS] broadband millimetre wave paramagnetic resonance spectrometer the specimen and waveguide which can be cooled by means of a cryostat are placed between close pole pieces giving high uniform magnetic fields design details and some measurements on zero field splittings are given X X X X X X X X X X X X X X [SEP]\n", + "Perturbed Document: [CLS] measurement of dielectric constant of liquids by the use of microwave techniques broadband millimetre wave paramagnetic resonance spectrometer the specimen and waveguide which can be cooled by means of a cryostat are placed between close pole pieces giving high uniform magnetic fields design details and some measurements on zero field splittings are given [SEP]\n", + "==================================================\n", + "Triplet 1:\n", + "Query: [CLS] measurement of dielectric constant of liquids by the use of microwave techniques [SEP]\n", + "Original Document: [CLS] microwave measurements of dielectric absorption in dilute solutions X X X X X X X X X X X X 
X X [SEP]\n", + "Perturbed Document: [CLS] measurement of dielectric constant of liquids by the use of microwave techniques microwave measurements of dielectric absorption in dilute solutions [SEP]\n", + "==================================================\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "/opt/anaconda3/envs/mechir/lib/python3.12/site-packages/transformers/tokenization_utils_base.py:2855: UserWarning: `max_length` is ignored when `padding`=`True` and there is no truncation strategy. To pad to max length, use `padding='max_length'`.\n", + " warnings.warn(\n" + ] + } + ], + "source": [ + "for i, batch in enumerate(cat_data_collator):\n", + " pretty_print_triplets(batch, cat_model.tokenizer)\n", + "\n", + " # stop after 2 batches\n", + " if i == 1:\n", + " break" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "IuPZDLP4r_8i" + }, + "source": [ + "### Verify Difference in Performance on Perturbed Pairs\n", + "\n", + "Before we can finalize our paired dataset and proceedto the patching experiments, there's a couple things to check:\n", + "- Does the chosen perturbation even have an effect on model behavior?\n", + "- If yes, what is that effect? (i.e., Do the *baseline* or *perturbed* inputs have a higher relevance score on average?)\n", + "\n", + "So first, let's calculate the performances of the three toy perturbations we defined earlier (prepend, append, replace) and plot their distributions." 
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "id": "GgPaKaKmr_8i" + }, + "outputs": [], + "source": [ + "# Helper function for plotting difference in performance\n", + "import matplotlib.pyplot as plt\n", + "from matplotlib.ticker import MaxNLocator\n", + "def plot_score_dists(baseline_scores, perturbed_scores, type=\"hist\"):\n", + " plt.figure(figsize=(8, 6))\n", + "\n", + " if type == \"hist\":\n", + " plt.hist(baseline_scores, label='Baseline', color='blue')\n", + " plt.hist(perturbed_scores, label='Perturbed', color='orange')\n", + " plt.gca().yaxis.set_major_locator(MaxNLocator(integer=True))\n", + " plt.ylabel('Frequency')\n", + " elif type == \"box\":\n", + " plt.boxplot([baseline_scores, perturbed_scores], labels=['Baseline', 'Perturbed'])\n", + " plt.ylabel('Scores')\n", + "\n", + " plt.xlabel('Scores')\n", + " plt.title('Distribution of Baseline vs Perturbed Scores')\n", + " plt.legend()\n", + "\n", + " plt.show()\n", + "\n", + " return" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "id": "UYvnhDmqr_8j" + }, + "outputs": [], + "source": [ + "baseline_performance, perturbed_performance = [], []\n", + "for i, batch in enumerate(cat_dataloader):\n", + " # Get the queries, documents, and perturbed documents from the batch\n", + " sequences = batch[\"sequences\"]\n", + " perturbed_sequences = batch[\"perturbed_sequences\"]\n", + "\n", + " baseline_scores = model.forward(**sequences) # [batch_size x 1]\n", + " perturbed_scores = model.forward(**perturbed_sequences) # [batch_size x 1]\n", + "\n", + " baseline_performance += baseline_scores.flatten().tolist()\n", + " perturbed_performance += perturbed_scores.flatten().tolist()\n", + "\n", + " # stop after 2 batches\n", + " if i == 1:\n", + " break" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "id": "UE3pic6kr_8j" + }, + "outputs": [], + "source": [ + "plot_score_dists(baseline_performance, 
perturbed_performance)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "id": "jF7N9Kder_8j" + }, + "outputs": [], + "source": [ + "plot_score_dists(baseline_performance, perturbed_performance, type=\"box\")" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "ITERS=10\n", + "patching_head_outputs = []\n", + "for i, batch in enumerate(dataloader):\n", + " # Get the queries, documents, and perturbed documents from the batch\n", + " sequences = batch[\"sequences\"]\n", + " perturbed_sequences = batch[\"perturbed_sequences\"]\n", + " \n", + " patch_head_out = cat_model.patch(sequences, perturbed_sequences, patch_type=\"head_all\")\n", + " patching_head_outputs.append(patch_head_out)\n", + " \n", + " if i == ITERS:\n", + " break\n", + "mean_head_outputs = torch.mean(torch.stack(patching_head_outputs), axis=0)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "plot_components(mean_head_outputs)" + ] + } + ], + "metadata": { + "colab": { + "provenance": [] + }, + "kernelspec": { + "display_name": "mechir", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.12.6" + } + }, + "nbformat": 4, + "nbformat_minor": 0 +} diff --git a/notebooks/plot/patch.pdf b/notebooks/plot/patch.pdf index da20b2c9c3fcd119d13c2d2847d3ae23a12e48bd..01947a4191a3025166558dce7170c431985aec82 100644 GIT binary patch delta 19 bcmaENjPczu#tq-&SWHcgEjRy;D`o)zVy6hr delta 19 bcmaENjPczu#tq-&SWFB}j5q&|D`o)zVqplw From b93756fceae6f88f92320224c1735d52c2c2df0d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E2=80=9CAndrew?= Date: Tue, 8 Apr 2025 12:08:03 +0200 Subject: [PATCH 13/21] progress --- 
notebooks/exploration.ipynb | 267 +++++++++--------- pyproject.toml | 1 + src/mechir/data/loader/base.py | 10 +- .../modelling/hooked/HookedDistilBert.py | 4 +- src/mechir/modelling/hooked/HookedEncoder.py | 25 +- .../modelling/hooked/hooked_components.py | 5 +- src/mechir/perturb/__init__.py | 25 +- src/mechir/perturb/axiom/frequency.py | 8 + src/mechir/perturb/axiom/proximity.py | 8 +- 9 files changed, 177 insertions(+), 176 deletions(-) diff --git a/notebooks/exploration.ipynb b/notebooks/exploration.ipynb index 386db82..a690fb8 100644 --- a/notebooks/exploration.ipynb +++ b/notebooks/exploration.ipynb @@ -26,7 +26,7 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 16, "metadata": { "id": "6mLCPXacr_8d", "outputId": "83496403-e207-4d18-e378-2ad133bf57dc" @@ -46,20 +46,9 @@ "name": "stdout", "output_type": "stream", "text": [ - "Processing /Users/cchen207/git/brown/research/MechIR\n", - " Preparing metadata (setup.py) ... \u001b[?25ldone\n", - "\u001b[?25hBuilding wheels for collected packages: mechir\n", - " Building wheel for mechir (setup.py) ... 
\u001b[?25ldone\n", - "\u001b[?25h Created wheel for mechir: filename=mechir-0.0.1-py3-none-any.whl size=66580 sha256=4241b7eb8845a4555551762d163cda108fb4b63b267c325fdd617e1d76d6602b\n", - " Stored in directory: /private/var/folders/w8/j9c1qwbx3cn8hf10x5nz_xtr0000gp/T/pip-ephem-wheel-cache-ugqeu0x0/wheels/39/37/7c/9f04c1e8f880bc1e666f79cde17d9e585bcff18fdf2b5a9b0d\n", - "Successfully built mechir\n", - "Installing collected packages: mechir\n", - " Attempting uninstall: mechir\n", - " Found existing installation: mechir 0.0.1\n", - " Uninstalling mechir-0.0.1:\n", - " Successfully uninstalled mechir-0.0.1\n", - "Successfully installed mechir-0.0.1\n", - "Note: you may need to restart the kernel to use updated packages.\n" + "^C\n", + "\u001b[31mERROR: Operation cancelled by user\u001b[0m\u001b[31m\n", + "\u001b[0mNote: you may need to restart the kernel to use updated packages.\n" ] }, { @@ -76,71 +65,6 @@ "name": "stdout", "output_type": "stream", "text": [ - "Requirement already satisfied: transformer_lens in /opt/anaconda3/envs/mechir/lib/python3.12/site-packages (2.7.0)\n", - "Requirement already satisfied: accelerate>=0.23.0 in /opt/anaconda3/envs/mechir/lib/python3.12/site-packages (from transformer_lens) (0.34.2)\n", - "Requirement already satisfied: beartype<0.15.0,>=0.14.1 in /opt/anaconda3/envs/mechir/lib/python3.12/site-packages (from transformer_lens) (0.14.1)\n", - "Requirement already satisfied: better-abc<0.0.4,>=0.0.3 in /opt/anaconda3/envs/mechir/lib/python3.12/site-packages (from transformer_lens) (0.0.3)\n", - "Requirement already satisfied: datasets>=2.7.1 in /opt/anaconda3/envs/mechir/lib/python3.12/site-packages (from transformer_lens) (3.0.1)\n", - "Requirement already satisfied: einops>=0.6.0 in /opt/anaconda3/envs/mechir/lib/python3.12/site-packages (from transformer_lens) (0.8.0)\n", - "Requirement already satisfied: fancy-einsum>=0.0.3 in /opt/anaconda3/envs/mechir/lib/python3.12/site-packages (from transformer_lens) (0.0.3)\n", - 
"Requirement already satisfied: jaxtyping>=0.2.11 in /opt/anaconda3/envs/mechir/lib/python3.12/site-packages (from transformer_lens) (0.2.34)\n", - "Requirement already satisfied: numpy>=1.26 in /opt/anaconda3/envs/mechir/lib/python3.12/site-packages (from transformer_lens) (2.1.1)\n", - "Requirement already satisfied: pandas>=1.1.5 in /opt/anaconda3/envs/mechir/lib/python3.12/site-packages (from transformer_lens) (2.2.3)\n", - "Requirement already satisfied: rich>=12.6.0 in /opt/anaconda3/envs/mechir/lib/python3.12/site-packages (from transformer_lens) (13.9.1)\n", - "Requirement already satisfied: sentencepiece in /opt/anaconda3/envs/mechir/lib/python3.12/site-packages (from transformer_lens) (0.2.0)\n", - "Requirement already satisfied: torch!=2.0,!=2.1.0,>=1.10 in /opt/anaconda3/envs/mechir/lib/python3.12/site-packages (from transformer_lens) (2.4.1)\n", - "Requirement already satisfied: tqdm>=4.64.1 in /opt/anaconda3/envs/mechir/lib/python3.12/site-packages (from transformer_lens) (4.66.5)\n", - "Requirement already satisfied: transformers>=4.37.2 in /opt/anaconda3/envs/mechir/lib/python3.12/site-packages (from transformer_lens) (4.45.1)\n", - "Requirement already satisfied: typing-extensions in /opt/anaconda3/envs/mechir/lib/python3.12/site-packages (from transformer_lens) (4.12.2)\n", - "Requirement already satisfied: wandb>=0.13.5 in /opt/anaconda3/envs/mechir/lib/python3.12/site-packages (from transformer_lens) (0.18.2)\n", - "Requirement already satisfied: packaging>=20.0 in /opt/anaconda3/envs/mechir/lib/python3.12/site-packages (from accelerate>=0.23.0->transformer_lens) (24.1)\n", - "Requirement already satisfied: psutil in /opt/anaconda3/envs/mechir/lib/python3.12/site-packages (from accelerate>=0.23.0->transformer_lens) (6.0.0)\n", - "Requirement already satisfied: pyyaml in /opt/anaconda3/envs/mechir/lib/python3.12/site-packages (from accelerate>=0.23.0->transformer_lens) (6.0.2)\n", - "Requirement already satisfied: huggingface-hub>=0.21.0 in 
/opt/anaconda3/envs/mechir/lib/python3.12/site-packages (from accelerate>=0.23.0->transformer_lens) (0.25.1)\n", - "Requirement already satisfied: safetensors>=0.4.3 in /opt/anaconda3/envs/mechir/lib/python3.12/site-packages (from accelerate>=0.23.0->transformer_lens) (0.4.5)\n", - "Requirement already satisfied: filelock in /opt/anaconda3/envs/mechir/lib/python3.12/site-packages (from datasets>=2.7.1->transformer_lens) (3.16.1)\n", - "Requirement already satisfied: pyarrow>=15.0.0 in /opt/anaconda3/envs/mechir/lib/python3.12/site-packages (from datasets>=2.7.1->transformer_lens) (17.0.0)\n", - "Requirement already satisfied: dill<0.3.9,>=0.3.0 in /opt/anaconda3/envs/mechir/lib/python3.12/site-packages (from datasets>=2.7.1->transformer_lens) (0.3.8)\n", - "Requirement already satisfied: requests>=2.32.2 in /opt/anaconda3/envs/mechir/lib/python3.12/site-packages (from datasets>=2.7.1->transformer_lens) (2.32.3)\n", - "Requirement already satisfied: xxhash in /opt/anaconda3/envs/mechir/lib/python3.12/site-packages (from datasets>=2.7.1->transformer_lens) (3.5.0)\n", - "Requirement already satisfied: multiprocess in /opt/anaconda3/envs/mechir/lib/python3.12/site-packages (from datasets>=2.7.1->transformer_lens) (0.70.16)\n", - "Requirement already satisfied: fsspec<=2024.6.1,>=2023.1.0 in /opt/anaconda3/envs/mechir/lib/python3.12/site-packages (from fsspec[http]<=2024.6.1,>=2023.1.0->datasets>=2.7.1->transformer_lens) (2024.6.1)\n", - "Requirement already satisfied: aiohttp in /opt/anaconda3/envs/mechir/lib/python3.12/site-packages (from datasets>=2.7.1->transformer_lens) (3.10.8)\n", - "Requirement already satisfied: typeguard==2.13.3 in /opt/anaconda3/envs/mechir/lib/python3.12/site-packages (from jaxtyping>=0.2.11->transformer_lens) (2.13.3)\n", - "Requirement already satisfied: python-dateutil>=2.8.2 in /opt/anaconda3/envs/mechir/lib/python3.12/site-packages (from pandas>=1.1.5->transformer_lens) (2.9.0)\n", - "Requirement already satisfied: pytz>=2020.1 in 
/opt/anaconda3/envs/mechir/lib/python3.12/site-packages (from pandas>=1.1.5->transformer_lens) (2024.2)\n", - "Requirement already satisfied: tzdata>=2022.7 in /opt/anaconda3/envs/mechir/lib/python3.12/site-packages (from pandas>=1.1.5->transformer_lens) (2024.2)\n", - "Requirement already satisfied: markdown-it-py>=2.2.0 in /opt/anaconda3/envs/mechir/lib/python3.12/site-packages (from rich>=12.6.0->transformer_lens) (3.0.0)\n", - "Requirement already satisfied: pygments<3.0.0,>=2.13.0 in /opt/anaconda3/envs/mechir/lib/python3.12/site-packages (from rich>=12.6.0->transformer_lens) (2.18.0)\n", - "Requirement already satisfied: sympy in /opt/anaconda3/envs/mechir/lib/python3.12/site-packages (from torch!=2.0,!=2.1.0,>=1.10->transformer_lens) (1.13.3)\n", - "Requirement already satisfied: networkx in /opt/anaconda3/envs/mechir/lib/python3.12/site-packages (from torch!=2.0,!=2.1.0,>=1.10->transformer_lens) (3.3)\n", - "Requirement already satisfied: jinja2 in /opt/anaconda3/envs/mechir/lib/python3.12/site-packages (from torch!=2.0,!=2.1.0,>=1.10->transformer_lens) (3.1.4)\n", - "Requirement already satisfied: setuptools in /opt/anaconda3/envs/mechir/lib/python3.12/site-packages (from torch!=2.0,!=2.1.0,>=1.10->transformer_lens) (75.1.0)\n", - "Requirement already satisfied: regex!=2019.12.17 in /opt/anaconda3/envs/mechir/lib/python3.12/site-packages (from transformers>=4.37.2->transformer_lens) (2024.9.11)\n", - "Requirement already satisfied: tokenizers<0.21,>=0.20 in /opt/anaconda3/envs/mechir/lib/python3.12/site-packages (from transformers>=4.37.2->transformer_lens) (0.20.0)\n", - "Requirement already satisfied: click!=8.0.0,>=7.1 in /opt/anaconda3/envs/mechir/lib/python3.12/site-packages (from wandb>=0.13.5->transformer_lens) (8.1.7)\n", - "Requirement already satisfied: docker-pycreds>=0.4.0 in /opt/anaconda3/envs/mechir/lib/python3.12/site-packages (from wandb>=0.13.5->transformer_lens) (0.4.0)\n", - "Requirement already satisfied: gitpython!=3.1.29,>=1.0.0 in 
/opt/anaconda3/envs/mechir/lib/python3.12/site-packages (from wandb>=0.13.5->transformer_lens) (3.1.43)\n", - "Requirement already satisfied: platformdirs in /opt/anaconda3/envs/mechir/lib/python3.12/site-packages (from wandb>=0.13.5->transformer_lens) (4.3.6)\n", - "Requirement already satisfied: protobuf!=4.21.0,!=5.28.0,<6,>=3.19.0 in /opt/anaconda3/envs/mechir/lib/python3.12/site-packages (from wandb>=0.13.5->transformer_lens) (5.28.2)\n", - "Requirement already satisfied: sentry-sdk>=1.0.0 in /opt/anaconda3/envs/mechir/lib/python3.12/site-packages (from wandb>=0.13.5->transformer_lens) (2.15.0)\n", - "Requirement already satisfied: setproctitle in /opt/anaconda3/envs/mechir/lib/python3.12/site-packages (from wandb>=0.13.5->transformer_lens) (1.3.3)\n", - "Requirement already satisfied: six>=1.4.0 in /opt/anaconda3/envs/mechir/lib/python3.12/site-packages (from docker-pycreds>=0.4.0->wandb>=0.13.5->transformer_lens) (1.16.0)\n", - "Requirement already satisfied: aiohappyeyeballs>=2.3.0 in /opt/anaconda3/envs/mechir/lib/python3.12/site-packages (from aiohttp->datasets>=2.7.1->transformer_lens) (2.4.3)\n", - "Requirement already satisfied: aiosignal>=1.1.2 in /opt/anaconda3/envs/mechir/lib/python3.12/site-packages (from aiohttp->datasets>=2.7.1->transformer_lens) (1.3.1)\n", - "Requirement already satisfied: attrs>=17.3.0 in /opt/anaconda3/envs/mechir/lib/python3.12/site-packages (from aiohttp->datasets>=2.7.1->transformer_lens) (24.2.0)\n", - "Requirement already satisfied: frozenlist>=1.1.1 in /opt/anaconda3/envs/mechir/lib/python3.12/site-packages (from aiohttp->datasets>=2.7.1->transformer_lens) (1.4.1)\n", - "Requirement already satisfied: multidict<7.0,>=4.5 in /opt/anaconda3/envs/mechir/lib/python3.12/site-packages (from aiohttp->datasets>=2.7.1->transformer_lens) (6.1.0)\n", - "Requirement already satisfied: yarl<2.0,>=1.12.0 in /opt/anaconda3/envs/mechir/lib/python3.12/site-packages (from aiohttp->datasets>=2.7.1->transformer_lens) (1.13.1)\n", - 
"Requirement already satisfied: gitdb<5,>=4.0.1 in /opt/anaconda3/envs/mechir/lib/python3.12/site-packages (from gitpython!=3.1.29,>=1.0.0->wandb>=0.13.5->transformer_lens) (4.0.11)\n", - "Requirement already satisfied: mdurl~=0.1 in /opt/anaconda3/envs/mechir/lib/python3.12/site-packages (from markdown-it-py>=2.2.0->rich>=12.6.0->transformer_lens) (0.1.2)\n", - "Requirement already satisfied: charset-normalizer<4,>=2 in /opt/anaconda3/envs/mechir/lib/python3.12/site-packages (from requests>=2.32.2->datasets>=2.7.1->transformer_lens) (3.3.2)\n", - "Requirement already satisfied: idna<4,>=2.5 in /opt/anaconda3/envs/mechir/lib/python3.12/site-packages (from requests>=2.32.2->datasets>=2.7.1->transformer_lens) (3.10)\n", - "Requirement already satisfied: urllib3<3,>=1.21.1 in /opt/anaconda3/envs/mechir/lib/python3.12/site-packages (from requests>=2.32.2->datasets>=2.7.1->transformer_lens) (2.2.3)\n", - "Requirement already satisfied: certifi>=2017.4.17 in /opt/anaconda3/envs/mechir/lib/python3.12/site-packages (from requests>=2.32.2->datasets>=2.7.1->transformer_lens) (2024.8.30)\n", - "Requirement already satisfied: MarkupSafe>=2.0 in /opt/anaconda3/envs/mechir/lib/python3.12/site-packages (from jinja2->torch!=2.0,!=2.1.0,>=1.10->transformer_lens) (2.1.5)\n", - "Requirement already satisfied: mpmath<1.4,>=1.1.0 in /opt/anaconda3/envs/mechir/lib/python3.12/site-packages (from sympy->torch!=2.0,!=2.1.0,>=1.10->transformer_lens) (1.3.0)\n", - "Requirement already satisfied: smmap<6,>=3.0.1 in /opt/anaconda3/envs/mechir/lib/python3.12/site-packages (from gitdb<5,>=4.0.1->gitpython!=3.1.29,>=1.0.0->wandb>=0.13.5->transformer_lens) (5.0.1)\n", "Note: you may need to restart the kernel to use updated packages.\n" ] }, @@ -158,36 +82,19 @@ "name": "stdout", "output_type": "stream", "text": [ - "Requirement already satisfied: matplotlib in /opt/anaconda3/envs/mechir/lib/python3.12/site-packages (3.9.2)\n", - "Requirement already satisfied: seaborn in 
/opt/anaconda3/envs/mechir/lib/python3.12/site-packages (0.13.2)\n", - "Requirement already satisfied: plotly in /opt/anaconda3/envs/mechir/lib/python3.12/site-packages (5.24.1)\n", - "Requirement already satisfied: contourpy>=1.0.1 in /opt/anaconda3/envs/mechir/lib/python3.12/site-packages (from matplotlib) (1.3.0)\n", - "Requirement already satisfied: cycler>=0.10 in /opt/anaconda3/envs/mechir/lib/python3.12/site-packages (from matplotlib) (0.12.1)\n", - "Requirement already satisfied: fonttools>=4.22.0 in /opt/anaconda3/envs/mechir/lib/python3.12/site-packages (from matplotlib) (4.54.1)\n", - "Requirement already satisfied: kiwisolver>=1.3.1 in /opt/anaconda3/envs/mechir/lib/python3.12/site-packages (from matplotlib) (1.4.7)\n", - "Requirement already satisfied: numpy>=1.23 in /opt/anaconda3/envs/mechir/lib/python3.12/site-packages (from matplotlib) (2.1.1)\n", - "Requirement already satisfied: packaging>=20.0 in /opt/anaconda3/envs/mechir/lib/python3.12/site-packages (from matplotlib) (24.1)\n", - "Requirement already satisfied: pillow>=8 in /opt/anaconda3/envs/mechir/lib/python3.12/site-packages (from matplotlib) (11.0.0)\n", - "Requirement already satisfied: pyparsing>=2.3.1 in /opt/anaconda3/envs/mechir/lib/python3.12/site-packages (from matplotlib) (3.2.0)\n", - "Requirement already satisfied: python-dateutil>=2.7 in /opt/anaconda3/envs/mechir/lib/python3.12/site-packages (from matplotlib) (2.9.0)\n", - "Requirement already satisfied: pandas>=1.2 in /opt/anaconda3/envs/mechir/lib/python3.12/site-packages (from seaborn) (2.2.3)\n", - "Requirement already satisfied: tenacity>=6.2.0 in /opt/anaconda3/envs/mechir/lib/python3.12/site-packages (from plotly) (9.0.0)\n", - "Requirement already satisfied: pytz>=2020.1 in /opt/anaconda3/envs/mechir/lib/python3.12/site-packages (from pandas>=1.2->seaborn) (2024.2)\n", - "Requirement already satisfied: tzdata>=2022.7 in /opt/anaconda3/envs/mechir/lib/python3.12/site-packages (from pandas>=1.2->seaborn) (2024.2)\n", - 
"Requirement already satisfied: six>=1.5 in /opt/anaconda3/envs/mechir/lib/python3.12/site-packages (from python-dateutil>=2.7->matplotlib) (1.16.0)\n", "Note: you may need to restart the kernel to use updated packages.\n" ] } ], "source": [ - "%pip install -U -q git+https://github.com/Parry-Parry/MechIR.git\n", + "%pip install -U -q git+https://github.com/Parry-Parry/MechIR.git@sae\n", "%pip install -q transformer_lens\n", "%pip install -q matplotlib seaborn plotly" ] }, { "cell_type": "code", - "execution_count": null, + "execution_count": 1, "metadata": { "id": "m1zmJUINr_8e", "outputId": "8cb4185e-d3e7-4864-800d-0d59454b22dd" @@ -197,8 +104,9 @@ "name": "stderr", "output_type": "stream", "text": [ - "/opt/anaconda3/envs/mechir/lib/python3.12/site-packages/tqdm/auto.py:21: TqdmWarning: IProgress not found. Please update jupyter and ipywidgets. See https://ipywidgets.readthedocs.io/en/stable/user_install.html\n", - " from .autonotebook import tqdm as notebook_tqdm\n" + "/opt/anaconda3/envs/mechir/lib/python3.10/site-packages/tqdm/auto.py:21: TqdmWarning: IProgress not found. Please update jupyter and ipywidgets. See https://ipywidgets.readthedocs.io/en/stable/user_install.html\n", + " from .autonotebook import tqdm as notebook_tqdm\n", + "2025-04-08 12:01:05.170 WARNING streamlit.runtime.scriptrunner_utils.script_run_context: Thread 'MainThread': missing ScriptRunContext! 
This warning can be ignored when running in bare mode.\n" ] } ], @@ -226,7 +134,7 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 2, "metadata": { "id": "5JNgLgOpr_8e", "outputId": "a654dcb5-0f12-4204-c2d6-33d604fa45b1" @@ -256,11 +164,28 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 3, "metadata": { "id": "2SvOoLZUr_8f" }, - "outputs": [], + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "WARNING:root:Support for BERT in TransformerLens is currently experimental, until such a time when it has feature parity with HookedTransformer and has been tested on real research tasks. Until then, backward compatibility is not guaranteed. Please see the docs for information on the limitations of the current implementation.\n", + "If using BERT for interpretability research, keep in mind that BERT has some significant architectural differences to GPT. For example, LayerNorms are applied *after* the attention and MLP components, meaning that the last LayerNorm in a block cannot be folded.\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Moving model to device: mps\n", + "Loaded pretrained model crystina-z/monoELECTRA_LCE_nneg31 into HookedEncoder\n" + ] + } + ], "source": [ "cat_model_name = \"crystina-z/monoELECTRA_LCE_nneg31\"\n", "cat_model = Cat(cat_model_name)" @@ -279,7 +204,7 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 4, "metadata": { "id": "6s20bffar_8g", "outputId": "2ea72393-f32a-4577-e66a-fa3063801bed" @@ -361,7 +286,7 @@ "4 1 5472 1 0" ] }, - "execution_count": 3, + "execution_count": 4, "metadata": {}, "output_type": "execute_result" } @@ -374,7 +299,7 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 5, "metadata": { "id": "KDhYqYk4r_8g", "outputId": "71a93c7d-9de3-462c-f4ba-028b2a0719b6" @@ -408,7 +333,7 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 6, 
"metadata": { "id": "DPaJz6i7r_8g", "outputId": "bf0f8323-8973-48c3-d409-cba9943cd24c" @@ -458,7 +383,7 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 7, "metadata": { "id": "5mePEZwlr_8h", "outputId": "c86a1970-a6d3-4356-ae45-f6641ba06c3e" @@ -521,7 +446,7 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 8, "metadata": { "id": "FlzOPo-4r_8h" }, @@ -572,13 +497,13 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 9, "metadata": { "id": "M7Ht999zr_8h" }, "outputs": [], "source": [ - "@perturbation\n", + "@perturbation(perturb_type=\"replace\")\n", "def my_perturbation(text: str, query: str = None) -> str:\n", " \"\"\"\n", " A simple perturbation function that replaces the first word of the text with 'REPLACED'.\n", @@ -608,31 +533,31 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 10, "metadata": { "id": "DjASsJm5r_8i" }, "outputs": [], "source": [ - "data_collator = DotDataCollator(model.tokenizer, my_perturbation)\n", + "data_collator = DotDataCollator(model.tokenizer, my_perturbation, perturb_type=\"replace\")\n", "dataloader = DataLoader(dataset, batch_size=1, collate_fn=data_collator)" ] }, { "cell_type": "code", - "execution_count": null, + "execution_count": 11, "metadata": { "id": "I1xQgcD4r_8i" }, "outputs": [], "source": [ - "cat_data_collator = CatDataCollator(cat_model.tokenizer, my_perturbation)\n", + "cat_data_collator = CatDataCollator(cat_model.tokenizer, my_perturbation, perturb_type=\"replace\")\n", "cat_dataloader = DataLoader(dataset, batch_size=1, collate_fn=cat_data_collator)" ] }, { "cell_type": "code", - "execution_count": null, + "execution_count": 12, "metadata": { "id": "cbOf8RGBr_8i", "outputId": "6fc2f9ca-cd05-4a73-e9dd-4009b0b1e014" @@ -644,28 +569,20 @@ "text": [ "Triplet 1:\n", "Query: [CLS] measurement of dielectric constant of liquids by the use of microwave techniques [SEP]\n", - "Original Document: [CLS] broadband 
millimetre wave paramagnetic resonance spectrometer the specimen and waveguide which can be cooled by means of a cryostat are placed between close pole pieces giving high uniform magnetic fields design details and some measurements on zero field splittings are given X X X X X X X X X X X X X X [SEP]\n", - "Perturbed Document: [CLS] measurement of dielectric constant of liquids by the use of microwave techniques broadband millimetre wave paramagnetic resonance spectrometer the specimen and waveguide which can be cooled by means of a cryostat are placed between close pole pieces giving high uniform magnetic fields design details and some measurements on zero field splittings are given [SEP]\n", + "Original Document: [CLS] broadband millimetre wave paramagnetic resonance spectrometer the specimen and waveguide which can be cooled by means of a cryostat are placed between close pole pieces giving high uniform magnetic fields design details and some measurements on zero field splittings are given [SEP]\n", + "Perturbed Document: [CLS] replaced millimetre wave paramagnetic resonance spectrometer the specimen and waveguide which can be cooled by means of a cryostat are placed between close pole pieces giving high uniform magnetic fields design details and some measurements on zero field splittings are given [SEP]\n", "==================================================\n", "Triplet 1:\n", "Query: [CLS] measurement of dielectric constant of liquids by the use of microwave techniques [SEP]\n", - "Original Document: [CLS] microwave measurements of dielectric absorption in dilute solutions X X X X X X X X X X X X X X [SEP]\n", - "Perturbed Document: [CLS] measurement of dielectric constant of liquids by the use of microwave techniques microwave measurements of dielectric absorption in dilute solutions [SEP]\n", + "Original Document: [CLS] microwave measurements of dielectric absorption in dilute solutions [SEP]\n", + "Perturbed Document: [CLS] replaced measurements of 
dielectric absorption in dilute solutions [SEP]\n", "==================================================\n" ] - }, - { - "name": "stderr", - "output_type": "stream", - "text": [ - "/opt/anaconda3/envs/mechir/lib/python3.12/site-packages/transformers/tokenization_utils_base.py:2855: UserWarning: `max_length` is ignored when `padding`=`True` and there is no truncation strategy. To pad to max length, use `padding='max_length'`.\n", - " warnings.warn(\n" - ] } ], "source": [ - "for i, batch in enumerate(cat_data_collator):\n", - " pretty_print_triplets(batch, cat_model.tokenizer)\n", + "for i, batch in enumerate(dataloader):\n", + " pretty_print_triplets(batch, model.tokenizer)\n", "\n", " # stop after 2 batches\n", " if i == 1:\n", @@ -689,7 +606,7 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 13, "metadata": { "id": "GgPaKaKmr_8i" }, @@ -721,7 +638,7 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 14, "metadata": { "id": "UYvnhDmqr_8j" }, @@ -746,22 +663,54 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 15, "metadata": { "id": "UE3pic6kr_8j" }, - "outputs": [], + "outputs": [ + { + "data": { + "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAsAAAAIjCAYAAAAN/63DAAAAOnRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjEwLjEsIGh0dHBzOi8vbWF0cGxvdGxpYi5vcmcvc2/+5QAAAAlwSFlzAAAPYQAAD2EBqD+naQAAS09JREFUeJzt3QucjPX////XsqwzIaySM5EQSkoHkWN9VDpSKFE+6UAiJSUVUUgnHT6hD6X6JJU+KdE5n5xyCClSlGMKOZ/md3u+//9rvrNrrbVmd2b3/bjfbmPNzLXXdc1cMzvPeV2v630lhEKhkAEAAACeyBPrFQAAAACyEwEYAAAAXiEAAwAAwCsEYAAAAHiFAAwAAACvEIABAADgFQIwAAAAvEIABgAAgFcIwAAAAPAKARjIQg899JAlJCRky7IuvPBCdwl89tlnbtn/+c9/smX5Xbt2tUqVKlk827Fjh918881Wrlw599zcddddllONHz/ePYZffvnliK8BxB+9Ry655JIsX45eF3p96HUC4HAEYOAYA0dwKVCggJUvX95atWplY8aMsb///jsqy1m3bp0LzgsXLrR4E8/rlhGPPfaY2449e/a0f//733bDDTekG1RSb+/q1avbPffcY3/++We2rrfvgi9zwSVfvnxWpUoV69y5s/38889RXdZrr71mo0ePNp8oLN94441WtWpV9zrXF8Tzzz/fHnzwwVivGpBlErNu1kDu9PDDD1vlypVt//79tmHDBvfhrEriyJEj7b333rO6deuGpx04cKDde++9xxwyBw8e7AJY/fr1M/x7H3/8sWW19NbtpZdeskOHDlk8mzVrlp199tkZ/mDXY7z77rvd//fs2WPz58934ejzzz+3OXPmWLzJjtdALN1xxx125plnuvfeggUL7MUXX7QPPvjAlixZ4r6MRisAf//99zl678CxWLlypXtOCxYsaDfddJN7b69fv949v48//rh7vwO5EQEYOEZt2rSxRo0aha8PGDDABSvt1vzHP/5hy5cvdx8mkpiY6C5ZadeuXVaoUCHLnz+/xZKqcvFu06ZNVrt27QxPf9JJJ9n1118fvq72iSJFitgTTzxhP/30k6sIx5NYvway2nnnnWdXXnml+78qljVq1HCheMKECe59eDx27txphQsXtqx+n8abUaNGudYg7dWpWLHiYe+X7JTV2wCIRAsEEAUXXXSRPfDAA/brr7/axIkT0+0BnjFjhjVt2tRKlCjhwlTNmjXtvvvuc/epmqxqTPABH+zyDfr41N9Zp04dV4nULkp9oAa/e6T+z4MHD7pptFtTHy4K6WvXrk0xjao+6uFNLXKeR1u3tHqA9YGmCmqFChUsKSnJPVaFx1AolGI6zadXr142depU9/g07WmnnWbTp0/P0POvD+pu3bpZ2bJl3S7cevXquVCUehf66tWrXcUwWPfI/tmM0vMokV9sFi9e7B6/dssHu5BVTduyZUuK31WbjCqLep70GMuUKWMXX3yxq7ZF+vbbb61169ZWvHhxt40vuOAC+/rrrzPdB/7mm2/ao48+aieffLJbv+bNm7vKX2qZWe7GjRvdc5FWpXDFihVu+c8884y7rsqtptMXB61HqVKl3HtB74nMvu9E2zXw4YcfuqCs13rRokWtXbt2tnTp0hS/p22l996qVausbdu2brpOnTq5506vD72Pg9dI8JpOq+c68jnWz0B679PIar32MOh50JeyKVOmHPb4tm7d6l4vwfunWrVqriqbek+LptNj0nbT35UuXbq42zJCz4FeF6nDr+j1mZqeX70u9JwVK1bM/U1Q1TzSW2+9ZQ0bNnSFgNKlS7svkb///nuGtoHo8WlPi/4G6PnR+/qWW26xv/76K8U85s2b51rQtAwtS3vm9L4DMoIKMBAl6ifVh5w+2Lp3757mNPogVqVYbRJqpdCHmoJIEDJq1arlbh80aJD16NHDfZDLOeecE56HQpWq0Ndee637YNGHQ3oUfPQ
B3b9/fxcU9cHSokULV/EJKtUZkZF1i6SQq7D96aefunCqD/uPPvrI9dDqw1CVp0hfffWVCwH//Oc/3Yeh+qo7dOhga9ascUHpSHbv3u0Ch55HhWh9COoDWB+wCgF33nmnW3f1/Pbu3dt92AdtDSeeeGK6j1mB7Y8//gi3QHz33Xeu1UWhRssJKMCpF1VfDBR+tZ21e14///e//4W/BN16663uoEStp0KPtqUet/YaNGjQwE2jvQnavgoQatXIkyePjRs3zoW9L7/80s466yw7VsOGDXPz6du3r23bts2GDx/uwoYCbyCzy9XrT4FIITt1a8kbb7xhefPmtauuuir8hXDo0KGukq75bd++3YUYfQHQF4FjpfAkwetD21jhT6FIQVFV1+eff96FbG27yC9oBw4ccNPpPn0pU0jVttPz89tvv4VfnwppmZHe+1R7D6655hr3etD66nnWc6QvfMHzoHXX86r3isLfKaecYt98842rdKtFIehT1vusffv27nWk+em1/s4777j5ZoSC7yeffOK2f/CF4kj0JUABU8FU66GwredV692xY8fwNHofKBhrW+sL0lNPPeX+xmla/U5620D0eIP5qMKvLzj6EqXf13y0t0l/y1q2bOnew2oz03z15SStLxJAmkIAMmTcuHEqW4bmzp17xGmKFy8eOuOMM8LXH3zwQfc7gVGjRrnrmzdvPuI8NH9No+WldsEFF7j7xo4dm+Z9ugQ+/fRTN+1JJ50U2r59e/j2N998093+1FNPhW+rWLFiqEuXLkedZ3rrpt/XfAJTp0510z7yyCMpprvyyitDCQkJoZUrV4Zv03T58+dPcduiRYvc7U8//XQoPaNHj3bTTZw4MXzbvn37Qk2aNAkVKVIkxWPX+rVr1y7d+UVOq/mmvpx77rmhP/74I8W0u3btOuz3X3/9dTf9F198keL1cdtttx1xmYcOHQpVr1491KpVK/f/yPlXrlw5dPHFFx/2ely9evVRXwO1atUK7d27N3y7tr1uX7JkyTEvNy0vvPBCivkFateuHbrooovC1+vVq5fh5z9S8DheeeUV995Zt25d6IMPPghVqlTJvZb0uvz7779DJUqUCHXv3j3F727YsME975G367Wq+d17772HLUvrF/k6Tu/5jlw3/czI+zR4Xb399tvh27Zt2xZKTk5O8bdjyJAhocKFC4d+/PHHFL+vdc6bN29ozZo1Kd5nw4cPD09z4MCB0HnnnXfE92qk77//PlSwYEE3bf369UN33nmnm+fOnTtTTLd169ZQ0aJFQ40bNw7t3r07xX3Ba0bvuzJlyoTq1KmTYppp06a5+Q8aNOio2+DLL790t0+aNCnF7dOnT09x+zvvvHPUv8dAemiBAKJI1aL0RoMIqh/vvvtupg8YU9VYlZGM0pHyqqgG1EOZnJxs//3vfy0raf6q/qmCE0nVV2Ve7UqNpKq0jkIPqEquXaxHO8pfy1Hl7rrrrgvfpgqRlqveRh2wllmNGzd21V1dpk2b5qrpquqqsq3KcyCykq5KsarGOthOItsbtP1VddXBhGlRVV7VQVXTVEHUfHRRK4naFr744otMvW70eonsDw6q98Fze7zLveKKK1wbhCq+AR1ItmzZMlfpjHz8ev60rMxQ9VEVPx3wptYGrZ9aXdSTr22kir9eB8H666LXoLaj9kSkptFAskp671Ot/+WXXx6+rte53qeqcOrAWtFeDG2nE044IcXj0ftEbU3aJsHrX8995GPRY7799tsztJ6q5mr7q0qtCqqqtZdddpmrWOvA1oCeX/1tU7VVbQmRgj0cquarMqu9OJHTaFudeuqprr3kaNtAj1utHKqERz5u7ZnQ39dgOwZ/S/W+1J4a4FjRAgFEkQJXWn1zAYWBl19+2e0C1geJwoXCg0Kpdjln9MCsYznYKfWBWvqwUi9hZvpfj4X6KPVBHxm+Rbtog/sjaRdvavrwT933l9Zy9BhTP39HWs6xUG+hAkfkB7n6mLW
9tB2DkKFh0dTbOnny5MMOHNIu9YBaD7RrWj2d+kBX76OCj3qHJQiG6e2+1vz0vByL1M9t8PvBc3u8y9XzpNey2iCGDBniblMYVjDT6zugFhrtrtfBa+qRVb+xWociR05Jj9pvFAoV8LRMbeOgFzt4DEfaja+QGUm/p3aYrJLe+1Tvv9THBug5Eb0v9YVOj0e95Udq0wleZ3p96wtt6lYNvU4zSstW+4iCtb60KFTqtapWJ7X66D0QtJtoux1J8F5La9kKwGrTONo20OPWa+1If0eDx632ELVI6X2ndhW1QSm460ucvnwAR0MABqJEfYP6w60PtyNRpVCVG1UxVA1R75yCgj601TusD/ajOZa+3Yw60sk69IGYkXWKhiMtJ/UBc7GmoCfajkEAvvrqq11/pvqb1eusMKKKqQJeZOVU0ynAqUdT23vEiBGuV1V9i+oXDabV7UcaAi8zPalHe26jsVz1uqriqWqi5qEwrOdKQTWg3mkFKe0B0ePXlwiFl7Fjx7ovhUdz+umnp/hCEil4DApywYGKkVKPxqKQlNEvnUd7j2TF+1SPR1XQfv36pXl/EJijSa8TPce6NGnSxJo1a2aTJk064nN+vNLaBnrcCr9ablqCLwTBSX7UY//++++74wu0h+DJJ590t2W2dxv+IAADUaIPXtFBHenRH3wFA110QJVOznD//fe7UKwPmmifOS717maFHh0wFll1U2UvraPGVdEJqpNyLOsWHFyj3aaRVeAffvghfH80aD6qlOmDM/LDNNrLiTxwJ6j2B1XUmTNnukqUKpSBI+3mV7VOu4h1UTVLB7+ptUIBOGgBUbUyq0JHWqKxXFXfdPBS0Abx448/pjk0WcmSJV1Q1kXPoUKxDo7LSADOyGNQeDqe5+5Ir/Gg+p36fZKZPQx6/+l9GLksPV8SHKinx6Pn52iPRa9vvf40bWTo0wgcxyMY6lEH3AXrE7S2HOlLfvBe07JTV+J1W0bei1qO/m6ce+65GfoSoVYjXfQe0mgUOrhTe2KO9/WE3I8eYCAKdAS1dv1qd2EwlE9a0jqDWFBx27t3r/sZjIOZ0WGMjubVV19N0Zesqok+1BS4Ij90VDXZt29f+DbtBk09XNqxrJt276s6FgyBFVDFTx/8kcs/HlqO+iYj+08VUp9++mkXCLSrNJpUbRINtRZZXU1dqU59NjE9F5HtEEFYU5tIsO3VFqFtoSPig4AdafPmzZYVorFc9WTqy58qvwog2v2vUBwp9bBw2j4KU8HjPx5atgK8vlCm1ROa0edOr/HU2ykyAAa9t8E21Wgfx0o94NoLENBoGHqf6m9BUL3W3oLZs2e7ymZqev8FX8T0+tf/NdpF5Hrp9Z8RGuEjrecrOEYgaGfQiAv6IquRHdTnHil47Ss06zWtin7kNlW/v0Y6UQvR0ehxa/2DVppIepzB3x598Uz9nkv9txRIDxVg4Bjpj7mqi/pjrCF+FH51gIiqGzoTXOoDRCKpB1IfoPog0PSqAD733HOuD05DAQUftAoT+hDRB44+kHUQT+SwW8dCFTfNWxU3ra+CmUJH5FBtqpYoGGuXvT6AtJta4xlHHpR2rOt26aWXul2oqm6rr1GBUbu9tftbY5umnndmqU/xhRdecMOeadxVVdD0WDRckh5r6h7kY6EhqIJxnfXlYNGiRW5Z2q0ftD8odKmKqZ5JBQn1fupxRo5NK/oSou2s/mE9Fwp/qnTNnTvX7bYVVbDVFqAvBzo4SdtM89N6aA+BlhUE8GiK1nLV466DqfSaViCNHPJKNPSbejUVuPW61EFTwbBwx0vrqBConmJV1dWSod3lGkZP7UaqKKb+MpYWrZu+TPXp08cN5aXtpNeynhdVGlXV1hdZrb+CfhBEj4XaFzQ0oLa9DjZ75ZVX3HtTw6EF1E6jvycaNlGvba2XDvrTWe/0nOk9pdeh1k2PTccU6LZgTOG0Qnxa1IKj9416tYO9QjpwU4FcjzE4I56eX3151d8KPS/
qtVVVXO8JDdmmgxF18Knmp9ePvnjqgMRgGDS9LzUM4dHo97QnQUFb7TQK3pqv9qjoADnNS+8hLU+vMx1MqL8len/poD2tp74UAEeV7hgRAA4bBim4aNiucuXKuSGiNKxU5HBbRxoGbebMmaH27duHypcv735fP6+77rrDhjp699133RBSiYmJKYYy0vBKp512Wprrd6QhsDQc14ABA9zwRBruSMM8/frrr4f9/pNPPumGTEtKSnJDfc2bN++weaa3bqmHQRMNTdW7d2/3OPPly+eG2hoxYkSKobZE80lreLAjDc+W2saNG0M33nhjqHTp0u55Pf3009Mc/ul4hkHLkyePew61vSKHa5PffvstdPnll7thuDTk1lVXXeWG6tLv6TUgGobsnnvucUOBaTgpDXGl/z/33HOHLfu7774LXXHFFaFSpUq57aF1ufrqq93rJzPDoL311lsp5q/fSWuIrIwsNz16DwRDakUOSxfQkHhnnXWWe5403amnnhp69NFH3fBZ6TnS4zjStBrOTduhQIECoapVq4a6du3qXs8Bvab0/Kdlx44doY4dO7p11DIjX9OrVq0KtWjRwj03ZcuWDd13332hGTNmpDkM2pHep8Fr8KOPPgrVrVvXzUvPQ1qPTe8fvXerVavmXtd6fZ9zzjmhJ554IsVztmXLltANN9wQKlasmHvc+r+2ZUaGQfv666/de09Dl+l39T495ZRT3HOmx5vae++959ZB20/L0/bU35hIb7zxhhvSTY+tZMmSoU6dOrn3SKT0toG8+OKLoYYNG7rl6P2i93S/fv3c+0oWLFjg3otaVy1H781LLrkkxXYG0pOgf44ekwEAAIDcgR5gAAAAeIUADAAAAK8QgAEAAOAVAjAAAAC8QgAGAACAVwjAAAAA8AonwsgAnWJVZ+7RgPrRPk0tAAAAjp9G9tVJUXSGTZ3gJz0E4AxQ+K1QoUKsVwMAAABHsXbtWnfmzfQQgDMgOJWqnlCdZhEAAADxZfv27a5gGeS29BCAMyBoe1D4JQADAADEr4y0q3IQHAAAALxCAAYAAIBXCMAAAADwCj3AURx648CBA3bw4MFYrwqiIG/evJaYmMiwdwAA5EIE4CjYt2+frV+/3nbt2hXrVUEUFSpUyJKTky1//vyxXhUAABBFBOAonCRj9erVrmKogZcVlqga5vxqvr7UbN682W3b6tWrH3VAbQAAkHMQgI+TgpJCsMadU8UQuUPBggUtX7589uuvv7ptXKBAgVivEgAAiBLKWlFChTD3YZsCAJA78QkPAAAArxCAAQAA4BUCcBbSsXDZecmJKlWqZKNHjw5f1wGEU6dOjek6AQCA3I0A7LGuXbu6wBlcSpUqZa1bt7bFixfHbJ00nFybNm1itnwAAJD7EYA9p8Cr0KnLzJkz3ckfLrnkkpitT7ly5SwpKSlmywcAALkfAdhzCpsKnbrUr1/f7r33Xlu7dq0bA1f69+9vNWrUcEO8ValSxR544AHbv39/+PcXLVpkzZo1s6JFi1qxYsWsYcOGNm/evPD9X331lZ133nluWDENFXfHHXfYzp07j7g+kS0Qv/zyi7s+ZcoUtwytQ7169Wz27NkpfudYlwEAAPxGAEbYjh07bOLEiVatWjXXDiEKtuPHj7dly5bZU089ZS+99JKNGjUq/DudOnWyk08+2ebOnWvz5893AVrj58qqVatchblDhw6ureKNN95wYbVXr17HtF7333+/9e3b1xYuXOjC+HXXXedOOx3NZQAAAI+EcFTbtm0L6anSz9R2794dWrZsmfuZmp7d7Lwcqy5duoTy5s0bKly4sLvoMSYnJ4fmz59/xN8ZMWJEqGHDhuHrRYsWDY0fPz7Nabt16xbq0aNHitu+/PLLUJ48ecLPV8WKFUOjRo2KeM4s9M4777j/r1692l1/+eWXw/cvXbrU3bZ8+fIMLyOz0tu2AAAg5+S11KgAe06tBaqs6jJnzhxr1aqVOwhNZ0ATVVTPPfd
c1yJRpEgRGzhwoK1Zsyb8+3369LGbb77ZWrRoYcOGDXMV2cj2CFWP9XvBRfMPTh+dUXXr1g3/Pzk52f3ctGlTVJcBAAD8QQD2XOHChV3Lgy5nnnmmvfzyy65/Vq0O6rVVi0Pbtm1t2rRp9t1337l2BJ0aOPDQQw/Z0qVLrV27djZr1iyrXbu2vfPOO+GWiltuuSUcsHVRYP3pp5+satWqGV7HoKVC1BMsCrjRXAYAAPBHYqxXAPFFAVOnAN69e7d98803VrFiRRd6A0FlOJL6cnXp3bu3688dN26cXX755dagQQPXO6xwnVWyYxkAACB3oQLsub1799qGDRvcZfny5Xb77be7quqll15q1atXd+0OkydPdq0NY8aMCVd3RSFZB5t99tlnLhh//fXX7mC4WrVqhUeQUIjWNKrMqir77rvvRvUAtexYBgAAueGEWZyg6/9QAc5C7pCuODd9+vRwX61GfDj11FPtrbfesgsvvNDdpqquwqSCstocNAya2h4kb968tmXLFuvcubNt3LjRSpcubVdccYUNHjw43Lv7+eefuwqyhinTMW5qS7jmmmuitv7ZsQwAALJKaFKcJ8Xj8ZqZdYzPMJSgI+FivRLxbvv27Va8eHHbtm2bG+s20p49e9zBVpUrV7YCBQrEbB0RfWxbAEBWUpU0Vwdgy94AnF5eS40WCAAAAHiFAAwAAACvEIABAADgFQIwAAAAvEIABgAAgFcIwAAAAPAKARgAAABeIQADAADAKwRgAAAAeIVTIWel17L57C5xerrB49G1a1fbunWrTZ06Narz/eyzz6xZs2b2119/WYkSJaI6bwAAEN+oAHtM4TIhIcFd8ufPb9WqVbOHH37YDhw4cFzzvOyyy6K6ngAAANFEBdhzrVu3tnHjxtnevXvtv//9r912222WL18+GzBgwDHN5+DBgy5IR0u05wcAABCgAuy5pKQkK1eunFWsWNF69uxpLVq0sPfee88F4r59+9pJJ51khQsXtsaNG7u2gcD48eNd64CmrV27tpvPTTfdZBMmTLB33303XFnW7+ii/6uVIbBw4UJ32y+//HLE+a1ZsyY8/eDBg+3EE0+0YsWK2a233mr79u0L33fo0CEbOnSoVa5c2QoWLGj16tWz//znPykep8J9jRo13P1qfQiWCwAA/EMFGCkoIG7ZssV69eply5Yts8mTJ1v58uXtnXfecdXiJUuWWPXq1d20u3btsscff9xefvllK1WqlCUnJ9vu3btt+/btrqosJUuWtG+++SZDy049vzJlyrjbZ86caQUKFHBBWsH1xhtvdPc/+uij7n6F34kTJ9rYsWPdun3xxRd2/fXXu8B8wQUX2Nq1a+2KK65w1e0ePXrYvHnz7O67786y5xAAAMQ3AjCcUCjkguZHH31k1113nQuwqsAq/IqqwdOnT3e3P/bYY+62/fv323PPPecqrpEBWtVjVZWPVVrzE/Unv/LKK1aoUCE77bTTXJ/yPffcY0OGDHG/o/X55JNPrEmTJm76KlWq2FdffWUvvPCCC8DPP/+8Va1a1Z588kl3f82aNV2QV9gGAAD+IQB7btq0aVakSBEXJNVK0LFjR7vyyitdS4JaBiIp2KryGhlM69atG7V1OdL8FIgVfgMKujt27HCVXf1U5fjiiy9O8TtqkTjjjDPc/5cvX+5aOCIFYRkAAPiHAOw59cOqQqrwqWpvYmKivfHGG5Y3b16bP3+++xlJYTmy2puRA9Xy5MkTrjIHFLhTy+j8IikAywcffOD6lSOpjxgAACA1ArDndICbhj+LpMqpRmHYtGmTnXfeecc0PwVp/W4k9eLK+vXr7YQTTggfBJdRixYtcr3FCsjyv//9zwXxChUquB7j4IA5tTukpVatWu7gukiaBwAA8BOjQOAwan3o1KmTde7c2aZMmWKrV6+2OXPmuIPNVGlNT6VKlWzx4sW2YsUK++OPP1ylVwFbYfWhhx6yn376yc0j6MfNCLUzdOvWzR2
Up9EcHnzwQXeQnirLRYsWdf3JvXv3diNQrFq1yhYsWGBPP/20uy4aNULLVd+w1uu1115zLR4AAMBPVICzUg4+M5sOdnvkkUfcaAm///67lS5d2s4++2y75JJL0v297t27u9EaGjVq5NoTPv30U7vwwgvt9ddfd8Osqcf3zDPPdPO+6qqrMrQuzZs3d6M7nH/++a4PWQfpKUwHdDCcqswK6D///LMbTq1BgwZ23333uftPOeUUe/vtt11IVjA+66yz3IFzGrYNAAD4JyEU2ZiZzTRc1YgRI1yvqXaPa6ityLOIadVU7XvppZfcGLLnnnuu61cNhuEKKo6//vprivkqCN17773h66pIagisuXPnuqB0++23W79+/TK8nhrWq3jx4rZt2zY3Dm2kPXv2uAqpxqDVUF3IPdi2AICspMNeQpNy+UmfOmZfzEwvr8VVC8TOnTvdEf7PPvtsmvcPHz7cxowZ48Z3/fbbb12/aqtWrVwwiaRhsRSgg4sCbuST0bJlS3eiBwVtBW5VD1988cUsf3wAAACIPzFtgWjTpo27pEXV39GjR9vAgQOtffv27rZXX33VypYta1OnTrVrr702PK36QI807uykSZNcD6nGkdUBWhpHVgdgjRw50p0UAQAAAH6J24PgtOt5w4YN7tS8AZW1NZ7r7NmzU0w7bNgwNz6tRi9QhffAgQPh+zStekcVfgOqIutgqL/++ivNZavPVJXjyAsAAAByh7g9CE7hV1TxjaTrwX1yxx13uAOeglPuDhgwwLVBqMIbzEc9nKnnEdwXDMuVuod48ODBWfK4AAAAEFtxG4Azqk+fPuH/a4QBVXpvueUWF2IzeyIEhejI+aoCrGG80hPDYwmRRdimAADkTnHbAhH09G7cuDHF7bp+pH5fUYuEWiB++eWX8HzSmkfkMlJTcNbRg5GXI8mXL5/7qdPxIncJtmmwjQEAQO4QtxVgtS0ooM6cOdPq168frsRqNAiNJ3skOsBNJ0goU6aMu96kSRO7//773QkZgiAzY8YMq1mzZprtD8dKpwrWuLM6a5oUKlTomE/ni/ir/Cr8aptq26Y+HTQAAMjZYhqAdaKElStXpjjwTQFW/bw6ecFdd93lTpigcX8ViB944AErX758eKxgHeCmQNysWTM3EoSu62QH119/fTjcduzY0fXz6kxi/fv3t++//96eeuopGzVqVNQeR1BJDkIwcgeF3/T2NgAAgJwppgF43rx5LrwGgr7bLl26uFPV6mQVGitYw5XpRBhNmza16dOnh09KoFaFyZMnu3F9NXKDQrICcGT/rkaO+Pjjj92JMBo2bOjOaDZo0KCoDoGmim9ycrKrOqvSjJxPewuo/AIAkDvF9ExwOcWxnFkEAAAgIzgTnKdnggMAAACyGwEYAAAAXiEAAwAAwCsEYAAAAHiFAAwAAACvEIABAADgFQIwAAAAvEIABgAAgFcIwAAAAPAKARgAAABeIQADAADAKwRgAAAAeIUADAAAAK8QgAEAAOAVAjAAAAC8QgAGAACAVwjAAAAA8AoBGAAAAF4hAAMAAMArBGAAAAB4hQAMAAAArxCAAQAA4BUCMAAAALxCAAYAAIBXCMAAAADwCgEYAAAAXiEAAwAAwCsEYAAAAHiFAAwAAACvEIABAADgFQIwAAAAvEIABgAAgFcIwAAAAPAKARgAAABeIQADAADAKwRgAAAAeIUADAAAAK8QgAEAAOAVAjAAAAC8QgAGAACAVwjAAAAA8AoBGAAAAF4hAAMAAMArBGAAAAB4hQAMAAAArxCAAQAA4BUCMAAAALxCAAYAAIBX4j4A//3333bXXXdZxYoVrWDBgnbOOefY3Llzw/eHQiEbNGiQJScnu/tbtGhhP/30U4p5/Pnnn9apUycrVqyYlShRwrp162Y7duyIwaMBAABArMV9AL755pttxowZ9u9//9uWLFliLVu2dCH3999/d/cPHz7cxowZY2PHjrVvv/3WChcubK1atbI9e/a
E56Hwu3TpUjefadOm2RdffGE9evSI4aMCAABArCSEVEKNU7t377aiRYvau+++a+3atQvf3rBhQ2vTpo0NGTLEypcvb3fffbf17dvX3bdt2zYrW7asjR8/3q699lpbvny51a5d21WNGzVq5KaZPn26tW3b1n777Tf3+0ezfft2K168uJu3qsgAAADHKyHBLDQpwXK1jtkXM48lr8V1BfjAgQN28OBBK1CgQIrb1erw1Vdf2erVq23Dhg2uIhzQA2/cuLHNnj3bXddPtT0E4Vc0fZ48eVzFOC179+51T2LkBQAAALlDXAdgVX+bNGniKr3r1q1zYXjixIku1K5fv96FX1HFN5KuB/fpZ5kyZVLcn5iYaCVLlgxPk9rQoUNdkA4uFSpUyLLHCAAAgOwV1wFY1PurLo2TTjrJkpKSXL/vdddd5yq4WWXAgAGufB5c1q5dm2XLAgAAQPaK+wBctWpV+/zzz92oDQqic+bMsf3791uVKlWsXLlybpqNGzem+B1dD+7Tz02bNh3WWqGRIYJpUlPQVu9I5AUAAAC5Q9wH4IBGd9BQZ3/99Zd99NFH1r59e6tcubILsTNnzgxPp35d9faqdUL0c+vWrTZ//vzwNLNmzbJDhw65XmEAAAD4JdHinMKuWiBq1qxpK1eutHvuucdOPfVUu/HGGy0hIcGNEfzII49Y9erVXSB+4IEH3MgOl112mfv9WrVqWevWra179+5uqDRVj3v16uVGiMjICBAAAADIXeI+AKsHVz25GrJMB6516NDBHn30UcuXL5+7v1+/frZz5043rq8qvU2bNnXDnEWOHDFp0iQXeps3b+56hzUP9RIDAADAP3E9DnC8YBxgAAAQbYwDHF25ZhxgAAAAINoIwAAAAPAKARgAAABeIQADAADAKwRgAAAAeIUADAAAAK8QgAEAAOAVAjAAAAC8QgAGAACAVwjAAAAA8AoBGAAAAF4hAAMAAMArBGAAAAB4hQAMAAAArxCAAQAA4BUCMAAAALxCAAYAAIBXCMAAAADwCgEYAAAAXiEAAwAAwCsEYAAAAHiFAAwAAACvEIABAADgFQIwAAAAvEIABgAAgFcIwAAAAPAKARgAAABeIQADAADAKwRgAAAAeIUADAAAAK8QgAEAAOAVAjAAAAC8QgAGAACAVwjAAAAA8AoBGAAAAF4hAAMAAMArBGAAAAB4hQAMAAAArxCAAQAA4BUCMAAAALxCAAYAAIBXCMAAAADwCgEYAAAAXiEAAwAAwCsEYAAAAHiFAAwAAACvEIABAADgFQIwAAAAvBL3AbhSpUqWkJBw2OW2225z91944YWH3XfrrbemmMeaNWusXbt2VqhQIStTpozdc889duDAgRg9IgAAAMRSosW5uXPn2sGDB8PXv//+e7v44ovtqquuCt/WvXt3e/jhh8PXFXQD+l2F33Llytk333xj69evt86dO1u+fPnssccey8ZHAgAAgHgQ9wH4xBNPTHF92LBhVrVqVbvgggtSBF4F3LR8/PHHtmzZMvvkk0+sbNmyVr9+fRsyZIj179/fHnroIcufP3+WPwYAAADEj7hvgYi0b98+mzhxot10002u1SEwadIkK126tNWpU8cGDBhgu3btCt83e/ZsO/300134DbRq1cq2b99uS5cuTXM5e/fudfdHXgAAAJA7xH0FONLUqVNt69at1rVr1/BtHTt2tIoVK1r58uVt8eLFrrK7YsUKmzJlirt/w4YNKcKvBNd1X1qGDh1qgwcPztLHAgAAgNjIUQH4X//6l7Vp08aF3UCPHj3C/1elNzk52Zo3b26rVq1yrRKZoSpynz59wtdVAa5QocJxrj0AAADiQY4JwL/++qvr4w0qu0fSuHFj93PlypUuAKs3eM6cOSmm2bhxo/t5pL7hpKQkdwEAAEDuk2N6gMeNG+eGMNOIDulZuHCh+6lKsDRp0sSWLFlimzZtCk8zY8YMK1asmNWuXTuL1xoAAADxJkdUgA8dOuQCcJc
uXSwx8f9WWW0Or732mrVt29ZKlSrleoB79+5t559/vtWtW9dN07JlSxd0b7jhBhs+fLjr+x04cKAbR5gqLwAAgH9yRABW64NOZqHRHyJpCDPdN3r0aNu5c6fr0+3QoYMLuIG8efPatGnTrGfPnq4aXLhwYRekI8cNBgAAgD8SQqFQKNYrEe90EFzx4sVt27ZtrnUCAADgeGlE19Ck/xvWNVfqGIrLvJZjeoABAACAaCAAAwAAwCsEYAAAAHiFAAwAAACvEIABAADgFQIwAAAAvEIABgAAgFcIwAAAAPAKARgAAABeIQADAADAKwRgAAAAeIUADAAAAK8QgAEAAOAVAjAAAAC8QgAGAACAVwjAAAAA8AoBGAAAAF4hAAMAAMArBGAAAAB4hQAMAAAArxCAAQAA4BUCMAAAALxCAAYAAIBXCMAAAADwSqYC8M8//xz9NQEAAADiNQBXq1bNmjVrZhMnTrQ9e/ZEf60AAACAeArACxYssLp161qfPn2sXLlydsstt9icOXOiv3YAAABAPATg+vXr21NPPWXr1q2zV155xdavX29Nmza1OnXq2MiRI23z5s3RXk8AAAAg9gfBJSYm2hVXXGFvvfWWPf7447Zy5Urr27evVahQwTp37uyCMQAAAJBrAvC8efPsn//8pyUnJ7vKr8LvqlWrbMaMGa463L59++itKQAAABAFiZn5JYXdcePG2YoVK6xt27b26quvup958vx/ebpy5co2fvx4q1SpUjTWEQAAAIhtAH7++eftpptusq5du7rqb1rKlClj//rXv453/QAAAIDYB+CffvrpqNPkz5/funTpkpnZAwAAAPHVA6z2Bx34lppumzBhQjTWCwAAAIifADx06FArXbp0mm0Pjz32WDTWCwAAAIifALxmzRp3oFtqFStWdPcBAAAAuSoAq9K7ePHiw25ftGiRlSpVKhrrBQAAAMRPAL7uuuvsjjvusE8//dQOHjzoLrNmzbI777zTrr322uivJQAAABDLUSCGDBliv/zyizVv3tydDU4OHTrkzv5GDzAAAADiWUIoFApl9pd//PFH1/ZQsGBBO/30010PcG60fft2K168uG3bts2KFSsW69UBAAC5QEKCWWhSguVqHTMdM7M0r2WqAhyoUaOGuwAAAAA5RaYCsHp+darjmTNn2qZNm1z7QyT1AwMAAAC5JgDrYDcF4Hbt2lmdOnUsQTV8AAAAILcG4MmTJ9ubb75pbdu2jf4aAQAAAPE2DFr+/PmtWrVq0V8bAAAAIB4D8N13321PPfWUHccAEgAAAEDOaYH46quv3EkwPvzwQzvttNMsX758Ke6fMmVKtNYPAAAAiH0ALlGihF1++eXRXRMAAAAgXgPwuHHjLLv8/vvv1r9/f1dt3rVrl+s91vIbNWrk7lcbxoMPPmgvvfSSbd261c4991x7/vnnrXr16uF5/Pnnn3b77bfb+++/b3ny5LEOHTq4Fo4iRYpk2+MAAABADu4BlgMHDtgnn3xiL7zwgv3999/utnXr1tmOHTuitnJ//fWXC7RqsVAAXrZsmT355JN2wgknhKcZPny4jRkzxsaOHWvffvutFS5c2Fq1amV79uwJT9OpUydbunSpzZgxw6ZNm2ZffPGF9ejRI2rrCQAAgFx+KuRff/3VWrdubWvWrLG9e/e6UyJXqVLFjQ+s6wqj0XDvvffa119/bV9++WWa92vVy5cv7w7K69u3r7tNp78rW7asG6f42muvteXLl1vt2rVt7ty54arx9OnT3RBuv/32m/v9o+FUyAAAINo4FXJ0HUtey1QFWEFXYVIV2oIFC4ZvV1+wzg4XLe+9955bzlVXXWVlypSxM844w7U6BFavXm0bNmywFi1ahG/TA2/cuLHNnj3bXddP9SwH4Vc0vVohVDFOi0K8nsTICwAAAHKHTAVgVWQHDhzoxgOOVKlSJdezGy0///xzuJ/3o48+sp49e9odd9xhEyZMcPcr/IoqvpF0PbhPPxW
eIyUmJlrJkiXD06Q2dOhQF6SDS4UKFaL2mAAAAJADA/ChQ4fs4MGDh92uloKiRYtGY73Cy2nQoIE99thjrvqrvt3u3btHrcXiSAYMGODK58Fl7dq1Wbo8AAAAxHkAbtmypY0ePTp8PSEhwR38ptEYonl65OTkZNe/G6lWrVqu91jKlSvnfm7cuDHFNLoe3KefmzZtOuwAPo0MEUyTWlJSkusdibwAAADA4wCskRh0cJrCqUZb6NixY7j94fHHH4/aymkEiBUrVqS4TQfcVaxY0f2/cuXKLsRG9h2rX1e9vU2aNHHX9VPDo82fPz88zaxZs1x1Wb3CAAAA8EumxgE++eSTbdGiRTZ58mRbvHixq/5269bNDTcWeVDc8erdu7edc845rgXi6quvtjlz5tiLL77oLkHl+a677rJHHnnE9QkrED/wwANuZIfLLrssXDHWiBVB68T+/futV69eboSIjIwAAQAAgNwlU8OgZSeN26ue3J9++skF3D59+rgwGwhOhKFQrEpv06ZN7bnnnrMaNWqEp1G7g0Jv5IkwNHZwRk+EwTBoAAAg2hgGLbqOJa9lKgC/+uqr6d7fuXNny00IwAAAINoIwLHLa4mZHQc4ktoKdJpiDYtWqFChXBeAAQAA4PlBcDoBRuRFPcA6WE3tB6+//nr01xIAAACIZQBOiw5CGzZs2GHVYQAAACBXBuDgDGvr1q2L5iwBAACAqMpUD/B7772X4rqOo1u/fr0988wzbuxeAAAAIFcF4GCM3YDG4z3xxBPtoosucifJAAAAAHJVANZZ1AAAAADzvQcYAAAAyJUVYJ2NLaNGjhyZmUUAAAAA8ROAv/vuO3fRCTBq1qzpbvvxxx8tb9681qBBgxS9wQAAAECOD8CXXnqpFS1a1CZMmGAnnHCCu00nxLjxxhvtvPPOs7vvvjva6wkAAABERUJIY5gdo5NOOsk+/vhjO+2001Lc/v3331vLli1z3VjAx3JuaQAAgIzQjvLQpFy+t7zjMcfMbMlreTK7gM2bNx92u277+++/MzNLAAAAIFtkKgBffvnlrt1hypQp9ttvv7nL22+/bd26dbMrrrgi+msJAAAAxLIHeOzYsda3b1/r2LGjOxDOzSgx0QXgESNGRGvdAAAAgPjoAQ7s3LnTVq1a5f5ftWpVK1y4sOVG9AADAIBoowc4h/UAB9avX+8u1atXd+H3OLI0AAAAkC0yFYC3bNlizZs3txo1aljbtm1dCBa1QDAEGgAAAHJdAO7du7fly5fP1qxZY4UKFQrffs0119j06dOjuX4AAABA7A+C0xjAH330kZ188skpblcrxK+//hqtdQMAAADiowKsg98iK7+BP//805KSkqKxXgAAAED8BGCd7vjVV18NX09ISLBDhw7Z8OHDrVmzZtFcPwAAACD2LRAKujoIbt68ebZv3z7r16+fLV261FWAv/766+iuIQAAABDrCnCdOnXsxx9/tKZNm1r79u1dS4TOAPfdd9+58YABAACAXFMB1pnfWrdu7c4Gd//992fNWgEAAADxUgHW8GeLFy/OmrUBAAAA4rEF4vrrr7d//etf0V8bAAAAIB4Pgjtw4IC98sor9sknn1jDhg3daZAjjRw5MlrrBwAAAMQuAP/8889WqVIl+/77761BgwbuNh0MF0lDogEAAAC5IgDrTG/r16+3Tz/9NHzq4zFjxljZsmWzav0AAACA2PUAh0KhFNc//PBDNwQaAAAAkKsPgjtSIAYAAAByVQBWf2/qHl96fgEAAJBre4BV8e3ataslJSW563v27LFbb731sFEgpkyZEt21BAAAAGIRgLt06XLYeMAAAABArg3A48aNy7o1AQAAAOL9IDgAAAAgpyEAAwAAwCsEYAAAAHiFAAwAAACvEIABAADgFQIwAAAAvEIABgAAgFcIwAAAAPAKARgAAABeIQADAADAKwRgAAAAeIUADAAAAK/kqAA8bNgwS0hIsLvuuit824UXXuhui7zceuu
tKX5vzZo11q5dOytUqJCVKVPG7rnnHjtw4EAMHgEAAABiLdFyiLlz59oLL7xgdevWPey+7t2728MPPxy+rqAbOHjwoAu/5cqVs2+++cbWr19vnTt3tnz58tljjz2WbesPAACA+JAjKsA7duywTp062UsvvWQnnHDCYfcr8CrgBpdixYqF7/v4449t2bJlNnHiRKtfv761adPGhgwZYs8++6zt27cvmx8JAAAAYi1HBODbbrvNVXFbtGiR5v2TJk2y0qVLW506dWzAgAG2a9eu8H2zZ8+2008/3cqWLRu+rVWrVrZ9+3ZbunRpmvPbu3evuz/yAgAAgNwh7lsgJk+ebAsWLHAtEGnp2LGjVaxY0cqXL2+LFy+2/v3724oVK2zKlCnu/g0bNqQIvxJc131pGTp0qA0ePDjqjwUAAACxF9cBeO3atXbnnXfajBkzrECBAmlO06NHj/D/VelNTk625s2b26pVq6xq1aqZWq6qyH369AlfVwW4QoUKmZoXAAAA4ktct0DMnz/fNm3aZA0aNLDExER3+fzzz23MmDHu/zrALbXGjRu7nytXrnQ/1RO8cePGFNME13VfWpKSklwfceQFAAAAuUNcB2BVcpcsWWILFy4MXxo1auQOiNP/8+bNe9jv6HZRJViaNGni5qEgHVBFWaG2du3a2fhoAAAAEA/iugWiaNGi7sC2SIULF7ZSpUq529Xm8Nprr1nbtm3dbeoB7t27t51//vnh4dJatmzpgu4NN9xgw4cPd32/AwcOdAfWqdILAAAAv8R1AD6a/Pnz2yeffGKjR4+2nTt3uj7dDh06uIAbUJV42rRp1rNnT1cNVoDu0qVLinGDAQAA4I+EUCgUivVKxDsdBFe8eHHbtm0b/cAAACAqEhLMQpMSLFfrGIrLvBbXPcAAAABAtBGAAQAA4BUCMAAAALxCAAYAAIBXCMAAAADwCgEYAAAAXsnR4wADAIBc7LXcPURYaFKs18BfVIABAADgFQIwAAAAvEIABgAAgFcIwAAAAPAKARgAAABeIQADAADAKwRgAAAAeIUADAAAAK8QgAEAAOAVAjAAAAC8QgAGAACAVwjAAAAA8AoBGAAAAF4hAAMAAMArBGAAAAB4hQAMAAAArxCAAQAA4BUCMAAAALxCAAYAAIBXCMAAAADwCgEYAAAAXiEAAwAAwCsEYAAAAHiFAAwAAACvEIABAADgFQIwAAAAvEIABgAAgFcIwAAAAPAKARgAAABeIQADAADAKwRgAAAAeIUADAAAAK8QgAEAAOAVAjAAAAC8QgAGAACAVwjAAAAA8AoBGAAAAF4hAAMAAMArBGAAAAB4hQAMAAAArxCAAQAA4JW4DsDPP/+81a1b14oVK+YuTZo0sQ8//DB8/549e+y2226zUqVKWZEiRaxDhw62cePGFPNYs2aNtWvXzgoVKmRlypSxe+65xw4cOBCDRwMAAIB4ENcB+OSTT7Zhw4bZ/Pnzbd68eXbRRRdZ+/btbenSpe7+3r172/vvv29vvfWWff7557Zu3Tq74oorwr9/8OBBF3737dtn33zzjU2YMMHGjx9vgwYNiuGjAgAAQCwlhEKhkOUgJUuWtBEjRtiVV15pJ554or322mvu//LDDz9YrVq1bPbs2Xb22We7avEll1zignHZsmXdNGPHjrX+/fvb5s2bLX/+/Bla5vbt26148eK2bds2V4kGAADZ4LWEWK8BjlfH7IuZx5LX4roCHEnV3MmTJ9vOnTtdK4Sqwvv377cWLVqEpzn11FPtlFNOcQFY9PP0008Ph19p1aqVe4KCKnJa9u7d66aJvAAAACB3iPsAvGTJEtffm5SUZLfeequ98847Vrt2bduwYYOr4JYoUSLF9Aq7uk/0MzL8BvcH9x3J0KFD3TeI4FKhQoUseWwAAADIfnEfgGvWrGkLFy60b7/91nr27GldunSxZcuWZekyBwwY4MrnwWXt2rVZujwAAABkn0SLc6ryVqtWzf2/YcOGNnf
uXHvqqafsmmuucQe3bd26NUUVWKNAlCtXzv1fP+fMmZNifsEoEcE0aVG1WRcAAADkPnFfAU7t0KFDrkdXYThfvnw2c+bM8H0rVqxww56pR1j0Uy0UmzZtCk8zY8YM1xitNgoAAAD4J64rwGpFaNOmjTuw7e+//3YjPnz22Wf20Ucfud7cbt26WZ8+fdzIEAq1t99+uwu9GgFCWrZs6YLuDTfcYMOHD3d9vwMHDnRjB1PhBQAA8FNcB2BVbjt37mzr1693gVcnxVD4vfjii939o0aNsjx58rgTYKgqrBEennvuufDv582b16ZNm+Z6hxWMCxcu7HqIH3744Rg+KgAAAMRSjhsHOBYYBxgAgBhgHOCcryPjAAMAAAAxRwAGAACAVwjAAAAA8AoBGAAAAF4hAAMAAMArBGAAAAB4hQAMAAAArxCAAQAA4BUCMAAAALxCAAYAAIBXCMAAAADwCgEYAAAAXiEAAwAAwCsEYAAAAHiFAAwAAACvEIABAADgFQIwAAAAvEIABgAAgFcIwAAAAPAKARgAAABeIQADAADAKwRgAAAAeIUADAAAAK8QgAEAAOAVAjAAAAC8QgAGAACAVwjAAAAA8AoBGAAAAF4hAAMAAMArBGAAAAB4hQAMAAAArxCAAQAA4BUCMAAAALxCAAYAAIBXCMAAAADwCgEYAAAAXiEAAwAAwCsEYAAAAHiFAAwAAACvEIABAADgFQIwAAAAvEIABgAAgFcIwAAAAPAKARgAAABeIQADAADAKwRgAAAAeIUADAAAAK8QgAEAAOCVuA/AX3zxhV166aVWvnx5S0hIsKlTp6a4v2vXru72yEvr1q1TTPPnn39ap06drFixYlaiRAnr1q2b7dixI5sfCQAAAOJB3AfgnTt3Wr169ezZZ5894jQKvOvXrw9fXn/99RT3K/wuXbrUZsyYYdOmTXOhukePHtmw9gAAAIg3iRbn2rRp4y7pSUpKsnLlyqV53/Lly2369Ok2d+5ca9Sokbvt6aeftrZt29oTTzzhKssAAADwR9xXgDPis88+szJlyljNmjWtZ8+etmXLlvB9s2fPdm0PQfiVFi1aWJ48eezbb79Nc3579+617du3p7gAAAAgd8jxAVjtD6+++qrNnDnTHn/8cfv8889dxfjgwYPu/g0bNrhwHCkxMdFKlizp7kvL0KFDrXjx4uFLhQoVsuWxAAAAIOvFfQvE0Vx77bXh/59++ulWt25dq1q1qqsKN2/ePFPzHDBggPXp0yd8XRVgQjAAAEDukOMrwKlVqVLFSpcubStXrnTX1Ru8adOmFNMcOHDAjQxxpL5h9RRrxIjICwAAAHKHXBeAf/vtN9cDnJyc7K43adLEtm7davPnzw9PM2vWLDt06JA1btw4hmsKAACAWIj7FgiN1xtUc2X16tW2cOFC18Ory+DBg61Dhw6umrtq1Srr16+fVatWzVq1auWmr1WrlusT7t69u40dO9b2799vvXr1cq0TjAABAADgn7ivAM+bN8/OOOMMdxH15ur/gwYNsrx589rixYvtH//4h9WoUcOd4KJhw4b25ZdfujaGwKRJk+zUU091PcEa/qxp06b24osvxvBRAQAAIFYSQqFQKGZLzyF0EJxGg9i2bRv9wAAAZJfXEmK9BjheHUNxmdfivgIMAAAARBMBGAAAAF4hAAMAAMArBGAAAAB4hQAMAAAArxCAAQAA4BUCMAAAALxCAAYAAIBXCMAAAADwCgEYAAAAXiEAAwAAwCsEYAAAAHiFAAwAAACvEIABAADgFQIwAAAAvEIABgAAgFcIwAAAAPAKARgAAABeIQADAADAKwRgAAAAeIUADAAAAK8QgAEAAOAVAjAAAAC8QgAGAACAVwjAAAAA8AoBGAAAAF4hAAMAAMArBGAAAAB4hQAMAAAArxCAAQAA4BUCMAAAALxCAAYAAIBXCMAAAADwCgEYAAAAXiEAAwAAwCsEYAAAAHiFAAwAAACvEIABAADgFQIwAAA
AvEIABgAAgFcIwAAAAPAKARgAAABeIQADAADAKwRgAAAAeIUADAAAAK8QgAEAAOAVAjAAAAC8QgAGAACAV7wKwM8++6xVqlTJChQoYI0bN7Y5c+bEepUAAACQzbwJwG+88Yb16dPHHnzwQVuwYIHVq1fPWrVqZZs2bYr1qgEAACAbeROAR44cad27d7cbb7zRateubWPHjrVChQrZK6+8EutVAwAAQDZKNA/s27fP5s+fbwMGDAjflidPHmvRooXNnj37sOn37t3rLoFt27a5n9u3b7ds9WZxy82Kd99m//9TCwDA4XbFegVw3LIxOwU5LRQKHXVaLwLwH3/8YQcPHrSyZcumuF3Xf/jhh8OmHzp0qA0ePPiw2ytUqJCl6+mf4lY8d2d8AAD81j37P+j//vtvK36UgOFFAD5WqhSrXzhw6NAh+/PPP61UqVKWkJBgPtO3K30RWLt2rRUrVizWq4MoYtvmbmzf3Ittm7uxfTNOlV+F3/Llyx91Wi8CcOnSpS1v3ry2cePGFLfrerly5Q6bPikpyV0ilShRIsvXMyfRm5A3Yu7Ets3d2L65F9s2d2P7ZszRKr9eHQSXP39+a9iwoc2cOTNFVVfXmzRpEtN1AwAAQPbyogIsamno0qWLNWrUyM466ywbPXq07dy5040KAQAAAH94E4CvueYa27x5sw0aNMg2bNhg9evXt+nTpx92YBzSp9YQjaWcukUEOR/bNndj++ZebNvcje2bNRJCGRkrAgAAAMglvOgBBgAAAAIEYAAAAHiFAAwAAACvEIABAADgFQIwMu3HH3+09u3buxONaHDupk2b2qeffhrr1UKUfPDBB9a4cWMrWLCgnXDCCXbZZZfFepUQZXv37nUj4ugMlwsXLoz16uA4/fLLL9atWzerXLmye99WrVrVjR6wb9++WK8aMunZZ5+1SpUqWYECBdzf4zlz5sR6lXINAjAy7ZJLLrEDBw7YrFmzbP78+VavXj13m4aZQ8729ttv2w033ODGyV60aJF9/fXX1rFjx1ivFqKsX79+GTplKHKGH374wZ3k6YUXXrClS5faqFGjbOzYsXbffffFetWQCW+88YY7h4G+xCxYsMB9xrZq1co2bdoU61XLFRgGDZnyxx9/2IknnmhffPGFnXfeee42nX9bleAZM2ZYixYtYr2KyCR9qVHFYfDgwa6ahNzpww8/dB+u+rJz2mmn2XfffeeqwchdRowYYc8//7z9/PPPsV4VHCNVfM8880x75pln3HV9ualQoYLdfvvtdu+998Z69XI8KsDIlFKlSlnNmjXt1VdfdWfUU2hS1aFMmTLutNPIuVRp+P333y1Pnjx2xhlnWHJysrVp08a+//77WK8aomTjxo3WvXt3+/e//22FChWK9eogC23bts1KliwZ69XAMVLbivasRhaT9DdZ12fPnh3TdcstCMDIFPUMfvLJJ65qVLRoUdefNHLkSHd2PfWLIucKKkUPPfSQDRw40KZNm+a26YUXXmh//vlnrFcPx0k7/bp27Wq33nqrOzU8cq+VK1fa008/bbfcckusVwWZ2Mt68ODBw85Wq+u0GUYHARgpaLeKwm16F/WZ6UP0tttucxXfL7/80jXm6yCpSy+91NavXx/rh4Hj2LbazSb333+/dejQwVX0x40b5+5/6623Yv0wcJzbV4FI7UoDBgyI9Sojyts2kvbitG7d2q666ipX7QeQEj3ASGHz5s22ZcuWdKepUqWKC70tW7a0v/76y/X9BqpXr+76RulPyrnbVge8XXTRRW4ba2SPyH407X579NFHs2FtkVXb9+qrr7b333/fhaaAKk158+a1Tp062YQJE7JhbZEV2zZ//vzu/+vWrXN7bM4++2wbP36823WOnNcCofak//znPylG4OnSpYtt3brV3n333ZiuX26QGOsVQHzRgW26HM2uXbvcz9R/WHU9qCAiZ25bVXyTkpJsxYoV4QC8f/9+N8RSxYoVs2FNkZXbd8yYMfb
II4+Eryss6chyHXGuLznIuds2qPw2a9YsvOeG8Jsz6cuMtuHMmTPDAVifrbreq1evWK9erkAARqY0adLE9YXq2+igQYPcmJMvvfSSrV692tq1axfr1cNxUEVf/aEaekdHHCv06khy0e5U5GynnHJKiutFihRxPzVm7MknnxyjtUI0KPyq8qv37BNPPOEqx4Fy5crFdN1w7DRKiz5j1at/1lln2ejRo91B5xqeEsePAIxM0ckvdMCb+kS1u1wVQg2lpN0yGqsQOZsCb2JiohsLePfu3a4yqPGeOcARiF8aglIHvumS+ssM3Y45zzXXXOO+xKjIpAPfNEyhPndTHxiHzKEHGAAAAF6hOQgAAABeIQADAADAKwRgAAAAeIUADAAAAK8QgAEAAOAVAjAAAAC8QgAGAACAVwjAAAAA8AoBGAAAAF4hAANADqBTovbs2dNOOeUUS0pKsnLlylmrVq3s66+/jvWqAUCOkxjrFQAAHF2HDh1s3759NmHCBKtSpYpt3LjRZs6caVu2bMmS5WlZ+fPnz5J5A0CsUQEGgDi3detW+/LLL+3xxx+3Zs2aWcWKFe2ss86yAQMG2D/+8Y/wNLfccouVLVvWChQoYHXq1LFp06aF5/H222/baaed5qrHlSpVsieffDLFMnTbkCFDrHPnzlasWDHr0aOHu/2rr76y8847zwoWLGgVKlSwO+64w3bu3Bn+veeee86qV6/ulqllX3nlldn2vABAZhGAASDOFSlSxF2mTp1qe/fuPez+Q4cOWZs2bVw7xMSJE23ZsmU2bNgwy5s3r7t//vz5dvXVV9u1115rS5YssYceesgeeOABGz9+fIr5PPHEE1avXj377rvv3P2rVq2y1q1bu+rz4sWL7Y033nCBuFevXm76efPmuUD88MMP24oVK2z69Ol2/vnnZ9OzAgCZlxAKhULH8fsAgGygCm737t1t9+7d1qBBA7vgggtcoK1bt659/PHHLgAvX77catSocdjvdurUyfUQa7pAv3797IMPPrClS5eGK8BnnHGGvfPOO+Fpbr75ZheiX3jhhfBtCsBatqrA//3vf+3GG2+03377zYoWLZrlzwEARAsVYADIAVSFXbdunb333nuuKvvZZ5+5IKwq7sKFC+3kk09OM/yKgvG5556b4jZd/+mnn+zgwYPh2xo1apRimkWLFrn5BxVoXXTgnSrOq1evtosvvti1Y6gn+YYbbrBJkybZrl27sugZAIDoIQADQA6hPluFTrUnfPPNN9a1a1d78MEHXX9uNBQuXDjF9R07dri+YgXs4KJQrOBctWpVV/VdsGCBvf7665acnGyDBg1yLRTqRwaAeEYABoAcqnbt2q4VQW0QakP48ccf05yuVq1ahw2XpuuqGAd9wmlRhVn9xNWqVTvsEowQkZiYaC1atLDhw4e7PuFffvnFZs2aFeVHCgDRxTBoABDnNNTZVVddZTfddJMLu6q86gA0hc727du7nlwdfKY2iZEjR7qA+sMPP1hCQoJrl7j77rvtzDPPdKM8XHPNNTZ79mx75pln3AgO6enfv7+dffbZ7qA39QOrQqxAPGPGDPf7GmXi559/dss+4YQTXE+w2iNq1qyZbc8NAGQGARgA4px6bxs3bmyjRo1yIzPs37/fDUmmg+Luu+++8EFyffv2teuuu85VhRWCNRJEUMl98803XYuCQrDaFTRyg1oo0qOw/fnnn9v999/vhkLTMdNqfVCIlhIlStiUKVPcqBJ79uxxw6GpHULDrQFAPGMUCAAAAHiFHmAAAAB4hQAMAAAArxCAAQAA4BUCMAAAALxCAAYAAIBXCMAAAADwCgEYAAAAXiEAAwAAwCsEYAAAAHiFAAwAAACvEIABAABgPvl/ch/bWP+j78wAAAAASUVORK5CYII=", + "text/plain": [ + "
" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], "source": [ "plot_score_dists(baseline_performance, perturbed_performance)" ] }, { "cell_type": "code", - "execution_count": null, + "execution_count": 16, "metadata": { "id": "jF7N9Kder_8j" }, - "outputs": [], + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "/var/folders/8_/slwlpfyn0w34d9zzt9s9qk_c0000gn/T/ipykernel_82715/956876871.py:13: MatplotlibDeprecationWarning: The 'labels' parameter of boxplot() has been renamed 'tick_labels' since Matplotlib 3.9; support for the old name will be dropped in 3.11.\n", + " plt.boxplot([baseline_scores, perturbed_scores], labels=['Baseline', 'Perturbed'])\n", + "/var/folders/8_/slwlpfyn0w34d9zzt9s9qk_c0000gn/T/ipykernel_82715/956876871.py:18: UserWarning: No artists with labels found to put in legend. Note that artists whose label start with an underscore are ignored when legend() is called with no argument.\n", + " plt.legend()\n" + ] + }, + { + "data": { + "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAArEAAAIjCAYAAAAUdENlAAAAOnRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjEwLjEsIGh0dHBzOi8vbWF0cGxvdGxpYi5vcmcvc2/+5QAAAAlwSFlzAAAPYQAAD2EBqD+naQAAQgdJREFUeJzt3QmcTfX/x/HP2McyirJF9kKFkDWlIokiS36lRUnq156SUdGuQtr+Lb96hNJCtopSKkrxa0GiyDaiLCEZIev9P97f///cx50xM2bGnbnznXk9H487d+65555z7nbu+3zP53xPXCgUChkAAADgkUKxXgAAAAAgqwixAAAA8A4hFgAAAN4hxAIAAMA7hFgAAAB4hxALAAAA7xBiAQAA4B1CLAAAALxDiAUAAIB3CLHAETzwwAMWFxeXK/Nq166duwTmzJnj5j1p0qRcmX/fvn2tRo0alpf9/fffdt1111mlSpXca3P77bebr8aOHeuew9q1a9P9DCDv0XekS5cuOT4ffS70+dDnBMDhCLEoUILQEFxKlChhVapUsY4dO9qzzz5rO3fujMp8NmzY4MLvDz/8YHlNXl62zHjsscfc+3jjjTfaG2+8YVdeeWWGYSP1+123bl27++677c8//8zV5S7ogg2y4FK0aFGrVauWXXXVVbZmzZqozuutt96yp59+2goSBd5rrrnGateu7T7n2sg766yzbNiwYbFeNCDHFMm5SQN510MPPWQ1a9a0/fv326ZNm9wPrFr0nnrqKXv//fetYcOG4XHvu+8+Gzx4cJaD4oMPPuhCVOPGjTP9uE8++cRyWkbL9sorr9ihQ4csL/v888+tZcuWmf5x1nMcOHCg+/+ff/6xBQsWuIDzxRdf2Lfffmt5TW58BmLp1ltvtTPOOMN99xYuXGj/+c9/bMaMGbZkyRK3QRmtELt06VKvW+mzYtWqVe41jY+Pt2uvvdZ9tzdu3Ohe3yeeeMJ934H8iBCLAqlTp07WrFmz8O3ExEQXjrSL8OKLL7Zly5a5HwQpUqSIu+Sk3bt3W8mSJa1YsWIWS2ody+v++OMPa9CgQabHP+GEE+yKK64I31YpQunSpW3kyJG2cuVK1zKbl8T6M5DT2rZtaz179nT/q+XwpJNOcsF23Lhx7nt4NHbt2mWlSpWynP6e5jWjR492ZTbau1K9evXDvi+5KaffAyAS5QTA/zv33HPt/vvvt19//dXGjx+fYU3srFmz7Mwzz7RjjjnGBaKTTz7ZhgwZ4u5Tq65aRYIf6WD3aVDXpnrHU0891bUIaneffhSDx6ZXD3nw4EE3jnYR6gdCQXv9+vUpxlHri2paU4uc5pGWLa2aWP0oqSWzWrVqVrx4cfdcFQBDoVCK8TSdm2++2aZNm+aen8Y95ZRTbObMmZl6/fVj269fP6tYsaLbHdqoUSMXbFLvjk5KSnItd8GyR9aTZpZeR4ncOPnxxx/d89cu7mB3rFq1tm3bluKxKjlRC59eJz3HChUqWIcOHVyrV6RvvvnGLrjgAitbtqx7j88++2z7+uuvs10XPXHiRHv00UetatWqbvnOO+881wKXWnbmu3nzZvdapNVi98svv7j5P//88+62WlA1nsK/lqN8+fLuu6DvRHa/d6L3NfDRRx+5sKvPepkyZaxz5872008/pXic3it991avXm0XXnihG69Pnz7utdPnQ9/j4DMSfKbTqkGOfI11HcjoexrZaq6Wfr0O2rCaMmXKYc/vr7/+cp+X4PtTp04d1zqaeo+HxtNz0vum9crVV1/thmWGXgN9LlIHWNHnMzW9vvpc6DVLSEhw6wS1Xkd69913rWnTpm5j/rjjjnMbgr///num3gPR89MeD60D9Proez1gwADbvn17iml8//33rpxL89C8tIdM3zsgM2iJBSKovlI/VPpx6t+/f5rj6MdULbYqOVBZgn6YFCaCoFC/fn03fOjQoXb99de7H2Np3bp1eBoKRmoN/te//uV+HLSCz4j
Ci35k77nnHhf29OPQvn171/IStBhnRmaWLZKCqgLz7NmzXcDUD/bHH3/sakr1g6YWoEhfffWV+yH/97//7X7QVGfco0cPW7dunQs76dmzZ48LDXodFYT1Q6YfUf1I6of8tttuc8uuGtg77rjD/WAHJQLHH398hs9ZoWvr1q3hcoJFixa5shEFE80noBCm2kyFewVYvc/a1a3r//73v+ENmRtuuMEdaKflVHDRe6nnrdb7Jk2auHHUqq/3VyFAZQ+FChWyMWPGuMA2d+5ca968uWXV448/7qZz11132Y4dO+zJJ590gUGhNZDd+erzp1CjoJy6TGPChAlWuHBh69WrV3ijbvjw4a5FW9NLTk52QUQhXmE+qxSAJPh86D1WgFOwUdhT6+eLL77ogrLeu8iNrAMHDrjxdJ82rBQ09d7p9fntt9/Cn08FrezI6HuqVvzevXu7z4OWV6+zXiNttAWvg5Zdr6u+KwpwJ554os2bN8+1OGt3f1C3q+9Z165d3edI09NnferUqW66maHw+umnn7r3P9goSI+CvEKiwqWWQ4FZr6uW+/LLLw+Po++Bwq3ea23kPPPMM24dp3H1mIzeA9HzDaajlnZtpGhDSI/XdLTXR+uy888/332HVbKl6WoDI62NASBNIaAAGTNmjJoPQ999912645QtWzZ0+umnh28PGzbMPSYwevRod3vLli3pTkPT1ziaX2pnn322u++ll15K8z5dArNnz3bjnnDCCaHk5OTw8IkTJ7rhzzzzTHhY9erVQ1dfffURp5nRsunxmk5g2rRpbtxHHnkkxXg9e/YMxcXFhVatWhUepvGKFSuWYtjixYvd8Oeeey6UkaefftqNN378+PCwffv2hVq1ahUqXbp0iueu5evcuXOG04scV9NNfWnTpk1o69atKcbdvXv3YY9/++233fhffvllis/HTTfdlO48Dx06FKpbt26oY8eO7v/I6desWTPUoUOHwz6PSUlJR/wM1K9fP7R3797wcL33Gr5kyZIszzctL7/8corpBRo0aBA699xzw7cbNWqU6dc/UvA8XnvtNffd2bBhQ2jGjBmhGjVquM+SPpc7d+4MHXPMMaH+/funeOymTZvc6x45XJ9VTW/w4MGHzUvLF/k5zuj1jlw2XWfmexp8riZPnhwetmPHjlDlypVTrDsefvjhUKlSpUIrVqxI8Xgtc+HChUPr1q1L8T178sknw+McOHAg1LZt23S/q5GWLl0aio+Pd+M2btw4dNttt7lp7tq1K8V4f/31V6hMmTKhFi1ahPbs2ZPivuAzo+9dhQoVQqeeemqKcaZPn+6mP3To0CO+B3PnznXD33zzzRTDZ86cmWL41KlTj7g+BjJCOQGQilptMuqlIGiFeO+997J9EJRab9VCkVk6glstmwHVFFauXNk+/PBDy0mavlrh1JISSa2gyq3aLRlJrcM6Ojqg1mrtrjzS0eeaj1rQLrvssvAwtdRovqr100FY2dWiRQvXyqrL9OnTXau2WlfVwqwW4EBki7ZabNV6qwPIJLJUQO+/Wj91gFxa1DquVjq1aqklT9PRRWUZKgH48ssvs/W50eclsl42aEUPXtujnW/37t1dSYFaXgM6OOrnn392LY6Rz1+vn+aVHWoFVMubDuJSmYCWT2UjqlHXe6SWd30OguXXRZ9BvY/aI5CaeqnIKRl9T7X8l1xySfi2Puf6nqqlUQeLivYm6H069thjUzwffU9UIqT3JPj867WPfC56zrfcckumllOtqnr/1Vqslky1mnbr1s21HOtgzYBeX63b1OqpXfyRgj0NalVXC6n2pkSOo/eqXr16rlTjSO+BnrfKItQiHfm8tYdA69fgfQzWpfpeao8JkFWUEwCpKDSlVUcW0A/6q6++6nan6sdAAUEBQMFSu28ze7BRVg7gSX3wkX5wVFuXnXrQrFBdoX6sIwO0aHdncH8k7S5NTT/gqevg0pqPnmPq1y+9+WSFau0UGiJ/jFXXq/dL72M
QFNTllmo933nnncMOhtHu6YB242s3r2oc9aOsWkCFF9XSShDuMtoVrOnpdcmK1K9t8PjgtT3a+ep10mdZJQUPP/ywG6ZAq3Clz3dA5Sja9a0DslQzqvpbleFE9uiREZWyKNgppGmeeo+D2uTgOaS3S1xBMZIep9KSnJLR91Tfv9S18npNRN9LbZTp+ajWOr2Sl+Bzps+3NkpTlz3oc5pZmrdKMRSOteGhYKjPqsqGVDaj70BQuqH3LT3Bdy2teSvEquThSO+Bnrc+a+mtR4PnrVILlRvpe6fSD5UUKXxrQ0wbEMCREGKBCKqj08pXP1DpUYudWlDUmqBWCdWS6cdeP7yqpdWP85FkpY41s9I7IYN+1DKzTNGQ3nxSHwQWawprovcxCLGXXnqpq1dUva9qfxUo1HKpkBbZgqnxFMJUs6j3e8SIEa52U3V8qp8MxtXw9LpXy06N5pFe22jMV7WfanlUq56moUCr10phM6BaYoUh7YnQ89eGgALISy+95DbsjuS0005LsVERKXgOCmPBwXeRUvcSoqCT2Q3HI31HcuJ7quej1shBgwaleX8QeqNJnxO9xrq0atXKzjnnHHvzzTfTfc2PVlrvgZ63Aqzmm5Yg1AcnclHN+QcffODq7dVSP2rUKDcsu7XMKDgIsUAE/XiKDlTIiFba+nHXRQcJqQP+e++91wVb/VhE+wxfqXfdKrjoIKjI1i+1sKV1NLNaVoJWQsnKsgUHjGgXZGRr7PLly8P3R4OmoxYr/fhF/iBGez6RB6MEre5Ba+Znn33mWoTUUhhIb5e5Ws20u1UXtSrpgC6VKSjEBuUUajXMqeCQlmjMV61gOiAnKClYsWJFmt1elStXzoVdXfQaKtjqgK/MhNjMPAcFoKN57dL7jAet0Km/J9lp6df3T9/DyHnp9ZLg4DM9H70+R3ou+nzr86dxI4ObeoY4GkE3gjqILFieoEwkvQ314LumeaduEdewzHwXNR+tN9q0aZOpDQGV7eii75B6SdABi9ojcrSfJ+R/1MQC/09H9mo3qna9Bd3EpCWtMz0FLV979+5110E/iZntIudIXn/99RR1umq90A+TQlPkD4daL/bt2xcepl2KqbviysqyaVe5WqmC7pUCannTj3fk/I+G5qM6wsh6TAXN5557zv2oa7djNKnVR9SNV2QrZ+oW49RnfdJrEVlaEAQulVwE771KDPRe6EjtICRH2rJli+WEaMxXNYragFMLrEKEdqUr2EZK3eWY3h8FouD5Hw3NWyFcG4Vp1Uhm9rXTZzz1+xQZ4oJa1OA9VS8UWaWaaLXGB9RLg76nWhcErchqtZ8/f75rYUxN379gY0qff/2vXhgil0uf/8xQzxNpvV5BzXxQGqCeALQxqh4HVPcdKfjsK/jqM62W9cj3VPXv6oFD5ThHouet5Q/KUiLpeQbrHm08pv7OpV6XAhmhJRYFklbIauXTClXdxyjA6qAHtTLojF2pD3qIpJpA/QhqZa7x1RL3wgsvuLowdTMT/FgqEOiHQD8a+lHVgSmRXTplhVq+NG21fGl5Fa4UHCK7AVOrhcKtdn/rR0S7fNXfbeSBVlldtosuusjtjlQrs+r8FPq0C1m7ktX3ZeppZ5fq9l5++WXXpZb65VRLlp6LuuLRc01dk5sV6t4o6PdXAX/x4sVuXtpFHpQSKDipNVE1hAoDqoXU84zsu1S0IaH3WfW0ei0U4NTi9N1337ldoKKWZO1iV8DXATd6zzQ9LYda6jWvIERHU7Tmq5pvHSCkz7RCZWR3SqJuxVS7qNCsz6UOBAq6HDtaWkYFOdXYqnVb5Q3a9awu2lS6o5a91BtUadGyaYPozjvvdN1E6X3SZ1mvi1r81LqsjVEtv8J6ECazQqUA6nZO770OoHrttdfcd1NdbQVUmqL1ibrk02dby6UD2XR2Mr1m+k7pc6hl03NTjb2GBX3OphXE06JyFn1vVLsc7J3RwYgK1XqOwZnL9PpqA1TrCr0uqj1V67S
+E+oOTAfY6YBKTU+fH2086iC7oIstfS/Vxd2R6HFq0VdYVmmKwrOmqz0bOuhL09J3SPPT50wHyGldou+XDkTTcirYA0eUYd8FQD4TdLETXNQlVKVKlVz3Q+qyKLIrp/S62Prss89CXbt2DVWpUsU9XteXXXbZYd3ovPfee657oiJFiqToJkdd95xyyilpLl963Supq6fExETX9Y260lEXQr/++uthjx81apTrjqt48eKuG6nvv//+sGlmtGypu9gSdXt0xx13uOdZtGhR143TiBEjUnTjJJpOWl1Ppdf1V2qbN28OXXPNNaHjjjvOva6nnXZaml0LHU0XW4UKFXKvod6vyK7A5LfffgtdcsklrosndefUq1cv1w2UHqfPgKiLq7vvvtt1M6WuitR9kv5/4YUXDpv3okWLQt27dw+VL1/evR9alksvvdR9frLTxda7776bYvp6TFrdL2VmvhnRdyDorimyy7OAultr3ry5e500Xr169UKPPvqo65opI+k9j/TGVVdheh9KlCgRql27dqhv377u8xzQZ0qvf1r+/vvv0OWXX+6WUfOM/EyvXr061L59e/faVKxYMTRkyJDQrFmz0uxiK73vafAZ/Pjjj0MNGzZ009LrkNZz0/dH3906deq4z7U+361btw6NHDkyxWu2bdu20JVXXhlKSEhwz1v/673MTBdbX3/9tfvuqVssPVbf0xNPPNG9Znq+qb3//vtuGfT+aX56P7WOiTRhwgTXXZieW7ly5UJ9+vRx35FIGb0H8p///CfUtGlTNx99X/SdHjRokPteycKFC913Ucuq+ei72aVLlxTvM5CROP05ctQFAAAA8g5qYgEAAOAdQiwAAAC8Q4gFAACAdwixAAAA8A4hFgAAAN4hxAIAAMA7BepkBzqlpc6yoo7To31aUAAAABw99f6qk1/obIiRpyIv0CFWAbZatWqxXgwAAAAcgU6brrMkpqdAhdjg1JV6UXRaOwAAAOQtycnJrtHxSKccL1AhNighUIAlxAIAAORdRyr95MAuAAAAeIcQCwAAAO8QYgEAAOCdAlUTCwAAgJzvIuvAgQN28ODBNO8vXLiwFSlS5Ki7OyXEAgAAICr27dtnGzdutN27d2c4XsmSJa1y5cpWrFixbM+LEAsAAIConFQqKSnJtbTqRAUKqKlbW9VKq6C7ZcsWN27dunUzPKFBRgixAAAAOGoKpwqy6uNVLa3piY+Pt6JFi9qvv/7qHlOiRIlszY8DuwAAABA1mWlZzW7ra4ppHPUUAAAAgFxGiAUAAIB3CLEAAADwDiEWAAAA3iHEAgAAIGrUjVY0xjkSutgCokxnKJk7d67r7FkdObdt29b1mQcAvmK9hsxQt1miEx2oG62MBCdDCB6THYRYIIqmTJliAwcOtLVr14aH1ahRw0aNGmXdu3eP6bIBQHawXkNmacPmmGOOsT/++MPdVl+xaZ3sQAFW42jco9kYIsQCUVzR9+zZ0y688ELr2rWr7dmzx22Jrlq1yg2fNGkSK3wAXq7XOnfubHfffbdbp2nd9tFHH7FeQ5oqVarkroMgmx4F2GDc7IoLRaMowRPJyclWtmxZ27FjhyUkJMR6cZDPdrXVqVPHbVGqtUK3AxqmVgudxWTlypXsggPg1XrtuOOOs61btx7WEqvh27ZtY72GdD8/+/fvT/M+lRBk9JnJbF6jJRaIAtWKBSv4ChUqWLt27axUqVK2a9cumzNnjq1evTo8nu4DAF/Wazo1aFotsTNmzHC7hlmvIS0KqTm9cUOIBaJg/fr14foftVhMnDgxxan1NFw1QMF4AJDX/f777+66cePGtnjxYps+fXr4vmrVqrnhixYtCo8H5Da62AKi4JtvvnHXCqrHH3+8vfLKK+4oXl3rdnAUZjAeAOR1W7ZscdcKqqk3wHVbwyPHA3IbIRaIgqAGVnU+69ats+uuu84VrOtat4MuRCJrZQEgLytfvnz4/2LFitngwYPdgaq61u20xgNyEyE
WiIJNmza5axWx64jd+fPn286dO921bgfF7cF4AJDXaW9SoEOHDnbxxRe7mn9d63Za4wG5iZpYIBNUDrB8+fJ07y9S5P++SjqY6/vvv7fWrVuH71PH4MFBXhpv4cKFaU6jXr16rnYWAPKCWbNmueuKFSva0qVLU6zX1DuBAq26UdJ4gwYNiuGSoqAixAKZoADbtGnTI46noKpLpMhWCvWpqEtaFixYYE2aNInC0gLA0W+cBwdsbd682Z2hS/3Bag9TmTJl3Ma6eiUIxmPjHLFAP7FAFFb2+/btszPPPNPVvqp0IHU/scHwr776KkUtWSRW9gByk4JnZjbOjwYb58gO+okFokjh8kgr4jvvvNNGjBjhdrE1bNjQPv30U2vfvr39+OOPbpeb+lhs2bJlri0zAGREG84KmelRf7DaOJc2bdq4Ex+MGzfOrr76aneA19dff+3u08a5+o9Nbx5ATqElFogi1YWNHj3aDhw4EB6mOtg77rjDnnzyyZguGwBkVbdu3ey9995L936dYnvatGm5ukzI/5IzmdfonQCIIgVV1cSqVVZ0rdsEWAA+UkBVUE0LARaxRogFokw1r3369HH/6zq9GlgA8IGCqo4L6NWrl7uta90mwCLWCLEAACBDqnnVSQ5E1+nVwAK5iRALAAAA7xBiAQAA4B1CLAAAALxDiAUAAIB3CLEAAADwDiEWAAAA3iHEAgAAwDuEWAAAAHiHEAsAAADvEGIBAADgHUIsAAAAvEOIBQAAgHcIsQAAAPAOIRYAAADeIcQCAADAO0VivQBALK1cudJ27twZ9ekuW7YsxXW0lSlTxurWrZsj0wbgN9ZrKCjiQqFQyAqI5ORkK1u2rO3YscMSEhJivTjIAyv6k046yXy1YsUKVvgAUmC9hoKU12iJRYEVtFSMHz/e6tevH9Vp79mzx9auXWs1atSw+Pj4qE5brSBXXHFFjrS0APAb6zUUJIRYFHha0Tdp0iTq023Tpk3UpwkAmcF6DQWBdwd2/c///I/bCixRooS1aNHCvv3221gvEgAAAHKZVyF2woQJduedd9qwYcNs4cKF1qhRI+vYsaP98ccfsV40AAAA5CKvQuxTTz1l/fv3t2uuucYaNGhgL730kpUsWdJee+21WC8aAAAAcpE3NbH79u2zBQsWWGJiYnhYoUKFrH379jZ//vw0H7N37153iTzaDYhUqXScxf+1wmyDP9tzWl4tNwCkhfUaCgpvQuzWrVvt4MGDVrFixRTDdXv58uVpPmb48OH24IMP5tISwkcDmhaz+l8OMPvSvFH//5cbANLCeg0FhTchNjvUaqsa2siW2GrVqsV0mZC3vLxgn/UeOtbq16tnvli2fLm9POpyuzjWCwIgT2K9hoLCmxB73HHHWeHChW3z5s0phut2pUqV0nxM8eLF3QVIz6a/Q7bnmJPMqjQ2X+zZdMgtNwCkhfUaCgpvCmaKFStmTZs2tc8++yw87NChQ+52q1atYrpsAAAAyF3etMSKSgOuvvpqa9asmTVv3tyefvpp27Vrl+utAAAAAAWHVyG2d+/etmXLFhs6dKht2rTJGjdubDNnzjzsYC8AAADkb16FWLn55pvdBQAAAAWXdyEWiJbdu3e7a539Ldr27Nlja9eudadIjo+Pj+q0ly1bFtXpAcg/WK+hICHEosAK+hfWWeB8VKZMmVgvAoA8hvUaChJCLAqsbt26uet69eq50xdHu1XhiiuusPHjx1v9+urGO/or+rp160Z9ugD8xnoNBQkhFgWW+h6+7rrrcnQeWtE3adIkR+cBAAHWayhIvOknFgAAAAgQYgEAAOAdQiwAAAC8Q4gFAACAdwixAAAA8A4hFgAAAN4hxAIAAMA7hFgAAAB4hxALAAAA7xBiAQAA4B1CLAAAALxDiAUAAIB3CLEAAADwDiEWAAAA3iHEAgAAwDuEWAAAkKF9+/bZm2++6f7XtW4DsUaIBaLs4MGD9v3337v/da3bAOCrQYMGWYkSJey
pp55yt3Wt2xoOxBIhFoiiKVOmWJ06dWzAgAHutq51W8MBwDcKqiNGjLBQKJRiuG5rOEEWsRQXSv3JzMeSk5OtbNmytmPHDktISIj14iCfUVDt2bOndenSxXr06GF9+/a1sWPH2uTJk2369Ok2adIk6969e6wXEwCc3bt32/Lly9O9XyUDrVq1cv8XLVrULrjgAvvggw/soosuspkzZ9r+/fvdffPnz7dixYqlOY169epZyZIlc+gZoKDntSK5ulRAPl3Zq2TglltusbZt29rQoUPtl19+ccOLFCnibuuLeOutt1q1atWscOHCaU6DlT2A3KR1WtOmTTM1rgKrAqwE14Eg6KZlwYIF1qRJk6NcUiBttMQCmbBw4cJMr+yzi5U9gLy0cX7ppZfa6tWrrXfv3q5sYM+ePbZ27VqrUaOGxcfH2+OPP27vvvuu1a5d2yZOnJjmNNg4R3bQEgtEkVbECpnp0a61e++91x555BF74YUXbMOGDeH7qlSpYjfeeKPdf//99uijj7pdcunNAwByi8JlRhvOwV6jhg0bhsdr06ZN+P7TTjvNhViNxwY4YoEQC0RhZa+tRrnvvvtcvZjqYE899VRbunSpPfbYYy7ASuvWrVnZA/DCOeecYytWrHAb3wMHDnS1rxs3brTKlSu7EgK1xAbjAbFAOQEQBToAolSpUla+fHn77bffXC1s4MCBA1a1alXbtm2b7dq1K90DIAAgL1H5QGZKAVSWoPICILfzGl1sAVEwb948F1b/+OMP1wOBWix27tzprnVbw3W/xgMAHyiYnnHGGRmOo/sJsIgVQiwQBdrFJm+88YYtWbLElQ1o61HXKinQ8MjxACCvU68rW7ZssYoVK6Z5v4Zv3bqVE7ogZgixQBSoRkx0lO6qVats9uzZ9tZbb7nrlStXWq1atVKMBwB53dy5c11vBNOmTXMlAzfddJOdf/757lq3p06daklJSW48IBY4sAuIAvUPq25ndBCXVvjt2rUL33fo0CEbPny41axZ040HAD4I9hzpIFWVDDz//PMp7tfwyPGA3EZLLBAF6mJm1KhR7sxc3bp1S1ETq9saPnLkyHRPdAAAeU2w50glUWkJhrOHCbFC7wRAlE89q65otAsuoBZYBVhOOQvAJ6p1rVOnjusPVnuYChUqlGIPkzbQFWRVMsUGOmKR1wixQA6s+FUjFvSnqBICVvAAfN0w79mzp3Xp0sUSExPD/V+rREp7mCZNmsQGOqKOEJsGQiwAAFnDHibkNkJsGgixAABkHXuYkBfzGr0TAACADCmwRva6AuQF9E4AAAAA7xBiAQAA4B1CLAAAALxDiAUAAIB3CLEAAADwDiEWAAAA3iHEAgAAwDuEWAAAAHiHEAsAAADvEGIBAADgHUIsAAAAvEOIBQAAgHcIsQAAAPAOIRYAAADeIcQCAADAO4RYAAAAeIcQCwAAAO8QYgEAAOAdQiwAAAC8Q4gFAACAdwixAAAA8A4hFgAAAN4hxAIAAMA7hFgAAAB4hxALAAAA7xBiAQAA4B1CLAAAALxDiAUAAIB3CLEAAADwDiEWAAAA3iHEAgAAwDuEWAAAAHiHEAsAAADvEGIBAADgHUIsAAAAvEOIBQAAgHcIsQAAAPAOIRYAAADeIcQCAADAO4RYAAAAeMeLELt27Vrr16+f1axZ0+Lj46127do2bNgw27dvX6wXDQAAADFQxDywfPlyO3TokL388stWp04dW7p0qfXv39927dplI0eOjPXiAQAAIJfFhUKhkHloxIgR9uKLL9qaNWsy/Zjk5GQrW7as7dixwxISEnJ0+QAAAJB1mc1rXrTEpkVPrFy5chmOs3fvXneJfFEAAADgPy9qYlNbtWqVPffcczZgwIAMxxs+fLhL8sGlWrVqubaMAAAAyKchdvDgwRYXF5fhRfWwkX7//Xe74IILrFevXq4uNiOJiYmuxTa4rF+/PoefEQAAAPJ9TeyWLVts27ZtGY5Tq1YtK1asmPt/w4Y
N1q5dO2vZsqWNHTvWChXKWganJhYAACBv86Im9vjjj3eXzFAL7DnnnGNNmza1MWPGZDnAAgAAIP/w4sAuBVi1wFavXt11qaUW3EClSpViumwAAADIfV6E2FmzZrmDuXSpWrVqivs87SEMAAAAR8GLffJ9+/Z1YTWtCwAAAAoeL0IsAAAAEIkQCwAAAO8QYgEAAOAdQiwAAAC8Q4gFAACAdwixAAAA8A4hFgAAAN4hxAIAAMA7hFgAAAB4hxALAAAA7xBiAQAA4B1CLAAAALxDiAUAAIB3CLEAAADwDiEWAAAA3iHEAgAAwDuEWAAAAHiHEAsAAADvEGIBAADgHUIsAAAAvEOIBQAAgHcIsQAAAPAOIRYAAADeIcQCAADAO4RYAAAAeIcQCwAAAO8QYgEAAOAdQiwAAAC8Q4gFAACAdwixAAAA8A4hFgAAAN4hxAIAAMA7hFgAAAB4hxALAAAA7xBiAQAA4B1CLAAAALxDiAUAAIB3CLEAAADwDiEWAAAA3iHEAgAAwDuEWAAAAHiHEAsAAADvEGIBAADgHUIsAAAAvEOIBQAAgHcIsQAAAPAOIRYAAADeIcQCAADAO4RYAAAAeIcQCwAAAO8QYgEAAOAdQiwAAAC8Q4gFAACAdwixAAAA8A4hFgAAAN4hxAIAAMA7hFgAAAB4hxALAACAghlik5OTbdq0abZs2bJoTA4AAACIfoi99NJL7fnnn3f/79mzx5o1a+aGNWzY0CZPnpydSQIAAAA5G2K//PJLa9u2rft/6tSpFgqF7K+//rJnn33WHnnkkexMEgAAAMjZELtjxw4rV66c+3/mzJnWo0cPK1mypHXu3NlWrlyZnUkCAAAAORtiq1WrZvPnz7ddu3a5EHv++ee74du3b7cSJUpkZ5IAAABAphWxbLj99tutT58+Vrp0aTvxxBOtXbt24TKD0047LTuTBAAAAHI2xP773/+25s2b2/r1661Dhw5WqND/NejWqlWLmlgAAADkuLiQjsrKpn379llSUpLVrl3bihTJVh7OVeoKrGzZsq6mNyEhIdaLAwAAgGzmtWzVxO7evdv69evnDuY65ZRTbN26dW74LbfcYo8//nh2JgkAAABkWrZCbGJioi1evNjmzJmT4kCu9u3b24QJE7IzSQAAACDTslUDoLNzKay2bNnS4uLiwsPVKrt69ersTBIAAADI2ZbYLVu2WIUKFQ4bri63IkMtAAAAkGdCrE4zO2PGjPDtILi++uqr1qpVq+gtHQAAABCtcoLHHnvMOnXqZD///LMdOHDAnnnmGff/vHnz7IsvvsjOJAEAAICcbYk988wz3YFdCrA6ucEnn3ziygt0Fq+mTZtmZ5IAAABAzrXE7t+/3wYMGGD333+/vfLKK1l9OAAAAJD7LbFFixa1yZMnH/2cAQAAgNwsJ+jWrZvrZgsAAADw5sCuunXr2kMPPWRff/21q4EtVapUivtvvfXWaC0fAAAAcJi4UCgUsiyqWbNmuvepu601a9aYz+fiBQAAQN7Oa9lqiU1KSrJY2bt3r7Vo0cL1jrBo0SJr3LhxzJYFAAAAHtXERlJDbjYac7Nt0KBBVqVKlVybHwAAAPJRiH399dddH7Hx8fHu0rBhQ3vjjTcsJ3300UeuT9qRI0fm6HwAAACQt2WrnOCpp55y/cTefPPN1qZNGzfsq6++shtuuMG2bt1qd9xxR7SX0zZv3mz9+/d3vSKULFky06UHukTWWAAAAKCAhtjnnnvOXnzxRbvqqqvCwy6++GI75ZRT7IEHHoh6iFW5Qt++fV1Ibtasma1duzZTjxs+fLg9+OCDUV0WAAAAeFpOsHHjRmvduvVhwzVM92XW4MGDXW8GGV2WL1/uQvPOnTstMTExS8up8XVkW3BZv359lh4PAACAfNQSW6dOHZs4caINGTIkxfAJEya4PmQza+DAga6FNSO1atWyzz//3ObPn2/FixdPcZ9aZfv06WPjxo1L87EaP/VjAAAAUED7idV
pZ3v37m3t27cP18TqxAefffaZC7eXXHJJVBdy3bp1KepZN2zYYB07drRJkya57raqVq2aqenQTywAAEAB7ie2R48e9s0339jo0aPDp5+tX7++ffvtt3b66adbtJ144okpbpcuXdpd165dO9MBFgAAAPlHtkKs6HSz48ePj+7SAAAAADkVYj/88EMrXLiw26Uf6eOPP7ZDhw5Zp06dLCfVqFEjV0+wAAAAgHzQO4F6FTh48OBhwxUsdR8AAACQ50LsypUrrUGDBocNr1evnq1atSoaywUAAABEN8TqiLE1a9YcNlwBtlSpUtmZJAAAAJCzIbZr1652++232+rVq1MEWPX7qjN3AQAAAHkuxD755JOuxVXlAzVr1nQX/V++fHkbOXJk9JcSAAAAONreCVROMG/ePJs1a5YtXrzY4uPjrVGjRta2bdvsTA4AAADIuZZYnfp1+vTp7v+4uDg7//zzrUKFCq71VSdAuP76623v3r1ZWwIAAAAgJ0PsQw89ZD/99FP49pIlS6x///7WoUMH17XWBx98YMOHD8/qMgAAAAA5F2J/+OEHO++888K333nnHWvevLm98sorduedd9qzzz5rEydOzNoSAAAAADkZYrdv324VK1YM3/7iiy9SnJ3rjDPOsPXr12d1GQAAAICcC7EKsElJSe7/ffv22cKFC61ly5bh+3fu3GlFixbN2hIAAAAAORliL7zwQlf7OnfuXEtMTLSSJUum6JHgxx9/tNq1a2d1GQAAAICc62Lr4Ycftu7du9vZZ59tpUuXtnHjxlmxYsXC97/22muuxwIAAAAgJ8WFQqFQVh+0Y8cOF2ILFy6cYviff/7phkcG27wkOTnZ9XGr5U9ISIj14gAAACCbeS3bJztIS7ly5bIzOQAAACDnTzsLAAAAxBIhFgAAAN4hxAIAAMA7hFgAAAB4hxALAAAA7xBiAQAA4B1CLAAAALxDiAUAAIB3CLEAAADwDiEWAAAA3iHEAgAAwDuEWAAAAHiHEAsAAADvEGIBAADgHUIsAAAAvEOIBQAAgHcIsQAAAPAOIRYAAADeIcQCAADAO4RYAAAAeIcQCwAAAO8QYgEAAOAdQiwAAAC8Q4gFAACAdwixAAAA8A4hFgAAAN4hxAIAAMA7hFgAAAB4hxALAAAA7xBiAQAA4B1CLAAAALxDiAUAAIB3CLEAAADwDiEWAAAA3iHEAgAAwDuEWAAAAHiHEAsAAADvEGIBAADgHUIsAAAAvEOIBQAAgHcIsQAAAPAOIRYAAADeIcQCAADAO4RYAAAAeIcQCwAAAO8QYgEAAOAdQiwAAAC8Q4gFAACAdwixAAAA8A4hFgAAAN4hxAIAAMA7hFgAAAB4hxALAAAA7xBiAQAA4B1CLAAAALxDiAUAAIB3CLEAAADwDiEWAAAA3iHEAgAAwDuEWAAAAHiHEAsAAADvEGIBAADgHUIsAAAAvONViJ0xY4a1aNHC4uPj7dhjj7Vu3brFepEAAAAQA0XME5MnT7b+/fvbY489Zueee64dOHDAli5dGuvFAgAAQAx4EWIVWG+77TYbMWKE9evXLzy8QYMGMV0uAAAAxIYX5QQLFy6033//3QoVKmSnn366Va5c2Tp16nTElti9e/dacnJyigsAAAD850WIXbNmjbt+4IEH7L777rPp06e7mth27drZn3/+me7jhg8fbmXLlg1fqlWrlotLDQAAgHwZYgcPHmxxcXEZXpYvX26HDh1y4997773Wo0cPa9q0qY0ZM8bd/+6776Y7/cTERNuxY0f4sn79+lx8dgAAAMiXNbEDBw60vn37ZjhOrVq1bOPGjYfVwBYvXtzdt27dunQfq3F0AQAAQP4S0xB7/PHHu8uRqOVVYfSXX36xM8880w3bv3+/rV271qpXr54LSwoAAIC8xIveCRISEuyGG26wYcOGubpWBVf1VCC9evWK9eIBAAAgl3kRYkWhtUiRInbllVfanj173EkPPv/8c3eAFwAAAAqWuFA
oFLICQl1sqZcCHeSl1l0AAAD4mde86GILAAAAiESIBQAAgHcIsQAAAPAOIRYAAADeIcQCAADAO4RYAAAAeIcQCwAAAO8QYgEAAOAdQiwAAAC8Q4gFAACAdwixAAAA8A4hFgAAAN4hxAIAAMA7hFgAAAB4hxALAAAA7xBiAQAA4B1CLAAAALxDiAUAAIB3CLEAAADwDiEWAAAA3iHEAgAAwDuEWAAAAHiHEAsAAADvEGIBAADgHUIsAAAAvEOIBQAAgHcIsQAAAPAOIRYAAADeIcQCAADAO4RYAAAAeIcQCwAAAO8QYgEAAOAdQiwAAAC8Q4gFAACAdwixAAAA8A4hFgAAAN4hxAIAAMA7hFgAAAB4hxALAAAA7xBiAQAA4B1CLAAAALxDiAUAAIB3CLEAAADwDiEWAAAA3iHEAgAAwDuEWAAAAHiHEAsAAADvEGIBAADgHUIsAAAAvEOIBQAAgHcIsQAAAPAOIRYAAADeIcQCAADAO4RYAAAAeIcQCwAAAO8QYgEAAOAdQiwAAAC8Q4gFAACAdwixAAAA8A4hFgAAAN4hxAIAAMA7hFgAAAB4hxALAAAA7xBiAQAA4B1CLAAAALxDiAUAAIB3CLEAAADwDiEWAAAA3iHEAgAAwDuEWAAAAHiHEAsAAADvEGIBAADgHUIsAAAAvEOIBQAAgHcIsQAAAPAOIRYAAADeIcQCAADAO4RYAAAAeMebELtixQrr2rWrHXfccZaQkGBnnnmmzZ49O9aLBQAAgBjwJsR26dLFDhw4YJ9//rktWLDAGjVq5IZt2rQp1osGAACAXOZFiN26dautXLnSBg8ebA0bNrS6deva448/brt377alS5fGevEAAACQy7wIseXLl7eTTz7ZXn/9ddu1a5drkX355ZetQoUK1rRp03Qft3fvXktOTk5xAQAAgP+KmAfi4uLs008/tW7dulmZMmWsUKFCLsDOnDnTjj322HQfN3z4cHvwwQdzdVkBAACQz1tiVR6ggJrRZfny5RYKheymm25ywXXu3Ln27bffukB70UUX2caNG9OdfmJiou3YsSN8Wb9+fa4+PwAAAOSMuJASYoxs2bLFtm3bluE4tWrVcsH1/PPPt+3bt7ueCQKqje3Xr58Lw5mhcoKyZcu6QBs5HQAAAOQNmc1rMS0nOP74493lSHQAl6iMIJJuHzp0KMeWDwAAAHmTFwd2tWrVytW+Xn311bZ48WLXZ+zdd99tSUlJ1rlz51gvHgAAAHKZFyFWJzjQQVx///23nXvuudasWTP76quv7L333nP9xQIAAKBgiWlNbG6jJhYAACB/5DUvWmIBAACASIRYAAAAeIcQCwAAAO94ccYuwCcHDx50fRvrRByVK1e2tm3bWuHChWO9WAAA5CuEWCCKpkyZYgMHDrS1a9eGh9WoUcNGjRpl3bt3j+myAUB2sXGOvIhyAiCKAbZnz5522mmn2fz5823nzp3uWrc1XPcDgG+07qpTp46dc845dvnll7tr3WadhlgjxAJRaqVQC2yXLl1s2rRp1rJlSytdurS71m0Nv+uuu9x4AOALNs6Rl9FPLBAFc+bMca0TWrkruKam4a1bt7bZs2dbu3btYrKMAJAV2uhWi6sCqzbGI0/9rlO+d+vWzZYuXWorV66ktABRRT+xQC5SnZiceuqpad4fDA/GA4C8TjWwqu8fMmRIigArup2YmOhO/67xgFggxAJRoAMdRK0SaQmGB+MBQF7HxjnyOkIsEAU6Ule9EDz22GNuN1sk3R4+fLjVrFnTjQcAPmDjHHkdIRaIAtWDqRut6dOnuzqxyAMgdFvDR44cSd0YAG+wcY68jhALRIn6gZ00aZItWbLEHcSlYnRdq7VCw+knFoBP2DhHXkfvBECU0Sk4gPx+Ehe1wCrAsnGOWOY1QiwAAMgQG+fIi3mN084CAIAMKbDSxzXyGkIsEGW0WAAAkPMIsUAO147p6F4dHEHtGABfsXGOvIjeCYAo4RzjAPIjrbt
0+lmdWvvyyy9317rNOg2xRogFotRKoRbYLl26uHOMt2zZ0kqXLu2udVvD77rrLjceAPiCjXPkZfROAETBnDlzXOuEVu4KrqlpuPqMnT17NgdHAPCCNrrV4qrAqo3xQoUKpTjZgfqKVT/YK1eupLQAMclrtMQCUcA5xgHkN6qBVX3/kCFDUgRY0e3ExERLSkpy4wGxQIgFooBzjAPIb9g4R15HiAWigHOMA8hv2DhHXkeIBaKAc4wDyG/YOEdeR4gFokT9wE6aNMmWLFniDuJSMbqu1Vqh4fQTC8AnbJwjr6N3AiDK6BQcQH4/iYtaYBVg2ThHLPMaIRYAAGSIjXPkxbzGaWcBAECGFFjp4xp5DTWxAAAA8A4hFgAAAN4hxAIAAMA7hFgAAAB4hxALAAAA7xBiAQAA4B1CLAAAALxDiAUAAIB3CLEAAADwDiEWAAAA3iHEAgAAwDuEWAAAAHiHEAsAAADvFLECJBQKuevk5ORYLwoAAADSEOS0ILelp0CF2J07d7rratWqxXpRAAAAcITcVrZs2XTvjwsdKebmI4cOHbINGzZYmTJlLC4uLtaLg3y+FamNpfXr11tCQkKsFwcAjhrrNeQWRVMF2CpVqlihQulXvhaolli9EFWrVo31YqAA0YqelT2A/IT1GnJDRi2wAQ7sAgAAgHcIsQAAAPAOIRbIAcWLF7dhw4a5awDID1ivIa8pUAd2AQAAIH+gJRYAAADeIcQCAADAO4RYAAAAeIcQC+SiGjVq2NNPPx2+rZNuTJs2LabLBADR0rdvX+vWrVvUpztnzhy3vvzrr7+iPm34ixCLArVy1UowuJQvX94uuOAC+/HHH2O2TBs3brROnTrFbP4A/FyHFStWzOrUqWMPPfSQHThwIM+FTiA3EGJRoCi0Kjjq8tlnn1mRIkWsS5cuMVueSpUq0V0NgCyvw1auXGkDBw60Bx54wEaMGJHl6Rw8eNCdij1aoj09IDMIsShQFBgVHHVp3LixDR482J0HfMuWLe7+e+65x0466SQrWbKk1apVy+6//37bv39/+PGLFy+2c845x8qUKeNOu9i0aVP7/vvvw/d/9dVX1rZtW4uPj3fnGL/11ltt165d6S5PZDnB2rVr3e0pU6a4eWgZGjVqZPPnz0/xmKzOA0D+W4dVr17dbrzxRmvfvr29//77tnfvXrvrrrvshBNOsFKlSlmLFi3cLvjA2LFj7ZhjjnHjNmjQwE3n2muvtXHjxtl7770XbuHVY9Ladf/DDz+4YVpPpTe9devWhcd/8MEH7fjjj3fryRtuuMH27dsXvk9hd/jw4VazZk23HtN6btKkSSme54cffujWxbpf68NgvkAkQiwKrL///tvGjx/vdsmptEAUTrVy/vnnn+2ZZ56xV155xUaPHh1+TJ8+faxq1ar23Xff2YIFC1wILlq0qLtv9erVrpWkR48erkRhwoQJLnDefPPNWVque++91/0Y6UdDK/HLLrssvLswWvMAkD8o5Ckgah2gDd533nnHrRt69erl1hVqsQ3s3r3bnnjiCXv11Vftp59+smeffdYuvfTSFHuoWrdunel5p55ehQoV3HDt5Vq2bJkLw2+//bbbMFeoDSjAvv766/bSSy+5x91xxx12xRVX2BdffOHuV8NC9+7d7aKLLnLrweuuu86ta4HD6GQHQEFw9dVXhwoXLhwqVaqUu+jjX7ly5dCCBQvSfcyIESNCTZs2Dd8uU6ZMaOzYsWmO269fv9D111+fYtjcuXNDhQoVCu3Zs8fdrl69emj06NHh+7UMU6dOdf8nJSW526+++mr4/p9++skNW7ZsWabnASD/rsO6du3q/j906FBo1qxZoeLFi4f69u3r1m2///57ivHPO++8UGJiovt/zJgxbl3yww8/pDvNwOzZs92427dvDw9btGiRG6b11JGmV65cudCuXbvCw1588cVQ6dKlQwcPHgz9888/oZIlS4bmzZuX4nFat1122WXufy1zgwYNUtx
/zz33HLZMQJHDYy2Qf2m31Isvvuj+3759u73wwgvuwKpvv/3W7Z5Ty6ZaJ9TiqZZatYBqd1jgzjvvdK0Cb7zxhtuNp9aO2rVrh0sN1ALy5ptvhsdXTtWus6SkJKtfv36mlrFhw4bh/ytXruyu//jjD6tXr17U5gHAT9OnT7fSpUu7Mid97y+//HLr2bOn24OkPTeRVGIQ7GUSHQwWuX45WulNT+UBKocKtGrVyq1P1cKqa7XgdujQIcVj1Jp8+umnu//ViqtyiEiaBpAaIRYFimrFVD4Q0G6wsmXLurKBzp07u3IB7fbq2LGjG65dc6NGjQqPr4Mo9KMxY8YM++ijj9x5xDXOJZdc4lbOAwYMcDWqqZ144omZXsagPEFUgybBARPRmgcAvzfEFSCrVKniDk7VxnfhwoVdiZOuIynwRpYeBOuUjBQq9H+VhpFnpY88NiCr04ukdZhoHar63Ugc5IqsIsSiQNMKWCvsPXv22Lx581xrrGpSA7/++uthj1Frhy6q41K96pgxY1yIbdKkiauljQzJ0ZYb8wDgz4a4qAVTvQNoj40O+swKhWE9NpIOyBLVyB577LHuf9WmZpb2GGmdqpAr//3vf12Y1oGo5cqVCx8EdvbZZ6f5eO1R0gFjkTQNIDUO7EKBot1rmzZtchftsrrllltcy4AOIKhbt65bsaplVeUEKiuYOnVq+LFaKevgCR2soHD79ddfuwO8gl346tlAQVjjaIWvAyp01G80D7rKjXkA8Is2qrUX6aqrrnIHUam0SCVSOoBKLZ5HOgGLSpR++eUX27p1q2txVUhW4NSeJ61jNI3IPVJHotKAfv36uQ1u9TKgPVZaR6nBQAfP6sBVNQKoZwStaxcuXGjPPfecuy3qzUDzvfvuu91yvfXWW65cAkiNEIsCZebMma7OVBfVXCmEvvvuu9auXTu7+OKL3YpVK1t1v6WwqC62AtpNt23bNvdDoR8NHdWretrgqFvVhuno2hUrVrjWELWODB061O3yi5bcmAcA/2iPkNZN6jv25JNPdicw0PrtSGVG/fv3d+M3a9bMtcBq41wlTepVYPny5W6dox4IHnnkkUwvy3nnnecaBc466yzr3bu3W7cqEAcefvhht25VyFYjgHpHUFBWl1uiZZ48ebLrflD1terF4LHHHjuKVwf5VZyO7or1QgAAAABZQUssAAAAvEOIBQAAgHcIsQAAAPAOIRYAAADeIcQCAADAO4RYAAAAeIcQCwAAAO8QYgEAAOAdQiwAAAC8Q4gFgFy0ZcsWu/HGG92pNYsXL26VKlWyjh07utN9AgAyr0gWxgUAHKUePXrYvn37bNy4cVarVi3bvHmzffbZZ7Zt27YcmZ/mVaxYsRyZNgDEEi2xAJBL/vrrL5s7d6498cQTds4551j16tWtefPmlpiYaBdffHF4nAEDBljFihWtRIkSduqpp9r06dPD05g8ebKdcsoprhW3Ro0aNmrUqBTz0LCHH37YrrrqKktISLDrr7/eDf/qq6+sbdu2Fh8fb9WqVbNbb73Vdu3aFX7cCy+8YHXr1nXz1Lx79uyZa68LAGQHIRYAcknp0qXdZdq0abZ3797D7j906JB16tTJlRaMHz/efv75Z3v88cetcOHC7v4FCxbYpZdeav/6179syZIl9sADD9j9999vY8eOTTGdkSNHWqNGjWzRokXu/tWrV9sFF1zgWoF//PFHmzBhggu1N998sxv/+++/d6H2oYcesl9++cVmzpxpZ511Vi69KgCQPXGhUCiUzccCALJILan9+/e3PXv2WJMmTezss892obRhw4b2ySefuBC7bNkyO+mkkw57bJ8+fVxNrcYLDBo0yGbMmGE//fRTuCX29NNPt6lTp4bHue6661wQfvnll8PDFGI1b7XGfvjhh3bNNdfYb7/9ZmXKlMnx1wAAooGWWADIRWoN3bBhg73//vuudXTOnDkuzKo19YcffrCqVaumGWBF4bZNmzYphun2ypUr7eD
Bg+FhzZo1SzHO4sWL3fSDlmBddDCZWn6TkpKsQ4cOrrRBNbpXXnmlvfnmm7Z79+4cegUAIDoIsQCQy1R3quCoXf3z5s2zvn372rBhw1y9ajSUKlUqxe2///7b1dkqJAcXBVuF39q1a7vW14ULF9rbb79tlStXtqFDh7pyBNXnAkBeRYgFgBhr0KCB262vkgLt0l+xYkWa49WvX/+wrrh0Wy23Qd1sWtTSq/raOnXqHHYJei4oUqSItW/f3p588klXN7t27Vr7/PPPo/xMASB66GILAHKJutHq1auXXXvttS6wqgVUB1UpOHbt2tXVqOqAKpUcPPXUUy5kLl++3OLi4lzpwcCBA+2MM85wvQ/07t3b5s+fb88//7zrWSAj99xzj7Vs2dIdyKX6WLXUKtTOmjXLPV69H6xZs8bN+9hjj3U1sio1OPnkk3PttQGArCLEAkAuUS1qixYtbPTo0a7HgP3797vurnSg15AhQ8IHft1111122WWXudZZBVn1UBC0qE6cONHt7leQ1a5/9SigcoSMKDB/8cUXdu+997putnQ8r8oIFITlmGOOsSlTprjeDv755x/X1ZZKC9SVFwDkVfROAAAAAO9QEwsAAADvEGIBAADgHUIsAAAAvEOIBQAAgHcIsQAAAPAOIRYAAADeIcQCAADAO4RYAAAAeIcQCwAAAO8QYgEAAOAdQiwAAADMN/8LSaqnHTCV9gsAAAAASUVORK5CYII=", + "text/plain": [ + "
" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], "source": [ "plot_score_dists(baseline_performance, perturbed_performance, type=\"box\")" ] @@ -770,15 +719,44 @@ "cell_type": "code", "execution_count": null, "metadata": {}, - "outputs": [], + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "dict_keys(['input_ids', 'token_type_ids', 'attention_mask'])\n" + ] + }, + { + "ename": "RuntimeError", + "evalue": "The size of tensor a (330) must match the size of tensor b (2) at non-singleton dimension 1", + "output_type": "error", + "traceback": [ + "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m", + "\u001b[0;31mRuntimeError\u001b[0m Traceback (most recent call last)", + "Cell \u001b[0;32mIn[17], line 8\u001b[0m\n\u001b[1;32m 6\u001b[0m perturbed_sequences \u001b[38;5;241m=\u001b[39m batch[\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mperturbed_sequences\u001b[39m\u001b[38;5;124m\"\u001b[39m]\n\u001b[1;32m 7\u001b[0m \u001b[38;5;28mprint\u001b[39m(sequences\u001b[38;5;241m.\u001b[39mkeys())\n\u001b[0;32m----> 8\u001b[0m patch_head_out \u001b[38;5;241m=\u001b[39m \u001b[43mcat_model\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mpatch\u001b[49m\u001b[43m(\u001b[49m\u001b[43msequences\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mperturbed_sequences\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mpatch_type\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43mhead_all\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m)\u001b[49m\n\u001b[1;32m 9\u001b[0m patching_head_outputs\u001b[38;5;241m.\u001b[39mappend(patch_head_out)\n\u001b[1;32m 11\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m i \u001b[38;5;241m==\u001b[39m ITERS:\n", + "File \u001b[0;32m/opt/anaconda3/envs/mechir/lib/python3.10/site-packages/mechir/modelling/cat.py:184\u001b[0m, in \u001b[0;36mCat.patch\u001b[0;34m(self, sequences, 
sequences_p, patch_type, layer_head_list, patching_metric)\u001b[0m\n\u001b[1;32m 173\u001b[0m \u001b[38;5;28;01mdef\u001b[39;00m\u001b[38;5;250m \u001b[39m\u001b[38;5;21mpatch\u001b[39m(\n\u001b[1;32m 174\u001b[0m \u001b[38;5;28mself\u001b[39m,\n\u001b[1;32m 175\u001b[0m sequences: \u001b[38;5;28mdict\u001b[39m,\n\u001b[0;32m (...)\u001b[0m\n\u001b[1;32m 179\u001b[0m patching_metric: Callable \u001b[38;5;241m=\u001b[39m linear_rank_function,\n\u001b[1;32m 180\u001b[0m ):\n\u001b[1;32m 181\u001b[0m \u001b[38;5;28;01massert\u001b[39;00m (\n\u001b[1;32m 182\u001b[0m patch_type \u001b[38;5;129;01min\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_patch_funcs\n\u001b[1;32m 183\u001b[0m ), \u001b[38;5;124mf\u001b[39m\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mPatch type \u001b[39m\u001b[38;5;132;01m{\u001b[39;00mpatch_type\u001b[38;5;132;01m}\u001b[39;00m\u001b[38;5;124m not recognized. Choose from \u001b[39m\u001b[38;5;132;01m{\u001b[39;00m\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_patch_funcs\u001b[38;5;241m.\u001b[39mkeys()\u001b[38;5;132;01m}\u001b[39;00m\u001b[38;5;124m\"\u001b[39m\n\u001b[0;32m--> 184\u001b[0m scores, _ \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mscore\u001b[49m\u001b[43m(\u001b[49m\u001b[43msequences\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 185\u001b[0m scores_p, cache \u001b[38;5;241m=\u001b[39m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mscore(sequences_p, cache\u001b[38;5;241m=\u001b[39m\u001b[38;5;28;01mTrue\u001b[39;00m)\n\u001b[1;32m 187\u001b[0m patching_kwargs \u001b[38;5;241m=\u001b[39m {\n\u001b[1;32m 188\u001b[0m \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mcorrupted_tokens\u001b[39m\u001b[38;5;124m\"\u001b[39m: sequences,\n\u001b[1;32m 189\u001b[0m \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mclean_cache\u001b[39m\u001b[38;5;124m\"\u001b[39m: cache,\n\u001b[0;32m (...)\u001b[0m\n\u001b[1;32m 193\u001b[0m 
\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mscores_p\u001b[39m\u001b[38;5;124m\"\u001b[39m: scores_p,\n\u001b[1;32m 194\u001b[0m }\n", + "File \u001b[0;32m/opt/anaconda3/envs/mechir/lib/python3.10/site-packages/mechir/modelling/cat.py:170\u001b[0m, in \u001b[0;36mCat.score\u001b[0;34m(self, sequences, cache)\u001b[0m\n\u001b[1;32m 165\u001b[0m logits, cache \u001b[38;5;241m=\u001b[39m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mrun_with_cache(\n\u001b[1;32m 166\u001b[0m sequences[\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124minput_ids\u001b[39m\u001b[38;5;124m\"\u001b[39m], sequences[\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mattention_mask\u001b[39m\u001b[38;5;124m\"\u001b[39m]\n\u001b[1;32m 167\u001b[0m )\n\u001b[1;32m 168\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m logits, cache\n\u001b[0;32m--> 170\u001b[0m logits \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mforward\u001b[49m\u001b[43m(\u001b[49m\u001b[43msequences\u001b[49m\u001b[43m[\u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43minput_ids\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m]\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43msequences\u001b[49m\u001b[43m[\u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43mattention_mask\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m]\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 171\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m logits, \u001b[38;5;28;01mNone\u001b[39;00m\n", + "File \u001b[0;32m/opt/anaconda3/envs/mechir/lib/python3.10/site-packages/mechir/modelling/cat.py:62\u001b[0m, in \u001b[0;36mCat.forward\u001b[0;34m(self, input_ids, attention_mask, token_type_ids)\u001b[0m\n\u001b[1;32m 56\u001b[0m \u001b[38;5;28;01mdef\u001b[39;00m\u001b[38;5;250m \u001b[39m\u001b[38;5;21mforward\u001b[39m(\n\u001b[1;32m 57\u001b[0m \u001b[38;5;28mself\u001b[39m,\n\u001b[1;32m 58\u001b[0m input_ids: 
Float[torch\u001b[38;5;241m.\u001b[39mTensor, \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mbatch seq\u001b[39m\u001b[38;5;124m\"\u001b[39m],\n\u001b[1;32m 59\u001b[0m attention_mask: Float[torch\u001b[38;5;241m.\u001b[39mTensor, \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mbatch seq\u001b[39m\u001b[38;5;124m\"\u001b[39m],\n\u001b[1;32m 60\u001b[0m token_type_ids: Float[torch\u001b[38;5;241m.\u001b[39mTensor, \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mbatch seq\u001b[39m\u001b[38;5;124m\"\u001b[39m] \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;01mNone\u001b[39;00m,\n\u001b[1;32m 61\u001b[0m ):\n\u001b[0;32m---> 62\u001b[0m model_output \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_model\u001b[49m\u001b[43m(\u001b[49m\n\u001b[1;32m 63\u001b[0m \u001b[43m \u001b[49m\u001b[38;5;28;43minput\u001b[39;49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43minput_ids\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 64\u001b[0m \u001b[43m \u001b[49m\u001b[43mattention_mask\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mattention_mask\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 65\u001b[0m \u001b[43m \u001b[49m\u001b[43mtoken_type_ids\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mtoken_type_ids\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 66\u001b[0m \u001b[43m \u001b[49m\u001b[43mreturn_type\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43mlogits\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m,\u001b[49m\n\u001b[1;32m 67\u001b[0m \u001b[43m \u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 68\u001b[0m model_output \u001b[38;5;241m=\u001b[39m (\n\u001b[1;32m 69\u001b[0m F\u001b[38;5;241m.\u001b[39mlog_softmax(model_output, dim\u001b[38;5;241m=\u001b[39m\u001b[38;5;241m-\u001b[39m\u001b[38;5;241m1\u001b[39m)[:, \u001b[38;5;241m0\u001b[39m]\n\u001b[1;32m 70\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m 
\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39msoftmax_output\n\u001b[1;32m 71\u001b[0m \u001b[38;5;28;01melse\u001b[39;00m model_output[:, \u001b[38;5;241m0\u001b[39m]\n\u001b[1;32m 72\u001b[0m )\n\u001b[1;32m 73\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m model_output\n", + "File \u001b[0;32m/opt/anaconda3/envs/mechir/lib/python3.10/site-packages/torch/nn/modules/module.py:1739\u001b[0m, in \u001b[0;36mModule._wrapped_call_impl\u001b[0;34m(self, *args, **kwargs)\u001b[0m\n\u001b[1;32m 1737\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_compiled_call_impl(\u001b[38;5;241m*\u001b[39margs, \u001b[38;5;241m*\u001b[39m\u001b[38;5;241m*\u001b[39mkwargs) \u001b[38;5;66;03m# type: ignore[misc]\u001b[39;00m\n\u001b[1;32m 1738\u001b[0m \u001b[38;5;28;01melse\u001b[39;00m:\n\u001b[0;32m-> 1739\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_call_impl\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n", + "File \u001b[0;32m/opt/anaconda3/envs/mechir/lib/python3.10/site-packages/torch/nn/modules/module.py:1750\u001b[0m, in \u001b[0;36mModule._call_impl\u001b[0;34m(self, *args, **kwargs)\u001b[0m\n\u001b[1;32m 1745\u001b[0m \u001b[38;5;66;03m# If we don't have any hooks, we want to skip the rest of the logic in\u001b[39;00m\n\u001b[1;32m 1746\u001b[0m \u001b[38;5;66;03m# this function, and just call forward.\u001b[39;00m\n\u001b[1;32m 1747\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;129;01mnot\u001b[39;00m (\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_backward_hooks \u001b[38;5;129;01mor\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_backward_pre_hooks \u001b[38;5;129;01mor\u001b[39;00m 
\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_forward_hooks \u001b[38;5;129;01mor\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_forward_pre_hooks\n\u001b[1;32m 1748\u001b[0m \u001b[38;5;129;01mor\u001b[39;00m _global_backward_pre_hooks \u001b[38;5;129;01mor\u001b[39;00m _global_backward_hooks\n\u001b[1;32m 1749\u001b[0m \u001b[38;5;129;01mor\u001b[39;00m _global_forward_hooks \u001b[38;5;129;01mor\u001b[39;00m _global_forward_pre_hooks):\n\u001b[0;32m-> 1750\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[43mforward_call\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 1752\u001b[0m result \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;01mNone\u001b[39;00m\n\u001b[1;32m 1753\u001b[0m called_always_called_hooks \u001b[38;5;241m=\u001b[39m \u001b[38;5;28mset\u001b[39m()\n", + "File \u001b[0;32m/opt/anaconda3/envs/mechir/lib/python3.10/site-packages/mechir/modelling/hooked/HookedEncoderForSequenceClassification.py:53\u001b[0m, in \u001b[0;36mHookedEncoderForSequenceClassification.forward\u001b[0;34m(self, input, return_type, token_type_ids, attention_mask, start_at_layer, stop_at_layer)\u001b[0m\n\u001b[1;32m 35\u001b[0m \u001b[38;5;28;01mdef\u001b[39;00m\u001b[38;5;250m \u001b[39m\u001b[38;5;21mforward\u001b[39m(\n\u001b[1;32m 36\u001b[0m \u001b[38;5;28mself\u001b[39m,\n\u001b[1;32m 37\u001b[0m \u001b[38;5;28minput\u001b[39m: Int[torch\u001b[38;5;241m.\u001b[39mTensor, \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mbatch pos\u001b[39m\u001b[38;5;124m\"\u001b[39m],\n\u001b[0;32m (...)\u001b[0m\n\u001b[1;32m 42\u001b[0m stop_at_layer: Optional[\u001b[38;5;28mint\u001b[39m] \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;01mNone\u001b[39;00m,\n\u001b[1;32m 43\u001b[0m ) \u001b[38;5;241m-\u001b[39m\u001b[38;5;241m>\u001b[39m 
Optional[Float[torch\u001b[38;5;241m.\u001b[39mTensor, \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mbatch pos d_vocab\u001b[39m\u001b[38;5;124m\"\u001b[39m]]:\n\u001b[1;32m 44\u001b[0m \u001b[38;5;250m \u001b[39m\u001b[38;5;124;03m\"\"\"Input must be a batch of tokens. Strings and lists of strings are not yet supported.\u001b[39;00m\n\u001b[1;32m 45\u001b[0m \n\u001b[1;32m 46\u001b[0m \u001b[38;5;124;03m return_type Optional[str]: The type of output to return. Can be one of: None (return nothing, don't calculate logits), or 'logits' (return logits).\u001b[39;00m\n\u001b[0;32m (...)\u001b[0m\n\u001b[1;32m 50\u001b[0m \u001b[38;5;124;03m attention_mask: Optional[torch.Tensor]: A binary mask which indicates which tokens should be attended to (1) and which should be ignored (0). Primarily used for padding variable-length sentences in a batch. For instance, in a batch with sentences of differing lengths, shorter sentences are padded with 0s on the right. If not provided, the model assumes all tokens should be attended to.\u001b[39;00m\n\u001b[1;32m 51\u001b[0m \u001b[38;5;124;03m \"\"\"\u001b[39;00m\n\u001b[0;32m---> 53\u001b[0m hidden \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;43msuper\u001b[39;49m\u001b[43m(\u001b[49m\u001b[43m)\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mforward\u001b[49m\u001b[43m(\u001b[49m\n\u001b[1;32m 54\u001b[0m \u001b[43m \u001b[49m\u001b[38;5;28;43minput\u001b[39;49m\u001b[43m,\u001b[49m\n\u001b[1;32m 55\u001b[0m \u001b[43m \u001b[49m\u001b[43mtoken_type_ids\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mtoken_type_ids\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 56\u001b[0m \u001b[43m \u001b[49m\u001b[43mstart_at_layer\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mstart_at_layer\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 57\u001b[0m \u001b[43m \u001b[49m\u001b[43mstop_at_layer\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mstop_at_layer\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 58\u001b[0m \u001b[43m 
\u001b[49m\u001b[43mreturn_type\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43membeddings\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m,\u001b[49m\n\u001b[1;32m 59\u001b[0m \u001b[43m \u001b[49m\u001b[43mattention_mask\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mattention_mask\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 60\u001b[0m \u001b[43m \u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 61\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m return_type \u001b[38;5;241m==\u001b[39m \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124membeddings\u001b[39m\u001b[38;5;124m\"\u001b[39m \u001b[38;5;129;01mor\u001b[39;00m stop_at_layer \u001b[38;5;129;01mis\u001b[39;00m \u001b[38;5;129;01mnot\u001b[39;00m \u001b[38;5;28;01mNone\u001b[39;00m:\n\u001b[1;32m 62\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m hidden\n", + "File \u001b[0;32m/opt/anaconda3/envs/mechir/lib/python3.10/site-packages/mechir/modelling/hooked/HookedEncoder.py:240\u001b[0m, in \u001b[0;36mHookedEncoder.forward\u001b[0;34m(self, input, return_type, token_type_ids, attention_mask, start_at_layer, stop_at_layer)\u001b[0m\n\u001b[1;32m 237\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m start_at_layer \u001b[38;5;129;01mis\u001b[39;00m \u001b[38;5;28;01mNone\u001b[39;00m:\n\u001b[1;32m 238\u001b[0m start_at_layer \u001b[38;5;241m=\u001b[39m \u001b[38;5;241m0\u001b[39m\n\u001b[0;32m--> 240\u001b[0m resid \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mencoder_output\u001b[49m\u001b[43m(\u001b[49m\n\u001b[1;32m 241\u001b[0m \u001b[43m \u001b[49m\u001b[43mresidual\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 242\u001b[0m \u001b[43m \u001b[49m\u001b[43mtoken_type_ids\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mtoken_type_ids\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 243\u001b[0m \u001b[43m 
\u001b[49m\u001b[43mstart_at_layer\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mstart_at_layer\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 244\u001b[0m \u001b[43m \u001b[49m\u001b[43mstop_at_layer\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mstop_at_layer\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 245\u001b[0m \u001b[43m \u001b[49m\u001b[43mone_zero_attention_mask\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mattention_mask\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 246\u001b[0m \u001b[43m\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 248\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m stop_at_layer \u001b[38;5;129;01mis\u001b[39;00m \u001b[38;5;129;01mnot\u001b[39;00m \u001b[38;5;28;01mNone\u001b[39;00m \u001b[38;5;129;01mor\u001b[39;00m return_type \u001b[38;5;241m==\u001b[39m \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124membeddings\u001b[39m\u001b[38;5;124m\"\u001b[39m:\n\u001b[1;32m 249\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m resid\n", + "File \u001b[0;32m/opt/anaconda3/envs/mechir/lib/python3.10/site-packages/mechir/modelling/hooked/HookedEncoder.py:172\u001b[0m, in \u001b[0;36mHookedEncoder.encoder_output\u001b[0;34m(self, tokens, token_type_ids, start_at_layer, stop_at_layer, one_zero_attention_mask)\u001b[0m\n\u001b[1;32m 169\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m one_zero_attention_mask \u001b[38;5;129;01mis\u001b[39;00m \u001b[38;5;129;01mnot\u001b[39;00m \u001b[38;5;28;01mNone\u001b[39;00m:\n\u001b[1;32m 170\u001b[0m one_zero_attention_mask \u001b[38;5;241m=\u001b[39m one_zero_attention_mask\u001b[38;5;241m.\u001b[39mto(\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mcfg\u001b[38;5;241m.\u001b[39mdevice)\n\u001b[0;32m--> 172\u001b[0m resid \u001b[38;5;241m=\u001b[39m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mhook_full_embed(\u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43membed\u001b[49m\u001b[43m(\u001b[49m\u001b[43mtokens\u001b[49m\u001b[43m,\u001b[49m\u001b[43m 
\u001b[49m\u001b[43mtoken_type_ids\u001b[49m\u001b[43m)\u001b[49m)\n\u001b[1;32m 174\u001b[0m large_negative_number \u001b[38;5;241m=\u001b[39m \u001b[38;5;241m-\u001b[39mtorch\u001b[38;5;241m.\u001b[39minf\n\u001b[1;32m 175\u001b[0m mask \u001b[38;5;241m=\u001b[39m (\n\u001b[1;32m 176\u001b[0m repeat(\u001b[38;5;241m1\u001b[39m \u001b[38;5;241m-\u001b[39m one_zero_attention_mask, \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mbatch pos -> batch 1 1 pos\u001b[39m\u001b[38;5;124m\"\u001b[39m)\n\u001b[1;32m 177\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m one_zero_attention_mask \u001b[38;5;129;01mis\u001b[39;00m \u001b[38;5;129;01mnot\u001b[39;00m \u001b[38;5;28;01mNone\u001b[39;00m\n\u001b[1;32m 178\u001b[0m \u001b[38;5;28;01melse\u001b[39;00m \u001b[38;5;28;01mNone\u001b[39;00m\n\u001b[1;32m 179\u001b[0m )\n", + "File \u001b[0;32m/opt/anaconda3/envs/mechir/lib/python3.10/site-packages/torch/nn/modules/module.py:1739\u001b[0m, in \u001b[0;36mModule._wrapped_call_impl\u001b[0;34m(self, *args, **kwargs)\u001b[0m\n\u001b[1;32m 1737\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_compiled_call_impl(\u001b[38;5;241m*\u001b[39margs, \u001b[38;5;241m*\u001b[39m\u001b[38;5;241m*\u001b[39mkwargs) \u001b[38;5;66;03m# type: ignore[misc]\u001b[39;00m\n\u001b[1;32m 1738\u001b[0m \u001b[38;5;28;01melse\u001b[39;00m:\n\u001b[0;32m-> 1739\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_call_impl\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n", + "File \u001b[0;32m/opt/anaconda3/envs/mechir/lib/python3.10/site-packages/torch/nn/modules/module.py:1750\u001b[0m, in \u001b[0;36mModule._call_impl\u001b[0;34m(self, *args, **kwargs)\u001b[0m\n\u001b[1;32m 
1745\u001b[0m \u001b[38;5;66;03m# If we don't have any hooks, we want to skip the rest of the logic in\u001b[39;00m\n\u001b[1;32m 1746\u001b[0m \u001b[38;5;66;03m# this function, and just call forward.\u001b[39;00m\n\u001b[1;32m 1747\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;129;01mnot\u001b[39;00m (\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_backward_hooks \u001b[38;5;129;01mor\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_backward_pre_hooks \u001b[38;5;129;01mor\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_forward_hooks \u001b[38;5;129;01mor\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_forward_pre_hooks\n\u001b[1;32m 1748\u001b[0m \u001b[38;5;129;01mor\u001b[39;00m _global_backward_pre_hooks \u001b[38;5;129;01mor\u001b[39;00m _global_backward_hooks\n\u001b[1;32m 1749\u001b[0m \u001b[38;5;129;01mor\u001b[39;00m _global_forward_hooks \u001b[38;5;129;01mor\u001b[39;00m _global_forward_pre_hooks):\n\u001b[0;32m-> 1750\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[43mforward_call\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 1752\u001b[0m result \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;01mNone\u001b[39;00m\n\u001b[1;32m 1753\u001b[0m called_always_called_hooks \u001b[38;5;241m=\u001b[39m \u001b[38;5;28mset\u001b[39m()\n", + "File \u001b[0;32m/opt/anaconda3/envs/mechir/lib/python3.10/site-packages/mechir/modelling/hooked/hooked_components.py:46\u001b[0m, in \u001b[0;36mBertEmbed.forward\u001b[0;34m(self, input_ids, token_type_ids)\u001b[0m\n\u001b[1;32m 42\u001b[0m position_embeddings_out \u001b[38;5;241m=\u001b[39m 
\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mhook_pos_embed(\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mpos_embed(index_ids))\n\u001b[1;32m 43\u001b[0m token_type_embeddings_out \u001b[38;5;241m=\u001b[39m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mhook_token_type_embed(\n\u001b[1;32m 44\u001b[0m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mtoken_type_embed(token_type_ids)\n\u001b[1;32m 45\u001b[0m ) \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mcfg\u001b[38;5;241m.\u001b[39muse_token_type_ids \u001b[38;5;28;01melse\u001b[39;00m torch\u001b[38;5;241m.\u001b[39mzeros_like(word_embeddings_out)\n\u001b[0;32m---> 46\u001b[0m embeddings_out \u001b[38;5;241m=\u001b[39m \u001b[43mword_embeddings_out\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m+\u001b[39;49m\u001b[43m \u001b[49m\u001b[43mposition_embeddings_out\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m+\u001b[39;49m\u001b[43m \u001b[49m\u001b[43mtoken_type_embeddings_out\u001b[49m\n\u001b[1;32m 47\u001b[0m layer_norm_out \u001b[38;5;241m=\u001b[39m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mln(embeddings_out)\n\u001b[1;32m 48\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m layer_norm_out\n", + "\u001b[0;31mRuntimeError\u001b[0m: The size of tensor a (330) must match the size of tensor b (2) at non-singleton dimension 1" + ] + } + ], "source": [ "ITERS=10\n", "patching_head_outputs = []\n", - "for i, batch in enumerate(dataloader):\n", + "for i, batch in enumerate(cat_dataloader):\n", " # Get the queries, documents, and perturbed documents from the batch\n", " sequences = batch[\"sequences\"]\n", " perturbed_sequences = batch[\"perturbed_sequences\"]\n", - " \n", " patch_head_out = cat_model.patch(sequences, perturbed_sequences, patch_type=\"head_all\")\n", " patching_head_outputs.append(patch_head_out)\n", " \n", @@ -795,6 +773,13 @@ "source": [ "plot_components(mean_head_outputs)" ] + }, + { + "cell_type": 
"code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [] } ], "metadata": { @@ -816,7 +801,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.12.6" + "version": "3.10.16" } }, "nbformat": 4, diff --git a/pyproject.toml b/pyproject.toml index 2a33bf5..9738281 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -12,6 +12,7 @@ dependencies = [ "sae_lens", "ir_datasets", "streamlit", + "seaborn" ] requires-python = ">=3.10" authors = [ diff --git a/src/mechir/data/loader/base.py b/src/mechir/data/loader/base.py index e7b7c6e..d2c6cdf 100644 --- a/src/mechir/data/loader/base.py +++ b/src/mechir/data/loader/base.py @@ -1,3 +1,5 @@ +import torch + def pad(a: list, b: list, tok: str): assert type(a) == type(b) == list, "Both a and b must be lists" @@ -27,7 +29,7 @@ class BaseCollator(object): q_max_length: int = 30 d_max_length: int = 300 special_token: int = "a" - perturb_type: str = "append" + perturb_type: str = None pre_perturbed: bool = False def __init__( @@ -38,7 +40,7 @@ def __init__( q_max_length=30, d_max_length=200, special_token="a", - perturb_type="append", + perturb_type=None, pre_perturbed=False, ) -> None: assert ( @@ -55,7 +57,7 @@ def __init__( self.d_max_length = d_max_length self.special_token = special_token self.special_token_id = self.tokenizer.convert_tokens_to_ids(self.special_token) - self.perturb_type = perturb_type + self.perturb_type = perturb_type if perturb_type is not None else transformation_func.perturb_type self.pre_perturbed = pre_perturbed def get_data(self, batch): @@ -222,4 +224,4 @@ def pad_tokenized( return finalized_tokenized_a_batch, finalized_tokenized_b_batch -__all__ = ["BaseCollator", "pad_tokenized", "pad"] \ No newline at end of file +__all__ = ["BaseCollator", "pad_tokenized", "pad"] diff --git a/src/mechir/modelling/hooked/HookedDistilBert.py b/src/mechir/modelling/hooked/HookedDistilBert.py index 0a18fd7..bc87362 100644 --- 
a/src/mechir/modelling/hooked/HookedDistilBert.py +++ b/src/mechir/modelling/hooked/HookedDistilBert.py @@ -18,7 +18,7 @@ from transformer_lens import HookedTransformerConfig from transformer_lens.components import BertBlock, BertMLMHead, Unembed from transformer_lens.hook_points import HookPoint -from .hooked_components import DistilBertEmbed +from .hooked_components import BertEmbed from .HookedEncoder import HookedEncoder @@ -51,7 +51,7 @@ def __init__(self, cfg, tokenizer=None, move_to_device=True, **kwargs): if self.cfg.d_vocab_out == -1: self.cfg.d_vocab_out = self.cfg.d_vocab - self.embed = DistilBertEmbed(self.cfg) + self.embed = BertEmbed(self.cfg) self.blocks = nn.ModuleList( [BertBlock(self.cfg) for _ in range(self.cfg.n_layers)] ) diff --git a/src/mechir/modelling/hooked/HookedEncoder.py b/src/mechir/modelling/hooked/HookedEncoder.py index 34c4f0d..42c53a7 100644 --- a/src/mechir/modelling/hooked/HookedEncoder.py +++ b/src/mechir/modelling/hooked/HookedEncoder.py @@ -216,24 +216,19 @@ def forward( assert ( self.tokenizer is not None ), "Must provide a tokenizer if input is a string" - residual, token_type_ids_from_tokenizer, attention_mask = ( + input, token_type_ids_from_tokenizer, attention_mask = ( self.to_tokens(input) ) - # If token_type_ids or attention mask are not provided, use the ones from the tokenizer - token_type_ids = ( - token_type_ids_from_tokenizer - if token_type_ids is None - else token_type_ids - ) - one_zero_attention_mask = ( - attention_mask - if one_zero_attention_mask is None - else one_zero_attention_mask - ) + # If token_type_ids or attention mask are not provided, use the ones from the tokenizer + token_type_ids = ( + token_type_ids_from_tokenizer + if token_type_ids is None + else token_type_ids + ) else: assert type(input) is torch.Tensor - residual = input + residual = input if residual.device.type != self.cfg.device: residual = residual.to(self.cfg.device) @@ -247,7 +242,7 @@ def forward( token_type_ids=token_type_ids, 
start_at_layer=start_at_layer, stop_at_layer=stop_at_layer, - one_zero_attention_mask=one_zero_attention_mask, + one_zero_attention_mask=attention_mask, ) if stop_at_layer is not None or return_type == "embeddings": @@ -258,7 +253,7 @@ def forward( if return_type == "predictions": # Get predictions for masked tokens - logprobs = logits[tokens == self.tokenizer.mask_token_id].log_softmax( + logprobs = logits[logits == self.tokenizer.mask_token_id].log_softmax( dim=-1 ) predictions = self.tokenizer.decode(logprobs.argmax(dim=-1)) diff --git a/src/mechir/modelling/hooked/hooked_components.py b/src/mechir/modelling/hooked/hooked_components.py index b977eef..f204ece 100644 --- a/src/mechir/modelling/hooked/hooked_components.py +++ b/src/mechir/modelling/hooked/hooked_components.py @@ -37,15 +37,12 @@ def forward( ) -> Float[torch.Tensor, "batch pos d_model"]: base_index_id = torch.arange(input_ids.shape[1], device=input_ids.device) index_ids = einops.repeat(base_index_id, "pos -> batch pos", batch=input_ids.shape[0]) - if token_type_ids is None: - token_type_ids = torch.zeros_like(input_ids) word_embeddings_out = self.hook_embed(self.embed(input_ids)) position_embeddings_out = self.hook_pos_embed(self.pos_embed(index_ids)) token_type_embeddings_out = self.hook_token_type_embed( self.token_type_embed(token_type_ids) - ) - + ) if self.cfg.use_token_type_ids else torch.zeros_like(word_embeddings_out) embeddings_out = word_embeddings_out + position_embeddings_out + token_type_embeddings_out layer_norm_out = self.ln(embeddings_out) return layer_norm_out diff --git a/src/mechir/perturb/__init__.py b/src/mechir/perturb/__init__.py index 7987896..47a390a 100644 --- a/src/mechir/perturb/__init__.py +++ b/src/mechir/perturb/__init__.py @@ -3,6 +3,7 @@ from abc import ABC, abstractmethod from typing import TYPE_CHECKING from ..util import is_ir_axioms_availible +from functools import wraps from transformers.utils import _LazyModule, OptionalDependencyNotAvailable @@ -28,17 +29,23 
@@ def apply(self, document: str, query: str = None) -> str: return document -def perturbation(f): - """ - An alternative decorator for subclassing AbstractPerturbation. - """ - argcount = f.__code__.co_argcount +def perturbation(f=None, *, perturb_type: str = "append"): + def decorator(func): + argcount = func.__code__.co_argcount + + class CustomPerturbation(AbstractPerturbation): + def __init__(self): + self.perturb_type = perturb_type - class CustomPerturbation(AbstractPerturbation): - def apply(self, document: str, query: str = None) -> str: - return f(document, query) if argcount > 1 else f(document) + def apply(self, document: str, query: str = None) -> str: + return func(document, query) if argcount > 1 else func(document) - return CustomPerturbation() + return CustomPerturbation() + + if f is None: + return decorator # used as @perturbation(...) + else: + return decorator(f) # used as @perturbation # Explicitly define what should be importable from this module diff --git a/src/mechir/perturb/axiom/frequency.py b/src/mechir/perturb/axiom/frequency.py index 9338576..760e53d 100644 --- a/src/mechir/perturb/axiom/frequency.py +++ b/src/mechir/perturb/axiom/frequency.py @@ -21,6 +21,7 @@ class FrequencyPerturbation(IndexPerturbation): stopwords: Whether or not to filter valid terms with a stopword list exact_match: Forces returned terms to be present in both texts """ + perturb_type = "append" def __init__( self, @@ -57,6 +58,13 @@ def __init__( self.num_additions = num_additions self.loc = loc + if self.loc == 'end': + self.perturb_type = "append" + elif self.loc == 'start': + self.perturb_type = "prepend" + else: + raise ValueError("loc must be either 'start' or 'end'") + def _get_random_terms(self, text: str, terms: list) -> list: return random.choices( list(self.get_freq_text(text, terms).keys()), k=self.num_additions diff --git a/src/mechir/perturb/axiom/proximity.py b/src/mechir/perturb/axiom/proximity.py index d584108..52edbd1 100644 --- 
a/src/mechir/perturb/axiom/proximity.py +++ b/src/mechir/perturb/axiom/proximity.py @@ -20,7 +20,6 @@ class ProximityPerturbation(IndexPerturbation): stopwords: Whether or not to filter valid terms with a stopword list exact_match: Forces returned terms to be present in both texts """ - def __init__( self, index_location: Any | Path | str, @@ -57,6 +56,13 @@ def __init__( self.num_additions = num_additions self.loc = loc + if self.loc == 'end': + self.perturb_type = "append" + elif self.loc == 'start': + self.perturb_type = "prepend" + else: + raise ValueError("loc must be either 'start' or 'end'") + def _get_random_terms(self, text: str, terms: list) -> list: return random.choices( list(self.get_freq_text(text, terms).keys()), k=self.num_additions From fd4d3a0bff69fa5cabf86c3415c79c533a90f6c3 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E2=80=9CAndrew?= Date: Wed, 21 May 2025 09:41:23 +0100 Subject: [PATCH 14/21] minor changes --- src/mechir/modelling/cat.py | 2 +- src/mechir/modelling/dot.py | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/src/mechir/modelling/cat.py b/src/mechir/modelling/cat.py index c36b81b..8bd8616 100644 --- a/src/mechir/modelling/cat.py +++ b/src/mechir/modelling/cat.py @@ -23,7 +23,7 @@ def __init__( model_name_or_path: str, num_labels: int = 2, tokenizer=None, - special_token: str = "X", + special_token: str = "a", softmax_output: bool = False, return_cache: bool = False, ) -> None: diff --git a/src/mechir/modelling/dot.py b/src/mechir/modelling/dot.py index 1c20161..28f37d8 100644 --- a/src/mechir/modelling/dot.py +++ b/src/mechir/modelling/dot.py @@ -3,7 +3,7 @@ import os import torch from jaxtyping import Float -from transformers import AutoModel, AutoTokenizer, AutoConfig +from transformers import AutoModel, AutoTokenizer from transformer_lens.ActivationCache import ActivationCache from transformer_lens.hook_points import HookedRootModule import transformer_lens.utils as utils @@ -27,7 +27,7 @@ def __init__( 
model_name_or_path: str, pooling_type: str = "cls", tokenizer=None, - special_token: str = "X", + special_token: str = "a", return_cache: bool = False, ) -> None: super().__init__() From 3356b0ec1cb2728185bcf1556797ba4b32f728d7 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E2=80=9CAndrew?= Date: Wed, 21 May 2025 11:19:01 +0100 Subject: [PATCH 15/21] random stuff nearly done --- requirements.dev.txt | 3 + src/mechir/data/loader/base.py | 7 +- src/mechir/modelling/cat.py | 5 +- src/mechir/modelling/hooked/HookedEncoder.py | 77 +++++- .../HookedEncoderForSequenceClassification.py | 67 ----- src/mechir/modelling/hooked/conversion.py | 8 +- .../modelling/hooked/hooked_components.py | 24 +- src/mechir/modelling/hooked/linear.py | 6 +- .../hooked/loading_from_pretrained.py | 4 +- src/mechir/modelling/hooked/states.py | 31 +-- src/mechir/perturb/axiom/frequency.py | 5 +- src/mechir/perturb/axiom/proximity.py | 5 +- test/acceptance/test_cat.py | 57 +++++ test/acceptance/test_dot.py | 57 +++++ test/acceptance/test_hookedencoder.py | 240 ++++++++++++++++++ test/evaluate_models.py | 51 ---- test/integration/test_patched.py | 0 test/integration/test_sae.py | 0 test/test_output_parity.py | 93 ------- 19 files changed, 474 insertions(+), 266 deletions(-) create mode 100644 requirements.dev.txt delete mode 100644 src/mechir/modelling/hooked/HookedEncoderForSequenceClassification.py create mode 100644 test/acceptance/test_cat.py create mode 100644 test/acceptance/test_dot.py create mode 100644 test/acceptance/test_hookedencoder.py delete mode 100644 test/evaluate_models.py create mode 100644 test/integration/test_patched.py create mode 100644 test/integration/test_sae.py delete mode 100644 test/test_output_parity.py diff --git a/requirements.dev.txt b/requirements.dev.txt new file mode 100644 index 0000000..fbe9f98 --- /dev/null +++ b/requirements.dev.txt @@ -0,0 +1,3 @@ +pytest +ruff +black \ No newline at end of file diff --git a/src/mechir/data/loader/base.py 
b/src/mechir/data/loader/base.py index d2c6cdf..687ac97 100644 --- a/src/mechir/data/loader/base.py +++ b/src/mechir/data/loader/base.py @@ -57,7 +57,11 @@ def __init__( self.d_max_length = d_max_length self.special_token = special_token self.special_token_id = self.tokenizer.convert_tokens_to_ids(self.special_token) - self.perturb_type = perturb_type if perturb_type is not None else transformation_func.perturb_type + self.perturb_type = ( + perturb_type + if perturb_type is not None + else transformation_func.perturb_type + ) self.pre_perturbed = pre_perturbed def get_data(self, batch): @@ -224,4 +228,5 @@ def pad_tokenized( return finalized_tokenized_a_batch, finalized_tokenized_b_batch + __all__ = ["BaseCollator", "pad_tokenized", "pad"] diff --git a/src/mechir/modelling/cat.py b/src/mechir/modelling/cat.py index 8bd8616..18f1ad8 100644 --- a/src/mechir/modelling/cat.py +++ b/src/mechir/modelling/cat.py @@ -1,9 +1,8 @@ from typing import Callable import logging -import os import torch from jaxtyping import Float -from transformers import AutoModelForSequenceClassification, AutoTokenizer, AutoConfig +from transformers import AutoModelForSequenceClassification, AutoTokenizer from transformer_lens.ActivationCache import ActivationCache from transformer_lens.hook_points import HookedRootModule import transformer_lens.utils as utils @@ -12,7 +11,7 @@ from .sae import SAEMixin from .hooked.loading_from_pretrained import get_official_model_name from ..util import linear_rank_function -from ..modelling.hooked.HookedEncoderForSequenceClassification import HookedEncoderForSequenceClassification +from .hooked.HookedEncoder import HookedEncoderForSequenceClassification logger = logging.getLogger(__name__) diff --git a/src/mechir/modelling/hooked/HookedEncoder.py b/src/mechir/modelling/hooked/HookedEncoder.py index 42c53a7..dc2f66e 100644 --- a/src/mechir/modelling/hooked/HookedEncoder.py +++ b/src/mechir/modelling/hooked/HookedEncoder.py @@ -17,7 +17,7 @@ from transformers 
import AutoTokenizer from typing_extensions import Literal -from . import loading_from_pretrained as loading +from mechir.modelling.hooked import loading_from_pretrained as loading from transformer_lens.ActivationCache import ActivationCache from transformer_lens.components import ( BertBlock, @@ -30,8 +30,9 @@ from transformer_lens.hook_points import HookedRootModule, HookPoint from transformer_lens.utilities import devices -from .HookedTransformerConfig import HookedTransformerConfig -from .hooked_components import BertEmbed +from mechir.modelling.hooked.HookedTransformerConfig import HookedTransformerConfig +from mechir.modelling.hooked.hooked_components import BertEmbed +from mechir.modelling.hooked.linear import ClassificationHead, MLPClassificationHead class HookedEncoder(HookedRootModule): @@ -131,7 +132,11 @@ def to_tokens( if move_to_device: tokens = tokens.to(self.cfg.device) - token_type_ids = encodings.token_type_ids.to(self.cfg.device) if self.use_token_type_ids else None + token_type_ids = ( + encodings.token_type_ids.to(self.cfg.device) + if self.use_token_type_ids + else None + ) attention_mask = encodings.attention_mask.to(self.cfg.device) return tokens, token_type_ids, attention_mask @@ -216,8 +221,8 @@ def forward( assert ( self.tokenizer is not None ), "Must provide a tokenizer if input is a string" - input, token_type_ids_from_tokenizer, attention_mask = ( - self.to_tokens(input) + input, token_type_ids_from_tokenizer, attention_mask = self.to_tokens( + input ) # If token_type_ids or attention mask are not provided, use the ones from the tokenizer @@ -529,3 +534,63 @@ def all_head_labels(self) -> List[str]: for l in range(self.cfg.n_layers) for h in range(self.cfg.n_heads) ] + + +class HookedEncoderForSequenceClassification(HookedEncoder): + """ + This class implements a BERT-style encoder using the components in ./components.py, with HookPoints on every interesting activation. It inherits from HookedRootModule. 
+ + Limitations: + - The current MVP implementation supports only the masked language modelling (MLM) task. Next sentence prediction (NSP), causal language modelling, and other tasks are not yet supported. + - Also note that model does not include dropouts, which may lead to inconsistent results from training or fine-tuning. + + Like HookedTransformer, it can have a pretrained Transformer's weights loaded via `.from_pretrained`. There are a few features you might know from HookedTransformer which are not yet supported: + - There is no preprocessing (e.g. LayerNorm folding) when loading a pretrained model + - The model only accepts tokens as inputs, and not strings, or lists of strings + """ + + def __init__(self, cfg, tokenizer=None, move_to_device=True, **kwargs): + super().__init__(cfg, tokenizer, move_to_device, **kwargs) + self.classifier = ( + ClassificationHead(cfg) + if not self.cfg.use_mlp_head + else MLPClassificationHead(cfg) + ) + self.setup() + + def forward( + self, + input: Int[torch.Tensor, "batch pos"], + return_type: Optional[str] = "embeddings", + token_type_ids: Optional[Int[torch.Tensor, "batch pos"]] = None, + attention_mask: Optional[Int[torch.Tensor, "batch pos"]] = None, + start_at_layer: Optional[int] = None, + stop_at_layer: Optional[int] = None, + ) -> Optional[Float[torch.Tensor, "batch pos d_vocab"]]: + """Input must be a batch of tokens. Strings and lists of strings are not yet supported. + + return_type Optional[str]: The type of output to return. Can be one of: None (return nothing, don't calculate logits), or 'logits' (return logits). + + token_type_ids Optional[torch.Tensor]: Binary ids indicating whether a token belongs to sequence A or B. For example, for two sentences: "[CLS] Sentence A [SEP] Sentence B [SEP]", token_type_ids would be [0, 0, ..., 0, 1, ..., 1, 1]. `0` represents tokens from Sentence A, `1` from Sentence B. If not provided, BERT assumes a single sequence input. Typically, shape is (batch_size, sequence_length). 
+ + attention_mask: Optional[torch.Tensor]: A binary mask which indicates which tokens should be attended to (1) and which should be ignored (0). Primarily used for padding variable-length sentences in a batch. For instance, in a batch with sentences of differing lengths, shorter sentences are padded with 0s on the right. If not provided, the model assumes all tokens should be attended to. + """ + + hidden = super().forward( + input, + token_type_ids=token_type_ids, + start_at_layer=start_at_layer, + stop_at_layer=stop_at_layer, + return_type="embeddings", + attention_mask=attention_mask, + ) + if return_type == "embeddings" or stop_at_layer is not None: + return hidden + logits = self.classifier(hidden[:, 0, :]) + + if return_type is None: + return None + return logits + + +__all__ = ["HookedEncoder", "HookedEncoderForSequenceClassification"] diff --git a/src/mechir/modelling/hooked/HookedEncoderForSequenceClassification.py b/src/mechir/modelling/hooked/HookedEncoderForSequenceClassification.py deleted file mode 100644 index d1109cf..0000000 --- a/src/mechir/modelling/hooked/HookedEncoderForSequenceClassification.py +++ /dev/null @@ -1,67 +0,0 @@ -"""Hooked Encoder. - -Contains a BERT style model. This is separate from :class:`transformer_lens.HookedTransformer` -because it has a significantly different architecture to e.g. GPT style transformers. -""" - -from __future__ import annotations - -from typing import Optional -import torch -from jaxtyping import Float, Int - -from .HookedEncoder import HookedEncoder -from .linear import ClassificationHead, MLPClassificationHead - - -class HookedEncoderForSequenceClassification(HookedEncoder): - """ - This class implements a BERT-style encoder using the components in ./components.py, with HookPoints on every interesting activation. It inherits from HookedRootModule. - - Limitations: - - The current MVP implementation supports only the masked language modelling (MLM) task. 
Next sentence prediction (NSP), causal language modelling, and other tasks are not yet supported. - - Also note that model does not include dropouts, which may lead to inconsistent results from training or fine-tuning. - - Like HookedTransformer, it can have a pretrained Transformer's weights loaded via `.from_pretrained`. There are a few features you might know from HookedTransformer which are not yet supported: - - There is no preprocessing (e.g. LayerNorm folding) when loading a pretrained model - - The model only accepts tokens as inputs, and not strings, or lists of strings - """ - - def __init__(self, cfg, tokenizer=None, move_to_device=True, **kwargs): - super().__init__(cfg, tokenizer, move_to_device, **kwargs) - self.classifier = ClassificationHead(cfg) if not self.cfg.use_mlp_head else MLPClassificationHead(cfg) - self.setup() - - def forward( - self, - input: Int[torch.Tensor, "batch pos"], - return_type: Optional[str] = "embeddings", - token_type_ids: Optional[Int[torch.Tensor, "batch pos"]] = None, - attention_mask: Optional[Int[torch.Tensor, "batch pos"]] = None, - start_at_layer: Optional[int] = None, - stop_at_layer: Optional[int] = None, - ) -> Optional[Float[torch.Tensor, "batch pos d_vocab"]]: - """Input must be a batch of tokens. Strings and lists of strings are not yet supported. - - return_type Optional[str]: The type of output to return. Can be one of: None (return nothing, don't calculate logits), or 'logits' (return logits). - - token_type_ids Optional[torch.Tensor]: Binary ids indicating whether a token belongs to sequence A or B. For example, for two sentences: "[CLS] Sentence A [SEP] Sentence B [SEP]", token_type_ids would be [0, 0, ..., 0, 1, ..., 1, 1]. `0` represents tokens from Sentence A, `1` from Sentence B. If not provided, BERT assumes a single sequence input. Typically, shape is (batch_size, sequence_length). 
- - attention_mask: Optional[torch.Tensor]: A binary mask which indicates which tokens should be attended to (1) and which should be ignored (0). Primarily used for padding variable-length sentences in a batch. For instance, in a batch with sentences of differing lengths, shorter sentences are padded with 0s on the right. If not provided, the model assumes all tokens should be attended to. - """ - - hidden = super().forward( - input, - token_type_ids=token_type_ids, - start_at_layer=start_at_layer, - stop_at_layer=stop_at_layer, - return_type="embeddings", - attention_mask=attention_mask, - ) - if return_type == "embeddings" or stop_at_layer is not None: - return hidden - logits = self.classifier(hidden[:, 0, :]) - - if return_type is None: - return None - return logits diff --git a/src/mechir/modelling/hooked/conversion.py b/src/mechir/modelling/hooked/conversion.py index 8e2d626..cea0b6f 100644 --- a/src/mechir/modelling/hooked/conversion.py +++ b/src/mechir/modelling/hooked/conversion.py @@ -1,7 +1,9 @@ import einops from functools import partial -from .loading_from_pretrained import register_with_transformer_lens -from .HookedTransformerConfig import HookedTransformerConfig +from mechir.modelling.hooked.loading_from_pretrained import ( + register_with_transformer_lens, +) +from mechir.modelling.hooked.HookedTransformerConfig import HookedTransformerConfig def convert_distilbert_weights( @@ -150,7 +152,7 @@ def convert_bert_based_weights( state_dict["classifier.W"] = classification_head.weight state_dict["classifier.b"] = classification_head.bias else: - if not "electra" in model_name: + if "electra" not in model_name: mlm_head = bert.cls.predictions state_dict["mlm_head.W"] = mlm_head.transform.dense.weight state_dict["mlm_head.b"] = mlm_head.transform.dense.bias diff --git a/src/mechir/modelling/hooked/hooked_components.py b/src/mechir/modelling/hooked/hooked_components.py index f204ece..ed90c54 100644 --- a/src/mechir/modelling/hooked/hooked_components.py +++ 
b/src/mechir/modelling/hooked/hooked_components.py @@ -23,12 +23,16 @@ def __init__(self, cfg: Union[Dict, HookedTransformerConfig]): self.cfg = HookedTransformerConfig.unwrap(cfg) self.embed = Embed(self.cfg) self.pos_embed = PosEmbed(self.cfg) - self.token_type_embed = TokenTypeEmbed(self.cfg) if self.cfg.use_token_type_ids else nn.Identity() + self.token_type_embed = ( + TokenTypeEmbed(self.cfg) if self.cfg.use_token_type_ids else nn.Identity() + ) self.ln = LayerNorm(self.cfg) self.hook_embed = HookPoint() self.hook_pos_embed = HookPoint() - self.hook_token_type_embed = HookPoint() if self.cfg.use_token_type_ids else nn.Identity() + self.hook_token_type_embed = ( + HookPoint() if self.cfg.use_token_type_ids else nn.Identity() + ) def forward( self, @@ -36,13 +40,19 @@ def forward( token_type_ids: Optional[Int[torch.Tensor, "batch pos"]] = None, ) -> Float[torch.Tensor, "batch pos d_model"]: base_index_id = torch.arange(input_ids.shape[1], device=input_ids.device) - index_ids = einops.repeat(base_index_id, "pos -> batch pos", batch=input_ids.shape[0]) + index_ids = einops.repeat( + base_index_id, "pos -> batch pos", batch=input_ids.shape[0] + ) word_embeddings_out = self.hook_embed(self.embed(input_ids)) position_embeddings_out = self.hook_pos_embed(self.pos_embed(index_ids)) - token_type_embeddings_out = self.hook_token_type_embed( - self.token_type_embed(token_type_ids) - ) if self.cfg.use_token_type_ids else torch.zeros_like(word_embeddings_out) - embeddings_out = word_embeddings_out + position_embeddings_out + token_type_embeddings_out + token_type_embeddings_out = ( + self.hook_token_type_embed(self.token_type_embed(token_type_ids)) + if self.cfg.use_token_type_ids + else torch.zeros_like(word_embeddings_out) + ) + embeddings_out = ( + word_embeddings_out + position_embeddings_out + token_type_embeddings_out + ) layer_norm_out = self.ln(embeddings_out) return layer_norm_out diff --git a/src/mechir/modelling/hooked/linear.py 
b/src/mechir/modelling/hooked/linear.py index 889cc1d..1da6e45 100644 --- a/src/mechir/modelling/hooked/linear.py +++ b/src/mechir/modelling/hooked/linear.py @@ -9,8 +9,10 @@ from jaxtyping import Float from transformer_lens.utilities.addmm import batch_addmm from transformer_lens.hook_points import HookPoint -from transformer_lens.factories.activation_function_factory import ActivationFunctionFactory -from .HookedTransformerConfig import HookedTransformerConfig +from transformer_lens.factories.activation_function_factory import ( + ActivationFunctionFactory, +) +from mechir.modelling.hooked.HookedTransformerConfig import HookedTransformerConfig class ClassificationHead(nn.Module): diff --git a/src/mechir/modelling/hooked/loading_from_pretrained.py b/src/mechir/modelling/hooked/loading_from_pretrained.py index d61950a..7212229 100644 --- a/src/mechir/modelling/hooked/loading_from_pretrained.py +++ b/src/mechir/modelling/hooked/loading_from_pretrained.py @@ -22,8 +22,8 @@ ) import transformer_lens.utils as utils -from .HookedTransformerConfig import HookedTransformerConfig -from ... import config +from mechir.modelling.hooked.HookedTransformerConfig import HookedTransformerConfig +from mechir import config from transformer_lens.pretrained.weight_conversions import ( convert_bloom_weights, convert_coder_weights, diff --git a/src/mechir/modelling/hooked/states.py b/src/mechir/modelling/hooked/states.py index b626433..199ef7a 100644 --- a/src/mechir/modelling/hooked/states.py +++ b/src/mechir/modelling/hooked/states.py @@ -1,6 +1,8 @@ # Description: This file contains the state dictionary for the models in the hooked library. 
import torch -from .loading_from_pretrained import extend_transformer_lens_registry +from mechir.modelling.hooked.loading_from_pretrained import ( + extend_transformer_lens_registry, +) @extend_transformer_lens_registry("GPTNeoForCausalLM") @@ -85,31 +87,6 @@ def GPTJForCausalLM_state_dict(hf_config): } -@extend_transformer_lens_registry("GPTNeoForCausalLM") -def GPTNeoForCausalLM_state_dict(hf_config): - state = { - "d_model": hf_config.hidden_size, - "d_head": hf_config.hidden_size // hf_config.num_heads, - "n_heads": hf_config.num_heads, - "d_mlp": hf_config.hidden_size * 4, - "n_layers": hf_config.num_layers, - "n_ctx": hf_config.max_position_embeddings, - "eps": hf_config.layer_norm_epsilon, - "d_vocab": hf_config.vocab_size, - "act_fn": hf_config.hidden_act, - "use_attn_scale": False, - "use_local_attn": True, - "scale_attn_by_inverse_layer_idx": False, - "parallel_attn_mlp": True, - "positional_embedding_type": "rotary", - "rotary_adjacent_pairs": False, - "normalization_type": "LN", - } - rotary_pct = hf_config.rotary_pct - state["rotary_dim"] = round(rotary_pct * state["d_head"]) - return state - - @extend_transformer_lens_registry( ["BertModel", "BertForMaskedLM", "ElectraForPreTraining"] ) @@ -141,7 +118,7 @@ def ElectraForSequenceClassification_state_dict(hf_config): return { **BertModel_state_dict(hf_config), "num_labels": hf_config.num_labels, - "use_mlp_head": True + "use_mlp_head": True, } diff --git a/src/mechir/perturb/axiom/frequency.py b/src/mechir/perturb/axiom/frequency.py index 760e53d..eb362bb 100644 --- a/src/mechir/perturb/axiom/frequency.py +++ b/src/mechir/perturb/axiom/frequency.py @@ -21,6 +21,7 @@ class FrequencyPerturbation(IndexPerturbation): stopwords: Whether or not to filter valid terms with a stopword list exact_match: Forces returned terms to be present in both texts """ + perturb_type = "append" def __init__( @@ -58,9 +59,9 @@ def __init__( self.num_additions = num_additions self.loc = loc - if self.loc == 'end': + if self.loc 
== "end": self.perturb_type = "append" - elif self.loc == 'start': + elif self.loc == "start": self.perturb_type = "prepend" else: raise ValueError("loc must be either 'start' or 'end'") diff --git a/src/mechir/perturb/axiom/proximity.py b/src/mechir/perturb/axiom/proximity.py index 52edbd1..1a93067 100644 --- a/src/mechir/perturb/axiom/proximity.py +++ b/src/mechir/perturb/axiom/proximity.py @@ -20,6 +20,7 @@ class ProximityPerturbation(IndexPerturbation): stopwords: Whether or not to filter valid terms with a stopword list exact_match: Forces returned terms to be present in both texts """ + def __init__( self, index_location: Any | Path | str, @@ -56,9 +57,9 @@ def __init__( self.num_additions = num_additions self.loc = loc - if self.loc == 'end': + if self.loc == "end": self.perturb_type = "append" - elif self.loc == 'start': + elif self.loc == "start": self.perturb_type = "prepend" else: raise ValueError("loc must be either 'start' or 'end'") diff --git a/test/acceptance/test_cat.py b/test/acceptance/test_cat.py new file mode 100644 index 0000000..a4fbe4a --- /dev/null +++ b/test/acceptance/test_cat.py @@ -0,0 +1,57 @@ +import pytest +import torch +from mechir import Cat +from transformers import AutoTokenizer + + +@pytest.fixture(scope="module") +def cat_model(): + tokenizer = AutoTokenizer.from_pretrained("bert-base-uncased") + return Cat( + model_name_or_path="bert-base-uncased", + tokenizer=tokenizer, + softmax_output=True, + return_cache=True, + ) + + +@pytest.fixture(scope="module") +def tokenizer(): + return AutoTokenizer.from_pretrained("bert-base-uncased") + + +@pytest.mark.parametrize("text,label", [("Good day", 0), ("Bad day", 1)]) +def test_forward_and_softmax(cat_model, tokenizer, text, label): + enc = tokenizer(text, return_tensors="pt", padding=True) + logits = cat_model.forward(enc.input_ids, enc.attention_mask) + # softmax_output=False returns raw logits + assert logits.dim() == 1 + + +@pytest.mark.parametrize("patch_type", ["block_all", 
"head_all", "head_by_pos"]) +def test_patch_methods_shapes(cat_model, tokenizer, patch_type): + text = "Patch test" + enc = tokenizer(text, return_tensors="pt", padding=True) + seqs = {"input_ids": enc.input_ids, "attention_mask": enc.attention_mask} + # Prepare positive and perturbed + seqs_p = seqs + scores, cache = cat_model.score(seqs, cache=True) + # call patch + if patch_type == "head_by_pos": + layer_head_list = [(0, 0)] + out = cat_model.patch( + seqs, seqs_p, patch_type=patch_type, layer_head_list=layer_head_list + ) + assert out.shape[0] == 2 # components + else: + out = cat_model.patch(seqs, seqs_p, patch_type=patch_type) + assert out.ndim >= 2 + + +def test_score_without_cache(cat_model, tokenizer): + text = "Simple test" + enc = tokenizer(text, return_tensors="pt", padding=True) + logits, cache = cat_model.score( + {"input_ids": enc.input_ids, "attention_mask": enc.attention_mask}, cache=False + ) + assert cache is None diff --git a/test/acceptance/test_dot.py b/test/acceptance/test_dot.py new file mode 100644 index 0000000..08ba980 --- /dev/null +++ b/test/acceptance/test_dot.py @@ -0,0 +1,57 @@ +import pytest +import torch +from mechir import Dot +from transformers import AutoTokenizer +from transformer_lens.ActivationCache import ActivationCache + + +@pytest.fixture(scope="module") +def dot_model(): + tokenizer = AutoTokenizer.from_pretrained("bert-base-uncased") + return Dot( + model_name_or_path="bert-base-uncased", pooling_type="mean", return_cache=True + ) + + +@pytest.fixture(scope="module") +def tokenizer(): + return AutoTokenizer.from_pretrained("bert-base-uncased") + + +@pytest.mark.parametrize("pooling", ["cls", "mean"]) +def test_forward_pooling(pooling): + model = Dot("bert-base-uncased", pooling_type=pooling, return_cache=False) + tok = AutoTokenizer.from_pretrained("bert-base-uncased") + enc = tok("Forward test", return_tensors="pt", padding=True) + out = model.forward(enc.input_ids, enc.attention_mask) + assert out.dim() == 2 # (batch, 
d_model) + + +@pytest.mark.parametrize("cache_flag", [False, True]) +def test_score_cache_flags(dot_model, tokenizer, cache_flag): + texts = ["A quick test"] + enc = tokenizer(texts, return_tensors="pt", padding=True) + q = {"input_ids": enc.input_ids, "attention_mask": enc.attention_mask} + d = q + scores, reps_q, reps_d, cache = dot_model.score(q, d, cache=cache_flag) + assert isinstance(scores, torch.Tensor) + if cache_flag: + assert isinstance(cache, ActivationCache) + else: + assert cache is None + + +@pytest.mark.parametrize("patch_type", ["block_all", "head_all", "head_by_pos"]) +def test_patch_methods_shapes(dot_model, tokenizer, patch_type): + tok = tokenizer(["Patch"], return_tensors="pt", padding=True) + q = {"input_ids": tok.input_ids, "attention_mask": tok.attention_mask} + d = q + d_p = q + if patch_type == "head_by_pos": + out = dot_model.patch( + q, d, d_p, patch_type=patch_type, layer_head_list=[(0, 0)] + ) + assert out.shape[0] == 2 + else: + out = dot_model.patch(q, d, d_p, patch_type=patch_type) + assert out.dim() >= 2 diff --git a/test/acceptance/test_hookedencoder.py b/test/acceptance/test_hookedencoder.py new file mode 100644 index 0000000..65a2cc6 --- /dev/null +++ b/test/acceptance/test_hookedencoder.py @@ -0,0 +1,240 @@ +from typing import List + +import pytest +import torch +import torch.nn.functional as F +from jaxtyping import Float +from torch.testing import assert_close +from transformers import AutoTokenizer, AutoModel + +from mechir.modelling.hooked.HookedEncoder import HookedEncoder + +MODEL_NAME = "bert-base-cased" + +def get_embeddings(model): + try: + return model.bert.embeddings + except AttributeError: + return model.embeddings + +@pytest.fixture(scope="module") +def our_bert(): + return HookedEncoder.from_pretrained(MODEL_NAME, device="cpu") + + +@pytest.fixture(scope="module") +def huggingface_bert(): + return AutoModel.from_pretrained(MODEL_NAME) + + +@pytest.fixture(scope="module") +def tokenizer(): + return 
AutoTokenizer.from_pretrained(MODEL_NAME) + + +@pytest.fixture +def tokens(tokenizer): + return tokenizer("The [MASK] sat on the mat", return_tensors="pt")["input_ids"] + + +def test_full_model(our_bert, huggingface_bert, tokenizer): + sequences = [ + "Hello, my [MASK] is Bert.", + "I went to the [MASK] to buy some groceries.", + ] + tokenized = tokenizer(sequences, return_tensors="pt", padding=True) + input_ids = tokenized["input_ids"] + attention_mask = tokenized["attention_mask"] + + huggingface_bert_logits = huggingface_bert( + input_ids, attention_mask=attention_mask + ).logits + our_bert_logits = our_bert(input_ids, one_zero_attention_mask=attention_mask) + assert_close(huggingface_bert_logits, our_bert_logits, rtol=1.3e-6, atol=4e-5) + + +def test_embed_one_prediction(our_bert, huggingface_bert, tokens): + huggingface_embed = get_embeddings(huggingface_bert) + our_embed = our_bert.embed + + huggingface_embed_out = huggingface_embed(tokens)[0] + our_embed_out = our_embed(tokens).squeeze(0) + assert_close(huggingface_embed_out, our_embed_out) + + +def test_embed_two_predictions(our_bert, huggingface_bert, tokenizer): + encoding = tokenizer( + "Hello, my [MASK] is Bert.", + "I went to the [MASK] to buy some groceries.", + return_tensors="pt", + ) + input_ids = encoding["input_ids"] + token_type_ids = encoding["token_type_ids"] + + huggingface_embed_out = get_embeddings(huggingface_bert)( + input_ids, token_type_ids=token_type_ids + )[0] + our_embed_out = our_bert.embed(input_ids, token_type_ids=token_type_ids).squeeze(0) + assert_close(huggingface_embed_out, our_embed_out) + + +def test_attention(our_bert, huggingface_bert, tokens): + huggingface_embed = get_embeddings(huggingface_bert) + huggingface_attn = huggingface_bert.encoder.layer[0].attention + + embed_out = huggingface_embed(tokens) + + our_attn = our_bert.blocks[0].attn + + our_attn_out = our_attn(embed_out, embed_out, embed_out) + huggingface_self_attn_out = huggingface_attn.self(embed_out)[0] + 
huggingface_attn_out = huggingface_attn.output.dense(huggingface_self_attn_out) + assert_close(our_attn_out, huggingface_attn_out) + + +def test_bert_block(our_bert, huggingface_bert, tokens): + huggingface_embed = get_embeddings(huggingface_bert) + huggingface_block = huggingface_bert.encoder.layer[0] + + embed_out = huggingface_embed(tokens) + + our_block = our_bert.blocks[0] + + our_block_out = our_block(embed_out) + huggingface_block_out = huggingface_block(embed_out)[0] + assert_close(our_block_out, huggingface_block_out) + + +def test_bert_pooler(our_bert, huggingface_bert, tokens): + huggingface_embed_out = get_embeddings(huggingface_bert)(tokens) + huggingface_encoder_out = huggingface_bert.encoder(huggingface_embed_out) + cls_token_representation = huggingface_encoder_out[0] + + our_pooler_out = our_bert.pooler(cls_token_representation) + huggingface_pooler_out = huggingface_bert.pooler(cls_token_representation) + assert_close(our_pooler_out, huggingface_pooler_out) + + +def test_nsp_head(our_bert, huggingface_bert, tokens): + huggingface_bert_pooler_output = huggingface_bert(tokens).pooler_output + our_nsp_head_out = our_bert.nsp_head(huggingface_bert_pooler_output) + huggingface_nsp_head_out = huggingface_bert.cls.seq_relationship( + huggingface_bert_pooler_output + ) + + assert_close(our_nsp_head_out, huggingface_nsp_head_out) + + +def test_mlm_head(our_bert, huggingface_bert, tokens): + huggingface_bert_core_outputs = huggingface_bert(tokens).last_hidden_state + + our_mlm_head_out = our_bert.mlm_head(huggingface_bert_core_outputs) + huggingface_predictions_out = huggingface_bert.cls.predictions.transform( + huggingface_bert_core_outputs + ) + + print((our_mlm_head_out - huggingface_predictions_out).abs().max()) + assert_close(our_mlm_head_out, huggingface_predictions_out, rtol=1.3e-3, atol=1e-5) + + +def test_unembed(our_bert, huggingface_bert, tokens): + huggingface_bert_core_outputs = huggingface_bert(tokens).last_hidden_state + + our_mlm_head_out = 
our_bert.mlm_head(huggingface_bert_core_outputs) + our_unembed_out = our_bert.unembed(our_mlm_head_out) + huggingface_predictions_out = huggingface_bert.cls.predictions( + huggingface_bert_core_outputs + ) + + assert_close(our_unembed_out, huggingface_predictions_out, rtol=1.3e-6, atol=4e-5) + + +def test_run_with_cache(our_bert, tokens): + _, cache = our_bert.run_with_cache(tokens) + + # check that an arbitrary subset of the keys exist + assert "embed.hook_embed" in cache + assert "blocks.0.attn.hook_q" in cache + assert "blocks.3.attn.hook_attn_scores" in cache + assert "blocks.7.hook_resid_post" in cache + assert "mlm_head.ln.hook_normalized" in cache + + +def test_from_pretrained_revision(): + """ + Check that the from_pretrained parameter `revision` (= git version) works + """ + + _ = HookedEncoder.from_pretrained(MODEL_NAME, revision="main") + + try: + _ = HookedEncoder.from_pretrained(MODEL_NAME, revision="inexistent_branch_name") + except: + pass + else: + raise AssertionError("Should have raised an error") + + +@pytest.mark.skipif( + torch.backends.mps.is_available() or not torch.cuda.is_available(), + reason="bfloat16 unsupported by MPS: https://github.com/pytorch/pytorch/issues/78168 or no GPU", +) +@pytest.mark.parametrize("dtype", [torch.bfloat16, torch.float16]) +def test_half_precision(dtype): + """Check the 16 bits loading and inferences.""" + model = HookedEncoder.from_pretrained(MODEL_NAME, torch_dtype=dtype) + assert model.W_K.dtype == dtype + + _ = model(model.tokenizer("Hello, world", return_tensors="pt")["input_ids"]) + + +def _get_predictions( + logits: Float[torch.Tensor, "batch pos d_vocab"], positions: List[int], tokenizer +): + logits_at_position = logits.squeeze(0)[positions] + predicted_tokens = F.softmax(logits_at_position, dim=-1).argmax(dim=-1) + return tokenizer.batch_decode(predicted_tokens) + + +def test_predictions_mlm(our_bert, huggingface_bert, tokenizer): + input_ids = tokenizer("The [MASK] sat on the mat", 
return_tensors="pt")["input_ids"] + + our_bert_logits = our_bert(input_ids) + our_prediction = _get_predictions(our_bert_logits, [2], tokenizer) + + huggingface_bert_out = huggingface_bert(input_ids).logits + huggingface_prediction = _get_predictions(huggingface_bert_out, [2], tokenizer) + + assert our_prediction == huggingface_prediction + + +def test_predictions_from_forward_function_mlm(our_bert, huggingface_bert, tokenizer): + input_ids = tokenizer("The [MASK] sat on the mat", return_tensors="pt")["input_ids"] + our_prediction = our_bert(input_ids, return_type="predictions") + + huggingface_bert_out = huggingface_bert(input_ids).prediction_logits + huggingface_prediction = _get_predictions(huggingface_bert_out, [2], tokenizer)[ + 0 + ] # prediction is returned as a list + + assert our_prediction == huggingface_prediction + + +def test_input_list_of_strings_mlm(our_bert, huggingface_bert, tokenizer): + prompts = [ + "The [MASK] sat on the mat", + "She [MASK] to the store", + "The dog [MASK] the ball", + ] + encodings = tokenizer(prompts, return_tensors="pt", truncation=True, padding=True) + our_bert_logits = our_bert(prompts) + + huggingface_bert_logits = huggingface_bert(**encodings).logits + + assert_close(our_bert_logits, huggingface_bert_logits, rtol=1.3e-6, atol=4e-5) + + +@pytest.mark.skipif(not torch.cuda.is_available(), reason="Requires a CUDA device") +def test_cuda(mlm_tokens): + model = HookedEncoder.from_pretrained(MODEL_NAME) + model(mlm_tokens) diff --git a/test/evaluate_models.py b/test/evaluate_models.py deleted file mode 100644 index 5c6caa9..0000000 --- a/test/evaluate_models.py +++ /dev/null @@ -1,51 +0,0 @@ -from functools import partial -from mechir import Dot, Cat -import pandas as pd -import pyterrier as pt - - -class DummyTransformer(pt.Transformer): - def __init__(self, transform_func): - super().__init__() - self.transform_func = transform_func - - def transform(self, inps: pd.DataFrame): - outs = inps.copy() - outs['score'] = 
self.transform_func(inps) - return outs - - -CROSS_ENCODER_CHECKPOINT = "crystina-z/monoELECTRA_LCE_nneg31" -BI_ENCODER_CHECKPOINT = "sebastian-hofstaetter/distilbert-dot-tas_b-b256-msmarco" - - -def score_cat(model, df): - queries = df.query.to_list() - docs = df.text.to_list() - - tokenizer = model.tokenizer - - sequences = tokenizer(queries, docs, return_tensors="pt", padding=True, truncation=True) - - scores, _ = model.score(dict(sequences)) - return scores.cpu().numpy().tolist() - - -def score_dot(model, df): - queries = df.query.to_list() - docs = df.text.to_list() - - tokenizer = model.tokenizer - - queries = tokenizer(queries, return_tensors="pt", padding=True, truncation=True) - docs = tokenizer(docs, return_tensors="pt", padding=True, truncation=True) - - scores, _, _, _ = model.score(dict(queries), dict(docs)) - return scores.cpu().numpy().tolist() - - -cat_score = partial(score_cat, Cat.from_pretrained(CROSS_ENCODER_CHECKPOINT)) -CatTransformer = DummyTransformer(cat_score) - -dot_score = partial(score_dot, Dot.from_pretrained(BI_ENCODER_CHECKPOINT)) -DotTransformer = DummyTransformer(dot_score) diff --git a/test/integration/test_patched.py b/test/integration/test_patched.py new file mode 100644 index 0000000..e69de29 diff --git a/test/integration/test_sae.py b/test/integration/test_sae.py new file mode 100644 index 0000000..e69de29 diff --git a/test/test_output_parity.py b/test/test_output_parity.py deleted file mode 100644 index 805da60..0000000 --- a/test/test_output_parity.py +++ /dev/null @@ -1,93 +0,0 @@ -from mechir import Dot, Cat -import pandas as pd -try: - from pyterrier_dr import ElectraScorer, HgfBiEncoder -except ImportError: - return - -test_dataframe = pd.DataFrame([ - { - 'qid': "1", - "query": "What is the capital of France?", - "docno": "100", - "text": "Paris is the capital of France." - }, - { - "qid": "1", - "query" : "What is the capital of France?", - "docno" : "101", - "text" : "The capital of China is Beijing." 
- }, - { - "qid" : "2", - "query" : "What is the capital of China?", - "docno" : "100", - "text" : "Paris is the capital of France." - }, - { - "qid" : "2", - "query" : "What is the capital of China?", - "docno" : "101", - "text" : "The capital of China is Beijing." - }, -]) - -CROSS_ENCODER_CHECKPOINT = "crystina-z/monoELECTRA_LCE_nneg31" -BI_ENCODER_CHECKPOINT = "sebastian-hofstaetter/distilbert-dot-tas_b-b256-msmarco" - - -def score_cat(model, df): - queries = df.query.to_list() - docs = df.text.to_list() - - tokenizer = model.tokenizer - - sequences = tokenizer(queries, docs, return_tensors="pt", padding=True, truncation=True) - - scores, _ = model.score(dict(sequences)) - return scores.cpu().numpy().tolist() - - -def score_dot(model, df): - queries = df.query.to_list() - docs = df.text.to_list() - - tokenizer = model.tokenizer - - queries = tokenizer(queries, return_tensors="pt", padding=True, truncation=True) - docs = tokenizer(docs, return_tensors="pt", padding=True, truncation=True) - - scores, _, _, _ = model.score(dict(queries), dict(docs)) - return scores.cpu().numpy().tolist() - - -def test_electra_equivelance(): - hgf_cat = ElectraScorer() - mechir_cat = Cat(CROSS_ENCODER_CHECKPOINT, softmax_output=True) - - hgf_scores = hgf_cat.transform(test_dataframe).score.to_list() - mechir_scores = score_cat(mechir_cat, test_dataframe) - query_id_doc_id_pairs = zip(test_dataframe.qid.to_list(), test_dataframe.docno.to_list()) - - # check they are close - for hgf, mechir, pair in zip(hgf_scores, mechir_scores, query_id_doc_id_pairs): - assert abs(hgf - mechir) < 0.01, f"Pair {pair} is not close, {hgf} != {mechir}" - - -def test_bi_equivelance(): - hgf_dot = HgfBiEncoder(BI_ENCODER_CHECKPOINT) - mechir_dot = Dot(BI_ENCODER_CHECKPOINT) - - hgf_scores = hgf_dot.transform(test_dataframe) - mechir_scores = score_dot(mechir_dot, test_dataframe) - query_id_doc_id_pairs = zip(test_dataframe.qid.to_list(), test_dataframe.docno.to_list()) - - # check they are close - for 
hgf, mechir, pair in zip(hgf_scores, mechir_scores, query_id_doc_id_pairs): - assert abs(hgf - mechir) < 0.01, f"Pair {pair} is not close, {hgf} != {mechir}" - - -test_electra_equivelance() -test_bi_equivelance() - -print("All tests passed!") From da819e991b922d30d12674d8dd9ef4cc0f51249f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E2=80=9CAndrew?= Date: Mon, 26 May 2025 11:57:30 +0100 Subject: [PATCH 16/21] many changes --- setup.py | 2 +- src/mechir/modelling/__init__.py | 13 ++ .../modelling/architectures/__init__.py | 12 ++ .../base.py} | 14 +- .../distilbert.py} | 10 +- .../electra.py} | 16 ++- src/mechir/modelling/cat.py | 46 +++++-- src/mechir/modelling/dot.py | 48 +++++-- .../{hooked_components.py => components.py} | 31 ++--- .../{HookedTransformerConfig.py => config.py} | 0 src/mechir/modelling/hooked/conversion.py | 129 ++++++------------ src/mechir/modelling/hooked/linear.py | 2 +- .../hooked/loading_from_pretrained.py | 2 +- src/mechir/modelling/t5.py | 8 +- test/acceptance/test_cat.py | 4 +- test/acceptance/test_dot.py | 4 +- test/acceptance/test_hookedencoder.py | 111 ++------------- 17 files changed, 199 insertions(+), 253 deletions(-) create mode 100644 src/mechir/modelling/architectures/__init__.py rename src/mechir/modelling/{hooked/HookedEncoder.py => architectures/base.py} (98%) rename src/mechir/modelling/{hooked/HookedDistilBert.py => architectures/distilbert.py} (95%) rename src/mechir/modelling/{hooked/HookedElectra.py => architectures/electra.py} (86%) rename src/mechir/modelling/hooked/{hooked_components.py => components.py} (63%) rename src/mechir/modelling/hooked/{HookedTransformerConfig.py => config.py} (100%) diff --git a/setup.py b/setup.py index 3e56a70..e8afc13 100644 --- a/setup.py +++ b/setup.py @@ -21,4 +21,4 @@ 'mechir': ['perturb/data/stopwords.txt'], }, python_requires='>=3.10', -) \ No newline at end of file +) diff --git a/src/mechir/modelling/__init__.py b/src/mechir/modelling/__init__.py index 1bc85eb..0cdaa7f 100644 --- 
a/src/mechir/modelling/__init__.py +++ b/src/mechir/modelling/__init__.py @@ -1,5 +1,18 @@ +from . import architectures as architectures +from . import hooked as hooked + from .patched import PatchedMixin as PatchedMixin from .sae import SAEMixin as SAEMixin from .cat import Cat as Cat from .dot import Dot as Dot from .t5 import MonoT5 as MonoT5 + +__all__ = [ + "architectures", + "hooked", + "PatchedMixin", + "SAEMixin", + "Cat", + "Dot", + "MonoT5", +] diff --git a/src/mechir/modelling/architectures/__init__.py b/src/mechir/modelling/architectures/__init__.py new file mode 100644 index 0000000..3c372fe --- /dev/null +++ b/src/mechir/modelling/architectures/__init__.py @@ -0,0 +1,12 @@ +from .base import HookedEncoder, HookedEncoderForSequenceClassification +from .distilbert import HookedDistilBert, HookedDistilBertForSequenceClassification +from .electra import HookedElectra, HookedElectraForSequenceClassification + +__all__ = [ + "HookedEncoder", + "HookedEncoderForSequenceClassification", + "HookedDistilBert", + "HookedDistilBertForSequenceClassification", + "HookedElectra", + "HookedElectraForSequenceClassification", +] diff --git a/src/mechir/modelling/hooked/HookedEncoder.py b/src/mechir/modelling/architectures/base.py similarity index 98% rename from src/mechir/modelling/hooked/HookedEncoder.py rename to src/mechir/modelling/architectures/base.py index dc2f66e..63d66cb 100644 --- a/src/mechir/modelling/hooked/HookedEncoder.py +++ b/src/mechir/modelling/architectures/base.py @@ -30,8 +30,8 @@ from transformer_lens.hook_points import HookedRootModule, HookPoint from transformer_lens.utilities import devices -from mechir.modelling.hooked.HookedTransformerConfig import HookedTransformerConfig -from mechir.modelling.hooked.hooked_components import BertEmbed +from mechir.modelling.hooked.config import HookedTransformerConfig +from mechir.modelling.hooked.components import BertEmbed from mechir.modelling.hooked.linear import ClassificationHead, 
MLPClassificationHead @@ -297,6 +297,7 @@ def run_with_cache( self, *model_args, return_cache_object: bool = True, + cache_as_dict: bool = False, remove_batch_dim: bool = False, **kwargs, ) -> Tuple[ @@ -310,12 +311,13 @@ def run_with_cache( *model_args, remove_batch_dim=remove_batch_dim, **kwargs ) if return_cache_object: - cache = ActivationCache( - cache_dict, self, has_batch_dim=not remove_batch_dim - ) + if not cache_as_dict: + cache = ActivationCache( + cache_dict, self, has_batch_dim=not remove_batch_dim + ) return out, cache else: - return out, cache_dict + return out, None def to( # type: ignore self, diff --git a/src/mechir/modelling/hooked/HookedDistilBert.py b/src/mechir/modelling/architectures/distilbert.py similarity index 95% rename from src/mechir/modelling/hooked/HookedDistilBert.py rename to src/mechir/modelling/architectures/distilbert.py index bc87362..2946b87 100644 --- a/src/mechir/modelling/hooked/HookedDistilBert.py +++ b/src/mechir/modelling/architectures/distilbert.py @@ -6,7 +6,7 @@ from __future__ import annotations -from typing import Dict, List, Optional, Tuple, Union, cast, overload +from typing import Dict, Optional, overload import torch from einops import repeat @@ -15,11 +15,11 @@ from transformers import AutoTokenizer from typing_extensions import Literal -from transformer_lens import HookedTransformerConfig from transformer_lens.components import BertBlock, BertMLMHead, Unembed from transformer_lens.hook_points import HookPoint -from .hooked_components import BertEmbed -from .HookedEncoder import HookedEncoder +from mechir.modelling.hooked.components import BertEmbed +from mechir.modelling.architectures.base import HookedEncoder +from mechir.modelling.hooked.config import HookedTransformerConfig class HookedDistilBert(HookedEncoder): @@ -147,3 +147,5 @@ def forward( logits = self.classifier(resid[:, 0, :]) return logits + +__all__ = ["HookedDistilBert", "HookedDistilBertForSequenceClassification"] diff --git 
a/src/mechir/modelling/hooked/HookedElectra.py b/src/mechir/modelling/architectures/electra.py similarity index 86% rename from src/mechir/modelling/hooked/HookedElectra.py rename to src/mechir/modelling/architectures/electra.py index 11688dc..51ed8d7 100644 --- a/src/mechir/modelling/hooked/HookedElectra.py +++ b/src/mechir/modelling/architectures/electra.py @@ -1,6 +1,6 @@ -"""Hooked Encoder. +"""Hooked ELECTRA. -Contains a BERT style model. This is separate from :class:`transformer_lens.HookedTransformer` +Contains an ELECTRA style model. This is separate from :class:`transformer_lens.HookedTransformer` because it has a significantly different architecture to e.g. GPT style transformers. """ @@ -12,10 +12,10 @@ import torch from jaxtyping import Float, Int from torch import nn -from .HookedTransformerConfig import HookedTransformerConfig -from .HookedEncoder import HookedEncoder from transformer_lens.hook_points import HookPoint -from .linear import ClassificationHead, HiddenLinear +from mechir.modelling.hooked.linear import ClassificationHead, HiddenLinear +from mechir.modelling.architectures.base import HookedEncoder +from mechir.modelling.hooked.config import HookedTransformerConfig class ElectraClassificationHead(nn.Module): @@ -38,10 +38,11 @@ def forward(self, resid: Float[torch.Tensor, "batch d_model"]) -> torch.Tensor: post_act = self.hook_post(self.activation(pre_act)) return self.out_proj(post_act) +HookedElectra = HookedEncoder class HookedElectraForSequenceClassification(HookedEncoder): """ - This class implements a BERT-style encoder using the components in ./components.py, with HookPoints on every interesting activation. It inherits from HookedRootModule. + This class implements an ELECTRA-style encoder using the components in ./components.py, with HookPoints on every interesting activation. It inherits from HookedRootModule. Like HookedTransformer, it can have a pretrained Transformer's weights loaded via `.from_pretrained`. 
There are a few features you might know from HookedTransformer which are not yet supported: @@ -87,3 +88,6 @@ def forward( if return_type is None: return None return logits + + +__all__ = ["HookedElectra", "HookedElectraForSequenceClassification"] diff --git a/src/mechir/modelling/cat.py b/src/mechir/modelling/cat.py index 18f1ad8..fe4aeaf 100644 --- a/src/mechir/modelling/cat.py +++ b/src/mechir/modelling/cat.py @@ -1,4 +1,4 @@ -from typing import Callable +from typing import Callable, Dict, Tuple, Union import logging import torch from jaxtyping import Float @@ -7,11 +7,11 @@ from transformer_lens.hook_points import HookedRootModule import transformer_lens.utils as utils import torch.nn.functional as F -from .patched import PatchedMixin -from .sae import SAEMixin -from .hooked.loading_from_pretrained import get_official_model_name -from ..util import linear_rank_function -from .hooked.HookedEncoder import HookedEncoderForSequenceClassification +from mechir.modelling.patched import PatchedMixin +from mechir.modelling.sae import SAEMixin +from mechir.modelling.hooked.loading_from_pretrained import get_official_model_name +from mechir.util import linear_rank_function +from mechir.modelling.architectures import HookedEncoderForSequenceClassification logger = logging.getLogger(__name__) @@ -158,11 +158,37 @@ def get_act_patch_attn_head_by_pos( results[index] = patching_metric(output, scores, scores_p).mean() return results - - def score(self, sequences: dict, cache=False): + + def run_with_cache( + self, + *model_args, + return_cache_object: bool = True, + cache_as_dict: bool = False, + remove_batch_dim: bool = False, + **kwargs, + ) -> Tuple[ + Float[torch.Tensor, "batch pos d_vocab"], + Union[ActivationCache, Dict[str, torch.Tensor]], + ]: + """ + Wrapper around run_with_cache in HookedRootModule. 
If return_cache_object is True, this will return an ActivationCache object, with a bunch of useful HookedTransformer specific methods, otherwise it will return a dictionary of activations as in HookedRootModule. This function was copied directly from HookedTransformer. + """ + out, cache_dict = super().run_with_cache( + *model_args, remove_batch_dim=remove_batch_dim, **kwargs + ) + if return_cache_object: + if not cache_as_dict: + cache = ActivationCache( + cache_dict, self, has_batch_dim=not remove_batch_dim + ) + return out, cache + else: + return out, None + + def score(self, sequences: dict, cache=False, cache_as_dict=False): if cache: logits, cache = self.run_with_cache( - sequences["input_ids"], sequences["attention_mask"] + sequences["input_ids"], sequences["attention_mask"], cache_as_dict=cache_as_dict ) return logits, cache @@ -194,4 +220,4 @@ def patch( patched_output = self._patch_funcs[patch_type](**patching_kwargs) if self._return_cache: return patched_output, cache - return patched_output + return patched_output, None diff --git a/src/mechir/modelling/dot.py b/src/mechir/modelling/dot.py index 28f37d8..76b7275 100644 --- a/src/mechir/modelling/dot.py +++ b/src/mechir/modelling/dot.py @@ -1,4 +1,4 @@ -from typing import Callable +from typing import Callable, Dict, Tuple, Union import logging import os import torch @@ -7,11 +7,11 @@ from transformer_lens.ActivationCache import ActivationCache from transformer_lens.hook_points import HookedRootModule import transformer_lens.utils as utils -from .patched import PatchedMixin -from .sae import SAEMixin -from .hooked.HookedEncoder import HookedEncoder -from .hooked.loading_from_pretrained import get_official_model_name -from ..util import batched_dot_product, linear_rank_function +from mechir.modelling.patched import PatchedMixin +from mechir.modelling.sae import SAEMixin +from mechir.modelling.hooked.loading_from_pretrained import get_official_model_name +from mechir.util import batched_dot_product, 
linear_rank_function +from mechir.modelling.architectures import HookedEncoder logger = logging.getLogger(__name__) @@ -161,13 +161,39 @@ def get_act_patch_attn_head_by_pos( results[index] = patching_metric(output, scores, scores_p).mean() return results - - def score(self, queries: dict, documents: dict, reps_q=None, cache=False): + + def run_with_cache( + self, + *model_args, + return_cache_object: bool = True, + cache_as_dict: bool = False, + remove_batch_dim: bool = False, + **kwargs, + ) -> Tuple[ + Float[torch.Tensor, "batch pos d_vocab"], + Union[ActivationCache, Dict[str, torch.Tensor]], + ]: + """ + Wrapper around run_with_cache in HookedRootModule. If return_cache_object is True, this will return an ActivationCache object, with a bunch of useful HookedTransformer specific methods, otherwise it will return a dictionary of activations as in HookedRootModule. This function was copied directly from HookedTransformer. + """ + out, cache_dict = super().run_with_cache( + *model_args, remove_batch_dim=remove_batch_dim, **kwargs + ) + if return_cache_object: + if not cache_as_dict: + cache = ActivationCache( + cache_dict, self, has_batch_dim=not remove_batch_dim + ) + return out, cache + else: + return out, None + + def score(self, queries: dict, documents: dict, reps_q=None, cache=False, cache_as_dict=False): if reps_q is None: reps_q = self.forward(queries["input_ids"], queries["attention_mask"]) if cache: reps_d, cache_d = self.run_with_cache( - documents["input_ids"], documents["attention_mask"] + documents["input_ids"], documents["attention_mask"], cache_as_dict=cache_as_dict ) return batched_dot_product(reps_q, reps_d), reps_q, reps_d, cache_d reps_d = self.forward(documents["input_ids"], documents["attention_mask"]) @@ -185,7 +211,7 @@ def patch( assert ( patch_type in self._patch_funcs ), f"Patch type {patch_type} not recognized. 
Choose from {self._patch_funcs.keys()}" - scores, reps_q, _ = self.score(queries, documents) + scores, reps_q, _, _ = self.score(queries, documents) scores_p, _, _, cache_d = self.score( queries, documents_p, cache=True, reps_q=reps_q ) @@ -203,4 +229,4 @@ def patch( patched_output = self._patch_funcs[patch_type](**patching_kwargs) if self._return_cache: return patched_output, cache_d - return patched_output # PatchingOutput(output, scores, scores_p) + return patched_output, None # PatchingOutput(output, scores, scores_p) diff --git a/src/mechir/modelling/hooked/hooked_components.py b/src/mechir/modelling/hooked/components.py similarity index 63% rename from src/mechir/modelling/hooked/hooked_components.py rename to src/mechir/modelling/hooked/components.py index ed90c54..f9c35a6 100644 --- a/src/mechir/modelling/hooked/hooked_components.py +++ b/src/mechir/modelling/hooked/components.py @@ -10,7 +10,7 @@ from transformer_lens.components import Embed, LayerNorm, PosEmbed, TokenTypeEmbed from transformer_lens.hook_points import HookPoint -from transformer_lens.HookedTransformerConfig import HookedTransformerConfig +from mechir.modelling.hooked.config import HookedTransformerConfig class BertEmbed(nn.Module): @@ -23,16 +23,14 @@ def __init__(self, cfg: Union[Dict, HookedTransformerConfig]): self.cfg = HookedTransformerConfig.unwrap(cfg) self.embed = Embed(self.cfg) self.pos_embed = PosEmbed(self.cfg) - self.token_type_embed = ( - TokenTypeEmbed(self.cfg) if self.cfg.use_token_type_ids else nn.Identity() - ) + self.token_type_embed = TokenTypeEmbed(self.cfg) self.ln = LayerNorm(self.cfg) self.hook_embed = HookPoint() self.hook_pos_embed = HookPoint() - self.hook_token_type_embed = ( - HookPoint() if self.cfg.use_token_type_ids else nn.Identity() - ) + self.hook_token_type_embed = HookPoint() + self.use_token_type_ids = self.cfg.use_token_type_ids + def forward( self, @@ -40,19 +38,16 @@ def forward( token_type_ids: Optional[Int[torch.Tensor, "batch pos"]] = None, ) -> 
Float[torch.Tensor, "batch pos d_model"]: base_index_id = torch.arange(input_ids.shape[1], device=input_ids.device) - index_ids = einops.repeat( - base_index_id, "pos -> batch pos", batch=input_ids.shape[0] - ) + index_ids = einops.repeat(base_index_id, "pos -> batch pos", batch=input_ids.shape[0]) + if token_type_ids is None: + token_type_ids = torch.zeros_like(input_ids) word_embeddings_out = self.hook_embed(self.embed(input_ids)) position_embeddings_out = self.hook_pos_embed(self.pos_embed(index_ids)) - token_type_embeddings_out = ( - self.hook_token_type_embed(self.token_type_embed(token_type_ids)) - if self.cfg.use_token_type_ids - else torch.zeros_like(word_embeddings_out) - ) - embeddings_out = ( - word_embeddings_out + position_embeddings_out + token_type_embeddings_out - ) + token_type_embeddings_out = self.hook_token_type_embed( + self.token_type_embed(token_type_ids) + ) if self.use_token_type_ids else torch.zeros_like(word_embeddings_out) + + embeddings_out = word_embeddings_out + position_embeddings_out + token_type_embeddings_out layer_norm_out = self.ln(embeddings_out) return layer_norm_out diff --git a/src/mechir/modelling/hooked/HookedTransformerConfig.py b/src/mechir/modelling/hooked/config.py similarity index 100% rename from src/mechir/modelling/hooked/HookedTransformerConfig.py rename to src/mechir/modelling/hooked/config.py diff --git a/src/mechir/modelling/hooked/conversion.py b/src/mechir/modelling/hooked/conversion.py index cea0b6f..d7fb5e5 100644 --- a/src/mechir/modelling/hooked/conversion.py +++ b/src/mechir/modelling/hooked/conversion.py @@ -3,13 +3,14 @@ from mechir.modelling.hooked.loading_from_pretrained import ( register_with_transformer_lens, ) -from mechir.modelling.hooked.HookedTransformerConfig import HookedTransformerConfig +from mechir.modelling.hooked.config import HookedTransformerConfig +from mechir.modelling.hooked.loading_from_pretrained import REGISTERED_ARCHITECTURES, REGISTERED_CONVERSIONS def 
convert_distilbert_weights( distilbert, cfg: HookedTransformerConfig, sequence_classification=False, raw=False ): - embeddings = distilbert.embeddings + embeddings = distilbert.embeddings if not raw else distilbert.embeddings state_dict = { "embed.embed.W_E": embeddings.word_embeddings.weight, "embed.pos_embed.W_pos": embeddings.position_embeddings.weight, @@ -19,7 +20,7 @@ def convert_distilbert_weights( } for l in range(cfg.n_layers): - block = distilbert.transformer.layer[l] + block = distilbert.transformer.layer[l] if not raw else distilbert.transformer.layer[l] state_dict[f"blocks.{l}.attn.W_Q"] = einops.rearrange( block.attention.q_lin.weight, "(i h) m -> i m h", i=cfg.n_heads ) @@ -87,7 +88,11 @@ def convert_bert_based_weights( raw=False, model_name: str = "bert", ): - embeddings = getattr(bert, model_name).embeddings if not raw else bert.embeddings + if not hasattr(bert, "embeddings"): + bert = getattr(bert, model_name) + embeddings = bert.embeddings + else: + embeddings = bert.embeddings state_dict = { "embed.embed.W_E": embeddings.word_embeddings.weight, "embed.pos_embed.W_pos": embeddings.position_embeddings.weight, @@ -96,9 +101,14 @@ def convert_bert_based_weights( "embed.ln.b": embeddings.LayerNorm.bias, } + if not hasattr(bert, "encoder"): + encoder = getattr(bert, model_name).encoder + else: + encoder = bert.encoder + for l in range(cfg.n_layers): block = ( - getattr(bert, model_name).encoder.layer[l] + encoder.layer[l] if not raw else bert.encoder.layer[l] ) @@ -153,83 +163,14 @@ def convert_bert_based_weights( state_dict["classifier.b"] = classification_head.bias else: if "electra" not in model_name: - mlm_head = bert.cls.predictions - state_dict["mlm_head.W"] = mlm_head.transform.dense.weight - state_dict["mlm_head.b"] = mlm_head.transform.dense.bias - state_dict["mlm_head.ln.w"] = mlm_head.transform.LayerNorm.weight - state_dict["mlm_head.ln.b"] = mlm_head.transform.LayerNorm.bias - # "unembed.W_U": mlm_head.decoder.weight.T, - 
state_dict["unembed.b_U"] = mlm_head.bias - # Note: BERT uses tied embeddings - state_dict["unembed.W_U"] = embeddings.word_embeddings.weight.T - - return state_dict - - -def convert_bert_weights( - bert, cfg: HookedTransformerConfig, sequence_classification=False, raw=False -): - print(dir(bert)) - embeddings = bert.bert.embeddings if not raw else bert.embeddings - state_dict = { - "embed.embed.W_E": embeddings.word_embeddings.weight, - "embed.pos_embed.W_pos": embeddings.position_embeddings.weight, - "embed.token_type_embed.W_token_type": embeddings.token_type_embeddings.weight, - "embed.ln.w": embeddings.LayerNorm.weight, - "embed.ln.b": embeddings.LayerNorm.bias, - } - - for l in range(cfg.n_layers): - block = bert.bert.encoder.layer[l] if not raw else bert.encoder.layer[l] - state_dict[f"blocks.{l}.attn.W_Q"] = einops.rearrange( - block.attention.self.query.weight, "(i h) m -> i m h", i=cfg.n_heads - ) - state_dict[f"blocks.{l}.attn.b_Q"] = einops.rearrange( - block.attention.self.query.bias, "(i h) -> i h", i=cfg.n_heads - ) - state_dict[f"blocks.{l}.attn.W_K"] = einops.rearrange( - block.attention.self.key.weight, "(i h) m -> i m h", i=cfg.n_heads - ) - state_dict[f"blocks.{l}.attn.b_K"] = einops.rearrange( - block.attention.self.key.bias, "(i h) -> i h", i=cfg.n_heads - ) - state_dict[f"blocks.{l}.attn.W_V"] = einops.rearrange( - block.attention.self.value.weight, "(i h) m -> i m h", i=cfg.n_heads - ) - state_dict[f"blocks.{l}.attn.b_V"] = einops.rearrange( - block.attention.self.value.bias, "(i h) -> i h", i=cfg.n_heads - ) - state_dict[f"blocks.{l}.attn.W_O"] = einops.rearrange( - block.attention.output.dense.weight, - "m (i h) -> i h m", - i=cfg.n_heads, - ) - state_dict[f"blocks.{l}.attn.b_O"] = block.attention.output.dense.bias - state_dict[f"blocks.{l}.ln1.w"] = block.attention.output.LayerNorm.weight - state_dict[f"blocks.{l}.ln1.b"] = block.attention.output.LayerNorm.bias - state_dict[f"blocks.{l}.mlp.W_in"] = einops.rearrange( - 
block.intermediate.dense.weight, "mlp model -> model mlp" - ) - state_dict[f"blocks.{l}.mlp.b_in"] = block.intermediate.dense.bias - state_dict[f"blocks.{l}.mlp.W_out"] = einops.rearrange( - block.output.dense.weight, "model mlp -> mlp model" - ) - state_dict[f"blocks.{l}.mlp.b_out"] = block.output.dense.bias - state_dict[f"blocks.{l}.ln2.w"] = block.output.LayerNorm.weight - state_dict[f"blocks.{l}.ln2.b"] = block.output.LayerNorm.bias - if not raw: - if sequence_classification: - classification_head = bert.classifier - state_dict["classifier.W"] = classification_head.weight - state_dict["classifier.b"] = classification_head.bias - else: - mlm_head = bert.cls.predictions - state_dict["mlm_head.W"] = mlm_head.transform.dense.weight - state_dict["mlm_head.b"] = mlm_head.transform.dense.bias - state_dict["mlm_head.ln.w"] = mlm_head.transform.LayerNorm.weight - state_dict["mlm_head.ln.b"] = mlm_head.transform.LayerNorm.bias - # "unembed.W_U": mlm_head.decoder.weight.T, - state_dict["unembed.b_U"] = mlm_head.bias + if hasattr(bert, "cls"): + mlm_head = bert.cls.predictions + state_dict["mlm_head.W"] = mlm_head.transform.dense.weight + state_dict["mlm_head.b"] = mlm_head.transform.dense.bias + state_dict["mlm_head.ln.w"] = mlm_head.transform.LayerNorm.weight + state_dict["mlm_head.ln.b"] = mlm_head.transform.LayerNorm.bias + # "unembed.W_U": mlm_head.decoder.weight.T, + state_dict["unembed.b_U"] = mlm_head.bias # Note: BERT uses tied embeddings state_dict["unembed.W_U"] = embeddings.word_embeddings.weight.T @@ -237,18 +178,28 @@ def convert_bert_weights( register_with_transformer_lens( - partial(convert_bert_weights, raw=True), - ["BertModel", "BertForMaskedLM"], + partial(convert_bert_based_weights, model_name='encoder', raw=True), + ["BertModel"], + function_type="conversion", +) +register_with_transformer_lens( + partial(convert_bert_based_weights, model_name='bert', raw=False), + ["BERTForPreTraining", "BertForMaskedLM"], function_type="conversion", ) 
register_with_transformer_lens( - partial(convert_bert_weights, sequence_classification=True), + partial(convert_bert_based_weights, sequence_classification=True), "BertForSequenceClassification", function_type="conversion", ) +register_with_transformer_lens( + partial(convert_bert_based_weights, model_name="roberta", raw=False), + ["RobertaForMaskedLM", "RobertaForPreTraining"], + function_type="conversion", +) register_with_transformer_lens( partial(convert_bert_based_weights, model_name="roberta", raw=True), - ["RobertaModel", "RobertaForMaskedLM"], + ["RobertaModel"], function_type="conversion", ) register_with_transformer_lens( @@ -258,6 +209,11 @@ def convert_bert_weights( "RobertaForSequenceClassification", function_type="conversion", ) +register_with_transformer_lens( + partial(convert_bert_based_weights, model_name="electra", raw=False), + ["ElectraModelForPreTraining"], + function_type="conversion", +) register_with_transformer_lens( partial(convert_bert_based_weights, model_name="electra", raw=True), ["ElectraModel"], @@ -270,3 +226,4 @@ def convert_bert_weights( "ElectraForSequenceClassification", function_type="conversion", ) + diff --git a/src/mechir/modelling/hooked/linear.py b/src/mechir/modelling/hooked/linear.py index 1da6e45..cabf611 100644 --- a/src/mechir/modelling/hooked/linear.py +++ b/src/mechir/modelling/hooked/linear.py @@ -12,7 +12,7 @@ from transformer_lens.factories.activation_function_factory import ( ActivationFunctionFactory, ) -from mechir.modelling.hooked.HookedTransformerConfig import HookedTransformerConfig +from mechir.modelling.hooked.config import HookedTransformerConfig class ClassificationHead(nn.Module): diff --git a/src/mechir/modelling/hooked/loading_from_pretrained.py b/src/mechir/modelling/hooked/loading_from_pretrained.py index 7212229..a58bbac 100644 --- a/src/mechir/modelling/hooked/loading_from_pretrained.py +++ b/src/mechir/modelling/hooked/loading_from_pretrained.py @@ -22,7 +22,7 @@ ) import 
transformer_lens.utils as utils -from mechir.modelling.hooked.HookedTransformerConfig import HookedTransformerConfig +from mechir.modelling.hooked.config import HookedTransformerConfig from mechir import config from transformer_lens.pretrained.weight_conversions import ( convert_bloom_weights, diff --git a/src/mechir/modelling/t5.py b/src/mechir/modelling/t5.py index be165e4..76ed518 100644 --- a/src/mechir/modelling/t5.py +++ b/src/mechir/modelling/t5.py @@ -7,10 +7,10 @@ from transformer_lens.ActivationCache import ActivationCache from transformer_lens.hook_points import HookedRootModule import transformer_lens.utils as utils -from .patched import PatchedMixin -from .sae import SAEMixin -from ..util import linear_rank_function -from .hooked.loading_from_pretrained import get_official_model_name +from mechir.modelling.patched import PatchedMixin +from mechir.modelling.sae import SAEMixin +from mechir.modelling.hooked.loading_from_pretrained import get_official_model_name +from mechir.util import linear_rank_function logger = logging.getLogger(__name__) diff --git a/test/acceptance/test_cat.py b/test/acceptance/test_cat.py index a4fbe4a..6f4861c 100644 --- a/test/acceptance/test_cat.py +++ b/test/acceptance/test_cat.py @@ -39,12 +39,12 @@ def test_patch_methods_shapes(cat_model, tokenizer, patch_type): # call patch if patch_type == "head_by_pos": layer_head_list = [(0, 0)] - out = cat_model.patch( + out, _ = cat_model.patch( seqs, seqs_p, patch_type=patch_type, layer_head_list=layer_head_list ) assert out.shape[0] == 2 # components else: - out = cat_model.patch(seqs, seqs_p, patch_type=patch_type) + out, _ = cat_model.patch(seqs, seqs_p, patch_type=patch_type) assert out.ndim >= 2 diff --git a/test/acceptance/test_dot.py b/test/acceptance/test_dot.py index 08ba980..dcf93bb 100644 --- a/test/acceptance/test_dot.py +++ b/test/acceptance/test_dot.py @@ -48,10 +48,10 @@ def test_patch_methods_shapes(dot_model, tokenizer, patch_type): d = q d_p = q if patch_type == 
"head_by_pos": - out = dot_model.patch( + out, _ = dot_model.patch( q, d, d_p, patch_type=patch_type, layer_head_list=[(0, 0)] ) assert out.shape[0] == 2 else: - out = dot_model.patch(q, d, d_p, patch_type=patch_type) + out, _ = dot_model.patch(q, d, d_p, patch_type=patch_type) assert out.dim() >= 2 diff --git a/test/acceptance/test_hookedencoder.py b/test/acceptance/test_hookedencoder.py index 65a2cc6..4b1b90b 100644 --- a/test/acceptance/test_hookedencoder.py +++ b/test/acceptance/test_hookedencoder.py @@ -5,9 +5,9 @@ import torch.nn.functional as F from jaxtyping import Float from torch.testing import assert_close -from transformers import AutoTokenizer, AutoModel +from transformers import AutoTokenizer, AutoModel, BertForPreTraining -from mechir.modelling.hooked.HookedEncoder import HookedEncoder +from mechir.modelling.architectures import HookedEncoder MODEL_NAME = "bert-base-cased" @@ -19,12 +19,12 @@ def get_embeddings(model): @pytest.fixture(scope="module") def our_bert(): - return HookedEncoder.from_pretrained(MODEL_NAME, device="cpu") + return HookedEncoder.from_pretrained(MODEL_NAME, device="cpu", hf_model=BertForPreTraining.from_pretrained(MODEL_NAME)) @pytest.fixture(scope="module") def huggingface_bert(): - return AutoModel.from_pretrained(MODEL_NAME) + return BertForPreTraining.from_pretrained(MODEL_NAME) @pytest.fixture(scope="module") @@ -47,9 +47,9 @@ def test_full_model(our_bert, huggingface_bert, tokenizer): attention_mask = tokenized["attention_mask"] huggingface_bert_logits = huggingface_bert( - input_ids, attention_mask=attention_mask - ).logits - our_bert_logits = our_bert(input_ids, one_zero_attention_mask=attention_mask) + input_ids, attention_mask=attention_mask, output_hidden_states=True + ).hidden_states[-1] + our_bert_logits = our_bert(input_ids, attention_mask=attention_mask) assert_close(huggingface_bert_logits, our_bert_logits, rtol=1.3e-6, atol=4e-5) @@ -80,7 +80,7 @@ def test_embed_two_predictions(our_bert, huggingface_bert, 
tokenizer): def test_attention(our_bert, huggingface_bert, tokens): huggingface_embed = get_embeddings(huggingface_bert) - huggingface_attn = huggingface_bert.encoder.layer[0].attention + huggingface_attn = huggingface_bert.bert.encoder.layer[0].attention embed_out = huggingface_embed(tokens) @@ -94,7 +94,7 @@ def test_attention(our_bert, huggingface_bert, tokens): def test_bert_block(our_bert, huggingface_bert, tokens): huggingface_embed = get_embeddings(huggingface_bert) - huggingface_block = huggingface_bert.encoder.layer[0] + huggingface_block = huggingface_bert.bert.encoder.layer[0] embed_out = huggingface_embed(tokens) @@ -105,50 +105,6 @@ def test_bert_block(our_bert, huggingface_bert, tokens): assert_close(our_block_out, huggingface_block_out) -def test_bert_pooler(our_bert, huggingface_bert, tokens): - huggingface_embed_out = get_embeddings(huggingface_bert)(tokens) - huggingface_encoder_out = huggingface_bert.encoder(huggingface_embed_out) - cls_token_representation = huggingface_encoder_out[0] - - our_pooler_out = our_bert.pooler(cls_token_representation) - huggingface_pooler_out = huggingface_bert.pooler(cls_token_representation) - assert_close(our_pooler_out, huggingface_pooler_out) - - -def test_nsp_head(our_bert, huggingface_bert, tokens): - huggingface_bert_pooler_output = huggingface_bert(tokens).pooler_output - our_nsp_head_out = our_bert.nsp_head(huggingface_bert_pooler_output) - huggingface_nsp_head_out = huggingface_bert.cls.seq_relationship( - huggingface_bert_pooler_output - ) - - assert_close(our_nsp_head_out, huggingface_nsp_head_out) - - -def test_mlm_head(our_bert, huggingface_bert, tokens): - huggingface_bert_core_outputs = huggingface_bert(tokens).last_hidden_state - - our_mlm_head_out = our_bert.mlm_head(huggingface_bert_core_outputs) - huggingface_predictions_out = huggingface_bert.cls.predictions.transform( - huggingface_bert_core_outputs - ) - - print((our_mlm_head_out - huggingface_predictions_out).abs().max()) - 
assert_close(our_mlm_head_out, huggingface_predictions_out, rtol=1.3e-3, atol=1e-5) - - -def test_unembed(our_bert, huggingface_bert, tokens): - huggingface_bert_core_outputs = huggingface_bert(tokens).last_hidden_state - - our_mlm_head_out = our_bert.mlm_head(huggingface_bert_core_outputs) - our_unembed_out = our_bert.unembed(our_mlm_head_out) - huggingface_predictions_out = huggingface_bert.cls.predictions( - huggingface_bert_core_outputs - ) - - assert_close(our_unembed_out, huggingface_predictions_out, rtol=1.3e-6, atol=4e-5) - - def test_run_with_cache(our_bert, tokens): _, cache = our_bert.run_with_cache(tokens) @@ -157,7 +113,6 @@ def test_run_with_cache(our_bert, tokens): assert "blocks.0.attn.hook_q" in cache assert "blocks.3.attn.hook_attn_scores" in cache assert "blocks.7.hook_resid_post" in cache - assert "mlm_head.ln.hook_normalized" in cache def test_from_pretrained_revision(): @@ -182,58 +137,12 @@ def test_from_pretrained_revision(): @pytest.mark.parametrize("dtype", [torch.bfloat16, torch.float16]) def test_half_precision(dtype): """Check the 16 bits loading and inferences.""" - model = HookedEncoder.from_pretrained(MODEL_NAME, torch_dtype=dtype) + model = HookedEncoder.from_pretrained(MODEL_NAME, torch_dtype=dtype, hf_model=BertForPreTraining.from_pretrained(MODEL_NAME)) assert model.W_K.dtype == dtype _ = model(model.tokenizer("Hello, world", return_tensors="pt")["input_ids"]) -def _get_predictions( - logits: Float[torch.Tensor, "batch pos d_vocab"], positions: List[int], tokenizer -): - logits_at_position = logits.squeeze(0)[positions] - predicted_tokens = F.softmax(logits_at_position, dim=-1).argmax(dim=-1) - return tokenizer.batch_decode(predicted_tokens) - - -def test_predictions_mlm(our_bert, huggingface_bert, tokenizer): - input_ids = tokenizer("The [MASK] sat on the mat", return_tensors="pt")["input_ids"] - - our_bert_logits = our_bert(input_ids) - our_prediction = _get_predictions(our_bert_logits, [2], tokenizer) - - huggingface_bert_out 
= huggingface_bert(input_ids).logits - huggingface_prediction = _get_predictions(huggingface_bert_out, [2], tokenizer) - - assert our_prediction == huggingface_prediction - - -def test_predictions_from_forward_function_mlm(our_bert, huggingface_bert, tokenizer): - input_ids = tokenizer("The [MASK] sat on the mat", return_tensors="pt")["input_ids"] - our_prediction = our_bert(input_ids, return_type="predictions") - - huggingface_bert_out = huggingface_bert(input_ids).prediction_logits - huggingface_prediction = _get_predictions(huggingface_bert_out, [2], tokenizer)[ - 0 - ] # prediction is returned as a list - - assert our_prediction == huggingface_prediction - - -def test_input_list_of_strings_mlm(our_bert, huggingface_bert, tokenizer): - prompts = [ - "The [MASK] sat on the mat", - "She [MASK] to the store", - "The dog [MASK] the ball", - ] - encodings = tokenizer(prompts, return_tensors="pt", truncation=True, padding=True) - our_bert_logits = our_bert(prompts) - - huggingface_bert_logits = huggingface_bert(**encodings).logits - - assert_close(our_bert_logits, huggingface_bert_logits, rtol=1.3e-6, atol=4e-5) - - @pytest.mark.skipif(not torch.cuda.is_available(), reason="Requires a CUDA device") def test_cuda(mlm_tokens): model = HookedEncoder.from_pretrained(MODEL_NAME) From 33dc9d675a4176304e8db7b9d331c38ac1c0eca5 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E2=80=9CAndrew?= Date: Mon, 26 May 2025 12:17:15 +0100 Subject: [PATCH 17/21] nearly there --- src/mechir/__init__.py | 2 +- .../modelling/architectures/distilbert.py | 7 +- src/mechir/modelling/hooked/conversion.py | 11 +- test/acceptance/test_hookeddistilbert.py | 115 ++++++++++++++++++ test/acceptance/test_hookedelectra.py | 115 ++++++++++++++++++ 5 files changed, 243 insertions(+), 7 deletions(-) create mode 100644 test/acceptance/test_hookeddistilbert.py create mode 100644 test/acceptance/test_hookedelectra.py diff --git a/src/mechir/__init__.py b/src/mechir/__init__.py index 78eabc1..5701d0f 100644 --- 
a/src/mechir/__init__.py +++ b/src/mechir/__init__.py @@ -6,7 +6,7 @@ class MechirConfig: _instance = None _config = { - "ignore-official": False, # default value + "ignore-official": True, # default value # Add other default config options here } diff --git a/src/mechir/modelling/architectures/distilbert.py b/src/mechir/modelling/architectures/distilbert.py index 2946b87..00e865a 100644 --- a/src/mechir/modelling/architectures/distilbert.py +++ b/src/mechir/modelling/architectures/distilbert.py @@ -81,7 +81,8 @@ class HookedDistilBertForSequenceClassification(HookedDistilBert): def __init__(self, cfg, tokenizer=None, move_to_device=True, **kwargs): super().__init__(cfg, tokenizer, move_to_device=move_to_device, **kwargs) - self.classifier = nn.Linear(cfg.d_model, cfg.n_labels) + self.mlp = nn.Linear(cfg.d_model, cfg.d_model) + self.out_proj = nn.Linear(cfg.d_model, cfg.n_labels) self.setup() @overload @@ -145,7 +146,9 @@ def forward( if return_type is None: return - logits = self.classifier(resid[:, 0, :]) + logits = self.mlp(resid[:, 0, :]) + logits = self.out_proj(logits) return logits + __all__ = ["HookedDistilBert", "HookedDistilBertForSequenceClassification"] diff --git a/src/mechir/modelling/hooked/conversion.py b/src/mechir/modelling/hooked/conversion.py index d7fb5e5..3d27147 100644 --- a/src/mechir/modelling/hooked/conversion.py +++ b/src/mechir/modelling/hooked/conversion.py @@ -62,9 +62,12 @@ def convert_distilbert_weights( if not raw: if sequence_classification: - classification_head = distilbert.pre_classifier - state_dict["classifier.W"] = classification_head.weight - state_dict["classifier.b"] = classification_head.bias + pre_classification_head = distilbert.pre_classifier + classification_head = distilbert.classifier + state_dict["classifier.mlp.W"] = classification_head.weight + state_dict["classifier.mlp.b"] = classification_head.bias + state_dict["classifier.out_proj.W"] = pre_classification_head.weight + state_dict["classifier.out_proj.b"] = 
pre_classification_head.bias return state_dict @@ -211,7 +214,7 @@ def convert_bert_based_weights( ) register_with_transformer_lens( partial(convert_bert_based_weights, model_name="electra", raw=False), - ["ElectraModelForPreTraining"], + "ElectraForPreTraining", function_type="conversion", ) register_with_transformer_lens( diff --git a/test/acceptance/test_hookeddistilbert.py b/test/acceptance/test_hookeddistilbert.py new file mode 100644 index 0000000..59ace19 --- /dev/null +++ b/test/acceptance/test_hookeddistilbert.py @@ -0,0 +1,115 @@ +from typing import List + +import pytest +import torch +import torch.nn.functional as F +from jaxtyping import Float +from torch.testing import assert_close +from transformers import AutoTokenizer, AutoModel, DistilBertModel + +from mechir.modelling.architectures import HookedDistilBert + +MODEL_NAME = "distilbert-base-uncased-finetuned-sst-2-english" + +def get_embeddings(model): + try: + return model.distilbert.embeddings + except AttributeError: + return model.embeddings + +@pytest.fixture(scope="module") +def our_distilbert(): + return HookedDistilBert.from_pretrained(MODEL_NAME, device="cpu", hf_model=DistilBertModel.from_pretrained(MODEL_NAME)) + + +@pytest.fixture(scope="module") +def huggingface_distilbert(): + return DistilBertModel.from_pretrained(MODEL_NAME) + + +@pytest.fixture(scope="module") +def tokenizer(): + return AutoTokenizer.from_pretrained(MODEL_NAME) + + +@pytest.fixture +def tokens(tokenizer): + return tokenizer("The [MASK] sat on the mat", return_tensors="pt")["input_ids"] + + +def test_full_model(our_distilbert, huggingface_distilbert, tokenizer): + sequences = [ + "Hello, my [MASK] is distilbert.", + "I went to the [MASK] to buy some groceries.", + ] + tokenized = tokenizer(sequences, return_tensors="pt", padding=True) + input_ids = tokenized["input_ids"] + attention_mask = tokenized["attention_mask"] + + huggingface_distilbert_logits = huggingface_distilbert( + input_ids, 
attention_mask=attention_mask, output_hidden_states=True + ).hidden_states[-1] + our_distilbert_logits = our_distilbert(input_ids, attention_mask=attention_mask) + assert_close(huggingface_distilbert_logits, our_distilbert_logits, rtol=1.3e-6, atol=4e-5) + + +def test_embed_one_prediction(our_distilbert, huggingface_distilbert, tokens): + huggingface_embed = get_embeddings(huggingface_distilbert) + our_embed = our_distilbert.embed + + huggingface_embed_out = huggingface_embed(tokens)[0] + our_embed_out = our_embed(tokens).squeeze(0) + assert_close(huggingface_embed_out, our_embed_out) + + +def test_embed_two_predictions(our_distilbert, huggingface_distilbert, tokenizer): + encoding = tokenizer( + "Hello, my [MASK] is distilbert.", + "I went to the [MASK] to buy some groceries.", + return_tensors="pt", + ) + input_ids = encoding["input_ids"] + token_type_ids = encoding["token_type_ids"] + + huggingface_embed_out = get_embeddings(huggingface_distilbert)( + input_ids, token_type_ids=token_type_ids + )[0] + our_embed_out = our_distilbert.embed(input_ids, token_type_ids=token_type_ids).squeeze(0) + assert_close(huggingface_embed_out, our_embed_out) + + +def test_attention(our_distilbert, huggingface_distilbert, tokens): + huggingface_embed = get_embeddings(huggingface_distilbert) + huggingface_attn = huggingface_distilbert.encoder.layer[0].attention + + embed_out = huggingface_embed(tokens) + + our_attn = our_distilbert.blocks[0].attn + + our_attn_out = our_attn(embed_out, embed_out, embed_out) + huggingface_self_attn_out = huggingface_attn.self(embed_out)[0] + huggingface_attn_out = huggingface_attn.output.dense(huggingface_self_attn_out) + assert_close(our_attn_out, huggingface_attn_out) + + +def test_distilbert_block(our_distilbert, huggingface_distilbert, tokens): + huggingface_embed = get_embeddings(huggingface_distilbert) + huggingface_block = huggingface_distilbert.encoder.layer[0] + + embed_out = huggingface_embed(tokens) + + our_block = our_distilbert.blocks[0] 
+ + our_block_out = our_block(embed_out) + huggingface_block_out = huggingface_block(embed_out)[0] + assert_close(our_block_out, huggingface_block_out) + + +def test_run_with_cache(our_distilbert, tokens): + _, cache = our_distilbert.run_with_cache(tokens) + + # check that an arbitrary subset of the keys exist + assert "embed.hook_embed" in cache + assert "blocks.0.attn.hook_q" in cache + assert "blocks.3.attn.hook_attn_scores" in cache + assert "blocks.7.hook_resid_post" in cache diff --git a/test/acceptance/test_hookedelectra.py b/test/acceptance/test_hookedelectra.py new file mode 100644 index 0000000..94d2e75 --- /dev/null +++ b/test/acceptance/test_hookedelectra.py @@ -0,0 +1,115 @@ +from typing import List + +import pytest +import torch +import torch.nn.functional as F +from jaxtyping import Float +from torch.testing import assert_close +from transformers import AutoTokenizer, AutoModel, ElectraModel + +from mechir.modelling.architectures import HookedEncoder + +MODEL_NAME = "google/electra-base-discriminator" + +def get_embeddings(model): + try: + return model.electra.embeddings + except AttributeError: + return model.embeddings + +@pytest.fixture(scope="module") +def our_electra(): + return HookedEncoder.from_pretrained(MODEL_NAME, device="cpu", hf_model=ElectraModel.from_pretrained(MODEL_NAME)) + + +@pytest.fixture(scope="module") +def huggingface_electra(): + return ElectraModel.from_pretrained(MODEL_NAME) + + +@pytest.fixture(scope="module") +def tokenizer(): + return AutoTokenizer.from_pretrained(MODEL_NAME) + + +@pytest.fixture +def tokens(tokenizer): + return tokenizer("The [MASK] sat on the mat", return_tensors="pt")["input_ids"] + + +def test_full_model(our_electra, huggingface_electra, tokenizer): + sequences = [ + "Hello, my [MASK] is electra.", + "I went to the [MASK] to buy some groceries.", + ] + tokenized = tokenizer(sequences, return_tensors="pt", padding=True) + input_ids = tokenized["input_ids"] + attention_mask = 
tokenized["attention_mask"] + + huggingface_electra_logits = huggingface_electra( + input_ids, attention_mask=attention_mask, output_hidden_states=True + ).hidden_states[-1] + our_electra_logits = our_electra(input_ids, attention_mask=attention_mask) + assert_close(huggingface_electra_logits, our_electra_logits, rtol=1.3e-6, atol=4e-5) + + +def test_embed_one_prediction(our_electra, huggingface_electra, tokens): + huggingface_embed = get_embeddings(huggingface_electra) + our_embed = our_electra.embed + + huggingface_embed_out = huggingface_embed(tokens)[0] + our_embed_out = our_embed(tokens).squeeze(0) + assert_close(huggingface_embed_out, our_embed_out) + + +def test_embed_two_predictions(our_electra, huggingface_electra, tokenizer): + encoding = tokenizer( + "Hello, my [MASK] is electra.", + "I went to the [MASK] to buy some groceries.", + return_tensors="pt", + ) + input_ids = encoding["input_ids"] + token_type_ids = encoding["token_type_ids"] + + huggingface_embed_out = get_embeddings(huggingface_electra)( + input_ids, token_type_ids=token_type_ids + )[0] + our_embed_out = our_electra.embed(input_ids, token_type_ids=token_type_ids).squeeze(0) + assert_close(huggingface_embed_out, our_embed_out) + + +def test_attention(our_electra, huggingface_electra, tokens): + huggingface_embed = get_embeddings(huggingface_electra) + huggingface_attn = huggingface_electra.encoder.layer[0].attention + + embed_out = huggingface_embed(tokens) + + our_attn = our_electra.blocks[0].attn + + our_attn_out = our_attn(embed_out, embed_out, embed_out) + huggingface_self_attn_out = huggingface_attn.self(embed_out)[0] + huggingface_attn_out = huggingface_attn.output.dense(huggingface_self_attn_out) + assert_close(our_attn_out, huggingface_attn_out) + + +def test_electra_block(our_electra, huggingface_electra, tokens): + huggingface_embed = get_embeddings(huggingface_electra) + huggingface_block = huggingface_electra.encoder.layer[0] + + embed_out = huggingface_embed(tokens) + + our_block 
= our_electra.blocks[0] + + our_block_out = our_block(embed_out) + huggingface_block_out = huggingface_block(embed_out)[0] + assert_close(our_block_out, huggingface_block_out) + + +def test_run_with_cache(our_electra, tokens): + _, cache = our_electra.run_with_cache(tokens) + + # check that an arbitrary subset of the keys exist + assert "embed.hook_embed" in cache + assert "blocks.0.attn.hook_q" in cache + assert "blocks.3.attn.hook_attn_scores" in cache + assert "blocks.7.hook_resid_post" in cache From 8bde7919f37eaf61fbd23d04842bdf53c29b3d2d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E2=80=9CAndrew?= Date: Mon, 26 May 2025 12:20:07 +0100 Subject: [PATCH 18/21] hmm --- src/mechir/modelling/hooked/conversion.py | 15 +++++++-------- 1 file changed, 7 insertions(+), 8 deletions(-) diff --git a/src/mechir/modelling/hooked/conversion.py b/src/mechir/modelling/hooked/conversion.py index 3d27147..8aa9207 100644 --- a/src/mechir/modelling/hooked/conversion.py +++ b/src/mechir/modelling/hooked/conversion.py @@ -4,7 +4,6 @@ register_with_transformer_lens, ) from mechir.modelling.hooked.config import HookedTransformerConfig -from mechir.modelling.hooked.loading_from_pretrained import REGISTERED_ARCHITECTURES, REGISTERED_CONVERSIONS def convert_distilbert_weights( @@ -62,12 +61,13 @@ def convert_distilbert_weights( if not raw: if sequence_classification: - pre_classification_head = distilbert.pre_classifier - classification_head = distilbert.classifier - state_dict["classifier.mlp.W"] = classification_head.weight - state_dict["classifier.mlp.b"] = classification_head.bias - state_dict["classifier.out_proj.W"] = pre_classification_head.weight - state_dict["classifier.out_proj.b"] = pre_classification_head.bias + if hasattr(distilbert, "pre_classifier") and hasattr(distilbert, "classifier"): + pre_classification_head = distilbert.pre_classifier + classification_head = distilbert.classifier + state_dict["classifier.mlp.W"] = classification_head.weight + 
state_dict["classifier.mlp.b"] = classification_head.bias + state_dict["classifier.out_proj.W"] = pre_classification_head.weight + state_dict["classifier.out_proj.b"] = pre_classification_head.bias return state_dict @@ -229,4 +229,3 @@ def convert_bert_based_weights( "ElectraForSequenceClassification", function_type="conversion", ) - From a1ee4479798d3ed9271a45375dcaeade53414d1c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E2=80=9CAndrew?= Date: Mon, 26 May 2025 12:39:47 +0100 Subject: [PATCH 19/21] voila --- .../modelling/architectures/distilbert.py | 48 ++----------------- src/mechir/modelling/hooked/conversion.py | 4 +- test/acceptance/test_hookeddistilbert.py | 23 ++------- 3 files changed, 9 insertions(+), 66 deletions(-) diff --git a/src/mechir/modelling/architectures/distilbert.py b/src/mechir/modelling/architectures/distilbert.py index 00e865a..e245629 100644 --- a/src/mechir/modelling/architectures/distilbert.py +++ b/src/mechir/modelling/architectures/distilbert.py @@ -18,53 +18,12 @@ from transformer_lens.components import BertBlock, BertMLMHead, Unembed from transformer_lens.hook_points import HookPoint from mechir.modelling.hooked.components import BertEmbed +from mechir.modelling.hooked.linear import MLPClassificationHead from mechir.modelling.architectures.base import HookedEncoder from mechir.modelling.hooked.config import HookedTransformerConfig -class HookedDistilBert(HookedEncoder): - def __init__(self, cfg, tokenizer=None, move_to_device=True, **kwargs): - if isinstance(cfg, Dict): - cfg = HookedTransformerConfig(**cfg) - elif isinstance(cfg, str): - raise ValueError( - "Please pass in a config dictionary or HookedTransformerConfig object. If you want to load a pretrained model, use HookedEncoder.from_pretrained() instead." 
- ) - self.cfg = cfg - - assert ( - self.cfg.n_devices == 1 - ), "Multiple devices not supported for HookedEncoder" - if tokenizer is not None: - self.tokenizer = tokenizer - elif self.cfg.tokenizer_name is not None: - self.tokenizer = AutoTokenizer.from_pretrained(self.cfg.tokenizer_name) - else: - self.tokenizer = None - - if self.cfg.d_vocab == -1: - # If we have a tokenizer, vocab size can be inferred from it. - assert ( - self.tokenizer is not None - ), "Must provide a tokenizer if d_vocab is not provided" - self.cfg.d_vocab = max(self.tokenizer.vocab.values()) + 1 - if self.cfg.d_vocab_out == -1: - self.cfg.d_vocab_out = self.cfg.d_vocab - - self.embed = BertEmbed(self.cfg) - self.blocks = nn.ModuleList( - [BertBlock(self.cfg) for _ in range(self.cfg.n_layers)] - ) - self.mlm_head = BertMLMHead(cfg) - self.unembed = Unembed(self.cfg) - - self.hook_full_embed = HookPoint() - - if move_to_device: - self.to(self.cfg.device) - - self.setup() - +HookedDistilBert = HookedEncoder class HookedDistilBertForSequenceClassification(HookedDistilBert): """ @@ -81,8 +40,7 @@ class HookedDistilBertForSequenceClassification(HookedDistilBert): def __init__(self, cfg, tokenizer=None, move_to_device=True, **kwargs): super().__init__(cfg, tokenizer, move_to_device=move_to_device, **kwargs) - self.mlp = nn.Linear(cfg.d_model, cfg.d_model) - self.out_proj = nn.Linear(cfg.d_model, cfg.n_labels) + self.classifier = MLPClassificationHead(self.cfg) self.setup() @overload diff --git a/src/mechir/modelling/hooked/conversion.py b/src/mechir/modelling/hooked/conversion.py index 8aa9207..1602324 100644 --- a/src/mechir/modelling/hooked/conversion.py +++ b/src/mechir/modelling/hooked/conversion.py @@ -64,8 +64,8 @@ def convert_distilbert_weights( if hasattr(distilbert, "pre_classifier") and hasattr(distilbert, "classifier"): pre_classification_head = distilbert.pre_classifier classification_head = distilbert.classifier - state_dict["classifier.mlp.W"] = classification_head.weight - 
state_dict["classifier.mlp.b"] = classification_head.bias + state_dict["classifier.dense.W"] = classification_head.weight + state_dict["classifier.dense.b"] = classification_head.bias state_dict["classifier.out_proj.W"] = pre_classification_head.weight state_dict["classifier.out_proj.b"] = pre_classification_head.bias diff --git a/test/acceptance/test_hookeddistilbert.py b/test/acceptance/test_hookeddistilbert.py index 59ace19..3b867fa 100644 --- a/test/acceptance/test_hookeddistilbert.py +++ b/test/acceptance/test_hookeddistilbert.py @@ -69,32 +69,17 @@ def test_embed_two_predictions(our_distilbert, huggingface_distilbert, tokenizer return_tensors="pt", ) input_ids = encoding["input_ids"] - token_type_ids = encoding["token_type_ids"] huggingface_embed_out = get_embeddings(huggingface_distilbert)( - input_ids, token_type_ids=token_type_ids + input_ids )[0] - our_embed_out = our_distilbert.embed(input_ids, token_type_ids=token_type_ids).squeeze(0) + our_embed_out = our_distilbert.embed(input_ids).squeeze(0) assert_close(huggingface_embed_out, our_embed_out) -def test_attention(our_distilbert, huggingface_distilbert, tokens): - huggingface_embed = get_embeddings(huggingface_distilbert) - huggingface_attn = huggingface_distilbert.encoder.layer[0].attention - - embed_out = huggingface_embed(tokens) - - our_attn = our_distilbert.blocks[0].attn - - our_attn_out = our_attn(embed_out, embed_out, embed_out) - huggingface_self_attn_out = huggingface_attn.self(embed_out)[0] - huggingface_attn_out = huggingface_attn.output.dense(huggingface_self_attn_out) - assert_close(our_attn_out, huggingface_attn_out) - - def test_distilbert_block(our_distilbert, huggingface_distilbert, tokens): huggingface_embed = get_embeddings(huggingface_distilbert) - huggingface_block = huggingface_distilbert.encoder.layer[0] + huggingface_block = huggingface_distilbert.transformer.layer[0] embed_out = huggingface_embed(tokens) @@ -112,4 +97,4 @@ def test_run_with_cache(our_distilbert, tokens): assert 
"embed.hook_embed" in cache assert "blocks.0.attn.hook_q" in cache assert "blocks.3.attn.hook_attn_scores" in cache - assert "blocks.7.hook_resid_post" in cache + assert "blocks.5.hook_resid_post" in cache From 5e1d9590fc05763ec50029c3248bc6de10ce50c4 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E2=80=9CAndrew?= Date: Mon, 26 May 2025 12:40:18 +0100 Subject: [PATCH 20/21] integration is implicit --- test/integration/test_patched.py | 0 test/integration/test_sae.py | 0 2 files changed, 0 insertions(+), 0 deletions(-) delete mode 100644 test/integration/test_patched.py delete mode 100644 test/integration/test_sae.py diff --git a/test/integration/test_patched.py b/test/integration/test_patched.py deleted file mode 100644 index e69de29..0000000 diff --git a/test/integration/test_sae.py b/test/integration/test_sae.py deleted file mode 100644 index e69de29..0000000 From 4985fb078cc773edbac5ce09ec644eb852400539 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E2=80=9CAndrew?= Date: Mon, 26 May 2025 12:40:52 +0100 Subject: [PATCH 21/21] version bump --- pyproject.toml | 2 +- setup.py | 2 +- src/mechir/__init__.py | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 9738281..b1abc6e 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta" [project] name = "mechir" -version = "0.0.3" +version = "0.0.4" dependencies = [ "torch", "transformers", diff --git a/setup.py b/setup.py index e8afc13..c0f3468 100644 --- a/setup.py +++ b/setup.py @@ -4,7 +4,7 @@ setup( name=package_name, # The name of your package - version='0.0.2', # Your package version + version='0.0.4', # Your package version packages=find_packages(where='src'), # Look for packages in the 'src' directory package_dir={'': 'src'}, # Map the package name to the 'src' directory author='Anon A. 
Mous', diff --git a/src/mechir/__init__.py b/src/mechir/__init__.py index 5701d0f..6473353 100644 --- a/src/mechir/__init__.py +++ b/src/mechir/__init__.py @@ -1,4 +1,4 @@ -__version__ = "0.0.3" +__version__ = "0.0.4" class MechirConfig: