"""
Model loading utilities with FP32 / FP16 / INT8 precision support.

INT8 takes one of two paths:
  - CUDA: true 8-bit weights/kernels via bitsandbytes (optional dependency).
  - CPU:  eager-mode static post-training quantization (prepare/convert).

NOTE(review): reconstructed from a unified diff; regions elided between
hunks are marked with NOTE(review) comments and must be checked against
the full file before use.
"""

from dataclasses import dataclass
from typing import Dict, Optional, Tuple

import torch
import torch.nn as nn
from torch.ao.quantization import DeQuantStub, QuantStub
from transformers import AutoConfig, AutoModelForSequenceClassification

try:
    # Optional: true INT8 on CUDA GPUs (e.g., T4) via bitsandbytes.
    from transformers import BitsAndBytesConfig

    _HAS_BITSANDBYTES = True
except Exception:  # pragma: no cover - optional dependency
    BitsAndBytesConfig = None
    _HAS_BITSANDBYTES = False


class _ClassifierOutput:
    """Minimal result object exposing ``.logits``, mirroring HF outputs.

    Defined once at module scope instead of re-declaring a class on every
    forward call (the previous code built a fresh class per inference).
    """

    __slots__ = ("logits",)

    def __init__(self, logits: torch.Tensor) -> None:
        self.logits = logits


class QuantDistilBertWrapper(nn.Module):
    """
    Wrap a DistilBERT classification model with Quant/DeQuant stubs so it
    can go through eager-mode static PTQ (prepare / calibrate / convert).

    Token IDs are categorical indices and must remain integers for correct
    embedding lookup, so quantization is applied to the embedding
    *activations*, never to the IDs themselves.
    """

    def __init__(self, base_model: nn.Module) -> None:
        super().__init__()
        self.quant = QuantStub()
        self.dequant = DeQuantStub()
        self.model = base_model  # usually AutoModelForSequenceClassification

    def forward(self, input_ids, attention_mask=None, **kwargs):
        """
        Quantization-aware forward pass.

        1. Embed the raw (integer) token IDs.
        2. Quantize the embedding activations.
        3. Run the wrapped model on those activations via ``inputs_embeds``.
        4. Dequantize the final logits.

        Returns an object with a ``.logits`` attribute so existing call
        sites keep working.
        """
        # 1) Standard embedding lookup on the unmodified token IDs.
        embeddings = self.model.distilbert.embeddings(input_ids)

        # 2) Quantize the embedding activations (identity until the model
        #    has been `prepare`d, then a learned fake-quant / real quant).
        embeddings_q = self.quant(embeddings)

        # 3) Forward through the HF model using precomputed embeddings.
        outputs = self.model(
            inputs_embeds=embeddings_q,
            attention_mask=attention_mask,
            **kwargs,
        )

        # 4) Dequantize logits before handing them back to callers.
        logits = self.dequant(outputs.logits)

        return _ClassifierOutput(logits)


@dataclass
class ModelConfig:
    # NOTE(review): fields above `num_labels` are elided in the diff view —
    # restore them from the full file; only this field is visible here.
    num_labels: int = 2


@dataclass
class LayerwiseQuantConfig:
    """
    Configuration for hybrid precision per layer.

    ``layer_precision`` maps a substring (matched against module names) to
    a desired precision: "fp32", "fp16", or "int8".

      - "int8"          -> attach qconfig so the module is statically quantized
      - "fp32" / "fp16" -> clear qconfig so the module stays in float

    When several substrings match one module name, the LAST matching entry
    (dict insertion order) wins.

    Example:
        LayerwiseQuantConfig(
            layer_precision={
                "attention": "int8",
                "ffn": "int8",
                "classifier": "fp32",
            }
        )
    """

    layer_precision: Dict[str, str]


def apply_layerwise_qconfig(
    model: nn.Module,
    qconfig: torch.ao.quantization.QConfig,
    cfg: LayerwiseQuantConfig,
) -> None:
    """
    Attach qconfig selectively to submodules by name-substring matching.

    A simple heuristic that enables hybrid schemes (e.g., INT8 attention +
    FP32 classifier).

    Args:
        model: Wrapped model whose submodules receive per-layer qconfigs.
        qconfig: The qconfig to attach wherever a rule says "int8".
        cfg: Substring -> precision rules; the last matching rule wins.
    """
    for name, module in model.named_modules():
        # Never touch the explicit quant/dequant stubs themselves.
        if isinstance(module, (QuantStub, DeQuantStub)):
            continue

        matched_precision: Optional[str] = None
        for pattern, prec in cfg.layer_precision.items():
            if pattern in name:
                matched_precision = prec  # later rules override earlier ones

        if matched_precision is None:
            # No explicit rule for this module -> leave its qconfig as-is
            # (on the layerwise path no global qconfig is set, so unmatched
            # modules stay in float).
            continue

        if matched_precision.lower() == "int8":
            module.qconfig = qconfig
        else:
            # Any non-int8 precision keeps this layer in float.
            module.qconfig = None


def load_model(
    model_name: str,
    precision: str = "fp32",
    device: str = "cuda",
    num_labels: int = 2,
    verbose: bool = True,
    layerwise_cfg: Optional[LayerwiseQuantConfig] = None,
) -> nn.Module:
    """
    Load a transformer classification model in the specified precision.

    Args:
        model_name: HuggingFace model identifier.
        precision: One of "fp32", "fp16", "int8".
        device: "cuda" or "cpu".
        num_labels: Number of classification labels.
        verbose: Print loading / quantization progress.
        layerwise_cfg: Optional hybrid per-layer precision rules; only used
            on the INT8/CPU static-PTQ path.

    Returns:
        The loaded model in eval mode.

    Raises:
        RuntimeError: INT8-on-CUDA requested without bitsandbytes available.
        ValueError: Unknown ``precision``.
    """
    # NOTE(review): verbose banner printing from the original function
    # (elided between diff hunks) belongs here.

    config = AutoConfig.from_pretrained(model_name)
    config.num_labels = num_labels

    # Special handling for true INT8 on CUDA GPUs (e.g., T4) via bitsandbytes.
    if precision == "int8" and device == "cuda":
        if not _HAS_BITSANDBYTES:
            raise RuntimeError(
                "INT8 on CUDA requested, but bitsandbytes/transformers integration is not "
                "available. Install with `pip install bitsandbytes` and a recent "
                "`transformers` version."
            )

        if verbose:
            print(" Using bitsandbytes INT8 on CUDA (Tensor Core friendly, e.g., T4)")

        # Configure 8-bit loading; keeps weights in 8-bit and uses INT8 kernels.
        bnb_config = BitsAndBytesConfig(
            load_in_8bit=True,
            llm_int8_threshold=6.0,
            llm_int8_has_fp16_weight=False,
        )

        # Device map: everything on GPU 0; the benchmark harness already
        # assumes a single-GPU setup.
        model = AutoModelForSequenceClassification.from_pretrained(
            model_name,
            config=config,
            quantization_config=bnb_config,
            device_map={"": 0},
        )

    else:
        # Load model in FP32 first, then optionally cast/quantize.
        model = AutoModelForSequenceClassification.from_pretrained(
            model_name,
            config=config,
            torch_dtype=torch.float32,
        )

        if precision == "fp32":
            model = model.to(device)

        elif precision == "fp16":
            model = model.half()
            model = model.to(device)

        elif precision == "int8":
            # CPU path: static PTQ with wrapper + qconfig + prepare/convert
            # (the CUDA INT8 case was handled above via bitsandbytes).
            if device != "cuda":
                if verbose:
                    print(" Using CPU static PTQ with QuantDistilBertWrapper")

                # Wrap the HF model with Quant/DeQuant stubs.
                wrapped = QuantDistilBertWrapper(model)

                # Default to a global qconfig if no layerwise rules given.
                qconfig = torch.ao.quantization.get_default_qconfig("fbgemm")

                if layerwise_cfg is not None:
                    # Attach qconfig selectively (attention / FFN / classifier).
                    apply_layerwise_qconfig(wrapped, qconfig, layerwise_cfg)
                else:
                    # Global qconfig (all eligible modules).
                    wrapped.qconfig = qconfig

                if verbose:
                    print(" Preparing model for static quantization...")

                prepared = torch.ao.quantization.prepare(wrapped, inplace=False)
                prepared.eval()

                # Lightweight calibration with random token IDs. Intentionally
                # simple; the project harness uses the high-quality FP32/FP16
                # paths on GPU.
                vocab_size = getattr(config, "vocab_size", 30522)
                seq_len = getattr(config, "max_position_embeddings", 128)

                with torch.no_grad():
                    for _ in range(10):
                        dummy_ids = torch.randint(
                            low=0,
                            high=vocab_size,
                            size=(8, seq_len),
                            dtype=torch.long,
                        )
                        dummy_mask = torch.ones_like(dummy_ids)
                        _ = prepared(input_ids=dummy_ids, attention_mask=dummy_mask)

                if verbose:
                    print(" Converting calibrated model to INT8...")

                quantized = torch.ao.quantization.convert(prepared, inplace=False)
                model = quantized.to("cpu")

        else:
            # BUG FIX: previously an unknown precision fell through silently
            # and returned an FP32 model that was never moved to `device`.
            # (Matches the `else: raise ValueError` in the notebook's
            # parallel load_model.)
            raise ValueError(f"Unknown precision: {precision}")

    model.eval()

    # NOTE(review): model-size / parameter-count reporting and the `return`
    # from the original (elided in the diff view) follow here.
    return model
3","language":"python","name":"python3"},"language_info":{"codemirror_mode":{"name":"ipython","version":3},"file_extension":".py","mimetype":"text/x-python","name":"python","nbconvert_exporter":"python","pygments_lexer":"ipython3","version":"3.8.0"},"kaggle":{"accelerator":"gpu","dataSources":[],"dockerImageVersionId":31193,"isInternetEnabled":true,"language":"python","sourceType":"notebook","isGpuEnabled":true}},"nbformat_minor":4,"nbformat":4,"cells":[{"cell_type":"markdown","source":"# Energy-Aware Quantization Measurement Harness\n## ESE 5390 Final Project: Accurate Energy Measurement for Quantized LLMs\n\nThis notebook implements a zero-I/O measurement harness for measuring:\n- **Energy consumption** (per inference)\n- **Latency** (per sample and per batch)\n- **Throughput** (samples/second)\n- **Accuracy** (classification accuracy)\n- **Memory usage** (GPU memory)\n\nAcross three precision levels:\n- **FP32**: Full precision baseline\n- **FP16**: Half precision\n- **INT8**: 8-bit quantized (simulated on GPU)","metadata":{}},{"cell_type":"code","source":"!git clone https://github.com/krishkc5/energy_aware_quantization.git","metadata":{"trusted":true,"execution":{"iopub.status.busy":"2025-12-03T10:43:39.562275Z","iopub.execute_input":"2025-12-03T10:43:39.562589Z","iopub.status.idle":"2025-12-03T10:43:39.714104Z","shell.execute_reply.started":"2025-12-03T10:43:39.562568Z","shell.execute_reply":"2025-12-03T10:43:39.713178Z"}},"outputs":[{"name":"stdout","text":"fatal: destination path 'energy_aware_quantization' already exists and is not an empty directory.\n","output_type":"stream"}],"execution_count":1},{"cell_type":"markdown","source":"## 1. 
Setup and Imports","metadata":{}},{"cell_type":"code","source":"import torch\nimport torch.nn as nn\nfrom transformers import AutoModelForSequenceClassification\nimport numpy as np\nimport pandas as pd\nimport json\nimport time\nimport subprocess\nimport threading\nfrom pathlib import Path\nfrom typing import Dict, List, Tuple, Optional\nfrom datetime import datetime\nimport warnings\nwarnings.filterwarnings('ignore')\n\nprint(f\"PyTorch version: {torch.__version__}\")\nprint(f\"CUDA available: {torch.cuda.is_available()}\")\nif torch.cuda.is_available():\n print(f\"CUDA device: {torch.cuda.get_device_name(0)}\")\n print(f\"CUDA version: {torch.version.cuda}\")","metadata":{"trusted":true,"execution":{"iopub.status.busy":"2025-12-03T10:43:39.715621Z","iopub.execute_input":"2025-12-03T10:43:39.715836Z","iopub.status.idle":"2025-12-03T10:43:39.808302Z","shell.execute_reply.started":"2025-12-03T10:43:39.715815Z","shell.execute_reply":"2025-12-03T10:43:39.807563Z"}},"outputs":[{"name":"stdout","text":"PyTorch version: 2.6.0+cu124\nCUDA available: True\nCUDA device: Tesla P100-PCIE-16GB\nCUDA version: 12.4\n","output_type":"stream"}],"execution_count":2},{"cell_type":"markdown","source":"## 2. 
Dataset Loading (Zero-I/O Design)\n\nLoad pre-tokenized tensors directly to GPU before any measurements.","metadata":{}},{"cell_type":"code","source":"def load_pretokenized_dataset(dataset_path: str, device: str = \"cuda\") -> Tuple[torch.Tensor, torch.Tensor, torch.Tensor, Dict]:\n \"\"\"\n Load pre-tokenized dataset from separate .pt files.\n \n Args:\n dataset_path: Path to directory containing input_ids.pt, attention_mask.pt, labels.pt, metadata.json\n device: Device to load tensors to ('cuda' or 'cpu')\n \n Returns:\n input_ids: [N, seq_len] tensor on device\n attention_mask: [N, seq_len] tensor on device\n labels: [N] tensor on device\n metadata: Dictionary with dataset info\n \"\"\"\n dataset_path = Path(dataset_path)\n print(f\"\\nLoading dataset from: {dataset_path}\")\n \n # Load tensors\n input_ids = torch.load(dataset_path / \"input_ids.pt\", map_location=device)\n attention_mask = torch.load(dataset_path / \"attention_mask.pt\", map_location=device)\n labels = torch.load(dataset_path / \"labels.pt\", map_location=device)\n \n # Load metadata\n with open(dataset_path / \"metadata.json\", 'r') as f:\n metadata = json.load(f)\n \n print(f\"✓ Loaded {input_ids.shape[0]} samples\")\n print(f\" - Sequence length: {input_ids.shape[1]}\")\n print(f\" - Device: {input_ids.device}\")\n print(f\" - Dataset: {metadata.get('dataset_name', 'unknown')}\")\n print(f\" - Labels: {metadata.get('num_labels', 2)}\")\n \n # Calculate memory footprint\n total_bytes = (input_ids.element_size() * input_ids.nelement() + \n attention_mask.element_size() * attention_mask.nelement() + \n labels.element_size() * labels.nelement())\n print(f\" - Memory: {total_bytes / 1024**2:.2f} MB\")\n \n return input_ids, attention_mask, labels, 
metadata","metadata":{"trusted":true,"execution":{"iopub.status.busy":"2025-12-03T10:43:39.809181Z","iopub.execute_input":"2025-12-03T10:43:39.809413Z","iopub.status.idle":"2025-12-03T10:43:39.816850Z","shell.execute_reply.started":"2025-12-03T10:43:39.809395Z","shell.execute_reply":"2025-12-03T10:43:39.816218Z"}},"outputs":[],"execution_count":3},{"cell_type":"markdown","source":"## 3. Model Loading with Accurate Quantization\n\nLoad models in FP32, FP16, or INT8 precision.","metadata":{}},{"cell_type":"code","source":"def apply_int8_quantization_gpu(model: nn.Module, verbose: bool = True) -> None:\n \"\"\"\n Apply symmetric INT8 quantization to model weights (for GPU).\n \n Uses per-tensor symmetric quantization:\n - scale = max(|weight|) / 127\n - quantized = round(weight / scale)\n - dequantized = quantized * scale\n \n This simulates INT8 precision loss on GPU (true INT8 compute requires special kernels).\n \"\"\"\n num_quantized = 0\n \n for name, module in model.named_modules():\n if isinstance(module, nn.Linear):\n with torch.no_grad():\n # Quantize weights\n weight = module.weight.data\n scale = weight.abs().max() / 127.0\n \n if scale > 0:\n weight_q = torch.round(weight / scale)\n weight_q = torch.clamp(weight_q, -128, 127)\n module.weight.data = weight_q * scale\n \n # Quantize bias if exists\n if module.bias is not None:\n bias = module.bias.data\n scale_bias = bias.abs().max() / 127.0\n \n if scale_bias > 0:\n bias_q = torch.round(bias / scale_bias)\n bias_q = torch.clamp(bias_q, -128, 127)\n module.bias.data = bias_q * scale_bias\n \n num_quantized += 1\n \n if verbose:\n print(f\" ✓ Quantized {num_quantized} Linear layers to INT8 precision\")\n\n\ndef load_model(model_name: str, precision: str, device: str = \"cuda\", num_labels: int = 2, verbose: bool = True) -> nn.Module:\n \"\"\"\n Load model with specified precision.\n \n Args:\n model_name: HuggingFace model identifier\n precision: 'fp32', 'fp16', or 'int8'\n device: Device to load model to\n 
num_labels: Number of classification labels\n verbose: Print loading info\n \n Returns:\n model: Loaded model in eval mode\n \"\"\"\n if verbose:\n print(f\"\\nLoading model: {model_name}\")\n print(f\" Precision: {precision.upper()}\")\n print(f\" Device: {device}\")\n \n # Load base model in FP32\n model = AutoModelForSequenceClassification.from_pretrained(\n model_name,\n num_labels=num_labels,\n torch_dtype=torch.float32\n )\n \n # Apply precision conversion\n if precision == \"fp32\":\n model = model.to(device)\n \n elif precision == \"fp16\":\n model = model.half()\n model = model.to(device)\n if verbose:\n print(f\" ✓ Converted to FP16\")\n \n elif precision == \"int8\":\n if device == \"cuda\":\n model = model.to(device)\n apply_int8_quantization_gpu(model, verbose=verbose)\n else:\n # CPU quantization using PyTorch's dynamic quantization\n model = torch.quantization.quantize_dynamic(\n model, {nn.Linear}, dtype=torch.qint8\n )\n model = model.to(\"cpu\")\n if verbose:\n print(f\" ✓ Applied dynamic INT8 quantization (CPU)\")\n else:\n raise ValueError(f\"Unknown precision: {precision}\")\n \n model.eval()\n \n # Calculate model size\n param_size = sum(p.nelement() * p.element_size() for p in model.parameters())\n buffer_size = sum(b.nelement() * b.element_size() for b in model.buffers())\n model_size_mb = (param_size + buffer_size) / 1024**2\n num_params = sum(p.numel() for p in model.parameters())\n \n if verbose:\n print(f\" ✓ Model loaded successfully\")\n print(f\" - Parameters: {num_params:,}\")\n print(f\" - Model size: {model_size_mb:.2f} MB\")\n first_param = next(model.parameters())\n print(f\" - Param dtype: {first_param.dtype}\")\n print(f\" - Param device: {first_param.device}\")\n \n return model\n\n\ndef get_model_info(model: nn.Module) -> Dict:\n \"\"\"Get model metadata.\"\"\"\n param_size = sum(p.nelement() * p.element_size() for p in model.parameters())\n buffer_size = sum(b.nelement() * b.element_size() for b in model.buffers())\n \n 
return {\n \"model_size_mb\": (param_size + buffer_size) / 1024**2,\n \"num_parameters\": sum(p.numel() for p in model.parameters()),\n \"dtype\": str(next(model.parameters()).dtype),\n \"device\": str(next(model.parameters()).device)\n }","metadata":{"trusted":true,"execution":{"iopub.status.busy":"2025-12-03T10:43:39.818175Z","iopub.execute_input":"2025-12-03T10:43:39.818345Z","iopub.status.idle":"2025-12-03T10:43:39.836808Z","shell.execute_reply.started":"2025-12-03T10:43:39.818331Z","shell.execute_reply":"2025-12-03T10:43:39.836128Z"}},"outputs":[],"execution_count":4},{"cell_type":"markdown","source":"## 4. GPU Warmup\n\nStabilize GPU clocks and compile CUDA kernels before measurement.","metadata":{}},{"cell_type":"code","source":"def warmup_gpu(model: nn.Module, input_ids: torch.Tensor, attention_mask: torch.Tensor, \n num_steps: int = 50, verbose: bool = True) -> None:\n \"\"\"\n Warmup GPU to stabilize clocks and compile kernels.\n \n Args:\n model: Model to warmup\n input_ids: Sample input tensor\n attention_mask: Sample attention mask\n num_steps: Number of warmup iterations\n verbose: Print progress\n \"\"\"\n if verbose:\n print(f\"\\nWarming up GPU for {num_steps} iterations...\")\n \n model.eval()\n \n with torch.no_grad():\n for i in range(num_steps):\n _ = model(input_ids=input_ids, attention_mask=attention_mask)\n \n if verbose and (i + 1) % 10 == 0:\n print(f\" Warmup: {i+1}/{num_steps}\", end='\\r')\n \n if torch.cuda.is_available():\n torch.cuda.synchronize()\n \n if verbose:\n print(f\"\\n ✓ Warmup complete\")","metadata":{"trusted":true,"execution":{"iopub.status.busy":"2025-12-03T10:43:39.837641Z","iopub.execute_input":"2025-12-03T10:43:39.837989Z","iopub.status.idle":"2025-12-03T10:43:39.855895Z","shell.execute_reply.started":"2025-12-03T10:43:39.837965Z","shell.execute_reply":"2025-12-03T10:43:39.855158Z"}},"outputs":[],"execution_count":5},{"cell_type":"markdown","source":"## 5. 
Power Logger (Asynchronous)\n\nMonitor GPU power draw using nvidia-smi without interfering with timing.","metadata":{}},{"cell_type":"code","source":"class PowerLogger:\n \"\"\"\n Asynchronous power logger using nvidia-smi polling.\n \"\"\"\n \n def __init__(self, sample_interval_ms: int = 100, gpu_id: int = 0, verbose: bool = False):\n self.sample_interval_ms = sample_interval_ms\n self.gpu_id = gpu_id\n self.verbose = verbose\n self.samples = []\n self.is_running = False\n self._lock = threading.Lock()\n self._reader_thread = None\n \n # Verify nvidia-smi is available\n self._check_nvidia_smi()\n \n def _check_nvidia_smi(self) -> None:\n \"\"\"Check if nvidia-smi is available.\"\"\"\n try:\n result = subprocess.run(\n [\"nvidia-smi\", \"--query-gpu=power.draw\", \"--format=csv,noheader,nounits\", f\"--id={self.gpu_id}\"],\n capture_output=True, text=True, timeout=5\n )\n if result.returncode != 0:\n raise RuntimeError(f\"nvidia-smi failed: {result.stderr}\")\n power = float(result.stdout.strip())\n if self.verbose:\n print(f\" ✓ nvidia-smi available, current power: {power:.2f} W\")\n except Exception as e:\n raise RuntimeError(f\"nvidia-smi not available: {e}\")\n \n def start(self) -> None:\n \"\"\"Start power logging in background thread.\"\"\"\n with self._lock:\n if self.is_running:\n raise RuntimeError(\"PowerLogger already running\")\n self.samples = []\n self.is_running = True\n \n self._reader_thread = threading.Thread(target=self._poll_power, daemon=True)\n self._reader_thread.start()\n \n if self.verbose:\n print(f\" ✓ Power logger started (interval: {self.sample_interval_ms} ms)\")\n \n def _poll_power(self) -> None:\n \"\"\"Background thread that polls nvidia-smi.\"\"\"\n interval_sec = self.sample_interval_ms / 1000.0\n \n while self.is_running:\n try:\n result = subprocess.run(\n [\"nvidia-smi\", \"--query-gpu=power.draw\", \"--format=csv,noheader,nounits\", f\"--id={self.gpu_id}\"],\n capture_output=True, text=True, timeout=2\n )\n \n if 
result.returncode == 0:\n power = float(result.stdout.strip())\n with self._lock:\n self.samples.append(power)\n except:\n pass\n \n time.sleep(interval_sec)\n \n def stop(self) -> None:\n \"\"\"Stop power logging.\"\"\"\n with self._lock:\n if not self.is_running:\n raise RuntimeError(\"PowerLogger not running\")\n self.is_running = False\n \n if self._reader_thread:\n self._reader_thread.join(timeout=2)\n \n if self.verbose:\n print(f\" ✓ Power logger stopped ({len(self.samples)} samples)\")\n \n def get_samples(self) -> List[float]:\n \"\"\"Return collected power samples.\"\"\"\n with self._lock:\n return self.samples.copy()\n \n def __enter__(self):\n self.start()\n return self\n \n def __exit__(self, *args):\n if self.is_running:\n self.stop()","metadata":{"trusted":true,"execution":{"iopub.status.busy":"2025-12-03T10:43:39.856687Z","iopub.execute_input":"2025-12-03T10:43:39.856880Z","iopub.status.idle":"2025-12-03T10:43:39.872502Z","shell.execute_reply.started":"2025-12-03T10:43:39.856865Z","shell.execute_reply":"2025-12-03T10:43:39.871822Z"}},"outputs":[],"execution_count":6},{"cell_type":"markdown","source":"## 6. 
Timed Inference Benchmark\n\nRun inference with precise timing and CUDA synchronization.","metadata":{}},{"cell_type":"code","source":"def run_timed_inference(model: nn.Module, input_ids: torch.Tensor, attention_mask: torch.Tensor,\n num_iters: int, verbose: bool = True) -> Dict:\n \"\"\"\n Run timed inference loop with CUDA synchronization.\n \n Args:\n model: Model in eval mode\n input_ids: Input tensor [batch_size, seq_len]\n attention_mask: Attention mask [batch_size, seq_len]\n num_iters: Number of iterations\n verbose: Print progress\n \n Returns:\n Dictionary with timing metrics\n \"\"\"\n if verbose:\n print(f\"\\nRunning {num_iters} timed inference iterations...\")\n \n model.eval()\n batch_size = input_ids.shape[0]\n latencies = []\n \n # Ensure clean state\n if torch.cuda.is_available():\n torch.cuda.synchronize()\n \n start_time = time.perf_counter()\n \n with torch.no_grad():\n for i in range(num_iters):\n iter_start = time.perf_counter()\n \n # Forward pass\n _ = model(input_ids=input_ids, attention_mask=attention_mask)\n \n # Synchronize to ensure completion\n if torch.cuda.is_available():\n torch.cuda.synchronize()\n \n iter_end = time.perf_counter()\n latencies.append(iter_end - iter_start)\n \n if verbose and (i + 1) % 50 == 0:\n print(f\" Progress: {i+1}/{num_iters}\", end='\\r')\n \n end_time = time.perf_counter()\n total_time = end_time - start_time\n \n if verbose:\n print(f\"\\n ✓ Inference complete: {total_time:.3f}s\")\n \n latencies = np.array(latencies)\n \n return {\n \"total_time\": float(total_time),\n \"num_iters\": num_iters,\n \"batch_size\": batch_size,\n \"mean_latency\": float(np.mean(latencies)),\n \"std_latency\": float(np.std(latencies)),\n \"min_latency\": float(np.min(latencies)),\n \"max_latency\": float(np.max(latencies)),\n \"median_latency\": float(np.median(latencies)),\n \"throughput\": float(batch_size * num_iters / total_time)\n }\n\n\ndef compute_accuracy(model: nn.Module, input_ids: torch.Tensor, attention_mask: 
torch.Tensor,\n labels: torch.Tensor, verbose: bool = True) -> Dict:\n \"\"\"\n Compute model accuracy on dataset.\n \n Returns:\n Dictionary with accuracy metrics\n \"\"\"\n if verbose:\n print(f\"\\nComputing accuracy...\")\n \n model.eval()\n \n with torch.no_grad():\n outputs = model(input_ids=input_ids, attention_mask=attention_mask)\n predictions = torch.argmax(outputs.logits, dim=-1)\n correct = (predictions == labels).sum().item()\n total = labels.shape[0]\n accuracy = correct / total\n \n if verbose:\n print(f\" ✓ Accuracy: {accuracy*100:.2f}% ({correct}/{total})\")\n \n return {\n \"accuracy\": float(accuracy),\n \"num_correct\": int(correct),\n \"num_samples\": int(total)\n }","metadata":{"trusted":true,"execution":{"iopub.status.busy":"2025-12-03T10:43:39.873193Z","iopub.execute_input":"2025-12-03T10:43:39.873438Z","iopub.status.idle":"2025-12-03T10:43:39.891195Z","shell.execute_reply.started":"2025-12-03T10:43:39.873413Z","shell.execute_reply":"2025-12-03T10:43:39.890469Z"}},"outputs":[],"execution_count":7},{"cell_type":"markdown","source":"## 7. 
Energy Computation\n\nCompute energy metrics from power samples and timing.","metadata":{}},{"cell_type":"code","source":"def compute_energy_metrics(power_samples: List[float], timing_results: Dict) -> Dict:\n \"\"\"\n Compute energy consumption from power samples and timing.\n \n Energy (J) = Power (W) × Time (s)\n \n IMPORTANT: This computes energy PER SAMPLE, not per batch iteration.\n - Each iteration processes batch_size samples\n - Energy per sample = Total energy / (num_iters × batch_size)\n \n Args:\n power_samples: List of power measurements in Watts\n timing_results: Dictionary from run_timed_inference\n \n Returns:\n Dictionary with energy metrics\n \"\"\"\n if len(power_samples) == 0:\n print(\" ⚠️ Warning: No power samples collected\")\n return {\n \"mean_power_w\": None,\n \"std_power_w\": None,\n \"min_power_w\": None,\n \"max_power_w\": None,\n \"num_power_samples\": 0,\n \"total_energy_j\": None,\n \"energy_per_batch_j\": None,\n \"energy_per_batch_mj\": None,\n \"energy_per_sample_j\": None,\n \"energy_per_sample_mj\": None,\n \"samples_per_joule\": None\n }\n \n power_array = np.array(power_samples)\n \n # Power statistics\n mean_power = float(np.mean(power_array))\n std_power = float(np.std(power_array))\n min_power = float(np.min(power_array))\n max_power = float(np.max(power_array))\n \n # Energy computation\n total_time = timing_results[\"total_time\"]\n num_iters = timing_results[\"num_iters\"]\n batch_size = timing_results[\"batch_size\"]\n total_samples = num_iters * batch_size\n \n # Total energy for all iterations\n total_energy = mean_power * total_time # Joules\n \n # Energy per batch (per iteration)\n energy_per_batch = total_energy / num_iters\n energy_per_batch_mj = energy_per_batch * 1000 # millijoules\n \n # Energy per sample (correct metric!)\n energy_per_sample = total_energy / total_samples\n energy_per_sample_mj = energy_per_sample * 1000 # millijoules\n \n return {\n \"mean_power_w\": mean_power,\n \"std_power_w\": 
std_power,\n \"min_power_w\": min_power,\n \"max_power_w\": max_power,\n \"num_power_samples\": len(power_samples),\n \"total_energy_j\": total_energy,\n \"total_samples\": total_samples,\n # Per-batch metrics (for reference)\n \"energy_per_batch_j\": energy_per_batch,\n \"energy_per_batch_mj\": energy_per_batch_mj,\n # Per-sample metrics (the correct ones!)\n \"energy_per_sample_j\": energy_per_sample,\n \"energy_per_sample_mj\": energy_per_sample_mj,\n \"samples_per_joule\": 1.0 / energy_per_sample if energy_per_sample > 0 else None\n }","metadata":{"trusted":true,"execution":{"iopub.status.busy":"2025-12-03T10:43:39.891897Z","iopub.execute_input":"2025-12-03T10:43:39.892110Z","iopub.status.idle":"2025-12-03T10:43:39.904570Z","shell.execute_reply.started":"2025-12-03T10:43:39.892090Z","shell.execute_reply":"2025-12-03T10:43:39.903917Z"}},"outputs":[],"execution_count":8},{"cell_type":"markdown","source":"## 8. Complete Measurement Function\n\nOrchestrate the complete measurement pipeline for one trial.","metadata":{}},{"cell_type":"code","source":"def run_single_trial(model_name: str, precision: str, input_ids: torch.Tensor, \n attention_mask: torch.Tensor, labels: torch.Tensor,\n num_iters: int = 300, warmup_iters: int = 50, \n power_interval_ms: int = 100, device: str = \"cuda\",\n trial_num: int = 1, verbose: bool = True) -> Dict:\n \"\"\"\n Run a complete measurement trial for one precision level.\n \n Pipeline:\n 1. Load model\n 2. Warmup GPU\n 3. Start power logging\n 4. Run timed inference\n 5. Stop power logging\n 6. Compute accuracy\n 7. Compute energy metrics\n 8. Collect memory stats\n \n Returns:\n Dictionary with all metrics\n \"\"\"\n print(f\"\\n{'='*70}\")\n print(f\"TRIAL {trial_num}: {precision.upper()}\")\n print(f\"{'='*70}\")\n \n # Reset GPU memory\n if torch.cuda.is_available():\n torch.cuda.reset_peak_memory_stats()\n torch.cuda.empty_cache()\n \n # 1. 
Load model\n model = load_model(model_name, precision, device, num_labels=2, verbose=verbose)\n model_info = get_model_info(model)\n \n # 2. Warmup\n warmup_gpu(model, input_ids, attention_mask, num_steps=warmup_iters, verbose=verbose)\n \n # 3. Start power logging\n print(f\"\\nStarting measurement...\")\n power_logger = PowerLogger(sample_interval_ms=power_interval_ms, verbose=verbose)\n power_logger.start()\n time.sleep(0.5) # Let logger stabilize\n \n # 4. Run timed inference\n timing_results = run_timed_inference(model, input_ids, attention_mask, num_iters, verbose=verbose)\n \n # 5. Stop power logging\n time.sleep(0.5) # Capture trailing samples\n power_logger.stop()\n power_samples = power_logger.get_samples()\n \n # 6. Compute accuracy\n accuracy_results = compute_accuracy(model, input_ids, attention_mask, labels, verbose=verbose)\n \n # 7. Compute energy\n energy_results = compute_energy_metrics(power_samples, timing_results)\n \n # 8. Memory stats\n memory_stats = {}\n if torch.cuda.is_available():\n memory_stats = {\n \"peak_memory_mb\": torch.cuda.max_memory_allocated() / 1024**2,\n \"allocated_memory_mb\": torch.cuda.memory_allocated() / 1024**2,\n \"reserved_memory_mb\": torch.cuda.memory_reserved() / 1024**2\n }\n \n # Combine all results\n results = {\n \"trial\": trial_num,\n \"precision\": precision,\n \"model_name\": model_name,\n \"timestamp\": datetime.now().isoformat(),\n }\n results.update(timing_results)\n results.update(accuracy_results)\n results.update(energy_results)\n results.update(memory_stats)\n results.update(model_info)\n \n # Print summary\n print(f\"\\n{'='*70}\")\n print(f\"TRIAL {trial_num} SUMMARY: {precision.upper()}\")\n print(f\"{'='*70}\")\n print(f\"Batch size: {timing_results['batch_size']} samples\")\n print(f\"Iterations: {timing_results['num_iters']}\")\n print(f\"Total samples: {energy_results.get('total_samples', timing_results['batch_size'] * timing_results['num_iters'])}\")\n print(f\"Latency: 
{timing_results['mean_latency']*1000:.3f} ms/batch (± {timing_results['std_latency']*1000:.3f} ms)\")\n print(f\"Throughput: {timing_results['throughput']:.2f} samples/s\")\n print(f\"Accuracy: {accuracy_results['accuracy']*100:.2f}%\")\n if energy_results['mean_power_w'] is not None:\n print(f\"Power: {energy_results['mean_power_w']:.2f} W (± {energy_results['std_power_w']:.2f} W)\")\n print(f\"Total Energy: {energy_results['total_energy_j']:.3f} J\")\n print(f\"Energy/batch: {energy_results['energy_per_batch_mj']:.3f} mJ\")\n print(f\"Energy/sample: {energy_results['energy_per_sample_mj']:.3f} mJ ← (correct per-sample metric)\")\n if memory_stats:\n print(f\"Peak Memory: {memory_stats['peak_memory_mb']:.2f} MB\")\n print(f\"{'='*70}\")\n \n # Cleanup\n del model\n if torch.cuda.is_available():\n torch.cuda.empty_cache()\n \n return results","metadata":{"trusted":true,"execution":{"iopub.status.busy":"2025-12-03T10:43:39.905259Z","iopub.execute_input":"2025-12-03T10:43:39.905498Z","iopub.status.idle":"2025-12-03T10:43:39.923699Z","shell.execute_reply.started":"2025-12-03T10:43:39.905467Z","shell.execute_reply":"2025-12-03T10:43:39.923070Z"}},"outputs":[],"execution_count":9},{"cell_type":"markdown","source":"## 9. 
Multi-Trial Runner\n\nRun multiple trials for statistical significance.","metadata":{}},{"cell_type":"code","source":"def run_multi_trial_experiment(model_name: str, precision: str, dataset_path: str,\n num_trials: int = 5, num_iters: int = 300,\n warmup_iters: int = 50, power_interval_ms: int = 100,\n device: str = \"cuda\") -> Tuple[List[Dict], Dict]:\n \"\"\"\n Run multiple trials for one precision level and aggregate results.\n \n Returns:\n trial_results: List of per-trial result dictionaries\n aggregated: Dictionary with mean/std across trials\n \"\"\"\n print(f\"\\n{'#'*70}\")\n print(f\"# MULTI-TRIAL EXPERIMENT: {precision.upper()}\")\n print(f\"# Number of trials: {num_trials}\")\n print(f\"# Iterations per trial: {num_iters}\")\n print(f\"{'#'*70}\")\n \n # Load dataset once (stays on GPU)\n input_ids, attention_mask, labels, metadata = load_pretokenized_dataset(dataset_path, device)\n \n # Run trials\n trial_results = []\n for trial in range(1, num_trials + 1):\n result = run_single_trial(\n model_name=model_name,\n precision=precision,\n input_ids=input_ids,\n attention_mask=attention_mask,\n labels=labels,\n num_iters=num_iters,\n warmup_iters=warmup_iters,\n power_interval_ms=power_interval_ms,\n device=device,\n trial_num=trial,\n verbose=True\n )\n trial_results.append(result)\n \n # Aggregate results\n aggregated = aggregate_trials(trial_results)\n print_aggregated_results(aggregated, precision)\n \n return trial_results, aggregated\n\n\ndef aggregate_trials(trial_results: List[Dict]) -> Dict:\n \"\"\"Compute mean and std across trials.\"\"\"\n numeric_keys = [\n \"mean_latency\", \"std_latency\", \"throughput\", \"accuracy\",\n \"mean_power_w\", \"std_power_w\", \"total_energy_j\", \n \"energy_per_batch_j\", \"energy_per_batch_mj\",\n \"energy_per_sample_j\", \"energy_per_sample_mj\",\n \"peak_memory_mb\"\n ]\n \n aggregated = {\n \"precision\": trial_results[0][\"precision\"],\n \"model_name\": trial_results[0][\"model_name\"],\n \"num_trials\": 
len(trial_results),\n \"batch_size\": trial_results[0][\"batch_size\"],\n \"total_samples\": trial_results[0].get(\"total_samples\", trial_results[0][\"batch_size\"] * trial_results[0][\"num_iters\"]),\n \"model_size_mb\": trial_results[0][\"model_size_mb\"],\n \"num_parameters\": trial_results[0][\"num_parameters\"]\n }\n \n for key in numeric_keys:\n if key in trial_results[0] and trial_results[0][key] is not None:\n values = [r[key] for r in trial_results if key in r and r[key] is not None]\n if values:\n aggregated[f\"{key}_mean\"] = float(np.mean(values))\n aggregated[f\"{key}_std\"] = float(np.std(values))\n aggregated[f\"{key}_min\"] = float(np.min(values))\n aggregated[f\"{key}_max\"] = float(np.max(values))\n \n return aggregated\n\n\ndef print_aggregated_results(agg: Dict, precision: str) -> None:\n \"\"\"Pretty print aggregated results.\"\"\"\n print(f\"\\n{'='*70}\")\n print(f\"AGGREGATED RESULTS: {precision.upper()}\")\n print(f\"Trials: {agg['num_trials']}\")\n print(f\"Batch size: {agg['batch_size']} samples\")\n print(f\"Total samples per trial: {agg.get('total_samples', 'N/A')}\")\n print(f\"{'='*70}\")\n \n if 'mean_latency_mean' in agg:\n print(f\"\\nLatency (per batch):\")\n print(f\" {agg['mean_latency_mean']*1000:.3f} ± {agg['mean_latency_std']*1000:.3f} ms\")\n \n if 'throughput_mean' in agg:\n print(f\"\\nThroughput:\")\n print(f\" {agg['throughput_mean']:.2f} ± {agg['throughput_std']:.2f} samples/s\")\n \n if 'accuracy_mean' in agg:\n print(f\"\\nAccuracy:\")\n print(f\" {agg['accuracy_mean']*100:.2f} ± {agg['accuracy_std']*100:.2f}%\")\n \n if 'mean_power_w_mean' in agg:\n print(f\"\\nPower:\")\n print(f\" {agg['mean_power_w_mean']:.2f} ± {agg['mean_power_w_std']:.2f} W\")\n \n if 'energy_per_batch_mj_mean' in agg:\n print(f\"\\nEnergy per Batch:\")\n print(f\" {agg['energy_per_batch_mj_mean']:.3f} ± {agg['energy_per_batch_mj_std']:.3f} mJ\")\n \n if 'energy_per_sample_mj_mean' in agg:\n print(f\"\\nEnergy per Sample (CORRECT METRIC):\")\n 
print(f\" {agg['energy_per_sample_mj_mean']:.3f} ± {agg['energy_per_sample_mj_std']:.3f} mJ\")\n \n if 'peak_memory_mb_mean' in agg:\n print(f\"\\nMemory:\")\n print(f\" Model size: {agg['model_size_mb']:.2f} MB\")\n print(f\" Peak GPU: {agg['peak_memory_mb_mean']:.2f} ± {agg['peak_memory_mb_std']:.2f} MB\")\n \n print(f\"{'='*70}\\n\")","metadata":{"trusted":true,"execution":{"iopub.status.busy":"2025-12-03T10:43:39.925605Z","iopub.execute_input":"2025-12-03T10:43:39.925866Z","iopub.status.idle":"2025-12-03T10:43:39.943574Z","shell.execute_reply.started":"2025-12-03T10:43:39.925850Z","shell.execute_reply":"2025-12-03T10:43:39.943061Z"}},"outputs":[],"execution_count":10},{"cell_type":"markdown","source":"## 10. Results Saving\n\nSave results in CSV and JSON formats.","metadata":{}},{"cell_type":"code","source":"def save_results(trial_results: List[Dict], aggregated: Dict, \n precision: str, output_dir: str = \"./results\") -> None:\n \"\"\"\n Save trial and aggregated results.\n \n Creates:\n - results/{precision}_trials.csv: Per-trial results\n - results/{precision}_aggregated.json: Aggregated statistics\n \"\"\"\n output_dir = Path(output_dir)\n output_dir.mkdir(parents=True, exist_ok=True)\n \n # Save detailed trial results\n trials_df = pd.DataFrame(trial_results)\n trials_path = output_dir / f\"{precision}_trials.csv\"\n trials_df.to_csv(trials_path, index=False)\n print(f\"\\n✓ Saved trial results: {trials_path}\")\n \n # Save aggregated results\n agg_path = output_dir / f\"{precision}_aggregated.json\"\n with open(agg_path, 'w') as f:\n json.dump(aggregated, f, indent=2)\n print(f\"✓ Saved aggregated results: {agg_path}\")\n\n\ndef save_comparison_table(all_aggregated: List[Dict], output_dir: str = \"./results\") -> pd.DataFrame:\n \"\"\"\n Create and save comparison table across all precisions.\n \"\"\"\n output_dir = Path(output_dir)\n \n comparison_df = pd.DataFrame(all_aggregated)\n comparison_path = output_dir / \"comparison_all_precisions.csv\"\n 
comparison_df.to_csv(comparison_path, index=False)\n print(f\"\\n✓ Saved comparison table: {comparison_path}\")\n \n return comparison_df","metadata":{"trusted":true,"execution":{"iopub.status.busy":"2025-12-03T10:43:39.944185Z","iopub.execute_input":"2025-12-03T10:43:39.944343Z","iopub.status.idle":"2025-12-03T10:43:39.956083Z","shell.execute_reply.started":"2025-12-03T10:43:39.944330Z","shell.execute_reply":"2025-12-03T10:43:39.955427Z"}},"outputs":[],"execution_count":11},{"cell_type":"markdown","source":"## 11. Main Experiment Runner\n\nRun experiments for all precision levels.","metadata":{}},{"cell_type":"code","source":"# Configuration - Use current working directory to find dataset\nimport os\n\n# Get current working directory and find dataset\ncwd = os.getcwd()\nprint(f\"Current working directory: {cwd}\")\n\n# Try multiple paths relative to cwd\npossible_paths = [\n Path(cwd) / \"..\" / \"datasets\" / \"tokenized_data\", # From notebooks/\n Path(cwd) / \"datasets\" / \"tokenized_data\", # From repo root\n Path(cwd) / \"..\" / \"..\" / \"datasets\" / \"tokenized_data\", # From deeper nesting\n Path(\"/kaggle/working/energy_aware_quantization/datasets/tokenized_data\"), # Kaggle path\n]\n\ndataset_path = None\nfor path in possible_paths:\n abs_path = path.resolve()\n print(f\"Trying: {abs_path}\")\n if abs_path.exists() and (abs_path / \"input_ids.pt\").exists():\n dataset_path = str(abs_path)\n break\n\nif dataset_path is None:\n # Last resort: search upward from cwd\n current = Path(cwd)\n for _ in range(5): # Search up to 5 levels up\n test_path = current / \"datasets\" / \"tokenized_data\"\n print(f\"Trying: {test_path.resolve()}\")\n if test_path.exists() and (test_path / \"input_ids.pt\").exists():\n dataset_path = str(test_path.resolve())\n break\n current = current.parent\n \nif dataset_path is None:\n raise FileNotFoundError(\n f\"Could not find datasets/tokenized_data directory.\\n\"\n f\"Searched from: {cwd}\\n\"\n f\"Please ensure the dataset 
exists or update the dataset_path manually.\"\n )\n\nprint(f\"✓ Found dataset at: {dataset_path}\")\n\n# Set output directory\noutput_path = Path(cwd) / \"..\" / \"results\"\nif not output_path.parent.exists():\n output_path = Path(cwd) / \"results\"\noutput_dir = str(output_path.resolve())\n\nCONFIG = {\n \"model_name\": \"distilbert-base-uncased-finetuned-sst-2-english\",\n \"dataset_path\": dataset_path,\n \"precisions\": [\"fp32\", \"fp16\", \"int8\"], # Precision levels to test\n \"num_trials\": 5, # Number of trials per precision\n \"num_iters\": 300, # Inference iterations per trial\n \"warmup_iters\": 50, # Warmup iterations\n \"power_interval_ms\": 100, # Power sampling interval\n \"device\": \"cuda\" if torch.cuda.is_available() else \"cpu\",\n \"output_dir\": output_dir\n}\n\nprint(\"\\nExperiment Configuration:\")\nprint(json.dumps(CONFIG, indent=2))","metadata":{"trusted":true,"execution":{"iopub.status.busy":"2025-12-03T10:43:39.956771Z","iopub.execute_input":"2025-12-03T10:43:39.956998Z","iopub.status.idle":"2025-12-03T10:43:39.973244Z","shell.execute_reply.started":"2025-12-03T10:43:39.956977Z","shell.execute_reply":"2025-12-03T10:43:39.972651Z"}},"outputs":[{"name":"stdout","text":"Current working directory: /kaggle/working\nTrying: /kaggle/datasets/tokenized_data\nTrying: /kaggle/working/datasets/tokenized_data\nTrying: /datasets/tokenized_data\nTrying: /kaggle/working/energy_aware_quantization/datasets/tokenized_data\n✓ Found dataset at: /kaggle/working/energy_aware_quantization/datasets/tokenized_data\n\nExperiment Configuration:\n{\n \"model_name\": \"distilbert-base-uncased-finetuned-sst-2-english\",\n \"dataset_path\": \"/kaggle/working/energy_aware_quantization/datasets/tokenized_data\",\n \"precisions\": [\n \"fp32\",\n \"fp16\",\n \"int8\"\n ],\n \"num_trials\": 5,\n \"num_iters\": 300,\n \"warmup_iters\": 50,\n \"power_interval_ms\": 100,\n \"device\": \"cuda\",\n \"output_dir\": 
\"/kaggle/results\"\n}\n","output_type":"stream"}],"execution_count":12},{"cell_type":"code","source":"# Run experiments for all precisions\nall_trial_results = {}\nall_aggregated = []\n\nfor precision in CONFIG[\"precisions\"]:\n trial_results, aggregated = run_multi_trial_experiment(\n model_name=CONFIG[\"model_name\"],\n precision=precision,\n dataset_path=CONFIG[\"dataset_path\"],\n num_trials=CONFIG[\"num_trials\"],\n num_iters=CONFIG[\"num_iters\"],\n warmup_iters=CONFIG[\"warmup_iters\"],\n power_interval_ms=CONFIG[\"power_interval_ms\"],\n device=CONFIG[\"device\"]\n )\n \n # Save results\n save_results(trial_results, aggregated, precision, CONFIG[\"output_dir\"])\n \n # Store for comparison\n all_trial_results[precision] = trial_results\n all_aggregated.append(aggregated)\n\n# Save comparison table\ncomparison_df = save_comparison_table(all_aggregated, CONFIG[\"output_dir\"])\n\nprint(\"\\n\" + \"#\"*70)\nprint(\"# ALL EXPERIMENTS COMPLETE\")\nprint(\"#\"*70)","metadata":{"trusted":true,"execution":{"iopub.status.busy":"2025-12-03T10:43:39.974007Z","iopub.execute_input":"2025-12-03T10:43:39.974257Z","iopub.status.idle":"2025-12-03T10:51:58.992778Z","shell.execute_reply.started":"2025-12-03T10:43:39.974242Z","shell.execute_reply":"2025-12-03T10:51:58.992075Z"}},"outputs":[{"name":"stdout","text":"\n######################################################################\n# MULTI-TRIAL EXPERIMENT: FP32\n# Number of trials: 5\n# Iterations per trial: 300\n######################################################################\n\nLoading dataset from: /kaggle/working/energy_aware_quantization/datasets/tokenized_data\n✓ Loaded 50 samples\n - Sequence length: 128\n - Device: cuda:0\n - Dataset: sst2\n - Labels: 2\n - Memory: 0.10 MB\n\n======================================================================\nTRIAL 1: FP32\n======================================================================\n\nLoading model: distilbert-base-uncased-finetuned-sst-2-english\n 
Precision: FP32\n Device: cuda\n","output_type":"stream"},{"output_type":"display_data","data":{"text/plain":"config.json: 0%| | 0.00/629 [00:00","image/png":"iVBORw0KGgoAAAANSUhEUgAABv0AAAPZCAYAAAAyVeYCAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjcuMiwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8pXeV/AAAACXBIWXMAAA9hAAAPYQGoP6dpAAEAAElEQVR4nOzdd1gUV9sG8HuBBekooFJUVAQrxV5QbLGb2I0xGqOxJKImlliSV7GXWIMFu9Egxq4YS2LDXgOiWLFERZSi9L7s9wffTnbYBQEXFuT+XVeusLMzs8/Mnp05nmfOORK5XC4HEREREREREREREREREZVaOtoOgIiIiIiIiIiIiIiIiIg+DJN+RERERERERERERERERKUck35EREREREREREREREREpRyTfkRERERERERERERERESlHJN+RERERERERERERERERKUck35EREREREREREREREREpRyTfkRERERERERERERERESlHJN+RERERERERERERERERKUck35EREREREREREREREREpZyetgMgIiJScHZ2zve6a9asQceOHYswGrp37x569eqlsjwgIABOTk7FH9BHIjExEfv378fFixdx//59xMbGAgDMzc3h5OSEVq1aoW/fvjAzM9NuoFp09epVDB06VHjdu3dvLFq0SIsR5Z/ydczOzg6nT5/WYjSatX//fkyfPl3texKJBMbGxrC3t0eTJk0waNAg1KxZs5gjFAsNDcX69esRFBSEt2/fIjMzEwBQu3ZtHDp0SKuxUdFLTEzEoUOHcOnSJdy7dw/v3r1Deno6TE1N4eDggEaNGqFLly5o0KCBtkP9qHzM18C8PHv2DD169EBGRgZ0dHQQEBAAR0dH4f0hQ4bg2rVrareVSqUwMzNDrVq10KFDB/Tr1w9GRkbFFXqhJSUlYdeuXTh9+jTCwsKQmJiIcuXKwdzcHBUqVECtWrXg5OSEzp07w9bWVtvhFgttlf/c7s/79+9HvXr11G7Ts2dPPHz4ULSsadOm2LFjR5HEqCnTpk3DgQMHhNfbt29Hs2bNij0Of39/eHt7AwCqV6+OgIAASKXSYo+DiIhIHfb0IyIiIrX27dundrnyP7SpYI4cOYJ27dph/vz5OHv2LF6/fo3U1FSkpqbizZs3OH/+PBYtWgRPT0/s2bNH2+FqnI+PD5ydnYX/9u/fr+2Q8uXq1auiuKdNm6btkEocuVyOxMRE3L9/Hzt27MBnn32G33//XWvxhIWFYdCgQThx4gQiIyOFhB+VDXv27EHbtm0xZ84cnDx5EuHh4UhOTkZmZibevXuHoKAgbNq0Cf369cP9+/e1HS59BBYuXIiMjAwAQJcuXUQJv/fJyMhATEwMrly5gvnz56Nnz554/PhxUYWqEc+ePUPPnj2xZMkS3LhxA7GxscjMzERiYiLCw8Nx+/Zt7N+/H4sWLcL169e1HW6ZtX37drXLL1++rJLwKwrt27cX1Z8+Jn379oWNjQ0A4OnTpyU+WUpERGULe/oREVGJ1aZNGxgaGqp9r2LFisUcTdmSkZGBI0eOqH0vICAAkyZNgp4eqxEFsW3bNixcuFC0zMzMDC4uLgCAkJAQxMfHAwCSk5Px888/IyYmBmPGjCn2WLWtQoUK6Ny5s/C6NPXEUY67QoUKWoyk6JUvXx5NmzYFAMTFxSEkJATJyckAsq8hc+fORc2aNdGiRYtij+3gwYNIS0sTXltbW8PFxQV6enqws7Mr9nio+CxevBhbtmwRLdPT00O9evVgaWmJxMREPHjwAHFxcQCyE9akOWXpGqhw6dIlnD17Vnidn/t2/fr1YWdnB7lcjhcvXuDevXvCe
y9fvsS3336LI0eOQF9fvyhC/iByuRwTJ05EeHi4sKx8+fKoXbs2jIyMEBsbi8ePHwsjGZD2HD16FD/++CMsLS1Fy3/77TctRfThGjRoINQ1AO1dZ/T19fH1119jwYIFALJHoenTpw8sLCy0Eg8REZEyttYREVGJNWvWLNjb22s7jDLp7NmzePfunfBaKpUKT7BHRUXh/PnzaNeunbbCK3Vu3bqFxYsXi5b17dsXM2fORLly5QAAKSkpmDNnjqj328qVK9GkSRM0atSoWOPVtlq1auHXX3/VdhiFUlrjLoyc39OrV68wYMAAREVFCcu2bNmilaRfTEyM6PXChQvRunXrYo+DitehQ4dUEn6ffPIJZs6cKXpYSC6X48KFC1izZk1xh/jRK0vXQIXNmzcLf9erVy9fPZoGDx6MPn36CK//+OMPzJw5U3j977//4uTJk+jWrZtmg9WAe/fuITQ0VHjdoUMH/PrrryoPg927dw/Hjh1D+fLliztE+n/p6enYtWsXxo4dKyx7/vw5AgMDtRjVhxk8eDAGDx6s7TAAZA+R+ssvvyAjIwOJiYnYvXs3Ro0ape2wiIiImPQjIqKPQ845NE6ePIl9+/Zhz549ePToEQCgTp06GD16NDw9PdXuIzU1FQcOHMDJkydx//59xMXFwcDAANWqVUP79u0xePBgtQ0XOT/7+PHj+O2333D48GG8ePECKSkpePDggbDO48eP4ePjgytXriApKQl2dnbo0aMHvvnmG4wcOVI058upU6dgb28PX19frFixQlg+d+5cDBgwQBRHRkYGPDw8hCerra2tcfbs2UL1yMs57KKXl5fo8w8cOKCS9MvMzESzZs2QmJgIIPt8Hzx4UHj/zz//xMSJE4XXR48eFeb8SktLQ+PGjZGeng4AcHd3x65duwBkP/G+d+9e3L9/H0+fPkV8fDwSEhIglUphaWmJevXqoVevXmqTkD4+Pli9erXweuHChahTpw7WrVuHGzdu4N27d/juu+8wbtw4YZ3nz5/D398fly9fxsuXL5Gamgpzc3PUr18fvXv3RufOnSGRSAp0Pn18fJCVlSW8dnFxwbx586Cj899I64aGhpg/fz4ePXqE27dvA8humF6zZo2oETvnvECKMqKQc14XLy8v0fHduHEDf/31F+7du4eIiAjExcUhKSkJhoaGsLW1RcOGDTFo0CDUrl1b5TjUffaLFy+wdetWBAcHIyUlBVWrVkX//v3x1VdfCecp5/egMH36dFGsCxcuRJ8+ffKc0y/ne3lR7A8A7t+/j4CAANy/fx8vXrxAXFwcEhMToa+vj0qVKsHV1RX9+/dH48aNhe1z+6wDBw6IhrlVji8/8/lERETA398fFy9exPPnz5GcnAwTExNUr14dnp6eGDhwoNon1z/0Oqfu96Dc6PyhbG1tMWDAAFEiJSQkRGW9+Ph47N69G2fPnsWjR4+QmJgIY2Nj1KxZE507d8aAAQNU5rN6+fIlOnToILxu2rQpfH19sWHDBhw/fhyvXr2CtbU1mjZtqnYI4m+++Ub4O+dv4vHjx/D398e1a9cQHh6OtLQ0mJqaiubXMjY2LlQ8p0+fVrvumjVrsHbtWpw4cQLR0dGwtbVFnz59MGLECOjp6eHp06dYvXo1Ll26hMTERFSrVg39+/fH0KFDVa4/Z86cwblz53D//n28efMGcXFxSE1NhZGREapUqYKmTZti8ODBqFKlisp5ad++vai3zoMHD3DixAn8/vvvuHv3LjIyMuDo6IihQ4eqneMVyL5OnT59GkeOHMHt27cRExMDmUyGChUqwNHREe3bt8cXX3yhst2NGzewZ88eBAUFISoqCpmZmbC2tkaTJk0wePBgoRd0fqWnp2P58uWiZS1btsSvv/4qutYC2XNQtm7dGh4eHsIDLco0VSY0+T2r+66OHDkCPz8/PHjwAHK5HHXq1MGwYcPQqVMnlWM6dOgQrl+/jgcPHiAqKgpxcXFIT0+HiYkJHBwc0
LJlSwwePBhWVlYq2xaknvO+a+D58+exd+9e3LlzB9HR0ZDJZMIccLVr10b9+vXRt29fmJiYiLZLSkrC/v37cerUKTx8+BDx8fEwMDCAra0tmjVrlus8ooW9b+XX06dPcfHiReH1Z599VqDtFQYOHAhfX1+8evVKWHb79m2VpN/9+/fh7++PGzduICIiAunp6ahQoQLc3NwwcOBAtGrVSmXfhakL5eXZs2ei102bNlVb16xTpw7q1Kmjdh87duzAnTt38PDhQ8TExCA+Ph6ZmZkwMzNDjRo14OnpiUGDBqmUg5L+O8uPyMhI7Nq1CxcuXMCzZ8+QlJQEExMT1KlTB927d0evXr0+eG64SpUq4c2bNwCAXbt2YdSoUcI+d+zYIdRJldfTZMw5z6NCzoS44rqhbp4+uVyOTZs24fbt24iLi8OCBQvQp0+ffM3pV9D7UnJyMvz9/XH69Gk8efJE+HeGhYUFbGxsUL9+fbRo0ULl3xoVKlSAh4cHzpw5AyB7nr8RI0ZAV1f3veeUiIioKDHpR0REH520tDSMHDkSFy5cEC2/efMmRo8eDR8fH3zyySei9x4/fozvvvtOpSEjIyMDoaGhCA0Nxa5du+Dj4wN3d/dcPzs9PR2jRo3C5cuX1b5/9epVjB49GikpKcKyp0+fwsfHB2fOnBElhpQNGjQI69evF4az8fPzU0n6nTt3TjSUUr9+/QqV8IuJicG5c+eE1/b29hg5ciR+//13oQfP6dOnERsbKxrCRk9PD02aNBH+4fvgwQPEx8fDzMwMAESNborXiga64OBgIeEHZDfUKty5cwfr1q1TiTMjIwPJycl48eIFjh8/jr59+wpD7OTm/PnzmDlzptpGXiD7vCrPy6MQHR2Ns2fP4uzZs2jTpg1+/fXXXIeezSkxMVGlPAwdOlSlERoAdHR0MHToUEyZMkVYduXKFSQmJqo0fBXW0aNH4efnpzbOhw8f4uHDh9izZw/mzJmDfv365bkvHx8fUWIXyJ5LbeHChQgPD8dPP/2kkZg14eLFi9i0aZPK8szMTDx9+hRPnz7FwYMHMW7cOHh5eRVZHAEBAZg5c6ZoaCoAiI2NRVBQEIKCgvDbb79hxYoVefaQK8x1rjjkHEIsKSlJ9PrGjRv4/vvvRb0BgezhQf/55x/8888/8Pf3h6+vL6pXr57r58THx+Pzzz//4DmJtmzZgmXLlqnM+ff27VtcvXoVV69exW+//Ya1a9eqTYQXJp63b9+if//+ovvNs2fPsHz5cty9exdDhgzByJEjRWXk0aNHWLBgASIiIlTmlNy1a5doaEHlmBT3L39/f/j4+KBNmzZ5xjZ16lSV33RoaCimTp2K2NhYDBs2TOVYJkyYoHJ9B7KT2xEREXjy5ImocTUzMxP/+9//1M7pGR4ejvDwcBw6dAjffvstJkyYkGe8ym7evInXr1+Llk2ZMkXttVZBIpGoDJ+oqTKh6e85p59//lll7tebN2/i5s2bGDt2LMaPHy96b+PGjcLDAcpiY2MRHByM4OBg7Ny5E9u2bcs1UQO8v56Tl82bN2PJkiUqy6OjoxEdHY2HDx/i8OHDaNGiBZycnIT379+/j++++04liZCRkSHct/z9/TFp0iQMHz48zxg0fd/666+/REPEqku65ZelpaUo6Zfz+rlixQqsX79eZUjaN2/e4MSJEzhx4gT69OmDefPm5Zl0eF9d6H1yJqTWr18PPT09tG7dGtWqVcvXPpYvX65yHwSy66AxMTG4fv06/P394efnJ8ybpk5J+529z99//41p06YJD8gpxMbG4vLly7h8+TJ2796NdevWqU3A51eLFi0QHByMZ8+eITIyEsePH0fPnj2RmJgomrP7iy++ED3Up82Yle3duxeHDx8u1LYFvS+lp6fjyy+/FPVeBf77d8arV69w8+ZN3LhxQ+0Dhi1bthT+7fPq1Svcvn0bbm5uhYqdiIhIU5j0IyKiEmv27NlqEysVKlSAt7d3rttFR
0fjwoULsLa2hpOTE+7evSsMVSmXy7F06VJRY3hcXByGDx8uaiysVq0aqlevjujoaNy5cwdA9rCWY8aMweHDh1GpUiW1nx0VFYWoqCgYGRmhbt26MDAwEHptxcfHY+LEiaKEn4mJCVxcXBARESF8jjrm5ubo168ftm/fDiC7AezGjRuiXknK/zjW0dFRSQrm1+HDh0WNnd27d4euri66desmzAGimPPvyy+/FG3bqlUr4R++WVlZuH79uvBEtrqk36BBg9S+p5z0U7C1tUWlSpVgZmYGHR0dREdH4/79+0Kj1b59+9C+fXt07Ngx12M7evQogOzv18HBAW/evBGe8j527BjmzJkjrKurqwsXFxeYm5vj3r17wpPQ586dw4wZM97bSKJw584dlcbjvIbrbNiwoei1TCbD3bt3hbnTNEFHRwcODg6oUKECzMzMkJmZifDwcDx+/Fj4zDlz5qBNmzZ5zp958OBBGBkZCWX433//Fd77/fffMXz4cNjY2Ag9uMLCwoTPAP6b00ghP3Ot5ZzvTyEmJgY3btwQLVM3r0q1atVgaWkJc3NzZGVlITIyEg8ePBAS7j4+Pmjfvj3q1q0rfNbbt29x/fp1UZz169cXXud3zsGrV69i6tSpkMlkwjJ7e3s4ODjg4cOHiIyMBACh18W+fftQo0YNtfsq6HWuuNy9e1f0Wrn8PH/+HKNHjxY1HDo5OcHOzg4vX74UEhLPnj3DyJEjERAQkGty/f79+wCy58WsW7cu5HI5YmJihLl+7ty5I0oQNGnSROg9qXjY4NChQyrD7tasWROVK1dGaGio8BBFeHg4vvnmGwQEBOQ6TF1u8agTFhYGILvHg7m5Oa5fvy404h8/fhyBgYFITU2Fq6srZDKZ6N6wfft2DBs2DJUrVxbtUyqVonr16rCwsICpqSnS0tLw7NkzvHz5EkB2T/YZM2bg1KlTMDAwUBsXkP2btrCwQL169fD48WPRfXH16tUYOHCg8J3IZDKMHDlS5d7l4OCAatWqISkpSe19bf78+aKEn7GxMVxdXaGjo4N//vkHycnJkMvlWLt2LSpWrCjcJ97n5s2botfW1taoW7duvrZV0GSZKIrvWdmePXtgZWWF2rVr4+nTp6LyvmbNGjRq1EglAWVgYIAaNWrA3NwcxsbGSE1NxaNHj4RrT2xsLKZPn66SFFOWVz0nLxkZGaLeZlKpFK6urjAzM8Pbt2/x+vVrlaQtkN2AP2LECERHRwvLFGX0zZs3wnnOzMzE4sWLYWVlhU8//TTXOPJ738qvS5cuCX+bmZmp7W2YH0lJSSoPnllbWwt/b9q0Cb6+vsJrAwMDuLq6CudfUTb3798PS0tLTJ48OdfPyqsulB+urq7Q09MT6jZv377F3LlzAfx3DWzcuDE6deqU51CnxsbGcHBwgLm5OQwNDZGUlIT79++Lfmdz587F2rVrc91HSfyd5eaff/7BDz/8INRbJRIJ6tWrB2trazx+/BjPnz8HkN1D3svLC/7+/gXueaogkUgwZMgQ4XvZsWMHevbsiX379gnJ5Pr16+f5IOOHxNymTRu8ffsW586dE/27R139TR3Fv2lq1aoFOzs70e80L4W5L/3111+ihJ+VlZVw73jz5g1evnypkoBXlvMcXrp0iUk/IiLSOib9iIioxFLubaYsP4mB1q1bY/Xq1ShXrhyio6Px6aefCg2wz549w6tXr2BrawsA2Lp1q6ihadKkSaL5GI4cOYJJkyYByG4Q27BhA/73v//l+tl16tTB+vXrhcSgogfbvn37RI1W9vb22Llzp7DeokWLsHXr1lz3O2zYMOzcuVNoZPHz8xOSfomJiUKyDQA8PT2F4yuonEPj9ejRQ/i/IukHZDcs5Uz65UzWKZJ+0dHRePLkCYDsZJpMJhMlUZSTfkZGRnB1dRVeN2nSBIGBgWobZR49eiTEB2QPIZpX0g8AZs6cKZoLJD09HVlZWaLeB+bm5vD39xca7zIzMzF27FihN
83Ro0cxYsQIUeInN2/fvlVZlteT0OqSbLklDwrjq6++wg8//ABTU1OV9/z8/ITEZ1paGk6dOpVng7udnR127NgBOzs7ZGZm4ptvvhF6f2RlZeHq1avo1asXunXrhm7duqkMMZZzTqP8UDffX2xsLIYMGSJaNmbMGLRv31543a1bN/Tu3VvtsJlnz57F6NGjhddHjx5F3bp1hc/KOcxn06ZNheE8C2L58uWihN+gQYMwc+ZM6OjoIC0tDePHjxfKWHJyMlavXq0yXKGyglznilpcXByOHz+ukixQnkfPx8dHlPBbvnw5unfvLrxev369cLwvXryAv79/nr12WrVqhZUrVwq9idPT06Gvr4/BgwerDP81btw40fBfWVlZWLp0qWh/EydOFMqB4mEQ5Yc+tmzZItwL8htPbr777juhF9uSJUtEc4KlpKRgwYIF6Nu3r7DuqVOnAGQ3aF65ckU01OaPP/4IW1tbtQnSxYsXC8MDR0VF4erVq3n29qtXrx62bNkCCwsLJCUlYeDAgUIyNiEhAXfu3EGTJk0AZCdPlBtPy5Urh5UrV4p6QiQlJeHvv/8WXj99+lQYuhnIHup469atQk/mmJgY9O3bFxEREQCy5zXt27evSm88dXJeJ/NTV1BWFGVCk99zTk2bNsX69ethZGQEmUyGqVOnIiAgQHh//fr1omTEsmXLUL16dZVzmZWVhYkTJ+LYsWMAsudhe/z4cZ7Jq9zqOXl5+/atqLfVvHnzVI4vPDwcFy9eFCVSt27dKqo7ubq6YtOmTcLvbO3atVi1apXw/tKlS9GjR49ce3jm976VX8q/gZo1axY4SSOXy/HixQssWbIECQkJovcUv9WEhATRsMlVqlSBn5+fcP6Tk5NFPZW2bduGoUOH5vnQjrq6UH5VrFgRo0ePVjsnZnx8PK5cuYIrV65g9erVaNeuHRYsWKBy7925cyecnJxUeiSmp6dj6NChCAoKAgAEBgYiKSlJZThdZSXpd5aXpUuXCskzPT09/Pbbb0I9Xi6XY9asWfjjjz8AAEFBQfjrr7/ynSRTp3fv3li5ciUSEhJw69YtBAcH4/fffxfez89Q6YWNWfFwZs5hPvM756eenh5WrVolqtPnp4wW5r6keDgGyE5Enzx5UnQ/lclkuHXrlkpSXiFnYjs/D0EQEREVNSb9iIjoozR9+nSUK1cOQHaCxcXFRZQUe/PmjdAYfvLkSdG2wcHBouF6lBvpgez5k/JK+v3888+inoCKBracw/ANHz5ctN748ePxxx9/qB3uCMhuqOrSpQuOHDkCIHu4ncjISFSsWBEnTpxAWlqasO7nn38u/P3o0SP4+Pio3WezZs1EjT6hoaGi+QednJyEIbZcXFxQtWpV4ane0NBQPHz4UDQEV82aNUXzgyiSecoJvp49e+LgwYOIiorC48ePYW9vj+DgYOH9pk2bioaOsrS0RFBQEHx8fBASEoJXr14hOTlZ7VCoisRiblq0aCE6XiD7+7l9+7ZoSK1y5cqJGhEBCD0hFM6cOZOvpF9B5Ry2C0Cuw74WRpUqVXD8+HEcO3YM9+7dQ3R0NFJTU9V+7vvO58iRI4WGdT09PXh6eoqGfMvPPDEfKjExESNGjBANqzh06FD88MMPovVsbGxw7tw5HDp0CKGhoXjz5g1SU1MLVY4KIyYmBrdu3RJeS6VSTJo0SWiYNjAwwJQpU0TDNAYGBiIrKyvXxuuCXOeA7MRXfudsyo9r167l2YujfPny+PbbbwFkl2Hlub2kUqkwHJ1Czifpz5w5k2vST1dXF3PmzBEa/gHkKzGkcOfOHdFvulKlSqJ5/8zNzTF+/HjRAyBnzpzJNcFTkHiMjIxESeaGDRuKGqmrVq0qNFAD2dctRSM1oPq7qlKlCg4dOoS///4bjx49wtu3b5Gamqr2s588eZJn0u/7778XesgaGxujefPmoiEhlT9budEUyL4e5Bz6zNjYWNSgfvr0adFvLiMjAzNmzBBto3wtU
gx7m3O+pvxQd03Li6bLhKa/55wmTJggzH2pq6uLKVOmiJIR//zzD9LS0oSenYoHjc6ePYvHjx8jLi5OVG9Q9vTp0zyTfrnVc/JSvnx5GBkZiYYpT0lJQbVq1VCtWjXY2trCzs5OZZSCnHMCenl5iX5no0aNgr+/v/DdvXnzBqGhobn2wNbkfSs1NVX0IIO63uW5yTmnbU7du3cXHoC6ePGiqG6oq6uL+fPni9ZXvn5mZGTgwoULuT5Uk1td6O3bt7mOpOHo6CiqG48fPx52dnZYs2aN2rnbFM6cOYPvvvtOpddapUqV4Ovri4sXL+LZs2eIj49XO9xoZmYmnj9/nuuQsyXtd5abt2/f4p9//hHFvX37dmEEDwAqw16fOXPmg5J+xsbG6Nu3L7Zt2wYge7hjRR3e2toaXbt2FZKrJSVmhV69eqk8xJef60xh7kvK9aSkpCQsWrQIjRs3Fq5N5ubmaNiwocpIHMpxKV/bNPmgHhERUWEx6UdERCXWqVOnYG9vX+DtjIyMVBqrcvZqUn5aVPkJT8Xn5iUiIgIymUztfClSqTTXfxQqJ5UAqMwFZGRkhKpVqwrDxKkzYsQIIemXkZGB3bt3w8vLSzS0p52dnahR9+3bt6KG9ZyfqSznHEvKvXAUr5Xn1ztw4ACmTp0qWqdly5ZCD5v79+8jISFBSP4ZGRnh66+/FnoDXbt2DdHR0aLvI+c8Zlu3bs13r6qcT8nnlNsQmTnLgGJ+nLzk3CY36nqWxcTE5Dp8WM4GFEB1nrTCksvlGDdunEqiOzc552/JKWejas55BwvSc6AwUlJSMHr0aNFT3f3791dJIgDZvUp27NiRr/2+rxwVRnh4uCgJYWtrq3JdqlmzJqRSqdDwmZiYiNjYWLVlqKDXueJWp04dLFmyROihGxsbKypPGRkZH/Qbs7OzK9T9QSFnI3XNmjVVruk5r9Gaiqdq1apCshaASg8WR0dH0euc7yt/r6mpqRg6dKgooZwXTf6mX7x4IXpP0QMwLznP4b1793Dv3r33bpOfpF/O62TOe+77aLpMaPJ7VidnLIrhr+Pj4wFk/8YiIyNRpUoVxMTE4Isvvsi1p0pOeV0D86rn5EVfXx/ffvstli1bBiB7OMCQkBDhfRMTEzRp0gQDBgwQ9dLO+b3kfNBAT08Pjo6OooTty5cvc036afK+FRcXl+e+CkNHRwcDBw4U3cdylrNnz56997vMq2zmVhdKTk7O9bqsGD5aWd++fdGnTx/cunUL165dQ3BwMG7evCmaXxqAMF+totw8fvwYQ4YMyXdyJK/yWJJ+Z3l5+fKlqA4QHx+vsXpmXr788kts374dWVlZQsIPyH448H1JNG3FDOReRt+nMPelzp07Y8uWLcK9aNeuXaIe6fb29vD09MTw4cNzvc+bmJgISb+c1wUiIiJtYNKPiIg+Ourm11GXoCusrKwspKamqh1qyMrKKtdeOTmpW+99w0LVrVsXLVq0EJ5K/+OPP9C7d2/R8Jj9+/fPdwzK0tPThYSiwrZt27Bz507hdc5eAQEBAZg0aRL09P6rUrRo0UJI+slkMty8eVPo6dewYUPUrl0blpaWiImJEZJ+ypSHSYqMjFQZbs3GxgZOTk5CI8/7GiCU5TXUVUEpz1GSl7p16wpDmioEBQXlmvRTfqoayG7QzGteqpw9UXOeT2UnTpxQSfg5OTnB3t4eUqlUZe669/WUydmrQZO/s/dJT0+Hl5eXaB6/Hj16YM6cOSq/o9u3b6sk/BwcHISh7lJSUnIdTrioFHaeHoWivs7lNwZFw5xEIoGxsTHs7e3RtGlT0XyjhZVbr2fgw3/LOcv2h34fBYlHuZcSoHovMDc3z/e+/Pz8RAk/xXxLlStXhq6uLsLDw0VJ8ff9pnOWq+IuU+rk91qbc67UqKgohIaGol69evnaXtNlQpPf84das2aNKEmkp6cHFxcXWFpaQkdHR2W+1bzKSUHqOTmNGjUKDRo0wL59+/DPP//g1
atXwmcphik/c+YMfvrpJ2HowYL22HwfTd63cn7H70uqK1Oe01YqlcLMzAy1atVC+/bt85xjLr/y+t1osi4kkUjg5uYmzGGWlZWFs2fPYtKkSaJr+OPHj4Wk35IlS0QJv3LlysHFxQUWFhaQSCQqc7LmVQZK0u9M0/K6B+ZXlSpV0LZtW5We9sojgmiSJmIGNFtG38fAwAC7du3Cnj17cPLkSYSGhooSzS9fvoSfnx+OHDmCAwcOqB06Wvm3n7NMEhERaQOTfkREVObZ29sLw5dJJBKcO3eu0P/YzKshzNbWVjRsYFhYmGjy9+Tk5HxNVD9ixAgh6RcZGYlJkyYJw6VJpVL069dPtH6zZs1EQ3bm5syZMypPZ6t7sltZVFQUzp8/Lxo6J+e8fsePH0dYWBiA/57cbdKkCY4fP47r16+LklTW1taoVauW8PrWrVvCHIYA0LZtW/j6+gqNsZGRkQVK+uX2/eR8crd169bYtGlTvvebFzMzMzRr1gyXLl0Slvn7+6Nbt24q68rlcpXkVOvWrUUNCMpDnwLZPaiqVasmvL5582ausSgnyABg8uTJGDlypPD6yJEjoqSfpn1oI7pCZmYmfvjhB9GQuZ988gkWL16s9jvOeU4GDRokGsIsKCgoz6SfJuLO2Uj06tUrJCYminqGPHnyRDS8mbGxcYGGiytu6uZXzI2FhQWMjY2FIehMTExw+fLlAg3JqaywSQeFnL/5sLAwlR7cOXtd59WT70PjKaycv+nly5eLri3r168XJf00qUqVKsK1Hcgexvl9PfJynsOcc+h+iEaNGqFy5cqiOXqXLl2KzZs35/r9yOVyZGRkQF9fX+Nloqg9ePBAlOh88+aN0PsIyL5XWFtbA1AtJ/7+/nBxcRFez5w5U5T0y8uHlvUWLVoIPfpTU1MRERGBoKAgzJ07V0gYKOakA7LPsXI5e/jwoWho0czMTNH7im2Kg6Ghoei6lrMOlZeCzGmb83g+//xzzJ49O9+flVNedaH81BcTEhKgp6endh5RHR0dtG/fHq1atRINtahcd1Euj/r6+jh27JhoiMURI0bkOWRocSrI7ywvdnZ2kEgkQgKzRo0awjyaRW3o0KGipF/37t3znFtaQZsxF/Y6U5j7EpCdeB4yZIgwP3RsbCyeP3+OvXv3CnMWxsXFYf/+/SrDpKelpYmSnfk5t0REREVNO/86JSIiKkGUh5GSy+WYM2eO2qe179+/j5UrV8Lf379Qn+Ph4SF6vWXLFrx9+1Z4/euvv+brCdnWrVuL5tFTno+jQ4cO+Wp8UEfRO+9Dt7O2thbFFxAQIDQYKIbZUST/oqKiRI0/OYf2zDm/i4GBgZCASU9Px+LFiwsVc0716tUTNSJevHhRGIJUWVpaGgIDAzFhwgRRw/L7jB07VpQ4unbtGubNm6cyTN///vc/0XBnenp6Ko0LORPSu3btEs7v3r17RXPC5aScQAUgarCLiooSDd1aFHLOeVOYOf+ysrIwbdo0UY/F1q1bY/ny5aIep8pyliPl405ISMDy5cvz/EzlocOAwsVtaWkpamRPT0/H8uXLhYR9enq6Sq9WT09PjSaTfHx84OzsLPyXczjfoqSjoyN6OCAxMRGLFi1SGVpNLpfj1q1bmD9/vsrcPJpUr1490bXyzZs32LJli/A6Pj4eq1evFm3Ttm3bIounsPL6TT99+lQ095KmdejQQfR648aNojklgezrmvLw023bthVdC7du3YrQ0FCVfb99+xb79+/Pdb48dfT19VXm8rx06RImTJigMmyyXC7H+fPnMWjQICHZVdrKxK+//ir05pLJZCrXMXd3d+HalbOcKF/TgoKCRN9RUfL19UVISIhwzypXrhyqV6+OHj16iIZnVf6+cp7jNWvWiHrhbN68WTS0Z8WKFfPdu1MTlD8rLCxM4z0Tgey6kfJv++DBgyrzRAPZ19Vjx46J5qIsCg8ePEDbtm2xfPly0Xy6Cq9evRLN1QyIh9lULo86O
jqi8vj333+LHpLStoL8zvJiaWkp9IYEsh/y2bBhg8poDZmZmbhy5QpmzJiR72Gb36dFixZwd3eHhYUFLCwshIR6ccSsifpTQRTmvnTv3j3s2rVLFJuFhQVcXFxU5idUNwR/zkR5bkMLExERFSf29CMiohJr9uzZap8iBoAuXbqo7SlVGF9//TX2798v/EPu77//xsWLF1GvXj2YmpoiISEBYWFhQq83Ly+vQn1O3759sWnTJqFn25MnT9ClSxfUr18fERERol6A7zNixAiVufQAFHq4HkWPPQWpVIqLFy+qHRbp7du38PDwEP7Rf/r0acTGxop6JLVo0UJoCFI07hgaGgr/EFaeq0OR9ADEQ3sCgIuLC3R0dIR1Tpw4gZ49e8LGxgZ3797NcyjLgtDR0cGUKVMwefJkIaapU6fi119/RY0aNaCjo4PIyEg8fvxYSFJMmTIl3/tv3LgxJk6cKMxlBAA7duxAQEAA6tevDyB7fiPlp8cBYMaMGSqNl8pzJgLZ8zAqnuB+Xy8DNzc3UdJ6/vz5OHbsGPT19REcHJzvYfQKq0aNGqLXa9euxfXr14Xebr/88otKYjAnPz8/BAQEiJbp6OgI352ywYMHo1mzZqJGKyA74X79+nVYWFjg9u3b751/pVq1aqJyeOnSJQwcOFBIFI8aNUr4HvMyceJEDB8+XNiPn58fzp07BwcHBzx8+FDU4GRoaFjoa01J5eXlhdOnTwsPN/j5+eHPP/+Es7MzjI2N8e7dO4SFhQkN+nXq1CmyWHR1dTFx4kRMnz5dWLZ06VIcPHgQlStXRmhoqKins6WlJYYPH15k8RSWq6urqJfquHHj0LhxY8hkMgQFBakkezSpd+/e2LlzJ+7evQsguyF1zJgxcHBwgIODA5KTk3Hnzh2UL18en376KYDsefL69++P3bt3A8i+n/Tp0we1a9eGjY0NMjIy8PLlSzx//hxZWVlqh1HLS69evfDgwQNRsu6vv/7C6dOnUb9+fVhaWiIhIQEPHz5UuV6WtjJx5coVfPLJJ3B2dsazZ89U5tJS7kHp6uoq6sk3cOBANGrUCImJibh161aRJKrU2bRpE1asWAELCwvUqFEDFhYWkMlkuHv3rqgxXXm+0uHDh2P//v3CQ1JBQUH45JNPUK9ePbx580YYqUFh0qRJxdrztnnz5sIQ6wkJCXj8+LHKPHIfytzcHGPGjMGKFSsAZP/WRowYgRo1aqBKlSrIysrC69ev8fTp0yL9zSuLjY3F+vXrsX79epQvXx61atWCiYkJ4uLiEBISInrYpm7duqK6jKurK65evSocS9euXeHq6oro6GiEhoZqbFQATSjI7+x9Jk2ahGHDhgnf0bJly7B9+3Y4OTlBX18f0dHRCAsLE+pin332mcaOQ3mOuoL40Jhr1Kihcu2pU6cOpFIp3NzcNH4NLcx9KTw8HLNmzYK3tzeqVq0Ke3t7GBoaIi4uTiWJmXMuZUD88CWgOuIJERGRNjDpR0REJVZew+0pDwH5ocqXL48tW7Zg3Lhxwpw3ycnJuQ5zWNj5X8zMzLB8+XKMHj1a+MdxXFwcLl68CCB7vru0tDRRr4ecQzkqdO/eHStWrBD1NnNwcFDpKZdfhw8fFjUUtWrVKtd5UCpUqIDmzZsLcWdkZODIkSP48ssvhXVatmyJ3377TbSdu7u7cDyOjo6oUKGCqKcjoNrTz97eHl999RW2bt0qLHv48KGQUJw6darGevv17NkTsbGxWLx4sdBYFR4enusQUwUtB6NGjYKVlRXmz58v9CSNjY1V+7S+mZkZZs6ciZ49e6q817VrV2zfvh23b98Wlikar42NjdG5c+dce3B1794dO3fuFBoxsrKyhJ6W5cqVw/jx47Fq1aoCHVdBeHh4wNbWFq9evQKQ3btNUY4AYOHChe9N+qlLbAYGBqpdV9FDpEmTJujUqRP++usv4T3F+dPV1cWkSZNUetkpMzc3R6dOnXD8+HFhmXIvht69e+cZs
0KLFi2wcOFCzJo1C6mpqQCAFy9e4MWLF6L1LCwssGzZMrWNS6VZ9erVsX79ekycOFFo4I+NjRUaf3Mq6vnk+vTpg+joaKxcuVJ4iCEsLExlqEBbW1usXr0aFSpUKNJ4CmPIkCE4dOgQnj9/DiD7eqwY/tnCwgIDBgzAhg0biuSz9fT0sHHjRowfP140hO6zZ89E88flnCdw5syZSE9PF/Wmvn//vsrQmUDhysDUqVNRvXp1LF68WLjWZmZmqvQ8UlBOMJSmMvHNN99g06ZNanuejBkzBq1btxZef/fdd8IDOkB2HUfxoE/VqlXRqlWrQo9iUBixsbEq89cqlCtXTvRQk6WlJTZt2gQvLy/h3vHu3TuVe6euri6+//579OrVq8jiVqdLly6iYY4vXLig8aQfkP2dJiYmYvPmzcKDI0+ePFH7wFhRXztzJuXevXsnmltama2tLZYvXy7aZvLkyfjyyy+FeaJjY2OF+7iLiwtsbW1F91ttKsjv7H2aNGmCpUuX4ueffxauTVFRUWr3DZSMOVU/NOb+/fuLeu1HREQgIiKiyOIt7H0JyO4B/u+//+Y61UK9evXQv39/leXKPVNtbW3Z04+IiEoEJv2IiIgAODk54dChQwgICMDff/+Ne/fuITY2FnK5HObm5qhatSrc3NzQpk2bfM0NkZtmzZph37598PHxweXLl5GcnAx7e3t8+umnGDFiBD755BNhXT09vVwbFKVSKYYMGYJffvlFWFbYXn4AVIayfF8vyq5du4qSNfv37xcl/Zo0aQKpVCp60lu5d59EIkGTJk1E8/HVrFlTNMSmgqIBd+fOnXjy5AkMDAxQp04dfP3112jfvr3Gkn5AdgN627Zt8ccff+DKlSt4/vw5EhMToa+vD2trazg6OqJJkyb45JNPYGNjU+D99+nTB506dcL+/ftx8eJF3L9/H7GxsUICCMhu8Ny/fz+qVKmidh9SqRRbt27FqlWrcPLkSURHR8PCwgKtW7eGl5cXrl69mmvSTyqVYtu2bVizZg2OHTuGyMhImJqaokmTJhg3bpxKElbTDAwM8Ntvv2HlypW4du0a3r59qzJMVFFZsWIFtmzZgv379+Ply5cwNjaGi4sLxowZg0qVKuWZ9AOABQsWwNbWFidPnkRERITKkKH51atXLzRp0gT+/v64dOkSnj9/jpSUFBgbG6N69epo3bo1Bg0aJBrq7mPStGlTHDt2DPv27cPZs2fx8OFDxMfHQyKRoHz58nBwcIC7uzvat28PV1fXIo9n1KhRaN++Pfz9/XHt2jWEh4cjLS0NpqamqFWrFjp06IB+/fqJ5l4sSczNzfHHH39g5cqVOHPmDN69e4fy5cvDw8MDEyZMKPJh8qysrPD777/j1KlTOHLkCG7fvo2YmBhkZWWhQoUKcHR0FA2hDWRfhxYvXowBAwZg3759CA4ORkREBNLS0mBoaAhbW1vUrl0bzZs3R8eOHQsV14ABA9CtWzccOnQIFy9eFO7p6enpMDExQfXq1dGwYUN06dIFtWvXFm1bWsrElClT4OLigu3btwu9WpydnTFs2DB06dJFtG6VKlWwd+9erFy5EhcvXkRiYiIqVqyI9u3bY9y4cUU6DKyyJUuW4MaNG7h16xYiIiKE+5+RkRHs7e3RtGlTfPnll6hatapou3r16iEgIAB79+7F6dOn8fDhQyQkJEBfXx92dnZo2rQpBg0apNEHwvKrZs2aaNWqlVAnOnz4MIYNG1YknzV58mT06NEDe/bswY0bN/Dy5UukpKTAwMAAlStXhpOTE5o2bYpOnToVyecrNGrUCIcOHcKFCxcQEhKCJ0+e4M2bN0hOToZEIoG5uTkcHR3Rrl079O/fH8bGxqLtXVxc8Mcff+DXX3/FjRs3kJqaCltbW3Tr1g1jxozBrFmzijT+gijI7yw/unbtisaNG2P37t24ePEiHj9+jMTEROjq6sLKygrVq1dHo0aN0LFjR9FQ+dr0ITF7enpixYoV2
L59Ox48eJCvaQw+VEHvSw0bNsTs2bMRHByMu3fv4u3bt8IDEhYWFsJ1v3///ioPpr19+1b0AMKgQYNKRLKWiIhIIi+usTyIiIgIb9++Rbly5WBkZKTy3u7du/G///1PeN2yZUtRD7ecFi9eLAxhVq5cOQQGBoqG2KTS4/79+xg8eLDwFHXDhg2xdevWfM0TQ0REH7/27duLep7nnEeKtOfixYuiYQoPHz4MZ2dnLUZEhcXfGRXEtm3bsHDhQgCAiYkJTp06xX+LERFRicCefkRERMXo7Nmz8Pb2RpMmTVClShWUL18ecXFxuHPnjmjeCD09PYwfP15l+6NHjyI8PBzPnj0T9eYaMGAA/5FZitWuXRs+Pj4YNWoUMjIy8M8//2DChAlYs2YN9PRYXSMiIiqpWrVqBU9PT2GISl9fX2H+PSL6OKWnp4sezhw7diz/LUZERCUGW5GIiIiKWVpamtp53BTMzMwwb948uLu7q7ynGG5MmYODg9oEIZUuLVu2xLx584S5jM6ePYuffvoJixYtUpk/h4iIiEqOGTNm4NKlS8jIyMDx48cxduzYIpnbj4hKhr179wpzq1evXh1DhgzRckRERET/4fCeRERExejFixfYvXs3bt68iZcvXyI2NhZZWVmwsLBAzZo14eHhgb59++Y6l9+QIUNw7do16OrqonLlymjXrh2+++67j3b+LyIiIuKwg0TFgb8zIiIi+hgw6UdERERERERERERERERUyuloOwAiIiIiIiIiIiIiIiIi+jBM+hERERERERERERERERGVckz6EREREREREREREREREZVyTPoRERERERERERERERERlXJM+hERERERERERERERERGVckz6EREREREREREREREREZVyTPoRERERERERERERERERlXJM+hERERERERERERERERGVckz6EREREREREREREREREZVyTPoREVGp0759ezg7O8PZ2VnboRAR0Ufo2bNnGDNmDFq0aCHcb06ePKntsD4K+/fvF86pj4+PtsPRqKtXrwrHNm3aNG2HQ0RERPTRGzJkiFD/evnypbbDISoR9LQdANHHzMfHB6tXrxZet2rVClu2bBGtc+fOHfTt21e0LCQkBAYGBsUS44dSTrqcOnUK9vb2GtmvohHI1NQUw4YN08g+6f3279+P6dOni5bp6OjA1NQUNWrUQI8ePTBo0CDo6uoWav/37t0TGk2bNm2KZs2afXDMREREgLje1bt3byxatKhQ+5HJZPDy8sKjR480Gd5HqyAP4Jw6daoII6Gcrl69imvXrgEAOnbsiDp16mg5IiIi+tjkbPfKydTUFDdu3CjGiCg/Dh06hF27duHBgwdIT0+HhYUFKlasiHr16qFv375wc3PTdohE9AGY9CMqRpcvX0Z4eDjs7OyEZbt379ZiRCWXotJoZ2fHpJ+WZWVlIS4uDkFBQQgKCsKTJ08wc+bMQu3r3r17wnfr5eXFpB8REZU4L1++FBJ+Dg4OmDlzJgwMDODo6KjlyIgK5tq1a6I6NZN+REREtHr1apXRFqKiohAVFYXQ0FDY2Ngw6UdUyjHpR1SMsrKysHfvXkyYMAEAkJycjCNHjmg5KipLsrKykJGRka+epHXq1MHPP/8MmUyGP//8E3/88QcAYM+ePZg6dWqp6Y1KRERUEJGRkcLfbm5uaNWqlcY/Izk5GUZGRhrfrzb4+fmJXg8ePFj4e9WqVbCyshJeV6xYUWOfm56eDh0dHejp8Z+0REREJUGbNm0wevRo0bKScJ/+mOpd+ZHX8SYlJWH9+vUAgHLlymHChAmoU6cOYmNj8e+//+LMmTOQSCTFGS4RFQHO6UdUTIyNjQFkD5+YlZUFADh69CiSkpKE93Jz8uRJDBs2DE2aNEH9+vXRuXNnrF69GqmpqSrrjRkzBu3bt4e7uzvq16+Pdu3aYfr06SrjWk+bNk0Y8/rChQtYtWoV2rRpgwYNGuDzzz/H/fv3NXj0wMOHDzFp0
iR069YNTZs2Rb169dCiRQuMGjUK169fF9bz8fERDRMVHh4uxNm+fXtheUZGBrZu3Yo+ffrAzc0Nbm5u6N+/Pw4dOqTy2crbK+bocXd3R9OmTTFz5kykpaWpbHPkyBEMGTJEOOft27fHlClTkJCQgD179gj7/PXXX0XbnTx5Unhv7ty5eZ4T5XHHHzx4gNmzZ6N58+Zwc3PD6NGj8fz5c5Vt7t+/j4kTJ8LDwwP169dH69at8dNPP+H169ei9RTn0dnZGXv37sXatWvRrl071KtXD8HBwXnGpWBqaorGjRujWbNmonlp0tPTER8fL7zes2cPRowYgbZt28LNzQ0NGjRAp06dMHfuXLx9+1ZYr3379qKhQ1evXq12Tp+IiAjMmTMHn3zyCRo0aIAmTZpg4MCBOHr0qNo43759i2nTpqFJkyZwd3fH999/j9jY2HwdIxERffyU74n79u3Dtm3b8Mknn6B+/fr49NNPcfnyZWHdIUOG4MsvvxReHzx4UGWOkKSkJPj4+KBHjx5wcXFBw4YNMWTIEAQGBoo+9+XLl8K2Q4YMwfXr1zFw4EC4uLhgzpw5wnqFube/7zgU8ntPzW8M6jRu3Fj0n7L69euL3tPX11fZ/tixY+jZs6dQx80Zm3KdNTAwEIsWLYKHhwdcXFyE+NLT07FhwwZ89tlncHNzg6urKz799FNs2LAB6enpov2pq1cCuc8Hk5qaivnz56N58+Zwd3fHmDFj8PLly3zNL3zlyhUMGDAADRo0QNu2bbF9+3bR++q+044dO6JBgwbo06cPLl68mK8Ylfezf/9+4TiVh1ubPn26yjpERESaZGlpqVIvUO4xlnPu2/Pnz6Nv37653icBzde7/Pz80LFjR7i4uKBfv364fPmyqK5x9epVJCYmws3NTagvyOVyYXuZTIbmzZvD2dkZzZo1Q0ZGRq7nI+c8xocOHUL37t3RoEEDdOvWDQEBAUV+vDmFhYUJdaM2bdpg+PDhaNGiBbp27YoxY8bgjz/+UBlta9GiRfj888+FeqK7uzt69+6NzZs3IzMzU7Sucj3r/v37GDx4MFxdXdGlSxccP34cAHD8+HF079491zqs8vdx8eJFrFy5Eq1bt4aLiwsGDx6M0NDQXI8vp/y2pxJ9bLT/uAVRGdG5c2cEBATg9evXOH/+PDw9PYWhPXv06CH0ospp1apVWLt2rWjZs2fP4OPjg8uXL2Pr1q1CA8q5c+dw5swZ0bqvXr3C/v37ce7cORw+fBiWlpYqn+Ht7Y0XL14Ir4OCgvDdd9/hr7/+0thTWY8ePVLp1fj27VsEBgbi/Pnz2Lp1K5o3b56vfWVkZGDkyJEqFYOQkBD8+OOPePjwIaZMmaKyXVxcHAYOHChKCP3xxx8oX748fvjhB2HZjBkzsG/fPtG24eHhCA8Px4QJE9CtWzcsWLBA6Kk5fvx4Yb3Tp08Lf/fo0SNfxwMAEyZMwNOnT4XXZ8+exb1793Do0CGUL18eABAYGAgvLy9R41VkZCT27t2LwMBA+Pv7o0qVKir79vX1FX2/BZWZmYm//vpLeG1paYkKFSoIr48fP44LFy6Itvn333/x77//4vLlyzhw4EC+ewXeu3cPw4YNE31H6enpCA4ORvXq1dGtWzeVbb788ks8fvxYeH3s2DHo6elh6dKl+T1EIiIqI9atWye6Jz548ABjx47FmTNnYG5u/t7tExIS8MUXX+Dhw4fCsrS0NFy7dg3Xrl3DzJkzRT3dFJ49e4YRI0aoPGhU2Ht7fo4jv/fUwsagCUePHsWTJ0+E18+ePcOkSZNQu3Zt1KhRQ2X9uXPnqtRp0tPTMXz4cNFDZED2OXnw4AHOnTuHLVu2qE045sfEiRNFcxGeOXMG9+/fR0pKSp7b/fPPPwgICBAawyIiIjB//nw4OjqiZcuWKutv3LhRVBcMDQ3F6NGjsW3bNpVkKhERUWl37do1HDp0SHgoXt19UtP1rm3btmHhwoXC69u3b2PkyJGoV
q2aaD0TExN06dIFBw4cQHh4OG7evCnci4OCgvDu3TsA2e18Uqk0X8f7559/iu7zjx8/xuTJkyGRSIS2I00frzrKnQ4uXbqEXbt2oW3btqhcubKw3NDQULSNn5+fqJ6YkZGBu3fv4u7duwgLCxOdU4WEhAR89dVXQj306dOn+P777zFmzBisW7dOWO99dfG5c+eKztuNGzcwdOhQ7N27F9WrV8/zWAvSnkr0sWFPP6JiYmlpibZt2wLI7hn14MED3Lp1CwDQr18/tduEhIQINyhra2vMnz8fmzZtEvZz48YNbNu2TVjfw8MDc+bMga+vL3bs2IFNmzZh+PDhAIDo6Gjs2bNH7ee8fv0akydPxurVq2FjYwMgO8mVM5HzIapXr45p06ZhzZo1+O2337Bt2zZ4e3tDX18fWVlZ2LBhAwCgb9++omGirK2t4efnBz8/P6xatQoAsH37diHh5+bmhjVr1uDXX38VbvibNm0Szq2yxMREVKhQAT4+PsIQqwBECdcTJ04ICT9dXV0MHz4cGzZswOLFi9GqVStIJBIYGxuja9euALKTW4rPysrKEp6+srOzg7u7e77PT2xsLBYuXIhVq1YJDWtv3rwRhl1ISUnBtGnTkJ6eDj09Pfzwww/YsmULvvnmGwDZ46/Pnj1b7b5fvHiBnj17CsdRqVKlfMV07do1ODs7o169epg6dSoAwMjICLNnz4aurq6wniIJumHDBuzYsQMbNmxAr169AGRXZBUJw1WrVmHMmDHCdn369BG+2759+0Iul+PHH38UKoVOTk5YsmQJNmzYgLFjx8LCwkJtnPHx8fjll18wa9YsocJ99OhRJCQk5Os4iYio7Hjx4gVGjhyJdevWoXbt2gCyn6hWPJj0888/4+effxbWb9OmjXCvqlixIlasWCE0xHh6egr3VmtrawDAwoULERERofK5kZGRqFy5Mn755Rds2LABHTt2/OB7e17Hkd976ofEoAlPnjxBv379sH79erRo0QJAdn0qtzrrixcvMGTIEGzatAlz5syBsbExtm3bJiT8bGxssGzZMixfvhy2trYAgOvXr4vqywVx4cIFIeFnYGCA6dOnY82aNahQocJ7RxX4999/4enpCV9fX3Tv3l1YvmvXLrXrP3/+HOPHj8f69evh4eEBILtRbcGCBYWK3c/PD3369BFejxkzRijLnp6ehdonERFRXg4cOCD00FLu0adOeHg42rVrl+d9UpP1rvj4eKxcuVJY54svvsCGDRvQuXNnhIWFqexDuZ1OuUee8oNAynG/z9OnTzF06FBs2LABn376qbB80aJFQm9BTR5vbqpVqybUkRITEzFr1ix4enrC09MT06dPR1BQkMo2Y8aMwfLly7Fp0ybs2LEDq1evhqurK4Ds71zdyBDx8fFwcHDAunXrhPMkl8uxbt06dOjQAevXr0ejRo0AiOuwOb1+/Ro//fQT1qxZg/r16wtxL1++PNdjBArenkr0sWFPP6Ji1L9/f/z99984e/as0IPO2dkZLi4uatdXrlj07dsXDg4OAIDPP/8cZ8+eFdYZNWoUAKBp06bw9fXF1q1bERERodJd/c6dO2o/Z9CgQRg5ciSA7IrIsmXLAGQ3VmiKs7Mzrl+/Dl9fXzx58gTJycmiIRIUsdna2goVEADQ19dXebr58OHDwt/Dhg0TGq569uwpDLd5+PBhoRKibPny5ahTpw46deqEgIAAPHnyBO/evUNCQgJMTU1Fw4N+8803mDhxovBakcgCsiuAiuRgQEAAXF1dERISgujoaAAFq/wBwKRJk4SGGTMzM3z99dcAsocimDZtGi5evCgMldmyZUvhnLRr1w7Hjh0TkrRv374V9cIDgIYNG2qs15uBgQGSkpJEy1q2bIm1a9fi0qVLiIyMVBlG686dO+jZsycaNGiAR48eCcttbW1F3+29e/eECq6JiQl+++034Vjyapzy9vYWKrWnT5/G+fPnIZPJEB4eLjSEEhERAUCHDh0we
fJkANnDNip6+ivqPM7OzqJkjmKYKiA7GaVokJBKpfj6668hlUphbGyMTz75BDt37kRGRgaOHTsmPHSloKOjA19fX1HvtZMnTxb63v6+47h//36+7qkfUr/QhNq1a2P+/PkAgPLlywsPdakb4hzIHkVBOSkLQNRINGvWLLRr1w5A9oNKioeN/vzzT6G+XBAnT54U/h48eLAw3FWNGjWEB8ByY2lpiZUrV0JfXx8NGjTAn3/+CSD3Y+vWrRvGjh0LAGjUqBFat26NlJQUhIaGIiIiQngwL78aN24sGhWjWrVq7DFIREQlxvvuk5qudx09elTopV+vXj3MmjULANCqVSvcuHFDJXHVuHFjODg44NmzZzh+/Dh+/vlnSKVSYXStihUrokmTJvk+3oYNG+Knn34CkP3A/o0bN/Dq1StERUXh1q1baNiwoUaPNzdSqRRLlizBhAkTEBMTIyx//fo19u/fj/379+Onn37C0KFDhfeaN2+OzZs3IyQkBO/evRMN6SmXyxEaGirqKaiwePFiODg4wNraWvh+DQ0NsWTJEpiYmCA1NRU3b94EkHv741dffSXE4ujoiM6dOwPIHqkiIyMj156WBW1PJfrYMOlHVIxat24NGxsbRERE4NixYwCAAQMG5Lr+s2fPhL99fX3h6+urso5iSCSZTIavv/4ad+/ezXV/yvOwKWvatKnwt2IoSQAa7Sm1cOFC7Nixo8CxqaN8Xr7//nu16ygP96hgYmKCOnXqCK+Ve47Fx8fD1NRUtG/FE0DqNGzYEDVq1MCTJ09w9OhRTJ8+XTS0Z0GTfsqJX+W/w8PDIZfLRcMZnDt3DufOnVPZh1wux5MnT1Qa5RSNXwVVp04d/Pzzz8jKysL9+/exbNkyvHv3DtOnT4eTkxPq1q2LxMREfP7553nO+ZPf71b5GF1dXfPduKhc0c75nRIRESlTrvMo3zPyU+d59+4d4uLiAGT3wMo534mCujpItWrVVBpiPuTe/r7jyO899UNi0ISC3sNzzsMHiOuFyg98KdenlNcpCOWhRJX3V6NGDZibmwvlQR1XV1dhyKj8HJty7KampqhevbpQr3/x4kWBk35ERETFrU2bNhg9erRomZWVldp133ef1HS9S3kuXOV7rp6eHho0aKC2TaNv375YtmwZYmNjcf78edSoUUOoO3Xr1g06OvkfQE/5M3V1dVGvXj28evUKQPZ9vnr16ho93rw0adIEx48fx4kTJ3DmzBncvHlT9NDbsmXL0KtXL5iZmSEkJARfffVVnnMXqqtHm5mZCYk25e+3evXqMDExAZC/9kfl8+bg4CDUv9LS0hAZGQk7Ozu12xWkPZXoY8ThPYmKkY6OjmiYHQMDA1G3/sLIzMxEeno6/vnnH6FhwNraGosXL4afn5+oy7tyzzplZmZmwt/Kwzbmtn5BpaenC/MX6unpYdKkSdi+fTv8/PyEm7ymPktB3TwrOccHV56vsDCfrxjuISYmBhcuXBCSfrVq1dJaDzN1x61uHsf8MDU1RePGjdG0aVMMHTpU6OmYlZUlTMB88uRJoXJco0YNrFixAn5+fpg+fbqwH01/tzkpf68f+p0SEdHHrajrPID6e3FuDV6F3V9xHMf7YtAE5Xu48nHkpiB1GolEkuf7MplM9FoxP09h95dTbvWT/Hrf5ynmQALeHzsREVFxUIyQoPyfIvGT04feJxUKU+/K7z29d+/eQmyHDx8WDe2pmIevsApar1DQVD3TzMwM/fv3x9q1a3H58mVs2LAB5cqVA5A9ioQiIebv7y8k/Nq1a4cNGzbAz89PNBKWcp1EwdTUVPhb+VgVCb+c8luHLex5U0fRnkr0MWJPP6Ji1rdvX6xbtw5ZWVno1KmTqNEmJwcHB+GJ64ULF4oShgopKSnQ19fHmzdvhGU9e/YUbsCKLvTaFBsbK0wo7OzsLHSff/PmTa5PSEskEsjlcrWVBwcHB9y/fx9AdtJJMQeessI2Tjk4OAhPTgUGBqJhw
4a5rturVy+sWLECGRkZ8PX1FYauLGgvPyB7vHFnZ2fhbwU7OztIJBLRBMW9e/fGokWLVPaRkpKiMuEyoLlKkXIlTPEUmHK5Gzx4MLp16wYA+Oeff9TuQ/lJuJzfrfIxhoSEFNlQYkRERIVRvnx54eliIyMjXLhwAcbGxqJ1srKy1D4Jre5e/CH39vfJ7z21KGMoCurOo4ODAx48eAAg+1gVIzUoz++s3OBoamqKhIQExMbGCsNCvXz5Uu3T3lWrVhX+vn37Nrp06QIg+8nwvHr5FYZy/S8hIUHUC1NR11VuQIuKikLVqlWRlZWFixcvqt2n8vlSV6cmIiIqqTRd71JuN7p9+7bwd2Zmpui1Mmtra7Rp0wanT5/GmTNnhKFHq1WrhgYNGhToeJTv8zKZTDT9TpUqVTR+vLmJjY3Fs2fP4ObmJizT0dGBp6cnatSoIXQmUDwcFRkZKaw3ceJEODk5AQDWrVuX78/8ECEhIcJID//++6/QFmVgYICKFSvmul1B2lOJPkZM+hEVMzs7O8ycORPR0dHCWNS56dmzJ7Zv3w4g+yYVFxcHZ2dnxMfH4/nz57h48SJsbW2xcOFC0Tx4J06cQKNGjRAXFyfMz1ccNm7cKGqMALLHSu/cuTMMDAyQlpaGhw8f4o8//oClpSXWrl2bawOEubk5YmNjERkZicOHD8PW1hZWVlZwcHBAz549haTfmDFj8M0336By5cqIjIzEkydPcPr0aXz99ddqb+rv8+mnnwpPb23atAmZmZlo1qwZYmNjcfjwYcyePVsYPsDS0hJt27bF33//LUpyFSbpt3z5cujp6cHQ0FDUO7NDhw4AsufZqVChAt6+fYuDBw/C3NwcLVu2RFZWFsLDw/HPP//g/v37OHr0aIE/OzcJCQm4ceMG5HI5Hjx4IJpLUdFIqFzu9u3bhypVquDff//NtQKonOQ+f/48mjRpAn19fTg7O6N27dpwcnLCw4cPkZCQgGHDhuGbb76Bubk5QkNDER8fn+sk4EREREVNR0cH3bt3x86dO5GcnIwRI0ZgyJAhKF++PF6/fo1Hjx7hr7/+woIFC9CsWbP37q8o7+35vadqo36haT169BCSfnPmzEFSUhIkEoloPmPlulnVqlURGhqK1NRUTJo0CU2aNMHOnTtVev4BQMeOHbFz504AgJ+fHypXrgwbGxusXbtW48fx559/okaNGqhbty5+//13JCcnAwDq1q0rDO1ZrVo1Yf158+ahX79+OHv2bK7Dlyr3ovjrr79gb28PPT09uLi4sJGLiIg0LiYmBjdu3FBZXpj7jqbrXa1atYKhoSFSUlIQEhKC+fPnw8PDA4cPH85zupJ+/frh9OnTSE1NRWhoKIDCtfncvHkTCxcuRMuWLXH06FFhaE8rKyu4urpq/HhzExcXh4EDB8LNzQ0dO3aEk5MTpFIprly5gnv37gGA0EYDiNt81q9fj969e+PcuXO4cOFCoWMoiN9++w1WVlawsbERDdHZpk2bXOfzAwrWnkr0MWLSj0gLBg0alK/1XFxc8N1332Ht2rWIj49X+/R17969AWSPc+3s7IwHDx4gPDwcY8eOBZA995zy5LxFadeuXWrj69q1K/r16wc/Pz9kZGRg5syZALKfvLG0tFQbX7NmzXDixAnIZDJMmTJF2NeiRYswdOhQXLhwAZcvX0ZYWJhGE0FdunRB7969ceDAAWRmZmLTpk3YtGmT8H7OIQf69euHv//+W3jt4uIieio8v6ytrVWOw9raWhgP38jICIsWLYKXlxfS09Oxbds2bNu2TbR+bmOZF9a9e/cwePBgleW2trZCQrVdu3awtrZGVFQU7t69K/TibNiwodrefm5ubtDX10d6ejpu376Nr7/+GgCwfft2NGvWDIsWLcKwYcMQHx+PBw8eCN898F9ZJyIi0pYffvgBN27cwMOHDxEUFISgoKBC76so7+0SiSRf91Rt1C80bdiwYQgMDMSNGzcQHh6OiRMnit5v0
qSJaF6cgQMHCnXREydO4MSJEzAyMkLlypVVGv1atWqFDh064NSpU0hJScG8efMAAJUqVYKFhYVo/psP5ejoiJUrV4qW6enpieqH/fr1w7Zt25CVlYW7d+9izpw5ACDMM51T06ZNhdEzAgMDERgYCAA4deoU7O3tNRY7ERERkPv8wIW972iy3mVmZobvv/9eSPJs374d27dvh1QqzfU+CgCenp5Cm4dCYYb2dHJyUlvP+vHHH4XklSaP932Cg4MRHBys9r3Ro0cLw3D2798fe/bsgVwux5EjR3DkyBFIJBK4u7sXaXwKVapUwdy5c0XLjIyM8MMPP+S5XUHaU4k+RpzTj6iEmzBhAtavX4/WrVvDwsICUqkUlSpVQqNGjTBp0iSMGzcOQPY8KBs2bECHDh1gamqKChUqYOjQoULjhLZNnToVX331FaytrWFkZIT27dtj27ZtwpjhOf3vf/9D165d1Q5Fpa+vj02bNuHnn3+Gi4sLjI2NYWBgAHt7e7Rt2xbz58/HJ598UuhYFy1ahCVLlqBp06YwNTWFVCqFra0tevbsqTIvYOvWrUVDChTmiS8gu6ffkCFDUKFCBZQrVw5t2rSBn5+f6Pg9PT2xb98+fPbZZ6hcuTKkUinKly+POnXq4Ouvv1ZpKNIkAwMDODg4YMiQIdi9e7dwHkxMTLB161Y0b94cRkZGqFSpEsaPH4/x48er3U+FChWwZs0a1K1bV+13X69ePRw6dAiDBg1ClSpVIJVKYWZmBjc3N7Rp06bIjo+IiCg/zMzM8Mcff2DChAmoXbs2ypUrB0NDQzg4OKBz585Yvny5aLik9ynKe3t+76narF9ogr6+PrZu3YpJkybB2dkZ5cqVg4GBAZycnDBp0iRs2bJF1Lugf//+GD16NCwtLVGuXDk0b94cO3fuzPWhLUUdzcLCAoaGhmjbti38/PyE0Spyq8sW1LBhwzBz5kxUrVoVUqkUdevWha+vr+hp/po1a2Lp0qWoVq0apFIpnJycsHLlSmF49ZycnZ2xePFi1KxZkz37iIio1NF0vUtxr7W3t4e+vj7q1auH9evXo0aNGsI6OYc019PTE81fV7t2bdSsWbPAx9KpUyesWLECtWrVglQqRfXq1bFkyRJ89tlnRXa86tja2mL16tUYOHAg6tSpA0tLS+jp6cHMzAxNmzbF0qVL4eXlJazv4uKC1atXw8nJCQYGBqhVqxZWrVqFVq1afVAc+TVt2jSMGzcOlSpVgr6+Pho1aoTt27fn6zvIb3sq0cdIIi+q2d6JiMqI6dOnY//+/dDR0UFgYGCe44orGzJkCK5duwaAT1wTERERlURyuVxlrpzHjx8LiTZnZ2fREOgF4ePjg9WrVwPIfb4ZIiIi0gx19/T09HR06tQJERERkEgkuHz5MsqXLy9a5/r16/jyyy8BAJMnT8bIkSPz9Xn79+/H9OnTAQBeXl5MMuXTtGnTcODAAQD/jQpFRAXD4T2JiApBLpcjOTkZL168EIZKatmyZb4TfkRERERU8i1evBjly5dHixYtYG1tjcePH2PJkiXC+7n1siMiIqKSJSAgAP/88w+6dOmCatWqISoqCps2bUJERASA7DYd5YRfamoqEhMT4e/vDyB7hK2ePXtqJXYiooJg0o+IqBDCw8PRoUMH4bVEIsG3336rxYiIiIiISNNiY2OxdetWte81btxYmJ+YiIiISrbMzEz4+/sLSTxl1tbW8Pb2Fi0bOXKkMDoTAPTt2xeVK1cu6jCJiD4Yk35ERB9AV1cXVatWxbhx49C4cWNth0NEREREGtSuXTu8fv0ajx49QlxcHAwMDODo6Iju3btj0KBBkEql2g6RiIiI8qFu3bro3Lkzbt++jejoaOjo6KBKlSpo06YNRowYAUtLS7XblS9fHp06dRKG6iQiKuk4px8RERERERERERERERFRKaej7QCIiIiIiIiIiIiIiIiI6MMw6UdERERERERERERERERUynFOv/+XlZWFz
MxM6OjoQCKRaDscIiIiKmHkcjmysrKgp6cHHR0+N5UT61JERESUF9al8sa6FBEREeUlv3UpJv3+X2ZmJm7fvq3tMIiIiKiEa9CgAfT19bUdRonDuhQRERHlB+tS6rEuRURERPnxvroUk37/T5EZbdCgAXR1dbUczcdNJpPh9u3bPNdUorGcUmnAclq8FOebT6arx7pU8eFvn0oDllMqDVhOixfrUnljXar48LdPpQHLKZUGLKfFK791KSb9/p9i6ARdXV0W0GLCc02lAcsplQYsp8WLwy2px7pU8eO5ptKA5ZRKA5bT4sW6lHqsSxU/nmsqDVhOqTRgOS1e76tL8fEqIiIiIiIiIiIiIiIiolKOST8iIiIiIiIiIiIiIiKiUo5JPyIiIiIiIiIiIiIiIqJSjkk/IiIiIiIiIiIiIiIiolKOST8iIiIiIiIiIiIiIiKiUo5JPyIiIiIiIiIiIiIiIqJSjkk/IiIiIiIiIiIiIiIiolKOST8iIiIiIiIiIiIiIiKiUo5JPyIiIiIiIiIiIiIiIqJSjkk/IiIiIiIiIiIiIiIiolKOST8iIiIiIiIiIiIiIiKiUo5JPyIiIiIiIiIiIiIiIqJSjkk/IiIiIiIiIiIiIiIiolKOST8iIiIiIiIiIiIiIiKiUo5JPyIiIqJS7s2bN5g8eTKaNWsGFxcX9OzZE7dv3xbel8vlWLVqFTw8PODi4oJhw4bh2bNnon3ExsZi0qRJaNiwIRo3bowZM2YgKSmpmI+EiIiIiIiIiIgKi0k/IiIiolIsLi4OgwYNglQqxcaNG/Hnn39i6tSpMDc3F9bZuHEjduzYAW9vb+zevRuGhoYYMWIE0tLShHUmT56MsLAwbN26Fb6+vrhx4wZmzpypjUMiIiIiIiIiIqJC0NN2APTxioiIQEREhMpymUyGhw8fIisrC7q6uirv29jYwMbGpjhCJCIiKvU2btyIypUrY+HChcKyKlWqCH/L5XJs374d3377LTp27AgAWLJkCVq2bImTJ0+ie/fuePz4Mc6fP4+9e/eiQYMGAICff/4Zo0aNwo8//ohKlSoV70ERUamRV53/yZMnrPMTEZHWsF2KiIjKIib9qMisX78es2fPLvB2s2bNgre3t+YDIiIi+gidPn0aHh4eGD9+PK5fv45KlSrhiy++wIABAwAAL1++RFRUFFq2bClsY2pqCldXVwQFBaF79+4ICgqCmZmZkPADgJYtW0JHRwchISH45JNP8h2PXC6HXC7X3AGSCsX55bkueeLS4pCYnqjtMIrVnAVzsHH1xgJvN9JrJGbOL1u9iU30TWBuYP7+FanY8HpavHiOqbixXYqIiMoiJv2KiTwlDUhL13YYxeqz9p+glp29aFlqWhq+GecFANjo4wNDg3Iq29V1rgN5bEKxxFhiGOhDYmig7SiIqIQKDg5GaGioyvKsrCyEh4fj7t270NFRHbG7Xr16cHNzK4YISZtevHgBf39/fP311xgzZgxu376NefPmQSqVonfv3oiKigIAWFpairaztLREdHQ0ACA6OhoVKlQQva+npwdzc3Nh+/yKj49XWx5Jc7KysgDwXJc0Ojo6iJPFYc21NYhKKtjvpjS79u+1Qm138d+L+N/J/2k4mpLL2tgaY5uOhU66jvAbJu3j9bR4sexTcRs9ejQ+/fRT0bKUlBR4eHgAAAIDA2FiYqKyHXv5ERFRacakX3FJS0fmxX8gT0zRdiTFZv+e3zFvn1+u748cN07t8p/7Dka9/l8WVVgljsTEEHqtGgJM+hHlS1l8iGKClxfOXbxY4O3atGqFs0eOFUFEJVgZfIhCLpejfv36mDhxIgCgbt26ePToEXbt2oXevXsXezxmZmZqh0kizZHJZAB4rkui+IR4vEt7h+jUaG2HUmxsO9vCspmlynK5XI6kpCQYGxtDIpGovG9gYVCmzpOenh50dXVhamqq7VBICa+nxUtxvkuyDRs2YNmyZRg6dCh++uknAEBaWhoWL
VqEo0ePIj09HR4eHpg1axasrKyE7V69egVvb29cvXoVRkZG6NWrFyZNmgQ9PTa7aZO6YTqTkpKEv93c3GBmZlbcYRERERUp1j6KkTwxBUhIev+KH4lvmrdFjzquKsvf1wBQ2bx8mTpPHOCEqIDK4EMUS3sOwl33VqpvyOVISU2FYblygJrraV37qsg4UfBkYWlVVh+isLa2Rs2aNUXLatSogRMnTgjvA0BMTAwqVqworBMTE4PatWsDAKysrPD27VvRPjIzMxEXFydsn18SiUTt/Z00R3F+ea5LHgkkgATZ/5UR5SqUQ7kKqqN3yOVySOIkMDM3YzkFkF00+JstaXg9LV4l/RyHhIRg165dcHZ2Fi1fsGABAgMDsXLlSpiammLu3Lnw8vLCrl27AGQnM0ePHg0rKyvs2rULkZGRmDp1KqRSqfBQFhERUWnGebxLFyb9qMjYWJSHjUV5leVyuRzx8fEwM2MDABEVTll7iMLVsjJcLSurLM/X9bQMnaey+hBFw4YN8fTpU9GyZ8+ewc7ODgBgb28Pa2trXL58GXXq1AEAJCYm4tatWxg0aBAAwN3dHfHx8bhz5w7q168PALhy5QqysrLg4uJSjEdDREREVPySkpIwZcoUzJs3D+vWrROWJyQkYN++fVi6dClatGgBIDsJ2K1bNwQHB8PNzQ0XLlxAWFgYtm7dCisrK9SpUwcTJkzA0qVL4eXlBX19fW0dllh6HJBZtuadVSs5WfhTJ/U1oFfGppfJjZ4JoM95Z4nypQxeT5cunIPlPhsKvN3EcaOwbFHZmse7JFxPmfQjIiIiKsW++uorDBo0CL6+vujatStCQkKwe/duzJkzB0D2U/VDhw7FunXrUK1aNdjb22PVqlWoWLEiOnbsCACoWbMmWrdujf/973+YPXs2MjIyMHfuXHTv3h2VKlXS5uERERERFbk5c+bA09MTLVu2FCX97ty5g4yMDLRs2VJYVrNmTdja2gpJv+DgYDg5OYmG+/Tw8IC3tzfCwsJQt27dYj2WXGUmAg9XA2VoaGW1UjL++zt0PmBYQpKy2lTOCnDy0nojNVGpURavp9FXCrndJSDEW5ORlGwl5HrKpB8RERFRKebi4oLVq1dj+fLlWLNmDezt7TFjxgx8+umnwjojR45ESkoKZs6cifj4eDRq1AibNm2CgcF/Q6EuXboUc+fOxVdffQUdHR106tQJP//8szYOiYiIiKjY/Pnnn7h79y727t2r8l50dDSkUqnKvG+WlpaIiooS1lFO+AEQXivWKQi5XA65vCjGsJBnN1Cnvi6CfZce8tTM//5OeQO5RKrFaEoSOVAk5Y4KS3EdKLprAhVe2bueTupbGV+0VT+iX17TeNlYGkBehs5TtqK7nub3WsCkHxEREVEp165dO7Rr1y7X9yUSCSZMmIAJEybkuo6FhQWWLVtWFOERERERlUgRERGYP38+tmzZInoYSpvi4+Oho6Oj0X3q6OjASCKDXJYJZGa8f4OPRERMGl6/TRMtS0n7L+l388FbGJVTbRqtXMEANpYlozwUC1kmJDIZkhMSkJWVpe1o6P8pvouiuCZQ4ZXV66m1uS6szY1UlsvlQHKyBEZGRsht1pnMMnSeivp6mt99MulHRERERERERERlTmhoKGJiYtCnTx9hmUwmw/Xr1+Hn54fNmzcjIyNDmEdbISYmBtbW1gCye/WFhISI9hsdnT3km2KdgjAzM4Ourm5hDidvKfGArh6gV3Z6tm0+9gRzfnuU6/vtJl5Xu3zmV7XgPcy5qMIqeXT1AF1dmBqaajsSUiKTyQAU4TWBCq8MXk9zo+h5pqenp7anX5lTxNdTxXXhfZj0IyIiIiIiIiKiMqd58+YICAgQLZs+fTpq1KiBkSNHwsbGBlKpFJcvX0bnzp0BAE+ePMGrV6/g5uYGAHBzc4Ovry9iYmJgaWkJALh06RJMTEzg6OhY4JgkEkkRNZyWvcbYMT2r4bOWlVWWyyFHYmISTEyMI
VFzXmwsDcrg2ZIg1246pBWK60DRXROo8Ph9CP7/VEgkPCv/KbrraX6vBUz6ERERERERERFRmWNiYgInJyfRMiMjI1hYWAjL+/bti0WLFsHc3BwmJiaYN28e3N3dhaSfh4cHHB0d8eOPP2LKlCmIiorCypUrMXjwYOjr6xf3IZESG8tysLEsp7JcDjni4iQwNzdTm/QjIiIqzZj0IyIiIiIiIiIiUmPGjBnQ0dHB+PHjkZ6eDg8PD8yaNUt4X1dXF76+vvD29sbAgQNhaGiI3r17Y/z48VqMmoiIiMoqJv2IiIiIiIiIiIgA7NixQ/TawMAAs2bNEiX6crKzs8PGjRuLOjQiIiKi99LRdgBERERERERERERERERE9GGY9CMiIiIiIiIiIiIiIiIq5Ti8JxERERFRCRQREYGIiAiV5TKZDA8fPkRWVhZ0dXVV3rexsYGNjU1xhEhEVCrwekpEREREZQWTfkREREREJdD69esxe/bsAm83a9YseHt7az4gIqJSaunSpVi+fHmBt5s4cSKWLVtWBBERERERERUNJv2IqEzL66nfJ0+e8KlfIiLSmtGjR+PTTz8VLUtJSYGHhwcAIDAwECYmJirb8f5ERHmJS41DYnqitsMoVgnpCYXeLjw+XMPRlGwm+iYwL2eu7TCIiIiIqJCY9CMigTwlDUhL13YYxWrp/AVYvmZ1gbebONYLS+ctKIKISjADfUgMDbQdBRGVUWWxkfpNwhtEJkaKlqWmpAp/x6TEIBnJqhsmAFnGWUUdXonCRmqi/EtMT8Tq66sRnRyt7VCKTXLjZPRa0UtluVwuR3JSMoyMjSCRSFTeTyqfBO9A76IPsISwMrKCVxMvXk+JiIiISjEm/YjoP2npyLz4D+SJKdqOpNjInr4q9HYZJy5qOJqSS2JiCL1WDQEm/YhIS8piI/XNnTcRtCso1/f7dOmjdrn75+5o9EWjogqrxGEjNVHBRSdH43Xia22HUXz0AVRSXSyXy5Ealwp9c321Sb94xCM+Mb7o4yMiIiIi0hAm/YhIRJ6YAiQkaTuMYvNDu64Y1LC5ynK5XI6kpCQYGxurbQCobF6+TJ0nubYDICJC2WuktmxtidYNWqssl8vlSEpMgrGJ+nuUgYVBmTpPRERERERERJSNST8iKtNsLMrDxqK8ynK5XI74+HiYmZmpbVAlIiIqauXKl0O58uVUlsvlckjiJDAz5z2KiIiIiIiIiP6jo+0AiIiIiIiIiIiIiIiIiOjDMOlHREREREREREREREREVMpxeE8iIiIiIiIiIiIiomIWERGBiIgIleUymQwPHz5EVlYWdHV1Vd63sbGBjY1NcYRIRKUMk35ERERERERERERERMVs/fr1mD17doG3mzVrFry9vTUfEBGVekz6EREREREREREREZF2pccBmYnajqJY9ercArWq+oiWpaWnYcS3kwEAm9b+gnIG5VS2q1enFpAcXiwxlhh6JoC+ubajICrxmPQjIiIiIiIiIiIiIu3KTAQergZSo7UdSbE5uPkmZm8OyvX9b76bonb5rBHucPumUVGFVfKUswKcvJj0I8qHEp/0k8lk8PHxweHDhxEdHY2KFSuid+/e+O677yCRSAAAcrkcv/76K/bs2YP4+Hg0bNgQ3t7ecHBw0G7wRERERERERERERJQ/qdFA6mttR1FsRne1xKdNW6ssl0OOxMQkmJgYQwKJyvs2lgZl6jwRUf6V+KTfxo0b4e/vj8WLF8PR0RF37tzB9OnTYWpqiqFDhwrr7NixA4sWLYK9vT1WrVqFESNG4OjRozAwMNDyERARERERERERERERidlYloONperwnXLIERcngbm5mdqkHxFRbnS0HcD7BAUFoUOHDmjbti3s7e3RpUsXeHh4ICQkBEB2L7/t27fj22+/RceOHVG7dm0sWbIEkZGROHnypJajJyIiIiIiIiIiIiIiIip6JT7p5+7ujitXruDp06cAgPv37+PmzZto06YNAODly
5eIiopCy5YthW1MTU3h6uqKoKDcx0POjVwuL5r/NHM66CMlRxGWPZZT0hCWUyoNirqcEhERERERERERlVQlfnjPUaNGITExEV27doWuri5kMhl++OEHfPrppwCAqKgoAIClpaVoO0tLS0RHF3zS1/j4eOjoaDYXqqOjA0NZFjIzMyHPyNDovkun7EbTzMwMgN3TIcnMhEQmQ0pCArKysrQWB8tpTiynylhOSyqWU2VFXU61WfaJiIiIiIiIiIjep8Qn/Y4dO4aAgAAsW7YMjo6OuHfvHhYuXIiKFSuid+/eGv88MzMz6Orqany/8rhEyPX0AKlU4/subbJ7SqRAT08KiYSN1NDTg66uLkxNTbQdCcupEpbTHFhOSySW0xyKuJzKZLIi2S8REREREREREZEmlPik35IlSzBq1Ch0794dAODs7IxXr15h/fr16N27N6ytrQEAMTExqFixorBdTEwMateuXeDPk0gkbDilYicBWO6oxGM5pdKgKMspyz8REREREREREZVkJX5Ov9TUVJVGNl1dXWFeHXt7e1hbW+Py5cvC+4mJibh16xbc3d2LNVYiIiIiIiIiIiIiIiIibSjxPf3atWsHX19f2NraCsN7bt26FX379gWQ/dT90KFDsW7dOlSrVg329vZYtWoVKlasiI4dO2o5eiIiIiIiIiIiIiIiIqKiV+KTfj///DNWrVqF2bNnC0N4Dhw4EGPHjhXWGTlyJFJSUjBz5kzEx8ejUaNG2LRpEwwMDLQYOREREREREREREREREVHxKPFJPxMTE/z000/46aefcl1HIpFgwoQJmDBhQjFGRkRERERERERERERERFQylPg5/YiIiIiIiIiIiIiIiIgob0z6EREREREREREREREREZVyTPoRERERERERERERERERlXJM+hERERERERERERERERGVckz6EREREREREREREREREZVyTPoRERERERERERERERERlXJM+hERERERERERERERERGVckz6EREREREREREREREREZVyTPoRERERERERERERERERlXJM+hERERERERERERERERGVckz6EREREREREREREREREZVyTPoRERERERERERERERERlXJM+hERERERERERUZm0c+dO9OzZEw0bNkTDhg0xcOBABAYGCu8PGTIEzs7Oov9mzpwp2serV68watQouLq6okWLFli8eDEyMzOL+1CIiIiIoKftAIiIiIiIiIiIiLShcuXKmDx5MqpVqwa5XI6DBw9i7NixOHDgAGrVqgUAGDBgAMaPHy9sY2hoKPwtk8kwevRoWFlZYdeuXYiMjMTUqVMhlUoxceLEYj8eIiIiKtvY04+IiIiIiIiIiMqk9u3bw9PTEw4ODqhevTp++OEHGBkZITg4WFinXLlysLa2Fv4zMTER3rtw4QLCwsLwyy+/oE6dOvD09MSECRPg5+eH9PR0LRwRERERlWXs6UdERERERERERGWeTCbD8ePHkZycDHd3d2F5QEAADh8+DGtra7Rr1w7fffed0NsvODgYTk5OsLKyEtb38PCAt7c3wsLCULdu3QLFIJfLIZfLNXNA4j0XwT5LJ8XplcsBSLQaSgki/+/EaFVJiKFkYDlVh+W0pGE5Vafoyml+6wdM+hERERERERERUZn14MEDfP7550hLS4ORkRHWrFkDR0dHAECPHj1ga2uLihUr4sGDB1i6dCmePn2K1atXAwCio6NFCT8AwuuoqKgCxxIfHw8dHc0OzKWjowMjiQxyWSaQmaHRfZdGijbTzMxMSNhIDcgyIZHJkJyQgKysLK2FwXIqxnKaA8tpicRymkMRl9P87pNJPyIiIiIiIiIiKrOqV6+OgwcPIiEhASdOnMDUqVPx+++/w9HREQMHDhTWc3Z2hrW1NYYNG4bnz5+jatWqGo/FzMwMurq6Gt8vUuIBXT1AT6r5fZcyip4Senp6kLCVOrtc6OrC1NBU25GwnCphOc2B5bREYjnNoYjLq
Uwmy9d6TPoREREREREREVGZpa+vj2rVqgEA6tevj9u3b2P79u2YM2eOyrqurq4AgH///RdVq1aFlZUVQkJCROtER0cDAKytrQsci0QiKaKGUzbGCv7/VEgkPCv/kaBkdNMpCTGUECynarCcljgsp2oUXTnNb/1As+MFEBERERERERERlWJZWVlIT09X+969e/cA/JfQc3Nzw8OHDxETEyOsc+nSJZiYmAhDhBIREREVF/b0IyIiIiIiIiKiMmnZsmVo06YNbGxskJSUhCNHjuDatWvYvHkznj9/joCAAHh6esLCwgIPHjzAwoUL0aRJE9SuXRsA4OHhAUdHR/z444+YMmUKoqKisHLlSgwePBj6+vpaPjoiIiIqa5j0IyIiIiIiIiKiMikmJgZTp05FZGQkTE1N4ezsjM2bN6NVq1aIiIjA5cuXsX37diQnJ8PGxgadOnXCd999J2yvq6sLX19feHt7Y+DAgTA0NETv3r0xfvx4LR4VERERlVVM+hERERERERERUZm0YMGCXN+zsbHB77///t592NnZYePGjZoMi4iIiKhQOKcfERERUSnm4+MDZ2dn0X9dunQR3k9LS8Ps2bPRrFkzuLu7Y9y4cYiOjhbt49WrVxg1ahRcXV3RokULLF68GJmZmcV9KERERERERERE9AHY04+IiIiolKtVqxa2bt0qvNbV1RX+XrBgAQIDA7Fy5UqYmppi7ty58PLywq5duwAAMpkMo0ePhpWVFXbt2oXIyEhMnToVUqkUEydOLPZjISIiIiIiIiKiwmFPPyIiIqJSTldXF9bW1sJ/FSpUAAAkJCRg3759mDZtGlq0aIH69etjwYIFCAoKQnBwMADgwoULCAsLwy+//II6derA09MTEyZMgJ+fH9LT07V4VEREREREREREVBBM+hERERGVcv/++y88PDzQoUMHTJo0Ca9evQIA3LlzBxkZGWjZsqWwbs2aNWFraysk/YKDg+Hk5AQrKythHQ8PDyQmJiIsLKxYj4OIiIiIiIiIiAqPw3sSERERlWIuLi5YuHAhqlevjqioKKxZswaDBw9GQEAAoqOjIZVKYWZmJtrG0tISUVFRAIDo6GhRwg+A8FqxTkHI5XLI5fJCHk0e+4UckCP7v7JOnuP/ZZ08u3wURbkreCgspwKWUzGW05KJ5VSsiMtpSSj/RERERB87Jv2IiIiISjFPT0/h79q1a8PV1RXt2rXDsWPHUK5cuWKPJz4+Hjo6mh1MQkdHBzKZDJmZmcjIzNDovkul/28zzczMBCTaDaUkyMzMhEwmQ0JCArKysrQWB8tpDiynIiynJRTLqUhRl1Ntln0iIiKisoJJPyIiIqKPiJmZGRwcHPD8+XO0bNkSGRkZiI+PF/X2i4mJgbW1NYDsXn0hISGifURHRwOAsE5BP19XV/cDjkC9+IR46OnpQaon1fi+SxtFTwk9PT1IJGyl1tPTg66uLkxNTbUdCsupEpZTMZbTkonlVKyoy6lMJiuS/RIRERHRf5j0IyIiIvqIJCUl4cWLF7C2tkb9+vUhlUpx+fJldO7cGQDw5MkTvHr1Cm5ubgAANzc3+Pr6IiYmBpaWlgCAS5cuwcTEBI6OjgX+fIlEUiQNpxJIsnthsE32Pzwf2STZ5aMkNNiznKrB85GN5bRk4/nIVsTltCSUfyIiIqKPHZN+RERERKXY4sWL0a5dO9ja2iIyMhI+Pj7Q0dFBjx49YGpqir59+2LRokUwNzeHiYkJ5s2bB3d3dyHp5+HhAUdHR/z444+YMmUKoqKisHLlSgwePBj6+vraPTgiIiIiIiIiIso3Jv2IiIiISrHXr19j4sSJiI2NRYUKFdCoUSPs3r0bFSpUAADMmDEDOjo6GD9+PNLT0+Hh4YFZs2YJ2+vq6sLX1xfe3t4YOHAgDA0N0bt3b4wfP15bh0RERERERERERIXApB8RERFRKbZixYo83zcwMMCsWbNEib6c7OzssHHjRk2HRkREREREREREx
UhH2wEQERERERERERERERER0Ydh0o+IiIiIiIiIiIiIiIiolGPSj4iIiIiIiIiIiIiIiKiUY9KPiIiIiIiIiIiIiIiIqJRj0o+IiIiIiIiIiIiIiIiolGPSj4iIiIiIiIiIiIiIiKiUY9KPiIiIiIiIiIiIiIiIqJRj0o+IiIiIiIiIiIiIiIiolGPSj4iIiIiIiIiIiIiIiKiUY9KPiIiIiIiIiIiIiIiIqJRj0o+IiIiIiIiIiIiIiIiolGPSj4iIiIiIiIiIiIiIiKiUY9KPiIiIiIiIiIiIiIiIqJRj0o+IiIiIiIiIiIiIiIiolGPSj4iIiIiIiIiIiIiIiKiUY9KPiIiIiIiIiIiIiIiIqJRj0o+IiIiIiIiIiIiIiIiolGPSj4iIiIiIiIiIiIiIiKiUY9KPiIiIiIiIiIiIiIiIqJRj0o+IiIiIiIiIiIiIiIiolGPSj4iIiIiIiIiIiIiIiKiUY9KPiIiIiIiIiIiIiIiIqJRj0o+IiIiIiIiIiIiIiIiolGPSj4iIiIiIiIiIiIiIiKiUY9KPiIiIiIiIiIiIiIiIqJTT03YARERERGWFTCZDYGAgLl26hFu3biEqKgqpqamwsLBA9erV0bhxY3Tq1AlVqlTRdqhERERERERERFTKsKcfERERURFLSkrC6tWr0aZNG4wfPx43b96Eo6MjunbtioEDB6Jly5bIysrCli1b0LlzZwwbNgw3b97UdthEREREH72dO3eiZ8+eaNiwIRo2bIiBAwciMDBQeD8tLQ2zZ89Gs2bN4O7ujnHjxiE6Olq0j1evXmHUqFFwdXVFixYtsHjxYmRmZhb3oRARERGxpx8RERFRUevQoQNq1aqFKVOmoGPHjjAxMcl13dDQUBw5cgTffvstJkyYgMGDBxdjpERERERlS+XKlTF58mRUq1YNcrkcBw8exNixY3HgwAHUqlULCxYsQGBgIFauXAlTU1PMnTsXXl5e2LVrF4DskRxGjx4NKysr7Nq1C5GRkZg6dSqkUikmTpyo5aMjIiKisoZJPyIiIqIitnbtWjRs2DBf69arVw/16tXD2LFjERERUcSREREREZVt7du3F73+4Ycf4O/vj+DgYFSuXBn79u3D0qVL0aJFCwDAggUL0K1bNwQHB8PNzQ0XLlxAWFgYtm7dCisrK9SpUwcTJkzA0qVL4eXlBX19fW0cFhEREZVRHN6TiIiIqIjlN+GnzMTEBLVq1SqCaIiIiIhIHZlMhj///BPJyclwd3fHnTt3kJGRgZYtWwrr1KxZE7a2tggODgYABAcHw8nJCVZWVsI6Hh4eSExMRFhYWHEfAhEREZVx7OlHREREVIwSExORnp6OChUqCMsOHz6Mx48fo0WLFmjevLkWoyMiIiIqex48eIDPP/8caWlpMDIywpo1a+Do6Ih79+5BKpXCzMxMtL6lpSWioqIAANHR0aKEHwDhtWKdgpDL5ZDL5YU8kjz3XAT7LJ0Up1cuByDRaigliPy/E6NVJSGGkoHlVB2W05KG5VSdoiun+a0fMOlHREREVIymTJmCihUrYvbs2QCA1atXY/Xq1TA3N8fGjRuxdOlSdOvWTctREhEREZUd1atXx8GDB5GQkIATJ05g6tSp+P3337USS3x8PHR0NDswl46ODowkMshlmUBmhkb3XRop2kwzMzMhYSM1IMuERCZDckICsrKytBYGy6kYy2kOLKclEstpDkVcTvO7Tyb9iIiIiIrR7du3MWvWLADZT2nt3LkTo0ePxg8//ICFCxdi8+bNTPoRERERFSN9fX1Uq1YNAFC/fn3cvn0b27dvR9euXZGRkYH4+HhRb7+YmBhYW1sDyO7VFxISItpfdHQ0AAjrFISZmRl0dXULeyi5S4kHdPUAPanm913KKHpK6OnpQcJW6uxyoasLU0NTbUfCcqqE5TQHltMSieU0hyIupzKZLF/rMelHREREVIzi4uJQvnx5AMCdO3fw7t079OvXDwDQvn177NmzR
5vhEREREZV5WVlZSE9PR/369SGVSnH58mV07twZAPDkyRO8evUKbm5uAAA3Nzf4+voiJiYGlpaWAIBLly7BxMQEjo6OBf5siURSRA2nbIwV/P+pkEh4Vv4jQcnoplMSYighWE7VYDktcVhO1Si6cprf+gGTfkRERETFyMrKCmFhYWjcuDECAwNhZ2eHKlWqAABSUlKgp8fqGREREVFxWbZsGdq0aQMbGxskJSXhyJEjuHbtGjZv3gxTU1P07dsXixYtgrm5OUxMTDBv3jy4u7sLST8PDw84Ojrixx9/xJQpUxAVFYWVK1di8ODB0NfX1+7BERERUZnDViUiIiKiYtSlSxf88ssvuHTpEs6dO4dvvvlGeO/u3bvC0FJEREREVPRiYmIwdepUREZGwtTUFM7Ozti8eTNatWoFAJgxYwZ0dHQwfvx4pKenw8PDQxiqHQB0dXXh6+sLb29vDBw4EIaGhujduzfGjx+vrUMiIiKiMoxJPyIiIqJiNGnSJBgbG+POnTsYPnw4Ro8eLbwXGhqKrl27ajE6IiIiorJlwYIFeb5vYGCAWbNmiRJ9OdnZ2WHjxo2aDo2IiIiowJj0IyIiIipiN2/eRMOGDSGRSKCnpwcvLy+1661Zs6aYIyMiIiIiIiIioo8Fk35ERERERWzYsGEwNjZG69at0a5dO7Rp0wYmJibaDouIiIiIiIiIiD4iOtoOgIiIiOhjd+XKFcyePRt6enqYP38+mjdvjqFDh2Lbtm149uyZtsMjIiIiIiIiIqKPAHv6ERERERUxY2NjdO7cGZ07d4ZcLsetW7dw9uxZHDx4EIsWLUK1atXQvn17tGvXDo0aNYKurq62QyYiIiIiIiIiolKGST8iIiKiYiSRSODm5gY3Nzd8//33eP36NU6fPo3AwEDs3LkTBgYG8PDwwPLly7UdKhERERERERERlSJM+hERERFpUeXKlfHFF1/giy++QGpqKi5evIjAwEBth0VERERERERERKUMk35EREREWnbz5k08efIEjRs3RocOHdChQwdth0RERERERERERKUMk35ERERExWjSpEnQ19fHwoULAQD+/v6YPXs2AEBfXx/r169HixYttBkiERERERERERGVQjraDiA/3rx5g8mTJ6NZs2ZwcXFBz549cfv2beF9uVyOVatWwcPDAy4uLhg2bBiePXumvYCJiIiIcnHz5k20bt1aeL1hwwb0798fN2/eROfOnbF69WotRkdERERERERERKVViU/6xcXFYdCgQZBKpdi4cSP+/PNPTJ06Febm5sI6GzduxI4dO+Dt7Y3du3fD0NAQI0aMQFpamhYjJyIiIlL19u1bVKxYEQDw6NEjREREYOjQoTA2Nkbv3r3x8OFDLUdIRERERERERESlUYkf3nPjxo2oXLmyMAQWAFSpUkX4Wy6XY/v27fj222/RsWNHAMCSJUvQsmVLnDx5Et27dy/2mImIiIhyY2FhgfDwcDRu3Bjnz5+HtbU1atWqBQCQyWTIysrScoRERERERERERFQalfiefqdPn0b9+vUxfvx4tGjRAr169cLu3buF91++fImoqCi0bNlSWGZqagpXV1cEBQVpI2QiIiKiXLVp0wZLly7F4sWLsWHDBnTr1k1479GjR7C3t9didEREREREREREVFqV+J5+L168gL+/P77++muMGTMGt2/fxrx58yCVStG7d29ERUUBACwtLUXbWVpaIjo6usCfJ5fLIZfLNRK7aL8a3yN9TOQAUATlrqC0HwGVZCynVBoUZTnVVP1g6tSpkMlkuHDhAjw9PTFu3Djhvb///ls03x8REREREREREVF+lfikn1wuR/369TFx4kQAQN26dfHo0SPs2rULvXv31vjnxcfHQ0dHsx0gdXR0YCjLQmZmJuQZGRrdd+mU3WiamZkBQKLdUEoASWYmJDIZUhIStDqkG8tpTiynylhOSyqWU2VFXU41tU9TU1PRsOXK/P39NfIZRERERERERERU9
pT4pJ+1tTVq1qwpWlajRg2cOHFCeB8AYmJiULFiRWGdmJgY1K5du8CfZ2ZmBl1d3Q+IWD15XCLkenqAVKrxfZc22T0lUqCnJ4VEwkZq6OlBV1cXpqYm2o6E5VQJy2kOLKclEstpDkVcTmUymUb3FxcXh0ePHiEiIgJt2rSBubk50tLSIJVKNf4AEhERERERERERffxKfNKvYcOGePr0qWjZs2fPYGdnBwCwt7eHtbU1Ll++jDp16gAAEhMTcevWLQwaNKjAnyeRSNhwSsVOArDcUYnHckqlQVGWU03tVy6XY8WKFdixYwdSUlIgkUiwd+9emJubw8vLC66urvDy8tLIZxERERERERERUdlR4h8j/+qrr3Dr1i34+vri33//RUBAAHbv3o0vvvgCQHYD3NChQ7Fu3TqcOnUKDx48wI8//oiKFSuiY8eOWo6eiIiISGzlypX4/fffMXXqVJw4cUI0V2D79u1x+vRpLUZHRERERERERESlVYnv6efi4oLVq1dj+fLlWLNmDezt7TFjxgx8+umnwjojR45ESkoKZs6cifj4eDRq1AibNm2CgYGBFiMnIiIiUnXgwAFMnDgRn3/+ucqQoVWrVsWLFy+0FBkREREREREREZVmJT7pBwDt2rVDu3btcn1fIpFgwoQJmDBhQjFGRURERFRwsbGxKvMVK8hkMmRmZhZ63xs2bMCyZcswdOhQ/PTTTwCAtLQ0LFq0CEePHkV6ejo8PDwwa9YsWFlZCdu9evUK3t7euHr1KoyMjNCrVy9MmjQJenqloqpIREREREREREQoBcN7EhEREX1MHBwccPHiRbXvXbt2DbVq1SrUfkNCQrBr1y44OzuLli9YsABnzpzBypUrsWPHDkRGRormDJTJZBg9ejQyMjKwa9cuLFq0CAcOHMCvv/5aqDiIiIiIiIiIiEg7mPQjIiIiKkbDhg3D1q1bsXLlSjx69AgA8Pr1a/j5+WHHjh0YNmxYgfeZlJSEKVOmYN68eTA3NxeWJyQkYN++fZg2bRpatGiB+vXrY8GCBQgKCkJwcDAA4MKFCwgLC8Mvv/yCOnXqwNPTExMmTICfnx/S09M1cchERERERERERFQMOGYTERERUTHq06cP4uLi4OPjg/Xr1wMAxo4dC0NDQ3z//ffo1q1bgfc5Z84ceHp6omXLlli3bp2w/M6dO8jIyEDLli2FZTVr1oStrS2Cg4Ph5uaG4OBgODk5iYb79PDwgLe3N8LCwlC3bt0CxSKXyyGXywt8DO/dL+SAHNn/lXXyHP8v6+TZ5aMoyl3BQ2E5FbCcirGclkwsp2JFXE5LQvknIiIi+tgx6UdERERUzL7++msMGDAAQUFBePfuHczNzeHu7g5TU9MC7+vPP//E3bt3sXfvXpX3oqOjIZVKYWZmJlpuaWmJqKgoYR3lhB8A4bVinYKIj4+Hjo5mB5PQ0dER5jvMyMzQ6L5Lpf9vM83MzAQk2g2lJMjMzIRMJkNCQgKysrK0FgfLaQ4spyIspyUUy6lIUZdTbZZ9IiIiorKCST8iIiIiLTA2NoaHh8cH7SMiIgLz58/Hli1bYGBgoKHIPoyZmRl0dXU1vt/4hHjo6elBqifV+L5LG0VPCT09PUgkbKXW09ODrq5uoZLmmsZy+h+WUzGW05KJ5VSsqMupTCYrkv0SERER0X+Y9CMiIiIqYn/99VeB1u/UqVO+1gsNDUVMTAz69OkjLJPJZLh+/Tr8/PywefNmZGRkID4+XtTbLyYmBtbW1gCye/WFhISI9hsdHQ0AwjoFIZFIiqThVAJJdi8Mtsn+h+cjmyS7fJSEBnuWUzV4PrKxnJZsPB/ZiricloTyT0RERPSxY9KPiIiIqIiNHz8+3+tKJBLcu3cvX+s2b94cAQEBomXTp09HjRo1MHLkSNjY2EAqleLy5cvo3LkzAODJkyd49eoV3NzcAABubm7w9fVFTEwMLC0tAQCXLl2CiYkJHB0d8x03ERERERERERFpF5N+R
EREREXs1KlTRbJfExMTODk5iZYZGRnBwsJCWN63b18sWrQI5ubmMDExwbx58+Du7i4k/Tw8PODo6Igff/wRU6ZMQVRUFFauXInBgwdDX1+/SOImIiIiIiIiIiLNY9KPiIiIqIjZ2dlp7bNnzJgBHR0djB8/Hunp6fDw8MCsWbOE93V1deHr6wtvb28MHDgQhoaG6N27d4F6JxIRERERERERkfYx6UdERESkBRcvXsStW7cQFRUFa2truLq6olWrVh+83x07doheGxgYYNasWaJEX052dnbYuHHjB382ERERERERERFpT5Ek/WJjY3Ht2jWhISs1NRUWFhaoUaMGGjVqhAYNGhTFxxIRERGVeFFRURg3bhyCg4Nhbm4OS0tLxMTEIC4uDm5ubvDx8YG1tbW2wyQiIiIiIiIiolJGo0m/a9euYfv27Th79ixkMhlsbGxQvnx56Ovr48mTJzhy5AiSk5NhZ2eHfv36YciQITAxMdFkCEREREQl2qxZs/Dy5Uts27YNzZs3F5ZfvnwZU6ZMgbe3N9asWaPFCImIiIiIiIiIqDTSWNJv+PDhCAkJQadOnbB27Vq4u7vD1NRUtI5cLseTJ09w7tw5/Pnnn9i2bRuWLFkCT09PTYVBREREVKJdunQJ3t7eooQfALRo0QKTJ0+Gt7e3dgIjIiIiIqL/Y+/e43Ou/z+OP69r12YO27DNYSjlMHLaHLOmOUWhcqgoOeWbVA4lknIaQhlRyDG+JFJEUYlCOeUQia/IKWe2YZuNHa/fH/vt0y4bbezadV087rdbt++uz+d9vT+vz8fra2/v1+fz/gAA4NLyrOhXv359TZkyJUuhLzOTyaQKFSqoQoUK6tGjh3bu3KkrV67kVQgAAABOz9vbWz4+Pjfcd7OxFAAAAAAAAHAjeVb06927d66/U7du3bw6PAAAgEvo1q2bZs2apfr166tw4cLG9itXrmj27Nnq2rWrA6MDAAAAAACAq8rTd/rdzKlTp3TixAk98MADKlq0aH4dFgAAwKmcOXNGp0+fVlhYmBo0aCBfX19FR0fr119/VeHChXX+/HmNGTPGaD906FAHRgsAAOCcjhw5ogsXLigxMVFFixZV+fLlmW8CAAB3PbsU/caPH6/U1FS98847kqS1a9fq9ddfV0pKinx8fDR37lxVr17dHocGAABwauvXr5fFYpG3t7cOHDhgbPf29pYk/fTTT8Y2k8lE0Q8AAEBSWlqa1q9frxUrVmjbtm26cuWKrFarsT/jlTKPPvqo2rVrpzJlyjgwWgAAAMewS9Fv7dq16tevn/F50qRJCgsLU//+/fX+++9r8uTJmjNnjj0ODQAA4NQyF/UAAADw71atWqUPP/xQkZGRaty4sfr27asqVaqoWLFi8vDwUGxsrE6fPq19+/bphx9+0IwZM9S2bVv17dtXJUuWdHT4AAAA+cYuRb/IyEgFBARIkk6cOKFjx45pwoQJqly5srp06aLBgwfb47AAAAAAAAC4w0ydOlW9e/dWq1at5OnpmW2bGjVq6NFHH9XAgQP1119/ad68eVq5cqV69eqVz9ECAAA4jl2Kfl5eXoqOjpYkbd68WT4+PsZynh4eHkpMTLTHYQEAAFzC1atXtXXrVp09e1ZJSUk2+0wmk7p37+6YwAAAAJzQd999J5PJlOP2lSpV0tixY22W/7yRmTNn6ocfftDRo0fl6emp4OBgDRw4UPfff7/RpkuXLtq+fbvN9zp27KhRo0YZn8+cOaORI0fq119/VaFChdS2bVu98cYbsljsMvUGAACQLbuMPOrWrasPP/xQ0dHRmjt3rpo3b27sO3r0qEqXLm2PwwIAADi97du3q2/fvoqJicl2P0U/AAAAW7kp+OX2e9u3b1fnzp1Vo0YNpaamatKkSerZs6dWr16tQoUKGe2eeeYZm1fZFCxY0Pg5NTVVL730kvz8/LRkyRJduHBBgwcPlru7uwYMGHBLsQMAANwKuxT93n77bQ0aNEgRE
RGqVq2aXn/9dWPf119/rbp169rjsAAAAE5v1KhRCgwM1LBhw1S+fHm5u7s7OiQAAACntn///hy1M5lM8vDwUKlSpVSkSJEcfWfu3Lk2n8ePH6+GDRtq//79qlevnrHd09NT/v7+2faxadMmHT58WPPmzZOfn5+qVq2q/v37KyIiQn369JGHh0eOYgEAALhddin6lSxZUgsWLMh239y5cxnsAACAu9bp06f19ttvq1KlSo4OBQAAwCV06NAhV0/7mUwmPfTQQ3r//fdVrFixXB0rLi5OkuTj42Oz/ZtvvtHXX38tf39/NWnSRK+88orxtN+ePXtUuXJl+fn5Ge1DQ0M1cuRIHT58WA888ECOj2+1WnO0LGnu2aNP15Rxea1WSbf2EOkdyPrPhXEoZ4jBOZCn2SFPnQ15mh375WlOxwf5vrB4Tu+0AgAAuBPVrl1bx44dU0hIiKNDAQAAcAk3urE8O9euXdPx48f1ySefaPTo0Zo0aVKOv5uWlqaxY8eqdu3aqly5srG9TZs2CggIUIkSJXTw4EFFRETo2LFjmjp1qiQpKirKpuAnyfgcGRmZ4+NLUmxsrMxmc66+82/MZrMKmVJlTU2RUpLztG9XlDFnmpKSoltcOfbOkpoiU2qqEuLilJaW5rAwyFNb5Ol1yFOnRJ5ex855mtM+7Vb0W716tb7//nudPXtWiYmJNvtMJpO+/vprex0aAADAaY0aNUr9+/eXu7u7GjZsKC8vryxtihYtmv+BAQAAOKn69evnqv3DDz8sPz8/jRkzJlffCw8P119//aXPPvvMZnvHjh2NnwMDA+Xv76/u3bvrxIkTuueee3J1jH/j7e0tNze3PO1TknQ1VnKzSBaWls94UsJisdzy+yLvKG4Wyc1NXgWz/rsk35GnBvL0OuSpUyJPr2PnPE1NTc1RO7sU/SZNmqRZs2apWrVqKl++PMt5AgAA/D9vb28FBARo+PDhNxwUHzhwIJ+jAgAAuLMEBQWpTZs2OW4/atQobdiwQZ9++qlKlSp107a1atWSJP3999+655575Ofnp71799q0iYqKkqQbvgfwRkwmk50mTpmMNfz/pTCZuCr/MMk5HtNxhhicBHmaDfLU6ZCn2bBfnuZ0fGCXot+yZcvUr18/vfLKK/boHgAAwGUNGjRIv/32m3r06KH77rtP7u7cHQgAAJBTycnJmjdv3g1Xl5Kk3377TQEBAXr77bf/tT+r1arRo0dr7dq1WrhwocqVK/ev38m4QSujoBcUFKQZM2YoOjpavr6+kqQtW7aoSJEiqlixYm5ODwAA4LbYbXnPjLueAAAA8I9t27YpPDxcTz75pKNDAQAAcDnh4eFasWKFmjZtqkaNGt32DVTh4eFatWqVpk+frsKFCxvv4PPy8pKnp6dOnDihb775RmFhYSpatKgOHjyocePGqV69eqpSpYokKTQ0VBUrVtSbb76pQYMGKTIyUpMnT1bnzp1Z/QoAAOQruxT9nnrqKa1atUoPPfSQPboHAABwWSVLlsz2PX4AAAD4d2vXrtWQIUPUuXPnPOlv8eLFkqQuXbrYbB83bpzat28vd3d3bd26VQsWLFBCQoJKly6tFi1a2Kxu5ebmphkzZmjkyJHq2LGjChYsqHbt2qlfv355EiMAAEBO2aXo99prr+ndd99Vp06d1LBhQ3l7e9vsN5lM6t69uz0ODQAA4NT69eunWbNmqU6dOvLx8XF0OAAAAC6lUKFCOVqCM6cOHjx40/2lS5fWp59++q/9lClTRrNnz86rsAAAAG6JXYp+27Zt01dffaX4+Hjt2bMny36KfgAA4G71zTff6MyZM2rSpImqVq2a5ak/k8mkjz/+2EHRAQAAOLcePXros88+00MPPSQ3NzdHhwMAAOBU7FL0Cw8PV/Xq1TV06FCVL1/+ttdXBwAAuFPEx8fr3nvvtfkMAACAnOnatasuXLigRx55RHXr1s2yupQkDR061AGRAQAAOJ5din7nzp3TsGHDV
KlSJXt0DwAA4LIWLlzo6BAAAABc1qpVq/TJJ5/IZDJp69atWW40N5lMFP0AAMBdyy5Fvzp16ujYsWN66KGH7NE9AAAAAAAA7kITJ05Uy5YtNXr0aBUpUsTR4QAAADgVuxT9Xn/9db311ltyd3dXSEhIlnfVSFLRokXtcWgAAACnl5aWpm3btunYsWNKSkrKsr9Hjx4OiAoAAMD5Xb58Wc888wwFPwAAgGzYpej31FNPSZJGjBghk8mUbZsDBw7Y49AAAABOLTIyUl26dNHx48dlMplktVolyWbMRNEPAAAge2FhYdq9e7caNmzo6FAAAACcjl2KfmPHjr1hsQ8AAOBuNn78eBUtWlQbN25UWFiYli5dKj8/P3399ddasWKFZs2a5egQAQAAnNZTTz2l8PBwXbt2TQ0bNpS3t3eWNtWqVXNAZAAAAI5nl6Jf+/bt7dEtAACAy9uxY4eGDh0qf39/Y1tAQIB69+4tq9WqUaNGac6cOQ6MEAAAwHn95z//kSTNmjVLs2bNsrnp3Gq1ymQysboUAAC4a9ml6AcAAIDsxcXFqXjx4jKbzSpSpIiio6ONfUFBQTzpBwAAcBMLFixwdAgAAABOK8+Kfk8//bReeuklNW3aVGaz+V/bnz17VgsWLFCJEiV4bw0AALhrlC1bVhcuXJAkVaxYUStXrlSTJk0kSevWrVPRokUdGB0AAIBzq1+/vqNDAAAAcFp5VvRr27atRo4cqWHDhqlZs2aqXbu2AgMDVbx4cXl4eCg2NlanTp3S/v379fPPP+v3339X06ZN9eyzz+ZVCAAAAE6vcePG2rx5s1q1aqWXX35Zr776qho2bCiLxaKoqCgNHDjQ0SECAAA4lZSUFFksuZ/CutXvAQAAuKo8G/l07txZHTp00OrVq7VixQqtWLFCqampNm2sVqv8/f3VsmVLjRgxQoGBgXl1eAAAAJfwxhtvGD+HhYVp8eLFWrt2rRITExUSEqKwsDAHRgcAAOB8mjVrph49eujJJ59UsWLF/rX9zp07tWDBAlWpUkWvvPJKPkQIAADgHPL0didPT0916NBBHTp0UGJiog4cOKDIyEglJibKx8dH9913n8qWLZuXhwQAAHBpNWrUUI0aNRwdBgAAgNMKDw/X5MmTFRERoXr16t10dalNmzbp4sWLevbZZ9WpUydHhw4AAJCv7LbGQYECBRQUFGSv7gEAAFzS6dOndeXKFWPFg6SkJM2dO1dHjhxRSEiI2rdv7+AIAQAAnEvjxo3VuHFjbdu2TStXrtSXX36p8+fPS5JMJpOsVqvc3d1VrVo1devWTU888YSKFy/u4KgBAADyHwubAwAA5KNhw4apSpUqevPNNyVJEyZM0OLFi1W5cmV9//33unr1qjp37uzgKAEAAJzPgw8+qAcffFCSFBkZabO6VNmyZeXh4eHgCAEAABzL7OgAAAAA7iYHDhxQ3bp1JUkpKSlasWKFBg4cqOXLl6tPnz5asmSJgyMEAABwfv7+/nrggQcUHBys+++/n4IfAACAKPoBAADkq/j4eHl5eUmSfv/9d125ckWtWrWSJNWpU0cnT550ZHgAAAAAAABwURT9AAAA8lGpUqW0Z88eSdLatWtVsWJFlShRQpIUExMjT09PB0YHAAAAAAAAV2WXd/pZrVaZTCZ7dA0AAODSnnrqKU2ZMkXff/+9Dhw4oCFDhhj7fv/9d1WoUMGB0QEAAAAAAMBV2aXoFxYWpnbt2qlDhw6655577HEIAAAAl9SrVy+VKFFCf/zxh5577jm1b9/e2BcTE6Onn37agdEBAAAAAADAVdml6Pf4449r+fLlmjVrlurWrasOHTro0UcfZbkqAAAASW3btlXbtm2zbB81alT+BwMAAOBCYmNj5e3t7egwAAAAnJJd3uk3aNAgbdy4UdOmTVPRokU1dOhQhYaGavjw4dq7d689DgkAAOC0Ll26dEvfu3z5ct4GAgAA4OJCQ0M1cOBAbd261dGhAAAAOB27F
P0kyWw2q2nTpvroo4/0888/q0+fPvrtt9/UsWNHPf7445o/f75iY2PtdXgAAACn0axZM7377rv6888//7VtQkKCVq5cqQ4dOmjx4sX5EB0AAIDreOutt3T8+HH16NFDzZo107Rp03T27FlHhwUAAOAU7LK85/WioqJ09uxZRUdHy93dXSVKlNCHH36oqVOn6r333lOzZs3yIwwAAACHWLx4saZMmaJ27drpnnvuUXBwsAIDA1WsWDF5eHgoLi5Op06d0v79+/Xbb7/Jy8tLL774ojp16uTo0AEAAJzKc889p+eee06HDh3SsmXLtGjRIk2bNk0NGzbUU089pebNm8vd3d3RYQIAADiE3Yp+V65c0apVq7Rs2TLt27dPFStW1Msvv6wnn3xSPj4+unLlikaPHq13332Xoh8AALijBQYGavr06Tp58qRWrFihrVu36ttvv1VSUpLRJiAgQMHBwZowYYKaNGkiiyVf7s0CAABwSZUrV9aQIUM0aNAgrV+/XnPnztWAAQPk7e2tJ554Qp07d1b58uUdHSYAAEC+ssts0qBBg7Ru3TpJ0mOPPaZ33nlHQUFBNm2KFCmi5557TitXrrRHCAAAAE6nXLly6tu3r/r27StJiomJUWJioooWLSoPDw8HRwcAAOBa0tLS9Msvv2jlypXat2+f/Pz81KRJE/30009avHixhg8frmeeecbRYQIAAOQbuxT9jhw5osGDB6tNmzYqUqTIDdtVrFhRCxYssEcIAAAATs/Hx8fRIQAAALic48ePa9myZVqxYoUuXryoRo0aacqUKWrcuLHc3NxktVoVERGhKVOmUPQDAAB3FbsU/ZYvX56jdoULF1b9+vXtEQIAAAAAAADuMM8++6z27Nmj0qVLq1OnTnrqqadUsmRJmzYmk0ktW7bU3LlzHRQlAACAY9il6Ld161adOXNGHTp0yLJv+fLlCggI0IMPPmiPQwMAAAAAAOAO5e/vr1mzZik0NFQmk+mG7apWraoff/wxHyMDAABwPLM9Ov3ggw8UHR2d7b6LFy9q8uTJ9jgsAAAAAAAA7mAffvihGjVqdNOCnyS5u7urTJky+RQVAACAc7DLk36HDx/Wa6+9lu2+atWqacaMGfY4LAAAAAAAAO5gO3bsuOE+k8kkLy8v3XffffLw8MjHqAAAAJyDXYp+JpNJcXFx2e6LiYlRamqqPQ4LAADg9FasWKGwsDAVK1Ysy77Lly9rw4YNatu2bf4HBgAA4AK6dOli85Sf1WrN8tSfp6enOnbsqDfffFNms10WuQIAAHBKdhn51KpVS4sWLZLVarXZbrVa9dlnn6lWrVr2OCwAAIDTGzJkiE6ePJntvlOnTmnIkCH5HBEAAIDrmDdvnkqXLq0OHTpo+vTpWrp0qaZPn6527dqpdOnSmjRpkrp166ZFixZp6tSpjg4XAAAgX9nlSb++ffuqa9eueuKJJ9SuXTv5+/vrwoULWrFihY4fP66FCxfa47AAAABO7/qbojKLjY1V4cKF8zEaAAAA1/L555+rTZs2GjBggM32Jk2aaNKkSVq9erWmTp0qq9WqlStXql+/fg6KFAAAIP/ZpegXHBys+fPna8KECYqIiFBaWprMZrOCgoI0f/58BQUF2eOwAAAATmnjxo365ZdfjM+ffPKJ/Pz8bNokJiZq27Ztqlq1aq76/uyzz7R48WKdPn1aklSpUiW98sorCgsLM/odP368vv32WyUlJSk0NFQjRoywOf6ZM2c0cuRI/frrrypUqJDatm2rN954QxaLXYaKAAAAt2zjxo2aNm1atvsaNGhg3GjeoEEDzZ07Nz9DAwAAcDi7zeTUqVNHS5Ys0bVr1xQTEyNvb28VLFjQXocDAABwWsePH9dPP/0kKf3dxzt37pSHh4dNG3d3d1WqVCnLXev/plSpUho4cKDuvfdeWa1WrVixQq+++qq++uorVapUSWPHjtXGjRs1efJkeXl5afTo0erTp4+WLFkiSUpNTdVLL
70kPz8/LVmyRBcuXNDgwYPl7u6e61gAAADsrXDhwvr1118VEhKSZd+vv/5qrJqQnJzMCgoAAOCuY/fbtz09PeXp6WnvwwAAADitbt26qVu3bpKkpk2bavr06apSpUqe9N20aVObz6+//roWL16sPXv2qFSpUlq2bJkiIiLUsGFDSdLYsWPVqlUr7dmzR0FBQdq0aZMOHz6sefPmyc/PT1WrVlX//v0VERGhPn36ZClOAgAAOFKnTp00bdo0Xbx4UU2aNFHx4sV18eJF/fjjj1q+fLn69OkjSfrtt9/ybLwFAADgKuxW9Nu0aZPWrFmjc+fOKTEx0WafyWTSf//7X3sdGgAAwGllPPFnD6mpqfr++++VkJCg4OBg7du3T8nJyTZ3wleoUEEBAQFG0W/Pnj2qXLmyzXKfoaGhGjlypA4fPqwHHnjAbvECAADkVp8+feTt7a3Zs2friy++kMlkktVqlZ+fn95++2116dJFkvTEE0+oY8eO/9rfzJkz9cMPP+jo0aPy9PRUcHCwBg4cqPvvv99ow3LpAADAVdhl5DFnzhxFRESoTJkyqlChgry8vOxxGAAAAJezYsWKf23Ttm3bXPV58OBBderUSYmJiSpUqJCmTZumihUr6sCBA3J3d5e3t7dNe19fX0VGRkqSoqKisrxfMONzRpvcsFqtslqtuf7ev/Yrq2RV+n93O+t1/3u3s6bnhz3yLvehkKcG8tQWeeqcyFNbds7TvOjXarUqJiZGnTp10vPPP69z584pMjJS/v7+KlWqlMxms9G2QoUKOepz+/bt6ty5s2rUqKHU1FRNmjRJPXv21OrVq1WoUCFJYrl0AADgMuxS9Pvss8/0/PPPa+jQofboHgAAwGW99dZb2W43mUzGz7kt+t13331asWKF4uLitGbNGg0ePFiffvrp7YR5y2JjY20m3PKC2WxWamqqUlJSlJySnKd9u6T/nzNNSUmRTDdvejdISUlRamqq4uLilJaW5rA4yNPrkKc2yFMnRZ7asHee5kWfGSsYTJ8+XY0bN1ZAQIACAgJuq8+5c+fafB4/frwaNmyo/fv3q169eoqLi2O5dAAA4DLsUvS7fPmymjVrZo+uAQAAXNqOHTuybIuJidGmTZu0aNEiRURE5LpPDw8P3XvvvZKk6tWr648//tCCBQv02GOPKTk5WbGxsTZP+0VHR8vf319S+lN9e/futekvKipKkow2ueHt7S03N7dcf+/fxMbFymKxyN3inud9u5qMJyUsFotNsfhuZbFY5Obm5hSri5Cn/yBPbZGnzok8tWXvPE1NTb3tPjw8PFSqVKk86etG4uLiJEk+Pj6SxHLpAADApdil6NekSRPt2rXLuAMKAAAA6bKbSPPy8jKW55wwYYLmzJlzW8dIS0tTUlKSqlevLnd3d23dulUtW7aUJB09elRnzpxRUFCQJCkoKEgzZsxQdHS0fH19JUlbtmxRkSJFVLFixVwf22Qy2WXi1CRT+lMYzMn+g+uRzpSeH84wYU+eZoPrkY48dW5cj3R2ztO86ve5557T/PnzFRoaqgIFCuRJnxnS0tI0duxY1a5dW5UrV5aUfjNUfi6Xbq+l0lnH9h8Zl9dqFf/fN1j/uTAO5QwxOAfyNDvkqbMhT7NjvzzN6fjALkW/Dh06aOTIkUpMTFRISEiWgZEkVatWzR6HBgAAcFmVKlXS5MmTc/WdiRMn6uGHH1bp0qUVHx+vVatWafv27Zo7d668vLzUoUMHjR8/Xj4+PipSpIjGjBmj4OBgo+gXGhqqihUr6s0339SgQYMUGRmpyZMnq3PnzixFBQAAnM7Zs2d17NgxNW7cWPXr15efn1+WguKtvm4mPDxcf/31lz777LO8CPWW2Gup9EKmVFlTUySW9jXmYlNSUuQE92I4XmqKTKmpSnCCJajJ03+Qp9chT50SeXodO+dpTvu0S9HvhRdekCTNnj1bs2fPthl8Wa1WmUwmHThwwB6HBgAAcElXr17V0qVLV
aJEiVx9Lzo6WoMHD9aFCxfk5eWlwMBAzZ07Vw899JAk6e2335bZbFa/fv2UlJSk0NBQjRgxwvi+m5ubZsyYoZEjR6pjx44qWLCg2rVrp379+uXp+QEAAOSF9evXGzcm/fHHH1n2m0ymWyr6jRo1Shs2bNCnn36qUqVKGdv9/Pzydbl0ey2VrquxkptFYmlflva9nptFcnOTV0HHL0FNnv6DPL0OeeqUyNPr2DlPc7q8uV2KfgsWLLBHtwAAAC7v8ccfz7ItOTlZ58+f17Vr1/Tee+/lqr+xY8fedH+BAgU0YsQIm0Lf9cqUKaPZs2fn6rgAAACO8NNPP+Vpf1arVaNHj9batWu1cOFClStXzmZ/fi+Xbq+l0ll3LZP/vxQmE1flHyY5x2M6zhCDkyBPs0GeOh3yNBv2y9Ocjg/sUvSrX7++PboFAABwedWqVcsyUPPw8FCpUqXUokULVahQwUGRAQAA3H3Cw8O1atUqTZ8+XYULFzbewefl5SVPT0+WSwcAAC7FLkW/DEeOHNEff/yhc+fOqUOHDvL399fff/8tX19fFSlSxJ6HBgAAcErjx493dAgAAAAu7eLFi/rkk0+MOaepU6eqUqVK+u9//6tatWoZxbicWLx4sSSpS5cuNtvHjRun9u3bS2K5dAAA4DrsUvS7evWqhg4dqm+//VZms1lpaWlq1KiR/P39NXHiRJUtW1ZvvvmmPQ4NAADgMs6dO6cLFy6oRIkSNu+OAQAAQPb279+v7t27y8vLS/Xq1dP27duVlJQkSTp//rzmz5+vyZMn57i/gwcP/msblksHAACuwmyPTt977z1t27ZNs2fP1q5du4wXOkpSWFiYfvnlF3scFgAAwCV8/vnnaty4sZo0aaKOHTuqSZMmCgsL05IlSxwdGgAAgFMbN26cgoKCtGbNGr377rs2c061atXS77//7sDoAAAAHMsuT/qtWbNGb775pkJDQ5Wammqzr0yZMjp9+rQ9DgsAAOD0Zs6cqQ8++EBPPvmkWrZsKT8/P0VFRen7779XeHi4YmJi9NJLLzk6TAAAAKf0xx9/6KOPPpK7u3uWOafixYsrOjraQZEBAAA4nl2e9EtISJC/v3+2+65evXpbfc+aNUuBgYF69913jW2JiYkKDw9XgwYNFBwcrL59+yoqKuq2jgMAAGAPCxcuVM+ePfXee++padOmqlmzppo2bar3339f3bt318KFCx0dIgAAgNMqWLCgrly5ku2+M2fOqGjRovkbEAAAgBOxS9EvMDBQP/zwQ7b7NmzYoOrVq99Sv3v37tWSJUsUGBhos33s2LFav369Jk+erIULF+rChQvq06fPLR0DAADAnuLj4xUSEpLtvtDQUMXHx+dzRAAAAK4jNDRUH3/8sS5dumRsM5lMunbtmhYsWKCwsDAHRgcAAOBYdin6vfLKK/ryyy81aNAgbdiwQSaTSXv37tV7772nZcuW6eWXX851n/Hx8Ro0aJDGjBkjHx8fY3tcXJyWLVumt956Sw0bNlT16tU1duxY7d69W3v27MnDswIAALh9oaGh2rJlS7b7Nm/erIYNG+ZzRAAAAK5j0KBBio+PV8uWLfXaa6/JZDJp8uTJatWqlS5fvqzXXnvN0SECAAA4jF3e6de4cWNNmjRJ77//vr755htJUnh4uEqVKqWIiIhbmswaNWqUwsLCFBISoo8//tjYvm/fPiUnJ9vcMV+hQgUFBARoz549CgoKytVxrFarzUug80re94g7iVWS7JB3ueX4CODMyFO4AnvmaV6ND5566imNGDFCFy9eVLNmzeTr66vo6GitW7dO27ZtU3h4uPbv32+0r1atWp4cFwAA4E5QsmRJrVixQvPnz9eWLVt0zz336PLly3r88cfVo0cPlvcEAAB3NbsU/STp0Ucf1aOPPqpjx47p0qVL8vHxUYUKFW6pr9WrV+t///ufvvzyyyz7oqKi5O7uLm9vb5vtvr6+ioyMzPWxYmNjZTbn7QOQZrNZB
VPTlJKSImtycp727ZrSJ01TUpIlmRwbihMwpaTIlJqqq3FxSktLc1gc5On1yNPMyFNnRZ5mZu88zas+X3rpJUnSV199pa+++komk8mmoNi7d29J6UVGk8mkAwcO5MlxAQAA7hTe3t7q16+f+vXr5+hQAAAAnIpdin5Tp07V008/rZIlS+q+++7TfffdZ+y7cOGCli5dmuN37p09e1bvvvuuPvnkExUoUMAe4drw9vaWm5tbnvdrjbkiq8Uiubvned+uJn1i86osFneZTExSy2KRm5ubvLyKODoS8jQT8vQ65KlTIk+vY+c8TU1NzZN+FixYkCf9AAAAAAAAAJnZpeg3bdo0PfzwwypZsmSWfRcuXNC0adNyXPTbv3+/oqOj1b59e2NbamqqduzYoUWLFmnu3LlKTk5WbGyszdN+0dHR8vf3z3XsJpOJiVPkO5NE3sHpkadwBfbM07zqt379+nnSDwAAwN3o2rVrmj59utasWaNz584pKSkpSxtWSgAAAHcruxT9bvbOm8jIyCxLcd7Mgw8+aLwXMMOQIUN0//3368UXX1Tp0qXl7u6urVu3qmXLlpKko0eP6syZM7l+nx8AAEB+io6OVmJiYpbtAQEBDogGAADA+YWHh2vVqlVq06aNKlSoIHdWAAEAADDkWdFv1apVWrVqlaT0O+Hfe+89eXl52bRJSkrSvn37VLt27Rz3W6RIEVWuXNlmW6FChVS0aFFje4cOHTR+/Hj5+PioSJEiGjNmjIKDgyn6AQAAp3Pp0iWNGTNGP/zwg1JSUmz28R4/AACAm1u/fr0GDx6s559/3tGhAAAAOJ08K/olJycrPj5eUvqE1dWrV2U2m23aeHh46Mknn9R//vOfvDqsJOntt9+W2WxWv379lJSUpNDQUI0YMSJPjwEAAJAXhg4dqh07duill17i7nQAAIBccnNzU/ny5R0dBgAAgFPKs6Jfu3bt1K5dO0lSly5dNHLkSFWoUCGvurexcOFCm88FChTQiBEjKPQBAACn9+uvv2ro0KFq27ato0MBAABwOc8++6xWrlyp0NBQR4cCAADgdOzyTr/ri3IAAABI5+3trWLFijk6DAAAAJfk6empXbt2qVOnTmrYsKG8vb1t9ptMJnXv3t0xwQEAADiYXYp+kpSWlqZt27bp2LFjSkpKstnHAAwAANytevbsqYULF+qhhx6SxWK3oRgAAMAdKSIiQpJ05swZ7dmzJ8t+5pwAAMDdzC4zTZGRkXr++ef1999/y2QyyWq1SkofeGVgAAYAAO4WY8aMsfl85MgRPfLII6pXr16Wu9Ol9Pf+AQAAIKs///zT0SEAAAA4LbsU/caPH69ixYppwYIFCgsL09KlS+Xn56evv/5aK1as0KxZs+xxWAAAAKf0008/2XzOuBFq586dWdqaTCaKfgAAAAAAAMg1uxT9duzYoaFDh8rf39/YFhAQoN69e8tqtWrUqFGaM2eOPQ4NAADgdK4v+gEAACDnZs+erbZt29rMM/3222+qWrWqChYsaGw7efKkZs2apdGjRzsiTAAAAIcz26PTuLg4FS9eXGazWUWKFFF0dLSxLygoSLt27bLHYQEAAAAAAHCHmTRpks6ePWt8Tk1NVefOnXX06FGbdhcvXtSXX36Z3+EBAAA4Dbs86Ve2bFlduHBBklSxYkWtXLlSTZo0kSStW7dORYsWtcdhAQAAnN6KFStuuM9kMsnLy0tVqlRRQEBA/gUFAADgxKxWa462AQAA3O3sUvRr3LixNm/erFatWunll1/Wq6++qoYNG8pisSgqKkoDBw60x2EBAACc3ltvvWW80y/zZFXmbSaTSc2bN9f7779vs2QVAAAAAAAAcCN2Kfq98cYbxs9hYWFavHix1q1bp2vXrikkJERhYWH2OCwAAIDT++qrr/Taa6+pbdu2atasmXx9fRUdHa21a9dq5cqVCg8P16lTpzR+/HhNnDhRQ4cOdXTIAAAAAAAAcAF2Kfpdr0aNGqpRo
4Yk6ezZs/rmm2/0+OOP58ehAQAAnEpERISefvpp/ec//zG2+fr6qnLlyvLw8NCMGTP03//+V5cuXdKnn35K0Q8AAOAGMlZKAAAAQLp8KfpltnfvXr355psU/QAAwF1p165deuGFF7Ld98ADD2jatGmSpJo1a+rixYv5GRoAAIDT6tatW5YiX+fOnW228Z4/AABwt8v3oh8AAMDdrHjx4lqzZo0eeuihLPu+//57FS9eXJIUHx8vb2/v/A4PAADA6fTp08fRIQAAALgEin4AAAD5qFevXho5cqROnTqlJk2aqHjx4rp48aJ+/PFHbdu2TeHh4ZKkbdu2qWbNmg6OFgAAwPEo+gEAAOQMRT8AAIB81KlTJ/n7+2vGjBl67733lJKSIovFoqpVq2r69Olq2rSppPTJLYuFoRoAAAAAAAByhpkkAACAfNasWTM1a9ZMaWlpunjxoooXLy6z2WzTxsfHx0HRAQAAAAAAwBXlWdEvODg4ywuVs5OamppXhwQAAHBpZrNZfn5+jg4DAAAAAAAAd4A8K/q98MILOSr6AQAA3M2GDBnyr23GjRuXD5EAAAAAAADgTpJnRb++ffvmVVcAAAB3rAMHDmTZFhsbq7Nnz6pYsWIqWbKkA6ICAAAAAACAq+OdfgAAAPloxYoV2W4/cuSIBgwYoMGDB+dvQAAAAAAAALgjmB0dAAAAAKQKFSroxRdfZGlPAAAAAAAA3BKKfgAAAE7Cy8tLJ06ccHQYAAAAAAAAcEEs7wkAAJCPLl++nGVbcnKyjhw5okmTJqlSpUr5HxQAAAAAAABcHkU/AACAfPTggw/KZDJl2W61WlW6dGlNmzbNAVEBAAAAAADA1VH0AwAAyEdjx47NUvQrUKCASpYsqVq1asliYXgGAAAAAACA3GNWCQAAIB+1b9/e0SEAAADg/+3YsUNz587Vvn37FBkZqWnTpql58+bG/rfeektfffWVzXdCQ0M1d+5c4/Ply5c1evRorV+/XmazWS1atNA777yjwoUL59t5AAAASBT9AAAAHOKvv/7Srl27FBMTIx8fH9WpU4f3+QEAAOSzhIQEBQYGqkOHDurTp0+2bRo1aqRx48YZnz08PGz2Dxw4UJGRkZo3b56Sk5P19ttva/jw4Zo4caJdYwcAALgeRT8AAIB8lJSUpEGDBumHH36Q1WqVh4eHkpKSZDKZ1LJlS73//vtZJpIAAABgH2FhYQoLC7tpGw8PD/n7+2e778iRI/rll1/05ZdfqkaNGpKkoUOHqlevXnrzzTdVsmTJPI8ZAADgRsyODgAAAOBuMmnSJG3cuFHh4eHauXOn9u7dq507dyo8PFwbN27UBx984OgQAQAAkMn27dvVsGFDtWzZUiNGjNClS5eMfbt375a3t7dR8JOkkJAQmc1m7d271xHhAgCAuxhP+gEAAOSj1atXa8CAAXrmmWeMbUWKFNEzzzyjq1evas6cORo8eLADIwQAAECGRo0a6ZFHHlHZsmV18uRJTZo0SS+++KI+//xzubm5KSoqSsWLF7f5jsVikY+PjyIjI3N9PKvVKqvVmlfhZ+7ZDn26pozLa7VKMjk0FCdi/efCOJQzxOAcyNPskKfOhjzNjv3yNKfjA4p+AAAA+SgmJkb3339/tvvuv/9+xcTE5HNEAAAAuJHWrVsbPwcGBiowMFDNmzc3nv7La7GxsTKb83ZhLrPZrEKmVFlTU6SU5Dzt2xVlzJmmpKTIxCS1lJoiU2qqEuLilJaW5rAwyFNb5Ol1yFOnRJ5ex855mtM+KfoBAADko/vvv18rV65UaGholn1ff/31DQuCAAAAcLxy5cqpWLFi+vvvv9WwYUP5+fnp4sWLNm1SUlIUExNzw/cA3oy3t7fc3NzyKtx/XI2V3CySxT3v+3YxGU9KWCwWmZilTs8LNzd5FfRydCTkaSbk6XXIU6dEnl7Hznmampqao3YU/QAAAPLRK6+8ov79++v06dNq0aKF/Pz8FB0drTVr1mjPn
j2aMmWKo0MEAADADZw7d06XL182CnrBwcGKjY3Vvn37VL16dUnStm3blJaWppo1a+a6f5PJZKeJUyZjDf9/KUwmrso/THKOx3ScIQYnQZ5mgzx1OuRpNuyXpzkdH1D0AwAAyEctWrTQ1KlTNW3aNL333nuyWq0ymUyqWrWqpk6dqqZNmzo6RAAAgLtGfHy8Tpw4YXw+deqUDhw4IB8fH/n4+Gjq1Klq2bKl/Pz8dPLkSU2YMEH33nuvGjVqJEmqUKGCGjVqpGHDhik8PFzJyckaPXq0WrdurZIlSzrqtAAAwF2Koh8AAEA+SUpK0oYNG1S1alUtX75cCQkJiouLk5eXlwoVKuTo8AAAAO46+/btU9euXY3P48aNkyS1a9dOI0eO1KFDh7RixQrFxcWpRIkSeuihh9S/f395eHgY34mIiNDo0aPVrVs3mc1mtWjRQkOHDs33cwEAAKDoBwAAkE88PDz0xhtvaM6cOSpXrpwKFSpEsQ8AAMCBGjRooIMHD95w/9y5c/+1j6JFi2rixIl5GRYAAMAtMTs6AAAAgLvJ/fffr7Nnzzo6DAAAAAAAANxhKPoBAADkowEDBujjjz/WH3/84ehQAAAAAAAAcAdheU8AAIB8FBERocuXL+uZZ55R0aJF5efnZ7PfZDLp66+/dlB0AAAAAAAAcFUU/QAAAPJRtWrVVL16dUeHAQAAAAAAgDsMRT8AAIB8NH78+Dztb+bMmfrhhx909OhReXp6Kjg4WAMHDtT9999vtElMTNT48eP17bffKikpSaGhoRoxYoTNU4ZnzpzRyJEj9euvv6pQoUJq27at3njjDVksDBcBAAAAAABcAe/0AwAAcGHbt29X586dtXTpUs2bN08pKSnq2bOnEhISjDZjx47V+vXrNXnyZC1cuFAXLlxQnz59jP2pqal66aWXlJycrCVLlmj8+PH66quv9OGHHzrilAAAAAAAAHALuHUbAAAgn23atElr1qzRuXPnlJiYmGX/ggULctzX3LlzbT6PHz9eDRs21P79+1WvXj3FxcVp2bJlioiIUMOGDSWlFwFbtWqlPXv2KCgoSJs2bdLhw4c1b948+fn5qWrVqurfv78iIiLUp08feXh43N4JAwAAAAAAwO4o+gEAAOSjOXPmKCIiQmXKlFGFChXk5eWVp/3HxcVJknx8fCRJ+/btU3JyskJCQow2FSpUUEBAgFH027NnjypXrmyz3GdoaKhGjhypw4cP64EHHsjx8a1Wq6xWax6dTaZ+ZZWsSv/vbme97n/vdtb0/LBH3uU+FPLUQJ7aIk+dE3lqy8556gz5DwAAcKej6AcAAJCPPvvsMz3//PMaOnRonvedlpamsWPHqnbt2qpcubIkKSoqSu7u7vL29rZp6+vrq8jISKNN5oKfJONzRpucio2NldmctyvIm81mpaamKiUlRckpyXnat0v6/znTlJQUyeTYUJxBSkqKUlNTFRcXp7S0NIfFQZ5ehzy1QZ46KfLUhr3z1JG5DwAAcLeg6AcAAJCPLl++rGbNmtml7/DwcP3111/67LPP7NJ/Tnh7e8vNzS3P+42Ni5XFYpG7xT3P+3Y1GU9KWCwWmUzMUlssFrm5ueX5U7O3gjz9B3lqizx1TuSpLXvnaWpqql36BQAAwD8o+gEAAOSjJk2aaNeuXcb79fLKqFGjtGHDBn366acqVaqUsd3Pz0/JycmKjY21edovOjpa/v7+Rpu9e/fa9BcVFSVJRpucMplMdpk4NcmU/hQGc7L/4HqkM6XnhzNM2JOn2eB6pCNPnRvXI52d89QZ8h8AAOBOR9EPAADAzvbv32/83KFDB40cOVKJiYkKCQnJsuymJFWrVi3HfVutVo0ePVpr167VwoULVa5cOZv91atXl7u7u7Zu3aqWLVtKko4ePaozZ84oKChIkhQUFKQZM2YoOjpavr6+kqQtW7aoSJEiqlixYm5PFwAAAAAAAA5A0Q8AAMDOOnToYHN3u9Vq1ezZszV79
uws200mkw4cOJDjvsPDw7Vq1SpNnz5dhQsXNt7B5+XlJU9PT3l5ealDhw4aP368fHx8VKRIEY0ZM0bBwcFG0S80NFQVK1bUm2++qUGDBikyMlKTJ09W586d5eHhkTcXAQAAAAAAAHZF0Q8AAMDOFixYYLe+Fy9eLEnq0qWLzfZx48apffv2kqS3335bZrNZ/fr1U1JSkkJDQzVixAijrZubm2bMmKGRI0eqY8eOKliwoNq1a6d+/frZLW4AAAAAAADkLYp+AAAAdnbmzBmFhYWpWLFied73wYMH/7VNgQIFNGLECJtC3/XKlCmj2bNn52VoAAAAAAAAyEdmRwcAAABwpxsyZIhOnjzp6DAAAAAAAABwB6PoBwAAYGdWq9XRIQAAAAAAAOAOR9EPAAAAAAAAAAAAcHG80w8AACAfrFq1Srt27frXdiaTSd27d7d/QAAAAAAAALijUPQDAADIBwsWLMhRO4p+AAAAAAAAuBUU/QAAAPLB0qVLVbNmTUeHAQAAAAAAgDsU7/QDAAAAAAAAAAAAXBxFPwAAAAAAAAAAAMDFUfQDAAAAAAAAAAAAXBzv9AMAALCzP//809EhAAAAAAAA4A7Hk34AAAAAAAAAAACAi6PoBwAAAAAAAAAAALg4in4AAAAAAAAAAACAi6PoBwAAAAAAAAAAALg4in4AAAAAAAAAAACAi6PoBwAAAAAAAAAAALg4in4AAAAAAAAAAACAi6PoBwAAAAAAAAAAALg4in4AAAAAAAAAAACAi6PoBwAAAAAAAAAAALg4in4AAAAAAAAAAACAi6PoBwAAAAAAAAAAALg4in4AAAAAAAAAAACAi6PoBwAAAAAAgLvSjh071Lt3b4WGhiowMFDr1q2z2W+1WjVlyhSFhoaqZs2a6t69u44fP27T5vLly3rjjTdUu3Zt1a1bV2+//bbi4+Pz8SwAAADSUfQDAAAAAADAXSkhIUGBgYEaMWJEtvtnz56thQsXauTIkVq6dKkKFiyonj17KjEx0WgzcOBAHT58WPPmzdOMGTO0c+dODR8+PL9OAQAAwEDRDwAAAAAAAHelsLAwvf7663rkkUey7LNarVqwYIFefvllNW/eXFWqVNH777+vCxcuGE8EHjlyRL/88ovGjBmjWrVqqW7duho6dKhWr16t8+fP5/fpAACAu5zF0QEAAAAAAAAAzubUqVOKjIxUSEiIsc3Ly0u1atXS7t271bp1a+3evVve3t6qUaOG0SYkJERms1l79+7Ntph4M1arVVarNc/OIVPPdujTNWVcXqtVksmhoTgR6z8XxqGcIQbnQJ5mhzx1NuRpduyXpzkdH1D0AwAAAAAAAK4TGRkpSfL19bXZ7uvrq6ioKElSVFSUihcvbrPfYrHIx8fH+H5uxMbGymzO24W5zGazCplSZU1NkVKS87RvV5QxZ5qSkiITk9RSaopMqalKiItTWlqaw8IgT22Rp9chT50SeXodO+dpTvuk6AcAAAAAAAA4AW9vb7m5ueV9x1djJTeLZHHP+75dTMaTEhaLRSZmqdPzws1NXgW9HB0JeZoJeXod8tQpkafXsXOepqam5qid0xf9Zs6cqR9++EFHjx6Vp6engoODNXDgQN1///1Gm8TERI0fP17ffvutkpKSFBoaqhEjRsjPz8+BkQMAAAAAAMBV+fv7S5Kio6NVokQJY3t0dLSqVKkiSfLz89PFixdtvpeSkqKYmBjj+7lhMpnsNHHKZKzh/y+FycRV+YdJzvGYjjPE4CTI02yQp06HPM2G/fI0p+ODvF0vwA62b9+uzp07a+nSpZo3b55SUlLUs2dPJSQkGG3Gjh2r9evXa/LkyVq4cKEuXLigPn36ODBqAAAAAAAAuLKyZcvK399fW7duNbZduXJFv//+u4KDgyVJwcHBio2N1b59+4w227ZtU1pammrWrJnvMQMAgLub0z/pN3fuXJvP48ePV8OGDbV//37Vq1dPcXFxWrZsmSIiItSwYUNJ6UXAV
q1aac+ePQoKCnJA1AAAAAAAAHB28fHxOnHihPH51KlTOnDggHx8fBQQEKCuXbvq448/1r333quyZctqypQpKlGihJo3by5JqlChgho1aqRhw4YpPDxcycnJGj16tFq3bq2SJUs66rQAAMBdyumLfteLi4uTJPn4+EiS9u3bp+TkZIWEhBhtKlSooICAAIp+AAAAAAAAuKF9+/apa9euxudx48ZJktq1a6fx48frxRdf1NWrVzV8+HDFxsaqTp06mjNnjgoUKGB8JyIiQqNHj1a3bt1kNpvVokULDR06NN/PBQAAwKWKfmlpaRo7dqxq166typUrS5KioqLk7u4ub29vm7a+vr6KjIzM9TGsVqvxAsq8lPc94k5ilSQ75F1uOT4CODPyFK7Annlqj/EBAAAAHKtBgwY6ePDgDfebTCb1799f/fv3v2GbokWLauLEifYIDwAAIFdcqugXHh6uv/76S5999pndjhEbGyuzOW9fdWg2m1UwNU0pKSmyJifnad+uKX3SNCUlWbziUzKlpMiUmqqrcXFKS0tzWBzk6fXI08zIU2dFnmZm7zx1ZO4DAAAAAAAA/8Zlin6jRo3Shg0b9Omnn6pUqVLGdj8/PyUnJys2Ntbmab/o6Gj5+/vn+jje3t5yc3PLk5gzs8ZckdVikdzd87xvV5P+pMRVWSzuMpmYpJbFIjc3N3l5FXF0JORpJuTpdchTp0SeXsfOeZqammqXfgEAAAAAAIC84PRFP6vVqtGjR2vt2rVauHChypUrZ7O/evXqcnd319atW9WyZUtJ0tGjR3XmzJlbep+fyWRi4hT5ziSRd3B65ClcgT3zlPwHAAAAAACAM3P6ol94eLhWrVql6dOnq3DhwsZ7+ry8vOTp6SkvLy916NBB48ePl4+Pj4oUKaIxY8YoODj4lop+AAAAAAAAAAAAgKtx+qLf4sWLJUldunSx2T5u3Di1b99ekvT222/LbDarX79+SkpKUmhoqEaMGJHvsQIAAAAAAAAAAACO4PRFv4MHD/5rmwIFCmjEiBEU+gAAAAAAAAAAAHBXMjs6AAAAAAAAAAAAAAC3h6IfAAAAAAAAAAAA4OIo+gEAAAAAAAAAAAAujqIfAAAAAAAAAAAA4OIo+gEAAAAAAAAAAAAujqIfAAAAAAAAAAAA4OIo+gEAAAAAAAAAAAAujqIfAAAAAAAAAAAA4OIo+gEAAAAAAAAAAAAujqIfAAAAAAAAAAAA4OIo+gEAAAAAAAAAAAAujqIfAAAAAAAAAAAA4OIo+gEAAAAAAAAAAAAujqIfAAAAAAAAAAAA4OIo+gEAALiwHTt2qHfv3goNDVVgYKDWrVtns99qtWrKlCkKDQ1VzZo11b17dx0/ftymzeXLl/XGG2+odu3aqlu3rt5++23Fx8fn41kAAAAAAADgdlH0AwAAcGEJCQkKDAzUiBEjst0/e/ZsLVy4UCNHjtTSpUtVsGBB9ezZU4mJiUabgQMH6vDhw5o3b55mzJihnTt3avjw4fl1CgAAAAAAAMgDFP0AAABcWFhYmF5//XU98sgjWfZZrVYtWLBAL7/8spo3b64qVaro/fff14ULF4wnAo8cOaJffvlFY8aMUa1atVS3bl0NHTpUq1ev1vnz5/P7dAAAAAAAAHCLKPoBAADcoU6dOqXIyEiFhIQY27y8vFSrVi3t3r1bkrR79255e3urRo0aRpuQkBCZzWbt3bs332MGAAAAAADArbE4OgAAAADYR2RkpCTJ19fXZruvr6+ioqIkSVFRUSpevLjNfovFIh8fH+P7uWG1WmW1Wm8x4pv0K6tkVfp/dzvrdf97t7Om54c98i73oZCnBvLUFnnqnMhTW3bOU2fIfwAAgDsdRT8AAADkmdjYWJnNebuYhNlsVmpqqlJSUpSckpynfbuk/58zTUlJkUyODcUZpKSkKDU1VXFxcUpLS3NYHOTpdchTG+SpkyJPbdg7Tx2Z+wAAAHcLin4AAAB3KH9/f0lSdHS0S
pQoYWyPjo5WlSpVJEl+fn66ePGizfdSUlIUExNjfD83vL295ebmdhtRZy82LlYWi0XuFvc879vVZDwpYbFYZDIxS22xWOTm5iYvLy9Hh0KeZkKe2iJPnRN5asveeZqammqXfgEAAPAPin4AAAB3qLJly8rf319bt25V1apVJUlXrlzR77//rmeffVaSFBwcrNjYWO3bt0/Vq1eXJG3btk1paWmqWbNmro9pMpnsMnFqkin9KQzmZP/B9UhnSs8PZ5iwJ0+zwfVIR546N65HOjvnqTPkPwAAwJ2Ooh8AAIALi4+P14kTJ4zPp06d0oEDB+Tj46OAgAB17dpVH3/8se69916VLVtWU6ZMUYkSJdS8eXNJUoUKFdSoUSMNGzZM4eHhSk5O1ujRo9W6dWuVLFnSUacFAAAAAACAXKLoBwAA4ML27dunrl27Gp/HjRsnSWrXrp3Gjx+vF198UVevXtXw4cMVGxurOnXqaM6cOSpQoIDxnYiICI0ePVrdunWT2WxWixYtNHTo0Hw/FwAAAAAAANw6in4AAAAurEGDBjp48OAN95tMJvXv31/9+/e/YZuiRYtq4sSJ9ggPAAAAAAAA+cTs6AAAAAAAAAAAAAAA3B6KfgAAAAAAAAAAAICLo+gHAAAAAAAAAAAAuDiKfgAAAAAAAAAAAICLo+gHAAAAAAAAAAAAuDiKfgAAAAAAAAAAAICLo+gHAAAAAAAAZOOjjz5SYGCgzX+PPvqosT8xMVHh4eFq0KCBgoOD1bdvX0VFRTkwYgAAcDezODoAAAAAAAAAwFlVqlRJ8+bNMz67ubkZP48dO1YbN27U5MmT5eXlpdGjR6tPnz5asmSJI0IFAAB3OYp+AAAAAAAAwA24ubnJ398/y/a4uDgtW7ZMERERatiwoaT0ImCrVq20Z88eBQUF5XOkAADgbsfyngAAAAAAAMAN/P333woNDVWzZs30xhtv6MyZM5Kkffv2KTk5WSEhIUbbChUqKCAgQHv27HFQtAAA4G7Gk34AAAAAAABANmrWrKlx48bpvvvuU2RkpKZNm6bOnTvrm2++UVRUlNzd3eXt7W3zHV9fX0VGRt7S8axWq6xWa16Efn3PdujTNWVcXqtVksmhoTgR6z8XxqGcIQbnQJ5mhzx1NuRpduyXpzkdH1D0AwAAAAAAALIRFhZm/FylShXVqlVLTZo00XfffSdPT888P15sbKzM5rxdmMtsNquQKVXW1BQpJTlP+3ZFGXOmKSkpMjFJLaWmyJSaqoS4OKWlpTksDPLUFnl6HfLUKZGn17Fznua0T4p+AAAAAAAAQA54e3urfPnyOnHihEJCQpScnKzY2Fibp/2io6OzfQdgTvt3c3PLq3D/cTVWcrNIFve879vFZDwpYbFYZGKWOj0v3NzkVdDL0ZGQp5mQp9chT50SeXodO+dpampqjtpR9AMAAAAAAAByID4+XidPnpS/v7+qV68ud3d3bd26VS1btpQkHT16VGfOnFFQUNAt9W8ymew0ccpkrOH/L4XJxFX5h0nO8ZiOM8TgJMjTbJCnToc8zYb98jSn4wOKfgAAAAAAAEA23nvvPTVp0kQBAQG6cOGCPvroI5nNZrVp00ZeXl7q0KGDxo8fLx8fHxUpUkRjxoxRcHDwLRf9AAAAbgdFPwAAAAAAACAb586d04ABA3T58mUVL15cderU0dKlS1W8eHFJ0ttvvy2z2ax+/fopKSlJoaGhGjFihIOjBgAAdyuKfgAAAAAAAEA2Pvjgg5vuL1CggEaMGEGhDwAAOAWzowMAAAAAAAAAAAAAcHso+gEAAAAAAAAAAAAujqIfAAAAAAAAAAAA4OIo+gEAAAAAAAAAAAAujqIfAAAAAAAAAAAA4OIo+gEAAAAAAAAAAAAujqIfAAAAAAAAAAAA4OIo+gEAAAAAAAAAAAAujqIfAAAAAAAAAAAA4OIo+gEAAAAAAAAAAAAujqIfAAAAAAAAAAAA4OIo+gEAAAAAAAAAAAAuj
qIfAAAAAAAAAAAA4OIo+gEAAAAAAAAAAAAujqIfAAAAAAAAAAAA4OIo+gEAAAAAAAAAAAAujqIfAAAAAAAAAAAA4OIo+gEAAAAAAAAAAAAujqIfAAAAAAAAAAAA4OIo+gEAAAAAAAAAAAAujqIfAAAAAAAAAAAA4OIo+gEAAAAAAAAAAAAujqIfAAAAAAAAAAAA4OIo+gEAAAAAAAAAAAAujqIfAAAAAAAAAAAA4OIo+gEAAAAAAAAAAAAujqIfAAAAAAAAAAAA4OIo+gEAAAAAAAAAAAAujqIfAAAAAAAAAAAA4OIo+gEAAAAAAAAAAAAujqIfAAAAAAAAAAAA4OIo+gEAAAAAAAAAAAAujqIfAAAAAAAAAAAA4OIo+gEAAAAAAAAAAAAujqIfAAAAAAAAAAAA4OIo+gEAAAAAAAAAAAAujqIfAAAAAAAAAAAA4OIo+gEAAAAAAAAAAAAu7o4q+i1atEhNmzZVjRo19PTTT2vv3r2ODgkAAMBlMJYCAAC4dYylAACAo90xRb9vv/1W48aN06uvvqqvvvpKVapUUc+ePRUdHe3o0AAAAJweYykAAIBbx1gKAAA4gzum6Ddv3jw988wz6tChgypWrKjw8HB5enpq2bJljg4NAADA6TGWAgAAuHWMpQAAgDOwODqAvJCUlKT9+/frpZdeMraZzWaFhIRo9+7dOerDarVKklJSUoyf85I1LVVpXgVlNZnyvG9XY5VVcpNSixSRSVwPUxFPpaSlypSS4uhQyNNMyFNb5KlzIk9t2TtPU1NTJcku4wRHc4WxVGpqqkoVLCV3ued5367GarUq3hSvwoULy8TfhfIt6KvU1FSlOMHvKPL0H+SpLfLUOZGntuydp4ylbs7eYymlpkoFSkn8f19Wq1XWQvFKKcD/9yVJBXzT88MJfkeRp/8gT69Dnjol8vQ6ds7TnI6l7oii36VLl5SamipfX1+b7b6+vjp69GiO+khLS5Mk7du3L8/jM5Qskv4fJPk7OgDncuyIoyP4B3maCXlqgzx1UuSpjXzI04wxw53EVcZSbYu1lYrZrXu4sMijkYpUpKPDkESe4sbIU7iC/MhTxlLZy5d5KbVljjpDcUcH4ETSJB2KlJzkdxR5mgl5+g/y1HmRp//Ipzz9t7HUHVH0ywsWi0U1atSQ2WymKg0AALKwWq1KS0uTxcLwKTuMpQAAwM0wlro5xlIAAOBmcjqWuiNGWsWKFZObm1uWlyNHR0fLz88vR32YzWZ5eHjYIzwAAACnxlgKAADg1jGWAgAAzsLs6ADygoeHh6pVq6atW7ca29LS0rR161YFBwc7MDIAAADnx1gKAADg1jGWAgAAzuKOeNJPknr06KHBgwerevXqqlmzpv773//q6tWrat++vaNDAwAAcHqMpQAAAG4dYykAAOAM7piiX6tWrXTx4kV9+OGHioyMVNWqVTVnzpwcL6MAAABwN2MsBQAAcOsYSwEAAGdgslqtVkcHAQAAAAAAAAAAAODW3RHv9AMAAAAAAAAAAADuZhT9AAAAAAAAAAAAABdH0Q8AAAAAAAAAAABwcRT9AAAAAAAAAAAAABdncXQAcF1vvfWWvvrqqyzbf/jhB3388cfGPnd3d5UuXVpPPvmkevfuLYvFoqNHj2rEiBE6cuSI4uLiVKJECbVp00Z9+vSRu7u7JGnp0qVasWKF/vrrL0lStWrVNGDAANWsWTP/ThIu73byNDExUSNGjND+/ft15MgRNW7cWNOnT8/SV1JSkqZNm6avv/5akZGRKlGihF555RU99dRTdj8/3DneeustxcbGavr06UbevvHGG+rVq5fRZt26dXr11Vd18ODBG+Z2hjJlyuinn35SfHy8Jk6cqHXr1uny5csqW7asunTpomeffTY/TgvATTCWgitgLAVXwVgKuPswloIrYCwFV8FY6s5B0Q+3pVGjRho3bpzNtuLFi9vsS0pK0saNGzVq1Ci5u7vrpZdekru7u9q2batq1arJy
8tLf/75p4YNGyar1aoBAwZIkn799Ve1bt1atWvXloeHh+bMmaMXXnhBq1evVsmSJfP9XOG6bjVPU1NTVaBAAXXp0kVr1qy5Yf/9+/dXdHS03n33Xd1zzz2KjIxUWlqaXc8Jd74CBQpo9uzZ6tixo3x8fLLsf+edd/TGG28Yn0NDQzVu3Dg1atRIkuTm5iZJGj9+vLZt26YJEyaoTJky2rx5s8LDw1WiRAk1a9Ysf04GwA0xloIrYCwFV8RYCrg7MJaCK2AsBVfEWMp1UfTDbfHw8JC/v/+/7nvuuee0bt06/fTTT3rppZdUrlw5lStXzmhbpkwZbd++XTt37jS2TZw40aa/MWPGaM2aNdq6davatm2b9yeDO9at5mmhQoUUHh4uSfrtt98UGxub5fs///yzduzYoXXr1qlo0aKSpLJly9rnRHBXCQkJ0d9//62ZM2fqzTffzLLfy8tLXl5eNtu8vb2z5Pru3bvVtm1bNWjQQJLUsWNHff7559q7dy+DK8AJMJaCK2AsBVfEWAq4OzCWgitgLAVXxFjKdfFOP+SbAgUKKDk5Odt9f//9t3755RfVq1fvht+/evWqUlJSsr2zAMgrN8vT7Pz000+qXr265syZo0aNGqlly5Z67733dO3aNTtGibuB2WzWgAED9Omnn+rcuXO33E9wcLB++uknnT9/XlarVdu2bdOxY8cUGhqah9ECyA+MpeAKGEvBWTCWAnA9xlJwBYyl4CwYS7kunvTDbdmwYYOCg4ONz40aNdKHH35o08ZqtWrr1q3atGmTnn/+eZt9nTp10v79+5WUlKSOHTuqf//+NzxWRESESpQooZCQkLw9CdzxbjdPb+bkyZPatWuXChQooGnTpunSpUsKDw/X5cuXsyzdAOTWI488oqpVq+rDDz/U2LFjb6mPYcOGadiwYXr44YdlsVhkMpk0ZsyYm/5jFkD+YSwFV8BYCq6KsRRw52MsBVfAWAquirGUa6Loh9vSoEEDjRw50vhcsGBB4+eMX2jJycmyWq1q06aN+vbta/P9Dz74QPHx8frzzz/1/vvva+7cuXrxxRezHGfWrFn69ttvtWDBAhUoUMBu54M70+3m6c1YrVaZTCZFREQYj7S/9dZb6tevn0aMGCFPT888Ow/cnQYOHKhu3bqpZ8+et/T9hQsXas+ePfr4448VEBCgnTt3Gmun849VwPEYS8EVMJaCK2MsBdzZGEvBFTCWgitjLOV6KPrhthQsWFD33ntvtvsyfqG5u7urRIkSsliyplvp0qUlSRUrVlRqaqqGDx+uF154wXjRpyTNnTtXs2bN0rx581SlShX7nAjuaLebpzfj7++vkiVL2qxhXaFCBVmtVp07d07ly5e/ndAB1atXT6GhoZo4caLat2+fq+9eu3ZNH3zwgaZOnarGjRtLkqpUqaIDBw5o7ty5DK4AJ8BYCq6AsRRcGWMp4M7GWAqugLEUXBljKdfDO/1gNxm/0AICAnL0C8tqtSolJUVpaWnGttmzZ2v69OmaM2eOatSoYc9wcZfKbZ5er3bt2rpw4YLi4+ONbceOHZPZbFapUqXyMlTcxd544w2tX79eu3fvztX3UlJSlJycLJPJZLPdzc1NVqs1L0MEYAeMpeAKGEvBFTCWAu5OjKXgChhLwRUwlnItPOkHh/j6669lsVgUGBgoDw8P/fHHH5o4caIee+wxubu7S0pfOuHDDz/UxIkTVaZMGUVGRkqSChUqpMKFCzsyfNxFDh8+rOTkZF2+fFnx8fE6cOCAJKlq1aqSpDZt2mj69OkaMmSI+vXrp0uXLmnChAnq0KEDSyggzwQGBurxxx/XwoULc/W9IkWKqH79+powYYI8PT0VEBCgHTt2aMWKFXrrrbfsFC2A/MBYCq6CsRScAWMpANdjLAVXwVgKzoCxlGuh6AeHsFgsmjNnjo4dOyZJCggI0PPPP6/u3bsbbZYsWaLk5GT169fP5rt9+vTJ1drWwO3o1auXTp8+bXxu2
7atJOngwYOSpMKFC+uTTz7RmDFj1KFDBxUtWlSPPfaYXnvtNQdEiztZv3799O233+b6e5MmTdKkSZM0cOBAxcTEKCAgQK+//rqeffZZO0QJIL8wloKrYCwFZ8FYCkBmjKXgKhhLwVkwlnIdJivPUQIAAAAAAAAAAAAujXf6AQAAAAAAAAAAAC6Ooh8AAAAAAAAAAADg4ij6AQAAAAAAAAAAAC6Ooh8AAAAAAAAAAADg4ij6AQAAAAAAAAAAAC6Ooh8AAAAAAAAAAADg4ij6AQAAAAAAAAAAAC6Ooh8AAAAAAAAAAADg4ij6AQAAAAAAAAAAAC6Ooh8AAAAAAAAAAADg4ij6AQAAAAAAAAAAAC6Ooh8AAAAAAAAAAADg4ij6AQAAAAAAAAAAAC6Ooh8AAAAAAAAAAADg4ij6AQAAAAAAAAAAAC6Ooh8AAAAAAAAAAADg4ij6AQAAAAAAAAAAAC6Ooh8AAACAO9ry5csVGBiowMBAffTRR44OR5LUtGlTI6brzZ8/X48++qiqV6+uwMBAPfnkk5JktG/atGl+h2vDGa8nAACAM8qLcZO9x17OMsYEkDcsjg4AuBt99NFHmjp1qvH5oYce0ieffGLTZt++ferQoYPNtr1796pAgQL5EmNeOXjwoBYtWqTt27fr/Pnzslqt8vf3V7ly5RQWFqZHH31UJUuWlCT9+uuv6tq1a5Y+PD09VbZsWT3yyCN68cUXVbhwYUnSqVOn1KxZM5tjZdalSxdt375dkjRu3Di1b9/+prE2bdpUp0+fttlmsVhUvHhx1alTR7169dIDDzyQ+4vgIq7PS4vFooIFC8rf319VqlRRu3bt9PDDDzswQgAAbCUkJGjp0qVau3atDh8+rISEBPn7+6tSpUpq1aqVHnvsMXl4eDg6zFxZvXq1xo0b5+gwjAklLy8vde/e3bHB5FDPnj21adMm4/OkSZPUunVrB0YEAAAc5W6ae8vOxo0bNX/+fO3fv18JCQny8vKSv7+/HnjgAbVq1Yr5HeAORtEPcAJbt27V6dOnVaZMGWPb0qVLHRhR3pg6daqmTp0qq9Vqs/3EiRM6ceKENm/erHPnzmnw4ME37efatWs6fPiwDh8+rB9//FFLliwxCn/2lpKSogsXLui7777TunXrNGvWLIWEhOTLsR0tJSVFcXFxiouL09GjR/Xtt9+qSZMmioiIUJEiRRwdHgDgLnf48GH17t1bJ0+etNl++vRpnT59Whs2bFDlypVVtWpVB0V4c1OmTFFiYmKW7evXrzd+fvXVVxUSEmKMexYtWiRJ+TIRlTFJVqZMmSxFv7CwMCOWgIAAu8eSExcvXtS2bdtstq1evZqiHwAAkHTnzr1l56uvvtJbb71ls+3ixYu6ePGiDh48KDc3N5uiX36OMQHYH0U/wAmkpaXpyy+/VP/+/SWl37W+atUqB0d1e+bPn2+z5MDDDz+sJ554Qv7+/oqJidGePXv07bff3vD7/v7+mjx5stLS0rR3715NnjxZycnJOnTokJYsWaKePXvaNf6hQ4eqatWqio6O1ocffqjDhw8rOTlZY8eOddk/m6SkJJnNZlks//5X/8MPP6yXXnpJMTEx2rp1q5YsWaLk5GStX79eb775pqZPn/6vfSQkJKhQoUJ5EToAADYuX76sF198UWfOnJEklShRQj179lRgYKDi4+O1fft2LV++3MFR3lyNGjWy3X7hwgXj53bt2qlcuXLG57p169o9rpzw9fWVr6+vo8Ow8d133yklJcVm2y+//KK4uDh5eXk5KKrcuXr1qgoWLOjoMAAAuCPdiXNvN/LBBx9Iksxms3r37q26devq6tWr+vvvv7Vp0yaZzbZv/HKWMSaAvEHRD3CwwoULKz4+XsuXL1ffvn1lNpv17bffKj4+3th3I+vWrdOnn36q/fv36+rVqypTpowef/xx/ec//5Gnp6dNuy+//FKHDh3SpUuXlJycLH9/fz344IN69dVXVbZsWaPtW2+9p
a+++kqSNHfuXO3atUvLli3TpUuXVK1aNY0cOVJVqlS56TnFxMToww8/ND736NEjyx1GLVu21IABA3TixIls+/Dw8DAGHfXr19eRI0eMybudO3favehXuXJl4/jFihVTly5dJEl//fWXYmJi5OPjI0nav3+/Zs6cqV27dhnbM5YCrV69uiQpLi5O9evXV1pammrXrq3FixdLkr744gsNHTpUkrRgwQI1aNBAUvqSE1FRUfLz89PmzZuNmHbu3Kk5c+Zoz549unLlikqUKKFHHnlEr7zyihGPZPtnOGvWLG3dulWrVq1SVFSU1q1bZ/PnfSO+vr7G+Tdr1kwPPfSQevfuLUn68ccftXXrVjVs2FCS7bKo69ev17hx47Rlyxb5+Pjop59+0o4dO7Rw4UL9+eefunjxoq5du6aiRYuqTp06evnll418OnjwoJ544glJ0uOPP66IiAhJ6YPVGTNmGMcuW7as4uPjVa9ePaWmpqpmzZr64osvcvTnCgC4M3zyySdGwc/Ly0tffvmlsVy4JDVv3lwvvfSS3NzcbtpPbsZIly5d0gcffKBffvlFkZGRcnd3V4kSJVStWjV16tRJ9evXl5S+/PgHH3ygX3/9VZcuXVLBggVVokQJ1apVS926dTN+72X+/Xnw4MFslzlv3ry5pPTi3/jx4433/5UpU0Y//fST0S41NVVLlizR119/bdyoVKpUKT344IMaNWqUJOnQoUOaOXOmDhw4oKioKMXHx8vb21s1atTQiy++qHr16knKuhTW6dOnsxx3+fLlGjJkiCSpT58+6tu3r9H+77//1owZM7RlyxZFR0ercOHCqlmzpl544QVj7CDZLuverl07tW7dWpMnT9ahQ4fk6+urF154Idtl328k881krVu31urVq5WUlKS1a9dmu8T7kSNHNHv2bP3666+KjIxUkSJFVLlyZb388ss2ce7Zs0effPKJfvvtN12+fFk+Pj564IEHNGDAAFWtWjXLeYwfP974bnZ/XtdfOz8/P82fP1+nTp3S6NGj1b59e40fP1579uzRqVOndPnyZbm7u6t8+fJq06aNunXrluUGrpvFWKhQIbVo0UKSVK9ePX366afG9y5fvqyQkBClpqaqcuXK+uabb3J8vQEAcCW3M/f2/fffa9GiRTpw4ICuXbumkiVLKiwsTL1791aJEiVs2m7dulURERE6dOiQSpYsqR49etz0hp6TJ09q5syZ2rx5syIjI+Xl5aUGDRqob9++qlChwi2da1RUlM6fPy9Jqlq1qlHkzNCzZ09dvXrVZlt2Y5bMr8zJTsb8jL3OA8CtM/97EwD21LJlS7m7u+vcuXP65ZdfJP2zvECbNm1u+L0pU6bo1Vdf1datWxUbG6vk5GQdP35cH330kXr27KmkpCSj7c8//6z169fr9OnTSkhIUHJyss6cOaPly5erY8eOio6OzvYYI0eO1PTp03X+/HklJSVp9+7deuWVV7LcRX299evXGwMmb29vvfbaa9m2c3d3z/Ev/8zLSSYnJ+foO3nl+rvDM47/448/qmPHjlqzZo2ioqKUnJysqKgorVmzRp06ddKPP/5ofL9y5cqS0ouEGd/fvXu30eeePXskpQ+UoqKiJEl16tQx9n/xxRfq0qWL1q9fb0xKnj59WvPnz1fHjh0VExOTbeyjR4/WvHnzFBkZmWWZ1dxo0qSJzbKmN7obrmvXrvrhhx905coVY9vu3bu1Zs0a/f3334qLi1NycrIiIyP1/fffq1OnTjpy5Iik9EJrRvHy999/N76fcW0y/7x3716lpqZK4o40ALgbZS7wdO/e3abgl8HX11dFixa9aT+5GSO99tpr+vzzz3XmzBklJycrISFBx48f1+rVq42bdFJSUtSzZ0+tWrVKkZGRxlLZGTcv/fHHH3lzATJJTk5Wr169NGrUKOPGoMTERP3999/6/PPPjXZ//fWXVq1apSNHjigmJkYpKSm6ePGiNm7cqK5du2ZZGvNW7N27V
+3atdPy5ct17tw5JScn6/Lly/r555/Vo0cPffbZZ9l+b/v27erVq5f27dunpKQknT17Vu+++662bNmSo+OePXtWu3btkiQ98MAD6tWrl7Fv9erVWdr/8ssvat++vb766ivjz/PSpUv69ddftXPnTqPdsmXL9Nxzz2nNmjWKjIw0xno///yzDhw4kJtLk62VK1dq5MiROn78uM34etGiRdq9e7dxzISEBP3vf//T+++/r2HDhtn08W8x3nvvvUZBeufOnUaxXJI2bNhgjKdu9u8OAABc3a3OvU2YMEH9+/fX9u3bjfmMU6dOadGiRWrfvr3NMvO//fabXnzxRWM8c/LkSY0aNUrz58/Ptu/9+/erffv2+uKLL4zxyMWLF/Xdd9/pqaee0t69e2/pXAsVKiSTySQp/cayOXPm6O+//7Zpk5crC9jrPADcOp70AxzM19dXjRs31tq1a/XFF1+oVKlSRsHjqaeespmsybB3715jeUV/f3+99tprKlmypD799FNt2LBBO3fu1Pz5840Jj9DQUFWrVk0lSpRQ4cKFlZiYqC1btuiTTz5RVFSUvvjiC+MprszOnTungQMHqnz58nr33Xd19uxZnT59Wps2bVLjxo1veE4HDx40fg4KCrJ56vD333/PUrS7WdEmLS1Nf/zxh02RKaOAlh+io6M1bdo043PGclYJCQl65513jHN59tln1aRJE23YsEGfffaZkpOT9c4776hhw4YqVKiQ6tWrpz///FOJiYk6cOCAatasaVPMyigA/vbbb8a2jOty/vx5jRo1SmlpaSpcuLAGDBig8uXLa/Xq1Vq+fLmOHTumSZMmKTw8PEv8J0+eVJcuXRQWFqYzZ87c1rsQg4KCjMm3G010RUdHa8iQIapUqZJOnTolKX35smHDhikgIECFCxdWamqq9u/fr4iICF29elXz58/X6NGjZTKZVLt2ba1fv14nTpzQxYsX5ePjYzNA3L17t9q0aWNTMKXoBwB3l/j4eJsJlsw3yeRWTsdIV65c0a+//iopvajUt29fWSwWnTlzRlu2bDGWsz569KiOHz8uSQoJCdELL7yg1NRUnTp1Shs3bpS7u/sNY3nggQe0aNEijRkzxvg9O2XKFPn5+cnPz++G31u4cKE2bdokKX0Cp1evXqpRo4bOnTtnM46877779NZbb6lcuXIqUqSIrFarjh8/rrFjxyopKUmzZs3Sgw8+qA4dOqhhw4bq3LmzpH+WXJdu/p4Xq9WqIUOGGDd+tWzZUh06dNCePXs0Y8YMpaWlaezYsWrSpIlKly5t893Tp0+rWbNmevrpp/XNN98YhbolS5bk6F3K3377rXFzU8uWLVWlShWVL19ex48f17Zt23Tx4kUVL15cUvoSmoMHD9a1a9ckpY8jOnfuLE9PT23fvt2YBDt//rxGjhxpFMWaN2+udu3aKTU1VZs2bbrpn2VOnTx5UqGhoXr22WeVnJxsvGOod+/eKl++vLy9vVWgQAHFxMRo9uzZ+v333/XVV1+pf//+KlWqVI5j7NChg7Zv3y6r1apVq1YZ/0bI/LRoq1atbvt8AABwVrcy9/b7779rzpw5ktLHQK+99prKly+v+fPnGysFhIeHG23ee+89Y44oJCREXbt21YEDB2xWUMhgtVr11ltvKTY2VpL0wgsvKDQ0VP/73//0wQcfKCEhQUOGDNGqVauMAl5OFSpUSEFBQdq9e7dSUlI0YcIETZgwQcWLF1eDBg3Uvn17m/f53cjQoUMVFxdnfP7iiy+0YsUKSelPEPr7+9v1PADcOop+gBN4+umntXbtWm3YsMFYricwMFA1a9bMtn3mpXc6dOig8uXLS5I6deqkDRs2GG0y/kFfv359zZgxQ/PmzdPZs2eNSY4M+/bty/Y4zz77rF588UVJ0rFjxzRx4kRJynKH0PUyDwq8vb1t9vXq1UuXL1+22Za5SJgh81JSmXl7exuTUPZ0o+WkevfuLZPJpM2bN+vSpUuSZCx7K
klhYWH6/ffftX//fl26dElbtmxR8+bNVadOHS1cuFBS+tNq5cuX19GjR1W2bFlFRUUZBcDMhcCMYtZ3331nPLmZMZElpf/Zf/fdd7p69apWr16tESNGZFmXvU2bNsYSorfL39/f+Dnzk3yZDRkyRM8884zNtqCgIO3atUuff/65Tp48mWUZicz5V7duXa1fv15SeoGvTJkySkhIUKVKlfTXX38Zxb6M62QymW5rshcA4Hqu/x10/bJKuZHTMZLFYpHJZJLValWxYsV077336t5775XFYlGnTp2M9pmXXfT391f58uVVpkwZmc1mPf/88zeNxcvLS3Xr1rVZYaB69er/uiz3ypUrjZ+HDBmijh07Gp+ffvpp4+fAwEDt2LFDM2bM0NGjR5WQkGCzCkDGuQYEBCggIMDYnnnJ9Zs5cOCADh8+LCn93CdOnCh3d3eFhYXpyJEjWrNmjZKTk7VmzRp1797d5ru+vr6aPHmyPDw8VKNGDaPod6Nl4K+X+Wm+li1bSpJatGihWbNmKSUlRd9//72ee+45SdLmzZuNJzjLli2refPmycPDQ1L6kqsZMo+/goODbW4CyzjG7SpTpoxmzpyZZbnOBx98UHPnztXevXt16dIlm6cArVar9u/fr1KlSuU4xkcffVRjxoxRXFyc8W+EpKQko1gcFBRk8+5IAADuRLcz99a5c2e98MILktJ/b4aFhRm/Sy9fvqzU1FRjnsLDw0MffPCBihYtqiZNmujo0aNZltD+888/dejQIUnpBbRmzZpJSv99XrNmTe3evVuHDx/W/v37jVfH5Mbo0aP1yiuv2IylMp6+++6777J9Dc/1Ms/JbdiwwTiHEiVKaObMmSpQoIAOHDhg1/MAcGso+gFOoFGjRipdurTOnj2r7777TpKyFE4yy7iDXJJmzJhhvO8ss6NHj0pKf8dLjx499L///e+G/WXckXO9jKWApPT32mXIXNTLTubJqnPnzt20bW7UrVtXw4YNM+6Cvv4uIavVarMt80TW9cWw3CpRooR69+5tFByPHTtm7KtVq5ZN25o1a2r//v027TLekyP9U/SzWq2qW7euTp06pZ07d+rYsWNGUatIkSJGcS/zn/fy5cuNdxtmFhcXpwsXLqhUqVI22zNPXt2ujDXhM+LLTpMmTbJsGzBggM2d5NfLnH/XX6cLFy5ISp+4u3Llig4ePKiEhATjjryKFSv+69JtAIA7y/W/gy5cuHBL7wrJzRjJ09NTrVu31jfffKPNmzerVatWcnd3V8WKFdWkSRO98MIL8vLyUvny5VW3bl3t3LlTK1eu1MqVK+Xp6akqVarokUceUdeuXY0CU17JPE642UoM48aNM25Ays6NxoO3EscDDzxg8yRcjRo1tGbNmiztMtSqVcu4Lpl/r+ckpuPHjxvjrsqVK+u+++6TlF7omjVrlqT0omBG0S/zGC4kJOSGfx45va63IzQ0NEvBb+/everWrdtNl7PPGIvnNMaM/F2yZIkOHTqkgwcPKjIy0ngqs3Xr1rd+EgAAuIjbmXvLXBgsXry4ypUrpyNHjshqterEiRNKS0sz9t9zzz0245maNWtmKfplHo8cOHDghje3Hzly5JaKZZUqVdLXX3+tH3/8UT/++KN27NihyMhIY//8+fP19NNP52gMfeDAAb3++utKTU1VoUKFNGPGDGNpfXufB4BbQ9EPcAJms1nt27c37s4tUKCAnnjiidvqMyUlRUlJSfr999+NySx/f38NHDhQZcuW1fnz5zVgwABJuuG73jI/pefm5mb8/G/vhst8N9ChQ4d05coVY4Lu119/VWJi4g3vpMpw/VJS5cqVy1LcuX6pykuXLhlLN2V8vlHbfzN06FBVrVpVFovFGNDldCmC7Nr5+fkZy0xlFP2k9DvE/Pz8tHPnTm3ZssW4Qyo4ODjXhcqEhIQs23x9fXPVx81kXnq0atWq2ba5fvmxM2fOGAW/QoUKadCgQapYsaKk9JdCS7b5VK1aN
RUqVEgJCQk2Rb+goCAdPXpU3333nVauXGk8LcpTfgBw9ylcuLDKlStnLPH522+/qWHDhrnu57fffsvVGGncuHGqV6+eNmzYoMOHD+vUqVM6cOCADhw4oL1792ru3Lkym82aNWuWli5dqs2bN+vIkSM6c+aM9uzZoz179ujEiRMaNWpUHlyF3ElKSjLeW2OxWNS/f3/VqlVLbm5u6tOnjy5dunRb7/79N/82hsp4p29GfLmR+Sm/Q4cOZbtSxK5du3T+/Pls3/14uzKfW8Yym1L63fT/JrtlWxcvXmwU/Jo0aaJnn31WhQsXtllSK/PEYk499dRTWrJkiSTp66+/NlZecHNzY2lPAMBdIS/n3nKzVOXtLGt5/UpJuVGwYEG1adPGeGfhb7/9pr59+yoqKkpWq1V//vnnvxb9zp07p5deekkJCQkym82aMGGCqlWrlutYbuc8AOTe7T36AiDPdOjQwSjytGjRIsuymJllFIyk9AmogwcPZvlvz5498vDwsHk66/HHH1fbtm3t/g60xo0bG++2iY2NNd4/mBsZS0nVrVtXNWrUyPZprqJFi9oU+TKWKJLSlwfNfMfR/fffn6vjV65cWXXr1lVQUJDuueeeLIO0jLvIJWV5KXHmz5nbZVz306dPa+3atZLSi3vBwcGS0t/JkzFZlPmJt8x/3n369Lnhn3d255hXa6avW7dO27dvNz7faHLo+uNlzr9GjRrpueeeU/369W94V73FYjGenPzjjz+0a9cumUwm1apVS0FBQZKk//73v0b7zNcJAHD3yPx7aP78+Ta/bzJER0dnWVI8s9yOkSwWizp27KiPP/5Ya9eu1Y4dO4zf4Zs3bzaWyyxcuLB69OihOXPmaP369dq6dauxRGfG7/+8lHmcsHHjxmzbXL58WYmJiZLSb87q1auXGjRooHLlyikmJibb72T8Ts9pgSlzHP/73/9slqTMPDbK3C4vZC763YjVatW3334ryXZstmXLFmN5zOvl5LpKtitcREVFGT//8ssv/xpXduO0jBuepPTVEsLCwlS3bl2bvnMbo5T+tGVGQXT16tXGcuoNGjS46TsjAQC4k9zq3FvmscylS5eMZTNNJpPuuecem+XYT548aTO+ylipKLPM45H69evfcJ4n8zLyOZWWlqaff/45y/batWsbY1fJ9mal7Fy5ckW9evUyxsyDBg1S8+bN8+08ANw6nvQDnESZMmU0fPhwRUVF/et7Qh5//HEtWLBAUnrRLyYmRoGBgYqNjdWJEye0efNmBQQEaNy4cTbvZFmzZo3q1KmjmJgY4/189lC0aFH16dNH77//viRp7ty5OnPmjB577DH5+PgYd+bnhZYtW2rx4sWSpOHDh2v37t0qXLiwVq9ebQxgKlaseEvLft3MQw89pKJFi+ry5cvat2+fRo0apbCwMP3888/GO3GKFSumkJAQ4zv16tXTl19+KSn9TvRChQqpUqVKxkRL5iJl5ifYHn30UU2cOFFJSUmaNWuWTCaTgoKCdO3aNZ06dUrbtm1TYmKi5s2bl2fnFx0drZ07dyomJkZbtmyxeal1kyZN9NBDD+Won8z5t23bNq1atUpms1kffPDBDb9Tt25dbd26VVevXtXJkydVoUIF+fj4GIPTzNfJ3gVsAIBzeuGFF/TNN9/ozJkzio2N1TPPPKMXXnhBlStXVnx8vLZv367ly5dr4cKFN1wGOrdjpObNm6tFixaqUqWKSpQooYsXL+rUqVOS0otKSUlJio2NVffu3fXYY4+pYsWK8vX11alTp4ynvm5UYLodTzzxhP78809J6ePC6Oho1ahRQ+fPn9fSpUv1+eefy8/PTwUKFFBiYqIOHTqkzz//XL6+vpo+ffoNi3o+Pj66fPmyLly4oK+//loBAQHGygXZqVq1qipUqKAjR44oMjJSAwcOVLt27bR3716j2Onu7p5n78OT0t+Hc+TIEUnpY+mMd+1kiIyMNJbBX7VqlXr06KGHHnpIvr6+i
o6O1qlTp9SzZ0917txZBQoU0K5du1S0aFH95z//0WOPPWaMvzLujH/yySdltVq1efNm1a5dW0888YTKli0rs9mstLQ0bdu2TZMmTVLhwoWNpUVzK3Nezpw5U+3atdPPP/9sc3NbhpzGmOGpp57Su+++q7NnzxrbWNoTAHA3yc3cW5s2bYyl0RctWqSSJUvq3nvv1X//+19jTBcaGmqMNWvVqqXff/9diYmJGjBggLp06aI///zTuPEosypVqqhy5co6dOiQtm/frjfffFOPPvqoLBaLTp8+rb1792rdunXasWNHrs8xLS1NL774oipXrqyWLVvqgQceUMGCBbVv3z6bm4Rq1Khx035ee+01HTx4UNI/7+jbuXOnsb9mzZp2PQ8At46iH+BEnn322Ry1q1mzpl555RVNnz5dsbGxGj9+fJY27dq1k5Q+6AgMDNTBgwd1+vRpvfrqq5LS7/CJjo7Ou+Cv07NnT126dEmzZ8+WJONlwdfL7RJO1+vXr582bdqkkydP6urVq/rss89s9hcsWNAuy2gVKlRI7777rl577TUlJydr0aJFWrRokbHf3d1d7777rvHEo5R1KcqaNWvKzc1Nfn5+Klu2rDFx6OHhYbP8aalSpTR8+HANHz5cSUlJ+uijj7LEk/n9i3nh559/zvbOsMaNGysiIiLH/ZQsWVKNGzfWhg0bFBMTozfeeENSev5lfqF0ZtcX8jKe8Ktatao8PDyMwXWZMmWyvMMQAHB3KFq0qGbPnq3evXvr5MmTOnfunMaOHZurPnI7Rjp79qw++eSTbPvKmPA5d+6cjh07dsNVDuxRYOnatas2bdqkLVu2KCEhwVgePTOz2aynnnpKixYtUnJysoYPHy4p/Q72jALY9Ro0aKA1a9YoNTVVgwYNkpQ+vsxu3Cml3+k+fvx4de/eXfHx8VnGfiaTSW+//bZKly6dB2edLvNTfi1bttTzzz9vsz8tLU1LliwxbtI6ceKE7rnnHo0bN059+vRRUlKStm/fbrOaQZ8+fSSlj2Eyxl9paWn64Ycf9MMPPxjtMsZqXl5eatWqlVatWqW0tDTNnDlTklShQgVduXIl1+f09NNP64svvpDVatWqVau0atUqmUwmBQcHG+9+zpDTGDM88cQTmjBhgjGWcnd3V4sWLXIdIwAAriync29BQUH6z3/+ozlz5igxMVHjxo2z2e/v768RI0YYn9988011795dycnJ2rRpk3HDTsarXjLLPG6KjY013gWdlw4dOmS8QuZ67du3t3lKLzuZVy3YvXt3lvf1/fjjjypbtqzdzwNA7rG8J+Ci+vfvr5kzZ6pRo0YqWrSo3N3dVbJkSdWpU0dvvPGG+vbtKyn9PR2zZs1Ss2bN5OXlpeLFi6tr164aM2aM3WMcOHCgvvjiC7Vv317lypVTgQIF5O7urlKlSqlRo0YaPHiwsbTQrSpevLi+/PJL9erVS5UqVZKnp6fc3d1VpkwZtW/fXsuWLbPbe9+aN2+uJUuWqGXLlvL19TXe/9eiRQstXrxYzZo1s2lfrlw5myJVRjHr+p9r1qyZZfnLp59+Wp9++qlatGghPz8/WSwW+fn5GQXgzAPNvGI2m1W4cGGVL19ejz76qGbMmKEZM2YY72fMqffff1/t2rVTsWLF5O3trSeffNK46z47QUFBcnd3Nz5nPOHn4eFhs3Y8T/kBwN2tYsWK+vrrrzVkyBDVqVPHGA+VLl1aoaGheu+99276pH9ux0ivv/66QkNDVapUKXl4eMjDw0P33XefevbsqSlTpkhKfzquT58+ql+/vvz9/eXu7i5PT08FBgbqtdde07Bhw/L8Ori7u2v27NkaOnSoatasqUKFCqlAgQK699579cwzzxjtBg8erG7dusnf31+FChVS06ZNNX/+fHl6embb77Bhw/TYY4/ZLKX+b2rWrKnly5erXbt2KlmypCwWi3x8fNSoUSN98skneu655277fDPLXPRr2rRplv1ms1lhYWFZ2
oeFhWn58uV68sknVapUKbm7u6to0aKqX7++zfji6aef1qJFi2zGX76+vnr44Ydt3m88dOhQPfrooypUqJC8vLzUtm1bffrpp7d0TjVr1tTUqVNVuXJlFShQQJUqVdKUKVNuuMpCTmOU0ovlmZflevjhh2+6rBkAAHe7QYMGafLkyapfv76KFClizDd17txZy5cvV7ly5Yy2devW1axZs1StWjWj3cCBA9WrV69s+65WrZpWrFihTp06qVy5cnJ3d5e3t7cqV66sTp06af78+bcUs8Vi0axZs9S1a1dVr17dGJMWLlxYtWrV0vDhw/N0TtBe5wHg1pms9nxjOwAAAAAAcAorVqzQ4MGDJUkffPDBDd/TDAAAAMA1sbwnAAAAAAB3sKtXr+ry5ctatmyZJMnb2zvbpyMBAAAAuDaKfgAAAAAA3MFat26t06dPG5979ux5w6VdAQAAALguin4AAAAAANwF/P391aFDhxu+XwgAAACAa+OdfgAAAC5sx44dmjt3rvbt26fIyEhNmzZNzZs3N/ZbrVZ9+OGH+uKLLxQbG6vatWtr5MiRKl++vNHm8uXLGj16tNavXy+z2awWLVronXfeUeHChR1wRgAAAAAAALgVZkcHAAAAgFuXkJCgwMBAjRgxItv9s2fP1sKFCzVy5EgtXbpUBQsWVM+ePZWYmGi0GThwoA4fPqx58+ZpxowZ2rlzp4YPH55fpwAAAAAAAIA8wJN+AAAAd4jAwECbJ/2sVqsaNWqkHj16qGfPnpKkuLg4hYSEaPz48WrdurWOHDmiVq1a6csvv1SNGjUkST///LN69eqljRs3qmTJkg47HwAAAAAAAOQc7/T7f2lpaUpJSZHZbJbJZHJ0OAAAwMlYrValpaXJYrHIbHaNxRJOnTqlyMhIhYSEGNu8vLxUq1Yt7d69W61bt9bu3bvl7e1tFPwkKSQkRGazWXv37tUjjzySo2MxlgIAADfjimOp/MRYCgAA3ExOx1IU/f5fSkqK/vjjD0eHAQAAnFyNGjXk4eHh6DByJDIyUpLk6+trs93X11dRUVGSpKioKBUvXtxmv8VikY+Pj/H9nGAsBQAAcsKVxlL5ibEUAADIiX8bS1H0+38ZldEaNWrIzc3NwdHc2VJTU/XHH39wreHUyFO4AvI0f2Vcb+5Mz17GdalevTr5aGepqanat28f1xpOjTyFKyBP81fG9WYslT3mpfIP/46CKyBP4QrI0/yV03kpin7/L2PpBDc3NxI0n3Ct4QrIU7gC8jR/udJyS/7+/pKk6OholShRwtgeHR2tKlWqSJL8/Px08eJFm++lpKQoJibG+H5OZFwXi8VCPtoZ1xqugDyFKyBP81fG9XalsVR+Yl4q/3Gt4QrIU7gC8jR//dtYiturAAAA7lBly5aVv7+/tm7damy7cuWKfv/9dwUHB0uSgoODFRsbq3379hlttm3bprS0NNWsWTPfYwYAAAAAAMCt4Uk/AAAAFxYfH68TJ04Yn0+dOqUDBw7Ix8dHAQEB6tq1qz7++GPde++9Klu2rKZMmaISJUqoefPmkqQKFSqoUaNGGjZsmMLDw5WcnKzRo0erdevWKlmypKNOCwAAAAAAALlE0Q8AAMCF7du3T127djU+jxs3TpLUrl07jR8/Xi+++KKuXr2q4cOHKzY2VnXq1NGcOXNUoEAB4zsREREaPXq0unXrJrPZrBYtWmjo0KH5G7nwbQAAZFxJREFUfi4AAAAAAAC4dRT9AAAAXFiDBg108ODBG+43mUzq37+/+vfvf8M2RYsW1cSJE+0RHgAAAAAAAPIJ7/QDAAAAAAAAAAAAXBxFPwAAAAAAAAAAAMDFUfQDAAAAAAAAAAAAXBxFPwAAAAAAAAAAAMDFUfQDAAAAAAAAAAAAXBxFPwAAAAAAAAAAAMDFUfQDAAAAAAAAAAAAXBxFPwAAAAAAAAAAAMDFUfQDAAAAAAAAAAAAXBxFPwAAAAAAAAAAAMDFUfQDAAAAAAAAA
AAAXBxFPwAAAAAAAAAAAMDFWRwdAO5cZ8+e1dmzZ7NsT01N1aFDh5SWliY3N7cs+0uXLq3SpUvnR4gAAAAAAAC4y3h4eDg6BAC4I/D3qfOh6Ae7mTlzpsLDw3P9vREjRmjkyJF5HxAAAAAAAMBdKObaNV1JSnJ0GE7BarXK099fZ69ckclkcnQ4TqGIh4d8PD0dHQbgEvj79B/8fZqVM/x9StEPdvPSSy/piSeesNl29epVhYaGSpI2btyoIkWKZPkeT/kBgC2enAYAAABwO64kJWnqjh2KSkhwdCgOZ7VadSUuTkW8vJikluRXqJD61Kvn8ElqwFXw9+k/+PvUlrP8fUrRD3aT3WRzfHy88XNQUJC8vb3zOywAcDk8OQ0AAADgdkUlJOjclSuODsPhrFarYmNj5W0yMUkN4Jbw92k6/j51ThT98ktMjMRfBFKmOyDM585JcXEODMaJFCki+fg4OgrANdyFf5+2bdhQlT76yGZbYmKieg4cKEmaM2GCPLO5i6hapUrS6dP5EqPT4O9TAAAAAAAA3KUo+uWXK1ekqVOlqChHR+JYycn//PzuuxIv+pT8/KQ+/9fencdbVdf743+dgUFkkklF1AwVTEUGlUC8OFtpfVUyU65DedVUlFIzUxQRExzw4mwp4Zjm1Zu/nLplmlpiYqGGmWmas3lA5SAInHPYvz+Qk0ewAM9h7w3PJ4/zkL3W2mu/1+LN4uN67bXWKCepYUWthcfTO//wh4ybMeMT5//Xd7+73OljBwxI/0GDWqqs0uN4CgDASvrhD3+YX/7yl3nxxRfTtm3bDBgwIKeccko++9nPNi5z6KGH5vHHH2/yvoMOOijnnHNO4+s33ngjZ599dn7/+9+nXbt22W+//XLyySenutqpNwBg9THyWJ1mzUreeqvYVaw2by5YkDcXLmwy7YP6+sbfP/n882m3nMHvhm3aZEP3EQf+lbXseHpM1675ys47LzO9UCjk/Xnz0n7ddZd7G4UN27RZq/YTAACsrMcffzwjR47Mtttum4aGhlx88cU58sgjc88996Rdu3aNy33ta1/LiSee2Ph6nXXWafx9Q0NDjjnmmHTr1i233npr3n777Xzve99Lq1atctJJJ63W7QEA1m5CP1rMD19+OeOef/4T5+88bdpyp4/dYouc3adPS5UFUHY2bNt2uV+GKBQKmVNRkU4dO7p3OgAArIIpU6Y0eT1x4sQMGTIkzzzzTHbYYYfG6W3btk337t2Xu47f/va3eeGFFzJ16tR069YtW221VUaPHp2LLrooo0aNSmt3OQIAVhOhHy3mmE03zVc22GCZ6St0ZQqsJm+++WbefPPNZaY3NDTkxRdfzOLFi1NVVbXM/A033DAbbrjh6igRAACA1WTu3LlJkk4fu2X8XXfdlZ///Ofp3r17dt111xx33HGNV/s9+eST2XLLLdOtW7fG5YcNG5azzz47L7zwQj73uc+t8OcXCoUUCoVm2JKPrXfJypf8rO2W7gP7YolCIYWkRfqOVbf0z6OljgmsOsfTj3A8baqFj6crul6hHy3GlSllaM6cJc9LW4tcdM45ufhHP1rp95109NGZdNZZLVBRCWvf3rPSAACANdbixYtz3nnnZeDAgdlyyy0bp++7777p2bNnevTokeeeey4XXXRRXnrppVx++eVJklmzZjUJ/JI0vq6pqVmpGmpra1NZWfkpt6SpysrKNDQ0pL6+PnUfeezKWuvDk6b19fWJ81Kpr69PQ0ND5s6dm8WLFxe7HD609M+iJY4JrDrH049xPG2ipY+nK7pOoR/wT++/n1x++ZLnpa0tHnts1d736KPJ2Wc3ayklrVu3ZNQooR8AALDGGjduXJ5//vn85Cc/aTL9oIMOavx9nz590r179xxxxBF55ZVXsskmmzRrDR07dlzu3WY+rdq5c1NdXZ1W1U4FLr1Sorq62pfRs2Q/VFVVpUOHDsUuhY9oaGhI0nLHBFad4+k/OZ421dLH06XHh
X9bR4t8OlC+Zs1K3nqr2FWsNqdssEFGrrfeMtNX6Da0a9F+AgAAWJOdc845+c1vfpObbropGyznUSUftd122yVJXn755WyyySbp1q1bnn766SbLzPrwy7Sf9BzAT1JRUdEiJ04rlqzclRgfZX8sUVGRisQJ+xKz9M+jpY4JrDrH0+WwP5Zo4ePpiq5X6Aes1dyGFgAAYO1VKBQyfvz4/OpXv8qNN96YjTfe+N++59lnn03yz0Cvf//+ufrqqzN79ux07do1SfLoo4+mffv22XzzzVuueACAjxH6AQAAALBWGjduXO6+++5ceeWVWXfddRufwdehQ4e0bds2r7zySu66664MHz48nTt3znPPPZcJEyZkhx12SN++fZMkw4YNy+abb55TTz013/3ud1NTU5PJkydn5MiRad26dTE3DwBYywj9AAAAAFgr3XLLLUmSQw89tMn0CRMm5IADDkirVq0ybdq03HDDDZk/f3423HDD7LXXXjnuuOMal62qqsrVV1+ds88+OwcddFDWWWed7L///jnxxBNX67YAAAj9AAAAAFgrPffcc/9y/oYbbpibbrrp365no402yjXXXNNcZQEArJLKYhcAAAAAAAAAfDpCPwAAAAAAAChzQj8AAAAAAAAoc0I/AAAAAAAAKHNCPwAAAAAAAChzQj8AAAAAAAAoc0I/AAAAAAAAKHNCPwAAAAAAAChzQj8AAAAAAAAoc0I/AAAAAAAAKHNCPwAAAAAAAChzQj8AAAAAAAAoc0I/AAAAAAAAKHNCPwAAAAAAAChz1cUuAAAAAKClPPnkk3nmmWeWmb548eK8/vrr+fOf/5zKymW/E7311lunf//+q6FCAABoHkI/AAAAWEssmLMgi95fVOwyVqtRx47K7x773Uq/b6fP75R7b7+3BSoqXa3bt07bTm2LXQYAAKtI6AcAAABriUXvL8r0y6dn/qz5xS5ltTlwwwMzeJfBy84oJB8s+CDrtF0nqVh29ibrbZKHzn6o5QssEe26tcsOo3YQ+gEAlDGhHwAAAKxF5s+an/ffer/YZaw2PdIjPdr3WGZ6oVDInMVz0ql9p1RULCf1q8tatZ8AACh/y960HgAAAAAAACgrQj8AAAAAAAAoc0I/AAAAAAAAKHNCPwAAAAAAAChz1cUuAAAAWNaTTz6ZZ555Zpnpixcvzuuvv54///nPqaxc9jt8W2+9dfr3778aKgR9CgAAUEqEfgAAlLwFcxZk0fuLil3GajXq2FH53WO/W+n37fT5nXLv7fe2QEWlq3X71mnbqW2xy9CnK0GfAgAAND+hHwAAJW/R+4sy/fLpmT9rfrFLWW0O3PDADN5l8LIzCskHCz7IOm3XSSqWnb3JepvkobMfavkCS0S7bu2yw6gdSiJM0acfoU+bKKU+BQAA1lxCPwAAysL8WfPz/lvvF7uM1aZHeqRH+x7LTC8UCpmzeE46te+UiorlpCl1Wav2U6nRp0voUwAAgNVv2YcrAAAAAAAAAGVF6AcAAAAAAABlTugHAAAAAAAAZU7oBwAAAAAAAGVO6AcAAAAAAABlTugHAAAAAAAAZU7oBwAAAAAAAGVO6AcAAAAAAABlTugHAAAAAAAAZU7oBwAAAAAAAGVO6AcAAAAAAABlruih3w9/+MOMGDEiAwYMyJAhQ3LcccflxRdfbLLMwoULM27cuAwePDgDBgzICSeckFmzZjVZ5o033sjRRx+d7bbbLkOGDMn555+f+vr61bkpAAAAAAAAUBRFD/0ef/zxjBw5MrfddlumTp2a+vr6HHnkkZk/f37jMuedd14efPDBTJ48OTfeeGPefvvtjBo1qnF+Q0NDjjnmmNTV1eXWW2/NxIkT87Of/SyXXnppMTYJAAAAAAAAVquih35TpkzJAQcckC222CJ9+/bNxIkT88Ybb+SZZ55JksydOzd33HFHTjvttAwZMiTbbLNNzjvvvMyYMSNPPvlkkuS3v/1tXnjhhVx44YXZaqutMnz48
IwePTo333xzFi1aVMStAwAAAAAAgJZX9NDv4+bOnZsk6dSpU5Jk5syZqaury9ChQxuX6d27d3r27NkY+j355JPZcsst061bt8Zlhg0blvfffz8vvPDC6iseAAAAAAAAiqC62AV81OLFi3Peeedl4MCB2XLLLZMks2bNSqtWrdKxY8cmy3bt2jU1NTWNy3w08EvS+HrpMiuqUCikUCis6ib8qxU3/zrLVOFj/yVL+qMUeqQUaigR+nQ59GnJ0afL0YJ92iLjAwAAAABoJiUV+o0bNy7PP/98fvKTnxSthtra2lRWNu8FkJWVlWnX0JBCfX1SV9es6y5HS0+Z1tfXp6KolZSI+vpUNDRk/ty5Wbx4cdHK0KdN6dOP0aclSZ9+TAv3aTF7HwAAAAD+nZIJ/c4555z85je/yU033ZQNNtigcXq3bt1SV1eX2traJlf7zZ49O927d29c5umnn26yvlmzZiVJ4zIrqmPHjqmqqlrVzfhktbVJdXXSqlXzr7vMLL1Sorq6OhUVTlOnujqpqkqHDh2KXYk+/Qh9+jH6tCTp049p4T5taGhokfUCAAAAQHMo+jP9CoVCzjnnnPzqV7/K9ddfn4033rjJ/G222SatWrXKtGnTGqe9+OKLeeONN9K/f/8kSf/+/fPXv/41s2fPblzm0UcfTfv27bP55puvVD0VFRUt95P4+fAnJVBDSf20ZN/p01X+SQnUUFI/xe5Nfbrcn5RADSX108K9V44aGhoyefLk7LbbbunXr1/22GOPXHHFFU1uV1ooFHLJJZdk2LBh6devX4444oj8/e9/L17RAAAAAKy0ol/pN27cuNx999258sors+666zY+g69Dhw5p27ZtOnTokBEjRmTixInp1KlT2rdvn3PPPTcDBgxoDP2GDRuWzTffPKeeemq++93vpqamJpMnT87IkSPTunXrIm4dAEBxXXPNNbnlllty/vnnZ/PNN8/MmTPz/e9/Px06dMhhhx3WuMyNN96YiRMnplevXrnkkkty5JFH5t57702bNm2KvAUAAAAArIiih3633HJLkuTQQw9tMn3ChAk54IADkiSnn356Kisrc+KJJ2bRokUZNmxYxo4d27hsVVVVrr766px99tk56KCDss4662T//ffPiSeeuPo2BACgBM2YMSO77757dtlllyRJr169cs899zTeGr1QKOSGG27Isccemz322CNJcsEFF2To0KG5//77s88++xSrdAAAAABWQtFDv+eee+7fLtOmTZuMHTu2SdD3cRtttFGuueaa5iwNAKDsDRgwILfddlteeumlbLbZZvnLX/6SP/zhDznttNOSJK+99lpqamoydOjQxvd06NAh2223XWbMmLHSoV+hUGhy69BmU0gKH/5a2y3dB/bFEoUlzdEyfbfyxejTD+nTpvRpadKnTbV0n5ZE/wMArOGKHvoBANByjj766Lz//vv54he/mKqqqjQ0NOQ73/lOvvKVryRJ463Vu3bt2uR9Xbt2zaxZs1b682pra1NZ2byPja6srExDQ0Pq6+tTX1ffrOsuR0tPTtfX16ci5fmsyeZUX1+fhoaGzJ07N4sXLy5aHfq0KX3alD4tTfq0qZbu02L2PgDA2kLoBwCwBrvvvvty1113ZdKkSdl8883z7LPPZsKECenRo0f233//Zv+8jh07pqqqqtnXO7d2bqqrq1PdyvB16ZUS1dXVqahwkrq6ujpVVVXp0KFDsUvRpx+hT5vSp6VJnzbV0n3a0NDQIusFAOCfjPIBANZgF1xwQY4++ujG23T26dMnb7zxRn74wx9m//33T/fu3ZMks2fPTo8ePRrfN3v27PTt23elP6+ioqJlTpxWJBUf/mIJ+2OJiiXNURon7PXpMuyPJfRpabM/lmjpPi2J/gcAWMM1772XAAAoKQsWLFjmJFtVVVXj1Q29evVK9+7dM23atMb577//fp566qkMGDBgtdYKAAAAwKpzpR8AwBps1113zdVXX52ePXs23
t5z6tSpGTFiRJIl37o/7LDDctVVV2XTTTdNr169cskll6RHjx7ZY489ilw9AAAAACtK6AcAsAYbM2ZMLrnkkowbN67xFp4HHXRQjj/++MZljjrqqHzwwQc566yzUltbm0GDBuXaa69NmzZtilg5AAAAACtD6AcAsAZr3759zjjjjJxxxhmfuExFRUVGjx6d0aNHr8bKAAAAAGhOnukHAAAAAAAAZU7oBwAAAAAAAGVO6AcAAAAAAABlTugHAAAAAAAAZU7oBwAAAAAAAGVO6AcAAAAAAABlTugHAAAAAAAAZU7oBwAAAAAAAGVO6AcAAAAAAABlTugHAAAAAAAAZU7oBwAAAAAAAGVO6AcAAADAWumHP/xhRowYkQEDBmTIkCE57rjj8uKLLzZZZuHChRk3blwGDx6cAQMG5IQTTsisWbOaLPPGG2/k6KOPznbbbZchQ4bk/PPPT319/ercFAAAoR8AAAAAa6fHH388I0eOzG233ZapU6emvr4+Rx55ZObPn9+4zHnnnZcHH3wwkydPzo033pi33347o0aNapzf0NCQY445JnV1dbn11lszceLE/OxnP8ull15ajE0CANZiQj8AAAAA1kpTpkzJAQcckC222CJ9+/bNxIkT88Ybb+SZZ55JksydOzd33HFHTjvttAwZMiTbbLNNzjvvvMyYMSNPPvlkkuS3v/1tXnjhhVx44YXZaqutMnz48IwePTo333xzFi1aVMStAwDWNkI/AAAAAMiSkC9JOnXqlCSZOXNm6urqMnTo0MZlevfunZ49ezaGfk8++WS23HLLdOvWrXGZYcOG5f33388LL7yw+ooHANZ61cUuAAAAAACKbfHixTnvvPMycODAbLnllkmSWbNmpVWrVunYsWOTZbt27ZqamprGZT4a+CVpfL10mRVVKBRSKBRWdRM+eb1LVr7kZ223dB/YF0sUCikkLdJ3rLqlfx4tdUxg1TmefoTjaVMtfDxd0fUK/QAAAABY640bNy7PP/98fvKTnxSthtra2lRWNu+NuSorK9PQ0JD6+vrU1dc367rL0ocnTevr65OKiiIXU3z19fVpaGjI3Llzs3jx4mKXw4eW/lm0xDGBVed4+jGOp0209PF0Rdcp9AMAAABgrXbOOefkN7/5TW666aZssMEGjdO7deuWurq61NbWNrnab/bs2enevXvjMk8//XST9c2aNStJGpdZUR07dkxVVdWqbsYnqp07N9XV1WlV7VTg0islqqurU+Ekdaqrq1NVVZUOHToUuxQ+oqGhIUnLHRNYdY6n/+R42lRLH0+XHhf+bR0t8ukAAAAAUOIKhULGjx+fX/3qV7nxxhuz8cYbN5m/zTbbpFWrVpk2bVr23nvvJMmLL76YN954I/3790+S9O/fP1dffXVmz56drl27JkkeffTRtG/fPptvvvlK1VNRUdEiJ04rlqzclRgfZX8sUVGRisQJ+xKz9M+jpY4JrDrH0+WwP5Zo4ePpiq5X6AcAAADAWmncuHG5++67c+WVV2bddddtfAZfhw4d0rZt23To0CEjRozIxIkT06lTp7Rv3z7nnntuBgwY0Bj6DRs2LJtvvnlOPfXUfPe7301NTU0mT56ckSNHpnXr1kXcOgBgbSP0AwAAAGCtdMsttyRJDj300CbTJ0yYkAMOOCBJcvrpp6eysjInnnhiFi1alGHDhmXs2LGNy1ZVVeXqq6/O2WefnYMOOijrrLNO9t9//5x44omrb0MAACL0AwAAAGAt9dxzz/3bZdq0aZOxY8c2Cfo+bqONNso111zTnKUBAKy0ymIXAAAAAAAAAHw6Qj8AAAAAAAAoc0I/AAAAAAAAKHNCPwAAAAAAAChzQj8AAAAAAAAoc0I/AAAAAAAAKHNCPwAAAAAAAChzQj8AAAAAAAAoc0I/AAAAAAAAKHNCPwAAAAAAAChzQj8AAAAAAAAoc0I/AAAAAAAAKHNCPwAAAAAAAChzQj8AAAAAAAAoc0I/AAAAAAAAK
HNCPwAAAAAAAChzQj8AAAAAAAAoc0I/AAAAAAAAKHNCPwAAAAAAAChzQj8AAAAAAAAoc0I/AAAAAAAAKHNCPwAAAAAAAChzQj8AAAAAAAAoc0I/AAAAAAAAKHNCPwAAAAAAAChzQj8AAAAAAAAoc0I/AAAAAAAAKHNCPwAAAAAAAChzQj8AAAAAAAAoc0I/AAAAAAAAKHNCPwAAAAAAAChzQj8AAAAAAAAoc0I/AAAAAAAAKHNCPwAAAAAAAChzQj8AAAAAAAAoc0I/AAAAAAAAKHNCPwAAAAAAAChzQj8AAAAAAAAoc0I/AAAAAAAAKHNCPwAAAAAAAChzQj8AAAAAAAAoc0I/AAAAAAAAKHNCPwAAAAAAAChzQj8AAAAAAAAoc0I/AAAAAAAAKHNFD/2mT5+eb33rWxk2bFj69OmT+++/v8n80047LX369Gnyc+SRRzZZ5r333svJJ5+cgQMHZvvtt8/pp5+eefPmrc7NAAAAAAAAgKKpLnYB8+fPT58+fTJixIiMGjVqucvsvPPOmTBhQuPr1q1bN5l/yimnpKamJlOnTk1dXV1OP/30nHXWWZk0aVKL1g4AAAAAAACloOih3/DhwzN8+PB/uUzr1q3TvXv35c7729/+lkceeSS33357tt122yTJmDFjcvTRR+fUU0/N+uuv3+w1AwCsioaGhjz00EN59NFH89RTT6WmpiYLFixI586ds9lmm2X77bfPXnvtlY033rjYpQIAAABQZooe+q2Ixx9/PEOGDEnHjh3z+c9/Pt/+9rez3nrrJUlmzJiRjh07NgZ+STJ06NBUVlbm6aefzp577rlSn1UoFFIoFJq1/g9X3PzrLFOFj/2XLOmPUuiRUqihROjT5dCnJUefLkcL9umnGR/MmzcvU6dOzS233JI5c+Zkiy22SN++fbP99tundevWmTt3bl5//fX8+Mc/zqRJk7LjjjvmhBNOyKBBg5pxCwAAAABYk5V86Lfzzjtnzz33TK9evfLqq6/m4osvzlFHHZWf/vSnqaqqyqxZs9KlS5cm76murk6nTp1SU1Oz0p9XW1ubysrmfdRhZWVl2jU0pFBfn9TVNeu6y9HSU6b19fWpKGolJaK+PhUNDZk/d24WL15ctDL0aVP69GP0aUnSpx/Twn36ada5++67Z4sttsh3v/vd7LHHHmnfvv0nLvvMM8/k7rvvzrHHHpvRo0dn5MiRq/y5S/3jH//IhRdemEceeSQffPBBNt1005x33nmNX5oqFAq59NJL8z//8z+pra3NwIEDc/bZZ+czn/nMp/5sAAAAAFaPkg/99tlnn8bf9+nTJ3369Mkee+zRePVfc+vYsWOqqqqafb2prU2qq5NWrZp/3WVm6ZUS1dXVqahwmjrV1UlVVTp06FDsSvTpR+jTj9GnJUmffkwL92lDQ8Mqv/fKK6/MwIEDV2jZrbfeOltvvXWOP/74vPnmm6v8mUvNmTMnBx98cAYPHpxrrrkm6623Xl5++eV06tSpcZlrrrkmN954YyZOnJhevXrlkksuyZFHHpl77703bdq0+dQ1AAAAANDySj70+7iNN9648WTVkCFD0q1bt7zzzjtNlqmvr8+cOXM+8TmA/0pFRUXLnDh1MnYZFR/+kCX9UQo9Ugo1lBh9+hH6tGTp049owT79NOODFQ38Pqp9+/bZYostVvkzl7rmmmuywQYbZMKECY3TPvrMwEKhkBtuuCHHHnts9thjjyTJBRdckKFDh+b+++9v8gUsAAAAAEpX897HcjV466238t577zUGegMGDEhtbW1mzpzZuMxjjz2WxYsXp1+/fsUqEwBghfztb3/LzTffnJtvvjkvvPBCs6//gQceyDbbbJMTTzwxQ4YMyX777Zfbbrutcf5rr72WmpqaDB06tHFahw4dst1222XGjBnNXg8AAAAALaPoV/rNmzcvr7zySuPr1157Lc8++2w6deqUTp065fLLL8/ee++dbt265dVXX82FF16YTTfdN
DvvvHOSpHfv3tl5551z5plnZty4camrq8v48eOzzz77ZP311y/WZgEA/Fv/3//3/2XMmDHZcsst88EHH+S8887LOeeckxEjRjTbZ7z66qu55ZZb8o1vfCPf+ta38qc//SnnnntuWrVqlf3337/xGchdu3Zt8r6uXbtm1qxZK/15hUKh8dazzaqQFD78tbZbug/siyUKS5qjZfpu5YvRpx/Sp03p09KkT5tq6T4tif4HAFjDFT30mzlzZg477LDG10tvPbX//vvn7LPPzl//+tfceeedmTt3bnr06JGddtopo0ePTuvWrRvfc9FFF2X8+PE5/PDDU1lZmb322itjxoxZ7dsCALAyrrjiilx33XUZNGhQkiW34rziiiuaNfQrFArZZpttctJJJyVJPve5z+X555/Prbfemv3337/ZPmep2traVFY2780kKisr09DQkPr6+tTX1TfrusvR0pPT9fX1qXBz39TX16ehoSFz587N4sWLi1aHPm1KnzalT0uTPm2qpfu0mL0PALC2KHroN3jw4Dz33HOfOH/KlCn/dh2dO3fOpEmTmrMsAIBmc8ABB+Sss85K//79m0yfP39+Nt1008bXG2+8cebPn9+sn929e/f07t27ybTPfvaz+b//+7/G+Ukye/bs9OjRo3GZ2bNnp2/fviv9eR07dkxVVdWnqHj55tbOTXV1dapbFX34WnRLr5Sorq5umWdRl5nq6upUVVWlQ4cOxS5Fn36EPm1Kn5YmfdpUS/dpQ0NDi6wXAIB/MsoHAGhhhx56aE444YR8/vOfz6mnntoYtH3xi1/MIYcckj322CMLFizIPffcky9/+cvN+tkDBw7MSy+91GTa3//+92y00UZJkl69eqV79+6ZNm1attpqqyTJ+++/n6eeeioHH3zwSn9eRUVFy5w4rUgqPvzFEvbHEhVLmqM0Ttjr02XYH0vo09JmfyzR0n1aEv0PALCGa957LwEAsIz9998/v/jFL9KtW7fsu+++ueaaa1JXV5fTTjsthx9+eF599dXU1NTkxBNPzGmnndasn3344YfnqaeeytVXX52XX345d911V2677bYccsghSZacgDvssMNy1VVX5de//nWee+65nHrqqenRo0f22GOPZq0FAAAAgJaz0lf6NTQ05KGHHsqjjz6ap556KjU1NVmwYEE6d+6czTbbLNtvv3322muvbLzxxi1RLwBAWVp33XXzve99L1/72tcyYcKE3H777Tn99NMzcuTIjBw5ssU+t1+/frn88stz8cUX54orrkivXr1y+umn5ytf+UrjMkcddVQ++OCDnHXWWamtrc2gQYNy7bXXpk2bNi1WFwAAAADNa4VDv3nz5mXq1Km55ZZbMmfOnGyxxRbp27dvtt9++7Ru3Tpz587N66+/nh//+MeZNGlSdtxxx5xwwgkZNGhQS9YPAFBWNttss/zoRz/KAw88kPPOOy8333xzzjjjjCbP9mtuu+66a3bddddPnF9RUZHRo0dn9OjRLVYDAAAAAC1rhUO/3XffPVtssUW++93vZo899kj79u0/cdlnnnkmd999d4499tiMHj26Rb+9DgBQ6gqFQu6444787ne/y6JFi9KvX7/853/+Z+66665MnTo1X/va13LggQfmuOOOS7t27YpdLgAAAABlaIVDvyuvvDIDBw5coWW33nrrbL311jn++OPz5ptvrnJxAABrgvPOOy8/+9nPsu+++2adddbJT3/60zz00EP5yU9+kmOOOSb77bdfLrzwwnzhC1/IySefnP/3//5fsUsGAChZ7777bp5++unU1NRk4cKFjY+c2WqrrVJRUVHs8gAAimaFQ78VDfw+qn379tliiy1W+n0AAGuSn//85zn33HPzhS98IUlyyCGHZK+99sprr72WXr16Zf31189FF12UP/zhD/nBD34g9AMA+Ji5c+fmZz/7We688848++yzKRQKTeZXVFSkXbt22XPPPXPggQd63AwAsFZa4dDvmGOOy
aBBgzJw4MD069cvrVu3bsm6AADWGJ07d87rr7/e+PqNN95IknTs2LHJcoMGDcodd9yxWmsDACh1V199dX784x+nY8eO+cIXvpDjjz8+ffv2zXrrrZfWrVuntrY2r7/+embOnJmHH344RxxxRAYNGpQxY8Zk8803L3b5AACrzQqHfm+++WYmT56cQqGQVq1a5XOf+1xjCDho0KB07ty5BcsEAChfp512Wk455ZTceeedadOmTf7617/mO9/5zjKhXxK3pAIA+JjHH388l112WQYPHrzc+V26dEmXLl2y7bbb5uCDD86cOXNy00035fe//73QDwBYq6xw6Pfzn/8877//fp588sn88Y9/zB//+MfccsstmTJlSioqKrLZZps1BoCDBg3KJpts0pJ1AwCUjV133TW//vWv89RTT2XRokXZaqut0qtXr2KXBQBQFn784x+v1PKdOnXK8ccf30LVAACUrhUO/ZIlz+gbNmxYhg0bliRZvHhx/vKXvzSGgL/73e9yxx13pKKiIn/+859bpGAAgHLUuXPnDB8+vNhlAADwEdOnT8+UKVMyc+bM1NTU5Iorrsgee+zROP+0007Lz372sybvGTZsWKZMmdL4+r333sv48ePz4IMPprKyMnvttVfOOOOMrLvuuqttOwAAkpUM/T6usrIynTp1SqdOndKxY8d07Ngxb731Vtq0adNc9QEAlL2HHnpopQO/d955J2+++Wa23nrrFqoKAKB8FAqFvPjii6mvr8+WW26ZioqKvP3227nuuuvywgsvpEuXLjnwwAMzaNCglVrv/Pnz06dPn4wYMSKjRo1a7jI777xzJkyY0Pi6devWTeafcsopqampydSpU1NXV5fTTz89Z511ViZNmrTyGwoA8CmsVOjX0NCQP//5z41X9s2YMSM1NTXp2bNn+vfvn6997Wvp379/+vbt21L1AgCUnbFjx6ZTp04ZMWJE9t5776y//vrLXa6hoSGPP/547r777tx77705/fTThX4AwFrvjTfeyDHHHJMXXnghSdK7d+9ccskl+a//+q/84x//yHrrrZd33303d999d66//vqVCv6GDx/+b7+c1bp163Tv3n258/72t7/lkUceye23355tt902STJmzJgcffTROfXUUz9x3AcA0BJWOPQ79NBDM3PmzCxevDif+9zn0r9//+y7777p37//Jw58AABIfvnLX+YnP/lJrr/++kyYMCEbbrhhttxyy3Tp0iWtW7fO3Llz89prr+W5555LfX19dtttt9xyyy2+SAUAkGTSpEmZP39+Lr/88rRr1y5XXHFFjjrqqKy//vq544470qVLl8yaNSvHHntsrrjiipV+BuC/8/jjj2fIkCHp2LFjPv/5z+fb3/521ltvvSTJjBkz0rFjx8bAL0mGDh2aysrKPP3009lzzz2btRYAgH9lhUO/6dOnZ5111slXvvKV7LjjjhkwYEB69erVkrUBAKwRWrdunSOOOCJHHHFEfv/732fatGn505/+lJkzZ2bhwoXp3LlzNttss4wYMSK77757unbtWuySAQBKxvTp03PKKadk9913T5L06NEj++yzT84444x06dIlSdKtW7cceeSROeecc5r1s3feeefsueee6dWrV1599dVcfPHFOeqoo/LTn/40VVVVmTVrVmMNS1VXV6dTp06pqalZ6c8rFAopFArNVf4/17tk5Ut+1nZL94F9sUShkELSIn3Hqlv659FSxwRWnePpRzieNtXCx9MVXe8Kh34///nPG2/reckll+T1119Pt27dMmDAgMafrbfeOq1atVrlogEA1nSDBw/O4MGDi10GAEDZqKmpySabbNL4eunvP37rzA022CDvvvtus372Pvvs0/j7Pn36pE+fPtljjz0ar/5rbrW1tamsrGzWdVZWVqahoSH19fWpq69v1nWXpQ9PmtbX1ycVFUUupvjq6+vT0NCQuXPnZvHixcUuhw8t/bNoiWMCq87x9GMcT5to6ePpi
q5zhUO/LbfcMltuuWW+/vWvJ0nefvvtzJgxIzNmzMh9992XSZMmpaqqKp/73OcycODAfPe73121ygEAAADgQ4VCoclJ76W/r/jYCcaPv24JG2+8cdZbb728/PLLGTJkSLp165Z33nmnyTL19fWZM2fOKj0Op2PHjqmqqmquchvVzp2b6urqtKpe4VOBa6ylV0pUV1evlp4pddXV1amqqkqHDh2KXQof0dDQkKTljgmsOsfTf3I8baqlj6dLjwv/to5V/YAePXpk7733zt577526uro88cQTuf766/PQQw/lySefFPoBAAAA0Cx+//vf56233kqy5JvuFRUV+f3vf5/XX3+9cZm///3vLV7HW2+9lffee68x0BswYEBqa2szc+bMbLPNNkmSxx57LIsXL06/fv1Wev0VFRUtcuK0YsnKXYnxUfbHEhUVqcjqCc1ZcUv/PFrqmMCqczxdDvtjiRY+nq7oelcp9HvnnXcar/L74x//mGeeeSaLFi1KZWVlttpqqwwcOHBVVgsAAAAAy5g0adIy0y644IJlpq3sibZ58+bllVdeaXz92muv5dlnn02nTp3SqVOnXH755dl7773TrVu3vPrqq7nwwguz6aabZuedd06S9O7dOzvvvHPOPPPMjBs3LnV1dRk/fnz22WefZW4/CgDQ0lY49Puf//mfxpDv5ZdfTqFQSLt27bLddtvlv/7rvzJo0KD0798/7dq1a8l6AQAAAFiL/PrXv26xdc+cOTOHHXZY4+sJEyYkSfbff/+cffbZ+etf/5o777wzc+fOTY8ePbLTTjtl9OjRad26deN7LrrooowfPz6HH354Kisrs9dee2XMmDEtVjMAwCdZ4dDvzDPPTPfu3TNw4MAccsghGThwYLbaaiv3FAYAAACgxWy00UYttu7Bgwfnueee+8T5U6ZM+bfr6Ny583KvRAQAWN1WOPT71a9+lY033rglawEAWOPdcsst+fKXv5z27dsXuxQAAAAA1iArHPoJ/AAAPr2JEyfm/PPPz1577ZWvfvWr2XHHHYtdEgBASdt9991XavmWvB0oAEApW+HQ78ADD8wxxxyT3XbbLZWVlf92+TfffDM33HBDevTokW984xufqkgAgDXFI488krvuuit33HFHDjvssGy88cYZMWJE9t9//6y//vrFLg8AoOS8/vrrWXfddbPrrrumS5cuxS4HAKBkrXDot99+++Xss8/OmWeemd133z0DBw5Mnz590qVLl7Ru3Tq1tbV57bXX8swzz+Thhx/OU089ld122y0HH3xwS9YPAFBWOnbsmJEjR2bkyJH5y1/+kjvuuCPXX399Lrvssuy0004ZMWJEdtttt7Rq1arYpQIAlIRjjjkm9957b+67774MGTIk++67b/bcc8+su+66xS4NAKCkrHDoN3LkyIwYMSL33HNP7rzzztx5551paGhoskyhUEj37t2z9957Z+zYsenTp0+zFwwAsKbo27dvzjjjjPzXf/1XTjnllDz88MN5+OGHs9566+Xggw/O0UcfnbZt2xa7TACAovrOd76T73znO3n66adz11135aKLLsrZZ5+d//iP/8hXvvKV/Md//Edat25d7DIBAIpuhUO/JGnbtm1GjBiRESNGZOHChXn22WdTU1OThQsXplOnTtlss83Sq1evlqoVAGCNUSgU8vDDD+eOO+7Igw8+mI4dO+bII4/MnnvumYceeig33XRTnn/++Vx22WXFLhUAoCT069cv/fr1y+mnn57HHnss99xzT04//fQUCoV861vfypFHHlnsEgEAimqlQr+PatOmTfr379+MpQAArPleeeWV3HHHHbnzzjvz9ttvZ+jQobnooouy++67p7p6ydCsf//+2WabbXLSSScVuVoAgNJTUVGRIUOGpE2bNkmS//3f/81TTz1V5KoAAIpvlUM/AABW3l577ZX1118/BxxwQL761a9mo402Wu5yn/3sZ9OvX7/VXB0AQGl79tlnc/fdd+e++
+5LTU1Nhg0blgsuuCC77757sUsDACg6oR8AwGp01VVXZfjw4amsrPyXy2222Wa58cYbV1NVAACl66WXXso999yTe+65J6+88kq23377HHvssdlrr73SqVOnYpcHAFAyhH4AAKvRrrvuWuwSAADKyhe/+MWsu+662X333XPqqaemR48eSZLXXnstr7322jLLb7311qu7RACAkiD0AwBYjb7//e/ngw8+yOTJk5eZ953vfCft27fP+PHjV39hAAAlbN68efn5z3+eu+666xOXKRQKqaioyLPPPrsaKwMAKB0rHfotXLgwP/nJT7LTTjtlyy23bImaAADWWI8++mi+973vLXfeXnvtlQsuuGA1VwQAUNpuuOGGYpcAAFAWVjr0a9OmTSZPnuxWCQAAq+Cdd97Jeuutt9x5nTt3zqxZs1ZzRQAApW3HHXcsdgkAAGWhclXetNVWW+WFF15o7loAANZ466+/fp5++unlznv66afTvXv31VwRAAAAAGuCVQr9Tj/99Fx//fX5xS9+kQ8++KC5awIAWGPts88+ufrqq3Pvvfc2mX7ffffl6quvzpe//OUiVQYAUJoOPPDA3H///Vm8ePEKLf/mm2/m/PPPz9SpU1u4MgCA0rLSt/dMksMPPzx1dXX5zne+kyRp27ZtKioqGudXVFTkD3/4Q/NUCACwBjn++OPzl7/8JSeddFLOOOOM9OjRI2+//XYWLFiQ//iP/8jxxx9f7BIBAErKfvvtl7PPPjtnnnlmdt999wwcODB9+vRJly5d0rp169TW1ua1117LM888k4cffjhPPfVUdttttxx88MHFLh0AYLVapdDvm9/8ZpOQDwCAFdO6dev88Ic/zO9+97s89thjee+999K5c+cMHTo0Q4YMKXZ5AAAlZ+TIkRkxYkTuueee3HnnnbnzzjvT0NDQZJlCoZDu3btn7733ztixY9OnT58iVQsAUDyrFPqdcMIJzV0HAMBaZaeddspOO+1U7DIAAMpC27ZtM2LEiIwYMSILFy7Ms88+m5qamixcuDCdOnXKZpttll69ehW7TACAolql0O+j3nzzzbz55pvp27dv2rVr1xw1AQCsFT744IMsXLhwmemdO3de/cUAAJSJNm3apH///sUuAwCg5Kxy6PfTn/40l19+eWpqalJRUZHbb789W2+9dY4//vjsuOOOOfzww5uzTgCANUKhUMiVV16Zn/70p6mpqVnuMs8+++xqrgoAAACAcle5Km+67rrrMn78+Oy333758Y9/nEKh0Dhvxx13zC9+8YtmKxAAYE1y3XXX5brrrsvIkSNTKBTyrW99K8cff3w+85nPZKONNsr48eOLXSIAAAAAZWiVQr+bbropxx13XE4++eQMHjy4ybzNNtssL730UrMUBwCwprn99ttzwgkn5L/+67+SJHvssUdGjRqVe+65J717984rr7xS5AoBAAAAKEerFPr94x//yIABA5Y7r1WrVpk/f/6nKgoAYE31+uuvZ6uttkpVVVWqq6tTW1ubJKmsrMwhhxyS//3f/y1yhQAAAACUo1UK/Xr27Jk//elPy5331FNP5TOf+cynqQkAYI3VuXPnxi9I9ezZM3/+858b57377rtZsGBBsUoDAAAAoIxVr8qbvva1r+Xyyy/Peuutl7322itJUl9fn9/85jeZMmVKvv3tbzdnjQAAa4yBAwfmT3/6U4YPH5599903l19+eWbNmpXq6urcdtttGTJkSLFLBAAoeQ8//HD+9Kc/5a233sqxxx6bnj17Zvr06dlkk02y/vrrF7s8AICiWKXQ78gjj8ybb76Zs846K2PHjk2SHHzwwUmSQw45JCNHjmy+CgEA1iCjRo3KP/7xjyTJt771rdTW1ubuu+/OwoULM3To0Jx55plFrhAAoHS98847Oe644/LUU09lww03zJtvvpmvf/3r6dmzZ+64446ss846jeeqAADWNqsU+iXJmDFjcthhh2XatGl5991306lTpwwZMsStP
QEAPkGhUEiXLl3Sq1evJEnr1q0zZsyYjBkzpsiVAQCUhx/84Ad59913c/fdd2fTTTfNNtts0zhvyJAhueqqq4pYHQBAca1y6Jckm2yySTbZZJPmqgUAYI1WV1eXoUOH5sorr8wuu+xS7HIAAMrOQw89lPHjx6d3795paGhoMm/DDTdsvKMCQLlr3bp1sUsAytAqhX5f/vKXs/3222eHHXbI9ttvnx49ejR3XQAAa5zWrVtngw02WOYEFQAAK6ahoSHt2rVb7rza2tq0atVqNVcENJc5Cxbk/UWLil1GSSgUCmnbvXvefP/9VFRUFLucktC+det0atu22GVAyVul0G+XXXbJ9OnT8z//8z9paGjIxhtvnEGDBmWHHXbIDjvskI033ri56wQAWCMccsghue666zJs2LC0adOm2OUAAJSVfv365Y477sjw4cOXmXfPPfdk4MCBRagKaA7vL1qUy6dPz6z584tdStEVCoW8P3du2nfoIPRL0q1du4zaYQehH6yAVQr9Tj755CTJwoUL88c//jF/+MMfMn369IwfPz4LFixIjx498tBDDzVroQAAa4I333wzL730UnbZZZfsuOOO6dat2zL/E+cZfwAAy/ftb387hx12WEaOHJm99947FRUVuf/++/PDH/4wDz30UH7yk58Uu0TgU5g1f37eev/9YpdRdIVCIbW1telYUSH0A1bKp3qmX5s2bTJgwIAkSX19fT744IM8/fTTmTt3brMUBwCwpnnwwQcbn83wpz/9aZn5FRUVQj8AgE8wYMCA3HDDDZk0aVLOP//8FAqFXH311enfv3+uu+66bL311sUuEQCgaFYp9HvooYfyxBNPZPr06Zk5c2batWuXgQMH5gtf+ELGjBljgAUA8AkeeOCBYpcAAFDWBgwYkJtuuikLFizInDlz0rFjx6yzzjrFLgsAoOhWKfQ75phj0rZt23z1q1/NuHHjsuWWW7rMGAAAAIAWdeedd2bTTTfNgAED0rZt27T9yPOd3nnnnTz88MPZb7/9ilcgAEARrVLoN3LkyEyfPj0333xzfvnLX2aHHXbI9ttvnx133DG9e/du7hoBANYYd955579dxokqAIDlO+2001JVVZXRo0fn6KOPbjLv1Vdfzfe//31jKQBgrbVKod+ZZ56ZJKmtrc306dPzxBNP5H//93/zgx/8IB07dsz222+fSy+9tFkLBQBYE5x22mnLnf7RuyY4UQUA8MkOPPDAXHbZZfnzn/+ciRMnNrnaDwBgbbZKod9SHTt2zPDhw9OlS5est956adWqVf74xz/mV7/6VXPVBwCwRpk+ffoy0+bMmZPf/va3ufnmm3PRRRcVoSoAgPJxwAEH5Ctf+UpOPPHEHHTQQbniiivSq1evYpcFAFB0qxT6PfbYY3niiScyffr0PP3001mwYEG6d++e7bffPmeddVZ23HHH5q4TAGCN0KFDh+VO+/rXv56FCxfmwgsvzLXXXluEygAAysfAgQNz++2357jjjsuIESMyefLktGvXrthlAQAU1SqFfkcccUQ22mij7LDDDvnyl7+cHXfcMZtssklz1wYAsFbZYostMnny5GKXAQBQFjbYYIPccsstOf3003PUUUfli1/8YrFLAgAoqlUK/X7zm99kgw02aO5aAADWWh988EFuu+229OjRo9ilAACUjTZt2mTSpEnp06ePL08BAGu9VQr9lgZ+hUIhL730UubMmZNOnTpls802S0VFRbMWCACwJvnyl7+8zLS6urr84x//yIIFC3L++ecXoSoAgPLw61//Ot27d19m+tFHH51BgwbllVdeKUJVAAClYZVCvyS5+eabc+WVV+add95pnNa1a9ccd9xxOeSQQ5qlOACANc3WW2+9zJekWrdunQ022CB77bVXevfuXaTKAABK30YbbfSJ8wYNGpRBgwatxmoAAErLKoV+P/3pTzN+/Pjss88++dKXvpRu3bpl1qxZuffeezN+/Pi0atUqBx54YHPXC
gBQ9iZOnFjsEgAAysq5556bb37zm+nZs2fOPffcf7v8mDFjVkNVAAClZ5VCv+uuuy6HHnpozjjjjCbTd99993Tp0iVTpkwR+gEALMf777+f+fPnL/fZfW+//XbWXXfdrLvuukWoDACgND3wwAP56le/mp49e+aBBx74l8tWVFQI/QCAtdYqhX6vvfZadt111+XO22WXXXLrrbd+qqIAANZUY8aMybrrrpsf/OAHy8y77LLLMn/+/EyaNKkIlQEAlKaPBn3/LvQDAFibVa7Km7p3754ZM2Ysd96TTz653AcqAwCQPPHEE9lll12WO2/48OF5/PHHV29BAAAAAKwRVulKv69+9au58sors2jRonzhC19I165d88477+S+++7LlClTcvzxxzd3nQAAa4Q5c+Z84u0711lnnbz33nurtyAAgBL3zjvv5O23307fvn2bTP/LX/6SK6+8Mn/729/SrVu3HH744dltt92KVCUAQPGtUuh37LHHpra2NlOmTMmPfvSjxulVVVU59NBDc+yxxzZbgQAAa5KNN944jz76aIYOHbrMvGnTpmWjjTYqQlUAAKXr4osvzjPPPJOf/exnjdNef/31jBw5MgsWLEifPn3y/PPPZ9SoUbn++uuzww47FLFaAIDiWaXQr6KiIqeddlqOOeaYPP3005kzZ046deqUfv36Zb311mvuGgEA1hgHHnhgJk2alE6dOmXEiBHp0qVL3nnnnfzv//5vrrvuupx00knFLhEAoKT88Y9/zFe/+tUm06677rrMnz8/11xzTYYNG5YFCxbkG9/4Rq655hqhHwCw1lqp0O+FF17Irbfemtdeey09evTIF77whQwfPrylagMAWOMcccQReeWVV3LxxRfn4osvTlVVVRoaGpIkX//61/PNb36zyBUCAJSWf/zjH9liiy2aTHvwwQez1VZbZdiwYUmStm3b5j//8z9zwQUXFKNEAICSsMKh3xNPPJFvfOMbqa+vT5cuXfLee+/lf/7nf3LWWWfl4IMPbskaAQDWGBUVFRk7dmwOP/zwPPbYY3nvvffSuXPnfP7zn89nPvOZYpcHAFByKioqUlFR0fh61qxZee2113L44Yc3WW799dfPu+++u7rLAwAoGSsc+l122WX57Gc/m6uvvjobbrhh3n///Xz/+9/P5MmThX4AACvpM5/5jJAPAGAFbLbZZnn00Ucbr+p78MEHU1FRkZ122qnJcjU1NenSpUsxSgQAKAmVK7rgX//61xx//PHZcMMNkyTt27fP9773vcyZMydvvvnmKhcwffr0fOtb38qwYcPSp0+f3H///U3mFwqFXHLJJRk2bFj69euXI444In//+9+bLPPee+/l5JNPzsCBA7P99tvn9NNPz7x581a5JgCAlnLvvffm2muvXe68KVOm5L777lvNFQEAlLZDDz00U6dOzZgxYzJ58uRcdNFF2WSTTTJ06NAmy/32t7/NlltuWaQqAQCKb4VDv3fffTcbbLBBk2lLA8BPc+uE+fPnp0+fPhk7duxy519zzTW58cYbc/bZZ+e2227LOuuskyOPPDILFy5sXOaUU07JCy+8kKlTp+bqq6/OE088kbPOOmuVawIAaCk/+tGP0rp16+XOa9u2ba655prVXBEAQGn7yle+kpNOOimPPPJIrr/++myxxRa5/PLLU139zxtYzZ49Ow8++GB23XXXIlYKAFBcK3x7z5YyfPjwDB8+fLnzCoVCbrjhhhx77LHZY489kiQXXHBBhg4dmvvvvz/77LNP/va3v+WRRx7J7bffnm233TZJMmbMmBx99NE59dRTs/7666+2bQEA+Hf+/ve/Z4sttljuvN69e+ell15azRUBAJS+o446KkcdddQnzu/atWseffTR1VgRAEDpWeEr/ZLk8MMPz8CBAxt/dthhhyTJyJEjm0wfNGhQsxT32muvpaampsntGjp06JDtttsuM2bMSJLMmDEjHTt2bAz8kmTo0KGprKzM008/3Sx1A
AA0lzZt2mT27NnLnVdTU9PkG+st4Uc/+lH69OmTH/zgB43TFi5cmHHjxmXw4MEZMGBATjjhhMyaNatF6wAAAACgea3wWaVRo0a1ZB3LVVNTk2TJt7U+qmvXro0nombNmrXMQ5qrq6vTqVOnxvevjEKhkEKhsIoV/8sVN/86y1ThY/8lS/qjFHqkFGooEfp0OfRpydGny9GCfdpc44MddtghP/rRj7LbbrulXbt2jdPnz5+fa6+9NjvuuGOzfM7yPP3007n11lvTp0+fJtPPO++8PPTQQ5k8eXI6dOiQ8ePHZ9SoUbn11ltbrBYAAAAAmldJh37FUFtbm8rKlboA8t+qrKxMu4aGFOrrk7q6Zl13OVp6yrS+vj4VRa2kRNTXp6KhIfPnzs3ixYuLVoY+bUqffow+LUn69GNauE+ba53f+c538vWvfz177rln9t577/To0SNvv/12/u///i91dXW5+OKLm+VzPm7evHn57ne/m3PPPTdXXXVV4/S5c+fmjjvuyEUXXZQhQ4YkWRICfulLX8qTTz6Z/v37t0g9AAAAADSvoj/T71/p3r17kiUPY+7Ro0fj9NmzZ6dv375Jkm7duuWdd95p8r76+vrMmTOn8f0ro2PHjqmqqvoUVX+C2tqkujpp1ar5111mll4pUV1dnYoKp6lTXZ1UVaVDhw7FrkSffoQ+/Rh9WpL06ce0cJ82NDQ0y3p69+6d22+/PZdeeml++ctf5r333kvnzp0zdOjQjBo1KptuummzfM7HnXPOORk+fHiGDh3aJPSbOXNm6urqmtxOvXfv3unZs6fQDwAAAKCMlHTo16tXr3Tv3j3Tpk3LVlttlSR5//3389RTT+Xggw9OkgwYMCC1tbWZOXNmttlmmyTJY489lsWLF6dfv34r/ZkVFRUtc+LUydhlVHz4Q5b0Ryn0SCnUUGL06Ufo05KlTz+iBfu0OccHm266aSZNmrTcea+++mo23njjZvusJLnnnnvy5z//Obfffvsy82bNmpVWrVqlY8eOTaZ37dq1xG6VnhQ+/LW2W7oP7IslCkuao2X6buWL0acf0qdN6dPSpE+bauk+LYn+BwBYwxU99Js3b15eeeWVxtevvfZann322XTq1Ck9e/bMYYcdlquuuiqbbrppevXqlUsuuSQ9evTIHnvskWTJN9F33nnnnHnmmRk3blzq6uoyfvz47LPPPll//fWLtVkAACvsnXfeyX333Ze77rorTz31VJ599tlmW/ebb76ZH/zgB/nxj3+cNm3aNNt6P0lL3Sq9oaEh9fX1qa+rb9Z1l6OlJ6eX3NpX5F9fX5+GhobMLYFbUOvTf9KnTenT0qRPm2rpPi1m7wMArC2KHvrNnDkzhx12WOPrCRMmJEn233//TJw4MUcddVQ++OCDnHXWWamtrc2gQYNy7bXXNjlpddFFF2X8+PE5/PDDU1lZmb322itjxoxZ7dsCALCiPvjgg/zqV7/K3XffnUcffTT19fX53Oc+l+9///vN+jnPPPNMZs+enQMOOKBxWkNDQ6ZPn56bb745U6ZMSV1dXWpra5tc7Td79uySulX63Nq5qa6uTnWrog9fi86tfZuqrq5OVYncglqf/pM+bUqfliZ92lRL92lz3SodAIBPVvRR/uDBg/Pcc8994vyKioqMHj06o0eP/sRlOnfu/Im3yAIAKBUNDQ155JFHctddd+WBBx7IggUL0q1btzQ0NOTiiy/Ol770pWb/zM9//vO56667mkz7/ve/n89+9rM56qijsuGGG6ZVq1aZNm1a9t577yTJiy++mDfeeGOVnufXcrdKTyo+/MUS9scSFUuaozRO2OvTZdgfS+jT0mZ/LNHSfVoS/Q8AsIYreugHALCm+8Mf/pC77747v/jFL/Luu++mc+fO+cpXvpIvf/nL2WKLLTJ48OBVuqpuRbRv3z5bbrllk2nt2rVL586dG6ePGDEiEydOTKdOndK+ffuce+65GTBgwCqFfgAAAAAUh9APA
KCFjRw5MhUVFRk8eHC+8Y1vZKeddkp19ZJh2Ny5c4tcXXL66aensrIyJ554YhYtWpRhw4Zl7NixxS4LAAAAgJUg9AMAaGFbbrll/vrXv2b69OmpqqrKu+++mz322CPt27cvSj033nhjk9dt2rTJ2LFjBX0AAAAAZayy2AUAAKzpfv7zn+fuu+/OkUcemb///e857bTTMmzYsIwePTq//vWvPeMGAKBIpk+fnm9961sZNmxY+vTpk/vvv7/J/EKhkEsuuSTDhg1Lv379csQRR+Tvf/97k2Xee++9nHzyyRk4cGC23377nH766Zk3b95q3AoAgCWEfgAAq8Hmm2+ek046Kb/+9a9z8803Z//998/06dNz2mmnJUluuOGGTJ8+vchVAgCsXebPn58+ffp84h0Prrnmmtx44405++yzc9ttt2WdddbJkUcemYULFzYuc8opp+SFF17I1KlTc/XVV+eJJ57IWWedtbo2AQCgkdt7AgCsZoMGDcqgQYMyZsyY/Pa3v83dd9+dX//617n//vvTs2fP/PrXvy52iQAAa4Xhw4dn+PDhy51XKBRyww035Nhjj80ee+yRJLngggsydOjQ3H///dlnn33yt7/9LY888khuv/32bLvttkmSMWPG5Oijj86pp56a9ddff7VtCwCAK/0AAIqkqqoqw4cPz4UXXphHH300F154YbbYYotilwUAQJLXXnstNTU1GTp0aOO0Dh06ZLvttsuMGTOSJDNmzEjHjh0bA78kGTp0aCorK/P000+v9poBgLWbK/0AAEpA27Zts++++2bfffctdikAACSpqalJknTt2rXJ9K5du2bWrFlJklmzZqVLly5N5ldXV6dTp06N718ZhUIhhUJhFSv+F+tdsvIlP2u7pfvAvliiUEghaZG+W+lSEn26lD5tSp+WJn3aVAv36YquV+gHAAAAACWgtrY2lZXNe2OuysrKNDQ0pL6+PnX19c267rL04UnT+vr6pKKiyMUUX319fRoaGjJ37twsXry4aHXo04/Rp03o0xKlT5to6T5d0XUK/QAAAADgY7p3754kmT17dnr06NE4ffbs2enbt2+SpFu3bnnnnXeavK++vj5z5sxpfP/K6NixY6qqqj5F1ctXO3duqqur06raqcClV0pUV1enwknqVFdXp6qqKh06dCh2Kfr0I/RpU/q0NOnTplq6TxsaGlasjhb5dAAAAAAoY7169Ur37t0zbdq0bLXVVkmS999/P0899VQOPvjgJMmAAQNSW1ubmTNnZptttkmSPPbYY1m8eHH69eu30p9ZUVHRIidOK5as3JUYH2V/LFFRkYqkJE7Y69PlsD+W0Kelzf5YooX7dEXXK/QDAAAAYK00b968vPLKK42vX3vttTz77LPp1KlTevbsmcMOOyxXXXVVNt100/Tq1SuXXHJJevTokT322CNJ0rt37+y8884588wzM27cuNTV1WX8+PHZZ599sv766xdrswCAtZTQDwAAAIC10syZM3PYYYc1vp4wYUKSZP/998/EiRNz1FFH5YMPPshZZ52V2traDBo0KNdee23atGnT+J6LLroo48ePz+GHH57KysrstddeGTNmzGrfFgAAoR8AAAAAa6XBgwfnueee+8T5FRUVGT16dEaPHv2Jy3Tu3DmTJk1qifIAAFZKZbELAAAAAAAAAD4doR8AAAAAAACUOaEfAAAAAAAAlDmhHwAAAAAAAJQ5oR8AAAAAAACUOaEfAAAAAAAAlDmhHwAAAAAAAJQ5oR8AAAAAAACUOaEfAAAAAAAAlDmhHwAAAAAAAJQ5oR8AAAAAAACUOaEfAAAAAAAAlDmhHwAAAAAAAJQ5oR8AAAAAAACUOaEfAAAAAAAAlDmhHwAAAAAAAJQ5oR8AAAAAAACUOaEfAAAAAAAAlDmhHwAAAAAAAJQ5oR8AAAAAAACUOaEfAAAAAAAAlDmhHwAAAAAAAJQ5oR8AAAAAAACUOaEfAAAAAAAAlDmhHwAAAAAAA
JQ5oR8AAAAAAACUOaEfAAAAAAAAlDmhHwAAAAAAAJQ5oR8AAAAAAACUOaEfAAAAAAAAlDmhHwAAAAAAAJQ5oR8AAAAAAACUOaEfAAAAAAAAlDmhHwAAAAAAAJQ5oR8AAAAAAACUOaEfAAAAAAAAlDmhHwAAAAAAAJQ5oR8AAAAAAACUOaEfAAAAAAAAlDmhHwAAAAAAAJQ5oR8AAAAAAACUOaEfAAAAAAAAlDmhHwAAAAAAAJQ5oR8AAAAAAACUOaEfAAAAAAAAlDmhHwAAAAAAAJQ5oR8AAAAAAACUOaEfAAAAAAAAlDmhHwAAAAAAAJQ5oR8AAAAAAACUOaEfAAAAAAAAlDmhHwAAAAAAAJQ5oR8AAAAAAACUOaEfAAAAAAAAlDmhHwAAAAAAAJQ5oR8AAAAAAACUOaEfAAAAAAAAlDmhHwAAAAAAAJQ5oR8AAAAAAACUuZIP/S677LL06dOnyc8XvvCFxvkLFy7MuHHjMnjw4AwYMCAnnHBCZs2aVcSKAQAAAAAAYPWqLnYBK2KLLbbI1KlTG19XVVU1/v68887LQw89lMmTJ6dDhw4ZP358Ro0alVtvvbUYpQIAAAAAAMBqVxahX1VVVbp3777M9Llz5+aOO+7IRRddlCFDhiRZEgJ+6UtfypNPPpn+/fuv5koBAErLD3/4w/zyl7/Miy++mLZt22bAgAE55ZRT8tnPfrZxmYULF2bixIm59957s2jRogwbNixjx45Nt27dilg5AAAAACuj5G/vmSQvv/xyhg0blt133z0nn3xy3njjjSTJzJkzU1dXl6FDhzYu27t37/Ts2TNPPvlkkaoFACgdjz/+eEaOHJnbbrstU6dOTX19fY488sjMnz+/cZnzzjsvDz74YCZPnpwbb7wxb7/9dkaNGlXEqgEAAABYWSV/pV+/fv0yYcKEbLbZZqmpqckVV1yRkSNH5q677sqsWbPSqlWrdOzYscl7unbtmpqamlX6vEKhkEKh0Bylf3zFzb/OMlX42H/Jkv4ohR4phRpKhD5dDn1acvTpcrRgn7bI+GA1mDJlSpPXEydOzJAhQ/LMM89khx12cOcEAAAAgDVEyYd+w4cPb/x93759s91222XXXXfNfffdl7Zt2zb759XW1qaysnkvgKysrEy7hoYU6uuTurpmXXc5WnrKtL6+PhVFraRE1NenoqEh8+fOzeLFi4tWhj5tSp9+jD4tSfr0Y1q4T4vZ+81p7ty5SZJOnTol+fd3TljZ0K/lvkCVFD78tbZbug/siyUKS5qjNIJ5fdpInzalT0uTPm2qpfu0JPofAGANV/Kh38d17Ngxn/nMZ/LKK69k6NChqaurS21tbZOr/WbPnr3cZwCu6Pqrqqqaq9x/qq1NqquTVq2af91lZulAv7q6OhUVTlOnujqpqkqHDh2KXYk+/Qh9+jH6tCTp049p4T5taGhokfWuTosXL855552XgQMHZsstt0ySZr9zQkt9gaqhoSH19fWpr6tv1nWXo6Unp5cE/v7u19fXp6GhIXNL4Isp+vSf9GlT+rQ06dOmWrpP15QvUAEAlLKyC/3mzZuXV199Nd27d88222yTVq1aZdq0adl7772TJC+++GLeeOONVb4VVUVFRcucOHUydhkVH/6QJf1RCj1SCjWUGH36Efq0ZOnTj2jBPl0TgtVx48bl+eefz09+8pMW+4yW+gLV3Nq5qa6uTnWrshu+NjuBf1PV1dWpKpEvpujTf9KnTenT0qRPm2rpPl0TvkAFAFDqSn6Uf/7552fXXXdNz5498/bbb+eyyy5LZWVl9t1333To0CEjRozIxIkT06lTp7Rv3z7nnntuBgwY4PkzAAAfcc455+Q3v/lNbrrppmywwQaN07t169asd05ouS9QJRUf/mIJ+2OJiiXNURon7PXpMuyPJfRpabM/lmjpPi2J/l8Fl112WS6//PIm0zbbbLP84he/SJIsXLgwEydOzL333ptFixZl2LBhGTt2bLp161aMc
gGAtVzJh35vvfVWTjrppLz33nvp0qVLBg0alNtuuy1dunRJkpx++umprKzMiSee2GRwBQDAkqsYxo8fn1/96le58cYbs/HGGzeZ3xJ3TgAAWJNsscUWmTp1auPrj97V4LzzzstDDz2UyZMnp0OHDhk/fnxGjRqVW2+9tRilAgBruZIP/f77v//7X85v06ZNxo4dK+gDAFiOcePG5e67786VV16Zddddt/E5fR06dEjbtm3dOQEA4N+oqqpa7h0Q5s6dmzvuuCMXXXRRhgwZkmRJCPilL30pTz75pLEUALDalXzoBwDAqrvllluSJIceemiT6RMmTMgBBxyQxJ0TAAD+lZdffjnDhg1LmzZt0r9//5x88snp2bNnZs6cmbq6ugwdOrRx2d69e6dnz56rHPoVCoXG5002p8KSlS/5Wdst3Qf2xRKFQgpJi/TdSpeS6NOl9GlT+rQ06dOmWrhPV3S9Qj8AgDXYc88992+XcecEAIDl69evXyZMmJDNNtssNTU1ueKKKzJy5MjcddddmTVrVlq1atXkuchJ0rVr18a7K6ys2traVFZWNkfpjSorK9PQ0JD6+vrU1dc367rL0ocnTevr65MyfdZkc6qvr09DQ0Pmzp2bxYsXF60Offox+rQJfVqi9GkTLd2nK7pOoR8AAAAALMfw4cMbf9+3b99st9122XXXXXPfffelbdu2zf55HTt2bPLMwOZSO3duqqur06raqcClV0pUV1enwknqVFdXp6qqKh06dCh2Kfr0I/RpU/q0NOnTplq6TxsaGlasjhb5dAAAAABYw3Ts2DGf+cxn8sorr2To0KGpq6tLbW1tk6v9Zs+evdxnAK6IioqKFjlxWrFk5a7E+Cj7Y4mKilQkJXHCXp8uh/2xhD4tbfbHEi3cpyu63ua9XwAAAAAArKHmzZuXV199Nd27d88222yTVq1aZdq0aY3zX3zxxbzxxhur9Dw/AIBPy5V+AAAAALAc559/fnbdddf07Nkzb7/9di677LJUVlZm3333TYcOHTJixIhMnDgxnTp1Svv27XPuuedmwIABQj8AoCiEfgAAAACwHG+99VZOOumkvPfee+nSpUsGDRqU2267LV26dEmSnH766amsrMyJJ56YRYsWZdiwYRk7dmyRqwYA1lZCPwAAAABYjv/+7//+l/PbtGmTsWPHCvoAgJLgmX4AAAAAAABQ5oR+AAAAAAAAUOaEfgAAAAAAAFDmhH4AAAAAAABQ5oR+AAAAAAAAUOaEfgAAAAAAAFDmhH4AAAAAAABQ5oR+AAAAAAAAUOaEfgAAAAAAAFDmhH4AAAAAAABQ5oR+AAAAAAAAUOaEfgAAAAAAAFDmhH4AAAAAAABQ5oR+AAAAAAAAUOaEfgAAAAAAAFDmhH4AAAAAAABQ5oR+AAAAAAAAUOaEfgAAAAAAAFDmhH4AAAAAAABQ5oR+AAAAAAAAUOaEfgAAAAAAAFDmhH4AAAAAAABQ5oR+AAAAAAAAUOaEfgAAAAAAAFDmhH4AAAAAAABQ5oR+AAAAAAAAUOaEfgAAAAAAAFDmhH4AAAAAAABQ5oR+AAAAAAAAUOaEfgAAAAAAAFDmhH4AAAAAAABQ5oR+AAAAAAAAUOaEfgAAAAAAAFDmhH4AAAAAAABQ5oR+AAAAAAAAUOaEfgAAAAAAAFDmhH4AAAAAAABQ5oR+AAAAAAAAUOaEfgAAAAAAAFDmhH4AAAAAAABQ5oR+AAAAAAAAUOaEfgAAAAAAAFDmhH4AAAAAAABQ5oR+AAAAAAAAUOaEfgAAAAAAAFDmhH4AAAAAAABQ5oR+AAAAAAAAUOaEfgAAAAAAAFDmhH4AAAAAAABQ5oR+AAAAAAAAUOaEfgAAAAAAAFDmhH4AAAAAAABQ5oR+AAAAAAAAUOaEfgAAAAAAAFDmhH4AAAAAAABQ5oR+AAAAAAAAUOaEfgAAAAAAAFDmhH4AAAAAAABQ5oR+AAAAAAAAUOaEfgAAAAAAAFDmhH4AAAAAA
ABQ5oR+AAAAAAAAUOaEfgAAAAAAAFDmhH4AAAAAAABQ5oR+AAAAAAAAUOaEfgAAAAAAAFDm1qjQ7+abb85uu+2WbbfdNgceeGCefvrpYpcEAFA2jKUAAFadsRQAUGxrTOh37733ZsKECTn++OPzs5/9LH379s2RRx6Z2bNnF7s0AICSZywFALDqjKUAgFKwxoR+U6dOzde+9rWMGDEim2++ecaNG5e2bdvmjjvuKHZpAAAlz1gKAGDVGUsBAKWgutgFNIdFixblmWeeyTHHHNM4rbKyMkOHDs2MGTNWaB2FQiFJUl9f3/j7ZtXQkGywQdKqVfOvu8wUCoUU5s1L/brrpqKiotjlFF/Xrkv6o76+2JXo04/Qpx+jT0uSPv2YFu7ThoaGJGmZcUKRlcNYqqGhIetssE7ir34KhUIq5lVkXX/3kyTrdF0nDQ0NqS+Bf6P06T/p06b0aWnSp021dJ8aS/1rq2MstcE66/irnyX7el6Fv/tLdV2ntP6N0qdL6NOm9Glp0qdNtXSfruhYao0I/d599900NDSka9euTaZ37do1L7744gqtY/HixUmSmTNnNnt9jfbbr+XWTXmrqVnyUwr0KZ9En1IOVkOfLh0zrEnKZSy13n7rZb2s12Lrp3y9WPNiUiL/ROlTPok+pRysjj41llq+1TGW2m+99ZL1/N1nWTUvvlgq/0TpUz6RPqUcrI4+/XdjqTUi9GsO1dXV2XbbbVNZWSmVBgCWUSgUsnjx4lRXGz4tj7EUAPCvGEv9a8ZSAMC/sqJjqTVipLXeeuulqqpqmYcjz549O926dVuhdVRWVqZ169YtUR4AQEkzlgIAWHXGUgBAqagsdgHNoXXr1tl6660zbdq0xmmLFy/OtGnTMmDAgCJWBgBQ+oylAABWnbEUAFAq1ogr/ZLkG9/4Rr73ve9lm222Sb9+/XL99dfngw8+yAEHHFDs0gAASp6xFADAqjOWAgBKwRoT+n3pS1/KO++8k0svvTQ1NTXZaqutcu21167wbRQAANZmxlIAAKvOWAoAKAUVhUKhUOwiAAAAAAAAgFW3RjzTDwAAAAAAANZmQj8AAAAAAAAoc0I/AAAAAAAAKHNCPwAAAAAAAChz1cUugPJ12mmn5Wc/+9ky03/5y1/mqquuapzXqlWrbLjhhvl//+//5Vvf+laqq6vz4osvZuzYsfnb3/6WuXPnpkePHtl3330zatSotGrVKkly22235c4778zzzz+fJNl6661z0kknpV+/fqtvIyl7n6ZPFy5cmLFjx+aZZ57J3/72t+yyyy658sorl1nXokWLcsUVV+TnP/95ampq0qNHjxx33HH56le/2uLbx5rjtNNOS21tba688srGvj355JNz9NFHNy5z//335/jjj89zzz33ib291EYbbZQHHngg8+bNy6RJk3L//ffnvffeS69evXLooYfm4IMPXh2bBfwLxlKUA2MpyoWxFKx9jKUoB8ZSlAtjqTWH0I9PZeedd86ECROaTOvSpUuTeYsWLcpDDz2Uc845J61atcoxxxyTVq1aZb/99svWW2+dDh065C9/+UvOPPPMFAqFnHTSSUmS3//+99lnn30ycODAtG7dOtdee22++c1v5p577sn666+/2reV8rWqfdrQ0JA2bdrk0EMPzf/93/994vpHjx6d2bNn5wc/+EE22WST1NTUZPHixS26Taz52rRpk2uuuSYHHXRQOnXqtMz8M844IyeffHLj62HDhmXChAnZeeedkyRVVVVJkokTJ+axxx7LhRdemI022ii/+93vMm7cuPTo0SO777776tkY4BMZS1EOjKUoR8ZSsHYwlqIcGEtRjoylypfQj0+ldevW6d69+7+dd8ghh+T+++/PAw88kGOOOSYbb7xxNt5448ZlN9poozz++ON54oknGqdNmjSpyfrOPffc/N///V+mTZuW/fbbr/k3hjXWqvZpu3btMm7cuCTJH//4x9TW1i7z/ocffjjTp0/P/fffn
86dOydJevXq1TIbwlpl6NChefnll/PDH/4wp5566jLzO3TokA4dOjSZ1rFjx2V6fcaMGdlvv/0yePDgJMlBBx2Un/70p3n66acNrqAEGEtRDoylKEfGUrB2MJaiHBhLUY6MpcqXZ/qx2rRp0yZ1dXXLnffyyy/nkUceyQ477PCJ7//ggw9SX1+/3G8WQHP5V326PA888EC22WabXHvttdl5552z99575/zzz8+CBQtasErWBpWVlTnppJNy00035a233lrl9QwYMCAPPPBA/vGPf6RQKOSxxx7LSy+9lGHDhjVjtcDqYCxFOTCWolQYSwEfZyxFOTCWolQYS5UvV/rxqfzmN7/JgAEDGl/vvPPOufTSS5ssUygUMm3atPz2t7/Nf/7nfzaZ9/Wvfz3PPPNMFi1alIMOOiijR4/+xM+66KKL0qNHjwwdOrR5N4I13qft03/l1VdfzR/+8Ie0adMmV1xxRd59992MGzcu77333jK3boCVteeee2arrbbKpZdemvPOO2+V1nHmmWfmzDPPzH/8x3+kuro6FRUVOffcc//l/8wCq4+xFOXAWIpyZSwFaz5jKcqBsRTlyliqPAn9+FQGDx6cs88+u/H1Ouus0/j7pf+g1dXVpVAoZN99980JJ5zQ5P3//d//nXnz5uUvf/lLLrjggkyZMiVHHXXUMp/zox/9KPfee29uuOGGtGnTpsW2hzXTp+3Tf6VQKKSioiIXXXRR4yXtp512Wk488cSMHTs2bdu2bbbtYO10yimn5PDDD8+RRx65Su+/8cYb8+STT+aqq65Kz54988QTTzTeO93/rELxGUtRDoylKGfGUrBmM5aiHBhLUc6MpcqP0I9PZZ111smmm2663HlL/0Fr1apVevTokerqZdttww03TJJsvvnmaWhoyFlnnZVvfvObjQ/6TJIpU6bkRz/6UaZOnZq+ffu2zIawRvu0ffqvdO/ePeuvv36Te1j37t07hUIhb731Vj7zmc98mtIhO+ywQ4YNG5ZJkyblgAMOWKn3LliwIP/93/+dyy+/PLvsskuSpG/fvnn22WczZcoUgysoAcZSlANjKcqZsRSs2YylKAfGUpQzY6ny45l+tJil/6D17Nlzhf7BKhQKqa+vz+LFixunXXPNNbnyyitz7bXXZtttt23JcllLrWyfftzAgQPz9ttvZ968eY3TXnrppVRWVmaDDTZozlJZi5188sl58MEHM2PGjJV6X319ferq6lJRUdFkelVVVQqFQnOWCLQAYynKgbEU5cBYCtZOxlKUA2MpyoGxVHlxpR9F8fOf/zzV1dXp06dPWrdunT/96U+ZNGlSvvjFL6ZVq1ZJltw64dJLL82kSZOy0UYbpaamJknSrl27rLvuusUsn7XICy+8kLq6urz33nuZN29enn322STJVlttlSTZd999c+WVV+b73/9+TjzxxLz77ru58MILM2LECLdQoNn06dMnX/7yl3PjjTeu1Pvat2+fHXfcMRdeeGHatm2bnj17Zvr06bnzzjtz2mmntVC1wOpgLEW5MJaiFBhLAR9nLEW5MJaiFBhLlRehH0VRXV2da6+9Ni+99FKSpGfPnvnP//zPHHHEEY3L3Hrrramrq8uJJ57Y5L2jRo1aqXtbw6dx9NFH5/XXX298vd9++yVJnnvuuSTJuuuumx//+Mc599xzM2LEiHTu3Dlf/OIX8+1vf7sI1bImO/HEE3Pvvfeu9PsuvvjiXHzxxTnllFMyZ86c9OzZM9/5zndy8MEHt0CVwOpiLEW5MJaiVBhLAR9lLEW5MJaiVBhLlY+KgusoAQAAAAAAoKx5ph8AAAAAAACUOaEfAAAAAAAAlDmhHwAAAAAAAJQ5oR8AAAAAAACUOaEfAAAAAAAAlDmhHwAAAAAAAJQ5oR8AAAAAAACUOaEfAAAAAAAAlDmhHwAAAAAAAJQ5oR8AAAAAAACUOaEfAAAAAAAAlDmhHwAAAAAAAJS5/x9JNrKfKyoeTQAAAABJR
U5ErkJggg==\n"},"metadata":{}}],"execution_count":14},{"cell_type":"markdown","source":"## 13. Summary Table","metadata":{}},{"cell_type":"code","source":"# Create formatted summary table\nsummary_data = []\n\nfor agg in all_aggregated:\n row = {\n \"Precision\": agg[\"precision\"].upper(),\n \"Batch Size\": agg.get(\"batch_size\", \"N/A\"),\n \"Latency/batch (ms)\": f\"{agg.get('mean_latency_mean', 0)*1000:.3f} ± {agg.get('mean_latency_std', 0)*1000:.3f}\",\n \"Throughput (samples/s)\": f\"{agg.get('throughput_mean', 0):.2f} ± {agg.get('throughput_std', 0):.2f}\",\n \"Accuracy (%)\": f\"{agg.get('accuracy_mean', 0)*100:.2f} ± {agg.get('accuracy_std', 0)*100:.2f}\",\n \"Power (W)\": f\"{agg.get('mean_power_w_mean', 0):.2f} ± {agg.get('mean_power_w_std', 0):.2f}\" if agg.get('mean_power_w_mean') else \"N/A\",\n \"Energy/sample (mJ)\": f\"{agg.get('energy_per_sample_mj_mean', 0):.3f} ± {agg.get('energy_per_sample_mj_std', 0):.3f}\" if agg.get('energy_per_sample_mj_mean') else \"N/A\",\n \"Model Size (MB)\": f\"{agg.get('model_size_mb', 0):.2f}\",\n \"Peak Memory (MB)\": f\"{agg.get('peak_memory_mb_mean', 0):.2f}\" if agg.get('peak_memory_mb_mean') else \"N/A\"\n }\n summary_data.append(row)\n\nsummary_df = pd.DataFrame(summary_data)\n\nprint(\"\\n\" + \"=\"*140)\nprint(\"FINAL SUMMARY TABLE (Per-Sample Energy)\")\nprint(\"=\"*140)\nprint(summary_df.to_string(index=False))\nprint(\"=\"*140)\nprint(\"\\nNOTE: Energy/sample = Total Energy / (num_iters × batch_size)\")\nprint(f\" With batch_size={all_aggregated[0].get('batch_size', 50)} and num_iters=300:\")\nprint(f\" Energy/sample ≈ Energy/batch / {all_aggregated[0].get('batch_size', 50)}\")\nprint(\"=\"*140)\n\n# Save summary table\nsummary_path = Path(CONFIG[\"output_dir\"]) / \"summary_table.csv\"\nsummary_df.to_csv(summary_path, index=False)\nprint(f\"\\n✓ Saved summary table: 
{summary_path}\")","metadata":{"trusted":true,"execution":{"iopub.status.busy":"2025-12-03T10:52:01.602340Z","iopub.execute_input":"2025-12-03T10:52:01.603005Z","iopub.status.idle":"2025-12-03T10:52:01.617275Z","shell.execute_reply.started":"2025-12-03T10:52:01.602985Z","shell.execute_reply":"2025-12-03T10:52:01.616685Z"}},"outputs":[{"name":"stdout","text":"\n============================================================================================================================================\nFINAL SUMMARY TABLE (Per-Sample Energy)\n============================================================================================================================================\nPrecision Batch Size Latency/batch (ms) Throughput (samples/s) Accuracy (%) Power (W) Energy/sample (mJ) Model Size (MB) Peak Memory (MB)\n FP32 50 86.897 ± 0.257 575.37 ± 1.70 86.00 ± 0.00 231.90 ± 1.79 403.048 ± 3.906 255.42 475.95\n FP16 50 83.087 ± 0.051 601.74 ± 0.38 86.00 ± 0.00 228.73 ± 0.51 380.106 ± 0.758 127.71 245.04\n INT8 50 86.684 ± 0.051 576.78 ± 0.34 88.00 ± 0.00 232.11 ± 0.48 402.426 ± 0.625 255.42 475.85\n============================================================================================================================================\n\nNOTE: Energy/sample = Total Energy / (num_iters × batch_size)\n With batch_size=50 and num_iters=300:\n Energy/sample ≈ Energy/batch / 50\n============================================================================================================================================\n\n✓ Saved summary table: /kaggle/results/summary_table.csv\n","output_type":"stream"}],"execution_count":15},{"cell_type":"markdown","source":"## 14. 
Relative Improvements\n\nCompare FP16 and INT8 against FP32 baseline.","metadata":{}},{"cell_type":"code","source":"if len(all_aggregated) >= 2:\n # Use FP32 as baseline\n baseline = next((a for a in all_aggregated if a['precision'] == 'fp32'), all_aggregated[0])\n \n print(\"\\n\" + \"=\"*70)\n print(\"RELATIVE IMPROVEMENTS vs FP32 BASELINE\")\n print(\"=\"*70)\n \n for agg in all_aggregated:\n if agg['precision'] == 'fp32':\n continue\n \n print(f\"\\n{agg['precision'].upper()} vs FP32:\")\n print(\"-\" * 40)\n \n # Latency speedup\n if 'mean_latency_mean' in baseline and 'mean_latency_mean' in agg:\n speedup = baseline['mean_latency_mean'] / agg['mean_latency_mean']\n reduction_pct = (1 - 1/speedup) * 100\n print(f\"Latency: {speedup:.2f}x faster ({reduction_pct:.1f}% reduction)\")\n \n # Energy savings\n if 'energy_per_inference_mj_mean' in baseline and 'energy_per_inference_mj_mean' in agg:\n if baseline['energy_per_inference_mj_mean'] and agg['energy_per_inference_mj_mean']:\n energy_ratio = agg['energy_per_inference_mj_mean'] / baseline['energy_per_inference_mj_mean']\n energy_savings_pct = (1 - energy_ratio) * 100\n print(f\"Energy: {energy_savings_pct:+.1f}% change\")\n \n # Accuracy delta\n if 'accuracy_mean' in baseline and 'accuracy_mean' in agg:\n accuracy_delta = (agg['accuracy_mean'] - baseline['accuracy_mean']) * 100\n print(f\"Accuracy: {accuracy_delta:+.2f} percentage points\")\n \n # Model size reduction\n if 'model_size_mb' in baseline and 'model_size_mb' in agg:\n size_ratio = agg['model_size_mb'] / baseline['model_size_mb']\n size_reduction_pct = (1 - size_ratio) * 100\n print(f\"Model Size: {size_reduction_pct:.1f}% reduction\")\n \n print(\"\\n\" + 
\"=\"*70)","metadata":{"trusted":true,"execution":{"iopub.status.busy":"2025-12-03T10:52:01.617942Z","iopub.execute_input":"2025-12-03T10:52:01.618183Z","iopub.status.idle":"2025-12-03T10:52:01.632080Z","shell.execute_reply.started":"2025-12-03T10:52:01.618167Z","shell.execute_reply":"2025-12-03T10:52:01.631436Z"}},"outputs":[{"name":"stdout","text":"\n======================================================================\nRELATIVE IMPROVEMENTS vs FP32 BASELINE\n======================================================================\n\nFP16 vs FP32:\n----------------------------------------\nLatency: 1.05x faster (4.4% reduction)\nAccuracy: +0.00 percentage points\nModel Size: 50.0% reduction\n\nINT8 vs FP32:\n----------------------------------------\nLatency: 1.00x faster (0.2% reduction)\nAccuracy: +2.00 percentage points\nModel Size: 0.0% reduction\n\n======================================================================\n","output_type":"stream"}],"execution_count":16},{"cell_type":"markdown","source":"## Notes\n\n### Key Features:\n1. **Zero-I/O Design**: All data loaded to GPU before measurements\n2. **Accurate Quantization**: Symmetric INT8 quantization for weights\n3. **Asynchronous Power Monitoring**: nvidia-smi polling in background thread\n4. **CUDA Synchronization**: Precise timing with GPU sync after each forward pass\n5. **Statistical Significance**: Multiple trials with aggregation\n6. 
**Comprehensive Metrics**: Latency, throughput, energy, accuracy, memory\n\n### Checklist:\n- ✅ Dataset loads from .pt files directly to GPU\n- ✅ No I/O operations during measurement loop\n- ✅ Model loaded once per trial with correct precision\n- ✅ Warmup phase stabilizes GPU before measurement\n- ✅ Power logger runs asynchronously without blocking\n- ✅ CUDA synchronization ensures accurate timing\n- ✅ Energy computed as Power × Time\n- ✅ Results saved in CSV and JSON formats\n- ✅ Multiple trials for statistical confidence\n- ✅ Comparison plots and summary tables generated","metadata":{}}]} \ No newline at end of file +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# Energy-Aware Quantization Measurement Harness\n", + "## ESE 5390 Final Project: Accurate Energy Measurement for Quantized LLMs\n", + "\n", + "This notebook implements a zero-I/O measurement harness for measuring:\n", + "- **Energy consumption** (per inference)\n", + "- **Latency** (per sample and per batch)\n", + "- **Throughput** (samples/second)\n", + "- **Accuracy** (classification accuracy)\n", + "- **Memory usage** (GPU memory)\n", + "\n", + "Across three precision levels:\n", + "- **FP32**: Full precision baseline\n", + "- **FP16**: Half precision\n", + "- **INT8**: 8-bit quantized (simulated on GPU)" + ] + }, + { + "cell_type": "code", + "execution_count": 1, + "metadata": { + "execution": { + "iopub.execute_input": "2025-12-03T10:43:39.562589Z", + "iopub.status.busy": "2025-12-03T10:43:39.562275Z", + "iopub.status.idle": "2025-12-03T10:43:39.714104Z", + "shell.execute_reply": "2025-12-03T10:43:39.713178Z", + "shell.execute_reply.started": "2025-12-03T10:43:39.562568Z" + }, + "trusted": true + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "fatal: destination path 'energy_aware_quantization' already exists and is not an empty directory.\n" + ] + } + ], + "source": [ + "!git clone 
https://github.com/krishkc5/energy_aware_quantization.git" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "\n" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## 1. Setup and Imports" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "execution": { + "iopub.execute_input": "2025-12-03T10:43:39.715836Z", + "iopub.status.busy": "2025-12-03T10:43:39.715621Z", + "iopub.status.idle": "2025-12-03T10:43:39.808302Z", + "shell.execute_reply": "2025-12-03T10:43:39.807563Z", + "shell.execute_reply.started": "2025-12-03T10:43:39.715815Z" + }, + "trusted": true + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "PyTorch version: 2.6.0+cu124\n", + "CUDA available: True\n", + "CUDA device: Tesla P100-PCIE-16GB\n", + "CUDA version: 12.4\n" + ] + } + ], + "source": [ + "import torch\n", + "import torch.nn as nn\n", + "from transformers import AutoModelForSequenceClassification\n", + "import numpy as np\n", + "import pandas as pd\n", + "import json\n", + "import time\n", + "import subprocess\n", + "import threading\n", + "from pathlib import Path\n", + "from typing import Dict, List, Tuple, Optional\n", + "from datetime import datetime\n", + "import warnings\n", + "warnings.filterwarnings('ignore')\n", + "\n", + "from dataclasses import dataclass\n", + "from typing import Dict, Optional, Tuple\n", + "\n", + "from torch.ao.quantization import QuantStub, DeQuantStub\n", + "from transformers import AutoConfig, AutoModelForSequenceClassification\n", + "\n", + "\n", + "print(f\"PyTorch version: {torch.__version__}\")\n", + "print(f\"CUDA available: {torch.cuda.is_available()}\")\n", + "if torch.cuda.is_available():\n", + " print(f\"CUDA device: {torch.cuda.get_device_name(0)}\")\n", + " print(f\"CUDA version: {torch.version.cuda}\")" + ] + }, + { + 
"cell_type": "markdown", + "metadata": {}, + "source": [ + "## 2. Dataset Loading (Zero-I/O Design)\n", + "\n", + "Load pre-tokenized tensors directly to GPU before any measurements." + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "metadata": { + "execution": { + "iopub.execute_input": "2025-12-03T10:43:39.809413Z", + "iopub.status.busy": "2025-12-03T10:43:39.809181Z", + "iopub.status.idle": "2025-12-03T10:43:39.816850Z", + "shell.execute_reply": "2025-12-03T10:43:39.816218Z", + "shell.execute_reply.started": "2025-12-03T10:43:39.809395Z" + }, + "trusted": true + }, + "outputs": [], + "source": [ + "def load_pretokenized_dataset(dataset_path: str, device: str = \"cuda\") -> Tuple[torch.Tensor, torch.Tensor, torch.Tensor, Dict]:\n", + " \"\"\"\n", + " Load pre-tokenized dataset from separate .pt files.\n", + " \n", + " Args:\n", + " dataset_path: Path to directory containing input_ids.pt, attention_mask.pt, labels.pt, metadata.json\n", + " device: Device to load tensors to ('cuda' or 'cpu')\n", + " \n", + " Returns:\n", + " input_ids: [N, seq_len] tensor on device\n", + " attention_mask: [N, seq_len] tensor on device\n", + " labels: [N] tensor on device\n", + " metadata: Dictionary with dataset info\n", + " \"\"\"\n", + " dataset_path = Path(dataset_path)\n", + " print(f\"\\nLoading dataset from: {dataset_path}\")\n", + " \n", + " # Load tensors\n", + " input_ids = torch.load(dataset_path / \"input_ids.pt\", map_location=device)\n", + " attention_mask = torch.load(dataset_path / \"attention_mask.pt\", map_location=device)\n", + " labels = torch.load(dataset_path / \"labels.pt\", map_location=device)\n", + " \n", + " # Load metadata\n", + " with open(dataset_path / \"metadata.json\", 'r') as f:\n", + " metadata = json.load(f)\n", + " \n", + " print(f\"✓ Loaded {input_ids.shape[0]} samples\")\n", + " print(f\" - Sequence length: {input_ids.shape[1]}\")\n", + " print(f\" - Device: {input_ids.device}\")\n", + " print(f\" - Dataset: 
{metadata.get('dataset_name', 'unknown')}\")\n", + " print(f\" - Labels: {metadata.get('num_labels', 2)}\")\n", + " \n", + " # Calculate memory footprint\n", + " total_bytes = (input_ids.element_size() * input_ids.nelement() + \n", + " attention_mask.element_size() * attention_mask.nelement() + \n", + " labels.element_size() * labels.nelement())\n", + " print(f\" - Memory: {total_bytes / 1024**2:.2f} MB\")\n", + " \n", + " return input_ids, attention_mask, labels, metadata" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## 3. Model Loading with Accurate Quantization\n", + "\n", + "Load models in FP32, FP16, or INT8 precision." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "execution": { + "iopub.execute_input": "2025-12-03T10:43:39.818345Z", + "iopub.status.busy": "2025-12-03T10:43:39.818175Z", + "iopub.status.idle": "2025-12-03T10:43:39.836808Z", + "shell.execute_reply": "2025-12-03T10:43:39.836128Z", + "shell.execute_reply.started": "2025-12-03T10:43:39.818331Z" + }, + "trusted": true + }, + "outputs": [], + "source": [ + "# Optional bitsandbytes integration for true INT8 on CUDA GPUs (e.g., T4)\n", + "try:\n", + " from transformers import BitsAndBytesConfig\n", + "\n", + " _HAS_BITSANDBYTES = True\n", + "except Exception: # pragma: no cover - optional dependency\n", + " BitsAndBytesConfig = None\n", + " _HAS_BITSANDBYTES = False\n", + "\n", + "\n", + "class QuantDistilBertWrapper(nn.Module):\n", + " \"\"\"Wrap a DistilBERT classification model with Quant/DeQuant stubs.\n", + "\n", + " Used for static PTQ (prepare/convert) on CPU.\n", + " \"\"\"\n", + "\n", + " def __init__(self, base_model: nn.Module):\n", + " super().__init__()\n", + " self.quant = QuantStub()\n", + " self.dequant = DeQuantStub()\n", + " self.model = base_model # usually AutoModelForSequenceClassification\n", + "\n", + " def forward(self, input_ids, attention_mask=None, **kwargs):\n", + " \"\"\"Quantization-aware forward that keeps 
token IDs intact.\n", + "\n", + " Token IDs are categorical indices and must not be quantized. We:\n", + " 1. Compute embeddings from raw `input_ids`\n", + " 2. Quantize the embedding activations\n", + " 3. Run the model on quantized embeddings\n", + " 4. Dequantize the final logits\n", + " \"\"\"\n", + " # 1) Standard embedding lookup using raw token IDs\n", + " embeddings = self.model.distilbert.embeddings(input_ids)\n", + "\n", + " # 2) Quantize the embedding activations (not the IDs)\n", + " embeddings_q = self.quant(embeddings)\n", + "\n", + " # 3) Run the model using quantized embeddings\n", + " outputs = self.model(\n", + " inputs_embeds=embeddings_q,\n", + " attention_mask=attention_mask,\n", + " **kwargs,\n", + " )\n", + "\n", + " # 4) Dequantize logits before returning\n", + " logits = self.dequant(outputs.logits)\n", + "\n", + " # Return an object with `.logits` so existing code keeps working\n", + " class OutputWrapper:\n", + " def __init__(self, logits_tensor):\n", + " self.logits = logits_tensor\n", + "\n", + " return OutputWrapper(logits)\n", + "\n", + "\n", + "@dataclass\n", + "class ModelConfig:\n", + " \"\"\"Configuration for model loading.\"\"\"\n", + "\n", + " model_name: str\n", + " precision: str # \"fp32\", \"fp16\", \"int8\"\n", + " device: str = \"cuda\"\n", + " num_labels: int = 2\n", + "\n", + "\n", + "@dataclass\n", + "class LayerwiseQuantConfig:\n", + " \"\"\"Configuration for hybrid precision per layer.\n", + "\n", + " `layer_precision` maps a substring (matching module names) to a desired\n", + " precision: \"fp32\", \"fp16\", or \"int8\".\n", + " - \"int8\" -> attach qconfig so the module is statically quantized\n", + " - \"fp32\"/\"fp16\" -> clear qconfig so the module stays in float\n", + " \"\"\"\n", + "\n", + " layer_precision: Dict[str, str]\n", + "\n", + "\n", + "def apply_layerwise_qconfig(\n", + " model: nn.Module,\n", + " qconfig: torch.ao.quantization.QConfig,\n", + " cfg: LayerwiseQuantConfig,\n", + ") -> None:\n", + " 
\"\"\"Attach qconfig selectively to DistilBERT submodules.\"\"\"\n", + " for name, module in model.named_modules():\n", + " # Skip explicit quant/dequant stubs themselves\n", + " if isinstance(module, (QuantStub, DeQuantStub)):\n", + " continue\n", + "\n", + " matched_precision: Optional[str] = None\n", + " for pattern, prec in cfg.layer_precision.items():\n", + " if pattern in name:\n", + " matched_precision = prec\n", + "\n", + " if matched_precision is None:\n", + " # No explicit rule -> leave whatever global qconfig is set\n", + " continue\n", + "\n", + " if matched_precision.lower() == \"int8\":\n", + " module.qconfig = qconfig\n", + " else:\n", + " # Any non-int8 precision means \"keep this layer in float\"\n", + " module.qconfig = None\n", + "\n", + "\n", + "\n", + "\n", + "def load_model(\n", + " model_name: str = \"distilbert-base-uncased-finetuned-sst-2-english\",\n", + " precision: str = \"fp32\",\n", + " device: str = \"cuda\",\n", + " num_labels: int = 2,\n", + " verbose: bool = True,\n", + " layerwise_cfg: Optional[LayerwiseQuantConfig] = None,\n", + ") -> nn.Module:\n", + " \"\"\"Load DistilBERT classifier in the requested precision.\n", + "\n", + " Supports three precision modes:\n", + " - \"fp32\": Full precision (32-bit)\n", + " - \"fp16\": Half precision (16-bit)\n", + " - \"int8\": GPU (CUDA): bitsandbytes 8-bit kernels when available.\n", + " CPU: static PTQ via QuantDistilBertWrapper.\n", + " \"\"\"\n", + "\n", + " precision = precision.lower()\n", + " valid_precisions = [\"fp32\", \"fp16\", \"int8\"]\n", + "\n", + " if precision not in valid_precisions:\n", + " raise ValueError(f\"Invalid precision: {precision}. 
Must be one of {valid_precisions}\")\n", + "\n", + " if device == \"cuda\" and not torch.cuda.is_available():\n", + " raise ValueError(\"CUDA requested but not available\")\n", + "\n", + " if verbose:\n", + " print(f\"\\nLoading model: {model_name}\")\n", + " print(f\"Precision: {precision}\")\n", + " print(f\"Device: {device}\")\n", + "\n", + " # Load model configuration\n", + " config = AutoConfig.from_pretrained(model_name)\n", + " config.num_labels = num_labels\n", + "\n", + " # True INT8 on CUDA using bitsandbytes (e.g., on a T4)\n", + " if precision == \"int8\" and device == \"cuda\" and _HAS_BITSANDBYTES:\n", + " if verbose:\n", + " print(\" Using bitsandbytes INT8 on CUDA (Tensor Core friendly)\")\n", + "\n", + " bnb_config = BitsAndBytesConfig(\n", + " load_in_8bit=True,\n", + " llm_int8_threshold=6.0,\n", + " llm_int8_has_fp16_weight=False,\n", + " )\n", + "\n", + " model = AutoModelForSequenceClassification.from_pretrained(\n", + " model_name,\n", + " config=config,\n", + " quantization_config=bnb_config,\n", + " device_map={\"\": 0},\n", + " )\n", + "\n", + " else:\n", + " # Default: load FP32 weights, then cast/quantize as needed\n", + " model = AutoModelForSequenceClassification.from_pretrained(\n", + " model_name,\n", + " config=config,\n", + " torch_dtype=torch.float32,\n", + " )\n", + "\n", + " if precision == \"fp32\":\n", + " model = model.to(device)\n", + "\n", + " elif precision == \"fp16\":\n", + " model = model.half().to(device)\n", + "\n", + " elif precision == \"int8\":\n", + " if device == \"cuda\":\n", + " # CUDA INT8 without bitsandbytes is not supported in this notebook.\n", + " raise RuntimeError(\n", + " \"INT8 on CUDA requested in notebook, but bitsandbytes is not available. 
\"\n", + " \"Install bitsandbytes + recent transformers, or use device='cpu' for PTQ.\"\n", + " )\n", + " else:\n", + " # Static PTQ on CPU using wrapper + qconfig + prepare/convert\n", + " if verbose:\n", + " print(\" Using CPU static PTQ with QuantDistilBertWrapper\")\n", + "\n", + " wrapped = QuantDistilBertWrapper(model)\n", + " qconfig = torch.ao.quantization.get_default_qconfig(\"fbgemm\")\n", + "\n", + " if layerwise_cfg is not None:\n", + " apply_layerwise_qconfig(wrapped, qconfig, layerwise_cfg)\n", + " else:\n", + " wrapped.qconfig = qconfig\n", + "\n", + " if verbose:\n", + " print(\" Preparing model for static quantization...\")\n", + "\n", + " prepared = torch.ao.quantization.prepare(wrapped, inplace=False)\n", + " prepared.eval()\n", + "\n", + " vocab_size = getattr(config, \"vocab_size\", 30522)\n", + " seq_len = getattr(config, \"max_position_embeddings\", 128)\n", + "\n", + " with torch.no_grad():\n", + " for _ in range(10):\n", + " dummy_ids = torch.randint(\n", + " low=0,\n", + " high=vocab_size,\n", + " size=(8, seq_len),\n", + " dtype=torch.long,\n", + " )\n", + " dummy_mask = torch.ones_like(dummy_ids)\n", + " _ = prepared(input_ids=dummy_ids, attention_mask=dummy_mask)\n", + "\n", + " if verbose:\n", + " print(\" Converting calibrated model to INT8...\")\n", + "\n", + " quantized = torch.ao.quantization.convert(prepared, inplace=False)\n", + " model = quantized.to(\"cpu\")\n", + "\n", + " model.eval()\n", + "\n", + " if verbose:\n", + " print(\"✓ Model loaded successfully\")\n", + " print(f\" - Parameters: {count_parameters(model):,}\")\n", + " print(f\" - Model size: {get_model_size_mb(model):.2f} MB\")\n", + "\n", + " first_param = next(model.parameters())\n", + " print(f\" - Parameter dtype: {first_param.dtype}\")\n", + " print(f\" - Parameter device: {first_param.device}\")\n", + "\n", + " return model\n", + "\n", + "\n", + "def load_model_with_autocast(\n", + " model_name: str = \"distilbert-base-uncased-finetuned-sst-2-english\",\n", + 
" device: str = \"cuda\",\n", + " num_labels: int = 2,\n", + " verbose: bool = True,\n", + ") -> Tuple[nn.Module, torch.autocast]:\n", + " \"\"\"Load FP32 model with autocast context for FP16 inference.\"\"\"\n", + "\n", + " if verbose:\n", + " print(f\"\\nLoading model with autocast: {model_name}\")\n", + "\n", + " config = AutoConfig.from_pretrained(model_name)\n", + " config.num_labels = num_labels\n", + "\n", + " model = AutoModelForSequenceClassification.from_pretrained(\n", + " model_name,\n", + " config=config,\n", + " )\n", + " model = model.to(device)\n", + " model.eval()\n", + "\n", + " autocast_ctx = torch.autocast(device_type=device, dtype=torch.float16)\n", + "\n", + " if verbose:\n", + " print(\"Model loaded with autocast\")\n", + " print(f\" - Autocast device: {device}\")\n", + " print(\" - Autocast dtype: torch.float16\")\n", + "\n", + " return model, autocast_ctx\n", + "\n", + "\n", + "def count_parameters(model: nn.Module) -> int:\n", + " \"\"\"Count total number of parameters in model.\"\"\"\n", + "\n", + " return sum(p.numel() for p in model.parameters())\n", + "\n", + "\n", + "def get_model_size_mb(model: nn.Module) -> float:\n", + " \"\"\"Get model size in megabytes.\"\"\"\n", + "\n", + " param_size = 0\n", + " for param in model.parameters():\n", + " param_size += param.nelement() * param.element_size()\n", + "\n", + " buffer_size = 0\n", + " for buffer in model.buffers():\n", + " buffer_size += buffer.nelement() * buffer.element_size()\n", + "\n", + " size_mb = (param_size + buffer_size) / 1024 / 1024\n", + " return size_mb\n", + "\n", + "\n", + "def get_model_info(model: nn.Module) -> dict:\n", + " \"\"\"Get detailed model information as a dict.\"\"\"\n", + "\n", + " first_param = next(model.parameters())\n", + "\n", + " return {\n", + " \"num_parameters\": count_parameters(model),\n", + " \"model_size_mb\": get_model_size_mb(model),\n", + " \"dtype\": str(first_param.dtype),\n", + " \"device\": str(first_param.device),\n", + " 
\"is_quantized\": hasattr(model, \"qconfig\"),\n", + " }\n" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## 4. GPU Warmup\n", + "\n", + "Stabilize GPU clocks and compile CUDA kernels before measurement." + ] + }, + { + "cell_type": "code", + "execution_count": 5, + "metadata": { + "execution": { + "iopub.execute_input": "2025-12-03T10:43:39.837989Z", + "iopub.status.busy": "2025-12-03T10:43:39.837641Z", + "iopub.status.idle": "2025-12-03T10:43:39.855895Z", + "shell.execute_reply": "2025-12-03T10:43:39.855158Z", + "shell.execute_reply.started": "2025-12-03T10:43:39.837965Z" + }, + "trusted": true + }, + "outputs": [], + "source": [ + "def warmup_gpu(model: nn.Module, input_ids: torch.Tensor, attention_mask: torch.Tensor, \n", + " num_steps: int = 50, verbose: bool = True) -> None:\n", + " \"\"\"\n", + " Warmup GPU to stabilize clocks and compile kernels.\n", + " \n", + " Args:\n", + " model: Model to warmup\n", + " input_ids: Sample input tensor\n", + " attention_mask: Sample attention mask\n", + " num_steps: Number of warmup iterations\n", + " verbose: Print progress\n", + " \"\"\"\n", + " if verbose:\n", + " print(f\"\\nWarming up GPU for {num_steps} iterations...\")\n", + " \n", + " model.eval()\n", + " \n", + " with torch.no_grad():\n", + " for i in range(num_steps):\n", + " _ = model(input_ids=input_ids, attention_mask=attention_mask)\n", + " \n", + " if verbose and (i + 1) % 10 == 0:\n", + " print(f\" Warmup: {i+1}/{num_steps}\", end='\\r')\n", + " \n", + " if torch.cuda.is_available():\n", + " torch.cuda.synchronize()\n", + " \n", + " if verbose:\n", + " print(f\"\\n ✓ Warmup complete\")" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## 5. Power Logger (Asynchronous)\n", + "\n", + "Monitor GPU power draw using nvidia-smi without interfering with timing." 
+ ] + }, + { + "cell_type": "code", + "execution_count": 6, + "metadata": { + "execution": { + "iopub.execute_input": "2025-12-03T10:43:39.856880Z", + "iopub.status.busy": "2025-12-03T10:43:39.856687Z", + "iopub.status.idle": "2025-12-03T10:43:39.872502Z", + "shell.execute_reply": "2025-12-03T10:43:39.871822Z", + "shell.execute_reply.started": "2025-12-03T10:43:39.856865Z" + }, + "trusted": true + }, + "outputs": [], + "source": [ + "class PowerLogger:\n", + " \"\"\"\n", + " Asynchronous power logger using nvidia-smi polling.\n", + " \"\"\"\n", + " \n", + " def __init__(self, sample_interval_ms: int = 100, gpu_id: int = 0, verbose: bool = False):\n", + " self.sample_interval_ms = sample_interval_ms\n", + " self.gpu_id = gpu_id\n", + " self.verbose = verbose\n", + " self.samples = []\n", + " self.is_running = False\n", + " self._lock = threading.Lock()\n", + " self._reader_thread = None\n", + " \n", + " # Verify nvidia-smi is available\n", + " self._check_nvidia_smi()\n", + " \n", + " def _check_nvidia_smi(self) -> None:\n", + " \"\"\"Check if nvidia-smi is available.\"\"\"\n", + " try:\n", + " result = subprocess.run(\n", + " [\"nvidia-smi\", \"--query-gpu=power.draw\", \"--format=csv,noheader,nounits\", f\"--id={self.gpu_id}\"],\n", + " capture_output=True, text=True, timeout=5\n", + " )\n", + " if result.returncode != 0:\n", + " raise RuntimeError(f\"nvidia-smi failed: {result.stderr}\")\n", + " power = float(result.stdout.strip())\n", + " if self.verbose:\n", + " print(f\" ✓ nvidia-smi available, current power: {power:.2f} W\")\n", + " except Exception as e:\n", + " raise RuntimeError(f\"nvidia-smi not available: {e}\")\n", + " \n", + " def start(self) -> None:\n", + " \"\"\"Start power logging in background thread.\"\"\"\n", + " with self._lock:\n", + " if self.is_running:\n", + " raise RuntimeError(\"PowerLogger already running\")\n", + " self.samples = []\n", + " self.is_running = True\n", + " \n", + " self._reader_thread = 
threading.Thread(target=self._poll_power, daemon=True)\n", + " self._reader_thread.start()\n", + " \n", + " if self.verbose:\n", + " print(f\" ✓ Power logger started (interval: {self.sample_interval_ms} ms)\")\n", + " \n", + " def _poll_power(self) -> None:\n", + " \"\"\"Background thread that polls nvidia-smi.\"\"\"\n", + " interval_sec = self.sample_interval_ms / 1000.0\n", + " \n", + " while self.is_running:\n", + " try:\n", + " result = subprocess.run(\n", + " [\"nvidia-smi\", \"--query-gpu=power.draw\", \"--format=csv,noheader,nounits\", f\"--id={self.gpu_id}\"],\n", + " capture_output=True, text=True, timeout=2\n", + " )\n", + " \n", + " if result.returncode == 0:\n", + " power = float(result.stdout.strip())\n", + " with self._lock:\n", + " self.samples.append(power)\n", + " except:\n", + " pass\n", + " \n", + " time.sleep(interval_sec)\n", + " \n", + " def stop(self) -> None:\n", + " \"\"\"Stop power logging.\"\"\"\n", + " with self._lock:\n", + " if not self.is_running:\n", + " raise RuntimeError(\"PowerLogger not running\")\n", + " self.is_running = False\n", + " \n", + " if self._reader_thread:\n", + " self._reader_thread.join(timeout=2)\n", + " \n", + " if self.verbose:\n", + " print(f\" ✓ Power logger stopped ({len(self.samples)} samples)\")\n", + " \n", + " def get_samples(self) -> List[float]:\n", + " \"\"\"Return collected power samples.\"\"\"\n", + " with self._lock:\n", + " return self.samples.copy()\n", + " \n", + " def __enter__(self):\n", + " self.start()\n", + " return self\n", + " \n", + " def __exit__(self, *args):\n", + " if self.is_running:\n", + " self.stop()" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## 6. Timed Inference Benchmark\n", + "\n", + "Run inference with precise timing and CUDA synchronization." 
+ ] + }, + { + "cell_type": "code", + "execution_count": 7, + "metadata": { + "execution": { + "iopub.execute_input": "2025-12-03T10:43:39.873438Z", + "iopub.status.busy": "2025-12-03T10:43:39.873193Z", + "iopub.status.idle": "2025-12-03T10:43:39.891195Z", + "shell.execute_reply": "2025-12-03T10:43:39.890469Z", + "shell.execute_reply.started": "2025-12-03T10:43:39.873413Z" + }, + "trusted": true + }, + "outputs": [], + "source": [ + "def run_timed_inference(model: nn.Module, input_ids: torch.Tensor, attention_mask: torch.Tensor,\n", + " num_iters: int, verbose: bool = True) -> Dict:\n", + " \"\"\"\n", + " Run timed inference loop with CUDA synchronization.\n", + " \n", + " Args:\n", + " model: Model in eval mode\n", + " input_ids: Input tensor [batch_size, seq_len]\n", + " attention_mask: Attention mask [batch_size, seq_len]\n", + " num_iters: Number of iterations\n", + " verbose: Print progress\n", + " \n", + " Returns:\n", + " Dictionary with timing metrics\n", + " \"\"\"\n", + " if verbose:\n", + " print(f\"\\nRunning {num_iters} timed inference iterations...\")\n", + " \n", + " model.eval()\n", + " batch_size = input_ids.shape[0]\n", + " latencies = []\n", + " \n", + " # Ensure clean state\n", + " if torch.cuda.is_available():\n", + " torch.cuda.synchronize()\n", + " \n", + " start_time = time.perf_counter()\n", + " \n", + " with torch.no_grad():\n", + " for i in range(num_iters):\n", + " iter_start = time.perf_counter()\n", + " \n", + " # Forward pass\n", + " _ = model(input_ids=input_ids, attention_mask=attention_mask)\n", + " \n", + " # Synchronize to ensure completion\n", + " if torch.cuda.is_available():\n", + " torch.cuda.synchronize()\n", + " \n", + " iter_end = time.perf_counter()\n", + " latencies.append(iter_end - iter_start)\n", + " \n", + " if verbose and (i + 1) % 50 == 0:\n", + " print(f\" Progress: {i+1}/{num_iters}\", end='\\r')\n", + " \n", + " end_time = time.perf_counter()\n", + " total_time = end_time - start_time\n", + " \n", + " if 
verbose:\n", + " print(f\"\\n ✓ Inference complete: {total_time:.3f}s\")\n", + " \n", + " latencies = np.array(latencies)\n", + " \n", + " return {\n", + " \"total_time\": float(total_time),\n", + " \"num_iters\": num_iters,\n", + " \"batch_size\": batch_size,\n", + " \"mean_latency\": float(np.mean(latencies)),\n", + " \"std_latency\": float(np.std(latencies)),\n", + " \"min_latency\": float(np.min(latencies)),\n", + " \"max_latency\": float(np.max(latencies)),\n", + " \"median_latency\": float(np.median(latencies)),\n", + " \"throughput\": float(batch_size * num_iters / total_time)\n", + " }\n", + "\n", + "\n", + "def compute_accuracy(model: nn.Module, input_ids: torch.Tensor, attention_mask: torch.Tensor,\n", + " labels: torch.Tensor, verbose: bool = True) -> Dict:\n", + " \"\"\"\n", + " Compute model accuracy on dataset.\n", + " \n", + " Returns:\n", + " Dictionary with accuracy metrics\n", + " \"\"\"\n", + " if verbose:\n", + " print(f\"\\nComputing accuracy...\")\n", + " \n", + " model.eval()\n", + " \n", + " with torch.no_grad():\n", + " outputs = model(input_ids=input_ids, attention_mask=attention_mask)\n", + " predictions = torch.argmax(outputs.logits, dim=-1)\n", + " correct = (predictions == labels).sum().item()\n", + " total = labels.shape[0]\n", + " accuracy = correct / total\n", + " \n", + " if verbose:\n", + " print(f\" ✓ Accuracy: {accuracy*100:.2f}% ({correct}/{total})\")\n", + " \n", + " return {\n", + " \"accuracy\": float(accuracy),\n", + " \"num_correct\": int(correct),\n", + " \"num_samples\": int(total)\n", + " }" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## 7. Energy Computation\n", + "\n", + "Compute energy metrics from power samples and timing." 
+ ] + }, + { + "cell_type": "code", + "execution_count": 8, + "metadata": { + "execution": { + "iopub.execute_input": "2025-12-03T10:43:39.892110Z", + "iopub.status.busy": "2025-12-03T10:43:39.891897Z", + "iopub.status.idle": "2025-12-03T10:43:39.904570Z", + "shell.execute_reply": "2025-12-03T10:43:39.903917Z", + "shell.execute_reply.started": "2025-12-03T10:43:39.892090Z" + }, + "trusted": true + }, + "outputs": [], + "source": [ + "def compute_energy_metrics(power_samples: List[float], timing_results: Dict) -> Dict:\n", + " \"\"\"\n", + " Compute energy consumption from power samples and timing.\n", + " \n", + " Energy (J) = Power (W) × Time (s)\n", + " \n", + " IMPORTANT: This computes energy PER SAMPLE, not per batch iteration.\n", + " - Each iteration processes batch_size samples\n", + " - Energy per sample = Total energy / (num_iters × batch_size)\n", + " \n", + " Args:\n", + " power_samples: List of power measurements in Watts\n", + " timing_results: Dictionary from run_timed_inference\n", + " \n", + " Returns:\n", + " Dictionary with energy metrics\n", + " \"\"\"\n", + " if len(power_samples) == 0:\n", + " print(\" ⚠️ Warning: No power samples collected\")\n", + " return {\n", + " \"mean_power_w\": None,\n", + " \"std_power_w\": None,\n", + " \"min_power_w\": None,\n", + " \"max_power_w\": None,\n", + " \"num_power_samples\": 0,\n", + " \"total_energy_j\": None,\n", + " \"energy_per_batch_j\": None,\n", + " \"energy_per_batch_mj\": None,\n", + " \"energy_per_sample_j\": None,\n", + " \"energy_per_sample_mj\": None,\n", + " \"samples_per_joule\": None\n", + " }\n", + " \n", + " power_array = np.array(power_samples)\n", + " \n", + " # Power statistics\n", + " mean_power = float(np.mean(power_array))\n", + " std_power = float(np.std(power_array))\n", + " min_power = float(np.min(power_array))\n", + " max_power = float(np.max(power_array))\n", + " \n", + " # Energy computation\n", + " total_time = timing_results[\"total_time\"]\n", + " num_iters = 
timing_results[\"num_iters\"]\n", + " batch_size = timing_results[\"batch_size\"]\n", + " total_samples = num_iters * batch_size\n", + " \n", + " # Total energy for all iterations\n", + " total_energy = mean_power * total_time # Joules\n", + " \n", + " # Energy per batch (per iteration)\n", + " energy_per_batch = total_energy / num_iters\n", + " energy_per_batch_mj = energy_per_batch * 1000 # millijoules\n", + " \n", + " # Energy per sample (correct metric!)\n", + " energy_per_sample = total_energy / total_samples\n", + " energy_per_sample_mj = energy_per_sample * 1000 # millijoules\n", + " \n", + " return {\n", + " \"mean_power_w\": mean_power,\n", + " \"std_power_w\": std_power,\n", + " \"min_power_w\": min_power,\n", + " \"max_power_w\": max_power,\n", + " \"num_power_samples\": len(power_samples),\n", + " \"total_energy_j\": total_energy,\n", + " \"total_samples\": total_samples,\n", + " # Per-batch metrics (for reference)\n", + " \"energy_per_batch_j\": energy_per_batch,\n", + " \"energy_per_batch_mj\": energy_per_batch_mj,\n", + " # Per-sample metrics (the correct ones!)\n", + " \"energy_per_sample_j\": energy_per_sample,\n", + " \"energy_per_sample_mj\": energy_per_sample_mj,\n", + " \"samples_per_joule\": 1.0 / energy_per_sample if energy_per_sample > 0 else None\n", + " }" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## 8. Complete Measurement Function\n", + "\n", + "Orchestrate the complete measurement pipeline for one trial." 
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "execution": { + "iopub.execute_input": "2025-12-03T10:43:39.905498Z", + "iopub.status.busy": "2025-12-03T10:43:39.905259Z", + "iopub.status.idle": "2025-12-03T10:43:39.923699Z", + "shell.execute_reply": "2025-12-03T10:43:39.923070Z", + "shell.execute_reply.started": "2025-12-03T10:43:39.905467Z" + }, + "trusted": true + }, + "outputs": [], + "source": [ + "def run_single_trial(model_name: str, precision: str, input_ids: torch.Tensor, \n", + " attention_mask: torch.Tensor, labels: torch.Tensor,\n", + " num_iters: int = 300, warmup_iters: int = 50, \n", + " power_interval_ms: int = 100, device: str = \"cuda\",\n", + " trial_num: int = 1, verbose: bool = True) -> Dict:\n", + " \"\"\"\n", + " Run a complete measurement trial for one precision level.\n", + " \n", + " Pipeline:\n", + " 1. Load model\n", + " 2. Warmup GPU\n", + " 3. Start power logging\n", + " 4. Run timed inference\n", + " 5. Stop power logging\n", + " 6. Compute accuracy\n", + " 7. Compute energy metrics\n", + " 8. Collect memory stats\n", + " \n", + " Returns:\n", + " Dictionary with all metrics\n", + " \"\"\"\n", + " print(f\"\\n{'='*70}\")\n", + " print(f\"TRIAL {trial_num}: {precision.upper()}\")\n", + " print(f\"{'='*70}\")\n", + " \n", + " # Reset GPU memory\n", + " if torch.cuda.is_available():\n", + " torch.cuda.reset_peak_memory_stats()\n", + " torch.cuda.empty_cache()\n", + " \n", + " # 1. Load model\n", + " model = load_model(model_name, precision, device, num_labels=2, verbose=verbose)\n", + " model_info = get_model_info(model)\n", + " \n", + " # 2. Warmup\n", + " warmup_gpu(model, input_ids, attention_mask, num_steps=warmup_iters, verbose=verbose)\n", + " \n", + " # 3. 
Start power logging\n", + " print(f\"\\nStarting measurement...\")\n", + " power_logger = PowerLogger(sample_interval_ms=power_interval_ms, verbose=verbose)\n", + " power_logger.start()\n", + " time.sleep(0.5) # Let logger stabilize\n", + " \n", + " # 4. Run timed inference\n", + " timing_results = run_timed_inference(model, input_ids, attention_mask, num_iters, verbose=verbose)\n", + " \n", + " # 5. Stop power logging\n", + " time.sleep(0.5) # Capture trailing samples\n", + " power_logger.stop()\n", + " power_samples = power_logger.get_samples()\n", + " \n", + " # 6. Compute accuracy\n", + " accuracy_results = compute_accuracy(model, input_ids, attention_mask, labels, verbose=verbose)\n", + " \n", + " # 7. Compute energy\n", + " energy_results = compute_energy_metrics(power_samples, timing_results)\n", + " \n", + " # 8. Memory stats\n", + " memory_stats = {}\n", + " if torch.cuda.is_available():\n", + " memory_stats = {\n", + " \"peak_memory_mb\": torch.cuda.max_memory_allocated() / 1024**2,\n", + " \"allocated_memory_mb\": torch.cuda.memory_allocated() / 1024**2,\n", + " \"reserved_memory_mb\": torch.cuda.memory_reserved() / 1024**2\n", + " }\n", + " \n", + " # Combine all results\n", + " results = {\n", + " \"trial\": trial_num,\n", + " \"precision\": precision,\n", + " \"model_name\": model_name,\n", + " \"timestamp\": datetime.now().isoformat(),\n", + " }\n", + " results.update(timing_results)\n", + " results.update(accuracy_results)\n", + " results.update(energy_results)\n", + " results.update(memory_stats)\n", + " results.update(model_info)\n", + " \n", + " # Print summary\n", + " print(f\"\\n{'='*70}\")\n", + " print(f\"TRIAL {trial_num} SUMMARY: {precision.upper()}\")\n", + " print(f\"{'='*70}\")\n", + " print(f\"Batch size: {timing_results['batch_size']} samples\")\n", + " print(f\"Iterations: {timing_results['num_iters']}\")\n", + " print(f\"Total samples: {energy_results.get('total_samples', timing_results['batch_size'] * 
timing_results['num_iters'])}\")\n", + " print(f\"Latency: {timing_results['mean_latency']*1000:.3f} ms/batch (± {timing_results['std_latency']*1000:.3f} ms)\")\n", + " print(f\"Throughput: {timing_results['throughput']:.2f} samples/s\")\n", + " print(f\"Accuracy: {accuracy_results['accuracy']*100:.2f}%\")\n", + " if energy_results['mean_power_w'] is not None:\n", + " print(f\"Power: {energy_results['mean_power_w']:.2f} W (± {energy_results['std_power_w']:.2f} W)\")\n", + " print(f\"Total Energy: {energy_results['total_energy_j']:.3f} J\")\n", + " print(f\"Energy/batch: {energy_results['energy_per_batch_mj']:.3f} mJ\")\n", + " print(f\"Energy/sample: {energy_results['energy_per_sample_mj']:.3f} mJ ← (correct per-sample metric)\")\n", + " if memory_stats:\n", + " print(f\"Peak Memory: {memory_stats['peak_memory_mb']:.2f} MB\")\n", + " print(f\"{'='*70}\")\n", + " \n", + " # Cleanup\n", + " del model\n", + " if torch.cuda.is_available():\n", + " torch.cuda.empty_cache()\n", + " \n", + " return results" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## 9. Multi-Trial Runner\n", + "\n", + "Run multiple trials for statistical significance." 
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "execution": { + "iopub.execute_input": "2025-12-03T10:43:39.925866Z", + "iopub.status.busy": "2025-12-03T10:43:39.925605Z", + "iopub.status.idle": "2025-12-03T10:43:39.943574Z", + "shell.execute_reply": "2025-12-03T10:43:39.943061Z", + "shell.execute_reply.started": "2025-12-03T10:43:39.925850Z" + }, + "trusted": true + }, + "outputs": [], + "source": [ + "def run_multi_trial_experiment(model_name: str, precision: str, dataset_path: str,\n", + " num_trials: int = 5, num_iters: int = 300,\n", + " warmup_iters: int = 50, power_interval_ms: int = 100,\n", + " device: str = \"cuda\") -> Tuple[List[Dict], Dict]:\n", + " \"\"\"\n", + " Run multiple trials for one precision level and aggregate results.\n", + " \n", + " Returns:\n", + " trial_results: List of per-trial result dictionaries\n", + " aggregated: Dictionary with mean/std across trials\n", + " \"\"\"\n", + " print(f\"\\n{'#'*70}\")\n", + " print(f\"# MULTI-TRIAL EXPERIMENT: {precision.upper()}\")\n", + " print(f\"# Number of trials: {num_trials}\")\n", + " print(f\"# Iterations per trial: {num_iters}\")\n", + " print(f\"{'#'*70}\")\n", + " \n", + " # Load dataset once (stays on GPU)\n", + " input_ids, attention_mask, labels, metadata = load_pretokenized_dataset(dataset_path, device)\n", + " \n", + " # Run trials\n", + " trial_results = []\n", + " for trial in range(1, num_trials + 1):\n", + " result = run_single_trial(\n", + " model_name=model_name,\n", + " precision=precision,\n", + " input_ids=input_ids,\n", + " attention_mask=attention_mask,\n", + " labels=labels,\n", + " num_iters=num_iters,\n", + " warmup_iters=warmup_iters,\n", + " power_interval_ms=power_interval_ms,\n", + " device=device,\n", + " trial_num=trial,\n", + " verbose=True\n", + " )\n", + " trial_results.append(result)\n", + " \n", + " # Aggregate results\n", + " aggregated = aggregate_trials(trial_results)\n", + " print_aggregated_results(aggregated, 
precision)\n", + " \n", + " return trial_results, aggregated\n", + "\n", + "\n", + "def aggregate_trials(trial_results: List[Dict]) -> Dict:\n", + " \"\"\"Compute mean and std across trials.\"\"\"\n", + " numeric_keys = [\n", + " \"mean_latency\", \"std_latency\", \"throughput\", \"accuracy\",\n", + " \"mean_power_w\", \"std_power_w\", \"total_energy_j\", \n", + " \"energy_per_batch_j\", \"energy_per_batch_mj\",\n", + " \"energy_per_sample_j\", \"energy_per_sample_mj\",\n", + " \"peak_memory_mb\"\n", + " ]\n", + " \n", + " aggregated = {\n", + " \"precision\": trial_results[0][\"precision\"],\n", + " \"model_name\": trial_results[0][\"model_name\"],\n", + " \"num_trials\": len(trial_results),\n", + " \"batch_size\": trial_results[0][\"batch_size\"],\n", + " \"total_samples\": trial_results[0].get(\"total_samples\", trial_results[0][\"batch_size\"] * trial_results[0][\"num_iters\"]),\n", + " \"model_size_mb\": trial_results[0][\"model_size_mb\"],\n", + " \"num_parameters\": trial_results[0][\"num_parameters\"]\n", + " }\n", + " \n", + " for key in numeric_keys:\n", + " if key in trial_results[0] and trial_results[0][key] is not None:\n", + " values = [r[key] for r in trial_results if key in r and r[key] is not None]\n", + " if values:\n", + " aggregated[f\"{key}_mean\"] = float(np.mean(values))\n", + " aggregated[f\"{key}_std\"] = float(np.std(values))\n", + " aggregated[f\"{key}_min\"] = float(np.min(values))\n", + " aggregated[f\"{key}_max\"] = float(np.max(values))\n", + " \n", + " return aggregated\n", + "\n", + "\n", + "def print_aggregated_results(agg: Dict, precision: str) -> None:\n", + " \"\"\"Pretty print aggregated results.\"\"\"\n", + " print(f\"\\n{'='*70}\")\n", + " print(f\"AGGREGATED RESULTS: {precision.upper()}\")\n", + " print(f\"Trials: {agg['num_trials']}\")\n", + " print(f\"Batch size: {agg['batch_size']} samples\")\n", + " print(f\"Total samples per trial: {agg.get('total_samples', 'N/A')}\")\n", + " print(f\"{'='*70}\")\n", + " \n", + " if 
'mean_latency_mean' in agg:\n", + " print(f\"\\nLatency (per batch):\")\n", + " print(f\" {agg['mean_latency_mean']*1000:.3f} ± {agg['mean_latency_std']*1000:.3f} ms\")\n", + " \n", + " if 'throughput_mean' in agg:\n", + " print(f\"\\nThroughput:\")\n", + " print(f\" {agg['throughput_mean']:.2f} ± {agg['throughput_std']:.2f} samples/s\")\n", + " \n", + " if 'accuracy_mean' in agg:\n", + " print(f\"\\nAccuracy:\")\n", + " print(f\" {agg['accuracy_mean']*100:.2f} ± {agg['accuracy_std']*100:.2f}%\")\n", + " \n", + " if 'mean_power_w_mean' in agg:\n", + " print(f\"\\nPower:\")\n", + " print(f\" {agg['mean_power_w_mean']:.2f} ± {agg['mean_power_w_std']:.2f} W\")\n", + " \n", + " if 'energy_per_batch_mj_mean' in agg:\n", + " print(f\"\\nEnergy per Batch:\")\n", + " print(f\" {agg['energy_per_batch_mj_mean']:.3f} ± {agg['energy_per_batch_mj_std']:.3f} mJ\")\n", + " \n", + " if 'energy_per_sample_mj_mean' in agg:\n", + " print(f\"\\nEnergy per Sample (CORRECT METRIC):\")\n", + " print(f\" {agg['energy_per_sample_mj_mean']:.3f} ± {agg['energy_per_sample_mj_std']:.3f} mJ\")\n", + " \n", + " if 'peak_memory_mb_mean' in agg:\n", + " print(f\"\\nMemory:\")\n", + " print(f\" Model size: {agg['model_size_mb']:.2f} MB\")\n", + " print(f\" Peak GPU: {agg['peak_memory_mb_mean']:.2f} ± {agg['peak_memory_mb_std']:.2f} MB\")\n", + " \n", + " print(f\"{'='*70}\\n\")" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## 10. Results Saving\n", + "\n", + "Save results in CSV and JSON formats." 
+ ] + }, + { + "cell_type": "code", + "execution_count": 11, + "metadata": { + "execution": { + "iopub.execute_input": "2025-12-03T10:43:39.944343Z", + "iopub.status.busy": "2025-12-03T10:43:39.944185Z", + "iopub.status.idle": "2025-12-03T10:43:39.956083Z", + "shell.execute_reply": "2025-12-03T10:43:39.955427Z", + "shell.execute_reply.started": "2025-12-03T10:43:39.944330Z" + }, + "trusted": true + }, + "outputs": [], + "source": [ + "def save_results(trial_results: List[Dict], aggregated: Dict, \n", + " precision: str, output_dir: str = \"./results\") -> None:\n", + " \"\"\"\n", + " Save trial and aggregated results.\n", + " \n", + " Creates:\n", + " - results/{precision}_trials.csv: Per-trial results\n", + " - results/{precision}_aggregated.json: Aggregated statistics\n", + " \"\"\"\n", + " output_dir = Path(output_dir)\n", + " output_dir.mkdir(parents=True, exist_ok=True)\n", + " \n", + " # Save detailed trial results\n", + " trials_df = pd.DataFrame(trial_results)\n", + " trials_path = output_dir / f\"{precision}_trials.csv\"\n", + " trials_df.to_csv(trials_path, index=False)\n", + " print(f\"\\n✓ Saved trial results: {trials_path}\")\n", + " \n", + " # Save aggregated results\n", + " agg_path = output_dir / f\"{precision}_aggregated.json\"\n", + " with open(agg_path, 'w') as f:\n", + " json.dump(aggregated, f, indent=2)\n", + " print(f\"✓ Saved aggregated results: {agg_path}\")\n", + "\n", + "\n", + "def save_comparison_table(all_aggregated: List[Dict], output_dir: str = \"./results\") -> pd.DataFrame:\n", + " \"\"\"\n", + " Create and save comparison table across all precisions.\n", + " \"\"\"\n", + " output_dir = Path(output_dir)\n", + " \n", + " comparison_df = pd.DataFrame(all_aggregated)\n", + " comparison_path = output_dir / \"comparison_all_precisions.csv\"\n", + " comparison_df.to_csv(comparison_path, index=False)\n", + " print(f\"\\n✓ Saved comparison table: {comparison_path}\")\n", + " \n", + " return comparison_df" + ] + }, + { + "cell_type": 
"markdown", + "metadata": {}, + "source": [ + "## 11. Main Experiment Runner\n", + "\n", + "Run experiments for all precision levels." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "execution": { + "iopub.execute_input": "2025-12-03T10:43:39.956998Z", + "iopub.status.busy": "2025-12-03T10:43:39.956771Z", + "iopub.status.idle": "2025-12-03T10:43:39.973244Z", + "shell.execute_reply": "2025-12-03T10:43:39.972651Z", + "shell.execute_reply.started": "2025-12-03T10:43:39.956977Z" + }, + "trusted": true + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Current working directory: /kaggle/working\n", + "Trying: /kaggle/datasets/tokenized_data\n", + "Trying: /kaggle/working/datasets/tokenized_data\n", + "Trying: /datasets/tokenized_data\n", + "Trying: /kaggle/working/energy_aware_quantization/datasets/tokenized_data\n", + "✓ Found dataset at: /kaggle/working/energy_aware_quantization/datasets/tokenized_data\n", + "\n", + "Experiment Configuration:\n", + "{\n", + " \"model_name\": \"distilbert-base-uncased-finetuned-sst-2-english\",\n", + " \"dataset_path\": \"/kaggle/working/energy_aware_quantization/datasets/tokenized_data\",\n", + " \"precisions\": [\n", + " \"fp32\",\n", + " \"fp16\",\n", + " \"int8\"\n", + " ],\n", + " \"num_trials\": 5,\n", + " \"num_iters\": 300,\n", + " \"warmup_iters\": 50,\n", + " \"power_interval_ms\": 100,\n", + " \"device\": \"cuda\",\n", + " \"output_dir\": \"/kaggle/results\"\n", + "}\n" + ] + } + ], + "source": [ + "# Configuration - Use current working directory to find dataset\n", + "import os\n", + "\n", + "# Get current working directory and find dataset\n", + "cwd = os.getcwd()\n", + "print(f\"Current working directory: {cwd}\")\n", + "\n", + "# Try multiple paths relative to cwd\n", + "possible_paths = [\n", + " Path(cwd) / \"..\" / \"datasets\" / \"tokenized_data\", # From notebooks/\n", + " Path(cwd) / \"datasets\" / \"tokenized_data\", # From repo root\n", + " 
Path(cwd) / \"..\" / \"..\" / \"datasets\" / \"tokenized_data\", # From deeper nesting\n", + " Path(\"/kaggle/working/energy_aware_quantization/datasets/tokenized_data\"), # Kaggle path\n", + "]\n", + "\n", + "dataset_path = None\n", + "for path in possible_paths:\n", + " abs_path = path.resolve()\n", + " print(f\"Trying: {abs_path}\")\n", + " if abs_path.exists() and (abs_path / \"input_ids.pt\").exists():\n", + " dataset_path = str(abs_path)\n", + " break\n", + "\n", + "if dataset_path is None:\n", + " # Last resort: search upward from cwd\n", + " current = Path(cwd)\n", + " for _ in range(5): # Search up to 5 levels up\n", + " test_path = current / \"datasets\" / \"tokenized_data\"\n", + " print(f\"Trying: {test_path.resolve()}\")\n", + " if test_path.exists() and (test_path / \"input_ids.pt\").exists():\n", + " dataset_path = str(test_path.resolve())\n", + " break\n", + " current = current.parent\n", + " \n", + "if dataset_path is None:\n", + " raise FileNotFoundError(\n", + " f\"Could not find datasets/tokenized_data directory.\\n\"\n", + " f\"Searched from: {cwd}\\n\"\n", + " f\"Please ensure the dataset exists or update the dataset_path manually.\"\n", + " )\n", + "\n", + "print(f\"✓ Found dataset at: {dataset_path}\")\n", + "\n", + "# Set output directory\n", + "output_path = Path(cwd) / \"..\" / \"results\"\n", + "if not output_path.parent.exists():\n", + " output_path = Path(cwd) / \"results\"\n", + "output_dir = str(output_path.resolve())\n", + "\n", + "CONFIG = {\n", + " \"model_name\": \"distilbert-base-uncased-finetuned-sst-2-english\",\n", + " \"dataset_path\": dataset_path,\n", + " \"precisions\": [\"int8\",\"fp32\", \"fp16\"], # Precision levels to test\n", + " \"num_trials\": 5, # Number of trials per precision\n", + " \"num_iters\": 300, # Inference iterations per trial\n", + " \"warmup_iters\": 50, # Warmup iterations\n", + " \"power_interval_ms\": 100, # Power sampling interval\n", + " \"device\": \"cuda\" if torch.cuda.is_available() else 
\"cpu\",\n", + " \"output_dir\": output_dir\n", + "}\n", + "\n", + "print(\"\\nExperiment Configuration:\")\n", + "print(json.dumps(CONFIG, indent=2))" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "execution": { + "iopub.execute_input": "2025-12-03T10:43:39.974257Z", + "iopub.status.busy": "2025-12-03T10:43:39.974007Z", + "iopub.status.idle": "2025-12-03T10:51:58.992778Z", + "shell.execute_reply": "2025-12-03T10:51:58.992075Z", + "shell.execute_reply.started": "2025-12-03T10:43:39.974242Z" + }, + "trusted": true + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\n", + "######################################################################\n", + "# MULTI-TRIAL EXPERIMENT: FP32\n", + "# Number of trials: 5\n", + "# Iterations per trial: 300\n", + "######################################################################\n", + "\n", + "Loading dataset from: /kaggle/working/energy_aware_quantization/datasets/tokenized_data\n", + "✓ Loaded 50 samples\n", + " - Sequence length: 128\n", + " - Device: cuda:0\n", + " - Dataset: sst2\n", + " - Labels: 2\n", + " - Memory: 0.10 MB\n", + "\n", + "======================================================================\n", + "TRIAL 1: FP32\n", + "======================================================================\n", + "\n", + "Loading model: distilbert-base-uncased-finetuned-sst-2-english\n", + " Precision: FP32\n", + " Device: cuda\n" + ] + }, + { + "data": { + "application/vnd.jupyter.widget-view+json": { + "model_id": "a8fa87c4d29e496a8cfad23d59db7ff9", + "version_major": 2, + "version_minor": 0 + }, + "text/plain": [ + "config.json: 0%| | 0.00/629 [00:00" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "import matplotlib.pyplot as plt\n", + "import seaborn as sns\n", + "\n", + "sns.set_style(\"whitegrid\")\n", + "sns.set_palette(\"husl\")\n", + "\n", + "# Create comparison plots\n", + "fig, axes = plt.subplots(2, 3, 
figsize=(18, 10))\n", + "fig.suptitle(\"Energy-Aware Quantization: Performance Comparison (Per-Sample Metrics)\", fontsize=16, fontweight='bold')\n", + "\n", + "df = pd.DataFrame(all_aggregated)\n", + "\n", + "# 1. Latency per batch\n", + "ax = axes[0, 0]\n", + "if 'mean_latency_mean' in df.columns:\n", + " x = range(len(df))\n", + " ax.bar(x, df['mean_latency_mean']*1000, yerr=df['mean_latency_std']*1000, capsize=5, alpha=0.7)\n", + " ax.set_xticks(x)\n", + " ax.set_xticklabels(df['precision'].str.upper())\n", + " ax.set_ylabel('Latency (ms)', fontsize=11)\n", + " ax.set_title('Mean Latency per Batch', fontsize=12, fontweight='bold')\n", + " ax.grid(axis='y', alpha=0.3)\n", + "\n", + "# 2. Throughput\n", + "ax = axes[0, 1]\n", + "if 'throughput_mean' in df.columns:\n", + " x = range(len(df))\n", + " ax.bar(x, df['throughput_mean'], yerr=df['throughput_std'], capsize=5, alpha=0.7, color='green')\n", + " ax.set_xticks(x)\n", + " ax.set_xticklabels(df['precision'].str.upper())\n", + " ax.set_ylabel('Throughput (samples/s)', fontsize=11)\n", + " ax.set_title('Inference Throughput', fontsize=12, fontweight='bold')\n", + " ax.grid(axis='y', alpha=0.3)\n", + "\n", + "# 3. Energy per Sample (CORRECT METRIC)\n", + "ax = axes[0, 2]\n", + "if 'energy_per_sample_mj_mean' in df.columns:\n", + " x = range(len(df))\n", + " ax.bar(x, df['energy_per_sample_mj_mean'], yerr=df['energy_per_sample_mj_std'], \n", + " capsize=5, alpha=0.7, color='orange')\n", + " ax.set_xticks(x)\n", + " ax.set_xticklabels(df['precision'].str.upper())\n", + " ax.set_ylabel('Energy (mJ)', fontsize=11)\n", + " ax.set_title('Energy per Sample', fontsize=12, fontweight='bold')\n", + " ax.grid(axis='y', alpha=0.3)\n", + "\n", + "# 4. 
Power Draw\n", + "ax = axes[1, 0]\n", + "if 'mean_power_w_mean' in df.columns:\n", + " x = range(len(df))\n", + " ax.bar(x, df['mean_power_w_mean'], yerr=df['mean_power_w_std'], capsize=5, alpha=0.7, color='red')\n", + " ax.set_xticks(x)\n", + " ax.set_xticklabels(df['precision'].str.upper())\n", + " ax.set_ylabel('Power (W)', fontsize=11)\n", + " ax.set_title('Mean GPU Power Draw', fontsize=12, fontweight='bold')\n", + " ax.grid(axis='y', alpha=0.3)\n", + "\n", + "# 5. Accuracy\n", + "ax = axes[1, 1]\n", + "if 'accuracy_mean' in df.columns:\n", + " x = range(len(df))\n", + " ax.bar(x, df['accuracy_mean']*100, yerr=df['accuracy_std']*100, capsize=5, alpha=0.7, color='purple')\n", + " ax.set_xticks(x)\n", + " ax.set_xticklabels(df['precision'].str.upper())\n", + " ax.set_ylabel('Accuracy (%)', fontsize=11)\n", + " ax.set_title('Classification Accuracy', fontsize=12, fontweight='bold')\n", + " ax.set_ylim([0, 100])\n", + " ax.grid(axis='y', alpha=0.3)\n", + "\n", + "# 6. Model Size\n", + "ax = axes[1, 2]\n", + "if 'model_size_mb' in df.columns:\n", + " x = range(len(df))\n", + " ax.bar(x, df['model_size_mb'], alpha=0.7, color='teal')\n", + " ax.set_xticks(x)\n", + " ax.set_xticklabels(df['precision'].str.upper())\n", + " ax.set_ylabel('Size (MB)', fontsize=11)\n", + " ax.set_title('Model Size', fontsize=12, fontweight='bold')\n", + " ax.grid(axis='y', alpha=0.3)\n", + "\n", + "plt.tight_layout()\n", + "plot_path = Path(CONFIG[\"output_dir\"]) / \"comparison_plots.png\"\n", + "plt.savefig(plot_path, dpi=300, bbox_inches='tight')\n", + "print(f\"\\n✓ Saved plots: {plot_path}\")\n", + "plt.show()" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## 13. 
Summary Table" + ] + }, + { + "cell_type": "code", + "execution_count": 15, + "metadata": { + "execution": { + "iopub.execute_input": "2025-12-03T10:52:01.603005Z", + "iopub.status.busy": "2025-12-03T10:52:01.602340Z", + "iopub.status.idle": "2025-12-03T10:52:01.617275Z", + "shell.execute_reply": "2025-12-03T10:52:01.616685Z", + "shell.execute_reply.started": "2025-12-03T10:52:01.602985Z" + }, + "trusted": true + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\n", + "============================================================================================================================================\n", + "FINAL SUMMARY TABLE (Per-Sample Energy)\n", + "============================================================================================================================================\n", + "Precision Batch Size Latency/batch (ms) Throughput (samples/s) Accuracy (%) Power (W) Energy/sample (mJ) Model Size (MB) Peak Memory (MB)\n", + " FP32 50 86.897 ± 0.257 575.37 ± 1.70 86.00 ± 0.00 231.90 ± 1.79 403.048 ± 3.906 255.42 475.95\n", + " FP16 50 83.087 ± 0.051 601.74 ± 0.38 86.00 ± 0.00 228.73 ± 0.51 380.106 ± 0.758 127.71 245.04\n", + " INT8 50 86.684 ± 0.051 576.78 ± 0.34 88.00 ± 0.00 232.11 ± 0.48 402.426 ± 0.625 255.42 475.85\n", + "============================================================================================================================================\n", + "\n", + "NOTE: Energy/sample = Total Energy / (num_iters × batch_size)\n", + " With batch_size=50 and num_iters=300:\n", + " Energy/sample ≈ Energy/batch / 50\n", + "============================================================================================================================================\n", + "\n", + "✓ Saved summary table: /kaggle/results/summary_table.csv\n" + ] + } + ], + "source": [ + "# Create formatted summary table\n", + "summary_data = []\n", + "\n", + "for agg in all_aggregated:\n", + " row = {\n", + " 
\"Precision\": agg[\"precision\"].upper(),\n", + " \"Batch Size\": agg.get(\"batch_size\", \"N/A\"),\n", + " \"Latency/batch (ms)\": f\"{agg.get('mean_latency_mean', 0)*1000:.3f} ± {agg.get('mean_latency_std', 0)*1000:.3f}\",\n", + " \"Throughput (samples/s)\": f\"{agg.get('throughput_mean', 0):.2f} ± {agg.get('throughput_std', 0):.2f}\",\n", + " \"Accuracy (%)\": f\"{agg.get('accuracy_mean', 0)*100:.2f} ± {agg.get('accuracy_std', 0)*100:.2f}\",\n", + " \"Power (W)\": f\"{agg.get('mean_power_w_mean', 0):.2f} ± {agg.get('mean_power_w_std', 0):.2f}\" if agg.get('mean_power_w_mean') else \"N/A\",\n", + " \"Energy/sample (mJ)\": f\"{agg.get('energy_per_sample_mj_mean', 0):.3f} ± {agg.get('energy_per_sample_mj_std', 0):.3f}\" if agg.get('energy_per_sample_mj_mean') else \"N/A\",\n", + " \"Model Size (MB)\": f\"{agg.get('model_size_mb', 0):.2f}\",\n", + " \"Peak Memory (MB)\": f\"{agg.get('peak_memory_mb_mean', 0):.2f}\" if agg.get('peak_memory_mb_mean') else \"N/A\"\n", + " }\n", + " summary_data.append(row)\n", + "\n", + "summary_df = pd.DataFrame(summary_data)\n", + "\n", + "print(\"\\n\" + \"=\"*140)\n", + "print(\"FINAL SUMMARY TABLE (Per-Sample Energy)\")\n", + "print(\"=\"*140)\n", + "print(summary_df.to_string(index=False))\n", + "print(\"=\"*140)\n", + "print(\"\\nNOTE: Energy/sample = Total Energy / (num_iters × batch_size)\")\n", + "print(f\" With batch_size={all_aggregated[0].get('batch_size', 50)} and num_iters=300:\")\n", + "print(f\" Energy/sample ≈ Energy/batch / {all_aggregated[0].get('batch_size', 50)}\")\n", + "print(\"=\"*140)\n", + "\n", + "# Save summary table\n", + "summary_path = Path(CONFIG[\"output_dir\"]) / \"summary_table.csv\"\n", + "summary_df.to_csv(summary_path, index=False)\n", + "print(f\"\\n✓ Saved summary table: {summary_path}\")" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## 14. Relative Improvements\n", + "\n", + "Compare FP16 and INT8 against FP32 baseline." 
+ ] + }, + { + "cell_type": "code", + "execution_count": 16, + "metadata": { + "execution": { + "iopub.execute_input": "2025-12-03T10:52:01.618183Z", + "iopub.status.busy": "2025-12-03T10:52:01.617942Z", + "iopub.status.idle": "2025-12-03T10:52:01.632080Z", + "shell.execute_reply": "2025-12-03T10:52:01.631436Z", + "shell.execute_reply.started": "2025-12-03T10:52:01.618167Z" + }, + "trusted": true + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\n", + "======================================================================\n", + "RELATIVE IMPROVEMENTS vs FP32 BASELINE\n", + "======================================================================\n", + "\n", + "FP16 vs FP32:\n", + "----------------------------------------\n", + "Latency: 1.05x faster (4.4% reduction)\n", + "Accuracy: +0.00 percentage points\n", + "Model Size: 50.0% reduction\n", + "\n", + "INT8 vs FP32:\n", + "----------------------------------------\n", + "Latency: 1.00x faster (0.2% reduction)\n", + "Accuracy: +2.00 percentage points\n", + "Model Size: 0.0% reduction\n", + "\n", + "======================================================================\n" + ] + } + ], + "source": [ + "if len(all_aggregated) >= 2:\n", + " # Use FP32 as baseline\n", + " baseline = next((a for a in all_aggregated if a['precision'] == 'fp32'), all_aggregated[0])\n", + " \n", + " print(\"\\n\" + \"=\"*70)\n", + " print(\"RELATIVE IMPROVEMENTS vs FP32 BASELINE\")\n", + " print(\"=\"*70)\n", + " \n", + " for agg in all_aggregated:\n", + " if agg['precision'] == 'fp32':\n", + " continue\n", + " \n", + " print(f\"\\n{agg['precision'].upper()} vs FP32:\")\n", + " print(\"-\" * 40)\n", + " \n", + " # Latency speedup\n", + " if 'mean_latency_mean' in baseline and 'mean_latency_mean' in agg:\n", + " speedup = baseline['mean_latency_mean'] / agg['mean_latency_mean']\n", + " reduction_pct = (1 - 1/speedup) * 100\n", + " print(f\"Latency: {speedup:.2f}x faster ({reduction_pct:.1f}% 
reduction)\")\n", + " \n", + " # Energy savings\n", + " if 'energy_per_inference_mj_mean' in baseline and 'energy_per_inference_mj_mean' in agg:\n", + " if baseline['energy_per_inference_mj_mean'] and agg['energy_per_inference_mj_mean']:\n", + " energy_ratio = agg['energy_per_inference_mj_mean'] / baseline['energy_per_inference_mj_mean']\n", + " energy_savings_pct = (1 - energy_ratio) * 100\n", + " print(f\"Energy: {energy_savings_pct:+.1f}% change\")\n", + " \n", + " # Accuracy delta\n", + " if 'accuracy_mean' in baseline and 'accuracy_mean' in agg:\n", + " accuracy_delta = (agg['accuracy_mean'] - baseline['accuracy_mean']) * 100\n", + " print(f\"Accuracy: {accuracy_delta:+.2f} percentage points\")\n", + " \n", + " # Model size reduction\n", + " if 'model_size_mb' in baseline and 'model_size_mb' in agg:\n", + " size_ratio = agg['model_size_mb'] / baseline['model_size_mb']\n", + " size_reduction_pct = (1 - size_ratio) * 100\n", + " print(f\"Model Size: {size_reduction_pct:.1f}% reduction\")\n", + " \n", + " print(\"\\n\" + \"=\"*70)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Notes\n", + "\n", + "### Key Features:\n", + "1. **Zero-I/O Design**: All data loaded to GPU before measurements\n", + "2. **Accurate Quantization**: Symmetric INT8 quantization for weights\n", + "3. **Asynchronous Power Monitoring**: nvidia-smi polling in background thread\n", + "4. **CUDA Synchronization**: Precise timing with GPU sync after each forward pass\n", + "5. **Statistical Significance**: Multiple trials with aggregation\n", + "6. 
**Comprehensive Metrics**: Latency, throughput, energy, accuracy, memory\n", + "\n", + "### Checklist:\n", + "- ✅ Dataset loads from .pt files directly to GPU\n", + "- ✅ No I/O operations during measurement loop\n", + "- ✅ Model loaded once per trial with correct precision\n", + "- ✅ Warmup phase stabilizes GPU before measurement\n", + "- ✅ Power logger runs asynchronously without blocking\n", + "- ✅ CUDA synchronization ensures accurate timing\n", + "- ✅ Energy computed as Power × Time\n", + "- ✅ Results saved in CSV and JSON formats\n", + "- ✅ Multiple trials for statistical confidence\n", + "- ✅ Comparison plots and summary tables generated" + ] + } + ], + "metadata": { + "kaggle": { + "accelerator": "gpu", + "dataSources": [], + "dockerImageVersionId": 31193, + "isGpuEnabled": true, + "isInternetEnabled": true, + "language": "python", + "sourceType": "notebook" + }, + "kernelspec": { + "display_name": "Python 3", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.8.0" + } + }, + "nbformat": 4, + "nbformat_minor": 4 +}