From b837338c1e07a983b1b0b137e6507a1e522a0357 Mon Sep 17 00:00:00 2001 From: LeoRoccoBreedt Date: Wed, 26 Feb 2025 11:07:02 +0100 Subject: [PATCH 001/125] feat: Added initial Pytorch example to monitor batching and per layer metrics --- .../pytorch_image_classification.ipynb | 445 ++++++++++++++++++ 1 file changed, 445 insertions(+) create mode 100644 integrations-and-supported-tools/pytorch/pytorch_image_classification.ipynb diff --git a/integrations-and-supported-tools/pytorch/pytorch_image_classification.ipynb b/integrations-and-supported-tools/pytorch/pytorch_image_classification.ipynb new file mode 100644 index 0000000..9ca474c --- /dev/null +++ b/integrations-and-supported-tools/pytorch/pytorch_image_classification.ipynb @@ -0,0 +1,445 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# Neptune + PyTorch\n", + "\n", + "Introduction\n", + "\n", + "This guide will show you how to:\n", + "- Initialize the Neptune Run object\n", + "- Log standard loss and accuracy metrics to Neptune\n", + "- Log per layer activations and gradients for debugging model training" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Before you start" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Install Neptune and Dependencies" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "# Install dependencies\n", + "! 
pip install -q -U neptune_scale torch torchvision" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "# TODO - update config to include model architecture\n", + "# TODO - Add more hyperparameters\n", + "# TODO - Add additional logging metrics (weights, gradients, activations, etc.)" + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "metadata": {}, + "outputs": [], + "source": [ + "import torch\n", + "import torch.nn as nn\n", + "import torch.optim as optim\n", + "import torch.nn.functional as F\n", + "from torch.utils.data import DataLoader\n", + "from torchvision import datasets, transforms\n", + "import numpy as np" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Set Hyperparameters for Training" + ] + }, + { + "cell_type": "code", + "execution_count": 6, + "metadata": {}, + "outputs": [], + "source": [ + "params = {\n", + " \"optimizer\": \"Adam\",\n", + " \"batch_size\": 512,\n", + " \"learning_rate\": 0.05,\n", + " \"epochs\": 5, \n", + " \"device\": torch.device(\"cuda\" if torch.cuda.is_available() else \"cpu\"),\n", + " \"input_features\": 256,\n", + " \"n_classes\": 10,\n", + " \"input_size\": 28 * 28\n", + "}" + ] + }, + { + "cell_type": "code", + "execution_count": 5, + "metadata": {}, + "outputs": [], + "source": [ + "\n", + "# Transform to normalize the data and convert it to tensor\n", + "transform = transforms.Compose([\n", + " transforms.ToTensor(),\n", + " transforms.Normalize((0.5,), (0.5,)) # Normalizing the image to range [-1, 1]\n", + "])\n", + "\n", + "# Download and load the MNIST dataset\n", + "train_dataset = datasets.MNIST(root='./data', train=True, download=True, transform=transform)\n", + "val_dataset = datasets.MNIST(root='./data', train=False, download=True, transform=transform) # Use test set as validation\n", + "test_dataset = datasets.MNIST(root='./data', train=False, download=True, transform=transform)\n", + "\n", + "# 
DataLoader for training, validation, and testing\n", + "train_loader = DataLoader(train_dataset, batch_size=params[\"batch_size\"], shuffle=True)\n", + "val_loader = DataLoader(val_dataset, batch_size=params[\"batch_size\"], shuffle=False)\n", + "test_loader = DataLoader(test_dataset, batch_size=params[\"batch_size\"], shuffle=False)\n" + ] + }, + { + "cell_type": "code", + "execution_count": 10, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Adam\n" + ] + } + ], + "source": [ + "\n", + "# Simple Convolutional Neural Network model for MNIST\n", + "class SimpleCNN(nn.Module):\n", + " def __init__(self):\n", + " super(SimpleCNN, self).__init__()\n", + " self.conv1 = nn.Conv2d(1, 32, kernel_size=3, padding=1) # Input channels = 1 (grayscale images)\n", + " self.conv2 = nn.Conv2d(32, 64, kernel_size=3, padding=1)\n", + " self.fc1 = nn.Linear(64 * 7 * 7, 128) # Flattened size of image after convolution layers\n", + " self.fc2 = nn.Linear(128, 10) # 10 output classes for digits 0-9\n", + " \n", + " def forward(self, x):\n", + " x = F.relu(self.conv1(x))\n", + " x = F.max_pool2d(x, 2) # Pooling layer to downsample\n", + " x = F.relu(self.conv2(x))\n", + " x = F.max_pool2d(x, 2)\n", + " x = x.view(-1, 64 * 7 * 7) # Flatten the tensor for the fully connected layer\n", + " x = F.relu(self.fc1(x))\n", + " x = self.fc2(x)\n", + " return x\n", + " \n", + "class SimpleNN(nn.Module):\n", + " def __init__(self):\n", + " super(SimpleNN, self).__init__()\n", + " # Define layers (increase number of layers)\n", + " self.fc1 = nn.Linear(params[\"input_size\"], params[\"input_features\"]) \n", + " self.fc2 = nn.Linear(params[\"input_features\"], 64)\n", + " self.fc3 = nn.Linear(64, params[\"n_classes\"]) # Output layer (10 classes for MNIST)\n", + "\n", + " # Registering hooks to track activations\n", + " self.hooks = []\n", + " self.hooks.append(self.fc1.register_forward_hook(self.save_activation(\"fc1\")))\n", + " 
self.hooks.append(self.fc2.register_forward_hook(self.save_activation(\"fc2\")))\n", + " self.hooks.append(self.fc3.register_forward_hook(self.save_activation(\"fc3\")))\n", + "\n", + " def forward(self, x):\n", + " x = x.view(-1, params[\"input_size\"]) # Flatten the input image (28x28)\n", + " x = torch.relu(self.fc1(x)) # Apply ReLU activation\n", + " x = torch.relu(self.fc2(x)) # Apply ReLU activation\n", + " x = self.fc3(x) # Output layer\n", + " return x\n", + " \n", + " # Function to save activations\n", + " def save_activation(self, name):\n", + " def hook(model, input, output):\n", + " self.activations[name] = output\n", + " return hook\n", + " \n", + " def get_activations(self):\n", + " return self.activations\n", + "\n", + " def clear_activations(self):\n", + " self.activations = {}\n", + "\n", + "# Instantiate the model, loss function, and optimizer\n", + "# model = SimpleCNN()\n", + "model = SimpleNN()\n", + "criterion = nn.CrossEntropyLoss() # Loss function\n", + "\n", + "# Select an optimizer\n", + "if params[\"optimizer\"] == \"Adam\":\n", + " optimizer = optim.Adam(model.parameters(), lr=params[\"learning_rate\"])\n", + " print(params[\"optimizer\"])\n", + "elif params[\"optimizer\"] == \"SGD\":\n", + " optimizer = optim.SGD(model.parameters(), lr=params[\"learning_rate\"])\n", + " print(params[\"optimizer\"])\n", + "else:\n", + " print(\"No optimizer selected\")\n" + ] + }, + { + "cell_type": "code", + "execution_count": 11, + "metadata": {}, + "outputs": [], + "source": [ + "# Function to evaluate the model (validation/test) with gradients tracked\n", + "def evaluate(model, data_loader, track_gradients=False):\n", + " model.train() if track_gradients else model.eval() # Ensure model is in training mode if tracking gradients\n", + " correct_preds = 0\n", + " total_preds = 0\n", + " epoch_loss = 0\n", + " with torch.no_grad(): # Disable gradient tracking during evaluation\n", + " for data, target in data_loader:\n", + " # Forward pass (with 
gradient tracking if specified)\n", + " output = model(data)\n", + " loss = criterion(output, target) # Correct loss computation\n", + " epoch_loss += loss.item()\n", + " \n", + " if track_gradients:\n", + " # Track gradients (we will backpropagate but do not update model parameters)\n", + " loss.backward()\n", + " \n", + " # Calculate accuracy\n", + " _, predicted = torch.max(output.data, 1)\n", + " total_preds += target.size(0)\n", + " correct_preds += (predicted == target).sum().item()\n", + " \n", + " accuracy = 100 * correct_preds / total_preds\n", + " return epoch_loss / len(data_loader), accuracy\n" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Neptune - Initialize Training Run and Log Configs" + ] + }, + { + "cell_type": "code", + "execution_count": 1, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "env: NEPTUNE_API_TOKEN=\"your_api_token\"\n", + "env: NEPTUNE_PROJECT=\"workspace_name/project_name\"\n" + ] + } + ], + "source": [ + "# Set environment variable for the API token\n", + "%env NEPTUNE_API_TOKEN = \"your_api_token\"\n", + "%env NEPTUNE_PROJECT = \"workspace_name/project_name\"" + ] + }, + { + "cell_type": "code", + "execution_count": 12, + "metadata": {}, + "outputs": [], + "source": [ + "# Define Neptune parameters\n", + "from neptune_scale import Run\n", + "from random import random\n", + "\n", + "custom_name = \"torch-vision\"\n", + "custom_id = random()\n", + "\n", + "run = Run(\n", + " run_id=f\"{custom_name}-{custom_id}\"\n", + " )\n", + "\n", + "run.log_configs(\n", + " {\n", + " \"config/learning_rate\": params[\"learning_rate\"],\n", + " \"config/optimizer\": params[\"optimizer\"],\n", + " \"config/batch_size\": params[\"batch_size\"],\n", + " \"config/epochs\": params[\"epochs\"]\n", + " }\n", + ")\n", + "\n", + "run.add_tags(tags=[params[\"optimizer\"]], group_tags=True)\n", + "run.add_tags(tags=[\"Torch-MINST\"])" + ] + }, + { + "cell_type": "markdown", 
+ "metadata": {}, + "source": [ + "## Neptune - Log Metrics while Training" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "# Training loop\n", + "num_epochs = params[\"epochs\"]\n", + "step_counter = 0\n", + "for epoch in range(num_epochs):\n", + " model.train()\n", + " epoch_loss = 0\n", + " correct_preds = 0\n", + " total_preds = 0\n", + "\n", + " # Reset activations for each epoch\n", + " model.clear_activations()\n", + " \n", + " # Training step\n", + " for batch_idx, (data, target) in enumerate(train_loader):\n", + " step_counter += 1\n", + " optimizer.zero_grad()\n", + " \n", + " # Forward pass\n", + " output = model(data)\n", + " \n", + " # Compute the loss\n", + " loss = criterion(output, target)\n", + " epoch_loss += loss.item()\n", + " \n", + " # Backward pass and optimization\n", + " loss.backward()\n", + " optimizer.step()\n", + " \n", + " # Calculate accuracy\n", + " _, predicted = torch.max(output.data, 1)\n", + " total_preds += target.size(0)\n", + " correct_preds += (predicted == target).sum().item()\n", + " \n", + " # Print loss and accuracy for each batch (step)\n", + " #if (batch_idx + 1) % 5 == 0: # Every 5 steps\n", + " batch_accuracy = 100 * correct_preds / total_preds\n", + " print(f\"Epoch [{epoch+1}/{num_epochs}], Step [{batch_idx+1}/{len(train_loader)}], Loss: {loss.item():.4f}, Accuracy: {batch_accuracy:.2f}%\")\n", + " \n", + " # Validation step per training step\n", + " val_loss, val_accuracy = evaluate(model, val_loader) # Evaluate after each step\n", + " print(f\"Validation at step [{batch_idx+1}/{len(train_loader)}] - Loss: {val_loss:.4f}, Accuracy: {val_accuracy:.2f}%\")\n", + "\n", + " run.log_metrics(\n", + " data = {\n", + " \"metrics/train/loss\": loss.item(),\n", + " \"metrics/train/accuracy\": batch_accuracy,\n", + " \"metrics/validation/loss\": val_loss,\n", + " \"metrics/validation/accuracy\": val_accuracy,\n", + " \"epoch_value\": epoch\n", + " },\n", + " 
step = step_counter\n", + " )\n", + " \n", + " # Print loss and accuracy for the entire training epoch\n", + " train_accuracy = 100 * correct_preds / total_preds\n", + " print(f\"Epoch [{epoch+1}/{num_epochs}] Training complete. Loss: {epoch_loss / len(train_loader):.4f}, Accuracy: {train_accuracy:.2f}%\")\n", + "\n", + " # Track activations and gradients per layer\n", + " activation_dict_mean = {\n", + " f\"layers/layer_{name}/activation_mean\": activation.mean().item() for name, activation in model.get_activations().items()\n", + " }\n", + " \n", + " activation_dict_std = {\n", + " f\"layers/layer_{name}/activation_std\": activation.std().item() for name, activation in model.get_activations().items()\n", + " }\n", + "\n", + " params_dict_std = {\n", + " f\"layers/layer_{name.split(\".\")[0]}/{name.split(\".\")[1]}_std\": param.grad.std().item() for name, param in model.named_parameters()\n", + " }\n", + " \n", + " params_dict_mean = {\n", + " f\"layers/layer_{name.split(\".\")[0]}/{name.split(\".\")[1]}_mean\": param.grad.mean().item() for name, param in model.named_parameters()\n", + " }\n", + " \n", + " layers_dict = {**activation_dict_mean, \n", + " **activation_dict_std,\n", + " **params_dict_mean,\n", + " **params_dict_std\n", + " }\n", + " print(layers_dict)\n", + "\n", + " # data_to_log = {\n", + " # \"metrics/test/loss_epoch\": epoch_loss / len(train_loader),\n", + " # \"metrics/train/accuracy_epoch\": train_accuracy\n", + " # }.update(activation_dict)\n", + " \n", + " run.log_metrics(\n", + " data = layers_dict,\n", + " step = epoch\n", + " )\n", + " \n", + "# Final Testing Step with gradient tracking\n", + "test_loss, test_accuracy = evaluate(model, test_loader, track_gradients=False) # Track gradients during test\n", + "print(f\"Testing complete. 
Loss: {test_loss:.4f}, Accuracy: {test_accuracy:.2f}%\")\n", + "\n", + "run.log_configs(\n", + " {\n", + " \"metrics/test/loss\": test_loss,\n", + " \"metrics/test/accuracy\": test_accuracy\n", + " }\n", + ")" + ] + }, + { + "cell_type": "code", + "execution_count": 9, + "metadata": {}, + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "neptune:INFO: Waiting for all operations to be processed\n", + "neptune:WARNING: No timeout specified. Waiting indefinitely\n", + "neptune:INFO: All operations were processed\n" + ] + } + ], + "source": [ + "run.close()" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "neptune_scale_py_312_base", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.12.3" + } + }, + "nbformat": 4, + "nbformat_minor": 2 +} From 9994528160aa6dd4c7032a1b35ee835b2305fdce Mon Sep 17 00:00:00 2001 From: LeoRoccoBreedt Date: Wed, 26 Feb 2025 11:17:48 +0100 Subject: [PATCH 002/125] refactor: Update introduction section for more clarity on notebook use --- .../pytorch/pytorch_image_classification.ipynb | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/integrations-and-supported-tools/pytorch/pytorch_image_classification.ipynb b/integrations-and-supported-tools/pytorch/pytorch_image_classification.ipynb index 9ca474c..2e823f0 100644 --- a/integrations-and-supported-tools/pytorch/pytorch_image_classification.ipynb +++ b/integrations-and-supported-tools/pytorch/pytorch_image_classification.ipynb @@ -9,9 +9,11 @@ "Introduction\n", "\n", "This guide will show you how to:\n", - "- Initialize the Neptune Run object\n", + "- Initialize the Neptune Run object and log configuration parameters\n", "- Log standard loss and accuracy metrics to Neptune\n", - "- Log per 
layer activations and gradients for debugging model training" + "- Log debugging metrics during model training such as;\n", + " * Activations per layer\n", + " * Gradients (mean and std weights and biases) per layer" ] }, { From 223fde15fc8b3cc01c703477a65910555b265b2e Mon Sep 17 00:00:00 2001 From: LeoRoccoBreedt Date: Wed, 26 Feb 2025 11:30:53 +0100 Subject: [PATCH 003/125] chore: change how the custom run id gets automatically generated --- .../pytorch/pytorch_image_classification.ipynb | 11 +++-------- 1 file changed, 3 insertions(+), 8 deletions(-) diff --git a/integrations-and-supported-tools/pytorch/pytorch_image_classification.ipynb b/integrations-and-supported-tools/pytorch/pytorch_image_classification.ipynb index 2e823f0..454c431 100644 --- a/integrations-and-supported-tools/pytorch/pytorch_image_classification.ipynb +++ b/integrations-and-supported-tools/pytorch/pytorch_image_classification.ipynb @@ -259,20 +259,15 @@ }, { "cell_type": "code", - "execution_count": 12, + "execution_count": null, "metadata": {}, "outputs": [], "source": [ "# Define Neptune parameters\n", "from neptune_scale import Run\n", - "from random import random\n", - "\n", - "custom_name = \"torch-vision\"\n", - "custom_id = random()\n", + "from uuid import uuid4\n", "\n", - "run = Run(\n", - " run_id=f\"{custom_name}-{custom_id}\"\n", - " )\n", + "run = Run(run_id=f\"pytorch-{uuid4()}\")\n", "\n", "run.log_configs(\n", " {\n", From ceb3a249f904348c56891bab7452242fde797901 Mon Sep 17 00:00:00 2001 From: LeoRoccoBreedt Date: Wed, 26 Feb 2025 11:33:16 +0100 Subject: [PATCH 004/125] chore: update instructions on how users can get and set their API token and project name --- .../pytorch_image_classification.ipynb | 26 ++++--------------- 1 file changed, 5 insertions(+), 21 deletions(-) diff --git a/integrations-and-supported-tools/pytorch/pytorch_image_classification.ipynb b/integrations-and-supported-tools/pytorch/pytorch_image_classification.ipynb index 454c431..13af69e 100644 --- 
a/integrations-and-supported-tools/pytorch/pytorch_image_classification.ipynb +++ b/integrations-and-supported-tools/pytorch/pytorch_image_classification.ipynb @@ -20,7 +20,11 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "## Before you start" + "## Before you start\n", + "\n", + " 1. Create a Neptune Scale account. [Register →](https://neptune.ai/early-access)\n", + " 2. Create a Neptune project that you will use for tracking metadata. For instructions, see [Projects](https://docs-beta.neptune.ai/projects/) in the Neptune Scale docs.\n", + " 3. Install and configure Neptune Scale for logging metadata. For instructions, see [Get started](https://docs-beta.neptune.ai/setup) in the Neptune Scale docs." ] }, { @@ -237,26 +241,6 @@ "## Neptune - Initialize Training Run and Log Configs" ] }, - { - "cell_type": "code", - "execution_count": 1, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "env: NEPTUNE_API_TOKEN=\"your_api_token\"\n", - "env: NEPTUNE_PROJECT=\"workspace_name/project_name\"\n" - ] - } - ], - "source": [ - "# Set environment variable for the API token\n", - "%env NEPTUNE_API_TOKEN = \"your_api_token\"\n", - "%env NEPTUNE_PROJECT = \"workspace_name/project_name\"" - ] - }, { "cell_type": "code", "execution_count": null, From 39f5bb94282d3dfc2fe5a2ce417b7caec6241f56 Mon Sep 17 00:00:00 2001 From: LeoRoccoBreedt Date: Wed, 26 Feb 2025 11:35:19 +0100 Subject: [PATCH 005/125] chore: update the introduction to be more foundation model training orientated --- .../pytorch/pytorch_image_classification.ipynb | 2 ++ 1 file changed, 2 insertions(+) diff --git a/integrations-and-supported-tools/pytorch/pytorch_image_classification.ipynb b/integrations-and-supported-tools/pytorch/pytorch_image_classification.ipynb index 13af69e..667f8e2 100644 --- a/integrations-and-supported-tools/pytorch/pytorch_image_classification.ipynb +++ b/integrations-and-supported-tools/pytorch/pytorch_image_classification.ipynb @@ 
-8,6 +8,8 @@ "\n", "Introduction\n", "\n", + "See how Neptune Scale can be used for foundation model traning when you are required to track a large number of metrics across your transformers architecture. \n", + "\n", "This guide will show you how to:\n", "- Initialize the Neptune Run object and log configuration parameters\n", "- Log standard loss and accuracy metrics to Neptune\n", From 69fdfbafa4b36f39699b4b18ce8774ad7d57fed4 Mon Sep 17 00:00:00 2001 From: LeoRoccoBreedt Date: Wed, 26 Feb 2025 11:42:06 +0100 Subject: [PATCH 006/125] chore: update dataset section with a better description --- .../pytorch/pytorch_image_classification.ipynb | 16 ++++++++++++---- 1 file changed, 12 insertions(+), 4 deletions(-) diff --git a/integrations-and-supported-tools/pytorch/pytorch_image_classification.ipynb b/integrations-and-supported-tools/pytorch/pytorch_image_classification.ipynb index 667f8e2..91cdcb1 100644 --- a/integrations-and-supported-tools/pytorch/pytorch_image_classification.ipynb +++ b/integrations-and-supported-tools/pytorch/pytorch_image_classification.ipynb @@ -59,7 +59,7 @@ }, { "cell_type": "code", - "execution_count": 3, + "execution_count": 2, "metadata": {}, "outputs": [], "source": [ @@ -81,7 +81,7 @@ }, { "cell_type": "code", - "execution_count": 6, + "execution_count": 4, "metadata": {}, "outputs": [], "source": [ @@ -97,9 +97,17 @@ "}" ] }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Download and transform the data for training\n", + "In this example, we will be using the MINST dataset as part of the PyTorch library for illustration. We create a train, validation and test dataset and apply a transformation. 
" + ] + }, { "cell_type": "code", - "execution_count": 5, + "execution_count": null, "metadata": {}, "outputs": [], "source": [ @@ -118,7 +126,7 @@ "# DataLoader for training, validation, and testing\n", "train_loader = DataLoader(train_dataset, batch_size=params[\"batch_size\"], shuffle=True)\n", "val_loader = DataLoader(val_dataset, batch_size=params[\"batch_size\"], shuffle=False)\n", - "test_loader = DataLoader(test_dataset, batch_size=params[\"batch_size\"], shuffle=False)\n" + "test_loader = DataLoader(test_dataset, batch_size=params[\"batch_size\"], shuffle=False)" ] }, { From 1cde2871349e639ee7e9fe0589abbd6a7f441b5d Mon Sep 17 00:00:00 2001 From: LeoRoccoBreedt Date: Wed, 26 Feb 2025 14:13:46 +0100 Subject: [PATCH 007/125] refactor: update training loop where grads, norms and activations are tracked --- .../pytorch_image_classification.ipynb | 83 ++++++++++++------- 1 file changed, 55 insertions(+), 28 deletions(-) diff --git a/integrations-and-supported-tools/pytorch/pytorch_image_classification.ipynb b/integrations-and-supported-tools/pytorch/pytorch_image_classification.ipynb index 91cdcb1..4e94085 100644 --- a/integrations-and-supported-tools/pytorch/pytorch_image_classification.ipynb +++ b/integrations-and-supported-tools/pytorch/pytorch_image_classification.ipynb @@ -59,7 +59,7 @@ }, { "cell_type": "code", - "execution_count": 2, + "execution_count": 5, "metadata": {}, "outputs": [], "source": [ @@ -81,7 +81,7 @@ }, { "cell_type": "code", - "execution_count": 4, + "execution_count": 6, "metadata": {}, "outputs": [], "source": [ @@ -107,7 +107,7 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 7, "metadata": {}, "outputs": [], "source": [ @@ -131,7 +131,7 @@ }, { "cell_type": "code", - "execution_count": 10, + "execution_count": null, "metadata": {}, "outputs": [ { @@ -214,7 +214,7 @@ }, { "cell_type": "code", - "execution_count": 11, + "execution_count": 9, "metadata": {}, "outputs": [], "source": [ @@ -253,7 +253,7 @@ }, { 
"cell_type": "code", - "execution_count": null, + "execution_count": 21, "metadata": {}, "outputs": [], "source": [ @@ -261,7 +261,10 @@ "from neptune_scale import Run\n", "from uuid import uuid4\n", "\n", - "run = Run(run_id=f\"pytorch-{uuid4()}\")\n", + "run = Run(\n", + " project = \"leo/pytorch-tutorial\",\n", + " run_id=f\"pytorch-{uuid4()}\"\n", + " )\n", "\n", "run.log_configs(\n", " {\n", @@ -285,11 +288,35 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 22, "metadata": {}, - "outputs": [], + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Epoch [1/5] Training complete. Loss: 2.3020, Accuracy: 10.98%\n", + "{'layers/layer_fc1/activation_mean': -154.4237060546875, 'layers/layer_fc2/activation_mean': -0.2948070466518402, 'layers/layer_fc3/activation_mean': 0.1060420349240303, 'layers/layer_fc1/activation_std': 38.38949966430664, 'layers/layer_fc2/activation_std': 0.1372535228729248, 'layers/layer_fc3/activation_std': 0.05648726597428322, 'layers/layer_fc1.weight_mean': 0.0, 'layers/layer_fc1.bias_mean': 0.0, 'layers/layer_fc2.weight_mean': 0.0, 'layers/layer_fc2.bias_mean': 0.0, 'layers/layer_fc3.weight_mean': 0.0, 'layers/layer_fc3.bias_mean': 5.867332397713199e-09, 'layers/layer_fc1.weight_std': 0.0, 'layers/layer_fc1.bias_std': 0.0, 'layers/layer_fc2.weight_std': 0.0, 'layers/layer_fc2.bias_std': 0.0, 'layers/layer_fc3.weight_std': 0.0, 'layers/layer_fc3.bias_std': 0.031105784699320793, 'fc3.bias': 0.09331735223531723}\n", + "Epoch [2/5] Training complete. 
Loss: 2.3023, Accuracy: 10.89%\n", + "{'layers/layer_fc1/activation_mean': -154.4237060546875, 'layers/layer_fc2/activation_mean': -0.2948070466518402, 'layers/layer_fc3/activation_mean': 0.10444235801696777, 'layers/layer_fc1/activation_std': 38.38949966430664, 'layers/layer_fc2/activation_std': 0.1372535228729248, 'layers/layer_fc3/activation_std': 0.06586454063653946, 'layers/layer_fc1.weight_mean': 0.0, 'layers/layer_fc1.bias_mean': 0.0, 'layers/layer_fc2.weight_mean': 0.0, 'layers/layer_fc2.bias_mean': 0.0, 'layers/layer_fc3.weight_mean': 0.0, 'layers/layer_fc3.bias_mean': 2.2351742678949904e-09, 'layers/layer_fc1.weight_std': 0.0, 'layers/layer_fc1.bias_std': 0.0, 'layers/layer_fc2.weight_std': 0.0, 'layers/layer_fc2.bias_std': 0.0, 'layers/layer_fc3.weight_std': 0.0, 'layers/layer_fc3.bias_std': 0.028597360476851463, 'fc3.bias': 0.08579207956790924}\n", + "Epoch [3/5] Training complete. Loss: 2.3024, Accuracy: 11.08%\n", + "{'layers/layer_fc1/activation_mean': -154.4237060546875, 'layers/layer_fc2/activation_mean': -0.2948070466518402, 'layers/layer_fc3/activation_mean': 0.10336797684431076, 'layers/layer_fc1/activation_std': 38.38949966430664, 'layers/layer_fc2/activation_std': 0.1372535228729248, 'layers/layer_fc3/activation_std': 0.07147954404354095, 'layers/layer_fc1.weight_mean': 0.0, 'layers/layer_fc1.bias_mean': 0.0, 'layers/layer_fc2.weight_mean': 0.0, 'layers/layer_fc2.bias_mean': 0.0, 'layers/layer_fc3.weight_mean': 0.0, 'layers/layer_fc3.bias_mean': 4.842877210364804e-09, 'layers/layer_fc1.weight_std': 0.0, 'layers/layer_fc1.bias_std': 0.0, 'layers/layer_fc2.weight_std': 0.0, 'layers/layer_fc2.bias_std': 0.0, 'layers/layer_fc3.weight_std': 0.0, 'layers/layer_fc3.bias_std': 0.034048620611429214, 'fc3.bias': 0.10214585065841675}\n", + "Epoch [4/5] Training complete. 
Loss: 2.3023, Accuracy: 11.11%\n", + "{'layers/layer_fc1/activation_mean': -154.4237060546875, 'layers/layer_fc2/activation_mean': -0.2948070466518402, 'layers/layer_fc3/activation_mean': 0.10569452494382858, 'layers/layer_fc1/activation_std': 38.38949966430664, 'layers/layer_fc2/activation_std': 0.1372535228729248, 'layers/layer_fc3/activation_std': 0.08372259140014648, 'layers/layer_fc1.weight_mean': 0.0, 'layers/layer_fc1.bias_mean': 0.0, 'layers/layer_fc2.weight_mean': 0.0, 'layers/layer_fc2.bias_mean': 0.0, 'layers/layer_fc3.weight_mean': 0.0, 'layers/layer_fc3.bias_mean': 2.607703164514419e-09, 'layers/layer_fc1.weight_std': 0.0, 'layers/layer_fc1.bias_std': 0.0, 'layers/layer_fc2.weight_std': 0.0, 'layers/layer_fc2.bias_std': 0.0, 'layers/layer_fc3.weight_std': 0.0, 'layers/layer_fc3.bias_std': 0.038734495639801025, 'fc3.bias': 0.11620348691940308}\n", + "Epoch [5/5] Training complete. Loss: 2.3026, Accuracy: 11.02%\n", + "{'layers/layer_fc1/activation_mean': -154.4237060546875, 'layers/layer_fc2/activation_mean': -0.2948070466518402, 'layers/layer_fc3/activation_mean': 0.10467425733804703, 'layers/layer_fc1/activation_std': 38.38949966430664, 'layers/layer_fc2/activation_std': 0.1372535228729248, 'layers/layer_fc3/activation_std': 0.0657070130109787, 'layers/layer_fc1.weight_mean': 0.0, 'layers/layer_fc1.bias_mean': 0.0, 'layers/layer_fc2.weight_mean': 0.0, 'layers/layer_fc2.bias_mean': 0.0, 'layers/layer_fc3.weight_mean': 0.0, 'layers/layer_fc3.bias_mean': 1.1175870895385742e-08, 'layers/layer_fc1.weight_std': 0.0, 'layers/layer_fc1.bias_std': 0.0, 'layers/layer_fc2.weight_std': 0.0, 'layers/layer_fc2.bias_std': 0.0, 'layers/layer_fc3.weight_std': 0.0, 'layers/layer_fc3.bias_std': 0.03915564715862274, 'fc3.bias': 0.11746694147586823}\n", + "Testing complete. 
Loss: 2.3026, Accuracy: 10.32%\n" + ] + } + ], "source": [ "# Training loop\n", + "activation_dict_mean = {}\n", + "activation_dict_std = {}\n", + "params_dict_std = {}\n", + "params_dict_mean = {}\n", + "grad_norms = {}\n", + "\n", "num_epochs = params[\"epochs\"]\n", "step_counter = 0\n", "for epoch in range(num_epochs):\n", @@ -325,11 +352,11 @@ " # Print loss and accuracy for each batch (step)\n", " #if (batch_idx + 1) % 5 == 0: # Every 5 steps\n", " batch_accuracy = 100 * correct_preds / total_preds\n", - " print(f\"Epoch [{epoch+1}/{num_epochs}], Step [{batch_idx+1}/{len(train_loader)}], Loss: {loss.item():.4f}, Accuracy: {batch_accuracy:.2f}%\")\n", + " # print(f\"Epoch [{epoch+1}/{num_epochs}], Step [{batch_idx+1}/{len(train_loader)}], Loss: {loss.item():.4f}, Accuracy: {batch_accuracy:.2f}%\")\n", " \n", " # Validation step per training step\n", " val_loss, val_accuracy = evaluate(model, val_loader) # Evaluate after each step\n", - " print(f\"Validation at step [{batch_idx+1}/{len(train_loader)}] - Loss: {val_loss:.4f}, Accuracy: {val_accuracy:.2f}%\")\n", + " # print(f\"Validation at step [{batch_idx+1}/{len(train_loader)}] - Loss: {val_loss:.4f}, Accuracy: {val_accuracy:.2f}%\")\n", "\n", " run.log_metrics(\n", " data = {\n", @@ -346,27 +373,27 @@ " train_accuracy = 100 * correct_preds / total_preds\n", " print(f\"Epoch [{epoch+1}/{num_epochs}] Training complete. 
Loss: {epoch_loss / len(train_loader):.4f}, Accuracy: {train_accuracy:.2f}%\")\n", "\n", - " # Track activations and gradients per layer\n", - " activation_dict_mean = {\n", - " f\"layers/layer_{name}/activation_mean\": activation.mean().item() for name, activation in model.get_activations().items()\n", - " }\n", - " \n", - " activation_dict_std = {\n", - " f\"layers/layer_{name}/activation_std\": activation.std().item() for name, activation in model.get_activations().items()\n", - " }\n", + " # Track activations\n", + " for name, activation in model.get_activations().items():\n", + " activation_dict_mean[f\"layers/layer_{name}/activation_mean\"] = activation.mean().item()\n", + " activation_dict_std[f\"layers/layer_{name}/activation_std\"] = activation.std().item()\n", + "\n", + " # Track gradients\n", + " for name, param in model.named_parameters():\n", + " params_dict_std[f\"layers/layer_{name}_std\"] = param.grad.std().item()\n", + " params_dict_mean[f\"layers/layer_{name}_mean\"] = param.grad.mean().item()\n", + "\n", + " # Track gradient norms per layer\n", + " for param in model.parameters():\n", + " if param.grad is not None:\n", + " grad_norms[f\"{name}\"] = param.grad.norm(2).item() # L2 norm (Euclidean norm) of the gradients\n", "\n", - " params_dict_std = {\n", - " f\"layers/layer_{name.split(\".\")[0]}/{name.split(\".\")[1]}_std\": param.grad.std().item() for name, param in model.named_parameters()\n", - " }\n", - " \n", - " params_dict_mean = {\n", - " f\"layers/layer_{name.split(\".\")[0]}/{name.split(\".\")[1]}_mean\": param.grad.mean().item() for name, param in model.named_parameters()\n", - " }\n", " \n", " layers_dict = {**activation_dict_mean, \n", " **activation_dict_std,\n", " **params_dict_mean,\n", - " **params_dict_std\n", + " **params_dict_std,\n", + " **grad_norms\n", " }\n", " print(layers_dict)\n", "\n", @@ -394,7 +421,7 @@ }, { "cell_type": "code", - "execution_count": 9, + "execution_count": 19, "metadata": {}, "outputs": [ { From 
2da28f4e2a10c6451ba3f4dc41be0c92efedd1cc Mon Sep 17 00:00:00 2001 From: LeoRoccoBreedt Date: Thu, 27 Feb 2025 11:28:25 +0100 Subject: [PATCH 008/125] refactor: update batch size and edit gradient norm logging code --- .../pytorch/pytorch_image_classification.ipynb | 15 +++++---------- 1 file changed, 5 insertions(+), 10 deletions(-) diff --git a/integrations-and-supported-tools/pytorch/pytorch_image_classification.ipynb b/integrations-and-supported-tools/pytorch/pytorch_image_classification.ipynb index 4e94085..75bd712 100644 --- a/integrations-and-supported-tools/pytorch/pytorch_image_classification.ipynb +++ b/integrations-and-supported-tools/pytorch/pytorch_image_classification.ipynb @@ -81,13 +81,13 @@ }, { "cell_type": "code", - "execution_count": 6, + "execution_count": 23, "metadata": {}, "outputs": [], "source": [ "params = {\n", " \"optimizer\": \"Adam\",\n", - " \"batch_size\": 512,\n", + " \"batch_size\": 256,\n", " \"learning_rate\": 0.05,\n", " \"epochs\": 5, \n", " \"device\": torch.device(\"cuda\" if torch.cuda.is_available() else \"cpu\"),\n", @@ -253,7 +253,7 @@ }, { "cell_type": "code", - "execution_count": 21, + "execution_count": 26, "metadata": {}, "outputs": [], "source": [ @@ -378,16 +378,11 @@ " activation_dict_mean[f\"layers/layer_{name}/activation_mean\"] = activation.mean().item()\n", " activation_dict_std[f\"layers/layer_{name}/activation_std\"] = activation.std().item()\n", "\n", - " # Track gradients\n", + " # Track gradients and norms per layer at each epoch\n", " for name, param in model.named_parameters():\n", " params_dict_std[f\"layers/layer_{name}_std\"] = param.grad.std().item()\n", " params_dict_mean[f\"layers/layer_{name}_mean\"] = param.grad.mean().item()\n", - "\n", - " # Track gradient norms per layer\n", - " for param in model.parameters():\n", - " if param.grad is not None:\n", - " grad_norms[f\"{name}\"] = param.grad.norm(2).item() # L2 norm (Euclidean norm) of the gradients\n", - "\n", + " 
grad_norms[f\"grad_norm/{name}\"] = param.grad.norm(2).item() # L2 norm (Euclidean norm) of the gradients\n", " \n", " layers_dict = {**activation_dict_mean, \n", " **activation_dict_std,\n", From 123fea2d201545677ce12a5ae1275fa126b6c861 Mon Sep 17 00:00:00 2001 From: LeoRoccoBreedt Date: Thu, 27 Feb 2025 11:30:07 +0100 Subject: [PATCH 009/125] chore: add data file to ignore for pytorch example --- .gitignore | 1 + 1 file changed, 1 insertion(+) diff --git a/.gitignore b/.gitignore index 0022e84..2a6943c 100644 --- a/.gitignore +++ b/.gitignore @@ -1,2 +1,3 @@ # Data files /how-to-guides/hpo/**/mnist +/integrations-and-supported-tools/pytorch/data \ No newline at end of file From 86b6d5dea9587a48f2e7caac90b69ad664c503b6 Mon Sep 17 00:00:00 2001 From: LeoRoccoBreedt Date: Thu, 27 Feb 2025 12:44:32 +0100 Subject: [PATCH 010/125] refactor: update model architecture layers and update training loop --- .../pytorch_image_classification.ipynb | 54 +++++++++++-------- 1 file changed, 31 insertions(+), 23 deletions(-) diff --git a/integrations-and-supported-tools/pytorch/pytorch_image_classification.ipynb b/integrations-and-supported-tools/pytorch/pytorch_image_classification.ipynb index 75bd712..db925e0 100644 --- a/integrations-and-supported-tools/pytorch/pytorch_image_classification.ipynb +++ b/integrations-and-supported-tools/pytorch/pytorch_image_classification.ipynb @@ -131,7 +131,7 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 39, "metadata": {}, "outputs": [ { @@ -168,20 +168,26 @@ " super(SimpleNN, self).__init__()\n", " # Define layers (increase number of layers)\n", " self.fc1 = nn.Linear(params[\"input_size\"], params[\"input_features\"]) \n", - " self.fc2 = nn.Linear(params[\"input_features\"], 64)\n", - " self.fc3 = nn.Linear(64, params[\"n_classes\"]) # Output layer (10 classes for MNIST)\n", + " self.fc2 = nn.Linear(params[\"input_features\"], 150)\n", + " self.fc3 = nn.Linear(150, 100)\n", + " self.fc4 = nn.Linear(100, 64)\n", 
+ " self.fc5 = nn.Linear(64, params[\"n_classes\"]) # Output layer (10 classes for MNIST)\n", "\n", " # Registering hooks to track activations\n", " self.hooks = []\n", " self.hooks.append(self.fc1.register_forward_hook(self.save_activation(\"fc1\")))\n", " self.hooks.append(self.fc2.register_forward_hook(self.save_activation(\"fc2\")))\n", " self.hooks.append(self.fc3.register_forward_hook(self.save_activation(\"fc3\")))\n", + " self.hooks.append(self.fc1.register_forward_hook(self.save_activation(\"fc4\")))\n", + " self.hooks.append(self.fc1.register_forward_hook(self.save_activation(\"fc5\")))\n", "\n", " def forward(self, x):\n", " x = x.view(-1, params[\"input_size\"]) # Flatten the input image (28x28)\n", " x = torch.relu(self.fc1(x)) # Apply ReLU activation\n", " x = torch.relu(self.fc2(x)) # Apply ReLU activation\n", - " x = self.fc3(x) # Output layer\n", + " x = torch.relu(self.fc3(x)) # Apply ReLU activation\n", + " x = torch.relu(self.fc4(x)) # Apply ReLU activation\n", + " x = self.fc5(x) # Output layer\n", " return x\n", " \n", " # Function to save activations\n", @@ -214,7 +220,7 @@ }, { "cell_type": "code", - "execution_count": 9, + "execution_count": 30, "metadata": {}, "outputs": [], "source": [ @@ -253,7 +259,7 @@ }, { "cell_type": "code", - "execution_count": 26, + "execution_count": 40, "metadata": {}, "outputs": [], "source": [ @@ -288,24 +294,24 @@ }, { "cell_type": "code", - "execution_count": 22, + "execution_count": 41, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ - "Epoch [1/5] Training complete. 
Loss: 2.3020, Accuracy: 10.98%\n", - "{'layers/layer_fc1/activation_mean': -154.4237060546875, 'layers/layer_fc2/activation_mean': -0.2948070466518402, 'layers/layer_fc3/activation_mean': 0.1060420349240303, 'layers/layer_fc1/activation_std': 38.38949966430664, 'layers/layer_fc2/activation_std': 0.1372535228729248, 'layers/layer_fc3/activation_std': 0.05648726597428322, 'layers/layer_fc1.weight_mean': 0.0, 'layers/layer_fc1.bias_mean': 0.0, 'layers/layer_fc2.weight_mean': 0.0, 'layers/layer_fc2.bias_mean': 0.0, 'layers/layer_fc3.weight_mean': 0.0, 'layers/layer_fc3.bias_mean': 5.867332397713199e-09, 'layers/layer_fc1.weight_std': 0.0, 'layers/layer_fc1.bias_std': 0.0, 'layers/layer_fc2.weight_std': 0.0, 'layers/layer_fc2.bias_std': 0.0, 'layers/layer_fc3.weight_std': 0.0, 'layers/layer_fc3.bias_std': 0.031105784699320793, 'fc3.bias': 0.09331735223531723}\n", - "Epoch [2/5] Training complete. Loss: 2.3023, Accuracy: 10.89%\n", - "{'layers/layer_fc1/activation_mean': -154.4237060546875, 'layers/layer_fc2/activation_mean': -0.2948070466518402, 'layers/layer_fc3/activation_mean': 0.10444235801696777, 'layers/layer_fc1/activation_std': 38.38949966430664, 'layers/layer_fc2/activation_std': 0.1372535228729248, 'layers/layer_fc3/activation_std': 0.06586454063653946, 'layers/layer_fc1.weight_mean': 0.0, 'layers/layer_fc1.bias_mean': 0.0, 'layers/layer_fc2.weight_mean': 0.0, 'layers/layer_fc2.bias_mean': 0.0, 'layers/layer_fc3.weight_mean': 0.0, 'layers/layer_fc3.bias_mean': 2.2351742678949904e-09, 'layers/layer_fc1.weight_std': 0.0, 'layers/layer_fc1.bias_std': 0.0, 'layers/layer_fc2.weight_std': 0.0, 'layers/layer_fc2.bias_std': 0.0, 'layers/layer_fc3.weight_std': 0.0, 'layers/layer_fc3.bias_std': 0.028597360476851463, 'fc3.bias': 0.08579207956790924}\n", - "Epoch [3/5] Training complete. 
Loss: 2.3024, Accuracy: 11.08%\n", - "{'layers/layer_fc1/activation_mean': -154.4237060546875, 'layers/layer_fc2/activation_mean': -0.2948070466518402, 'layers/layer_fc3/activation_mean': 0.10336797684431076, 'layers/layer_fc1/activation_std': 38.38949966430664, 'layers/layer_fc2/activation_std': 0.1372535228729248, 'layers/layer_fc3/activation_std': 0.07147954404354095, 'layers/layer_fc1.weight_mean': 0.0, 'layers/layer_fc1.bias_mean': 0.0, 'layers/layer_fc2.weight_mean': 0.0, 'layers/layer_fc2.bias_mean': 0.0, 'layers/layer_fc3.weight_mean': 0.0, 'layers/layer_fc3.bias_mean': 4.842877210364804e-09, 'layers/layer_fc1.weight_std': 0.0, 'layers/layer_fc1.bias_std': 0.0, 'layers/layer_fc2.weight_std': 0.0, 'layers/layer_fc2.bias_std': 0.0, 'layers/layer_fc3.weight_std': 0.0, 'layers/layer_fc3.bias_std': 0.034048620611429214, 'fc3.bias': 0.10214585065841675}\n", - "Epoch [4/5] Training complete. Loss: 2.3023, Accuracy: 11.11%\n", - "{'layers/layer_fc1/activation_mean': -154.4237060546875, 'layers/layer_fc2/activation_mean': -0.2948070466518402, 'layers/layer_fc3/activation_mean': 0.10569452494382858, 'layers/layer_fc1/activation_std': 38.38949966430664, 'layers/layer_fc2/activation_std': 0.1372535228729248, 'layers/layer_fc3/activation_std': 0.08372259140014648, 'layers/layer_fc1.weight_mean': 0.0, 'layers/layer_fc1.bias_mean': 0.0, 'layers/layer_fc2.weight_mean': 0.0, 'layers/layer_fc2.bias_mean': 0.0, 'layers/layer_fc3.weight_mean': 0.0, 'layers/layer_fc3.bias_mean': 2.607703164514419e-09, 'layers/layer_fc1.weight_std': 0.0, 'layers/layer_fc1.bias_std': 0.0, 'layers/layer_fc2.weight_std': 0.0, 'layers/layer_fc2.bias_std': 0.0, 'layers/layer_fc3.weight_std': 0.0, 'layers/layer_fc3.bias_std': 0.038734495639801025, 'fc3.bias': 0.11620348691940308}\n", - "Epoch [5/5] Training complete. 
Loss: 2.3026, Accuracy: 11.02%\n", - "{'layers/layer_fc1/activation_mean': -154.4237060546875, 'layers/layer_fc2/activation_mean': -0.2948070466518402, 'layers/layer_fc3/activation_mean': 0.10467425733804703, 'layers/layer_fc1/activation_std': 38.38949966430664, 'layers/layer_fc2/activation_std': 0.1372535228729248, 'layers/layer_fc3/activation_std': 0.0657070130109787, 'layers/layer_fc1.weight_mean': 0.0, 'layers/layer_fc1.bias_mean': 0.0, 'layers/layer_fc2.weight_mean': 0.0, 'layers/layer_fc2.bias_mean': 0.0, 'layers/layer_fc3.weight_mean': 0.0, 'layers/layer_fc3.bias_mean': 1.1175870895385742e-08, 'layers/layer_fc1.weight_std': 0.0, 'layers/layer_fc1.bias_std': 0.0, 'layers/layer_fc2.weight_std': 0.0, 'layers/layer_fc2.bias_std': 0.0, 'layers/layer_fc3.weight_std': 0.0, 'layers/layer_fc3.bias_std': 0.03915564715862274, 'fc3.bias': 0.11746694147586823}\n", - "Testing complete. Loss: 2.3026, Accuracy: 10.32%\n" + "Epoch [1/5] Training complete. Loss: 2.4092, Accuracy: 37.12%\n", + "{'layers/layer_fc1/activation_mean': -162.4604949951172, 'layers/layer_fc4/activation_mean': -162.4604949951172, 'layers/layer_fc5/activation_mean': -162.4604949951172, 'layers/layer_fc2/activation_mean': -12.759440422058105, 'layers/layer_fc3/activation_mean': -5.810527801513672, 'layers/layer_fc1/activation_std': 39.21321487426758, 'layers/layer_fc4/activation_std': 39.21321487426758, 'layers/layer_fc5/activation_std': 39.21321487426758, 'layers/layer_fc2/activation_std': 10.599294662475586, 'layers/layer_fc3/activation_std': 7.578292369842529, 'layers/layer_fc1.weight_mean': 4.031595744891092e-05, 'layers/layer_fc1.bias_mean': -5.974042505840771e-05, 'layers/layer_fc2.weight_mean': -1.2967424481757917e-05, 'layers/layer_fc2.bias_mean': -0.00016122810484375805, 'layers/layer_fc3.weight_mean': 6.324012247205246e-06, 'layers/layer_fc3.bias_mean': 0.00010194708738708869, 'layers/layer_fc4.weight_mean': -0.00013273996592033654, 'layers/layer_fc4.bias_mean': 0.0001236710959346965, 
'layers/layer_fc5.weight_mean': -5.18019738215969e-10, 'layers/layer_fc5.bias_mean': -6.053596512956005e-10, 'layers/layer_fc1.weight_std': 0.00062244210857898, 'layers/layer_fc1.bias_std': 0.0006770123727619648, 'layers/layer_fc2.weight_std': 0.0038620077539235353, 'layers/layer_fc2.bias_std': 0.0019892167765647173, 'layers/layer_fc3.weight_std': 0.003400410758331418, 'layers/layer_fc3.bias_std': 0.004144275560975075, 'layers/layer_fc4.weight_std': 0.0041240728460252285, 'layers/layer_fc4.bias_std': 0.004583150614053011, 'layers/layer_fc5.weight_std': 0.019711505621671677, 'layers/layer_fc5.bias_std': 0.02345249615609646, 'grad_norm/fc1.weight': 0.2794375717639923, 'grad_norm/fc1.bias': 0.010853193700313568, 'grad_norm/fc2.weight': 0.756790280342102, 'grad_norm/fc2.bias': 0.024361643940210342, 'grad_norm/fc3.weight': 0.4164503812789917, 'grad_norm/fc3.bias': 0.041247621178627014, 'grad_norm/fc4.weight': 0.3300709128379822, 'grad_norm/fc4.bias': 0.036391083151102066, 'grad_norm/fc5.weight': 0.49827632308006287, 'grad_norm/fc5.bias': 0.07035749405622482}\n", + "Epoch [2/5] Training complete. 
Loss: 0.9144, Accuracy: 67.32%\n", + "{'layers/layer_fc1/activation_mean': -162.39700317382812, 'layers/layer_fc4/activation_mean': -162.39700317382812, 'layers/layer_fc5/activation_mean': -162.39700317382812, 'layers/layer_fc2/activation_mean': -17.86925506591797, 'layers/layer_fc3/activation_mean': -7.695600986480713, 'layers/layer_fc1/activation_std': 39.54920959472656, 'layers/layer_fc4/activation_std': 39.54920959472656, 'layers/layer_fc5/activation_std': 39.54920959472656, 'layers/layer_fc2/activation_std': 12.200335502624512, 'layers/layer_fc3/activation_std': 7.782861709594727, 'layers/layer_fc1.weight_mean': -3.351208579260856e-05, 'layers/layer_fc1.bias_mean': 4.919838102068752e-05, 'layers/layer_fc2.weight_mean': 3.6362267564982176e-05, 'layers/layer_fc2.bias_mean': 0.00021275453036651015, 'layers/layer_fc3.weight_mean': 1.2260925359441899e-05, 'layers/layer_fc3.bias_mean': 0.0003240771184209734, 'layers/layer_fc4.weight_mean': -0.00010540984658291563, 'layers/layer_fc4.bias_mean': -0.0003542335471138358, 'layers/layer_fc5.weight_mean': 1.9608706325335845e-10, 'layers/layer_fc5.bias_mean': 1.8626451769865326e-10, 'layers/layer_fc1.weight_std': 0.0006732655456289649, 'layers/layer_fc1.bias_std': 0.0007651570485904813, 'layers/layer_fc2.weight_std': 0.007791644427925348, 'layers/layer_fc2.bias_std': 0.004374559037387371, 'layers/layer_fc3.weight_std': 0.004076257348060608, 'layers/layer_fc3.bias_std': 0.0062528871931135654, 'layers/layer_fc4.weight_std': 0.00545633165165782, 'layers/layer_fc4.bias_std': 0.006939326878637075, 'layers/layer_fc5.weight_std': 0.016751136630773544, 'layers/layer_fc5.bias_std': 0.014692811295390129, 'grad_norm/fc1.weight': 0.3019956946372986, 'grad_norm/fc1.bias': 0.012243907898664474, 'grad_norm/fc2.weight': 1.5268410444259644, 'grad_norm/fc2.bias': 0.05346183478832245, 'grad_norm/fc3.weight': 0.49922317266464233, 'grad_norm/fc3.bias': 0.062299784272909164, 'grad_norm/fc4.weight': 0.43655386567115784, 'grad_norm/fc4.bias': 
0.055152054876089096, 'grad_norm/fc5.weight': 0.4234427511692047, 'grad_norm/fc5.bias': 0.044078435748815536}\n", + "Epoch [3/5] Training complete. Loss: 0.7789, Accuracy: 73.94%\n", + "{'layers/layer_fc1/activation_mean': -162.37669372558594, 'layers/layer_fc4/activation_mean': -162.37669372558594, 'layers/layer_fc5/activation_mean': -162.37669372558594, 'layers/layer_fc2/activation_mean': -18.648223876953125, 'layers/layer_fc3/activation_mean': -7.7249956130981445, 'layers/layer_fc1/activation_std': 39.64369201660156, 'layers/layer_fc4/activation_std': 39.64369201660156, 'layers/layer_fc5/activation_std': 39.64369201660156, 'layers/layer_fc2/activation_std': 13.20457935333252, 'layers/layer_fc3/activation_std': 8.523029327392578, 'layers/layer_fc1.weight_mean': -4.384123894851655e-05, 'layers/layer_fc1.bias_mean': 5.376951958169229e-05, 'layers/layer_fc2.weight_mean': 0.00012754423369187862, 'layers/layer_fc2.bias_mean': 0.0002990294306073338, 'layers/layer_fc3.weight_mean': 7.330457447096705e-05, 'layers/layer_fc3.bias_mean': 0.0008429083391092718, 'layers/layer_fc4.weight_mean': -0.00011236544378334656, 'layers/layer_fc4.bias_mean': -0.0007185321301221848, 'layers/layer_fc5.weight_mean': -1.0186340659856796e-10, 'layers/layer_fc5.bias_mean': 7.450580707946131e-10, 'layers/layer_fc1.weight_std': 0.000986010069027543, 'layers/layer_fc1.bias_std': 0.001086454140022397, 'layers/layer_fc2.weight_std': 0.012911902740597725, 'layers/layer_fc2.bias_std': 0.0031231597531586885, 'layers/layer_fc3.weight_std': 0.006555264815688133, 'layers/layer_fc3.bias_std': 0.006345531903207302, 'layers/layer_fc4.weight_std': 0.008043059147894382, 'layers/layer_fc4.bias_std': 0.0076463837176561356, 'layers/layer_fc5.weight_std': 0.038150131702423096, 'layers/layer_fc5.bias_std': 0.0253444854170084, 'grad_norm/fc1.weight': 0.4421679377555847, 'grad_norm/fc1.bias': 0.017370598390698433, 'grad_norm/fc2.weight': 2.5302963256835938, 'grad_norm/fc2.bias': 0.038298528641462326, 
'grad_norm/fc3.weight': 0.8028761744499207, 'grad_norm/fc3.bias': 0.06369742006063461, 'grad_norm/fc4.weight': 0.6434571743011475, 'grad_norm/fc4.bias': 0.06096290051937103, 'grad_norm/fc5.weight': 0.9643760919570923, 'grad_norm/fc5.bias': 0.07603345066308975}\n", + "Epoch [4/5] Training complete. Loss: 0.7422, Accuracy: 75.72%\n", + "{'layers/layer_fc1/activation_mean': -162.26683044433594, 'layers/layer_fc4/activation_mean': -162.26683044433594, 'layers/layer_fc5/activation_mean': -162.26683044433594, 'layers/layer_fc2/activation_mean': -24.927921295166016, 'layers/layer_fc3/activation_mean': -10.208073616027832, 'layers/layer_fc1/activation_std': 40.17057800292969, 'layers/layer_fc4/activation_std': 40.17057800292969, 'layers/layer_fc5/activation_std': 40.17057800292969, 'layers/layer_fc2/activation_std': 16.805315017700195, 'layers/layer_fc3/activation_std': 9.891535758972168, 'layers/layer_fc1.weight_mean': -5.1961251301690936e-05, 'layers/layer_fc1.bias_mean': 6.783485878258944e-05, 'layers/layer_fc2.weight_mean': 0.00011843817628687248, 'layers/layer_fc2.bias_mean': 0.0007921061478555202, 'layers/layer_fc3.weight_mean': 0.0001514973264420405, 'layers/layer_fc3.bias_mean': 0.0010678424732759595, 'layers/layer_fc4.weight_mean': 1.0874953659367748e-05, 'layers/layer_fc4.bias_mean': -4.5362801756709814e-05, 'layers/layer_fc5.weight_mean': 1.8471837948119685e-10, 'layers/layer_fc5.bias_mean': 5.587935669737476e-10, 'layers/layer_fc1.weight_std': 0.0006585291703231633, 'layers/layer_fc1.bias_std': 0.0007209584582597017, 'layers/layer_fc2.weight_std': 0.010477228090167046, 'layers/layer_fc2.bias_std': 0.005132471676915884, 'layers/layer_fc3.weight_std': 0.008328537456691265, 'layers/layer_fc3.bias_std': 0.008772918954491615, 'layers/layer_fc4.weight_std': 0.005717130843549967, 'layers/layer_fc4.bias_std': 0.005424214527010918, 'layers/layer_fc5.weight_std': 0.031180905178189278, 'layers/layer_fc5.bias_std': 0.030189601704478264, 'grad_norm/fc1.weight': 
0.29593732953071594, 'grad_norm/fc1.bias': 0.011563831008970737, 'grad_norm/fc2.weight': 2.053213596343994, 'grad_norm/fc2.bias': 0.06339646875858307, 'grad_norm/fc3.weight': 1.0201679468154907, 'grad_norm/fc3.bias': 0.08794017881155014, 'grad_norm/fc4.weight': 0.45733556151390076, 'grad_norm/fc4.bias': 0.04305489733815193, 'grad_norm/fc5.weight': 0.7882049083709717, 'grad_norm/fc5.bias': 0.09056880325078964}\n", + "Epoch [5/5] Training complete. Loss: 0.7054, Accuracy: 76.78%\n", + "{'layers/layer_fc1/activation_mean': -162.22689819335938, 'layers/layer_fc4/activation_mean': -162.22689819335938, 'layers/layer_fc5/activation_mean': -162.22689819335938, 'layers/layer_fc2/activation_mean': -26.883211135864258, 'layers/layer_fc3/activation_mean': -14.033665657043457, 'layers/layer_fc1/activation_std': 40.35655975341797, 'layers/layer_fc4/activation_std': 40.35655975341797, 'layers/layer_fc5/activation_std': 40.35655975341797, 'layers/layer_fc2/activation_std': 18.789628982543945, 'layers/layer_fc3/activation_std': 13.676512718200684, 'layers/layer_fc1.weight_mean': 1.5473866369575262e-05, 'layers/layer_fc1.bias_mean': -1.7887834474095143e-05, 'layers/layer_fc2.weight_mean': -0.00011709203681675717, 'layers/layer_fc2.bias_mean': -0.00017746593221090734, 'layers/layer_fc3.weight_mean': -9.6314717666246e-05, 'layers/layer_fc3.bias_mean': -0.0003019513678736985, 'layers/layer_fc4.weight_mean': 7.599956006743014e-05, 'layers/layer_fc4.bias_mean': 7.583803380839527e-06, 'layers/layer_fc5.weight_mean': 1.14232533154901e-10, 'layers/layer_fc5.bias_mean': 0.0, 'layers/layer_fc1.weight_std': 0.0006393238436430693, 'layers/layer_fc1.bias_std': 0.0007226161542348564, 'layers/layer_fc2.weight_std': 0.01606857031583786, 'layers/layer_fc2.bias_std': 0.002762269927188754, 'layers/layer_fc3.weight_std': 0.009842080064117908, 'layers/layer_fc3.bias_std': 0.0029760582838207483, 'layers/layer_fc4.weight_std': 0.008030076511204243, 'layers/layer_fc4.bias_std': 0.005211194045841694, 
'layers/layer_fc5.weight_std': 0.02980860136449337, 'layers/layer_fc5.bias_std': 0.024154536426067352, 'grad_norm/fc1.weight': 0.2865002751350403, 'grad_norm/fc1.bias': 0.01154280360788107, 'grad_norm/fc2.weight': 3.1488263607025146, 'grad_norm/fc2.bias': 0.03378778323531151, 'grad_norm/fc3.weight': 1.2054210901260376, 'grad_norm/fc3.bias': 0.02976495958864689, 'grad_norm/fc4.weight': 0.6423847079277039, 'grad_norm/fc4.bias': 0.041362613439559937, 'grad_norm/fc5.weight': 0.7535152435302734, 'grad_norm/fc5.bias': 0.07246360927820206}\n", + "Testing complete. Loss: 0.6699, Accuracy: 78.28%\n" ] } ], @@ -338,11 +344,12 @@ " \n", " # Compute the loss\n", " loss = criterion(output, target)\n", - " epoch_loss += loss.item()\n", " \n", " # Backward pass and optimization\n", " loss.backward()\n", " optimizer.step()\n", + "\n", + " epoch_loss += loss.item()\n", " \n", " # Calculate accuracy\n", " _, predicted = torch.max(output.data, 1)\n", @@ -380,9 +387,10 @@ "\n", " # Track gradients and norms per layer at each epoch\n", " for name, param in model.named_parameters():\n", - " params_dict_std[f\"layers/layer_{name}_std\"] = param.grad.std().item()\n", - " params_dict_mean[f\"layers/layer_{name}_mean\"] = param.grad.mean().item()\n", - " grad_norms[f\"grad_norm/{name}\"] = param.grad.norm(2).item() # L2 norm (Euclidean norm) of the gradients\n", + " if param is not None:\n", + " params_dict_std[f\"layers/layer_{name}_std\"] = param.grad.std().item()\n", + " params_dict_mean[f\"layers/layer_{name}_mean\"] = param.grad.mean().item()\n", + " grad_norms[f\"grad_norm/{name}\"] = param.grad.norm(2).item() # L2 norm (Euclidean norm) of the gradients\n", " \n", " layers_dict = {**activation_dict_mean, \n", " **activation_dict_std,\n", @@ -416,7 +424,7 @@ }, { "cell_type": "code", - "execution_count": 19, + "execution_count": 35, "metadata": {}, "outputs": [ { From a39077aeb688faefe39d6ee44777c67615b23a40 Mon Sep 17 00:00:00 2001 From: LeoRoccoBreedt Date: Thu, 27 Feb 2025 15:42:26 
+0100 Subject: [PATCH 011/125] refactor: update accuracy calculation to not output the percentage --- .../pytorch_image_classification.ipynb | 75 ++++++++++++------- 1 file changed, 48 insertions(+), 27 deletions(-) diff --git a/integrations-and-supported-tools/pytorch/pytorch_image_classification.ipynb b/integrations-and-supported-tools/pytorch/pytorch_image_classification.ipynb index db925e0..69f68c7 100644 --- a/integrations-and-supported-tools/pytorch/pytorch_image_classification.ipynb +++ b/integrations-and-supported-tools/pytorch/pytorch_image_classification.ipynb @@ -54,7 +54,10 @@ "source": [ "# TODO - update config to include model architecture\n", "# TODO - Add more hyperparameters\n", - "# TODO - Add additional logging metrics (weights, gradients, activations, etc.)" + "# TODO - look at CNN layers\n", + "# TODO - output and log the model architecture\n", + "# TODO - check loss and accuracy calculations\n", + "# TODO - clean up the evaluation function to exclude tracking gradients" ] }, { @@ -81,14 +84,14 @@ }, { "cell_type": "code", - "execution_count": 23, + "execution_count": 71, "metadata": {}, "outputs": [], "source": [ "params = {\n", " \"optimizer\": \"Adam\",\n", " \"batch_size\": 256,\n", - " \"learning_rate\": 0.05,\n", + " \"learning_rate\": 0.01,\n", " \"epochs\": 5, \n", " \"device\": torch.device(\"cuda\" if torch.cuda.is_available() else \"cpu\"),\n", " \"input_features\": 256,\n", @@ -131,7 +134,7 @@ }, { "cell_type": "code", - "execution_count": 39, + "execution_count": 70, "metadata": {}, "outputs": [ { @@ -168,10 +171,10 @@ " super(SimpleNN, self).__init__()\n", " # Define layers (increase number of layers)\n", " self.fc1 = nn.Linear(params[\"input_size\"], params[\"input_features\"]) \n", - " self.fc2 = nn.Linear(params[\"input_features\"], 150)\n", - " self.fc3 = nn.Linear(150, 100)\n", - " self.fc4 = nn.Linear(100, 64)\n", - " self.fc5 = nn.Linear(64, params[\"n_classes\"]) # Output layer (10 classes for MNIST)\n", + " self.fc2 = 
nn.Linear(params[\"input_features\"], 512)\n", + " self.fc3 = nn.Linear(512, 256)\n", + " self.fc4 = nn.Linear(256, 128)\n", + " self.fc5 = nn.Linear(128, params[\"n_classes\"]) # Output layer (10 classes for MNIST)\n", "\n", " # Registering hooks to track activations\n", " self.hooks = []\n", @@ -203,7 +206,7 @@ " self.activations = {}\n", "\n", "# Instantiate the model, loss function, and optimizer\n", - "# model = SimpleCNN()\n", + "#model = SimpleCNN()\n", "model = SimpleNN()\n", "criterion = nn.CrossEntropyLoss() # Loss function\n", "\n", @@ -220,7 +223,7 @@ }, { "cell_type": "code", - "execution_count": 30, + "execution_count": null, "metadata": {}, "outputs": [], "source": [ @@ -246,7 +249,7 @@ " total_preds += target.size(0)\n", " correct_preds += (predicted == target).sum().item()\n", " \n", - " accuracy = 100 * correct_preds / total_preds\n", + " accuracy = correct_preds / total_preds\n", " return epoch_loss / len(data_loader), accuracy\n" ] }, @@ -259,7 +262,7 @@ }, { "cell_type": "code", - "execution_count": 40, + "execution_count": 72, "metadata": {}, "outputs": [], "source": [ @@ -277,7 +280,8 @@ " \"config/learning_rate\": params[\"learning_rate\"],\n", " \"config/optimizer\": params[\"optimizer\"],\n", " \"config/batch_size\": params[\"batch_size\"],\n", - " \"config/epochs\": params[\"epochs\"]\n", + " \"config/epochs\": params[\"epochs\"],\n", + " \"config/input_size\": params[\"input_size\"]\n", " }\n", ")\n", "\n", @@ -294,24 +298,41 @@ }, { "cell_type": "code", - "execution_count": 41, + "execution_count": null, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ - "Epoch [1/5] Training complete. 
Loss: 2.4092, Accuracy: 37.12%\n", - "{'layers/layer_fc1/activation_mean': -162.4604949951172, 'layers/layer_fc4/activation_mean': -162.4604949951172, 'layers/layer_fc5/activation_mean': -162.4604949951172, 'layers/layer_fc2/activation_mean': -12.759440422058105, 'layers/layer_fc3/activation_mean': -5.810527801513672, 'layers/layer_fc1/activation_std': 39.21321487426758, 'layers/layer_fc4/activation_std': 39.21321487426758, 'layers/layer_fc5/activation_std': 39.21321487426758, 'layers/layer_fc2/activation_std': 10.599294662475586, 'layers/layer_fc3/activation_std': 7.578292369842529, 'layers/layer_fc1.weight_mean': 4.031595744891092e-05, 'layers/layer_fc1.bias_mean': -5.974042505840771e-05, 'layers/layer_fc2.weight_mean': -1.2967424481757917e-05, 'layers/layer_fc2.bias_mean': -0.00016122810484375805, 'layers/layer_fc3.weight_mean': 6.324012247205246e-06, 'layers/layer_fc3.bias_mean': 0.00010194708738708869, 'layers/layer_fc4.weight_mean': -0.00013273996592033654, 'layers/layer_fc4.bias_mean': 0.0001236710959346965, 'layers/layer_fc5.weight_mean': -5.18019738215969e-10, 'layers/layer_fc5.bias_mean': -6.053596512956005e-10, 'layers/layer_fc1.weight_std': 0.00062244210857898, 'layers/layer_fc1.bias_std': 0.0006770123727619648, 'layers/layer_fc2.weight_std': 0.0038620077539235353, 'layers/layer_fc2.bias_std': 0.0019892167765647173, 'layers/layer_fc3.weight_std': 0.003400410758331418, 'layers/layer_fc3.bias_std': 0.004144275560975075, 'layers/layer_fc4.weight_std': 0.0041240728460252285, 'layers/layer_fc4.bias_std': 0.004583150614053011, 'layers/layer_fc5.weight_std': 0.019711505621671677, 'layers/layer_fc5.bias_std': 0.02345249615609646, 'grad_norm/fc1.weight': 0.2794375717639923, 'grad_norm/fc1.bias': 0.010853193700313568, 'grad_norm/fc2.weight': 0.756790280342102, 'grad_norm/fc2.bias': 0.024361643940210342, 'grad_norm/fc3.weight': 0.4164503812789917, 'grad_norm/fc3.bias': 0.041247621178627014, 'grad_norm/fc4.weight': 0.3300709128379822, 'grad_norm/fc4.bias': 
0.036391083151102066, 'grad_norm/fc5.weight': 0.49827632308006287, 'grad_norm/fc5.bias': 0.07035749405622482}\n", - "Epoch [2/5] Training complete. Loss: 0.9144, Accuracy: 67.32%\n", - "{'layers/layer_fc1/activation_mean': -162.39700317382812, 'layers/layer_fc4/activation_mean': -162.39700317382812, 'layers/layer_fc5/activation_mean': -162.39700317382812, 'layers/layer_fc2/activation_mean': -17.86925506591797, 'layers/layer_fc3/activation_mean': -7.695600986480713, 'layers/layer_fc1/activation_std': 39.54920959472656, 'layers/layer_fc4/activation_std': 39.54920959472656, 'layers/layer_fc5/activation_std': 39.54920959472656, 'layers/layer_fc2/activation_std': 12.200335502624512, 'layers/layer_fc3/activation_std': 7.782861709594727, 'layers/layer_fc1.weight_mean': -3.351208579260856e-05, 'layers/layer_fc1.bias_mean': 4.919838102068752e-05, 'layers/layer_fc2.weight_mean': 3.6362267564982176e-05, 'layers/layer_fc2.bias_mean': 0.00021275453036651015, 'layers/layer_fc3.weight_mean': 1.2260925359441899e-05, 'layers/layer_fc3.bias_mean': 0.0003240771184209734, 'layers/layer_fc4.weight_mean': -0.00010540984658291563, 'layers/layer_fc4.bias_mean': -0.0003542335471138358, 'layers/layer_fc5.weight_mean': 1.9608706325335845e-10, 'layers/layer_fc5.bias_mean': 1.8626451769865326e-10, 'layers/layer_fc1.weight_std': 0.0006732655456289649, 'layers/layer_fc1.bias_std': 0.0007651570485904813, 'layers/layer_fc2.weight_std': 0.007791644427925348, 'layers/layer_fc2.bias_std': 0.004374559037387371, 'layers/layer_fc3.weight_std': 0.004076257348060608, 'layers/layer_fc3.bias_std': 0.0062528871931135654, 'layers/layer_fc4.weight_std': 0.00545633165165782, 'layers/layer_fc4.bias_std': 0.006939326878637075, 'layers/layer_fc5.weight_std': 0.016751136630773544, 'layers/layer_fc5.bias_std': 0.014692811295390129, 'grad_norm/fc1.weight': 0.3019956946372986, 'grad_norm/fc1.bias': 0.012243907898664474, 'grad_norm/fc2.weight': 1.5268410444259644, 'grad_norm/fc2.bias': 0.05346183478832245, 
'grad_norm/fc3.weight': 0.49922317266464233, 'grad_norm/fc3.bias': 0.062299784272909164, 'grad_norm/fc4.weight': 0.43655386567115784, 'grad_norm/fc4.bias': 0.055152054876089096, 'grad_norm/fc5.weight': 0.4234427511692047, 'grad_norm/fc5.bias': 0.044078435748815536}\n", - "Epoch [3/5] Training complete. Loss: 0.7789, Accuracy: 73.94%\n", - "{'layers/layer_fc1/activation_mean': -162.37669372558594, 'layers/layer_fc4/activation_mean': -162.37669372558594, 'layers/layer_fc5/activation_mean': -162.37669372558594, 'layers/layer_fc2/activation_mean': -18.648223876953125, 'layers/layer_fc3/activation_mean': -7.7249956130981445, 'layers/layer_fc1/activation_std': 39.64369201660156, 'layers/layer_fc4/activation_std': 39.64369201660156, 'layers/layer_fc5/activation_std': 39.64369201660156, 'layers/layer_fc2/activation_std': 13.20457935333252, 'layers/layer_fc3/activation_std': 8.523029327392578, 'layers/layer_fc1.weight_mean': -4.384123894851655e-05, 'layers/layer_fc1.bias_mean': 5.376951958169229e-05, 'layers/layer_fc2.weight_mean': 0.00012754423369187862, 'layers/layer_fc2.bias_mean': 0.0002990294306073338, 'layers/layer_fc3.weight_mean': 7.330457447096705e-05, 'layers/layer_fc3.bias_mean': 0.0008429083391092718, 'layers/layer_fc4.weight_mean': -0.00011236544378334656, 'layers/layer_fc4.bias_mean': -0.0007185321301221848, 'layers/layer_fc5.weight_mean': -1.0186340659856796e-10, 'layers/layer_fc5.bias_mean': 7.450580707946131e-10, 'layers/layer_fc1.weight_std': 0.000986010069027543, 'layers/layer_fc1.bias_std': 0.001086454140022397, 'layers/layer_fc2.weight_std': 0.012911902740597725, 'layers/layer_fc2.bias_std': 0.0031231597531586885, 'layers/layer_fc3.weight_std': 0.006555264815688133, 'layers/layer_fc3.bias_std': 0.006345531903207302, 'layers/layer_fc4.weight_std': 0.008043059147894382, 'layers/layer_fc4.bias_std': 0.0076463837176561356, 'layers/layer_fc5.weight_std': 0.038150131702423096, 'layers/layer_fc5.bias_std': 0.0253444854170084, 'grad_norm/fc1.weight': 
0.4421679377555847, 'grad_norm/fc1.bias': 0.017370598390698433, 'grad_norm/fc2.weight': 2.5302963256835938, 'grad_norm/fc2.bias': 0.038298528641462326, 'grad_norm/fc3.weight': 0.8028761744499207, 'grad_norm/fc3.bias': 0.06369742006063461, 'grad_norm/fc4.weight': 0.6434571743011475, 'grad_norm/fc4.bias': 0.06096290051937103, 'grad_norm/fc5.weight': 0.9643760919570923, 'grad_norm/fc5.bias': 0.07603345066308975}\n", - "Epoch [4/5] Training complete. Loss: 0.7422, Accuracy: 75.72%\n", - "{'layers/layer_fc1/activation_mean': -162.26683044433594, 'layers/layer_fc4/activation_mean': -162.26683044433594, 'layers/layer_fc5/activation_mean': -162.26683044433594, 'layers/layer_fc2/activation_mean': -24.927921295166016, 'layers/layer_fc3/activation_mean': -10.208073616027832, 'layers/layer_fc1/activation_std': 40.17057800292969, 'layers/layer_fc4/activation_std': 40.17057800292969, 'layers/layer_fc5/activation_std': 40.17057800292969, 'layers/layer_fc2/activation_std': 16.805315017700195, 'layers/layer_fc3/activation_std': 9.891535758972168, 'layers/layer_fc1.weight_mean': -5.1961251301690936e-05, 'layers/layer_fc1.bias_mean': 6.783485878258944e-05, 'layers/layer_fc2.weight_mean': 0.00011843817628687248, 'layers/layer_fc2.bias_mean': 0.0007921061478555202, 'layers/layer_fc3.weight_mean': 0.0001514973264420405, 'layers/layer_fc3.bias_mean': 0.0010678424732759595, 'layers/layer_fc4.weight_mean': 1.0874953659367748e-05, 'layers/layer_fc4.bias_mean': -4.5362801756709814e-05, 'layers/layer_fc5.weight_mean': 1.8471837948119685e-10, 'layers/layer_fc5.bias_mean': 5.587935669737476e-10, 'layers/layer_fc1.weight_std': 0.0006585291703231633, 'layers/layer_fc1.bias_std': 0.0007209584582597017, 'layers/layer_fc2.weight_std': 0.010477228090167046, 'layers/layer_fc2.bias_std': 0.005132471676915884, 'layers/layer_fc3.weight_std': 0.008328537456691265, 'layers/layer_fc3.bias_std': 0.008772918954491615, 'layers/layer_fc4.weight_std': 0.005717130843549967, 'layers/layer_fc4.bias_std': 
0.005424214527010918, 'layers/layer_fc5.weight_std': 0.031180905178189278, 'layers/layer_fc5.bias_std': 0.030189601704478264, 'grad_norm/fc1.weight': 0.29593732953071594, 'grad_norm/fc1.bias': 0.011563831008970737, 'grad_norm/fc2.weight': 2.053213596343994, 'grad_norm/fc2.bias': 0.06339646875858307, 'grad_norm/fc3.weight': 1.0201679468154907, 'grad_norm/fc3.bias': 0.08794017881155014, 'grad_norm/fc4.weight': 0.45733556151390076, 'grad_norm/fc4.bias': 0.04305489733815193, 'grad_norm/fc5.weight': 0.7882049083709717, 'grad_norm/fc5.bias': 0.09056880325078964}\n", - "Epoch [5/5] Training complete. Loss: 0.7054, Accuracy: 76.78%\n", - "{'layers/layer_fc1/activation_mean': -162.22689819335938, 'layers/layer_fc4/activation_mean': -162.22689819335938, 'layers/layer_fc5/activation_mean': -162.22689819335938, 'layers/layer_fc2/activation_mean': -26.883211135864258, 'layers/layer_fc3/activation_mean': -14.033665657043457, 'layers/layer_fc1/activation_std': 40.35655975341797, 'layers/layer_fc4/activation_std': 40.35655975341797, 'layers/layer_fc5/activation_std': 40.35655975341797, 'layers/layer_fc2/activation_std': 18.789628982543945, 'layers/layer_fc3/activation_std': 13.676512718200684, 'layers/layer_fc1.weight_mean': 1.5473866369575262e-05, 'layers/layer_fc1.bias_mean': -1.7887834474095143e-05, 'layers/layer_fc2.weight_mean': -0.00011709203681675717, 'layers/layer_fc2.bias_mean': -0.00017746593221090734, 'layers/layer_fc3.weight_mean': -9.6314717666246e-05, 'layers/layer_fc3.bias_mean': -0.0003019513678736985, 'layers/layer_fc4.weight_mean': 7.599956006743014e-05, 'layers/layer_fc4.bias_mean': 7.583803380839527e-06, 'layers/layer_fc5.weight_mean': 1.14232533154901e-10, 'layers/layer_fc5.bias_mean': 0.0, 'layers/layer_fc1.weight_std': 0.0006393238436430693, 'layers/layer_fc1.bias_std': 0.0007226161542348564, 'layers/layer_fc2.weight_std': 0.01606857031583786, 'layers/layer_fc2.bias_std': 0.002762269927188754, 'layers/layer_fc3.weight_std': 0.009842080064117908, 
'layers/layer_fc3.bias_std': 0.0029760582838207483, 'layers/layer_fc4.weight_std': 0.008030076511204243, 'layers/layer_fc4.bias_std': 0.005211194045841694, 'layers/layer_fc5.weight_std': 0.02980860136449337, 'layers/layer_fc5.bias_std': 0.024154536426067352, 'grad_norm/fc1.weight': 0.2865002751350403, 'grad_norm/fc1.bias': 0.01154280360788107, 'grad_norm/fc2.weight': 3.1488263607025146, 'grad_norm/fc2.bias': 0.03378778323531151, 'grad_norm/fc3.weight': 1.2054210901260376, 'grad_norm/fc3.bias': 0.02976495958864689, 'grad_norm/fc4.weight': 0.6423847079277039, 'grad_norm/fc4.bias': 0.041362613439559937, 'grad_norm/fc5.weight': 0.7535152435302734, 'grad_norm/fc5.bias': 0.07246360927820206}\n", - "Testing complete. Loss: 0.6699, Accuracy: 78.28%\n" + "Epoch [1/5] Training complete. Loss: 0.8909, Accuracy: 68.81%\n", + "{'layers/layer_fc1/activation_mean': -28.729677200317383, 'layers/layer_fc4/activation_mean': -28.729677200317383, 'layers/layer_fc5/activation_mean': -28.729677200317383, 'layers/layer_fc2/activation_mean': -2.5397043228149414, 'layers/layer_fc3/activation_mean': -3.0052473545074463, 'layers/layer_fc1/activation_std': 10.17672348022461, 'layers/layer_fc4/activation_std': 10.17672348022461, 'layers/layer_fc5/activation_std': 10.17672348022461, 'layers/layer_fc2/activation_std': 2.3032867908477783, 'layers/layer_fc3/activation_std': 3.2448129653930664, 'layers/layer_fc1.weight_mean': -5.6409244280075654e-05, 'layers/layer_fc1.bias_mean': 8.269915269920602e-05, 'layers/layer_fc2.weight_mean': -2.23042461584555e-05, 'layers/layer_fc2.bias_mean': -4.0248556615551934e-05, 'layers/layer_fc3.weight_mean': -1.6235278962994926e-05, 'layers/layer_fc3.bias_mean': -8.13562364783138e-05, 'layers/layer_fc4.weight_mean': -7.563584222225472e-05, 'layers/layer_fc4.bias_mean': -0.00028862591716460884, 'layers/layer_fc5.weight_mean': 1.3050055480690048e-09, 'layers/layer_fc5.bias_mean': 1.862645149230957e-09, 'layers/layer_fc1.weight_std': 0.0022773402743041515, 
'layers/layer_fc1.bias_std': 0.0024427783209830523, 'layers/layer_fc2.weight_std': 0.0020020941738039255, 'layers/layer_fc2.bias_std': 0.0015959893353283405, 'layers/layer_fc3.weight_std': 0.0006584679940715432, 'layers/layer_fc3.bias_std': 0.0010985822882503271, 'layers/layer_fc4.weight_std': 0.0016911266138777137, 'layers/layer_fc4.bias_std': 0.0016698684776201844, 'layers/layer_fc5.weight_std': 0.016184095293283463, 'layers/layer_fc5.bias_std': 0.014144722372293472, 'grad_norm/fc1.weight': 1.020559310913086, 'grad_norm/fc1.bias': 0.03903047367930412, 'grad_norm/fc2.weight': 0.7248778939247131, 'grad_norm/fc2.bias': 0.036089323461055756, 'grad_norm/fc3.weight': 0.2384623885154724, 'grad_norm/fc3.bias': 0.01759118027985096, 'grad_norm/fc4.weight': 0.3064279556274414, 'grad_norm/fc4.bias': 0.019099673256278038, 'grad_norm/fc5.weight': 0.5787935256958008, 'grad_norm/fc5.bias': 0.042434170842170715}\n", + "Epoch [2/5] Training complete. Loss: 0.2732, Accuracy: 91.75%\n", + "{'layers/layer_fc1/activation_mean': -28.754409790039062, 'layers/layer_fc4/activation_mean': -28.754409790039062, 'layers/layer_fc5/activation_mean': -28.754409790039062, 'layers/layer_fc2/activation_mean': -3.4414615631103516, 'layers/layer_fc3/activation_mean': -3.730855703353882, 'layers/layer_fc1/activation_std': 10.266525268554688, 'layers/layer_fc4/activation_std': 10.266525268554688, 'layers/layer_fc5/activation_std': 10.266525268554688, 'layers/layer_fc2/activation_std': 2.995718479156494, 'layers/layer_fc3/activation_std': 3.8177525997161865, 'layers/layer_fc1.weight_mean': 7.965337863424793e-05, 'layers/layer_fc1.bias_mean': -9.780008986126631e-05, 'layers/layer_fc2.weight_mean': -1.2387715287331957e-05, 'layers/layer_fc2.bias_mean': 2.7172100089956075e-06, 'layers/layer_fc3.weight_mean': -1.2878872439614497e-05, 'layers/layer_fc3.bias_mean': -8.313333091791719e-05, 'layers/layer_fc4.weight_mean': -2.042674896074459e-05, 'layers/layer_fc4.bias_mean': -0.0001337243738817051, 
'layers/layer_fc5.weight_mean': 1.2068994414971002e-10, 'layers/layer_fc5.bias_mean': -3.2596289556430236e-10, 'layers/layer_fc1.weight_std': 0.0020513804629445076, 'layers/layer_fc1.bias_std': 0.0022429414093494415, 'layers/layer_fc2.weight_std': 0.0027040187269449234, 'layers/layer_fc2.bias_std': 0.0019432309782132506, 'layers/layer_fc3.weight_std': 0.0007950548897497356, 'layers/layer_fc3.bias_std': 0.0013046229723840952, 'layers/layer_fc4.weight_std': 0.002085754182189703, 'layers/layer_fc4.bias_std': 0.0016170400194823742, 'layers/layer_fc5.weight_std': 0.022946881130337715, 'layers/layer_fc5.bias_std': 0.01414299663156271, 'grad_norm/fc1.weight': 0.9197079539299011, 'grad_norm/fc1.bias': 0.035851072520017624, 'grad_norm/fc2.weight': 0.9789657592773438, 'grad_norm/fc2.bias': 0.043927378952503204, 'grad_norm/fc3.weight': 0.2878772020339966, 'grad_norm/fc3.bias': 0.02087557688355446, 'grad_norm/fc4.weight': 0.37757423520088196, 'grad_norm/fc4.bias': 0.018285810947418213, 'grad_norm/fc5.weight': 0.8206518888473511, 'grad_norm/fc5.bias': 0.04242899268865585}\n", + "Epoch [3/5] Training complete. 
Loss: 0.2083, Accuracy: 93.61%\n", + "{'layers/layer_fc1/activation_mean': -28.73628807067871, 'layers/layer_fc4/activation_mean': -28.73628807067871, 'layers/layer_fc5/activation_mean': -28.73628807067871, 'layers/layer_fc2/activation_mean': -4.065245151519775, 'layers/layer_fc3/activation_mean': -5.453749656677246, 'layers/layer_fc1/activation_std': 10.463916778564453, 'layers/layer_fc4/activation_std': 10.463916778564453, 'layers/layer_fc5/activation_std': 10.463916778564453, 'layers/layer_fc2/activation_std': 3.5451228618621826, 'layers/layer_fc3/activation_std': 5.318454742431641, 'layers/layer_fc1.weight_mean': 9.08636320673395e-06, 'layers/layer_fc1.bias_mean': -1.3908885193814058e-05, 'layers/layer_fc2.weight_mean': -2.5884002752718516e-05, 'layers/layer_fc2.bias_mean': -8.652423275634646e-05, 'layers/layer_fc3.weight_mean': -1.641714152356144e-05, 'layers/layer_fc3.bias_mean': -8.157885167747736e-05, 'layers/layer_fc4.weight_mean': -3.9947924960870296e-05, 'layers/layer_fc4.bias_mean': -0.00012426286411937326, 'layers/layer_fc5.weight_mean': 3.299284456748097e-10, 'layers/layer_fc5.bias_mean': 1.8626451769865326e-10, 'layers/layer_fc1.weight_std': 0.0009469661745242774, 'layers/layer_fc1.bias_std': 0.0010165267158299685, 'layers/layer_fc2.weight_std': 0.0015304741682484746, 'layers/layer_fc2.bias_std': 0.0007763532339595258, 'layers/layer_fc3.weight_std': 0.000502292939927429, 'layers/layer_fc3.bias_std': 0.0006498315487988293, 'layers/layer_fc4.weight_std': 0.0015387408202514052, 'layers/layer_fc4.bias_std': 0.0010592670878395438, 'layers/layer_fc5.weight_std': 0.016189604997634888, 'layers/layer_fc5.bias_std': 0.010295048356056213, 'grad_norm/fc1.weight': 0.4242599606513977, 'grad_norm/fc1.bias': 0.016234157606959343, 'grad_norm/fc2.weight': 0.5541679263114929, 'grad_norm/fc2.bias': 0.017658572643995285, 'grad_norm/fc3.weight': 0.18194584548473358, 'grad_norm/fc3.bias': 0.010458745993673801, 'grad_norm/fc4.weight': 0.27863141894340515, 
'grad_norm/fc4.bias': 0.012019835412502289, 'grad_norm/fc5.weight': 0.5789905786514282, 'grad_norm/fc5.bias': 0.03088514506816864}\n" + ] + }, + { + "ename": "KeyboardInterrupt", + "evalue": "", + "output_type": "error", + "traceback": [ + "\u001b[1;31m---------------------------------------------------------------------------\u001b[0m", + "\u001b[1;31mKeyboardInterrupt\u001b[0m Traceback (most recent call last)", + "Cell \u001b[1;32mIn[73], line 47\u001b[0m\n\u001b[0;32m 43\u001b[0m batch_accuracy \u001b[38;5;241m=\u001b[39m \u001b[38;5;241m100\u001b[39m \u001b[38;5;241m*\u001b[39m correct_preds \u001b[38;5;241m/\u001b[39m total_preds\n\u001b[0;32m 44\u001b[0m \u001b[38;5;66;03m# print(f\"Epoch [{epoch+1}/{num_epochs}], Step [{batch_idx+1}/{len(train_loader)}], Loss: {loss.item():.4f}, Accuracy: {batch_accuracy:.2f}%\")\u001b[39;00m\n\u001b[0;32m 45\u001b[0m \n\u001b[0;32m 46\u001b[0m \u001b[38;5;66;03m# Validation step per training step\u001b[39;00m\n\u001b[1;32m---> 47\u001b[0m val_loss, val_accuracy \u001b[38;5;241m=\u001b[39m \u001b[43mevaluate\u001b[49m\u001b[43m(\u001b[49m\u001b[43mmodel\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mval_loader\u001b[49m\u001b[43m)\u001b[49m \u001b[38;5;66;03m# Evaluate after each step\u001b[39;00m\n\u001b[0;32m 48\u001b[0m \u001b[38;5;66;03m# print(f\"Validation at step [{batch_idx+1}/{len(train_loader)}] - Loss: {val_loss:.4f}, Accuracy: {val_accuracy:.2f}%\")\u001b[39;00m\n\u001b[0;32m 50\u001b[0m run\u001b[38;5;241m.\u001b[39mlog_metrics(\n\u001b[0;32m 51\u001b[0m data \u001b[38;5;241m=\u001b[39m {\n\u001b[0;32m 52\u001b[0m \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mmetrics/train/loss\u001b[39m\u001b[38;5;124m\"\u001b[39m: loss\u001b[38;5;241m.\u001b[39mitem(),\n\u001b[1;32m (...)\u001b[0m\n\u001b[0;32m 58\u001b[0m step \u001b[38;5;241m=\u001b[39m step_counter\n\u001b[0;32m 59\u001b[0m )\n", + "Cell \u001b[1;32mIn[46], line 8\u001b[0m, in \u001b[0;36mevaluate\u001b[1;34m(model, data_loader, 
track_gradients)\u001b[0m\n\u001b[0;32m 6\u001b[0m epoch_loss \u001b[38;5;241m=\u001b[39m \u001b[38;5;241m0\u001b[39m\n\u001b[0;32m 7\u001b[0m \u001b[38;5;28;01mwith\u001b[39;00m torch\u001b[38;5;241m.\u001b[39mno_grad(): \u001b[38;5;66;03m# Disable gradient tracking during evaluation\u001b[39;00m\n\u001b[1;32m----> 8\u001b[0m \u001b[43m \u001b[49m\u001b[38;5;28;43;01mfor\u001b[39;49;00m\u001b[43m \u001b[49m\u001b[43mdata\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mtarget\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;129;43;01min\u001b[39;49;00m\u001b[43m \u001b[49m\u001b[43mdata_loader\u001b[49m\u001b[43m:\u001b[49m\n\u001b[0;32m 9\u001b[0m \u001b[43m \u001b[49m\u001b[38;5;66;43;03m# Forward pass (with gradient tracking if specified)\u001b[39;49;00m\n\u001b[0;32m 10\u001b[0m \u001b[43m \u001b[49m\u001b[43moutput\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43m \u001b[49m\u001b[43mmodel\u001b[49m\u001b[43m(\u001b[49m\u001b[43mdata\u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m 11\u001b[0m \u001b[43m \u001b[49m\u001b[43mloss\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43m \u001b[49m\u001b[43mcriterion\u001b[49m\u001b[43m(\u001b[49m\u001b[43moutput\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mtarget\u001b[49m\u001b[43m)\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;66;43;03m# Correct loss computation\u001b[39;49;00m\n", + "File \u001b[1;32mc:\\Users\\leo.breedt\\miniconda3\\envs\\neptune_scale_py_312_base\\Lib\\site-packages\\torch\\utils\\data\\dataloader.py:708\u001b[0m, in \u001b[0;36m_BaseDataLoaderIter.__next__\u001b[1;34m(self)\u001b[0m\n\u001b[0;32m 705\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_sampler_iter \u001b[38;5;129;01mis\u001b[39;00m \u001b[38;5;28;01mNone\u001b[39;00m:\n\u001b[0;32m 706\u001b[0m \u001b[38;5;66;03m# TODO(https://github.com/pytorch/pytorch/issues/76750)\u001b[39;00m\n\u001b[0;32m 707\u001b[0m 
\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_reset() \u001b[38;5;66;03m# type: ignore[call-arg]\u001b[39;00m\n\u001b[1;32m--> 708\u001b[0m data \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_next_data\u001b[49m\u001b[43m(\u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m 709\u001b[0m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_num_yielded \u001b[38;5;241m+\u001b[39m\u001b[38;5;241m=\u001b[39m \u001b[38;5;241m1\u001b[39m\n\u001b[0;32m 710\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m (\n\u001b[0;32m 711\u001b[0m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_dataset_kind \u001b[38;5;241m==\u001b[39m _DatasetKind\u001b[38;5;241m.\u001b[39mIterable\n\u001b[0;32m 712\u001b[0m \u001b[38;5;129;01mand\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_IterableDataset_len_called \u001b[38;5;129;01mis\u001b[39;00m \u001b[38;5;129;01mnot\u001b[39;00m \u001b[38;5;28;01mNone\u001b[39;00m\n\u001b[0;32m 713\u001b[0m \u001b[38;5;129;01mand\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_num_yielded \u001b[38;5;241m>\u001b[39m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_IterableDataset_len_called\n\u001b[0;32m 714\u001b[0m ):\n", + "File \u001b[1;32mc:\\Users\\leo.breedt\\miniconda3\\envs\\neptune_scale_py_312_base\\Lib\\site-packages\\torch\\utils\\data\\dataloader.py:764\u001b[0m, in \u001b[0;36m_SingleProcessDataLoaderIter._next_data\u001b[1;34m(self)\u001b[0m\n\u001b[0;32m 762\u001b[0m \u001b[38;5;28;01mdef\u001b[39;00m \u001b[38;5;21m_next_data\u001b[39m(\u001b[38;5;28mself\u001b[39m):\n\u001b[0;32m 763\u001b[0m index \u001b[38;5;241m=\u001b[39m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_next_index() \u001b[38;5;66;03m# may raise StopIteration\u001b[39;00m\n\u001b[1;32m--> 764\u001b[0m data \u001b[38;5;241m=\u001b[39m 
\u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_dataset_fetcher\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mfetch\u001b[49m\u001b[43m(\u001b[49m\u001b[43mindex\u001b[49m\u001b[43m)\u001b[49m \u001b[38;5;66;03m# may raise StopIteration\u001b[39;00m\n\u001b[0;32m 765\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_pin_memory:\n\u001b[0;32m 766\u001b[0m data \u001b[38;5;241m=\u001b[39m _utils\u001b[38;5;241m.\u001b[39mpin_memory\u001b[38;5;241m.\u001b[39mpin_memory(data, \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_pin_memory_device)\n", + "File \u001b[1;32mc:\\Users\\leo.breedt\\miniconda3\\envs\\neptune_scale_py_312_base\\Lib\\site-packages\\torch\\utils\\data\\_utils\\fetch.py:52\u001b[0m, in \u001b[0;36m_MapDatasetFetcher.fetch\u001b[1;34m(self, possibly_batched_index)\u001b[0m\n\u001b[0;32m 50\u001b[0m data \u001b[38;5;241m=\u001b[39m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mdataset\u001b[38;5;241m.\u001b[39m__getitems__(possibly_batched_index)\n\u001b[0;32m 51\u001b[0m \u001b[38;5;28;01melse\u001b[39;00m:\n\u001b[1;32m---> 52\u001b[0m data \u001b[38;5;241m=\u001b[39m [\u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mdataset\u001b[49m\u001b[43m[\u001b[49m\u001b[43midx\u001b[49m\u001b[43m]\u001b[49m \u001b[38;5;28;01mfor\u001b[39;00m idx \u001b[38;5;129;01min\u001b[39;00m possibly_batched_index]\n\u001b[0;32m 53\u001b[0m \u001b[38;5;28;01melse\u001b[39;00m:\n\u001b[0;32m 54\u001b[0m data \u001b[38;5;241m=\u001b[39m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mdataset[possibly_batched_index]\n", + "File \u001b[1;32mc:\\Users\\leo.breedt\\miniconda3\\envs\\neptune_scale_py_312_base\\Lib\\site-packages\\torchvision\\datasets\\mnist.py:146\u001b[0m, in \u001b[0;36mMNIST.__getitem__\u001b[1;34m(self, index)\u001b[0m\n\u001b[0;32m 143\u001b[0m img \u001b[38;5;241m=\u001b[39m 
Image\u001b[38;5;241m.\u001b[39mfromarray(img\u001b[38;5;241m.\u001b[39mnumpy(), mode\u001b[38;5;241m=\u001b[39m\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mL\u001b[39m\u001b[38;5;124m\"\u001b[39m)\n\u001b[0;32m 145\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mtransform \u001b[38;5;129;01mis\u001b[39;00m \u001b[38;5;129;01mnot\u001b[39;00m \u001b[38;5;28;01mNone\u001b[39;00m:\n\u001b[1;32m--> 146\u001b[0m img \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mtransform\u001b[49m\u001b[43m(\u001b[49m\u001b[43mimg\u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m 148\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mtarget_transform \u001b[38;5;129;01mis\u001b[39;00m \u001b[38;5;129;01mnot\u001b[39;00m \u001b[38;5;28;01mNone\u001b[39;00m:\n\u001b[0;32m 149\u001b[0m target \u001b[38;5;241m=\u001b[39m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mtarget_transform(target)\n", + "File \u001b[1;32mc:\\Users\\leo.breedt\\miniconda3\\envs\\neptune_scale_py_312_base\\Lib\\site-packages\\torchvision\\transforms\\transforms.py:95\u001b[0m, in \u001b[0;36mCompose.__call__\u001b[1;34m(self, img)\u001b[0m\n\u001b[0;32m 93\u001b[0m \u001b[38;5;28;01mdef\u001b[39;00m \u001b[38;5;21m__call__\u001b[39m(\u001b[38;5;28mself\u001b[39m, img):\n\u001b[0;32m 94\u001b[0m \u001b[38;5;28;01mfor\u001b[39;00m t \u001b[38;5;129;01min\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mtransforms:\n\u001b[1;32m---> 95\u001b[0m img \u001b[38;5;241m=\u001b[39m \u001b[43mt\u001b[49m\u001b[43m(\u001b[49m\u001b[43mimg\u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m 96\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m img\n", + "File \u001b[1;32mc:\\Users\\leo.breedt\\miniconda3\\envs\\neptune_scale_py_312_base\\Lib\\site-packages\\torch\\nn\\modules\\module.py:1739\u001b[0m, in 
\u001b[0;36mModule._wrapped_call_impl\u001b[1;34m(self, *args, **kwargs)\u001b[0m\n\u001b[0;32m 1737\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_compiled_call_impl(\u001b[38;5;241m*\u001b[39margs, \u001b[38;5;241m*\u001b[39m\u001b[38;5;241m*\u001b[39mkwargs) \u001b[38;5;66;03m# type: ignore[misc]\u001b[39;00m\n\u001b[0;32m 1738\u001b[0m \u001b[38;5;28;01melse\u001b[39;00m:\n\u001b[1;32m-> 1739\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_call_impl\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n", + "File \u001b[1;32mc:\\Users\\leo.breedt\\miniconda3\\envs\\neptune_scale_py_312_base\\Lib\\site-packages\\torch\\nn\\modules\\module.py:1750\u001b[0m, in \u001b[0;36mModule._call_impl\u001b[1;34m(self, *args, **kwargs)\u001b[0m\n\u001b[0;32m 1745\u001b[0m \u001b[38;5;66;03m# If we don't have any hooks, we want to skip the rest of the logic in\u001b[39;00m\n\u001b[0;32m 1746\u001b[0m \u001b[38;5;66;03m# this function, and just call forward.\u001b[39;00m\n\u001b[0;32m 1747\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;129;01mnot\u001b[39;00m (\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_backward_hooks \u001b[38;5;129;01mor\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_backward_pre_hooks \u001b[38;5;129;01mor\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_forward_hooks \u001b[38;5;129;01mor\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_forward_pre_hooks\n\u001b[0;32m 1748\u001b[0m \u001b[38;5;129;01mor\u001b[39;00m _global_backward_pre_hooks \u001b[38;5;129;01mor\u001b[39;00m _global_backward_hooks\n\u001b[0;32m 1749\u001b[0m 
\u001b[38;5;129;01mor\u001b[39;00m _global_forward_hooks \u001b[38;5;129;01mor\u001b[39;00m _global_forward_pre_hooks):\n\u001b[1;32m-> 1750\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[43mforward_call\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m 1752\u001b[0m result \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;01mNone\u001b[39;00m\n\u001b[0;32m 1753\u001b[0m called_always_called_hooks \u001b[38;5;241m=\u001b[39m \u001b[38;5;28mset\u001b[39m()\n", + "File \u001b[1;32mc:\\Users\\leo.breedt\\miniconda3\\envs\\neptune_scale_py_312_base\\Lib\\site-packages\\torchvision\\transforms\\transforms.py:277\u001b[0m, in \u001b[0;36mNormalize.forward\u001b[1;34m(self, tensor)\u001b[0m\n\u001b[0;32m 269\u001b[0m \u001b[38;5;28;01mdef\u001b[39;00m \u001b[38;5;21mforward\u001b[39m(\u001b[38;5;28mself\u001b[39m, tensor: Tensor) \u001b[38;5;241m-\u001b[39m\u001b[38;5;241m>\u001b[39m Tensor:\n\u001b[0;32m 270\u001b[0m \u001b[38;5;250m \u001b[39m\u001b[38;5;124;03m\"\"\"\u001b[39;00m\n\u001b[0;32m 271\u001b[0m \u001b[38;5;124;03m Args:\u001b[39;00m\n\u001b[0;32m 272\u001b[0m \u001b[38;5;124;03m tensor (Tensor): Tensor image to be normalized.\u001b[39;00m\n\u001b[1;32m (...)\u001b[0m\n\u001b[0;32m 275\u001b[0m \u001b[38;5;124;03m Tensor: Normalized Tensor image.\u001b[39;00m\n\u001b[0;32m 276\u001b[0m \u001b[38;5;124;03m \"\"\"\u001b[39;00m\n\u001b[1;32m--> 277\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[43mF\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mnormalize\u001b[49m\u001b[43m(\u001b[49m\u001b[43mtensor\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mmean\u001b[49m\u001b[43m,\u001b[49m\u001b[43m 
\u001b[49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mstd\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43minplace\u001b[49m\u001b[43m)\u001b[49m\n", + "File \u001b[1;32mc:\\Users\\leo.breedt\\miniconda3\\envs\\neptune_scale_py_312_base\\Lib\\site-packages\\torchvision\\transforms\\functional.py:350\u001b[0m, in \u001b[0;36mnormalize\u001b[1;34m(tensor, mean, std, inplace)\u001b[0m\n\u001b[0;32m 347\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;129;01mnot\u001b[39;00m \u001b[38;5;28misinstance\u001b[39m(tensor, torch\u001b[38;5;241m.\u001b[39mTensor):\n\u001b[0;32m 348\u001b[0m \u001b[38;5;28;01mraise\u001b[39;00m \u001b[38;5;167;01mTypeError\u001b[39;00m(\u001b[38;5;124mf\u001b[39m\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mimg should be Tensor Image. Got \u001b[39m\u001b[38;5;132;01m{\u001b[39;00m\u001b[38;5;28mtype\u001b[39m(tensor)\u001b[38;5;132;01m}\u001b[39;00m\u001b[38;5;124m\"\u001b[39m)\n\u001b[1;32m--> 350\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[43mF_t\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mnormalize\u001b[49m\u001b[43m(\u001b[49m\u001b[43mtensor\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mmean\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mmean\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mstd\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mstd\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43minplace\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43minplace\u001b[49m\u001b[43m)\u001b[49m\n", + "File \u001b[1;32mc:\\Users\\leo.breedt\\miniconda3\\envs\\neptune_scale_py_312_base\\Lib\\site-packages\\torchvision\\transforms\\_functional_tensor.py:920\u001b[0m, in \u001b[0;36mnormalize\u001b[1;34m(tensor, mean, std, inplace)\u001b[0m\n\u001b[0;32m 917\u001b[0m tensor \u001b[38;5;241m=\u001b[39m tensor\u001b[38;5;241m.\u001b[39mclone()\n\u001b[0;32m 
919\u001b[0m dtype \u001b[38;5;241m=\u001b[39m tensor\u001b[38;5;241m.\u001b[39mdtype\n\u001b[1;32m--> 920\u001b[0m mean \u001b[38;5;241m=\u001b[39m \u001b[43mtorch\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mas_tensor\u001b[49m\u001b[43m(\u001b[49m\u001b[43mmean\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mdtype\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mdtype\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mdevice\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mtensor\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mdevice\u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m 921\u001b[0m std \u001b[38;5;241m=\u001b[39m torch\u001b[38;5;241m.\u001b[39mas_tensor(std, dtype\u001b[38;5;241m=\u001b[39mdtype, device\u001b[38;5;241m=\u001b[39mtensor\u001b[38;5;241m.\u001b[39mdevice)\n\u001b[0;32m 922\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m (std \u001b[38;5;241m==\u001b[39m \u001b[38;5;241m0\u001b[39m)\u001b[38;5;241m.\u001b[39many():\n", + "\u001b[1;31mKeyboardInterrupt\u001b[0m: " ] } ], @@ -377,7 +398,7 @@ " )\n", " \n", " # Print loss and accuracy for the entire training epoch\n", - " train_accuracy = 100 * correct_preds / total_preds\n", + " train_accuracy = correct_preds / total_preds\n", " print(f\"Epoch [{epoch+1}/{num_epochs}] Training complete. 
Loss: {epoch_loss / len(train_loader):.4f}, Accuracy: {train_accuracy:.2f}%\")\n", "\n", " # Track activations\n", @@ -424,7 +445,7 @@ }, { "cell_type": "code", - "execution_count": 35, + "execution_count": 74, "metadata": {}, "outputs": [ { From 3e2d453a4a61ddfcbc3eda2fe9af17b4d74d8c53 Mon Sep 17 00:00:00 2001 From: LeoRoccoBreedt Date: Fri, 28 Feb 2025 08:32:59 +0100 Subject: [PATCH 012/125] refactor: update model architecture layers, accuracy calculation and calculate gradient norms for batch (step) rather than epoch --- .../pytorch_image_classification.ipynb | 51 ++++++++++--------- 1 file changed, 28 insertions(+), 23 deletions(-) diff --git a/integrations-and-supported-tools/pytorch/pytorch_image_classification.ipynb b/integrations-and-supported-tools/pytorch/pytorch_image_classification.ipynb index 69f68c7..c36f7bd 100644 --- a/integrations-and-supported-tools/pytorch/pytorch_image_classification.ipynb +++ b/integrations-and-supported-tools/pytorch/pytorch_image_classification.ipynb @@ -57,7 +57,10 @@ "# TODO - look at CNN layers\n", "# TODO - output and log the model architecture\n", "# TODO - check loss and accuracy calculations\n", - "# TODO - clean up the evaluation function to exclude tracking gradients" + "# TODO - clean up the evaluation function to exclude tracking gradients\n", + "# TODO - do not use group tags\n", + "# TODO - track the input features\n", + "# TODO - clean the training loop of commented out code that is unused" ] }, { @@ -84,7 +87,7 @@ }, { "cell_type": "code", - "execution_count": 71, + "execution_count": 91, "metadata": {}, "outputs": [], "source": [ @@ -134,7 +137,7 @@ }, { "cell_type": "code", - "execution_count": 70, + "execution_count": 92, "metadata": {}, "outputs": [ { @@ -223,7 +226,7 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 80, "metadata": {}, "outputs": [], "source": [ @@ -262,7 +265,7 @@ }, { "cell_type": "code", - "execution_count": 72, + "execution_count": 93, "metadata": {}, 
"outputs": [], "source": [ @@ -298,19 +301,19 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 94, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ - "Epoch [1/5] Training complete. Loss: 0.8909, Accuracy: 68.81%\n", - "{'layers/layer_fc1/activation_mean': -28.729677200317383, 'layers/layer_fc4/activation_mean': -28.729677200317383, 'layers/layer_fc5/activation_mean': -28.729677200317383, 'layers/layer_fc2/activation_mean': -2.5397043228149414, 'layers/layer_fc3/activation_mean': -3.0052473545074463, 'layers/layer_fc1/activation_std': 10.17672348022461, 'layers/layer_fc4/activation_std': 10.17672348022461, 'layers/layer_fc5/activation_std': 10.17672348022461, 'layers/layer_fc2/activation_std': 2.3032867908477783, 'layers/layer_fc3/activation_std': 3.2448129653930664, 'layers/layer_fc1.weight_mean': -5.6409244280075654e-05, 'layers/layer_fc1.bias_mean': 8.269915269920602e-05, 'layers/layer_fc2.weight_mean': -2.23042461584555e-05, 'layers/layer_fc2.bias_mean': -4.0248556615551934e-05, 'layers/layer_fc3.weight_mean': -1.6235278962994926e-05, 'layers/layer_fc3.bias_mean': -8.13562364783138e-05, 'layers/layer_fc4.weight_mean': -7.563584222225472e-05, 'layers/layer_fc4.bias_mean': -0.00028862591716460884, 'layers/layer_fc5.weight_mean': 1.3050055480690048e-09, 'layers/layer_fc5.bias_mean': 1.862645149230957e-09, 'layers/layer_fc1.weight_std': 0.0022773402743041515, 'layers/layer_fc1.bias_std': 0.0024427783209830523, 'layers/layer_fc2.weight_std': 0.0020020941738039255, 'layers/layer_fc2.bias_std': 0.0015959893353283405, 'layers/layer_fc3.weight_std': 0.0006584679940715432, 'layers/layer_fc3.bias_std': 0.0010985822882503271, 'layers/layer_fc4.weight_std': 0.0016911266138777137, 'layers/layer_fc4.bias_std': 0.0016698684776201844, 'layers/layer_fc5.weight_std': 0.016184095293283463, 'layers/layer_fc5.bias_std': 0.014144722372293472, 'grad_norm/fc1.weight': 1.020559310913086, 'grad_norm/fc1.bias': 
0.03903047367930412, 'grad_norm/fc2.weight': 0.7248778939247131, 'grad_norm/fc2.bias': 0.036089323461055756, 'grad_norm/fc3.weight': 0.2384623885154724, 'grad_norm/fc3.bias': 0.01759118027985096, 'grad_norm/fc4.weight': 0.3064279556274414, 'grad_norm/fc4.bias': 0.019099673256278038, 'grad_norm/fc5.weight': 0.5787935256958008, 'grad_norm/fc5.bias': 0.042434170842170715}\n", - "Epoch [2/5] Training complete. Loss: 0.2732, Accuracy: 91.75%\n", - "{'layers/layer_fc1/activation_mean': -28.754409790039062, 'layers/layer_fc4/activation_mean': -28.754409790039062, 'layers/layer_fc5/activation_mean': -28.754409790039062, 'layers/layer_fc2/activation_mean': -3.4414615631103516, 'layers/layer_fc3/activation_mean': -3.730855703353882, 'layers/layer_fc1/activation_std': 10.266525268554688, 'layers/layer_fc4/activation_std': 10.266525268554688, 'layers/layer_fc5/activation_std': 10.266525268554688, 'layers/layer_fc2/activation_std': 2.995718479156494, 'layers/layer_fc3/activation_std': 3.8177525997161865, 'layers/layer_fc1.weight_mean': 7.965337863424793e-05, 'layers/layer_fc1.bias_mean': -9.780008986126631e-05, 'layers/layer_fc2.weight_mean': -1.2387715287331957e-05, 'layers/layer_fc2.bias_mean': 2.7172100089956075e-06, 'layers/layer_fc3.weight_mean': -1.2878872439614497e-05, 'layers/layer_fc3.bias_mean': -8.313333091791719e-05, 'layers/layer_fc4.weight_mean': -2.042674896074459e-05, 'layers/layer_fc4.bias_mean': -0.0001337243738817051, 'layers/layer_fc5.weight_mean': 1.2068994414971002e-10, 'layers/layer_fc5.bias_mean': -3.2596289556430236e-10, 'layers/layer_fc1.weight_std': 0.0020513804629445076, 'layers/layer_fc1.bias_std': 0.0022429414093494415, 'layers/layer_fc2.weight_std': 0.0027040187269449234, 'layers/layer_fc2.bias_std': 0.0019432309782132506, 'layers/layer_fc3.weight_std': 0.0007950548897497356, 'layers/layer_fc3.bias_std': 0.0013046229723840952, 'layers/layer_fc4.weight_std': 0.002085754182189703, 'layers/layer_fc4.bias_std': 0.0016170400194823742, 
'layers/layer_fc5.weight_std': 0.022946881130337715, 'layers/layer_fc5.bias_std': 0.01414299663156271, 'grad_norm/fc1.weight': 0.9197079539299011, 'grad_norm/fc1.bias': 0.035851072520017624, 'grad_norm/fc2.weight': 0.9789657592773438, 'grad_norm/fc2.bias': 0.043927378952503204, 'grad_norm/fc3.weight': 0.2878772020339966, 'grad_norm/fc3.bias': 0.02087557688355446, 'grad_norm/fc4.weight': 0.37757423520088196, 'grad_norm/fc4.bias': 0.018285810947418213, 'grad_norm/fc5.weight': 0.8206518888473511, 'grad_norm/fc5.bias': 0.04242899268865585}\n", - "Epoch [3/5] Training complete. Loss: 0.2083, Accuracy: 93.61%\n", - "{'layers/layer_fc1/activation_mean': -28.73628807067871, 'layers/layer_fc4/activation_mean': -28.73628807067871, 'layers/layer_fc5/activation_mean': -28.73628807067871, 'layers/layer_fc2/activation_mean': -4.065245151519775, 'layers/layer_fc3/activation_mean': -5.453749656677246, 'layers/layer_fc1/activation_std': 10.463916778564453, 'layers/layer_fc4/activation_std': 10.463916778564453, 'layers/layer_fc5/activation_std': 10.463916778564453, 'layers/layer_fc2/activation_std': 3.5451228618621826, 'layers/layer_fc3/activation_std': 5.318454742431641, 'layers/layer_fc1.weight_mean': 9.08636320673395e-06, 'layers/layer_fc1.bias_mean': -1.3908885193814058e-05, 'layers/layer_fc2.weight_mean': -2.5884002752718516e-05, 'layers/layer_fc2.bias_mean': -8.652423275634646e-05, 'layers/layer_fc3.weight_mean': -1.641714152356144e-05, 'layers/layer_fc3.bias_mean': -8.157885167747736e-05, 'layers/layer_fc4.weight_mean': -3.9947924960870296e-05, 'layers/layer_fc4.bias_mean': -0.00012426286411937326, 'layers/layer_fc5.weight_mean': 3.299284456748097e-10, 'layers/layer_fc5.bias_mean': 1.8626451769865326e-10, 'layers/layer_fc1.weight_std': 0.0009469661745242774, 'layers/layer_fc1.bias_std': 0.0010165267158299685, 'layers/layer_fc2.weight_std': 0.0015304741682484746, 'layers/layer_fc2.bias_std': 0.0007763532339595258, 'layers/layer_fc3.weight_std': 0.000502292939927429, 
'layers/layer_fc3.bias_std': 0.0006498315487988293, 'layers/layer_fc4.weight_std': 0.0015387408202514052, 'layers/layer_fc4.bias_std': 0.0010592670878395438, 'layers/layer_fc5.weight_std': 0.016189604997634888, 'layers/layer_fc5.bias_std': 0.010295048356056213, 'grad_norm/fc1.weight': 0.4242599606513977, 'grad_norm/fc1.bias': 0.016234157606959343, 'grad_norm/fc2.weight': 0.5541679263114929, 'grad_norm/fc2.bias': 0.017658572643995285, 'grad_norm/fc3.weight': 0.18194584548473358, 'grad_norm/fc3.bias': 0.010458745993673801, 'grad_norm/fc4.weight': 0.27863141894340515, 'grad_norm/fc4.bias': 0.012019835412502289, 'grad_norm/fc5.weight': 0.5789905786514282, 'grad_norm/fc5.bias': 0.03088514506816864}\n" + "Epoch [1/5] Training complete. Loss: 0.9066, Accuracy: 0.69%\n", + "{'layers/layer_fc1/activation_mean': -27.44853973388672, 'layers/layer_fc4/activation_mean': -27.44853973388672, 'layers/layer_fc5/activation_mean': -27.44853973388672, 'layers/layer_fc2/activation_mean': -3.266420602798462, 'layers/layer_fc3/activation_mean': -3.977348804473877, 'layers/layer_fc1/activation_std': 9.884493827819824, 'layers/layer_fc4/activation_std': 9.884493827819824, 'layers/layer_fc5/activation_std': 9.884493827819824, 'layers/layer_fc2/activation_std': 2.8974952697753906, 'layers/layer_fc3/activation_std': 3.9147353172302246, 'layers/layer_fc1.weight_mean': 1.759609585860744e-05, 'layers/layer_fc1.bias_mean': -2.6617166440701112e-05, 'layers/layer_fc2.weight_mean': 1.845465158112347e-05, 'layers/layer_fc2.bias_mean': 6.048093564459123e-05, 'layers/layer_fc3.weight_mean': 6.554154424520675e-06, 'layers/layer_fc3.bias_mean': 4.6133769501466304e-05, 'layers/layer_fc4.weight_mean': -1.4562309843313415e-05, 'layers/layer_fc4.bias_mean': 1.2838396287406795e-05, 'layers/layer_fc5.weight_mean': 4.2628095497931895e-10, 'layers/layer_fc5.bias_mean': -9.313225884932663e-11, 'layers/layer_fc1.weight_std': 0.0014361083740368485, 'layers/layer_fc1.bias_std': 0.0014619147405028343, 
'layers/layer_fc2.weight_std': 0.001974125625565648, 'layers/layer_fc2.bias_std': 0.0011026636930182576, 'layers/layer_fc3.weight_std': 0.0008759713382460177, 'layers/layer_fc3.bias_std': 0.0011844916734844446, 'layers/layer_fc4.weight_std': 0.002118356991559267, 'layers/layer_fc4.bias_std': 0.00178423966281116, 'layers/layer_fc5.weight_std': 0.021144278347492218, 'layers/layer_fc5.bias_std': 0.016225792467594147, 'grad_norm/fc1.weight': 0.6434243321418762, 'grad_norm/fc1.bias': 0.023348789662122726, 'grad_norm/fc2.weight': 0.714738130569458, 'grad_norm/fc2.bias': 0.02496359497308731, 'grad_norm/fc3.weight': 0.31714311242103577, 'grad_norm/fc3.bias': 0.018929213285446167, 'grad_norm/fc4.weight': 0.38346678018569946, 'grad_norm/fc4.bias': 0.02010788396000862, 'grad_norm/fc5.weight': 0.7561851143836975, 'grad_norm/fc5.bias': 0.04867737740278244}\n", + "Epoch [2/5] Training complete. Loss: 0.2738, Accuracy: 0.92%\n", + "{'layers/layer_fc1/activation_mean': -27.444847106933594, 'layers/layer_fc4/activation_mean': -27.444847106933594, 'layers/layer_fc5/activation_mean': -27.444847106933594, 'layers/layer_fc2/activation_mean': -4.043460845947266, 'layers/layer_fc3/activation_mean': -4.557342529296875, 'layers/layer_fc1/activation_std': 9.927286148071289, 'layers/layer_fc4/activation_std': 9.927286148071289, 'layers/layer_fc5/activation_std': 9.927286148071289, 'layers/layer_fc2/activation_std': 3.500694990158081, 'layers/layer_fc3/activation_std': 4.5128021240234375, 'layers/layer_fc1.weight_mean': 3.658090008684667e-07, 'layers/layer_fc1.bias_mean': 3.3925662137335166e-06, 'layers/layer_fc2.weight_mean': -2.423007288143708e-07, 'layers/layer_fc2.bias_mean': 1.9860681277350523e-05, 'layers/layer_fc3.weight_mean': 5.834312105434947e-06, 'layers/layer_fc3.bias_mean': -6.391452188836411e-06, 'layers/layer_fc4.weight_mean': 7.309272405109368e-06, 'layers/layer_fc4.bias_mean': 3.118724634987302e-06, 'layers/layer_fc5.weight_mean': 3.8910458188823327e-10, 
'layers/layer_fc5.bias_mean': 1.3038515822572094e-09, 'layers/layer_fc1.weight_std': 0.0011528775794431567, 'layers/layer_fc1.bias_std': 0.0012104393681511283, 'layers/layer_fc2.weight_std': 0.0022255387157201767, 'layers/layer_fc2.bias_std': 0.0010018462780863047, 'layers/layer_fc3.weight_std': 0.0007322281016968191, 'layers/layer_fc3.bias_std': 0.0008695347351022065, 'layers/layer_fc4.weight_std': 0.0019773344974964857, 'layers/layer_fc4.bias_std': 0.0012858008267357945, 'layers/layer_fc5.weight_std': 0.015919363126158714, 'layers/layer_fc5.bias_std': 0.011463840492069721, 'grad_norm/fc1.weight': 0.5164875984191895, 'grad_norm/fc1.bias': 0.019329242408275604, 'grad_norm/fc2.weight': 0.8057278990745544, 'grad_norm/fc2.bias': 0.02265150472521782, 'grad_norm/fc3.weight': 0.26510223746299744, 'grad_norm/fc3.bias': 0.013885731808841228, 'grad_norm/fc4.weight': 0.3579327464103699, 'grad_norm/fc4.bias': 0.014490283094346523, 'grad_norm/fc5.weight': 0.5693259239196777, 'grad_norm/fc5.bias': 0.03439152240753174}\n", + "Epoch [3/5] Training complete. 
Loss: 0.2052, Accuracy: 0.94%\n", + "{'layers/layer_fc1/activation_mean': -27.409822463989258, 'layers/layer_fc4/activation_mean': -27.409822463989258, 'layers/layer_fc5/activation_mean': -27.409822463989258, 'layers/layer_fc2/activation_mean': -4.827888011932373, 'layers/layer_fc3/activation_mean': -5.646310329437256, 'layers/layer_fc1/activation_std': 10.087625503540039, 'layers/layer_fc4/activation_std': 10.087625503540039, 'layers/layer_fc5/activation_std': 10.087625503540039, 'layers/layer_fc2/activation_std': 4.256272792816162, 'layers/layer_fc3/activation_std': 5.8036675453186035, 'layers/layer_fc1.weight_mean': -1.7095830116886646e-05, 'layers/layer_fc1.bias_mean': 2.0519555619102903e-05, 'layers/layer_fc2.weight_mean': -1.0371921234764159e-05, 'layers/layer_fc2.bias_mean': -2.6764431822812185e-05, 'layers/layer_fc3.weight_mean': 1.1725012427632464e-07, 'layers/layer_fc3.bias_mean': -3.2187872420763597e-06, 'layers/layer_fc4.weight_mean': -4.269544206181308e-06, 'layers/layer_fc4.bias_mean': 2.4562015823903494e-05, 'layers/layer_fc5.weight_mean': 5.096634225765229e-10, 'layers/layer_fc5.bias_mean': 1.3038515822572094e-09, 'layers/layer_fc1.weight_std': 0.0010182075202465057, 'layers/layer_fc1.bias_std': 0.0011107329046353698, 'layers/layer_fc2.weight_std': 0.0014369278214871883, 'layers/layer_fc2.bias_std': 0.0006925289635546505, 'layers/layer_fc3.weight_std': 0.0004932511947117746, 'layers/layer_fc3.bias_std': 0.0006599067128263414, 'layers/layer_fc4.weight_std': 0.0011504496214911342, 'layers/layer_fc4.bias_std': 0.001063313102349639, 'layers/layer_fc5.weight_std': 0.010835869237780571, 'layers/layer_fc5.bias_std': 0.010039512068033218, 'grad_norm/fc1.weight': 0.4562200903892517, 'grad_norm/fc1.bias': 0.017740018665790558, 'grad_norm/fc2.weight': 0.520235002040863, 'grad_norm/fc2.bias': 0.01566654071211815, 'grad_norm/fc3.weight': 0.1785753071308136, 'grad_norm/fc3.bias': 0.010537991300225258, 'grad_norm/fc4.weight': 0.20825187861919403, 
'grad_norm/fc4.bias': 0.011986151337623596, 'grad_norm/fc5.weight': 0.3875243663787842, 'grad_norm/fc5.bias': 0.030118538066744804}\n" ] }, { @@ -320,18 +323,15 @@ "traceback": [ "\u001b[1;31m---------------------------------------------------------------------------\u001b[0m", "\u001b[1;31mKeyboardInterrupt\u001b[0m Traceback (most recent call last)", - "Cell \u001b[1;32mIn[73], line 47\u001b[0m\n\u001b[0;32m 43\u001b[0m batch_accuracy \u001b[38;5;241m=\u001b[39m \u001b[38;5;241m100\u001b[39m \u001b[38;5;241m*\u001b[39m correct_preds \u001b[38;5;241m/\u001b[39m total_preds\n\u001b[0;32m 44\u001b[0m \u001b[38;5;66;03m# print(f\"Epoch [{epoch+1}/{num_epochs}], Step [{batch_idx+1}/{len(train_loader)}], Loss: {loss.item():.4f}, Accuracy: {batch_accuracy:.2f}%\")\u001b[39;00m\n\u001b[0;32m 45\u001b[0m \n\u001b[0;32m 46\u001b[0m \u001b[38;5;66;03m# Validation step per training step\u001b[39;00m\n\u001b[1;32m---> 47\u001b[0m val_loss, val_accuracy \u001b[38;5;241m=\u001b[39m \u001b[43mevaluate\u001b[49m\u001b[43m(\u001b[49m\u001b[43mmodel\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mval_loader\u001b[49m\u001b[43m)\u001b[49m \u001b[38;5;66;03m# Evaluate after each step\u001b[39;00m\n\u001b[0;32m 48\u001b[0m \u001b[38;5;66;03m# print(f\"Validation at step [{batch_idx+1}/{len(train_loader)}] - Loss: {val_loss:.4f}, Accuracy: {val_accuracy:.2f}%\")\u001b[39;00m\n\u001b[0;32m 50\u001b[0m run\u001b[38;5;241m.\u001b[39mlog_metrics(\n\u001b[0;32m 51\u001b[0m data \u001b[38;5;241m=\u001b[39m {\n\u001b[0;32m 52\u001b[0m \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mmetrics/train/loss\u001b[39m\u001b[38;5;124m\"\u001b[39m: loss\u001b[38;5;241m.\u001b[39mitem(),\n\u001b[1;32m (...)\u001b[0m\n\u001b[0;32m 58\u001b[0m step \u001b[38;5;241m=\u001b[39m step_counter\n\u001b[0;32m 59\u001b[0m )\n", - "Cell \u001b[1;32mIn[46], line 8\u001b[0m, in \u001b[0;36mevaluate\u001b[1;34m(model, data_loader, track_gradients)\u001b[0m\n\u001b[0;32m 6\u001b[0m epoch_loss 
\u001b[38;5;241m=\u001b[39m \u001b[38;5;241m0\u001b[39m\n\u001b[0;32m 7\u001b[0m \u001b[38;5;28;01mwith\u001b[39;00m torch\u001b[38;5;241m.\u001b[39mno_grad(): \u001b[38;5;66;03m# Disable gradient tracking during evaluation\u001b[39;00m\n\u001b[1;32m----> 8\u001b[0m \u001b[43m \u001b[49m\u001b[38;5;28;43;01mfor\u001b[39;49;00m\u001b[43m \u001b[49m\u001b[43mdata\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mtarget\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;129;43;01min\u001b[39;49;00m\u001b[43m \u001b[49m\u001b[43mdata_loader\u001b[49m\u001b[43m:\u001b[49m\n\u001b[0;32m 9\u001b[0m \u001b[43m \u001b[49m\u001b[38;5;66;43;03m# Forward pass (with gradient tracking if specified)\u001b[39;49;00m\n\u001b[0;32m 10\u001b[0m \u001b[43m \u001b[49m\u001b[43moutput\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43m \u001b[49m\u001b[43mmodel\u001b[49m\u001b[43m(\u001b[49m\u001b[43mdata\u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m 11\u001b[0m \u001b[43m \u001b[49m\u001b[43mloss\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43m \u001b[49m\u001b[43mcriterion\u001b[49m\u001b[43m(\u001b[49m\u001b[43moutput\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mtarget\u001b[49m\u001b[43m)\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;66;43;03m# Correct loss computation\u001b[39;49;00m\n", + "Cell \u001b[1;32mIn[94], line 47\u001b[0m\n\u001b[0;32m 43\u001b[0m batch_accuracy \u001b[38;5;241m=\u001b[39m correct_preds \u001b[38;5;241m/\u001b[39m total_preds\n\u001b[0;32m 44\u001b[0m \u001b[38;5;66;03m# print(f\"Epoch [{epoch+1}/{num_epochs}], Step [{batch_idx+1}/{len(train_loader)}], Loss: {loss.item():.4f}, Accuracy: {batch_accuracy:.2f}%\")\u001b[39;00m\n\u001b[0;32m 45\u001b[0m \n\u001b[0;32m 46\u001b[0m \u001b[38;5;66;03m# Validation step per training step\u001b[39;00m\n\u001b[1;32m---> 47\u001b[0m val_loss, val_accuracy \u001b[38;5;241m=\u001b[39m 
\u001b[43mevaluate\u001b[49m\u001b[43m(\u001b[49m\u001b[43mmodel\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mval_loader\u001b[49m\u001b[43m)\u001b[49m \u001b[38;5;66;03m# Evaluate after each step\u001b[39;00m\n\u001b[0;32m 48\u001b[0m \u001b[38;5;66;03m# print(f\"Validation at step [{batch_idx+1}/{len(train_loader)}] - Loss: {val_loss:.4f}, Accuracy: {val_accuracy:.2f}%\")\u001b[39;00m\n\u001b[0;32m 50\u001b[0m \u001b[38;5;28;01mfor\u001b[39;00m name, param \u001b[38;5;129;01min\u001b[39;00m model\u001b[38;5;241m.\u001b[39mnamed_parameters():\n", + "Cell \u001b[1;32mIn[80], line 8\u001b[0m, in \u001b[0;36mevaluate\u001b[1;34m(model, data_loader, track_gradients)\u001b[0m\n\u001b[0;32m 6\u001b[0m epoch_loss \u001b[38;5;241m=\u001b[39m \u001b[38;5;241m0\u001b[39m\n\u001b[0;32m 7\u001b[0m \u001b[38;5;28;01mwith\u001b[39;00m torch\u001b[38;5;241m.\u001b[39mno_grad(): \u001b[38;5;66;03m# Disable gradient tracking during evaluation\u001b[39;00m\n\u001b[1;32m----> 8\u001b[0m \u001b[43m \u001b[49m\u001b[38;5;28;43;01mfor\u001b[39;49;00m\u001b[43m \u001b[49m\u001b[43mdata\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mtarget\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;129;43;01min\u001b[39;49;00m\u001b[43m \u001b[49m\u001b[43mdata_loader\u001b[49m\u001b[43m:\u001b[49m\n\u001b[0;32m 9\u001b[0m \u001b[43m \u001b[49m\u001b[38;5;66;43;03m# Forward pass (with gradient tracking if specified)\u001b[39;49;00m\n\u001b[0;32m 10\u001b[0m \u001b[43m \u001b[49m\u001b[43moutput\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43m \u001b[49m\u001b[43mmodel\u001b[49m\u001b[43m(\u001b[49m\u001b[43mdata\u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m 11\u001b[0m \u001b[43m \u001b[49m\u001b[43mloss\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43m \u001b[49m\u001b[43mcriterion\u001b[49m\u001b[43m(\u001b[49m\u001b[43moutput\u001b[49m\u001b[43m,\u001b[49m\u001b[43m 
\u001b[49m\u001b[43mtarget\u001b[49m\u001b[43m)\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;66;43;03m# Correct loss computation\u001b[39;49;00m\n", "File \u001b[1;32mc:\\Users\\leo.breedt\\miniconda3\\envs\\neptune_scale_py_312_base\\Lib\\site-packages\\torch\\utils\\data\\dataloader.py:708\u001b[0m, in \u001b[0;36m_BaseDataLoaderIter.__next__\u001b[1;34m(self)\u001b[0m\n\u001b[0;32m 705\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_sampler_iter \u001b[38;5;129;01mis\u001b[39;00m \u001b[38;5;28;01mNone\u001b[39;00m:\n\u001b[0;32m 706\u001b[0m \u001b[38;5;66;03m# TODO(https://github.com/pytorch/pytorch/issues/76750)\u001b[39;00m\n\u001b[0;32m 707\u001b[0m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_reset() \u001b[38;5;66;03m# type: ignore[call-arg]\u001b[39;00m\n\u001b[1;32m--> 708\u001b[0m data \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_next_data\u001b[49m\u001b[43m(\u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m 709\u001b[0m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_num_yielded \u001b[38;5;241m+\u001b[39m\u001b[38;5;241m=\u001b[39m \u001b[38;5;241m1\u001b[39m\n\u001b[0;32m 710\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m (\n\u001b[0;32m 711\u001b[0m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_dataset_kind \u001b[38;5;241m==\u001b[39m _DatasetKind\u001b[38;5;241m.\u001b[39mIterable\n\u001b[0;32m 712\u001b[0m \u001b[38;5;129;01mand\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_IterableDataset_len_called \u001b[38;5;129;01mis\u001b[39;00m \u001b[38;5;129;01mnot\u001b[39;00m \u001b[38;5;28;01mNone\u001b[39;00m\n\u001b[0;32m 713\u001b[0m \u001b[38;5;129;01mand\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_num_yielded \u001b[38;5;241m>\u001b[39m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_IterableDataset_len_called\n\u001b[0;32m 714\u001b[0m ):\n", "File 
\u001b[1;32mc:\\Users\\leo.breedt\\miniconda3\\envs\\neptune_scale_py_312_base\\Lib\\site-packages\\torch\\utils\\data\\dataloader.py:764\u001b[0m, in \u001b[0;36m_SingleProcessDataLoaderIter._next_data\u001b[1;34m(self)\u001b[0m\n\u001b[0;32m 762\u001b[0m \u001b[38;5;28;01mdef\u001b[39;00m \u001b[38;5;21m_next_data\u001b[39m(\u001b[38;5;28mself\u001b[39m):\n\u001b[0;32m 763\u001b[0m index \u001b[38;5;241m=\u001b[39m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_next_index() \u001b[38;5;66;03m# may raise StopIteration\u001b[39;00m\n\u001b[1;32m--> 764\u001b[0m data \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_dataset_fetcher\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mfetch\u001b[49m\u001b[43m(\u001b[49m\u001b[43mindex\u001b[49m\u001b[43m)\u001b[49m \u001b[38;5;66;03m# may raise StopIteration\u001b[39;00m\n\u001b[0;32m 765\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_pin_memory:\n\u001b[0;32m 766\u001b[0m data \u001b[38;5;241m=\u001b[39m _utils\u001b[38;5;241m.\u001b[39mpin_memory\u001b[38;5;241m.\u001b[39mpin_memory(data, \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_pin_memory_device)\n", "File \u001b[1;32mc:\\Users\\leo.breedt\\miniconda3\\envs\\neptune_scale_py_312_base\\Lib\\site-packages\\torch\\utils\\data\\_utils\\fetch.py:52\u001b[0m, in \u001b[0;36m_MapDatasetFetcher.fetch\u001b[1;34m(self, possibly_batched_index)\u001b[0m\n\u001b[0;32m 50\u001b[0m data \u001b[38;5;241m=\u001b[39m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mdataset\u001b[38;5;241m.\u001b[39m__getitems__(possibly_batched_index)\n\u001b[0;32m 51\u001b[0m \u001b[38;5;28;01melse\u001b[39;00m:\n\u001b[1;32m---> 52\u001b[0m data \u001b[38;5;241m=\u001b[39m [\u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mdataset\u001b[49m\u001b[43m[\u001b[49m\u001b[43midx\u001b[49m\u001b[43m]\u001b[49m 
\u001b[38;5;28;01mfor\u001b[39;00m idx \u001b[38;5;129;01min\u001b[39;00m possibly_batched_index]\n\u001b[0;32m 53\u001b[0m \u001b[38;5;28;01melse\u001b[39;00m:\n\u001b[0;32m 54\u001b[0m data \u001b[38;5;241m=\u001b[39m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mdataset[possibly_batched_index]\n", "File \u001b[1;32mc:\\Users\\leo.breedt\\miniconda3\\envs\\neptune_scale_py_312_base\\Lib\\site-packages\\torchvision\\datasets\\mnist.py:146\u001b[0m, in \u001b[0;36mMNIST.__getitem__\u001b[1;34m(self, index)\u001b[0m\n\u001b[0;32m 143\u001b[0m img \u001b[38;5;241m=\u001b[39m Image\u001b[38;5;241m.\u001b[39mfromarray(img\u001b[38;5;241m.\u001b[39mnumpy(), mode\u001b[38;5;241m=\u001b[39m\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mL\u001b[39m\u001b[38;5;124m\"\u001b[39m)\n\u001b[0;32m 145\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mtransform \u001b[38;5;129;01mis\u001b[39;00m \u001b[38;5;129;01mnot\u001b[39;00m \u001b[38;5;28;01mNone\u001b[39;00m:\n\u001b[1;32m--> 146\u001b[0m img \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mtransform\u001b[49m\u001b[43m(\u001b[49m\u001b[43mimg\u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m 148\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mtarget_transform \u001b[38;5;129;01mis\u001b[39;00m \u001b[38;5;129;01mnot\u001b[39;00m \u001b[38;5;28;01mNone\u001b[39;00m:\n\u001b[0;32m 149\u001b[0m target \u001b[38;5;241m=\u001b[39m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mtarget_transform(target)\n", "File \u001b[1;32mc:\\Users\\leo.breedt\\miniconda3\\envs\\neptune_scale_py_312_base\\Lib\\site-packages\\torchvision\\transforms\\transforms.py:95\u001b[0m, in \u001b[0;36mCompose.__call__\u001b[1;34m(self, img)\u001b[0m\n\u001b[0;32m 93\u001b[0m \u001b[38;5;28;01mdef\u001b[39;00m \u001b[38;5;21m__call__\u001b[39m(\u001b[38;5;28mself\u001b[39m, 
img):\n\u001b[0;32m 94\u001b[0m \u001b[38;5;28;01mfor\u001b[39;00m t \u001b[38;5;129;01min\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mtransforms:\n\u001b[1;32m---> 95\u001b[0m img \u001b[38;5;241m=\u001b[39m \u001b[43mt\u001b[49m\u001b[43m(\u001b[49m\u001b[43mimg\u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m 96\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m img\n", - "File \u001b[1;32mc:\\Users\\leo.breedt\\miniconda3\\envs\\neptune_scale_py_312_base\\Lib\\site-packages\\torch\\nn\\modules\\module.py:1739\u001b[0m, in \u001b[0;36mModule._wrapped_call_impl\u001b[1;34m(self, *args, **kwargs)\u001b[0m\n\u001b[0;32m 1737\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_compiled_call_impl(\u001b[38;5;241m*\u001b[39margs, \u001b[38;5;241m*\u001b[39m\u001b[38;5;241m*\u001b[39mkwargs) \u001b[38;5;66;03m# type: ignore[misc]\u001b[39;00m\n\u001b[0;32m 1738\u001b[0m \u001b[38;5;28;01melse\u001b[39;00m:\n\u001b[1;32m-> 1739\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_call_impl\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n", - "File \u001b[1;32mc:\\Users\\leo.breedt\\miniconda3\\envs\\neptune_scale_py_312_base\\Lib\\site-packages\\torch\\nn\\modules\\module.py:1750\u001b[0m, in \u001b[0;36mModule._call_impl\u001b[1;34m(self, *args, **kwargs)\u001b[0m\n\u001b[0;32m 1745\u001b[0m \u001b[38;5;66;03m# If we don't have any hooks, we want to skip the rest of the logic in\u001b[39;00m\n\u001b[0;32m 1746\u001b[0m \u001b[38;5;66;03m# this function, and just call forward.\u001b[39;00m\n\u001b[0;32m 1747\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;129;01mnot\u001b[39;00m 
(\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_backward_hooks \u001b[38;5;129;01mor\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_backward_pre_hooks \u001b[38;5;129;01mor\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_forward_hooks \u001b[38;5;129;01mor\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_forward_pre_hooks\n\u001b[0;32m 1748\u001b[0m \u001b[38;5;129;01mor\u001b[39;00m _global_backward_pre_hooks \u001b[38;5;129;01mor\u001b[39;00m _global_backward_hooks\n\u001b[0;32m 1749\u001b[0m \u001b[38;5;129;01mor\u001b[39;00m _global_forward_hooks \u001b[38;5;129;01mor\u001b[39;00m _global_forward_pre_hooks):\n\u001b[1;32m-> 1750\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[43mforward_call\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m 1752\u001b[0m result \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;01mNone\u001b[39;00m\n\u001b[0;32m 1753\u001b[0m called_always_called_hooks \u001b[38;5;241m=\u001b[39m \u001b[38;5;28mset\u001b[39m()\n", - "File \u001b[1;32mc:\\Users\\leo.breedt\\miniconda3\\envs\\neptune_scale_py_312_base\\Lib\\site-packages\\torchvision\\transforms\\transforms.py:277\u001b[0m, in \u001b[0;36mNormalize.forward\u001b[1;34m(self, tensor)\u001b[0m\n\u001b[0;32m 269\u001b[0m \u001b[38;5;28;01mdef\u001b[39;00m \u001b[38;5;21mforward\u001b[39m(\u001b[38;5;28mself\u001b[39m, tensor: Tensor) \u001b[38;5;241m-\u001b[39m\u001b[38;5;241m>\u001b[39m Tensor:\n\u001b[0;32m 270\u001b[0m \u001b[38;5;250m \u001b[39m\u001b[38;5;124;03m\"\"\"\u001b[39;00m\n\u001b[0;32m 271\u001b[0m \u001b[38;5;124;03m Args:\u001b[39;00m\n\u001b[0;32m 272\u001b[0m \u001b[38;5;124;03m tensor (Tensor): Tensor image to be normalized.\u001b[39;00m\n\u001b[1;32m (...)\u001b[0m\n\u001b[0;32m 
275\u001b[0m \u001b[38;5;124;03m Tensor: Normalized Tensor image.\u001b[39;00m\n\u001b[0;32m 276\u001b[0m \u001b[38;5;124;03m \"\"\"\u001b[39;00m\n\u001b[1;32m--> 277\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[43mF\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mnormalize\u001b[49m\u001b[43m(\u001b[49m\u001b[43mtensor\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mmean\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mstd\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43minplace\u001b[49m\u001b[43m)\u001b[49m\n", - "File \u001b[1;32mc:\\Users\\leo.breedt\\miniconda3\\envs\\neptune_scale_py_312_base\\Lib\\site-packages\\torchvision\\transforms\\functional.py:350\u001b[0m, in \u001b[0;36mnormalize\u001b[1;34m(tensor, mean, std, inplace)\u001b[0m\n\u001b[0;32m 347\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;129;01mnot\u001b[39;00m \u001b[38;5;28misinstance\u001b[39m(tensor, torch\u001b[38;5;241m.\u001b[39mTensor):\n\u001b[0;32m 348\u001b[0m \u001b[38;5;28;01mraise\u001b[39;00m \u001b[38;5;167;01mTypeError\u001b[39;00m(\u001b[38;5;124mf\u001b[39m\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mimg should be Tensor Image. 
Got \u001b[39m\u001b[38;5;132;01m{\u001b[39;00m\u001b[38;5;28mtype\u001b[39m(tensor)\u001b[38;5;132;01m}\u001b[39;00m\u001b[38;5;124m\"\u001b[39m)\n\u001b[1;32m--> 350\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[43mF_t\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mnormalize\u001b[49m\u001b[43m(\u001b[49m\u001b[43mtensor\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mmean\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mmean\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mstd\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mstd\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43minplace\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43minplace\u001b[49m\u001b[43m)\u001b[49m\n", - "File \u001b[1;32mc:\\Users\\leo.breedt\\miniconda3\\envs\\neptune_scale_py_312_base\\Lib\\site-packages\\torchvision\\transforms\\_functional_tensor.py:920\u001b[0m, in \u001b[0;36mnormalize\u001b[1;34m(tensor, mean, std, inplace)\u001b[0m\n\u001b[0;32m 917\u001b[0m tensor \u001b[38;5;241m=\u001b[39m tensor\u001b[38;5;241m.\u001b[39mclone()\n\u001b[0;32m 919\u001b[0m dtype \u001b[38;5;241m=\u001b[39m tensor\u001b[38;5;241m.\u001b[39mdtype\n\u001b[1;32m--> 920\u001b[0m mean \u001b[38;5;241m=\u001b[39m \u001b[43mtorch\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mas_tensor\u001b[49m\u001b[43m(\u001b[49m\u001b[43mmean\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mdtype\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mdtype\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mdevice\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mtensor\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mdevice\u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m 921\u001b[0m std \u001b[38;5;241m=\u001b[39m torch\u001b[38;5;241m.\u001b[39mas_tensor(std, dtype\u001b[38;5;241m=\u001b[39mdtype, device\u001b[38;5;241m=\u001b[39mtensor\u001b[38;5;241m.\u001b[39mdevice)\n\u001b[0;32m 922\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m 
(std \u001b[38;5;241m==\u001b[39m \u001b[38;5;241m0\u001b[39m)\u001b[38;5;241m.\u001b[39many():\n", + "File \u001b[1;32mc:\\Users\\leo.breedt\\miniconda3\\envs\\neptune_scale_py_312_base\\Lib\\site-packages\\torchvision\\transforms\\transforms.py:137\u001b[0m, in \u001b[0;36mToTensor.__call__\u001b[1;34m(self, pic)\u001b[0m\n\u001b[0;32m 129\u001b[0m \u001b[38;5;28;01mdef\u001b[39;00m \u001b[38;5;21m__call__\u001b[39m(\u001b[38;5;28mself\u001b[39m, pic):\n\u001b[0;32m 130\u001b[0m \u001b[38;5;250m \u001b[39m\u001b[38;5;124;03m\"\"\"\u001b[39;00m\n\u001b[0;32m 131\u001b[0m \u001b[38;5;124;03m Args:\u001b[39;00m\n\u001b[0;32m 132\u001b[0m \u001b[38;5;124;03m pic (PIL Image or numpy.ndarray): Image to be converted to tensor.\u001b[39;00m\n\u001b[1;32m (...)\u001b[0m\n\u001b[0;32m 135\u001b[0m \u001b[38;5;124;03m Tensor: Converted image.\u001b[39;00m\n\u001b[0;32m 136\u001b[0m \u001b[38;5;124;03m \"\"\"\u001b[39;00m\n\u001b[1;32m--> 137\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[43mF\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mto_tensor\u001b[49m\u001b[43m(\u001b[49m\u001b[43mpic\u001b[49m\u001b[43m)\u001b[49m\n", + "File \u001b[1;32mc:\\Users\\leo.breedt\\miniconda3\\envs\\neptune_scale_py_312_base\\Lib\\site-packages\\torchvision\\transforms\\functional.py:176\u001b[0m, in \u001b[0;36mto_tensor\u001b[1;34m(pic)\u001b[0m\n\u001b[0;32m 174\u001b[0m img \u001b[38;5;241m=\u001b[39m img\u001b[38;5;241m.\u001b[39mpermute((\u001b[38;5;241m2\u001b[39m, \u001b[38;5;241m0\u001b[39m, \u001b[38;5;241m1\u001b[39m))\u001b[38;5;241m.\u001b[39mcontiguous()\n\u001b[0;32m 175\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;28misinstance\u001b[39m(img, torch\u001b[38;5;241m.\u001b[39mByteTensor):\n\u001b[1;32m--> 176\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m 
\u001b[43mimg\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mto\u001b[49m\u001b[43m(\u001b[49m\u001b[43mdtype\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mdefault_float_dtype\u001b[49m\u001b[43m)\u001b[49m\u001b[38;5;241m.\u001b[39mdiv(\u001b[38;5;241m255\u001b[39m)\n\u001b[0;32m 177\u001b[0m \u001b[38;5;28;01melse\u001b[39;00m:\n\u001b[0;32m 178\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m img\n", "\u001b[1;31mKeyboardInterrupt\u001b[0m: " ] } @@ -379,20 +379,25 @@ " \n", " # Print loss and accuracy for each batch (step)\n", " #if (batch_idx + 1) % 5 == 0: # Every 5 steps\n", - " batch_accuracy = 100 * correct_preds / total_preds\n", + " batch_accuracy = correct_preds / total_preds\n", " # print(f\"Epoch [{epoch+1}/{num_epochs}], Step [{batch_idx+1}/{len(train_loader)}], Loss: {loss.item():.4f}, Accuracy: {batch_accuracy:.2f}%\")\n", " \n", " # Validation step per training step\n", " val_loss, val_accuracy = evaluate(model, val_loader) # Evaluate after each step\n", " # print(f\"Validation at step [{batch_idx+1}/{len(train_loader)}] - Loss: {val_loss:.4f}, Accuracy: {val_accuracy:.2f}%\")\n", "\n", + " for name, param in model.named_parameters():\n", + " if param is not None:\n", + " grad_norms[f\"grad_norm/{name}\"] = param.grad.norm(2).item() # L2 norm (Euclidean norm) of the gradients\n", + "\n", " run.log_metrics(\n", " data = {\n", " \"metrics/train/loss\": loss.item(),\n", " \"metrics/train/accuracy\": batch_accuracy,\n", " \"metrics/validation/loss\": val_loss,\n", " \"metrics/validation/accuracy\": val_accuracy,\n", - " \"epoch_value\": epoch\n", + " \"epoch_value\": epoch,\n", + " **grad_norms\n", " },\n", " step = step_counter\n", " )\n", @@ -411,7 +416,7 @@ " if param is not None:\n", " params_dict_std[f\"layers/layer_{name}_std\"] = param.grad.std().item()\n", " params_dict_mean[f\"layers/layer_{name}_mean\"] = param.grad.mean().item()\n", - " grad_norms[f\"grad_norm/{name}\"] = param.grad.norm(2).item() # L2 norm (Euclidean norm) of the 
gradients\n", + " # grad_norms[f\"grad_norm/{name}\"] = param.grad.norm(2).item() # L2 norm (Euclidean norm) of the gradients\n", " \n", " layers_dict = {**activation_dict_mean, \n", " **activation_dict_std,\n", @@ -445,7 +450,7 @@ }, { "cell_type": "code", - "execution_count": 74, + "execution_count": 88, "metadata": {}, "outputs": [ { From 8a3da577769ac9be330234a164c0d7fd95b5424d Mon Sep 17 00:00:00 2001 From: LeoRoccoBreedt Date: Mon, 3 Mar 2025 12:04:58 +0100 Subject: [PATCH 013/125] feat: Added a pytorch text-based example that is used to demonstrate debugging when building LLM's --- .../pytorch_text_model_debugging.ipynb | 646 ++++++++++++++++++ 1 file changed, 646 insertions(+) create mode 100644 integrations-and-supported-tools/pytorch/pytorch_text_model_debugging.ipynb diff --git a/integrations-and-supported-tools/pytorch/pytorch_text_model_debugging.ipynb b/integrations-and-supported-tools/pytorch/pytorch_text_model_debugging.ipynb new file mode 100644 index 0000000..4c8f180 --- /dev/null +++ b/integrations-and-supported-tools/pytorch/pytorch_text_model_debugging.ipynb @@ -0,0 +1,646 @@ +{ + "cells": [ + { + "cell_type": "code", + "execution_count": 2, + "metadata": {}, + "outputs": [], + "source": [ + "import torch\n", + "import torch.nn as nn\n", + "import torch.optim as optim\n", + "import matplotlib.pyplot as plt\n", + "import numpy as np\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "# TODO - capture logs to Neptune\n", + "# TODO - improve dataset to be more realistic\n", + "# TODO - capture the folowings; attentions, activations, gradients, weights, loss (agg and per layer), learning rate (per layer if needed)\n", + "# TODO - capture per layer losses\n", + "# TODO - increase number of layers\n", + "# TODO - Adding dropout to demonstrate the effects of regularization\n", + "# TODO - batch normalization" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + 
"During training, capturing valuable information from each layer of a model can provide insights into how the model is learning, what features are being extracted, and how the model's performance can be improved. Researchers often look for certain metrics and activations from each layer, which can help in diagnosing issues such as vanishing gradients, overfitting, or underfitting.\n", + "\n", + "Key Information to Capture from Each Layer:\n", + "Activations:\n", + "\n", + "What it shows: The activations of each layer during forward propagation can give insights into how the model is processing input data at different levels. By examining activations, you can see if certain layers are \"dead\" (i.e., not learning anything) or if activations are exploding (leading to unstable training).\n", + "How to capture: You can register hooks to capture activations after each layer.\n", + "Gradients:\n", + "\n", + "What it shows: The gradients of each layer during backpropagation are important for diagnosing issues like vanishing or exploding gradients. If gradients are too small, it can indicate the model is struggling to learn effectively (vanishing gradients). If they're too large, it can indicate instability (exploding gradients).\n", + "How to capture: Similar to activations, you can use hooks to capture gradients during backpropagation.\n", + "Weights and Biases:\n", + "\n", + "What it shows: Tracking the weights and biases of each layer helps researchers understand how the model's parameters are evolving during training. For example, if weights are growing too large or becoming too small, it might suggest the need for better regularization or learning rate adjustments.\n", + "How to capture: You can extract weights and biases directly from the model’s parameters.\n", + "Layer-wise Loss:\n", + "\n", + "What it shows: Tracking loss at different stages (layers) can help understand which parts of the network are contributing more to the overall loss. 
This can be valuable for debugging or optimizing the model.\n", + "How to capture: Loss can be tracked by monitoring the output from each layer and comparing it against the target.\n", + "Learning Rate per Layer:\n", + "\n", + "What it shows: If you're using techniques like Layer-wise Learning Rate Decay (L2LRD) or applying different learning rates to different layers, capturing the learning rate used by each layer during training can be informative.\n", + "How to capture: This information would depend on how the learning rate is defined in your optimizer, but it can be tracked manually.\n", + "Layer Output Norms:\n", + "\n", + "What it shows: Tracking the L2-norm (magnitude) of the output for each layer can help in detecting issues like gradient explosion or squashing effects. A norm that’s either too high or too low could suggest the model isn't behaving as expected.\n", + "How to capture: You can compute the L2-norm for the output of each layer.\n", + "Activation Distributions:\n", + "\n", + "What it shows: Visualizing or capturing the distribution of activations (e.g., mean, variance, or histograms) can reveal issues like the saturation of neurons in activation functions, especially in cases with ReLU (leading to dead neurons).\n", + "How to capture: You can visualize or compute statistical summaries of activations using Python libraries like matplotlib or seaborn.\n", + "Feature Maps (for Convolutional Layers):\n", + "\n", + "What it shows: If you have convolutional layers, tracking the feature maps can provide insight into how each filter is detecting specific patterns or features from the input data.\n", + "How to capture: You can visualize the feature maps after convolutional layers using libraries like matplotlib." 
+ ] + }, + { + "cell_type": "code", + "execution_count": 7, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Epoch 1/400, Loss: 11.5184\n", + "Epoch 2/400, Loss: 11.0458\n", + "Epoch 3/400, Loss: 10.6166\n", + "Epoch 4/400, Loss: 10.2209\n", + "Epoch 5/400, Loss: 9.8457\n", + "Epoch 6/400, Loss: 9.4807\n", + "Epoch 7/400, Loss: 9.1192\n", + "Epoch 8/400, Loss: 8.7570\n", + "Epoch 9/400, Loss: 8.3914\n", + "Epoch 10/400, Loss: 8.0211\n", + "Epoch 11/400, Loss: 7.6454\n", + "Epoch 12/400, Loss: 7.2639\n", + "Epoch 13/400, Loss: 6.8763\n", + "Epoch 14/400, Loss: 6.4826\n", + "Epoch 15/400, Loss: 6.0829\n", + "Epoch 16/400, Loss: 5.6777\n", + "Epoch 17/400, Loss: 5.2678\n" + ] + }, + { + "ename": "KeyboardInterrupt", + "evalue": "", + "output_type": "error", + "traceback": [ + "\u001b[1;31m---------------------------------------------------------------------------\u001b[0m", + "\u001b[1;31mKeyboardInterrupt\u001b[0m Traceback (most recent call last)", + "Cell \u001b[1;32mIn[7], line 66\u001b[0m\n\u001b[0;32m 63\u001b[0m loss \u001b[38;5;241m=\u001b[39m criterion(output, target_data)\n\u001b[0;32m 65\u001b[0m \u001b[38;5;66;03m# Backward pass\u001b[39;00m\n\u001b[1;32m---> 66\u001b[0m \u001b[43mloss\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mbackward\u001b[49m\u001b[43m(\u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m 68\u001b[0m \u001b[38;5;66;03m# Optimizer step\u001b[39;00m\n\u001b[0;32m 69\u001b[0m optimizer\u001b[38;5;241m.\u001b[39mstep()\n", + "File \u001b[1;32mc:\\Users\\leo.breedt\\miniconda3\\envs\\neptune_scale_py_312_base\\Lib\\site-packages\\torch\\_tensor.py:626\u001b[0m, in \u001b[0;36mTensor.backward\u001b[1;34m(self, gradient, retain_graph, create_graph, inputs)\u001b[0m\n\u001b[0;32m 616\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m has_torch_function_unary(\u001b[38;5;28mself\u001b[39m):\n\u001b[0;32m 617\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m handle_torch_function(\n\u001b[0;32m 
618\u001b[0m Tensor\u001b[38;5;241m.\u001b[39mbackward,\n\u001b[0;32m 619\u001b[0m (\u001b[38;5;28mself\u001b[39m,),\n\u001b[1;32m (...)\u001b[0m\n\u001b[0;32m 624\u001b[0m inputs\u001b[38;5;241m=\u001b[39minputs,\n\u001b[0;32m 625\u001b[0m )\n\u001b[1;32m--> 626\u001b[0m \u001b[43mtorch\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mautograd\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mbackward\u001b[49m\u001b[43m(\u001b[49m\n\u001b[0;32m 627\u001b[0m \u001b[43m \u001b[49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mgradient\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mretain_graph\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mcreate_graph\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43minputs\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43minputs\u001b[49m\n\u001b[0;32m 628\u001b[0m \u001b[43m\u001b[49m\u001b[43m)\u001b[49m\n", + "File \u001b[1;32mc:\\Users\\leo.breedt\\miniconda3\\envs\\neptune_scale_py_312_base\\Lib\\site-packages\\torch\\autograd\\__init__.py:347\u001b[0m, in \u001b[0;36mbackward\u001b[1;34m(tensors, grad_tensors, retain_graph, create_graph, grad_variables, inputs)\u001b[0m\n\u001b[0;32m 342\u001b[0m retain_graph \u001b[38;5;241m=\u001b[39m create_graph\n\u001b[0;32m 344\u001b[0m \u001b[38;5;66;03m# The reason we repeat the same comment below is that\u001b[39;00m\n\u001b[0;32m 345\u001b[0m \u001b[38;5;66;03m# some Python versions print out the first line of a multi-line function\u001b[39;00m\n\u001b[0;32m 346\u001b[0m \u001b[38;5;66;03m# calls in the traceback and some print out the last line\u001b[39;00m\n\u001b[1;32m--> 347\u001b[0m \u001b[43m_engine_run_backward\u001b[49m\u001b[43m(\u001b[49m\n\u001b[0;32m 348\u001b[0m \u001b[43m \u001b[49m\u001b[43mtensors\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 349\u001b[0m \u001b[43m \u001b[49m\u001b[43mgrad_tensors_\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 350\u001b[0m \u001b[43m 
\u001b[49m\u001b[43mretain_graph\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 351\u001b[0m \u001b[43m \u001b[49m\u001b[43mcreate_graph\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 352\u001b[0m \u001b[43m \u001b[49m\u001b[43minputs\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 353\u001b[0m \u001b[43m \u001b[49m\u001b[43mallow_unreachable\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[38;5;28;43;01mTrue\u001b[39;49;00m\u001b[43m,\u001b[49m\n\u001b[0;32m 354\u001b[0m \u001b[43m \u001b[49m\u001b[43maccumulate_grad\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[38;5;28;43;01mTrue\u001b[39;49;00m\u001b[43m,\u001b[49m\n\u001b[0;32m 355\u001b[0m \u001b[43m\u001b[49m\u001b[43m)\u001b[49m\n", + "File \u001b[1;32mc:\\Users\\leo.breedt\\miniconda3\\envs\\neptune_scale_py_312_base\\Lib\\site-packages\\torch\\autograd\\graph.py:823\u001b[0m, in \u001b[0;36m_engine_run_backward\u001b[1;34m(t_outputs, *args, **kwargs)\u001b[0m\n\u001b[0;32m 821\u001b[0m unregister_hooks \u001b[38;5;241m=\u001b[39m _register_logging_hooks_on_whole_graph(t_outputs)\n\u001b[0;32m 822\u001b[0m \u001b[38;5;28;01mtry\u001b[39;00m:\n\u001b[1;32m--> 823\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[43mVariable\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_execution_engine\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mrun_backward\u001b[49m\u001b[43m(\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;66;43;03m# Calls into the C++ engine to run the backward pass\u001b[39;49;00m\n\u001b[0;32m 824\u001b[0m \u001b[43m \u001b[49m\u001b[43mt_outputs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\n\u001b[0;32m 825\u001b[0m \u001b[43m \u001b[49m\u001b[43m)\u001b[49m \u001b[38;5;66;03m# Calls into the C++ engine to run the backward pass\u001b[39;00m\n\u001b[0;32m 826\u001b[0m 
\u001b[38;5;28;01mfinally\u001b[39;00m:\n\u001b[0;32m 827\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m attach_logging_hooks:\n", + "File \u001b[1;32mc:\\Users\\leo.breedt\\miniconda3\\envs\\neptune_scale_py_312_base\\Lib\\site-packages\\torch\\autograd\\function.py:292\u001b[0m, in \u001b[0;36mBackwardCFunction.apply\u001b[1;34m(self, *args)\u001b[0m\n\u001b[0;32m 287\u001b[0m \u001b[38;5;28;01mclass\u001b[39;00m \u001b[38;5;21;01mBackwardCFunction\u001b[39;00m(_C\u001b[38;5;241m.\u001b[39m_FunctionBase, FunctionCtx, _HookMixin):\n\u001b[0;32m 288\u001b[0m \u001b[38;5;250m \u001b[39m\u001b[38;5;124mr\u001b[39m\u001b[38;5;124;03m\"\"\"\u001b[39;00m\n\u001b[0;32m 289\u001b[0m \u001b[38;5;124;03m This class is used for internal autograd work. Do not use.\u001b[39;00m\n\u001b[0;32m 290\u001b[0m \u001b[38;5;124;03m \"\"\"\u001b[39;00m\n\u001b[1;32m--> 292\u001b[0m \u001b[38;5;28;01mdef\u001b[39;00m \u001b[38;5;21mapply\u001b[39m(\u001b[38;5;28mself\u001b[39m, \u001b[38;5;241m*\u001b[39margs):\n\u001b[0;32m 293\u001b[0m \u001b[38;5;250m \u001b[39m\u001b[38;5;124mr\u001b[39m\u001b[38;5;124;03m\"\"\"\u001b[39;00m\n\u001b[0;32m 294\u001b[0m \u001b[38;5;124;03m Apply method used when executing this Node during the backward\u001b[39;00m\n\u001b[0;32m 295\u001b[0m \u001b[38;5;124;03m \"\"\"\u001b[39;00m\n\u001b[0;32m 296\u001b[0m \u001b[38;5;66;03m# _forward_cls is defined by derived class\u001b[39;00m\n\u001b[0;32m 297\u001b[0m \u001b[38;5;66;03m# The user should define either backward or vjp but never both.\u001b[39;00m\n", + "\u001b[1;31mKeyboardInterrupt\u001b[0m: " + ] + } + ], + "source": [ + "\n", + "# Define the simple LLM model with LSTM\n", + "class SimpleLLM(nn.Module):\n", + " def __init__(self, vocab_size, embed_size, hidden_size):\n", + " super(SimpleLLM, self).__init__()\n", + " self.embedding = nn.Embedding(vocab_size, embed_size)\n", + " self.lstm = nn.LSTM(embed_size, hidden_size, batch_first=True)\n", + " self.fc = nn.Linear(hidden_size, vocab_size)\n", + " 
\n", + " def forward(self, x):\n", + " x = self.embedding(x)\n", + " lstm_out, _ = self.lstm(x) # LSTM returns output and hidden/cell state tuple\n", + " out = self.fc(lstm_out)\n", + " return out\n", + "\n", + "# Define a function to capture activations\n", + "def capture_activation(name):\n", + " def hook(model, input, output):\n", + " activations.append((name, output)) # Capture activation\n", + " return hook\n", + "\n", + "# Define a function to capture gradients\n", + "def capture_gradient(name):\n", + " def hook(module, grad_input, grad_output):\n", + " gradients.append((name, grad_output)) # Capture gradients\n", + " return hook\n", + "\n", + "# Initialize model, loss function, and optimizer\n", + "vocab_size = 100000 # Just an example vocab size\n", + "embed_size = 1000 # Embedding size\n", + "hidden_size = 256 # Hidden size for the LSTM\n", + "model = SimpleLLM(vocab_size, embed_size, hidden_size)\n", + "optimizer = optim.Adam(model.parameters(), lr=0.001)\n", + "criterion = nn.CrossEntropyLoss()\n", + "\n", + "# Container for capturing activations and gradients\n", + "activations = []\n", + "gradients = []\n", + "\n", + "# Register hooks for LSTM and final Linear layer\n", + "model.lstm.register_forward_hook(capture_activation('lstm'))\n", + "model.fc.register_forward_hook(capture_activation('fc'))\n", + "\n", + "model.lstm.register_full_backward_hook(capture_gradient('lstm'))\n", + "model.fc.register_full_backward_hook(capture_gradient('fc'))\n", + "\n", + "# Dummy input data (example with batch_size=3 and sequence_length=5)\n", + "input_data = torch.randint(0, vocab_size, (5, 10)) # Random word indices\n", + "target_data = torch.randint(0, vocab_size, (5, 10)) # Random target labels\n", + "\n", + "# Training loop\n", + "num_epochs = 400\n", + "for epoch in range(num_epochs):\n", + " optimizer.zero_grad()\n", + "\n", + " # Forward pass\n", + " output = model(input_data)\n", + " \n", + " # Reshape output and target to match the shape of CrossEntropyLoss 
expectations\n", + " output = output.view(-1, vocab_size) # Flatten the output\n", + " target_data = target_data.view(-1) # Flatten the target data\n", + " \n", + " # Calculate loss\n", + " loss = criterion(output, target_data)\n", + " \n", + " # Backward pass\n", + " loss.backward()\n", + "\n", + " # Optimizer step\n", + " optimizer.step()\n", + "\n", + " # Output loss for this epoch\n", + " # if epoch % 50 == 0:\n", + " print(f'Epoch {epoch+1}/{num_epochs}, Loss: {loss.item():.4f}')\n" + ] + }, + { + "cell_type": "code", + "execution_count": 14, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Activation for lstm layer: (tensor([[[-1.1885e-01, 7.4334e-02, -1.1457e-01, ..., 6.1157e-02,\n", + " 3.4403e-02, 1.4354e-01],\n", + " [ 2.1115e-02, -9.9912e-02, -1.5829e-01, ..., 6.8039e-02,\n", + " 5.0781e-02, 1.2131e-01],\n", + " [-5.1509e-02, -2.8223e-02, -1.2977e-01, ..., 5.5352e-02,\n", + " 1.3167e-01, -9.2887e-02],\n", + " [ 1.4676e-01, -5.5235e-02, -2.3041e-01, ..., 1.3869e-01,\n", + " 6.1838e-02, -1.1159e-01],\n", + " [ 9.8354e-03, 5.3392e-02, -3.0002e-02, ..., 1.5939e-01,\n", + " 4.8001e-02, -1.6666e-01]],\n", + "\n", + " [[-6.9048e-03, -1.4166e-01, 1.8845e-03, ..., 1.3928e-01,\n", + " -2.7910e-01, 1.3777e-01],\n", + " [-7.3346e-02, -1.0498e-01, 3.9164e-02, ..., 1.7106e-01,\n", + " -8.2462e-02, -5.4011e-02],\n", + " [ 1.3681e-01, -5.4486e-02, -7.6502e-02, ..., 1.2276e-01,\n", + " 5.4242e-03, -1.5655e-01],\n", + " [ 1.3117e-01, -1.4139e-01, -2.0100e-01, ..., 1.5732e-01,\n", + " -1.0666e-01, -8.7156e-02],\n", + " [ 9.4591e-02, -5.7793e-02, -1.3404e-01, ..., 9.6900e-02,\n", + " 4.9515e-03, -7.9186e-03]],\n", + "\n", + " [[-9.0859e-02, 5.5639e-03, -1.4513e-01, ..., -7.1660e-02,\n", + " 1.8301e-01, 7.4892e-03],\n", + " [ 9.2305e-02, 1.1849e-01, -1.5432e-01, ..., -3.9754e-02,\n", + " 2.1528e-01, -6.6336e-02],\n", + " [-4.3912e-02, 7.2306e-03, -1.6964e-01, ..., 3.7397e-02,\n", + " 1.3741e-01, -7.8224e-03],\n", + " 
[-2.3771e-03, -1.7121e-01, -1.9225e-01, ..., 2.1824e-02,\n", + " 3.4948e-02, -1.7641e-04],\n", + " [ 1.3603e-01, -6.7352e-02, -2.7966e-01, ..., 1.0816e-01,\n", + " -9.4453e-02, 1.8542e-01]]], grad_fn=), (tensor([[[ 9.8354e-03, 5.3392e-02, -3.0002e-02, 7.2875e-03, -1.4585e-01,\n", + " 1.2482e-02, 6.9810e-02, -9.4523e-02, 8.2014e-02, 1.4978e-02,\n", + " -2.2127e-01, -8.6931e-02, -1.7447e-02, -9.0253e-02, -5.8715e-02,\n", + " -9.7126e-02, -1.4640e-02, 6.9750e-02, -4.0392e-02, -6.4946e-03,\n", + " 6.3030e-02, -2.8948e-03, 2.3277e-02, 2.1406e-01, -7.2021e-02,\n", + " -8.5262e-02, 2.1904e-01, 5.9262e-02, 8.1430e-02, -5.0410e-02,\n", + " 4.7898e-02, -1.7856e-02, -9.6553e-02, 1.2176e-01, 1.3534e-01,\n", + " 9.9480e-02, -6.4945e-02, -8.7489e-02, 1.9893e-01, -1.1475e-02,\n", + " -1.1274e-01, 1.4311e-01, 1.2569e-01, 9.3439e-03, 7.4138e-02,\n", + " -4.6937e-02, 1.2031e-01, 3.7596e-02, -1.1246e-01, -1.1382e-01,\n", + " -4.6571e-02, -6.7600e-02, -1.0755e-01, 1.3048e-01, -1.8727e-01,\n", + " -1.5687e-01, 1.3892e-01, -2.1552e-01, -5.3767e-02, 1.5782e-02,\n", + " 1.7370e-01, 3.1182e-02, 1.0335e-01, 2.5303e-02, -1.8224e-02,\n", + " -6.4270e-03, -1.8351e-02, -7.7923e-02, 7.3565e-03, 1.0760e-01,\n", + " 2.1875e-02, -4.9347e-02, 7.0013e-02, -4.0485e-02, 8.5161e-03,\n", + " -1.6069e-01, -7.3729e-02, -1.1683e-01, 1.1353e-01, 9.2289e-02,\n", + " -2.1123e-01, -2.1108e-02, -3.3057e-02, 9.3739e-02, -7.4621e-02,\n", + " -3.0965e-02, 3.7901e-02, 9.5562e-02, 5.1191e-02, -8.1342e-02,\n", + " 1.9611e-02, -1.1581e-01, -2.1830e-01, 1.9827e-02, -3.9519e-02,\n", + " 1.1952e-01, 6.1484e-02, 1.3787e-01, 4.1152e-02, -6.1684e-02,\n", + " -5.6594e-02, -3.1420e-02, -2.2474e-02, -6.5657e-02, 1.4875e-01,\n", + " -9.1052e-02, 7.7182e-02, 4.8757e-02, 1.4903e-01, -1.3527e-03,\n", + " 2.7246e-01, 1.2794e-01, -1.7041e-01, -7.6142e-02, 1.3974e-01,\n", + " 2.4618e-02, -6.9025e-02, -9.4079e-02, -1.4909e-03, 5.9612e-02,\n", + " -4.4725e-03, -6.7179e-02, -6.1797e-02, 1.6559e-01, -8.0163e-02,\n", + " -2.2909e-01, 
6.6350e-02, 1.1361e-02, 8.4108e-02, -1.1556e-01,\n", + " 6.1252e-02, 8.3287e-02, -8.7894e-02, -1.0695e-01, -5.3925e-02,\n", + " 7.1153e-02, 6.9016e-02, -1.4300e-02, -5.9164e-02, 3.7659e-02,\n", + " 1.0063e-01, 1.0336e-01, -1.9477e-02, 3.5742e-02, -3.3210e-02,\n", + " -1.6847e-03, -9.3591e-02, 1.4821e-01, 8.3384e-02, 8.9724e-02,\n", + " -1.2488e-01, -1.8335e-01, -1.4115e-01, 1.3415e-01, 1.4753e-01,\n", + " 4.4653e-02, 1.9245e-02, 3.8093e-02, 1.0158e-01, -4.3646e-02,\n", + " -7.2102e-02, 1.9797e-02, 3.9576e-02, 2.4157e-02, -1.0626e-01,\n", + " -2.5799e-01, 3.5465e-02, -1.9632e-01, 1.2119e-01, 4.7838e-02,\n", + " 1.0147e-02, -3.0875e-03, -4.0234e-03, 3.6550e-02, 2.3824e-02,\n", + " -4.8517e-02, -8.0195e-02, 3.5668e-02, -3.0944e-02, 9.3462e-02,\n", + " 1.5813e-01, -6.5189e-02, 7.8902e-03, 1.2792e-01, 1.2325e-01,\n", + " -7.1469e-02, -6.0525e-03, 4.5219e-02, 2.8174e-02, 2.1492e-01,\n", + " -5.7059e-03, -5.3889e-03, -1.9786e-01, -2.9867e-02, -1.3545e-01,\n", + " -6.7783e-02, -9.1990e-02, 1.7466e-02, -8.8718e-02, 3.5585e-02,\n", + " -4.7237e-02, 4.2510e-02, 1.7164e-01, 6.5421e-02, 7.3680e-02,\n", + " -4.8147e-02, -2.3811e-02, -1.4102e-01, -1.7029e-02, 1.5386e-01,\n", + " -1.0342e-01, 2.5038e-03, 1.0144e-01, 1.2401e-02, 5.6638e-03,\n", + " 2.2166e-02, 1.2892e-02, -2.7320e-02, 6.6498e-02, -7.0570e-02,\n", + " -1.1974e-02, 1.3094e-01, -1.1246e-01, 1.0381e-01, -7.8150e-02,\n", + " -3.4429e-02, 1.4121e-01, -1.1700e-01, -1.3806e-01, 8.8438e-02,\n", + " -7.9613e-02, 7.0094e-02, -6.7182e-02, 3.2045e-02, -2.9587e-02,\n", + " 1.8854e-01, -2.0000e-02, -1.3733e-01, -1.7716e-01, 1.5292e-02,\n", + " -4.1364e-03, 2.0001e-02, 1.2971e-01, 1.7419e-01, 5.9393e-02,\n", + " 3.4792e-02, -2.2356e-01, 7.1222e-02, -6.3988e-02, -3.1165e-02,\n", + " -5.1605e-02, -3.3989e-02, 1.2333e-02, 1.5939e-01, 4.8001e-02,\n", + " -1.6666e-01],\n", + " [ 9.4591e-02, -5.7793e-02, -1.3404e-01, 2.1016e-01, -1.3421e-01,\n", + " -1.1376e-01, 1.1109e-01, 1.0350e-01, -2.2410e-02, -4.0062e-02,\n", + " 3.2718e-02, 
-6.8034e-02, 5.5625e-02, 1.5184e-02, -9.1550e-04,\n", + " -6.3774e-02, -6.9557e-02, 1.0805e-01, 4.2219e-02, -6.3613e-02,\n", + " -9.4216e-02, 6.2111e-02, -5.3061e-02, -1.5292e-01, 1.2790e-01,\n", + " -7.5749e-02, 1.4717e-01, 6.3722e-02, 8.9517e-02, 6.4354e-02,\n", + " 2.1560e-01, 2.4706e-02, -5.5648e-02, -3.0421e-02, 3.9275e-02,\n", + " -8.8307e-02, 1.8278e-01, 6.1446e-02, 2.0430e-02, -1.7117e-02,\n", + " 4.9665e-02, -3.5847e-02, 3.8241e-02, -1.0973e-02, -2.3271e-02,\n", + " 7.5955e-02, 8.3174e-02, 6.0993e-02, -3.5498e-02, 1.1457e-01,\n", + " -1.0923e-02, 1.8132e-03, 1.1598e-01, -1.1598e-01, 6.3417e-02,\n", + " -4.0052e-02, 4.1007e-02, -9.3542e-02, -1.3564e-02, 6.6304e-02,\n", + " 8.8295e-02, 1.3509e-02, 4.5857e-02, 1.8167e-02, 3.7219e-02,\n", + " 5.2673e-02, -7.6027e-02, 1.7417e-02, -3.7517e-02, -7.5377e-02,\n", + " -4.5400e-02, -8.2015e-02, -1.0347e-01, -8.2943e-02, -5.7339e-02,\n", + " -1.3186e-02, 6.2147e-02, 7.8584e-02, -4.2842e-02, -6.7754e-02,\n", + " -9.0749e-02, -4.3508e-03, 1.3494e-01, -6.2708e-02, -1.9534e-01,\n", + " -9.5295e-02, -8.6889e-04, -3.6767e-02, -4.7225e-02, 1.1969e-01,\n", + " 4.6655e-02, -1.0073e-01, -5.1285e-02, -1.3837e-01, -4.0509e-03,\n", + " 1.9807e-02, -2.2002e-02, -2.2138e-01, 4.1285e-02, -9.4463e-04,\n", + " -5.3333e-02, 1.2217e-01, 4.6612e-02, 7.2419e-02, -5.5067e-02,\n", + " -8.7105e-02, 1.6203e-01, 8.3020e-02, -1.0497e-01, -1.2814e-01,\n", + " -1.8258e-01, 2.3605e-01, 4.6091e-02, 2.8127e-02, 8.7480e-02,\n", + " -3.3150e-02, -6.3353e-02, -7.9963e-02, 5.8394e-02, 5.6279e-02,\n", + " 6.6731e-02, 3.2917e-02, -1.3350e-02, -2.9612e-02, 9.0927e-02,\n", + " 6.2480e-02, -6.3447e-02, 6.6206e-02, 1.2004e-01, 1.4747e-02,\n", + " -1.6508e-02, -1.0005e-02, 4.1171e-02, -3.0836e-02, -9.5519e-02,\n", + " 2.0150e-03, 1.6366e-02, -3.0951e-02, -1.1864e-01, 2.7799e-02,\n", + " -1.2762e-01, -1.6378e-02, 5.5286e-02, -1.1001e-01, -9.1958e-02,\n", + " -1.1602e-01, -3.0696e-02, 2.6005e-02, -9.5703e-02, -1.1909e-01,\n", + " 4.2999e-02, 1.0845e-01, 
6.9688e-04, -9.4792e-02, -1.8722e-01,\n", + " -1.7324e-01, 1.1747e-02, 1.2535e-01, -4.6194e-02, -2.0607e-02,\n", + " -6.7600e-03, -3.8273e-02, 1.2215e-01, -3.3380e-02, 8.3594e-02,\n", + " 1.0551e-01, 6.5389e-02, -5.0340e-02, -2.1293e-02, -8.7716e-02,\n", + " -1.0828e-01, -7.8946e-02, -1.2999e-01, 3.0540e-02, 1.7191e-01,\n", + " -5.3674e-02, 9.6773e-02, -4.7233e-02, 2.1101e-02, 1.3149e-01,\n", + " 2.7261e-04, 1.2085e-01, 1.6334e-01, 2.4205e-01, -2.2790e-02,\n", + " 1.0447e-01, 3.7401e-02, -1.5380e-01, -7.3915e-02, -2.6638e-02,\n", + " -8.2007e-02, -1.1565e-01, -1.3212e-01, 6.2574e-02, 7.9549e-02,\n", + " -6.6135e-02, -4.2917e-02, -5.5928e-02, 6.5440e-02, 1.7348e-01,\n", + " -3.5761e-02, -9.4648e-02, 6.3346e-02, -1.1129e-01, -1.1930e-01,\n", + " 1.4410e-02, -2.4488e-01, -5.3787e-03, 8.4259e-02, -1.6251e-02,\n", + " -5.1348e-02, 4.9270e-02, 4.5308e-02, 1.1668e-01, 3.0874e-02,\n", + " -1.0059e-01, -4.5658e-02, -6.3077e-02, -2.6599e-01, 7.9999e-02,\n", + " -2.5070e-02, 6.0473e-02, 1.7124e-01, 4.5684e-02, 1.5989e-02,\n", + " 2.1243e-02, -2.3085e-03, 4.8934e-02, -1.1428e-01, -5.5781e-02,\n", + " 8.1536e-02, 3.2826e-02, -1.3920e-02, 1.1348e-01, -1.1324e-01,\n", + " -6.2405e-02, 1.4879e-01, 5.6304e-02, 4.2413e-03, -8.6732e-02,\n", + " -3.8706e-02, 9.2358e-02, -1.8792e-01, 2.1576e-01, -1.2017e-02,\n", + " 1.5149e-02, -6.6635e-02, 9.9260e-03, -4.9350e-02, -1.3249e-01,\n", + " -6.0241e-02, 3.2335e-02, 5.2322e-02, 9.6900e-02, 4.9515e-03,\n", + " -7.9186e-03],\n", + " [ 1.3603e-01, -6.7352e-02, -2.7966e-01, 8.2148e-02, -6.7611e-02,\n", + " 2.0793e-02, 1.0225e-01, 1.1764e-01, -1.0157e-01, 8.2719e-02,\n", + " 3.6458e-02, 3.6482e-02, -2.3889e-02, 1.7489e-01, -1.3941e-01,\n", + " -2.7739e-02, 9.8823e-02, -8.0219e-02, -4.7139e-02, 4.4864e-02,\n", + " -8.0784e-02, -6.8581e-02, -2.5900e-02, -1.5247e-02, -2.9433e-01,\n", + " 6.6999e-02, 2.7787e-02, -3.4008e-02, -3.2132e-02, 1.6129e-01,\n", + " 3.3099e-02, -2.4889e-01, -1.4151e-01, 3.4665e-02, -7.2734e-02,\n", + " 1.2508e-01, 
-9.8449e-02, -2.1377e-02, -4.7112e-02, 7.7784e-02,\n", + " -1.4526e-01, -8.2125e-02, -5.7050e-02, -1.5080e-02, 7.6630e-02,\n", + " 1.9037e-02, 5.2296e-02, 8.2584e-02, -1.5881e-02, 1.7238e-01,\n", + " 8.0146e-02, 2.8528e-02, -7.5524e-02, -1.8611e-01, 2.9266e-02,\n", + " 1.4401e-01, -8.4837e-02, 8.3491e-02, 2.0179e-01, -2.6567e-02,\n", + " 1.1892e-02, 4.7864e-02, -8.6363e-03, -1.4446e-02, -5.8189e-02,\n", + " 1.9961e-02, -9.5751e-02, 1.3279e-02, -1.3346e-01, -3.3887e-02,\n", + " 1.0766e-01, -2.7566e-02, 6.8022e-02, 1.0318e-01, -5.9106e-02,\n", + " -1.1512e-01, 4.2301e-03, -5.5006e-02, 2.4342e-02, 9.3417e-02,\n", + " 9.8848e-02, 6.6830e-02, 1.0980e-01, -1.5114e-02, -3.3165e-02,\n", + " -1.3855e-01, 1.2801e-01, -9.2980e-02, 1.4366e-01, -3.9685e-03,\n", + " -7.7200e-02, -5.2525e-03, -6.4132e-02, 3.9983e-02, -7.9012e-02,\n", + " 7.6421e-02, 1.2579e-01, -2.4095e-02, -1.2270e-01, 4.7820e-02,\n", + " 3.9278e-02, -5.5515e-02, -3.4473e-02, 6.2264e-02, 2.9556e-02,\n", + " -2.1679e-02, 3.1932e-02, -3.8919e-03, -6.7579e-03, -4.6533e-02,\n", + " 5.4988e-03, 3.4696e-02, 6.4688e-02, -1.8436e-01, 1.9484e-02,\n", + " 1.0903e-01, -8.0302e-02, 1.1229e-01, 8.2936e-02, -5.2438e-02,\n", + " 1.0991e-01, 4.1805e-02, -5.1728e-02, 4.4836e-02, 6.1983e-02,\n", + " 1.2062e-01, -1.2726e-01, 5.5043e-02, 7.5568e-02, 6.8702e-02,\n", + " -3.1076e-03, 1.3862e-01, 1.7626e-01, 1.3946e-01, 2.7047e-02,\n", + " 2.7864e-02, -8.0505e-02, 4.4494e-02, -1.8062e-02, -1.3545e-01,\n", + " -3.7318e-02, -3.7964e-02, -1.7789e-01, 6.8049e-02, 1.0841e-01,\n", + " 2.4024e-02, -1.2838e-01, -3.2943e-02, -1.6632e-01, -5.1168e-02,\n", + " -3.9820e-02, -6.8890e-02, 2.4528e-02, 1.1240e-02, -1.1426e-01,\n", + " -2.8044e-02, -6.5270e-02, -5.7087e-02, 2.0904e-02, -1.6673e-01,\n", + " -8.1839e-02, -2.4754e-01, -1.9653e-01, -8.4816e-02, 3.3636e-02,\n", + " -4.3611e-02, -8.8370e-02, 4.0351e-02, 4.6051e-02, 4.9134e-02,\n", + " 1.6001e-01, -5.1358e-02, 1.5380e-03, 4.8425e-02, 7.1472e-02,\n", + " -1.4286e-01, -8.8936e-02, 1.9823e-01, 
5.7555e-02, 2.0965e-01,\n", + " 7.9889e-02, -2.5934e-01, -1.3810e-01, 5.4740e-02, -6.8525e-02,\n", + " 1.0861e-01, 1.6838e-01, -2.0420e-02, -3.2166e-02, -1.4300e-01,\n", + " -1.1291e-02, -1.6591e-01, 2.6391e-03, 4.3837e-02, -1.9612e-01,\n", + " -8.1949e-02, -7.7811e-02, -2.9831e-03, -1.4750e-02, -2.7218e-02,\n", + " 5.8562e-02, 5.9297e-02, -3.0173e-02, -5.2490e-02, -6.5504e-02,\n", + " -5.5096e-02, -2.1278e-01, 3.4448e-02, 2.3712e-02, 1.4995e-01,\n", + " -5.2304e-02, 2.0103e-01, 8.4592e-02, -1.9333e-01, 7.4601e-03,\n", + " -1.1167e-01, 1.2646e-01, -2.7272e-02, 4.2437e-02, 4.4445e-02,\n", + " -1.1936e-01, -9.4888e-02, 1.3257e-01, 4.2319e-02, -6.4245e-02,\n", + " -2.3440e-01, 6.3898e-02, -7.6036e-03, -4.6509e-02, 1.3855e-01,\n", + " 1.5516e-02, 1.4544e-03, 4.3514e-02, 1.5257e-01, 3.6230e-02,\n", + " 7.0221e-02, 1.7503e-01, -1.7676e-02, 7.3860e-02, -7.0607e-02,\n", + " -4.9217e-02, -1.3995e-01, -5.4702e-02, 9.6630e-02, 2.8624e-02,\n", + " -7.4663e-02, -6.4043e-02, -4.7874e-02, -1.3468e-01, 1.5756e-02,\n", + " -2.7573e-02, 4.0813e-02, 7.2164e-03, 1.0816e-01, -9.4453e-02,\n", + " 1.8542e-01]]], grad_fn=), tensor([[[ 0.0215, 0.1231, -0.0638, 0.0187, -0.2644, 0.0308, 0.1278,\n", + " -0.1923, 0.1761, 0.0313, -0.3887, -0.2762, -0.0331, -0.2157,\n", + " -0.1169, -0.1993, -0.0309, 0.1140, -0.0784, -0.0100, 0.1152,\n", + " -0.0053, 0.0586, 0.3053, -0.1443, -0.1933, 0.4138, 0.1320,\n", + " 0.1944, -0.1169, 0.0919, -0.0294, -0.2135, 0.1794, 0.3195,\n", + " 0.2934, -0.1493, -0.1875, 0.4139, -0.0284, -0.2233, 0.2729,\n", + " 0.2229, 0.0134, 0.1441, -0.1220, 0.2427, 0.0599, -0.2447,\n", + " -0.2056, -0.0745, -0.1361, -0.2066, 0.2128, -0.3376, -0.3455,\n", + " 0.2020, -0.3326, -0.1131, 0.0389, 0.3143, 0.0481, 0.1865,\n", + " 0.0493, -0.0448, -0.0114, -0.0360, -0.1558, 0.0158, 0.2346,\n", + " 0.0448, -0.0875, 0.2106, -0.0802, 0.0163, -0.2822, -0.2053,\n", + " -0.3022, 0.2704, 0.1748, -0.4137, -0.0462, -0.0552, 0.2426,\n", + " -0.2100, -0.0747, 0.0908, 0.2231, 0.1041, -0.2129, 
0.0481,\n", + " -0.2659, -0.5129, 0.0534, -0.0939, 0.2869, 0.1163, 0.2029,\n", + " 0.0973, -0.1039, -0.1211, -0.0669, -0.0492, -0.1171, 0.2898,\n", + " -0.1569, 0.1462, 0.0930, 0.2558, -0.0028, 0.4722, 0.3414,\n", + " -0.4779, -0.1352, 0.3388, 0.0520, -0.1576, -0.1733, -0.0036,\n", + " 0.1175, -0.0074, -0.1209, -0.1110, 0.3066, -0.1677, -0.4005,\n", + " 0.1238, 0.0240, 0.1342, -0.1947, 0.1602, 0.1644, -0.1760,\n", + " -0.1688, -0.1262, 0.1310, 0.1100, -0.0297, -0.1264, 0.0736,\n", + " 0.2040, 0.2224, -0.0404, 0.0804, -0.0596, -0.0038, -0.1632,\n", + " 0.2849, 0.2944, 0.1974, -0.1983, -0.3721, -0.2297, 0.2601,\n", + " 0.3040, 0.0816, 0.0473, 0.0605, 0.1459, -0.0935, -0.1308,\n", + " 0.0334, 0.0774, 0.0410, -0.1992, -0.4726, 0.0937, -0.3922,\n", + " 0.2706, 0.1369, 0.0331, -0.0064, -0.0064, 0.0590, 0.0459,\n", + " -0.1092, -0.1592, 0.0676, -0.0688, 0.3036, 0.2847, -0.1742,\n", + " 0.0185, 0.2786, 0.3010, -0.1789, -0.0100, 0.0860, 0.0644,\n", + " 0.4199, -0.0131, -0.0131, -0.3550, -0.0619, -0.2744, -0.1237,\n", + " -0.2058, 0.0355, -0.1769, 0.0767, -0.0722, 0.0700, 0.3125,\n", + " 0.1528, 0.1752, -0.0903, -0.0490, -0.2326, -0.0331, 0.2633,\n", + " -0.1920, 0.0042, 0.2633, 0.0300, 0.0111, 0.0524, 0.0232,\n", + " -0.0449, 0.1392, -0.1336, -0.0275, 0.2044, -0.2220, 0.2375,\n", + " -0.1323, -0.0737, 0.2924, -0.1898, -0.2143, 0.2357, -0.1630,\n", + " 0.1608, -0.2075, 0.0772, -0.0597, 0.3471, -0.0414, -0.3160,\n", + " -0.3764, 0.0352, -0.0072, 0.0393, 0.2600, 0.3120, 0.1206,\n", + " 0.0652, -0.5279, 0.1653, -0.1217, -0.0762, -0.0888, -0.1079,\n", + " 0.0247, 0.2520, 0.0952, -0.3274],\n", + " [ 0.1748, -0.1059, -0.3198, 0.5186, -0.2371, -0.2178, 0.2349,\n", + " 0.1604, -0.0453, -0.0654, 0.0616, -0.1189, 0.1377, 0.0229,\n", + " -0.0019, -0.2172, -0.1109, 0.3290, 0.0851, -0.1593, -0.1757,\n", + " 0.1057, -0.1162, -0.4523, 0.2734, -0.2212, 0.2794, 0.1664,\n", + " 0.1513, 0.1416, 0.3451, 0.0537, -0.1281, -0.0565, 0.0674,\n", + " -0.1500, 0.3023, 0.1289, 0.0410, -0.0305, 0.0991, 
-0.0734,\n", + " 0.0843, -0.0225, -0.0398, 0.1359, 0.1802, 0.1637, -0.0770,\n", + " 0.2109, -0.0221, 0.0033, 0.2165, -0.2616, 0.1870, -0.0861,\n", + " 0.0942, -0.1472, -0.0238, 0.1016, 0.2201, 0.0272, 0.0872,\n", + " 0.0414, 0.0776, 0.1133, -0.1591, 0.0262, -0.1012, -0.1846,\n", + " -0.1027, -0.1475, -0.1609, -0.1686, -0.1316, -0.0310, 0.1181,\n", + " 0.2061, -0.0945, -0.1049, -0.2091, -0.0091, 0.2936, -0.1592,\n", + " -0.3545, -0.2662, -0.0014, -0.0807, -0.1385, 0.2148, 0.1063,\n", + " -0.2819, -0.1010, -0.2188, -0.0084, 0.0397, -0.0581, -0.4686,\n", + " 0.0754, -0.0024, -0.1160, 0.2208, 0.1012, 0.1132, -0.1191,\n", + " -0.2292, 0.3214, 0.1448, -0.2169, -0.2951, -0.3646, 0.3798,\n", + " 0.1070, 0.0776, 0.1584, -0.0685, -0.1480, -0.1374, 0.1294,\n", + " 0.1036, 0.1122, 0.0730, -0.0442, -0.0602, 0.1840, 0.1364,\n", + " -0.1272, 0.1230, 0.3239, 0.0268, -0.0417, -0.0179, 0.0947,\n", + " -0.0559, -0.1942, 0.0035, 0.0318, -0.0507, -0.2642, 0.0656,\n", + " -0.2451, -0.0351, 0.1097, -0.2061, -0.2173, -0.3201, -0.0702,\n", + " 0.0417, -0.2167, -0.2739, 0.0675, 0.1890, 0.0012, -0.2239,\n", + " -0.3711, -0.3940, 0.0225, 0.2390, -0.1319, -0.0488, -0.0137,\n", + " -0.1089, 0.2475, -0.0643, 0.1355, 0.2235, 0.1510, -0.0943,\n", + " -0.0554, -0.1638, -0.2327, -0.2027, -0.2545, 0.0799, 0.2791,\n", + " -0.0913, 0.1932, -0.1046, 0.0521, 0.2579, 0.0009, 0.2206,\n", + " 0.3736, 0.3736, -0.0442, 0.1595, 0.0547, -0.2838, -0.1590,\n", + " -0.0483, -0.2983, -0.2004, -0.3418, 0.1489, 0.1367, -0.1541,\n", + " -0.0908, -0.1317, 0.1192, 0.4313, -0.0851, -0.1807, 0.1204,\n", + " -0.1759, -0.2699, 0.0323, -0.4391, -0.0156, 0.1651, -0.0330,\n", + " -0.0990, 0.1090, 0.1267, 0.2578, 0.0501, -0.1733, -0.1170,\n", + " -0.1403, -0.4787, 0.1272, -0.0632, 0.0988, 0.3264, 0.1055,\n", + " 0.0343, 0.0405, -0.0056, 0.1102, -0.2660, -0.1654, 0.1530,\n", + " 0.0636, -0.0223, 0.1924, -0.2318, -0.1286, 0.2206, 0.1257,\n", + " 0.0081, -0.2056, -0.0625, 0.2139, -0.3882, 0.6290, -0.0291,\n", + " 0.0282, -0.1494, 
0.0187, -0.0855, -0.2410, -0.1237, 0.0555,\n", + " 0.1439, 0.1793, 0.0084, -0.0185],\n", + " [ 0.3369, -0.1479, -0.5276, 0.1676, -0.1363, 0.0398, 0.1985,\n", + " 0.2396, -0.1705, 0.1846, 0.0800, 0.0618, -0.0620, 0.2784,\n", + " -0.2426, -0.0458, 0.1770, -0.1390, -0.0845, 0.1183, -0.1349,\n", + " -0.1174, -0.0712, -0.0334, -0.4726, 0.1418, 0.0468, -0.0561,\n", + " -0.0607, 0.2376, 0.0528, -0.5115, -0.2481, 0.1050, -0.1376,\n", + " 0.2348, -0.1902, -0.0503, -0.1183, 0.2031, -0.2646, -0.1380,\n", + " -0.1213, -0.0263, 0.1222, 0.0415, 0.1292, 0.1448, -0.0367,\n", + " 0.3030, 0.1181, 0.0593, -0.1332, -0.4023, 0.0568, 0.2586,\n", + " -0.1512, 0.1804, 0.4000, -0.0546, 0.0219, 0.0816, -0.0157,\n", + " -0.0265, -0.1322, 0.0508, -0.2013, 0.0325, -0.2295, -0.0557,\n", + " 0.1835, -0.0470, 0.1761, 0.2084, -0.0915, -0.3008, 0.0123,\n", + " -0.1079, 0.0595, 0.1632, 0.1830, 0.1273, 0.1986, -0.0415,\n", + " -0.0714, -0.3162, 0.2042, -0.1503, 0.2874, -0.0081, -0.1331,\n", + " -0.0118, -0.1135, 0.0777, -0.1381, 0.1587, 0.2140, -0.0439,\n", + " -0.3028, 0.1019, 0.0596, -0.1013, -0.0633, 0.1042, 0.0591,\n", + " -0.0395, 0.0779, -0.0082, -0.0133, -0.0734, 0.0098, 0.0712,\n", + " 0.1292, -0.3829, 0.0417, 0.2043, -0.1393, 0.2560, 0.1654,\n", + " -0.0879, 0.2017, 0.0713, -0.0830, 0.1072, 0.1062, 0.2560,\n", + " -0.4510, 0.1106, 0.1474, 0.1427, -0.0086, 0.3868, 0.3241,\n", + " 0.4367, 0.0656, 0.0621, -0.1387, 0.1025, -0.0375, -0.2811,\n", + " -0.0774, -0.0591, -0.3121, 0.1357, 0.3206, 0.0585, -0.2302,\n", + " -0.0596, -0.2986, -0.0889, -0.0869, -0.1366, 0.0568, 0.0286,\n", + " -0.1777, -0.0427, -0.1456, -0.1354, 0.0482, -0.3316, -0.1904,\n", + " -0.4873, -0.5369, -0.1222, 0.0694, -0.0939, -0.1674, 0.0686,\n", + " 0.0854, 0.1132, 0.2842, -0.1667, 0.0033, 0.0907, 0.1604,\n", + " -0.2291, -0.2927, 0.3881, 0.0979, 0.4785, 0.1536, -0.4774,\n", + " -0.2153, 0.1427, -0.1574, 0.1796, 0.3557, -0.0449, -0.0526,\n", + " -0.3581, -0.0234, -0.3326, 0.0049, 0.0933, -0.3071, -0.1452,\n", + " -0.2091, 
-0.0060, -0.0311, -0.0465, 0.1057, 0.1421, -0.0608,\n", + " -0.1065, -0.0965, -0.1038, -0.3565, 0.0803, 0.0465, 0.2869,\n", + " -0.0856, 0.3659, 0.1754, -0.3457, 0.0169, -0.2568, 0.2813,\n", + " -0.0543, 0.0783, 0.1306, -0.2800, -0.1639, 0.2811, 0.0763,\n", + " -0.1103, -0.4329, 0.1929, -0.0220, -0.0827, 0.2655, 0.0324,\n", + " 0.0027, 0.0717, 0.2442, 0.0713, 0.1201, 0.2816, -0.0332,\n", + " 0.2074, -0.1304, -0.0855, -0.3259, -0.0834, 0.3056, 0.0815,\n", + " -0.1666, -0.1157, -0.0828, -0.2060, 0.0282, -0.0599, 0.0598,\n", + " 0.0145, 0.2333, -0.1539, 0.4261]]], grad_fn=)))\n" + ] + }, + { + "data": { + "image/png": "iVBORw0KGgoAAAANSUhEUgAAAjsAAAHHCAYAAABZbpmkAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjkuMiwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8hTgPZAAAACXBIWXMAAA9hAAAPYQGoP6dpAABHQElEQVR4nO3deZzNdf//8eeZGTNjmcVoFrJNsqay1URkm4zIZSsMyjKlhSK6KpW1xZJQruSqixlcZClEC7lEikmIaSFbE8rMUJixZIyZ9+8PP+frmMXMmTMzx8fjfrud263z2d6v83Y4z96f9+fzsRljjAAAACzKo6QLAAAAKEqEHQAAYGmEHQAAYGmEHQAAYGmEHQAAYGmEHQAAYGmEHQAAYGmEHQAAYGmEHQAAYGmEHaAI9O/fX9WrVy+RtseOHSubzVYibRfUb7/9JpvNpri4uCJvKy4uTjabTb/99pt9WfXq1XX//fcXeduStGHDBtlsNm3YsKFY2rvSG2+8oZtuukmenp5q0KBBkbZVkt9/ICeEHVyXZs6cKZvNpoiICKePceTIEY0dO1Y7d+50XWH5dPbsWY0dO7bEfjhzY7PZ7C8vLy8FBQWpcePGGjp0qHbt2uWydmbOnFksAckZ7ljbF198oeeee0533323YmNj9frrr5d0Sdns2rVLY8eOdQijgMsY4DrUrFkzU716dSPJ7Nu3z6ljbN261UgysbGx2dadP3/enDt3rpBV5u7YsWNGkhkzZky2dRkZGebvv/8usrbzIsnce++9Zv78+WbevHlmxowZ5pFHHjEBAQHGy8vLvPnmmw7bZ2Vlmb///ttcuHChQO3ccsstpmXLlgXa58KFC+bvv/82WVlZ9mXVqlUzHTt2LNBxnK0tMzPT/P333yYzM9Ol7eXH888/bzw8PEx6enqxtNevXz9TrVq1Au2zdOlSI8msX7++SGrC9Y2RHVx3EhMTtXnzZk2dOlXBwcFasGCBy9soVaqUfHx8XH7c/PDy8pKvr2+JtC1JtWrVUt++ffXQQw9pyJAhev/993XgwAHdcccdGjFihD777DP7tjabTb6+vvL09Cyyes6cOSNJ8vT0lK+vb4md4vPw8JCvr688PIr/n92jR4+qdOnS8vb2dsnxjDH6+++/XXIsoFiUdNoCitsrr7xiypcvb9LT080TTzxhatasmeN2J06cMMOGDTPVqlUz3t7e5sYbbzQPPfSQOXbsmFm/fr2RlO11aZTn8v+zPX/+vClfvrzp379/tjZSU1ONj4+PGTFihDHGmPT0dDNq1CjTqFEj4+/vb8qUKWOaN29uvvzyS/s+iYmJObZ9aZRnzJgx5sq/2hkZGWb8+PHmpptuMt7e3qZatWpm5MiR
2UafLo10fP311+aOO+4wPj4+Jjw83MydOzdffSvJDB48OMd1Bw8eNF5eXqZZs2bZPsvlo2NJSUmmf//+5sYbbzTe3t4mLCzM/OMf/zCJiYn2Gq/87JdGUmJjY40ks2HDBvPEE0+Y4OBgExgY6LDu0nEu/7xr1qwxt99+u/Hx8TF169Y1H330kUPtOfVpTsfMq7ZL35krRy6WLFliGjVqZHx9fU2FChVMnz59zO+//+6wTb9+/UzZsmXN77//bjp37mzKli1rbrjhBjNixIirjorl9T0t6Pdi9erVpnHjxsbHx8dMmzYt1zZzGtn54IMPTKNGjUy5cuWMn5+fqV+/vpk+fbpDP175utRXl9pfv369ady4sfH19TX169e3r//oo49M/fr1jY+Pj2nUqJH5/vvv8+wTXH8Y2cF1Z8GCBerWrZu8vb0VHR2tffv2aevWrQ7bnD59Wi1atNCMGTPUrl07vfXWW3r88cf1yy+/6Pfff1fdunU1fvx4SdKgQYM0f/58zZ8/X/fcc0+29kqVKqWuXbtqxYoVOn/+vMO6FStWKD09Xb169ZIkpaWl6T//+Y9atWqlSZMmaezYsTp27JiioqLsc4OCg4P17rvvSpK6du1qb7tbt265fuZHHnlEo0ePVqNGjTRt2jS1bNlSEyZMsLd7uf379+uBBx7QvffeqzfffFPly5dX//799fPPP+e/k3NQtWpVtWzZUt9++63S0tJy3a579+5avny5BgwYoJkzZ+rpp5/WqVOndOjQIUnS9OnTVblyZdWpU8f+2V966SWHYzz55JPatWuXRo8erRdeeCHPuvbt26eePXvqvvvu04QJE+Tl5aUHH3xQa9euLfBnzE9tl4uLi1OPHj3k6empCRMm6NFHH9WyZcvUvHlznTx50mHbzMxMRUVFqUKFCpoyZYpatmypN998U++9916eNc2fP18tWrSQj49Ptu9pQb4Xe/bsUXR0tO6991699dZbBZrkvHbtWkVHR6t8+fKaNGmSJk6cqFatWmnTpk2SpHvuuUdPP/20JOnFF1+011m3bl37Mfbv36/evXurU6dOmjBhgk6cOKFOnTppwYIFeuaZZ9S3b1+NGzdOBw4cUI8ePZSVlZXv+nAdKOm0BRSnbdu2GUlm7dq1xpiLc0YqV65shg4d6rDd6NGjjSSzbNmybMe4NOcjrzk7V/6f7Zo1a4wks2rVKoftOnToYG666Sb7+wsXLmSbV3HixAkTGhpqBg4caF+W15ydK0chdu7caSSZRx55xGG7Z5991khyGDW6NDKxceNG+7KjR486jD7lRXmM7BhjzNChQ40kk5CQYIzJPrJz4sQJI8m88cYbebaT27yYSyMEzZs3zzbikdvIjiSHkZzU1FRTsWJF07BhQ/uy/I7s5FXblSM758+fNyEhIaZ+/foOc6w++eQTI8mMHj3avqxfv35Gkhk/frzDMRs2bGgaN26cra0rXRoZupwz34vVq1dfta1L7V3+/R86dKjx9/fPcxQqrzk7l9rfvHmzfdmlv1OlS5c2Bw8etC//97//zdwfZMPIDq4rCxYsUGhoqFq3bi3p4pyRnj17atGiRcrMzLRv99FHH+n2229X165dsx3DmTkfbdq00Q033KDFixfbl504cUJr165Vz5497cs8PT3t8yqysrJ0/PhxXbhwQU2aNNH3339f4HYl2efIDB8+3GH5iBEjJEmffvqpw/J69eqpRYsW9vfBwcGqXbu2fv31V6fav1y5cuUkSadOncpx/aV5JRs2bNCJEyecbufRRx/N9zygSpUqOfw5+/v76+GHH9aOHTuUnJzsdA1Xs23bNh09elRPPvmkwxyrjh07qk6dOtn+XCTp8ccfd3jfokULp/9cCvq9CA8PV1RUlFNtBQYG6syZM06Nll1Sr149NW3a1P7+0pWUbdq0UdWqVbMtd8X3FdZB2MF1IzMzU4sWLVLr1q2VmJio/fv3a//+/YqIiFBKSorWrVtn3/bAgQOq
X7++y9r28vJS9+7d9fHHHys9PV2StGzZMmVkZDiEHUmaO3eubrvtNvn6+qpChQoKDg7Wp59+qtTUVKfaPnjwoDw8PHTzzTc7LA8LC1NgYKAOHjzosPzyH45LypcvX6jwccnp06clSX5+fjmu9/Hx0aRJk/T5558rNDRU99xzjyZPnlzg0BEeHp7vbW+++eZsAbZWrVqSVKSXQV/q99q1a2dbV6dOnWx/Lr6+vgoODnZYVpg/l4J+LwrSp1d68sknVatWLd13332qXLmyBg4cqNWrVxfoGFd+LwMCAiRJVapUyXG5K76vsA7CDq4bX375pZKSkrRo0SLVrFnT/urRo4ckFclVWZfr1auXTp06pc8//1yStGTJEtWpU0e33367fZv//ve/6t+/v2rUqKHZs2dr9erVWrt2rdq0aVPoOQj5HZHKbUTEGFOo9iXpp59+kqenZ54/nMOGDdPevXs1YcIE+fr6atSoUapbt6527NiR73ZKly5d6Fovl1vfXT4aWNSK6oq1/H4vCtOnISEh2rlzp1auXKl//OMfWr9+ve677z7169cv38fI7fMX5fcV1kHYwXVjwYIFCgkJ0dKlS7O9oqOjtXz5cvvltDVq1NBPP/2U5/EKejrrnnvuUcWKFbV48WL9+eef+vLLL7ON6nz44Ye66aabtGzZMj300EOKiopSZGSkzp0753Tb1apVU1ZWlvbt2+ewPCUlRSdPnlS1atUK9DmcdejQIX311Vdq2rRpriM7l9SoUUMjRozQF198oZ9++knnz5/Xm2++aV/vysvH9+/fn+2Hce/evZJkvwtw+fLlJSnbpOErRz8KUtulft+zZ0+2dXv27CnyP5fi/l54e3urU6dOmjlzpg4cOKDHHntM8+bN0/79+yW59s8UuBJhB9eFv//+W8uWLdP999+vBx54INtryJAhOnXqlFauXCnp4hVBCQkJWr58ebZjXfphLFu2rKTsP4C58fDw0AMPPKBVq1Zp/vz5unDhQrawc+n/Ui//8d2yZYvi4+MdtitTpky+2+7QoYOki1cKXW7q1KmSLs4RKWrHjx9XdHS0MjMz87w66ezZs9mCXY0aNeTn52c//Sdd7Pv89vvVHDlyxOHPOS0tTfPmzVODBg0UFhZmr0GSNm7caN/uzJkzmjt3brbj5be2Jk2aKCQkRLNmzXL4bJ9//rl2795d5H8uxfm9+Ouvvxzee3h46LbbbpMk+2cv6N8noCC8SroAoDisXLlSp06d0j/+8Y8c19911132Gwz27NlT//znP/Xhhx/qwQcf1MCBA9W4cWMdP35cK1eu1KxZs3T77berRo0aCgwM1KxZs+Tn56eyZcsqIiIiz1M0PXv21IwZMzRmzBjdeuutDpfWStL999+vZcuWqWvXrurYsaMSExM1a9Ys1atXzz7fRbp4SqFevXpavHixatWqpaCgINWvXz/HeUa33367+vXrp/fee08nT55Uy5Yt9d1332nu3Lnq0qWLfbK2q+zdu1f//e9/ZYxRWlqaEhIStHTpUp0+fVpTp05V+/bt89y3bdu26tGjh+rVqycvLy8tX75cKSkpDpdDN27cWO+++65effVV3XzzzQoJCVGbNm2cqrdWrVqKiYnR1q1bFRoaqjlz5iglJUWxsbH2bdq1a6eqVasqJiZG//znP+Xp6ak5c+YoODjYfkl8QWsrVaqUJk2apAEDBqhly5aKjo5WSkqK3nrrLVWvXl3PPPOMU58nv4rze/HII4/o+PHjatOmjSpXrqyDBw9qxowZatCggf3vQIMGDeTp6alJkyYpNTVVPj4+atOmjUJCQlxWB65jJXkpGFBcOnXqZHx9fc2ZM2dy3aZ///6mVKlS5s8//zTGGPPXX3+ZIUOG2G9uV7lyZdOvXz/7emOM+fjjj029evWMl5dXrjcVvFxWVpapUqWKkWReffXVHNe//vrrplq1asbHx8c0bNjQfPLJJzkeb/PmzaZx48bG29s7XzcVHDdunAkP
DzelSpUyVapUyfPmcVdq2bJlvh7PoMtuCOfh4WECAwNNw4YNzdChQ83PP/+cbfsrLz3/888/zeDBg02dOnVM2bJlTUBAgImIiDBLlixx2C85Odl07NjR+Pn55XhTwa1bt2Zr62o3FbztttuMj4+PqVOnjlm6dGm2/bdv324iIiKMt7e3qVq1qpk6dWqOx8ytttxuKrh48WLTsGFD4+PjY4KCgvK8qeCVcrsk/kq57V/Y70Ve7V3+ff3www9Nu3btTEhIiL3/HnvsMZOUlOSw3/vvv29uuukm4+npmeNNBa+kHG51cOk7dbXbF+D6YjOGWVwAAMC6mLMDAAAsjbADAAAsjbADAAAsjbADAAAsjbADAAAsjbADAAAsjZsK6uLTpY8cOSI/Pz9uWQ4AwDXCGKNTp06pUqVK8vDIffyGsKOLt4u/8sm5AADg2nD48GFVrlw51/WEHcn+UMLDhw/L39+/hKsBAAD5kZaWpipVqlz14cKEHf3f03b9/f0JOwAAXGOuNgWFCcoAAMDSCDsAAMDSCDsAAMDSmLMDAJAkZWZmKiMjo6TLAOxKlSolT0/PQh+HsAMA1zljjJKTk3Xy5MmSLgXIJjAwUGFhYYW6Dx5hBwCuc5eCTkhIiMqUKcPNVeEWjDE6e/asjh49KkmqWLGi08ci7ADAdSwzM9MedCpUqFDS5QAOSpcuLUk6evSoQkJCnD6lxQRlALiOXZqjU6ZMmRKuBMjZpe9mYeaTEXYAAJy6gttyxXeTsAMAACyNsAMAwDXIZrNpxYoVRd5Oq1atNGzYsCJvpygxQRkAkKNOnYq3vVWrCrZ9//79dfLkyVx/8BMSEjRq1Ch9++23SktLU1hYmCIiIjRjxgzNnDlT48aNy/P4xhj1799fc+fO1WOPPaZZs2Y5rB88eLBmzpypfv36KS4u7qr11qlTR4mJiTp48KDCwsLy+zE1duxYrVixQjt37nRYnpSUpPLly+f7OFezYcMGtW7dWidOnFBgYKB9+bJly1SqVCmXtVMSGNkBAFjOsWPH1LZtWwUFBWnNmjXavXu3YmNjValSJZ05c0bPPvuskpKS7K/KlStr/PjxDssuqVKlihYtWqS///7bvuzcuXNauHChqlatmq96vvnmG/3999964IEHNHfuXJd8xrCwMPn4+LjkWHkJCgq66lPF3R1hBwBgOZs2bVJqaqr+85//qGHDhgoPD1fr1q01bdo0hYeHq1y5cgoLC7O/PD095efn57DskkaNGqlKlSpatmyZfdmyZctUtWpVNWzYMF/1zJ49W71799ZDDz2kOXPmZFv/+++/Kzo6WkFBQSpbtqyaNGmiLVu2KC4uTuPGjVNCQoJsNptsNpt9FOny01jNmjXT888/73DMY8eOqVSpUtq4caMkaf78+WrSpIn9c/bu3dt+D5vffvtNrVu3liSVL19eNptN/fv3l5T9NNaJEyf08MMPq3z58ipTpozuu+8+7du3z74+Li5OgYGBWrNmjerWraty5cqpffv2DgFyw4YNuvPOO1W2bFkFBgbq7rvv1sGDB/PVl84g7AAALCcsLEwXLlzQ8uXLZYwp9PEGDhyo2NhY+/s5c+ZowIAB+dr31KlTWrp0qfr27at7771Xqamp+vrrr+3rT58+rZYtW+qPP/7QypUrlZCQoOeee05ZWVnq2bOnRowYoVtuucU+4tSzZ89sbfTp00eLFi1y+KyLFy9WpUqV1KJFC0kXL91+5ZVXlJCQoBUrVui3336zB5oqVaroo48+kiTt2bNHSUlJeuutt3L8PP3799e2bdu0cuVKxcfHyxijDh06OFwafvbsWU2ZMkXz58/Xxo0bdejQIT377LOSpAsXLqhLly5q2bKlfvjhB8XHx2vQoEFFekUgc3YA5CqnORsFnVcBlIS77rpLL774onr37q3HH39cd955p9q0aaOHH35YoaGhBT5e3759NXLkSPvow6ZNm7Ro0SJt2LDhqvsuWrRINWvW1C233CJJ
6tWrl2bPnm0PIQsXLtSxY8e0detWBQUFSZJuvvlm+/7lypWTl5dXnvN8evTooWHDhumbb75xOG50dLQ9RAwcONC+/U033aS3335bd9xxh06fPq1y5crZ2w4JCXGYs3O5ffv2aeXKldq0aZOaNWsmSVqwYIGqVKmiFStW6MEHH5R0MVjNmjVLNWrUkCQNGTJE48ePlySlpaUpNTVV999/v3193bp1r9qPhcHIDgDAkl577TUlJydr1qxZuuWWWzRr1izVqVNHP/74Y4GPFRwcrI4dOyouLk6xsbHq2LGjbrjhhnztO2fOHPXt29f+vm/fvlq6dKlOnTolSdq5c6caNmxoDxvOCA4OVrt27bRgwQJJUmJiouLj49WnTx/7Ntu3b1enTp1UtWpV+fn5qWXLlpKkQ4cO5bud3bt3y8vLSxEREfZlFSpUUO3atbV79277sjJlytiDjHTxUQ+XTpkFBQWpf//+ioqKUqdOnfTWW285nOIqCoQdAIBlVahQQQ8++KCmTJmi3bt3q1KlSpoyZYpTxxo4cKDi4uI0d+5ch1GSvOzatUvffvutnnvuOXl5ecnLy0t33XWXzp49q0WLFkn6v0ciFFafPn304YcfKiMjQwsXLtStt96qW2+9VZJ05swZRUVFyd/fXwsWLNDWrVu1fPlySdL58+dd0v7lrrx6y2azOZxii42NVXx8vJo1a6bFixerVq1a+vbbb11exyWEHQDAdcHb21s1atTQmTNnnNq/ffv2On/+vDIyMhQVFZWvfWbPnq177rlHCQkJ2rlzp/01fPhwzZ49W5J02223aefOnTp+/HiudWdmZl61rc6dO+vcuXNavXq1Fi5c6DCq88svv+ivv/7SxIkT1aJFC9WpU8c+0nJ5O5LybKtu3bq6cOGCtmzZYl/2119/ac+ePapXr95Va7xcw4YNNXLkSG3evFn169fXwoULC7R/QRB2AADXrNTUVIcQsXPnTh0+fFiffPKJ+vbtq08++UR79+7Vnj17NGXKFH322Wfq3LmzU215enpq9+7d2rVrV74eSJmRkaH58+crOjpa9evXd3g98sgj2rJli37++WdFR0crLCxMXbp00aZNm/Trr7/qo48+Unx8vCSpevXqSkxM1M6dO/Xnn38qPT09x/bKli2rLl26aNSoUdq9e7eio6Pt66pWrSpvb2/NmDFDv/76q1auXKlXXnnFYf9q1arJZrPpk08+0bFjx3T69OlsbdSsWVOdO3fWo48+qm+++UYJCQnq27evbrzxxnz3a2JiokaOHKn4+HgdPHhQX3zxhfbt21ek83YIOwCAa9aGDRvUsGFDh9e4ceNUr149lSlTRiNGjFCDBg101113acmSJfrPf/6jhx56yOn2/P395e/vn69tV65cqb/++ktdu3bNtq5u3bqqW7euZs+eLW9vb33xxRcKCQlRhw4ddOutt2rixIn2QNW9e3e1b99erVu3VnBwsD744INc2+zTp48SEhLUokULh3sABQcHKy4uTkuXLlW9evU0ceLEbKfzbrzxRo0bN04vvPCCQkNDNWTIkBzbiI2NVePGjXX//feradOmMsbos88+y/eNB8uUKaNffvlF3bt3V61atTRo0CANHjxYjz32WL72d4bNuOKavGtcWlqaAgIClJqamu8vMXA9sMrVWFd+jmvxMxSVc+fOKTExUeHh4fL19S3pcoBs8vqO5vf3m5EdAABgaYQdAABgaYQdAABgaYQdAABgaYQdAIBLnh8FFAVXfDcJOwBwHbt0ufDZs2dLuBIgZ5e+m/m9tD0nPAgUAK5jnp6eCgwMtN9Nt0yZMkX69Gkgv4wxOnv2rI4eParAwMB83cgxN4QdALjOXXqa9pWPDwDcQWBgYJ5PfM8Pwg4AXOdsNpsqVqyokJAQZWRklHQ5gF2pUqUKNaJzCWEHgKXkdNdn5I+np6dLflgAd8MEZQAAYGklGnY2btyoTp06qVKlSrLZbFqxYoXDemOMRo8erYoV
K6p06dKKjIzUvn37HLY5fvy4+vTpI39/fwUGBiomJibHJ7UCAIDrU4mexjpz5oxuv/12DRw4UN26dcu2fvLkyXr77bc1d+5chYeHa9SoUYqKitKuXbvsDwPr06ePkpKStHbtWmVkZGjAgAEaNGiQFi5cWNwfB0AR4xQVAGeUaNi57777dN999+W4zhij6dOn6+WXX1bnzp0lSfPmzVNoaKhWrFihXr16affu3Vq9erW2bt2qJk2aSJJmzJihDh06aMqUKapUqVKxfRYAAOCe3HbOTmJiopKTkxUZGWlfFhAQoIiICMXHx0uS4uPjFRgYaA86khQZGSkPDw9t2bKl2GsGAADux22vxkpOTpYkhYaGOiwPDQ21r0tOTlZISIjDei8vLwUFBdm3yUl6errS09Pt79PS0lxVNgAAcDNuO7JTlCZMmKCAgAD7q0qVKiVdEgAAKCJuG3Yu3S0xJSXFYXlKSop9XVhYWLY7fl64cEHHjx/P826LI0eOVGpqqv11+PBhF1cPAADchduGnfDwcIWFhWndunX2ZWlpadqyZYuaNm0qSWratKlOnjyp7du327f58ssvlZWVpYiIiFyP7ePjI39/f4cXAACwphKds3P69Gnt37/f/j4xMVE7d+5UUFCQqlatqmHDhunVV19VzZo17ZeeV6pUSV26dJEk1a1bV+3bt9ejjz6qWbNmKSMjQ0OGDFGvXr24EgtwIzldMr5qVfHX4WpW/VyA1ZRo2Nm2bZtat25tfz98+HBJUr9+/RQXF6fnnntOZ86c0aBBg3Ty5Ek1b95cq1evtt9jR5IWLFigIUOGqG3btvLw8FD37t319ttvF/tnAQAA7qlEw06rVq1kjMl1vc1m0/jx4zV+/PhctwkKCuIGggAAIFduO2cHAADAFQg7AADA0gg7AADA0gg7AADA0tz2cREAcL3gEnagaDGyAwAALI2wAwAALI3TWACg7KeSOI0EWAcjOwAAwNIIOwAAwNI4jQWgRHDaCEBxYWQHAABYGmEHAABYGmEHAABYGmEHAABYGmEHAABYGmEHAABYGmEHAABYGmEHAABYGmEHAABYGndQBuAWrryjslR0d1XOqa38bMNdnoFrEyM7AADA0gg7AADA0jiNBaBA8vMAz/ycJrqe0T9A8WJkBwAAWBphBwAAWBphBwAAWBphBwAAWBphBwAAWBphBwAAWBqXngPXKe4QDOB6wcgOAACwNMIOAACwNE5jAUA+cedj4NrEyA4AALA0wg4AALA0wg4AALA05uwAgAvl56nwAIoXIzsAAMDSCDsAAMDSCDsAAMDSCDsAAMDSCDsAAMDSCDsAAMDSCDsAAMDSCDsAAMDSuKkgADsedAnAihjZAQAAlkbYAQAAlkbYAQAAlkbYAQAAlkbYAQAAlkbYAQAAlsal5wDcFpfCA3AFRnYAAICluXXYyczM1KhRoxQeHq7SpUurRo0aeuWVV2SMsW9jjNHo0aNVsWJFlS5dWpGRkdq3b18JVg0AANyJW4edSZMm6d1339W//vUv7d69W5MmTdLkyZM1Y8YM+zaTJ0/W22+/rVmzZmnLli0qW7asoqKidO7cuRKsHAAAuAu3nrOzefNmde7cWR07dpQkVa9eXR988IG+++47SRdHdaZPn66XX35ZnTt3liTNmzdPoaGhWrFihXr16lVitQMAAPfg1iM7zZo107p167R3715JUkJCgr755hvdd999kqTExEQlJycrMjLSvk9AQIAiIiIUHx9fIjUDAAD34tYjOy+88ILS0tJUp04deXp6KjMzU6+99pr69OkjSUpOTpYkhYaGOuwXGhpqX5eT9PR0paen29+npaUVQfUAAMAduPXIzpIlS7RgwQItXLhQ33//vebOnaspU6Zo7ty5hTruhAkTFBAQYH9VqVLFRRUDAAB349Zh55///KdeeOEF9erVS7feeqseeughPfPMM5owYYIkKSwsTJKUkpLisF9KSop9XU5Gjhyp1NRU++vw
4cNF9yEAAECJcuuwc/bsWXl4OJbo6emprKwsSVJ4eLjCwsK0bt06+/q0tDRt2bJFTZs2zfW4Pj4+8vf3d3gBAABrcus5O506ddJrr72mqlWr6pZbbtGOHTs0depUDRw4UJJks9k0bNgwvfrqq6pZs6bCw8M1atQoVapUSV26dCnZ4gEAgFtw67AzY8YMjRo1Sk8++aSOHj2qSpUq6bHHHtPo0aPt2zz33HM6c+aMBg0apJMnT6p58+ZavXq1fH19S7ByAADgLmzm8tsRX6fS0tIUEBCg1NRUTmnhuuGq506tWlV0x7YCZ/snp/0AOMrv77dbj+wAcB0CCIDrlVtPUAYAACgswg4AALA0wg4AALA0wg4AALA0wg4AALA0wg4AALA0wg4AALA0wg4AALA0wg4AALA0wg4AALA0wg4AALA0wg4AALA0wg4AALA0wg4AALA0wg4AALA0wg4AALA0r5IuAIDrdepU0hUAgPtgZAcAAFgaYQcAAFgap7EAoAgV5SnFK4+9alXRtQVcyxjZAQAAlkbYAQAAlsZpLABwQ5yiAlyHkR0AAGBphB0AAGBpnMYCgGsAN4oEnMfIDgAAsDTCDgAAsDTCDgAAsDTCDgAAsDTCDgAAsDTCDgAAsDTCDgAAsDTCDgAAsDTCDgAAsDTuoAxcY3K6ky4PiQSA3DGyAwAALI2wAwAALM2psPPrr7+6ug4AAIAi4dScnZtvvlktW7ZUTEyMHnjgAfn6+rq6LgAFwBOxASB3To3sfP/997rttts0fPhwhYWF6bHHHtN3333n6toAAAAKzamw06BBA7311ls6cuSI5syZo6SkJDVv3lz169fX1KlTdezYMVfXCQAA4JRCTVD28vJSt27dtHTpUk2aNEn79+/Xs88+qypVqujhhx9WUlKSq+oEAABwSqHCzrZt2/Tkk0+qYsWKmjp1qp599lkdOHBAa9eu1ZEjR9S5c2dX1QkAAOAUpyYoT506VbGxsdqzZ486dOigefPmqUOHDvLwuJidwsPDFRcXp+rVq7uyVgAAgAJzKuy8++67GjhwoPr376+KFSvmuE1ISIhmz55dqOKA6921cJXVtVAjgOubU2Fn3759V93G29tb/fr1c+bwAAAALuPUnJ3Y2FgtXbo02/KlS5dq7ty5hS4KAADAVZwKOxMmTNANN9yQbXlISIhef/31QhcFAADgKk6FnUOHDik8PDzb8mrVqunQoUOFLgoAAMBVnAo7ISEh+uGHH7ItT0hIUIUKFQpdFAAAgKs4FXaio6P19NNPa/369crMzFRmZqa+/PJLDR06VL169XJ1jQAAAE5z6mqsV155Rb/99pvatm0rL6+Lh8jKytLDDz/MnB0AAOBWnAo73t7eWrx4sV555RUlJCSodOnSuvXWW1WtWjVX1wcAAFAohXpcRK1atfTggw/q/vvvL7Kg88cff6hv376qUKGCPVRt27bNvt4Yo9GjR6tixYoqXbq0IiMj83UfIAAAcH1wamQnMzNTcXFxWrdunY4ePaqsrCyH9V9++aVLijtx4oTuvvtutW7dWp9//rmCg4O1b98+lS9f3r7N5MmT9fbbb2vu3LkKDw/XqFGjFBUVpV27dsnX19cldQCAVeR0x+tVq4q/DqA4ORV2hg4dqri4OHXs2FH169eXzWZzdV2SpEmTJqlKlSqKjY21L7v8kndjjKZPn66XX37Z/tDRefPmKTQ0VCtWrGCyNAAAcC7sLFq0SEuWLFGHDh1cXY+DlStXKioqSg8++KC++uor3XjjjXryySf16KOPSpISExOVnJysyMhI+z4BAQGKiIhQfHx8rmEnPT1d6enp9vdpaWlF+jkAAEDJcWrOjre3t26++WZX15LNr7/+qnfffVc1a9bUmjVr9MQTT+jpp5+2P5IiOTlZkhQaGuqwX2hoqH1dTiZMmKCAgAD7q0qVKkX3IQAAQIlyKuyMGDFCb731lowxrq7HQVZWlho1aqTXX39dDRs21KBBg/To
o49q1qxZhTruyJEjlZqaan8dPnzYRRUDAAB349RprG+++Ubr16/X559/rltuuUWlSpVyWL9s2TKXFFexYkXVq1fPYVndunX10UcfSZLCwsIkSSkpKapYsaJ9m5SUFDVo0CDX4/r4+MjHx8clNQIAAPfmVNgJDAxU165dXV1LNnfffbf27NnjsGzv3r32y9zDw8MVFhamdevW2cNNWlqatmzZoieeeKLI6wMAAO7PqbBz+dVRRemZZ55Rs2bN9Prrr6tHjx767rvv9N577+m9996TJNlsNg0bNkyvvvqqatasab/0vFKlSurSpUux1AgAANybU2FHki5cuKANGzbowIED6t27t/z8/HTkyBH5+/urXLlyLinujjvu0PLlyzVy5EiNHz9e4eHhmj59uvr06WPf5rnnntOZM2c0aNAgnTx5Us2bN9fq1au5xw4AAJAk2YwTs4wPHjyo9u3b69ChQ0pPT9fevXt10003aejQoUpPTy/0BOLilpaWpoCAAKWmpsrf37+kywHscroBHJCb/NwckJsKwkry+/vt1NVYQ4cOVZMmTXTixAmVLl3avrxr165at26dM4cEAAAoEk6dxvr666+1efNmeXt7OyyvXr26/vjjD5cUBgAA4ApOjexkZWUpMzMz2/Lff/9dfn5+hS4KAADAVZwa2WnXrp2mT5/ucFXU6dOnNWbMmCJ/hARgZczRQWEwHwfImVNh580331RUVJTq1aunc+fOqXfv3tq3b59uuOEGffDBB66uEQAAwGlOhZ3KlSsrISFBixYt0g8//KDTp08rJiZGffr0cZiwDAAAUNKcvs+Ol5eX+vbt68paAAAl4MrTX5z6gtU4FXbmzZuX5/qHH37YqWIAAABczamwM3ToUIf3GRkZOnv2rLy9vVWmTBnCDgAAcBtOXXp+4sQJh9fp06e1Z88eNW/enAnKAADArTgVdnJSs2ZNTZw4MduoDwAAQElyWdiRLk5aPnLkiCsPCQAAUChOzdlZuXKlw3tjjJKSkvSvf/1Ld999t0sKAwAAcAWnwk6XLl0c3ttsNgUHB6tNmzZ68803XVEXcE3jTrYA4D6cCjtZWVmurgMAAKBIuHTODgAAgLtxamRn+PDh+d526tSpzjQBAADgEk6FnR07dmjHjh3KyMhQ7dq1JUl79+6Vp6enGjVqZN/OZrO5pkoAAAAnORV2OnXqJD8/P82dO1fly5eXdPFGgwMGDFCLFi00YsQIlxYJAADgLKfm7Lz55puaMGGCPehIUvny5fXqq69yNRYAAHArToWdtLQ0HTt2LNvyY8eO6dSpU4UuCgAAwFWcCjtdu3bVgAEDtGzZMv3+++/6/fff9dFHHykmJkbdunVzdY0AAABOc2rOzqxZs/Tss8+qd+/eysjIuHggLy/FxMTojTfecGmBAAAAheFU2ClTpoxmzpypN954QwcOHJAk1ahRQ2XLlnVpcQAAAIVVqJsKJiUlKSkpSTVr1lTZsmVljHFVXQAAAC7hVNj566+/1LZtW9WqVUsdOnRQUlKSJCkmJobLzgEAgFtx6jTWM888o1KlSunQoUOqW7eufXnPnj01fPhwLj8HcnDlw0F5MCjcFQ+yhdU4FXa++OILrVmzRpUrV3ZYXrNmTR08eNAlhQEAALiCU6exzpw5ozJlymRbfvz4cfn4+BS6KAAAAFdxKuy0aNFC8+bNs7+32WzKysrS5MmT1bp1a5cVBwAAUFhOncaaPHmy2rZtq23btun8+fN67rnn9PPPP+v48ePatGmTq2sEADgpp/k3wPXGqZGd+vXra+/evWrevLk6d+6sM2fOqFu3btqxY4dq1Kjh6hoBAACcVuCRnYyMDLVv316zZs3SSy+9VBQ1AQAAuEyBR3ZKlSqlH374oShqAQAAcDmnTmP17dtXs2fPdnUtAAAALufUBOULFy5ozpw5+t///qfGjRtneybW1KlTXVIcAABAYRUo7Pz666+qXr26fvrpJzVq1EiStHfvXodt
bDab66oDAFwzuEs43FWBwk7NmjWVlJSk9evXS7r4eIi3335boaGhRVIcAABAYRVozs6VTzX//PPPdebMGZcWBAAA4EpOTVC+5MrwAwAA4G4KFHZsNlu2OTnM0QEAAO6sQHN2jDHq37+//WGf586d0+OPP57taqxly5a5rkIAAIBCKFDY6devn8P7vn37urQYAAAAVytQ2ImNjS2qOgAAAIpEoSYoAwAAuDvCDgAAsDSnHhcBoPCuvNssAKBoMLIDAAAsjbADAAAsjdNYwFXwcEMAuLYxsgMAACyNsAMAACyNsAMAACyNsAMAACyNsAMAACztmgo7EydOlM1m07Bhw+zLzp07p8GDB6tChQoqV66cunfvrpSUlJIrEgAAuJVrJuxs3bpV//73v3Xbbbc5LH/mmWe0atUqLV26VF999ZWOHDmibt26lVCVAADA3VwTYef06dPq06eP3n//fZUvX96+PDU1VbNnz9bUqVPVpk0bNW7cWLGxsdq8ebO+/fbbEqwYAAC4i2si7AwePFgdO3ZUZGSkw/Lt27crIyPDYXmdOnVUtWpVxcfH53q89PR0paWlObwAAIA1uf0dlBctWqTvv/9eW7duzbYuOTlZ3t7eCgwMdFgeGhqq5OTkXI85YcIEjRs3ztWl4jrBAzwB4Nri1iM7hw8f1tChQ7VgwQL5+vq67LgjR45Uamqq/XX48GGXHRsAALgXtw4727dv19GjR9WoUSN5eXnJy8tLX331ld5++215eXkpNDRU58+f18mTJx32S0lJUVhYWK7H9fHxkb+/v8MLAABYk1ufxmrbtq1+/PFHh2UDBgxQnTp19Pzzz6tKlSoqVaqU1q1bp+7du0uS9uzZo0OHDqlp06YlUTIAAHAzbh12/Pz8VL9+fYdlZcuWVYUKFezLY2JiNHz4cAUFBcnf319PPfWUmjZtqrvuuqskSgYAAG7GrcNOfkybNk0eHh7q3r270tPTFRUVpZkzZ5Z0WQAAwE1cc2Fnw4YNDu99fX31zjvv6J133imZggAAgFtz6wnKAAAAhUXYAQAAlkbYAQAAlnbNzdkBABS/K+8cvmpVydQBOIORHQAAYGmEHQAAYGmEHQAAYGmEHQAAYGmEHQAAYGmEHQAAYGmEHQAAYGmEHQAAYGmEHQAAYGncQRnXNe4KCzjnyr87gDtjZAcAAFgaYQcAAFgaYQcAAFgaYQcAAFgaYQcAAFgaV2MBAIoNV0CiJDCyAwAALI2wAwAALI2wAwAALI2wAwAALI2wAwAALI2wAwAALI2wAwAALI2wAwAALI2wAwAALI2wAwAALI2wAwAALI2wAwAALI2wAwAALI2wAwAALI2wAwAALI2wAwAALI2wAwAALI2wAwAALI2wAwAALI2wAwAALI2wAwAALI2wAwAALM2rpAsAAOBynTo5vl+1qmTqgHUwsgMAACyNsAMAACyNsAMAACyNsAMAACyNsAMAACyNsAMAACyNsAMAACyNsAMAACyNsAMAACyNOyjDsrgLKwBAYmQHAABYHGEHAABYmluHnQkTJuiOO+6Qn5+fQkJC1KVLF+3Zs8dhm3Pnzmnw4MGqUKGCypUrp+7duyslJaWEKgYAAO7GrefsfPXVVxo8eLDuuOMOXbhwQS+++KLatWunXbt2qWzZspKkZ555Rp9++qmWLl2qgIAADRkyRN26ddOmTZtKuHpci66c5wMAuPa5ddhZvXq1w/u4uDiFhIRo+/btuueee5SamqrZs2dr4cKFatOmjSQpNjZWdevW1bfffqu77rqrJMoGAABuxK1PY10pNTVVkhQUFCRJ2r59uzIyMhQZGWnfpk6dOqpatari4+NzPU56errS0tIcXgAAwJrcemTncllZWRo2bJjuvvtu1a9fX5KUnJwsb29vBQYGOmwbGhqq5OTkXI81YcIEjRs3rijLBYDrXn5OC3PqGMXhmhnZGTx4sH766SctWrSo0McaOXKkUlNT7a/Dhw+7oEIAAOCOromR
nSFDhuiTTz7Rxo0bVblyZfvysLAwnT9/XidPnnQY3UlJSVFYWFiux/Px8ZGPj09RlgwAANyEW4cdY4yeeuopLV++XBs2bFB4eLjD+saNG6tUqVJat26dunfvLknas2ePDh06pKZNm5ZEyQAAF8vPqS7ukI68uHXYGTx4sBYuXKiPP/5Yfn5+9nk4AQEBKl26tAICAhQTE6Phw4crKChI/v7+euqpp9S0aVOuxAIAAJLcPOy8++67kqRWrVo5LI+NjVX//v0lSdOmTZOHh4e6d++u9PR0RUVFaebMmcVcKQAAcFduHXaMMVfdxtfXV++8847eeeedYqgIAABca9w67AD5xeWrAIDcXDOXngMAADiDsAMAACyNsAMAACyNsAMAACyNsAMAACyNq7FwTXLm6iuu2AKA6xMjOwAAwNIIOwAAwNI4jQW3x+knAEBhMLIDAAAsjbADAAAsjbADAAAsjbADAAAsjbADAAAsjbADAAAsjUvP4Xa41ByAK1z5b8mqVSVTB0oeIzsAAMDSCDsAAMDSOI2FEsUpKwDFJad/bzi1dX1gZAcAAFgaYQcAAFgaYQcAAFgac3YAANctLk+/PjCyAwAALI2wAwAALI3TWAAAFACXsF97GNkBAACWRtgBAACWxmksFCvumAwAKG6M7AAAAEsj7AAAAEvjNBbyhRtvAXBnnCJHXhjZAQAAlkbYAQAAlkbYAQAAlsacHQAA8sB8oGsfIzsAAMDSCDsAAMDSOI2FIsPQLwDAHTCyAwAALI2wAwAALI3TWMgmP6efctqGuyoDANwRIzsAAMDSCDsAAMDSCDsAAMDSmLNjccX5tHIuNQdwvcrPv39X/vvL3Mfiw8gOAACwNMIOAACwNE5jAQDw/3E63poY2QEAAJZG2AEAAJZmM8aYki6ipKWlpSkgIECpqany9/cv6XIk5e8qKoZbAcD6iuoKLStcDZbf329GdgAAgKVZJuy88847ql69unx9fRUREaHvvvuupEsCAABuwBJXYy1evFjDhw/XrFmzFBERoenTpysqKkp79uxRSEhIidbGqSYAQGG4alqDq05ROVNPSZ8es8TIztSpU/Xoo49qwIABqlevnmbNmqUyZcpozpw5JV0aAAAoYdd82Dl//ry2b9+uyMhI+zIPDw9FRkYqPj6+BCsDAADu4Jo/jfXnn38qMzNToaGhDstDQ0P1yy+/5LhPenq60tPT7e9TU1MlXZzV7WoZGa45Tk6luerYAIBrh7O/B1ful9M++fkZvHK//NRTBD+v//+4Fw98tQvLr/mw44wJEyZo3Lhx2ZZXqVKlBKrJn4CAkq4AAOAOnP09yM9+zhy7qI5bEKdOnVJAHo1c82HnhhtukKenp1JSUhyWp6SkKCwsLMd9Ro4cqeHDh9vfZ2Vl6fjx46pQoYJsNptL60tLS1OVKlV0+PBht7mHj7uhj/JG/1wdfZQ3+ufq6KO8uWv/GGN06tQpVapUKc/trvmw4+3trcaNG2vdunXq0qWLpIvhZd26dRoyZEiO+/j4+MjHx8dhWWBgYJHW6e/v71ZfEHdEH+WN/rk6+ihv9M/V0Ud5c8f+yWtE55JrPuxI0vDhw9WvXz81adJEd955p6ZPn64zZ85owIABJV0aAAAoYZYIOz179tSxY8c0evRoJScnq0GDBlq9enW2ScsAAOD6Y4mwI0lDhgzJ9bRVSfLx8dGYMWOynTbD/6GP8kb/XB19lDf65+roo7xd6/3Dg0ABAIClXfM3FQQAAMgLYQcAAFgaYQcAAFgaYQcAAFgaYacIHD9+XH369JG/v78CAwMVExOj06dP57nPY489pho1aqh06dIKDg5W586dc322lxUUtI+OHz+up556SrVr11bp0qVVtWpVPf300/bnmlmNM9+h9957T61atZK/v79sNptOnjxZPMUWk3feeUfVq1eXr6+vIiIi9N133+W5/dKlS1WnTh35+vrq1ltv1WeffVZMlZaMgvTPzz//rO7du6t6
9eqy2WyaPn168RVaggrSR++//75atGih8uXLq3z58oqMjLzqd+5aV5D+WbZsmZo0aaLAwECVLVtWDRo00Pz584ux2oIh7BSBPn366Oeff9batWv1ySefaOPGjRo0aFCe+zRu3FixsbHavXu31qxZI2OM2rVrp8zMzGKqungVtI+OHDmiI0eOaMqUKfrpp58UFxen1atXKyYmphirLj7OfIfOnj2r9u3b68UXXyymKovP4sWLNXz4cI0ZM0bff/+9br/9dkVFReno0aM5br9582ZFR0crJiZGO3bsUJcuXdSlSxf99NNPxVx58Sho/5w9e1Y33XSTJk6cmOtjdaymoH20YcMGRUdHa/369YqPj1eVKlXUrl07/fHHH8VcefEoaP8EBQXppZdeUnx8vH744QcNGDBAAwYM0Jo1a4q58nwycKldu3YZSWbr1q32ZZ9//rmx2Wzmjz/+yPdxEhISjCSzf//+oiizRLmqj5YsWWK8vb1NRkZGUZRZYgrbP+vXrzeSzIkTJ4qwyuJ15513msGDB9vfZ2ZmmkqVKpkJEybkuH2PHj1Mx44dHZZFRESYxx57rEjrLCkF7Z/LVatWzUybNq0Iq3MPhekjY4y5cOGC8fPzM3Pnzi2qEktUYfvHGGMaNmxoXn755aIor9AY2XGx+Ph4BQYGqkmTJvZlkZGR8vDw0JYtW/J1jDNnzig2Nlbh4eFu/SR2Z7mijyQpNTVV/v7+8vKyzL0xJbmuf6zi/Pnz2r59uyIjI+3LPDw8FBkZqfj4+Bz3iY+Pd9hekqKionLd/lrmTP9cb1zRR2fPnlVGRoaCgoKKqswSU9j+McZo3bp12rNnj+65556iLNVphB0XS05OVkhIiMMyLy8vBQUFKTk5Oc99Z86cqXLlyqlcuXL6/PPPtXbtWnl7exdluSWiMH10yZ9//qlXXnnlqqd2rkWu6B8r+fPPP5WZmZnt8S+hoaG59kdycnKBtr+WOdM/1xtX9NHzzz+vSpUqZQvRVuBs/6SmpqpcuXLy9vZWx44dNWPGDN17771FXa5TCDv59MILL8hms+X5KuyE4j59+mjHjh366quvVKtWLfXo0UPnzp1z0ScoesXRR5KUlpamjh07ql69eho7dmzhCy8mxdU/AFxr4sSJWrRokZYvXy5fX9+SLsdt+Pn5aefOndq6datee+01DR8+XBs2bCjpsnJkrfH/IjRixAj1798/z21uuukmhYWFZZvQdeHCBR0/fvyqEwEDAgIUEBCgmjVr6q677lL58uW1fPlyRUdHF7b8YlEcfXTq1Cm1b99efn5+Wr58uUqVKlXYsotNcfSPFd1www3y9PRUSkqKw/KUlJRc+yMsLKxA21/LnOmf601h+mjKlCmaOHGi/ve//+m2224ryjJLjLP94+HhoZtvvlmS1KBBA+3evVsTJkxQq1atirJcpxB28ik4OFjBwcFX3a5p06Y6efKktm/frsaNG0uSvvzyS2VlZSkiIiLf7RljZIxRenq60zUXt6Luo7S0NEVFRcnHx0crV6685v4Pq7i/Q1bh7e2txo0ba926derSpYskKSsrS+vWrcv14b9NmzbVunXrNGzYMPuytWvXqmnTpsVQcfFypn+uN8720eTJk/Xaa69pzZo1DnPorMZV36GsrCz3/c0q4QnSltS+fXvTsGFDs2XLFvPNN9+YmjVrmujoaPv633//3dSuXdts2bLFGGPMgQMHzOuvv262bdtmDh48aDZt2mQ6depkgoKCTEpKSkl9jCJV0D5KTU01ERER5tZbbzX79+83SUlJ9teFCxdK6mMUmYL2jzHGJCUlmR07dpj333/fSDIbN240O3bsMH/99VdJfASXWrRokfHx8TFxcXFm165dZtCgQSYwMNAkJycbY4x56KGHzAsvvGDfftOmTcbLy8tMmTLF7N6924wZM8aUKlXK/PjjjyX1EYpUQfsnPT3d7Nixw+zYscNUrFjRPPvss2bHjh1m3759JfURilxB
+2jixInG29vbfPjhhw7/3pw6daqkPkKRKmj/vP766+aLL74wBw4cMLt27TJTpkwxXl5e5v333y+pj5Anwk4R+Ouvv0x0dLQpV66c8ff3NwMGDHD4C5KYmGgkmfXr1xtjjPnjjz/MfffdZ0JCQkypUqVM5cqVTe/evc0vv/xSQp+g6BW0jy5dTp3TKzExsWQ+RBEqaP8YY8yYMWNy7J/Y2Nji/wBFYMaMGaZq1arG29vb3Hnnnebbb7+1r2vZsqXp16+fw/ZLliwxtWrVMt7e3uaWW24xn376aTFXXLwK0j+Xvj9Xvlq2bFn8hRejgvRRtWrVcuyjMWPGFH/hxaQg/fPSSy+Zm2++2fj6+pry5cubpk2bmkWLFpVA1fljM8aYYhtGAgAAKGZcjQUAACyNsAMAACyNsAMAACyNsAMAACyNsAMAACyNsAMAACyNsAMAACyNsAOgSMXFxSkwMLDI2/ntt99ks9m0c+fOIm+rsPr372+/LT+AokfYAeAgPj5enp6e6tixY4H3rV69uqZPn+6wrGfPntq7d6+Lqrsop7BQpUoVJSUlqX79+i5t63JPPfWU6tatm+O6Q4cOydPTUytXriyy9gE4h7ADwMHs2bP11FNPaePGjTpy5Eihj1e6dGmFhIS4oLK8eXp6KiwsTF5eRfd845iYGP3yyy/avHlztnVxcXEKCQlRhw4diqx9AM4h7ACwO336tBYvXqwnnnhCHTt2VFxcXLZtVq1apTvuuEO+vr664YYb1LVrV0lSq1atdPDgQT3zzDOy2Wyy2WySHE9j7d27VzabTb/88ovDMadNm6YaNWpIkjIzMxUTE6Pw8HCVLl1atWvX1ltvvWXfduzYsZo7d64+/vhjezsbNmzI8TTWV199pTvvvFM+Pj6qWLGiXnjhBV24cMG+vlWrVnr66af13HPPKSgoSGFhYRo7dmyu/dOgQQM1atRIc+bMcVhujFFcXJz69esnm82WZ/05yWlErEGDBg61nDx5Uo888oiCg4Pl7++vNm3aKCEhIc/jAriIsAPAbsmSJapTp45q166tvn37as6cObr88Xmffvqpunbtqg4dOmjHjh1at26d7rzzTknSsmXLVLlyZY0fP15JSUlKSkrKdvxatWqpSZMmWrBggcPyBQsWqHfv3pKkrKwsVa5cWUuXLtWuXbs0evRovfjii1qyZIkk6dlnn1WPHj3Uvn17ezvNmjXL1tYff/yhDh066I477lBCQoLeffddzZ49W6+++qrDdnPnzlXZsmW1ZcsWTZ48WePHj9fatWtz7aOYmBgtWbJEZ86csS/bsGGDEhMTNXDgwKvW76wHH3xQR48e1eeff67t27erUaNGatu2rY4fP16o4wLXhZJ9DikAd9KsWTMzffp0Y4wxGRkZ5oYbbnB4snrTpk1Nnz59ct2/WrVqZtq0aQ7LYmNjTUBAgP39tGnTTI0aNezv9+zZYySZ3bt353rcwYMHm+7du9vf9+vXz3Tu3Nlhm0tP8t6xY4cxxpgXX3zR1K5d22RlZdm3eeedd0y5cuVMZmamMebik5ybN2/ucJw77rjDPP/887nWcuLECePr6+vwNPmHHnoo23EKUn9O/Xb77bfbn7D99ddfG39/f3Pu3DmHbWrUqGH+/e9/59ougIsY2QEgSdqzZ4++++47RUdHS5K8vLzUs2dPzZ49277Nzp071bZt20K106tXL/3222/69ttvJV0c1WnUqJHq1Klj3+add95R48aNFRwcrHLlyum9997ToUOHCtTO7t271bRpU/vpNEm6++67dfr0af3+++/2ZbfddpvDfhUrVtTRo0dzPW5gYKC6detmP5WVlpamjz76SDExMS6t/3IJCQk6ffq0KlSooHLlytlfiYmJOnDggNPHBa4XRTeTD8A1Zfbs2bpw4YIqVapkX2aMkY+Pj/71r38pICBApUuXLnQ7YWFhatOmjRYuXKi77rpLCxcu1BNPPGFfv2jRIj377LN688031bRp
U/n5+emNN97Qli1bCt12TkqVKuXw3mazKSsrK899YmJi1LZtW+3fv1/r16+Xp6enHnzwQafr9/DwcDhdKEkZGRn2/z59+rQqVqyoDRs2ZNu3OC7rB651hB0AunDhgubNm6c333xT7dq1c1jXpUsXffDBB3r88cd12223ad26dRowYECOx/H29lZmZuZV2+vTp4+ee+45RUdH69dff1WvXr3s6zZt2qRmzZrpySeftC+7cvQiP+3UrVtXH330kYwx9tGdTZs2yc/PT5UrV75qjXlp3bq1wsPDFRsbq/Xr16tXr14qW7Zsvuu/UnBwsMMcp7S0NCUmJtrfN2rUSMnJyfLy8lL16tULVTtwPeI0FgB98sknOnHihGJiYlS/fn2HV/fu3e2nssaMGaMPPvhAY8aM0e7du/Xjjz9q0qRJ9uNUr15dGzdu1B9//KE///wz1/a6deumU6dO6YknnlDr1q0dRpNq1qypbdu2ac2aNdq7d69GjRqlrVu3OuxfvXp1/fDDD9qzZ4/+/PNPh1GQS5588kkdPnxYTz31lH755Rd9/PHHGjNmjIYPHy4Pj8L902ez2TRw4EC9++67io+PdziFlZ/6r9SmTRvNnz9fX3/9tX788Uf169dPnp6e9vWRkZFq2rSpunTpoi+++EK//fabNm/erJdeeknbtm0r1GcBrgeEHQCaPXu2IiMjFRAQkG1d9+7dtW3bNv3www9q1aqVli5dqpUrV6pBgwZq06aNvvvuO/u248eP12+//aYaNWooODg41/b8/PzUqVMnJSQkqE+fPg7rHnvsMXXr1k09e/ZURESE/vrrL4dREkl69NFHVbt2bTVp0kTBwcHatGlTtjZuvPFGffbZZ/ruu+90++236/HHH1dMTIxefvnlgnZPjvr376/U1FTdcsstioiIKFD9Vxo5cqRatmyp+++/Xx07dlSXLl3sl+JLF8PVZ599pnvuuUcDBgxQrVq11KtXLx08eFChoaEu+TyAldnMlSeKAQAALISRHQAAYGmEHQAAYGmEHQAAYGmEHQAAYGmEHQAAYGmEHQAAYGmEHQAAYGmEHQAAYGmEHQAAYGmEHQAAYGmEHQAAYGmEHQAAYGn/DyCGTFT598z6AAAAAElFTkSuQmCC", + "text/plain": [ + "
" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Gradient for fc layer: torch.Size([3, 5, 5000])\n" + ] + }, + { + "data": { + "image/png": "iVBORw0KGgoAAAANSUhEUgAAAk0AAAHHCAYAAACiOWx7AAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjkuMiwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8hTgPZAAAACXBIWXMAAA9hAAAPYQGoP6dpAABXPklEQVR4nO3dfVyN9/8H8NcpnW6dWulWRYTKwjDVzNdMcxBztyF3qWZDDDE3323uduNuhrmdGdm+X0MbNhqWMBu5y12hhmUxnYp0Do1KfX5/7Heur6PkKqcb83o+Htdjzud6X5/rc33Kzst1rus6CiGEABERERGVy6SmB0BERET0JGBoIiIiIpKBoYmIiIhIBoYmIiIiIhkYmoiIiIhkYGgiIiIikoGhiYiIiEgGhiYiIiIiGRiaiIiIiGRgaCJ6yg0fPhwNGzY0aFMoFJg5c2aNjMeYqus49u/fD4VCgf3790ttL730Ep599tkq3zcAXL58GQqFAjExMdWyvwd9/fXX8PHxgZmZGezs7Kp8f7t27UKrVq1gYWEBhUKBvLy8Kt8nEcDQRFRj0tPTMWbMGDRt2hRWVlawsrKCn58foqKicObMmZoeXpXbsGEDFi9eLLu+YcOGUCgUUCgUMDExgZ2dHfz9/fHmm2/iyJEjNTau6lQbx5aamorhw4ejcePG+OKLL7B69eoq3d+NGzfQv39/WFpaYvny5fj6669hbW1dpfsk0lPwu+eIqt+OHTswYMAA1KlTB4MHD0bLli1hYmKC1NRUbNmyBX/88QfS09PRoEGDKh/L8OHDsX//fly+fFlqu3v3LurUqYM6depU2X579OiBlJQUg/2Wp2HDhnjmmWcwceJEAMCtW7dw/vx5xMbGQqPRYMKECfj0008NtqnMcVR0XABQUlKCwsJCKJVKmJj8/W/Rl156CdevX0dKSorsfio7NiEECgoKYGZmBlNTU6PtT45Vq1Zh1KhRuHDhAry9vat8f7t27UK3bt0QHx+P4ODgKt8f0f2q7v+IRFSmS5cuYeDAgWjQoAESEhLg6upqsH7evHlYsWKF9Ob7MPn5+VX2L2wLC4sq6fdx1a9fH0OGDDFomzdvHgYNGoRFixahSZMmGDVqlLSuqo/j7t27UlCqyTlTKBQ1tv/s7GwAMOrHcn/99ResrKyqbX9EsgkiqlZvvvmmACAOHz4se5uwsDBhbW0tLl68KLp16yZsbGxEr169hBBCHDhwQLz22mvCw8NDKJVK4e7uLsaPHy/++uuvUv1s3bpVNG/eXJibm4vmzZuLLVu2iLCwMNGgQQODOgBixowZBm1Xr14V4eHhwsnJSSiVSuHn5ye+/PJLg5p9+/YJAGLTpk3iww8/FPXr1xfm5ubi5ZdfFhcuXJDqOnbsKAAYLA+O4UENGjQQISEhZa67deuWsLe3F/Xr1xclJSUPPQ6dTifGjRsnGjRoIJRKpXB0dBTBwcEiKSnpkePSH9s333wj3n33XeHm5iYUCoW4efOmtG7fvn0Gx9i8eXNx/PhxERQUJCwsLETDhg3FypUrDca+bt06AUCkp6eXOZf6PssbW3p6ugAg1q1bZ9BHQkKCePHFF4WVlZWwtbUVr776qjh37pxBzYwZMwQAceHCBREWFiZsbW2FSqUSw4cPF/n5+eX8RP7+mTw4pvvne/ny5cLPz08olUrh6uoqRo8eLW7evGnQx/3z1KFDB2FpaSnGjRtX5v7KmoOwsDBp/eHDh0W3bt2EnZ2dsLKyEv7+/mLx4sXlHgNRRfBME1E1
27FjB7y9vREQEFCh7e7duwe1Wo0XX3wRn3zyifQv8djYWPz1118YNWoUHBwccPToUSxduhRXr15FbGystP1PP/2Efv36wc/PD3PmzMGNGzcQHh4Od3f3R+47KysLgYGBUCgUGDNmDBwdHbFz505ERkZCp9Nh/PjxBvVz586FiYkJJk2aBK1Wi/nz52Pw4MHStUfvvvsutFotrl69ikWLFgEAbGxsKjQf97OxsUGfPn3w5Zdf4ty5c2jevHmZdSNHjsS3336LMWPGwM/PDzdu3MCvv/6K8+fPo3Xr1rLG9cEHH0CpVGLSpEkoKCiAUql86Lhu3ryJ7t27o3///ggNDcXmzZsxatQoKJVKREREVOgYKzpne/bsQbdu3dCoUSPMnDkTd+7cwdKlS9G+fXucOHGi1MX//fv3h5eXF+bMmYMTJ05gzZo1cHJywrx58x66j8WLF+Orr77C1q1bsXLlStjY2KBFixYAgJkzZ2LWrFkIDg7GqFGjkJaWhpUrV+LYsWM4ePAgzMzMpH5u3LiBbt26YeDAgRgyZAicnZ0fOgfNmjXD6tWrMXv2bHh5eaFx48YAgPj4ePTo0QOurq4YN24cXFxccP78eezYsQPjxo2TNcdEj1TTqY3oaaLVagUA0bt371Lrbt68KXJycqTl/jNFYWFhAoCYOnVqqe3KOqM0Z84coVAoxB9//CG1tWrVSri6uoq8vDyp7aeffirzLA8eOGMQGRkpXF1dxfXr1w3qBg4cKGxtbaUx6M+O+Pr6ioKCAqluyZIlAoBITk6W2kJCQh55dul+5Z1pEkKIRYsWCQDi+++/f+hx2NraiqioqHL387Bx6Y+tUaNGpeb8YWeaAIiFCxdKbQUFBaJVq1bCyclJFBYWCiHkn2kqb2xlnWnS7+fGjRtS2+nTp4WJiYkYNmyY1KY/0xQREWHQZ58+fYSDg0OpfT1Iv31OTo7Ulp2dLZRKpejSpYsoLi6W2pctWyYAiLVr10pt+nlatWrVI/clxP/m69ixY1LbvXv3hJeXl2jQoEGpM1n3n3kkely8e46oGul0OgBlnyF46aWX4OjoKC3Lly8vVXP/9Tp6lpaW0p/z8/Nx/fp1vPDCCxBC4OTJkwCAzMxMnDp1CmFhYbC1tZXqX3nlFfj5+ZU7ZiEEvvvuO/Ts2RNCCFy/fl1a1Go1tFotTpw4YbBNeHi4wRmYDh06AAB+//33cvf1OPRzeuvWrYfW2NnZ4ciRI7h27Vql9xMWFmYw5+WpU6cO3nrrLem1UqnEW2+9hezsbCQlJVV6DI+i/3kPHz4c9vb2UnuLFi3wyiuv4Mcffyy1zciRIw1ed+jQATdu3JB+Zytiz549KCwsxPjx4w2uzRsxYgRUKhXi4uIM6s3NzREeHl7h/eidPHkS6enpGD9+fKlrnRQKRaX7JXoQQxNRNapbty4A4Pbt26XWff7554iPj8d//vOfMretU6dOmR+lZWRkSG+ONjY2cHR0RMeOHQEAWq0WAPDHH38AAJo0aVJq+2bNmpU75pycHOTl5WH16tUGoc7R0VF6o9NfnKvn6elp8PqZZ54B8PfHVVVFP6f6OS7L/PnzkZKSAg8PD7Rr1w4zZ86scJDz8vKSXevm5lbqYv2mTZsCQIXuzqso/c+7rJ+tr68vrl+/jvz8fIN2Y/7MHrZ/pVKJRo0aSev16tevX+7HnI9y6dIlAKi252LR04vXNBFVI1tbW7i6upZ5G7r+GqeHvZmam5uXuqOuuLgYr7zyCnJzczFlyhT4+PjA2toaf/75J4YPH46SkpLHHrO+jyFDhiAsLKzMGv11LHoPu+1dVOETTvRzWt5t7/3790eHDh2wdetW/PTTT1iwYAHmzZuHLVu2oFu3brL2I/csk1wPOxNSXFxs1P08Sk38zPSMPadEVYWhiaiahYSEYM2aNTh69CjatWv3WH0lJyfjt99+w/r16zFs2DCpPT4+3qBO/7ynCxculOojLS2t3H04Ojqibt26KC4u
NupzcYz5scnt27exdetWeHh4wNfXt9xaV1dXjB49GqNHj0Z2djZat26Njz76SApNxhzXtWvXSj0a4rfffgMA6UJs/RmdB59q/eDZmIqMTf/zLutnm5qainr16lXpAyHv33+jRo2k9sLCQqSnpxv9+Ur6i8FTUlL47CaqUvx4jqiaTZ48GVZWVoiIiEBWVlap9RX5l73+7MD92wghsGTJEoM6V1dXtGrVCuvXr5c+sgP+Dlfnzp175D769euH7777rswzZDk5ObLHez9ra2uDsVTWnTt3MHToUOTm5uLdd98t98zNg/tzcnKCm5sbCgoKjD4u4O87Hj///HPpdWFhIT7//HM4OjqiTZs2AP73hn/gwAGDsZb1ZG25Y7v/531/GEtJScFPP/2E7t27V/aQZAkODoZSqcRnn31m8Lv55ZdfQqvVIiQkxKj7a926Nby8vLB48eJS4bM6zpTR04NnmoiqWZMmTbBhwwaEhoaiWbNm0hPBhRBIT0/Hhg0bYGJiIutRAD4+PmjcuDEmTZqEP//8EyqVCt99912Z16HMmTMHISEhePHFFxEREYHc3FwsXboUzZs3L/Maq/vNnTsX+/btQ0BAAEaMGAE/Pz/k5ubixIkT2LNnD3Jzcys8D23atMGmTZsQHR2N559/HjY2NujZs2e52/z555/SNV+3b9/GuXPnpCeCT5w40eCi6wfdunUL7u7ueO2119CyZUvY2Nhgz549OHbsGBYuXPhY43oYNzc3zJs3D5cvX0bTpk2xadMmnDp1CqtXr5ZuuW/evDkCAwMxbdo05Obmwt7eHhs3bsS9e/dK9VeRsS1YsADdunVDUFAQIiMjpUcO2NraVvn38Tk6OmLatGmYNWsWunbtildffRVpaWlYsWIFnn/++VIPKH1cJiYmWLlyJXr27IlWrVohPDwcrq6uSE1NxdmzZ7F7926j7o+eYjV01x7RU+/ixYti1KhRwtvbW1hYWAhLS0vh4+MjRo4cKU6dOmVQq3+4ZVnOnTsngoODhY2NjahXr54YMWKEOH36dJkPO/zuu++Er6+vMDc3F35+fhV6uGVWVpaIiooSHh4ewszMTLi4uIjOnTuL1atXSzX62+RjY2MNti3rlvjbt2+LQYMGCTs7O9kPt8T/P9BQoVAIlUolmjdvLkaMGCGOHDlS5jb3H0dBQYF45513RMuWLUXdunWFtbW1aNmypVixYoXBNg8b18OO7f51j3q4ZYMGDcSyZctKbX/p0iURHBwszM3NhbOzs/j3v/8t4uPjS/X5sLE97OGWe/bsEe3btxeWlpZCpVKJnj17PvThlvc/MkCIhz8K4UEP216Ivx8x4OPjI8zMzISzs7MYNWrUQx9uKVdZjxzQ+/XXX8Urr7wi/XxbtGghli5dKrtvokfhd88RERERycBrmoiIiIhkYGgiIiIikoGhiYiIiEgGhiYiIiIiGRiaiIiIiGRgaCIiIiKSgQ+3NJKSkhJcu3YNdevW5bdqExERPSGEELh16xbc3NxKfb/ngxiajOTatWvw8PCo6WEQERFRJVy5cuWR38TA0GQkdevWBfD3pKtUqhoeDREREcmh0+ng4eEhvY+Xh6HJSPQfyalUKoYmIiKiJ4ycS2t4ITgRERGRDAxNRERERDIwNBERERHJwGuaqllxcTGKiopqehj0FDAzM4OpqWlND4OI6B+DoamaCCGg0WiQl5dX00Ohp4idnR1cXFz47DAiIiNgaKom+sDk5OQEKysrvolRlRJC4K+//kJ2djYAwNXVtYZHRET05GNoqgbFxcVSYHJwcKjp4dBTwtLSEgCQnZ0NJycnflRHRPSYeCF4NdBfw2RlZVXDI6Gnjf53jtfRERE9PoamasSP5Ki68XeOiMh4GJqIiIiIZGBoIqqk/fv3Q6FQSHdExsTEwM7OrkbHREREVYcXgte0nj2rb1/bt1d4k+HDhyMvLw/btm0rc/3p06fx/vvv4/Dhw9DpdHBxcUFAQACWLl2KFStW
YNasWeX2L4TA8OHDsX79erz11ltYtWqVwfqoqCisWLECYWFhiImJKbefNWvWYO3atTh79ixKSkrQoEEDBAcHY+zYsfD29q7ooVfYgAED0L17d6P2uX//fnTq1Ak3b95kICMiqmE800SVlpOTg86dO8Pe3h67d+/G+fPnsW7dOri5uSE/Px+TJk1CZmamtLi7u2P27NkGbXoeHh7YuHEj7ty5I7XdvXsXGzZsgKenZ7njEEJg0KBBePvtt9G9e3f89NNPOHfuHL788ktYWFjgww8/fOi2hYWFjz8R/8/S0hJOTk5G64+IiGoXhiaqtIMHD0Kr1WLNmjV47rnn4OXlhU6dOmHRokXw8vKCjY0NXFxcpMXU1BR169Y1aNNr3bo1PDw8sGXLFqlty5Yt8PT0xHPPPVfuODZt2oSNGzdi06ZNeP/99xEYGAhPT08EBgZi3rx5WLdunVQ7fPhw9O7dGx999BHc3NzQrFkzAMDXX3+Ntm3bSuMbNGiQ9IwjvR9//BFNmzaFpaUlOnXqhMuXLxusL+vjue+//x6tW7eGhYUFGjVqhFmzZuHevXvSeoVCgTVr1qBPnz6wsrJCkyZN8MMPPwAALl++jE6dOgEAnnnmGSgUCgwfPhwA8O2338Lf3x+WlpZwcHBAcHAw8vPzy50nIiJ6PAxNVGkuLi64d+8etm7dCiHEY/cXERFhEHDWrl2L8PDwR273zTffoFmzZnj11VfLXP/gHWQJCQlIS0tDfHw8duzYAeDvW/I/+OADnD59Gtu2bcPly5elgAIAV65cQd++fdGzZ0+cOnUKb7zxBqZOnVruuH755RcMGzYM48aNw7lz5/D5558jJiYGH330kUHdrFmz0L9/f5w5cwbdu3fH4MGDkZubCw8PD3z33XcAgLS0NGRmZmLJkiXIzMxEaGgoIiIicP78eezfvx99+/Y1ys+AiIgejtc0UaUFBgbi3//+NwYNGoSRI0eiXbt2ePnllzFs2DA4OztXuL8hQ4Zg2rRp+OOPPwD8fSZr48aN2L9/f7nb/fbbb9IZI73x48djzZo1AP7+KpGrV69K66ytrbFmzRoolUqpLSIiQvpzo0aN8Nlnn+H555/H7du3YWNjg5UrV6Jx48ZYuHAhAKBZs2ZITk7GvHnzHjquWbNmYerUqQgLC5P6/eCDDzB58mTMmDFDqhs+fDhCQ0MBAB9//DE+++wzHD16FF27doW9vT0AwMnJSTqLdenSJdy7dw99+/ZFgwYNAAD+/v7lzhER0ROnrGt+K3FtrjHxTBM9lo8++ggajQarVq1C8+bNsWrVKvj4+CA5ObnCfTk6OiIkJAQxMTFYt24dQkJCUK9evUqN691338WpU6cwffp03L5922Cdv7+/QWACgKSkJPTs2ROenp6oW7cuOnbsCADIyMgAAJw/fx4BAQEG2wQFBZU7htOnT2P27NmwsbGRlhEjRiAzMxN//fWXVNeiRQvpz9bW1lCpVKU+Grxfy5Yt0blzZ/j7++P111/HF198gZs3b5Y7FiIienwMTfTYHBwc8Prrr+OTTz7B+fPn4ebmhk8++aRSfUVERCAmJgbr1683OPtTniZNmiAtLc2gzdHREd7e3mVemG1tbW3wOj8/H2q1GiqVCv/9739x7NgxbN26FcDjXSh++/ZtzJo1C6dOnZKW5ORkXLhwARYWFlKdmZmZwXYKhQIlJSUP7dfU1BTx8fHYuXMn/Pz8sHTpUjRr1gzp6emVHisRET0aQxMZlVKpROPGjSt9UXLXrl1RWFiIoqIiqNVqWduEhoYiLS0N33//faX2mZqaihs3bmDu3Lno0KEDfHx8Sp3p8fX1xdGjRw3aDh8+XG6/rVu3RlpaGry9vUstJiby/urpz4gVFxcbtCsUCrRv3x6zZs3CyZMnoVQqpaBHRERVg9c00SNptVqcOnXKoM3BwQGnT5/Gxo0bMXDgQDRt2hRCCGzfvh0//vijwQXdFWFqaorz589Lf5Zj4MCB2LJl
CwYOHIhp06ZBrVbD2dkZf/zxBzZt2vTIfjw9PaFUKrF06VKMHDkSKSkp+OCDDwxqRo4ciYULF+Kdd97BG2+8gaSkpHKfGwUA06dPR48ePeDp6YnXXnsNJiYmOH36NFJSUsp9DML9GjRoAIVCgR07dqB79+6wtLTE2bNnkZCQgC5dusDJyQlHjhxBTk4OfH19ZfVJRESVwzNN9Ej79+/Hc889Z7DMmjULfn5+sLKywsSJE9GqVSsEBgZi8+bNWLNmDYYOHVrp/alUKqhUKtn1CoUCmzZtwuLFi/Hjjz+ic+fOaNasGSIiIuDh4YFff/213O0dHR0RExOD2NhY+Pn5Ye7cuaU+XvT09MR3332Hbdu2oWXLlli1ahU+/vjjcvtVq9XYsWMHfvrpJzz//PMIDAzEokWLpIu35ahfv750QbmzszPGjBkDlUqFAwcOoHv37mjatCnee+89LFy4EN26dZPdLxERVZxC8D5lo9DpdLC1tYVWqy31hn/37l2kp6fDy8vL4FoWoqrG3z0iemJV091z5b1/P4hnmoiIiIhkYGgiIiIikoGhiYiIiEgGhiYiIiIiGRiaqhGvuafqxt85IiLjYWiqBvonPt//1RlE1UH/O/fgU8eJiKji+HDLamBqago7OzvpKdNWVlZQKBQ1PCr6JxNC4K+//kJ2djbs7OxkPyiUiIgejqGpmri4uABAuV/ESmRsdnZ20u8eERE9HoamaqJQKODq6gonJycUFRXV9HDoKWBmZsYzTERERsTQVM1MTU35RkZERPQE4oXgRERERDIwNBERERHJUKOhqWHDhlAoFKWWqKgoAH9/2WhUVBQcHBxgY2ODfv36ISsry6CPjIwMhISEwMrKCk5OTnjnnXdw7949g5r9+/ejdevWMDc3h7e3N2JiYkqNZfny5WjYsCEsLCwQEBCAo0ePVtlxExER0ZOnRkPTsWPHkJmZKS3x8fEAgNdffx0AMGHCBGzfvh2xsbH4+eefce3aNfTt21favri4GCEhISgsLMShQ4ewfv16xMTEYPr06VJNeno6QkJC0KlTJ5w6dQrjx4/HG2+8gd27d0s1mzZtQnR0NGbMmIETJ06gZcuWUKvVvNONiIiIJApRix4ZPH78eOzYsQMXLlyATqeDo6MjNmzYgNdeew0AkJqaCl9fXyQmJiIwMBA7d+5Ejx49cO3aNTg7OwMAVq1ahSlTpiAnJwdKpRJTpkxBXFwcUlJSpP0MHDgQeXl52LVrFwAgICAAzz//PJYtWwYAKCkpgYeHB8aOHYupU6fKGrtOp4OtrS20Wi1UKpUxp4WIiOjp07Nn6bbt242+m4q8f9eaa5oKCwvxn//8BxEREVAoFEhKSkJRURGCg4OlGh8fH3h6eiIxMREAkJiYCH9/fykwAYBarYZOp8PZs2elmvv70Nfo+ygsLERSUpJBjYmJCYKDg6WashQUFECn0xksRERE9M9Va0LTtm3bkJeXh+HDhwMANBoNlEol7OzsDOqcnZ2h0WikmvsDk369fl15NTqdDnfu3MH169dRXFxcZo2+j7LMmTMHtra20uLh4VHhYyYiIqInR60JTV9++SW6desGNze3mh6KLNOmTYNWq5WWK1eu1PSQiIiIqArViodb/vHHH9izZw+2bNkitbm4uKCwsBB5eXkGZ5uysrKkr4VwcXEpdZeb/u66+2sevOMuKysLKpUKlpaW0sMmy6op7+snzM3NYW5uXvGDJSIioidSrTjTtG7dOjg5OSEkJERqa9OmDczMzJCQkCC1paWlISMjA0FBQQCAoKAgJCcnG9zlFh8fD5VKBT8/P6nm/j70Nfo+lEol2rRpY1BTUlKChIQEqYaIiIioxs80lZSUYN26dQgLC0OdOv8bjq2tLSIjIxEdHQ17e3uoVCqMHTsWQUFBCAwMBAB06dIFfn5+GDp0KObPnw+NRoP33nsPUVFR0lmgkSNHYtmyZZg8eTIiIiKwd+9ebN68GXFxcdK+oqOjERYW
hrZt26Jdu3ZYvHgx8vPzER4eXr2TQURERLVWjYemPXv2ICMjAxEREaXWLVq0CCYmJujXrx8KCgqgVquxYsUKab2pqSl27NiBUaNGISgoCNbW1ggLC8Ps2bOlGi8vL8TFxWHChAlYsmQJ3N3dsWbNGqjVaqlmwIAByMnJwfTp06HRaNCqVSvs2rWr1MXhRERE9PSqVc9pepLxOU1ERERGxOc0ERERET2ZGJqIiIiIZGBoIiIiIpKBoYmIiIhIBoYmIiIiIhkYmoiIiIhkYGgiIiIikoGhiYiIiEgGhiYiIiIiGRiaiIiIiGRgaCIiIiKSgaGJiIiISAaGJiIiIiIZGJqIiIiIZGBoIiIiIpKBoYmIiIhIBoYmIiIiIhkYmoiIiIhkYGgiIiIikoGhiYiIiEgGhiYiIiIiGRiaiIiIiGRgaCIiIiKSgaGJiIiISAaGJiIiIiIZGJqIiIiIZGBoIiIiIpKBoYmIiIhIBoYmIiIiIhkYmoiIiIhkYGgiIiIikoGhiYiIiEgGhiYiIiIiGRiaiIiIiGRgaCIiIiKSgaGJiIiISIYaD01//vknhgwZAgcHB1haWsLf3x/Hjx+X1gshMH36dLi6usLS0hLBwcG4cOGCQR+5ubkYPHgwVCoV7OzsEBkZidu3bxvUnDlzBh06dICFhQU8PDwwf/78UmOJjY2Fj48PLCws4O/vjx9//LFqDpqIiIieODUamm7evIn27dvDzMwMO3fuxLlz57Bw4UI888wzUs38+fPx2WefYdWqVThy5Aisra2hVqtx9+5dqWbw4ME4e/Ys4uPjsWPHDhw4cABvvvmmtF6n06FLly5o0KABkpKSsGDBAsycOROrV6+Wag4dOoTQ0FBERkbi5MmT6N27N3r37o2UlJTqmQwiIiKq1RRCCFFTO586dSoOHjyIX375pcz1Qgi4ublh4sSJmDRpEgBAq9XC2dkZMTExGDhwIM6fPw8/Pz8cO3YMbdu2BQDs2rUL3bt3x9WrV+Hm5oaVK1fi3XffhUajgVKplPa9bds2pKamAgAGDBiA/Px87NixQ9p/YGAgWrVqhVWrVj3yWHQ6HWxtbaHVaqFSqR5rXoiIiJ56PXuWbtu+3ei7qcj7d42eafrhhx/Qtm1bvP7663BycsJzzz2HL774Qlqfnp4OjUaD4OBgqc3W1hYBAQFITEwEACQmJsLOzk4KTAAQHBwMExMTHDlyRKr517/+JQUmAFCr1UhLS8PNmzelmvv3o6/R7+dBBQUF0Ol0BgsRERH9c9VoaPr999+xcuVKNGnSBLt378aoUaPw9ttvY/369QAAjUYDAHB2djbYztnZWVqn0Wjg5ORksL5OnTqwt7c3qCmrj/v38bAa/foHzZkzB7a2ttLi4eFR4eMnIiKiJ0eNhqaSkhK0bt0aH3/8MZ577jm8+eabGDFihKyPw2ratGnToNVqpeXKlSs1PSQiIiKqQjUamlxdXeHn52fQ5uvri4yMDACAi4sLACArK8ugJisrS1rn4uKC7Oxsg/X37t1Dbm6uQU1Zfdy/j4fV6Nc/yNzcHCqVymAhIiKif64aDU3t27dHWlqaQdtvv/2GBg0aAAC8vLzg4uKChIQEab1Op8ORI0cQFBQEAAgKCkJeXh6SkpKkmr1796KkpAQBAQFSzYEDB1BUVCTVxMfHo1mzZtKdekFBQQb70dfo90NERERPtxoNTRMmTMDhw4fx8ccf4+LFi9iwYQNWr16NqKgoAIBCocD48ePx4Ycf4ocffkBycjKGDRsGNzc39O7dG8DfZ6a6du2KESNG4OjRozh48CDGjBmDgQMHws3NDQAwaNAgKJVKREZG4uzZs9i0aROWLFmC6OhoaSzjxo3Drl27sHDhQqSmpmLmzJk4fvw4xowZU+3zQkRERLWQqGHbt28Xzz77rDA3Nxc+Pj5i9erVButLSkrE+++/L5ydnYW5ubno3LmzSEtLM6i5ceOGCA0NFTY2NkKlUonw8HBx69Ytg5rT
p0+LF198UZibm4v69euLuXPnlhrL5s2bRdOmTYVSqRTNmzcXcXFxso9Dq9UKAEKr1Vbg6ImIiKhMPXqUXqpARd6/a/Q5Tf8kfE4TERGREfE5TURERERPJoYmIiIiIhkYmoiIiIhkYGgiIiIikoGhiYiIiEgGhiYiIiIiGRiaiIiIiGRgaCIiIiKSgaGJiIiISAaGJiIiIiIZGJqIiIiIZGBoIiIiIpKBoYmIiIhIBoYmIiIiIhkYmoiIiIhkYGgiIiIikoGhiYiIiEgGhiYiIiIiGRiaiIiIiGRgaCIiIiKSgaGJiIiISAaGJiIiIiIZGJqIiIiIZGBoIiIiIpKBoYmIiIhIBoYmIiIiIhkYmoiIiIhkYGgiIiIikoGhiYiIiEgGhiYiIiIiGRiaiIiIiGRgaCIiIiKSgaGJiIiISAaGJiIiIiIZGJqIiIiIZGBoIiIiIpKhRkPTzJkzoVAoDBYfHx9p/d27dxEVFQUHBwfY2NigX79+yMrKMugjIyMDISEhsLKygpOTE9555x3cu3fPoGb//v1o3bo1zM3N4e3tjZiYmFJjWb58ORo2bAgLCwsEBATg6NGjVXLMRERE9GSq8TNNzZs3R2ZmprT8+uuv0roJEyZg+/btiI2Nxc8//4xr166hb9++0vri4mKEhISgsLAQhw4dwvr16xETE4Pp06dLNenp6QgJCUGnTp1w6tQpjB8/Hm+88QZ2794t1WzatAnR0dGYMWMGTpw4gZYtW0KtViM7O7t6JoGIiIhqPYUQQtTUzmfOnIlt27bh1KlTpdZptVo4Ojpiw4YNeO211wAAqamp8PX1RWJiIgIDA7Fz50706NED165dg7OzMwBg1apVmDJlCnJycqBUKjFlyhTExcUhJSVF6nvgwIHIy8vDrl27AAABAQF4/vnnsWzZMgBASUkJPDw8MHbsWEydOlXWseh0Otja2kKr1UKlUj3OtBAREVHPnqXbtm83+m4q8v5d42eaLly4ADc3NzRq1AiDBw9GRkYGACApKQlFRUUIDg6Wan18fODp6YnExEQAQGJiIvz9/aXABABqtRo6nQ5nz56Vau7vQ1+j76OwsBBJSUkGNSYmJggODpZqylJQUACdTmewEBER0T9XjYamgIAAxMTEYNeuXVi5ciXS09PRoUMH3Lp1CxqNBkqlEnZ2dgbbODs7Q6PRAAA0Go1BYNKv168rr0an0+HOnTu4fv06iouLy6zR91GWOXPmwNbWVlo8PDwqNQdERET0ZKhTkzvv1q2b9OcWLVogICAADRo0wObNm2FpaVmDI3u0adOmITo6Wnqt0+kYnIiIiP7BavzjufvZ2dmhadOmuHjxIlxcXFBYWIi8vDyDmqysLLi4uAAAXFxcSt1Np3/9qBqVSgVLS0vUq1cPpqamZdbo+yiLubk5VCqVwUJERET/XLUqNN2+fRuXLl2Cq6sr2rRpAzMzMyQkJEjr09LSkJGRgaCgIABAUFAQkpOTDe5yi4+Ph0qlgp+fn1Rzfx/6Gn0fSqUSbdq0MagpKSlBQkKCVENERERUo6Fp0qRJ+Pnnn3H58mUcOnQIffr0gampKUJDQ2Fra4vIyEhER0dj3759SEpKQnh4OIKCghAYGAgA6NKlC/z8/DB06FCcPn0au3fvxnvvvYeoqCiYm5sDAEaOHInff/8dkydPRmpqKlasWIHNmzdjwoQJ0jiio6PxxRdfYP369Th//jxGjRqF/Px8hIeH18i8EBERUe1To9c0Xb16FaGhobhx4wYcHR3x4osv4vDhw3B0dAQALFq0CCYmJujXrx8KCgqgVquxYsUKaXtTU1Ps2LEDo0aNQlBQEKytrREWFobZs2dLNV5eXoiLi8OECROwZMkSuLu7Y82aNVCr1VLNgAEDkJOTg+nTp0Oj0aBVq1bYtWtXqYvDiYiI6OlVo89p+ifhc5qIiIiMiM9pIiIiInoyMTQRERERycDQRERERCQDQxMRERGRDAxNRERERDIwNBERERHJ
wNBEREREJANDExEREZEMDE1EREREMjA0EREREclQqdD0+++/G3scRERERLVapUKTt7c3OnXqhP/85z+4e/euscdEREREVOtUKjSdOHECLVq0QHR0NFxcXPDWW2/h6NGjxh4bERERUa1RqdDUqlUrLFmyBNeuXcPatWuRmZmJF198Ec8++yw+/fRT5OTkGHucRERERDXqsS4Er1OnDvr27YvY2FjMmzcPFy9exKRJk+Dh4YFhw4YhMzPTWOMkIiIiqlGPFZqOHz+O0aNHw9XVFZ9++ikmTZqES5cuIT4+HteuXUOvXr2MNU4iIiKiGlWnMht9+umnWLduHdLS0tC9e3d89dVX6N69O0xM/s5gXl5eiImJQcOGDY05ViIiIqIaU6nQtHLlSkRERGD48OFwdXUts8bJyQlffvnlYw2OiIiIqLaoVGi6cOHCI2uUSiXCwsIq0z0RERFRrVOpa5rWrVuH2NjYUu2xsbFYv379Yw+KiIiIqLapVGiaM2cO6tWrV6rdyckJH3/88WMPioiIiKi2qVRoysjIgJeXV6n2Bg0aICMj47EHRURERFTbVCo0OTk54cyZM6XaT58+DQcHh8ceFBEREVFtU6nQFBoairfffhv79u1DcXExiouLsXfvXowbNw4DBw409hiJiIiIalyl7p774IMPcPnyZXTu3Bl16vzdRUlJCYYNG8ZrmoiIiOgfqVKhSalUYtOmTfjggw9w+vRpWFpawt/fHw0aNDD2+IiIiIhqhUqFJr2mTZuiadOmxhoLERERUa1VqdBUXFyMmJgYJCQkIDs7GyUlJQbr9+7da5TBEREREdUWlQpN48aNQ0xMDEJCQvDss89CoVAYe1xEREREtUqlQtPGjRuxefNmdO/e3djjISIiIqqVKvXIAaVSCW9vb2OPhYiIiKjWqlRomjhxIpYsWQIhhLHHQ0RERFQrVerjuV9//RX79u3Dzp070bx5c5iZmRms37Jli1EGR0RERFRbVCo02dnZoU+fPsYeCxEREVGtVanQtG7dOmOPg4iIiKhWq9Q1TQBw79497NmzB59//jlu3boFALh27Rpu375ttMERERER1RaVCk1//PEH/P390atXL0RFRSEnJwcAMG/ePEyaNKlSA5k7dy4UCgXGjx8vtd29exdRUVFwcHCAjY0N+vXrh6ysLIPtMjIyEBISAisrKzg5OeGdd97BvXv3DGr279+P1q1bw9zcHN7e3oiJiSm1/+XLl6Nhw4awsLBAQEAAjh49WqnjICIion+mSoWmcePGoW3btrh58yYsLS2l9j59+iAhIaHC/R07dgyff/45WrRoYdA+YcIEbN++HbGxsfj5559x7do19O3bV1pfXFyMkJAQFBYW4tChQ1i/fj1iYmIwffp0qSY9PR0hISHo1KkTTp06hfHjx+ONN97A7t27pZpNmzYhOjoaM2bMwIkTJ9CyZUuo1WpkZ2dX+FiIiIjoH0pUgr29vUhNTRVCCGFjYyMuXbokhBAiPT1dWFpaVqivW7duiSZNmoj4+HjRsWNHMW7cOCGEEHl5ecLMzEzExsZKtefPnxcARGJiohBCiB9//FGYmJgIjUYj1axcuVKoVCpRUFAghBBi8uTJonnz5gb7HDBggFCr1dLrdu3aiaioKOl1cXGxcHNzE3PmzJF9HFqtVgAQWq1W/sETERFR2Xr0KL1UgYq8f1fqTFNJSQmKi4tLtV+9ehV169atUF9RUVEICQlBcHCwQXtSUhKKiooM2n18fODp6YnExEQAQGJiIvz9/eHs7CzVqNVq6HQ6nD17Vqp5sG+1Wi31UVhYiKSkJIMaExMTBAcHSzVlKSgogE6nM1iIiIjon6tSoalLly5YvHix9FqhUOD27duYMWNGhb5aZePGjThx4gTmzJlTap1Go4FSqYSdnZ1Bu7OzMzQajVRzf2DSr9evK69Gp9Phzp07uH79OoqLi8us0fdRljlz5sDW1lZaPDw85B00ERERPZEqFZoWLlyI
gwcPws/PD3fv3sWgQYPQsGFD/Pnnn5g3b56sPq5cuYJx48bhv//9LywsLCozjBo1bdo0aLVaably5UpND4mIiIiqUKWe0+Tu7o7Tp09j48aNOHPmDG7fvo3IyEgMHjzY4MLw8iQlJSE7OxutW7eW2oqLi3HgwAEsW7YMu3fvRmFhIfLy8gzONmVlZcHFxQUA4OLiUuouN/3ddffXPHjHXVZWFlQqFSwtLWFqagpTU9Mya/R9lMXc3Bzm5uayjpWIiIiefJUKTQBQp04dDBkypNI77ty5M5KTkw3awsPD4ePjgylTpsDDwwNmZmZISEhAv379AABpaWnIyMhAUFAQACAoKAgfffQRsrOz4eTkBACIj4+HSqWCn5+fVPPjjz8a7Cc+Pl7qQ6lUok2bNkhISEDv3r0B/H3NVkJCAsaMGVPp4yMiIqJ/lkqFpq+++qrc9cOGDXtkH3Xr1sWzzz5r0GZtbQ0HBwepPTIyEtHR0bC3t4dKpcLYsWMRFBSEwMBAAH9fW+Xn54ehQ4di/vz50Gg0eO+99xAVFSWdBRo5ciSWLVuGyZMnIyIiAnv37sXmzZsRFxcn7Tc6OhphYWFo27Yt2rVrh8WLFyM/Px/h4eEVmhciIiL656pUaBo3bpzB66KiIvz1119QKpWwsrKSFZrkWLRoEUxMTNCvXz8UFBRArVZjxYoV0npTU1Ps2LEDo0aNQlBQEKytrREWFobZs2dLNV5eXoiLi8OECROwZMkSuLu7Y82aNVCr1VLNgAEDkJOTg+nTp0Oj0aBVq1bYtWtXqYvDiYiI6OmlEEIIY3R04cIFjBo1Cu+8845BIHla6HQ62NraQqvVQqVS1fRwiIiInmw9e5Zu277d6LupyPt3pb977kFNmjTB3LlzS52FIiIiIvonMFpoAv6+OPzatWvG7JKIiIioVqjUNU0//PCDwWshBDIzM7Fs2TK0b9/eKAMjIiIiqk0qFZr0t+brKRQKODo64uWXX8bChQuNMS4iIiKiWqVSoamkpMTY4yAiIiKq1Yx6TRMRERHRP1WlzjRFR0fLrv30008rswsiIiKiWqVSoenkyZM4efIkioqK0KxZMwDAb7/9BlNTU4PvklMoFMYZJREREVENq1Ro6tmzJ+rWrYv169fjmWeeAQDcvHkT4eHh6NChAyZOnGjUQRIRERHVtEo9Ebx+/fr46aef0Lx5c4P2lJQUdOnS5al8VhOfCE5ERGRE/5Qngut0OuTk5JRqz8nJwa1btyrTJREREVGtVqnQ1KdPH4SHh2PLli24evUqrl69iu+++w6RkZHo27evscdIREREVOMqdU3TqlWrMGnSJAwaNAhFRUV/d1SnDiIjI7FgwQKjDpCIiIioNqjUNU16+fn5uHTpEgCgcePGsLa2NtrAnjS8pomIiMiI/inXNOllZmYiMzMTTZo0gbW1NR4jfxERERHVapUKTTdu3EDnzp3RtGlTdO/eHZmZmQCAyMhIPm6AiIiI/pEqFZomTJgAMzMzZGRkwMrKSmofMGAAdu3aZbTBEREREdUWlboQ/KeffsLu3bvh7u5u0N6kSRP88ccfRhkYERERUW1SqTNN+fn5BmeY9HJzc2Fubv7YgyIiIiKqbSoVmjp06ICvvvpKeq1QKFBSUoL58+ejU6dORhscERERUW1RqY/n5s+fj86dO+P48eMoLCzE5MmTcfbsWeTm5uLgwYPGHiMRERFRjavUmaZnn30Wv/32G1588UX06tUL+fn56Nu3L06ePInGjRsbe4xERERENa7CZ5qKiorQtWtXrFq1Cu+++25VjImIiIio1qnwmSYzMzOcOXOmKsZCREREVGtV6uO5IUOG4MsvvzT2WIiIiIhqrUpdCH7v3j2sXbsWe/bsQZs2bUp959ynn35qlMERERER1RYVCk2///47GjZsiJSUFLRu3RoA8NtvvxnUKBQK442OiIiIqJaoUGhq0qQJMjMzsW/fPgB/f23KZ599Bmdn5yoZHBEREVFt
UaFrmoQQBq937tyJ/Px8ow6IiIiIqDaq1IXgeg+GKCIiIqJ/qgqFJoVCUeqaJV7DRERERE+DCl3TJITA8OHDpS/lvXv3LkaOHFnq7rktW7YYb4REREREtUCFQlNYWJjB6yFDhhh1MERERES1VYVC07p166pqHERERES12mNdCE5ERET0tGBoIiIiIpKhRkPTypUr0aJFC6hUKqhUKgQFBWHnzp3S+rt37yIqKgoODg6wsbFBv379kJWVZdBHRkYGQkJCYGVlBScnJ7zzzju4d++eQc3+/fvRunVrmJubw9vbGzExMaXGsnz5cjRs2BAWFhYICAjA0aNHq+SYiYiI6MlUo6HJ3d0dc+fORVJSEo4fP46XX34ZvXr1wtmzZwEAEyZMwPbt2xEbG4uff/4Z165dQ9++faXti4uLERISgsLCQhw6dAjr169HTEwMpk+fLtWkp6cjJCQEnTp1wqlTpzB+/Hi88cYb2L17t1SzadMmREdHY8aMGThx4gRatmwJtVqN7Ozs6psMIiIiqtUUopY9odLe3h4LFizAa6+9BkdHR2zYsAGvvfYaACA1NRW+vr5ITExEYGAgdu7ciR49euDatWvSV7msWrUKU6ZMQU5ODpRKJaZMmYK4uDikpKRI+xg4cCDy8vKwa9cuAEBAQACef/55LFu2DABQUlICDw8PjB07FlOnTpU1bp1OB1tbW2i1WqhUKmNOCRER0dOnZ8/Sbdu3G303FXn/rjXXNBUXF2Pjxo3Iz89HUFAQkpKSUFRUhODgYKnGx8cHnp6eSExMBAAkJibC39/f4Lvv1Go1dDqddLYqMTHRoA99jb6PwsJCJCUlGdSYmJggODhYqilLQUEBdDqdwUJERET/XDUempKTk2FjYwNzc3OMHDkSW7duhZ+fHzQaDZRKJezs7AzqnZ2dodFoAAAajabUlwXrXz+qRqfT4c6dO7h+/TqKi4vLrNH3UZY5c+bA1tZWWjw8PCp1/ERERPRkqPHQ1KxZM5w6dQpHjhzBqFGjEBYWhnPnztX0sB5p2rRp0Gq10nLlypWaHhIRERFVoQo93LIqKJVKeHt7AwDatGmDY8eOYcmSJRgwYAAKCwuRl5dncLYpKysLLi4uAAAXF5dSd7np7667v+bBO+6ysrKgUqlgaWkJU1NTmJqallmj76Ms5ubm0tfJEBER0T9fjZ9pelBJSQkKCgrQpk0bmJmZISEhQVqXlpaGjIwMBAUFAQCCgoKQnJxscJdbfHw8VCoV/Pz8pJr7+9DX6PtQKpVo06aNQU1JSQkSEhKkGiIiIqIaPdM0bdo0dOvWDZ6enrh16xY2bNiA/fv3Y/fu3bC1tUVkZCSio6Nhb28PlUqFsWPHIigoCIGBgQCALl26wM/PD0OHDsX8+fOh0Wjw3nvvISoqSjoLNHLkSCxbtgyTJ09GREQE9u7di82bNyMuLk4aR3R0NMLCwtC2bVu0a9cOixcvRn5+PsLDw2tkXoiIiKj2qdHQlJ2djWHDhiEzMxO2trZo0aIFdu/ejVdeeQUAsGjRIpiYmKBfv34oKCiAWq3GihUrpO1NTU2xY8cOjBo1CkFBQbC2tkZYWBhmz54t1Xh5eSEuLg4TJkzAkiVL4O7ujjVr1kCtVks1AwYMQE5ODqZPnw6NRoNWrVph165dpS4OJyIioqdXrXtO05OKz2kiIiIyIj6niYiIiOjJxNBEREREJANDExEREZEMDE1EREREMjA0EREREcnA0EREREQkA0MTERERkQwMTUREREQyMDQRERERycDQRERERCQDQxMRERGRDAxNRERERDIwNBERERHJwNBEREREJANDExEREZEMDE1EREREMjA0EREREcnA0EREREQkA0MTERERkQwMTUREREQyMDQRERERycDQRERERCQDQxMRERGRDAxNRERERDIwNBERERHJwNBEREREJANDExEREZEMDE1EREREMjA0EREREcnA0EREREQkA0MTERERkQwMTUREREQy
MDQRERERycDQRERERCQDQxMRERGRDAxNRERERDLUaGiaM2cOnn/+edStWxdOTk7o3bs30tLSDGru3r2LqKgoODg4wMbGBv369UNWVpZBTUZGBkJCQmBlZQUnJye88847uHfvnkHN/v370bp1a5ibm8Pb2xsxMTGlxrN8+XI0bNgQFhYWCAgIwNGjR41+zERERPRkqtHQ9PPPPyMqKgqHDx9GfHw8ioqK0KVLF+Tn50s1EyZMwPbt2xEbG4uff/4Z165dQ9++faX1xcXFCAkJQWFhIQ4dOoT169cjJiYG06dPl2rS09MREhKCTp064dSpUxg/fjzeeOMN7N69W6rZtGkToqOjMWPGDJw4cQItW7aEWq1GdnZ29UwGERER1WoKIYSo6UHo5eTkwMnJCT///DP+9a9/QavVwtHRERs2bMBrr70GAEhNTYWvry8SExMRGBiInTt3okePHrh27RqcnZ0BAKtWrcKUKVOQk5MDpVKJKVOmIC4uDikpKdK+Bg4ciLy8POzatQsAEBAQgOeffx7Lli0DAJSUlMDDwwNjx47F1KlTHzl2nU4HW1tbaLVaqFQqY08NERHR06Vnz9Jt27cbfTcVef+uVdc0abVaAIC9vT0AICkpCUVFRQgODpZqfHx84OnpicTERABAYmIi/P39pcAEAGq1GjqdDmfPnpVq7u9DX6Pvo7CwEElJSQY1JiYmCA4OlmoeVFBQAJ1OZ7AQERHRP1etCU0lJSUYP3482rdvj2effRYAoNFooFQqYWdnZ1Dr7OwMjUYj1dwfmPTr9evKq9HpdLhz5w6uX7+O4uLiMmv0fTxozpw5sLW1lRYPD4/KHTgRERE9EWpNaIqKikJKSgo2btxY00ORZdq0adBqtdJy5cqVmh4SERERVaE6NT0AABgzZgx27NiBAwcOwN3dXWp3cXFBYWEh8vLyDM42ZWVlwcXFRap58C43/d1199c8eMddVlYWVCoVLC0tYWpqClNT0zJr9H08yNzcHObm5pU7YCIiInri1OiZJiEExowZg61bt2Lv3r3w8vIyWN+mTRuYmZkhISFBaktLS0NGRgaCgoIAAEFBQUhOTja4yy0+Ph4qlQp+fn5Szf196Gv0fSiVSrRp08agpqSkBAkJCVINERERPd1q9ExTVFQUNmzYgO+//x5169aVrh+ytbWFpaUlbG1tERkZiejoaNjb20OlUmHs2LEICgpCYGAgAKBLly7w8/PD0KFDMX/+fGg0Grz33nuIioqSzgSNHDkSy5Ytw+TJkxEREYG9e/di8+bNiIuLk8YSHR2NsLAwtG3bFu3atcPixYuRn5+P8PDw6p8YIiIiqnVqNDStXLkSAPDSSy8ZtK9btw7Dhw8HACxatAgmJibo168fCgoKoFarsWLFCqnW1NQUO3bswKhRoxAUFARra2uEhYVh9uzZUo2Xlxfi4uIwYcIELFmyBO7u7lizZg3UarVUM2DAAOTk5GD69OnQaDRo1aoVdu3aVericCIiIno61arnND3J+JwmIiIiI+JzmoiIiIieTAxNRERERDIwNBERERHJwNBEREREJANDExEREZEMDE1EREREMjA0EREREcnA0EREREQkA0MTERERkQwMTUREREQyMDQRERERycDQRERERCQDQxMRERGRDAxNRERERDIwNBERERHJwNBEREREJANDExEREZEMDE1EREREMjA0EREREcnA0EREREQkA0MTERERkQwMTUREREQyMDQRERERycDQRERERCQDQxMRERGRDAxNRERERDIwNBERERHJwNBEREREJANDExEREZEMDE1EREREMjA0EREREcnA0EREREQkA0MTERERkQwMTUREREQyMDQRERERyVCjoenAgQPo2bMn3NzcoFAosG3bNoP1QghMnz4drq6usLS0RHBwMC5cuGBQk5ubi8GDB0OlUsHOzg6RkZG4ffu2Qc2ZM2fQoUMHWFhYwMPDA/Pnzy81ltjYWPj4+MDCwgL+/v74
8ccfjX68RERE9OSq0dCUn5+Pli1bYvny5WWunz9/Pj777DOsWrUKR44cgbW1NdRqNe7evSvVDB48GGfPnkV8fDx27NiBAwcO4M0335TW63Q6dOnSBQ0aNEBSUhIWLFiAmTNnYvXq1VLNoUOHEBoaisjISJw8eRK9e/dG7969kZKSUnUHT0RERE8UhRBC1PQgAEChUGDr1q3o3bs3gL/PMrm5uWHixImYNGkSAECr1cLZ2RkxMTEYOHAgzp8/Dz8/Pxw7dgxt27YFAOzatQvdu3fH1atX4ebmhpUrV+Ldd9+FRqOBUqkEAEydOhXbtm1DamoqAGDAgAHIz8/Hjh07pPEEBgaiVatWWLVqlazx63Q62NraQqvVQqVSGWtaiIiInk49e5Zu277d6LupyPt3rb2mKT09HRqNBsHBwVKbra0tAgICkJiYCABITEyEnZ2dFJgAIDg4GCYmJjhy5IhU869//UsKTACgVquRlpaGmzdvSjX370dfo98PERERUZ2aHsDDaDQaAICzs7NBu7Ozs7ROo9HAycnJYH2dOnVgb29vUOPl5VWqD/26Z555BhqNptz9lKWgoAAFBQXSa51OV5HDIyIioidMrT3TVNvNmTMHtra20uLh4VHTQyIiIqIqVGtDk4uLCwAgKyvLoD0rK0ta5+LiguzsbIP19+7dQ25urkFNWX3cv4+H1ejXl2XatGnQarXScuXKlYoeIhERET1Bam1o8vLygouLCxISEqQ2nU6HI0eOICgoCAAQFBSEvLw8JCUlSTV79+5FSUkJAgICpJoDBw6gqKhIqomPj0ezZs3wzDPPSDX370dfo99PWczNzaFSqQwWIiIi+ueq0dB0+/ZtnDp1CqdOnQLw98Xfp06dQkZGBhQKBcaPH48PP/wQP/zwA5KTkzFs2DC4ublJd9j5+vqia9euGDFiBI4ePYqDBw9izJgxGDhwINzc3AAAgwYNglKpRGRkJM6ePYtNmzZhyZIliI6OlsYxbtw47Nq1CwsXLkRqaipmzpyJ48ePY8yYMdU9JURERFRbiRq0b98+AaDUEhYWJoQQoqSkRLz//vvC2dlZmJubi86dO4u0tDSDPm7cuCFCQ0OFjY2NUKlUIjw8XNy6dcug5vTp0+LFF18U5ubmon79+mLu3LmlxrJ582bRtGlToVQqRfPmzUVcXFyFjkWr1QoAQqvVVmwSiIiIqLQePUovVaAi79+15jlNTzo+p4mIiMiI+JwmIiIioicTQxMRERGRDAxNRERERDIwNBERERHJwNBEREREJANDExEREZEMDE1EREREMjA0EREREcnA0EREREQkA0MTERERkQwMTUREREQyMDQRERERycDQRERERCQDQxMRERGRDAxNRERERDIwNBERERHJwNBEREREJANDExEREZEMDE1EREREMjA0EREREcnA0EREREQkA0MTERERkQwMTUREREQyMDQRERERycDQRERERCQDQxMRERGRDAxNRERERDIwNBERERHJwNBEREREJANDExEREZEMDE1EREREMjA0EREREcnA0EREREQkA0MTERERkQwMTUREREQyMDQRERERycDQ9IDly5ejYcOGsLCwQEBAAI4ePVrTQyIiIqJagKHpPps2bUJ0dDRmzJiBEydOoGXLllCr1cjOzq7poREREVENY2i6z6effooRI0YgPDwcfn5+WLVqFaysrLB27dqaHhoRERHVMIam/1dYWIikpCQEBwdLbSYmJggODkZiYmINjoyIiIhqgzo1PYDa4vr16yguLoazs7NBu7OzM1JTU0vVFxQUoKCgQHqt1WoBADqdrmoHSkRE9DQoKirdVgXvsfr3bSHEI2sZmippzpw5mDVrVql2Dw+PGhgNERHRU8DWtsq6vnXrFmwf0T9D0/+rV68eTE1NkZWVZdCelZUFFxeXUvXTpk1DdHS09LqkpAS5ublwcHCAQqGo8vHWdjqdDh4eHrhy5QpUKlVND6dGcS4McT4McT7+
h3NhiPPxP1U5F0II3Lp1C25ubo+sZWj6f0qlEm3atEFCQgJ69+4N4O8glJCQgDFjxpSqNzc3h7m5uUGbnZ1dNYz0yaJSqZ76v+x6nAtDnA9DnI//4VwY4nz8T1XNxaPOMOkxNN0nOjoaYWFhaNu2Ldq1a4fFixcjPz8f4eHhNT00IiIiqmEMTfcZMGAAcnJyMH36dGg0GrRq1Qq7du0qdXE4ERERPX0Ymh4wZsyYMj+Oo4oxNzfHjBkzSn2E+TTiXBjifBjifPwP58IQ5+N/astcKISce+yIiIiInnJ8uCURERGRDAxNRERERDIwNBERERHJwNBEREREJANDE1VKbm4uBg8eDJVKBTs7O0RGRuL27dvlbnP37l1ERUXBwcEBNjY26NevX6knsANATEwMWrRoAQsLCzg5OSEqKqqqDsNoqmo+FApFqWXjxo1VeSiPrSp/NwDgxo0bcHd3h0KhQF5eXhUcgXFVxXzcuHEDXbt2hZubG8zNzeHh4YExY8Y8Ed99WRXzcfr0aYSGhsLDwwOWlpbw9fXFkiVLqvpQHltV/V15++230aZNG5ibm6NVq1ZVeASPZ/ny5WjYsCEsLCwQEBCAo0ePllsfGxsLHx8fWFhYwN/fHz/++KPBeiEEpk+fDldXV1haWiI4OBgXLlww7qAFUSV07dpVtGzZUhw+fFj88ssvwtvbW4SGhpa7zciRI4WHh4dISEgQx48fF4GBgeKFF14wqFm4cKFwc3MT//3vf8XFixfF6dOnxffff1+Vh2IUVTUfAMS6detEZmamtNy5c6cqD+WxVdVc6PXq1Ut069ZNABA3b96sgiMwrqqYj9zcXLFixQpx7NgxcfnyZbFnzx7RrFmzR/ZbG1TFfHz55Zfi7bffFvv37xeXLl0SX3/9tbC0tBRLly6t6sN5LFX1d2Xs2LFi2bJlYujQoaJly5ZVeASVt3HjRqFUKsXatWvF2bNnxYgRI4SdnZ3Iysoqs/7gwYPC1NRUzJ8/X5w7d0689957wszMTCQnJ0s1c+fOFba2tmLbtm3i9OnT4tVXXxVeXl5G/X8mQxNV2Llz5wQAcezYMalt586dQqFQiD///LPMbfLy8oSZmZmIjY2V2s6fPy8AiMTERCHE328ElpaWYs+ePVV7AEZWVfMhxN+haevWrVU2dmOryrkQQogVK1aIjh07ioSEhCciNFX1fNxvyZIlwt3d3XiDrwLVOR+jR48WnTp1Mt7gjaw65mLGjBm1NjS1a9dOREVFSa+Li4uFm5ubmDNnTpn1/fv3FyEhIQZtAQEB4q233hJCCFFSUiJcXFzEggULpPV5eXnC3NxcfPPNN0YbNz+eowpLTEyEnZ0d2rZtK7UFBwfDxMQER44cKXObpKQkFBUVITg4WGrz8fGBp6cnEhMTAQDx8fEoKSnBn3/+CV9fX7i7u6N///64cuVK1R7QY6qq+dCLiopCvXr10K5dO6xduxaiFj9arSrn4ty5c5g9eza++uormJg8Gf/rqurfDb1r165hy5Yt6Nixo3EPwMiqaz4AQKvVwt7e3niDN7LqnIvaprCwEElJSQbHYWJiguDg4IceR2JiokE9AKjVaqk+PT0dGo3GoMbW1hYBAQFGnZsn4/88VKtoNBo4OTkZtNWpUwf29vbQaDQP3UapVJb6UmNnZ2dpm99//x0lJSX4+OOPsXjxYnz77bfIzc3FK6+8gsLCwio5FmOoqvkAgNmzZ2Pz5s2Ij49Hv379MHr0aCxdutTox2AsVTUXBQUFCA0NxYIFC+Dp6VklY68KVfm7AQChoaGwsrJC/fr1oVKpsGbNGqOO39iqej70Dh06hE2bNuHNN980yrirQnXNRW10/fp1FBcXl/qKsvKOQ6PRlFuv/29F+qwMhiaSTJ06tcwLj+9fUlNTq2z/JSUlKCoqwmeffQa1Wo3AwEB88803uHDhAvbt21dl+32Ymp4PAHj//ffRvn17PPfcc5gyZQomT56MBQsWVOk+y1LT
czFt2jT4+vpiyJAhVbaPiqjp+dBbtGgRTpw4ge+//x6XLl1CdHR0le+zLLVlPgAgJSUFvXr1wowZM9ClS5dq2ef9atNckPHxu+dIMnHiRAwfPrzcmkaNGsHFxQXZ2dkG7ffu3UNubi5cXFzK3M7FxQWFhYXIy8sz+FdSVlaWtI2rqysAwM/PT1rv6OiIevXqISMjoxJH9Hhqej7KEhAQgA8++AAFBQXV+h1MNT0Xe/fuRXJyMr799lsAkD6irFevHt59913MmjWrkkdWOTU9H/fXuri4wMfHB/b29ujQoQPef/996e9Sdakt83Hu3Dl07twZb775Jt57771KHcvjqi1zUZvVq1cPpqampe76K+84XFxcyq3X/zcrK8vg9z8rK8u4dxAa7eooemroL2A8fvy41LZ7925ZFzB+++23UltqaqrBBYxpaWkCgMGF4Ddu3BAmJiZi9+7dVXQ0j6+q5qMsH374oXjmmWeMN3gjq6q5uHjxokhOTpaWtWvXCgDi0KFDD73bpjaozt+Nn3/+WQAQ6enpRhu/sVXlfKSkpAgnJyfxzjvvVN0BGFF1/G7U9gvBx4wZI70uLi4W9evXL/dC8B49ehi0BQUFlboQ/JNPPpHWa7Vao18IztBEldK1a1fx3HPPiSNHjohff/1VNGnSxOBW2atXr4pmzZqJI0eOSG0jR44Unp6eYu/eveL48eMiKChIBAUFGfTbq1cv0bx5c3Hw4EGRnJwsevToIfz8/ERhYWG1HVtlVMV8/PDDD+KLL74QycnJ4sKFC2LFihXCyspKTJ8+vVqPraKq6nfjfvv27Xsi7p4TomrmIy4uTqxdu1YkJyeL9PR0sWPHDuHr6yvat29frcdWGVUxH8nJycLR0VEMGTLE4PEc2dnZ1XpsFVVVf1cuXLggTp48Kd566y3RtGlTcfLkSXHy5ElRUFBQbcf2KBs3bhTm5uYiJiZGnDt3Trz55pvCzs5OaDQaIYQQQ4cOFVOnTpXqDx48KOrUqSM++eQTcf78eTFjxowyHzlgZ2cnvv/+e3HmzBnRq1cvPnKAaocbN26I0NBQYWNjI1QqlQgPDxe3bt2S1qenpwsAYt++fVLbnTt3xOjRo8UzzzwjrKysRJ8+fURmZqZBv1qtVkRERAg7Ozthb28v+vTpIzIyMqrrsCqtKuZj586dolWrVsLGxkZYW1uLli1bilWrVoni4uLqPLQKq6rfjfs9SaGpKuZj7969IigoSNja2goLCwvRpEkTMWXKlKd2PmbMmCEAlFoaNGhQjUdWcVX1d6Vjx45lzkdtOwu5dOlS4enpKZRKpWjXrp04fPiwtK5jx44iLCzMoH7z5s2iadOmQqlUiubNm4u4uDiD9SUlJeL9998Xzs7OwtzcXHTu3FmkpaUZdcwKIWrx/ctEREREtQTvniMiIiKSgaGJiIiISAaGJiIiIiIZGJqIiIiIZGBoIiIiIpKBoYmIiIhIBoYmIiIiIhkYmojoH2n48OHo3bu39Pqll17C+PHja2w8j+tJHz/RPwFDExFVOY1Gg3HjxsHb2xsWFhZwdnZG+/btsXLlSvz111/VMoYtW7bggw8+MGqfDwazsvTs2RNdu3Ytc90vv/wChUKBM2fOGHVcRFQ16tT0AIjon+33339H+/btYWdnh48//hj+/v4wNzdHcnIyVq9ejfr16+PVV18tc9uioiKYmZkZZRz29vZG6aeiIiMj0a9fP1y9ehXu7u4G69atW4e2bduiRYsWNTI2IqoYnmkioio1evRo1KlTB8ePH0f//v3h6+uLRo0aoVevXoiLi0PPnj2lWoVCgZUrV+LVV1+FtbU1PvroIxQXFyMyMhJeXl6wtLREs2bNsGTJEoN9FBcXIzo6GnZ2dnBwcMDkyZPx4DdEPfjxVkFBASZNmoT69evD2toaAQEB2L9/v7Q+JiYGdnZ22L17N3x9fWFjY4OuXbsiMzMTADBz5kysX78e33//PRQKBRQKhcH2ej169ICj
oyNiYmIM2m/fvo3Y2FhERkbixo0bCA0NRf369WFlZQV/f39888035c6rQqHAtm3bDNrs7OwM9nPlyhX0798fdnZ2sLe3R69evXD58uVy+yWih2NoIqIqc+PGDfz000+IioqCtbV1mTUKhcLg9cyZM9GnTx8kJycjIiICJSUlcHd3R2xsLM6dO4fp06fj3//+NzZv3ixts3DhQsTExGDt2rX49ddfkZubi61bt5Y7tjFjxiAxMREbN27EmTNn8Prrr6Nr1664cOGCVPPXX3/hk08+wddff40DBw4gIyMDkyZNAgBMmjQJ/fv3l4JUZmYmXnjhhVL7qVOnDoYNG4aYmBiDIBcbG4vi4mKEhobi7t27aNOmDeLi4pCSkoI333wTQ4cOxdGjRx89yQ9RVFQEtVqNunXr4pdffsHBgwel4FdYWFjpfomeakb9+l8iovscPnxYABBbtmwxaHdwcBDW1tbC2tpaTJ48WWoHIMaPH//IfqOiokS/fv2k166urmL+/PnS66KiIuHu7i569eoltXXs2FGMGzdOCCHEH3/8IUxNTcWff/5p0G/nzp3FtGnThBBCrFu3TgAQFy9elNYvX75cODs7S6/DwsIM9vEw58+fL/Vt9R06dBBDhgx56DYhISFi4sSJZY5fiL/nauvWrQbb2NrainXr1gkhhPj6669Fs2bNRElJibS+oKBAWFpait27dz9yzERUGq9pIqJqd/ToUZSUlGDw4MEoKCgwWNe2bdtS9cuXL8fatWuRkZGBO3fuoLCwEK1atQIAaLVaZGZmIiAgQKqvU6cO2rZtW+ojOr3k5GQUFxejadOmBu0FBQVwcHCQXltZWaFx48bSa1dXV2RnZ1f4eH18fPDCCy9g7dq1eOmll3Dx4kX88ssvmD17NoC/P178+OOPsXnzZvz5558oLCxEQUEBrKysKrwvvdOnT+PixYuoW7euQfvdu3dx6dKlSvdL9DRjaCKiKuPt7Q2FQoG0tDSD9kaNGgEALC0tS23z4Md4GzduxKRJk7Bw4UIEBQWhbt26WLBgAY4cOVLpcd2+fRumpqZISkqCqampwTobGxvpzw9ehK5QKB4axB4lMjISY8eOxfLly7Fu3To0btwYHTt2BAAsWLAAS5YsweLFi+Hv7w9ra2uMHz++3I/RyhpLUVGRwTG2adMG//3vf0tt6+joWKljIHra8ZomIqoyDg4OeOWVV7Bs2TLk5+dXqo+DBw/ihRdewOjRo/Hcc8/B29vb4EyJra0tXF1dDULUvXv3kJSU9NA+n3vuORQXFyM7Oxve3t4Gi4uLi+yxKZVKFBcXy6rt378/TExMsGHDBnz11VeIiIiQruc6ePAgevXqhSFDhqBly5Zo1KgRfvvtt3L7c3R0lC5KB4ALFy4YPL6hdevWuHDhApycnEodo62trexjJKL/YWgioiq1YsUK3Lt3D23btsWmTZtw/vx5pKWl4T//+Q9SU1NLnel5UJMmTXD8+HHs3r0bv/32G95//30cO3bMoGbcuHGYO3cutm3bhtTUVIwePRp5eXkP7bNp06YYPHgwhg0bhi1btiA9PR1Hjx7FnDlzEBcXJ/vYGjZsiDNnziAtLQ3Xr183ONPzIBsbGwwYMADTpk1DZmYmhg8fbnCM8fHxOHToEM6fP4+33noLWVlZ5e775ZdfxrJly3Dy5EkcP34cI0eONDgzNnjwYNSrVw+9evXCL7/8gvT0dOzfvx9vv/02rl69KvsYieh/GJqIqEo1btwYJ0+eRHBwMKZNm4aWLVuibdu2WLp0KSZNmvTIB06+9dZb6Nu3LwYMGICAgADcuHEDo0ePNqiZOHEihg4dirCwMOkjvD59+pTb77p16zBs2DBMnDgRzZo1Q+/evXHs2DF4enrKPrYRI0agWbNmaNu2LRwdHXHw4MFy6yMjI3Hz5k2o1Wq4ublJ7e+99x5at24NtVqNl156CS4uLo98aObChQvh4eGBDh06YNCgQZg0aZLBNVBWVlY4
cOAAPD090bdvX/j6+iIyMhJ3796FSqWSfYxE9D8KUdkP6ImIiIieIjzTRERERCQDQxMRERGRDAxNRERERDIwNBERERHJwNBEREREJANDExEREZEMDE1EREREMjA0EREREcnA0EREREQkA0MTERERkQwMTUREREQyMDQRERERyfB/iISOwAcaIQ0AAAAASUVORK5CYII=", + "text/plain": [ + "
" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "\n", + "# After training, you can analyze the activations and gradients\n", + "# Example: Plot the activations of the LSTM layer\n", + "layer_name, activation = activations[0] # Assuming lstm activation\n", + "print(f\"Activation for {layer_name} layer: \", activation)\n", + "\n", + "# Plotting histogram of activations of the LSTM layer\n", + "plt.hist(activation[0].detach().numpy().flatten(), bins=100, alpha=0.7, color='b', label='LSTM Activations')\n", + "plt.title(f\"Activation Distribution for {layer_name}\")\n", + "plt.xlabel(\"Activation Value\")\n", + "plt.ylabel(\"Frequency\")\n", + "plt.legend()\n", + "plt.show()\n", + "\n", + "# Example: Plot the gradients of the LSTM layer\n", + "layer_name, grad = gradients[0] # Assuming lstm gradient\n", + "print(f\"Gradient for {layer_name} layer: \", grad[0].shape)\n", + "\n", + "# Plotting histogram of gradients of the LSTM layer\n", + "plt.hist(grad[0].numpy().flatten(), bins=100, alpha=0.7, color='r', label='LSTM Gradients')\n", + "plt.title(f\"Gradient Distribution for {layer_name}\")\n", + "plt.xlabel(\"Gradient Value\")\n", + "plt.ylabel(\"Frequency\")\n", + "plt.legend()\n", + "plt.show()\n" + ] + }, + { + "cell_type": "code", + "execution_count": 17, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Activation for fc layer: torch.Size([3, 5, 5000])\n" + ] + }, + { + "data": { + "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAkQAAAHHCAYAAABeLEexAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjkuMiwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8hTgPZAAAACXBIWXMAAA9hAAAPYQGoP6dpAABJCUlEQVR4nO3dd3wVVf7/8fdNwk0oKZQUWCD03luM9CJBIitFhRiaRl01qAhYWBUQXVGQthb46iJBRdoKSlEwUkQhgiCgoFRBRBJAgYSAhJTz+4Nf7nJJgOSmM6/n43EfD+/MuXM+M7mYd86cmbEZY4wAAAAszK2oCwAAAChqBCIAAGB5BCIAAGB5BCIAAGB5BCIAAGB5BCIAAGB5BCIAAGB5BCIAAGB5BCIAAGB5BCKgBBg+fLhq1KhRJH1PmDBBNputSPrOrSNHjshmsykmJqbA+4qJiZHNZtORI0ccy2rUqKE77rijwPuWpA0bNshms2nDhg2F0t/VpkyZolq1asnd3V0tWrQo8P4++OADNWjQQKVKlZKfn1+B9wfrIRAB+eDtt9+WzWZTSEiIy9s4fvy4JkyYoJ07d+ZfYTl04cIFTZgwoch+uV6LzWZzvDw8PFShQgW1bt1aTzzxhH766ad86+ftt98ulBDliuJY2xdffKGnn35a7du319y5c/XKK68UaH979+7V8OHDVbt2bb377rt65513CrQ/WJONZ5kBede+fXsdP35cR44c0YEDB1SnTp1cb2Pbtm1q27at5s6dq+HDhzutS01NVUZGhjw9PfOpYmd//PGH/P39NX78eE2YMMFpXVpamtLS0uTl5VUgfV+PzWbTbbfdpqFDh8oYo8TERO3atUtLlizR+fPn9dprr2nUqFGO9sYYpaSkqFSpUnJ3d89xP02aNFGlSpVyFQjT09OVmpoqT09PxwhajRo11KRJE61cuTLH23G1toyMDF26dEl2u11uboX7t+2zzz6rKVOm6K+//pLdbi/w/mbPnq1HHnnE5X9bQE4wQgTk0eHDh7V582ZNmzZN/v7+mj9/fr73UapUqQILQzfi4eFRJGEoU7169TR48GANGTJEI0aM0LvvvqtDhw6pbdu2Gj16tD777DNHW5vNJi8vr1yFodw6f/68JMnd3V1eXl5FdjrRzc1NXl5ehR6GJOnkyZMqXbp0voUhY4z++uuv6/YniVNlKFgGQJ689NJLpnz58iYlJcU88sgjpm7dutm2O3PmjBk5cqQJDg42drvd/O1vfzNDhgwxp06dMuvXrzeSsrzmzp1rjDFm2LBhJjg42BhjzKVLl0z58uXN8OHDs/SRmJhoPD09zejRo40xxqSkpJgXXnjBtGrVyvj4+JgyZcqYDh06mHXr1jk+c/jw4Wz7Hj9+vDHGmPHjx5ur/1eRmppqJk6caGrVqmXsdrsJDg42Y8eONRcvXnRqFxwcbMLDw83XX39t2rZtazw9PU3NmjXNvHnzcnRsJZno6Ohs1/3666/Gw8PD3HrrrVn2JfO4GWNMfHy8GT58uPnb3/5m7Ha7CQoKMn//+9/N4cOHHTVeve+dO3c2xhgzd+5cI8ls2LDBPPLII8bf39/4+fk5rcvczpX7u2bNGtO8eXPj6elpGjZsaD7++GOn2rM7ptlt83q1ZX5n1q9f77SNxYsXm1atWhkvLy9TsWJFExkZaY4dO+bUZtiwYaZs2bLm2LFj5s477zRly5Y1lSpVMqNHjzZpaWnZHu9M1/ue5vZ7sXr1atO6dWvj6elppk+fnm1/2R2DzO+mMcZ89tlnplOnTqZcuXLG29vbtGnTxsyfP/+6+wBkhxEiII/mz5+v/v37y263KyIiQgcOHNB3333n1CY5OVkdO3bUG2+8oZ49e2rmzJl6+OGHtXfvXh07dkwNGzbUxIkTJUkPPfSQPvjgA33wwQfq1KlTlv5KlSqlfv366ZNPPtGlS5ec1n3yySdKSUnRoEGDJElJSUn6z3/+oy5duui1117ThAk
TdOrUKYWFhTnmKvn7+2vWrFmSpH79+jn67t+//zX3+YEHHtC4cePUqlUrTZ8+XZ07d9akSZMc/V7p4MGDuuuuu3Tbbbdp6tSpKl++vIYPH649e/bk/CBno3r16urcubO+/fZbJSUlXbPdgAEDtGzZMt133316++239fjjj+vcuXM6evSoJGnGjBmqWrWqGjRo4Nj35557zmkbjz76qH766SeNGzdOzz777HXrOnDggAYOHKjbb79dkyZNkoeHh+6++27Fxsbmeh9zUtuVYmJidM8998jd3V2TJk3Sgw8+qKVLl6pDhw46e/asU9v09HSFhYWpYsWKev3119W5c2dNnTr1hvNzPvjgA3Xs2FGenp5Zvqe5+V7s27dPERERuu222zRz5sxrTsyeMWOG+vXrJ0maNWuW03czJiZG4eHhOn36tMaOHatXX31VLVq00OrVq6+7D0C2ijqRASXZtm3bjCQTGxtrjDEmIyPDVK1a1TzxxBNO7caNG2ckmaVLl2bZRkZGhjHGmO+++y7L6EamK0eIjDFmzZo1RpJZsWKFU7vevXubWrVqOd6npaWZlJQUpzZnzpwxgYGB5v7773csO3XqVJa/vDNdPZqxc+dOI8k88MADTu3GjBljJDmNPmX+db9x40bHspMnTzqNYl2PrjNCZIwxTzzxhJFkdu3aZYzJOkJ05swZI8lMmTLluv00btzYMfJypcwRmw4dOmQZObnWCJEkpxGhxMREU7lyZdOyZUvHspyOEF2vtqtHiC5dumQCAgJMkyZNzF9//eVot3LlSiPJjBs3zrFs2LBhRpKZOHGi0zZbtmxpWrdunaWvq2WOMF3Jle/F6tWrb9iXMf87XqdOnXIsO3v2rPH29jYhISFO+2vM//5NAbnBCBGQB/Pnz1dgYKC6du0q6fIcloEDB2rhwoVKT093tPv444/VvHlzx1+6V3JlDkq3bt1UqVIlLVq0yLHszJkzio2N1cCBAx3L3N3dHfM8MjIydPr0aaWlpalNmzb6/vvvc92vJMecnSsnM0vS6NGjJUmrVq1yWt6oUSN17NjR8d7f31/169fXL7/84lL/VypXrpwk6dy5c9muz5znsmHDBp05c8blfh588MEcz0uqUqWK08/Zx8dHQ4cO1Y4dO5SQkOByDTeybds2nTx5Uo8++qjTnK/w8HA1aNAgy89Fkh5++GGn9x07dnT555Lb70XNmjUVFhbmUl+SFBsbq3PnzunZZ5/NMsetpNwmAsULgQhwUXp6uhYuXKiuXbvq8OHDOnjwoA4ePKiQkBCdOHFCa9eudbQ9dOiQmjRpkm99e3h4aMCAAfr000+VkpIiSVq6dKlSU1OdApEkzZs3T82aNZOXl5cqVqwof39/rVq1SomJiS71/euvv8rNzS3L1T5BQUHy8/PTr7/+6rS8evXqWbZRvnz5PAWUTMnJyZIkb2/vbNd7enrqtdde0+eff67AwEB16tRJkydPznUwqVmzZo7b1qlTJ8sv5Hr16kmS0z2L8lvmca9fv36WdQ0aNMjyc/Hy8pK/v7/Tsrz8XHL7vcjNMc3OoUOHJClf/13B2ghEgIvWrVun+Ph4LVy4UHXr1nW87rnnHkkqkKvNrjRo0CCdO3dOn3/+uSRp8eLFatCggZo3b+5o8+GHHzru3zJnzhytXr1asbGx6tatmzIyMvLUf07/Cr/WyIrJhzt+7N69W+7u7tf95Tpy5Ejt379fkyZNkpeXl1544QU1bNhQO3bsyHE/pUuXznOtV7rWsbtyVLGgFdSVeDn9XuT3MQXyikAEuGj+/PkKCAjQkiVLsrwiIiK0bNkyx6XEtWvX1u7du6+7vdwO83fq1EmVK1fWokWL9Mcff2jdunVZRof++9//qlatWlq6dKmGDBmisLAw9ejRQxcvXnS57+DgYGVkZOjAgQNOy0+cOKGzZ88qODg4V/vhqqNHj+qrr75SaGjoNUeIMtWuXVujR4/WF198od27d+vSpUuaOnWqY31
+nmI5ePBglrC3f/9+SXLcbbx8+fKSlGWi89WjKLmpLfO479u3L8u6ffv2FfjPpbC/F7Vr15akG/67AnKKQAS44K+//tLSpUt1xx136K677sryGjFihM6dO6fly5dLunyl065du7Rs2bIs28r85Vm2bFlJWX9JXoubm5vuuusurVixQh988IHS0tKyBKLMUYArf0Fv2bJFcXFxTu3KlCmT47579+4t6fLVP1eaNm2apMtzVgra6dOnFRERofT09OtedXXhwoUs4a927dry9vZ2nGqULh/7nB73Gzl+/LjTzzkpKUnvv/++WrRooaCgIEcNkrRx40ZHu/Pnz2vevHlZtpfT2tq0aaOAgADNnj3bad8+//xz/fzzzwX+cyns70XPnj3l7e2tSZMmZfkZ58foI6zHo6gLAEqi5cuX69y5c/r73/+e7fpbbrnFcZPGgQMH6qmnntJ///tf3X333br//vvVunVrnT59WsuXL9fs2bPVvHlz1a5dW35+fpo9e7a8vb1VtmxZhYSEXPd00MCBA/XGG29o/Pjxatq0qRo2bOi0/o477tDSpUvVr18/hYeH6/Dhw5o9e7YaNWrkmH8jXT590ahRIy1atEj16tVThQoV1KRJk2znZzRv3lzDhg3TO++8o7Nnz6pz587aunWr5s2bp759+zommOeX/fv368MPP5QxRklJSY47VScnJ2vatGnq1avXdT/bvXt33XPPPWrUqJE8PDy0bNkynThxwulS8NatW2vWrFl6+eWXVadOHQUEBKhbt24u1VuvXj1FRUXpu+++U2BgoN577z2dOHFCc+fOdbTp2bOnqlevrqioKD311FNyd3fXe++9J39/f8ftAHJbW6lSpfTaa6/pvvvuU+fOnRUREaETJ05o5syZqlGjhp588kmX9ienCvt74ePjo+nTp+uBBx5Q27Ztde+996p8+fLatWuXLly4kG24BK6rKC9xA0qqPn36GC8vL3P+/Plrthk+fLgpVaqU+eOPP4wxxvz5559mxIgRjhsEVq1a1QwbNsyx3hhjPv30U9OoUSPj4eFxzRszXikjI8NUq1bNSDIvv/xytutfeeUVExwcbDw9PU3Lli3NypUrs93e5s2bTevWrY3dbs/RjRlffPFFU7NmTVOqVClTrVq1696A72qdO3fO9lLyq+mKm/G5ubkZPz8/07JlS/PEE0+YPXv2ZGl/9WX3f/zxh4mOjjYNGjQwZcuWNb6+viYkJMQsXrzY6XMJCQkmPDzceHt7Z3tjxu+++y5LXze6MWOzZs2Mp6enadCggVmyZEmWz2/fvt2EhIQYu91uqlevbqZNm5btNq9V27VuzLho0SLTsmVL4+npaSpUqHDdGzNe7Vq3A7jatT6f1+/FtWR32X2m5cuXm1tvvdWULl3a+Pj4mHbt2pkFCxbkeNtAJp5lBgAALI85RAAAwPIIRAAAwPIIRAAAwPIIRAAAwPIIRAAAwPIIRAAAwPK4MWMOZGRk6Pjx4/L29uYpygAAlBDGGJ07d05VqlSRm9v1x4AIRDlw/PhxVatWrajLAAAALvjtt99UtWrV67YhEOVA5oMjf/vtN/n4+BRxNQAAICeSkpJUrVq1Gz4AWiIQ5UjmaTIfHx8CEQAAJUxOprswqRoAAFgegQgAAFgegQgAAFgec4gAAEUiPT1dqampRV0GSji73X7DS+pzgkAEAChUxhglJCTo7NmzRV0KbgJubm6qWbOm7HZ7nrZDIAIAFKrMMBQQEKAyZcpww1u4LPPGyfHx8apevXqevksEIgBAoUlPT3eEoYoVKxZ1ObgJ+Pv76/jx40pLS1OpUqVc3g6TqgEAhSZzzlCZMmWKuBLcLDJPlaWnp+dpO0UaiCZNmqS2bdvK29tbAQEB6tu3r/bt2+fUpkuXLrLZbE6vhx9+2KnN0aNHFR4erjJlyiggIEBPPfWU0tLSnNps2LBBrVq1kqenp+rUqaOYmJiC3j0AwDVwmgz5Jb++S0UaiL766it
FR0fr22+/VWxsrFJTU9WzZ0+dP3/eqd2DDz6o+Ph4x2vy5MmOdenp6QoPD9elS5e0efNmzZs3TzExMRo3bpyjzeHDhxUeHq6uXbtq586dGjlypB544AGtWbOm0PYVAAAUX0U6h2j16tVO72NiYhQQEKDt27erU6dOjuVlypRRUFBQttv44osv9NNPP+nLL79UYGCgWrRooZdeeknPPPOMJkyYILvdrtmzZ6tmzZqaOnWqJKlhw4b65ptvNH36dIWFhRXcDgIAUEQ2bNigrl276syZM/Lz8yvQvmw2m5YtW6a+ffsWaD8FqVhNqk5MTJQkVahQwWn5/Pnz9eGHHyooKEh9+vTRCy+84Dj/HBcXp6ZNmyowMNDRPiwsTI888oj27Nmjli1bKi4uTj169HDaZlhYmEaOHFmwOwQAyLE+C/oUan8rIlbkqv3w4cM1b968LMsPHDigOnXqSLp8Bd2//vUvrVq1Sr///rsCAgLUokULjRw5Ut27d7/u9o8dO6ZatWqpXr162r17d65q69Kli1q0aKEZM2Y4lt16662Kj4+Xr69vrrZ1PRMmTNAnn3yinTt3Oi2Pj49X+fLl862folBsAlFGRoZGjhyp9u3bq0mTJo7l9957r4KDg1WlShX98MMPeuaZZ7Rv3z4tXbpU0uUv35VhSJLjfUJCwnXbJCUl6a+//lLp0qWd1qWkpCglJcXxPikpKf92FABQYvXq1Utz5851Wubv7y9JOnLkiNq3by8/Pz9NmTJFTZs2VWpqqtasWaPo6Gjt3bv3utuOiYnRPffco40bN2rLli0KCQnJU612u/2aZ1fyW2H1U5CKzVVm0dHR2r17txYuXOi0/KGHHlJYWJiaNm2qyMhIvf/++1q2bJkOHTpUYLVMmjRJvr6+jle1atUKrC8AQMnh6empoKAgp5e7u7sk6dFHH5XNZtPWrVs1YMAA1atXT40bN9aoUaP07bffXne7xhjNnTtXQ4YM0b333qs5c+ZkabNp0yZ16dJFZcqUUfny5RUWFqYzZ85o+PDh+uqrrzRz5kzHxUdHjhzRhg0bZLPZdPbsWSUlJal06dL6/PPPnba5bNkyeXt768KFC5KkZ555RvXq1VOZMmVUq1YtvfDCC44rA2NiYvTiiy9q165djn4yL1Cy2Wz65JNPHNv98ccf1a1bN5UuXVoVK1bUQw89pOTkZMf64cOHq2/fvnr99ddVuXJlVaxYUdHR0U53Ln/77bdVt25deXl5KTAwUHfddVfOf1AuKBaBaMSIEVq5cqXWr1+vqlWrXrdtZmI+ePCgpMup9MSJE05tMt9nJtZrtfHx8ckyOiRJY8eOVWJiouP122+/ubZjAABLOH36tFavXq3o6GiVLVs2y/obzeFZv369Lly4oB49emjw4MFauHCh0wVGO3fuVPfu3dWoUSPFxcXpm2++UZ8+fZSenq6ZM2cqNDTU6QKkq/+Q9/Hx0R133KGPPvrIafn8+fPVt29fxzQUb29vxcTE6KefftLMmTP17rvvavr06ZKkgQMHavTo0WrcuLGjn4EDB2bZl/PnzyssLEzly5fXd999pyVLlujLL7/UiBEjsuzzoUOHtH79escFUZkBa9u2bXr88cc1ceJE7du3T6tXr3aaW1wQivSUmTFGjz32mJYtW6YNGzaoZs2aN/xM5nnLypUrS5JCQ0P1r3/9SydPnlRAQIAkKTY2Vj4+PmrUqJGjzWeffea0ndjYWIWGhmbbh6enpzw9PV3dLQA3gavns+R2vgluTitXrlS5cuUc72+//XYtWbJEBw8elDFGDRo0cGm7c+bM0aBBg+Tu7q4mTZqoVq1aWrJkiYYPHy5Jmjx5stq0aaO3337b8ZnGjRs7/ttut1/3AiRJioyM1JAhQ3ThwgWVKVNGSUlJWrVqlZYtW+Zo8/zzzzv+u0aNGhozZowWLlyop59+WqVLl1a5cuXk4eFx3X4++ugjXbx4Ue+//74jHL755pvq06ePXnvtNcc
UlvLly+vNN9+Uu7u7GjRooPDwcK1du1YPPvigjh49qrJly+qOO+6Qt7e3goOD1bJly9wd1Fwq0hGi6Ohoffjhh/roo4/k7e2thIQEJSQk6K+//pIkHTp0SC+99JK2b9+uI0eOaPny5Ro6dKg6deqkZs2aSZJ69uypRo0aaciQIdq1a5fWrFmj559/XtHR0Y5Q8/DDD+uXX37R008/rb179+rtt9/W4sWL9eSTTxbZvgMASp7M27dkvv79739LuvwHvqvOnj2rpUuXavDgwY5lgwcPdjptljlClBe9e/dWqVKltHz5cknSxx9/LB8fH6eLjhYtWqT27dsrKChI5cqV0/PPP6+jR4/mqp+ff/5ZzZs3dxopa9++vTIyMpzuNdi4cWPH6Ubp8kDHyZMnJUm33XabgoODVatWLQ0ZMkTz5893nNYrKEUaiGbNmqXExER16dJFlStXdrwWLVok6XLi/fLLL9WzZ081aNBAo0eP1oABA7Rixf/+UnN3d9fKlSvl7u6u0NBQDR48WEOHDtXEiRMdbWrWrKlVq1YpNjZWzZs319SpU/Wf//yHS+4BALlStmxZ1alTx/HKPFtRt25d2Wy2G06czk7miEpISIg8PDzk4eGhZ555Rt988432798vSdlO78gtu92uu+66y3Ha7KOPPtLAgQPl4XH5ZFFcXJwiIyPVu3dvrVy5Ujt27NBzzz2nS5cu5bnv7Fz9mA2bzaaMjAxJl0/dff/991qwYIEqV66scePGqXnz5gX6QOAiP2V2PdWqVdNXX311w+0EBwdnOSV2tS5dumjHjh25qg8AgJyoUKGCwsLC9NZbb+nxxx/PMo/o7Nmz15xHNGfOHI0ePdpxeizTo48+qvfee0+vvvqqmjVrprVr1+rFF1/Mdht2uz1Hj66IjIzUbbfdpj179mjdunV6+eWXHes2b96s4OBgPffcc45lv/76a677adiwoWJiYnT+/HnHcdi0aZPc3NxUv379G9aYycPDQz169FCPHj00fvx4+fn5ad26derfv3+Ot5EbxeayewAoCDm5tw3zg5Af3nrrLbVv317t2rXTxIkT1axZM6WlpSk2NlazZs3Szz//nOUzO3fu1Pfff6/58+dnmX8UERGhiRMn6uWXX9bYsWPVtGlTPfroo3r44Ydlt9u1fv163X333apUqZJq1KihLVu26MiRIypXrlyW+/ll6tSpk4KCghQZGamaNWs6Xdpft25dHT16VAsXLlTbtm2zzC+SLs8rOnz4sHbu3KmqVavK29s7y5zbyMhIjR8/XsOGDdOECRN06tQpPfbYYxoyZEiWW+Bcy8qVK/XLL7+oU6dOKl++vD777DNlZGTkKlDlVrG4ygwAgJKuVq1a+v7779W1a1eNHj1aTZo00W233aa1a9dq1qxZ2X5mzpw5atSoUbaTsfv166eTJ0/qs88+U7169fTFF19o165dateunUJDQ/Xpp586TneNGTNG7u7uatSokfz9/a8578dmsykiIkK7du1SZGSk07q///3vevLJJzVixAi1aNFCmzdv1gsvvODUZsCAAerVq5e6du0qf39/LViwIEsfZcqU0Zo1a3T69Gm1bdtWd911l7p3764333wzR8dRunxV3tKlS9WtWzc1bNhQs2fP1oIFC5wmkuc3m8nLTDCLSEpKkq+vrxITE+Xj41PU5QDIBVdHiLjKrGBcvHhRhw8fVs2aNeXl5VXU5eAmcL3vVG5+fzNCBAAALI85RAAsr7CfoQWg+GGECAAAWB6BCAAAWB6nzACUGEx0vnlwPQ/yS359lxghAgAUmsy7Exf0YxhgHZl30r7yMSCuYIQIAFzETR9zz93dXX5+fo5nVpUpU0Y2m62Iq0JJlZGRoVOnTqlMmTKOezK5ikAEAChUmU9KzwxFQF64ubmpevXqeQ7WBCIAQKGy2WyqXLmyAgIClJqaWtTloISz2+1yc8v7DCACEQCgSLi7u+d53geQXwhEAJAD3LwRuLlxlRk
AALA8RogA3FQYyQHgCkaIAACA5RGIAACA5RGIAACA5RGIAACA5RGIAACA5RGIAACA5XHZPYASi0vsAeQXRogAAIDlEYgAAIDlEYgAAIDlEYgAAIDlEYgAAIDlEYgAAIDlEYgAAIDlEYgAAIDlEYgAAIDlEYgAAIDlEYgAAIDlEYgAAIDlEYgAAIDlEYgAAIDlEYgAAIDlEYgAAIDlEYgAAIDlEYgAAIDlEYgAAIDlEYgAAIDleRR1AQBgJX0W9MmybEXEiiKoBMCVGCECAACWxwgRgGIpu5EUACgojBABAADLIxABAADLIxABAADLIxABAADLIxABAADLIxABAADL47J7AChA3D4AKBkYIQIAAJZHIAIAAJZHIAIAAJZHIAIAAJZHIAIAAJbHVWYACl12V16tiFhRBJUAwGWMEAEAAMsr0kA0adIktW3bVt7e3goICFDfvn21b98+pzYXL15UdHS0KlasqHLlymnAgAE6ceKEU5ujR48qPDxcZcqUUUBAgJ566imlpaU5tdmwYYNatWolT09P1alTRzExMQW9ewAAoIQo0kD01VdfKTo6Wt9++61iY2OVmpqqnj176vz58442Tz75pFasWKElS5boq6++0vHjx9W/f3/H+vT0dIWHh+vSpUvavHmz5s2bp5iYGI0bN87R5vDhwwoPD1fXrl21c+dOjRw5Ug888IDWrFlTqPsLAACKJ5sxxhR1EZlOnTqlgIAAffXVV+rUqZMSExPl7++vjz76SHfddZckae/evWrYsKHi4uJ0yy236PPPP9cdd9yh48ePKzAwUJI0e/ZsPfPMMzp16pTsdrueeeYZrVq1Srt373b0NWjQIJ09e1arV6++YV1JSUny9fVVYmKifHx8CmbnAQvJyRwiK9/hmflUQP7Ize/vYjWHKDExUZJUoUIFSdL27duVmpqqHj16ONo0aNBA1atXV1xcnCQpLi5OTZs2dYQhSQoLC1NSUpL27NnjaHPlNjLbZG4DQMHqs6CP0wsAiptic5VZRkaGRo4cqfbt26tJkyaSpISEBNntdvn5+Tm1DQwMVEJCgqPNlWEoc33muuu1SUpK0l9//aXSpUs7rUtJSVFKSorjfVJSUt53EAAAFFvFZoQoOjpau3fv1sKFC4u6FE2aNEm+vr6OV7Vq1Yq6JAAAUICKxQjRiBEjtHLlSm3cuFFVq1Z1LA8KCtKlS5d09uxZp1GiEydOKCgoyNFm69atTtvLvArtyjZXX5l24sQJ+fj4ZBkdkqSxY8dq1KhRjvdJSUmEIqCAcSoNQFEq0hEiY4xGjBihZcuWad26dapZs6bT+tatW6tUqVJau3atY9m+fft09OhRhYaGSpJCQ0P1448/6uTJk442sbGx8vHxUaNGjRxtrtxGZpvMbVzN09NTPj4+Ti8AAHDzKtIRoujoaH300Uf69NNP5e3t7Zjz4+vrq9KlS8vX11dRUVEaNWqUKlSoIB8fHz322GMKDQ3VLbfcIknq2bOnGjVqpCFDhmjy5MlKSEjQ888/r+joaHl6ekqSHn74Yb355pt6+umndf/992vdunVavHixVq1aVWT7DgAAio8iHSGaNWuWEhMT1aVLF1WuXNnxWrRokaPN9OnTdccdd2jAgAHq1KmTgoKCtHTpUsd6d3d3rVy5Uu7u7goNDdXgwYM1dOhQTZw40dGmZs2aWrVqlWJjY9W8eXNNnTpV//nPfxQWFlao+wsAAIqnYnUfouKK+xABecP8oNzhPkRA/iix9yECAAAoCsXiKjMAwP/k5E7eAPIXI0QAAMDyCEQAAMDyCEQAAMDyCEQAAMDyCEQAAMDyCEQAAMDyCEQAAMDyCEQAAMDyCEQAAMDyCEQAAMDyCEQAAMDyCEQAAMDyCEQAAMDyCEQAAMDyCEQAAMDyCEQAAMDyCEQAAMDyPIq6AADAjfVZ0OeGbVZErCiESoCbEyNEAADA8ghEAADA8ghEAADA8ghEAADA8phUDSBf5WTyLwAUN4w
QAQAAyyMQAQAAyyMQAQAAyyMQAQAAyyMQAQAAyyMQAQAAyyMQAQAAyyMQAQAAyyMQAQAAyyMQAQAAy+PRHQDyhEd1ALgZMEIEAAAsjxEiALhJZDdatyJiRRFUApQ8jBABAADLIxABAADLIxABAADLIxABAADLIxABAADLIxABAADLIxABAADLIxABAADLIxABAADLIxABAADLIxABAADLIxABAADLIxABAADLIxABAADLIxABAADL8yjqAgCUHH0W9CnqEgCgQDBCBAAALI9ABAAALI9ABAAALI9ABAAALI9ABAAALM+lQPTLL7/kdx0AAABFxqVAVKdOHXXt2lUffvihLl686HLnGzduVJ8+fVSlShXZbDZ98sknTuuHDx8um83m9OrVq5dTm9OnTysyMlI+Pj7y8/NTVFSUkpOTndr88MMP6tixo7y8vFStWjVNnjzZ5ZoBAMDNx6VA9P3336tZs2YaNWqUgoKC9I9//ENbt27N9XbOnz+v5s2b66233rpmm169eik+Pt7xWrBggdP6yMhI7dmzR7GxsVq5cqU2btyohx56yLE+KSlJPXv2VHBwsLZv364pU6ZowoQJeuedd3JdLwAAuDm5dGPGFi1aaObMmZo6daqWL1+umJgYdejQQfXq1dP999+vIUOGyN/f/4bbuf3223X77bdft42np6eCgoKyXffzzz9r9erV+u6779SmTRtJ0htvvKHevXvr9ddfV5UqVTR//nxdunRJ7733nux2uxo3bqydO3dq2rRpTsEJAABYV54mVXt4eKh///5asmSJXnvtNR08eFBjxoxRtWrVNHToUMXHx+e5wA0bNiggIED169fXI488oj///NOxLi4uTn5+fo4wJEk9evSQm5ubtmzZ4mjTqVMn2e12R5uwsDDt27dPZ86cybbPlJQUJSUlOb0AAMDNK0+P7ti2bZvee+89LVy4UGXLltWYMWMUFRWlY8eO6cUXX9Sdd97p0qm0TL169VL//v1Vs2ZNHTp0SP/85z91++23Ky4uTu7u7kpISFBAQIDzDnl4qEKFCkpISJAkJSQkqGbNmk5tAgMDHevKly+fpd9JkybpxRdfdLlu4GbBozoAWIVLgWjatGmaO3eu9u3bp969e+v9999X79695eZ2ecCpZs2aiomJUY0aNfJU3KBBgxz/3bRpUzVr1ky1a9fWhg0b1L179zxt+3rGjh2rUaNGOd4nJSWpWrVqBdYfABSUq0PtiogVRVQJULy5FIhmzZql+++/X8OHD1flypWzbRMQEKA5c+bkqbir1apVS5UqVdLBgwfVvXt3BQUF6eTJk05t0tLSdPr0ace8o6CgIJ04ccKpTeb7a81N8vT0lKenZ77WDgAAii+XAtGBAwdu2MZut2vYsGGubP6ajh07pj///NMRwkJDQ3X27Flt375drVu3liStW7dOGRkZCgkJcbR57rnnlJqaqlKlSkmSYmNjVb9+/WxPlwEAAOtxaVL13LlztWTJkizLlyxZonnz5uV4O8nJydq5c6d27twpSTp8+LB27typo0ePKjk5WU899ZS+/fZbHTlyRGvXrtWdd96pOnXqKCwsTJLUsGFD9erVSw8++KC2bt2qTZs2acSIERo0aJCqVKkiSbr33ntlt9sVFRWlPXv2aNGiRZo5c6bTKTEAAGBtLgWiSZMmqVKlSlmWBwQE6JVXXsnxdrZt26aWLVuqZcuWkqRRo0apZcuWGjdunNzd3fXDDz/o73//u+rVq6eoqCi1bt1aX3/9tdPprPnz56tBgwbq3r27evfurQ4dOjjdY8jX11dffPGFDh8+rNatW2v06NEaN24cl9wDAAAHmzHG5PZDXl5e2rt3b5ZJ00eOHFHDhg31119/5Vd9xUJSUpJ8fX2VmJgoHx+foi4HKDRcZXbzYVI1rCQ3v79dGiEKCAjQDz/8kGX5rl27VLFiRVc2CQAAUGRcCkQRERF6/PHHtX79eqWnpys9PV3r1q3TE0884XS
pPAAAQEng0lVmL730ko4cOaLu3bvLw+PyJjIyMjR06NBczSECAAAoDlwKRHa7XYsWLdJLL72kXbt2qXTp0mratKmCg4Pzuz4AAIACl6dHd9SrV0/16tXLr1oAAACKhEuBKD09XTExMVq7dq1OnjypjIwMp/Xr1q3Ll+IAAAAKg0uB6IknnlBMTIzCw8PVpEkT2Wy2/K4LAACg0LgUiBYuXKjFixerd+/e+V0PAABAoXN5UnWdOnXyuxYARYibMAKwMpfuQzR69GjNnDlTLtzkGgAAoNhxaYTom2++0fr16/X555+rcePGjqfIZ1q6dGm+FAcAAFAYXApEfn5+6tevX37XAgAAUCRcCkRz587N7zoAAACKjEtziCQpLS1NX375pf7v//5P586dkyQdP35cycnJ+VYcAABAYXBphOjXX39Vr169dPToUaWkpOi2226Tt7e3XnvtNaWkpGj27Nn5XScAIB9kdzXhiogVRVAJULy4NEL0xBNPqE2bNjpz5oxKly7tWN6vXz+tXbs234oDAAAoDC6NEH399dfavHmz7Ha70/IaNWro999/z5fCAAAACotLI0QZGRlKT0/PsvzYsWPy9vbOc1EAAACFyaVA1LNnT82YMcPx3mazKTk5WePHj+dxHgAAoMRx6ZTZ1KlTFRYWpkaNGunixYu69957deDAAVWqVEkLFizI7xoBAAAKlEuBqGrVqtq1a5cWLlyoH374QcnJyYqKilJkZKTTJGsAAICSwKVAJEkeHh4aPHhwftYCAABQJFwKRO+///511w8dOtSlYgAAAIqCS4HoiSeecHqfmpqqCxcuyG63q0yZMgQiAABQorh0ldmZM2ecXsnJydq3b586dOjApGoAAFDiuPwss6vVrVtXr776apbRIwAAgOLO5UnV2W7Mw0PHjx/Pz00CAArY1c8349lmsCKXAtHy5cud3htjFB8frzfffFPt27fPl8IAAAAKi0uBqG/fvk7vbTab/P391a1bN02dOjU/6gIAACg0LgWijIyM/K4DAACgyOTbpGoAAICSyqURolGjRuW47bRp01zpAgAAoNC4FIh27NihHTt2KDU1VfXr15ck7d+/X+7u7mrVqpWjnc1my58qAQAACpBLgahPnz7y9vbWvHnzVL58eUmXb9Z43333qWPHjho9enS+FgkAAFCQXJpDNHXqVE2aNMkRhiSpfPnyevnll7nKDAAAlDguBaKkpCSdOnUqy/JTp07p3LlzeS4KAACgMLl0yqxfv3667777NHXqVLVr106StGXLFj311FPq379/vhYIoGBcfXdiALAylwLR7NmzNWbMGN17771KTU29vCEPD0VFRWnKlCn5WiAAAEBBcykQlSlTRm+//bamTJmiQ4cOSZJq166tsmXL5mtxAAAAhSFPN2aMj49XfHy86tatq7Jly8oYk191AQAAFBqXAtGff/6p7t27q169eurdu7fi4+MlSVFRUVxyDwAAShyXTpk9+eSTKlWqlI4ePaqGDRs6lg8cOFCjRo3i0nsAKMGym3C/ImJFEVQCFB6XAtEXX3yhNWvWqGrVqk7L69atq19//TVfCgMAACgsLp0yO3/+vMqUKZNl+enTp+Xp6ZnnogAAAAqTS4GoY8eOev/99x3vbTabMjIyNHnyZHXt2jXfigMAACgMLp0ymzx5srp3765t27bp0qVLevrpp7Vnzx6dPn1amzZtyu8aAQAACpRLI0RNmjTR/v371aFDB9155506f/68+vfvrx07dqh27dr5XSMAAECByvUIUWpqqnr16qXZs2frueeeK4iaAOQzHtMBANeX6xGiUqVK6YcffiiIWgAAAIqES6fMBg8erDlz5uR3LQAAAEXCpUnVaWlpeu+99/Tll1+qdevWWZ5hNm3atHwpDgAAoDDkKhD98ssvqlGjhnbv3q1WrVpJkvbv3+/Uxmaz5V91AAAAhSBXgahu3bqKj4/X+vXrJV1+VMe///1vBQYGFkhxAAAAhSF
Xc4iufpr9559/rvPnz+drQQAAAIXNpUnVma4OSAAAACVRrgKRzWbLMkeIOUMAAKCky9UcImOMhg8f7niA68WLF/Xwww9nucps6dKl+VchAABAActVIBo2bJjT+8GDB+drMQAAAEUhV4Fo7ty5BVUHAABAkcnTpGoAAICbQZEGoo0bN6pPnz6qUqWKbDabPvnkE6f1xhiNGzdOlStXVunSpdWjRw8dOHDAqc3p06cVGRkpHx8f+fn5KSoqSsnJyU5tfvjhB3Xs2FFeXl6qVq2aJk+eXNC7BgAASpAiDUTnz59X8+bN9dZbb2W7fvLkyfr3v/+t2bNna8uWLSpbtqzCwsJ08eJFR5vIyEjt2bNHsbGxWrlypTZu3KiHHnrIsT4pKUk9e/ZUcHCwtm/frilTpmjChAl65513Cnz/AOBm0WdBH6cXcLNx6Vlm+eX222/X7bffnu06Y4xmzJih559/Xnfeeack6f3331dgYKA++eQTDRo0SD///LNWr16t7777Tm3atJEkvfHGG+rdu7def/11ValSRfPnz9elS5f03nvvyW63q3Hjxtq5c6emTZvmFJwAAIB1Fds5RIcPH1ZCQoJ69OjhWObr66uQkBDFxcVJkuLi4uTn5+cIQ5LUo0cPubm5acuWLY42nTp1kt1ud7QJCwvTvn37dObMmWz7TklJUVJSktMLAADcvIptIEpISJCkLM9JCwwMdKxLSEhQQECA03oPDw9VqFDBqU1227iyj6tNmjRJvr6+jle1atXyvkMAAKDYKtJTZsXV2LFjNWrUKMf7pKQkQhFKFOZ4AEDuFNsRoqCgIEnSiRMnnJafOHHCsS4oKEgnT550Wp+WlqbTp087tcluG1f2cTVPT0/5+Pg4vQAAwM2r2AaimjVrKigoSGvXrnUsS0pK0pYtWxQaGipJCg0N1dmzZ7V9+3ZHm3Xr1ikjI0MhISGONhs3blRqaqqjTWxsrOrXr6/y5csX0t4AAIDirEgDUXJysnbu3KmdO3dKujyReufOnTp69KhsNptGjhypl19+WcuXL9ePP/6ooUOHqkqVKurbt68kqWHDhurVq5cefPBBbd26VZs2bdKIESM0aNAgValSRZJ07733ym63KyoqSnv27NGiRYs0c+ZMp1NiAADA2op0DtG2bdvUtWtXx/vMkDJs2DDFxMTo6aef1vnz5/XQQw/p7Nmz6tChg1avXi0vLy/HZ+bPn68RI0aoe/fucnNz04ABA/Tvf//bsd7X11dffPGFoqOj1bp1a1WqVEnjxo3jknsAAOBgM8aYoi6iuEtKSpKvr68SExOZT4QSgUnVKGgrIlYUdQnADeXm93exnUMEAABQWAhEAADA8ghEAADA8ghEAADA8ghEAADA8ghEAADA8ghEAADA8ghEAADA8ghEAADA8or00R0AgJIpu7uhc/dqlGSMEAEAAMsjEAEAAMsjEAEAAMsjEAEAAMsjEAEAAMsjEAEAAMvjsnughMvu8mcAQO4wQgQAACyPQAQAACyPQAQAACyPQAQAACyPQAQAACyPQAQAACyPy+6BEobL7AEg/zFCBAAALI9ABAAALI9TZgCAfHH16dwVESuKqBIg9xghAgAAlkcgAgAAlkcgAgAAlkcgAgAAlkcgAgAAlkcgAgAAlkcgAgAAlkcgAgAAlkcgAgAAlkcgAgAAlkcgAgAAlsezzIBi7OpnQwEACgYjRAAAwPIYIQIAFIjsRjhXRKwogkqAG2OECAAAWB6BCAAAWB6BCAAAWB5ziAAAhebqeUXMKUJxwQgRAACwPAIRAACwPAIRAACwPAIRAACwPAIRAACwPAIRAACwPAIRAACwPAIRAACwPAIRAACwPO5UDRQj2T0dHABQ8BghAgAAlkcgAgAAlkcgAgAAlkcgAgAAlkcgAgAAllesA9GECRNks9mcXg0aNHCsv3jxoqKjo1WxYkWVK1dOAwYM0IkTJ5y2cfToUYWHh6tMmTIKCAjQU089pbS0tMLeFQA
AUIwV+8vuGzdurC+//NLx3sPjfyU/+eSTWrVqlZYsWSJfX1+NGDFC/fv316ZNmyRJ6enpCg8PV1BQkDZv3qz4+HgNHTpUpUqV0iuvvFLo+wIAAIqnYh+IPDw8FBQUlGV5YmKi5syZo48++kjdunWTJM2dO1cNGzbUt99+q1tuuUVffPGFfvrpJ3355ZcKDAxUixYt9NJLL+mZZ57RhAkTZLfbC3t3AABAMVSsT5lJ0oEDB1SlShXVqlVLkZGROnr0qCRp+/btSk1NVY8ePRxtGzRooOrVqysuLk6SFBcXp6ZNmyowMNDRJiwsTElJSdqzZ881+0xJSVFSUpLTCwAA3LyK9QhRSEiIYmJiVL9+fcXHx+vFF19Ux44dtXv3biUkJMhut8vPz8/pM4GBgUpISJAkJSQkOIWhzPWZ665l0qRJevHFF/N3ZwAAWWR3d/YVESuKoBJYXbEORLfffrvjv5s1a6aQkBAFBwdr8eLFKl26dIH1O3bsWI0aNcrxPikpSdWqVSuw/gAAQNEq1oHoan5+fqpXr54OHjyo2267TZcuXdLZs2edRolOnDjhmHMUFBSkrVu3Om0j8yq07OYlZfL09JSnp2f+7wBwBZ5bBgDFR7GfQ3Sl5ORkHTp0SJUrV1br1q1VqlQprV271rF+3759Onr0qEJDQyVJoaGh+vHHH3Xy5ElHm9jYWPn4+KhRo0aFXj8AACieivUI0ZgxY9SnTx8FBwfr+PHjGj9+vNzd3RURESFfX19FRUVp1KhRqlChgnx8fPTYY48pNDRUt9xyiySpZ8+eatSokYYMGaLJkycrISFBzz//vKKjoxkBAgAADsU6EB07dkwRERH6888/5e/vrw4dOujbb7+Vv7+/JGn69Olyc3PTgAEDlJKSorCwML399tuOz7u7u2vlypV65JFHFBoaqrJly2rYsGGaOHFiUe0SAAAohmzGGFPURRR3SUlJ8vX1VWJionx8fIq6HNwkmEMEZI+rzJBfcvP7u0TNIQIAACgIBCIAAGB5BCIAAGB5BCIAAGB5xfoqM+BmwiRqIGeu/rfCJGsUBgIRAKBY43lnKAycMgMAAJZHIAIAAJZHIAIAAJZHIAIAAJZHIAIAAJZHIAIAAJbHZfdAAeCeQwBQsjBCBAAALI9ABAAALI9ABAAALI9ABAAALI9ABAAALI9ABAAALI/L7gEAJc7Vt7ZYEbGiiCrBzYIRIgAAYHkEIgAAYHkEIgAAYHnMIQLyAY/qAICSjREiAABgeQQiAABgeQQiAABgeQQiAABgeQQiAABgeQQiAABgeVx2D+QSl9gDwM2HQAQAKPGy+0OF55shNzhlBgAALI9ABAAALI9ABAAALI85RACAm9LV84qYU4TrIRABN8BVZQBw8+OUGQAAsDwCEQAAsDwCEQAAsDzmEAEALIGbN+J6CETAFZhADVgLV6IhE4EIlkYAAgBIzCECAABghAjWwWgQAOBaGCECAACWxwgRAAD/H1eiWRcjRAAAwPIIRAAAwPIIRAAAwPKYQwQAwHVw80ZrIBDhpsVl9gCAnCIQAQCQC1yJdnMiEOGmwGgQACAvCEQokQhAAID8xFVmAADA8hghAgAgj7gSreQjEKHY4/QYgJKGidclD6fMAACA5VlqhOitt97SlClTlJCQoObNm+uNN95Qu3btirosXIURIQA3I06rFW+WCUSLFi3SqFGjNHv2bIWEhGjGjBkKCwvTvn37FBAQUNTlWRbhB4BVcVqteLEZY0xRF1EYQkJC1LZtW7355puSpIyMDFWrVk2PPfaYnn322et+NikpSb6+vkpMTJSPj09hlHvTIgABQP4iRF1bbn5/W2KE6NKlS9q+fbvGjh3rWObm5qYePXooLi6uCCu7uTAcDAAoqSwRiP744w+lp6crMDDQaXlgYKD27t2bpX1KSopSUlIc7xMTEyVdTpolyT1L7smybPHdi3PULj/0mtOrQLYLAPif/Pp/bU5+P7japqhk/t7Oyck
wSwSi3Jo0aZJefPHFLMurVatWBNXkL98HfIu6BABAMZST3w/51aawnTt3Tr6+16/LEoGoUqVKcnd314kTJ5yWnzhxQkFBQVnajx07VqNGjXK8z8jI0OnTp1WxYkXZbLYCr/dmlZSUpGrVqum3335jLlY+4HjmL45n/uFY5i+Op+uMMTp37pyqVKlyw7aWCER2u12tW7fW2rVr1bdvX0mXQ87atWs1YsSILO09PT3l6enptMzPz68QKrUGHx8f/lHnI45n/uJ45h+OZf7ieLrmRiNDmSwRiCRp1KhRGjZsmNq0aaN27dppxowZOn/+vO67776iLg0AABQxywSigQMH6tSpUxo3bpwSEhLUokULrV69OstEawAAYD2WCUSSNGLEiGxPkaFweHp6avz48VlOR8I1HM/8xfHMPxzL/MXxLByWuTEjAADAtfBwVwAAYHkEIgAAYHkEIgAAYHkEIgAAYHkEIhSo06dPKzIyUj4+PvLz81NUVJSSk5Ov2/6xxx5T/fr1Vbp0aVWvXl2PP/6443lyVpfb4ylJ77zzjrp06SIfHx/ZbDadPXu2cIotht566y3VqFFDXl5eCgkJ0datW6/bfsmSJWrQoIG8vLzUtGlTffbZZ4VUafGXm2O5Z88eDRgwQDVq1JDNZtOMGTMKr9ASIjfH891331XHjh1Vvnx5lS9fXj169Ljhdxk3RiBCgYqMjNSePXsUGxurlStXauPGjXrooYeu2f748eM6fvy4Xn/9de3evVsxMTFavXq1oqKiCrHq4iu3x1OSLly4oF69eumf//xnIVVZPC1atEijRo3S+PHj9f3336t58+YKCwvTyZMns22/efNmRUREKCoqSjt27FDfvn3Vt29f7d69u5ArL35yeywvXLigWrVq6dVXX832cUlWl9vjuWHDBkVERGj9+vWKi4tTtWrV1LNnT/3++++FXPlNxgAF5KeffjKSzHfffedY9vnnnxubzWZ+//33HG9n8eLFxm63m9TU1IIos8TI6/Fcv369kWTOnDlTgFUWX+3atTPR0dGO9+np6aZKlSpm0qRJ2ba/5557THh4uNOykJAQ849//KNA6ywJcnssrxQcHGymT59egNWVPHk5nsYYk5aWZry9vc28efMKqkRLYIQIBSYuLk5+fn5q06aNY1mPHj3k5uamLVu25Hg7iYmJ8vHxkYeHpe4jmkV+HU8runTpkrZv364ePXo4lrm5ualHjx6Ki4vL9jNxcXFO7SUpLCzsmu2twpVjiWvLj+N54cIFpaamqkKFCgVVpiUQiFBgEhISFBAQ4LTMw8NDFSpUUEJCQo628ccff+ill1664WkhK8iP42lVf/zxh9LT07M8qicwMPCaxy4hISFX7a3ClWOJa8uP4/nMM8+oSpUqWQI8codAhFx79tlnZbPZrvvau3dvnvtJSkpSeHi4GjVqpAkTJuS98GKqsI4ngJvPq6++qoULF2rZsmXy8vIq6nJKNGufg4BLRo8ereHDh1+3Ta1atRQUFJRlUmBaWppOnz59w4mV586dU69eveTt7a1ly5apVKlSeS272CqM42l1lSpVkru7u06cOOG0/MSJE9c8dkFBQblqbxWuHEtcW16O5+uvv65XX31VX375pZo1a1aQZVoCgQi55u/vL39//xu2Cw0N1dmzZ7V9+3a1bt1akrRu3TplZGQoJCTkmp9LSkpSWFiYPD09tXz58pv+r56CPp6Q7Ha7WrdurbVr16pv376SpIyMDK1du/aaD3wODQ3V2rVrNXLkSMey2NhYhYaGFkLFxZcrxxLX5urxnDx5sv71r39pzZo1TvMKkQdFPasbN7devXqZli1bmi1btphvvvnG1K1b10RERDjWHzt2zNSvX99s2bLFGGNMYmKiCQkJMU2bNjUHDx408fHxjldaWlpR7UaxkdvjaYwx8fHxZseOHebdd981kszGjRvNjh07zJ9//lkUu1BkFi5caDw9PU1MTIz56aefzEMPPWT8/Px
MQkKCMcaYIUOGmGeffdbRftOmTcbDw8O8/vrr5ueffzbjx483pUqVMj/++GNR7UKxkdtjmZKSYnbs2GF27NhhKleubMaMGWN27NhhDhw4UFS7UKzk9ni++uqrxm63m//+979O/488d+5cUe3CTYFAhAL1559/moiICFOuXDnj4+Nj7rvvPqd/tIcPHzaSzPr1640x/7s0PLvX4cOHi2YnipHcHk9jjBk/fny2x3Pu3LmFvwNF7I033jDVq1c3drvdtGvXznz77beOdZ07dzbDhg1zar948WJTr149Y7fbTePGjc2qVasKueLiKzfHMvN7efWrc+fOhV94MZWb4xkcHJzt8Rw/fnzhF34TsRljTOGNRwEAABQ/XGUGAAAsj0AEAAAsj0AEAAAsj0AEAAAsj0AEAAAsj0AEAAAsj0AEAAAsj0AEoEjFxMTIz8+vwPs5cuSIbDabdu7cWeB95dXw4cMdj3EAUDgIRAByJS4uTu7u7goPD8/1Z2vUqKEZM2Y4LRs4cKD279+fT9Vdll2gqFatmuLj49WkSZN87etKjz32mBo2bJjtuqNHj8rd3V3Lly8vsP4BuI5ABCBX5syZo8cee0wbN27U8ePH87y90qVLKyAgIB8quz53d3cFBQXJw6PgnmkdFRWlvXv3avPmzVnWxcTEKCAgQL179y6w/gG4jkAEIMeSk5O1aNEiPfLIIwoPD1dMTEyWNitWrFDbtm3l5eWlSpUqqV+/fpKkLl266Ndff9WTTz4pm80mm80myfmU2f79+2Wz2bR3716nbU6fPl21a9eWJKWnpysqKko1a9ZU6dKlVb9+fc2cOdPRdsKECZo3b54+/fRTRz8bNmzI9pTZV199pXbt2snT01OVK1fWs88+q7S0NMf6Ll266PHHH9fTTz+tChUqKCgoSBMmTLjm8WnRooVatWql9957z2m5MUYxMTEaNmyYbDbbdevPTnYjay1atHCq5ezZs3rggQfk7+8vHx8fdevWTbt27brudgH8D4EIQI4tXrxYDRo0UP369TV48GC99957uvJxiKtWrVK/fv3Uu3dv7dixQ2vXrlW7du0kSUuXLlXVqlU1ceJExcfHKz4+Psv269WrpzZt2mj+/PlOy+fPn697771XkpSRkaGqVatqyZIl+umnnzRu3Dj985//1OLFiyVJY8aM0T333KNevXo5+rn11luz9PX777+rd+/eatu2rXbt2qVZs2Zpzpw5evnll53azZs3T2XLltWWLVs0efJkTZw4UbGxsdc8RlFRUVq8eLHOnz/vWLZhwwYdPnxY999//w3rd9Xdd9+tkydP6vPPP9f27dvVqlUrde/eXadPn87TdgHLKNpnywIoSW699VYzY8YMY4wxqampplKlSmb9+vWO9aGhoSYyMvKanw8ODjbTp093WjZ37lzj6+vreD99+nRTu3Ztx/t9+/YZSebnn3++5najo6PNgAEDHO+HDRtm7rzzTqc2mU9c37FjhzHGmH/+85+mfv36JiMjw9HmrbfeMuXKlTPp6enGmMtPGe/QoYPTdtq2bWueeeaZa9Zy5swZ4+XlZebOnetYNmTIkCzbyU392R235s2bO55u/vXXXxsfHx9z8eJFpza1a9c2//d//3fNfgH8DyNEAHJk37592rp1qyIiIiRJHh4eGjhwoObMmeNos3PnTnXv3j1P/QwaNEhHjhzRt99+K+ny6FCrVq3UoEEDR5u33npLrVu3lr+/v8qVK6d33nlHR48ezVU/P//8s0JDQx2n7iSpffv2Sk5O1rFjxxzLmjVr5vS5ypUr6+TJk9fcrp+fn/r37+84bZaUlKSPP/5YUVFR+Vr/lXbt2qXk5GRVrFhR5cqVc7wOHz6sQ4cOubxdwEoKbnYhgJvKnDlzlJaWpipVqjiWGWPk6empN998U76+vipdunSe+wkKClK3bt300Ucf6ZZbbtFHH32kRx55xLF+4cKFGjNmjKZOnarQ0FB5e3trypQp2rJlS57
7zk6pUqWc3ttsNmVkZFz3M1FRUerevbsOHjyo9evXy93dXXfffbfL9bu5uTmdmpSk1NRUx38nJyercuXK2rBhQ5bPFsYtDYCbAYEIwA2lpaXp/fff19SpU9WzZ0+ndX379tWCBQv08MMPq1mzZlq7dq3uu+++bLdjt9uVnp5+w/4iIyP19NNPKyIiQr/88osGDRrkWLdp0ybdeuutevTRRx3Lrh4FyUk/DRs21McffyxjjGOUaNOmTfL29lbVqlVvWOP1dO3aVTVr1tTcuXO1fv16DRo0SGXLls1x/Vfz9/d3mnOVlJSkw4cPO963atVKCQkJ8vDwUI0aNfJUO2BVnDIDcEMrV67UmTNnFBUVpSZNmji9BgwY4DhtNn78eC1YsEDjx4/Xzz//rB9//FGvvfaaYzs1atTQxo0b9fvvv+uPP/64Zn/9+/fXuXPn9Mgjj6hr165Oo1J169bVtm3btGbNGu3fv18vvPCCvvvuO6fP16hRQz/88IP27dunP/74w2k0JdOjjz6q3377TY899pj27t2rTz/9VOPHj9eoUaPk5pa3/zXabDbdf//9mjVrluLi4pxOl+Wk/qt169ZNH3zwgb7++mv9+OOPGjZsmNzd3R3re/ToodDQUPXt21dffPGFjhw5os2bN+u5557Ttm3b8rQvgFUQiADc0Jw5c9SjRw/5+vpmWTdgwABt27ZNP/zwg7p06aIlS5Zo+fLlatGihbp166atW7c62k6cOFFHjhxR7dq15e/vf83+vL291adPH+3atUuRkZFO6/7xj3+of//+GjhwoEJCQvTnn386jbZI0oMPPqj69eurTZs28vf316ZNm7L08be//U2fffaZtm7dqubNm+vhhx9WVFSUnn/++dwenmwNHz5ciYmJaty4sUJCQnJV/9XGjh2rzp0764477lB4eLj69u3ruA2BdDmAffbZZ+rUqZPuu+8+1atXT4MGDdKvv/6qwMDAfNkf4GZnM1efmAYAALAYRogAAIDlEYgAAIDlEYgAAIDlEYgAAIDlEYgAAIDlEYgAAIDlEYgAAIDlEYgAAIDlEYgAAIDlEYgAAIDlEYgAAIDlEYgAAIDl/T9ZY3gWrIUX7gAAAABJRU5ErkJggg==", + "text/plain": [ + "
" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Gradient for lstm layer: torch.Size([3, 5, 256])\n" + ] + }, + { + "data": { + "image/png": "iVBORw0KGgoAAAANSUhEUgAAAjIAAAHHCAYAAACle7JuAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjkuMiwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8hTgPZAAAACXBIWXMAAA9hAAAPYQGoP6dpAABER0lEQVR4nO3deVyVZf7/8fdBFtlJREDFfTeXSUcjdSql1ByzZHJJc+Nrmdi4TpPTotYUmplZodWMYU2ZSmlNY+WWWjnue7mb5QZomiAqi3D9/ujHGY8swhE83Ph6Ph7nUee6t8993wd5c5/rum+bMcYIAADAgtxcXQAAAICzCDIAAMCyCDIAAMCyCDIAAMCyCDIAAMCyCDIAAMCyCDIAAMCyCDIAAMCyCDIAAMCyCDLADTRkyBDVqVPHoc1ms2ny5Mkuqac03aj9WLNmjWw2m9asWWNvu+uuu3TrrbeW+bYl6aeffpLNZtO8efNuyPau9q9//UtNmjSRh4eHgoKCynRbkydPls1mK9NtANeLIIObwpEjRzRq1Cg1atRIPj4+8vHxUbNmzRQbG6tdu3a5urwyN3/+fL322mvFnr9OnTqy2Wyy2Wxyc3NTUFCQWrRooUcffVQbN250WV03Unmsbd++fRoyZIjq16+vf/zjH3rnnXdcXVI+J0+e1OTJk7Vjxw5Xl4KbhLurCwDK2n/+8x/17dtX7u7uGjBggFq1aiU3Nzft27dPixcv1pw5c3TkyBHVrl3bJfVdunRJ7u5l+6M4f/58ff/99xozZkyxl2ndurXGjx8vSTp//rz27t2rxMRE/eMf/9DYsWP16quvOszvzH44U9cf/vAHXbp0SZ6eniXaVkkVVlvt2rV16dIleXh4lOn2C7JmzRrl5uZq1qxZatCgwQ3ffnGcPHlSU6ZMUZ06ddS6dWtXl4ObAEEGFdrhw4fVr18/1a5dW6tWrVJ4eLjD9GnTpmn27Nlycyv64uSFCxfk6+tbJjVWrly5TNZ7vWrUqKGBAwc6tE2bNk0PP/ywZs6cqYYNG+rxxx+3Tyvr/cjIyJCnp6fc3NxcesxsNpvLtn/q1ClJKtWvlC5evCgfH59SWx9wwxmgAnv00UeNJLNhw4ZiLzN48GDj6+trDh06ZLp37278/PxMr169jDHGfPPNN+ZPf/qTiYiIMJ6enqZmzZpmzJgx5uLFi/nWs2TJEtO8eXPj5eVlmjdvbhYvXmwGDx5sateu7TCfJDNp0iSHtuPHj5uhQ4eaatWqGU9PT9OsWTMzd+5ch3lWr15tJJmFCxeav//976ZGjRrGy8vLdO7c2Rw8eNA+35133mkkObyuruFqtWvXNj169Chw2vnz502VKlVMjRo1TG5ubqH7kZaWZkaPHm1q165tPD09TUhIiImKijJbt269Zl15+/bRRx+Zp59+2lSvXt3YbDbz66+/2qetXr3aYR+bN29utmzZYiIjI03lypVNnTp1zJw5cxxqT0hIMJLMkSNHCjyWeessqrYjR44YSSYhIcFhHatWrTIdO3Y0Pj4+JjAw0Nx///1mz549DvNMmjTJSDIHDx40gwcPNoGBgSYgIMAMGTLEXLhwoYgz8ts5ubqmK493fHy8adasmfH09DTh4eFm5MiR5tdff3VYx5XHqVOnTsbb29uMHj260G3m1Xul5cuXmw4dOpjAwEDj6+trGjVqZCZOnOhwHK9+5R2rvO3v3LnT/OEPfzDe3t6mfv36JjEx0RhjzJo1a0y7du1M5cqVTaNGjcyKFSuKPCaAMcZ
wRQYV2n/+8x81aNBA7du3L9Fyly9fVteuXdWxY0e98sor9r9YExMTdfHiRT3++OMKDg7Wpk2b9MYbb+j48eNKTEy0L798+XJFR0erWbNmiouL05kzZzR06FDVrFnzmttOSUnR7bffLpvNplGjRikkJERffvmlYmJilJaWlu+rjqlTp8rNzU0TJkxQamqqXn75ZQ0YMMDel+Xpp59Wamqqjh8/rpkzZ0qS/Pz8SnQ8ruTn56cHH3xQc+fO1Z49e9S8efMC5xsxYoQ+/vhjjRo1Ss2aNdOZM2f03Xffae/evbrtttuKVdcLL7wgT09PTZgwQZmZmUV+nfTrr7/qvvvuU58+fdS/f38tWrRIjz/+uDw9PTVs2LAS7WNJj9nKlSvVvXt31atXT5MnT9alS5f0xhtvqEOHDtq2bVu+Dt59+vRR3bp1FRcXp23btumf//ynqlWrpmnTphW6jddee03vv/++lixZojlz5sjPz08tW7aU9Fun3ClTpigqKkqPP/649u/frzlz5mjz5s1at26dw9dgZ86cUffu3dWvXz8NHDhQoaGhxT4uP/zwg/74xz+qZcuWev755+Xl5aVDhw5p3bp1kqSmTZvq+eef13PPPadHH31UnTp1kiTdcccd9nX8+uuv+uMf/6h+/frpoYce0pw5c9SvXz99+OGHGjNmjEaMGKGHH35Y06dP15/+9CcdO3ZM/v7+xa4RNyFXJymgrKSmphpJ5oEHHsg37ddffzWnT5+2v668ojJ48GAjyTz11FP5livoyktcXJyx2Wzm559/tre1bt3ahIeHm3Pnztnbli9fXuDVEF31l3VMTIwJDw83v/zyi8N8/fr1M4GBgfYa8v76bdq0qcnMzLTPN2vWLCPJ7N69297Wo0ePa16FuVJRV2SMMWbmzJlGkvnss88K3Y/AwEATGxtb5HYKqytv3+rVq5fvmBd2RUaSmTFjhr0tMzPTtG7d2lSrVs1kZWUZY4p/Raao2gq6IpO3nTNnztjbdu7cadzc3MygQYPsbXlXOIYNG+awzgcffNAEBwfn29bV8pY/ffq0ve3UqVPG09PT3HvvvSYnJ8fe/uabbxpJ5t1337W35R2nt95665rbunJ7efLO+5Xbv9rmzZsLvGJ15fbnz59vb9u3b5+RZNzc3ByunC5btqzQ9QBXYtQSKqy0tDRJBf8lfddddykkJMT+io+PzzfPlf0/8nh7e9v//8KFC/rll190xx13yBij7du3S5KSkpK0Y8cODR48WIGBgfb577nnHjVr1qzImo0x+uSTT9SzZ08ZY/TLL7/YX127dlVqaqq2bdvmsMzQoUMdrlTk/RX8448/Frmt65F3TM+fP1/oPEFBQdq4caNOnjzp9HYGDx7scMyL4u7urscee8z+3tPTU4899phOnTqlrVu3Ol3DteSd7yFDhqhKlSr29pYtW+qee+7RF198kW+ZESNGOLzv1KmTzpw5Y//MlsTKlSuVlZWlMWPGOPT1Gj58uAICArR06VKH+b28vDR06NASb0f6X9+czz77TLm5uU6tw8/PT/369bO/b9y4sYKCgtS0aVOHK6d5/1+Wn2NUDAQZVFh5l6PT09PzTXv77be1YsUKffDBBwUu6+7uXuDXQEePHrX/wvLz81NISIjuvPNOSVJqaqok6eeff5YkNWzYMN/yjRs3LrLm06dP69y5c3rnnXccglZISIj9l09eh888tWrVcnh/yy23SPrtEn5ZyTumRV3yf/nll/X9998rIiJC7dq10+TJk0v8S6lu3brFnrd69er5OmQ3atRI0m/3fikreee7oHPbtGlT/fLLL7pw4YJDe2mes8K27+npqXr16tmn56lRo4bTI7769u2rDh066P/+7/8UGhqqfv36adGiRSUKNTVr1sx3b5rAwEBFRETka5PK9nOMioE+MqiwAgMDFR4eru+//z7ftLy/9gr7Befl5ZVvJFNOTo7uuecenT17Vn/961/VpEkT+fr66sSJExo
yZIjTf6FeKW8dAwcO1ODBgwucJ69fRJ5KlSoVOJ8x5rrrKUzeMS1qCHCfPn3UqVMnLVmyRMuXL9f06dM1bdo0LV68WN27dy/Wdop7Naa4Cru5W05OTqlu51pccc7yXM8x9fb21jfffKPVq1dr6dKl+uqrr7Rw4UJ17txZy5cvL3S/rlTYPK48JrA2ggwqtB49euif//ynNm3apHbt2l3Xunbv3q0DBw7ovffe06BBg+ztK1ascJgv7340Bw8ezLeO/fv3F7mNkJAQ+fv7KycnR1FRUddV75VK8+6s6enpWrJkiSIiItS0adMi5w0PD9fIkSM1cuRInTp1SrfddptefPFFe5ApzbpOnjyZb5j8gQMHJMne2Tbvyse5c+cclr36qkVJass73wWd23379qlq1aplNnT/6u3Xq1fP3p6VlaUjR46U6udIktzc3NSlSxd16dJFr776ql566SU9/fTTWr16taKiorgTMG44vlpChfbkk0/Kx8dHw4YNU0pKSr7pJflrL+8vxiuXMcZo1qxZDvOFh4erdevWeu+99+xfN0m/BZ49e/ZccxvR0dH65JNPCrySdPr06WLXeyVfX1+HWpx16dIlPfLIIzp79qyefvrpIq9wXL29atWqqXr16srMzCz1uqTfRpq9/fbb9vdZWVl6++23FRISojZt2kiS6tevL0n65ptvHGot6A65xa3tyvN9ZUD6/vvvtXz5ct13333O7lKxREVFydPTU6+//rrDZ3Pu3LlKTU1Vjx49Sm1bZ8+ezdeWd9O7vPOaF9quDotAWeGKDCq0hg0bav78+erfv78aN25sv7OvMUZHjhzR/Pnz5ebmVqxh0U2aNFH9+vU1YcIEnThxQgEBAfrkk08K/A4/Li5OPXr0UMeOHTVs2DCdPXtWb7zxhpo3b15gn50rTZ06VatXr1b79u01fPhwNWvWTGfPntW2bdu0cuXKAn+ZXEubNm20cOFCjRs3Tr///e/l5+ennj17FrnMiRMn7H2I0tPTtWfPHiUmJio5OVnjx4936Fh7tfPnz6tmzZr605/+pFatWsnPz08rV67U5s2bNWPGjOuqqzDVq1fXtGnT9NNPP6lRo0ZauHChduzYoXfeecc+/Lh58+a6/fbbNXHiRJ09e1ZVqlTRggULdPny5XzrK0lt06dPV/fu3RUZGamYmBj78OvAwMAyf/5USEiIJk6cqClTpqhbt266//77tX//fs2ePVu///3v893U8Ho8//zz+uabb9SjRw/Vrl1bp06d0uzZs1WzZk117NhR0m9hMSgoSG+99Zb8/f3l6+ur9u3bl6i/E1AiLhotBdxQhw4dMo8//rhp0KCBqVy5svH29jZNmjQxI0aMMDt27HCYN++GeAXZs2ePiYqKMn5+fqZq1apm+PDhZufOnQUOE/3kk09M06ZNjZeXl2nWrFmJboiXkpJiYmNjTUREhPHw8DBhYWGmS5cu5p133rHPkzdkOO9mYnkKGh6cnp5uHn74YRMUFFTsG+Lp/9/MzGazmYCAANO8eXMzfPhws3HjxgKXuXI/MjMzzV/+8hfTqlUr4+/vb3x9fU2rVq3M7NmzHZYprK7C9u3Kade6IV7t2rXNm2++mW/5w4cPm6ioKOPl5WVCQ0PN3/72N7NixYp86yystsJuiLdy5UrToUMH4+3tbQICAkzPnj0LvSHe1cOXCxsWfrXCljfmt+HWTZo0MR4eHiY0NNQ8/vjjhd4Qr7iuHn69atUq06tXL1O9enXj6elpqlevbvr3728OHDjgsNxnn31mmjVrZtzd3Qu8Id7VChvuL+maQ/gBmzH0pAIAANZEHxkAAGBZBBkAAGBZBBkAAGBZBBkAAGBZBBkAAGBZBBkAAGBZFf6GeLm5uTp58qT8/f25dTYAABZhjNH58+dVvXr1fM++u1KFDzInT57M91RVAABgDceOHSvy7usVPsj4+/tL+u1ABAQEuLgaAABQHGlpaYqIiLD/Hi9MhQ8yeV8nBQQ
EEGQAALCYa3ULobMvAACwLIIMAACwLIIMAACwrArfRwaoaHJycpSdne3qMlBOeXh4qFKlSq4uA7hhCDKARRhjlJycrHPnzrm6FJRzQUFBCgsL495ZuCkQZACLyAsx1apVk4+PD7+kkI8xRhcvXtSpU6ckSeHh4S6uCCh7BBnAAnJycuwhJjg42NXloBzz9vaWJJ06dUrVqlXjayZUeHT2BSwgr0+Mj4+PiyuBFeR9TuhLhZsBQQawEL5OQnHwOcHNhCADAAAsiyADAMU0efJktW7d2v5+yJAheuCBB1xWDwA6+wKWt3t3zxu2rRYtPi/R/EOGDNF7772Xr/3gwYNq0KCBpN9GY7344otaunSpTpw4oWrVqql169YaM2aMunTpUui609LSNH36dC1evFg//vijfHx8VK9ePT300EMaPny4brnllpLtnBNmzZolY0yprnPy5Mn69NNPtWPHjlJdL1BREWQAlKlu3bopISHBoS0kJESS9NNPP6lDhw4KCgrS9OnT1aJFC2VnZ2vZsmWKjY3Vvn37Clzn2bNn1bFjR6WlpemFF15QmzZtFBgYqP379yshIUHz589XbGxsgctmZWXJ09OzVPYtMDCwVNYDwHl8tQSgTHl5eSksLMzhlTckeOTIkbLZbNq0aZOio6PVqFEjNW/eXOPGjdOGDRsKXeff/vY3HT16VJs2bdLQoUPVsmVL1a5dW/fee68++ugjjRw50j5vnTp19MILL2jQoEEKCAjQo48+Kkn661//qkaNGtmv5Dz77LP5RvlMnTpVoaGh8vf3V0xMjDIyMhymX/3VUm5uruLi4lS3bl15e3urVatW+vjjj+3T16xZI5vNplWrVqlt27by8fHRHXfcof3790uS5s2bpylTpmjnzp2y2Wyy2WyaN2+ejDGaPHmyatWqJS8vL1WvXl1//vOfnTshQAVDkAHgEmfPntVXX32l2NhY+fr65pseFBRU4HK5ublauHChBg4cqOrVqxc4z9Wjdl555RW1atVK27dv17PPPitJ8vf317x587Rnzx7NmjVL//jHPzRz5kz7MosWLdLkyZP10ksvacuWLQoPD9fs2bOL3Ke4uDi9//77euutt/TDDz9o7NixGjhwoNauXesw39NPP60ZM2Zoy5Ytcnd317BhwyRJffv21fjx49W8eXMlJSUpKSlJffv21SeffKKZM2fq7bff1sGDB/Xpp5+qRYsWRdYC3Cz4agnFUlA/jJL2l8DN6T//+Y/8/Pzs77t3767ExEQdOnRIxhg1adKkROs7ffq0zp07p8aNGzu0t2nTxn5lo2fPnvroo4/s0zp37qzx48c7zP/MM8/Y/79OnTqaMGGCFixYoCeffFKS9NprrykmJkYxMTGSpL///e9auXJlvqsyeTIzM/XSSy9p5cqVioyMlCTVq1dP3333nd5++23deeed9nlffPFF+/unnnpKPXr0UEZGhry9veXn5yd3d3eFhYXZ5z969KjCwsIUFRUlDw8P1apVS+3atSvRcUPpuvrfRP49dB2CDIAydffdd2vOnDn293lXX0q7k+ySJUuUlZWlv/71r7p06ZLDtLZt2+abf+HChXr99dd1+PBhpaen6/LlywoICLBP37t3r0aMGOGwTGRkpFavXl3g9g8dOqSLFy/qnnvucWjPysrS7373O4e2li1b2v8/7zECp06dUq1atQpc90MPPaTXXntN9erVU7du3XTfffepZ8+ecnfnn3CAnwIAZcrX19c+QulKDRs2lM1mK7RDb2FCQkIUFBRkv/qSJy8E+Pv753uw5tVfXa1fv14DBgzQlClT1LVrVwUGBmrBggWaMWNGiWq5Unp6uiRp6dKlqlGjhsM0Ly8vh/ceHh72/8/7Giw3N7fQdUdERGj//v1auXKlVqxYoZEjR2r69Olau3atw7qAmxF9ZAC4RJUqVdS1a1fFx8frwoUL+aYX9pRvNzc39enTRx988IFOnjzp1Lb/+9//qnbt2nr66af
Vtm1bNWzYUD///LPDPE2bNtXGjRsd2orqgNysWTN5eXnp6NGjatCggcMrIiKi2LV5enoqJycnX7u3t7d69uyp119/XWvWrNH69eu1e/fuYq8XqKi4IgPAZeLj49WhQwe1a9dOzz//vFq2bKnLly9rxYoVmjNnjvbu3Vvgci+99JLWrFljX65t27by9fXVrl27tH79et16661Fbrdhw4Y6evSoFixYoN///vdaunSplixZ4jDP6NGjNWTIELVt21YdOnTQhx9+qB9++EH16tUrcJ3+/v6aMGGCxo4dq9zcXHXs2FGpqalat26dAgICNHjw4GIdkzp16ujIkSPasWOHatasKX9/f3300UfKyclR+/bt5ePjow8++EDe3t6qXbt2sdYJVGQEGQAuU69ePW3btk0vvviixo8fr6SkJIWEhKhNmzYO/WquFhwcrE2bNmnatGmaPn26jhw5Ijc3NzVs2FB9+/bVmDFjitzu/fffr7Fjx2rUqFHKzMxUjx499Oyzz2ry5Mn2efr27avDhw/rySefVEZGhqKjo/X4449r2bJlha73hRdeUEhIiOLi4vTjjz8qKChIt912m/72t78V+5hER0dr8eLFuvvuu3Xu3DklJCQoKChIU6dO1bhx45STk6MWLVro888/50nogCSbKe0ed+VMWlqaAgMDlZqa6tCRDyVTUUYtWXWkQUZGho4cOaK6deuqcuXKri4H5Ryfl7Jn1X9LrKS4v7/pIwMAACyLIAMAACyLIAMAACyLIAMAACyLIANYSAXvm49SwucENxOCDGABeXdvvXjxoosrgRXkfU646y9uBtxHBrhBrmcIe6VKlRQUFKRTp05Jknx8fPI94RkwxujixYs6deqUgoKCVKlSJVeXBJQ5ggxgEXlPQ84LM0BhgoKCHJ6eDVRkBBnAImw2m8LDw1WtWjVlZ2e7uhyUUx4eHlyJwU2FIANYTKVKlfhFBQD/H519AQCAZRFkAACAZRFkAACAZRFkAACAZRFkAACAZbk0yEyePFk2m83h1aRJE/v0jIwMxcbGKjg4WH5+foqOjlZKSooLKwYAAOWJy6/ING/eXElJSfbXd999Z582duxYff7550pMTNTatWt18uRJ9e7d24XVAgCA8sTl95Fxd3cv8A6Uqampmjt3rubPn6/OnTtLkhISEtS0aVNt2LBBt99++40uFQAAlDMuvyJz8OBBVa9eXfXq1dOAAQN09OhRSdLWrVuVnZ2tqKgo+7xNmjRRrVq1tH79+kLXl5mZqbS0NIcXAAComFx6RaZ9+/aaN2+eGjdurKSkJE2ZMkWdOnXS999/r+TkZHl6eiooKMhhmdDQUCUnJxe6zri4OE2ZMqWMKy+/rn4wYXEfSgjc7Irzs1NRf76u54Gm5VlF3S84cmmQ6d69u/3/W7Zsqfbt26t27dpatGiRvL29nVrnxIkTNW7cOPv7tLQ0RUREXHetAACg/HH5V0tXCgoKUqNGjXTo0CGFhYUpKytL586dc5gnJSWlyKe6enl5KSAgwOEFAAAqpnIVZNLT03X48GGFh4erTZs28vDw0KpVq+zT9+/fr6NHjyoyMtKFVQIAgPLCpV8tTZgwQT179lTt2rV18uRJTZo0SZUqVVL//v0VGBiomJgYjRs3TlWqVFFAQICeeOIJRUZGMmIJAABIcnGQOX78uPr3768zZ84oJCREHTt21IYNGxQSEiJJmjlzptzc3BQdHa3MzEx17dpVs2fPdmXJAACgHHFpkFmwYEGR0ytXrqz4+HjFx8ffoIoAACjfGI3lqFz1kQEAACgJggwAALAsggwAALAsggwAALAsggwAALAsggwAALAslw6/BqSK+yA+wMpc+XPJ8GKUBFdkAACAZRFkAACAZRFkAACAZRFkAACAZRFkAACAZRFkAACAZRFkAACAZRFkAACAZRFkAACAZRFkAACAZRFkAACAZRFkAACAZfHQSMAJPOiy/OBcOOJ44GbDFRkAAGBZBBkAAGB
ZBBkAAGBZBBkAAGBZBBkAAGBZjFpCqbl6tITEiInS4MwoFM4FgJsFV2QAAIBlEWQAAIBlEWQAAIBlEWQAAIBlEWQAAIBlEWQAAIBlMfy6nGL4LICKoKB/y67Gv224HlyRAQAAlkWQAQAAlkWQAQAAlkWQAQAAlkWQAQAAlsWoJQClzpkHXZbVtm/09lFyxRnZVBHw2SwbXJEBAACWRZABAACWRZABAACWRZABAACWRZABAACWRZABAACWxfBrC7tZhiwWlyuH/FpBWR2f0vocVtShqRV1v4DygisyAADAsggyAADAsggyAADAsggyAADAsggyAADAshi1BEZVVACMYCsZjlfZ4LjCFbgiAwAALIsgAwAALKvcBJmpU6fKZrNpzJgx9raMjAzFxsYqODhYfn5+io6OVkpKiuuKBAAA5Uq5CDKbN2/W22+/rZYtWzq0jx07Vp9//rkSExO1du1anTx5Ur1793ZRlQAAoLxxeZBJT0/XgAED9I9//EO33HKLvT01NVVz587Vq6++qs6dO6tNmzZKSEjQf//7X23YsMGFFQMAgPLC5UEmNjZWPXr0UFRUlEP71q1blZ2d7dDepEkT1apVS+vXry90fZmZmUpLS3N4AQCAismlw68XLFigbdu2afPmzfmmJScny9PTU0FBQQ7toaGhSk5OLnSdcXFxmjJlSmmXetNhGGXF5+ywe1d+Nvhcoih8PkqfFW7P4bIrMseOHdPo0aP14YcfqnLlyqW23okTJyo1NdX+OnbsWKmtGwAAlC8uCzJbt27VqVOndNttt8nd3V3u7u5au3atXn/9dbm7uys0NFRZWVk6d+6cw3IpKSkKCwsrdL1eXl4KCAhweAEAgIrJZV8tdenSRbt373ZoGzp0qJo0aaK//vWvioiIkIeHh1atWqXo6GhJ0v79+3X06FFFRka6omQAAFDOuCzI+Pv769Zbb3Vo8/X1VXBwsL09JiZG48aNU5UqVRQQEKAnnnhCkZGRuv32211RMgAAKGfK9bOWZs6cKTc3N0VHRyszM1Ndu3bV7NmzXV0WAAAoJ8pVkFmzZo3D+8qVKys+Pl7x8fGuKQgAAJRr5SrI3CyuHs5W3oayAaXNCkM4rcjVw41dvX1AKgc3xAMAAHAWQQYAAFgWQQYAAFgWQQYAAFgWQQYAAFgWo5ZQpm7mEVqM6Lg5cd5xPUrrYa7OPgDWiv9Gc0UGAABYFkEGAABYFkEGAABYFkEGAABYFkEGAABYFkEGAABYFsOv4TRXDjOtKENcb+bh6UBJlNbPihV/5irKv3dlhSsyAADAsggyAADAsggyAADAsggyAADAsggyAADAshi1hBuK3vfXj2MIAP/DFRkAAGBZBBkAAGBZBBkAAGBZBBkAAGBZBBkAAGBZBBkAAGBZDL8uY8UZKlvc4bQMu634OMc3r+I8zJDPx41hxQdLFkdF/fxwRQYAAFgWQQYAAFgWQQYAAFgWQQYAAFgWQQYAAFgWo5ZQ7hTUs/5GjhqoqCMWiuNm3ndnWGEUiKt/nlA0K3yGyjuuyAAAAMsiyAAAAMsiyAAAAMtyKsj8+OOPpV0HAABAiTkVZBo0aKC7775bH3zwgTIyMkq7JgAAgGJxKshs27ZNLVu21Lhx4xQWFqbHHntMmzZtKu3aAAAAiuTU8OvWrVtr1qxZmjFjhv79739r3rx56tixoxo1aqRhw4bpkUceUUhISGnXCpQIwxqvX1kew5v5/JTmw2RvVhyfsmHF43pdnX3d3d3Vu3dvJSYmatq0aTp06JAmTJigiIgIDRo0SElJSaVVJwAAQD7XFWS2bNmikSNHKjw8XK+++qomTJigw4cPa8WKFTp58qR69epVWnUCAADk49RXS6+++qoSEhK0f/9+3XfffXr//fd13333yc3tt1xUt25dzZs3T3Xq1CnNWgEAABw4FWTmzJmjYcOGaciQIQoPDy9wnmrVqmnu3LnXVRwAAEBRnAoyBw8evOY8np6eGjx4sDO
rBwAAKBangkxCQoL8/Pz00EMPObQnJibq4sWLN3WAsWKPbwA3F/6d+h9GkFmfU5194+LiVLVq1Xzt1apV00svvXTdRQEAABSHU0Hm6NGjqlu3br722rVr6+jRo9ddFAAAQHE4FWSqVaumXbt25WvfuXOngoODr7soAACA4nAqyPTv319//vOftXr1auXk5CgnJ0dff/21Ro8erX79+pV2jQAAAAVyqrPvCy+8oJ9++kldunSRu/tvq8jNzdWgQYPoIwMAAG4Yp4KMp6enFi5cqBdeeEE7d+6Ut7e3WrRoodq1a5d2fQAAAIVyKsjkadSokRo1alRatQA3HEMvURjOO0qivH1eyls9ZcmpIJOTk6N58+Zp1apVOnXqlHJzcx2mf/3116VSHAAAQFGc6uw7evRojR49Wjk5Obr11lvVqlUrh1dxzZkzRy1btlRAQIACAgIUGRmpL7/80j49IyNDsbGxCg4Olp+fn6Kjo5WSkuJMyQAAoAJy6orMggULtGjRIt13333XtfGaNWtq6tSpatiwoYwxeu+999SrVy9t375dzZs319ixY7V06VIlJiYqMDBQo0aNUu/evbVu3brr2i4AAKgYnO7s26BBg+veeM+ejt/hvfjii5ozZ442bNigmjVrau7cuZo/f746d+4s6bdHIzRt2lQbNmzQ7bffft3bBwAA1ubUV0vjx4/XrFmzZIwptUJycnK0YMECXbhwQZGRkdq6dauys7MVFRVln6dJkyaqVauW1q9fX+h6MjMzlZaW5vACAAAVk1NXZL777jutXr1aX375pZo3by4PDw+H6YsXLy72unbv3q3IyEhlZGTIz89PS5YsUbNmzbRjxw55enoqKCjIYf7Q0FAlJycXur64uDhNmTKlRPtTkRXUc71Fi89dUMn1uZl64ON/OO8ArsWpIBMUFKQHH3ywVApo3LixduzYodTUVH388ccaPHiw1q5d6/T6Jk6cqHHjxtnfp6WlKSIiojRKBQAA5YxTQSYhIaHUCriyv02bNm20efNmzZo1S3379lVWVpbOnTvncFUmJSVFYWFhha7Py8tLXl5epVYfAAAov5zqIyNJly9f1sqVK/X222/r/PnzkqSTJ08qPT39ugrKzc1VZmam2rRpIw8PD61atco+bf/+/Tp69KgiIyOvaxsAAKBicOqKzM8//6xu3brp6NGjyszM1D333CN/f39NmzZNmZmZeuutt4q1nokTJ6p79+6qVauWzp8/r/nz52vNmjVatmyZAgMDFRMTo3HjxqlKlSoKCAjQE088ocjISEYsAQAASU4GmdGjR6tt27bauXOngoOD7e0PPvighg8fXuz1nDp1SoMGDVJSUpICAwPVsmVLLVu2TPfcc48kaebMmXJzc1N0dLQyMzPVtWtXzZ4925mSAQBABeRUkPn222/13//+V56eng7tderU0YkTJ4q9nrlz5xY5vXLlyoqPj1d8fLwzZQIAgArOqSCTm5urnJycfO3Hjx+Xv7//dReFssWQVgAlxb8bKK+c6ux777336rXXXrO/t9lsSk9P16RJk677sQUAAADF5dQVmRkzZqhr165q1qyZMjIy9PDDD+vgwYOqWrWqPvroo9KuEQAAoEBOBZmaNWtq586dWrBggXbt2qX09HTFxMRowIAB8vb2Lu0aAQAACuRUkJEkd3d3DRw4sDRrAQAAKBGngsz7779f5PRBgwY5VQwAAEBJOH0fmStlZ2fr4sWL8vT0lI+PD0EGAADcEE6NWvr1118dXunp6dq/f786duxIZ18AAHDDOP2spas1bNhQU6dOzXe1BgAAoKyUWpCRfusAfPLkydJcJQAAQKGc6iPz73//2+G9MUZJSUl688031aFDh1IpDAAA4FqcCjIPPPCAw3ubzaaQkBB17txZM2bMKI26AAAArsnpZy0BAAC4mtM3xAMAADefqx8g2qLF5y6q5DdOBZlx48YVe95XX33VmU0AAABck1NBZvv27dq+fbuys7P
VuHFjSdKBAwdUqVIl3Xbbbfb5bDZb6VQJAABQAKeCTM+ePeXv76/33ntPt9xyi6TfbpI3dOhQderUSePHjy/VIgEAAAri1H1kZsyYobi4OHuIkaRbbrlFf//73xm1BAAAbhingkxaWppOnz6dr/306dM6f/78dRcFAABQHE59tfTggw9q6NChmjFjhtq1aydJ2rhxo/7yl7+od+/epVogYAVX9+JH+cL5ASoup4LMW2+9pQkTJujhhx9Wdnb2bytyd1dMTIymT59eqgUCAAAUxqkg4+Pjo9mzZ2v69Ok6fPiwJKl+/fry9fUt1eIAAACKcl0PjUxKSlJSUpIaNmwoX19fGWNKqy4AAIBrcirInDlzRl26dFGjRo103333KSkpSZIUExPD0GsAAHDDOBVkxo4dKw8PDx09elQ+Pj729r59++qrr74qteIAAACK4lQfmeXLl2vZsmWqWbOmQ3vDhg31888/l0phAAAA1+LUFZkLFy44XInJc/bsWXl5eV13UQAAAMXhVJDp1KmT3n//fft7m82m3Nxcvfzyy7r77rtLrTgAAICiOPXV0ssvv6wuXbpoy5YtysrK0pNPPqkffvhBZ8+e1bp160q7RgAAgAI5dUXm1ltv1YEDB9SxY0f16tVLFy5cUO/evbV9+3bVr1+/tGsEAAAoUImvyGRnZ6tbt25666239PTTT5dFTQAAAMVS4isyHh4e2rVrV1nUAgAAUCJOfbU0cOBAzZ07t7RrAQAAKBGnOvtevnxZ7777rlauXKk2bdrke8bSq6++WirFAQAAFKVEQebHH39UnTp19P333+u2226TJB04cMBhHpvNVnrVAQAAFKFEQaZhw4ZKSkrS6tWrJf32SILXX39doaGhZVIcAABAUUrUR+bqp1t/+eWXunDhQqkWBAAAUFxOdfbNc3WwAQAAuJFKFGRsNlu+PjD0iQEAAK5Soj4yxhgNGTLE/mDIjIwMjRgxIt+opcWLF5dehQAAAIUoUZAZPHiww/uBAweWajEAAAAlUaIgk5CQUFZ1AAAAlNh1dfYFAABwJYIMAACwLIIMAACwLIIMAACwLIIMAACwLIIMAACwLIIMAACwLIIMAACwLIIMAACwLIIMAACwLIIMAACwLIIMAACwLJcGmbi4OP3+97+Xv7+/qlWrpgceeED79+93mCcjI0OxsbEKDg6Wn5+foqOjlZKS4qKKAQBAeeLSILN27VrFxsZqw4YNWrFihbKzs3XvvffqwoUL9nnGjh2rzz//XImJiVq7dq1Onjyp3r17u7BqAABQXtiMMcbVReQ5ffq0qlWrprVr1+oPf/iDUlNTFRISovnz5+tPf/qTJGnfvn1q2rSp1q9fr9tvv/2a60xLS1NgYKBSU1MVEBBQ1rug3bt7lvk2AAAoL1q0+LxM1lvc39/lqo9MamqqJKlKlSqSpK1btyo7O1tRUVH2eZo0aaJatWpp/fr1Ba4jMzNTaWlpDi8AAFAxlZsgk5ubqzFjxqhDhw669dZbJUnJycny9PRUUFCQw7yhoaFKTk4ucD1xcXEKDAy0vyIiIsq6dAAA4CLlJsjExsbq+++/14IFC65rPRMnTlRqaqr9dezYsVKqEAAAlDfuri5AkkaNGqX//Oc/+uabb1SzZk17e1hYmLKysnTu3DmHqzIpKSkKCwsrcF1eXl7y8vIq65IBAEA54NIrMsYYjRo1SkuWLNHXX3+tunXrOkxv06aNPDw8tGrVKnvb/v37dfToUUVGRt7ocgEAQDnj0isysbGxmj9/vj777DP5+/vb+70EBgbK29tbgYGBiomJ0bhx41SlShUFBAToiSeeUGRkZLFGLAEAgIrNpUFmzpw5kqS77rrLoT0hIUFDhgyRJM2cOVNubm6Kjo5WZmamunbtqtmzZ9/gSgEAQHnk0iBTnFvYVK5cWfHx8YqPj78BFQEAACspN6OWAAAASoogAwAALIsgAwAALIsgAwAALIsgAwAALIsgAwAALIsgAwA
ALIsgAwAALIsgAwAALIsgAwAALIsgAwAALIsgAwAALIsgAwAALMulT7+2ut27e7q6BAAAbmpckQEAAJZFkAEAAJZFkAEAAJZFkAEAAJZFkAEAAJZFkAEAAJZFkAEAAJZFkAEAAJZFkAEAAJZFkAEAAJZFkAEAAJZFkAEAAJZFkAEAAJZFkAEAAJZFkAEAAJZFkAEAAJZFkAEAAJZFkAEAAJZFkAEAAJZFkAEAAJZFkAEAAJZFkAEAAJZFkAEAAJZFkAEAAJZFkAEAAJZFkAEAAJZFkAEAAJZFkAEAAJZFkAEAAJZFkAEAAJZFkAEAAJZFkAEAAJZFkAEAAJZFkAEAAJZFkAEAAJZFkAEAAJZFkAEAAJZFkAEAAJZFkAEAAJZFkAEAAJbl0iDzzTffqGfPnqpevbpsNps+/fRTh+nGGD333HMKDw+Xt7e3oqKidPDgQdcUCwAAyh2XBpkLFy6oVatWio+PL3D6yy+/rNdff11vvfWWNm7cKF9fX3Xt2lUZGRk3uFIAAFAeubty4927d1f37t0LnGaM0WuvvaZnnnlGvXr1kiS9//77Cg0N1aeffqp+/frdyFIBAEA5VG77yBw5ckTJycmKioqytwUGBqp9+/Zav369CysDAADlhUuvyBQlOTlZkhQaGurQHhoaap9WkMzMTGVmZtrfp6WllU2BAADA5crtFRlnxcXFKTAw0P6KiIhwdUkAAKCMlNsgExYWJklKSUlxaE9JSbFPK8jEiROVmppqfx07dqxM6wQAAK5TboNM3bp1FRYWplWrVtnb0tLStHHjRkVGRha6nJeXlwICAhxeAACgYnJpH5n09HQdOnTI/v7IkSPasWOHqlSpolq1amnMmDH6+9//roYNG6pu3bp69tlnVb16dT3wwAOuKxoAAJQbLg0yW7Zs0d13321/P27cOEnS4MGDNW/ePD355JO6cOGCHn30UZ07d04dO3bUV199pcqVK7uqZAAAUI7YjDHG1UWUpbS0NAUGBio1NbXUv2bavbtnqa4PAACradHi8zJZb3F/f5fbPjIAAADXQpABAACWRZABAACWRZABAACWRZABAACWRZABAACWRZABAACWRZABAACWRZABAACWRZABAACWRZABAACWRZABAACWRZABAACWRZABAACWRZABAACWRZABAACWRZABAACWRZABAACWRZABAACWRZABAACWRZABAACWRZABAACWRZABAACWRZABAACWRZABAACWRZABAACWRZABAACWRZABAACWRZABAACWRZABAACWRZABAACWRZABAACWRZABAACWRZABAACWRZABAACWRZABAACWRZABAACWRZABAACWRZABAACWRZABAACWRZABAACWRZABAACWRZABAACWRZABAACWRZABAACWRZABAACWRZABAACWRZABAACWRZABAACWRZABAACWRZABAACWRZABAACWRZABAACWRZABAACWZYkgEx8frzp16qhy5cpq3769Nm3a5OqSAABAOVDug8zChQs1btw4TZo0Sdu2bVOrVq3UtWtXnTp1ytWlAQAAFyv3QebVV1/V8OHDNXToUDVr1kxvvfWWfHx89O6777q6NAAA4GLlOshkZWVp69atioqKsre5ubkpKipK69evd2FlAACgPHB3dQFF+eWXX5STk6PQ0FCH9tDQUO3bt6/AZTIzM5WZmWl/n5qaKklKS0sr9frS07NLfZ0AAFhJWfx+vXK9xpgi5yvXQcYZcXFxmjJlSr72iIgIF1QDAEBFF1imaz9//rwCAwvfRrkOMlWrVlWlSpWUkpLi0J6SkqKwsLACl5k4caLGjRtnf5+bm6uzZ88qODhYNputTOstSlpamiIiInTs2DEFBAS4rA5cG+fKOjhX1sG5so7ycq6MMTp//ryqV69e5HzlOsh4enqqTZs2WrVqlR544AFJvwWTVatWadSoUQUu4+XlJS8vL4e2oKCgMq60+AICAvghtgjOlXVwrqyDc2U
d5eFcFXUlJk+5DjKSNG7cOA0ePFht27ZVu3bt9Nprr+nChQsaOnSoq0sDAAAuVu6DTN++fXX69Gk999xzSk5OVuvWrfXVV1/l6wAMAABuPuU+yEjSqFGjCv0qySq8vLw0adKkfF97ofzhXFkH58o6OFfWYbVzZTPXGtcEAABQTpXrG+IBAAAUhSADAAAsiyADAAAsiyADAAAsiyDjhLNnz2rAgAEKCAhQUFCQYmJilJ6eXuQyGRkZio2NVXBwsPz8/BQdHZ3vjsVHjx5Vjx495OPjo2rVqukvf/mLLl++XOD61q1bJ3d3d7Vu3bq0dqvCctX5Wrx4se655x6FhIQoICBAkZGRWrZsWZnso1XFx8erTp06qly5stq3b69NmzYVOX9iYqKaNGmiypUrq0WLFvriiy8cphtj9Nxzzyk8PFze3t6KiorSwYMHHeZx5vOAG3+ufvrpJ8XExKhu3bry9vZW/fr1NWnSJGVlZZXJ/lUkrvi5ypOZmanWrVvLZrNpx44dpbVLRTMosW7duplWrVqZDRs2mG+//dY0aNDA9O/fv8hlRowYYSIiIsyqVavMli1bzO23327uuOMO+/TLly+bW2+91URFRZnt27ebL774wlStWtVMnDgx37p+/fVXU69ePXPvvfeaVq1alfbuVTiuOl+jR48206ZNM5s2bTIHDhwwEydONB4eHmbbtm1ltq9WsmDBAuPp6Wneffdd88MPP5jhw4eboKAgk5KSUuD869atM5UqVTIvv/yy2bNnj3nmmWeMh4eH2b17t32eqVOnmsDAQPPpp5+anTt3mvvvv9/UrVvXXLp0yT6PM5+Hm50rztWXX35phgwZYpYtW2YOHz5sPvvsM1OtWjUzfvz4G7LPVuWqn6s8f/7zn0337t2NJLN9+/ay2k0HBJkS2rNnj5FkNm/ebG/78ssvjc1mMydOnChwmXPnzhkPDw+TmJhob9u7d6+RZNavX2+MMeaLL74wbm5uJjk52T7PnDlzTEBAgMnMzHRYX9++fc0zzzxjJk2aRJC5hvJwvq7UrFkzM2XKlOvdrQqhXbt2JjY21v4+JyfHVK9e3cTFxRU4f58+fUyPHj0c2tq3b28ee+wxY4wxubm5JiwszEyfPt0+/dy5c8bLy8t89NFHxhjnPg9wzbkqyMsvv2zq1q17PbtS4bnyXH3xxRemSZMm5ocffrihQYavlkpo/fr1CgoKUtu2be1tUVFRcnNz08aNGwtcZuvWrcrOzlZUVJS9rUmTJqpVq5bWr19vX2+LFi0c7ljctWtXpaWl6YcffrC3JSQk6Mcff9SkSZNKe9cqJFefryvl5ubq/PnzqlKlSmnsmqVlZWVp69atDsfYzc1NUVFR9mN8tfXr1zvML/12zPPmP3LkiJKTkx3mCQwMVPv27R3OW0k/Dzc7V52rgqSmpvLzUwRXnquUlBQNHz5c//rXv+Tj41Oau3VNBJkSSk5OVrVq1Rza3N3dVaVKFSUnJxe6jKenZ76HV4aGhtqXSU5OzvfYhbz3efMcPHhQTz31lD744AO5u1vipswu58rzdbVXXnlF6enp6tOnjzO7UqH88ssvysnJKfAYFnVeipo/77/Xmqekn4ebnavO1dUOHTqkN954Q4899phT+3EzcNW5MsZoyJAhGjFihMMfCTcKQeb/e+qpp2Sz2Yp87du3z2X15eTk6OGHH9aUKVPUqFEjl9VRXpT383W1+fPna8qUKVq0aFG+X6QAinbixAl169ZNDz30kIYPH+7qcnCVN954Q+fPn9fEiRNdsn3+rP//xo8fryFDhhQ5T7169RQWFqZTp045tF++fFlnz55VWFhYgcuFhYUpKytL586dc/grPyUlxb5MWFhYvp7leaNkwsLCdP78eW3ZskXbt2+3P3cqNzdXxhi5u7tr+fLl6ty5c0l22dLK+/m60oIFC/R///d/SkxMzHcJ92ZVtWpVVapUKd9IsCuP8dXCwsKKnD/vvykpKQoPD3eYJ29
0nzOfh5udq85VnpMnT+ruu+/WHXfcoXfeeed6d6dCc9W5+vrrr7V+/fp8z2Zq27atBgwYoPfee++69uuabkhPnAokr7Pgli1b7G3Lli0rVufRjz/+2N62b9++AjuPXtmz/O233zYBAQEmIyPD5OTkmN27dzu8Hn/8cdO4cWOze/duk56eXkZ7bG2uOl955s+fbypXrmw+/fTT0t41y2vXrp0ZNWqU/X1OTo6pUaNGkZ0S//jHPzq0RUZG5uuU+Morr9inp6amFtjZtySfB7jmXBljzPHjx03Dhg1Nv379zOXLl0tzlyosV5yrn3/+2eF307Jly4wk8/HHH5tjx46V9i7mQ5BxQrdu3czvfvc7s3HjRvPdd9+Zhg0bOgzfPH78uGncuLHZuHGjvW3EiBGmVq1a5uuvvzZbtmwxkZGRJjIy0j49bzjvvffea3bs2GG++uorExISUuDw6zyMWioeV52vDz/80Li7u5v4+HiTlJRkf507d+7G7Hg5t2DBAuPl5WXmzZtn9uzZYx599FETFBRkHwn2yCOPmKeeeso+/7p164y7u7t55ZVXzN69e82kSZMKHCYaFBRkPvvsM7Nr1y7Tq1evAodfF/V5QH6uOFfHjx83DRo0MF26dDHHjx93+BlC4Vz1c3WlI0eOMPy6vDtz5ozp37+/8fPzMwEBAWbo0KHm/Pnz9ul5J3H16tX2tkuXLpmRI0eaW265xfj4+JgHH3ww3w/kTz/9ZLp37268vb1N1apVzfjx4012dnahdRBkisdV5+vOO+80kvK9Bg8eXNa7bBlvvPGGqVWrlvH09DTt2rUzGzZssE+788478x2rRYsWmUaNGhlPT0/TvHlzs3TpUofpubm55tlnnzWhoaHGy8vLdOnSxezfv99hnmt9HlCwG32uEhISCvz54YuEa3PFz9WVbnSQsRljTNl+eQUAAFA2GLUEAAAsiyADAAAsiyADAAAsiyADAAAsiyADAAAsiyADAAAsiyADAAAsiyAD4IYZMmSIHnjgAfv7u+66S2PGjHFZPdfL6vUDFQFBBrhJJScna/To0WrQoIEqV66s0NBQdejQQXPmzNHFixdvSA2LFy/WCy+8UKrrvDosFaRnz57q1q1bgdO+/fZb2Ww27dq1q1TrAlA2ePo1cBP68ccf1aFDBwUFBemll15SixYt5OXlpd27d+udd95RjRo1dP/99xe4bHZ2tjw8PEqljipVqpTKekoqJiZG0dHROn78uGrWrOkwLSEhQW3btlXLli1dUhuAkuGKDHATGjlypNzd3bVlyxb16dNHTZs2Vb169dSrVy8tXbpUPXv2tM9rs9k0Z84c3X///fL19dWLL76onJwcxcTEqG7duvL29lbjxo01a9Ysh23k5ORo3LhxCgoKUnBwsJ588kld/USUq7+ayczM1IQJE1SjRg35+vqqffv2WrNmjX36vHnzFBQUpGXLlqlp06by8/NTt27dlJSUJEmaPHmy3nvvPX322Wey2Wyy2WwOy+f54x//qJCQEM2bN8+hPT09XYmJiYqJidGZM2fUv39/1ahRQz4+PmrRooU++uijIo+rzWbTp59+6tAWFBTksJ1jx46pT58+CgoKUpUqVdSrVy/99NNPRa4XQOEIMsBN5syZM1q+fLliY2Pl6+tb4Dw2m83h/eTJk/Xggw9q9+7dGjZsmHJzc1WzZk0lJiZqz549eu655/S3v/1NixYtsi8zY8YMzZs3T++++66+++47nT17VkuWLCmytlGjRmn9+vVasGCBdu3apYceekjdunXTwYMH7fNcvHhRr7zyiv71r3/pm2++0dGjRzVhwgRJ0oQJE9SnTx97uElKStIdd9yRbzvu7u4aNGiQ5s2b5xCuEhMTlZOTo/79+ysjI0Nt2rTR0qVL9f333+vRRx/VI488ok2bNl37IBciOztbXbt2lb+/v7799lutW7fOHsaysrKcXi9wU7shj6YEUG5s2LDBSDKLFy92aA8ODja+vr7
G19fXPPnkk/Z2SWbMmDHXXG9sbKyJjo62vw8PDzcvv/yy/X12drapWbOm6dWrl73tzjvvNKNHjzbGGPPzzz+bSpUqmRMnTjist0uXLmbixInGmP89EfnQoUP26fHx8SY0NNT+fvDgwQ7bKMzevXvzPfW8U6dOZuDAgYUu06NHDzN+/PgC6zfmt2O1ZMkSh2UCAwNNQkKCMcaYf/3rX6Zx48YmNzfXPj0zM9N4e3ubZcuWXbNmAPnRRwaAJGnTpk3Kzc3VgAEDlJmZ6TCtbdu2+eaPj4/Xu+++q6NHj+rSpUvKyspS69atJUmpqalKSkpS+/bt7fO7u7urbdu2+b5eyrN7927l5OSoUaNGDu2ZmZkKDg62v/fx8VH9+vXt78PDw3Xq1KkS72+TJk10xx136N1339Vdd92lQ4cO6dtvv9Xzzz8v6bevxl566SUtWrRIJ06cUFZWljIzM+Xj41PibeXZuXOnDh06JH9/f4f2jIwMHT582On1Ajczggxwk2nQoIFsNpv279/v0F6vXj1Jkre3d75lrv4KasGCBZowYYJmzJihyMhI+fv7a/r06dq4caPTdaWnp6tSpUraunWrKlWq5DDNz8/P/v9XdzS22WyFhqNriYmJ0RNPPKH4+HglJCSofv36uvPOOyVJ06dP16xZs/Taa6+pRYsW8vX11ZgxY4r8CqigWrKzsx32sU2bNvrwww/zLRsSEuLUPgA3O/rIADeZ4OBg3XPPPXrzzTd14cIFp9axbt063XHHHRo5cqR+97vfqUGDBg5XFAIDAxUeHu4QbC5fvqytW7cWus7f/e53ysnJ0alTp9SgQQOHV1hYWLFr8/T0VE5OTrHm7dOnj9zc3DR//ny9//77GjZsmL1/0Lp169SrVy8NHDhQrVq1Ur169XTgwIEi1xcSEmLveCxJBw8edBjKftttt+ngwYOqVq1avn0MDAws9j4C+B+CDHATmj17ti5fvqy2bdtq4cKF2rt3r/bv368PPvhA+/bty3dF5GoNGzbUli1btGzZMh04cEDPPvusNm/e7DDP6NGjNXXqVH366afat2+fRo4cqXPnzhW6zkaNGmnAgAEaNGiQFi9erCNHjmjTpk2Ki4vT0qVLi71vderU0a5du7R//3798ssvDldErubn56e+fftq4sSJSkpK0pAhQxz2ccWKFfrvf/+rvXv36rHHHlNKSkqR2+7cubPefPNNbd++XVu2bNGIESMcriANGDBAVatWVa9evfTtt9/qyJEjWrNmjf785z/r+PHjxd5HAP9DkAFuQvXr19f27dsVFRWliRMnqlWrVmrbtq3eeOMNTZgw4Zo3qXvsscfUu3dv9e3bV+3bt9eZM2c0cuRIh3nGjx+vRx55RIMHD7Z//fTggw8Wud6EhAQNGjRI48ePV+PGjfXAAw9o8+bNqlWrVrH3bfjw4WrcuLHatm2rkJAQrVu3rsj5Y2Ji9Ouvv6pr166qXr26vf2ZZ57Rbbfdpq5du+quu+5SWFjYNW+0N2PGDEVERKhTp056+OGHNWHCBIc+NT4+Pvrmm29Uq1Yt9e7dW02bNlVMTIwyMjIUEBBQ7H0E8D824+yXywAAAC7GFRkAAGBZBBkAAGBZBBkAAGBZBBkAAGBZBBkAAGBZBBkAAGBZBBkAAGBZBBkAAGBZBBkAAGBZBBkAAGBZBBkAAGBZBBkAAGBZ/w+c2S35jJPqbQAAAABJRU5ErkJggg==", + "text/plain": [ + "
" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "\n", + "# Example: Plot the activations of the final Linear layer\n", + "layer_name, activation = activations[1] # Assuming fc activation\n", + "print(f\"Activation for {layer_name} layer: \", activation.shape)\n", + "\n", + "# Plotting histogram of activations of the final Linear layer\n", + "plt.hist(activation.detach().numpy().flatten(), bins=100, alpha=0.7, color='g', label='FC Activations')\n", + "plt.title(f\"Activation Distribution for {layer_name}\")\n", + "plt.xlabel(\"Activation Value\")\n", + "plt.ylabel(\"Frequency\")\n", + "plt.legend()\n", + "plt.show()\n", + "\n", + "# Example: Plot the gradients of the final Linear layer\n", + "layer_name, grad = gradients[1] # Assuming fc gradient\n", + "print(f\"Gradient for {layer_name} layer: \", grad[0].shape)\n", + "\n", + "# Plotting histogram of gradients of the final Linear layer\n", + "plt.hist(grad[0].numpy().flatten(), bins=100, alpha=0.7, color='y', label='FC Gradients')\n", + "plt.title(f\"Gradient Distribution for {layer_name}\")\n", + "plt.xlabel(\"Gradient Value\")\n", + "plt.ylabel(\"Frequency\")\n", + "plt.legend()\n", + "plt.show()\n" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "neptune_scale_py_312_base", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.12.3" + } + }, + "nbformat": 4, + "nbformat_minor": 2 +} From d03fd4318cf7c87807576cd5d49b979ed1440254 Mon Sep 17 00:00:00 2001 From: LeoRoccoBreedt Date: Mon, 3 Mar 2025 14:45:49 +0100 Subject: [PATCH 014/125] refactor: add validation and test loss calcualtion for each epoch --- .../pytorch_text_model_debugging.ipynb | 365 +++++++++++++++--- 1 file changed, 317 insertions(+), 48 deletions(-) diff 
--git a/integrations-and-supported-tools/pytorch/pytorch_text_model_debugging.ipynb b/integrations-and-supported-tools/pytorch/pytorch_text_model_debugging.ipynb index 4c8f180..0561123 100644 --- a/integrations-and-supported-tools/pytorch/pytorch_text_model_debugging.ipynb +++ b/integrations-and-supported-tools/pytorch/pytorch_text_model_debugging.ipynb @@ -2,7 +2,7 @@ "cells": [ { "cell_type": "code", - "execution_count": 2, + "execution_count": null, "metadata": {}, "outputs": [], "source": [ @@ -10,7 +10,56 @@ "import torch.nn as nn\n", "import torch.optim as optim\n", "import matplotlib.pyplot as plt\n", - "import numpy as np\n" + "import numpy as np\n", + "\n", + "from neptune_scale import Run\n" + ] + }, + { + "cell_type": "code", + "execution_count": 8, + "metadata": {}, + "outputs": [], + "source": [ + "params = {\n", + " \"optimizer\": \"Adam\",\n", + " \"batch_size\": 256,\n", + " \"learning_rate\": 0.01,\n", + " \"epochs\": 400, \n", + " \"device\": torch.device(\"cuda\" if torch.cuda.is_available() else \"cpu\"),\n", + " \"input_features\": 256,\n", + " \"n_classes\": 10,\n", + " \"input_size\": 28 * 28\n", + "}" + ] + }, + { + "cell_type": "code", + "execution_count": 30, + "metadata": {}, + "outputs": [], + "source": [ + "# Define Neptune parameters\n", + "from neptune_scale import Run\n", + "from uuid import uuid4\n", + "\n", + "run = Run(\n", + " project = \"leo/pytorch-tutorial\",\n", + " run_id=f\"pytorch-text-{uuid4()}\"\n", + " )\n", + "\n", + "run.log_configs(\n", + " {\n", + " \"config/learning_rate\": params[\"learning_rate\"],\n", + " \"config/optimizer\": params[\"optimizer\"],\n", + " \"config/batch_size\": params[\"batch_size\"],\n", + " \"config/epochs\": params[\"epochs\"],\n", + " \"config/input_size\": params[\"input_size\"]\n", + " }\n", + ")\n", + "\n", + "run.add_tags(tags=[params[\"optimizer\"]], group_tags=True)\n", + "run.add_tags(tags=[\"text\", \"LLM\", \"Simple\"])" ] }, { @@ -19,7 +68,6 @@ "metadata": {}, "outputs": [], 
"source": [ - "# TODO - capture logs to Neptune\n", "# TODO - improve dataset to be more realistic\n", "# TODO - capture the folowings; attentions, activations, gradients, weights, loss (agg and per layer), learning rate (per layer if needed)\n", "# TODO - capture per layer losses\n", @@ -71,46 +119,37 @@ }, { "cell_type": "code", - "execution_count": 7, + "execution_count": 26, + "metadata": {}, + "outputs": [], + "source": [ + "def evaluate_model(model, input, target, vocab_size):\n", + " model.eval() # set model to evaluation mode\n", + " with torch.no_grad(): # disable gradient calculation\n", + " output = model(input)\n", + " output = output.view(-1, vocab_size)\n", + " data_target = target.view(-1)\n", + " criterion = nn.CrossEntropyLoss()\n", + " loss = criterion(output, data_target)\n", + " \n", + " return loss.item()\n", + " " + ] + }, + { + "cell_type": "code", + "execution_count": 29, "metadata": {}, "outputs": [ { - "name": "stdout", - "output_type": "stream", - "text": [ - "Epoch 1/400, Loss: 11.5184\n", - "Epoch 2/400, Loss: 11.0458\n", - "Epoch 3/400, Loss: 10.6166\n", - "Epoch 4/400, Loss: 10.2209\n", - "Epoch 5/400, Loss: 9.8457\n", - "Epoch 6/400, Loss: 9.4807\n", - "Epoch 7/400, Loss: 9.1192\n", - "Epoch 8/400, Loss: 8.7570\n", - "Epoch 9/400, Loss: 8.3914\n", - "Epoch 10/400, Loss: 8.0211\n", - "Epoch 11/400, Loss: 7.6454\n", - "Epoch 12/400, Loss: 7.2639\n", - "Epoch 13/400, Loss: 6.8763\n", - "Epoch 14/400, Loss: 6.4826\n", - "Epoch 15/400, Loss: 6.0829\n", - "Epoch 16/400, Loss: 5.6777\n", - "Epoch 17/400, Loss: 5.2678\n" - ] - }, - { - "ename": "KeyboardInterrupt", - "evalue": "", - "output_type": "error", - "traceback": [ - "\u001b[1;31m---------------------------------------------------------------------------\u001b[0m", - "\u001b[1;31mKeyboardInterrupt\u001b[0m Traceback (most recent call last)", - "Cell \u001b[1;32mIn[7], line 66\u001b[0m\n\u001b[0;32m 63\u001b[0m loss \u001b[38;5;241m=\u001b[39m criterion(output, 
target_data)\n\u001b[0;32m 65\u001b[0m \u001b[38;5;66;03m# Backward pass\u001b[39;00m\n\u001b[1;32m---> 66\u001b[0m \u001b[43mloss\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mbackward\u001b[49m\u001b[43m(\u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m 68\u001b[0m \u001b[38;5;66;03m# Optimizer step\u001b[39;00m\n\u001b[0;32m 69\u001b[0m optimizer\u001b[38;5;241m.\u001b[39mstep()\n", - "File \u001b[1;32mc:\\Users\\leo.breedt\\miniconda3\\envs\\neptune_scale_py_312_base\\Lib\\site-packages\\torch\\_tensor.py:626\u001b[0m, in \u001b[0;36mTensor.backward\u001b[1;34m(self, gradient, retain_graph, create_graph, inputs)\u001b[0m\n\u001b[0;32m 616\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m has_torch_function_unary(\u001b[38;5;28mself\u001b[39m):\n\u001b[0;32m 617\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m handle_torch_function(\n\u001b[0;32m 618\u001b[0m Tensor\u001b[38;5;241m.\u001b[39mbackward,\n\u001b[0;32m 619\u001b[0m (\u001b[38;5;28mself\u001b[39m,),\n\u001b[1;32m (...)\u001b[0m\n\u001b[0;32m 624\u001b[0m inputs\u001b[38;5;241m=\u001b[39minputs,\n\u001b[0;32m 625\u001b[0m )\n\u001b[1;32m--> 626\u001b[0m \u001b[43mtorch\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mautograd\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mbackward\u001b[49m\u001b[43m(\u001b[49m\n\u001b[0;32m 627\u001b[0m \u001b[43m \u001b[49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mgradient\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mretain_graph\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mcreate_graph\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43minputs\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43minputs\u001b[49m\n\u001b[0;32m 628\u001b[0m \u001b[43m\u001b[49m\u001b[43m)\u001b[49m\n", - "File \u001b[1;32mc:\\Users\\leo.breedt\\miniconda3\\envs\\neptune_scale_py_312_base\\Lib\\site-packages\\torch\\autograd\\__init__.py:347\u001b[0m, in \u001b[0;36mbackward\u001b[1;34m(tensors, 
grad_tensors, retain_graph, create_graph, grad_variables, inputs)\u001b[0m\n\u001b[0;32m 342\u001b[0m retain_graph \u001b[38;5;241m=\u001b[39m create_graph\n\u001b[0;32m 344\u001b[0m \u001b[38;5;66;03m# The reason we repeat the same comment below is that\u001b[39;00m\n\u001b[0;32m 345\u001b[0m \u001b[38;5;66;03m# some Python versions print out the first line of a multi-line function\u001b[39;00m\n\u001b[0;32m 346\u001b[0m \u001b[38;5;66;03m# calls in the traceback and some print out the last line\u001b[39;00m\n\u001b[1;32m--> 347\u001b[0m \u001b[43m_engine_run_backward\u001b[49m\u001b[43m(\u001b[49m\n\u001b[0;32m 348\u001b[0m \u001b[43m \u001b[49m\u001b[43mtensors\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 349\u001b[0m \u001b[43m \u001b[49m\u001b[43mgrad_tensors_\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 350\u001b[0m \u001b[43m \u001b[49m\u001b[43mretain_graph\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 351\u001b[0m \u001b[43m \u001b[49m\u001b[43mcreate_graph\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 352\u001b[0m \u001b[43m \u001b[49m\u001b[43minputs\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 353\u001b[0m \u001b[43m \u001b[49m\u001b[43mallow_unreachable\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[38;5;28;43;01mTrue\u001b[39;49;00m\u001b[43m,\u001b[49m\n\u001b[0;32m 354\u001b[0m \u001b[43m \u001b[49m\u001b[43maccumulate_grad\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[38;5;28;43;01mTrue\u001b[39;49;00m\u001b[43m,\u001b[49m\n\u001b[0;32m 355\u001b[0m \u001b[43m\u001b[49m\u001b[43m)\u001b[49m\n", - "File \u001b[1;32mc:\\Users\\leo.breedt\\miniconda3\\envs\\neptune_scale_py_312_base\\Lib\\site-packages\\torch\\autograd\\graph.py:823\u001b[0m, in \u001b[0;36m_engine_run_backward\u001b[1;34m(t_outputs, *args, **kwargs)\u001b[0m\n\u001b[0;32m 821\u001b[0m unregister_hooks \u001b[38;5;241m=\u001b[39m _register_logging_hooks_on_whole_graph(t_outputs)\n\u001b[0;32m 822\u001b[0m \u001b[38;5;28;01mtry\u001b[39;00m:\n\u001b[1;32m--> 823\u001b[0m 
\u001b[38;5;28;01mreturn\u001b[39;00m \u001b[43mVariable\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_execution_engine\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mrun_backward\u001b[49m\u001b[43m(\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;66;43;03m# Calls into the C++ engine to run the backward pass\u001b[39;49;00m\n\u001b[0;32m 824\u001b[0m \u001b[43m \u001b[49m\u001b[43mt_outputs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\n\u001b[0;32m 825\u001b[0m \u001b[43m \u001b[49m\u001b[43m)\u001b[49m \u001b[38;5;66;03m# Calls into the C++ engine to run the backward pass\u001b[39;00m\n\u001b[0;32m 826\u001b[0m \u001b[38;5;28;01mfinally\u001b[39;00m:\n\u001b[0;32m 827\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m attach_logging_hooks:\n", - "File \u001b[1;32mc:\\Users\\leo.breedt\\miniconda3\\envs\\neptune_scale_py_312_base\\Lib\\site-packages\\torch\\autograd\\function.py:292\u001b[0m, in \u001b[0;36mBackwardCFunction.apply\u001b[1;34m(self, *args)\u001b[0m\n\u001b[0;32m 287\u001b[0m \u001b[38;5;28;01mclass\u001b[39;00m \u001b[38;5;21;01mBackwardCFunction\u001b[39;00m(_C\u001b[38;5;241m.\u001b[39m_FunctionBase, FunctionCtx, _HookMixin):\n\u001b[0;32m 288\u001b[0m \u001b[38;5;250m \u001b[39m\u001b[38;5;124mr\u001b[39m\u001b[38;5;124;03m\"\"\"\u001b[39;00m\n\u001b[0;32m 289\u001b[0m \u001b[38;5;124;03m This class is used for internal autograd work. 
Do not use.\u001b[39;00m\n\u001b[0;32m 290\u001b[0m \u001b[38;5;124;03m \"\"\"\u001b[39;00m\n\u001b[1;32m--> 292\u001b[0m \u001b[38;5;28;01mdef\u001b[39;00m \u001b[38;5;21mapply\u001b[39m(\u001b[38;5;28mself\u001b[39m, \u001b[38;5;241m*\u001b[39margs):\n\u001b[0;32m 293\u001b[0m \u001b[38;5;250m \u001b[39m\u001b[38;5;124mr\u001b[39m\u001b[38;5;124;03m\"\"\"\u001b[39;00m\n\u001b[0;32m 294\u001b[0m \u001b[38;5;124;03m Apply method used when executing this Node during the backward\u001b[39;00m\n\u001b[0;32m 295\u001b[0m \u001b[38;5;124;03m \"\"\"\u001b[39;00m\n\u001b[0;32m 296\u001b[0m \u001b[38;5;66;03m# _forward_cls is defined by derived class\u001b[39;00m\n\u001b[0;32m 297\u001b[0m \u001b[38;5;66;03m# The user should define either backward or vjp but never both.\u001b[39;00m\n", - "\u001b[1;31mKeyboardInterrupt\u001b[0m: " - ] + "data": { + "text/plain": [ + "" + ] + }, + "execution_count": 29, + "metadata": {}, + "output_type": "execute_result" } ], "source": [ @@ -142,9 +181,12 @@ " return hook\n", "\n", "# Initialize model, loss function, and optimizer\n", - "vocab_size = 100000 # Just an example vocab size\n", + "vocab_size = 200000 # Just an example vocab size\n", "embed_size = 1000 # Embedding size\n", "hidden_size = 256 # Hidden size for the LSTM\n", + "batch_size = 3\n", + "sequence_length = 5\n", + "\n", "model = SimpleLLM(vocab_size, embed_size, hidden_size)\n", "optimizer = optim.Adam(model.parameters(), lr=0.001)\n", "criterion = nn.CrossEntropyLoss()\n", @@ -158,11 +200,225 @@ "model.fc.register_forward_hook(capture_activation('fc'))\n", "\n", "model.lstm.register_full_backward_hook(capture_gradient('lstm'))\n", - "model.fc.register_full_backward_hook(capture_gradient('fc'))\n", + "model.fc.register_full_backward_hook(capture_gradient('fc'))\n" + ] + }, + { + "cell_type": "code", + "execution_count": 31, + "metadata": {}, + "outputs": [], + "source": [ "\n", "# Dummy input data (example with batch_size=3 and sequence_length=5)\n", - "input_data = 
torch.randint(0, vocab_size, (5, 10)) # Random word indices\n", - "target_data = torch.randint(0, vocab_size, (5, 10)) # Random target labels\n", + "input_data = torch.randint(0, vocab_size, (batch_size * 3, sequence_length)) # Random word indices\n", + "target_data = torch.randint(0, vocab_size, (batch_size * 3, sequence_length)) # Random target labels\n", + "\n", + "# Manually splitting the data into train, validation, and test sets\n", + "train_size = int(0.6 * input_data.size(0)) # 60% for training\n", + "val_size = int(0.2 * input_data.size(0)) # 20% for validation\n", + "test_size = input_data.size(0) - train_size - val_size # Remaining 20% for testing\n", + "\n", + "# Creating the splits manually\n", + "train_input = input_data[:train_size]\n", + "train_target = target_data[:train_size]\n", + "\n", + "val_input = input_data[train_size:train_size + val_size]\n", + "val_target = target_data[train_size:train_size + val_size]\n", + "\n", + "test_input = input_data[train_size + val_size:]\n", + "test_target = target_data[train_size + val_size:]\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Epoch 1/400, Loss: 12.2419\n", + "Epoch 2/400, Loss: 11.6387\n", + "Epoch 3/400, Loss: 11.1144\n", + "Epoch 4/400, Loss: 10.6412\n", + "Epoch 5/400, Loss: 10.1949\n", + "Epoch 6/400, Loss: 9.7637\n", + "Epoch 7/400, Loss: 9.3417\n", + "Epoch 8/400, Loss: 8.9259\n", + "Epoch 9/400, Loss: 8.5148\n", + "Epoch 10/400, Loss: 8.1072\n", + "Epoch 11/400, Loss: 7.7022\n", + "Epoch 12/400, Loss: 7.2992\n", + "Epoch 13/400, Loss: 6.8982\n", + "Epoch 14/400, Loss: 6.4999\n", + "Epoch 15/400, Loss: 6.1048\n", + "Epoch 16/400, Loss: 5.7135\n", + "Epoch 17/400, Loss: 5.3260\n", + "Epoch 18/400, Loss: 4.9422\n", + "Epoch 19/400, Loss: 4.5620\n", + "Epoch 20/400, Loss: 4.1852\n", + "Epoch 21/400, Loss: 3.8126\n", + "Epoch 22/400, Loss: 3.4462\n", + "Epoch 23/400, Loss: 
3.0892\n", + "Epoch 24/400, Loss: 2.7456\n", + "Epoch 25/400, Loss: 2.4194\n", + "Epoch 26/400, Loss: 2.1149\n", + "Epoch 27/400, Loss: 1.8359\n", + "Epoch 28/400, Loss: 1.5853\n", + "Epoch 29/400, Loss: 1.3643\n", + "Epoch 30/400, Loss: 1.1723\n", + "Epoch 31/400, Loss: 1.0070\n", + "Epoch 32/400, Loss: 0.8651\n", + "Epoch 33/400, Loss: 0.7432\n", + "Epoch 34/400, Loss: 0.6379\n", + "Epoch 35/400, Loss: 0.5465\n", + "Epoch 36/400, Loss: 0.4668\n", + "Epoch 37/400, Loss: 0.3975\n", + "Epoch 38/400, Loss: 0.3371\n", + "Epoch 39/400, Loss: 0.2847\n", + "Epoch 40/400, Loss: 0.2394\n", + "Epoch 41/400, Loss: 0.2005\n", + "Epoch 42/400, Loss: 0.1674\n", + "Epoch 43/400, Loss: 0.1395\n", + "Epoch 44/400, Loss: 0.1162\n", + "Epoch 45/400, Loss: 0.0969\n", + "Epoch 46/400, Loss: 0.0810\n", + "Epoch 47/400, Loss: 0.0681\n", + "Epoch 48/400, Loss: 0.0577\n", + "Epoch 49/400, Loss: 0.0492\n", + "Epoch 50/400, Loss: 0.0423\n", + "Epoch 51/400, Loss: 0.0367\n", + "Epoch 52/400, Loss: 0.0321\n", + "Epoch 53/400, Loss: 0.0283\n", + "Epoch 54/400, Loss: 0.0252\n", + "Epoch 55/400, Loss: 0.0227\n", + "Epoch 56/400, Loss: 0.0205\n", + "Epoch 57/400, Loss: 0.0187\n", + "Epoch 58/400, Loss: 0.0172\n", + "Epoch 59/400, Loss: 0.0159\n", + "Epoch 60/400, Loss: 0.0148\n", + "Epoch 61/400, Loss: 0.0138\n", + "Epoch 62/400, Loss: 0.0130\n", + "Epoch 63/400, Loss: 0.0123\n", + "Epoch 64/400, Loss: 0.0117\n", + "Epoch 65/400, Loss: 0.0111\n", + "Epoch 66/400, Loss: 0.0107\n", + "Epoch 67/400, Loss: 0.0102\n", + "Epoch 68/400, Loss: 0.0099\n", + "Epoch 69/400, Loss: 0.0095\n", + "Epoch 70/400, Loss: 0.0092\n", + "Epoch 71/400, Loss: 0.0090\n", + "Epoch 72/400, Loss: 0.0087\n", + "Epoch 73/400, Loss: 0.0085\n", + "Epoch 74/400, Loss: 0.0083\n", + "Epoch 75/400, Loss: 0.0081\n", + "Epoch 76/400, Loss: 0.0079\n", + "Epoch 77/400, Loss: 0.0078\n", + "Epoch 78/400, Loss: 0.0076\n", + "Epoch 79/400, Loss: 0.0075\n", + "Epoch 80/400, Loss: 0.0074\n", + "Epoch 81/400, Loss: 0.0072\n", + "Epoch 82/400, 
Loss: 0.0071\n", + "Epoch 83/400, Loss: 0.0070\n", + "Epoch 84/400, Loss: 0.0069\n", + "Epoch 85/400, Loss: 0.0068\n", + "Epoch 86/400, Loss: 0.0067\n", + "Epoch 87/400, Loss: 0.0067\n", + "Epoch 88/400, Loss: 0.0066\n", + "Epoch 89/400, Loss: 0.0065\n", + "Epoch 90/400, Loss: 0.0064\n", + "Epoch 91/400, Loss: 0.0064\n", + "Epoch 92/400, Loss: 0.0063\n", + "Epoch 93/400, Loss: 0.0062\n", + "Epoch 94/400, Loss: 0.0062\n", + "Epoch 95/400, Loss: 0.0061\n", + "Epoch 96/400, Loss: 0.0060\n", + "Epoch 97/400, Loss: 0.0060\n", + "Epoch 98/400, Loss: 0.0059\n", + "Epoch 99/400, Loss: 0.0059\n", + "Epoch 100/400, Loss: 0.0058\n", + "Epoch 101/400, Loss: 0.0058\n", + "Epoch 102/400, Loss: 0.0057\n", + "Epoch 103/400, Loss: 0.0057\n", + "Epoch 104/400, Loss: 0.0056\n", + "Epoch 105/400, Loss: 0.0056\n", + "Epoch 106/400, Loss: 0.0056\n", + "Epoch 107/400, Loss: 0.0055\n", + "Epoch 108/400, Loss: 0.0055\n", + "Epoch 109/400, Loss: 0.0054\n", + "Epoch 110/400, Loss: 0.0054\n", + "Epoch 111/400, Loss: 0.0053\n", + "Epoch 112/400, Loss: 0.0053\n", + "Epoch 113/400, Loss: 0.0053\n", + "Epoch 114/400, Loss: 0.0052\n", + "Epoch 115/400, Loss: 0.0052\n", + "Epoch 116/400, Loss: 0.0052\n", + "Epoch 117/400, Loss: 0.0051\n", + "Epoch 118/400, Loss: 0.0051\n", + "Epoch 119/400, Loss: 0.0050\n", + "Epoch 120/400, Loss: 0.0050\n", + "Epoch 121/400, Loss: 0.0050\n", + "Epoch 122/400, Loss: 0.0049\n", + "Epoch 123/400, Loss: 0.0049\n", + "Epoch 124/400, Loss: 0.0049\n", + "Epoch 125/400, Loss: 0.0048\n", + "Epoch 126/400, Loss: 0.0048\n", + "Epoch 127/400, Loss: 0.0048\n", + "Epoch 128/400, Loss: 0.0047\n", + "Epoch 129/400, Loss: 0.0047\n", + "Epoch 130/400, Loss: 0.0047\n", + "Epoch 131/400, Loss: 0.0047\n", + "Epoch 132/400, Loss: 0.0046\n", + "Epoch 133/400, Loss: 0.0046\n", + "Epoch 134/400, Loss: 0.0046\n", + "Epoch 135/400, Loss: 0.0045\n", + "Epoch 136/400, Loss: 0.0045\n", + "Epoch 137/400, Loss: 0.0045\n", + "Epoch 138/400, Loss: 0.0045\n", + "Epoch 139/400, Loss: 0.0044\n", + 
"Epoch 140/400, Loss: 0.0044\n", + "Epoch 141/400, Loss: 0.0044\n", + "Epoch 142/400, Loss: 0.0043\n", + "Epoch 143/400, Loss: 0.0043\n", + "Epoch 144/400, Loss: 0.0043\n", + "Epoch 145/400, Loss: 0.0043\n", + "Epoch 146/400, Loss: 0.0042\n", + "Epoch 147/400, Loss: 0.0042\n", + "Epoch 148/400, Loss: 0.0042\n", + "Epoch 149/400, Loss: 0.0042\n", + "Epoch 150/400, Loss: 0.0041\n", + "Epoch 151/400, Loss: 0.0041\n", + "Epoch 152/400, Loss: 0.0041\n", + "Epoch 153/400, Loss: 0.0041\n", + "Epoch 154/400, Loss: 0.0040\n", + "Epoch 155/400, Loss: 0.0040\n", + "Epoch 156/400, Loss: 0.0040\n", + "Epoch 157/400, Loss: 0.0040\n", + "Epoch 158/400, Loss: 0.0040\n", + "Epoch 159/400, Loss: 0.0039\n", + "Epoch 160/400, Loss: 0.0039\n", + "Epoch 161/400, Loss: 0.0039\n", + "Epoch 162/400, Loss: 0.0039\n", + "Epoch 163/400, Loss: 0.0038\n", + "Epoch 164/400, Loss: 0.0038\n", + "Epoch 165/400, Loss: 0.0038\n", + "Epoch 166/400, Loss: 0.0038\n", + "Epoch 167/400, Loss: 0.0038\n", + "Epoch 168/400, Loss: 0.0037\n", + "Epoch 169/400, Loss: 0.0037\n", + "Epoch 170/400, Loss: 0.0037\n", + "Epoch 171/400, Loss: 0.0037\n", + "Epoch 172/400, Loss: 0.0037\n", + "Epoch 173/400, Loss: 0.0036\n", + "Epoch 174/400, Loss: 0.0036\n", + "Epoch 175/400, Loss: 0.0036\n", + "Epoch 176/400, Loss: 0.0036\n" + ] + } + ], + "source": [ "\n", "# Training loop\n", "num_epochs = 400\n", @@ -170,11 +426,11 @@ " optimizer.zero_grad()\n", "\n", " # Forward pass\n", - " output = model(input_data)\n", + " output = model(train_input)\n", " \n", " # Reshape output and target to match the shape of CrossEntropyLoss expectations\n", " output = output.view(-1, vocab_size) # Flatten the output\n", - " target_data = target_data.view(-1) # Flatten the target data\n", + " target_data = train_target.view(-1) # Flatten the target data\n", " \n", " # Calculate loss\n", " loss = criterion(output, target_data)\n", @@ -185,9 +441,22 @@ " # Optimizer step\n", " optimizer.step()\n", "\n", + " val_loss = evaluate_model(model, 
val_input, val_target, vocab_size)\n", + "\n", " # Output loss for this epoch\n", - " # if epoch % 50 == 0:\n", - " print(f'Epoch {epoch+1}/{num_epochs}, Loss: {loss.item():.4f}')\n" + " run.log_metrics(\n", + " data = {\n", + " \"training/loss\": loss.item(),\n", + " \"validation/loss\": val_loss\n", + " },\n", + " step = epoch\n", + " )\n", + " print(f'Epoch {epoch+1}/{num_epochs}, Loss: {loss.item():.4f}')\n", + "\n", + "test_loss = evaluate_model(model, test_input, test_target, vocab_size)\n", + "print(f'Test Loss: {test_loss:.4f}')\n", + "\n", + "run.close()" ] }, { From b7f8e086784d4be068dd3a60e818ff7e301184de Mon Sep 17 00:00:00 2001 From: LeoRoccoBreedt Date: Mon, 3 Mar 2025 14:59:04 +0100 Subject: [PATCH 015/125] refactor: update configs and parameters --- .../pytorch_text_model_debugging.ipynb | 325 ++++++------------ 1 file changed, 105 insertions(+), 220 deletions(-) diff --git a/integrations-and-supported-tools/pytorch/pytorch_text_model_debugging.ipynb b/integrations-and-supported-tools/pytorch/pytorch_text_model_debugging.ipynb index 0561123..5aa5e7b 100644 --- a/integrations-and-supported-tools/pytorch/pytorch_text_model_debugging.ipynb +++ b/integrations-and-supported-tools/pytorch/pytorch_text_model_debugging.ipynb @@ -2,7 +2,7 @@ "cells": [ { "cell_type": "code", - "execution_count": null, + "execution_count": 2, "metadata": {}, "outputs": [], "source": [ @@ -17,25 +17,45 @@ }, { "cell_type": "code", - "execution_count": 8, + "execution_count": null, "metadata": {}, "outputs": [], "source": [ + "# TODO - improve dataset to be more realistic\n", + "# TODO - capture the folowings; attentions, activations, gradients, weights, loss (agg and per layer), learning rate (per layer if needed)\n", + "# TODO - capture per layer losses\n", + "# TODO - increase number of layers\n", + "# TODO - Adding dropout to demonstrate the effects of regularization\n", + "# TODO - batch normalization" + ] + }, + { + "cell_type": "code", + "execution_count": null, + 
"metadata": {}, + "outputs": [], + "source": [ + "# Initialize model, loss function, and optimizer\n", + "\n", "params = {\n", " \"optimizer\": \"Adam\",\n", - " \"batch_size\": 256,\n", - " \"learning_rate\": 0.01,\n", + " \"batch_size\": 3,\n", + " \"sequence_length\": 5,\n", + " \"learning_rate\": 0.001,\n", " \"epochs\": 400, \n", " \"device\": torch.device(\"cuda\" if torch.cuda.is_available() else \"cpu\"),\n", " \"input_features\": 256,\n", - " \"n_classes\": 10,\n", - " \"input_size\": 28 * 28\n", + " \"n_classes\": 10, # TODO - remove\n", + " \"input_size\": 28 * 28, # TODO - remove\n", + " \"vocab_size\": 200000,\n", + " \"embed_size\": 1000,\n", + " \"hidden_size\": 256 # hidden size for the LSTM\n", "}" ] }, { "cell_type": "code", - "execution_count": 30, + "execution_count": 4, "metadata": {}, "outputs": [], "source": [ @@ -62,20 +82,6 @@ "run.add_tags(tags=[\"text\", \"LLM\", \"Simple\"])" ] }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "# TODO - improve dataset to be more realistic\n", - "# TODO - capture the folowings; attentions, activations, gradients, weights, loss (agg and per layer), learning rate (per layer if needed)\n", - "# TODO - capture per layer losses\n", - "# TODO - increase number of layers\n", - "# TODO - Adding dropout to demonstrate the effects of regularization\n", - "# TODO - batch normalization" - ] - }, { "cell_type": "markdown", "metadata": {}, @@ -119,7 +125,7 @@ }, { "cell_type": "code", - "execution_count": 26, + "execution_count": 5, "metadata": {}, "outputs": [], "source": [ @@ -138,16 +144,16 @@ }, { "cell_type": "code", - "execution_count": 29, + "execution_count": 6, "metadata": {}, "outputs": [ { "data": { "text/plain": [ - "" + "" ] }, - "execution_count": 29, + "execution_count": 6, "metadata": {}, "output_type": "execute_result" } @@ -180,15 +186,8 @@ " gradients.append((name, grad_output)) # Capture gradients\n", " return hook\n", "\n", - "# Initialize 
model, loss function, and optimizer\n", - "vocab_size = 200000 # Just an example vocab size\n", - "embed_size = 1000 # Embedding size\n", - "hidden_size = 256 # Hidden size for the LSTM\n", - "batch_size = 3\n", - "sequence_length = 5\n", - "\n", - "model = SimpleLLM(vocab_size, embed_size, hidden_size)\n", - "optimizer = optim.Adam(model.parameters(), lr=0.001)\n", + "model = SimpleLLM(params[\"vocab_size\"], params[\"embed_size\"], params[\"hidden_size\"])\n", + "optimizer = optim.Adam(model.parameters(), lr = params[\"learning_rate\"])\n", "criterion = nn.CrossEntropyLoss()\n", "\n", "# Container for capturing activations and gradients\n", @@ -205,14 +204,14 @@ }, { "cell_type": "code", - "execution_count": 31, + "execution_count": 7, "metadata": {}, "outputs": [], "source": [ "\n", "# Dummy input data (example with batch_size=3 and sequence_length=5)\n", - "input_data = torch.randint(0, vocab_size, (batch_size * 3, sequence_length)) # Random word indices\n", - "target_data = torch.randint(0, vocab_size, (batch_size * 3, sequence_length)) # Random target labels\n", + "input_data = torch.randint(0, params[\"vocab_size\"], (params[\"batch_size\"] * 3, params[\"sequence_length\"])) # Random word indices\n", + "target_data = torch.randint(0, params[\"vocab_size\"], (params[\"batch_size\"] * 3, params[\"sequence_length\"])) # Random target labels\n", "\n", "# Manually splitting the data into train, validation, and test sets\n", "train_size = int(0.6 * input_data.size(0)) # 60% for training\n", @@ -239,197 +238,65 @@ "name": "stdout", "output_type": "stream", "text": [ - "Epoch 1/400, Loss: 12.2419\n", - "Epoch 2/400, Loss: 11.6387\n", - "Epoch 3/400, Loss: 11.1144\n", - "Epoch 4/400, Loss: 10.6412\n", - "Epoch 5/400, Loss: 10.1949\n", - "Epoch 6/400, Loss: 9.7637\n", - "Epoch 7/400, Loss: 9.3417\n", - "Epoch 8/400, Loss: 8.9259\n", - "Epoch 9/400, Loss: 8.5148\n", - "Epoch 10/400, Loss: 8.1072\n", - "Epoch 11/400, Loss: 7.7022\n", - "Epoch 12/400, Loss: 7.2992\n", - 
"Epoch 13/400, Loss: 6.8982\n", - "Epoch 14/400, Loss: 6.4999\n", - "Epoch 15/400, Loss: 6.1048\n", - "Epoch 16/400, Loss: 5.7135\n", - "Epoch 17/400, Loss: 5.3260\n", - "Epoch 18/400, Loss: 4.9422\n", - "Epoch 19/400, Loss: 4.5620\n", - "Epoch 20/400, Loss: 4.1852\n", - "Epoch 21/400, Loss: 3.8126\n", - "Epoch 22/400, Loss: 3.4462\n", - "Epoch 23/400, Loss: 3.0892\n", - "Epoch 24/400, Loss: 2.7456\n", - "Epoch 25/400, Loss: 2.4194\n", - "Epoch 26/400, Loss: 2.1149\n", - "Epoch 27/400, Loss: 1.8359\n", - "Epoch 28/400, Loss: 1.5853\n", - "Epoch 29/400, Loss: 1.3643\n", - "Epoch 30/400, Loss: 1.1723\n", - "Epoch 31/400, Loss: 1.0070\n", - "Epoch 32/400, Loss: 0.8651\n", - "Epoch 33/400, Loss: 0.7432\n", - "Epoch 34/400, Loss: 0.6379\n", - "Epoch 35/400, Loss: 0.5465\n", - "Epoch 36/400, Loss: 0.4668\n", - "Epoch 37/400, Loss: 0.3975\n", - "Epoch 38/400, Loss: 0.3371\n", - "Epoch 39/400, Loss: 0.2847\n", - "Epoch 40/400, Loss: 0.2394\n", - "Epoch 41/400, Loss: 0.2005\n", - "Epoch 42/400, Loss: 0.1674\n", - "Epoch 43/400, Loss: 0.1395\n", - "Epoch 44/400, Loss: 0.1162\n", - "Epoch 45/400, Loss: 0.0969\n", - "Epoch 46/400, Loss: 0.0810\n", - "Epoch 47/400, Loss: 0.0681\n", - "Epoch 48/400, Loss: 0.0577\n", - "Epoch 49/400, Loss: 0.0492\n", - "Epoch 50/400, Loss: 0.0423\n", - "Epoch 51/400, Loss: 0.0367\n", - "Epoch 52/400, Loss: 0.0321\n", - "Epoch 53/400, Loss: 0.0283\n", - "Epoch 54/400, Loss: 0.0252\n", - "Epoch 55/400, Loss: 0.0227\n", - "Epoch 56/400, Loss: 0.0205\n", - "Epoch 57/400, Loss: 0.0187\n", - "Epoch 58/400, Loss: 0.0172\n", - "Epoch 59/400, Loss: 0.0159\n", - "Epoch 60/400, Loss: 0.0148\n", - "Epoch 61/400, Loss: 0.0138\n", - "Epoch 62/400, Loss: 0.0130\n", - "Epoch 63/400, Loss: 0.0123\n", - "Epoch 64/400, Loss: 0.0117\n", - "Epoch 65/400, Loss: 0.0111\n", - "Epoch 66/400, Loss: 0.0107\n", - "Epoch 67/400, Loss: 0.0102\n", - "Epoch 68/400, Loss: 0.0099\n", - "Epoch 69/400, Loss: 0.0095\n", - "Epoch 70/400, Loss: 0.0092\n", - "Epoch 71/400, Loss: 
0.0090\n", - "Epoch 72/400, Loss: 0.0087\n", - "Epoch 73/400, Loss: 0.0085\n", - "Epoch 74/400, Loss: 0.0083\n", - "Epoch 75/400, Loss: 0.0081\n", - "Epoch 76/400, Loss: 0.0079\n", - "Epoch 77/400, Loss: 0.0078\n", - "Epoch 78/400, Loss: 0.0076\n", - "Epoch 79/400, Loss: 0.0075\n", - "Epoch 80/400, Loss: 0.0074\n", - "Epoch 81/400, Loss: 0.0072\n", - "Epoch 82/400, Loss: 0.0071\n", - "Epoch 83/400, Loss: 0.0070\n", - "Epoch 84/400, Loss: 0.0069\n", - "Epoch 85/400, Loss: 0.0068\n", - "Epoch 86/400, Loss: 0.0067\n", - "Epoch 87/400, Loss: 0.0067\n", - "Epoch 88/400, Loss: 0.0066\n", - "Epoch 89/400, Loss: 0.0065\n", - "Epoch 90/400, Loss: 0.0064\n", - "Epoch 91/400, Loss: 0.0064\n", - "Epoch 92/400, Loss: 0.0063\n", - "Epoch 93/400, Loss: 0.0062\n", - "Epoch 94/400, Loss: 0.0062\n", - "Epoch 95/400, Loss: 0.0061\n", - "Epoch 96/400, Loss: 0.0060\n", - "Epoch 97/400, Loss: 0.0060\n", - "Epoch 98/400, Loss: 0.0059\n", - "Epoch 99/400, Loss: 0.0059\n", - "Epoch 100/400, Loss: 0.0058\n", - "Epoch 101/400, Loss: 0.0058\n", - "Epoch 102/400, Loss: 0.0057\n", - "Epoch 103/400, Loss: 0.0057\n", - "Epoch 104/400, Loss: 0.0056\n", - "Epoch 105/400, Loss: 0.0056\n", - "Epoch 106/400, Loss: 0.0056\n", - "Epoch 107/400, Loss: 0.0055\n", - "Epoch 108/400, Loss: 0.0055\n", - "Epoch 109/400, Loss: 0.0054\n", - "Epoch 110/400, Loss: 0.0054\n", - "Epoch 111/400, Loss: 0.0053\n", - "Epoch 112/400, Loss: 0.0053\n", - "Epoch 113/400, Loss: 0.0053\n", - "Epoch 114/400, Loss: 0.0052\n", - "Epoch 115/400, Loss: 0.0052\n", - "Epoch 116/400, Loss: 0.0052\n", - "Epoch 117/400, Loss: 0.0051\n", - "Epoch 118/400, Loss: 0.0051\n", - "Epoch 119/400, Loss: 0.0050\n", - "Epoch 120/400, Loss: 0.0050\n", - "Epoch 121/400, Loss: 0.0050\n", - "Epoch 122/400, Loss: 0.0049\n", - "Epoch 123/400, Loss: 0.0049\n", - "Epoch 124/400, Loss: 0.0049\n", - "Epoch 125/400, Loss: 0.0048\n", - "Epoch 126/400, Loss: 0.0048\n", - "Epoch 127/400, Loss: 0.0048\n", - "Epoch 128/400, Loss: 0.0047\n", - "Epoch 129/400, 
Loss: 0.0047\n", - "Epoch 130/400, Loss: 0.0047\n", - "Epoch 131/400, Loss: 0.0047\n", - "Epoch 132/400, Loss: 0.0046\n", - "Epoch 133/400, Loss: 0.0046\n", - "Epoch 134/400, Loss: 0.0046\n", - "Epoch 135/400, Loss: 0.0045\n", - "Epoch 136/400, Loss: 0.0045\n", - "Epoch 137/400, Loss: 0.0045\n", - "Epoch 138/400, Loss: 0.0045\n", - "Epoch 139/400, Loss: 0.0044\n", - "Epoch 140/400, Loss: 0.0044\n", - "Epoch 141/400, Loss: 0.0044\n", - "Epoch 142/400, Loss: 0.0043\n", - "Epoch 143/400, Loss: 0.0043\n", - "Epoch 144/400, Loss: 0.0043\n", - "Epoch 145/400, Loss: 0.0043\n", - "Epoch 146/400, Loss: 0.0042\n", - "Epoch 147/400, Loss: 0.0042\n", - "Epoch 148/400, Loss: 0.0042\n", - "Epoch 149/400, Loss: 0.0042\n", - "Epoch 150/400, Loss: 0.0041\n", - "Epoch 151/400, Loss: 0.0041\n", - "Epoch 152/400, Loss: 0.0041\n", - "Epoch 153/400, Loss: 0.0041\n", - "Epoch 154/400, Loss: 0.0040\n", - "Epoch 155/400, Loss: 0.0040\n", - "Epoch 156/400, Loss: 0.0040\n", - "Epoch 157/400, Loss: 0.0040\n", - "Epoch 158/400, Loss: 0.0040\n", - "Epoch 159/400, Loss: 0.0039\n", - "Epoch 160/400, Loss: 0.0039\n", - "Epoch 161/400, Loss: 0.0039\n", - "Epoch 162/400, Loss: 0.0039\n", - "Epoch 163/400, Loss: 0.0038\n", - "Epoch 164/400, Loss: 0.0038\n", - "Epoch 165/400, Loss: 0.0038\n", - "Epoch 166/400, Loss: 0.0038\n", - "Epoch 167/400, Loss: 0.0038\n", - "Epoch 168/400, Loss: 0.0037\n", - "Epoch 169/400, Loss: 0.0037\n", - "Epoch 170/400, Loss: 0.0037\n", - "Epoch 171/400, Loss: 0.0037\n", - "Epoch 172/400, Loss: 0.0037\n", - "Epoch 173/400, Loss: 0.0036\n", - "Epoch 174/400, Loss: 0.0036\n", - "Epoch 175/400, Loss: 0.0036\n", - "Epoch 176/400, Loss: 0.0036\n" + "Epoch 1/400, Loss: 12.2172\n", + "Epoch 2/400, Loss: 9.4774\n", + "Epoch 3/400, Loss: 6.9212\n", + "Epoch 4/400, Loss: 4.5795\n", + "Epoch 5/400, Loss: 2.3832\n", + "Epoch 6/400, Loss: 0.8342\n", + "Epoch 7/400, Loss: 0.2218\n", + "Epoch 8/400, Loss: 0.0458\n", + "Epoch 9/400, Loss: 0.0089\n", + "Epoch 10/400, Loss: 0.0020\n", + 
"Epoch 11/400, Loss: 0.0006\n", + "Epoch 12/400, Loss: 0.0002\n", + "Epoch 13/400, Loss: 0.0001\n", + "Epoch 14/400, Loss: 0.0001\n", + "Epoch 15/400, Loss: 0.0000\n", + "Epoch 16/400, Loss: 0.0000\n", + "Epoch 17/400, Loss: 0.0000\n", + "Epoch 18/400, Loss: 0.0000\n", + "Epoch 19/400, Loss: 0.0000\n", + "Epoch 20/400, Loss: 0.0000\n", + "Epoch 21/400, Loss: 0.0000\n", + "Epoch 22/400, Loss: 0.0000\n", + "Epoch 23/400, Loss: 0.0000\n", + "Epoch 24/400, Loss: 0.0000\n", + "Epoch 25/400, Loss: 0.0000\n", + "Epoch 26/400, Loss: 0.0000\n", + "Epoch 27/400, Loss: 0.0000\n", + "Epoch 28/400, Loss: 0.0000\n" + ] + }, + { + "ename": "KeyboardInterrupt", + "evalue": "", + "output_type": "error", + "traceback": [ + "\u001b[1;31m---------------------------------------------------------------------------\u001b[0m", + "\u001b[1;31mKeyboardInterrupt\u001b[0m Traceback (most recent call last)", + "Cell \u001b[1;32mIn[9], line 19\u001b[0m\n\u001b[0;32m 16\u001b[0m loss\u001b[38;5;241m.\u001b[39mbackward()\n\u001b[0;32m 18\u001b[0m \u001b[38;5;66;03m# Optimizer step\u001b[39;00m\n\u001b[1;32m---> 19\u001b[0m \u001b[43moptimizer\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mstep\u001b[49m\u001b[43m(\u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m 21\u001b[0m val_loss \u001b[38;5;241m=\u001b[39m evaluate_model(model, val_input, val_target, params[\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mvocab_size\u001b[39m\u001b[38;5;124m\"\u001b[39m])\n\u001b[0;32m 23\u001b[0m \u001b[38;5;66;03m# Output loss for this epoch\u001b[39;00m\n", + "File \u001b[1;32mc:\\Users\\leo.breedt\\miniconda3\\envs\\neptune_scale_py_312_base\\Lib\\site-packages\\torch\\optim\\optimizer.py:493\u001b[0m, in \u001b[0;36mOptimizer.profile_hook_step..wrapper\u001b[1;34m(*args, **kwargs)\u001b[0m\n\u001b[0;32m 488\u001b[0m \u001b[38;5;28;01melse\u001b[39;00m:\n\u001b[0;32m 489\u001b[0m \u001b[38;5;28;01mraise\u001b[39;00m \u001b[38;5;167;01mRuntimeError\u001b[39;00m(\n\u001b[0;32m 490\u001b[0m 
\u001b[38;5;124mf\u001b[39m\u001b[38;5;124m\"\u001b[39m\u001b[38;5;132;01m{\u001b[39;00mfunc\u001b[38;5;132;01m}\u001b[39;00m\u001b[38;5;124m must return None or a tuple of (new_args, new_kwargs), but got \u001b[39m\u001b[38;5;132;01m{\u001b[39;00mresult\u001b[38;5;132;01m}\u001b[39;00m\u001b[38;5;124m.\u001b[39m\u001b[38;5;124m\"\u001b[39m\n\u001b[0;32m 491\u001b[0m )\n\u001b[1;32m--> 493\u001b[0m out \u001b[38;5;241m=\u001b[39m \u001b[43mfunc\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m 494\u001b[0m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_optimizer_step_code()\n\u001b[0;32m 496\u001b[0m \u001b[38;5;66;03m# call optimizer step post hooks\u001b[39;00m\n", + "File \u001b[1;32mc:\\Users\\leo.breedt\\miniconda3\\envs\\neptune_scale_py_312_base\\Lib\\site-packages\\torch\\optim\\optimizer.py:91\u001b[0m, in \u001b[0;36m_use_grad_for_differentiable.._use_grad\u001b[1;34m(self, *args, **kwargs)\u001b[0m\n\u001b[0;32m 89\u001b[0m torch\u001b[38;5;241m.\u001b[39mset_grad_enabled(\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mdefaults[\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mdifferentiable\u001b[39m\u001b[38;5;124m\"\u001b[39m])\n\u001b[0;32m 90\u001b[0m torch\u001b[38;5;241m.\u001b[39m_dynamo\u001b[38;5;241m.\u001b[39mgraph_break()\n\u001b[1;32m---> 91\u001b[0m ret \u001b[38;5;241m=\u001b[39m \u001b[43mfunc\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m 92\u001b[0m \u001b[38;5;28;01mfinally\u001b[39;00m:\n\u001b[0;32m 93\u001b[0m 
torch\u001b[38;5;241m.\u001b[39m_dynamo\u001b[38;5;241m.\u001b[39mgraph_break()\n", + "File \u001b[1;32mc:\\Users\\leo.breedt\\miniconda3\\envs\\neptune_scale_py_312_base\\Lib\\site-packages\\torch\\optim\\adam.py:244\u001b[0m, in \u001b[0;36mAdam.step\u001b[1;34m(self, closure)\u001b[0m\n\u001b[0;32m 232\u001b[0m beta1, beta2 \u001b[38;5;241m=\u001b[39m group[\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mbetas\u001b[39m\u001b[38;5;124m\"\u001b[39m]\n\u001b[0;32m 234\u001b[0m has_complex \u001b[38;5;241m=\u001b[39m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_init_group(\n\u001b[0;32m 235\u001b[0m group,\n\u001b[0;32m 236\u001b[0m params_with_grad,\n\u001b[1;32m (...)\u001b[0m\n\u001b[0;32m 241\u001b[0m state_steps,\n\u001b[0;32m 242\u001b[0m )\n\u001b[1;32m--> 244\u001b[0m \u001b[43madam\u001b[49m\u001b[43m(\u001b[49m\n\u001b[0;32m 245\u001b[0m \u001b[43m \u001b[49m\u001b[43mparams_with_grad\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 246\u001b[0m \u001b[43m \u001b[49m\u001b[43mgrads\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 247\u001b[0m \u001b[43m \u001b[49m\u001b[43mexp_avgs\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 248\u001b[0m \u001b[43m \u001b[49m\u001b[43mexp_avg_sqs\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 249\u001b[0m \u001b[43m \u001b[49m\u001b[43mmax_exp_avg_sqs\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 250\u001b[0m \u001b[43m \u001b[49m\u001b[43mstate_steps\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 251\u001b[0m \u001b[43m \u001b[49m\u001b[43mamsgrad\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mgroup\u001b[49m\u001b[43m[\u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43mamsgrad\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m]\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 252\u001b[0m \u001b[43m \u001b[49m\u001b[43mhas_complex\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mhas_complex\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 253\u001b[0m \u001b[43m 
\u001b[49m\u001b[43mbeta1\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mbeta1\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 254\u001b[0m \u001b[43m \u001b[49m\u001b[43mbeta2\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mbeta2\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 255\u001b[0m \u001b[43m \u001b[49m\u001b[43mlr\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mgroup\u001b[49m\u001b[43m[\u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43mlr\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m]\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 256\u001b[0m \u001b[43m \u001b[49m\u001b[43mweight_decay\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mgroup\u001b[49m\u001b[43m[\u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43mweight_decay\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m]\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 257\u001b[0m \u001b[43m \u001b[49m\u001b[43meps\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mgroup\u001b[49m\u001b[43m[\u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43meps\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m]\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 258\u001b[0m \u001b[43m \u001b[49m\u001b[43mmaximize\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mgroup\u001b[49m\u001b[43m[\u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43mmaximize\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m]\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 259\u001b[0m \u001b[43m \u001b[49m\u001b[43mforeach\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mgroup\u001b[49m\u001b[43m[\u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43mforeach\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m]\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 260\u001b[0m \u001b[43m 
\u001b[49m\u001b[43mcapturable\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mgroup\u001b[49m\u001b[43m[\u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43mcapturable\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m]\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 261\u001b[0m \u001b[43m \u001b[49m\u001b[43mdifferentiable\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mgroup\u001b[49m\u001b[43m[\u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43mdifferentiable\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m]\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 262\u001b[0m \u001b[43m \u001b[49m\u001b[43mfused\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mgroup\u001b[49m\u001b[43m[\u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43mfused\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m]\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 263\u001b[0m \u001b[43m \u001b[49m\u001b[43mgrad_scale\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[38;5;28;43mgetattr\u001b[39;49m\u001b[43m(\u001b[49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43mgrad_scale\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;28;43;01mNone\u001b[39;49;00m\u001b[43m)\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 264\u001b[0m \u001b[43m \u001b[49m\u001b[43mfound_inf\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[38;5;28;43mgetattr\u001b[39;49m\u001b[43m(\u001b[49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43mfound_inf\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;28;43;01mNone\u001b[39;49;00m\u001b[43m)\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 265\u001b[0m \u001b[43m \u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m 267\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m loss\n", + 
"File \u001b[1;32mc:\\Users\\leo.breedt\\miniconda3\\envs\\neptune_scale_py_312_base\\Lib\\site-packages\\torch\\optim\\optimizer.py:154\u001b[0m, in \u001b[0;36m_disable_dynamo_if_unsupported..wrapper..maybe_fallback\u001b[1;34m(*args, **kwargs)\u001b[0m\n\u001b[0;32m 152\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m disabled_func(\u001b[38;5;241m*\u001b[39margs, \u001b[38;5;241m*\u001b[39m\u001b[38;5;241m*\u001b[39mkwargs)\n\u001b[0;32m 153\u001b[0m \u001b[38;5;28;01melse\u001b[39;00m:\n\u001b[1;32m--> 154\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[43mfunc\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n", + "File \u001b[1;32mc:\\Users\\leo.breedt\\miniconda3\\envs\\neptune_scale_py_312_base\\Lib\\site-packages\\torch\\optim\\adam.py:876\u001b[0m, in \u001b[0;36madam\u001b[1;34m(params, grads, exp_avgs, exp_avg_sqs, max_exp_avg_sqs, state_steps, foreach, capturable, differentiable, fused, grad_scale, found_inf, has_complex, amsgrad, beta1, beta2, lr, weight_decay, eps, maximize)\u001b[0m\n\u001b[0;32m 873\u001b[0m \u001b[38;5;28;01melse\u001b[39;00m:\n\u001b[0;32m 874\u001b[0m func \u001b[38;5;241m=\u001b[39m _single_tensor_adam\n\u001b[1;32m--> 876\u001b[0m \u001b[43mfunc\u001b[49m\u001b[43m(\u001b[49m\n\u001b[0;32m 877\u001b[0m \u001b[43m \u001b[49m\u001b[43mparams\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 878\u001b[0m \u001b[43m \u001b[49m\u001b[43mgrads\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 879\u001b[0m \u001b[43m \u001b[49m\u001b[43mexp_avgs\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 880\u001b[0m \u001b[43m \u001b[49m\u001b[43mexp_avg_sqs\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 881\u001b[0m \u001b[43m \u001b[49m\u001b[43mmax_exp_avg_sqs\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 882\u001b[0m \u001b[43m 
\u001b[49m\u001b[43mstate_steps\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 883\u001b[0m \u001b[43m \u001b[49m\u001b[43mamsgrad\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mamsgrad\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 884\u001b[0m \u001b[43m \u001b[49m\u001b[43mhas_complex\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mhas_complex\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 885\u001b[0m \u001b[43m \u001b[49m\u001b[43mbeta1\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mbeta1\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 886\u001b[0m \u001b[43m \u001b[49m\u001b[43mbeta2\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mbeta2\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 887\u001b[0m \u001b[43m \u001b[49m\u001b[43mlr\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mlr\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 888\u001b[0m \u001b[43m \u001b[49m\u001b[43mweight_decay\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mweight_decay\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 889\u001b[0m \u001b[43m \u001b[49m\u001b[43meps\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43meps\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 890\u001b[0m \u001b[43m \u001b[49m\u001b[43mmaximize\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mmaximize\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 891\u001b[0m \u001b[43m \u001b[49m\u001b[43mcapturable\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mcapturable\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 892\u001b[0m \u001b[43m \u001b[49m\u001b[43mdifferentiable\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mdifferentiable\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 893\u001b[0m \u001b[43m \u001b[49m\u001b[43mgrad_scale\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mgrad_scale\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 894\u001b[0m \u001b[43m \u001b[49m\u001b[43mfound_inf\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mfound_inf\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 895\u001b[0m 
\u001b[43m\u001b[49m\u001b[43m)\u001b[49m\n", + "File \u001b[1;32mc:\\Users\\leo.breedt\\miniconda3\\envs\\neptune_scale_py_312_base\\Lib\\site-packages\\torch\\optim\\adam.py:425\u001b[0m, in \u001b[0;36m_single_tensor_adam\u001b[1;34m(params, grads, exp_avgs, exp_avg_sqs, max_exp_avg_sqs, state_steps, grad_scale, found_inf, amsgrad, has_complex, beta1, beta2, lr, weight_decay, eps, maximize, capturable, differentiable)\u001b[0m\n\u001b[0;32m 422\u001b[0m \u001b[38;5;66;03m# Decay the first and second moment running average coefficient\u001b[39;00m\n\u001b[0;32m 423\u001b[0m exp_avg\u001b[38;5;241m.\u001b[39mlerp_(grad, \u001b[38;5;241m1\u001b[39m \u001b[38;5;241m-\u001b[39m device_beta1)\n\u001b[1;32m--> 425\u001b[0m \u001b[43mexp_avg_sq\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mmul_\u001b[49m\u001b[43m(\u001b[49m\u001b[43mbeta2\u001b[49m\u001b[43m)\u001b[49m\u001b[38;5;241m.\u001b[39maddcmul_(grad, grad\u001b[38;5;241m.\u001b[39mconj(), value\u001b[38;5;241m=\u001b[39m\u001b[38;5;241m1\u001b[39m \u001b[38;5;241m-\u001b[39m beta2)\n\u001b[0;32m 427\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m capturable \u001b[38;5;129;01mor\u001b[39;00m differentiable:\n\u001b[0;32m 428\u001b[0m step \u001b[38;5;241m=\u001b[39m step_t\n", + "\u001b[1;31mKeyboardInterrupt\u001b[0m: " ] } ], "source": [ "\n", "# Training loop\n", - "num_epochs = 400\n", - "for epoch in range(num_epochs):\n", + "for epoch in range(params[\"epochs\"]):\n", " optimizer.zero_grad()\n", "\n", " # Forward pass\n", " output = model(train_input)\n", " \n", " # Reshape output and target to match the shape of CrossEntropyLoss expectations\n", - " output = output.view(-1, vocab_size) # Flatten the output\n", + " output = output.view(-1, params[\"vocab_size\"]) # Flatten the output\n", " target_data = train_target.view(-1) # Flatten the target data\n", " \n", " # Calculate loss\n", @@ -441,7 +308,7 @@ " # Optimizer step\n", " optimizer.step()\n", "\n", - " val_loss = evaluate_model(model, val_input, 
val_target, vocab_size)\n", + " val_loss = evaluate_model(model, val_input, val_target, params[\"vocab_size\"])\n", "\n", " # Output loss for this epoch\n", " run.log_metrics(\n", @@ -451,10 +318,28 @@ " },\n", " step = epoch\n", " )\n", - " print(f'Epoch {epoch+1}/{num_epochs}, Loss: {loss.item():.4f}')\n", + " print(f'Epoch {epoch+1}/{params[\"epochs\"]}, Loss: {loss.item():.4f}')\n", "\n", - "test_loss = evaluate_model(model, test_input, test_target, vocab_size)\n", - "print(f'Test Loss: {test_loss:.4f}')\n", + "test_loss = evaluate_model(model, test_input, test_target, params[\"vocab_size\"])\n", + "print(f'Test Loss: {test_loss:.4f}')\n" + ] + }, + { + "cell_type": "code", + "execution_count": 10, + "metadata": {}, + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "neptune:INFO: Waiting for all operations to be processed\n", + "neptune:WARNING: No timeout specified. Waiting indefinitely\n", + "neptune:INFO: All operations were processed\n" + ] + } + ], + "source": [ "\n", "run.close()" ] From b113a36e1e06b17503c5a600c6d2f901bbb0329a Mon Sep 17 00:00:00 2001 From: LeoRoccoBreedt Date: Mon, 3 Mar 2025 15:01:31 +0100 Subject: [PATCH 016/125] refactor: update logged configs --- .../pytorch/pytorch_text_model_debugging.ipynb | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/integrations-and-supported-tools/pytorch/pytorch_text_model_debugging.ipynb b/integrations-and-supported-tools/pytorch/pytorch_text_model_debugging.ipynb index 5aa5e7b..d2ca4da 100644 --- a/integrations-and-supported-tools/pytorch/pytorch_text_model_debugging.ipynb +++ b/integrations-and-supported-tools/pytorch/pytorch_text_model_debugging.ipynb @@ -55,7 +55,7 @@ }, { "cell_type": "code", - "execution_count": 4, + "execution_count": null, "metadata": {}, "outputs": [], "source": [ @@ -73,8 +73,11 @@ " \"config/learning_rate\": params[\"learning_rate\"],\n", " \"config/optimizer\": params[\"optimizer\"],\n", " \"config/batch_size\": 
params[\"batch_size\"],\n", + " \"config/sequece_length\": params[\"sequence_length\"],\n", " \"config/epochs\": params[\"epochs\"],\n", - " \"config/input_size\": params[\"input_size\"]\n", + " \"config/input_size\": params[\"input_size\"],\n", + " \"data/vocab_size\": params[\"vocab_size\"],\n", + " \"data/embed_size\": params[\"embed_size\"]\n", " }\n", ")\n", "\n", From 725899344290c559db7c0b335c5cb99b45bc3ed8 Mon Sep 17 00:00:00 2001 From: LeoRoccoBreedt Date: Mon, 3 Mar 2025 15:42:12 +0100 Subject: [PATCH 017/125] refactor: calculate activations per layer --- .../pytorch_text_model_debugging.ipynb | 240 +++++++++++++----- 1 file changed, 180 insertions(+), 60 deletions(-) diff --git a/integrations-and-supported-tools/pytorch/pytorch_text_model_debugging.ipynb b/integrations-and-supported-tools/pytorch/pytorch_text_model_debugging.ipynb index d2ca4da..0f08f79 100644 --- a/integrations-and-supported-tools/pytorch/pytorch_text_model_debugging.ipynb +++ b/integrations-and-supported-tools/pytorch/pytorch_text_model_debugging.ipynb @@ -31,7 +31,7 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 21, "metadata": {}, "outputs": [], "source": [ @@ -55,7 +55,7 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 22, "metadata": {}, "outputs": [], "source": [ @@ -128,7 +128,7 @@ }, { "cell_type": "code", - "execution_count": 5, + "execution_count": 23, "metadata": {}, "outputs": [], "source": [ @@ -147,22 +147,10 @@ }, { "cell_type": "code", - "execution_count": 6, + "execution_count": 24, "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "" - ] - }, - "execution_count": 6, - "metadata": {}, - "output_type": "execute_result" - } - ], + "outputs": [], "source": [ - "\n", "# Define the simple LLM model with LSTM\n", "class SimpleLLM(nn.Module):\n", " def __init__(self, vocab_size, embed_size, hidden_size):\n", @@ -170,12 +158,47 @@ " self.embedding = nn.Embedding(vocab_size, embed_size)\n", " self.lstm = 
nn.LSTM(embed_size, hidden_size, batch_first=True)\n", " self.fc = nn.Linear(hidden_size, vocab_size)\n", + "\n", + " # Initialize activations dictionary to store layer activations\n", + " self.activations = {}\n", + "\n", + " # Registering hooks to track activations\n", + " self.hooks = []\n", + " self.hooks.append(self.lstm.register_forward_hook(self.save_activation(\"lstm\")))\n", + " self.hooks.append(self.fc.register_forward_hook(self.save_activation(\"fc\")))\n", + "\n", + " # Registering hooks to track gradients\n", + "\n", " \n", " def forward(self, x):\n", " x = self.embedding(x)\n", " lstm_out, _ = self.lstm(x) # LSTM returns output and hidden/cell state tuple\n", " out = self.fc(lstm_out)\n", " return out\n", + " \n", + " # Function to save activations\n", + " def save_activation(self, name):\n", + " def hook(model, input, output):\n", + " self.activations[name] = output\n", + " return hook\n", + " \n", + " def get_activations(self):\n", + " return self.activations\n", + "\n", + " def clear_activations(self):\n", + " self.activations = {}\n", + "\n", + "model = SimpleLLM(params[\"vocab_size\"], params[\"embed_size\"], params[\"hidden_size\"])\n", + "optimizer = optim.Adam(model.parameters(), lr = params[\"learning_rate\"])\n", + "criterion = nn.CrossEntropyLoss()\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ "\n", "# Define a function to capture activations\n", "def capture_activation(name):\n", @@ -187,11 +210,33 @@ "def capture_gradient(name):\n", " def hook(module, grad_input, grad_output):\n", " gradients.append((name, grad_output)) # Capture gradients\n", - " return hook\n", - "\n", - "model = SimpleLLM(params[\"vocab_size\"], params[\"embed_size\"], params[\"hidden_size\"])\n", - "optimizer = optim.Adam(model.parameters(), lr = params[\"learning_rate\"])\n", - "criterion = nn.CrossEntropyLoss()\n", + " return hook" + ] + }, + { + "cell_type": "code", + "execution_count": null, + 
"metadata": {}, + "outputs": [], + "source": [ + "params = {\n", + " \"optimizer\": \"Adam\",\n", + " \"batch_size\": 256,\n", + " \"learning_rate\": 0.01,\n", + " \"epochs\": 5, \n", + " \"device\": torch.device(\"cuda\" if torch.cuda.is_available() else \"cpu\"),\n", + " \"input_features\": 256,\n", + " \"n_classes\": 10,\n", + " \"input_size\": 28 * 28\n", + "}" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ "\n", "# Container for capturing activations and gradients\n", "activations = []\n", @@ -207,7 +252,7 @@ }, { "cell_type": "code", - "execution_count": 7, + "execution_count": 25, "metadata": {}, "outputs": [], "source": [ @@ -234,41 +279,104 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 27, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ - "Epoch 1/400, Loss: 12.2172\n", - "Epoch 2/400, Loss: 9.4774\n", - "Epoch 3/400, Loss: 6.9212\n", - "Epoch 4/400, Loss: 4.5795\n", - "Epoch 5/400, Loss: 2.3832\n", - "Epoch 6/400, Loss: 0.8342\n", - "Epoch 7/400, Loss: 0.2218\n", - "Epoch 8/400, Loss: 0.0458\n", - "Epoch 9/400, Loss: 0.0089\n", - "Epoch 10/400, Loss: 0.0020\n", - "Epoch 11/400, Loss: 0.0006\n", - "Epoch 12/400, Loss: 0.0002\n", - "Epoch 13/400, Loss: 0.0001\n", - "Epoch 14/400, Loss: 0.0001\n", - "Epoch 15/400, Loss: 0.0000\n", - "Epoch 16/400, Loss: 0.0000\n", - "Epoch 17/400, Loss: 0.0000\n", - "Epoch 18/400, Loss: 0.0000\n", - "Epoch 19/400, Loss: 0.0000\n", - "Epoch 20/400, Loss: 0.0000\n", - "Epoch 21/400, Loss: 0.0000\n", - "Epoch 22/400, Loss: 0.0000\n", - "Epoch 23/400, Loss: 0.0000\n", - "Epoch 24/400, Loss: 0.0000\n", - "Epoch 25/400, Loss: 0.0000\n", - "Epoch 26/400, Loss: 0.0000\n", - "Epoch 27/400, Loss: 0.0000\n", - "Epoch 28/400, Loss: 0.0000\n" + "Epoch 1/400, Loss: 11.6612\n", + "Epoch 2/400, Loss: 11.1299\n", + "Epoch 3/400, Loss: 10.6519\n", + "Epoch 4/400, Loss: 10.2014\n", + "Epoch 5/400, Loss: 9.7649\n", 
+ "Epoch 6/400, Loss: 9.3361\n", + "Epoch 7/400, Loss: 8.9123\n", + "Epoch 8/400, Loss: 8.4927\n", + "Epoch 9/400, Loss: 8.0768\n", + "Epoch 10/400, Loss: 7.6644\n", + "Epoch 11/400, Loss: 7.2550\n", + "Epoch 12/400, Loss: 6.8478\n", + "Epoch 13/400, Loss: 6.4424\n", + "Epoch 14/400, Loss: 6.0383\n", + "Epoch 15/400, Loss: 5.6355\n", + "Epoch 16/400, Loss: 5.2345\n", + "Epoch 17/400, Loss: 4.8363\n", + "Epoch 18/400, Loss: 4.4415\n", + "Epoch 19/400, Loss: 4.0512\n", + "Epoch 20/400, Loss: 3.6666\n", + "Epoch 21/400, Loss: 3.2899\n", + "Epoch 22/400, Loss: 2.9234\n", + "Epoch 23/400, Loss: 2.5708\n", + "Epoch 24/400, Loss: 2.2367\n", + "Epoch 25/400, Loss: 1.9260\n", + "Epoch 26/400, Loss: 1.6431\n", + "Epoch 27/400, Loss: 1.3913\n", + "Epoch 28/400, Loss: 1.1719\n", + "Epoch 29/400, Loss: 0.9843\n", + "Epoch 30/400, Loss: 0.8258\n", + "Epoch 31/400, Loss: 0.6929\n", + "Epoch 32/400, Loss: 0.5816\n", + "Epoch 33/400, Loss: 0.4882\n", + "Epoch 34/400, Loss: 0.4094\n", + "Epoch 35/400, Loss: 0.3426\n", + "Epoch 36/400, Loss: 0.2859\n", + "Epoch 37/400, Loss: 0.2378\n", + "Epoch 38/400, Loss: 0.1972\n", + "Epoch 39/400, Loss: 0.1631\n", + "Epoch 40/400, Loss: 0.1346\n", + "Epoch 41/400, Loss: 0.1110\n", + "Epoch 42/400, Loss: 0.0916\n", + "Epoch 43/400, Loss: 0.0759\n", + "Epoch 44/400, Loss: 0.0632\n", + "Epoch 45/400, Loss: 0.0531\n", + "Epoch 46/400, Loss: 0.0450\n", + "Epoch 47/400, Loss: 0.0385\n", + "Epoch 48/400, Loss: 0.0333\n", + "Epoch 49/400, Loss: 0.0291\n", + "Epoch 50/400, Loss: 0.0257\n", + "Epoch 51/400, Loss: 0.0229\n", + "Epoch 52/400, Loss: 0.0206\n", + "Epoch 53/400, Loss: 0.0187\n", + "Epoch 54/400, Loss: 0.0171\n", + "Epoch 55/400, Loss: 0.0158\n", + "Epoch 56/400, Loss: 0.0147\n", + "Epoch 57/400, Loss: 0.0137\n", + "Epoch 58/400, Loss: 0.0129\n", + "Epoch 59/400, Loss: 0.0122\n", + "Epoch 60/400, Loss: 0.0116\n", + "Epoch 61/400, Loss: 0.0110\n", + "Epoch 62/400, Loss: 0.0106\n", + "Epoch 63/400, Loss: 0.0102\n", + "Epoch 64/400, Loss: 
0.0098\n", + "Epoch 65/400, Loss: 0.0095\n", + "Epoch 66/400, Loss: 0.0092\n", + "Epoch 67/400, Loss: 0.0089\n", + "Epoch 68/400, Loss: 0.0087\n", + "Epoch 69/400, Loss: 0.0085\n", + "Epoch 70/400, Loss: 0.0083\n", + "Epoch 71/400, Loss: 0.0081\n", + "Epoch 72/400, Loss: 0.0079\n", + "Epoch 73/400, Loss: 0.0078\n", + "Epoch 74/400, Loss: 0.0076\n", + "Epoch 75/400, Loss: 0.0075\n", + "Epoch 76/400, Loss: 0.0074\n", + "Epoch 77/400, Loss: 0.0073\n", + "Epoch 78/400, Loss: 0.0072\n", + "Epoch 79/400, Loss: 0.0071\n", + "Epoch 80/400, Loss: 0.0070\n", + "Epoch 81/400, Loss: 0.0069\n", + "Epoch 82/400, Loss: 0.0068\n", + "Epoch 83/400, Loss: 0.0067\n", + "Epoch 84/400, Loss: 0.0067\n", + "Epoch 85/400, Loss: 0.0066\n", + "Epoch 86/400, Loss: 0.0065\n", + "Epoch 87/400, Loss: 0.0065\n", + "Epoch 88/400, Loss: 0.0064\n", + "Epoch 89/400, Loss: 0.0063\n", + "Epoch 90/400, Loss: 0.0063\n", + "Epoch 91/400, Loss: 0.0062\n" ] }, { @@ -278,22 +386,27 @@ "traceback": [ "\u001b[1;31m---------------------------------------------------------------------------\u001b[0m", "\u001b[1;31mKeyboardInterrupt\u001b[0m Traceback (most recent call last)", - "Cell \u001b[1;32mIn[9], line 19\u001b[0m\n\u001b[0;32m 16\u001b[0m loss\u001b[38;5;241m.\u001b[39mbackward()\n\u001b[0;32m 18\u001b[0m \u001b[38;5;66;03m# Optimizer step\u001b[39;00m\n\u001b[1;32m---> 19\u001b[0m \u001b[43moptimizer\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mstep\u001b[49m\u001b[43m(\u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m 21\u001b[0m val_loss \u001b[38;5;241m=\u001b[39m evaluate_model(model, val_input, val_target, params[\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mvocab_size\u001b[39m\u001b[38;5;124m\"\u001b[39m])\n\u001b[0;32m 23\u001b[0m \u001b[38;5;66;03m# Output loss for this epoch\u001b[39;00m\n", - "File \u001b[1;32mc:\\Users\\leo.breedt\\miniconda3\\envs\\neptune_scale_py_312_base\\Lib\\site-packages\\torch\\optim\\optimizer.py:493\u001b[0m, in 
\u001b[0;36mOptimizer.profile_hook_step..wrapper\u001b[1;34m(*args, **kwargs)\u001b[0m\n\u001b[0;32m 488\u001b[0m \u001b[38;5;28;01melse\u001b[39;00m:\n\u001b[0;32m 489\u001b[0m \u001b[38;5;28;01mraise\u001b[39;00m \u001b[38;5;167;01mRuntimeError\u001b[39;00m(\n\u001b[0;32m 490\u001b[0m \u001b[38;5;124mf\u001b[39m\u001b[38;5;124m\"\u001b[39m\u001b[38;5;132;01m{\u001b[39;00mfunc\u001b[38;5;132;01m}\u001b[39;00m\u001b[38;5;124m must return None or a tuple of (new_args, new_kwargs), but got \u001b[39m\u001b[38;5;132;01m{\u001b[39;00mresult\u001b[38;5;132;01m}\u001b[39;00m\u001b[38;5;124m.\u001b[39m\u001b[38;5;124m\"\u001b[39m\n\u001b[0;32m 491\u001b[0m )\n\u001b[1;32m--> 493\u001b[0m out \u001b[38;5;241m=\u001b[39m \u001b[43mfunc\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m 494\u001b[0m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_optimizer_step_code()\n\u001b[0;32m 496\u001b[0m \u001b[38;5;66;03m# call optimizer step post hooks\u001b[39;00m\n", - "File \u001b[1;32mc:\\Users\\leo.breedt\\miniconda3\\envs\\neptune_scale_py_312_base\\Lib\\site-packages\\torch\\optim\\optimizer.py:91\u001b[0m, in \u001b[0;36m_use_grad_for_differentiable.._use_grad\u001b[1;34m(self, *args, **kwargs)\u001b[0m\n\u001b[0;32m 89\u001b[0m torch\u001b[38;5;241m.\u001b[39mset_grad_enabled(\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mdefaults[\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mdifferentiable\u001b[39m\u001b[38;5;124m\"\u001b[39m])\n\u001b[0;32m 90\u001b[0m torch\u001b[38;5;241m.\u001b[39m_dynamo\u001b[38;5;241m.\u001b[39mgraph_break()\n\u001b[1;32m---> 91\u001b[0m ret \u001b[38;5;241m=\u001b[39m \u001b[43mfunc\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[43m,\u001b[49m\u001b[43m 
\u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m 92\u001b[0m \u001b[38;5;28;01mfinally\u001b[39;00m:\n\u001b[0;32m 93\u001b[0m torch\u001b[38;5;241m.\u001b[39m_dynamo\u001b[38;5;241m.\u001b[39mgraph_break()\n", - "File \u001b[1;32mc:\\Users\\leo.breedt\\miniconda3\\envs\\neptune_scale_py_312_base\\Lib\\site-packages\\torch\\optim\\adam.py:244\u001b[0m, in \u001b[0;36mAdam.step\u001b[1;34m(self, closure)\u001b[0m\n\u001b[0;32m 232\u001b[0m beta1, beta2 \u001b[38;5;241m=\u001b[39m group[\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mbetas\u001b[39m\u001b[38;5;124m\"\u001b[39m]\n\u001b[0;32m 234\u001b[0m has_complex \u001b[38;5;241m=\u001b[39m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_init_group(\n\u001b[0;32m 235\u001b[0m group,\n\u001b[0;32m 236\u001b[0m params_with_grad,\n\u001b[1;32m (...)\u001b[0m\n\u001b[0;32m 241\u001b[0m state_steps,\n\u001b[0;32m 242\u001b[0m )\n\u001b[1;32m--> 244\u001b[0m \u001b[43madam\u001b[49m\u001b[43m(\u001b[49m\n\u001b[0;32m 245\u001b[0m \u001b[43m \u001b[49m\u001b[43mparams_with_grad\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 246\u001b[0m \u001b[43m \u001b[49m\u001b[43mgrads\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 247\u001b[0m \u001b[43m \u001b[49m\u001b[43mexp_avgs\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 248\u001b[0m \u001b[43m \u001b[49m\u001b[43mexp_avg_sqs\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 249\u001b[0m \u001b[43m \u001b[49m\u001b[43mmax_exp_avg_sqs\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 250\u001b[0m \u001b[43m \u001b[49m\u001b[43mstate_steps\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 251\u001b[0m \u001b[43m 
\u001b[49m\u001b[43mamsgrad\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mgroup\u001b[49m\u001b[43m[\u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43mamsgrad\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m]\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 252\u001b[0m \u001b[43m \u001b[49m\u001b[43mhas_complex\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mhas_complex\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 253\u001b[0m \u001b[43m \u001b[49m\u001b[43mbeta1\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mbeta1\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 254\u001b[0m \u001b[43m \u001b[49m\u001b[43mbeta2\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mbeta2\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 255\u001b[0m \u001b[43m \u001b[49m\u001b[43mlr\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mgroup\u001b[49m\u001b[43m[\u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43mlr\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m]\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 256\u001b[0m \u001b[43m \u001b[49m\u001b[43mweight_decay\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mgroup\u001b[49m\u001b[43m[\u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43mweight_decay\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m]\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 257\u001b[0m \u001b[43m \u001b[49m\u001b[43meps\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mgroup\u001b[49m\u001b[43m[\u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43meps\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m]\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 258\u001b[0m \u001b[43m \u001b[49m\u001b[43mmaximize\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mgroup\u001b[49m\u001b[43m[\u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43mmaximize\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m]\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 259\u001b[0m \u001b[43m 
\u001b[49m\u001b[43mforeach\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mgroup\u001b[49m\u001b[43m[\u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43mforeach\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m]\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 260\u001b[0m \u001b[43m \u001b[49m\u001b[43mcapturable\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mgroup\u001b[49m\u001b[43m[\u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43mcapturable\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m]\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 261\u001b[0m \u001b[43m \u001b[49m\u001b[43mdifferentiable\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mgroup\u001b[49m\u001b[43m[\u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43mdifferentiable\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m]\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 262\u001b[0m \u001b[43m \u001b[49m\u001b[43mfused\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mgroup\u001b[49m\u001b[43m[\u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43mfused\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m]\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 263\u001b[0m \u001b[43m \u001b[49m\u001b[43mgrad_scale\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[38;5;28;43mgetattr\u001b[39;49m\u001b[43m(\u001b[49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43mgrad_scale\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;28;43;01mNone\u001b[39;49;00m\u001b[43m)\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 264\u001b[0m \u001b[43m \u001b[49m\u001b[43mfound_inf\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[38;5;28;43mgetattr\u001b[39;49m\u001b[43m(\u001b[49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[43m,\u001b[49m\u001b[43m 
\u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43mfound_inf\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;28;43;01mNone\u001b[39;49;00m\u001b[43m)\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 265\u001b[0m \u001b[43m \u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m 267\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m loss\n", - "File \u001b[1;32mc:\\Users\\leo.breedt\\miniconda3\\envs\\neptune_scale_py_312_base\\Lib\\site-packages\\torch\\optim\\optimizer.py:154\u001b[0m, in \u001b[0;36m_disable_dynamo_if_unsupported..wrapper..maybe_fallback\u001b[1;34m(*args, **kwargs)\u001b[0m\n\u001b[0;32m 152\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m disabled_func(\u001b[38;5;241m*\u001b[39margs, \u001b[38;5;241m*\u001b[39m\u001b[38;5;241m*\u001b[39mkwargs)\n\u001b[0;32m 153\u001b[0m \u001b[38;5;28;01melse\u001b[39;00m:\n\u001b[1;32m--> 154\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[43mfunc\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n", - "File \u001b[1;32mc:\\Users\\leo.breedt\\miniconda3\\envs\\neptune_scale_py_312_base\\Lib\\site-packages\\torch\\optim\\adam.py:876\u001b[0m, in \u001b[0;36madam\u001b[1;34m(params, grads, exp_avgs, exp_avg_sqs, max_exp_avg_sqs, state_steps, foreach, capturable, differentiable, fused, grad_scale, found_inf, has_complex, amsgrad, beta1, beta2, lr, weight_decay, eps, maximize)\u001b[0m\n\u001b[0;32m 873\u001b[0m \u001b[38;5;28;01melse\u001b[39;00m:\n\u001b[0;32m 874\u001b[0m func \u001b[38;5;241m=\u001b[39m _single_tensor_adam\n\u001b[1;32m--> 876\u001b[0m \u001b[43mfunc\u001b[49m\u001b[43m(\u001b[49m\n\u001b[0;32m 877\u001b[0m \u001b[43m \u001b[49m\u001b[43mparams\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 878\u001b[0m \u001b[43m 
\u001b[49m\u001b[43mgrads\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 879\u001b[0m \u001b[43m \u001b[49m\u001b[43mexp_avgs\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 880\u001b[0m \u001b[43m \u001b[49m\u001b[43mexp_avg_sqs\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 881\u001b[0m \u001b[43m \u001b[49m\u001b[43mmax_exp_avg_sqs\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 882\u001b[0m \u001b[43m \u001b[49m\u001b[43mstate_steps\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 883\u001b[0m \u001b[43m \u001b[49m\u001b[43mamsgrad\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mamsgrad\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 884\u001b[0m \u001b[43m \u001b[49m\u001b[43mhas_complex\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mhas_complex\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 885\u001b[0m \u001b[43m \u001b[49m\u001b[43mbeta1\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mbeta1\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 886\u001b[0m \u001b[43m \u001b[49m\u001b[43mbeta2\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mbeta2\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 887\u001b[0m \u001b[43m \u001b[49m\u001b[43mlr\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mlr\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 888\u001b[0m \u001b[43m \u001b[49m\u001b[43mweight_decay\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mweight_decay\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 889\u001b[0m \u001b[43m \u001b[49m\u001b[43meps\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43meps\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 890\u001b[0m \u001b[43m \u001b[49m\u001b[43mmaximize\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mmaximize\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 891\u001b[0m \u001b[43m \u001b[49m\u001b[43mcapturable\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mcapturable\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 892\u001b[0m \u001b[43m 
\u001b[49m\u001b[43mdifferentiable\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mdifferentiable\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 893\u001b[0m \u001b[43m \u001b[49m\u001b[43mgrad_scale\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mgrad_scale\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 894\u001b[0m \u001b[43m \u001b[49m\u001b[43mfound_inf\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mfound_inf\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 895\u001b[0m \u001b[43m\u001b[49m\u001b[43m)\u001b[49m\n", - "File \u001b[1;32mc:\\Users\\leo.breedt\\miniconda3\\envs\\neptune_scale_py_312_base\\Lib\\site-packages\\torch\\optim\\adam.py:425\u001b[0m, in \u001b[0;36m_single_tensor_adam\u001b[1;34m(params, grads, exp_avgs, exp_avg_sqs, max_exp_avg_sqs, state_steps, grad_scale, found_inf, amsgrad, has_complex, beta1, beta2, lr, weight_decay, eps, maximize, capturable, differentiable)\u001b[0m\n\u001b[0;32m 422\u001b[0m \u001b[38;5;66;03m# Decay the first and second moment running average coefficient\u001b[39;00m\n\u001b[0;32m 423\u001b[0m exp_avg\u001b[38;5;241m.\u001b[39mlerp_(grad, \u001b[38;5;241m1\u001b[39m \u001b[38;5;241m-\u001b[39m device_beta1)\n\u001b[1;32m--> 425\u001b[0m \u001b[43mexp_avg_sq\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mmul_\u001b[49m\u001b[43m(\u001b[49m\u001b[43mbeta2\u001b[49m\u001b[43m)\u001b[49m\u001b[38;5;241m.\u001b[39maddcmul_(grad, grad\u001b[38;5;241m.\u001b[39mconj(), value\u001b[38;5;241m=\u001b[39m\u001b[38;5;241m1\u001b[39m \u001b[38;5;241m-\u001b[39m beta2)\n\u001b[0;32m 427\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m capturable \u001b[38;5;129;01mor\u001b[39;00m differentiable:\n\u001b[0;32m 428\u001b[0m step \u001b[38;5;241m=\u001b[39m step_t\n", + "Cell \u001b[1;32mIn[27], line 25\u001b[0m\n\u001b[0;32m 22\u001b[0m \u001b[38;5;66;03m# Optimizer step\u001b[39;00m\n\u001b[0;32m 23\u001b[0m optimizer\u001b[38;5;241m.\u001b[39mstep()\n\u001b[1;32m---> 25\u001b[0m val_loss \u001b[38;5;241m=\u001b[39m 
\u001b[43mevaluate_model\u001b[49m\u001b[43m(\u001b[49m\u001b[43mmodel\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mval_input\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mval_target\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mparams\u001b[49m\u001b[43m[\u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43mvocab_size\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m]\u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m 27\u001b[0m \u001b[38;5;66;03m# Track activations\u001b[39;00m\n\u001b[0;32m 28\u001b[0m \u001b[38;5;28;01mfor\u001b[39;00m name, activation \u001b[38;5;129;01min\u001b[39;00m model\u001b[38;5;241m.\u001b[39mget_activations()\u001b[38;5;241m.\u001b[39mitems():\n", + "Cell \u001b[1;32mIn[23], line 4\u001b[0m, in \u001b[0;36mevaluate_model\u001b[1;34m(model, input, target, vocab_size)\u001b[0m\n\u001b[0;32m 2\u001b[0m model\u001b[38;5;241m.\u001b[39meval() \u001b[38;5;66;03m# set model to evaluation mode\u001b[39;00m\n\u001b[0;32m 3\u001b[0m \u001b[38;5;28;01mwith\u001b[39;00m torch\u001b[38;5;241m.\u001b[39mno_grad(): \u001b[38;5;66;03m# disable gradient calculation\u001b[39;00m\n\u001b[1;32m----> 4\u001b[0m output \u001b[38;5;241m=\u001b[39m \u001b[43mmodel\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;28;43minput\u001b[39;49m\u001b[43m)\u001b[49m\n\u001b[0;32m 5\u001b[0m output \u001b[38;5;241m=\u001b[39m output\u001b[38;5;241m.\u001b[39mview(\u001b[38;5;241m-\u001b[39m\u001b[38;5;241m1\u001b[39m, vocab_size)\n\u001b[0;32m 6\u001b[0m data_target \u001b[38;5;241m=\u001b[39m target\u001b[38;5;241m.\u001b[39mview(\u001b[38;5;241m-\u001b[39m\u001b[38;5;241m1\u001b[39m)\n", + "File \u001b[1;32mc:\\Users\\leo.breedt\\miniconda3\\envs\\neptune_scale_py_312_base\\Lib\\site-packages\\torch\\nn\\modules\\module.py:1739\u001b[0m, in \u001b[0;36mModule._wrapped_call_impl\u001b[1;34m(self, *args, **kwargs)\u001b[0m\n\u001b[0;32m 1737\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m 
\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_compiled_call_impl(\u001b[38;5;241m*\u001b[39margs, \u001b[38;5;241m*\u001b[39m\u001b[38;5;241m*\u001b[39mkwargs) \u001b[38;5;66;03m# type: ignore[misc]\u001b[39;00m\n\u001b[0;32m 1738\u001b[0m \u001b[38;5;28;01melse\u001b[39;00m:\n\u001b[1;32m-> 1739\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_call_impl\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n", + "File \u001b[1;32mc:\\Users\\leo.breedt\\miniconda3\\envs\\neptune_scale_py_312_base\\Lib\\site-packages\\torch\\nn\\modules\\module.py:1750\u001b[0m, in \u001b[0;36mModule._call_impl\u001b[1;34m(self, *args, **kwargs)\u001b[0m\n\u001b[0;32m 1745\u001b[0m \u001b[38;5;66;03m# If we don't have any hooks, we want to skip the rest of the logic in\u001b[39;00m\n\u001b[0;32m 1746\u001b[0m \u001b[38;5;66;03m# this function, and just call forward.\u001b[39;00m\n\u001b[0;32m 1747\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;129;01mnot\u001b[39;00m (\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_backward_hooks \u001b[38;5;129;01mor\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_backward_pre_hooks \u001b[38;5;129;01mor\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_forward_hooks \u001b[38;5;129;01mor\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_forward_pre_hooks\n\u001b[0;32m 1748\u001b[0m \u001b[38;5;129;01mor\u001b[39;00m _global_backward_pre_hooks \u001b[38;5;129;01mor\u001b[39;00m _global_backward_hooks\n\u001b[0;32m 1749\u001b[0m \u001b[38;5;129;01mor\u001b[39;00m _global_forward_hooks \u001b[38;5;129;01mor\u001b[39;00m _global_forward_pre_hooks):\n\u001b[1;32m-> 1750\u001b[0m 
\u001b[38;5;28;01mreturn\u001b[39;00m \u001b[43mforward_call\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m 1752\u001b[0m result \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;01mNone\u001b[39;00m\n\u001b[0;32m 1753\u001b[0m called_always_called_hooks \u001b[38;5;241m=\u001b[39m \u001b[38;5;28mset\u001b[39m()\n", + "Cell \u001b[1;32mIn[24], line 22\u001b[0m, in \u001b[0;36mSimpleLLM.forward\u001b[1;34m(self, x)\u001b[0m\n\u001b[0;32m 20\u001b[0m \u001b[38;5;28;01mdef\u001b[39;00m \u001b[38;5;21mforward\u001b[39m(\u001b[38;5;28mself\u001b[39m, x):\n\u001b[0;32m 21\u001b[0m x \u001b[38;5;241m=\u001b[39m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39membedding(x)\n\u001b[1;32m---> 22\u001b[0m lstm_out, _ \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mlstm\u001b[49m\u001b[43m(\u001b[49m\u001b[43mx\u001b[49m\u001b[43m)\u001b[49m \u001b[38;5;66;03m# LSTM returns output and hidden/cell state tuple\u001b[39;00m\n\u001b[0;32m 23\u001b[0m out \u001b[38;5;241m=\u001b[39m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mfc(lstm_out)\n\u001b[0;32m 24\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m out\n", + "File \u001b[1;32mc:\\Users\\leo.breedt\\miniconda3\\envs\\neptune_scale_py_312_base\\Lib\\site-packages\\torch\\nn\\modules\\module.py:1739\u001b[0m, in \u001b[0;36mModule._wrapped_call_impl\u001b[1;34m(self, *args, **kwargs)\u001b[0m\n\u001b[0;32m 1737\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_compiled_call_impl(\u001b[38;5;241m*\u001b[39margs, \u001b[38;5;241m*\u001b[39m\u001b[38;5;241m*\u001b[39mkwargs) \u001b[38;5;66;03m# type: ignore[misc]\u001b[39;00m\n\u001b[0;32m 1738\u001b[0m \u001b[38;5;28;01melse\u001b[39;00m:\n\u001b[1;32m-> 
1739\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_call_impl\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n", + "File \u001b[1;32mc:\\Users\\leo.breedt\\miniconda3\\envs\\neptune_scale_py_312_base\\Lib\\site-packages\\torch\\nn\\modules\\module.py:1845\u001b[0m, in \u001b[0;36mModule._call_impl\u001b[1;34m(self, *args, **kwargs)\u001b[0m\n\u001b[0;32m 1842\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m inner()\n\u001b[0;32m 1844\u001b[0m \u001b[38;5;28;01mtry\u001b[39;00m:\n\u001b[1;32m-> 1845\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[43minner\u001b[49m\u001b[43m(\u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m 1846\u001b[0m \u001b[38;5;28;01mexcept\u001b[39;00m \u001b[38;5;167;01mException\u001b[39;00m:\n\u001b[0;32m 1847\u001b[0m \u001b[38;5;66;03m# run always called hooks if they have not already been run\u001b[39;00m\n\u001b[0;32m 1848\u001b[0m \u001b[38;5;66;03m# For now only forward hooks have the always_call option but perhaps\u001b[39;00m\n\u001b[0;32m 1849\u001b[0m \u001b[38;5;66;03m# this functionality should be added to full backward hooks as well.\u001b[39;00m\n\u001b[0;32m 1850\u001b[0m \u001b[38;5;28;01mfor\u001b[39;00m hook_id, hook \u001b[38;5;129;01min\u001b[39;00m _global_forward_hooks\u001b[38;5;241m.\u001b[39mitems():\n", + "File \u001b[1;32mc:\\Users\\leo.breedt\\miniconda3\\envs\\neptune_scale_py_312_base\\Lib\\site-packages\\torch\\nn\\modules\\module.py:1793\u001b[0m, in \u001b[0;36mModule._call_impl..inner\u001b[1;34m()\u001b[0m\n\u001b[0;32m 1790\u001b[0m bw_hook \u001b[38;5;241m=\u001b[39m BackwardHook(\u001b[38;5;28mself\u001b[39m, full_backward_hooks, backward_pre_hooks)\n\u001b[0;32m 1791\u001b[0m args \u001b[38;5;241m=\u001b[39m 
bw_hook\u001b[38;5;241m.\u001b[39msetup_input_hook(args)\n\u001b[1;32m-> 1793\u001b[0m result \u001b[38;5;241m=\u001b[39m \u001b[43mforward_call\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m 1794\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m _global_forward_hooks \u001b[38;5;129;01mor\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_forward_hooks:\n\u001b[0;32m 1795\u001b[0m \u001b[38;5;28;01mfor\u001b[39;00m hook_id, hook \u001b[38;5;129;01min\u001b[39;00m (\n\u001b[0;32m 1796\u001b[0m \u001b[38;5;241m*\u001b[39m_global_forward_hooks\u001b[38;5;241m.\u001b[39mitems(),\n\u001b[0;32m 1797\u001b[0m \u001b[38;5;241m*\u001b[39m\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_forward_hooks\u001b[38;5;241m.\u001b[39mitems(),\n\u001b[0;32m 1798\u001b[0m ):\n\u001b[0;32m 1799\u001b[0m \u001b[38;5;66;03m# mark that always called hook is run\u001b[39;00m\n", + "File \u001b[1;32mc:\\Users\\leo.breedt\\miniconda3\\envs\\neptune_scale_py_312_base\\Lib\\site-packages\\torch\\nn\\modules\\rnn.py:1124\u001b[0m, in \u001b[0;36mLSTM.forward\u001b[1;34m(self, input, hx)\u001b[0m\n\u001b[0;32m 1121\u001b[0m hx \u001b[38;5;241m=\u001b[39m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mpermute_hidden(hx, sorted_indices)\n\u001b[0;32m 1123\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m batch_sizes \u001b[38;5;129;01mis\u001b[39;00m \u001b[38;5;28;01mNone\u001b[39;00m:\n\u001b[1;32m-> 1124\u001b[0m result \u001b[38;5;241m=\u001b[39m \u001b[43m_VF\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mlstm\u001b[49m\u001b[43m(\u001b[49m\n\u001b[0;32m 1125\u001b[0m \u001b[43m \u001b[49m\u001b[38;5;28;43minput\u001b[39;49m\u001b[43m,\u001b[49m\n\u001b[0;32m 1126\u001b[0m \u001b[43m \u001b[49m\u001b[43mhx\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 
1127\u001b[0m \u001b[43m \u001b[49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_flat_weights\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;66;43;03m# type: ignore[arg-type]\u001b[39;49;00m\n\u001b[0;32m 1128\u001b[0m \u001b[43m \u001b[49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mbias\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 1129\u001b[0m \u001b[43m \u001b[49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mnum_layers\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 1130\u001b[0m \u001b[43m \u001b[49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mdropout\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 1131\u001b[0m \u001b[43m \u001b[49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mtraining\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 1132\u001b[0m \u001b[43m \u001b[49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mbidirectional\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 1133\u001b[0m \u001b[43m \u001b[49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mbatch_first\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 1134\u001b[0m \u001b[43m \u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m 1135\u001b[0m \u001b[38;5;28;01melse\u001b[39;00m:\n\u001b[0;32m 1136\u001b[0m result \u001b[38;5;241m=\u001b[39m _VF\u001b[38;5;241m.\u001b[39mlstm(\n\u001b[0;32m 1137\u001b[0m \u001b[38;5;28minput\u001b[39m,\n\u001b[0;32m 1138\u001b[0m batch_sizes,\n\u001b[1;32m (...)\u001b[0m\n\u001b[0;32m 1145\u001b[0m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mbidirectional,\n\u001b[0;32m 1146\u001b[0m )\n", "\u001b[1;31mKeyboardInterrupt\u001b[0m: " ] } ], "source": [ + "activation_dict_mean = {}\n", + "activation_dict_std = {}\n", "\n", "# Training loop\n", "for epoch in range(params[\"epochs\"]):\n", " optimizer.zero_grad()\n", + " 
model.clear_activations()\n", "\n", " # Forward pass\n", " output = model(train_input)\n", @@ -313,11 +426,18 @@ "\n", " val_loss = evaluate_model(model, val_input, val_target, params[\"vocab_size\"])\n", "\n", + " # Track activations\n", + " for name, activation in model.get_activations().items():\n", + " activation_dict_mean[f\"layers/layer_{name}/activation_mean\"] = activation[0].mean().item()\n", + " activation_dict_std[f\"layers/layer_{name}/activation_std\"] = activation[0].std().item()\n", + "\n", " # Output loss for this epoch\n", " run.log_metrics(\n", " data = {\n", " \"training/loss\": loss.item(),\n", - " \"validation/loss\": val_loss\n", + " \"validation/loss\": val_loss,\n", + " **activation_dict_mean,\n", + " **activation_dict_std\n", " },\n", " step = epoch\n", " )\n", From ae330e6b3d11c9c5deb9af53107f190b07c17c28 Mon Sep 17 00:00:00 2001 From: LeoRoccoBreedt Date: Mon, 3 Mar 2025 16:01:40 +0100 Subject: [PATCH 018/125] refactor: add tracking for grad norms --- .../pytorch_text_model_debugging.ipynb | 225 +++++++++--------- 1 file changed, 117 insertions(+), 108 deletions(-) diff --git a/integrations-and-supported-tools/pytorch/pytorch_text_model_debugging.ipynb b/integrations-and-supported-tools/pytorch/pytorch_text_model_debugging.ipynb index 0f08f79..400e331 100644 --- a/integrations-and-supported-tools/pytorch/pytorch_text_model_debugging.ipynb +++ b/integrations-and-supported-tools/pytorch/pytorch_text_model_debugging.ipynb @@ -22,7 +22,8 @@ "outputs": [], "source": [ "# TODO - improve dataset to be more realistic\n", - "# TODO - capture the folowings; attentions, activations, gradients, weights, loss (agg and per layer), learning rate (per layer if needed)\n", + "# TODO - capture the folowings; attentions, gradients - weights, loss (agg and per layer), learning rate (per layer if needed)\n", + " # complete - activations, grad norms\n", "# TODO - capture per layer losses\n", "# TODO - increase number of layers\n", "# TODO - Adding dropout to 
demonstrate the effects of regularization\n", @@ -55,7 +56,7 @@ }, { "cell_type": "code", - "execution_count": 22, + "execution_count": 29, "metadata": {}, "outputs": [], "source": [ @@ -147,7 +148,7 @@ }, { "cell_type": "code", - "execution_count": 24, + "execution_count": 34, "metadata": {}, "outputs": [], "source": [ @@ -162,13 +163,14 @@ " # Initialize activations dictionary to store layer activations\n", " self.activations = {}\n", "\n", - " # Registering hooks to track activations\n", + " ## Registering hooks to track activations\n", " self.hooks = []\n", " self.hooks.append(self.lstm.register_forward_hook(self.save_activation(\"lstm\")))\n", " self.hooks.append(self.fc.register_forward_hook(self.save_activation(\"fc\")))\n", "\n", - " # Registering hooks to track gradients\n", - "\n", + " ## Registering hooks to track gradients\n", + " self.hooks.append(self.lstm.register_full_backward_hook(self.save_gradient(\"lstm\")))\n", + " self.hooks.append(self.fc.register_full_backward_hook(self.save_gradient(\"fc\")))\n", " \n", " def forward(self, x):\n", " x = self.embedding(x)\n", @@ -188,6 +190,19 @@ " def clear_activations(self):\n", " self.activations = {}\n", "\n", + " # Function to save gradients\n", + " def save_gradient(self, name):\n", + " def hook(module, grad_input, grad_output):\n", + " self.gradients[name] = grad_output[0] # Save gradient output in the dictionary\n", + " return hook\n", + " \n", + " def get_gradients(self):\n", + " return self.gradients\n", + " \n", + " def clear_gradients(self):\n", + " self.gradients = {}\n", + "\n", + "\n", "model = SimpleLLM(params[\"vocab_size\"], params[\"embed_size\"], params[\"hidden_size\"])\n", "optimizer = optim.Adam(model.parameters(), lr = params[\"learning_rate\"])\n", "criterion = nn.CrossEntropyLoss()\n" @@ -279,104 +294,92 @@ }, { "cell_type": "code", - "execution_count": 27, + "execution_count": 36, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ - "Epoch 1/400, 
Loss: 11.6612\n", - "Epoch 2/400, Loss: 11.1299\n", - "Epoch 3/400, Loss: 10.6519\n", - "Epoch 4/400, Loss: 10.2014\n", - "Epoch 5/400, Loss: 9.7649\n", - "Epoch 6/400, Loss: 9.3361\n", - "Epoch 7/400, Loss: 8.9123\n", - "Epoch 8/400, Loss: 8.4927\n", - "Epoch 9/400, Loss: 8.0768\n", - "Epoch 10/400, Loss: 7.6644\n", - "Epoch 11/400, Loss: 7.2550\n", - "Epoch 12/400, Loss: 6.8478\n", - "Epoch 13/400, Loss: 6.4424\n", - "Epoch 14/400, Loss: 6.0383\n", - "Epoch 15/400, Loss: 5.6355\n", - "Epoch 16/400, Loss: 5.2345\n", - "Epoch 17/400, Loss: 4.8363\n", - "Epoch 18/400, Loss: 4.4415\n", - "Epoch 19/400, Loss: 4.0512\n", - "Epoch 20/400, Loss: 3.6666\n", - "Epoch 21/400, Loss: 3.2899\n", - "Epoch 22/400, Loss: 2.9234\n", - "Epoch 23/400, Loss: 2.5708\n", - "Epoch 24/400, Loss: 2.2367\n", - "Epoch 25/400, Loss: 1.9260\n", - "Epoch 26/400, Loss: 1.6431\n", - "Epoch 27/400, Loss: 1.3913\n", - "Epoch 28/400, Loss: 1.1719\n", - "Epoch 29/400, Loss: 0.9843\n", - "Epoch 30/400, Loss: 0.8258\n", - "Epoch 31/400, Loss: 0.6929\n", - "Epoch 32/400, Loss: 0.5816\n", - "Epoch 33/400, Loss: 0.4882\n", - "Epoch 34/400, Loss: 0.4094\n", - "Epoch 35/400, Loss: 0.3426\n", - "Epoch 36/400, Loss: 0.2859\n", - "Epoch 37/400, Loss: 0.2378\n", - "Epoch 38/400, Loss: 0.1972\n", - "Epoch 39/400, Loss: 0.1631\n", - "Epoch 40/400, Loss: 0.1346\n", - "Epoch 41/400, Loss: 0.1110\n", - "Epoch 42/400, Loss: 0.0916\n", - "Epoch 43/400, Loss: 0.0759\n", - "Epoch 44/400, Loss: 0.0632\n", - "Epoch 45/400, Loss: 0.0531\n", - "Epoch 46/400, Loss: 0.0450\n", - "Epoch 47/400, Loss: 0.0385\n", - "Epoch 48/400, Loss: 0.0333\n", - "Epoch 49/400, Loss: 0.0291\n", - "Epoch 50/400, Loss: 0.0257\n", - "Epoch 51/400, Loss: 0.0229\n", - "Epoch 52/400, Loss: 0.0206\n", - "Epoch 53/400, Loss: 0.0187\n", - "Epoch 54/400, Loss: 0.0171\n", + "Epoch 1/400, Loss: 11.5970\n", + "Epoch 2/400, Loss: 11.0686\n", + "Epoch 3/400, Loss: 10.5915\n", + "Epoch 4/400, Loss: 10.1422\n", + "Epoch 5/400, Loss: 9.7076\n", + "Epoch 6/400, 
Loss: 9.2819\n", + "Epoch 7/400, Loss: 8.8626\n", + "Epoch 8/400, Loss: 8.4479\n", + "Epoch 9/400, Loss: 8.0365\n", + "Epoch 10/400, Loss: 7.6276\n", + "Epoch 11/400, Loss: 7.2211\n", + "Epoch 12/400, Loss: 6.8171\n", + "Epoch 13/400, Loss: 6.4153\n", + "Epoch 14/400, Loss: 6.0154\n", + "Epoch 15/400, Loss: 5.6169\n", + "Epoch 16/400, Loss: 5.2196\n", + "Epoch 17/400, Loss: 4.8243\n", + "Epoch 18/400, Loss: 4.4318\n", + "Epoch 19/400, Loss: 4.0431\n", + "Epoch 20/400, Loss: 3.6596\n", + "Epoch 21/400, Loss: 3.2834\n", + "Epoch 22/400, Loss: 2.9174\n", + "Epoch 23/400, Loss: 2.5654\n", + "Epoch 24/400, Loss: 2.2321\n", + "Epoch 25/400, Loss: 1.9223\n", + "Epoch 26/400, Loss: 1.6402\n", + "Epoch 27/400, Loss: 1.3889\n", + "Epoch 28/400, Loss: 1.1696\n", + "Epoch 29/400, Loss: 0.9814\n", + "Epoch 30/400, Loss: 0.8221\n", + "Epoch 31/400, Loss: 0.6882\n", + "Epoch 32/400, Loss: 0.5760\n", + "Epoch 33/400, Loss: 0.4818\n", + "Epoch 34/400, Loss: 0.4025\n", + "Epoch 35/400, Loss: 0.3356\n", + "Epoch 36/400, Loss: 0.2791\n", + "Epoch 37/400, Loss: 0.2315\n", + "Epoch 38/400, Loss: 0.1917\n", + "Epoch 39/400, Loss: 0.1585\n", + "Epoch 40/400, Loss: 0.1311\n", + "Epoch 41/400, Loss: 0.1086\n", + "Epoch 42/400, Loss: 0.0903\n", + "Epoch 43/400, Loss: 0.0755\n", + "Epoch 44/400, Loss: 0.0634\n", + "Epoch 45/400, Loss: 0.0537\n", + "Epoch 46/400, Loss: 0.0457\n", + "Epoch 47/400, Loss: 0.0393\n", + "Epoch 48/400, Loss: 0.0340\n", + "Epoch 49/400, Loss: 0.0297\n", + "Epoch 50/400, Loss: 0.0262\n", + "Epoch 51/400, Loss: 0.0233\n", + "Epoch 52/400, Loss: 0.0209\n", + "Epoch 53/400, Loss: 0.0189\n", + "Epoch 54/400, Loss: 0.0172\n", "Epoch 55/400, Loss: 0.0158\n", - "Epoch 56/400, Loss: 0.0147\n", - "Epoch 57/400, Loss: 0.0137\n", - "Epoch 58/400, Loss: 0.0129\n", - "Epoch 59/400, Loss: 0.0122\n", - "Epoch 60/400, Loss: 0.0116\n", - "Epoch 61/400, Loss: 0.0110\n", - "Epoch 62/400, Loss: 0.0106\n", - "Epoch 63/400, Loss: 0.0102\n", - "Epoch 64/400, Loss: 0.0098\n", - "Epoch 
65/400, Loss: 0.0095\n", - "Epoch 66/400, Loss: 0.0092\n", - "Epoch 67/400, Loss: 0.0089\n", - "Epoch 68/400, Loss: 0.0087\n", - "Epoch 69/400, Loss: 0.0085\n", - "Epoch 70/400, Loss: 0.0083\n", - "Epoch 71/400, Loss: 0.0081\n", - "Epoch 72/400, Loss: 0.0079\n", - "Epoch 73/400, Loss: 0.0078\n", - "Epoch 74/400, Loss: 0.0076\n", - "Epoch 75/400, Loss: 0.0075\n", - "Epoch 76/400, Loss: 0.0074\n", - "Epoch 77/400, Loss: 0.0073\n", - "Epoch 78/400, Loss: 0.0072\n", - "Epoch 79/400, Loss: 0.0071\n", - "Epoch 80/400, Loss: 0.0070\n", - "Epoch 81/400, Loss: 0.0069\n", - "Epoch 82/400, Loss: 0.0068\n", - "Epoch 83/400, Loss: 0.0067\n", - "Epoch 84/400, Loss: 0.0067\n", - "Epoch 85/400, Loss: 0.0066\n", - "Epoch 86/400, Loss: 0.0065\n", - "Epoch 87/400, Loss: 0.0065\n", - "Epoch 88/400, Loss: 0.0064\n", - "Epoch 89/400, Loss: 0.0063\n", - "Epoch 90/400, Loss: 0.0063\n", - "Epoch 91/400, Loss: 0.0062\n" + "Epoch 56/400, Loss: 0.0146\n", + "Epoch 57/400, Loss: 0.0136\n", + "Epoch 58/400, Loss: 0.0128\n", + "Epoch 59/400, Loss: 0.0120\n", + "Epoch 60/400, Loss: 0.0114\n", + "Epoch 61/400, Loss: 0.0108\n", + "Epoch 62/400, Loss: 0.0103\n", + "Epoch 63/400, Loss: 0.0099\n", + "Epoch 64/400, Loss: 0.0095\n", + "Epoch 65/400, Loss: 0.0091\n", + "Epoch 66/400, Loss: 0.0088\n", + "Epoch 67/400, Loss: 0.0086\n", + "Epoch 68/400, Loss: 0.0083\n", + "Epoch 69/400, Loss: 0.0081\n", + "Epoch 70/400, Loss: 0.0079\n", + "Epoch 71/400, Loss: 0.0077\n", + "Epoch 72/400, Loss: 0.0075\n", + "Epoch 73/400, Loss: 0.0074\n", + "Epoch 74/400, Loss: 0.0072\n", + "Epoch 75/400, Loss: 0.0071\n", + "Epoch 76/400, Loss: 0.0070\n", + "Epoch 77/400, Loss: 0.0068\n", + "Epoch 78/400, Loss: 0.0067\n", + "Epoch 79/400, Loss: 0.0066\n" ] }, { @@ -386,15 +389,13 @@ "traceback": [ "\u001b[1;31m---------------------------------------------------------------------------\u001b[0m", "\u001b[1;31mKeyboardInterrupt\u001b[0m Traceback (most recent call last)", - "Cell \u001b[1;32mIn[27], line 
25\u001b[0m\n\u001b[0;32m 22\u001b[0m \u001b[38;5;66;03m# Optimizer step\u001b[39;00m\n\u001b[0;32m 23\u001b[0m optimizer\u001b[38;5;241m.\u001b[39mstep()\n\u001b[1;32m---> 25\u001b[0m val_loss \u001b[38;5;241m=\u001b[39m \u001b[43mevaluate_model\u001b[49m\u001b[43m(\u001b[49m\u001b[43mmodel\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mval_input\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mval_target\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mparams\u001b[49m\u001b[43m[\u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43mvocab_size\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m]\u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m 27\u001b[0m \u001b[38;5;66;03m# Track activations\u001b[39;00m\n\u001b[0;32m 28\u001b[0m \u001b[38;5;28;01mfor\u001b[39;00m name, activation \u001b[38;5;129;01min\u001b[39;00m model\u001b[38;5;241m.\u001b[39mget_activations()\u001b[38;5;241m.\u001b[39mitems():\n", - "Cell \u001b[1;32mIn[23], line 4\u001b[0m, in \u001b[0;36mevaluate_model\u001b[1;34m(model, input, target, vocab_size)\u001b[0m\n\u001b[0;32m 2\u001b[0m model\u001b[38;5;241m.\u001b[39meval() \u001b[38;5;66;03m# set model to evaluation mode\u001b[39;00m\n\u001b[0;32m 3\u001b[0m \u001b[38;5;28;01mwith\u001b[39;00m torch\u001b[38;5;241m.\u001b[39mno_grad(): \u001b[38;5;66;03m# disable gradient calculation\u001b[39;00m\n\u001b[1;32m----> 4\u001b[0m output \u001b[38;5;241m=\u001b[39m \u001b[43mmodel\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;28;43minput\u001b[39;49m\u001b[43m)\u001b[49m\n\u001b[0;32m 5\u001b[0m output \u001b[38;5;241m=\u001b[39m output\u001b[38;5;241m.\u001b[39mview(\u001b[38;5;241m-\u001b[39m\u001b[38;5;241m1\u001b[39m, vocab_size)\n\u001b[0;32m 6\u001b[0m data_target \u001b[38;5;241m=\u001b[39m target\u001b[38;5;241m.\u001b[39mview(\u001b[38;5;241m-\u001b[39m\u001b[38;5;241m1\u001b[39m)\n", - "File 
\u001b[1;32mc:\\Users\\leo.breedt\\miniconda3\\envs\\neptune_scale_py_312_base\\Lib\\site-packages\\torch\\nn\\modules\\module.py:1739\u001b[0m, in \u001b[0;36mModule._wrapped_call_impl\u001b[1;34m(self, *args, **kwargs)\u001b[0m\n\u001b[0;32m 1737\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_compiled_call_impl(\u001b[38;5;241m*\u001b[39margs, \u001b[38;5;241m*\u001b[39m\u001b[38;5;241m*\u001b[39mkwargs) \u001b[38;5;66;03m# type: ignore[misc]\u001b[39;00m\n\u001b[0;32m 1738\u001b[0m \u001b[38;5;28;01melse\u001b[39;00m:\n\u001b[1;32m-> 1739\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_call_impl\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n", - "File \u001b[1;32mc:\\Users\\leo.breedt\\miniconda3\\envs\\neptune_scale_py_312_base\\Lib\\site-packages\\torch\\nn\\modules\\module.py:1750\u001b[0m, in \u001b[0;36mModule._call_impl\u001b[1;34m(self, *args, **kwargs)\u001b[0m\n\u001b[0;32m 1745\u001b[0m \u001b[38;5;66;03m# If we don't have any hooks, we want to skip the rest of the logic in\u001b[39;00m\n\u001b[0;32m 1746\u001b[0m \u001b[38;5;66;03m# this function, and just call forward.\u001b[39;00m\n\u001b[0;32m 1747\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;129;01mnot\u001b[39;00m (\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_backward_hooks \u001b[38;5;129;01mor\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_backward_pre_hooks \u001b[38;5;129;01mor\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_forward_hooks \u001b[38;5;129;01mor\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_forward_pre_hooks\n\u001b[0;32m 1748\u001b[0m 
\u001b[38;5;129;01mor\u001b[39;00m _global_backward_pre_hooks \u001b[38;5;129;01mor\u001b[39;00m _global_backward_hooks\n\u001b[0;32m 1749\u001b[0m \u001b[38;5;129;01mor\u001b[39;00m _global_forward_hooks \u001b[38;5;129;01mor\u001b[39;00m _global_forward_pre_hooks):\n\u001b[1;32m-> 1750\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[43mforward_call\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m 1752\u001b[0m result \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;01mNone\u001b[39;00m\n\u001b[0;32m 1753\u001b[0m called_always_called_hooks \u001b[38;5;241m=\u001b[39m \u001b[38;5;28mset\u001b[39m()\n", - "Cell \u001b[1;32mIn[24], line 22\u001b[0m, in \u001b[0;36mSimpleLLM.forward\u001b[1;34m(self, x)\u001b[0m\n\u001b[0;32m 20\u001b[0m \u001b[38;5;28;01mdef\u001b[39;00m \u001b[38;5;21mforward\u001b[39m(\u001b[38;5;28mself\u001b[39m, x):\n\u001b[0;32m 21\u001b[0m x \u001b[38;5;241m=\u001b[39m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39membedding(x)\n\u001b[1;32m---> 22\u001b[0m lstm_out, _ \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mlstm\u001b[49m\u001b[43m(\u001b[49m\u001b[43mx\u001b[49m\u001b[43m)\u001b[49m \u001b[38;5;66;03m# LSTM returns output and hidden/cell state tuple\u001b[39;00m\n\u001b[0;32m 23\u001b[0m out \u001b[38;5;241m=\u001b[39m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mfc(lstm_out)\n\u001b[0;32m 24\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m out\n", - "File \u001b[1;32mc:\\Users\\leo.breedt\\miniconda3\\envs\\neptune_scale_py_312_base\\Lib\\site-packages\\torch\\nn\\modules\\module.py:1739\u001b[0m, in \u001b[0;36mModule._wrapped_call_impl\u001b[1;34m(self, *args, **kwargs)\u001b[0m\n\u001b[0;32m 1737\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m 
\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_compiled_call_impl(\u001b[38;5;241m*\u001b[39margs, \u001b[38;5;241m*\u001b[39m\u001b[38;5;241m*\u001b[39mkwargs) \u001b[38;5;66;03m# type: ignore[misc]\u001b[39;00m\n\u001b[0;32m 1738\u001b[0m \u001b[38;5;28;01melse\u001b[39;00m:\n\u001b[1;32m-> 1739\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_call_impl\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n", - "File \u001b[1;32mc:\\Users\\leo.breedt\\miniconda3\\envs\\neptune_scale_py_312_base\\Lib\\site-packages\\torch\\nn\\modules\\module.py:1845\u001b[0m, in \u001b[0;36mModule._call_impl\u001b[1;34m(self, *args, **kwargs)\u001b[0m\n\u001b[0;32m 1842\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m inner()\n\u001b[0;32m 1844\u001b[0m \u001b[38;5;28;01mtry\u001b[39;00m:\n\u001b[1;32m-> 1845\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[43minner\u001b[49m\u001b[43m(\u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m 1846\u001b[0m \u001b[38;5;28;01mexcept\u001b[39;00m \u001b[38;5;167;01mException\u001b[39;00m:\n\u001b[0;32m 1847\u001b[0m \u001b[38;5;66;03m# run always called hooks if they have not already been run\u001b[39;00m\n\u001b[0;32m 1848\u001b[0m \u001b[38;5;66;03m# For now only forward hooks have the always_call option but perhaps\u001b[39;00m\n\u001b[0;32m 1849\u001b[0m \u001b[38;5;66;03m# this functionality should be added to full backward hooks as well.\u001b[39;00m\n\u001b[0;32m 1850\u001b[0m \u001b[38;5;28;01mfor\u001b[39;00m hook_id, hook \u001b[38;5;129;01min\u001b[39;00m _global_forward_hooks\u001b[38;5;241m.\u001b[39mitems():\n", - "File 
\u001b[1;32mc:\\Users\\leo.breedt\\miniconda3\\envs\\neptune_scale_py_312_base\\Lib\\site-packages\\torch\\nn\\modules\\module.py:1793\u001b[0m, in \u001b[0;36mModule._call_impl..inner\u001b[1;34m()\u001b[0m\n\u001b[0;32m 1790\u001b[0m bw_hook \u001b[38;5;241m=\u001b[39m BackwardHook(\u001b[38;5;28mself\u001b[39m, full_backward_hooks, backward_pre_hooks)\n\u001b[0;32m 1791\u001b[0m args \u001b[38;5;241m=\u001b[39m bw_hook\u001b[38;5;241m.\u001b[39msetup_input_hook(args)\n\u001b[1;32m-> 1793\u001b[0m result \u001b[38;5;241m=\u001b[39m \u001b[43mforward_call\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m 1794\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m _global_forward_hooks \u001b[38;5;129;01mor\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_forward_hooks:\n\u001b[0;32m 1795\u001b[0m \u001b[38;5;28;01mfor\u001b[39;00m hook_id, hook \u001b[38;5;129;01min\u001b[39;00m (\n\u001b[0;32m 1796\u001b[0m \u001b[38;5;241m*\u001b[39m_global_forward_hooks\u001b[38;5;241m.\u001b[39mitems(),\n\u001b[0;32m 1797\u001b[0m \u001b[38;5;241m*\u001b[39m\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_forward_hooks\u001b[38;5;241m.\u001b[39mitems(),\n\u001b[0;32m 1798\u001b[0m ):\n\u001b[0;32m 1799\u001b[0m \u001b[38;5;66;03m# mark that always called hook is run\u001b[39;00m\n", - "File \u001b[1;32mc:\\Users\\leo.breedt\\miniconda3\\envs\\neptune_scale_py_312_base\\Lib\\site-packages\\torch\\nn\\modules\\rnn.py:1124\u001b[0m, in \u001b[0;36mLSTM.forward\u001b[1;34m(self, input, hx)\u001b[0m\n\u001b[0;32m 1121\u001b[0m hx \u001b[38;5;241m=\u001b[39m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mpermute_hidden(hx, sorted_indices)\n\u001b[0;32m 1123\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m batch_sizes \u001b[38;5;129;01mis\u001b[39;00m 
\u001b[38;5;28;01mNone\u001b[39;00m:\n\u001b[1;32m-> 1124\u001b[0m result \u001b[38;5;241m=\u001b[39m \u001b[43m_VF\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mlstm\u001b[49m\u001b[43m(\u001b[49m\n\u001b[0;32m 1125\u001b[0m \u001b[43m \u001b[49m\u001b[38;5;28;43minput\u001b[39;49m\u001b[43m,\u001b[49m\n\u001b[0;32m 1126\u001b[0m \u001b[43m \u001b[49m\u001b[43mhx\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 1127\u001b[0m \u001b[43m \u001b[49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_flat_weights\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;66;43;03m# type: ignore[arg-type]\u001b[39;49;00m\n\u001b[0;32m 1128\u001b[0m \u001b[43m \u001b[49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mbias\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 1129\u001b[0m \u001b[43m \u001b[49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mnum_layers\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 1130\u001b[0m \u001b[43m \u001b[49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mdropout\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 1131\u001b[0m \u001b[43m \u001b[49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mtraining\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 1132\u001b[0m \u001b[43m \u001b[49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mbidirectional\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 1133\u001b[0m \u001b[43m \u001b[49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mbatch_first\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 1134\u001b[0m \u001b[43m \u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m 1135\u001b[0m \u001b[38;5;28;01melse\u001b[39;00m:\n\u001b[0;32m 1136\u001b[0m result \u001b[38;5;241m=\u001b[39m _VF\u001b[38;5;241m.\u001b[39mlstm(\n\u001b[0;32m 1137\u001b[0m \u001b[38;5;28minput\u001b[39m,\n\u001b[0;32m 1138\u001b[0m 
batch_sizes,\n\u001b[1;32m (...)\u001b[0m\n\u001b[0;32m 1145\u001b[0m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mbidirectional,\n\u001b[0;32m 1146\u001b[0m )\n", + "Cell \u001b[1;32mIn[36], line 26\u001b[0m\n\u001b[0;32m 23\u001b[0m loss\u001b[38;5;241m.\u001b[39mbackward()\n\u001b[0;32m 25\u001b[0m \u001b[38;5;66;03m# Optimizer step\u001b[39;00m\n\u001b[1;32m---> 26\u001b[0m \u001b[43moptimizer\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mstep\u001b[49m\u001b[43m(\u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m 28\u001b[0m val_loss \u001b[38;5;241m=\u001b[39m evaluate_model(model, val_input, val_target, params[\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mvocab_size\u001b[39m\u001b[38;5;124m\"\u001b[39m])\n\u001b[0;32m 30\u001b[0m \u001b[38;5;66;03m# Track activations\u001b[39;00m\n", + "File \u001b[1;32mc:\\Users\\leo.breedt\\miniconda3\\envs\\neptune_scale_py_312_base\\Lib\\site-packages\\torch\\optim\\optimizer.py:493\u001b[0m, in \u001b[0;36mOptimizer.profile_hook_step..wrapper\u001b[1;34m(*args, **kwargs)\u001b[0m\n\u001b[0;32m 488\u001b[0m \u001b[38;5;28;01melse\u001b[39;00m:\n\u001b[0;32m 489\u001b[0m \u001b[38;5;28;01mraise\u001b[39;00m \u001b[38;5;167;01mRuntimeError\u001b[39;00m(\n\u001b[0;32m 490\u001b[0m \u001b[38;5;124mf\u001b[39m\u001b[38;5;124m\"\u001b[39m\u001b[38;5;132;01m{\u001b[39;00mfunc\u001b[38;5;132;01m}\u001b[39;00m\u001b[38;5;124m must return None or a tuple of (new_args, new_kwargs), but got \u001b[39m\u001b[38;5;132;01m{\u001b[39;00mresult\u001b[38;5;132;01m}\u001b[39;00m\u001b[38;5;124m.\u001b[39m\u001b[38;5;124m\"\u001b[39m\n\u001b[0;32m 491\u001b[0m )\n\u001b[1;32m--> 493\u001b[0m out \u001b[38;5;241m=\u001b[39m \u001b[43mfunc\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m 494\u001b[0m 
\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_optimizer_step_code()\n\u001b[0;32m 496\u001b[0m \u001b[38;5;66;03m# call optimizer step post hooks\u001b[39;00m\n", + "File \u001b[1;32mc:\\Users\\leo.breedt\\miniconda3\\envs\\neptune_scale_py_312_base\\Lib\\site-packages\\torch\\optim\\optimizer.py:91\u001b[0m, in \u001b[0;36m_use_grad_for_differentiable.._use_grad\u001b[1;34m(self, *args, **kwargs)\u001b[0m\n\u001b[0;32m 89\u001b[0m torch\u001b[38;5;241m.\u001b[39mset_grad_enabled(\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mdefaults[\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mdifferentiable\u001b[39m\u001b[38;5;124m\"\u001b[39m])\n\u001b[0;32m 90\u001b[0m torch\u001b[38;5;241m.\u001b[39m_dynamo\u001b[38;5;241m.\u001b[39mgraph_break()\n\u001b[1;32m---> 91\u001b[0m ret \u001b[38;5;241m=\u001b[39m \u001b[43mfunc\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m 92\u001b[0m \u001b[38;5;28;01mfinally\u001b[39;00m:\n\u001b[0;32m 93\u001b[0m torch\u001b[38;5;241m.\u001b[39m_dynamo\u001b[38;5;241m.\u001b[39mgraph_break()\n", + "File \u001b[1;32mc:\\Users\\leo.breedt\\miniconda3\\envs\\neptune_scale_py_312_base\\Lib\\site-packages\\torch\\optim\\adam.py:244\u001b[0m, in \u001b[0;36mAdam.step\u001b[1;34m(self, closure)\u001b[0m\n\u001b[0;32m 232\u001b[0m beta1, beta2 \u001b[38;5;241m=\u001b[39m group[\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mbetas\u001b[39m\u001b[38;5;124m\"\u001b[39m]\n\u001b[0;32m 234\u001b[0m has_complex \u001b[38;5;241m=\u001b[39m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_init_group(\n\u001b[0;32m 235\u001b[0m group,\n\u001b[0;32m 236\u001b[0m params_with_grad,\n\u001b[1;32m (...)\u001b[0m\n\u001b[0;32m 241\u001b[0m 
state_steps,\n\u001b[0;32m 242\u001b[0m )\n\u001b[1;32m--> 244\u001b[0m \u001b[43madam\u001b[49m\u001b[43m(\u001b[49m\n\u001b[0;32m 245\u001b[0m \u001b[43m \u001b[49m\u001b[43mparams_with_grad\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 246\u001b[0m \u001b[43m \u001b[49m\u001b[43mgrads\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 247\u001b[0m \u001b[43m \u001b[49m\u001b[43mexp_avgs\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 248\u001b[0m \u001b[43m \u001b[49m\u001b[43mexp_avg_sqs\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 249\u001b[0m \u001b[43m \u001b[49m\u001b[43mmax_exp_avg_sqs\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 250\u001b[0m \u001b[43m \u001b[49m\u001b[43mstate_steps\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 251\u001b[0m \u001b[43m \u001b[49m\u001b[43mamsgrad\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mgroup\u001b[49m\u001b[43m[\u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43mamsgrad\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m]\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 252\u001b[0m \u001b[43m \u001b[49m\u001b[43mhas_complex\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mhas_complex\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 253\u001b[0m \u001b[43m \u001b[49m\u001b[43mbeta1\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mbeta1\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 254\u001b[0m \u001b[43m \u001b[49m\u001b[43mbeta2\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mbeta2\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 255\u001b[0m \u001b[43m \u001b[49m\u001b[43mlr\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mgroup\u001b[49m\u001b[43m[\u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43mlr\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m]\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 256\u001b[0m \u001b[43m 
\u001b[49m\u001b[43mweight_decay\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mgroup\u001b[49m\u001b[43m[\u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43mweight_decay\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m]\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 257\u001b[0m \u001b[43m \u001b[49m\u001b[43meps\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mgroup\u001b[49m\u001b[43m[\u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43meps\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m]\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 258\u001b[0m \u001b[43m \u001b[49m\u001b[43mmaximize\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mgroup\u001b[49m\u001b[43m[\u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43mmaximize\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m]\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 259\u001b[0m \u001b[43m \u001b[49m\u001b[43mforeach\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mgroup\u001b[49m\u001b[43m[\u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43mforeach\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m]\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 260\u001b[0m \u001b[43m \u001b[49m\u001b[43mcapturable\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mgroup\u001b[49m\u001b[43m[\u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43mcapturable\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m]\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 261\u001b[0m \u001b[43m \u001b[49m\u001b[43mdifferentiable\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mgroup\u001b[49m\u001b[43m[\u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43mdifferentiable\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m]\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 262\u001b[0m \u001b[43m 
\u001b[49m\u001b[43mfused\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mgroup\u001b[49m\u001b[43m[\u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43mfused\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m]\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 263\u001b[0m \u001b[43m \u001b[49m\u001b[43mgrad_scale\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[38;5;28;43mgetattr\u001b[39;49m\u001b[43m(\u001b[49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43mgrad_scale\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;28;43;01mNone\u001b[39;49;00m\u001b[43m)\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 264\u001b[0m \u001b[43m \u001b[49m\u001b[43mfound_inf\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[38;5;28;43mgetattr\u001b[39;49m\u001b[43m(\u001b[49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43mfound_inf\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;28;43;01mNone\u001b[39;49;00m\u001b[43m)\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 265\u001b[0m \u001b[43m \u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m 267\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m loss\n", + "File \u001b[1;32mc:\\Users\\leo.breedt\\miniconda3\\envs\\neptune_scale_py_312_base\\Lib\\site-packages\\torch\\optim\\optimizer.py:154\u001b[0m, in \u001b[0;36m_disable_dynamo_if_unsupported..wrapper..maybe_fallback\u001b[1;34m(*args, **kwargs)\u001b[0m\n\u001b[0;32m 152\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m disabled_func(\u001b[38;5;241m*\u001b[39margs, \u001b[38;5;241m*\u001b[39m\u001b[38;5;241m*\u001b[39mkwargs)\n\u001b[0;32m 153\u001b[0m \u001b[38;5;28;01melse\u001b[39;00m:\n\u001b[1;32m--> 154\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m 
\u001b[43mfunc\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n", + "File \u001b[1;32mc:\\Users\\leo.breedt\\miniconda3\\envs\\neptune_scale_py_312_base\\Lib\\site-packages\\torch\\optim\\adam.py:876\u001b[0m, in \u001b[0;36madam\u001b[1;34m(params, grads, exp_avgs, exp_avg_sqs, max_exp_avg_sqs, state_steps, foreach, capturable, differentiable, fused, grad_scale, found_inf, has_complex, amsgrad, beta1, beta2, lr, weight_decay, eps, maximize)\u001b[0m\n\u001b[0;32m 873\u001b[0m \u001b[38;5;28;01melse\u001b[39;00m:\n\u001b[0;32m 874\u001b[0m func \u001b[38;5;241m=\u001b[39m _single_tensor_adam\n\u001b[1;32m--> 876\u001b[0m \u001b[43mfunc\u001b[49m\u001b[43m(\u001b[49m\n\u001b[0;32m 877\u001b[0m \u001b[43m \u001b[49m\u001b[43mparams\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 878\u001b[0m \u001b[43m \u001b[49m\u001b[43mgrads\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 879\u001b[0m \u001b[43m \u001b[49m\u001b[43mexp_avgs\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 880\u001b[0m \u001b[43m \u001b[49m\u001b[43mexp_avg_sqs\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 881\u001b[0m \u001b[43m \u001b[49m\u001b[43mmax_exp_avg_sqs\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 882\u001b[0m \u001b[43m \u001b[49m\u001b[43mstate_steps\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 883\u001b[0m \u001b[43m \u001b[49m\u001b[43mamsgrad\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mamsgrad\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 884\u001b[0m \u001b[43m \u001b[49m\u001b[43mhas_complex\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mhas_complex\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 885\u001b[0m \u001b[43m \u001b[49m\u001b[43mbeta1\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mbeta1\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 886\u001b[0m \u001b[43m 
\u001b[49m\u001b[43mbeta2\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mbeta2\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 887\u001b[0m \u001b[43m \u001b[49m\u001b[43mlr\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mlr\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 888\u001b[0m \u001b[43m \u001b[49m\u001b[43mweight_decay\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mweight_decay\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 889\u001b[0m \u001b[43m \u001b[49m\u001b[43meps\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43meps\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 890\u001b[0m \u001b[43m \u001b[49m\u001b[43mmaximize\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mmaximize\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 891\u001b[0m \u001b[43m \u001b[49m\u001b[43mcapturable\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mcapturable\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 892\u001b[0m \u001b[43m \u001b[49m\u001b[43mdifferentiable\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mdifferentiable\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 893\u001b[0m \u001b[43m \u001b[49m\u001b[43mgrad_scale\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mgrad_scale\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 894\u001b[0m \u001b[43m \u001b[49m\u001b[43mfound_inf\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mfound_inf\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 895\u001b[0m \u001b[43m\u001b[49m\u001b[43m)\u001b[49m\n", + "File \u001b[1;32mc:\\Users\\leo.breedt\\miniconda3\\envs\\neptune_scale_py_312_base\\Lib\\site-packages\\torch\\optim\\adam.py:476\u001b[0m, in \u001b[0;36m_single_tensor_adam\u001b[1;34m(params, grads, exp_avgs, exp_avg_sqs, max_exp_avg_sqs, state_steps, grad_scale, found_inf, amsgrad, has_complex, beta1, beta2, lr, weight_decay, eps, maximize, capturable, differentiable)\u001b[0m\n\u001b[0;32m 474\u001b[0m denom \u001b[38;5;241m=\u001b[39m (max_exp_avg_sqs[i]\u001b[38;5;241m.\u001b[39msqrt() \u001b[38;5;241m/\u001b[39m 
bias_correction2_sqrt)\u001b[38;5;241m.\u001b[39madd_(eps)\n\u001b[0;32m 475\u001b[0m \u001b[38;5;28;01melse\u001b[39;00m:\n\u001b[1;32m--> 476\u001b[0m denom \u001b[38;5;241m=\u001b[39m (\u001b[43mexp_avg_sq\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43msqrt\u001b[49m\u001b[43m(\u001b[49m\u001b[43m)\u001b[49m \u001b[38;5;241m/\u001b[39m bias_correction2_sqrt)\u001b[38;5;241m.\u001b[39madd_(eps)\n\u001b[0;32m 478\u001b[0m param\u001b[38;5;241m.\u001b[39maddcdiv_(exp_avg, denom, value\u001b[38;5;241m=\u001b[39m\u001b[38;5;241m-\u001b[39mstep_size)\n\u001b[0;32m 480\u001b[0m \u001b[38;5;66;03m# Lastly, switch back to complex view\u001b[39;00m\n", "\u001b[1;31mKeyboardInterrupt\u001b[0m: " ] } @@ -402,11 +403,14 @@ "source": [ "activation_dict_mean = {}\n", "activation_dict_std = {}\n", + "grad_norms = {}\n", "\n", "# Training loop\n", "for epoch in range(params[\"epochs\"]):\n", " optimizer.zero_grad()\n", + "\n", " model.clear_activations()\n", + " model.clear_gradients()\n", "\n", " # Forward pass\n", " output = model(train_input)\n", @@ -431,13 +435,18 @@ " activation_dict_mean[f\"layers/layer_{name}/activation_mean\"] = activation[0].mean().item()\n", " activation_dict_std[f\"layers/layer_{name}/activation_std\"] = activation[0].std().item()\n", "\n", + " # Track gradient norm\n", + " for layer, gradient in model.get_gradients().items():\n", + " grad_norms[f\"grad_norm/{name}\"] = gradient.norm().item()\n", + "\n", " # Output loss for this epoch\n", " run.log_metrics(\n", " data = {\n", " \"training/loss\": loss.item(),\n", " \"validation/loss\": val_loss,\n", " **activation_dict_mean,\n", - " **activation_dict_std\n", + " **activation_dict_std,\n", + " **grad_norms\n", " },\n", " step = epoch\n", " )\n", @@ -449,7 +458,7 @@ }, { "cell_type": "code", - "execution_count": 10, + "execution_count": 28, "metadata": {}, "outputs": [ { From d4d160c5f4dae47a760ea04dbbf3c18317b295fd Mon Sep 17 00:00:00 2001 From: LeoRoccoBreedt Date: Mon, 3 Mar 2025 16:18:07 +0100 
Subject: [PATCH 019/125] refactor: add gradient tracking per epoch --- .../pytorch_text_model_debugging.ipynb | 25 ++++++++++++++----- 1 file changed, 19 insertions(+), 6 deletions(-) diff --git a/integrations-and-supported-tools/pytorch/pytorch_text_model_debugging.ipynb b/integrations-and-supported-tools/pytorch/pytorch_text_model_debugging.ipynb index 400e331..754c309 100644 --- a/integrations-and-supported-tools/pytorch/pytorch_text_model_debugging.ipynb +++ b/integrations-and-supported-tools/pytorch/pytorch_text_model_debugging.ipynb @@ -22,8 +22,8 @@ "outputs": [], "source": [ "# TODO - improve dataset to be more realistic\n", - "# TODO - capture the folowings; attentions, gradients - weights, loss (agg and per layer), learning rate (per layer if needed)\n", - " # complete - activations, grad norms\n", + "# TODO - capture the folowings; attentions, , loss (per layer), learning rate (per layer if needed)\n", + " # complete - activations, grad norms, gradients - weights+biases, loss - agg\n", "# TODO - capture per layer losses\n", "# TODO - increase number of layers\n", "# TODO - Adding dropout to demonstrate the effects of regularization\n", @@ -56,7 +56,7 @@ }, { "cell_type": "code", - "execution_count": 29, + "execution_count": 37, "metadata": {}, "outputs": [], "source": [ @@ -294,7 +294,7 @@ }, { "cell_type": "code", - "execution_count": 36, + "execution_count": 38, "metadata": {}, "outputs": [ { @@ -404,6 +404,9 @@ "activation_dict_mean = {}\n", "activation_dict_std = {}\n", "grad_norms = {}\n", + "gradients_mean = {}\n", + "gradients_std = {}\n", + "gradient_norms = {}\n", "\n", "# Training loop\n", "for epoch in range(params[\"epochs\"]):\n", @@ -439,6 +442,13 @@ " for layer, gradient in model.get_gradients().items():\n", " grad_norms[f\"grad_norm/{name}\"] = gradient.norm().item()\n", "\n", + " # Track gradients per layer at each epoch\n", + " for name, param in model.named_parameters():\n", + " if param is not None:\n", + " 
gradients_std[f\"layers/layer_{name}_std\"] = param.grad.std().item()\n", + " gradients_mean[f\"layers/layer_{name}_mean\"] = param.grad.mean().item()\n", + " gradient_norms[f\"layers/layer_{name}_norm\"] = param.grad.norm().item() # L2 norm (Euclidean norm) of the gradients\n", + "\n", " # Output loss for this epoch\n", " run.log_metrics(\n", " data = {\n", @@ -446,7 +456,10 @@ " \"validation/loss\": val_loss,\n", " **activation_dict_mean,\n", " **activation_dict_std,\n", - " **grad_norms\n", + " **grad_norms,\n", + " **gradients_std,\n", + " **gradients_mean,\n", + " **gradient_norms\n", " },\n", " step = epoch\n", " )\n", @@ -458,7 +471,7 @@ }, { "cell_type": "code", - "execution_count": 28, + "execution_count": 39, "metadata": {}, "outputs": [ { From 890bf2652ff5714dc6df774ea475f06505d00b1f Mon Sep 17 00:00:00 2001 From: LeoRoccoBreedt Date: Mon, 3 Mar 2025 16:26:01 +0100 Subject: [PATCH 020/125] chore: remove uneeded section --- .../pytorch_text_model_debugging.ipynb | 20 +------------------ 1 file changed, 1 insertion(+), 19 deletions(-) diff --git a/integrations-and-supported-tools/pytorch/pytorch_text_model_debugging.ipynb b/integrations-and-supported-tools/pytorch/pytorch_text_model_debugging.ipynb index 754c309..7683c79 100644 --- a/integrations-and-supported-tools/pytorch/pytorch_text_model_debugging.ipynb +++ b/integrations-and-supported-tools/pytorch/pytorch_text_model_debugging.ipynb @@ -22,7 +22,7 @@ "outputs": [], "source": [ "# TODO - improve dataset to be more realistic\n", - "# TODO - capture the folowings; attentions, , loss (per layer), learning rate (per layer if needed)\n", + "# TODO - capture the folowings; attentions, learning rate (per layer if needed)\n", " # complete - activations, grad norms, gradients - weights+biases, loss - agg\n", "# TODO - capture per layer losses\n", "# TODO - increase number of layers\n", @@ -228,24 +228,6 @@ " return hook" ] }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": 
[], - "source": [ - "params = {\n", - " \"optimizer\": \"Adam\",\n", - " \"batch_size\": 256,\n", - " \"learning_rate\": 0.01,\n", - " \"epochs\": 5, \n", - " \"device\": torch.device(\"cuda\" if torch.cuda.is_available() else \"cpu\"),\n", - " \"input_features\": 256,\n", - " \"n_classes\": 10,\n", - " \"input_size\": 28 * 28\n", - "}" - ] - }, { "cell_type": "code", "execution_count": null, From ebd34c7f55854dd80fe103bfdc4328b17e14ac71 Mon Sep 17 00:00:00 2001 From: LeoRoccoBreedt Date: Tue, 4 Mar 2025 10:44:51 +0100 Subject: [PATCH 021/125] refactor: add fully connected layer to model for more complexity --- .../pytorch_text_model_debugging.ipynb | 245 ++++++++++-------- 1 file changed, 144 insertions(+), 101 deletions(-) diff --git a/integrations-and-supported-tools/pytorch/pytorch_text_model_debugging.ipynb b/integrations-and-supported-tools/pytorch/pytorch_text_model_debugging.ipynb index 7683c79..adf4411 100644 --- a/integrations-and-supported-tools/pytorch/pytorch_text_model_debugging.ipynb +++ b/integrations-and-supported-tools/pytorch/pytorch_text_model_debugging.ipynb @@ -32,7 +32,7 @@ }, { "cell_type": "code", - "execution_count": 21, + "execution_count": 121, "metadata": {}, "outputs": [], "source": [ @@ -48,7 +48,7 @@ " \"input_features\": 256,\n", " \"n_classes\": 10, # TODO - remove\n", " \"input_size\": 28 * 28, # TODO - remove\n", - " \"vocab_size\": 200000,\n", + " \"vocab_size\": 300000,\n", " \"embed_size\": 1000,\n", " \"hidden_size\": 256 # hidden size for the LSTM\n", "}" @@ -56,7 +56,7 @@ }, { "cell_type": "code", - "execution_count": 37, + "execution_count": 122, "metadata": {}, "outputs": [], "source": [ @@ -129,7 +129,7 @@ }, { "cell_type": "code", - "execution_count": 23, + "execution_count": 123, "metadata": {}, "outputs": [], "source": [ @@ -148,7 +148,7 @@ }, { "cell_type": "code", - "execution_count": 34, + "execution_count": 124, "metadata": {}, "outputs": [], "source": [ @@ -157,8 +157,9 @@ " def __init__(self, vocab_size, 
embed_size, hidden_size):\n", " super(SimpleLLM, self).__init__()\n", " self.embedding = nn.Embedding(vocab_size, embed_size)\n", - " self.lstm = nn.LSTM(embed_size, hidden_size, batch_first=True)\n", - " self.fc = nn.Linear(hidden_size, vocab_size)\n", + " self.lstm = nn.LSTM(embed_size, 512, batch_first=True)\n", + " self.fc1 = nn.Linear(512, hidden_size)\n", + " self.fc2 = nn.Linear(hidden_size, vocab_size)\n", "\n", " # Initialize activations dictionary to store layer activations\n", " self.activations = {}\n", @@ -166,16 +167,19 @@ " ## Registering hooks to track activations\n", " self.hooks = []\n", " self.hooks.append(self.lstm.register_forward_hook(self.save_activation(\"lstm\")))\n", - " self.hooks.append(self.fc.register_forward_hook(self.save_activation(\"fc\")))\n", + " self.hooks.append(self.fc1.register_forward_hook(self.save_activation(\"fc1\")))\n", + " self.hooks.append(self.fc2.register_forward_hook(self.save_activation(\"fc2\")))\n", "\n", " ## Registering hooks to track gradients\n", " self.hooks.append(self.lstm.register_full_backward_hook(self.save_gradient(\"lstm\")))\n", - " self.hooks.append(self.fc.register_full_backward_hook(self.save_gradient(\"fc\")))\n", + " self.hooks.append(self.fc1.register_full_backward_hook(self.save_gradient(\"fc1\")))\n", + " self.hooks.append(self.fc2.register_full_backward_hook(self.save_gradient(\"fc2\")))\n", " \n", " def forward(self, x):\n", " x = self.embedding(x)\n", " lstm_out, _ = self.lstm(x) # LSTM returns output and hidden/cell state tuple\n", - " out = self.fc(lstm_out)\n", + " out = self.fc1(lstm_out)\n", + " out = self.fc2(out)\n", " return out\n", " \n", " # Function to save activations\n", @@ -249,7 +253,7 @@ }, { "cell_type": "code", - "execution_count": 25, + "execution_count": 125, "metadata": {}, "outputs": [], "source": [ @@ -276,92 +280,136 @@ }, { "cell_type": "code", - "execution_count": 38, + "execution_count": 126, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": 
"stream", "text": [ - "Epoch 1/400, Loss: 11.5970\n", - "Epoch 2/400, Loss: 11.0686\n", - "Epoch 3/400, Loss: 10.5915\n", - "Epoch 4/400, Loss: 10.1422\n", - "Epoch 5/400, Loss: 9.7076\n", - "Epoch 6/400, Loss: 9.2819\n", - "Epoch 7/400, Loss: 8.8626\n", - "Epoch 8/400, Loss: 8.4479\n", - "Epoch 9/400, Loss: 8.0365\n", - "Epoch 10/400, Loss: 7.6276\n", - "Epoch 11/400, Loss: 7.2211\n", - "Epoch 12/400, Loss: 6.8171\n", - "Epoch 13/400, Loss: 6.4153\n", - "Epoch 14/400, Loss: 6.0154\n", - "Epoch 15/400, Loss: 5.6169\n", - "Epoch 16/400, Loss: 5.2196\n", - "Epoch 17/400, Loss: 4.8243\n", - "Epoch 18/400, Loss: 4.4318\n", - "Epoch 19/400, Loss: 4.0431\n", - "Epoch 20/400, Loss: 3.6596\n", - "Epoch 21/400, Loss: 3.2834\n", - "Epoch 22/400, Loss: 2.9174\n", - "Epoch 23/400, Loss: 2.5654\n", - "Epoch 24/400, Loss: 2.2321\n", - "Epoch 25/400, Loss: 1.9223\n", - "Epoch 26/400, Loss: 1.6402\n", - "Epoch 27/400, Loss: 1.3889\n", - "Epoch 28/400, Loss: 1.1696\n", - "Epoch 29/400, Loss: 0.9814\n", - "Epoch 30/400, Loss: 0.8221\n", - "Epoch 31/400, Loss: 0.6882\n", - "Epoch 32/400, Loss: 0.5760\n", - "Epoch 33/400, Loss: 0.4818\n", - "Epoch 34/400, Loss: 0.4025\n", - "Epoch 35/400, Loss: 0.3356\n", - "Epoch 36/400, Loss: 0.2791\n", - "Epoch 37/400, Loss: 0.2315\n", - "Epoch 38/400, Loss: 0.1917\n", - "Epoch 39/400, Loss: 0.1585\n", - "Epoch 40/400, Loss: 0.1311\n", - "Epoch 41/400, Loss: 0.1086\n", - "Epoch 42/400, Loss: 0.0903\n", - "Epoch 43/400, Loss: 0.0755\n", - "Epoch 44/400, Loss: 0.0634\n", - "Epoch 45/400, Loss: 0.0537\n", - "Epoch 46/400, Loss: 0.0457\n", - "Epoch 47/400, Loss: 0.0393\n", - "Epoch 48/400, Loss: 0.0340\n", - "Epoch 49/400, Loss: 0.0297\n", - "Epoch 50/400, Loss: 0.0262\n", - "Epoch 51/400, Loss: 0.0233\n", - "Epoch 52/400, Loss: 0.0209\n", - "Epoch 53/400, Loss: 0.0189\n", - "Epoch 54/400, Loss: 0.0172\n", - "Epoch 55/400, Loss: 0.0158\n", - "Epoch 56/400, Loss: 0.0146\n", - "Epoch 57/400, Loss: 0.0136\n", - "Epoch 58/400, Loss: 0.0128\n", - "Epoch 
59/400, Loss: 0.0120\n", - "Epoch 60/400, Loss: 0.0114\n", - "Epoch 61/400, Loss: 0.0108\n", - "Epoch 62/400, Loss: 0.0103\n", - "Epoch 63/400, Loss: 0.0099\n", - "Epoch 64/400, Loss: 0.0095\n", - "Epoch 65/400, Loss: 0.0091\n", - "Epoch 66/400, Loss: 0.0088\n", - "Epoch 67/400, Loss: 0.0086\n", - "Epoch 68/400, Loss: 0.0083\n", - "Epoch 69/400, Loss: 0.0081\n", - "Epoch 70/400, Loss: 0.0079\n", - "Epoch 71/400, Loss: 0.0077\n", - "Epoch 72/400, Loss: 0.0075\n", - "Epoch 73/400, Loss: 0.0074\n", - "Epoch 74/400, Loss: 0.0072\n", - "Epoch 75/400, Loss: 0.0071\n", - "Epoch 76/400, Loss: 0.0070\n", - "Epoch 77/400, Loss: 0.0068\n", - "Epoch 78/400, Loss: 0.0067\n", - "Epoch 79/400, Loss: 0.0066\n" + "Epoch 1/400, Loss: 12.6022\n", + "Epoch 2/400, Loss: 11.9868\n", + "Epoch 3/400, Loss: 11.3907\n", + "Epoch 4/400, Loss: 10.7466\n", + "Epoch 5/400, Loss: 10.0276\n", + "Epoch 6/400, Loss: 9.2259\n", + "Epoch 7/400, Loss: 8.3386\n", + "Epoch 8/400, Loss: 7.3620\n", + "Epoch 9/400, Loss: 6.2913\n", + "Epoch 10/400, Loss: 5.1245\n", + "Epoch 11/400, Loss: 3.8811\n", + "Epoch 12/400, Loss: 2.6654\n", + "Epoch 13/400, Loss: 1.6767\n", + "Epoch 14/400, Loss: 1.0357\n", + "Epoch 15/400, Loss: 0.6587\n", + "Epoch 16/400, Loss: 0.3819\n", + "Epoch 17/400, Loss: 0.1724\n", + "Epoch 18/400, Loss: 0.0595\n", + "Epoch 19/400, Loss: 0.0203\n", + "Epoch 20/400, Loss: 0.0097\n", + "Epoch 21/400, Loss: 0.0066\n", + "Epoch 22/400, Loss: 0.0053\n", + "Epoch 23/400, Loss: 0.0046\n", + "Epoch 24/400, Loss: 0.0041\n", + "Epoch 25/400, Loss: 0.0037\n", + "Epoch 26/400, Loss: 0.0033\n", + "Epoch 27/400, Loss: 0.0030\n", + "Epoch 28/400, Loss: 0.0027\n", + "Epoch 29/400, Loss: 0.0024\n", + "Epoch 30/400, Loss: 0.0022\n", + "Epoch 31/400, Loss: 0.0020\n", + "Epoch 32/400, Loss: 0.0018\n", + "Epoch 33/400, Loss: 0.0017\n", + "Epoch 34/400, Loss: 0.0015\n", + "Epoch 35/400, Loss: 0.0014\n", + "Epoch 36/400, Loss: 0.0013\n", + "Epoch 37/400, Loss: 0.0012\n", + "Epoch 38/400, Loss: 0.0012\n", + 
"Epoch 39/400, Loss: 0.0011\n", + "Epoch 40/400, Loss: 0.0010\n", + "Epoch 41/400, Loss: 0.0010\n", + "Epoch 42/400, Loss: 0.0009\n", + "Epoch 43/400, Loss: 0.0009\n", + "Epoch 44/400, Loss: 0.0008\n", + "Epoch 45/400, Loss: 0.0008\n", + "Epoch 46/400, Loss: 0.0008\n", + "Epoch 47/400, Loss: 0.0007\n", + "Epoch 48/400, Loss: 0.0007\n", + "Epoch 49/400, Loss: 0.0007\n", + "Epoch 50/400, Loss: 0.0007\n", + "Epoch 51/400, Loss: 0.0006\n", + "Epoch 52/400, Loss: 0.0006\n", + "Epoch 53/400, Loss: 0.0006\n", + "Epoch 54/400, Loss: 0.0006\n", + "Epoch 55/400, Loss: 0.0006\n", + "Epoch 56/400, Loss: 0.0005\n", + "Epoch 57/400, Loss: 0.0005\n", + "Epoch 58/400, Loss: 0.0005\n", + "Epoch 59/400, Loss: 0.0005\n", + "Epoch 60/400, Loss: 0.0005\n", + "Epoch 61/400, Loss: 0.0005\n", + "Epoch 62/400, Loss: 0.0005\n", + "Epoch 63/400, Loss: 0.0005\n", + "Epoch 64/400, Loss: 0.0005\n", + "Epoch 65/400, Loss: 0.0005\n", + "Epoch 66/400, Loss: 0.0004\n", + "Epoch 67/400, Loss: 0.0004\n", + "Epoch 68/400, Loss: 0.0004\n", + "Epoch 69/400, Loss: 0.0004\n", + "Epoch 70/400, Loss: 0.0004\n", + "Epoch 71/400, Loss: 0.0004\n", + "Epoch 72/400, Loss: 0.0004\n", + "Epoch 73/400, Loss: 0.0004\n", + "Epoch 74/400, Loss: 0.0004\n", + "Epoch 75/400, Loss: 0.0004\n", + "Epoch 76/400, Loss: 0.0004\n", + "Epoch 77/400, Loss: 0.0004\n", + "Epoch 78/400, Loss: 0.0004\n", + "Epoch 79/400, Loss: 0.0004\n", + "Epoch 80/400, Loss: 0.0004\n", + "Epoch 81/400, Loss: 0.0004\n", + "Epoch 82/400, Loss: 0.0004\n", + "Epoch 83/400, Loss: 0.0004\n", + "Epoch 84/400, Loss: 0.0004\n", + "Epoch 85/400, Loss: 0.0004\n", + "Epoch 86/400, Loss: 0.0003\n", + "Epoch 87/400, Loss: 0.0003\n", + "Epoch 88/400, Loss: 0.0003\n", + "Epoch 89/400, Loss: 0.0003\n", + "Epoch 90/400, Loss: 0.0003\n", + "Epoch 91/400, Loss: 0.0003\n", + "Epoch 92/400, Loss: 0.0003\n", + "Epoch 93/400, Loss: 0.0003\n", + "Epoch 94/400, Loss: 0.0003\n", + "Epoch 95/400, Loss: 0.0003\n", + "Epoch 96/400, Loss: 0.0003\n", + "Epoch 97/400, Loss: 
0.0003\n", + "Epoch 98/400, Loss: 0.0003\n", + "Epoch 99/400, Loss: 0.0003\n", + "Epoch 100/400, Loss: 0.0003\n", + "Epoch 101/400, Loss: 0.0003\n", + "Epoch 102/400, Loss: 0.0003\n", + "Epoch 103/400, Loss: 0.0003\n", + "Epoch 104/400, Loss: 0.0003\n", + "Epoch 105/400, Loss: 0.0003\n", + "Epoch 106/400, Loss: 0.0003\n", + "Epoch 107/400, Loss: 0.0003\n", + "Epoch 108/400, Loss: 0.0003\n", + "Epoch 109/400, Loss: 0.0003\n", + "Epoch 110/400, Loss: 0.0003\n", + "Epoch 111/400, Loss: 0.0003\n", + "Epoch 112/400, Loss: 0.0003\n", + "Epoch 113/400, Loss: 0.0003\n", + "Epoch 114/400, Loss: 0.0003\n", + "Epoch 115/400, Loss: 0.0003\n", + "Epoch 116/400, Loss: 0.0003\n", + "Epoch 117/400, Loss: 0.0003\n", + "Epoch 118/400, Loss: 0.0003\n", + "Epoch 119/400, Loss: 0.0003\n", + "Epoch 120/400, Loss: 0.0003\n", + "Epoch 121/400, Loss: 0.0003\n", + "Epoch 122/400, Loss: 0.0003\n", + "Epoch 123/400, Loss: 0.0003\n" ] }, { @@ -371,13 +419,7 @@ "traceback": [ "\u001b[1;31m---------------------------------------------------------------------------\u001b[0m", "\u001b[1;31mKeyboardInterrupt\u001b[0m Traceback (most recent call last)", - "Cell \u001b[1;32mIn[36], line 26\u001b[0m\n\u001b[0;32m 23\u001b[0m loss\u001b[38;5;241m.\u001b[39mbackward()\n\u001b[0;32m 25\u001b[0m \u001b[38;5;66;03m# Optimizer step\u001b[39;00m\n\u001b[1;32m---> 26\u001b[0m \u001b[43moptimizer\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mstep\u001b[49m\u001b[43m(\u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m 28\u001b[0m val_loss \u001b[38;5;241m=\u001b[39m evaluate_model(model, val_input, val_target, params[\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mvocab_size\u001b[39m\u001b[38;5;124m\"\u001b[39m])\n\u001b[0;32m 30\u001b[0m \u001b[38;5;66;03m# Track activations\u001b[39;00m\n", - "File \u001b[1;32mc:\\Users\\leo.breedt\\miniconda3\\envs\\neptune_scale_py_312_base\\Lib\\site-packages\\torch\\optim\\optimizer.py:493\u001b[0m, in \u001b[0;36mOptimizer.profile_hook_step..wrapper\u001b[1;34m(*args, 
**kwargs)\u001b[0m\n\u001b[0;32m 488\u001b[0m \u001b[38;5;28;01melse\u001b[39;00m:\n\u001b[0;32m 489\u001b[0m \u001b[38;5;28;01mraise\u001b[39;00m \u001b[38;5;167;01mRuntimeError\u001b[39;00m(\n\u001b[0;32m 490\u001b[0m \u001b[38;5;124mf\u001b[39m\u001b[38;5;124m\"\u001b[39m\u001b[38;5;132;01m{\u001b[39;00mfunc\u001b[38;5;132;01m}\u001b[39;00m\u001b[38;5;124m must return None or a tuple of (new_args, new_kwargs), but got \u001b[39m\u001b[38;5;132;01m{\u001b[39;00mresult\u001b[38;5;132;01m}\u001b[39;00m\u001b[38;5;124m.\u001b[39m\u001b[38;5;124m\"\u001b[39m\n\u001b[0;32m 491\u001b[0m )\n\u001b[1;32m--> 493\u001b[0m out \u001b[38;5;241m=\u001b[39m \u001b[43mfunc\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m 494\u001b[0m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_optimizer_step_code()\n\u001b[0;32m 496\u001b[0m \u001b[38;5;66;03m# call optimizer step post hooks\u001b[39;00m\n", - "File \u001b[1;32mc:\\Users\\leo.breedt\\miniconda3\\envs\\neptune_scale_py_312_base\\Lib\\site-packages\\torch\\optim\\optimizer.py:91\u001b[0m, in \u001b[0;36m_use_grad_for_differentiable.._use_grad\u001b[1;34m(self, *args, **kwargs)\u001b[0m\n\u001b[0;32m 89\u001b[0m torch\u001b[38;5;241m.\u001b[39mset_grad_enabled(\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mdefaults[\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mdifferentiable\u001b[39m\u001b[38;5;124m\"\u001b[39m])\n\u001b[0;32m 90\u001b[0m torch\u001b[38;5;241m.\u001b[39m_dynamo\u001b[38;5;241m.\u001b[39mgraph_break()\n\u001b[1;32m---> 91\u001b[0m ret \u001b[38;5;241m=\u001b[39m \u001b[43mfunc\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m 
\u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m 92\u001b[0m \u001b[38;5;28;01mfinally\u001b[39;00m:\n\u001b[0;32m 93\u001b[0m torch\u001b[38;5;241m.\u001b[39m_dynamo\u001b[38;5;241m.\u001b[39mgraph_break()\n", - "File \u001b[1;32mc:\\Users\\leo.breedt\\miniconda3\\envs\\neptune_scale_py_312_base\\Lib\\site-packages\\torch\\optim\\adam.py:244\u001b[0m, in \u001b[0;36mAdam.step\u001b[1;34m(self, closure)\u001b[0m\n\u001b[0;32m 232\u001b[0m beta1, beta2 \u001b[38;5;241m=\u001b[39m group[\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mbetas\u001b[39m\u001b[38;5;124m\"\u001b[39m]\n\u001b[0;32m 234\u001b[0m has_complex \u001b[38;5;241m=\u001b[39m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_init_group(\n\u001b[0;32m 235\u001b[0m group,\n\u001b[0;32m 236\u001b[0m params_with_grad,\n\u001b[1;32m (...)\u001b[0m\n\u001b[0;32m 241\u001b[0m state_steps,\n\u001b[0;32m 242\u001b[0m )\n\u001b[1;32m--> 244\u001b[0m \u001b[43madam\u001b[49m\u001b[43m(\u001b[49m\n\u001b[0;32m 245\u001b[0m \u001b[43m \u001b[49m\u001b[43mparams_with_grad\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 246\u001b[0m \u001b[43m \u001b[49m\u001b[43mgrads\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 247\u001b[0m \u001b[43m \u001b[49m\u001b[43mexp_avgs\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 248\u001b[0m \u001b[43m \u001b[49m\u001b[43mexp_avg_sqs\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 249\u001b[0m \u001b[43m \u001b[49m\u001b[43mmax_exp_avg_sqs\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 250\u001b[0m \u001b[43m \u001b[49m\u001b[43mstate_steps\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 251\u001b[0m \u001b[43m \u001b[49m\u001b[43mamsgrad\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mgroup\u001b[49m\u001b[43m[\u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43mamsgrad\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m]\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 252\u001b[0m 
\u001b[43m \u001b[49m\u001b[43mhas_complex\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mhas_complex\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 253\u001b[0m \u001b[43m \u001b[49m\u001b[43mbeta1\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mbeta1\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 254\u001b[0m \u001b[43m \u001b[49m\u001b[43mbeta2\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mbeta2\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 255\u001b[0m \u001b[43m \u001b[49m\u001b[43mlr\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mgroup\u001b[49m\u001b[43m[\u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43mlr\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m]\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 256\u001b[0m \u001b[43m \u001b[49m\u001b[43mweight_decay\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mgroup\u001b[49m\u001b[43m[\u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43mweight_decay\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m]\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 257\u001b[0m \u001b[43m \u001b[49m\u001b[43meps\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mgroup\u001b[49m\u001b[43m[\u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43meps\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m]\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 258\u001b[0m \u001b[43m \u001b[49m\u001b[43mmaximize\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mgroup\u001b[49m\u001b[43m[\u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43mmaximize\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m]\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 259\u001b[0m \u001b[43m \u001b[49m\u001b[43mforeach\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mgroup\u001b[49m\u001b[43m[\u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43mforeach\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m]\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 260\u001b[0m \u001b[43m 
\u001b[49m\u001b[43mcapturable\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mgroup\u001b[49m\u001b[43m[\u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43mcapturable\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m]\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 261\u001b[0m \u001b[43m \u001b[49m\u001b[43mdifferentiable\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mgroup\u001b[49m\u001b[43m[\u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43mdifferentiable\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m]\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 262\u001b[0m \u001b[43m \u001b[49m\u001b[43mfused\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mgroup\u001b[49m\u001b[43m[\u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43mfused\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m]\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 263\u001b[0m \u001b[43m \u001b[49m\u001b[43mgrad_scale\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[38;5;28;43mgetattr\u001b[39;49m\u001b[43m(\u001b[49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43mgrad_scale\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;28;43;01mNone\u001b[39;49;00m\u001b[43m)\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 264\u001b[0m \u001b[43m \u001b[49m\u001b[43mfound_inf\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[38;5;28;43mgetattr\u001b[39;49m\u001b[43m(\u001b[49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43mfound_inf\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;28;43;01mNone\u001b[39;49;00m\u001b[43m)\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 265\u001b[0m \u001b[43m \u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m 267\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m loss\n", - 
"File \u001b[1;32mc:\\Users\\leo.breedt\\miniconda3\\envs\\neptune_scale_py_312_base\\Lib\\site-packages\\torch\\optim\\optimizer.py:154\u001b[0m, in \u001b[0;36m_disable_dynamo_if_unsupported..wrapper..maybe_fallback\u001b[1;34m(*args, **kwargs)\u001b[0m\n\u001b[0;32m 152\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m disabled_func(\u001b[38;5;241m*\u001b[39margs, \u001b[38;5;241m*\u001b[39m\u001b[38;5;241m*\u001b[39mkwargs)\n\u001b[0;32m 153\u001b[0m \u001b[38;5;28;01melse\u001b[39;00m:\n\u001b[1;32m--> 154\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[43mfunc\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n", - "File \u001b[1;32mc:\\Users\\leo.breedt\\miniconda3\\envs\\neptune_scale_py_312_base\\Lib\\site-packages\\torch\\optim\\adam.py:876\u001b[0m, in \u001b[0;36madam\u001b[1;34m(params, grads, exp_avgs, exp_avg_sqs, max_exp_avg_sqs, state_steps, foreach, capturable, differentiable, fused, grad_scale, found_inf, has_complex, amsgrad, beta1, beta2, lr, weight_decay, eps, maximize)\u001b[0m\n\u001b[0;32m 873\u001b[0m \u001b[38;5;28;01melse\u001b[39;00m:\n\u001b[0;32m 874\u001b[0m func \u001b[38;5;241m=\u001b[39m _single_tensor_adam\n\u001b[1;32m--> 876\u001b[0m \u001b[43mfunc\u001b[49m\u001b[43m(\u001b[49m\n\u001b[0;32m 877\u001b[0m \u001b[43m \u001b[49m\u001b[43mparams\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 878\u001b[0m \u001b[43m \u001b[49m\u001b[43mgrads\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 879\u001b[0m \u001b[43m \u001b[49m\u001b[43mexp_avgs\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 880\u001b[0m \u001b[43m \u001b[49m\u001b[43mexp_avg_sqs\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 881\u001b[0m \u001b[43m \u001b[49m\u001b[43mmax_exp_avg_sqs\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 882\u001b[0m \u001b[43m 
\u001b[49m\u001b[43mstate_steps\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 883\u001b[0m \u001b[43m \u001b[49m\u001b[43mamsgrad\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mamsgrad\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 884\u001b[0m \u001b[43m \u001b[49m\u001b[43mhas_complex\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mhas_complex\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 885\u001b[0m \u001b[43m \u001b[49m\u001b[43mbeta1\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mbeta1\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 886\u001b[0m \u001b[43m \u001b[49m\u001b[43mbeta2\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mbeta2\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 887\u001b[0m \u001b[43m \u001b[49m\u001b[43mlr\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mlr\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 888\u001b[0m \u001b[43m \u001b[49m\u001b[43mweight_decay\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mweight_decay\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 889\u001b[0m \u001b[43m \u001b[49m\u001b[43meps\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43meps\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 890\u001b[0m \u001b[43m \u001b[49m\u001b[43mmaximize\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mmaximize\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 891\u001b[0m \u001b[43m \u001b[49m\u001b[43mcapturable\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mcapturable\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 892\u001b[0m \u001b[43m \u001b[49m\u001b[43mdifferentiable\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mdifferentiable\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 893\u001b[0m \u001b[43m \u001b[49m\u001b[43mgrad_scale\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mgrad_scale\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 894\u001b[0m \u001b[43m \u001b[49m\u001b[43mfound_inf\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mfound_inf\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 895\u001b[0m 
\u001b[43m\u001b[49m\u001b[43m)\u001b[49m\n", - "File \u001b[1;32mc:\\Users\\leo.breedt\\miniconda3\\envs\\neptune_scale_py_312_base\\Lib\\site-packages\\torch\\optim\\adam.py:476\u001b[0m, in \u001b[0;36m_single_tensor_adam\u001b[1;34m(params, grads, exp_avgs, exp_avg_sqs, max_exp_avg_sqs, state_steps, grad_scale, found_inf, amsgrad, has_complex, beta1, beta2, lr, weight_decay, eps, maximize, capturable, differentiable)\u001b[0m\n\u001b[0;32m 474\u001b[0m denom \u001b[38;5;241m=\u001b[39m (max_exp_avg_sqs[i]\u001b[38;5;241m.\u001b[39msqrt() \u001b[38;5;241m/\u001b[39m bias_correction2_sqrt)\u001b[38;5;241m.\u001b[39madd_(eps)\n\u001b[0;32m 475\u001b[0m \u001b[38;5;28;01melse\u001b[39;00m:\n\u001b[1;32m--> 476\u001b[0m denom \u001b[38;5;241m=\u001b[39m (\u001b[43mexp_avg_sq\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43msqrt\u001b[49m\u001b[43m(\u001b[49m\u001b[43m)\u001b[49m \u001b[38;5;241m/\u001b[39m bias_correction2_sqrt)\u001b[38;5;241m.\u001b[39madd_(eps)\n\u001b[0;32m 478\u001b[0m param\u001b[38;5;241m.\u001b[39maddcdiv_(exp_avg, denom, value\u001b[38;5;241m=\u001b[39m\u001b[38;5;241m-\u001b[39mstep_size)\n\u001b[0;32m 480\u001b[0m \u001b[38;5;66;03m# Lastly, switch back to complex view\u001b[39;00m\n", + "Cell \u001b[1;32mIn[126], line 45\u001b[0m\n\u001b[0;32m 43\u001b[0m \u001b[38;5;28;01mfor\u001b[39;00m name, param \u001b[38;5;129;01min\u001b[39;00m model\u001b[38;5;241m.\u001b[39mnamed_parameters():\n\u001b[0;32m 44\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m param \u001b[38;5;129;01mis\u001b[39;00m \u001b[38;5;129;01mnot\u001b[39;00m \u001b[38;5;28;01mNone\u001b[39;00m:\n\u001b[1;32m---> 45\u001b[0m gradients_std[\u001b[38;5;124mf\u001b[39m\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mlayers/layer_\u001b[39m\u001b[38;5;132;01m{\u001b[39;00mname\u001b[38;5;132;01m}\u001b[39;00m\u001b[38;5;124m_std\u001b[39m\u001b[38;5;124m\"\u001b[39m] \u001b[38;5;241m=\u001b[39m 
\u001b[43mparam\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mgrad\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mstd\u001b[49m\u001b[43m(\u001b[49m\u001b[43m)\u001b[49m\u001b[38;5;241m.\u001b[39mitem()\n\u001b[0;32m 46\u001b[0m gradients_mean[\u001b[38;5;124mf\u001b[39m\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mlayers/layer_\u001b[39m\u001b[38;5;132;01m{\u001b[39;00mname\u001b[38;5;132;01m}\u001b[39;00m\u001b[38;5;124m_mean\u001b[39m\u001b[38;5;124m\"\u001b[39m] \u001b[38;5;241m=\u001b[39m param\u001b[38;5;241m.\u001b[39mgrad\u001b[38;5;241m.\u001b[39mmean()\u001b[38;5;241m.\u001b[39mitem()\n\u001b[0;32m 47\u001b[0m gradient_norms[\u001b[38;5;124mf\u001b[39m\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mlayers/layer_\u001b[39m\u001b[38;5;132;01m{\u001b[39;00mname\u001b[38;5;132;01m}\u001b[39;00m\u001b[38;5;124m_norm\u001b[39m\u001b[38;5;124m\"\u001b[39m] \u001b[38;5;241m=\u001b[39m param\u001b[38;5;241m.\u001b[39mgrad\u001b[38;5;241m.\u001b[39mnorm()\u001b[38;5;241m.\u001b[39mitem() \u001b[38;5;66;03m# L2 norm (Euclidean norm) of the gradients\u001b[39;00m\n", "\u001b[1;31mKeyboardInterrupt\u001b[0m: " ] } @@ -434,8 +476,9 @@ " # Output loss for this epoch\n", " run.log_metrics(\n", " data = {\n", - " \"training/loss\": loss.item(),\n", - " \"validation/loss\": val_loss,\n", + " \"metrics/train/loss\": loss.item(),\n", + " \"metrics/validation/loss\": val_loss,\n", + " \"epoch/value\": epoch,\n", " **activation_dict_mean,\n", " **activation_dict_std,\n", " **grad_norms,\n", @@ -453,7 +496,7 @@ }, { "cell_type": "code", - "execution_count": 39, + "execution_count": 127, "metadata": {}, "outputs": [ { From ae9abc67673914961c3e9d7c12a7d1fad8a71bf0 Mon Sep 17 00:00:00 2001 From: LeoRoccoBreedt Date: Tue, 4 Mar 2025 14:13:10 +0100 Subject: [PATCH 022/125] chore: fix activation saving for layers --- .../pytorch_image_classification.ipynb | 64 ++++++++----------- 1 file changed, 26 insertions(+), 38 deletions(-) diff --git 
a/integrations-and-supported-tools/pytorch/pytorch_image_classification.ipynb b/integrations-and-supported-tools/pytorch/pytorch_image_classification.ipynb index c36f7bd..0fd0e1c 100644 --- a/integrations-and-supported-tools/pytorch/pytorch_image_classification.ipynb +++ b/integrations-and-supported-tools/pytorch/pytorch_image_classification.ipynb @@ -4,7 +4,8 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "# Neptune + PyTorch\n", + "# Neptune + Pytorch\n", + "## Logging and visualizing debugging metrics in PyTorch\n", "\n", "Introduction\n", "\n", @@ -15,7 +16,7 @@ "- Log standard loss and accuracy metrics to Neptune\n", "- Log debugging metrics during model training such as;\n", " * Activations per layer\n", - " * Gradients (mean and std weights and biases) per layer" + " * Gradients (mean and std) per layer" ] }, { @@ -60,12 +61,13 @@ "# TODO - clean up the evaluation function to exclude tracking gradients\n", "# TODO - do not use group tags\n", "# TODO - track the input features\n", - "# TODO - clean the training loop of commented out code that is unused" + "# TODO - clean the training loop of commented out code that is unused\n", + "# TODO - add batchnormalization and drop out layers to improve the model" ] }, { "cell_type": "code", - "execution_count": 5, + "execution_count": 1, "metadata": {}, "outputs": [], "source": [ @@ -87,7 +89,7 @@ }, { "cell_type": "code", - "execution_count": 91, + "execution_count": 2, "metadata": {}, "outputs": [], "source": [ @@ -113,7 +115,7 @@ }, { "cell_type": "code", - "execution_count": 7, + "execution_count": 3, "metadata": {}, "outputs": [], "source": [ @@ -137,7 +139,7 @@ }, { "cell_type": "code", - "execution_count": 92, + "execution_count": null, "metadata": {}, "outputs": [ { @@ -184,8 +186,8 @@ " self.hooks.append(self.fc1.register_forward_hook(self.save_activation(\"fc1\")))\n", " self.hooks.append(self.fc2.register_forward_hook(self.save_activation(\"fc2\")))\n", " 
self.hooks.append(self.fc3.register_forward_hook(self.save_activation(\"fc3\")))\n", - " self.hooks.append(self.fc1.register_forward_hook(self.save_activation(\"fc4\")))\n", - " self.hooks.append(self.fc1.register_forward_hook(self.save_activation(\"fc5\")))\n", + " self.hooks.append(self.fc4.register_forward_hook(self.save_activation(\"fc4\")))\n", + " self.hooks.append(self.fc5.register_forward_hook(self.save_activation(\"fc5\")))\n", "\n", " def forward(self, x):\n", " x = x.view(-1, params[\"input_size\"]) # Flatten the input image (28x28)\n", @@ -226,7 +228,7 @@ }, { "cell_type": "code", - "execution_count": 80, + "execution_count": 5, "metadata": {}, "outputs": [], "source": [ @@ -265,7 +267,7 @@ }, { "cell_type": "code", - "execution_count": 93, + "execution_count": 6, "metadata": {}, "outputs": [], "source": [ @@ -301,38 +303,24 @@ }, { "cell_type": "code", - "execution_count": 94, + "execution_count": 7, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ - "Epoch [1/5] Training complete. 
Loss: 0.9066, Accuracy: 0.69%\n", - "{'layers/layer_fc1/activation_mean': -27.44853973388672, 'layers/layer_fc4/activation_mean': -27.44853973388672, 'layers/layer_fc5/activation_mean': -27.44853973388672, 'layers/layer_fc2/activation_mean': -3.266420602798462, 'layers/layer_fc3/activation_mean': -3.977348804473877, 'layers/layer_fc1/activation_std': 9.884493827819824, 'layers/layer_fc4/activation_std': 9.884493827819824, 'layers/layer_fc5/activation_std': 9.884493827819824, 'layers/layer_fc2/activation_std': 2.8974952697753906, 'layers/layer_fc3/activation_std': 3.9147353172302246, 'layers/layer_fc1.weight_mean': 1.759609585860744e-05, 'layers/layer_fc1.bias_mean': -2.6617166440701112e-05, 'layers/layer_fc2.weight_mean': 1.845465158112347e-05, 'layers/layer_fc2.bias_mean': 6.048093564459123e-05, 'layers/layer_fc3.weight_mean': 6.554154424520675e-06, 'layers/layer_fc3.bias_mean': 4.6133769501466304e-05, 'layers/layer_fc4.weight_mean': -1.4562309843313415e-05, 'layers/layer_fc4.bias_mean': 1.2838396287406795e-05, 'layers/layer_fc5.weight_mean': 4.2628095497931895e-10, 'layers/layer_fc5.bias_mean': -9.313225884932663e-11, 'layers/layer_fc1.weight_std': 0.0014361083740368485, 'layers/layer_fc1.bias_std': 0.0014619147405028343, 'layers/layer_fc2.weight_std': 0.001974125625565648, 'layers/layer_fc2.bias_std': 0.0011026636930182576, 'layers/layer_fc3.weight_std': 0.0008759713382460177, 'layers/layer_fc3.bias_std': 0.0011844916734844446, 'layers/layer_fc4.weight_std': 0.002118356991559267, 'layers/layer_fc4.bias_std': 0.00178423966281116, 'layers/layer_fc5.weight_std': 0.021144278347492218, 'layers/layer_fc5.bias_std': 0.016225792467594147, 'grad_norm/fc1.weight': 0.6434243321418762, 'grad_norm/fc1.bias': 0.023348789662122726, 'grad_norm/fc2.weight': 0.714738130569458, 'grad_norm/fc2.bias': 0.02496359497308731, 'grad_norm/fc3.weight': 0.31714311242103577, 'grad_norm/fc3.bias': 0.018929213285446167, 'grad_norm/fc4.weight': 0.38346678018569946, 'grad_norm/fc4.bias': 
0.02010788396000862, 'grad_norm/fc5.weight': 0.7561851143836975, 'grad_norm/fc5.bias': 0.04867737740278244}\n", - "Epoch [2/5] Training complete. Loss: 0.2738, Accuracy: 0.92%\n", - "{'layers/layer_fc1/activation_mean': -27.444847106933594, 'layers/layer_fc4/activation_mean': -27.444847106933594, 'layers/layer_fc5/activation_mean': -27.444847106933594, 'layers/layer_fc2/activation_mean': -4.043460845947266, 'layers/layer_fc3/activation_mean': -4.557342529296875, 'layers/layer_fc1/activation_std': 9.927286148071289, 'layers/layer_fc4/activation_std': 9.927286148071289, 'layers/layer_fc5/activation_std': 9.927286148071289, 'layers/layer_fc2/activation_std': 3.500694990158081, 'layers/layer_fc3/activation_std': 4.5128021240234375, 'layers/layer_fc1.weight_mean': 3.658090008684667e-07, 'layers/layer_fc1.bias_mean': 3.3925662137335166e-06, 'layers/layer_fc2.weight_mean': -2.423007288143708e-07, 'layers/layer_fc2.bias_mean': 1.9860681277350523e-05, 'layers/layer_fc3.weight_mean': 5.834312105434947e-06, 'layers/layer_fc3.bias_mean': -6.391452188836411e-06, 'layers/layer_fc4.weight_mean': 7.309272405109368e-06, 'layers/layer_fc4.bias_mean': 3.118724634987302e-06, 'layers/layer_fc5.weight_mean': 3.8910458188823327e-10, 'layers/layer_fc5.bias_mean': 1.3038515822572094e-09, 'layers/layer_fc1.weight_std': 0.0011528775794431567, 'layers/layer_fc1.bias_std': 0.0012104393681511283, 'layers/layer_fc2.weight_std': 0.0022255387157201767, 'layers/layer_fc2.bias_std': 0.0010018462780863047, 'layers/layer_fc3.weight_std': 0.0007322281016968191, 'layers/layer_fc3.bias_std': 0.0008695347351022065, 'layers/layer_fc4.weight_std': 0.0019773344974964857, 'layers/layer_fc4.bias_std': 0.0012858008267357945, 'layers/layer_fc5.weight_std': 0.015919363126158714, 'layers/layer_fc5.bias_std': 0.011463840492069721, 'grad_norm/fc1.weight': 0.5164875984191895, 'grad_norm/fc1.bias': 0.019329242408275604, 'grad_norm/fc2.weight': 0.8057278990745544, 'grad_norm/fc2.bias': 0.02265150472521782, 
'grad_norm/fc3.weight': 0.26510223746299744, 'grad_norm/fc3.bias': 0.013885731808841228, 'grad_norm/fc4.weight': 0.3579327464103699, 'grad_norm/fc4.bias': 0.014490283094346523, 'grad_norm/fc5.weight': 0.5693259239196777, 'grad_norm/fc5.bias': 0.03439152240753174}\n", - "Epoch [3/5] Training complete. Loss: 0.2052, Accuracy: 0.94%\n", - "{'layers/layer_fc1/activation_mean': -27.409822463989258, 'layers/layer_fc4/activation_mean': -27.409822463989258, 'layers/layer_fc5/activation_mean': -27.409822463989258, 'layers/layer_fc2/activation_mean': -4.827888011932373, 'layers/layer_fc3/activation_mean': -5.646310329437256, 'layers/layer_fc1/activation_std': 10.087625503540039, 'layers/layer_fc4/activation_std': 10.087625503540039, 'layers/layer_fc5/activation_std': 10.087625503540039, 'layers/layer_fc2/activation_std': 4.256272792816162, 'layers/layer_fc3/activation_std': 5.8036675453186035, 'layers/layer_fc1.weight_mean': -1.7095830116886646e-05, 'layers/layer_fc1.bias_mean': 2.0519555619102903e-05, 'layers/layer_fc2.weight_mean': -1.0371921234764159e-05, 'layers/layer_fc2.bias_mean': -2.6764431822812185e-05, 'layers/layer_fc3.weight_mean': 1.1725012427632464e-07, 'layers/layer_fc3.bias_mean': -3.2187872420763597e-06, 'layers/layer_fc4.weight_mean': -4.269544206181308e-06, 'layers/layer_fc4.bias_mean': 2.4562015823903494e-05, 'layers/layer_fc5.weight_mean': 5.096634225765229e-10, 'layers/layer_fc5.bias_mean': 1.3038515822572094e-09, 'layers/layer_fc1.weight_std': 0.0010182075202465057, 'layers/layer_fc1.bias_std': 0.0011107329046353698, 'layers/layer_fc2.weight_std': 0.0014369278214871883, 'layers/layer_fc2.bias_std': 0.0006925289635546505, 'layers/layer_fc3.weight_std': 0.0004932511947117746, 'layers/layer_fc3.bias_std': 0.0006599067128263414, 'layers/layer_fc4.weight_std': 0.0011504496214911342, 'layers/layer_fc4.bias_std': 0.001063313102349639, 'layers/layer_fc5.weight_std': 0.010835869237780571, 'layers/layer_fc5.bias_std': 0.010039512068033218, 
'grad_norm/fc1.weight': 0.4562200903892517, 'grad_norm/fc1.bias': 0.017740018665790558, 'grad_norm/fc2.weight': 0.520235002040863, 'grad_norm/fc2.bias': 0.01566654071211815, 'grad_norm/fc3.weight': 0.1785753071308136, 'grad_norm/fc3.bias': 0.010537991300225258, 'grad_norm/fc4.weight': 0.20825187861919403, 'grad_norm/fc4.bias': 0.011986151337623596, 'grad_norm/fc5.weight': 0.3875243663787842, 'grad_norm/fc5.bias': 0.030118538066744804}\n" - ] - }, - { - "ename": "KeyboardInterrupt", - "evalue": "", - "output_type": "error", - "traceback": [ - "\u001b[1;31m---------------------------------------------------------------------------\u001b[0m", - "\u001b[1;31mKeyboardInterrupt\u001b[0m Traceback (most recent call last)", - "Cell \u001b[1;32mIn[94], line 47\u001b[0m\n\u001b[0;32m 43\u001b[0m batch_accuracy \u001b[38;5;241m=\u001b[39m correct_preds \u001b[38;5;241m/\u001b[39m total_preds\n\u001b[0;32m 44\u001b[0m \u001b[38;5;66;03m# print(f\"Epoch [{epoch+1}/{num_epochs}], Step [{batch_idx+1}/{len(train_loader)}], Loss: {loss.item():.4f}, Accuracy: {batch_accuracy:.2f}%\")\u001b[39;00m\n\u001b[0;32m 45\u001b[0m \n\u001b[0;32m 46\u001b[0m \u001b[38;5;66;03m# Validation step per training step\u001b[39;00m\n\u001b[1;32m---> 47\u001b[0m val_loss, val_accuracy \u001b[38;5;241m=\u001b[39m \u001b[43mevaluate\u001b[49m\u001b[43m(\u001b[49m\u001b[43mmodel\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mval_loader\u001b[49m\u001b[43m)\u001b[49m \u001b[38;5;66;03m# Evaluate after each step\u001b[39;00m\n\u001b[0;32m 48\u001b[0m \u001b[38;5;66;03m# print(f\"Validation at step [{batch_idx+1}/{len(train_loader)}] - Loss: {val_loss:.4f}, Accuracy: {val_accuracy:.2f}%\")\u001b[39;00m\n\u001b[0;32m 50\u001b[0m \u001b[38;5;28;01mfor\u001b[39;00m name, param \u001b[38;5;129;01min\u001b[39;00m model\u001b[38;5;241m.\u001b[39mnamed_parameters():\n", - "Cell \u001b[1;32mIn[80], line 8\u001b[0m, in \u001b[0;36mevaluate\u001b[1;34m(model, data_loader, 
track_gradients)\u001b[0m\n\u001b[0;32m 6\u001b[0m epoch_loss \u001b[38;5;241m=\u001b[39m \u001b[38;5;241m0\u001b[39m\n\u001b[0;32m 7\u001b[0m \u001b[38;5;28;01mwith\u001b[39;00m torch\u001b[38;5;241m.\u001b[39mno_grad(): \u001b[38;5;66;03m# Disable gradient tracking during evaluation\u001b[39;00m\n\u001b[1;32m----> 8\u001b[0m \u001b[43m \u001b[49m\u001b[38;5;28;43;01mfor\u001b[39;49;00m\u001b[43m \u001b[49m\u001b[43mdata\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mtarget\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;129;43;01min\u001b[39;49;00m\u001b[43m \u001b[49m\u001b[43mdata_loader\u001b[49m\u001b[43m:\u001b[49m\n\u001b[0;32m 9\u001b[0m \u001b[43m \u001b[49m\u001b[38;5;66;43;03m# Forward pass (with gradient tracking if specified)\u001b[39;49;00m\n\u001b[0;32m 10\u001b[0m \u001b[43m \u001b[49m\u001b[43moutput\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43m \u001b[49m\u001b[43mmodel\u001b[49m\u001b[43m(\u001b[49m\u001b[43mdata\u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m 11\u001b[0m \u001b[43m \u001b[49m\u001b[43mloss\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43m \u001b[49m\u001b[43mcriterion\u001b[49m\u001b[43m(\u001b[49m\u001b[43moutput\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mtarget\u001b[49m\u001b[43m)\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;66;43;03m# Correct loss computation\u001b[39;49;00m\n", - "File \u001b[1;32mc:\\Users\\leo.breedt\\miniconda3\\envs\\neptune_scale_py_312_base\\Lib\\site-packages\\torch\\utils\\data\\dataloader.py:708\u001b[0m, in \u001b[0;36m_BaseDataLoaderIter.__next__\u001b[1;34m(self)\u001b[0m\n\u001b[0;32m 705\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_sampler_iter \u001b[38;5;129;01mis\u001b[39;00m \u001b[38;5;28;01mNone\u001b[39;00m:\n\u001b[0;32m 706\u001b[0m \u001b[38;5;66;03m# TODO(https://github.com/pytorch/pytorch/issues/76750)\u001b[39;00m\n\u001b[0;32m 707\u001b[0m 
\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_reset() \u001b[38;5;66;03m# type: ignore[call-arg]\u001b[39;00m\n\u001b[1;32m--> 708\u001b[0m data \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_next_data\u001b[49m\u001b[43m(\u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m 709\u001b[0m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_num_yielded \u001b[38;5;241m+\u001b[39m\u001b[38;5;241m=\u001b[39m \u001b[38;5;241m1\u001b[39m\n\u001b[0;32m 710\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m (\n\u001b[0;32m 711\u001b[0m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_dataset_kind \u001b[38;5;241m==\u001b[39m _DatasetKind\u001b[38;5;241m.\u001b[39mIterable\n\u001b[0;32m 712\u001b[0m \u001b[38;5;129;01mand\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_IterableDataset_len_called \u001b[38;5;129;01mis\u001b[39;00m \u001b[38;5;129;01mnot\u001b[39;00m \u001b[38;5;28;01mNone\u001b[39;00m\n\u001b[0;32m 713\u001b[0m \u001b[38;5;129;01mand\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_num_yielded \u001b[38;5;241m>\u001b[39m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_IterableDataset_len_called\n\u001b[0;32m 714\u001b[0m ):\n", - "File \u001b[1;32mc:\\Users\\leo.breedt\\miniconda3\\envs\\neptune_scale_py_312_base\\Lib\\site-packages\\torch\\utils\\data\\dataloader.py:764\u001b[0m, in \u001b[0;36m_SingleProcessDataLoaderIter._next_data\u001b[1;34m(self)\u001b[0m\n\u001b[0;32m 762\u001b[0m \u001b[38;5;28;01mdef\u001b[39;00m \u001b[38;5;21m_next_data\u001b[39m(\u001b[38;5;28mself\u001b[39m):\n\u001b[0;32m 763\u001b[0m index \u001b[38;5;241m=\u001b[39m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_next_index() \u001b[38;5;66;03m# may raise StopIteration\u001b[39;00m\n\u001b[1;32m--> 764\u001b[0m data \u001b[38;5;241m=\u001b[39m 
\u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_dataset_fetcher\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mfetch\u001b[49m\u001b[43m(\u001b[49m\u001b[43mindex\u001b[49m\u001b[43m)\u001b[49m \u001b[38;5;66;03m# may raise StopIteration\u001b[39;00m\n\u001b[0;32m 765\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_pin_memory:\n\u001b[0;32m 766\u001b[0m data \u001b[38;5;241m=\u001b[39m _utils\u001b[38;5;241m.\u001b[39mpin_memory\u001b[38;5;241m.\u001b[39mpin_memory(data, \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_pin_memory_device)\n", - "File \u001b[1;32mc:\\Users\\leo.breedt\\miniconda3\\envs\\neptune_scale_py_312_base\\Lib\\site-packages\\torch\\utils\\data\\_utils\\fetch.py:52\u001b[0m, in \u001b[0;36m_MapDatasetFetcher.fetch\u001b[1;34m(self, possibly_batched_index)\u001b[0m\n\u001b[0;32m 50\u001b[0m data \u001b[38;5;241m=\u001b[39m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mdataset\u001b[38;5;241m.\u001b[39m__getitems__(possibly_batched_index)\n\u001b[0;32m 51\u001b[0m \u001b[38;5;28;01melse\u001b[39;00m:\n\u001b[1;32m---> 52\u001b[0m data \u001b[38;5;241m=\u001b[39m [\u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mdataset\u001b[49m\u001b[43m[\u001b[49m\u001b[43midx\u001b[49m\u001b[43m]\u001b[49m \u001b[38;5;28;01mfor\u001b[39;00m idx \u001b[38;5;129;01min\u001b[39;00m possibly_batched_index]\n\u001b[0;32m 53\u001b[0m \u001b[38;5;28;01melse\u001b[39;00m:\n\u001b[0;32m 54\u001b[0m data \u001b[38;5;241m=\u001b[39m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mdataset[possibly_batched_index]\n", - "File \u001b[1;32mc:\\Users\\leo.breedt\\miniconda3\\envs\\neptune_scale_py_312_base\\Lib\\site-packages\\torchvision\\datasets\\mnist.py:146\u001b[0m, in \u001b[0;36mMNIST.__getitem__\u001b[1;34m(self, index)\u001b[0m\n\u001b[0;32m 143\u001b[0m img \u001b[38;5;241m=\u001b[39m 
Image\u001b[38;5;241m.\u001b[39mfromarray(img\u001b[38;5;241m.\u001b[39mnumpy(), mode\u001b[38;5;241m=\u001b[39m\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mL\u001b[39m\u001b[38;5;124m\"\u001b[39m)\n\u001b[0;32m 145\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mtransform \u001b[38;5;129;01mis\u001b[39;00m \u001b[38;5;129;01mnot\u001b[39;00m \u001b[38;5;28;01mNone\u001b[39;00m:\n\u001b[1;32m--> 146\u001b[0m img \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mtransform\u001b[49m\u001b[43m(\u001b[49m\u001b[43mimg\u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m 148\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mtarget_transform \u001b[38;5;129;01mis\u001b[39;00m \u001b[38;5;129;01mnot\u001b[39;00m \u001b[38;5;28;01mNone\u001b[39;00m:\n\u001b[0;32m 149\u001b[0m target \u001b[38;5;241m=\u001b[39m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mtarget_transform(target)\n", - "File \u001b[1;32mc:\\Users\\leo.breedt\\miniconda3\\envs\\neptune_scale_py_312_base\\Lib\\site-packages\\torchvision\\transforms\\transforms.py:95\u001b[0m, in \u001b[0;36mCompose.__call__\u001b[1;34m(self, img)\u001b[0m\n\u001b[0;32m 93\u001b[0m \u001b[38;5;28;01mdef\u001b[39;00m \u001b[38;5;21m__call__\u001b[39m(\u001b[38;5;28mself\u001b[39m, img):\n\u001b[0;32m 94\u001b[0m \u001b[38;5;28;01mfor\u001b[39;00m t \u001b[38;5;129;01min\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mtransforms:\n\u001b[1;32m---> 95\u001b[0m img \u001b[38;5;241m=\u001b[39m \u001b[43mt\u001b[49m\u001b[43m(\u001b[49m\u001b[43mimg\u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m 96\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m img\n", - "File \u001b[1;32mc:\\Users\\leo.breedt\\miniconda3\\envs\\neptune_scale_py_312_base\\Lib\\site-packages\\torchvision\\transforms\\transforms.py:137\u001b[0m, in 
\u001b[0;36mToTensor.__call__\u001b[1;34m(self, pic)\u001b[0m\n\u001b[0;32m 129\u001b[0m \u001b[38;5;28;01mdef\u001b[39;00m \u001b[38;5;21m__call__\u001b[39m(\u001b[38;5;28mself\u001b[39m, pic):\n\u001b[0;32m 130\u001b[0m \u001b[38;5;250m \u001b[39m\u001b[38;5;124;03m\"\"\"\u001b[39;00m\n\u001b[0;32m 131\u001b[0m \u001b[38;5;124;03m Args:\u001b[39;00m\n\u001b[0;32m 132\u001b[0m \u001b[38;5;124;03m pic (PIL Image or numpy.ndarray): Image to be converted to tensor.\u001b[39;00m\n\u001b[1;32m (...)\u001b[0m\n\u001b[0;32m 135\u001b[0m \u001b[38;5;124;03m Tensor: Converted image.\u001b[39;00m\n\u001b[0;32m 136\u001b[0m \u001b[38;5;124;03m \"\"\"\u001b[39;00m\n\u001b[1;32m--> 137\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[43mF\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mto_tensor\u001b[49m\u001b[43m(\u001b[49m\u001b[43mpic\u001b[49m\u001b[43m)\u001b[49m\n", - "File \u001b[1;32mc:\\Users\\leo.breedt\\miniconda3\\envs\\neptune_scale_py_312_base\\Lib\\site-packages\\torchvision\\transforms\\functional.py:176\u001b[0m, in \u001b[0;36mto_tensor\u001b[1;34m(pic)\u001b[0m\n\u001b[0;32m 174\u001b[0m img \u001b[38;5;241m=\u001b[39m img\u001b[38;5;241m.\u001b[39mpermute((\u001b[38;5;241m2\u001b[39m, \u001b[38;5;241m0\u001b[39m, \u001b[38;5;241m1\u001b[39m))\u001b[38;5;241m.\u001b[39mcontiguous()\n\u001b[0;32m 175\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;28misinstance\u001b[39m(img, torch\u001b[38;5;241m.\u001b[39mByteTensor):\n\u001b[1;32m--> 176\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[43mimg\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mto\u001b[49m\u001b[43m(\u001b[49m\u001b[43mdtype\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mdefault_float_dtype\u001b[49m\u001b[43m)\u001b[49m\u001b[38;5;241m.\u001b[39mdiv(\u001b[38;5;241m255\u001b[39m)\n\u001b[0;32m 177\u001b[0m \u001b[38;5;28;01melse\u001b[39;00m:\n\u001b[0;32m 178\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m img\n", - "\u001b[1;31mKeyboardInterrupt\u001b[0m: " + 
"Epoch [1/5] Training complete. Loss: 0.5882, Accuracy: 0.80%\n", + "{'layers/layer_fc1/activation_mean': -28.098386764526367, 'layers/layer_fc4/activation_mean': -28.098386764526367, 'layers/layer_fc5/activation_mean': -28.098386764526367, 'layers/layer_fc2/activation_mean': -4.294741630554199, 'layers/layer_fc3/activation_mean': -4.590151309967041, 'layers/layer_fc1/activation_std': 10.138233184814453, 'layers/layer_fc4/activation_std': 10.138233184814453, 'layers/layer_fc5/activation_std': 10.138233184814453, 'layers/layer_fc2/activation_std': 3.513108730316162, 'layers/layer_fc3/activation_std': 4.694570064544678, 'layers/layer_fc1.weight_mean': -4.2270323319826275e-05, 'layers/layer_fc1.bias_mean': 5.756489190389402e-05, 'layers/layer_fc2.weight_mean': 1.7486901924712583e-05, 'layers/layer_fc2.bias_mean': 5.119257184560411e-05, 'layers/layer_fc3.weight_mean': -6.444352038670331e-06, 'layers/layer_fc3.bias_mean': -3.2650757930241525e-05, 'layers/layer_fc4.weight_mean': 4.735463880933821e-05, 'layers/layer_fc4.bias_mean': 0.00019672312191687524, 'layers/layer_fc5.weight_mean': 2.773958840407431e-11, 'layers/layer_fc5.bias_mean': -3.4924596548080444e-10, 'layers/layer_fc1.weight_std': 0.0018884788732975721, 'layers/layer_fc1.bias_std': 0.0020714548882097006, 'layers/layer_fc2.weight_std': 0.002078161109238863, 'layers/layer_fc2.bias_std': 0.0017285202629864216, 'layers/layer_fc3.weight_std': 0.0006235535256564617, 'layers/layer_fc3.bias_std': 0.0014959467807784677, 'layers/layer_fc4.weight_std': 0.001397876301780343, 'layers/layer_fc4.bias_std': 0.001897347392514348, 'layers/layer_fc5.weight_std': 0.014698885381221771, 'layers/layer_fc5.bias_std': 0.015608142130076885, 'grad_norm/fc1.weight': 0.8462492823600769, 'grad_norm/fc1.bias': 0.03309129923582077, 'grad_norm/fc2.weight': 0.7523983716964722, 'grad_norm/fc2.bias': 0.039090901613235474, 'grad_norm/fc3.weight': 0.22576160728931427, 'grad_norm/fc3.bias': 0.023894065991044044, 'grad_norm/fc4.weight': 
0.25318393111228943, 'grad_norm/fc4.bias': 0.021497542038559914, 'grad_norm/fc5.weight': 0.5256778597831726, 'grad_norm/fc5.bias': 0.04682442545890808}\n", + "Epoch [2/5] Training complete. Loss: 0.2444, Accuracy: 0.93%\n", + "{'layers/layer_fc1/activation_mean': -28.045276641845703, 'layers/layer_fc4/activation_mean': -28.045276641845703, 'layers/layer_fc5/activation_mean': -28.045276641845703, 'layers/layer_fc2/activation_mean': -6.288389682769775, 'layers/layer_fc3/activation_mean': -7.880246162414551, 'layers/layer_fc1/activation_std': 10.427655220031738, 'layers/layer_fc4/activation_std': 10.427655220031738, 'layers/layer_fc5/activation_std': 10.427655220031738, 'layers/layer_fc2/activation_std': 5.016049385070801, 'layers/layer_fc3/activation_std': 7.8910441398620605, 'layers/layer_fc1.weight_mean': -1.8336819266551174e-05, 'layers/layer_fc1.bias_mean': 2.4514503820682876e-05, 'layers/layer_fc2.weight_mean': -4.397415978019126e-06, 'layers/layer_fc2.bias_mean': -5.701838745153509e-06, 'layers/layer_fc3.weight_mean': 3.609377472457709e-06, 'layers/layer_fc3.bias_mean': 2.2656186047242954e-05, 'layers/layer_fc4.weight_mean': 3.2792174806672847e-06, 'layers/layer_fc4.bias_mean': 6.183851655805483e-05, 'layers/layer_fc5.weight_mean': 1.033322427623773e-09, 'layers/layer_fc5.bias_mean': 2.793967834868738e-10, 'layers/layer_fc1.weight_std': 0.0009652073495090008, 'layers/layer_fc1.bias_std': 0.0011003392282873392, 'layers/layer_fc2.weight_std': 0.0015912855742499232, 'layers/layer_fc2.bias_std': 0.0008093378273770213, 'layers/layer_fc3.weight_std': 0.000562622444704175, 'layers/layer_fc3.bias_std': 0.0008122650324366987, 'layers/layer_fc4.weight_std': 0.0013753673993051052, 'layers/layer_fc4.bias_std': 0.001243805163539946, 'layers/layer_fc5.weight_std': 0.02044137567281723, 'layers/layer_fc5.bias_std': 0.01635204255580902, 'grad_norm/fc1.weight': 0.4324897825717926, 'grad_norm/fc1.bias': 0.01757538504898548, 'grad_norm/fc2.weight': 0.57610684633255, 
'grad_norm/fc2.bias': 0.018295787274837494, 'grad_norm/fc3.weight': 0.2036944478750229, 'grad_norm/fc3.bias': 0.012975896708667278, 'grad_norm/fc4.weight': 0.24896498024463654, 'grad_norm/fc4.bias': 0.014034420251846313, 'grad_norm/fc5.weight': 0.7310471534729004, 'grad_norm/fc5.bias': 0.04905613139271736}\n", + "Epoch [3/5] Training complete. Loss: 0.2124, Accuracy: 0.94%\n", + "{'layers/layer_fc1/activation_mean': -28.00644874572754, 'layers/layer_fc4/activation_mean': -28.00644874572754, 'layers/layer_fc5/activation_mean': -28.00644874572754, 'layers/layer_fc2/activation_mean': -8.306803703308105, 'layers/layer_fc3/activation_mean': -10.63320255279541, 'layers/layer_fc1/activation_std': 10.678731918334961, 'layers/layer_fc4/activation_std': 10.678731918334961, 'layers/layer_fc5/activation_std': 10.678731918334961, 'layers/layer_fc2/activation_std': 6.999919891357422, 'layers/layer_fc3/activation_std': 10.323572158813477, 'layers/layer_fc1.weight_mean': 7.430891855619848e-05, 'layers/layer_fc1.bias_mean': -9.825517918216065e-05, 'layers/layer_fc2.weight_mean': 1.702215740806423e-05, 'layers/layer_fc2.bias_mean': 5.076182424090803e-05, 'layers/layer_fc3.weight_mean': 2.006397380682756e-06, 'layers/layer_fc3.bias_mean': 9.8647487902781e-06, 'layers/layer_fc4.weight_mean': 3.732190089067444e-05, 'layers/layer_fc4.bias_mean': 0.00015728663129266351, 'layers/layer_fc5.weight_mean': 5.190377017072478e-10, 'layers/layer_fc5.bias_mean': 7.450580707946131e-10, 'layers/layer_fc1.weight_std': 0.0013835423160344362, 'layers/layer_fc1.bias_std': 0.0014023504918441176, 'layers/layer_fc2.weight_std': 0.0018697652267292142, 'layers/layer_fc2.bias_std': 0.0008941664709709585, 'layers/layer_fc3.weight_std': 0.0007453133002854884, 'layers/layer_fc3.bias_std': 0.0008750202250666916, 'layers/layer_fc4.weight_std': 0.0016579279908910394, 'layers/layer_fc4.bias_std': 0.001322976779192686, 'layers/layer_fc5.weight_std': 0.018788360059261322, 'layers/layer_fc5.bias_std': 
0.0166013166308403, 'grad_norm/fc1.weight': 0.620717465877533, 'grad_norm/fc1.bias': 0.02244885452091694, 'grad_norm/fc2.weight': 0.6769528388977051, 'grad_norm/fc2.bias': 0.020245518535375595, 'grad_norm/fc3.weight': 0.26983216404914856, 'grad_norm/fc3.bias': 0.013973843306303024, 'grad_norm/fc4.weight': 0.30018848180770874, 'grad_norm/fc4.bias': 0.015015012584626675, 'grad_norm/fc5.weight': 0.671930193901062, 'grad_norm/fc5.bias': 0.049803949892520905}\n", + "Epoch [4/5] Training complete. Loss: 0.1774, Accuracy: 0.95%\n", + "{'layers/layer_fc1/activation_mean': -28.02176284790039, 'layers/layer_fc4/activation_mean': -28.02176284790039, 'layers/layer_fc5/activation_mean': -28.02176284790039, 'layers/layer_fc2/activation_mean': -9.155248641967773, 'layers/layer_fc3/activation_mean': -13.8362455368042, 'layers/layer_fc1/activation_std': 10.731369972229004, 'layers/layer_fc4/activation_std': 10.731369972229004, 'layers/layer_fc5/activation_std': 10.731369972229004, 'layers/layer_fc2/activation_std': 7.8137617111206055, 'layers/layer_fc3/activation_std': 12.559845924377441, 'layers/layer_fc1.weight_mean': 4.3527179514057934e-05, 'layers/layer_fc1.bias_mean': -5.993247759761289e-05, 'layers/layer_fc2.weight_mean': -8.3655349953915e-06, 'layers/layer_fc2.bias_mean': -2.7970250812359154e-05, 'layers/layer_fc3.weight_mean': -6.794801265641581e-06, 'layers/layer_fc3.bias_mean': -5.020684693590738e-05, 'layers/layer_fc4.weight_mean': -5.56191080249846e-05, 'layers/layer_fc4.bias_mean': -0.00022704749426338822, 'layers/layer_fc5.weight_mean': -4.933287667263642e-10, 'layers/layer_fc5.bias_mean': -1.0710209386033398e-09, 'layers/layer_fc1.weight_std': 0.0006279172375798225, 'layers/layer_fc1.bias_std': 0.0006310796015895903, 'layers/layer_fc2.weight_std': 0.0009842520812526345, 'layers/layer_fc2.bias_std': 0.00036153788096271455, 'layers/layer_fc3.weight_std': 0.0003291342291049659, 'layers/layer_fc3.bias_std': 0.0003552739799488336, 'layers/layer_fc4.weight_std': 
0.0006492410320788622, 'layers/layer_fc4.bias_std': 0.0005454771453514695, 'layers/layer_fc5.weight_std': 0.0076590655371546745, 'layers/layer_fc5.bias_std': 0.005646508652716875, 'grad_norm/fc1.weight': 0.28198111057281494, 'grad_norm/fc1.bias': 0.010123053565621376, 'grad_norm/fc2.weight': 0.35634881258010864, 'grad_norm/fc2.bias': 0.008197144605219364, 'grad_norm/fc3.weight': 0.119184210896492, 'grad_norm/fc3.bias': 0.005729861091822386, 'grad_norm/fc4.weight': 0.11795385181903839, 'grad_norm/fc4.bias': 0.006662336643785238, 'grad_norm/fc5.weight': 0.2739119827747345, 'grad_norm/fc5.bias': 0.016939526423811913}\n", + "Epoch [5/5] Training complete. Loss: 0.1836, Accuracy: 0.95%\n", + "{'layers/layer_fc1/activation_mean': -27.910594940185547, 'layers/layer_fc4/activation_mean': -27.910594940185547, 'layers/layer_fc5/activation_mean': -27.910594940185547, 'layers/layer_fc2/activation_mean': -11.856008529663086, 'layers/layer_fc3/activation_mean': -16.673297882080078, 'layers/layer_fc1/activation_std': 11.18954086303711, 'layers/layer_fc4/activation_std': 11.18954086303711, 'layers/layer_fc5/activation_std': 11.18954086303711, 'layers/layer_fc2/activation_std': 10.051806449890137, 'layers/layer_fc3/activation_std': 14.125566482543945, 'layers/layer_fc1.weight_mean': -4.3949068640358746e-05, 'layers/layer_fc1.bias_mean': 5.6177024816861376e-05, 'layers/layer_fc2.weight_mean': -3.007975647051353e-05, 'layers/layer_fc2.bias_mean': -6.0376849432941526e-05, 'layers/layer_fc3.weight_mean': -6.851441867183894e-06, 'layers/layer_fc3.bias_mean': -4.150162567384541e-05, 'layers/layer_fc4.weight_mean': -3.618385744630359e-05, 'layers/layer_fc4.bias_mean': -9.973671694751829e-05, 'layers/layer_fc5.weight_mean': 1.3166733259240004e-09, 'layers/layer_fc5.bias_mean': 1.3737008197622913e-09, 'layers/layer_fc1.weight_std': 0.0014514160575345159, 'layers/layer_fc1.bias_std': 0.001587417908012867, 'layers/layer_fc2.weight_std': 0.002324556466192007, 'layers/layer_fc2.bias_std': 
0.000764612341299653, 'layers/layer_fc3.weight_std': 0.0007226017769426107, 'layers/layer_fc3.bias_std': 0.0007500615902245045, 'layers/layer_fc4.weight_std': 0.001422140165232122, 'layers/layer_fc4.bias_std': 0.000992378918454051, 'layers/layer_fc5.weight_std': 0.016280390322208405, 'layers/layer_fc5.bias_std': 0.013839689083397388, 'grad_norm/fc1.weight': 0.6505305767059326, 'grad_norm/fc1.bias': 0.025364963337779045, 'grad_norm/fc2.weight': 0.8416466116905212, 'grad_norm/fc2.bias': 0.017338205128908157, 'grad_norm/fc3.weight': 0.26162052154541016, 'grad_norm/fc3.bias': 0.01199591625481844, 'grad_norm/fc4.weight': 0.25751423835754395, 'grad_norm/fc4.bias': 0.011240324936807156, 'grad_norm/fc5.weight': 0.5822373628616333, 'grad_norm/fc5.bias': 0.04151906818151474}\n", + "Testing complete. Loss: 0.1949, Accuracy: 0.95%\n" ] } ], @@ -450,7 +438,7 @@ }, { "cell_type": "code", - "execution_count": 88, + "execution_count": 8, "metadata": {}, "outputs": [ { From 6f237612bf40e7b49e6ee1feca9974371fddb268 Mon Sep 17 00:00:00 2001 From: LeoRoccoBreedt Date: Wed, 5 Mar 2025 14:45:39 +0100 Subject: [PATCH 023/125] refactor: update packages for example --- .../pytorch_text_model_debugging.ipynb | 305 ++++++++++-------- 1 file changed, 168 insertions(+), 137 deletions(-) diff --git a/integrations-and-supported-tools/pytorch/pytorch_text_model_debugging.ipynb b/integrations-and-supported-tools/pytorch/pytorch_text_model_debugging.ipynb index adf4411..5c81aa8 100644 --- a/integrations-and-supported-tools/pytorch/pytorch_text_model_debugging.ipynb +++ b/integrations-and-supported-tools/pytorch/pytorch_text_model_debugging.ipynb @@ -2,17 +2,31 @@ "cells": [ { "cell_type": "code", - "execution_count": 2, + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "# Install dependencies\n", + "! 
pip install -q -U neptune_scale torch datasets" + ] + }, + { + "cell_type": "code", + "execution_count": null, "metadata": {}, "outputs": [], "source": [ "import torch\n", "import torch.nn as nn\n", "import torch.optim as optim\n", - "import matplotlib.pyplot as plt\n", + "from torch.utils.data import Dataset, DataLoader\n", "import numpy as np\n", + "from collections import Counter\n", + "from datasets import load_dataset\n", + "\n", + "import matplotlib.pyplot as plt\n", "\n", - "from neptune_scale import Run\n" + "from neptune_scale import Run" ] }, { @@ -32,7 +46,7 @@ }, { "cell_type": "code", - "execution_count": 121, + "execution_count": 133, "metadata": {}, "outputs": [], "source": [ @@ -50,13 +64,14 @@ " \"input_size\": 28 * 28, # TODO - remove\n", " \"vocab_size\": 300000,\n", " \"embed_size\": 1000,\n", - " \"hidden_size\": 256 # hidden size for the LSTM\n", + " \"hidden_size\": 256, # hidden size for the LSTM\n", + " \"dropout_prob\": 0.3\n", "}" ] }, { "cell_type": "code", - "execution_count": 122, + "execution_count": 140, "metadata": {}, "outputs": [], "source": [ @@ -148,7 +163,7 @@ }, { "cell_type": "code", - "execution_count": 124, + "execution_count": 136, "metadata": {}, "outputs": [], "source": [ @@ -157,7 +172,7 @@ " def __init__(self, vocab_size, embed_size, hidden_size):\n", " super(SimpleLLM, self).__init__()\n", " self.embedding = nn.Embedding(vocab_size, embed_size)\n", - " self.lstm = nn.LSTM(embed_size, 512, batch_first=True)\n", + " self.lstm = nn.LSTM(embed_size, 512, num_layers=1, dropout = params[\"dropout_prob\"], batch_first=True)\n", " self.fc1 = nn.Linear(512, hidden_size)\n", " self.fc2 = nn.Linear(hidden_size, vocab_size)\n", "\n", @@ -178,7 +193,7 @@ " def forward(self, x):\n", " x = self.embedding(x)\n", " lstm_out, _ = self.lstm(x) # LSTM returns output and hidden/cell state tuple\n", - " out = self.fc1(lstm_out)\n", + " out = self.fc1(lstm_out) # Use the last output from the LSTM\n", " out = self.fc2(out)\n", " return 
out\n", " \n", @@ -253,14 +268,43 @@ }, { "cell_type": "code", - "execution_count": 125, + "execution_count": 144, "metadata": {}, - "outputs": [], + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "tensor([[ 38102, 83130, 136371, 174077, 163091],\n", + " [115872, 149419, 90390, 125438, 109051],\n", + " [ 21550, 231165, 31530, 59481, 190700],\n", + " [264765, 13143, 148381, 191068, 1969],\n", + " [263624, 146997, 290042, 192130, 34321],\n", + " [ 34259, 255854, 192565, 292704, 182054],\n", + " [259433, 242532, 90467, 38266, 10173],\n", + " [ 81651, 133413, 117379, 57402, 234105]])\n" + ] + } + ], "source": [ "\n", "# Dummy input data (example with batch_size=3 and sequence_length=5)\n", "input_data = torch.randint(0, params[\"vocab_size\"], (params[\"batch_size\"] * 3, params[\"sequence_length\"])) # Random word indices\n", "target_data = torch.randint(0, params[\"vocab_size\"], (params[\"batch_size\"] * 3, params[\"sequence_length\"])) # Random target labels\n", + "\n", + "# Split the data into train and validation sets\n", + "train_input, val_input = train_test_split(input_data, test_size=0.1, random_state=42)\n", + "train_target, val_target = train_test_split(target_data, test_size=0.1, random_state=42)\n", + "\n", + "print(train_input)\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ "\n", "# Manually splitting the data into train, validation, and test sets\n", "train_size = int(0.6 * input_data.size(0)) # 60% for training\n", @@ -280,136 +324,117 @@ }, { "cell_type": "code", - "execution_count": 126, + "execution_count": 142, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ - "Epoch 1/400, Loss: 12.6022\n", - "Epoch 2/400, Loss: 11.9868\n", - "Epoch 3/400, Loss: 11.3907\n", - "Epoch 4/400, Loss: 10.7466\n", - "Epoch 5/400, Loss: 10.0276\n", - "Epoch 6/400, Loss: 9.2259\n", - "Epoch 7/400, Loss: 8.3386\n", - "Epoch 8/400, Loss: 
7.3620\n", - "Epoch 9/400, Loss: 6.2913\n", - "Epoch 10/400, Loss: 5.1245\n", - "Epoch 11/400, Loss: 3.8811\n", - "Epoch 12/400, Loss: 2.6654\n", - "Epoch 13/400, Loss: 1.6767\n", - "Epoch 14/400, Loss: 1.0357\n", - "Epoch 15/400, Loss: 0.6587\n", - "Epoch 16/400, Loss: 0.3819\n", - "Epoch 17/400, Loss: 0.1724\n", - "Epoch 18/400, Loss: 0.0595\n", - "Epoch 19/400, Loss: 0.0203\n", - "Epoch 20/400, Loss: 0.0097\n", - "Epoch 21/400, Loss: 0.0066\n", - "Epoch 22/400, Loss: 0.0053\n", - "Epoch 23/400, Loss: 0.0046\n", - "Epoch 24/400, Loss: 0.0041\n", - "Epoch 25/400, Loss: 0.0037\n", - "Epoch 26/400, Loss: 0.0033\n", - "Epoch 27/400, Loss: 0.0030\n", - "Epoch 28/400, Loss: 0.0027\n", - "Epoch 29/400, Loss: 0.0024\n", - "Epoch 30/400, Loss: 0.0022\n", - "Epoch 31/400, Loss: 0.0020\n", - "Epoch 32/400, Loss: 0.0018\n", - "Epoch 33/400, Loss: 0.0017\n", - "Epoch 34/400, Loss: 0.0015\n", - "Epoch 35/400, Loss: 0.0014\n", - "Epoch 36/400, Loss: 0.0013\n", - "Epoch 37/400, Loss: 0.0012\n", - "Epoch 38/400, Loss: 0.0012\n", - "Epoch 39/400, Loss: 0.0011\n", - "Epoch 40/400, Loss: 0.0010\n", - "Epoch 41/400, Loss: 0.0010\n", - "Epoch 42/400, Loss: 0.0009\n", - "Epoch 43/400, Loss: 0.0009\n", - "Epoch 44/400, Loss: 0.0008\n", - "Epoch 45/400, Loss: 0.0008\n", - "Epoch 46/400, Loss: 0.0008\n", - "Epoch 47/400, Loss: 0.0007\n", - "Epoch 48/400, Loss: 0.0007\n", - "Epoch 49/400, Loss: 0.0007\n", - "Epoch 50/400, Loss: 0.0007\n", - "Epoch 51/400, Loss: 0.0006\n", - "Epoch 52/400, Loss: 0.0006\n", - "Epoch 53/400, Loss: 0.0006\n", - "Epoch 54/400, Loss: 0.0006\n", - "Epoch 55/400, Loss: 0.0006\n", - "Epoch 56/400, Loss: 0.0005\n", - "Epoch 57/400, Loss: 0.0005\n", - "Epoch 58/400, Loss: 0.0005\n", - "Epoch 59/400, Loss: 0.0005\n", - "Epoch 60/400, Loss: 0.0005\n", - "Epoch 61/400, Loss: 0.0005\n", - "Epoch 62/400, Loss: 0.0005\n", - "Epoch 63/400, Loss: 0.0005\n", - "Epoch 64/400, Loss: 0.0005\n", - "Epoch 65/400, Loss: 0.0005\n", - "Epoch 66/400, Loss: 0.0004\n", - "Epoch 67/400, 
Loss: 0.0004\n", - "Epoch 68/400, Loss: 0.0004\n", - "Epoch 69/400, Loss: 0.0004\n", - "Epoch 70/400, Loss: 0.0004\n", - "Epoch 71/400, Loss: 0.0004\n", - "Epoch 72/400, Loss: 0.0004\n", - "Epoch 73/400, Loss: 0.0004\n", - "Epoch 74/400, Loss: 0.0004\n", - "Epoch 75/400, Loss: 0.0004\n", - "Epoch 76/400, Loss: 0.0004\n", - "Epoch 77/400, Loss: 0.0004\n", - "Epoch 78/400, Loss: 0.0004\n", - "Epoch 79/400, Loss: 0.0004\n", - "Epoch 80/400, Loss: 0.0004\n", - "Epoch 81/400, Loss: 0.0004\n", - "Epoch 82/400, Loss: 0.0004\n", - "Epoch 83/400, Loss: 0.0004\n", - "Epoch 84/400, Loss: 0.0004\n", - "Epoch 85/400, Loss: 0.0004\n", - "Epoch 86/400, Loss: 0.0003\n", - "Epoch 87/400, Loss: 0.0003\n", - "Epoch 88/400, Loss: 0.0003\n", - "Epoch 89/400, Loss: 0.0003\n", - "Epoch 90/400, Loss: 0.0003\n", - "Epoch 91/400, Loss: 0.0003\n", - "Epoch 92/400, Loss: 0.0003\n", - "Epoch 93/400, Loss: 0.0003\n", - "Epoch 94/400, Loss: 0.0003\n", - "Epoch 95/400, Loss: 0.0003\n", - "Epoch 96/400, Loss: 0.0003\n", - "Epoch 97/400, Loss: 0.0003\n", - "Epoch 98/400, Loss: 0.0003\n", - "Epoch 99/400, Loss: 0.0003\n", - "Epoch 100/400, Loss: 0.0003\n", - "Epoch 101/400, Loss: 0.0003\n", - "Epoch 102/400, Loss: 0.0003\n", - "Epoch 103/400, Loss: 0.0003\n", - "Epoch 104/400, Loss: 0.0003\n", - "Epoch 105/400, Loss: 0.0003\n", - "Epoch 106/400, Loss: 0.0003\n", - "Epoch 107/400, Loss: 0.0003\n", - "Epoch 108/400, Loss: 0.0003\n", - "Epoch 109/400, Loss: 0.0003\n", - "Epoch 110/400, Loss: 0.0003\n", - "Epoch 111/400, Loss: 0.0003\n", - "Epoch 112/400, Loss: 0.0003\n", - "Epoch 113/400, Loss: 0.0003\n", - "Epoch 114/400, Loss: 0.0003\n", - "Epoch 115/400, Loss: 0.0003\n", - "Epoch 116/400, Loss: 0.0003\n", - "Epoch 117/400, Loss: 0.0003\n", - "Epoch 118/400, Loss: 0.0003\n", - "Epoch 119/400, Loss: 0.0003\n", - "Epoch 120/400, Loss: 0.0003\n", - "Epoch 121/400, Loss: 0.0003\n", - "Epoch 122/400, Loss: 0.0003\n", - "Epoch 123/400, Loss: 0.0003\n" + "Epoch 1/400, Loss: 13.1628\n", + "Epoch 2/400, Loss: 
12.5549\n", + "Epoch 3/400, Loss: 12.4428\n", + "Epoch 4/400, Loss: 12.2459\n", + "Epoch 5/400, Loss: 11.9436\n", + "Epoch 6/400, Loss: 11.6424\n", + "Epoch 7/400, Loss: 11.2635\n", + "Epoch 8/400, Loss: 10.8578\n", + "Epoch 9/400, Loss: 10.3417\n", + "Epoch 10/400, Loss: 9.7528\n", + "Epoch 11/400, Loss: 9.0635\n", + "Epoch 12/400, Loss: 8.2568\n", + "Epoch 13/400, Loss: 7.3002\n", + "Epoch 14/400, Loss: 6.2110\n", + "Epoch 15/400, Loss: 4.9925\n", + "Epoch 16/400, Loss: 3.7230\n", + "Epoch 17/400, Loss: 2.5503\n", + "Epoch 18/400, Loss: 1.5385\n", + "Epoch 19/400, Loss: 0.7866\n", + "Epoch 20/400, Loss: 0.4000\n", + "Epoch 21/400, Loss: 0.2285\n", + "Epoch 22/400, Loss: 0.1262\n", + "Epoch 23/400, Loss: 0.0624\n", + "Epoch 24/400, Loss: 0.0344\n", + "Epoch 25/400, Loss: 0.0240\n", + "Epoch 26/400, Loss: 0.0197\n", + "Epoch 27/400, Loss: 0.0177\n", + "Epoch 28/400, Loss: 0.0167\n", + "Epoch 29/400, Loss: 0.0155\n", + "Epoch 30/400, Loss: 0.0135\n", + "Epoch 31/400, Loss: 0.0110\n", + "Epoch 32/400, Loss: 0.0087\n", + "Epoch 33/400, Loss: 0.0071\n", + "Epoch 34/400, Loss: 0.0061\n", + "Epoch 35/400, Loss: 0.0055\n", + "Epoch 36/400, Loss: 0.0052\n", + "Epoch 37/400, Loss: 0.0049\n", + "Epoch 38/400, Loss: 0.0047\n", + "Epoch 39/400, Loss: 0.0044\n", + "Epoch 40/400, Loss: 0.0042\n", + "Epoch 41/400, Loss: 0.0040\n", + "Epoch 42/400, Loss: 0.0037\n", + "Epoch 43/400, Loss: 0.0035\n", + "Epoch 44/400, Loss: 0.0032\n", + "Epoch 45/400, Loss: 0.0030\n", + "Epoch 46/400, Loss: 0.0028\n", + "Epoch 47/400, Loss: 0.0026\n", + "Epoch 48/400, Loss: 0.0024\n", + "Epoch 49/400, Loss: 0.0022\n", + "Epoch 50/400, Loss: 0.0021\n", + "Epoch 51/400, Loss: 0.0019\n", + "Epoch 52/400, Loss: 0.0018\n", + "Epoch 53/400, Loss: 0.0017\n", + "Epoch 54/400, Loss: 0.0016\n", + "Epoch 55/400, Loss: 0.0016\n", + "Epoch 56/400, Loss: 0.0015\n", + "Epoch 57/400, Loss: 0.0014\n", + "Epoch 58/400, Loss: 0.0014\n", + "Epoch 59/400, Loss: 0.0013\n", + "Epoch 60/400, Loss: 0.0013\n", + "Epoch 
61/400, Loss: 0.0012\n", + "Epoch 62/400, Loss: 0.0012\n", + "Epoch 63/400, Loss: 0.0011\n", + "Epoch 64/400, Loss: 0.0011\n", + "Epoch 65/400, Loss: 0.0011\n", + "Epoch 66/400, Loss: 0.0010\n", + "Epoch 67/400, Loss: 0.0010\n", + "Epoch 68/400, Loss: 0.0010\n", + "Epoch 69/400, Loss: 0.0010\n", + "Epoch 70/400, Loss: 0.0009\n", + "Epoch 71/400, Loss: 0.0009\n", + "Epoch 72/400, Loss: 0.0009\n", + "Epoch 73/400, Loss: 0.0009\n", + "Epoch 74/400, Loss: 0.0009\n", + "Epoch 75/400, Loss: 0.0008\n", + "Epoch 76/400, Loss: 0.0008\n", + "Epoch 77/400, Loss: 0.0008\n", + "Epoch 78/400, Loss: 0.0008\n", + "Epoch 79/400, Loss: 0.0008\n", + "Epoch 80/400, Loss: 0.0008\n", + "Epoch 81/400, Loss: 0.0008\n", + "Epoch 82/400, Loss: 0.0008\n", + "Epoch 83/400, Loss: 0.0007\n", + "Epoch 84/400, Loss: 0.0007\n", + "Epoch 85/400, Loss: 0.0007\n", + "Epoch 86/400, Loss: 0.0007\n", + "Epoch 87/400, Loss: 0.0007\n", + "Epoch 88/400, Loss: 0.0007\n", + "Epoch 89/400, Loss: 0.0007\n", + "Epoch 90/400, Loss: 0.0007\n", + "Epoch 91/400, Loss: 0.0007\n", + "Epoch 92/400, Loss: 0.0007\n", + "Epoch 93/400, Loss: 0.0007\n", + "Epoch 94/400, Loss: 0.0007\n", + "Epoch 95/400, Loss: 0.0007\n", + "Epoch 96/400, Loss: 0.0006\n", + "Epoch 97/400, Loss: 0.0006\n", + "Epoch 98/400, Loss: 0.0006\n", + "Epoch 99/400, Loss: 0.0006\n", + "Epoch 100/400, Loss: 0.0006\n", + "Epoch 101/400, Loss: 0.0006\n", + "Epoch 102/400, Loss: 0.0006\n", + "Epoch 103/400, Loss: 0.0006\n", + "Epoch 104/400, Loss: 0.0006\n" ] }, { @@ -419,7 +444,13 @@ "traceback": [ "\u001b[1;31m---------------------------------------------------------------------------\u001b[0m", "\u001b[1;31mKeyboardInterrupt\u001b[0m Traceback (most recent call last)", - "Cell \u001b[1;32mIn[126], line 45\u001b[0m\n\u001b[0;32m 43\u001b[0m \u001b[38;5;28;01mfor\u001b[39;00m name, param \u001b[38;5;129;01min\u001b[39;00m model\u001b[38;5;241m.\u001b[39mnamed_parameters():\n\u001b[0;32m 44\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m param 
\u001b[38;5;129;01mis\u001b[39;00m \u001b[38;5;129;01mnot\u001b[39;00m \u001b[38;5;28;01mNone\u001b[39;00m:\n\u001b[1;32m---> 45\u001b[0m gradients_std[\u001b[38;5;124mf\u001b[39m\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mlayers/layer_\u001b[39m\u001b[38;5;132;01m{\u001b[39;00mname\u001b[38;5;132;01m}\u001b[39;00m\u001b[38;5;124m_std\u001b[39m\u001b[38;5;124m\"\u001b[39m] \u001b[38;5;241m=\u001b[39m \u001b[43mparam\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mgrad\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mstd\u001b[49m\u001b[43m(\u001b[49m\u001b[43m)\u001b[49m\u001b[38;5;241m.\u001b[39mitem()\n\u001b[0;32m 46\u001b[0m gradients_mean[\u001b[38;5;124mf\u001b[39m\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mlayers/layer_\u001b[39m\u001b[38;5;132;01m{\u001b[39;00mname\u001b[38;5;132;01m}\u001b[39;00m\u001b[38;5;124m_mean\u001b[39m\u001b[38;5;124m\"\u001b[39m] \u001b[38;5;241m=\u001b[39m param\u001b[38;5;241m.\u001b[39mgrad\u001b[38;5;241m.\u001b[39mmean()\u001b[38;5;241m.\u001b[39mitem()\n\u001b[0;32m 47\u001b[0m gradient_norms[\u001b[38;5;124mf\u001b[39m\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mlayers/layer_\u001b[39m\u001b[38;5;132;01m{\u001b[39;00mname\u001b[38;5;132;01m}\u001b[39;00m\u001b[38;5;124m_norm\u001b[39m\u001b[38;5;124m\"\u001b[39m] \u001b[38;5;241m=\u001b[39m param\u001b[38;5;241m.\u001b[39mgrad\u001b[38;5;241m.\u001b[39mnorm()\u001b[38;5;241m.\u001b[39mitem() \u001b[38;5;66;03m# L2 norm (Euclidean norm) of the gradients\u001b[39;00m\n", + "Cell \u001b[1;32mIn[142], line 29\u001b[0m\n\u001b[0;32m 26\u001b[0m loss\u001b[38;5;241m.\u001b[39mbackward()\n\u001b[0;32m 28\u001b[0m \u001b[38;5;66;03m# Optimizer step\u001b[39;00m\n\u001b[1;32m---> 29\u001b[0m \u001b[43moptimizer\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mstep\u001b[49m\u001b[43m(\u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m 31\u001b[0m val_loss \u001b[38;5;241m=\u001b[39m evaluate_model(model, val_input, val_target, 
params[\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mvocab_size\u001b[39m\u001b[38;5;124m\"\u001b[39m])\n\u001b[0;32m 33\u001b[0m \u001b[38;5;66;03m# Track activations\u001b[39;00m\n", + "File \u001b[1;32mc:\\Users\\leo.breedt\\miniconda3\\envs\\neptune_scale_py_312_base\\Lib\\site-packages\\torch\\optim\\optimizer.py:493\u001b[0m, in \u001b[0;36mOptimizer.profile_hook_step..wrapper\u001b[1;34m(*args, **kwargs)\u001b[0m\n\u001b[0;32m 488\u001b[0m \u001b[38;5;28;01melse\u001b[39;00m:\n\u001b[0;32m 489\u001b[0m \u001b[38;5;28;01mraise\u001b[39;00m \u001b[38;5;167;01mRuntimeError\u001b[39;00m(\n\u001b[0;32m 490\u001b[0m \u001b[38;5;124mf\u001b[39m\u001b[38;5;124m\"\u001b[39m\u001b[38;5;132;01m{\u001b[39;00mfunc\u001b[38;5;132;01m}\u001b[39;00m\u001b[38;5;124m must return None or a tuple of (new_args, new_kwargs), but got \u001b[39m\u001b[38;5;132;01m{\u001b[39;00mresult\u001b[38;5;132;01m}\u001b[39;00m\u001b[38;5;124m.\u001b[39m\u001b[38;5;124m\"\u001b[39m\n\u001b[0;32m 491\u001b[0m )\n\u001b[1;32m--> 493\u001b[0m out \u001b[38;5;241m=\u001b[39m \u001b[43mfunc\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m 494\u001b[0m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_optimizer_step_code()\n\u001b[0;32m 496\u001b[0m \u001b[38;5;66;03m# call optimizer step post hooks\u001b[39;00m\n", + "File \u001b[1;32mc:\\Users\\leo.breedt\\miniconda3\\envs\\neptune_scale_py_312_base\\Lib\\site-packages\\torch\\optim\\optimizer.py:91\u001b[0m, in \u001b[0;36m_use_grad_for_differentiable.._use_grad\u001b[1;34m(self, *args, **kwargs)\u001b[0m\n\u001b[0;32m 89\u001b[0m 
torch\u001b[38;5;241m.\u001b[39mset_grad_enabled(\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mdefaults[\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mdifferentiable\u001b[39m\u001b[38;5;124m\"\u001b[39m])\n\u001b[0;32m 90\u001b[0m torch\u001b[38;5;241m.\u001b[39m_dynamo\u001b[38;5;241m.\u001b[39mgraph_break()\n\u001b[1;32m---> 91\u001b[0m ret \u001b[38;5;241m=\u001b[39m \u001b[43mfunc\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m 92\u001b[0m \u001b[38;5;28;01mfinally\u001b[39;00m:\n\u001b[0;32m 93\u001b[0m torch\u001b[38;5;241m.\u001b[39m_dynamo\u001b[38;5;241m.\u001b[39mgraph_break()\n", + "File \u001b[1;32mc:\\Users\\leo.breedt\\miniconda3\\envs\\neptune_scale_py_312_base\\Lib\\site-packages\\torch\\optim\\adam.py:244\u001b[0m, in \u001b[0;36mAdam.step\u001b[1;34m(self, closure)\u001b[0m\n\u001b[0;32m 232\u001b[0m beta1, beta2 \u001b[38;5;241m=\u001b[39m group[\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mbetas\u001b[39m\u001b[38;5;124m\"\u001b[39m]\n\u001b[0;32m 234\u001b[0m has_complex \u001b[38;5;241m=\u001b[39m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_init_group(\n\u001b[0;32m 235\u001b[0m group,\n\u001b[0;32m 236\u001b[0m params_with_grad,\n\u001b[1;32m (...)\u001b[0m\n\u001b[0;32m 241\u001b[0m state_steps,\n\u001b[0;32m 242\u001b[0m )\n\u001b[1;32m--> 244\u001b[0m \u001b[43madam\u001b[49m\u001b[43m(\u001b[49m\n\u001b[0;32m 245\u001b[0m \u001b[43m \u001b[49m\u001b[43mparams_with_grad\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 246\u001b[0m \u001b[43m \u001b[49m\u001b[43mgrads\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 247\u001b[0m \u001b[43m \u001b[49m\u001b[43mexp_avgs\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 248\u001b[0m \u001b[43m 
\u001b[49m\u001b[43mexp_avg_sqs\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 249\u001b[0m \u001b[43m \u001b[49m\u001b[43mmax_exp_avg_sqs\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 250\u001b[0m \u001b[43m \u001b[49m\u001b[43mstate_steps\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 251\u001b[0m \u001b[43m \u001b[49m\u001b[43mamsgrad\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mgroup\u001b[49m\u001b[43m[\u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43mamsgrad\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m]\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 252\u001b[0m \u001b[43m \u001b[49m\u001b[43mhas_complex\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mhas_complex\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 253\u001b[0m \u001b[43m \u001b[49m\u001b[43mbeta1\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mbeta1\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 254\u001b[0m \u001b[43m \u001b[49m\u001b[43mbeta2\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mbeta2\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 255\u001b[0m \u001b[43m \u001b[49m\u001b[43mlr\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mgroup\u001b[49m\u001b[43m[\u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43mlr\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m]\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 256\u001b[0m \u001b[43m \u001b[49m\u001b[43mweight_decay\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mgroup\u001b[49m\u001b[43m[\u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43mweight_decay\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m]\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 257\u001b[0m \u001b[43m \u001b[49m\u001b[43meps\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mgroup\u001b[49m\u001b[43m[\u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43meps\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m]\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 258\u001b[0m \u001b[43m 
\u001b[49m\u001b[43mmaximize\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mgroup\u001b[49m\u001b[43m[\u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43mmaximize\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m]\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 259\u001b[0m \u001b[43m \u001b[49m\u001b[43mforeach\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mgroup\u001b[49m\u001b[43m[\u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43mforeach\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m]\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 260\u001b[0m \u001b[43m \u001b[49m\u001b[43mcapturable\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mgroup\u001b[49m\u001b[43m[\u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43mcapturable\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m]\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 261\u001b[0m \u001b[43m \u001b[49m\u001b[43mdifferentiable\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mgroup\u001b[49m\u001b[43m[\u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43mdifferentiable\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m]\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 262\u001b[0m \u001b[43m \u001b[49m\u001b[43mfused\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mgroup\u001b[49m\u001b[43m[\u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43mfused\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m]\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 263\u001b[0m \u001b[43m \u001b[49m\u001b[43mgrad_scale\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[38;5;28;43mgetattr\u001b[39;49m\u001b[43m(\u001b[49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43mgrad_scale\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;28;43;01mNone\u001b[39;49;00m\u001b[43m)\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 
264\u001b[0m \u001b[43m \u001b[49m\u001b[43mfound_inf\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[38;5;28;43mgetattr\u001b[39;49m\u001b[43m(\u001b[49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43mfound_inf\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;28;43;01mNone\u001b[39;49;00m\u001b[43m)\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 265\u001b[0m \u001b[43m \u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m 267\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m loss\n", + "File \u001b[1;32mc:\\Users\\leo.breedt\\miniconda3\\envs\\neptune_scale_py_312_base\\Lib\\site-packages\\torch\\optim\\optimizer.py:154\u001b[0m, in \u001b[0;36m_disable_dynamo_if_unsupported..wrapper..maybe_fallback\u001b[1;34m(*args, **kwargs)\u001b[0m\n\u001b[0;32m 152\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m disabled_func(\u001b[38;5;241m*\u001b[39margs, \u001b[38;5;241m*\u001b[39m\u001b[38;5;241m*\u001b[39mkwargs)\n\u001b[0;32m 153\u001b[0m \u001b[38;5;28;01melse\u001b[39;00m:\n\u001b[1;32m--> 154\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[43mfunc\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n", + "File \u001b[1;32mc:\\Users\\leo.breedt\\miniconda3\\envs\\neptune_scale_py_312_base\\Lib\\site-packages\\torch\\optim\\adam.py:876\u001b[0m, in \u001b[0;36madam\u001b[1;34m(params, grads, exp_avgs, exp_avg_sqs, max_exp_avg_sqs, state_steps, foreach, capturable, differentiable, fused, grad_scale, found_inf, has_complex, amsgrad, beta1, beta2, lr, weight_decay, eps, maximize)\u001b[0m\n\u001b[0;32m 873\u001b[0m \u001b[38;5;28;01melse\u001b[39;00m:\n\u001b[0;32m 874\u001b[0m func \u001b[38;5;241m=\u001b[39m _single_tensor_adam\n\u001b[1;32m--> 876\u001b[0m 
\u001b[43mfunc\u001b[49m\u001b[43m(\u001b[49m\n\u001b[0;32m 877\u001b[0m \u001b[43m \u001b[49m\u001b[43mparams\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 878\u001b[0m \u001b[43m \u001b[49m\u001b[43mgrads\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 879\u001b[0m \u001b[43m \u001b[49m\u001b[43mexp_avgs\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 880\u001b[0m \u001b[43m \u001b[49m\u001b[43mexp_avg_sqs\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 881\u001b[0m \u001b[43m \u001b[49m\u001b[43mmax_exp_avg_sqs\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 882\u001b[0m \u001b[43m \u001b[49m\u001b[43mstate_steps\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 883\u001b[0m \u001b[43m \u001b[49m\u001b[43mamsgrad\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mamsgrad\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 884\u001b[0m \u001b[43m \u001b[49m\u001b[43mhas_complex\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mhas_complex\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 885\u001b[0m \u001b[43m \u001b[49m\u001b[43mbeta1\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mbeta1\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 886\u001b[0m \u001b[43m \u001b[49m\u001b[43mbeta2\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mbeta2\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 887\u001b[0m \u001b[43m \u001b[49m\u001b[43mlr\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mlr\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 888\u001b[0m \u001b[43m \u001b[49m\u001b[43mweight_decay\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mweight_decay\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 889\u001b[0m \u001b[43m \u001b[49m\u001b[43meps\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43meps\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 890\u001b[0m \u001b[43m \u001b[49m\u001b[43mmaximize\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mmaximize\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 891\u001b[0m \u001b[43m 
\u001b[49m\u001b[43mcapturable\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mcapturable\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 892\u001b[0m \u001b[43m \u001b[49m\u001b[43mdifferentiable\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mdifferentiable\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 893\u001b[0m \u001b[43m \u001b[49m\u001b[43mgrad_scale\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mgrad_scale\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 894\u001b[0m \u001b[43m \u001b[49m\u001b[43mfound_inf\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mfound_inf\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 895\u001b[0m \u001b[43m\u001b[49m\u001b[43m)\u001b[49m\n", + "File \u001b[1;32mc:\\Users\\leo.breedt\\miniconda3\\envs\\neptune_scale_py_312_base\\Lib\\site-packages\\torch\\optim\\adam.py:478\u001b[0m, in \u001b[0;36m_single_tensor_adam\u001b[1;34m(params, grads, exp_avgs, exp_avg_sqs, max_exp_avg_sqs, state_steps, grad_scale, found_inf, amsgrad, has_complex, beta1, beta2, lr, weight_decay, eps, maximize, capturable, differentiable)\u001b[0m\n\u001b[0;32m 475\u001b[0m \u001b[38;5;28;01melse\u001b[39;00m:\n\u001b[0;32m 476\u001b[0m denom \u001b[38;5;241m=\u001b[39m (exp_avg_sq\u001b[38;5;241m.\u001b[39msqrt() \u001b[38;5;241m/\u001b[39m bias_correction2_sqrt)\u001b[38;5;241m.\u001b[39madd_(eps)\n\u001b[1;32m--> 478\u001b[0m \u001b[43mparam\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43maddcdiv_\u001b[49m\u001b[43m(\u001b[49m\u001b[43mexp_avg\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mdenom\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mvalue\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[38;5;241;43m-\u001b[39;49m\u001b[43mstep_size\u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m 480\u001b[0m \u001b[38;5;66;03m# Lastly, switch back to complex view\u001b[39;00m\n\u001b[0;32m 481\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m amsgrad \u001b[38;5;129;01mand\u001b[39;00m 
torch\u001b[38;5;241m.\u001b[39mis_complex(params[i]):\n", "\u001b[1;31mKeyboardInterrupt\u001b[0m: " ] } @@ -496,7 +527,7 @@ }, { "cell_type": "code", - "execution_count": 127, + "execution_count": 143, "metadata": {}, "outputs": [ { From 2c82a0f8f0fe9d7795592b94d025c8397e3a7e5a Mon Sep 17 00:00:00 2001 From: LeoRoccoBreedt Date: Wed, 5 Mar 2025 14:48:32 +0100 Subject: [PATCH 024/125] refactor: update dataset to be used in example --- .../pytorch_text_model_debugging.ipynb | 89 +++++++++++-------- 1 file changed, 50 insertions(+), 39 deletions(-) diff --git a/integrations-and-supported-tools/pytorch/pytorch_text_model_debugging.ipynb b/integrations-and-supported-tools/pytorch/pytorch_text_model_debugging.ipynb index 5c81aa8..2b51e73 100644 --- a/integrations-and-supported-tools/pytorch/pytorch_text_model_debugging.ipynb +++ b/integrations-and-supported-tools/pytorch/pytorch_text_model_debugging.ipynb @@ -266,37 +266,23 @@ "model.fc.register_full_backward_hook(capture_gradient('fc'))\n" ] }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Download or use next token prediction dataset\n", + "The dataset used in this example is taken from [HuggingFace](https://huggingface.co/datasets/Na0s/Next_Token_Prediction_dataset). In this example, you can increase the size of the dataset to test the logging capabilities of Neptyune, but note that increasing the dataset size will increase the time taken for the full dataset to download." 
+ ] + }, { "cell_type": "code", - "execution_count": 144, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "tensor([[ 38102, 83130, 136371, 174077, 163091],\n", - " [115872, 149419, 90390, 125438, 109051],\n", - " [ 21550, 231165, 31530, 59481, 190700],\n", - " [264765, 13143, 148381, 191068, 1969],\n", - " [263624, 146997, 290042, 192130, 34321],\n", - " [ 34259, 255854, 192565, 292704, 182054],\n", - " [259433, 242532, 90467, 38266, 10173],\n", - " [ 81651, 133413, 117379, 57402, 234105]])\n" - ] - } - ], + "outputs": [], "source": [ - "\n", - "# Dummy input data (example with batch_size=3 and sequence_length=5)\n", - "input_data = torch.randint(0, params[\"vocab_size\"], (params[\"batch_size\"] * 3, params[\"sequence_length\"])) # Random word indices\n", - "target_data = torch.randint(0, params[\"vocab_size\"], (params[\"batch_size\"] * 3, params[\"sequence_length\"])) # Random target labels\n", - "\n", - "# Split the data into train and validation sets\n", - "train_input, val_input = train_test_split(input_data, test_size=0.1, random_state=42)\n", - "train_target, val_target = train_test_split(target_data, test_size=0.1, random_state=42)\n", - "\n", - "print(train_input)\n" + "# For the example, download a random subset of 10% of the original dataset\n", + "train_subset = load_dataset(\"Na0s/Next_Token_Prediction_dataset\", split=\"train[:2%]\")\n", + "validation_subset = load_dataset(\"Na0s/Next_Token_Prediction_dataset\", split=\"validation[:2%]\")" ] }, { @@ -305,21 +291,46 @@ "metadata": {}, "outputs": [], "source": [ + "class TokenizedDataset(Dataset):\n", + " def __init__(self, tokenized_data):\n", + " self.data = tokenized_data\n", + "\n", + " def __len__(self):\n", + " return len(self.data)\n", + "\n", + " def __getitem__(self, idx):\n", + " item = self.data[idx]\n", + " # Return input_ids, attention_mask, and labels as tensors\n", + " return {\n", + " 'input_ids': 
torch.tensor(item['input_ids'], dtype=torch.long),\n", + " 'attention_mask': torch.tensor(item['attention_mask'], dtype=torch.long),\n", + " 'labels': torch.tensor(item['labels'], dtype=torch.long)\n", + " }\n", + " \n", + "# Assuming tokenized_data is a list of dictionaries, each containing 'input_ids', 'attention_mask', and 'labels'\n", + "train_dataset = TokenizedDataset(train_subset) # tokenized_data is the list you want to use\n", + "train_dataloader = DataLoader(train_dataset, batch_size=8, shuffle=True)\n", "\n", - "# Manually splitting the data into train, validation, and test sets\n", - "train_size = int(0.6 * input_data.size(0)) # 60% for training\n", - "val_size = int(0.2 * input_data.size(0)) # 20% for validation\n", - "test_size = input_data.size(0) - train_size - val_size # Remaining 20% for testing\n", - "\n", - "# Creating the splits manually\n", - "train_input = input_data[:train_size]\n", - "train_target = target_data[:train_size]\n", + "val_dataset = TokenizedDataset(validation_subset) # tokenized_data is the list you want to use\n", + "val_dataloader = DataLoader(val_dataset, batch_size=8, shuffle=True)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "# Determine the vocab size of the dataset\n", + "# Flatten the list of tokenized sentences into one long list of token IDs\n", + "all_tokens = [token for sentence in train_subset[\"input_ids\"] for token in sentence]\n", "\n", - "val_input = input_data[train_size:train_size + val_size]\n", - "val_target = target_data[train_size:train_size + val_size]\n", + "# Get unique token IDs\n", + "unique_tokens = set(all_tokens)\n", "\n", - "test_input = input_data[train_size + val_size:]\n", - "test_target = target_data[train_size + val_size:]\n" + "# Vocab size is the number of unique tokens\n", + "vocab_size = max(unique_tokens) + 1 # Add 1 since token ID's start from zero\n", + "print(f\"Vocabulary size: {vocab_size}\")" ] }, { From 
48451d624a6453da303125dab570529d39afd194 Mon Sep 17 00:00:00 2001 From: LeoRoccoBreedt Date: Wed, 5 Mar 2025 14:51:03 +0100 Subject: [PATCH 025/125] refactor: update evalution function that calculates the validation losses using the data loader --- .../pytorch_text_model_debugging.ipynb | 47 ++++++++++++------- 1 file changed, 31 insertions(+), 16 deletions(-) diff --git a/integrations-and-supported-tools/pytorch/pytorch_text_model_debugging.ipynb b/integrations-and-supported-tools/pytorch/pytorch_text_model_debugging.ipynb index 2b51e73..da3905a 100644 --- a/integrations-and-supported-tools/pytorch/pytorch_text_model_debugging.ipynb +++ b/integrations-and-supported-tools/pytorch/pytorch_text_model_debugging.ipynb @@ -12,7 +12,7 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 147, "metadata": {}, "outputs": [], "source": [ @@ -144,20 +144,27 @@ }, { "cell_type": "code", - "execution_count": 123, + "execution_count": null, "metadata": {}, "outputs": [], "source": [ - "def evaluate_model(model, input, target, vocab_size):\n", - " model.eval() # set model to evaluation mode\n", - " with torch.no_grad(): # disable gradient calculation\n", - " output = model(input)\n", - " output = output.view(-1, vocab_size)\n", - " data_target = target.view(-1)\n", - " criterion = nn.CrossEntropyLoss()\n", - " loss = criterion(output, data_target)\n", - " \n", - " return loss.item()\n", + "def evaluate(model, val_dataloader, criterion, device):\n", + " model.eval() # Set the model to evaluation mode\n", + " total_loss = 0\n", + " with torch.no_grad(): # Disable gradient calculation for validation\n", + " for batch in val_dataloader:\n", + " input_ids = batch['input_ids'].to(device)\n", + " labels = batch['labels'].to(device)\n", + "\n", + " # Forward pass for validation\n", + " logits = model(input_ids) # Shape: (batch_size, seq_len, vocab_size)\n", + " \n", + " # Calculate the loss\n", + " loss = criterion(logits.view(-1, vocab_size), labels.view(-1))\n", 
+ " total_loss += loss.item()\n", + "\n", + " avg_val_loss = total_loss / len(val_dataloader)\n", + " return avg_val_loss\n", " " ] }, @@ -276,7 +283,7 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 148, "metadata": {}, "outputs": [], "source": [ @@ -287,7 +294,7 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 149, "metadata": {}, "outputs": [], "source": [ @@ -317,9 +324,17 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 150, "metadata": {}, - "outputs": [], + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Vocabulary size: 128257\n" + ] + } + ], "source": [ "# Determine the vocab size of the dataset\n", "# Flatten the list of tokenized sentences into one long list of token IDs\n", From 5b5fbdd9fd4f891a3802f825ea77da052c0bf76d Mon Sep 17 00:00:00 2001 From: LeoRoccoBreedt Date: Wed, 5 Mar 2025 15:31:53 +0100 Subject: [PATCH 026/125] refactor: update training loop to work with new data --- .../pytorch_text_model_debugging.ipynb | 284 +++++------------- 1 file changed, 81 insertions(+), 203 deletions(-) diff --git a/integrations-and-supported-tools/pytorch/pytorch_text_model_debugging.ipynb b/integrations-and-supported-tools/pytorch/pytorch_text_model_debugging.ipynb index da3905a..0e5648d 100644 --- a/integrations-and-supported-tools/pytorch/pytorch_text_model_debugging.ipynb +++ b/integrations-and-supported-tools/pytorch/pytorch_text_model_debugging.ipynb @@ -35,7 +35,6 @@ "metadata": {}, "outputs": [], "source": [ - "# TODO - improve dataset to be more realistic\n", "# TODO - capture the folowings; attentions, learning rate (per layer if needed)\n", " # complete - activations, grad norms, gradients - weights+biases, loss - agg\n", "# TODO - capture per layer losses\n", @@ -46,7 +45,7 @@ }, { "cell_type": "code", - "execution_count": 133, + "execution_count": 154, "metadata": {}, "outputs": [], "source": [ @@ -54,15 +53,14 @@ "\n", "params = {\n", 
" \"optimizer\": \"Adam\",\n", - " \"batch_size\": 3,\n", - " \"sequence_length\": 5,\n", + " \"batch_size\": 3, # TODO - remove\n", + " \"sequence_length\": 5, # TODO - remove\n", " \"learning_rate\": 0.001,\n", - " \"epochs\": 400, \n", + " \"epochs\": 5, \n", " \"device\": torch.device(\"cuda\" if torch.cuda.is_available() else \"cpu\"),\n", " \"input_features\": 256,\n", " \"n_classes\": 10, # TODO - remove\n", " \"input_size\": 28 * 28, # TODO - remove\n", - " \"vocab_size\": 300000,\n", " \"embed_size\": 1000,\n", " \"hidden_size\": 256, # hidden size for the LSTM\n", " \"dropout_prob\": 0.3\n", @@ -71,7 +69,7 @@ }, { "cell_type": "code", - "execution_count": 140, + "execution_count": 156, "metadata": {}, "outputs": [], "source": [ @@ -92,7 +90,7 @@ " \"config/sequece_length\": params[\"sequence_length\"],\n", " \"config/epochs\": params[\"epochs\"],\n", " \"config/input_size\": params[\"input_size\"],\n", - " \"data/vocab_size\": params[\"vocab_size\"],\n", + " # \"data/vocab_size\": params[\"vocab_size\"],\n", " \"data/embed_size\": params[\"embed_size\"]\n", " }\n", ")\n", @@ -144,7 +142,7 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 162, "metadata": {}, "outputs": [], "source": [ @@ -170,7 +168,7 @@ }, { "cell_type": "code", - "execution_count": 136, + "execution_count": 160, "metadata": {}, "outputs": [], "source": [ @@ -193,9 +191,9 @@ " self.hooks.append(self.fc2.register_forward_hook(self.save_activation(\"fc2\")))\n", "\n", " ## Registering hooks to track gradients\n", - " self.hooks.append(self.lstm.register_full_backward_hook(self.save_gradient(\"lstm\")))\n", - " self.hooks.append(self.fc1.register_full_backward_hook(self.save_gradient(\"fc1\")))\n", - " self.hooks.append(self.fc2.register_full_backward_hook(self.save_gradient(\"fc2\")))\n", + " #self.hooks.append(self.lstm.register_full_backward_hook(self.save_gradient(\"lstm\")))\n", + " 
#self.hooks.append(self.fc1.register_full_backward_hook(self.save_gradient(\"fc1\")))\n", + " #self.hooks.append(self.fc2.register_full_backward_hook(self.save_gradient(\"fc2\")))\n", " \n", " def forward(self, x):\n", " x = self.embedding(x)\n", @@ -229,9 +227,9 @@ " self.gradients = {}\n", "\n", "\n", - "model = SimpleLLM(params[\"vocab_size\"], params[\"embed_size\"], params[\"hidden_size\"])\n", + "model = SimpleLLM(vocab_size, params[\"embed_size\"], params[\"hidden_size\"])\n", "optimizer = optim.Adam(model.parameters(), lr = params[\"learning_rate\"])\n", - "criterion = nn.CrossEntropyLoss()\n" + "criterion = nn.CrossEntropyLoss(ignore_index=-100) # Ignore the buffering index of -100 in the dataset\n" ] }, { @@ -350,137 +348,9 @@ }, { "cell_type": "code", - "execution_count": 142, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Epoch 1/400, Loss: 13.1628\n", - "Epoch 2/400, Loss: 12.5549\n", - "Epoch 3/400, Loss: 12.4428\n", - "Epoch 4/400, Loss: 12.2459\n", - "Epoch 5/400, Loss: 11.9436\n", - "Epoch 6/400, Loss: 11.6424\n", - "Epoch 7/400, Loss: 11.2635\n", - "Epoch 8/400, Loss: 10.8578\n", - "Epoch 9/400, Loss: 10.3417\n", - "Epoch 10/400, Loss: 9.7528\n", - "Epoch 11/400, Loss: 9.0635\n", - "Epoch 12/400, Loss: 8.2568\n", - "Epoch 13/400, Loss: 7.3002\n", - "Epoch 14/400, Loss: 6.2110\n", - "Epoch 15/400, Loss: 4.9925\n", - "Epoch 16/400, Loss: 3.7230\n", - "Epoch 17/400, Loss: 2.5503\n", - "Epoch 18/400, Loss: 1.5385\n", - "Epoch 19/400, Loss: 0.7866\n", - "Epoch 20/400, Loss: 0.4000\n", - "Epoch 21/400, Loss: 0.2285\n", - "Epoch 22/400, Loss: 0.1262\n", - "Epoch 23/400, Loss: 0.0624\n", - "Epoch 24/400, Loss: 0.0344\n", - "Epoch 25/400, Loss: 0.0240\n", - "Epoch 26/400, Loss: 0.0197\n", - "Epoch 27/400, Loss: 0.0177\n", - "Epoch 28/400, Loss: 0.0167\n", - "Epoch 29/400, Loss: 0.0155\n", - "Epoch 30/400, Loss: 0.0135\n", - "Epoch 31/400, Loss: 0.0110\n", - "Epoch 32/400, Loss: 
0.0087\n", - "Epoch 33/400, Loss: 0.0071\n", - "Epoch 34/400, Loss: 0.0061\n", - "Epoch 35/400, Loss: 0.0055\n", - "Epoch 36/400, Loss: 0.0052\n", - "Epoch 37/400, Loss: 0.0049\n", - "Epoch 38/400, Loss: 0.0047\n", - "Epoch 39/400, Loss: 0.0044\n", - "Epoch 40/400, Loss: 0.0042\n", - "Epoch 41/400, Loss: 0.0040\n", - "Epoch 42/400, Loss: 0.0037\n", - "Epoch 43/400, Loss: 0.0035\n", - "Epoch 44/400, Loss: 0.0032\n", - "Epoch 45/400, Loss: 0.0030\n", - "Epoch 46/400, Loss: 0.0028\n", - "Epoch 47/400, Loss: 0.0026\n", - "Epoch 48/400, Loss: 0.0024\n", - "Epoch 49/400, Loss: 0.0022\n", - "Epoch 50/400, Loss: 0.0021\n", - "Epoch 51/400, Loss: 0.0019\n", - "Epoch 52/400, Loss: 0.0018\n", - "Epoch 53/400, Loss: 0.0017\n", - "Epoch 54/400, Loss: 0.0016\n", - "Epoch 55/400, Loss: 0.0016\n", - "Epoch 56/400, Loss: 0.0015\n", - "Epoch 57/400, Loss: 0.0014\n", - "Epoch 58/400, Loss: 0.0014\n", - "Epoch 59/400, Loss: 0.0013\n", - "Epoch 60/400, Loss: 0.0013\n", - "Epoch 61/400, Loss: 0.0012\n", - "Epoch 62/400, Loss: 0.0012\n", - "Epoch 63/400, Loss: 0.0011\n", - "Epoch 64/400, Loss: 0.0011\n", - "Epoch 65/400, Loss: 0.0011\n", - "Epoch 66/400, Loss: 0.0010\n", - "Epoch 67/400, Loss: 0.0010\n", - "Epoch 68/400, Loss: 0.0010\n", - "Epoch 69/400, Loss: 0.0010\n", - "Epoch 70/400, Loss: 0.0009\n", - "Epoch 71/400, Loss: 0.0009\n", - "Epoch 72/400, Loss: 0.0009\n", - "Epoch 73/400, Loss: 0.0009\n", - "Epoch 74/400, Loss: 0.0009\n", - "Epoch 75/400, Loss: 0.0008\n", - "Epoch 76/400, Loss: 0.0008\n", - "Epoch 77/400, Loss: 0.0008\n", - "Epoch 78/400, Loss: 0.0008\n", - "Epoch 79/400, Loss: 0.0008\n", - "Epoch 80/400, Loss: 0.0008\n", - "Epoch 81/400, Loss: 0.0008\n", - "Epoch 82/400, Loss: 0.0008\n", - "Epoch 83/400, Loss: 0.0007\n", - "Epoch 84/400, Loss: 0.0007\n", - "Epoch 85/400, Loss: 0.0007\n", - "Epoch 86/400, Loss: 0.0007\n", - "Epoch 87/400, Loss: 0.0007\n", - "Epoch 88/400, Loss: 0.0007\n", - "Epoch 89/400, Loss: 0.0007\n", - "Epoch 90/400, Loss: 0.0007\n", - "Epoch 91/400, 
Loss: 0.0007\n", - "Epoch 92/400, Loss: 0.0007\n", - "Epoch 93/400, Loss: 0.0007\n", - "Epoch 94/400, Loss: 0.0007\n", - "Epoch 95/400, Loss: 0.0007\n", - "Epoch 96/400, Loss: 0.0006\n", - "Epoch 97/400, Loss: 0.0006\n", - "Epoch 98/400, Loss: 0.0006\n", - "Epoch 99/400, Loss: 0.0006\n", - "Epoch 100/400, Loss: 0.0006\n", - "Epoch 101/400, Loss: 0.0006\n", - "Epoch 102/400, Loss: 0.0006\n", - "Epoch 103/400, Loss: 0.0006\n", - "Epoch 104/400, Loss: 0.0006\n" - ] - }, - { - "ename": "KeyboardInterrupt", - "evalue": "", - "output_type": "error", - "traceback": [ - "\u001b[1;31m---------------------------------------------------------------------------\u001b[0m", - "\u001b[1;31mKeyboardInterrupt\u001b[0m Traceback (most recent call last)", - "Cell \u001b[1;32mIn[142], line 29\u001b[0m\n\u001b[0;32m 26\u001b[0m loss\u001b[38;5;241m.\u001b[39mbackward()\n\u001b[0;32m 28\u001b[0m \u001b[38;5;66;03m# Optimizer step\u001b[39;00m\n\u001b[1;32m---> 29\u001b[0m \u001b[43moptimizer\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mstep\u001b[49m\u001b[43m(\u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m 31\u001b[0m val_loss \u001b[38;5;241m=\u001b[39m evaluate_model(model, val_input, val_target, params[\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mvocab_size\u001b[39m\u001b[38;5;124m\"\u001b[39m])\n\u001b[0;32m 33\u001b[0m \u001b[38;5;66;03m# Track activations\u001b[39;00m\n", - "File \u001b[1;32mc:\\Users\\leo.breedt\\miniconda3\\envs\\neptune_scale_py_312_base\\Lib\\site-packages\\torch\\optim\\optimizer.py:493\u001b[0m, in \u001b[0;36mOptimizer.profile_hook_step..wrapper\u001b[1;34m(*args, **kwargs)\u001b[0m\n\u001b[0;32m 488\u001b[0m \u001b[38;5;28;01melse\u001b[39;00m:\n\u001b[0;32m 489\u001b[0m \u001b[38;5;28;01mraise\u001b[39;00m \u001b[38;5;167;01mRuntimeError\u001b[39;00m(\n\u001b[0;32m 490\u001b[0m \u001b[38;5;124mf\u001b[39m\u001b[38;5;124m\"\u001b[39m\u001b[38;5;132;01m{\u001b[39;00mfunc\u001b[38;5;132;01m}\u001b[39;00m\u001b[38;5;124m must return None or a tuple of 
(new_args, new_kwargs), but got \u001b[39m\u001b[38;5;132;01m{\u001b[39;00mresult\u001b[38;5;132;01m}\u001b[39;00m\u001b[38;5;124m.\u001b[39m\u001b[38;5;124m\"\u001b[39m\n\u001b[0;32m 491\u001b[0m )\n\u001b[1;32m--> 493\u001b[0m out \u001b[38;5;241m=\u001b[39m \u001b[43mfunc\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m 494\u001b[0m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_optimizer_step_code()\n\u001b[0;32m 496\u001b[0m \u001b[38;5;66;03m# call optimizer step post hooks\u001b[39;00m\n", - "File \u001b[1;32mc:\\Users\\leo.breedt\\miniconda3\\envs\\neptune_scale_py_312_base\\Lib\\site-packages\\torch\\optim\\optimizer.py:91\u001b[0m, in \u001b[0;36m_use_grad_for_differentiable.._use_grad\u001b[1;34m(self, *args, **kwargs)\u001b[0m\n\u001b[0;32m 89\u001b[0m torch\u001b[38;5;241m.\u001b[39mset_grad_enabled(\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mdefaults[\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mdifferentiable\u001b[39m\u001b[38;5;124m\"\u001b[39m])\n\u001b[0;32m 90\u001b[0m torch\u001b[38;5;241m.\u001b[39m_dynamo\u001b[38;5;241m.\u001b[39mgraph_break()\n\u001b[1;32m---> 91\u001b[0m ret \u001b[38;5;241m=\u001b[39m \u001b[43mfunc\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m 92\u001b[0m \u001b[38;5;28;01mfinally\u001b[39;00m:\n\u001b[0;32m 93\u001b[0m torch\u001b[38;5;241m.\u001b[39m_dynamo\u001b[38;5;241m.\u001b[39mgraph_break()\n", - "File 
\u001b[1;32mc:\\Users\\leo.breedt\\miniconda3\\envs\\neptune_scale_py_312_base\\Lib\\site-packages\\torch\\optim\\adam.py:244\u001b[0m, in \u001b[0;36mAdam.step\u001b[1;34m(self, closure)\u001b[0m\n\u001b[0;32m 232\u001b[0m beta1, beta2 \u001b[38;5;241m=\u001b[39m group[\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mbetas\u001b[39m\u001b[38;5;124m\"\u001b[39m]\n\u001b[0;32m 234\u001b[0m has_complex \u001b[38;5;241m=\u001b[39m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_init_group(\n\u001b[0;32m 235\u001b[0m group,\n\u001b[0;32m 236\u001b[0m params_with_grad,\n\u001b[1;32m (...)\u001b[0m\n\u001b[0;32m 241\u001b[0m state_steps,\n\u001b[0;32m 242\u001b[0m )\n\u001b[1;32m--> 244\u001b[0m \u001b[43madam\u001b[49m\u001b[43m(\u001b[49m\n\u001b[0;32m 245\u001b[0m \u001b[43m \u001b[49m\u001b[43mparams_with_grad\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 246\u001b[0m \u001b[43m \u001b[49m\u001b[43mgrads\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 247\u001b[0m \u001b[43m \u001b[49m\u001b[43mexp_avgs\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 248\u001b[0m \u001b[43m \u001b[49m\u001b[43mexp_avg_sqs\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 249\u001b[0m \u001b[43m \u001b[49m\u001b[43mmax_exp_avg_sqs\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 250\u001b[0m \u001b[43m \u001b[49m\u001b[43mstate_steps\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 251\u001b[0m \u001b[43m \u001b[49m\u001b[43mamsgrad\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mgroup\u001b[49m\u001b[43m[\u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43mamsgrad\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m]\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 252\u001b[0m \u001b[43m \u001b[49m\u001b[43mhas_complex\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mhas_complex\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 253\u001b[0m \u001b[43m \u001b[49m\u001b[43mbeta1\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mbeta1\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 
254\u001b[0m \u001b[43m \u001b[49m\u001b[43mbeta2\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mbeta2\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 255\u001b[0m \u001b[43m \u001b[49m\u001b[43mlr\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mgroup\u001b[49m\u001b[43m[\u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43mlr\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m]\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 256\u001b[0m \u001b[43m \u001b[49m\u001b[43mweight_decay\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mgroup\u001b[49m\u001b[43m[\u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43mweight_decay\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m]\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 257\u001b[0m \u001b[43m \u001b[49m\u001b[43meps\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mgroup\u001b[49m\u001b[43m[\u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43meps\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m]\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 258\u001b[0m \u001b[43m \u001b[49m\u001b[43mmaximize\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mgroup\u001b[49m\u001b[43m[\u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43mmaximize\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m]\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 259\u001b[0m \u001b[43m \u001b[49m\u001b[43mforeach\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mgroup\u001b[49m\u001b[43m[\u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43mforeach\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m]\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 260\u001b[0m \u001b[43m \u001b[49m\u001b[43mcapturable\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mgroup\u001b[49m\u001b[43m[\u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43mcapturable\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m]\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 261\u001b[0m 
\u001b[43m \u001b[49m\u001b[43mdifferentiable\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mgroup\u001b[49m\u001b[43m[\u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43mdifferentiable\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m]\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 262\u001b[0m \u001b[43m \u001b[49m\u001b[43mfused\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mgroup\u001b[49m\u001b[43m[\u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43mfused\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m]\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 263\u001b[0m \u001b[43m \u001b[49m\u001b[43mgrad_scale\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[38;5;28;43mgetattr\u001b[39;49m\u001b[43m(\u001b[49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43mgrad_scale\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;28;43;01mNone\u001b[39;49;00m\u001b[43m)\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 264\u001b[0m \u001b[43m \u001b[49m\u001b[43mfound_inf\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[38;5;28;43mgetattr\u001b[39;49m\u001b[43m(\u001b[49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43mfound_inf\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;28;43;01mNone\u001b[39;49;00m\u001b[43m)\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 265\u001b[0m \u001b[43m \u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m 267\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m loss\n", - "File \u001b[1;32mc:\\Users\\leo.breedt\\miniconda3\\envs\\neptune_scale_py_312_base\\Lib\\site-packages\\torch\\optim\\optimizer.py:154\u001b[0m, in \u001b[0;36m_disable_dynamo_if_unsupported..wrapper..maybe_fallback\u001b[1;34m(*args, **kwargs)\u001b[0m\n\u001b[0;32m 152\u001b[0m 
\u001b[38;5;28;01mreturn\u001b[39;00m disabled_func(\u001b[38;5;241m*\u001b[39margs, \u001b[38;5;241m*\u001b[39m\u001b[38;5;241m*\u001b[39mkwargs)\n\u001b[0;32m 153\u001b[0m \u001b[38;5;28;01melse\u001b[39;00m:\n\u001b[1;32m--> 154\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[43mfunc\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n", - "File \u001b[1;32mc:\\Users\\leo.breedt\\miniconda3\\envs\\neptune_scale_py_312_base\\Lib\\site-packages\\torch\\optim\\adam.py:876\u001b[0m, in \u001b[0;36madam\u001b[1;34m(params, grads, exp_avgs, exp_avg_sqs, max_exp_avg_sqs, state_steps, foreach, capturable, differentiable, fused, grad_scale, found_inf, has_complex, amsgrad, beta1, beta2, lr, weight_decay, eps, maximize)\u001b[0m\n\u001b[0;32m 873\u001b[0m \u001b[38;5;28;01melse\u001b[39;00m:\n\u001b[0;32m 874\u001b[0m func \u001b[38;5;241m=\u001b[39m _single_tensor_adam\n\u001b[1;32m--> 876\u001b[0m \u001b[43mfunc\u001b[49m\u001b[43m(\u001b[49m\n\u001b[0;32m 877\u001b[0m \u001b[43m \u001b[49m\u001b[43mparams\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 878\u001b[0m \u001b[43m \u001b[49m\u001b[43mgrads\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 879\u001b[0m \u001b[43m \u001b[49m\u001b[43mexp_avgs\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 880\u001b[0m \u001b[43m \u001b[49m\u001b[43mexp_avg_sqs\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 881\u001b[0m \u001b[43m \u001b[49m\u001b[43mmax_exp_avg_sqs\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 882\u001b[0m \u001b[43m \u001b[49m\u001b[43mstate_steps\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 883\u001b[0m \u001b[43m \u001b[49m\u001b[43mamsgrad\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mamsgrad\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 884\u001b[0m \u001b[43m 
\u001b[49m\u001b[43mhas_complex\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mhas_complex\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 885\u001b[0m \u001b[43m \u001b[49m\u001b[43mbeta1\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mbeta1\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 886\u001b[0m \u001b[43m \u001b[49m\u001b[43mbeta2\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mbeta2\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 887\u001b[0m \u001b[43m \u001b[49m\u001b[43mlr\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mlr\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 888\u001b[0m \u001b[43m \u001b[49m\u001b[43mweight_decay\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mweight_decay\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 889\u001b[0m \u001b[43m \u001b[49m\u001b[43meps\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43meps\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 890\u001b[0m \u001b[43m \u001b[49m\u001b[43mmaximize\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mmaximize\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 891\u001b[0m \u001b[43m \u001b[49m\u001b[43mcapturable\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mcapturable\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 892\u001b[0m \u001b[43m \u001b[49m\u001b[43mdifferentiable\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mdifferentiable\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 893\u001b[0m \u001b[43m \u001b[49m\u001b[43mgrad_scale\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mgrad_scale\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 894\u001b[0m \u001b[43m \u001b[49m\u001b[43mfound_inf\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mfound_inf\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 895\u001b[0m \u001b[43m\u001b[49m\u001b[43m)\u001b[49m\n", - "File \u001b[1;32mc:\\Users\\leo.breedt\\miniconda3\\envs\\neptune_scale_py_312_base\\Lib\\site-packages\\torch\\optim\\adam.py:478\u001b[0m, in \u001b[0;36m_single_tensor_adam\u001b[1;34m(params, grads, 
exp_avgs, exp_avg_sqs, max_exp_avg_sqs, state_steps, grad_scale, found_inf, amsgrad, has_complex, beta1, beta2, lr, weight_decay, eps, maximize, capturable, differentiable)\u001b[0m\n\u001b[0;32m 475\u001b[0m \u001b[38;5;28;01melse\u001b[39;00m:\n\u001b[0;32m 476\u001b[0m denom \u001b[38;5;241m=\u001b[39m (exp_avg_sq\u001b[38;5;241m.\u001b[39msqrt() \u001b[38;5;241m/\u001b[39m bias_correction2_sqrt)\u001b[38;5;241m.\u001b[39madd_(eps)\n\u001b[1;32m--> 478\u001b[0m \u001b[43mparam\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43maddcdiv_\u001b[49m\u001b[43m(\u001b[49m\u001b[43mexp_avg\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mdenom\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mvalue\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[38;5;241;43m-\u001b[39;49m\u001b[43mstep_size\u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m 480\u001b[0m \u001b[38;5;66;03m# Lastly, switch back to complex view\u001b[39;00m\n\u001b[0;32m 481\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m amsgrad \u001b[38;5;129;01mand\u001b[39;00m torch\u001b[38;5;241m.\u001b[39mis_complex(params[i]):\n", - "\u001b[1;31mKeyboardInterrupt\u001b[0m: " - ] - } - ], + "outputs": [], "source": [ "activation_dict_mean = {}\n", "activation_dict_std = {}\n", @@ -489,71 +359,79 @@ "gradients_std = {}\n", "gradient_norms = {}\n", "\n", + "device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')\n", + "model.to(device)\n", + "step_counter = 0\n", + "\n", "# Training loop\n", "for epoch in range(params[\"epochs\"]):\n", - " optimizer.zero_grad()\n", - "\n", - " model.clear_activations()\n", - " model.clear_gradients()\n", - "\n", - " # Forward pass\n", - " output = model(train_input)\n", - " \n", - " # Reshape output and target to match the shape of CrossEntropyLoss expectations\n", - " output = output.view(-1, params[\"vocab_size\"]) # Flatten the output\n", - " target_data = train_target.view(-1) # Flatten the target data\n", - " \n", - " # Calculate loss\n", - " loss = 
criterion(output, target_data)\n", - " \n", - " # Backward pass\n", - " loss.backward()\n", - "\n", - " # Optimizer step\n", - " optimizer.step()\n", - "\n", - " val_loss = evaluate_model(model, val_input, val_target, params[\"vocab_size\"])\n", - "\n", - " # Track activations\n", - " for name, activation in model.get_activations().items():\n", - " activation_dict_mean[f\"layers/layer_{name}/activation_mean\"] = activation[0].mean().item()\n", - " activation_dict_std[f\"layers/layer_{name}/activation_std\"] = activation[0].std().item()\n", - "\n", - " # Track gradient norm\n", - " for layer, gradient in model.get_gradients().items():\n", - " grad_norms[f\"grad_norm/{name}\"] = gradient.norm().item()\n", - "\n", - " # Track gradients per layer at each epoch\n", - " for name, param in model.named_parameters():\n", - " if param is not None:\n", - " gradients_std[f\"layers/layer_{name}_std\"] = param.grad.std().item()\n", - " gradients_mean[f\"layers/layer_{name}_mean\"] = param.grad.mean().item()\n", - " gradient_norms[f\"layers/layer_{name}_norm\"] = param.grad.norm().item() # L2 norm (Euclidean norm) of the gradients\n", - "\n", - " # Output loss for this epoch\n", - " run.log_metrics(\n", - " data = {\n", - " \"metrics/train/loss\": loss.item(),\n", - " \"metrics/validation/loss\": val_loss,\n", - " \"epoch/value\": epoch,\n", - " **activation_dict_mean,\n", - " **activation_dict_std,\n", - " **grad_norms,\n", - " **gradients_std,\n", - " **gradients_mean,\n", - " **gradient_norms\n", - " },\n", - " step = epoch\n", - " )\n", - " print(f'Epoch {epoch+1}/{params[\"epochs\"]}, Loss: {loss.item():.4f}')\n", + " model.train()\n", + " total_loss = 0\n", + " for batch in train_dataloader:\n", + " step_counter += 1\n", "\n", - "test_loss = evaluate_model(model, test_input, test_target, params[\"vocab_size\"])\n", - "print(f'Test Loss: {test_loss:.4f}')\n" + " input_ids = batch['input_ids'].to(device)\n", + " labels = batch['labels'].to(device)\n", + " \n", + " 
optimizer.zero_grad()\n", + " \n", + " # Forward pass\n", + " logits = model(input_ids)\n", + " \n", + " # Compute the loss (ignore padding tokens by masking labels)\n", + " loss = criterion(logits.view(-1, vocab_size), labels.view(-1))\n", + " \n", + " # Backward pass and optimization\n", + " loss.backward()\n", + " optimizer.step()\n", + " \n", + " total_loss += loss.item()\n", + " print(f\"Step {step_counter} / {len(train_dataloader)}, Loss: {loss.item()}\")\n", + "\n", + " if step_counter % 5 == 0: # Do not need to log validation at every step, although we can\n", + " val_loss = evaluate(model, val_dataloader, criterion, device)\n", + " print(f\"Step {step_counter}, Val_loss: {val_loss}\")\n", + "\n", + " # Track activations\n", + " for name, activation in model.get_activations().items():\n", + " activation_dict_mean[f\"layers/layer_{name}/activation_mean\"] = activation[0].mean().item()\n", + " activation_dict_std[f\"layers/layer_{name}/activation_std\"] = activation[0].std().item()\n", + "\n", + " # Track gradient norm\n", + " # for layer, gradient in model.get_gradients().items():\n", + " # grad_norms[f\"grad_norm/{name}\"] = gradient.norm().item()\n", + "\n", + " # Track gradients per layer at each epoch\n", + " for name, param in model.named_parameters():\n", + " if param is not None:\n", + " gradients_std[f\"layers/layer_{name}_std\"] = param.grad.std().item()\n", + " gradients_mean[f\"layers/layer_{name}_mean\"] = param.grad.mean().item()\n", + " gradient_norms[f\"layers/layer_{name}_norm\"] = param.grad.norm().item() # L2 norm (Euclidean norm) of the gradients\n", + "\n", + " # Output loss for this epoch\n", + " run.log_metrics(\n", + " data = {\n", + " \"metrics/train/loss\": loss.item(),\n", + " \"metrics/validation/loss\": val_loss,\n", + " \"epoch/value\": epoch,\n", + " **activation_dict_mean,\n", + " **activation_dict_std,\n", + " # **grad_norms,\n", + " **gradients_std,\n", + " **gradients_mean,\n", + " **gradient_norms\n", + " },\n", + " step = 
step_counter\n", + " )\n", + " print(f\"Epoch {epoch + 1}, Loss: {total_loss / len(train_dataloader)}\")\n", + "\n", + "# test_loss = evaluate_model(model, test_input, test_target, params[\"vocab_size\"])\n", + "# print(f'Test Loss: {test_loss:.4f}')\n" ] }, { "cell_type": "code", - "execution_count": 143, + "execution_count": 164, "metadata": {}, "outputs": [ { From 8ff6f303c5b5defc81f8a30e66f8e790df390d32 Mon Sep 17 00:00:00 2001 From: LeoRoccoBreedt Date: Wed, 5 Mar 2025 15:50:17 +0100 Subject: [PATCH 027/125] chore: remove unused sections --- .../pytorch_text_model_debugging.ipynb | 126 +++++++++++------- 1 file changed, 78 insertions(+), 48 deletions(-) diff --git a/integrations-and-supported-tools/pytorch/pytorch_text_model_debugging.ipynb b/integrations-and-supported-tools/pytorch/pytorch_text_model_debugging.ipynb index 0e5648d..2b56f9d 100644 --- a/integrations-and-supported-tools/pytorch/pytorch_text_model_debugging.ipynb +++ b/integrations-and-supported-tools/pytorch/pytorch_text_model_debugging.ipynb @@ -69,7 +69,7 @@ }, { "cell_type": "code", - "execution_count": 156, + "execution_count": 169, "metadata": {}, "outputs": [], "source": [ @@ -232,45 +232,6 @@ "criterion = nn.CrossEntropyLoss(ignore_index=-100) # Ignore the buffering index of -100 in the dataset\n" ] }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "\n", - "# Define a function to capture activations\n", - "def capture_activation(name):\n", - " def hook(model, input, output):\n", - " activations.append((name, output)) # Capture activation\n", - " return hook\n", - "\n", - "# Define a function to capture gradients\n", - "def capture_gradient(name):\n", - " def hook(module, grad_input, grad_output):\n", - " gradients.append((name, grad_output)) # Capture gradients\n", - " return hook" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "\n", - "# Container for capturing 
activations and gradients\n", - "activations = []\n", - "gradients = []\n", - "\n", - "# Register hooks for LSTM and final Linear layer\n", - "model.lstm.register_forward_hook(capture_activation('lstm'))\n", - "model.fc.register_forward_hook(capture_activation('fc'))\n", - "\n", - "model.lstm.register_full_backward_hook(capture_gradient('lstm'))\n", - "model.fc.register_full_backward_hook(capture_gradient('fc'))\n" - ] - }, { "cell_type": "markdown", "metadata": {}, @@ -281,18 +242,18 @@ }, { "cell_type": "code", - "execution_count": 148, + "execution_count": 166, "metadata": {}, "outputs": [], "source": [ "# For the example, download a random subset of 10% of the original dataset\n", - "train_subset = load_dataset(\"Na0s/Next_Token_Prediction_dataset\", split=\"train[:2%]\")\n", - "validation_subset = load_dataset(\"Na0s/Next_Token_Prediction_dataset\", split=\"validation[:2%]\")" + "train_subset = load_dataset(\"Na0s/Next_Token_Prediction_dataset\", split=\"train[:1%]\")\n", + "validation_subset = load_dataset(\"Na0s/Next_Token_Prediction_dataset\", split=\"validation[:1%]\")" ] }, { "cell_type": "code", - "execution_count": 149, + "execution_count": 167, "metadata": {}, "outputs": [], "source": [ @@ -322,7 +283,7 @@ }, { "cell_type": "code", - "execution_count": 150, + "execution_count": 168, "metadata": {}, "outputs": [ { @@ -348,9 +309,78 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 170, "metadata": {}, - "outputs": [], + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Step 1 / 6862, Loss: 8.108406066894531\n", + "Step 2 / 6862, Loss: 8.41750431060791\n", + "Step 3 / 6862, Loss: 7.808852195739746\n", + "Step 4 / 6862, Loss: 7.6255950927734375\n", + "Step 5 / 6862, Loss: 7.981787204742432\n", + "Step 5, Val_loss: 8.222261428833008\n", + "Step 6 / 6862, Loss: 8.22750473022461\n", + "Step 7 / 6862, Loss: 8.084123611450195\n", + "Step 8 / 6862, Loss: 8.00427532196045\n", + "Step 9 / 6862, Loss: 
8.61939811706543\n", + "Step 10 / 6862, Loss: 8.01023006439209\n", + "Step 10, Val_loss: 8.182218432426453\n", + "Step 11 / 6862, Loss: 8.989625930786133\n", + "Step 12 / 6862, Loss: 8.146134376525879\n", + "Step 13 / 6862, Loss: 8.266355514526367\n", + "Step 14 / 6862, Loss: 8.179316520690918\n", + "Step 15 / 6862, Loss: 8.195590019226074\n", + "Step 15, Val_loss: 8.146348039309183\n", + "Step 16 / 6862, Loss: 7.857874393463135\n", + "Step 17 / 6862, Loss: 8.29499626159668\n", + "Step 18 / 6862, Loss: 8.928645133972168\n", + "Step 19 / 6862, Loss: 8.25019645690918\n", + "Step 20 / 6862, Loss: 8.065107345581055\n", + "Step 20, Val_loss: 8.129634300867716\n", + "Step 21 / 6862, Loss: 7.987353801727295\n", + "Step 22 / 6862, Loss: 8.142375946044922\n", + "Step 23 / 6862, Loss: 8.109880447387695\n", + "Step 24 / 6862, Loss: 8.218332290649414\n", + "Step 25 / 6862, Loss: 8.190092086791992\n", + "Step 25, Val_loss: 8.09989575544993\n", + "Step 26 / 6862, Loss: 7.514364242553711\n", + "Step 27 / 6862, Loss: 7.963650226593018\n", + "Step 28 / 6862, Loss: 7.415898323059082\n", + "Step 29 / 6862, Loss: 8.256692886352539\n", + "Step 30 / 6862, Loss: 7.928523063659668\n", + "Step 30, Val_loss: 8.071863492329916\n", + "Step 31 / 6862, Loss: 8.20297622680664\n", + "Step 32 / 6862, Loss: 7.520443439483643\n", + "Step 33 / 6862, Loss: 8.82754898071289\n", + "Step 34 / 6862, Loss: 7.743387699127197\n", + "Step 35 / 6862, Loss: 8.292647361755371\n", + "Step 35, Val_loss: 8.03139074643453\n", + "Step 36 / 6862, Loss: 7.725708961486816\n", + "Step 37 / 6862, Loss: 8.10973834991455\n", + "Step 38 / 6862, Loss: 8.47884464263916\n" + ] + }, + { + "ename": "KeyboardInterrupt", + "evalue": "", + "output_type": "error", + "traceback": [ + "\u001b[1;31m---------------------------------------------------------------------------\u001b[0m", + "\u001b[1;31mKeyboardInterrupt\u001b[0m Traceback (most recent call last)", + "Cell \u001b[1;32mIn[170], line 32\u001b[0m\n\u001b[0;32m 30\u001b[0m 
\u001b[38;5;66;03m# Backward pass and optimization\u001b[39;00m\n\u001b[0;32m 31\u001b[0m loss\u001b[38;5;241m.\u001b[39mbackward()\n\u001b[1;32m---> 32\u001b[0m \u001b[43moptimizer\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mstep\u001b[49m\u001b[43m(\u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m 34\u001b[0m total_loss \u001b[38;5;241m+\u001b[39m\u001b[38;5;241m=\u001b[39m loss\u001b[38;5;241m.\u001b[39mitem()\n\u001b[0;32m 35\u001b[0m \u001b[38;5;28mprint\u001b[39m(\u001b[38;5;124mf\u001b[39m\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mStep \u001b[39m\u001b[38;5;132;01m{\u001b[39;00mstep_counter\u001b[38;5;132;01m}\u001b[39;00m\u001b[38;5;124m / \u001b[39m\u001b[38;5;132;01m{\u001b[39;00m\u001b[38;5;28mlen\u001b[39m(train_dataloader)\u001b[38;5;132;01m}\u001b[39;00m\u001b[38;5;124m, Loss: \u001b[39m\u001b[38;5;132;01m{\u001b[39;00mloss\u001b[38;5;241m.\u001b[39mitem()\u001b[38;5;132;01m}\u001b[39;00m\u001b[38;5;124m\"\u001b[39m)\n", + "File \u001b[1;32mc:\\Users\\leo.breedt\\miniconda3\\envs\\neptune_scale_py_312_base\\Lib\\site-packages\\torch\\optim\\optimizer.py:493\u001b[0m, in \u001b[0;36mOptimizer.profile_hook_step..wrapper\u001b[1;34m(*args, **kwargs)\u001b[0m\n\u001b[0;32m 488\u001b[0m \u001b[38;5;28;01melse\u001b[39;00m:\n\u001b[0;32m 489\u001b[0m \u001b[38;5;28;01mraise\u001b[39;00m \u001b[38;5;167;01mRuntimeError\u001b[39;00m(\n\u001b[0;32m 490\u001b[0m \u001b[38;5;124mf\u001b[39m\u001b[38;5;124m\"\u001b[39m\u001b[38;5;132;01m{\u001b[39;00mfunc\u001b[38;5;132;01m}\u001b[39;00m\u001b[38;5;124m must return None or a tuple of (new_args, new_kwargs), but got \u001b[39m\u001b[38;5;132;01m{\u001b[39;00mresult\u001b[38;5;132;01m}\u001b[39;00m\u001b[38;5;124m.\u001b[39m\u001b[38;5;124m\"\u001b[39m\n\u001b[0;32m 491\u001b[0m )\n\u001b[1;32m--> 493\u001b[0m out \u001b[38;5;241m=\u001b[39m \u001b[43mfunc\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m 
\u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m 494\u001b[0m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_optimizer_step_code()\n\u001b[0;32m 496\u001b[0m \u001b[38;5;66;03m# call optimizer step post hooks\u001b[39;00m\n", + "File \u001b[1;32mc:\\Users\\leo.breedt\\miniconda3\\envs\\neptune_scale_py_312_base\\Lib\\site-packages\\torch\\optim\\optimizer.py:91\u001b[0m, in \u001b[0;36m_use_grad_for_differentiable.._use_grad\u001b[1;34m(self, *args, **kwargs)\u001b[0m\n\u001b[0;32m 89\u001b[0m torch\u001b[38;5;241m.\u001b[39mset_grad_enabled(\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mdefaults[\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mdifferentiable\u001b[39m\u001b[38;5;124m\"\u001b[39m])\n\u001b[0;32m 90\u001b[0m torch\u001b[38;5;241m.\u001b[39m_dynamo\u001b[38;5;241m.\u001b[39mgraph_break()\n\u001b[1;32m---> 91\u001b[0m ret \u001b[38;5;241m=\u001b[39m \u001b[43mfunc\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m 92\u001b[0m \u001b[38;5;28;01mfinally\u001b[39;00m:\n\u001b[0;32m 93\u001b[0m torch\u001b[38;5;241m.\u001b[39m_dynamo\u001b[38;5;241m.\u001b[39mgraph_break()\n", + "File \u001b[1;32mc:\\Users\\leo.breedt\\miniconda3\\envs\\neptune_scale_py_312_base\\Lib\\site-packages\\torch\\optim\\adam.py:244\u001b[0m, in \u001b[0;36mAdam.step\u001b[1;34m(self, closure)\u001b[0m\n\u001b[0;32m 232\u001b[0m beta1, beta2 \u001b[38;5;241m=\u001b[39m group[\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mbetas\u001b[39m\u001b[38;5;124m\"\u001b[39m]\n\u001b[0;32m 234\u001b[0m has_complex \u001b[38;5;241m=\u001b[39m 
\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_init_group(\n\u001b[0;32m 235\u001b[0m group,\n\u001b[0;32m 236\u001b[0m params_with_grad,\n\u001b[1;32m (...)\u001b[0m\n\u001b[0;32m 241\u001b[0m state_steps,\n\u001b[0;32m 242\u001b[0m )\n\u001b[1;32m--> 244\u001b[0m \u001b[43madam\u001b[49m\u001b[43m(\u001b[49m\n\u001b[0;32m 245\u001b[0m \u001b[43m \u001b[49m\u001b[43mparams_with_grad\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 246\u001b[0m \u001b[43m \u001b[49m\u001b[43mgrads\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 247\u001b[0m \u001b[43m \u001b[49m\u001b[43mexp_avgs\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 248\u001b[0m \u001b[43m \u001b[49m\u001b[43mexp_avg_sqs\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 249\u001b[0m \u001b[43m \u001b[49m\u001b[43mmax_exp_avg_sqs\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 250\u001b[0m \u001b[43m \u001b[49m\u001b[43mstate_steps\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 251\u001b[0m \u001b[43m \u001b[49m\u001b[43mamsgrad\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mgroup\u001b[49m\u001b[43m[\u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43mamsgrad\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m]\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 252\u001b[0m \u001b[43m \u001b[49m\u001b[43mhas_complex\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mhas_complex\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 253\u001b[0m \u001b[43m \u001b[49m\u001b[43mbeta1\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mbeta1\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 254\u001b[0m \u001b[43m \u001b[49m\u001b[43mbeta2\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mbeta2\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 255\u001b[0m \u001b[43m 
\u001b[49m\u001b[43mlr\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mgroup\u001b[49m\u001b[43m[\u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43mlr\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m]\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 256\u001b[0m \u001b[43m \u001b[49m\u001b[43mweight_decay\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mgroup\u001b[49m\u001b[43m[\u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43mweight_decay\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m]\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 257\u001b[0m \u001b[43m \u001b[49m\u001b[43meps\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mgroup\u001b[49m\u001b[43m[\u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43meps\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m]\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 258\u001b[0m \u001b[43m \u001b[49m\u001b[43mmaximize\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mgroup\u001b[49m\u001b[43m[\u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43mmaximize\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m]\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 259\u001b[0m \u001b[43m \u001b[49m\u001b[43mforeach\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mgroup\u001b[49m\u001b[43m[\u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43mforeach\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m]\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 260\u001b[0m \u001b[43m \u001b[49m\u001b[43mcapturable\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mgroup\u001b[49m\u001b[43m[\u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43mcapturable\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m]\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 261\u001b[0m \u001b[43m 
\u001b[49m\u001b[43mdifferentiable\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mgroup\u001b[49m\u001b[43m[\u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43mdifferentiable\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m]\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 262\u001b[0m \u001b[43m \u001b[49m\u001b[43mfused\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mgroup\u001b[49m\u001b[43m[\u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43mfused\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m]\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 263\u001b[0m \u001b[43m \u001b[49m\u001b[43mgrad_scale\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[38;5;28;43mgetattr\u001b[39;49m\u001b[43m(\u001b[49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43mgrad_scale\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;28;43;01mNone\u001b[39;49;00m\u001b[43m)\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 264\u001b[0m \u001b[43m \u001b[49m\u001b[43mfound_inf\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[38;5;28;43mgetattr\u001b[39;49m\u001b[43m(\u001b[49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43mfound_inf\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;28;43;01mNone\u001b[39;49;00m\u001b[43m)\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 265\u001b[0m \u001b[43m \u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m 267\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m loss\n", + "File \u001b[1;32mc:\\Users\\leo.breedt\\miniconda3\\envs\\neptune_scale_py_312_base\\Lib\\site-packages\\torch\\optim\\optimizer.py:154\u001b[0m, in \u001b[0;36m_disable_dynamo_if_unsupported..wrapper..maybe_fallback\u001b[1;34m(*args, **kwargs)\u001b[0m\n\u001b[0;32m 152\u001b[0m 
\u001b[38;5;28;01mreturn\u001b[39;00m disabled_func(\u001b[38;5;241m*\u001b[39margs, \u001b[38;5;241m*\u001b[39m\u001b[38;5;241m*\u001b[39mkwargs)\n\u001b[0;32m 153\u001b[0m \u001b[38;5;28;01melse\u001b[39;00m:\n\u001b[1;32m--> 154\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[43mfunc\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n", + "File \u001b[1;32mc:\\Users\\leo.breedt\\miniconda3\\envs\\neptune_scale_py_312_base\\Lib\\site-packages\\torch\\optim\\adam.py:876\u001b[0m, in \u001b[0;36madam\u001b[1;34m(params, grads, exp_avgs, exp_avg_sqs, max_exp_avg_sqs, state_steps, foreach, capturable, differentiable, fused, grad_scale, found_inf, has_complex, amsgrad, beta1, beta2, lr, weight_decay, eps, maximize)\u001b[0m\n\u001b[0;32m 873\u001b[0m \u001b[38;5;28;01melse\u001b[39;00m:\n\u001b[0;32m 874\u001b[0m func \u001b[38;5;241m=\u001b[39m _single_tensor_adam\n\u001b[1;32m--> 876\u001b[0m \u001b[43mfunc\u001b[49m\u001b[43m(\u001b[49m\n\u001b[0;32m 877\u001b[0m \u001b[43m \u001b[49m\u001b[43mparams\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 878\u001b[0m \u001b[43m \u001b[49m\u001b[43mgrads\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 879\u001b[0m \u001b[43m \u001b[49m\u001b[43mexp_avgs\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 880\u001b[0m \u001b[43m \u001b[49m\u001b[43mexp_avg_sqs\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 881\u001b[0m \u001b[43m \u001b[49m\u001b[43mmax_exp_avg_sqs\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 882\u001b[0m \u001b[43m \u001b[49m\u001b[43mstate_steps\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 883\u001b[0m \u001b[43m \u001b[49m\u001b[43mamsgrad\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mamsgrad\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 884\u001b[0m \u001b[43m 
\u001b[49m\u001b[43mhas_complex\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mhas_complex\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 885\u001b[0m \u001b[43m \u001b[49m\u001b[43mbeta1\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mbeta1\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 886\u001b[0m \u001b[43m \u001b[49m\u001b[43mbeta2\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mbeta2\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 887\u001b[0m \u001b[43m \u001b[49m\u001b[43mlr\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mlr\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 888\u001b[0m \u001b[43m \u001b[49m\u001b[43mweight_decay\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mweight_decay\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 889\u001b[0m \u001b[43m \u001b[49m\u001b[43meps\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43meps\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 890\u001b[0m \u001b[43m \u001b[49m\u001b[43mmaximize\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mmaximize\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 891\u001b[0m \u001b[43m \u001b[49m\u001b[43mcapturable\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mcapturable\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 892\u001b[0m \u001b[43m \u001b[49m\u001b[43mdifferentiable\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mdifferentiable\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 893\u001b[0m \u001b[43m \u001b[49m\u001b[43mgrad_scale\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mgrad_scale\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 894\u001b[0m \u001b[43m \u001b[49m\u001b[43mfound_inf\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mfound_inf\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 895\u001b[0m \u001b[43m\u001b[49m\u001b[43m)\u001b[49m\n", + "File \u001b[1;32mc:\\Users\\leo.breedt\\miniconda3\\envs\\neptune_scale_py_312_base\\Lib\\site-packages\\torch\\optim\\adam.py:425\u001b[0m, in \u001b[0;36m_single_tensor_adam\u001b[1;34m(params, grads, 
exp_avgs, exp_avg_sqs, max_exp_avg_sqs, state_steps, grad_scale, found_inf, amsgrad, has_complex, beta1, beta2, lr, weight_decay, eps, maximize, capturable, differentiable)\u001b[0m\n\u001b[0;32m 422\u001b[0m \u001b[38;5;66;03m# Decay the first and second moment running average coefficient\u001b[39;00m\n\u001b[0;32m 423\u001b[0m exp_avg\u001b[38;5;241m.\u001b[39mlerp_(grad, \u001b[38;5;241m1\u001b[39m \u001b[38;5;241m-\u001b[39m device_beta1)\n\u001b[1;32m--> 425\u001b[0m \u001b[43mexp_avg_sq\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mmul_\u001b[49m\u001b[43m(\u001b[49m\u001b[43mbeta2\u001b[49m\u001b[43m)\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43maddcmul_\u001b[49m\u001b[43m(\u001b[49m\u001b[43mgrad\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mgrad\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mconj\u001b[49m\u001b[43m(\u001b[49m\u001b[43m)\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mvalue\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[38;5;241;43m1\u001b[39;49m\u001b[43m \u001b[49m\u001b[38;5;241;43m-\u001b[39;49m\u001b[43m \u001b[49m\u001b[43mbeta2\u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m 427\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m capturable \u001b[38;5;129;01mor\u001b[39;00m differentiable:\n\u001b[0;32m 428\u001b[0m step \u001b[38;5;241m=\u001b[39m step_t\n", + "\u001b[1;31mKeyboardInterrupt\u001b[0m: " + ] + } + ], "source": [ "activation_dict_mean = {}\n", "activation_dict_std = {}\n", @@ -431,7 +461,7 @@ }, { "cell_type": "code", - "execution_count": 164, + "execution_count": 171, "metadata": {}, "outputs": [ { From b32b27bd934de77d31381f9aea42659540b83b54 Mon Sep 17 00:00:00 2001 From: LeoRoccoBreedt Date: Wed, 5 Mar 2025 15:54:10 +0100 Subject: [PATCH 028/125] chore: re-organize notebook layout --- .../pytorch_text_model_debugging.ipynb | 181 +++++++++--------- 1 file changed, 91 insertions(+), 90 deletions(-) diff --git 
a/integrations-and-supported-tools/pytorch/pytorch_text_model_debugging.ipynb b/integrations-and-supported-tools/pytorch/pytorch_text_model_debugging.ipynb index 2b56f9d..787088a 100644 --- a/integrations-and-supported-tools/pytorch/pytorch_text_model_debugging.ipynb +++ b/integrations-and-supported-tools/pytorch/pytorch_text_model_debugging.ipynb @@ -43,62 +43,6 @@ "# TODO - batch normalization" ] }, - { - "cell_type": "code", - "execution_count": 154, - "metadata": {}, - "outputs": [], - "source": [ - "# Initialize model, loss function, and optimizer\n", - "\n", - "params = {\n", - " \"optimizer\": \"Adam\",\n", - " \"batch_size\": 3, # TODO - remove\n", - " \"sequence_length\": 5, # TODO - remove\n", - " \"learning_rate\": 0.001,\n", - " \"epochs\": 5, \n", - " \"device\": torch.device(\"cuda\" if torch.cuda.is_available() else \"cpu\"),\n", - " \"input_features\": 256,\n", - " \"n_classes\": 10, # TODO - remove\n", - " \"input_size\": 28 * 28, # TODO - remove\n", - " \"embed_size\": 1000,\n", - " \"hidden_size\": 256, # hidden size for the LSTM\n", - " \"dropout_prob\": 0.3\n", - "}" - ] - }, - { - "cell_type": "code", - "execution_count": 169, - "metadata": {}, - "outputs": [], - "source": [ - "# Define Neptune parameters\n", - "from neptune_scale import Run\n", - "from uuid import uuid4\n", - "\n", - "run = Run(\n", - " project = \"leo/pytorch-tutorial\",\n", - " run_id=f\"pytorch-text-{uuid4()}\"\n", - " )\n", - "\n", - "run.log_configs(\n", - " {\n", - " \"config/learning_rate\": params[\"learning_rate\"],\n", - " \"config/optimizer\": params[\"optimizer\"],\n", - " \"config/batch_size\": params[\"batch_size\"],\n", - " \"config/sequece_length\": params[\"sequence_length\"],\n", - " \"config/epochs\": params[\"epochs\"],\n", - " \"config/input_size\": params[\"input_size\"],\n", - " # \"data/vocab_size\": params[\"vocab_size\"],\n", - " \"data/embed_size\": params[\"embed_size\"]\n", - " }\n", - ")\n", - "\n", - "run.add_tags(tags=[params[\"optimizer\"]], 
group_tags=True)\n", - "run.add_tags(tags=[\"text\", \"LLM\", \"Simple\"])" - ] - }, { "cell_type": "markdown", "metadata": {}, @@ -142,33 +86,7 @@ }, { "cell_type": "code", - "execution_count": 162, - "metadata": {}, - "outputs": [], - "source": [ - "def evaluate(model, val_dataloader, criterion, device):\n", - " model.eval() # Set the model to evaluation mode\n", - " total_loss = 0\n", - " with torch.no_grad(): # Disable gradient calculation for validation\n", - " for batch in val_dataloader:\n", - " input_ids = batch['input_ids'].to(device)\n", - " labels = batch['labels'].to(device)\n", - "\n", - " # Forward pass for validation\n", - " logits = model(input_ids) # Shape: (batch_size, seq_len, vocab_size)\n", - " \n", - " # Calculate the loss\n", - " loss = criterion(logits.view(-1, vocab_size), labels.view(-1))\n", - " total_loss += loss.item()\n", - "\n", - " avg_val_loss = total_loss / len(val_dataloader)\n", - " return avg_val_loss\n", - " " - ] - }, - { - "cell_type": "code", - "execution_count": 160, + "execution_count": null, "metadata": {}, "outputs": [], "source": [ @@ -224,12 +142,7 @@ " return self.gradients\n", " \n", " def clear_gradients(self):\n", - " self.gradients = {}\n", - "\n", - "\n", - "model = SimpleLLM(vocab_size, params[\"embed_size\"], params[\"hidden_size\"])\n", - "optimizer = optim.Adam(model.parameters(), lr = params[\"learning_rate\"])\n", - "criterion = nn.CrossEntropyLoss(ignore_index=-100) # Ignore the buffering index of -100 in the dataset\n" + " self.gradients = {}\n" ] }, { @@ -309,7 +222,90 @@ }, { "cell_type": "code", - "execution_count": 170, + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "# Initialize model, loss function, and optimizer\n", + "\n", + "params = {\n", + " \"optimizer\": \"Adam\",\n", + " \"batch_size\": 3, # TODO - remove\n", + " \"sequence_length\": 5, # TODO - remove\n", + " \"learning_rate\": 0.001,\n", + " \"epochs\": 5, \n", + " \"device\": torch.device(\"cuda\" if 
torch.cuda.is_available() else \"cpu\"),\n", + " \"input_features\": 256,\n", + " \"n_classes\": 10, # TODO - remove\n", + " \"input_size\": 28 * 28, # TODO - remove\n", + " \"embed_size\": 1000,\n", + " \"hidden_size\": 256, # hidden size for the LSTM\n", + " \"dropout_prob\": 0.3,\n", + " \"vocab_size\": vocab_size\n", + "}" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "# Define Neptune parameters\n", + "from neptune_scale import Run\n", + "from uuid import uuid4\n", + "\n", + "run = Run(\n", + " project = \"leo/pytorch-tutorial\",\n", + " run_id=f\"pytorch-text-{uuid4()}\"\n", + " )\n", + "\n", + "run.log_configs(\n", + " {\n", + " \"config/learning_rate\": params[\"learning_rate\"],\n", + " \"config/optimizer\": params[\"optimizer\"],\n", + " \"config/batch_size\": params[\"batch_size\"],\n", + " \"config/sequece_length\": params[\"sequence_length\"],\n", + " \"config/epochs\": params[\"epochs\"],\n", + " \"config/input_size\": params[\"input_size\"],\n", + " \"data/vocab_size\": params[\"vocab_size\"],\n", + " \"data/embed_size\": params[\"embed_size\"]\n", + " }\n", + ")\n", + "\n", + "run.add_tags(tags=[params[\"optimizer\"]], group_tags=True)\n", + "run.add_tags(tags=[\"text\", \"LLM\", \"Simple\"])" + ] + }, + { + "cell_type": "code", + "execution_count": 162, + "metadata": {}, + "outputs": [], + "source": [ + "def evaluate(model, val_dataloader, criterion, device):\n", + " model.eval() # Set the model to evaluation mode\n", + " total_loss = 0\n", + " with torch.no_grad(): # Disable gradient calculation for validation\n", + " for batch in val_dataloader:\n", + " input_ids = batch['input_ids'].to(device)\n", + " labels = batch['labels'].to(device)\n", + "\n", + " # Forward pass for validation\n", + " logits = model(input_ids) # Shape: (batch_size, seq_len, vocab_size)\n", + " \n", + " # Calculate the loss\n", + " loss = criterion(logits.view(-1, vocab_size), labels.view(-1))\n", + " 
total_loss += loss.item()\n", + "\n", + " avg_val_loss = total_loss / len(val_dataloader)\n", + " return avg_val_loss\n", + " " + ] + }, + { + "cell_type": "code", + "execution_count": null, "metadata": {}, "outputs": [ { @@ -389,6 +385,11 @@ "gradients_std = {}\n", "gradient_norms = {}\n", "\n", + "# Initialize model and optimizer\n", + "model = SimpleLLM(vocab_size, params[\"embed_size\"], params[\"hidden_size\"])\n", + "optimizer = optim.Adam(model.parameters(), lr = params[\"learning_rate\"])\n", + "criterion = nn.CrossEntropyLoss(ignore_index=-100) # Ignore the buffering index of -100 in the dataset\n", + "\n", "device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')\n", "model.to(device)\n", "step_counter = 0\n", From 4d659a201724feb2a3a53b2520386eae5f376585 Mon Sep 17 00:00:00 2001 From: LeoRoccoBreedt Date: Wed, 5 Mar 2025 15:59:02 +0100 Subject: [PATCH 029/125] chore: cleanup and add parameters in right place --- .../pytorch/pytorch_text_model_debugging.ipynb | 15 ++++++++------- 1 file changed, 8 insertions(+), 7 deletions(-) diff --git a/integrations-and-supported-tools/pytorch/pytorch_text_model_debugging.ipynb b/integrations-and-supported-tools/pytorch/pytorch_text_model_debugging.ipynb index 787088a..1f7cd03 100644 --- a/integrations-and-supported-tools/pytorch/pytorch_text_model_debugging.ipynb +++ b/integrations-and-supported-tools/pytorch/pytorch_text_model_debugging.ipynb @@ -86,16 +86,16 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 172, "metadata": {}, "outputs": [], "source": [ "# Define the simple LLM model with LSTM\n", "class SimpleLLM(nn.Module):\n", - " def __init__(self, vocab_size, embed_size, hidden_size):\n", + " def __init__(self, vocab_size, embed_size, hidden_size, num_layers):\n", " super(SimpleLLM, self).__init__()\n", " self.embedding = nn.Embedding(vocab_size, embed_size)\n", - " self.lstm = nn.LSTM(embed_size, 512, num_layers=1, dropout = params[\"dropout_prob\"], 
batch_first=True)\n", + " self.lstm = nn.LSTM(embed_size, 512, num_layers = num_layers, dropout = 0.3, batch_first=True)\n", " self.fc1 = nn.Linear(512, hidden_size)\n", " self.fc2 = nn.Linear(hidden_size, vocab_size)\n", "\n", @@ -132,7 +132,7 @@ " def clear_activations(self):\n", " self.activations = {}\n", "\n", - " # Function to save gradients\n", + " # Function to save gradients TODO: check where gradients should be calculated\n", " def save_gradient(self, name):\n", " def hook(module, grad_input, grad_output):\n", " self.gradients[name] = grad_output[0] # Save gradient output in the dictionary\n", @@ -222,7 +222,7 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 173, "metadata": {}, "outputs": [], "source": [ @@ -241,7 +241,8 @@ " \"embed_size\": 1000,\n", " \"hidden_size\": 256, # hidden size for the LSTM\n", " \"dropout_prob\": 0.3,\n", - " \"vocab_size\": vocab_size\n", + " \"vocab_size\": vocab_size,\n", + " \"num_lstm_layers\": 1\n", "}" ] }, @@ -386,7 +387,7 @@ "gradient_norms = {}\n", "\n", "# Initialize model and optimizer\n", - "model = SimpleLLM(vocab_size, params[\"embed_size\"], params[\"hidden_size\"])\n", + "model = SimpleLLM(params[\"vocab_size\"], params[\"embed_size\"], params[\"hidden_size\"], params[\"num_lstm_layers\"])\n", "optimizer = optim.Adam(model.parameters(), lr = params[\"learning_rate\"])\n", "criterion = nn.CrossEntropyLoss(ignore_index=-100) # Ignore the buffering index of -100 in the dataset\n", "\n", From 5e7ec6e542d448e23d2ac87741c4f28823be1fcf Mon Sep 17 00:00:00 2001 From: LeoRoccoBreedt Date: Wed, 5 Mar 2025 16:02:23 +0100 Subject: [PATCH 030/125] refactor: add all debugging metrics to the same dictionary variable --- .../pytorch_text_model_debugging.ipynb | 99 ++----------------- 1 file changed, 10 insertions(+), 89 deletions(-) diff --git a/integrations-and-supported-tools/pytorch/pytorch_text_model_debugging.ipynb b/integrations-and-supported-tools/pytorch/pytorch_text_model_debugging.ipynb 
index 1f7cd03..588aa80 100644 --- a/integrations-and-supported-tools/pytorch/pytorch_text_model_debugging.ipynb +++ b/integrations-and-supported-tools/pytorch/pytorch_text_model_debugging.ipynb @@ -248,7 +248,7 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 174, "metadata": {}, "outputs": [], "source": [ @@ -308,83 +308,9 @@ "cell_type": "code", "execution_count": null, "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Step 1 / 6862, Loss: 8.108406066894531\n", - "Step 2 / 6862, Loss: 8.41750431060791\n", - "Step 3 / 6862, Loss: 7.808852195739746\n", - "Step 4 / 6862, Loss: 7.6255950927734375\n", - "Step 5 / 6862, Loss: 7.981787204742432\n", - "Step 5, Val_loss: 8.222261428833008\n", - "Step 6 / 6862, Loss: 8.22750473022461\n", - "Step 7 / 6862, Loss: 8.084123611450195\n", - "Step 8 / 6862, Loss: 8.00427532196045\n", - "Step 9 / 6862, Loss: 8.61939811706543\n", - "Step 10 / 6862, Loss: 8.01023006439209\n", - "Step 10, Val_loss: 8.182218432426453\n", - "Step 11 / 6862, Loss: 8.989625930786133\n", - "Step 12 / 6862, Loss: 8.146134376525879\n", - "Step 13 / 6862, Loss: 8.266355514526367\n", - "Step 14 / 6862, Loss: 8.179316520690918\n", - "Step 15 / 6862, Loss: 8.195590019226074\n", - "Step 15, Val_loss: 8.146348039309183\n", - "Step 16 / 6862, Loss: 7.857874393463135\n", - "Step 17 / 6862, Loss: 8.29499626159668\n", - "Step 18 / 6862, Loss: 8.928645133972168\n", - "Step 19 / 6862, Loss: 8.25019645690918\n", - "Step 20 / 6862, Loss: 8.065107345581055\n", - "Step 20, Val_loss: 8.129634300867716\n", - "Step 21 / 6862, Loss: 7.987353801727295\n", - "Step 22 / 6862, Loss: 8.142375946044922\n", - "Step 23 / 6862, Loss: 8.109880447387695\n", - "Step 24 / 6862, Loss: 8.218332290649414\n", - "Step 25 / 6862, Loss: 8.190092086791992\n", - "Step 25, Val_loss: 8.09989575544993\n", - "Step 26 / 6862, Loss: 7.514364242553711\n", - "Step 27 / 6862, Loss: 7.963650226593018\n", - "Step 28 / 6862, Loss: 
7.415898323059082\n", - "Step 29 / 6862, Loss: 8.256692886352539\n", - "Step 30 / 6862, Loss: 7.928523063659668\n", - "Step 30, Val_loss: 8.071863492329916\n", - "Step 31 / 6862, Loss: 8.20297622680664\n", - "Step 32 / 6862, Loss: 7.520443439483643\n", - "Step 33 / 6862, Loss: 8.82754898071289\n", - "Step 34 / 6862, Loss: 7.743387699127197\n", - "Step 35 / 6862, Loss: 8.292647361755371\n", - "Step 35, Val_loss: 8.03139074643453\n", - "Step 36 / 6862, Loss: 7.725708961486816\n", - "Step 37 / 6862, Loss: 8.10973834991455\n", - "Step 38 / 6862, Loss: 8.47884464263916\n" - ] - }, - { - "ename": "KeyboardInterrupt", - "evalue": "", - "output_type": "error", - "traceback": [ - "\u001b[1;31m---------------------------------------------------------------------------\u001b[0m", - "\u001b[1;31mKeyboardInterrupt\u001b[0m Traceback (most recent call last)", - "Cell \u001b[1;32mIn[170], line 32\u001b[0m\n\u001b[0;32m 30\u001b[0m \u001b[38;5;66;03m# Backward pass and optimization\u001b[39;00m\n\u001b[0;32m 31\u001b[0m loss\u001b[38;5;241m.\u001b[39mbackward()\n\u001b[1;32m---> 32\u001b[0m \u001b[43moptimizer\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mstep\u001b[49m\u001b[43m(\u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m 34\u001b[0m total_loss \u001b[38;5;241m+\u001b[39m\u001b[38;5;241m=\u001b[39m loss\u001b[38;5;241m.\u001b[39mitem()\n\u001b[0;32m 35\u001b[0m \u001b[38;5;28mprint\u001b[39m(\u001b[38;5;124mf\u001b[39m\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mStep \u001b[39m\u001b[38;5;132;01m{\u001b[39;00mstep_counter\u001b[38;5;132;01m}\u001b[39;00m\u001b[38;5;124m / \u001b[39m\u001b[38;5;132;01m{\u001b[39;00m\u001b[38;5;28mlen\u001b[39m(train_dataloader)\u001b[38;5;132;01m}\u001b[39;00m\u001b[38;5;124m, Loss: \u001b[39m\u001b[38;5;132;01m{\u001b[39;00mloss\u001b[38;5;241m.\u001b[39mitem()\u001b[38;5;132;01m}\u001b[39;00m\u001b[38;5;124m\"\u001b[39m)\n", - "File 
\u001b[1;32mc:\\Users\\leo.breedt\\miniconda3\\envs\\neptune_scale_py_312_base\\Lib\\site-packages\\torch\\optim\\optimizer.py:493\u001b[0m, in \u001b[0;36mOptimizer.profile_hook_step..wrapper\u001b[1;34m(*args, **kwargs)\u001b[0m\n\u001b[0;32m 488\u001b[0m \u001b[38;5;28;01melse\u001b[39;00m:\n\u001b[0;32m 489\u001b[0m \u001b[38;5;28;01mraise\u001b[39;00m \u001b[38;5;167;01mRuntimeError\u001b[39;00m(\n\u001b[0;32m 490\u001b[0m \u001b[38;5;124mf\u001b[39m\u001b[38;5;124m\"\u001b[39m\u001b[38;5;132;01m{\u001b[39;00mfunc\u001b[38;5;132;01m}\u001b[39;00m\u001b[38;5;124m must return None or a tuple of (new_args, new_kwargs), but got \u001b[39m\u001b[38;5;132;01m{\u001b[39;00mresult\u001b[38;5;132;01m}\u001b[39;00m\u001b[38;5;124m.\u001b[39m\u001b[38;5;124m\"\u001b[39m\n\u001b[0;32m 491\u001b[0m )\n\u001b[1;32m--> 493\u001b[0m out \u001b[38;5;241m=\u001b[39m \u001b[43mfunc\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m 494\u001b[0m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_optimizer_step_code()\n\u001b[0;32m 496\u001b[0m \u001b[38;5;66;03m# call optimizer step post hooks\u001b[39;00m\n", - "File \u001b[1;32mc:\\Users\\leo.breedt\\miniconda3\\envs\\neptune_scale_py_312_base\\Lib\\site-packages\\torch\\optim\\optimizer.py:91\u001b[0m, in \u001b[0;36m_use_grad_for_differentiable.._use_grad\u001b[1;34m(self, *args, **kwargs)\u001b[0m\n\u001b[0;32m 89\u001b[0m torch\u001b[38;5;241m.\u001b[39mset_grad_enabled(\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mdefaults[\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mdifferentiable\u001b[39m\u001b[38;5;124m\"\u001b[39m])\n\u001b[0;32m 90\u001b[0m torch\u001b[38;5;241m.\u001b[39m_dynamo\u001b[38;5;241m.\u001b[39mgraph_break()\n\u001b[1;32m---> 91\u001b[0m ret \u001b[38;5;241m=\u001b[39m 
\u001b[43mfunc\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m 92\u001b[0m \u001b[38;5;28;01mfinally\u001b[39;00m:\n\u001b[0;32m 93\u001b[0m torch\u001b[38;5;241m.\u001b[39m_dynamo\u001b[38;5;241m.\u001b[39mgraph_break()\n", - "File \u001b[1;32mc:\\Users\\leo.breedt\\miniconda3\\envs\\neptune_scale_py_312_base\\Lib\\site-packages\\torch\\optim\\adam.py:244\u001b[0m, in \u001b[0;36mAdam.step\u001b[1;34m(self, closure)\u001b[0m\n\u001b[0;32m 232\u001b[0m beta1, beta2 \u001b[38;5;241m=\u001b[39m group[\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mbetas\u001b[39m\u001b[38;5;124m\"\u001b[39m]\n\u001b[0;32m 234\u001b[0m has_complex \u001b[38;5;241m=\u001b[39m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_init_group(\n\u001b[0;32m 235\u001b[0m group,\n\u001b[0;32m 236\u001b[0m params_with_grad,\n\u001b[1;32m (...)\u001b[0m\n\u001b[0;32m 241\u001b[0m state_steps,\n\u001b[0;32m 242\u001b[0m )\n\u001b[1;32m--> 244\u001b[0m \u001b[43madam\u001b[49m\u001b[43m(\u001b[49m\n\u001b[0;32m 245\u001b[0m \u001b[43m \u001b[49m\u001b[43mparams_with_grad\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 246\u001b[0m \u001b[43m \u001b[49m\u001b[43mgrads\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 247\u001b[0m \u001b[43m \u001b[49m\u001b[43mexp_avgs\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 248\u001b[0m \u001b[43m \u001b[49m\u001b[43mexp_avg_sqs\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 249\u001b[0m \u001b[43m \u001b[49m\u001b[43mmax_exp_avg_sqs\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 250\u001b[0m \u001b[43m \u001b[49m\u001b[43mstate_steps\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 251\u001b[0m \u001b[43m 
\u001b[49m\u001b[43mamsgrad\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mgroup\u001b[49m\u001b[43m[\u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43mamsgrad\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m]\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 252\u001b[0m \u001b[43m \u001b[49m\u001b[43mhas_complex\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mhas_complex\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 253\u001b[0m \u001b[43m \u001b[49m\u001b[43mbeta1\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mbeta1\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 254\u001b[0m \u001b[43m \u001b[49m\u001b[43mbeta2\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mbeta2\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 255\u001b[0m \u001b[43m \u001b[49m\u001b[43mlr\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mgroup\u001b[49m\u001b[43m[\u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43mlr\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m]\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 256\u001b[0m \u001b[43m \u001b[49m\u001b[43mweight_decay\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mgroup\u001b[49m\u001b[43m[\u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43mweight_decay\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m]\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 257\u001b[0m \u001b[43m \u001b[49m\u001b[43meps\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mgroup\u001b[49m\u001b[43m[\u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43meps\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m]\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 258\u001b[0m \u001b[43m \u001b[49m\u001b[43mmaximize\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mgroup\u001b[49m\u001b[43m[\u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43mmaximize\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m]\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 259\u001b[0m \u001b[43m 
\u001b[49m\u001b[43mforeach\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mgroup\u001b[49m\u001b[43m[\u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43mforeach\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m]\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 260\u001b[0m \u001b[43m \u001b[49m\u001b[43mcapturable\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mgroup\u001b[49m\u001b[43m[\u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43mcapturable\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m]\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 261\u001b[0m \u001b[43m \u001b[49m\u001b[43mdifferentiable\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mgroup\u001b[49m\u001b[43m[\u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43mdifferentiable\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m]\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 262\u001b[0m \u001b[43m \u001b[49m\u001b[43mfused\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mgroup\u001b[49m\u001b[43m[\u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43mfused\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m]\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 263\u001b[0m \u001b[43m \u001b[49m\u001b[43mgrad_scale\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[38;5;28;43mgetattr\u001b[39;49m\u001b[43m(\u001b[49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43mgrad_scale\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;28;43;01mNone\u001b[39;49;00m\u001b[43m)\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 264\u001b[0m \u001b[43m \u001b[49m\u001b[43mfound_inf\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[38;5;28;43mgetattr\u001b[39;49m\u001b[43m(\u001b[49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[43m,\u001b[49m\u001b[43m 
\u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43mfound_inf\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;28;43;01mNone\u001b[39;49;00m\u001b[43m)\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 265\u001b[0m \u001b[43m \u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m 267\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m loss\n", - "File \u001b[1;32mc:\\Users\\leo.breedt\\miniconda3\\envs\\neptune_scale_py_312_base\\Lib\\site-packages\\torch\\optim\\optimizer.py:154\u001b[0m, in \u001b[0;36m_disable_dynamo_if_unsupported..wrapper..maybe_fallback\u001b[1;34m(*args, **kwargs)\u001b[0m\n\u001b[0;32m 152\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m disabled_func(\u001b[38;5;241m*\u001b[39margs, \u001b[38;5;241m*\u001b[39m\u001b[38;5;241m*\u001b[39mkwargs)\n\u001b[0;32m 153\u001b[0m \u001b[38;5;28;01melse\u001b[39;00m:\n\u001b[1;32m--> 154\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[43mfunc\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n", - "File \u001b[1;32mc:\\Users\\leo.breedt\\miniconda3\\envs\\neptune_scale_py_312_base\\Lib\\site-packages\\torch\\optim\\adam.py:876\u001b[0m, in \u001b[0;36madam\u001b[1;34m(params, grads, exp_avgs, exp_avg_sqs, max_exp_avg_sqs, state_steps, foreach, capturable, differentiable, fused, grad_scale, found_inf, has_complex, amsgrad, beta1, beta2, lr, weight_decay, eps, maximize)\u001b[0m\n\u001b[0;32m 873\u001b[0m \u001b[38;5;28;01melse\u001b[39;00m:\n\u001b[0;32m 874\u001b[0m func \u001b[38;5;241m=\u001b[39m _single_tensor_adam\n\u001b[1;32m--> 876\u001b[0m \u001b[43mfunc\u001b[49m\u001b[43m(\u001b[49m\n\u001b[0;32m 877\u001b[0m \u001b[43m \u001b[49m\u001b[43mparams\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 878\u001b[0m \u001b[43m 
\u001b[49m\u001b[43mgrads\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 879\u001b[0m \u001b[43m \u001b[49m\u001b[43mexp_avgs\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 880\u001b[0m \u001b[43m \u001b[49m\u001b[43mexp_avg_sqs\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 881\u001b[0m \u001b[43m \u001b[49m\u001b[43mmax_exp_avg_sqs\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 882\u001b[0m \u001b[43m \u001b[49m\u001b[43mstate_steps\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 883\u001b[0m \u001b[43m \u001b[49m\u001b[43mamsgrad\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mamsgrad\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 884\u001b[0m \u001b[43m \u001b[49m\u001b[43mhas_complex\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mhas_complex\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 885\u001b[0m \u001b[43m \u001b[49m\u001b[43mbeta1\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mbeta1\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 886\u001b[0m \u001b[43m \u001b[49m\u001b[43mbeta2\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mbeta2\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 887\u001b[0m \u001b[43m \u001b[49m\u001b[43mlr\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mlr\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 888\u001b[0m \u001b[43m \u001b[49m\u001b[43mweight_decay\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mweight_decay\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 889\u001b[0m \u001b[43m \u001b[49m\u001b[43meps\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43meps\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 890\u001b[0m \u001b[43m \u001b[49m\u001b[43mmaximize\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mmaximize\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 891\u001b[0m \u001b[43m \u001b[49m\u001b[43mcapturable\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mcapturable\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 892\u001b[0m \u001b[43m 
\u001b[49m\u001b[43mdifferentiable\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mdifferentiable\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 893\u001b[0m \u001b[43m \u001b[49m\u001b[43mgrad_scale\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mgrad_scale\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 894\u001b[0m \u001b[43m \u001b[49m\u001b[43mfound_inf\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mfound_inf\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 895\u001b[0m \u001b[43m\u001b[49m\u001b[43m)\u001b[49m\n", - "File \u001b[1;32mc:\\Users\\leo.breedt\\miniconda3\\envs\\neptune_scale_py_312_base\\Lib\\site-packages\\torch\\optim\\adam.py:425\u001b[0m, in \u001b[0;36m_single_tensor_adam\u001b[1;34m(params, grads, exp_avgs, exp_avg_sqs, max_exp_avg_sqs, state_steps, grad_scale, found_inf, amsgrad, has_complex, beta1, beta2, lr, weight_decay, eps, maximize, capturable, differentiable)\u001b[0m\n\u001b[0;32m 422\u001b[0m \u001b[38;5;66;03m# Decay the first and second moment running average coefficient\u001b[39;00m\n\u001b[0;32m 423\u001b[0m exp_avg\u001b[38;5;241m.\u001b[39mlerp_(grad, \u001b[38;5;241m1\u001b[39m \u001b[38;5;241m-\u001b[39m device_beta1)\n\u001b[1;32m--> 425\u001b[0m \u001b[43mexp_avg_sq\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mmul_\u001b[49m\u001b[43m(\u001b[49m\u001b[43mbeta2\u001b[49m\u001b[43m)\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43maddcmul_\u001b[49m\u001b[43m(\u001b[49m\u001b[43mgrad\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mgrad\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mconj\u001b[49m\u001b[43m(\u001b[49m\u001b[43m)\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mvalue\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[38;5;241;43m1\u001b[39;49m\u001b[43m \u001b[49m\u001b[38;5;241;43m-\u001b[39;49m\u001b[43m \u001b[49m\u001b[43mbeta2\u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m 427\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m capturable \u001b[38;5;129;01mor\u001b[39;00m 
differentiable:\n\u001b[0;32m 428\u001b[0m step \u001b[38;5;241m=\u001b[39m step_t\n", - "\u001b[1;31mKeyboardInterrupt\u001b[0m: " - ] - } - ], + "outputs": [], "source": [ - "activation_dict_mean = {}\n", - "activation_dict_std = {}\n", - "grad_norms = {}\n", - "gradients_mean = {}\n", - "gradients_std = {}\n", - "gradient_norms = {}\n", + "debug_metrics = {}\n", "\n", "# Initialize model and optimizer\n", "model = SimpleLLM(params[\"vocab_size\"], params[\"embed_size\"], params[\"hidden_size\"], params[\"num_lstm_layers\"])\n", @@ -426,8 +352,8 @@ "\n", " # Track activations\n", " for name, activation in model.get_activations().items():\n", - " activation_dict_mean[f\"layers/layer_{name}/activation_mean\"] = activation[0].mean().item()\n", - " activation_dict_std[f\"layers/layer_{name}/activation_std\"] = activation[0].std().item()\n", + " debug_metrics[f\"layers/layer_{name}/activation_mean\"] = activation[0].mean().item()\n", + " debug_metrics[f\"layers/layer_{name}/activation_std\"] = activation[0].std().item()\n", "\n", " # Track gradient norm\n", " # for layer, gradient in model.get_gradients().items():\n", @@ -436,9 +362,9 @@ " # Track gradients per layer at each epoch\n", " for name, param in model.named_parameters():\n", " if param is not None:\n", - " gradients_std[f\"layers/layer_{name}_std\"] = param.grad.std().item()\n", - " gradients_mean[f\"layers/layer_{name}_mean\"] = param.grad.mean().item()\n", - " gradient_norms[f\"layers/layer_{name}_norm\"] = param.grad.norm().item() # L2 norm (Euclidean norm) of the gradients\n", + " debug_metrics[f\"layers/layer_{name}_std\"] = param.grad.std().item()\n", + " debug_metrics[f\"layers/layer_{name}_mean\"] = param.grad.mean().item()\n", + " debug_metrics[f\"layers/layer_{name}_norm\"] = param.grad.norm().item() # L2 norm (Euclidean norm) of the gradients\n", "\n", " # Output loss for this epoch\n", " run.log_metrics(\n", @@ -446,12 +372,7 @@ " \"metrics/train/loss\": loss.item(),\n", " 
\"metrics/validation/loss\": val_loss,\n", " \"epoch/value\": epoch,\n", - " **activation_dict_mean,\n", - " **activation_dict_std,\n", - " # **grad_norms,\n", - " **gradients_std,\n", - " **gradients_mean,\n", - " **gradient_norms\n", + " **debug_metrics\n", " },\n", " step = step_counter\n", " )\n", @@ -463,7 +384,7 @@ }, { "cell_type": "code", - "execution_count": 171, + "execution_count": 176, "metadata": {}, "outputs": [ { From d27c2476a313bec6a97a9ea3d8b776ac5dec3ec7 Mon Sep 17 00:00:00 2001 From: LeoRoccoBreedt Date: Wed, 5 Mar 2025 17:21:55 +0100 Subject: [PATCH 031/125] chore: change LSTM layers to see response in logging --- .../pytorch_text_model_debugging.ipynb | 74 +++++++++++++++++-- 1 file changed, 67 insertions(+), 7 deletions(-) diff --git a/integrations-and-supported-tools/pytorch/pytorch_text_model_debugging.ipynb b/integrations-and-supported-tools/pytorch/pytorch_text_model_debugging.ipynb index 588aa80..a455f6d 100644 --- a/integrations-and-supported-tools/pytorch/pytorch_text_model_debugging.ipynb +++ b/integrations-and-supported-tools/pytorch/pytorch_text_model_debugging.ipynb @@ -220,9 +220,16 @@ "print(f\"Vocabulary size: {vocab_size}\")" ] }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Everytime a new LSTM layer is added when updating the `num_lstm_layers` parameter is changed, 12 new metrics will be logged which correspond to mean, std and norm that are calculated. 
" + ] + }, { "cell_type": "code", - "execution_count": 173, + "execution_count": 183, "metadata": {}, "outputs": [], "source": [ @@ -242,13 +249,13 @@ " \"hidden_size\": 256, # hidden size for the LSTM\n", " \"dropout_prob\": 0.3,\n", " \"vocab_size\": vocab_size,\n", - " \"num_lstm_layers\": 1\n", + " \"num_lstm_layers\": 3\n", "}" ] }, { "cell_type": "code", - "execution_count": 174, + "execution_count": 187, "metadata": {}, "outputs": [], "source": [ @@ -269,6 +276,7 @@ " \"config/sequece_length\": params[\"sequence_length\"],\n", " \"config/epochs\": params[\"epochs\"],\n", " \"config/input_size\": params[\"input_size\"],\n", + " \"config/num_lstm_layers\" : params[\"num_lstm_layers\"],\n", " \"data/vocab_size\": params[\"vocab_size\"],\n", " \"data/embed_size\": params[\"embed_size\"]\n", " }\n", @@ -280,7 +288,7 @@ }, { "cell_type": "code", - "execution_count": 162, + "execution_count": 179, "metadata": {}, "outputs": [], "source": [ @@ -306,9 +314,61 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 188, "metadata": {}, - "outputs": [], + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Step 1 / 6862, Loss: 11.761494636535645\n", + "Step 2 / 6862, Loss: 11.750707626342773\n", + "Step 3 / 6862, Loss: 11.729766845703125\n", + "Step 4 / 6862, Loss: 11.664655685424805\n", + "Step 5 / 6862, Loss: 11.283023834228516\n", + "Step 5, Val_loss: 10.823039770126343\n", + "Step 6 / 6862, Loss: 10.765737533569336\n", + "Step 7 / 6862, Loss: 10.121588706970215\n", + "Step 8 / 6862, Loss: 9.69588565826416\n", + "Step 9 / 6862, Loss: 9.501729965209961\n", + "Step 10 / 6862, Loss: 10.076082229614258\n", + "Step 10, Val_loss: 9.637478192647299\n", + "Step 11 / 6862, Loss: 10.335653305053711\n", + "Step 12 / 6862, Loss: 9.704903602600098\n", + "Step 13 / 6862, Loss: 9.107070922851562\n", + "Step 14 / 6862, Loss: 8.985576629638672\n", + "Step 15 / 6862, Loss: 8.823647499084473\n", + "Step 15, Val_loss: 
9.185553391774496\n", + "Step 16 / 6862, Loss: 9.310721397399902\n", + "Step 17 / 6862, Loss: 9.178205490112305\n", + "Step 18 / 6862, Loss: 9.248604774475098\n", + "Step 19 / 6862, Loss: 9.397645950317383\n", + "Step 20 / 6862, Loss: 9.491145133972168\n", + "Step 20, Val_loss: 9.07630173365275\n", + "Step 21 / 6862, Loss: 8.726667404174805\n", + "Step 22 / 6862, Loss: 8.85219955444336\n", + "Step 23 / 6862, Loss: 9.02513313293457\n" + ] + }, + { + "ename": "KeyboardInterrupt", + "evalue": "", + "output_type": "error", + "traceback": [ + "\u001b[1;31m---------------------------------------------------------------------------\u001b[0m", + "\u001b[1;31mKeyboardInterrupt\u001b[0m Traceback (most recent call last)", + "Cell \u001b[1;32mIn[188], line 25\u001b[0m\n\u001b[0;32m 22\u001b[0m optimizer\u001b[38;5;241m.\u001b[39mzero_grad()\n\u001b[0;32m 24\u001b[0m \u001b[38;5;66;03m# Forward pass\u001b[39;00m\n\u001b[1;32m---> 25\u001b[0m logits \u001b[38;5;241m=\u001b[39m \u001b[43mmodel\u001b[49m\u001b[43m(\u001b[49m\u001b[43minput_ids\u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m 27\u001b[0m \u001b[38;5;66;03m# Compute the loss (ignore padding tokens by masking labels)\u001b[39;00m\n\u001b[0;32m 28\u001b[0m loss \u001b[38;5;241m=\u001b[39m criterion(logits\u001b[38;5;241m.\u001b[39mview(\u001b[38;5;241m-\u001b[39m\u001b[38;5;241m1\u001b[39m, vocab_size), labels\u001b[38;5;241m.\u001b[39mview(\u001b[38;5;241m-\u001b[39m\u001b[38;5;241m1\u001b[39m))\n", + "File \u001b[1;32mc:\\Users\\leo.breedt\\miniconda3\\envs\\neptune_scale_py_312_base\\Lib\\site-packages\\torch\\nn\\modules\\module.py:1739\u001b[0m, in \u001b[0;36mModule._wrapped_call_impl\u001b[1;34m(self, *args, **kwargs)\u001b[0m\n\u001b[0;32m 1737\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_compiled_call_impl(\u001b[38;5;241m*\u001b[39margs, \u001b[38;5;241m*\u001b[39m\u001b[38;5;241m*\u001b[39mkwargs) \u001b[38;5;66;03m# type: 
ignore[misc]\u001b[39;00m\n\u001b[0;32m 1738\u001b[0m \u001b[38;5;28;01melse\u001b[39;00m:\n\u001b[1;32m-> 1739\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_call_impl\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n", + "File \u001b[1;32mc:\\Users\\leo.breedt\\miniconda3\\envs\\neptune_scale_py_312_base\\Lib\\site-packages\\torch\\nn\\modules\\module.py:1750\u001b[0m, in \u001b[0;36mModule._call_impl\u001b[1;34m(self, *args, **kwargs)\u001b[0m\n\u001b[0;32m 1745\u001b[0m \u001b[38;5;66;03m# If we don't have any hooks, we want to skip the rest of the logic in\u001b[39;00m\n\u001b[0;32m 1746\u001b[0m \u001b[38;5;66;03m# this function, and just call forward.\u001b[39;00m\n\u001b[0;32m 1747\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;129;01mnot\u001b[39;00m (\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_backward_hooks \u001b[38;5;129;01mor\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_backward_pre_hooks \u001b[38;5;129;01mor\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_forward_hooks \u001b[38;5;129;01mor\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_forward_pre_hooks\n\u001b[0;32m 1748\u001b[0m \u001b[38;5;129;01mor\u001b[39;00m _global_backward_pre_hooks \u001b[38;5;129;01mor\u001b[39;00m _global_backward_hooks\n\u001b[0;32m 1749\u001b[0m \u001b[38;5;129;01mor\u001b[39;00m _global_forward_hooks \u001b[38;5;129;01mor\u001b[39;00m _global_forward_pre_hooks):\n\u001b[1;32m-> 1750\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[43mforward_call\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m 
\u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m 1752\u001b[0m result \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;01mNone\u001b[39;00m\n\u001b[0;32m 1753\u001b[0m called_always_called_hooks \u001b[38;5;241m=\u001b[39m \u001b[38;5;28mset\u001b[39m()\n", + "Cell \u001b[1;32mIn[172], line 28\u001b[0m, in \u001b[0;36mSimpleLLM.forward\u001b[1;34m(self, x)\u001b[0m\n\u001b[0;32m 26\u001b[0m lstm_out, _ \u001b[38;5;241m=\u001b[39m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mlstm(x) \u001b[38;5;66;03m# LSTM returns output and hidden/cell state tuple\u001b[39;00m\n\u001b[0;32m 27\u001b[0m out \u001b[38;5;241m=\u001b[39m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mfc1(lstm_out) \u001b[38;5;66;03m# Use the last output from the LSTM\u001b[39;00m\n\u001b[1;32m---> 28\u001b[0m out \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mfc2\u001b[49m\u001b[43m(\u001b[49m\u001b[43mout\u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m 29\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m out\n", + "File \u001b[1;32mc:\\Users\\leo.breedt\\miniconda3\\envs\\neptune_scale_py_312_base\\Lib\\site-packages\\torch\\nn\\modules\\module.py:1739\u001b[0m, in \u001b[0;36mModule._wrapped_call_impl\u001b[1;34m(self, *args, **kwargs)\u001b[0m\n\u001b[0;32m 1737\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_compiled_call_impl(\u001b[38;5;241m*\u001b[39margs, \u001b[38;5;241m*\u001b[39m\u001b[38;5;241m*\u001b[39mkwargs) \u001b[38;5;66;03m# type: ignore[misc]\u001b[39;00m\n\u001b[0;32m 1738\u001b[0m \u001b[38;5;28;01melse\u001b[39;00m:\n\u001b[1;32m-> 1739\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m 
\u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_call_impl\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n", + "File \u001b[1;32mc:\\Users\\leo.breedt\\miniconda3\\envs\\neptune_scale_py_312_base\\Lib\\site-packages\\torch\\nn\\modules\\module.py:1845\u001b[0m, in \u001b[0;36mModule._call_impl\u001b[1;34m(self, *args, **kwargs)\u001b[0m\n\u001b[0;32m 1842\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m inner()\n\u001b[0;32m 1844\u001b[0m \u001b[38;5;28;01mtry\u001b[39;00m:\n\u001b[1;32m-> 1845\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[43minner\u001b[49m\u001b[43m(\u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m 1846\u001b[0m \u001b[38;5;28;01mexcept\u001b[39;00m \u001b[38;5;167;01mException\u001b[39;00m:\n\u001b[0;32m 1847\u001b[0m \u001b[38;5;66;03m# run always called hooks if they have not already been run\u001b[39;00m\n\u001b[0;32m 1848\u001b[0m \u001b[38;5;66;03m# For now only forward hooks have the always_call option but perhaps\u001b[39;00m\n\u001b[0;32m 1849\u001b[0m \u001b[38;5;66;03m# this functionality should be added to full backward hooks as well.\u001b[39;00m\n\u001b[0;32m 1850\u001b[0m \u001b[38;5;28;01mfor\u001b[39;00m hook_id, hook \u001b[38;5;129;01min\u001b[39;00m _global_forward_hooks\u001b[38;5;241m.\u001b[39mitems():\n", + "File \u001b[1;32mc:\\Users\\leo.breedt\\miniconda3\\envs\\neptune_scale_py_312_base\\Lib\\site-packages\\torch\\nn\\modules\\module.py:1793\u001b[0m, in \u001b[0;36mModule._call_impl..inner\u001b[1;34m()\u001b[0m\n\u001b[0;32m 1790\u001b[0m bw_hook \u001b[38;5;241m=\u001b[39m BackwardHook(\u001b[38;5;28mself\u001b[39m, full_backward_hooks, backward_pre_hooks)\n\u001b[0;32m 1791\u001b[0m args \u001b[38;5;241m=\u001b[39m bw_hook\u001b[38;5;241m.\u001b[39msetup_input_hook(args)\n\u001b[1;32m-> 
1793\u001b[0m result \u001b[38;5;241m=\u001b[39m \u001b[43mforward_call\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m 1794\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m _global_forward_hooks \u001b[38;5;129;01mor\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_forward_hooks:\n\u001b[0;32m 1795\u001b[0m \u001b[38;5;28;01mfor\u001b[39;00m hook_id, hook \u001b[38;5;129;01min\u001b[39;00m (\n\u001b[0;32m 1796\u001b[0m \u001b[38;5;241m*\u001b[39m_global_forward_hooks\u001b[38;5;241m.\u001b[39mitems(),\n\u001b[0;32m 1797\u001b[0m \u001b[38;5;241m*\u001b[39m\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_forward_hooks\u001b[38;5;241m.\u001b[39mitems(),\n\u001b[0;32m 1798\u001b[0m ):\n\u001b[0;32m 1799\u001b[0m \u001b[38;5;66;03m# mark that always called hook is run\u001b[39;00m\n", + "File \u001b[1;32mc:\\Users\\leo.breedt\\miniconda3\\envs\\neptune_scale_py_312_base\\Lib\\site-packages\\torch\\nn\\modules\\linear.py:125\u001b[0m, in \u001b[0;36mLinear.forward\u001b[1;34m(self, input)\u001b[0m\n\u001b[0;32m 124\u001b[0m \u001b[38;5;28;01mdef\u001b[39;00m \u001b[38;5;21mforward\u001b[39m(\u001b[38;5;28mself\u001b[39m, \u001b[38;5;28minput\u001b[39m: Tensor) \u001b[38;5;241m-\u001b[39m\u001b[38;5;241m>\u001b[39m Tensor:\n\u001b[1;32m--> 125\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[43mF\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mlinear\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;28;43minput\u001b[39;49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mweight\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mbias\u001b[49m\u001b[43m)\u001b[49m\n", + 
"\u001b[1;31mKeyboardInterrupt\u001b[0m: " + ] + } + ], "source": [ "debug_metrics = {}\n", "\n", @@ -384,7 +444,7 @@ }, { "cell_type": "code", - "execution_count": 176, + "execution_count": 189, "metadata": {}, "outputs": [ { From dc4b63d6724c81e2200fc53f27e20f3d4a9277a6 Mon Sep 17 00:00:00 2001 From: LeoRoccoBreedt Date: Thu, 6 Mar 2025 13:53:34 +0100 Subject: [PATCH 032/125] refactor: update location where model.train() is called in training loop --- .../pytorch/pytorch_text_model_debugging.ipynb | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/integrations-and-supported-tools/pytorch/pytorch_text_model_debugging.ipynb b/integrations-and-supported-tools/pytorch/pytorch_text_model_debugging.ipynb index a455f6d..aa1047e 100644 --- a/integrations-and-supported-tools/pytorch/pytorch_text_model_debugging.ipynb +++ b/integrations-and-supported-tools/pytorch/pytorch_text_model_debugging.ipynb @@ -314,7 +314,7 @@ }, { "cell_type": "code", - "execution_count": 188, + "execution_count": null, "metadata": {}, "outputs": [ { @@ -383,9 +383,9 @@ "\n", "# Training loop\n", "for epoch in range(params[\"epochs\"]):\n", - " model.train()\n", " total_loss = 0\n", " for batch in train_dataloader:\n", + " model.train()\n", " step_counter += 1\n", "\n", " input_ids = batch['input_ids'].to(device)\n", From 1a8d02155083a14d8d6050d4b9a99c9947c4435a Mon Sep 17 00:00:00 2001 From: LeoRoccoBreedt Date: Thu, 6 Mar 2025 16:12:16 +0100 Subject: [PATCH 033/125] refactor: update how HF dataset is downloaded to only download a subset --- .../pytorch_text_model_debugging.ipynb | 70 ++++++++----------- 1 file changed, 30 insertions(+), 40 deletions(-) diff --git a/integrations-and-supported-tools/pytorch/pytorch_text_model_debugging.ipynb b/integrations-and-supported-tools/pytorch/pytorch_text_model_debugging.ipynb index aa1047e..c1ac4c7 100644 --- a/integrations-and-supported-tools/pytorch/pytorch_text_model_debugging.ipynb +++ 
b/integrations-and-supported-tools/pytorch/pytorch_text_model_debugging.ipynb @@ -155,18 +155,23 @@ }, { "cell_type": "code", - "execution_count": 166, + "execution_count": 202, "metadata": {}, "outputs": [], "source": [ "# For the example, download a random subset of 10% of the original dataset\n", - "train_subset = load_dataset(\"Na0s/Next_Token_Prediction_dataset\", split=\"train[:1%]\")\n", - "validation_subset = load_dataset(\"Na0s/Next_Token_Prediction_dataset\", split=\"validation[:1%]\")" + "base_url = \"https://huggingface.co/datasets/Na0s/Next_Token_Prediction_dataset/resolve/main/data/\"\n", + "data_files = {\"train\": base_url + \"train-0000*-of-00067.parquet\", # download only the first 10 files from the HF dataset\n", + " \"validation\": base_url + \"validation-00000-of-00001.parquet\"} #doanload the complete validation dataset\n", + "\n", + "data_subset = load_dataset(\"parquet\", data_files = data_files)\n", + "train_subset = data_subset[\"train\"]\n", + "validation_subset = data_subset[\"validation\"]" ] }, { "cell_type": "code", - "execution_count": 167, + "execution_count": 203, "metadata": {}, "outputs": [], "source": [ @@ -196,7 +201,7 @@ }, { "cell_type": "code", - "execution_count": 168, + "execution_count": 205, "metadata": {}, "outputs": [ { @@ -229,7 +234,7 @@ }, { "cell_type": "code", - "execution_count": 183, + "execution_count": 206, "metadata": {}, "outputs": [], "source": [ @@ -255,7 +260,7 @@ }, { "cell_type": "code", - "execution_count": 187, + "execution_count": 207, "metadata": {}, "outputs": [], "source": [ @@ -288,7 +293,7 @@ }, { "cell_type": "code", - "execution_count": 179, + "execution_count": 208, "metadata": {}, "outputs": [], "source": [ @@ -314,40 +319,24 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 209, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ - "Step 1 / 6862, Loss: 11.761494636535645\n", - "Step 2 / 6862, Loss: 11.750707626342773\n", - "Step 3 
/ 6862, Loss: 11.729766845703125\n", - "Step 4 / 6862, Loss: 11.664655685424805\n", - "Step 5 / 6862, Loss: 11.283023834228516\n", - "Step 5, Val_loss: 10.823039770126343\n", - "Step 6 / 6862, Loss: 10.765737533569336\n", - "Step 7 / 6862, Loss: 10.121588706970215\n", - "Step 8 / 6862, Loss: 9.69588565826416\n", - "Step 9 / 6862, Loss: 9.501729965209961\n", - "Step 10 / 6862, Loss: 10.076082229614258\n", - "Step 10, Val_loss: 9.637478192647299\n", - "Step 11 / 6862, Loss: 10.335653305053711\n", - "Step 12 / 6862, Loss: 9.704903602600098\n", - "Step 13 / 6862, Loss: 9.107070922851562\n", - "Step 14 / 6862, Loss: 8.985576629638672\n", - "Step 15 / 6862, Loss: 8.823647499084473\n", - "Step 15, Val_loss: 9.185553391774496\n", - "Step 16 / 6862, Loss: 9.310721397399902\n", - "Step 17 / 6862, Loss: 9.178205490112305\n", - "Step 18 / 6862, Loss: 9.248604774475098\n", - "Step 19 / 6862, Loss: 9.397645950317383\n", - "Step 20 / 6862, Loss: 9.491145133972168\n", - "Step 20, Val_loss: 9.07630173365275\n", - "Step 21 / 6862, Loss: 8.726667404174805\n", - "Step 22 / 6862, Loss: 8.85219955444336\n", - "Step 23 / 6862, Loss: 9.02513313293457\n" + "Step 1 / 102408, Loss: 11.768007278442383\n", + "Step 2 / 102408, Loss: 11.757434844970703\n", + "Step 3 / 102408, Loss: 11.730992317199707\n", + "Step 4 / 102408, Loss: 11.66014575958252\n", + "Step 5 / 102408, Loss: 11.353281021118164\n", + "Step 5, Val_loss: 10.716990483939087\n", + "Step 6 / 102408, Loss: 10.836886405944824\n", + "Step 7 / 102408, Loss: 10.088896751403809\n", + "Step 8 / 102408, Loss: 10.012699127197266\n", + "Step 9 / 102408, Loss: 10.076889991760254\n", + "Step 10 / 102408, Loss: 9.623441696166992\n" ] }, { @@ -357,14 +346,15 @@ "traceback": [ "\u001b[1;31m---------------------------------------------------------------------------\u001b[0m", "\u001b[1;31mKeyboardInterrupt\u001b[0m Traceback (most recent call last)", - "Cell \u001b[1;32mIn[188], line 25\u001b[0m\n\u001b[0;32m 22\u001b[0m 
optimizer\u001b[38;5;241m.\u001b[39mzero_grad()\n\u001b[0;32m 24\u001b[0m \u001b[38;5;66;03m# Forward pass\u001b[39;00m\n\u001b[1;32m---> 25\u001b[0m logits \u001b[38;5;241m=\u001b[39m \u001b[43mmodel\u001b[49m\u001b[43m(\u001b[49m\u001b[43minput_ids\u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m 27\u001b[0m \u001b[38;5;66;03m# Compute the loss (ignore padding tokens by masking labels)\u001b[39;00m\n\u001b[0;32m 28\u001b[0m loss \u001b[38;5;241m=\u001b[39m criterion(logits\u001b[38;5;241m.\u001b[39mview(\u001b[38;5;241m-\u001b[39m\u001b[38;5;241m1\u001b[39m, vocab_size), labels\u001b[38;5;241m.\u001b[39mview(\u001b[38;5;241m-\u001b[39m\u001b[38;5;241m1\u001b[39m))\n", + "Cell \u001b[1;32mIn[209], line 38\u001b[0m\n\u001b[0;32m 35\u001b[0m \u001b[38;5;28mprint\u001b[39m(\u001b[38;5;124mf\u001b[39m\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mStep \u001b[39m\u001b[38;5;132;01m{\u001b[39;00mstep_counter\u001b[38;5;132;01m}\u001b[39;00m\u001b[38;5;124m / \u001b[39m\u001b[38;5;132;01m{\u001b[39;00m\u001b[38;5;28mlen\u001b[39m(train_dataloader)\u001b[38;5;132;01m}\u001b[39;00m\u001b[38;5;124m, Loss: \u001b[39m\u001b[38;5;132;01m{\u001b[39;00mloss\u001b[38;5;241m.\u001b[39mitem()\u001b[38;5;132;01m}\u001b[39;00m\u001b[38;5;124m\"\u001b[39m)\n\u001b[0;32m 37\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m step_counter \u001b[38;5;241m%\u001b[39m \u001b[38;5;241m5\u001b[39m \u001b[38;5;241m==\u001b[39m \u001b[38;5;241m0\u001b[39m: \u001b[38;5;66;03m# Do not need to log validation at every step, although we can\u001b[39;00m\n\u001b[1;32m---> 38\u001b[0m val_loss \u001b[38;5;241m=\u001b[39m \u001b[43mevaluate\u001b[49m\u001b[43m(\u001b[49m\u001b[43mmodel\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mval_dataloader\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mcriterion\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mdevice\u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m 39\u001b[0m 
\u001b[38;5;28mprint\u001b[39m(\u001b[38;5;124mf\u001b[39m\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mStep \u001b[39m\u001b[38;5;132;01m{\u001b[39;00mstep_counter\u001b[38;5;132;01m}\u001b[39;00m\u001b[38;5;124m, Val_loss: \u001b[39m\u001b[38;5;132;01m{\u001b[39;00mval_loss\u001b[38;5;132;01m}\u001b[39;00m\u001b[38;5;124m\"\u001b[39m)\n\u001b[0;32m 41\u001b[0m \u001b[38;5;66;03m# Track activations\u001b[39;00m\n", + "Cell \u001b[1;32mIn[208], line 10\u001b[0m, in \u001b[0;36mevaluate\u001b[1;34m(model, val_dataloader, criterion, device)\u001b[0m\n\u001b[0;32m 7\u001b[0m labels \u001b[38;5;241m=\u001b[39m batch[\u001b[38;5;124m'\u001b[39m\u001b[38;5;124mlabels\u001b[39m\u001b[38;5;124m'\u001b[39m]\u001b[38;5;241m.\u001b[39mto(device)\n\u001b[0;32m 9\u001b[0m \u001b[38;5;66;03m# Forward pass for validation\u001b[39;00m\n\u001b[1;32m---> 10\u001b[0m logits \u001b[38;5;241m=\u001b[39m \u001b[43mmodel\u001b[49m\u001b[43m(\u001b[49m\u001b[43minput_ids\u001b[49m\u001b[43m)\u001b[49m \u001b[38;5;66;03m# Shape: (batch_size, seq_len, vocab_size)\u001b[39;00m\n\u001b[0;32m 12\u001b[0m \u001b[38;5;66;03m# Calculate the loss\u001b[39;00m\n\u001b[0;32m 13\u001b[0m loss \u001b[38;5;241m=\u001b[39m criterion(logits\u001b[38;5;241m.\u001b[39mview(\u001b[38;5;241m-\u001b[39m\u001b[38;5;241m1\u001b[39m, vocab_size), labels\u001b[38;5;241m.\u001b[39mview(\u001b[38;5;241m-\u001b[39m\u001b[38;5;241m1\u001b[39m))\n", "File \u001b[1;32mc:\\Users\\leo.breedt\\miniconda3\\envs\\neptune_scale_py_312_base\\Lib\\site-packages\\torch\\nn\\modules\\module.py:1739\u001b[0m, in \u001b[0;36mModule._wrapped_call_impl\u001b[1;34m(self, *args, **kwargs)\u001b[0m\n\u001b[0;32m 1737\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_compiled_call_impl(\u001b[38;5;241m*\u001b[39margs, \u001b[38;5;241m*\u001b[39m\u001b[38;5;241m*\u001b[39mkwargs) \u001b[38;5;66;03m# type: ignore[misc]\u001b[39;00m\n\u001b[0;32m 1738\u001b[0m 
\u001b[38;5;28;01melse\u001b[39;00m:\n\u001b[1;32m-> 1739\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_call_impl\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n", "File \u001b[1;32mc:\\Users\\leo.breedt\\miniconda3\\envs\\neptune_scale_py_312_base\\Lib\\site-packages\\torch\\nn\\modules\\module.py:1750\u001b[0m, in \u001b[0;36mModule._call_impl\u001b[1;34m(self, *args, **kwargs)\u001b[0m\n\u001b[0;32m 1745\u001b[0m \u001b[38;5;66;03m# If we don't have any hooks, we want to skip the rest of the logic in\u001b[39;00m\n\u001b[0;32m 1746\u001b[0m \u001b[38;5;66;03m# this function, and just call forward.\u001b[39;00m\n\u001b[0;32m 1747\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;129;01mnot\u001b[39;00m (\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_backward_hooks \u001b[38;5;129;01mor\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_backward_pre_hooks \u001b[38;5;129;01mor\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_forward_hooks \u001b[38;5;129;01mor\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_forward_pre_hooks\n\u001b[0;32m 1748\u001b[0m \u001b[38;5;129;01mor\u001b[39;00m _global_backward_pre_hooks \u001b[38;5;129;01mor\u001b[39;00m _global_backward_hooks\n\u001b[0;32m 1749\u001b[0m \u001b[38;5;129;01mor\u001b[39;00m _global_forward_hooks \u001b[38;5;129;01mor\u001b[39;00m _global_forward_pre_hooks):\n\u001b[1;32m-> 1750\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[43mforward_call\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m 
\u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m 1752\u001b[0m result \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;01mNone\u001b[39;00m\n\u001b[0;32m 1753\u001b[0m called_always_called_hooks \u001b[38;5;241m=\u001b[39m \u001b[38;5;28mset\u001b[39m()\n", - "Cell \u001b[1;32mIn[172], line 28\u001b[0m, in \u001b[0;36mSimpleLLM.forward\u001b[1;34m(self, x)\u001b[0m\n\u001b[0;32m 26\u001b[0m lstm_out, _ \u001b[38;5;241m=\u001b[39m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mlstm(x) \u001b[38;5;66;03m# LSTM returns output and hidden/cell state tuple\u001b[39;00m\n\u001b[0;32m 27\u001b[0m out \u001b[38;5;241m=\u001b[39m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mfc1(lstm_out) \u001b[38;5;66;03m# Use the last output from the LSTM\u001b[39;00m\n\u001b[1;32m---> 28\u001b[0m out \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mfc2\u001b[49m\u001b[43m(\u001b[49m\u001b[43mout\u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m 29\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m out\n", + "Cell \u001b[1;32mIn[172], line 26\u001b[0m, in \u001b[0;36mSimpleLLM.forward\u001b[1;34m(self, x)\u001b[0m\n\u001b[0;32m 24\u001b[0m \u001b[38;5;28;01mdef\u001b[39;00m \u001b[38;5;21mforward\u001b[39m(\u001b[38;5;28mself\u001b[39m, x):\n\u001b[0;32m 25\u001b[0m x \u001b[38;5;241m=\u001b[39m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39membedding(x)\n\u001b[1;32m---> 26\u001b[0m lstm_out, _ \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mlstm\u001b[49m\u001b[43m(\u001b[49m\u001b[43mx\u001b[49m\u001b[43m)\u001b[49m \u001b[38;5;66;03m# LSTM returns output and hidden/cell state tuple\u001b[39;00m\n\u001b[0;32m 27\u001b[0m out \u001b[38;5;241m=\u001b[39m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mfc1(lstm_out) \u001b[38;5;66;03m# Use the last output from the 
LSTM\u001b[39;00m\n\u001b[0;32m 28\u001b[0m out \u001b[38;5;241m=\u001b[39m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mfc2(out)\n", "File \u001b[1;32mc:\\Users\\leo.breedt\\miniconda3\\envs\\neptune_scale_py_312_base\\Lib\\site-packages\\torch\\nn\\modules\\module.py:1739\u001b[0m, in \u001b[0;36mModule._wrapped_call_impl\u001b[1;34m(self, *args, **kwargs)\u001b[0m\n\u001b[0;32m 1737\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_compiled_call_impl(\u001b[38;5;241m*\u001b[39margs, \u001b[38;5;241m*\u001b[39m\u001b[38;5;241m*\u001b[39mkwargs) \u001b[38;5;66;03m# type: ignore[misc]\u001b[39;00m\n\u001b[0;32m 1738\u001b[0m \u001b[38;5;28;01melse\u001b[39;00m:\n\u001b[1;32m-> 1739\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_call_impl\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n", "File \u001b[1;32mc:\\Users\\leo.breedt\\miniconda3\\envs\\neptune_scale_py_312_base\\Lib\\site-packages\\torch\\nn\\modules\\module.py:1845\u001b[0m, in \u001b[0;36mModule._call_impl\u001b[1;34m(self, *args, **kwargs)\u001b[0m\n\u001b[0;32m 1842\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m inner()\n\u001b[0;32m 1844\u001b[0m \u001b[38;5;28;01mtry\u001b[39;00m:\n\u001b[1;32m-> 1845\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[43minner\u001b[49m\u001b[43m(\u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m 1846\u001b[0m \u001b[38;5;28;01mexcept\u001b[39;00m \u001b[38;5;167;01mException\u001b[39;00m:\n\u001b[0;32m 1847\u001b[0m \u001b[38;5;66;03m# run always called hooks if they have not already been run\u001b[39;00m\n\u001b[0;32m 1848\u001b[0m \u001b[38;5;66;03m# For now only forward hooks have the always_call option but 
perhaps\u001b[39;00m\n\u001b[0;32m 1849\u001b[0m \u001b[38;5;66;03m# this functionality should be added to full backward hooks as well.\u001b[39;00m\n\u001b[0;32m 1850\u001b[0m \u001b[38;5;28;01mfor\u001b[39;00m hook_id, hook \u001b[38;5;129;01min\u001b[39;00m _global_forward_hooks\u001b[38;5;241m.\u001b[39mitems():\n", "File \u001b[1;32mc:\\Users\\leo.breedt\\miniconda3\\envs\\neptune_scale_py_312_base\\Lib\\site-packages\\torch\\nn\\modules\\module.py:1793\u001b[0m, in \u001b[0;36mModule._call_impl..inner\u001b[1;34m()\u001b[0m\n\u001b[0;32m 1790\u001b[0m bw_hook \u001b[38;5;241m=\u001b[39m BackwardHook(\u001b[38;5;28mself\u001b[39m, full_backward_hooks, backward_pre_hooks)\n\u001b[0;32m 1791\u001b[0m args \u001b[38;5;241m=\u001b[39m bw_hook\u001b[38;5;241m.\u001b[39msetup_input_hook(args)\n\u001b[1;32m-> 1793\u001b[0m result \u001b[38;5;241m=\u001b[39m \u001b[43mforward_call\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m 1794\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m _global_forward_hooks \u001b[38;5;129;01mor\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_forward_hooks:\n\u001b[0;32m 1795\u001b[0m \u001b[38;5;28;01mfor\u001b[39;00m hook_id, hook \u001b[38;5;129;01min\u001b[39;00m (\n\u001b[0;32m 1796\u001b[0m \u001b[38;5;241m*\u001b[39m_global_forward_hooks\u001b[38;5;241m.\u001b[39mitems(),\n\u001b[0;32m 1797\u001b[0m \u001b[38;5;241m*\u001b[39m\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_forward_hooks\u001b[38;5;241m.\u001b[39mitems(),\n\u001b[0;32m 1798\u001b[0m ):\n\u001b[0;32m 1799\u001b[0m \u001b[38;5;66;03m# mark that always called hook is run\u001b[39;00m\n", - "File \u001b[1;32mc:\\Users\\leo.breedt\\miniconda3\\envs\\neptune_scale_py_312_base\\Lib\\site-packages\\torch\\nn\\modules\\linear.py:125\u001b[0m, in 
\u001b[0;36mLinear.forward\u001b[1;34m(self, input)\u001b[0m\n\u001b[0;32m 124\u001b[0m \u001b[38;5;28;01mdef\u001b[39;00m \u001b[38;5;21mforward\u001b[39m(\u001b[38;5;28mself\u001b[39m, \u001b[38;5;28minput\u001b[39m: Tensor) \u001b[38;5;241m-\u001b[39m\u001b[38;5;241m>\u001b[39m Tensor:\n\u001b[1;32m--> 125\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[43mF\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mlinear\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;28;43minput\u001b[39;49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mweight\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mbias\u001b[49m\u001b[43m)\u001b[49m\n", + "File \u001b[1;32mc:\\Users\\leo.breedt\\miniconda3\\envs\\neptune_scale_py_312_base\\Lib\\site-packages\\torch\\nn\\modules\\rnn.py:1124\u001b[0m, in \u001b[0;36mLSTM.forward\u001b[1;34m(self, input, hx)\u001b[0m\n\u001b[0;32m 1121\u001b[0m hx \u001b[38;5;241m=\u001b[39m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mpermute_hidden(hx, sorted_indices)\n\u001b[0;32m 1123\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m batch_sizes \u001b[38;5;129;01mis\u001b[39;00m \u001b[38;5;28;01mNone\u001b[39;00m:\n\u001b[1;32m-> 1124\u001b[0m result \u001b[38;5;241m=\u001b[39m \u001b[43m_VF\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mlstm\u001b[49m\u001b[43m(\u001b[49m\n\u001b[0;32m 1125\u001b[0m \u001b[43m \u001b[49m\u001b[38;5;28;43minput\u001b[39;49m\u001b[43m,\u001b[49m\n\u001b[0;32m 1126\u001b[0m \u001b[43m \u001b[49m\u001b[43mhx\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 1127\u001b[0m \u001b[43m \u001b[49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_flat_weights\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;66;43;03m# type: ignore[arg-type]\u001b[39;49;00m\n\u001b[0;32m 1128\u001b[0m \u001b[43m 
\u001b[49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mbias\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 1129\u001b[0m \u001b[43m \u001b[49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mnum_layers\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 1130\u001b[0m \u001b[43m \u001b[49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mdropout\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 1131\u001b[0m \u001b[43m \u001b[49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mtraining\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 1132\u001b[0m \u001b[43m \u001b[49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mbidirectional\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 1133\u001b[0m \u001b[43m \u001b[49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mbatch_first\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 1134\u001b[0m \u001b[43m \u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m 1135\u001b[0m \u001b[38;5;28;01melse\u001b[39;00m:\n\u001b[0;32m 1136\u001b[0m result \u001b[38;5;241m=\u001b[39m _VF\u001b[38;5;241m.\u001b[39mlstm(\n\u001b[0;32m 1137\u001b[0m \u001b[38;5;28minput\u001b[39m,\n\u001b[0;32m 1138\u001b[0m batch_sizes,\n\u001b[1;32m (...)\u001b[0m\n\u001b[0;32m 1145\u001b[0m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mbidirectional,\n\u001b[0;32m 1146\u001b[0m )\n", "\u001b[1;31mKeyboardInterrupt\u001b[0m: " ] } @@ -444,7 +434,7 @@ }, { "cell_type": "code", - "execution_count": 189, + "execution_count": 210, "metadata": {}, "outputs": [ { From fd7cbc5ec2f1ccead949b22d8900598227355587 Mon Sep 17 00:00:00 2001 From: LeoRoccoBreedt Date: Thu, 6 Mar 2025 16:23:24 +0100 Subject: [PATCH 034/125] refactor: update loading and processing of HF dataset to make code faster and more readable --- .../pytorch_text_model_debugging.ipynb | 74 +++++-------------- 1 file changed, 18 insertions(+), 56 
deletions(-) diff --git a/integrations-and-supported-tools/pytorch/pytorch_text_model_debugging.ipynb b/integrations-and-supported-tools/pytorch/pytorch_text_model_debugging.ipynb index c1ac4c7..6cde57b 100644 --- a/integrations-and-supported-tools/pytorch/pytorch_text_model_debugging.ipynb +++ b/integrations-and-supported-tools/pytorch/pytorch_text_model_debugging.ipynb @@ -155,7 +155,7 @@ }, { "cell_type": "code", - "execution_count": 202, + "execution_count": null, "metadata": {}, "outputs": [], "source": [ @@ -164,39 +164,12 @@ "data_files = {\"train\": base_url + \"train-0000*-of-00067.parquet\", # download only the first 10 files from the HF dataset\n", " \"validation\": base_url + \"validation-00000-of-00001.parquet\"} #doanload the complete validation dataset\n", "\n", - "data_subset = load_dataset(\"parquet\", data_files = data_files)\n", - "train_subset = data_subset[\"train\"]\n", - "validation_subset = data_subset[\"validation\"]" - ] - }, - { - "cell_type": "code", - "execution_count": 203, - "metadata": {}, - "outputs": [], - "source": [ - "class TokenizedDataset(Dataset):\n", - " def __init__(self, tokenized_data):\n", - " self.data = tokenized_data\n", - "\n", - " def __len__(self):\n", - " return len(self.data)\n", - "\n", - " def __getitem__(self, idx):\n", - " item = self.data[idx]\n", - " # Return input_ids, attention_mask, and labels as tensors\n", - " return {\n", - " 'input_ids': torch.tensor(item['input_ids'], dtype=torch.long),\n", - " 'attention_mask': torch.tensor(item['attention_mask'], dtype=torch.long),\n", - " 'labels': torch.tensor(item['labels'], dtype=torch.long)\n", - " }\n", - " \n", - "# Assuming tokenized_data is a list of dictionaries, each containing 'input_ids', 'attention_mask', and 'labels'\n", - "train_dataset = TokenizedDataset(train_subset) # tokenized_data is the list you want to use\n", - "train_dataloader = DataLoader(train_dataset, batch_size=8, shuffle=True)\n", + "data_subset = load_dataset(\"parquet\", data_files 
= data_files, num_proc=4)\n", + "train_subset = data_subset[\"train\"].with_format(\"torch\") # HF provides methods to convert datatypes to tensors\n", + "validation_subset = data_subset[\"validation\"].with_format(\"torch\") # HF provides methods to convert datatypes to tensors\n", "\n", - "val_dataset = TokenizedDataset(validation_subset) # tokenized_data is the list you want to use\n", - "val_dataloader = DataLoader(val_dataset, batch_size=8, shuffle=True)" + "train_dataloader = DataLoader(train_subset, batch_size=8, shuffle=True)\n", + "val_dataloader = DataLoader(validation_subset, batch_size=8, shuffle=True)" ] }, { @@ -260,7 +233,7 @@ }, { "cell_type": "code", - "execution_count": 207, + "execution_count": 218, "metadata": {}, "outputs": [], "source": [ @@ -319,24 +292,15 @@ }, { "cell_type": "code", - "execution_count": 209, + "execution_count": 219, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ - "Step 1 / 102408, Loss: 11.768007278442383\n", - "Step 2 / 102408, Loss: 11.757434844970703\n", - "Step 3 / 102408, Loss: 11.730992317199707\n", - "Step 4 / 102408, Loss: 11.66014575958252\n", - "Step 5 / 102408, Loss: 11.353281021118164\n", - "Step 5, Val_loss: 10.716990483939087\n", - "Step 6 / 102408, Loss: 10.836886405944824\n", - "Step 7 / 102408, Loss: 10.088896751403809\n", - "Step 8 / 102408, Loss: 10.012699127197266\n", - "Step 9 / 102408, Loss: 10.076889991760254\n", - "Step 10 / 102408, Loss: 9.623441696166992\n" + "Step 1 / 102408, Loss: 11.76780891418457\n", + "Step 2 / 102408, Loss: 11.754159927368164\n" ] }, { @@ -346,15 +310,13 @@ "traceback": [ "\u001b[1;31m---------------------------------------------------------------------------\u001b[0m", "\u001b[1;31mKeyboardInterrupt\u001b[0m Traceback (most recent call last)", - "Cell \u001b[1;32mIn[209], line 38\u001b[0m\n\u001b[0;32m 35\u001b[0m \u001b[38;5;28mprint\u001b[39m(\u001b[38;5;124mf\u001b[39m\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mStep 
\u001b[39m\u001b[38;5;132;01m{\u001b[39;00mstep_counter\u001b[38;5;132;01m}\u001b[39;00m\u001b[38;5;124m / \u001b[39m\u001b[38;5;132;01m{\u001b[39;00m\u001b[38;5;28mlen\u001b[39m(train_dataloader)\u001b[38;5;132;01m}\u001b[39;00m\u001b[38;5;124m, Loss: \u001b[39m\u001b[38;5;132;01m{\u001b[39;00mloss\u001b[38;5;241m.\u001b[39mitem()\u001b[38;5;132;01m}\u001b[39;00m\u001b[38;5;124m\"\u001b[39m)\n\u001b[0;32m 37\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m step_counter \u001b[38;5;241m%\u001b[39m \u001b[38;5;241m5\u001b[39m \u001b[38;5;241m==\u001b[39m \u001b[38;5;241m0\u001b[39m: \u001b[38;5;66;03m# Do not need to log validation at every step, although we can\u001b[39;00m\n\u001b[1;32m---> 38\u001b[0m val_loss \u001b[38;5;241m=\u001b[39m \u001b[43mevaluate\u001b[49m\u001b[43m(\u001b[49m\u001b[43mmodel\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mval_dataloader\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mcriterion\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mdevice\u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m 39\u001b[0m \u001b[38;5;28mprint\u001b[39m(\u001b[38;5;124mf\u001b[39m\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mStep \u001b[39m\u001b[38;5;132;01m{\u001b[39;00mstep_counter\u001b[38;5;132;01m}\u001b[39;00m\u001b[38;5;124m, Val_loss: \u001b[39m\u001b[38;5;132;01m{\u001b[39;00mval_loss\u001b[38;5;132;01m}\u001b[39;00m\u001b[38;5;124m\"\u001b[39m)\n\u001b[0;32m 41\u001b[0m \u001b[38;5;66;03m# Track activations\u001b[39;00m\n", - "Cell \u001b[1;32mIn[208], line 10\u001b[0m, in \u001b[0;36mevaluate\u001b[1;34m(model, val_dataloader, criterion, device)\u001b[0m\n\u001b[0;32m 7\u001b[0m labels \u001b[38;5;241m=\u001b[39m batch[\u001b[38;5;124m'\u001b[39m\u001b[38;5;124mlabels\u001b[39m\u001b[38;5;124m'\u001b[39m]\u001b[38;5;241m.\u001b[39mto(device)\n\u001b[0;32m 9\u001b[0m \u001b[38;5;66;03m# Forward pass for validation\u001b[39;00m\n\u001b[1;32m---> 10\u001b[0m logits \u001b[38;5;241m=\u001b[39m 
\u001b[43mmodel\u001b[49m\u001b[43m(\u001b[49m\u001b[43minput_ids\u001b[49m\u001b[43m)\u001b[49m \u001b[38;5;66;03m# Shape: (batch_size, seq_len, vocab_size)\u001b[39;00m\n\u001b[0;32m 12\u001b[0m \u001b[38;5;66;03m# Calculate the loss\u001b[39;00m\n\u001b[0;32m 13\u001b[0m loss \u001b[38;5;241m=\u001b[39m criterion(logits\u001b[38;5;241m.\u001b[39mview(\u001b[38;5;241m-\u001b[39m\u001b[38;5;241m1\u001b[39m, vocab_size), labels\u001b[38;5;241m.\u001b[39mview(\u001b[38;5;241m-\u001b[39m\u001b[38;5;241m1\u001b[39m))\n", - "File \u001b[1;32mc:\\Users\\leo.breedt\\miniconda3\\envs\\neptune_scale_py_312_base\\Lib\\site-packages\\torch\\nn\\modules\\module.py:1739\u001b[0m, in \u001b[0;36mModule._wrapped_call_impl\u001b[1;34m(self, *args, **kwargs)\u001b[0m\n\u001b[0;32m 1737\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_compiled_call_impl(\u001b[38;5;241m*\u001b[39margs, \u001b[38;5;241m*\u001b[39m\u001b[38;5;241m*\u001b[39mkwargs) \u001b[38;5;66;03m# type: ignore[misc]\u001b[39;00m\n\u001b[0;32m 1738\u001b[0m \u001b[38;5;28;01melse\u001b[39;00m:\n\u001b[1;32m-> 1739\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_call_impl\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n", - "File \u001b[1;32mc:\\Users\\leo.breedt\\miniconda3\\envs\\neptune_scale_py_312_base\\Lib\\site-packages\\torch\\nn\\modules\\module.py:1750\u001b[0m, in \u001b[0;36mModule._call_impl\u001b[1;34m(self, *args, **kwargs)\u001b[0m\n\u001b[0;32m 1745\u001b[0m \u001b[38;5;66;03m# If we don't have any hooks, we want to skip the rest of the logic in\u001b[39;00m\n\u001b[0;32m 1746\u001b[0m \u001b[38;5;66;03m# this function, and just call forward.\u001b[39;00m\n\u001b[0;32m 
1747\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;129;01mnot\u001b[39;00m (\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_backward_hooks \u001b[38;5;129;01mor\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_backward_pre_hooks \u001b[38;5;129;01mor\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_forward_hooks \u001b[38;5;129;01mor\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_forward_pre_hooks\n\u001b[0;32m 1748\u001b[0m \u001b[38;5;129;01mor\u001b[39;00m _global_backward_pre_hooks \u001b[38;5;129;01mor\u001b[39;00m _global_backward_hooks\n\u001b[0;32m 1749\u001b[0m \u001b[38;5;129;01mor\u001b[39;00m _global_forward_hooks \u001b[38;5;129;01mor\u001b[39;00m _global_forward_pre_hooks):\n\u001b[1;32m-> 1750\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[43mforward_call\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m 1752\u001b[0m result \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;01mNone\u001b[39;00m\n\u001b[0;32m 1753\u001b[0m called_always_called_hooks \u001b[38;5;241m=\u001b[39m \u001b[38;5;28mset\u001b[39m()\n", - "Cell \u001b[1;32mIn[172], line 26\u001b[0m, in \u001b[0;36mSimpleLLM.forward\u001b[1;34m(self, x)\u001b[0m\n\u001b[0;32m 24\u001b[0m \u001b[38;5;28;01mdef\u001b[39;00m \u001b[38;5;21mforward\u001b[39m(\u001b[38;5;28mself\u001b[39m, x):\n\u001b[0;32m 25\u001b[0m x \u001b[38;5;241m=\u001b[39m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39membedding(x)\n\u001b[1;32m---> 26\u001b[0m lstm_out, _ \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mlstm\u001b[49m\u001b[43m(\u001b[49m\u001b[43mx\u001b[49m\u001b[43m)\u001b[49m \u001b[38;5;66;03m# LSTM returns output and hidden/cell state 
tuple\u001b[39;00m\n\u001b[0;32m 27\u001b[0m out \u001b[38;5;241m=\u001b[39m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mfc1(lstm_out) \u001b[38;5;66;03m# Use the last output from the LSTM\u001b[39;00m\n\u001b[0;32m 28\u001b[0m out \u001b[38;5;241m=\u001b[39m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mfc2(out)\n", - "File \u001b[1;32mc:\\Users\\leo.breedt\\miniconda3\\envs\\neptune_scale_py_312_base\\Lib\\site-packages\\torch\\nn\\modules\\module.py:1739\u001b[0m, in \u001b[0;36mModule._wrapped_call_impl\u001b[1;34m(self, *args, **kwargs)\u001b[0m\n\u001b[0;32m 1737\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_compiled_call_impl(\u001b[38;5;241m*\u001b[39margs, \u001b[38;5;241m*\u001b[39m\u001b[38;5;241m*\u001b[39mkwargs) \u001b[38;5;66;03m# type: ignore[misc]\u001b[39;00m\n\u001b[0;32m 1738\u001b[0m \u001b[38;5;28;01melse\u001b[39;00m:\n\u001b[1;32m-> 1739\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_call_impl\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n", - "File \u001b[1;32mc:\\Users\\leo.breedt\\miniconda3\\envs\\neptune_scale_py_312_base\\Lib\\site-packages\\torch\\nn\\modules\\module.py:1845\u001b[0m, in \u001b[0;36mModule._call_impl\u001b[1;34m(self, *args, **kwargs)\u001b[0m\n\u001b[0;32m 1842\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m inner()\n\u001b[0;32m 1844\u001b[0m \u001b[38;5;28;01mtry\u001b[39;00m:\n\u001b[1;32m-> 1845\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[43minner\u001b[49m\u001b[43m(\u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m 1846\u001b[0m \u001b[38;5;28;01mexcept\u001b[39;00m \u001b[38;5;167;01mException\u001b[39;00m:\n\u001b[0;32m 1847\u001b[0m \u001b[38;5;66;03m# 
run always called hooks if they have not already been run\u001b[39;00m\n\u001b[0;32m 1848\u001b[0m \u001b[38;5;66;03m# For now only forward hooks have the always_call option but perhaps\u001b[39;00m\n\u001b[0;32m 1849\u001b[0m \u001b[38;5;66;03m# this functionality should be added to full backward hooks as well.\u001b[39;00m\n\u001b[0;32m 1850\u001b[0m \u001b[38;5;28;01mfor\u001b[39;00m hook_id, hook \u001b[38;5;129;01min\u001b[39;00m _global_forward_hooks\u001b[38;5;241m.\u001b[39mitems():\n", - "File \u001b[1;32mc:\\Users\\leo.breedt\\miniconda3\\envs\\neptune_scale_py_312_base\\Lib\\site-packages\\torch\\nn\\modules\\module.py:1793\u001b[0m, in \u001b[0;36mModule._call_impl..inner\u001b[1;34m()\u001b[0m\n\u001b[0;32m 1790\u001b[0m bw_hook \u001b[38;5;241m=\u001b[39m BackwardHook(\u001b[38;5;28mself\u001b[39m, full_backward_hooks, backward_pre_hooks)\n\u001b[0;32m 1791\u001b[0m args \u001b[38;5;241m=\u001b[39m bw_hook\u001b[38;5;241m.\u001b[39msetup_input_hook(args)\n\u001b[1;32m-> 1793\u001b[0m result \u001b[38;5;241m=\u001b[39m \u001b[43mforward_call\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m 1794\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m _global_forward_hooks \u001b[38;5;129;01mor\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_forward_hooks:\n\u001b[0;32m 1795\u001b[0m \u001b[38;5;28;01mfor\u001b[39;00m hook_id, hook \u001b[38;5;129;01min\u001b[39;00m (\n\u001b[0;32m 1796\u001b[0m \u001b[38;5;241m*\u001b[39m_global_forward_hooks\u001b[38;5;241m.\u001b[39mitems(),\n\u001b[0;32m 1797\u001b[0m \u001b[38;5;241m*\u001b[39m\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_forward_hooks\u001b[38;5;241m.\u001b[39mitems(),\n\u001b[0;32m 1798\u001b[0m ):\n\u001b[0;32m 1799\u001b[0m \u001b[38;5;66;03m# mark that always called hook 
is run\u001b[39;00m\n", - "File \u001b[1;32mc:\\Users\\leo.breedt\\miniconda3\\envs\\neptune_scale_py_312_base\\Lib\\site-packages\\torch\\nn\\modules\\rnn.py:1124\u001b[0m, in \u001b[0;36mLSTM.forward\u001b[1;34m(self, input, hx)\u001b[0m\n\u001b[0;32m 1121\u001b[0m hx \u001b[38;5;241m=\u001b[39m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mpermute_hidden(hx, sorted_indices)\n\u001b[0;32m 1123\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m batch_sizes \u001b[38;5;129;01mis\u001b[39;00m \u001b[38;5;28;01mNone\u001b[39;00m:\n\u001b[1;32m-> 1124\u001b[0m result \u001b[38;5;241m=\u001b[39m \u001b[43m_VF\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mlstm\u001b[49m\u001b[43m(\u001b[49m\n\u001b[0;32m 1125\u001b[0m \u001b[43m \u001b[49m\u001b[38;5;28;43minput\u001b[39;49m\u001b[43m,\u001b[49m\n\u001b[0;32m 1126\u001b[0m \u001b[43m \u001b[49m\u001b[43mhx\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 1127\u001b[0m \u001b[43m \u001b[49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_flat_weights\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;66;43;03m# type: ignore[arg-type]\u001b[39;49;00m\n\u001b[0;32m 1128\u001b[0m \u001b[43m \u001b[49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mbias\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 1129\u001b[0m \u001b[43m \u001b[49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mnum_layers\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 1130\u001b[0m \u001b[43m \u001b[49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mdropout\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 1131\u001b[0m \u001b[43m \u001b[49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mtraining\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 1132\u001b[0m \u001b[43m 
\u001b[49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mbidirectional\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 1133\u001b[0m \u001b[43m \u001b[49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mbatch_first\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 1134\u001b[0m \u001b[43m \u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m 1135\u001b[0m \u001b[38;5;28;01melse\u001b[39;00m:\n\u001b[0;32m 1136\u001b[0m result \u001b[38;5;241m=\u001b[39m _VF\u001b[38;5;241m.\u001b[39mlstm(\n\u001b[0;32m 1137\u001b[0m \u001b[38;5;28minput\u001b[39m,\n\u001b[0;32m 1138\u001b[0m batch_sizes,\n\u001b[1;32m (...)\u001b[0m\n\u001b[0;32m 1145\u001b[0m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mbidirectional,\n\u001b[0;32m 1146\u001b[0m )\n", + "Cell \u001b[1;32mIn[219], line 32\u001b[0m\n\u001b[0;32m 30\u001b[0m \u001b[38;5;66;03m# Backward pass and optimization\u001b[39;00m\n\u001b[0;32m 31\u001b[0m loss\u001b[38;5;241m.\u001b[39mbackward()\n\u001b[1;32m---> 32\u001b[0m \u001b[43moptimizer\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mstep\u001b[49m\u001b[43m(\u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m 34\u001b[0m total_loss \u001b[38;5;241m+\u001b[39m\u001b[38;5;241m=\u001b[39m loss\u001b[38;5;241m.\u001b[39mitem()\n\u001b[0;32m 35\u001b[0m \u001b[38;5;28mprint\u001b[39m(\u001b[38;5;124mf\u001b[39m\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mStep \u001b[39m\u001b[38;5;132;01m{\u001b[39;00mstep_counter\u001b[38;5;132;01m}\u001b[39;00m\u001b[38;5;124m / \u001b[39m\u001b[38;5;132;01m{\u001b[39;00m\u001b[38;5;28mlen\u001b[39m(train_dataloader)\u001b[38;5;132;01m}\u001b[39;00m\u001b[38;5;124m, Loss: \u001b[39m\u001b[38;5;132;01m{\u001b[39;00mloss\u001b[38;5;241m.\u001b[39mitem()\u001b[38;5;132;01m}\u001b[39;00m\u001b[38;5;124m\"\u001b[39m)\n", + "File \u001b[1;32mc:\\Users\\leo.breedt\\miniconda3\\envs\\neptune_scale_py_312_base\\Lib\\site-packages\\torch\\optim\\optimizer.py:493\u001b[0m, in 
\u001b[0;36mOptimizer.profile_hook_step..wrapper\u001b[1;34m(*args, **kwargs)\u001b[0m\n\u001b[0;32m 488\u001b[0m \u001b[38;5;28;01melse\u001b[39;00m:\n\u001b[0;32m 489\u001b[0m \u001b[38;5;28;01mraise\u001b[39;00m \u001b[38;5;167;01mRuntimeError\u001b[39;00m(\n\u001b[0;32m 490\u001b[0m \u001b[38;5;124mf\u001b[39m\u001b[38;5;124m\"\u001b[39m\u001b[38;5;132;01m{\u001b[39;00mfunc\u001b[38;5;132;01m}\u001b[39;00m\u001b[38;5;124m must return None or a tuple of (new_args, new_kwargs), but got \u001b[39m\u001b[38;5;132;01m{\u001b[39;00mresult\u001b[38;5;132;01m}\u001b[39;00m\u001b[38;5;124m.\u001b[39m\u001b[38;5;124m\"\u001b[39m\n\u001b[0;32m 491\u001b[0m )\n\u001b[1;32m--> 493\u001b[0m out \u001b[38;5;241m=\u001b[39m \u001b[43mfunc\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m 494\u001b[0m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_optimizer_step_code()\n\u001b[0;32m 496\u001b[0m \u001b[38;5;66;03m# call optimizer step post hooks\u001b[39;00m\n", + "File \u001b[1;32mc:\\Users\\leo.breedt\\miniconda3\\envs\\neptune_scale_py_312_base\\Lib\\site-packages\\torch\\optim\\optimizer.py:91\u001b[0m, in \u001b[0;36m_use_grad_for_differentiable.._use_grad\u001b[1;34m(self, *args, **kwargs)\u001b[0m\n\u001b[0;32m 89\u001b[0m torch\u001b[38;5;241m.\u001b[39mset_grad_enabled(\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mdefaults[\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mdifferentiable\u001b[39m\u001b[38;5;124m\"\u001b[39m])\n\u001b[0;32m 90\u001b[0m torch\u001b[38;5;241m.\u001b[39m_dynamo\u001b[38;5;241m.\u001b[39mgraph_break()\n\u001b[1;32m---> 91\u001b[0m ret \u001b[38;5;241m=\u001b[39m \u001b[43mfunc\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[43m,\u001b[49m\u001b[43m 
\u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m 92\u001b[0m \u001b[38;5;28;01mfinally\u001b[39;00m:\n\u001b[0;32m 93\u001b[0m torch\u001b[38;5;241m.\u001b[39m_dynamo\u001b[38;5;241m.\u001b[39mgraph_break()\n", + "File \u001b[1;32mc:\\Users\\leo.breedt\\miniconda3\\envs\\neptune_scale_py_312_base\\Lib\\site-packages\\torch\\optim\\adam.py:244\u001b[0m, in \u001b[0;36mAdam.step\u001b[1;34m(self, closure)\u001b[0m\n\u001b[0;32m 232\u001b[0m beta1, beta2 \u001b[38;5;241m=\u001b[39m group[\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mbetas\u001b[39m\u001b[38;5;124m\"\u001b[39m]\n\u001b[0;32m 234\u001b[0m has_complex \u001b[38;5;241m=\u001b[39m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_init_group(\n\u001b[0;32m 235\u001b[0m group,\n\u001b[0;32m 236\u001b[0m params_with_grad,\n\u001b[1;32m (...)\u001b[0m\n\u001b[0;32m 241\u001b[0m state_steps,\n\u001b[0;32m 242\u001b[0m )\n\u001b[1;32m--> 244\u001b[0m \u001b[43madam\u001b[49m\u001b[43m(\u001b[49m\n\u001b[0;32m 245\u001b[0m \u001b[43m \u001b[49m\u001b[43mparams_with_grad\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 246\u001b[0m \u001b[43m \u001b[49m\u001b[43mgrads\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 247\u001b[0m \u001b[43m \u001b[49m\u001b[43mexp_avgs\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 248\u001b[0m \u001b[43m \u001b[49m\u001b[43mexp_avg_sqs\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 249\u001b[0m \u001b[43m \u001b[49m\u001b[43mmax_exp_avg_sqs\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 250\u001b[0m \u001b[43m \u001b[49m\u001b[43mstate_steps\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 251\u001b[0m \u001b[43m 
\u001b[49m\u001b[43mamsgrad\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mgroup\u001b[49m\u001b[43m[\u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43mamsgrad\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m]\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 252\u001b[0m \u001b[43m \u001b[49m\u001b[43mhas_complex\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mhas_complex\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 253\u001b[0m \u001b[43m \u001b[49m\u001b[43mbeta1\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mbeta1\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 254\u001b[0m \u001b[43m \u001b[49m\u001b[43mbeta2\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mbeta2\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 255\u001b[0m \u001b[43m \u001b[49m\u001b[43mlr\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mgroup\u001b[49m\u001b[43m[\u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43mlr\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m]\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 256\u001b[0m \u001b[43m \u001b[49m\u001b[43mweight_decay\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mgroup\u001b[49m\u001b[43m[\u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43mweight_decay\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m]\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 257\u001b[0m \u001b[43m \u001b[49m\u001b[43meps\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mgroup\u001b[49m\u001b[43m[\u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43meps\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m]\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 258\u001b[0m \u001b[43m \u001b[49m\u001b[43mmaximize\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mgroup\u001b[49m\u001b[43m[\u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43mmaximize\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m]\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 259\u001b[0m \u001b[43m 
\u001b[49m\u001b[43mforeach\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mgroup\u001b[49m\u001b[43m[\u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43mforeach\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m]\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 260\u001b[0m \u001b[43m \u001b[49m\u001b[43mcapturable\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mgroup\u001b[49m\u001b[43m[\u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43mcapturable\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m]\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 261\u001b[0m \u001b[43m \u001b[49m\u001b[43mdifferentiable\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mgroup\u001b[49m\u001b[43m[\u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43mdifferentiable\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m]\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 262\u001b[0m \u001b[43m \u001b[49m\u001b[43mfused\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mgroup\u001b[49m\u001b[43m[\u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43mfused\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m]\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 263\u001b[0m \u001b[43m \u001b[49m\u001b[43mgrad_scale\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[38;5;28;43mgetattr\u001b[39;49m\u001b[43m(\u001b[49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43mgrad_scale\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;28;43;01mNone\u001b[39;49;00m\u001b[43m)\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 264\u001b[0m \u001b[43m \u001b[49m\u001b[43mfound_inf\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[38;5;28;43mgetattr\u001b[39;49m\u001b[43m(\u001b[49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[43m,\u001b[49m\u001b[43m 
\u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43mfound_inf\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;28;43;01mNone\u001b[39;49;00m\u001b[43m)\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 265\u001b[0m \u001b[43m \u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m 267\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m loss\n", + "File \u001b[1;32mc:\\Users\\leo.breedt\\miniconda3\\envs\\neptune_scale_py_312_base\\Lib\\site-packages\\torch\\optim\\optimizer.py:154\u001b[0m, in \u001b[0;36m_disable_dynamo_if_unsupported..wrapper..maybe_fallback\u001b[1;34m(*args, **kwargs)\u001b[0m\n\u001b[0;32m 152\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m disabled_func(\u001b[38;5;241m*\u001b[39margs, \u001b[38;5;241m*\u001b[39m\u001b[38;5;241m*\u001b[39mkwargs)\n\u001b[0;32m 153\u001b[0m \u001b[38;5;28;01melse\u001b[39;00m:\n\u001b[1;32m--> 154\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[43mfunc\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n", + "File \u001b[1;32mc:\\Users\\leo.breedt\\miniconda3\\envs\\neptune_scale_py_312_base\\Lib\\site-packages\\torch\\optim\\adam.py:876\u001b[0m, in \u001b[0;36madam\u001b[1;34m(params, grads, exp_avgs, exp_avg_sqs, max_exp_avg_sqs, state_steps, foreach, capturable, differentiable, fused, grad_scale, found_inf, has_complex, amsgrad, beta1, beta2, lr, weight_decay, eps, maximize)\u001b[0m\n\u001b[0;32m 873\u001b[0m \u001b[38;5;28;01melse\u001b[39;00m:\n\u001b[0;32m 874\u001b[0m func \u001b[38;5;241m=\u001b[39m _single_tensor_adam\n\u001b[1;32m--> 876\u001b[0m \u001b[43mfunc\u001b[49m\u001b[43m(\u001b[49m\n\u001b[0;32m 877\u001b[0m \u001b[43m \u001b[49m\u001b[43mparams\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 878\u001b[0m \u001b[43m 
\u001b[49m\u001b[43mgrads\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 879\u001b[0m \u001b[43m \u001b[49m\u001b[43mexp_avgs\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 880\u001b[0m \u001b[43m \u001b[49m\u001b[43mexp_avg_sqs\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 881\u001b[0m \u001b[43m \u001b[49m\u001b[43mmax_exp_avg_sqs\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 882\u001b[0m \u001b[43m \u001b[49m\u001b[43mstate_steps\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 883\u001b[0m \u001b[43m \u001b[49m\u001b[43mamsgrad\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mamsgrad\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 884\u001b[0m \u001b[43m \u001b[49m\u001b[43mhas_complex\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mhas_complex\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 885\u001b[0m \u001b[43m \u001b[49m\u001b[43mbeta1\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mbeta1\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 886\u001b[0m \u001b[43m \u001b[49m\u001b[43mbeta2\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mbeta2\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 887\u001b[0m \u001b[43m \u001b[49m\u001b[43mlr\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mlr\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 888\u001b[0m \u001b[43m \u001b[49m\u001b[43mweight_decay\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mweight_decay\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 889\u001b[0m \u001b[43m \u001b[49m\u001b[43meps\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43meps\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 890\u001b[0m \u001b[43m \u001b[49m\u001b[43mmaximize\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mmaximize\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 891\u001b[0m \u001b[43m \u001b[49m\u001b[43mcapturable\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mcapturable\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 892\u001b[0m \u001b[43m 
\u001b[49m\u001b[43mdifferentiable\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mdifferentiable\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 893\u001b[0m \u001b[43m \u001b[49m\u001b[43mgrad_scale\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mgrad_scale\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 894\u001b[0m \u001b[43m \u001b[49m\u001b[43mfound_inf\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mfound_inf\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 895\u001b[0m \u001b[43m\u001b[49m\u001b[43m)\u001b[49m\n", + "File \u001b[1;32mc:\\Users\\leo.breedt\\miniconda3\\envs\\neptune_scale_py_312_base\\Lib\\site-packages\\torch\\optim\\adam.py:476\u001b[0m, in \u001b[0;36m_single_tensor_adam\u001b[1;34m(params, grads, exp_avgs, exp_avg_sqs, max_exp_avg_sqs, state_steps, grad_scale, found_inf, amsgrad, has_complex, beta1, beta2, lr, weight_decay, eps, maximize, capturable, differentiable)\u001b[0m\n\u001b[0;32m 474\u001b[0m denom \u001b[38;5;241m=\u001b[39m (max_exp_avg_sqs[i]\u001b[38;5;241m.\u001b[39msqrt() \u001b[38;5;241m/\u001b[39m bias_correction2_sqrt)\u001b[38;5;241m.\u001b[39madd_(eps)\n\u001b[0;32m 475\u001b[0m \u001b[38;5;28;01melse\u001b[39;00m:\n\u001b[1;32m--> 476\u001b[0m denom \u001b[38;5;241m=\u001b[39m \u001b[43m(\u001b[49m\u001b[43mexp_avg_sq\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43msqrt\u001b[49m\u001b[43m(\u001b[49m\u001b[43m)\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m/\u001b[39;49m\u001b[43m \u001b[49m\u001b[43mbias_correction2_sqrt\u001b[49m\u001b[43m)\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43madd_\u001b[49m\u001b[43m(\u001b[49m\u001b[43meps\u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m 478\u001b[0m param\u001b[38;5;241m.\u001b[39maddcdiv_(exp_avg, denom, value\u001b[38;5;241m=\u001b[39m\u001b[38;5;241m-\u001b[39mstep_size)\n\u001b[0;32m 480\u001b[0m \u001b[38;5;66;03m# Lastly, switch back to complex view\u001b[39;00m\n", "\u001b[1;31mKeyboardInterrupt\u001b[0m: " ] } @@ -434,7 +396,7 @@ }, { "cell_type": 
"code", - "execution_count": 210, + "execution_count": 220, "metadata": {}, "outputs": [ { From 3be6c3e0dbaec91e3d81226b74451f6098337cbc Mon Sep 17 00:00:00 2001 From: LeoRoccoBreedt Date: Thu, 6 Mar 2025 18:18:26 +0100 Subject: [PATCH 035/125] chore: add TODO's to address --- .../pytorch/pytorch_text_model_debugging.ipynb | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/integrations-and-supported-tools/pytorch/pytorch_text_model_debugging.ipynb b/integrations-and-supported-tools/pytorch/pytorch_text_model_debugging.ipynb index 6cde57b..f757ee6 100644 --- a/integrations-and-supported-tools/pytorch/pytorch_text_model_debugging.ipynb +++ b/integrations-and-supported-tools/pytorch/pytorch_text_model_debugging.ipynb @@ -40,7 +40,11 @@ "# TODO - capture per layer losses\n", "# TODO - increase number of layers\n", "# TODO - Adding dropout to demonstrate the effects of regularization\n", - "# TODO - batch normalization" + "# TODO - batch normalization\n", + "# TODO - add class for tracking activations, gradients, etc. automatically\n", + "# TODO - investigate packages that help with tracking layer-wise information\n", + "# TODO - visualize layer-wise information, activations, gradients etc. 
(use as example to share with product)\n", + "# TODO - update hooks for model, ensure tracking correct information" ] }, { From 6e1df271d23e8c064917db4b20e7dd55b7205e59 Mon Sep 17 00:00:00 2001 From: LeoRoccoBreedt Date: Mon, 10 Mar 2025 11:12:41 +0100 Subject: [PATCH 036/125] chore: update sections --- .../pytorch_text_model_debugging.ipynb | 142 +++++++++++------- 1 file changed, 88 insertions(+), 54 deletions(-) diff --git a/integrations-and-supported-tools/pytorch/pytorch_text_model_debugging.ipynb b/integrations-and-supported-tools/pytorch/pytorch_text_model_debugging.ipynb index f757ee6..9de1dc2 100644 --- a/integrations-and-supported-tools/pytorch/pytorch_text_model_debugging.ipynb +++ b/integrations-and-supported-tools/pytorch/pytorch_text_model_debugging.ipynb @@ -1,5 +1,32 @@ { "cells": [ + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# Neptune + PyTorch\n", + "\n", + "## Logging and Visualizing debugging metrics with Neptune\n", + "\n", + "### Introduction\n", + "\n", + "See how Neptune Scale can be used for pre-training models like foundation models by tracking hundreds of metrics. 
\n", + "\n", + "This guide will show you how to:\n", + "- Initialize the Neptune Run object and log configuration parameters\n", + "- Log standard loss and accuracy metrics to Neptune\n", + "- Log debugging metrics during model training such as;\n", + " * Activations per layer\n", + " * Gradients (mean and std) per layer" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Install Dependencies and Import Libraries" + ] + }, { "cell_type": "code", "execution_count": null, @@ -12,10 +39,11 @@ }, { "cell_type": "code", - "execution_count": 147, + "execution_count": null, "metadata": {}, "outputs": [], "source": [ + "# Import libraries\n", "import torch\n", "import torch.nn as nn\n", "import torch.optim as optim\n", @@ -29,6 +57,35 @@ "from neptune_scale import Run" ] }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Initialize Parameters" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "# Initialize model, loss function, and optimizer\n", + "\n", + "params = {\n", + " \"optimizer\": \"Adam\",\n", + " \"batch_size\": 3,\n", + " \"learning_rate\": 0.001,\n", + " \"epochs\": 5, \n", + " \"device\": torch.device(\"cuda\" if torch.cuda.is_available() else \"cpu\"),\n", + " \"input_features\": 256,\n", + " \"embed_size\": 1000,\n", + " \"hidden_size\": 256, # hidden size for the LSTM\n", + " \"dropout_prob\": 0.3,\n", + " \"num_lstm_layers\": 3\n", + "}" + ] + }, { "cell_type": "code", "execution_count": null, @@ -51,41 +108,43 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "During training, capturing valuable information from each layer of a model can provide insights into how the model is learning, what features are being extracted, and how the model's performance can be improved. 
Researchers often look for certain metrics and activations from each layer, which can help in diagnosing issues such as vanishing gradients, overfitting, or underfitting.\n", + "## Importance of Logging Debugging Metrics\n", "\n", - "Key Information to Capture from Each Layer:\n", - "Activations:\n", + "During model training, capturing valuable information from each layer can offer critical insights into the model's learning process, identify potential issues, and guide improvements. Monitoring certain metrics and activations helps diagnose common problems such as vanishing gradients, overfitting, or underfitting. Below are key metrics to track from each layer:\n", "\n", - "What it shows: The activations of each layer during forward propagation can give insights into how the model is processing input data at different levels. By examining activations, you can see if certain layers are \"dead\" (i.e., not learning anything) or if activations are exploding (leading to unstable training).\n", - "How to capture: You can register hooks to capture activations after each layer.\n", - "Gradients:\n", + "### Key Metrics to Capture from Each Layer:\n", "\n", - "What it shows: The gradients of each layer during backpropagation are important for diagnosing issues like vanishing or exploding gradients. If gradients are too small, it can indicate the model is struggling to learn effectively (vanishing gradients). If they're too large, it can indicate instability (exploding gradients).\n", - "How to capture: Similar to activations, you can use hooks to capture gradients during backpropagation.\n", - "Weights and Biases:\n", + "- **Activations**\n", + " - **What it shows**: Provides insight into how the model is processing data. 
Dead or exploding activations can indicate issues with training stability.\n", + " - **How to capture**: Use hooks to capture activations after each layer.\n", "\n", - "What it shows: Tracking the weights and biases of each layer helps researchers understand how the model's parameters are evolving during training. For example, if weights are growing too large or becoming too small, it might suggest the need for better regularization or learning rate adjustments.\n", - "How to capture: You can extract weights and biases directly from the model’s parameters.\n", - "Layer-wise Loss:\n", + "- **Gradients**\n", + " - **What it shows**: Essential for diagnosing vanishing or exploding gradients. Small gradients may indicate vanishing gradients, while large ones can signal instability.\n", + " - **How to capture**: Use hooks to capture gradients during backpropagation.\n", "\n", - "What it shows: Tracking loss at different stages (layers) can help understand which parts of the network are contributing more to the overall loss. This can be valuable for debugging or optimizing the model.\n", - "How to capture: Loss can be tracked by monitoring the output from each layer and comparing it against the target.\n", - "Learning Rate per Layer:\n", + "- **Weights and Biases**\n", + " - **What it shows**: Tracks how the model’s parameters evolve during training. 
Large or small weights may indicate the need for better regularization or adjustments in learning rate.\n", + " - **How to capture**: Extract directly from the model’s parameters.\n", "\n", - "What it shows: If you're using techniques like Layer-wise Learning Rate Decay (L2LRD) or applying different learning rates to different layers, capturing the learning rate used by each layer during training can be informative.\n", - "How to capture: This information would depend on how the learning rate is defined in your optimizer, but it can be tracked manually.\n", - "Layer Output Norms:\n", + "- **Layer-wise Loss**\n", + " - **What it shows**: Identifies which parts of the network contribute more to the overall loss, aiding debugging and optimization.\n", + " - **How to capture**: Monitor outputs from each layer and compare with the target.\n", "\n", - "What it shows: Tracking the L2-norm (magnitude) of the output for each layer can help in detecting issues like gradient explosion or squashing effects. A norm that’s either too high or too low could suggest the model isn't behaving as expected.\n", - "How to capture: You can compute the L2-norm for the output of each layer.\n", - "Activation Distributions:\n", + "- **Learning Rate per Layer**\n", + " - **What it shows**: Helpful if using techniques like Layer-wise Learning Rate Decay (L2LRD). 
Tracking this can provide insight into the layer-specific learning rate.\n", + " - **How to capture**: Manually track based on optimizer settings.\n", "\n", - "What it shows: Visualizing or capturing the distribution of activations (e.g., mean, variance, or histograms) can reveal issues like the saturation of neurons in activation functions, especially in cases with ReLU (leading to dead neurons).\n", - "How to capture: You can visualize or compute statistical summaries of activations using Python libraries like matplotlib or seaborn.\n", - "Feature Maps (for Convolutional Layers):\n", + "- **Layer Output Norms**\n", + " - **What it shows**: The L2-norm of layer outputs can highlight issues like gradient explosion or vanishing gradients.\n", + " - **How to capture**: Compute the L2-norm for each layer’s output.\n", "\n", - "What it shows: If you have convolutional layers, tracking the feature maps can provide insight into how each filter is detecting specific patterns or features from the input data.\n", - "How to capture: You can visualize the feature maps after convolutional layers using libraries like matplotlib." 
+ "- **Activation Distributions**\n", + " - **What it shows**: Helps diagnose saturation issues, especially with ReLU activations that may lead to dead neurons.\n", + " - **How to capture**: Visualize or compute statistical summaries using tools like matplotlib or seaborn.\n", + "\n", + "- **Feature Maps (for Convolutional Layers)**\n", + " - **What it shows**: Offers insights into how convolutional layers detect specific patterns in the data.\n", + " - **How to capture**: Visualize feature maps after convolutional layers using libraries like matplotlib.\n" ] }, { @@ -178,7 +237,7 @@ }, { "cell_type": "code", - "execution_count": 205, + "execution_count": null, "metadata": {}, "outputs": [ { @@ -199,6 +258,7 @@ "\n", "# Vocab size is the number of unique tokens\n", "vocab_size = max(unique_tokens) + 1 # Add 1 since token ID's start from zero\n", + "params[\"vocab_size\"] = vocab_size\n", "print(f\"Vocabulary size: {vocab_size}\")" ] }, @@ -209,32 +269,6 @@ "Everytime a new LSTM layer is added when updating the `num_lstm_layers` parameter is changed, 12 new metrics will be logged which correspond to mean, std and norm that are calculated. 
" ] }, - { - "cell_type": "code", - "execution_count": 206, - "metadata": {}, - "outputs": [], - "source": [ - "# Initialize model, loss function, and optimizer\n", - "\n", - "params = {\n", - " \"optimizer\": \"Adam\",\n", - " \"batch_size\": 3, # TODO - remove\n", - " \"sequence_length\": 5, # TODO - remove\n", - " \"learning_rate\": 0.001,\n", - " \"epochs\": 5, \n", - " \"device\": torch.device(\"cuda\" if torch.cuda.is_available() else \"cpu\"),\n", - " \"input_features\": 256,\n", - " \"n_classes\": 10, # TODO - remove\n", - " \"input_size\": 28 * 28, # TODO - remove\n", - " \"embed_size\": 1000,\n", - " \"hidden_size\": 256, # hidden size for the LSTM\n", - " \"dropout_prob\": 0.3,\n", - " \"vocab_size\": vocab_size,\n", - " \"num_lstm_layers\": 3\n", - "}" - ] - }, { "cell_type": "code", "execution_count": 218, From fbfb0dad99af39edbfbfefa9e968d2799c4181e5 Mon Sep 17 00:00:00 2001 From: LeoRoccoBreedt Date: Mon, 10 Mar 2025 11:16:52 +0100 Subject: [PATCH 037/125] refactor: move data downloading section --- .../pytorch_text_model_debugging.ipynb | 56 +++++++++---------- 1 file changed, 28 insertions(+), 28 deletions(-) diff --git a/integrations-and-supported-tools/pytorch/pytorch_text_model_debugging.ipynb b/integrations-and-supported-tools/pytorch/pytorch_text_model_debugging.ipynb index 9de1dc2..f1d90bd 100644 --- a/integrations-and-supported-tools/pytorch/pytorch_text_model_debugging.ipynb +++ b/integrations-and-supported-tools/pytorch/pytorch_text_model_debugging.ipynb @@ -10,7 +10,7 @@ "\n", "### Introduction\n", "\n", - "See how Neptune Scale can be used for pre-training models like foundation models by tracking hundreds of metrics. \n", + "See how Neptune Scale can be used for pre-training models like foundation models by tracking hundreds of metrics. This example is designed to be used as a code recipe for you to re-use sections with your own code to edit to adapt to your own model training needs. 
\n", "\n", "This guide will show you how to:\n", "- Initialize the Neptune Run object and log configuration parameters\n", @@ -86,6 +86,33 @@ "}" ] }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Download or use next token prediction dataset\n", + "The dataset used in this example is taken from [HuggingFace](https://huggingface.co/datasets/Na0s/Next_Token_Prediction_dataset). In this example, you can increase the size of the dataset to test the logging capabilities of Neptune, but note that increasing the dataset size will increase the time taken for the full dataset to download. The current setup only downloads the first 10 parquet files from the Hugging Face public dataset. " + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "# For the example, download a random subset of 10% of the original dataset\n", + "base_url = \"https://huggingface.co/datasets/Na0s/Next_Token_Prediction_dataset/resolve/main/data/\"\n", + "data_files = {\"train\": base_url + \"train-0000*-of-00067.parquet\", # download only the first 10 files from the HF dataset\n", + " \"validation\": base_url + \"validation-00000-of-00001.parquet\"} #doanload the complete validation dataset\n", + "\n", + "data_subset = load_dataset(\"parquet\", data_files = data_files, num_proc=4)\n", + "train_subset = data_subset[\"train\"].with_format(\"torch\") # HF provides methods to convert datatypes to tensors\n", + "validation_subset = data_subset[\"validation\"].with_format(\"torch\") # HF provides methods to convert datatypes to tensors\n", + "\n", + "train_dataloader = DataLoader(train_subset, batch_size=8, shuffle=True)\n", + "val_dataloader = DataLoader(validation_subset, batch_size=8, shuffle=True)" + ] + }, { "cell_type": "code", "execution_count": null, @@ -208,33 +235,6 @@ " self.gradients = {}\n" ] }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Download or use next token prediction dataset\n", 
- "The dataset used in this example is taken from [HuggingFace](https://huggingface.co/datasets/Na0s/Next_Token_Prediction_dataset). In this example, you can increase the size of the dataset to test the logging capabilities of Neptyune, but note that increasing the dataset size will increase the time taken for the full dataset to download." - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "# For the example, download a random subset of 10% of the original dataset\n", - "base_url = \"https://huggingface.co/datasets/Na0s/Next_Token_Prediction_dataset/resolve/main/data/\"\n", - "data_files = {\"train\": base_url + \"train-0000*-of-00067.parquet\", # download only the first 10 files from the HF dataset\n", - " \"validation\": base_url + \"validation-00000-of-00001.parquet\"} #doanload the complete validation dataset\n", - "\n", - "data_subset = load_dataset(\"parquet\", data_files = data_files, num_proc=4)\n", - "train_subset = data_subset[\"train\"].with_format(\"torch\") # HF provides methods to convert datatypes to tensors\n", - "validation_subset = data_subset[\"validation\"].with_format(\"torch\") # HF provides methods to convert datatypes to tensors\n", - "\n", - "train_dataloader = DataLoader(train_subset, batch_size=8, shuffle=True)\n", - "val_dataloader = DataLoader(validation_subset, batch_size=8, shuffle=True)" - ] - }, { "cell_type": "code", "execution_count": null, From 84149d1f97ba4abc3abb6929b8ed4c26c0329581 Mon Sep 17 00:00:00 2001 From: LeoRoccoBreedt Date: Mon, 10 Mar 2025 11:19:07 +0100 Subject: [PATCH 038/125] refactor: added evaluate function to model initialization cell --- .../pytorch_text_model_debugging.ipynb | 49 ++++++++----------- 1 file changed, 21 insertions(+), 28 deletions(-) diff --git a/integrations-and-supported-tools/pytorch/pytorch_text_model_debugging.ipynb b/integrations-and-supported-tools/pytorch/pytorch_text_model_debugging.ipynb index f1d90bd..cb9dc22 100644 --- 
a/integrations-and-supported-tools/pytorch/pytorch_text_model_debugging.ipynb +++ b/integrations-and-supported-tools/pytorch/pytorch_text_model_debugging.ipynb @@ -176,7 +176,7 @@ }, { "cell_type": "code", - "execution_count": 172, + "execution_count": null, "metadata": {}, "outputs": [], "source": [ @@ -232,7 +232,26 @@ " return self.gradients\n", " \n", " def clear_gradients(self):\n", - " self.gradients = {}\n" + " self.gradients = {}\n", + "\n", + "# Function to evaluate the model after each epoch/step\n", + "def evaluate(model, val_dataloader, criterion, device):\n", + " model.eval() # Set the model to evaluation mode\n", + " total_loss = 0\n", + " with torch.no_grad(): # Disable gradient calculation for validation\n", + " for batch in val_dataloader:\n", + " input_ids = batch['input_ids'].to(device)\n", + " labels = batch['labels'].to(device)\n", + "\n", + " # Forward pass for validation\n", + " logits = model(input_ids) # Shape: (batch_size, seq_len, vocab_size)\n", + " \n", + " # Calculate the loss\n", + " loss = criterion(logits.view(-1, vocab_size), labels.view(-1))\n", + " total_loss += loss.item()\n", + "\n", + " avg_val_loss = total_loss / len(val_dataloader)\n", + " return avg_val_loss" ] }, { @@ -302,32 +321,6 @@ "run.add_tags(tags=[\"text\", \"LLM\", \"Simple\"])" ] }, - { - "cell_type": "code", - "execution_count": 208, - "metadata": {}, - "outputs": [], - "source": [ - "def evaluate(model, val_dataloader, criterion, device):\n", - " model.eval() # Set the model to evaluation mode\n", - " total_loss = 0\n", - " with torch.no_grad(): # Disable gradient calculation for validation\n", - " for batch in val_dataloader:\n", - " input_ids = batch['input_ids'].to(device)\n", - " labels = batch['labels'].to(device)\n", - "\n", - " # Forward pass for validation\n", - " logits = model(input_ids) # Shape: (batch_size, seq_len, vocab_size)\n", - " \n", - " # Calculate the loss\n", - " loss = criterion(logits.view(-1, vocab_size), labels.view(-1))\n", - " 
total_loss += loss.item()\n", - "\n", - " avg_val_loss = total_loss / len(val_dataloader)\n", - " return avg_val_loss\n", - " " - ] - }, { "cell_type": "code", "execution_count": 219, From 2d69e01f46efdefbe79d27f6918f1282e7acafde Mon Sep 17 00:00:00 2001 From: LeoRoccoBreedt Date: Mon, 10 Mar 2025 11:21:21 +0100 Subject: [PATCH 039/125] chore: update introduction for the model architecture and helper functions --- .../pytorch/pytorch_text_model_debugging.ipynb | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/integrations-and-supported-tools/pytorch/pytorch_text_model_debugging.ipynb b/integrations-and-supported-tools/pytorch/pytorch_text_model_debugging.ipynb index cb9dc22..286db6f 100644 --- a/integrations-and-supported-tools/pytorch/pytorch_text_model_debugging.ipynb +++ b/integrations-and-supported-tools/pytorch/pytorch_text_model_debugging.ipynb @@ -174,6 +174,14 @@ " - **How to capture**: Visualize feature maps after convolutional layers using libraries like matplotlib.\n" ] }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Define PyTorch Model Architecture and Helper functions\n", + "We define a simple LLM model architecture using PyTorch. Since this is a text-based example, we use a mebedding layer, an LSTM layer and a fully connected layer. This architecture can be adjusted to your needs and increased in size when testing the workflow." 
+ ] + }, { "cell_type": "code", "execution_count": null, From 90e05396eda8d300b7530972ca076c0f4f9a610d Mon Sep 17 00:00:00 2001 From: LeoRoccoBreedt Date: Mon, 10 Mar 2025 11:28:07 +0100 Subject: [PATCH 040/125] fix: update input for vocab_size --- .../pytorch/pytorch_text_model_debugging.ipynb | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/integrations-and-supported-tools/pytorch/pytorch_text_model_debugging.ipynb b/integrations-and-supported-tools/pytorch/pytorch_text_model_debugging.ipynb index 286db6f..9c5d7ae 100644 --- a/integrations-and-supported-tools/pytorch/pytorch_text_model_debugging.ipynb +++ b/integrations-and-supported-tools/pytorch/pytorch_text_model_debugging.ipynb @@ -243,7 +243,7 @@ " self.gradients = {}\n", "\n", "# Function to evaluate the model after each epoch/step\n", - "def evaluate(model, val_dataloader, criterion, device):\n", + "def evaluate(model, val_dataloader, criterion, device, vocab_size):\n", " model.eval() # Set the model to evaluation mode\n", " total_loss = 0\n", " with torch.no_grad(): # Disable gradient calculation for validation\n", @@ -331,7 +331,7 @@ }, { "cell_type": "code", - "execution_count": 219, + "execution_count": null, "metadata": {}, "outputs": [ { @@ -398,7 +398,7 @@ " print(f\"Step {step_counter} / {len(train_dataloader)}, Loss: {loss.item()}\")\n", "\n", " if step_counter % 5 == 0: # Do not need to log validation at every step, although we can\n", - " val_loss = evaluate(model, val_dataloader, criterion, device)\n", + " val_loss = evaluate(model, val_dataloader, criterion, device, vocab_size)\n", " print(f\"Step {step_counter}, Val_loss: {val_loss}\")\n", "\n", " # Track activations\n", From 46e1f37c592df442878d9e1da1192d0a7995e3d7 Mon Sep 17 00:00:00 2001 From: LeoRoccoBreedt Date: Mon, 10 Mar 2025 11:29:17 +0100 Subject: [PATCH 041/125] refactor: add the vocab_size calculation to the data formatting section --- .../pytorch_text_model_debugging.ipynb | 41 ++++++------------- 1 file 
changed, 13 insertions(+), 28 deletions(-) diff --git a/integrations-and-supported-tools/pytorch/pytorch_text_model_debugging.ipynb b/integrations-and-supported-tools/pytorch/pytorch_text_model_debugging.ipynb index 9c5d7ae..c9beb47 100644 --- a/integrations-and-supported-tools/pytorch/pytorch_text_model_debugging.ipynb +++ b/integrations-and-supported-tools/pytorch/pytorch_text_model_debugging.ipynb @@ -110,7 +110,19 @@ "validation_subset = data_subset[\"validation\"].with_format(\"torch\") # HF provides methods to convert datatypes to tensors\n", "\n", "train_dataloader = DataLoader(train_subset, batch_size=8, shuffle=True)\n", - "val_dataloader = DataLoader(validation_subset, batch_size=8, shuffle=True)" + "val_dataloader = DataLoader(validation_subset, batch_size=8, shuffle=True)\n", + "\n", + "# Determine the vocab size of the dataset\n", + "# Flatten the list of tokenized sentences into one long list of token IDs\n", + "all_tokens = [token for sentence in train_subset[\"input_ids\"] for token in sentence]\n", + "\n", + "# Get unique token IDs\n", + "unique_tokens = set(all_tokens)\n", + "\n", + "# Vocab size is the number of unique tokens\n", + "vocab_size = max(unique_tokens) + 1 # Add 1 since token ID's start from zero\n", + "params[\"vocab_size\"] = vocab_size\n", + "print(f\"Vocabulary size: {vocab_size}\")" ] }, { @@ -262,33 +274,6 @@ " return avg_val_loss" ] }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Vocabulary size: 128257\n" - ] - } - ], - "source": [ - "# Determine the vocab size of the dataset\n", - "# Flatten the list of tokenized sentences into one long list of token IDs\n", - "all_tokens = [token for sentence in train_subset[\"input_ids\"] for token in sentence]\n", - "\n", - "# Get unique token IDs\n", - "unique_tokens = set(all_tokens)\n", - "\n", - "# Vocab size is the number of unique tokens\n", - "vocab_size = max(unique_tokens) + 
1 # Add 1 since token ID's start from zero\n", - "params[\"vocab_size\"] = vocab_size\n", - "print(f\"Vocabulary size: {vocab_size}\")" - ] - }, { "cell_type": "markdown", "metadata": {}, From ca1b52d3af050183ec51a25d8669e67d2d434a2b Mon Sep 17 00:00:00 2001 From: LeoRoccoBreedt Date: Mon, 10 Mar 2025 18:11:27 +0100 Subject: [PATCH 042/125] fix: refactor data loading process from HF --- .../pytorch_text_model_debugging.ipynb | 235 +++++++++++++++--- 1 file changed, 202 insertions(+), 33 deletions(-) diff --git a/integrations-and-supported-tools/pytorch/pytorch_text_model_debugging.ipynb b/integrations-and-supported-tools/pytorch/pytorch_text_model_debugging.ipynb index c9beb47..ed05dd0 100644 --- a/integrations-and-supported-tools/pytorch/pytorch_text_model_debugging.ipynb +++ b/integrations-and-supported-tools/pytorch/pytorch_text_model_debugging.ipynb @@ -39,9 +39,18 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 1, "metadata": {}, - "outputs": [], + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "c:\\Users\\leo.breedt\\miniconda3\\envs\\neptune_scale_py_312_base\\Lib\\site-packages\\tqdm\\auto.py:21: TqdmWarning: IProgress not found. Please update jupyter and ipywidgets. 
See https://ipywidgets.readthedocs.io/en/stable/user_install.html\n", + " from .autonotebook import tqdm as notebook_tqdm\n" + ] + } + ], "source": [ "# Import libraries\n", "import torch\n", @@ -66,7 +75,7 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 2, "metadata": {}, "outputs": [], "source": [ @@ -96,31 +105,78 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 3, "metadata": {}, - "outputs": [], + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "DatasetDict({\n", + " train: Dataset({\n", + " features: ['text', 'meta', '__index_level_0__', 'input_ids', 'attention_mask', 'labels'],\n", + " num_rows: 81926\n", + " })\n", + " validation: Dataset({\n", + " features: ['text', 'meta', '__index_level_0__', 'input_ids', 'attention_mask', 'labels'],\n", + " num_rows: 9347\n", + " })\n", + "})\n", + "DatasetDict({\n", + " train: Dataset({\n", + " features: ['text', 'meta', '__index_level_0__', 'input_ids', 'attention_mask', 'labels'],\n", + " num_rows: 8412\n", + " })\n", + " test: Dataset({\n", + " features: ['text', 'meta', '__index_level_0__', 'input_ids', 'attention_mask', 'labels'],\n", + " num_rows: 935\n", + " })\n", + "})\n" + ] + } + ], "source": [ "# For the example, download a random subset of 10% of the original dataset\n", "base_url = \"https://huggingface.co/datasets/Na0s/Next_Token_Prediction_dataset/resolve/main/data/\"\n", - "data_files = {\"train\": base_url + \"train-0000*-of-00067.parquet\", # download only the first 10 files from the HF dataset\n", + "data_files = {\"train\": base_url + \"train-00001-of-00067.parquet\", # download only the first 10 files from the HF dataset\n", " \"validation\": base_url + \"validation-00000-of-00001.parquet\"} #doanload the complete validation dataset\n", "\n", "data_subset = load_dataset(\"parquet\", data_files = data_files, num_proc=4)\n", - "train_subset = data_subset[\"train\"].with_format(\"torch\") # HF provides methods to 
convert datatypes to tensors\n", - "validation_subset = data_subset[\"validation\"].with_format(\"torch\") # HF provides methods to convert datatypes to tensors\n", + "# validation_subset = load_dataset(\"parquet\", data_files = {\"validation\": base_url + \"validation-00000-of-00001.parquet\"}, num_proc=4, split=[\"validation[:5%]\"])\n", + "validation_subset = data_subset.get(\"validation\").train_test_split(test_size=0.1)\n", + "print(data_subset)\n", + "print(validation_subset)" + ] + }, + { + "cell_type": "code", + "execution_count": 4, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Vocabulary size: 128257\n" + ] + } + ], + "source": [ + "train_subset = data_subset[\"train\"].with_format(type=\"torch\", columns=[\"text\", \"input_ids\", \"labels\"]) # HF provides methods to convert datatypes to tensors\n", + "validation_subset = data_subset[\"validation\"].with_format(type=\"torch\", columns=[\"text\", \"input_ids\", \"labels\"]) # HF provides methods to convert datatypes to tensors\n", "\n", "train_dataloader = DataLoader(train_subset, batch_size=8, shuffle=True)\n", "val_dataloader = DataLoader(validation_subset, batch_size=8, shuffle=True)\n", "\n", "# Determine the vocab size of the dataset\n", "# Flatten the list of tokenized sentences into one long list of token IDs\n", - "all_tokens = [token for sentence in train_subset[\"input_ids\"] for token in sentence]\n", + "all_tokens = [token for sentence in data_subset[\"train\"][\"input_ids\"] for token in sentence]\n", "\n", "# Get unique token IDs\n", - "unique_tokens = set(all_tokens)\n", + "# unique_tokens = set(all_tokens)\n", "\n", "# Vocab size is the number of unique tokens\n", - "vocab_size = max(unique_tokens) + 1 # Add 1 since token ID's start from zero\n", + "vocab_size = max(all_tokens) + 1 # Add 1 since token ID's start from zero\n", "params[\"vocab_size\"] = vocab_size\n", "print(f\"Vocabulary size: {vocab_size}\")" ] @@ -274,6 +330,95 @@ " 
return avg_val_loss" ] }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "# Define the simple LLM model with LSTM\n", + "class SimpleLLM(nn.Module):\n", + " def __init__(self, vocab_size, embed_size, hidden_size, num_layers):\n", + " super(SimpleLLM, self).__init__()\n", + " self.embedding = nn.Embedding(vocab_size, embed_size)\n", + " self.lstm = nn.LSTM(embed_size, 512, num_layers = num_layers, dropout = 0.3, batch_first=True)\n", + " self.fc1 = nn.Linear(512, hidden_size)\n", + " self.fc2 = nn.Linear(hidden_size, vocab_size)\n", + " \n", + " def forward(self, x):\n", + " x = self.embedding(x)\n", + " lstm_out, _ = self.lstm(x) # LSTM returns output and hidden/cell state tuple\n", + " out = self.fc1(lstm_out) # Use the last output from the LSTM\n", + " out = self.fc2(out)\n", + " return out\n", + " \n", + "# model = SimpleLLM(params[\"vocab_size\"], params[\"embed_size\"], params[\"hidden_size\"], params[\"num_lstm_layers\"])\n", + "\n", + "# A class to manage hooks for activations and gradients\n", + "class HookManager:\n", + " def __init__(self, model):\n", + " self.model = model\n", + " self.hooks = []\n", + " self.activations = {}\n", + " self.gradients = {}\n", + "\n", + " # Function to save activations\n", + " def save_activation(self, name):\n", + " def hook(module, input, output):\n", + " self.activations[name] = output\n", + " return hook\n", + "\n", + " # Function to save gradients (registering hooks for the model parameters)\n", + " def save_gradient(self, name):\n", + " def hook(module, grad_input, grad_output):\n", + " self.gradients[name] = grad_output[0]\n", + " return hook\n", + "\n", + " # Function to register hooks for activations and gradients\n", + " def register_hooks(self):\n", + " # Register forward hooks for activations\n", + " self.hooks.append(self.model.lstm.register_forward_hook(self.save_activation(\"lstm\")))\n", + " 
self.hooks.append(self.model.fc1.register_forward_hook(self.save_activation(\"fc1\")))\n", + " self.hooks.append(self.model.fc2.register_forward_hook(self.save_activation(\"fc2\")))\n", + "\n", + " # Register backward hooks for gradients\n", + " for name, module in self.model.named_modules():\n", + " if isinstance(module, (nn.LSTM, nn.Linear)): # You can add more layer types here\n", + " self.hooks.append(module.register_full_backward_hook(self.save_gradient(name)))\n", + "\n", + " # Function to clear activations and gradients after use\n", + " def clear(self):\n", + " self.activations = {}\n", + " self.gradients = {}\n", + "\n", + " # Function to get activations\n", + " def get_activations(self):\n", + " return self.activations\n", + "\n", + " # Function to get gradients\n", + " def get_gradients(self):\n", + " return self.gradients\n", + "\n", + "# Function to evaluate the model after each epoch/step\n", + "def evaluate(model, val_dataloader, criterion, device, vocab_size):\n", + " model.eval() # Set the model to evaluation mode\n", + " total_loss = 0\n", + " with torch.no_grad(): # Disable gradient calculation for validation\n", + " for batch in val_dataloader:\n", + " input_ids = batch['input_ids'].to(device)\n", + " labels = batch['labels'].to(device)\n", + "\n", + " # Forward pass for validation\n", + " logits = model(input_ids) # Shape: (batch_size, seq_len, vocab_size)\n", + " \n", + " # Calculate the loss\n", + " loss = criterion(logits.view(-1, vocab_size), labels.view(-1))\n", + " total_loss += loss.item()\n", + "\n", + " avg_val_loss = total_loss / len(val_dataloader)\n", + " return avg_val_loss" + ] + }, { "cell_type": "markdown", "metadata": {}, @@ -283,7 +428,7 @@ }, { "cell_type": "code", - "execution_count": 218, + "execution_count": 36, "metadata": {}, "outputs": [], "source": [ @@ -301,11 +446,9 @@ " \"config/learning_rate\": params[\"learning_rate\"],\n", " \"config/optimizer\": params[\"optimizer\"],\n", " \"config/batch_size\": 
params[\"batch_size\"],\n", - " \"config/sequece_length\": params[\"sequence_length\"],\n", " \"config/epochs\": params[\"epochs\"],\n", - " \"config/input_size\": params[\"input_size\"],\n", " \"config/num_lstm_layers\" : params[\"num_lstm_layers\"],\n", - " \"data/vocab_size\": params[\"vocab_size\"],\n", + " # \"data/vocab_size\": params[\"vocab_size\"],\n", " \"data/embed_size\": params[\"embed_size\"]\n", " }\n", ")\n", @@ -316,32 +459,43 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 37, "metadata": {}, "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "c:\\Users\\leo.breedt\\miniconda3\\envs\\neptune_scale_py_312_base\\Lib\\site-packages\\torch\\nn\\modules\\module.py:1830: FutureWarning: Using non-full backward hooks on a Module that does not return a single Tensor or a tuple of Tensors is deprecated and will be removed in future versions. This hook will be missing some of the grad_output. Please use register_full_backward_hook to get the documented behavior.\n", + " self._maybe_warn_non_full_backward_hook(args, result, grad_fn)\n", + "c:\\Users\\leo.breedt\\miniconda3\\envs\\neptune_scale_py_312_base\\Lib\\site-packages\\torch\\nn\\modules\\module.py:1830: FutureWarning: Using a non-full backward hook when the forward contains multiple autograd Nodes is deprecated and will be removed in future versions. This hook will be missing some grad_input. 
Please use register_full_backward_hook to get the documented behavior.\n", + " self._maybe_warn_non_full_backward_hook(args, result, grad_fn)\n" + ] + }, { "name": "stdout", "output_type": "stream", "text": [ - "Step 1 / 102408, Loss: 11.76780891418457\n", - "Step 2 / 102408, Loss: 11.754159927368164\n" + "Step 1 / 10241, Loss: 11.76430606842041\n", + "Step 2 / 10241, Loss: 11.756403923034668\n", + "Step 3 / 10241, Loss: 11.73224925994873\n", + "Step 4 / 10241, Loss: 11.687955856323242\n", + "Step 5 / 10241, Loss: 11.431197166442871\n" ] }, { - "ename": "KeyboardInterrupt", - "evalue": "", + "ename": "KeyError", + "evalue": "3", "output_type": "error", "traceback": [ "\u001b[1;31m---------------------------------------------------------------------------\u001b[0m", - "\u001b[1;31mKeyboardInterrupt\u001b[0m Traceback (most recent call last)", - "Cell \u001b[1;32mIn[219], line 32\u001b[0m\n\u001b[0;32m 30\u001b[0m \u001b[38;5;66;03m# Backward pass and optimization\u001b[39;00m\n\u001b[0;32m 31\u001b[0m loss\u001b[38;5;241m.\u001b[39mbackward()\n\u001b[1;32m---> 32\u001b[0m \u001b[43moptimizer\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mstep\u001b[49m\u001b[43m(\u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m 34\u001b[0m total_loss \u001b[38;5;241m+\u001b[39m\u001b[38;5;241m=\u001b[39m loss\u001b[38;5;241m.\u001b[39mitem()\n\u001b[0;32m 35\u001b[0m \u001b[38;5;28mprint\u001b[39m(\u001b[38;5;124mf\u001b[39m\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mStep \u001b[39m\u001b[38;5;132;01m{\u001b[39;00mstep_counter\u001b[38;5;132;01m}\u001b[39;00m\u001b[38;5;124m / \u001b[39m\u001b[38;5;132;01m{\u001b[39;00m\u001b[38;5;28mlen\u001b[39m(train_dataloader)\u001b[38;5;132;01m}\u001b[39;00m\u001b[38;5;124m, Loss: \u001b[39m\u001b[38;5;132;01m{\u001b[39;00mloss\u001b[38;5;241m.\u001b[39mitem()\u001b[38;5;132;01m}\u001b[39;00m\u001b[38;5;124m\"\u001b[39m)\n", - "File 
\u001b[1;32mc:\\Users\\leo.breedt\\miniconda3\\envs\\neptune_scale_py_312_base\\Lib\\site-packages\\torch\\optim\\optimizer.py:493\u001b[0m, in \u001b[0;36mOptimizer.profile_hook_step..wrapper\u001b[1;34m(*args, **kwargs)\u001b[0m\n\u001b[0;32m 488\u001b[0m \u001b[38;5;28;01melse\u001b[39;00m:\n\u001b[0;32m 489\u001b[0m \u001b[38;5;28;01mraise\u001b[39;00m \u001b[38;5;167;01mRuntimeError\u001b[39;00m(\n\u001b[0;32m 490\u001b[0m \u001b[38;5;124mf\u001b[39m\u001b[38;5;124m\"\u001b[39m\u001b[38;5;132;01m{\u001b[39;00mfunc\u001b[38;5;132;01m}\u001b[39;00m\u001b[38;5;124m must return None or a tuple of (new_args, new_kwargs), but got \u001b[39m\u001b[38;5;132;01m{\u001b[39;00mresult\u001b[38;5;132;01m}\u001b[39;00m\u001b[38;5;124m.\u001b[39m\u001b[38;5;124m\"\u001b[39m\n\u001b[0;32m 491\u001b[0m )\n\u001b[1;32m--> 493\u001b[0m out \u001b[38;5;241m=\u001b[39m \u001b[43mfunc\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m 494\u001b[0m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_optimizer_step_code()\n\u001b[0;32m 496\u001b[0m \u001b[38;5;66;03m# call optimizer step post hooks\u001b[39;00m\n", - "File \u001b[1;32mc:\\Users\\leo.breedt\\miniconda3\\envs\\neptune_scale_py_312_base\\Lib\\site-packages\\torch\\optim\\optimizer.py:91\u001b[0m, in \u001b[0;36m_use_grad_for_differentiable.._use_grad\u001b[1;34m(self, *args, **kwargs)\u001b[0m\n\u001b[0;32m 89\u001b[0m torch\u001b[38;5;241m.\u001b[39mset_grad_enabled(\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mdefaults[\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mdifferentiable\u001b[39m\u001b[38;5;124m\"\u001b[39m])\n\u001b[0;32m 90\u001b[0m torch\u001b[38;5;241m.\u001b[39m_dynamo\u001b[38;5;241m.\u001b[39mgraph_break()\n\u001b[1;32m---> 91\u001b[0m ret \u001b[38;5;241m=\u001b[39m 
\u001b[43mfunc\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m 92\u001b[0m \u001b[38;5;28;01mfinally\u001b[39;00m:\n\u001b[0;32m 93\u001b[0m torch\u001b[38;5;241m.\u001b[39m_dynamo\u001b[38;5;241m.\u001b[39mgraph_break()\n", - "File \u001b[1;32mc:\\Users\\leo.breedt\\miniconda3\\envs\\neptune_scale_py_312_base\\Lib\\site-packages\\torch\\optim\\adam.py:244\u001b[0m, in \u001b[0;36mAdam.step\u001b[1;34m(self, closure)\u001b[0m\n\u001b[0;32m 232\u001b[0m beta1, beta2 \u001b[38;5;241m=\u001b[39m group[\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mbetas\u001b[39m\u001b[38;5;124m\"\u001b[39m]\n\u001b[0;32m 234\u001b[0m has_complex \u001b[38;5;241m=\u001b[39m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_init_group(\n\u001b[0;32m 235\u001b[0m group,\n\u001b[0;32m 236\u001b[0m params_with_grad,\n\u001b[1;32m (...)\u001b[0m\n\u001b[0;32m 241\u001b[0m state_steps,\n\u001b[0;32m 242\u001b[0m )\n\u001b[1;32m--> 244\u001b[0m \u001b[43madam\u001b[49m\u001b[43m(\u001b[49m\n\u001b[0;32m 245\u001b[0m \u001b[43m \u001b[49m\u001b[43mparams_with_grad\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 246\u001b[0m \u001b[43m \u001b[49m\u001b[43mgrads\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 247\u001b[0m \u001b[43m \u001b[49m\u001b[43mexp_avgs\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 248\u001b[0m \u001b[43m \u001b[49m\u001b[43mexp_avg_sqs\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 249\u001b[0m \u001b[43m \u001b[49m\u001b[43mmax_exp_avg_sqs\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 250\u001b[0m \u001b[43m \u001b[49m\u001b[43mstate_steps\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 251\u001b[0m \u001b[43m 
\u001b[49m\u001b[43mamsgrad\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mgroup\u001b[49m\u001b[43m[\u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43mamsgrad\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m]\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 252\u001b[0m \u001b[43m \u001b[49m\u001b[43mhas_complex\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mhas_complex\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 253\u001b[0m \u001b[43m \u001b[49m\u001b[43mbeta1\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mbeta1\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 254\u001b[0m \u001b[43m \u001b[49m\u001b[43mbeta2\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mbeta2\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 255\u001b[0m \u001b[43m \u001b[49m\u001b[43mlr\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mgroup\u001b[49m\u001b[43m[\u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43mlr\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m]\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 256\u001b[0m \u001b[43m \u001b[49m\u001b[43mweight_decay\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mgroup\u001b[49m\u001b[43m[\u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43mweight_decay\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m]\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 257\u001b[0m \u001b[43m \u001b[49m\u001b[43meps\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mgroup\u001b[49m\u001b[43m[\u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43meps\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m]\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 258\u001b[0m \u001b[43m \u001b[49m\u001b[43mmaximize\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mgroup\u001b[49m\u001b[43m[\u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43mmaximize\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m]\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 259\u001b[0m \u001b[43m 
\u001b[49m\u001b[43mforeach\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mgroup\u001b[49m\u001b[43m[\u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43mforeach\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m]\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 260\u001b[0m \u001b[43m \u001b[49m\u001b[43mcapturable\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mgroup\u001b[49m\u001b[43m[\u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43mcapturable\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m]\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 261\u001b[0m \u001b[43m \u001b[49m\u001b[43mdifferentiable\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mgroup\u001b[49m\u001b[43m[\u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43mdifferentiable\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m]\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 262\u001b[0m \u001b[43m \u001b[49m\u001b[43mfused\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mgroup\u001b[49m\u001b[43m[\u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43mfused\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m]\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 263\u001b[0m \u001b[43m \u001b[49m\u001b[43mgrad_scale\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[38;5;28;43mgetattr\u001b[39;49m\u001b[43m(\u001b[49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43mgrad_scale\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;28;43;01mNone\u001b[39;49;00m\u001b[43m)\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 264\u001b[0m \u001b[43m \u001b[49m\u001b[43mfound_inf\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[38;5;28;43mgetattr\u001b[39;49m\u001b[43m(\u001b[49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[43m,\u001b[49m\u001b[43m 
\u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43mfound_inf\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;28;43;01mNone\u001b[39;49;00m\u001b[43m)\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 265\u001b[0m \u001b[43m \u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m 267\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m loss\n", - "File \u001b[1;32mc:\\Users\\leo.breedt\\miniconda3\\envs\\neptune_scale_py_312_base\\Lib\\site-packages\\torch\\optim\\optimizer.py:154\u001b[0m, in \u001b[0;36m_disable_dynamo_if_unsupported..wrapper..maybe_fallback\u001b[1;34m(*args, **kwargs)\u001b[0m\n\u001b[0;32m 152\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m disabled_func(\u001b[38;5;241m*\u001b[39margs, \u001b[38;5;241m*\u001b[39m\u001b[38;5;241m*\u001b[39mkwargs)\n\u001b[0;32m 153\u001b[0m \u001b[38;5;28;01melse\u001b[39;00m:\n\u001b[1;32m--> 154\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[43mfunc\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n", - "File \u001b[1;32mc:\\Users\\leo.breedt\\miniconda3\\envs\\neptune_scale_py_312_base\\Lib\\site-packages\\torch\\optim\\adam.py:876\u001b[0m, in \u001b[0;36madam\u001b[1;34m(params, grads, exp_avgs, exp_avg_sqs, max_exp_avg_sqs, state_steps, foreach, capturable, differentiable, fused, grad_scale, found_inf, has_complex, amsgrad, beta1, beta2, lr, weight_decay, eps, maximize)\u001b[0m\n\u001b[0;32m 873\u001b[0m \u001b[38;5;28;01melse\u001b[39;00m:\n\u001b[0;32m 874\u001b[0m func \u001b[38;5;241m=\u001b[39m _single_tensor_adam\n\u001b[1;32m--> 876\u001b[0m \u001b[43mfunc\u001b[49m\u001b[43m(\u001b[49m\n\u001b[0;32m 877\u001b[0m \u001b[43m \u001b[49m\u001b[43mparams\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 878\u001b[0m \u001b[43m 
\u001b[49m\u001b[43mgrads\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 879\u001b[0m \u001b[43m \u001b[49m\u001b[43mexp_avgs\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 880\u001b[0m \u001b[43m \u001b[49m\u001b[43mexp_avg_sqs\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 881\u001b[0m \u001b[43m \u001b[49m\u001b[43mmax_exp_avg_sqs\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 882\u001b[0m \u001b[43m \u001b[49m\u001b[43mstate_steps\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 883\u001b[0m \u001b[43m \u001b[49m\u001b[43mamsgrad\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mamsgrad\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 884\u001b[0m \u001b[43m \u001b[49m\u001b[43mhas_complex\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mhas_complex\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 885\u001b[0m \u001b[43m \u001b[49m\u001b[43mbeta1\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mbeta1\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 886\u001b[0m \u001b[43m \u001b[49m\u001b[43mbeta2\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mbeta2\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 887\u001b[0m \u001b[43m \u001b[49m\u001b[43mlr\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mlr\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 888\u001b[0m \u001b[43m \u001b[49m\u001b[43mweight_decay\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mweight_decay\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 889\u001b[0m \u001b[43m \u001b[49m\u001b[43meps\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43meps\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 890\u001b[0m \u001b[43m \u001b[49m\u001b[43mmaximize\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mmaximize\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 891\u001b[0m \u001b[43m \u001b[49m\u001b[43mcapturable\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mcapturable\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 892\u001b[0m \u001b[43m 
\u001b[49m\u001b[43mdifferentiable\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mdifferentiable\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 893\u001b[0m \u001b[43m \u001b[49m\u001b[43mgrad_scale\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mgrad_scale\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 894\u001b[0m \u001b[43m \u001b[49m\u001b[43mfound_inf\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mfound_inf\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 895\u001b[0m \u001b[43m\u001b[49m\u001b[43m)\u001b[49m\n", - "File \u001b[1;32mc:\\Users\\leo.breedt\\miniconda3\\envs\\neptune_scale_py_312_base\\Lib\\site-packages\\torch\\optim\\adam.py:476\u001b[0m, in \u001b[0;36m_single_tensor_adam\u001b[1;34m(params, grads, exp_avgs, exp_avg_sqs, max_exp_avg_sqs, state_steps, grad_scale, found_inf, amsgrad, has_complex, beta1, beta2, lr, weight_decay, eps, maximize, capturable, differentiable)\u001b[0m\n\u001b[0;32m 474\u001b[0m denom \u001b[38;5;241m=\u001b[39m (max_exp_avg_sqs[i]\u001b[38;5;241m.\u001b[39msqrt() \u001b[38;5;241m/\u001b[39m bias_correction2_sqrt)\u001b[38;5;241m.\u001b[39madd_(eps)\n\u001b[0;32m 475\u001b[0m \u001b[38;5;28;01melse\u001b[39;00m:\n\u001b[1;32m--> 476\u001b[0m denom \u001b[38;5;241m=\u001b[39m \u001b[43m(\u001b[49m\u001b[43mexp_avg_sq\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43msqrt\u001b[49m\u001b[43m(\u001b[49m\u001b[43m)\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m/\u001b[39;49m\u001b[43m \u001b[49m\u001b[43mbias_correction2_sqrt\u001b[49m\u001b[43m)\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43madd_\u001b[49m\u001b[43m(\u001b[49m\u001b[43meps\u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m 478\u001b[0m param\u001b[38;5;241m.\u001b[39maddcdiv_(exp_avg, denom, value\u001b[38;5;241m=\u001b[39m\u001b[38;5;241m-\u001b[39mstep_size)\n\u001b[0;32m 480\u001b[0m \u001b[38;5;66;03m# Lastly, switch back to complex view\u001b[39;00m\n", - "\u001b[1;31mKeyboardInterrupt\u001b[0m: " + "\u001b[1;31mKeyError\u001b[0m Traceback 
(most recent call last)", + "Cell \u001b[1;32mIn[37], line 42\u001b[0m\n\u001b[0;32m 39\u001b[0m \u001b[38;5;28mprint\u001b[39m(\u001b[38;5;124mf\u001b[39m\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mStep \u001b[39m\u001b[38;5;132;01m{\u001b[39;00mstep_counter\u001b[38;5;132;01m}\u001b[39;00m\u001b[38;5;124m / \u001b[39m\u001b[38;5;132;01m{\u001b[39;00m\u001b[38;5;28mlen\u001b[39m(train_dataloader)\u001b[38;5;132;01m}\u001b[39;00m\u001b[38;5;124m, Loss: \u001b[39m\u001b[38;5;132;01m{\u001b[39;00mloss\u001b[38;5;241m.\u001b[39mitem()\u001b[38;5;132;01m}\u001b[39;00m\u001b[38;5;124m\"\u001b[39m)\n\u001b[0;32m 41\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m step_counter \u001b[38;5;241m%\u001b[39m \u001b[38;5;241m5\u001b[39m \u001b[38;5;241m==\u001b[39m \u001b[38;5;241m0\u001b[39m: \u001b[38;5;66;03m# Do not need to log validation at every step, although we can\u001b[39;00m\n\u001b[1;32m---> 42\u001b[0m val_loss \u001b[38;5;241m=\u001b[39m \u001b[43mevaluate\u001b[49m\u001b[43m(\u001b[49m\u001b[43mmodel\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mval_dataloader\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mcriterion\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mdevice\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mvocab_size\u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m 43\u001b[0m \u001b[38;5;28mprint\u001b[39m(\u001b[38;5;124mf\u001b[39m\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mStep \u001b[39m\u001b[38;5;132;01m{\u001b[39;00mstep_counter\u001b[38;5;132;01m}\u001b[39;00m\u001b[38;5;124m, Val_loss: \u001b[39m\u001b[38;5;132;01m{\u001b[39;00mval_loss\u001b[38;5;132;01m}\u001b[39;00m\u001b[38;5;124m\"\u001b[39m)\n\u001b[0;32m 45\u001b[0m \u001b[38;5;66;03m# Track activations\u001b[39;00m\n", + "Cell \u001b[1;32mIn[30], line 69\u001b[0m, in \u001b[0;36mevaluate\u001b[1;34m(model, val_dataloader, criterion, device, vocab_size)\u001b[0m\n\u001b[0;32m 67\u001b[0m total_loss \u001b[38;5;241m=\u001b[39m 
\u001b[38;5;241m0\u001b[39m\n\u001b[0;32m 68\u001b[0m \u001b[38;5;28;01mwith\u001b[39;00m torch\u001b[38;5;241m.\u001b[39mno_grad(): \u001b[38;5;66;03m# Disable gradient calculation for validation\u001b[39;00m\n\u001b[1;32m---> 69\u001b[0m \u001b[43m \u001b[49m\u001b[38;5;28;43;01mfor\u001b[39;49;00m\u001b[43m \u001b[49m\u001b[43mbatch\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;129;43;01min\u001b[39;49;00m\u001b[43m \u001b[49m\u001b[43mval_dataloader\u001b[49m\u001b[43m:\u001b[49m\n\u001b[0;32m 70\u001b[0m \u001b[43m \u001b[49m\u001b[43minput_ids\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43m \u001b[49m\u001b[43mbatch\u001b[49m\u001b[43m[\u001b[49m\u001b[38;5;124;43m'\u001b[39;49m\u001b[38;5;124;43minput_ids\u001b[39;49m\u001b[38;5;124;43m'\u001b[39;49m\u001b[43m]\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mto\u001b[49m\u001b[43m(\u001b[49m\u001b[43mdevice\u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m 71\u001b[0m \u001b[43m \u001b[49m\u001b[43mlabels\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43m \u001b[49m\u001b[43mbatch\u001b[49m\u001b[43m[\u001b[49m\u001b[38;5;124;43m'\u001b[39;49m\u001b[38;5;124;43mlabels\u001b[39;49m\u001b[38;5;124;43m'\u001b[39;49m\u001b[43m]\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mto\u001b[49m\u001b[43m(\u001b[49m\u001b[43mdevice\u001b[49m\u001b[43m)\u001b[49m\n", + "File \u001b[1;32mc:\\Users\\leo.breedt\\miniconda3\\envs\\neptune_scale_py_312_base\\Lib\\site-packages\\torch\\utils\\data\\dataloader.py:708\u001b[0m, in \u001b[0;36m_BaseDataLoaderIter.__next__\u001b[1;34m(self)\u001b[0m\n\u001b[0;32m 705\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_sampler_iter \u001b[38;5;129;01mis\u001b[39;00m \u001b[38;5;28;01mNone\u001b[39;00m:\n\u001b[0;32m 706\u001b[0m \u001b[38;5;66;03m# TODO(https://github.com/pytorch/pytorch/issues/76750)\u001b[39;00m\n\u001b[0;32m 707\u001b[0m 
\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_reset() \u001b[38;5;66;03m# type: ignore[call-arg]\u001b[39;00m\n\u001b[1;32m--> 708\u001b[0m data \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_next_data\u001b[49m\u001b[43m(\u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m 709\u001b[0m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_num_yielded \u001b[38;5;241m+\u001b[39m\u001b[38;5;241m=\u001b[39m \u001b[38;5;241m1\u001b[39m\n\u001b[0;32m 710\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m (\n\u001b[0;32m 711\u001b[0m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_dataset_kind \u001b[38;5;241m==\u001b[39m _DatasetKind\u001b[38;5;241m.\u001b[39mIterable\n\u001b[0;32m 712\u001b[0m \u001b[38;5;129;01mand\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_IterableDataset_len_called \u001b[38;5;129;01mis\u001b[39;00m \u001b[38;5;129;01mnot\u001b[39;00m \u001b[38;5;28;01mNone\u001b[39;00m\n\u001b[0;32m 713\u001b[0m \u001b[38;5;129;01mand\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_num_yielded \u001b[38;5;241m>\u001b[39m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_IterableDataset_len_called\n\u001b[0;32m 714\u001b[0m ):\n", + "File \u001b[1;32mc:\\Users\\leo.breedt\\miniconda3\\envs\\neptune_scale_py_312_base\\Lib\\site-packages\\torch\\utils\\data\\dataloader.py:764\u001b[0m, in \u001b[0;36m_SingleProcessDataLoaderIter._next_data\u001b[1;34m(self)\u001b[0m\n\u001b[0;32m 762\u001b[0m \u001b[38;5;28;01mdef\u001b[39;00m \u001b[38;5;21m_next_data\u001b[39m(\u001b[38;5;28mself\u001b[39m):\n\u001b[0;32m 763\u001b[0m index \u001b[38;5;241m=\u001b[39m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_next_index() \u001b[38;5;66;03m# may raise StopIteration\u001b[39;00m\n\u001b[1;32m--> 764\u001b[0m data \u001b[38;5;241m=\u001b[39m 
\u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_dataset_fetcher\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mfetch\u001b[49m\u001b[43m(\u001b[49m\u001b[43mindex\u001b[49m\u001b[43m)\u001b[49m \u001b[38;5;66;03m# may raise StopIteration\u001b[39;00m\n\u001b[0;32m 765\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_pin_memory:\n\u001b[0;32m 766\u001b[0m data \u001b[38;5;241m=\u001b[39m _utils\u001b[38;5;241m.\u001b[39mpin_memory\u001b[38;5;241m.\u001b[39mpin_memory(data, \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_pin_memory_device)\n", + "File \u001b[1;32mc:\\Users\\leo.breedt\\miniconda3\\envs\\neptune_scale_py_312_base\\Lib\\site-packages\\torch\\utils\\data\\_utils\\fetch.py:52\u001b[0m, in \u001b[0;36m_MapDatasetFetcher.fetch\u001b[1;34m(self, possibly_batched_index)\u001b[0m\n\u001b[0;32m 50\u001b[0m data \u001b[38;5;241m=\u001b[39m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mdataset\u001b[38;5;241m.\u001b[39m__getitems__(possibly_batched_index)\n\u001b[0;32m 51\u001b[0m \u001b[38;5;28;01melse\u001b[39;00m:\n\u001b[1;32m---> 52\u001b[0m data \u001b[38;5;241m=\u001b[39m [\u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mdataset\u001b[49m\u001b[43m[\u001b[49m\u001b[43midx\u001b[49m\u001b[43m]\u001b[49m \u001b[38;5;28;01mfor\u001b[39;00m idx \u001b[38;5;129;01min\u001b[39;00m possibly_batched_index]\n\u001b[0;32m 53\u001b[0m \u001b[38;5;28;01melse\u001b[39;00m:\n\u001b[0;32m 54\u001b[0m data \u001b[38;5;241m=\u001b[39m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mdataset[possibly_batched_index]\n", + "\u001b[1;31mKeyError\u001b[0m: 3" ] } ], @@ -353,6 +507,9 @@ "optimizer = optim.Adam(model.parameters(), lr = params[\"learning_rate\"])\n", "criterion = nn.CrossEntropyLoss(ignore_index=-100) # Ignore the buffering index of -100 in the dataset\n", "\n", + "hook_manager = HookManager(model)\n", + 
"hook_manager.register_hooks()\n", + "\n", "device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')\n", "model.to(device)\n", "step_counter = 0\n", @@ -378,6 +535,7 @@ " # Backward pass and optimization\n", " loss.backward()\n", " optimizer.step()\n", + "\n", " \n", " total_loss += loss.item()\n", " print(f\"Step {step_counter} / {len(train_dataloader)}, Loss: {loss.item()}\")\n", @@ -391,9 +549,11 @@ " debug_metrics[f\"layers/layer_{name}/activation_mean\"] = activation[0].mean().item()\n", " debug_metrics[f\"layers/layer_{name}/activation_std\"] = activation[0].std().item()\n", "\n", - " # Track gradient norm\n", - " # for layer, gradient in model.get_gradients().items():\n", - " # grad_norms[f\"grad_norm/{name}\"] = gradient.norm().item()\n", + " # Track gradients with hooks\n", + " gradients = hook_manager.get_gradients()\n", + " for layer, gradient in gradients.items():\n", + " # debug_metrics[f\"grad_hooks/{name}\"] = gradient.shape()\n", + " print(f\"Gradients for {name}: {gradient.shape}\")\n", "\n", " # Track gradients per layer at each epoch\n", " for name, param in model.named_parameters():\n", @@ -420,9 +580,18 @@ }, { "cell_type": "code", - "execution_count": 220, + "execution_count": null, "metadata": {}, "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "neptune:INFO: Waiting for all operations to be processed\n", + "neptune:WARNING: No timeout specified. 
Waiting indefinitely\n", + "neptune:INFO: All operations were processed\n" + ] + }, { "name": "stderr", "output_type": "stream", From 836c452ddfcd0616d79bccb555205e15f3bf5834 Mon Sep 17 00:00:00 2001 From: LeoRoccoBreedt Date: Mon, 10 Mar 2025 20:12:39 +0100 Subject: [PATCH 043/125] fix: update validation data to use test subset from HF and comment out activation tracking --- .../pytorch_text_model_debugging.ipynb | 81 +++++++++---------- 1 file changed, 36 insertions(+), 45 deletions(-) diff --git a/integrations-and-supported-tools/pytorch/pytorch_text_model_debugging.ipynb b/integrations-and-supported-tools/pytorch/pytorch_text_model_debugging.ipynb index ed05dd0..1a91f7a 100644 --- a/integrations-and-supported-tools/pytorch/pytorch_text_model_debugging.ipynb +++ b/integrations-and-supported-tools/pytorch/pytorch_text_model_debugging.ipynb @@ -39,7 +39,7 @@ }, { "cell_type": "code", - "execution_count": 1, + "execution_count": null, "metadata": {}, "outputs": [ { @@ -105,7 +105,7 @@ }, { "cell_type": "code", - "execution_count": 3, + "execution_count": 10, "metadata": {}, "outputs": [ { @@ -150,7 +150,7 @@ }, { "cell_type": "code", - "execution_count": 4, + "execution_count": 11, "metadata": {}, "outputs": [ { @@ -163,7 +163,7 @@ ], "source": [ "train_subset = data_subset[\"train\"].with_format(type=\"torch\", columns=[\"text\", \"input_ids\", \"labels\"]) # HF provides methods to convert datatypes to tensors\n", - "validation_subset = data_subset[\"validation\"].with_format(type=\"torch\", columns=[\"text\", \"input_ids\", \"labels\"]) # HF provides methods to convert datatypes to tensors\n", + "validation_subset = validation_subset[\"test\"].with_format(type=\"torch\", columns=[\"text\", \"input_ids\", \"labels\"]) # HF provides methods to convert datatypes to tensors\n", "\n", "train_dataloader = DataLoader(train_subset, batch_size=8, shuffle=True)\n", "val_dataloader = DataLoader(validation_subset, batch_size=8, shuffle=True)\n", @@ -332,7 +332,7 @@ }, { 
"cell_type": "code", - "execution_count": null, + "execution_count": 25, "metadata": {}, "outputs": [], "source": [ @@ -428,7 +428,7 @@ }, { "cell_type": "code", - "execution_count": 36, + "execution_count": 31, "metadata": {}, "outputs": [], "source": [ @@ -459,43 +459,42 @@ }, { "cell_type": "code", - "execution_count": 37, + "execution_count": 32, "metadata": {}, "outputs": [ - { - "name": "stderr", - "output_type": "stream", - "text": [ - "c:\\Users\\leo.breedt\\miniconda3\\envs\\neptune_scale_py_312_base\\Lib\\site-packages\\torch\\nn\\modules\\module.py:1830: FutureWarning: Using non-full backward hooks on a Module that does not return a single Tensor or a tuple of Tensors is deprecated and will be removed in future versions. This hook will be missing some of the grad_output. Please use register_full_backward_hook to get the documented behavior.\n", - " self._maybe_warn_non_full_backward_hook(args, result, grad_fn)\n", - "c:\\Users\\leo.breedt\\miniconda3\\envs\\neptune_scale_py_312_base\\Lib\\site-packages\\torch\\nn\\modules\\module.py:1830: FutureWarning: Using a non-full backward hook when the forward contains multiple autograd Nodes is deprecated and will be removed in future versions. This hook will be missing some grad_input. 
Please use register_full_backward_hook to get the documented behavior.\n", - " self._maybe_warn_non_full_backward_hook(args, result, grad_fn)\n" - ] - }, { "name": "stdout", "output_type": "stream", "text": [ - "Step 1 / 10241, Loss: 11.76430606842041\n", - "Step 2 / 10241, Loss: 11.756403923034668\n", - "Step 3 / 10241, Loss: 11.73224925994873\n", - "Step 4 / 10241, Loss: 11.687955856323242\n", - "Step 5 / 10241, Loss: 11.431197166442871\n" + "Step 1 / 10241, Loss: 11.765188217163086\n", + "Step 2 / 10241, Loss: 11.752411842346191\n", + "Step 3 / 10241, Loss: 11.724653244018555\n", + "Step 4 / 10241, Loss: 11.64705753326416\n", + "Step 5 / 10241, Loss: 11.223550796508789\n", + "Step 5, Val_loss: 10.67489557999831\n", + "Gradients for fc2: 1.2764140652500025e-16\n", + "Gradients for fc1: 3.7073445469104627e-07\n", + "Gradients for lstm: 8.93705909277287e-09\n", + "Step 6 / 10241, Loss: 10.669904708862305\n", + "Step 7 / 10241, Loss: 10.110128402709961\n", + "Step 8 / 10241, Loss: 9.748831748962402\n" ] }, { - "ename": "KeyError", - "evalue": "3", + "ename": "KeyboardInterrupt", + "evalue": "", "output_type": "error", "traceback": [ "\u001b[1;31m---------------------------------------------------------------------------\u001b[0m", - "\u001b[1;31mKeyError\u001b[0m Traceback (most recent call last)", - "Cell \u001b[1;32mIn[37], line 42\u001b[0m\n\u001b[0;32m 39\u001b[0m \u001b[38;5;28mprint\u001b[39m(\u001b[38;5;124mf\u001b[39m\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mStep \u001b[39m\u001b[38;5;132;01m{\u001b[39;00mstep_counter\u001b[38;5;132;01m}\u001b[39;00m\u001b[38;5;124m / \u001b[39m\u001b[38;5;132;01m{\u001b[39;00m\u001b[38;5;28mlen\u001b[39m(train_dataloader)\u001b[38;5;132;01m}\u001b[39;00m\u001b[38;5;124m, Loss: \u001b[39m\u001b[38;5;132;01m{\u001b[39;00mloss\u001b[38;5;241m.\u001b[39mitem()\u001b[38;5;132;01m}\u001b[39;00m\u001b[38;5;124m\"\u001b[39m)\n\u001b[0;32m 41\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m step_counter \u001b[38;5;241m%\u001b[39m 
\u001b[38;5;241m5\u001b[39m \u001b[38;5;241m==\u001b[39m \u001b[38;5;241m0\u001b[39m: \u001b[38;5;66;03m# Do not need to log validation at every step, although we can\u001b[39;00m\n\u001b[1;32m---> 42\u001b[0m val_loss \u001b[38;5;241m=\u001b[39m \u001b[43mevaluate\u001b[49m\u001b[43m(\u001b[49m\u001b[43mmodel\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mval_dataloader\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mcriterion\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mdevice\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mvocab_size\u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m 43\u001b[0m \u001b[38;5;28mprint\u001b[39m(\u001b[38;5;124mf\u001b[39m\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mStep \u001b[39m\u001b[38;5;132;01m{\u001b[39;00mstep_counter\u001b[38;5;132;01m}\u001b[39;00m\u001b[38;5;124m, Val_loss: \u001b[39m\u001b[38;5;132;01m{\u001b[39;00mval_loss\u001b[38;5;132;01m}\u001b[39;00m\u001b[38;5;124m\"\u001b[39m)\n\u001b[0;32m 45\u001b[0m \u001b[38;5;66;03m# Track activations\u001b[39;00m\n", - "Cell \u001b[1;32mIn[30], line 69\u001b[0m, in \u001b[0;36mevaluate\u001b[1;34m(model, val_dataloader, criterion, device, vocab_size)\u001b[0m\n\u001b[0;32m 67\u001b[0m total_loss \u001b[38;5;241m=\u001b[39m \u001b[38;5;241m0\u001b[39m\n\u001b[0;32m 68\u001b[0m \u001b[38;5;28;01mwith\u001b[39;00m torch\u001b[38;5;241m.\u001b[39mno_grad(): \u001b[38;5;66;03m# Disable gradient calculation for validation\u001b[39;00m\n\u001b[1;32m---> 69\u001b[0m \u001b[43m \u001b[49m\u001b[38;5;28;43;01mfor\u001b[39;49;00m\u001b[43m \u001b[49m\u001b[43mbatch\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;129;43;01min\u001b[39;49;00m\u001b[43m \u001b[49m\u001b[43mval_dataloader\u001b[49m\u001b[43m:\u001b[49m\n\u001b[0;32m 70\u001b[0m \u001b[43m \u001b[49m\u001b[43minput_ids\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43m 
\u001b[49m\u001b[43mbatch\u001b[49m\u001b[43m[\u001b[49m\u001b[38;5;124;43m'\u001b[39;49m\u001b[38;5;124;43minput_ids\u001b[39;49m\u001b[38;5;124;43m'\u001b[39;49m\u001b[43m]\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mto\u001b[49m\u001b[43m(\u001b[49m\u001b[43mdevice\u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m 71\u001b[0m \u001b[43m \u001b[49m\u001b[43mlabels\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43m \u001b[49m\u001b[43mbatch\u001b[49m\u001b[43m[\u001b[49m\u001b[38;5;124;43m'\u001b[39;49m\u001b[38;5;124;43mlabels\u001b[39;49m\u001b[38;5;124;43m'\u001b[39;49m\u001b[43m]\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mto\u001b[49m\u001b[43m(\u001b[49m\u001b[43mdevice\u001b[49m\u001b[43m)\u001b[49m\n", - "File \u001b[1;32mc:\\Users\\leo.breedt\\miniconda3\\envs\\neptune_scale_py_312_base\\Lib\\site-packages\\torch\\utils\\data\\dataloader.py:708\u001b[0m, in \u001b[0;36m_BaseDataLoaderIter.__next__\u001b[1;34m(self)\u001b[0m\n\u001b[0;32m 705\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_sampler_iter \u001b[38;5;129;01mis\u001b[39;00m \u001b[38;5;28;01mNone\u001b[39;00m:\n\u001b[0;32m 706\u001b[0m \u001b[38;5;66;03m# TODO(https://github.com/pytorch/pytorch/issues/76750)\u001b[39;00m\n\u001b[0;32m 707\u001b[0m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_reset() \u001b[38;5;66;03m# type: ignore[call-arg]\u001b[39;00m\n\u001b[1;32m--> 708\u001b[0m data \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_next_data\u001b[49m\u001b[43m(\u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m 709\u001b[0m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_num_yielded \u001b[38;5;241m+\u001b[39m\u001b[38;5;241m=\u001b[39m \u001b[38;5;241m1\u001b[39m\n\u001b[0;32m 710\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m (\n\u001b[0;32m 711\u001b[0m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_dataset_kind 
\u001b[38;5;241m==\u001b[39m _DatasetKind\u001b[38;5;241m.\u001b[39mIterable\n\u001b[0;32m 712\u001b[0m \u001b[38;5;129;01mand\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_IterableDataset_len_called \u001b[38;5;129;01mis\u001b[39;00m \u001b[38;5;129;01mnot\u001b[39;00m \u001b[38;5;28;01mNone\u001b[39;00m\n\u001b[0;32m 713\u001b[0m \u001b[38;5;129;01mand\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_num_yielded \u001b[38;5;241m>\u001b[39m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_IterableDataset_len_called\n\u001b[0;32m 714\u001b[0m ):\n", - "File \u001b[1;32mc:\\Users\\leo.breedt\\miniconda3\\envs\\neptune_scale_py_312_base\\Lib\\site-packages\\torch\\utils\\data\\dataloader.py:764\u001b[0m, in \u001b[0;36m_SingleProcessDataLoaderIter._next_data\u001b[1;34m(self)\u001b[0m\n\u001b[0;32m 762\u001b[0m \u001b[38;5;28;01mdef\u001b[39;00m \u001b[38;5;21m_next_data\u001b[39m(\u001b[38;5;28mself\u001b[39m):\n\u001b[0;32m 763\u001b[0m index \u001b[38;5;241m=\u001b[39m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_next_index() \u001b[38;5;66;03m# may raise StopIteration\u001b[39;00m\n\u001b[1;32m--> 764\u001b[0m data \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_dataset_fetcher\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mfetch\u001b[49m\u001b[43m(\u001b[49m\u001b[43mindex\u001b[49m\u001b[43m)\u001b[49m \u001b[38;5;66;03m# may raise StopIteration\u001b[39;00m\n\u001b[0;32m 765\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_pin_memory:\n\u001b[0;32m 766\u001b[0m data \u001b[38;5;241m=\u001b[39m _utils\u001b[38;5;241m.\u001b[39mpin_memory\u001b[38;5;241m.\u001b[39mpin_memory(data, \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_pin_memory_device)\n", - "File 
\u001b[1;32mc:\\Users\\leo.breedt\\miniconda3\\envs\\neptune_scale_py_312_base\\Lib\\site-packages\\torch\\utils\\data\\_utils\\fetch.py:52\u001b[0m, in \u001b[0;36m_MapDatasetFetcher.fetch\u001b[1;34m(self, possibly_batched_index)\u001b[0m\n\u001b[0;32m 50\u001b[0m data \u001b[38;5;241m=\u001b[39m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mdataset\u001b[38;5;241m.\u001b[39m__getitems__(possibly_batched_index)\n\u001b[0;32m 51\u001b[0m \u001b[38;5;28;01melse\u001b[39;00m:\n\u001b[1;32m---> 52\u001b[0m data \u001b[38;5;241m=\u001b[39m [\u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mdataset\u001b[49m\u001b[43m[\u001b[49m\u001b[43midx\u001b[49m\u001b[43m]\u001b[49m \u001b[38;5;28;01mfor\u001b[39;00m idx \u001b[38;5;129;01min\u001b[39;00m possibly_batched_index]\n\u001b[0;32m 53\u001b[0m \u001b[38;5;28;01melse\u001b[39;00m:\n\u001b[0;32m 54\u001b[0m data \u001b[38;5;241m=\u001b[39m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mdataset[possibly_batched_index]\n", - "\u001b[1;31mKeyError\u001b[0m: 3" + "\u001b[1;31mKeyboardInterrupt\u001b[0m Traceback (most recent call last)", + "Cell \u001b[1;32mIn[32], line 35\u001b[0m\n\u001b[0;32m 33\u001b[0m \u001b[38;5;66;03m# Backward pass and optimization\u001b[39;00m\n\u001b[0;32m 34\u001b[0m loss\u001b[38;5;241m.\u001b[39mbackward()\n\u001b[1;32m---> 35\u001b[0m \u001b[43moptimizer\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mstep\u001b[49m\u001b[43m(\u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m 38\u001b[0m total_loss \u001b[38;5;241m+\u001b[39m\u001b[38;5;241m=\u001b[39m loss\u001b[38;5;241m.\u001b[39mitem()\n\u001b[0;32m 39\u001b[0m \u001b[38;5;28mprint\u001b[39m(\u001b[38;5;124mf\u001b[39m\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mStep \u001b[39m\u001b[38;5;132;01m{\u001b[39;00mstep_counter\u001b[38;5;132;01m}\u001b[39;00m\u001b[38;5;124m / 
\u001b[39m\u001b[38;5;132;01m{\u001b[39;00m\u001b[38;5;28mlen\u001b[39m(train_dataloader)\u001b[38;5;132;01m}\u001b[39;00m\u001b[38;5;124m, Loss: \u001b[39m\u001b[38;5;132;01m{\u001b[39;00mloss\u001b[38;5;241m.\u001b[39mitem()\u001b[38;5;132;01m}\u001b[39;00m\u001b[38;5;124m\"\u001b[39m)\n", + "File \u001b[1;32mc:\\Users\\leo.breedt\\miniconda3\\envs\\neptune_scale_py_312_base\\Lib\\site-packages\\torch\\optim\\optimizer.py:493\u001b[0m, in \u001b[0;36mOptimizer.profile_hook_step..wrapper\u001b[1;34m(*args, **kwargs)\u001b[0m\n\u001b[0;32m 488\u001b[0m \u001b[38;5;28;01melse\u001b[39;00m:\n\u001b[0;32m 489\u001b[0m \u001b[38;5;28;01mraise\u001b[39;00m \u001b[38;5;167;01mRuntimeError\u001b[39;00m(\n\u001b[0;32m 490\u001b[0m \u001b[38;5;124mf\u001b[39m\u001b[38;5;124m\"\u001b[39m\u001b[38;5;132;01m{\u001b[39;00mfunc\u001b[38;5;132;01m}\u001b[39;00m\u001b[38;5;124m must return None or a tuple of (new_args, new_kwargs), but got \u001b[39m\u001b[38;5;132;01m{\u001b[39;00mresult\u001b[38;5;132;01m}\u001b[39;00m\u001b[38;5;124m.\u001b[39m\u001b[38;5;124m\"\u001b[39m\n\u001b[0;32m 491\u001b[0m )\n\u001b[1;32m--> 493\u001b[0m out \u001b[38;5;241m=\u001b[39m \u001b[43mfunc\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m 494\u001b[0m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_optimizer_step_code()\n\u001b[0;32m 496\u001b[0m \u001b[38;5;66;03m# call optimizer step post hooks\u001b[39;00m\n", + "File \u001b[1;32mc:\\Users\\leo.breedt\\miniconda3\\envs\\neptune_scale_py_312_base\\Lib\\site-packages\\torch\\optim\\optimizer.py:91\u001b[0m, in \u001b[0;36m_use_grad_for_differentiable.._use_grad\u001b[1;34m(self, *args, **kwargs)\u001b[0m\n\u001b[0;32m 89\u001b[0m 
torch\u001b[38;5;241m.\u001b[39mset_grad_enabled(\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mdefaults[\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mdifferentiable\u001b[39m\u001b[38;5;124m\"\u001b[39m])\n\u001b[0;32m 90\u001b[0m torch\u001b[38;5;241m.\u001b[39m_dynamo\u001b[38;5;241m.\u001b[39mgraph_break()\n\u001b[1;32m---> 91\u001b[0m ret \u001b[38;5;241m=\u001b[39m \u001b[43mfunc\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m 92\u001b[0m \u001b[38;5;28;01mfinally\u001b[39;00m:\n\u001b[0;32m 93\u001b[0m torch\u001b[38;5;241m.\u001b[39m_dynamo\u001b[38;5;241m.\u001b[39mgraph_break()\n", + "File \u001b[1;32mc:\\Users\\leo.breedt\\miniconda3\\envs\\neptune_scale_py_312_base\\Lib\\site-packages\\torch\\optim\\adam.py:244\u001b[0m, in \u001b[0;36mAdam.step\u001b[1;34m(self, closure)\u001b[0m\n\u001b[0;32m 232\u001b[0m beta1, beta2 \u001b[38;5;241m=\u001b[39m group[\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mbetas\u001b[39m\u001b[38;5;124m\"\u001b[39m]\n\u001b[0;32m 234\u001b[0m has_complex \u001b[38;5;241m=\u001b[39m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_init_group(\n\u001b[0;32m 235\u001b[0m group,\n\u001b[0;32m 236\u001b[0m params_with_grad,\n\u001b[1;32m (...)\u001b[0m\n\u001b[0;32m 241\u001b[0m state_steps,\n\u001b[0;32m 242\u001b[0m )\n\u001b[1;32m--> 244\u001b[0m \u001b[43madam\u001b[49m\u001b[43m(\u001b[49m\n\u001b[0;32m 245\u001b[0m \u001b[43m \u001b[49m\u001b[43mparams_with_grad\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 246\u001b[0m \u001b[43m \u001b[49m\u001b[43mgrads\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 247\u001b[0m \u001b[43m \u001b[49m\u001b[43mexp_avgs\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 248\u001b[0m \u001b[43m 
\u001b[49m\u001b[43mexp_avg_sqs\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 249\u001b[0m \u001b[43m \u001b[49m\u001b[43mmax_exp_avg_sqs\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 250\u001b[0m \u001b[43m \u001b[49m\u001b[43mstate_steps\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 251\u001b[0m \u001b[43m \u001b[49m\u001b[43mamsgrad\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mgroup\u001b[49m\u001b[43m[\u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43mamsgrad\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m]\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 252\u001b[0m \u001b[43m \u001b[49m\u001b[43mhas_complex\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mhas_complex\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 253\u001b[0m \u001b[43m \u001b[49m\u001b[43mbeta1\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mbeta1\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 254\u001b[0m \u001b[43m \u001b[49m\u001b[43mbeta2\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mbeta2\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 255\u001b[0m \u001b[43m \u001b[49m\u001b[43mlr\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mgroup\u001b[49m\u001b[43m[\u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43mlr\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m]\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 256\u001b[0m \u001b[43m \u001b[49m\u001b[43mweight_decay\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mgroup\u001b[49m\u001b[43m[\u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43mweight_decay\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m]\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 257\u001b[0m \u001b[43m \u001b[49m\u001b[43meps\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mgroup\u001b[49m\u001b[43m[\u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43meps\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m]\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 258\u001b[0m \u001b[43m 
\u001b[49m\u001b[43mmaximize\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mgroup\u001b[49m\u001b[43m[\u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43mmaximize\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m]\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 259\u001b[0m \u001b[43m \u001b[49m\u001b[43mforeach\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mgroup\u001b[49m\u001b[43m[\u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43mforeach\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m]\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 260\u001b[0m \u001b[43m \u001b[49m\u001b[43mcapturable\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mgroup\u001b[49m\u001b[43m[\u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43mcapturable\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m]\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 261\u001b[0m \u001b[43m \u001b[49m\u001b[43mdifferentiable\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mgroup\u001b[49m\u001b[43m[\u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43mdifferentiable\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m]\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 262\u001b[0m \u001b[43m \u001b[49m\u001b[43mfused\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mgroup\u001b[49m\u001b[43m[\u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43mfused\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m]\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 263\u001b[0m \u001b[43m \u001b[49m\u001b[43mgrad_scale\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[38;5;28;43mgetattr\u001b[39;49m\u001b[43m(\u001b[49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43mgrad_scale\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;28;43;01mNone\u001b[39;49;00m\u001b[43m)\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 
264\u001b[0m \u001b[43m \u001b[49m\u001b[43mfound_inf\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[38;5;28;43mgetattr\u001b[39;49m\u001b[43m(\u001b[49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43mfound_inf\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;28;43;01mNone\u001b[39;49;00m\u001b[43m)\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 265\u001b[0m \u001b[43m \u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m 267\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m loss\n", + "File \u001b[1;32mc:\\Users\\leo.breedt\\miniconda3\\envs\\neptune_scale_py_312_base\\Lib\\site-packages\\torch\\optim\\optimizer.py:154\u001b[0m, in \u001b[0;36m_disable_dynamo_if_unsupported..wrapper..maybe_fallback\u001b[1;34m(*args, **kwargs)\u001b[0m\n\u001b[0;32m 152\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m disabled_func(\u001b[38;5;241m*\u001b[39margs, \u001b[38;5;241m*\u001b[39m\u001b[38;5;241m*\u001b[39mkwargs)\n\u001b[0;32m 153\u001b[0m \u001b[38;5;28;01melse\u001b[39;00m:\n\u001b[1;32m--> 154\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[43mfunc\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n", + "File \u001b[1;32mc:\\Users\\leo.breedt\\miniconda3\\envs\\neptune_scale_py_312_base\\Lib\\site-packages\\torch\\optim\\adam.py:876\u001b[0m, in \u001b[0;36madam\u001b[1;34m(params, grads, exp_avgs, exp_avg_sqs, max_exp_avg_sqs, state_steps, foreach, capturable, differentiable, fused, grad_scale, found_inf, has_complex, amsgrad, beta1, beta2, lr, weight_decay, eps, maximize)\u001b[0m\n\u001b[0;32m 873\u001b[0m \u001b[38;5;28;01melse\u001b[39;00m:\n\u001b[0;32m 874\u001b[0m func \u001b[38;5;241m=\u001b[39m _single_tensor_adam\n\u001b[1;32m--> 876\u001b[0m 
\u001b[43mfunc\u001b[49m\u001b[43m(\u001b[49m\n\u001b[0;32m 877\u001b[0m \u001b[43m \u001b[49m\u001b[43mparams\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 878\u001b[0m \u001b[43m \u001b[49m\u001b[43mgrads\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 879\u001b[0m \u001b[43m \u001b[49m\u001b[43mexp_avgs\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 880\u001b[0m \u001b[43m \u001b[49m\u001b[43mexp_avg_sqs\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 881\u001b[0m \u001b[43m \u001b[49m\u001b[43mmax_exp_avg_sqs\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 882\u001b[0m \u001b[43m \u001b[49m\u001b[43mstate_steps\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 883\u001b[0m \u001b[43m \u001b[49m\u001b[43mamsgrad\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mamsgrad\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 884\u001b[0m \u001b[43m \u001b[49m\u001b[43mhas_complex\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mhas_complex\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 885\u001b[0m \u001b[43m \u001b[49m\u001b[43mbeta1\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mbeta1\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 886\u001b[0m \u001b[43m \u001b[49m\u001b[43mbeta2\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mbeta2\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 887\u001b[0m \u001b[43m \u001b[49m\u001b[43mlr\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mlr\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 888\u001b[0m \u001b[43m \u001b[49m\u001b[43mweight_decay\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mweight_decay\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 889\u001b[0m \u001b[43m \u001b[49m\u001b[43meps\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43meps\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 890\u001b[0m \u001b[43m \u001b[49m\u001b[43mmaximize\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mmaximize\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 891\u001b[0m \u001b[43m 
\u001b[49m\u001b[43mcapturable\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mcapturable\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 892\u001b[0m \u001b[43m \u001b[49m\u001b[43mdifferentiable\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mdifferentiable\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 893\u001b[0m \u001b[43m \u001b[49m\u001b[43mgrad_scale\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mgrad_scale\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 894\u001b[0m \u001b[43m \u001b[49m\u001b[43mfound_inf\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mfound_inf\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 895\u001b[0m \u001b[43m\u001b[49m\u001b[43m)\u001b[49m\n", + "File \u001b[1;32mc:\\Users\\leo.breedt\\miniconda3\\envs\\neptune_scale_py_312_base\\Lib\\site-packages\\torch\\optim\\adam.py:425\u001b[0m, in \u001b[0;36m_single_tensor_adam\u001b[1;34m(params, grads, exp_avgs, exp_avg_sqs, max_exp_avg_sqs, state_steps, grad_scale, found_inf, amsgrad, has_complex, beta1, beta2, lr, weight_decay, eps, maximize, capturable, differentiable)\u001b[0m\n\u001b[0;32m 422\u001b[0m \u001b[38;5;66;03m# Decay the first and second moment running average coefficient\u001b[39;00m\n\u001b[0;32m 423\u001b[0m exp_avg\u001b[38;5;241m.\u001b[39mlerp_(grad, \u001b[38;5;241m1\u001b[39m \u001b[38;5;241m-\u001b[39m device_beta1)\n\u001b[1;32m--> 425\u001b[0m \u001b[43mexp_avg_sq\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mmul_\u001b[49m\u001b[43m(\u001b[49m\u001b[43mbeta2\u001b[49m\u001b[43m)\u001b[49m\u001b[38;5;241m.\u001b[39maddcmul_(grad, grad\u001b[38;5;241m.\u001b[39mconj(), value\u001b[38;5;241m=\u001b[39m\u001b[38;5;241m1\u001b[39m \u001b[38;5;241m-\u001b[39m beta2)\n\u001b[0;32m 427\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m capturable \u001b[38;5;129;01mor\u001b[39;00m differentiable:\n\u001b[0;32m 428\u001b[0m step \u001b[38;5;241m=\u001b[39m step_t\n", + "\u001b[1;31mKeyboardInterrupt\u001b[0m: " ] } ], @@ -545,15 +544,16 @@ " print(f\"Step 
{step_counter}, Val_loss: {val_loss}\")\n", "\n", " # Track activations\n", - " for name, activation in model.get_activations().items():\n", - " debug_metrics[f\"layers/layer_{name}/activation_mean\"] = activation[0].mean().item()\n", - " debug_metrics[f\"layers/layer_{name}/activation_std\"] = activation[0].std().item()\n", + " # for name, activation in model.get_activations().items():\n", + " # debug_metrics[f\"layers/layer_{name}/activation_mean\"] = activation[0].mean().item()\n", + " # debug_metrics[f\"layers/layer_{name}/activation_std\"] = activation[0].std().item()\n", "\n", " # Track gradients with hooks\n", " gradients = hook_manager.get_gradients()\n", " for layer, gradient in gradients.items():\n", " # debug_metrics[f\"grad_hooks/{name}\"] = gradient.shape()\n", - " print(f\"Gradients for {name}: {gradient.shape}\")\n", + " print(f\"Gradients for {layer}: {gradient.mean().item()}\")\n", + " # print(f\"Gradients for {layer}: {gradient.grad.mean()}\")\n", "\n", " # Track gradients per layer at each epoch\n", " for name, param in model.named_parameters():\n", @@ -580,18 +580,9 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 33, "metadata": {}, "outputs": [ - { - "name": "stderr", - "output_type": "stream", - "text": [ - "neptune:INFO: Waiting for all operations to be processed\n", - "neptune:WARNING: No timeout specified. 
Waiting indefinitely\n", - "neptune:INFO: All operations were processed\n" - ] - }, { "name": "stderr", "output_type": "stream", From 814d56f8c7f663a450a34d1febf09d6c5d30738b Mon Sep 17 00:00:00 2001 From: LeoRoccoBreedt Date: Tue, 11 Mar 2025 11:39:12 +0100 Subject: [PATCH 044/125] refactor: create a class to manage hooks for tracking gradients and activations in the model --- .../pytorch/pytorch_text_model_debugging.ipynb | 18 ++++++++++-------- 1 file changed, 10 insertions(+), 8 deletions(-) diff --git a/integrations-and-supported-tools/pytorch/pytorch_text_model_debugging.ipynb b/integrations-and-supported-tools/pytorch/pytorch_text_model_debugging.ipynb index 1a91f7a..515e1f0 100644 --- a/integrations-and-supported-tools/pytorch/pytorch_text_model_debugging.ipynb +++ b/integrations-and-supported-tools/pytorch/pytorch_text_model_debugging.ipynb @@ -377,9 +377,8 @@ " # Function to register hooks for activations and gradients\n", " def register_hooks(self):\n", " # Register forward hooks for activations\n", - " self.hooks.append(self.model.lstm.register_forward_hook(self.save_activation(\"lstm\")))\n", - " self.hooks.append(self.model.fc1.register_forward_hook(self.save_activation(\"fc1\")))\n", - " self.hooks.append(self.model.fc2.register_forward_hook(self.save_activation(\"fc2\")))\n", + " for name, module in self.model.named_modules():\n", + " self.hooks.append(module.register_forward_hook(self.save_activation(name)))\n", "\n", " # Register backward hooks for gradients\n", " for name, module in self.model.named_modules():\n", @@ -459,7 +458,7 @@ }, { "cell_type": "code", - "execution_count": 32, + "execution_count": 42, "metadata": {}, "outputs": [ { @@ -544,16 +543,19 @@ " print(f\"Step {step_counter}, Val_loss: {val_loss}\")\n", "\n", " # Track activations\n", - " # for name, activation in model.get_activations().items():\n", + " activations = hook_manager.get_activations()\n", + " for layer, activation in activations.items():\n", + " print(f\"Activations 
for {layer}: {activation[0].mean().item()}\")\n", " # debug_metrics[f\"layers/layer_{name}/activation_mean\"] = activation[0].mean().item()\n", " # debug_metrics[f\"layers/layer_{name}/activation_std\"] = activation[0].std().item()\n", "\n", " # Track gradients with hooks\n", " gradients = hook_manager.get_gradients()\n", " for layer, gradient in gradients.items():\n", - " # debug_metrics[f\"grad_hooks/{name}\"] = gradient.shape()\n", - " print(f\"Gradients for {layer}: {gradient.mean().item()}\")\n", - " # print(f\"Gradients for {layer}: {gradient.grad.mean()}\")\n", + " debug_metrics[f\"grad_hooks/{layer}\"] = gradient.mean().item()\n", + " print(f\"Gradients for {layer}: {gradient.mean().item()}\") # You can replace to use mean(), sum(), max() or min()\n", + " # simplified_gradient = gradient.mean(dim=(0, 1))\n", + " # print(f\"Summed Gradient for {layer}: {simplified_gradient}\")\n", "\n", " # Track gradients per layer at each epoch\n", " for name, param in model.named_parameters():\n", From 31968e9bcef0f9a64cf959f426783fb92a4056f3 Mon Sep 17 00:00:00 2001 From: LeoRoccoBreedt Date: Tue, 11 Mar 2025 14:50:03 +0100 Subject: [PATCH 045/125] fix: gradients logging to Neptune --- .../pytorch_text_model_debugging.ipynb | 382 +----------------- 1 file changed, 14 insertions(+), 368 deletions(-) diff --git a/integrations-and-supported-tools/pytorch/pytorch_text_model_debugging.ipynb b/integrations-and-supported-tools/pytorch/pytorch_text_model_debugging.ipynb index 515e1f0..e17f807 100644 --- a/integrations-and-supported-tools/pytorch/pytorch_text_model_debugging.ipynb +++ b/integrations-and-supported-tools/pytorch/pytorch_text_model_debugging.ipynb @@ -458,45 +458,9 @@ }, { "cell_type": "code", - "execution_count": 42, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Step 1 / 10241, Loss: 11.765188217163086\n", - "Step 2 / 10241, Loss: 11.752411842346191\n", - "Step 3 / 10241, Loss: 
11.724653244018555\n", - "Step 4 / 10241, Loss: 11.64705753326416\n", - "Step 5 / 10241, Loss: 11.223550796508789\n", - "Step 5, Val_loss: 10.67489557999831\n", - "Gradients for fc2: 1.2764140652500025e-16\n", - "Gradients for fc1: 3.7073445469104627e-07\n", - "Gradients for lstm: 8.93705909277287e-09\n", - "Step 6 / 10241, Loss: 10.669904708862305\n", - "Step 7 / 10241, Loss: 10.110128402709961\n", - "Step 8 / 10241, Loss: 9.748831748962402\n" - ] - }, - { - "ename": "KeyboardInterrupt", - "evalue": "", - "output_type": "error", - "traceback": [ - "\u001b[1;31m---------------------------------------------------------------------------\u001b[0m", - "\u001b[1;31mKeyboardInterrupt\u001b[0m Traceback (most recent call last)", - "Cell \u001b[1;32mIn[32], line 35\u001b[0m\n\u001b[0;32m 33\u001b[0m \u001b[38;5;66;03m# Backward pass and optimization\u001b[39;00m\n\u001b[0;32m 34\u001b[0m loss\u001b[38;5;241m.\u001b[39mbackward()\n\u001b[1;32m---> 35\u001b[0m \u001b[43moptimizer\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mstep\u001b[49m\u001b[43m(\u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m 38\u001b[0m total_loss \u001b[38;5;241m+\u001b[39m\u001b[38;5;241m=\u001b[39m loss\u001b[38;5;241m.\u001b[39mitem()\n\u001b[0;32m 39\u001b[0m \u001b[38;5;28mprint\u001b[39m(\u001b[38;5;124mf\u001b[39m\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mStep \u001b[39m\u001b[38;5;132;01m{\u001b[39;00mstep_counter\u001b[38;5;132;01m}\u001b[39;00m\u001b[38;5;124m / \u001b[39m\u001b[38;5;132;01m{\u001b[39;00m\u001b[38;5;28mlen\u001b[39m(train_dataloader)\u001b[38;5;132;01m}\u001b[39;00m\u001b[38;5;124m, Loss: \u001b[39m\u001b[38;5;132;01m{\u001b[39;00mloss\u001b[38;5;241m.\u001b[39mitem()\u001b[38;5;132;01m}\u001b[39;00m\u001b[38;5;124m\"\u001b[39m)\n", - "File \u001b[1;32mc:\\Users\\leo.breedt\\miniconda3\\envs\\neptune_scale_py_312_base\\Lib\\site-packages\\torch\\optim\\optimizer.py:493\u001b[0m, in \u001b[0;36mOptimizer.profile_hook_step..wrapper\u001b[1;34m(*args, 
**kwargs)\u001b[0m\n\u001b[0;32m 488\u001b[0m \u001b[38;5;28;01melse\u001b[39;00m:\n\u001b[0;32m 489\u001b[0m \u001b[38;5;28;01mraise\u001b[39;00m \u001b[38;5;167;01mRuntimeError\u001b[39;00m(\n\u001b[0;32m 490\u001b[0m \u001b[38;5;124mf\u001b[39m\u001b[38;5;124m\"\u001b[39m\u001b[38;5;132;01m{\u001b[39;00mfunc\u001b[38;5;132;01m}\u001b[39;00m\u001b[38;5;124m must return None or a tuple of (new_args, new_kwargs), but got \u001b[39m\u001b[38;5;132;01m{\u001b[39;00mresult\u001b[38;5;132;01m}\u001b[39;00m\u001b[38;5;124m.\u001b[39m\u001b[38;5;124m\"\u001b[39m\n\u001b[0;32m 491\u001b[0m )\n\u001b[1;32m--> 493\u001b[0m out \u001b[38;5;241m=\u001b[39m \u001b[43mfunc\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m 494\u001b[0m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_optimizer_step_code()\n\u001b[0;32m 496\u001b[0m \u001b[38;5;66;03m# call optimizer step post hooks\u001b[39;00m\n", - "File \u001b[1;32mc:\\Users\\leo.breedt\\miniconda3\\envs\\neptune_scale_py_312_base\\Lib\\site-packages\\torch\\optim\\optimizer.py:91\u001b[0m, in \u001b[0;36m_use_grad_for_differentiable.._use_grad\u001b[1;34m(self, *args, **kwargs)\u001b[0m\n\u001b[0;32m 89\u001b[0m torch\u001b[38;5;241m.\u001b[39mset_grad_enabled(\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mdefaults[\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mdifferentiable\u001b[39m\u001b[38;5;124m\"\u001b[39m])\n\u001b[0;32m 90\u001b[0m torch\u001b[38;5;241m.\u001b[39m_dynamo\u001b[38;5;241m.\u001b[39mgraph_break()\n\u001b[1;32m---> 91\u001b[0m ret \u001b[38;5;241m=\u001b[39m \u001b[43mfunc\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m 
\u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m 92\u001b[0m \u001b[38;5;28;01mfinally\u001b[39;00m:\n\u001b[0;32m 93\u001b[0m torch\u001b[38;5;241m.\u001b[39m_dynamo\u001b[38;5;241m.\u001b[39mgraph_break()\n", - "File \u001b[1;32mc:\\Users\\leo.breedt\\miniconda3\\envs\\neptune_scale_py_312_base\\Lib\\site-packages\\torch\\optim\\adam.py:244\u001b[0m, in \u001b[0;36mAdam.step\u001b[1;34m(self, closure)\u001b[0m\n\u001b[0;32m 232\u001b[0m beta1, beta2 \u001b[38;5;241m=\u001b[39m group[\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mbetas\u001b[39m\u001b[38;5;124m\"\u001b[39m]\n\u001b[0;32m 234\u001b[0m has_complex \u001b[38;5;241m=\u001b[39m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_init_group(\n\u001b[0;32m 235\u001b[0m group,\n\u001b[0;32m 236\u001b[0m params_with_grad,\n\u001b[1;32m (...)\u001b[0m\n\u001b[0;32m 241\u001b[0m state_steps,\n\u001b[0;32m 242\u001b[0m )\n\u001b[1;32m--> 244\u001b[0m \u001b[43madam\u001b[49m\u001b[43m(\u001b[49m\n\u001b[0;32m 245\u001b[0m \u001b[43m \u001b[49m\u001b[43mparams_with_grad\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 246\u001b[0m \u001b[43m \u001b[49m\u001b[43mgrads\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 247\u001b[0m \u001b[43m \u001b[49m\u001b[43mexp_avgs\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 248\u001b[0m \u001b[43m \u001b[49m\u001b[43mexp_avg_sqs\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 249\u001b[0m \u001b[43m \u001b[49m\u001b[43mmax_exp_avg_sqs\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 250\u001b[0m \u001b[43m \u001b[49m\u001b[43mstate_steps\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 251\u001b[0m \u001b[43m \u001b[49m\u001b[43mamsgrad\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mgroup\u001b[49m\u001b[43m[\u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43mamsgrad\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m]\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 252\u001b[0m 
\u001b[43m \u001b[49m\u001b[43mhas_complex\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mhas_complex\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 253\u001b[0m \u001b[43m \u001b[49m\u001b[43mbeta1\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mbeta1\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 254\u001b[0m \u001b[43m \u001b[49m\u001b[43mbeta2\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mbeta2\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 255\u001b[0m \u001b[43m \u001b[49m\u001b[43mlr\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mgroup\u001b[49m\u001b[43m[\u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43mlr\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m]\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 256\u001b[0m \u001b[43m \u001b[49m\u001b[43mweight_decay\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mgroup\u001b[49m\u001b[43m[\u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43mweight_decay\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m]\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 257\u001b[0m \u001b[43m \u001b[49m\u001b[43meps\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mgroup\u001b[49m\u001b[43m[\u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43meps\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m]\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 258\u001b[0m \u001b[43m \u001b[49m\u001b[43mmaximize\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mgroup\u001b[49m\u001b[43m[\u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43mmaximize\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m]\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 259\u001b[0m \u001b[43m \u001b[49m\u001b[43mforeach\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mgroup\u001b[49m\u001b[43m[\u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43mforeach\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m]\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 260\u001b[0m \u001b[43m 
\u001b[49m\u001b[43mcapturable\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mgroup\u001b[49m\u001b[43m[\u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43mcapturable\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m]\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 261\u001b[0m \u001b[43m \u001b[49m\u001b[43mdifferentiable\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mgroup\u001b[49m\u001b[43m[\u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43mdifferentiable\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m]\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 262\u001b[0m \u001b[43m \u001b[49m\u001b[43mfused\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mgroup\u001b[49m\u001b[43m[\u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43mfused\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m]\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 263\u001b[0m \u001b[43m \u001b[49m\u001b[43mgrad_scale\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[38;5;28;43mgetattr\u001b[39;49m\u001b[43m(\u001b[49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43mgrad_scale\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;28;43;01mNone\u001b[39;49;00m\u001b[43m)\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 264\u001b[0m \u001b[43m \u001b[49m\u001b[43mfound_inf\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[38;5;28;43mgetattr\u001b[39;49m\u001b[43m(\u001b[49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43mfound_inf\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;28;43;01mNone\u001b[39;49;00m\u001b[43m)\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 265\u001b[0m \u001b[43m \u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m 267\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m loss\n", - 
"File \u001b[1;32mc:\\Users\\leo.breedt\\miniconda3\\envs\\neptune_scale_py_312_base\\Lib\\site-packages\\torch\\optim\\optimizer.py:154\u001b[0m, in \u001b[0;36m_disable_dynamo_if_unsupported..wrapper..maybe_fallback\u001b[1;34m(*args, **kwargs)\u001b[0m\n\u001b[0;32m 152\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m disabled_func(\u001b[38;5;241m*\u001b[39margs, \u001b[38;5;241m*\u001b[39m\u001b[38;5;241m*\u001b[39mkwargs)\n\u001b[0;32m 153\u001b[0m \u001b[38;5;28;01melse\u001b[39;00m:\n\u001b[1;32m--> 154\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[43mfunc\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n", - "File \u001b[1;32mc:\\Users\\leo.breedt\\miniconda3\\envs\\neptune_scale_py_312_base\\Lib\\site-packages\\torch\\optim\\adam.py:876\u001b[0m, in \u001b[0;36madam\u001b[1;34m(params, grads, exp_avgs, exp_avg_sqs, max_exp_avg_sqs, state_steps, foreach, capturable, differentiable, fused, grad_scale, found_inf, has_complex, amsgrad, beta1, beta2, lr, weight_decay, eps, maximize)\u001b[0m\n\u001b[0;32m 873\u001b[0m \u001b[38;5;28;01melse\u001b[39;00m:\n\u001b[0;32m 874\u001b[0m func \u001b[38;5;241m=\u001b[39m _single_tensor_adam\n\u001b[1;32m--> 876\u001b[0m \u001b[43mfunc\u001b[49m\u001b[43m(\u001b[49m\n\u001b[0;32m 877\u001b[0m \u001b[43m \u001b[49m\u001b[43mparams\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 878\u001b[0m \u001b[43m \u001b[49m\u001b[43mgrads\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 879\u001b[0m \u001b[43m \u001b[49m\u001b[43mexp_avgs\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 880\u001b[0m \u001b[43m \u001b[49m\u001b[43mexp_avg_sqs\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 881\u001b[0m \u001b[43m \u001b[49m\u001b[43mmax_exp_avg_sqs\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 882\u001b[0m \u001b[43m 
\u001b[49m\u001b[43mstate_steps\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 883\u001b[0m \u001b[43m \u001b[49m\u001b[43mamsgrad\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mamsgrad\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 884\u001b[0m \u001b[43m \u001b[49m\u001b[43mhas_complex\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mhas_complex\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 885\u001b[0m \u001b[43m \u001b[49m\u001b[43mbeta1\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mbeta1\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 886\u001b[0m \u001b[43m \u001b[49m\u001b[43mbeta2\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mbeta2\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 887\u001b[0m \u001b[43m \u001b[49m\u001b[43mlr\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mlr\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 888\u001b[0m \u001b[43m \u001b[49m\u001b[43mweight_decay\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mweight_decay\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 889\u001b[0m \u001b[43m \u001b[49m\u001b[43meps\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43meps\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 890\u001b[0m \u001b[43m \u001b[49m\u001b[43mmaximize\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mmaximize\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 891\u001b[0m \u001b[43m \u001b[49m\u001b[43mcapturable\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mcapturable\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 892\u001b[0m \u001b[43m \u001b[49m\u001b[43mdifferentiable\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mdifferentiable\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 893\u001b[0m \u001b[43m \u001b[49m\u001b[43mgrad_scale\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mgrad_scale\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 894\u001b[0m \u001b[43m \u001b[49m\u001b[43mfound_inf\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mfound_inf\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 895\u001b[0m 
\u001b[43m\u001b[49m\u001b[43m)\u001b[49m\n", - "File \u001b[1;32mc:\\Users\\leo.breedt\\miniconda3\\envs\\neptune_scale_py_312_base\\Lib\\site-packages\\torch\\optim\\adam.py:425\u001b[0m, in \u001b[0;36m_single_tensor_adam\u001b[1;34m(params, grads, exp_avgs, exp_avg_sqs, max_exp_avg_sqs, state_steps, grad_scale, found_inf, amsgrad, has_complex, beta1, beta2, lr, weight_decay, eps, maximize, capturable, differentiable)\u001b[0m\n\u001b[0;32m 422\u001b[0m \u001b[38;5;66;03m# Decay the first and second moment running average coefficient\u001b[39;00m\n\u001b[0;32m 423\u001b[0m exp_avg\u001b[38;5;241m.\u001b[39mlerp_(grad, \u001b[38;5;241m1\u001b[39m \u001b[38;5;241m-\u001b[39m device_beta1)\n\u001b[1;32m--> 425\u001b[0m \u001b[43mexp_avg_sq\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mmul_\u001b[49m\u001b[43m(\u001b[49m\u001b[43mbeta2\u001b[49m\u001b[43m)\u001b[49m\u001b[38;5;241m.\u001b[39maddcmul_(grad, grad\u001b[38;5;241m.\u001b[39mconj(), value\u001b[38;5;241m=\u001b[39m\u001b[38;5;241m1\u001b[39m \u001b[38;5;241m-\u001b[39m beta2)\n\u001b[0;32m 427\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m capturable \u001b[38;5;129;01mor\u001b[39;00m differentiable:\n\u001b[0;32m 428\u001b[0m step \u001b[38;5;241m=\u001b[39m step_t\n", - "\u001b[1;31mKeyboardInterrupt\u001b[0m: " - ] - } - ], + "outputs": [], "source": [ "debug_metrics = {}\n", "\n", @@ -518,6 +482,7 @@ " for batch in train_dataloader:\n", " model.train()\n", " step_counter += 1\n", + " hook_manager.clear()\n", "\n", " input_ids = batch['input_ids'].to(device)\n", " labels = batch['labels'].to(device)\n", @@ -546,8 +511,8 @@ " activations = hook_manager.get_activations()\n", " for layer, activation in activations.items():\n", " print(f\"Activations for {layer}: {activation[0].mean().item()}\")\n", - " # debug_metrics[f\"layers/layer_{name}/activation_mean\"] = activation[0].mean().item()\n", - " # debug_metrics[f\"layers/layer_{name}/activation_std\"] = activation[0].std().item()\n", + " 
debug_metrics[f\"layers/layer_{layer}/activation_mean\"] = activation[0].mean().item()\n", + " debug_metrics[f\"layers/layer_{layer}/activation_std\"] = activation[0].std().item()\n", "\n", " # Track gradients with hooks\n", " gradients = hook_manager.get_gradients()\n", @@ -602,338 +567,19 @@ }, { "cell_type": "code", - "execution_count": 14, + "execution_count": null, "metadata": {}, "outputs": [ { - "name": "stdout", - "output_type": "stream", - "text": [ - "Activation for lstm layer: (tensor([[[-1.1885e-01, 7.4334e-02, -1.1457e-01, ..., 6.1157e-02,\n", - " 3.4403e-02, 1.4354e-01],\n", - " [ 2.1115e-02, -9.9912e-02, -1.5829e-01, ..., 6.8039e-02,\n", - " 5.0781e-02, 1.2131e-01],\n", - " [-5.1509e-02, -2.8223e-02, -1.2977e-01, ..., 5.5352e-02,\n", - " 1.3167e-01, -9.2887e-02],\n", - " [ 1.4676e-01, -5.5235e-02, -2.3041e-01, ..., 1.3869e-01,\n", - " 6.1838e-02, -1.1159e-01],\n", - " [ 9.8354e-03, 5.3392e-02, -3.0002e-02, ..., 1.5939e-01,\n", - " 4.8001e-02, -1.6666e-01]],\n", - "\n", - " [[-6.9048e-03, -1.4166e-01, 1.8845e-03, ..., 1.3928e-01,\n", - " -2.7910e-01, 1.3777e-01],\n", - " [-7.3346e-02, -1.0498e-01, 3.9164e-02, ..., 1.7106e-01,\n", - " -8.2462e-02, -5.4011e-02],\n", - " [ 1.3681e-01, -5.4486e-02, -7.6502e-02, ..., 1.2276e-01,\n", - " 5.4242e-03, -1.5655e-01],\n", - " [ 1.3117e-01, -1.4139e-01, -2.0100e-01, ..., 1.5732e-01,\n", - " -1.0666e-01, -8.7156e-02],\n", - " [ 9.4591e-02, -5.7793e-02, -1.3404e-01, ..., 9.6900e-02,\n", - " 4.9515e-03, -7.9186e-03]],\n", - "\n", - " [[-9.0859e-02, 5.5639e-03, -1.4513e-01, ..., -7.1660e-02,\n", - " 1.8301e-01, 7.4892e-03],\n", - " [ 9.2305e-02, 1.1849e-01, -1.5432e-01, ..., -3.9754e-02,\n", - " 2.1528e-01, -6.6336e-02],\n", - " [-4.3912e-02, 7.2306e-03, -1.6964e-01, ..., 3.7397e-02,\n", - " 1.3741e-01, -7.8224e-03],\n", - " [-2.3771e-03, -1.7121e-01, -1.9225e-01, ..., 2.1824e-02,\n", - " 3.4948e-02, -1.7641e-04],\n", - " [ 1.3603e-01, -6.7352e-02, -2.7966e-01, ..., 1.0816e-01,\n", - " -9.4453e-02, 1.8542e-01]]], 
grad_fn=), (tensor([[[ 9.8354e-03, 5.3392e-02, -3.0002e-02, 7.2875e-03, -1.4585e-01,\n", - " 1.2482e-02, 6.9810e-02, -9.4523e-02, 8.2014e-02, 1.4978e-02,\n", - " -2.2127e-01, -8.6931e-02, -1.7447e-02, -9.0253e-02, -5.8715e-02,\n", - " -9.7126e-02, -1.4640e-02, 6.9750e-02, -4.0392e-02, -6.4946e-03,\n", - " 6.3030e-02, -2.8948e-03, 2.3277e-02, 2.1406e-01, -7.2021e-02,\n", - " -8.5262e-02, 2.1904e-01, 5.9262e-02, 8.1430e-02, -5.0410e-02,\n", - " 4.7898e-02, -1.7856e-02, -9.6553e-02, 1.2176e-01, 1.3534e-01,\n", - " 9.9480e-02, -6.4945e-02, -8.7489e-02, 1.9893e-01, -1.1475e-02,\n", - " -1.1274e-01, 1.4311e-01, 1.2569e-01, 9.3439e-03, 7.4138e-02,\n", - " -4.6937e-02, 1.2031e-01, 3.7596e-02, -1.1246e-01, -1.1382e-01,\n", - " -4.6571e-02, -6.7600e-02, -1.0755e-01, 1.3048e-01, -1.8727e-01,\n", - " -1.5687e-01, 1.3892e-01, -2.1552e-01, -5.3767e-02, 1.5782e-02,\n", - " 1.7370e-01, 3.1182e-02, 1.0335e-01, 2.5303e-02, -1.8224e-02,\n", - " -6.4270e-03, -1.8351e-02, -7.7923e-02, 7.3565e-03, 1.0760e-01,\n", - " 2.1875e-02, -4.9347e-02, 7.0013e-02, -4.0485e-02, 8.5161e-03,\n", - " -1.6069e-01, -7.3729e-02, -1.1683e-01, 1.1353e-01, 9.2289e-02,\n", - " -2.1123e-01, -2.1108e-02, -3.3057e-02, 9.3739e-02, -7.4621e-02,\n", - " -3.0965e-02, 3.7901e-02, 9.5562e-02, 5.1191e-02, -8.1342e-02,\n", - " 1.9611e-02, -1.1581e-01, -2.1830e-01, 1.9827e-02, -3.9519e-02,\n", - " 1.1952e-01, 6.1484e-02, 1.3787e-01, 4.1152e-02, -6.1684e-02,\n", - " -5.6594e-02, -3.1420e-02, -2.2474e-02, -6.5657e-02, 1.4875e-01,\n", - " -9.1052e-02, 7.7182e-02, 4.8757e-02, 1.4903e-01, -1.3527e-03,\n", - " 2.7246e-01, 1.2794e-01, -1.7041e-01, -7.6142e-02, 1.3974e-01,\n", - " 2.4618e-02, -6.9025e-02, -9.4079e-02, -1.4909e-03, 5.9612e-02,\n", - " -4.4725e-03, -6.7179e-02, -6.1797e-02, 1.6559e-01, -8.0163e-02,\n", - " -2.2909e-01, 6.6350e-02, 1.1361e-02, 8.4108e-02, -1.1556e-01,\n", - " 6.1252e-02, 8.3287e-02, -8.7894e-02, -1.0695e-01, -5.3925e-02,\n", - " 7.1153e-02, 6.9016e-02, -1.4300e-02, -5.9164e-02, 3.7659e-02,\n", - " 
1.0063e-01, 1.0336e-01, -1.9477e-02, 3.5742e-02, -3.3210e-02,\n", - " -1.6847e-03, -9.3591e-02, 1.4821e-01, 8.3384e-02, 8.9724e-02,\n", - " -1.2488e-01, -1.8335e-01, -1.4115e-01, 1.3415e-01, 1.4753e-01,\n", - " 4.4653e-02, 1.9245e-02, 3.8093e-02, 1.0158e-01, -4.3646e-02,\n", - " -7.2102e-02, 1.9797e-02, 3.9576e-02, 2.4157e-02, -1.0626e-01,\n", - " -2.5799e-01, 3.5465e-02, -1.9632e-01, 1.2119e-01, 4.7838e-02,\n", - " 1.0147e-02, -3.0875e-03, -4.0234e-03, 3.6550e-02, 2.3824e-02,\n", - " -4.8517e-02, -8.0195e-02, 3.5668e-02, -3.0944e-02, 9.3462e-02,\n", - " 1.5813e-01, -6.5189e-02, 7.8902e-03, 1.2792e-01, 1.2325e-01,\n", - " -7.1469e-02, -6.0525e-03, 4.5219e-02, 2.8174e-02, 2.1492e-01,\n", - " -5.7059e-03, -5.3889e-03, -1.9786e-01, -2.9867e-02, -1.3545e-01,\n", - " -6.7783e-02, -9.1990e-02, 1.7466e-02, -8.8718e-02, 3.5585e-02,\n", - " -4.7237e-02, 4.2510e-02, 1.7164e-01, 6.5421e-02, 7.3680e-02,\n", - " -4.8147e-02, -2.3811e-02, -1.4102e-01, -1.7029e-02, 1.5386e-01,\n", - " -1.0342e-01, 2.5038e-03, 1.0144e-01, 1.2401e-02, 5.6638e-03,\n", - " 2.2166e-02, 1.2892e-02, -2.7320e-02, 6.6498e-02, -7.0570e-02,\n", - " -1.1974e-02, 1.3094e-01, -1.1246e-01, 1.0381e-01, -7.8150e-02,\n", - " -3.4429e-02, 1.4121e-01, -1.1700e-01, -1.3806e-01, 8.8438e-02,\n", - " -7.9613e-02, 7.0094e-02, -6.7182e-02, 3.2045e-02, -2.9587e-02,\n", - " 1.8854e-01, -2.0000e-02, -1.3733e-01, -1.7716e-01, 1.5292e-02,\n", - " -4.1364e-03, 2.0001e-02, 1.2971e-01, 1.7419e-01, 5.9393e-02,\n", - " 3.4792e-02, -2.2356e-01, 7.1222e-02, -6.3988e-02, -3.1165e-02,\n", - " -5.1605e-02, -3.3989e-02, 1.2333e-02, 1.5939e-01, 4.8001e-02,\n", - " -1.6666e-01],\n", - " [ 9.4591e-02, -5.7793e-02, -1.3404e-01, 2.1016e-01, -1.3421e-01,\n", - " -1.1376e-01, 1.1109e-01, 1.0350e-01, -2.2410e-02, -4.0062e-02,\n", - " 3.2718e-02, -6.8034e-02, 5.5625e-02, 1.5184e-02, -9.1550e-04,\n", - " -6.3774e-02, -6.9557e-02, 1.0805e-01, 4.2219e-02, -6.3613e-02,\n", - " -9.4216e-02, 6.2111e-02, -5.3061e-02, -1.5292e-01, 1.2790e-01,\n", - " 
-7.5749e-02, 1.4717e-01, 6.3722e-02, 8.9517e-02, 6.4354e-02,\n", - " 2.1560e-01, 2.4706e-02, -5.5648e-02, -3.0421e-02, 3.9275e-02,\n", - " -8.8307e-02, 1.8278e-01, 6.1446e-02, 2.0430e-02, -1.7117e-02,\n", - " 4.9665e-02, -3.5847e-02, 3.8241e-02, -1.0973e-02, -2.3271e-02,\n", - " 7.5955e-02, 8.3174e-02, 6.0993e-02, -3.5498e-02, 1.1457e-01,\n", - " -1.0923e-02, 1.8132e-03, 1.1598e-01, -1.1598e-01, 6.3417e-02,\n", - " -4.0052e-02, 4.1007e-02, -9.3542e-02, -1.3564e-02, 6.6304e-02,\n", - " 8.8295e-02, 1.3509e-02, 4.5857e-02, 1.8167e-02, 3.7219e-02,\n", - " 5.2673e-02, -7.6027e-02, 1.7417e-02, -3.7517e-02, -7.5377e-02,\n", - " -4.5400e-02, -8.2015e-02, -1.0347e-01, -8.2943e-02, -5.7339e-02,\n", - " -1.3186e-02, 6.2147e-02, 7.8584e-02, -4.2842e-02, -6.7754e-02,\n", - " -9.0749e-02, -4.3508e-03, 1.3494e-01, -6.2708e-02, -1.9534e-01,\n", - " -9.5295e-02, -8.6889e-04, -3.6767e-02, -4.7225e-02, 1.1969e-01,\n", - " 4.6655e-02, -1.0073e-01, -5.1285e-02, -1.3837e-01, -4.0509e-03,\n", - " 1.9807e-02, -2.2002e-02, -2.2138e-01, 4.1285e-02, -9.4463e-04,\n", - " -5.3333e-02, 1.2217e-01, 4.6612e-02, 7.2419e-02, -5.5067e-02,\n", - " -8.7105e-02, 1.6203e-01, 8.3020e-02, -1.0497e-01, -1.2814e-01,\n", - " -1.8258e-01, 2.3605e-01, 4.6091e-02, 2.8127e-02, 8.7480e-02,\n", - " -3.3150e-02, -6.3353e-02, -7.9963e-02, 5.8394e-02, 5.6279e-02,\n", - " 6.6731e-02, 3.2917e-02, -1.3350e-02, -2.9612e-02, 9.0927e-02,\n", - " 6.2480e-02, -6.3447e-02, 6.6206e-02, 1.2004e-01, 1.4747e-02,\n", - " -1.6508e-02, -1.0005e-02, 4.1171e-02, -3.0836e-02, -9.5519e-02,\n", - " 2.0150e-03, 1.6366e-02, -3.0951e-02, -1.1864e-01, 2.7799e-02,\n", - " -1.2762e-01, -1.6378e-02, 5.5286e-02, -1.1001e-01, -9.1958e-02,\n", - " -1.1602e-01, -3.0696e-02, 2.6005e-02, -9.5703e-02, -1.1909e-01,\n", - " 4.2999e-02, 1.0845e-01, 6.9688e-04, -9.4792e-02, -1.8722e-01,\n", - " -1.7324e-01, 1.1747e-02, 1.2535e-01, -4.6194e-02, -2.0607e-02,\n", - " -6.7600e-03, -3.8273e-02, 1.2215e-01, -3.3380e-02, 8.3594e-02,\n", - " 1.0551e-01, 
6.5389e-02, -5.0340e-02, -2.1293e-02, -8.7716e-02,\n", - " -1.0828e-01, -7.8946e-02, -1.2999e-01, 3.0540e-02, 1.7191e-01,\n", - " -5.3674e-02, 9.6773e-02, -4.7233e-02, 2.1101e-02, 1.3149e-01,\n", - " 2.7261e-04, 1.2085e-01, 1.6334e-01, 2.4205e-01, -2.2790e-02,\n", - " 1.0447e-01, 3.7401e-02, -1.5380e-01, -7.3915e-02, -2.6638e-02,\n", - " -8.2007e-02, -1.1565e-01, -1.3212e-01, 6.2574e-02, 7.9549e-02,\n", - " -6.6135e-02, -4.2917e-02, -5.5928e-02, 6.5440e-02, 1.7348e-01,\n", - " -3.5761e-02, -9.4648e-02, 6.3346e-02, -1.1129e-01, -1.1930e-01,\n", - " 1.4410e-02, -2.4488e-01, -5.3787e-03, 8.4259e-02, -1.6251e-02,\n", - " -5.1348e-02, 4.9270e-02, 4.5308e-02, 1.1668e-01, 3.0874e-02,\n", - " -1.0059e-01, -4.5658e-02, -6.3077e-02, -2.6599e-01, 7.9999e-02,\n", - " -2.5070e-02, 6.0473e-02, 1.7124e-01, 4.5684e-02, 1.5989e-02,\n", - " 2.1243e-02, -2.3085e-03, 4.8934e-02, -1.1428e-01, -5.5781e-02,\n", - " 8.1536e-02, 3.2826e-02, -1.3920e-02, 1.1348e-01, -1.1324e-01,\n", - " -6.2405e-02, 1.4879e-01, 5.6304e-02, 4.2413e-03, -8.6732e-02,\n", - " -3.8706e-02, 9.2358e-02, -1.8792e-01, 2.1576e-01, -1.2017e-02,\n", - " 1.5149e-02, -6.6635e-02, 9.9260e-03, -4.9350e-02, -1.3249e-01,\n", - " -6.0241e-02, 3.2335e-02, 5.2322e-02, 9.6900e-02, 4.9515e-03,\n", - " -7.9186e-03],\n", - " [ 1.3603e-01, -6.7352e-02, -2.7966e-01, 8.2148e-02, -6.7611e-02,\n", - " 2.0793e-02, 1.0225e-01, 1.1764e-01, -1.0157e-01, 8.2719e-02,\n", - " 3.6458e-02, 3.6482e-02, -2.3889e-02, 1.7489e-01, -1.3941e-01,\n", - " -2.7739e-02, 9.8823e-02, -8.0219e-02, -4.7139e-02, 4.4864e-02,\n", - " -8.0784e-02, -6.8581e-02, -2.5900e-02, -1.5247e-02, -2.9433e-01,\n", - " 6.6999e-02, 2.7787e-02, -3.4008e-02, -3.2132e-02, 1.6129e-01,\n", - " 3.3099e-02, -2.4889e-01, -1.4151e-01, 3.4665e-02, -7.2734e-02,\n", - " 1.2508e-01, -9.8449e-02, -2.1377e-02, -4.7112e-02, 7.7784e-02,\n", - " -1.4526e-01, -8.2125e-02, -5.7050e-02, -1.5080e-02, 7.6630e-02,\n", - " 1.9037e-02, 5.2296e-02, 8.2584e-02, -1.5881e-02, 1.7238e-01,\n", - " 8.0146e-02, 
2.8528e-02, -7.5524e-02, -1.8611e-01, 2.9266e-02,\n", - " 1.4401e-01, -8.4837e-02, 8.3491e-02, 2.0179e-01, -2.6567e-02,\n", - " 1.1892e-02, 4.7864e-02, -8.6363e-03, -1.4446e-02, -5.8189e-02,\n", - " 1.9961e-02, -9.5751e-02, 1.3279e-02, -1.3346e-01, -3.3887e-02,\n", - " 1.0766e-01, -2.7566e-02, 6.8022e-02, 1.0318e-01, -5.9106e-02,\n", - " -1.1512e-01, 4.2301e-03, -5.5006e-02, 2.4342e-02, 9.3417e-02,\n", - " 9.8848e-02, 6.6830e-02, 1.0980e-01, -1.5114e-02, -3.3165e-02,\n", - " -1.3855e-01, 1.2801e-01, -9.2980e-02, 1.4366e-01, -3.9685e-03,\n", - " -7.7200e-02, -5.2525e-03, -6.4132e-02, 3.9983e-02, -7.9012e-02,\n", - " 7.6421e-02, 1.2579e-01, -2.4095e-02, -1.2270e-01, 4.7820e-02,\n", - " 3.9278e-02, -5.5515e-02, -3.4473e-02, 6.2264e-02, 2.9556e-02,\n", - " -2.1679e-02, 3.1932e-02, -3.8919e-03, -6.7579e-03, -4.6533e-02,\n", - " 5.4988e-03, 3.4696e-02, 6.4688e-02, -1.8436e-01, 1.9484e-02,\n", - " 1.0903e-01, -8.0302e-02, 1.1229e-01, 8.2936e-02, -5.2438e-02,\n", - " 1.0991e-01, 4.1805e-02, -5.1728e-02, 4.4836e-02, 6.1983e-02,\n", - " 1.2062e-01, -1.2726e-01, 5.5043e-02, 7.5568e-02, 6.8702e-02,\n", - " -3.1076e-03, 1.3862e-01, 1.7626e-01, 1.3946e-01, 2.7047e-02,\n", - " 2.7864e-02, -8.0505e-02, 4.4494e-02, -1.8062e-02, -1.3545e-01,\n", - " -3.7318e-02, -3.7964e-02, -1.7789e-01, 6.8049e-02, 1.0841e-01,\n", - " 2.4024e-02, -1.2838e-01, -3.2943e-02, -1.6632e-01, -5.1168e-02,\n", - " -3.9820e-02, -6.8890e-02, 2.4528e-02, 1.1240e-02, -1.1426e-01,\n", - " -2.8044e-02, -6.5270e-02, -5.7087e-02, 2.0904e-02, -1.6673e-01,\n", - " -8.1839e-02, -2.4754e-01, -1.9653e-01, -8.4816e-02, 3.3636e-02,\n", - " -4.3611e-02, -8.8370e-02, 4.0351e-02, 4.6051e-02, 4.9134e-02,\n", - " 1.6001e-01, -5.1358e-02, 1.5380e-03, 4.8425e-02, 7.1472e-02,\n", - " -1.4286e-01, -8.8936e-02, 1.9823e-01, 5.7555e-02, 2.0965e-01,\n", - " 7.9889e-02, -2.5934e-01, -1.3810e-01, 5.4740e-02, -6.8525e-02,\n", - " 1.0861e-01, 1.6838e-01, -2.0420e-02, -3.2166e-02, -1.4300e-01,\n", - " -1.1291e-02, -1.6591e-01, 2.6391e-03, 
4.3837e-02, -1.9612e-01,\n", - " -8.1949e-02, -7.7811e-02, -2.9831e-03, -1.4750e-02, -2.7218e-02,\n", - " 5.8562e-02, 5.9297e-02, -3.0173e-02, -5.2490e-02, -6.5504e-02,\n", - " -5.5096e-02, -2.1278e-01, 3.4448e-02, 2.3712e-02, 1.4995e-01,\n", - " -5.2304e-02, 2.0103e-01, 8.4592e-02, -1.9333e-01, 7.4601e-03,\n", - " -1.1167e-01, 1.2646e-01, -2.7272e-02, 4.2437e-02, 4.4445e-02,\n", - " -1.1936e-01, -9.4888e-02, 1.3257e-01, 4.2319e-02, -6.4245e-02,\n", - " -2.3440e-01, 6.3898e-02, -7.6036e-03, -4.6509e-02, 1.3855e-01,\n", - " 1.5516e-02, 1.4544e-03, 4.3514e-02, 1.5257e-01, 3.6230e-02,\n", - " 7.0221e-02, 1.7503e-01, -1.7676e-02, 7.3860e-02, -7.0607e-02,\n", - " -4.9217e-02, -1.3995e-01, -5.4702e-02, 9.6630e-02, 2.8624e-02,\n", - " -7.4663e-02, -6.4043e-02, -4.7874e-02, -1.3468e-01, 1.5756e-02,\n", - " -2.7573e-02, 4.0813e-02, 7.2164e-03, 1.0816e-01, -9.4453e-02,\n", - " 1.8542e-01]]], grad_fn=), tensor([[[ 0.0215, 0.1231, -0.0638, 0.0187, -0.2644, 0.0308, 0.1278,\n", - " -0.1923, 0.1761, 0.0313, -0.3887, -0.2762, -0.0331, -0.2157,\n", - " -0.1169, -0.1993, -0.0309, 0.1140, -0.0784, -0.0100, 0.1152,\n", - " -0.0053, 0.0586, 0.3053, -0.1443, -0.1933, 0.4138, 0.1320,\n", - " 0.1944, -0.1169, 0.0919, -0.0294, -0.2135, 0.1794, 0.3195,\n", - " 0.2934, -0.1493, -0.1875, 0.4139, -0.0284, -0.2233, 0.2729,\n", - " 0.2229, 0.0134, 0.1441, -0.1220, 0.2427, 0.0599, -0.2447,\n", - " -0.2056, -0.0745, -0.1361, -0.2066, 0.2128, -0.3376, -0.3455,\n", - " 0.2020, -0.3326, -0.1131, 0.0389, 0.3143, 0.0481, 0.1865,\n", - " 0.0493, -0.0448, -0.0114, -0.0360, -0.1558, 0.0158, 0.2346,\n", - " 0.0448, -0.0875, 0.2106, -0.0802, 0.0163, -0.2822, -0.2053,\n", - " -0.3022, 0.2704, 0.1748, -0.4137, -0.0462, -0.0552, 0.2426,\n", - " -0.2100, -0.0747, 0.0908, 0.2231, 0.1041, -0.2129, 0.0481,\n", - " -0.2659, -0.5129, 0.0534, -0.0939, 0.2869, 0.1163, 0.2029,\n", - " 0.0973, -0.1039, -0.1211, -0.0669, -0.0492, -0.1171, 0.2898,\n", - " -0.1569, 0.1462, 0.0930, 0.2558, -0.0028, 0.4722, 0.3414,\n", - " 
-0.4779, -0.1352, 0.3388, 0.0520, -0.1576, -0.1733, -0.0036,\n", - " 0.1175, -0.0074, -0.1209, -0.1110, 0.3066, -0.1677, -0.4005,\n", - " 0.1238, 0.0240, 0.1342, -0.1947, 0.1602, 0.1644, -0.1760,\n", - " -0.1688, -0.1262, 0.1310, 0.1100, -0.0297, -0.1264, 0.0736,\n", - " 0.2040, 0.2224, -0.0404, 0.0804, -0.0596, -0.0038, -0.1632,\n", - " 0.2849, 0.2944, 0.1974, -0.1983, -0.3721, -0.2297, 0.2601,\n", - " 0.3040, 0.0816, 0.0473, 0.0605, 0.1459, -0.0935, -0.1308,\n", - " 0.0334, 0.0774, 0.0410, -0.1992, -0.4726, 0.0937, -0.3922,\n", - " 0.2706, 0.1369, 0.0331, -0.0064, -0.0064, 0.0590, 0.0459,\n", - " -0.1092, -0.1592, 0.0676, -0.0688, 0.3036, 0.2847, -0.1742,\n", - " 0.0185, 0.2786, 0.3010, -0.1789, -0.0100, 0.0860, 0.0644,\n", - " 0.4199, -0.0131, -0.0131, -0.3550, -0.0619, -0.2744, -0.1237,\n", - " -0.2058, 0.0355, -0.1769, 0.0767, -0.0722, 0.0700, 0.3125,\n", - " 0.1528, 0.1752, -0.0903, -0.0490, -0.2326, -0.0331, 0.2633,\n", - " -0.1920, 0.0042, 0.2633, 0.0300, 0.0111, 0.0524, 0.0232,\n", - " -0.0449, 0.1392, -0.1336, -0.0275, 0.2044, -0.2220, 0.2375,\n", - " -0.1323, -0.0737, 0.2924, -0.1898, -0.2143, 0.2357, -0.1630,\n", - " 0.1608, -0.2075, 0.0772, -0.0597, 0.3471, -0.0414, -0.3160,\n", - " -0.3764, 0.0352, -0.0072, 0.0393, 0.2600, 0.3120, 0.1206,\n", - " 0.0652, -0.5279, 0.1653, -0.1217, -0.0762, -0.0888, -0.1079,\n", - " 0.0247, 0.2520, 0.0952, -0.3274],\n", - " [ 0.1748, -0.1059, -0.3198, 0.5186, -0.2371, -0.2178, 0.2349,\n", - " 0.1604, -0.0453, -0.0654, 0.0616, -0.1189, 0.1377, 0.0229,\n", - " -0.0019, -0.2172, -0.1109, 0.3290, 0.0851, -0.1593, -0.1757,\n", - " 0.1057, -0.1162, -0.4523, 0.2734, -0.2212, 0.2794, 0.1664,\n", - " 0.1513, 0.1416, 0.3451, 0.0537, -0.1281, -0.0565, 0.0674,\n", - " -0.1500, 0.3023, 0.1289, 0.0410, -0.0305, 0.0991, -0.0734,\n", - " 0.0843, -0.0225, -0.0398, 0.1359, 0.1802, 0.1637, -0.0770,\n", - " 0.2109, -0.0221, 0.0033, 0.2165, -0.2616, 0.1870, -0.0861,\n", - " 0.0942, -0.1472, -0.0238, 0.1016, 0.2201, 0.0272, 0.0872,\n", - " 
0.0414, 0.0776, 0.1133, -0.1591, 0.0262, -0.1012, -0.1846,\n", - " -0.1027, -0.1475, -0.1609, -0.1686, -0.1316, -0.0310, 0.1181,\n", - " 0.2061, -0.0945, -0.1049, -0.2091, -0.0091, 0.2936, -0.1592,\n", - " -0.3545, -0.2662, -0.0014, -0.0807, -0.1385, 0.2148, 0.1063,\n", - " -0.2819, -0.1010, -0.2188, -0.0084, 0.0397, -0.0581, -0.4686,\n", - " 0.0754, -0.0024, -0.1160, 0.2208, 0.1012, 0.1132, -0.1191,\n", - " -0.2292, 0.3214, 0.1448, -0.2169, -0.2951, -0.3646, 0.3798,\n", - " 0.1070, 0.0776, 0.1584, -0.0685, -0.1480, -0.1374, 0.1294,\n", - " 0.1036, 0.1122, 0.0730, -0.0442, -0.0602, 0.1840, 0.1364,\n", - " -0.1272, 0.1230, 0.3239, 0.0268, -0.0417, -0.0179, 0.0947,\n", - " -0.0559, -0.1942, 0.0035, 0.0318, -0.0507, -0.2642, 0.0656,\n", - " -0.2451, -0.0351, 0.1097, -0.2061, -0.2173, -0.3201, -0.0702,\n", - " 0.0417, -0.2167, -0.2739, 0.0675, 0.1890, 0.0012, -0.2239,\n", - " -0.3711, -0.3940, 0.0225, 0.2390, -0.1319, -0.0488, -0.0137,\n", - " -0.1089, 0.2475, -0.0643, 0.1355, 0.2235, 0.1510, -0.0943,\n", - " -0.0554, -0.1638, -0.2327, -0.2027, -0.2545, 0.0799, 0.2791,\n", - " -0.0913, 0.1932, -0.1046, 0.0521, 0.2579, 0.0009, 0.2206,\n", - " 0.3736, 0.3736, -0.0442, 0.1595, 0.0547, -0.2838, -0.1590,\n", - " -0.0483, -0.2983, -0.2004, -0.3418, 0.1489, 0.1367, -0.1541,\n", - " -0.0908, -0.1317, 0.1192, 0.4313, -0.0851, -0.1807, 0.1204,\n", - " -0.1759, -0.2699, 0.0323, -0.4391, -0.0156, 0.1651, -0.0330,\n", - " -0.0990, 0.1090, 0.1267, 0.2578, 0.0501, -0.1733, -0.1170,\n", - " -0.1403, -0.4787, 0.1272, -0.0632, 0.0988, 0.3264, 0.1055,\n", - " 0.0343, 0.0405, -0.0056, 0.1102, -0.2660, -0.1654, 0.1530,\n", - " 0.0636, -0.0223, 0.1924, -0.2318, -0.1286, 0.2206, 0.1257,\n", - " 0.0081, -0.2056, -0.0625, 0.2139, -0.3882, 0.6290, -0.0291,\n", - " 0.0282, -0.1494, 0.0187, -0.0855, -0.2410, -0.1237, 0.0555,\n", - " 0.1439, 0.1793, 0.0084, -0.0185],\n", - " [ 0.3369, -0.1479, -0.5276, 0.1676, -0.1363, 0.0398, 0.1985,\n", - " 0.2396, -0.1705, 0.1846, 0.0800, 0.0618, -0.0620, 
0.2784,\n", - " -0.2426, -0.0458, 0.1770, -0.1390, -0.0845, 0.1183, -0.1349,\n", - " -0.1174, -0.0712, -0.0334, -0.4726, 0.1418, 0.0468, -0.0561,\n", - " -0.0607, 0.2376, 0.0528, -0.5115, -0.2481, 0.1050, -0.1376,\n", - " 0.2348, -0.1902, -0.0503, -0.1183, 0.2031, -0.2646, -0.1380,\n", - " -0.1213, -0.0263, 0.1222, 0.0415, 0.1292, 0.1448, -0.0367,\n", - " 0.3030, 0.1181, 0.0593, -0.1332, -0.4023, 0.0568, 0.2586,\n", - " -0.1512, 0.1804, 0.4000, -0.0546, 0.0219, 0.0816, -0.0157,\n", - " -0.0265, -0.1322, 0.0508, -0.2013, 0.0325, -0.2295, -0.0557,\n", - " 0.1835, -0.0470, 0.1761, 0.2084, -0.0915, -0.3008, 0.0123,\n", - " -0.1079, 0.0595, 0.1632, 0.1830, 0.1273, 0.1986, -0.0415,\n", - " -0.0714, -0.3162, 0.2042, -0.1503, 0.2874, -0.0081, -0.1331,\n", - " -0.0118, -0.1135, 0.0777, -0.1381, 0.1587, 0.2140, -0.0439,\n", - " -0.3028, 0.1019, 0.0596, -0.1013, -0.0633, 0.1042, 0.0591,\n", - " -0.0395, 0.0779, -0.0082, -0.0133, -0.0734, 0.0098, 0.0712,\n", - " 0.1292, -0.3829, 0.0417, 0.2043, -0.1393, 0.2560, 0.1654,\n", - " -0.0879, 0.2017, 0.0713, -0.0830, 0.1072, 0.1062, 0.2560,\n", - " -0.4510, 0.1106, 0.1474, 0.1427, -0.0086, 0.3868, 0.3241,\n", - " 0.4367, 0.0656, 0.0621, -0.1387, 0.1025, -0.0375, -0.2811,\n", - " -0.0774, -0.0591, -0.3121, 0.1357, 0.3206, 0.0585, -0.2302,\n", - " -0.0596, -0.2986, -0.0889, -0.0869, -0.1366, 0.0568, 0.0286,\n", - " -0.1777, -0.0427, -0.1456, -0.1354, 0.0482, -0.3316, -0.1904,\n", - " -0.4873, -0.5369, -0.1222, 0.0694, -0.0939, -0.1674, 0.0686,\n", - " 0.0854, 0.1132, 0.2842, -0.1667, 0.0033, 0.0907, 0.1604,\n", - " -0.2291, -0.2927, 0.3881, 0.0979, 0.4785, 0.1536, -0.4774,\n", - " -0.2153, 0.1427, -0.1574, 0.1796, 0.3557, -0.0449, -0.0526,\n", - " -0.3581, -0.0234, -0.3326, 0.0049, 0.0933, -0.3071, -0.1452,\n", - " -0.2091, -0.0060, -0.0311, -0.0465, 0.1057, 0.1421, -0.0608,\n", - " -0.1065, -0.0965, -0.1038, -0.3565, 0.0803, 0.0465, 0.2869,\n", - " -0.0856, 0.3659, 0.1754, -0.3457, 0.0169, -0.2568, 0.2813,\n", - " -0.0543, 0.0783, 
0.1306, -0.2800, -0.1639, 0.2811, 0.0763,\n", - " -0.1103, -0.4329, 0.1929, -0.0220, -0.0827, 0.2655, 0.0324,\n", - " 0.0027, 0.0717, 0.2442, 0.0713, 0.1201, 0.2816, -0.0332,\n", - " 0.2074, -0.1304, -0.0855, -0.3259, -0.0834, 0.3056, 0.0815,\n", - " -0.1666, -0.1157, -0.0828, -0.2060, 0.0282, -0.0599, 0.0598,\n", - " 0.0145, 0.2333, -0.1539, 0.4261]]], grad_fn=)))\n" - ] - }, - { - "data": { - "image/png": "iVBORw0KGgoAAAANSUhEUgAAAjsAAAHHCAYAAABZbpmkAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjkuMiwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8hTgPZAAAACXBIWXMAAA9hAAAPYQGoP6dpAABHQElEQVR4nO3deZzNdf//8eeZGTNjmcVoFrJNsqay1URkm4zIZSsMyjKlhSK6KpW1xZJQruSqixlcZClEC7lEikmIaSFbE8rMUJixZIyZ9+8PP+frmMXMmTMzx8fjfrud263z2d6v83Y4z96f9+fzsRljjAAAACzKo6QLAAAAKEqEHQAAYGmEHQAAYGmEHQAAYGmEHQAAYGmEHQAAYGmEHQAAYGmEHQAAYGmEHQAAYGmEHaAI9O/fX9WrVy+RtseOHSubzVYibRfUb7/9JpvNpri4uCJvKy4uTjabTb/99pt9WfXq1XX//fcXeduStGHDBtlsNm3YsKFY2rvSG2+8oZtuukmenp5q0KBBkbZVkt9/ICeEHVyXZs6cKZvNpoiICKePceTIEY0dO1Y7d+50XWH5dPbsWY0dO7bEfjhzY7PZ7C8vLy8FBQWpcePGGjp0qHbt2uWydmbOnFksAckZ7ljbF198oeeee0533323YmNj9frrr5d0Sdns2rVLY8eOdQijgMsY4DrUrFkzU716dSPJ7Nu3z6ljbN261UgysbGx2dadP3/enDt3rpBV5u7YsWNGkhkzZky2dRkZGebvv/8usrbzIsnce++9Zv78+WbevHlmxowZ5pFHHjEBAQHGy8vLvPnmmw7bZ2Vlmb///ttcuHChQO3ccsstpmXLlgXa58KFC+bvv/82WVlZ9mXVqlUzHTt2LNBxnK0tMzPT/P333yYzM9Ol7eXH888/bzw8PEx6enqxtNevXz9TrVq1Au2zdOlSI8msX7++SGrC9Y2RHVx3EhMTtXnzZk2dOlXBwcFasGCBy9soVaqUfHx8XH7c/PDy8pKvr2+JtC1JtWrVUt++ffXQQw9pyJAhev/993XgwAHdcccdGjFihD777DP7tjabTb6+vvL09Cyyes6cOSNJ8vT0lK+vb4md4vPw8JCvr688PIr/n92jR4+qdOnS8vb2dsnxjDH6+++/XXIsoFiUdNoCitsrr7xiypcvb9LT080TTzxhatasmeN2J06cMMOGDTPVqlUz3t7e5sYbbzQPPfSQOXbsmFm/fr2RlO11aZTn8v+zPX/+vClfvrzp379/tjZSU1ONj4+PGTFihDHGmPT0dDNq1CjTqFEj4+/vb8qUKWOaN29uvvzyS/s+iYmJObZ9aZRnzJgx5sq/2hkZGWb8+PHmpptuMt7e3qZatWpm5MiR2UafLo10fP311+aOO+4wPj4+Jjw83MydOzdffSvJDB48OMd1Bw8eNF5eXqZZs2bZPsvlo2NJSUmmf//+5sYbbzTe3t4mLCzM/OMf/zCJiYn2Gq/87JdGUmJjY40ks2HDBvPEE0+Y4OBgExgY6LDu0nEu/7xr1qwxt99+u/Hx8TF169Y1H330kUPtOfVpTsfMq7ZL35krRy6WLFliGjVq
ZHx9fU2FChVMnz59zO+//+6wTb9+/UzZsmXN77//bjp37mzKli1rbrjhBjNixIirjorl9T0t6Pdi9erVpnHjxsbHx8dMmzYt1zZzGtn54IMPTKNGjUy5cuWMn5+fqV+/vpk+fbpDP175utRXl9pfv369ady4sfH19TX169e3r//oo49M/fr1jY+Pj2nUqJH5/vvv8+wTXH8Y2cF1Z8GCBerWrZu8vb0VHR2tffv2aevWrQ7bnD59Wi1atNCMGTPUrl07vfXWW3r88cf1yy+/6Pfff1fdunU1fvx4SdKgQYM0f/58zZ8/X/fcc0+29kqVKqWuXbtqxYoVOn/+vMO6FStWKD09Xb169ZIkpaWl6T//+Y9atWqlSZMmaezYsTp27JiioqLsc4OCg4P17rvvSpK6du1qb7tbt265fuZHHnlEo0ePVqNGjTRt2jS1bNlSEyZMsLd7uf379+uBBx7QvffeqzfffFPly5dX//799fPPP+e/k3NQtWpVtWzZUt9++63S0tJy3a579+5avny5BgwYoJkzZ+rpp5/WqVOndOjQIUnS9OnTVblyZdWpU8f+2V966SWHYzz55JPatWuXRo8erRdeeCHPuvbt26eePXvqvvvu04QJE+Tl5aUHH3xQa9euLfBnzE9tl4uLi1OPHj3k6empCRMm6NFHH9WyZcvUvHlznTx50mHbzMxMRUVFqUKFCpoyZYpatmypN998U++9916eNc2fP18tWrSQj49Ptu9pQb4Xe/bsUXR0tO6991699dZbBZrkvHbtWkVHR6t8+fKaNGmSJk6cqFatWmnTpk2SpHvuuUdPP/20JOnFF1+011m3bl37Mfbv36/evXurU6dOmjBhgk6cOKFOnTppwYIFeuaZZ9S3b1+NGzdOBw4cUI8ePZSVlZXv+nAdKOm0BRSnbdu2GUlm7dq1xpiLc0YqV65shg4d6rDd6NGjjSSzbNmybMe4NOcjrzk7V/6f7Zo1a4wks2rVKoftOnToYG666Sb7+wsXLmSbV3HixAkTGhpqBg4caF+W15ydK0chdu7caSSZRx55xGG7Z5991khyGDW6NDKxceNG+7KjR486jD7lRXmM7BhjzNChQ40kk5CQYIzJPrJz4sQJI8m88cYbebaT27yYSyMEzZs3zzbikdvIjiSHkZzU1FRTsWJF07BhQ/uy/I7s5FXblSM758+fNyEhIaZ+/foOc6w++eQTI8mMHj3avqxfv35Gkhk/frzDMRs2bGgaN26cra0rXRoZupwz34vVq1dfta1L7V3+/R86dKjx9/fPcxQqrzk7l9rfvHmzfdmlv1OlS5c2Bw8etC//97//zdwfZMPIDq4rCxYsUGhoqFq3bi3p4pyRnj17atGiRcrMzLRv99FHH+n2229X165dsx3DmTkfbdq00Q033KDFixfbl504cUJr165Vz5497cs8PT3t8yqysrJ0/PhxXbhwQU2aNNH3339f4HYl2efIDB8+3GH5iBEjJEmffvqpw/J69eqpRYsW9vfBwcGqXbu2fv31V6fav1y5cuUkSadOncpx/aV5JRs2bNCJEyecbufRRx/N9zygSpUqOfw5+/v76+GHH9aOHTuUnJzsdA1Xs23bNh09elRPPvmkwxyrjh07qk6dOtn+XCTp8ccfd3jfokULp/9cCvq9CA8PV1RUlFNtBQYG6syZM06Nll1Sr149NW3a1P7+0pWUbdq0UdWqVbMtd8X3FdZB2MF1IzMzU4sWLVLr1q2VmJio/fv3a//+/YqIiFBKSorWrVtn3/bAgQOqX7++y9r28vJS9+7d9fHHHys9PV2StGzZMmVkZDiEHUmaO3eubrvtNvn6+qpChQoKDg7Wp59+qtTUVKfaPnjwoDw8PHTzzTc7LA8LC1NgYKAOHjzosPzyH45LypcvX6jwccnp06clSX5+fjmu9/Hx0aRJk/T5558rNDRU99xzjyZPnlzg0BEeHp7vbW+++eZsAbZWrVqSVKSXQV/q99q1
a2dbV6dOnWx/Lr6+vgoODnZYVpg/l4J+LwrSp1d68sknVatWLd13332qXLmyBg4cqNWrVxfoGFd+LwMCAiRJVapUyXG5K76vsA7CDq4bX375pZKSkrRo0SLVrFnT/urRo4ckFclVWZfr1auXTp06pc8//1yStGTJEtWpU0e33367fZv//ve/6t+/v2rUqKHZs2dr9erVWrt2rdq0aVPoOQj5HZHKbUTEGFOo9iXpp59+kqenZ54/nMOGDdPevXs1YcIE+fr6atSoUapbt6527NiR73ZKly5d6Fovl1vfXT4aWNSK6oq1/H4vCtOnISEh2rlzp1auXKl//OMfWr9+ve677z7169cv38fI7fMX5fcV1kHYwXVjwYIFCgkJ0dKlS7O9oqOjtXz5cvvltDVq1NBPP/2U5/EKejrrnnvuUcWKFbV48WL9+eef+vLLL7ON6nz44Ye66aabtGzZMj300EOKiopSZGSkzp0753Tb1apVU1ZWlvbt2+ewPCUlRSdPnlS1atUK9DmcdejQIX311Vdq2rRpriM7l9SoUUMjRozQF198oZ9++knnz5/Xm2++aV/vysvH9+/fn+2Hce/evZJkvwtw+fLlJSnbpOErRz8KUtulft+zZ0+2dXv27CnyP5fi/l54e3urU6dOmjlzpg4cOKDHHntM8+bN0/79+yW59s8UuBJhB9eFv//+W8uWLdP999+vBx54INtryJAhOnXqlFauXCnp4hVBCQkJWr58ebZjXfphLFu2rKTsP4C58fDw0AMPPKBVq1Zp/vz5unDhQrawc+n/Ui//8d2yZYvi4+MdtitTpky+2+7QoYOki1cKXW7q1KmSLs4RKWrHjx9XdHS0MjMz87w66ezZs9mCXY0aNeTn52c//Sdd7Pv89vvVHDlyxOHPOS0tTfPmzVODBg0UFhZmr0GSNm7caN/uzJkzmjt3brbj5be2Jk2aKCQkRLNmzXL4bJ9//rl2795d5H8uxfm9+Ouvvxzee3h46LbbbpMk+2cv6N8noCC8SroAoDisXLlSp06d0j/+8Y8c19911132Gwz27NlT//znP/Xhhx/qwQcf1MCBA9W4cWMdP35cK1eu1KxZs3T77berRo0aCgwM1KxZs+Tn56eyZcsqIiIiz1M0PXv21IwZMzRmzBjdeuutDpfWStL999+vZcuWqWvXrurYsaMSExM1a9Ys1atXzz7fRbp4SqFevXpavHixatWqpaCgINWvXz/HeUa33367+vXrp/fee08nT55Uy5Yt9d1332nu3Lnq0qWLfbK2q+zdu1f//e9/ZYxRWlqaEhIStHTpUp0+fVpTp05V+/bt89y3bdu26tGjh+rVqycvLy8tX75cKSkpDpdDN27cWO+++65effVV3XzzzQoJCVGbNm2cqrdWrVqKiYnR1q1bFRoaqjlz5iglJUWxsbH2bdq1a6eqVasqJiZG//znP+Xp6ak5c+YoODjYfkl8QWsrVaqUJk2apAEDBqhly5aKjo5WSkqK3nrrLVWvXl3PPPOMU58nv4rze/HII4/o+PHjatOmjSpXrqyDBw9qxowZatCggf3vQIMGDeTp6alJkyYpNTVVPj4+atOmjUJCQlxWB65jJXkpGFBcOnXqZHx9fc2ZM2dy3aZ///6mVKlS5s8//zTGGPPXX3+ZIUOG2G9uV7lyZdOvXz/7emOM+fjjj029evWMl5dXrjcVvFxWVpapUqWKkWReffXVHNe//vrrplq1asbHx8c0bNjQfPLJJzkeb/PmzaZx48bG29s7XzcVHDdunAkPDzelSpUyVapUyfPmcVdq2bJlvh7PoMtuCOfh4WECAwNNw4YNzdChQ83PP/+cbfsrLz3/888/zeDBg02dOnVM2bJlTUBAgImIiDBLlixx2C85Odl07NjR+Pn55XhTwa1bt2Zr62o3FbztttuMj4+PqVOnjlm6dGm2/bdv324iIiKMt7e3qVq1qpk6dWqOx8ytttxuKrh48WLTsGFD4+Pj
Y4KCgvK8qeCVcrsk/kq57V/Y70Ve7V3+ff3www9Nu3btTEhIiL3/HnvsMZOUlOSw3/vvv29uuukm4+npmeNNBa+kHG51cOk7dbXbF+D6YjOGWVwAAMC6mLMDAAAsjbADAAAsjbADAAAsjbADAAAsjbADAAAsjbADAAAsjZsK6uLTpY8cOSI/Pz9uWQ4AwDXCGKNTp06pUqVK8vDIffyGsKOLt4u/8sm5AADg2nD48GFVrlw51/WEHcn+UMLDhw/L39+/hKsBAAD5kZaWpipVqlz14cKEHf3f03b9/f0JOwAAXGOuNgWFCcoAAMDSCDsAAMDSCDsAAMDSmLMDAJAkZWZmKiMjo6TLAOxKlSolT0/PQh+HsAMA1zljjJKTk3Xy5MmSLgXIJjAwUGFhYYW6Dx5hBwCuc5eCTkhIiMqUKcPNVeEWjDE6e/asjh49KkmqWLGi08ci7ADAdSwzM9MedCpUqFDS5QAOSpcuLUk6evSoQkJCnD6lxQRlALiOXZqjU6ZMmRKuBMjZpe9mYeaTEXYAAJy6gttyxXeTsAMAACyNsAMAwDXIZrNpxYoVRd5Oq1atNGzYsCJvpygxQRkAkKNOnYq3vVWrCrZ9//79dfLkyVx/8BMSEjRq1Ch9++23SktLU1hYmCIiIjRjxgzNnDlT48aNy/P4xhj1799fc+fO1WOPPaZZs2Y5rB88eLBmzpypfv36KS4u7qr11qlTR4mJiTp48KDCwsLy+zE1duxYrVixQjt37nRYnpSUpPLly+f7OFezYcMGtW7dWidOnFBgYKB9+bJly1SqVCmXtVMSGNkBAFjOsWPH1LZtWwUFBWnNmjXavXu3YmNjValSJZ05c0bPPvuskpKS7K/KlStr/PjxDssuqVKlihYtWqS///7bvuzcuXNauHChqlatmq96vvnmG/3999964IEHNHfuXJd8xrCwMPn4+LjkWHkJCgq66lPF3R1hBwBgOZs2bVJqaqr+85//qGHDhgoPD1fr1q01bdo0hYeHq1y5cgoLC7O/PD095efn57DskkaNGqlKlSpatmyZfdmyZctUtWpVNWzYMF/1zJ49W71799ZDDz2kOXPmZFv/+++/Kzo6WkFBQSpbtqyaNGmiLVu2KC4uTuPGjVNCQoJsNptsNpt9FOny01jNmjXT888/73DMY8eOqVSpUtq4caMkaf78+WrSpIn9c/bu3dt+D5vffvtNrVu3liSVL19eNptN/fv3l5T9NNaJEyf08MMPq3z58ipTpozuu+8+7du3z74+Li5OgYGBWrNmjerWraty5cqpffv2DgFyw4YNuvPOO1W2bFkFBgbq7rvv1sGDB/PVl84g7AAALCcsLEwXLlzQ8uXLZYwp9PEGDhyo2NhY+/s5c+ZowIAB+dr31KlTWrp0qfr27at7771Xqamp+vrrr+3rT58+rZYtW+qPP/7QypUrlZCQoOeee05ZWVnq2bOnRowYoVtuucU+4tSzZ89sbfTp00eLFi1y+KyLFy9WpUqV1KJFC0kXL91+5ZVXlJCQoBUrVui3336zB5oqVaroo48+kiTt2bNHSUlJeuutt3L8PP3799e2bdu0cuVKxcfHyxijDh06OFwafvbsWU2ZMkXz58/Xxo0bdejQIT377LOSpAsXLqhLly5q2bKlfvjhB8XHx2vQoEFFekUgc3YA5CqnORsFnVcBlIS77rpLL774onr37q3HH39cd955p9q0aaOHH35YoaGhBT5e3759NXLkSPvow6ZNm7Ro0SJt2LDhqvsuWrRINWvW1C233CJJ6tWrl2bPnm0PIQsXLtSxY8e0detWBQUFSZJuvvlm+/7lypWTl5dXnvN8evTooWHDhumbb75xOG50dLQ9RAwcONC+/U033aS3335bd9xxh06fPq1y5crZ2w4JCXGYs3O5ffv2aeXKldq0aZOaNWsmSVqwYIGqVKmiFStW6MEHH5R0MVjNmjVLNWrUkCQNGTJE48ePlySlpaUpNTVV999/
v3193bp1r9qPhcHIDgDAkl577TUlJydr1qxZuuWWWzRr1izVqVNHP/74Y4GPFRwcrI4dOyouLk6xsbHq2LGjbrjhhnztO2fOHPXt29f+vm/fvlq6dKlOnTolSdq5c6caNmxoDxvOCA4OVrt27bRgwQJJUmJiouLj49WnTx/7Ntu3b1enTp1UtWpV+fn5qWXLlpKkQ4cO5bud3bt3y8vLSxEREfZlFSpUUO3atbV79277sjJlytiDjHTxUQ+XTpkFBQWpf//+ioqKUqdOnfTWW285nOIqCoQdAIBlVahQQQ8++KCmTJmi3bt3q1KlSpoyZYpTxxo4cKDi4uI0d+5ch1GSvOzatUvffvutnnvuOXl5ecnLy0t33XWXzp49q0WLFkn6v0ciFFafPn304YcfKiMjQwsXLtStt96qW2+9VZJ05swZRUVFyd/fXwsWLNDWrVu1fPlySdL58+dd0v7lrrx6y2azOZxii42NVXx8vJo1a6bFixerVq1a+vbbb11exyWEHQDAdcHb21s1atTQmTNnnNq/ffv2On/+vDIyMhQVFZWvfWbPnq177rlHCQkJ2rlzp/01fPhwzZ49W5J02223aefOnTp+/HiudWdmZl61rc6dO+vcuXNavXq1Fi5c6DCq88svv+ivv/7SxIkT1aJFC9WpU8c+0nJ5O5LybKtu3bq6cOGCtmzZYl/2119/ac+ePapXr95Va7xcw4YNNXLkSG3evFn169fXwoULC7R/QRB2AADXrNTUVIcQsXPnTh0+fFiffPKJ+vbtq08++UR79+7Vnj17NGXKFH322Wfq3LmzU215enpq9+7d2rVrV74eSJmRkaH58+crOjpa9evXd3g98sgj2rJli37++WdFR0crLCxMXbp00aZNm/Trr7/qo48+Unx8vCSpevXqSkxM1M6dO/Xnn38qPT09x/bKli2rLl26aNSoUdq9e7eio6Pt66pWrSpvb2/NmDFDv/76q1auXKlXXnnFYf9q1arJZrPpk08+0bFjx3T69OlsbdSsWVOdO3fWo48+qm+++UYJCQnq27evbrzxxnz3a2JiokaOHKn4+HgdPHhQX3zxhfbt21ek83YIOwCAa9aGDRvUsGFDh9e4ceNUr149lSlTRiNGjFCDBg101113acmSJfrPf/6jhx56yOn2/P395e/vn69tV65cqb/++ktdu3bNtq5u3bqqW7euZs+eLW9vb33xxRcKCQlRhw4ddOutt2rixIn2QNW9e3e1b99erVu3VnBwsD744INc2+zTp48SEhLUokULh3sABQcHKy4uTkuXLlW9evU0ceLEbKfzbrzxRo0bN04vvPCCQkNDNWTIkBzbiI2NVePGjXX//feradOmMsbos88+y/eNB8uUKaNffvlF3bt3V61atTRo0CANHjxYjz32WL72d4bNuOKavGtcWlqaAgIClJqamu8vMXA9sMrVWFd+jmvxMxSVc+fOKTExUeHh4fL19S3pcoBs8vqO5vf3m5EdAABgaYQdAABgaYQdAABgaYQdAABgaYQdAIBLnh8FFAVXfDcJOwBwHbt0ufDZs2dLuBIgZ5e+m/m9tD0nPAgUAK5jnp6eCgwMtN9Nt0yZMkX69Gkgv4wxOnv2rI4eParAwMB83cgxN4QdALjOXXqa9pWPDwDcQWBgYJ5PfM8Pwg4AXOdsNpsqVqyokJAQZWRklHQ5gF2pUqUKNaJzCWEHgKXkdNdn5I+np6dLflgAd8MEZQAAYGklGnY2btyoTp06qVKlSrLZbFqxYoXDemOMRo8erYoVK6p06dKKjIzUvn37HLY5fvy4+vTpI39/fwUGBiomJibHJ7UCAIDrU4mexjpz5oxuv/12DRw4UN26dcu2fvLkyXr77bc1d+5chYeHa9SoUYqKitKuXbvsDwPr06ePkpKStHbtWmVkZGjAgAEaNGiQFi5cWNwfB0AR4xQVAGeUaNi57777dN999+W4zhij6dOn6+WXX1bnzp0lSfPmzVNo
aKhWrFihXr16affu3Vq9erW2bt2qJk2aSJJmzJihDh06aMqUKapUqVKxfRYAAOCe3HbOTmJiopKTkxUZGWlfFhAQoIiICMXHx0uS4uPjFRgYaA86khQZGSkPDw9t2bKl2GsGAADux22vxkpOTpYkhYaGOiwPDQ21r0tOTlZISIjDei8vLwUFBdm3yUl6errS09Pt79PS0lxVNgAAcDNuO7JTlCZMmKCAgAD7q0qVKiVdEgAAKCJuG3Yu3S0xJSXFYXlKSop9XVhYWLY7fl64cEHHjx/P826LI0eOVGpqqv11+PBhF1cPAADchduGnfDwcIWFhWndunX2ZWlpadqyZYuaNm0qSWratKlOnjyp7du327f58ssvlZWVpYiIiFyP7ePjI39/f4cXAACwphKds3P69Gnt37/f/j4xMVE7d+5UUFCQqlatqmHDhunVV19VzZo17ZeeV6pUSV26dJEk1a1bV+3bt9ejjz6qWbNmKSMjQ0OGDFGvXr24EgtwIzldMr5qVfHX4WpW/VyA1ZRo2Nm2bZtat25tfz98+HBJUr9+/RQXF6fnnntOZ86c0aBBg3Ty5Ek1b95cq1evtt9jR5IWLFigIUOGqG3btvLw8FD37t319ttvF/tnAQAA7qlEw06rVq1kjMl1vc1m0/jx4zV+/PhctwkKCuIGggAAIFduO2cHAADAFQg7AADA0gg7AADA0gg7AADA0tz2cREAcL3gEnagaDGyAwAALI2wAwAALI3TWACg7KeSOI0EWAcjOwAAwNIIOwAAwNI4jQWgRHDaCEBxYWQHAABYGmEHAABYGmEHAABYGmEHAABYGmEHAABYGmEHAABYGmEHAABYGmEHAABYGmEHAABYGndQBuAWrryjslR0d1XOqa38bMNdnoFrEyM7AADA0gg7AADA0jiNBaBA8vMAz/ycJrqe0T9A8WJkBwAAWBphBwAAWBphBwAAWBphBwAAWBphBwAAWBphBwAAWBqXngPXKe4QDOB6wcgOAACwNMIOAACwNE5jAUA+cedj4NrEyA4AALA0wg4AALA0wg4AALA05uwAgAvl56nwAIoXIzsAAMDSCDsAAMDSCDsAAMDSCDsAAMDSCDsAAMDSCDsAAMDSCDsAAMDSCDsAAMDSuKkgADsedAnAihjZAQAAlkbYAQAAlkbYAQAAlkbYAQAAlkbYAQAAlkbYAQAAlsal5wDcFpfCA3AFRnYAAICluXXYyczM1KhRoxQeHq7SpUurRo0aeuWVV2SMsW9jjNHo0aNVsWJFlS5dWpGRkdq3b18JVg0AANyJW4edSZMm6d1339W//vUv7d69W5MmTdLkyZM1Y8YM+zaTJ0/W22+/rVmzZmnLli0qW7asoqKidO7cuRKsHAAAuAu3nrOzefNmde7cWR07dpQkVa9eXR988IG+++47SRdHdaZPn66XX35ZnTt3liTNmzdPoaGhWrFihXr16lVitQMAAPfg1iM7zZo107p167R3715JUkJCgr755hvdd999kqTExEQlJycrMjLSvk9AQIAiIiIUHx9fIjUDAAD34tYjOy+88ILS0tJUp04deXp6KjMzU6+99pr69OkjSUpOTpYkhYaGOuwXGhpqX5eT9PR0paen29+npaUVQfUAAMAduPXIzpIlS7RgwQItXLhQ33//vebOnaspU6Zo7ty5hTruhAkTFBAQYH9VqVLFRRUDAAB349Zh55///KdeeOEF9erVS7feeqseeughPfPMM5owYYIkKSwsTJKUkpLisF9KSop9XU5Gjhyp1NRU++vw4cNF9yEAAECJcuuwc/bsWXl4OJbo6emprKwsSVJ4eLjCwsK0bt06+/q0tDRt2bJFTZs2zfW4Pj4+8vf3d3gBAABrcus5O506ddJrr72mqlWr6pZbbtGOHTs0depUDRw4UJJks9k0bNgwvfrqq6pZs6bCw8M1atQoVapUSV26dCnZ4gEAgFtw67AzY8YMjRo1Sk8++aSOHj2qSpUq6bHH
HtPo0aPt2zz33HM6c+aMBg0apJMnT6p58+ZavXq1fH19S7ByAADgLmzm8tsRX6fS0tIUEBCg1NRUTmnhuuGq506tWlV0x7YCZ/snp/0AOMrv77dbj+wAcB0CCIDrlVtPUAYAACgswg4AALA0wg4AALA0wg4AALA0wg4AALA0wg4AALA0wg4AALA0wg4AALA0wg4AALA0wg4AALA0wg4AALA0wg4AALA0wg4AALA0wg4AALA0wg4AALA0wg4AALA0r5IuAIDrdepU0hUAgPtgZAcAAFgaYQcAAFgap7EAoAgV5SnFK4+9alXRtQVcyxjZAQAAlkbYAQAAlsZpLABwQ5yiAlyHkR0AAGBphB0AAGBpnMYCgGsAN4oEnMfIDgAAsDTCDgAAsDTCDgAAsDTCDgAAsDTCDgAAsDTCDgAAsDTCDgAAsDTCDgAAsDTCDgAAsDTuoAxcY3K6ky4PiQSA3DGyAwAALI2wAwAALM2psPPrr7+6ug4AAIAi4dScnZtvvlktW7ZUTEyMHnjgAfn6+rq6LgAFwBOxASB3To3sfP/997rttts0fPhwhYWF6bHHHtN3333n6toAAAAKzamw06BBA7311ls6cuSI5syZo6SkJDVv3lz169fX1KlTdezYMVfXCQAA4JRCTVD28vJSt27dtHTpUk2aNEn79+/Xs88+qypVqujhhx9WUlKSq+oEAABwSqHCzrZt2/Tkk0+qYsWKmjp1qp599lkdOHBAa9eu1ZEjR9S5c2dX1QkAAOAUpyYoT506VbGxsdqzZ486dOigefPmqUOHDvLwuJidwsPDFRcXp+rVq7uyVgAAgAJzKuy8++67GjhwoPr376+KFSvmuE1ISIhmz55dqOKA6921cJXVtVAjgOubU2Fn3759V93G29tb/fr1c+bwAAAALuPUnJ3Y2FgtXbo02/KlS5dq7ty5hS4KAADAVZwKOxMmTNANN9yQbXlISIhef/31QhcFAADgKk6FnUOHDik8PDzb8mrVqunQoUOFLgoAAMBVnAo7ISEh+uGHH7ItT0hIUIUKFQpdFAAAgKs4FXaio6P19NNPa/369crMzFRmZqa+/PJLDR06VL169XJ1jQAAAE5z6mqsV155Rb/99pvatm0rL6+Lh8jKytLDDz/MnB0AAOBWnAo73t7eWrx4sV555RUlJCSodOnSuvXWW1WtWjVX1wcAAFAohXpcRK1atfTggw/q/vvvL7Kg88cff6hv376qUKGCPVRt27bNvt4Yo9GjR6tixYoqXbq0IiMj83UfIAAAcH1wamQnMzNTcXFxWrdunY4ePaqsrCyH9V9++aVLijtx4oTuvvtutW7dWp9//rmCg4O1b98+lS9f3r7N5MmT9fbbb2vu3LkKDw/XqFGjFBUVpV27dsnX19cldQCAVeR0x+tVq4q/DqA4ORV2hg4dqri4OHXs2FH169eXzWZzdV2SpEmTJqlKlSqKjY21L7v8kndjjKZPn66XX37Z/tDRefPmKTQ0VCtWrGCyNAAAcC7sLFq0SEuWLFGHDh1cXY+DlStXKioqSg8++KC++uor3XjjjXryySf16KOPSpISExOVnJysyMhI+z4BAQGKiIhQfHx8rmEnPT1d6enp9vdpaWlF+jkAAEDJcWrOjre3t26++WZX15LNr7/+qnfffVc1a9bUmjVr9MQTT+jpp5+2P5IiOTlZkhQaGuqwX2hoqH1dTiZMmKCAgAD7q0qVKkX3IQAAQIlyKuyMGDFCb731lowxrq7HQVZWlho1aqTXX39dDRs21KBBg/Too49q1qxZhTruyJEjlZqaan8dPnzYRRUDAAB349RprG+++Ubr16/X559/rltuuUWlSpVyWL9s2TKXFFexYkXVq1fPYVndunX10UcfSZLCwsIkSSkpKapYsaJ9m5SUFDVo0CDX4/r4+MjHx8clNQIAAPfmVNgJDAxU165dXV1LNnfffbf27NnjsGzv3r32y9zDw8MVFhamdevW2cNNWlqa
tmzZoieeeKLI6wMAAO7PqbBz+dVRRemZZ55Rs2bN9Prrr6tHjx767rvv9N577+m9996TJNlsNg0bNkyvvvqqatasab/0vFKlSurSpUux1AgAANybU2FHki5cuKANGzbowIED6t27t/z8/HTkyBH5+/urXLlyLinujjvu0PLlyzVy5EiNHz9e4eHhmj59uvr06WPf5rnnntOZM2c0aNAgnTx5Us2bN9fq1au5xw4AAJAk2YwTs4wPHjyo9u3b69ChQ0pPT9fevXt10003aejQoUpPTy/0BOLilpaWpoCAAKWmpsrf37+kywHscroBHJCb/NwckJsKwkry+/vt1NVYQ4cOVZMmTXTixAmVLl3avrxr165at26dM4cEAAAoEk6dxvr666+1efNmeXt7OyyvXr26/vjjD5cUBgAA4ApOjexkZWUpMzMz2/Lff/9dfn5+hS4KAADAVZwa2WnXrp2mT5/ucFXU6dOnNWbMmCJ/hARgZczRQWEwHwfImVNh580331RUVJTq1aunc+fOqXfv3tq3b59uuOEGffDBB66uEQAAwGlOhZ3KlSsrISFBixYt0g8//KDTp08rJiZGffr0cZiwDAAAUNKcvs+Ol5eX+vbt68paAAAl4MrTX5z6gtU4FXbmzZuX5/qHH37YqWIAAABczamwM3ToUIf3GRkZOnv2rLy9vVWmTBnCDgAAcBtOXXp+4sQJh9fp06e1Z88eNW/enAnKAADArTgVdnJSs2ZNTZw4MduoDwAAQElyWdiRLk5aPnLkiCsPCQAAUChOzdlZuXKlw3tjjJKSkvSvf/1Ld999t0sKAwAAcAWnwk6XLl0c3ttsNgUHB6tNmzZ68803XVEXcE3jTrYA4D6cCjtZWVmurgMAAKBIuHTODgAAgLtxamRn+PDh+d526tSpzjQBAADgEk6FnR07dmjHjh3KyMhQ7dq1JUl79+6Vp6enGjVqZN/OZrO5pkoAAAAnORV2OnXqJD8/P82dO1fly5eXdPFGgwMGDFCLFi00YsQIlxYJAADgLKfm7Lz55puaMGGCPehIUvny5fXqq69yNRYAAHArToWdtLQ0HTt2LNvyY8eO6dSpU4UuCgAAwFWcCjtdu3bVgAEDtGzZMv3+++/6/fff9dFHHykmJkbdunVzdY0AAABOc2rOzqxZs/Tss8+qd+/eysjIuHggLy/FxMTojTfecGmBAAAAheFU2ClTpoxmzpypN954QwcOHJAk1ahRQ2XLlnVpcQAAAIVVqJsKJiUlKSkpSTVr1lTZsmVljHFVXQAAAC7hVNj566+/1LZtW9WqVUsdOnRQUlKSJCkmJobLzgEAgFtx6jTWM888o1KlSunQoUOqW7eufXnPnj01fPhwLj8HcnDlw0F5MCjcFQ+yhdU4FXa++OILrVmzRpUrV3ZYXrNmTR08eNAlhQEAALiCU6exzpw5ozJlymRbfvz4cfn4+BS6KAAAAFdxKuy0aNFC8+bNs7+32WzKysrS5MmT1bp1a5cVBwAAUFhOncaaPHmy2rZtq23btun8+fN67rnn9PPPP+v48ePatGmTq2sEADgpp/k3wPXGqZGd+vXra+/evWrevLk6d+6sM2fOqFu3btqxY4dq1Kjh6hoBAACcVuCRnYyMDLVv316zZs3SSy+9VBQ1AQAAuEyBR3ZKlSqlH374oShqAQAAcDmnTmP17dtXs2fPdnUtAAAALufUBOULFy5ozpw5+t///qfGjRtneybW1KlTXVIcAABAYRUo7Pz666+qXr26fvrpJzVq1EiStHfvXodtbDab66oDAFwzuEs43FWBwk7NmjWVlJSk9evXS7r4eIi3335boaGhRVIcAABAYRVozs6VTzX//PPPdebMGZcWBAAA4EpOTVC+5MrwAwAA4G4KFHZsNlu2OTnM0QEAAO6sQHN2jDHq37+//WGf586d0+OPP57taqxly5a5rkIAAIBCKFDY6devn8P7vn37urQYAAAAVytQ2ImNjS2qOgAA
AIpEoSYoAwAAuDvCDgAAsDSnHhcBoPCuvNssAKBoMLIDAAAsjbADAAAsjdNYwFXwcEMAuLYxsgMAACyNsAMAACyNsAMAACyNsAMAACyNsAMAACztmgo7EydOlM1m07Bhw+zLzp07p8GDB6tChQoqV66cunfvrpSUlJIrEgAAuJVrJuxs3bpV//73v3Xbbbc5LH/mmWe0atUqLV26VF999ZWOHDmibt26lVCVAADA3VwTYef06dPq06eP3n//fZUvX96+PDU1VbNnz9bUqVPVpk0bNW7cWLGxsdq8ebO+/fbbEqwYAAC4i2si7AwePFgdO3ZUZGSkw/Lt27crIyPDYXmdOnVUtWpVxcfH53q89PR0paWlObwAAIA1uf0dlBctWqTvv/9eW7duzbYuOTlZ3t7eCgwMdFgeGhqq5OTkXI85YcIEjRs3ztWl4jrBAzwB4Nri1iM7hw8f1tChQ7VgwQL5+vq67LgjR45Uamqq/XX48GGXHRsAALgXtw4727dv19GjR9WoUSN5eXnJy8tLX331ld5++215eXkpNDRU58+f18mTJx32S0lJUVhYWK7H9fHxkb+/v8MLAABYk1ufxmrbtq1+/PFHh2UDBgxQnTp19Pzzz6tKlSoqVaqU1q1bp+7du0uS9uzZo0OHDqlp06YlUTIAAHAzbh12/Pz8VL9+fYdlZcuWVYUKFezLY2JiNHz4cAUFBcnf319PPfWUmjZtqrvuuqskSgYAAG7GrcNOfkybNk0eHh7q3r270tPTFRUVpZkzZ5Z0WQAAwE1cc2Fnw4YNDu99fX31zjvv6J133imZggAAgFtz6wnKAAAAhUXYAQAAlkbYAQAAlnbNzdkBABS/K+8cvmpVydQBOIORHQAAYGmEHQAAYGmEHQAAYGmEHQAAYGmEHQAAYGmEHQAAYGmEHQAAYGmEHQAAYGmEHQAAYGncQRnXNe4KCzjnyr87gDtjZAcAAFgaYQcAAFgaYQcAAFgaYQcAAFgaYQcAAFgaV2MBAIoNV0CiJDCyAwAALI2wAwAALI2wAwAALI2wAwAALI2wAwAALI2wAwAALI2wAwAALI2wAwAALI2wAwAALI2wAwAALI2wAwAALI2wAwAALI2wAwAALI2wAwAALI2wAwAALI2wAwAALI2wAwAALI2wAwAALI2wAwAALI2wAwAALI2wAwAALI2wAwAALM2rpAsAAOBynTo5vl+1qmTqgHUwsgMAACyNsAMAACyNsAMAACyNsAMAACyNsAMAACyNsAMAACyNsAMAACyNsAMAACyNsAMAACyNOyjDsrgLKwBAYmQHAABYHGEHAABYmluHnQkTJuiOO+6Qn5+fQkJC1KVLF+3Zs8dhm3Pnzmnw4MGqUKGCypUrp+7duyslJaWEKgYAAO7GrefsfPXVVxo8eLDuuOMOXbhwQS+++KLatWunXbt2qWzZspKkZ555Rp9++qmWLl2qgIAADRkyRN26ddOmTZtKuHpci66c5wMAuPa5ddhZvXq1w/u4uDiFhIRo+/btuueee5SamqrZs2dr4cKFatOmjSQpNjZWdevW1bfffqu77rqrJMoGAABuxK1PY10pNTVVkhQUFCRJ2r59uzIyMhQZGWnfpk6dOqpatari4+NzPU56errS0tIcXgAAwJrcemTncllZWRo2bJjuvvtu1a9fX5KUnJwsb29vBQYGOmwbGhqq5OTkXI81YcIEjRs3rijLBYDrXn5OC3PqGMXhmhnZGTx4sH766SctWrSo0McaOXKkUlNT7a/Dhw+7oEIAAOCOromRnSFDhuiTTz7Rxo0bVblyZfvysLAwnT9/XidPnnQY3UlJSVFYWFiux/Px8ZGPj09RlgwAANyEW4cdY4yeeuopLV++XBs2bFB4eLjD+saNG6tUqVJat26dunfvLknas2ePDh06pKZNm5ZEyQAAF8vPqS7ukI68uHXYGTx4sBYuXKiPP/5Yfn5+9nk4AQEBKl26tAICAhQTE6Phw4crKChI
/v7+euqpp9S0aVOuxAIAAJLcPOy8++67kqRWrVo5LI+NjVX//v0lSdOmTZOHh4e6d++u9PR0RUVFaebMmcVcKQAAcFduHXaMMVfdxtfXV++8847eeeedYqgIAABca9w67AD5xeWrAIDcXDOXngMAADiDsAMAACyNsAMAACyNsAMAACyNsAMAACyNq7FwTXLm6iuu2AKA6xMjOwAAwNIIOwAAwNI4jQW3x+knAEBhMLIDAAAsjbADAAAsjbADAAAsjbADAAAsjbADAAAsjbADAAAsjUvP4Xa41ByAK1z5b8mqVSVTB0oeIzsAAMDSCDsAAMDSOI2FEsUpKwDFJad/bzi1dX1gZAcAAFgaYQcAAFgaYQcAAFgac3YAANctLk+/PjCyAwAALI2wAwAALI3TWAAAFACXsF97GNkBAACWRtgBAACWxmksFCvumAwAKG6M7AAAAEsj7AAAAEvjNBbyhRtvAXBnnCJHXhjZAQAAlkbYAQAAlkbYAQAAlsacHQAA8sB8oGsfIzsAAMDSCDsAAMDSOI2FIsPQLwDAHTCyAwAALI2wAwAALI3TWMgmP6efctqGuyoDANwRIzsAAMDSCDsAAMDSCDsAAMDSmLNjccX5tHIuNQdwvcrPv39X/vvL3Mfiw8gOAACwNMIOAACwNE5jAQDw/3E63poY2QEAAJZG2AEAAJZmM8aYki6ipKWlpSkgIECpqany9/cv6XIk5e8qKoZbAcD6iuoKLStcDZbf329GdgAAgKVZJuy88847ql69unx9fRUREaHvvvuupEsCAABuwBJXYy1evFjDhw/XrFmzFBERoenTpysqKkp79uxRSEhIidbGqSYAQGG4alqDq05ROVNPSZ8es8TIztSpU/Xoo49qwIABqlevnmbNmqUyZcpozpw5JV0aAAAoYdd82Dl//ry2b9+uyMhI+zIPDw9FRkYqPj6+BCsDAADu4Jo/jfXnn38qMzNToaGhDstDQ0P1yy+/5LhPenq60tPT7e9TU1MlXZzV7WoZGa45Tk6luerYAIBrh7O/B1ful9M++fkZvHK//NRTBD+v//+4Fw98tQvLr/mw44wJEyZo3Lhx2ZZXqVKlBKrJn4CAkq4AAOAOnP09yM9+zhy7qI5bEKdOnVJAHo1c82HnhhtukKenp1JSUhyWp6SkKCwsLMd9Ro4cqeHDh9vfZ2Vl6fjx46pQoYJsNptL60tLS1OVKlV0+PBht7mHj7uhj/JG/1wdfZQ3+ufq6KO8uWv/GGN06tQpVapUKc/trvmw4+3trcaNG2vdunXq0qWLpIvhZd26dRoyZEiO+/j4+MjHx8dhWWBgYJHW6e/v71ZfEHdEH+WN/rk6+ihv9M/V0Ud5c8f+yWtE55JrPuxI0vDhw9WvXz81adJEd955p6ZPn64zZ85owIABJV0aAAAoYZYIOz179tSxY8c0evRoJScnq0GDBlq9enW2ScsAAOD6Y4mwI0lDhgzJ9bRVSfLx8dGYMWOynTbD/6GP8kb/XB19lDf65+roo7xd6/3Dg0ABAIClXfM3FQQAAMgLYQcAAFgaYQcAAFgaYQcAAFgaYacIHD9+XH369JG/v78CAwMVExOj06dP57nPY489pho1aqh06dIKDg5W586dc322lxUUtI+OHz+up556SrVr11bp0qVVtWpVPf300/bnmlmNM9+h9957T61atZK/v79sNptOnjxZPMUWk3feeUfVq1eXr6+vIiIi9N133+W5/dKlS1WnTh35+vrq1ltv1WeffVZMlZaMgvTPzz//rO7du6t69eqy2WyaPn168RVaggrSR++//75atGih8uXLq3z58oqMjLzqd+5aV5D+WbZsmZo0aaLAwECVLVtWDRo00Pz584ux2oIh7BSBPn366Oeff9batWv1ySefaOPGjRo0aFCe+zRu3FixsbHavXu31qxZI2OM2rVrp8zMzGKqungVtI+OHDmiI0eOaMqUKfrpp58UFxen1atXKyYmphirLj7O
fIfOnj2r9u3b68UXXyymKovP4sWLNXz4cI0ZM0bff/+9br/9dkVFReno0aM5br9582ZFR0crJiZGO3bsUJcuXdSlSxf99NNPxVx58Sho/5w9e1Y33XSTJk6cmOtjdaymoH20YcMGRUdHa/369YqPj1eVKlXUrl07/fHHH8VcefEoaP8EBQXppZdeUnx8vH744QcNGDBAAwYM0Jo1a4q58nwycKldu3YZSWbr1q32ZZ9//rmx2Wzmjz/+yPdxEhISjCSzf//+oiizRLmqj5YsWWK8vb1NRkZGUZRZYgrbP+vXrzeSzIkTJ4qwyuJ15513msGDB9vfZ2ZmmkqVKpkJEybkuH2PHj1Mx44dHZZFRESYxx57rEjrLCkF7Z/LVatWzUybNq0Iq3MPhekjY4y5cOGC8fPzM3Pnzi2qEktUYfvHGGMaNmxoXn755aIor9AY2XGx+Ph4BQYGqkmTJvZlkZGR8vDw0JYtW/J1jDNnzig2Nlbh4eFu/SR2Z7mijyQpNTVV/v7+8vKyzL0xJbmuf6zi/Pnz2r59uyIjI+3LPDw8FBkZqfj4+Bz3iY+Pd9hekqKionLd/lrmTP9cb1zRR2fPnlVGRoaCgoKKqswSU9j+McZo3bp12rNnj+65556iLNVphB0XS05OVkhIiMMyLy8vBQUFKTk5Oc99Z86cqXLlyqlcuXL6/PPPtXbtWnl7exdluSWiMH10yZ9//qlXXnnlqqd2rkWu6B8r+fPPP5WZmZnt8S+hoaG59kdycnKBtr+WOdM/1xtX9NHzzz+vSpUqZQvRVuBs/6SmpqpcuXLy9vZWx44dNWPGDN17771FXa5TCDv59MILL8hms+X5KuyE4j59+mjHjh366quvVKtWLfXo0UPnzp1z0ScoesXRR5KUlpamjh07ql69eho7dmzhCy8mxdU/AFxr4sSJWrRokZYvXy5fX9+SLsdt+Pn5aefOndq6datee+01DR8+XBs2bCjpsnJkrfH/IjRixAj1798/z21uuukmhYWFZZvQdeHCBR0/fvyqEwEDAgIUEBCgmjVr6q677lL58uW1fPlyRUdHF7b8YlEcfXTq1Cm1b99efn5+Wr58uUqVKlXYsotNcfSPFd1www3y9PRUSkqKw/KUlJRc+yMsLKxA21/LnOmf601h+mjKlCmaOHGi/ve//+m2224ryjJLjLP94+HhoZtvvlmS1KBBA+3evVsTJkxQq1atirJcpxB28ik4OFjBwcFX3a5p06Y6efKktm/frsaNG0uSvvzyS2VlZSkiIiLf7RljZIxRenq60zUXt6Luo7S0NEVFRcnHx0crV6685v4Pq7i/Q1bh7e2txo0ba926derSpYskKSsrS+vWrcv14b9NmzbVunXrNGzYMPuytWvXqmnTpsVQcfFypn+uN8720eTJk/Xaa69pzZo1DnPorMZV36GsrCz3/c0q4QnSltS+fXvTsGFDs2XLFvPNN9+YmjVrmujoaPv633//3dSuXdts2bLFGGPMgQMHzOuvv262bdtmDh48aDZt2mQ6depkgoKCTEpKSkl9jCJV0D5KTU01ERER5tZbbzX79+83SUlJ9teFCxdK6mMUmYL2jzHGJCUlmR07dpj333/fSDIbN240O3bsMH/99VdJfASXWrRokfHx8TFxcXFm165dZtCgQSYwMNAkJycbY4x56KGHzAsvvGDfftOmTcbLy8tMmTLF7N6924wZM8aUKlXK/PjjjyX1EYpUQfsnPT3d7Nixw+zYscNUrFjRPPvss2bHjh1m3759JfURilxB+2jixInG29vbfPjhhw7/3pw6daqkPkKRKmj/vP766+aLL74wBw4cMLt27TJTpkwxXl5e5v333y+pj5Anwk4R+Ouvv0x0dLQpV66c8ff3NwMGDHD4C5KYmGgkmfXr1xtjjPnjjz/MfffdZ0JCQkypUqVM5cqVTe/evc0vv/xSQp+g6BW0jy5dTp3TKzExsWQ+RBEqaP8YY8yYMWNy7J/Y
2Nji/wBFYMaMGaZq1arG29vb3Hnnnebbb7+1r2vZsqXp16+fw/ZLliwxtWrVMt7e3uaWW24xn376aTFXXLwK0j+Xvj9Xvlq2bFn8hRejgvRRtWrVcuyjMWPGFH/hxaQg/fPSSy+Zm2++2fj6+pry5cubpk2bmkWLFpVA1fljM8aYYhtGAgAAKGZcjQUAACyNsAMAACyNsAMAACyNsAMAACyNsAMAACyNsAMAACyNsAMAACyNsAOgSMXFxSkwMLDI2/ntt99ks9m0c+fOIm+rsPr372+/LT+AokfYAeAgPj5enp6e6tixY4H3rV69uqZPn+6wrGfPntq7d6+Lqrsop7BQpUoVJSUlqX79+i5t63JPPfWU6tatm+O6Q4cOydPTUytXriyy9gE4h7ADwMHs2bP11FNPaePGjTpy5Eihj1e6dGmFhIS4oLK8eXp6KiwsTF5eRfd845iYGP3yyy/avHlztnVxcXEKCQlRhw4diqx9AM4h7ACwO336tBYvXqwnnnhCHTt2VFxcXLZtVq1apTvuuEO+vr664YYb1LVrV0lSq1atdPDgQT3zzDOy2Wyy2WySHE9j7d27VzabTb/88ovDMadNm6YaNWpIkjIzMxUTE6Pw8HCVLl1atWvX1ltvvWXfduzYsZo7d64+/vhjezsbNmzI8TTWV199pTvvvFM+Pj6qWLGiXnjhBV24cMG+vlWrVnr66af13HPPKSgoSGFhYRo7dmyu/dOgQQM1atRIc+bMcVhujFFcXJz69esnm82WZ/05yWlErEGDBg61nDx5Uo888oiCg4Pl7++vNm3aKCEhIc/jAriIsAPAbsmSJapTp45q166tvn37as6cObr88Xmffvqpunbtqg4dOmjHjh1at26d7rzzTknSsmXLVLlyZY0fP15JSUlKSkrKdvxatWqpSZMmWrBggcPyBQsWqHfv3pKkrKwsVa5cWUuXLtWuXbs0evRovfjii1qyZIkk6dlnn1WPHj3Uvn17ezvNmjXL1tYff/yhDh066I477lBCQoLeffddzZ49W6+++qrDdnPnzlXZsmW1ZcsWTZ48WePHj9fatWtz7aOYmBgtWbJEZ86csS/bsGGDEhMTNXDgwKvW76wHH3xQR48e1eeff67t27erUaNGatu2rY4fP16o4wLXhZJ9DikAd9KsWTMzffp0Y4wxGRkZ5oYbbnB4snrTpk1Nnz59ct2/WrVqZtq0aQ7LYmNjTUBAgP39tGnTTI0aNezv9+zZYySZ3bt353rcwYMHm+7du9vf9+vXz3Tu3Nlhm0tP8t6xY4cxxpgXX3zR1K5d22RlZdm3eeedd0y5cuVMZmamMebik5ybN2/ucJw77rjDPP/887nWcuLECePr6+vwNPmHHnoo23EKUn9O/Xb77bfbn7D99ddfG39/f3Pu3DmHbWrUqGH+/e9/59ougIsY2QEgSdqzZ4++++47RUdHS5K8vLzUs2dPzZ49277Nzp071bZt20K106tXL/3222/69ttvJV0c1WnUqJHq1Klj3+add95R48aNFRwcrHLlyum9997ToUOHCtTO7t271bRpU/vpNEm6++67dfr0af3+++/2ZbfddpvDfhUrVtTRo0dzPW5gYKC6detmP5WVlpamjz76SDExMS6t/3IJCQk6ffq0KlSooHLlytlfiYmJOnDggNPHBa4XRTeTD8A1Zfbs2bpw4YIqVapkX2aMkY+Pj/71r38pICBApUuXLnQ7YWFhatOmjRYuXKi77rpLCxcu1BNPPGFfv2jRIj377LN688031bRpU/n5+emNN97Qli1bCt12TkqVKuXw3mazKSsrK899YmJi1LZtW+3fv1/r16+Xp6enHnzwQafr9/DwcDhdKEkZGRn2/z59+rQqVqyoDRs2ZNu3OC7rB651hB0AunDhgubNm6c333xT7dq1c1jXpUsXffDBB3r88cd12223ad26dRowYECOx/H29lZmZuZV2+vTp4+ee+45RUdH69dff1Wv
Xr3s6zZt2qRmzZrpySeftC+7cvQiP+3UrVtXH330kYwx9tGdTZs2yc/PT5UrV75qjXlp3bq1wsPDFRsbq/Xr16tXr14qW7Zsvuu/UnBwsMMcp7S0NCUmJtrfN2rUSMnJyfLy8lL16tULVTtwPeI0FgB98sknOnHihGJiYlS/fn2HV/fu3e2nssaMGaMPPvhAY8aM0e7du/Xjjz9q0qRJ9uNUr15dGzdu1B9//KE///wz1/a6deumU6dO6YknnlDr1q0dRpNq1qypbdu2ac2aNdq7d69GjRqlrVu3OuxfvXp1/fDDD9qzZ4/+/PNPh1GQS5588kkdPnxYTz31lH755Rd9/PHHGjNmjIYPHy4Pj8L902ez2TRw4EC9++67io+PdziFlZ/6r9SmTRvNnz9fX3/9tX788Uf169dPnp6e9vWRkZFq2rSpunTpoi+++EK//fabNm/erJdeeknbtm0r1GcBrgeEHQCaPXu2IiMjFRAQkG1d9+7dtW3bNv3www9q1aqVli5dqpUrV6pBgwZq06aNvvvuO/u248eP12+//aYaNWooODg41/b8/PzUqVMnJSQkqE+fPg7rHnvsMXXr1k09e/ZURESE/vrrL4dREkl69NFHVbt2bTVp0kTBwcHatGlTtjZuvPFGffbZZ/ruu+90++236/HHH1dMTIxefvnlgnZPjvr376/U1FTdcsstioiIKFD9Vxo5cqRatmyp+++/Xx07dlSXLl3sl+JLF8PVZ599pnvuuUcDBgxQrVq11KtXLx08eFChoaEu+TyAldnMlSeKAQAALISRHQAAYGmEHQAAYGmEHQAAYGmEHQAAYGmEHQAAYGmEHQAAYGmEHQAAYGmEHQAAYGmEHQAAYGmEHQAAYGmEHQAAYGmEHQAAYGn/DyCGTFT598z6AAAAAElFTkSuQmCC", - "text/plain": [ - "
" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Gradient for fc layer: torch.Size([3, 5, 5000])\n" + "ename": "KeyError", + "evalue": "0", + "output_type": "error", + "traceback": [ + "\u001b[1;31m---------------------------------------------------------------------------\u001b[0m", + "\u001b[1;31mKeyError\u001b[0m Traceback (most recent call last)", + "Cell \u001b[1;32mIn[45], line 3\u001b[0m\n\u001b[0;32m 1\u001b[0m \u001b[38;5;66;03m# After training, you can analyze the activations and gradients\u001b[39;00m\n\u001b[0;32m 2\u001b[0m \u001b[38;5;66;03m# Example: Plot the activations of the LSTM layer\u001b[39;00m\n\u001b[1;32m----> 3\u001b[0m layer_name, activation \u001b[38;5;241m=\u001b[39m \u001b[43mactivations\u001b[49m\u001b[43m[\u001b[49m\u001b[38;5;241;43m0\u001b[39;49m\u001b[43m]\u001b[49m \u001b[38;5;66;03m# Assuming lstm activation\u001b[39;00m\n\u001b[0;32m 4\u001b[0m \u001b[38;5;28mprint\u001b[39m(\u001b[38;5;124mf\u001b[39m\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mActivation for \u001b[39m\u001b[38;5;132;01m{\u001b[39;00mlayer_name\u001b[38;5;132;01m}\u001b[39;00m\u001b[38;5;124m layer: \u001b[39m\u001b[38;5;124m\"\u001b[39m, activation)\n\u001b[0;32m 6\u001b[0m \u001b[38;5;66;03m# Plotting histogram of activations of the LSTM layer\u001b[39;00m\n", + "\u001b[1;31mKeyError\u001b[0m: 0" ] - }, - { - "data": { - "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAk0AAAHHCAYAAACiOWx7AAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjkuMiwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8hTgPZAAAACXBIWXMAAA9hAAAPYQGoP6dpAABXPklEQVR4nO3dfVyN9/8H8NcpnW6dWulWRYTKwjDVzNdMcxBztyF3qWZDDDE3323uduNuhrmdGdm+X0MbNhqWMBu5y12hhmUxnYp0Do1KfX5/7Heur6PkKqcb83o+Htdjzud6X5/rc33Kzst1rus6CiGEABERERGVy6SmB0BERET0JGBoIiIiIpKBoYmIiIhIBoYmIiIiIhkYmoiIiIhkYGgiIiIikoGhiYiIiEgGhiYiIiIiGRiaiIiIiGRgaCJ6yg0fPhwNGzY0aFMoFJg5c2aNjMeYqus49u/fD4VCgf3790ttL730Ep599tkq3zcAXL58GQqFAjExMdWyvwd9/fXX8PHxgZmZGezs7Kp8f7t27UKrVq1gYWEBhUKBvLy8Kt8nEcDQRFRj0tPTMWbMGDRt2hRWVlawsrKCn58foqKicObMmZoeXpXbsGEDFi9eLLu+YcOGUCgUUCgUMDExgZ2dHfz9/fHmm2/iyJEjNTau6lQbx5aamorhw4ejcePG+OKLL7B69eoq3d+NGzfQv39/WFpaYvny5fj6669hbW1dpfsk0lPwu+eIqt+OHTswYMAA1KlTB4MHD0bLli1hYmKC1NRUbNmyBX/88QfS09PRoEGDKh/L8OHDsX//fly+fFlqu3v3LurUqYM6depU2X579OiBlJQUg/2Wp2HDhnjmmWcwceJEAMCtW7dw/vx5xMbGQqPRYMKECfj0008NtqnMcVR0XABQUlKCwsJCKJVKmJj8/W/Rl156CdevX0dKSorsfio7NiEECgoKYGZmBlNTU6PtT45Vq1Zh1KhRuHDhAry9vat8f7t27UK3bt0QHx+P4ODgKt8f0f2q7v+IRFSmS5cuYeDAgWjQoAESEhLg6upqsH7evHlYsWKF9Ob7MPn5+VX2L2wLC4sq6fdx1a9fH0OGDDFomzdvHgYNGoRFixahSZMmGDVqlLSuqo/j7t27UlCqyTlTKBQ1tv/s7GwAMOrHcn/99ResrKyqbX9EsgkiqlZvvvmmACAOHz4se5uwsDBhbW0tLl68KLp16yZsbGxEr169hBBCHDhwQLz22mvCw8NDKJVK4e7uLsaPHy/++uuvUv1s3bpVNG/eXJibm4vmzZuLLVu2iLCwMNGgQQODOgBixowZBm1Xr14V4eHhwsnJSSiVSuHn5ye+/PJLg5p9+/YJAGLTpk3iww8/FPXr1xfm5ubi5ZdfFhcuXJDqOnbsKAAYLA+O4UENGjQQISEhZa67deuWsLe3F/Xr1xclJSUPPQ6dTifGjRsnGjRoIJRKpXB0dBTBwcEiKSnpkePSH9s333wj3n33XeHm5iYUCoW4efOmtG7fvn0Gx9i8eXNx/PhxERQUJCwsLETDhg3FypUrDca+bt06AUCkp6eXOZf6PssbW3p6ugAg1q1bZ9BHQkKCePHFF4WVlZWwtbUVr776qjh37pxBzYwZMwQAceHCBREWFiZsbW2FSqUSw4cPF/n5+eX8RP7+mTw4pvvne/ny5cLPz08olUrh6uoqRo8eLW7evGnQx/3z1KFDB2FpaSnGjRtX5v7KmoOwsDBp/eHDh0W3bt2EnZ2dsLKyEv7+/mLx4sXlHgNRRfBME1E127FjB7y9vREQEFCh7e7duwe1Wo0XX3wRn3zyifQv8djYWPz1118YNWoUHBwccPToUSxduhRXr15FbGystP1PP/2Efv36wc/PD3PmzMGNGzcQHh4Od3f3R+47KysLgYGBUCgUGDNmDBwdHbFz505ERkZCp9Nh/PjxBvVz586FiYkJJk2aBK1Wi/nz52Pw4MHStUfvvvsutFotrl69ikWLFgEAbGxsKjQ
f97OxsUGfPn3w5Zdf4ty5c2jevHmZdSNHjsS3336LMWPGwM/PDzdu3MCvv/6K8+fPo3Xr1rLG9cEHH0CpVGLSpEkoKCiAUql86Lhu3ryJ7t27o3///ggNDcXmzZsxatQoKJVKREREVOgYKzpne/bsQbdu3dCoUSPMnDkTd+7cwdKlS9G+fXucOHGi1MX//fv3h5eXF+bMmYMTJ05gzZo1cHJywrx58x66j8WLF+Orr77C1q1bsXLlStjY2KBFixYAgJkzZ2LWrFkIDg7GqFGjkJaWhpUrV+LYsWM4ePAgzMzMpH5u3LiBbt26YeDAgRgyZAicnZ0fOgfNmjXD6tWrMXv2bHh5eaFx48YAgPj4ePTo0QOurq4YN24cXFxccP78eezYsQPjxo2TNcdEj1TTqY3oaaLVagUA0bt371Lrbt68KXJycqTl/jNFYWFhAoCYOnVqqe3KOqM0Z84coVAoxB9//CG1tWrVSri6uoq8vDyp7aeffirzLA8eOGMQGRkpXF1dxfXr1w3qBg4cKGxtbaUx6M+O+Pr6ioKCAqluyZIlAoBITk6W2kJCQh55dul+5Z1pEkKIRYsWCQDi+++/f+hx2NraiqioqHL387Bx6Y+tUaNGpeb8YWeaAIiFCxdKbQUFBaJVq1bCyclJFBYWCiHkn2kqb2xlnWnS7+fGjRtS2+nTp4WJiYkYNmyY1KY/0xQREWHQZ58+fYSDg0OpfT1Iv31OTo7Ulp2dLZRKpejSpYsoLi6W2pctWyYAiLVr10pt+nlatWrVI/clxP/m69ixY1LbvXv3hJeXl2jQoEGpM1n3n3kkely8e46oGul0OgBlnyF46aWX4OjoKC3Lly8vVXP/9Tp6lpaW0p/z8/Nx/fp1vPDCCxBC4OTJkwCAzMxMnDp1CmFhYbC1tZXqX3nlFfj5+ZU7ZiEEvvvuO/Ts2RNCCFy/fl1a1Go1tFotTpw4YbBNeHi4wRmYDh06AAB+//33cvf1OPRzeuvWrYfW2NnZ4ciRI7h27Vql9xMWFmYw5+WpU6cO3nrrLem1UqnEW2+9hezsbCQlJVV6DI+i/3kPHz4c9vb2UnuLFi3wyiuv4Mcffyy1zciRIw1ed+jQATdu3JB+Zytiz549KCwsxPjx4w2uzRsxYgRUKhXi4uIM6s3NzREeHl7h/eidPHkS6enpGD9+fKlrnRQKRaX7JXoQQxNRNapbty4A4Pbt26XWff7554iPj8d//vOfMretU6dOmR+lZWRkSG+ONjY2cHR0RMeOHQEAWq0WAPDHH38AAJo0aVJq+2bNmpU75pycHOTl5WH16tUGoc7R0VF6o9NfnKvn6elp8PqZZ54B8PfHVVVFP6f6OS7L/PnzkZKSAg8PD7Rr1w4zZ86scJDz8vKSXevm5lbqYv2mTZsCQIXuzqso/c+7rJ+tr68vrl+/jvz8fIN2Y/7MHrZ/pVKJRo0aSev16tevX+7HnI9y6dIlAKi252LR04vXNBFVI1tbW7i6upZ5G7r+GqeHvZmam5uXuqOuuLgYr7zyCnJzczFlyhT4+PjA2toaf/75J4YPH46SkpLHHrO+jyFDhiAsLKzMGv11LHoPu+1dVOETTvRzWt5t7/3790eHDh2wdetW/PTTT1iwYAHmzZuHLVu2oFu3brL2I/csk1wPOxNSXFxs1P08Sk38zPSMPadEVYWhiaiahYSEYM2aNTh69CjatWv3WH0lJyfjt99+w/r16zFs2DCpPT4+3qBO/7ynCxculOojLS2t3H04Ojqibt26KC4uNupzcYz5scnt27exdetWeHh4wNfXt9xaV1dXjB49GqNHj0Z2djZat26Njz76SApNxhzXtWvXSj0a4rfffgMA6UJs/RmdB59q/eDZmIqMTf/zLutnm5qainr16lXpAyHv33+jRo2k9sLCQqSnpxv9+Ur6i8FTUlL47CaqUvx4jqiaTZ48GVZWVoiIiEBWVlap9RX5l73+7MD92wghsGTJEoM6V1dXtGr
VCuvXr5c+sgP+Dlfnzp175D769euH7777rswzZDk5ObLHez9ra2uDsVTWnTt3MHToUOTm5uLdd98t98zNg/tzcnKCm5sbCgoKjD4u4O87Hj///HPpdWFhIT7//HM4OjqiTZs2AP73hn/gwAGDsZb1ZG25Y7v/531/GEtJScFPP/2E7t27V/aQZAkODoZSqcRnn31m8Lv55ZdfQqvVIiQkxKj7a926Nby8vLB48eJS4bM6zpTR04NnmoiqWZMmTbBhwwaEhoaiWbNm0hPBhRBIT0/Hhg0bYGJiIutRAD4+PmjcuDEmTZqEP//8EyqVCt99912Z16HMmTMHISEhePHFFxEREYHc3FwsXboUzZs3L/Maq/vNnTsX+/btQ0BAAEaMGAE/Pz/k5ubixIkT2LNnD3Jzcys8D23atMGmTZsQHR2N559/HjY2NujZs2e52/z555/SNV+3b9/GuXPnpCeCT5w40eCi6wfdunUL7u7ueO2119CyZUvY2Nhgz549OHbsGBYuXPhY43oYNzc3zJs3D5cvX0bTpk2xadMmnDp1CqtXr5ZuuW/evDkCAwMxbdo05Obmwt7eHhs3bsS9e/dK9VeRsS1YsADdunVDUFAQIiMjpUcO2NraVvn38Tk6OmLatGmYNWsWunbtildffRVpaWlYsWIFnn/++VIPKH1cJiYmWLlyJXr27IlWrVohPDwcrq6uSE1NxdmzZ7F7926j7o+eYjV01x7RU+/ixYti1KhRwtvbW1hYWAhLS0vh4+MjRo4cKU6dOmVQq3+4ZVnOnTsngoODhY2NjahXr54YMWKEOH36dJkPO/zuu++Er6+vMDc3F35+fhV6uGVWVpaIiooSHh4ewszMTLi4uIjOnTuL1atXSzX62+RjY2MNti3rlvjbt2+LQYMGCTs7O9kPt8T/P9BQoVAIlUolmjdvLkaMGCGOHDlS5jb3H0dBQYF45513RMuWLUXdunWFtbW1aNmypVixYoXBNg8b18OO7f51j3q4ZYMGDcSyZctKbX/p0iURHBwszM3NhbOzs/j3v/8t4uPjS/X5sLE97OGWe/bsEe3btxeWlpZCpVKJnj17PvThlvc/MkCIhz8K4UEP216Ivx8x4OPjI8zMzISzs7MYNWrUQx9uKVdZjxzQ+/XXX8Urr7wi/XxbtGghli5dKrtvokfhd88RERERycBrmoiIiIhkYGgiIiIikoGhiYiIiEgGhiYiIiIiGRiaiIiIiGRgaCIiIiKSgQ+3NJKSkhJcu3YNdevW5bdqExERPSGEELh16xbc3NxKfb/ngxiajOTatWvw8PCo6WEQERFRJVy5cuWR38TA0GQkdevWBfD3pKtUqhoeDREREcmh0+ng4eEhvY+Xh6HJSPQfyalUKoYmIiKiJ4ycS2t4ITgRERGRDAxNRERERDIwNBERERHJwGuaqllxcTGKiopqehj0FDAzM4OpqWlND4OI6B+DoamaCCGg0WiQl5dX00Ohp4idnR1cXFz47DAiIiNgaKom+sDk5OQEKysrvolRlRJC4K+//kJ2djYAwNXVtYZHRET05GNoqgbFxcVSYHJwcKjp4dBTwtLSEgCQnZ0NJycnflRHRPSYeCF4NdBfw2RlZVXDI6Gnjf53jtfRERE9PoamasSP5Ki68XeOiMh4GJqIiIiIZGBoIqqk/fv3Q6FQSHdExsTEwM7OrkbHREREVYcXgte0nj2rb1/bt1d4k+HDhyMvLw/btm0rc/3p06fx/vvv4/Dhw9DpdHBxcUFAQACWLl2KFStWYNasWeX2L4TA8OHDsX79erz11ltYtWqVwfqoqCisWLECYWFhiImJKbefNWvWYO3atTh79ixKSkrQoEEDBAcHY+zYsfD29q7ooVfYgAED0L17d6P2uX//fnTq1Ak3b95kICMiqmE800SVlpOTg86dO8Pe3h67d+/G+fPnsW7dOri5uSE/Px+TJk1CZmamtLi7u2P27NkGbXoeHh7YuHEj7ty5I7XdvXs
XGzZsgKenZ7njEEJg0KBBePvtt9G9e3f89NNPOHfuHL788ktYWFjgww8/fOi2hYWFjz8R/8/S0hJOTk5G64+IiGoXhiaqtIMHD0Kr1WLNmjV47rnn4OXlhU6dOmHRokXw8vKCjY0NXFxcpMXU1BR169Y1aNNr3bo1PDw8sGXLFqlty5Yt8PT0xHPPPVfuODZt2oSNGzdi06ZNeP/99xEYGAhPT08EBgZi3rx5WLdunVQ7fPhw9O7dGx999BHc3NzQrFkzAMDXX3+Ntm3bSuMbNGiQ9IwjvR9//BFNmzaFpaUlOnXqhMuXLxusL+vjue+//x6tW7eGhYUFGjVqhFmzZuHevXvSeoVCgTVr1qBPnz6wsrJCkyZN8MMPPwAALl++jE6dOgEAnnnmGSgUCgwfPhwA8O2338Lf3x+WlpZwcHBAcHAw8vPzy50nIiJ6PAxNVGkuLi64d+8etm7dCiHEY/cXERFhEHDWrl2L8PDwR273zTffoFmzZnj11VfLXP/gHWQJCQlIS0tDfHw8duzYAeDvW/I/+OADnD59Gtu2bcPly5elgAIAV65cQd++fdGzZ0+cOnUKb7zxBqZOnVruuH755RcMGzYM48aNw7lz5/D5558jJiYGH330kUHdrFmz0L9/f5w5cwbdu3fH4MGDkZubCw8PD3z33XcAgLS0NGRmZmLJkiXIzMxEaGgoIiIicP78eezfvx99+/Y1ys+AiIgejtc0UaUFBgbi3//+NwYNGoSRI0eiXbt2ePnllzFs2DA4OztXuL8hQ4Zg2rRp+OOPPwD8fSZr48aN2L9/f7nb/fbbb9IZI73x48djzZo1AP7+KpGrV69K66ytrbFmzRoolUqpLSIiQvpzo0aN8Nlnn+H555/H7du3YWNjg5UrV6Jx48ZYuHAhAKBZs2ZITk7GvHnzHjquWbNmYerUqQgLC5P6/eCDDzB58mTMmDFDqhs+fDhCQ0MBAB9//DE+++wzHD16FF27doW9vT0AwMnJSTqLdenSJdy7dw99+/ZFgwYNAAD+/v7lzhER0ROnrGt+K3FtrjHxTBM9lo8++ggajQarVq1C8+bNsWrVKvj4+CA5ObnCfTk6OiIkJAQxMTFYt24dQkJCUK9evUqN691338WpU6cwffp03L5922Cdv7+/QWACgKSkJPTs2ROenp6oW7cuOnbsCADIyMgAAJw/fx4BAQEG2wQFBZU7htOnT2P27NmwsbGRlhEjRiAzMxN//fWXVNeiRQvpz9bW1lCpVKU+Grxfy5Yt0blzZ/j7++P111/HF198gZs3b5Y7FiIienwMTfTYHBwc8Prrr+OTTz7B+fPn4ebmhk8++aRSfUVERCAmJgbr1683OPtTniZNmiAtLc2gzdHREd7e3mVemG1tbW3wOj8/H2q1GiqVCv/9739x7NgxbN26FcDjXSh++/ZtzJo1C6dOnZKW5ORkXLhwARYWFlKdmZmZwXYKhQIlJSUP7dfU1BTx8fHYuXMn/Pz8sHTpUjRr1gzp6emVHisRET0aQxMZlVKpROPGjSt9UXLXrl1RWFiIoqIiqNVqWduEhoYiLS0N33//faX2mZqaihs3bmDu3Lno0KEDfHx8Sp3p8fX1xdGjRw3aDh8+XG6/rVu3RlpaGry9vUstJiby/urpz4gVFxcbtCsUCrRv3x6zZs3CyZMnoVQqpaBHRERVg9c00SNptVqcOnXKoM3BwQGnT5/Gxo0bMXDgQDRt2hRCCGzfvh0//vijwQXdFWFqaorz589Lf5Zj4MCB2LJlCwYOHIhp06ZBrVbD2dkZf/zxBzZt2vTIfjw9PaFUKrF06VKMHDkSKSkp+OCDDwxqRo4ciYULF+Kdd97BG2+8gaSkpHKfGwUA06dPR48ePeDp6YnXXnsNJiYmOH36NFJSUsp9DML9GjRoAIVCgR07dqB79+6wtLTE2bNnkZCQgC5dusDJyQlHjhxBTk4OfH19ZfVJRESVwzNN9Ej79+/Hc889Z7DMmjU
Lfn5+sLKywsSJE9GqVSsEBgZi8+bNWLNmDYYOHVrp/alUKqhUKtn1CoUCmzZtwuLFi/Hjjz+ic+fOaNasGSIiIuDh4YFff/213O0dHR0RExOD2NhY+Pn5Ye7cuaU+XvT09MR3332Hbdu2oWXLlli1ahU+/vjjcvtVq9XYsWMHfvrpJzz//PMIDAzEokWLpIu35ahfv750QbmzszPGjBkDlUqFAwcOoHv37mjatCnee+89LFy4EN26dZPdLxERVZxC8D5lo9DpdLC1tYVWqy31hn/37l2kp6fDy8vL4FoWoqrG3z0iemJV091z5b1/P4hnmoiIiIhkYGgiIiIikoGhiYiIiEgGhiYiIiIiGRiaqhGvuafqxt85IiLjYWiqBvonPt//1RlE1UH/O/fgU8eJiKji+HDLamBqago7OzvpKdNWVlZQKBQ1PCr6JxNC4K+//kJ2djbs7OxkPyiUiIgejqGpmri4uABAuV/ESmRsdnZ20u8eERE9HoamaqJQKODq6gonJycUFRXV9HDoKWBmZsYzTERERsTQVM1MTU35RkZERPQE4oXgRERERDIwNBERERHJUKOhqWHDhlAoFKWWqKgoAH9/2WhUVBQcHBxgY2ODfv36ISsry6CPjIwMhISEwMrKCk5OTnjnnXdw7949g5r9+/ejdevWMDc3h7e3N2JiYkqNZfny5WjYsCEsLCwQEBCAo0ePVtlxExER0ZOnRkPTsWPHkJmZKS3x8fEAgNdffx0AMGHCBGzfvh2xsbH4+eefce3aNfTt21favri4GCEhISgsLMShQ4ewfv16xMTEYPr06VJNeno6QkJC0KlTJ5w6dQrjx4/HG2+8gd27d0s1mzZtQnR0NGbMmIETJ06gZcuWUKvVvNONiIiIJApRix4ZPH78eOzYsQMXLlyATqeDo6MjNmzYgNdeew0AkJqaCl9fXyQmJiIwMBA7d+5Ejx49cO3aNTg7OwMAVq1ahSlTpiAnJwdKpRJTpkxBXFwcUlJSpP0MHDgQeXl52LVrFwAgICAAzz//PJYtWwYAKCkpgYeHB8aOHYupU6fKGrtOp4OtrS20Wi1UKpUxp4WIiOjp07Nn6bbt242+m4q8f9eaa5oKCwvxn//8BxEREVAoFEhKSkJRURGCg4OlGh8fH3h6eiIxMREAkJiYCH9/fykwAYBarYZOp8PZs2elmvv70Nfo+ygsLERSUpJBjYmJCYKDg6WashQUFECn0xksRERE9M9Va0LTtm3bkJeXh+HDhwMANBoNlEol7OzsDOqcnZ2h0WikmvsDk369fl15NTqdDnfu3MH169dRXFxcZo2+j7LMmTMHtra20uLh4VHhYyYiIqInR60JTV9++SW6desGNze3mh6KLNOmTYNWq5WWK1eu1PSQiIiIqArViodb/vHHH9izZw+2bNkitbm4uKCwsBB5eXkGZ5uysrKkr4VwcXEpdZeb/u66+2sevOMuKysLKpUKlpaW0sMmy6op7+snzM3NYW5uXvGDJSIioidSrTjTtG7dOjg5OSEkJERqa9OmDczMzJCQkCC1paWlISMjA0FBQQCAoKAgJCcnG9zlFh8fD5VKBT8/P6nm/j70Nfo+lEol2rRpY1BTUlKChIQEqYaIiIioxs80lZSUYN26dQgLC0OdOv8bjq2tLSIjIxEdHQ17e3uoVCqMHTsWQUFBCAwMBAB06dIFfn5+GDp0KObPnw+NRoP33nsPUVFR0lmgkSNHYtmyZZg8eTIiIiKwd+9ebN68GXFxcdK+oqOjERYWhrZt26Jdu3ZYvHgx8vPzER4eXr2TQURERLVWjYemPXv2ICMjAxEREaXWLVq0CCYmJujXrx8KCgqgVquxYsUKab2pqSl27NiBUaNGISgoCNbW1ggLC8Ps2bOlGi8vL8TFxWHChAlYsmQJ3N3dsWbNGqjVaqlmwIAByMnJwfTp06HRaNCqVSvs2rWr1MXhRERE9PSqVc9pepLxOU1ERERGxOc0ERERET2
ZGJqIiIiIZGBoIiIiIpKBoYmIiIhIBoYmIiIiIhkYmoiIiIhkYGgiIiIikoGhiYiIiEgGhiYiIiIiGRiaiIiIiGRgaCIiIiKSgaGJiIiISAaGJiIiIiIZGJqIiIiIZGBoIiIiIpKBoYmIiIhIBoYmIiIiIhkYmoiIiIhkYGgiIiIikoGhiYiIiEgGhiYiIiIiGRiaiIiIiGRgaCIiIiKSgaGJiIiISAaGJiIiIiIZGJqIiIiIZGBoIiIiIpKBoYmIiIhIBoYmIiIiIhkYmoiIiIhkYGgiIiIikoGhiYiIiEgGhiYiIiIiGRiaiIiIiGRgaCIiIiKSgaGJiIiISIYaD01//vknhgwZAgcHB1haWsLf3x/Hjx+X1gshMH36dLi6usLS0hLBwcG4cOGCQR+5ubkYPHgwVCoV7OzsEBkZidu3bxvUnDlzBh06dICFhQU8PDwwf/78UmOJjY2Fj48PLCws4O/vjx9//LFqDpqIiIieODUamm7evIn27dvDzMwMO3fuxLlz57Bw4UI888wzUs38+fPx2WefYdWqVThy5Aisra2hVqtx9+5dqWbw4ME4e/Ys4uPjsWPHDhw4cABvvvmmtF6n06FLly5o0KABkpKSsGDBAsycOROrV6+Wag4dOoTQ0FBERkbi5MmT6N27N3r37o2UlJTqmQwiIiKq1RRCCFFTO586dSoOHjyIX375pcz1Qgi4ublh4sSJmDRpEgBAq9XC2dkZMTExGDhwIM6fPw8/Pz8cO3YMbdu2BQDs2rUL3bt3x9WrV+Hm5oaVK1fi3XffhUajgVKplPa9bds2pKamAgAGDBiA/Px87NixQ9p/YGAgWrVqhVWrVj3yWHQ6HWxtbaHVaqFSqR5rXoiIiJ56PXuWbtu+3ei7qcj7d42eafrhhx/Qtm1bvP7663BycsJzzz2HL774Qlqfnp4OjUaD4OBgqc3W1hYBAQFITEwEACQmJsLOzk4KTAAQHBwMExMTHDlyRKr517/+JQUmAFCr1UhLS8PNmzelmvv3o6/R7+dBBQUF0Ol0BgsRERH9c9VoaPr999+xcuVKNGnSBLt378aoUaPw9ttvY/369QAAjUYDAHB2djbYztnZWVqn0Wjg5ORksL5OnTqwt7c3qCmrj/v38bAa/foHzZkzB7a2ttLi4eFR4eMnIiKiJ0eNhqaSkhK0bt0aH3/8MZ577jm8+eabGDFihKyPw2ratGnToNVqpeXKlSs1PSQiIiKqQjUamlxdXeHn52fQ5uvri4yMDACAi4sLACArK8ugJisrS1rn4uKC7Oxsg/X37t1Dbm6uQU1Zfdy/j4fV6Nc/yNzcHCqVymAhIiKif64aDU3t27dHWlqaQdtvv/2GBg0aAAC8vLzg4uKChIQEab1Op8ORI0cQFBQEAAgKCkJeXh6SkpKkmr1796KkpAQBAQFSzYEDB1BUVCTVxMfHo1mzZtKdekFBQQb70dfo90NERERPtxoNTRMmTMDhw4fx8ccf4+LFi9iwYQNWr16NqKgoAIBCocD48ePx4Ycf4ocffkBycjKGDRsGNzc39O7dG8DfZ6a6du2KESNG4OjRozh48CDGjBmDgQMHws3NDQAwaNAgKJVKREZG4uzZs9i0aROWLFmC6OhoaSzjxo3Drl27sHDhQqSmpmLmzJk4fvw4xowZU+3zQkRERLWQqGHbt28Xzz77rDA3Nxc+Pj5i9erVButLSkrE+++/L5ydnYW5ubno3LmzSEtLM6i5ceOGCA0NFTY2NkKlUonw8HBx69Ytg5rTp0+LF198UZibm4v69euLuXPnlhrL5s2bRdOmTYVSqRTNmzcXcXFxso9Dq9UKAEKr1Vbg6ImIiKhMPXqUXqpARd6/a/Q5Tf8kfE4TERGREfE5TURERERPJoYmIiIiIhkYmoiIiIhkYGgiIiIikoGhiYiIiEgGhiYiIiIiGRiaiIiIiGRgaCIiIiKSgaGJiIiISAaGJiIiIiIZGJqIiIiIZGBoIiIiIpK
BoYmIiIhIBoYmIiIiIhkYmoiIiIhkYGgiIiIikoGhiYiIiEgGhiYiIiIiGRiaiIiIiGRgaCIiIiKSgaGJiIiISAaGJiIiIiIZGJqIiIiIZGBoIiIiIpKBoYmIiIhIBoYmIiIiIhkYmoiIiIhkYGgiIiIikoGhiYiIiEgGhiYiIiIiGRiaiIiIiGRgaCIiIiKSgaGJiIiISAaGJiIiIiIZGJqIiIiIZGBoIiIiIpKhRkPTzJkzoVAoDBYfHx9p/d27dxEVFQUHBwfY2NigX79+yMrKMugjIyMDISEhsLKygpOTE9555x3cu3fPoGb//v1o3bo1zM3N4e3tjZiYmFJjWb58ORo2bAgLCwsEBATg6NGjVXLMRERE9GSq8TNNzZs3R2ZmprT8+uuv0roJEyZg+/btiI2Nxc8//4xr166hb9++0vri4mKEhISgsLAQhw4dwvr16xETE4Pp06dLNenp6QgJCUGnTp1w6tQpjB8/Hm+88QZ2794t1WzatAnR0dGYMWMGTpw4gZYtW0KtViM7O7t6JoGIiIhqPYUQQtTUzmfOnIlt27bh1KlTpdZptVo4Ojpiw4YNeO211wAAqamp8PX1RWJiIgIDA7Fz50706NED165dg7OzMwBg1apVmDJlCnJycqBUKjFlyhTExcUhJSVF6nvgwIHIy8vDrl27AAABAQF4/vnnsWzZMgBASUkJPDw8MHbsWEydOlXWseh0Otja2kKr1UKlUj3OtBAREVHPnqXbtm83+m4q8v5d42eaLly4ADc3NzRq1AiDBw9GRkYGACApKQlFRUUIDg6Wan18fODp6YnExEQAQGJiIvz9/aXABABqtRo6nQ5nz56Vau7vQ1+j76OwsBBJSUkGNSYmJggODpZqylJQUACdTmewEBER0T9XjYamgIAAxMTEYNeuXVi5ciXS09PRoUMH3Lp1CxqNBkqlEnZ2dgbbODs7Q6PRAAA0Go1BYNKv168rr0an0+HOnTu4fv06iouLy6zR91GWOXPmwNbWVlo8PDwqNQdERET0ZKhTkzvv1q2b9OcWLVogICAADRo0wObNm2FpaVmDI3u0adOmITo6Wnqt0+kYnIiIiP7BavzjufvZ2dmhadOmuHjxIlxcXFBYWIi8vDyDmqysLLi4uAAAXFxcSt1Np3/9qBqVSgVLS0vUq1cPpqamZdbo+yiLubk5VCqVwUJERET/XLUqNN2+fRuXLl2Cq6sr2rRpAzMzMyQkJEjr09LSkJGRgaCgIABAUFAQkpOTDe5yi4+Ph0qlgp+fn1Rzfx/6Gn0fSqUSbdq0MagpKSlBQkKCVENERERUo6Fp0qRJ+Pnnn3H58mUcOnQIffr0gampKUJDQ2Fra4vIyEhER0dj3759SEpKQnh4OIKCghAYGAgA6NKlC/z8/DB06FCcPn0au3fvxnvvvYeoqCiYm5sDAEaOHInff/8dkydPRmpqKlasWIHNmzdjwoQJ0jiio6PxxRdfYP369Th//jxGjRqF/Px8hIeH18i8EBERUe1To9c0Xb16FaGhobhx4wYcHR3x4osv4vDhw3B0dAQALFq0CCYmJujXrx8KCgqgVquxYsUKaXtTU1Ps2LEDo0aNQlBQEKytrREWFobZs2dLNV5eXoiLi8OECROwZMkSuLu7Y82aNVCr1VLNgAEDkJOTg+nTp0Oj0aBVq1bYtWtXqYvDiYiI6OlVo89p+ifhc5qIiIiMiM9pIiIiInoyMTQRERERycDQRERERCQDQxMRERGRDAxNRERERDIwNBERERHJwNBEREREJANDExEREZEMDE1EREREMjA0EREREclQqdD0+++/G3scRERERLVapUKTt7c3OnXqhP/85z+4e/euscdEREREVOtUKjSdOHECLVq0QHR0NFxcXPDWW2/h6NGjxh4bERERUa1RqdDUqlUrLFmyBNeuXcPatWuRmZmJF198Ec8++yw+/fRT5OTkGHucRERERDXqsS4Er1OnDvr27YvY2FjMmzc
PFy9exKRJk+Dh4YFhw4YhMzPTWOMkIiIiqlGPFZqOHz+O0aNHw9XVFZ9++ikmTZqES5cuIT4+HteuXUOvXr2MNU4iIiKiGlWnMht9+umnWLduHdLS0tC9e3d89dVX6N69O0xM/s5gXl5eiImJQcOGDY05ViIiIqIaU6nQtHLlSkRERGD48OFwdXUts8bJyQlffvnlYw2OiIiIqLaoVGi6cOHCI2uUSiXCwsIq0z0RERFRrVOpa5rWrVuH2NjYUu2xsbFYv379Yw+KiIiIqLapVGiaM2cO6tWrV6rdyckJH3/88WMPioiIiKi2qVRoysjIgJeXV6n2Bg0aICMj47EHRURERFTbVCo0OTk54cyZM6XaT58+DQcHh8ceFBEREVFtU6nQFBoairfffhv79u1DcXExiouLsXfvXowbNw4DBw409hiJiIiIalyl7p774IMPcPnyZXTu3Bl16vzdRUlJCYYNG8ZrmoiIiOgfqVKhSalUYtOmTfjggw9w+vRpWFpawt/fHw0aNDD2+IiIiIhqhUqFJr2mTZuiadOmxhoLERERUa1VqdBUXFyMmJgYJCQkIDs7GyUlJQbr9+7da5TBEREREdUWlQpN48aNQ0xMDEJCQvDss89CoVAYe1xEREREtUqlQtPGjRuxefNmdO/e3djjISIiIqqVKvXIAaVSCW9vb2OPhYiIiKjWqlRomjhxIpYsWQIhhLHHQ0RERFQrVerjuV9//RX79u3Dzp070bx5c5iZmRms37Jli1EGR0RERFRbVCo02dnZoU+fPsYeCxEREVGtVanQtG7dOmOPg4iIiKhWq9Q1TQBw79497NmzB59//jlu3boFALh27Rpu375ttMERERER1RaVCk1//PEH/P390atXL0RFRSEnJwcAMG/ePEyaNKlSA5k7dy4UCgXGjx8vtd29exdRUVFwcHCAjY0N+vXrh6ysLIPtMjIyEBISAisrKzg5OeGdd97BvXv3DGr279+P1q1bw9zcHN7e3oiJiSm1/+XLl6Nhw4awsLBAQEAAjh49WqnjICIion+mSoWmcePGoW3btrh58yYsLS2l9j59+iAhIaHC/R07dgyff/45WrRoYdA+YcIEbN++HbGxsfj5559x7do19O3bV1pfXFyMkJAQFBYW4tChQ1i/fj1iYmIwffp0qSY9PR0hISHo1KkTTp06hfHjx+ONN97A7t27pZpNmzYhOjoaM2bMwIkTJ9CyZUuo1WpkZ2dX+FiIiIjoH0pUgr29vUhNTRVCCGFjYyMuXbokhBAiPT1dWFpaVqivW7duiSZNmoj4+HjRsWNHMW7cOCGEEHl5ecLMzEzExsZKtefPnxcARGJiohBCiB9//FGYmJgIjUYj1axcuVKoVCpRUFAghBBi8uTJonnz5gb7HDBggFCr1dLrdu3aiaioKOl1cXGxcHNzE3PmzJF9HFqtVgAQWq1W/sETERFR2Xr0KL1UgYq8f1fqTFNJSQmKi4tLtV+9ehV169atUF9RUVEICQlBcHCwQXtSUhKKiooM2n18fODp6YnExEQAQGJiIvz9/eHs7CzVqNVq6HQ6nD17Vqp5sG+1Wi31UVhYiKSkJIMaExMTBAcHSzVlKSgogE6nM1iIiIjon6tSoalLly5YvHix9FqhUOD27duYMWNGhb5aZePGjThx4gTmzJlTap1Go4FSqYSdnZ1Bu7OzMzQajVRzf2DSr9evK69Gp9Phzp07uH79OoqLi8us0fdRljlz5sDW1lZaPDw85B00ERERPZEqFZoWLlyIgwcPws/PD3fv3sWgQYPQsGFD/Pnnn5g3b56sPq5cuYJx48bhv//9LywsLCozjBo1bdo0aLVaably5UpND4mIiIiqUKWe0+Tu7o7Tp09j48aNOHPmDG7fvo3IyEgMHjzY4MLw8iQlJSE7OxutW7eW2oqLi3HgwAEsW7YMu3fvRmFhIfLy8gzONmVlZcHFxQUA4OLiUuouN/3ddffXPHjHXVZWFlQqFSw
tLWFqagpTU9Mya/R9lMXc3Bzm5uayjpWIiIiefJUKTQBQp04dDBkypNI77ty5M5KTkw3awsPD4ePjgylTpsDDwwNmZmZISEhAv379AABpaWnIyMhAUFAQACAoKAgfffQRsrOz4eTkBACIj4+HSqWCn5+fVPPjjz8a7Cc+Pl7qQ6lUok2bNkhISEDv3r0B/H3NVkJCAsaMGVPp4yMiIqJ/lkqFpq+++qrc9cOGDXtkH3Xr1sWzzz5r0GZtbQ0HBwepPTIyEtHR0bC3t4dKpcLYsWMRFBSEwMBAAH9fW+Xn54ehQ4di/vz50Gg0eO+99xAVFSWdBRo5ciSWLVuGyZMnIyIiAnv37sXmzZsRFxcn7Tc6OhphYWFo27Yt2rVrh8WLFyM/Px/h4eEVmhciIiL656pUaBo3bpzB66KiIvz1119QKpWwsrKSFZrkWLRoEUxMTNCvXz8UFBRArVZjxYoV0npTU1Ps2LEDo0aNQlBQEKytrREWFobZs2dLNV5eXoiLi8OECROwZMkSuLu7Y82aNVCr1VLNgAEDkJOTg+nTp0Oj0aBVq1bYtWtXqYvDiYiI6OmlEEIIY3R04cIFjBo1Cu+8845BIHla6HQ62NraQqvVQqVS1fRwiIiInmw9e5Zu277d6LupyPt3pb977kFNmjTB3LlzS52FIiIiIvonMFpoAv6+OPzatWvG7JKIiIioVqjUNU0//PCDwWshBDIzM7Fs2TK0b9/eKAMjIiIiqk0qFZr0t+brKRQKODo64uWXX8bChQuNMS4iIiKiWqVSoamkpMTY4yAiIiKq1Yx6TRMRERHRP1WlzjRFR0fLrv30008rswsiIiKiWqVSoenkyZM4efIkioqK0KxZMwDAb7/9BlNTU4PvklMoFMYZJREREVENq1Ro6tmzJ+rWrYv169fjmWeeAQDcvHkT4eHh6NChAyZOnGjUQRIRERHVtEo9Ebx+/fr46aef0Lx5c4P2lJQUdOnS5al8VhOfCE5ERGRE/5Qngut0OuTk5JRqz8nJwa1btyrTJREREVGtVqnQ1KdPH4SHh2PLli24evUqrl69iu+++w6RkZHo27evscdIREREVOMqdU3TqlWrMGnSJAwaNAhFRUV/d1SnDiIjI7FgwQKjDpCIiIioNqjUNU16+fn5uHTpEgCgcePGsLa2NtrAnjS8pomIiMiI/inXNOllZmYiMzMTTZo0gbW1NR4jfxERERHVapUKTTdu3EDnzp3RtGlTdO/eHZmZmQCAyMhIPm6AiIiI/pEqFZomTJgAMzMzZGRkwMrKSmofMGAAdu3aZbTBEREREdUWlboQ/KeffsLu3bvh7u5u0N6kSRP88ccfRhkYERERUW1SqTNN+fn5BmeY9HJzc2Fubv7YgyIiIiKqbSoVmjp06ICvvvpKeq1QKFBSUoL58+ejU6dORhscERERUW1RqY/n5s+fj86dO+P48eMoLCzE5MmTcfbsWeTm5uLgwYPGHiMRERFRjavUmaZnn30Wv/32G1588UX06tUL+fn56Nu3L06ePInGjRsbe4xERERENa7CZ5qKiorQtWtXrFq1Cu+++25VjImIiIio1qnwmSYzMzOcOXOmKsZCREREVGtV6uO5IUOG4MsvvzT2WIiIiIhqrUpdCH7v3j2sXbsWe/bsQZs2bUp959ynn35qlMERERER1RYVCk2///47GjZsiJSUFLRu3RoA8NtvvxnUKBQK442OiIiIqJaoUGhq0qQJMjMzsW/fPgB/f23KZ599Bmdn5yoZHBEREVFtUaFrmoQQBq937tyJ/Px8ow6IiIiIqDaq1IXgeg+GKCIiIqJ/qgqFJoVCUeqaJV7DRERERE+DCl3TJITA8OHDpS/lvXv3LkaOHFnq7rktW7YYb4REREREtUCFQlNYWJjB6yFDhhh1MERERES1VYVC07p166pqHERERES12mNdCE5ERET0tGBoIiIiIpKhRkPTypUr0aJFC6hUKqhUKgQFBWHnzp3S+rt
37yIqKgoODg6wsbFBv379kJWVZdBHRkYGQkJCYGVlBScnJ7zzzju4d++eQc3+/fvRunVrmJubw9vbGzExMaXGsnz5cjRs2BAWFhYICAjA0aNHq+SYiYiI6MlUo6HJ3d0dc+fORVJSEo4fP46XX34ZvXr1wtmzZwEAEyZMwPbt2xEbG4uff/4Z165dQ9++faXti4uLERISgsLCQhw6dAjr169HTEwMpk+fLtWkp6cjJCQEnTp1wqlTpzB+/Hi88cYb2L17t1SzadMmREdHY8aMGThx4gRatmwJtVqN7Ozs6psMIiIiqtUUopY9odLe3h4LFizAa6+9BkdHR2zYsAGvvfYaACA1NRW+vr5ITExEYGAgdu7ciR49euDatWvSV7msWrUKU6ZMQU5ODpRKJaZMmYK4uDikpKRI+xg4cCDy8vKwa9cuAEBAQACef/55LFu2DABQUlICDw8PjB07FlOnTpU1bp1OB1tbW2i1WqhUKmNOCRER0dOnZ8/Sbdu3G303FXn/rjXXNBUXF2Pjxo3Iz89HUFAQkpKSUFRUhODgYKnGx8cHnp6eSExMBAAkJibC39/f4Lvv1Go1dDqddLYqMTHRoA99jb6PwsJCJCUlGdSYmJggODhYqilLQUEBdDqdwUJERET/XDUempKTk2FjYwNzc3OMHDkSW7duhZ+fHzQaDZRKJezs7AzqnZ2dodFoAAAajabUlwXrXz+qRqfT4c6dO7h+/TqKi4vLrNH3UZY5c+bA1tZWWjw8PCp1/ERERPRkqPHQ1KxZM5w6dQpHjhzBqFGjEBYWhnPnztX0sB5p2rRp0Gq10nLlypWaHhIRERFVoQo93LIqKJVKeHt7AwDatGmDY8eOYcmSJRgwYAAKCwuRl5dncLYpKysLLi4uAAAXF5dSd7np7667v+bBO+6ysrKgUqlgaWkJU1NTmJqallmj76Ms5ubm0tfJEBER0T9fjZ9pelBJSQkKCgrQpk0bmJmZISEhQVqXlpaGjIwMBAUFAQCCgoKQnJxscJdbfHw8VCoV/Pz8pJr7+9DX6PtQKpVo06aNQU1JSQkSEhKkGiIiIqIaPdM0bdo0dOvWDZ6enrh16xY2bNiA/fv3Y/fu3bC1tUVkZCSio6Nhb28PlUqFsWPHIigoCIGBgQCALl26wM/PD0OHDsX8+fOh0Wjw3nvvISoqSjoLNHLkSCxbtgyTJ09GREQE9u7di82bNyMuLk4aR3R0NMLCwtC2bVu0a9cOixcvRn5+PsLDw2tkXoiIiKj2qdHQlJ2djWHDhiEzMxO2trZo0aIFdu/ejVdeeQUAsGjRIpiYmKBfv34oKCiAWq3GihUrpO1NTU2xY8cOjBo1CkFBQbC2tkZYWBhmz54t1Xh5eSEuLg4TJkzAkiVL4O7ujjVr1kCtVks1AwYMQE5ODqZPnw6NRoNWrVph165dpS4OJyIioqdXrXtO05OKz2kiIiIyIj6niYiIiOjJxNBEREREJANDExEREZEMDE1EREREMjA0EREREcnA0EREREQkA0MTERERkQwMTUREREQyMDQRERERycDQRERERCQDQxMRERGRDAxNRERERDIwNBERERHJwNBEREREJANDExEREZEMDE1EREREMjA0EREREcnA0EREREQkA0MTERERkQwMTUREREQyMDQRERERycDQRERERCQDQxMRERGRDAxNRERERDIwNBERERHJwNBEREREJANDExEREZEMDE1EREREMjA0EREREcnA0EREREQkA0MTERERkQwMTUREREQyMDQRERERycDQRERERCQDQxMRERGRDAxNRERERDLUaGiaM2cOnn/+edStWxdOTk7o3bs30tLSDGru3r2LqKgoODg4wMbGBv369UNWVpZBTUZGBkJCQmBlZQUnJye88847uHfvnkHN/v370bp1a5ibm8Pb2xsxMTGlxrN8+XI0bNgQFhYWCAgIwNGjR41+zERERPRkqtHQ9PPPPyMqKgqHDx9GfHw8ioq
K0KVLF+Tn50s1EyZMwPbt2xEbG4uff/4Z165dQ9++faX1xcXFCAkJQWFhIQ4dOoT169cjJiYG06dPl2rS09MREhKCTp064dSpUxg/fjzeeOMN7N69W6rZtGkToqOjMWPGDJw4cQItW7aEWq1GdnZ29UwGERER1WoKIYSo6UHo5eTkwMnJCT///DP+9a9/QavVwtHRERs2bMBrr70GAEhNTYWvry8SExMRGBiInTt3okePHrh27RqcnZ0BAKtWrcKUKVOQk5MDpVKJKVOmIC4uDikpKdK+Bg4ciLy8POzatQsAEBAQgOeffx7Lli0DAJSUlMDDwwNjx47F1KlTHzl2nU4HW1tbaLVaqFQqY08NERHR06Vnz9Jt27cbfTcVef+uVdc0abVaAIC9vT0AICkpCUVFRQgODpZqfHx84OnpicTERABAYmIi/P39pcAEAGq1GjqdDmfPnpVq7u9DX6Pvo7CwEElJSQY1JiYmCA4OlmoeVFBQAJ1OZ7AQERHRP1etCU0lJSUYP3482rdvj2effRYAoNFooFQqYWdnZ1Dr7OwMjUYj1dwfmPTr9evKq9HpdLhz5w6uX7+O4uLiMmv0fTxozpw5sLW1lRYPD4/KHTgRERE9EWpNaIqKikJKSgo2btxY00ORZdq0adBqtdJy5cqVmh4SERERVaE6NT0AABgzZgx27NiBAwcOwN3dXWp3cXFBYWEh8vLyDM42ZWVlwcXFRap58C43/d1199c8eMddVlYWVCoVLC0tYWpqClNT0zJr9H08yNzcHObm5pU7YCIiInri1OiZJiEExowZg61bt2Lv3r3w8vIyWN+mTRuYmZkhISFBaktLS0NGRgaCgoIAAEFBQUhOTja4yy0+Ph4qlQp+fn5Szf196Gv0fSiVSrRp08agpqSkBAkJCVINERERPd1q9ExTVFQUNmzYgO+//x5169aVrh+ytbWFpaUlbG1tERkZiejoaNjb20OlUmHs2LEICgpCYGAgAKBLly7w8/PD0KFDMX/+fGg0Grz33nuIioqSzgSNHDkSy5Ytw+TJkxEREYG9e/di8+bNiIuLk8YSHR2NsLAwtG3bFu3atcPixYuRn5+P8PDw6p8YIiIiqnVqNDStXLkSAPDSSy8ZtK9btw7Dhw8HACxatAgmJibo168fCgoKoFarsWLFCqnW1NQUO3bswKhRoxAUFARra2uEhYVh9uzZUo2Xlxfi4uIwYcIELFmyBO7u7lizZg3UarVUM2DAAOTk5GD69OnQaDRo1aoVdu3aVericCIiIno61arnND3J+JwmIiIiI+JzmoiIiIieTAxNRERERDIwNBERERHJwNBEREREJANDExEREZEMDE1EREREMjA0EREREcnA0EREREQkA0MTERERkQwMTUREREQyMDQRERERycDQRERERCQDQxMRERGRDAxNRERERDIwNBERERHJwNBEREREJANDExEREZEMDE1EREREMjA0EREREcnA0EREREQkA0MTERERkQwMTUREREQyMDQRERERycDQRERERCQDQxMRERGRDAxNRERERDIwNBERERHJwNBEREREJANDExEREZEMDE1EREREMjA0EREREcnA0EREREQkA0MTERERkQwMTUREREQyMDQRERERyVCjoenAgQPo2bMn3NzcoFAosG3bNoP1QghMnz4drq6usLS0RHBwMC5cuGBQk5ubi8GDB0OlUsHOzg6RkZG4ffu2Qc2ZM2fQoUMHWFhYwMPDA/Pnzy81ltjYWPj4+MDCwgL+/v748ccfjX68RERE9OSq0dCUn5+Pli1bYvny5WWunz9/Pj777DOsWrUKR44cgbW1NdRqNe7evSvVDB48GGfPnkV8fDx27NiBAwcO4M0335TW63Q6dOnSBQ0aNEBSUhIWLFiAmTNnYvXq1VLNoUOHEBoaisjISJw8eRK9e/dG7969kZKSUnUHT0RERE8UhRBC1PQgAEChUGDr1q3o3bs3gL/PMrm5uWHixIm
YNGkSAECr1cLZ2RkxMTEYOHAgzp8/Dz8/Pxw7dgxt27YFAOzatQvdu3fH1atX4ebmhpUrV+Ldd9+FRqOBUqkEAEydOhXbtm1DamoqAGDAgAHIz8/Hjh07pPEEBgaiVatWWLVqlazx63Q62NraQqvVQqVSGWtaiIiInk49e5Zu277d6LupyPt3rb2mKT09HRqNBsHBwVKbra0tAgICkJiYCABITEyEnZ2dFJgAIDg4GCYmJjhy5IhU869//UsKTACgVquRlpaGmzdvSjX370dfo98PERERUZ2aHsDDaDQaAICzs7NBu7Ozs7ROo9HAycnJYH2dOnVgb29vUOPl5VWqD/26Z555BhqNptz9lKWgoAAFBQXSa51OV5HDIyIioidMrT3TVNvNmTMHtra20uLh4VHTQyIiIqIqVGtDk4uLCwAgKyvLoD0rK0ta5+LiguzsbIP19+7dQ25urkFNWX3cv4+H1ejXl2XatGnQarXScuXKlYoeIhERET1Bam1o8vLygouLCxISEqQ2nU6HI0eOICgoCAAQFBSEvLw8JCUlSTV79+5FSUkJAgICpJoDBw6gqKhIqomPj0ezZs3wzDPPSDX370dfo99PWczNzaFSqQwWIiIi+ueq0dB0+/ZtnDp1CqdOnQLw98Xfp06dQkZGBhQKBcaPH48PP/wQP/zwA5KTkzFs2DC4ublJd9j5+vqia9euGDFiBI4ePYqDBw9izJgxGDhwINzc3AAAgwYNglKpRGRkJM6ePYtNmzZhyZIliI6OlsYxbtw47Nq1CwsXLkRqaipmzpyJ48ePY8yYMdU9JURERFRbiRq0b98+AaDUEhYWJoQQoqSkRLz//vvC2dlZmJubi86dO4u0tDSDPm7cuCFCQ0OFjY2NUKlUIjw8XNy6dcug5vTp0+LFF18U5ubmon79+mLu3LmlxrJ582bRtGlToVQqRfPmzUVcXFyFjkWr1QoAQqvVVmwSiIiIqLQePUovVaAi79+15jlNTzo+p4mIiMiI+JwmIiIioicTQxMRERGRDAxNRERERDIwNBERERHJwNBEREREJANDExEREZEMDE1EREREMjA0EREREcnA0EREREQkA0MTERERkQwMTUREREQyMDQRERERycDQRERERCQDQxMRERGRDAxNRERERDIwNBERERHJwNBEREREJANDExEREZEMDE1EREREMjA0EREREcnA0EREREQkA0MTERERkQwMTUREREQyMDQRERERycDQRERERCQDQxMRERGRDAxNRERERDIwNBERERHJwNBEREREJANDExEREZEMDE1EREREMjA0EREREcnA0EREREQkA0MTERERkQwMTUREREQyMDQRERERycDQ9IDly5ejYcOGsLCwQEBAAI4ePVrTQyIiIqJagKHpPps2bUJ0dDRmzJiBEydOoGXLllCr1cjOzq7poREREVENY2i6z6effooRI0YgPDwcfn5+WLVqFaysrLB27dqaHhoRERHVMIam/1dYWIikpCQEBwdLbSYmJggODkZiYmINjoyIiIhqgzo1PYDa4vr16yguLoazs7NBu7OzM1JTU0vVFxQUoKCgQHqt1WoBADqdrmoHSkRE9DQoKirdVgXvsfr3bSHEI2sZmippzpw5mDVrVql2Dw+PGhgNERHRU8DWtsq6vnXrFmwf0T9D0/+rV68eTE1NkZWVZdCelZUFFxeXUvXTpk1DdHS09LqkpAS5ublwcHCAQqGo8vHWdjqdDh4eHrhy5QpUKlVND6dGcS4McT4McT7+h3NhiPPxP1U5F0II3Lp1C25ubo+sZWj6f0qlEm3atEFCQgJ69+4N4O8glJCQgDFjxpSqNzc3h7m5uUGbnZ1dNYz0yaJSqZ76v+x6nAtDnA9DnI//4VwY4nz8T1XNxaPOMOkxNN0nOjoaYWFhaNu2Ldq1a4fFixcjPz8f4eHhNT00IiIiqmEMTfcZMGAAcnJyMH36dGg0GrRq1Qq7du0qdXE4ERERPX0
Ymh4wZsyYMj+Oo4oxNzfHjBkzSn2E+TTiXBjifBjifPwP58IQ5+N/astcKISce+yIiIiInnJ8uCURERGRDAxNRERERDIwNBERERHJwNBEREREJANDE1VKbm4uBg8eDJVKBTs7O0RGRuL27dvlbnP37l1ERUXBwcEBNjY26NevX6knsANATEwMWrRoAQsLCzg5OSEqKqqqDsNoqmo+FApFqWXjxo1VeSiPrSp/NwDgxo0bcHd3h0KhQF5eXhUcgXFVxXzcuHEDXbt2hZubG8zNzeHh4YExY8Y8Ed99WRXzcfr0aYSGhsLDwwOWlpbw9fXFkiVLqvpQHltV/V15++230aZNG5ibm6NVq1ZVeASPZ/ny5WjYsCEsLCwQEBCAo0ePllsfGxsLHx8fWFhYwN/fHz/++KPBeiEEpk+fDldXV1haWiI4OBgXLlww7qAFUSV07dpVtGzZUhw+fFj88ssvwtvbW4SGhpa7zciRI4WHh4dISEgQx48fF4GBgeKFF14wqFm4cKFwc3MT//3vf8XFixfF6dOnxffff1+Vh2IUVTUfAMS6detEZmamtNy5c6cqD+WxVdVc6PXq1Ut069ZNABA3b96sgiMwrqqYj9zcXLFixQpx7NgxcfnyZbFnzx7RrFmzR/ZbG1TFfHz55Zfi7bffFvv37xeXLl0SX3/9tbC0tBRLly6t6sN5LFX1d2Xs2LFi2bJlYujQoaJly5ZVeASVt3HjRqFUKsXatWvF2bNnxYgRI4SdnZ3Iysoqs/7gwYPC1NRUzJ8/X5w7d0689957wszMTCQnJ0s1c+fOFba2tmLbtm3i9OnT4tVXXxVeXl5G/X8mQxNV2Llz5wQAcezYMalt586dQqFQiD///LPMbfLy8oSZmZmIjY2V2s6fPy8AiMTERCHE328ElpaWYs+ePVV7AEZWVfMhxN+haevWrVU2dmOryrkQQogVK1aIjh07ioSEhCciNFX1fNxvyZIlwt3d3XiDrwLVOR+jR48WnTp1Mt7gjaw65mLGjBm1NjS1a9dOREVFSa+Li4uFm5ubmDNnTpn1/fv3FyEhIQZtAQEB4q233hJCCFFSUiJcXFzEggULpPV5eXnC3NxcfPPNN0YbNz+eowpLTEyEnZ0d2rZtK7UFBwfDxMQER44cKXObpKQkFBUVITg4WGrz8fGBp6cnEhMTAQDx8fEoKSnBn3/+CV9fX7i7u6N///64cuVK1R7QY6qq+dCLiopCvXr10K5dO6xduxaiFj9arSrn4ty5c5g9eza++uormJg8Gf/rqurfDb1r165hy5Yt6Nixo3EPwMiqaz4AQKvVwt7e3niDN7LqnIvaprCwEElJSQbHYWJiguDg4IceR2JiokE9AKjVaqk+PT0dGo3GoMbW1hYBAQFGnZsn4/88VKtoNBo4OTkZtNWpUwf29vbQaDQP3UapVJb6UmNnZ2dpm99//x0lJSX4+OOPsXjxYnz77bfIzc3FK6+8gsLCwio5FmOoqvkAgNmzZ2Pz5s2Ij49Hv379MHr0aCxdutTox2AsVTUXBQUFCA0NxYIFC+Dp6VklY68KVfm7AQChoaGwsrJC/fr1oVKpsGbNGqOO39iqej70Dh06hE2bNuHNN980yrirQnXNRW10/fp1FBcXl/qKsvKOQ6PRlFuv/29F+qwMhiaSTJ06tcwLj+9fUlNTq2z/JSUlKCoqwmeffQa1Wo3AwEB88803uHDhAvbt21dl+32Ymp4PAHj//ffRvn17PPfcc5gyZQomT56MBQsWVOk+y1LTczFt2jT4+vpiyJAhVbaPiqjp+dBbtGgRTpw4ge+//x6XLl1CdHR0le+zLLVlPgAgJSUFvXr1wowZM9ClS5dq2ef9atNckPHxu+dIMnHiRAwfPrzcmkaNGsHFxQXZ2dkG7ffu3UNubi5cXFzK3M7FxQWFhYXIy8sz+FdSVlaWtI2rqysAwM/PT1rv6OiIevXqISMjoxJH9Hhqej7KEhAQgA8++AAFBQX
V+h1MNT0Xe/fuRXJyMr799lsAkD6irFevHt59913MmjWrkkdWOTU9H/fXuri4wMfHB/b29ujQoQPef/996e9Sdakt83Hu3Dl07twZb775Jt57771KHcvjqi1zUZvVq1cPpqampe76K+84XFxcyq3X/zcrK8vg9z8rK8u4dxAa7eooemroL2A8fvy41LZ7925ZFzB+++23UltqaqrBBYxpaWkCgMGF4Ddu3BAmJiZi9+7dVXQ0j6+q5qMsH374oXjmmWeMN3gjq6q5uHjxokhOTpaWtWvXCgDi0KFDD73bpjaozt+Nn3/+WQAQ6enpRhu/sVXlfKSkpAgnJyfxzjvvVN0BGFF1/G7U9gvBx4wZI70uLi4W9evXL/dC8B49ehi0BQUFlboQ/JNPPpHWa7Vao18IztBEldK1a1fx3HPPiSNHjohff/1VNGnSxOBW2atXr4pmzZqJI0eOSG0jR44Unp6eYu/eveL48eMiKChIBAUFGfTbq1cv0bx5c3Hw4EGRnJwsevToIfz8/ERhYWG1HVtlVMV8/PDDD+KLL74QycnJ4sKFC2LFihXCyspKTJ8+vVqPraKq6nfjfvv27Xsi7p4TomrmIy4uTqxdu1YkJyeL9PR0sWPHDuHr6yvat29frcdWGVUxH8nJycLR0VEMGTLE4PEc2dnZ1XpsFVVVf1cuXLggTp48Kd566y3RtGlTcfLkSXHy5ElRUFBQbcf2KBs3bhTm5uYiJiZGnDt3Trz55pvCzs5OaDQaIYQQQ4cOFVOnTpXqDx48KOrUqSM++eQTcf78eTFjxowyHzlgZ2cnvv/+e3HmzBnRq1cvPnKAaocbN26I0NBQYWNjI1QqlQgPDxe3bt2S1qenpwsAYt++fVLbnTt3xOjRo8UzzzwjrKysRJ8+fURmZqZBv1qtVkRERAg7Ozthb28v+vTpIzIyMqrrsCqtKuZj586dolWrVsLGxkZYW1uLli1bilWrVoni4uLqPLQKq6rfjfs9SaGpKuZj7969IigoSNja2goLCwvRpEkTMWXKlKd2PmbMmCEAlFoaNGhQjUdWcVX1d6Vjx45lzkdtOwu5dOlS4enpKZRKpWjXrp04fPiwtK5jx44iLCzMoH7z5s2iadOmQqlUiubNm4u4uDiD9SUlJeL9998Xzs7OwtzcXHTu3FmkpaUZdcwKIWrx/ctEREREtQTvniMiIiKSgaGJiIiISAaGJiIiIiIZGJqIiIiIZGBoIiIiIpKBoYmIiIhIBoYmIiIiIhkYmojoH2n48OHo3bu39Pqll17C+PHja2w8j+tJHz/RPwFDExFVOY1Gg3HjxsHb2xsWFhZwdnZG+/btsXLlSvz111/VMoYtW7bggw8+MGqfDwazsvTs2RNdu3Ytc90vv/wChUKBM2fOGHVcRFQ16tT0AIjon+33339H+/btYWdnh48//hj+/v4wNzdHcnIyVq9ejfr16+PVV18tc9uioiKYmZkZZRz29vZG6aeiIiMj0a9fP1y9ehXu7u4G69atW4e2bduiRYsWNTI2IqoYnmkioio1evRo1KlTB8ePH0f//v3h6+uLRo0aoVevXoiLi0PPnj2lWoVCgZUrV+LVV1+FtbU1PvroIxQXFyMyMhJeXl6wtLREs2bNsGTJEoN9FBcXIzo6GnZ2dnBwcMDkyZPx4DdEPfjxVkFBASZNmoT69evD2toaAQEB2L9/v7Q+JiYGdnZ22L17N3x9fWFjY4OuXbsiMzMTADBz5kysX78e33//PRQKBRQKhcH2ej169ICjoyNiYmIM2m/fvo3Y2FhERkbixo0bCA0NRf369WFlZQV/f39888035c6rQqHAtm3bDNrs7OwM9nPlyhX0798fdnZ2sLe3R69evXD58uVy+yWih2NoIqIqc+PGDfz000+IioqCtbV1mTUKhcLg9cyZM9GnTx8kJycjIiICJSUlcHd3R2xsLM6dO4fp06fj3//+NzZv3ixts3DhQsTExGDt2rX49ddfkZu
bi61bt5Y7tjFjxiAxMREbN27EmTNn8Prrr6Nr1664cOGCVPPXX3/hk08+wddff40DBw4gIyMDkyZNAgBMmjQJ/fv3l4JUZmYmXnjhhVL7qVOnDoYNG4aYmBiDIBcbG4vi4mKEhobi7t27aNOmDeLi4pCSkoI333wTQ4cOxdGjRx89yQ9RVFQEtVqNunXr4pdffsHBgwel4FdYWFjpfomeakb9+l8iovscPnxYABBbtmwxaHdwcBDW1tbC2tpaTJ48WWoHIMaPH//IfqOiokS/fv2k166urmL+/PnS66KiIuHu7i569eoltXXs2FGMGzdOCCHEH3/8IUxNTcWff/5p0G/nzp3FtGnThBBCrFu3TgAQFy9elNYvX75cODs7S6/DwsIM9vEw58+fL/Vt9R06dBBDhgx56DYhISFi4sSJZY5fiL/nauvWrQbb2NrainXr1gkhhPj6669Fs2bNRElJibS+oKBAWFpait27dz9yzERUGq9pIqJqd/ToUZSUlGDw4MEoKCgwWNe2bdtS9cuXL8fatWuRkZGBO3fuoLCwEK1atQIAaLVaZGZmIiAgQKqvU6cO2rZtW+ojOr3k5GQUFxejadOmBu0FBQVwcHCQXltZWaFx48bSa1dXV2RnZ1f4eH18fPDCCy9g7dq1eOmll3Dx4kX88ssvmD17NoC/P178+OOPsXnzZvz5558oLCxEQUEBrKysKrwvvdOnT+PixYuoW7euQfvdu3dx6dKlSvdL9DRjaCKiKuPt7Q2FQoG0tDSD9kaNGgEALC0tS23z4Md4GzduxKRJk7Bw4UIEBQWhbt26WLBgAY4cOVLpcd2+fRumpqZISkqCqampwTobGxvpzw9ehK5QKB4axB4lMjISY8eOxfLly7Fu3To0btwYHTt2BAAsWLAAS5YsweLFi+Hv7w9ra2uMHz++3I/RyhpLUVGRwTG2adMG//3vf0tt6+joWKljIHra8ZomIqoyDg4OeOWVV7Bs2TLk5+dXqo+DBw/ihRdewOjRo/Hcc8/B29vb4EyJra0tXF1dDULUvXv3kJSU9NA+n3vuORQXFyM7Oxve3t4Gi4uLi+yxKZVKFBcXy6rt378/TExMsGHDBnz11VeIiIiQruc6ePAgevXqhSFDhqBly5Zo1KgRfvvtt3L7c3R0lC5KB4ALFy4YPL6hdevWuHDhApycnEodo62trexjJKL/YWgioiq1YsUK3Lt3D23btsWmTZtw/vx5pKWl4T//+Q9SU1NLnel5UJMmTXD8+HHs3r0bv/32G95//30cO3bMoGbcuHGYO3cutm3bhtTUVIwePRp5eXkP7bNp06YYPHgwhg0bhi1btiA9PR1Hjx7FnDlzEBcXJ/vYGjZsiDNnziAtLQ3Xr183ONPzIBsbGwwYMADTpk1DZmYmhg8fbnCM8fHxOHToEM6fP4+33noLWVlZ5e775ZdfxrJly3Dy5EkcP34cI0eONDgzNnjwYNSrVw+9evXCL7/8gvT0dOzfvx9vv/02rl69KvsYieh/GJqIqEo1btwYJ0+eRHBwMKZNm4aWLVuibdu2WLp0KSZNmvTIB06+9dZb6Nu3LwYMGICAgADcuHEDo0ePNqiZOHEihg4dirCwMOkjvD59+pTb77p16zBs2DBMnDgRzZo1Q+/evXHs2DF4enrKPrYRI0agWbNmaNu2LRwdHXHw4MFy6yMjI3Hz5k2o1Wq4ublJ7e+99x5at24NtVqNl156CS4uLo98aObChQvh4eGBDh06YNCgQZg0aZLBNVBWVlY4cOAAPD090bdvX/j6+iIyMhJ3796FSqWSfYxE9D8KUdkP6ImIiIieIjzTRERERCQDQxMRERGRDAxNRERERDIwNBERERHJwNBEREREJANDExEREZEMDE1EREREMjA0EREREcnA0EREREQkA0MTERERkQwMTUREREQyMDQRERERyfB/iISOwAcaIQ0AAAAASUVORK5CYII=", - "text/plain": [ - 
"
" - ] - }, - "metadata": {}, - "output_type": "display_data" } ], "source": [ From a9bad6177dbfa26f90e5406e8dcb5c01d068550c Mon Sep 17 00:00:00 2001 From: LeoRoccoBreedt Date: Tue, 11 Mar 2025 14:56:09 +0100 Subject: [PATCH 046/125] style: remove old model architecture section --- .../pytorch_text_model_debugging.ipynb | 139 +++++++----------- 1 file changed, 55 insertions(+), 84 deletions(-) diff --git a/integrations-and-supported-tools/pytorch/pytorch_text_model_debugging.ipynb b/integrations-and-supported-tools/pytorch/pytorch_text_model_debugging.ipynb index e17f807..798c936 100644 --- a/integrations-and-supported-tools/pytorch/pytorch_text_model_debugging.ipynb +++ b/integrations-and-supported-tools/pytorch/pytorch_text_model_debugging.ipynb @@ -252,87 +252,7 @@ }, { "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "# Define the simple LLM model with LSTM\n", - "class SimpleLLM(nn.Module):\n", - " def __init__(self, vocab_size, embed_size, hidden_size, num_layers):\n", - " super(SimpleLLM, self).__init__()\n", - " self.embedding = nn.Embedding(vocab_size, embed_size)\n", - " self.lstm = nn.LSTM(embed_size, 512, num_layers = num_layers, dropout = 0.3, batch_first=True)\n", - " self.fc1 = nn.Linear(512, hidden_size)\n", - " self.fc2 = nn.Linear(hidden_size, vocab_size)\n", - "\n", - " # Initialize activations dictionary to store layer activations\n", - " self.activations = {}\n", - "\n", - " ## Registering hooks to track activations\n", - " self.hooks = []\n", - " self.hooks.append(self.lstm.register_forward_hook(self.save_activation(\"lstm\")))\n", - " self.hooks.append(self.fc1.register_forward_hook(self.save_activation(\"fc1\")))\n", - " self.hooks.append(self.fc2.register_forward_hook(self.save_activation(\"fc2\")))\n", - "\n", - " ## Registering hooks to track gradients\n", - " #self.hooks.append(self.lstm.register_full_backward_hook(self.save_gradient(\"lstm\")))\n", - " 
#self.hooks.append(self.fc1.register_full_backward_hook(self.save_gradient(\"fc1\")))\n", - " #self.hooks.append(self.fc2.register_full_backward_hook(self.save_gradient(\"fc2\")))\n", - " \n", - " def forward(self, x):\n", - " x = self.embedding(x)\n", - " lstm_out, _ = self.lstm(x) # LSTM returns output and hidden/cell state tuple\n", - " out = self.fc1(lstm_out) # Use the last output from the LSTM\n", - " out = self.fc2(out)\n", - " return out\n", - " \n", - " # Function to save activations\n", - " def save_activation(self, name):\n", - " def hook(model, input, output):\n", - " self.activations[name] = output\n", - " return hook\n", - " \n", - " def get_activations(self):\n", - " return self.activations\n", - "\n", - " def clear_activations(self):\n", - " self.activations = {}\n", - "\n", - " # Function to save gradients TODO: check where gradients should be calculated\n", - " def save_gradient(self, name):\n", - " def hook(module, grad_input, grad_output):\n", - " self.gradients[name] = grad_output[0] # Save gradient output in the dictionary\n", - " return hook\n", - " \n", - " def get_gradients(self):\n", - " return self.gradients\n", - " \n", - " def clear_gradients(self):\n", - " self.gradients = {}\n", - "\n", - "# Function to evaluate the model after each epoch/step\n", - "def evaluate(model, val_dataloader, criterion, device, vocab_size):\n", - " model.eval() # Set the model to evaluation mode\n", - " total_loss = 0\n", - " with torch.no_grad(): # Disable gradient calculation for validation\n", - " for batch in val_dataloader:\n", - " input_ids = batch['input_ids'].to(device)\n", - " labels = batch['labels'].to(device)\n", - "\n", - " # Forward pass for validation\n", - " logits = model(input_ids) # Shape: (batch_size, seq_len, vocab_size)\n", - " \n", - " # Calculate the loss\n", - " loss = criterion(logits.view(-1, vocab_size), labels.view(-1))\n", - " total_loss += loss.item()\n", - "\n", - " avg_val_loss = total_loss / len(val_dataloader)\n", - " 
return avg_val_loss" - ] - }, - { - "cell_type": "code", - "execution_count": 25, + "execution_count": 46, "metadata": {}, "outputs": [], "source": [ @@ -427,7 +347,7 @@ }, { "cell_type": "code", - "execution_count": 31, + "execution_count": 47, "metadata": {}, "outputs": [], "source": [ @@ -460,7 +380,58 @@ "cell_type": "code", "execution_count": null, "metadata": {}, - "outputs": [], + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Step 1 / 10241, Loss: 11.760147094726562\n", + "Step 2 / 10241, Loss: 11.744263648986816\n", + "Step 3 / 10241, Loss: 11.720343589782715\n", + "Step 4 / 10241, Loss: 11.64067268371582\n", + "Step 5 / 10241, Loss: 11.333736419677734\n", + "Step 5, Val_loss: 10.690058871212168\n", + "Activations for embedding: -0.0025795602705329657\n", + "Activations for lstm: -0.0010381790343672037\n", + "Activations for fc1: -0.07045605778694153\n", + "Activations for fc2: -0.679652988910675\n", + "Activations for : -0.679652988910675\n", + "Gradients for fc2: 6.027510937207733e-17\n", + "Gradients for fc1: 3.636175804899722e-08\n", + "Gradients for lstm: -3.4538189197519387e-08\n", + "Step 6 / 10241, Loss: 10.451458930969238\n", + "Step 7 / 10241, Loss: 10.109658241271973\n", + "Step 8 / 10241, Loss: 10.559635162353516\n", + "Step 9 / 10241, Loss: 9.650898933410645\n", + "Step 10 / 10241, Loss: 9.617280006408691\n", + "Step 10, Val_loss: 9.465246697776339\n", + "Activations for embedding: -0.003705302719026804\n", + "Activations for lstm: -0.008110555820167065\n", + "Activations for fc1: -0.14376381039619446\n", + "Activations for fc2: -2.851902961730957\n", + "Activations for : -2.851902961730957\n", + "Gradients for fc2: -2.2691806192661813e-16\n", + "Gradients for fc1: -5.234873583503941e-07\n", + "Gradients for lstm: 5.951231685230596e-08\n", + "Step 11 / 10241, Loss: 9.610464096069336\n" + ] + }, + { + "ename": "KeyboardInterrupt", + "evalue": "", + "output_type": "error", + "traceback": [ + 
"\u001b[1;31m---------------------------------------------------------------------------\u001b[0m", + "\u001b[1;31mKeyboardInterrupt\u001b[0m Traceback (most recent call last)", + "Cell \u001b[1;32mIn[48], line 35\u001b[0m\n\u001b[0;32m 32\u001b[0m loss \u001b[38;5;241m=\u001b[39m criterion(logits\u001b[38;5;241m.\u001b[39mview(\u001b[38;5;241m-\u001b[39m\u001b[38;5;241m1\u001b[39m, vocab_size), labels\u001b[38;5;241m.\u001b[39mview(\u001b[38;5;241m-\u001b[39m\u001b[38;5;241m1\u001b[39m))\n\u001b[0;32m 34\u001b[0m \u001b[38;5;66;03m# Backward pass and optimization\u001b[39;00m\n\u001b[1;32m---> 35\u001b[0m \u001b[43mloss\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mbackward\u001b[49m\u001b[43m(\u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m 36\u001b[0m optimizer\u001b[38;5;241m.\u001b[39mstep()\n\u001b[0;32m 39\u001b[0m total_loss \u001b[38;5;241m+\u001b[39m\u001b[38;5;241m=\u001b[39m loss\u001b[38;5;241m.\u001b[39mitem()\n", + "File \u001b[1;32mc:\\Users\\leo.breedt\\miniconda3\\envs\\neptune_scale_py_312_base\\Lib\\site-packages\\torch\\_tensor.py:626\u001b[0m, in \u001b[0;36mTensor.backward\u001b[1;34m(self, gradient, retain_graph, create_graph, inputs)\u001b[0m\n\u001b[0;32m 616\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m has_torch_function_unary(\u001b[38;5;28mself\u001b[39m):\n\u001b[0;32m 617\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m handle_torch_function(\n\u001b[0;32m 618\u001b[0m Tensor\u001b[38;5;241m.\u001b[39mbackward,\n\u001b[0;32m 619\u001b[0m (\u001b[38;5;28mself\u001b[39m,),\n\u001b[1;32m (...)\u001b[0m\n\u001b[0;32m 624\u001b[0m inputs\u001b[38;5;241m=\u001b[39minputs,\n\u001b[0;32m 625\u001b[0m )\n\u001b[1;32m--> 626\u001b[0m \u001b[43mtorch\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mautograd\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mbackward\u001b[49m\u001b[43m(\u001b[49m\n\u001b[0;32m 627\u001b[0m \u001b[43m \u001b[49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[43m,\u001b[49m\u001b[43m 
\u001b[49m\u001b[43mgradient\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mretain_graph\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mcreate_graph\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43minputs\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43minputs\u001b[49m\n\u001b[0;32m 628\u001b[0m \u001b[43m\u001b[49m\u001b[43m)\u001b[49m\n", + "File \u001b[1;32mc:\\Users\\leo.breedt\\miniconda3\\envs\\neptune_scale_py_312_base\\Lib\\site-packages\\torch\\autograd\\__init__.py:347\u001b[0m, in \u001b[0;36mbackward\u001b[1;34m(tensors, grad_tensors, retain_graph, create_graph, grad_variables, inputs)\u001b[0m\n\u001b[0;32m 342\u001b[0m retain_graph \u001b[38;5;241m=\u001b[39m create_graph\n\u001b[0;32m 344\u001b[0m \u001b[38;5;66;03m# The reason we repeat the same comment below is that\u001b[39;00m\n\u001b[0;32m 345\u001b[0m \u001b[38;5;66;03m# some Python versions print out the first line of a multi-line function\u001b[39;00m\n\u001b[0;32m 346\u001b[0m \u001b[38;5;66;03m# calls in the traceback and some print out the last line\u001b[39;00m\n\u001b[1;32m--> 347\u001b[0m \u001b[43m_engine_run_backward\u001b[49m\u001b[43m(\u001b[49m\n\u001b[0;32m 348\u001b[0m \u001b[43m \u001b[49m\u001b[43mtensors\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 349\u001b[0m \u001b[43m \u001b[49m\u001b[43mgrad_tensors_\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 350\u001b[0m \u001b[43m \u001b[49m\u001b[43mretain_graph\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 351\u001b[0m \u001b[43m \u001b[49m\u001b[43mcreate_graph\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 352\u001b[0m \u001b[43m \u001b[49m\u001b[43minputs\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 353\u001b[0m \u001b[43m \u001b[49m\u001b[43mallow_unreachable\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[38;5;28;43;01mTrue\u001b[39;49;00m\u001b[43m,\u001b[49m\n\u001b[0;32m 354\u001b[0m \u001b[43m 
\u001b[49m\u001b[43maccumulate_grad\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[38;5;28;43;01mTrue\u001b[39;49;00m\u001b[43m,\u001b[49m\n\u001b[0;32m 355\u001b[0m \u001b[43m\u001b[49m\u001b[43m)\u001b[49m\n", + "File \u001b[1;32mc:\\Users\\leo.breedt\\miniconda3\\envs\\neptune_scale_py_312_base\\Lib\\site-packages\\torch\\autograd\\graph.py:823\u001b[0m, in \u001b[0;36m_engine_run_backward\u001b[1;34m(t_outputs, *args, **kwargs)\u001b[0m\n\u001b[0;32m 821\u001b[0m unregister_hooks \u001b[38;5;241m=\u001b[39m _register_logging_hooks_on_whole_graph(t_outputs)\n\u001b[0;32m 822\u001b[0m \u001b[38;5;28;01mtry\u001b[39;00m:\n\u001b[1;32m--> 823\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[43mVariable\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_execution_engine\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mrun_backward\u001b[49m\u001b[43m(\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;66;43;03m# Calls into the C++ engine to run the backward pass\u001b[39;49;00m\n\u001b[0;32m 824\u001b[0m \u001b[43m \u001b[49m\u001b[43mt_outputs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\n\u001b[0;32m 825\u001b[0m \u001b[43m \u001b[49m\u001b[43m)\u001b[49m \u001b[38;5;66;03m# Calls into the C++ engine to run the backward pass\u001b[39;00m\n\u001b[0;32m 826\u001b[0m \u001b[38;5;28;01mfinally\u001b[39;00m:\n\u001b[0;32m 827\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m attach_logging_hooks:\n", + "File \u001b[1;32mc:\\Users\\leo.breedt\\miniconda3\\envs\\neptune_scale_py_312_base\\Lib\\site-packages\\torch\\autograd\\function.py:292\u001b[0m, in \u001b[0;36mBackwardCFunction.apply\u001b[1;34m(self, *args)\u001b[0m\n\u001b[0;32m 287\u001b[0m \u001b[38;5;28;01mclass\u001b[39;00m \u001b[38;5;21;01mBackwardCFunction\u001b[39;00m(_C\u001b[38;5;241m.\u001b[39m_FunctionBase, 
FunctionCtx, _HookMixin):\n\u001b[0;32m 288\u001b[0m \u001b[38;5;250m \u001b[39m\u001b[38;5;124mr\u001b[39m\u001b[38;5;124;03m\"\"\"\u001b[39;00m\n\u001b[0;32m 289\u001b[0m \u001b[38;5;124;03m This class is used for internal autograd work. Do not use.\u001b[39;00m\n\u001b[0;32m 290\u001b[0m \u001b[38;5;124;03m \"\"\"\u001b[39;00m\n\u001b[1;32m--> 292\u001b[0m \u001b[38;5;28;01mdef\u001b[39;00m \u001b[38;5;21mapply\u001b[39m(\u001b[38;5;28mself\u001b[39m, \u001b[38;5;241m*\u001b[39margs):\n\u001b[0;32m 293\u001b[0m \u001b[38;5;250m \u001b[39m\u001b[38;5;124mr\u001b[39m\u001b[38;5;124;03m\"\"\"\u001b[39;00m\n\u001b[0;32m 294\u001b[0m \u001b[38;5;124;03m Apply method used when executing this Node during the backward\u001b[39;00m\n\u001b[0;32m 295\u001b[0m \u001b[38;5;124;03m \"\"\"\u001b[39;00m\n\u001b[0;32m 296\u001b[0m \u001b[38;5;66;03m# _forward_cls is defined by derived class\u001b[39;00m\n\u001b[0;32m 297\u001b[0m \u001b[38;5;66;03m# The user should define either backward or vjp but never both.\u001b[39;00m\n", + "\u001b[1;31mKeyboardInterrupt\u001b[0m: " + ] + } + ], "source": [ "debug_metrics = {}\n", "\n", @@ -547,7 +518,7 @@ }, { "cell_type": "code", - "execution_count": 33, + "execution_count": 49, "metadata": {}, "outputs": [ { From 32d46f15f03e40c174ce00cb57fcf6267597d9af Mon Sep 17 00:00:00 2001 From: LeoRoccoBreedt Date: Tue, 11 Mar 2025 15:09:33 +0100 Subject: [PATCH 047/125] refactor: change attribute names for better readability --- .../pytorch/pytorch_text_model_debugging.ipynb | 18 +++++++++--------- 1 file changed, 9 insertions(+), 9 deletions(-) diff --git a/integrations-and-supported-tools/pytorch/pytorch_text_model_debugging.ipynb b/integrations-and-supported-tools/pytorch/pytorch_text_model_debugging.ipynb index 798c936..113801d 100644 --- a/integrations-and-supported-tools/pytorch/pytorch_text_model_debugging.ipynb +++ b/integrations-and-supported-tools/pytorch/pytorch_text_model_debugging.ipynb @@ -252,7 +252,7 @@ }, { "cell_type": "code", - 
"execution_count": 46, + "execution_count": null, "metadata": {}, "outputs": [], "source": [ @@ -481,24 +481,24 @@ " # Track activations\n", " activations = hook_manager.get_activations()\n", " for layer, activation in activations.items():\n", - " print(f\"Activations for {layer}: {activation[0].mean().item()}\")\n", - " debug_metrics[f\"layers/layer_{layer}/activation_mean\"] = activation[0].mean().item()\n", - " debug_metrics[f\"layers/layer_{layer}/activation_std\"] = activation[0].std().item()\n", + " if layer is not None:\n", + " debug_metrics[f\"debug/activation/{layer}_mean\"] = activation[0].mean().item()\n", + " debug_metrics[f\"debug/activation/{layer}_std\"] = activation[0].std().item()\n", "\n", " # Track gradients with hooks\n", " gradients = hook_manager.get_gradients()\n", " for layer, gradient in gradients.items():\n", - " debug_metrics[f\"grad_hooks/{layer}\"] = gradient.mean().item()\n", + " debug_metrics[f\"debug/gradient/{layer}_mean\"] = gradient.mean().item()\n", " print(f\"Gradients for {layer}: {gradient.mean().item()}\") # You can replace to use mean(), sum(), max() or min()\n", " # simplified_gradient = gradient.mean(dim=(0, 1))\n", " # print(f\"Summed Gradient for {layer}: {simplified_gradient}\")\n", "\n", " # Track gradients per layer at each epoch\n", - " for name, param in model.named_parameters():\n", + " for layer, param in model.named_parameters():\n", " if param is not None:\n", - " debug_metrics[f\"layers/layer_{name}_std\"] = param.grad.std().item()\n", - " debug_metrics[f\"layers/layer_{name}_mean\"] = param.grad.mean().item()\n", - " debug_metrics[f\"layers/layer_{name}_norm\"] = param.grad.norm().item() # L2 norm (Euclidean norm) of the gradients\n", + " debug_metrics[f\"debug/parameters/{layer}_std\"] = param.grad.std().item()\n", + " debug_metrics[f\"debug/parameters/{layer}_mean\"] = param.grad.mean().item()\n", + " debug_metrics[f\"debug/parameters/{layer}_norm\"] = param.grad.norm().item() # L2 norm (Euclidean norm) of 
the gradients\n", "\n", " # Output loss for this epoch\n", " run.log_metrics(\n", From 32a43b1f24caf9f08304b07093320c76000a7948 Mon Sep 17 00:00:00 2001 From: LeoRoccoBreedt Date: Wed, 12 Mar 2025 13:05:08 +0100 Subject: [PATCH 048/125] style: update sections and model architecture --- .../pytorch_text_model_debugging.ipynb | 242 +++++++++--------- 1 file changed, 121 insertions(+), 121 deletions(-) diff --git a/integrations-and-supported-tools/pytorch/pytorch_text_model_debugging.ipynb b/integrations-and-supported-tools/pytorch/pytorch_text_model_debugging.ipynb index 113801d..75881c7 100644 --- a/integrations-and-supported-tools/pytorch/pytorch_text_model_debugging.ipynb +++ b/integrations-and-supported-tools/pytorch/pytorch_text_model_debugging.ipynb @@ -10,14 +10,26 @@ "\n", "### Introduction\n", "\n", - "See how Neptune Scale can be used for pre-training models like foundation models by tracking hundreds of metrics. This example is designed to be used as a code recipe for you to re-use sections with your own code to edit to adapt to your own model training needs. \n", + "See how Neptune Scale can be used for pre-training models like foundation models by tracking hundreds of metrics. This example is designed to be used as a code recipe for you to re-use sections with your own code to edit or adapt to your own model training needs. \n", "\n", "This guide will show you how to:\n", "- Initialize the Neptune Run object and log configuration parameters\n", "- Log standard loss and accuracy metrics to Neptune\n", - "- Log debugging metrics during model training such as;\n", - " * Activations per layer\n", - " * Gradients (mean and std) per layer" + "- Log _**debugging metrics**_ per layer during model training such as;\n", + " * Activations\n", + " * Gradients\n", + " * Parameters (Weights and Biases)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Before you start\n", + "\n", + " 1. Create a Neptune Scale account. 
[Register →](https://neptune.ai/early-access)\n", + " 2. Create a Neptune project that you will use for tracking metadata. For instructions, see [Projects](https://docs-beta.neptune.ai/projects/) in the Neptune Scale docs.\n", + " 3. Install and configure Neptune Scale for logging metadata. For instructions, see [Get started](https://docs-beta.neptune.ai/setup) in the Neptune Scale docs." ] }, { @@ -39,7 +51,7 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 1, "metadata": {}, "outputs": [ { @@ -83,8 +95,8 @@ "\n", "params = {\n", " \"optimizer\": \"Adam\",\n", - " \"batch_size\": 3,\n", - " \"learning_rate\": 0.001,\n", + " \"batch_size\": 8,\n", + " \"learning_rate\": 0.01,\n", " \"epochs\": 5, \n", " \"device\": torch.device(\"cuda\" if torch.cuda.is_available() else \"cpu\"),\n", " \"input_features\": 256,\n", @@ -100,38 +112,20 @@ "metadata": {}, "source": [ "## Download or use next token prediction dataset\n", - "The dataset used in this example is taken from [HuggingFace](https://huggingface.co/datasets/Na0s/Next_Token_Prediction_dataset). In this example, you can increase the size of the dataset to test the logging capabilities of Neptune, but note that increasing the dataset size will increase the time taken for the full dataset to download. The current setup only downloads the first 10 parquet files from the Hugging Face public dataset. " + "The dataset used in this example is taken from [HuggingFace](https://huggingface.co/datasets/Na0s/Next_Token_Prediction_dataset). In this example, you can increase the size of the dataset to test the logging capabilities of Neptune, but note that increasing the dataset size will increase the time taken for the full dataset to download. The current setup only downloads the first parquet file from the Hugging Face public dataset. 
The validation dataset is also reduced to reduce the training loop execution time - you can increase the validation size by changing the `test_size` key-value pair in the `train_test_split()` method from HF. " ] }, { "cell_type": "code", - "execution_count": 10, + "execution_count": 20, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ - "DatasetDict({\n", - " train: Dataset({\n", - " features: ['text', 'meta', '__index_level_0__', 'input_ids', 'attention_mask', 'labels'],\n", - " num_rows: 81926\n", - " })\n", - " validation: Dataset({\n", - " features: ['text', 'meta', '__index_level_0__', 'input_ids', 'attention_mask', 'labels'],\n", - " num_rows: 9347\n", - " })\n", - "})\n", - "DatasetDict({\n", - " train: Dataset({\n", - " features: ['text', 'meta', '__index_level_0__', 'input_ids', 'attention_mask', 'labels'],\n", - " num_rows: 8412\n", - " })\n", - " test: Dataset({\n", - " features: ['text', 'meta', '__index_level_0__', 'input_ids', 'attention_mask', 'labels'],\n", - " num_rows: 935\n", - " })\n", - "})\n" + "Training samples: 81926 \n", + "Validation samples: 935\n" ] } ], @@ -144,13 +138,21 @@ "data_subset = load_dataset(\"parquet\", data_files = data_files, num_proc=4)\n", "# validation_subset = load_dataset(\"parquet\", data_files = {\"validation\": base_url + \"validation-00000-of-00001.parquet\"}, num_proc=4, split=[\"validation[:5%]\"])\n", "validation_subset = data_subset.get(\"validation\").train_test_split(test_size=0.1)\n", - "print(data_subset)\n", - "print(validation_subset)" + "print(f\"Training samples: {data_subset[\"train\"].num_rows} \\nValidation samples: {validation_subset[\"test\"].num_rows}\")" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Create DataLoader Objects\n", + "* To execute the models with PyTorch, we convert the training and validation datasets to tensors and then setup DataLoader for easier batching in our training loop.\n", + "* The model architecture 
requires the vocabulary size as an input and this we calcualte the max token from the dataset." ] }, { "cell_type": "code", - "execution_count": 11, + "execution_count": 21, "metadata": {}, "outputs": [ { @@ -165,18 +167,12 @@ "train_subset = data_subset[\"train\"].with_format(type=\"torch\", columns=[\"text\", \"input_ids\", \"labels\"]) # HF provides methods to convert datatypes to tensors\n", "validation_subset = validation_subset[\"test\"].with_format(type=\"torch\", columns=[\"text\", \"input_ids\", \"labels\"]) # HF provides methods to convert datatypes to tensors\n", "\n", - "train_dataloader = DataLoader(train_subset, batch_size=8, shuffle=True)\n", - "val_dataloader = DataLoader(validation_subset, batch_size=8, shuffle=True)\n", + "train_dataloader = DataLoader(train_subset, batch_size=params[\"batch_size\"], shuffle=True)\n", + "val_dataloader = DataLoader(validation_subset, batch_size=params[\"batch_size\"], shuffle=True)\n", "\n", "# Determine the vocab size of the dataset\n", "# Flatten the list of tokenized sentences into one long list of token IDs\n", - "all_tokens = [token for sentence in data_subset[\"train\"][\"input_ids\"] for token in sentence]\n", - "\n", - "# Get unique token IDs\n", - "# unique_tokens = set(all_tokens)\n", - "\n", - "# Vocab size is the number of unique tokens\n", - "vocab_size = max(all_tokens) + 1 # Add 1 since token ID's start from zero\n", + "vocab_size = max([token for sentence in data_subset[\"train\"][\"input_ids\"] for token in sentence]) + 1\n", "params[\"vocab_size\"] = vocab_size\n", "print(f\"Vocabulary size: {vocab_size}\")" ] @@ -207,52 +203,33 @@ "\n", "During model training, capturing valuable information from each layer can offer critical insights into the model's learning process, identify potential issues, and guide improvements. Monitoring certain metrics and activations helps diagnose common problems such as vanishing gradients, overfitting, or underfitting. 
Below are key metrics to track from each layer:\n", "\n", - "### Key Metrics to Capture from Each Layer:\n", - "\n", - "- **Activations**\n", - " - **What it shows**: Provides insight into how the model is processing data. Dead or exploding activations can indicate issues with training stability.\n", - " - **How to capture**: Use hooks to capture activations after each layer.\n", - "\n", - "- **Gradients**\n", - " - **What it shows**: Essential for diagnosing vanishing or exploding gradients. Small gradients may indicate vanishing gradients, while large ones can signal instability.\n", - " - **How to capture**: Use hooks to capture gradients during backpropagation.\n", - "\n", - "- **Weights and Biases**\n", - " - **What it shows**: Tracks how the model’s parameters evolve during training. Large or small weights may indicate the need for better regularization or adjustments in learning rate.\n", - " - **How to capture**: Extract directly from the model’s parameters.\n", - "\n", - "- **Layer-wise Loss**\n", - " - **What it shows**: Identifies which parts of the network contribute more to the overall loss, aiding debugging and optimization.\n", - " - **How to capture**: Monitor outputs from each layer and compare with the target.\n", - "\n", - "- **Learning Rate per Layer**\n", - " - **What it shows**: Helpful if using techniques like Layer-wise Learning Rate Decay (L2LRD). 
Tracking this can provide insight into the layer-specific learning rate.\n", - " - **How to capture**: Manually track based on optimizer settings.\n", - "\n", - "- **Layer Output Norms**\n", - " - **What it shows**: The L2-norm of layer outputs can highlight issues like gradient explosion or vanishing gradients.\n", - " - **How to capture**: Compute the L2-norm for each layer’s output.\n", - "\n", - "- **Activation Distributions**\n", - " - **What it shows**: Helps diagnose saturation issues, especially with ReLU activations that may lead to dead neurons.\n", - " - **How to capture**: Visualize or compute statistical summaries using tools like matplotlib or seaborn.\n", - "\n", - "- **Feature Maps (for Convolutional Layers)**\n", - " - **What it shows**: Offers insights into how convolutional layers detect specific patterns in the data.\n", - " - **How to capture**: Visualize feature maps after convolutional layers using libraries like matplotlib.\n" + "### Key metrics to capture from each layer:\n", + "\n", + "| **Metric** | **Demonstrated in Notebook** | **What it Shows** | **How to Capture** |\n", + "|-----------------------------------|--------------------------------------|--------------------------------------------------------------------------------------------------------------|---------------------------------------------------------------|\n", + "| **Activations** | Yes | Provides insight into how the model is processing data. Dead or exploding activations can indicate issues with training stability. | Use hooks to capture activations after each layer. |\n", + "| **Gradients** | Yes | Essential for diagnosing vanishing or exploding gradients. Small gradients may indicate vanishing gradients, while large ones can signal instability. | Use hooks to capture gradients during backpropagation. |\n", + "| **Weights and Biases** | Yes | Tracks how the model’s parameters evolve during training. 
Large or small weights may indicate the need for better regularization or adjustments in learning rate. | Extract directly from the model’s parameters. |\n", + "| **Layer-wise Loss** | No | Identifies which parts of the network contribute more to the overall loss, aiding debugging and optimization. | Monitor outputs from each layer and compare with the target. |\n", + "| **Learning Rate per Layer** | No | Helpful if using techniques like Layer-wise Learning Rate Decay (L2LRD). Tracking this can provide insight into the layer-specific learning rate. | Manually track based on optimizer settings. |\n", + "| **Layer Output Norms** | No | The L2-norm of layer outputs can highlight issues like gradient explosion or vanishing gradients. | Compute the L2-norm for each layer’s output. |\n", + "| **Activation Distributions** | No | Helps diagnose saturation issues, especially with ReLU activations that may lead to dead neurons. | Visualize or compute statistical summaries using tools like matplotlib or seaborn. |\n", + "| **Feature Maps (for Convolutional Layers)** | No | Offers insights into how convolutional layers detect specific patterns in the data. | Visualize feature maps after convolutional layers using libraries like matplotlib. |\n" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ - "### Define PyTorch Model Architecture and Helper functions\n", - "We define a simple LLM model architecture using PyTorch. Since this is a text-based example, we use a mebedding layer, an LSTM layer and a fully connected layer. This architecture can be adjusted to your needs and increased in size when testing the workflow." + "### Define PyTorch Model Architecture and Helpers\n", + "We define a simple LLM model architecture using PyTorch. Since this is a text-based example, we use an embedding layer, a LSTM layer and a fully connected layer. This architecture can be adjusted to your needs and increased in size when testing the workflow. 
To increase the size of the LSTM layers, change the `num_layers` parameter in the parameters dictionary or to increase the number of fully connected layers, update the mode architecture itself.\n", + "\n", + "This section also creates a `HookManager` class that allows us to capture the **activations** and **gradients** from each layer. You do not need to update this class as it will dynamically update according to the architecture of the model." ] }, { "cell_type": "code", - "execution_count": null, + "execution_count": 24, "metadata": {}, "outputs": [], "source": [ @@ -261,18 +238,14 @@ " def __init__(self, vocab_size, embed_size, hidden_size, num_layers):\n", " super(SimpleLLM, self).__init__()\n", " self.embedding = nn.Embedding(vocab_size, embed_size)\n", - " self.lstm = nn.LSTM(embed_size, 512, num_layers = num_layers, dropout = 0.3, batch_first=True)\n", - " self.fc1 = nn.Linear(512, hidden_size)\n", - " self.fc2 = nn.Linear(hidden_size, vocab_size)\n", + " self.lstm = nn.LSTM(embed_size, hidden_size, num_layers = num_layers, batch_first=True)\n", + " self.fc1 = nn.Linear(hidden_size, vocab_size)\n", " \n", " def forward(self, x):\n", " x = self.embedding(x)\n", " lstm_out, _ = self.lstm(x) # LSTM returns output and hidden/cell state tuple\n", " out = self.fc1(lstm_out) # Use the last output from the LSTM\n", - " out = self.fc2(out)\n", " return out\n", - " \n", - "# model = SimpleLLM(params[\"vocab_size\"], params[\"embed_size\"], params[\"hidden_size\"], params[\"num_lstm_layers\"])\n", "\n", "# A class to manage hooks for activations and gradients\n", "class HookManager:\n", @@ -342,12 +315,13 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "Everytime a new LSTM layer is added when updating the `num_lstm_layers` parameter is changed, 12 new metrics will be logged which correspond to mean, std and norm that are calculated. 
" + "## Setup Model Training\n", + "### Initialize Neptune Run object and Log Hyperparameters" ] }, { "cell_type": "code", - "execution_count": 47, + "execution_count": 25, "metadata": {}, "outputs": [], "source": [ @@ -376,44 +350,65 @@ "run.add_tags(tags=[\"text\", \"LLM\", \"Simple\"])" ] }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Execute model training loop\n", + "In this loop, we configure the `HookManager` and register the hooks. In your training loop, you will need to use the `get_` methods to retrieve the stored values for the activations and gradients after the forward and backward passes are complete. Below, you can see a pseudo implementation:\n", + "\n", + "```python\n", + "# Initialize model\n", + "model = your_ModelClass()\n", + "# Register hooks\n", + "hm = HookManager(model)\n", + "hm.register_hooks()\n", + "\n", + "# Training loop\n", + "for epoch in range(3):\n", + " \n", + " # Forward pass, e.g. model.train()\n", + " # Backward pass, e.g. loss.backward()\n", + " \n", + " activations = hm.get_activations()\n", + " gradients = hm.get_gradients()\n", + "\n", + " # Log values (mean, std, etc.) to Neptune\n", + "```\n", + "\n", + "Important: The `HookManager` class can be used in your own training script as it only accepts a model object as input." 
+ ] + }, { "cell_type": "code", - "execution_count": null, + "execution_count": 26, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ - "Step 1 / 10241, Loss: 11.760147094726562\n", - "Step 2 / 10241, Loss: 11.744263648986816\n", - "Step 3 / 10241, Loss: 11.720343589782715\n", - "Step 4 / 10241, Loss: 11.64067268371582\n", - "Step 5 / 10241, Loss: 11.333736419677734\n", - "Step 5, Val_loss: 10.690058871212168\n", - "Activations for embedding: -0.0025795602705329657\n", - "Activations for lstm: -0.0010381790343672037\n", - "Activations for fc1: -0.07045605778694153\n", - "Activations for fc2: -0.679652988910675\n", - "Activations for : -0.679652988910675\n", - "Gradients for fc2: 6.027510937207733e-17\n", - "Gradients for fc1: 3.636175804899722e-08\n", - "Gradients for lstm: -3.4538189197519387e-08\n", - "Step 6 / 10241, Loss: 10.451458930969238\n", - "Step 7 / 10241, Loss: 10.109658241271973\n", - "Step 8 / 10241, Loss: 10.559635162353516\n", - "Step 9 / 10241, Loss: 9.650898933410645\n", - "Step 10 / 10241, Loss: 9.617280006408691\n", - "Step 10, Val_loss: 9.465246697776339\n", - "Activations for embedding: -0.003705302719026804\n", - "Activations for lstm: -0.008110555820167065\n", - "Activations for fc1: -0.14376381039619446\n", - "Activations for fc2: -2.851902961730957\n", - "Activations for : -2.851902961730957\n", - "Gradients for fc2: -2.2691806192661813e-16\n", - "Gradients for fc1: -5.234873583503941e-07\n", - "Gradients for lstm: 5.951231685230596e-08\n", - "Step 11 / 10241, Loss: 9.610464096069336\n" + "Step 1 / 10241, Loss: 11.760001182556152\n", + "Step 2 / 10241, Loss: 11.625699996948242\n", + "Step 3 / 10241, Loss: 9.880274772644043\n", + "Step 4 / 10241, Loss: 9.80041790008545\n", + "Step 5 / 10241, Loss: 10.201050758361816\n", + "Step 5, Val_loss: 9.920373557979225\n", + "Gradients for fc1: -3.875334975882508e-15\n", + "Gradients for lstm: -2.1588371055258904e-06\n", + "Step 6 / 10241, Loss: 9.584494590759277\n", 
+ "Step 7 / 10241, Loss: 9.479751586914062\n", + "Step 8 / 10241, Loss: 10.132659912109375\n", + "Step 9 / 10241, Loss: 10.202069282531738\n", + "Step 10 / 10241, Loss: 11.208635330200195\n", + "Step 10, Val_loss: 10.601182611579569\n", + "Gradients for fc1: 4.1997570650103774e-15\n", + "Gradients for lstm: -2.5467079467489384e-06\n", + "Step 11 / 10241, Loss: 10.122359275817871\n", + "Step 12 / 10241, Loss: 10.62948226928711\n", + "Step 13 / 10241, Loss: 10.538959503173828\n", + "Step 14 / 10241, Loss: 11.84500503540039\n", + "Step 15 / 10241, Loss: 10.522750854492188\n" ] }, { @@ -423,11 +418,16 @@ "traceback": [ "\u001b[1;31m---------------------------------------------------------------------------\u001b[0m", "\u001b[1;31mKeyboardInterrupt\u001b[0m Traceback (most recent call last)", - "Cell \u001b[1;32mIn[48], line 35\u001b[0m\n\u001b[0;32m 32\u001b[0m loss \u001b[38;5;241m=\u001b[39m criterion(logits\u001b[38;5;241m.\u001b[39mview(\u001b[38;5;241m-\u001b[39m\u001b[38;5;241m1\u001b[39m, vocab_size), labels\u001b[38;5;241m.\u001b[39mview(\u001b[38;5;241m-\u001b[39m\u001b[38;5;241m1\u001b[39m))\n\u001b[0;32m 34\u001b[0m \u001b[38;5;66;03m# Backward pass and optimization\u001b[39;00m\n\u001b[1;32m---> 35\u001b[0m \u001b[43mloss\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mbackward\u001b[49m\u001b[43m(\u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m 36\u001b[0m optimizer\u001b[38;5;241m.\u001b[39mstep()\n\u001b[0;32m 39\u001b[0m total_loss \u001b[38;5;241m+\u001b[39m\u001b[38;5;241m=\u001b[39m loss\u001b[38;5;241m.\u001b[39mitem()\n", - "File \u001b[1;32mc:\\Users\\leo.breedt\\miniconda3\\envs\\neptune_scale_py_312_base\\Lib\\site-packages\\torch\\_tensor.py:626\u001b[0m, in \u001b[0;36mTensor.backward\u001b[1;34m(self, gradient, retain_graph, create_graph, inputs)\u001b[0m\n\u001b[0;32m 616\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m has_torch_function_unary(\u001b[38;5;28mself\u001b[39m):\n\u001b[0;32m 617\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m 
handle_torch_function(\n\u001b[0;32m 618\u001b[0m Tensor\u001b[38;5;241m.\u001b[39mbackward,\n\u001b[0;32m 619\u001b[0m (\u001b[38;5;28mself\u001b[39m,),\n\u001b[1;32m (...)\u001b[0m\n\u001b[0;32m 624\u001b[0m inputs\u001b[38;5;241m=\u001b[39minputs,\n\u001b[0;32m 625\u001b[0m )\n\u001b[1;32m--> 626\u001b[0m \u001b[43mtorch\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mautograd\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mbackward\u001b[49m\u001b[43m(\u001b[49m\n\u001b[0;32m 627\u001b[0m \u001b[43m \u001b[49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mgradient\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mretain_graph\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mcreate_graph\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43minputs\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43minputs\u001b[49m\n\u001b[0;32m 628\u001b[0m \u001b[43m\u001b[49m\u001b[43m)\u001b[49m\n", - "File \u001b[1;32mc:\\Users\\leo.breedt\\miniconda3\\envs\\neptune_scale_py_312_base\\Lib\\site-packages\\torch\\autograd\\__init__.py:347\u001b[0m, in \u001b[0;36mbackward\u001b[1;34m(tensors, grad_tensors, retain_graph, create_graph, grad_variables, inputs)\u001b[0m\n\u001b[0;32m 342\u001b[0m retain_graph \u001b[38;5;241m=\u001b[39m create_graph\n\u001b[0;32m 344\u001b[0m \u001b[38;5;66;03m# The reason we repeat the same comment below is that\u001b[39;00m\n\u001b[0;32m 345\u001b[0m \u001b[38;5;66;03m# some Python versions print out the first line of a multi-line function\u001b[39;00m\n\u001b[0;32m 346\u001b[0m \u001b[38;5;66;03m# calls in the traceback and some print out the last line\u001b[39;00m\n\u001b[1;32m--> 347\u001b[0m \u001b[43m_engine_run_backward\u001b[49m\u001b[43m(\u001b[49m\n\u001b[0;32m 348\u001b[0m \u001b[43m \u001b[49m\u001b[43mtensors\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 349\u001b[0m \u001b[43m \u001b[49m\u001b[43mgrad_tensors_\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 
350\u001b[0m \u001b[43m \u001b[49m\u001b[43mretain_graph\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 351\u001b[0m \u001b[43m \u001b[49m\u001b[43mcreate_graph\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 352\u001b[0m \u001b[43m \u001b[49m\u001b[43minputs\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 353\u001b[0m \u001b[43m \u001b[49m\u001b[43mallow_unreachable\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[38;5;28;43;01mTrue\u001b[39;49;00m\u001b[43m,\u001b[49m\n\u001b[0;32m 354\u001b[0m \u001b[43m \u001b[49m\u001b[43maccumulate_grad\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[38;5;28;43;01mTrue\u001b[39;49;00m\u001b[43m,\u001b[49m\n\u001b[0;32m 355\u001b[0m \u001b[43m\u001b[49m\u001b[43m)\u001b[49m\n", - "File \u001b[1;32mc:\\Users\\leo.breedt\\miniconda3\\envs\\neptune_scale_py_312_base\\Lib\\site-packages\\torch\\autograd\\graph.py:823\u001b[0m, in \u001b[0;36m_engine_run_backward\u001b[1;34m(t_outputs, *args, **kwargs)\u001b[0m\n\u001b[0;32m 821\u001b[0m unregister_hooks \u001b[38;5;241m=\u001b[39m _register_logging_hooks_on_whole_graph(t_outputs)\n\u001b[0;32m 822\u001b[0m \u001b[38;5;28;01mtry\u001b[39;00m:\n\u001b[1;32m--> 823\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[43mVariable\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_execution_engine\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mrun_backward\u001b[49m\u001b[43m(\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;66;43;03m# Calls into the C++ engine to run the backward pass\u001b[39;49;00m\n\u001b[0;32m 824\u001b[0m \u001b[43m \u001b[49m\u001b[43mt_outputs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\n\u001b[0;32m 825\u001b[0m \u001b[43m \u001b[49m\u001b[43m)\u001b[49m \u001b[38;5;66;03m# Calls into the C++ engine to run the backward pass\u001b[39;00m\n\u001b[0;32m 826\u001b[0m 
\u001b[38;5;28;01mfinally\u001b[39;00m:\n\u001b[0;32m 827\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m attach_logging_hooks:\n", - "File \u001b[1;32mc:\\Users\\leo.breedt\\miniconda3\\envs\\neptune_scale_py_312_base\\Lib\\site-packages\\torch\\autograd\\function.py:292\u001b[0m, in \u001b[0;36mBackwardCFunction.apply\u001b[1;34m(self, *args)\u001b[0m\n\u001b[0;32m 287\u001b[0m \u001b[38;5;28;01mclass\u001b[39;00m \u001b[38;5;21;01mBackwardCFunction\u001b[39;00m(_C\u001b[38;5;241m.\u001b[39m_FunctionBase, FunctionCtx, _HookMixin):\n\u001b[0;32m 288\u001b[0m \u001b[38;5;250m \u001b[39m\u001b[38;5;124mr\u001b[39m\u001b[38;5;124;03m\"\"\"\u001b[39;00m\n\u001b[0;32m 289\u001b[0m \u001b[38;5;124;03m This class is used for internal autograd work. Do not use.\u001b[39;00m\n\u001b[0;32m 290\u001b[0m \u001b[38;5;124;03m \"\"\"\u001b[39;00m\n\u001b[1;32m--> 292\u001b[0m \u001b[38;5;28;01mdef\u001b[39;00m \u001b[38;5;21mapply\u001b[39m(\u001b[38;5;28mself\u001b[39m, \u001b[38;5;241m*\u001b[39margs):\n\u001b[0;32m 293\u001b[0m \u001b[38;5;250m \u001b[39m\u001b[38;5;124mr\u001b[39m\u001b[38;5;124;03m\"\"\"\u001b[39;00m\n\u001b[0;32m 294\u001b[0m \u001b[38;5;124;03m Apply method used when executing this Node during the backward\u001b[39;00m\n\u001b[0;32m 295\u001b[0m \u001b[38;5;124;03m \"\"\"\u001b[39;00m\n\u001b[0;32m 296\u001b[0m \u001b[38;5;66;03m# _forward_cls is defined by derived class\u001b[39;00m\n\u001b[0;32m 297\u001b[0m \u001b[38;5;66;03m# The user should define either backward or vjp but never both.\u001b[39;00m\n", + "Cell \u001b[1;32mIn[26], line 43\u001b[0m\n\u001b[0;32m 40\u001b[0m \u001b[38;5;28mprint\u001b[39m(\u001b[38;5;124mf\u001b[39m\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mStep \u001b[39m\u001b[38;5;132;01m{\u001b[39;00mstep_counter\u001b[38;5;132;01m}\u001b[39;00m\u001b[38;5;124m / \u001b[39m\u001b[38;5;132;01m{\u001b[39;00m\u001b[38;5;28mlen\u001b[39m(train_dataloader)\u001b[38;5;132;01m}\u001b[39;00m\u001b[38;5;124m, Loss: 
\u001b[39m\u001b[38;5;132;01m{\u001b[39;00mloss\u001b[38;5;241m.\u001b[39mitem()\u001b[38;5;132;01m}\u001b[39;00m\u001b[38;5;124m\"\u001b[39m)\n\u001b[0;32m 42\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m step_counter \u001b[38;5;241m%\u001b[39m \u001b[38;5;241m5\u001b[39m \u001b[38;5;241m==\u001b[39m \u001b[38;5;241m0\u001b[39m: \u001b[38;5;66;03m# Do not need to log validation at every step, although we can\u001b[39;00m\n\u001b[1;32m---> 43\u001b[0m val_loss \u001b[38;5;241m=\u001b[39m \u001b[43mevaluate\u001b[49m\u001b[43m(\u001b[49m\u001b[43mmodel\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mval_dataloader\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mcriterion\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mdevice\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mvocab_size\u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m 44\u001b[0m \u001b[38;5;28mprint\u001b[39m(\u001b[38;5;124mf\u001b[39m\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mStep \u001b[39m\u001b[38;5;132;01m{\u001b[39;00mstep_counter\u001b[38;5;132;01m}\u001b[39;00m\u001b[38;5;124m, Val_loss: \u001b[39m\u001b[38;5;132;01m{\u001b[39;00mval_loss\u001b[38;5;132;01m}\u001b[39;00m\u001b[38;5;124m\"\u001b[39m)\n\u001b[0;32m 46\u001b[0m \u001b[38;5;66;03m# Track activations\u001b[39;00m\n", + "Cell \u001b[1;32mIn[24], line 69\u001b[0m, in \u001b[0;36mevaluate\u001b[1;34m(model, val_dataloader, criterion, device, vocab_size)\u001b[0m\n\u001b[0;32m 66\u001b[0m labels \u001b[38;5;241m=\u001b[39m batch[\u001b[38;5;124m'\u001b[39m\u001b[38;5;124mlabels\u001b[39m\u001b[38;5;124m'\u001b[39m]\u001b[38;5;241m.\u001b[39mto(device)\n\u001b[0;32m 68\u001b[0m \u001b[38;5;66;03m# Forward pass for validation\u001b[39;00m\n\u001b[1;32m---> 69\u001b[0m logits \u001b[38;5;241m=\u001b[39m \u001b[43mmodel\u001b[49m\u001b[43m(\u001b[49m\u001b[43minput_ids\u001b[49m\u001b[43m)\u001b[49m \u001b[38;5;66;03m# Shape: (batch_size, seq_len, vocab_size)\u001b[39;00m\n\u001b[0;32m 
71\u001b[0m \u001b[38;5;66;03m# Calculate the loss\u001b[39;00m\n\u001b[0;32m 72\u001b[0m loss \u001b[38;5;241m=\u001b[39m criterion(logits\u001b[38;5;241m.\u001b[39mview(\u001b[38;5;241m-\u001b[39m\u001b[38;5;241m1\u001b[39m, vocab_size), labels\u001b[38;5;241m.\u001b[39mview(\u001b[38;5;241m-\u001b[39m\u001b[38;5;241m1\u001b[39m))\n", + "File \u001b[1;32mc:\\Users\\leo.breedt\\miniconda3\\envs\\neptune_scale_py_312_base\\Lib\\site-packages\\torch\\nn\\modules\\module.py:1739\u001b[0m, in \u001b[0;36mModule._wrapped_call_impl\u001b[1;34m(self, *args, **kwargs)\u001b[0m\n\u001b[0;32m 1737\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_compiled_call_impl(\u001b[38;5;241m*\u001b[39margs, \u001b[38;5;241m*\u001b[39m\u001b[38;5;241m*\u001b[39mkwargs) \u001b[38;5;66;03m# type: ignore[misc]\u001b[39;00m\n\u001b[0;32m 1738\u001b[0m \u001b[38;5;28;01melse\u001b[39;00m:\n\u001b[1;32m-> 1739\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_call_impl\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n", + "File \u001b[1;32mc:\\Users\\leo.breedt\\miniconda3\\envs\\neptune_scale_py_312_base\\Lib\\site-packages\\torch\\nn\\modules\\module.py:1845\u001b[0m, in \u001b[0;36mModule._call_impl\u001b[1;34m(self, *args, **kwargs)\u001b[0m\n\u001b[0;32m 1842\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m inner()\n\u001b[0;32m 1844\u001b[0m \u001b[38;5;28;01mtry\u001b[39;00m:\n\u001b[1;32m-> 1845\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[43minner\u001b[49m\u001b[43m(\u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m 1846\u001b[0m \u001b[38;5;28;01mexcept\u001b[39;00m \u001b[38;5;167;01mException\u001b[39;00m:\n\u001b[0;32m 1847\u001b[0m \u001b[38;5;66;03m# run 
always called hooks if they have not already been run\u001b[39;00m\n\u001b[0;32m 1848\u001b[0m \u001b[38;5;66;03m# For now only forward hooks have the always_call option but perhaps\u001b[39;00m\n\u001b[0;32m 1849\u001b[0m \u001b[38;5;66;03m# this functionality should be added to full backward hooks as well.\u001b[39;00m\n\u001b[0;32m 1850\u001b[0m \u001b[38;5;28;01mfor\u001b[39;00m hook_id, hook \u001b[38;5;129;01min\u001b[39;00m _global_forward_hooks\u001b[38;5;241m.\u001b[39mitems():\n", + "File \u001b[1;32mc:\\Users\\leo.breedt\\miniconda3\\envs\\neptune_scale_py_312_base\\Lib\\site-packages\\torch\\nn\\modules\\module.py:1793\u001b[0m, in \u001b[0;36mModule._call_impl..inner\u001b[1;34m()\u001b[0m\n\u001b[0;32m 1790\u001b[0m bw_hook \u001b[38;5;241m=\u001b[39m BackwardHook(\u001b[38;5;28mself\u001b[39m, full_backward_hooks, backward_pre_hooks)\n\u001b[0;32m 1791\u001b[0m args \u001b[38;5;241m=\u001b[39m bw_hook\u001b[38;5;241m.\u001b[39msetup_input_hook(args)\n\u001b[1;32m-> 1793\u001b[0m result \u001b[38;5;241m=\u001b[39m \u001b[43mforward_call\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m 1794\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m _global_forward_hooks \u001b[38;5;129;01mor\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_forward_hooks:\n\u001b[0;32m 1795\u001b[0m \u001b[38;5;28;01mfor\u001b[39;00m hook_id, hook \u001b[38;5;129;01min\u001b[39;00m (\n\u001b[0;32m 1796\u001b[0m \u001b[38;5;241m*\u001b[39m_global_forward_hooks\u001b[38;5;241m.\u001b[39mitems(),\n\u001b[0;32m 1797\u001b[0m \u001b[38;5;241m*\u001b[39m\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_forward_hooks\u001b[38;5;241m.\u001b[39mitems(),\n\u001b[0;32m 1798\u001b[0m ):\n\u001b[0;32m 1799\u001b[0m \u001b[38;5;66;03m# mark that always called hook is 
run\u001b[39;00m\n", + "Cell \u001b[1;32mIn[24], line 12\u001b[0m, in \u001b[0;36mSimpleLLM.forward\u001b[1;34m(self, x)\u001b[0m\n\u001b[0;32m 10\u001b[0m x \u001b[38;5;241m=\u001b[39m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39membedding(x)\n\u001b[0;32m 11\u001b[0m lstm_out, _ \u001b[38;5;241m=\u001b[39m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mlstm(x) \u001b[38;5;66;03m# LSTM returns output and hidden/cell state tuple\u001b[39;00m\n\u001b[1;32m---> 12\u001b[0m out \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mfc1\u001b[49m\u001b[43m(\u001b[49m\u001b[43mlstm_out\u001b[49m\u001b[43m)\u001b[49m \u001b[38;5;66;03m# Use the last output from the LSTM\u001b[39;00m\n\u001b[0;32m 13\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m out\n", + "File \u001b[1;32mc:\\Users\\leo.breedt\\miniconda3\\envs\\neptune_scale_py_312_base\\Lib\\site-packages\\torch\\nn\\modules\\module.py:1739\u001b[0m, in \u001b[0;36mModule._wrapped_call_impl\u001b[1;34m(self, *args, **kwargs)\u001b[0m\n\u001b[0;32m 1737\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_compiled_call_impl(\u001b[38;5;241m*\u001b[39margs, \u001b[38;5;241m*\u001b[39m\u001b[38;5;241m*\u001b[39mkwargs) \u001b[38;5;66;03m# type: ignore[misc]\u001b[39;00m\n\u001b[0;32m 1738\u001b[0m \u001b[38;5;28;01melse\u001b[39;00m:\n\u001b[1;32m-> 1739\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_call_impl\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n", + "File \u001b[1;32mc:\\Users\\leo.breedt\\miniconda3\\envs\\neptune_scale_py_312_base\\Lib\\site-packages\\torch\\nn\\modules\\module.py:1845\u001b[0m, in 
\u001b[0;36mModule._call_impl\u001b[1;34m(self, *args, **kwargs)\u001b[0m\n\u001b[0;32m 1842\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m inner()\n\u001b[0;32m 1844\u001b[0m \u001b[38;5;28;01mtry\u001b[39;00m:\n\u001b[1;32m-> 1845\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[43minner\u001b[49m\u001b[43m(\u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m 1846\u001b[0m \u001b[38;5;28;01mexcept\u001b[39;00m \u001b[38;5;167;01mException\u001b[39;00m:\n\u001b[0;32m 1847\u001b[0m \u001b[38;5;66;03m# run always called hooks if they have not already been run\u001b[39;00m\n\u001b[0;32m 1848\u001b[0m \u001b[38;5;66;03m# For now only forward hooks have the always_call option but perhaps\u001b[39;00m\n\u001b[0;32m 1849\u001b[0m \u001b[38;5;66;03m# this functionality should be added to full backward hooks as well.\u001b[39;00m\n\u001b[0;32m 1850\u001b[0m \u001b[38;5;28;01mfor\u001b[39;00m hook_id, hook \u001b[38;5;129;01min\u001b[39;00m _global_forward_hooks\u001b[38;5;241m.\u001b[39mitems():\n", + "File \u001b[1;32mc:\\Users\\leo.breedt\\miniconda3\\envs\\neptune_scale_py_312_base\\Lib\\site-packages\\torch\\nn\\modules\\module.py:1793\u001b[0m, in \u001b[0;36mModule._call_impl..inner\u001b[1;34m()\u001b[0m\n\u001b[0;32m 1790\u001b[0m bw_hook \u001b[38;5;241m=\u001b[39m BackwardHook(\u001b[38;5;28mself\u001b[39m, full_backward_hooks, backward_pre_hooks)\n\u001b[0;32m 1791\u001b[0m args \u001b[38;5;241m=\u001b[39m bw_hook\u001b[38;5;241m.\u001b[39msetup_input_hook(args)\n\u001b[1;32m-> 1793\u001b[0m result \u001b[38;5;241m=\u001b[39m \u001b[43mforward_call\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m 1794\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m _global_forward_hooks \u001b[38;5;129;01mor\u001b[39;00m 
\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_forward_hooks:\n\u001b[0;32m 1795\u001b[0m \u001b[38;5;28;01mfor\u001b[39;00m hook_id, hook \u001b[38;5;129;01min\u001b[39;00m (\n\u001b[0;32m 1796\u001b[0m \u001b[38;5;241m*\u001b[39m_global_forward_hooks\u001b[38;5;241m.\u001b[39mitems(),\n\u001b[0;32m 1797\u001b[0m \u001b[38;5;241m*\u001b[39m\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_forward_hooks\u001b[38;5;241m.\u001b[39mitems(),\n\u001b[0;32m 1798\u001b[0m ):\n\u001b[0;32m 1799\u001b[0m \u001b[38;5;66;03m# mark that always called hook is run\u001b[39;00m\n", + "File \u001b[1;32mc:\\Users\\leo.breedt\\miniconda3\\envs\\neptune_scale_py_312_base\\Lib\\site-packages\\torch\\nn\\modules\\linear.py:125\u001b[0m, in \u001b[0;36mLinear.forward\u001b[1;34m(self, input)\u001b[0m\n\u001b[0;32m 124\u001b[0m \u001b[38;5;28;01mdef\u001b[39;00m \u001b[38;5;21mforward\u001b[39m(\u001b[38;5;28mself\u001b[39m, \u001b[38;5;28minput\u001b[39m: Tensor) \u001b[38;5;241m-\u001b[39m\u001b[38;5;241m>\u001b[39m Tensor:\n\u001b[1;32m--> 125\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[43mF\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mlinear\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;28;43minput\u001b[39;49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mweight\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mbias\u001b[49m\u001b[43m)\u001b[49m\n", "\u001b[1;31mKeyboardInterrupt\u001b[0m: " ] } @@ -518,7 +518,7 @@ }, { "cell_type": "code", - "execution_count": 49, + "execution_count": 27, "metadata": {}, "outputs": [ { From 0c8497b870d0951f419a79cd2115470817f28a0e Mon Sep 17 00:00:00 2001 From: LeoRoccoBreedt Date: Wed, 12 Mar 2025 14:42:12 +0100 Subject: [PATCH 049/125] chore: cleanup commented code --- .../pytorch_text_model_debugging.ipynb | 140 +----------------- 1 file changed, 5 insertions(+), 
135 deletions(-) diff --git a/integrations-and-supported-tools/pytorch/pytorch_text_model_debugging.ipynb b/integrations-and-supported-tools/pytorch/pytorch_text_model_debugging.ipynb index 75881c7..97c8c9c 100644 --- a/integrations-and-supported-tools/pytorch/pytorch_text_model_debugging.ipynb +++ b/integrations-and-supported-tools/pytorch/pytorch_text_model_debugging.ipynb @@ -177,24 +177,6 @@ "print(f\"Vocabulary size: {vocab_size}\")" ] }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "# TODO - capture the folowings; attentions, learning rate (per layer if needed)\n", - " # complete - activations, grad norms, gradients - weights+biases, loss - agg\n", - "# TODO - capture per layer losses\n", - "# TODO - increase number of layers\n", - "# TODO - Adding dropout to demonstrate the effects of regularization\n", - "# TODO - batch normalization\n", - "# TODO - add class for tracking activations, gradients, etc. automatically\n", - "# TODO - investigate packages that help with tracking layer-wise information\n", - "# TODO - visualize layer-wise information, activations, gradients etc. 
(use as example to share with product)\n", - "# TODO - update hooks for model, ensure tracking correct information" - ] - }, { "cell_type": "markdown", "metadata": {}, @@ -321,7 +303,7 @@ }, { "cell_type": "code", - "execution_count": 25, + "execution_count": null, "metadata": {}, "outputs": [], "source": [ @@ -330,7 +312,7 @@ "from uuid import uuid4\n", "\n", "run = Run(\n", - " project = \"leo/pytorch-tutorial\",\n", + " project = \"examples/pytorch-tutorial\",\n", " run_id=f\"pytorch-text-{uuid4()}\"\n", " )\n", "\n", @@ -341,7 +323,7 @@ " \"config/batch_size\": params[\"batch_size\"],\n", " \"config/epochs\": params[\"epochs\"],\n", " \"config/num_lstm_layers\" : params[\"num_lstm_layers\"],\n", - " # \"data/vocab_size\": params[\"vocab_size\"],\n", + " \"data/vocab_size\": params[\"vocab_size\"],\n", " \"data/embed_size\": params[\"embed_size\"]\n", " }\n", ")\n", @@ -518,7 +500,7 @@ }, { "cell_type": "code", - "execution_count": 27, + "execution_count": null, "metadata": {}, "outputs": [ { @@ -532,121 +514,9 @@ } ], "source": [ - "\n", + "# Close run to ensure all operations are processed\n", "run.close()" ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [ - { - "ename": "KeyError", - "evalue": "0", - "output_type": "error", - "traceback": [ - "\u001b[1;31m---------------------------------------------------------------------------\u001b[0m", - "\u001b[1;31mKeyError\u001b[0m Traceback (most recent call last)", - "Cell \u001b[1;32mIn[45], line 3\u001b[0m\n\u001b[0;32m 1\u001b[0m \u001b[38;5;66;03m# After training, you can analyze the activations and gradients\u001b[39;00m\n\u001b[0;32m 2\u001b[0m \u001b[38;5;66;03m# Example: Plot the activations of the LSTM layer\u001b[39;00m\n\u001b[1;32m----> 3\u001b[0m layer_name, activation \u001b[38;5;241m=\u001b[39m \u001b[43mactivations\u001b[49m\u001b[43m[\u001b[49m\u001b[38;5;241;43m0\u001b[39;49m\u001b[43m]\u001b[49m \u001b[38;5;66;03m# Assuming lstm 
activation\u001b[39;00m\n\u001b[0;32m 4\u001b[0m \u001b[38;5;28mprint\u001b[39m(\u001b[38;5;124mf\u001b[39m\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mActivation for \u001b[39m\u001b[38;5;132;01m{\u001b[39;00mlayer_name\u001b[38;5;132;01m}\u001b[39;00m\u001b[38;5;124m layer: \u001b[39m\u001b[38;5;124m\"\u001b[39m, activation)\n\u001b[0;32m 6\u001b[0m \u001b[38;5;66;03m# Plotting histogram of activations of the LSTM layer\u001b[39;00m\n", - "\u001b[1;31mKeyError\u001b[0m: 0" - ] - } - ], - "source": [ - "\n", - "# After training, you can analyze the activations and gradients\n", - "# Example: Plot the activations of the LSTM layer\n", - "layer_name, activation = activations[0] # Assuming lstm activation\n", - "print(f\"Activation for {layer_name} layer: \", activation)\n", - "\n", - "# Plotting histogram of activations of the LSTM layer\n", - "plt.hist(activation[0].detach().numpy().flatten(), bins=100, alpha=0.7, color='b', label='LSTM Activations')\n", - "plt.title(f\"Activation Distribution for {layer_name}\")\n", - "plt.xlabel(\"Activation Value\")\n", - "plt.ylabel(\"Frequency\")\n", - "plt.legend()\n", - "plt.show()\n", - "\n", - "# Example: Plot the gradients of the LSTM layer\n", - "layer_name, grad = gradients[0] # Assuming lstm gradient\n", - "print(f\"Gradient for {layer_name} layer: \", grad[0].shape)\n", - "\n", - "# Plotting histogram of gradients of the LSTM layer\n", - "plt.hist(grad[0].numpy().flatten(), bins=100, alpha=0.7, color='r', label='LSTM Gradients')\n", - "plt.title(f\"Gradient Distribution for {layer_name}\")\n", - "plt.xlabel(\"Gradient Value\")\n", - "plt.ylabel(\"Frequency\")\n", - "plt.legend()\n", - "plt.show()\n" - ] - }, - { - "cell_type": "code", - "execution_count": 17, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Activation for fc layer: torch.Size([3, 5, 5000])\n" - ] - }, - { - "data": { - "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAkQAAAHHCAYAAABeLEexAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjkuMiwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8hTgPZAAAACXBIWXMAAA9hAAAPYQGoP6dpAABJCUlEQVR4nO3dd3wVVf7/8fdNwk0oKZQUWCD03luM9CJBIitFhRiaRl01qAhYWBUQXVGQthb46iJBRdoKSlEwUkQhgiCgoFRBRBJAgYSAhJTz+4Nf7nJJgOSmM6/n43EfD+/MuXM+M7mYd86cmbEZY4wAAAAszK2oCwAAAChqBCIAAGB5BCIAAGB5BCIAAGB5BCIAAGB5BCIAAGB5BCIAAGB5BCIAAGB5BCIAAGB5BCKgBBg+fLhq1KhRJH1PmDBBNputSPrOrSNHjshmsykmJqbA+4qJiZHNZtORI0ccy2rUqKE77rijwPuWpA0bNshms2nDhg2F0t/VpkyZolq1asnd3V0tWrQo8P4++OADNWjQQKVKlZKfn1+B9wfrIRAB+eDtt9+WzWZTSEiIy9s4fvy4JkyYoJ07d+ZfYTl04cIFTZgwoch+uV6LzWZzvDw8PFShQgW1bt1aTzzxhH766ad86+ftt98ulBDliuJY2xdffKGnn35a7du319y5c/XKK68UaH979+7V8OHDVbt2bb377rt65513CrQ/WJONZ5kBede+fXsdP35cR44c0YEDB1SnTp1cb2Pbtm1q27at5s6dq+HDhzutS01NVUZGhjw9PfOpYmd//PGH/P39NX78eE2YMMFpXVpamtLS0uTl5VUgfV+PzWbTbbfdpqFDh8oYo8TERO3atUtLlizR+fPn9dprr2nUqFGO9sYYpaSkqFSpUnJ3d89xP02aNFGlSpVyFQjT09OVmpoqT09PxwhajRo11KRJE61cuTLH23G1toyMDF26dEl2u11uboX7t+2zzz6rKVOm6K+//pLdbi/w/mbPnq1HHnnE5X9bQE4wQgTk0eHDh7V582ZNmzZN/v7+mj9/fr73UapUqQILQzfi4eFRJGEoU7169TR48GANGTJEI0aM0LvvvqtDhw6pbdu2Gj16tD777DNHW5vNJi8vr1yFodw6f/68JMnd3V1eXl5FdjrRzc1NXl5ehR6GJOnkyZMqXbp0voUhY4z++uuv6/YniVNlKFgGQJ689NJLpnz58iYlJcU88sgjpm7dutm2O3PmjBk5cqQJDg42drvd/O1vfzNDhgwxp06dMuvXrzeSsrzmzp1rjDFm2LBhJjg42BhjzKVLl0z58uXN8OHDs/SRmJhoPD09zejRo40xxqSkpJgXXnjBtGrVyvj4+JgyZcqYDh06mHXr1jk+c/jw4Wz7Hj9+vDHGmPHjx5ur/1eRmppqJk6caGrVqmXsdrsJDg42Y8eONRcvXnRqFxwcbMLDw83XX39t2rZtazw9PU3NmjXNvHnzcnRsJZno6Ohs1/3666/Gw8PD3HrrrVn2JfO4GWNMfHy8GT58uPnb3/5m7Ha7CQoKMn//+9/N4cOHHTVeve+dO3c2xhgzd+5cI8ls2LDBPPLII8bf39/4+fk5rcvczpX7u2bNGtO8eXPj6elpGjZsaD7++GOn2rM7ptlt83q1ZX5n1q9f77SNxYsXm1atWhkvLy9TsWJFExkZaY4dO+bUZtiwYaZs2bLm2LFj5s477zRly5Y1lSpVMqNHjzZpaWnZHu9M1/ue5vZ7sXr1atO6dWvj6elppk+fnm1/2R2DzO+mMcZ89tlnplOnTqZcuXLG29vbtGnTxsyfP/+6+wBkhxEiII/mz5+v/v37y263KyIiQgcOHNB3333n1CY5OVkdO3bUG2+8oZ49e2rmzJl6+OGHtXfvXh07dkwNGzbUxIkTJUkPPfSQPvjgA33wwQfq1KlTlv5KlSqlfv366ZNPPtGlS5ec1n3yySdKSUnRoEGDJElJSUn6z3/+oy5duui1117ThAk
TdOrUKYWFhTnmKvn7+2vWrFmSpH79+jn67t+//zX3+YEHHtC4cePUqlUrTZ8+XZ07d9akSZMc/V7p4MGDuuuuu3Tbbbdp6tSpKl++vIYPH649e/bk/CBno3r16urcubO+/fZbJSUlXbPdgAEDtGzZMt133316++239fjjj+vcuXM6evSoJGnGjBmqWrWqGjRo4Nj35557zmkbjz76qH766SeNGzdOzz777HXrOnDggAYOHKjbb79dkyZNkoeHh+6++27Fxsbmeh9zUtuVYmJidM8998jd3V2TJk3Sgw8+qKVLl6pDhw46e/asU9v09HSFhYWpYsWKev3119W5c2dNnTr1hvNzPvjgA3Xs2FGenp5Zvqe5+V7s27dPERERuu222zRz5sxrTsyeMWOG+vXrJ0maNWuW03czJiZG4eHhOn36tMaOHatXX31VLVq00OrVq6+7D0C2ijqRASXZtm3bjCQTGxtrjDEmIyPDVK1a1TzxxBNO7caNG2ckmaVLl2bZRkZGhjHGmO+++y7L6EamK0eIjDFmzZo1RpJZsWKFU7vevXubWrVqOd6npaWZlJQUpzZnzpwxgYGB5v7773csO3XqVJa/vDNdPZqxc+dOI8k88MADTu3GjBljJDmNPmX+db9x40bHspMnTzqNYl2PrjNCZIwxTzzxhJFkdu3aZYzJOkJ05swZI8lMmTLluv00btzYMfJypcwRmw4dOmQZObnWCJEkpxGhxMREU7lyZdOyZUvHspyOEF2vtqtHiC5dumQCAgJMkyZNzF9//eVot3LlSiPJjBs3zrFs2LBhRpKZOHGi0zZbtmxpWrdunaWvq2WOMF3Jle/F6tWrb9iXMf87XqdOnXIsO3v2rPH29jYhISFO+2vM//5NAbnBCBGQB/Pnz1dgYKC6du0q6fIcloEDB2rhwoVKT093tPv444/VvHlzx1+6V3JlDkq3bt1UqVIlLVq0yLHszJkzio2N1cCBAx3L3N3dHfM8MjIydPr0aaWlpalNmzb6/vvvc92vJMecnSsnM0vS6NGjJUmrVq1yWt6oUSN17NjR8d7f31/169fXL7/84lL/VypXrpwk6dy5c9muz5znsmHDBp05c8blfh588MEcz0uqUqWK08/Zx8dHQ4cO1Y4dO5SQkOByDTeybds2nTx5Uo8++qjTnK/w8HA1aNAgy89Fkh5++GGn9x07dnT555Lb70XNmjUVFhbmUl+SFBsbq3PnzunZZ5/NMsetpNwmAsULgQhwUXp6uhYuXKiuXbvq8OHDOnjwoA4ePKiQkBCdOHFCa9eudbQ9dOiQmjRpkm99e3h4aMCAAfr000+VkpIiSVq6dKlSU1OdApEkzZs3T82aNZOXl5cqVqwof39/rVq1SomJiS71/euvv8rNzS3L1T5BQUHy8/PTr7/+6rS8evXqWbZRvnz5PAWUTMnJyZIkb2/vbNd7enrqtdde0+eff67AwEB16tRJkydPznUwqVmzZo7b1qlTJ8sv5Hr16kmS0z2L8lvmca9fv36WdQ0aNMjyc/Hy8pK/v7/Tsrz8XHL7vcjNMc3OoUOHJClf/13B2ghEgIvWrVun+Ph4LVy4UHXr1nW87rnnHkkqkKvNrjRo0CCdO3dOn3/+uSRp8eLFatCggZo3b+5o8+GHHzru3zJnzhytXr1asbGx6tatmzIyMvLUf07/Cr/WyIrJhzt+7N69W+7u7tf95Tpy5Ejt379fkyZNkpeXl1544QU1bNhQO3bsyHE/pUuXznOtV7rWsbtyVLGgFdSVeDn9XuT3MQXyikAEuGj+/PkKCAjQkiVLsrwiIiK0bNkyx6XEtWvX1u7du6+7vdwO83fq1EmVK1fWokWL9Mcff2jdunVZRof++9//qlatWlq6dKmGDBmisLAw9ejRQxcvXnS57+DgYGVkZOjAgQNOy0+cOKGzZ88qODg4V/vhqqNHj+qrr75SaGjoNUeIMtWuXVujR4/WF198od27d+vSpUuaOnWqY31
+nmI5ePBglrC3f/9+SXLcbbx8+fKSlGWi89WjKLmpLfO479u3L8u6ffv2FfjPpbC/F7Vr15akG/67AnKKQAS44K+//tLSpUt1xx136K677sryGjFihM6dO6fly5dLunyl065du7Rs2bIs28r85Vm2bFlJWX9JXoubm5vuuusurVixQh988IHS0tKyBKLMUYArf0Fv2bJFcXFxTu3KlCmT47579+4t6fLVP1eaNm2apMtzVgra6dOnFRERofT09OtedXXhwoUs4a927dry9vZ2nGqULh/7nB73Gzl+/LjTzzkpKUnvv/++WrRooaCgIEcNkrRx40ZHu/Pnz2vevHlZtpfT2tq0aaOAgADNnj3bad8+//xz/fzzzwX+cyns70XPnj3l7e2tSZMmZfkZ58foI6zHo6gLAEqi5cuX69y5c/r73/+e7fpbbrnFcZPGgQMH6qmnntJ///tf3X333br//vvVunVrnT59WsuXL9fs2bPVvHlz1a5dW35+fpo9e7a8vb1VtmxZhYSEXPd00MCBA/XGG29o/Pjxatq0qRo2bOi0/o477tDSpUvVr18/hYeH6/Dhw5o9e7YaNWrkmH8jXT590ahRIy1atEj16tVThQoV1KRJk2znZzRv3lzDhg3TO++8o7Nnz6pz587aunWr5s2bp759+zommOeX/fv368MPP5QxRklJSY47VScnJ2vatGnq1avXdT/bvXt33XPPPWrUqJE8PDy0bNkynThxwulS8NatW2vWrFl6+eWXVadOHQUEBKhbt24u1VuvXj1FRUXpu+++U2BgoN577z2dOHFCc+fOdbTp2bOnqlevrqioKD311FNyd3fXe++9J39/f8ftAHJbW6lSpfTaa6/pvvvuU+fOnRUREaETJ05o5syZqlGjhp588kmX9ienCvt74ePjo+nTp+uBBx5Q27Ztde+996p8+fLatWuXLly4kG24BK6rKC9xA0qqPn36GC8vL3P+/Plrthk+fLgpVaqU+eOPP4wxxvz5559mxIgRjhsEVq1a1QwbNsyx3hhjPv30U9OoUSPj4eFxzRszXikjI8NUq1bNSDIvv/xytutfeeUVExwcbDw9PU3Lli3NypUrs93e5s2bTevWrY3dbs/RjRlffPFFU7NmTVOqVClTrVq1696A72qdO3fO9lLyq+mKm/G5ubkZPz8/07JlS/PEE0+YPXv2ZGl/9WX3f/zxh4mOjjYNGjQwZcuWNb6+viYkJMQsXrzY6XMJCQkmPDzceHt7Z3tjxu+++y5LXze6MWOzZs2Mp6enadCggVmyZEmWz2/fvt2EhIQYu91uqlevbqZNm5btNq9V27VuzLho0SLTsmVL4+npaSpUqHDdGzNe7Vq3A7jatT6f1+/FtWR32X2m5cuXm1tvvdWULl3a+Pj4mHbt2pkFCxbkeNtAJp5lBgAALI85RAAAwPIIRAAAwPIIRAAAwPIIRAAAwPIIRAAAwPIIRAAAwPK4MWMOZGRk6Pjx4/L29uYpygAAlBDGGJ07d05VqlSRm9v1x4AIRDlw/PhxVatWrajLAAAALvjtt99UtWrV67YhEOVA5oMjf/vtN/n4+BRxNQAAICeSkpJUrVq1Gz4AWiIQ5UjmaTIfHx8CEQAAJUxOprswqRoAAFgegQgAAFgegQgAAFgec4gAAEUiPT1dqampRV0GSji73X7DS+pzgkAEAChUxhglJCTo7NmzRV0KbgJubm6qWbOm7HZ7nrZDIAIAFKrMMBQQEKAyZcpww1u4LPPGyfHx8apevXqevksEIgBAoUlPT3eEoYoVKxZ1ObgJ+Pv76/jx40pLS1OpUqVc3g6TqgEAhSZzzlCZMmWKuBLcLDJPlaWnp+dpO0UaiCZNmqS2bdvK29tbAQEB6tu3r/bt2+fUpkuXLrLZbE6vhx9+2KnN0aNHFR4erjJlyiggIEBPPfWU0tLSnNps2LBBrVq1kqenp+rUqaOYmJiC3j0AwDVwmgz5Jb++S0UaiL766it
FR0fr22+/VWxsrFJTU9WzZ0+dP3/eqd2DDz6o+Ph4x2vy5MmOdenp6QoPD9elS5e0efNmzZs3TzExMRo3bpyjzeHDhxUeHq6uXbtq586dGjlypB544AGtWbOm0PYVAAAUX0U6h2j16tVO72NiYhQQEKDt27erU6dOjuVlypRRUFBQttv44osv9NNPP+nLL79UYGCgWrRooZdeeknPPPOMJkyYILvdrtmzZ6tmzZqaOnWqJKlhw4b65ptvNH36dIWFhRXcDgIAUEQ2bNigrl276syZM/Lz8yvQvmw2m5YtW6a+ffsWaD8FqVhNqk5MTJQkVahQwWn5/Pnz9eGHHyooKEh9+vTRCy+84Dj/HBcXp6ZNmyowMNDRPiwsTI888oj27Nmjli1bKi4uTj169HDaZlhYmEaOHFmwOwQAyLE+C/oUan8rIlbkqv3w4cM1b968LMsPHDigOnXqSLp8Bd2//vUvrVq1Sr///rsCAgLUokULjRw5Ut27d7/u9o8dO6ZatWqpXr162r17d65q69Kli1q0aKEZM2Y4lt16662Kj4+Xr69vrrZ1PRMmTNAnn3yinTt3Oi2Pj49X+fLl862folBsAlFGRoZGjhyp9u3bq0mTJo7l9957r4KDg1WlShX98MMPeuaZZ7Rv3z4tXbpU0uUv35VhSJLjfUJCwnXbJCUl6a+//lLp0qWd1qWkpCglJcXxPikpKf92FABQYvXq1Utz5851Wubv7y9JOnLkiNq3by8/Pz9NmTJFTZs2VWpqqtasWaPo6Gjt3bv3utuOiYnRPffco40bN2rLli0KCQnJU612u/2aZ1fyW2H1U5CKzVVm0dHR2r17txYuXOi0/KGHHlJYWJiaNm2qyMhIvf/++1q2bJkOHTpUYLVMmjRJvr6+jle1atUKrC8AQMnh6empoKAgp5e7u7sk6dFHH5XNZtPWrVs1YMAA1atXT40bN9aoUaP07bffXne7xhjNnTtXQ4YM0b333qs5c+ZkabNp0yZ16dJFZcqUUfny5RUWFqYzZ85o+PDh+uqrrzRz5kzHxUdHjhzRhg0bZLPZdPbsWSUlJal06dL6/PPPnba5bNkyeXt768KFC5KkZ555RvXq1VOZMmVUq1YtvfDCC44rA2NiYvTiiy9q165djn4yL1Cy2Wz65JNPHNv98ccf1a1bN5UuXVoVK1bUQw89pOTkZMf64cOHq2/fvnr99ddVuXJlVaxYUdHR0U53Ln/77bdVt25deXl5KTAwUHfddVfOf1AuKBaBaMSIEVq5cqXWr1+vqlWrXrdtZmI+ePCgpMup9MSJE05tMt9nJtZrtfHx8ckyOiRJY8eOVWJiouP122+/ubZjAABLOH36tFavXq3o6GiVLVs2y/obzeFZv369Lly4oB49emjw4MFauHCh0wVGO3fuVPfu3dWoUSPFxcXpm2++UZ8+fZSenq6ZM2cqNDTU6QKkq/+Q9/Hx0R133KGPPvrIafn8+fPVt29fxzQUb29vxcTE6KefftLMmTP17rvvavr06ZKkgQMHavTo0WrcuLGjn4EDB2bZl/PnzyssLEzly5fXd999pyVLlujLL7/UiBEjsuzzoUOHtH79escFUZkBa9u2bXr88cc1ceJE7du3T6tXr3aaW1wQivSUmTFGjz32mJYtW6YNGzaoZs2aN/xM5nnLypUrS5JCQ0P1r3/9SydPnlRAQIAkKTY2Vj4+PmrUqJGjzWeffea0ndjYWIWGhmbbh6enpzw9PV3dLQA3gavns+R2vgluTitXrlS5cuUc72+//XYtWbJEBw8elDFGDRo0cGm7c+bM0aBBg+Tu7q4mTZqoVq1aWrJkiYYPHy5Jmjx5stq0aaO3337b8ZnGjRs7/ttut1/3AiRJioyM1JAhQ3ThwgWVKVNGSUlJWrVqlZYtW+Zo8/zzzzv+u0aNGhozZowWLlyop59+WqVLl1a5cuXk4eFx3X4++ugjXbx4Ue+//74jHL755pvq06ePXnvtNcc
UlvLly+vNN9+Uu7u7GjRooPDwcK1du1YPPvigjh49qrJly+qOO+6Qt7e3goOD1bJly9wd1Fwq0hGi6Ohoffjhh/roo4/k7e2thIQEJSQk6K+//pIkHTp0SC+99JK2b9+uI0eOaPny5Ro6dKg6deqkZs2aSZJ69uypRo0aaciQIdq1a5fWrFmj559/XtHR0Y5Q8/DDD+uXX37R008/rb179+rtt9/W4sWL9eSTTxbZvgMASp7M27dkvv79739LuvwHvqvOnj2rpUuXavDgwY5lgwcPdjptljlClBe9e/dWqVKltHz5cknSxx9/LB8fH6eLjhYtWqT27dsrKChI5cqV0/PPP6+jR4/mqp+ff/5ZzZs3dxopa9++vTIyMpzuNdi4cWPH6Ubp8kDHyZMnJUm33XabgoODVatWLQ0ZMkTz5893nNYrKEUaiGbNmqXExER16dJFlStXdrwWLVok6XLi/fLLL9WzZ081aNBAo0eP1oABA7Rixf/+UnN3d9fKlSvl7u6u0NBQDR48WEOHDtXEiRMdbWrWrKlVq1YpNjZWzZs319SpU/Wf//yHS+4BALlStmxZ1alTx/HKPFtRt25d2Wy2G06czk7miEpISIg8PDzk4eGhZ555Rt988432798vSdlO78gtu92uu+66y3Ha7KOPPtLAgQPl4XH5ZFFcXJwiIyPVu3dvrVy5Ujt27NBzzz2nS5cu5bnv7Fz9mA2bzaaMjAxJl0/dff/991qwYIEqV66scePGqXnz5gX6QOAiP2V2PdWqVdNXX311w+0EBwdnOSV2tS5dumjHjh25qg8AgJyoUKGCwsLC9NZbb+nxxx/PMo/o7Nmz15xHNGfOHI0ePdpxeizTo48+qvfee0+vvvqqmjVrprVr1+rFF1/Mdht2uz1Hj66IjIzUbbfdpj179mjdunV6+eWXHes2b96s4OBgPffcc45lv/76a677adiwoWJiYnT+/HnHcdi0aZPc3NxUv379G9aYycPDQz169FCPHj00fvx4+fn5ad26derfv3+Ot5EbxeayewAoCDm5tw3zg5Af3nrrLbVv317t2rXTxIkT1axZM6WlpSk2NlazZs3Szz//nOUzO3fu1Pfff6/58+dnmX8UERGhiRMn6uWXX9bYsWPVtGlTPfroo3r44Ydlt9u1fv163X333apUqZJq1KihLVu26MiRIypXrlyW+/ll6tSpk4KCghQZGamaNWs6Xdpft25dHT16VAsXLlTbtm2zzC+SLs8rOnz4sHbu3KmqVavK29s7y5zbyMhIjR8/XsOGDdOECRN06tQpPfbYYxoyZEiWW+Bcy8qVK/XLL7+oU6dOKl++vD777DNlZGTkKlDlVrG4ygwAgJKuVq1a+v7779W1a1eNHj1aTZo00W233aa1a9dq1qxZ2X5mzpw5atSoUbaTsfv166eTJ0/qs88+U7169fTFF19o165dateunUJDQ/Xpp586TneNGTNG7u7uatSokfz9/a8578dmsykiIkK7du1SZGSk07q///3vevLJJzVixAi1aNFCmzdv1gsvvODUZsCAAerVq5e6du0qf39/LViwIEsfZcqU0Zo1a3T69Gm1bdtWd911l7p3764333wzR8dRunxV3tKlS9WtWzc1bNhQs2fP1oIFC5wmkuc3m8nLTDCLSEpKkq+vrxITE+Xj41PU5QDIBVdHiLjKrGBcvHhRhw8fVs2aNeXl5VXU5eAmcL3vVG5+fzNCBAAALI85RAAsr7CfoQWg+GGECAAAWB6BCAAAWB6nzACUGEx0vnlwPQ/yS359lxghAgAUmsy7Exf0YxhgHZl30r7yMSCuYIQIAFzETR9zz93dXX5+fo5nVpUpU0Y2m62Iq0JJlZGRoVOnTqlMmTKOezK5ikAEAChUmU9KzwxFQF64ubmpevXqeQ7WBCIAQKGy2WyqXLmyAgIClJqaWtTloISz2+1yc8v7DCACEQCgSLi7u+d53geQXwhEAJAD3LwRuLlxlRk
AALA8RogA3FQYyQHgCkaIAACA5RGIAACA5RGIAACA5RGIAACA5RGIAACA5RGIAACA5XHZPYASi0vsAeQXRogAAIDlEYgAAIDlEYgAAIDlEYgAAIDlEYgAAIDlEYgAAIDlEYgAAIDlEYgAAIDlEYgAAIDlEYgAAIDlEYgAAIDlEYgAAIDlEYgAAIDlEYgAAIDlEYgAAIDlEYgAAIDlEYgAAIDlEYgAAIDlEYgAAIDlEYgAAIDleRR1AQBgJX0W9MmybEXEiiKoBMCVGCECAACWxwgRgGIpu5EUACgojBABAADLIxABAADLIxABAADLIxABAADLIxABAADLIxABAADL47J7AChA3D4AKBkYIQIAAJZHIAIAAJZHIAIAAJZHIAIAAJZHIAIAAJbHVWYACl12V16tiFhRBJUAwGWMEAEAAMsr0kA0adIktW3bVt7e3goICFDfvn21b98+pzYXL15UdHS0KlasqHLlymnAgAE6ceKEU5ujR48qPDxcZcqUUUBAgJ566imlpaU5tdmwYYNatWolT09P1alTRzExMQW9ewAAoIQo0kD01VdfKTo6Wt9++61iY2OVmpqqnj176vz58442Tz75pFasWKElS5boq6++0vHjx9W/f3/H+vT0dIWHh+vSpUvavHmz5s2bp5iYGI0bN87R5vDhwwoPD1fXrl21c+dOjRw5Ug888IDWrFlTqPsLAACKJ5sxxhR1EZlOnTqlgIAAffXVV+rUqZMSExPl7++vjz76SHfddZckae/evWrYsKHi4uJ0yy236PPPP9cdd9yh48ePKzAwUJI0e/ZsPfPMMzp16pTsdrueeeYZrVq1Srt373b0NWjQIJ09e1arV6++YV1JSUny9fVVYmKifHx8CmbnAQvJyRwiK9/hmflUQP7Ize/vYjWHKDExUZJUoUIFSdL27duVmpqqHj16ONo0aNBA1atXV1xcnCQpLi5OTZs2dYQhSQoLC1NSUpL27NnjaHPlNjLbZG4DQMHqs6CP0wsAiptic5VZRkaGRo4cqfbt26tJkyaSpISEBNntdvn5+Tm1DQwMVEJCgqPNlWEoc33muuu1SUpK0l9//aXSpUs7rUtJSVFKSorjfVJSUt53EAAAFFvFZoQoOjpau3fv1sKFC4u6FE2aNEm+vr6OV7Vq1Yq6JAAAUICKxQjRiBEjtHLlSm3cuFFVq1Z1LA8KCtKlS5d09uxZp1GiEydOKCgoyNFm69atTtvLvArtyjZXX5l24sQJ+fj4ZBkdkqSxY8dq1KhRjvdJSUmEIqCAcSoNQFEq0hEiY4xGjBihZcuWad26dapZs6bT+tatW6tUqVJau3atY9m+fft09OhRhYaGSpJCQ0P1448/6uTJk442sbGx8vHxUaNGjRxtrtxGZpvMbVzN09NTPj4+Ti8AAHDzKtIRoujoaH300Uf69NNP5e3t7Zjz4+vrq9KlS8vX11dRUVEaNWqUKlSoIB8fHz322GMKDQ3VLbfcIknq2bOnGjVqpCFDhmjy5MlKSEjQ888/r+joaHl6ekqSHn74Yb355pt6+umndf/992vdunVavHixVq1aVWT7DgAAio8iHSGaNWuWEhMT1aVLF1WuXNnxWrRokaPN9OnTdccdd2jAgAHq1KmTgoKCtHTpUsd6d3d3rVy5Uu7u7goNDdXgwYM1dOhQTZw40dGmZs2aWrVqlWJjY9W8eXNNnTpV//nPfxQWFlao+wsAAIqnYnUfouKK+xABecP8oNzhPkRA/iix9yECAAAoCsXiKjMAwP/k5E7eAPIXI0QAAMDyCEQAAMDyCEQAAMDyCEQAAMDyCEQAAMDyCEQAAMDyCEQAAMDyCEQAAMDyCEQAAMDyCEQAAMDyCEQAAMDyCEQAAMDyCEQAAMDyCEQAAMDyCEQAAMDyCEQAAMDyCEQAAMDyPIq6AADAjfVZ0OeGbVZErCiESoCbEyNEAADA8ghEAADA8ghEAADA8ghEAADA8phUDSBf5WTyLwAUN4w
QAQAAyyMQAQAAyyMQAQAAyyMQAQAAyyMQAQAAyyMQAQAAyyMQAQAAyyMQAQAAyyMQAQAAyyMQAQAAy+PRHQDyhEd1ALgZMEIEAAAsjxEiALhJZDdatyJiRRFUApQ8jBABAADLIxABAADLIxABAADLIxABAADLIxABAADLIxABAADLIxABAADLIxABAADLIxABAADLIxABAADLIxABAADLIxABAADLIxABAADLIxABAADLIxABAADL8yjqAgCUHH0W9CnqEgCgQDBCBAAALI9ABAAALI9ABAAALI9ABAAALI9ABAAALM+lQPTLL7/kdx0AAABFxqVAVKdOHXXt2lUffvihLl686HLnGzduVJ8+fVSlShXZbDZ98sknTuuHDx8um83m9OrVq5dTm9OnTysyMlI+Pj7y8/NTVFSUkpOTndr88MMP6tixo7y8vFStWjVNnjzZ5ZoBAMDNx6VA9P3336tZs2YaNWqUgoKC9I9//ENbt27N9XbOnz+v5s2b66233rpmm169eik+Pt7xWrBggdP6yMhI7dmzR7GxsVq5cqU2btyohx56yLE+KSlJPXv2VHBwsLZv364pU6ZowoQJeuedd3JdLwAAuDm5dGPGFi1aaObMmZo6daqWL1+umJgYdejQQfXq1dP999+vIUOGyN/f/4bbuf3223X77bdft42np6eCgoKyXffzzz9r9erV+u6779SmTRtJ0htvvKHevXvr9ddfV5UqVTR//nxdunRJ7733nux2uxo3bqydO3dq2rRpTsEJAABYV54mVXt4eKh///5asmSJXnvtNR08eFBjxoxRtWrVNHToUMXHx+e5wA0bNiggIED169fXI488oj///NOxLi4uTn5+fo4wJEk9evSQm5ubtmzZ4mjTqVMn2e12R5uwsDDt27dPZ86cybbPlJQUJSUlOb0AAMDNK0+P7ti2bZvee+89LVy4UGXLltWYMWMUFRWlY8eO6cUXX9Sdd97p0qm0TL169VL//v1Vs2ZNHTp0SP/85z91++23Ky4uTu7u7kpISFBAQIDzDnl4qEKFCkpISJAkJSQkqGbNmk5tAgMDHevKly+fpd9JkybpxRdfdLlu4GbBozoAWIVLgWjatGmaO3eu9u3bp969e+v9999X79695eZ2ecCpZs2aiomJUY0aNfJU3KBBgxz/3bRpUzVr1ky1a9fWhg0b1L179zxt+3rGjh2rUaNGOd4nJSWpWrVqBdYfABSUq0PtiogVRVQJULy5FIhmzZql+++/X8OHD1flypWzbRMQEKA5c+bkqbir1apVS5UqVdLBgwfVvXt3BQUF6eTJk05t0tLSdPr0ace8o6CgIJ04ccKpTeb7a81N8vT0lKenZ77WDgAAii+XAtGBAwdu2MZut2vYsGGubP6ajh07pj///NMRwkJDQ3X27Flt375drVu3liStW7dOGRkZCgkJcbR57rnnlJqaqlKlSkmSYmNjVb9+/WxPlwEAAOtxaVL13LlztWTJkizLlyxZonnz5uV4O8nJydq5c6d27twpSTp8+LB27typo0ePKjk5WU899ZS+/fZbHTlyRGvXrtWdd96pOnXqKCwsTJLUsGFD9erVSw8++KC2bt2qTZs2acSIERo0aJCqVKkiSbr33ntlt9sVFRWlPXv2aNGiRZo5c6bTKTEAAGBtLgWiSZMmqVKlSlmWBwQE6JVXXsnxdrZt26aWLVuqZcuWkqRRo0apZcuWGjdunNzd3fXDDz/o73//u+rVq6eoqCi1bt1aX3/9tdPprPnz56tBgwbq3r27evfurQ4dOjjdY8jX11dffPGFDh8+rNatW2v06NEaN24cl9wDAAAHmzHG5PZDXl5e2rt3b5ZJ00eOHFHDhg31119/5Vd9xUJSUpJ8fX2VmJgoHx+foi4HKDRcZXbzYVI1rCQ3v79dGiEKCAjQDz/8kGX5rl27VLFiRVc2CQAAUGRcCkQRERF6/PHHtX79eqWnpys9PV3r1q3TE0884XS
pPAAAQEng0lVmL730ko4cOaLu3bvLw+PyJjIyMjR06NBczSECAAAoDlwKRHa7XYsWLdJLL72kXbt2qXTp0mratKmCg4Pzuz4AAIACl6dHd9SrV0/16tXLr1oAAACKhEuBKD09XTExMVq7dq1OnjypjIwMp/Xr1q3Ll+IAAAAKg0uB6IknnlBMTIzCw8PVpEkT2Wy2/K4LAACg0LgUiBYuXKjFixerd+/e+V0PAABAoXN5UnWdOnXyuxYARYibMAKwMpfuQzR69GjNnDlTLtzkGgAAoNhxaYTom2++0fr16/X555+rcePGjqfIZ1q6dGm+FAcAAFAYXApEfn5+6tevX37XAgAAUCRcCkRz587N7zoAAACKjEtziCQpLS1NX375pf7v//5P586dkyQdP35cycnJ+VYcAABAYXBphOjXX39Vr169dPToUaWkpOi2226Tt7e3XnvtNaWkpGj27Nn5XScAIB9kdzXhiogVRVAJULy4NEL0xBNPqE2bNjpz5oxKly7tWN6vXz+tXbs234oDAAAoDC6NEH399dfavHmz7Ha70/IaNWro999/z5fCAAAACotLI0QZGRlKT0/PsvzYsWPy9vbOc1EAAACFyaVA1LNnT82YMcPx3mazKTk5WePHj+dxHgAAoMRx6ZTZ1KlTFRYWpkaNGunixYu69957deDAAVWqVEkLFizI7xoBAAAKlEuBqGrVqtq1a5cWLlyoH374QcnJyYqKilJkZKTTJGsAAICSwKVAJEkeHh4aPHhwftYCAABQJFwKRO+///511w8dOtSlYgAAAIqCS4HoiSeecHqfmpqqCxcuyG63q0yZMgQiAABQorh0ldmZM2ecXsnJydq3b586dOjApGoAAFDiuPwss6vVrVtXr776apbRIwAAgOLO5UnV2W7Mw0PHjx/Pz00CAArY1c8349lmsCKXAtHy5cud3htjFB8frzfffFPt27fPl8IAAAAKi0uBqG/fvk7vbTab/P391a1bN02dOjU/6gIAACg0LgWijIyM/K4DAACgyOTbpGoAAICSyqURolGjRuW47bRp01zpAgAAoNC4FIh27NihHTt2KDU1VfXr15ck7d+/X+7u7mrVqpWjnc1my58qAQAACpBLgahPnz7y9vbWvHnzVL58eUmXb9Z43333qWPHjho9enS+FgkAAFCQXJpDNHXqVE2aNMkRhiSpfPnyevnll7nKDAAAlDguBaKkpCSdOnUqy/JTp07p3LlzeS4KAACgMLl0yqxfv3667777NHXqVLVr106StGXLFj311FPq379/vhYIoGBcfXdiALAylwLR7NmzNWbMGN17771KTU29vCEPD0VFRWnKlCn5WiAAAEBBcykQlSlTRm+//bamTJmiQ4cOSZJq166tsmXL5mtxAAAAhSFPN2aMj49XfHy86tatq7Jly8oYk191AQAAFBqXAtGff/6p7t27q169eurdu7fi4+MlSVFRUVxyDwAAShyXTpk9+eSTKlWqlI4ePaqGDRs6lg8cOFCjRo3i0nsAKMGym3C/ImJFEVQCFB6XAtEXX3yhNWvWqGrVqk7L69atq19//TVfCgMAACgsLp0yO3/+vMqUKZNl+enTp+Xp6ZnnogAAAAqTS4GoY8eOev/99x3vbTabMjIyNHnyZHXt2jXfigMAACgMLp0ymzx5srp3765t27bp0qVLevrpp7Vnzx6dPn1amzZtyu8aAQAACpRLI0RNmjTR/v371aFDB9155506f/68+vfvrx07dqh27dr5XSMAAECByvUIUWpqqnr16qXZs2frueeeK4iaAOQzHtMBANeX6xGiUqVK6YcffiiIWgAAAIqES6fMBg8erDlz5uR3LQAAAEXCpUnVaWlpeu+99/Tll1+qdevWWZ5hNm3atHwpDgAAoDDkKhD98ssvqlGjhnbv3q1WrVpJkvbv3+/Uxmaz5V91AAAAhSBXgahu3bqKj4/X+vXrJV1+VMe///1vBQYGFkhxAAAAhSF
Xc4iufpr9559/rvPnz+drQQAAAIXNpUnVma4OSAAAACVRrgKRzWbLMkeIOUMAAKCky9UcImOMhg8f7niA68WLF/Xwww9nucps6dKl+VchAABAActVIBo2bJjT+8GDB+drMQAAAEUhV4Fo7ty5BVUHAABAkcnTpGoAAICbQZEGoo0bN6pPnz6qUqWKbDabPvnkE6f1xhiNGzdOlStXVunSpdWjRw8dOHDAqc3p06cVGRkpHx8f+fn5KSoqSsnJyU5tfvjhB3Xs2FFeXl6qVq2aJk+eXNC7BgAASpAiDUTnz59X8+bN9dZbb2W7fvLkyfr3v/+t2bNna8uWLSpbtqzCwsJ08eJFR5vIyEjt2bNHsbGxWrlypTZu3KiHHnrIsT4pKUk9e/ZUcHCwtm/frilTpmjChAl65513Cnz/AOBm0WdBH6cXcLNx6Vlm+eX222/X7bffnu06Y4xmzJih559/Xnfeeack6f3331dgYKA++eQTDRo0SD///LNWr16t7777Tm3atJEkvfHGG+rdu7def/11ValSRfPnz9elS5f03nvvyW63q3Hjxtq5c6emTZvmFJwAAIB1Fds5RIcPH1ZCQoJ69OjhWObr66uQkBDFxcVJkuLi4uTn5+cIQ5LUo0cPubm5acuWLY42nTp1kt1ud7QJCwvTvn37dObMmWz7TklJUVJSktMLAADcvIptIEpISJCkLM9JCwwMdKxLSEhQQECA03oPDw9VqFDBqU1227iyj6tNmjRJvr6+jle1atXyvkMAAKDYKtJTZsXV2LFjNWrUKMf7pKQkQhFKFOZ4AEDuFNsRoqCgIEnSiRMnnJafOHHCsS4oKEgnT550Wp+WlqbTp087tcluG1f2cTVPT0/5+Pg4vQAAwM2r2AaimjVrKigoSGvXrnUsS0pK0pYtWxQaGipJCg0N1dmzZ7V9+3ZHm3Xr1ikjI0MhISGONhs3blRqaqqjTWxsrOrXr6/y5csX0t4AAIDirEgDUXJysnbu3KmdO3dKujyReufOnTp69KhsNptGjhypl19+WcuXL9ePP/6ooUOHqkqVKurbt68kqWHDhurVq5cefPBBbd26VZs2bdKIESM0aNAgValSRZJ07733ym63KyoqSnv27NGiRYs0c+ZMp1NiAADA2op0DtG2bdvUtWtXx/vMkDJs2DDFxMTo6aef1vnz5/XQQw/p7Nmz6tChg1avXi0vLy/HZ+bPn68RI0aoe/fucnNz04ABA/Tvf//bsd7X11dffPGFoqOj1bp1a1WqVEnjxo3jknsAAOBgM8aYoi6iuEtKSpKvr68SExOZT4QSgUnVKGgrIlYUdQnADeXm93exnUMEAABQWAhEAADA8ghEAADA8ghEAADA8ghEAADA8ghEAADA8ghEAADA8ghEAADA8ghEAADA8or00R0AgJIpu7uhc/dqlGSMEAEAAMsjEAEAAMsjEAEAAMsjEAEAAMsjEAEAAMsjEAEAAMvjsnughMvu8mcAQO4wQgQAACyPQAQAACyPQAQAACyPQAQAACyPQAQAACyPQAQAACyPy+6BEobL7AEg/zFCBAAALI9ABAAALI9TZgCAfHH16dwVESuKqBIg9xghAgAAlkcgAgAAlkcgAgAAlkcgAgAAlkcgAgAAlkcgAgAAlkcgAgAAlkcgAgAAlkcgAgAAlkcgAgAAlkcgAgAAlsezzIBi7OpnQwEACgYjRAAAwPIYIQIAFIjsRjhXRKwogkqAG2OECAAAWB6BCAAAWB6BCAAAWB5ziAAAhebqeUXMKUJxwQgRAACwPAIRAACwPAIRAACwPAIRAACwPAIRAACwPAIRAACwPAIRAACwPAIRAACwPAIRAACwPO5UDRQj2T0dHABQ8BghAgAAlkcgAgAAlkcgAgAAlkcgAgAAlkcgAgAAllesA9GECRNks9mcXg0aNHCsv3jxoqKjo1WxYkWVK1dOAwYM0IkTJ5y2cfToUYWHh6tMmTIKCAjQU089pbS0tMLeFQA
AUIwV+8vuGzdurC+//NLx3sPjfyU/+eSTWrVqlZYsWSJfX1+NGDFC/fv316ZNmyRJ6enpCg8PV1BQkDZv3qz4+HgNHTpUpUqV0iuvvFLo+wIAAIqnYh+IPDw8FBQUlGV5YmKi5syZo48++kjdunWTJM2dO1cNGzbUt99+q1tuuUVffPGFfvrpJ3355ZcKDAxUixYt9NJLL+mZZ57RhAkTZLfbC3t3AABAMVSsT5lJ0oEDB1SlShXVqlVLkZGROnr0qCRp+/btSk1NVY8ePRxtGzRooOrVqysuLk6SFBcXp6ZNmyowMNDRJiwsTElJSdqzZ881+0xJSVFSUpLTCwAA3LyK9QhRSEiIYmJiVL9+fcXHx+vFF19Ux44dtXv3biUkJMhut8vPz8/pM4GBgUpISJAkJSQkOIWhzPWZ665l0qRJevHFF/N3ZwAAWWR3d/YVESuKoBJYXbEORLfffrvjv5s1a6aQkBAFBwdr8eLFKl26dIH1O3bsWI0aNcrxPikpSdWqVSuw/gAAQNEq1oHoan5+fqpXr54OHjyo2267TZcuXdLZs2edRolOnDjhmHMUFBSkrVu3Om0j8yq07OYlZfL09JSnp2f+7wBwBZ5bBgDFR7GfQ3Sl5ORkHTp0SJUrV1br1q1VqlQprV271rF+3759Onr0qEJDQyVJoaGh+vHHH3Xy5ElHm9jYWPn4+KhRo0aFXj8AACieivUI0ZgxY9SnTx8FBwfr+PHjGj9+vNzd3RURESFfX19FRUVp1KhRqlChgnx8fPTYY48pNDRUt9xyiySpZ8+eatSokYYMGaLJkycrISFBzz//vKKjoxkBAgAADsU6EB07dkwRERH6888/5e/vrw4dOujbb7+Vv7+/JGn69Olyc3PTgAEDlJKSorCwML399tuOz7u7u2vlypV65JFHFBoaqrJly2rYsGGaOHFiUe0SAAAohmzGGFPURRR3SUlJ8vX1VWJionx8fIq6HNwkmEMEZI+rzJBfcvP7u0TNIQIAACgIBCIAAGB5BCIAAGB5BCIAAGB5xfoqM+BmwiRqIGeu/rfCJGsUBgIRAKBY43lnKAycMgMAAJZHIAIAAJZHIAIAAJZHIAIAAJZHIAIAAJZHIAIAAJbHZfdAAeCeQwBQsjBCBAAALI9ABAAALI9ABAAALI9ABAAALI9ABAAALI9ABAAALI/L7gEAJc7Vt7ZYEbGiiCrBzYIRIgAAYHkEIgAAYHkEIgAAYHnMIQLyAY/qAICSjREiAABgeQQiAABgeQQiAABgeQQiAABgeQQiAABgeQQiAABgeVx2D+QSl9gDwM2HQAQAKPGy+0OF55shNzhlBgAALI9ABAAALI9ABAAALI85RACAm9LV84qYU4TrIRABN8BVZQBw8+OUGQAAsDwCEQAAsDwCEQAAsDzmEAEALIGbN+J6CETAFZhADVgLV6IhE4EIlkYAAgBIzCECAABghAjWwWgQAOBaGCECAACWxwgRAAD/H1eiWRcjRAAAwPIIRAAAwPIIRAAAwPKYQwQAwHVw80ZrIBDhpsVl9gCAnCIQAQCQC1yJdnMiEOGmwGgQACAvCEQokQhAAID8xFVmAADA8hghAgAgj7gSreQjEKHY4/QYgJKGidclD6fMAACA5VlqhOitt97SlClTlJCQoObNm+uNN95Qu3btirosXIURIQA3I06rFW+WCUSLFi3SqFGjNHv2bIWEhGjGjBkKCwvTvn37FBAQUNTlWRbhB4BVcVqteLEZY0xRF1EYQkJC1LZtW7355puSpIyMDFWrVk2PPfaYnn322et+NikpSb6+vkpMTJSPj09hlHvTIgABQP4iRF1bbn5/W2KE6NKlS9q+fbvGjh3rWObm5qYePXooLi6uCCu7uTAcDAAoqSwRiP744w+lp6crMDDQaXlgYKD27t2bpX1KSopSUlIc7xMTEyVdTpolyT1L7smybPHdi3PULj/0mtOrQLYLAPif/Pp/bU5+P7japqhk/t7Oyck
wSwSi3Jo0aZJefPHFLMurVatWBNXkL98HfIu6BABAMZST3w/51aawnTt3Tr6+16/LEoGoUqVKcnd314kTJ5yWnzhxQkFBQVnajx07VqNGjXK8z8jI0OnTp1WxYkXZbLYCr/dmlZSUpGrVqum3335jLlY+4HjmL45n/uFY5i+Op+uMMTp37pyqVKlyw7aWCER2u12tW7fW2rVr1bdvX0mXQ87atWs1YsSILO09PT3l6enptMzPz68QKrUGHx8f/lHnI45n/uJ45h+OZf7ieLrmRiNDmSwRiCRp1KhRGjZsmNq0aaN27dppxowZOn/+vO67776iLg0AABQxywSigQMH6tSpUxo3bpwSEhLUokULrV69OstEawAAYD2WCUSSNGLEiGxPkaFweHp6avz48VlOR8I1HM/8xfHMPxzL/MXxLByWuTEjAADAtfBwVwAAYHkEIgAAYHkEIgAAYHkEIgAAYHkEIhSo06dPKzIyUj4+PvLz81NUVJSSk5Ov2/6xxx5T/fr1Vbp0aVWvXl2PP/6443lyVpfb4ylJ77zzjrp06SIfHx/ZbDadPXu2cIotht566y3VqFFDXl5eCgkJ0datW6/bfsmSJWrQoIG8vLzUtGlTffbZZ4VUafGXm2O5Z88eDRgwQDVq1JDNZtOMGTMKr9ASIjfH891331XHjh1Vvnx5lS9fXj169Ljhdxk3RiBCgYqMjNSePXsUGxurlStXauPGjXrooYeu2f748eM6fvy4Xn/9de3evVsxMTFavXq1oqKiCrHq4iu3x1OSLly4oF69eumf//xnIVVZPC1atEijRo3S+PHj9f3336t58+YKCwvTyZMns22/efNmRUREKCoqSjt27FDfvn3Vt29f7d69u5ArL35yeywvXLigWrVq6dVXX832cUlWl9vjuWHDBkVERGj9+vWKi4tTtWrV1LNnT/3++++FXPlNxgAF5KeffjKSzHfffedY9vnnnxubzWZ+//33HG9n8eLFxm63m9TU1IIos8TI6/Fcv369kWTOnDlTgFUWX+3atTPR0dGO9+np6aZKlSpm0qRJ2ba/5557THh4uNOykJAQ849//KNA6ywJcnssrxQcHGymT59egNWVPHk5nsYYk5aWZry9vc28efMKqkRLYIQIBSYuLk5+fn5q06aNY1mPHj3k5uamLVu25Hg7iYmJ8vHxkYeHpe4jmkV+HU8runTpkrZv364ePXo4lrm5ualHjx6Ki4vL9jNxcXFO7SUpLCzsmu2twpVjiWvLj+N54cIFpaamqkKFCgVVpiUQiFBgEhISFBAQ4LTMw8NDFSpUUEJCQo628ccff+ill1664WkhK8iP42lVf/zxh9LT07M8qicwMPCaxy4hISFX7a3ClWOJa8uP4/nMM8+oSpUqWQI8codAhFx79tlnZbPZrvvau3dvnvtJSkpSeHi4GjVqpAkTJuS98GKqsI4ngJvPq6++qoULF2rZsmXy8vIq6nJKNGufg4BLRo8ereHDh1+3Ta1atRQUFJRlUmBaWppOnz59w4mV586dU69eveTt7a1ly5apVKlSeS272CqM42l1lSpVkru7u06cOOG0/MSJE9c8dkFBQblqbxWuHEtcW16O5+uvv65XX31VX375pZo1a1aQZVoCgQi55u/vL39//xu2Cw0N1dmzZ7V9+3a1bt1akrRu3TplZGQoJCTkmp9LSkpSWFiYPD09tXz58pv+r56CPp6Q7Ha7WrdurbVr16pv376SpIyMDK1du/aaD3wODQ3V2rVrNXLkSMey2NhYhYaGFkLFxZcrxxLX5urxnDx5sv71r39pzZo1TvMKkQdFPasbN7devXqZli1bmi1btphvvvnG1K1b10RERDjWHzt2zNSvX99s2bLFGGNMYmKiCQkJMU2bNjUHDx408fHxjldaWlpR7UaxkdvjaYwx8fHxZseOHebdd981kszGjRvNjh07zJ9//lkUu1BkFi5caDw9PU1MTIz56aefzEMPPWT8/Px
MQkKCMcaYIUOGmGeffdbRftOmTcbDw8O8/vrr5ueffzbjx483pUqVMj/++GNR7UKxkdtjmZKSYnbs2GF27NhhKleubMaMGWN27NhhDhw4UFS7UKzk9ni++uqrxm63m//+979O/488d+5cUe3CTYFAhAL1559/moiICFOuXDnj4+Nj7rvvPqd/tIcPHzaSzPr1640x/7s0PLvX4cOHi2YnipHcHk9jjBk/fny2x3Pu3LmFvwNF7I033jDVq1c3drvdtGvXznz77beOdZ07dzbDhg1zar948WJTr149Y7fbTePGjc2qVasKueLiKzfHMvN7efWrc+fOhV94MZWb4xkcHJzt8Rw/fnzhF34TsRljTOGNRwEAABQ/XGUGAAAsj0AEAAAsj0AEAAAsj0AEAAAsj0AEAAAsj0AEAAAsj0AEAAAsj0AEoEjFxMTIz8+vwPs5cuSIbDabdu7cWeB95dXw4cMdj3EAUDgIRAByJS4uTu7u7goPD8/1Z2vUqKEZM2Y4LRs4cKD279+fT9Vdll2gqFatmuLj49WkSZN87etKjz32mBo2bJjtuqNHj8rd3V3Lly8vsP4BuI5ABCBX5syZo8cee0wbN27U8ePH87y90qVLKyAgIB8quz53d3cFBQXJw6PgnmkdFRWlvXv3avPmzVnWxcTEKCAgQL179y6w/gG4jkAEIMeSk5O1aNEiPfLIIwoPD1dMTEyWNitWrFDbtm3l5eWlSpUqqV+/fpKkLl266Ndff9WTTz4pm80mm80myfmU2f79+2Wz2bR3716nbU6fPl21a9eWJKWnpysqKko1a9ZU6dKlVb9+fc2cOdPRdsKECZo3b54+/fRTRz8bNmzI9pTZV199pXbt2snT01OVK1fWs88+q7S0NMf6Ll266PHHH9fTTz+tChUqKCgoSBMmTLjm8WnRooVatWql9957z2m5MUYxMTEaNmyYbDbbdevPTnYjay1atHCq5ezZs3rggQfk7+8vHx8fdevWTbt27brudgH8D4EIQI4tXrxYDRo0UP369TV48GC99957uvJxiKtWrVK/fv3Uu3dv7dixQ2vXrlW7du0kSUuXLlXVqlU1ceJExcfHKz4+Psv269WrpzZt2mj+/PlOy+fPn697771XkpSRkaGqVatqyZIl+umnnzRu3Dj985//1OLFiyVJY8aM0T333KNevXo5+rn11luz9PX777+rd+/eatu2rXbt2qVZs2Zpzpw5evnll53azZs3T2XLltWWLVs0efJkTZw4UbGxsdc8RlFRUVq8eLHOnz/vWLZhwwYdPnxY999//w3rd9Xdd9+tkydP6vPPP9f27dvVqlUrde/eXadPn87TdgHLKNpnywIoSW699VYzY8YMY4wxqampplKlSmb9+vWO9aGhoSYyMvKanw8ODjbTp093WjZ37lzj6+vreD99+nRTu3Ztx/t9+/YZSebnn3++5najo6PNgAEDHO+HDRtm7rzzTqc2mU9c37FjhzHGmH/+85+mfv36JiMjw9HmrbfeMuXKlTPp6enGmMtPGe/QoYPTdtq2bWueeeaZa9Zy5swZ4+XlZebOnetYNmTIkCzbyU392R235s2bO55u/vXXXxsfHx9z8eJFpza1a9c2//d//3fNfgH8DyNEAHJk37592rp1qyIiIiRJHh4eGjhwoObMmeNos3PnTnXv3j1P/QwaNEhHjhzRt99+K+ny6FCrVq3UoEEDR5u33npLrVu3lr+/v8qVK6d33nlHR48ezVU/P//8s0JDQx2n7iSpffv2Sk5O1rFjxxzLmjVr5vS5ypUr6+TJk9fcrp+fn/r37+84bZaUlKSPP/5YUVFR+Vr/lXbt2qXk5GRVrFhR5cqVc7wOHz6sQ4cOubxdwEoKbnYhgJvKnDlzlJaWpipVqjiWGWPk6empN998U76+vipdunSe+wkKClK3bt300Ucf6ZZbbtFHH32kRx55xLF+4cKFGjNmjKZOnarQ0FB5e3trypQp2rJlS57
7zk6pUqWc3ttsNmVkZFz3M1FRUerevbsOHjyo9evXy93dXXfffbfL9bu5uTmdmpSk1NRUx38nJyercuXK2rBhQ5bPFsYtDYCbAYEIwA2lpaXp/fff19SpU9WzZ0+ndX379tWCBQv08MMPq1mzZlq7dq3uu+++bLdjt9uVnp5+w/4iIyP19NNPKyIiQr/88osGDRrkWLdp0ybdeuutevTRRx3Lrh4FyUk/DRs21McffyxjjGOUaNOmTfL29lbVqlVvWOP1dO3aVTVr1tTcuXO1fv16DRo0SGXLls1x/Vfz9/d3mnOVlJSkw4cPO963atVKCQkJ8vDwUI0aNfJUO2BVnDIDcEMrV67UmTNnFBUVpSZNmji9BgwY4DhtNn78eC1YsEDjx4/Xzz//rB9//FGvvfaaYzs1atTQxo0b9fvvv+uPP/64Zn/9+/fXuXPn9Mgjj6hr165Oo1J169bVtm3btGbNGu3fv18vvPCCvvvuO6fP16hRQz/88IP27dunP/74w2k0JdOjjz6q3377TY899pj27t2rTz/9VOPHj9eoUaPk5pa3/zXabDbdf//9mjVrluLi4pxOl+Wk/qt169ZNH3zwgb7++mv9+OOPGjZsmNzd3R3re/ToodDQUPXt21dffPGFjhw5os2bN+u5557Ttm3b8rQvgFUQiADc0Jw5c9SjRw/5+vpmWTdgwABt27ZNP/zwg7p06aIlS5Zo+fLlatGihbp166atW7c62k6cOFFHjhxR7dq15e/vf83+vL291adPH+3atUuRkZFO6/7xj3+of//+GjhwoEJCQvTnn386jbZI0oMPPqj69eurTZs28vf316ZNm7L08be//U2fffaZtm7dqubNm+vhhx9WVFSUnn/++dwenmwNHz5ciYmJaty4sUJCQnJV/9XGjh2rzp0764477lB4eLj69u3ruA2BdDmAffbZZ+rUqZPuu+8+1atXT4MGDdKvv/6qwMDAfNkf4GZnM1efmAYAALAYRogAAIDlEYgAAIDlEYgAAIDlEYgAAIDlEYgAAIDlEYgAAIDlEYgAAIDlEYgAAIDlEYgAAIDlEYgAAIDlEYgAAIDlEYgAAIDl/T9ZY3gWrIUX7gAAAABJRU5ErkJggg==", - "text/plain": [ - "
" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Gradient for lstm layer: torch.Size([3, 5, 256])\n" - ] - }, - { - "data": { - "image/png": "iVBORw0KGgoAAAANSUhEUgAAAjIAAAHHCAYAAACle7JuAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjkuMiwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8hTgPZAAAACXBIWXMAAA9hAAAPYQGoP6dpAABER0lEQVR4nO3deVyVZf7/8fdBFtlJREDFfTeXSUcjdSql1ByzZHJJc+Nrmdi4TpPTotYUmplZodWMYU2ZSmlNY+WWWjnue7mb5QZomiAqi3D9/ujHGY8swhE83Ph6Ph7nUee6t8993wd5c5/rum+bMcYIAADAgtxcXQAAAICzCDIAAMCyCDIAAMCyCDIAAMCyCDIAAMCyCDIAAMCyCDIAAMCyCDIAAMCyCDIAAMCyCDLADTRkyBDVqVPHoc1ms2ny5Mkuqac03aj9WLNmjWw2m9asWWNvu+uuu3TrrbeW+bYl6aeffpLNZtO8efNuyPau9q9//UtNmjSRh4eHgoKCynRbkydPls1mK9NtANeLIIObwpEjRzRq1Cg1atRIPj4+8vHxUbNmzRQbG6tdu3a5urwyN3/+fL322mvFnr9OnTqy2Wyy2Wxyc3NTUFCQWrRooUcffVQbN250WV03Unmsbd++fRoyZIjq16+vf/zjH3rnnXdcXVI+J0+e1OTJk7Vjxw5Xl4KbhLurCwDK2n/+8x/17dtX7u7uGjBggFq1aiU3Nzft27dPixcv1pw5c3TkyBHVrl3bJfVdunRJ7u5l+6M4f/58ff/99xozZkyxl2ndurXGjx8vSTp//rz27t2rxMRE/eMf/9DYsWP16quvOszvzH44U9cf/vAHXbp0SZ6eniXaVkkVVlvt2rV16dIleXh4lOn2C7JmzRrl5uZq1qxZatCgwQ3ffnGcPHlSU6ZMUZ06ddS6dWtXl4ObAEEGFdrhw4fVr18/1a5dW6tWrVJ4eLjD9GnTpmn27Nlycyv64uSFCxfk6+tbJjVWrly5TNZ7vWrUqKGBAwc6tE2bNk0PP/ywZs6cqYYNG+rxxx+3Tyvr/cjIyJCnp6fc3NxcesxsNpvLtn/q1ClJKtWvlC5evCgfH59SWx9wwxmgAnv00UeNJLNhw4ZiLzN48GDj6+trDh06ZLp37278/PxMr169jDHGfPPNN+ZPf/qTiYiIMJ6enqZmzZpmzJgx5uLFi/nWs2TJEtO8eXPj5eVlmjdvbhYvXmwGDx5sateu7TCfJDNp0iSHtuPHj5uhQ4eaatWqGU9PT9OsWTMzd+5ch3lWr15tJJmFCxeav//976ZGjRrGy8vLdO7c2Rw8eNA+35133mkkObyuruFqtWvXNj169Chw2vnz502VKlVMjRo1TG5ubqH7kZaWZkaPHm1q165tPD09TUhIiImKijJbt269Zl15+/bRRx+Zp59+2lSvXt3YbDbz66+/2qetXr3aYR+bN29utmzZYiIjI03lypVNnTp1zJw5cxxqT0hIMJLMkSNHCjyWeessqrYjR44YSSYhIcFhHatWrTIdO3Y0Pj4+JjAw0Nx///1mz549DvNMmjTJSDIHDx40gwcPNoGBgSYgIMAMGTLEXLhwoYgz8ts5ubqmK493fHy8adasmfH09DTh4eFm5MiR5tdff3VYx5XHqVOnTsbb29uMHj260G3m1Xul5cuXmw4dOpjAwEDj6+trGjVqZCZOnOhwHK9+5R2rvO3v3LnT/OEPfzDe3t6mfv36JjEx0RhjzJo1a0y7du1M5cqVTaNGjcyKFSuKPCaAMcZ
wRQYV2n/+8x81aNBA7du3L9Fyly9fVteuXdWxY0e98sor9r9YExMTdfHiRT3++OMKDg7Wpk2b9MYbb+j48eNKTEy0L798+XJFR0erWbNmiouL05kzZzR06FDVrFnzmttOSUnR7bffLpvNplGjRikkJERffvmlYmJilJaWlu+rjqlTp8rNzU0TJkxQamqqXn75ZQ0YMMDel+Xpp59Wamqqjh8/rpkzZ0qS/Pz8SnQ8ruTn56cHH3xQc+fO1Z49e9S8efMC5xsxYoQ+/vhjjRo1Ss2aNdOZM2f03Xffae/evbrtttuKVdcLL7wgT09PTZgwQZmZmUV+nfTrr7/qvvvuU58+fdS/f38tWrRIjz/+uDw9PTVs2LAS7WNJj9nKlSvVvXt31atXT5MnT9alS5f0xhtvqEOHDtq2bVu+Dt59+vRR3bp1FRcXp23btumf//ynqlWrpmnTphW6jddee03vv/++lixZojlz5sjPz08tW7aU9Fun3ClTpigqKkqPP/649u/frzlz5mjz5s1at26dw9dgZ86cUffu3dWvXz8NHDhQoaGhxT4uP/zwg/74xz+qZcuWev755+Xl5aVDhw5p3bp1kqSmTZvq+eef13PPPadHH31UnTp1kiTdcccd9nX8+uuv+uMf/6h+/frpoYce0pw5c9SvXz99+OGHGjNmjEaMGKGHH35Y06dP15/+9CcdO3ZM/v7+xa4RNyFXJymgrKSmphpJ5oEHHsg37ddffzWnT5+2v668ojJ48GAjyTz11FP5livoyktcXJyx2Wzm559/tre1bt3ahIeHm3Pnztnbli9fXuDVEF31l3VMTIwJDw83v/zyi8N8/fr1M4GBgfYa8v76bdq0qcnMzLTPN2vWLCPJ7N69297Wo0ePa16FuVJRV2SMMWbmzJlGkvnss88K3Y/AwEATGxtb5HYKqytv3+rVq5fvmBd2RUaSmTFjhr0tMzPTtG7d2lSrVs1kZWUZY4p/Raao2gq6IpO3nTNnztjbdu7cadzc3MygQYPsbXlXOIYNG+awzgcffNAEBwfn29bV8pY/ffq0ve3UqVPG09PT3HvvvSYnJ8fe/uabbxpJ5t1337W35R2nt95665rbunJ7efLO+5Xbv9rmzZsLvGJ15fbnz59vb9u3b5+RZNzc3ByunC5btqzQ9QBXYtQSKqy0tDRJBf8lfddddykkJMT+io+PzzfPlf0/8nh7e9v//8KFC/rll190xx13yBij7du3S5KSkpK0Y8cODR48WIGBgfb577nnHjVr1qzImo0x+uSTT9SzZ08ZY/TLL7/YX127dlVqaqq2bdvmsMzQoUMdrlTk/RX8448/Frmt65F3TM+fP1/oPEFBQdq4caNOnjzp9HYGDx7scMyL4u7urscee8z+3tPTU4899phOnTqlrVu3Ol3DteSd7yFDhqhKlSr29pYtW+qee+7RF198kW+ZESNGOLzv1KmTzpw5Y//MlsTKlSuVlZWlMWPGOPT1Gj58uAICArR06VKH+b28vDR06NASb0f6X9+czz77TLm5uU6tw8/PT/369bO/b9y4sYKCgtS0aVOHK6d5/1+Wn2NUDAQZVFh5l6PT09PzTXv77be1YsUKffDBBwUu6+7uXuDXQEePHrX/wvLz81NISIjuvPNOSVJqaqok6eeff5YkNWzYMN/yjRs3LrLm06dP69y5c3rnnXccglZISIj9l09eh888tWrVcnh/yy23SPrtEn5ZyTumRV3yf/nll/X9998rIiJC7dq10+TJk0v8S6lu3brFnrd69er5OmQ3atRI0m/3fikreee7oHPbtGlT/fLLL7pw4YJDe2mes8K27+npqXr16tmn56lRo4bTI7769u2rDh066P/+7/8UGhqqfv36adGiRSUKNTVr1sx3b5rAwEBFRETka5PK9nOMioE+MqiwAgMDFR4eru+//z7ftLy/9gr7Befl5ZVvJFNOTo7uuecenT17Vn/961/VpEkT+fr66sSJExo
yZIjTf6FeKW8dAwcO1ODBgwucJ69fRJ5KlSoVOJ8x5rrrKUzeMS1qCHCfPn3UqVMnLVmyRMuXL9f06dM1bdo0LV68WN27dy/Wdop7Naa4Cru5W05OTqlu51pccc7yXM8x9fb21jfffKPVq1dr6dKl+uqrr7Rw4UJ17txZy5cvL3S/rlTYPK48JrA2ggwqtB49euif//ynNm3apHbt2l3Xunbv3q0DBw7ovffe06BBg+ztK1ascJgv7340Bw8ezLeO/fv3F7mNkJAQ+fv7KycnR1FRUddV75VK8+6s6enpWrJkiSIiItS0adMi5w0PD9fIkSM1cuRInTp1SrfddptefPFFe5ApzbpOnjyZb5j8gQMHJMne2Tbvyse5c+cclr36qkVJass73wWd23379qlq1aplNnT/6u3Xq1fP3p6VlaUjR46U6udIktzc3NSlSxd16dJFr776ql566SU9/fTTWr16taKiorgTMG44vlpChfbkk0/Kx8dHw4YNU0pKSr7pJflrL+8vxiuXMcZo1qxZDvOFh4erdevWeu+99+xfN0m/BZ49e/ZccxvR0dH65JNPCrySdPr06WLXeyVfX1+HWpx16dIlPfLIIzp79qyefvrpIq9wXL29atWqqXr16srMzCz1uqTfRpq9/fbb9vdZWVl6++23FRISojZt2kiS6tevL0n65ptvHGot6A65xa3tyvN9ZUD6/vvvtXz5ct13333O7lKxREVFydPTU6+//rrDZ3Pu3LlKTU1Vjx49Sm1bZ8+ezdeWd9O7vPOaF9quDotAWeGKDCq0hg0bav78+erfv78aN25sv7OvMUZHjhzR/Pnz5ebmVqxh0U2aNFH9+vU1YcIEnThxQgEBAfrkk08K/A4/Li5OPXr0UMeOHTVs2DCdPXtWb7zxhpo3b15gn50rTZ06VatXr1b79u01fPhwNWvWTGfPntW2bdu0cuXKAn+ZXEubNm20cOFCjRs3Tr///e/l5+ennj17FrnMiRMn7H2I0tPTtWfPHiUmJio5OVnjx4936Fh7tfPnz6tmzZr605/+pFatWsnPz08rV67U5s2bNWPGjOuqqzDVq1fXtGnT9NNPP6lRo0ZauHChduzYoXfeecc+/Lh58+a6/fbbNXHiRJ09e1ZVqlTRggULdPny5XzrK0lt06dPV/fu3RUZGamYmBj78OvAwMAyf/5USEiIJk6cqClTpqhbt266//77tX//fs2ePVu///3v893U8Ho8//zz+uabb9SjRw/Vrl1bp06d0uzZs1WzZk117NhR0m9hMSgoSG+99Zb8/f3l6+ur9u3bl6i/E1AiLhotBdxQhw4dMo8//rhp0KCBqVy5svH29jZNmjQxI0aMMDt27HCYN++GeAXZs2ePiYqKMn5+fqZq1apm+PDhZufOnQUOE/3kk09M06ZNjZeXl2nWrFmJboiXkpJiYmNjTUREhPHw8DBhYWGmS5cu5p133rHPkzdkOO9mYnkKGh6cnp5uHn74YRMUFFTsG+Lp/9/MzGazmYCAANO8eXMzfPhws3HjxgKXuXI/MjMzzV/+8hfTqlUr4+/vb3x9fU2rVq3M7NmzHZYprK7C9u3Kade6IV7t2rXNm2++mW/5w4cPm6ioKOPl5WVCQ0PN3/72N7NixYp86yystsJuiLdy5UrToUMH4+3tbQICAkzPnj0LvSHe1cOXCxsWfrXCljfmt+HWTZo0MR4eHiY0NNQ8/vjjhd4Qr7iuHn69atUq06tXL1O9enXj6elpqlevbvr3728OHDjgsNxnn31mmjVrZtzd3Qu8Id7VChvuL+maQ/gBmzH0pAIAANZEHxkAAGBZBBkAAGBZBBkAAGBZBBkAAGBZBBkAAGBZBBkAAGBZFf6GeLm5uTp58qT8/f25dTYAABZhjNH58+dVvXr1fM++u1KFDzInT57M91RVAABgDceOHSvy7usVPsj4+/tL+u1ABAQEuLgaAABQHGlpaYqIiLD/Hi9MhQ8yeV8nBQQ
EEGQAALCYa3ULobMvAACwLIIMAACwLIIMAACwrArfRwaoaHJycpSdne3qMlBOeXh4qFKlSq4uA7hhCDKARRhjlJycrHPnzrm6FJRzQUFBCgsL495ZuCkQZACLyAsx1apVk4+PD7+kkI8xRhcvXtSpU6ckSeHh4S6uCCh7BBnAAnJycuwhJjg42NXloBzz9vaWJJ06dUrVqlXjayZUeHT2BSwgr0+Mj4+PiyuBFeR9TuhLhZsBQQawEL5OQnHwOcHNhCADAAAsiyADAMU0efJktW7d2v5+yJAheuCBB1xWDwA6+wKWt3t3zxu2rRYtPi/R/EOGDNF7772Xr/3gwYNq0KCBpN9GY7344otaunSpTpw4oWrVqql169YaM2aMunTpUui609LSNH36dC1evFg//vijfHx8VK9ePT300EMaPny4brnllpLtnBNmzZolY0yprnPy5Mn69NNPtWPHjlJdL1BREWQAlKlu3bopISHBoS0kJESS9NNPP6lDhw4KCgrS9OnT1aJFC2VnZ2vZsmWKjY3Vvn37Clzn2bNn1bFjR6WlpemFF15QmzZtFBgYqP379yshIUHz589XbGxsgctmZWXJ09OzVPYtMDCwVNYDwHl8tQSgTHl5eSksLMzhlTckeOTIkbLZbNq0aZOio6PVqFEjNW/eXOPGjdOGDRsKXeff/vY3HT16VJs2bdLQoUPVsmVL1a5dW/fee68++ugjjRw50j5vnTp19MILL2jQoEEKCAjQo48+Kkn661//qkaNGtmv5Dz77LP5RvlMnTpVoaGh8vf3V0xMjDIyMhymX/3VUm5uruLi4lS3bl15e3urVatW+vjjj+3T16xZI5vNplWrVqlt27by8fHRHXfcof3790uS5s2bpylTpmjnzp2y2Wyy2WyaN2+ejDGaPHmyatWqJS8vL1WvXl1//vOfnTshQAVDkAHgEmfPntVXX32l2NhY+fr65pseFBRU4HK5ublauHChBg4cqOrVqxc4z9Wjdl555RW1atVK27dv17PPPitJ8vf317x587Rnzx7NmjVL//jHPzRz5kz7MosWLdLkyZP10ksvacuWLQoPD9fs2bOL3Ke4uDi9//77euutt/TDDz9o7NixGjhwoNauXesw39NPP60ZM2Zoy5Ytcnd317BhwyRJffv21fjx49W8eXMlJSUpKSlJffv21SeffKKZM2fq7bff1sGDB/Xpp5+qRYsWRdYC3Cz4agnFUlA/jJL2l8DN6T//+Y/8/Pzs77t3767ExEQdOnRIxhg1adKkROs7ffq0zp07p8aNGzu0t2nTxn5lo2fPnvroo4/s0zp37qzx48c7zP/MM8/Y/79OnTqaMGGCFixYoCeffFKS9NprrykmJkYxMTGSpL///e9auXJlvqsyeTIzM/XSSy9p5cqVioyMlCTVq1dP3333nd5++23deeed9nlffPFF+/unnnpKPXr0UEZGhry9veXn5yd3d3eFhYXZ5z969KjCwsIUFRUlDw8P1apVS+3atSvRcUPpuvrfRP49dB2CDIAydffdd2vOnDn293lXX0q7k+ySJUuUlZWlv/71r7p06ZLDtLZt2+abf+HChXr99dd1+PBhpaen6/LlywoICLBP37t3r0aMGOGwTGRkpFavXl3g9g8dOqSLFy/qnnvucWjPysrS7373O4e2li1b2v8/7zECp06dUq1atQpc90MPPaTXXntN9erVU7du3XTfffepZ8+ecnfnn3CAnwIAZcrX19c+QulKDRs2lM1mK7RDb2FCQkIUFBRkv/qSJy8E+Pv753uw5tVfXa1fv14DBgzQlClT1LVrVwUGBmrBggWaMWNGiWq5Unp6uiRp6dKlqlGjhsM0Ly8vh/ceHh72/8/7Giw3N7fQdUdERGj//v1auXKlVqxYoZEjR2r69Olau3atw7qAmxF9ZAC4RJUqVdS1a1fFx8frwoUL+aYX9pRvNzc39enTRx988IFOnjzp1Lb/+9//qnbt2nr66af
Vtm1bNWzYUD///LPDPE2bNtXGjRsd2orqgNysWTN5eXnp6NGjatCggcMrIiKi2LV5enoqJycnX7u3t7d69uyp119/XWvWrNH69eu1e/fuYq8XqKi4IgPAZeLj49WhQwe1a9dOzz//vFq2bKnLly9rxYoVmjNnjvbu3Vvgci+99JLWrFljX65t27by9fXVrl27tH79et16661Fbrdhw4Y6evSoFixYoN///vdaunSplixZ4jDP6NGjNWTIELVt21YdOnTQhx9+qB9++EH16tUrcJ3+/v6aMGGCxo4dq9zcXHXs2FGpqalat26dAgICNHjw4GIdkzp16ujIkSPasWOHatasKX9/f3300UfKyclR+/bt5ePjow8++EDe3t6qXbt2sdYJVGQEGQAuU69ePW3btk0vvviixo8fr6SkJIWEhKhNmzYO/WquFhwcrE2bNmnatGmaPn26jhw5Ijc3NzVs2FB9+/bVmDFjitzu/fffr7Fjx2rUqFHKzMxUjx499Oyzz2ry5Mn2efr27avDhw/rySefVEZGhqKjo/X4449r2bJlha73hRdeUEhIiOLi4vTjjz8qKChIt912m/72t78V+5hER0dr8eLFuvvuu3Xu3DklJCQoKChIU6dO1bhx45STk6MWLVro888/50nogCSbKe0ed+VMWlqaAgMDlZqa6tCRDyVTUUYtWXWkQUZGho4cOaK6deuqcuXKri4H5Ryfl7Jn1X9LrKS4v7/pIwMAACyLIAMAACyLIAMAACyLIAMAACyLIANYSAXvm49SwucENxOCDGABeXdvvXjxoosrgRXkfU646y9uBtxHBrhBrmcIe6VKlRQUFKRTp05Jknx8fPI94RkwxujixYs6deqUgoKCVKlSJVeXBJQ5ggxgEXlPQ84LM0BhgoKCHJ6eDVRkBBnAImw2m8LDw1WtWjVlZ2e7uhyUUx4eHlyJwU2FIANYTKVKlfhFBQD/H519AQCAZRFkAACAZRFkAACAZRFkAACAZRFkAACAZbk0yEyePFk2m83h1aRJE/v0jIwMxcbGKjg4WH5+foqOjlZKSooLKwYAAOWJy6/ING/eXElJSfbXd999Z582duxYff7550pMTNTatWt18uRJ9e7d24XVAgCA8sTl95Fxd3cv8A6Uqampmjt3rubPn6/OnTtLkhISEtS0aVNt2LBBt99++40uFQAAlDMuvyJz8OBBVa9eXfXq1dOAAQN09OhRSdLWrVuVnZ2tqKgo+7xNmjRRrVq1tH79+kLXl5mZqbS0NIcXAAComFx6RaZ9+/aaN2+eGjdurKSkJE2ZMkWdOnXS999/r+TkZHl6eiooKMhhmdDQUCUnJxe6zri4OE2ZMqWMKy+/rn4wYXEfSgjc7Irzs1NRf76u54Gm5VlF3S84cmmQ6d69u/3/W7Zsqfbt26t27dpatGiRvL29nVrnxIkTNW7cOPv7tLQ0RUREXHetAACg/HH5V0tXCgoKUqNGjXTo0CGFhYUpKytL586dc5gnJSWlyKe6enl5KSAgwOEFAAAqpnIVZNLT03X48GGFh4erTZs28vDw0KpVq+zT9+/fr6NHjyoyMtKFVQIAgPLCpV8tTZgwQT179lTt2rV18uRJTZo0SZUqVVL//v0VGBiomJgYjRs3TlWqVFFAQICeeOIJRUZGMmIJAABIcnGQOX78uPr3768zZ84oJCREHTt21IYNGxQSEiJJmjlzptzc3BQdHa3MzEx17dpVs2fPdmXJAACgHHFpkFmwYEGR0ytXrqz4+HjFx8ffoIoAACjfGI3lqFz1kQEAACgJggwAALAsggwAALAsggwAALAsggwAALAsggwAALAslw6/BqSK+yA+wMpc+XPJ8GKUBFdkAACAZRFkAACAZRFkAACAZRFkAACAZRFkAACAZRFkAACAZRFkAACAZRFkAACAZRFkAACAZRFkAACAZRFkAACAZRFkAACAZfHQSMAJPOiy/OBcOOJ44GbDFRkAAGBZBBkAAGB
ZBBkAAGBZBBkAAGBZBBkAAGBZjFpCqbl6tITEiInS4MwoFM4FgJsFV2QAAIBlEWQAAIBlEWQAAIBlEWQAAIBlEWQAAIBlEWQAAIBlMfy6nGL4LICKoKB/y67Gv224HlyRAQAAlkWQAQAAlkWQAQAAlkWQAQAAlkWQAQAAlsWoJQClzpkHXZbVtm/09lFyxRnZVBHw2SwbXJEBAACWRZABAACWRZABAACWRZABAACWRZABAACWRZABAACWxfBrC7tZhiwWlyuH/FpBWR2f0vocVtShqRV1v4DygisyAADAsggyAADAsggyAADAsggyAADAsggyAADAshi1BEZVVACMYCsZjlfZ4LjCFbgiAwAALIsgAwAALKvcBJmpU6fKZrNpzJgx9raMjAzFxsYqODhYfn5+io6OVkpKiuuKBAAA5Uq5CDKbN2/W22+/rZYtWzq0jx07Vp9//rkSExO1du1anTx5Ur1793ZRlQAAoLxxeZBJT0/XgAED9I9//EO33HKLvT01NVVz587Vq6++qs6dO6tNmzZKSEjQf//7X23YsMGFFQMAgPLC5UEmNjZWPXr0UFRUlEP71q1blZ2d7dDepEkT1apVS+vXry90fZmZmUpLS3N4AQCAismlw68XLFigbdu2afPmzfmmJScny9PTU0FBQQ7toaGhSk5OLnSdcXFxmjJlSmmXetNhGGXF5+ywe1d+Nvhcoih8PkqfFW7P4bIrMseOHdPo0aP14YcfqnLlyqW23okTJyo1NdX+OnbsWKmtGwAAlC8uCzJbt27VqVOndNttt8nd3V3u7u5au3atXn/9dbm7uys0NFRZWVk6d+6cw3IpKSkKCwsrdL1eXl4KCAhweAEAgIrJZV8tdenSRbt373ZoGzp0qJo0aaK//vWvioiIkIeHh1atWqXo6GhJ0v79+3X06FFFRka6omQAAFDOuCzI+Pv769Zbb3Vo8/X1VXBwsL09JiZG48aNU5UqVRQQEKAnnnhCkZGRuv32211RMgAAKGfK9bOWZs6cKTc3N0VHRyszM1Ndu3bV7NmzXV0WAAAoJ8pVkFmzZo3D+8qVKys+Pl7x8fGuKQgAAJRr5SrI3CyuHs5W3oayAaXNCkM4rcjVw41dvX1AKgc3xAMAAHAWQQYAAFgWQQYAAFgWQQYAAFgWQQYAAFgWo5ZQpm7mEVqM6Lg5cd5xPUrrYa7OPgDWiv9Gc0UGAABYFkEGAABYFkEGAABYFkEGAABYFkEGAABYFkEGAABYFsOv4TRXDjOtKENcb+bh6UBJlNbPihV/5irKv3dlhSsyAADAsggyAADAsggyAADAsggyAADAsggyAADAshi1hBuK3vfXj2MIAP/DFRkAAGBZBBkAAGBZBBkAAGBZBBkAAGBZBBkAAGBZBBkAAGBZDL8uY8UZKlvc4bQMu634OMc3r+I8zJDPx41hxQdLFkdF/fxwRQYAAFgWQQYAAFgWQQYAAFgWQQYAAFgWQQYAAFgWo5ZQ7hTUs/5GjhqoqCMWiuNm3ndnWGEUiKt/nlA0K3yGyjuuyAAAAMsiyAAAAMsiyAAAAMtyKsj8+OOPpV0HAABAiTkVZBo0aKC7775bH3zwgTIyMkq7JgAAgGJxKshs27ZNLVu21Lhx4xQWFqbHHntMmzZtKu3aAAAAiuTU8OvWrVtr1qxZmjFjhv79739r3rx56tixoxo1aqRhw4bpkUceUUhISGnXCpQIwxqvX1kew5v5/JTmw2RvVhyfsmHF43pdnX3d3d3Vu3dvJSYmatq0aTp06JAmTJigiIgIDRo0SElJSaVVJwAAQD7XFWS2bNmikSNHKjw8XK+++qomTJigw4cPa8WKFTp58qR69epVWnUCAADk49RXS6+++qoSEhK0f/9+3XfffXr//fd13333yc3tt1xUt25dzZs3T3Xq1CnNWgEAABw4FWTmzJmjYcOGaciQIQoPDy9wnmrVqmnu3LnXVRwAAEBRnAoyBw8evOY8np6eGjx4sDO
rBwAAKBangkxCQoL8/Pz00EMPObQnJibq4sWLN3WAsWKPbwA3F/6d+h9GkFmfU5194+LiVLVq1Xzt1apV00svvXTdRQEAABSHU0Hm6NGjqlu3br722rVr6+jRo9ddFAAAQHE4FWSqVaumXbt25WvfuXOngoODr7soAACA4nAqyPTv319//vOftXr1auXk5CgnJ0dff/21Ro8erX79+pV2jQAAAAVyqrPvCy+8oJ9++kldunSRu/tvq8jNzdWgQYPoIwMAAG4Yp4KMp6enFi5cqBdeeEE7d+6Ut7e3WrRoodq1a5d2fQAAAIVyKsjkadSokRo1alRatQA3HEMvURjOO0qivH1eyls9ZcmpIJOTk6N58+Zp1apVOnXqlHJzcx2mf/3116VSHAAAQFGc6uw7evRojR49Wjk5Obr11lvVqlUrh1dxzZkzRy1btlRAQIACAgIUGRmpL7/80j49IyNDsbGxCg4Olp+fn6Kjo5WSkuJMyQAAoAJy6orMggULtGjRIt13333XtfGaNWtq6tSpatiwoYwxeu+999SrVy9t375dzZs319ixY7V06VIlJiYqMDBQo0aNUu/evbVu3brr2i4AAKgYnO7s26BBg+veeM+ejt/hvfjii5ozZ442bNigmjVrau7cuZo/f746d+4s6bdHIzRt2lQbNmzQ7bffft3bBwAA1ubUV0vjx4/XrFmzZIwptUJycnK0YMECXbhwQZGRkdq6dauys7MVFRVln6dJkyaqVauW1q9fX+h6MjMzlZaW5vACAAAVk1NXZL777jutXr1aX375pZo3by4PDw+H6YsXLy72unbv3q3IyEhlZGTIz89PS5YsUbNmzbRjxw55enoqKCjIYf7Q0FAlJycXur64uDhNmTKlRPtTkRXUc71Fi89dUMn1uZl64ON/OO8ArsWpIBMUFKQHH3ywVApo3LixduzYodTUVH388ccaPHiw1q5d6/T6Jk6cqHHjxtnfp6WlKSIiojRKBQAA5YxTQSYhIaHUCriyv02bNm20efNmzZo1S3379lVWVpbOnTvncFUmJSVFYWFhha7Py8tLXl5epVYfAAAov5zqIyNJly9f1sqVK/X222/r/PnzkqSTJ08qPT39ugrKzc1VZmam2rRpIw8PD61atco+bf/+/Tp69KgiIyOvaxsAAKBicOqKzM8//6xu3brp6NGjyszM1D333CN/f39NmzZNmZmZeuutt4q1nokTJ6p79+6qVauWzp8/r/nz52vNmjVatmyZAgMDFRMTo3HjxqlKlSoKCAjQE088ocjISEYsAQAASU4GmdGjR6tt27bauXOngoOD7e0PPvighg8fXuz1nDp1SoMGDVJSUpICAwPVsmVLLVu2TPfcc48kaebMmXJzc1N0dLQyMzPVtWtXzZ4925mSAQBABeRUkPn222/13//+V56eng7tderU0YkTJ4q9nrlz5xY5vXLlyoqPj1d8fLwzZQIAgArOqSCTm5urnJycfO3Hjx+Xv7//dReFssWQVgAlxb8bKK+c6ux777336rXXXrO/t9lsSk9P16RJk677sQUAAADF5dQVmRkzZqhr165q1qyZMjIy9PDDD+vgwYOqWrWqPvroo9KuEQAAoEBOBZmaNWtq586dWrBggXbt2qX09HTFxMRowIAB8vb2Lu0aAQAACuRUkJEkd3d3DRw4sDRrAQAAKBGngsz7779f5PRBgwY5VQwAAEBJOH0fmStlZ2fr4sWL8vT0lI+PD0EGAADcEE6NWvr1118dXunp6dq/f786duxIZ18AAHDDOP2spas1bNhQU6dOzXe1BgAAoKyUWpCRfusAfPLkydJcJQAAQKGc6iPz73//2+G9MUZJSUl688031aFDh1IpDAAA4FqcCjIPPPCAw3ubzaaQkBB17txZM2bMKI26AAAArsnpZy0BAAC4mtM3xAMAADefqx8g2qLF5y6q5DdOBZlx48YVe95XX33VmU0AAABck1NBZvv27dq+fbuys7P
VuHFjSdKBAwdUqVIl3Xbbbfb5bDZb6VQJAABQAKeCTM+ePeXv76/33ntPt9xyi6TfbpI3dOhQderUSePHjy/VIgEAAAri1H1kZsyYobi4OHuIkaRbbrlFf//73xm1BAAAbhingkxaWppOnz6dr/306dM6f/78dRcFAABQHE59tfTggw9q6NChmjFjhtq1aydJ2rhxo/7yl7+od+/epVogYAVX9+JH+cL5ASoup4LMW2+9pQkTJujhhx9Wdnb2bytyd1dMTIymT59eqgUCAAAUxqkg4+Pjo9mzZ2v69Ok6fPiwJKl+/fry9fUt1eIAAACKcl0PjUxKSlJSUpIaNmwoX19fGWNKqy4AAIBrcirInDlzRl26dFGjRo103333KSkpSZIUExPD0GsAAHDDOBVkxo4dKw8PDx09elQ+Pj729r59++qrr74qteIAAACK4lQfmeXLl2vZsmWqWbOmQ3vDhg31888/l0phAAAA1+LUFZkLFy44XInJc/bsWXl5eV13UQAAAMXhVJDp1KmT3n//fft7m82m3Nxcvfzyy7r77rtLrTgAAICiOPXV0ssvv6wuXbpoy5YtysrK0pNPPqkffvhBZ8+e1bp160q7RgAAgAI5dUXm1ltv1YEDB9SxY0f16tVLFy5cUO/evbV9+3bVr1+/tGsEAAAoUImvyGRnZ6tbt25666239PTTT5dFTQAAAMVS4isyHh4e2rVrV1nUAgAAUCJOfbU0cOBAzZ07t7RrAQAAKBGnOvtevnxZ7777rlauXKk2bdrke8bSq6++WirFAQAAFKVEQebHH39UnTp19P333+u2226TJB04cMBhHpvNVnrVAQAAFKFEQaZhw4ZKSkrS6tWrJf32SILXX39doaGhZVIcAABAUUrUR+bqp1t/+eWXunDhQqkWBAAAUFxOdfbNc3WwAQAAuJFKFGRsNlu+PjD0iQEAAK5Soj4yxhgNGTLE/mDIjIwMjRgxIt+opcWLF5dehQAAAIUoUZAZPHiww/uBAweWajEAAAAlUaIgk5CQUFZ1AAAAlNh1dfYFAABwJYIMAACwLIIMAACwLIIMAACwLIIMAACwLIIMAACwLIIMAACwLIIMAACwLIIMAACwLIIMAACwLIIMAACwLIIMAACwLJcGmbi4OP3+97+Xv7+/qlWrpgceeED79+93mCcjI0OxsbEKDg6Wn5+foqOjlZKS4qKKAQBAeeLSILN27VrFxsZqw4YNWrFihbKzs3XvvffqwoUL9nnGjh2rzz//XImJiVq7dq1Onjyp3r17u7BqAABQXtiMMcbVReQ5ffq0qlWrprVr1+oPf/iDUlNTFRISovnz5+tPf/qTJGnfvn1q2rSp1q9fr9tvv/2a60xLS1NgYKBSU1MVEBBQ1rug3bt7lvk2AAAoL1q0+LxM1lvc39/lqo9MamqqJKlKlSqSpK1btyo7O1tRUVH2eZo0aaJatWpp/fr1Ba4jMzNTaWlpDi8AAFAxlZsgk5ubqzFjxqhDhw669dZbJUnJycny9PRUUFCQw7yhoaFKTk4ucD1xcXEKDAy0vyIiIsq6dAAA4CLlJsjExsbq+++/14IFC65rPRMnTlRqaqr9dezYsVKqEAAAlDfuri5AkkaNGqX//Oc/+uabb1SzZk17e1hYmLKysnTu3DmHqzIpKSkKCwsrcF1eXl7y8vIq65IBAEA54NIrMsYYjRo1SkuWLNHXX3+tunXrOkxv06aNPDw8tGrVKnvb/v37dfToUUVGRt7ocgEAQDnj0isysbGxmj9/vj777DP5+/vb+70EBgbK29tbgYGBiomJ0bhx41SlShUFBAToiSeeUGRkZLFGLAEAgIrNpUFmzpw5kqS77rrLoT0hIUFDhgyRJM2cOVNubm6Kjo5WZmamunbtqtmzZ9/gSgEAQHnk0iBTnFvYVK5cWfHx8YqPj78BFQEAACspN6OWAAAASoogAwAALIsgAwAALIsgAwAALIsgAwAALIsgAwAALIsgAwA
ALIsgAwAALIsgAwAALIsgAwAALIsgAwAALIsgAwAALIsgAwAALMulT7+2ut27e7q6BAAAbmpckQEAAJZFkAEAAJZFkAEAAJZFkAEAAJZFkAEAAJZFkAEAAJZFkAEAAJZFkAEAAJZFkAEAAJZFkAEAAJZFkAEAAJZFkAEAAJZFkAEAAJZFkAEAAJZFkAEAAJZFkAEAAJZFkAEAAJZFkAEAAJZFkAEAAJZFkAEAAJZFkAEAAJZFkAEAAJZFkAEAAJZFkAEAAJZFkAEAAJZFkAEAAJZFkAEAAJZFkAEAAJZFkAEAAJZFkAEAAJZFkAEAAJZFkAEAAJZFkAEAAJZFkAEAAJZFkAEAAJZFkAEAAJZFkAEAAJZFkAEAAJZFkAEAAJZFkAEAAJbl0iDzzTffqGfPnqpevbpsNps+/fRTh+nGGD333HMKDw+Xt7e3oqKidPDgQdcUCwAAyh2XBpkLFy6oVatWio+PL3D6yy+/rNdff11vvfWWNm7cKF9fX3Xt2lUZGRk3uFIAAFAeubty4927d1f37t0LnGaM0WuvvaZnnnlGvXr1kiS9//77Cg0N1aeffqp+/frdyFIBAEA5VG77yBw5ckTJycmKioqytwUGBqp9+/Zav369CysDAADlhUuvyBQlOTlZkhQaGurQHhoaap9WkMzMTGVmZtrfp6WllU2BAADA5crtFRlnxcXFKTAw0P6KiIhwdUkAAKCMlNsgExYWJklKSUlxaE9JSbFPK8jEiROVmppqfx07dqxM6wQAAK5TboNM3bp1FRYWplWrVtnb0tLStHHjRkVGRha6nJeXlwICAhxeAACgYnJpH5n09HQdOnTI/v7IkSPasWOHqlSpolq1amnMmDH6+9//roYNG6pu3bp69tlnVb16dT3wwAOuKxoAAJQbLg0yW7Zs0d13321/P27cOEnS4MGDNW/ePD355JO6cOGCHn30UZ07d04dO3bUV199pcqVK7uqZAAAUI7YjDHG1UWUpbS0NAUGBio1NbXUv2bavbtnqa4PAACradHi8zJZb3F/f5fbPjIAAADXQpABAACWRZABAACWRZABAACWRZABAACWRZABAACWRZABAACWRZABAACWRZABAACWRZABAACWRZABAACWRZABAACWRZABAACWRZABAACWRZABAACWRZABAACWRZABAACWRZABAACWRZABAACWRZABAACWRZABAACWRZABAACWRZABAACWRZABAACWRZABAACWRZABAACWRZABAACWRZABAACWRZABAACWRZABAACWRZABAACWRZABAACWRZABAACWRZABAACWRZABAACWRZABAACWRZABAACWRZABAACWRZABAACWRZABAACWRZABAACWRZABAACWRZABAACWRZABAACWRZABAACWRZABAACWRZABAACWRZABAACWRZABAACWRZABAACWRZABAACWRZABAACWRZABAACWRZABAACWZYkgEx8frzp16qhy5cpq3769Nm3a5OqSAABAOVDug8zChQs1btw4TZo0Sdu2bVOrVq3UtWtXnTp1ytWlAQAAFyv3QebVV1/V8OHDNXToUDVr1kxvvfWWfHx89O6777q6NAAA4GLlOshkZWVp69atioqKsre5ubkpKipK69evd2FlAACgPHB3dQFF+eWXX5STk6PQ0FCH9tDQUO3bt6/AZTIzM5WZmWl/n5qaKklKS0sr9frS07NLfZ0AAFhJWfx+vXK9xpgi5yvXQcYZcXFxmjJlSr72iIgIF1QDAEBFF1imaz9//rwCAwvfRrkOMlWrVlWlSpWUkpLi0J6SkqKwsLACl5k4caLGjRtnf5+bm6uzZ88qODhYNputTOstSlpamiIiInTs2DEFBAS4rA5cG+fKOjhX1sG5so7ycq6MMTp//ryqV69e5HzlOsh4enqqTZs2WrVqlR544AFJvwWTVatWadSoUQUu4+XlJS8vL4e2oKCgMq60+AICAvghtgjOlXVwrqyDc2U
d5eFcFXUlJk+5DjKSNG7cOA0ePFht27ZVu3bt9Nprr+nChQsaOnSoq0sDAAAuVu6DTN++fXX69Gk999xzSk5OVuvWrfXVV1/l6wAMAABuPuU+yEjSqFGjCv0qySq8vLw0adKkfF97ofzhXFkH58o6OFfWYbVzZTPXGtcEAABQTpXrG+IBAAAUhSADAAAsiyADAAAsiyADAAAsiyDjhLNnz2rAgAEKCAhQUFCQYmJilJ6eXuQyGRkZio2NVXBwsPz8/BQdHZ3vjsVHjx5Vjx495OPjo2rVqukvf/mLLl++XOD61q1bJ3d3d7Vu3bq0dqvCctX5Wrx4se655x6FhIQoICBAkZGRWrZsWZnso1XFx8erTp06qly5stq3b69NmzYVOX9iYqKaNGmiypUrq0WLFvriiy8cphtj9Nxzzyk8PFze3t6KiorSwYMHHeZx5vOAG3+ufvrpJ8XExKhu3bry9vZW/fr1NWnSJGVlZZXJ/lUkrvi5ypOZmanWrVvLZrNpx44dpbVLRTMosW7duplWrVqZDRs2mG+//dY0aNDA9O/fv8hlRowYYSIiIsyqVavMli1bzO23327uuOMO+/TLly+bW2+91URFRZnt27ebL774wlStWtVMnDgx37p+/fVXU69ePXPvvfeaVq1alfbuVTiuOl+jR48206ZNM5s2bTIHDhwwEydONB4eHmbbtm1ltq9WsmDBAuPp6Wneffdd88MPP5jhw4eboKAgk5KSUuD869atM5UqVTIvv/yy2bNnj3nmmWeMh4eH2b17t32eqVOnmsDAQPPpp5+anTt3mvvvv9/UrVvXXLp0yT6PM5+Hm50rztWXX35phgwZYpYtW2YOHz5sPvvsM1OtWjUzfvz4G7LPVuWqn6s8f/7zn0337t2NJLN9+/ay2k0HBJkS2rNnj5FkNm/ebG/78ssvjc1mMydOnChwmXPnzhkPDw+TmJhob9u7d6+RZNavX2+MMeaLL74wbm5uJjk52T7PnDlzTEBAgMnMzHRYX9++fc0zzzxjJk2aRJC5hvJwvq7UrFkzM2XKlOvdrQqhXbt2JjY21v4+JyfHVK9e3cTFxRU4f58+fUyPHj0c2tq3b28ee+wxY4wxubm5JiwszEyfPt0+/dy5c8bLy8t89NFHxhjnPg9wzbkqyMsvv2zq1q17PbtS4bnyXH3xxRemSZMm5ocffrihQYavlkpo/fr1CgoKUtu2be1tUVFRcnNz08aNGwtcZuvWrcrOzlZUVJS9rUmTJqpVq5bWr19vX2+LFi0c7ljctWtXpaWl6YcffrC3JSQk6Mcff9SkSZNKe9cqJFefryvl5ubq/PnzqlKlSmnsmqVlZWVp69atDsfYzc1NUVFR9mN8tfXr1zvML/12zPPmP3LkiJKTkx3mCQwMVPv27R3OW0k/Dzc7V52rgqSmpvLzUwRXnquUlBQNHz5c//rXv+Tj41Oau3VNBJkSSk5OVrVq1Rza3N3dVaVKFSUnJxe6jKenZ76HV4aGhtqXSU5OzvfYhbz3efMcPHhQTz31lD744AO5u1vipswu58rzdbVXXnlF6enp6tOnjzO7UqH88ssvysnJKfAYFnVeipo/77/Xmqekn4ebnavO1dUOHTqkN954Q4899phT+3EzcNW5MsZoyJAhGjFihMMfCTcKQeb/e+qpp2Sz2Yp87du3z2X15eTk6OGHH9aUKVPUqFEjl9VRXpT383W1+fPna8qUKVq0aFG+X6QAinbixAl169ZNDz30kIYPH+7qcnCVN954Q+fPn9fEiRNdsn3+rP//xo8fryFDhhQ5T7169RQWFqZTp045tF++fFlnz55VWFhYgcuFhYUpKytL586dc/grPyUlxb5MWFhYvp7leaNkwsLCdP78eW3ZskXbt2+3P3cqNzdXxhi5u7tr+fLl6ty5c0l22dLK+/m60oIFC/R///d/SkxMzHcJ92ZVtWpVVapUKd9IsCuP8dXCwsKKnD/vvykpKQoPD3eYJ29
0nzOfh5udq85VnpMnT+ruu+/WHXfcoXfeeed6d6dCc9W5+vrrr7V+/fp8z2Zq27atBgwYoPfee++69uuabkhPnAokr7Pgli1b7G3Lli0rVufRjz/+2N62b9++AjuPXtmz/O233zYBAQEmIyPD5OTkmN27dzu8Hn/8cdO4cWOze/duk56eXkZ7bG2uOl955s+fbypXrmw+/fTT0t41y2vXrp0ZNWqU/X1OTo6pUaNGkZ0S//jHPzq0RUZG5uuU+Morr9inp6amFtjZtySfB7jmXBljzPHjx03Dhg1Nv379zOXLl0tzlyosV5yrn3/+2eF307Jly4wk8/HHH5tjx46V9i7mQ5BxQrdu3czvfvc7s3HjRvPdd9+Zhg0bOgzfPH78uGncuLHZuHGjvW3EiBGmVq1a5uuvvzZbtmwxkZGRJjIy0j49bzjvvffea3bs2GG++uorExISUuDw6zyMWioeV52vDz/80Li7u5v4+HiTlJRkf507d+7G7Hg5t2DBAuPl5WXmzZtn9uzZYx599FETFBRkHwn2yCOPmKeeeso+/7p164y7u7t55ZVXzN69e82kSZMKHCYaFBRkPvvsM7Nr1y7Tq1evAodfF/V5QH6uOFfHjx83DRo0MF26dDHHjx93+BlC4Vz1c3WlI0eOMPy6vDtz5ozp37+/8fPzMwEBAWbo0KHm/Pnz9ul5J3H16tX2tkuXLpmRI0eaW265xfj4+JgHH3ww3w/kTz/9ZLp37268vb1N1apVzfjx4012dnahdRBkisdV5+vOO+80kvK9Bg8eXNa7bBlvvPGGqVWrlvH09DTt2rUzGzZssE+788478x2rRYsWmUaNGhlPT0/TvHlzs3TpUofpubm55tlnnzWhoaHGy8vLdOnSxezfv99hnmt9HlCwG32uEhISCvz54YuEa3PFz9WVbnSQsRljTNl+eQUAAFA2GLUEAAAsiyADAAAsiyADAAAsiyADAAAsiyADAAAsiyADAAAsiyADAAAsiyAD4IYZMmSIHnjgAfv7u+66S2PGjHFZPdfL6vUDFQFBBrhJJScna/To0WrQoIEqV66s0NBQdejQQXPmzNHFixdvSA2LFy/WCy+8UKrrvDosFaRnz57q1q1bgdO+/fZb2Ww27dq1q1TrAlA2ePo1cBP68ccf1aFDBwUFBemll15SixYt5OXlpd27d+udd95RjRo1dP/99xe4bHZ2tjw8PEqljipVqpTKekoqJiZG0dHROn78uGrWrOkwLSEhQW3btlXLli1dUhuAkuGKDHATGjlypNzd3bVlyxb16dNHTZs2Vb169dSrVy8tXbpUPXv2tM9rs9k0Z84c3X///fL19dWLL76onJwcxcTEqG7duvL29lbjxo01a9Ysh23k5ORo3LhxCgoKUnBwsJ588kld/USUq7+ayczM1IQJE1SjRg35+vqqffv2WrNmjX36vHnzFBQUpGXLlqlp06by8/NTt27dlJSUJEmaPHmy3nvvPX322Wey2Wyy2WwOy+f54x//qJCQEM2bN8+hPT09XYmJiYqJidGZM2fUv39/1ahRQz4+PmrRooU++uijIo+rzWbTp59+6tAWFBTksJ1jx46pT58+CgoKUpUqVdSrVy/99NNPRa4XQOEIMsBN5syZM1q+fLliY2Pl6+tb4Dw2m83h/eTJk/Xggw9q9+7dGjZsmHJzc1WzZk0lJiZqz549eu655/S3v/1NixYtsi8zY8YMzZs3T++++66+++47nT17VkuWLCmytlGjRmn9+vVasGCBdu3apYceekjdunXTwYMH7fNcvHhRr7zyiv71r3/pm2++0dGjRzVhwgRJ0oQJE9SnTx97uElKStIdd9yRbzvu7u4aNGiQ5s2b5xCuEhMTlZOTo/79+ysjI0Nt2rTR0qVL9f333+vRRx/VI488ok2bNl37IBciOztbXbt2lb+/v7799lutW7fOHsaysrKcXi9wU7shj6YEUG5s2LDBSDKLFy92aA8ODja+vr7
G19fXPPnkk/Z2SWbMmDHXXG9sbKyJjo62vw8PDzcvv/yy/X12drapWbOm6dWrl73tzjvvNKNHjzbGGPPzzz+bSpUqmRMnTjist0uXLmbixInGmP89EfnQoUP26fHx8SY0NNT+fvDgwQ7bKMzevXvzPfW8U6dOZuDAgYUu06NHDzN+/PgC6zfmt2O1ZMkSh2UCAwNNQkKCMcaYf/3rX6Zx48YmNzfXPj0zM9N4e3ubZcuWXbNmAPnRRwaAJGnTpk3Kzc3VgAEDlJmZ6TCtbdu2+eaPj4/Xu+++q6NHj+rSpUvKyspS69atJUmpqalKSkpS+/bt7fO7u7urbdu2+b5eyrN7927l5OSoUaNGDu2ZmZkKDg62v/fx8VH9+vXt78PDw3Xq1KkS72+TJk10xx136N1339Vdd92lQ4cO6dtvv9Xzzz8v6bevxl566SUtWrRIJ06cUFZWljIzM+Xj41PibeXZuXOnDh06JH9/f4f2jIwMHT582On1Ajczggxwk2nQoIFsNpv279/v0F6vXj1Jkre3d75lrv4KasGCBZowYYJmzJihyMhI+fv7a/r06dq4caPTdaWnp6tSpUraunWrKlWq5DDNz8/P/v9XdzS22WyFhqNriYmJ0RNPPKH4+HglJCSofv36uvPOOyVJ06dP16xZs/Taa6+pRYsW8vX11ZgxY4r8CqigWrKzsx32sU2bNvrwww/zLRsSEuLUPgA3O/rIADeZ4OBg3XPPPXrzzTd14cIFp9axbt063XHHHRo5cqR+97vfqUGDBg5XFAIDAxUeHu4QbC5fvqytW7cWus7f/e53ysnJ0alTp9SgQQOHV1hYWLFr8/T0VE5OTrHm7dOnj9zc3DR//ny9//77GjZsmL1/0Lp169SrVy8NHDhQrVq1Ur169XTgwIEi1xcSEmLveCxJBw8edBjKftttt+ngwYOqVq1avn0MDAws9j4C+B+CDHATmj17ti5fvqy2bdtq4cKF2rt3r/bv368PPvhA+/bty3dF5GoNGzbUli1btGzZMh04cEDPPvusNm/e7DDP6NGjNXXqVH366afat2+fRo4cqXPnzhW6zkaNGmnAgAEaNGiQFi9erCNHjmjTpk2Ki4vT0qVLi71vderU0a5du7R//3798ssvDldErubn56e+fftq4sSJSkpK0pAhQxz2ccWKFfrvf/+rvXv36rHHHlNKSkqR2+7cubPefPNNbd++XVu2bNGIESMcriANGDBAVatWVa9evfTtt9/qyJEjWrNmjf785z/r+PHjxd5HAP9DkAFuQvXr19f27dsVFRWliRMnqlWrVmrbtq3eeOMNTZgw4Zo3qXvsscfUu3dv9e3bV+3bt9eZM2c0cuRIh3nGjx+vRx55RIMHD7Z//fTggw8Wud6EhAQNGjRI48ePV+PGjfXAAw9o8+bNqlWrVrH3bfjw4WrcuLHatm2rkJAQrVu3rsj5Y2Ji9Ouvv6pr166qXr26vf2ZZ57Rbbfdpq5du+quu+5SWFjYNW+0N2PGDEVERKhTp056+OGHNWHCBIc+NT4+Pvrmm29Uq1Yt9e7dW02bNlVMTIwyMjIUEBBQ7H0E8D824+yXywAAAC7GFRkAAGBZBBkAAGBZBBkAAGBZBBkAAGBZBBkAAGBZBBkAAGBZBBkAAGBZBBkAAGBZBBkAAGBZBBkAAGBZBBkAAGBZBBkAAGBZ/w+c2S35jJPqbQAAAABJRU5ErkJggg==", - "text/plain": [ - "
" - ] - }, - "metadata": {}, - "output_type": "display_data" - } - ], - "source": [ - "\n", - "# Example: Plot the activations of the final Linear layer\n", - "layer_name, activation = activations[1] # Assuming fc activation\n", - "print(f\"Activation for {layer_name} layer: \", activation.shape)\n", - "\n", - "# Plotting histogram of activations of the final Linear layer\n", - "plt.hist(activation.detach().numpy().flatten(), bins=100, alpha=0.7, color='g', label='FC Activations')\n", - "plt.title(f\"Activation Distribution for {layer_name}\")\n", - "plt.xlabel(\"Activation Value\")\n", - "plt.ylabel(\"Frequency\")\n", - "plt.legend()\n", - "plt.show()\n", - "\n", - "# Example: Plot the gradients of the final Linear layer\n", - "layer_name, grad = gradients[1] # Assuming fc gradient\n", - "print(f\"Gradient for {layer_name} layer: \", grad[0].shape)\n", - "\n", - "# Plotting histogram of gradients of the final Linear layer\n", - "plt.hist(grad[0].numpy().flatten(), bins=100, alpha=0.7, color='y', label='FC Gradients')\n", - "plt.title(f\"Gradient Distribution for {layer_name}\")\n", - "plt.xlabel(\"Gradient Value\")\n", - "plt.ylabel(\"Frequency\")\n", - "plt.legend()\n", - "plt.show()\n" - ] } ], "metadata": { From f61f734c8efbeb6b767ce6df858687fcc8acca55 Mon Sep 17 00:00:00 2001 From: LeoRoccoBreedt Date: Wed, 12 Mar 2025 14:44:07 +0100 Subject: [PATCH 050/125] chore: cleanup model outputs --- .../pytorch_text_model_debugging.ipynb | 64 +------------------ 1 file changed, 3 insertions(+), 61 deletions(-) diff --git a/integrations-and-supported-tools/pytorch/pytorch_text_model_debugging.ipynb b/integrations-and-supported-tools/pytorch/pytorch_text_model_debugging.ipynb index 97c8c9c..84435cd 100644 --- a/integrations-and-supported-tools/pytorch/pytorch_text_model_debugging.ipynb +++ b/integrations-and-supported-tools/pytorch/pytorch_text_model_debugging.ipynb @@ -363,57 +363,9 @@ }, { "cell_type": "code", - "execution_count": 26, + "execution_count": null, 
"metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Step 1 / 10241, Loss: 11.760001182556152\n", - "Step 2 / 10241, Loss: 11.625699996948242\n", - "Step 3 / 10241, Loss: 9.880274772644043\n", - "Step 4 / 10241, Loss: 9.80041790008545\n", - "Step 5 / 10241, Loss: 10.201050758361816\n", - "Step 5, Val_loss: 9.920373557979225\n", - "Gradients for fc1: -3.875334975882508e-15\n", - "Gradients for lstm: -2.1588371055258904e-06\n", - "Step 6 / 10241, Loss: 9.584494590759277\n", - "Step 7 / 10241, Loss: 9.479751586914062\n", - "Step 8 / 10241, Loss: 10.132659912109375\n", - "Step 9 / 10241, Loss: 10.202069282531738\n", - "Step 10 / 10241, Loss: 11.208635330200195\n", - "Step 10, Val_loss: 10.601182611579569\n", - "Gradients for fc1: 4.1997570650103774e-15\n", - "Gradients for lstm: -2.5467079467489384e-06\n", - "Step 11 / 10241, Loss: 10.122359275817871\n", - "Step 12 / 10241, Loss: 10.62948226928711\n", - "Step 13 / 10241, Loss: 10.538959503173828\n", - "Step 14 / 10241, Loss: 11.84500503540039\n", - "Step 15 / 10241, Loss: 10.522750854492188\n" - ] - }, - { - "ename": "KeyboardInterrupt", - "evalue": "", - "output_type": "error", - "traceback": [ - "\u001b[1;31m---------------------------------------------------------------------------\u001b[0m", - "\u001b[1;31mKeyboardInterrupt\u001b[0m Traceback (most recent call last)", - "Cell \u001b[1;32mIn[26], line 43\u001b[0m\n\u001b[0;32m 40\u001b[0m \u001b[38;5;28mprint\u001b[39m(\u001b[38;5;124mf\u001b[39m\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mStep \u001b[39m\u001b[38;5;132;01m{\u001b[39;00mstep_counter\u001b[38;5;132;01m}\u001b[39;00m\u001b[38;5;124m / \u001b[39m\u001b[38;5;132;01m{\u001b[39;00m\u001b[38;5;28mlen\u001b[39m(train_dataloader)\u001b[38;5;132;01m}\u001b[39;00m\u001b[38;5;124m, Loss: \u001b[39m\u001b[38;5;132;01m{\u001b[39;00mloss\u001b[38;5;241m.\u001b[39mitem()\u001b[38;5;132;01m}\u001b[39;00m\u001b[38;5;124m\"\u001b[39m)\n\u001b[0;32m 42\u001b[0m 
\u001b[38;5;28;01mif\u001b[39;00m step_counter \u001b[38;5;241m%\u001b[39m \u001b[38;5;241m5\u001b[39m \u001b[38;5;241m==\u001b[39m \u001b[38;5;241m0\u001b[39m: \u001b[38;5;66;03m# Do not need to log validation at every step, although we can\u001b[39;00m\n\u001b[1;32m---> 43\u001b[0m val_loss \u001b[38;5;241m=\u001b[39m \u001b[43mevaluate\u001b[49m\u001b[43m(\u001b[49m\u001b[43mmodel\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mval_dataloader\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mcriterion\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mdevice\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mvocab_size\u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m 44\u001b[0m \u001b[38;5;28mprint\u001b[39m(\u001b[38;5;124mf\u001b[39m\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mStep \u001b[39m\u001b[38;5;132;01m{\u001b[39;00mstep_counter\u001b[38;5;132;01m}\u001b[39;00m\u001b[38;5;124m, Val_loss: \u001b[39m\u001b[38;5;132;01m{\u001b[39;00mval_loss\u001b[38;5;132;01m}\u001b[39;00m\u001b[38;5;124m\"\u001b[39m)\n\u001b[0;32m 46\u001b[0m \u001b[38;5;66;03m# Track activations\u001b[39;00m\n", - "Cell \u001b[1;32mIn[24], line 69\u001b[0m, in \u001b[0;36mevaluate\u001b[1;34m(model, val_dataloader, criterion, device, vocab_size)\u001b[0m\n\u001b[0;32m 66\u001b[0m labels \u001b[38;5;241m=\u001b[39m batch[\u001b[38;5;124m'\u001b[39m\u001b[38;5;124mlabels\u001b[39m\u001b[38;5;124m'\u001b[39m]\u001b[38;5;241m.\u001b[39mto(device)\n\u001b[0;32m 68\u001b[0m \u001b[38;5;66;03m# Forward pass for validation\u001b[39;00m\n\u001b[1;32m---> 69\u001b[0m logits \u001b[38;5;241m=\u001b[39m \u001b[43mmodel\u001b[49m\u001b[43m(\u001b[49m\u001b[43minput_ids\u001b[49m\u001b[43m)\u001b[49m \u001b[38;5;66;03m# Shape: (batch_size, seq_len, vocab_size)\u001b[39;00m\n\u001b[0;32m 71\u001b[0m \u001b[38;5;66;03m# Calculate the loss\u001b[39;00m\n\u001b[0;32m 72\u001b[0m loss \u001b[38;5;241m=\u001b[39m 
criterion(logits\u001b[38;5;241m.\u001b[39mview(\u001b[38;5;241m-\u001b[39m\u001b[38;5;241m1\u001b[39m, vocab_size), labels\u001b[38;5;241m.\u001b[39mview(\u001b[38;5;241m-\u001b[39m\u001b[38;5;241m1\u001b[39m))\n", - "File \u001b[1;32mc:\\Users\\leo.breedt\\miniconda3\\envs\\neptune_scale_py_312_base\\Lib\\site-packages\\torch\\nn\\modules\\module.py:1739\u001b[0m, in \u001b[0;36mModule._wrapped_call_impl\u001b[1;34m(self, *args, **kwargs)\u001b[0m\n\u001b[0;32m 1737\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_compiled_call_impl(\u001b[38;5;241m*\u001b[39margs, \u001b[38;5;241m*\u001b[39m\u001b[38;5;241m*\u001b[39mkwargs) \u001b[38;5;66;03m# type: ignore[misc]\u001b[39;00m\n\u001b[0;32m 1738\u001b[0m \u001b[38;5;28;01melse\u001b[39;00m:\n\u001b[1;32m-> 1739\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_call_impl\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n", - "File \u001b[1;32mc:\\Users\\leo.breedt\\miniconda3\\envs\\neptune_scale_py_312_base\\Lib\\site-packages\\torch\\nn\\modules\\module.py:1845\u001b[0m, in \u001b[0;36mModule._call_impl\u001b[1;34m(self, *args, **kwargs)\u001b[0m\n\u001b[0;32m 1842\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m inner()\n\u001b[0;32m 1844\u001b[0m \u001b[38;5;28;01mtry\u001b[39;00m:\n\u001b[1;32m-> 1845\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[43minner\u001b[49m\u001b[43m(\u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m 1846\u001b[0m \u001b[38;5;28;01mexcept\u001b[39;00m \u001b[38;5;167;01mException\u001b[39;00m:\n\u001b[0;32m 1847\u001b[0m \u001b[38;5;66;03m# run always called hooks if they have not already been run\u001b[39;00m\n\u001b[0;32m 1848\u001b[0m \u001b[38;5;66;03m# For now 
only forward hooks have the always_call option but perhaps\u001b[39;00m\n\u001b[0;32m 1849\u001b[0m \u001b[38;5;66;03m# this functionality should be added to full backward hooks as well.\u001b[39;00m\n\u001b[0;32m 1850\u001b[0m \u001b[38;5;28;01mfor\u001b[39;00m hook_id, hook \u001b[38;5;129;01min\u001b[39;00m _global_forward_hooks\u001b[38;5;241m.\u001b[39mitems():\n", - "File \u001b[1;32mc:\\Users\\leo.breedt\\miniconda3\\envs\\neptune_scale_py_312_base\\Lib\\site-packages\\torch\\nn\\modules\\module.py:1793\u001b[0m, in \u001b[0;36mModule._call_impl..inner\u001b[1;34m()\u001b[0m\n\u001b[0;32m 1790\u001b[0m bw_hook \u001b[38;5;241m=\u001b[39m BackwardHook(\u001b[38;5;28mself\u001b[39m, full_backward_hooks, backward_pre_hooks)\n\u001b[0;32m 1791\u001b[0m args \u001b[38;5;241m=\u001b[39m bw_hook\u001b[38;5;241m.\u001b[39msetup_input_hook(args)\n\u001b[1;32m-> 1793\u001b[0m result \u001b[38;5;241m=\u001b[39m \u001b[43mforward_call\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m 1794\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m _global_forward_hooks \u001b[38;5;129;01mor\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_forward_hooks:\n\u001b[0;32m 1795\u001b[0m \u001b[38;5;28;01mfor\u001b[39;00m hook_id, hook \u001b[38;5;129;01min\u001b[39;00m (\n\u001b[0;32m 1796\u001b[0m \u001b[38;5;241m*\u001b[39m_global_forward_hooks\u001b[38;5;241m.\u001b[39mitems(),\n\u001b[0;32m 1797\u001b[0m \u001b[38;5;241m*\u001b[39m\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_forward_hooks\u001b[38;5;241m.\u001b[39mitems(),\n\u001b[0;32m 1798\u001b[0m ):\n\u001b[0;32m 1799\u001b[0m \u001b[38;5;66;03m# mark that always called hook is run\u001b[39;00m\n", - "Cell \u001b[1;32mIn[24], line 12\u001b[0m, in \u001b[0;36mSimpleLLM.forward\u001b[1;34m(self, 
x)\u001b[0m\n\u001b[0;32m 10\u001b[0m x \u001b[38;5;241m=\u001b[39m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39membedding(x)\n\u001b[0;32m 11\u001b[0m lstm_out, _ \u001b[38;5;241m=\u001b[39m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mlstm(x) \u001b[38;5;66;03m# LSTM returns output and hidden/cell state tuple\u001b[39;00m\n\u001b[1;32m---> 12\u001b[0m out \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mfc1\u001b[49m\u001b[43m(\u001b[49m\u001b[43mlstm_out\u001b[49m\u001b[43m)\u001b[49m \u001b[38;5;66;03m# Use the last output from the LSTM\u001b[39;00m\n\u001b[0;32m 13\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m out\n", - "File \u001b[1;32mc:\\Users\\leo.breedt\\miniconda3\\envs\\neptune_scale_py_312_base\\Lib\\site-packages\\torch\\nn\\modules\\module.py:1739\u001b[0m, in \u001b[0;36mModule._wrapped_call_impl\u001b[1;34m(self, *args, **kwargs)\u001b[0m\n\u001b[0;32m 1737\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_compiled_call_impl(\u001b[38;5;241m*\u001b[39margs, \u001b[38;5;241m*\u001b[39m\u001b[38;5;241m*\u001b[39mkwargs) \u001b[38;5;66;03m# type: ignore[misc]\u001b[39;00m\n\u001b[0;32m 1738\u001b[0m \u001b[38;5;28;01melse\u001b[39;00m:\n\u001b[1;32m-> 1739\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_call_impl\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n", - "File \u001b[1;32mc:\\Users\\leo.breedt\\miniconda3\\envs\\neptune_scale_py_312_base\\Lib\\site-packages\\torch\\nn\\modules\\module.py:1845\u001b[0m, in \u001b[0;36mModule._call_impl\u001b[1;34m(self, *args, **kwargs)\u001b[0m\n\u001b[0;32m 1842\u001b[0m 
\u001b[38;5;28;01mreturn\u001b[39;00m inner()\n\u001b[0;32m 1844\u001b[0m \u001b[38;5;28;01mtry\u001b[39;00m:\n\u001b[1;32m-> 1845\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[43minner\u001b[49m\u001b[43m(\u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m 1846\u001b[0m \u001b[38;5;28;01mexcept\u001b[39;00m \u001b[38;5;167;01mException\u001b[39;00m:\n\u001b[0;32m 1847\u001b[0m \u001b[38;5;66;03m# run always called hooks if they have not already been run\u001b[39;00m\n\u001b[0;32m 1848\u001b[0m \u001b[38;5;66;03m# For now only forward hooks have the always_call option but perhaps\u001b[39;00m\n\u001b[0;32m 1849\u001b[0m \u001b[38;5;66;03m# this functionality should be added to full backward hooks as well.\u001b[39;00m\n\u001b[0;32m 1850\u001b[0m \u001b[38;5;28;01mfor\u001b[39;00m hook_id, hook \u001b[38;5;129;01min\u001b[39;00m _global_forward_hooks\u001b[38;5;241m.\u001b[39mitems():\n", - "File \u001b[1;32mc:\\Users\\leo.breedt\\miniconda3\\envs\\neptune_scale_py_312_base\\Lib\\site-packages\\torch\\nn\\modules\\module.py:1793\u001b[0m, in \u001b[0;36mModule._call_impl..inner\u001b[1;34m()\u001b[0m\n\u001b[0;32m 1790\u001b[0m bw_hook \u001b[38;5;241m=\u001b[39m BackwardHook(\u001b[38;5;28mself\u001b[39m, full_backward_hooks, backward_pre_hooks)\n\u001b[0;32m 1791\u001b[0m args \u001b[38;5;241m=\u001b[39m bw_hook\u001b[38;5;241m.\u001b[39msetup_input_hook(args)\n\u001b[1;32m-> 1793\u001b[0m result \u001b[38;5;241m=\u001b[39m \u001b[43mforward_call\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m 1794\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m _global_forward_hooks \u001b[38;5;129;01mor\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_forward_hooks:\n\u001b[0;32m 1795\u001b[0m \u001b[38;5;28;01mfor\u001b[39;00m hook_id, hook 
\u001b[38;5;129;01min\u001b[39;00m (\n\u001b[0;32m 1796\u001b[0m \u001b[38;5;241m*\u001b[39m_global_forward_hooks\u001b[38;5;241m.\u001b[39mitems(),\n\u001b[0;32m 1797\u001b[0m \u001b[38;5;241m*\u001b[39m\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_forward_hooks\u001b[38;5;241m.\u001b[39mitems(),\n\u001b[0;32m 1798\u001b[0m ):\n\u001b[0;32m 1799\u001b[0m \u001b[38;5;66;03m# mark that always called hook is run\u001b[39;00m\n", - "File \u001b[1;32mc:\\Users\\leo.breedt\\miniconda3\\envs\\neptune_scale_py_312_base\\Lib\\site-packages\\torch\\nn\\modules\\linear.py:125\u001b[0m, in \u001b[0;36mLinear.forward\u001b[1;34m(self, input)\u001b[0m\n\u001b[0;32m 124\u001b[0m \u001b[38;5;28;01mdef\u001b[39;00m \u001b[38;5;21mforward\u001b[39m(\u001b[38;5;28mself\u001b[39m, \u001b[38;5;28minput\u001b[39m: Tensor) \u001b[38;5;241m-\u001b[39m\u001b[38;5;241m>\u001b[39m Tensor:\n\u001b[1;32m--> 125\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[43mF\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mlinear\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;28;43minput\u001b[39;49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mweight\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mbias\u001b[49m\u001b[43m)\u001b[49m\n", - "\u001b[1;31mKeyboardInterrupt\u001b[0m: " - ] - } - ], + "outputs": [], "source": [ "debug_metrics = {}\n", "\n", @@ -502,17 +454,7 @@ "cell_type": "code", "execution_count": null, "metadata": {}, - "outputs": [ - { - "name": "stderr", - "output_type": "stream", - "text": [ - "neptune:INFO: Waiting for all operations to be processed\n", - "neptune:WARNING: No timeout specified. 
Waiting indefinitely\n", - "neptune:INFO: All operations were processed\n" - ] - } - ], + "outputs": [], "source": [ "# Close run to ensure all operations are processed\n", "run.close()" From bcc344e9360667167f38214658da0a1b4da5bee5 Mon Sep 17 00:00:00 2001 From: LeoRoccoBreedt Date: Wed, 12 Mar 2025 14:53:47 +0100 Subject: [PATCH 051/125] refactor: update quotes for dictionary keys since Colab returns an error --- .../pytorch/pytorch_text_model_debugging.ipynb | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/integrations-and-supported-tools/pytorch/pytorch_text_model_debugging.ipynb b/integrations-and-supported-tools/pytorch/pytorch_text_model_debugging.ipynb index 84435cd..216b0f2 100644 --- a/integrations-and-supported-tools/pytorch/pytorch_text_model_debugging.ipynb +++ b/integrations-and-supported-tools/pytorch/pytorch_text_model_debugging.ipynb @@ -51,7 +51,7 @@ }, { "cell_type": "code", - "execution_count": 1, + "execution_count": 2, "metadata": {}, "outputs": [ { @@ -117,7 +117,7 @@ }, { "cell_type": "code", - "execution_count": 20, + "execution_count": null, "metadata": {}, "outputs": [ { @@ -138,7 +138,7 @@ "data_subset = load_dataset(\"parquet\", data_files = data_files, num_proc=4)\n", "# validation_subset = load_dataset(\"parquet\", data_files = {\"validation\": base_url + \"validation-00000-of-00001.parquet\"}, num_proc=4, split=[\"validation[:5%]\"])\n", "validation_subset = data_subset.get(\"validation\").train_test_split(test_size=0.1)\n", - "print(f\"Training samples: {data_subset[\"train\"].num_rows} \\nValidation samples: {validation_subset[\"test\"].num_rows}\")" + "print(f\"Training samples: {data_subset['train'].num_rows} \\nValidation samples: {validation_subset['test'].num_rows}\")" ] }, { From df89878baf6782ea2edaad3c607e7c8c07153bf4 Mon Sep 17 00:00:00 2001 From: LeoRoccoBreedt Date: Fri, 14 Mar 2025 17:15:04 +0100 Subject: [PATCH 052/125] style: update intro and information about debugging metrics --- 
.../pytorch_text_model_debugging.ipynb | 43 ++++++++++--------- 1 file changed, 23 insertions(+), 20 deletions(-) diff --git a/integrations-and-supported-tools/pytorch/pytorch_text_model_debugging.ipynb b/integrations-and-supported-tools/pytorch/pytorch_text_model_debugging.ipynb index 216b0f2..01f8a47 100644 --- a/integrations-and-supported-tools/pytorch/pytorch_text_model_debugging.ipynb +++ b/integrations-and-supported-tools/pytorch/pytorch_text_model_debugging.ipynb @@ -7,15 +7,18 @@ "# Neptune + PyTorch\n", "\n", "## Logging and Visualizing debugging metrics with Neptune\n", + "**Global aggregated metrics**, such as overall loss and accuracy, provide a high-level view of a model's performance and help track progress toward the target task. These metrics are essential for assessing the model’s overall success and ensuring training is on the right path. However, **layer-wise metrics** offer deeper insights into how individual layers contribute to learning. By monitoring metrics like gradients and activations at each layer, we can identify specific issues (e.g., vanishing/exploding gradients) and optimize training for individual layers. This is crucial for deep networks, where different layers learn distinct types of features.\n", "\n", - "### Introduction\n", + "The main drawback of tracking layer-wise metrics is the **data overload** they can generate, especially with large/foundation models. Logging metrics for every layer can create significant data volume which researchers needs to log and monitor. However, with efficient tracking strategies and tools like Neptune, this challenge can be managed, allowing for detailed insights without overwhelming the training process.\n", "\n", + "### Introduction\n", "See how Neptune Scale can be used for pre-training models like foundation models by tracking hundreds of metrics. 
This example is designed to be used as a code recipe for you to re-use sections with your own code to edit or adapt to your own model training needs. \n", "\n", "This guide will show you how to:\n", - "- Initialize the Neptune Run object and log configuration parameters\n", - "- Log standard loss and accuracy metrics to Neptune\n", - "- Log _**debugging metrics**_ per layer during model training such as;\n", + "- Initialize the **Neptune Run** object and log configuration parameters\n", + "- Create a **class** to hook layer-wise metrics\n", + "- Log **aggregated metrics** such as loss and accuracy\n", + "- Log **debugging metrics** per layer during model training such as;\n", " * Activations\n", " * Gradients\n", " * Parameters (Weights and Biases)" @@ -51,7 +54,7 @@ }, { "cell_type": "code", - "execution_count": 2, + "execution_count": null, "metadata": {}, "outputs": [ { @@ -73,8 +76,6 @@ "from collections import Counter\n", "from datasets import load_dataset\n", "\n", - "import matplotlib.pyplot as plt\n", - "\n", "from neptune_scale import Run" ] }, @@ -183,7 +184,16 @@ "source": [ "## Importance of Logging Debugging Metrics\n", "\n", - "During model training, capturing valuable information from each layer can offer critical insights into the model's learning process, identify potential issues, and guide improvements. Monitoring certain metrics and activations helps diagnose common problems such as vanishing gradients, overfitting, or underfitting. Below are key metrics to track from each layer:\n", + "Tracking layer-wise metrics during training of foundation models is an important aspect of understanding and improving the performance of large-scale neural networks. 
Tracking layer-wise metrics is imporant to;\n", + "1) Understand the model behaviour at different depths\n", + "2) Diagnose training issues (vanishing/exploding gradients)\n", + "3) Model interpretability and debugging\n", + "4) Layer-specific regularization\n", + "5) Transfer learning insights\n", + "\n", + "However, one drawback is the overwhelming amount of data generated that needs to be logged, tracked and analyzed. These models can have hundreds to thousands of layers making the data difficult to interpret. However, with Neptune this is not a problem. \n", + "\n", + "In this example, we will show you that it is possible to log and visualize layer-wise metrics such as activations, gradients as well as global metrics.\n", "\n", "### Key metrics to capture from each layer:\n", "\n", @@ -192,9 +202,9 @@ "| **Activations** | Yes | Provides insight into how the model is processing data. Dead or exploding activations can indicate issues with training stability. | Use hooks to capture activations after each layer. |\n", "| **Gradients** | Yes | Essential for diagnosing vanishing or exploding gradients. Small gradients may indicate vanishing gradients, while large ones can signal instability. | Use hooks to capture gradients during backpropagation. |\n", "| **Weights and Biases** | Yes | Tracks how the model’s parameters evolve during training. Large or small weights may indicate the need for better regularization or adjustments in learning rate. | Extract directly from the model’s parameters. |\n", - "| **Layer-wise Loss** | No | Identifies which parts of the network contribute more to the overall loss, aiding debugging and optimization. | Monitor outputs from each layer and compare with the target. |\n", - "| **Learning Rate per Layer** | No | Helpful if using techniques like Layer-wise Learning Rate Decay (L2LRD). Tracking this can provide insight into the layer-specific learning rate. | Manually track based on optimizer settings. 
|\n", - "| **Layer Output Norms** | No | The L2-norm of layer outputs can highlight issues like gradient explosion or vanishing gradients. | Compute the L2-norm for each layer’s output. |\n", + "| **Loss** | No | Identifies which parts of the network contribute more to the overall loss, aiding debugging and optimization. | Monitor outputs from each layer and compare with the target. |\n", + "| **Learning Rate** | No | Helpful if using techniques like Layer-wise Learning Rate Decay (L2LRD). Tracking this can provide insight into the layer-specific learning rate. | Manually track based on optimizer settings. |\n", + "| **Output Norms** | No | The L2-norm of layer outputs can highlight issues like gradient explosion or vanishing gradients. | Compute the L2-norm for each layer’s output. |\n", "| **Activation Distributions** | No | Helps diagnose saturation issues, especially with ReLU activations that may lead to dead neurons. | Visualize or compute statistical summaries using tools like matplotlib or seaborn. |\n", "| **Feature Maps (for Convolutional Layers)** | No | Offers insights into how convolutional layers detect specific patterns in the data. | Visualize feature maps after convolutional layers using libraries like matplotlib. 
|\n" ] @@ -447,15 +457,8 @@ " print(f\"Epoch {epoch + 1}, Loss: {total_loss / len(train_dataloader)}\")\n", "\n", "# test_loss = evaluate_model(model, test_input, test_target, params[\"vocab_size\"])\n", - "# print(f'Test Loss: {test_loss:.4f}')\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ + "# print(f'Test Loss: {test_loss:.4f}')\n", + "\n", "# Close run to ensure all operations are processed\n", "run.close()" ] From 23bd58a80d37a48316e003801cf2fae42f2602ff Mon Sep 17 00:00:00 2001 From: LeoRoccoBreedt Date: Mon, 17 Mar 2025 16:26:44 +0100 Subject: [PATCH 053/125] refactor: update to be able to run model on GPUs --- .../pytorch_image_classification.ipynb | 115 +++++++++++++++++- 1 file changed, 110 insertions(+), 5 deletions(-) diff --git a/integrations-and-supported-tools/pytorch/pytorch_image_classification.ipynb b/integrations-and-supported-tools/pytorch/pytorch_image_classification.ipynb index 0fd0e1c..01ca6d4 100644 --- a/integrations-and-supported-tools/pytorch/pytorch_image_classification.ipynb +++ b/integrations-and-supported-tools/pytorch/pytorch_image_classification.ipynb @@ -117,7 +117,105 @@ "cell_type": "code", "execution_count": 3, "metadata": {}, - "outputs": [], + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Downloading http://yann.lecun.com/exdb/mnist/train-images-idx3-ubyte.gz\n", + "Failed to download (trying next):\n", + "HTTP Error 404: Not Found\n", + "\n", + "Downloading https://ossci-datasets.s3.amazonaws.com/mnist/train-images-idx3-ubyte.gz\n", + "Downloading https://ossci-datasets.s3.amazonaws.com/mnist/train-images-idx3-ubyte.gz to ./data/MNIST/raw/train-images-idx3-ubyte.gz\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "100%|██████████| 9912422/9912422 [00:11<00:00, 883546.49it/s] \n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Extracting 
./data/MNIST/raw/train-images-idx3-ubyte.gz to ./data/MNIST/raw\n", + "\n", + "Downloading http://yann.lecun.com/exdb/mnist/train-labels-idx1-ubyte.gz\n", + "Failed to download (trying next):\n", + "HTTP Error 404: Not Found\n", + "\n", + "Downloading https://ossci-datasets.s3.amazonaws.com/mnist/train-labels-idx1-ubyte.gz\n", + "Downloading https://ossci-datasets.s3.amazonaws.com/mnist/train-labels-idx1-ubyte.gz to ./data/MNIST/raw/train-labels-idx1-ubyte.gz\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "100%|██████████| 28881/28881 [00:00<00:00, 29159.66it/s]\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Extracting ./data/MNIST/raw/train-labels-idx1-ubyte.gz to ./data/MNIST/raw\n", + "\n", + "Downloading http://yann.lecun.com/exdb/mnist/t10k-images-idx3-ubyte.gz\n", + "Failed to download (trying next):\n", + "HTTP Error 404: Not Found\n", + "\n", + "Downloading https://ossci-datasets.s3.amazonaws.com/mnist/t10k-images-idx3-ubyte.gz\n", + "Downloading https://ossci-datasets.s3.amazonaws.com/mnist/t10k-images-idx3-ubyte.gz to ./data/MNIST/raw/t10k-images-idx3-ubyte.gz\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "100%|██████████| 1648877/1648877 [00:01<00:00, 1201389.96it/s]\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Extracting ./data/MNIST/raw/t10k-images-idx3-ubyte.gz to ./data/MNIST/raw\n", + "\n", + "Downloading http://yann.lecun.com/exdb/mnist/t10k-labels-idx1-ubyte.gz\n", + "Failed to download (trying next):\n", + "HTTP Error 404: Not Found\n", + "\n", + "Downloading https://ossci-datasets.s3.amazonaws.com/mnist/t10k-labels-idx1-ubyte.gz\n", + "Downloading https://ossci-datasets.s3.amazonaws.com/mnist/t10k-labels-idx1-ubyte.gz to ./data/MNIST/raw/t10k-labels-idx1-ubyte.gz\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "100%|██████████| 4542/4542 [00:00<00:00, 2459084.65it/s]" + ] + }, + { + "name": 
"stdout", + "output_type": "stream", + "text": [ + "Extracting ./data/MNIST/raw/t10k-labels-idx1-ubyte.gz to ./data/MNIST/raw\n", + "\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "\n" + ] + } + ], "source": [ "\n", "# Transform to normalize the data and convert it to tensor\n", @@ -139,7 +237,7 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 6, "metadata": {}, "outputs": [ { @@ -213,6 +311,8 @@ "# Instantiate the model, loss function, and optimizer\n", "#model = SimpleCNN()\n", "model = SimpleNN()\n", + "device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')\n", + "model.to(device)\n", "criterion = nn.CrossEntropyLoss() # Loss function\n", "\n", "# Select an optimizer\n", @@ -228,7 +328,7 @@ }, { "cell_type": "code", - "execution_count": 5, + "execution_count": 7, "metadata": {}, "outputs": [], "source": [ @@ -240,6 +340,9 @@ " epoch_loss = 0\n", " with torch.no_grad(): # Disable gradient tracking during evaluation\n", " for data, target in data_loader:\n", + " \n", + " data, target = data.to(device), target.to(device)\n", + " \n", " # Forward pass (with gradient tracking if specified)\n", " output = model(data)\n", " loss = criterion(output, target) # Correct loss computation\n", @@ -267,7 +370,7 @@ }, { "cell_type": "code", - "execution_count": 6, + "execution_count": null, "metadata": {}, "outputs": [], "source": [ @@ -303,7 +406,7 @@ }, { "cell_type": "code", - "execution_count": 7, + "execution_count": 11, "metadata": {}, "outputs": [ { @@ -348,6 +451,8 @@ " step_counter += 1\n", " optimizer.zero_grad()\n", " \n", + " data, target = data.to(device), target.to(device)\n", + "\n", " # Forward pass\n", " output = model(data)\n", " \n", From 0dbd34134cc5736bf0261a8eb2134fb64bf5ea1b Mon Sep 17 00:00:00 2001 From: LeoRoccoBreedt Date: Mon, 17 Mar 2025 17:51:41 +0100 Subject: [PATCH 054/125] style: update ending of notebook with follow along --- .../pytorch_text_model_debugging.ipynb | 23 
+++++++++++++++++++ 1 file changed, 23 insertions(+) diff --git a/integrations-and-supported-tools/pytorch/pytorch_text_model_debugging.ipynb b/integrations-and-supported-tools/pytorch/pytorch_text_model_debugging.ipynb index 01f8a47..2d22ee5 100644 --- a/integrations-and-supported-tools/pytorch/pytorch_text_model_debugging.ipynb +++ b/integrations-and-supported-tools/pytorch/pytorch_text_model_debugging.ipynb @@ -462,6 +462,29 @@ "# Close run to ensure all operations are processed\n", "run.close()" ] + }, + { + "attachments": { + "image.png": { + "image/png": "iVBORw0KGgoAAAANSUhEUgAAA3cAAAGwCAYAAAAKQZwzAAAAAXNSR0IArs4c6QAAAARnQU1BAACxjwv8YQUAAAAJcEhZcwAADsMAAA7DAcdvqGQAAPO4SURBVHhe7N13fBz1nT/+18zO9qJd9S5LcqcZbEIHARdCCySE5C4F7sjluAvJ4XzhUi7J5XK59Fz4BS4hORKHhJoAobdACNiAC5aL3CWrWL1L29u03x+fmdHuaGVLRpYt6f18PPSw9zOzs7Mzq5156/P+vD9cOBxRQQghhBBCCCFkXuMygzuv14N4PI7+/n6Ew2EoipK9NiGEEEIIIYTMEZ7n4fP5UFZWBpfLhUgkal6FZMgK7iwWHs3NzVi5ciWcTmf2moQQQgghhBAyxxKJBA4dOoQVK1ZAlqnz6WiygruhoUGUl5dTYEcIIYQQQgg5ZSQSCfT19aG4uMS8iGTgMx+Ew2EK7AghhBBCCCGnFKfTiXA4bG4mJlnBHY2xI4QQQgghhJyKKFY5tqzgjhBCCCGEEELI/ETBHSGEEEIIIYQsABTcEUIIIYQQQsgCkFUts6WlGWvXrs1ew2RkdBzRaGzGOa88z8PjcaOwIGBeRAghhBBCCCFHtWPHDixfvsLcTDLMKLgbGR2HIFjgz/MBAFRVBcdx0/43GApDkmQK8AghhBBCyKLT1tGJ9iPdWW11S6pQX1uT1WZez263YfWKpSgsyM9ab7a9u2UbltbXoaS4yLzolEDB3bHNKC0zGAzBn+eDqhrx4Iz483wIBkPmZkIIIYQQQha88eDkUv5pUTI3TWpLpdIIhSNZbbPtjbc24UhnF154+VX09vWbF5N5YkbBnSTJUFQVqqoe17+KokCSZPNmCSGEEEIIWTTqllRh7ZozsHbNGahfUmVejPqM5XU5ls+2v761CQMDg/jwtVfj3HPOxnMvvoIjnV3m1cg8MKPgDpy54Tgc1zb68Min1mHdt941L5hl7+Kb69Zh3aceQZ950aw40dufnr6HPo11676JE300CSGEEEJIbvmBPOQH8mCz2cyLYLPZjOUn2sa338XuPftw1pmnw+fz4qwzT8cVDZfg2RdeRmtbu3l1coqbWXCnjbN7Pz+nkklBTncHOgCg5RD793060dsnhBBCCCHkeL2zeSuOdHXj03/3cbz2lzfRcrgNANDa1o6l9bVYWl9nfgo5xc04uJtTm76JdevWYd26G/CzFgAvr9cefxqPZI9FnR1Vn8GjjY1obPwuLjIvmw0ncPuTAslJtF7Ddetww33NAF7Feu3xpx86mf2IhBBCCCGLQ1VFGeqWVCHg95sXTSng98/4OdOxeet7aOs4guuv+RBKiotw/TUfwouv/BkvvvJnAMD113zI/BQyD8w4uDP3xM30Zyb6au9AY2Mj7r12om3Fnc+jsfEnuEJ7/O63WICi/5gD
FRb0mJezNM/MIIe1Z6ZN6sFQZsCkpYdqbeZtT6SNTmf7uomga515Wfcj+PS6dVj3rUe012U/39xkPHn6umtxR2MjGu+5eqJt+ZfwfGMjfnI5Mvbtm/hmxmt9+qG+7PdpSo01H4OsfdP33/jJDj7Zufs0HnlID+LNx4YQQgghZOEoKS5EfW3NjNIt8wN5M37OsWze+h4Ot7Xj+ms+hIJ8VsV++bJ6LF9WD7yPwG7Pnj145JFH8Prrr0MURQBAT08PnnjiCTz77LMYGxsDAEQiEbz44ot47LHH0N5OqZ+zacbB3VwqryoH0IeOVgDLV2AFgObWDgDlKK9iPXvrX16BLz3TiMbGRjx/5wo033eDEWD0PfRp3HBfM66+x7y8HJ95TA8ar8a9jY149NZy06tfhDvuXAHgVbyiByzdf8UrLcCKO+/ARd2P4MsZ226852rg5fVaEDed7UMLqNbjVS3Iamx8Hl/Cz3CDOcB5+RXgR9ry5cCrdx2th24KVeUoB9B3pAPACqxYPpEeyo6zpqUDKzNeq/m+G/Bl/MQ4fnh5/VGP76t36QFeHx756s/QfO297Pg03our8SrWm98bmvGz1msmjmHLz/Bl6kkkhBBCCDkhcgV2AN53j52iKNi2bRvi8Tg6OjrQ0cEGIe3atQvBYBBDQ0PYu3cvAKC5uRl9fX2IRqPYtm2baUvk/ZhxcGfuiZvpz0y9+60b8LOWFfjSjx7Fo1kBFIBLv4vGxkfxGa2IUPmSWgBAxxHWO/fXV5sBXI1rLtWW3/ooGhsb8V3t8bGUX34NVgB49S8slOp78xU0YwWuubzcSLE0tlW7MiP4nKZNr+BVACuuvgIsvCrHFVevAFpewV8z006vvUN7j+WoXQoAHejoxsxTLTd9Ezfc14wVd/4Ejz42RbC1/BpckfVa2vudzvHNOl4swG38jp6AWouVGQHlhBX40j9p6xzPMSSEEEIIIdNyogI7AOB5Hm6323js19JIfT42PzYA5OWx3sdcbWR2zDi4m1t9qP2nRjz/zE9YcHPpd9GY1Qump0lqP3e9mvHcDhxqyXh4PKquwDXLWc/Zu3owYwQ/ppTQj/4MzebnHwPrRculGYemWpTlIny3caLXTO8lzD5GE/pq70DjM8/jJ7eWTzz3sc9ogeVM6cd3IqA0jkFrBwsYjTGT6ybGTRJCCCGEkDl3IgM73U033YSLL74YN998M4qLiwEAl1xyCa644gpce+21WLNmDQBg+fLluP7669HQ0ICrrrrKtBXyfsw4uDP3xM30Z2ZY+mVW2mAG1quH7NRIg9ZT9L6U4zP/cjXrKdukpWRqvWx9D30a61/WxwA2ovGZL2GF+enHoPeETbYCK6da9D6UV5Ub6Znvn358JwJK4+exz6C8+xF8+q5XjXF9eponIYQQQshitf/QYbz+5jvo6RswL5pST9/AjJ9jNheBHQA4HA6sXr0a+fn5RhvP81i6dCkqKyuz1i0vL8fy5cshCEJWO3l/ZhzcnXh6quF0K2JOBELZPWFaimPGmDm9+MeMCpJceg2uRjN+dtfPJlIyM9Qu0R53HJpxzx3bNtD86l+11MjJvYPvl1HwZNbnCJx8fNm5M523pbVaMDkLPamEEEIIIfNYIpEEAKRSKfOiKenrzuQ5mULhMHbsajrhgR05Ncw4uDP3xM30ZzZd9J17WeD1UZb692XcoRUB+TIe6WZj7PQiH/q4tBV3Pm+Mk7von76EFVnVLHO5CNfo1Tozgq7yW3+iFTfR0g7/cg0roJJRcOTY278I3228F1e3/Aw36KmL+BKeP45USTaecPanWDga8/Fdt249Xr32GtR29AFVn8FPtAIsbNkruOaeq1ka56wHmoQQQgghJJfR0XEU5AcosFskuHA4YkRcLS3NWLt2bfYaGQ61tKF2SRWgqgDHHde/HZ09WLmclVklhBBCCCFksWjctRfjwRAC/okiIgG/D/W1NVnrtR/pwth4yHg8HgyhbknVpPWm473G
nRgPBnHZJRehu6cXzS2HgXka2O3YsQPLl890INTiMqOeO57noCoqFK0Xbub/sm0QQgghhBCy2DidDkAL1vSfZCptXg2JZCprHWjj2Y7H6OgYotEYfvfw49i5qwlej2deBnZkembUc9fTO4hkKoGK8jKtI04Fx3HT+xccevr64bA7UVlRYt40IYQQQgghC15bR6fxf47jUFleBpvNmrVOOi2iuzd7SM/x9NoBQOPO3cgP+FFaUgKXy2lePK9Qz92xzSi4kyQJzS3t6OjsRjo5+a8MR2Nz2FBbU4UVy+uoKg4hhBBCCCFkRii4O7YZBXfQAjxJUmaY0AlAAQSBp8COEEIIIYQQMmMU3B3bTEM0CIIAh8MGh22GPw4bBXaEEEIIIYQQcoLMOLgjhBBCCCGEEHLqoeCOEEIIIYQQQhYACu4IIYQQQgghZAGg4I4QQgghhBBCFgAK7gghhBBCCCFkAaDgjhBCCCGEEEIWAAruCCGEEEIIIWQBoOCOEEIIIYQQQhYACu4IIYQQQgghZAGg4I4QQgghhBBCFgAK7gghhBBCCCFkAaDgjhBCCCGEEEIWAAruCCGEEEIIIWQBOOWCO0mSzE2TTGcdMn/R+V246NwubHR+59a6devMTScMnduFjc7vwkXndvE55YI7QgghhBBCCCEzR8EdIYQQQgghhCwAFNwRQggh81BjY6O5iRBCyCJHwR0hhBBCCCGELAAU3BFCCCHz0EUXXWRuIoQQsshRcEcIIYTMQ6lUytxECCFkkZuz4G48GMQvf/0g7rnvftxz3/14461N5lWmjefnbLfJSUDnl5D5ieM4cxMhhBBC5tCc3EWPB4P4w5PP4MLzz8Vdd96B2279FFoOt6Fp7z7zqtPy5u+/gw3rG7Dz1d+jr2Un/vrgf2LD+ga0bn8NAPDWw9/FhvUNOLz9NcTDY/jLhm+w5Y2vAwDefvzH2LC+AYc2v4B4eBSbHv0hNqxvQPvOvwIA/rLhP7Tn/xkAsOkxtvzInrcBAH/+v69iw/oGjPW1a+t/ExvWN2CwfS8A4MV7/xUb1jegc+87AIDn7vkX9njfuwCA1x74Wtb+PP2jz2LD+gYMdx0CMt7faM9hAMALP/sCNqxvQF/LDgDAG9r7HTqyP2t7vYe2AwD++rv/wob1Deg5uA3I2B/9+DzxnU+y99PEAmx9/0a6W9jrP/Tf2LC+Af2tuwEAr/7y37TXOwAAeP6ez2vvvw0A8MxPPpf1+Lmf/jM2rG9A1/4tAICnf3ibtv3mrOX69l763/XYsL4B4ZFeAMDLP/8SNqxvQDw0AgD443/9LTasb8D4wBH2ej/+R2xY34DgYGfW64eGugEAf/7VV7K29+z/3I4N6xsQHR8EsvaHvd8nv/tpbFjfgGQ0CGTsz2hvKwDgkW/ciN/edSV0j3/r5qz9/9MP/wEb1jdguPMgAODBuz+IDesbkIqFAQDP/393ZD3+3b9dhad/9Flje49+86PYsL4B6UQUAPDQV6/V3l8XAOCRr9/Anh+PZC3Xj8+G9Q3YsL4Bich41vr68Xr4a9dnPV//vOke/eZH8dv/d4Xx+Lf/7wps+NLlxuMnv/cZtn/JGJDx/vTj9dBXr816/uu//nrW8/X3p3vkGzfikW/caDz+7V1XZj1f33/d7778IWP/08mY8X51G9Y34MnvfQYAEAsOY8P6Bjz1vVuylj/x35/Kevzwv3/YePzQ167D779yDQAgHhqZtP2H//3DeOhr1wEA0onopOUb1jfg0W9+FAAw2L4XG9Y34PFv3QwAiI4NYMP6Bjz2Hzexx+OD2LC+AS/87AsAgNj4UNb+peIRtv2M47dhfQMe/8+PZy1/6KvXZm3vD9/+BAAgONiFDesbjOOrL3/x3n8FAAy07cGG9Q340w//AQAw2nMYGzKOn/7+n/r+rQCAke5m9v6+8REAQPeBrVnb6z20HRvWN+CP3/k7AED/4Z3YsL4Bz/30nwEA4/0d2LC+Aa//5hsAgERk
HBvWN+C1B/4dADDW15b1ev2tu7FhfQOe+fE/AgA6976DDesb8NLPvwQA6Gth23/pf9cDAHoOvocN6xvwyv13AwCGjuzHhvUNePWXX9bW35G1vv5+n/2f29nrHd6VtXywYx82rG/Axke+DwBo2/kGNqxvwJ9/9ZWs5S/c+0UAwOHtf8b/9+lleOI7nwQAREb62Pv99dcBACPdLdiwvgFv/PZb7PVad+ORb9yId/74PwCA1u2vYcP6Bmx+8v/Ler13n7gHALDz1d9lLW/f9SbbvnY8H/je/8OG9Q3Y8tS9Wa+3+amfARnHS9/ee8//ir1f7Xhuf+EB9vw/3QcA6Nz3Lp6/5/PGtWbbM7/AhvUNaHzx1wCAPW88jg3rG7Dz1d8BALY+/b9s+Uu/Ycv/+gdsWN+A7c//X9bzm7e8CABofOk3WfunPzaWv/hrbFjfgEPvPg8A2PLUvez1XnmQPf7TfdiwvgEHNj3Ntv/s/diwvgH73noSALD9+f/DhvUNaPrLY0DGtXzvm08AGcezeetLAIBdf34IG9Y3YP/Gp4CMa2nLtldyrq/fC+iv/8R/fxL/87c1xnL92p3rXgMANj76g6zt6eu37XwDALDr1d9jw/oG9Bx8D8i8V9Cu3fq1/eC7zwEZ9zr6vYZ+rdev/S/9/Ev43b9dZWxPv3foPrAVAPDqL7+MDRnXev1aqN9bPPX9W7Eh495Ev/cx7oW0a21vMyvs88K9X8SG9Q0YaGvK2p5+rX1Ju7br10r93mO8vwPI+O4f7GD3hvq1Wl/+2H/chA0Z1yL9Xka/Fv7h25/Ahoxr5wbtu1rfnr4/+vpPfe8WbFjfgOjYAJBxL6HfOzz+nx/Hhoxrq7493e+/fHXW6z39w9vwu3+7yliur69fe/X19Wunvj/69n/7/65g+xcenXStE1MJbFjfkHUv8ti3Ppa1Pxu+dLlxrRhoa8KGjGsRtP155Os3GI8fvPuDxne3/t2lXwug3Tvo1zZ9e/q1dayvHRsyrgX6d3nmtZ2cPIK54UTo6u6B2+XEimVLAQABvx8Xnn8umvbsw4plS+FwOIx1VfXYEy4Otu9BR9NGLL/wekTGBtCvPT7rg5+BJEnoa9mJjqaNWHbeNRCTMQx27EdH00acdvnfomzZWnQf3Maef8G14DgO3YfY47MHj7DnH554vpSMo+fge+ho2ojx/iOoXH0BBtr3oqNpIw6+8ywKq1egr2UXOpo2YqBtL/KrVmC46wA6mjair2UHeIsVYz2t6GjaiPBwH9KpFHpbdrD9abgZdncexvva0dG0EUMdB+ArrsGRPZvQ0bQR7bvfxHB3MwY79uFI0yYEB7tRULUKR5rY8o6mjQgN96B7/1Z0NG3E2EAn8kpr0bHrTXQ0bUTPoe2IhUbRe3gnuvdtxnkf/SIS8ShGelrQ17ITgx17IYkpDLQ2ofvAVoSGuuEtqkLr9tfQufcddO3fgshoHzr3vovuA1sRHulDXmkteg5tR1/LDrTteB1jvW3oP7wbfS07EBrugTtQht6WHehr3oGB1iakE1EMtO1B3+GdGB/ohLeoGt0H38NA625079+C8EgvuvZtRm9zI8YHumD35KNz37vob9mJ5i0vwVtYjsEj+zHUsR+RsUG48orRffA9DHXsQ/vOv8JXVImeA+9hsGMvgoNdcHgLcLjxdQy07kZb41/gLSxH9/4tGOzYh+BQL6xOH3oOvoehzgPo3PsOgoOd6Du8E8OdhzDadwT+0mq073oTg+170bX3HYx0t2C4Yz9G+9ow0n0Y3sIK9l76O9DX0ojwSC8697yNsb52pJJxpBJxDHbsQ3RsAIc2Pw+7J4DD772K8f4ORMeHAV5AX+tutDa+jpUXfhg2pxfd+zYjGhyCmE5DBfu8xcOjOLL3bTg7/Ojatxnx8ChSiRjAWdB9YCsSkXG073oTDk8AvS07kE5EkU4mwQtxdO57F4nwGHoPbcdoTys6976DZCyEVCoBcBb2R43QCA6+/Sys
Tg86dr2JZCwESZKQjAbRfXAbxGQce998Ak6vHy1bX0YiMg5JkiFGguja9y7EVALtu96EzenBkaZNSCdjSETDAMdh23O/RDIaxKEtL4DnBbQ2vgYxGYckSRBTSePGrXnLi7BYHcYfVeKREFKJiHEjpC/Xb2xUlYcsy+ho2pi1vKNpI2wONyRJggoOHU0b4fD40bzlRQg2ttzpDSARj0KMRdj6To/x/JatL8Pu8kKSJKQTcXQ0bYTV4dKW29G85UU4vQGk0ynEw+PoaNoIu8uL5s0vQJJEdDRthNtfBDGVhiim0NG0EZ6CMjRveRGyttxXUA5JkhAPjaGjaSPGB7uwbPMLSCei6GjaCH9xNSRJQmikjz0/vxT7N/4JUjqBjqaNyC+rhSRJGNW+K/ylS9C85UWktOfnV9QjGYshPMqen19Wl7U8Fh6BmEojGhxBR9NGFI0P4uDm55GKsPdTVLMSkiRhuPswe1y9EgfefsbYv4LKZUjEo0gl2PFLxEI48M6zSIRH0dG0EcW1pyEZiyEWYttPJiJo3vIi4tpjSUqz79ZW9l0tiSkcePsZJCNBdDRtRMWKtez9D/eio2kjxFQczVteRHCok60vi0inUxjT3r8iS2je8qKxvkWwQhJF4/ioqormLS8irC1XFBmJeBTD3S3oaNoIWUzh4ObnEdGOt6oq2vFtQ0fTRvCCFcV1p2O8r8N4LEkSBjvYd7v++QsOdGK0txWB8nokomG073oLHU0bYXW60bzlRYxo3/0Ob0C7tuzCoXefh4UX0Fz9IgY79qGjaSO8BWXs9bX1Xf4i5G9+HmP97PW9RRWQJAlDR9jru/yF2L/paYz3sffjL61FIhpG24430NG0EXmlNTjwzrMY0I53QdVySJKEyGi/sbx5y4sY6W5GR9NGFNedAUmS0LHzr3jv+V/B7vJivP8Iho6wa2fFqg9AlmT0HWbXupL6M+H2FWC48yA6mjai8rQLIEkSepsb0dG0EaVLz0Lzlon3V3vOFag/92oMadfismVr0LzlRfRp69edfTmqTrsYA21sf6tOOx8czxvPX7LmMkiSZOxv1erzYLHa0NfMrqX1a//GOL768w9sciDYfwQd+rVckoxr5ZKzLoWiyOjYza6jyy+4DslEzFi/Zs2lEFMJ41q66tKbIEkSBtrYtb92zWXgBAGyLCI42IWRnlbUpdIY6GDL68/9IGQxhZ5D7N5h9WUf1/aP3VvUn/tBQFXR38r29/Tuv0XZsnVoee8VdDRtREfTWwgP9xj3GmuuugWJeBR92r3D8vOvRTP/Irr2b0FH00YEB7tRsUrCSPchdDRtROe+zQgN92K0uwWtja9j7fWfQ37FchzZ8zY6mjai9+B7iI4PovvgNnTueRuhoR4UVJ+GkZ7D6D20HUf2vI3x/g6Mdreg++A2jPUfQaB8KTr3vouufe9ioK0JseAwOpo2oufge0hE2fWjZ/829LY0onv/FoSGujHQvheD7XsRC43AU1iJzqa30Xd4J440bcJYbyu69m3GYPtehEcHYPfkY6S7GSPdLehv2Ynx/nb0NG/HUMd+RIMj8BZVYejIfox0t+DAxqfg9Oajt2UHRnsOY7T/CGxuP/oO78RYbxuONL0FpzcfY/3tCA11Q0yz75/u/Vsx1t+Ow9v/DJ4X0Ln3XYwPdGBssBsOXyE6dr2Jsb52tO96E1abAz0H30NoqAux0CisTi+GOg8gFhzGvreehNXuRN/hXYgFhyCmkkjEo2je+hJiwWEc3Pw8kHGtkmUFiXjU6DA43Pg6LBYBB955FmIyDk6wQZIktO96EwDQuedtcDxvPF8SRXAWq/H40OYXoMiS8UeGeDgI8Bw6dr8Fh8ePA28/AzmtXYv8xVAUFcEh9l3oDpTg4DvPIhkLo23HX+ArrEA6EUciGkJH00bkFVejecuLEFMJNG99Cf7iKkiiiHQqiY6mjQho15aEdu2IBgchplNIRMPo0K5NBzY9jbySGpTUn4VjEYQ5
CUMWnTnpuRsZHYPL5coK4nw+H9KiiEQymbUux7GTfbQfgKX+5BVVYMX518JiYR8Ou9sLQRDAcextubwB1JxxMax2F3vs88Odl4+09lcUb0E5qlZfAFkbt2BzsefzFisAwJNfguLa0yGmEgAAi9UKQRBgEdhyRRJRWLkcPG9hj1UZFp6H3ekFAAg2BziOgzOvAABgd3lgtU7sv93lBc9x8BaWs+crEqxWK1Ixtn+KJKK4eiWsNqe2PTvsTpfRw2N3elFYuQyyLAIA3HkF8PgLkdR6iHyF5SiqXgGLtn/FNSthtzvg8RcDAPLLlyK/vA4Wqw0AO/hWqxUR7a9YeUWVKKxaabyeYLPDZrMZqVcuXxEKKpfCYmXHg+ctsNpsgMo2V1DJti/Y7AAApycPVqsVnPb+vQVlyC+vg5iMAwAUWYTVaoWkHW+r3Yn88jo4PX4AgIXnYbXZjOe78gqQX14HPRPMIrDXj48PseW+AuSX10MW02wFRYTNZmMfMgD+kirt/bPPpZyOwun2IaX9Vc1XXI3impVw+QsBAMHBI7DabBBsbP3C6pXIL6+DpG2/sGIprDYbXHlsfW9hOUqWrEJM2x9VlWC12eAJlAAArDYH8svrjP2Jh4ZgtdnAa5/niuXrUFS9EjqLxQKr3W583goqlyO/vA4uXz4AwOH2wGq3Q1UUcByHkroz2fY1drsTPM8hER4FAPiKK9jxT7PjnQgNw+X1G+8vv7wO+eX1xl8VLRzgdHnA6Z+nJaex46t9/qBIUMSk8VfJgoplyCuuNM6vxWKBqipsXQC+oioUVNQbj50eH1xuHwBAsNpRULkcecWVxnKrzQqHwwWL1WYs15+fTsYgJaNwef2wOlxIRoMIlNXBk18KaD1GDqcbVjv7LCqyhILK5bA52XcDx/EQBAHegjIAgNPtR0HlclisbH2Ot8Bms8OiXYjceYUoqFphbB8ArHYbSuvOZOuratb+W6w2CIIAu8sNAIiM9KCwagX8pUsAAIqqsO82hf1hK6+oEiV1Z8DuYsfDYrVDEARIKfYXa3deEQoql8PpCWiv7YbD7Ybdwd6P05eftTwZCcJqt6GgrAYAAI5HUdVK+IrZY4uF7Z+qvT5nEVC85DT4itj+e/JL4HR5UFS5HAAgpZMorlmNggr22Onxw+F2w53Hvlt4zsKOr4t9FyZCIxAEAckI+4t5KhFF8ZLTYNeWOz357Pg42fGxOjwoqFyOQCn7/HIAbDY7nF72fsDxKKhcDm8++12KhUchWK1GSreqyuz9a9+9vCDA6fJA0T6rdnceiqpWwullvzu8hX2389q1IxEeQ1HVSgRK2fHheQtbzrPf1XQynrV9p8cPp8eH8AjLHtA/n7L2XSYIbPuJyBgAQJLSKKhcDoc7DwAgpuJZ1xZZTKOoaiUKy9nnW9U+H5z25er2F6Gk9nR857d/AcCu5E6PD2O9LNvDYrGiuGY1YkGWrWB3eiAIgnE+ee34uf1FAACHdu0LatkPY71tKKk7A06f9v68AVgECxRRu1Y63CioWgG7tv8Ot0/7fLL3q3/+9PMvp5Ns/7XvDl9RFQoqlwPa8ZZlES6fH5x2fKF9v+mfD4fHD0EQIOjXQocLBZXLwWnn2+pwsd8f7fg4PQEU154OTyH7/bQ53RAEAak4uzZabHYUVa1EKh4CANhdPjicbvhKqthjuxsltacjHmbny2p3aq+vXcvyClFUtRI2G/u8Oj15sNrZ9xK0a3NB5XIoMvt98gQK2f4p2vnLK0RB5XLjXsPm9MDlzTN6qKwON4prT4dN+3125RXA6fKwv34DCJTVss9Xmp0PwcZ+fx0e7Vrg8qGk9nSktGt3ZKQXLp/fuJZ7iypQVL0KqsR+HyxWG3ieg1e7Njm9gazPt/751e+Fipecxrav30vll0IQBMja94df2z/9/PEcB4fTBVHrccsrrUFB1Qoossy2b7HA5c2DJ8DOlztQjMKqlca9ktXGvv/033+r04uCqhWwCOze
hYcKQRCgatvzFpSjoHI5/MXsfPpLKtnx1z5enkAJypadbXzfub3s85VO6N+vhSipOxMWC/u8QpUhCIJxPXX5CrLenydQhMhQF2LBYQCAr6AMRUtOM+4FXN48OF0e8LwFHM+jrP4s+ArKISbjcHoDcDjd4LRroydQgqKa1SisWgmet7DPvtVqfPcINgcKq1bAqV33PfmlcPn8gPbdloqHUbzkNJQtPwc2pwcqB/A8B1li99pWqw1FNauN+0BFlmBzuuD0sWOraN9NOsHmhGC1onTJagAAB3Zt06+V8dAorDY7HG7td9Wdh+La0+ErqmC/M8f4IScGFw5HtFtxoKWlGWvXrs1eYxY89Qz7S/3NH53oDu7o7MIbb27Exz7yYQT87OYdYL12xzrhgx37EQ8No6T2dLjyCjF05ABiwSGU1J0Bl/bXxOj4oPF4pLsZkdF+Y/3+1t1IRoMorlkNd6AYA217kIiMGevr2yutPwtOb2Biufb84a5DiI4NIFBWB39JNQY79iEeGkFR9Up48ksx2L4X8fCo8fyR7hZERvtQtnQNHB6/sbxixTrYnB6M9rYiPNxj7E9vcyPSiajxWN++vj19eWndmXD68tF/eBeSsRDKl50Du9uH3kPbkU7GjO0PtDUhERlH7RrWfa/vv/5Yf3/6/nXt2wxZSqNi5bmwOdzoOfgexFTc2L7+emXLzobDnWccT315X8tOpOLhjO2z19f3p69lB1LxCCpXfgBWh8vYvv76+mP9/ZmPl/788uVrYXd5jeOhv37n3negyJKx3Lz/+vqVq85jf33T9tf8/qtWnw/B5jCOj34+9NfX31/X/i2QxZTxWD8++vvr2P0WABiP9e0br6c9X9/f7gNbIUsilpx5Cdu+tv5Ur6efb/2xnv5bczqrpKcfD325vj/G46aNgKoa79e8ffP6nXvfgaooWHLWpQCAI02boKoKatc0QEzG0XOIpQDler4kyejex1J6ci3XH/MWATVnXJxzeefed6CqqnF8zPtvXj/XY47jseSsS5FOxtB7aDt4wWocr46mjbDanKhc9QGk4hH0teyARbCh+vQLjefbnB5UrFgHMZVAz8FtWcs7974Djreg+rQLjPWtdicqV5035f5YrHZUn3aBcfz095+IjGOgrcl4vWQshP7Du+D05qO0/kykE1H0NjfC7vKhfPk5xv44vQGU1p9lLM96f7vfgs3hRsXKcyGlk+g+sBV2lxfly9ca29cf68tdeYUoqT19yvej708yHkN/y3Y43HkoW3Y2ktEg+lt3G+8vOj6I4c6DxvP15W5/EYqXnGYcb/319Pejby8VC6Pv8M5J789qd6Fy1QeM4zXV+vHQCAY79sHtL0bxktXG6+v7nwiPYaB9j7E9/fPh8hWgpO4M4/n68dOPl7egHIVVyzHW147QUJd2g7YKwcFOjPd3GI/1a4u+vVhwGENH9k88Hh/CUOcB4/3rx0Pf38hIH0Z6WuAvXYJA6RKsW7cOT/7mf+AtKENh1Qrj2ucrqkRBxVLjsb+kBoGyWuP469vTHwfKauEvqWE9dbs3omzZ2ag+7QKER3ox2nMYBZXL4PKXIDR4BOHhHuSX1yOvuArh4R6M9raioHIZfIUVxrWusHI5vIXlCA52Yby/HXnFVcgvr8d4fweCg53GtXSsrw2hoW7kFVcjv7zOeKy/P/34FVatgLegzLh26a8/2nMY4ZHeSa+vX4v1/dMfDx3Zj1hw2FjfuHbXrIInUILQUDfG+tqM/devXfpy/fWKl5xmBMYDbXvgLSiD219k3Gvo6+uvN+W9ivZ+9GtLz8FtEFMJ4/n69vRrv76/+rVZ37+p7nX0a5v+eTCuldq1xriWL10DweHBaNdBxMOjxrVJ3z/9cW9zI3jegrJlZwOAsb/Gtch0bdWvnRPXYra+fi0036vox0t/f8b2tPX17en3Bsa9gHZt169d+uvr29P3T399ffvdB7ZCSieNa0fnvnehSOKkewf99afav9o1
LNVTT//UX6/7wFbIYtq4VnY0bQTPW1BzxsXGd71gc6Bq9fnGd6n+3QIAR/a8Dd4iZF1LON6CJWdekvHdN/W1pffQdsiSiPKVH4AqpdFz6L2J717tu0u/dunfdfpj/btJv/ZAu9YLNjsqV51nrK/vj/76+nfvibRjxw4sX77C3EwyzMvgTpblib+okAWHzu/CNZ3fbzJ/0fmdW+vWrZuziczp3C5sdH4XroV2bim4O7Y5ScsMBCaCt9mgaqkJZGGi80sIIcf2xhusEAchhBCim5PgrrAgH/F4HMmM8XXhcBg2qxXOjHF4hBBCCJmevDw25o0QQgjRzUlwV11ViVg8gebDrLT8eDCIzVu346wzT88qskIIIYSQ6bn2WjYlBiGEEKKbk+Au4Pfj7z7+UWzeuh333Hc/HnzoMSxfVo+zzmAD9AkhhBAyM0NDrBIvIYQQopuTgiozMZ2Bn9NZh8xfdH4XLjq3Cxud37lFBVXIbKHzu3AttHNLBVWObU567gghhBAyu15++WVzEyGEkEWOgjtCCCFkHiouZhMRE0IIIToK7gghhJB56KabbjI3EUIIWeQouCOEEELmoa6uLnMTIYSQRY6CO0IIIYQQQghZACi4I4QQQuahp59+2txECCFkkaPgjhBCCJmHqqurzU2EEEIWOQruCCGEkHnok5/8pLmJEELIIkfBHSGEEDIPHT582NxECCFkkaPgjhBCCCGEEEIWAAruCCGEkHno8ccfNzcRQghZ5Ci4I4QQQuahZcuWmZsIIYQschTcEUIIIfPQbbfdZm4ihBCyyFFwRwghhMxDe/fuNTcRQghZ5Ci4I4QQQgghhJAFgII7QgghZB568MEHzU2EEEIWOQruCCGEkHnojDPOMDcRQghZ5Ci4I4QQQuahf/mXfzE3EUIIWeQouCOEEELmocbGRnMTIYSQRY6CO0IIIYQQQghZACi4I4QQQuahX/3qV+YmQgghixwFd4QQQsg8tG7dOnMTIYSQRY6CO0IIIWQeWr9+vbmJEELIIkfBHSGEEDIPvfvuu+YmQgghixwFd4QQQgghhBCyAFBwRwghhMxD9957r7mJEELIIkfBHSGEEDIPXXTRReYmQgghixwFd4QQQsg89JWvfMXcRAghZJGj4I4QQgiZh/7617+amwghhCxyFNwRQgghhBBCyAJAwR0hhBAyD/34xz82NxFCCFnkKLgjhBBC5qErrrjC3EQIIWSRo+COEEIImYe+9a1vmZsIIYQschTcEUIIIfPQyy+/bG4ihBCyyFFwRwghhJDjpqoq/dAP/dDPnPzQd87kHzMK7gghhJB56Dvf+Y65aU4c68aCEELI3DF/J3PhcMT4Zm5pacbatWuznzHHJEmCIAjm5izTWYfMX3R+Fy46twsbnd+FS5IkWCwWczOg3ViQ+Y1+dxeuhXZud+3ahWXLlpubFxG993KiheO4iQcABXfk1EPnd+Gic7uw0fmdW9///vfx9a9/3dw861RVnXRuzQGd+bFuqnZyajGfX7JwLLRz29TUhPr6pebmBc8cwOkm2lWoKntMwR055dD5Xbjo3C5sdH7n1rp169DY2GhunjWZgZl+bvW2zGVT/Z/MH/S7u3AttHO7Z88e1NXVm5sXncxgz/x/Cu7IKYfO78JF53Zho/M7t05kcGcO0kRRnBTcHSvQy/WYnJpkWZ4y7ZbMbwvt3O7fvx9LltSamxc8c88dezzRW6e3UXBHTkl0fhcuOrcLG53fufX000/jpptuMje/L+ZgTH8siqJxg6gHdscK9HI9JqemhRYAkAkL7dweOnQI1dU15uYFjH2HZgZ3uQI6HQV35JRE53fhonO7sNH5nd9y9cCZg7vMwM78k+t5uRxtGTk5FloAQCYstHPb2tqKiooKc/OCYu6lyzYR1On/sv9qvXYU3JFTEZ3fhYvO7cJG53du/fSnP8Xdd99tbj4uUwV2+k9mcKcoyqTAbqogz7xNcmpaaAEAmbDQzu2RI0dQWlpq
bl6wJgd6phRMU2AHULVMcgqi87tw0bld2Oj8zq3ZGnN3rMBOURRIkgSO47KCu6MFeebtkVOboijgeZr6eCFaaOe2p6cHRUVF5uYFLzvIy+ilywjueJ5j/6fgjpxq6PwuXHRuFzY6v3NrNoK7XIFYZlCn/yuKInieh6IoWT+5gjzzdnM9JqeWhRYAkAkL7dwODg4iEMg3Ny8g2d+V5p67iccTwR3H8UZwx3E8BXfk1EPnd+Gic7uw0fmdW48//jg++clPmptnxByM5Qrs9OAO2o2i+UeSJOO5epv5hoQQQsj7p6oqeJ4Fc8gI3jmOp+COnLro/C5cdG4XNjq/80tmb1pmz1tmYCfL8qTgTpZl40cURbjdbrhcLthsNlit1gU1vocQQk41+ndvOp1GPB5HLBYzvnc5jsfC6aclhBBCFpFf/OIX5qbjkhnYZQZ4emCXGcxJkoRUKoV0Og2fz4clS5agtLQUPp8PDoeDAjtCCDnBLBYLHA4HfD4fSktLsWTJEvj9fqiqClmWKbgjhBBC5qMHH3zQ3DRtmb12mW3mlEtzL106nYbT6UR1dTX8fj8Fc4QQcpJZLBb4/X5UV1fD43FTcEcIIYQsVuYeO3OAZ+6xCwQCKC4upqCOEEJOMRaLBcXFxXMz5i6ZTOKJPz2LkdGxrPazzjwdVzZcmtU2nTEb01mHzF90fhcuOrcLG53fufXggw/itttuMzcfU2av3VRj7CRJyuqxS6VSKC4uhsfjydoWIYSQU8ucBXfPPP8Szj/vXNTWVJsXZ5nOzcF01iHzF53fhYvO7cJG53d+0IO7XL11mT11+r/pdBqBQAA+n8+8KUIIIaeYUy64E0VJm5BvaoqiahP1kYWIzu/CRed2YaPzO7d++9vf4rOf/ay5+ajMvXaZPXeZ4+syAzuXy4Xi4uKs7RBCCDk1zUlwNx4M4oWXXsWHr7saAb/fvDjLdP7yO511yPxF53fhonO7sNH5nVvHM4n5dHvtJEkyCqhUV1fTGDtCCJkn5rSgytvvbsE9992Pe+67Hw89+gckk0nzKoQQQgiZI5lBXmagpwd7gUCAAjtCCJlHZjW4Gw8G8ctfP2gEcE8987yxLBZPwO12464778Adt7M0khdfeS3j2YQQQgiZrs997nPmpuOWK8gTRRFer9e8KiGEkFPYnKVlOh0OOBwOo61p7z5s3rodf/fxj2alak4nrWc665D5i87vwkXndmGj83tqm25Kpl4d02azobS01LwZQgghp7BZ7bmbSsDvzwrsdG6XE84c7Ucz0t2Cjp1/QToZMy8ihBBCFo2HH37Y3HRczL12ehDocrnMqxJCCDnFzUlw17R3H+67/wF0dHYBWvXMpj37UFFRnjPoOxqLYIUiS+C4Odl1Qggh5JR07733mpveF3OAZ7PZzKsQQgg5xc1JhHTWGafjsksuxDPPvYh77rsf9z/wW7hcrkkTmE+HRdAuNhnlnAkhhBAyM+ZgzpyuabVazU8hhBByipuTMXczcawxGyPdzRjtOYyq0y6Ay1cAaBco7liT45F541ifATJ/0bld2Oj8zq17770X69evNzdPSU+3zAzgco23kyQJiUQC9fX15k0QQgg5xc1Jz91sstrZGADeMnEDoSpyxhqEEELIwjeTwO5Y9IBP/z/9wZQQQuaneRfcWQSWJpJ54dEvSIQQQshi8cQTT5ibCCGELHLzLrgTU3EAQDoxUS1TVRUK8AghhCwqP/7xj81N02ZO0TS3E0IImZ/mXXAn2JxARg8eAHBagEcIIYQQQggh70c4EsG7W7bhf3/5a9xz3/345a9/i+07diGZSplXPSESiSQONR82N0/LvAvu9GqZHD+x66o6uXpm3+FdkNLJrDZCCCFkofjEJz5hbnpfMnvzCCFksQqFw3j0D09i2/YdAIDSkmKk0yLefncLnnr6OUiSZH7KrGtpbcXLf34dqeMIJuddcCemEwCAdCKa0apCVSZ67iJjA4iM9GK8vyNjHUII
IWTh+MpXvmJuOm4U0BFCCPPiy39GIpHE2WediS/88z/iU397Mz7/T7ehdkkNhoZH8Oamd8xPed9GRkfR2dVtPFYV9p2saP+qqoqdu5um9V0976ZCiIdG0H1gKypWrIMnvxQAIKYS4HkeFqsdABAc7MRg+17kV9SjqHqVaQvkVHeszwCZv+jcLmx0fufW888/jxtuuMHcPKXMnjlFYWPVM6dB0H9EUUQ6nUZdXZ15E8d04MABvPPOxI1Pfn4+rr/+ejgcDhw4cABbt27FBz/4QVRVVWU9bzaEQiE899xzSCazs3b0fWhvb8/at8xlDocDAJBMJvHiiy9ibGwMFRUVuO6664Ac72v16tW4+OKLjceEkIWhq7sHTz3zPJbV1+HD112dtUxRFDz82B8xNh7EF/75H2GzaXNvz4K3392C3Xv24dN/ezPy8wPY1bQHb258B5//p8/C6XTg3S3bsG37Dnzutlvg83rNT88y73rueItWLZO3ZLSqWWPuYsFhAEAiPJ6xDiGEELJwfOc73zE3nVTd3d3YunUrVq9ejdtvvx233norAOCNN94wr/q+dHd347e//S1eeukl8yJAC7xuv/12XHPNNVl/bBgbG4PD4cDf/u3f4vbbb8ftt9+Om2++2QjsAKCxsRFjY2PGY2S8r4qKCtx+++1YvXo1Dhw4gAMHDmStRwiZ/4ZHRgEAFRXl5kXgeR4VFeVQVRWDQyzWmC3nf2AdnA4H/vTcC0imUkaPHQAcaj6Mbdt34Lxz1x0zsMN8DO4kkf1FLhUPTzSq2SkliTD7YtZTOAkhhBByYkUiEUiShPz8fACAw+HAzTffjOuuu87o+ZIkCa+88ooRGL3zzjt44IEH8MADD+Chhx5CKBQCALz00kt4/PHH8fjjjxvLZxpMdXZ2QpIkrF692gjgXC4X7HaW5WPW3d2NlpYWLFu2LCso1LdTW1sLAKipqYEgCOjooKEfhCw0Fq2mhyznnkNbT5ecbVarFTd++FrE43G8/OrrRvvY+Dheff0NVFdV4sLzz816zlTmXXAn2NgXtJ6CCdZvx9JMJBEAIEtp1j7FiSGEEELmu5mkZM6Furo65OfnGwFbZs/a6tWrsXr1agiCgGuuucbo/Wpvbzd60goKCvD6668baZWJRAIXX3wxbr/9dlRUVKCxsRGhUAhVVVX47Gc/a6RM6vLy8nDrrbfi4osvRjKZxMDAABwOByoqKgAtbVMURTzzzDOTAsZkMolt27ahpKQES5cuzdquHnCaJRKJSSmghJD5rbi4EADQ2tZuXgRRFHGkswsWC4+iwgLz4lnBcbyRNs8ez3xM9LwL7ngtHVP/16AoSCfjEJNsHjzB5oAspY1AjxBCCFlIvvWtb5mbTiq9p05PiRwcHMQDDzwwaZybrqOjA8lkEn/84x/xwAMPoLe3F/F43KgOJwgCfD4foAVukiQhHM7I2jmK9vZ2jI2Noa6uDnl5eQCAK6+8Ek6nExdffDFuvfVW5OfnY+vWreju7kZjYyPi8TiNoyNkkSsvK0NpSTH6Bwbx0quvY2BwCJIkoau7B888/xIi0Shqqqqy0rlnQyqVwnMvvAxBEHD1B69kaYkA8gMBXHrxhejq7sHmrdvNT8tp3gV3stY7l4xN/CVNVRQoqgJZShvt+pQJerBHCCGELCSvvfaauemUUVVVhU996lPIz8+fsucLWkGTW2+91RgDd+uttxrB2PvR0dEBQRBQU1NjtDkcDnzkIx9BlXZj5nQ6IUkSxsbGMDAwYASar7zyCiRJQm9vL1566aUp98fpdM76DR4h5OS77pqrkOfzobnlMB7741O47/4H8NQzz6Ontw8A0H6kE9u2N5qf9r6817gT4UgE119zFTwetzHDm6oCa88+C8vq67BteyPC4Yj5qZPMu+DOYmVBm56eqTNSMbXCKv6SatYus2CQEEIIWUi+/vWvm5tOqu7ubjz00EPo7mblvIeHhxEOh6cMjmprazE2Nob2dpb+ZK5I
mSkUCsHn86GoqOiYBVW6u7sxODhorK9755138NRTTyGZTCKZTCKRSMDhcGDJkiVGj2NmIRa9WqZ5jJ15DB4hZGHJ8/nw97d8EhdfeD6qqypRWVGOyopyXHHZJbjpxushCALe3fIeduxqMj/1uJ115um47uqrUF1VCeSYnuZDH7wCf3NFA7xeT1Z7LvMuuMuVltnfuhvBgU6EhnuMYipWhwsAkIpNL4WDEEIIIccvlUqhqqoKr7zyCh544AG88sorKCkpMVId9V40vaCKPg5PH6O3detWcByXNY7tT3/6Ex544AEMDg7ivPPOm1ZP2Z49eyYVUgmFQojH4wiHw3jooYfw0EMPIRwO4/LLL58y+NRVVVXh/PPPR29vrzFOT993QsjCJFgs+MC6c3DzR2/AJz72EXziYx/BmrPOwJKaanz8ozfAarVi49vvoqu7x/zU4+LzerFi+cR4X47jAAA8z/612Ww48/TVRvvRzLt57tLJODp2/RX+kiUoqTsdANC8Rf/rnQqO46GqCpaceSmO7NmE/IqlKKpembUNcmo71meAzF90bhc2Or9z6+tf/zq+//3vm5unNBfz3M2Wl156CYlEImsOOkIIOVUMj4xg46Z30XDZxSgsmP3iKvF4HF3dvVi5Ypl50THNu547i8DmubNmpWWqxsBDVVUgWO2wu9kgbN5CNxqEEEIWnpkEdoQQQmZPUWEhbr7pxhMS2EGbtuV4AjvMx+BO747kLCwtU0qzqloOjx9OL5tbh9MCOo7jKS2TEELIgrRx40Zz04Jx3XXXTZpgnBBCyLHN2+AuGQ0CAFSVzWXn9OXD6nBrbayoCjiOpkIghBCyIN19993mJkIIIYvc/AvutEIqerVMfaoDDhzceWziQZfWg2e1u4z1j0ZVtGCQEEIIIYQQQuapeRfc6fSxdLIsAQAURYbFakOgrBbewnJA6+UTk7Gs5+Wiz51HCCGEzBeXXXaZuYkQQsgiN2+DOz0tk+fYW3D5CsBbrHD7i42eO1VRphW4KYpMvXeEEELmlZ/+9KfmJkIIIYvcvAzuOI6DYHcCAFIJNlO7YLXD5mRj7vRUTKcvMK35IACVJjsnhBAyr2zdutXcRAghZJGbl8EdON6YxFyRWFombxFgEazgOA4cz96WqgKyOI2CKqoKVWaFWQghhJD54Itf/KK5iRBCyCI3L4M7VVWQjIYAALw2Ya7V4QIAWKx2Yz1FkSYqZx6FqqpQtLF7hBBCCCGEEDIfzcvgjuctEOysWmY6EWVtWoEVq5auCQBuH5tYUBLZXHhTUlXICgV3hBBC5o/zzz/f3EQIIWSRm5fBHcdbjHF1siRmjavTgzxtRQBAOnH0ipmqqgJUUIUQQsg88vOf/9zcRAghZJGbl8GdIktIxVhapsViBS/YzKtoVACALCYB7Xm5qKrKArwcpmonhBBCTqbdu3ebmwghhCxy8zK44y2CMYl5KhkFkLsiplOfEkFVoUgiZGmq4iosgDNPhxAPjaKvuTGrjRBCCDkVfO5znzM3EUIIWeTmZXDH8RZwWsqlIopGcGYmWFmPXiIyjnQyPtVqRu+cufjKSE8LouODSMXCWe0AkJ7G5OiEEEIIIYQQMlfmZXCnSGmk4mx+O95igc3pMa8CZFTOFBMxyFIa6tTRHaBNZp5Jn1JBf61MnXvfQX8rpcQQQgg5OdasWWNuIoQQssjNy+DOItiMXjkxlQA3RVomtHWhFVwxp13qVGjtpvF1eo9dNDiU1Q4AiiQiGRk3NxNCCCFz4je/+Y25iRBCyCI3L4M7ziJkVMtMGz1suXAWC8RU3NycTYvp9LRMvfAKb2GvERsfyiqsoi9PH2u7hBBCyAly8OBBcxMhhJBFbuqo6BQmi0mkE1FjigNfYbl5lQkq62Vj/5+clpkZtOn/14M3Oc0KsCiyiJ6D2zKCPy19kyY/J4QQcpLccsst5iZCCCGLHBcOR4zopqWlGWvXrs1eY45JkgRByJirLocjTRvB8Rbk
FVVisGMfqk67EC4fq4xp1n94F+LhUZTWnwWL1QaHOy9ruaooiIdHAQCCzQG7y4t0MgarzYGWba/A6nBBTLIeuuIlpyFQVot4eAzd+zcDACpWfgCeQHHWNsn7M53PAJmf6NwubHR+59a6devQ2Dj9is4TxcNUKIoCVVUhyzJkWYYkScaPKIpIp9PgBDZunRBCyPwxL4O7zn2bwQGwu30IDhxB/dorIdic5tUAAH0tOxAdG0TFynNhEaxwePxZyxVZQm/zDngLyuBw58HhyUMyGoLFakX7zr+itP4s2Jxe9B/eCQCoO+cKJKNBdO59BwDgK6hA2fKzWa9gxmTq5PhN5zNA5ic6twsbnd+5dcstt+Dhhx82N09ppsFdXV2deROEnDDt7e30mZtDdLwXrnmZlimlEkgnY1AVmRVXmSKwg15QRZNrQvJkLIx4aBiR0T4j7VJVFaQTUQCAYLXD7vLAW1gOMRVHOhGDlE4Zzxe1CdIlcao59AghhJDZN5PAjhBCyOIwL4M7i80Oi2BFMhoEpxU9mYrTlw9VVSClkzmDO0kripKIjBvVNFVVRUKrhClLYvak6YlwVuCXjkegKgoURYKi0Pg7Qgghc6O9vd3cRAghZJGbn8GdxQqO46EoCiz80VOABG2uu1R88kTkACBrBVFURcZ4fwdbT1WN4iuuvAIAgDdQwtYX0xBsbJsOjx+ylEZ4tA+qIk8UbiGEEEJOsE984hPmJkIIIYvcvAzuxFQcYioB3mKB1eE2L85i05ZLYjpntczMueqi4wMY7WnFSE8zosFhcLzF6LET7E5wHI9YcBhSmqVi5pVUg+N5JMJjUBUFskQ9d4QQQgghhJCTY14Gd4LNCd4iQEqnIEtHH+sm2J1sTjxVmZjQTqPIEmS9t00rhhIZ7UMiPIZULASYJkfnLQLSiRhS8QgAwO7ywmp3IRkLQVEVY8weIYQQcqJRMQRCCCFm8zK4swhWcBwHVVGOOoG5jrcIiAWH0d+6O2teOkWWwPE8OIuAgoplsDk9Wc/z5rNUTJ07UAwpnYRgZwVcOI6HLIlIJ6KIjQ1CpqIqhBBC5sgTTzxhbiKEELLIHTsyOgWJyRgkMQWO4yYFZLkIVhsUWYKUThq9btCKqKTiEXAcB6c3AKePja/T2V3erMc2hxuKLCIeGgFvEcDxFrh8BVAVGWN9bYiOD2atTwghhJwofX195iZCCDnpmlsOo+Vwq7mZzJF5GdwJdhd43sLSKqfRW5YZtCVjIeP//a27ISZj4Hmt4qZpTJ6iVc/UWbUeu1QsBFVVtSqaE5O86pOhE0IIISfaDTfcYG6aU0898zyeeuZ5czM6Ortw3/0PoGnvPvOiLE179+GhR/+AZJKNYydkOp565nn88tcPYjwYzGp/461NeOOtTVltJ5uiKHjsj0/hvUY2V/JiIMsyXnntDbzy2l8gy8cervRvX/4KauuXouHyKzA0NGRenCUej+PTn7kFn/7MLYjHWbV7s/974AGsOWct9u47+vfPQjYvgzuLYAU4Dqo6vbRMf0kNipasBgDEQyOAlpKpKjIAwGp3Adq0CQBgc3phd+chUFpjbAMAfEWV4HgLVFWFzeECx3FQZLYNYHJwSAghhCxUa89Zg2AoNOkmu73jCPx5PqxYtjSrnZDZkkgksHP3HnPzKefPf/krBgaH8M7mrdiybbt58YIkywoURdH+zbhHziEej6O/vx8A0NnVhb3H+IMQmZ5jR0anoHQyCkUrpGKfRlombxFgd3rBW6wsMAQQGZlIZ9ELoTjcPgBAXnEVSuvPhEWbRiGTPrWCXmzFHSg2limSREVVCCGEzIny8nJz05wqKymGzWpFV3eP0ZZMJtHb24eKinI4HKzaNCGzbWl9LTqOdKKjs8u86JSxq2kPDh5qMR5v2bYdb216N2udxa6tvR37DxzA3996K2qqq/HKq6+aVyHHYV4Gd4LNZUxIPp20TE6rhGmx2pBORCGLaaSTMWO5vi2LYMOKC65H
fnkd+Cnmz9N79/QJy/WpFgBAEpPGROiEEELIifT885NTIueSw+FARUU5DrdOTKaeSCaRFkXU1S4BtNTLe+673/g5Wqpm0959WWmeyWQSDz36h6znvPHWJmNbuVLzyOLgdrtRu6QGO3buNi8yHO2zp6cEP/7En7KWZz7HnDKcuey++x/IGVgqioKmPfvw/Euv4M2N75gXY+fuJuzdf8DcvGht3rwZAPCxj92EdevWobGxcVJqpp62WVu/FHfd/W9ZywDgjTf+aixvuPwK9PcPmFdZdOZlcGex2oz/cxZtvNxR6MGdIktIxsJIxkLGFAoltWegqHqlvqLxHKuDpWqalS1dA8HmgNPLgjyL1QaL1QbBxsbjiamE6RmEEELI7BsbGzM3zbm62iVZqZld3T2wWa0oKylGR2cXtu/Yhdtu/RTuuvMOfPTG67F56/acN8XT8cZbm9Db24c7bv8s7rrzDlx4/rl44aVXaczeInXOmjMxPDKa8w8G0/nsjYyOYfWqFbjrzjtw5eWX4o03N+FwazvuuvMO3HH7ZwEA7259D9ACu81btxvb+/B1V+ONNzdO+uPCH558Gm+8tQmtbR1Z7bpl9XVYtWK5uXle23/wUFYQ/fNf/dpY9r+//HXWssyezHg8jk2b3sZpq1ejvq4Oy5YtnZSa+X8PPIA/Pf00vvbVr6CjrRUfv/lmbN6yxVi+d98+3P3lL+PCCy7A/r178MQf/4C33nrLWL5YzcvgTkxEjf/bHMdOy9RTKF2+AmOMXiIyDo63wK6lYkKb2mDi/9lz3GUqW3Y2impWAdr4v7KlZyO/nM03lIxOTIpOCCGEnChXXXWVuWnOmVMzD7e2GymZtTXV+Nw/3IKA32+s63Y5EQ6HTVs5tmQyiaGhYVxy8YVGuqc+pq+ZqvItSgG/Hxeefy6a9uybFOBP57NXWJBvfIaqqyrh83mx9pw1QEav9Pg4C94Ot7bjwvPPNbZXW1MNf17epHF/5527bsr7x/POXYsPX3c1BCF3Zth8JUkTU4wdi5xRp0JPybz00kvgcrlw4YUXIi8vz0jN1IO/mupqfPQjHwEAXHDB+bjwgguMbWzevBmhUAifve02uFwuFBcX45Of/Dtj+WI1L4M7vQAKACiyNgn5Uei/aHa3D4okIjLaB47jIFjt0w7oMtmdHmPsHcfx4CwWCDZ2sYmMsoGhhBBCyELncDjgcrlwuLUd48EggqGQkZIJrbKh/lf7+x/4LUZGj6+3MZFMIhgK45nnXpyV7ZGF4WgB/kw+e06HA25X7oytZDKJeDyON96cSAm+5777s8aa6urrluCqv7nc3IxLLroAF11wnrl5QTjrjNNx1513GD9f/Jd/Mpb96+f/KWvZ6aexjhFkBGY//NGPUVu/FDfc+BGEQqGcqZlkZuZlcJdZ6GQ61TL1dEuHOw8cb0FkbBCpeATewvKs52cGekdjESbSQgFAsFhhd/vAWwTwloX1FxlCCCGnpvx8NjzgZFu2tA7xeByHW9uNlExoqWzxeNxIo7zj9s+isOD49tnpcMCf58NHb7w+62bxrjvvwFlnnG5enSwSDocDl1x8Ibbv2IVYbKKWwmx+9vQ/YFx5+aWTPntXNlyatW46LWLPvgOorCjP6jBYWs+yuwij98rp6ZQdba3oaGvF1776lUmpmWTmphfNnGJkKWX83zqNtEw9aOMtVhTVrDIqbXrzy46r5y5zbB4A8FYbBJsDVocL6cTElwshhBByorz22mvmppOiuqoSaVHEjl1NR62S2T84hGBo6pRMn8+H4ZFRY1xU5voOhwPFxUVZBTQ6Orvwm989PGncE1lc9BTJqca5YRqfvWNZtrQuK/1zPBjEb373cNYYvnQ6jaeeeQ55Xi8+ftONuOaqvwEAlJeVIeDPM9Yjk1MydZmpmS6XC5deegk6u7rwzLPPZj3PvP5vH3wQ8XjcCBoXu1kL7vSqVvfcd3/OSU0xi1Wu7K6JXxJVq1p5VEa1TCu8BWWw
u/Mg2JywudzgLdoh4CYHbdOlp3eqiopUPGxU3ySEEEJOlMyeipMp4PfDn5cHMaNKJrR0LZfLhfsf+C3uue9+vP3OZixfWo/NW7djPBiEz+dDMBTGE396FslkErU11Vi+rN5IvXzjzY1wuVixMgC4suFSBAJ+4z7ihZdexcUXnm8sJ4vXlZdfCqdz4rNytM/e8TjrjNNx1pmnG9t78KHHcObpp8GfN1G34U/PvoD8QADXXv1BcByHlSuW4aYbr8d1V38wa1sLnSWj0CHP5y56+PvfP4RQKISl9dlzYdbX1eG01avxlzfewN59+/DPt9+Oj910k5G6+Y1vfBPVVVXG+mecfjp++pOfYPOWLTjtjDNx4cWXwOv1Zm1zMeLC4YgRibS0NGPt2rXZa0xDR2cXXnjpVZSXlSIQ8GN8PIibP3pD1jp6latPfOwjcDgckx7rJEk65mDTSHAYfQe3AQCqTrsALl+BeZVJYsFhLS2TRyLCip648gohS2mkYmFwPD+t7RzNQNsehIa6sOSsS2F3TfzCk5mZzmeAzE90bhc2Or9zSy8dPl36Hx5VVYWiKFBVFbIsQ5ZlSJJk/IiiiHQ6jbo6SiUjc6e9vX1ef+YOHGrG6pUrzM2nrBN5vA8eagHPc1ixfJl5EZkD7zu4SyaTeOJPz6KiohxXNlyKN97aNCm409c568zTjdz4XG0AIIrSMTvQ4uEx9Dez8rTVZ14Gq33irzVTSYTHjDnq4qERgOOMYC6diMFiEWCxTZ60fCbCw70YPrIX4DhUn34xrBlz4JHpUxQVPH+MDwGZl+jcLmx0fufW+eefj61bt5qbp5QruFMUJSu4k2UZoihCkqQTduNHSC4nMtggk9HxXrjed3Bnliu4Gw8G8adnX8CVl1+G2ppqo/2pZ55HIODPGpA6nb/8ppIJHNn1BgCg7pwrsqpnTiUVDxu9afHwKCxaEZTZlIgEER7pQXDgCMqWnQNfYTlkMZVVAIYc23Q+A2R+onO7sNH5nVuXXXYZNm7caG6eUq7gjnruyKmCgo25Rcd74Zq1MXdHEwyFIYrTGBs3TRbBavyfmyKf10yfugAABJsjayL02cJnpHaKqTikVAL9rRODvwkhhJDZMpPAjhBCyOIwo+BuPBjEL3/9oDGYearCKXNCy93MDNqOJnP6ApvddUKCO3AcBJsDvGBFaKgbI90tiAWHkYoff4UmQgghJBdpBpMHE0IIWRxmFNwF/H58/p9uM+b3MBdNmYo/zwerdXZTdWY8n1zmQD6Om/acdjPB8xZY7U5Y7U4osgRJm3KBpkcghBAy284/nypFEkIIyTb7EU4OTocDNqsV4fBED1YymUQ8Hj/uSSUtgu2EBGjvh9XuhGBzwBMogSymkNSqckbHB82rEkIIIYQQQsismpPoyOFwoKKiPGsCyHe3smqXK5Zlz3ExXaqqsrnpTiVa76CvsAIAIGs9d2IynrXaqUCZzvyAhBBCTllUvIYQQojZrFTLbNq7D2+8ucncDEEQ8OHrrjYqZL7x1iY07dkHAHA6nfi7j38UAb8/6znTqbYmSRL6D+9EOh5G/dq/MS8+JXTte9eYT8/u9mHJmRMVQU8F6WQcVrsT3LHmnTgJpvMZIPMTnduFjc7vqW2m1TI5YXpj2gkhZLGprWYdOaeiWQnuZtN0bg4kSYKYiMDu9IDPqJx5KomHxzDcdRAcOCSjQSw//1rzKidVOhEFL1inXZAmk5ROgeNwwqZ4mM5ngMxPdG4XNjq/pzYK7gghZHacysHdnKRlnhhcdpGUU4zN6UZR1UpYrHaoqgJFEs2rnFSqqkKRjy81U1EkpBJRczMhhJA5RAVVCCGEmM3b4M7mODVTCnWC1Q6LYIUnUAwAENNsrOHJMimQU1WoipzdNk2qokBVFEiphHkRIYSQOSLRVAiEEEJM5m1wZ7Hapz2B+clidbhgsbG0lmQsaF48J1RVNYKx7PbJbdOlqux5+lQPhBBCCCGEkJNv3gZ38wFv
EcBrAaiUmvueO1VVoaoyVEU2ArLMZYqpTZfVy5cjfVNVtHEbxxkcEkIIef+2bt1qbiKEELLIUXB3gjnceQBwUubkU1UFqqwYA+czJSLjwBTBmSSmjP8rijzpuSrY88wBIyGEkLlDxWsIIYSYzX3EscjwFgEczyMRZdMizClFYQGemt1zl4qFMdx5AGP97ZMDN1WBLE6kW6qKPLmH7ig9d1MVjjH3/hFCCHl/LrvsMnMTIYSQRY6CuznAcTykOS6ooqddqooCRWYBmh7giWmtEIqa3fumKJK2voRkNAhFkbVeP3NK58TjzABPVVkvYS7yFEEfIYSQ4xOLxcxNJ117Wyu++uW7MT5+7D9ojo+P46tfvhvtba3mRbMqmUjgv/7zP9C4/T3zovftF/97L/7rP/8DycTsFhhrb2vFbbd+etb3ea6OOSHk5KHgbg7YXT5z0wmnyJJREVNVFTZ2TpKgqgrioRFAm+suMziT0ikoWgVNWRIhi6lJ4/VkMY3hIwcw0LqbpWxmVNxU5ezHmVRFpt47Qgghc66vrxdulwunn36GedGMjI+P47/+8z+yAtcv/Ot6/Od//TccTmfWujPVuP09/OJ/7zUe19UvxYMPPYp1534ga733q631MKqrq1FXv9S8iBCyQFBwNwcsFmHOe64UmQVyitabxnrVWA9eOsH+2ium4sb4Oj0dU5UlSCLrZZTSaRbsZQSA8fAIUokIJDGF6NhgVuCXjIURCw4BOdIwVWWO5vozpZkSQshCtXHjRnPToqb30H3xjn/OCsD27t2DZStWvO8AbCHYtnULzjv/AnMzIWQBoeBuLlgsEJOxKXu1TgRFFpGMhpCIjCMRGsV4f4c2fk5m+Zia2PggFFlivXqKgoG2PRhobYKYjEORRchiGnJGoJaIsCkdbE4PouMDRhqmIksY6tyPwSP7IYkpKHL2e42HR7O2o8ss3nI8MscMyiILRufiOJuD11zmOqCfdRQoE3JKc7vd5qY5Nz4+ji/e8c/4+E034rZbP43W1ux0Pz3g+vhNN+LjN92Y1Tula21lKYgfv+nGrMAsmUjgf370g6wUwsbt72WlQWa+/i2f/jsUFhbi5/f/HwKBgLGNw83NOOOMM41tPPfs08b+3HbrpyelKP7if+81luv7097WirvWfxH79u7B7f/4D8b7eO7Zp/GL/73X2I/MNEr9vT/37NOTjkPme3ju2afxox98D2+9+Vd8/KYb0bj9vZzpk5n7bQ5gn3v2afzPj36Q9RrmlM7x8XGMjIygfumyrPap6Mf6f370g6xtNm5/L+f7gOl8fPymG/Hcs09PuSzzs6C/38cfe2TKbRNCpoeCuzmQV1gJAEjFI+ZFJ4SUSiAZC2OwYy9Gug5hrL8N4ZFeKLIMRZGRjIVhd+fBandhvK8dyWgQ6UQUkbF+pBJRAMBITwsAYLjrEAY79kFKJ6EqCuKhYQAc3P5iKJKIke4WJGMhiMk4pHQKUFWtR28iwErHIxg6sh/DnQcBbWyfLtdYxFyFWqYia89XVQWyLEKR5ZxB5GybTi/kfA/ulIxzSAg59Vx11VXmpjmVTCRw38/uwQev+hCefPo5/PL/foMtm99FLMauI8lEAj/64fex5uyz8eTTz+HJp58DtEBEF4tFsWXzu/jl//0GTz79HD541Ydw38/umdZNvf76/3DbP+LJp5/DAxt+h4MHD2QFNYlkErF4HIH8AkALWHbv2oWHH/0Dnnz6OXzhX9fjnp/+xAiU9H3T9/cfbvtH3Peze1BeXoF77v05Tj/jTDyw4Xf4wr+uN14DAAKBAP7htn/Etq1bjDY9HfRDH7oGG37zQNZxKCwsxIbfPAAAuPEjN+Gr//4NNFx+BZ58+rmcqZjPPft01n7fdfeX8cPvfzcrwNu/fx9uufXv8eTTz+Gr//4N/O7BDVnLx8dG4Xa54HQ4jLZj2bd3DxquuNLY5o9+8D1s27oFTz79HB5+9A8AgD//+RVAC9D+4xtfM87H
w4/+Abt37ULj9vcmnauHH/0DRkZGJn0WxkZHc277WP7jW/953D+ELDQU3M0Bu5uNuYuM9ZsXnRBDnYcw0NrEgjlZMgKRZDRoFEzhOA52Tx4kMYXI2ABCwz0IDhyBLKZgEWyQxRSGOvYjFQshFQshEQlqvWwc7G4f3P4i8BYB6UQEQx37IaYTRlCWjAazetTSSZYGyvHs4xYZ6QO0XjdZTLOxgaoKKZ2ALIlIJSYHwbkCPlVVjInUjfeqSJN6DU8E81yAuUwnADxRptOzeCzqFMcx17k4Edg8jXPzWoTMR2NjY+amOdXX14t4PI5LL7scAOBwOnHLrX8Pt9tjLNeDG901116Hze++awQdbrcHt9z690bK5KWXXY54PI6+vl7jOVNJJJOIx+PIz88HtABr1arV6O3tMdZpaz2MwsJCoydv3bkfyBojV790GdxuD8bHRo1evmuuvc54/rpzP4A7v3SX8fho6pcuQ1dXl9HblpkO+oV/XY8bP3KTse5551+AkZGRaQexu3ftwnXXf9jY77r6paiursamjW8a66079wPGWLrM96U7nvTU08840xirWL90GerqlxrHx+F0Ys3ZZ6OnuxvQjvWqVauN4NThdOK66z+Ml158AQDwn//131nLMp8L7bMw1bYJIdNHwd0csNqd4Dh+7nruxAQAVkwlUzoZRSw4DFWR4Q4Uw19cDcHuRGS0H5HRicDTW1gOm8ONdDIKgAMADLQ3Yby/HelEFFY7uzDYnF4ko0EkImMIDXVDkVkwEx7uQe/B97SgTUE8wm5ApFQCYiqOgbY96G/dDVULQGRJhCJLSCdiEJMxKJKYFUCkkzEkYiwdNJMiy1Aktg1VkaAoMkIDnQgNdZlXnVWqqmZVBZ2qp3A2AqzjZT73x0MvrpNJH7s5J1R1zgJJQsjMjY2NwXWUnqCxsTFs27YVt3z674xUu69++W6jZ28qsVh0WoGr0+GAy+Uy1h0fH0dXV1dWCqZ5jJk5PfL2f/wHDA0OAFqwODzCCo5lCgQC0wqIAoEAqqursXfvHoyPj2Pzu+8a+6JXv9Rf90c/+J756VMyB7HHI1d66kw5HQ4UFRaamw29vT1Gammu95mZzvnxm27EIw/9Puv5hJDZQcHdHHF6A0YhkxNOm4fOTJElBAc7wfEWuLzsIlFUvRKymDICMwCwOdworj0dlavOQ+WqD8Ai2KBIIsYHjkBVFRY4cEDm2L3oGLs4QgsA0skY0skYZFFEaJAFW+lUHMkoC9J43pJRmTMNRRZZT57W25UZGEmpBKKjA1nz70EL6FRVgZROsukeZBnjQ10IDWX/pU9VFaRiYUCrCPp+ZVYQVRUFakaaqbGOqs8xeHKCE3mKXreZyNVzp8oyMEfviR2/3J/l43Eyg21CToTXXnvN3DSn8vPzEY/HkUhOTq+Htvy88843Ugn1n8wxcbm43Z5pBTIOpxN3fuku/O7BDUagduFFFxm9V7nGmP35z6+gsLDQ2JcHNvwOxSWlwDSCl+k47/wLsHvXLuzftxculwvl5RVIJhJ4+KHf4wv/ut543a/++zfMT52SOYg9HnpPaHl5hXnRrKmoqDRSSzN//vO//huJZBJPPvFH/OgnPzXaP3Pr35s3cdz++zv/ddw/hCw0FNzNEbe/GGIyhsgoS0k8kdKpeNZjq8MN3mJFZLQfYjIGm9MDXrACADyBEiNt1OHxg+N4+Etr9A47AEB+5TIEyuoAVYXd5UNB5Qo4XHnwFpYDAFx5ExfDoprV8JfUAFrAl05EjJtqjuMQHRsEAKSTcSiyBFkbt2e+8dbH7KmKgtBwD8b72zHW15aV6qjIMiQxhdHeViRjIVZMRZYgiylI+lx+2pQPkphCLDSMjqa3kIqHc471OxYpxbapKLIRdKiKDFnrPcykB0Ynq+cpV8A5U7l66HLNe3jCzHJapp62S8hCMZ0A6EQqL6+Ay+XKSg3cu3eP0TNXXl6BWDyOffv2Gsv1AiS6WCyKvXv3GI83
bXzTCIocTiecLhdeefklY3nmmLZkIoEND/wKd939ZSNgyEx9NKdk5tLWetjouXM4nVi2YgUefuj3Rrpk4/b3JhUvOZr6pcsQj8fxh8cewZqzz56yxy/zfRyLnqL40osvGPvV3taKrq4uIyX2WI4nJXOmzGmp0IrTZI6r0+mppoSQ2UfB3RzxFLC/DMaCk1M+ZltmAFS85DQU1awCx3FQZAkWmwMVK9ZCsNrBWwTwFgHFNachv7wexUtOQ+WqD4DjeFjtLgCA3eVFflkt/MVVKKk7A4VVK+Dw+GCx2uD0BFC85DRUrDwXvIUFi25/ERzuPACAJKYRHOwEAPgKKyCLaWOOvUR4FFI6hdHuZkRG+9Df2sQKusQjiI4PIp1kAWp4tBfhYTZ+IhocMgq+QEvnHGzfg+DAEQy0NiERnhhbkIpFIIsppOMRo7cuMjoARRIx2L5vRsVOVFWFIkuQ9J5DVTGCTzZRPPu/JKYQCw4DqsoK2JjSS2fqeAOb2ejxUlUVSo5tsGqkk9tPBFVVMdV4xuPBenmze38Jmc9uuOEGc9Oc0nvOXn/tz0aq3eHmZmPMncPpxFe/9nW89OILxvLdu3bhpo993AhS3G4PDjc3G8tff+3PuPNLdxlByKc+cysOHjxgLDe/vtPlwle/fHdWup8ePJpTMgHgQx+6BiMjI8a627ZuwbpzP2AEdDd+5CYUFhYaqaS/e3AD/vt7P0QgEDDST2//x3/IGbBAS8288KKLEIvFjBRIhzYWMbMK57LlKzA8PGQUDMnPz0fj9vdyVu+EVnRlzdlnG/t1z09/gq99/ZtHDVx1s5GSOR2BQABf+/o3cc9Pf5J1vj7wgfMRCATw8U/8rXGuPv/Pn8MFF15kVN8khMweLhyOGHdPLS3NWLt2bfYac0ySJAiCYG7OMp11TkXdB7ZBSidQu6bBvOh9UxUFHM9DUWQc3qZXl+JQv/ZKpBMxI1DgeB4uH6salk7GYHO4kYyFIItpODx+yFIaNodbS79UwFvYcVYVGfHwGKwON2wOFvhJYgpiMgGn14/Ove8gGQ1i+XnXIh4eRW9zIxweP5KRcQh2JzyBEoz3t2v7xfbDV1SJ0GAXeIswqeeOt1hRv/ZK9DY3IqG9rqrIKKk7A4LdiaGOfRBsDoSHe+DKK0Q8NAKO4433mV+xDKlYCK68AghWO/pbm7Qtq3B4/ChecjqcXj+gpYJarLas10fGMU3FwlpvnQKXrwDpZAxiMg53XiGkdBLpZByuvAKIqQTSiSjsbh/aGl+Hp6AMJUtOg2DLPR7laFRVgZiMw+ZkN0kzwcYvRuHw+M2LoKoqOC6jW3YKrKpqyEjf1enjRu0ub1b7dM3kd1cWU1AU2fhDw/vVsu0VePNLUbbsbPMiMktmcn7J+7du3To0Njaam6dkZBxo44ZVVYUsy5BlGZIkGT+iKCKdToMT7OZNnFLa21rx9FNP4ot3fskIBpNahc4PXvUhbH7nbdx088cX/YTd4+PjuO9n9+DOL901rWCQEHJstdUnLsX5/aKeuzlktTtZimBGyuBMsQqCk3szRG2b+tgyb0EF/CXVEGwOODx5xg29YJtIybA52BxJFsEK3iLAIlghWNnFnON4I7ADAI63wGK1wZoRqAhWO+xudpNfWL0SpfVngeN58BYrLBYBifAoOJ5H5cpz4QkUG8+zu3ws3XKwC1a7C6X1Z6G0/izkldTA6nDB5vRCkUVExwYRD43AnV8CpzcAMRVHOhlDaLAL8dAIwsM9sDpcqFp9PpzeAAvsOA7gOIz3tyEWHMJw50EMdx0CGx/IjlsqHjbSFvWxgVm04yumE5DSSW3ePjYPoKpOFKpRVRWR0X6M9LC00nh4FIosI6oVp4kHh7N633LN6advN5OqqkjFJtJZp0tRZDb+MCNtFBk3dNCC9GkVW1GUnGM3VUWe1d60o5nqs348ZEmEqshIJ+OTxm4SQuansbExxOLZwxD04iNjY6OIxeMndIzZfLFp45uT
0lPNc85l/swkDZUQcuqhnrs5FBruwUDrblSsOBee/BLz4mlRFBlQFPCC1ehZUlUVifAYXHkFCI/0of/wTpTUnYm8okpj+gEpnUIqHobDnZezl0oSU0ZgNxX99XJSVSiqDJ4XIIsp9Bx6D+lkHMVLTkNeUSWkdBJtO/4CjuOQV1KD4MARcByH4tozYHO4wVsEyNq0BvHwGMZ6D0OwOSClkyipPQNiKo6xvjbkFVcjMtbPUk85DuXLzoG3oAzx8ChCg10Q7E6kYmHEgkMAx0GwOSFljEG0OlwQk3EEyupgsdoQGupGUc0qpBNR8LwFgbJapJMxWCwC4uExKIqMoY79KKhcCrvLB7vbh9HeVoQGOlF12gXoP7wLYioOT34pomMDEGxOBMprMXzkAADA5S9CWf1ZkFIJjPa1obT+LFi08Y6qqkCR2LhDm3NiMmIplUAqEWWFb3z5kNIpCLajnxtoPV3JWBhWhxuSmITLmw9VUZCIjMPhyWPHWExB1QLzqaiqwqakiIXh9hdlLYuHx8DzfO5eQW2ajVyfL12u311FlrL+kKATUwkosmT0Espi+qjbVhQZPG8xNwPauMuO3W/Bk1+KgsplRurwVFRVAcdN8VmfRUf9nZpnVFWBLCuTzi85cfr6+lBezsY+T8dC67mDNqbrrTf/mtX21X//Rs654gghZLZQzx0BAHjz2bi7hDY1wHFRJkrRS6I2gbfMqkYqsmRUMnTlFWTdNAo2OwSbwyikYna0m33dUW9COQ48z27qLFY7ypevReXKD8ChFWsRbA5UrFiHJWsa4C+pBgD4S2thtTthd/vg8OQZAYzNyW7mpXQSdjcLqHxFleAFK+LhUaiKCk9+GSpXfsAIiqx2F3xFlQiU1KCoeiU4iwBPoMR4Lf1G3e1nPYhSOonR7haIyRjioRGMdDVj6Mh+DB3ZDymdQjIexkDbHgy274MiixjtPgxoPaOJ0AhUVcFw10GjF0wfSyilE8YYQWi9d2N97RjqOoTo2AAS4TGt948FUKoqY7jrIGLjQ8ZzFFWBlE6gt7kRkpbqOZ0eLL0HUEoljF43MZ3QPhtaFVLl2NUu04nYRKVM0+uySqG590VMJ4we5OmS0kkkoyFzM2MacyfLbAyj0Rua1TupQEpNXSRH1AL8dCJ61B5RMcV69vRxmid6rkL9DxoLgbkHmpx4MwnsFqrMCpT6DwV2hJDF7Ch362S28RYBvGBF8H3Mw6ZkzDPGUs0UJGJBRMcHIUsixvo7wPG8kXKZye7yTmu81Wyw2l2wWARwWsCnt/EWAXaXD1WrL2C9QhxLCwUAm8MDwWqHxWaHxWoHb7Eiv7weTm8AVrsTvsIKiMkYVEUyej71gFIP3jiLAIvVhrL6s1CxfC2c3nzwghX5FUsRKK9DSe3psDnciIz2sd4ZnkdwoBOACrs7D+P9HRjvb4cqs4BIT99UFAnpZAyqoiAVj4DjLUiEx4xUSyNg4DikYuGsXp/IyMRkvMlYCFKa9UipioJ0Mo7o2AAbWxhh00QosoR4aBSKlMZw1yGM9bVNTuXNEWCpWtqpqipQwW609XkA9Xn5pHTaKFZjpqenKrKEVCKK2PiQsU1oPYPQtp+LLKZnfIMf1wLlzG3qx5K1Z74+S62UUkkokohENGg8TxbTRpEbXWYQp38OFVnO2sfMwjqqqiCdiCEWGjaCLlmWpgxms0xnnRyOGdwd53ZPhmkdJzKrPvGJT5ibCCGELHIU3M0xd17hjG+As2iVGwEgHY8iGQuh99B2BAeOYLS3FWIynjWu7mTiLQIslok0OY7nYRFYWp1gZ2P3eH6iJ5HjedjdPlgEG4qqV6B06VlGQQ/eIsCbXwa7Ow/egnJ480u18X1s+3rQyvMWcLwFTo+fpWVa7Shfdg7yiiuRV1TFXkjrgfQUlCGvqAq8wILuJWdeAqvDjejYQNaUFU6tAE08NILxwSMAAG9BmbE8s+CH08PGNDg8eahcdR7snjxIYgoprXcqHh5l
k7YrEhRZMubkU1UFifAoFEk0ircAQHikF7HgECKjE/MIQuvx0qdm0CmyjNHew6wXUbvP1v8QoPcwDrTtxtCR/cZz2ByBLDVUSrExhn3NjSzIHehAIjxq7EtKC/5URZkUdOg9xzO5wQ8NdaG/dTciYwNGEAqwKqvQggX9tVVVhSKLUBQFiiIhEQtCkSVjSovwcC/CIxPnTBJTWdNdyFovt5ESq/0O6gGrLImQRRHj/R3oP7zb6LFjQTibcuNoZlJ9VceO2dG/C3JNR3HKmiLoJydOe/tEkSpCCCEEFNzNPZe/iBV2yCjpPxOq1nMniSn0t+5C76H32LgdjkcsOARFFo1UxJONF6zgMsZAWQTbRGEXwQab0wOLdXKaKG+xQrA5wfMWWDLSRS2CFUXVK1FYvQK8RYDV7gSvBYscz4PjOCPgE+wswNVTPTmON6pW5pfWwhMoQaCsFgWVy1G+7ByU1a8BANSccTE43oK4Nq2CK68Q+WW1sDk8iI4NIB4cBsdb4CusMMaJ+UvZvH4AkF9RD7s7D/6SGnAcj/yyOkDr+QMAMRFlAZzWcyemkqxYjWBDaKgLsixCTMSQMKUqBgc72VQS4VGkE1Gk04lJ8xnqaZ9jfW3oa9mh9TROzLc32nMY6USUVf/UghEpnYKYSiCViEBVVXTt3wxJTCEeGgYAjPV1QEzGteBnopdJr5oJUyA1UWxmYuJ4s8hoP5KxEIa0cYmpaIj1jCaiLLDVAxpVNXoO9eBMVWWM93cYwauYjGPoyH4Mdx3EmPbHDWipqbIksn3TxiJC21dkBE2ydi5S8TAkMan1XqpIJ2JaTyTr6TtWr6Sspb3OhCJPBPFTmU/B3UwC+0xHO66EEEIImRkK7uaYXRtPNtZ3fH9xVVUVqixjuPMgoN2cCnYnPAWlRk+OJ3B8xVpmmz6Pni6zGAYvWGG1O2HLUebeIgha1Ux31jg/i9UKcIBV65m02l1GsMiCuuwUTWj7oDUavTbewnL4S5fAanMawaVetIOtwxnBiq+oAk5vAN7CcghWOwqqlqNixTpwvMXosdOLi1isNnAcj9K6M+DTitno005AC1pZ5U0Z0bFBBIc6IaeTcOUVwFdchXQyhpGuZgy0NWUVgQHHQUrFEY+MaQFhArGxQQwdOYDIaJ8RACRjLCC0u32sVyujZ09RFSSjLO0TAMYHO5GIBCFLaUjpJFRZRmi4xwiOdMnoOEa6W5CMjGOgdTf6D+/CWF8bUonIRICUTiGdiCEZCxu9d+lEjAVV0ZDRO6jrb92Nzj1vQ5ElODwBJGNBxEOjkNJJpGIhDHeyoE9V1Ymxg6kkhjsPYLBjH0JD3Rjrb4cspiHLEsSM9xkZ64eqFYRRZIlVx0ynjCqvRsqnFlDoqZiqwqae0MfmKQrrFdQDsJ4DWxEZy+491em9e1OZapmqTIyRnZLK9vFEmq0KqKyndebbmer4kGN74oknzE2EEEIWOaqWeRIcfu9VuP3FKFu65uhFSnJIxSPob93NKglq43Xyy+rhzi9B9/7N8ARKULHyXPPT5pWjnd9jVUTMNS9cPDwKh8tnFJNRVRXx0AgrOsPxSETGYbW7jF6+3pYdxnQGlavOM+awywx8rA4XIiN9iIz2YclZl6F5y4vgLQIqV52njSv0IhkNwubyon3nXyGLKfgKKxAZ7YfN5UUqHjZuqItqVsNfUo0jezZlvQbHW6AqslGJ0+Hxw+HOQ3R80Ag+bU4vPIFi2BxuhIZ7IKWTCJTVYejIPpTWnwlVUSDYnOAsPPqad8Dh8SMeGoHdnQdVkcFxHPyltRjva0c6GYUnUAIxnZjU62Z35yEVC8HpDSARGQfPC1iy5jJY7U5ERvuQiASNeQyLalbB6c1HIjKGRHgM/tIlAACnxw9RFHFkN6tsJ1jtKKk7E73N2+H2FyFQVoex3jbEwyNYctZlUGQZiiwiGR1HKh5FdGwA4FhgJtickKU03P4iyGIa6UQUspRmYytLa9nx1dIuFa2n
fLy/AxwvoGLFWticbljtLjbpvCYRGcdoTwsAwFdUCV8hq4QlSyL6D++Et6Ac5cvPMdaHVkCH/ZuGw5O7AqcspsBZhEmf23QiCjGVgDuvkE3hkYOYSkCw2rJ6wGebLIngOC5nxdKZEFNxgBNgtU1d0TQXWUyDF4Q5qU662OnBtzrNapl1dSzzgJC50N7eTp+5OUTHe+Giq+lJ4M0vQ3R8wEj9mwlZTCMVC0GR0nB4AywFsKwGDo8PlavOQ/GS08xPWVDMN8iZ9PF8ZhzHG+Ps2GMOgs1h3EzanB5j7B4A2B0sQOR4HnaXh835Z3pdwWpHfnm9EUhbrHZ4AiWwWARYMnoQed4Cp5eNw3MFirWpGkJsXzkOHMfDX1IN3iLA5Suc2L7NgYAWFPkKK+Bw5yEZDSI42AlowZbV4YaUTmCsvx1DXQeRjIUg2B1GCmhouAeDHfvQd3gnxvuPQJElOH35sDk8SMVCLEUzHsFg+x6kk1HYXF6U1J8JpzHO0WoEy6lYCO5AMQoqlyOvpAaKIiEWHIKYiqOvZSfG+9uN459OxKDIEsb62hEdH9TeESBJaSPo8pfUoOaMS+D2F8HpzWfTTsiiqXAMuxGNjA0iqvXI6T1uUjoBVZERD40gGQuyFF2LoKVjTqSPDh7Zj5GuQ0hE2JxNeoEcVZnYli4eHjECnMwUVL0HNB4eySpGI6VTSCdYyurRetdkWc7qudQpWkXSo/Z2aZVVjYcnIIVRzajAezz0FN/j7rk7zucR4JZbbjE3EUIIWeQouDsJHJ48qIqCyGh/1k3N0W4QdfrNsjtQgvzyepTWncmqUPLsL9+WGf7VfCHR0y7NeN4yqVfA6phIB9UncTdogaBFsBnFacxjB3mLkFUgpmzZGhRULgOfsS3937ziKrjziuDy5qO4ZjWKalajYsU65JfVwV+2xNi2Pq2DzelBaf1ZKKhYirpzroTd7UN+xVK484pQvOQ0lC1dg9K6M+ErqmRBg6qy9D2FzTOoV0rVewFVRUFM+9zkFVWidOkaeAvKUVC1HCV1Z8Lh8SOvuBrFNauzglyXrwBVq88zHuvb9eaXwiLYMNLdgmA/KzDDC1YEyuvA8RYosoREZMwoVqL3Mspi2tiPgqrlEOwOcDyPvJJqqIqM4GC3UbFTTMaN8XGpWCjnzT9vEYyxiza3DzwvIBYayQruVK1YTNbvmRbMZKYDSukEmyvSVwCLYEM8PILhrkNAxng6i2DLLtIipSCm4pDSqawCPGaqkp22qe+LudhNLipUKBnfC5mvP1tUdZoT208hqfdC5zhH0zO5QA+ZnoMHWXo+IYQQoqPg7iTwly4BbxEQHR9kqWaazN6CqYRHegFwCJQugcubb4wVAwCrw2lMDbAoTZHaxlksk6aAMPfEZfLplTAzbjj19a0OV1ZgqAdwDlee1gM4Mc6Q0wIlt78YRUtWg+d5cDwPt78QVrsDrrxCePJLjX0LlNWBF6xw5bEePDauj/VI8RYBRUtWa0VobLC7fVnjFfV57Bxev/H6eiBQVL0SrrwieAvK2RQVVivyiqvgL6mB3eVFYdUKo/onx1mMictd/kIINqcR4DrcExOXu/KKIIsphLRpHoqXnAaHOw8WwYpEdNzoYYQ2vnSkpwXR8UGIyRg4ns+aV9HlywfH8UYRFwDoO7wT/Yd3ZRVu0fGCFXaXDwWVy+HyF4G3CAiULoFgs0MWU+hr3oFkLMQKymjnMKWNR4Q2nq734Hbtd4nRC9i48grBC1aoioK01suoj2WUxFRWEBQa6sFAWxNGe1sx3t+BZDSYFXxNTJOR3XOn907q28oM3iYxVSYV9e3PUjCkF4zRexHNMuf5y9X7qPcsqtq0FZOCcPPjHBQlx/MIIYQQclwouDtJimtWQ1VkDLQ1AXpvQq6bJxNOS/PjeAsr4JGRbphZkp9MmGnAK1jtEGxO2N0TY6j0apw2hztnD6HexlsEI8jU2ziOY+OmtN5Di9UOi2CH1e40pnrQ16te
fYFREEc/t8Y8gE4PeIsVFqsdHMfBrk0Qn8liye65tQgsEMwvr0PZsrMn2lxe7X1OBFkcx4HjOHgCJSiqWQWPFuR5C8rg8hWyuQU5tl/+0hpY7S7IYgq8RYA7r0jreeRYaqSYNsbapRMRJCPjGOttRSoenjQu0mp3wa3NW6j3YrKgY6JwUCa704uimlWwu7zIL6tD5arz4NCmyOAtVsiyiJGuQxjq2Ado6aWZ0sk4JDGJkR42MT20aS5sDje8heVGQKKqKhvLp/3RRZFEY25JGH9oARJaenVkbCCrgqkiS1AU1rMo6ymYigIpnWLTavR3sBWN6R7YNA+ZMgMmlkoqQxbTSGQUxzHT52M81veJlE5CllgV0KnSPTOrgGYWrtHpgamiB6GmIG1a6Z6qYhwDMjMPP/ywuYkQQsgiR8HdSeIOFMPtL4GizdV1rHE7OjmdhM3pNsZxkWPjpyjOMhXeIqCoZhVKak/Pas+s9jlJjl7DzFRQi2AzevIsFis4np8U5ACAYHdq4/wmtsdxrKeL4zg4vX6j18vu8mank+o9i3aXMe1DZjVR/f8WwWosF6wTYw+Rsb8Odx64jKke9OknbHY3BJsDVrsTVi2N1GK1QbDZWZAWYAEhZxHgyAg+fYUV4HiWRun2T67mqo/z0/9wAejHdHKPjmSa+FvvVfUUlKJ8+TmoWnUeCiqXw64V0XHlTVQsBYC4VkRFFlOsomYiCjEZg9XpgSDY4A4UG8d1rK8tu+qoxCZSV9XJqYShoW707N9qBFXR0QEkI0FWNTQyBlVVWbVURcL4wBHEgkNIxkLGBPOsaE92AMWCOy2AkiVWYCYeYt8bOebeU2QJYjIORUtHzaQHcHqwKKVTkCWJzR1omgBep2+DvXaOdfSgUFXYtBWcKbjL9RyTnD1+ZFpWrVplbjqp+lp2GD+EEEJODgruThJeEBAoY/OjBQc6kU5Gj3mDk4wGWZl3kVW3I9NztBTMnDgOVrtzUjAnWFlAdFy04imcNhffVCyClb2OaR2rNm/f0fCCFTanG4KVjQlERkA6Ve8lx/NweP2sWmLmccp4zPMTBWWsDhesNiesNocRZHLcxPP0Y+TyFcCqFaaBFrQW1axEoHwpCirqjXadJ1AMu4v1MOrBjDd/YqJ4aL16zrwCBLQ5BI3AVQuaeW0cJC9Y4fQGUFSzCuXLzjHGReo9eJnHPzTUZUzq7g2UAByHgoplKKk7E8jo3dLnTZTEFFRVgZhkhVtY76cDFitLCVVVNlWFIksY7jqI8YEjGO48gOHOgxg6sh9SOgWoE2m0YjJu9FImoyFI2nx+uswiMtHxQfS37kFSSyHNTJlkaZFsCgiApZ5mjj1ERs+b3q6oLF1UVRWoOYIwVVUz5gNMG+dl4t/MSeYVJMJjSCfYmEmduScyJwrujtvnPvc5cxMhhJBFbuq7THJC8bwAp5eNNRrrb0P3/i1Gj8JUJC09zFNQal5EZhlvmVya3RzsHQ9zimBOHAeLaT19Ggczb2F59lx6PA/OIkxMleBivWeZhVLMeN4Cm8ub1cPJcWx8IPu/xQigoAdHHIdAaS2g9ULreO05DpePTUKvBVYWwQabww2Hx58z2Basdi3Vko1dhFYx1AhO9XGHVau0XkDemOBe355FsE4ujpNRqAba30P0QjMcxxtTIeSX18Otp8MacyJO/AHF4WIputHxQSiKjPEBllLpK65Gaf1ZGWsCyVjYmMcyHmbz9wFAeLgXw10HIYkppOMsCIqHR9HbvBO9zY3ob90FRWJpnDqVTXQHAIgFh6EqEsZ6WwEAsjQROLFJ5kUj4GPj/OSJlE5VmQjq9KBQUVlwZxR2yc4cUGXJmGdQkSeKrsgiK1CjaoEhe67KCuwMZM/faU4NNb8GTAEsmZndu3ebmwg5IVLpNBp37sKWbdsxNHT0e5UT7dvf/jbWrVuHb3/72+ZF80Y8HsfnP/95rFu3Dg899JB5MSHvCwV3J5nLX2Tc2MRCI+bFWRKR
McBU6ZGcGBaBpU7ONn3S9GOZaloHM39JjTH9hWC1A1qPVuZ4QWjB0dEIVrtRDRNagKMHt5zFAj7HHGR2lxfFS05DXnGl0ZZfsRSFVSvg8heB43nts83B7vLC5nCzIDVHr/NEIMkZPYJWuxNObwGsDrfWS6caRWnYemzMqf7eOI6HYJtcVMjtLwLHW4yxjGyicjYdBsDGQLryCo1zo/cI2pwe2N15KKhajuLa1RBsbBqL3uYdiIz0wWp3weVj6aSZv5PJyBhErepnZtCiyCIS4VGEBruMYFlMxiAmo2yqBlXFeH9H1ji10ECnMY2DPnUKS7lMQ1Gyi53IUtoYI6cHbUZKpyQZvXR6wCWm4lkT25sDMSXz+doyxRhHyJYpsrZcYXMSpuPR7G0Y0z2w9cypomyZilzpt2TxSiei+O1dV+KbDRz+9MN/MC/O8qcf/gO+2cDh0OYXzIuyvP34j/HNBs74ydxu5utl/rz9+I+NdQ5tfmHS8ns+tRQRbU7U6WxjvgqFwnjsj09h0ztbsGXbdjz6x6fwXuNO82pz6p577jGCu4MHD+KKK67AunXrsn42bdpkftqsMr+uOUgbGRnBRz7yEWP55z//ecTj7I+LLpcLv/zlL3HnnXdmPYeQ2TD5jo3MKU9Gr0ciPDbpBitTIsRu7qzaTSk5cWajly6X6QZt0w0COY4DOA4Oj59VetR6sfQAiY3V42f8fjKnfuB5izH+LpM+btCcruovqYFdmzvQV1QBly8fvEWA1e6E3TlR3TWTEUjyvDHGzeHOg7+kGlWrzkPNmZeg+rSLYKS3aumXPJ89OTjruct+bHf5UFi1HIVVy41KpHa3Dy6tYIzTG8juqdVSZ0tqT0dB5TK484pgtbtQuvRMuP1FbJ5JRUZB1TIjsLRrPaTQxgTq8/npvV2OjGA7ERmDlE5kHWNFlgCOQyIyljX/ZWS0D6N9rUaKpB5ETqR3akGXJLK0UC2YTETGWECljfPTgz4pnUQ8PIbgYCcG2/diVOsFRMa+6vQxfWpGoRdZnCjAoiiyUQhFn/bC/EcEoyKoFuSZU0WhBXd68Edm5je/+Y25ad7ra9mBn3yiGnnFVfjQP//IvDhLX8sOpGJhrL7ko+ZFWQ5tfgHbX3gAX/1TH777lopvvRJBaKg7K/ByuPNwxwON+O5bqvFzySe/krWds6/++6zldz3WalQanu425pt0WsTTz72A8fEgLrnoAvzTbbeiqqIc72zeioPNLebVoSgKXnvjTXT1TFQjngsVFRV49dVX0djYiMbGRjz88MP46U9/esKmCzl48CC+9rWv4Re/+AUaGxuxadMmbNmyxQjwRkZG8LnPfQ433XSTsU8lJSW4++67jQCPkBOFgruTTE+ps9pdUFUFsfGhrOVZwR7PQ7A7s8Y4kRPDnJI5W8w3v1OZ/uuzCpdVq8+HJ1BiBAysd4r1rtmcbCL2mbCYitDoE7ObcbzFSMXUWaw29mNhc9/lVyxlaY7HGG+o9xb6y2phc2akiXIcOLDn8ryFpWQKVq2njk0GnynzGOupmy5fASv44md/TOG0IjXQAu7MCqgcx4PjLBDszqw5Em12NwJldXB681FWfxYEq8MI1n3a/IG8YAUUZdIUDhbTH2RURYHDkwdfYYXR5vGzQi6hwS5A731TWepkdGwAqizBlccC0vG+dvQc3IbIaD8kMWXMBajIEoY7D2K48yDGeluhKgpkMW2koo73dWC48wDG+49o6ZAy4uFRJCJjGMkK9NjzoPe2aR1rkphGOhEzUin1apmy/j2VcSoyx+Ppk8fn/OPVKT7mLuc+ZzjW8hNpzZo1xv9PxByIcy2diOLVX30Fl37qa/jY135nXpxFX/eca27LWTk403DnQdScebERiNmcHiw790MY6jxgXpWYvLnpbYwHQ7jgvHNx7tqz4fV6cPWH/gYWC4/GnZPTgg81H8a+/QdPeupmTU0NysvLMTx8YvZj+/btOPvss42iRi6XC5/85Cex
ZcsWxONxFBYW4tlnn8Wtt95qPOeKK67I2AIhJ87Ud1pkTticHhRVT1RmVDPSk1RlokCCqigQkzEINgcVUyEGFixxWjGRifn8/CXVEKx2ODz+rOkOposzF2CZ4jPH8/yUgSMvWI39me5YQ47nkVdYgbKla8BpkQLH8Ua6qR686kGb/q+ZHuAJWo+lHoQ5PH6A42ARBHgCJbC7fPAESowUTWhBJm/hjR5AI2VU+7e0/kw4tT/KCHZWxdTh8SOvqBI8b0FM63nT/3DD8Tz8xVWAFmzr2/H4S+Arqszq/XN68430a/1fRRIx2LEXMKqOWowesOBgJ7r2bcZgxz6kEhEMdx5EKh4GbxG0Akyq0XsmpZNGICJLaWMcZyoWQmS0H+Ghbujad79ppINmFm4JD3ej//BOI7hTtTF5UooFFlI6CVVlwa2Rkqqy8YEDbU3o3r/F2JZOltLTmuNzOhRtUvtcjjcIy9XbmCmzmupc++IXv2j8X5bSrEd3HrM5PfjsPW9Mq7dr27P3I6+4Cisv/LB50ST1a6/EYPu+rBTKw9v/jNMu/RgAIBWPIBkLwVtQbnrmhOHOgyiuWW1uNkxnG/NNX/8A9h84hJrqKlxw3rlGu8ftxpKaGgwPjyAUZlkKuu07dwEA6pawgnEn25EjR8xNJ1R/f3/Onrl4PI7HH38cF1xwAVwuGlpDTqzcd2VkTjl9E5ORKxlFEhRZL7uuIhEZgyyymyDOQqeNMCzgYUFQZkVNt78IpUvX5JyTbzqOVoAl01Rj6CawZdPZD6NHWn9PGePw9PRTI+DS0zinCiwtgjYnH1vfKEjDAZUrP4DS+jUQ7A4U1axiRV4yeyY5DhxnMYra6NuY+JcFfhzHg+cFWO0uI4i1WG2sEIlW7AZaBVG9Z4EFufq6bPt6ARk9TVSRJSSjIaRieu8fB1lMw+0vhsuXD4vNbvSKJaNBo4dvvLcNYioOV14hAuV1UFUVyeg4FK13bqCtyZgEXRZT4DkedpdPm4KB9calE1GEhnsgpRIY7TkMxTSlQjw0ClVV2Otqk5dDu1mG9v7YFAtpKIqM0HC30UMoy2y6F31SeD3VdLBjH0Z7JtK79F5nnTld9GjkjJTYTGpGQZmZksXJ4wQzsQqqWuGaKQLL9+NoQenWrVsnHqgTvaULXWS0H3vffAIX3PSv5kU5lS9fi1t+8AJ+/a+X4JsNHH7yiWpc/S8/nhQYbnz0B8ZYud/edaXxudaFhntwz6eWGuvkGud3rG3MJ29tegcAcOnFF5gXYdnSOgDAoYzUzNa2DoyOjqGudgny80/udE2dnZ3o6+vDuedOBKWz6dxzz8WuXbuMtE89eDPbtGkT1q1bh+uvvx533nlnVk8eISdK7jsjMqc4bqK4QzQ4kZY53t+Brv2bIUtpxLUxNN780ilvaMnioyUsAlp1SaP9GMHPsUz3ecdKM9UDs6OlY+oyAzeO441UQE4roMJxE8VTjsUiCFmFVYwJ5fW8QS7jPeaITS0Cm1aBBXBagKgHd5yF9ZJq7ymzmEqgjN3wsN5B1luoQoXV7oInvxSFVcuNOSqNYFELDB0urxEQpmIhIxgpqT0NDm8A+eVsCgnztAWymALHccYYOW9BOVzavIHh0X6oioK4VqxJ/xdgcxHyFgvEVNwITuLhsYwiK+yPSnpwIaYSENPadAoimxpB1ebHM/4YoLDpFeKhESSjIURG+jDQvhdKxnQLkZE+pGJhKFIaocFOyGIKydhEQCamkkYaqaooSESCrEdwGoETqxI6ORjKnHx+JhRFzp4DUBvDmNYK5igKC6wnxhZOfm1MI+jLNWehTtWOaVZbjiBO702dLyKj/VmB0rEKp2R67df/jjMu/wTKl681L8rp0GYW2P3T/76N776l4stPdOHVX30la8xdcKATeUWVxpg8AHjh3omeUQBo2fqysY3PfP95/OkHf581p990tjFfSJKEgcEhnH7aKhQVsrHKmZbUVMNi4XHgYAv741IwiDc3
vg2bzYorGy41r37C9fb24uqrrzaKl9xyyy24++67Z2UuyMyiKHoRl1WrVuHuu+/GLbfcYgRvHo8HZWVlWT1zl156KRobG/Hiiy/ivvvum9cVPsn8cew7LnLC8byF9UpkpLEBQHi0D7KYRiI8ishoP3iLwCr/TfPGmyx8HM9SFpERJGgLgBn0wB2vYxWI4biJHrhjmQjuOBaGaT2S7D1qvXfTxPNs6oSpsAAyI+3TvFx7Lm+ZCOKMIFP7N9c4RKfHD3egBB5/sZGWqVfpLF6yGk5fPpsChRfg1sbP+QorYHfngRessDu94HgLQiO9Rg+U1eFG2dKzjcnYPflse5njjGzGnIJsjkbB7gTHW5BOsBvMibTHibRvm8MNi82RFRAEB45kpRnqQVdouAeD7XuMIEZMJdF76D0Mtu9jk8DrPYKyiPBIL0Z7DmOk+xCgn09FRUrbVnCoS+txlI0pI1RFZj1gCuvl04MdMZUw0jz1uQEzmYMeffqHzFRSRWJTRJgDoqznTjHmT5VlIyhVFQWyLEGVZeN4qlq1UCO4m6KXkQXCUwdeYjIxaZymTlHkSdvVz9nPf/7zibbjrDp6ssY7egvKcNdjrUbhkWONsdP1tezAYPs+rLnqFvOinNKJKDY/9TOc++Hbs8bcXXjzl3B4+5+NnrUvbthtpIPqyzv3vGOkcq66+CNZBVTqzr4cZcvORtuON4zXOtY25hNBEPDpv/s4PnhFg3kRAMDldGLdOWdjPBjEo394Eo8/8TQi0Sg+eEUDvN6JOU7nirmgSmNjIy69dHaCzGeffdbYZmZwpgduevAWjUbxyU9+Mmfapcvlwp133om2tjaMjBy9Mjoh79fkuxoy5/TCEVa7c+IirqrG+InQUA/S8QgsVjuc2l/kCWG4nD1PRuCSY0652XTsgIvLOY1CLsa+ZgRQ7CHrQcsVhE2FzzHfHTKPi5bmmdmWyeits2RX40RGj12uCqQcxyNQugQFlcthsdrgCZQawZhFsIHjeLh8BahafZ7x/LySaqMHwmK1geM4pGJhSGLaeC197CAAWO1sygqnJ2Dsu7ewHBbBBqc2NQPP83C48ybG2OXoUeJ4Hr6MSn8AkIqHER0fBAAIdhcS4TH0Hd6JmNamGx84wlI2xTRGelqMqR8AHokwGyuoB3MsbVQxwg5FEjF45ACS0SA7z9rxjQeHWSVRMZmzJyxzXj8dm39vImiKjg9AkUWjOqiqKkjGIzl77vTeN2TMIWoe+6f3TKpadVBZYtvW908P2PTvbXOwqWNB69TBnarIkNLJnD14uZ6rv/75558/0YbjqzqaK2g+lbXteAN9LTvwo4+VG71+u179PR75+g0zToMMDnQiFY/AW1BmpEdn8pfWGEMmCquWmxcDAIpqWM/QdLYx35QUF+X8ftRdeP4HsHrVCgwNj0BVFFx68QVYsXyZebVF4amnngIArFu3zryIkDk3/bslcsLo6WOKLCMZCSKdjCGViBoX8FhwCLzFioqV66aV3kYWD05LVzRjN82T2+faVPuXS2ZPGKvAOXHjf6xKm7no1TCzaTcqmT2BR7l5YYVqJl7XwlsmAr9c4wiNoJr9W1i93JgAXQ82swJXbeye/t4tVht8RRXGTbrVxnrgMgNVfRkvWI3jY3d5UVS9EoEyNrE8tOkvxGQCspiGkmO8mWC1ZxWkyT6+nDZNC+sFU2Qpa8oLVZFgd+fB4cmDIokTKaiKZBSC0cWCI2yCdlmEp6AMJXVnQlUkjPa2Qk6njRvf0Egvug9sxUDbHkTGBrWAaiJYMgKqjEBHzZi3T5FEjPW2IjI6YIwTZEEdS2vUK3tCC8JkkY0LZI9Z8GcOrtLJOMb62o2ATpHZsWDTN6iIR8bZto3eval67iYmo2ePs9+D/jjX89kcgqYeSu0Y3H333RONx1l1NHOcN/R9PYVd8smvZE018N23VJx99d/jM99/Hp+95w3YnB4c2vwCvvfhfPS17DAqY25/4YGsgiqbn/qZ
UUEzc/3M5cvO/RBsTg/0OewyU0fbd72J4EAnKlawm/ljbWMh4jgOV3/wStx15x34wr98DuvOOdu8yilBnzDcPC+ePg/eQw89hCuuuOK4p03YtGkTfve73+HOO+80eu02bdo0aZt//OMfUV9fj8Icaa6EzKaZ3S2RE8Koluf2QVVlDLbvNSrXscIQHAqrlmdNMk0IYApSsppn1tN1wswgKMvsZeR4C1jnGnturvd4LDlf14jnJsYqHm3b5t65zIAu1/Gd2N+Jf837wR6z1zSCOm27PG+Bw+2HqshIxcOwWO1wmP7qHyhdAn9JDQJltcY4S8HGUjEzxwey7xXVKGCSOTYQWuVQPZ2T43h48suMdFSr3QlvYTl4wYrS+jNRtuxsFNWuhrewwtiOYHMY+z3Rc8foQSPH81AVFkjpPVBOjx++okqk4mFjzBrHWyAltXRQvRCMFnDpZG078cio0aaqrEdNVVUjrTGdjBkBSzw0guCAVi0vY1t6744RGMqs4Iu55y44cATx0DArJKMoLL1TUdBz6D2MdB3CwOFdGGzfm9GDl7vnjAVwE0FTVtqoFhgGB46gY/dbRrsuV8+dHsht3Lgxq22q9FJzcJgps1dXVeQptzFXMicL//P/fRW7Xv09vtnAZQVOxzLceRCJyDjCI32AFhCe++Hbjd6+71zjRV5xlZEKuvLCD+Nj//573H/7uqzlmSmWn/necwgNdU+MEfzB3+Pvvv1HI03zWNsgJ088Hkd/fz8GBwcRj8cxPDyMcDj8vitp6pOUf/vb38YvfvGLrPF9l156KX7xi1/gC1/4gjFeDwCNuSNzgguHI8Y3eUtLM9aund4A5RNFkiQIpjm2zKazznw00tWM0d7DxmOOF1C75jJIqQQEuzOrGuJCtlDP74mg3zRmpQ5q0vEIbCc5HUhRZMhiCla7Nvn2NM+tqipQJAmSmMyaIPz9YumOKbi1ypSx4LDx/+lQZClnuqdOFtNIxkKwOdywOlwQk3H2e5sRQKYTUaiqolWqjBvHRhca6sZAWxOgpXzphVR0qqIgHh6F0xvASHcLQkPdqDu7IasoiSuvALHxIfQ2N8Lh8SMZDaKoZhWGOw9CsDkgpZMoW3o2fEUVaNn6MniLBdVnXIxEeBQDbXvgyitE2dKzkYqHwVksE0VcOGCwbS/EVBwldWdASqcw2tOiBXETAUhecTWg7Wv4/2/vzuOjqs7/gX9mS2Ym64SwJZBAQkAQ1JSwuSBCFZdaEEsrtSB8qbaKooWfxaVWrCu1UkURpVIQ64pGImpFi2DYBKIgoAhJgAQJ2ySTdWaS2X5/zNyTe28myWSDZPi8X68Rc8+Ze+/cO0nmyfOcc6w/ie2W3mlI6N0fVbZTOH1kPwAgIXkAyk8ehUarg8dVC73BCLfLiT6DRwLQwFFZhoozP6FX+kWotp1CxeljGDjqOiBQRgpo4PV44K5zoOTQN9BodUgelAVTjAVHdm+C21Ub2BdgjkuEz+OGo7octpNHENe9L2K69Ya9sgw6vR7uulqYYhPgrqtFhNGM4v3b4KgqQ7c+AxHTrbconTxZsAexicmotPoXau6eOgQJSWkN3k+OKhsio2LgrquFVqsXy5LUOWtgiDRBo9HCXetAraMaZ4p/RG1NBQaN+QV8Pq/4A4G9wgpotDAHSm6l5+sjjBg5chTy8vKAwHtZq9PDFGOBs7oCxqhY8b6rtVfCYIwS+/T5fIHZTn2oKbdCb4qGx+NFrdMBj9cLj8eLuro6pKUFJggKkTz4CnWyEyLJ4cOHQ3rPLVy4EOPHj2+3MXXnkrTo+bmYRTPU601dT8M/PdM5Y4q1QKPVIcrSE4AGCb37wxBpgik24bwJ7Khl/Bm64JmnpoKQs6UlZZly0utq9zGDqkulzqo1p7lrKsotA2WZWp1/siQ5+XIIwdb/k2bMjLb0FIuuy0nnrNXq0K1PBpIHDa/PFGp1gaUZtCKwd1aXQ2+IRKQ5FkkDh4tgWQo09BFG6COM
geDD/3PGXeefgROq8ladzoDI6DhotFrF+F91ZikqvgdiE5OgMyivV1RcInQRkYo/RugNkf5gI5BBiu/lXx/Lesw/xbqzpgLuOgdqyk/DUVkKn9eDatspuOv8E7B4XHXwelyKsVauWoc/2NUAgH/JBQDwedxw1TnhcbtQYzuNEwV7AJ8PtpJC2ANjBd11TrFen7Pav9afs6ZCZNikrJt8zJ4UsEN2LaSSUa9bWntPVoopGysoykUDGTP/8f3loR63y18Cqi7X9PkArxfPPfecP7MnLcUQyN5JS1FI3HW1QbN3UnmpOOdOkLkjCsW8efO6dBZMKhVdsmSJuomozVr2yYY6VFR8D/S5YCS6p1yAjJETERtY+JioKY0FT0HHhJ1lGrQuuAMAf8lpK5/bKOU+233/0syaUrAVJBiUlllAI8GiVqtDr/SLEd+rX6PBrVSurdFqYTCa68femaIRYfKXb+sNkaJsUxsYE6fTR4jMkjTWrUf/oUhMuUDMDBrXoy96pg0VwZ203h8CwWhicgYSkgdAExjXJ9pkS08YjCZEmKKgj1BmJTVa//WXZ7d8Xo8/wAsEI4ZIM3SGSBFMSWWl7rpaeALbTh3ej6L9W1Bx+hgc1Ta4au1ifTyf14NTh/eiuuwk3IFZPOvsVbBXlqKmshTOapsI2nxeD6rKTqDadkqUwrtrHf7tpSdE0GSvOANfYPyftLi7NGGMf1uZIqiDLAiUstfuuvqST/kMmNLz6pz+4NReYRWTnNQ56gNIxYyZPh+8Pi+uvPJK/+QxUkDmq59URQSPIrCtf748mLNX+ktO/X2VQTpRZ7Rw4cIGM1d2NWazGcuWLUNeXt45ydpReGvnTzbUVhqtFhGmKGh1ekSoxsgQtUSwwOGsa8GYuwba8txGaPwD+eq/7oj9AxBLUQQJzjRaXWDCGFl/FSm7Fuz5kI0F1Gr9E7JotP4FyZXBlk6UdEYn9PJn4LQaxCQmoWf/oWJcnE4fAa3OPzlLpCkasYnJMEbFieuk0Wqh1RlgjIpDhCkKEeYYmKIt0Oq0iJRNFCFlAgFAr4+APsLUIJDUBSZlka/JaAosGSHxZwUtqHNUwRUoP0QgyJMul7vOAXetA+WnimAt/hGnDu9TlKUCQMXpYvH/1bbTKDtegDNHf8DJwr2inBIAbCVHANnYMymgk9b7i4yKU2THpGssnxQlwhjVIJiSJnXxetw4kb8HJ4/4x+b5xwm6AXWmT/oXPng9/uN5PS6cOrIfZccLlcGZf8VyPPTQQ/B5PfUBnc9bP7mLtE3KOMomp5GCR3tlGSqtx+GotvkDQ68HrVlOQZI0cLh4EBHRudG+n2yozQxGf0kVUbhoLEBpjkajafVzG6WagCbUZRpCJTJ2jQRtEimT1hhFWWcQ8sBdukby4Arwv9bIqFj0TLsI5jh/abcGWuj0EYiM8s/gGegW+FcDnSHSP5mNbLZVjUYLQ6RJBDX+cln/+orShCoAECFbd08fKCOPNMegzwUjYemdhqj4Hoop4aX9G4xmRVbKFB0nArpTR/zr6AGAy2kXgYucVH6qngLf63GL++Gs8Qdqrlo7NFotPK466COM0Gi1/sAmMCmM9dhBVJw+Bp/Xi4rTxwCNBlFx/pntpPX/pKBNTp6Jk8ofpeUVvF4PPO46eGod8NTVwl5RijNFP6L8pD/49PmUs3S6nA74fP7nVNtOweWsgaO6XCzL4H+SP8v2+eef+xdud7tQVXoCdY5qeL1ueL0euGv9WUspyPO43SLDJ2VAqwJBrqPKJgsKA8cgIqIuqX0/2VCbBZ++nagLaybQaYyUkWpv8oCpsbLHtgjlvJsPWqWsWfB+8sXjm8rQarU6GCJN0GoN/rX/pIXYFfdEFfBK2UBZoKoOHPUG/7g5+WQwUqCn/uNUZFQs9BFG9Oh/oWK7f2kFfwCvCPp0esQmJosAr368ma/BbJYajRY9UofI+mnQLTkD0YEZDBOSApMFyCIWf9YyIjBbqHIGYmd1
OapKS1BZWgJfIIMqBbU1laUAAEfg33oaOGsqxHl63S64ah2oKvXP1Oh1+RdQd7vr4PH4A6/amgqUnyoCAhk7+QLmFWeO4UT+HnjdLtjLzwT6uMWYQIjMXX22rsJ6HBWni3H66PdwOR2wFv+IYwe+BuRlmV4PagOL2kuZOynw0xsixNqAbcncERHRudf0JxAionNIHSi0lQYaRbapuSCsNSLN0W0+bym4aiwDqNjeSB/IgkhdYIZSKUiTv251sCcfJ6guY5X4x/lp/ev0SeP9pFJSVbZRZ4gEZMtaSGK7J8Mc180fnKriCZ0hAj37DxVf14/nq+9oirEgvlc/f3vgnur0BphiE5DQuz+6pw6BpXf9THB6QyQio2JhjIpHz/7DEN8zFZFR8YE2fyAZ0603TLHdUGU9Dp/HjYTeaWKhd0dFKSpOH1OsDYjAOEdDhKl+whWPGxWni1F+8ijcrloxFhA+H6pLT6AqMD2/RqMRY+TkS0m4ax2BCWSsqHPaYYyOB6BB6bH6mZSP/7gLlWeO48knHofLWQOPLPNXaf0J7jonvB433HVOxSzM0kLwXp9/eQWxeLvHnykEmLojIurq2vYJhIioK9EoZ6hUBxztQf3hvzU0WuVkJa2lCQRaUjAnZfwUGUGNMkCULybfWOawPvjUBnZQH9ypX79Go4EhwqwIqhFYAqF3hn/RY4+7PjjR6SNgiDTBGGMRQVuw2YItvdMQbemp2Cadr95gRKQ5BlqdXmyLMMegZ/9h0Or0gQlpIIJFvdGEXukXI65HCuJ7pMBgNEOrMyAmMQn6SDO0egPcLieqyk7A5bSLbKlGq4XWECHKMr0eN2rtVag47V/+oa6mUpFttP50SGT03K5akVVzVNkQYYxS3HP/pC8+GIxRiDBGiQDc7aqFu84J28kjGJ15IUoOfatYiNzjcYvSUHtlKewVVjEJjJTtg9c/g6jL6Q8862qqRLmpfD0+IiLqetptnTun04n3PlgLa2kZUvr2wa9u+qWi/bt9+7FhY65im8lkwi1Tb4Il3v/XU4S4DlYofajr4v0NX+f63rrrahUlhl6vWzHLY2chLUXQ1gDP46qFs6YSkaZoMQ4Ogen2pbLHWnsV4PMhMjBmzufziUDCWVPhn1ylCfk7P4PX60G/YZfj6N7N0EeYkD58grpbUNK6gScL96LidDE0Gg0Gjr4BCJQMHv52I9x1DphiEuCoqi9LBIDUYZcHpvL34mTBd3DV2qEzRCJp4M8QaY5FbU0FzHGJOLTjv/B5PejebwiiYhOhj4iEs6YSWp0Bdc5qnCzYA1NsN3RLHgAEgkuPuw4ajRbmOP9kLwW7PhfbfD4v9JEmuGsd0Gi00EVEwl3nRJ8LRqL0p0NwVNmg0erh87qRmHIBvG4XykoKFecu6ZV+EarKTqHGdgoJSemwnTwiJlbR6vTwetzo3m8IHJVlqCk/jYGjroejyobi/VsBACve/RSzf3M9DEYz3HW1/plNtRoxPtAYbREzg3ZPuQAR5liY47rBXlEKt9uNk4XfwQdN4KFFXK9+8Po00BqMqKurg0bf9j9UEBGFo/4pyepNnUa7/Nn6SFExlv97NcxmMy6+qL6URi2lbx/Mm3uXeNx5+yxFYEdE1JF0BmX2qCMyd+1B206ZOymrJs3eKbaqX7e8LFM1/q45OkMkNBqtCB7V4/OaImXVxMyO8jaNVpyXfOkEKZtnjI6HRquFXh8hxuxFmmOg0Wih1elEP2kcs9EUoygj1ekNMAfW6otNTBbZTa30xwdZ6aopNgE6vb9U1BzbDd1TLgACGcWouO4ikycmJQnMTOn1uMU4N4l/XUH/taouO406RzV0hghEJ/QS2U/IJmaJiDRDo9HA5/WiuuykoqT2yy07gcC6fv41FesnftHq9IqxfK7AshBSlk89AQ0AVJ4+jpry0+rNRETUhbT5k43T6cTmLdtw4ZALGmTrWsPn8/91v6mH1+trsI2P8Hnw/obv41zfW4/H2+TX
neXhhabBtlY9PF7/eCqvR7HdK/s56/V6G70OnhDuV0y3JMR27wtpiTSdwdigT2MPjydwXoFFu3X6CEW7Kdr/xz+DSTbhSqAU1O12w+tF4FpJJYWA1+uDx+OFRqOH2+0OxLcaaCNM8Hh98PoArw/waXTQ6COQNGgETHGJ0Ogi4PF44PVp4PF44JO99oS+g9AtZTA0+gjE9eqPCHN8YFZRHaDRwufx+F+LbOwbAksNSK9N0q3vBUhM9U8uU207CZezBjGJfeH1+qAJlAyLmUq1OkTGdENEYGxgndMBj7vhjKHw+aCPMCFCFhxGJ/QWQSYA2CtK4fF4UBdYL9AemBhGnrn2+jz+ReG53h0RUZfVbmWZkg2bcmGzlTcI9DZs8pdkThg3VrFdzR1C2VYofajr4v0NX7y3Z5fX44ajygZjdHyDMW+SusBkHuqZIxEo61SPoVNzVlcA8MEYHY+D2z+GOS4RfYeMVndrkvXYIZT+dAg6QyQGZF0tth//MQ/VtpPoO2Q0jh3YAfh8SBo4HC5nDRKSB8BV64BOb8CJgj2oLjsJU4wFvQdkwmA0w+fzQqPRwlp8ELX2SvTOyIQ3sGC6x+0S16PWXoUIUzS8bhecNRUwxSbAUVkGQ6QJEYF1/PyLfdeXhRqjYlH47ZcwRBihjzTBXmFFrwGX4GTBHtEHAGK794GjygZ3nVPMptln8Cjo9BGoOHMMOn0E9IZIGIxm6COMKDn4Deqc1YjploQoSw943S5YevcXpZhxPVJgirHgZOF3AICvvt6NK0f7xy1GRsUisc8gnC76AV6PG/E9U1H60yEgMEGNz+tD74xM6A2RqLKdgr3KBkelDZHRcXBUV8IHLXwaLaDRIa53OlwuF8syiYgaEfZlmaGqqanBsn+txOIlL2Pxkpfx3b796i5ERNROpPLLxmbd9NPUl2+qNDahikIgMwYAiSlD0HvAJeoeIfD/jVEqz5TEdOsFQ6QZptgE6A2R0AQWODfH+cs0tVodtDq9GC9oikmon0Qm8Npju/dBfK9+ooQTgZJMid4Q6Z8xNPA8+cLwkobXQYM+F4xEnyGjEBXfAwDgcviD5EhzLCJM0WLMnLuuVpSNSuWYGq0Wlp6psPRKhcHoX05Co9XCHO9fU89fuhmJyGj/eEetVgeNVo9Ka4koG9Xq9CKwk77WaLXonjoYPfsPRaTZH5hqdQZEW3rBG1iGodp2CmXHC1Bnr4LOYBBlq1qtTlwXaaIVIiLqeloU3NnKyxXB2fsffqTu0qTjJSdxy9SbMG/uXZhw1Vh8tXkbjhT5F3IlIqJ21mRQ5+df7SB4v4ZBTUP+5RL8/+8PsoLvqynSjJKGQPAjMcYkoGfaMACawCyOynX6RJAWKCP0+bwNM5SBvvIxfHLS2EatbOkJjVbbYLyh/FpoNBoYo+NgiDSL2UVtp45Co9WjW/IA9Ey/GF6vB46qMvi8bjF5jT7CH6BqNFroDJGKrKh/m/9cTDEWaLV6aALXUqvTI6ZbL/i8brgc/sBLq9Xj9TWfiudHBEpXtVodDEazCIB1hgjoI/3Hd1ZXwB0YeyctnyAth2CO64YIoz8gdNf5x+0REVHX06LgzhIfjztvnyUmRFGXXjZlyAWDFBOoDMoYgPi4WBw+clTdlYiI2oEIVtQTqMjIgyW1xrYr1T/fVx9LtYgxRppYSzkNf/35a2CKsSDSHOPvIR0j0C5fyF39Wpt6fQoaTf1kK4YIMSGLRJqwxP+FRhxTKmv1ul1ISEqDVm+ATqdHpClaTLASm5gMjUYDr8cTyDbWB3L1GUMtXE5/UOU/5/qF5LU6HaITekGj1Yo164zRcfg81z+hip//2knP0+kNgf/XICLSX3Jb56iWLVbuH+MolZ7qI4zQBSbD4XIIRERdV+O/8dtZ717K9Ygkid38s5UREVEH0DQXcWlk0VLL+QOnQBAGjXo98pD4vP5neQIzRErkp53YZyB69BsSyGYpz9cU6/89Yor1L10g5z+90F6f
FLD515VT/nrU6vT+pQY0UBzfHOcvpdTq9DDFWIBA0KTVGcQ4u8ioOPTO+BmSBw0Xk7BIpAyhRqOFpVc/mGMTERXfI5DhCwSv+ghEW/zln9IsmjGJSf7nBcpHpaynTm8Q23plXIKe/YdBbzRBH2GE1+OCs7IckghTtMh6QqMRWUh1eSwREXUdZyW4czqdWP3mO4oyzoP5BaixO5DSt4+iLxERtR9NoJSxcfVlla0iz4xp/AFeS3kD2aSGk7rI9iUdIkgmzhgVh15pF8EU7Q+ulDQtDu6C0Wq00OoN/lkyZfuTykCNgZk9I0z+xchdtf7ySY1WB31EJIzRcdAbIv3LNMiDO+mYWi30EZFISE73n7K2voxUyuRJpZcAEBXfA/fceTtiEnoDgVJOBJb7kAJTU7QFBqMJGmigj4hEbU2FInDzeb2Iiu8OQ4Q5sAyD/3hSyWZrHC4swIL758NmCyyc3gSbzYYF98/H4cICdVO7cjoceOzRR5C3S57pbB9LX3wBjz36CJyO9i1lPVxYgFkzbm33c+7Ia56zNhtLX3xBvbnVmnsvqe9rR90Loq6mXYK77/btr58kZe9+FB/7CYuXvIwlLy/HkaJiGI1G/PrmybDb7aLfV5u34dprJnCdOyKiDiSfGCQYjcafNWq9+uBJ04JASk5aKL1hYCgP5DT1x2pwDI1/nT31ZiCw16ANDTQV3Gl0emi1ev/YPNn+pEylVNKp1Rn8ZaSBLGJUXGJ9iWVgLJ98/J5WWl9PqxPBlfS61fdOL1v7UKc34De33FLfFmmCRqP1j9UTE+loA+fj/9frccPj8Y+xQ6D8NCIqFgnJadDrDaJUNNyqMktKjiPKbMbQocPUTS1is9nw2KOPKIKNOffci0cfexxGk3K8aEvl7dqpCIzS0gdg5eo3kTVipKJfWxUW5CMlJQVp6QPUTV1ee90Loq6uLb/RhYuHDVUsTi495t51B/qnpgAAjEYjZtx6S9A2IiLqGOosV0Ohhj7BaTTK4Kk1+5LGeKnXifOfumzfikBPJvBl0FfSxIQxak0Fd1qdDlqdTpFRAwBTVBy69clAXPe+gOzMfIHxdpFRcf7jazT+wFAb2EeARuOfnVP+ZH+mTpkhRJCFx5e9+i/oI4zQaDT+SVs0GmgCs2YCgYlhdHpotBpR1onAa0FgSQf/MWQBs0Yjykm7GimTc/ddf1AEYPv27UXGoEH80A9gx9fbMWr0GPXm857NZsPdd/2BmT8KC+0S3BERUeekzv40EDQTFjpF/NXKfXkDwYS0LIBEPnmKP0CCmCREqT4wUWtJNrHhfuv5s196aGSLfgcaYIpJEAGTdCx9YHISU6y/XFKr1QGa+olYJBqdDtD6n6MIkgOlmHLSDJhS5u/Nt95B99TBSB/+c3/ZaGCcnnQMnU4vgjeD0QyNVgtTTDfZ2ERZYCcdy9eyMXfSh+KpUyZh1oxbUVCgLPeTAq6pUyZh6pRJQcv2Cgr8JYhTp0xSBGZOhwP/WPS0ooQwb9dOxQdw+fGn33oLEhMT8dLLr8Ji8V93p8OB/IMHMWzYRWIfOWuzxfnMmnFrgxLFpS++INql8zlcWIB5996N/fv24o7ZM8XrkEoRpfOQl1FKrz1nbXaD6yB/DTlrs7Ho6SexaeOXmDplEvJ27QxaPik/b3UAm7M2G/9Y9LTiGOqSTpvNBqvVivQBGYrnBdundL5vv/UfcW+krGVTx4Dq+uWszVa0NXY8hPBegmrfH374gaJNXhYqXb+33/qP6K8O3KTS16lTJuGO2TNx9TUTQ878Bdu//H0gvQb5/VO/B+TfC+o2+bkGuxfqa0ck18xvfSIi6sqaW85AEwhcWk2jDeyl9SIDMzY2CHyk4AP+oMkfqDU8mvS1ejsQeF4TQVtL6aQgTkV9jG59BqDP4FHQBH7Nql+bRKutH4PnD+gCgZ5sQhXRt5EJT3T6CP/rFJOz1F8z/z/+stCkgVlISEoXSx4YIk3imNJzRJAaAqfDgSXP
L8bV10zEmuwcLHv1NWzfthU1Nf4Mo9PhwKJnnsIlmZlYk52DNdk5QOBDuKSmphrbt23Fsldfw5rsHFx9zUQseX5xSNkT6fgzZ83GmuwcLF+xCgcO/KAIOBxOJ2rsdlgS/AFt3q6d2LN7N9548x2syc7BnHvuxeLnnhUflKVzk8535qzZWPL8YiQlJWPxCy9h6LCLsHzFKsy5515xDACwWCyYOWs2dny9XWyTykEnTrwOK15brrgOiYmJWPHacgDApMlTsODBhzHuqvFYk50TtBQzZ2224rznzb8fzzz1hOID/vff78f0GbdhTXYOFjz4MFatXKEMnspKEWU2w2T0L42h3qf0WuXX/tDBg1j26mt44813AADz7r27yWNs2vglRo0eI+7HF5+vF/ejqeM1916Snm+1WsXzzWYz9u/bK9rVamqqUVZaijXZOeL816//LxAIzl59ZRkeefQxrMnOwaJnn8MX6z9rEOg3Rb5/6b33yMMP4PEnn8Ga7BxMnnIz3lj9unh9TX0vNPX+kEj3Yk12DgYPHoK3/rNa0d6YR/76KB7566Ot/pq6njb8Ricios6u2cBNE3pmKyhZ2WNLsmRy0oyRrlrlB3opWwexb///yWebDHRU/isjZa7aS6PBcoNzECfeLLHsguxc/eWfytdZW1MBAP5ZOwHce28gwAgcU5rtUk2dvTXHd0ePfheKxd8FjX/MYKhKSo7Dbrdj7JVXAQCMJhOmz7gNUVH+4FEe3Eiuu/4GbNu6VQQEUVHRmD7jNpEtGXvlVbDb7SgpOS6e0xiH0wm73Y6EBP9sqRaLBYMHD8Hx4z+JPoUF+UhMTBSZvKwRIxXZmfQBGYiKioatrBTOQJbvuutvEM/PGjESc++bJ75uSvqADBQXF4sAQV4OOueeezFp8hTRd9ToMbBarSEHsXt278YNv7hRnHda+gCkpKQg96uNol/WiJFiLJ38dUnk5yO9Vvm1l8Yk7t+/TzxHOqbRZMIlmZnNHmPcVeNFcGqxWHD1NROx4+vtzR6vufdSsGswceJ1GCrLyKpFRUWLeymd/0/HjgGBQBeACPqTkpLRvUdPlJWVyfbQNPn+pffe1ddMFO+1YcMugt1uh8PpbPZ7IZT3h/y1B2snkjTzW5+IiLqy5oI7edamNYJm11pIGnNnCCz2Lcj3p/Efy3++ym6Sxl6HOrhpi2AZOP85BbJv4vWrg73GKfYpntYwcxdl8S8ppNX7+0+fPl3Rrl6bT6Lej05n8I/VkwJJxbmHXpJZVlYGsywTpFZWVoYdO77G9FtvEeVmC+6fr8jGBFNTUx3Sh2yT0Qiz2Sz62mw2FBcXK0ow1WPMnKrytztmz8TpUyeBQLB4xmoVfSUWiyWkUj2LxYKUlBTs27cXNpsN27ZuFeciLwGcOmUSFj39pPrpjVIHsa0hBVfS+TicThwtOooF988X5zT91luazIQlJ7dudnOr1Yry8vImj9fce6k9roGcFNRJQZ4UXMpLVluqT1//uNtgmvteaMv7g0it/X7jERFRp9NsYBNC8NGUBsFWK3ZnMJqhN0QiOjCtf3DyrGCQg2ga29624DU0GllUpm5p/tg6fWBBc1WJq5TRlEiZOY/bv3TEK6+8omgPFngCDe+xTq8cHygyf/oI6APr5YUiISFBZCaCSUhIwKhRo0UZnfSQj4kLJioqOqQP8UaTCXPvm4dVK1eIQO3Syy4TmSVbkDFm69f/F4mJieJclq9YhR49ewGBYLF7on/dwtYaNXoM9uzeje/374PZbEZSUjKcDgfeWP065txzrzjuggcfVj+1UeogtjWkTGhSUjIQ2Ge/1H5Y9OxzinuzppGy0LZITExEfHx8k8dr7r3UHtdAzmKx4A9/vBOPP/aoCLSm/vo3Tb4v26Kp7wWT0dim90dzHv/bY3j8b4+1+mvqepr5rU9ERF1Zc8Fdc5m9ZslLCRsLsEKQNCgLURb/hCGCet+BICTYa6ov22y4vcODOw2gCUyKorwW
9V83RZyfuqvqvGsdVYAsGHzttdcU7Y1RrqsXmJ1Ttmvp+DpDBPSG4JmTYJKSkmE2mxWlgfv27RXZiKSkZNTY7YoyP/mkFwhk6fbJskW5X20UQZHRZILJbMZ/P/1EtMvHtDkdDqxY/grmzb9ffCiWl7apSzKDKSzIF5k7o8mEjEGDxDgpBMbotWTyivQBGbDb7Xjnrf/gkszMRjN+8tfRHKmk8JOP14nzOlxYgOLiYlHG2Bz1jKHSa5Vf2+bWlQvFgQM/iOfbbDZ88fl6jBo9ptnjNfdeCnYNSkqO4+iRw6J/S9hsNryx+nUsfuGlDgtq5UL5XpBryfuDSK3hb0giIgob8g/2HUJTn1HztTGQUme5/IFZ/VfSonLBA1JNkOhICpCC9W9PsolQxLUItLToejR9/aICs2XqDf6ZOEMm26d/whSNmOgFqF9rDwA0utCvlZQ5++Lz9aKcLP/gQTFOymgyYcEDD+GTj9eJ9j27d2PKzVPFB/SoqGjkHzwo2r/4fD3m3jdPBCG//d0MHDjwg2hXH99kNitK/abKZiFUl2QiME7LarWKvju+3o6sESNFQDdp8hQkJiaK8rlVK1fg8SefgcViESWDd8ye2WAWSInFYsGll12GmpoaUQJpDIwfk8/0mDFwEM6cOS0m+EhISEDerp0NZliUTJo8BZdkZorzWvzcs3jgob80GbhK1CWZkkmTp6BP377inB5/7FHMmDlL0aelunfvgUcefgBTZTNQSkFTU8dr7r0kPV9+b159ZZnIuraU/F5Kx5saZHbP9tLU9wKAZt8fRC2hqaysEsuVHjp0EMOHD1f2OMvcbjf0gfEEjQmlD3VdvL/hi/c2/Pi8Xvh8Xmh1ejiqq2CKjlF3CYmjqhzG6FhF4ObzeVFbUwVjdBzcdbXwej2IUC2XILFXlsIspvhXcrtqWx4QtYC9qgx6fQRctQ5ExfsDMI+rDs6aCugMEWKR9lA4qmwwxQT/wO6oKkPx/m3QR5qR/rPxeO211/D73/9e3a0Bn88Le0UpfD4f9BFG6AyRcNZUwBhtgcfjgavWAS+0cLvdqHU6YD1eiOjEvtDoO+6atYfDhQXIfn8N7p57nwgGnYFZCa++ZiK2bdmMKb+aGpYLdreEzWbDkucXY+5980IKBs8Hebt2YsfX2xWzntpsNjzz1BP4wx/vPO/fM9S8/in+EufOKPQ/0REREanJskL1pYktFyxhJc/k+ROEQTpJmmgLnulrP/4Mo7LUUZSQNnFewTR1ru46/yLveoO/LDOUwA6BfYpJU8QSC7Lzkh1To9G0aMbMc6msrAw1drtimzTxRllZKWrsdjHG7HyW+9XGZstTzzfyGVUl8hk05Vk09SPY2n5EnQkzd9Tp8P6GL97b8OT1eqDV6lBrr0GkOUrdHBJndQUio2IbBEPOmgoYo+LgcdXB5/M0OuFHUxkvr8fd+GQj7cBRZYM+wgiXswbmOP+EHF63C47qcugjjIg0h57NrK2pbLhEQYCr1o7D336J2MRk9M7IxMqVKzFrVmhldM7qcrhddYgwRkFriAhc7zh4PB64XS54vF643W646mphr64EdBGdPnOHwKLWmzZ+qdi24MGHO3T8FHV9UoZXPjtodHQ0Hnn0MWbtKCSdOXPH4I46Hd7f8MV7G558Pi80Gi2cDjuMpuBlk81xVlfAGBXbIANXa69EpDkWHrcLPq8HevVyCQHO6nIYo+PVm4HAUgvqoLE9OapsMESaUeeohjnOXxrq9bgD202ICCzSHoo6exUiGgkGPe46FOz6HMboeKQOuxxZWVnIy8tTdwuqzlGNOqcdEaYo6AyRcFSV1wd3brf41+VywVFTCZ9G3yWCOyKic6EzB3eN138QERGFoOEab62gaay0MlDeKPtvUEGf69eRgZ1fYKZO+WGk/2/psYPMBCqRZslsTRZSek59SWb9ecmvT8dfKyIi6kiN/xYhIiJqiTYEBo0FhmLZA/V6eirnMijRaAKzksrPQdTE
tOy8mnsdWr1BTBwTakkmFAGhf8xd08dpqo2IiDozBndERNQumg4YmtHoU+UNjXZqciKSjudfDkIRoAauRUuvSWNBriR5YBa69fEvyj1nzhx1c6NEdlXafVOT3zTRREREndu5/G1IRERhpKWBjFxjQY1WCkI09evpBdVUW0fTNFxYXXo9Lb0m6v2oGWRLQbz99tuKtqbUn4d0Xk0dp2XnTEREnQcnVKFOh/c3fPHehjeXywWDoXXT6Nc5qoNOPOKuq4U+ItK/np7XA60++P5dtXYYIls3mUtb1dorEWGKgbO6AqYY/6Qu0tpykeZY6CNCn5jE46qDLrDUQTDuOqeYVKYlE6oAQLXtNEzR8dDo9Kh11EAfYYTH4xGTqUgPR00VvNAiLS1NvQuiDnP48GG+584iXu/w1dSf7oiIiELW0iyVQmPPlSecGusjy5SdC/517tRjAqWMo3xb85rL3DV1DZrjX4svhIxiU21ERNSpNfNbhIiI6GwIHlCEPBNnk2WGHUwEZPXnWB+TNnPeKk2XSwYmbgmYNm2aoq1ZstLWJoM7IiLqspr+LUJERHQWNBZriCBEo2ky8Dm3wUqQgKm1QVRz/WWZvfnz5yuamqORT/rS1LVsYUBKRESdR+M/3YmIiM6SRoMgRcAkb1Bq9PlngTTpS9BzCLatCUH3ISMPcLOzsxVtzdEwc0dEFPYY3BERUScQPNiQByFNZe5aGkS1r8B5qcfLadr/vOTX46mnnlK0Na9+fbsmx/a18pz/Mk4jHkREdG408dOdiIjo7Ggsk9RkQCcTar8OETh1dTmjf6KVc3heKvKArjOdFxERtR/+dCcionNOrMnTWo0Eh2eDCJTU56Cpz5R1hClTpqg3NSnUgK61p/zEJp94UHipratD3re7sX3HLpw+fUbdfFYtXLgQWVlZWLhwobqpy7Db7bjzzjuRlZWF1atXq5uJ2iS0n/REREQdKNTAozEdGUQ1R5Q6ql6D+uv29tBDD6k3NUmrC/V8zt21BICq0hNY/NsBosRz89t/V3dp4INnZor+/543AXWOakV7yaFv8OSNCaLPj9vWNdkuP6b6fOQP+X42v/33RtskzZ2n5Mdt6/CXcRp88MxMdRMg208o16atKioq8da77yN3y3Zs37ELb777PnbmfavudlYtXrxYBHcHDhzA+PHjkZWVpXjk5uaqn9au1MdVB2lWqxWTJ08W7XfeeSfsdjsAwGw2Y9myZZg7d67iOUTtIdSf9ERERB3m3IYTbVOfuVNtb2pcWzv49NNP1Zua1sHBpjywaa2q0hP41z1X4Pq7/4knNvmw4IMS7Fq3PGigJJECIClrGNejL9a9cLdoLzn0Dd5Z+GvMeu4LPLHJh7uW5+HTl+5DyaFvgrb/9b9VyN+1XhE4xfdKxYIPSsQxFnxQgqSBwxGbmAQEArtd65aLPnctz8MHT9+mOO/mzlNS56jGt/9diXHTH1Y3Aar9XDHtz+rmdlVX50J2zjrYbOW44rIxuH3WDPRNTsKWbV/jwMFD6u7wer34fMNGFP90XN3UoZKTk/HZZ58hLy8PeXl5eOONN/Dcc8/hwIED6q7t4sCBA3jggQewdOlS5OXlITc3F9u3bxcBntVqxe9//3tMmTJFnFPPnj0xf/58EeARdZSO/UlPREQUinOYeWuzc5S5++tf/6re1CStVqfeFNS5zIIeP5iH+F6pSMu8CgAQ0603rr/7n9j2/vNBs1xVpSdw6vB+jJlyj9h2ze1P49Th/SJ4K/xmA1IvugJJA4cDAJIGDseIG/+A7dkvBm2PMEXj0l/dh/xd64MeEwD2fP4GjNFxSOw7CABwxbQ/Y95bBYjp1hsAkNh3EHpnZIr+oZynZMfalxEZFYs+g0cptiMQRFacPoYb731J3dQhNuZuhq28AmNGjcCI4ZmIiYnGtRN/Dp1Oi7xv96i748eD+dj//YFzXrqZmpqKpKQknDnTMeexa9cuZGZmYvDgwUAgEzdt2jRs374d
drsdiYmJWLt2LWbMmCGeM378eNkeiDpOx/7mISIiCsU5DCjaqr4sUzWhSojB1NkSeiaxdfeiPcbcnSk6gLgefRFhihbbYhOT4KyuQK29StEXAKpKSwAAMd38GTQAiDTHwBgdh0qrv+100Q/okTpEtANA99TBqDh9rNHgDQDKTxah1l6FmG698X+LN4jArc5Rjfxd63Hpr+5TnKfc4d0bUX6yCMmDsoAQzxOBLOK+je/hmtufFtskVaUnsGvd8iaP255KTpzE9z/8iNSUvhgzaoTYHh0VhX6pqThzxoqKykrFc3Z9uxsAkNYvVbH9XDl69Kh6U4c6ceJE0Myc3W7H22+/jTFjxsBsNqubidpVqD/piYiIOox6psmuSJ2p04YcTLXO9ddfr97UJPX5Neoc3orTRT+oNzWp0loCZ3W5erNQ56hGxelj6s0K6cMnoGjvZpFBq3NUY9v7z6u7CdZjB+GsrhCBm6TOUY1/z5uAv4zT4PvcDxSZvObOU7I9+0UMu+rX4nlyUlbzpwM7Qhq311abcrcAAMZePkbdhIwBaQCAH2WlmQWFR1BaWoa0/v2QkGCR9T77ioqKUFJSghEj6oPS9jRixAjs3r1blH1KwZtabm4usrKy8Itf/AJz585VZPKIOkqIP+mJiIg6UBfO3AnqzF2owVQr/e1vf1NvahfqDGSoWjLmTj1JSWMTh5wNSQOH4/q7n8fLd2ThL+M0ePbXKTBGxSG+VyoizTHq7o0GYBGmaPzf4g14YpMPF469GU/emNCg5LIpP25bh4rTxzBq8l3qJiCQ1Tz87ZeINMeKsYEAgo7bayu3242Tp05j6IWD0T0xUd2Mfqkp0Om0+OHAIfh8PtjKy7Hxq82IiDBgwrix6u4d7vjx47j22mvF5CXTp0/H/PnzRdlkW8gnRZEmcRk8eDDmz5+P6dOni+AtOjoavXv3VmTmxo4di7y8PHz88cdYsmRJl57hk7qOjv3NQ0REFILWBhSdifo1hF4G2TpffvmlelM76fh7EdOtN+a9VSDKOG9+YBUANCifbE5sYhKM0fHqzUKEKRpxPfqqNzdwwaU3inO5/71iOGsqgpY/SmPn0odPUGxXu+DSG3HBZb9E4TcbgBDOU8oWBjumXOa1t4lJVKSxgUV7t6Cq9IS6a5vo9XrcestUXD1+nLoJAGA2mZD1s0zYysvx5jtr8PZ72aiqrsbV48chJqbx8+8o6glV8vLyMHZs+wSZa9euFfuUB2dS4CYFb9XV1Zg2bVrQskuz2Yy5c+eisLAQVqtV3UzUrjr2Nw8REdF5qqPH3P35zx07U2JLtceYu2Bj4SqtJTBGxwXNoklj2KQxbQBQa6+Cs7pCzGTZI3VIg3LPYGP7JDvWvgwAYlIXuT2fv4GeaUPF5Cuhau48rccO4kT+bvznoV+KbOZ/Hvoldn/2usgAdk9texaqJXr26N7gDxZyl44eiSGDB+H0GSt8Xi/GXj4GgwZmqLudF95//30AQFaWslSX6FxgcEdERNQBmvpgTMElD8pC+ckiHN69EQhkyj596U8ioyWNa5PKOGO69UbPtKFi5ksA+PxfDyoCMPWYupJD32DXulcVM1dKfty2DrlvPYNr//j3BoGfNJHKhWNvbrBdfk4IHKNo72aR4WvuPJMGDsfD68oUAfLvnvoImdfehofXlSFp4HAkD8pSzK4pZftG3HhHgxLRs0Gj0eDaqydg3ty7MOePv0fWz+pnB+1MpAXD1eviSevgrV69GuPHj2/1sgm5ublYtWoV5s6dK7J2ubm5Dfb57rvvIj09HYlBylyJ2hODOyIioi6os02t3pIxd42J6dYbt7+4GZ++9Cf8ZZwGi25Owogb78AFl94IBLJd5SeLFNk9qaRTOrZ6qYCkgcNxy8L3sHL+1fjLOA1eviML19/9vCL7Jo0B/ODp2zDruS+CZuZ2rH1ZMQOmRBprB9k5rJx/NW5Z+J5iP82dZ3NiuvXG5P/3qngdf7suBnE9+nb4
Wnddnd1ux4kTJ3Dq1CnY7XacOXMGlZWVbZ5JU1qkfOHChVi6dKlifN/YsWOxdOlSzJkzR4zXA8Axd3RWaCorq0T9xKFDBzF8eMMfaGeT2+2GXq9Xb1YIpQ91Xby/4Yv3Nrzx/nZuPp//173P54PX64XP54PH44HH44Hb7RYPl8uFuro6pKX5Z0QMlTyoa0tpJp2fDh8+HNJ7buHChRg/fny7jak7l6RFz8/FLJqhXm/qepi5IyIi6oK2bt2q3nROtceYO6JQzJs3r0tnwaRS0SVLlqibiNqMmTvqdHh/wxfvbXjj/T27srKykJeXp97cqI7O3BG1BTNJZxevd/hi5o6IiIiIiCgMMLgjIiLqgi677DL1JiIiOs8xuCMiIuqCXnjhBfUmIiI6zzG4IyIi6oJaMt6OiIjODwzuiIiIuqA//vGP6k1ERHSe42yZ1Onw/oYv3tvwxvt7dnX0bJkafaR6F0REBKB/SrJ6U6fBzB0REVEXlJWVpd5ERETnOQZ3REREXdArr7yi3kREROc5BndERERd0L59+9SbiIjoPMfgjoiIqAuaNWuWehMREZ3nGNwRERERERGFAQZ3REREXdCwYcPUm4iI6DzH4I6IiKgLWrlypXoTERGd5xjcERERdUH5+fnqTefc4cICLLh/Pmw2m7qpAZvNhgX3z8fhwgJ1U7tyOhx47NFHkLdrp7qpzZa++AIee/QROB0OdVObHC4swKwZt7b7Oauvec7abEydMink15C3ayemTpmEWTNu7fD7RkSt027B3Xf79mPxkpfFY8OmXHUXbNiUK9qX/WslbOXl6i5EREQUgmnTpqk3URAlJccRZTZj6NC2lbHabDY89ugjisB1zj334tHHHofRZFL0bam8XTux9MUXxNdp6QOwcvWbyBoxUtGvrQoL8pGSkoK09AFwOhzIP3gQi559LqTXsPTFF7D0xRew4MGH0aNnL3UzEXUS7RLcHSkqxravd+GmSb/AvLl3YdaM3+JQfiG+27df9NmwKRfHj5fgrjv+D/Pm3oWBGelY98lncDqdin0RERERtZSUobv7rj8oArB9+/YiY9CgZoOX88GOr7dj1Ogx6s3Nytu1EwcO/IDFL7yE9AEZ6mYi6kQ0lZVVPumLQ4cOYvjw4coeIfhu336UlpZh/LixYtv7H34EAPjVTb+E0+nEex+sxcUXDcXFw4YCQNBtAOByuaHRiC+D8np90Gqb6URdFu9v+OK9DW+8v2fX7373O/znP/9Rb26Uz+f/de/z+eD1esW/Ho8HbrcbbrcbHo8HLpcLbrcbGn2kehcN2Gw2PPLwAzh18iSio6Mx7dbp2PC/L/DAQ3+BxWKB0+HAomeewv59ewEA464ajzn33Cue+8xTT2DCz6/G22++gerqavTs1QuPP/mMeO5LS57HlF9NRVr6ACAQZHzy8ToseOAhGE0mxfHV+0cg4FPvI2dtNv6z+nUAQHR0NB559DHRhkCGatPGLwFAnI+trBSPP/YoqqurAdlxctZm46djx/Db383AIw8/gJmzZotsm/TaL8nMxMSJ1ymuw9BhF4nXID8fAFjw4MNIH5CBZ556An/4451Bz1t+naS2/IMHUWO3i2MsePBhRebPZrNhyfOLMfe+eQCguG7S+Ticziavp7Qf9bmFQn2O0rXft2+veF3q4+Xt2olFTz8JBLlXTd176RwvycxE9vtrANU1J2qr/inJ6k2dRrtk7i4eNlQR2EkslngAgMPpRJ3LhdjYWNFmNBphNpthLS2TPQPQaAC9Xt/kQ6vVNNjGR/g8eH/D98F7G94P3t+z+3jnnXcabGvpQ6fTKR5arRZabWgfDZwOB5Y8vxhXXzMRa7JzsOzV17B921bU1PgDIHlwsyY7B2uyc4DAh3xJTU01tm/bimWvvoY12Tm4+pqJWPL84pDGf0nHnzlrNtZk52D5ilU4cOAHxTg1h9OJGrsdloRuQCBY2LN7N9548x2syc7BnHvuxeLnnhWZPunc
pPOdOWs2ljy/GElJyVj8wksYOuwiLF+xqkHAY7FYMHPWbOz4ervYJpWDTpx4HVa8tlxxHRITE7HiteUAgEmTp2DBgw9j3FXjsSY7J2gpZs7abMV5z5t/P5556glFhvL77/dj+ozbsCY7BwsefBirVq5QtNvKShFlNsNkNMJiseAfzz2PUaNGi7JMAIrrGex+tZX8HCdPuRkL7p8PBK63+v7l7dqJVStXYPmKVViTnYNHHn0Mr76yDDabrcG9f+PNd2C1Whu8t8pKS0U7AKxf/1/R3pRH/vpoq/+f6FwL7Sd4Cx0pKsYZaynS+vcDAJRXVMLlcqu7ERERUSsVFxerN51VJSXHYbfbMfbKqwAARpMJ02fchqioaNEuBTeS666/Adu2bhVBR1RUNKbPuE1kU8ZeeRXsdjtKSo6L5zTG4XTCbrcjISEBCARYgwcPwfHjP4k+hQX5SExMFBmurBEjFePL0gdkICoqGrayUjEG7brrbxDPzxoxUmS6mpM+IAPFxcViohF5Oeice+7FpMlTRN9Ro8fAarWGHMTu2b0bN/ziRnHeaekDkJKSgtyvNop+WSNGiqyW/HVJmitPDTY2cc4992LkyNGKfm0hP8dhwy5CWvoA8f5R378dX2/HzFmzxb2Tv2ajyYRHH3tcBMJGkwmXZGbip2PHxLGioqLFvQzWThSuWhTc2crLsexfK8WkKFLppZzT6cTmLdswMCMd/VNT1M1ERETUDqZMqQ8WzoWysjKYA5mgYMrKyrBjx9eYfustmDplEqZOmYQF988Xmb3G1NRUo6xMWdUTjClQAST1tdlsKC4uxrBhF4k+6jFm0rg86XzumD0Tp0/5y/ocTifOWK2ir8RisTQaEMlZLBakpKRg3769sNls2LZ1qzgXafZL6bhSqWEo1EFsa0iBq/zaqJWVlaHGbldvRu+kJPWmdmFJ6Aaz2azeDATO12q1YtHTT4prNnXKJFEuC9nMndJDXtpKdD5rUXBniY/HnbfPwry5d2He3Lvwq5t+qWi3lZdj5RtvIzk5CRNkZZrxcbEwGPSKvkRERNR1JSQkwG63w9HIxGgJCQkYNWq0KCWUHi+9/KrIxgQTFRUdUiBjNJkw9755WLVyhQjULr3sMsWYLKvVqpgAZP36/yIxMVGcy/IVq8TMjyajEd0TE0Xf1hg1egz27N6N7/fvg9lsRlJSMpwOB95Y/Trm3HOvOO6CBx9WP7VR6iC2NaRMaFJS4+OEEhISENVIsHW2GU0mJCYmYsGDDyveO2uyczBp8hTYbDasee9dLHr2ObH9dzNuU++m1R7/22Ot/n+ic61FwV1TbOXleGfNhxiYka4I7BD4wRRhMKCyslJscwb+EpXYrfkf4ERERKSUknJuq2OSkpJhNpsVpYH79u0VmbmkpGT/5Bn794n2nLXZiin/a2qqsS8wAQgA5H61UQRFRpMJJrMZ//30E9EuH9PmdDiwYvkrmDf/fsUHf4m6JDOYwoJ8kbkzmkzIGDQIb6x+XZRL5u3a2WD2zaakD8iA3W7HO2/9B5dkZjaa8ZO/juZIJYWffLxOnNfhwgIUFxeLksbmNFeSCdn9ko9Lk5Y/OBdGjR6jeM22JtZFlEpXiaidgjspsLt09IgGgR0Ck6ckJyfhu737xdIHW7/2D5gdlBH6TEtERETkl53dfhNdtIaUOfvi8/WiNC7/4EEx5s5oMmHBAw/hk4/XifY9u3djys1TxQf2qKho5B88KNq/+Hw95t43TwQhv/3dDBw48INoVx/fZDZjwf3zFeV5UjCiLskEgIkTr4PVahV9d3y9HVkjRoqAbtLkKUhMTBSlpKtWrhCzUkrlp3fMntnoJCMWiwWXXnYZampqRAmkMTAWcemLL4jjZgwchDNnTotAKiEhAXm7dja6OPikyVNwSWamOK/Fzz0rZiRtTiglmWjkfiIw7k5eznrH7JlisXr59W5vWSNG4oZf3Che8x2zZ+LGX06CJaEbLBYLpv76N+Ic7vzD
7zHm0suQt2tnuy/8TtTVtMtSCBs25eK7vfVr2klMJhNumXoTLPH+WTPl/dRtErfbDb2+6RLOUPpQ18X7G754b8Mb7+/Zdfr0afTo0UO9uVHBlkLweDyKpRDcbjdcLhfq6upCWgrhXDpcWIDs99fg7rn3iWDQGZih8+prJmLbls2KJRDOV/IlEEIJBomoeZ15KYR2Ce7aUygfDkLpQ10X72/44r0Nb7y/Z1dWVhby8vLUmxsVbsGdes07yNY3u2LsWHyTl8d1zQKlsD8dO9Zg+Yb2pF5zTk69Jh9ROGBw1wKhfDgIpQ91Xby/4Yv3Nrzx/p5d53twB9WC4xL14t1ERO2NwV0LhPLhIJQ+1HXx/oYv3tvwxvt7dl1//fX49NNP1ZsbFY7BHRHRudCZg7t2mVCFiIiIzq6WBHZERHR+YHBHRETUBVVUVKg3ERHReY7BHRERURc0YcIE9SYiIjrPMbgjIiIiIiIKA5xQhTod3t/wxXsb3nh/z64JEyZgw4YN6s2NaumEKmlpaepdEHWYw4cP8z13FvF6hy9m7oiIiLqglgR2RER0fmBwR0RE1AXV1taqNxER0XmOwR0REVEXdNlll6k3ERHReY7BHRERERERURhgcEdERNQFRUZGqjedUyWHvhEPIiI6NxjcERERdUFbt25VbzqnkgYOFw8iIjo3GNwRERERUaNq6+qQ9+1ubN+xC6dPn1E3n1ULFy5EVlYWFi5cqG7qMux2O+68805kZWVh9erV6maiNmFwR0RE1AVlZWWpN51T7VWWWeeoxr/nTcBfxmnwwTMz1c0KHzwzE38Zp8GP29apmxQ2v/13/GWcRjzk+5UfT/7Y/PbfRZ8ft61r0L74twNQVXoi5H10VRUVlXjr3feRu2U7tu/YhTfffR87875VdzurFi9eLIK7AwcOYPz48cjKylI8cnNz1U9rV+rjqoM0q9WKyZMni/Y777wTdrsdAGA2m7Fs2TLMnTtX8Ryi9sDgjoiIiNqsPcoySw59g2d/nYK4Hn0x8Q+L1M0KJYe+QW1NJYZccZO6SeHHbeuwa91yLPigBE9s8uGv/61CxeljisDLGBWHu5bn4YlNPvG4YtqfFfvJvPY2Rfu8twoQ0613i/bR1dTVuZCdsw42WzmuuGwMbp81A32Tk7Bl29c4cPCQuju8Xi8+37ARxT8dVzd1qOTkZHz22WfIy8tDXl4e3njjDTz33HM4cOCAumu7OHDgAB544AEsXboUeXl5yM3Nxfbt20WAZ7Va8fvf/x5TpkwR59SzZ0/Mnz9fBHhEHYXBHREREZ1zdY5qfPbKnzH2tw/g5gdWqZsVpL4/u24WIqNi1c0KZ4oOIPWiy0UgFmGKRsaIiThd9IO6K6lszN0MW3kFxowagRHDMxETE41rJ/4cOp0Wed/uUXfHjwfzsf/7A+e8dDM1NRVJSUk4c6ZjzmPXrl3IzMzE4MGDgUAmbtq0adi+fTvsdjsSExOxdu1azJgxQzxn/Pjxsj0QdRwGd0RERF1QXl6eetM51dayzAhTNP5v8YaQsl071r6MuB59ccGlN6qbGkgfPgGnDu9XlFDm71qPC8feDACotVfBWVOBmG5JqmfWO1N0AD1Sh6g3C6Hso6spOXES3//wI1JT+mLMqBFie3RUFPqlpuLMGSsqKisVz9n17W4AQFq/VMX2c+Xo0aPqTR3qxIkTQTNzdrsdb7/9NsaMGQOz2axuJmpXDO6IiIiozdqjLDMUVaUnsG/jexgz5R51U1BJA4dj+tPr8K97rsBfxmnw7K9TcO0f/94gMPzqzafFWLl/z5uAOke1or3izE9Y/NsBok+wcX7N7aMr2ZS7BQAw9vIx6iZkDEgDAPwoK80sKDyC0tIypPXvh4QEi6z32VdUVISSkhKMGFEflLanESNGYPfu3aLsUwre1HJzc5GVlYVf/OIXmDt3riKTR9RRGNwRERHRWVVVekIRKDU3cYrc5/96EMOu+nXIQeSP2/yB3e0vbsYT
m3y4/71ifPbKnxVj7spPFiGuex8xJg8A1r1wt2wvwKGvPxX7+N1TH+GDp29TZClD2UdX4Xa7cfLUaQy9cDC6Jyaqm9EvNQU6nRY/HDgEn88HW3k5Nn61GRERBkwYN1bdvcMdP34c1157rZi8ZPr06Zg/f74om2wL+aQo0iQugwcPxvz58zF9+nQRvEVHR6N3796KzNzYsWORl5eHjz/+GEuWLOnSM3xS18HgjoiIiNqsJWWZMd16Y95bBWLikebG2ElKDn2DU4f345JrpqubgqpzVGPb+89jxI13KMbcXfqr+5C/a73IrN29Yo8oB5Xai/ZuEaWcgy+frJhAJS3zKvTOyEThNxvEsZrbR1ei1+tx6y1TcfX4ceomAIDZZELWzzJhKy/Hm++swdvvZaOquhpXjx+HmJhodfcOp55QJS8vD2PHtk+QuXbtWrFPeXAmBW5S8FZdXY1p06YFLbs0m82YO3cuCgsLYbVa1c1E7YrBHREREbXZ2SjLLPxmA0oOfYNFNyeJrN/uz17Hfx76ZYvLIMtPFqHWXoWYbr0RYWoYkMT3SkWkOQYAkNh3oLoZANA91Z8ZCmUfXU3PHt2h0WjUm4VLR4/EkMGDcPqMFT6vF2MvH4NBAzPU3c4L77//PtAJlyeh8xODOyIiIuoSrpj2Z8VSA09s8iHz2tvwu6c+wv8t3oAIUzR+3LYOT96YgJJD34iZMXetW66YUGXb+8+LGTTl/eXtGSMmIsIULdawk5eOHt69EeUni5A8yP9hvrl9hCONRoNrr56AeXPvwpw//h5ZP8tUd+kUpAXD1eviSevgrV69GuPHj2/1sgm5ublYtWoV5s6dK7J2ubm5Dfb57rvvIj09HYlBylyJ2hODOyIiImqzlpRlNka+WPj6Vxdg92ev4y/jNIrAqTlnig7AUWVDpbUECASEI268Q2T7/nZdDOJ69BWloBdceiNufvB1vHxHlqJdXmL5uydzUHH6WP0Ywadvwy0L3xVlms3tg84du92OEydO4NSpU7Db7Thz5gwqKyvbPJOmtEj5woULsXTpUsX4vrFjx2Lp0qWYM2eOGK8HgGPu6KzQVFZW+aQvDh06iOHDO66cIhRutxt6vV69WSGUPtR18f6GL97b8Mb727n5fP5f9z6fD16vFz6fDx6PBx6PB263WzxcLhfq6uqQluafEZHobDh8+HBI77mFCxdi/Pjx7Tam7lySFj0/F7Nohnq9qeth5o6IiIiIuox58+Z16SyYVCq6ZMkSdRNRmzFzR50O72/44r0Nb7y/nVtHZ+7kZZMdOakKhSdmks4uXu/wxcwdERERtdnZmC2TiIiaxuCOiIiIiIgoDDC4IyIiIiIiCgMM7oiIiIiIiMIAgzsiIiIiIqIw0Plmy/R4gMCMXo3x+QCNRr2VwgXvb/jivQ1vvL+dW7DZMr1er2K2TI/HA5fLBbfbDY0+Ur0LIiIC0D8lWb2p0+h0wR0RERG1v2DBXVNLITC4IyIKrjMHdyzLJCIiIiIiCgMM7oiIiIiIiMIAgzsiIiIiIqIwwOCOiIiIiIgoDDC4IyIiIiIiCgMM7oiIiIiIiMJAl1kKwVZejnfWfAiHw6HYPuGqsbh42FDFNjp7vtu3Hxs25oqvL75oKCaMG6voo/b+hx+h+NhPAIDEbgn49c2TYTQaRfuGTbn4bu9+AIDJZMItU2+CJT4eAOB0OvHeB2thLS0T/SE7rq28HB+sXYcJV12J/qkpij7UetI9a+777UhRMdZ98hncbjcQ5PtT/X2sfr+o309QvQc2bMqFzVaOX930S0Ufaj3p3qrvRTD83u0agl3rCVeNxUVDLwQaWQrhp5IT2L37O+h0WhgMBlw0dEirlkI4XFiAV19Zhgce+gssFou6WcFms+GZp57AH/54J9LSB6ib243T4cCiZ57CDb+4EVkjRqqb22Tpiy/AarViwQMPwWgyqZtb7XBhAR5/7FHMuefedj1n+TUvKyvDJx+va/dzJzofdOal
EHQPPvjQQumL0tJSJCUlKXt0Ek6nE0eOFuE3U2/C+CuvwJhRIzBm1Aj06tlD3ZXOkiNFxdj41RbceMO1uH7iz3HBoAx8tXkbIiIMjd6X9z/8CAAwe+bvMGbUCOQXHsahgkIMGTwICHw4PH68BDOnT8MVl41BZVUV8r7ZjUEDB0Cv18PtduNQQSGunnAVrp/4c/E+SOuXCgTeJwd+PIS0/v1giY9THJta50hRMYqLjyEmOhrduyc2em+PFBXjs8834IbrrsH1E3+OXr16YuNXW5CY2A2W+DgR2F06egRu+uUNQd8vp06fhsfjEe+PMaNGYMTwTJgCAcSRo0VwOp3i/UJtI/9+lL6HGsPv3a5j7bpPYTAYxL3q1asnNuVuRWK3BMQHrq18zbvjJ04g75s9yLzkIlwwMAMmkxEGvR4arV615+bZbGX4Ji8Pl18xFqZmAgan04ktm3ORlTUCloQEdXO7KS4uQmF+Pn5x4yToDQZ1c8hsNhv+/sxTuPiSTPHaRo4ajXFXjW/TfgEgb9dOfJj9PkaOGg0AsCQkYPJNNyMpuX0/QH6/fx+qq6ow8drrUVJyHPmHDuHyy69o9vzV50d0vrPExao3dRosy6RWq6ysxKCMdPFXdkt8PLondkN+wWF1VyCQtSmvqMDwn10itk24aizKKypwpKgYTqcTx4+X4OKLhopswGWj/X+xPJhfIJ5DZ4/T6cTmLdswZPAgGJr55X/4yFF0T+wm3g/9U1MwMCMd33y7BwBQfOwnRJlNGJTh/wu9JT4el44ege/27ofT6VTsizrehk25sNvt+MV116ibGuD3btfhdDpht9uRMSBNbOvdsweizCZUVlUp+kqOl5xAXGwMuid2AwDExXbeDy1NcToceOzRR3D3XX+AzWYT2/ft24uMQYOYnQKw4+vtGDV6jHozEYWRLhPclVdUwmAwiL/g07l38bChGB+kjMti8ZdhqZVXVAIA4mV/7TAZjYgwGFBZWQmH04k6lwuxsg8WRqMRZrNZlBc5nE64XC7FPppypKgYS15eLrIO1DJbv94Js9ncZCmmxGYrb3DvE7slwG63w+l0wlpaBrPZrCjji42NRZ3LBUcguLOWljXYR1Pe//AjLHl5OY4UFaubqAm28nIcyi9UBGNN4fdu12E0GpGcnKT4I9uJU6dR53Khb5/gWaCqympER0erN4fEZrPh7rv+gKlTJmHWjFtRUKAM5qWAa+qUSZg6ZRKWvviCoh0ACgoKMGvGrZg6ZZIiMHM6HPjHoqdxuLB+n3m7duKxRx+BM1DaLT/+9FtvQWJiIl56+VVREup0OJB/8CCGDbtI7CNnbbY4n1kzblXsH4FSS6ldOp/DhQWYd+/d2L9vL+6YPVO8jpy12Vj64gviPPJ27RT7kV57ztrsBtdB/hpy1mZj0dNPYtPGLzF1yiTk7doJm82GBffPV5yb/LzVAWzO2mz8Y9HTimPIzwWBa2W1WpE+IEOxXXK4sP4+TJ0yCTlrs4FGzk96PW+/9R/xnMcefQQ2m63Jc2hMa/fX1DXJ27VTtKmfK72P/rHo6Qavl6ir6zLBneTDjz7B4iUvY/GSl/lLv5M5UlSMM9ZSpPXvp24CApk+l8s/FiuY8oqm2+U2b90u3ger33wnaOZHGv915RWXcoxWK9jKy3HkaJEiW9MYKVvQFJutXL0pqJqaGiz710pxf7/b5x/Dpfb+hx/Bbrfjjv+bwTFaLSRlUU+cONXs9xH4vdvlTBg3FhkD0sR13rxlG279za/E+Ee52traoPcgFE6HA0ueX4yrr5mINdk5WPbqa9i+bStqaqpF+6JnnsIlmZlYk52DNdk5QOADuaSmphrbt23Fsldfw5rsHFx9zUQseX6xCHyaIh1/5qzZWJOdg+UrVuHAgR8UH+IdTidq7HZYEvxZybxdO7Fn92688eY7WJOdgzn33IvFzz0rggLp3KTznTlrNpY8vxhJSclY/MJLGDrsIixfsQpz7rlXHAMALBYL
Zs6ajR1fbxfbSkqOI8psxsSJ12HFa8sV1yExMRErXlsOAJg0eQoWPPgwxl01Hmuyc4KOsctZm60473nz78czTz2hCGa+/34/ps+4DWuyc7DgwYexauUKRbutrBRRZnPQP5I7HQ68sfp1zLnnXqzJzsEbb76D/IMHcbiwoMnzO3TwIJa9+hreePMdAMC8e+9u8hya05L9qa+JdK+cDgcOFxZgzXvvYvmKVViTnYNFzz6HVf9+TREs79+3F+PGTxDtX6z/rEGgH8wjf31U8VBrrp2oo3Wp4O6MtRRDBg/CvLl3YdaM3+KMtRQbNiknX6BzQyrfGygr0+woNXYHoqKiMG/uXbjrjv8DAHz8388VfSorK/HZ5xtw5RWXhpR1ooY2bMxF/36pHX4/1Y6XnMQtU2/CvLl3YcJVY/HV5m0NMnNSSaF6Qg8KjbW0DNbSMhgiDE1+H7U3fu+eHe9/+BHyCw5j3ty7MG/uXbji8kux6j/v4Gg7Z7hLSo7Dbrdj7JVXAQCMJhOmz7gNUVH+LKA8uJFcd/0N2LZ1q/iAHhUVjekzbhMlk2OvvAp2ux0lJcfFcxrjCPxRKSEwXs9isWDw4CE4ftw/6Q8AFBbkIzExUWTyskaMxKOPPS6Olz4gA1FR0bCVlYos33XX3yCenzViJObeN0983ZT0ARkoLi4WAYK8HHTOPfdi0uQpou+o0WNgtVpDDmL37N6NG35xozjvtPQBSElJQe5XG0W/rBEjxcQ08tclCaU8Vbp2RpMJ/2/Bg81OdCOdk9FkwiWZmc2eQ3NC3Z90r+TvnaFDhwEA9u/fh7T0AVj07HPiviclJaN7j54oK6ufZGjosIvEc4K1E3VVXSa4i4+LxR3/N0P8srfEx2NgRjqOHy9p9V8dqX3Yysux8o23kZyc1ORse7GxsTAYGh+gHx/XdLtk1vRp4jhGoxEXXzQUZ6ylsJX7M0Mulxtfbd4GBD7EUssdKSpGeUUFfnZJfSlTU6QSvKaEUm455IJBuPP2WSLDMChjAOLjYnH4yFHRp/jYTziUX6go56SWS+nbp8nvIzl+73YdUgWFPOPePzUF3RO74fDRIkVfAIiMjGz1H0jKyvyl1sEyQQi079jxNabfeosofVtw/3yR2WtMTU11SB+yTYGfO1Jfm82G4uJiRQmmeoyZujzyjtkzcfrUSSAQLJ6xWkVficViaTIgklgsFqSkpGDfvr2w2WzYtnWrOBd1yeOip59UP71R6iC2NaRgSH5t5IwmE+beNw9ffL5enGOwEtqmJCf3UW9qk6b253A6cbToKBbcP1+c7/Rbb8H+fXtFH3l5rbqNKJx1meDOEh8f9BeQegwPnV3SDIgDM9KbDOwgG68jjd9B4Ae0NFZHPoZHIpX7JXYL/GW2kfdBlNkkPmAYDHrceMO1uGXqTThytKhB1oead/jIUVRWVmHl6rdEaVfxsZ+wYWNuo6V0Fkt8g9JL+Tg7+fg7SWVlJSJkY2l79+ope3Y96f4jEJTcefss9O+X2mDZBAqN/HqGgt+74UH9/SmJiY1GdXXTAVcwCQn+7+nG/siSkJCAUaNGi7I56SEfExdMVFR0SIGMFJCsWrlCBGqXXnaZyPQEG2O2fv1/kZiYKM5l+YpV6NGzFxAIFrsnJoq+rTFq9Bjs2b0b3+/fB7PZjKSk5AYlj1KJYajUQWxrSJnQpKTg4y4RCE5fevlVUZZptVobjHHrLExGI/ql9sOiZ59TvLfWBMpG83bthNVqFe+9N958B0MbCWxb6vG/PaZ4qDXXTtTRukxwt2FTLpb9a6X4C680IYB8RjA6u+RT2wcL7Gzl5Vj2r5WidNYSH4/4uDgxeyICpX/xcXHon5oiJgKQz5649Wv/LxZphsXv9u1XTKDhdDrx3d79SE5OavDB0RIfjxHDM/HZ5xuCZiOocRPGjRUlXdIjpW8fTLhqLGbceguMRmOD78m0/v1wxloq7s2RomIcyi8UGYSUvn1QY3eI2RNt5eXY9vUu
MamH0+nE6jffUYylPZhfgBq7Ayl9G/4F97LRI2G321ma3QopffuImS4h+z4amJEOS3w8v3e7MGlmTPm9OlJUjJITJ8Xvy02bt+Lfq99CRSAYT07qjYrKKpyx+svnpO3NSUpKhtlsVpQG7tu3V2TmkpKSUWO3Y//+faJdmoBEUlNTjX2yjEruVxtFUGQ0mWAym/HfTz8R7fIxbU6HAyuWv4J58+8XH+zlpY/qksxgCgvyRebOaDIhY9AgvLH6dVEumbdrZ4OJOpqSPiADdrsd77z1H1ySmdloxk/+OpojlSh+8vE6cV6HCwtQXFwsSmKb01xJppRZ7KzBnJp0r+TvjcOFBVhw//yg96qk5DiOHgk+kzdRuOkywd2EcWMxMCNdZBJWrn4LAzPSOSbjHPp2z144HA5s2JgrsjuLl7wsPvD7J1lwKf5aLE2OIPVVT8U+YdxYJCcn4eXl/8biJS/jUH4hbrzhWvHh7+JhQ3HlFZfiw5yPsXjJy3h5+b9hNpuDBpcI9O+e2A3vrPmQHxLbmc1WDpfLJbI5/VNTcO01E7Duk8+weMnL+DDnY1w6eoRiqYxbpt6EbV/vCvo9bDQa8eubJ8Nut4v3x1ebt+HaayYEnQjCaDTiissvxfc//MgAr4Us8fGYcNWV4l6pv4/4vdt1Bfs++jDnY4y9fIxYxLy8vAJulwsVlf6lEZJ69cLwn12CPd/tQ+6WbY0uZ6MWrJQv/+BBMebOaDJhwQMP4ZOP14n2Pbt3Y8rNU0WQEhUVjfyDB0X7F5+vx9z75okg5Le/m4EDB34Q7erjm8xmRWmevJxQXZIJABMnXger1Sr67vh6O7JGjBQB3aTJU5CYmChKSVetXIHHn3wGFotFZJjvmD2z0ZkVLRYLLr3sMtTU1IgSSGNgLKK8TDBj4CCcOXMa69f/FwhkOfN27Qw6eycCk65ckpkpzmvxc8+GtFA8mijJTEhIwNEjh7HomaeQlJSMRx59TJzj9FtvEWPepL5Nnd+5MGnyFPTp21dc08cfexQzZs4CAuMP5ffxjdWv45prr2vxBC9EXZGmsrLKv5IpgEOHDmL48OHKHkRERNTlyRcu93q98Pl88Hg88Hg8cLvd4uFyuVBXVweNPlK9i07lcGEBst9fg7vn3ieCQWdghs6rr5mIbVs2Y8qvpjY7KUi4s9lsWPL8Ysy9b15IwSARNa9/SuMlzudal8ncEREREUnKyspQo1qCRZp8pKysFDV2e5NjzM4XuV9tbLY8taOpJ5SRP+Rr/hFR2zFzR0REdB4It8wdAjMibtr4pWLbggcfDrpWHBFRe+nMmTsGd0REROeBcAzuiIjOhc4c3LEsk4iIiIiIKAwwuCMiIiIiIgoDDO6IiIiIiIjCAIM7IiIiIiKiMMAJVYiIiM4DLZ1QpVtyf/UuiDpM6fEjfM+dRaXHjyAtLU29mcIAM3dERERERERhgMEdERERERFRGGBwR0REREREFAYUwZ1Wq4XH45FvIiIiIiIioi5AEdzFxsairKxMvomIiIioWafyvxEPIiI6NxTBXe/evfHTTz/hzJkzzOARERERERF1IYqlEGJiomG323HixAlUVlbC6/UqexMREVHYs1gsLZ6WXp6x65nBZZXCSV1dHQ4e+B5utwcpKano1r27ukubhbIUgsPhxpJX9qO4uBopKdGY+8ehMJn06m6dns1Wi2ef/w4VlXUteh2lZU6UlTnF1xkD4gEAJSU1eGXFAZTK2hY9MQrRUQbxtRqXQghfiuCOiIiIwpO0zh0Asc6d1+uF1+uFx+OWrXnnAeBr9oO2WnsFd9VlJ/D2vLEoP1EIAIjvnY5pi3MRndBb3VUo/Hodsh+dJL6+8OrbcP3/Wym+bm6f6ucDwJWzn8HIX/9ZfL3zvb/jqxUPiK/Vx5DIjyXvcyr/G7z34DVwVtlE38b2Ie8rnYfLUY3shZNRvOdLdfcGr6c9VVVWYv1/P0FFeTkAQKPRYPiIkbg482fqrm3SkuAO
gAiIduadxutvHlJ3bVHQ1Bo5nxTh8/8dE19f8/O+mHRDqvj61RUHsHd/qfg6LjYC9993MSyWSKCR19KU7I+OYsPGnxTb7pszVAR4Ly//Ad8fqB9aJW8LhsFd+OJsmURERNRmPTOGi0druRzV+GTRdADAnW8fx51vHwcAvD1vLKrLTqh6+53K/waf/mMmLrz6Nty/3ospj+Xg+y9ex6f/mAWEsE/p+SmXjMd9aytx39pKpFwyHl+teAA73/s7IAvsrpz9DO5f78WMl3ah8OuPxDHkcv/9kAgiJdVlJ/DRk7fAGJ2AO98+jvvXe3Hh1bcpzlPiclRj02sLFEGgnHSe0nkYYyyI7ZmKSFOMumubuepcWP/px6goL8eIUaNxy63T0bt3EvJ27kBBfsOAyuv1YvNXm1BS4r/GZ9M1P++Lpf+8HEv/eTnm3TMMp045sPqtfHW3diEFdrfdOhBL/3k5brt1ID7/3zHkfFIk2vfuLxXt8+4ZBmetB8tXHYDD4VbvLiR7vrPiZ5ck4r45Q8VDHrylpkQr+tP5i8EdERERdQrF321E8Z4vkTz0ckQn9EZ0Qm9cfP3tKD9RiB/+94a6OwDgm5yX4KyyYdDlUwAAKRdfhZRLxqPw649wKv+bZvfZM2M47nm/FL9Z9D8YTNEwmKLRf/g1AABr8QFUl53Ad5/+C8YYC1IzJwAAEvoMQo/0THEMyan8b1D49UfomaHMakUn9Mbtq/Jx+6p8kV2TzrfqzDG4HNWir3S+6n0EI732/sOvgcHU/h/ut2/bgoqKCvwsawQuuiQTUdHRuHLCBOh0Wuzb+526Ow4X5OPQjwdQeuaMuumsSrAYYYzUoai4CjZbrbq5TWy2WuzYeQopKdEYdmECAGDYhQlISYnGj4dscDjcmHRDKpb+83KMzOoBAEjqHYWePU04dcqBkhM1qj2GprTMiV49zcgYEC8ekk8+K8an64vRu1cU0vvHKZ5H5x8Gd0RERNQpHNySDQBITBmsboK1+IB6E6rLTuD4/i0wxlgQ3S1J0eassqG6tKTF+4RsuxSANUY6huSbnJfQIz0TWVPmKfoFUxo4hjwwczmqkffhC7jw6ttwwdhfK/obTNH4zaL/iSBU/tqloLM9nTp1EvkHf0Ryn77IHJ4ltpvNUUjum4oyqxVVVZWK53z33R4AQN+U+vLE80lFeR2czoYTEpacqMGpUw4MyohHelr7Bl/ywG72bYNw5+2DcfPk/k2WZFJ4Y3BHREREnZ46wxUKKYBqTNWZ+jFTLkc13l3wczw7UYuqM8dw39pKpI++EdEJvZE89HI4q2wo2r0BAFD200GcLtwt25N/3N73X7yOrJvuRaS58RLJT/8xC89O1OK7T/+FO98+rhjXt3vdyzhduBvDJ92teE4wpw7lofxEIXqkZyKhzyB1c5vt2LYVADBy9Gh1E/ql+cfGFcpKM4uOHEF5WRn6pvZDvMUi6332ldmccNZ6kJoSI8a4tRejUYe4+AgUF1dj3/f+MW77vi9DcXHD92bOJ0WY86ctWLr8B8y5Ywj+MLvhHxjaQh3Y9e5lhsmkx/grk9Vd6TzC4I6IiIjarCXr3BV+vQ7PTtSKx7sLft7iwK29SZmx+9d7EdO9L56fHCvG3F0950UxDu/ZiVp89ORvAEBkDKWMW8ol45Fy8VWqPStd//9W4v71Xlx8/e1YNi1ZjLmTyj/TR/8ypHGLUkayI0oyPW43zpw+jYEXDEZCt0R1M/r0TYFOp0XBoXz4fD5UVJRj+9YtMEQYcNnlV6i7nxWf/+8Y5vxpC+b8aQsWv7gPPXuaMOO3GepuLSbtc86ftuDVFQdgMukx949DkZISjdffPIQ5f9qCjZtLEBcbgbj4CBiNOvFcqTxzzh1DsHT5D1j0zz2tHnOn9u0ea4PAjggM7oiIiKgriOnet8VBTLcgpZhyMd37qjcBsnLMI998DpejWhH43b/ei/F//CecVTaRNZPGyWXddG/I55iaOQHGGAuO
79+C6rIT+OF/b8BZXRZS1q6jSzJ1ej0mTfkVLh97pboJAGAymjDs4kxUVJQjJ/t9rPvwQ9TUVOPyK65EVHRor7+9ySdUWfrPy7HgT5c0OwNla5lMeiz40yXiWFddkYSKyjpcMNAS9JjpaXEYlBHfpjF3aondjBg9smeDwG7On7Zgy/aTir50fmFwR0RERGdV+ugbRaB0/3qvGEcmBVXBxsIlpgwWpZMv/qobTuV/oyiZlI99gyyr1tw+W0qaXdMYY8G43y+CwRQtyj+zH52EZydqxbIK33/xOv41M6PRmT7lrMUH4KyyYfXdI/DsRK1YduGrFQ80yGx2dEkmACR27w6NRqPeLPwsawQGDByEUqsVPp8XI0ePRtqAtmfKOht5wBisrNJmq8XadUcRFxuBsZf2Ujcr1NZ6UFrWPhO8pPSNxvRpGUEzdhUVdepNdB5hcEdERESdgjTTpZTNkkoV43unY8jPp6PWUYXKU0WK8W/DJ90NY4xFlClKWTSpvLG5fVaXncC/ZmYoAqjGSh7l68+N+vUCUT458td/VgSrUx7LAQLr2N2+Kh+Rphi8u+DnikCvaPcGOKtsYhZPqVxTelw5+xkgsN6eFPxCNukKgpzf2aTRaHDlVeMx+w93Yvqs2Rh2caa6S6fx6ooDmPOnLWKpgua+DpW0GLmz1oPZtw0S4/tyPinCvAe2o/BwBQCg8HAFDuaXK2bYJOooDO6IiIiozdpjnTuDKRo3LPAvebBsWjKWTfNPDCEt0B1pikFsz1RFOWLPjOG4/v+twvdfvC6yZvLFwZvbZ/GejUgbeT2K93yJ5yfH4tmJWnz/xeuY8liOmOxECgBX3z1CsbB4qE7m5yF5yBiUnyjEsmnJIjN35exngi5i3hQpeJWCUzr7bLZaPPToTvzlb7vgrPVgzh1DxCyYDocbvXua0LOnCYtf3KcYAxjKYuWN6ZZgxMlTduQXlDf6+HaPFQBgNtWP+6Pzj6ayssqn3khEREThxeer/3Xv9Xrh8/ng9Xrh9Xrh8bjh8Xjg8XjgdnsA+NAt2T8jItHZUHr8SLPvOYfDjSWv7AeANgVKnUFLX0v2R0exYeNP6s0NpPSNxq2/GYA+yU1ndEuPH0FaWpp6M4UBBndERETnAQZ31Jm1JLgrLq5GSkp0SEFRZySVc1ZU1rXodZSWOVFW5lRvVgh1fTsGd+GLwR0REdF5gMEddWahBHfUfhjchS+OuSMiIiIiIgoDDO6IiIiIiIjCAIM7IiIiIiKiMMDgjoiIiIiIKAwwuCMiIiIiIgoDnC2TiIjoPNDS2TI5kx4RUdfDzB0REREREVEYYHBHREREREQUBhjcERERERERhQEGd0RERERERGGAwR0REREREVEYYHBHREREREQUBhjcERERERERhQEGd0RERERERGGAwR0REREREVEYYHBHREREREQUBhjcERERERERhQEGd0RERERERGGAwR0REREREVEYYHBHREREREQUBhjcERERERERhQEGd0RERERERGGAwR0REREREVEYYHBHREREREQUBhjcERERERERhQEGd0RERERERGGAwR0REREREVEYYHBHREREREQUBhjcERERERERhQEGd0RERERERGGAwR0REREREVEYYHBHREREREQUBhjcERERERERhQEGd0RERERERGGAwR0REREREVEYYHBHREREREQUBhjcERERERERhQEGd0RERERERGGAwR0REREREVEYYHBHREREREQUBhjcERERERERhQEGd0RERERERGGAwR0REREREVEYYHBHREREREQUBhjcERERERERhQEGd0RERERERGGAwR0RERHJ+NQbiIioi2BwR0REdJ7TaDTqTURE1AX9f23LeOaeMMrvAAAAAElFTkSuQmCC" + } + }, + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## What next?\n", + "\n", 
+ "Navigate to the [Neptune Web App](https://scale.neptune.ai/) and visualize your runs in near real-time. See how each of the metrics evovle with each training step. Filter through each metric quickly to find the metrics you're most interested in. Use dashboards to create custom analysis of the layer-wise metrics like gradients and activations. Create dyanmic charts with advanced regex searching to add new layers to charts as you update and modify the model architecture.\n", + "\n", + "\n", + "### Advanced Regex searching\n", + "Navigate through hundreds of metrics super fast.\n", + "\n", + "TODO - can we add gifs to show some cool UI things?\n", + "\n", + "### Create dynamic charts\n", + "![image.png](attachment:image.png)" + ] } ], "metadata": { From 8f98b334f89c8f71699aabe13aefd3477e2b2104 Mon Sep 17 00:00:00 2001 From: LeoRoccoBreedt Date: Tue, 18 Mar 2025 10:07:13 +0100 Subject: [PATCH 055/125] style: add colab link to intro --- .../pytorch/pytorch_text_model_debugging.ipynb | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/integrations-and-supported-tools/pytorch/pytorch_text_model_debugging.ipynb b/integrations-and-supported-tools/pytorch/pytorch_text_model_debugging.ipynb index 2d22ee5..d749587 100644 --- a/integrations-and-supported-tools/pytorch/pytorch_text_model_debugging.ipynb +++ b/integrations-and-supported-tools/pytorch/pytorch_text_model_debugging.ipynb @@ -6,6 +6,10 @@ "source": [ "# Neptune + PyTorch\n", "\n", + " \n", + " \"Open \n", + "\n", + "\n", "## Logging and Visualizing debugging metrics with Neptune\n", "**Global aggregated metrics**, such as overall loss and accuracy, provide a high-level view of a model's performance and help track progress toward the target task. These metrics are essential for assessing the model’s overall success and ensuring training is on the right path. However, **layer-wise metrics** offer deeper insights into how individual layers contribute to learning. 
By monitoring metrics like gradients and activations at each layer, we can identify specific issues (e.g., vanishing/exploding gradients) and optimize training for individual layers. This is crucial for deep networks, where different layers learn distinct types of features.\n", "\n", From ce66396f1f18b8cba4868b5bb3a791e7b6827afe Mon Sep 17 00:00:00 2001 From: LeoRoccoBreedt Date: Tue, 18 Mar 2025 10:19:57 +0100 Subject: [PATCH 056/125] style: update colab link and ending section --- .../pytorch/pytorch_text_model_debugging.ipynb | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/integrations-and-supported-tools/pytorch/pytorch_text_model_debugging.ipynb b/integrations-and-supported-tools/pytorch/pytorch_text_model_debugging.ipynb index d749587..239a59e 100644 --- a/integrations-and-supported-tools/pytorch/pytorch_text_model_debugging.ipynb +++ b/integrations-and-supported-tools/pytorch/pytorch_text_model_debugging.ipynb @@ -478,7 +478,10 @@ "source": [ "## What next?\n", "\n", - "Navigate to the [Neptune Web App](https://scale.neptune.ai/) and visualize your runs in near real-time. See how each of the metrics evovle with each training step. Filter through each metric quickly to find the metrics you're most interested in. Use dashboards to create custom analysis of the layer-wise metrics like gradients and activations. Create dyanmic charts with advanced regex searching to add new layers to charts as you update and modify the model architecture.\n", + "1) Navigate to the [Neptune Web App](https://scale.neptune.ai/) and visualize your runs in near real-time. See how each of the metrics evolve with each training step and epoch. \n", + "2) [Filter through each metric](https://docs-beta.neptune.ai/charts#filtering-charts) quickly to find the metrics you're most interested in. \n", + "3) Use [dashboards](https://docs-beta.neptune.ai/custom_dashboard) to create custom analysis of the layer-wise metrics like gradients and activations. 
\n", + "4) Create [dynamic charts](https://docs-beta.neptune.ai/chart_widget#dynamic-metric-selection) with advanced regex searching to add new layers to charts as you update and modify the model architecture.\n", "\n", "\n", "### Advanced Regex searching\n", From 5eda85665d66f18ef048c3d19b9a6b59afbfad59 Mon Sep 17 00:00:00 2001 From: LeoRoccoBreedt Date: Tue, 18 Mar 2025 10:34:09 +0100 Subject: [PATCH 057/125] style: update intro to notebook --- .../pytorch_text_model_debugging.ipynb | 85 +++++-------------- 1 file changed, 23 insertions(+), 62 deletions(-) diff --git a/integrations-and-supported-tools/pytorch/pytorch_text_model_debugging.ipynb b/integrations-and-supported-tools/pytorch/pytorch_text_model_debugging.ipynb index 239a59e..6e10b7f 100644 --- a/integrations-and-supported-tools/pytorch/pytorch_text_model_debugging.ipynb +++ b/integrations-and-supported-tools/pytorch/pytorch_text_model_debugging.ipynb @@ -20,12 +20,27 @@ "\n", "This guide will show you how to:\n", "- Initialize the **Neptune Run** object and log configuration parameters\n", - "- Create a **class** to hook layer-wise metrics\n", + "- Create a **reuseable class** to hook layer-wise metrics\n", "- Log **aggregated metrics** such as loss and accuracy\n", "- Log **debugging metrics** per layer during model training such as;\n", " * Activations\n", " * Gradients\n", - " * Parameters (Weights and Biases)" + " * Parameters (Weights and Biases)\n", + "\n", + "There are also several other metrics that we can capture to understanding our model training in more depth, but we will cover those in another tutorial.\n", + "\n", + "### Key metrics to capture from each layer:\n", + "\n", + "| **Metric** | **Demonstrated in Notebook** | **What it Shows** | **How to Capture** |\n", + 
"|-----------------------------------|--------------------------------------|--------------------------------------------------------------------------------------------------------------|---------------------------------------------------------------|\n", + "| **Activations** | Yes | Provides insight into how the model is processing data. Dead or exploding activations can indicate issues with training stability. | Use hooks to capture activations after each layer. |\n", + "| **Gradients** | Yes | Essential for diagnosing vanishing or exploding gradients. Small gradients may indicate vanishing gradients, while large ones can signal instability. | Use hooks to capture gradients during backpropagation. |\n", + "| **Weights and Biases** | Yes | Tracks how the model’s parameters evolve during training. Large or small weights may indicate the need for better regularization or adjustments in learning rate. | Extract directly from the model’s parameters. |\n", + "| **Loss** | No | Identifies which parts of the network contribute more to the overall loss, aiding debugging and optimization. | Monitor outputs from each layer and compare with the target. |\n", + "| **Learning Rate** | No | Helpful if using techniques like Layer-wise Learning Rate Decay (L2LRD). Tracking this can provide insight into the layer-specific learning rate. | Manually track based on optimizer settings. |\n", + "| **Output Norms** | No | The L2-norm of layer outputs can highlight issues like gradient explosion or vanishing gradients. | Compute the L2-norm for each layer’s output. |\n", + "| **Activation Distributions** | No | Helps diagnose saturation issues, especially with ReLU activations that may lead to dead neurons. | Visualize or compute statistical summaries using tools like matplotlib or seaborn. |\n", + "| **Feature Maps (for Convolutional Layers)** | No | Offers insights into how convolutional layers detect specific patterns in the data. 
| Visualize feature maps after convolutional layers using libraries like matplotlib. |\n" ] }, { @@ -58,18 +73,9 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 3, "metadata": {}, - "outputs": [ - { - "name": "stderr", - "output_type": "stream", - "text": [ - "c:\\Users\\leo.breedt\\miniconda3\\envs\\neptune_scale_py_312_base\\Lib\\site-packages\\tqdm\\auto.py:21: TqdmWarning: IProgress not found. Please update jupyter and ipywidgets. See https://ipywidgets.readthedocs.io/en/stable/user_install.html\n", - " from .autonotebook import tqdm as notebook_tqdm\n" - ] - } - ], + "outputs": [], "source": [ "# Import libraries\n", "import torch\n", @@ -152,7 +158,7 @@ "source": [ "## Create DataLoader Objects\n", "* To execute the models with PyTorch, we convert the training and validation datasets to tensors and then setup DataLoader for easier batching in our training loop.\n", - "* The model architecture requires the vocabulary size as an input and this we calcualte the max token from the dataset." + "* The model architecture requires the vocabulary size as an input and this we calculate the max token from the dataset." ] }, { @@ -182,37 +188,6 @@ "print(f\"Vocabulary size: {vocab_size}\")" ] }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Importance of Logging Debugging Metrics\n", - "\n", - "Tracking layer-wise metrics during training of foundation models is an important aspect of understanding and improving the performance of large-scale neural networks. Tracking layer-wise metrics is imporant to;\n", - "1) Understand the model behaviour at different depths\n", - "2) Diagnose training issues (vanishing/exploding gradients)\n", - "3) Model interpretability and debugging\n", - "4) Layer-specific regularization\n", - "5) Transfer learning insights\n", - "\n", - "However, one drawback is the overwhelming amount of data generated that needs to be logged, tracked and analyzed. 
These models can have hundreds to thousands of layers making the data difficult to interpret. However, with Neptune this is not a problem. \n", - "\n", - "In this example, we will show you that it is possible to log and visualize layer-wise metrics such as activations, gradients as well as global metrics.\n", - "\n", - "### Key metrics to capture from each layer:\n", - "\n", - "| **Metric** | **Demonstrated in Notebook** | **What it Shows** | **How to Capture** |\n", - "|-----------------------------------|--------------------------------------|--------------------------------------------------------------------------------------------------------------|---------------------------------------------------------------|\n", - "| **Activations** | Yes | Provides insight into how the model is processing data. Dead or exploding activations can indicate issues with training stability. | Use hooks to capture activations after each layer. |\n", - "| **Gradients** | Yes | Essential for diagnosing vanishing or exploding gradients. Small gradients may indicate vanishing gradients, while large ones can signal instability. | Use hooks to capture gradients during backpropagation. |\n", - "| **Weights and Biases** | Yes | Tracks how the model’s parameters evolve during training. Large or small weights may indicate the need for better regularization or adjustments in learning rate. | Extract directly from the model’s parameters. |\n", - "| **Loss** | No | Identifies which parts of the network contribute more to the overall loss, aiding debugging and optimization. | Monitor outputs from each layer and compare with the target. |\n", - "| **Learning Rate** | No | Helpful if using techniques like Layer-wise Learning Rate Decay (L2LRD). Tracking this can provide insight into the layer-specific learning rate. | Manually track based on optimizer settings. |\n", - "| **Output Norms** | No | The L2-norm of layer outputs can highlight issues like gradient explosion or vanishing gradients. 
| Compute the L2-norm for each layer’s output. |\n", - "| **Activation Distributions** | No | Helps diagnose saturation issues, especially with ReLU activations that may lead to dead neurons. | Visualize or compute statistical summaries using tools like matplotlib or seaborn. |\n", - "| **Feature Maps (for Convolutional Layers)** | No | Offers insights into how convolutional layers detect specific patterns in the data. | Visualize feature maps after convolutional layers using libraries like matplotlib. |\n" - ] - }, { "cell_type": "markdown", "metadata": {}, @@ -220,7 +195,7 @@ "### Define PyTorch Model Architecture and Helpers\n", "We define a simple LLM model architecture using PyTorch. Since this is a text-based example, we use an embedding layer, a LSTM layer and a fully connected layer. This architecture can be adjusted to your needs and increased in size when testing the workflow. To increase the size of the LSTM layers, change the `num_layers` parameter in the parameters dictionary or to increase the number of fully connected layers, update the mode architecture itself.\n", "\n", - "This section also creates a `HookManager` class that allows us to capture the **activations** and **gradients** from each layer. You do not need to update this class as it will dynamically update according to the architecture of the model." + "This section also creates a `HookManager` class that allows us to capture the **activations** and **gradients** from each layer. You do not need to update this class as it will dynamically update according to the architecture of the model. This class is also resuable and can be copied into your own code." 
] }, { @@ -468,29 +443,15 @@ ] }, { - "attachments": { - "image.png": { - "image/png": "iVBORw0KGgoAAAANSUhEUgAAA3cAAAGwCAYAAAAKQZwzAAAAAXNSR0IArs4c6QAAAARnQU1BAACxjwv8YQUAAAAJcEhZcwAADsMAAA7DAcdvqGQAAPO4SURBVHhe7N13fBz1nT/+18zO9qJd9S5LcqcZbEIHARdCCySE5C4F7sjluAvJ4XzhUi7J5XK59Fz4BS4hORKHhJoAobdACNiAC5aL3CWrWL1L29u03x+fmdHuaGVLRpYt6f18PPSw9zOzs7Mzq5156/P+vD9cOBxRQQghhBBCCCFkXuMygzuv14N4PI7+/n6Ew2EoipK9NiGEEEIIIYTMEZ7n4fP5UFZWBpfLhUgkal6FZMgK7iwWHs3NzVi5ciWcTmf2moQQQgghhBAyxxKJBA4dOoQVK1ZAlqnz6WiygruhoUGUl5dTYEcIIYQQQgg5ZSQSCfT19aG4uMS8iGTgMx+Ew2EK7AghhBBCCCGnFKfTiXA4bG4mJlnBHY2xI4QQQgghhJyKKFY5tqzgjhBCCCGEEELI/ETBHSGEEEIIIYQsABTcEUIIIYQQQsgCkFUts6WlGWvXrs1ew2RkdBzRaGzGOa88z8PjcaOwIGBeRAghhBBCCCFHtWPHDixfvsLcTDLMKLgbGR2HIFjgz/MBAFRVBcdx0/43GApDkmQK8AghhBBCyKLT1tGJ9iPdWW11S6pQX1uT1WZez263YfWKpSgsyM9ab7a9u2UbltbXoaS4yLzolEDB3bHNKC0zGAzBn+eDqhrx4Iz483wIBkPmZkIIIYQQQha88eDkUv5pUTI3TWpLpdIIhSNZbbPtjbc24UhnF154+VX09vWbF5N5YkbBnSTJUFQVqqoe17+KokCSZPNmCSGEEEIIWTTqllRh7ZozsHbNGahfUmVejPqM5XU5ls+2v761CQMDg/jwtVfj3HPOxnMvvoIjnV3m1cg8MKPgDpy54Tgc1zb68Min1mHdt941L5hl7+Kb69Zh3aceQZ950aw40dufnr6HPo11676JE300CSGEEEJIbvmBPOQH8mCz2cyLYLPZjOUn2sa338XuPftw1pmnw+fz4qwzT8cVDZfg2RdeRmtbu3l1coqbWXCnjbN7Pz+nkklBTncHOgCg5RD793060dsnhBBCCCHkeL2zeSuOdHXj03/3cbz2lzfRcrgNANDa1o6l9bVYWl9nfgo5xc04uJtTm76JdevWYd26G/CzFgAvr9cefxqPZI9FnR1Vn8GjjY1obPwuLjIvmw0ncPuTAslJtF7Ddetww33NAF7Feu3xpx86mf2IhBBCCCGLQ1VFGeqWVCHg95sXTSng98/4OdOxeet7aOs4guuv+RBKiotw/TUfwouv/BkvvvJnAMD113zI/BQyD8w4uDP3xM30Zyb6au9AY2Mj7r12om3Fnc+jsfEnuEJ7/O63WICi/5gDFRb0mJezNM/MIIe1Z6ZN6sFQZsCkpYdqbeZtT6SNTmf7uomga515Wfcj+PS6dVj3rUe012U/39xkPHn6umtxR2MjGu+5eqJt+ZfwfGMjfnI5Mvbtm/hmxmt9+qG+7PdpSo01H4OsfdP33/jJDj7Zufs0HnlID+LNx4YQQgghZOEoKS5EfW3NjNIt8wN5M37OsWze+h4Ot7Xj+ms+hIJ8VsV++bJ6LF9WD7yPwG7Pnj145JFH8Prrr0MURQBAT08PnnjiCTz77LMYGxsDAEQiEbz44ot47LHH0N5OqZ+zacbB3VwqryoH0IeOVgDLV2AFgObWDgDlKK9iPXvrX16BLz3TiMbGRjx/5wo033eDEWD0PfRp3HBfM66+x7y8HJ95TA8ar8a9jY149NZy06tfhDvuXAHgVbyiByzdf8UrLcCKO+/ARd2P4MsZ226852rg5fVaEDed7UMLqNbjVS3Iamx8Hl/Cz3CD
OcB5+RXgR9ry5cCrdx2th24KVeUoB9B3pAPACqxYPpEeyo6zpqUDKzNeq/m+G/Bl/MQ4fnh5/VGP76t36QFeHx756s/QfO297Pg03our8SrWm98bmvGz1msmjmHLz/Bl6kkkhBBCCDkhcgV2AN53j52iKNi2bRvi8Tg6OjrQ0cEGIe3atQvBYBBDQ0PYu3cvAKC5uRl9fX2IRqPYtm2baUvk/ZhxcGfuiZvpz0y9+60b8LOWFfjSjx7Fo1kBFIBLv4vGxkfxGa2IUPmSWgBAxxHWO/fXV5sBXI1rLtWW3/ooGhsb8V3t8bGUX34NVgB49S8slOp78xU0YwWuubzcSLE0tlW7MiP4nKZNr+BVACuuvgIsvCrHFVevAFpewV8z006vvUN7j+WoXQoAHejoxsxTLTd9Ezfc14wVd/4Ejz42RbC1/BpckfVa2vudzvHNOl4swG38jp6AWouVGQHlhBX40j9p6xzPMSSEEEIIIdNyogI7AOB5Hm6323js19JIfT42PzYA5OWx3sdcbWR2zDi4m1t9qP2nRjz/zE9YcHPpd9GY1Qump0lqP3e9mvHcDhxqyXh4PKquwDXLWc/Zu3owYwQ/ppTQj/4MzebnHwPrRculGYemWpTlIny3caLXTO8lzD5GE/pq70DjM8/jJ7eWTzz3sc9ogeVM6cd3IqA0jkFrBwsYjTGT6ybGTRJCCCGEkDl3IgM73U033YSLL74YN998M4qLiwEAl1xyCa644gpce+21WLNmDQBg+fLluP7669HQ0ICrrrrKtBXyfsw4uDP3xM30Z2ZY+mVW2mAG1quH7NRIg9ZT9L6U4zP/cjXrKdukpWRqvWx9D30a61/WxwA2ovGZL2GF+enHoPeETbYCK6da9D6UV5Ub6Znvn358JwJK4+exz6C8+xF8+q5XjXF9eponIYQQQshitf/QYbz+5jvo6RswL5pST9/AjJ9jNheBHQA4HA6sXr0a+fn5RhvP81i6dCkqKyuz1i0vL8fy5cshCEJWO3l/ZhzcnXh6quF0K2JOBELZPWFaimPGmDm9+MeMCpJceg2uRjN+dtfPJlIyM9Qu0R53HJpxzx3bNtD86l+11MjJvYPvl1HwZNbnCJx8fNm5M523pbVaMDkLPamEEEIIIfNYIpEEAKRSKfOiKenrzuQ5mULhMHbsajrhgR05Ncw4uDP3xM30ZzZd9J17WeD1UZb692XcoRUB+TIe6WZj7PQiH/q4tBV3Pm+Mk7von76EFVnVLHO5CNfo1Tozgq7yW3+iFTfR0g7/cg0roJJRcOTY278I3228F1e3/Aw36KmL+BKeP45USTaecPanWDga8/Fdt249Xr32GtR29AFVn8FPtAIsbNkruOaeq1ka56wHmoQQQgghJJfR0XEU5AcosFskuHA4YkRcLS3NWLt2bfYaGQ61tKF2SRWgqgDHHde/HZ09WLmclVklhBBCCCFksWjctRfjwRAC/okiIgG/D/W1NVnrtR/pwth4yHg8HgyhbknVpPWm473GnRgPBnHZJRehu6cXzS2HgXka2O3YsQPLl890INTiMqOeO57noCoqFK0Xbub/sm0QQgghhBCy2DidDkAL1vSfZCptXg2JZCprHWjj2Y7H6OgYotEYfvfw49i5qwlej2deBnZkembUc9fTO4hkKoGK8jKtI04Fx3HT+xccevr64bA7UVlRYt40IYQQQgghC15bR6fxf47jUFleBpvNmrVOOi2iuzd7SM/x9NoBQOPO3cgP+FFaUgKXy2lePK9Qz92xzSi4kyQJzS3t6OjsRjo5+a8MR2Nz2FBbU4UVy+uoKg4hhBBCCCFkRii4O7YZBXfQAjxJUmaY0AlAAQSBp8COEEIIIYQQMmMU3B3bTEM0CIIAh8MGh22GPw4bBXaEEEIIIYQQcoLMOLgjhBBCCCGEEHLqoeCOEEIIIYQQQhYACu4IIYQQQgghZAGg4I4QQgghhBBCFgAK7gghhBBCCCFkAaDgjhBCCCGE
EEIWAAruCCGEEEIIIWQBoOCOEEIIIYQQQhYACu4IIYQQQgghZAGg4I4QQgghhBBCFgAK7gghhBBCCCFkAaDgjhBCCCGEEEIWAAruCCGEEEIIIWQBOOWCO0mSzE2TTGcdMn/R+V246NwubHR+59a6devMTScMnduFjc7vwkXndvE55YI7QgghhBBCCCEzR8EdIYQQQgghhCwAFNwRQggh81BjY6O5iRBCyCJHwR0hhBBCCCGELAAU3BFCCCHz0EUXXWRuIoQQsshRcEcIIYTMQ6lUytxECCFkkZuz4G48GMQvf/0g7rnvftxz3/14461N5lWmjefnbLfJSUDnl5D5ieM4cxMhhBBC5tCc3EWPB4P4w5PP4MLzz8Vdd96B2279FFoOt6Fp7z7zqtPy5u+/gw3rG7Dz1d+jr2Un/vrgf2LD+ga0bn8NAPDWw9/FhvUNOLz9NcTDY/jLhm+w5Y2vAwDefvzH2LC+AYc2v4B4eBSbHv0hNqxvQPvOvwIA/rLhP7Tn/xkAsOkxtvzInrcBAH/+v69iw/oGjPW1a+t/ExvWN2CwfS8A4MV7/xUb1jegc+87AIDn7vkX9njfuwCA1x74Wtb+PP2jz2LD+gYMdx0CMt7faM9hAMALP/sCNqxvQF/LDgDAG9r7HTqyP2t7vYe2AwD++rv/wob1Deg5uA3I2B/9+DzxnU+y99PEAmx9/0a6W9jrP/Tf2LC+Af2tuwEAr/7y37TXOwAAeP6ez2vvvw0A8MxPPpf1+Lmf/jM2rG9A1/4tAICnf3ibtv3mrOX69l763/XYsL4B4ZFeAMDLP/8SNqxvQDw0AgD443/9LTasb8D4wBH2ej/+R2xY34DgYGfW64eGugEAf/7VV7K29+z/3I4N6xsQHR8EsvaHvd8nv/tpbFjfgGQ0CGTsz2hvKwDgkW/ciN/edSV0j3/r5qz9/9MP/wEb1jdguPMgAODBuz+IDesbkIqFAQDP/393ZD3+3b9dhad/9Flje49+86PYsL4B6UQUAPDQV6/V3l8XAOCRr9/Anh+PZC3Xj8+G9Q3YsL4Bich41vr68Xr4a9dnPV//vOke/eZH8dv/d4Xx+Lf/7wps+NLlxuMnv/cZtn/JGJDx/vTj9dBXr816/uu//nrW8/X3p3vkGzfikW/caDz+7V1XZj1f33/d7778IWP/08mY8X51G9Y34MnvfQYAEAsOY8P6Bjz1vVuylj/x35/Kevzwv3/YePzQ167D779yDQAgHhqZtP2H//3DeOhr1wEA0onopOUb1jfg0W9+FAAw2L4XG9Y34PFv3QwAiI4NYMP6Bjz2Hzexx+OD2LC+AS/87AsAgNj4UNb+peIRtv2M47dhfQMe/8+PZy1/6KvXZm3vD9/+BAAgONiFDesbjOOrL3/x3n8FAAy07cGG9Q340w//AQAw2nMYGzKOn/7+n/r+rQCAke5m9v6+8REAQPeBrVnb6z20HRvWN+CP3/k7AED/4Z3YsL4Bz/30nwEA4/0d2LC+Aa//5hsAgERkHBvWN+C1B/4dADDW15b1ev2tu7FhfQOe+fE/AgA6976DDesb8NLPvwQA6Gth23/pf9cDAHoOvocN6xvwyv13AwCGjuzHhvUNePWXX9bW35G1vv5+n/2f29nrHd6VtXywYx82rG/Axke+DwBo2/kGNqxvwJ9/9ZWs5S/c+0UAwOHtf8b/9+lleOI7nwQAREb62Pv99dcBACPdLdiwvgFv/PZb7PVad+ORb9yId/74PwCA1u2vYcP6Bmx+8v/Ler13n7gHALDz1d9lLW/f9SbbvnY8H/je/8OG9Q3Y8tS9Wa+3+amfARnHS9/ee8//ir1f7Xhuf+EB9vw/3QcA6Nz3Lp6/5/PGtWbbM7/AhvUNaHzx1wCAPW88jg3rG7Dz1d8BALY+/b9s+Uu/Ycv/+gdsWN+A7c//X9bzm7e8CABofOk3WfunPzaWv/hrbFjfgEPvPg8A2PLUvez1XnmQPf7TfdiwvgEHNj3N
tv/s/diwvgH73noSALD9+f/DhvUNaPrLY0DGtXzvm08AGcezeetLAIBdf34IG9Y3YP/Gp4CMa2nLtldyrq/fC+iv/8R/fxL/87c1xnL92p3rXgMANj76g6zt6eu37XwDALDr1d9jw/oG9Bx8D8i8V9Cu3fq1/eC7zwEZ9zr6vYZ+rdev/S/9/Ev43b9dZWxPv3foPrAVAPDqL7+MDRnXev1aqN9bPPX9W7Eh495Ev/cx7oW0a21vMyvs88K9X8SG9Q0YaGvK2p5+rX1Ju7br10r93mO8vwPI+O4f7GD3hvq1Wl/+2H/chA0Z1yL9Xka/Fv7h25/Ahoxr5wbtu1rfnr4/+vpPfe8WbFjfgOjYAJBxL6HfOzz+nx/Hhoxrq7493e+/fHXW6z39w9vwu3+7yliur69fe/X19Wunvj/69n/7/65g+xcenXStE1MJbFjfkHUv8ti3Ppa1Pxu+dLlxrRhoa8KGjGsRtP155Os3GI8fvPuDxne3/t2lXwug3Tvo1zZ9e/q1dayvHRsyrgX6d3nmtZ2cPIK54UTo6u6B2+XEimVLAQABvx8Xnn8umvbsw4plS+FwOIx1VfXYEy4Otu9BR9NGLL/wekTGBtCvPT7rg5+BJEnoa9mJjqaNWHbeNRCTMQx27EdH00acdvnfomzZWnQf3Maef8G14DgO3YfY47MHj7DnH554vpSMo+fge+ho2ojx/iOoXH0BBtr3oqNpIw6+8ywKq1egr2UXOpo2YqBtL/KrVmC46wA6mjair2UHeIsVYz2t6GjaiPBwH9KpFHpbdrD9abgZdncexvva0dG0EUMdB+ArrsGRPZvQ0bQR7bvfxHB3MwY79uFI0yYEB7tRULUKR5rY8o6mjQgN96B7/1Z0NG3E2EAn8kpr0bHrTXQ0bUTPoe2IhUbRe3gnuvdtxnkf/SIS8ShGelrQ17ITgx17IYkpDLQ2ofvAVoSGuuEtqkLr9tfQufcddO3fgshoHzr3vovuA1sRHulDXmkteg5tR1/LDrTteB1jvW3oP7wbfS07EBrugTtQht6WHehr3oGB1iakE1EMtO1B3+GdGB/ohLeoGt0H38NA625079+C8EgvuvZtRm9zI8YHumD35KNz37vob9mJ5i0vwVtYjsEj+zHUsR+RsUG48orRffA9DHXsQ/vOv8JXVImeA+9hsGMvgoNdcHgLcLjxdQy07kZb41/gLSxH9/4tGOzYh+BQL6xOH3oOvoehzgPo3PsOgoOd6Du8E8OdhzDadwT+0mq073oTg+170bX3HYx0t2C4Yz9G+9ow0n0Y3sIK9l76O9DX0ojwSC8697yNsb52pJJxpBJxDHbsQ3RsAIc2Pw+7J4DD772K8f4ORMeHAV5AX+tutDa+jpUXfhg2pxfd+zYjGhyCmE5DBfu8xcOjOLL3bTg7/Ojatxnx8ChSiRjAWdB9YCsSkXG073oTDk8AvS07kE5EkU4mwQtxdO57F4nwGHoPbcdoTys6976DZCyEVCoBcBb2R43QCA6+/SysTg86dr2JZCwESZKQjAbRfXAbxGQce998Ak6vHy1bX0YiMg5JkiFGguja9y7EVALtu96EzenBkaZNSCdjSETDAMdh23O/RDIaxKEtL4DnBbQ2vgYxGYckSRBTSePGrXnLi7BYHcYfVeKREFKJiHEjpC/Xb2xUlYcsy+ho2pi1vKNpI2wONyRJggoOHU0b4fD40bzlRQg2ttzpDSARj0KMRdj6To/x/JatL8Pu8kKSJKQTcXQ0bYTV4dKW29G85UU4vQGk0ynEw+PoaNoIu8uL5s0vQJJEdDRthNtfBDGVhiim0NG0EZ6CMjRveRGyttxXUA5JkhAPjaGjaSPGB7uwbPMLSCei6GjaCH9xNSRJQmikjz0/vxT7N/4JUjqBjqaNyC+rhSRJGNW+K/ylS9C85UWktOfnV9QjGYshPMqen19Wl7U8Fh6BmEojGhxBR9NGFI0P4uDm55GKsPdTVLMSkiRhuPswe1y9
EgfefsbYv4LKZUjEo0gl2PFLxEI48M6zSIRH0dG0EcW1pyEZiyEWYttPJiJo3vIi4tpjSUqz79ZW9l0tiSkcePsZJCNBdDRtRMWKtez9D/eio2kjxFQczVteRHCok60vi0inUxjT3r8iS2je8qKxvkWwQhJF4/ioqormLS8irC1XFBmJeBTD3S3oaNoIWUzh4ObnEdGOt6oq2vFtQ0fTRvCCFcV1p2O8r8N4LEkSBjvYd7v++QsOdGK0txWB8nokomG073oLHU0bYXW60bzlRYxo3/0Ob0C7tuzCoXefh4UX0Fz9IgY79qGjaSO8BWXs9bX1Xf4i5G9+HmP97PW9RRWQJAlDR9jru/yF2L/paYz3sffjL61FIhpG24430NG0EXmlNTjwzrMY0I53QdVySJKEyGi/sbx5y4sY6W5GR9NGFNedAUmS0LHzr3jv+V/B7vJivP8Iho6wa2fFqg9AlmT0HWbXupL6M+H2FWC48yA6mjai8rQLIEkSepsb0dG0EaVLz0Lzlon3V3vOFag/92oMadfismVr0LzlRfRp69edfTmqTrsYA21sf6tOOx8czxvPX7LmMkiSZOxv1erzYLHa0NfMrqX1a//GOL768w9sciDYfwQd+rVckoxr5ZKzLoWiyOjYza6jyy+4DslEzFi/Zs2lEFMJ41q66tKbIEkSBtrYtb92zWXgBAGyLCI42IWRnlbUpdIY6GDL68/9IGQxhZ5D7N5h9WUf1/aP3VvUn/tBQFXR38r29/Tuv0XZsnVoee8VdDRtREfTWwgP9xj3GmuuugWJeBR92r3D8vOvRTP/Irr2b0FH00YEB7tRsUrCSPchdDRtROe+zQgN92K0uwWtja9j7fWfQ37FchzZ8zY6mjai9+B7iI4PovvgNnTueRuhoR4UVJ+GkZ7D6D20HUf2vI3x/g6Mdreg++A2jPUfQaB8KTr3vouufe9ioK0JseAwOpo2oufge0hE2fWjZ/829LY0onv/FoSGujHQvheD7XsRC43AU1iJzqa30Xd4J440bcJYbyu69m3GYPtehEcHYPfkY6S7GSPdLehv2Ynx/nb0NG/HUMd+RIMj8BZVYejIfox0t+DAxqfg9Oajt2UHRnsOY7T/CGxuP/oO78RYbxuONL0FpzcfY/3tCA11Q0yz75/u/Vsx1t+Ow9v/DJ4X0Ln3XYwPdGBssBsOXyE6dr2Jsb52tO96E1abAz0H30NoqAux0CisTi+GOg8gFhzGvreehNXuRN/hXYgFhyCmkkjEo2je+hJiwWEc3Pw8kHGtkmUFiXjU6DA43Pg6LBYBB955FmIyDk6wQZIktO96EwDQuedtcDxvPF8SRXAWq/H40OYXoMiS8UeGeDgI8Bw6dr8Fh8ePA28/AzmtXYv8xVAUFcEh9l3oDpTg4DvPIhkLo23HX+ArrEA6EUciGkJH00bkFVejecuLEFMJNG99Cf7iKkiiiHQqiY6mjQho15aEdu2IBgchplNIRMPo0K5NBzY9jbySGpTUn4VjEYQ5CUMWnTnpuRsZHYPL5coK4nw+H9KiiEQymbUux7GTfbQfgKX+5BVVYMX518JiYR8Ou9sLQRDAcextubwB1JxxMax2F3vs88Odl4+09lcUb0E5qlZfAFkbt2BzsefzFisAwJNfguLa0yGmEgAAi9UKQRBgEdhyRRJRWLkcPG9hj1UZFp6H3ekFAAg2BziOgzOvAABgd3lgtU7sv93lBc9x8BaWs+crEqxWK1Ixtn+KJKK4eiWsNqe2PTvsTpfRw2N3elFYuQyyLAIA3HkF8PgLkdR6iHyF5SiqXgGLtn/FNSthtzvg8RcDAPLLlyK/vA4Wqw0AO/hWqxUR7a9YeUWVKKxaabyeYLPDZrMZqVcuXxEKKpfCYmXHg+ctsNpsgMo2V1DJti/Y7AAApycPVqsVnPb+vQVlyC+vg5iMAwAUWYTVaoWkHW+r3Yn88jo4PX4AgIXnYbXZjOe78gqQX14HPRPMIrDXj48P
seW+AuSX10MW02wFRYTNZmMfMgD+kirt/bPPpZyOwun2IaX9Vc1XXI3impVw+QsBAMHBI7DabBBsbP3C6pXIL6+DpG2/sGIprDYbXHlsfW9hOUqWrEJM2x9VlWC12eAJlAAArDYH8svrjP2Jh4ZgtdnAa5/niuXrUFS9EjqLxQKr3W583goqlyO/vA4uXz4AwOH2wGq3Q1UUcByHkroz2fY1drsTPM8hER4FAPiKK9jxT7PjnQgNw+X1G+8vv7wO+eX1xl8VLRzgdHnA6Z+nJaex46t9/qBIUMSk8VfJgoplyCuuNM6vxWKBqipsXQC+oioUVNQbj50eH1xuHwBAsNpRULkcecWVxnKrzQqHwwWL1WYs15+fTsYgJaNwef2wOlxIRoMIlNXBk18KaD1GDqcbVjv7LCqyhILK5bA52XcDx/EQBAHegjIAgNPtR0HlclisbH2Ot8Bms8OiXYjceYUoqFphbB8ArHYbSuvOZOuratb+W6w2CIIAu8sNAIiM9KCwagX8pUsAAIqqsO82hf1hK6+oEiV1Z8DuYsfDYrVDEARIKfYXa3deEQoql8PpCWiv7YbD7Ybdwd6P05eftTwZCcJqt6GgrAYAAI5HUdVK+IrZY4uF7Z+qvT5nEVC85DT4itj+e/JL4HR5UFS5HAAgpZMorlmNggr22Onxw+F2w53Hvlt4zsKOr4t9FyZCIxAEAckI+4t5KhFF8ZLTYNeWOz357Pg42fGxOjwoqFyOQCn7/HIAbDY7nF72fsDxKKhcDm8++12KhUchWK1GSreqyuz9a9+9vCDA6fJA0T6rdnceiqpWwullvzu8hX2389q1IxEeQ1HVSgRK2fHheQtbzrPf1XQynrV9p8cPp8eH8AjLHtA/n7L2XSYIbPuJyBgAQJLSKKhcDoc7DwAgpuJZ1xZZTKOoaiUKy9nnW9U+H5z25er2F6Gk9nR857d/AcCu5E6PD2O9LNvDYrGiuGY1YkGWrWB3eiAIgnE+ee34uf1FAACHdu0LatkPY71tKKk7A06f9v68AVgECxRRu1Y63CioWgG7tv8Ot0/7fLL3q3/+9PMvp5Ns/7XvDl9RFQoqlwPa8ZZlES6fH5x2fKF9v+mfD4fHD0EQIOjXQocLBZXLwWnn2+pwsd8f7fg4PQEU154OTyH7/bQ53RAEAak4uzZabHYUVa1EKh4CANhdPjicbvhKqthjuxsltacjHmbny2p3aq+vXcvyClFUtRI2G/u8Oj15sNrZ9xK0a3NB5XIoMvt98gQK2f4p2vnLK0RB5XLjXsPm9MDlzTN6qKwON4prT4dN+3125RXA6fKwv34DCJTVss9Xmp0PwcZ+fx0e7Vrg8qGk9nSktGt3ZKQXLp/fuJZ7iypQVL0KqsR+HyxWG3ieg1e7Njm9gazPt/751e+Fipecxrav30vll0IQBMja94df2z/9/PEcB4fTBVHrccsrrUFB1Qoossy2b7HA5c2DJ8DOlztQjMKqlca9ktXGvv/033+r04uCqhWwCOzehYcKQRCgatvzFpSjoHI5/MXsfPpLKtnx1z5enkAJypadbXzfub3s85VO6N+vhSipOxMWC/u8QpUhCIJxPXX5CrLenydQhMhQF2LBYQCAr6AMRUtOM+4FXN48OF0e8LwFHM+jrP4s+ArKISbjcHoDcDjd4LRroydQgqKa1SisWgmet7DPvtVqfPcINgcKq1bAqV33PfmlcPn8gPbdloqHUbzkNJQtPwc2pwcqB/A8B1li99pWqw1FNauN+0BFlmBzuuD0sWOraN9NOsHmhGC1onTJagAAB3Zt06+V8dAorDY7HG7td9Wdh+La0+ErqmC/M8f4IScGFw5HtFtxoKWlGWvXrs1eYxY89Qz7S/3NH53oDu7o7MIbb27Exz7yYQT87OYdYL12xzrhgx37EQ8No6T2dLjyCjF05ABiwSGU1J0Bl/bXxOj4oPF4pLsZkdF+Y/3+1t1IRoMorlkNd6AYA217kIiMGevr
2yutPwtOb2Biufb84a5DiI4NIFBWB39JNQY79iEeGkFR9Up48ksx2L4X8fCo8fyR7hZERvtQtnQNHB6/sbxixTrYnB6M9rYiPNxj7E9vcyPSiajxWN++vj19eWndmXD68tF/eBeSsRDKl50Du9uH3kPbkU7GjO0PtDUhERlH7RrWfa/vv/5Yf3/6/nXt2wxZSqNi5bmwOdzoOfgexFTc2L7+emXLzobDnWccT315X8tOpOLhjO2z19f3p69lB1LxCCpXfgBWh8vYvv76+mP9/ZmPl/788uVrYXd5jeOhv37n3negyJKx3Lz/+vqVq85jf33T9tf8/qtWnw/B5jCOj34+9NfX31/X/i2QxZTxWD8++vvr2P0WABiP9e0br6c9X9/f7gNbIUsilpx5Cdu+tv5Ur6efb/2xnv5bczqrpKcfD325vj/G46aNgKoa79e8ffP6nXvfgaooWHLWpQCAI02boKoKatc0QEzG0XOIpQDler4kyejex1J6ci3XH/MWATVnXJxzeefed6CqqnF8zPtvXj/XY47jseSsS5FOxtB7aDt4wWocr46mjbDanKhc9QGk4hH0teyARbCh+vQLjefbnB5UrFgHMZVAz8FtWcs7974Djreg+rQLjPWtdicqV5035f5YrHZUn3aBcfz095+IjGOgrcl4vWQshP7Du+D05qO0/kykE1H0NjfC7vKhfPk5xv44vQGU1p9lLM96f7vfgs3hRsXKcyGlk+g+sBV2lxfly9ca29cf68tdeYUoqT19yvej708yHkN/y3Y43HkoW3Y2ktEg+lt3G+8vOj6I4c6DxvP15W5/EYqXnGYcb/319Pejby8VC6Pv8M5J789qd6Fy1QeM4zXV+vHQCAY79sHtL0bxktXG6+v7nwiPYaB9j7E9/fPh8hWgpO4M4/n68dOPl7egHIVVyzHW147QUJd2g7YKwcFOjPd3GI/1a4u+vVhwGENH9k88Hh/CUOcB4/3rx0Pf38hIH0Z6WuAvXYJA6RKsW7cOT/7mf+AtKENh1Qrj2ucrqkRBxVLjsb+kBoGyWuP469vTHwfKauEvqWE9dbs3omzZ2ag+7QKER3ox2nMYBZXL4PKXIDR4BOHhHuSX1yOvuArh4R6M9raioHIZfIUVxrWusHI5vIXlCA52Yby/HXnFVcgvr8d4fweCg53GtXSsrw2hoW7kFVcjv7zOeKy/P/34FVatgLegzLh26a8/2nMY4ZHeSa+vX4v1/dMfDx3Zj1hw2FjfuHbXrIInUILQUDfG+tqM/devXfpy/fWKl5xmBMYDbXvgLSiD219k3Gvo6+uvN+W9ivZ+9GtLz8FtEFMJ4/n69vRrv76/+rVZ37+p7nX0a5v+eTCuldq1xriWL10DweHBaNdBxMOjxrVJ3z/9cW9zI3jegrJlZwOAsb/Gtch0bdWvnRPXYra+fi0036vox0t/f8b2tPX17en3Bsa9gHZt169d+uvr29P3T399ffvdB7ZCSieNa0fnvnehSOKkewf99afav9o1LNVTT//UX6/7wFbIYtq4VnY0bQTPW1BzxsXGd71gc6Bq9fnGd6n+3QIAR/a8Dd4iZF1LON6CJWdekvHdN/W1pffQdsiSiPKVH4AqpdFz6L2J717tu0u/dunfdfpj/btJv/ZAu9YLNjsqV51nrK/vj/76+nfvibRjxw4sX77C3EwyzMvgTpblib+okAWHzu/CNZ3fbzJ/0fmdW+vWrZuziczp3C5sdH4XroV2bim4O7Y5ScsMBCaCt9mgaqkJZGGi80sIIcf2xhusEAchhBCim5PgrrAgH/F4HMmM8XXhcBg2qxXOjHF4hBBCCJmevDw25o0QQgjRzUlwV11ViVg8gebDrLT8eDCIzVu346wzT88qskIIIYSQ6bn2WjYlBiGEEKKbk+Au4Pfj7z7+UWzeuh333Hc/HnzoMSxfVo+zzmAD9AkhhBAyM0NDrBIvIYQQopuTgiozMZ2Bn9NZh8xfdH4XLjq3Cxud
37lFBVXIbKHzu3AttHNLBVWObU567gghhBAyu15++WVzEyGEkEWOgjtCCCFkHiouZhMRE0IIIToK7gghhJB56KabbjI3EUIIWeQouCOEEELmoa6uLnMTIYSQRY6CO0IIIYQQQghZACi4I4QQQuahp59+2txECCFkkaPgjhBCCJmHqqurzU2EEEIWOQruCCGEkHnok5/8pLmJEELIIkfBHSGEEDIPHT582NxECCFkkaPgjhBCCCGEEEIWAAruCCGEkHno8ccfNzcRQghZ5Ci4I4QQQuahZcuWmZsIIYQschTcEUIIIfPQbbfdZm4ihBCyyFFwRwghhMxDe/fuNTcRQghZ5Ci4I4QQQgghhJAFgII7QgghZB568MEHzU2EEEIWOQruCCGEkHnojDPOMDcRQghZ5Ci4I4QQQuahf/mXfzE3EUIIWeQouCOEEELmocbGRnMTIYSQRY6CO0IIIYQQQghZACi4I4QQQuahX/3qV+YmQgghixwFd4QQQsg8tG7dOnMTIYSQRY6CO0IIIWQeWr9+vbmJEELIIkfBHSGEEDIPvfvuu+YmQgghixwFd4QQQgghhBCyAFBwRwghhMxD9957r7mJEELIIkfBHSGEEDIPXXTRReYmQgghixwFd4QQQsg89JWvfMXcRAghZJGj4I4QQgiZh/7617+amwghhCxyFNwRQgghhBBCyAJAwR0hhBAyD/34xz82NxFCCFnkKLgjhBBC5qErrrjC3EQIIWSRo+COEEIImYe+9a1vmZsIIYQschTcEUIIIfPQyy+/bG4ihBCyyFFwRwghhJDjpqoq/dAP/dDPnPzQd87kHzMK7gghhJB56Dvf+Y65aU4c68aCEELI3DF/J3PhcMT4Zm5pacbatWuznzHHJEmCIAjm5izTWYfMX3R+Fy46twsbnd+FS5IkWCwWczOg3ViQ+Y1+dxeuhXZud+3ahWXLlpubFxG993KiheO4iQcABXfk1EPnd+Gic7uw0fmdW9///vfx9a9/3dw861RVnXRuzQGd+bFuqnZyajGfX7JwLLRz29TUhPr6pebmBc8cwOkm2lWoKntMwR055dD5Xbjo3C5sdH7n1rp169DY2GhunjWZgZl+bvW2zGVT/Z/MH/S7u3AttHO7Z88e1NXVm5sXncxgz/x/Cu7IKYfO78JF53Zho/M7t05kcGcO0kRRnBTcHSvQy/WYnJpkWZ4y7ZbMbwvt3O7fvx9LltSamxc8c88dezzRW6e3UXBHTkl0fhcuOrcLG53fufX000/jpptuMje/L+ZgTH8siqJxg6gHdscK9HI9JqemhRYAkAkL7dweOnQI1dU15uYFjH2HZgZ3uQI6HQV35JRE53fhonO7sNH5nd9y9cCZg7vMwM78k+t5uRxtGTk5FloAQCYstHPb2tqKiooKc/OCYu6lyzYR1On/sv9qvXYU3JFTEZ3fhYvO7cJG53du/fSnP8Xdd99tbj4uUwV2+k9mcKcoyqTAbqogz7xNcmpaaAEAmbDQzu2RI0dQWlpqbl6wJgd6phRMU2AHULVMcgqi87tw0bld2Oj8zq3ZGnN3rMBOURRIkgSO47KCu6MFeebtkVOboijgeZr6eCFaaOe2p6cHRUVF5uYFLzvIy+ilywjueJ5j/6fgjpxq6PwuXHRuFzY6v3NrNoK7XIFYZlCn/yuKInieh6IoWT+5gjzzdnM9JqeWhRYAkAkL7dwODg4iEMg3Ny8g2d+V5p67iccTwR3H8UZwx3E8BXfk1EPnd+Gic7uw0fmdW48//jg++clPmptnxByM5Qrs9OAO2o2i+UeSJOO5epv5hoQQQsj7p6oqeJ4Fc8gI3jmOp+COnLro/C5cdG4XNjq/80tmb1pmz1tmYCfL8qTgTpZl40cURbjdbrhcLthsNlit1gU1vocQQk41+ndvOp1GPB5HLBYzvnc5jsfC6aclhBBCFpFf/OIX5qbjkhnYZQZ4emCXGcxJkoRUKoV0Og2fz4clS5agtLQUPp8P
DoeDAjtCCDnBLBYLHA4HfD4fSktLsWTJEvj9fqiqClmWKbgjhBBC5qMHH3zQ3DRtmb12mW3mlEtzL106nYbT6UR1dTX8fj8Fc4QQcpJZLBb4/X5UV1fD43FTcEcIIYQsVuYeO3OAZ+6xCwQCKC4upqCOEEJOMRaLBcXFxXMz5i6ZTOKJPz2LkdGxrPazzjwdVzZcmtU2nTEb01mHzF90fhcuOrcLG53fufXggw/itttuMzcfU2av3VRj7CRJyuqxS6VSKC4uhsfjydoWIYSQU8ucBXfPPP8Szj/vXNTWVJsXZ5nOzcF01iHzF53fhYvO7cJG53d+0IO7XL11mT11+r/pdBqBQAA+n8+8KUIIIaeYUy64E0VJm5BvaoqiahP1kYWIzu/CRed2YaPzO7d++9vf4rOf/ay5+ajMvXaZPXeZ4+syAzuXy4Xi4uKs7RBCCDk1zUlwNx4M4oWXXsWHr7saAb/fvDjLdP7yO511yPxF53fhonO7sNH5nVvHM4n5dHvtJEkyCqhUV1fTGDtCCJkn5rSgytvvbsE9992Pe+67Hw89+gckk0nzKoQQQgiZI5lBXmagpwd7gUCAAjtCCJlHZjW4Gw8G8ctfP2gEcE8987yxLBZPwO12464778Adt7M0khdfeS3j2YQQQgiZrs997nPmpuOWK8gTRRFer9e8KiGEkFPYnKVlOh0OOBwOo61p7z5s3rodf/fxj2alak4nrWc665D5i87vwkXndmGj83tqm25Kpl4d02azobS01LwZQgghp7BZ7bmbSsDvzwrsdG6XE84c7Ucz0t2Cjp1/QToZMy8ihBBCFo2HH37Y3HRczL12ehDocrnMqxJCCDnFzUlw17R3H+67/wF0dHYBWvXMpj37UFFRnjPoOxqLYIUiS+C4Odl1Qggh5JR07733mpveF3OAZ7PZzKsQQgg5xc1JhHTWGafjsksuxDPPvYh77rsf9z/wW7hcrkkTmE+HRdAuNhnlnAkhhBAyM+ZgzpyuabVazU8hhBByipuTMXczcawxGyPdzRjtOYyq0y6Ay1cAaBco7liT45F541ifATJ/0bld2Oj8zq17770X69evNzdPSU+3zAzgco23kyQJiUQC9fX15k0QQgg5xc1Jz91sstrZGADeMnEDoSpyxhqEEELIwjeTwO5Y9IBP/z/9wZQQQuaneRfcWQSWJpJ54dEvSIQQQshi8cQTT5ibCCGELHLzLrgTU3EAQDoxUS1TVRUK8AghhCwqP/7xj81N02ZO0TS3E0IImZ/mXXAn2JxARg8eAHBagEcIIYQQQggh70c4EsG7W7bhf3/5a9xz3/345a9/i+07diGZSplXPSESiSQONR82N0/LvAvu9GqZHD+x66o6uXpm3+FdkNLJrDZCCCFkofjEJz5hbnpfMnvzCCFksQqFw3j0D09i2/YdAIDSkmKk0yLefncLnnr6OUiSZH7KrGtpbcXLf34dqeMIJuddcCemEwCAdCKa0apCVSZ67iJjA4iM9GK8vyNjHUIIIWTh+MpXvmJuOm4U0BFCCPPiy39GIpHE2WediS/88z/iU397Mz7/T7ehdkkNhoZH8Oamd8xPed9GRkfR2dVtPFYV9p2saP+qqoqdu5um9V0976ZCiIdG0H1gKypWrIMnvxQAIKYS4HkeFqsdABAc7MRg+17kV9SjqHqVaQvkVHeszwCZv+jcLmx0fufW888/jxtuuMHcPKXMnjlFYWPVM6dB0H9EUUQ6nUZdXZ15E8d04MABvPPOxI1Pfn4+rr/+ejgcDhw4cABbt27FBz/4QVRVVWU9bzaEQiE899xzSCazs3b0fWhvb8/at8xlDocDAJBMJvHiiy9ibGwMFRUVuO6664Ac72v16tW4+OKLjceEkIWhq7sHTz3zPJbV1+HD112dtUxRFDz82B8xNh7EF/75H2GzaXNvz4K3392C3Xv24dN/ezPy8wPY1bQHb258B5//p8/C6XTg3S3bsG37Dnzutlvg83rN
T88y73rueItWLZO3ZLSqWWPuYsFhAEAiPJ6xDiGEELJwfOc73zE3nVTd3d3YunUrVq9ejdtvvx233norAOCNN94wr/q+dHd347e//S1eeukl8yJAC7xuv/12XHPNNVl/bBgbG4PD4cDf/u3f4vbbb8ftt9+Om2++2QjsAKCxsRFjY2PGY2S8r4qKCtx+++1YvXo1Dhw4gAMHDmStRwiZ/4ZHRgEAFRXl5kXgeR4VFeVQVRWDQyzWmC3nf2AdnA4H/vTcC0imUkaPHQAcaj6Mbdt34Lxz1x0zsMN8DO4kkf1FLhUPTzSq2SkliTD7YtZTOAkhhBByYkUiEUiShPz8fACAw+HAzTffjOuuu87o+ZIkCa+88ooRGL3zzjt44IEH8MADD+Chhx5CKBQCALz00kt4/PHH8fjjjxvLZxpMdXZ2QpIkrF692gjgXC4X7HaW5WPW3d2NlpYWLFu2LCso1LdTW1sLAKipqYEgCOjooKEfhCw0Fq2mhyznnkNbT5ecbVarFTd++FrE43G8/OrrRvvY+Dheff0NVFdV4sLzz816zlTmXXAn2NgXtJ6CCdZvx9JMJBEAIEtp1j7FiSGEEELmu5mkZM6Furo65OfnGwFbZs/a6tWrsXr1agiCgGuuucbo/Wpvbzd60goKCvD6668baZWJRAIXX3wxbr/9dlRUVKCxsRGhUAhVVVX47Gc/a6RM6vLy8nDrrbfi4osvRjKZxMDAABwOByoqKgAtbVMURTzzzDOTAsZkMolt27ahpKQES5cuzdquHnCaJRKJSSmghJD5rbi4EADQ2tZuXgRRFHGkswsWC4+iwgLz4lnBcbyRNs8ez3xM9LwL7ngtHVP/16AoSCfjEJNsHjzB5oAspY1AjxBCCFlIvvWtb5mbTiq9p05PiRwcHMQDDzwwaZybrqOjA8lkEn/84x/xwAMPoLe3F/F43KgOJwgCfD4foAVukiQhHM7I2jmK9vZ2jI2Noa6uDnl5eQCAK6+8Ek6nExdffDFuvfVW5OfnY+vWreju7kZjYyPi8TiNoyNkkSsvK0NpSTH6Bwbx0quvY2BwCJIkoau7B888/xIi0Shqqqqy0rlnQyqVwnMvvAxBEHD1B69kaYkA8gMBXHrxhejq7sHmrdvNT8tp3gV3stY7l4xN/CVNVRQoqgJZShvt+pQJerBHCCGELCSvvfaauemUUVVVhU996lPIz8+fsucLWkGTW2+91RgDd+uttxrB2PvR0dEBQRBQU1NjtDkcDnzkIx9BlXZj5nQ6IUkSxsbGMDAwYASar7zyCiRJQm9vL1566aUp98fpdM76DR4h5OS77pqrkOfzobnlMB7741O47/4H8NQzz6Ontw8A0H6kE9u2N5qf9r6817gT4UgE119zFTwetzHDm6oCa88+C8vq67BteyPC4Yj5qZPMu+DOYmVBm56eqTNSMbXCKv6SatYus2CQEEIIWUi+/vWvm5tOqu7ubjz00EPo7mblvIeHhxEOh6cMjmprazE2Nob2dpb+ZK5ImSkUCsHn86GoqOiYBVW6u7sxODhorK9755138NRTTyGZTCKZTCKRSMDhcGDJkiVGj2NmIRa9WqZ5jJ15DB4hZGHJ8/nw97d8EhdfeD6qqypRWVGOyopyXHHZJbjpxushCALe3fIeduxqMj/1uJ115um47uqrUF1VCeSYnuZDH7wCf3NFA7xeT1Z7LvMuuMuVltnfuhvBgU6EhnuMYipWhwsAkIpNL4WDEEIIIccvlUqhqqoKr7zyCh544AG88sorKCkpMVId9V40vaCKPg5PH6O3detWcByXNY7tT3/6Ex544AEMDg7ivPPOm1ZP2Z49eyYVUgmFQojH4wiHw3jooYfw0EMPIRwO4/LLL58y+NRVVVXh/PPPR29vrzFOT993QsjCJFgs+MC6c3DzR2/AJz72EXziYx/BmrPOwJKaanz8ozfAarVi49vvoqu7x/zU4+LzerFi+cR4X47jAAA8z/612Ww48/TV
RvvRzLt57tLJODp2/RX+kiUoqTsdANC8Rf/rnQqO46GqCpaceSmO7NmE/IqlKKpembUNcmo71meAzF90bhc2Or9z6+tf/zq+//3vm5unNBfz3M2Wl156CYlEImsOOkIIOVUMj4xg46Z30XDZxSgsmP3iKvF4HF3dvVi5Ypl50THNu547i8DmubNmpWWqxsBDVVUgWO2wu9kgbN5CNxqEEEIWnpkEdoQQQmZPUWEhbr7pxhMS2EGbtuV4AjvMx+BO747kLCwtU0qzqloOjx9OL5tbh9MCOo7jKS2TEELIgrRx40Zz04Jx3XXXTZpgnBBCyLHN2+AuGQ0CAFSVzWXn9OXD6nBrbayoCjiOpkIghBCyIN19993mJkIIIYvc/AvutEIqerVMfaoDDhzceWziQZfWg2e1u4z1j0ZVtGCQEEIIIYQQQuapeRfc6fSxdLIsAQAURYbFakOgrBbewnJA6+UTk7Gs5+Wiz51HCCGEzBeXXXaZuYkQQsgiN2+DOz0tk+fYW3D5CsBbrHD7i42eO1VRphW4KYpMvXeEEELmlZ/+9KfmJkIIIYvcvAzuOI6DYHcCAFIJNlO7YLXD5mRj7vRUTKcvMK35IACVJjsnhBAyr2zdutXcRAghZJGbl8EdON6YxFyRWFombxFgEazgOA4cz96WqgKyOI2CKqoKVWaFWQghhJD54Itf/KK5iRBCyCI3L4M7VVWQjIYAALw2Ya7V4QIAWKx2Yz1FkSYqZx6FqqpQtLF7hBBCCCGEEDIfzcvgjuctEOysWmY6EWVtWoEVq5auCQBuH5tYUBLZXHhTUlXICgV3hBBC5o/zzz/f3EQIIWSRm5fBHcdbjHF1siRmjavTgzxtRQBAOnH0ipmqqgJUUIUQQsg88vOf/9zcRAghZJGbl8GdIktIxVhapsViBS/YzKtoVACALCYB7Xm5qKrKArwcpmonhBBCTqbdu3ebmwghhCxy8zK44y2CMYl5KhkFkLsiplOfEkFVoUgiZGmq4iosgDNPhxAPjaKvuTGrjRBCCDkVfO5znzM3EUIIWeTmZXDH8RZwWsqlIopGcGYmWFmPXiIyjnQyPtVqRu+cufjKSE8LouODSMXCWe0AkJ7G5OiEEEIIIYQQMlfmZXCnSGmk4mx+O95igc3pMa8CZFTOFBMxyFIa6tTRHaBNZp5Jn1JBf61MnXvfQX8rpcQQQgg5OdasWWNuIoQQssjNy+DOItiMXjkxlQA3RVomtHWhFVwxp13qVGjtpvF1eo9dNDiU1Q4AiiQiGRk3NxNCCCFz4je/+Y25iRBCyCI3L4M7ziJkVMtMGz1suXAWC8RU3NycTYvp9LRMvfAKb2GvERsfyiqsoi9PH2u7hBBCyAly8OBBcxMhhJBFbuqo6BQmi0mkE1FjigNfYbl5lQkq62Vj/5+clpkZtOn/14M3Oc0KsCiyiJ6D2zKCPy19kyY/J4QQcpLccsst5iZCCCGLHBcOR4zopqWlGWvXrs1eY45JkgRByJirLocjTRvB8RbkFVVisGMfqk67EC4fq4xp1n94F+LhUZTWnwWL1QaHOy9ruaooiIdHAQCCzQG7y4t0MgarzYGWba/A6nBBTLIeuuIlpyFQVot4eAzd+zcDACpWfgCeQHHWNsn7M53PAJmf6NwubHR+59a6devQ2Dj9is4TxcNUKIoCVVUhyzJkWYYkScaPKIpIp9PgBDZunRBCyPwxL4O7zn2bwQGwu30IDhxB/dorIdic5tUAAH0tOxAdG0TFynNhEaxwePxZyxVZQm/zDngLyuBw58HhyUMyGoLFakX7zr+itP4s2Jxe9B/eCQCoO+cKJKNBdO59BwDgK6hA2fKzWa9gxmTq5PhN5zNA5ic6twsbnd+5dcstt+Dhhx82N09ppsFdXV2deROEnDDt7e30mZtDdLwXrnmZlimlEkgnY1AVmRVXmSKwg15QRZNrQvJkLIx4aBiR0T4j7VJVFaQTUQCAYLXD7vLAW1gOMRVHOhGD
lE4Zzxe1CdIlcao59AghhJDZN5PAjhBCyOIwL4M7i80Oi2BFMhoEpxU9mYrTlw9VVSClkzmDO0kripKIjBvVNFVVRUKrhClLYvak6YlwVuCXjkegKgoURYKi0Pg7Qgghc6O9vd3cRAghZJGbn8GdxQqO46EoCiz80VOABG2uu1R88kTkACBrBVFURcZ4fwdbT1WN4iuuvAIAgDdQwtYX0xBsbJsOjx+ylEZ4tA+qIk8UbiGEEEJOsE984hPmJkIIIYvcvAzuxFQcYioB3mKB1eE2L85i05ZLYjpntczMueqi4wMY7WnFSE8zosFhcLzF6LET7E5wHI9YcBhSmqVi5pVUg+N5JMJjUBUFskQ9d4QQQgghhJCTY14Gd4LNCd4iQEqnIEtHH+sm2J1sTjxVmZjQTqPIEmS9t00rhhIZ7UMiPIZULASYJkfnLQLSiRhS8QgAwO7ywmp3IRkLQVEVY8weIYQQcqJRMQRCCCFm8zK4swhWcBwHVVGOOoG5jrcIiAWH0d+6O2teOkWWwPE8OIuAgoplsDk9Wc/z5rNUTJ07UAwpnYRgZwVcOI6HLIlIJ6KIjQ1CpqIqhBBC5sgTTzxhbiKEELLIHTsyOgWJyRgkMQWO4yYFZLkIVhsUWYKUThq9btCKqKTiEXAcB6c3AKePja/T2V3erMc2hxuKLCIeGgFvEcDxFrh8BVAVGWN9bYiOD2atTwghhJwofX195iZCCDnpmlsOo+Vwq7mZzJF5GdwJdhd43sLSKqfRW5YZtCVjIeP//a27ISZj4Hmt4qZpTJ6iVc/UWbUeu1QsBFVVtSqaE5O86pOhE0IIISfaDTfcYG6aU0898zyeeuZ5czM6Ortw3/0PoGnvPvOiLE179+GhR/+AZJKNYydkOp565nn88tcPYjwYzGp/461NeOOtTVltJ5uiKHjsj0/hvUY2V/JiIMsyXnntDbzy2l8gy8cervRvX/4KauuXouHyKzA0NGRenCUej+PTn7kFn/7MLYjHWbV7s/974AGsOWct9u47+vfPQjYvgzuLYAU4Dqo6vbRMf0kNipasBgDEQyOAlpKpKjIAwGp3Adq0CQBgc3phd+chUFpjbAMAfEWV4HgLVFWFzeECx3FQZLYNYHJwSAghhCxUa89Zg2AoNOkmu73jCPx5PqxYtjSrnZDZkkgksHP3HnPzKefPf/krBgaH8M7mrdiybbt58YIkywoURdH+zbhHziEej6O/vx8A0NnVhb3H+IMQmZ5jR0anoHQyCkUrpGKfRlombxFgd3rBW6wsMAQQGZlIZ9ELoTjcPgBAXnEVSuvPhEWbRiGTPrWCXmzFHSg2limSREVVCCGEzIny8nJz05wqKymGzWpFV3eP0ZZMJtHb24eKinI4HKzaNCGzbWl9LTqOdKKjs8u86JSxq2kPDh5qMR5v2bYdb216N2udxa6tvR37DxzA3996K2qqq/HKq6+aVyHHYV4Gd4LNZUxIPp20TE6rhGmx2pBORCGLaaSTMWO5vi2LYMOKC65Hfnkd+Cnmz9N79/QJy/WpFgBAEpPGROiEEELIifT885NTIueSw+FARUU5DrdOTKaeSCaRFkXU1S4BtNTLe+673/g5Wqpm0959WWmeyWQSDz36h6znvPHWJmNbuVLzyOLgdrtRu6QGO3buNi8yHO2zp6cEP/7En7KWZz7HnDKcuey++x/IGVgqioKmPfvw/Euv4M2N75gXY+fuJuzdf8DcvGht3rwZAPCxj92EdevWobGxcVJqpp62WVu/FHfd/W9ZywDgjTf+aixvuPwK9PcPmFdZdOZlcGex2oz/cxZtvNxR6MGdIktIxsJIxkLGFAoltWegqHqlvqLxHKuDpWqalS1dA8HmgNPLgjyL1QaL1QbBxsbjiamE6RmEEELI7BsbGzM3zbm62iVZqZld3T2wWa0oKylGR2cXtu/Yhdtu/RTuuvMOfPTG67F56/acN8XT8cZbm9Db24c7bv8s7rrzDlx4/rl44aVX
aczeInXOmjMxPDKa8w8G0/nsjYyOYfWqFbjrzjtw5eWX4o03N+FwazvuuvMO3HH7ZwEA7259D9ACu81btxvb+/B1V+ONNzdO+uPCH558Gm+8tQmtbR1Z7bpl9XVYtWK5uXle23/wUFYQ/fNf/dpY9r+//HXWssyezHg8jk2b3sZpq1ejvq4Oy5YtnZSa+X8PPIA/Pf00vvbVr6CjrRUfv/lmbN6yxVi+d98+3P3lL+PCCy7A/r178MQf/4C33nrLWL5YzcvgTkxEjf/bHMdOy9RTKF2+AmOMXiIyDo63wK6lYkKb2mDi/9lz3GUqW3Y2impWAdr4v7KlZyO/nM03lIxOTIpOCCGEnChXXXWVuWnOmVMzD7e2GymZtTXV+Nw/3IKA32+s63Y5EQ6HTVs5tmQyiaGhYVxy8YVGuqc+pq+ZqvItSgG/Hxeefy6a9uybFOBP57NXWJBvfIaqqyrh83mx9pw1QEav9Pg4C94Ot7bjwvPPNbZXW1MNf17epHF/5527bsr7x/POXYsPX3c1BCF3Zth8JUkTU4wdi5xRp0JPybz00kvgcrlw4YUXIi8vz0jN1IO/mupqfPQjHwEAXHDB+bjwgguMbWzevBmhUAifve02uFwuFBcX45Of/Dtj+WI1L4M7vQAKACiyNgn5Uei/aHa3D4okIjLaB47jIFjt0w7oMtmdHmPsHcfx4CwWCDZ2sYmMsoGhhBBCyELncDjgcrlwuLUd48EggqGQkZIJrbKh/lf7+x/4LUZGj6+3MZFMIhgK45nnXpyV7ZGF4WgB/kw+e06HA25X7oytZDKJeDyON96cSAm+5777s8aa6urrluCqv7nc3IxLLroAF11wnrl5QTjrjNNx1513GD9f/Jd/Mpb96+f/KWvZ6aexjhFkBGY//NGPUVu/FDfc+BGEQqGcqZlkZuZlcJdZ6GQ61TL1dEuHOw8cb0FkbBCpeATewvKs52cGekdjESbSQgFAsFhhd/vAWwTwloX1FxlCCCGnpvx8NjzgZFu2tA7xeByHW9uNlExoqWzxeNxIo7zj9s+isOD49tnpcMCf58NHb7w+62bxrjvvwFlnnG5enSwSDocDl1x8Ibbv2IVYbKKWwmx+9vQ/YFx5+aWTPntXNlyatW46LWLPvgOorCjP6jBYWs+yuwij98rp6ZQdba3oaGvF1776lUmpmWTmphfNnGJkKWX83zqNtEw9aOMtVhTVrDIqbXrzy46r5y5zbB4A8FYbBJsDVocL6cTElwshhBByorz22mvmppOiuqoSaVHEjl1NR62S2T84hGBo6pRMn8+H4ZFRY1xU5voOhwPFxUVZBTQ6Orvwm989PGncE1lc9BTJqca5YRqfvWNZtrQuK/1zPBjEb373cNYYvnQ6jaeeeQ55Xi8+ftONuOaqvwEAlJeVIeDPM9Yjk1MydZmpmS6XC5deegk6u7rwzLPPZj3PvP5vH3wQ8XjcCBoXu1kL7vSqVvfcd3/OSU0xi1Wu7K6JXxJVq1p5VEa1TCu8BWWwu/Mg2JywudzgLdoh4CYHbdOlp3eqiopUPGxU3ySEEEJOlMyeipMp4PfDn5cHMaNKJrR0LZfLhfsf+C3uue9+vP3OZixfWo/NW7djPBiEz+dDMBTGE396FslkErU11Vi+rN5IvXzjzY1wuVixMgC4suFSBAJ+4z7ihZdexcUXnm8sJ4vXlZdfCqdz4rNytM/e8TjrjNNx1pmnG9t78KHHcObpp8GfN1G34U/PvoD8QADXXv1BcByHlSuW4aYbr8d1V38wa1sLnSWj0CHP5y56+PvfP4RQKISl9dlzYdbX1eG01avxlzfewN59+/DPt9+Oj910k5G6+Y1vfBPVVVXG+mecfjp++pOfYPOWLTjtjDNx4cWXwOv1Zm1zMeLC4YgRibS0NGPt2rXZa0xDR2cXXnjpVZSXlSIQ8GN8PIibP3pD1jp6latPfOwjcDgckx7rJEk65mDTSHAYfQe3AQCqTrsALl+BeZVJ
YsFhLS2TRyLCip648gohS2mkYmFwPD+t7RzNQNsehIa6sOSsS2F3TfzCk5mZzmeAzE90bhc2Or9zSy8dPl36Hx5VVYWiKFBVFbIsQ5ZlSJJk/IiiiHQ6jbo6SiUjc6e9vX1ef+YOHGrG6pUrzM2nrBN5vA8eagHPc1ixfJl5EZkD7zu4SyaTeOJPz6KiohxXNlyKN97aNCm409c568zTjdz4XG0AIIrSMTvQ4uEx9Dez8rTVZ14Gq33irzVTSYTHjDnq4qERgOOMYC6diMFiEWCxTZ60fCbCw70YPrIX4DhUn34xrBlz4JHpUxQVPH+MDwGZl+jcLmx0fufW+eefj61bt5qbp5QruFMUJSu4k2UZoihCkqQTduNHSC4nMtggk9HxXrjed3Bnliu4Gw8G8adnX8CVl1+G2ppqo/2pZ55HIODPGpA6nb/8ppIJHNn1BgCg7pwrsqpnTiUVDxu9afHwKCxaEZTZlIgEER7pQXDgCMqWnQNfYTlkMZVVAIYc23Q+A2R+onO7sNH5nVuXXXYZNm7caG6eUq7gjnruyKmCgo25Rcd74Zq1MXdHEwyFIYrTGBs3TRbBavyfmyKf10yfugAABJsjayL02cJnpHaKqTikVAL9rRODvwkhhJDZMpPAjhBCyOIwo+BuPBjEL3/9oDGYearCKXNCy93MDNqOJnP6ApvddUKCO3AcBJsDvGBFaKgbI90tiAWHkYoff4UmQgghJBdpBpMHE0IIWRxmFNwF/H58/p9uM+b3MBdNmYo/zwerdXZTdWY8n1zmQD6Om/acdjPB8xZY7U5Y7U4osgRJm3KBpkcghBAy284/nypFEkIIyTb7EU4OTocDNqsV4fBED1YymUQ8Hj/uSSUtgu2EBGjvh9XuhGBzwBMogSymkNSqckbHB82rEkIIIYQQQsismpPoyOFwoKKiPGsCyHe3smqXK5Zlz3ExXaqqsrnpTiVa76CvsAIAIGs9d2IynrXaqUCZzvyAhBBCTllUvIYQQojZrFTLbNq7D2+8ucncDEEQ8OHrrjYqZL7x1iY07dkHAHA6nfi7j38UAb8/6znTqbYmSRL6D+9EOh5G/dq/MS8+JXTte9eYT8/u9mHJmRMVQU8F6WQcVrsT3LHmnTgJpvMZIPMTnduFjc7vqW2m1TI5YXpj2gkhZLGprWYdOaeiWQnuZtN0bg4kSYKYiMDu9IDPqJx5KomHxzDcdRAcOCSjQSw//1rzKidVOhEFL1inXZAmk5ROgeNwwqZ4mM5ngMxPdG4XNjq/pzYK7gghZHacysHdnKRlnhhcdpGUU4zN6UZR1UpYrHaoqgJFEs2rnFSqqkKRjy81U1EkpBJRczMhhJA5RAVVCCGEmM3b4M7mODVTCnWC1Q6LYIUnUAwAENNsrOHJMimQU1WoipzdNk2qokBVFEiphHkRIYSQOSLRVAiEEEJM5m1wZ7Hapz2B+clidbhgsbG0lmQsaF48J1RVNYKx7PbJbdOlqux5+lQPhBBCCCGEkJNv3gZ38wFvEcBrAaiUmvueO1VVoaoyVEU2ArLMZYqpTZfVy5cjfVNVtHEbxxkcEkIIef+2bt1qbiKEELLIUXB3gjnceQBwUubkU1UFqqwYA+czJSLjwBTBmSSmjP8rijzpuSrY88wBIyGEkLlDxWsIIYSYzX3EscjwFgEczyMRZdMizClFYQGemt1zl4qFMdx5AGP97ZMDN1WBLE6kW6qKPLmH7ig9d1MVjjH3/hFCCHl/LrvsMnMTIYSQRY6CuznAcTykOS6ooqddqooCRWYBmh7giWmtEIqa3fumKJK2voRkNAhFkbVeP3NK58TjzABPVVkvYS7yFEEfIYSQ4xOLxcxNJ117Wyu++uW7MT5+7D9ojo+P46tfvhvtba3mRbMqmUjgv/7zP9C4/T3zovftF/97L/7rP/8DycTsFhhrb2vFbbd+etb3ea6OOSHk5KHgbg7YXT5z0wmnyJJREVNVFTZ2TpKgqgrioRFAm+su
MziT0ikoWgVNWRIhi6lJ4/VkMY3hIwcw0LqbpWxmVNxU5ezHmVRFpt47Qgghc66vrxdulwunn36GedGMjI+P47/+8z+yAtcv/Ot6/Od//TccTmfWujPVuP09/OJ/7zUe19UvxYMPPYp1534ga733q631MKqrq1FXv9S8iBCyQFBwNwcsFmHOe64UmQVyitabxnrVWA9eOsH+2ium4sb4Oj0dU5UlSCLrZZTSaRbsZQSA8fAIUokIJDGF6NhgVuCXjIURCw4BOdIwVWWO5vozpZkSQshCtXHjRnPToqb30H3xjn/OCsD27t2DZStWvO8AbCHYtnULzjv/AnMzIWQBoeBuLlgsEJOxKXu1TgRFFpGMhpCIjCMRGsV4f4c2fk5m+Zia2PggFFlivXqKgoG2PRhobYKYjEORRchiGnJGoJaIsCkdbE4PouMDRhqmIksY6tyPwSP7IYkpKHL2e42HR7O2o8ss3nI8MscMyiILRufiOJuD11zmOqCfdRQoE3JKc7vd5qY5Nz4+ji/e8c/4+E034rZbP43W1ux0Pz3g+vhNN+LjN92Y1Tula21lKYgfv+nGrMAsmUjgf370g6wUwsbt72WlQWa+/i2f/jsUFhbi5/f/HwKBgLGNw83NOOOMM41tPPfs08b+3HbrpyelKP7if+81luv7097WirvWfxH79u7B7f/4D8b7eO7Zp/GL/73X2I/MNEr9vT/37NOTjkPme3ju2afxox98D2+9+Vd8/KYb0bj9vZzpk5n7bQ5gn3v2afzPj36Q9RrmlM7x8XGMjIygfumyrPap6Mf6f370g6xtNm5/L+f7gOl8fPymG/Hcs09PuSzzs6C/38cfe2TKbRNCpoeCuzmQV1gJAEjFI+ZFJ4SUSiAZC2OwYy9Gug5hrL8N4ZFeKLIMRZGRjIVhd+fBandhvK8dyWgQ6UQUkbF+pBJRAMBITwsAYLjrEAY79kFKJ6EqCuKhYQAc3P5iKJKIke4WJGMhiMk4pHQKUFWtR28iwErHIxg6sh/DnQcBbWyfLtdYxFyFWqYia89XVQWyLEKR5ZxB5GybTi/kfA/ulIxzSAg59Vx11VXmpjmVTCRw38/uwQev+hCefPo5/PL/foMtm99FLMauI8lEAj/64fex5uyz8eTTz+HJp58DtEBEF4tFsWXzu/jl//0GTz79HD541Ydw38/umdZNvf76/3DbP+LJp5/DAxt+h4MHD2QFNYlkErF4HIH8AkALWHbv2oWHH/0Dnnz6OXzhX9fjnp/+xAiU9H3T9/cfbvtH3Peze1BeXoF77v05Tj/jTDyw4Xf4wr+uN14DAAKBAP7htn/Etq1bjDY9HfRDH7oGG37zQNZxKCwsxIbfPAAAuPEjN+Gr//4NNFx+BZ58+rmcqZjPPft01n7fdfeX8cPvfzcrwNu/fx9uufXv8eTTz+Gr//4N/O7BDVnLx8dG4Xa54HQ4jLZj2bd3DxquuNLY5o9+8D1s27oFTz79HB5+9A8AgD//+RVAC9D+4xtfM87Hw4/+Abt37ULj9vcmnauHH/0DRkZGJn0WxkZHc277WP7jW/953D+ELDQU3M0Bu5uNuYuM9ZsXnRBDnYcw0NrEgjlZMgKRZDRoFEzhOA52Tx4kMYXI2ABCwz0IDhyBLKZgEWyQxRSGOvYjFQshFQshEQlqvWwc7G4f3P4i8BYB6UQEQx37IaYTRlCWjAazetTSSZYGyvHs4xYZ6QO0XjdZTLOxgaoKKZ2ALIlIJSYHwbkCPlVVjInUjfeqSJN6DU8E81yAuUwnADxRptOzeCzqFMcx17k4Edg8jXPzWoTMR2NjY+amOdXX14t4PI5LL7scAOBwOnHLrX8Pt9tjLNeDG901116Hze++awQdbrcHt9z690bK5KWXXY54PI6+vl7jOVNJJJOIx+PIz88HtABr1arV6O3tMdZpaz2MwsJCoydv3bkfyBojV790GdxuD8bHRo1evmuuvc54/rpzP4A7v3SX8fho6pcu
Q1dXl9HblpkO+oV/XY8bP3KTse5551+AkZGRaQexu3ftwnXXf9jY77r6paiursamjW8a66079wPGWLrM96U7nvTU08840xirWL90GerqlxrHx+F0Ys3ZZ6OnuxvQjvWqVauN4NThdOK66z+Ml158AQDwn//131nLMp8L7bMw1bYJIdNHwd0csNqd4Dh+7nruxAQAVkwlUzoZRSw4DFWR4Q4Uw19cDcHuRGS0H5HRicDTW1gOm8ONdDIKgAMADLQ3Yby/HelEFFY7uzDYnF4ko0EkImMIDXVDkVkwEx7uQe/B97SgTUE8wm5ApFQCYiqOgbY96G/dDVULQGRJhCJLSCdiEJMxKJKYFUCkkzEkYiwdNJMiy1Aktg1VkaAoMkIDnQgNdZlXnVWqqmZVBZ2qp3A2AqzjZT73x0MvrpNJH7s5J1R1zgJJQsjMjY2NwXWUnqCxsTFs27YVt3z674xUu69++W6jZ28qsVh0WoGr0+GAy+Uy1h0fH0dXV1dWCqZ5jJk5PfL2f/wHDA0OAFqwODzCCo5lCgQC0wqIAoEAqqursXfvHoyPj2Pzu+8a+6JXv9Rf90c/+J756VMyB7HHI1d66kw5HQ4UFRaamw29vT1Gammu95mZzvnxm27EIw/9Puv5hJDZQcHdHHF6A0YhkxNOm4fOTJElBAc7wfEWuLzsIlFUvRKymDICMwCwOdworj0dlavOQ+WqD8Ai2KBIIsYHjkBVFRY4cEDm2L3oGLs4QgsA0skY0skYZFFEaJAFW+lUHMkoC9J43pJRmTMNRRZZT57W25UZGEmpBKKjA1nz70EL6FRVgZROsukeZBnjQ10IDWX/pU9VFaRiYUCrCPp+ZVYQVRUFakaaqbGOqs8xeHKCE3mKXreZyNVzp8oyMEfviR2/3J/l43Eyg21CToTXXnvN3DSn8vPzEY/HkUhOTq+Htvy88843Ugn1n8wxcbm43Z5pBTIOpxN3fuku/O7BDUagduFFFxm9V7nGmP35z6+gsLDQ2JcHNvwOxSWlwDSCl+k47/wLsHvXLuzftxculwvl5RVIJhJ4+KHf4wv/ut543a/++zfMT52SOYg9HnpPaHl5hXnRrKmoqDRSSzN//vO//huJZBJPPvFH/OgnPzXaP3Pr35s3cdz++zv/ddw/hCw0FNzNEbe/GGIyhsgoS0k8kdKpeNZjq8MN3mJFZLQfYjIGm9MDXrACADyBEiNt1OHxg+N4+Etr9A47AEB+5TIEyuoAVYXd5UNB5Qo4XHnwFpYDAFx5ExfDoprV8JfUAFrAl05EjJtqjuMQHRsEAKSTcSiyBFkbt2e+8dbH7KmKgtBwD8b72zHW15aV6qjIMiQxhdHeViRjIVZMRZYgiylI+lx+2pQPkphCLDSMjqa3kIqHc471OxYpxbapKLIRdKiKDFnrPcykB0Ynq+cpV8A5U7l66HLNe3jCzHJapp62S8hCMZ0A6EQqL6+Ay+XKSg3cu3eP0TNXXl6BWDyOffv2Gsv1AiS6WCyKvXv3GI83bXzTCIocTiecLhdeefklY3nmmLZkIoEND/wKd939ZSNgyEx9NKdk5tLWetjouXM4nVi2YgUefuj3Rrpk4/b3JhUvOZr6pcsQj8fxh8cewZqzz56yxy/zfRyLnqL40osvGPvV3taKrq4uIyX2WI4nJXOmzGmp0IrTZI6r0+mppoSQ2UfB3RzxFLC/DMaCk1M+ZltmAFS85DQU1awCx3FQZAkWmwMVK9ZCsNrBWwTwFgHFNachv7wexUtOQ+WqD4DjeFjtLgCA3eVFflkt/MVVKKk7A4VVK+Dw+GCx2uD0BFC85DRUrDwXvIUFi25/ERzuPACAJKYRHOwEAPgKKyCLaWOOvUR4FFI6hdHuZkRG+9Df2sQKusQjiI4PIp1kAWp4tBfhYTZ+IhocMgq+QEvnHGzfg+DAEQy0NiERnhhbkIpFIIsppOMRo7cuMjoARRIx2L5vRsVOVFWFIkuQ9J5DVTGCTzZRPPu/
JKYQCw4DqsoK2JjSS2fqeAOb2ejxUlUVSo5tsGqkk9tPBFVVMdV4xuPBenmze38Jmc9uuOEGc9Oc0nvOXn/tz0aq3eHmZmPMncPpxFe/9nW89OILxvLdu3bhpo993AhS3G4PDjc3G8tff+3PuPNLdxlByKc+cysOHjxgLDe/vtPlwle/fHdWup8ePJpTMgHgQx+6BiMjI8a627ZuwbpzP2AEdDd+5CYUFhYaqaS/e3AD/vt7P0QgEDDST2//x3/IGbBAS8288KKLEIvFjBRIhzYWMbMK57LlKzA8PGQUDMnPz0fj9vdyVu+EVnRlzdlnG/t1z09/gq99/ZtHDVx1s5GSOR2BQABf+/o3cc9Pf5J1vj7wgfMRCATw8U/8rXGuPv/Pn8MFF15kVN8khMweLhyOGHdPLS3NWLt2bfYac0ySJAiCYG7OMp11TkXdB7ZBSidQu6bBvOh9UxUFHM9DUWQc3qZXl+JQv/ZKpBMxI1DgeB4uH6salk7GYHO4kYyFIItpODx+yFIaNodbS79UwFvYcVYVGfHwGKwON2wOFvhJYgpiMgGn14/Ove8gGQ1i+XnXIh4eRW9zIxweP5KRcQh2JzyBEoz3t2v7xfbDV1SJ0GAXeIswqeeOt1hRv/ZK9DY3IqG9rqrIKKk7A4LdiaGOfRBsDoSHe+DKK0Q8NAKO4433mV+xDKlYCK68AghWO/pbm7Qtq3B4/ChecjqcXj+gpYJarLas10fGMU3FwlpvnQKXrwDpZAxiMg53XiGkdBLpZByuvAKIqQTSiSjsbh/aGl+Hp6AMJUtOg2DLPR7laFRVgZiMw+ZkN0kzwcYvRuHw+M2LoKoqOC6jW3YKrKpqyEjf1enjRu0ub1b7dM3kd1cWU1AU2fhDw/vVsu0VePNLUbbsbPMiMktmcn7J+7du3To0Njaam6dkZBxo44ZVVYUsy5BlGZIkGT+iKCKdToMT7OZNnFLa21rx9FNP4ot3fskIBpNahc4PXvUhbH7nbdx088cX/YTd4+PjuO9n9+DOL901rWCQEHJstdUnLsX5/aKeuzlktTtZimBGyuBMsQqCk3szRG2b+tgyb0EF/CXVEGwOODx5xg29YJtIybA52BxJFsEK3iLAIlghWNnFnON4I7ADAI63wGK1wZoRqAhWO+xudpNfWL0SpfVngeN58BYrLBYBifAoOJ5H5cpz4QkUG8+zu3ws3XKwC1a7C6X1Z6G0/izkldTA6nDB5vRCkUVExwYRD43AnV8CpzcAMRVHOhlDaLAL8dAIwsM9sDpcqFp9PpzeAAvsOA7gOIz3tyEWHMJw50EMdx0CGx/IjlsqHjbSFvWxgVm04yumE5DSSW3ePjYPoKpOFKpRVRWR0X6M9LC00nh4FIosI6oVp4kHh7N633LN6advN5OqqkjFJtJZp0tRZDb+MCNtFBk3dNCC9GkVW1GUnGM3VUWe1d60o5nqs348ZEmEqshIJ+OTxm4SQuansbExxOLZwxD04iNjY6OIxeMndIzZfLFp45uT0lPNc85l/swkDZUQcuqhnrs5FBruwUDrblSsOBee/BLz4mlRFBlQFPCC1ehZUlUVifAYXHkFCI/0of/wTpTUnYm8okpj+gEpnUIqHobDnZezl0oSU0ZgNxX99XJSVSiqDJ4XIIsp9Bx6D+lkHMVLTkNeUSWkdBJtO/4CjuOQV1KD4MARcByH4tozYHO4wVsEyNq0BvHwGMZ6D0OwOSClkyipPQNiKo6xvjbkFVcjMtbPUk85DuXLzoG3oAzx8ChCg10Q7E6kYmHEgkMAx0GwOSFljEG0OlwQk3EEyupgsdoQGupGUc0qpBNR8LwFgbJapJMxWCwC4uExKIqMoY79KKhcCrvLB7vbh9HeVoQGOlF12gXoP7wLYioOT34pomMDEGxOBMprMXzkAADA5S9CWf1ZkFIJjPa1obT+LFi08Y6qqkCR2LhDm3NiMmIplUAqEWWFb3z5kNIpCLajnxtoPV3JWBhWhxuS
mITLmw9VUZCIjMPhyWPHWExB1QLzqaiqwqakiIXh9hdlLYuHx8DzfO5eQW2ajVyfL12u311FlrL+kKATUwkosmT0Espi+qjbVhQZPG8xNwPauMuO3W/Bk1+KgsplRurwVFRVAcdN8VmfRUf9nZpnVFWBLCuTzi85cfr6+lBezsY+T8dC67mDNqbrrTf/mtX21X//Rs654gghZLZQzx0BAHjz2bi7hDY1wHFRJkrRS6I2gbfMqkYqsmRUMnTlFWTdNAo2OwSbwyikYna0m33dUW9COQ48z27qLFY7ypevReXKD8ChFWsRbA5UrFiHJWsa4C+pBgD4S2thtTthd/vg8OQZAYzNyW7mpXQSdjcLqHxFleAFK+LhUaiKCk9+GSpXfsAIiqx2F3xFlQiU1KCoeiU4iwBPoMR4Lf1G3e1nPYhSOonR7haIyRjioRGMdDVj6Mh+DB3ZDymdQjIexkDbHgy274MiixjtPgxoPaOJ0AhUVcFw10GjF0wfSyilE8YYQWi9d2N97RjqOoTo2AAS4TGt948FUKoqY7jrIGLjQ8ZzFFWBlE6gt7kRkpbqOZ0eLL0HUEoljF43MZ3QPhtaFVLl2NUu04nYRKVM0+uySqG590VMJ4we5OmS0kkkoyFzM2MacyfLbAyj0Rua1TupQEpNXSRH1AL8dCJ61B5RMcV69vRxmid6rkL9DxoLgbkHmpx4MwnsFqrMCpT6DwV2hJDF7Ch362S28RYBvGBF8H3Mw6ZkzDPGUs0UJGJBRMcHIUsixvo7wPG8kXKZye7yTmu81Wyw2l2wWARwWsCnt/EWAXaXD1WrL2C9QhxLCwUAm8MDwWqHxWaHxWoHb7Eiv7weTm8AVrsTvsIKiMkYVEUyej71gFIP3jiLAIvVhrL6s1CxfC2c3nzwghX5FUsRKK9DSe3psDnciIz2sd4ZnkdwoBOACrs7D+P9HRjvb4cqs4BIT99UFAnpZAyqoiAVj4DjLUiEx4xUSyNg4DikYuGsXp/IyMRkvMlYCFKa9UipioJ0Mo7o2AAbWxhh00QosoR4aBSKlMZw1yGM9bVNTuXNEWCpWtqpqipQwW609XkA9Xn5pHTaKFZjpqenKrKEVCKK2PiQsU1oPYPQtp+LLKZnfIMf1wLlzG3qx5K1Z74+S62UUkkokohENGg8TxbTRpEbXWYQp38OFVnO2sfMwjqqqiCdiCEWGjaCLlmWpgxms0xnnRyOGdwd53ZPhmkdJzKrPvGJT5ibCCGELHIU3M0xd17hjG+As2iVGwEgHY8iGQuh99B2BAeOYLS3FWIynjWu7mTiLQIslok0OY7nYRFYWp1gZ2P3eH6iJ5HjedjdPlgEG4qqV6B06VlGQQ/eIsCbXwa7Ow/egnJ480u18X1s+3rQyvMWcLwFTo+fpWVa7Shfdg7yiiuRV1TFXkjrgfQUlCGvqAq8wILuJWdeAqvDjejYQNaUFU6tAE08NILxwSMAAG9BmbE8s+CH08PGNDg8eahcdR7snjxIYgoprXcqHh5lk7YrEhRZMubkU1UFifAoFEk0ircAQHikF7HgECKjE/MIQuvx0qdm0CmyjNHew6wXUbvP1v8QoPcwDrTtxtCR/cZz2ByBLDVUSrExhn3NjSzIHehAIjxq7EtKC/5URZkUdOg9xzO5wQ8NdaG/dTciYwNGEAqwKqvQggX9tVVVhSKLUBQFiiIhEQtCkSVjSovwcC/CIxPnTBJTWdNdyFovt5ESq/0O6gGrLImQRRHj/R3oP7zb6LFjQTibcuNoZlJ9VceO2dG/C3JNR3HKmiLoJydOe/tEkSpCCCEEFNzNPZe/iBV2yCjpPxOq1nMniSn0t+5C76H32LgdjkcsOARFFo1UxJONF6zgMsZAWQTbRGEXwQab0wOLdXKaKG+xQrA5wfMWWDLSRS2CFUXVK1FYvQK8RYDV7gSvBYscz4PjOCPgE+wswNVTPTmON6pW5pfWwhMoQaCsFgWVy1G+7ByU1a8BANSccTE4
3oK4Nq2CK68Q+WW1sDk8iI4NIB4cBsdb4CusMMaJ+UvZvH4AkF9RD7s7D/6SGnAcj/yyOkDr+QMAMRFlAZzWcyemkqxYjWBDaKgLsixCTMSQMKUqBgc72VQS4VGkE1Gk04lJ8xnqaZ9jfW3oa9mh9TROzLc32nMY6USUVf/UghEpnYKYSiCViEBVVXTt3wxJTCEeGgYAjPV1QEzGteBnopdJr5oJUyA1UWxmYuJ4s8hoP5KxEIa0cYmpaIj1jCaiLLDVAxpVNXoO9eBMVWWM93cYwauYjGPoyH4Mdx3EmPbHDWipqbIksn3TxiJC21dkBE2ydi5S8TAkMan1XqpIJ2JaTyTr6TtWr6Sspb3OhCJPBPFTmU/B3UwC+0xHO66EEEIImRkK7uaYXRtPNtZ3fH9xVVUVqixjuPMgoN2cCnYnPAWlRk+OJ3B8xVpmmz6Pni6zGAYvWGG1O2HLUebeIgha1Ux31jg/i9UKcIBV65m02l1GsMiCuuwUTWj7oDUavTbewnL4S5fAanMawaVetIOtwxnBiq+oAk5vAN7CcghWOwqqlqNixTpwvMXosdOLi1isNnAcj9K6M+DTitno005AC1pZ5U0Z0bFBBIc6IaeTcOUVwFdchXQyhpGuZgy0NWUVgQHHQUrFEY+MaQFhArGxQQwdOYDIaJ8RACRjLCC0u32sVyujZ09RFSSjLO0TAMYHO5GIBCFLaUjpJFRZRmi4xwiOdMnoOEa6W5CMjGOgdTf6D+/CWF8bUonIRICUTiGdiCEZCxu9d+lEjAVV0ZDRO6jrb92Nzj1vQ5ElODwBJGNBxEOjkNJJpGIhDHeyoE9V1Ymxg6kkhjsPYLBjH0JD3Rjrb4cspiHLEsSM9xkZ64eqFYRRZIlVx0ynjCqvRsqnFlDoqZiqwqae0MfmKQrrFdQDsJ4DWxEZy+491em9e1OZapmqTIyRnZLK9vFEmq0KqKyndebbmer4kGN74oknzE2EEEIWOaqWeRIcfu9VuP3FKFu65uhFSnJIxSPob93NKglq43Xyy+rhzi9B9/7N8ARKULHyXPPT5pWjnd9jVUTMNS9cPDwKh8tnFJNRVRXx0AgrOsPxSETGYbW7jF6+3pYdxnQGlavOM+awywx8rA4XIiN9iIz2YclZl6F5y4vgLQIqV52njSv0IhkNwubyon3nXyGLKfgKKxAZ7YfN5UUqHjZuqItqVsNfUo0jezZlvQbHW6AqslGJ0+Hxw+HOQ3R80Ag+bU4vPIFi2BxuhIZ7IKWTCJTVYejIPpTWnwlVUSDYnOAsPPqad8Dh8SMeGoHdnQdVkcFxHPyltRjva0c6GYUnUAIxnZjU62Z35yEVC8HpDSARGQfPC1iy5jJY7U5ERvuQiASNeQyLalbB6c1HIjKGRHgM/tIlAACnxw9RFHFkN6tsJ1jtKKk7E73N2+H2FyFQVoex3jbEwyNYctZlUGQZiiwiGR1HKh5FdGwA4FhgJtickKU03P4iyGIa6UQUspRmYytLa9nx1dIuFa2nfLy/AxwvoGLFWticbljtLjbpvCYRGcdoTwsAwFdUCV8hq4QlSyL6D++Et6Ac5cvPMdaHVkCH/ZuGw5O7AqcspsBZhEmf23QiCjGVgDuvkE3hkYOYSkCw2rJ6wGebLIngOC5nxdKZEFNxgBNgtU1d0TQXWUyDF4Q5qU662OnBtzrNapl1dSzzgJC50N7eTp+5OUTHe+Giq+lJ4M0vQ3R8wEj9mwlZTCMVC0GR0nB4AywFsKwGDo8PlavOQ/GS08xPWVDMN8iZ9PF8ZhzHG+Ps2GMOgs1h3EzanB5j7B4A2B0sQOR4HnaXh835Z3pdwWpHfnm9EUhbrHZ4AiWwWARYMnoQed4Cp5eNw3MFirWpGkJsXzkOHMfDX1IN3iLA5Suc2L7NgYAWFPkKK+Bw5yEZDSI42AlowZbV4YaUTmCsvx1DXQeRjIUg2B1GCmhouAeDHfvQd3gnxvuPQJElOH35sDk8SMVCLEUz
HsFg+x6kk1HYXF6U1J8JpzHO0WoEy6lYCO5AMQoqlyOvpAaKIiEWHIKYiqOvZSfG+9uN459OxKDIEsb62hEdH9TeESBJaSPo8pfUoOaMS+D2F8HpzWfTTsiiqXAMuxGNjA0iqvXI6T1uUjoBVZERD40gGQuyFF2LoKVjTqSPDh7Zj5GuQ0hE2JxNeoEcVZnYli4eHjECnMwUVL0HNB4eySpGI6VTSCdYyurRetdkWc7qudQpWkXSo/Z2aZVVjYcnIIVRzajAezz0FN/j7rk7zucR4JZbbjE3EUIIWeQouDsJHJ48qIqCyGh/1k3N0W4QdfrNsjtQgvzyepTWncmqUPLsL9+WGf7VfCHR0y7NeN4yqVfA6phIB9UncTdogaBFsBnFacxjB3mLkFUgpmzZGhRULgOfsS3937ziKrjziuDy5qO4ZjWKalajYsU65JfVwV+2xNi2Pq2DzelBaf1ZKKhYirpzroTd7UN+xVK484pQvOQ0lC1dg9K6M+ErqmRBg6qy9D2FzTOoV0rVewFVRUFM+9zkFVWidOkaeAvKUVC1HCV1Z8Lh8SOvuBrFNauzglyXrwBVq88zHuvb9eaXwiLYMNLdgmA/KzDDC1YEyuvA8RYosoREZMwoVqL3Mspi2tiPgqrlEOwOcDyPvJJqqIqM4GC3UbFTTMaN8XGpWCjnzT9vEYyxiza3DzwvIBYayQruVK1YTNbvmRbMZKYDSukEmyvSVwCLYEM8PILhrkNAxng6i2DLLtIipSCm4pDSqawCPGaqkp22qe+LudhNLipUKBnfC5mvP1tUdZoT208hqfdC5zhH0zO5QA+ZnoMHWXo+IYQQoqPg7iTwly4BbxEQHR9kqWaazN6CqYRHegFwCJQugcubb4wVAwCrw2lMDbAoTZHaxlksk6aAMPfEZfLplTAzbjj19a0OV1ZgqAdwDlee1gM4Mc6Q0wIlt78YRUtWg+d5cDwPt78QVrsDrrxCePJLjX0LlNWBF6xw5bEePDauj/VI8RYBRUtWa0VobLC7fVnjFfV57Bxev/H6eiBQVL0SrrwieAvK2RQVVivyiqvgL6mB3eVFYdUKo/onx1mMictd/kIINqcR4DrcExOXu/KKIIsphLRpHoqXnAaHOw8WwYpEdNzoYYQ2vnSkpwXR8UGIyRg4ns+aV9HlywfH8UYRFwDoO7wT/Yd3ZRVu0fGCFXaXDwWVy+HyF4G3CAiULoFgs0MWU+hr3oFkLMQKymjnMKWNR4Q2nq734Hbtd4nRC9i48grBC1aoioK01suoj2WUxFRWEBQa6sFAWxNGe1sx3t+BZDSYFXxNTJOR3XOn907q28oM3iYxVSYV9e3PUjCkF4zRexHNMuf5y9X7qPcsqtq0FZOCcPPjHBQlx/MIIYQQclwouDtJimtWQ1VkDLQ1AXpvQq6bJxNOS/PjeAsr4JGRbphZkp9MmGnAK1jtEGxO2N0TY6j0apw2hztnD6HexlsEI8jU2ziOY+OmtN5Di9UOi2CH1e40pnrQ16tefYFREEc/t8Y8gE4PeIsVFqsdHMfBrk0Qn8liye65tQgsEMwvr0PZsrMn2lxe7X1OBFkcx4HjOHgCJSiqWQWPFuR5C8rg8hWyuQU5tl/+0hpY7S7IYgq8RYA7r0jreeRYaqSYNsbapRMRJCPjGOttRSoenjQu0mp3wa3NW6j3YrKgY6JwUCa704uimlWwu7zIL6tD5arz4NCmyOAtVsiyiJGuQxjq2Ado6aWZ0sk4JDGJkR42MT20aS5sDje8heVGQKKqKhvLp/3RRZFEY25JGH9oARJaenVkbCCrgqkiS1AU1rMo6ymYigIpnWLTavR3sBWN6R7YNA+ZMgMmlkoqQxbTSGQUxzHT52M81veJlE5CllgV0KnSPTOrgGYWrtHpgamiB6GmIG1a6Z6qYhwDMjMPP/ywuYkQQsgiR8HdSeIOFMPtL4GizdV1rHE7OjmdhM3pNsZxkWPjpyjOMhXeIqCoZhVKak/P
as+s9jlJjl7DzFRQi2AzevIsFis4np8U5ACAYHdq4/wmtsdxrKeL4zg4vX6j18vu8mank+o9i3aXMe1DZjVR/f8WwWosF6wTYw+Rsb8Odx64jKke9OknbHY3BJsDVrsTVi2N1GK1QbDZWZAWYAEhZxHgyAg+fYUV4HiWRun2T67mqo/z0/9wAejHdHKPjmSa+FvvVfUUlKJ8+TmoWnUeCiqXw64V0XHlTVQsBYC4VkRFFlOsomYiCjEZg9XpgSDY4A4UG8d1rK8tu+qoxCZSV9XJqYShoW707N9qBFXR0QEkI0FWNTQyBlVVWbVURcL4wBHEgkNIxkLGBPOsaE92AMWCOy2AkiVWYCYeYt8bOebeU2QJYjIORUtHzaQHcHqwKKVTkCWJzR1omgBep2+DvXaOdfSgUFXYtBWcKbjL9RyTnD1+ZFpWrVplbjqp+lp2GD+EEEJODgruThJeEBAoY/OjBQc6kU5Gj3mDk4wGWZl3kVW3I9NztBTMnDgOVrtzUjAnWFlAdFy04imcNhffVCyClb2OaR2rNm/f0fCCFTanG4KVjQlERkA6Ve8lx/NweP2sWmLmccp4zPMTBWWsDhesNiesNocRZHLcxPP0Y+TyFcCqFaaBFrQW1axEoHwpCirqjXadJ1AMu4v1MOrBjDd/YqJ4aL16zrwCBLQ5BI3AVQuaeW0cJC9Y4fQGUFSzCuXLzjHGReo9eJnHPzTUZUzq7g2UAByHgoplKKk7E8jo3dLnTZTEFFRVgZhkhVtY76cDFitLCVVVNlWFIksY7jqI8YEjGO48gOHOgxg6sh9SOgWoE2m0YjJu9FImoyFI2nx+uswiMtHxQfS37kFSSyHNTJlkaZFsCgiApZ5mjj1ERs+b3q6oLF1UVRWoOYIwVVUz5gNMG+dl4t/MSeYVJMJjSCfYmEmduScyJwrujtvnPvc5cxMhhJBFbuq7THJC8bwAp5eNNRrrb0P3/i1Gj8JUJC09zFNQal5EZhlvmVya3RzsHQ9zimBOHAeLaT19Ggczb2F59lx6PA/OIkxMleBivWeZhVLMeN4Cm8ub1cPJcWx8IPu/xQigoAdHHIdAaS2g9ULreO05DpePTUKvBVYWwQabww2Hx58z2Basdi3Vko1dhFYx1AhO9XGHVau0XkDemOBe355FsE4ujpNRqAba30P0QjMcxxtTIeSX18Otp8MacyJO/AHF4WIputHxQSiKjPEBllLpK65Gaf1ZGWsCyVjYmMcyHmbz9wFAeLgXw10HIYkppOMsCIqHR9HbvBO9zY3ob90FRWJpnDqVTXQHAIgFh6EqEsZ6WwEAsjQROLFJ5kUj4GPj/OSJlE5VmQjq9KBQUVlwZxR2yc4cUGXJmGdQkSeKrsgiK1CjaoEhe67KCuwMZM/faU4NNb8GTAEsmZndu3ebmwg5IVLpNBp37sKWbdsxNHT0e5UT7dvf/jbWrVuHb3/72+ZF80Y8HsfnP/95rFu3Dg899JB5MSHvCwV3J5nLX2Tc2MRCI+bFWRKRMcBU6ZGcGBaBpU7ONn3S9GOZaloHM39JjTH9hWC1A1qPVuZ4QWjB0dEIVrtRDRNagKMHt5zFAj7HHGR2lxfFS05DXnGl0ZZfsRSFVSvg8heB43nts83B7vLC5nCzIDVHr/NEIMkZPYJWuxNObwGsDrfWS6caRWnYemzMqf7eOI6HYJtcVMjtLwLHW4yxjGyicjYdBsDGQLryCo1zo/cI2pwe2N15KKhajuLa1RBsbBqL3uYdiIz0wWp3weVj6aSZv5PJyBhErepnZtCiyCIS4VGEBruMYFlMxiAmo2yqBlXFeH9H1ji10ECnMY2DPnUKS7lMQ1Gyi53IUtoYI6cHbUZKpyQZvXR6wCWm4lkT25sDMSXz+doyxRhHyJYpsrZcYXMSpuPR7G0Y0z2w9cypomyZilzpt2TxSiei+O1dV+KbDRz+9MN/MC/O8qcf/gO+2cDh0OYXzIuyvP34j/HNBs74ydxu5utl
/rz9+I+NdQ5tfmHS8ns+tRQRbU7U6WxjvgqFwnjsj09h0ztbsGXbdjz6x6fwXuNO82pz6p577jGCu4MHD+KKK67AunXrsn42bdpkftqsMr+uOUgbGRnBRz7yEWP55z//ecTj7I+LLpcLv/zlL3HnnXdmPYeQ2TD5jo3MKU9Gr0ciPDbpBitTIsRu7qzaTSk5cWajly6X6QZt0w0COY4DOA4Oj59VetR6sfQAiY3V42f8fjKnfuB5izH+LpM+btCcruovqYFdmzvQV1QBly8fvEWA1e6E3TlR3TWTEUjyvDHGzeHOg7+kGlWrzkPNmZeg+rSLYKS3aumXPJ89OTjruct+bHf5UFi1HIVVy41KpHa3Dy6tYIzTG8juqdVSZ0tqT0dB5TK484pgtbtQuvRMuP1FbJ5JRUZB1TIjsLRrPaTQxgTq8/npvV2OjGA7ERmDlE5kHWNFlgCOQyIyljX/ZWS0D6N9rUaKpB5ETqR3akGXJLK0UC2YTETGWECljfPTgz4pnUQ8PIbgYCcG2/diVOsFRMa+6vQxfWpGoRdZnCjAoiiyUQhFn/bC/EcEoyKoFuSZU0WhBXd68Edm5je/+Y25ad7ra9mBn3yiGnnFVfjQP//IvDhLX8sOpGJhrL7ko+ZFWQ5tfgHbX3gAX/1TH777lopvvRJBaKg7K/ByuPNwxwON+O5bqvFzySe/krWds6/++6zldz3WalQanu425pt0WsTTz72A8fEgLrnoAvzTbbeiqqIc72zeioPNLebVoSgKXnvjTXT1TFQjngsVFRV49dVX0djYiMbGRjz88MP46U9/esKmCzl48CC+9rWv4Re/+AUaGxuxadMmbNmyxQjwRkZG8LnPfQ433XSTsU8lJSW4++67jQCPkBOFgruTTE+ps9pdUFUFsfGhrOVZwR7PQ7A7s8Y4kRPDnJI5W8w3v1OZ/uuzCpdVq8+HJ1BiBAysd4r1rtmcbCL2mbCYitDoE7ObcbzFSMXUWaw29mNhc9/lVyxlaY7HGG+o9xb6y2phc2akiXIcOLDn8ryFpWQKVq2njk0GnynzGOupmy5fASv44md/TOG0IjXQAu7MCqgcx4PjLBDszqw5Em12NwJldXB681FWfxYEq8MI1n3a/IG8YAUUZdIUDhbTH2RURYHDkwdfYYXR5vGzQi6hwS5A731TWepkdGwAqizBlccC0vG+dvQc3IbIaD8kMWXMBajIEoY7D2K48yDGeluhKgpkMW2koo73dWC48wDG+49o6ZAy4uFRJCJjGMkK9NjzoPe2aR1rkphGOhEzUin1apmy/j2VcSoyx+Ppk8fn/OPVKT7mLuc+ZzjW8hNpzZo1xv9PxByIcy2diOLVX30Fl37qa/jY135nXpxFX/eca27LWTk403DnQdScebERiNmcHiw790MY6jxgXpWYvLnpbYwHQ7jgvHNx7tqz4fV6cPWH/gYWC4/GnZPTgg81H8a+/QdPeupmTU0NysvLMTx8YvZj+/btOPvss42iRi6XC5/85CexZcsWxONxFBYW4tlnn8Wtt95qPOeKK67I2AIhJ87Ud1pkTticHhRVT1RmVDPSk1RlokCCqigQkzEINgcVUyEGFixxWjGRifn8/CXVEKx2ODz+rOkOposzF2CZ4jPH8/yUgSMvWI39me5YQ47nkVdYgbKla8BpkQLH8Ua6qR686kGb/q+ZHuAJWo+lHoQ5PH6A42ARBHgCJbC7fPAESowUTWhBJm/hjR5AI2VU+7e0/kw4tT/KCHZWxdTh8SOvqBI8b0FM63nT/3DD8Tz8xVWAFmzr2/H4S+Arqszq/XN68430a/1fRRIx2LEXMKqOWowesOBgJ7r2bcZgxz6kEhEMdx5EKh4GbxG0Akyq0XsmpZNGICJLaWMcZyoWQmS0H+Ghbujad79ppINmFm4JD3ej//BOI7hTtTF5UooFFlI6CVVlwa2Rkqqy8YEDbU3o3r/F2JZOltLTmuNzOhRtUvtcjjcIy9XbmCmz
mupc++IXv2j8X5bSrEd3HrM5PfjsPW9Mq7dr27P3I6+4Cisv/LB50ST1a6/EYPu+rBTKw9v/jNMu/RgAIBWPIBkLwVtQbnrmhOHOgyiuWW1uNkxnG/NNX/8A9h84hJrqKlxw3rlGu8ftxpKaGgwPjyAUZlkKuu07dwEA6pawgnEn25EjR8xNJ1R/f3/Onrl4PI7HH38cF1xwAVwuGlpDTqzcd2VkTjl9E5ORKxlFEhRZL7uuIhEZgyyymyDOQqeNMCzgYUFQZkVNt78IpUvX5JyTbzqOVoAl01Rj6CawZdPZD6NHWn9PGePw9PRTI+DS0zinCiwtgjYnH1vfKEjDAZUrP4DS+jUQ7A4U1axiRV4yeyY5DhxnMYra6NuY+JcFfhzHg+cFWO0uI4i1WG2sEIlW7AZaBVG9Z4EFufq6bPt6ARk9TVSRJSSjIaRieu8fB1lMw+0vhsuXD4vNbvSKJaNBo4dvvLcNYioOV14hAuV1UFUVyeg4FK13bqCtyZgEXRZT4DkedpdPm4KB9calE1GEhnsgpRIY7TkMxTSlQjw0ClVV2Otqk5dDu1mG9v7YFAtpKIqM0HC30UMoy2y6F31SeD3VdLBjH0Z7JtK79F5nnTld9GjkjJTYTGpGQZmZksXJ4wQzsQqqWuGaKQLL9+NoQenWrVsnHqgTvaULXWS0H3vffAIX3PSv5kU5lS9fi1t+8AJ+/a+X4JsNHH7yiWpc/S8/nhQYbnz0B8ZYud/edaXxudaFhntwz6eWGuvkGud3rG3MJ29tegcAcOnFF5gXYdnSOgDAoYzUzNa2DoyOjqGudgny80/udE2dnZ3o6+vDuedOBKWz6dxzz8WuXbuMtE89eDPbtGkT1q1bh+uvvx533nlnVk8eISdK7jsjMqc4bqK4QzQ4kZY53t+Brv2bIUtpxLUxNN780ilvaMnioyUsAlp1SaP9GMHPsUz3ecdKM9UDs6OlY+oyAzeO441UQE4roMJxE8VTjsUiCFmFVYwJ5fW8QS7jPeaITS0Cm1aBBXBagKgHd5yF9ZJq7ymzmEqgjN3wsN5B1luoQoXV7oInvxSFVcuNOSqNYFELDB0urxEQpmIhIxgpqT0NDm8A+eVsCgnztAWymALHccYYOW9BOVzavIHh0X6oioK4VqxJ/xdgcxHyFgvEVNwITuLhsYwiK+yPSnpwIaYSENPadAoimxpB1ebHM/4YoLDpFeKhESSjIURG+jDQvhdKxnQLkZE+pGJhKFIaocFOyGIKydhEQCamkkYaqaooSESCrEdwGoETqxI6ORjKnHx+JhRFzp4DUBvDmNYK5igKC6wnxhZOfm1MI+jLNWehTtWOaVZbjiBO702dLyKj/VmB0rEKp2R67df/jjMu/wTKl681L8rp0GYW2P3T/76N776l4stPdOHVX30la8xdcKATeUWVxpg8AHjh3omeUQBo2fqysY3PfP95/OkHf581p990tjFfSJKEgcEhnH7aKhQVsrHKmZbUVMNi4XHgYAv741IwiDc3vg2bzYorGy41r37C9fb24uqrrzaKl9xyyy24++67Z2UuyMyiKHoRl1WrVuHuu+/GLbfcYgRvHo8HZWVlWT1zl156KRobG/Hiiy/ivvvum9cVPsn8cew7LnLC8byF9UpkpLEBQHi0D7KYRiI8ishoP3iLwCr/TfPGmyx8HM9SFpERJGgLgBn0wB2vYxWI4biJHrhjmQjuOBaGaT2S7D1qvXfTxPNs6oSpsAAyI+3TvFx7Lm+ZCOKMIFP7N9c4RKfHD3egBB5/sZGWqVfpLF6yGk5fPpsChRfg1sbP+QorYHfngRessDu94HgLQiO9Rg+U1eFG2dKzjcnYPflse5njjGzGnIJsjkbB7gTHW5BOsBvMibTHibRvm8MNi82RFRAEB45kpRnqQVdouAeD7XuMIEZMJdF76D0Mtu9jk8DrPYKyiPBIL0Z7DmOk+xCgn09FRUrbVnCoS+txlI0pI1RFZj1gCuvl04Md
MZUw0jz1uQEzmYMeffqHzFRSRWJTRJgDoqznTjHmT5VlIyhVFQWyLEGVZeN4qlq1UCO4m6KXkQXCUwdeYjIxaZymTlHkSdvVz9nPf/7zibbjrDp6ssY7egvKcNdjrUbhkWONsdP1tezAYPs+rLnqFvOinNKJKDY/9TOc++Hbs8bcXXjzl3B4+5+NnrUvbthtpIPqyzv3vGOkcq66+CNZBVTqzr4cZcvORtuON4zXOtY25hNBEPDpv/s4PnhFg3kRAMDldGLdOWdjPBjEo394Eo8/8TQi0Sg+eEUDvN6JOU7nirmgSmNjIy69dHaCzGeffdbYZmZwpgduevAWjUbxyU9+Mmfapcvlwp133om2tjaMjBy9Mjoh79fkuxoy5/TCEVa7c+IirqrG+InQUA/S8QgsVjuc2l/kCWG4nD1PRuCSY0652XTsgIvLOY1CLsa+ZgRQ7CHrQcsVhE2FzzHfHTKPi5bmmdmWyeits2RX40RGj12uCqQcxyNQugQFlcthsdrgCZQawZhFsIHjeLh8BahafZ7x/LySaqMHwmK1geM4pGJhSGLaeC197CAAWO1sygqnJ2Dsu7ewHBbBBqc2NQPP83C48ybG2OXoUeJ4Hr6MSn8AkIqHER0fBAAIdhcS4TH0Hd6JmNamGx84wlI2xTRGelqMqR8AHokwGyuoB3MsbVQxwg5FEjF45ACS0SA7z9rxjQeHWSVRMZmzJyxzXj8dm39vImiKjg9AkUWjOqiqKkjGIzl77vTeN2TMIWoe+6f3TKpadVBZYtvW908P2PTvbXOwqWNB69TBnarIkNLJnD14uZ6rv/75558/0YbjqzqaK2g+lbXteAN9LTvwo4+VG71+u179PR75+g0zToMMDnQiFY/AW1BmpEdn8pfWGEMmCquWmxcDAIpqWM/QdLYx35QUF+X8ftRdeP4HsHrVCgwNj0BVFFx68QVYsXyZebVF4amnngIArFu3zryIkDk3/bslcsLo6WOKLCMZCSKdjCGViBoX8FhwCLzFioqV66aV3kYWD05LVzRjN82T2+faVPuXS2ZPGKvAOXHjf6xKm7no1TCzaTcqmT2BR7l5YYVqJl7XwlsmAr9c4wiNoJr9W1i93JgAXQ82swJXbeye/t4tVht8RRXGTbrVxnrgMgNVfRkvWI3jY3d5UVS9EoEyNrE8tOkvxGQCspiGkmO8mWC1ZxWkyT6+nDZNC+sFU2Qpa8oLVZFgd+fB4cmDIokTKaiKZBSC0cWCI2yCdlmEp6AMJXVnQlUkjPa2Qk6njRvf0Egvug9sxUDbHkTGBrWAaiJYMgKqjEBHzZi3T5FEjPW2IjI6YIwTZEEdS2vUK3tCC8JkkY0LZI9Z8GcOrtLJOMb62o2ATpHZsWDTN6iIR8bZto3eval67iYmo2ePs9+D/jjX89kcgqYeSu0Y3H333RONx1l1NHOcN/R9PYVd8smvZE018N23VJx99d/jM99/Hp+95w3YnB4c2vwCvvfhfPS17DAqY25/4YGsgiqbn/qZUUEzc/3M5cvO/RBsTg/0OewyU0fbd72J4EAnKlawm/ljbWMh4jgOV3/wStx15x34wr98DuvOOdu8yilBnzDcPC+ePg/eQw89hCuuuOK4p03YtGkTfve73+HOO+80eu02bdo0aZt//OMfUV9fj8Icaa6EzKaZ3S2RE8Koluf2QVVlDLbvNSrXscIQHAqrlmdNMk0IYApSsppn1tN1wswgKMvsZeR4C1jnGnturvd4LDlf14jnJsYqHm3b5t65zIAu1/Gd2N+Jf837wR6z1zSCOm27PG+Bw+2HqshIxcOwWO1wmP7qHyhdAn9JDQJltcY4S8HGUjEzxwey7xXVKGCSOTYQWuVQPZ2T43h48suMdFSr3QlvYTl4wYrS+jNRtuxsFNWuhrewwtiOYHMY+z3Rc8foQSPH81AVFkjpPVBOjx++okqk4mFjzBrHWyAltXRQvRCMFnDpZG078cio0aaqrEdNVVUjrTGd
jBkBSzw0guCAVi0vY1t6744RGMqs4Iu55y44cATx0DArJKMoLL1TUdBz6D2MdB3CwOFdGGzfm9GDl7vnjAVwE0FTVtqoFhgGB46gY/dbRrsuV8+dHsht3Lgxq22q9FJzcJgps1dXVeQptzFXMicL//P/fRW7Xv09vtnAZQVOxzLceRCJyDjCI32AFhCe++Hbjd6+71zjRV5xlZEKuvLCD+Nj//573H/7uqzlmSmWn/necwgNdU+MEfzB3+Pvvv1HI03zWNsgJ088Hkd/fz8GBwcRj8cxPDyMcDj8vitp6pOUf/vb38YvfvGLrPF9l156KX7xi1/gC1/4gjFeDwCNuSNzgguHI8Y3eUtLM9aund4A5RNFkiQIpjm2zKazznw00tWM0d7DxmOOF1C75jJIqQQEuzOrGuJCtlDP74mg3zRmpQ5q0vEIbCc5HUhRZMhiCla7Nvn2NM+tqipQJAmSmMyaIPz9YumOKbi1ypSx4LDx/+lQZClnuqdOFtNIxkKwOdywOlwQk3H2e5sRQKYTUaiqolWqjBvHRhca6sZAWxOgpXzphVR0qqIgHh6F0xvASHcLQkPdqDu7IasoiSuvALHxIfQ2N8Lh8SMZDaKoZhWGOw9CsDkgpZMoW3o2fEUVaNn6MniLBdVnXIxEeBQDbXvgyitE2dKzkYqHwVksE0VcOGCwbS/EVBwldWdASqcw2tOiBXETAUhecTWg7Wv4/2/vzuOjqs7/gX9mS2Ym64SwJZBAQkAQ1JSwuSBCFZdaEEsrtSB8qbaKooWfxaVWrCu1UkURpVIQ64pGImpFi2DYBKIgoAhJgAQJ2ySTdWaS2X5/zNyTe28myWSDZPi8X68Rc8+Ze+/cO0nmyfOcc6w/ie2W3mlI6N0fVbZTOH1kPwAgIXkAyk8ehUarg8dVC73BCLfLiT6DRwLQwFFZhoozP6FX+kWotp1CxeljGDjqOiBQRgpo4PV44K5zoOTQN9BodUgelAVTjAVHdm+C21Ub2BdgjkuEz+OGo7octpNHENe9L2K69Ya9sgw6vR7uulqYYhPgrqtFhNGM4v3b4KgqQ7c+AxHTrbconTxZsAexicmotPoXau6eOgQJSWkN3k+OKhsio2LgrquFVqsXy5LUOWtgiDRBo9HCXetAraMaZ4p/RG1NBQaN+QV8Pq/4A4G9wgpotDAHSm6l5+sjjBg5chTy8vKAwHtZq9PDFGOBs7oCxqhY8b6rtVfCYIwS+/T5fIHZTn2oKbdCb4qGx+NFrdMBj9cLj8eLuro6pKUFJggKkTz4CnWyEyLJ4cOHQ3rPLVy4EOPHj2+3MXXnkrTo+bmYRTPU601dT8M/PdM5Y4q1QKPVIcrSE4AGCb37wxBpgik24bwJ7Khl/Bm64JmnpoKQs6UlZZly0utq9zGDqkulzqo1p7lrKsotA2WZWp1/siQ5+XIIwdb/k2bMjLb0FIuuy0nnrNXq0K1PBpIHDa/PFGp1gaUZtCKwd1aXQ2+IRKQ5FkkDh4tgWQo09BFG6COMgeDD/3PGXeefgROq8ladzoDI6DhotFrF+F91ZikqvgdiE5OgMyivV1RcInQRkYo/RugNkf5gI5BBiu/lXx/Lesw/xbqzpgLuOgdqyk/DUVkKn9eDatspuOv8E7B4XHXwelyKsVauWoc/2NUAgH/JBQDwedxw1TnhcbtQYzuNEwV7AJ8PtpJC2ANjBd11TrFen7Pav9afs6ZCZNikrJt8zJ4UsEN2LaSSUa9bWntPVoopGysoykUDGTP/8f3loR63y18Cqi7X9PkArxfPPfecP7MnLcUQyN5JS1FI3HW1QbN3UnmpOOdOkLkjCsW8efO6dBZMKhVdsmSJuomozVr2yYY6VFR8D/S5YCS6p1yAjJETERtY+JioKY0FT0HHhJ1lGrQuuAMAf8lpK5/bKOU+233/0syaUrAVJBiUlllAI8GiVqtDr/SLEd+rX6PBrVSurdFqYTCa68femaIRYfKXb+sNkaJsUxsY
E6fTR4jMkjTWrUf/oUhMuUDMDBrXoy96pg0VwZ203h8CwWhicgYSkgdAExjXJ9pkS08YjCZEmKKgj1BmJTVa//WXZ7d8Xo8/wAsEI4ZIM3SGSBFMSWWl7rpaeALbTh3ej6L9W1Bx+hgc1Ta4au1ifTyf14NTh/eiuuwk3IFZPOvsVbBXlqKmshTOapsI2nxeD6rKTqDadkqUwrtrHf7tpSdE0GSvOANfYPyftLi7NGGMf1uZIqiDLAiUstfuuvqST/kMmNLz6pz+4NReYRWTnNQ56gNIxYyZPh+8Pi+uvPJK/+QxUkDmq59URQSPIrCtf748mLNX+ktO/X2VQTpRZ7Rw4cIGM1d2NWazGcuWLUNeXt45ydpReGvnTzbUVhqtFhGmKGh1ekSoxsgQtUSwwOGsa8GYuwba8txGaPwD+eq/7oj9AxBLUQQJzjRaXWDCGFl/FSm7Fuz5kI0F1Gr9E7JotP4FyZXBlk6UdEYn9PJn4LQaxCQmoWf/oWJcnE4fAa3OPzlLpCkasYnJMEbFieuk0Wqh1RlgjIpDhCkKEeYYmKIt0Oq0iJRNFCFlAgFAr4+APsLUIJDUBSZlka/JaAosGSHxZwUtqHNUwRUoP0QgyJMul7vOAXetA+WnimAt/hGnDu9TlKUCQMXpYvH/1bbTKDtegDNHf8DJwr2inBIAbCVHANnYMymgk9b7i4yKU2THpGssnxQlwhjVIJiSJnXxetw4kb8HJ4/4x+b5xwm6AXWmT/oXPng9/uN5PS6cOrIfZccLlcGZf8VyPPTQQ/B5PfUBnc9bP7mLtE3KOMomp5GCR3tlGSqtx+GotvkDQ68HrVlOQZI0cLh4EBHRudG+n2yozQxGf0kVUbhoLEBpjkajafVzG6WagCbUZRpCJTJ2jQRtEimT1hhFWWcQ8sBdukby4Arwv9bIqFj0TLsI5jh/abcGWuj0EYiM8s/gGegW+FcDnSHSP5mNbLZVjUYLQ6RJBDX+cln/+orShCoAECFbd08fKCOPNMegzwUjYemdhqj4Hoop4aX9G4xmRVbKFB0nArpTR/zr6AGAy2kXgYucVH6qngLf63GL++Gs8Qdqrlo7NFotPK466COM0Gi1/sAmMCmM9dhBVJw+Bp/Xi4rTxwCNBlFx/pntpPX/pKBNTp6Jk8ofpeUVvF4PPO46eGod8NTVwl5RijNFP6L8pD/49PmUs3S6nA74fP7nVNtOweWsgaO6XCzL4H+SP8v2+eef+xdud7tQVXoCdY5qeL1ueL0euGv9WUspyPO43SLDJ2VAqwJBrqPKJgsKA8cgIqIuqX0/2VCbBZ++nagLaybQaYyUkWpv8oCpsbLHtgjlvJsPWqWsWfB+8sXjm8rQarU6GCJN0GoN/rX/pIXYFfdEFfBK2UBZoKoOHPUG/7g5+WQwUqCn/uNUZFQs9BFG9Oh/oWK7f2kFfwCvCPp0esQmJosAr368ma/BbJYajRY9UofI+mnQLTkD0YEZDBOSApMFyCIWf9YyIjBbqHIGYmd1OapKS1BZWgJfIIMqBbU1laUAAEfg33oaOGsqxHl63S64ah2oKvXP1Oh1+RdQd7vr4PH4A6/amgqUnyoCAhk7+QLmFWeO4UT+HnjdLtjLzwT6uMWYQIjMXX22rsJ6HBWni3H66PdwOR2wFv+IYwe+BuRlmV4PagOL2kuZOynw0xsixNqAbcncERHRudf0JxAionNIHSi0lQYaRbapuSCsNSLN0W0+bym4aiwDqNjeSB/IgkhdYIZSKUiTv251sCcfJ6guY5X4x/lp/ev0SeP9pFJSVbZRZ4gEZMtaSGK7J8Mc180fnKriCZ0hAj37DxVf14/nq+9oirEgvlc/f3vgnur0BphiE5DQuz+6pw6BpXf9THB6QyQio2JhjIpHz/7DEN8zFZFR8YE2fyAZ0603TLHdUGU9Dp/HjYTeaWKhd0dFKSpOH1OsDYjAOEdDhKl+whWPGxWni1F+8ijcrloxFhA+H6pLT6Aq
MD2/RqMRY+TkS0m4ax2BCWSsqHPaYYyOB6BB6bH6mZSP/7gLlWeO48knHofLWQOPLPNXaf0J7jonvB433HVOxSzM0kLwXp9/eQWxeLvHnykEmLojIurq2vYJhIioK9EoZ6hUBxztQf3hvzU0WuVkJa2lCQRaUjAnZfwUGUGNMkCULybfWOawPvjUBnZQH9ypX79Go4EhwqwIqhFYAqF3hn/RY4+7PjjR6SNgiDTBGGMRQVuw2YItvdMQbemp2Cadr95gRKQ5BlqdXmyLMMegZ/9h0Or0gQlpIIJFvdGEXukXI65HCuJ7pMBgNEOrMyAmMQn6SDO0egPcLieqyk7A5bSLbKlGq4XWECHKMr0eN2rtVag47V/+oa6mUpFttP50SGT03K5akVVzVNkQYYxS3HP/pC8+GIxRiDBGiQDc7aqFu84J28kjGJ15IUoOfatYiNzjcYvSUHtlKewVVjEJjJTtg9c/g6jL6Q8862qqRLmpfD0+IiLqetptnTun04n3PlgLa2kZUvr2wa9u+qWi/bt9+7FhY65im8lkwi1Tb4Il3v/XU4S4DlYofajr4v0NX+f63rrrahUlhl6vWzHLY2chLUXQ1gDP46qFs6YSkaZoMQ4Ogen2pbLHWnsV4PMhMjBmzufziUDCWVPhn1ylCfk7P4PX60G/YZfj6N7N0EeYkD58grpbUNK6gScL96LidDE0Gg0Gjr4BCJQMHv52I9x1DphiEuCoqi9LBIDUYZcHpvL34mTBd3DV2qEzRCJp4M8QaY5FbU0FzHGJOLTjv/B5PejebwiiYhOhj4iEs6YSWp0Bdc5qnCzYA1NsN3RLHgAEgkuPuw4ajRbmOP9kLwW7PhfbfD4v9JEmuGsd0Gi00EVEwl3nRJ8LRqL0p0NwVNmg0erh87qRmHIBvG4XykoKFecu6ZV+EarKTqHGdgoJSemwnTwiJlbR6vTwetzo3m8IHJVlqCk/jYGjroejyobi/VsBACve/RSzf3M9DEYz3HW1/plNtRoxPtAYbREzg3ZPuQAR5liY47rBXlEKt9uNk4XfwQdN4KFFXK9+8Po00BqMqKurg0bf9j9UEBGFo/4pyepNnUa7/Nn6SFExlv97NcxmMy6+qL6URi2lbx/Mm3uXeNx5+yxFYEdE1JF0BmX2qCMyd+1B206ZOymrJs3eKbaqX7e8LFM1/q45OkMkNBqtCB7V4/OaImXVxMyO8jaNVpyXfOkEKZtnjI6HRquFXh8hxuxFmmOg0Wih1elEP2kcs9EUoygj1ekNMAfW6otNTBbZTa30xwdZ6aopNgE6vb9U1BzbDd1TLgACGcWouO4ikycmJQnMTOn1uMU4N4l/XUH/taouO406RzV0hghEJ/QS2U/IJmaJiDRDo9HA5/WiuuykoqT2yy07gcC6fv41FesnftHq9IqxfK7AshBSlk89AQ0AVJ4+jpry0+rNRETUhbT5k43T6cTmLdtw4ZALGmTrWsPn8/91v6mH1+trsI2P8Hnw/obv41zfW4/H2+TXneXhhabBtlY9PF7/eCqvR7HdK/s56/V6G70OnhDuV0y3JMR27wtpiTSdwdigT2MPjydwXoFFu3X6CEW7Kdr/xz+DSTbhSqAU1O12w+tF4FpJJYWA1+uDx+OFRqOH2+0OxLcaaCNM8Hh98PoArw/waXTQ6COQNGgETHGJ0Ogi4PF44PVp4PF44JO99oS+g9AtZTA0+gjE9eqPCHN8YFZRHaDRwufx+F+LbOwbAksNSK9N0q3vBUhM9U8uU207CZezBjGJfeH1+qAJlAyLmUq1OkTGdENEYGxgndMBj7vhjKHw+aCPMCFCFhxGJ/QWQSYA2CtK4fF4UBdYL9AemBhGnrn2+jz+ReG53h0RUZfVbmWZkg2bcmGzlTcI9DZs8pdkThg3VrFdzR1C2VYofajr4v0NX7y3Z5fX44ajygZjdHyDMW+SusBkHuqZIxEo61SPoVNzVlcA8MEYHY+D2z+GOS4RfYeMVndr
kvXYIZT+dAg6QyQGZF0tth//MQ/VtpPoO2Q0jh3YAfh8SBo4HC5nDRKSB8BV64BOb8CJgj2oLjsJU4wFvQdkwmA0w+fzQqPRwlp8ELX2SvTOyIQ3sGC6x+0S16PWXoUIUzS8bhecNRUwxSbAUVkGQ6QJEYF1/PyLfdeXhRqjYlH47ZcwRBihjzTBXmFFrwGX4GTBHtEHAGK794GjygZ3nVPMptln8Cjo9BGoOHMMOn0E9IZIGIxm6COMKDn4Deqc1YjploQoSw943S5YevcXpZhxPVJgirHgZOF3AICvvt6NK0f7xy1GRsUisc8gnC76AV6PG/E9U1H60yEgMEGNz+tD74xM6A2RqLKdgr3KBkelDZHRcXBUV8IHLXwaLaDRIa53OlwuF8syiYgaEfZlmaGqqanBsn+txOIlL2Pxkpfx3b796i5ERNROpPLLxmbd9NPUl2+qNDahikIgMwYAiSlD0HvAJeoeIfD/jVEqz5TEdOsFQ6QZptgE6A2R0AQWODfH+cs0tVodtDq9GC9oikmon0Qm8Npju/dBfK9+ooQTgZJMid4Q6Z8xNPA8+cLwkobXQYM+F4xEnyGjEBXfAwDgcviD5EhzLCJM0WLMnLuuVpSNSuWYGq0Wlp6psPRKhcHoX05Co9XCHO9fU89fuhmJyGj/eEetVgeNVo9Ka4koG9Xq9CKwk77WaLXonjoYPfsPRaTZH5hqdQZEW3rBG1iGodp2CmXHC1Bnr4LOYBBlq1qtTlwXaaIVIiLqeloU3NnKyxXB2fsffqTu0qTjJSdxy9SbMG/uXZhw1Vh8tXkbjhT5F3IlIqJ21mRQ5+df7SB4v4ZBTUP+5RL8/+8PsoLvqynSjJKGQPAjMcYkoGfaMACawCyOynX6RJAWKCP0+bwNM5SBvvIxfHLS2EatbOkJjVbbYLyh/FpoNBoYo+NgiDSL2UVtp45Co9WjW/IA9Ey/GF6vB46qMvi8bjF5jT7CH6BqNFroDJGKrKh/m/9cTDEWaLV6aALXUqvTI6ZbL/i8brgc/sBLq9Xj9TWfiudHBEpXtVodDEazCIB1hgjoI/3Hd1ZXwB0YeyctnyAth2CO64YIoz8gdNf5x+0REVHX06LgzhIfjztvnyUmRFGXXjZlyAWDFBOoDMoYgPi4WBw+clTdlYiI2oEIVtQTqMjIgyW1xrYr1T/fVx9LtYgxRppYSzkNf/35a2CKsSDSHOPvIR0j0C5fyF39Wpt6fQoaTf1kK4YIMSGLRJqwxP+FRhxTKmv1ul1ISEqDVm+ATqdHpClaTLASm5gMjUYDr8cTyDbWB3L1GUMtXE5/UOU/5/qF5LU6HaITekGj1Yo164zRcfg81z+hip//2knP0+kNgf/XICLSX3Jb56iWLVbuH+MolZ7qI4zQBSbD4XIIRERdV+O/8dtZ717K9Ygkid38s5UREVEH0DQXcWlk0VLL+QOnQBAGjXo98pD4vP5neQIzRErkp53YZyB69BsSyGYpz9cU6/89Yor1L10g5z+90F6fFLD515VT/nrU6vT+pQY0UBzfHOcvpdTq9DDFWIBA0KTVGcQ4u8ioOPTO+BmSBw0Xk7BIpAyhRqOFpVc/mGMTERXfI5DhCwSv+ghEW/zln9IsmjGJSf7nBcpHpaynTm8Q23plXIKe/YdBbzRBH2GE1+OCs7IckghTtMh6QqMRWUh1eSwREXUdZyW4czqdWP3mO4oyzoP5BaixO5DSt4+iLxERtR9NoJSxcfVlla0iz4xp/AFeS3kD2aSGk7rI9iUdIkgmzhgVh15pF8EU7Q+ulDQtDu6C0Wq00OoN/lkyZfuTykCNgZk9I0z+xchdtf7ySY1WB31EJIzRcdAbIv3LNMiDO+mYWi30EZFISE73n7K2voxUyuRJpZcAEBXfA/fceTtiEnoDgVJOBJb7kAJTU7QFBqMJGmigj4hEbU2FInDzeb2Iiu8OQ4Q5sAyD/3hSyWZrHC4swIL758NmCyyc3gSbzYYF98/H
4cICdVO7cjoceOzRR5C3S57pbB9LX3wBjz36CJyO9i1lPVxYgFkzbm33c+7Ia56zNhtLX3xBvbnVmnsvqe9rR90Loq6mXYK77/btr58kZe9+FB/7CYuXvIwlLy/HkaJiGI1G/PrmybDb7aLfV5u34dprJnCdOyKiDiSfGCQYjcafNWq9+uBJ04JASk5aKL1hYCgP5DT1x2pwDI1/nT31ZiCw16ANDTQV3Gl0emi1ev/YPNn+pEylVNKp1Rn8ZaSBLGJUXGJ9iWVgLJ98/J5WWl9PqxPBlfS61fdOL1v7UKc34De33FLfFmmCRqP1j9UTE+loA+fj/9frccPj8Y+xQ6D8NCIqFgnJadDrDaJUNNyqMktKjiPKbMbQocPUTS1is9nw2KOPKIKNOffci0cfexxGk3K8aEvl7dqpCIzS0gdg5eo3kTVipKJfWxUW5CMlJQVp6QPUTV1ee90Loq6uLb/RhYuHDVUsTi495t51B/qnpgAAjEYjZtx6S9A2IiLqGOosV0Ohhj7BaTTK4Kk1+5LGeKnXifOfumzfikBPJvBl0FfSxIQxak0Fd1qdDlqdTpFRAwBTVBy69clAXPe+gOzMfIHxdpFRcf7jazT+wFAb2EeARuOfnVP+ZH+mTpkhRJCFx5e9+i/oI4zQaDT+SVs0GmgCs2YCgYlhdHpotBpR1onAa0FgSQf/MWQBs0Yjykm7GimTc/ddf1AEYPv27UXGoEH80A9gx9fbMWr0GPXm857NZsPdd/2BmT8KC+0S3BERUeekzv40EDQTFjpF/NXKfXkDwYS0LIBEPnmKP0CCmCREqT4wUWtJNrHhfuv5s196aGSLfgcaYIpJEAGTdCx9YHISU6y/XFKr1QGa+olYJBqdDtD6n6MIkgOlmHLSDJhS5u/Nt95B99TBSB/+c3/ZaGCcnnQMnU4vgjeD0QyNVgtTTDfZ2ERZYCcdy9eyMXfSh+KpUyZh1oxbUVCgLPeTAq6pUyZh6pRJQcv2Cgr8JYhTp0xSBGZOhwP/WPS0ooQwb9dOxQdw+fGn33oLEhMT8dLLr8Ji8V93p8OB/IMHMWzYRWIfOWuzxfnMmnFrgxLFpS++INql8zlcWIB5996N/fv24o7ZM8XrkEoRpfOQl1FKrz1nbXaD6yB/DTlrs7Ho6SexaeOXmDplEvJ27QxaPik/b3UAm7M2G/9Y9LTiGOqSTpvNBqvVivQBGYrnBdundL5vv/UfcW+krGVTx4Dq+uWszVa0NXY8hPBegmrfH374gaJNXhYqXb+33/qP6K8O3KTS16lTJuGO2TNx9TUTQ878Bdu//H0gvQb5/VO/B+TfC+o2+bkGuxfqa0ck18xvfSIi6sqaW85AEwhcWk2jDeyl9SIDMzY2CHyk4AP+oMkfqDU8mvS1ejsQeF4TQVtL6aQgTkV9jG59BqDP4FHQBH7Nql+bRKutH4PnD+gCgZ5sQhXRt5EJT3T6CP/rFJOz1F8z/z/+stCkgVlISEoXSx4YIk3imNJzRJAaAqfDgSXPL8bV10zEmuwcLHv1NWzfthU1Nf4Mo9PhwKJnnsIlmZlYk52DNdk5QOBDuKSmphrbt23Fsldfw5rsHFx9zUQseX5xSNkT6fgzZ83GmuwcLF+xCgcO/KAIOBxOJ2rsdlgS/AFt3q6d2LN7N9548x2syc7BnHvuxeLnnhUflKVzk8535qzZWPL8YiQlJWPxCy9h6LCLsHzFKsy5515xDACwWCyYOWs2dny9XWyTykEnTrwOK15brrgOiYmJWPHacgDApMlTsODBhzHuqvFYk50TtBQzZ2224rznzb8fzzz1hOID/vff78f0GbdhTXYOFjz4MFatXKEMnspKEWU2w2T0L42h3qf0WuXX/tDBg1j26mt44813AADz7r27yWNs2vglRo0eI+7HF5+vF/ejqeM1916Snm+1WsXzzWYz9u/bK9rVamqqUVZaijXZOeL816//LxAIzl59ZRkeefQxrMnOwaJnn8MX6z9r
EOg3Rb5/6b33yMMP4PEnn8Ga7BxMnnIz3lj9unh9TX0vNPX+kEj3Yk12DgYPHoK3/rNa0d6YR/76KB7566Ot/pq6njb8Ricios6u2cBNE3pmKyhZ2WNLsmRy0oyRrlrlB3opWwexb///yWebDHRU/isjZa7aS6PBcoNzECfeLLHsguxc/eWfytdZW1MBAP5ZOwHce28gwAgcU5rtUk2dvTXHd0ePfheKxd8FjX/MYKhKSo7Dbrdj7JVXAQCMJhOmz7gNUVH+4FEe3Eiuu/4GbNu6VQQEUVHRmD7jNpEtGXvlVbDb7SgpOS6e0xiH0wm73Y6EBP9sqRaLBYMHD8Hx4z+JPoUF+UhMTBSZvKwRIxXZmfQBGYiKioatrBTOQJbvuutvEM/PGjESc++bJ75uSvqADBQXF4sAQV4OOueeezFp8hTRd9ToMbBarSEHsXt278YNv7hRnHda+gCkpKQg96uNol/WiJFiLJ38dUnk5yO9Vvm1l8Yk7t+/TzxHOqbRZMIlmZnNHmPcVeNFcGqxWHD1NROx4+vtzR6vufdSsGswceJ1GCrLyKpFRUWLeymd/0/HjgGBQBeACPqTkpLRvUdPlJWVyfbQNPn+pffe1ddMFO+1YcMugt1uh8PpbPZ7IZT3h/y1B2snkjTzW5+IiLqy5oI7edamNYJm11pIGnNnCCz2Lcj3p/Efy3++ym6Sxl6HOrhpi2AZOP85BbJv4vWrg73GKfYpntYwcxdl8S8ppNX7+0+fPl3Rrl6bT6Lej05n8I/VkwJJxbmHXpJZVlYGsywTpFZWVoYdO77G9FtvEeVmC+6fr8jGBFNTUx3Sh2yT0Qiz2Sz62mw2FBcXK0ow1WPMnKrytztmz8TpUyeBQLB4xmoVfSUWiyWkUj2LxYKUlBTs27cXNpsN27ZuFeciLwGcOmUSFj39pPrpjVIHsa0hBVfS+TicThwtOooF988X5zT91luazIQlJ7dudnOr1Yry8vImj9fce6k9roGcFNRJQZ4UXMpLVluqT1//uNtgmvteaMv7g0it/X7jERFRp9NsYBNC8NGUBsFWK3ZnMJqhN0QiOjCtf3DyrGCQg2ga29624DU0GllUpm5p/tg6fWBBc1WJq5TRlEiZOY/bv3TEK6+8omgPFngCDe+xTq8cHygyf/oI6APr5YUiISFBZCaCSUhIwKhRo0UZnfSQj4kLJioqOqQP8UaTCXPvm4dVK1eIQO3Syy4TmSVbkDFm69f/F4mJieJclq9YhR49ewGBYLF7on/dwtYaNXoM9uzeje/374PZbEZSUjKcDgfeWP065txzrzjuggcfVj+1UeogtjWkTGhSUjIQ2Ge/1H5Y9OxzinuzppGy0LZITExEfHx8k8dr7r3UHtdAzmKx4A9/vBOPP/aoCLSm/vo3Tb4v26Kp7wWT0dim90dzHv/bY3j8b4+1+mvqepr5rU9ERF1Zc8Fdc5m9ZslLCRsLsEKQNCgLURb/hCGCet+BICTYa6ov22y4vcODOw2gCUyKorwW9V83RZyfuqvqvGsdVYAsGHzttdcU7Y1RrqsXmJ1Ttmvp+DpDBPSG4JmTYJKSkmE2mxWlgfv27RXZiKSkZNTY7YoyP/mkFwhk6fbJskW5X20UQZHRZILJbMZ/P/1EtMvHtDkdDqxY/grmzb9ffCiWl7apSzKDKSzIF5k7o8mEjEGDxDgpBMbotWTyivQBGbDb7Xjnrf/gkszMRjN+8tfRHKmk8JOP14nzOlxYgOLiYlHG2Bz1jKHSa5Vf2+bWlQvFgQM/iOfbbDZ88fl6jBo9ptnjNfdeCnYNSkqO4+iRw6J/S9hsNryx+nUsfuGlDgtq5UL5XpBryfuDSK3hb0giIgob8g/2HUJTn1HztTGQUme5/IFZ/VfSonLBA1JNkOhICpCC9W9PsolQxLUItLToejR9/aICs2XqDf6ZOEMm26d/whSNmOgFqF9rDwA0utCvlZQ5++Lz9aKcLP/gQTFOymgyYcEDD+GT
j9eJ9j27d2PKzVPFB/SoqGjkHzwo2r/4fD3m3jdPBCG//d0MHDjwg2hXH99kNitK/abKZiFUl2QiME7LarWKvju+3o6sESNFQDdp8hQkJiaK8rlVK1fg8SefgcViESWDd8ye2WAWSInFYsGll12GmpoaUQJpDIwfk8/0mDFwEM6cOS0m+EhISEDerp0NZliUTJo8BZdkZorzWvzcs3jgob80GbhK1CWZkkmTp6BP377inB5/7FHMmDlL0aelunfvgUcefgBTZTNQSkFTU8dr7r0kPV9+b159ZZnIuraU/F5Kx5saZHbP9tLU9wKAZt8fRC2hqaysEsuVHjp0EMOHD1f2OMvcbjf0gfEEjQmlD3VdvL/hi/c2/Pi8Xvh8Xmh1ejiqq2CKjlF3CYmjqhzG6FhF4ObzeVFbUwVjdBzcdbXwej2IUC2XILFXlsIspvhXcrtqWx4QtYC9qgx6fQRctQ5ExfsDMI+rDs6aCugMEWKR9lA4qmwwxQT/wO6oKkPx/m3QR5qR/rPxeO211/D73/9e3a0Bn88Le0UpfD4f9BFG6AyRcNZUwBhtgcfjgavWAS+0cLvdqHU6YD1eiOjEvtDoO+6atYfDhQXIfn8N7p57nwgGnYFZCa++ZiK2bdmMKb+aGpYLdreEzWbDkucXY+5980IKBs8Hebt2YsfX2xWzntpsNjzz1BP4wx/vPO/fM9S8/in+EufOKPQ/0REREanJskL1pYktFyxhJc/k+ROEQTpJmmgLnulrP/4Mo7LUUZSQNnFewTR1ru46/yLveoO/LDOUwA6BfYpJU8QSC7Lzkh1To9G0aMbMc6msrAw1drtimzTxRllZKWrsdjHG7HyW+9XGZstTzzfyGVUl8hk05Vk09SPY2n5EnQkzd9Tp8P6GL97b8OT1eqDV6lBrr0GkOUrdHBJndQUio2IbBEPOmgoYo+LgcdXB5/M0OuFHUxkvr8fd+GQj7cBRZYM+wgiXswbmOP+EHF63C47qcugjjIg0h57NrK2pbLhEQYCr1o7D336J2MRk9M7IxMqVKzFrVmhldM7qcrhddYgwRkFriAhc7zh4PB64XS54vF643W646mphr64EdBGdPnOHwKLWmzZ+qdi24MGHO3T8FHV9UoZXPjtodHQ0Hnn0MWbtKCSdOXPH4I46Hd7f8MV7G558Pi80Gi2cDjuMpuBlk81xVlfAGBXbIANXa69EpDkWHrcLPq8HevVyCQHO6nIYo+PVm4HAUgvqoLE9OapsMESaUeeohjnOXxrq9bgD202ICCzSHoo6exUiGgkGPe46FOz6HMboeKQOuxxZWVnIy8tTdwuqzlGNOqcdEaYo6AyRcFSV1wd3brf41+VywVFTCZ9G3yWCOyKic6EzB3eN138QERGFoOEab62gaay0MlDeKPtvUEGf69eRgZ1fYKZO+WGk/2/psYPMBCqRZslsTRZSek59SWb9ecmvT8dfKyIi6kiN/xYhIiJqiTYEBo0FhmLZA/V6eirnMijRaAKzksrPQdTEtOy8mnsdWr1BTBwTakkmFAGhf8xd08dpqo2IiDozBndERNQumg4YmtHoU+UNjXZqciKSjudfDkIRoAauRUuvSWNBriR5YBa69fEvyj1nzhx1c6NEdlXafVOT3zTRREREndu5/G1IRERhpKWBjFxjQY1WCkI09evpBdVUW0fTNFxYXXo9Lb0m6v2oGWRLQbz99tuKtqbUn4d0Xk0dp2XnTEREnQcnVKFOh/c3fPHehjeXywWDoXXT6Nc5qoNOPOKuq4U+ItK/np7XA60++P5dtXYYIls3mUtb1dorEWGKgbO6AqYY/6Qu0tpykeZY6CNCn5jE46qDLrDUQTDuOqeYVKYlE6oAQLXtNEzR8dDo9Kh11EAfYYTH4xGTqUgPR00VvNAiLS1NvQuiDnP48GG+584iXu/w1dSf7oiIiELW0iyVQmPPlSecGusjy5SdC/517tRjAqWMo3xb85rL3DV1DZrjX4svhIxi
U21ERNSpNfNbhIiI6GwIHlCEPBNnk2WGHUwEZPXnWB+TNnPeKk2XSwYmbgmYNm2aoq1ZstLWJoM7IiLqspr+LUJERHQWNBZriCBEo2ky8Dm3wUqQgKm1QVRz/WWZvfnz5yuamqORT/rS1LVsYUBKRESdR+M/3YmIiM6SRoMgRcAkb1Bq9PlngTTpS9BzCLatCUH3ISMPcLOzsxVtzdEwc0dEFPYY3BERUScQPNiQByFNZe5aGkS1r8B5qcfLadr/vOTX46mnnlK0Na9+fbsmx/a18pz/Mk4jHkREdG408dOdiIjo7Ggsk9RkQCcTar8OETh1dTmjf6KVc3heKvKArjOdFxERtR/+dCcionNOrMnTWo0Eh2eDCJTU56Cpz5R1hClTpqg3NSnUgK61p/zEJp94UHipratD3re7sX3HLpw+fUbdfFYtXLgQWVlZWLhwobqpy7Db7bjzzjuRlZWF1atXq5uJ2iS0n/REREQdKNTAozEdGUQ1R5Q6ql6D+uv29tBDD6k3NUmrC/V8zt21BICq0hNY/NsBosRz89t/V3dp4INnZor+/543AXWOakV7yaFv8OSNCaLPj9vWNdkuP6b6fOQP+X42v/33RtskzZ2n5Mdt6/CXcRp88MxMdRMg208o16atKioq8da77yN3y3Zs37ELb777PnbmfavudlYtXrxYBHcHDhzA+PHjkZWVpXjk5uaqn9au1MdVB2lWqxWTJ08W7XfeeSfsdjsAwGw2Y9myZZg7d67iOUTtIdSf9ERERB3m3IYTbVOfuVNtb2pcWzv49NNP1Zua1sHBpjywaa2q0hP41z1X4Pq7/4knNvmw4IMS7Fq3PGigJJECIClrGNejL9a9cLdoLzn0Dd5Z+GvMeu4LPLHJh7uW5+HTl+5DyaFvgrb/9b9VyN+1XhE4xfdKxYIPSsQxFnxQgqSBwxGbmAQEArtd65aLPnctz8MHT9+mOO/mzlNS56jGt/9diXHTH1Y3Aar9XDHtz+rmdlVX50J2zjrYbOW44rIxuH3WDPRNTsKWbV/jwMFD6u7wer34fMNGFP90XN3UoZKTk/HZZ58hLy8PeXl5eOONN/Dcc8/hwIED6q7t4sCBA3jggQewdOlS5OXlITc3F9u3bxcBntVqxe9//3tMmTJFnFPPnj0xf/58EeARdZSO/UlPREQUinOYeWuzc5S5++tf/6re1CStVqfeFNS5zIIeP5iH+F6pSMu8CgAQ0603rr/7n9j2/vNBs1xVpSdw6vB+jJlyj9h2ze1P49Th/SJ4K/xmA1IvugJJA4cDAJIGDseIG/+A7dkvBm2PMEXj0l/dh/xd64MeEwD2fP4GjNFxSOw7CABwxbQ/Y95bBYjp1hsAkNh3EHpnZIr+oZynZMfalxEZFYs+g0cptiMQRFacPoYb731J3dQhNuZuhq28AmNGjcCI4ZmIiYnGtRN/Dp1Oi7xv96i748eD+dj//YFzXrqZmpqKpKQknDnTMeexa9cuZGZmYvDgwUAgEzdt2jRs374ddrsdiYmJWLt2LWbMmCGeM378eNkeiDpOx/7mISIiCsU5DCjaqr4sUzWhSojB1NkSeiaxdfeiPcbcnSk6gLgefRFhihbbYhOT4KyuQK29StEXAKpKSwAAMd38GTQAiDTHwBgdh0qrv+100Q/okTpEtANA99TBqDh9rNHgDQDKTxah1l6FmG698X+LN4jArc5Rjfxd63Hpr+5TnKfc4d0bUX6yCMmDsoAQzxOBLOK+je/hmtufFtskVaUnsGvd8iaP255KTpzE9z/8iNSUvhgzaoTYHh0VhX6pqThzxoqKykrFc3Z9uxsAkNYvVbH9XDl69Kh6U4c6ceJE0Myc3W7H22+/jTFjxsBsNqubidpVqD/piYiIOox6psmuSJ2p04YcTLXO9ddfr97UJPX5Neoc3orTRT+oNzWp0loCZ3W5erNQ56hGxelj6s0K6cMnoGjvZpFBq3NUY9v7z6u7CdZjB+GsrhCBm6TO
UY1/z5uAv4zT4PvcDxSZvObOU7I9+0UMu+rX4nlyUlbzpwM7Qhq311abcrcAAMZePkbdhIwBaQCAH2WlmQWFR1BaWoa0/v2QkGCR9T77ioqKUFJSghEj6oPS9jRixAjs3r1blH1KwZtabm4usrKy8Itf/AJz585VZPKIOkqIP+mJiIg6UBfO3AnqzF2owVQr/e1vf1NvahfqDGSoWjLmTj1JSWMTh5wNSQOH4/q7n8fLd2ThL+M0ePbXKTBGxSG+VyoizTHq7o0GYBGmaPzf4g14YpMPF469GU/emNCg5LIpP25bh4rTxzBq8l3qJiCQ1Tz87ZeINMeKsYEAgo7bayu3242Tp05j6IWD0T0xUd2Mfqkp0Om0+OHAIfh8PtjKy7Hxq82IiDBgwrix6u4d7vjx47j22mvF5CXTp0/H/PnzRdlkW8gnRZEmcRk8eDDmz5+P6dOni+AtOjoavXv3VmTmxo4di7y8PHz88cdYsmRJl57hk7qOjv3NQ0REFILWBhSdifo1hF4G2TpffvmlelM76fh7EdOtN+a9VSDKOG9+YBUANCifbE5sYhKM0fHqzUKEKRpxPfqqNzdwwaU3inO5/71iOGsqgpY/SmPn0odPUGxXu+DSG3HBZb9E4TcbgBDOU8oWBjumXOa1t4lJVKSxgUV7t6Cq9IS6a5vo9XrcestUXD1+nLoJAGA2mZD1s0zYysvx5jtr8PZ72aiqrsbV48chJqbx8+8o6glV8vLyMHZs+wSZa9euFfuUB2dS4CYFb9XV1Zg2bVrQskuz2Yy5c+eisLAQVqtV3UzUrjr2Nw8REdF5qqPH3P35zx07U2JLtceYu2Bj4SqtJTBGxwXNoklj2KQxbQBQa6+Cs7pCzGTZI3VIg3LPYGP7JDvWvgwAYlIXuT2fv4GeaUPF5Cuhau48rccO4kT+bvznoV+KbOZ/Hvoldn/2usgAdk9texaqJXr26N7gDxZyl44eiSGDB+H0GSt8Xi/GXj4GgwZmqLudF95//30AQFaWslSX6FxgcEdERNQBmvpgTMElD8pC+ckiHN69EQhkyj596U8ioyWNa5PKOGO69UbPtKFi5ksA+PxfDyoCMPWYupJD32DXulcVM1dKfty2DrlvPYNr//j3BoGfNJHKhWNvbrBdfk4IHKNo72aR4WvuPJMGDsfD68oUAfLvnvoImdfehofXlSFp4HAkD8pSzK4pZftG3HhHgxLRs0Gj0eDaqydg3ty7MOePv0fWz+pnB+1MpAXD1eviSevgrV69GuPHj2/1sgm5ublYtWoV5s6dK7J2ubm5Dfb57rvvIj09HYlBylyJ2hODOyIioi6os02t3pIxd42J6dYbt7+4GZ++9Cf8ZZwGi25Owogb78AFl94IBLJd5SeLFNk9qaRTOrZ6qYCkgcNxy8L3sHL+1fjLOA1eviML19/9vCL7Jo0B/ODp2zDruS+CZuZ2rH1ZMQOmRBprB9k5rJx/NW5Z+J5iP82dZ3NiuvXG5P/3qngdf7suBnE9+nb4Wnddnd1ux4kTJ3Dq1CnY7XacOXMGlZWVbZ5JU1qkfOHChVi6dKlifN/YsWOxdOlSzJkzR4zXA8Axd3RWaCorq0T9xKFDBzF8eMMfaGeT2+2GXq9Xb1YIpQ91Xby/4Yv3Nrzx/nZuPp//173P54PX64XP54PH44HH44Hb7RYPl8uFuro6pKX5Z0QMlTyoa0tpJp2fDh8+HNJ7buHChRg/fny7jak7l6RFz8/FLJqhXm/qepi5IyIi6oK2bt2q3nROtceYO6JQzJs3r0tnwaRS0SVLlqibiNqMmTvqdHh/wxfvbXjj/T27srKykJeXp97cqI7O3BG1BTNJZxevd/hi5o6IiIiIiCgMMLgjIiLqgi677DL1JiIiOs8xuCMiIuqCXnjhBfUmIiI6zzG4IyIi6oJaMt6OiIjODwzuiIiIuqA//vGP6k1ERHSe42yZ1Onw/oYv3tvwxvt7dnX0bJkafaR6F0REBKB/
SrJ6U6fBzB0REVEXlJWVpd5ERETnOQZ3REREXdArr7yi3kREROc5BndERERd0L59+9SbiIjoPMfgjoiIqAuaNWuWehMREZ3nGNwRERERERGFAQZ3REREXdCwYcPUm4iI6DzH4I6IiKgLWrlypXoTERGd5xjcERERdUH5+fnqTefc4cICLLh/Pmw2m7qpAZvNhgX3z8fhwgJ1U7tyOhx47NFHkLdrp7qpzZa++AIee/QROB0OdVObHC4swKwZt7b7Oauvec7abEydMink15C3ayemTpmEWTNu7fD7RkSt027B3Xf79mPxkpfFY8OmXHUXbNiUK9qX/WslbOXl6i5EREQUgmnTpqk3URAlJccRZTZj6NC2lbHabDY89ugjisB1zj334tHHHofRZFL0bam8XTux9MUXxNdp6QOwcvWbyBoxUtGvrQoL8pGSkoK09AFwOhzIP3gQi559LqTXsPTFF7D0xRew4MGH0aNnL3UzEXUS7RLcHSkqxravd+GmSb/AvLl3YdaM3+JQfiG+27df9NmwKRfHj5fgrjv+D/Pm3oWBGelY98lncDqdin0RERERtZSUobv7rj8oArB9+/YiY9CgZoOX88GOr7dj1Ogx6s3Nytu1EwcO/IDFL7yE9AEZ6mYi6kQ0lZVVPumLQ4cOYvjw4coeIfhu336UlpZh/LixYtv7H34EAPjVTb+E0+nEex+sxcUXDcXFw4YCQNBtAOByuaHRiC+D8np90Gqb6URdFu9v+OK9DW+8v2fX7373O/znP/9Rb26Uz+f/de/z+eD1esW/Ho8HbrcbbrcbHo8HLpcLbrcbGn2kehcN2Gw2PPLwAzh18iSio6Mx7dbp2PC/L/DAQ3+BxWKB0+HAomeewv59ewEA464ajzn33Cue+8xTT2DCz6/G22++gerqavTs1QuPP/mMeO5LS57HlF9NRVr6ACAQZHzy8ToseOAhGE0mxfHV+0cg4FPvI2dtNv6z+nUAQHR0NB559DHRhkCGatPGLwFAnI+trBSPP/YoqqurAdlxctZm46djx/Db383AIw8/gJmzZotsm/TaL8nMxMSJ1ymuw9BhF4nXID8fAFjw4MNIH5CBZ556An/4451Bz1t+naS2/IMHUWO3i2MsePBhRebPZrNhyfOLMfe+eQCguG7S+Ticziavp7Qf9bmFQn2O0rXft2+veF3q4+Xt2olFTz8JBLlXTd176RwvycxE9vtrANU1J2qr/inJ6k2dRrtk7i4eNlQR2EkslngAgMPpRJ3LhdjYWNFmNBphNpthLS2TPQPQaAC9Xt/kQ6vVNNjGR/g8eH/D98F7G94P3t+z+3jnnXcabGvpQ6fTKR5arRZabWgfDZwOB5Y8vxhXXzMRa7JzsOzV17B921bU1PgDIHlwsyY7B2uyc4DAh3xJTU01tm/bimWvvoY12Tm4+pqJWPL84pDGf0nHnzlrNtZk52D5ilU4cOAHxTg1h9OJGrsdloRuQCBY2LN7N9548x2syc7BnHvuxeLnnhWZPuncpPOdOWs2ljy/GElJyVj8wksYOuwiLF+xqkHAY7FYMHPWbOz4ervYJpWDTpx4HVa8tlxxHRITE7HiteUAgEmTp2DBgw9j3FXjsSY7J2gpZs7abMV5z5t/P5556glFhvL77/dj+ozbsCY7BwsefBirVq5QtNvKShFlNsNkNMJiseAfzz2PUaNGi7JMAIrrGex+tZX8HCdPuRkL7p8PBK63+v7l7dqJVStXYPmKVViTnYNHHn0Mr76yDDabrcG9f+PNd2C1Whu8t8pKS0U7AKxf/1/R3pRH/vpoq/+f6FwL7Sd4Cx0pKsYZaynS+vcDAJRXVMLlcqu7ERERUSsVFxerN51VJSXHYbfbMfbKqwAARpMJ02fchqioaNEuBTeS666/Adu2bhVBR1RUNKbPuE1kU8ZeeRXsdjtKSo6L5zTG4XTCbrcjISEBCARYgwcPwfHjP4k+hQX5SExMFBmurBEjFePL0gdkICoq
GrayUjEG7brrbxDPzxoxUmS6mpM+IAPFxcViohF5Oeice+7FpMlTRN9Ro8fAarWGHMTu2b0bN/ziRnHeaekDkJKSgtyvNop+WSNGiqyW/HVJmitPDTY2cc4992LkyNGKfm0hP8dhwy5CWvoA8f5R378dX2/HzFmzxb2Tv2ajyYRHH3tcBMJGkwmXZGbip2PHxLGioqLFvQzWThSuWhTc2crLsexfK8WkKFLppZzT6cTmLdswMCMd/VNT1M1ERETUDqZMqQ8WzoWysjKYA5mgYMrKyrBjx9eYfustmDplEqZOmYQF988Xmb3G1NRUo6xMWdUTjClQAST1tdlsKC4uxrBhF4k+6jFm0rg86XzumD0Tp0/5y/ocTifOWK2ir8RisTQaEMlZLBakpKRg3769sNls2LZ1qzgXafZL6bhSqWEo1EFsa0iBq/zaqJWVlaHGbldvRu+kJPWmdmFJ6Aaz2azeDATO12q1YtHTT4prNnXKJFEuC9nMndJDXtpKdD5rUXBniY/HnbfPwry5d2He3Lvwq5t+qWi3lZdj5RtvIzk5CRNkZZrxcbEwGPSKvkRERNR1JSQkwG63w9HIxGgJCQkYNWq0KCWUHi+9/KrIxgQTFRUdUiBjNJkw9755WLVyhQjULr3sMsWYLKvVqpgAZP36/yIxMVGcy/IVq8TMjyajEd0TE0Xf1hg1egz27N6N7/fvg9lsRlJSMpwOB95Y/Trm3HOvOO6CBx9WP7VR6iC2NaRMaFJS4+OEEhISENVIsHW2GU0mJCYmYsGDDyveO2uyczBp8hTYbDasee9dLHr2ObH9dzNuU++m1R7/22Ot/n+ic61FwV1TbOXleGfNhxiYka4I7BD4wRRhMKCyslJscwb+EpXYrfkf4ERERKSUknJuq2OSkpJhNpsVpYH79u0VmbmkpGT/5Bn794n2nLXZiin/a2qqsS8wAQgA5H61UQRFRpMJJrMZ//30E9EuH9PmdDiwYvkrmDf/fsUHf4m6JDOYwoJ8kbkzmkzIGDQIb6x+XZRL5u3a2WD2zaakD8iA3W7HO2/9B5dkZjaa8ZO/juZIJYWffLxOnNfhwgIUFxeLksbmNFeSCdn9ko9Lk5Y/OBdGjR6jeM22JtZFlEpXiaidgjspsLt09IgGgR0Ck6ckJyfhu737xdIHW7/2D5gdlBH6TEtERETkl53dfhNdtIaUOfvi8/WiNC7/4EEx5s5oMmHBAw/hk4/XifY9u3djys1TxQf2qKho5B88KNq/+Hw95t43TwQhv/3dDBw48INoVx/fZDZjwf3zFeV5UjCiLskEgIkTr4PVahV9d3y9HVkjRoqAbtLkKUhMTBSlpKtWrhCzUkrlp3fMntnoJCMWiwWXXnYZampqRAmkMTAWcemLL4jjZgwchDNnTotAKiEhAXm7dja6OPikyVNwSWamOK/Fzz0rZiRtTiglmWjkfiIw7k5eznrH7JlisXr59W5vWSNG4oZf3Che8x2zZ+LGX06CJaEbLBYLpv76N+Ic7vzD7zHm0suQt2tnuy/8TtTVtMtSCBs25eK7vfVr2klMJhNumXoTLPH+WTPl/dRtErfbDb2+6RLOUPpQ18X7G754b8Mb7+/Zdfr0afTo0UO9uVHBlkLweDyKpRDcbjdcLhfq6upCWgrhXDpcWIDs99fg7rn3iWDQGZih8+prJmLbls2KJRDOV/IlEEIJBomoeZ15KYR2Ce7aUygfDkLpQ10X72/44r0Nb7y/Z1dWVhby8vLUmxsVbsGdes07yNY3u2LsWHyTl8d1zQKlsD8dO9Zg+Yb2pF5zTk69Jh9ROGBw1wKhfDgIpQ91Xby/4Yv3Nrzx/p5d53twB9WC4xL14t1ERO2NwV0LhPLhIJQ+1HXx/oYv3tvwxvt7dl1//fX49NNP1ZsbFY7BHRHRudCZg7t2mVCFiIiIzq6WBHZERHR+YHBHRETUBVVUVKg3ERHReY7BHRERURc0YcIE9SYiIjrPMbgjIiIiIiIK
A5xQhTod3t/wxXsb3nh/z64JEyZgw4YN6s2NaumEKmlpaepdEHWYw4cP8z13FvF6hy9m7oiIiLqglgR2RER0fmBwR0RE1AXV1taqNxER0XmOwR0REVEXdNlll6k3ERHReY7BHRERERERURhgcEdERNQFRUZGqjedUyWHvhEPIiI6NxjcERERdUFbt25VbzqnkgYOFw8iIjo3GNwRERERUaNq6+qQ9+1ubN+xC6dPn1E3n1ULFy5EVlYWFi5cqG7qMux2O+68805kZWVh9erV6maiNmFwR0RE1AVlZWWpN51T7VWWWeeoxr/nTcBfxmnwwTMz1c0KHzwzE38Zp8GP29apmxQ2v/13/GWcRjzk+5UfT/7Y/PbfRZ8ft61r0L74twNQVXoi5H10VRUVlXjr3feRu2U7tu/YhTfffR87875VdzurFi9eLIK7AwcOYPz48cjKylI8cnNz1U9rV+rjqoM0q9WKyZMni/Y777wTdrsdAGA2m7Fs2TLMnTtX8Ryi9sDgjoiIiNqsPcoySw59g2d/nYK4Hn0x8Q+L1M0KJYe+QW1NJYZccZO6SeHHbeuwa91yLPigBE9s8uGv/61CxeljisDLGBWHu5bn4YlNPvG4YtqfFfvJvPY2Rfu8twoQ0613i/bR1dTVuZCdsw42WzmuuGwMbp81A32Tk7Bl29c4cPCQuju8Xi8+37ARxT8dVzd1qOTkZHz22WfIy8tDXl4e3njjDTz33HM4cOCAumu7OHDgAB544AEsXboUeXl5yM3Nxfbt20WAZ7Va8fvf/x5TpkwR59SzZ0/Mnz9fBHhEHYXBHREREZ1zdY5qfPbKnzH2tw/g5gdWqZsVpL4/u24WIqNi1c0KZ4oOIPWiy0UgFmGKRsaIiThd9IO6K6lszN0MW3kFxowagRHDMxETE41rJ/4cOp0Wed/uUXfHjwfzsf/7A+e8dDM1NRVJSUk4c6ZjzmPXrl3IzMzE4MGDgUAmbtq0adi+fTvsdjsSExOxdu1azJgxQzxn/Pjxsj0QdRwGd0RERF1QXl6eetM51dayzAhTNP5v8YaQsl071r6MuB59ccGlN6qbGkgfPgGnDu9XlFDm71qPC8feDACotVfBWVOBmG5JqmfWO1N0AD1Sh6g3C6Hso6spOXES3//wI1JT+mLMqBFie3RUFPqlpuLMGSsqKisVz9n17W4AQFq/VMX2c+Xo0aPqTR3qxIkTQTNzdrsdb7/9NsaMGQOz2axuJmpXDO6IiIiozdqjLDMUVaUnsG/jexgz5R51U1BJA4dj+tPr8K97rsBfxmnw7K9TcO0f/94gMPzqzafFWLl/z5uAOke1or3izE9Y/NsBok+wcX7N7aMr2ZS7BQAw9vIx6iZkDEgDAPwoK80sKDyC0tIypPXvh4QEi6z32VdUVISSkhKMGFEflLanESNGYPfu3aLsUwre1HJzc5GVlYVf/OIXmDt3riKTR9RRGNwRERHRWVVVekIRKDU3cYrc5/96EMOu+nXIQeSP2/yB3e0vbsYTm3y4/71ifPbKnxVj7spPFiGuex8xJg8A1r1wt2wvwKGvPxX7+N1TH+GDp29TZClD2UdX4Xa7cfLUaQy9cDC6Jyaqm9EvNQU6nRY/HDgEn88HW3k5Nn61GRERBkwYN1bdvcMdP34c1157rZi8ZPr06Zg/f74om2wL+aQo0iQugwcPxvz58zF9+nQRvEVHR6N3796KzNzYsWORl5eHjz/+GEuWLOnSM3xS18HgjoiIiNqsJWWZMd16Y95bBWLikebG2ElKDn2DU4f345JrpqubgqpzVGPb+89jxI13KMbcXfqr+5C/a73IrN29Yo8oB5Xai/ZuEaWcgy+frJhAJS3zKvTOyEThNxvEsZrbR1ei1+tx6y1TcfX4ceomAIDZZELWzzJhKy/Hm++swdvvZaOquhpXjx+HmJhodfcOp55QJS8vD2PHtk+QuXbtWrFPeXAmBW5S8FZdXY1p06YFLbs0m82YO3cuCgsL
YbVa1c1E7YrBHREREbXZ2SjLLPxmA0oOfYNFNyeJrN/uz17Hfx76ZYvLIMtPFqHWXoWYbr0RYWoYkMT3SkWkOQYAkNh3oLoZANA91Z8ZCmUfXU3PHt2h0WjUm4VLR4/EkMGDcPqMFT6vF2MvH4NBAzPU3c4L77//PtAJlyeh8xODOyIiIuoSrpj2Z8VSA09s8iHz2tvwu6c+wv8t3oAIUzR+3LYOT96YgJJD34iZMXetW66YUGXb+8+LGTTl/eXtGSMmIsIULdawk5eOHt69EeUni5A8yP9hvrl9hCONRoNrr56AeXPvwpw//h5ZP8tUd+kUpAXD1eviSevgrV69GuPHj2/1sgm5ublYtWoV5s6dK7J2ubm5Dfb57rvvIj09HYlBylyJ2hODOyIiImqzlpRlNka+WPj6Vxdg92ev4y/jNIrAqTlnig7AUWVDpbUECASEI268Q2T7/nZdDOJ69BWloBdceiNufvB1vHxHlqJdXmL5uydzUHH6WP0Ywadvwy0L3xVlms3tg84du92OEydO4NSpU7Db7Thz5gwqKyvbPJOmtEj5woULsXTpUsX4vrFjx2Lp0qWYM2eOGK8HgGPu6KzQVFZW+aQvDh06iOHDO66cIhRutxt6vV69WSGUPtR18f6GL97b8Mb727n5fP5f9z6fD16vFz6fDx6PBx6PB263WzxcLhfq6uqQluafEZHobDh8+HBI77mFCxdi/Pjx7Tam7lySFj0/F7Nohnq9qeth5o6IiIiIuox58+Z16SyYVCq6ZMkSdRNRmzFzR50O72/44r0Nb7y/nVtHZ+7kZZMdOakKhSdmks4uXu/wxcwdERERtdnZmC2TiIiaxuCOiIiIiIgoDDC4IyIiIiIiCgMM7oiIiIiIiMIAgzsiIiIiIqIw0Plmy/R4gMCMXo3x+QCNRr2VwgXvb/jivQ1vvL+dW7DZMr1er2K2TI/HA5fLBbfbDY0+Ur0LIiIC0D8lWb2p0+h0wR0RERG1v2DBXVNLITC4IyIKrjMHdyzLJCIiIiIiCgMM7oiIiIiIiMIAgzsiIiIiIqIwwOCOiIiIiIgoDDC4IyIiIiIiCgMM7oiIiIiIiMJAl1kKwVZejnfWfAiHw6HYPuGqsbh42FDFNjp7vtu3Hxs25oqvL75oKCaMG6voo/b+hx+h+NhPAIDEbgn49c2TYTQaRfuGTbn4bu9+AIDJZMItU2+CJT4eAOB0OvHeB2thLS0T/SE7rq28HB+sXYcJV12J/qkpij7UetI9a+777UhRMdZ98hncbjcQ5PtT/X2sfr+o309QvQc2bMqFzVaOX930S0Ufaj3p3qrvRTD83u0agl3rCVeNxUVDLwQaWQrhp5IT2L37O+h0WhgMBlw0dEirlkI4XFiAV19Zhgce+gssFou6WcFms+GZp57AH/54J9LSB6ib243T4cCiZ57CDb+4EVkjRqqb22Tpiy/AarViwQMPwWgyqZtb7XBhAR5/7FHMuefedj1n+TUvKyvDJx+va/dzJzofdOalEHQPPvjQQumL0tJSJCUlKXt0Ek6nE0eOFuE3U2/C+CuvwJhRIzBm1Aj06tlD3ZXOkiNFxdj41RbceMO1uH7iz3HBoAx8tXkbIiIMjd6X9z/8CAAwe+bvMGbUCOQXHsahgkIMGTwICHw4PH68BDOnT8MVl41BZVUV8r7ZjUEDB0Cv18PtduNQQSGunnAVrp/4c/E+SOuXCgTeJwd+PIS0/v1giY9THJta50hRMYqLjyEmOhrduyc2em+PFBXjs8834IbrrsH1E3+OXr16YuNXW5CY2A2W+DgR2F06egRu+uUNQd8vp06fhsfjEe+PMaNGYMTwTJgCAcSRo0VwOp3i/UJtI/9+lL6HGsPv3a5j7bpPYTAYxL3q1asnNuVuRWK3BMQHrq18zbvjJ04g75s9yLzkIlwwMAMmkxEGvR4arV615+bZbGX4Ji8Pl18xFqZmAgan04ktm3ORlTUCloQEdXO7KS4uQmF+
Pn5x4yToDQZ1c8hsNhv+/sxTuPiSTPHaRo4ajXFXjW/TfgEgb9dOfJj9PkaOGg0AsCQkYPJNNyMpuX0/QH6/fx+qq6ow8drrUVJyHPmHDuHyy69o9vzV50d0vrPExao3dRosy6RWq6ysxKCMdPFXdkt8PLondkN+wWF1VyCQtSmvqMDwn10itk24aizKKypwpKgYTqcTx4+X4OKLhopswGWj/X+xPJhfIJ5DZ4/T6cTmLdswZPAgGJr55X/4yFF0T+wm3g/9U1MwMCMd33y7BwBQfOwnRJlNGJTh/wu9JT4el44ege/27ofT6VTsizrehk25sNvt+MV116ibGuD3btfhdDpht9uRMSBNbOvdsweizCZUVlUp+kqOl5xAXGwMuid2AwDExXbeDy1NcToceOzRR3D3XX+AzWYT2/ft24uMQYOYnQKw4+vtGDV6jHozEYWRLhPclVdUwmAwiL/g07l38bChGB+kjMti8ZdhqZVXVAIA4mV/7TAZjYgwGFBZWQmH04k6lwuxsg8WRqMRZrNZlBc5nE64XC7FPppypKgYS15eLrIO1DJbv94Js9ncZCmmxGYrb3DvE7slwG63w+l0wlpaBrPZrCjji42NRZ3LBUcguLOWljXYR1Pe//AjLHl5OY4UFaubqAm28nIcyi9UBGNN4fdu12E0GpGcnKT4I9uJU6dR53Khb5/gWaCqympER0erN4fEZrPh7rv+gKlTJmHWjFtRUKAM5qWAa+qUSZg6ZRKWvviCoh0ACgoKMGvGrZg6ZZIiMHM6HPjHoqdxuLB+n3m7duKxRx+BM1DaLT/+9FtvQWJiIl56+VVREup0OJB/8CCGDbtI7CNnbbY4n1kzblXsH4FSS6ldOp/DhQWYd+/d2L9vL+6YPVO8jpy12Vj64gviPPJ27RT7kV57ztrsBtdB/hpy1mZj0dNPYtPGLzF1yiTk7doJm82GBffPV5yb/LzVAWzO2mz8Y9HTimPIzwWBa2W1WpE+IEOxXXK4sP4+TJ0yCTlrs4FGzk96PW+/9R/xnMcefQQ2m63Jc2hMa/fX1DXJ27VTtKmfK72P/rHo6Qavl6ir6zLBneTDjz7B4iUvY/GSl/lLv5M5UlSMM9ZSpPXvp24CApk+l8s/FiuY8oqm2+U2b90u3ger33wnaOZHGv915RWXcoxWK9jKy3HkaJEiW9MYKVvQFJutXL0pqJqaGiz710pxf7/b5x/Dpfb+hx/Bbrfjjv+bwTFaLSRlUU+cONXs9xH4vdvlTBg3FhkD0sR13rxlG279za/E+Ee52traoPcgFE6HA0ueX4yrr5mINdk5WPbqa9i+bStqaqpF+6JnnsIlmZlYk52DNdk5QOADuaSmphrbt23Fsldfw5rsHFx9zUQseX6xCHyaIh1/5qzZWJOdg+UrVuHAgR8UH+IdTidq7HZYEvxZybxdO7Fn92688eY7WJOdgzn33IvFzz0rggLp3KTznTlrNpY8vxhJSclY/MJLGDrsIixfsQpz7rlXHAMALBYLZs6ajR1fbxfbSkqOI8psxsSJ12HFa8sV1yExMRErXlsOAJg0eQoWPPgwxl01Hmuyc4KOsctZm60473nz78czTz2hCGa+/34/ps+4DWuyc7DgwYexauUKRbutrBRRZnPQP5I7HQ68sfp1zLnnXqzJzsEbb76D/IMHcbiwoMnzO3TwIJa9+hreePMdAMC8e+9u8hya05L9qa+JdK+cDgcOFxZgzXvvYvmKVViTnYNFzz6HVf9+TREs79+3F+PGTxDtX6z/rEGgH8wjf31U8VBrrp2oo3Wp4O6MtRRDBg/CvLl3YdaM3+KMtRQbNiknX6BzQyrfGygr0+woNXYHoqKiMG/uXbjrjv8DAHz8388VfSorK/HZ5xtw5RWXhpR1ooY2bMxF/36pHX4/1Y6XnMQtU2/CvLl3YcJVY/HV5m0NMnNSSaF6Qg8KjbW0DNbSMhgiDE1+H7U3fu+eHe9/+BHyCw5j3ty7MG/u
Xbji8kux6j/v4Gg7Z7hLSo7Dbrdj7JVXAQCMJhOmz7gNUVH+LKA8uJFcd/0N2LZ1q/iAHhUVjekzbhMlk2OvvAp2ux0lJcfFcxrjCPxRKSEwXs9isWDw4CE4ftw/6Q8AFBbkIzExUWTyskaMxKOPPS6Olz4gA1FR0bCVlYos33XX3yCenzViJObeN0983ZT0ARkoLi4WAYK8HHTOPfdi0uQpou+o0WNgtVpDDmL37N6NG35xozjvtPQBSElJQe5XG0W/rBEjxcQ08tclCaU8Vbp2RpMJ/2/Bg81OdCOdk9FkwiWZmc2eQ3NC3Z90r+TvnaFDhwEA9u/fh7T0AVj07HPiviclJaN7j54oK6ufZGjosIvEc4K1E3VVXSa4i4+LxR3/N0P8srfEx2NgRjqOHy9p9V8dqX3Yysux8o23kZyc1ORse7GxsTAYGh+gHx/XdLtk1vRp4jhGoxEXXzQUZ6ylsJX7M0Mulxtfbd4GBD7EUssdKSpGeUUFfnZJfSlTU6QSvKaEUm455IJBuPP2WSLDMChjAOLjYnH4yFHRp/jYTziUX6go56SWS+nbp8nvIzl+73YdUgWFPOPePzUF3RO74fDRIkVfAIiMjGz1H0jKyvyl1sEyQQi079jxNabfeosofVtw/3yR2WtMTU11SB+yTYGfO1Jfm82G4uJiRQmmeoyZujzyjtkzcfrUSSAQLJ6xWkVficViaTIgklgsFqSkpGDfvr2w2WzYtnWrOBd1yeOip59UP71R6iC2NaRgSH5t5IwmE+beNw9ffL5enGOwEtqmJCf3UW9qk6b253A6cbToKBbcP1+c7/Rbb8H+fXtFH3l5rbqNKJx1meDOEh8f9BeQegwPnV3SDIgDM9KbDOwgG68jjd9B4Ae0NFZHPoZHIpX7JXYL/GW2kfdBlNkkPmAYDHrceMO1uGXqTThytKhB1oead/jIUVRWVmHl6rdEaVfxsZ+wYWNuo6V0Fkt8g9JL+Tg7+fg7SWVlJSJkY2l79+ope3Y96f4jEJTcefss9O+X2mDZBAqN/HqGgt+74UH9/SmJiY1GdXXTAVcwCQn+7+nG/siSkJCAUaNGi7I56SEfExdMVFR0SIGMFJCsWrlCBGqXXnaZyPQEG2O2fv1/kZiYKM5l+YpV6NGzFxAIFrsnJoq+rTFq9Bjs2b0b3+/fB7PZjKSk5AYlj1KJYajUQWxrSJnQpKTg4y4RCE5fevlVUZZptVobjHHrLExGI/ql9sOiZ59TvLfWBMpG83bthNVqFe+9N958B0MbCWxb6vG/PaZ4qDXXTtTRukxwt2FTLpb9a6X4C680IYB8RjA6u+RT2wcL7Gzl5Vj2r5WidNYSH4/4uDgxeyICpX/xcXHon5oiJgKQz5649Wv/LxZphsXv9u1XTKDhdDrx3d79SE5OavDB0RIfjxHDM/HZ5xuCZiOocRPGjRUlXdIjpW8fTLhqLGbceguMRmOD78m0/v1wxloq7s2RomIcyi8UGYSUvn1QY3eI2RNt5eXY9vUuMamH0+nE6jffUYylPZhfgBq7Ayl9G/4F97LRI2G321ma3QopffuImS4h+z4amJEOS3w8v3e7MGlmTPm9OlJUjJITJ8Xvy02bt+Lfq99CRSAYT07qjYrKKpyx+svnpO3NSUpKhtlsVpQG7tu3V2TmkpKSUWO3Y//+faJdmoBEUlNTjX2yjEruVxtFUGQ0mWAym/HfTz8R7fIxbU6HAyuWv4J58+8XH+zlpY/qksxgCgvyRebOaDIhY9AgvLH6dVEumbdrZ4OJOpqSPiADdrsd77z1H1ySmdloxk/+OpojlSh+8vE6cV6HCwtQXFwsSmKb01xJppRZ7KzBnJp0r+TvjcOFBVhw//yg96qk5DiOHgk+kzdRuOkywd2EcWMxMCNdZBJWrn4LAzPSOSbjHPp2z144HA5s2JgrsjuLl7wsPvD7J1lwKf5aLE2OIPVVT8U+YdxYJCcn4eXl/8biJS/jUH4hbrzhWvHh
7+JhQ3HlFZfiw5yPsXjJy3h5+b9hNpuDBpcI9O+e2A3vrPmQHxLbmc1WDpfLJbI5/VNTcO01E7Duk8+weMnL+DDnY1w6eoRiqYxbpt6EbV/vCvo9bDQa8eubJ8Nut4v3x1ebt+HaayYEnQjCaDTiissvxfc//MgAr4Us8fGYcNWV4l6pv4/4vdt1Bfs++jDnY4y9fIxYxLy8vAJulwsVlf6lEZJ69cLwn12CPd/tQ+6WbY0uZ6MWrJQv/+BBMebOaDJhwQMP4ZOP14n2Pbt3Y8rNU0WQEhUVjfyDB0X7F5+vx9z75okg5Le/m4EDB34Q7erjm8xmRWmevJxQXZIJABMnXger1Sr67vh6O7JGjBQB3aTJU5CYmChKSVetXIHHn3wGFotFZJjvmD2z0ZkVLRYLLr3sMtTU1IgSSGNgLKK8TDBj4CCcOXMa69f/FwhkOfN27Qw6eycCk65ckpkpzmvxc8+GtFA8mijJTEhIwNEjh7HomaeQlJSMRx59TJzj9FtvEWPepL5Nnd+5MGnyFPTp21dc08cfexQzZs4CAuMP5ffxjdWv45prr2vxBC9EXZGmsrLKv5IpgEOHDmL48OHKHkRERNTlyRcu93q98Pl88Hg88Hg8cLvd4uFyuVBXVweNPlK9i07lcGEBst9fg7vn3ieCQWdghs6rr5mIbVs2Y8qvpjY7KUi4s9lsWPL8Ysy9b15IwSARNa9/SuMlzudal8ncEREREUnKyspQo1qCRZp8pKysFDV2e5NjzM4XuV9tbLY8taOpJ5SRP+Rr/hFR2zFzR0REdB4It8wdAjMibtr4pWLbggcfDrpWHBFRe+nMmTsGd0REROeBcAzuiIjOhc4c3LEsk4iIiIiIKAwwuCMiIiIiIgoDDO6IiIiIiIjCAIM7IiIiIiKiMMAJVYiIiM4DLZ1QpVtyf/UuiDpM6fEjfM+dRaXHjyAtLU29mcIAM3dERERERERhgMEdERERERFRGGBwR0REREREFAYUwZ1Wq4XH45FvIiIiIiIioi5AEdzFxsairKxMvomIiIioWafyvxEPIiI6NxTBXe/evfHTTz/hzJkzzOARERERERF1IYqlEGJiomG323HixAlUVlbC6/UqexMREVHYs1gsLZ6WXp6x65nBZZXCSV1dHQ4e+B5utwcpKano1r27ukubhbIUgsPhxpJX9qO4uBopKdGY+8ehMJn06m6dns1Wi2ef/w4VlXUteh2lZU6UlTnF1xkD4gEAJSU1eGXFAZTK2hY9MQrRUQbxtRqXQghfiuCOiIiIwpO0zh0Asc6d1+uF1+uFx+OWrXnnAeBr9oO2WnsFd9VlJ/D2vLEoP1EIAIjvnY5pi3MRndBb3VUo/Hodsh+dJL6+8OrbcP3/Wym+bm6f6ucDwJWzn8HIX/9ZfL3zvb/jqxUPiK/Vx5DIjyXvcyr/G7z34DVwVtlE38b2Ie8rnYfLUY3shZNRvOdLdfcGr6c9VVVWYv1/P0FFeTkAQKPRYPiIkbg482fqrm3SkuAOgAiIduadxutvHlJ3bVHQ1Bo5nxTh8/8dE19f8/O+mHRDqvj61RUHsHd/qfg6LjYC9993MSyWSKCR19KU7I+OYsPGnxTb7pszVAR4Ly//Ad8fqB9aJW8LhsFd+OJsmURERNRmPTOGi0druRzV+GTRdADAnW8fx51vHwcAvD1vLKrLTqh6+53K/waf/mMmLrz6Nty/3ospj+Xg+y9ex6f/mAWEsE/p+SmXjMd9aytx39pKpFwyHl+teAA73/s7IAvsrpz9DO5f78WMl3ah8OuPxDHkcv/9kAgiJdVlJ/DRk7fAGJ2AO98+jvvXe3Hh1bcpzlPiclRj02sLFEGgnHSe0nkYYyyI7ZmKSFOMumubuepcWP/px6goL8eIUaNxy63T0bt3EvJ27kBBfsOAyuv1YvNXm1BS4r/GZ9M1P++Lpf+8HEv/eTnm3TMMp045sPqtfHW3diEFdrfdOhBL/3k5brt1ID7/3zHk
fFIk2vfuLxXt8+4ZBmetB8tXHYDD4VbvLiR7vrPiZ5ck4r45Q8VDHrylpkQr+tP5i8EdERERdQrF321E8Z4vkTz0ckQn9EZ0Qm9cfP3tKD9RiB/+94a6OwDgm5yX4KyyYdDlUwAAKRdfhZRLxqPw649wKv+bZvfZM2M47nm/FL9Z9D8YTNEwmKLRf/g1AABr8QFUl53Ad5/+C8YYC1IzJwAAEvoMQo/0THEMyan8b1D49UfomaHMakUn9Mbtq/Jx+6p8kV2TzrfqzDG4HNWir3S+6n0EI732/sOvgcHU/h/ut2/bgoqKCvwsawQuuiQTUdHRuHLCBOh0Wuzb+526Ow4X5OPQjwdQeuaMuumsSrAYYYzUoai4CjZbrbq5TWy2WuzYeQopKdEYdmECAGDYhQlISYnGj4dscDjcmHRDKpb+83KMzOoBAEjqHYWePU04dcqBkhM1qj2GprTMiV49zcgYEC8ekk8+K8an64vRu1cU0vvHKZ5H5x8Gd0RERNQpHNySDQBITBmsboK1+IB6E6rLTuD4/i0wxlgQ3S1J0eassqG6tKTF+4RsuxSANUY6huSbnJfQIz0TWVPmKfoFUxo4hjwwczmqkffhC7jw6ttwwdhfK/obTNH4zaL/iSBU/tqloLM9nTp1EvkHf0Ryn77IHJ4ltpvNUUjum4oyqxVVVZWK53z33R4AQN+U+vLE80lFeR2czoYTEpacqMGpUw4MyohHelr7Bl/ywG72bYNw5+2DcfPk/k2WZFJ4Y3BHREREnZ46wxUKKYBqTNWZ+jFTLkc13l3wczw7UYuqM8dw39pKpI++EdEJvZE89HI4q2wo2r0BAFD200GcLtwt25N/3N73X7yOrJvuRaS58RLJT/8xC89O1OK7T/+FO98+rhjXt3vdyzhduBvDJ92teE4wpw7lofxEIXqkZyKhzyB1c5vt2LYVADBy9Gh1E/ql+cfGFcpKM4uOHEF5WRn6pvZDvMUi6332ldmccNZ6kJoSI8a4tRejUYe4+AgUF1dj3/f+MW77vi9DcXHD92bOJ0WY86ctWLr8B8y5Ywj+MLvhHxjaQh3Y9e5lhsmkx/grk9Vd6TzC4I6IiIjarCXr3BV+vQ7PTtSKx7sLft7iwK29SZmx+9d7EdO9L56fHCvG3F0950UxDu/ZiVp89ORvAEBkDKWMW8ol45Fy8VWqPStd//9W4v71Xlx8/e1YNi1ZjLmTyj/TR/8ypHGLUkayI0oyPW43zpw+jYEXDEZCt0R1M/r0TYFOp0XBoXz4fD5UVJRj+9YtMEQYcNnlV6i7nxWf/+8Y5vxpC+b8aQsWv7gPPXuaMOO3GepuLSbtc86ftuDVFQdgMukx949DkZISjdffPIQ5f9qCjZtLEBcbgbj4CBiNOvFcqTxzzh1DsHT5D1j0zz2tHnOn9u0ea4PAjggM7oiIiKgriOnet8VBTLcgpZhyMd37qjcBsnLMI998DpejWhH43b/ei/F//CecVTaRNZPGyWXddG/I55iaOQHGGAuO79+C6rIT+OF/b8BZXRZS1q6jSzJ1ej0mTfkVLh97pboJAGAymjDs4kxUVJQjJ/t9rPvwQ9TUVOPyK65EVHRor7+9ySdUWfrPy7HgT5c0OwNla5lMeiz40yXiWFddkYSKyjpcMNAS9JjpaXEYlBHfpjF3aondjBg9smeDwG7On7Zgy/aTir50fmFwR0RERGdV+ugbRaB0/3qvGEcmBVXBxsIlpgwWpZMv/qobTuV/oyiZlI99gyyr1tw+W0qaXdMYY8G43y+CwRQtyj+zH52EZydqxbIK33/xOv41M6PRmT7lrMUH4KyyYfXdI/DsRK1YduGrFQ80yGx2dEkmACR27w6NRqPeLPwsawQGDByEUqsVPp8XI0ePRtqAtmfKOht5wBisrNJmq8XadUcRFxuBsZf2Ujcr1NZ6UFrWPhO8pPSNxvRpGUEzdhUVdepNdB5hcEdERESdgjTTpZTNkkoV43unY8jPp6PW
UYXKU0WK8W/DJ90NY4xFlClKWTSpvLG5fVaXncC/ZmYoAqjGSh7l68+N+vUCUT458td/VgSrUx7LAQLr2N2+Kh+Rphi8u+DnikCvaPcGOKtsYhZPqVxTelw5+xkgsN6eFPxCNukKgpzf2aTRaHDlVeMx+w93Yvqs2Rh2caa6S6fx6ooDmPOnLWKpgua+DpW0GLmz1oPZtw0S4/tyPinCvAe2o/BwBQCg8HAFDuaXK2bYJOooDO6IiIiozdpjnTuDKRo3LPAvebBsWjKWTfNPDCEt0B1pikFsz1RFOWLPjOG4/v+twvdfvC6yZvLFwZvbZ/GejUgbeT2K93yJ5yfH4tmJWnz/xeuY8liOmOxECgBX3z1CsbB4qE7m5yF5yBiUnyjEsmnJIjN35exngi5i3hQpeJWCUzr7bLZaPPToTvzlb7vgrPVgzh1DxCyYDocbvXua0LOnCYtf3KcYAxjKYuWN6ZZgxMlTduQXlDf6+HaPFQBgNtWP+6Pzj6ayssqn3khEREThxeer/3Xv9Xrh8/ng9Xrh9Xrh8bjh8Xjg8XjgdnsA+NAt2T8jItHZUHr8SLPvOYfDjSWv7AeANgVKnUFLX0v2R0exYeNP6s0NpPSNxq2/GYA+yU1ndEuPH0FaWpp6M4UBBndERETnAQZ31Jm1JLgrLq5GSkp0SEFRZySVc1ZU1rXodZSWOVFW5lRvVgh1fTsGd+GLwR0REdF5gMEddWahBHfUfhjchS+OuSMiIiIiIgoDDO6IiIiIiIjCAIM7IiIiIiKiMMDgjoiIiIiIKAwwuCMiIiIiIgoDnC2TiIjoPNDS2TI5kx4RUdfDzB0REREREVEYYHBHREREREQUBhjcERERERERhQEGd0RERERERGGAwR0REREREVEYYHBHREREREQUBhjcERERERERhQEGd0RERERERGGAwR0REREREVEYYHBHREREREQUBhjcERERERERhQEGd0RERERERGGAwR0REREREVEYYHBHREREREQUBhjcERERERERhQEGd0RERERERGGAwR0REREREVEYYHBHREREREQUBhjcERERERERhQEGd0RERERERGGAwR0REREREVEYYHBHREREREQUBhjcERERERERhQEGd0RERERERGGAwR0REREREVEYYHBHREREREQUBhjcERERERERhQEGd0RERERERGGAwR0REREREVEYYHBHREREREQUBhjcERERERERhQEGd0RERERERGGAwR0REREREVEYYHBHREREREQUBhjcERERERERhQEGd0RERERERGGAwR0REREREVEYYHBHREREREQUBhjcERERERERhQEGd0RERERERGGAwR0RERHJ+NQbiIioi2BwR0REdJ7TaDTqTURE1AX9f23LeOaeMMrvAAAAAElFTkSuQmCC" - } - }, "cell_type": "markdown", "metadata": {}, "source": [ - "## What next?\n", + "## What's next?\n", "\n", "1) Navigate to the [Neptune Web App](https://scale.neptune.ai/) and visualize your runs in near real-time. See how each of the metrics evolve with each training step and epoch. \n", "2) [Filter through each metric](https://docs-beta.neptune.ai/charts#filtering-charts) quickly to find the metrics you're most interested in. 
\n", "3) Use [dashboards](https://docs-beta.neptune.ai/custom_dashboard) to create custom analysis of the layer-wise metrics like gradients and activations. \n", - "4) Create [dynamic charts](https://docs-beta.neptune.ai/chart_widget#dynamic-metric-selection) with advanced regex searching to add new layers to charts as you update and modify the model architecture.\n", - "\n", - "\n", - "### Advanced Regex searching\n", - "Navigate through hundreds of metrics super fast.\n", - "\n", - "TODO - can we add gifs to show some cool UI things?\n", - "\n", - "### Create dynamic charts\n", - "![image.png](attachment:image.png)" + "4) Create [dynamic charts](https://docs-beta.neptune.ai/chart_widget#dynamic-metric-selection) with advanced regex searching to add new layers to charts as you update and modify the model architecture." ] } ], From ffd0b0a933e7d667bfc0a089cbe4d766aacc68c3 Mon Sep 17 00:00:00 2001 From: LeoRoccoBreedt Date: Tue, 18 Mar 2025 10:36:32 +0100 Subject: [PATCH 058/125] style: update markdown display to work with colab --- .../pytorch/pytorch_text_model_debugging.ipynb | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/integrations-and-supported-tools/pytorch/pytorch_text_model_debugging.ipynb b/integrations-and-supported-tools/pytorch/pytorch_text_model_debugging.ipynb index 6e10b7f..17445f6 100644 --- a/integrations-and-supported-tools/pytorch/pytorch_text_model_debugging.ipynb +++ b/integrations-and-supported-tools/pytorch/pytorch_text_model_debugging.ipynb @@ -448,10 +448,10 @@ "source": [ "## What's next?\n", "\n", - "1) Navigate to the [Neptune Web App](https://scale.neptune.ai/) and visualize your runs in near real-time. See how each of the metrics evolve with each training step and epoch. \n", - "2) [Filter through each metric](https://docs-beta.neptune.ai/charts#filtering-charts) quickly to find the metrics you're most interested in. 
\n", - "3) Use [dashboards](https://docs-beta.neptune.ai/custom_dashboard) to create custom analysis of the layer-wise metrics like gradients and activations. \n", - "4) Create [dynamic charts](https://docs-beta.neptune.ai/chart_widget#dynamic-metric-selection) with advanced regex searching to add new layers to charts as you update and modify the model architecture." + "1. Navigate to the [Neptune Web App](https://scale.neptune.ai/) and visualize your runs in near real-time. See how each of the metrics evolve with each training step and epoch. \n", + "2. [Filter through each metric](https://docs-beta.neptune.ai/charts#filtering-charts) quickly to find the metrics you're most interested in. \n", + "3. Use [dashboards](https://docs-beta.neptune.ai/custom_dashboard) to create custom analysis of the layer-wise metrics like gradients and activations. \n", + "4. Create [dynamic charts](https://docs-beta.neptune.ai/chart_widget#dynamic-metric-selection) with advanced regex searching to add new layers to charts as you update and modify the model architecture." ] } ], From 275f0241df13e424ff5b401b25e6b29fc206db80 Mon Sep 17 00:00:00 2001 From: LeoRoccoBreedt Date: Tue, 18 Mar 2025 13:45:18 +0100 Subject: [PATCH 059/125] style: update HookManager section --- .../pytorch_text_model_debugging.ipynb | 109 ++++++++++-------- 1 file changed, 63 insertions(+), 46 deletions(-) diff --git a/integrations-and-supported-tools/pytorch/pytorch_text_model_debugging.ipynb b/integrations-and-supported-tools/pytorch/pytorch_text_model_debugging.ipynb index 17445f6..904594f 100644 --- a/integrations-and-supported-tools/pytorch/pytorch_text_model_debugging.ipynb +++ b/integrations-and-supported-tools/pytorch/pytorch_text_model_debugging.ipynb @@ -193,14 +193,12 @@ "metadata": {}, "source": [ "### Define PyTorch Model Architecture and Helpers\n", - "We define a simple LLM model architecture using PyTorch. 
Since this is a text-based example, we use an embedding layer, a LSTM layer and a fully connected layer. This architecture can be adjusted to your needs and increased in size when testing the workflow. To increase the size of the LSTM layers, change the `num_layers` parameter in the parameters dictionary or to increase the number of fully connected layers, update the mode architecture itself.\n", - "\n", - "This section also creates a `HookManager` class that allows us to capture the **activations** and **gradients** from each layer. You do not need to update this class as it will dynamically update according to the architecture of the model. This class is also resuable and can be copied into your own code." + "We define a simple LLM model architecture using PyTorch. Since this is a text-based example, we use an embedding layer, a LSTM layer and a fully connected layer. This architecture can be adjusted to your needs and increased in size when testing the workflow. To increase the size of the LSTM layers, change the `num_layers` parameter in the parameters dictionary or to increase the number of fully connected layers, update the mode architecture itself." 
] }, { "cell_type": "code", - "execution_count": 24, + "execution_count": null, "metadata": {}, "outputs": [], "source": [ @@ -218,9 +216,68 @@ " out = self.fc1(lstm_out) # Use the last output from the LSTM\n", " return out\n", "\n", + "# Function to evaluate the model after each epoch/step\n", + "def evaluate(model, val_dataloader, criterion, device, vocab_size):\n", + " model.eval() # Set the model to evaluation mode\n", + " total_loss = 0\n", + " with torch.no_grad(): # Disable gradient calculation for validation\n", + " for batch in val_dataloader:\n", + " input_ids = batch['input_ids'].to(device)\n", + " labels = batch['labels'].to(device)\n", + "\n", + " # Forward pass for validation\n", + " logits = model(input_ids) # Shape: (batch_size, seq_len, vocab_size)\n", + " \n", + " # Calculate the loss\n", + " loss = criterion(logits.view(-1, vocab_size), labels.view(-1))\n", + " total_loss += loss.item()\n", + "\n", + " avg_val_loss = total_loss / len(val_dataloader)\n", + " return avg_val_loss" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Create Tracking Class (HooKManager)\n", + "\n", + "This section creates a `HookManager` class that allows us to capture the **activations** and **gradients** from each layer. This class accepts a PyTorch model object as an input. This class will automatically capture the gradients and activations in each layer of the model. Below, you can see a pseudo implementation:\n", + "\n", + "```python\n", + "# Initialize model\n", + "model = your_ModelClass()\n", + "# Register hooks\n", + "hm = HookManager(model)\n", + "hm.register_hooks()\n", + "\n", + "# Training loop\n", + "for epoch in range(3):\n", + " \n", + " # Forward pass, e.g. model.train()\n", + " # Backward pass, e.g. loss.backward()\n", + " \n", + " activations = hm.get_activations()\n", + " gradients = hm.get_gradients()\n", + "\n", + " # Log values (mean, std, etc.) 
to Neptune\n", + "```\n", + "\n", + "_Important_: The `HookManager` class can be used in your own training script as it only accepts a model object as input." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ "# A class to manage hooks for activations and gradients\n", "class HookManager:\n", " def __init__(self, model):\n", + " if not isinstance(model, nn.Module):\n", + " raise TypeError(\"The model must be a PyTorch model\")\n", + " \n", " self.model = model\n", " self.hooks = []\n", " self.activations = {}\n", @@ -260,26 +317,7 @@ "\n", " # Function to get gradients\n", " def get_gradients(self):\n", - " return self.gradients\n", - "\n", - "# Function to evaluate the model after each epoch/step\n", - "def evaluate(model, val_dataloader, criterion, device, vocab_size):\n", - " model.eval() # Set the model to evaluation mode\n", - " total_loss = 0\n", - " with torch.no_grad(): # Disable gradient calculation for validation\n", - " for batch in val_dataloader:\n", - " input_ids = batch['input_ids'].to(device)\n", - " labels = batch['labels'].to(device)\n", - "\n", - " # Forward pass for validation\n", - " logits = model(input_ids) # Shape: (batch_size, seq_len, vocab_size)\n", - " \n", - " # Calculate the loss\n", - " loss = criterion(logits.view(-1, vocab_size), labels.view(-1))\n", - " total_loss += loss.item()\n", - "\n", - " avg_val_loss = total_loss / len(val_dataloader)\n", - " return avg_val_loss" + " return self.gradients" ] }, { @@ -326,28 +364,7 @@ "metadata": {}, "source": [ "### Execute model training loop\n", - "In this loop, we configure the `HookManager` and register the hooks. In your training loop, you will need to use the `get_` methods to retrieve the stored values for the activations and gradients after the forward and backward passes are complete. 
Below, you can see a pseudo implementation:\n", - "\n", - "```python\n", - "# Initialize model\n", - "model = your_ModelClass()\n", - "# Register hooks\n", - "hm = HookManager(model)\n", - "hm.register_hooks()\n", - "\n", - "# Training loop\n", - "for epoch in range(3):\n", - " \n", - " # Forward pass, e.g. model.train()\n", - " # Backward pass, e.g. loss.backward()\n", - " \n", - " activations = hm.get_activations()\n", - " gradients = hm.get_gradients()\n", - "\n", - " # Log values (mean, std, etc.) to Neptune\n", - "```\n", - "\n", - "Important: The `HookManager` class can be used in your own training script as it only accepts a model object as input." + "In this loop, we configure the `HookManager` and register the hooks. In your training loop, you will need to use the `get_` methods to retrieve the stored values for the activations and gradients after the forward and backward passes are complete." ] }, { From 295c9b2a53c0f2a27a6288818245dc582a0049b1 Mon Sep 17 00:00:00 2001 From: LeoRoccoBreedt Date: Wed, 19 Mar 2025 09:57:49 +0100 Subject: [PATCH 060/125] style: update sections headings --- .../pytorch/pytorch_text_model_debugging.ipynb | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/integrations-and-supported-tools/pytorch/pytorch_text_model_debugging.ipynb b/integrations-and-supported-tools/pytorch/pytorch_text_model_debugging.ipynb index 904594f..6e2755c 100644 --- a/integrations-and-supported-tools/pytorch/pytorch_text_model_debugging.ipynb +++ b/integrations-and-supported-tools/pytorch/pytorch_text_model_debugging.ipynb @@ -10,7 +10,7 @@ " \"Open \n", "\n", "\n", - "## Logging and Visualizing debugging metrics with Neptune\n", + "## Logging and visualizing debugging metrics with Neptune\n", "**Global aggregated metrics**, such as overall loss and accuracy, provide a high-level view of a model's performance and help track progress toward the target task. 
These metrics are essential for assessing the model’s overall success and ensuring training is on the right path. However, **layer-wise metrics** offer deeper insights into how individual layers contribute to learning. By monitoring metrics like gradients and activations at each layer, we can identify specific issues (e.g., vanishing/exploding gradients) and optimize training for individual layers. This is crucial for deep networks, where different layers learn distinct types of features.\n", "\n", "The main drawback of tracking layer-wise metrics is the **data overload** they can generate, especially with large/foundation models. Logging metrics for every layer can create significant data volume which researchers needs to log and monitor. However, with efficient tracking strategies and tools like Neptune, this challenge can be managed, allowing for detailed insights without overwhelming the training process.\n", @@ -58,7 +58,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "### Install Dependencies and Import Libraries" + "### Install dependencies and import libraries" ] }, { @@ -93,7 +93,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "### Initialize Parameters" + "### Initialize parameters" ] }, { @@ -156,7 +156,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "## Create DataLoader Objects\n", + "## Create `DataLoader` objects\n", "* To execute the models with PyTorch, we convert the training and validation datasets to tensors and then setup DataLoader for easier batching in our training loop.\n", "* The model architecture requires the vocabulary size as an input and this we calculate the max token from the dataset." ] @@ -192,7 +192,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "### Define PyTorch Model Architecture and Helpers\n", + "### Define PyTorch model architecture and helpers\n", "We define a simple LLM model architecture using PyTorch. 
Since this is a text-based example, we use an embedding layer, a LSTM layer and a fully connected layer. This architecture can be adjusted to your needs and increased in size when testing the workflow. To increase the size of the LSTM layers, change the `num_layers` parameter in the parameters dictionary or to increase the number of fully connected layers, update the mode architecture itself." ] }, @@ -240,7 +240,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "### Create Tracking Class (HooKManager)\n", + "### Create tracking class\n", "\n", "This section creates a `HookManager` class that allows us to capture the **activations** and **gradients** from each layer. This class accepts a PyTorch model object as an input. This class will automatically capture the gradients and activations in each layer of the model. Below, you can see a pseudo implementation:\n", "\n", @@ -324,8 +324,8 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "## Setup Model Training\n", - "### Initialize Neptune Run object and Log Hyperparameters" + "## Setup model training\n", + "### Initialize Neptune run object and log hyperparameters" ] }, { From 585184245a4369201f3dfc44ae05429a188ca7de Mon Sep 17 00:00:00 2001 From: LeoRoccoBreedt Date: Wed, 19 Mar 2025 10:12:22 +0100 Subject: [PATCH 061/125] style: condense intro --- .../pytorch_text_model_debugging.ipynb | 30 +++++++------------ 1 file changed, 10 insertions(+), 20 deletions(-) diff --git a/integrations-and-supported-tools/pytorch/pytorch_text_model_debugging.ipynb b/integrations-and-supported-tools/pytorch/pytorch_text_model_debugging.ipynb index 6e2755c..443e417 100644 --- a/integrations-and-supported-tools/pytorch/pytorch_text_model_debugging.ipynb +++ b/integrations-and-supported-tools/pytorch/pytorch_text_model_debugging.ipynb @@ -11,36 +11,26 @@ "\n", "\n", "## Logging and visualizing debugging metrics with Neptune\n", - "**Global aggregated metrics**, such as overall loss and accuracy, provide a high-level 
view of a model's performance and help track progress toward the target task. These metrics are essential for assessing the model’s overall success and ensuring training is on the right path. However, **layer-wise metrics** offer deeper insights into how individual layers contribute to learning. By monitoring metrics like gradients and activations at each layer, we can identify specific issues (e.g., vanishing/exploding gradients) and optimize training for individual layers. This is crucial for deep networks, where different layers learn distinct types of features.\n", - "\n", - "The main drawback of tracking layer-wise metrics is the **data overload** they can generate, especially with large/foundation models. Logging metrics for every layer can create significant data volume which researchers needs to log and monitor. However, with efficient tracking strategies and tools like Neptune, this challenge can be managed, allowing for detailed insights without overwhelming the training process.\n", + "Global metrics (e.g., loss, accuracy) provide a high-level performance snapshot and ensure training is on course. However, for large or foundation models, monitoring layer-wise metrics—such as gradients and activations—delivers critical insights into how each layer learns. This level of detail helps identify issues (e.g., vanishing/exploding gradients) and fine-tune individual layers for better overall performance.\n", + "The **main challenge** is the sheer volume of data generated by layer-wise logging. Fortunately, Neptune is built for hyperscale tracking, enabling you to efficiently capture, organize, and analyze metrics from every layer—no matter how large your model—without disrupting the training process.\n", "\n", "### Introduction\n", - "See how Neptune Scale can be used for pre-training models like foundation models by tracking hundreds of metrics. 
This example is designed to be used as a code recipe for you to re-use sections with your own code to edit or adapt to your own model training needs. \n", + "This example is designed to be used as a code recipe for you to re-use sections with your own code to edit or adapt to your own model training needs. \n", "\n", "This guide will show you how to:\n", "- Initialize the **Neptune Run** object and log configuration parameters\n", - "- Create a **reuseable class** to hook layer-wise metrics\n", + "- Create a **reuseable class** to hook layer-wise metrics (`HookManager`)\n", "- Log **aggregated metrics** such as loss and accuracy\n", - "- Log **debugging metrics** per layer during model training such as;\n", - " * Activations\n", - " * Gradients\n", - " * Parameters (Weights and Biases)\n", - "\n", - "There are also several other metrics that we can capture to understanding our model training in more depth, but we will cover those in another tutorial.\n", - "\n", - "### Key metrics to capture from each layer:\n", + "- Log **layer-wise metrics** to debug model traing such as;\n", "\n", - "| **Metric** | **Demonstrated in Notebook** | **What it Shows** | **How to Capture** |\n", + "| **Metric** | **Demonstrated** | **What it Shows** | **How to Capture** |\n", "|-----------------------------------|--------------------------------------|--------------------------------------------------------------------------------------------------------------|---------------------------------------------------------------|\n", - "| **Activations** | Yes | Provides insight into how the model is processing data. Dead or exploding activations can indicate issues with training stability. | Use hooks to capture activations after each layer. |\n", - "| **Gradients** | Yes | Essential for diagnosing vanishing or exploding gradients. Small gradients may indicate vanishing gradients, while large ones can signal instability. | Use hooks to capture gradients during backpropagation. 
|\n", - "| **Weights and Biases** | Yes | Tracks how the model’s parameters evolve during training. Large or small weights may indicate the need for better regularization or adjustments in learning rate. | Extract directly from the model’s parameters. |\n", + "| **Activations** | Yes | Dead or exploding activations can indicate issues with training stability. | `HookManager` |\n", + "| **Gradients** | Yes | Essential for diagnosing vanishing or exploding gradients. Small gradients may indicate vanishing gradients, while large ones can signal instability. | `HookManager` |\n", + "| **Parameters** | Yes | Tracks how the model’s parameters evolve during training. Large or small weights may indicate the need for better regularization or adjustments in learning rate. | Extract directly from the model’s parameters. |\n", "| **Loss** | No | Identifies which parts of the network contribute more to the overall loss, aiding debugging and optimization. | Monitor outputs from each layer and compare with the target. |\n", "| **Learning Rate** | No | Helpful if using techniques like Layer-wise Learning Rate Decay (L2LRD). Tracking this can provide insight into the layer-specific learning rate. | Manually track based on optimizer settings. |\n", - "| **Output Norms** | No | The L2-norm of layer outputs can highlight issues like gradient explosion or vanishing gradients. | Compute the L2-norm for each layer’s output. |\n", - "| **Activation Distributions** | No | Helps diagnose saturation issues, especially with ReLU activations that may lead to dead neurons. | Visualize or compute statistical summaries using tools like matplotlib or seaborn. |\n", - "| **Feature Maps (for Convolutional Layers)** | No | Offers insights into how convolutional layers detect specific patterns in the data. | Visualize feature maps after convolutional layers using libraries like matplotlib. 
|\n" + "| **Output Norms** | No | The L2-norm of layer outputs can highlight issues like gradient explosion or vanishing gradients. | Compute the L2-norm for each layer’s output. |\n" ] }, { From ea30d40c4ba400049fb5601f58a31b03b0c06f91 Mon Sep 17 00:00:00 2001 From: LeoRoccoBreedt Date: Wed, 19 Mar 2025 18:27:31 +0100 Subject: [PATCH 062/125] style: add links for active run and update ending steps --- .../pytorch_text_model_debugging.ipynb | 54 +++++++++++++------ 1 file changed, 38 insertions(+), 16 deletions(-) diff --git a/integrations-and-supported-tools/pytorch/pytorch_text_model_debugging.ipynb b/integrations-and-supported-tools/pytorch/pytorch_text_model_debugging.ipynb index 443e417..3b6fcff 100644 --- a/integrations-and-supported-tools/pytorch/pytorch_text_model_debugging.ipynb +++ b/integrations-and-supported-tools/pytorch/pytorch_text_model_debugging.ipynb @@ -63,7 +63,7 @@ }, { "cell_type": "code", - "execution_count": 3, + "execution_count": 2, "metadata": {}, "outputs": [], "source": [ @@ -76,7 +76,8 @@ "from collections import Counter\n", "from datasets import load_dataset\n", "\n", - "from neptune_scale import Run" + "from neptune_scale import Run\n", + "import os" ] }, { @@ -88,7 +89,7 @@ }, { "cell_type": "code", - "execution_count": 2, + "execution_count": 3, "metadata": {}, "outputs": [], "source": [ @@ -118,7 +119,7 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 4, "metadata": {}, "outputs": [ { @@ -153,7 +154,7 @@ }, { "cell_type": "code", - "execution_count": 21, + "execution_count": 5, "metadata": {}, "outputs": [ { @@ -188,7 +189,7 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 6, "metadata": {}, "outputs": [], "source": [ @@ -258,7 +259,7 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 7, "metadata": {}, "outputs": [], "source": [ @@ -320,17 +321,27 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 8, "metadata": {}, - 
"outputs": [], + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "View active run: https://scale.neptune.ai/leo/pytorch-tutorial/-/run/?customId=pytorch-text-233cfd51-ae85-4e8f-af8d-b5ce553b7aec\n" + ] + } + ], "source": [ "# Define Neptune parameters\n", "from neptune_scale import Run\n", "from uuid import uuid4\n", "\n", + "custom_run_id = f\"pytorch-text-{uuid4()}\" # Create your own custom run_id\n", + "\n", "run = Run(\n", - " project = \"examples/pytorch-tutorial\",\n", - " run_id=f\"pytorch-text-{uuid4()}\"\n", + " project = \"leo/pytorch-tutorial\",\n", + " run_id=custom_run_id\n", " )\n", "\n", "run.log_configs(\n", @@ -346,7 +357,9 @@ ")\n", "\n", "run.add_tags(tags=[params[\"optimizer\"]], group_tags=True)\n", - "run.add_tags(tags=[\"text\", \"LLM\", \"Simple\"])" + "run.add_tags(tags=[\"text\", \"LLM\", \"Simple\"])\n", + "\n", + "print(f\"View active run: https://scale.neptune.ai/{os.getenv('NEPTUNE_PROJECT')}/-/run/?customId={custom_run_id}\")" ] }, { @@ -363,6 +376,10 @@ "metadata": {}, "outputs": [], "source": [ + "# Create links to access active training run. Note - this is not required for the example to run, but for ease of illustration of the tutorial.\n", + "print(f\"View training charts: https://scale.neptune.ai/{os.getenv('NEPTUNE_PROJECT')}/-/run/?customId={custom_run_id}&detailsTab=charts\")\n", + "\n", + "# Training setup\n", "debug_metrics = {}\n", "\n", "# Initialize model and optimizer\n", @@ -454,11 +471,16 @@ "metadata": {}, "source": [ "## What's next?\n", + "While the model is training, you can start using our _super-snappy_ web UI to browse your metrics and create custom analyses and visualizations.\n", + "\n", + "### What to do\n", + "1. Navigate to the charts tab of the active run to visualize the large number of metrics being logged in near real time.\n", + "2. Filter the metrics using our [advanced regex searching capabilities](https://docs-beta.neptune.ai/charts#filtering-charts). 
For example, use the following query in the search bar; `.*gradient+.*fc\\d`. This query will filter all metrics for the gradients of the fully connected layers. The more FC layers, the more charts will appear. \n", + "3. Export this filter to a [dashboard](https://docs-beta.neptune.ai/custom_dashboard), using the _**Duplicate to**_ button. The saved dashboard will now only display these metrics during training.\n", + "4. Using our [dynamic metric selection](https://docs-beta.neptune.ai/chart_widget#dynamic-metric-selection) feature, you can update the chart widget to display all fully connected layers gradients in one chart. Use the same query as above in the chart widget.\n", + "5. Create a [custom report](https://docs-beta.neptune.ai/reports) to outline the model training, global metrics, debugging metrics and more.\n", "\n", - "1. Navigate to the [Neptune Web App](https://scale.neptune.ai/) and visualize your runs in near real-time. See how each of the metrics evolve with each training step and epoch. \n", - "2. [Filter through each metric](https://docs-beta.neptune.ai/charts#filtering-charts) quickly to find the metrics you're most interested in. \n", - "3. Use [dashboards](https://docs-beta.neptune.ai/custom_dashboard) to create custom analysis of the layer-wise metrics like gradients and activations. \n", - "4. Create [dynamic charts](https://docs-beta.neptune.ai/chart_widget#dynamic-metric-selection) with advanced regex searching to add new layers to charts as you update and modify the model architecture." 
+ "You can see a generic example of the end result [here](https://scale.neptune.ai/o/examples/org/LLM-Pretraining/reports/9e6a2cad-77e7-42df-9d64-28f07d37e908).\n" ] } ], From 471beb7224b49e3a27cb5422496437cc1467154f Mon Sep 17 00:00:00 2001 From: LeoRoccoBreedt Date: Wed, 19 Mar 2025 18:42:43 +0100 Subject: [PATCH 063/125] refactor: log global and debugging metrics during training at each step and only validation loss logged every 50 steps --- .../pytorch_text_model_debugging.ipynb | 76 ++++++++++--------- 1 file changed, 42 insertions(+), 34 deletions(-) diff --git a/integrations-and-supported-tools/pytorch/pytorch_text_model_debugging.ipynb b/integrations-and-supported-tools/pytorch/pytorch_text_model_debugging.ipynb index 3b6fcff..9289af0 100644 --- a/integrations-and-supported-tools/pytorch/pytorch_text_model_debugging.ipynb +++ b/integrations-and-supported-tools/pytorch/pytorch_text_model_debugging.ipynb @@ -63,7 +63,7 @@ }, { "cell_type": "code", - "execution_count": 2, + "execution_count": null, "metadata": {}, "outputs": [], "source": [ @@ -89,7 +89,7 @@ }, { "cell_type": "code", - "execution_count": 3, + "execution_count": 2, "metadata": {}, "outputs": [], "source": [ @@ -119,7 +119,7 @@ }, { "cell_type": "code", - "execution_count": 4, + "execution_count": 3, "metadata": {}, "outputs": [ { @@ -154,7 +154,7 @@ }, { "cell_type": "code", - "execution_count": 5, + "execution_count": 4, "metadata": {}, "outputs": [ { @@ -189,7 +189,7 @@ }, { "cell_type": "code", - "execution_count": 6, + "execution_count": 5, "metadata": {}, "outputs": [], "source": [ @@ -259,7 +259,7 @@ }, { "cell_type": "code", - "execution_count": 7, + "execution_count": 6, "metadata": {}, "outputs": [], "source": [ @@ -321,14 +321,14 @@ }, { "cell_type": "code", - "execution_count": 8, + "execution_count": 7, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ - "View active run: 
https://scale.neptune.ai/leo/pytorch-tutorial/-/run/?customId=pytorch-text-d328336d-8fe1-4271-bf0d-53e4e5ea36a2\n"
     ]
    }
   ],
   "source": [
@@ -421,33 +421,31 @@
     "                    total_loss += loss.item()\n",
     "                    print(f\"Step {step_counter} / {len(train_dataloader)}, Loss: {loss.item()}\")\n",
     "\n",
-    "            if step_counter % 5 == 0: # Do not need to log validation at every step, although we can\n",
+    "        # Track activations\n",
+    "        activations = hook_manager.get_activations()\n",
+    "        for layer, activation in activations.items():\n",
+    "            if layer is not None:\n",
+    "                debug_metrics[f\"debug/activation/{layer}_mean\"] = activation[0].mean().item()\n",
+    "                debug_metrics[f\"debug/activation/{layer}_std\"] = activation[0].std().item()\n",
+    "\n",
+    "        # Track gradients with hooks\n",
+    "        gradients = hook_manager.get_gradients()\n",
+    "        for layer, gradient in gradients.items():\n",
+    "            debug_metrics[f\"debug/gradient/{layer}_mean\"] = gradient.mean().item()\n",
+    "            print(f\"Gradients for {layer}: {gradient.mean().item()}\") # You can replace to use mean(), sum(), max() or min()\n",
+    "            # simplified_gradient = gradient.mean(dim=(0, 1))\n",
+    "            # print(f\"Summed Gradient for {layer}: {simplified_gradient}\")\n",
+    "\n",
+    "        # Track gradients per layer at each epoch\n",
+    "        for layer, param in model.named_parameters():\n",
+    "            if param is not None:\n",
+    "                debug_metrics[f\"debug/parameters/{layer}_std\"] = param.grad.std().item()\n",
+    "                debug_metrics[f\"debug/parameters/{layer}_mean\"] = param.grad.mean().item()\n",
+    "                debug_metrics[f\"debug/parameters/{layer}_norm\"] = param.grad.norm().item() # L2 norm (Euclidean norm) of the gradients\n",
+    "\n",
+    "        if step_counter % 50 == 0: # Log validation loss at every 50 steps\n",
+    "            val_loss = evaluate(model, val_dataloader, criterion, device, vocab_size)\n",
-    "                print(f\"Step {step_counter}, Val_loss: {val_loss}\")\n",
-    "\n",
-    "                # 
Track activations\n", - " activations = hook_manager.get_activations()\n", - " for layer, activation in activations.items():\n", - " if layer is not None:\n", - " debug_metrics[f\"debug/activation/{layer}_mean\"] = activation[0].mean().item()\n", - " debug_metrics[f\"debug/activation/{layer}_std\"] = activation[0].std().item()\n", - "\n", - " # Track gradients with hooks\n", - " gradients = hook_manager.get_gradients()\n", - " for layer, gradient in gradients.items():\n", - " debug_metrics[f\"debug/gradient/{layer}_mean\"] = gradient.mean().item()\n", - " print(f\"Gradients for {layer}: {gradient.mean().item()}\") # You can replace to use mean(), sum(), max() or min()\n", - " # simplified_gradient = gradient.mean(dim=(0, 1))\n", - " # print(f\"Summed Gradient for {layer}: {simplified_gradient}\")\n", - "\n", - " # Track gradients per layer at each epoch\n", - " for layer, param in model.named_parameters():\n", - " if param is not None:\n", - " debug_metrics[f\"debug/parameters/{layer}_std\"] = param.grad.std().item()\n", - " debug_metrics[f\"debug/parameters/{layer}_mean\"] = param.grad.mean().item()\n", - " debug_metrics[f\"debug/parameters/{layer}_norm\"] = param.grad.norm().item() # L2 norm (Euclidean norm) of the gradients\n", - "\n", - " # Output loss for this epoch\n", + "\n", " run.log_metrics(\n", " data = {\n", " \"metrics/train/loss\": loss.item(),\n", @@ -457,6 +455,16 @@ " },\n", " step = step_counter\n", " )\n", + " else: # Log training loss and debugging metrics for each step\n", + " run.log_metrics(\n", + " data = {\n", + " \"metrics/train/loss\": loss.item(),\n", + " \"epoch/value\": epoch,\n", + " **debug_metrics\n", + " },\n", + " step = step_counter\n", + " )\n", + " \n", " print(f\"Epoch {epoch + 1}, Loss: {total_loss / len(train_dataloader)}\")\n", "\n", "# test_loss = evaluate_model(model, test_input, test_target, params[\"vocab_size\"])\n", From b2732cf9aa3dff0dabb7b53d581864ba5be7153b Mon Sep 17 00:00:00 2001 From: LeoRoccoBreedt Date: Thu, 
20 Mar 2025 09:42:37 +0100 Subject: [PATCH 064/125] style: update urls to use experiment_name --- .../pytorch_text_model_debugging.ipynb | 38 ++++++++++++------- 1 file changed, 25 insertions(+), 13 deletions(-) diff --git a/integrations-and-supported-tools/pytorch/pytorch_text_model_debugging.ipynb b/integrations-and-supported-tools/pytorch/pytorch_text_model_debugging.ipynb index 9289af0..106c3db 100644 --- a/integrations-and-supported-tools/pytorch/pytorch_text_model_debugging.ipynb +++ b/integrations-and-supported-tools/pytorch/pytorch_text_model_debugging.ipynb @@ -63,9 +63,18 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 3, "metadata": {}, - "outputs": [], + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "c:\\Users\\leo.breedt\\miniconda3\\envs\\neptune_scale_py_312_base\\Lib\\site-packages\\tqdm\\auto.py:21: TqdmWarning: IProgress not found. Please update jupyter and ipywidgets. See https://ipywidgets.readthedocs.io/en/stable/user_install.html\n", + " from .autonotebook import tqdm as notebook_tqdm\n" + ] + } + ], "source": [ "# Import libraries\n", "import torch\n", @@ -89,7 +98,7 @@ }, { "cell_type": "code", - "execution_count": 2, + "execution_count": 4, "metadata": {}, "outputs": [], "source": [ @@ -119,7 +128,7 @@ }, { "cell_type": "code", - "execution_count": 3, + "execution_count": 5, "metadata": {}, "outputs": [ { @@ -154,7 +163,7 @@ }, { "cell_type": "code", - "execution_count": 4, + "execution_count": 6, "metadata": {}, "outputs": [ { @@ -189,7 +198,7 @@ }, { "cell_type": "code", - "execution_count": 5, + "execution_count": 7, "metadata": {}, "outputs": [], "source": [ @@ -259,7 +268,7 @@ }, { "cell_type": "code", - "execution_count": 6, + "execution_count": 8, "metadata": {}, "outputs": [], "source": [ @@ -321,14 +330,14 @@ }, { "cell_type": "code", - "execution_count": 7, + "execution_count": 22, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ - 
"View active run: https://scale.neptune.ai/leo/pytorch-tutorial/-/run/?customId=pytorch-text-d328336d-8fe1-4271-bf0d-53e4e5ea36a2\n" + "View current experiment: https://scale.neptune.ai/leo/pytorch-tutorial/runs/details?viewId=standard-view&runIdentificationKey=pytorch-text&type=experiment\n" ] } ], @@ -337,11 +346,14 @@ "from neptune_scale import Run\n", "from uuid import uuid4\n", "\n", + "project = \"leo/pytorch-tutorial\"\n", "custom_run_id = f\"pytorch-text-{uuid4()}\" # Create your own custom run_id\n", + "experiment_name = \"pytorch-text\" # Create a run that is the head of an experiment. This will also be used for forking.\n", "\n", "run = Run(\n", - " project = \"leo/pytorch-tutorial\",\n", - " run_id=custom_run_id\n", + " project = project,\n", + " run_id=custom_run_id,\n", + " experiment_name = experiment_name,\n", " )\n", "\n", "run.log_configs(\n", @@ -359,7 +371,7 @@ "run.add_tags(tags=[params[\"optimizer\"]], group_tags=True)\n", "run.add_tags(tags=[\"text\", \"LLM\", \"Simple\"])\n", "\n", - "print(f\"View active run: https://scale.neptune.ai/{os.getenv('NEPTUNE_PROJECT')}/-/run/?customId={custom_run_id}\")" + "print(f\"View current experiment: https://scale.neptune.ai/{project}/runs/details?viewId=standard-view&runIdentificationKey={experiment_name}&type=experiment\")" ] }, { @@ -377,7 +389,7 @@ "outputs": [], "source": [ "# Create links to access active training run. 
Note - this is not required for the example to run, but for ease of illustration of the tutorial.\n", - "print(f\"View training charts: https://scale.neptune.ai/{os.getenv('NEPTUNE_PROJECT')}/-/run/?customId={custom_run_id}&detailsTab=charts\")\n", + "print(f\"View training charts: https://scale.neptune.ai/{project}/-/run/?customId={custom_run_id}&detailsTab=charts\")\n", "\n", "# Training setup\n", "debug_metrics = {}\n", From c34dca31bfee3d175de3dc0633b49066d23e753a Mon Sep 17 00:00:00 2001 From: LeoRoccoBreedt Date: Thu, 20 Mar 2025 10:02:30 +0100 Subject: [PATCH 065/125] chore: removed unused notebook --- .../pytorch_image_classification.ipynb | 585 ------------------ 1 file changed, 585 deletions(-) delete mode 100644 integrations-and-supported-tools/pytorch/pytorch_image_classification.ipynb diff --git a/integrations-and-supported-tools/pytorch/pytorch_image_classification.ipynb b/integrations-and-supported-tools/pytorch/pytorch_image_classification.ipynb deleted file mode 100644 index 01ca6d4..0000000 --- a/integrations-and-supported-tools/pytorch/pytorch_image_classification.ipynb +++ /dev/null @@ -1,585 +0,0 @@ -{ - "cells": [ - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "# Neptune + Pytorch\n", - "## Logging and visualizing debugging metrics in PyTorch\n", - "\n", - "Introduction\n", - "\n", - "See how Neptune Scale can be used for foundation model traning when you are required to track a large number of metrics across your transformers architecture. \n", - "\n", - "This guide will show you how to:\n", - "- Initialize the Neptune Run object and log configuration parameters\n", - "- Log standard loss and accuracy metrics to Neptune\n", - "- Log debugging metrics during model training such as;\n", - " * Activations per layer\n", - " * Gradients (mean and std) per layer" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Before you start\n", - "\n", - " 1. Create a Neptune Scale account. 
[Register →](https://neptune.ai/early-access)\n", - " 2. Create a Neptune project that you will use for tracking metadata. For instructions, see [Projects](https://docs-beta.neptune.ai/projects/) in the Neptune Scale docs.\n", - " 3. Install and configure Neptune Scale for logging metadata. For instructions, see [Get started](https://docs-beta.neptune.ai/setup) in the Neptune Scale docs." - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### Install Neptune and Dependencies" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "# Install dependencies\n", - "! pip install -q -U neptune_scale torch torchvision" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "# TODO - update config to include model architecture\n", - "# TODO - Add more hyperparameters\n", - "# TODO - look at CNN layers\n", - "# TODO - output and log the model architecture\n", - "# TODO - check loss and accuracy calculations\n", - "# TODO - clean up the evaluation function to exclude tracking gradients\n", - "# TODO - do not use group tags\n", - "# TODO - track the input features\n", - "# TODO - clean the training loop of commented out code that is unused\n", - "# TODO - add batchnormalization and drop out layers to improve the model" - ] - }, - { - "cell_type": "code", - "execution_count": 1, - "metadata": {}, - "outputs": [], - "source": [ - "import torch\n", - "import torch.nn as nn\n", - "import torch.optim as optim\n", - "import torch.nn.functional as F\n", - "from torch.utils.data import DataLoader\n", - "from torchvision import datasets, transforms\n", - "import numpy as np" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Set Hyperparameters for Training" - ] - }, - { - "cell_type": "code", - "execution_count": 2, - "metadata": {}, - "outputs": [], - "source": [ - "params = {\n", - " \"optimizer\": \"Adam\",\n", - " 
\"batch_size\": 256,\n", - " \"learning_rate\": 0.01,\n", - " \"epochs\": 5, \n", - " \"device\": torch.device(\"cuda\" if torch.cuda.is_available() else \"cpu\"),\n", - " \"input_features\": 256,\n", - " \"n_classes\": 10,\n", - " \"input_size\": 28 * 28\n", - "}" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Download and transform the data for training\n", - "In this example, we will be using the MINST dataset as part of the PyTorch library for illustration. We create a train, validation and test dataset and apply a transformation. " - ] - }, - { - "cell_type": "code", - "execution_count": 3, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Downloading http://yann.lecun.com/exdb/mnist/train-images-idx3-ubyte.gz\n", - "Failed to download (trying next):\n", - "HTTP Error 404: Not Found\n", - "\n", - "Downloading https://ossci-datasets.s3.amazonaws.com/mnist/train-images-idx3-ubyte.gz\n", - "Downloading https://ossci-datasets.s3.amazonaws.com/mnist/train-images-idx3-ubyte.gz to ./data/MNIST/raw/train-images-idx3-ubyte.gz\n" - ] - }, - { - "name": "stderr", - "output_type": "stream", - "text": [ - "100%|██████████| 9912422/9912422 [00:11<00:00, 883546.49it/s] \n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Extracting ./data/MNIST/raw/train-images-idx3-ubyte.gz to ./data/MNIST/raw\n", - "\n", - "Downloading http://yann.lecun.com/exdb/mnist/train-labels-idx1-ubyte.gz\n", - "Failed to download (trying next):\n", - "HTTP Error 404: Not Found\n", - "\n", - "Downloading https://ossci-datasets.s3.amazonaws.com/mnist/train-labels-idx1-ubyte.gz\n", - "Downloading https://ossci-datasets.s3.amazonaws.com/mnist/train-labels-idx1-ubyte.gz to ./data/MNIST/raw/train-labels-idx1-ubyte.gz\n" - ] - }, - { - "name": "stderr", - "output_type": "stream", - "text": [ - "100%|██████████| 28881/28881 [00:00<00:00, 29159.66it/s]\n" - ] - }, - { - "name": "stdout", - "output_type": 
"stream", - "text": [ - "Extracting ./data/MNIST/raw/train-labels-idx1-ubyte.gz to ./data/MNIST/raw\n", - "\n", - "Downloading http://yann.lecun.com/exdb/mnist/t10k-images-idx3-ubyte.gz\n", - "Failed to download (trying next):\n", - "HTTP Error 404: Not Found\n", - "\n", - "Downloading https://ossci-datasets.s3.amazonaws.com/mnist/t10k-images-idx3-ubyte.gz\n", - "Downloading https://ossci-datasets.s3.amazonaws.com/mnist/t10k-images-idx3-ubyte.gz to ./data/MNIST/raw/t10k-images-idx3-ubyte.gz\n" - ] - }, - { - "name": "stderr", - "output_type": "stream", - "text": [ - "100%|██████████| 1648877/1648877 [00:01<00:00, 1201389.96it/s]\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Extracting ./data/MNIST/raw/t10k-images-idx3-ubyte.gz to ./data/MNIST/raw\n", - "\n", - "Downloading http://yann.lecun.com/exdb/mnist/t10k-labels-idx1-ubyte.gz\n", - "Failed to download (trying next):\n", - "HTTP Error 404: Not Found\n", - "\n", - "Downloading https://ossci-datasets.s3.amazonaws.com/mnist/t10k-labels-idx1-ubyte.gz\n", - "Downloading https://ossci-datasets.s3.amazonaws.com/mnist/t10k-labels-idx1-ubyte.gz to ./data/MNIST/raw/t10k-labels-idx1-ubyte.gz\n" - ] - }, - { - "name": "stderr", - "output_type": "stream", - "text": [ - "100%|██████████| 4542/4542 [00:00<00:00, 2459084.65it/s]" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Extracting ./data/MNIST/raw/t10k-labels-idx1-ubyte.gz to ./data/MNIST/raw\n", - "\n" - ] - }, - { - "name": "stderr", - "output_type": "stream", - "text": [ - "\n" - ] - } - ], - "source": [ - "\n", - "# Transform to normalize the data and convert it to tensor\n", - "transform = transforms.Compose([\n", - " transforms.ToTensor(),\n", - " transforms.Normalize((0.5,), (0.5,)) # Normalizing the image to range [-1, 1]\n", - "])\n", - "\n", - "# Download and load the MNIST dataset\n", - "train_dataset = datasets.MNIST(root='./data', train=True, download=True, transform=transform)\n", - "val_dataset = 
datasets.MNIST(root='./data', train=False, download=True, transform=transform) # Use test set as validation\n", - "test_dataset = datasets.MNIST(root='./data', train=False, download=True, transform=transform)\n", - "\n", - "# DataLoader for training, validation, and testing\n", - "train_loader = DataLoader(train_dataset, batch_size=params[\"batch_size\"], shuffle=True)\n", - "val_loader = DataLoader(val_dataset, batch_size=params[\"batch_size\"], shuffle=False)\n", - "test_loader = DataLoader(test_dataset, batch_size=params[\"batch_size\"], shuffle=False)" - ] - }, - { - "cell_type": "code", - "execution_count": 6, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Adam\n" - ] - } - ], - "source": [ - "\n", - "# Simple Convolutional Neural Network model for MNIST\n", - "class SimpleCNN(nn.Module):\n", - " def __init__(self):\n", - " super(SimpleCNN, self).__init__()\n", - " self.conv1 = nn.Conv2d(1, 32, kernel_size=3, padding=1) # Input channels = 1 (grayscale images)\n", - " self.conv2 = nn.Conv2d(32, 64, kernel_size=3, padding=1)\n", - " self.fc1 = nn.Linear(64 * 7 * 7, 128) # Flattened size of image after convolution layers\n", - " self.fc2 = nn.Linear(128, 10) # 10 output classes for digits 0-9\n", - " \n", - " def forward(self, x):\n", - " x = F.relu(self.conv1(x))\n", - " x = F.max_pool2d(x, 2) # Pooling layer to downsample\n", - " x = F.relu(self.conv2(x))\n", - " x = F.max_pool2d(x, 2)\n", - " x = x.view(-1, 64 * 7 * 7) # Flatten the tensor for the fully connected layer\n", - " x = F.relu(self.fc1(x))\n", - " x = self.fc2(x)\n", - " return x\n", - " \n", - "class SimpleNN(nn.Module):\n", - " def __init__(self):\n", - " super(SimpleNN, self).__init__()\n", - " # Define layers (increase number of layers)\n", - " self.fc1 = nn.Linear(params[\"input_size\"], params[\"input_features\"]) \n", - " self.fc2 = nn.Linear(params[\"input_features\"], 512)\n", - " self.fc3 = nn.Linear(512, 256)\n", - " self.fc4 = 
nn.Linear(256, 128)\n", - " self.fc5 = nn.Linear(128, params[\"n_classes\"]) # Output layer (10 classes for MNIST)\n", - "\n", - " # Registering hooks to track activations\n", - " self.hooks = []\n", - " self.hooks.append(self.fc1.register_forward_hook(self.save_activation(\"fc1\")))\n", - " self.hooks.append(self.fc2.register_forward_hook(self.save_activation(\"fc2\")))\n", - " self.hooks.append(self.fc3.register_forward_hook(self.save_activation(\"fc3\")))\n", - " self.hooks.append(self.fc4.register_forward_hook(self.save_activation(\"fc4\")))\n", - " self.hooks.append(self.fc5.register_forward_hook(self.save_activation(\"fc5\")))\n", - "\n", - " def forward(self, x):\n", - " x = x.view(-1, params[\"input_size\"]) # Flatten the input image (28x28)\n", - " x = torch.relu(self.fc1(x)) # Apply ReLU activation\n", - " x = torch.relu(self.fc2(x)) # Apply ReLU activation\n", - " x = torch.relu(self.fc3(x)) # Apply ReLU activation\n", - " x = torch.relu(self.fc4(x)) # Apply ReLU activation\n", - " x = self.fc5(x) # Output layer\n", - " return x\n", - " \n", - " # Function to save activations\n", - " def save_activation(self, name):\n", - " def hook(model, input, output):\n", - " self.activations[name] = output\n", - " return hook\n", - " \n", - " def get_activations(self):\n", - " return self.activations\n", - "\n", - " def clear_activations(self):\n", - " self.activations = {}\n", - "\n", - "# Instantiate the model, loss function, and optimizer\n", - "#model = SimpleCNN()\n", - "model = SimpleNN()\n", - "device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')\n", - "model.to(device)\n", - "criterion = nn.CrossEntropyLoss() # Loss function\n", - "\n", - "# Select an optimizer\n", - "if params[\"optimizer\"] == \"Adam\":\n", - " optimizer = optim.Adam(model.parameters(), lr=params[\"learning_rate\"])\n", - " print(params[\"optimizer\"])\n", - "elif params[\"optimizer\"] == \"SGD\":\n", - " optimizer = optim.SGD(model.parameters(), 
lr=params[\"learning_rate\"])\n", - " print(params[\"optimizer\"])\n", - "else:\n", - " print(\"No optimizer selected\")\n" - ] - }, - { - "cell_type": "code", - "execution_count": 7, - "metadata": {}, - "outputs": [], - "source": [ - "# Function to evaluate the model (validation/test) with gradients tracked\n", - "def evaluate(model, data_loader, track_gradients=False):\n", - " model.train() if track_gradients else model.eval() # Ensure model is in training mode if tracking gradients\n", - " correct_preds = 0\n", - " total_preds = 0\n", - " epoch_loss = 0\n", - " with torch.no_grad(): # Disable gradient tracking during evaluation\n", - " for data, target in data_loader:\n", - " \n", - " data, target = data.to(device), target.to(device)\n", - " \n", - " # Forward pass (with gradient tracking if specified)\n", - " output = model(data)\n", - " loss = criterion(output, target) # Correct loss computation\n", - " epoch_loss += loss.item()\n", - " \n", - " if track_gradients:\n", - " # Track gradients (we will backpropagate but do not update model parameters)\n", - " loss.backward()\n", - " \n", - " # Calculate accuracy\n", - " _, predicted = torch.max(output.data, 1)\n", - " total_preds += target.size(0)\n", - " correct_preds += (predicted == target).sum().item()\n", - " \n", - " accuracy = correct_preds / total_preds\n", - " return epoch_loss / len(data_loader), accuracy\n" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Neptune - Initialize Training Run and Log Configs" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "# Define Neptune parameters\n", - "from neptune_scale import Run\n", - "from uuid import uuid4\n", - "\n", - "run = Run(\n", - " project = \"leo/pytorch-tutorial\",\n", - " run_id=f\"pytorch-{uuid4()}\"\n", - " )\n", - "\n", - "run.log_configs(\n", - " {\n", - " \"config/learning_rate\": params[\"learning_rate\"],\n", - " \"config/optimizer\": 
params[\"optimizer\"],\n", - " \"config/batch_size\": params[\"batch_size\"],\n", - " \"config/epochs\": params[\"epochs\"],\n", - " \"config/input_size\": params[\"input_size\"]\n", - " }\n", - ")\n", - "\n", - "run.add_tags(tags=[params[\"optimizer\"]], group_tags=True)\n", - "run.add_tags(tags=[\"Torch-MINST\"])" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Neptune - Log Metrics while Training" - ] - }, - { - "cell_type": "code", - "execution_count": 11, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Epoch [1/5] Training complete. Loss: 0.5882, Accuracy: 0.80%\n", - "{'layers/layer_fc1/activation_mean': -28.098386764526367, 'layers/layer_fc4/activation_mean': -28.098386764526367, 'layers/layer_fc5/activation_mean': -28.098386764526367, 'layers/layer_fc2/activation_mean': -4.294741630554199, 'layers/layer_fc3/activation_mean': -4.590151309967041, 'layers/layer_fc1/activation_std': 10.138233184814453, 'layers/layer_fc4/activation_std': 10.138233184814453, 'layers/layer_fc5/activation_std': 10.138233184814453, 'layers/layer_fc2/activation_std': 3.513108730316162, 'layers/layer_fc3/activation_std': 4.694570064544678, 'layers/layer_fc1.weight_mean': -4.2270323319826275e-05, 'layers/layer_fc1.bias_mean': 5.756489190389402e-05, 'layers/layer_fc2.weight_mean': 1.7486901924712583e-05, 'layers/layer_fc2.bias_mean': 5.119257184560411e-05, 'layers/layer_fc3.weight_mean': -6.444352038670331e-06, 'layers/layer_fc3.bias_mean': -3.2650757930241525e-05, 'layers/layer_fc4.weight_mean': 4.735463880933821e-05, 'layers/layer_fc4.bias_mean': 0.00019672312191687524, 'layers/layer_fc5.weight_mean': 2.773958840407431e-11, 'layers/layer_fc5.bias_mean': -3.4924596548080444e-10, 'layers/layer_fc1.weight_std': 0.0018884788732975721, 'layers/layer_fc1.bias_std': 0.0020714548882097006, 'layers/layer_fc2.weight_std': 0.002078161109238863, 'layers/layer_fc2.bias_std': 0.0017285202629864216, 
'layers/layer_fc3.weight_std': 0.0006235535256564617, 'layers/layer_fc3.bias_std': 0.0014959467807784677, 'layers/layer_fc4.weight_std': 0.001397876301780343, 'layers/layer_fc4.bias_std': 0.001897347392514348, 'layers/layer_fc5.weight_std': 0.014698885381221771, 'layers/layer_fc5.bias_std': 0.015608142130076885, 'grad_norm/fc1.weight': 0.8462492823600769, 'grad_norm/fc1.bias': 0.03309129923582077, 'grad_norm/fc2.weight': 0.7523983716964722, 'grad_norm/fc2.bias': 0.039090901613235474, 'grad_norm/fc3.weight': 0.22576160728931427, 'grad_norm/fc3.bias': 0.023894065991044044, 'grad_norm/fc4.weight': 0.25318393111228943, 'grad_norm/fc4.bias': 0.021497542038559914, 'grad_norm/fc5.weight': 0.5256778597831726, 'grad_norm/fc5.bias': 0.04682442545890808}\n", - "Epoch [2/5] Training complete. Loss: 0.2444, Accuracy: 0.93%\n", - "{'layers/layer_fc1/activation_mean': -28.045276641845703, 'layers/layer_fc4/activation_mean': -28.045276641845703, 'layers/layer_fc5/activation_mean': -28.045276641845703, 'layers/layer_fc2/activation_mean': -6.288389682769775, 'layers/layer_fc3/activation_mean': -7.880246162414551, 'layers/layer_fc1/activation_std': 10.427655220031738, 'layers/layer_fc4/activation_std': 10.427655220031738, 'layers/layer_fc5/activation_std': 10.427655220031738, 'layers/layer_fc2/activation_std': 5.016049385070801, 'layers/layer_fc3/activation_std': 7.8910441398620605, 'layers/layer_fc1.weight_mean': -1.8336819266551174e-05, 'layers/layer_fc1.bias_mean': 2.4514503820682876e-05, 'layers/layer_fc2.weight_mean': -4.397415978019126e-06, 'layers/layer_fc2.bias_mean': -5.701838745153509e-06, 'layers/layer_fc3.weight_mean': 3.609377472457709e-06, 'layers/layer_fc3.bias_mean': 2.2656186047242954e-05, 'layers/layer_fc4.weight_mean': 3.2792174806672847e-06, 'layers/layer_fc4.bias_mean': 6.183851655805483e-05, 'layers/layer_fc5.weight_mean': 1.033322427623773e-09, 'layers/layer_fc5.bias_mean': 2.793967834868738e-10, 'layers/layer_fc1.weight_std': 0.0009652073495090008, 
'layers/layer_fc1.bias_std': 0.0011003392282873392, 'layers/layer_fc2.weight_std': 0.0015912855742499232, 'layers/layer_fc2.bias_std': 0.0008093378273770213, 'layers/layer_fc3.weight_std': 0.000562622444704175, 'layers/layer_fc3.bias_std': 0.0008122650324366987, 'layers/layer_fc4.weight_std': 0.0013753673993051052, 'layers/layer_fc4.bias_std': 0.001243805163539946, 'layers/layer_fc5.weight_std': 0.02044137567281723, 'layers/layer_fc5.bias_std': 0.01635204255580902, 'grad_norm/fc1.weight': 0.4324897825717926, 'grad_norm/fc1.bias': 0.01757538504898548, 'grad_norm/fc2.weight': 0.57610684633255, 'grad_norm/fc2.bias': 0.018295787274837494, 'grad_norm/fc3.weight': 0.2036944478750229, 'grad_norm/fc3.bias': 0.012975896708667278, 'grad_norm/fc4.weight': 0.24896498024463654, 'grad_norm/fc4.bias': 0.014034420251846313, 'grad_norm/fc5.weight': 0.7310471534729004, 'grad_norm/fc5.bias': 0.04905613139271736}\n", - "Epoch [3/5] Training complete. Loss: 0.2124, Accuracy: 0.94%\n", - "{'layers/layer_fc1/activation_mean': -28.00644874572754, 'layers/layer_fc4/activation_mean': -28.00644874572754, 'layers/layer_fc5/activation_mean': -28.00644874572754, 'layers/layer_fc2/activation_mean': -8.306803703308105, 'layers/layer_fc3/activation_mean': -10.63320255279541, 'layers/layer_fc1/activation_std': 10.678731918334961, 'layers/layer_fc4/activation_std': 10.678731918334961, 'layers/layer_fc5/activation_std': 10.678731918334961, 'layers/layer_fc2/activation_std': 6.999919891357422, 'layers/layer_fc3/activation_std': 10.323572158813477, 'layers/layer_fc1.weight_mean': 7.430891855619848e-05, 'layers/layer_fc1.bias_mean': -9.825517918216065e-05, 'layers/layer_fc2.weight_mean': 1.702215740806423e-05, 'layers/layer_fc2.bias_mean': 5.076182424090803e-05, 'layers/layer_fc3.weight_mean': 2.006397380682756e-06, 'layers/layer_fc3.bias_mean': 9.8647487902781e-06, 'layers/layer_fc4.weight_mean': 3.732190089067444e-05, 'layers/layer_fc4.bias_mean': 0.00015728663129266351, 
'layers/layer_fc5.weight_mean': 5.190377017072478e-10, 'layers/layer_fc5.bias_mean': 7.450580707946131e-10, 'layers/layer_fc1.weight_std': 0.0013835423160344362, 'layers/layer_fc1.bias_std': 0.0014023504918441176, 'layers/layer_fc2.weight_std': 0.0018697652267292142, 'layers/layer_fc2.bias_std': 0.0008941664709709585, 'layers/layer_fc3.weight_std': 0.0007453133002854884, 'layers/layer_fc3.bias_std': 0.0008750202250666916, 'layers/layer_fc4.weight_std': 0.0016579279908910394, 'layers/layer_fc4.bias_std': 0.001322976779192686, 'layers/layer_fc5.weight_std': 0.018788360059261322, 'layers/layer_fc5.bias_std': 0.0166013166308403, 'grad_norm/fc1.weight': 0.620717465877533, 'grad_norm/fc1.bias': 0.02244885452091694, 'grad_norm/fc2.weight': 0.6769528388977051, 'grad_norm/fc2.bias': 0.020245518535375595, 'grad_norm/fc3.weight': 0.26983216404914856, 'grad_norm/fc3.bias': 0.013973843306303024, 'grad_norm/fc4.weight': 0.30018848180770874, 'grad_norm/fc4.bias': 0.015015012584626675, 'grad_norm/fc5.weight': 0.671930193901062, 'grad_norm/fc5.bias': 0.049803949892520905}\n", - "Epoch [4/5] Training complete. 
Loss: 0.1774, Accuracy: 0.95%\n", - "{'layers/layer_fc1/activation_mean': -28.02176284790039, 'layers/layer_fc4/activation_mean': -28.02176284790039, 'layers/layer_fc5/activation_mean': -28.02176284790039, 'layers/layer_fc2/activation_mean': -9.155248641967773, 'layers/layer_fc3/activation_mean': -13.8362455368042, 'layers/layer_fc1/activation_std': 10.731369972229004, 'layers/layer_fc4/activation_std': 10.731369972229004, 'layers/layer_fc5/activation_std': 10.731369972229004, 'layers/layer_fc2/activation_std': 7.8137617111206055, 'layers/layer_fc3/activation_std': 12.559845924377441, 'layers/layer_fc1.weight_mean': 4.3527179514057934e-05, 'layers/layer_fc1.bias_mean': -5.993247759761289e-05, 'layers/layer_fc2.weight_mean': -8.3655349953915e-06, 'layers/layer_fc2.bias_mean': -2.7970250812359154e-05, 'layers/layer_fc3.weight_mean': -6.794801265641581e-06, 'layers/layer_fc3.bias_mean': -5.020684693590738e-05, 'layers/layer_fc4.weight_mean': -5.56191080249846e-05, 'layers/layer_fc4.bias_mean': -0.00022704749426338822, 'layers/layer_fc5.weight_mean': -4.933287667263642e-10, 'layers/layer_fc5.bias_mean': -1.0710209386033398e-09, 'layers/layer_fc1.weight_std': 0.0006279172375798225, 'layers/layer_fc1.bias_std': 0.0006310796015895903, 'layers/layer_fc2.weight_std': 0.0009842520812526345, 'layers/layer_fc2.bias_std': 0.00036153788096271455, 'layers/layer_fc3.weight_std': 0.0003291342291049659, 'layers/layer_fc3.bias_std': 0.0003552739799488336, 'layers/layer_fc4.weight_std': 0.0006492410320788622, 'layers/layer_fc4.bias_std': 0.0005454771453514695, 'layers/layer_fc5.weight_std': 0.0076590655371546745, 'layers/layer_fc5.bias_std': 0.005646508652716875, 'grad_norm/fc1.weight': 0.28198111057281494, 'grad_norm/fc1.bias': 0.010123053565621376, 'grad_norm/fc2.weight': 0.35634881258010864, 'grad_norm/fc2.bias': 0.008197144605219364, 'grad_norm/fc3.weight': 0.119184210896492, 'grad_norm/fc3.bias': 0.005729861091822386, 'grad_norm/fc4.weight': 0.11795385181903839, 
'grad_norm/fc4.bias': 0.006662336643785238, 'grad_norm/fc5.weight': 0.2739119827747345, 'grad_norm/fc5.bias': 0.016939526423811913}\n", - "Epoch [5/5] Training complete. Loss: 0.1836, Accuracy: 0.95%\n", - "{'layers/layer_fc1/activation_mean': -27.910594940185547, 'layers/layer_fc4/activation_mean': -27.910594940185547, 'layers/layer_fc5/activation_mean': -27.910594940185547, 'layers/layer_fc2/activation_mean': -11.856008529663086, 'layers/layer_fc3/activation_mean': -16.673297882080078, 'layers/layer_fc1/activation_std': 11.18954086303711, 'layers/layer_fc4/activation_std': 11.18954086303711, 'layers/layer_fc5/activation_std': 11.18954086303711, 'layers/layer_fc2/activation_std': 10.051806449890137, 'layers/layer_fc3/activation_std': 14.125566482543945, 'layers/layer_fc1.weight_mean': -4.3949068640358746e-05, 'layers/layer_fc1.bias_mean': 5.6177024816861376e-05, 'layers/layer_fc2.weight_mean': -3.007975647051353e-05, 'layers/layer_fc2.bias_mean': -6.0376849432941526e-05, 'layers/layer_fc3.weight_mean': -6.851441867183894e-06, 'layers/layer_fc3.bias_mean': -4.150162567384541e-05, 'layers/layer_fc4.weight_mean': -3.618385744630359e-05, 'layers/layer_fc4.bias_mean': -9.973671694751829e-05, 'layers/layer_fc5.weight_mean': 1.3166733259240004e-09, 'layers/layer_fc5.bias_mean': 1.3737008197622913e-09, 'layers/layer_fc1.weight_std': 0.0014514160575345159, 'layers/layer_fc1.bias_std': 0.001587417908012867, 'layers/layer_fc2.weight_std': 0.002324556466192007, 'layers/layer_fc2.bias_std': 0.000764612341299653, 'layers/layer_fc3.weight_std': 0.0007226017769426107, 'layers/layer_fc3.bias_std': 0.0007500615902245045, 'layers/layer_fc4.weight_std': 0.001422140165232122, 'layers/layer_fc4.bias_std': 0.000992378918454051, 'layers/layer_fc5.weight_std': 0.016280390322208405, 'layers/layer_fc5.bias_std': 0.013839689083397388, 'grad_norm/fc1.weight': 0.6505305767059326, 'grad_norm/fc1.bias': 0.025364963337779045, 'grad_norm/fc2.weight': 0.8416466116905212, 'grad_norm/fc2.bias': 
0.017338205128908157, 'grad_norm/fc3.weight': 0.26162052154541016, 'grad_norm/fc3.bias': 0.01199591625481844, 'grad_norm/fc4.weight': 0.25751423835754395, 'grad_norm/fc4.bias': 0.011240324936807156, 'grad_norm/fc5.weight': 0.5822373628616333, 'grad_norm/fc5.bias': 0.04151906818151474}\n", - "Testing complete. Loss: 0.1949, Accuracy: 0.95%\n" - ] - } - ], - "source": [ - "# Training loop\n", - "activation_dict_mean = {}\n", - "activation_dict_std = {}\n", - "params_dict_std = {}\n", - "params_dict_mean = {}\n", - "grad_norms = {}\n", - "\n", - "num_epochs = params[\"epochs\"]\n", - "step_counter = 0\n", - "for epoch in range(num_epochs):\n", - " model.train()\n", - " epoch_loss = 0\n", - " correct_preds = 0\n", - " total_preds = 0\n", - "\n", - " # Reset activations for each epoch\n", - " model.clear_activations()\n", - " \n", - " # Training step\n", - " for batch_idx, (data, target) in enumerate(train_loader):\n", - " step_counter += 1\n", - " optimizer.zero_grad()\n", - " \n", - " data, target = data.to(device), target.to(device)\n", - "\n", - " # Forward pass\n", - " output = model(data)\n", - " \n", - " # Compute the loss\n", - " loss = criterion(output, target)\n", - " \n", - " # Backward pass and optimization\n", - " loss.backward()\n", - " optimizer.step()\n", - "\n", - " epoch_loss += loss.item()\n", - " \n", - " # Calculate accuracy\n", - " _, predicted = torch.max(output.data, 1)\n", - " total_preds += target.size(0)\n", - " correct_preds += (predicted == target).sum().item()\n", - " \n", - " # Print loss and accuracy for each batch (step)\n", - " #if (batch_idx + 1) % 5 == 0: # Every 5 steps\n", - " batch_accuracy = correct_preds / total_preds\n", - " # print(f\"Epoch [{epoch+1}/{num_epochs}], Step [{batch_idx+1}/{len(train_loader)}], Loss: {loss.item():.4f}, Accuracy: {batch_accuracy:.2f}%\")\n", - " \n", - " # Validation step per training step\n", - " val_loss, val_accuracy = evaluate(model, val_loader) # Evaluate after each step\n", - " # 
print(f\"Validation at step [{batch_idx+1}/{len(train_loader)}] - Loss: {val_loss:.4f}, Accuracy: {val_accuracy:.2f}%\")\n", - "\n", - " for name, param in model.named_parameters():\n", - " if param is not None:\n", - " grad_norms[f\"grad_norm/{name}\"] = param.grad.norm(2).item() # L2 norm (Euclidean norm) of the gradients\n", - "\n", - " run.log_metrics(\n", - " data = {\n", - " \"metrics/train/loss\": loss.item(),\n", - " \"metrics/train/accuracy\": batch_accuracy,\n", - " \"metrics/validation/loss\": val_loss,\n", - " \"metrics/validation/accuracy\": val_accuracy,\n", - " \"epoch_value\": epoch,\n", - " **grad_norms\n", - " },\n", - " step = step_counter\n", - " )\n", - " \n", - " # Print loss and accuracy for the entire training epoch\n", - " train_accuracy = correct_preds / total_preds\n", - " print(f\"Epoch [{epoch+1}/{num_epochs}] Training complete. Loss: {epoch_loss / len(train_loader):.4f}, Accuracy: {train_accuracy:.2f}%\")\n", - "\n", - " # Track activations\n", - " for name, activation in model.get_activations().items():\n", - " activation_dict_mean[f\"layers/layer_{name}/activation_mean\"] = activation.mean().item()\n", - " activation_dict_std[f\"layers/layer_{name}/activation_std\"] = activation.std().item()\n", - "\n", - " # Track gradients and norms per layer at each epoch\n", - " for name, param in model.named_parameters():\n", - " if param is not None:\n", - " params_dict_std[f\"layers/layer_{name}_std\"] = param.grad.std().item()\n", - " params_dict_mean[f\"layers/layer_{name}_mean\"] = param.grad.mean().item()\n", - " # grad_norms[f\"grad_norm/{name}\"] = param.grad.norm(2).item() # L2 norm (Euclidean norm) of the gradients\n", - " \n", - " layers_dict = {**activation_dict_mean, \n", - " **activation_dict_std,\n", - " **params_dict_mean,\n", - " **params_dict_std,\n", - " **grad_norms\n", - " }\n", - " print(layers_dict)\n", - "\n", - " # data_to_log = {\n", - " # \"metrics/test/loss_epoch\": epoch_loss / len(train_loader),\n", - " # 
\"metrics/train/accuracy_epoch\": train_accuracy\n", - " # }.update(activation_dict)\n", - " \n", - " run.log_metrics(\n", - " data = layers_dict,\n", - " step = epoch\n", - " )\n", - " \n", - "# Final Testing Step with gradient tracking\n", - "test_loss, test_accuracy = evaluate(model, test_loader, track_gradients=False) # Track gradients during test\n", - "print(f\"Testing complete. Loss: {test_loss:.4f}, Accuracy: {test_accuracy:.2f}%\")\n", - "\n", - "run.log_configs(\n", - " {\n", - " \"metrics/test/loss\": test_loss,\n", - " \"metrics/test/accuracy\": test_accuracy\n", - " }\n", - ")" - ] - }, - { - "cell_type": "code", - "execution_count": 8, - "metadata": {}, - "outputs": [ - { - "name": "stderr", - "output_type": "stream", - "text": [ - "neptune:INFO: Waiting for all operations to be processed\n", - "neptune:WARNING: No timeout specified. Waiting indefinitely\n", - "neptune:INFO: All operations were processed\n" - ] - } - ], - "source": [ - "run.close()" - ] - } - ], - "metadata": { - "kernelspec": { - "display_name": "neptune_scale_py_312_base", - "language": "python", - "name": "python3" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.12.3" - } - }, - "nbformat": 4, - "nbformat_minor": 2 -} From 5fd0a0e5148ee6817222606bf015ac6b1d8e8fef Mon Sep 17 00:00:00 2001 From: LeoRoccoBreedt Date: Thu, 20 Mar 2025 10:04:42 +0100 Subject: [PATCH 066/125] chore: clean up comments and code cell outputs --- .../pytorch_text_model_debugging.ipynb | 31 +++---------------- 1 file changed, 4 insertions(+), 27 deletions(-) diff --git a/integrations-and-supported-tools/pytorch/pytorch_text_model_debugging.ipynb b/integrations-and-supported-tools/pytorch/pytorch_text_model_debugging.ipynb index 106c3db..7f99bae 100644 --- 
a/integrations-and-supported-tools/pytorch/pytorch_text_model_debugging.ipynb +++ b/integrations-and-supported-tools/pytorch/pytorch_text_model_debugging.ipynb @@ -63,18 +63,9 @@ }, { "cell_type": "code", - "execution_count": 3, + "execution_count": 24, "metadata": {}, - "outputs": [ - { - "name": "stderr", - "output_type": "stream", - "text": [ - "c:\\Users\\leo.breedt\\miniconda3\\envs\\neptune_scale_py_312_base\\Lib\\site-packages\\tqdm\\auto.py:21: TqdmWarning: IProgress not found. Please update jupyter and ipywidgets. See https://ipywidgets.readthedocs.io/en/stable/user_install.html\n", - " from .autonotebook import tqdm as notebook_tqdm\n" - ] - } - ], + "outputs": [], "source": [ "# Import libraries\n", "import torch\n", @@ -330,17 +321,9 @@ }, { "cell_type": "code", - "execution_count": 22, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "View current experiment: https://scale.neptune.ai/leo/pytorch-tutorial/runs/details?viewId=standard-view&runIdentificationKey=pytorch-text&type=experiment\n" - ] - } - ], + "outputs": [], "source": [ "# Define Neptune parameters\n", "from neptune_scale import Run\n", @@ -444,9 +427,6 @@ " gradients = hook_manager.get_gradients()\n", " for layer, gradient in gradients.items():\n", " debug_metrics[f\"debug/gradient/{layer}_mean\"] = gradient.mean().item()\n", - " print(f\"Gradients for {layer}: {gradient.mean().item()}\") # You can replace to use mean(), sum(), max() or min()\n", - " # simplified_gradient = gradient.mean(dim=(0, 1))\n", - " # print(f\"Summed Gradient for {layer}: {simplified_gradient}\")\n", "\n", " # Track gradients per layer at each epoch\n", " for layer, param in model.named_parameters():\n", @@ -479,9 +459,6 @@ " \n", " print(f\"Epoch {epoch + 1}, Loss: {total_loss / len(train_dataloader)}\")\n", "\n", - "# test_loss = evaluate_model(model, test_input, test_target, params[\"vocab_size\"])\n", - "# print(f'Test Loss: 
{test_loss:.4f}')\n", - "\n", "# Close run to ensure all operations are processed\n", "run.close()" ] From 23ea6635d65125ce7ed616cd9e7b34d7fa1b8500 Mon Sep 17 00:00:00 2001 From: LeoRoccoBreedt Date: Thu, 20 Mar 2025 10:06:50 +0100 Subject: [PATCH 067/125] chore: move notebook into its own notebook folder --- .../pytorch/{ => notebooks}/pytorch_text_model_debugging.ipynb | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename integrations-and-supported-tools/pytorch/{ => notebooks}/pytorch_text_model_debugging.ipynb (100%) diff --git a/integrations-and-supported-tools/pytorch/pytorch_text_model_debugging.ipynb b/integrations-and-supported-tools/pytorch/notebooks/pytorch_text_model_debugging.ipynb similarity index 100% rename from integrations-and-supported-tools/pytorch/pytorch_text_model_debugging.ipynb rename to integrations-and-supported-tools/pytorch/notebooks/pytorch_text_model_debugging.ipynb From 21114f643b3c92280acfe6d1dad98d7dadc4fc5a Mon Sep 17 00:00:00 2001 From: LeoRoccoBreedt Date: Thu, 20 Mar 2025 10:07:31 +0100 Subject: [PATCH 068/125] chore: fix comment --- .../pytorch/notebooks/pytorch_text_model_debugging.ipynb | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/integrations-and-supported-tools/pytorch/notebooks/pytorch_text_model_debugging.ipynb b/integrations-and-supported-tools/pytorch/notebooks/pytorch_text_model_debugging.ipynb index 7f99bae..9cec929 100644 --- a/integrations-and-supported-tools/pytorch/notebooks/pytorch_text_model_debugging.ipynb +++ b/integrations-and-supported-tools/pytorch/notebooks/pytorch_text_model_debugging.ipynb @@ -435,7 +435,7 @@ " debug_metrics[f\"debug/parameters/{layer}_mean\"] = param.grad.mean().item()\n", " debug_metrics[f\"debug/parameters/{layer}_norm\"] = param.grad.norm().item() # L2 norm (Euclidean norm) of the gradients\n", "\n", - " if step_counter % 50 == 0: # Log validation loss at every 30 steps\n", + " if step_counter % 50 == 0: # Log validation loss at every 50 steps\n", " 
val_loss = evaluate(model, val_dataloader, criterion, device, vocab_size)\n", "\n", " run.log_metrics(\n", From c8cd6768c174a56581eedd812c5ac944fd1ed2e6 Mon Sep 17 00:00:00 2001 From: LeoRoccoBreedt Date: Thu, 20 Mar 2025 11:00:47 +0100 Subject: [PATCH 069/125] refactor: update notebook to use envionment variables --- .../pytorch_text_model_debugging.ipynb | 31 ++++++++++++++++--- 1 file changed, 26 insertions(+), 5 deletions(-) diff --git a/integrations-and-supported-tools/pytorch/notebooks/pytorch_text_model_debugging.ipynb b/integrations-and-supported-tools/pytorch/notebooks/pytorch_text_model_debugging.ipynb index 9cec929..1aa1d62 100644 --- a/integrations-and-supported-tools/pytorch/notebooks/pytorch_text_model_debugging.ipynb +++ b/integrations-and-supported-tools/pytorch/notebooks/pytorch_text_model_debugging.ipynb @@ -44,6 +44,30 @@ " 3. Install and configure Neptune Scale for logging metadata. For instructions, see [Get started](https://docs-beta.neptune.ai/setup) in the Neptune Scale docs." ] }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Set environment variables\n", + "Once you have your project and workspace names as well as your API token from the UI, you can set them as environment variables to be used through this notebook. 
Uncomment the code block below and replace with your own details.\n", + "```python \n", + "# Set Neptune credentials as environment variables\n", + "%env NEPTUNE_API_TOKEN = \"your_api_token_here\"\n", + "%env NEPTUNE_PROJECT = \"your_workspace_name_here/your_project_name_here\"\n", + "```" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "# Set Neptune credentials as environment variables\n", + "# %env NEPTUNE_API_TOKEN = \"your_api_token_here\"\n", + "# %env NEPTUNE_PROJECT = \"your_workspace_name_here/your_project_name_here\"" + ] + }, { "cell_type": "markdown", "metadata": {}, @@ -325,16 +349,13 @@ "metadata": {}, "outputs": [], "source": [ - "# Define Neptune parameters\n", "from neptune_scale import Run\n", "from uuid import uuid4\n", "\n", - "project = \"leo/pytorch-tutorial\"\n", "custom_run_id = f\"pytorch-text-{uuid4()}\" # Create your own custom run_id\n", "experiment_name = \"pytorch-text\" # Create a run that is the head of an experiment. This will also be used for forking.\n", "\n", "run = Run(\n", - " project = project,\n", " run_id=custom_run_id,\n", " experiment_name = experiment_name,\n", " )\n", @@ -354,7 +375,7 @@ "run.add_tags(tags=[params[\"optimizer\"]], group_tags=True)\n", "run.add_tags(tags=[\"text\", \"LLM\", \"Simple\"])\n", "\n", - "print(f\"View current experiment: https://scale.neptune.ai/{project}/runs/details?viewId=standard-view&runIdentificationKey={experiment_name}&type=experiment\")" + "print(f\"View current experiment: https://scale.neptune.ai/{os.getenv(\"NEPTUNE_PROJECT\")}/runs/details?viewId=standard-view&runIdentificationKey={experiment_name}&type=experiment\")" ] }, { @@ -372,7 +393,7 @@ "outputs": [], "source": [ "# Create links to access active training run. 
Note - this is not required for the example to run, but for ease of illustration of the tutorial.\n", - "print(f\"View training charts: https://scale.neptune.ai/{project}/-/run/?customId={custom_run_id}&detailsTab=charts\")\n", + "print(f\"View training charts: https://scale.neptune.ai/{os.getenv(\"NEPTUNE_PROJECT\")}/-/run/?customId={custom_run_id}&detailsTab=charts\")\n", "\n", "# Training setup\n", "debug_metrics = {}\n", From fe4c1a38c98098670c7e154fdc04a47c5fde70fd Mon Sep 17 00:00:00 2001 From: LeoRoccoBreedt Date: Thu, 20 Mar 2025 11:02:35 +0100 Subject: [PATCH 070/125] chore: update colab link --- .../pytorch/notebooks/pytorch_text_model_debugging.ipynb | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/integrations-and-supported-tools/pytorch/notebooks/pytorch_text_model_debugging.ipynb b/integrations-and-supported-tools/pytorch/notebooks/pytorch_text_model_debugging.ipynb index 1aa1d62..c33a9c8 100644 --- a/integrations-and-supported-tools/pytorch/notebooks/pytorch_text_model_debugging.ipynb +++ b/integrations-and-supported-tools/pytorch/notebooks/pytorch_text_model_debugging.ipynb @@ -6,7 +6,7 @@ "source": [ "# Neptune + PyTorch\n", "\n", - " \n", + " \n", " \"Open \n", "\n", "\n", From 6e96c5cc990694c4be7bee904fb8535364fc6969 Mon Sep 17 00:00:00 2001 From: LeoRoccoBreedt Date: Thu, 20 Mar 2025 11:35:52 +0100 Subject: [PATCH 071/125] chore: accpet pre-commit recommendations --- .../pytorch_text_model_debugging.ipynb | 152 +++++++++--------- 1 file changed, 77 insertions(+), 75 deletions(-) diff --git a/integrations-and-supported-tools/pytorch/notebooks/pytorch_text_model_debugging.ipynb b/integrations-and-supported-tools/pytorch/notebooks/pytorch_text_model_debugging.ipynb index c33a9c8..1939444 100644 --- a/integrations-and-supported-tools/pytorch/notebooks/pytorch_text_model_debugging.ipynb +++ b/integrations-and-supported-tools/pytorch/notebooks/pytorch_text_model_debugging.ipynb @@ -87,7 +87,7 @@ }, { "cell_type": "code", - 
"execution_count": 24, + "execution_count": null, "metadata": {}, "outputs": [], "source": [ @@ -113,7 +113,7 @@ }, { "cell_type": "code", - "execution_count": 4, + "execution_count": null, "metadata": {}, "outputs": [], "source": [ @@ -123,13 +123,13 @@ " \"optimizer\": \"Adam\",\n", " \"batch_size\": 8,\n", " \"learning_rate\": 0.01,\n", - " \"epochs\": 5, \n", + " \"epochs\": 5,\n", " \"device\": torch.device(\"cuda\" if torch.cuda.is_available() else \"cpu\"),\n", " \"input_features\": 256,\n", " \"embed_size\": 1000,\n", - " \"hidden_size\": 256, # hidden size for the LSTM\n", + " \"hidden_size\": 256, # hidden size for the LSTM\n", " \"dropout_prob\": 0.3,\n", - " \"num_lstm_layers\": 3\n", + " \"num_lstm_layers\": 3,\n", "}" ] }, @@ -143,28 +143,24 @@ }, { "cell_type": "code", - "execution_count": 5, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Training samples: 81926 \n", - "Validation samples: 935\n" - ] - } - ], + "outputs": [], "source": [ "# For the example, download a random subset of 10% of the original dataset\n", "base_url = \"https://huggingface.co/datasets/Na0s/Next_Token_Prediction_dataset/resolve/main/data/\"\n", - "data_files = {\"train\": base_url + \"train-00001-of-00067.parquet\", # download only the first 10 files from the HF dataset\n", - " \"validation\": base_url + \"validation-00000-of-00001.parquet\"} #doanload the complete validation dataset\n", + "data_files = {\n", + " \"train\": base_url\n", + " + \"train-00001-of-00067.parquet\", # download only the first 10 files from the HF dataset\n", + " \"validation\": base_url + \"validation-00000-of-00001.parquet\",\n", + "} # doanload the complete validation dataset\n", "\n", - "data_subset = load_dataset(\"parquet\", data_files = data_files, num_proc=4)\n", + "data_subset = load_dataset(\"parquet\", data_files=data_files, num_proc=4)\n", "# validation_subset = load_dataset(\"parquet\", data_files = 
{\"validation\": base_url + \"validation-00000-of-00001.parquet\"}, num_proc=4, split=[\"validation[:5%]\"])\n", "validation_subset = data_subset.get(\"validation\").train_test_split(test_size=0.1)\n", - "print(f\"Training samples: {data_subset['train'].num_rows} \\nValidation samples: {validation_subset['test'].num_rows}\")" + "print(\n", + " f\"Training samples: {data_subset['train'].num_rows} \\nValidation samples: {validation_subset['test'].num_rows}\"\n", + ")" ] }, { @@ -178,27 +174,25 @@ }, { "cell_type": "code", - "execution_count": 6, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Vocabulary size: 128257\n" - ] - } - ], + "outputs": [], "source": [ - "train_subset = data_subset[\"train\"].with_format(type=\"torch\", columns=[\"text\", \"input_ids\", \"labels\"]) # HF provides methods to convert datatypes to tensors\n", - "validation_subset = validation_subset[\"test\"].with_format(type=\"torch\", columns=[\"text\", \"input_ids\", \"labels\"]) # HF provides methods to convert datatypes to tensors\n", + "train_subset = data_subset[\"train\"].with_format(\n", + " type=\"torch\", columns=[\"text\", \"input_ids\", \"labels\"]\n", + ") # HF provides methods to convert datatypes to tensors\n", + "validation_subset = validation_subset[\"test\"].with_format(\n", + " type=\"torch\", columns=[\"text\", \"input_ids\", \"labels\"]\n", + ") # HF provides methods to convert datatypes to tensors\n", "\n", "train_dataloader = DataLoader(train_subset, batch_size=params[\"batch_size\"], shuffle=True)\n", "val_dataloader = DataLoader(validation_subset, batch_size=params[\"batch_size\"], shuffle=True)\n", "\n", "# Determine the vocab size of the dataset\n", "# Flatten the list of tokenized sentences into one long list of token IDs\n", - "vocab_size = max([token for sentence in data_subset[\"train\"][\"input_ids\"] for token in sentence]) + 1\n", + "vocab_size = (\n", + " max([token for sentence in 
data_subset[\"train\"][\"input_ids\"] for token in sentence]) + 1\n", + ")\n", "params[\"vocab_size\"] = vocab_size\n", "print(f\"Vocabulary size: {vocab_size}\")" ] @@ -213,7 +207,7 @@ }, { "cell_type": "code", - "execution_count": 7, + "execution_count": null, "metadata": {}, "outputs": [], "source": [ @@ -222,27 +216,28 @@ " def __init__(self, vocab_size, embed_size, hidden_size, num_layers):\n", " super(SimpleLLM, self).__init__()\n", " self.embedding = nn.Embedding(vocab_size, embed_size)\n", - " self.lstm = nn.LSTM(embed_size, hidden_size, num_layers = num_layers, batch_first=True)\n", + " self.lstm = nn.LSTM(embed_size, hidden_size, num_layers=num_layers, batch_first=True)\n", " self.fc1 = nn.Linear(hidden_size, vocab_size)\n", - " \n", + "\n", " def forward(self, x):\n", " x = self.embedding(x)\n", " lstm_out, _ = self.lstm(x) # LSTM returns output and hidden/cell state tuple\n", - " out = self.fc1(lstm_out) # Use the last output from the LSTM\n", + " out = self.fc1(lstm_out) # Use the last output from the LSTM\n", " return out\n", "\n", + "\n", "# Function to evaluate the model after each epoch/step\n", "def evaluate(model, val_dataloader, criterion, device, vocab_size):\n", " model.eval() # Set the model to evaluation mode\n", " total_loss = 0\n", " with torch.no_grad(): # Disable gradient calculation for validation\n", " for batch in val_dataloader:\n", - " input_ids = batch['input_ids'].to(device)\n", - " labels = batch['labels'].to(device)\n", + " input_ids = batch[\"input_ids\"].to(device)\n", + " labels = batch[\"labels\"].to(device)\n", "\n", " # Forward pass for validation\n", " logits = model(input_ids) # Shape: (batch_size, seq_len, vocab_size)\n", - " \n", + "\n", " # Calculate the loss\n", " loss = criterion(logits.view(-1, vocab_size), labels.view(-1))\n", " total_loss += loss.item()\n", @@ -283,7 +278,7 @@ }, { "cell_type": "code", - "execution_count": 8, + "execution_count": null, "metadata": {}, "outputs": [], "source": [ @@ -292,7 +287,7 
@@ " def __init__(self, model):\n", " if not isinstance(model, nn.Module):\n", " raise TypeError(\"The model must be a PyTorch model\")\n", - " \n", + "\n", " self.model = model\n", " self.hooks = []\n", " self.activations = {}\n", @@ -302,12 +297,14 @@ " def save_activation(self, name):\n", " def hook(module, input, output):\n", " self.activations[name] = output\n", + "\n", " return hook\n", "\n", " # Function to save gradients (registering hooks for the model parameters)\n", " def save_gradient(self, name):\n", " def hook(module, grad_input, grad_output):\n", " self.gradients[name] = grad_output[0]\n", + "\n", " return hook\n", "\n", " # Function to register hooks for activations and gradients\n", @@ -318,7 +315,7 @@ "\n", " # Register backward hooks for gradients\n", " for name, module in self.model.named_modules():\n", - " if isinstance(module, (nn.LSTM, nn.Linear)): # You can add more layer types here\n", + " if isinstance(module, (nn.LSTM, nn.Linear)): # You can add more layer types here\n", " self.hooks.append(module.register_full_backward_hook(self.save_gradient(name)))\n", "\n", " # Function to clear activations and gradients after use\n", @@ -352,13 +349,13 @@ "from neptune_scale import Run\n", "from uuid import uuid4\n", "\n", - "custom_run_id = f\"pytorch-text-{uuid4()}\" # Create your own custom run_id\n", - "experiment_name = \"pytorch-text\" # Create a run that is the head of an experiment. This will also be used for forking.\n", + "custom_run_id = f\"pytorch-text-{uuid4()}\" # Create your own custom run_id\n", + "experiment_name = \"pytorch-text\" # Create a run that is the head of an experiment. 
This will also be used for forking.\n", "\n", "run = Run(\n", " run_id=custom_run_id,\n", - " experiment_name = experiment_name,\n", - " )\n", + " experiment_name=experiment_name,\n", + ")\n", "\n", "run.log_configs(\n", " {\n", @@ -366,16 +363,18 @@ " \"config/optimizer\": params[\"optimizer\"],\n", " \"config/batch_size\": params[\"batch_size\"],\n", " \"config/epochs\": params[\"epochs\"],\n", - " \"config/num_lstm_layers\" : params[\"num_lstm_layers\"],\n", + " \"config/num_lstm_layers\": params[\"num_lstm_layers\"],\n", " \"data/vocab_size\": params[\"vocab_size\"],\n", - " \"data/embed_size\": params[\"embed_size\"]\n", + " \"data/embed_size\": params[\"embed_size\"],\n", " }\n", ")\n", "\n", "run.add_tags(tags=[params[\"optimizer\"]], group_tags=True)\n", "run.add_tags(tags=[\"text\", \"LLM\", \"Simple\"])\n", "\n", - "print(f\"View current experiment: https://scale.neptune.ai/{os.getenv(\"NEPTUNE_PROJECT\")}/runs/details?viewId=standard-view&runIdentificationKey={experiment_name}&type=experiment\")" + "print(\n", + " f\"View current experiment: https://scale.neptune.ai/{os.getenv(\"NEPTUNE_PROJECT\")}/runs/details?viewId=standard-view&runIdentificationKey={experiment_name}&type=experiment\"\n", + ")" ] }, { @@ -393,20 +392,26 @@ "outputs": [], "source": [ "# Create links to access active training run. 
Note - this is not required for the example to run, but for ease of illustration of the tutorial.\n", - "print(f\"View training charts: https://scale.neptune.ai/{os.getenv(\"NEPTUNE_PROJECT\")}/-/run/?customId={custom_run_id}&detailsTab=charts\")\n", + "print(\n", + " f\"View training charts: https://scale.neptune.ai/{os.getenv(\"NEPTUNE_PROJECT\")}/-/run/?customId={custom_run_id}&detailsTab=charts\"\n", + ")\n", "\n", "# Training setup\n", "debug_metrics = {}\n", "\n", "# Initialize model and optimizer\n", - "model = SimpleLLM(params[\"vocab_size\"], params[\"embed_size\"], params[\"hidden_size\"], params[\"num_lstm_layers\"])\n", - "optimizer = optim.Adam(model.parameters(), lr = params[\"learning_rate\"])\n", - "criterion = nn.CrossEntropyLoss(ignore_index=-100) # Ignore the buffering index of -100 in the dataset\n", + "model = SimpleLLM(\n", + " params[\"vocab_size\"], params[\"embed_size\"], params[\"hidden_size\"], params[\"num_lstm_layers\"]\n", + ")\n", + "optimizer = optim.Adam(model.parameters(), lr=params[\"learning_rate\"])\n", + "criterion = nn.CrossEntropyLoss(\n", + " ignore_index=-100\n", + ") # Ignore the buffering index of -100 in the dataset\n", "\n", "hook_manager = HookManager(model)\n", "hook_manager.register_hooks()\n", "\n", - "device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')\n", + "device = torch.device(\"cuda\" if torch.cuda.is_available() else \"cpu\")\n", "model.to(device)\n", "step_counter = 0\n", "\n", @@ -418,22 +423,21 @@ " step_counter += 1\n", " hook_manager.clear()\n", "\n", - " input_ids = batch['input_ids'].to(device)\n", - " labels = batch['labels'].to(device)\n", - " \n", + " input_ids = batch[\"input_ids\"].to(device)\n", + " labels = batch[\"labels\"].to(device)\n", + "\n", " optimizer.zero_grad()\n", - " \n", + "\n", " # Forward pass\n", " logits = model(input_ids)\n", - " \n", + "\n", " # Compute the loss (ignore padding tokens by masking labels)\n", " loss = criterion(logits.view(-1, vocab_size), 
labels.view(-1))\n", - " \n", + "\n", " # Backward pass and optimization\n", " loss.backward()\n", " optimizer.step()\n", "\n", - " \n", " total_loss += loss.item()\n", " print(f\"Step {step_counter} / {len(train_dataloader)}, Loss: {loss.item()}\")\n", "\n", @@ -454,30 +458,28 @@ " if param is not None:\n", " debug_metrics[f\"debug/parameters/{layer}_std\"] = param.grad.std().item()\n", " debug_metrics[f\"debug/parameters/{layer}_mean\"] = param.grad.mean().item()\n", - " debug_metrics[f\"debug/parameters/{layer}_norm\"] = param.grad.norm().item() # L2 norm (Euclidean norm) of the gradients\n", + " debug_metrics[f\"debug/parameters/{layer}_norm\"] = (\n", + " param.grad.norm().item()\n", + " ) # L2 norm (Euclidean norm) of the gradients\n", "\n", - " if step_counter % 50 == 0: # Log validation loss at every 50 steps\n", + " if step_counter % 50 == 0: # Log validation loss at every 50 steps\n", " val_loss = evaluate(model, val_dataloader, criterion, device, vocab_size)\n", "\n", " run.log_metrics(\n", - " data = {\n", + " data={\n", " \"metrics/train/loss\": loss.item(),\n", " \"metrics/validation/loss\": val_loss,\n", " \"epoch/value\": epoch,\n", - " **debug_metrics\n", + " **debug_metrics,\n", " },\n", - " step = step_counter\n", + " step=step_counter,\n", " )\n", - " else: # Log training loss and debugging metrics for each step\n", + " else: # Log training loss and debugging metrics for each step\n", " run.log_metrics(\n", - " data = {\n", - " \"metrics/train/loss\": loss.item(),\n", - " \"epoch/value\": epoch,\n", - " **debug_metrics\n", - " },\n", - " step = step_counter\n", + " data={\"metrics/train/loss\": loss.item(), \"epoch/value\": epoch, **debug_metrics},\n", + " step=step_counter,\n", " )\n", - " \n", + "\n", " print(f\"Epoch {epoch + 1}, Loss: {total_loss / len(train_dataloader)}\")\n", "\n", "# Close run to ensure all operations are processed\n", From b16869b4cb8a32ddc60eb6e954982214f6daca1e Mon Sep 17 00:00:00 2001 From: LeoRoccoBreedt Date: Thu, 
20 Mar 2025 11:42:32 +0100 Subject: [PATCH 072/125] chore: add notebook to test workflow --- .github/workflows/test-notebooks.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/workflows/test-notebooks.yml b/.github/workflows/test-notebooks.yml index 016c353..06d5be7 100644 --- a/.github/workflows/test-notebooks.yml +++ b/.github/workflows/test-notebooks.yml @@ -21,6 +21,7 @@ jobs: python-version: ["3.9", "3.10", "3.11", "3.12", "3.13"] notebooks: # Add in alphabetical order - how-to-guides/hpo/notebooks/Neptune_HPO.ipynb + - integrations-and-supported-tools/pytorch/notebooks/pytorch_text_model_debugging.ipynb os: ["${{ inputs.os }}"] steps: - uses: actions/checkout@main From 15357bb749b81b607c2ba99d75c9ac8cc058fe87 Mon Sep 17 00:00:00 2001 From: szaganek Date: Thu, 27 Mar 2025 12:23:26 +0100 Subject: [PATCH 073/125] TW review --- .../pytorch_text_model_debugging.ipynb | 121 +++++++++++------- 1 file changed, 78 insertions(+), 43 deletions(-) diff --git a/integrations-and-supported-tools/pytorch/notebooks/pytorch_text_model_debugging.ipynb b/integrations-and-supported-tools/pytorch/notebooks/pytorch_text_model_debugging.ipynb index 1939444..31167f5 100644 --- a/integrations-and-supported-tools/pytorch/notebooks/pytorch_text_model_debugging.ipynb +++ b/integrations-and-supported-tools/pytorch/notebooks/pytorch_text_model_debugging.ipynb @@ -8,29 +8,47 @@ "\n", " \n", " \"Open \n", - "\n", - "\n", - "## Logging and visualizing debugging metrics with Neptune\n", - "Global metrics (e.g., loss, accuracy) provide a high-level performance snapshot and ensure training is on course. However, for large or foundation models, monitoring layer-wise metrics—such as gradients and activations—delivers critical insights into how each layer learns. 
This level of detail helps identify issues (e.g., vanishing/exploding gradients) and fine-tune individual layers for better overall performance.\n", - "The **main challenge** is the sheer volume of data generated by layer-wise logging. Fortunately, Neptune is built for hyperscale tracking, enabling you to efficiently capture, organize, and analyze metrics from every layer—no matter how large your model—without disrupting the training process.\n", + "" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Introduction\n", + "Global metrics such as loss or accuracy provide a high-level performance snapshot and ensure training is on course.\n", "\n", - "### Introduction\n", - "This example is designed to be used as a code recipe for you to re-use sections with your own code to edit or adapt to your own model training needs. \n", + "However, for large or foundation models, monitoring layer-wise metrics—such as gradients and activations—delivers critical insights into how each layer learns. This level of detail helps identify issues and fine-tune individual layers for better overall performance.\n", + "The main challenge is the volume of data generated by layer-wise logging.\n", "\n", + "Fortunately, Neptune is built for hyperscale tracking. It enables you to capture, organize, and analyze metrics from every layer without disrupting the training process. No matter how large is your model." 
+ ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ "This guide will show you how to:\n", "- Initialize the **Neptune Run** object and log configuration parameters\n", - "- Create a **reuseable class** to hook layer-wise metrics (`HookManager`)\n", + "- Create a **reusable class** to hook layer-wise metrics (`HookManager`)\n", "- Log **aggregated metrics** such as loss and accuracy\n", - "- Log **layer-wise metrics** to debug model traing such as;\n", + "- Log **layer-wise metrics** to debug model training such as:\n", "\n", - "| **Metric** | **Demonstrated** | **What it Shows** | **How to Capture** |\n", + "| **Metric** | **Demonstrated** | **What it shows** | **How to capture** |\n", "|-----------------------------------|--------------------------------------|--------------------------------------------------------------------------------------------------------------|---------------------------------------------------------------|\n", "| **Activations** | Yes | Dead or exploding activations can indicate issues with training stability. | `HookManager` |\n", "| **Gradients** | Yes | Essential for diagnosing vanishing or exploding gradients. Small gradients may indicate vanishing gradients, while large ones can signal instability. | `HookManager` |\n", "| **Parameters** | Yes | Tracks how the model’s parameters evolve during training. Large or small weights may indicate the need for better regularization or adjustments in learning rate. | Extract directly from the model’s parameters. |\n", "| **Loss** | No | Identifies which parts of the network contribute more to the overall loss, aiding debugging and optimization. | Monitor outputs from each layer and compare with the target. |\n", - "| **Learning Rate** | No | Helpful if using techniques like Layer-wise Learning Rate Decay (L2LRD). Tracking this can provide insight into the layer-specific learning rate. | Manually track based on optimizer settings. 
|\n", - "| **Output Norms** | No | The L2-norm of layer outputs can highlight issues like gradient explosion or vanishing gradients. | Compute the L2-norm for each layer’s output. |\n" + "| **Learning rate** | No | Helpful if using techniques like Layer-wise Learning Rate Decay (L2LRD). Tracking this can provide insight into the layer-specific learning rate. | Manually track based on optimizer settings. |\n", + "| **Output norms** | No | The L2-norm of layer outputs can highlight issues like gradient explosion or vanishing gradients. | Compute the L2-norm for each layer’s output. |" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Use this notebook as a code recipe. Add your own code and adapt the sections to your own model training needs." ] }, { @@ -40,7 +58,7 @@ "## Before you start\n", "\n", " 1. Create a Neptune Scale account. [Register →](https://neptune.ai/early-access)\n", - " 2. Create a Neptune project that you will use for tracking metadata. For instructions, see [Projects](https://docs-beta.neptune.ai/projects/) in the Neptune Scale docs.\n", + " 2. Create a Neptune project for tracking metadata. For instructions, see [Projects](https://docs-beta.neptune.ai/projects/) in the Neptune Scale docs.\n", " 3. Install and configure Neptune Scale for logging metadata. For instructions, see [Get started](https://docs-beta.neptune.ai/setup) in the Neptune Scale docs." ] }, @@ -49,12 +67,9 @@ "metadata": {}, "source": [ "### Set environment variables\n", - "Once you have your project and workspace names as well as your API token from the UI, you can set them as environment variables to be used through this notebook. 
Uncomment the code block below and replace with your own details.\n", - "```python \n", - "# Set Neptune credentials as environment variables\n", - "%env NEPTUNE_API_TOKEN = \"your_api_token_here\"\n", - "%env NEPTUNE_PROJECT = \"your_workspace_name_here/your_project_name_here\"\n", - "```" + "By setting your project name and API token as environment variables, you can use them throughout this notebook.\n", + "\n", + "Uncomment the code block below and replace placeholder values with your own credentials:" ] }, { @@ -64,8 +79,8 @@ "outputs": [], "source": [ "# Set Neptune credentials as environment variables\n", - "# %env NEPTUNE_API_TOKEN = \"your_api_token_here\"\n", - "# %env NEPTUNE_PROJECT = \"your_workspace_name_here/your_project_name_here\"" + "# %env NEPTUNE_API_TOKEN = \"your_api_token\"\n", + "# %env NEPTUNE_PROJECT = \"your_workspace_name_here/your_project_name\"" ] }, { @@ -138,7 +153,11 @@ "metadata": {}, "source": [ "## Download or use next token prediction dataset\n", - "The dataset used in this example is taken from [HuggingFace](https://huggingface.co/datasets/Na0s/Next_Token_Prediction_dataset). In this example, you can increase the size of the dataset to test the logging capabilities of Neptune, but note that increasing the dataset size will increase the time taken for the full dataset to download. The current setup only downloads the first parquet file from the Hugging Face public dataset. The validation dataset is also reduced to reduce the training loop execution time - you can increase the validation size by changing the `test_size` key-value pair in the `train_test_split()` method from HF. " + "This example uses the dataset from [HuggingFace](https://huggingface.co/datasets/Na0s/Next_Token_Prediction_dataset) (HF).\n", + "\n", + "You can increase the size of the dataset to test the logging capabilities of Neptune. Note that increasing the size will increase the time needed for the dataset to download. 
The current setup only downloads the first parquet file from the Hugging Face public dataset.\n", + "\n", + "The validation dataset is also reduced to decrease the training loop execution time. To increase the validation size, change the `test_size` key-value pair in the `train_test_split()` method from HuggingFace." ] }, { @@ -153,7 +172,7 @@ " \"train\": base_url\n", " + \"train-00001-of-00067.parquet\", # download only the first 10 files from the HF dataset\n", " \"validation\": base_url + \"validation-00000-of-00001.parquet\",\n", - "} # doanload the complete validation dataset\n", + "} # download the complete validation dataset\n", "\n", "data_subset = load_dataset(\"parquet\", data_files=data_files, num_proc=4)\n", "# validation_subset = load_dataset(\"parquet\", data_files = {\"validation\": base_url + \"validation-00000-of-00001.parquet\"}, num_proc=4, split=[\"validation[:5%]\"])\n", @@ -168,8 +187,9 @@ "metadata": {}, "source": [ "## Create `DataLoader` objects\n", - "* To execute the models with PyTorch, we convert the training and validation datasets to tensors and then setup DataLoader for easier batching in our training loop.\n", - "* The model architecture requires the vocabulary size as an input and this we calculate the max token from the dataset." + "To execute the models with PyTorch, convert the training and validation datasets to tensors. Then, set up `DataLoader` for easier batching in the training loop.\n", + "\n", + "The model architecture requires the vocabulary size as an input and this is why we calculate the max token from the dataset." 
] }, { @@ -180,10 +200,10 @@ "source": [ "train_subset = data_subset[\"train\"].with_format(\n", " type=\"torch\", columns=[\"text\", \"input_ids\", \"labels\"]\n", - ") # HF provides methods to convert datatypes to tensors\n", + ") # HF provides methods to convert data types to tensors\n", "validation_subset = validation_subset[\"test\"].with_format(\n", " type=\"torch\", columns=[\"text\", \"input_ids\", \"labels\"]\n", - ") # HF provides methods to convert datatypes to tensors\n", + ") # HF provides methods to convert data types to tensors\n", "\n", "train_dataloader = DataLoader(train_subset, batch_size=params[\"batch_size\"], shuffle=True)\n", "val_dataloader = DataLoader(validation_subset, batch_size=params[\"batch_size\"], shuffle=True)\n", @@ -202,7 +222,11 @@ "metadata": {}, "source": [ "### Define PyTorch model architecture and helpers\n", - "We define a simple LLM model architecture using PyTorch. Since this is a text-based example, we use an embedding layer, a LSTM layer and a fully connected layer. This architecture can be adjusted to your needs and increased in size when testing the workflow. To increase the size of the LSTM layers, change the `num_layers` parameter in the parameters dictionary or to increase the number of fully connected layers, update the mode architecture itself." + "Define a simple LLM model architecture using PyTorch. Since this is a text-based example, we use an embedding layer, a LSTM layer, and a fully connected layer.\n", + "\n", + "You can adjust this architecture to your needs and increase its size when testing the workflow:\n", + "- To increase the size of the LSTM layers, change the `num_layers` parameter in the parameters dictionary.\n", + "- To increase the number of fully connected layers, update the mode architecture itself." 
] }, { @@ -252,7 +276,12 @@ "source": [ "### Create tracking class\n", "\n", - "This section creates a `HookManager` class that allows us to capture the **activations** and **gradients** from each layer. This class accepts a PyTorch model object as an input. This class will automatically capture the gradients and activations in each layer of the model. Below, you can see a pseudo implementation:\n", + "This section creates a `HookManager` class:\n", + "- It allows to capture the **activations** and **gradients** from each layer.\n", + "- It accepts a PyTorch model object as an input.\n", + "- It automatically captures the gradients and activations in each layer of the model.\n", + "\n", + "See a pseudo implementation:\n", "\n", "```python\n", "# Initialize model\n", @@ -273,7 +302,7 @@ " # Log values (mean, std, etc.) to Neptune\n", "```\n", "\n", - "_Important_: The `HookManager` class can be used in your own training script as it only accepts a model object as input." + "_Important_: You can use the `HookManager` class in your own training script as it only accepts a model object as input." ] }, { @@ -336,7 +365,13 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "## Setup model training\n", + "## Set up model training" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ "### Initialize Neptune run object and log hyperparameters" ] }, @@ -382,7 +417,9 @@ "metadata": {}, "source": [ "### Execute model training loop\n", - "In this loop, we configure the `HookManager` and register the hooks. In your training loop, you will need to use the `get_` methods to retrieve the stored values for the activations and gradients after the forward and backward passes are complete." + "In this loop, we configure the `HookManager` and register the hooks.\n", + "\n", + "In your training loop, use the `get_` methods to retrieve the stored values for the activations and gradients after the forward and backward passes are complete." 
] }, { @@ -391,7 +428,7 @@ "metadata": {}, "outputs": [], "source": [ - "# Create links to access active training run. Note - this is not required for the example to run, but for ease of illustration of the tutorial.\n", + "# Create links to access active training run. Note that this step isn't required for the example to run.\n", "print(\n", " f\"View training charts: https://scale.neptune.ai/{os.getenv(\"NEPTUNE_PROJECT\")}/-/run/?customId={custom_run_id}&detailsTab=charts\"\n", ")\n", @@ -491,16 +528,14 @@ "metadata": {}, "source": [ "## What's next?\n", - "While the model is training, you can start using our _super-snappy_ web UI to browse your metrics and create custom analyses and visualizations.\n", - "\n", - "### What to do\n", - "1. Navigate to the charts tab of the active run to visualize the large number of metrics being logged in near real time.\n", - "2. Filter the metrics using our [advanced regex searching capabilities](https://docs-beta.neptune.ai/charts#filtering-charts). For example, use the following query in the search bar; `.*gradient+.*fc\\d`. This query will filter all metrics for the gradients of the fully connected layers. The more FC layers, the more charts will appear. \n", - "3. Export this filter to a [dashboard](https://docs-beta.neptune.ai/custom_dashboard), using the _**Duplicate to**_ button. The saved dashboard will now only display these metrics during training.\n", - "4. Using our [dynamic metric selection](https://docs-beta.neptune.ai/chart_widget#dynamic-metric-selection) feature, you can update the chart widget to display all fully connected layers gradients in one chart. Use the same query as above in the chart widget.\n", - "5. 
Create a [custom report](https://docs-beta.neptune.ai/reports) to outline the model training, global metrics, debugging metrics and more.\n", - "\n", - "You can see a generic example of the end result [here](https://scale.neptune.ai/o/examples/org/LLM-Pretraining/reports/9e6a2cad-77e7-42df-9d64-28f07d37e908).\n" + "While the model is training, you can start using the Neptune web app to browse your metrics and create custom analyses and visualizations:\n", + "1. To visualize the large number of metrics being logged in near real time, navigate to the **Charts** tab of the active run.\n", + "2. Filter the metrics using the [advanced regex searching capabilities](https://docs-beta.neptune.ai/charts#filtering-charts). For example, enter `.*gradient+.*fc\\d` in the search bar. This query filters all metrics for the gradients of the fully connected layers. The more FC layers, the more charts will appear.\n", + "3. Export the filter to a [dashboard](https://docs-beta.neptune.ai/custom_dashboard). The saved dashboard will now only display these metrics during training.\n", + "4. Use the [dynamic metric selection](https://docs-beta.neptune.ai/chart_widget#dynamic-metric-selection) and update the chart widget to display all fully connected layers gradients in one chart. Again, use the `.*gradient+.*fc\\d` query.\n", + "5. 
Create a [custom report](https://docs-beta.neptune.ai/reports) to outline the model training, global metrics, debugging metrics, and more.\n", + "\n", + "See also [a generic example of the training result](https://scale.neptune.ai/o/examples/org/LLM-Pretraining/reports/9e6a2cad-77e7-42df-9d64-28f07d37e908).\n" ] } ], From 34c47f3ea7dd8e179b3a1a4de1780ac9c8b960d1 Mon Sep 17 00:00:00 2001 From: LeoRoccoBreedt Date: Thu, 27 Mar 2025 15:24:20 +0100 Subject: [PATCH 074/125] chore: update pip installation line --- .../pytorch/notebooks/pytorch_text_model_debugging.ipynb | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/integrations-and-supported-tools/pytorch/notebooks/pytorch_text_model_debugging.ipynb b/integrations-and-supported-tools/pytorch/notebooks/pytorch_text_model_debugging.ipynb index 31167f5..dfe143a 100644 --- a/integrations-and-supported-tools/pytorch/notebooks/pytorch_text_model_debugging.ipynb +++ b/integrations-and-supported-tools/pytorch/notebooks/pytorch_text_model_debugging.ipynb @@ -97,7 +97,7 @@ "outputs": [], "source": [ "# Install dependencies\n", - "! pip install -q -U neptune_scale torch datasets" + "! 
pip install -qU neptune_scale torch datasets" ] }, { From a2a2bf1013a9c8fc944905bc0873eacdcb83f776 Mon Sep 17 00:00:00 2001 From: LeoRoccoBreedt Date: Thu, 27 Mar 2025 15:31:40 +0100 Subject: [PATCH 075/125] chore: update url's to use the new run methods --- .../pytorch/notebooks/pytorch_text_model_debugging.ipynb | 9 +-------- 1 file changed, 1 insertion(+), 8 deletions(-) diff --git a/integrations-and-supported-tools/pytorch/notebooks/pytorch_text_model_debugging.ipynb b/integrations-and-supported-tools/pytorch/notebooks/pytorch_text_model_debugging.ipynb index dfe143a..9e41665 100644 --- a/integrations-and-supported-tools/pytorch/notebooks/pytorch_text_model_debugging.ipynb +++ b/integrations-and-supported-tools/pytorch/notebooks/pytorch_text_model_debugging.ipynb @@ -407,9 +407,7 @@ "run.add_tags(tags=[params[\"optimizer\"]], group_tags=True)\n", "run.add_tags(tags=[\"text\", \"LLM\", \"Simple\"])\n", "\n", - "print(\n", - " f\"View current experiment: https://scale.neptune.ai/{os.getenv(\"NEPTUNE_PROJECT\")}/runs/details?viewId=standard-view&runIdentificationKey={experiment_name}&type=experiment\"\n", - ")" + "print(run.get_experiment_url())" ] }, { @@ -428,11 +426,6 @@ "metadata": {}, "outputs": [], "source": [ - "# Create links to access active training run. 
"""Utilities for logging per-layer diagnostics of a PyTorch model to Neptune.

``HookManager`` owns the forward/backward hooks that capture activations and
gradients; ``TorchWatcher`` turns the captured tensors into scalar statistics
and ships them to a Neptune ``Run`` (or any object exposing ``log_metrics``).
"""

import warnings
from contextlib import contextmanager
from typing import Any, Dict, List, Literal, Optional, Union

import torch
import torch.nn as nn


class HookManager:
    """Registers and owns PyTorch hooks that capture per-layer tensors.

    Captured activations and gradients are kept in plain dicts keyed by the
    qualified module name (as produced by ``nn.Module.named_modules``).
    """

    def __init__(self, model: nn.Module, track_layers: Optional[List[type]] = None):
        """
        Args:
            model (nn.Module): The PyTorch model to track.
            track_layers (Optional[List[type]]): Layer *types* to hook.
                Defaults to the common trainable layers
                (Linear, Conv1d/2d/3d, LSTM, GRU, RNN).

        Raises:
            TypeError: If ``model`` is not an ``nn.Module``.
        """
        if not isinstance(model, nn.Module):
            raise TypeError("The model must be a PyTorch model")

        self.model = model
        self.hooks: List[torch.utils.hooks.RemovableHandle] = []
        self.activations: Dict[str, torch.Tensor] = {}
        self.gradients: Dict[str, torch.Tensor] = {}

        # Default layer types to track if not specified
        self.track_layers = track_layers or [
            nn.Linear,
            nn.Conv1d,
            nn.Conv2d,
            nn.Conv3d,
            nn.LSTM,
            nn.GRU,
            nn.RNN,
        ]

    def save_activation(self, name: str):
        """Return a forward hook that stores the layer's output under ``name``."""

        def hook(module, input, output):
            try:
                # Recurrent layers return (output, hidden-state) tuples; keep
                # only the output tensor.
                activation = output[0] if isinstance(output, tuple) else output
                self.activations[name] = activation.detach()
            except Exception as e:
                warnings.warn(f"Could not save activation for {name}: {e}")

        return hook

    def save_gradient(self, name: str):
        """Return a backward hook that stores the layer's output gradient."""

        def hook(module, grad_input, grad_output):
            try:
                # Save the first gradient output
                self.gradients[name] = grad_output[0].detach()
            except Exception as e:
                warnings.warn(f"Could not save gradient for {name}: {e}")

        return hook

    def register_hooks(self, track_activations: bool = True, track_gradients: bool = True):
        """Attach hooks to every tracked layer, replacing any existing hooks.

        Args:
            track_activations (bool): Register forward hooks for activations.
            track_gradients (bool): Register full backward hooks for gradients.
        """
        self.remove_hooks()

        # Single pass over the module tree (the original walked it twice,
        # once per hook family).
        tracked_types = tuple(self.track_layers)
        for name, module in self.model.named_modules():
            if isinstance(module, tracked_types):
                if track_activations:
                    self.hooks.append(module.register_forward_hook(self.save_activation(name)))
                if track_gradients:
                    self.hooks.append(module.register_full_backward_hook(self.save_gradient(name)))

    def remove_hooks(self):
        """Detach all registered hooks from the model."""
        for hook in self.hooks:
            hook.remove()
        self.hooks.clear()

    def clear(self):
        """Drop captured activations and gradients (hooks stay registered)."""
        self.activations.clear()
        self.gradients.clear()

    def get_activations(self) -> Dict[str, torch.Tensor]:
        """Return the most recently captured activations, keyed by layer name."""
        return self.activations

    def get_gradients(self) -> Dict[str, torch.Tensor]:
        """Return the most recently captured gradients, keyed by layer name."""
        return self.gradients

    def __del__(self):
        # Best-effort cleanup: attribute lookups can fail during interpreter
        # shutdown, so never let __del__ raise.
        try:
            self.remove_hooks()
        except Exception:
            pass


class TorchWatcher:
    """Computes scalar statistics of layer tensors and logs them each step."""

    def __init__(
        self,
        model: nn.Module,
        run: Any,  # Any object exposing ``log_metrics(data=..., step=...)``.
        track_layers: Optional[List[type]] = None,
    ) -> None:
        """
        Args:
            model (nn.Module): The PyTorch model to watch.
            run: Logging sink, e.g. a ``neptune_scale.Run``.
            track_layers (Optional[List[type]]): Layer types to hook; see
                ``HookManager``.
        """
        self.model = model
        self.run = run
        self.hm = HookManager(model, track_layers)
        self.debug_metrics: Dict[str, float] = {}

        # Default hook registration
        self.hm.register_hooks()

    def _safe_tensor_stats(self, tensor: torch.Tensor) -> Dict[str, float]:
        """Compute summary statistics of ``tensor``; empty dict on failure.

        Args:
            tensor (torch.Tensor): Input tensor.

        Returns:
            Dict of statistical metrics (mean, std, norm, min, max).
        """
        try:
            return {
                "mean": tensor.mean().item(),
                "std": tensor.std().item(),
                "norm": tensor.norm().item(),
                "min": tensor.min().item(),
                "max": tensor.max().item(),
            }
        except Exception as e:
            warnings.warn(f"Could not compute tensor statistics: {e}")
            return {}

    def track_activations(self):
        """Add activation statistics for every hooked layer to the buffer."""
        for layer, activation in self.hm.get_activations().items():
            if activation is not None:
                for stat_name, stat_value in self._safe_tensor_stats(activation).items():
                    self.debug_metrics[f"debug/activation/{layer}_{stat_name}"] = stat_value

    def track_gradients(self):
        """Add gradient statistics for every hooked layer to the buffer."""
        for layer, gradient in self.hm.get_gradients().items():
            if gradient is not None:
                for stat_name, stat_value in self._safe_tensor_stats(gradient).items():
                    self.debug_metrics[f"debug/gradient/{layer}_{stat_name}"] = stat_value

    def track_parameters(self):
        """Add parameter-gradient statistics (per named parameter) to the buffer."""
        for layer, param in self.model.named_parameters():
            # Parameters that did not take part in the backward pass have no
            # .grad and are skipped.
            if param is not None and param.grad is not None:
                for stat_name, stat_value in self._safe_tensor_stats(param.grad).items():
                    self.debug_metrics[f"debug/parameters/{layer}_{stat_name}"] = stat_value

    def watch(
        self,
        step: Union[int, float],
        log: Optional[List[Literal["gradients", "parameters", "activations"]]] = None,
    ):
        """Collect the requested statistics and log them at ``step``.

        Args:
            step (int|float): Logging step passed through to ``run.log_metrics``.
            log (Optional[List]): Subset of {"gradients", "parameters",
                "activations"} to collect. ``None`` (or the literal string
                ``"all"``, accepted for backward compatibility) collects
                everything. Unknown entries are reported via ``warnings``
                instead of being silently dropped.
        """
        # Reset metrics so stale values never leak into this step's payload.
        self.debug_metrics.clear()

        if log is None or log == "all":
            self.track_gradients()
            self.track_parameters()
            self.track_activations()
        else:
            for mode in log:
                if mode == "gradients":
                    self.track_gradients()
                elif mode == "parameters":
                    self.track_parameters()
                elif mode == "activations":
                    self.track_activations()
                else:
                    # The original silently ignored typos such as "gradient".
                    warnings.warn(f"Unknown tracking mode ignored: {mode!r}")

        try:
            self.run.log_metrics(data=self.debug_metrics, step=step)
        except Exception as e:
            warnings.warn(f"Logging failed: {e}")

        # Drop captured tensors so they do not accumulate across steps.
        self.hm.clear()

    @contextmanager
    def track(
        self,
        step: Union[int, float],
        log: Optional[List[Literal["gradients", "parameters", "activations"]]] = None,
    ):
        """Context manager that calls :meth:`watch` on exit.

        Args:
            step (int|float): Logging step.
            log (Optional[List]): Specific tracking modes; see :meth:`watch`.
        """
        try:
            yield self
        finally:
            self.watch(step, log)
from typing import List, Literal, Optional, Union


class TorchWatcher:
    """Notebook helper that logs layer-wise debug metrics through HookManager.

    Relies on the ``HookManager`` class defined in the previous notebook cell
    (for activation/gradient capture) and on a ``neptune_scale.Run`` sink.
    """

    def __init__(self, model: "nn.Module", run: "Run") -> None:
        """
        Args:
            model: The PyTorch model to watch (hooks are registered eagerly).
            run: Neptune run used as the logging sink.
        """
        self.model = model
        self.run = run
        self.hm = HookManager(model)
        self.hm.register_hooks()
        self.debug_metrics = {}

    def track_activations(self):
        """Record mean/std of each captured layer activation."""
        activations = self.hm.get_activations()
        for layer, activation in activations.items():
            # Fixed: the original tested ``layer is not None`` (layer names are
            # never None); the tensor itself is what may be missing.
            if activation is not None:
                self.debug_metrics[f"debug/activation/{layer}_mean"] = activation[0].mean().item()
                self.debug_metrics[f"debug/activation/{layer}_std"] = activation[0].std().item()

    def track_gradients(self):
        """Record the mean of each captured layer gradient."""
        gradients = self.hm.get_gradients()
        for layer, gradient in gradients.items():
            if gradient is not None:
                self.debug_metrics[f"debug/gradient/{layer}_mean"] = gradient.mean().item()

    def track_parameters(self):
        """Record std/mean/L2-norm of every parameter gradient."""
        for layer, param in self.model.named_parameters():
            # Fixed: guard on ``param.grad`` — parameters that took no part in
            # the backward pass have no gradient and would crash on ``.std()``.
            if param.grad is not None:
                self.debug_metrics[f"debug/parameters/{layer}_std"] = param.grad.std().item()
                self.debug_metrics[f"debug/parameters/{layer}_mean"] = param.grad.mean().item()
                self.debug_metrics[f"debug/parameters/{layer}_norm"] = (
                    param.grad.norm().item()
                )  # L2 norm (Euclidean norm) of the gradients

    def watch(
        self,
        step: Union[int, float],
        log: Union[str, List[Literal["gradients", "parameters", "activations"]], None] = "all",
    ):
        """Collect the requested metric families and log them at ``step``.

        Args:
            step: Logging step for ``run.log_metrics``.
            log: ``"all"`` (or ``None``) for every family, a single family
                name, or a list of family names. Fixed: the original ``match``
                only ever matched single strings, so list arguments (and the
                "activations" family) were silently ignored.
        """
        # Fixed: reset the buffer so stale metrics from a previous step (or a
        # different ``log`` selection) are not re-logged.
        self.debug_metrics = {}

        if log is None or log == "all":
            modes = ["gradients", "parameters", "activations"]
        elif isinstance(log, str):
            modes = [log]
        else:
            modes = list(log)

        for mode in modes:
            if mode == "gradients":
                self.track_gradients()
            elif mode == "parameters":
                self.track_parameters()
            elif mode == "activations":
                self.track_activations()

        self.run.log_metrics(data=self.debug_metrics, step=step)

        # Release captured tensors for the next step.
        self.hm.clear()
logged step for this series\n", + "- the step is exactly the same but the value is different\n", + "\n", + "For help, see https://docs-beta.neptune.ai/log_metrics\n", + "\n", + "2025-03-27 18:14:52,234 neptune:ERROR: \n", + "\n", + "NeptuneSeriesStepNonIncreasing: Subsequent steps of a series must be increasing.\n", + "\n", + "This can be caused by either:\n", + "- The step of a series value is smaller than the most recently logged step for this series\n", + "- the step is exactly the same but the value is different\n", + "\n", + "For help, see https://docs-beta.neptune.ai/log_metrics\n", + "\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Step 5 / 10241, Loss: 9.699625968933105\n", + "5\n", + "gradients\n", + "Step 6 / 10241, Loss: 9.690692901611328\n", + "6\n", + "gradients\n", + "Step 7 / 10241, Loss: 10.060093879699707\n", + "7\n", + "gradients\n" + ] + }, + { + "ename": "KeyboardInterrupt", + "evalue": "", + "output_type": "error", + "traceback": [ + "\u001b[1;31m---------------------------------------------------------------------------\u001b[0m", + "\u001b[1;31mKeyboardInterrupt\u001b[0m Traceback (most recent call last)", + "Cell \u001b[1;32mIn[19], line 36\u001b[0m\n\u001b[0;32m 33\u001b[0m optimizer\u001b[38;5;241m.\u001b[39mzero_grad()\n\u001b[0;32m 35\u001b[0m \u001b[38;5;66;03m# Forward pass\u001b[39;00m\n\u001b[1;32m---> 36\u001b[0m logits \u001b[38;5;241m=\u001b[39m \u001b[43mmodel\u001b[49m\u001b[43m(\u001b[49m\u001b[43minput_ids\u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m 38\u001b[0m \u001b[38;5;66;03m# Compute the loss (ignore padding tokens by masking labels)\u001b[39;00m\n\u001b[0;32m 39\u001b[0m loss \u001b[38;5;241m=\u001b[39m criterion(logits\u001b[38;5;241m.\u001b[39mview(\u001b[38;5;241m-\u001b[39m\u001b[38;5;241m1\u001b[39m, vocab_size), labels\u001b[38;5;241m.\u001b[39mview(\u001b[38;5;241m-\u001b[39m\u001b[38;5;241m1\u001b[39m))\n", + "File 
\u001b[1;32mc:\\Users\\leo.breedt\\miniconda3\\envs\\neptune_scale_py_312_base\\Lib\\site-packages\\torch\\nn\\modules\\module.py:1739\u001b[0m, in \u001b[0;36mModule._wrapped_call_impl\u001b[1;34m(self, *args, **kwargs)\u001b[0m\n\u001b[0;32m 1737\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_compiled_call_impl(\u001b[38;5;241m*\u001b[39margs, \u001b[38;5;241m*\u001b[39m\u001b[38;5;241m*\u001b[39mkwargs) \u001b[38;5;66;03m# type: ignore[misc]\u001b[39;00m\n\u001b[0;32m 1738\u001b[0m \u001b[38;5;28;01melse\u001b[39;00m:\n\u001b[1;32m-> 1739\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_call_impl\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n", + "File \u001b[1;32mc:\\Users\\leo.breedt\\miniconda3\\envs\\neptune_scale_py_312_base\\Lib\\site-packages\\torch\\nn\\modules\\module.py:1845\u001b[0m, in \u001b[0;36mModule._call_impl\u001b[1;34m(self, *args, **kwargs)\u001b[0m\n\u001b[0;32m 1842\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m inner()\n\u001b[0;32m 1844\u001b[0m \u001b[38;5;28;01mtry\u001b[39;00m:\n\u001b[1;32m-> 1845\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[43minner\u001b[49m\u001b[43m(\u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m 1846\u001b[0m \u001b[38;5;28;01mexcept\u001b[39;00m \u001b[38;5;167;01mException\u001b[39;00m:\n\u001b[0;32m 1847\u001b[0m \u001b[38;5;66;03m# run always called hooks if they have not already been run\u001b[39;00m\n\u001b[0;32m 1848\u001b[0m \u001b[38;5;66;03m# For now only forward hooks have the always_call option but perhaps\u001b[39;00m\n\u001b[0;32m 1849\u001b[0m \u001b[38;5;66;03m# this functionality should be added to full backward hooks as well.\u001b[39;00m\n\u001b[0;32m 
1850\u001b[0m \u001b[38;5;28;01mfor\u001b[39;00m hook_id, hook \u001b[38;5;129;01min\u001b[39;00m _global_forward_hooks\u001b[38;5;241m.\u001b[39mitems():\n", + "File \u001b[1;32mc:\\Users\\leo.breedt\\miniconda3\\envs\\neptune_scale_py_312_base\\Lib\\site-packages\\torch\\nn\\modules\\module.py:1793\u001b[0m, in \u001b[0;36mModule._call_impl..inner\u001b[1;34m()\u001b[0m\n\u001b[0;32m 1790\u001b[0m bw_hook \u001b[38;5;241m=\u001b[39m BackwardHook(\u001b[38;5;28mself\u001b[39m, full_backward_hooks, backward_pre_hooks)\n\u001b[0;32m 1791\u001b[0m args \u001b[38;5;241m=\u001b[39m bw_hook\u001b[38;5;241m.\u001b[39msetup_input_hook(args)\n\u001b[1;32m-> 1793\u001b[0m result \u001b[38;5;241m=\u001b[39m \u001b[43mforward_call\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m 1794\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m _global_forward_hooks \u001b[38;5;129;01mor\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_forward_hooks:\n\u001b[0;32m 1795\u001b[0m \u001b[38;5;28;01mfor\u001b[39;00m hook_id, hook \u001b[38;5;129;01min\u001b[39;00m (\n\u001b[0;32m 1796\u001b[0m \u001b[38;5;241m*\u001b[39m_global_forward_hooks\u001b[38;5;241m.\u001b[39mitems(),\n\u001b[0;32m 1797\u001b[0m \u001b[38;5;241m*\u001b[39m\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_forward_hooks\u001b[38;5;241m.\u001b[39mitems(),\n\u001b[0;32m 1798\u001b[0m ):\n\u001b[0;32m 1799\u001b[0m \u001b[38;5;66;03m# mark that always called hook is run\u001b[39;00m\n", + "Cell \u001b[1;32mIn[5], line 12\u001b[0m, in \u001b[0;36mSimpleLLM.forward\u001b[1;34m(self, x)\u001b[0m\n\u001b[0;32m 10\u001b[0m x \u001b[38;5;241m=\u001b[39m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39membedding(x)\n\u001b[0;32m 11\u001b[0m lstm_out, _ \u001b[38;5;241m=\u001b[39m 
\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mlstm(x) \u001b[38;5;66;03m# LSTM returns output and hidden/cell state tuple\u001b[39;00m\n\u001b[1;32m---> 12\u001b[0m out \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mfc1\u001b[49m\u001b[43m(\u001b[49m\u001b[43mlstm_out\u001b[49m\u001b[43m)\u001b[49m \u001b[38;5;66;03m# Use the last output from the LSTM\u001b[39;00m\n\u001b[0;32m 13\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m out\n", + "File \u001b[1;32mc:\\Users\\leo.breedt\\miniconda3\\envs\\neptune_scale_py_312_base\\Lib\\site-packages\\torch\\nn\\modules\\module.py:1739\u001b[0m, in \u001b[0;36mModule._wrapped_call_impl\u001b[1;34m(self, *args, **kwargs)\u001b[0m\n\u001b[0;32m 1737\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_compiled_call_impl(\u001b[38;5;241m*\u001b[39margs, \u001b[38;5;241m*\u001b[39m\u001b[38;5;241m*\u001b[39mkwargs) \u001b[38;5;66;03m# type: ignore[misc]\u001b[39;00m\n\u001b[0;32m 1738\u001b[0m \u001b[38;5;28;01melse\u001b[39;00m:\n\u001b[1;32m-> 1739\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_call_impl\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n", + "File \u001b[1;32mc:\\Users\\leo.breedt\\miniconda3\\envs\\neptune_scale_py_312_base\\Lib\\site-packages\\torch\\nn\\modules\\module.py:1845\u001b[0m, in \u001b[0;36mModule._call_impl\u001b[1;34m(self, *args, **kwargs)\u001b[0m\n\u001b[0;32m 1842\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m inner()\n\u001b[0;32m 1844\u001b[0m \u001b[38;5;28;01mtry\u001b[39;00m:\n\u001b[1;32m-> 1845\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m 
\u001b[43minner\u001b[49m\u001b[43m(\u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m 1846\u001b[0m \u001b[38;5;28;01mexcept\u001b[39;00m \u001b[38;5;167;01mException\u001b[39;00m:\n\u001b[0;32m 1847\u001b[0m \u001b[38;5;66;03m# run always called hooks if they have not already been run\u001b[39;00m\n\u001b[0;32m 1848\u001b[0m \u001b[38;5;66;03m# For now only forward hooks have the always_call option but perhaps\u001b[39;00m\n\u001b[0;32m 1849\u001b[0m \u001b[38;5;66;03m# this functionality should be added to full backward hooks as well.\u001b[39;00m\n\u001b[0;32m 1850\u001b[0m \u001b[38;5;28;01mfor\u001b[39;00m hook_id, hook \u001b[38;5;129;01min\u001b[39;00m _global_forward_hooks\u001b[38;5;241m.\u001b[39mitems():\n", + "File \u001b[1;32mc:\\Users\\leo.breedt\\miniconda3\\envs\\neptune_scale_py_312_base\\Lib\\site-packages\\torch\\nn\\modules\\module.py:1793\u001b[0m, in \u001b[0;36mModule._call_impl..inner\u001b[1;34m()\u001b[0m\n\u001b[0;32m 1790\u001b[0m bw_hook \u001b[38;5;241m=\u001b[39m BackwardHook(\u001b[38;5;28mself\u001b[39m, full_backward_hooks, backward_pre_hooks)\n\u001b[0;32m 1791\u001b[0m args \u001b[38;5;241m=\u001b[39m bw_hook\u001b[38;5;241m.\u001b[39msetup_input_hook(args)\n\u001b[1;32m-> 1793\u001b[0m result \u001b[38;5;241m=\u001b[39m \u001b[43mforward_call\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m 1794\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m _global_forward_hooks \u001b[38;5;129;01mor\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_forward_hooks:\n\u001b[0;32m 1795\u001b[0m \u001b[38;5;28;01mfor\u001b[39;00m hook_id, hook \u001b[38;5;129;01min\u001b[39;00m (\n\u001b[0;32m 1796\u001b[0m \u001b[38;5;241m*\u001b[39m_global_forward_hooks\u001b[38;5;241m.\u001b[39mitems(),\n\u001b[0;32m 1797\u001b[0m 
\u001b[38;5;241m*\u001b[39m\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_forward_hooks\u001b[38;5;241m.\u001b[39mitems(),\n\u001b[0;32m 1798\u001b[0m ):\n\u001b[0;32m 1799\u001b[0m \u001b[38;5;66;03m# mark that always called hook is run\u001b[39;00m\n", + "File \u001b[1;32mc:\\Users\\leo.breedt\\miniconda3\\envs\\neptune_scale_py_312_base\\Lib\\site-packages\\torch\\nn\\modules\\linear.py:125\u001b[0m, in \u001b[0;36mLinear.forward\u001b[1;34m(self, input)\u001b[0m\n\u001b[0;32m 124\u001b[0m \u001b[38;5;28;01mdef\u001b[39;00m \u001b[38;5;21mforward\u001b[39m(\u001b[38;5;28mself\u001b[39m, \u001b[38;5;28minput\u001b[39m: Tensor) \u001b[38;5;241m-\u001b[39m\u001b[38;5;241m>\u001b[39m Tensor:\n\u001b[1;32m--> 125\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[43mF\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mlinear\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;28;43minput\u001b[39;49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mweight\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mbias\u001b[49m\u001b[43m)\u001b[49m\n", + "\u001b[1;31mKeyboardInterrupt\u001b[0m: " + ] + } + ], "source": [ "# Training setup\n", "debug_metrics = {}\n", @@ -438,8 +640,8 @@ " ignore_index=-100\n", ") # Ignore the buffering index of -100 in the dataset\n", "\n", - "hook_manager = HookManager(model)\n", - "hook_manager.register_hooks()\n", + "# Define watcher class\n", + "watcher = TorchWatcher(model=model, run=run)\n", "\n", "device = torch.device(\"cuda\" if torch.cuda.is_available() else \"cpu\")\n", "model.to(device)\n", @@ -451,7 +653,6 @@ " for batch in train_dataloader:\n", " model.train()\n", " step_counter += 1\n", - " hook_manager.clear()\n", "\n", " input_ids = batch[\"input_ids\"].to(device)\n", " labels = batch[\"labels\"].to(device)\n", @@ -471,26 +672,8 @@ " total_loss += loss.item()\n", " 
print(f\"Step {step_counter} / {len(train_dataloader)}, Loss: {loss.item()}\")\n", "\n", - " # Track activations\n", - " activations = hook_manager.get_activations()\n", - " for layer, activation in activations.items():\n", - " if layer is not None:\n", - " debug_metrics[f\"debug/activation/{layer}_mean\"] = activation[0].mean().item()\n", - " debug_metrics[f\"debug/activation/{layer}_std\"] = activation[0].std().item()\n", - "\n", - " # Track gradients with hooks\n", - " gradients = hook_manager.get_gradients()\n", - " for layer, gradient in gradients.items():\n", - " debug_metrics[f\"debug/gradient/{layer}_mean\"] = gradient.mean().item()\n", - "\n", - " # Track gradients per layer at each epoch\n", - " for layer, param in model.named_parameters():\n", - " if param is not None:\n", - " debug_metrics[f\"debug/parameters/{layer}_std\"] = param.grad.std().item()\n", - " debug_metrics[f\"debug/parameters/{layer}_mean\"] = param.grad.mean().item()\n", - " debug_metrics[f\"debug/parameters/{layer}_norm\"] = (\n", - " param.grad.norm().item()\n", - " ) # L2 norm (Euclidean norm) of the gradients\n", + " # Call watch() method in loop determing which layer-wise metrics to watch\n", + " watcher.watch(step=step_counter, log=\"gradients\")\n", "\n", " if step_counter % 50 == 0: # Log validation loss at every 50 steps\n", " val_loss = evaluate(model, val_dataloader, criterion, device, vocab_size)\n", From f44323d5e9232be466902c73db636ed54aa7dc9f Mon Sep 17 00:00:00 2001 From: LeoRoccoBreedt Date: Mon, 31 Mar 2025 16:27:58 +0200 Subject: [PATCH 077/125] feat: create a TorchWatcher package - package to initialize hooks for Pytorch models, replacing theHookManager class - add readme.md for using the package - update the degbugging pytorch example to use the new package --- .../pytorch/notebooks/README.md | 115 +++++++ .../pytorch/notebooks/TorchWatcher.py | 136 ++++---- .../pytorch_text_model_debugging.ipynb | 320 ++---------------- 3 files changed, 230 insertions(+), 341 
deletions(-) create mode 100644 integrations-and-supported-tools/pytorch/notebooks/README.md diff --git a/integrations-and-supported-tools/pytorch/notebooks/README.md b/integrations-and-supported-tools/pytorch/notebooks/README.md new file mode 100644 index 0000000..07e74b7 --- /dev/null +++ b/integrations-and-supported-tools/pytorch/notebooks/README.md @@ -0,0 +1,115 @@ +# TorchWatcher + +TorchWatcher is a powerful tool for monitoring PyTorch models during training. It helps you track activations, gradients, and parameters of your model layers in real-time using neptune.ai for logging. + +## Features + +- Track layer activations, gradients, and parameters +- Automatic hook management +- Select from predefined tensor statistics +- Flexible tracking configuration +- Support for various layer types (Linear, Conv1d/2d/3d, LSTM, GRU, RNN) + +## Installation + +Make sure you have the required dependencies: +```bash +pip install torch neptune_scale +``` + +## Quick Start + +1. Initialize your Neptune run: +```python +from neptune_scale import Run +run = Run( + experiment_name="your-experiment-name", +) +``` + +2. Create your PyTorch model and initialize TorchWatcher: +```python +from TorchWatcher import TorchWatcher + +model = YourModel() +watcher = TorchWatcher(model, run) # Uses default mean() statistic +``` + +3. 
Use the watcher in your training loop: +```python +# Forward pass +output = model(x_batch) +loss = criterion(output, y_batch) + +# Backward pass +optimizer.zero_grad() +loss.backward() +optimizer.step() + +# Track metrics after the forward and backward passes +watcher.watch(step=step) +``` + +## Advanced Usage + +### Tensor Statistics + +You can select which predefined statistics to compute: +```python +watcher = TorchWatcher( + model, + run, + tensor_stats=['mean', 'std', 'norm'] # Select from available statistics +) +``` + +Available statistics: +- `mean`: Mean value +- `std`: Standard deviation +- `norm`: L2 norm +- `min`: Minimum value +- `max`: Maximum value +- `var`: Variance +- `abs_mean`: Mean of absolute values + +By default, only the mean statistic is computed if no statistics are specified. + +### Custom Layer Tracking + +You can specify which layer types to track: +```python +watcher = TorchWatcher( + model, + run, + track_layers=[nn.Linear, nn.Conv2d] # Only track Linear and Conv2d layers +) +``` + +### Selective Tracking + +You can choose which metrics to track: +```python +watcher.watch( + step=epoch, + log=["gradients", "parameters"] # Only track gradients and parameters, not activations +) +``` + +## Example + +See `torch_watcher_example.py` for a complete working example that demonstrates: +- Model definition +- Data generation +- Training loop with metrics tracking +- Validation +- Neptune integration + +## Logged Metrics + +The following metrics are automatically logged to Neptune: + +- `debug/activation/{layer}_{stat_name}`: Layer activation statistics +- `debug/gradient/{layer}_{stat_name}`: Layer gradient statistics +- `debug/parameters/{layer}_{stat_name}`: Parameter statistics + +Where `stat_name` corresponds to the statistics you selected. By default, only the mean statistic is computed. 
\ No newline at end of file diff --git a/integrations-and-supported-tools/pytorch/notebooks/TorchWatcher.py b/integrations-and-supported-tools/pytorch/notebooks/TorchWatcher.py index ababcee..7d9dd08 100644 --- a/integrations-and-supported-tools/pytorch/notebooks/TorchWatcher.py +++ b/integrations-and-supported-tools/pytorch/notebooks/TorchWatcher.py @@ -1,27 +1,39 @@ +import warnings +from typing import Any, Dict, List, Literal, Optional, Union import torch import torch.nn as nn -from typing import Literal, List, Optional, Union, Dict, Any -import warnings -from contextlib import contextmanager + +# Predefined tensor statistics +TENSOR_STATS = { + "mean": lambda x: x.mean().item(), + "std": lambda x: x.std().item(), + "norm": lambda x: x.norm().item(), + "min": lambda x: x.min().item(), + "max": lambda x: x.max().item(), + "var": lambda x: x.var().item(), + "abs_mean": lambda x: x.abs().mean().item(), +} + class HookManager: """ A robust hook management class for PyTorch models to track activations, gradients, and parameters. - + Improvements: - More comprehensive error handling - Flexible hook registration - Support for more layer types - Configurable tracking """ + def __init__(self, model: nn.Module, track_layers: Optional[List[type]] = None): """ Initialize HookManager with additional configuration options. - + Args: model (nn.Module): The PyTorch model to track - track_layers (Optional[List[type]]): List of layer types to track. + track_layers (Optional[List[type]]): List of layer types to track. Defaults to common layer types if not specified. 
""" if not isinstance(model, nn.Module): @@ -31,20 +43,21 @@ def __init__(self, model: nn.Module, track_layers: Optional[List[type]] = None): self.hooks: List[torch.utils.hooks.RemovableHandle] = [] self.activations: Dict[str, torch.Tensor] = {} self.gradients: Dict[str, torch.Tensor] = {} - + # Default layer types to track if not specified self.track_layers = track_layers or [ - nn.Linear, - nn.Conv1d, - nn.Conv2d, - nn.Conv3d, - nn.LSTM, - nn.GRU, - nn.RNN + nn.Linear, + nn.Conv1d, + nn.Conv2d, + nn.Conv3d, + nn.LSTM, + nn.GRU, + nn.RNN, ] def save_activation(self, name: str): """Create a forward hook to save layer activations.""" + def hook(module, input, output): try: # Handle different output types (tensor or tuple) @@ -52,22 +65,25 @@ def hook(module, input, output): self.activations[name] = activation.detach() except Exception as e: warnings.warn(f"Could not save activation for {name}: {e}") + return hook def save_gradient(self, name: str): """Create a backward hook to save layer gradients.""" + def hook(module, grad_input, grad_output): try: # Save the first gradient output self.gradients[name] = grad_output[0].detach() except Exception as e: warnings.warn(f"Could not save gradient for {name}: {e}") + return hook def register_hooks(self, track_activations: bool = True, track_gradients: bool = True): """ Register hooks for the model with configurable tracking. - + Args: track_activations (bool): Whether to track layer activations track_gradients (bool): Whether to track layer gradients @@ -117,50 +133,69 @@ class TorchWatcher: """ A comprehensive tracking mechanism for PyTorch models with enhanced logging and context management. 
""" - def __init__(self, - model: nn.Module, - run: Any, # Made more flexible to support different logging mechanisms - track_layers: Optional[List[type]] = None - ) -> None: + + def __init__( + self, + model: nn.Module, + run: Any, # Made more flexible to support different logging mechanisms + track_layers: Optional[List[type]] = None, + tensor_stats: Optional[List[str]] = None, + ) -> None: """ Initialize TorchWatcher with more configuration options. - + Args: model (nn.Module): The PyTorch model to watch - run: Logging mechanism (e.g., Weights & Biases Run object) + run: Logging mechanism from Neptune track_layers (Optional[List[type]]): Layer types to specifically track + tensor_stats (Optional[List[str]]): List of statistics to compute. + Available options: mean, std, norm, min, max, var, abs_mean. + Defaults to ['mean'] if not specified. """ + if not isinstance(model, nn.Module): + raise TypeError("The model must be a PyTorch model") + self.model = model self.run = run self.hm = HookManager(model, track_layers) self.debug_metrics: Dict[str, float] = {} - + + # Validate and set tensor statistics + if tensor_stats is None: + tensor_stats = ["mean"] + + # Validate that all requested statistics exist + invalid_stats = [stat for stat in tensor_stats if stat not in TENSOR_STATS] + if invalid_stats: + raise ValueError( + f"Invalid statistics requested: {invalid_stats}. " + f"Available statistics are: {list(TENSOR_STATS.keys())}" + ) + + self.tensor_stats = {stat: TENSOR_STATS[stat] for stat in tensor_stats} + # Default hook registration self.hm.register_hooks() def _safe_tensor_stats(self, tensor: torch.Tensor) -> Dict[str, float]: """ Safely compute tensor statistics with error handling. 
- + Args: tensor (torch.Tensor): Input tensor - + Returns: Dict of statistical metrics """ - try: - return { - 'mean': tensor.mean().item(), - 'std': tensor.std().item(), - 'norm': tensor.norm().item(), - 'min': tensor.min().item(), - 'max': tensor.max().item() - } - except Exception as e: - warnings.warn(f"Could not compute tensor statistics: {e}") - return {} + stats = {} + for stat_name, stat_func in self.tensor_stats.items(): + try: + stats[stat_name] = stat_func(tensor) + except Exception as e: + warnings.warn(f"Could not compute {stat_name} statistic: {e}") + return stats - def track_activations(self): + def track_activations(self): """Track layer activations with enhanced statistics.""" activations = self.hm.get_activations() for layer, activation in activations.items(): @@ -186,16 +221,17 @@ def track_parameters(self): for stat_name, stat_value in stats.items(): self.debug_metrics[f"debug/parameters/{layer}_{stat_name}"] = stat_value - def watch(self, - step: Union[int, float], - log: Optional[List[Literal["gradients", "parameters", "activations"]]] = None - ): + def watch( + self, + step: Union[int, float], + log: Optional[List[Literal["gradients", "parameters", "activations"]]] = None, + ): """ Log debug metrics with flexible configuration. - + Args: step (int|float): Logging step - log (Optional[List]): Specific tracking modes. + log (Optional[List]): Specific tracking modes. Defaults to all if None. """ # Reset metrics @@ -223,17 +259,3 @@ def watch(self, # Clear hooks self.hm.clear() - - @contextmanager - def track(self, step: Union[int, float], log: Optional[List[Literal["gradients", "parameters", "activations"]]] = None): - """ - Context manager for tracking with automatic hook management. 
- - Args: - step (int|float): Logging step - log (Optional[List]): Specific tracking modes - """ - try: - yield self - finally: - self.watch(step, log) diff --git a/integrations-and-supported-tools/pytorch/notebooks/pytorch_text_model_debugging.ipynb b/integrations-and-supported-tools/pytorch/notebooks/pytorch_text_model_debugging.ipynb index dfb5396..1d7dadc 100644 --- a/integrations-and-supported-tools/pytorch/notebooks/pytorch_text_model_debugging.ipynb +++ b/integrations-and-supported-tools/pytorch/notebooks/pytorch_text_model_debugging.ipynb @@ -102,18 +102,9 @@ }, { "cell_type": "code", - "execution_count": 1, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "name": "stderr", - "output_type": "stream", - "text": [ - "c:\\Users\\leo.breedt\\miniconda3\\envs\\neptune_scale_py_312_base\\Lib\\site-packages\\tqdm\\auto.py:21: TqdmWarning: IProgress not found. Please update jupyter and ipywidgets. See https://ipywidgets.readthedocs.io/en/stable/user_install.html\n", - " from .autonotebook import tqdm as notebook_tqdm\n" - ] - } - ], + "outputs": [], "source": [ "# Import libraries\n", "import torch\n", @@ -139,7 +130,7 @@ }, { "cell_type": "code", - "execution_count": 2, + "execution_count": null, "metadata": {}, "outputs": [], "source": [ @@ -173,18 +164,9 @@ }, { "cell_type": "code", - "execution_count": 3, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Training samples: 81926 \n", - "Validation samples: 935\n" - ] - } - ], + "outputs": [], "source": [ "# For the example, download a random subset of 10% of the original dataset\n", "base_url = \"https://huggingface.co/datasets/Na0s/Next_Token_Prediction_dataset/resolve/main/data/\"\n", @@ -214,17 +196,9 @@ }, { "cell_type": "code", - "execution_count": 4, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Vocabulary size: 128257\n" - ] - } - ], + 
"outputs": [], "source": [ "train_subset = data_subset[\"train\"].with_format(\n", " type=\"torch\", columns=[\"text\", \"input_ids\", \"labels\"]\n", @@ -259,7 +233,7 @@ }, { "cell_type": "code", - "execution_count": 5, + "execution_count": null, "metadata": {}, "outputs": [], "source": [ @@ -302,35 +276,35 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "### Create tracking class\n", + "### Setup tracking class\n", "\n", - "This section creates a `HookManager` class:\n", - "- It allows to capture the **activations** and **gradients** from each layer.\n", - "- It accepts a PyTorch model object as an input.\n", - "- It automatically captures the gradients and activations in each layer of the model.\n", + "This section intializes the `TorchWatcher` class:\n", + "- It accepts a PyTorch model object as an input. \n", + "- It allows you to capture the **parameters**, **activations** and **gradients** from each layer.\n", + "- Specify which tensor statistics to capture.\n", "\n", "See a pseudo implementation:\n", "\n", "```python\n", - "# Initialize model\n", - "model = your_ModelClass()\n", - "# Register hooks\n", - "hm = HookManager(model)\n", - "hm.register_hooks()\n", + "from TorchWatcher import TorchWatcher\n", + "\n", + "model = YourModel()\n", + "watcher = TorchWatcher(model, run) # Uses default mean() statistic\n", "\n", "# Training loop\n", "for epoch in range(3):\n", - " \n", - " # Forward pass, e.g. model.train()\n", - " # Backward pass, e.g. loss.backward()\n", - " \n", - " activations = hm.get_activations()\n", - " gradients = hm.get_gradients()\n", - "\n", - " # Log values (mean, std, etc.) to Neptune\n", - "```\n", - "\n", - "_Important_: You can use the `HookManager` class in your own training script as it only accepts a model object as input." 
+ " # Forward pass\n", + " output = model(x_batch)\n", + " loss = criterion(output, y_batch)\n", + "\n", + " # Backward pass\n", + " optimizer.zero_grad()\n", + " loss.backward()\n", + " optimizer.step()\n", + "\n", + " # Track metrics after the forward and backward passes\n", + " watcher.watch(step=epoch)\n", + "```" ] }, { @@ -339,109 +313,7 @@ "metadata": {}, "outputs": [], "source": [ - "# A class to manage hooks for activations and gradients\n", - "class HookManager:\n", - " def __init__(self, model):\n", - " if not isinstance(model, nn.Module):\n", - " raise TypeError(\"The model must be a PyTorch model\")\n", - "\n", - " self.model = model\n", - " self.hooks = []\n", - " self.activations = {}\n", - " self.gradients = {}\n", - "\n", - " # Function to save activations\n", - " def save_activation(self, name):\n", - " def hook(module, input, output):\n", - " self.activations[name] = output\n", - "\n", - " return hook\n", - "\n", - " # Function to save gradients (registering hooks for the model parameters)\n", - " def save_gradient(self, name):\n", - " def hook(module, grad_input, grad_output):\n", - " self.gradients[name] = grad_output[0]\n", - "\n", - " return hook\n", - "\n", - " # Function to register hooks for activations and gradients\n", - " def register_hooks(self):\n", - " # Register forward hooks for activations\n", - " for name, module in self.model.named_modules():\n", - " self.hooks.append(module.register_forward_hook(self.save_activation(name)))\n", - "\n", - " # Register backward hooks for gradients\n", - " for name, module in self.model.named_modules():\n", - " if isinstance(module, (nn.LSTM, nn.Linear)): # You can add more layer types here\n", - " self.hooks.append(module.register_full_backward_hook(self.save_gradient(name)))\n", - "\n", - " # Function to clear activations and gradients after use\n", - " def clear(self):\n", - " self.activations = {}\n", - " self.gradients = {}\n", - "\n", - " # Function to get activations\n", - " def 
get_activations(self):\n", - " return self.activations\n", - "\n", - " # Function to get gradients\n", - " def get_gradients(self):\n", - " return self.gradients\n", - "\n", - "\n", - "from typing import Literal, List, Optional \n", - "class TorchWatcher:\n", - " def __init__(self, \n", - " model: nn.Module, \n", - " run: Run, \n", - " ) -> None:\n", - " \n", - " self.model = model\n", - " self.run = run\n", - " self.hm = HookManager(model)\n", - " self.hm.register_hooks()\n", - " self.debug_metrics = {}\n", - "\n", - " def track_activations(self): \n", - " # Track activations\n", - " activations = self.hm.get_activations()\n", - " for layer, activation in activations.items():\n", - " if layer is not None:\n", - " self.debug_metrics[f\"debug/activation/{layer}_mean\"] = activation[0].mean().item()\n", - " self.debug_metrics[f\"debug/activation/{layer}_std\"] = activation[0].std().item()\n", - "\n", - " def track_gradients(self):\n", - " # Track gradients with hooks\n", - " gradients = self.hm.get_gradients()\n", - " for layer, gradient in gradients.items():\n", - " self.debug_metrics[f\"debug/gradient/{layer}_mean\"] = gradient.mean().item()\n", - "\n", - " def track_parameters(self):\n", - " # Track gradients per layer at each epoch\n", - " for layer, param in self.model.named_parameters():\n", - " if param is not None:\n", - " self.debug_metrics[f\"debug/parameters/{layer}_std\"] = param.grad.std().item()\n", - " self.debug_metrics[f\"debug/parameters/{layer}_mean\"] = param.grad.mean().item()\n", - " self.debug_metrics[f\"debug/parameters/{layer}_norm\"] = (\n", - " param.grad.norm().item()\n", - " ) # L2 norm (Euclidean norm) of the gradients\n", - "\n", - " def watch(self, step: int|float, log: Optional[List[Literal[\"gradients\", \"parameters\", \"activations\"]]] | None = \"all\"):\n", - " match log:\n", - " case \"gradients\":\n", - " self.track_gradients()\n", - " case \"parameters\":\n", - " self.track_parameters()\n", - " case \"all\":\n", - " 
self.track_gradients()\n", - " self.track_parameters()\n", - "\n", - " self.run.log_metrics(\n", - " data=self.debug_metrics,\n", - " step=step\n", - " )\n", - "\n", - " self.hm.clear()" + "from TorchWatcher import TorchWatcher" ] }, { @@ -460,17 +332,9 @@ }, { "cell_type": "code", - "execution_count": 17, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "https://scale.neptune.ai/leo/pytorch-tutorial/runs/details?runIdentificationKey=pytorch-text&type=experiment\n" - ] - } - ], + "outputs": [], "source": [ "from neptune_scale import Run\n", "from uuid import uuid4\n", @@ -506,127 +370,15 @@ "metadata": {}, "source": [ "### Execute model training loop\n", - "In this loop, we configure the `HookManager` and register the hooks.\n", "\n", - "In your training loop, use the `get_` methods to retrieve the stored values for the activations and gradients after the forward and backward passes are complete." + "In the training loop we call the `watch()` method from the `TorchWatcher` package after the backward and forward passes to track our parameters, gradients and activations with a combination of tensor statistics. 
" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Step 1 / 10241, Loss: 11.762494087219238\n", - "1\n", - "gradients\n", - "Step 2 / 10241, Loss: 11.673644065856934\n", - "2\n", - "gradients\n" - ] - }, - { - "name": "stderr", - "output_type": "stream", - "text": [ - "2025-03-27 18:14:46,982 neptune:ERROR: \n", - "\n", - "NeptuneSeriesStepNonIncreasing: Subsequent steps of a series must be increasing.\n", - "\n", - "This can be caused by either:\n", - "- The step of a series value is smaller than the most recently logged step for this series\n", - "- the step is exactly the same but the value is different\n", - "\n", - "For help, see https://docs-beta.neptune.ai/log_metrics\n", - "\n", - "2025-03-27 18:14:46,987 neptune:ERROR: \n", - "\n", - "NeptuneSeriesStepNonIncreasing: Subsequent steps of a series must be increasing.\n", - "\n", - "This can be caused by either:\n", - "- The step of a series value is smaller than the most recently logged step for this series\n", - "- the step is exactly the same but the value is different\n", - "\n", - "For help, see https://docs-beta.neptune.ai/log_metrics\n", - "\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Step 3 / 10241, Loss: 10.27919864654541\n", - "3\n", - "gradients\n", - "Step 4 / 10241, Loss: 9.545785903930664\n", - "4\n", - "gradients\n" - ] - }, - { - "name": "stderr", - "output_type": "stream", - "text": [ - "2025-03-27 18:14:52,233 neptune:ERROR: \n", - "\n", - "NeptuneSeriesStepNonIncreasing: Subsequent steps of a series must be increasing.\n", - "\n", - "This can be caused by either:\n", - "- The step of a series value is smaller than the most recently logged step for this series\n", - "- the step is exactly the same but the value is different\n", - "\n", - "For help, see https://docs-beta.neptune.ai/log_metrics\n", - "\n", - "2025-03-27 18:14:52,234 neptune:ERROR: \n", - "\n", - 
"NeptuneSeriesStepNonIncreasing: Subsequent steps of a series must be increasing.\n", - "\n", - "This can be caused by either:\n", - "- The step of a series value is smaller than the most recently logged step for this series\n", - "- the step is exactly the same but the value is different\n", - "\n", - "For help, see https://docs-beta.neptune.ai/log_metrics\n", - "\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Step 5 / 10241, Loss: 9.699625968933105\n", - "5\n", - "gradients\n", - "Step 6 / 10241, Loss: 9.690692901611328\n", - "6\n", - "gradients\n", - "Step 7 / 10241, Loss: 10.060093879699707\n", - "7\n", - "gradients\n" - ] - }, - { - "ename": "KeyboardInterrupt", - "evalue": "", - "output_type": "error", - "traceback": [ - "\u001b[1;31m---------------------------------------------------------------------------\u001b[0m", - "\u001b[1;31mKeyboardInterrupt\u001b[0m Traceback (most recent call last)", - "Cell \u001b[1;32mIn[19], line 36\u001b[0m\n\u001b[0;32m 33\u001b[0m optimizer\u001b[38;5;241m.\u001b[39mzero_grad()\n\u001b[0;32m 35\u001b[0m \u001b[38;5;66;03m# Forward pass\u001b[39;00m\n\u001b[1;32m---> 36\u001b[0m logits \u001b[38;5;241m=\u001b[39m \u001b[43mmodel\u001b[49m\u001b[43m(\u001b[49m\u001b[43minput_ids\u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m 38\u001b[0m \u001b[38;5;66;03m# Compute the loss (ignore padding tokens by masking labels)\u001b[39;00m\n\u001b[0;32m 39\u001b[0m loss \u001b[38;5;241m=\u001b[39m criterion(logits\u001b[38;5;241m.\u001b[39mview(\u001b[38;5;241m-\u001b[39m\u001b[38;5;241m1\u001b[39m, vocab_size), labels\u001b[38;5;241m.\u001b[39mview(\u001b[38;5;241m-\u001b[39m\u001b[38;5;241m1\u001b[39m))\n", - "File \u001b[1;32mc:\\Users\\leo.breedt\\miniconda3\\envs\\neptune_scale_py_312_base\\Lib\\site-packages\\torch\\nn\\modules\\module.py:1739\u001b[0m, in \u001b[0;36mModule._wrapped_call_impl\u001b[1;34m(self, *args, **kwargs)\u001b[0m\n\u001b[0;32m 1737\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m 
\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_compiled_call_impl(\u001b[38;5;241m*\u001b[39margs, \u001b[38;5;241m*\u001b[39m\u001b[38;5;241m*\u001b[39mkwargs) \u001b[38;5;66;03m# type: ignore[misc]\u001b[39;00m\n\u001b[0;32m 1738\u001b[0m \u001b[38;5;28;01melse\u001b[39;00m:\n\u001b[1;32m-> 1739\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_call_impl\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n", - "File \u001b[1;32mc:\\Users\\leo.breedt\\miniconda3\\envs\\neptune_scale_py_312_base\\Lib\\site-packages\\torch\\nn\\modules\\module.py:1845\u001b[0m, in \u001b[0;36mModule._call_impl\u001b[1;34m(self, *args, **kwargs)\u001b[0m\n\u001b[0;32m 1842\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m inner()\n\u001b[0;32m 1844\u001b[0m \u001b[38;5;28;01mtry\u001b[39;00m:\n\u001b[1;32m-> 1845\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[43minner\u001b[49m\u001b[43m(\u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m 1846\u001b[0m \u001b[38;5;28;01mexcept\u001b[39;00m \u001b[38;5;167;01mException\u001b[39;00m:\n\u001b[0;32m 1847\u001b[0m \u001b[38;5;66;03m# run always called hooks if they have not already been run\u001b[39;00m\n\u001b[0;32m 1848\u001b[0m \u001b[38;5;66;03m# For now only forward hooks have the always_call option but perhaps\u001b[39;00m\n\u001b[0;32m 1849\u001b[0m \u001b[38;5;66;03m# this functionality should be added to full backward hooks as well.\u001b[39;00m\n\u001b[0;32m 1850\u001b[0m \u001b[38;5;28;01mfor\u001b[39;00m hook_id, hook \u001b[38;5;129;01min\u001b[39;00m _global_forward_hooks\u001b[38;5;241m.\u001b[39mitems():\n", - "File 
\u001b[1;32mc:\\Users\\leo.breedt\\miniconda3\\envs\\neptune_scale_py_312_base\\Lib\\site-packages\\torch\\nn\\modules\\module.py:1793\u001b[0m, in \u001b[0;36mModule._call_impl..inner\u001b[1;34m()\u001b[0m\n\u001b[0;32m 1790\u001b[0m bw_hook \u001b[38;5;241m=\u001b[39m BackwardHook(\u001b[38;5;28mself\u001b[39m, full_backward_hooks, backward_pre_hooks)\n\u001b[0;32m 1791\u001b[0m args \u001b[38;5;241m=\u001b[39m bw_hook\u001b[38;5;241m.\u001b[39msetup_input_hook(args)\n\u001b[1;32m-> 1793\u001b[0m result \u001b[38;5;241m=\u001b[39m \u001b[43mforward_call\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m 1794\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m _global_forward_hooks \u001b[38;5;129;01mor\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_forward_hooks:\n\u001b[0;32m 1795\u001b[0m \u001b[38;5;28;01mfor\u001b[39;00m hook_id, hook \u001b[38;5;129;01min\u001b[39;00m (\n\u001b[0;32m 1796\u001b[0m \u001b[38;5;241m*\u001b[39m_global_forward_hooks\u001b[38;5;241m.\u001b[39mitems(),\n\u001b[0;32m 1797\u001b[0m \u001b[38;5;241m*\u001b[39m\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_forward_hooks\u001b[38;5;241m.\u001b[39mitems(),\n\u001b[0;32m 1798\u001b[0m ):\n\u001b[0;32m 1799\u001b[0m \u001b[38;5;66;03m# mark that always called hook is run\u001b[39;00m\n", - "Cell \u001b[1;32mIn[5], line 12\u001b[0m, in \u001b[0;36mSimpleLLM.forward\u001b[1;34m(self, x)\u001b[0m\n\u001b[0;32m 10\u001b[0m x \u001b[38;5;241m=\u001b[39m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39membedding(x)\n\u001b[0;32m 11\u001b[0m lstm_out, _ \u001b[38;5;241m=\u001b[39m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mlstm(x) \u001b[38;5;66;03m# LSTM returns output and hidden/cell state tuple\u001b[39;00m\n\u001b[1;32m---> 12\u001b[0m out 
\u001b[38;5;241m=\u001b[39m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mfc1\u001b[49m\u001b[43m(\u001b[49m\u001b[43mlstm_out\u001b[49m\u001b[43m)\u001b[49m \u001b[38;5;66;03m# Use the last output from the LSTM\u001b[39;00m\n\u001b[0;32m 13\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m out\n", - "File \u001b[1;32mc:\\Users\\leo.breedt\\miniconda3\\envs\\neptune_scale_py_312_base\\Lib\\site-packages\\torch\\nn\\modules\\module.py:1739\u001b[0m, in \u001b[0;36mModule._wrapped_call_impl\u001b[1;34m(self, *args, **kwargs)\u001b[0m\n\u001b[0;32m 1737\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_compiled_call_impl(\u001b[38;5;241m*\u001b[39margs, \u001b[38;5;241m*\u001b[39m\u001b[38;5;241m*\u001b[39mkwargs) \u001b[38;5;66;03m# type: ignore[misc]\u001b[39;00m\n\u001b[0;32m 1738\u001b[0m \u001b[38;5;28;01melse\u001b[39;00m:\n\u001b[1;32m-> 1739\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_call_impl\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n", - "File \u001b[1;32mc:\\Users\\leo.breedt\\miniconda3\\envs\\neptune_scale_py_312_base\\Lib\\site-packages\\torch\\nn\\modules\\module.py:1845\u001b[0m, in \u001b[0;36mModule._call_impl\u001b[1;34m(self, *args, **kwargs)\u001b[0m\n\u001b[0;32m 1842\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m inner()\n\u001b[0;32m 1844\u001b[0m \u001b[38;5;28;01mtry\u001b[39;00m:\n\u001b[1;32m-> 1845\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[43minner\u001b[49m\u001b[43m(\u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m 1846\u001b[0m \u001b[38;5;28;01mexcept\u001b[39;00m \u001b[38;5;167;01mException\u001b[39;00m:\n\u001b[0;32m 1847\u001b[0m \u001b[38;5;66;03m# run always 
called hooks if they have not already been run\u001b[39;00m\n\u001b[0;32m 1848\u001b[0m \u001b[38;5;66;03m# For now only forward hooks have the always_call option but perhaps\u001b[39;00m\n\u001b[0;32m 1849\u001b[0m \u001b[38;5;66;03m# this functionality should be added to full backward hooks as well.\u001b[39;00m\n\u001b[0;32m 1850\u001b[0m \u001b[38;5;28;01mfor\u001b[39;00m hook_id, hook \u001b[38;5;129;01min\u001b[39;00m _global_forward_hooks\u001b[38;5;241m.\u001b[39mitems():\n", - "File \u001b[1;32mc:\\Users\\leo.breedt\\miniconda3\\envs\\neptune_scale_py_312_base\\Lib\\site-packages\\torch\\nn\\modules\\module.py:1793\u001b[0m, in \u001b[0;36mModule._call_impl..inner\u001b[1;34m()\u001b[0m\n\u001b[0;32m 1790\u001b[0m bw_hook \u001b[38;5;241m=\u001b[39m BackwardHook(\u001b[38;5;28mself\u001b[39m, full_backward_hooks, backward_pre_hooks)\n\u001b[0;32m 1791\u001b[0m args \u001b[38;5;241m=\u001b[39m bw_hook\u001b[38;5;241m.\u001b[39msetup_input_hook(args)\n\u001b[1;32m-> 1793\u001b[0m result \u001b[38;5;241m=\u001b[39m \u001b[43mforward_call\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m 1794\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m _global_forward_hooks \u001b[38;5;129;01mor\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_forward_hooks:\n\u001b[0;32m 1795\u001b[0m \u001b[38;5;28;01mfor\u001b[39;00m hook_id, hook \u001b[38;5;129;01min\u001b[39;00m (\n\u001b[0;32m 1796\u001b[0m \u001b[38;5;241m*\u001b[39m_global_forward_hooks\u001b[38;5;241m.\u001b[39mitems(),\n\u001b[0;32m 1797\u001b[0m \u001b[38;5;241m*\u001b[39m\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_forward_hooks\u001b[38;5;241m.\u001b[39mitems(),\n\u001b[0;32m 1798\u001b[0m ):\n\u001b[0;32m 1799\u001b[0m \u001b[38;5;66;03m# mark that always called hook is 
run\u001b[39;00m\n", - "File \u001b[1;32mc:\\Users\\leo.breedt\\miniconda3\\envs\\neptune_scale_py_312_base\\Lib\\site-packages\\torch\\nn\\modules\\linear.py:125\u001b[0m, in \u001b[0;36mLinear.forward\u001b[1;34m(self, input)\u001b[0m\n\u001b[0;32m 124\u001b[0m \u001b[38;5;28;01mdef\u001b[39;00m \u001b[38;5;21mforward\u001b[39m(\u001b[38;5;28mself\u001b[39m, \u001b[38;5;28minput\u001b[39m: Tensor) \u001b[38;5;241m-\u001b[39m\u001b[38;5;241m>\u001b[39m Tensor:\n\u001b[1;32m--> 125\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[43mF\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mlinear\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;28;43minput\u001b[39;49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mweight\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mbias\u001b[49m\u001b[43m)\u001b[49m\n", - "\u001b[1;31mKeyboardInterrupt\u001b[0m: " - ] - } - ], + "outputs": [], "source": [ "# Training setup\n", "debug_metrics = {}\n", @@ -641,7 +393,7 @@ ") # Ignore the buffering index of -100 in the dataset\n", "\n", "# Define watcher class\n", - "watcher = TorchWatcher(model=model, run=run)\n", + "watcher = TorchWatcher(model=model, run=run, tensor_stats=[\"mean\", \"norm\"])\n", "\n", "device = torch.device(\"cuda\" if torch.cuda.is_available() else \"cpu\")\n", "model.to(device)\n", @@ -673,7 +425,7 @@ " print(f\"Step {step_counter} / {len(train_dataloader)}, Loss: {loss.item()}\")\n", "\n", " # Call watch() method in loop determing which layer-wise metrics to watch\n", - " watcher.watch(step=step_counter, log=\"gradients\")\n", + " watcher.watch(step=step_counter)\n", "\n", " if step_counter % 50 == 0: # Log validation loss at every 50 steps\n", " val_loss = evaluate(model, val_dataloader, criterion, device, vocab_size)\n", From 2b001af11a9db7fc321a2394921b1ca60b39a6dd Mon Sep 17 00:00:00 2001 From: 
LeoRoccoBreedt Date: Mon, 31 Mar 2025 16:29:43 +0200 Subject: [PATCH 078/125] style: update intro --- .../pytorch/notebooks/pytorch_text_model_debugging.ipynb | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/integrations-and-supported-tools/pytorch/notebooks/pytorch_text_model_debugging.ipynb b/integrations-and-supported-tools/pytorch/notebooks/pytorch_text_model_debugging.ipynb index 1d7dadc..073217c 100644 --- a/integrations-and-supported-tools/pytorch/notebooks/pytorch_text_model_debugging.ipynb +++ b/integrations-and-supported-tools/pytorch/notebooks/pytorch_text_model_debugging.ipynb @@ -30,14 +30,14 @@ "source": [ "This guide will show you how to:\n", "- Initialize the **Neptune Run** object and log configuration parameters\n", - "- Create a **reusable class** to hook layer-wise metrics (`HookManager`)\n", + "- Create a **reusable class** to hook and log layer-wise metrics (`TorchWatcher`)\n", "- Log **aggregated metrics** such as loss and accuracy\n", "- Log **layer-wise metrics** to debug model training such as:\n", "\n", "| **Metric** | **Demonstrated** | **What it shows** | **How to capture** |\n", "|-----------------------------------|--------------------------------------|--------------------------------------------------------------------------------------------------------------|---------------------------------------------------------------|\n", - "| **Activations** | Yes | Dead or exploding activations can indicate issues with training stability. | `HookManager` |\n", - "| **Gradients** | Yes | Essential for diagnosing vanishing or exploding gradients. Small gradients may indicate vanishing gradients, while large ones can signal instability. | `HookManager` |\n", + "| **Activations** | Yes | Dead or exploding activations can indicate issues with training stability. | `TorchWatcher` |\n", + "| **Gradients** | Yes | Essential for diagnosing vanishing or exploding gradients. 
Small gradients may indicate vanishing gradients, while large ones can signal instability. | `TorchWatcher` |\n", "| **Parameters** | Yes | Tracks how the model’s parameters evolve during training. Large or small weights may indicate the need for better regularization or adjustments in learning rate. | Extract directly from the model’s parameters. |\n", "| **Loss** | No | Identifies which parts of the network contribute more to the overall loss, aiding debugging and optimization. | Monitor outputs from each layer and compare with the target. |\n", "| **Learning rate** | No | Helpful if using techniques like Layer-wise Learning Rate Decay (L2LRD). Tracking this can provide insight into the layer-specific learning rate. | Manually track based on optimizer settings. |\n", From fdfca8099158ad3e8175fad2204520ac2c7acd01 Mon Sep 17 00:00:00 2001 From: LeoRoccoBreedt Date: Mon, 31 Mar 2025 18:57:15 +0200 Subject: [PATCH 079/125] update TocrhWatcher package to default to tracking all available layers as well as allowing a user to specify which layers to track --- .../pytorch/notebooks/README.md | 235 +++++++++++++----- .../pytorch/notebooks/TorchWatcher.py | 121 +++++++-- .../notebooks/torch_watcher_example.py | 120 +++++++++ 3 files changed, 388 insertions(+), 88 deletions(-) create mode 100644 integrations-and-supported-tools/pytorch/notebooks/torch_watcher_example.py diff --git a/integrations-and-supported-tools/pytorch/notebooks/README.md b/integrations-and-supported-tools/pytorch/notebooks/README.md index 07e74b7..90c2872 100644 --- a/integrations-and-supported-tools/pytorch/notebooks/README.md +++ b/integrations-and-supported-tools/pytorch/notebooks/README.md @@ -1,115 +1,216 @@ # TorchWatcher -TorchWatcher is a powerful tool for monitoring PyTorch models during training. It helps you track activations, gradients, and parameters of your model layers in real-time using neptune.ai for logging. +A simple PyTorch hook manager for tracking model metrics during training. 
## Features -- Track layer activations, gradients, and parameters -- Automatic hook management -- Select from predefined tensor statistics -- Flexible tracking configuration -- Support for various layer types (Linear, Conv1d/2d/3d, LSTM, GRU, RNN) +- Track predefined statistics for specified PyTorch layers +- Automatic metric logging to Neptune +- Support for both training and validation phases +- Selective layer tracking +- Predefined statistics options: + - `mean`: Mean value + - `std`: Standard deviation + - `norm`: L2 norm + - `min`: Minimum value + - `max`: Maximum value + - `var`: Variance + - `abs_mean`: Mean of absolute values ## Installation -Make sure you have the required dependencies: ```bash -pip install torch neptune_scale +pip install neptune-client ``` -## Quick Start +## Usage + +### Basic Usage -1. Initialize your Neptune run: ```python from neptune_scale import Run +from TorchWatcher import TorchWatcher + +# Initialize Neptune run run = Run( - experiment_name="your-experiment-name", + experiment_name="my-experiment", ) -``` - -2. Create your PyTorch model and initialize TorchWatcher: -```python -from TorchWatcher import TorchWatcher +# Create your model model = YourModel() -watcher = TorchWatcher(model, run) # Uses default mean() statistic -``` -3. 
Use the watcher in your training loop: -```python -# Forward pass -output = model(x_batch) -loss = criterion(output, y_batch) +# Option 1: Track all layers (default) +watcher = TorchWatcher( + model, + run, + tensor_stats=['mean', 'norm'] # Optional: select from predefined statistics +) + +# Option 2: Track specific layer types +layers_to_track = [ + nn.Linear, # Track all linear layers + nn.Conv2d, # Track all convolutional layers + nn.ReLU, # Track all ReLU layers +] -# Backward pass -optimizer.zero_grad() -loss.backward() -optimizer.step() +watcher = TorchWatcher( + model, + run, + track_layers=layers_to_track, # Specify which layers to track + tensor_stats=['mean', 'norm'] # Optional: select from predefined statistics +) -# Track metrics after the forward and backward passes -watcher.watch(step=step) +# Training loop +for epoch in range(n_epochs): + for batch in dataloader: + # Forward pass + output = model(batch) + + # Backward pass + loss.backward() + + # Track metrics + watcher.watch(step=current_step) ``` -## Advanced Usage +### Detailed Examples + +#### 1. Using Default Settings -### Tensor Statistics +The simplest way to use TorchWatcher is with default settings, which will track all layers in your model: -You can select which predefined statistics to compute: ```python +# Initialize with defaults watcher = TorchWatcher( - model, - run, - tensor_stats=['mean', 'std', 'norm'] # Select from available statistics + model, + run ) -``` -Available statistics: -- `mean`: Mean value -- `std`: Standard deviation -- `norm`: L2 norm -- `min`: Minimum value -- `max`: Maximum value -- `var`: Variance -- `abs_mean`: Mean of absolute values +# This will: +# - Track all layers in the model +# - Use only the 'mean' statistic (default) +# - Log all metrics (activations, gradients, parameters) +``` -By default, only the mean statistic is computed if no statistics are specified. +#### 2. 
Specifying Which Layers to Track -### Custom Layer Tracking +You can choose to track specific layer types in your model: -You can specify which layer types to track: ```python +# Track only convolutional and linear layers watcher = TorchWatcher( - model, + model, run, - track_layers=[nn.Linear, nn.Conv2d] # Only track Linear and Conv2d layers + track_layers=[nn.Conv2d, nn.Linear] +) + +# Track only normalization layers +watcher = TorchWatcher( + model, + run, + track_layers=[nn.BatchNorm2d, nn.LayerNorm] +) + +# Track only activation layers +watcher = TorchWatcher( + model, + run, + track_layers=[nn.ReLU, nn.LeakyReLU, nn.GELU] ) ``` -### Selective Tracking +#### 3. Selecting Statistics to Capture + +You can specify which statistics to compute for each layer: -You can choose which metrics to track: ```python -watcher.watch( - step=epoch, - log=["gradients", "parameters"] # Only track gradients and parameters, not activations +# Track multiple statistics +watcher = TorchWatcher( + model, + run, + tensor_stats=['mean', 'std', 'norm', 'min', 'max'] +) + +# Track only basic statistics +watcher = TorchWatcher( + model, + run, + tensor_stats=['mean', 'std'] +) + +# Track only norm-based statistics +watcher = TorchWatcher( + model, + run, + tensor_stats=['norm', 'abs_mean'] ) ``` -## Example +#### 4. 
Controlling Metric Logging + +The `watch()` method allows you to specify which metrics to log: -See `torch_watcher_example.py` for a complete working example that demonstrates: -- Model definition -- Data generation -- Training loop with metrics tracking -- Validation -- Neptune integration +```python +# Log all metrics (default) +watcher.watch(step=current_step) -## Logged Metrics +# Log only activation metrics +watcher.watch(step=current_step, log=["activations"]) -The following metrics are automatically logged to Neptune: +# Log only gradient metrics +watcher.watch(step=current_step, log=["gradients"]) -- `debug/activation/{layer}_{stat_name}`: Layer activation statistics -- `debug/gradient/{layer}_{stat_name}`: Layer gradient statistics -- `debug/parameters/{layer}_{stat_name}`: Parameter statistics +# Log only parameter metrics +watcher.watch(step=current_step, log=["parameters"]) + +# Log specific combinations +watcher.watch(step=current_step, log=["activations", "gradients"]) +``` -Where `stat_name` corresponds to the statistics you selected. By default, only the mean statistic is computed. 
\ No newline at end of file +### Layer Types + +You can track any of these PyTorch layer types: +- `nn.Linear` +- `nn.Conv1d`, `nn.Conv2d`, `nn.Conv3d` +- `nn.ConvTranspose1d`, `nn.ConvTranspose2d`, `nn.ConvTranspose3d` +- `nn.BatchNorm1d`, `nn.BatchNorm2d`, `nn.BatchNorm3d` +- `nn.LayerNorm` +- `nn.InstanceNorm1d`, `nn.InstanceNorm2d`, `nn.InstanceNorm3d` +- `nn.GroupNorm` +- `nn.ReLU`, `nn.LeakyReLU`, `nn.PReLU`, `nn.RReLU`, `nn.ELU`, `nn.SELU`, `nn.CELU`, `nn.GELU` +- `nn.Sigmoid` +- `nn.Tanh` +- `nn.Dropout`, `nn.Dropout2d`, `nn.Dropout3d` +- `nn.MaxPool1d`, `nn.MaxPool2d`, `nn.MaxPool3d` +- `nn.AvgPool1d`, `nn.AvgPool2d`, `nn.AvgPool3d` +- `nn.AdaptiveMaxPool1d`, `nn.AdaptiveMaxPool2d`, `nn.AdaptiveMaxPool3d` +- `nn.AdaptiveAvgPool1d`, `nn.AdaptiveAvgPool2d`, `nn.AdaptiveAvgPool3d` +- `nn.LSTM`, `nn.GRU`, `nn.RNN` +- `nn.Embedding` +- `nn.TransformerEncoderLayer`, `nn.TransformerDecoderLayer` + +### Available Statistics + +The following statistics are available for tracking: + +1. Basic Statistics: + - `mean`: Mean value + - `std`: Standard deviation + - `norm`: L2 norm + - `min`: Minimum value + - `max`: Maximum value + - `var`: Variance + - `abs_mean`: Mean of absolute values + +### Example + +See `torch_watcher_example.py` for a complete example showing: +- How to track all layers (default behavior) +- How to specify which layers to track +- How to select statistics to monitor +- Integration with a training loop +- Logging metrics to Neptune + +## License + +MIT License diff --git a/integrations-and-supported-tools/pytorch/notebooks/TorchWatcher.py b/integrations-and-supported-tools/pytorch/notebooks/TorchWatcher.py index 7d9dd08..33fe8ba 100644 --- a/integrations-and-supported-tools/pytorch/notebooks/TorchWatcher.py +++ b/integrations-and-supported-tools/pytorch/notebooks/TorchWatcher.py @@ -1,5 +1,5 @@ import warnings -from typing import Any, Dict, List, Literal, Optional, Union +from typing import Any, Dict, List, Literal, Optional, Type, Union import 
torch import torch.nn as nn @@ -15,6 +15,63 @@ "abs_mean": lambda x: x.abs().mean().item(), } +# Common PyTorch layer types for validation +PYTORCH_LAYERS = { + # Linear layers + nn.Linear, + # Convolutional layers + nn.Conv1d, + nn.Conv2d, + nn.Conv3d, + nn.ConvTranspose1d, + nn.ConvTranspose2d, + nn.ConvTranspose3d, + # Recurrent layers + nn.LSTM, + nn.GRU, + nn.RNN, + # Normalization layers + nn.BatchNorm1d, + nn.BatchNorm2d, + nn.BatchNorm3d, + nn.LayerNorm, + nn.InstanceNorm1d, + nn.InstanceNorm2d, + nn.InstanceNorm3d, + # Activation layers + nn.ReLU, + nn.LeakyReLU, + nn.ELU, + nn.SELU, + nn.GELU, + # Pooling layers + nn.MaxPool1d, + nn.MaxPool2d, + nn.MaxPool3d, + nn.AvgPool1d, + nn.AvgPool2d, + nn.AvgPool3d, + # Dropout layers + nn.Dropout, + nn.Dropout2d, + nn.Dropout3d, + # Embedding layers + nn.Embedding, + nn.EmbeddingBag, + # Transformer layers + nn.TransformerEncoderLayer, + nn.TransformerDecoderLayer, + # Attention layers + nn.MultiheadAttention, + # Flatten layers + nn.Flatten, + nn.Unflatten, + # Other common layers + nn.Sequential, + nn.ModuleList, + nn.ModuleDict, +} + class HookManager: """ @@ -27,33 +84,37 @@ class HookManager: - Configurable tracking """ - def __init__(self, model: nn.Module, track_layers: Optional[List[type]] = None): + def __init__(self, model: nn.Module, track_layers: Optional[List[Type[nn.Module]]] = None): """ - Initialize HookManager with additional configuration options. + Initialize HookManager with layer types to track. Args: model (nn.Module): The PyTorch model to track - track_layers (Optional[List[type]]): List of layer types to track. - Defaults to common layer types if not specified. + track_layers (Optional[List[Type[nn.Module]]]): List of PyTorch layer types to track. + If None, tracks all layers in the model. + If specified, must contain valid PyTorch layer types. 
+ + Raises: + TypeError: If model is not a PyTorch model + ValueError: If track_layers contains invalid layer types """ if not isinstance(model, nn.Module): raise TypeError("The model must be a PyTorch model") + # Validate that all specified layers are valid PyTorch layers if track_layers is provided + if track_layers is not None: + invalid_layers = [layer for layer in track_layers if layer not in PYTORCH_LAYERS] + if invalid_layers: + raise ValueError( + f"Invalid layer types specified: {invalid_layers}. " + f"Please use valid PyTorch layer types from torch.nn." + ) + self.model = model self.hooks: List[torch.utils.hooks.RemovableHandle] = [] self.activations: Dict[str, torch.Tensor] = {} self.gradients: Dict[str, torch.Tensor] = {} - - # Default layer types to track if not specified - self.track_layers = track_layers or [ - nn.Linear, - nn.Conv1d, - nn.Conv2d, - nn.Conv3d, - nn.LSTM, - nn.GRU, - nn.RNN, - ] + self.track_layers = track_layers def save_activation(self, name: str): """Create a forward hook to save layer activations.""" @@ -94,14 +155,26 @@ def register_hooks(self, track_activations: bool = True, track_gradients: bool = # Register forward hooks for activations if track_activations: for name, module in self.model.named_modules(): - if any(isinstance(module, layer_type) for layer_type in self.track_layers): + # Skip the model itself + if name == "": + continue + # Track all layers if track_layers is None, otherwise only specified types + if self.track_layers is None or any( + isinstance(module, layer_type) for layer_type in self.track_layers + ): hook = module.register_forward_hook(self.save_activation(name)) self.hooks.append(hook) # Register backward hooks for gradients if track_gradients: for name, module in self.model.named_modules(): - if any(isinstance(module, layer_type) for layer_type in self.track_layers): + # Skip the model itself + if name == "": + continue + # Track all layers if track_layers is None, otherwise only specified types + if 
self.track_layers is None or any( + isinstance(module, layer_type) for layer_type in self.track_layers + ): hook = module.register_full_backward_hook(self.save_gradient(name)) self.hooks.append(hook) @@ -138,19 +211,25 @@ def __init__( self, model: nn.Module, run: Any, # Made more flexible to support different logging mechanisms - track_layers: Optional[List[type]] = None, + track_layers: Optional[List[Type[nn.Module]]] = None, tensor_stats: Optional[List[str]] = None, ) -> None: """ - Initialize TorchWatcher with more configuration options. + Initialize TorchWatcher with configuration options. Args: model (nn.Module): The PyTorch model to watch run: Logging mechanism from Neptune - track_layers (Optional[List[type]]): Layer types to specifically track + track_layers (Optional[List[Type[nn.Module]]]): List of PyTorch layer types to track. + If None, tracks all layers in the model. + If specified, must contain valid PyTorch layer types. tensor_stats (Optional[List[str]]): List of statistics to compute. Available options: mean, std, norm, min, max, var, abs_mean. Defaults to ['mean'] if not specified. 
+ + Raises: + TypeError: If model is not a PyTorch model + ValueError: If track_layers contains invalid layer types """ if not isinstance(model, nn.Module): raise TypeError("The model must be a PyTorch model") diff --git a/integrations-and-supported-tools/pytorch/notebooks/torch_watcher_example.py b/integrations-and-supported-tools/pytorch/notebooks/torch_watcher_example.py new file mode 100644 index 0000000..55ad415 --- /dev/null +++ b/integrations-and-supported-tools/pytorch/notebooks/torch_watcher_example.py @@ -0,0 +1,120 @@ +import torch +import torch.nn as nn +import numpy as np +from neptune_scale import Run +from TorchWatcher import TorchWatcher + +# Define a simple neural network +class SimpleNet(nn.Module): + def __init__(self): + super().__init__() + self.fc1 = nn.Linear(10, 5) + self.fc2 = nn.Linear(5, 1) + self.relu = nn.ReLU() + + def forward(self, x): + x = self.relu(self.fc1(x)) + x = self.fc2(x) + return x + +def generate_data(n_samples=1000): + """Generate synthetic data for a regression task.""" + # Generate random input features + X = torch.randn(n_samples, 10) + + # Create target values with some non-linear relationship + y = (X[:, 0] ** 2 + 0.5 * X[:, 1] + 0.1 * torch.sum(X[:, 2:], dim=1)).unsqueeze(1) + + # Add some noise + y += 0.1 * torch.randn_like(y) + + return X, y + +def train_model(model, X_train, y_train, X_val, y_val, watcher, n_epochs=50, batch_size=32): + """Training function that can be used with any watcher configuration.""" + optimizer = torch.optim.Adam(model.parameters(), lr=0.01) + criterion = nn.MSELoss() + n_batches = len(X_train) // batch_size + + for epoch in range(n_epochs): + model.train() + train_loss = 0.0 + + # Training batches + for i in range(n_batches): + start_idx = i * batch_size + end_idx = start_idx + batch_size + + x_batch = X_train[start_idx:end_idx] + y_batch = y_train[start_idx:end_idx] + + # Forward pass + output = model(x_batch) + loss = criterion(output, y_batch) + + # Backward pass + 
optimizer.zero_grad() + loss.backward() + optimizer.step() + + # Track metrics after the forward and backward passes + watcher.watch(step=epoch * n_batches + i) + + train_loss += loss.item() + + # Average training loss + train_loss /= n_batches + + # Validation + model.eval() + with torch.no_grad(): + val_output = model(X_val) + val_loss = criterion(val_output, y_val) + + # Log metrics + watcher.run.log_metrics(data={ + "train/loss": train_loss, + "val/loss": val_loss.item() + }, step=epoch) + + if (epoch + 1) % 10 == 0: + print(f"Epoch [{epoch+1}/{n_epochs}], " + f"Train Loss: {train_loss:.4f}, " + f"Val Loss: {val_loss.item():.4f}") + +def main(): + # Initialize Neptune run + run = Run( + experiment_name="torch-watcher-example", + ) + + # Generate data + X_train, y_train = generate_data(n_samples=1000) + X_val, y_val = generate_data(n_samples=200) + + # Example 1: Track all layers in the model + print("\nTraining with all layers tracked:") + model1 = SimpleNet() + watcher1 = TorchWatcher( + model1, + run, + tensor_stats=['mean', 'norm'] # track_layers defaults to None, tracking all layers + ) + train_model(model1, X_train, y_train, X_val, y_val, watcher1) + watcher1.run.close() + + # Example 2: Track specific layer types + print("\nTraining with only Linear and ReLU layers tracked:") + model2 = SimpleNet() + run2 = Run(experiment_name="torch-watcher-example-specific") + watcher2 = TorchWatcher( + model2, + run2, + track_layers=[nn.Linear, nn.ReLU], # Only track Linear and ReLU layers + tensor_stats=['mean', 'norm'] + ) + train_model(model2, X_train, y_train, X_val, y_val, watcher2) + watcher2.run.close() + +if __name__ == "__main__": + main() \ No newline at end of file From 84a7ca334d1ab81d1e5800a83cc1107702581de8 Mon Sep 17 00:00:00 2001 From: LeoRoccoBreedt Date: Mon, 31 Mar 2025 18:58:59 +0200 Subject: [PATCH 080/125] chore: update readme --- integrations-and-supported-tools/pytorch/notebooks/README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff 
--git a/integrations-and-supported-tools/pytorch/notebooks/README.md b/integrations-and-supported-tools/pytorch/notebooks/README.md index 90c2872..a4b126a 100644 --- a/integrations-and-supported-tools/pytorch/notebooks/README.md +++ b/integrations-and-supported-tools/pytorch/notebooks/README.md @@ -20,7 +20,7 @@ A simple PyTorch hook manager for tracking model metrics during training. ## Installation ```bash -pip install neptune-client +pip install neptune_scale ``` ## Usage From 75265eccfa7c780ed76ae61f71a0100588b2a46b Mon Sep 17 00:00:00 2001 From: LeoRoccoBreedt Date: Tue, 1 Apr 2025 09:59:14 +0200 Subject: [PATCH 081/125] chore: cleanup comments and add TODO for future improvements --- .../pytorch/notebooks/TorchWatcher.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/integrations-and-supported-tools/pytorch/notebooks/TorchWatcher.py b/integrations-and-supported-tools/pytorch/notebooks/TorchWatcher.py index 33fe8ba..5d131b8 100644 --- a/integrations-and-supported-tools/pytorch/notebooks/TorchWatcher.py +++ b/integrations-and-supported-tools/pytorch/notebooks/TorchWatcher.py @@ -204,7 +204,7 @@ def __del__(self): class TorchWatcher: """ - A comprehensive tracking mechanism for PyTorch models with enhanced logging and context management. + A comprehensive tracking mechanism for PyTorch models with enhanced logging. 
""" def __init__( @@ -244,6 +244,7 @@ def __init__( tensor_stats = ["mean"] # Validate that all requested statistics exist + # TODO: Add ability to set custom statistics invalid_stats = [stat for stat in tensor_stats if stat not in TENSOR_STATS] if invalid_stats: raise ValueError( From ebde389e6ff47e8619534de445fd92a573b8b786 Mon Sep 17 00:00:00 2001 From: LeoRoccoBreedt Date: Tue, 1 Apr 2025 10:02:51 +0200 Subject: [PATCH 082/125] refactor: update TorchWatcher to use a common _track_metrics() method --- .../pytorch/notebooks/TorchWatcher.py | 36 ++++++++++-------- .../notebooks/torch_watcher_example.py | 38 +++++++++++-------- 2 files changed, 43 insertions(+), 31 deletions(-) diff --git a/integrations-and-supported-tools/pytorch/notebooks/TorchWatcher.py b/integrations-and-supported-tools/pytorch/notebooks/TorchWatcher.py index 5d131b8..3269c1a 100644 --- a/integrations-and-supported-tools/pytorch/notebooks/TorchWatcher.py +++ b/integrations-and-supported-tools/pytorch/notebooks/TorchWatcher.py @@ -275,31 +275,37 @@ def _safe_tensor_stats(self, tensor: torch.Tensor) -> Dict[str, float]: warnings.warn(f"Could not compute {stat_name} statistic: {e}") return stats + def _track_metric(self, metric_type: str, data: Dict[str, torch.Tensor]): + """Track metrics with enhanced statistics for a given metric type. 
+ + Args: + metric_type (str): Type of metric being tracked (activation/gradient/parameters) + data (Dict[str, torch.Tensor]): Dictionary mapping layer names to tensors + """ + for layer, tensor in data.items(): + if tensor is not None: + stats = self._safe_tensor_stats(tensor) + for stat_name, stat_value in stats.items(): + self.debug_metrics[f"debug/{metric_type}/{layer}_{stat_name}"] = stat_value + def track_activations(self): """Track layer activations with enhanced statistics.""" activations = self.hm.get_activations() - for layer, activation in activations.items(): - if activation is not None: - stats = self._safe_tensor_stats(activation) - for stat_name, stat_value in stats.items(): - self.debug_metrics[f"debug/activation/{layer}_{stat_name}"] = stat_value + self._track_metric("activation", activations) def track_gradients(self): """Track layer gradients with enhanced statistics.""" gradients = self.hm.get_gradients() - for layer, gradient in gradients.items(): - if gradient is not None: - stats = self._safe_tensor_stats(gradient) - for stat_name, stat_value in stats.items(): - self.debug_metrics[f"debug/gradient/{layer}_{stat_name}"] = stat_value + self._track_metric("gradient", gradients) def track_parameters(self): """Track model parameters with enhanced statistics.""" - for layer, param in self.model.named_parameters(): - if param is not None and param.grad is not None: - stats = self._safe_tensor_stats(param.grad) - for stat_name, stat_value in stats.items(): - self.debug_metrics[f"debug/parameters/{layer}_{stat_name}"] = stat_value + parameters = { + name: param.grad + for name, param in self.model.named_parameters() + if param is not None and param.grad is not None + } + self._track_metric("parameters", parameters) def watch( self, diff --git a/integrations-and-supported-tools/pytorch/notebooks/torch_watcher_example.py b/integrations-and-supported-tools/pytorch/notebooks/torch_watcher_example.py index 55ad415..f1a52f8 100644 --- 
a/integrations-and-supported-tools/pytorch/notebooks/torch_watcher_example.py +++ b/integrations-and-supported-tools/pytorch/notebooks/torch_watcher_example.py @@ -1,9 +1,10 @@ +import numpy as np import torch import torch.nn as nn -import numpy as np from neptune_scale import Run from TorchWatcher import TorchWatcher + # Define a simple neural network class SimpleNet(nn.Module): def __init__(self): @@ -17,19 +18,21 @@ def forward(self, x): x = self.fc2(x) return x + def generate_data(n_samples=1000): """Generate synthetic data for a regression task.""" # Generate random input features X = torch.randn(n_samples, 10) - + # Create target values with some non-linear relationship y = (X[:, 0] ** 2 + 0.5 * X[:, 1] + 0.1 * torch.sum(X[:, 2:], dim=1)).unsqueeze(1) - + # Add some noise y += 0.1 * torch.randn_like(y) - + return X, y + def train_model(model, X_train, y_train, X_val, y_val, watcher, n_epochs=50, batch_size=32): """Training function that can be used with any watcher configuration.""" optimizer = torch.optim.Adam(model.parameters(), lr=0.01) @@ -39,12 +42,12 @@ def train_model(model, X_train, y_train, X_val, y_val, watcher, n_epochs=50, bat for epoch in range(n_epochs): model.train() train_loss = 0.0 - + # Training batches for i in range(n_batches): start_idx = i * batch_size end_idx = start_idx + batch_size - + x_batch = X_train[start_idx:end_idx] y_batch = y_train[start_idx:end_idx] @@ -72,15 +75,17 @@ def train_model(model, X_train, y_train, X_val, y_val, watcher, n_epochs=50, bat val_loss = criterion(val_output, y_val) # Log metrics - watcher.run.log_metrics(data={ - "train/loss": train_loss, - "val/loss": val_loss.item() - }, step=epoch) + watcher.run.log_metrics( + data={"train/loss": train_loss, "val/loss": val_loss.item()}, step=epoch + ) if (epoch + 1) % 10 == 0: - print(f"Epoch [{epoch+1}/{n_epochs}], " - f"Train Loss: {train_loss:.4f}, " - f"Val Loss: {val_loss.item():.4f}") + print( + f"Epoch [{epoch+1}/{n_epochs}], " + f"Train Loss: 
{train_loss:.4f}, " + f"Val Loss: {val_loss.item():.4f}" + ) + def main(): # Initialize Neptune run @@ -98,7 +103,7 @@ def main(): watcher1 = TorchWatcher( model1, run, - tensor_stats=['mean', 'norm'] # track_layers defaults to None, tracking all layers + tensor_stats=["mean", "norm"], # track_layers defaults to None, tracking all layers ) train_model(model1, X_train, y_train, X_val, y_val, watcher1) watcher1.run.close() @@ -111,10 +116,11 @@ def main(): model2, run2, track_layers=[nn.Linear, nn.ReLU], # Only track Linear and ReLU layers - tensor_stats=['mean', 'norm'] + tensor_stats=["mean", "norm"], ) train_model(model2, X_train, y_train, X_val, y_val, watcher2) watcher2.run.close() + if __name__ == "__main__": - main() \ No newline at end of file + main() From 9f83fbf074091d7d565661c269b5c25a237caa32 Mon Sep 17 00:00:00 2001 From: LeoRoccoBreedt Date: Tue, 1 Apr 2025 13:55:23 +0200 Subject: [PATCH 083/125] refactor: update the watch() method to accept boolean inputs for the metrics to track rather than named values --- .../pytorch/notebooks/TorchWatcher.py | 23 ++++++++----------- 1 file changed, 10 insertions(+), 13 deletions(-) diff --git a/integrations-and-supported-tools/pytorch/notebooks/TorchWatcher.py b/integrations-and-supported-tools/pytorch/notebooks/TorchWatcher.py index 3269c1a..3efd1b0 100644 --- a/integrations-and-supported-tools/pytorch/notebooks/TorchWatcher.py +++ b/integrations-and-supported-tools/pytorch/notebooks/TorchWatcher.py @@ -310,32 +310,29 @@ def track_parameters(self): def watch( self, step: Union[int, float], - log: Optional[List[Literal["gradients", "parameters", "activations"]]] = None, + track_gradients: bool = True, + track_parameters: bool = True, + track_activations: bool = True ): """ Log debug metrics with flexible configuration. Args: step (int|float): Logging step - log (Optional[List]): Specific tracking modes. - Defaults to all if None. + track_gradients (bool): Whether to track gradients. Defaults to True.
+ track_parameters (bool): Whether to track parameters. Defaults to True. + track_activations (bool): Whether to track activations. Defaults to True. """ # Reset metrics self.debug_metrics.clear() - # Determine tracking modes - if log is None or log == "all": + # Track metrics based on boolean flags + if track_gradients: self.track_gradients() + if track_parameters: self.track_parameters() + if track_activations: self.track_activations() - else: - for mode in log: - if mode == "gradients": - self.track_gradients() - elif mode == "parameters": - self.track_parameters() - elif mode == "activations": - self.track_activations() # Log metrics try: From 603af72be2cd1c27c9b37069b50b8c0c647e8fc2 Mon Sep 17 00:00:00 2001 From: LeoRoccoBreedt Date: Tue, 1 Apr 2025 14:53:27 +0200 Subject: [PATCH 084/125] feat: add ability to specify base_namespace and namespace during training loop. - more control on namespace logged during training --- .../pytorch/notebooks/TorchWatcher.py | 34 ++++++++++++------- 1 file changed, 22 insertions(+), 12 deletions(-) diff --git a/integrations-and-supported-tools/pytorch/notebooks/TorchWatcher.py b/integrations-and-supported-tools/pytorch/notebooks/TorchWatcher.py index 3efd1b0..5ba6c87 100644 --- a/integrations-and-supported-tools/pytorch/notebooks/TorchWatcher.py +++ b/integrations-and-supported-tools/pytorch/notebooks/TorchWatcher.py @@ -213,6 +213,7 @@ def __init__( run: Any, # Made more flexible to support different logging mechanisms track_layers: Optional[List[Type[nn.Module]]] = None, tensor_stats: Optional[List[str]] = None, + base_namespace: str = "debug", # Default namespace for all metrics ) -> None: """ Initialize TorchWatcher with configuration options. @@ -226,6 +227,7 @@ def __init__( tensor_stats (Optional[List[str]]): List of statistics to compute. Available options: mean, std, norm, min, max, var, abs_mean. Defaults to ['mean'] if not specified. + base_namespace (str): Base namespace for all logged metrics. Defaults to "debug". 
Raises: TypeError: If model is not a PyTorch model @@ -238,6 +240,7 @@ def __init__( self.run = run self.hm = HookManager(model, track_layers) self.debug_metrics: Dict[str, float] = {} + self.base_namespace = base_namespace # Validate and set tensor statistics if tensor_stats is None: @@ -275,44 +278,49 @@ def _safe_tensor_stats(self, tensor: torch.Tensor) -> Dict[str, float]: warnings.warn(f"Could not compute {stat_name} statistic: {e}") return stats - def _track_metric(self, metric_type: str, data: Dict[str, torch.Tensor]): + def _track_metric(self, metric_type: str, data: Dict[str, torch.Tensor], namespace: Optional[str] = None): """Track metrics with enhanced statistics for a given metric type. Args: metric_type (str): Type of metric being tracked (activation/gradient/parameters) data (Dict[str, torch.Tensor]): Dictionary mapping layer names to tensors + namespace (Optional[str]): Optional namespace to prefix the base namespace """ + # Construct the full namespace + full_namespace = f"{namespace}/{self.base_namespace}" if namespace else self.base_namespace + for layer, tensor in data.items(): if tensor is not None: stats = self._safe_tensor_stats(tensor) for stat_name, stat_value in stats.items(): - self.debug_metrics[f"debug/{metric_type}/{layer}_{stat_name}"] = stat_value + self.debug_metrics[f"{full_namespace}/{metric_type}/{layer}_{stat_name}"] = stat_value - def track_activations(self): + def track_activations(self, namespace: Optional[str] = None): """Track layer activations with enhanced statistics.""" activations = self.hm.get_activations() - self._track_metric("activation", activations) + self._track_metric("activation", activations, namespace) - def track_gradients(self): + def track_gradients(self, namespace: Optional[str] = None): """Track layer gradients with enhanced statistics.""" gradients = self.hm.get_gradients() - self._track_metric("gradient", gradients) + self._track_metric("gradient", gradients, namespace) - def track_parameters(self): + 
def track_parameters(self, namespace: Optional[str] = None): """Track model parameters with enhanced statistics.""" parameters = { name: param.grad for name, param in self.model.named_parameters() if param is not None and param.grad is not None } - self._track_metric("parameters", parameters) + self._track_metric("parameters", parameters, namespace) def watch( self, step: Union[int, float], track_gradients: bool = True, track_parameters: bool = True, - track_activations: bool = True + track_activations: bool = True, + namespace: Optional[str] = None, ): """ Log debug metrics with flexible configuration. @@ -322,17 +330,19 @@ def watch( track_gradients (bool): Whether to track gradients. Defaults to True. track_parameters (bool): Whether to track parameters. Defaults to True. track_activations (bool): Whether to track activations. Defaults to True. + namespace (Optional[str]): Optional namespace to prefix the base namespace. + If provided, metrics will be logged under {namespace}/{base_namespace}/... 
""" # Reset metrics self.debug_metrics.clear() # Track metrics based on boolean flags if track_gradients: - self.track_gradients() + self.track_gradients(namespace) if track_parameters: - self.track_parameters() + self.track_parameters(namespace) if track_activations: - self.track_activations() + self.track_activations(namespace) # Log metrics try: From 6bb48799780fb60dc3788c6dad338be1d0059516 Mon Sep 17 00:00:00 2001 From: LeoRoccoBreedt Date: Tue, 1 Apr 2025 14:53:55 +0200 Subject: [PATCH 085/125] chore: updare readme and example for namespace feature --- .../pytorch/notebooks/README.md | 253 ++++++++---------- .../notebooks/torch_watcher_example.py | 83 +++--- 2 files changed, 159 insertions(+), 177 deletions(-) diff --git a/integrations-and-supported-tools/pytorch/notebooks/README.md b/integrations-and-supported-tools/pytorch/notebooks/README.md index a4b126a..aa383a7 100644 --- a/integrations-and-supported-tools/pytorch/notebooks/README.md +++ b/integrations-and-supported-tools/pytorch/notebooks/README.md @@ -1,21 +1,16 @@ # TorchWatcher -A simple PyTorch hook manager for tracking model metrics during training. +A lightweight PyTorch model monitoring tool that automatically tracks layer activations, gradients, and parameters during training. Built for seamless integration with Neptune.ai. 
## Features -- Track predefined statistics for specified PyTorch layers -- Automatic metric logging to Neptune -- Support for both training and validation phases -- Selective layer tracking -- Predefined statistics options: - - `mean`: Mean value - - `std`: Standard deviation - - `norm`: L2 norm - - `min`: Minimum value - - `max`: Maximum value - - `var`: Variance - - `abs_mean`: Mean of absolute values +- **Automatic Layer Tracking**: Monitors activations, gradients, and parameters of specified PyTorch layers +- **Flexible Layer Selection**: Track all layers or specify which layer types to monitor +- **Comprehensive Statistics**: Predefined tensor statistics including mean, standard deviation, norm, min, max, variance, and absolute mean +- **Configurable Tracking**: Enable/disable tracking of activations, gradients, and parameters as needed +- **Organized Logging**: Structured metric namespacing for better organization in Neptune +- **Memory Efficient**: Clears stored tensors after each logging step +- **Error Handling**: Robust error handling with informative warnings ## Installation @@ -32,184 +27,154 @@ from neptune_scale import Run from TorchWatcher import TorchWatcher # Initialize Neptune run -run = Run( - experiment_name="my-experiment", -) +run = Run(experiment_name="my-experiment") -# Create your model +# Create your PyTorch model model = YourModel() -# Option 1: Track all layers (default) -watcher = TorchWatcher( - model, - run, - tensor_stats=['mean', 'norm'] # Optional: select from predefined statistics -) - -# Option 2: Track specific layer types -layers_to_track = [ - nn.Linear, # Track all linear layers - nn.Conv2d, # Track all convolutional layers - nn.ReLU, # Track all ReLU layers -] - +# Initialize TorchWatcher watcher = TorchWatcher( model, run, - track_layers=layers_to_track, # Specify which layers to track - tensor_stats=['mean', 'norm'] # Optional: select from predefined statistics + track_layers=[nn.Linear, nn.ReLU], # Specify which layer 
types to track (default is all layers) + tensor_stats=['mean', 'norm'], # Choose which statistics to compute (default = "mean" only) + base_namespace="model_metrics" # Set base namespace for all metrics (default = "debug") ) # Training loop for epoch in range(n_epochs): - for batch in dataloader: + for batch in train_loader: # Forward pass output = model(batch) # Backward pass loss.backward() - # Track metrics + # Track metrics with default namespace watcher.watch(step=current_step) ``` -### Detailed Examples - -#### 1. Using Default Settings +### Controlling Metric Logging -The simplest way to use TorchWatcher is with default settings, which will track all layers in your model: +The `watch()` method provides flexible control over what metrics to track and how to organize them: ```python -# Initialize with defaults -watcher = TorchWatcher( - model, - run +# Track all metrics (default behavior) +watcher.watch(step=current_step) + +# Track specific metrics +watcher.watch( + step=current_step, + track_activations=True, + track_gradients=False, + track_parameters=True ) -# This will: -# - Track all layers in the model -# - Use only the 'mean' statistic (default) -# - Log all metrics (activations, gradients, parameters) +# Use a namespace to organize metrics +watcher.watch( + step=current_step, + namespace="train" # Metrics will be under "train/model_metrics/..." +) ``` -#### 2. Specifying Which Layers to Track +### Namespace Organization -You can choose to track specific layer types in your model: +TorchWatcher provides a hierarchical namespace structure for organizing metrics: +1. 
**Base Namespace**: Set during initialization ```python -# Track only convolutional and linear layers -watcher = TorchWatcher( - model, - run, - track_layers=[nn.Conv2d, nn.Linear] -) - -# Track only normalization layers watcher = TorchWatcher( model, run, - track_layers=[nn.BatchNorm2d, nn.LayerNorm] -) - -# Track only activation layers -watcher = TorchWatcher( - model, - run, - track_layers=[nn.ReLU, nn.LeakyReLU, nn.GELU] + base_namespace="model_metrics" # All metrics will be under "model_metrics/" ) ``` -#### 3. Selecting Statistics to Capture - -You can specify which statistics to compute for each layer: - +2. **Per-Call Namespace**: Prefix for specific tracking calls ```python -# Track multiple statistics -watcher = TorchWatcher( - model, - run, - tensor_stats=['mean', 'std', 'norm', 'min', 'max'] -) +# During training +watcher.watch(step=step, namespace="train") # Metrics under "train/model_metrics/" -# Track only basic statistics -watcher = TorchWatcher( - model, - run, - tensor_stats=['mean', 'std'] -) +# During validation +watcher.watch(step=step, namespace="validation") # Metrics under "validation/model_metrics/" +``` -# Track only norm-based statistics -watcher = TorchWatcher( - model, - run, - tensor_stats=['norm', 'abs_mean'] -) +3. **Metric Structure**: Metrics are organized as: +``` +{namespace}/{base_namespace}/{metric_type}/{layer_name}_{statistic} ``` -#### 4. Controlling Metric Logging +Example metric names: +- `train/model_metrics/activation/fc1_mean` +- `validation/model_metrics/gradient/fc2_norm` +- `train/model_metrics/parameters/fc1_weight_mean` -The `watch()` method allows you to specify which metrics to log: +### Example Use Cases +1. 
**Training with Full Tracking**: ```python -# Log all metrics (default) -watcher.watch(step=current_step) - -# Log only activation metrics -watcher.watch(step=current_step, log=["activations"]) - -# Log only gradient metrics -watcher.watch(step=current_step, log=["gradients"]) +# Track everything during initial training +watcher.watch(step=step, namespace="train") +``` -# Log only parameter metrics -watcher.watch(step=current_step, log=["parameters"]) +2. **Validation with Limited Tracking**: +```python +# Track only activations during validation +watcher.watch( + step=step, + track_activations=True, + track_gradients=False, + track_parameters=False, + namespace="validation" +) +``` -# Log specific combinations -watcher.watch(step=current_step, log=["activations", "gradients"]) +3. **Efficient Training**: +```python +# Track only gradients during later training phases +watcher.watch( + step=step, + track_activations=False, + track_parameters=False, + track_gradients=True, + namespace="train" +) ``` -### Layer Types - -You can track any of these PyTorch layer types: -- `nn.Linear` -- `nn.Conv1d`, `nn.Conv2d`, `nn.Conv3d` -- `nn.ConvTranspose1d`, `nn.ConvTranspose2d`, `nn.ConvTranspose3d` -- `nn.BatchNorm1d`, `nn.BatchNorm2d`, `nn.BatchNorm3d` -- `nn.LayerNorm` -- `nn.InstanceNorm1d`, `nn.InstanceNorm2d`, `nn.InstanceNorm3d` -- `nn.GroupNorm` -- `nn.ReLU`, `nn.LeakyReLU`, `nn.PReLU`, `nn.RReLU`, `nn.ELU`, `nn.SELU`, `nn.CELU`, `nn.GELU` -- `nn.Sigmoid` -- `nn.Tanh` -- `nn.Dropout`, `nn.Dropout2d`, `nn.Dropout3d` -- `nn.MaxPool1d`, `nn.MaxPool2d`, `nn.MaxPool3d` -- `nn.AvgPool1d`, `nn.AvgPool2d`, `nn.AvgPool3d` -- `nn.AdaptiveMaxPool1d`, `nn.AdaptiveMaxPool2d`, `nn.AdaptiveMaxPool3d` -- `nn.AdaptiveAvgPool1d`, `nn.AdaptiveAvgPool2d`, `nn.AdaptiveAvgPool3d` -- `nn.LSTM`, `nn.GRU`, `nn.RNN` -- `nn.Embedding` -- `nn.TransformerEncoderLayer`, `nn.TransformerDecoderLayer` - -### Available Statistics - -The following statistics are available for tracking: - -1. 
Basic Statistics: - - `mean`: Mean value - - `std`: Standard deviation - - `norm`: L2 norm - - `min`: Minimum value - - `max`: Maximum value - - `var`: Variance - - `abs_mean`: Mean of absolute values - -### Example - -See `torch_watcher_example.py` for a complete example showing: -- How to track all layers (default behavior) -- How to specify which layers to track -- How to select statistics to monitor -- Integration with a training loop -- Logging metrics to Neptune +## Supported Layer Types + +TorchWatcher supports tracking of all common PyTorch layer types, including: +- Linear layers +- Convolutional layers +- Recurrent layers +- Normalization layers +- Activation layers +- Pooling layers +- Dropout layers +- Embedding layers +- Transformer layers +- Attention layers +- And more... + +## Available Statistics + +Predefined tensor statistics include: +- `mean`: Mean value +- `std`: Standard deviation +- `norm`: L2 norm +- `min`: Minimum value +- `max`: Maximum value +- `var`: Variance +- `abs_mean`: Mean of absolute values + +## Example + +See `torch_watcher_example.py` for a complete example demonstrating: +- Model definition +- Data generation +- Training loop with different tracking configurations +- Namespace organization +- Integration with Neptune ## License diff --git a/integrations-and-supported-tools/pytorch/notebooks/torch_watcher_example.py b/integrations-and-supported-tools/pytorch/notebooks/torch_watcher_example.py index f1a52f8..a173d4e 100644 --- a/integrations-and-supported-tools/pytorch/notebooks/torch_watcher_example.py +++ b/integrations-and-supported-tools/pytorch/notebooks/torch_watcher_example.py @@ -1,6 +1,6 @@ -import numpy as np import torch import torch.nn as nn +import numpy as np from neptune_scale import Run from TorchWatcher import TorchWatcher @@ -34,7 +34,7 @@ def generate_data(n_samples=1000): def train_model(model, X_train, y_train, X_val, y_val, watcher, n_epochs=50, batch_size=32): - """Training function that can be used with 
any watcher configuration.""" + """Training function demonstrating different ways to use TorchWatcher.""" optimizer = torch.optim.Adam(model.parameters(), lr=0.01) criterion = nn.MSELoss() n_batches = len(X_train) // batch_size @@ -60,8 +60,17 @@ def train_model(model, X_train, y_train, X_val, y_val, watcher, n_epochs=50, bat loss.backward() optimizer.step() - # Track metrics after the forward and backward passes - watcher.watch(step=epoch * n_batches + i) + # Track metrics during training + if epoch < 5: # First 5 epochs: track everything + watcher.watch(step=epoch * n_batches + i, namespace="train") + else: # Later epochs: track only gradients for efficiency + watcher.watch( + step=epoch * n_batches + i, + track_activations=False, + track_parameters=False, + track_gradients=True, + namespace="train" + ) train_loss += loss.item() @@ -73,18 +82,26 @@ def train_model(model, X_train, y_train, X_val, y_val, watcher, n_epochs=50, bat with torch.no_grad(): val_output = model(X_val) val_loss = criterion(val_output, y_val) + + # Track metrics during validation + watcher.watch( + step=epoch, + track_activations=True, + track_gradients=False, + track_parameters=False, + namespace="validation" + ) # Log metrics - watcher.run.log_metrics( - data={"train/loss": train_loss, "val/loss": val_loss.item()}, step=epoch - ) + watcher.run.log_metrics(data={ + "train/loss": train_loss, + "val/loss": val_loss.item() + }, step=epoch) if (epoch + 1) % 10 == 0: - print( - f"Epoch [{epoch+1}/{n_epochs}], " - f"Train Loss: {train_loss:.4f}, " - f"Val Loss: {val_loss.item():.4f}" - ) + print(f"Epoch [{epoch+1}/{n_epochs}], " + f"Train Loss: {train_loss:.4f}, " + f"Val Loss: {val_loss.item():.4f}") def main(): @@ -97,29 +114,29 @@ def main(): X_train, y_train = generate_data(n_samples=1000) X_val, y_val = generate_data(n_samples=200) - # Example 1: Track all layers in the model - print("\nTraining with all layers tracked:") - model1 = SimpleNet() - watcher1 = TorchWatcher( - model1, + # Create 
model and watcher + model = SimpleNet() + + # Initialize watcher with specific layer types to track + watcher = TorchWatcher( + model, run, - tensor_stats=["mean", "norm"], # track_layers defaults to None, tracking all layers - ) - train_model(model1, X_train, y_train, X_val, y_val, watcher1) - watcher1.run.close() - - # Example 2: Track specific layer types - print("\nTraining with only Linear and ReLU layers tracked:") - model2 = SimpleNet() - run2 = Run(experiment_name="torch-watcher-example-specific") - watcher2 = TorchWatcher( - model2, - run2, - track_layers=[nn.Linear, nn.ReLU], # Only track Linear and ReLU layers - tensor_stats=["mean", "norm"], + track_layers=[nn.Linear, nn.ReLU], # Track only Linear and ReLU layers + tensor_stats=['mean', 'norm'], # Track mean and norm statistics + base_namespace="model_metrics" # Default namespace for all metrics ) - train_model(model2, X_train, y_train, X_val, y_val, watcher2) - watcher2.run.close() + + # Train the model + print("\nTraining with TorchWatcher:") + print("- Tracking Linear and ReLU layers") + print("- Computing mean and norm statistics") + print("- Using 'model_metrics' as base namespace") + print("- Full tracking during first 5 epochs with 'train/model_metrics' namespace") + print("- Gradient-only tracking during later epochs with 'train/model_metrics' namespace") + print("- Activation-only tracking during validation with 'validation/model_metrics' namespace") + + train_model(model, X_train, y_train, X_val, y_val, watcher) + watcher.run.close() if __name__ == "__main__": From 50b0a6d90742885cec4eab15a02946223ea74e09 Mon Sep 17 00:00:00 2001 From: LeoRoccoBreedt Date: Tue, 1 Apr 2025 14:55:19 +0200 Subject: [PATCH 086/125] chore: pre-commit hooks changes --- .../pytorch/notebooks/README.md | 4 +-- .../pytorch/notebooks/TorchWatcher.py | 12 ++++--- .../notebooks/torch_watcher_example.py | 31 ++++++++++--------- 3 files changed, 26 insertions(+), 21 deletions(-) diff --git 
a/integrations-and-supported-tools/pytorch/notebooks/README.md b/integrations-and-supported-tools/pytorch/notebooks/README.md index aa383a7..67e6bd8 100644 --- a/integrations-and-supported-tools/pytorch/notebooks/README.md +++ b/integrations-and-supported-tools/pytorch/notebooks/README.md @@ -46,10 +46,10 @@ for epoch in range(n_epochs): for batch in train_loader: # Forward pass output = model(batch) - + # Backward pass loss.backward() - + # Track metrics with default namespace watcher.watch(step=current_step) ``` diff --git a/integrations-and-supported-tools/pytorch/notebooks/TorchWatcher.py b/integrations-and-supported-tools/pytorch/notebooks/TorchWatcher.py index 5ba6c87..6693859 100644 --- a/integrations-and-supported-tools/pytorch/notebooks/TorchWatcher.py +++ b/integrations-and-supported-tools/pytorch/notebooks/TorchWatcher.py @@ -278,7 +278,9 @@ def _safe_tensor_stats(self, tensor: torch.Tensor) -> Dict[str, float]: warnings.warn(f"Could not compute {stat_name} statistic: {e}") return stats - def _track_metric(self, metric_type: str, data: Dict[str, torch.Tensor], namespace: Optional[str] = None): + def _track_metric( + self, metric_type: str, data: Dict[str, torch.Tensor], namespace: Optional[str] = None + ): """Track metrics with enhanced statistics for a given metric type. 
Args: @@ -288,12 +290,14 @@ def _track_metric(self, metric_type: str, data: Dict[str, torch.Tensor], namespa """ # Construct the full namespace full_namespace = f"{namespace}/{self.base_namespace}" if namespace else self.base_namespace - + for layer, tensor in data.items(): if tensor is not None: stats = self._safe_tensor_stats(tensor) for stat_name, stat_value in stats.items(): - self.debug_metrics[f"{full_namespace}/{metric_type}/{layer}_{stat_name}"] = stat_value + self.debug_metrics[f"{full_namespace}/{metric_type}/{layer}_{stat_name}"] = ( + stat_value + ) def track_activations(self, namespace: Optional[str] = None): """Track layer activations with enhanced statistics.""" @@ -318,7 +322,7 @@ def watch( self, step: Union[int, float], track_gradients: bool = True, - track_parameters: bool = True, + track_parameters: bool = True, track_activations: bool = True, namespace: Optional[str] = None, ): diff --git a/integrations-and-supported-tools/pytorch/notebooks/torch_watcher_example.py b/integrations-and-supported-tools/pytorch/notebooks/torch_watcher_example.py index a173d4e..1addce3 100644 --- a/integrations-and-supported-tools/pytorch/notebooks/torch_watcher_example.py +++ b/integrations-and-supported-tools/pytorch/notebooks/torch_watcher_example.py @@ -1,6 +1,6 @@ +import numpy as np import torch import torch.nn as nn -import numpy as np from neptune_scale import Run from TorchWatcher import TorchWatcher @@ -69,7 +69,7 @@ def train_model(model, X_train, y_train, X_val, y_val, watcher, n_epochs=50, bat track_activations=False, track_parameters=False, track_gradients=True, - namespace="train" + namespace="train", ) train_loss += loss.item() @@ -82,26 +82,27 @@ def train_model(model, X_train, y_train, X_val, y_val, watcher, n_epochs=50, bat with torch.no_grad(): val_output = model(X_val) val_loss = criterion(val_output, y_val) - + # Track metrics during validation watcher.watch( step=epoch, track_activations=True, track_gradients=False, track_parameters=False, - 
namespace="validation" + namespace="validation", ) # Log metrics - watcher.run.log_metrics(data={ - "train/loss": train_loss, - "val/loss": val_loss.item() - }, step=epoch) + watcher.run.log_metrics( + data={"train/loss": train_loss, "val/loss": val_loss.item()}, step=epoch + ) if (epoch + 1) % 10 == 0: - print(f"Epoch [{epoch+1}/{n_epochs}], " - f"Train Loss: {train_loss:.4f}, " - f"Val Loss: {val_loss.item():.4f}") + print( + f"Epoch [{epoch+1}/{n_epochs}], " + f"Train Loss: {train_loss:.4f}, " + f"Val Loss: {val_loss.item():.4f}" + ) def main(): @@ -116,14 +117,14 @@ def main(): # Create model and watcher model = SimpleNet() - + # Initialize watcher with specific layer types to track watcher = TorchWatcher( model, run, track_layers=[nn.Linear, nn.ReLU], # Track only Linear and ReLU layers - tensor_stats=['mean', 'norm'], # Track mean and norm statistics - base_namespace="model_metrics" # Default namespace for all metrics + tensor_stats=["mean", "norm"], # Track mean and norm statistics + base_namespace="model_metrics", # Default namespace for all metrics ) # Train the model @@ -134,7 +135,7 @@ def main(): print("- Full tracking during first 5 epochs with 'train/model_metrics' namespace") print("- Gradient-only tracking during later epochs with 'train/model_metrics' namespace") print("- Activation-only tracking during validation with 'validation/model_metrics' namespace") - + train_model(model, X_train, y_train, X_val, y_val, watcher) watcher.run.close() From e67e1f6fdbfd59b1929acb030d360758c96c9f91 Mon Sep 17 00:00:00 2001 From: LeoRoccoBreedt Date: Tue, 1 Apr 2025 15:39:25 +0200 Subject: [PATCH 087/125] refactor: update example to use the improved TorchWatcher package --- .../pytorch_text_model_debugging.ipynb | 110 ++++++++++++++---- 1 file changed, 87 insertions(+), 23 deletions(-) diff --git a/integrations-and-supported-tools/pytorch/notebooks/pytorch_text_model_debugging.ipynb 
b/integrations-and-supported-tools/pytorch/notebooks/pytorch_text_model_debugging.ipynb index 073217c..8118620 100644 --- a/integrations-and-supported-tools/pytorch/notebooks/pytorch_text_model_debugging.ipynb +++ b/integrations-and-supported-tools/pytorch/notebooks/pytorch_text_model_debugging.ipynb @@ -102,9 +102,18 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 1, "metadata": {}, - "outputs": [], + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "c:\\Users\\leo.breedt\\miniconda3\\envs\\neptune_scale_py_312_base\\Lib\\site-packages\\tqdm\\auto.py:21: TqdmWarning: IProgress not found. Please update jupyter and ipywidgets. See https://ipywidgets.readthedocs.io/en/stable/user_install.html\n", + " from .autonotebook import tqdm as notebook_tqdm\n" + ] + } + ], "source": [ "# Import libraries\n", "import torch\n", @@ -116,9 +125,7 @@ "from datasets import load_dataset\n", "\n", "from neptune_scale import Run\n", - "import os\n", - "\n", - "from typing import Literal" + "import os" ] }, { @@ -130,7 +137,7 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 2, "metadata": {}, "outputs": [], "source": [ @@ -164,9 +171,18 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 3, "metadata": {}, - "outputs": [], + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Training samples: 81926 \n", + "Validation samples: 935\n" + ] + } + ], "source": [ "# For the example, download a random subset of 10% of the original dataset\n", "base_url = \"https://huggingface.co/datasets/Na0s/Next_Token_Prediction_dataset/resolve/main/data/\"\n", @@ -196,9 +212,17 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 4, "metadata": {}, - "outputs": [], + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Vocabulary size: 128257\n" + ] + } + ], "source": [ "train_subset = 
data_subset[\"train\"].with_format(\n", " type=\"torch\", columns=[\"text\", \"input_ids\", \"labels\"]\n", @@ -233,7 +257,7 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 5, "metadata": {}, "outputs": [], "source": [ @@ -309,7 +333,7 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 6, "metadata": {}, "outputs": [], "source": [ @@ -332,9 +356,17 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 15, "metadata": {}, - "outputs": [], + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "https://scale.neptune.ai/leo/pytorch-tutorial/runs/details?runIdentificationKey=pytorch-text&type=experiment\n" + ] + } + ], "source": [ "from neptune_scale import Run\n", "from uuid import uuid4\n", @@ -376,13 +408,46 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 16, "metadata": {}, - "outputs": [], + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Step 1 / 10241, Loss: 11.765020370483398\n", + "Step 2 / 10241, Loss: 11.628341674804688\n", + "Step 3 / 10241, Loss: 9.686809539794922\n", + "Step 4 / 10241, Loss: 9.657011032104492\n", + "Step 5 / 10241, Loss: 9.74715518951416\n", + "Step 6 / 10241, Loss: 10.450525283813477\n", + "Step 7 / 10241, Loss: 10.163161277770996\n", + "Step 8 / 10241, Loss: 10.278069496154785\n", + "Step 9 / 10241, Loss: 10.855117797851562\n", + "Step 10 / 10241, Loss: 10.982325553894043\n", + "Step 11 / 10241, Loss: 10.332056999206543\n", + "Step 12 / 10241, Loss: 10.331168174743652\n", + "Step 13 / 10241, Loss: 10.291738510131836\n", + "Step 14 / 10241, Loss: 9.950544357299805\n" + ] + }, + { + "ename": "KeyboardInterrupt", + "evalue": "", + "output_type": "error", + "traceback": [ + "\u001b[1;31m---------------------------------------------------------------------------\u001b[0m", + "\u001b[1;31mKeyboardInterrupt\u001b[0m Traceback (most recent call last)", + "Cell \u001b[1;32mIn[16], line 
36\u001b[0m\n\u001b[0;32m 33\u001b[0m loss \u001b[38;5;241m=\u001b[39m criterion(logits\u001b[38;5;241m.\u001b[39mview(\u001b[38;5;241m-\u001b[39m\u001b[38;5;241m1\u001b[39m, vocab_size), labels\u001b[38;5;241m.\u001b[39mview(\u001b[38;5;241m-\u001b[39m\u001b[38;5;241m1\u001b[39m))\n\u001b[0;32m 35\u001b[0m \u001b[38;5;66;03m# Backward pass and optimization\u001b[39;00m\n\u001b[1;32m---> 36\u001b[0m \u001b[43mloss\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mbackward\u001b[49m\u001b[43m(\u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m 37\u001b[0m optimizer\u001b[38;5;241m.\u001b[39mstep()\n\u001b[0;32m 39\u001b[0m total_loss \u001b[38;5;241m+\u001b[39m\u001b[38;5;241m=\u001b[39m loss\u001b[38;5;241m.\u001b[39mitem()\n", + "File \u001b[1;32mc:\\Users\\leo.breedt\\miniconda3\\envs\\neptune_scale_py_312_base\\Lib\\site-packages\\torch\\_tensor.py:626\u001b[0m, in \u001b[0;36mTensor.backward\u001b[1;34m(self, gradient, retain_graph, create_graph, inputs)\u001b[0m\n\u001b[0;32m 616\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m has_torch_function_unary(\u001b[38;5;28mself\u001b[39m):\n\u001b[0;32m 617\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m handle_torch_function(\n\u001b[0;32m 618\u001b[0m Tensor\u001b[38;5;241m.\u001b[39mbackward,\n\u001b[0;32m 619\u001b[0m (\u001b[38;5;28mself\u001b[39m,),\n\u001b[1;32m (...)\u001b[0m\n\u001b[0;32m 624\u001b[0m inputs\u001b[38;5;241m=\u001b[39minputs,\n\u001b[0;32m 625\u001b[0m )\n\u001b[1;32m--> 626\u001b[0m \u001b[43mtorch\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mautograd\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mbackward\u001b[49m\u001b[43m(\u001b[49m\n\u001b[0;32m 627\u001b[0m \u001b[43m \u001b[49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mgradient\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mretain_graph\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mcreate_graph\u001b[49m\u001b[43m,\u001b[49m\u001b[43m 
\u001b[49m\u001b[43minputs\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43minputs\u001b[49m\n\u001b[0;32m 628\u001b[0m \u001b[43m\u001b[49m\u001b[43m)\u001b[49m\n", + "File \u001b[1;32mc:\\Users\\leo.breedt\\miniconda3\\envs\\neptune_scale_py_312_base\\Lib\\site-packages\\torch\\autograd\\__init__.py:347\u001b[0m, in \u001b[0;36mbackward\u001b[1;34m(tensors, grad_tensors, retain_graph, create_graph, grad_variables, inputs)\u001b[0m\n\u001b[0;32m 342\u001b[0m retain_graph \u001b[38;5;241m=\u001b[39m create_graph\n\u001b[0;32m 344\u001b[0m \u001b[38;5;66;03m# The reason we repeat the same comment below is that\u001b[39;00m\n\u001b[0;32m 345\u001b[0m \u001b[38;5;66;03m# some Python versions print out the first line of a multi-line function\u001b[39;00m\n\u001b[0;32m 346\u001b[0m \u001b[38;5;66;03m# calls in the traceback and some print out the last line\u001b[39;00m\n\u001b[1;32m--> 347\u001b[0m \u001b[43m_engine_run_backward\u001b[49m\u001b[43m(\u001b[49m\n\u001b[0;32m 348\u001b[0m \u001b[43m \u001b[49m\u001b[43mtensors\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 349\u001b[0m \u001b[43m \u001b[49m\u001b[43mgrad_tensors_\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 350\u001b[0m \u001b[43m \u001b[49m\u001b[43mretain_graph\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 351\u001b[0m \u001b[43m \u001b[49m\u001b[43mcreate_graph\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 352\u001b[0m \u001b[43m \u001b[49m\u001b[43minputs\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 353\u001b[0m \u001b[43m \u001b[49m\u001b[43mallow_unreachable\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[38;5;28;43;01mTrue\u001b[39;49;00m\u001b[43m,\u001b[49m\n\u001b[0;32m 354\u001b[0m \u001b[43m \u001b[49m\u001b[43maccumulate_grad\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[38;5;28;43;01mTrue\u001b[39;49;00m\u001b[43m,\u001b[49m\n\u001b[0;32m 355\u001b[0m \u001b[43m\u001b[49m\u001b[43m)\u001b[49m\n", + "File 
\u001b[1;32mc:\\Users\\leo.breedt\\miniconda3\\envs\\neptune_scale_py_312_base\\Lib\\site-packages\\torch\\autograd\\graph.py:823\u001b[0m, in \u001b[0;36m_engine_run_backward\u001b[1;34m(t_outputs, *args, **kwargs)\u001b[0m\n\u001b[0;32m 821\u001b[0m unregister_hooks \u001b[38;5;241m=\u001b[39m _register_logging_hooks_on_whole_graph(t_outputs)\n\u001b[0;32m 822\u001b[0m \u001b[38;5;28;01mtry\u001b[39;00m:\n\u001b[1;32m--> 823\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[43mVariable\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_execution_engine\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mrun_backward\u001b[49m\u001b[43m(\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;66;43;03m# Calls into the C++ engine to run the backward pass\u001b[39;49;00m\n\u001b[0;32m 824\u001b[0m \u001b[43m \u001b[49m\u001b[43mt_outputs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\n\u001b[0;32m 825\u001b[0m \u001b[43m \u001b[49m\u001b[43m)\u001b[49m \u001b[38;5;66;03m# Calls into the C++ engine to run the backward pass\u001b[39;00m\n\u001b[0;32m 826\u001b[0m \u001b[38;5;28;01mfinally\u001b[39;00m:\n\u001b[0;32m 827\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m attach_logging_hooks:\n", + "File \u001b[1;32mc:\\Users\\leo.breedt\\miniconda3\\envs\\neptune_scale_py_312_base\\Lib\\site-packages\\torch\\autograd\\function.py:292\u001b[0m, in \u001b[0;36mBackwardCFunction.apply\u001b[1;34m(self, *args)\u001b[0m\n\u001b[0;32m 287\u001b[0m \u001b[38;5;28;01mclass\u001b[39;00m \u001b[38;5;21;01mBackwardCFunction\u001b[39;00m(_C\u001b[38;5;241m.\u001b[39m_FunctionBase, FunctionCtx, _HookMixin):\n\u001b[0;32m 288\u001b[0m \u001b[38;5;250m \u001b[39m\u001b[38;5;124mr\u001b[39m\u001b[38;5;124;03m\"\"\"\u001b[39;00m\n\u001b[0;32m 289\u001b[0m \u001b[38;5;124;03m This class is used for internal 
autograd work. Do not use.\u001b[39;00m\n\u001b[0;32m 290\u001b[0m \u001b[38;5;124;03m \"\"\"\u001b[39;00m\n\u001b[1;32m--> 292\u001b[0m \u001b[38;5;28;01mdef\u001b[39;00m \u001b[38;5;21mapply\u001b[39m(\u001b[38;5;28mself\u001b[39m, \u001b[38;5;241m*\u001b[39margs):\n\u001b[0;32m 293\u001b[0m \u001b[38;5;250m \u001b[39m\u001b[38;5;124mr\u001b[39m\u001b[38;5;124;03m\"\"\"\u001b[39;00m\n\u001b[0;32m 294\u001b[0m \u001b[38;5;124;03m Apply method used when executing this Node during the backward\u001b[39;00m\n\u001b[0;32m 295\u001b[0m \u001b[38;5;124;03m \"\"\"\u001b[39;00m\n\u001b[0;32m 296\u001b[0m \u001b[38;5;66;03m# _forward_cls is defined by derived class\u001b[39;00m\n\u001b[0;32m 297\u001b[0m \u001b[38;5;66;03m# The user should define either backward or vjp but never both.\u001b[39;00m\n", + "\u001b[1;31mKeyboardInterrupt\u001b[0m: " + ] + } + ], "source": [ - "# Training setup\n", - "debug_metrics = {}\n", - "\n", "# Initialize model and optimizer\n", "model = SimpleLLM(\n", " params[\"vocab_size\"], params[\"embed_size\"], params[\"hidden_size\"], params[\"num_lstm_layers\"]\n", @@ -393,7 +458,7 @@ ") # Ignore the buffering index of -100 in the dataset\n", "\n", "# Define watcher class\n", - "watcher = TorchWatcher(model=model, run=run, tensor_stats=[\"mean\", \"norm\"])\n", + "watcher = TorchWatcher(model=model, run=run, tensor_stats=[\"mean\", \"norm\"], base_namespace=\"debug_metrics\")\n", "\n", "device = torch.device(\"cuda\" if torch.cuda.is_available() else \"cpu\")\n", "model.to(device)\n", @@ -425,7 +490,7 @@ " print(f\"Step {step_counter} / {len(train_dataloader)}, Loss: {loss.item()}\")\n", "\n", " # Call watch() method in loop determing which layer-wise metrics to watch\n", - " watcher.watch(step=step_counter)\n", + " watcher.watch(step=step_counter, track_activations=False, track_parameters=False)\n", "\n", " if step_counter % 50 == 0: # Log validation loss at every 50 steps\n", " val_loss = evaluate(model, val_dataloader, criterion, device, 
vocab_size)\n", @@ -435,13 +500,12 @@ " \"metrics/train/loss\": loss.item(),\n", " \"metrics/validation/loss\": val_loss,\n", " \"epoch/value\": epoch,\n", - " **debug_metrics,\n", " },\n", " step=step_counter,\n", " )\n", " else: # Log training loss and debugging metrics for each step\n", " run.log_metrics(\n", - " data={\"metrics/train/loss\": loss.item(), \"epoch/value\": epoch, **debug_metrics},\n", + " data={\"metrics/train/loss\": loss.item(), \"epoch/value\": epoch},\n", " step=step_counter,\n", " )\n", "\n", From a2d0d5007a868643562554fe568bbf239994bcf1 Mon Sep 17 00:00:00 2001 From: LeoRoccoBreedt Date: Thu, 3 Apr 2025 14:11:13 +0200 Subject: [PATCH 088/125] chore: pre-commit cleanup --- .../pytorch_text_model_debugging.ipynb | 100 +++--------------- 1 file changed, 16 insertions(+), 84 deletions(-) diff --git a/integrations-and-supported-tools/pytorch/notebooks/pytorch_text_model_debugging.ipynb b/integrations-and-supported-tools/pytorch/notebooks/pytorch_text_model_debugging.ipynb index 8118620..1b1b03c 100644 --- a/integrations-and-supported-tools/pytorch/notebooks/pytorch_text_model_debugging.ipynb +++ b/integrations-and-supported-tools/pytorch/notebooks/pytorch_text_model_debugging.ipynb @@ -102,18 +102,9 @@ }, { "cell_type": "code", - "execution_count": 1, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "name": "stderr", - "output_type": "stream", - "text": [ - "c:\\Users\\leo.breedt\\miniconda3\\envs\\neptune_scale_py_312_base\\Lib\\site-packages\\tqdm\\auto.py:21: TqdmWarning: IProgress not found. Please update jupyter and ipywidgets. 
See https://ipywidgets.readthedocs.io/en/stable/user_install.html\n", - " from .autonotebook import tqdm as notebook_tqdm\n" - ] - } - ], + "outputs": [], "source": [ "# Import libraries\n", "import torch\n", @@ -137,7 +128,7 @@ }, { "cell_type": "code", - "execution_count": 2, + "execution_count": null, "metadata": {}, "outputs": [], "source": [ @@ -171,18 +162,9 @@ }, { "cell_type": "code", - "execution_count": 3, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Training samples: 81926 \n", - "Validation samples: 935\n" - ] - } - ], + "outputs": [], "source": [ "# For the example, download a random subset of 10% of the original dataset\n", "base_url = \"https://huggingface.co/datasets/Na0s/Next_Token_Prediction_dataset/resolve/main/data/\"\n", @@ -212,17 +194,9 @@ }, { "cell_type": "code", - "execution_count": 4, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Vocabulary size: 128257\n" - ] - } - ], + "outputs": [], "source": [ "train_subset = data_subset[\"train\"].with_format(\n", " type=\"torch\", columns=[\"text\", \"input_ids\", \"labels\"]\n", @@ -257,7 +231,7 @@ }, { "cell_type": "code", - "execution_count": 5, + "execution_count": null, "metadata": {}, "outputs": [], "source": [ @@ -333,7 +307,7 @@ }, { "cell_type": "code", - "execution_count": 6, + "execution_count": null, "metadata": {}, "outputs": [], "source": [ @@ -356,17 +330,9 @@ }, { "cell_type": "code", - "execution_count": 15, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "https://scale.neptune.ai/leo/pytorch-tutorial/runs/details?runIdentificationKey=pytorch-text&type=experiment\n" - ] - } - ], + "outputs": [], "source": [ "from neptune_scale import Run\n", "from uuid import uuid4\n", @@ -408,45 +374,9 @@ }, { "cell_type": "code", - "execution_count": 16, + 
"execution_count": null, "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Step 1 / 10241, Loss: 11.765020370483398\n", - "Step 2 / 10241, Loss: 11.628341674804688\n", - "Step 3 / 10241, Loss: 9.686809539794922\n", - "Step 4 / 10241, Loss: 9.657011032104492\n", - "Step 5 / 10241, Loss: 9.74715518951416\n", - "Step 6 / 10241, Loss: 10.450525283813477\n", - "Step 7 / 10241, Loss: 10.163161277770996\n", - "Step 8 / 10241, Loss: 10.278069496154785\n", - "Step 9 / 10241, Loss: 10.855117797851562\n", - "Step 10 / 10241, Loss: 10.982325553894043\n", - "Step 11 / 10241, Loss: 10.332056999206543\n", - "Step 12 / 10241, Loss: 10.331168174743652\n", - "Step 13 / 10241, Loss: 10.291738510131836\n", - "Step 14 / 10241, Loss: 9.950544357299805\n" - ] - }, - { - "ename": "KeyboardInterrupt", - "evalue": "", - "output_type": "error", - "traceback": [ - "\u001b[1;31m---------------------------------------------------------------------------\u001b[0m", - "\u001b[1;31mKeyboardInterrupt\u001b[0m Traceback (most recent call last)", - "Cell \u001b[1;32mIn[16], line 36\u001b[0m\n\u001b[0;32m 33\u001b[0m loss \u001b[38;5;241m=\u001b[39m criterion(logits\u001b[38;5;241m.\u001b[39mview(\u001b[38;5;241m-\u001b[39m\u001b[38;5;241m1\u001b[39m, vocab_size), labels\u001b[38;5;241m.\u001b[39mview(\u001b[38;5;241m-\u001b[39m\u001b[38;5;241m1\u001b[39m))\n\u001b[0;32m 35\u001b[0m \u001b[38;5;66;03m# Backward pass and optimization\u001b[39;00m\n\u001b[1;32m---> 36\u001b[0m \u001b[43mloss\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mbackward\u001b[49m\u001b[43m(\u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m 37\u001b[0m optimizer\u001b[38;5;241m.\u001b[39mstep()\n\u001b[0;32m 39\u001b[0m total_loss \u001b[38;5;241m+\u001b[39m\u001b[38;5;241m=\u001b[39m loss\u001b[38;5;241m.\u001b[39mitem()\n", - "File \u001b[1;32mc:\\Users\\leo.breedt\\miniconda3\\envs\\neptune_scale_py_312_base\\Lib\\site-packages\\torch\\_tensor.py:626\u001b[0m, in 
\u001b[0;36mTensor.backward\u001b[1;34m(self, gradient, retain_graph, create_graph, inputs)\u001b[0m\n\u001b[0;32m 616\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m has_torch_function_unary(\u001b[38;5;28mself\u001b[39m):\n\u001b[0;32m 617\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m handle_torch_function(\n\u001b[0;32m 618\u001b[0m Tensor\u001b[38;5;241m.\u001b[39mbackward,\n\u001b[0;32m 619\u001b[0m (\u001b[38;5;28mself\u001b[39m,),\n\u001b[1;32m (...)\u001b[0m\n\u001b[0;32m 624\u001b[0m inputs\u001b[38;5;241m=\u001b[39minputs,\n\u001b[0;32m 625\u001b[0m )\n\u001b[1;32m--> 626\u001b[0m \u001b[43mtorch\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mautograd\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mbackward\u001b[49m\u001b[43m(\u001b[49m\n\u001b[0;32m 627\u001b[0m \u001b[43m \u001b[49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mgradient\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mretain_graph\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mcreate_graph\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43minputs\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43minputs\u001b[49m\n\u001b[0;32m 628\u001b[0m \u001b[43m\u001b[49m\u001b[43m)\u001b[49m\n", - "File \u001b[1;32mc:\\Users\\leo.breedt\\miniconda3\\envs\\neptune_scale_py_312_base\\Lib\\site-packages\\torch\\autograd\\__init__.py:347\u001b[0m, in \u001b[0;36mbackward\u001b[1;34m(tensors, grad_tensors, retain_graph, create_graph, grad_variables, inputs)\u001b[0m\n\u001b[0;32m 342\u001b[0m retain_graph \u001b[38;5;241m=\u001b[39m create_graph\n\u001b[0;32m 344\u001b[0m \u001b[38;5;66;03m# The reason we repeat the same comment below is that\u001b[39;00m\n\u001b[0;32m 345\u001b[0m \u001b[38;5;66;03m# some Python versions print out the first line of a multi-line function\u001b[39;00m\n\u001b[0;32m 346\u001b[0m \u001b[38;5;66;03m# calls in the traceback and some print out the last line\u001b[39;00m\n\u001b[1;32m--> 
347\u001b[0m \u001b[43m_engine_run_backward\u001b[49m\u001b[43m(\u001b[49m\n\u001b[0;32m 348\u001b[0m \u001b[43m \u001b[49m\u001b[43mtensors\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 349\u001b[0m \u001b[43m \u001b[49m\u001b[43mgrad_tensors_\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 350\u001b[0m \u001b[43m \u001b[49m\u001b[43mretain_graph\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 351\u001b[0m \u001b[43m \u001b[49m\u001b[43mcreate_graph\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 352\u001b[0m \u001b[43m \u001b[49m\u001b[43minputs\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 353\u001b[0m \u001b[43m \u001b[49m\u001b[43mallow_unreachable\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[38;5;28;43;01mTrue\u001b[39;49;00m\u001b[43m,\u001b[49m\n\u001b[0;32m 354\u001b[0m \u001b[43m \u001b[49m\u001b[43maccumulate_grad\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[38;5;28;43;01mTrue\u001b[39;49;00m\u001b[43m,\u001b[49m\n\u001b[0;32m 355\u001b[0m \u001b[43m\u001b[49m\u001b[43m)\u001b[49m\n", - "File \u001b[1;32mc:\\Users\\leo.breedt\\miniconda3\\envs\\neptune_scale_py_312_base\\Lib\\site-packages\\torch\\autograd\\graph.py:823\u001b[0m, in \u001b[0;36m_engine_run_backward\u001b[1;34m(t_outputs, *args, **kwargs)\u001b[0m\n\u001b[0;32m 821\u001b[0m unregister_hooks \u001b[38;5;241m=\u001b[39m _register_logging_hooks_on_whole_graph(t_outputs)\n\u001b[0;32m 822\u001b[0m \u001b[38;5;28;01mtry\u001b[39;00m:\n\u001b[1;32m--> 823\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[43mVariable\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_execution_engine\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mrun_backward\u001b[49m\u001b[43m(\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;66;43;03m# Calls into the C++ engine to run the backward pass\u001b[39;49;00m\n\u001b[0;32m 824\u001b[0m \u001b[43m \u001b[49m\u001b[43mt_outputs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m 
\u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\n\u001b[0;32m 825\u001b[0m \u001b[43m \u001b[49m\u001b[43m)\u001b[49m \u001b[38;5;66;03m# Calls into the C++ engine to run the backward pass\u001b[39;00m\n\u001b[0;32m 826\u001b[0m \u001b[38;5;28;01mfinally\u001b[39;00m:\n\u001b[0;32m 827\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m attach_logging_hooks:\n", - "File \u001b[1;32mc:\\Users\\leo.breedt\\miniconda3\\envs\\neptune_scale_py_312_base\\Lib\\site-packages\\torch\\autograd\\function.py:292\u001b[0m, in \u001b[0;36mBackwardCFunction.apply\u001b[1;34m(self, *args)\u001b[0m\n\u001b[0;32m 287\u001b[0m \u001b[38;5;28;01mclass\u001b[39;00m \u001b[38;5;21;01mBackwardCFunction\u001b[39;00m(_C\u001b[38;5;241m.\u001b[39m_FunctionBase, FunctionCtx, _HookMixin):\n\u001b[0;32m 288\u001b[0m \u001b[38;5;250m \u001b[39m\u001b[38;5;124mr\u001b[39m\u001b[38;5;124;03m\"\"\"\u001b[39;00m\n\u001b[0;32m 289\u001b[0m \u001b[38;5;124;03m This class is used for internal autograd work. 
Do not use.\u001b[39;00m\n\u001b[0;32m 290\u001b[0m \u001b[38;5;124;03m \"\"\"\u001b[39;00m\n\u001b[1;32m--> 292\u001b[0m \u001b[38;5;28;01mdef\u001b[39;00m \u001b[38;5;21mapply\u001b[39m(\u001b[38;5;28mself\u001b[39m, \u001b[38;5;241m*\u001b[39margs):\n\u001b[0;32m 293\u001b[0m \u001b[38;5;250m \u001b[39m\u001b[38;5;124mr\u001b[39m\u001b[38;5;124;03m\"\"\"\u001b[39;00m\n\u001b[0;32m 294\u001b[0m \u001b[38;5;124;03m Apply method used when executing this Node during the backward\u001b[39;00m\n\u001b[0;32m 295\u001b[0m \u001b[38;5;124;03m \"\"\"\u001b[39;00m\n\u001b[0;32m 296\u001b[0m \u001b[38;5;66;03m# _forward_cls is defined by derived class\u001b[39;00m\n\u001b[0;32m 297\u001b[0m \u001b[38;5;66;03m# The user should define either backward or vjp but never both.\u001b[39;00m\n", - "\u001b[1;31mKeyboardInterrupt\u001b[0m: " - ] - } - ], + "outputs": [], "source": [ "# Initialize model and optimizer\n", "model = SimpleLLM(\n", @@ -458,7 +388,9 @@ ") # Ignore the buffering index of -100 in the dataset\n", "\n", "# Define watcher class\n", - "watcher = TorchWatcher(model=model, run=run, tensor_stats=[\"mean\", \"norm\"], base_namespace=\"debug_metrics\")\n", + "watcher = TorchWatcher(\n", + " model=model, run=run, tensor_stats=[\"mean\", \"norm\"], base_namespace=\"debug_metrics\"\n", + ")\n", "\n", "device = torch.device(\"cuda\" if torch.cuda.is_available() else \"cpu\")\n", "model.to(device)\n", From fcf36577e68bac1062cb8bbf1319926231a5045a Mon Sep 17 00:00:00 2001 From: LeoRoccoBreedt Date: Tue, 8 Apr 2025 09:41:15 +0200 Subject: [PATCH 089/125] feat: notebook tutorial on debugging training runs with Neptune --- .../debug_training_runs.ipynb | 401 ++++++++++++++++++ .../debug-model-training-runs/setup.py | 226 ++++++++++ .../tutorial-images/debugging_dashboard.png | Bin 0 -> 151345 bytes .../debugging_regex_search.png | Bin 0 -> 102276 bytes .../tutorial-images/debugging_report.png | Bin 0 -> 124967 bytes 5 files changed, 627 insertions(+) create mode 100644 
how-to-guides/debug-model-training-runs/debug_training_runs.ipynb create mode 100644 how-to-guides/debug-model-training-runs/setup.py create mode 100644 how-to-guides/debug-model-training-runs/tutorial-images/debugging_dashboard.png create mode 100644 how-to-guides/debug-model-training-runs/tutorial-images/debugging_regex_search.png create mode 100644 how-to-guides/debug-model-training-runs/tutorial-images/debugging_report.png diff --git a/how-to-guides/debug-model-training-runs/debug_training_runs.ipynb b/how-to-guides/debug-model-training-runs/debug_training_runs.ipynb new file mode 100644 index 0000000..337b0ac --- /dev/null +++ b/how-to-guides/debug-model-training-runs/debug_training_runs.ipynb @@ -0,0 +1,401 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# Debug Model Training with Neptune\n", + "\n", + " \n", + " \"Open \n", + "" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Introduction\n", + "Training large models requires careful monitoring of layer-wise metrics to catch issues early. \n", + "\n", + "Neptune makes it easy to track and visualize metrics like gradient norms across all layers of your model - helping you identify problems like vanishing/exploding gradients quickly.\n", + "\n", + "In this tutorial, you'll learn how to:\n", + "1. **Initialize Neptune** and **log configuration parameters**\n", + "2. Track **layer-wise gradient norms** during training \n", + "3. 
Analyze the metrics in Neptune's UI to **debug training issues**\n", + "\n", + "Step through a pre-configured report [here](https://scale.neptune.ai/leo/pytorch-tutorial/reports/9e79d952-272a-4a38-83e5-27df4dd225ec) to see a finalized version.\n", + "\n", + "_Note: This is a code recipe that you can adapt for your own model training needs._\n", + "\n", + " ![Layer-wise gradient norms visualization in Neptune](tutorial-images/debugging_report.png)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Before you start\n", + "\n", + " 1. Create a Neptune Scale account. [Register →](https://neptune.ai/early-access)\n", + " 2. Create a Neptune project for tracking metadata. For instructions, see [Projects](https://docs-beta.neptune.ai/projects/) in the Neptune Scale docs.\n", + " 3. Install and configure Neptune Scale for logging metadata. For instructions, see [Get started](https://docs-beta.neptune.ai/setup) in the Neptune Scale docs." + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Set environment variables\n", + "Set your project name and API token as environment variables to log to your Neptune Scale project.\n", + "\n", + "Uncomment the code block below and replace placeholder values with your own credentials:" + ] + }, + { + "cell_type": "code", + "execution_count": 20, + "metadata": {}, + "outputs": [], + "source": [ + "# Set Neptune credentials as environment variables\n", + "# %env NEPTUNE_API_TOKEN = YOUR_API_TOKEN\n", + "# %env NEPTUNE_PROJECT = WORKSPACE_NAME/PROJECT_NAME" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Install dependencies and import libraries" + ] + }, + { + "cell_type": "code", + "execution_count": 19, + "metadata": {}, + "outputs": [], + "source": [ + "# Install dependencies\n", + "! 
pip install -qU neptune_scale torch datasets\n", + "\n", + "import torch" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Initialize parameters" + ] + }, + { + "cell_type": "code", + "execution_count": 21, + "metadata": {}, + "outputs": [], + "source": [ + "params = {\n", + " \"optimizer\": \"Adam\",\n", + " \"batch_size\": 8,\n", + " \"learning_rate\": 0.01,\n", + " \"epochs\": 5,\n", + " \"device\": torch.device(\"cuda\" if torch.cuda.is_available() else \"cpu\"),\n", + " \"input_features\": 256,\n", + " \"embed_size\": 1000,\n", + " \"hidden_size\": 256, # hidden size for the LSTM\n", + " \"dropout_prob\": 0.3,\n", + " \"num_lstm_layers\": 3,\n", + "}" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Setup data, model and other PyTorch required functions\n", + "\n", + "The `setup.py` script wraps the data and model creation for use in this tutorial. You can use your own data and model setup if required. " + ] + }, + { + "cell_type": "code", + "execution_count": 22, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Training samples: 81926\n", + "Validation samples: 935\n", + "Vocabulary size: 128257\n", + "Model created: MultilayerModel\n", + "Optimizer: Adam\n", + "Criterion: CrossEntropyLoss\n" + ] + } + ], + "source": [ + "from setup import setup_training\n", + " \n", + "# Setup complete training environment\n", + "model, optimizer, criterion, train_dataloader, val_dataloader, vocab_size = setup_training(params, use_multilayer=True)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Debug model training run with Neptune" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Step 1: _Initialize Neptune Run object_\n", + "\n", + "The `Run` object is used to log configuration parameters and metrics. 
" + ] + }, + { + "cell_type": "code", + "execution_count": 27, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "https://scale.neptune.ai/leo/pytorch-tutorial/runs/details?runIdentificationKey=pytorch-text&type=experiment\n" + ] + } + ], + "source": [ + "from neptune_scale import Run\n", + "\n", + "run = Run(\n", + " experiment_name=\"pytorch-text\", # Create a run that is the head of an experiment. This is also used for forking.\n", + ")\n", + "\n", + "print(run.get_experiment_url())" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Step 2: _Log configuration parameters and tags_" + ] + }, + { + "cell_type": "code", + "execution_count": 31, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "See configuration parameters:\n", + "https://scale.neptune.ai/leo/pytorch-tutorial/runs/details?runIdentificationKey=pytorch-text&type=experiment&detailsTab=metadata\n" + ] + } + ], + "source": [ + "run.log_configs(\n", + " {\n", + " \"config/learning_rate\": params[\"learning_rate\"],\n", + " \"config/optimizer\": params[\"optimizer\"],\n", + " \"config/batch_size\": params[\"batch_size\"],\n", + " \"config/epochs\": params[\"epochs\"],\n", + " \"config/num_lstm_layers\": params[\"num_lstm_layers\"],\n", + " \"data/embed_size\": params[\"embed_size\"],\n", + " }\n", + ")\n", + "\n", + "run.add_tags(tags=[\"text\", \"LLM\", \"Simple\", params[\"optimizer\"]])\n", + "\n", + "print(f\"See configuration parameters:\\n{run.get_experiment_url() + '&detailsTab=metadata'}\")" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Step 3: _Execute model training loop_\n", + "\n", + "In this training loop, we:\n", + "1. Register backward hooks to capture gradient norms from all model layers with `register_full_backward_hook()`\n", + "2. 
Track these norms during training to identify potential issues like vanishing/exploding gradients in a dictionary called `debugging_gradient_norms`\n", + "3. Log the gradient norms to Neptune for visualization and analysis using the `log_metrics` method\n", + "\n", + "This approach allows you to monitor the learning dynamics across your entire model architecture in near real-time." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "View charts in near real-time:\n", + "https://scale.neptune.ai/leo/pytorch-tutorial/runs/details?runIdentificationKey=pytorch-text&type=experiment&detailsTab=charts\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "2025-04-08 09:18:43,794 neptune:ERROR: \n", + "\n", + "NeptuneSeriesStepNonIncreasing: Subsequent steps of a series must be increasing.\n", + "\n", + "This can be caused by either:\n", + "- The step of a series value is smaller than the most recently logged step for this series\n", + "- the step is exactly the same but the value is different\n", + "\n", + "For help, see https://docs-beta.neptune.ai/log_metrics\n", + "\n" + ] + }, + { + "ename": "KeyboardInterrupt", + "evalue": "", + "output_type": "error", + "traceback": [ + "\u001b[1;31m---------------------------------------------------------------------------\u001b[0m", + "\u001b[1;31mKeyboardInterrupt\u001b[0m Traceback (most recent call last)", + "Cell \u001b[1;32mIn[32], line 29\u001b[0m\n\u001b[0;32m 27\u001b[0m logits \u001b[38;5;241m=\u001b[39m model(input_ids)\n\u001b[0;32m 28\u001b[0m loss \u001b[38;5;241m=\u001b[39m criterion(logits\u001b[38;5;241m.\u001b[39mview(\u001b[38;5;241m-\u001b[39m\u001b[38;5;241m1\u001b[39m, vocab_size), labels\u001b[38;5;241m.\u001b[39mview(\u001b[38;5;241m-\u001b[39m\u001b[38;5;241m1\u001b[39m))\n\u001b[1;32m---> 29\u001b[0m 
\u001b[43mloss\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mbackward\u001b[49m\u001b[43m(\u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m 30\u001b[0m optimizer\u001b[38;5;241m.\u001b[39mstep()\n\u001b[0;32m 32\u001b[0m \u001b[38;5;66;03m# print(f\"Step {step_counter} / {len(train_dataloader)}, Loss: {loss.item()}\")\u001b[39;00m\n\u001b[0;32m 33\u001b[0m \n\u001b[0;32m 34\u001b[0m \u001b[38;5;66;03m# Log global training loss\u001b[39;00m\n", + "File \u001b[1;32mc:\\Users\\leo.breedt\\miniconda3\\envs\\neptune_scale_py_312_base\\Lib\\site-packages\\torch\\_tensor.py:626\u001b[0m, in \u001b[0;36mTensor.backward\u001b[1;34m(self, gradient, retain_graph, create_graph, inputs)\u001b[0m\n\u001b[0;32m 616\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m has_torch_function_unary(\u001b[38;5;28mself\u001b[39m):\n\u001b[0;32m 617\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m handle_torch_function(\n\u001b[0;32m 618\u001b[0m Tensor\u001b[38;5;241m.\u001b[39mbackward,\n\u001b[0;32m 619\u001b[0m (\u001b[38;5;28mself\u001b[39m,),\n\u001b[1;32m (...)\u001b[0m\n\u001b[0;32m 624\u001b[0m inputs\u001b[38;5;241m=\u001b[39minputs,\n\u001b[0;32m 625\u001b[0m )\n\u001b[1;32m--> 626\u001b[0m \u001b[43mtorch\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mautograd\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mbackward\u001b[49m\u001b[43m(\u001b[49m\n\u001b[0;32m 627\u001b[0m \u001b[43m \u001b[49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mgradient\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mretain_graph\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mcreate_graph\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43minputs\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43minputs\u001b[49m\n\u001b[0;32m 628\u001b[0m \u001b[43m\u001b[49m\u001b[43m)\u001b[49m\n", + "File 
\u001b[1;32mc:\\Users\\leo.breedt\\miniconda3\\envs\\neptune_scale_py_312_base\\Lib\\site-packages\\torch\\autograd\\__init__.py:347\u001b[0m, in \u001b[0;36mbackward\u001b[1;34m(tensors, grad_tensors, retain_graph, create_graph, grad_variables, inputs)\u001b[0m\n\u001b[0;32m 342\u001b[0m retain_graph \u001b[38;5;241m=\u001b[39m create_graph\n\u001b[0;32m 344\u001b[0m \u001b[38;5;66;03m# The reason we repeat the same comment below is that\u001b[39;00m\n\u001b[0;32m 345\u001b[0m \u001b[38;5;66;03m# some Python versions print out the first line of a multi-line function\u001b[39;00m\n\u001b[0;32m 346\u001b[0m \u001b[38;5;66;03m# calls in the traceback and some print out the last line\u001b[39;00m\n\u001b[1;32m--> 347\u001b[0m \u001b[43m_engine_run_backward\u001b[49m\u001b[43m(\u001b[49m\n\u001b[0;32m 348\u001b[0m \u001b[43m \u001b[49m\u001b[43mtensors\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 349\u001b[0m \u001b[43m \u001b[49m\u001b[43mgrad_tensors_\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 350\u001b[0m \u001b[43m \u001b[49m\u001b[43mretain_graph\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 351\u001b[0m \u001b[43m \u001b[49m\u001b[43mcreate_graph\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 352\u001b[0m \u001b[43m \u001b[49m\u001b[43minputs\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 353\u001b[0m \u001b[43m \u001b[49m\u001b[43mallow_unreachable\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[38;5;28;43;01mTrue\u001b[39;49;00m\u001b[43m,\u001b[49m\n\u001b[0;32m 354\u001b[0m \u001b[43m \u001b[49m\u001b[43maccumulate_grad\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[38;5;28;43;01mTrue\u001b[39;49;00m\u001b[43m,\u001b[49m\n\u001b[0;32m 355\u001b[0m \u001b[43m\u001b[49m\u001b[43m)\u001b[49m\n", + "File \u001b[1;32mc:\\Users\\leo.breedt\\miniconda3\\envs\\neptune_scale_py_312_base\\Lib\\site-packages\\torch\\autograd\\graph.py:823\u001b[0m, in \u001b[0;36m_engine_run_backward\u001b[1;34m(t_outputs, *args, **kwargs)\u001b[0m\n\u001b[0;32m 821\u001b[0m 
unregister_hooks \u001b[38;5;241m=\u001b[39m _register_logging_hooks_on_whole_graph(t_outputs)\n\u001b[0;32m 822\u001b[0m \u001b[38;5;28;01mtry\u001b[39;00m:\n\u001b[1;32m--> 823\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[43mVariable\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_execution_engine\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mrun_backward\u001b[49m\u001b[43m(\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;66;43;03m# Calls into the C++ engine to run the backward pass\u001b[39;49;00m\n\u001b[0;32m 824\u001b[0m \u001b[43m \u001b[49m\u001b[43mt_outputs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\n\u001b[0;32m 825\u001b[0m \u001b[43m \u001b[49m\u001b[43m)\u001b[49m \u001b[38;5;66;03m# Calls into the C++ engine to run the backward pass\u001b[39;00m\n\u001b[0;32m 826\u001b[0m \u001b[38;5;28;01mfinally\u001b[39;00m:\n\u001b[0;32m 827\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m attach_logging_hooks:\n", + "File \u001b[1;32mc:\\Users\\leo.breedt\\miniconda3\\envs\\neptune_scale_py_312_base\\Lib\\site-packages\\torch\\autograd\\function.py:292\u001b[0m, in \u001b[0;36mBackwardCFunction.apply\u001b[1;34m(self, *args)\u001b[0m\n\u001b[0;32m 287\u001b[0m \u001b[38;5;28;01mclass\u001b[39;00m \u001b[38;5;21;01mBackwardCFunction\u001b[39;00m(_C\u001b[38;5;241m.\u001b[39m_FunctionBase, FunctionCtx, _HookMixin):\n\u001b[0;32m 288\u001b[0m \u001b[38;5;250m \u001b[39m\u001b[38;5;124mr\u001b[39m\u001b[38;5;124;03m\"\"\"\u001b[39;00m\n\u001b[0;32m 289\u001b[0m \u001b[38;5;124;03m This class is used for internal autograd work. 
Do not use.\u001b[39;00m\n\u001b[0;32m 290\u001b[0m \u001b[38;5;124;03m \"\"\"\u001b[39;00m\n\u001b[1;32m--> 292\u001b[0m \u001b[38;5;28;01mdef\u001b[39;00m \u001b[38;5;21mapply\u001b[39m(\u001b[38;5;28mself\u001b[39m, \u001b[38;5;241m*\u001b[39margs):\n\u001b[0;32m 293\u001b[0m \u001b[38;5;250m \u001b[39m\u001b[38;5;124mr\u001b[39m\u001b[38;5;124;03m\"\"\"\u001b[39;00m\n\u001b[0;32m 294\u001b[0m \u001b[38;5;124;03m Apply method used when executing this Node during the backward\u001b[39;00m\n\u001b[0;32m 295\u001b[0m \u001b[38;5;124;03m \"\"\"\u001b[39;00m\n\u001b[0;32m 296\u001b[0m \u001b[38;5;66;03m# _forward_cls is defined by derived class\u001b[39;00m\n\u001b[0;32m 297\u001b[0m \u001b[38;5;66;03m# The user should define either backward or vjp but never both.\u001b[39;00m\n", + "\u001b[1;31mKeyboardInterrupt\u001b[0m: " + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "2025-04-08 09:18:49,998 neptune:ERROR: \n", + "\n", + "NeptuneSeriesStepNonIncreasing: Subsequent steps of a series must be increasing.\n", + "\n", + "This can be caused by either:\n", + "- The step of a series value is smaller than the most recently logged step for this series\n", + "- the step is exactly the same but the value is different\n", + "\n", + "For help, see https://docs-beta.neptune.ai/log_metrics\n", + "\n", + "2025-04-08 09:18:50,001 neptune:ERROR: \n", + "\n", + "NeptuneSeriesStepNonIncreasing: Subsequent steps of a series must be increasing.\n", + "\n", + "This can be caused by either:\n", + "- The step of a series value is smaller than the most recently logged step for this series\n", + "- the step is exactly the same but the value is different\n", + "\n", + "For help, see https://docs-beta.neptune.ai/log_metrics\n", + "\n" + ] + } + ], + "source": [ + "# Register hooks to track gradients for each layer\n", + "def hook_fn(module, grad_input, grad_output):\n", + " layer_name = next(name for name, mod in model.named_modules() if mod is module)\n", + " if 
grad_input[0] is not None: # Check if gradients exist\n", + " grad_norm = grad_input[0].norm().item()\n", + " debugging_gradient_norms[f\"debug/gradient/{layer_name}/norm\"] = grad_norm\n", + "\n", + "# Define dictionary of metrics to log to Neptune\n", + "debugging_gradient_norms = {}\n", + "# Register hooks once before training\n", + "for name, module in model.named_modules():\n", + " module.register_full_backward_hook(hook_fn)\n", + "\n", + "# Create custom Neptune URLS for tutorial steps\n", + "print(f\"View charts in near real-time:\\n{run.get_experiment_url() + '&detailsTab=charts'}\")\n", + "\n", + "step_counter = 0\n", + "# Training loop\n", + "for epoch in range(params[\"epochs\"]):\n", + " total_loss = 0\n", + " for batch in train_dataloader:\n", + " model.train()\n", + " step_counter += 1\n", + " input_ids = batch[\"input_ids\"].to(params[\"device\"])\n", + " labels = batch[\"labels\"].to(params[\"device\"])\n", + " optimizer.zero_grad()\n", + " logits = model(input_ids)\n", + " loss = criterion(logits.view(-1, vocab_size), labels.view(-1))\n", + " loss.backward()\n", + " optimizer.step()\n", + "\n", + " # Log global training loss and layer-wise gradient norms\n", + " run.log_metrics(\n", + " data={\n", + " \"metrics/train/loss\": loss.item(), \n", + " **debugging_gradient_norms\n", + " },\n", + " step=step_counter,\n", + " )\n", + "\n", + "# Close run to ensure all operations are processed\n", + "run.close()" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Step 4: _Analyze and debug training_\n", + "While the model is training, you can start using the Neptune web app to browse your metrics and create custom analyses and visualizations:\n", + "1. To visualize the large number of metrics being logged in near real time, navigate to the **Charts** tab of the active run (_or select link above_).\n", + "2. Filter the metrics using the [advanced regex searching capabilities](https://docs-beta.neptune.ai/charts#filtering-charts). 
For example, enter `gradient & fc & layers.[0-5] & norm` in the search bar. This query filters the metrics for the first 6 layers of the gradients norms of the fully connected layers. You can specify down to exactly the metrics name you want.\n", + "\n", + "![Alt text](tutorial-images/debugging_regex_search.png)\n", + "\n", + "3. Export the filter to a [dashboard](https://docs-beta.neptune.ai/custom_dashboard). The saved dashboard will now only display these metrics during training. This is useful if you know that a certain set of layers can be troublesome during training. \n", + "4. Alternatively, use the [dynamic metric selection](https://docs-beta.neptune.ai/chart_widget#dynamic-metric-selection) and create a new chart widget to display all LSTM layers gradient norms in one chart. Again, use the `(.*gradient)(.*lstm)(.*norm)` query. This makes it easy to have an automatically updating chart that allows you to view all layers on a single chart for rapid debugging in case vanishing or exploding gradients appear. \n", + "\n", + "![Alt text](tutorial-images/debugging_dashboard.png)\n", + "\n", + "5. To document this behavior, create a [custom report](https://docs-beta.neptune.ai/reports) to outline the model training, global metrics, debugging metrics for the model you're training. 
This allows you to keep track of any anomalies but also to see what worked or did not work during training.\n", + "\n", + "![Alt text](tutorial-images/debugging_report.png)\n", + "\n", + "See the pre-configured [example of the training report](https://scale.neptune.ai/leo/pytorch-tutorial/reports/9e79d952-272a-4a38-83e5-27df4dd225ec).\n", + "\n", + "See also: PyTorch layer-wise tracking package [here](TODO:Link to integration for tracking layer-wise metrics)\n" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "neptune_scale_py_312_base", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.12.3" + } + }, + "nbformat": 4, + "nbformat_minor": 2 +} diff --git a/how-to-guides/debug-model-training-runs/setup.py b/how-to-guides/debug-model-training-runs/setup.py new file mode 100644 index 0000000..c66a431 --- /dev/null +++ b/how-to-guides/debug-model-training-runs/setup.py @@ -0,0 +1,226 @@ +import torch +import torch.nn as nn +import torch.optim as optim +from torch.utils.data import DataLoader +from datasets import load_dataset +from typing import Dict, Tuple + +# Model Classes +class SimpleLLM(nn.Module): + """A simple language model with a single LSTM layer.""" + def __init__(self, vocab_size, embed_size, hidden_size, num_layers): + super(SimpleLLM, self).__init__() + self.embedding = nn.Embedding(vocab_size, embed_size) + self.lstm = nn.LSTM(embed_size, hidden_size, num_layers=num_layers, batch_first=True) + self.fc1 = nn.Linear(hidden_size, vocab_size) + + def forward(self, x): + x = self.embedding(x) + lstm_out, _ = self.lstm(x) # LSTM returns output and hidden/cell state tuple + out = self.fc1(lstm_out) # Use the last output from the LSTM + return out + +class MultilayerModel(nn.Module): + """A larger language 
model with multiple LSTM and fully connected layers.""" + def __init__(self, vocab_size, embed_size, hidden_size, num_layers): + super(MultilayerModel, self).__init__() + self.embedding = nn.Embedding(vocab_size, embed_size) + + # Create multiple LSTM layers + self.lstm_layers = nn.ModuleList([ + nn.LSTM(hidden_size if i > 0 else embed_size, + hidden_size, + num_layers=1, + batch_first=True) + for i in range(10) # 10 LSTM layers + ]) + + # Create multiple fully connected layers + self.fc_layers = nn.ModuleList([ + nn.Linear(hidden_size, hidden_size) + for _ in range(9) # 9 FC layers + ]) + + # Final layer to project back to vocab size + self.final_layer = nn.Linear(hidden_size, vocab_size) + + # Add dropout for regularization + self.dropout = nn.Dropout(0.1) + + def forward(self, x): + # Embedding layer + x = self.embedding(x) + + # Process through LSTM layers + for lstm in self.lstm_layers: + x, _ = lstm(x) + x = self.dropout(x) + + # Process through FC layers + for fc in self.fc_layers: + x = fc(x) + x = self.dropout(x) + x = torch.relu(x) + + # Final projection + out = self.final_layer(x) + return out + +# Data Loading Function +def load_and_preprocess_data(params: Dict) -> Tuple[DataLoader, DataLoader, int]: + """ + Load and preprocess the next token prediction dataset from HuggingFace. 
+ + Args: + params (Dict): Dictionary containing parameters for data loading and model configuration + + Returns: + Tuple[DataLoader, DataLoader, int]: Training dataloader, validation dataloader, and vocabulary size + """ + # Download dataset + base_url = "https://huggingface.co/datasets/Na0s/Next_Token_Prediction_dataset/resolve/main/data/" + data_files = { + "train": base_url + "train-00001-of-00067.parquet", + "validation": base_url + "validation-00000-of-00001.parquet", + } + + # Load dataset + data_subset = load_dataset("parquet", data_files=data_files, num_proc=4) + validation_subset = data_subset.get("validation").train_test_split(test_size=0.1) + + print(f"Training samples: {data_subset['train'].num_rows}") + print(f"Validation samples: {validation_subset['test'].num_rows}") + + # Convert to PyTorch format + train_subset = data_subset["train"].with_format( + type="torch", columns=["text", "input_ids", "labels"] + ) + validation_subset = validation_subset["test"].with_format( + type="torch", columns=["text", "input_ids", "labels"] + ) + + # Create dataloaders + train_dataloader = DataLoader(train_subset, batch_size=params["batch_size"], shuffle=True) + val_dataloader = DataLoader(validation_subset, batch_size=params["batch_size"], shuffle=True) + + # Calculate vocabulary size + vocab_size = max([token for sentence in data_subset["train"]["input_ids"] for token in sentence]) + 1 + print(f"Vocabulary size: {vocab_size}") + + return train_dataloader, val_dataloader, vocab_size + +# Helper function to create model +def create_model(params: Dict, use_multilayer: bool = True) -> nn.Module: + """ + Create a model based on the specified parameters. 
+ + Args: + params (Dict): Dictionary containing model parameters + use_multilayer (bool): Whether to use the multilayer model + + Returns: + nn.Module: The created model + """ + if use_multilayer: + return MultilayerModel( + params["vocab_size"], + params["embed_size"], + params["hidden_size"], + params["num_lstm_layers"] + ) + else: + return SimpleLLM( + params["vocab_size"], + params["embed_size"], + params["hidden_size"], + params["num_lstm_layers"] + ) + +# Helper function to create optimizer +def create_optimizer(model: nn.Module, params: Dict) -> optim.Optimizer: + """ + Create an optimizer for the model. + + Args: + model (nn.Module): The model to optimize + params (Dict): Dictionary containing optimizer parameters + + Returns: + optim.Optimizer: The created optimizer + """ + if params["optimizer"] == "Adam": + return optim.Adam(model.parameters(), lr=params["learning_rate"]) + elif params["optimizer"] == "SGD": + return optim.SGD(model.parameters(), lr=params["learning_rate"]) + else: + raise ValueError(f"Unsupported optimizer: {params['optimizer']}") + +# Helper function to create criterion +def create_criterion() -> nn.Module: + """ + Create a loss function for the model. + + Returns: + nn.Module: The created criterion + """ + return nn.CrossEntropyLoss(ignore_index=-100) # Ignore the buffering index of -100 in the dataset + +# Helper function to setup training +def setup_training(params: Dict, use_multilayer: bool = True) -> Tuple[nn.Module, optim.Optimizer, nn.Module, DataLoader, DataLoader, int]: + """ + Setup the complete training environment. 
+ + Args: + params (Dict): Dictionary containing all parameters + use_multilayer (bool): Whether to use the multilayer model + + Returns: + Tuple[nn.Module, optim.Optimizer, nn.Module, DataLoader, DataLoader, int]: + Model, optimizer, criterion, train dataloader, validation dataloader, and vocabulary size + """ + # Load data + train_dataloader, val_dataloader, vocab_size = load_and_preprocess_data(params) + params["vocab_size"] = vocab_size + + # Create model + model = create_model(params, use_multilayer=use_multilayer) + # Move model to device + model.to(params["device"]) + print(f"Model created: {model.__class__.__name__}") + + # Create optimizer + optimizer = create_optimizer(model, params) + print(f"Optimizer: {optimizer.__class__.__name__}") + + # Create criterion + criterion = create_criterion() + print(f"Criterion: {criterion.__class__.__name__}") + + return model, optimizer, criterion, train_dataloader, val_dataloader, vocab_size + +# Explicitly define what should be importable +__all__ = [ + 'SimpleLLM', + 'MultilayerModel', + 'load_and_preprocess_data', + 'create_model', + 'create_optimizer', + 'create_criterion', + 'setup_training' +] + +if __name__ == "__main__": + # Example usage with all required parameters + params = { + "batch_size": 8, + "device": torch.device("cuda" if torch.cuda.is_available() else "cpu"), + "embed_size": 1000, + "hidden_size": 256, + "num_lstm_layers": 3, + "optimizer": "Adam", + "learning_rate": 0.01, + "vocab_size": 50000 # This will be updated by load_and_preprocess_data + } + + # Setup complete training environment + model, optimizer, criterion, train_dataloader, val_dataloader, vocab_size = setup_training(params, use_multilayer=True) \ No newline at end of file diff --git a/how-to-guides/debug-model-training-runs/tutorial-images/debugging_dashboard.png b/how-to-guides/debug-model-training-runs/tutorial-images/debugging_dashboard.png new file mode 100644 index 
0000000000000000000000000000000000000000..719ef1fca137cd6f44136e4f69db0579e897ecc8 GIT binary patch literal 151345 zcmce8WmFwa(B=Ra_dsxW*C4?m1cD|I+$BhGcey|a5Ii^p3GVLh5Fog_ySuYD-<#xp z_niH)KlbdLbGb9j?dk67s_L$%o}Qq0@{*`X1V{h?ph~?FR|Ei9Jph0KA;3ZY!!8z5 z0QrKlSCkY3iU)~yAQvDLQ8`fnC<{lv*N25%Big)Cw+8^Uj$a?B9_!o>0PvJ0B`&Ju zqO-r?s`dQhQTP~_pM~rVMS3>fm%A?si8{_BDCGs!ri;4SG2_Y{jErYa{e5!=vrVdu zam`a~G7SkzzMJh6<4PbNB@FS`7@`ic|!BPA@%F*CT|K1blZU<;u93ppF)_Cp=@C4zs1Ti{~uNM0?j~5j|_UEB&R)+ z#fkDy1?3_|pi(q6G`xEe)=KLo4XwXT6d%`9V+L1MRCJU(Z?4S$C<=U8i3z36AE*rg zmzPf0SNT)N2i~#zJ#PzSFaJJ|BLAomr&UPEktoH(1NvKX^7773SW6R^>OM5!;?~`P z$79R}q z~$^91nT|>j;@T;dUzP|k1!Y*)qefdp{1V6UCo+TC12n&;*ZT{@IIom4J z?|?hs8I#;se7%F<&49<4C5={2R`^Bzvwqjk^j*nS>E@QDPri$2cIR5WGvE)??t`Z(5*ujf zc<}(F@m`QsTcGq@`sG{lH?;BS;bjkb;*7!HwatiX?UBWTke2Gq@hF;;5tmjS_O;7E zPt&eQGkqsV?3mwWHpw$9lH5WWq?UML+EO*2Y4=U}O>1d{L{n;(dL7^p$-a^FDjVP5 zUkSE)@MAmcdWpB7ATWJ1eW${vGndz_2$awl9G_`+R8C=ku|0l}EY;Jj&i{!l%IMWv zj+uiJo`jfP{AVcBclkWl@z6+L!twL-3-H{r_73#5R|o1s0q=7_9b+9Zdyhwi(Q$Ex z(-CcW0;Xx#m?|bQn5Ba54$wenY7+v$i^UoWzOdRrJW@%i_iA##g#lLO-;j^xjo|{o z&m^ALTk;2v4-yab7~ZaK9Qy|c4wjBmIyxjN-jA-|guO|c+%CjKM8MNsx2NGN89BZu zvo&_tNI=$2ue{LbO`d-Z*4>eL`T6n;<1$Sf}PsERU5NWZaQsywV z_F1-92cJcH@gXs8z3u4DQCW=-3TKP(#yAor2$}MV=uqulT06&Syt-Cmnz7!2pXtpy zP&@8@C!5srOd;oDg%QBs%3d39Ms=~=eJAU}h%~di2eQIzy0!!%PDx)@d+RNzTM`Zy zcL={&VO-|=NUuYx@Nk^)+hO(+d`r$+`G9tLX=CFs+yoEM($ezs@$GU*;!bdAmP-Q# zvenNCX;C+t;_aS0A7VoTw-+((Qt|`}!p^~{dPPVUM#8Uw_n?MuJm*U5OqsOpgs{B4us6lr7Va49!(tdKK-j+qTQ% zZ`i$dw37DYVXS|#4uqA0+3r>|;^KK?stf~U8g`u8WbQ4R7~X9WDjh-9!meDX2KbyX z{&s~E&tYya+Aj?Y7SR(4l?tD7y^9HrT^pKcT?Pd`U5_WiZEp8hE;L%Zz&eSN04hBcv2B(kcCML8%>&`V5Afm4Ra#dbhUnBg` z3oP(GE+gYy>w2N#b*_w^o!#+%#n9?%SMc%KSx20%SJ39qq~HaMWikq9Q%MqnsHOu_ zwHyX*Hq+eW2)B>e6#hQN0Jx%M%~{TcmmniRRnUuBsPfs7Mrn)NOyKRDu+4GUv=yEH z98P>ey=LPRshVPfrHOUhSJna8dZ5^{%Z)sZ`&~V*rjv?6gGyA`F^?+6_jU~RjeTjR z8y&2ycFXoTHE!uK&^PU%7`uwB34LRm!$xFvf6v9aXk`@@uWG<7$LGhx zRSQea#Iscsaj(u#&4R$iqfxh9ZDvsWOG~=J?P3q{9Z@-%r_c|#PO!lF*M4-Zp#f2+ 
zOJ>iOC`E(Z5HRYtn=p`8S&0pqCiZS8VDV2|c~Ek*N@T&d!Mf za?%?e^6Tw8PhM;+zj(@k(zD>1ZNrGjY|338<#=8T3g1rzIq>0l-EoL_>6Mg(jvMfw z_0?rj3=9k~*mapIFuhouqSNnK<ZUS38iX?#@v;j0Mg2v5SPjo%=l2DDJgUO%6>j2wp`;+u#Z#Q*CN>K% zy3T;hNGBK`CF?PsK{kXW3x2B zjJK;jvNycFA$+LkvyyU>pHey4?~i^4G8k6?m)q=5qjm>zf<>qs69tg`nA~Y`G+e;Qh+Yw5@bBuJP1%gH z{8^%M6l*~3Uzee;wk}pp*RCAZ2i0P$^H@>Ps}pcvP3+bT>!yU5Kl!N1wd_q**j_eb z<3FBNObxfBjvp*s)SnHhU;BQtrSvp4>11>6skt8niUdh)Gb9-EVieTdZSWy4{*ij7O*IaO<(LyW$3ne zt*#P$4Hqbb_Vo)M26)j?pBdLI1sl==RNp%D6qV0%$pn-_Pho)Nxsg%{^FHO16=$hP zw>bb@UHkT%a^bTO%em?1uo^}5cE@nq zU4!c(w!;*JTI6((8AP36J>>!wGD^76B&WP^1~8i+jR96 za~txLiY5OUp30E7G7^T&S!C%Ny;$bDnu;%6!1n15m+qT0hfbfHkRuLrd%3$ujR zsZODXJ~T}uFFNb}Lz+GvDiFuLyiigV^tkO`aNN}|wlFo?blr2>)Gh4yX9{_N*E)%8 zWz)YHHbp|}|J}z2fk*%wOJ^GV9u9`b6Y~X&hy^GlhCEO`C}x*0^?5X@jjcVd#rhW(FfzMg;1(QV5%^}wi>fvhKbeV|Iqz{5tKEo9zFV@iIJ7-XYCkEl> z+m77tc28}u*mqB+Fi0qg&-RItkK4Pk5BRd^r;j(yaw%NNjx8I7+E526b$a&N5q9sB zk5)+ULL^ILb^pf1sH8B{Zu)pM^7*yA`BTwT?2?6u`KGrR5`@V!I*JN6P$l{q!;%&+ zvUKrc<%?XrFB&n3GD_OO4{uah1-1u225wUOgvnVUetB9NS)IWpT zoD5I?;ZbtBl9NCaH~e#)-J|qa*~UZ6n|E&qHrNpKS@A#LKn*p9k*&BSFuZ?z79Q5aZkYf6V-Et`@!og4{)Ho=TVkoPuC&-LcKd%0!Xk5e+uvZ zVoUf3DZy06m2Bmv$@=)_E!Hd(y|Bn1KP|4@4pe`iYY|WJ6vFWF%R$6qR@dc zh9L_?-)v^SFLI83$(nJUG^nv;5iryK3I+uOxD`ASWa9(tI)5Lg2@6gY zckXc)Vv7TI4gN(0sTiC4mt&|lvPLi7Y5%8J>*X*gx>7}rO7$%H8H!wbcUQ=*siefA z@bFjN4+okZZEd3a2Fh7kS+!r?h`otkhCV%3;@Z+a>tr!5gZm3As^@QW!>Dn5*aB)_ zU}-X&*a8jv$*wM^Cx{02p|kf7@=UHKq0<)6{>3ZHmGo1l#i(o@aW$Dt6cBw&rXGtX zJ5+&RWv<;0mO739^q(S~FRvYNn&CfVTmAlMU|)oP^3I#d-6UmN>xfv3Ah5Mn1O&&@ zCaSM~+l`Nag3|f*p?hfx9lnf;4Z}yK%UY7+p-Rt1ZhXjI_bSs`I3(da{HG~>i?_J$ zDhh4_I>>_H1arks>T>KtG{~uUa6lEeI#01y zw8cHnY{KySp13Ig9c>PlRb26B1al2L=U+4pg&&$dHgjKxZn-j9ayrsL6svD+O6o zl33uPel$JDkm=xm%z}XVC1@P3FyZ=Q4JN*V7p-h6*jBD8v%XlH5j%;Xh(H*}F5W7* zb;<&`+me(FmUEQQUaXOpSqSbGC)4x7^995Ysy~iPI8HoHs3jnP2A=RZ=lU%VObp)t z3ilw(li}pTex6SFS5QQ^2^pgQgspETYnZcLpr+(acKjwonC>G{EAeM=U~Vsa>Nh%0 zbRue;ni$NmUV!#u(R=2bSHB{uxl>sYzuPcWfKN#NWbV9fcI|lnJZbM!w9F13Kf98} 
zWG-uzO_udHpXDK+Qp=>}8g2|^Cy;&&gH2Q{Et%nEuN}b>* zen?EvXao30|M<>>Iz9Tr7DEOacl+{flia?pZ>#?;_3aRMg**%yvH97Ol=I(~V)Krne$Sx|29ml#2dy9HkSYDczkp^tGyFYCJx7UNM-MNm+|0P3UUlmj<(6 z$q`Oq!-`&lm2)okZsZYKCCP7}@)EUDNW!OLH^`ctWn}w%+a{Sl)_J+ps}Z|B576N1 zd566m(MZ%ZNA8i5dU#fuTC&6{!eU>_B>5bNr^k2VfgOSE^oFK6s@&T&DEx5|s~K+Y z#r@zv3W=IfCRJPfnz(yzq&2#f++|wQCETU4i31)FpDdvCmvVqC`eRiO7t!_}R|BS> zh+20CVqK0VQcV4@3$;C-OEy*>m4u+Qx?7I2nbP3qTKMhyVO6I?^Fh`E9T6X*GTwPIk$K=3yjI`o^ z+GhWgz1AHn?3eQk6+IWGzWut}%1vF5IsV({L^ID|$^!-?+c3;kUZ%dsGG1!Gg&`u53 zk6#e4l$40KXDW{xp0XEUe4=A=VbH>bBi3T@FA-(T9JpzoI(7>^? zVZ!* z(}3@Etqf(c1&3!@$I3G%ONdaTZhl`JSJ;>yZJzq|Nfny;JbL_u(C?>~h@nVAgxfd(51V4h9DEV08dKXWu;K4kG|Ght8 zttm3BwXT47@$C$p%?H&+4}rB~r%Fb#jU(dOCZIq#HVz6v0(+g_h$2=W$U=Fztv1mn z!PrHZo82iJaf2OgY%r2`L+Tr?W74BZ5eQIqf#wVvidF(61RA+$NObaDQx!mH7k8~J z#6G22_CNxN#MjZbq4fYfZf*n-Xo{-@X>AYoGfnDTKQF?dE%FBiWg$+K@2A|JCjJy} zo}ln9txyagxrC2wsjT<+P-L+5#%2*JM#;CYwBc(FL|J=#zBiHH17!5DJ2AWxqK1t8 zVaftqp(1+T+BS$lv!yl^5H~6-Fi3t551d~{E-|iHk&jMz7i!cQMx5UG&95LY50_yB z^t?{9)2Gjf1Y!|e3CX=liwT(LP%Jf}dKn}V4?C*%U=nz7G@;X8X46w@z|0wFFe-aT zmL2`0CgaoBmF<4@9H5%W6x7O{1tiN(uQzQc_Q0Ta*S9WsHc65l*MnQjv=Vr@4A9P$ zk2&qBO_93{WYk(uqU?&^WBITE4;-o4wYplyu{DEL` z+89NhO2^84h?M-MQqo z8A1B=_6-M@UjxU-sCnUcx@N5gc`NbXHFfFi^`w<2^LV05D-#QyLO$!yK<4S(5@gM{ zAeB9iNS6C-o4X}6J>%T_*|b~I=fF|~x!N!xZ>8wcR@DJb`%GVNt>5`Me=SvY|LB;t zjBsb?&K2_uQO>Vb`4*SS|IFyST-GSt2Xy`}&qGyWNOj{RTKh^`uAz=QS`S+zu}%U7 zwSC(j+|om99-B@?rig?GJj?5Z2IjTE+?4#t)S!@%ugcfw(URTO9eUJ-_u*wkxph`l zdFLF8*=b)LGORY@sw9)3fkus8$;V#?B|Y<51G4N^q2GXy?rF@5JLj!^-iM(khA&FL z?|14_uK?+J8Jl#KJgB|?OY%S@skJ1A%8xk>E~$OW{64;m;JnD5oxE%bir_+z5C<%9 zUZ3oX?Pn0piOnMAaRJa(mFYh;;Us@ntE0XY4sbMrnyqvK4Q@cvWG|rY zMOrpb-Iu*CFyw6H&XT3|WsNH*7wb&QFpID;Pm>~9t$xt(0u0Q0WM{o1RJ~^@OWyn|G%p2d!fzo3 z7e9VL;(VI#_uzIjBLpT66f9b(HzA!m8|)X5gmUSJ%8ZuO#wB{*5`3rvk4@=96|wb) zS%OjkT)dEZT*DE*qzbWc197_cdb7PV3NE4&dBo!R?e_&l84jO>p9kU-`mg_-&)s%o z@pOkngTlKFe1RX+iD|jPv-5HHNr9g7oqu96ov4OUdtaddQPf=yY5yxe@OQt=r4Y1T zON%4kFm*40I;p2)=;52I^+J!el>U?U5a;DOCGGs7x+->g=K>upO= 
zPgRuq^{cb^X3A`Vh4*(&>-D+2U!ksG0$(FdvdBm5Ha~W~C6#BL4EQOMazAtH1?C~~ zsv?Fn+3GdId{+9C5S$NJsC<6F8qhAi<1aQJK9^`SLwDmrG{Z#Fvs>{`V5vlvdM9O( z|DMm;TghTHckhl>&Wl=3sY(+ds7QMK2sqSr~t?~5<9VbHH^*)*q_pf z6#^Awk95-+9RU!TMVGiZOEj(LSM&JYE2%|i&b>|#{`XcTPo4Hq6YT5)&Zl(>xA89} zFqZ~FD3huPTP`d%Ho8rgND&HwAWkKSJDJO;sat0!H%b@zjk4P#3I$h*9$w!}@ z@NnFpYgS}>HSe6fd3q^%w*53b(}78V@bS|whwE3SI{5-G#UjYcF-EvS*&gLJ6dHZ% z_uUd*N!OeO;R9#WFxUzoWkRI&A@KbT^U284girZ#Lf<9%g-kH}*>AHU@M#w)%L?Pe z4|tj6^KK&JVk#YN?O^QC4-JAh{=*)b$N_NZ)fnHN zW^rHtSc%u%8L6D?1SaDXk@c=TQ0MDlWp#k*nIM^h|8PgtWgZaq^0Jj_!9@uKr8OKr zsX|a*54fc)D0Hk??ti%v6ATdC&_DNf>h(`hsjvqy&-m3#=#jDXhPVv=PeiUzWascq zGceyf&iBZ4tnf(oK0Yk1o?#+9 zCTUR_l>%3M7-t2wIRCb%tO8yu5lX%o#ek5JAAaY);YAcAg#L|v{G+1fvM1VlqPic; zixvk(_`knv{~<=!guTrPhwz}-Q26Y({+>Sm#T_u!eV%h@QI29|9as4$>qBzq3&~l` zIo~ki{t63|3$Gec57{p3$%*Y*Gc>qDaCnxacPQ3ZCQqSaEuM@NV+tP$rq)pc;_;D^`9%VPe5D7kH=a)9{}f~<>tqG^BpR}Qiw5T3BA zme@a2KjiPY-ZTvu7R){bG>eLeVZLJt`4q|um)(qheKA)3Z%??$Afh}V2AiltqkF?@ z?Vx>QjclI|CidCUh3Uu+HvhLtTy(#HjCiK;M2|z-vOM{n`3HqEDZ{Hl?_fD`rJkmy z`}Aq?&{p?g{vf&Etw8lXkoXU=iOUYv4YVJWv10IV{R*Kg2>)d}u@kn&#>PUhAxMP# zH~B$9p`_%i_kRdDOC=C}?|tWbhlY9}31m7}9@pH!T%U4CGB%2a#m3&=PxH)p)!>iB zD+b_7`xg@eXh~DaxTg@4T9$&Sj|wr9xbg{Gjg1#XPMV%wyVQsU<5-o}@bK%!2NI-} zmvhY5$ApIrF!A!gY;TzGG{F7yw5S1^FHa92OQw}frrnmTHa0SHOYxwz`P#-RU^`UP zCHfW@Nm5dnF^03Sz90qLGP3IK{~4VRUuTrq55=Lmy<6T}6g&QENG%qkLB&LLObj^> zE9qj9Cc6tYg$SBKlV**Wm5xw<|&6`jccrcyYmjmN;3cXxU|N=B&dg4b7tK! zR^dM_D;^r3V}d+C*uwl5CQDG#2atiGfd}_wJj8~ZyJQ#R!=*0c0!Bv0S5i`lM7*~# zfl5UC2MUUc?U;WkY9H8xvAQABX&a~2?b$qXreNGN4h3OsY;5%Q)d&4tI_iYnD2a?L z`BKg?=PgW;?|tnI^V{l_KmQ^kDNADs-7c4G&*v3QTPXj)9Xp;)dcsa(c3L&9o&FH{ z_3N{5L2o;c14uA%F%>ACm#;Fm#~KLtUj5dM$k+Mb$UN%Ju)eZp2mMru@?b|zEuN*~ z#+pEnn=-UB@xK01QYvTw&k@uU_S-B7A{gLHr%JY#t9YJ*LX#*WA!Vqkvkc;-8R_X? 
zpLV^ln8yy9TD+$%)*ZrZ7*~HCmr>?4^5Xn}99~ioPH$dUNPK*QqO1V*HKi;{5l6Lw z6T0{HMZDh;=chv@7~eATmxzo#fd^F|hQwY@YXy3)me;3uR9sWPB(T23!2M$>6T-?Y zqVGt@l@NU!1+De=8;g255=CpodR6($>_Qr9TjA0%CtcC1VGwSl(H<_DJ}uc}g6n@u zTr!{b>|2Tqb&XsbS@GN2uGzT?(u6-^oFH_6UVn~9%~HZ#zxO@tAkL>i?XLHYht1^W zl%-J_2fA;n8tgVj%41s&mkaHdn(G?F72pcjn{ z#6{wbzSny5*IQI~0@aDDJWqrH(ohZvT~mxXZA%NV!1C%Met3yicpM2)5Kqek56Kng zR3W+0Sm}zzGsVH;iW8xA@`^{L5VGG!Vdk)!!EMmvS~hVDIazJ!P&cs8=mM%WzLr@} zDFfiAN{n*{3>^ORR>q`BeXF$k`&Q*bPx1UQK37X3v?kJ!X6*-d*P`>6y}=z4j?6Q~ zcFgLwAj^+9>9M05QsO9bXb5@uYNwV;kb9|46 zcJ0oUazt+D?oofcUPmYVM>4$L$x~*{Nxx)bs(_Ssz>6a`Ar}YmdGix!;EqLoTkF#} zF{wefOWoV8lx==Ta_;Ut=ka^?g_<{yVZrHpMW$@7phVd&KH95Mqb?($3)`dhLz^QWEzsC2IC(c zjmJ$4Ya@@9D%9ev2}cfI1#Z~d1ykQyK7LbH_40!9n#S=dEF^?v0|yETuk|%Sr^ra1 zuYV6xITO)rJ*IgLiD-6hZ}eG1DK~?tMG;K74dW6#aedIisgQS*f}8SYL*g)}DNYLB zsI*1#CrFuFN;IwrlsG7hs!R%ch1(xp3IsPA_^wI1-65zZkvX5VvTU#QR$ts(Y7(7B z*EL9=MV#Nc4G^ACq#x3mJ7|g9>qDk2^tS*hAU+Szs&lfc6a?|V9o|lN}5I` zI$+xJXnAODsa0sf3Hx=R4j)#qv-a!AQRj}a_$zJ4C+tjvg~pt+%gRP9*B^my<}{Bu z8Ap+oP6QJ7j)G{^drsJ{wwTv*UVR;(6ZUaeGBqy#B!Y02e^?AD`*8$(| z?Cu$02VHD%z_jv>u4ngTB5=yng*vOa1Np3d!xQQY3u$+Y@!Qht2G+;a$R7&^!kzmy zP81Wp6cq+Sozp!_ZI`tIJ9r{|xB=Xaf$;54(w4+DWSQ_lr~Vzug{yFIZfXg%X+T84 z9gOm{b;VZU7bGMk8|n*&YOA@hZ$urL_YzLrUendYs){A6mzI9WDgU^gH+Dj3CssnH znl{L>*<=Yk$ivl!;At)RbuwILJ&a&u;x^|1pYKBh?eiwa`nj6Wmy@^K6GZFj%=f5H zlTND{g;+v_9l#`xLk8>xt0Co`;(}k<4tk#PKCAs46xzed=u-Cgt$63-{vE2k*!>~o z5iCI3nPS)3wW|=JFsftdl5=updxk2Ls(Y=5{tySYRDFrL8$M?PvtGEL%lyhKkjxoS z*F33V9YQjvT2pzN;7y4C6kWl8B5j>_F+4Ofsw>o9cp-=@Nl88PH7KgM-BbF-AT2F9 z_2v@7M_54pdPmij%1DA&N7oAn%OAjmpfiaVa|vOST(2b`wiPFOr?2Vi0+pRRQGeP3r1(<28-fD`?R?!X;#eszJF`J76x zq1fBks-dr1{}3vEdvcR+0o-DsBVg%7O_E3ZPkz>7pFzg9ZB`-{;78yxZ^_;PsM4epFa5BjPu_dr6Y=q3%G2Vi7XLsw?tC?&m@>$Pj+?;)!A^-IjmAN++GCK&GbE6 zQFtuWTCdsxrvPqna^gdqLZhamX}|}PN6b0vi_Tx3;gAz+g!3s7n*`!8KBC0VndnhZS4DM2ia}E0Tjh|5S>;<-CFcw-!X+`n?Dq9C^AWiv|?yh-VWC?A8bp z@g@V#l$7>O4Y1p)2f51DaXGMc_kJ&(yHDLOo6@{*Dl z`(mb)n{{f?j>+waaOqP z7K8rI@CqZ~ 
zKzKuH1D-|-*Ddt02G-31z4f{*!{+Xi*y7>H$>L^(7Q+06U+W1o_>*HcrXt()ut!1t z`gky%|Jr_iA^KHqVQ_7_TeL7IJ*w%*Xm2?al{Ty!n$qLL4ASGl`bgGHO@#MBpx3%- z4R8*^@TP~cbnnk#+c;p@Ksmol3mHI-kfy@MCVRwnt=wd-8Fru5b0M5@Lay07KK{By zVDE)H`S=yMA`jQk6@5(f*Sz^TVqfl+$ux_iGT}PmU7Zbjb;-PLe!J79+XrsQ8BGEA zPbFsf->Wn+o!JPCM)23?HBRl*I*?*)cEwv3ag3_^H(o`j!E7OV+W?vfvTJT;oT)b0WIXRqeZ~`vpJf5GzeW zY*kd4X)}W3Cmr#(@IajET0-5PEu9QWvKuYFf^}1fcTjjxQX~OxK|xWT?5?td6K#4l zX2BN{m~}-M{*#?k(+|&~=$OSu(KRCr5xLnKOK}5X65>Nh?gsd(`WYY~BxJ?6CGdzGCZx0w|7mXalnBQPSvKX>o=i z5mT3WdSCIXOUPlqUvP#4bLY{QuVH0r%EvE2P_1Ph*E?6HmN!u2>t3qzznp7=048}9 zt?l}6?s-C-BglP-XG%^wOW-oan-MC+g=EmX6y-7d-36pPa!Ej!DJe&UIh zBFucu^oUm~+6mCgh;Wv81xVkZ^q-Xyt?!~4;-J0WlYzlYVq!Y@s<@=Ai#+w!FgIHe z4sjY2HG~_X7)nI>h-VK$zF^Q=nH^kIj5cJjVX%XFQTXH;h>w05O;UF#2zA!XC>C*V zT1`EBMFKbjy}_-GB0D1oR^i((9Y75;;Mu1{u*8ka*&mBQLQThmAltc9 zCW5+IvpzzX3Lfxi#r0GO`Szr`&Qpn|t?3YwhkY$Wo&jU!5#0lJiwD&`-<`~z2C)$5 zNqCnauui^Pgt$F9LFy&=S#PnL2`Iuiog^CbW>=FMAthbS+u+Bny8kFJtm*Yl=oob$ z2E52>AfHzI*`M#jN|OF))#yPRZkZr5prJphOY>&qxrSUvQi=4G{C#3hmi_z;=|Xo) z;L%fHch1vGrf}$Dsw1aUu4g_o$$n**cKql2tf8xecQ>!|&R51;}UAJ=om7 zpL;^3?SMd`@b3tG(1rLJV@TuE>botgmQ;r8qDc9izp^U+jvh0fm9E8sYSA&<-<06k z^bftJ0glK8M=obEKd!H5;cU+czyz!ZC6-%BVcAG!u*0x@M5qiOd`pO8O+~ijAW+SC zP(Kn^#8&BSul>@e$uzjSx0T$;E>DHTrX?24cyPOSRW#><;wUrhIk|&_g}F^_$zQ7J zV>K7232H5BEsg!?F^-uDHJYh!Oq1N#s1ER8QR>7f0=JmCeL~2T?<(vfRU^O$3W%qV zqL0P!U?-toVN&|3o|wBIS+I=(D3U`SH!j}9LGz0pE7qrY`3W!K>Me7dot1nY&?Kt6 zo;ymx+qYHv@AEk#)8i5ne!-IL=%NXZj*ecI^`Q7Hd=9Sd&1{UfVbX4fRA zMKErM3f*jc+_GGcFADbR8EKk@QjzV(!ky$L2sTF#Op_KwzRV9>di+WcEG0ebBcv20g2$UW z6KhMaRWUDq5MG}LbiC<{qA@rnf+Qb#HiHNh@8Q>k@_cz#>hY#I-m{Rxm~8p_)$@W6kP2yA-9X z>L*a+=_G$VHGIHkcwW#@QKvIweIr&Xi?+`ZIxS@hHrSmJ#buqi(8C;hHYt%``syaT zd9B6vG*V6Ac{L;=1j_`F$KTa>uf+)GnGb(i(ylKi@jfJOd{I?_J(+atMYFLoe87f$e#U2Fzb}RJlJ>rzCOfck_J3XQp8(QK51| z#>BE1fffwV6-5eR-&v&OzZ&C;qkjsi5xsVUpg+#B?W(_RgJRFjKSeY=kA@}5ziw!n z#RC@dSi+s(tE&5a8A{%Q{ZlvD!+rD?Nma_{0Cdipf~KtRBt|&#Z-9tP140nX9o6EK zZ9PRdCP)yB{EeFhj;p~;B1}SfXYbWu{s8U_eu%z^PEJmA%*D|6yqd)bdCqfOECos> 
z4UI3sam8t`gK>yp|1k7$3;}cBfE<1;Ep6N5VfM@{6ShEVvrK6jnaJ1_AHRUGwyufi zBq@jgL0@?h;}EH-sp(i)B#fO{I5;b8t}cnA=NTo{UGWXg)4S79N^2?tGNOnRUzj6V zAE_K)o`NeXIEjdfyB7bzUJ~qJu8hn~5Ves}t*QfhRu(lKqj_n-mq6Llni@YpKgw@f z-7O;K&SsfYoae)M+S(}XJEx$MJDj2cXz*gChRoF*efg=TT^!up^6seu-J(35R#Bqjze0TmTkZkMTOKw$Ws zZ+g1HImtChTBg5Kx$q0+U!Hm(V&4lH%gQ?TIJb>6w`ohkfT-O|?%t52Uya8pM*2B!GhB2JXCsmzU7tBD0ulo!nC# zB4qpHi;t_&JUl$LT-G!!|4<{--2x$`a`PJNrM9*X?`6APWm`-jO?@mIVS6{+=ix30 zLYZTTPH==l{84wkcCD=?#V9J3H{DTBL=3*0<6CX>k;53Cfmtj%#A^5ll2s5i9D4>j zn5+3*P{4UOPv5!FPBCAn8OZO9JkmJ<+ia;3wD6;kqd6&9Aw$Ao9Ns%Hm>^^?LA{re zuC+sSEwmZB{+4p%^tmk;MLN<#Uw_X{Cy_-^Z%-FvHM%ZTS|)a_OBP6zBGYoIuOIi{ zIxB!`^$Hrbrd>y%HI~$Ctm?^mP6ue+N@W%s%Py|JggYAM2qEBlHsl#c&XU(K+T3H$ zjcmhNYIOJ}grEvVvh#I}4d)e2Vr_kUO=$RtDj_N9d}9J*v4r>s6?JudyR%bUx=X82 zP)Yh%#pQSV+UuJebB+7I(cqa_Icba+mb+h$nq5gDJ4d^pE_)2DwmB9uHPO&6)t)vK zQAj7zbB3r^^3-K)wyeHGHkm5qNMjIbd{(eNkciuA=}>)p9VlZ-vDQ%Ma@vpFTDviQ zwduFI`X$RVe9O4F%|E}`S_Q6pz!Td1Yq!BoGc_I)OZD(cEmm)BN>G4bcW^KO>`ZYS zn~$>$cpAsn1_t(KLb55}-oEMHwMKfUfegh2@bUuR-||RaSv{6Qv=JAA{41aq{HQ-x za#YUC8?x85H@mwZ?>Ttip%(>QKC}d^2JNib(g@+d9dtu>J~V}ygzOG9IO93WkmM1* z-%8RwK7`+U(O(2H(hg+3v(vLkH!f6v|6B7fkbUy%nvh(_U`hj+Yo+=I_IuXmPlwkf zDNqnYpMm*B^#0%j!CdK@RiR0bR7^+OL!YdRJNo+iZ1z-yI5;Yn_t(|**dF4xvs7lgA1U#%2{TAsRAb~ z46Qc)>*qqkZqg?!$5eQ}o2u8gCxR$9V;1Y18ZCp`7cgqnRG{twb}Hg(b&n>e=XeJq82T4P}x$hN5X^$93&q_)Jrt*}HeS;X!r+!?0&K zPOXX0Zw>Rbml_}7(aAaY;C|^`P8ClpMQiUjr^OVS#h8GFP@tO}#Dk>|NQ*F=t_O zDKdhH$;C(~&dk4bvdI@faV_P6_+$V`zMnNXPD_!{g`7YGkT(FZxx8atdB2+?h!^a6 zmtJPQ3kGFES?c>@my zzzg2EOJ_#Xg(4$oM|=e@42EdGq280%pTuK_@Y$jtk`#nw_Yw+!sdvLK z^}aY@blU%+cdY@By~NlNCTJk9`ckFk%i%(SN11@6WWmwriBSAJ&?lkz#}I7U!=OhP zFuFv@4)ij#6X`qoZqn-m9^lcCU92x5l)qA*fS&JA{bdBKM~Vw%PSv`$OJ;kij2l8+ z%%B}{DL5HChgUC-J8Tw0Sm562ef;L4StjFyWGF$Nn=%`H?8pJZ=V8C4Xy{2~-R4ju z(!w4RKs&|z?Ump^gyCJkxtp4mof$EUY`0cOmeXo$2`9-BGRCTqF|Kd+1bE|2A9A|# z%<|n?3R65tgzWMaqs$f{uC_x`KKSRwt3IO-z%jlMjy9k*eI^WftpfRu?GuaPVG)tf*m*|WYWSH`hEn}V`;YQ@%NuwM5xHE*(is+*W(X@)1 
zt+Dj*a4>=kX?nc66LmPp$EC3J+U-aY@4twuI7E-oAf9^LS6i2r!v_G!7`;;HR2)p@zK zt)sWUz@;<2&N5!pn3<~#HJ`AMq!n63Y}IH$_Zrdd!kZ|$LD=kk!o3yCW#eKfBKjLc zu?{5OjF7C|?Z=?fiS0+v?L&%RwQs45OduX^o4+Nl7s+SP?2xtOVSYtenhkOJ>py;M ze%x)-(CtAxII$Ua5g>X&B2=W=k>hB-a7T^~YM+@=*YXeCO=xcyD|~Rv*vyJ`Ay==k zczgczqTe!43QH0Y`CrVvbyU<**ET$KNOz-xARUsDBAwDmhk(){ok~lIV1a<7(jeV6 zbc2L+ch>+zd}q+-eeQd$Z@qtfe|@tSE*NLd@5Da)?6dc^uU(ow9KgowWhkOdH?C?2 z&ii!qhxZPUiMU=`y~y@v6GLLW+I|;QIH%{XbX=l@?A`g_r zn&JyyRG$XpShg14_=L%v#)1yA^S#xmy#((Z*WOrZd$>>vEwl2q+sm(7M3UcvdlFwy zX=CJ4P8{|Vdur%P2%s3uJ_l~?cF`lD?7Yop*0~WK-a-?(_LD91M}(uU!h%_Gz>4sl zpK8YdIrF3Ju!95H4m{Y_Clh5J-NmfaGkEnv1dIO{vS~uVfO4xAY$SngOIU4-f8S$A z_Ek<+Y@sYeeyIew&@r+xV)(4$ie5;#(1$S z%y_!iL(MSLb#GQ|d%B(+ya|=Tl_&Qt^A>7p8`sEqx|aK7w*N;xkAlP3($_*Qy7lr~>F9_vj1G?qSdHaK`L#*ZU@Ial6WhtH z#16VoDZWBPNo^en-yl^LNV^2si>{l3%NL_lx?@3uyexDFkoWHmpTb$uhWKd}D^49k z4lHCBnlX?M=270foXw|HO%V?Q2WFaAsa1{p0l(+7?`P`SJI^s2!KK>2`*9{Ch@lB% zQD;Ly4p zFAX}+0|0jNP0frR?I=bG)ReZq*%8}KZ(O`E=}bv??nGtu<#gI1>{;d4v}@lJf6Cu6 zqxQNdep*w5FkL@5eTw%fPS{lB1;Mi;dG?r(9b*wN7 zLJ)>zuB{$3-9TqKrhyLeZGj+G<=An^#BD9Yt@WZYXhMcN$}6u$-57fo^l}ygP94|PQ;HN zZY^~;Xp8*)}vb0}05*U<{& zAd*sL@Pl}fWLpJi620gAy%$y!8#fSFk+nUdYE&Cz)hM?ZR0{ktvzWaff5g|*nEiVy zNJ8lyTK4Ctuc*884jEp|B#PYgJ#j)+}8&R*#n4auN2E!^aKlPb`C7WU=jj0F*~@Z#U$-m*-uE%t6z8uTm8C@L z&WkY;tzN7vF|L0?+L{#$bR<@SB+PH)V{jiCy>eRmmetBLEaw2y`POmdj=$(v69_cU z(^c3t25;P>C8Wv3H~WF%0sSra*tF+VsgsM_NNysLqA4*e;M=U-PyGvSKY+EzpRpR~9U2-Mw|LoVq48cv%&oj^6&jw`?*%{kQiM6)_z)W0=e)qnkNZxw4~)}cx>I&@lrN)b zdz*&Oc8p|asyrkwFS2mPT#>`b^;FtaBrv6!c-aMihk#@nMW~imeRC_LgW6L@+<(~3 ziR?fjcSZI@veVP=yMvRT8KV?mnm<~5zNF$Wf34E;KU0*kzre|cqK1#JjFo~DOQu1| z9g^dg+x_SH1?|zRuRi=N)b=9uT2)3;I@*3JT9j+6KsO z=4`N5G7R}P53}f7J6@LI8=8D%k2oK4Nn-0W8*%A^J2e-ODuX~`pR7-cJF$w;el5Fe z9<-4lk;R3Qo&9ACe@r}^*glEuyXMw?T1DsxFDi1Nu~?18acbyNi5&f#$kIo?`aTxJ z!OmYqossJdW-xSh$(6lc@XFr2LiGlmO@+n`!$p?iDomuL3bE6_Lz(H1T2q-lnr?hr z=;M5{sP}qw9hZJ|vP}e>)l49;*HqyR^*&|`m(OOlvdC9AKV7-a8utAH=a%gle_^sH 
z_9?sgnRT$0g2r3E{MT^TO}X>5J4RXa8dL{~-aJ_Agm3xHnqBlfNENWM={@NA)v3H{ z{bK8qS?-iDRfW+sJh*qA{}?qt6EArI|E~BGRn(7!6-dcL#@BXtkxM_y{gDgMYmN7% z9M4oB?)5ZJT&2l1i50uRECk{9wza&4nOst<8%uPue*CsU40-~IZlRl$b(RXO%(#wr zJ)&Zr7#Bav4^RD`z^e%-xMlprCM!_iK`Nrl{7DM|5F%k#{C9Dw=<@G=I5XlN)OP!Jo+I+}6vSy|nQCV12x@vZ`jM+)Y-q zq~Kj*jUZtXj`Csd-537e-*XcJ>Akg%<1&mEkiaRtpYUszV1xSkFNt&$50kc4dQHSa zdV1Q4Al9~Vw_=9h1_!Z{2#%h|9*0lwcz75Y7|aR0TpH1b&5n$qB#XHHt}jv${N;-9 z92v@wjc4+&RMx|beN1+uKVWWep+U|Vn3Qh$2DOAObWDRp51ndrpHuz$>Ah?K`yfZ# z&7ibI><;!-KFOCDELqHcbGU@ADg61#x@_|o(_n~vO%>f#rNULFq#*JXxkARP(ToRd z){Hv-q&Rj-8TfiWb#JaxqfqB#Q^fA7NN1g^j(%P4x%lPkV7_;QHxE>8%{kHI**UsL zDPKZg#fIEHTeurE^qG`MmV3)cCYQ#uV(*2}s@^b)m3}+L@IXUFNu_n zxGfu8zMzu^vMd$)Z`m>?T4gdwrW5M6)E|3rPNnIx~*;aX7K3nuuTU2 zUF4p&O<8_n|w2~fL-t_q&6MI}z!jSU)( zI=Mc%iK6+RNXW?CDDao8EYt91r(~YaA zhu~pGG6T24qqINrQ4}1hy22FWqidrdgD{`GA39QZ3myM#C^rH9`RdhME@utCt=L|W zr!u3YE7h04KALHus;as^Rr}e>t91;Gq4qJdhb!S#^zU@kOgiI3F16(kNa()PC-D~N z{AkbKD*a%`WNYjKDLUCZVmw%bYMl?pz0e@jtO-5Dd~f7Jg7moV@}Nk@ude_2?MpjD zZIZ)$n)^1@!>DaQkz<>x9k|ba7Cz((ve+$h>c12;ENqBiVksSDTzOSx=ug={euLFf zCkE+;mQy7!IWs$PK7ktyynQ<*MCXCrv84KP(pKZhDrSAQ9Bq5&P2NW3P`&M)QZ7pk z1jCWd7jS;PmYAB_3MecC5$Ha1GimcM>w`$%gHM;8uxa}CnL4Osyl zg2|1G4P?P;f&A>g_lFO?FAw8}{p(5a1NsU*LIYw;&rHj%)!jB8gm~<%$*wcaewbV{ zg(nc4F?YR*5N-Z)d~QB(Ednl|h+E{PGave&Kf;(agZRYliaB6mES$hXfw$Y&d(68T zpC`JqM!Q^F7Lt4~QZ^2IUrKKttl^IAXc}NCi(%k>iBni4JRxpLt0As#gU3c(uR!d& z^HiGdl4us*Vw;!~Mw+PoKAra9nGPy^Q7b`&@9Faq@rQ2I zq4!e-k!o$Gq2o^hBIWEl%)SN&0uxU=UtHas#&a4&8*Z)&edfA~P}-v|<9Gcm0hMQHk4OJB@PC88#;#xWUu)I9}nUmR^CPD@?v&@W`LIhUL8%XrYP ze=xzWe~dk7fiC`4Omf^^?9k7*j=AIQ-hE$>pD>llxx6Q-=Bg4Wk3XjQTCezd^q#%I zW4)^V8YS<$uRz6=I~F~)k1epVDL_f5Im7ebEs4c9mUyf9;pMknBy{+XsnE?c8;No0 zfVkDmFCwd>jJ`iUSs1q_5szA%h|Z6bjhDgHKH@gcXQQhU{nl0#k&_T(CB*{? 
zjFxR|>BW4SGF6jSF`#BN)9{wTbc-w|upm!0zcn4V#N3wFiB`R5PA=i&YFIVhxwv?E z9jCyZOm249UfZ+fc(sEr=2Y`gn1gSfC=qGF^HtlYdOzvHG^^Q$kbv@SGDjKfBr8ldD;moWxK3;t+@O)vS-QE)| ziML}>yU%~Ch?a%XKAN)oP{tyT@)~blvp>be3{|_9$*uBy#QPW6t)QUg1@{FxN5i zmoCm^+lsF0yMt1|t>4s-=YkyR!t9v9zZI5_mkN0p^z_DVCPnO2tI zc2;1)sKXlXm}4SaCvp%H`XM5ei@f`j{wRaE ztva84qFkf++?)}r4-w&y!Q#cbo`?1J$j3*A+r`txw6A}O-*!7=XxujImVsEh}Q)q z-Ec2(ngZc2PTJG%&v~{ZKbQu<85KpDkXIf^VHCL3WFAs9>HPZC zWf8rOnyK{fHbt4scE<7ksxlwW*~y$b_U6eO_P{7TD0T-L zWLv4QbzzsKX%;O@#XEhvWbwGBxBQM`k_4poaH^O~HPmQBrQ{9*1@b(izFthf?ByE- z0;4RU@vKk{_@SK-hDpmn%ya!A*!7c$0B5123D2u2wQ@OQjT2#!KNiCjnoq(lX+-z7 zPGcq}p4D-~cuI0UI52F$f`l&Qu$X)g(XV;^k=uQk7bCgMhion}abR4nZ<5CcPtrJF z;z6on=5DAXRc{;xLXYX+9L(lSX!9ba)?MFlOKNXDs@Svj+-vLL_P<0u?wbhoz~k#U zyS9fIKHVL#b zc}s+w9tmFWvlzG3sfoj`n?c`8NLjO+G{+6HSFGQ=w$jX{XB9_x?9>CsfugUUK_fPy zc_UC2ojGUm#-D+8miZ2cei=6CpIMEBGM${})|=WX6KDBOt|WW9T3K=Il=J85p2KZ9YymJhau)Ocko?N zi4nDlRYYH?8_pE2jf*7m^~tM}UqhTBL=Bh7bGwrvMz!t>?_j7K1JXyF6ygn}h1(w* zeQpFK+p(l+YeEH{malO#IFNss4yCNFC|ZKu?1mmWtqgpHUh6$aw9q%r2f7AI+6Z2=T74JdS+7)s) zZ5?2yvP$@j;*RIkeMNd25>d5w!Z<(Ad75s+3i1)HbOd5jE3W%=EKyJ0rMJdlU+Y;A z_?0OZF0QcnNMeV%>d56~C8yUe(xurd%44>flez#9N1O+u; zkqp@H{M8!RX)=q+I|BZtk1*dB|Z~JR&rzfQZ+R&d>&YokfCG+Ou*NlBl*= zek$pT`)xl?Snf-^f3qCnhdA?oR$O#_LHsg-bfNGjO4*4q#GqSRe=~KZZPB{3(ArSW zHkB{vdYoadqhp*&@#@k2gJ$O1G-Zp-)Q9pDjzKWRq>I{~1s?tT9R}CFS=P&Ny;$Lz zL*&V>cx4q|M!#7H$E!>Qgf==rnv9DEzdU(C?fjM@VzriP#^gXm`?I*Dpg&DpS6V_u z2K3~&S*d?@+xXXJmn@a+bGwM4D_(aoDqGE=abNxh!^u;(K4C$`lq_Hx3^_x%Hx?!5sl@FrEhi_Ckx8<&i2Ck zi!i3X#FnXA(z<%l`R#2SIXSr!!6sLOQbd5f72rQwAY<$n_dW|asbf4sM~b6Jp~8LF zi&2~%M31?p$gv7%gGzGgeBm%wd*oUhL7)I4B-vQ-WyvvlFH$2X_Fhr^pH}sPLKTZ1 zWkh}M?9-uflE1l4`8|@ep1b=;g(R<1&^$W$Mqy!RQVx~C2u;hT(8or~kK3Y>6(6_r zk?Ek3ZKzGGwpEe4aE;G_w}tjXQ;99$uS{MOa(*@mweaV|7Ab5OE26w0?RBa-;5X!I zjQT*fr2hMwA))#;N$oR!Fb8+f7cK3sR+xqhMZzO(rKiTSz6kswHoCmJP2L~<1@d7N z5pw30s$m}Gua;`}t1s-w3oH- z-aOLXE6dHKN?q(dOMZ6Y-)7an@NL7wfEu?lLSl#N{fK70Ssb zniS1JUW?RrnG$gYMO5Wx@n68u-1nZTQISO8YY~fWFjXcBVLYnyu<*023@}e_W>j+) 
zacB6^0Ws1;m~7u-iJ0NM{5%PK_dccH*ztVx#kKf2sG*zQymd>gk)KBFL~ zB2hS*#5AA36TRbwEJ*4@4(!(0Urc@r3H4Z~G780PjTt)`=DIo%a5oTIy?!^m+NwTj zM`60g=+3})u>V@FvJhr={NDVmtVWmNNq&%@u&@41>-xR6^|2FiPtRWsTwq%pnOR$7 z007^_)YOr7he>z!FQ9Mx8W0<{5^UHeB2=rcSau(ej`Py)rA}F!#O_$O$k?DQ+v41D zLb~#C(J{NrA={C1LtMCl&Ra=sVy$w&*2w~mFXXU*N&SMF4``bmH?0FXHYtgBeeql6 zYH;bVb_I_3MHfe?3jCm!96au;5NPOLXr<>g(zy^dOR3YG-bRV*d1-ZMUROuT5oL>k!qH0RcsH@3fa?A3K4?b)Ib@LupIc5`9?~4$U?hD04qY-!5ZDRhU5k0> zVS^i1%e?T+i6o|)5VEblD2^UNdyP9Ws%k&n4xQvrWo=1(2H-B>g^Q1(SIU0Jl!Mky zQTm037t6QOv8H=_5E^W}rov;q?&MvR?Qf>KA0k|8tsdH+LKJ60zgM|Yw2o708FOAa zYMd7pu*5PPl>AF+3vHgnR~L8;4yHIdqf{use&#m!BefMssZX!+lfXG;6(SG7snBf} zyQnje(Ft>Wc*%3sXTH^_GL4M9u-8;)213O0Q zyYpT1{~)$M=@tPWM0!noKb9pYt@`e?s<-fL$N!*Bf2RBg_oejzwHglJx$H5B!}a#J^gvfGdD7iM@pg^?%>||5ltwOU4Jb+Xg7oi#_&?oe2@>x&L|n zv_UXm$1|(lfAol$SuF+n^^rA{^FN@w;jCNz$s*w~F=J&ia&k-H_e6nO)fuU7evZ-C zH&+EEi%aKDuU`|8Z)9d!nsp5qIXx0IF?^xT91|L;LgnDlhj8c^*1F?*d3il#WBUtz zRYx&(bi@br+hwmt(7`Z4lX2iv=QD|6M#???b*ekSZrB2~kw)!^o{uLkN?ND0{t{7Dh1-508-3e4thJ zGeq<{uG}NRS|O_g3nnftHVX5-VaXIE9kZDx!&w4ySIy`X)(Vh8?)!?rgb^FlNU6kddt>fd@o<5aix+C*` z_ksASw`$J7rQIO#qJXPAu-%yvc3oE&mlh?iden(@bUUxF32UFaZunOx4vx;o6K~Qj z!UR0`99MDhCZ!U#VAcG)$ zv%Eqnp7mhy-!sh%;L;(Djg1qTX~qsGkTDG(7;Q(pi0-%^^_N0bd;7ddQ-Uj!nTy{(LwLAxN9}wlmDEZ{NP%KkVU3@AmZI-#Ge{n?2rCU-f{EB=aow6y&!v%XlM_FLV1 z0WyPU&0n}3iYsn&0XpAZPF1~Ac}!bp^p~jChSBCod}tFXKZmM#<#6f=O0C(S+JOue z>YS#6vS&98h%K$ItwK4eVTTM6eGDa@rBel%DDdJZD)&l#2?pD5Om$%c9FzUYQ;R=t zI6KZtG&bJ2BxGdi-Z|df2+LN{?)t^8U%^=&=STdR)1<8}f$gKWQH?LD;4jB_^l}Jc z68ODz&2+Z-K;n3B8V52z)Js^}J{<cg4APNE-T<)9I9!z_q*h{%udK9%*P$&+G2An zZZmSCp-@yD8lh))V~8#R;O_p*1w06-uoMQ;AKGsE>OY2k_@FVpw!-4ihZz3H2R{tk z+mvz=D^zKxGD=M_RFWnJ+S(|^kT0E6KOf9p8}u}{%FWMJ6YKj_5|NMoY+2;3dQ2T7 zDCB=BRO60Iki`FY3@34(HyE?bRNA9(D9|F|Dr7vtSivv1|@cqFi$5Jzb#al>Xa zLPT6#9dq;%B9Xgi<&ZKFyy)}5JpPG+x;hWQ8`k)uN$iD7>0j^z`Qmlg zwCC7_OS{5?vS*LP5vy(Y1{U$DWYuqRukj`ZaN{qP4v4p2oNOJy_ZlOOKnz_|LxXij zE6?~R6b^8`AW!KXJtAM@1!?l*ijF4#dY)nv$Je_P#)eV-xhu(0?3ESuL-4pbXWCe0 
z?3V9HS0z`U+g1h3r&a{W{w0vQ}?FKgV*oktIR5?`le_(P-qRf#H5G zOPyL-!Cb%%?W{9=nW%!*{>b+>{>Iue>c*tX_*Z zNwPYBmu$!Bw}gYJ;5FkYdpDnrpnOm(T(@J}L?h$Lw|geNmjdX7M_Dy4PuR=}j&`|A z;+k=G}6F5&}#Bc52IV1o1wI&|VT%6GOPY7Ezr^b3fympR5gE&34r6>td5cUoEEQ z+d+9*j<4OBlx6om9lyd!2v}3bZc{#wm^5>3B?v^7-YvD~^K;r!WK_m}s;BFWMX27l z;Ah^a-i`Mq9dQ_(F6S_2W*c{$IpfbqY}}5^4+8XHYec-lV#jaFO?7N-FYVZW9HUJB z2DUusvXYc&Y|;L>QiF^o7NnWGGB55E5b#kw>Oe;dm&)XqdqR<*|IA;+zRovDuchgD zNv=CXTd8=N<%HMo4|=N`m;f6S+dcT;pQ5K(bd6BxY#~vMIeseT$;+N$e|BDUg<>h4v$}P9SWxdXuvp^993beaOY(FSOteAG;e5ryc3!k-m@;eVL zT%TPFQ?L;uZR>rXvyLARy0(12fxQHE&6|rUaY0)=v0Sa&IQ5B`?$D~{zdCq!+O`gh zlS9ThgGVU6Cn(pYpk?2&-e1Flq!&vbeYHCM^W{n@Q@zqvE>a214apwyR=Zz%=O&V! z)vC%R|Mnx%i?ueg`WCW6sj+Co0}FY9?HO%M4F?jAbNWUF|1alK1n;Yeia3qlj*0{Z zo~M0z=$`lX*e_0A62cU7h;~_XuPN6ObC~e@0*HZr7Dw~GmeBA5n;)~vybnQK?wqH= z@tL|gcFh$7pe9b@F7*Q{O+WDAXoi0t(*x6$Ep4$~pm%ZOOOk9LqjW>hbaX-)*Q)+)$?!>g`FI9Q+?BR`PNOA3MELqb9_-xcKy$sf# z@2|(+-OX6RN>2}rwJ;|r<-kD-Z$U1x+K7>#r{5&P*d%bgB}$g6=2A9{FkSJ?47F${ z8TDHNB&m#r()+_f6sLuU>y5r94<#-fuZ<4?IdU92hdeBVUP;99DJdN_aP2W|JIG@-Wi;&dQZC#3cpUD-Z!$16RS3CYY+sVC;Fanq`vKjZUfyWz| zH}`Y91KfcbNCghhU*3?Z!EgprR(P&>)^gw`6Yt>R;sq6h6yX@D`~+bLYTuGYdqg9T zpnqw00{xfpp&g_$`sfIwg6E-<{$(n<^gZ}9HOTU!u_O=*9>OP^O}cc+gnaH_uIUN8 z8;}s>zy$OX8XSUF6SckxAQer%4^TltfSny{)t9(2>jO$KRoB*TP3o7um$jy_%~M$P zGqE)JL~PTfztsTU_DlFwGu!Aat7sjO%5}&4$?VOk|9((hVpa`-k|$NKDakGZl+WTa+(kn2`?8@ohTn<|_S=?UCZwr&4ryETnL4@uaImkT{Nh0lV;NBk5D?b=7-B&6v8fy?XmP+ zzS!Emy?zOjSbNG!&0c+CiHdGg(xeacl+Hx;JRGY)kdqN`H^2Yz!v8)Figt}|zFGn= zWO7eOJF%HkGq}rl)ZIj2t1(es(dZF4!tpqm9?fQJ%Mx*j~h~wEFHqPkK z*$+qm=I5PEQ`BhXu`na~?sX|YB}0`%5Yl;d*m-BAa;>`)X{ILkbKKmgtsGg}4ry>y zDiIJ6s?QJwTqFBW!9)tPt-~XgYZ6n`H{7K8oi%YGg@uLdLm3$Q6?QqcrInRojg1nM z<+eXIwzjq+!Q8YA4XKY96GPz*-3^<_@i6uKUVM2^aQlA0KCiz__`q}RuXEnkNR*G( z8#_2V-UZ9?zeZYQUDZA}9zwMw%HQ-2B?YwTxQ2rXxIfd6qaPKUhHXytz~J7Sgz3Yb zfCT-e%vv9#C911(R0Gb}2N~}|hN=T|IqlH+qe!!#PD5T3lXpF75H}semw(>$hDKWJ zj%Vsm-6UAVee`0tuZE@)dYYxb_tkT{sRViyGfdhJQLytq-+MM!MNMgR?nELNM(%+9V^;$11~QChZ$ZK 
z9M!@?kIm_UMSL_n6j#jreQp?`>lRM}DlVLuqGF(%cliqkYD%D4W@kS6NP+bRR8AY5 zsZ@w<&O=9{_?n=Y_trBnT9-A}GBtPqD!)hE;ACTqbJn z4@oE6r1!$tS8smUp-35O*^R1tDCWAqCiz7oGq4QUn3LHxv6@WI!=t#kqiPOpb8?A| zx(W0_x-&oI6g5q><49Q112|amQ7KX1VH5)U6Rk@3uR5zV~f z{oVB#`$=CbuZc4(wt{U9K+%PPNkZ(G|?=xPH&-RC4ueq)u_T>w@5?;7KM=0_lbO0 zaUM#4a5<~1qX8N1+829SJ-R~$w$=w3U6ZF=ahv)o`r2EuKHxU@M_+mR)pTz?ni~F9 zFmejr?)vullaW1;zusBxr3AaS+U0n?IWhY^^+8NKbkWLz^P10>0+E9Q(wLs9Ip)UF zOT-i!^+^=Y{@f+@qL4RW%t{1n|fVU0wdMPJQ)>@(5h% zQD~5CQ|jw!yL*r@8OYfKxctyZY4XGtK?O9eF#cDI*qwsi-Fd}7^|nK6Lu3-g-)~uI z%ecGaVPiWi-8B4(ny89*vYo9np4BplbV+^bd1Ji+uklFjgg^WhqbRxwCP}9Q4y^&O zkG?3oszi6%5E+l5u}a+S?M!Mr=jFE>H?`i~8b62BWwjV;Q>k7hDlvQ!U>$=L&? z#qi;q%X3&Mz->;RSG*}Ljn7sC39OmfuWT80EVWP|P-u#zk{bS5nM1*OUx1(1P$xQH zB3yb_F0Pyg`k@Ie`~&mU;<_%tEVaR*xq0j2CAkcylue1%C7diQPox%QG7VK_oYNK@ z@ANvmyM>yBono#}=BV~o+!lcQ@XjH>`S$XdE}N4Mmqss^)e;O0@pU!Q(9k(I$F&U1 z$PoMM4ch$j-=Br%q8ERxH?n&T(oBIP2S;~JvB5CeOA|p-`#bR9_AVUG-uxfly8j=S z;|8|--xsL;ZAYu4p>bb;vN$vLAvgCkyL9iLtZONo5-G6u$hC%p#cIcIhlkiuCX`Rd zX`#{Ml1uGJb5#eI#6@o|LjJFX!=s-mD@Xl@k5r%uQbB0WAq=kWx3BcXX7GmheltcJ z!W;JY^$Q8TIJ$FXJyPiK4-!OD-8ilaz`*{!@}rRK7@X}!pInliK2p75COa+eP*d}b zd~17de#`P}^WA}Lh6JGM5P3x0f%d4_!$(eswpUPtfsehC{@!$|S98uP5{O6HRIOf# z;}1wnFx)cS#zpghk2_?|4%czL;ltA1&7x5e0XvzH-w;&maJ~-@{!3XQ&<^_i^3NZo zBmd+jD@dSjrB0&z-}YSIx#)!=3RL|hLp(ZhdYlA57zmEOU*P5zlL|5*MNm7uF@8>ehCz}10Fl4$x0ne|ERJ9kQ)M?7i< zG>v`IVYXxC72-Es1bu!prWj3xF`n})Ve`||GTXO8j;o~_r{%xt-evEMcp~8+0+b2t z?TFdfva;8~;)2w7*22=kjqL1v{2>V$**hbC(75xdA+q=DI|Ss-U0qn3QcM;I^tx?t zC!x7uUMb)~77i4Cpq9REobz`zsD4i0zk8|$?M9VuyL`uIcR=-!QYub+6SfxKu;Am zA3uI(%r>fjgodouBLub1bGMI; zgDWgDK7PV(>|m*5s620ht80|CMy;&8t`51XR**c2pM4{v>Wc@VSmh}ygyyRYs-ONT zVeK<9t*^F$RCFmf?a^zgp12dT|8J@l$Fy%wZw@5Z1_WGCTF7205P!NZ=0HwKp<|LO z$Tnq zoV6TK`_?W?tY7?mO{{vNY1uEv4txt>`zQ(cMC+PYjy7n=&pNi zW1YEen+lkUIqer0UT?Ll%xUWlH&Okz#8BM(%bBHa5oeT80DqqImr#X22r2&~LJB>0 zr<;pGssQ=Mbb~8-Um|~N%Xu26pMwAd;Mk3TglJ0XS_`F>ec``l{Ghtyw(ecJo^3^W zD}7`rdXGf)t6ICd4&|;9qIUS3@_1+D4zN)d&6BND{55nN0I-YH4}J{p#dg!ZqiQjx 
zce*hr{yb3op8uC$Ufm)sC75l?;nRP#iC`B1m{Z_Quc-jZ!qe5R7%-5OaEumcJN_un zhYjE7bZ7h{`9!Is1XvA_`Oqi@63{?tWp1i|0ozGbm8dd0Y*i)2=K}C=o_A z*zZ(|SZ}Rj`uJ8b5o)CK6u2;fzJFI??}zBQQS?|)01X-h_4NV9@&^>fs!brXz4(53 zU{`G#TgXN?UF}Sn`LDa~Z#gw)OK4?SO;w!mQ}ya=I{Oo17p__OZCE9c={8hwASP#2 zO*lk&-{qvmuMl>x_Wi9oRaY^G7`)+Oko#_LV3L1rgQDZxKn1cd4mw+()D$1fy5oTy zbiRxieH&A?kikhj^#kVQ>Sb9sJnoN~w!4f%MTWfsGA2(1(M= zSr0^wME~_96sTB6MeR3R_R~*ZVf~wKGQmiPP1z>i+b`ul$1$ajaM7N>h+Yal(1w8m4M+pVHF=ha5dU=*|D}-t!(Mvsx-) zt<5D+2#{hY|A*~SH1{|w9zO`Q-xwxJ>DPcH86>bBBf4?lpya<$;Cw{H#N1Bvk|ovs zAT#VuMn=X&zL^3&L>|6`ecxmP9nvX>giN;{qhKP& zinFjY=Hw%~B>D?F2UUkJb&f;=_&0bNe(m8#IVmN_RNq4*)*79c&Y6u-NL>NT4*8F6 z+KhL9Lw@jc;Oie@f3&*t*#w=UNypd1D2e?Xy z8x`VObush*R@VhZ>D0D*o~c!!j}EiFZ6I~1rXrz7x7d2_W`7N2XZ;8Z%Wm|B+Re?@ z^Yuu3d7(LZ2$~3Ve-MWhZ*6i0Z{svKzYlu3I)QU_J%xWGczCn#xz^EEs5PMHjr*(> z`p;BBn)->b6p`4K2>6Jy+AfGt2^5JCyIp-{rtW!l$5|0bJ_JZGr=3YRqrQpgq=u(v zW@!_nxd}w5bp3QS3RMmyzi3DiUr;fFOvxwYGA~HECj1jxb&zJ{#2|v9+OMi7Bteok z(1#8I{$8bR8}0631LHztu)~qH(r)V^{-Fa>DJ8b(Cfh~ww?tzulbV`+1d`J9Ttl+M zB8RT7ES4 zH!UkZXeOX=k^5mGqu&x^@kb)2BbQZp|3z2ELpHP9qL{xarrB$-@X7rmP`GrtUXWYa zULd3;ZU-24jK3L-9T&+IBmb65;YSBZ12uBBY1W3Uji(4bRMC2?HPDLZL}Wo0%C~Uf z-wXel*2B_0q}-Bq*~^QH_!Kx@?HJ7LhB@4fZVOk~cmmTEby>X#`#<`XMu)T&K^FLo z{C0FybPocxV*|BY^eq49XayQhnY2RO_`!iO(A$rls^XmuIb$^a?J%yfsu`ZBQRbnh4cLX>f^Qch z6tRZaBK$O47X?z2^t!L@{{kqeD!!%Dcve$NLc!{f*#Zmh#@!Kto#V+htG;?7!_biE-0>p6b!={V= z${4QP*bKWQ-45%a%SYKZEAqdjaliP%#G*1!Bea5zVFFcAJF*_Wbq=SUgL>g@=fnTr zp!WL3W(H;+qs?!o^;9Lt{ZE@=yfvfijD~+}+-g~XX_sOkrkB)OSChXusr%aiRd|w& zAQA*tJW4TtJKaH( z$XRWZ-}zh2KL7SFQXaW2MTm=daE}bD;P=&yMJ%Vk3M1MSGKHD&kpf@8C)o6__{VHZ zjqsZyeb%-o`DReIe0{^xah$B_&FlLKA&9J>18PP4Pj3LLofN_SlUQzI z0j%BrJ-JwSf71=6j+$QMDn0xcEgD$l~ZveXK<4MJ<)CxQk9;{wxiZV)N$E z@@%o>RUKCfyarBmvrBa2Pkb}WkjnxLwZsCXDWHBGNau(E1rb=$ZgA|6^i3Q3R)RD& zaUo1#1uuSmu6FN0iWg*mGr03RiJqd?TzG`W$H!N7=5{NVA#E=YmjuQQqe@qpvj;wF zgmPz;m1)7|-WdZL#@q1;ctr%vQTXKC^bxi2pG#znt?3k%kP6cbi`-na5tHgObo>p%9j4eCp$?o9!77 
zUlO;Arh@^yo@_yR%zIe%f-D-D7Eja(p2$d(XldoIdo|XM)E@2D9f^vGsjELxS1(C> z7*6se_juO!E>>Em!^P><6;GFyhkP2Ue9Mu4{p!cBdoYps_s^MtU3BD2i^u$Kar`w| zHtZ#`k>h4~Kg=PT8Dm%{AvSk9o|w^gZIzkfc~87uxd~@o+!h{# z&wliW4N#_}vemI;47qm%+`7br>Jah& zkdVT&QJs)B-=8jPNt1c?D)bjlKL&-6FKe2BdtI6$1EPPsZN$`%2>zo5%Bg!p{*qJD zSnT-_Rk{>p4CJ0KIfi>#ujW>=B3~qY^r4GGz^N8TwEkUmVL98dV8ig@MPg0|YWrYoJ6%35m5ijp%2)UBHrr;R5SO2sf1 z81y1OCTV1U=de9=OIfnL70y2tRH$dtf$kIhQK9zPx<Eh3yqScGJqi*QmERckZUaWj_Sg?vld2@0@W4wG@4=I=MWGQP z%N4;4Cwp(-HVqVfsl%OSjAx2vxI)mkj2Spsohw#z>@$~pmz?DCq}8amZ#Y2fSq4Nt z1Z{=P^W1tIQGb9_f1oAaI4UqGXsp`&Eh&$Mq=p8uNtNcOGKmnaS7ppZ|34QgCoGkTtcOAGqrNylbiYI&tB#>zW z>N}r8Jia`keM0Q9$v9;_Ge|a`cY3_XUu`q_lT5y5ePhk;1*=(QX>yj5Q_Q=Okurr% z6l!IU zNjVIS$D7b#%3aB4Jc@pkdQDN$@JqxtNl)0 zY|F0psnY4SX7dAIp2Fs5^Ks1s*x5y)01i z7$Vk2R;9bX>jtwL$Mpp? zq~_FWovhm`#R#x|4f4bAilpB8^j!}w>pu3>$0_!dZJo!?{GCgN>o*t; zCUH9gJO2Iw^sUeClr1(-J~woa7HAu`qLa~Be7dNZtL)lvcwYxa0X5%)Cjr>!dS+&f zAYdg7I8B{bSm8J+*_28_-<4PkI^*C_Dgw7*t^T>M(gfzuS1=<*{@Gz3`4L*n8?8^i zW62edP7ix^EE=+>+EzDQG+fZZFV<6%G2c~Eb?}tBX;o$|pdOpRxWnDE9~T}8>Gd`Y zO{XewvtVqKjxmM?%FijEo;Pp!orMCw`6HP%TD+^}A@lrH1X&~HTWj5SKiW#ih?s2| zgX$6H4ApG7LmxvSjpR_wFc0(4Ii2l+os;%DwQ4mOnxFyIs?(uuy3vy*_Z0GNZS>89IMAp%;px!1#=H zWqE~Gnbb(P?md^TfJ>(pZ>(j-(c)kf@O(U9aBze!jwFHq33DOgIZ<0B?cy{(dD`H( zQy_ecP&G3#SB*o@Gd{I_d~{y8w#QT+jZZ95|Lqn|T6b7f|U`2W2_e55!jI0e%8nBJ%aksM8zRQYR%NNqECU@-7hM-3;d(Mbd?Xr%JS3 z{R`upJjpS85EQ1IIQl;o8qby>O1(G9Z<(KGPd|^B*D6cyiS#^w`N!ZTyDSRqKXFFx zG0r@^sz2Lfo!?w=7VICuLsp5cPSOp_G7APyT2)2`ztu7uUUyf-+GKY`d;`yl=e68= zAyX^~mqoymh;cF9LEne~G#{MbFFsgl_Y^FFeO@AMcijKu> ztyLCmRugE>a!5?`#?MxiiCz>N$W(4U9{p`rS;C2u@J|?e0>Y55Z4ykHicH-MtB4ow zBOXx^+#td~f?j`ax?7ZAOyo_wG#UPII<>E1&AUJHIxDBMAB^O$8IqWA=bs`!T-Soo ze!z3B(p7?W+Ce(L+|w9Z7v62Y`vUXjKXDi3Z`@@OXD=PlCAdIQdHb~SHL`uh$;s)} zYLV@1y&E&+^9RFnC`Iv=weZK*2b}!z!iC&!3IT_JjD*`y`?yG+8_7%-$-Hp23UP`x{Vli`msfEN*jag# zlF=WQTD%4mIXW5#(*%i%CU^Ezcq|Fc%{O3TlWU##=j2*jffJNf5NuD1kk_g)8(#Li z8`++K62$g(_G;VG3p|Sv_P8{xgVr?a#oZxca`>!#aB!Fqi$i*_oB=05kAOwb)(Vf%%Yed3+|Nw 
zv%*D2jEaZTg;PLd0fdlMr#C_0yAS-?-e-g6bE2T2$R~4yVKLB~n&9Bz8*4Ngi})7q zwaJa|e{A1rn)@c3aFdd_L0Q0Ps?*a|{ic2XA3vVo8}kE?2t2GX#m{&m5&?+DU<-reur;8*lLf^-rjbg9E3_k{2K78KV~vNY?kh>eA)} zv)DURhDfQXe#NtB_YMr4B5DO=Q5~Ev41E16{wTLY84(uJk3miKa9_U^!qF)pi0_*dg5#dM*h6x$Zi z);7#;*o{6_TH<<=X4n(?zDDcndbccz!w_+MdmF5$hbcQBF?v8?ptw0<(fIaLQBnPm zaaCWxiap96^?mM~a+maxRk%odx<(m!gWl%lXvvdy<^6kdA;(01{a?m&XC#AZ zd>MG#3^1}H=|8)Nw5uC+;}o=OzvwHsmGCXe%F41Ezbd1|a%{~*E79UMUHi?lzBynd zm38q9ElWCiB6BBmOZv7>7Yi58|E{}6izjOHqUa&GztR}k&ws#^m9RRYffK^-*o8a1 z#l$IO-xxx&N#{Oaf*Os-BW(JMo`nU&cW_W1=<}H|3V1E2B2rVyV`3+J;s777KnR5)A+gIk(Pr zpxITK92!3%Wg*i>DBa`{UyTa)dN((bTQ9#T?3hDbdsC+;O}m+U|7(QZQoq zZ(Eb)=>=S@4y}o@c%PxQ#eXIEGq6H2C@Yvlf>7jvWq82j4?*TEh;`_Ds`F3$4;pLQ z?{ya}>Woy#uuQon7jTR|SZK6NvBbz94{gtk$0(3@GDKBeKfbzqpuXvTFmFVDF&esv zp_l!jUpDO=Dv;Um(T6|II2dq@LX=oGb!2tW&*R&j8xAXWFYE-;UXWw%IST^!GEKQ! zsabDF1IXEV_uUdzMBPAaCm9np!XmYY?o7WsGi=>cJO=_IqS)kkMJR7cwJy?M!Fhr96BDJo}LmNtGQ4yvGx4J{K`LIL3 zso!h4JXUZZ!okQm(s=(n#rFDK)o5p;kea0YJY!(xXZGnY2VrN)v}3|FyoA3v`q-sOxL!OmrCvi5$?#MvM4W}!Y$QGOdRVzkNq3uSU? 
z0F;r;o+*u>YK*(-DPE4h??9gYNb4*AlkPDvszj~NI5gqeQ84vy?!N5-FtNG=^1`sn zp+9db>@#Xe4B#)pb0M!KLBNllbk35~I@+3%X;SzLi2|^*?t4L2^cvd{E$Vm4#q<~_ z<2ylg-}s`I&E^Szh-6B3U#*EHfWVJGgOnPq7pwKYy~y4*A(gXN_{LH(H8n>JLXr2$ zMPn|!-y@QgZ2n?7aBkApjR4PC`0Djfbm@I|h(Lg(!PNFQZn)ZB%^V7lGJ*#!e8F1r z*HsC%eMjTYub z|3aYiROmL9uM6C1kElMzRZkgTo;rfRyxSkBly$)}%of{36rsw_X0WF?A%HLol^caj<=n&5k z!J+f}HSPIX=J(>?H!e``hb{k-Cp$b-Jm2I-(rS`%ap7@E$ORQytN$%d(m@dtc*e>ZkQfv{2Q7Y`?OOk9k_ z)Nd05WqezoBFkV7$JHgz3*Xq<51e~yPvmhRMsY@e&e8tbLO8?Y*J!|d_Nn9rm9Mh@mwI2Zvs}$edvW`}ISg0?_`i#=FG5mIFKiPP zo1ZVTZ+)7U^ZK8+*Zone^8wBfCNbST+QX&v_<~}tJ_HYmm*XPW)@-a)-R-7LMd-@} zq3@QjnM3ivKeNCb*SBQzmdoJ0UYJ$n`iR!?`sD4Qb6>sX!7HVDr&<`t=XS0-W-18x`sFK zqTl(~W5jmZ&~Q@Y`ifR>^`2oiH+u;OiFUFmHNBd<|GY?A4teDB-lrt^!T*cUNdAJE zjOFjc1wH8-||!`2QT_1}12-!aFg1NZL!B!w}#>$7nnFBA~?IllVFqxHPaRXCmAO|79P1y3NKkb+rFikj_&%c z{=)h#nj3}=>Er?fA_9W@^pfYPLTgKye3}3OVDD}CrUukYO*Tt~A@GQZoEO6?lF|hv z2n0!jwjzb*3=no=^m$%S;-MH^7*N;oJYB#tGQJUAoXq_w6NE{+JnhVCTmPM`>Liek zT+YVkXY9L;d#F|t374~5^I6N`~D@ta_q1eK{QP{NR zD+^^*4_p0ruAO679nU|=4(`NUR&O;>E+qcT_|1>$iedpv8{L>R6%2QI4l%QabE>X?&>;3a;tHP4$o4HDO;?HyX$Er-wcnmQ|G42O}h7`)yyY>A>=gw?& zk3F74XFJK4T9*x7jcL5D=$Bn}(f)KG+b?e_cU=>3;60wTP5+d zmOK_l9?3aS4pGge!(fevs-(LW@a-Hgudjs0j@-miA7PCBHRY00CR4m`JuLMBKP z->IPk%Z>tv7$^g_x3>%3OyK9qr^B}?g~z+!SjPCEz*5*mrM$vBklBod*}W*l`zJZy zNv}ko0lse0!iPoFkqXv#K})i~ln37^?TqZ9!xF2?XCDh9)L;tda;Ty3&NmrG+xhvw zF7a{4LXOxYy2DeP{xmF3gZTrpAV;u;cHSHuD>fMO{w+cR64zUNE_sy?CE-<6*7)(_ zAjuX6h)9Yk|B%KL#)KYjh{c-u`3mJ)2hX1HhH|iZO6wF^-gfg@j3x!W+W%IqXO%O$ z{$FhPNMc(Nzz&(nOVeImT`lq!P0HEZb6{jml1-cvkl@&>@x%HqXNtixo3(1RY#=xy z3JXa6+T+)keS}CM%sb29I>^5pNxY`%yc(#DH)%`GHfuTY5RnyzZ@XXeUf-(SNFE)z ziDWR39wGHj0G#rPrEO<1A6^008RfsR6CD&p+JTKyEhVNkUbtFT-`Bp!gIH_Tz#D8H z2+OC4_!}}6F3yz!hj>A`1_)$)f`sVAk@e!^I7CO6gSnTUo@=OZRYU`I@P3OqGAa{q zOZnn6+!K`2G-yh5UIs^S?ylY@7x0XO24S5dzODlD zQh~Jf%}qQKoOWOrG{Sc|E}u%A$auN_#s^8rNbXx1MBpHssC-tzsYwzsBor4HUyOn0 z8RC2NLn`>fQS!+Fiwtm6b@G1L>Tj~8IqDZ!0qF-*K4-PQu`yn6pXywYadgB^N$i7# 
zcSt7lE>6HNq}~Bmp8kze&LelID*;({_yMA9u71%yS)4t+|064=R;KsT-oC;)1Y14s zaJ|S@KIPz2+*8W=U~!GYgLUnfD&P`l+B8TLVPCK>J9l%-!Ddbu^!!*^@i7m^BOi@+ zzOzB19@6S$(T=a;y+jAJvMu|+lnOA9bbqiWN1)p73XbPvjlh_foJ;>Y-BMM6q#+y?1!@LU5KgD3CREMQN(l_ty_5BhB@A zNGIn$`eAXoysKeCmV9g*Nu^w+eG5pg&$xZw8VZ?$*S z2o!fCN6pSCUy3<6IMi&iPk(rS6*k$)6V_!?_fa81@wt9QvbJptPWEVT(-oR|a-3|G zjV^z(XX<0{iGVQ4h{~CZRm~GeiXIE$dA-hwD)y_uA6sB5TUD?j<5In1HHY=rHwsf*)muMtv;}q z3iTO1lwJWc^hy_zv(2VhkN*G;)X$ENAGWv6O{WKrBB>ny-(ChQ>*x%2#8SOqxITx> zi@t|tRxizwiK1+}pY!o-fo^Qa&aYRtY9VH1mq(H-(Ud%NKEL0LJ| z?>1?;C7j*QC!>4wzT;;-1dv!7vp11~L4b|-MtvWOzKB#N1J zx1d|y7@~Ec^Xo3xkqNfEuP4&O-{z5I1DcxYi>*8g#I{O(^)W!>*&<#C!W)b$rGFT* zC9A)yO#vboaKLWQPH}C>L?S|t><&3Pe;}-ZU^MnUB5+!lZ1grD!z6rjP>PM#gJnG7++3wa_w7mY9D=PN^U=Op)8} zW^bN4z%NSVaY<#+$3`%1G}&;~y=y+LmPTxLb)xTPc4G9n$%<(L4$}*Up;1w$zay|7 zMRtg0FfCWt%s@2K)IL=S**i>s5MJ_nXm+{rC*k^YS@iJ!jKT#d&z`9*eCF8%?Gv6G z77ia-yxYe?!~|v?Hd?pjJoQ%jZ`$zX3y$g6?EYVDPeC(YnZkgJd6(ah`8g`!h$Cs*|!~=8rKjAb6BaJQYYS)p$%_1J=tOV-_W}{ zT3ztiRb6H^xj4xqyBdp#k!26!)w)*i02}pU{=KDUp>Jh_TcaG6ek-wauz$p$ylvWM zlET*1XOK&&pEj zJ-aOC5565_HVpZ%PGDU%U7fT@}x%-@isyId~|AXVN*1 zFML75NbD(FT}zOHS1?@pUbbk}0d$NH@!~@{!PWl9{obAs^EpaiAM)9*w>P{v0DPMC z&C(eGJ;Ha@dJQPxsW!pj3w4wzsQ=^_nfEJ9lx|zzj@=;9zep;09m324IAF1=q;@$LJ`ZB-E$9R44G;7t-L^;(1;rH8eIQ;YX(4lg1xj|+ zTOx5G8cv+7n0H~DUlm;!n=osA(AvJRw6brq0a}WP78^=ry()vi&H4I8i@zZT`dm%_@0H*e$v0unM|FQ8LBag=(?T=h4VSQ7{N` z$3-s#9y9scq?>?{(EfLX$r~Ig7^(K`z6f-nj?3wk0~dt>S?G@QFFPzVi$)?L_nK-^o7EJ>uT)ZF^GQi4t@>8T3 zV8uUws@OuKz;KgoKQz^4Mrr}-;AgOpK7QlrPiNL3>@@CLeoM5>Wr>$V3YlW>WMcCEQcKcDS;u|Qq+J^&=+`LG5H zO#b$Q3kiOt)QOppMYG$3%k;Q!Q$HplK?&$-l6e%?tN8U#?}Ib2@riqRbOqtC@$n^F zTbdu!@bSD~23_BI9CzoEAhCUiGXU)b28|V{6!TG2F1?6UV@W=poOf$Fy_a`BT#zxy z^wFK^ueKVE2%taq6!O44Y~qV8-m_}ZGeE2^AoCo+iW`lm*_tS%eH9w&d9|T#6c^v9 zQC%;_5KZ4pFbXgN z6B_)r*`i1srP zsr6Kv&K#4`-!sD{l8?eEvaX%rx01q`Luz9Fn-lv1{MeV6TN9nwV7;;buD505j)Hp) zW^vZq4V8Bcq_|)bs54F`;`(&Pr7&XRU&+ZYue`!@wdgmZpBIdW+5-6_jCvAigF60` zjdeWJ5$kGjE4+w{cz1pA`&fo*6{tLZT;=U@Yz;hMJ*A^RI+8rE$5Kibd-LJbFPyHc 
zkcbe_?}W36QfJe0><)h9RO14uk)?a?y-#;NdvDXaPk2Dd71kSq4EP67Sfb{XNDIjW z02rv6*V7xNZ}vf&j&(e-7G<|dLHi4}Exh67;pu~tdi(Rk#>>6;vnLy!Jw151tD+Tc zLmffj?%$sLjA449d#qbZ529oHrJX0Xd+6g4+TwM6G_u~uZ8;jFN#*mL$3W`y|CGZn z@BAZ&&35_F^N$wRqOI!rpSe1hU!-U)utv~Im=Mg6-$#J#OdJV|?$tH65qy|s?8VVpsHnb3=D>xNdRVn ziWVs70foDNyD#7n^T)q2qK0KbG8yGc>KCW^CWmYW+~yhL5maBx$w9Wpeq{62KT7cs z(1^xgl;VJmG`Q-Y+?&shM>--PChnaPoW#oMPaPQSHa6r8b8 z@(9JE-LVjkey8Ndrm4OEw|tgji&zYVbtNy9@hPxG*pmZ%HEi76p4NGL=EDPJm4psZ zpwzbPd@9TG4*N$Q?amN$I%LJ-k(F&V)&>%|KMDfswK{w2QN|?Y&@e|wjZ<3lsVDKn ze`v{Ce6$7e2frhzc$6ZPsRa zBB=W1$j>t@WWv6~JNk}j(>PSt=DT<8t{6OtvzojBXvIkRJ!mwgP4h|0^^zF%h}D3PxItAE zL3DPK5K}>sc}Ct!siTQ41c@Q^710s=?haETMg?vzI@hRa1{E$KM4K{X&A^2h$S$ph z_jB(*&pXk~_kyeWTTIKc>8shmB_kgAoZ`I_g;a+#m&x%e?HUK0_!EOM*Vb9|Otxom zbP)E}fXzhGxVximzgzBoru&gBS|vVf8Aq{dz5?ce0)*nm*9^Yba?tFs6vPJE7rBaO zPo70T2$OH4c9FF^D?>fJ&oHQ0v`1w8xZXkmw?_a-GU4ABD8=7-41^iO(izfI1BNvy zea}k>{$>8+tt@!~X%UG}w0u^N5*m@aX&sA#Y6~e$q4_3AED!n&_YwM9{dzijO_xk( zvL9#`v<79&gHfx^2NT&&<6DjM(jTA4TwZZ(Dxiwc}irQ;Xr zt(hVto)pBB49KA1D)4AwI*LF&3S9zBxIp&AL970O)ug)=4*1U3K*neh{*!KJ8u6FwM6{37Q4)T4B15fKHY>7WAfI!$D<((q> z|CN2`f9Ei_jI02HQOHwFv+gz@^fqkVPB19B7tb)Sa{1mC60(omW1X>>JF7k6teSQ+ z(6y$*M)m*l(2DP}yXRh})`4W%Tla$bWhs?}tLv458Mr}$*+=)4v!Xe3pm0fHJIQ{tBQ-m|EDDegql$^Nzfz0 zPCT;hxaBB-FD0&m5BYyruHN$I{lfB+MSiuMvODR~NUQDLzEM4%cWJlJN+3{f19N(R zN*vflsUewS-quxCPIhoY1xIUqMnBOf8T+aiWz)4!&~&Z@t`GqH2aWxI%WXq(jZ{oICX{n=R~^)uZY$|J=j5* zR3W#qMt#-aIbX3&r#O^BD-Y-<0f?v~B^j^55tHnBQuUtw%dYn6w@N>gh-ylnI`0h_ zQd#Ahz7aHW-g%wHAN|}i-Z>F_)xklN`VQvY{tER|*;lzgs970fwR*Z?YK8h_t!OMR zx(y&o8t|Fb?o=Xy!0QwRBoG`+W3cYAlu^*YtO7svw2Iq_MlaYJi3wEzO;Q%5L6<&c z`eH|_?qH$1tNu#X>eEmuq7GRP zAMCQxwRu>0c!h?8`CK&<hFQ)uj0G3KWunk2yOrd?#y}M zc^|yjQ%EUAk?Y;T)5JcIm z7)`ua^3b<6nJw6NISqQ1ub|qkvr$DGkVp*J8;%;0qQWHScSC5Dkb@5|PGMgtnt23b z!-6Q-dZk#J%e8eBw`eDNvzHA-EE9r5$$Ye|tdVr#2}iT}8K*<)OVMBKH%#b;sx%TW z7lcIaw+v0ZUW8-p^10tcNaKOLF()s_?zbZ%42;PWa3QnZ3koAs)6*_fefHfBP0!1H zmq&HtfUp@xqcO2Hv<5EIFkdlIo}TczM%$i}=vohzcTTw&hOD~raOekjDH3xM9dF&g 
z3XiyYx$=hMGqlmSKrvZL#vO#Dr!OaI^~CBH-r@BCGXC>7aPTyIe%WVCSe7tW4}=bH zug)&{xPS=cq!CVg#u^QQy5|{CfY?jnKje9TzS<2$!d99`>k0OI>-A>myPf;9nG3Po z9-@1XM&(kSUN@q#gDHRy;AJ7T-lr@qnS6zG$dDkcR+uzEnlFigrH%m**`fy4wMt4i z_uQx`dwO?!v->1^cbwcd13S*zOs z;rjY-yFVi|xOkHhYLM)9Sw~0i%S9KqSjyzioM3tmo7)$y*OgyUkl9Cl#C*CJx#CeS zmt?^4M#7?;z!ZF4G2WrhAV*wDA5LRgIb*`~>)z>y&f-Uvi^+rXHRspAn?OE?NSK1) z8v;4^7xG0!v1U)54K^KJqN%FJ%qc{W6wX2f<8@V_Laaif_f&h@okt80Jta!bLj^OE z0!T$QL(iShXCy)fdY!RjZp6D^&!wN@od=NO%V!kt7y9(eVRklSu++U=hnvGmNavtj z&3kz)z^J_W9!COM|1l2VavFSgKPrHURev~*1TAp}(YzCXh2k;% zG2Z5^wOU-I$X1lk_NAwFoF3M_tz1u#gY*Bo?^5whuylO;tW=QPd2Ia7vO-!$28Wzn z!p4U6b4sTNwBu)oU89as+?107e|rBIU8@0G36b_!Rc7@Hb3?bhlK6p*53_6oNjLjC zVW{Cv`(-Yuo749gO|R=hPEdlt*Zz#j0K&YOpSNzJF<-v){k%fs_mrh~#+KNf*7w;6 zXKPsoe@{B2*!RUvTHL*0j{BApf6^W&@;v#ToO}rq2CLvv1UHoS?f3>;2*0>-LbDx? zmO76oecg#$*35sDymO$WY^7A%qA=CUvi7hJS3(DKmeA~gd5mm@4}&Ueta zt>mpO)$46I?7T2s^(Nqa2j(PIld=$Ak`-n*) zB$+>)#=KzLb&`w{Z5E|{aeB5jn#Pr{c)HC{m2|m)<^2%Ei9sqkp7|c`=g*%_)_#6| zj)${pPP^hU+r4#i+ps@)TowAl>rZg6&KfzFlJy5H3HEY6rXLk4}$ zjQe|dUqIel%+>VvR^&d|l)YzG*`(%0K-usNKzH(MnmC>iql$#i^?w-fWzWd9!z zHC<MC>%`&hMd3xo>^`a|7v$esze~;v_+DW1VaT)8 z(3VCl!|Y5~AKO(6&S09!07~wS+-WWCueB1_h06c6-a}kHf;n4uhQFHBqz6W z24SjgIQl?JVfS+HNdb}O17S!+JN?1KKF82vR&5cG2Cy1Cy%MIx0?`EMM}fQ0)Z%Sh zv0DaN^$YTuedjFF!dUkS0-)V6U9jD)8LGBkifr+^dySgP21=|a`tjr6oh*}i)ESm> zB_=Sw!Q4}c0r$yeIrW@8^5ld5GGpzs|=?M~CqL5tSYWD~4sonM@aHUev7o zx5hCbn)$u=Ym3t0U&dP!y2l{e8co1IK+@>>oXloPEMbdvBTYt*s?VAqJ8}s=a&4qP z|GB~St>LZ^Vj6$MY|^$+l@Hf&Z0lNy9vjDx1=XWJ4iwI;Wq2ww_|HF` z-AtnQLuy)lq=fWE5vM%kZdx;`Pq}II7vZEd8C_bk?|sU8N7J!KNRvIk?H77Dm`Y6T z8d#w!B$#mY?uYS;bDoeYRDF2Z?`c0FAtn`@FjQ_*6IQw=MkYaD zx%6d5OP$45K3~$eKd_jz%%m)Q)Va#COu~aq-S&S7T~lmPV4GEtc0Bm?+_&i-t%khX zdU{wpc+%y^48RVcOwF6}RocE{j+I7|qRMVXqIH?sw`0WsbZB^6OX#5faOQ#IeMTMD zT-3UD-k1{Oaa~Uu!i3h%osRi=5^E8LaWPX7KlXuwyA=~r2uxj})nP>%3OlKh`WfyG9sLj*R@?E5T*6+PYnMXu+ zCT1QxL6xDMd%=^*;0}A53*-HQIl417vkFy=7D8L!yV2-lA}7wyhT*Rzh(DEt(MTSY zp?kp-KKirp4#QUE6hXIc7P>mVw^Yk^+Oj^f3#>X>rKZg=Q)#(KL|ONLvp1lbP*G9| 
zy-w`TY!^1dWu+-Cudjb$L&pE9t?fkVR{*xtV;jT^`_+J25??$9#d71_!=gA7Saa!q^tnnj!Jh-vJih7X3GkB^@bs>kig*4QTT4|FMHaeCO%_IrzT+hi z3EndYVm&w2M}~2_Aq`=6aCE-XQq6_c4TFn$6GNnSI{hi;-i9>GzX3t?tA9PnF0bFX?l{^ zrrCgUQisqfrioA+NVmUlK(@eG<#nHGVFS#UNk=5y#aWo&Mcxd35}v>hQ?vB2#Bvca zYDT2Xr7|ETE1E6shIeYLoU5-%5+Nmh1ivo_(JZv5d?wpISQ|l+_zxwnJ9057dF)az zOBzT&h#obifcY}(_9W>q=eEY*fd+l-pE8^Jzdg<#yiuS~u(#>IIxbWe(b1Lf6ElO8m z>n(OCD7_DAEK%Rk{HJ6xhmKD;exLM3vL%1RA(q=jMWHA)84X$}wduK2y0gpb%g1(3 znNzFRV3}2^ezP#_hQ)c0C}k;!#VENC`rq6K`3fH8Ym6JGC2sQ;+9dx-riPb5Vw5Q0 zWRW`nK2$d%t7LPQBn2Sw(*80xUG?$#cIhCTq^>i{YE8vkjg z;=!Z?S^&DLzh8}?Q~tH9V1Z_o-vYHgW#_%NV5PUY6LvBy+)Hu|myXb53$f8!eZiA4 zJ7fPwWnEET;x{tlqh&kJx6gUDW`|*~5iL>*rLI~Mv<+NB5+N}AllwF36N9tmDm%`j za=or7kr>MURgf!DiN3rOLu3`SM`IW_X^Vdx{?~e6$PG7T`h@jAaw~qf3!xMLQq#5| zy2O`HkeOvw1Y_JKI=x0}t80y<_Lz1I^CA_nmjY#XNxUDdNo~<7UI+Vnf&tzkuXBqr zEmwt{3Ij|J-nzCt_dvfD3h2&4Pe+xRUQQBT6pb#j%|yJHFPZR#9`BaTuj>rUzZfGJ zzST@ieOAE6t*x7Wr?C`>uG%_xQ3%sv=ufi=SHrUDRYPboWeE!2<;$NYLQ!?i$?PA-KB*PjDv?EO>BtcXto&u0`F+_pNo$IeY&A zo(Br3nsd~U-dlTH?C)3yeTFd=hex*!-r_kYxku!sU%%NzE7UgKM4n)7m>_?qJ{Kfe z`2(?h&w=m`hnAAf{n|Y2q-UlJd4jv`GfYVeGo+_gojdVbiM5*2RQ-;4{zjppe$lVlV=dQ>a*LCxe za{fvPf-%6^7Mn64Ilv1?CLlC-G{LV>wyvHTC1ODPH{R0kBL#KTd<9X+RgZ}@Wg>qr zja9yrM`pOzZVwzA=bVk$gNdkCnA%3_6{&L*nyK@__5%)Q;g$Z6!X$#?DB617_)3GH zT0vLJgC6QYUjQYTGFxcD;si+vXODnQTN4D|=N`d~i#>vXM1tB0+xb1$tOW)?J+Ro) zBfVJD1H4%B{RA_iR6VF-QX!}r$r;aAKwUdynoTgH{vTM;khOCA!*05-4^Aq#3ymbg z@oeWdOzf6;VNv1JyuSLzh5LW}0l6^%(_@2)6^69H(hDgzC<#_*Qska*7Excf(Q1`< zT3m9!54M8-=XkvRB;P=2E?nH%z&ucKZ%{nNa#7Bs%vL4whDVAHF!NCqpAjThwLSl$ ztzJiWuXT^|YvoOG-muKa8Mr+uYMp{|#M^+BcA<1<5}_ zD}OfkxTch}$9mn-PIZ6(In1x^gx>I+VBYsu3@%2!rhxRgB%nVCkt3pAFx92+Gp&$g{$ODU>mayPfo{6<2lYSPN1|o_?jvCnH%3HZ!Yf+c~Qu~9+ z23VMGQkKoJ7=Rse{>|U=_80N<|2nT+G?VC2V#zZheVhlI&Ks%h_;d0>QGSAxjwk0s zV*J14q2e=Twiw&@?jR3ojH6%oM=~j(Xhr3yU(i1P#$Nl<#d5~AaFJQps63a8LXB?mQ}Lx;t4)#0e(1iWDf15PmqO@_l46apEf14kCfskj%8VO? 
z3~x9`e*djnEA5f&&KY4{zzXP?rJDS=E$-=ioq9MS`Rl6us?a95DcV$be|gyZuV;!l z`D_5z=N-kMr8ihrVL&QiB5#8sI2?ItGjTd3kO2BybFyKXSLYf)os6btJ9+$JD`dHLbWfJxl5Qw?Jm-mTsoe4Ho$y~+ z%n6}Y20d7KfueYVMwz35(Ac1q_Q2x&CgLkM3#a$oo}|Yym&V9S>F^k#YGFB>ATsCR zCAD~$xDaY!K>xF+;5hi{j8&dUt8P?}i@jP(7Tv^NFv%9pgplri`|h7kPpxkGXdr_P z{&-l;-|u_tibUzWZ;xI^$)#Yd6w~gXlIMwimYEecQNX_SK5<-Jr|)ZzEa%^GU@& za?9ilKO3)huK*qKnkmp~ULtb=7#;wzh_KSr9&G`r`Vfn$6GkNt!Txj6U6jlD@RE0| z#Tm1^bMqRhx2Za+gf^NwN@tUre;zGs6h03V_b9b^MFdqZ=aXE{v2tV^Yqb)%mX`%(@X+tqYe#>1n~CSeWb(e_YWwVh?yz!pUo+@Q(f>(Kv(t^!iHm(ZSI?{K)oAl zjx?(>RIWyZv&s*7%;?ZF8jyek3^U4QB|*uBv-`UwQ!9}b z?Fsh+*M)~cNC0VnKkseqS$G6D1u(sjN%YCYInw@8`z$mU8hg>=#Aot8&?&`+_Nm8k z?ESKcq$+vViwPqd){;e-JDa5&{Wj!*T-*t>-?encHJfE8hC|Wr8CyDAYN?!i_-e<5 z7jhWBC$M@k@*bY6*cUt`$Cw+H8hUxoo3O_r;!vY6-~$qGwB{aiI#LpUUT$ZkZS2Y^ zFEY<13EMr&Ig$F@Q>0l*McFaAwqtzR2IKbaOb^E?OBv7q`D~VOa?!qI+582{>z&IW zoljX}t!2H>GP7P_kd(oL>MDa4Ja8CY<1i4o!|*D9SG_2!sE8RB`_Z}?iW8s$nme_T z(ei`=TE`2sUgb0{ZM8oet*Ia{7856WT>j_3EOX_{9W@6mV~6d^*Wb~W+J4!VGLrZ- z9H!y{u?v>cK7HSGu*uFX^2FXF)PBwRwN_y#miTd(*ZK4 zJ4>i7FDk5Qsl$#r3ZRur+Y9*OF5F)k4Hc%5!#f{XIVGEiJ*t;%-t#3151UD)zz)G? z1IF&>7H^7@d3&?va=!QS{Nmm^aS?}*)YJ2}yM;g2x~m3$`Ur#2>_ zmM1o>{yjBQM%ep3RKN0+#>KL@7OM=<(2RGzd?)!TL=D_*D`IU(Y0F=Re_8VlL zlFu@_2_3arYKsIIUEh!|fUc(_y~lIIKHi-D;@F(77w6Z*ggY;_UYE;{b}|eF=){0e zA^gpLG#>v~Q$0NrpbZ9qe*mPU@9e=0hO`eJ6yQGJxoIos0ZR0flW0K6v;5j8chroE zH0N-BX(UMBi1UY3EaS@<+x&Vke+_X;nsrzvC>GXH-~$_122S(11GmoR&pA9r6*V2) z*A{8AVy8mfg*vqksv)@`9t?CD>1}q-y|-X?6V7lVN4ixeXnjO(p z!TRe9*B|zJO0pIiP-F|5ONH))CC#bzv{7+FB7q|d>PXR8D_CV9Hg@9p(4mN$C5%J| z_V}Y06xA;DqJw0C62P}{eHF(~5Ora2n)5O;cj7OR>3;kMaPXNjD55cY0W1akCfz)Yao>Pl>@Ro`>q{0E2? 
zjfjXyaX4GOZZF4pfQPqPY{~>63V`mNRUn`pI0*Qx>)S6>b^^3DpU0wjrlD1DzTI@_ z5{?68Bx=SnlK1a}#x#5%SJ6Kq&>q$q7a#1qcv40;$A_H}0b|k_9mFH=KiG4^vEH$- z9rl)hq!0MQbdbfg!s_ntwk}?UM&L3W{SFt$>1mV$3m;R^%JYCK9*_E@M@rt*Xj~`gTs1=9k%`WBKsisy=%#7_*6?!b5-{r~eY}UGfw^qI-EYpLQ}2*<Zq;tOVcf8)KG^;Mxd+E9{ z5pn(b%d&3%eI1|2RvvM%C|mC5yQsRSKzH94+AMYMO^<3&)?HY)pw3E%1Pk&KCGzFYoaFZCw)7s9b;$(Crm zF>SE>qa~e#5Hu{^-fz&ct>{2F{wyFoM0tX#q<_#&c5UXu=Z^Hz7vpDlr^;hinFYl! z1)6bbwmy*s3yW)gq;_pv6r#MOPe7f^E=&i5iLqElQ2@ezQfIHda5C{11*uhR)$X}G zt!&vb?2+$ND?9uRyiYTI>Hrjv zZb|%NJ;wxifMW7hpILzAl$n_sfZkTr+X$tr(L-~(-y=bZ=XF-JYtN4@5TnayergTX zhyJe^i~!JTSS3LGAWtCb9F8Ojux`xdMhvFQcfE>kvw9VcB6)s?)!Nm@y6fqx!xUL@ zRB>3!__y(s{)n;q9aqAD(sN_%Z9OJFez;Ht)h8yPua7S1ZG6+m1O96n5fM39rD5^? zmv64+AY(@k#}ua;_#?nhFg|uA>g9)?{MAE=f9)wO5{Qh7bUv5OhL83xd4ZaarY~on z#pWDJ3y9)btzd@c^TK+WVF?zvLH!v|(xs~$Xgdv{7l6}dt!z@aP>BsTXG|a$Y+%Ja zI+x$0qy?Csvui&MGk8?%f;>Q2CsElh0Jh2sb8ZF;t1ZA4S2AT=U`z?7@bw90#ErbglFTUG%_s>xuh(w( zQ2kJ{4a7Rs{B!(oJ_*%VVV?Nem#o}VB?z2~dw5a|t)22Hrp@NC55+@$#m30?x6E)}}% zSqzTUttND|e|M@O9W;|tiIt6O&xb?@23L@s`sFXziT_3V4}5lM?W{I9J+42Z zFbs!vO1?avJ8zynQXNoq;dvpqsDj=y7gF@A9nC5=yWx?M!N~M}cMC7#C_w!Q>_Fzn zpwR}Up48-n{3_iAWWfg$ZS(*XaKaT<(WusFaM*k;R=`6BQ#P9^vh?`(FeTS}`OAup z75dk%9p6&lqftMl+@w>qNErH5tCySYmB*_J{czbMApt?M$@z2u0Lv}9X#Hy*zZ4h{ zLBzO~t@-{qFXn)t%X);* z@Bn)Qqz-&%Ur3>M{;yvK|5M59p!NwyS_X3_^vK7j+T}=1tG>J?>jMz0UgT1K7yU1x z28{GI&jaIP=!d>XjuZ0GhM+}ll^F?zvsu?%%->U%DSpOCm2r()a#8xP3>EQ9c6~1d zBP&-J!Z6y*ZUc0=e4nhs_}`Dr`_p1%P<1Z6bADNjP8K_rWpkp-fz80LNFoULKt{ze z`C|qON*Hx#_eHJykYKK6Eul)iXTlu0=9AWym`q)(iiqhp}>#P$CzuivR$Ff4r;{y>kAXJjd_EQZ$!2iblFr7bh@gz&VsI-A6z>FFR?HJ zRt53;{sozSHk(0mq{qdj(;2bt2A1Zx0kc-7YtJFqr?c^9`QK#QfFHI!8A1bN{`yvZ zMlBWC3=9v!!nFK9MZsR$=5MFV%~Y?1fp6G-U-!?ITua%2+JPK@5E#581^^-Itf5Pr($%!R&r=M=}Q2FEN z#N}|0B!``rGhYG$$sxc376=cAl}y=G&aBr*0$UH?&eb6}xD@=U3XHpT7etJ8j--Lg z&$q-=-h{f)%X9Rc=oTb+P9^i0g~L01cxx5th^JG&z^uJ@>U~C3qOwI}aMUp=Xl{|L z!_uh^B_hIW#`D;~I|9w|-F$pH22&k&xbRSv0g6Ih>-iT!?apx|62q zg{>#xSq~xpNM_*_KdOWKG}8RcJEhF#k^(E5Eb>6DObR2 
z!^QW3qtqK^yieizDes4(-Mv7<^2;}1ouKA;8Eh9QE=)`1z2Qjc$NjnTr~r9MQKP;a z5hb|c9;y_`bG|~Jg9+~I$|-D(bR5-v0h8c0WX8F?L#OL?a%l8}46eGHt;Ot}kh$v} z1?;J0Z_wp0Hs6q4)p6f$RsR`-9^i!uu_9=e4nb8JCScVrM$m^&1HYknOE$j5pQcAyPYOMlb@ z;S3m+0meXJ5KQY0C9u%}V1!gWZYLKAKfKY1S^fJ-vLsD{AGe|+qsczBY;{ER=b3S- z)i$hVRWD`8C+k1FvNrm+FK>a(HZ%swb>Aga>G0;I8N-k=M^eN%I8k4RA$eUn3CD{8 z{k{Bd`U6mQK16leWBfn?{wTnx-PuQ8lygzDlA@ zOZ>L$<+N)@^%#;Pfpg|XubY6f#{Ly$?@nKxF;CVHEM5u<&KOWPCJ{}e>PWqYA*&#vcpCt?+==ukju=B3Aaf-h2f+n&(;e1;YJ;hb>vm6eQCt3X|Kbs zL=)>Lp+*R#W))A)@&avKv`toRT6>`C3s^F{NJ1wc8u`hP^8)G}ol<=n&unUw%9%e) zen8R>Tg8UIE3nz8M$%)?>3>&=xvE$~i;oJ8K2u=ge8pZ}8rkYtMGSvKJRtWZCo6B= ztJdvv=nI`mx+%k!{-z@ZiplRw^7#~MZNn{l3BrxO38Bm3#tBvj4ed3w6!k0BFQ4upf$=w~ZK1m{B`7!g?yGwm(LM zOAuVwxc#)Yq@QN4vQ?t=jlR5BPS~r@S>ZW4e(^FZf?i991a74hqd1u46T17J;quDp zh0f5wfL(%>6Y`TzDxdVj_||!2_GazeUuKnB)hw+Tz1mW95$^Qu!uB$WNEKAdP5Rv& z_SFX_XAEZH0~+%laY!EtYNLdvU~ZOBC#(dVuZy+?%fyx^RlcS-k^`V(*%r5Vz0SMQ z`s+K<7-|=WCC4a2ViqLftJF+g5r9-^%a*s?IXn@t*P>Gg@s|gPM3LAG-!m z8;mN6+Nv!!wr5y1!F6sL_b=8tVTe55?aZ?sHJmPy9IsTzhN9(x4;#x@8xp}`=y@FY z;SorCyFX#SHtW#o)+Vu}F7ChEDaC?Y0&fxZI!tds#GitG_kze&OA`N2vXkfzt2 z{hdig^>jWMIYdiu@a=FZeYPdn<*Cj4QeD8mt{YwwmOxtc^xLZ@im*vxhCBPI?~sI+ zSJV5)vffD_krEf``$KjxRs98hKg*Wgm4ClUA6;=^2jkzK;%BN+1yZHiI&YQ1n-W)8A&HyJNxP6(WF* zP|ExrIh?L~o5C0>wVsICHPN%}^13_UweD^5K4%Q{MU) zF7zGZtr#=6^P$>`IDF-s?c4;LfD!eX9LsB#xQE}5SE5ns)=Y;q4R+0uGf{d^N^B|C ztH=i!B0{HGA^oy?(=82yPeb$q_w8&%N48{fDM z6lOG&H%+hrZ~%z8DIFax(XC&~3l8fW^T@#vWvw{P8RSI3cv4&dTM3-qV8b+8Uyc`L zeXJYxivHNlxRz;6{`1%@W>f7V1=6t32KeySJj{D9O9OW2A~u)Flu+(|RXB1Efjc1E=fUZX*$?E8`< zVv3f5z{303?T_F)gzDzX6t8r&9Y(Y>^3DF_4oj8TX&2U^lZr%;+o&^d|6J9F4{@Is zucW^2T^ZHmd2FWP4fU&19SK+o&Y1>KW()VgQ9p~(PaB@TDSyw2i9tdeyoMmfpZe>d zdE;7-xA5*K>lxyS^zeKCG>SdPqD9}d!6F)E?wo5M{H6P6V%`?z zvIiYCJ`}UjW!+$C)i#9=y0!j3DStaC@gnPVG(6%20p)Q+ zw3q1_g0R$)QB-g*Z_ASx?75_E9Ax-y@+XEXiQZ)AiOxq(_qA`329Zen;nb_x9m?%3zh+aX!BXt1T&u;O?_o&=zSbwxsm)ubwC30aN;Amh}uzdqIF*1 zRf`%88bSmMkn(e`L7vp|H>i*fVIM6xXHbv(e~u!5?klPGK*WG<9skQqB+y&EZ-3~i 
zgP6dNA-uOd{4_2CAkWSO<{}|a@;)^j9#3!5^i^)K!vwOVwjGpkM-xo=c`nh0`N*Ne z#~!hPC)PWb!Cqx;+SL4PF67=GpN()^wPpw~6n2SBAVjpL?bblbIg^1;_iF<_|gojGn5MB62)cOwN;Ykdv3 zbrPpZS5i=@0|n zdW&D_$orJYmYoM0I}js^3A~kdkF#3f=8EHAwuBJ1f)jFh1z%HeLHb21o^rLfAw+@V1AT;p~5}nduz0u zbZ7bNWLeN%*zh(6RXT0 z`6+JBD5%UVKhSFT^-NH3qe9i`^&67q_UxU-!UrID0BMr;I>KtqNlyDx2~yusKIwvL zGZ7_e{LsE^R*jAGPPDK{K1z;I@+Uot*P!bzacZyj0WQDHvPj_s(vQGlSp2uqX+LIu zG9suz9kuzy_t`dKzfB+5V~qMp-2$x?t?jT;{{R;vVN8b-5vf(&)Q-LNVLuvk4S%aS z$C)^f-|9@nk^^;T37HzG*&aBCXQkI_qCv33;zPI`P zob8t|P^=c^dVXC*B7!^6bF{aFpVMYLEV`30_4Q~lXVx>B_bn4hwiU}8QSzrHk*2-z z_o6A8&K|b_Evobpw?BooFc1|@bE(Weab(1$M+nH zM)dqhGgTN%*pl4*5IIB29kI1#)cy?{z@9S^M9(7VGg8#&d4KqjDTI+g#cjXZgabeE zAltRWWBCuvH@L`y&0{^~yHDUi%mZ9qi!=0(6iS~@8k6C^Pq9k(y+IDUVv>G2-W}hN zt)dm09|K;#^d}Y~{DLlBCiB5EX)U+ID+`!WvdUB)eqTH4Du`yJEklLpVp{AXk+~C} z{p`Ab_kIO;w7iz%>-N<6gPRbPtu$a#T@FS-ql$B4CRmOwwDeqM%s9UEQaj{v{5?oy z!*lUY-)}PZt*w5KCnd>MmPVI6+|_lHk48I~d~W86tdX%?gnrS%k)H?S)JS1)eZhk_ zTzA>5a`&20En91}YbK=S4;aUc(Jyp7iGR#o24wyci0I+leYMiEZ{`V}i0p`kkpkPR zH;pvJu4VI%}tO2Db78@>YiESw_50+%=iSN(7fi9_MIy?{1hs!#_dOB^?I}T z+W$~`eDYgq4zD32zrQPYbN5&t<~5=Dd9ifXU^ob8r{~PFT{p7x0IMAi{zD!Bg1@|c z0j2mkvXJ{VrVo}-O^@6XD@cHhC*}03HgD0q7xygt*vh!A>i`d!V1Y~9_ZyCJ!(n3W zeS)<()#jLV+|>bHAU`g^R-m%;hm_86IUHO`yx{sxGNaVt42FNwYQ>#d*UR&1^}EEE z^u5+wOaSgSva|jS&U!OHKkkEKt+b|jbhP_|p{?`+UsiZRivdI#Wi`2h;VaZ#M~3DU zj@QGdeSPC8B0L$Wv2uURT*5997TK1oQ{f$P6E3Yei2dP&zGP8wpI33MxG66@)l}c~ zjDBOOL6~z=dLq=gi9uZtM%fv2Q#?&OT_?Z{WIiQN;8_!G+_4ARcXwy?I6`wuyb@|p z_47d6t5x#9nLma%eJlWV4LHX-AFR9u#1TOL6aaNH0}8Id+5~_-6Ynlqq(MBxBSQrq zXbNWAx2XZT_A?$JoIB-r*9EVIB?dq)0??boWr~}g`mVmGJmTQ5CWt1Cz4JpB4Hmy} z&&k5sRC=y6g09c<_^zNcetB2*&HK;2TO1prJb$3MJsvZ=Z_G{)F58Air6Qj{tpXZ5 z_s}}job;HA0d3~!T4IU(nraPJX|wbbOUEVGUq}w{HVlT1HuQtbV92cL{i+3aO9pH6 zQ5f(cd8~`e*Yrfc^6%cY(*|PTxJXIJcD?bwz*Ky;SU%jD=h7G*8 zm*v%#zw5w0HxGFZsv?IePCEl>q@VGt>#Q1Lf+Qv z70GT#^z>dgG{#uf=7WMWQP&CajKtTQl075dwpgs>_)!-9zLd%DN~_r>YKm}4*y&Q> zowJYf)-r#ezQg&x8#vLAHk6p-cilNQJUcjfPHRfzk38Lr84tbO`n~7QZA+#f#=bY2 
z)$NfFiRNuHA_KR(2a`pfZ5%x1?q5xcZY8&L(lV}(_++mOMp7`c*q*YUK0DHh_z?~z zi&MFsE((sQ>pr+0@vVG4H_zhxF{baSO;EdSxF)|gLPh^{(f7^c+387uE3P(a>$&#t zA~Uqbww{dr-5u}rF8a&L$#}~`PsfVpz{ATI_Wr2($8*qAiEQJ6qxMhx(d#tf^{9zbh z`i&zrV5BZs>qt6#oc9CCd>7-J*Kc*k>wP%**F0S7H<|6;La9T6 zKvc&1oG-X(ypdYJ;@E3;#xq~Q+*Z5d@0}~t5pcx2Y1^ImegM6%!*T(a%(on&tb6o5 zb@t#fuAKUedS7&%tgUa-*7v&~$v~_{Vb30UFkJIL3LyG(`L%U7zw}1dt2ew16=|GV z&)S1;@y`&I>=h1=`CjlCV`w0Y-gAELKFsLWObKmCG>>i%WquE!pSXLdP1iz?S;N}`Y(#e3 z^tk%|zFdi2Xv}rm@96^gJ8q~-&HuDR0jgba1ECb!LQ_T4lHSaQPp;aRyXZE$kPyx? zbld8A#H(zx;2XTH)60qywQ1jauqHdM$riWehgL)6)2_soKgH0E9Q>UkRK5m^n{NZR zqP!3AqwK)%Ngjx8s@aUzk3XNGE2EAjR^`>O9&`ju(2H}JX_#$Qt;DbJ^OVvs58JWl9ZYc z@0ms94J#x@gnvD0htCtEt^y!lq1Pf$#D-eV?qB$cJB7ga%g@B9C+{NYe`5v#K@*eU z`-(RDyOEcS$>}=m(&>0719CZTojpEru@w@IrYSBGnje|2aVqed!K`P}k5dH4e2uZ7 z$MwM!7yYR|yc6p8#cYSAk z*9{C>f4*e;?n-nQ>lunx(r&1*i`=xz`u*JJsM3S9-3Js1$S~~~DT|L3Bj^a=;6ja% zI3a0wbyKoK{u>T;jNY-+-%d2}09GyN~wnBB&Y{TW6bog^RY+XIUBMOatc#`r`I73{;Oio(8DA(>*XFP5C2?#J}N3=cat!@rG+sysm0TbZ(Gug?dCnjfS9< zzt5n`M%3jDz31pK?ew^8q^WcIn)ST8_@v+Aianl}VfQZj>hD#LDPsJh3t@w!MZ*=B zJX=K9#C__0QvTlR6o%2G? 
zW7oM7RjTs0RqCCDJrJ)qKQ@4NRCPo$sQe^iVa0FJ(?UhmMdwY&E30+iW%Y10x}5rH z%$$(2JHe69Q&k@C@_ZV4K_OS`zMQl-X3_GnSW>ac`W%-ziE(va^XWbQdSchcXHW0S zh!E^-($0o&c4rlv&0%^S1V!e@JbvcOVAusb_NeCd7h11Aw_Xw~`T9HcYKN)#*_d^K zpF9*oWN8;AsdpRMm52lcPq*s}z7VL}V2tf7OU5d5Z#?Nhk%Na$EnIoeK*|DoovThl z-;`^KV1X)gBUk zf`MvczaSUo^OO(N$}h7ar*4q3n5V~Tj%TyJwkM9;=p$h%N&I>5%A{Tl*$WcCwH^W0 z$E$>B{p$|n9s>5MtL_`$?Rtg%v%9BH%!pIQefH0+2U1wk=?%N!S#QWhBqxju@Ym1Q zvLRWPex`Rm^9C5mg->238s`%#9|>pH_uMer!>3ElkC*+7mtkRLVj)WWFAyN;laKL5;|ctQ-|q|llw zo)FZ-P~SO;Tra~cu6j}H+OEenY*7oK`n=7Y>|ND|d~2=O8=i~vY6PdNb2&M&s;N-h z?jCRMd1d7i3rvDrXT3R1xg!QgMeOAbw?#NADQtLS+b%vgle8pZIZ?N+>5gH<@=5&o zlXZGlwv{fS-(~CQG_LJuekCHpiRnnb{c`uAndC5po-{aX+Rt=$+izoUO~HcE&727_ z9hqyKOZ}fJG+`yM#f9UTW82YLbNIqe>ElBaNum-|BKJtibYI&E2bYXz$VkI2(kVnG zAg63TVmm{hrEMO4aT~c*@drp-|E(b&?|lrFm(ia<{&^bXZTMb=D&}gVNDG#!Br@C zV2HO<<>>Pn34Jm^*3^xerY@&VkngIsTYXcRnz9PAN-F-(RZ zS>09b!P6Obo#ZS0K(MSjh-(igUaH}+?T}JE2X~!Ht4yu!;S}-DA15dzR8*7W%shbI zIDw9HBl_Qs$gg0AOde(cYu*0xS||okwP&tk#;;}UheT^(u>;7N&PM|_SLFM0Fu*zJd8L0BR-?32oXTBclTHCO#n(;AXCkv8KB$_0Win^ z1Ze!9#AjWSsJAZjjT!ollr#FKo$~?Ur`MeG6d;qMx3?FtS*-Jz0a*V32xTr_k?R1r z`!!{}DnYf~_*>JcRqY>ph|p5~4aS?SVK(6z5};jDxH5L>|9h>7{PX9bX7E8_wh=2r z-rXmmjUQNW|J<}%t@2)BRTH??=3)glXjc2VgxoDG`OLv{BHF_?K=$y;Id?wym#$6) z$}pDGzu{k_Pk>9G0|4C7K~i(*I4+7Vd+O!LH`)N(6o?pKOdqvkPY2kn#~cMYIdP}+ z&KG99{k?sdK(fMR)wb`p=<4lqs2q?215|gwOC`*m9bL=Q&a6q>jm4LP5NxUR#(t`8@ z??{E*jL*MWyFD}@iqolLauFJ&F>&#s?Y$eJf3Bf=GH}I#_VmWhNUdpkaM zvI9%lpBF`3!I0T3Y(N?uBBVu=C4#8L?W)#p-i+>kdm1`jI0KFnPY4j10gJC9Z*F3i zzc5`Nub|uE4ed88*%2f6_PXC5m%ezxh>9lMMH9zP-5jUCx+c}b%9dR*-;w~~An*xo zO5?8uhw6eQ1|^J);eXzq)c`{KHKewpEwg`tN8-!F&ymh<2_U(+xYFcXQF@)-vQTw_ z)ZVq~{& za9#0^bFrkdq-Hek@!zS$owg8zKp28U-&=V(YdB!riTnHsL^8p`s?wU8235Jm>iUhV zMO9ceBSWgObPO!8fk8jDT+VOARO(6o?!o31uS3a!b4y{G(ViCJiZrodN(|alVA}X6 ze%DTc*5IP}N=@5_4?IwBj;#cYV~Hi;Ig} z?zbYn?>ED)4`(9*K0Wuk6A;@uILz2CtChwq6%0u8wtBwVthZ&>)UdvSNY*@$=v|K& zIL#KxGaxdg4cJ- ztBXlhH9-RU_Ws^oJPKdvY^~MfVJBHiULJvziwg)5?Q=L?;etXZ=I%MFXqPuLqhw-Y 
z5)~6;0=P3~qiJHmlO&gj`P=gidkhTZ%QS-#F}{Afar~yqv^A!psMrD0uAg2xaa7&p z8*=jA;p^nNIamz3FxKDvW*C&(?(m6-y_r(SagSQt(LQ;=5&lTDw;r-V2B4_-=klwY zN>VOcB*K@0@~@)?jG`f`d^5E=^dQiL-nMpz9)=i8+;mOj zAE=L1B6=ig++qaVt~?Ep2aPi6bhQ#VndS8p)U+k+aiK&~q_WMr0HUDDTHpL9I7yjq zey>pF2!mCt0VNn9W>Q`r9TpMYWV%4E<)V-1`Metg@QxF*S&0HrC4ia^Pf9@%0w4^v z>h0uet!7jBeYmszUbu9dT}@w}AL#VkGRQw74i`;v8b zt}Ijh`1R|rQG98pJm+vmHm(D3KWWVdB8KvEa{K`>@FWmV;qBuyyGk;2MM7rRZXG2b z9x;{&7Z7WX9O!Yi3vGak`d3+;`x_6I_Ngc}Pk92L>cr3DMZB=RSV^90!0W-@JJP zq>PRAPyaEM(ev2B*^`nL6oisXXBW$x#3Uh+2ku~%Mgn80vWf};s|DuUw{Jy7ML(O3 zq&UtNv^pPPK|@0ahtb|NF4bB~0Q4bXNI+emuBLZ&3G+jqoet+K6!ykU{qpF8F;CfT zVs;lU;;}5=DqeSby>#URTLDpicdwuS+>eIp*IQsuI}czN@1)OXBQq2lCB@BfiP%3; zP*cjK4u#51X7D8sBot3`W#;rayxtEQ1mq6SPbGti7r$q%br-z_fxK5JB)f%_{YGsK zo>O(T_LhX>9!H(u^gU(HT(h+a>WMTKp}eOwC=NKRcQ>NbkncZO%$@#_?F1?d3#!bl zS26{2^!hpo+-C1~986E-C{-y#-?9`6Rj53wvVswE4sQILG?k`&novq%YP)lyqDGfT z&8iJ)-&ob#EdBsy!yi9>04UL)K*q}9CWRv$<8b%-dbWZ9I03k4#mxZetj2mC9Rvco z;6Z8C8-oZ5ft=pq@aSj?cJ?G-{$w?aZ_4j^ zs_8!o?GED&HEi!&zrvlgsvYYv1Ewd7X`s%H$Jl4Xp8RzA*wG>AGR;<{LHC~1!5!&NVFV%{Gh7!Xk;vguw87z8q#m#sBQz=(NYpVi$s$OBX{>slr&u zlZs3kGBpiH|6iW?0@~eMN^RssngZ}2%XY=uN{ZiS0bAHoSiSP(9 z>^0^gYZU*}$kY`lq`SMjdFQFub=Px`_kG4Uz8?$&*yrARtu^a7<~;IW zy;e2R>#I7Woj>EitP#=@zCJ{KnZMc;0p|*bnAp4Yw*R;Y1I6l>K#hZew*m5IZkUB^ z^%inNdDS5#yB@0!+z#cV;fuz3%5ZMKv>DRjV;JUx8t+WS{qm6!X04WV)W{%T*E+7t z8Q#gZ1RwZ6opzuampM3Ie8k+HXElaUC5-_3#p@eA`RgCwc`_TXtzSGtX};y|&cf@= z>1cKCN`K3j%tD%QaUhXB(WH;02W7zX*TDXdgvIVQn0Qoq#pYYr&KkzMiv#Rdo=2FS zc0MnBsVDhX^U7~?5=vQ7nDtdT-ShN}TXfj2-sG`gk`Hfu#L5~14e8OF(~Y=^XWQl{+ZJG7~0U?h@hal@$)ms7`G#nyZ7#K+AZK{ zx$fRPKkRS^O+~Wd+1c4m_^pU@sZof%U7>nePK4j0u?o$Se?aITJwASr#rZCTzU3KY}<>5 zlXx0Wr1c9mj|oCSJ021cQ0n=|2>G0T?&{1=W0v+-V_%WycgEH$rPS3)+9GL{&_a?E zVyXTJ-4W+xZa;B%=bwknur+L;auaGANLfPU{_TQgwZ!tndjYz zvv4nN{4z`rP6OTvXk;@~9ZX@wxSu&0ZG!<3yp;NMmeze@tXrVJ6qB;Vweq^sU9`DZ8qoJd3f#&=6pi!*#xaIhvJ7z5CPSeWRJ50Z|;*-&Zw%&u)OKhddk9Sl%!b zHwSG#8~SpygJDWg{RRTn=o&?dJ)){gLMkqZN~Zg(IPTL!&P*2rR)Rem@{>zv96m#i 
zF6bYh)GHv*B#k_JX31i65N3EpMDy`_bqAD0@M<~C!3uMPJ$&FlsnKvtQInn@q4~Jz@^;4@~;f;rT+qBym$;{$(i&o<6RVl@pp3Z3fT7a3C}KL zVj2t;m?Rlio3ap%In9&$N2L6t=E5F?$PD`yu92Uxe2t6663)0M8e(Ojv+SbwXujd! zZwOh_>6byqvAn2?W}WfzU&TuVj9yftWm-N~LxQ${6@7l}A}?ptVc1JpKt`DBr&8ZF z@R$7jpC9=UDTcgCWlTojg38-Cox1mkfjrt>9q$sOw%_~yuM1_zA|J{KJ<|xW2iw6u zTM{Wo_}4c*aoAxZa%Hd!uTmBbA`;eDy}$Io+%M7qXIt*zAM4JgNAi$xagKYjRc8+`D|2@95+%NJDUX~e>bq2mzm zbL&{_$+QAJciO?#=3%c<%SOv$J(wXnXs%3$4?!2$Uhn$1gXRtTOxIO;pHzbUajM;g z3m!dEv!S0q!#M9BFtVwC8ANHJu^mKdRkQH36um_fA1BJ4S(&j!T-|hAx0srmG9IgV zS2idl%>S?3;_S?=eHTWHyX=JMkCa%0)cIJ;^;Z{p-sV6B>c~v)(%&lGXmHsKg?8WH zE3$^Cm~k^}Z_ZP~s$H|*!hqh+Rk`zc0j1;qJeU0vVq#uR%Jh$@Wbo|=4?chyw9o(j zjyIl}n3$kl)Hv?7RJx0z%%GoNav@FC18XIQFc%Jxel{Hl$QErHqN?|1;UH-~_7|gmo-w_lN z@~&-;X8r!}=dAEj%%Fe=$jyCpa(dd--Y$o2UDk)J-R^!zn6K57xUC(dB2Wr15LRuO z6j!OtSr94)Rlx5zHCbC^?F-kgH;SPhh%;7kReY13A3{iTxZDO(Pl zP!J&%L}5jB(itb_soqq0?@Ro>9`_Q@ky4gwQl>G>!nN$M*S&P%r2pKdf+J#Z4~D{} zZA4i{m;({4W;Cm>`?An8|9+zm~A`6inda-y(>t8C; ze|G+q2&mK$l4oN*cQkNu^_r&#xzGh-n{po6H!cWB|GEue-MPy^mOa zH-efb;oJXgP9^A?vfM|{-t2l@A@bR-GuO@CO*zMLo13ef>I{*+g5PUbd#nbpRMRl2 zMEv6liDHO843$ySQd3{9`KP;BptFoB;DMesd|#an=Hkig9v)uIR4)$*w?x8=aa%S_mCESa5SdRW&#krTgagZ)sAU$w$iSTunVipmC$QRv1PEz@pt7i>IL<~1c z<3sbpO6h$&E_xZ?Tg}Y(yySAEZ&7vJpU`7riqj$|TOALZh@;^ds%DVEB!3zkGa5bI zmwvPP9<)$IhPBjq9Zf9j-Df6DpFP>bScSy?ng28z_FKxrjkY_)6K2SVK(E9Fh zN#`FMy97OABUCO)Jy!E@^}hwTnmLwxvfW=i+ftOz=d2JcE4`j%=l!VUvA2!fp+YlQ zVto%^l20YiC^hgoaUH5nleNL~?!4+=lEGewa?lE=8sWYx^gyBeqifVD z^Lsy6QQe)*>5aW{Xi+X$WEA6-zl?&0R(6)6nFap!&mcdTR#K}i0D40G%aHCE7pg)& z4twZR63Cy9Oh`0fnUr1`IFHK2+w^Bkz)Tc7c}jAcke>7 zS#Qc3aNQ*;uC?se5qkORV|jF>0nEC4QJY?o7?ccWW){(|Vn`j_^4+PbCT2C0uby&X zr$s~t1k^XstBH5C+Q8^QRkp-o-s^}TKh~gB^7&|Y3CDfvKK5Q2cC>V3>DTR(szvNf zMJ4L%T3T9UP6sII=r^%z%4q3W@@O3x#GB@+S;Es)SwCEIaiDtiNPxGeu4;V#d8H-p zQt>CW+q|=MjgC3lljD}CSwZO7ks*`AAjb91x6eDDT8fB(m#-^OQy4+3)% zUguyaIBL_=Y%2wtfe7-I3k)-S;h}xX6=-ifL=x-q%LnJ)O_iT#=yh-Hsw8b zm99gUhrfRS+31?ikKD&R%r>`bA1iP;pL}&zW;C>oFKcSjE#&lnH=WXC9{Ln!GW2u1 
zpH=*pUb=jzqBZu4Q^nybVOgd9hc~#>;nfs35&Zo8szv5ahui&1ez`2*Yy{cBZ3GkL z#;{qATz)N*c<)gz<1)W;$56k!!SuLdhSybVNm736h$PxR8*maWN}1E@-A;X$A1K+{ zS#?4J@sevTw@-9+&TH#HC@-m}M%!dqBd2@M%=-B54|BGl;HarCtxDC)Z_duXCK){| zwzM#E>7`M~Rkir4{_N~Z6qwv0OtnWhd%Y_I+teI$&GyOc^MjyJeA>XToNITnwA876 zud_@_jH`;kR)-JtUuULWHnG$FY8zt*N?bEFi^Dx_Khhmr&=W9dfLz}$3BUD_-nii9 zNsDjazTH2dL3*l5;TsR}w|3^KRI1s7eAH=E81Oc_I>cWcMh1Rfn4cF_R19^_Clq;R2t=$eR! z?Niwhnw~2eZQnx7!oh7RW)BL)H$B--p8P_uXn0=mL4}|G?%4tLP@##!k^?cDmHnOu z_O2VLKh@Nl!`H9s`g0=}=`^c|JmL>#>*9NbQm?CmnaN&JhdMKsnBWtwG4*mAo!IfTPVFw4S=rv0jv0SD!1?8*=w1Z=7sPgP~G7YXud|s9C zZZlLGEz-#^>3z*=oHxwd2x{6lv($Jc@Hk8x)7OPUKb5BXy9%p-4QZUZ+^i^)rOIO4 zr(>^`&3#@uzQip@~nt|HM#Ib)Tr7 zaj|4BCR&!oBRi5{0@9aO!+DBhyf%@w^>vx0Sr-31tkLyV!TAqZv7)3TpCAAw%9?+wIl~sXJQ+>0?!B}xxqFY1haEVUtJ`DP$2qr;eJ3t|ok~Sfvm;FO zNIXFZqo`g&a_!)W<=oQ3esKnU{+ag6D9053BDOSZx4ZUxH~x_7DSJacUeWiZJQ59o zLlS3X7c0x>bks~CXoYsAX6(r#At7=Dx|BcRlDvVKnVZGM!Kr})=0>w*2THE)E>|&q zj!fP)Wq(cZk&dIe!(B@n45*PwHkvdKk)L2qo1`fX%`9E1I=n>e=`t^;(P2ZyHVE_Q z*pk0z-BYB>@c)6 zM+(Pw+(cp|xXAqYI-Z{> z(wSm}{eZ@#6Cj3%Nt%?r_v-XU3LzzbET5(6H8z3&;^JE6zGkr+QPbPsIaY}T^cJHj zVSZTrf&TZ^?yB1Z7auw1c?4_Md5(i*)z~Oso*c zjaqso#LiY`Jq@H$B11;cQ*)QC_o`(AK41^7;F{ z)8p%do0g$D%5CAE`Aa&QfSxH$EH6s}YBS(I@i|{)xhR?uQDEGM2e=G2qrsA$G(n}Y zG*y&ATNx$~MAmSp;?wXD@Y6UT^+C<}$c28yBb7^t&}+a_beXMz*9AHC9!D@((EgKs zl289Lt1#<`P|(Z#!l?J*!-oe&tuef6ycJ)1dgReDiGINU!uz0gI97ckDoWr&x3-P( zlDl!;J*(PQyhH2B6EqKW%)3z71R$}q+|dfkh^b>c5pa$2o<^5@n%Kob)1}Y&!)6bM zU+^#}<@-XtNu*aW{TcmN8j(b~(RW?Oc6;OIYG(rtRj6LZ>R4>HF_VzTOlF_Uhq^_>cyK%0=Y;>wev_>5i z>3fLy-i*zI&yS%*_6_h!HglV3G}crt|9B7b-CNk&dCHy3IqDbUzcNNrWzPO+PDJdS z^b4`Lr806w`{$4AGjX0DWa8sukcUgQJ5jWqeC+MGeoL{QQ&C}kD&eK%MxdTgc~4~6HHRa?L)F1sFKz1RwyPYctIrbIw+m}W198yE(E~^s`L;9 zOLd_|5cwtxR*btuf`mf^yHz}-6n#!pUDGMX_+zr+D?OIg$+3+xG4TfZ-RBK}xV~J@ zD&}xM;~959=Z%k#U%I0d423uhh6(||J&_0}=pU06^Uhs-;5C?U#B9)v*WgFmve1EFGtx(OEyc;vQ1>qCBgRZsI(pPnGSXzaaV`-KV7XuZ*}u6waB3`>fvh8q%d zzD_LfZnVDc<@C_UA{&1-u(~iJfm0I3@W;cvP0$_%jJoge2N1T%We0O1tt4f&z3k_ 
z!!8cHwUNP6%SOLGiyIFkpMIzK?GPR=MPw_LtLFqhREp)WPT02DqiyQwFlzn6Sl=KW z#St)8>0&$c^Sb}07dANn`T-!`qsjOvwW?tFwA)d19F*mw!~Dv}tEcy!=A#{XBp6E8 z>#PSW5@lQuD~`U|iJk-APss1aqoSe$HRALE+qqjgXQQM0{cwD-NS|eYDp%=gEXbU{ z);sNA(%!Pz+YvimB|)P!@gN|cEYWSvn%go_#8(&6#rAoXO5fw{^#}d3s+{kcjoMQq zvs(_g=V4~F=f>4S)A!w9t?M#q=w<1@E-6?&4_)Qf<8a%;L%xrtUO|NMSmqVbAb+!b zxrz)>27s!*xt%(L{x1R~&gqnMUmLW9284#93qT_%tzzaxO03&dPuj57oB$M2#iCI% z_*&T_U80r#Xf}AJ{=9*UyV#$j_3=%X+3fs$BM>RXo<5z~n5>hgVfg05xqNa{emi(=Lxo*xvxu2OM5VQ4t^Dk`PgM044Mq{V2G&A^HT6fwan~^DqwUVs5gKy`N?>26%Ji$V6!?@pDG7<@n_BDqX9s$v z=WI|uyn=zD2j&0{zy(0zTGmF(dxgNOse1*!&8e|Bsv0t90~K6h$j4J;V&`@ie<4;z z%fmo$9x5seCt5(=k+HF10tg9!Tgk!{HDuBJC+~-J)(3?%5_$~Hf%Y%3$NnHZ-D%9K zIS$l0KzZRrm1p1D+JW)?&vP_+k-3|tN_M5oz9Kd~^Y9~hVjxk-!gqkQfl0A4uJhx= z^@HQ%=ZPYia5_I-93Vl_c6WC-?M)Yk)eC#5K@<9pn21Qi3yU=BaV=mjP%xc+b&{o8 zs0*ZuBtu!Q3+?o(1#vn;Nn;vNkpX6zE!`CzI^xun!h+N?C2=R*z z2naABBfhjhUv?kNsa;i7%X7awa9Tl^krVZ-+;QCj-L*Ho7S>U8(!2x0p9>LgZANCVG3M$?kEd|3>uXRio#K8WahhO z@K>y>3$`N%YJrlA(CZJr=11oHdk^^{Qhe zeF}>2DKHff!D2NQ1-MdozLCt)-fG5BOBNMaRrw0^l@P4_zqq0$_)9*085%Ti!CD4E zHQwBuj@w}+4u0M9{rhw;&U}?#+?z?s$siR!-x^9M4x7VLOuOYenxc_E7n4|BU0rXa zdceI%7AB@ZIJf?%8q_X@EPg(1%Y%)*N9*qA$g{!Q%%`3KfzKoBsVDQ>lMhfPCm=uR z&(`?p?@t9V3cPDhk5B{B$Zq)1Z-CCjvx7HH26{@p1dckrXs09C2?IVqh_%4NJOMaw zVZ6G!OzrX=nk0UIGNaSvtoxK&vW_7%jECP9tP&k)0rRM;sw%J?jezaWIShLONGeIN zS;+L3d(wn}`qQ<%OSW2E7joA&A@ExSUAz7MQ8rdg&(%E~%o_uRW=j4M-;`)fUeHTiz0eWEJ50FX z;{z?w4}Pj_3Gl{;sEUU*k#pL3$|Z`x)>^8lrjQK%tje+&QWhmBjDv^w#FvP5f=CiO zmGiS^{miK9CTb#->^`1unNe7>J`lq(N{&e~=DP=mPk2ih#tm+t1gI~mV za8f77Mnfq)EO-Z7+^anH2KIxIXycx3F9o%cXB9AE^DL3zSlgRWA8 z8dbpE4 zGvP)tI`yA!0#g781q{ebf|rgCa^q~q$yCn!o)}}M(wKZ7UQvO?aGz5J(zrqdaR`o$ z_ia2yNG>EFdIuYmWYx!DdrZZiq*eX|RxpIB50z>lt9i2cPz&Z57UF;w z$s5WFgPz^kQw7=oNEwj$CpEKFSr6ko2f9cm@r-DJHmp_NG9c2eWTKie&F8LqWJRfy zVhiNB?Y};I2|cFjnACQ;%V(QC4r^k9U#DI3m(!Db-ch%84fx)VJ_;i9(0r27 zo4;C2ekE1aSGv+mJ&%F=^v1ATq zIUf<8?u!=Za{55#p;fc6OhajzuR2g=gI`Eo^ws3plI*xjLmK!ty^R~Dq;N8kd_jNr 
zlj-;B+0GP@2_RXmNUlczPAx60?f$rXtUuq;!h6Fuj<-2}5e~(_vN+~K0(zZSwG=7u z--n6kNaS|c8F(CA(O6KrSIiL3P@wFvdm5M4WlZzqxU%@8w|An`Hf`*Buzk$Sodaqj z*3=9V+Hz7Bq7{c@g;Bof&tffx#phOZTwG-%wX}B(J7U^`sh{8Cb})Z;paqfd_ve3Q z4L4RIUees>_(18)OZAw?evZy$F}!_vkucnoJzdLO-3e;#cX;o49Ir1ol>WFsg2W9j3)rqJd->wjRY{`b7|1noY;&ztEXagFW!aeqk zl@+-}fBvkyN7|2$5`HbG;kpqFE)uk?l;0SCO;b>nf?b88fDq$#M}Pj9tdN+-KgU@a zmF(pdCX^WEEYifdoGy2E!qW6;+*CC+r2$UJU2?aBJ7CRqA;0+1NUOC0(#e5gT5jQh zGC#Pgiwp*V_Zbu#+5igSKh&%$>-|W%Urv3b`ta$K?xrq<-hgFszH!&xqupgu2vEXV zOahsS8v>e+dX=;owQCS$FDFpINY9+Sy$!+MWS!S9prQZ+xHg8@CCZ;b`+vzCZsZ^T z|AjeBgO#O-f%LZ(S-pyx&o_w`XarvJi>|7JOTXzK8_R%qQJz%talAOM&&gXse@47s zrAquThLnJypq5rn{(Pf#w{$dg#b(HlIPR1Ym}90cn!YyE5W?^Fz33)0 zVvm=d86UOs!p~f$J~xB*er&AcwWIpa+AZuo3Duc7Z$PHiR-WE*jVz|S6f$*wjCmk z)+o-P*_JTw>cQ5grb)1ghTlD}n2(eM!1mbQ?bp&{3Qv8Wf(5CLVS5y(zM&zLUc;wa1NmUDy1`}n6!l{!os3!mj=%#PY5Y~Iu-`XoQiLy@tbsKXOv|q zYlVC!x^~pUPsnF^!@v5F5L3Ppe%0U*P3)U(!4v1Q#sMgQy-{lx zM0gq(raP(@W|wqsD!zE}2$ClBIH-Zl%*_P`1$juPBwCGE!AfFf^fsTJ9z(Eu5xB@p z8hEx-Kdx{&tcYoABd0f8j+W&)u4@{(1A`c{0Am&+)4oh`_#Wt(0V)84jH3NhAhoU7 zYNle45s)~gs8htMOFrd3_l_~V1%hvr0wtR?uAt>gFn%7xCj8HyJA_FWy{ zEYGW~X)j(xR^T-%ht--P#Q~!2bz)-Lq)Si21%-rEip&Y~^75WJ3qbH$0*l2JwjqM~ zh{&6YiqIF`-@5;qbIqLuy-VMT*|$n~v|b{~7&h1mrn?4G-;)BwOFWe}b8fWOC7z2Y zCx$L%GZ}oP%s=-ZH+CPthvwxRieB<8R#2!i*&+`Q|JmmZv>}rASU#WHMz7A?sd28* z`-?Moh{o-)tIiZUjSE>UNyQzUO+Vt=E1>%g)!>Qg0)JeG0zLN5+GFpz$!Eh#zl+LP zcaW7EYWZ))2s^%d)LfCouIVEEnyXg&`Kr09txI#ykWXye%cCmHClI|+-rO5zP5rU1 z+-<)kRX%Nbo@Q1jUzNj_!w&$$xZaR;kD@{OILq`N5?l-v`3yy0IP|$MH+5SLZA%$) zu&-T{XV<{>uKx~s&I7ug)@M}#z`qBAr^^RFKL>QHFnYDQs8m`iDlaX!!*@W3e&o2O zZq$&4lduKx|G^Q_CadP`lv(f}p4<;o5CwC*y)SOKGH@lGP;7P2KGoHc^u%oJ%WW?l zc0sW$=k_EzaB`i*K2iL(Xs;#3Q<^&3zDnd~XlA6{4zuT%(ye3Aat^-ov!m(0;p(gJ z!(&2;eb?Lfz8w{Cp`9oio7XPZ zF@Cwal{;)6#a9tYb0_|mD{7Ar?N_KANXPPF)&$%Wy?pud#^t!Z#a~{)KKKO-rc0Q_ z72VG16j}}z_?uA&nT?GNa5>C}i+s!1tJHWiZvm(d5YF}{67`0h^O@|6)5n&L=DU8a zieAw2AuB68QPBb2AR9UEr`~VhURZJ9mJFqfhlT|tyY=AUAlG_Y#D8_yxf0FbFRtHr 
zZ{W7HC|^}4AKG<8_kYTBX`kplTV!}{?(L(h69lR|iAcWaC#`R^PYfVfuyO2o95qlK zEez91R%wm8E9zX)(94&WRay;!GeS0uLgIHA-P%IHQL*fZ%qLo^4qy9XR5i$dcCW9B zGi5cg-akFbJ8{96KBP7BTS*qi?m;OY^*G#*a`zUw5q++Zu?{LCh!#H{JemQsse5WNee}4o_n3)gG^Mg`{Ef43#@Kz#J-ISDN0rbH{FdESf6zRbXoB14@~gYr^}7Vbv$^{Q1wmKsLJB zL>2o;|0fP?TFzGokGLJz^kFO|Fo7Y#sP9gagcc=-wPHvV%m=@{16#^$Gb0AI%}IwG zWT66Crlld4dfAdXl_sL8tJSJ*ALOOiIJMZ57F1UH9X5f1W=t}fUh_l!J=bx>1nx}T z^GK~HiS{A4N?uLUVPn4X85mGTT$LL9;-J;gPv#TvGKf3*;ggW-iq$32VABu`uv0+q1)GXsisl+qA%lz;IH!S7TiK}gV*Nw z1V=0vsXw+0zr0jF+78Vpi$jGOe=#QDEsk6VCu(3$sf193@;Z?@8*O?2x!_|>HiSr} z-!JzD9P(nq*FW-`G7tA9`l`yJJo2ZahB5B^=)L-%oLOX@W$icb2DvsBfxtiY>0L=b zgu>pSL7I39b!&%xQ8i-)F@oU66iO$dhKjLbF(Ugl{O^h1LVtj*U8uQSpAcrf~<(@t1^Xk9%z@Lq954u|j_la^rORVjvEov1=$^X2WA8klC zFs=DkAmwV!1>*u${I=NDF3Nvv&K@B_^23^0mo$z4%gzNiW1oL%_lvDsNA6zXETX46 zhtZXCUpQy~VvYP~(V=(wGcz~43+!Fy*$A-JC;Gdp5wiIj7G%-9BZE{l4|nsbNmm?Z zSL=X(TRfaMXg*-h2E7kJbP?r0Q7=Z=+Qg>&YY*p1`*+83lnCIx9Zwpw{(og_ysPuf@ zPM>>KyEueI-JU^VLi0NW8SrBxWGX70OUb&;%pxz@oiIlRXAZ{U3&ROh{vp&fnbrt% zm&8lV9VifLS*oo54q0l+nNpOK|Ja8}TpGFFn*jBoNMLdz%G<6ILBn2jejBUwoqt~x z4&((Dy)#;0_r)a|V`0AtzMWgwCFG*+`|m8%a6`d~qN@=h_*7wTy|4P;Vv+YxvwOK7$FOxj(2i$|_ zLhf3SeQE#a-*EO{M?{?*d9@6+D=1l-8gB)zWS(JYxFus$4)+^^1=e zy_Fc(!)8w^v@eFM3?f0k@Q}7{t^U7RtpVnT)9bIch4kFMj7E?f7SA1r(|=(srza$& z`d!EWV_U65kY*v%(A!oc8bz(p`Bp9K(r1A1jW{oqYc%70xXO^}fJsK)9s34*zoEZB zb**ZaW@x%pGb;jn{;ItguUHNPrFshBnT{ZlV- zQ){zM{gUrV!2~;-#o!erPgA8tpEciIwV|PnTRB(%W{Z;LZT@NjR|2Ja)BHSULw;L# z&GyVvN##mQZ`;^vtD*L_5|8%0D@H*odCtthO@IA$2G32UA`m|EoBp9{cQ;d%UVo;) zVN2gFH_ei_$p4=8zm+NM-Vq)?h`gaeetNyWiX^gq0DL1cp@X_sxOj!+W2{W#du(kM zMLAzQ!CEtw*N+do1X@B)t2gdQ(#G;HSZr{K}018H2 z7me4*9NFu_;VvX!pK9wy!Z!|=H6CbROc8m)c+~W`Cqpd~fWs+0mhSWTW5R5)ZYX&!SlxxTqA_HgU+~)^b8YwC=6diN@{tOPQs+gU0_c9AUAZG+eYj;%WHsKtGMFFE@DmVuISM3g1DT6 zr3gDeW0^)Q$26~ypU_djz5sP8pmafCho3<^;Y3fwmRgsHdNeUV?=78?{RJ1*VhGu8Ze`VLmJbV&8}ns@70? 
zHK>#mgfVELPe9G}$B!Smwm>4bvFVBY>(q&II*K1UQR61(JnPdUL+ls6OGjy54PVx7 zv+QRZ6S-$09FEs^>$tJom$p*g{N`Utq_O+%ROkLKrR z>69xzd53lP)LDkN0>dwde4fs*VMTbwXK;Om`PQ|bbd~7*`9N{;QXhYR?Alrdlt*fG z>e7-%V+_?ydV6>A8Pp^--B6&3&SL!`VDXPz=A;nDzsfF(B)%C}w9?;`X;H4SLFMFh zT9T45U^1}f(xInUySD$OM&`0yS)!VG8U(N%g3g-d({Pl_JrhV6D z2jzrbTJFfMt=bfey1$%0`f8wWbMk|bs0TwR<@uN&rZaDE#=A#02Z=oF&JQ|#$(n=w z`2~C9zAfh+9o#BeAG;iLi|cil3|4|^h`^M8>@#dy1>G&h);V?4exDbhSjRS>_u_;l zye-Dysb`5tG>_A>(V5AK$4Zq(f84fN>8I>*1@ zlpLKVMVi|Yj})%rOhwU#43Wgyk5r-wzlft;6*I&oz}`+Cd_)V)If>pTc(^&->4j;X#C#QADYb<**IRRs*sQ2GKE}y!!y&oS(-4oJXu?vrdH{J3=>^83&;xm^Hs|sv9e)kc~?L?MisEiJ=waEB; z<^Ic1yb@K#3}4L1;$QHtfd`nfhA$|ut&m=FCJGZT1`~7cPdAia)u8x|1cWr5?=;W2 z{}Tu3P3SWI^lcf^)K43KcX5sEQfP=|n z`)iIkzHHq4@hWMp)PcEAVB5L#l&uw0qt7X!x;obO)RYe+I0}oJhCb2--CUgFa)D}9 zjM3D<~Jaevm?BL@N8>Eztp&qIZRy`Y2k z=Zrpi*Jn?sKU?b&J&|bCQ`v9SK$IJ*u94dvh*-h=91~Dmu@+*-71P@Dj>5-46eZ9pMJlBIGC|VqPB;q=Z<+CHwk3;k8T2Dcx2v* z50NqSyJ=UsQ@1oXetWu`yQftNbnguFkEro~Kbvq7$?FQigA<0%1mmNCYAi1Y;cZqK zRWyZ@klj57A=RR9eDjX8^JN?_x3rp%)PyEib()Xb@D)i7PZdU0ox~A)TtIz=u>|B= zTKxt;fQ17b<1NX{H9#PHKgR2l8VO_?MfUiA(21j#ftVLLNRNR2&wg55mjk6ViCr#C z##_EDmOVHtZ2qnbKH26f8BL$0Q|KMEzR73SpAIN}6;Ao(MCT`N0Gr~$|6mm}-s+Lb z`)xlSd~+f{vGp?=e(tKiENb|=+W*^F8likfLNNYI`%m5Y_6#T0RO-`KA$^#)d9L|U zd=JO7x0<~t7qA>06BBP)Mw5cuZbQAxH0ev6R-XOQ1wGS6Rw0FbVwxCHYq;Mk z)wiGEK_5Qr%DY47C;p zo~NTW%ZEgh2Xwav8X0-d!@pUSiBIld6j{yD+i1nxnm; z^suF+Nvz(S=Kfbh4dCb<*w>mv;n7bqrO`0|#`d8iR@?9;fiTCll(wH(?XI6bi6e_a z9gH-DB<1Lp|0RpQjLks9bdM(Kd5UxsV@2IhMuza-rK1F!t(hh=R|-P* zc2*aGf2VPB07@@`3l#ijktx0eloK3 zy^L07V^_5_47-z1-K$^zi+}x$N>CpoV<2cz|22K=_o30$R`A*SMGPDgReGL&nNihJ zF*pZED(pzbD~NnvCrm7Xr2(ArCn)X z5etS2O2g0@9!B#=!;Wi)V02G7j;SKp%><8&ED%3`wthEfPg> zn6A7ZJ@9U_x&mqa_5GnN)ynz!!eR4vpMGF5M)YhOJ%EbYp4sS9{ZMQ6Q`I|`Dcb~l z90Y$m55eHIVfJ_vQu~bkRK<&sD*4aBTjcwFu~vm>xl5LWxek%N;V2l|%^h{7jGR{g z+96Kk5j5XR?^u*LA6%n9DqB*XR@PshIfiNUyeI1>vbFq!%UpVUoGk;2cblb<8q1Y_ z4ad=>f>})J3&a^JCn$TxTcKYmYhhSa(|yb8eva-{bc``5tWId)V>yc2yL9 
zd)lvM-Xj$07}YxV@nZTS`kFUF`QfZNm)^ZoX=n3JdaYolHCY|xf&B5gvlB#VX~+g` zmDvj*$|8IIx3(H%E4!^)~KYOjH$BS6~P+xBNQ<&CFP zY7Y|p<%Ydwcx{EuM>0-GY{;*zC1)CyrPGcDeA|lC`pUE!np%Q<0p{uAB_u0sNg8VM zs1Cq@0V4N`L&xhZ5B-`?&8fMMyzPk_x)#^M4u=ou-d{U<7tI({`sy&%*_d&4$Vc~N zd7kk6$Y~8^Gl+VYMOx-OqE8jg?r`f-T043XT)BefSfo|9L)E2r-c*O})->K=v?;JI zqf)F>AsSZ*7ti886MmAS#8Y(fXGe8wjO%#U>Dd0p<`tWwb-&|4$u#$^23jL0vlU@G zXRWh=y}ZVv-_JPJ9vqF$owItZcgOOR56SGj5knob>m+&p3!@x;+#!M9Y8)e53{xgbf5ovUMVf~3N;2Re!+wpvTXtQ&@D5?PQIC+Mj*(I8{10-q_Bhcw930t}L0tC}rB368 z&OH@>`Wr!xvPlx85v-&LfuIKL>o~GO1qFKTj4u1`UeacCu=FvmztR-)d8LT>7rNG# ziN9BBoPF|_!Evr+Xp6*}u~DsP{PKwgcr654uL7|7Pd)=6!DzQ2J2f5B=(aT@uIYAY z(B0jQ$W_gH`hRC)Ox4QYpQ7bjW#K$j{RKad~fLGpfhpf5*eX zgZUZ`e!v+t0%9w2Y!s3yGBcx(0Mr?nJJ$f5N1f=57pgcrS_a%6_){?U4KLrYO&3lY za-7>=;K@i1^4$Ii$Jl6f*J8eKvs-l0sXH1D8VimZ zOg6!$ypbp~ z=AFj#@@t*n2_Fw+9=t?5jbCEpMH7t2ABp+F#Qk7%X>guIi=1`PC{FliT#fMjEal#klUcY{gfIhP>V;1CW9~j5=?AbHurmQh7g$q-_(+2Jgp09||%a%)u}QP{n_;6Olk>h*2ya-XKR0Kf7cwsHTH(diqCm~PHWD%H=KbY$+E9h#Pw zDagws3mDlu^bGk7hS_CJb$NQ5c1u0tqv|>Up6)m+^_hx~G~Qq3Xjx*EL`ikD5;}6* zy{wvr^<0Nf#e^EotM9SZqYEz%UtQ2naaO*Ec(@{ESf4Ku5IW@8uQTCa7qx$fCV8uZ z&yuZ?V(kAm1YzcySV;dNMz&;qovGu_!gEPU?uPFMb+_1z@ARg#UecZnykobraaZb7 z^NrxJz)fc#H`i+Y$?kYhcIVg9j^`I&+kPA#D49LqJkXQe+|FnpI~~urZb8fJ6ZCm6 z0H6i|IOI9?DK^C{mv@ciYJy@fa3SOn+9`71`q4Swx%le9z#4CVY};=;kCcPOi{HQ1 z=tpiyd!zMc_hWnp@>*2hbALmrOI*WNVURY2I9+@G{JGQl={}MK4~$%T2X1AR}-6bGOD%~mF-7PM<8y205?uI*Y z@BKUH+HF~f z;_#@|y16`1kKDlv=(hprPw=(x7QF7RNPq~yE;Pe${=|`bfTsna|H3O`rs9dMM&ZH@eeYgj9_0@21br+fk@N79p+K$<7d917bJJ)QVEhnux-rg9k zndac@7){90omJ3YRaUc056l}qpSn636Bc2yWedIzV;Gr#ooab<=wDQJ$U1u$N^&$$ zmnsY^BT+Y~&`n7m$1G5I!V0UJcxSMeT>(=jE^mo2j?Jf&mH$$fOE9-_olEc9mSZa{K{VNc-+BD*;(BYbnD)OmlU$XeG zlC2}kE*Gf@^|=9_mXNSQ@&vR`=?7v7*uG)lKFPBCHOY)v;$?mpTc>-5rKJ!w#tY%L z^3+zVaxNH!6ttIOH~&?Arv3#}%TH^sr|YAcO2xKKui=CkP)Rs3x&9J?tTDLy5ySXi zvq9Xz3X8ry#nI$$ma1pZNW7{^>c$TKh)Mxg*ae`R-mkEndl1WgHR9li0d?YNw|>tR zNlgNygv4-3E$(9yK1o1`1iK#&3W(d~u}PLpN3&S2s2-bRD*>wy@g>X2sZSG;X9mE|O%l&dSsdsKe7Id_tEG-g5-4 
zDWH+IWY9d6J#hoso~^9^1u_5oyi06h`k(VInE76N7``li@>vOUw6SdlR@8h~S-Ky_ zS#m>7*(BZvTIu3h+hdoP${ToSQf;HW6f}&j(TMx9e)4rKa*r3)GbR9#)L-{Ok?ppLFc`FCcDmg~}l! zUFpsM*@}jqD{P&!MI^bdRMz6b%s@&Vs4w^%O>n&&yIxvK>Kvs*&|!g%%^Q@r&Re>2=q$*Q9XkzcPA zM^+`aBaMH3C`UjBQdS;u?yzb%3){Uw$98YT2Nfcb&jUa3K^`bFNGNsEk1mfl+u>vR zCct-63dxktwnI6Eu~|$<0;|DyAY1Iv(*;AgS&uUqT(<*QVADr76T{Yl^HVa3>*UL* zx;;|&?3M{4Fa!7kiPXBsy%o>vW=s2CstABU zVB|I5+*B@lp&1T+^f6GE#N9>O@ZoU&9WC4vPcaa@-R1p zG{vZwYu`R?T;Q%m+faLQV$5C1sd}9Z9inOr#N}+IJv6xK5CgXdQ*sTzGb8FPv0TO_ z?QT~~1P%GDA2kL_(3V-uZdXszZTYO>g`?&c>(z{m93gNP3EB4k{Y4~$P)ihOT^xuk z8;4MR!)Q$Rv{=Ufys;PnzjJvSCpkN3b$>*}Wgw9Lls)kf&G8vJbHWiQ;M&892s+4pBJSXEWI$3!*~ir*0VHEH!J zja%wj_T&ybUy6Lhn-TnvjjlB79+oo)UK--Oz2ii8hQl$sl9$ZEQ~W3CsV_Af6VTuM zKM8`tBAM4{?~ILK)2UaHGcynCb9>(lLdOfbQU!ef=)#veAiyLM4HS?p;4}jyIYD5w z_k=I!6Rq~Wy!3yUYemccgj4(b`3(YN&=HUN7a;>l3+km4gog$Vh zQ%ro{{eC@CFC$n(jispbIfk-5Oqt`x2TOT}zqy4#n`Y~#*ajUj0$JqL$2*+n_^sj- z`Ohgf&{1CH^<30Q2z};gDzA&yg$om6AiXm=Iac&_TERRFCksbv`{ao7`BhYz=Q%Bh z#e)WiQt^f7sUnO&jcB&?iG7-T{JW##0mp55xNIhuCx$SZHOsok0Ns8Q$-yibaA1I| zVrnxc2YU`bkXfhs=2Sh*q70km!XWjKO(x1!@kP_pi!e{QgGl>rtI9xy&+lg4S8E9y zD|!xLMBs7&9=Jom$V?m2tZhoGY5b%7lK-3vZivIo-7d+_l@t$g>29LFf7iJ@R(MbX zda;1JW23pkX1)GlK<>f&T|(&Hx)fsxhC*$qM4|AiUHZM0LXx#CE~n!j$D z2Iblzn0qX&akGHyLe@AwBCJlZN+pwj{&C( ztXBkeR|>Ee!Cc+ien_BcZy(uiF1Dw&&nSrkbsKc`s<$Tql{ES~QBr;&#imLOkO!BP zK##OQvpyQAGR5~5AIfe1(}fgOjVLIf1wa+g4$86a1$nr^KNQ(GfS8epDPFkt$H|gT zX}$Fg;Q0-hfbV_K)c8>`GRAlV|7rv{8_Q(fJB2cSj}Q*}^Ez(g(`3K&y@VutwXdA$ zs8Qlt9KD0Y;?~b}hF`r^sJPo)Zjrm$zN!{~()3{Tu^i}PrtqLLV9JiqU*c9hyTJ(K zTfq0qdtG1Ef|T;+C@JCxqHY+?)-4~S?w7K;2I64XcdEri@=Xi*Iqvt(ivmMsBEh6q z2V*dX7QOIE;&#$2S7TaT*R6ijs+2SYICUVUSn#VEHlPy&f4F#`=a?~T!%;sspm!<* zhzTH%`d*%N_VrNR*2q(n14qgZx6{bqplq>$P91<72234R(CG|zIZ(ZCFV;Q^#s8qP zqtgSyvADz9{f~Z{cvfvJhqFm-H!A?MRnlP`!*2y<>gr3_pBvStP z>{0g+zEsKB$|Zf{8I{J46B9WZH1h`q*LW3grtg-|v}edQ$MW7O8n`}LlaXZ}L?FJK zR2)xr84~Vj5;j zDbPQBUZ~zOzJ}ZRw>pUe1MTq|04q#2*>T*1f}vCVLk+6fMvQD^EEwfe?otsBXrxHO 
z*mty3=$?GaO}{R#4xYCB#WDWgOSw#L%U@7*qTx5k=#TP}BgVw$M>6gYL{chtO`{LD0GRsXy}mYImhBFcqt>-*q9J7hiQC_T7p zMpd9mWLjPPzOSm1_cuCF3y$q>+^Yq@27XAIg6Hg^57y`+0&a7-Xo!jvU^nahTJ9$I z0Oz6bkL!!RaR#VagaWDRyb|^MT2`2JE=zd?xr&G?7vI8vxSpdLt7`M6+nT6BEBVU8 z+`_PcW{i3lL8WkxE@NyL06PKN(Kv`%&jKjY5)#~g1SGqE_uMWk6?~8PP9N+KTD=9oZ9`c1i z2l27eRgRcvA9#G8^>{lNcB*Awd;imkowbyxqAstFnK*sJxG3h3U!!EEpSBM2XCs+e zZjuD^eOw4e3xST20>~_uwCWYtL7y7m-F=9_BeA;L5(Y>AA6kq$+Jj1tdH3>3at+TWQuEjFISCDYKa4+zpv}-hSazJ zH@@B+TbvR&ZJOV#qlD6PBE36%ayfX&WPmbPfnjuTQvBIV_8|lS+J9|Ncli4`=Y9F| z7Z%QEnB{Uds^`l)TVe+dB7U3r2g+H(*BeG@5;&=*!nlH1qOi8+Lu}`zaQ>V79;Gj9nNDBqmXElLTI3O zYZoh2&!-X9Mn75(sAVI4qLDY;2;sVY!C7>fKXoD7A;T-EpuhoVd&v{!ki4X?(C8wd-Of{q-4tsQ3C60N~8-kAKaEb*)QdIE=T3om^9@Isf( zy;%)L-MuV7^iv%>*&f);L2FytZNkFpu=9%XMA<;|bFtto54aFsQAo9|<{ZEiKVh5$ zesq9HMw)BVmvdw#hpKv13z%>1e=dNKl^*ez1rjt4G8g1ppSi}y3e$ORZ+U|gvMl|9 zGUAF7=z&T;y*fS~UMRzQ_U+F+0#hYP4lRqzw80eipd^9A3Z(_g0ZE#J0EnBgnRSRw z%AmwQll)(W2ZQsv(^lZ+UsPBhc@N+&9BMX&so#yI(e`x$?z4e z&n=`kdapL|u#>7Yk$nEWf{CXtO3t4cG@5$Xoxzb%TlSOhzc>mTDFvo2#wahnR9mVE z(WKW?dczzyny@=MAC@l4G ze=2P7J-+41*v;i}f3$x??U0YPWC#excyE{x$C>Ug@6Q8#SCS?p@I*E}f#pZj>F8_! zQi^M8@kjkaQs!!06G{3%FOuY|`>PdKFJ_0p$HT`k{ry}9^L^{Gm+Y=X!S*aHMez(= z*6e@AS7ivHwhvCzA^C1-pMn342vR7rg_|n;A6f6$8J#Yr^A##k05_+I`CO!7)T56j z14Qfc)FqS)(akv_EE8ZBmJHsOK@$THs4f_ggXY$f(w|;+%G(~Mv}m$+#VuU#8(7I(^kSK;3v>V-kMaN!;Hli)D%*i}wxxRB#A`#8x9jTYenRKhJe5 zHXq^3?6(xYAFPDE-a?hGr^LIm#&PEz*U!OP!o9al*$k*Td@8@ZM3o0K6VOYa`QR_I z)$NT8pnz=#Acji_8CxkIw>=}>d8M@pI8<`#t!e{Lor^@_a~R}zHlch+sE7OAq$;+3 zq-lOTsx5V>2Y6bXMM9mb@HUGTA&k0`L(FT&hK7i4AqT!I~eo)gsj2(Fj-qLGm&Xesgq${s$j!Ii z1^yV5`%seW!!6tBLe2M{0!^3nMS&fL`bHXrcDQt(#u<)OD{Xx3QnoTOzD?B+aI%v! 
z{dl%Dq0gRx@S)B+`J$O#f9wH%X+DSLr=HNA>jrZ)7QE}WXrh@?oEsve zPllzR#?TGvG+KI*o1JIZqgE)_WHa>Xf(7`DF}d1Y*5WnQ$4+9Trnb>$ge1SoH)vPAg z)%EkImfl8-PcY%>TeXnrc4#QVAP-MsP;N{j!i3*1p*GX8UpkE6wZ9MSdR*01>yr=g z@Fn6(#h2>K5aI^qlfTo|C2c1r`Zb7vDAv=vvXmJFA>?&QRAVzeJT8H{m~Jq-2lJ6@ zey{;*-UZyT&bPKNJ}Qw`3DV%*ejwj&a{Y&QyF?j#z#c4b`^mDV#MGJ(({mcnQQ zA`6bUt3w+w?5@-rl#LfFKWS8w(0K=G5+PwOFGpb9I9b8N2~S7}(8|x0IooBm811ap zH2JKqL{*_rr}RXnt+;V5<7h}OKPa*C&$T}**}Uhor|jDt1{jKlaTO17Z_+Fq?J5hN z^xUBlCFHslNpGPHl1p}F(bcq`xD|go)QfG@Qcbyd%*xQ6;9L>`Wh50szW15P50h)G zwYm3ZJ*bx32ggc;)S({b zVBaSqkapv98lq zty4Oc2yHtU`NZxv8uqvHOnKclI$WR@j_oke_aolL>~rHYO;bwR$#ly`M~C-|D})>4 z;5Mvpuqu#%_i`~keDl&|=$9IB=Dxq*HXAx%w7?_Zo0U%`W}*;+*^V-+NPx7Jb(I9B zicLKczeAvb*@X3doth|GoxM0Jm?`t0^l(ek2a=-H)}p|>9e(%N6Lt2O|0sd`fD-uo zlsUc9$12wk4-q_Xe;OFda}-pS^X7=ZR7wa5iizBYfjh>y#Z+)^j@EXR`BZ6=rrTLg zLQ(-c4Y>2F)+}+@+8rhCkQNGZl6X8;PkUBBCf(hCwBe3v9HORowqpP$orzN7pDyCr zG#V1S15dWVuR9Xj^k-!VT_n~c5ovvSFRU6|-In+K?Z?wmh}M=$dbj|W4eRxmi^JYY zt`ai)UI1rvzFX1!kexN02kD}nVeicug$e5rRk0Pf-4FCvcO1=r+FPWRNJ~Sn?C}fFi!)MVSdz9 z5H2&gZbFlI#OlWj#lgmZoFL*s>Rj>ifl!5+Om}9`eHoKbDs0mk%;%t}P-7F!SEPD9 z=s>x;z9lMj(+3Jk2^oEKy;{Y-^JDRmCI^D>ekvBT#Rs6uyc2O`r;BHGIGwR@ z1orEoC|qNlF`UMfWJN^0^7Oq2%wd@$r+w1KCu+ecne;^hR`0SW(#MR4Hjg)Y4xIlK zY$2NyvzBD5R4IM1h*&NKp7SF~CkIHx!L|0zMns%1o8jXGM(|>IG=cuaU(R-UWuDsV z6Xfb(5sChZ)ycp9@8ogCS@E&^RGlX0!QV-)l8bY!+a`vrFyQ9Q?e4IHGF@I`YqYba z^#?wqdH&Y=rBYB_&-#EY#ZO94A4Sf$W>4xVX~w2z_+UTWe#6#hMHG18`d-{`;o;Le zj^J3to`4CCMZj8STB*mL0Mds*+71{}kL4?T+)Eeawq1HV!943>+e1ceZ;XlUXylo%)p6VNU_t1tBYYy`@*d07huMD+g9Ic zwy^g4kFV`Ss&mX8_xmFiEB#9Sk;y@0Knlbu)q77FQm48jZQ8=Wy#P%Vaj@>iuVMb)K$tA`X=V!NV| zS~irZRXSJf0YGiHgV3LI*4*yCYGyOKa#;C>U!d=g)DLzjN=ZFjL>3S2{U%Pu%@lFZS4fOmJD6GD8rr%^ z%UR3FdOZkdv|e}zG13{2L!gPU{)hbk3$t2>h3C?z?&S2M=UwbUXc-^}bt5Ux{k%#j zd98*PSY?^B+sLWLp#ZO-d}Fuypk(K}+0ETrOv7#GmH3no!kKs1k|~~`Gafhypq&LP zCJ6+IAhIVs6&lN9)`3Z28CL*Uj6q_5A(D@xIM%=CJxnGWR@w_$uz^)P@Ew(;I2yJ^ zuMvDfnN?xE>H`a+ zONody<6SLc50b;6&!yS@H2hYP$ArcwiHFW=9I~F->U89?Z%AZmglZeNNyK%Yy)@A6 
z+LEB8ZnY-0q;$)A%2M`wRj-*gcf1-{FnaDW;9_E3_ie{})c`;)E>=gGftiQw6J#Br zG5=S7>3c~bV`0Mh{5c`pcaZXEL+{~wD*L3Kn!UQ1)K)S^Kqe>2wYZKK(BCHkB0mt(NC z*Pe4c2ayfdOq}3hGM9j30ODl0BwTlivcAts>Y2KL00Q`FE376^ z03QHS=dk(c-Sq)>-jMj(77({?j^slfP!$9&q6hU#HEc?!wp9w1X#K(E=Zjkp(=sp6 zN0{&&GA>mu1j>dyj(maUI}Y!F=I25I%|f^{($RsHljfNu%QLm$X<;4rsdsrSw0>9* z*00hpZ{%-0;VRVdD}cd9?=w942>#<)+uDuz_*OQ9V(25N)tmyQr+CvNk3e{P9d|_f z^{mbFxeIAklB0*$>%w8W9js^{Jy9-OC-kEnk{)|HtE=*R2%xPzNR;tKV+mxZ%{~`$ zKG*E*4=^I3*2Yr(%Kgnn48QL2tues`R!ITpZ>Gw@akd9K12F?2KmY)+6oxt=b4LHu zg*^2Jw5mILJTqj9uS|e|zuaSJSR34r=~+6}hHJnD;Hh5wMHJ8JB)^E9P1O2{P5D$%5k=YGf@|4Ud7Q6o|7E}b26rIHcOs?!(Ib|<`g~58 zo+^owhereE4lP&H)OP)k6D7p1i0c2uq`LQ_02wfiDl5K}zjN~$J3xE@T%aIqJFVd8 zR{|XkO+adD`qoDYxxf7=sY^iQlx)?0MMetOa@Y&8DG$BtC$nCAz%zsqN+nQUC;q+S zD&e~VtIKE{jw?2mDJ0HWkz?}N{%*i9e@o!}}Ix}-Q zV}g~mMKFKWj`g@#60enKE(KRT=6=Fd-A z;m1a=azz8Fv%q}iPp_$l+Q$=%cXJI{+bWB`Fml1nll0LgBwRkKU^vcFwUmSG9DU!~ zh~iv)=vmqsp488iG23%{pR(lm+-nWI{Lu+yia`%5P?}x`?U<8J{e{ZZC#%N|U!d_$ z^J0vjMyaU8gwxw#3lZ>d-0e67w0#mMSf?vKdGo7f8V!q;o zvd6#(YF~t^SS)1>v_HL@C`xy36lQZ5A%!Fs)-(UM4TW5IE*N4@3C`>31q&jt-530C zWm%0_p3E%;Js{phe;-6odBRc$&uu!P>(zR4Hn&P`_0n(uQ%H~h1ARu{8qs4Ea_?U&iN_*A!j@q4F6LY73fG@lZD3I`*5E5Xt`wa~cSp|I!IPY1h-12oFWP;| z@fAs@xMg4hFQfGx2x}-W1l9@wk_909qvz^cU|`gb);NoNezmA9=;l(3DoNy{;3kJs zK-Qv$TiD9hZx}7u`_C^;P7C^UamB@W>>b+Ez=RAhqa31i%V&a+l}6{lSp-pp#1rkF z_joTpL`_zm-)0a){I#`OqRr)>)a!2N;%?UpuQFbdWG-ln!AZ5U-G9__M-~?|0BE=_ z@cH!PgBk-5^$3)$RFDNLnZ)M{q>#4T=VHV!1_xfyN;M{e zeXNaP7f`V5Bx-E^SLkKjDJ%@w9R`~$2zHzIv_PI^>;0yH%LC>1_O?G9X(I3EBb5Z$ z7bWFug0SYB9jktagucK&({{`9`}yeK?}mnNFyRJXu4Dt44OYkVjs#LF$mrOcxzFWh z>^4!yMDwDHPI_EzZS}sMs(O!gfSxGx@eHVo`;4X{g&h-aRGijQ z(VCc!o|`0G=(!hBLh9&!&ZW$PZ4_QZPVF(qVYMntwg`;HAE=yKg%5wv{D{3H!!BK(Iv2^O z&(ID<1?F*kvEJ_>>aEn}?Eu{KXH-H4KRj1nnGvV7gSE9k(>@qR_7|Al>0R(*Tkw)i*LQO)j*ktGv+21r<9O`lL7o$%rFFMD zp5J66KrZdzz&oN^2T>G5B&9~Qb0l@+he_U`Ca^d!Ya5@r za-2#9Qf}E_eEh4wkY8l3gD^g5B&*8!XE< zM%u-j?}t#IGZ{R4%InA(ML|kxN&&yTwz=;F*SSK6Vi`!l^bup7;-B9T$L7PTVKoYCBSzGNfWj_#E&D{GQ4))kcMR2 
z7y?dNetmR0d35MMQ51`X#|D=eW&z~+?L;LA;{XR-r9CCY@mFj*!mE03WYHZt+-s08 zA!H7FR-d&)-r4romLU|A?+LOW*qsjA+Ecmb(^MLRupD#J0`>R1SFh0T)O_H(H0YN2 z;>M^H#%AS{A8waf9bDO4i(S24js5Ko6)5k{l?jjVl2`hgE!T^&9ZQY&C?HE=M*FRz zr5YdPK6_I}L=7~*ZGk%&ak@}P9C*#Q7Pvq=dzlaB_??d6HHH0>vgIAWhv)nE*R-pT zF4bqw*RI-E)~F+p*Vfg~2Ua|;%Qncpl1hR&=B9pkvl*4*88DG>CH_Vw={N*WZ{TM7 z;u{dDCJf=@* zal|s8HbHZgmq%U9&rhp|iOW=x1ylTCuTR1pccmA7!}P>zV+RhS&>FZbX;2?)uC=7~ z^cRdBe%ja#7F+C7p4mTH*>&fw^<+t#v{O0lR2^ zfU|W9gAVY60qgAtUZIC_-!wk>&O7F)g8^Q|ju3)>3Z=e;QV~ys>h=Y@5z$EC?_@Jy z=s@_LR>J>5gCbK#(1g6NFCu{>jn}M61V!FA~{kl zP#ZiHGRPV-8@i~uSswC+sf99NZ)5Ruhqinn9o-v|gh{|}KSiJ3exW$?=<7cl*XeiE zB(~~*^QqkqLuK?tVw{gD=9o;pq_@}=LMJNf%tstmZnmuIJb)8@ocQ~ep_=P%4SxJE zyG!2!p_4C3WbHr!pGWrEMp2=*7B^3UX+Onl?)`+DX{q7+SKu>%R6zbxcAylyo8oHy zsa>zw7RxN%^E$Xu4vx%)P(FLWh>8x6omvc6fmLBiJR4t*ObC znHfX1VVl*%b$oF|rf#rI_+c|G*SD@unI;i=_L~|(QD*%hR*QIxgZyC7_asMgB&?}% z>;Cx_rhf_Mel)9*=o3PjY zh>~Ym5eCLuaff%V(VxR2Qa3WRTB*cTJe!#eS_^Dyi&C>vq7$!f$@34iUd!(GI+p1n zAzO$Zm8P)iZ>+~>bgEB&?8R}n4s93`|HfyP269IfMT%rrEB`m$`{l4NM@`7o#U}25 zHZs>}*RQ`BxV2_2J{3tBxs6{seIQ`%xX zU6MZJvGk0<d)>-vM+u=>-u*I&Aao3DA$?GYW0ulRppfYwfXiM*3YENT5 zU24C-!?~0DPC#cn;3wb6sn$VbqHu6CGX4ouDW2--%!@gfa$Zu)*0xm|>>6v*u$UAp z#cfB=CjU3{gy|C=4jY7W5y!`Zd^dXh?=$c}R_6VvJ(Ag<*4|BbWz&|Vq!A4@#3Cm4 z>o8v)x!W-sBnwHL@wn(C6?tqW4qi;R=WTULFYW4EHe;;L$>xgI0U6H6@cA);8}59o z^)I)~f!zOJ28{g-|lRtKXyt>jA!FwsrY&rXO-SvOkgi}X00V(c3X)hrn7YQH_ z``Oa}Xn7tcQtk3Pf4xC{{C9K>wYF=_s8T$T=;G39iGagPB6b}Y*t4e7WJE@Gefn*D zy!*npBMKlEPF+^s^Y2<%tlnWZ*S>I8eg%jNN~B?T4UcMi@VGQC?&clp&HZsODo%YK zaG&-=N#{citv)^+ZM}Z9eRiHS!2;u-NCxHRvPYCj+0P~i$yG}N;Hi0!`D(m9TyUou;Cg#6yx&w} z>2Fjftt&fk*{0^XXMQ*?((>KimZaaTR+)?}`fBT&nf=Z<(NkOJ~23FWrZ*!u=F^z=D zg``kv%l3{n*@F^1`NQ*FXvU+fyKR_5tb)N+%fa5UW83L)(yNy!FF(J0I;{8V`6TQc zQE_0?wR@>>8wBl#h#V66&w71=DmNoX_!)G&zn{<$19!59f5L&@G5IP^cQKLIZ7um~ zb|7c%x{4>+(}S!wy?)uz#q>48miWb4J+)Lxbc}c9)b`41T`vJKWv}!L{0Jgv5)Y%-)5^X?`r4K1nd?C?^eRn`54&O|Q~s!ongE zZP|_#O=W-_LH$h8QCI3&G4@1(Ch)4W>x;rx2)SS24a7I!8uoTBBB3g3M3uSi@v&RZ 
zb<-`2B(ZUDaHM#92dYmh7=-4`uRO1@x5XeOea6N9QG}j&>JC%gV&*PuU_i&6l=wz7 z?6~EM=hNGh!-HaGEd<~_b9;0iop2WcpBxTfj z4}IZ<22wZBq8Y*B!}5MS2A@XG2-8uG@Eu50+W3O1FY#K0Xp7UvOorv-^?NRr)_r^O zXnwob&mE9peQD?Jum^5M&!V)DwbmWWYaP7)8ZW#sc0EtuTzNm#PW2`^BJ^xDYhw>B zkl^tlZLM~;?mxM}L8c_xsyYl)tvkK0(_$xe&3s6>YK zEp3RZ5U;<|aD*NikkZS=Owq9yP-5U)C6T%4qDEyMjwHfNmp5d%FIKxvXyJFroAnV% zrOOy+;pf@w>Lg{4vuqb;yh`J#Z#h4Lum-}!(x_mMXT`g*4vf!OsoRRaDt>_%{0?a_ z!GU+=@rReFm{*u6M%OgRZabDlxvR)(m>ZlWKRP@l;`OCTSIU3*XcU{!{N>xpcivvA zhn36-x2{G6{?*!BT}LvSB08zdg7j>KU%l9q(o|Wzl&+H(whh!Higt)vSHyObx+V;0 zmMo(GpIgZVOm3*x9)5{IKb9+*xbB$^6 z&q|-2n!lF0#ULA0?7xkqX&SQG+UqhP%k^S7!CdL?-JC1~eJzgNNNNLkEAX{YK~yPa zl50<^ursI>0^?nrgk^#z4zDXZWI5s6inbTlkvp^w>x)swzUDM8Y+nbu6&c}-_3z`^XPSPJ)}RGatIeJOt8v#N27o6tScl%Es8+{7IJaShhAyDkbYXcFi;J)gUU- zhH?N#UvRgAklLKj)YM!l?39o}5qG=G_=rJ>6R+O%VIVQTaBD2t3C-x?HDmWen{y94pTA?zm%Swd5?{wm^X77%;4ERy!=}FqD=vS4YDU$%{AxIp>Ai;`NP>@hh~rw%NIV2MY!cAC}_8fj>mrcsLnM z#!LpdW$Wc$ue)ErJZgEteEM@i{je^Qw&_{*HP&CsY=ziCR<&g++k{-G_{9%Hcr%g>{M_{9r9m}nw|xX@nGoKpKpQTXNS;6XWW(H&= z1bqD0wP)J0=!~B8yhPESFH;YT|8@BnR%*6-%wRdSxOz4EPpPeUSsqPc%5$^baDJkc z94Gj5!Rj9TgDxXCmz$)W^Q%qZcE&!Va>II_YP6lRWL|1)M~it{i`R^2uZtUMqd56FsWW{aU=udIlYyi z_VF>kLIK4ff`8*>bjVn9R^{#+^5O|TNfORoSp|QE<|LoGLJqCsj#PqWw$a!^WHg_D}C}ys!Zzj&Cs0hwZV?!QY+rKUKh_~k2eJG$ibbek- zm3r?F|0xzR`&*oj@C-3dbOhn~P0x;B1z+`8b%(!l)bg@mz7(%chrbg&uMY2236sAA zfy~jlZxZZj`O>V%L7%`dOYr%H%k5onFY8dg(ewAH%E=Xu6yU)T#3c^e291q5OAhJT z$kjX{il1)N=N~?PtoN?D$#Pk7b4w|TGu<oH?6Ez3lGFN**DIke)gXbSfgK zl?(T%MbD)rdVqw@BMs{4c~HmQkmfTj25R+c^Qh~kBZ*5e6Z=+i6)TlOV7tx*r?LoF z&388gJ2q=^NM-UwiiVjt`f>_m=%hkYC2Bj{uoU5hXrFNRLV@aFv##$1dL}UD!DaT9 zFO#NJ_w#qNrv&c>bpHwnlX{nR~M?Vq7Me$EVo_R%yx9+z?Huf;d z7Cu!|Q)we8_~5(8RCKJzutQYX6|~UIcvbv(M?P0sLBJ;G!=Ym8&T?2?Cf>LOa<^=f zjiaOVs8}~~$!WM^y<%Rs6VUm#8rntLn%Ns16|TY0h;Rg1&FR#U4tEKZBh_#6hH{+u zn;r)fQ7A^Sy?HG=uo}arx3NG;TuoD)b17Vgl@9;x`?0zTjBVj$A^a4 z2hk?n-QvUcg%bJ~;x?|l7h>$ENbDetk{56tC&uAI+olSf3H z0d^*ckkrEo9d65?Ivb=$f)m&?XxPXcL|OZ-5weFor2F#AXHKY(WVmqO-oa79fxbr 
zfsAck$33^l*+0=SpBX-X2g$liMrlb_yTj$8bEQvSiZcOa=KO93Ek(6Z_9Dsal>%%S zqxcPzv8zH!XNk0bRwdpX35TMH5bvix$7AfGz~N8si%HPex73O+icb3b!B1o|8EiA# zxQ{^D6pGUD)cmSGi>$wBhwg>BjJ(V~+lDpi$r@`TB%5C*KB2zxzH( zwT+o&(TskWf{r@}nw##h)q$Wsm!o>u4nkfzf9B=oRxyk?f6UZ8 z6HO%sU1g#QO_yZc?)vMhI@xf?(%4 z9!K6NYyQR%97WzV?8IiUwQi90=jq9LsrDx>s@x%vH}Ah1-!^ZUj;3=o5R+IwKSAU? zy1kl|T0PS^y{&lR*<3BU`mtI*-eIf$8Gv(_=5OaH;6IwX^%G$eUe+C3VQ9V{-YFH& ze4}0=f%`AhS_3JFLvXA9CG5u;!a}#G?&Z@7RXkA8qMdPea(Sd zVs5y&#!yKPzR7|52*Tc;8ufd{^~~Q+as=RVusfAxf0!M6_*LaJ0oxqU2eyWnG6V)dH*-);g#|3JbK{=x4VjVcM&JV$HIi9$wa<|G* z@hh&BC<%08&a{EwG;lzfj!=NW*!5C(YHedi7pCgH>HBHhh+6x3@39HG3z3t)1hoik zyk5GPp1~APT>#<5ya#k>>(Xw`_wM`AmfmV4`h}JQ1zijS+3{|DR2rAmSUT)$_^A3# z`PMvVHz@~TXl1#b?RGD!0lQS{$)I41;$|Y}goi&<{zGr>+xTE9{a-4PG9R1^4&NBP z>Fgq>T4w#xH*++=?PX)!v^?@{uJ||`Hre03nI4_fePEL|wG%oL0JDSwu6_Ab$&@(| zo)jq20Csm!$LA^YteTqGPVbtB=onfH2%lKOJz4=t6ZW1&cz6L?#iH3G zX=A2CoAv;~2a*v@EAcDULc}u#GvyId<#SCtE}ELAnmo9HH~xh1XhmL|YHe)|8*3q( z(DCD+eM}Fu#pkVPy-%jq`+54^QW@NqgHPw%a%TteC7Td?%g3H`jOW7V*EYt)X^Dy) zqpLUC2{^pAPmexJy5AY^N!pJiC!aQNW_${^qR@4IOIYlb05**orrR4*`3j6njLx|S z7kS8eEg+vpXlG^fc@8EzyA`G968W)I{NP4fz(bIq$C)`D=(zK0XXNhbNy?MKFPf7L zf(8@c`1saOwNf2DD#-fd%<3T_TJLR2|B@(OpJM~r&b2f8QPVchfdT!2-u1U_9N}LE zKb-wXleGE%zO{a>%Xt6sLp(-IwUP*7kd5Zo6`Gc_{Kfom;$oWl{Q53sIlBfv^T55O zDhKKdHR5&_x$~PL;v3#jB*md1w3W*F5Qm@)!767FeBrQ^2Q5a167K6&(JT3H8OCQd zY&~zrEPBtnX}-nX@~ahTDUx+5LnPF=gzFt`+&g87Jzo66jlz6f&cwRqMcioh_a%XqPKdhHGCw2t z4KC*TI5_cvnG+4!`r00EDF1vj-tLQn;J`1PJp9;4;j{{KrmV`NP1J36nVXvdb(tB} zS(ZYK_eZm8MjZ(zZrm5#Yp+M;k-K1QOS7xjrvwL1^X)%qTz_1*1>rzDYCAx}sFRy} z(>1D=Owel2zUf4Cq_Q@hlTXpK3Fi!KYaOqm}i z)&IO$yv`bq#SHDp41?>2H2seoYKUy4+2spGPjsMi4^JcbiXRkQfr7IBcCACTTedNs zfH?RJuTPF`JS}u|5melHgPq6`znrSiAqlcq&e$ids1>kMB9D=*?l)}Gc+Z4KIPHuN zcQ~9K>}e&HV-)D#Q^5(r^s??}rT4HeU;z-?QqwEgP*NqWB%uKbJ9Ya%)zxP1)C*%E z_+x1F!Eigb6Zg}s+^KHkv71KJSD$sS){|$5zJ3uK8A2__%K$Ap$rpJO$4~rd0)%*# zD)dn|NBF5M+cXnhu868tc84T+DHY zIXn_XjFcO~nkuJ@<@PQ?AeWz#@jE8#QDSjd2}rbGCoYbuMlB(7)rNsdEkQ?aAl1==LY 
z%X$+~65j`|GuF%hi>|K#%Cc*_r355J=@cZSr5ltKknTp1hKFthm2M?OT0pwHn}>Li z?(XhxIGgu-zkkk|`Ol2Zz#y>id+&Yay4EUdDji=goA_74At>Rn3(L(W%VD&9g0AI3 zD4n!W*F{*|zb%4wu}mG7W1fR{z#nlmM-4VVmXdOt^d$Jjr~Y`cH*e%j^1UJ_cEowdR3MYy)re;&0 z5(`Z;v*Zmp2FQ=n7X=RwBjxp(GFLxo1&Y$M!N43X3K6&k z;X=|gCPBjE(8Z1Bm>|I#^0F(#hTA3bvduAH}XDze%i1_Tk-KD zWMwSPb{NUV_yVS7zyG-N$;+h(KHJ=xrc362CBV`J>#k&`-W8 zvNKX}c{aui8M||^s?b4mR5^viDZO7YcUI>~Ow}_~+i7Hq=C^V2CbS=Z{RZnFHmsJ$ z)>QJ82$Ztx3K|L{=R{&HmR*2rsQN6jVIl#acP24e&FTNDFD;XNz7MY6A^j1B(s5O5 z+){hy^rYrN<suw?ye7TdtK51X}_u@E^QZF>< zO@g9dnyj&hgh{}f)3uqn8A+;a)@BU6mv>Tq7OT1PlZ$JO<_xT-wbZZe*(twD1b1x{jy|9IIx#h^BT~n}Qr1~?{_>o7 zukO@})ZHcHDc$J@-Sxwvs`^Skd3E~|itr@r>*GZi@6)O$nZJie3xmBM6Ax?)%8$+X z9*b>zw|IH&uS3d=>Q_R>p?qt@z+lUz&nm^(E~;zyb5u0~v+nEaMrs#?du8O#VGVca z%X@?>^4G;r-no0<9!ae|W2i(yQ#osI{6)i@+XeWf+ggr^>ox;}^* zD43M5Uw#?#4R~Ymw-006vGAl8=i|q)hI`~=baW#7JHrM{$WF6tyy2jx=H^x|Zr?h^_mBVm(ER)9DpBZDm= z6h0^-2=3(;h?Yapdj9rhNfa2z2JJQoRuD7)pvjlx4i4#5)%}nrOJi03ixB+OeN&Sg z3C3#7Y3i6oB`#4o7%%*-W>u3mP+4Iy-geb+Jc;6#am|9lUT9d6cFj*vbdF3B>j1)8 za0U6S7Lc-18%ZT9y=pN68kNu8=Ex5(0@eX?`MhC4A&%NL}R+tRmG2b zR8}8Ou%{e1lt?Ky@D*-WUo6wkuFZzH9JquAHXkB1Y3Ofp(-PByd<#)MRQ5@MI-&~@ zDO@|Z_1ezOjsbVmQtC;;cqYR5nlor-XmeXhI=LaqK3pf;`dkI`g%UW>3hD9frT4&k zKJFhcTrOu%dEKutaa+%9hiy6YQxWs5s~QPrj|{)rY(&LELuj#Pi%yB{aYbK0zKSfr zwCE|u#l`zr?Ro!|eBS3JnT`N)v8&Alj<0-I+k(bnUjxMzxX{OFq))fd!cPMs`Z+8j z{@XA&UVV&=yO{ES&QZUX7lYsl4Ry-K+Eh(biABQ7jvr^uo*wUF3N{XAl7%JOxh`Yf z5T$w1cPnir=T5P}?j(*b8>ZddZ1Ym5r-cvtvNOp7hX)G_%=4E=UFQ5&S&k=7y3-h_ zk6lje*Nld;FSr;gH|9KNY^v!4QG?Az6*JzCg7S;5fksehD?s*h@-bA)K_e`cLdTYO z?X__VwZ)2y;vBnz*k(EQ)u$`1q5Zx$F(WJqWGLs)hj_zKAdS%|wux^SQW|HM3lq8M zgy<5*x*~@J`H{n?kIJ)0w#{)#+vbcrPZn)nxE)qK{Xp!<@$McTGH}shIOsSRJMre@ zs^)&<75BG0L|(VuAcyIxQ-xgP%i1{uq0`!Z~Rxn`!aS38Q)CyGmfN zyxOhw$NLheDjha4^xDLH1UZ$l$PSY5b|5qjeTff*mCx|h_xL@BNet!GEirEwe{Bmzd%>gOXfM_{e392fGh7tjw(Y*e#y)%qp0id zLf3Jn6-*A}=S9Pb+Wb!4xivF);s9X1e$5MHQYhFR%YuVL93oJzZj39Zc#~6*Y0#aR zb#AiD4Xh=`t*2W$$!22O<<%2o@pkT)yJ$kYFI0)@XHW&g-!v2`8}{~-Fe=wLIx*qjfZHIGmF9Ip1E(vM3 
z!TkihhBDN+nwSdy!VK(u`9R71cZJFs>bXNZPW^mez3)53#mfxG*iD&!buEjie`H|E z`rP|dvp~J+Zp{pQ&|#5@NLHUG!#>Y%pZN$FZ-`yhWiBWbv&``(dju43+;=yu$W3icW_i7qmBjb<$9@X*azY^U= zqGID@Z`5s)>=XMUhRly=Q>YiPC`{55a8bQYD(NHc(j+wM6h;|1Et$2GGem)AI(SPA z5N~w#*atrQO1~$q`^CSdrc5Ht7p=2DOh$zcIh}UyK+R1wgV6ki?L>wi?qagaO^wXw zw2AQ3C3CbZNS3w=-5N`F3@+x$Wd7f(z?gc{YKgW@lBV~sW&=IUw>wLFwbx@!e`r!( z=1HKeIl(=Oy;s@nYlygOS>Ud1EK0mu$4q_t_O10Z9@g4ko0J@(JO?jb^UOV#|35to z6bcnb_M7 zp3hIFuoF@(i;h64>tsDQJ{36TEU*Cb_pJ)f4^Z0lwrzD~?Z-`A);+ak?a{FRYOa=( zNOJg?!r`>_BqYoA7dJZ;Jisj(&1iK%hZ-Rp35lL(sZ+552I9N;4%b_Ct%fp9|$7m+WWXQ!C;xP zb@vG^fTFWZScB*}=Jz@CKZYxk%Gus+xrtrTd@G*SZrvUA=KP-AecsnN#quUr{;2k) zF{zmX3it3$4G;akv(6jO5D@QB(N2w*lX;H!1+bSl81W3adbpJGh6J>q<*1qjt7iJ6 z6EFlOF{A!L_~&YXXg*q7s=ZP#CPOrd#4~{iPOX4^jycWz2I+Nn5b|r!R?+2yrogb` zrZ#=uF#f^2n#28RJP;YrcKX%)V$no6zxRZsOGlT_U{#po_vpO{cpV6JS3Li6c78_3 z%LKELHyO1I&p;_qZTS~%Z`m!o(6&er3;tf>H_E{7y$Tf1ub6jx;fGgc{thALaAoWL zG;5Xg5M%F&vsdd6Swp71rU?&qLBEGG=9WcuEeXKZ@oa7am1KuWi?{Kl!_N7_+cm`yTnX0Ce5-u`OQaw=h1N}HExj&Uz&eoXVFeajO8ebVEZGJ}iA@?r`)1HaH)wkK8?|9#r{wFvSrB~~8D}op%a!SnHh)n~fVpm& zW2Yj;o`qFraG58s;E!BA4CDT137mrQ?uJYOza(KnyarjM84jCKY)wDk>UYge%E!Yg zN^P^T_KD(cyBWm0Ug?7*yU0!bs$d4agv|c-CQRk|8`l0dm$swnCiI!bGkK=G-(?pC z=}Yh34&QwU#Cm>qf75Y$p#qGX_CNJaVphl9=}bJ^549S6yX#~02IZ~>V+w5>Mib4B zF=CJiQoav>P${;(5{!|&%P+hnkliA6)FsdJRSy#i$HiY?S)uSV_`boI3`nt}C-go0 zZ2p>7KK5!s<&&d+AP#p!z`Ip$@(|9W`8eMKnyK>GoGqUr%f_ysivYnWh&e#*Q!S=4z7&<1~^;C@p!AXKj1t9|Nq+(QRpQ+VWb+?W7kZa`Sn ziJuHuuVmnnGT!Bn0X&dW{upq^stjFZ^bE0i>PZJXiF-5EBUy4fYbA-YpOb$jCMvxB zlIk!i_m3f~qChNv_#!3ZTSM`3P=g3>gTX>p&_}rX8|_`9<6g`=Sc#b=NtQ)6n;?O| zP+yq468<*Uyt(HMqnUahXrX*q__KTmVVZ{jiYw%T2G@R~l#?+>FjuvqL+`OjU9}>4 z%kA8jVoXy!0vX+FJkpsXhnrcV8nq$Y>G&lq*UC$SbBDO(<}-){E}EKgt><|3^!|ugFr&hU3vO@?-#!z%})PdH!&=b9<-iX4# znga2yr`}6_RtaObS$mB_Y6B7$SxMwA1Me;F^|3TxY6FR6=XcLbH1x)__?&*~H}W$C zZvO#YY`|WNqhT-={fMTKegRqO9$ML*uUFv1`x0$`XU zrTaWjEkijs*m4LyR)!hf{@(%c{+T85APl955XT=SOSsKpNzf(=m!bQ0mQ#;1>?`S7 z+$@6E*&ct;#+L8AU#L=QSKJ{UiA>@hC?xP1y(4xK`=-d_ID2J2^<8Qn@8)&gjD1(0 
zvl%7SAhlJ0IB*Q0_Pl*6q&z>LdD`S5a1oM+x9n6B!i_HRH;C8-oNIT5oFx&hz{uXWhjb8*zBSLdlN^vS~J#Q8{q2c3rw` z*70l%U1g&zPl>y1mR!1+LsP0W^xfAAN{9O(uGIFjQ-9T&@OQLMKGmNl_L+)U6>wXt z=a1Pi+Q@=_&RMt>VKPMu7FPT@W)FvP4qK9+y87Bpfr{CT=hTIuls!a{l zY7YhyjNPb)k1p9M8k)@`?#a9AnCp~UFDVWJ?(Y+P9*WRL(o#nW?>=e0$|_=*ZX{W` zzk?Wq;{yGmui>y;xAWd>^CMcdt!tM-G@8QkoFa2cF*>29$_b5Ea+u)`B zlG@s3%^=9&+o|r}l8{erq}0;toAo6~NJ<9i%*Fz;+lvLC=9ALXFNI_bJ9`qCdUPp~ zd8f|#FjA%F9yY~hngWp?k%M~)Zy9nSipGSZi&Ll2f$f_or4yVhbr;C>;c*N}UI-_% z7+wSweA$+2NlwcK) zUP?l(@UsP8KKUJHVO&JoLWB_G7230L=BSaG-UCD{H(rE;$V8cAwNc)acvaAw6{R4N zy<}v4aX(uQu|Mc#$r$p*ett4#R-t_osB?_>awj`g`j?l8)XnKy_t=CXze5MniRA>a zed+oPBC;A{?8A=0VBnDnaMCgN4OQrj7Ll6V(i2GFw#g$6h+4IdW6_HNy$PiQyadB_%U@^p>{JYn9-hz9JO3_C9*dp2 zty*@n)J1piryRVo8Rbj*HtKa8OFzpg$c9D(shfp3HSQFaigH^diQ`CqY9C zZ@4@rXy9M73hS&hILgB%^@3DiO;G2{BB&b=ew)+(_{7>>)so9H{O9cl4l@Ui_tOb) z?VF_e2=Dr(au}X!Vn72S-Pcmb!sC;ywUp0M(6AZLV{f=OwmHS6L3I;1gl^*4?WkIL3KB zSO^Ni5AT<1;K8(eU21}E?@_wL=iUKu?yoqFLWv-NjX37tcY)OYm|EWxEMlycOTld4A&Q1eFblJ%W?7X8N|4XfC`3L>gS)n zDA=I0AC;2Q54v3MJv{2zo%Bun5}=^d5!0TIj}Eo~NFJEB`8^f9T8*A{=w;S*?Bnta zoVYcj0mqEs?+pnznPNo9@ha`7a;IT0z#RI%x<})U%My}N(Ycqae}2o))=SQFXW5o* zN#vC?8O{KySF`JAX)*j~FXY3U{l(&(S2{{Q{)#3$P?6z!Fd?_$w)rzg(G1GqN^ML)zziBTdN-iNQp6(Z9M{+_ImGyskUaS1%OwXd9}8d5 zUbgOu)tI*`#_sU((YU>Eh3@QWR)~EYJjaAZ0hs zb9mDzbbnL7T;D*J(CanF;xhJw-|Jy_(QQvpZx9PZd}Ipc%ksgqHeguA*(li%&CF66Q~ar1sq%^YK`IZh>H7pHxTtOr*JFYv0GNj2P=FeP@T*1!Jt zY^T)Jv@SdzivnR6DV$iMgQPg)+|l3xI#_q+u2G9d344!lF8BoHiG6R4lJ#suZ0<-F zhw*r1g~eP{QL+I1_vGl%DQg#&@imX3M$ZA^-TO< z#5Pw?6XWzfr1_A($Yi!c$}>QgJI!R$qes~fh01VFmNR`flW@#jImeSju^laJEXh_a z^%%ukcPIKqV74SwNtE}tYv8Mi8rF4iI9VhY1HcNEWaApMzP_m~2etpxZu$oOP)Wyl zR=7Mv<^Y0pr6&UFEY;*JZ7HVjMtYC#v|Lv+8re`!8C07r0UWf1j72B6t&9&_^*4#v z)=ZMBl!piZ#hfdJO4f&^x}759-N{l?;axWrm)lGG{pJU6c3GBt0E>8}dMDw5P;+oZ z_e9g3oQ^1zTa3^`YS-{3!w7-s6gnIr?Eaf7lG^HvGSckhlO>GgzxkI>1+Ko`VS6_-NT>(|jrm*brCgguy%EHm!H+PFY}V>>U1j7i)9$mb_(>@#%! 
zMyID!Jt}9>$|rRL3i?Mul2?*pb~^AUhfD3V9y^7n2Meh{Mj~}JD&!kbuOjz4eE0nM zbGXTBTZ%~G>(_jIe7E4+VT5eo;yKLN9oIiRu63_^p}*8P0{c?@&Y`qFe7Ow<>j#1f z`1WV9Lhjq8fxkGHxFbk~`u7`eVn7oD45D=Q=@4-lX~u%%rXd#(7a6G>fu_KYmHFc{ zRWx~|o}6{+I?OJUs!k$Jz&h=6MwLme;YOAd17Nk-o34JY_(EPTHTA4AX>H?2-W|&( z^d-x7#7v-)i;&BGVQOLS@c&~u1QBo4v#|JIlRjr^TbOb6OuUFCH(xeswQS^#Gs4$C zhWnU2sYr6`nJPKH50G^tCuCdh#w%<(bv|{<-G@}l3c^usKDAK1n%}r0nQWMDs%IR{N)2f9{5?4me(Y*H5T*OA)CW07PLO81hDtWG!dCaf7#N#B`TW zy<+0Y;+Z59!-DKh{9_hsK9uecQbur8ANN&3=nNM~O?}v2SHK^7n>}*$mt(+gC7g%ddh*Tr!NNd=#mMD=&;81+H^ZX~$uDk#fdxtc zov@@8pfTBSIxPDkMKE4Ing3`b)u+s~k2vc?{5Sn#v$O3nu&Lo-MZX^P#lSTg0bYIM zM0N%n=ytX8x#X&vr1FTX@6;}kIc$&8H#9a5Sypa>9+NgRkfIEi^aX{)LUoW!B|X)S zl~H5tzc}kV-)M%en>xfe@-?&A-F?s znN-^EV=pgr98|TFdhyo2l&G=k7JE+OPJ<-!tCo+sQvOoJJ-*3%x018QvyKP%B%DxN zsU4NZ&sw_HoxfrRVD0c&Pn!&2GI$N52`RT=^HCemiF z=l4nxI@!G{`Uu>t0+`|IUCjr!OOwN5aHpOd$l92(2CJ1yY7vp>a`9jWVh_|c{*k4I z*2e$CAt#r>`7R^(vhPp6qjZbM=fRkCvz0^d=BLo|;k=|T%qFZQO1w*i*Y;_fkf+dF zM`URE$YJ~(TEiG&YM9vE6iR!0$b5v$o6q*hJL_IvO&0KVW!v1I5wUI3T}K`q$Z~X2 z%B7s}R#%J!&K2a&B&=o#l5&=}Xz zAr9wpzT5LisCC;E^DwSVr;xG=4(KwVfroZ=Z4^8phC&~g*8TfFR zuG)ReKm6E#E76QYO4rW^Vr^yuKwAde8$*f;ybeN5}IFdgW*{3 z9BWzKlU<@0KC_M+OP(5&jg>%Df7jC!jvDgRR_~WUU}AU$??$S2#r0sGmMRzuQXtTX zGs|e6Y25p|XNPlsTX%J$sZE4A2=Wtl4i0GeSoZnGB>>w> z)fsCUHJ-aph)0|?;>&6$aAcjIL_dz_1yLLBz)w);N*1?T=JNJ%nSb9<&``7NLeM9+ znfkbtFpHNiSR{=!(FH@YwzpqHHyR@>C-YPq^B^P`_~4jd<(#3hbf(S(FFiM;V7i+`PUMrkJS;Ppm4!(|Qr8F76bWwdh2CMeh!?NKcUk39ht)U@)I>#NmX# z#7xa~SJboR@84Y_`~4oTXKJH)s;MNG^89J0 zab_s39x4d*F0D!+E~+xx*tj1RUc2mcSq@Yyzd3&6UrpvL+n8b$ zn^Lsy(l>0s)xl5bkJy$0FFT}JpLa^EFtgvo=PhuMY&Bu4U21Am{{ck4LZO{0~f6Rz}4Xim4ar0%)$6@_v=Wn#{&*I#rjbC(O}!>sOBFH(y3uwnb@m z7j6#I2XQB-QLRE0znpx2&aZ0+Z?m_HUyZ%YqhiEo_dofOtB5Y zDos-w!<(=uK`>dNDrJH2PHRd{8(vFM(N4ed1!MZL`pTUPQ`UfdG)cYhH+!i;k>_6- z+A^1_dLo2Qb~}DToKUyz$lRG-qb3~az-M(sCN{`m_~ioybw)GYU%-?BfxLY%N4gTj z{y|ckyi8=_)=0FYzGbPO5}s9Gd~*R024j(JxoRJOg9`z`-J9Q_i1u7$?CmM&$IY#B z&jf=(prv>Ko|`cB_V#AdtBV0~O~k|fMf0rVfY9cdN$?>sG(*5b0cWvwF8BqXS^F72 
zekV{3Bm!X)IzAKJZ^w=PNAcKF<1Tjd!IywC7ZC+ZRrrnx{6fL&%?z*8A*s`?k-+MC zk5G^_ydd1XY!;>fs+f;IGy|y*e`LeCdAPi~I%+}0J_NDYV@7ZULajHZe+e5J8hGs% z;eaxODUdD&y@vgU6M3)*aR-O86X)Xkeg6CV8`}U3qPG7;^j*65>Pj=JRX(~a@$vCy z%oS@1L9x`ZtXke?-Uwc9L%bS!+v))Kj2#K$HS z{MGW~6==detBhu9Q|nnqi&Iu3n7dprd*E+wa-CTDloaubv+amJhi;9zP0bRNUw3b} z0a>Jo6W$!HL0S@(<<~rN3W9`e{~8dnsORJ5k9mw6=>j$|HSLcnmMm>FEDd=~-Kz6# zszv9!Q|;*zp>9C@Gq3V>V$Ee<*#HUE1a{P;KL8BRbIV0QqD?H4TnMm{15wzd%FCai zL6b3*6dMDjq14dS+gz z4?0Ltd^gi1b}AT0{Q&`IZIwhN!9p3q^qbF7->MAY{FBxC@0!=A);^KT8_(Lh(x*~w z!vS!=ME!*!V|Uj=X7_Kq9d2cPPfP%Q?YPLxKdfCwPMPtR8I_m1DGj1{90I7jW-^T<4CLCZP8?wNkqIU(@;Fc-_8# zrqKilcBV_Muq7lU3a+fkRI=qz6HA{I)k!7vXu3o!vA(jN*`D9%NGcT&`*f( zERC@esbA$BdGHbMaos~!e#~X`1P^rn+B0{3wT)~gj|Grsu%&&WpynIM@V$<@?vurl za+F87?Q-SBAqDSXyceG6s1Tmnws@f#SKJx-=;o+mS(}5z^~FIkDFO_CNx8n_<#!OD z)af$T#PkFV%M^!&VXT2+oS_`0SHcgsCl2`rY7BTlG{d?!6dp~90F#U#H@UyDFGxPy zcylmKvFtEGCeuN^&O@e6rmb=x4GV3#^@jFn)EN8mqqY*ye7@poq!4F(X$Azr5DZ!N zyBE!XK{u+J5Z6FZ?Dc9StrPl9iWfWe&@TT?C*ZlWL;)E8>AluHE}n|qTb&9GmUN0l zeraNr&Q5K~CTI9^y+iz)ZF9J($b)!jrfjSVXiJDg{>I9gc?x z;?WLi=$DY`9;a2*bw8x)()Cb$oTKf0Hv^aWnHm5v_<>|iwvq61Y?CtrZBj=2S%FTq zJSf>{>m7ULe!&}4O%PP1#f|CFn4q=>S?JXj%|&6Xm}7jg;|tNC=>nj<>a;f<`}_CH zI+s0}Z29C8?Lulgx_TM|Mwzy{6j}dg>D;nCejCjJw^TKGzU&MP4(6@;aN6^l-af6z zZV$zL!sXiZfyq>LQqg5{42D)2>2sd%Yi!x+AjI$13#MQt&(h&@cva{ug%SZWqq1MA zH81balXnuklbI+5IXHMUA1k$)JsML+M^$tEz?mWea|LEVQxjB3C_qcs$cXA6!fTz6 z$7{C3B=H-S*RSSF#H*Y;C245kxaRqc78q46V5Dmpbv zsx>|&lvMm|jY+%Xhy*$MbXDb(*BMtQ;Pz61snysBWOqhp^Rh(bTr9l0U5kqWTaMXM z`*^4gnX7o4$2Tj%%vHmk(CM?@KInay-}GTi=?R(&pyomc0Qr?|BeAYyfXUh=FW&mY;oJobtRh>N|K;7oyt5 z15EU+xAohVZ)^^i?}gvlHw8yjs~xC2g3RnT#gpyx_X3RC;Jp!yFEPN4do~RVU*TFo zQcg-~sc}D-XW>HTWVT0haSL3V^DE3ZRHA z68sctKZZ-b0&4A_%*E zI=ruF3V8~$&qPR2i3P^@+<|WrzZJT0xIEq`ZOr27L`0cgzMsIh^JMZfcLSiruG%V8 zI~%e^T4p>?)2Y0ZNLH7E0`tg|(cgMt8XL-YqS~5S;KwW{3UZK*RB(hE^%eB>$-x0% z23xVZzHY=xOhd`&ms1JgQl{ncv$MLOqVd>0J4ssih&WM&&lgmvyQc?Uaq57gr5nJP 
z)7fjEPElL)N+jZt#iq$vcw8p>A|5f?~@wF?6+)d_Mvl`n1D1mW81#2r>9c;J6?xbbaFjwpZhf3<}+=s4|EC4BxY zZ!*g+`G(HEmMFVN$l7R4tQp~%qq7RKLfj0sWkwz;MI30*xX6^w>_I>SR=}!UyO5j$ z5wa5X>zU{%zDxZab37Wah@rCg;_CGz*VT3|LRG_wu8q*bgvm^`ZMIxu2tiBj=tOXB z?G+NzC4J!BKO{CR>uiTJwB5{p0)M_ABw61bi0r16h6_A2QHU8%c~-(eE%@tP1murr zTcpGJ!`D>Taxl9;do;0Xc@b~3xr>kf^kPnG(s5vAtSOmYcgJn|A^qbIFIH1GMYo&fDtP)!f@nN;L-w)iille@qDaQY8mV4)qO@%=%1C$K4?l zMX+4;BUz_frMb&iyVu5>z2=3OPJKp)3)&@WA1v;w>BFfM1BDjZ%ra_g_a4oOU&roi zjoxBlmM-Zj#ZXCiQ*%X3kCMsrO3u=}4^?%vdIL09GRz_i6Kb(~nkwi;FY=GG@)#V& z?_Ag-zs_|ntr3-++L(x|LKS?QUlpkEA_^1dVZ;s~yX(35>W4p^_y4|BTe5gCBb6~8 zVip;G@FlUZ7bB_um17{d=L7y8o83o{_P>6riA^knpsr`wRh9dt0iOuhLe39bA)c=> zn-Ud=@&IW|iTd05=TlLW1ybI5imlJ1+n{mf%d>2+mt^-^?}O2&<3ElE(yPVzsnHtg zy(ojZgHl33k{dIPbWMQl`R6^wgx6}c%6czKVSf499ad`5#iL(OKI^GY`Eyk+@|QB$ z=H;uoPQotyS5eZADswn4aL_K4*L4LiHW{ku9fw5j~!@hJX^a z(+6suyZKKG1_^y5?%C-lkq)iQCV1A)(ZzrsHy4^MniyPiR>Dq|w|!WAbA)-4?{bXy zmPc^X__&g4DNZ7)ObmZH@56EVzz@5JJ77HXFhHj`{N#J?3uo!JX_dd54^I}F-$auc zkW%pqX5FIEhZ21W%=!SIFG1w2Y-NJOqQns+9gs(Bl%HU(Oe^^*T8?|l(nBT1RnP`@ z5_f!?CHUaZF9p~XY{sZD)ElvddRdX59$BUWyz;cc9ye!t4w+_pvr!@HKoEQfl6tq%LhUbjT+kbWBiztdpV;f zPgQyF(Ln-P?}=?LxB%1q9vv)caP9?)Sy1=m80#F%=T)3II!dDIda z&l*(bucX}d9p&mg_$eM&cs{|x>rU+Po^i<0<+K_RuA(WokzF+nbhb)p5|yfRuD|JJ zrh9b4eJ_*CGZZSt!&M|Dkos%(Aaz(+{WvOT|L)PDILdvubRynW9eg!n1fQ(Ww=J2e z&iA~Q#v1w<9x22-DMDa=5*E+J;wKL)=wG}l>!D4w=BlJ0;7Q@0aQvCwk-xLPmGt`2 z%--`}I^+T$(siHe@?uDcxEa;QBu7-=wrlxDg;DkE8t#PZZ~JW2hHs60jY3L9&%ZQD zztAqcLVSQnjG?6Tt1*0Z3sX!cLfT^g+Ph*UB1b+j;*N6PY21TcrMOtY>f$n z9)>#85nF$fUUu*u#e4xSF4hO;a!&iegXtRm*}#)$tyb3ETP5WJpf`)$J|Q*u6oW;n zwFRgE$j;!=cyCM^n%4Nnx1oS4pWp1NCOC8Hyp6B4_-0=cT^!Jz+{8nSEg!aNI{cq4 zdr5;gRGy&AXh>#zEm$`7HUqx!Cu5zft7^Lk7_#kVm08=Co~t?T+}Cx9JZ;C}l8p!J zVUT%2SR{>MQYQAeeS6Bbu#@Ox!SyXcI=^SmJzy9W%w{Mlkp&X$w+nOxu1{1e2Qa9* zZP}{}w6{r)Ht>STi>DC*Gbr5m^Ecf(wILkLf%+AG?-uURLB*Cbb`GaakfTziT$?Co~ z{^z(<+NPM@z(u)N@cvQ7rNugMM)Ml$f$0_6$n`UV-N{Nvi8POqk z1XJ|IIolVg&^cBFsDKk0jB+%5-m6NS4zK}zL*V0KC`Ek+o2TWEqAF5QWDOjT3%l^_2mvD 
z(G&wq+?<_lYod@Rsy{K zq%qy|UagFLLsxC4-(&EHV-NQmnR_#{m=3D#MOvdzJf+HYTHC+bm z)Mu_g1ha!;k9z>1hI~g;73A~!M0wu<_F1Em1BS7^>o>*i~zYrBeIGTU$ZVV{-6bX zO@jujl!#{IJ3jPS8Q(dG>kgfj^Oe9qvnC)Z(kY<`+b<-=Q*$=I7e4hyPQWafU)IrA zNeAo+$d;QNP2G$}@Vw<5T3`uLDIOsOwT>32hhi1CE3TaIYQ?-oe>*S6k0(MIwS?U% zYoGFg96?m|u>!?A@mTE!L(gJmqoz;l$tog{c0sRe&uupi8;FPia5pG>(-_+PEC-?1 ze~^Y}1Fnoq*BdXhlj^XP*i?#wqnjx*=YGkXV@6yUsAcX^1VMEB=GZC5`4Gf@6VT6W zpNs0Q$Kaw>Ludde2mR#+MYZN7I_se2o&l%#snV#vKBUyqgr2?pbpML+a6#@BKa3{4WPq*I94Gd-#nBMco)B z^_JzWpYI)|(<}cxX$6JAm&|x&>~kBj6)zR@xpMTTiQYf?mkk}Wy>FuJv)D_;`Lg6_ z9}z9=w=+W^!gQG8U|Mya^IbbgX#AWKC0vs2I)0B-0N~ub!YRu&ywyjuYmB@udOpkh zNpFwQS)-cIZL4e2Y;>G(a3IDT8E+cD;9=jxw#cM)uXZa>y`ABByxGtap`B$n_}^co z7J7w%hv|7D-Jd{HvKRMGn&#**o(JHewbKNtjGsV^Yge%;9KH2)uiNVAP-#X;PoU-E^+wz)lb2C^}F zc?7YyaU&EKsvEf+k_z_PVI~$iF455?AbPjh{ll%&YJ*#UMEN4cbgS{C6jzH-@G~_a zvZ{xQY4a?17E2nlX=w_(e{9sB{D5lzanu}vTTsx?lNQrac)r4f;^KpMWSqfWRPaSZ zyUxMi-;@A1LwsG`zFpL_(R?xZ_IA+=WRzt9L5WSakC z`Gc+x0?6&0XgP~(1?our#L&-=4zfwiMT$5+XXzG$An-WQ*q+YD7I0Hbbp1Vn8vn|)G;xJwnT$XRD)U)QFOPGlPy-BmAF_>V zfeoWWo;ODQ%;HX=%JT3pOQfHPHId3GnNrHq{(W^~UI(TyL}T(LUzB7(wnOXwz@9JO z#P`xOun*lNuvA#$wV#s(YTb1;_t(5DHa^7E;HL=PcHA4tqc29Wmd9G%;G>WSy=NMD zouAKGja`es#3KEB>CW^`$;AU+g9=Opvq(ok_4wd&0JrXmgO5p#XqA`*ZW!OUtvhs` zWMXeEYI>a)vF%*2pK)FBuJdc)n9>Wo5AzkBUt~jvGb==#zMGz`KR>GT4E-v**Ij&b zb=Q+AZZPHtpoKI!?$E+lqSKtqodZ(JF=`#hJ?NQq1<%r9mYndOP`v$AQ|GPOD4kA& z0S<+^PCo%;M7!&-n>K|;k1AJBro3z!W>hBp4Fp{{&=<8k^;OF!lS_%&tzm6jQ#SeU zn7WS(sXuLX>v%gX==DH<@x95OY<|$*NYXvUYz~G7h^xgOA%LsEM@vdLh0@Zogr@%$ z1z)TdX+ulD`gSJ?C4ynmcQZAmIYI!M8vyUa*^9b!p?}Mh~)an4@d;~(_a4R2qJ z`GIjxOOcC)w>A|=bIp?io+!z5y8T}w0~p$4(I7;yaD(tpZEF+g9XhL~gAS^a54by3 zHJ7_HQzs7Smy5nrzdk@4?ze@29@5{>-vYnMDLI>Iv@p2jp6w#-0`wfI!Q>vfXUjLT zN`QCt_;(CNsTsgZ-6n2{70-r$7+jQJwL4c34SglKKcKA$*rroN2a`AroU(jg??6jU zawEUE)W0U?v;JK%#oJv1gWsXy&LqX*AtxW08a;(k*7ufRW;eq-xxz4dHT+5HpfrQX z)?a!Z#npEo)^WC;GWxwX0PlNbxPRZrs7f<92z4#(MrtJUWU87xNvVSR*_g4T1@`ap z+@aTudVOYeh5}BnE%1KCKC#H{x7Q$!lc@QF^4|-f;ehPbgrsh(A?Y`qD-tPSt8;ME 
z`PQUHhKaGfw4hf4Pw_luM>KR7$8jF7#3efTbl|>g$m&L^!Z8~p>T4Y)rG~qt>{!`D z2gxLxP9ScMu|$X9!4|h(v275PFSkmpCe`suJ5!3!sboBz^r z!ecvCO!|DxZk$S2Mw{8q(>IIB1t6;Un+7dfEuB@{>Xd3VT4ca1*FQ&NyD&#llLrq{ zCqsl&^qf0sIWn6RojbYLW+$`4*Y{AwiDyF)!>?t*7d&3v-_*rs=(Cf`JCmshxemBFJurLsb>8Yb6g?W9 zQELR3tB~+tvWzUUr2M1+c+z$0Izw4)50_E;YmJeIK2U<|wa&97XfL|x+|4d%U5_65 zXZ>L~FW^RUxj@ql1>Mho)-L|}Y$f%(!5>n*BCNkN?Rir8FUD!2zNTVm1myoMV<*vZ zVB794lZavs2!ccim(b7jm!hd>wwW@}Uav$31Kd?b_;#l@i+fM)E%!&3orO!pW;+qd zzqS5B5G9z!MxO}77fOS00MIKx{wlFbBmG<*b*Ts#%y=z#ulqHs>Qbws{kBKz0-%R? zvX;|k&EMnb`tk;(GVw~DJTVlm=V67bi!!*zzoVLO=eJ~FAPMoR;R_qufe#Q#jJ z1qHMWkTkgy2gu&QM=p=281D7vn@<-%X>TG6oges}q-?THC=`Iw_C^pd7eg(ck1zg3 zdw@!b{VQm_ixTJs!|2!QjzX1IJ50*Am!N+rcs-e)0fc-mh0os~jKx%2!F2SqWl-j# zrx;G2x;kqBO?!Lqe|vR{=0$SD1}Zic#LAA$C=AbM8DfT-XXTxWX|3-MZO0qW1+tRL zW`mxkFXbH}F|`EvoF5Lpq5#kdpK{j&wqx`VKtgD?QS|EXoe3tk@G*{nYuqd{Ykz28 zCcj!?!I#RPEG=u^p0WUMLuQbp3JGNyiSXwmB*aKBsoyl>m?tfb^O@5##i^H%cGR@VHq1rc(@anP&)3Mfi7ce{9F z-x31ftmY39B498z`%uP94bLrG03Mt1B#Ohvbz-Be2kcaRFyH-u=z8m@AiwC{la!D~ zxxGo$gz33L*c-Wmax(B;_16(!Q3{2> ziO%rjpmuOdibr#LldrP&b07~zV_Rp#%64c?^G0sEckXBow^9m8L0zFhfoSe*YEEp? 
z53+qQF|(goM8x-bZfIqRMhEm5{^y?XWx__hUoH&M$J5>^>wJo3cVf*1iTH@UFz{jd zo~b!9kC3181&RXfi{XDLdim<|#?AWf&t=!GTKqPI1HXFg#i;ZCl74TM#Pq}D5S@MH zl>Lu;|2(=s-!-A7^U+(dS6>YDhpttcz*6DOTNR@iPTLJu|Jb=z?qlt6zbTGlaxyog z+w$R!7~IK1s{Pz)m(kW6QW0poHUI_b0|KN3pu+{-yPfkAi%5AW(l5 z($#MEzf#w=zh#W|HFgTP6+gbKyZc^kaA%*>nddsw7pk9^8LNz<4c zU$YC~J4dptv3`FQ!tmnM^sF!%P9UcXjZxMj^|JBK`l>YA>y`p`?)#@>Ob8QUVNI<+ zN3%TkaOYyf?@aA^m3ilq!L#>pv}{Dawmh>#Tj zokXjW-q5@w89Dj+=w`!m{j z@NkY(vM2uHvgzT<@?AyP6O5LWEbJ?&nmR+o62;C7!xnyqO+w@^V#5|vqnf`UQ`D9INXri}3mWy=Pjl?5*zN0RRHRV5A8O=TKaC1A zOWD>IRjxvB$x4+-({=HEhMIKS?;)5KH=3dC;hc&7^x#_Fq^WQQQd9Z4`>Fb@4^0s* zS}JXEveTFgxp*614I%W-j><9xSZ*!cK5Y{q5mfDc7^~o_%qSF>gdbyk=>U9*b1g7- zdgKw~+T8Aq>~J{NdP0~tVP&>%D{(j+4+hHZSe|V&@SY3|4OxKein!;e==%^a_3qdv zw{ZaUtl6PX9Ku$h-pb=qFGIep|7h1PoBiYL{ae;{Jb5gvOy=Y=*xDLgPRG4BL!bE( z6ceRP%9+ao+1rb04zbLQp9eK99$P{o98xF!j>5G9hA4t(=R1umvXzwWe7?JhUAcFV z-#4#Cd1^BiEQPhXBFnS=*A%Gk*X00>a4nPnvn?=ujjK|Q;L()f&0#3=VDmTwQv+|< ztDFsrG)$geuMQQx09HUTkUHjUeBcF#HOn%B=61~2&p6ro1Xr3sTj;T?JdXNn{C?}H z65q)dyMS9c?i~x&LNsR+OQN6swOopI)=swa#ARAq|DUR_x^9r({0%7+Jkft`>~V@+ z>QU7?=Jv}KK1m;Vf2ryFufo2&v2plt7cI+g8{&Gl87&J;wlvM9!~MfXZ6%VpHcM4j zedD(wUhg?Md-bRCb7#Sxn{6^C;Rwf*h#c#EVZ8D?Ft+{h;`rQ)t|9nr^tMZkUKerY_+N=vmU0emeNEO%;l-K-C{9*R`1GDoI54m zqntZNjz746smc!(9JD7!mYn~c}gF>4rBV^W;C z9efh_!}LN<4Wuy`8tO_(@K+AH>W`3@KJ>QFOvFN^C374xE%quSA?kKkl&RZ{dX|3s zbR12nR*NbbiRWVktv0sbw|vv1&{EZrQ~lSk>fqUw!wdaMW6gbB$66@2pt{FCG)V=ktsql13D4!-6M8uGj%ge174sT|kGjyp#m)Tb3XH`v%Z2YEfzGt&4 zL8s~-*lOSdScI!oE9K<>1S9uH;V8sL_1C4(zfJ z5;m=po(x2)XZly>O4wX2^xKm|8xqg!l7=@~W4Npu_tS6HkWG8;IG%L&y`R6x)U7gy z|Eh4lLQ&!?Pq@uCDm$zQ1?Hd$J8_<3cWXuthUDHUhO6v3AEn(#9~e7YNO~elMC}Lq8|qHazjSI${G4pcRpr=nHoAWF6-d((T~zDI zvi``*I_{g{t3E)l@EXdRfdR;{7S{2uX=OA{rW>?IxBjK;U*pH*5$@u4@%M@QDLgVZ}L*FEk2H z9^daI{_k*jPTf%T6YSos+HRVr>T}k>0dAGW!Lru^N7gM~(MT=dyzjQ(wUheu%UZW3BdyiL4e}73S=Xm`MwSRh7 zj``^v6;GBQ(#LtF8K&tDf3G!P*+DDsvBk7Mo$SFeYFeKMz;S6}*QjZv=rZ1N%^cc# zMGb7Q+`b{>!3L%M;cUh#2%p*gq={N}GIYMIlY(rl;;Gn5(zKW%gkMMEqykKE5TWsL 
z-J0*eAFiZqb5wuf-|_|V#EVG`hw$yuwf<%oPjmbziZ1IB{npwMHB0uj<-0->*Ie8ZqP2H53XOr=*#)E{cj*kF(%axzB@FIHUxw$ zmV}6q!ZApr%%AR(ncfE}&mON#*lQ~0l&C9i*WR7pSu<|o)oqmAkKHO94}H%|&38wN zans}O%x!O)Q|z8P=6Te7lA%6>so^q z4vq3-b1g%hX@yhnvdSQHP4U+4PUG_dX%)Nu!yTS z{qG+x+Yw((bKei`y&km>@vs##)k@X+?#<}{?JKIq_PynFi7@8ZxEMZ?oeJDr#G&@U+fto1TbP{6GS;cQ-qhx3~M1fp!9AcGpJx$J#159aJ- zBw>qOAq7n^foz%aHp=k9XQjY`UsI({mH6Xf2?Yf;y_%l3nIn+MiogUN<#U-qfl(Tx z@%DImV|Q!o@bGY&I)jycwZ3`y$1Hg3w5rt@l6~z;#Y~^6wPqd^s|AIlXK}-ER$`mh zJ_|(jg8~tZ;6~UQQaT@sqiL178~1D>8url>na<708t@wZvS>B&zhH%b4c(NjDK!7b zrF_~@ZgY6SF55LGG*?kj8@>Xp6noC??)DA#rSs&xG1WIY@{934j zLCFm^my$D7x!<|-xO)WuOj^LxJdaw8#w|7Y01IO`L;5cy#b6-bJ`Hej;0VfU>K=Z+a|dB9i7!{Qy1Va`*YKx?dP7SV!c+8w%_&Ks~ziDOJr#2{e}A~v77`% z%M!nic>I_uEYMFfwn;^0{(?G|m)vshh{wSAkA88|B92xCRn7t~+Pil!jb}^65+Eh#)$~`$aqO57d zGz^LS<$u1wZy+%FY-n8zzLim5r#1u_+4nR|X<4d0^l~o5s ze}A8F1_+7d4b{`ucMw_bY-`;NVzEOwRUZv|g7VT0T3!}40sY)HTS@tA^L&@oU8-O; zbzz>GsB~q5t{JS>A6QIF+6EO%kY4mMGFkQgaS3w zH0|N>LlE}CdvJk^p|zT}6IY)P-F@O;1H^E$hHztdj2HO^pPMALwU$LbOSWJs&E z6CG_cBl>4k+2b6J+0%u9l0Ad@9t(^ z>U-(2k9J)CQA<$>xgB}7qRrFh_ZR_;nP1lERhE#Eos7Jc8rXpJx z>na^H-0Y+ZY+a}3v4QS&kgDY8V@OaW97Zbaf=a~GVUL$~TATLDh*8>)^HFRd(~*>X z_}7*cNu(%adh-U`_9joiE$aWr+iX~rOCJE%F(b|!Odw6IkX;Ur6^oe=fi68Jv*3r1 zw8N<*DCp}y-3l^g`$e>zsYt6w70Ks;OsP%$5%4>*#`jN1=nwfib8u{3S>pvpLkI*& zx}+<5Sag^CkkVaQHoQ_>^Xh8G%v5;7Uu#YoJ~4gMxRugAdbsxzeR6pX8cMbfeL~nj z6FZ`UTWr7|2VuMVa}jLEfUE<>{xD=+t?Wk^Qb2$XLqUWi79QRoLOU`YO^oU=|6;$~ zCz^lRIx(308X4(qe2eKGw&D35OwPE+8|flvTboSp0rlRowK)3zQB^zTVwl#n{hQ?n z;(Z~V5hm9AQ#iF^4WZGYjW;7`>*Eu+tp3iSWTkTfnkog=CX=Ko`^yA_MBacYmwT_n zrh0QfHT9B=f@+|g490RJ0>oY1Mctx`pQA`b}At&e4j>A)JuI}&_*itGur~%YITuhOZ;R4&cY)ukw z>k`bzPoWR|_hmS9z{pN!cD3N5?PcYu4~9sEO=sksr^d6K<*L`MRiMMLzuvy4jI%Kd z-5ACqrGJ5A>TW5wstM-!j>Fcy86O{y5nkOv{q}7*s2HoXxXI|hb-!fiX3RX+q?a15 z@6G}J)>b}cfcDFZa@vYD0;yU0?D7!q)sjk9iA6h!EQ9{PYYfnIM6KXDCYW0* zRY|tL34wJ;E6^S=tPm3VR^c{X1qZIO|L$3N$-@USZC?#2%Ef-#7k&w1gwlhSn0;#r zj~_KXc5;T)%kWi{J~bpxdId9Z<~}Un78sBayNNP)$9{wxgFz 
zL}!adKMwHL>;!`PD^o`cY57bpOc+7%%Dk0}_wH^kt$!X*qp(LUs6JN`EF>>vF2I7k)Ru(aYWl`_2myWU8jp?-h89!vgWtg7N7l>b^2OX%9; z`}B?KF$*ry&2-|6AFxW1+b&1izH!2%2GTvlc^PW<(Bn0X^(M{;bY58Unh;q|LWzZC z^f|ALJMFiKcwdkP#?dXMML!(a8;m52**3bj7%b(XfvI-F9PBXmpAosV>9TdsjcT|O zS4K8xTZLYr=P~hG6xLJJ5K?s%VYK`{+Ud>y0?)jW$Boj1&(iNtD8v>bzRv$NvC#fo zM#Kh{f#^Wxh5m7pt4aIs$P~kc{t7COs?!P-?I)b#o=UWwm(W~yW#njyC#ZvVa~I2} zWU^RWW|v2+YYyfe(eUZ~Tbp~oHj9G&9fJjTHoTk_2HB*khFQ>`YiLEklH}~PwgiCz zF}zS;iS}Ek5VtoCWR`i;4#@4tyox$b^eWXg4v%G9OLeXEw=64=HI-i6y$4` zx3L+lY`8UZ#q-f0K+&cD z@O#Qr7(S6TGJ{DUSSipzVHNP%7@K4L!DK+tQhynIbbr20?+s=L((l_yD&IA-`@|Fl z)8aqB2PF(3QqTMH@{ujDfjLwy9;UE{1DEhIk{6HDCc~~*r~tNL<6~DB}8;6&7qp66nJYY zErPXMyz)Ffm(rXKPQBV2L@}nxG2EN6&VN`~h!H9lch>+{Qp``saQ!lFOKq_hRZ@}1 zF((H~t0ca(_Mac)mRPi-O4oh!Pij(S!R&x0HV0xF9chZ*K^XH}iHX8OI~i&ta5`od3XsSw~z=?-76;D@-f#b0nXBu(tPCF510PoaS$!ia3qvqA!s(Q9n?mIZ&GiKVen#4o(^@L$sX9m8~b3C&}KR8O$aSF%PvF ziRviSUoGzCS2Mm~-ow4U@jd}Ks7o{E{8~YJ^I~)w0Rq;4$3<5zj`a_Jo zM1PQGI1iD90L@YkHohoaSK#miC)eW`x|LLIN~yFK#BNW}`nkM0(g^n*c|p6QV*W6& zA9#lHd}1^^vV>tr0PWOxTaPVt(2f{Reh~fBVL=~a^G8zs6$l`k1@#Y__kc!06GVvg z>g3dxe-c+CUq|{Y>_7d%mw1q4iga!rfk}#HK-LU_!7i>VtXb{Fu=p6cu@4achtOb- z6p_6h^WW#>BM*zYm~d*Tpqb<{t8@A{(#9kYN>ySLOE~V|LL0Au6WQK>-2)TunuK%% z|201e8>}Fh8?_YiRspiSH4Y6qQn{4ly?1>qNcT@?UenB8&56GJpCxZqflbHNLP)2~ zXV^0mHfMEJXDh<51{L5V^fu;|r`5_iv&@(4fCu`FiA)B(pi8UJzgR(VabASXrAj&GLF}$|L{V`O?gSF&S!mF^=vCK@&sGF z9%F=2oOm|31DKk+roE&p&6fp6%^7!Cxe3)U|V*{*I)VT(Z3L$(yJu92KlVU#sPfUu}BXU%lSJI|&x_!v~#jen-ye!6f%ZS|l z9aIJZf$wp-lhrwOGytJM+dEePA0TamHDtxTS6fNVQ|EU0^b55w`RSa)KFDhXsDmP0 z$uw^r!a=qc6`vrS%QGxP4Fr^bP&k}*TZ4pq`4ju{gK+ej`ZU|tAO3yq1RrDsSnK0* ziK(0^-!5Co-%bVpOME1v8QI7d`Q7U1QQNcG>f5#P&|){fBYY1~^LB=U$WFCVev9Ve%L)d(GL&jgbiMA0fJnAu?3*J0mf@* zdb7cLNe?2F#I>_7Kq_e$djbY-!f6LZ?YSuqTQdG7Vq4jIMpO99q#^lim1rPZ@dA|8 zR^K`(aiGx6_GmK{Z}#lB=sxYvS$J5}Q7vGcBA)%;uw2@G+=J+*OKWXvAT|afpi2Dd zuxv@x@{?_no6G6+mr_?;*4cA0wEeXX{6*GQs3n-);cxR&hbe{(*!1(cps z?|=+bi3jgO2rd!JvG`3!YXze!aXwVI%OeQEr=x&Ne1W+g8uGYtFh2ddLv 
zt!b733xq07Ng_2dqO+tvZTGg?+J1c|)qx*-XS*rAH7Rw-Q+V|cQ&`!`G(H&WF@~Zy zhn=9_W>{PB%VAe(981T9p*H82AD9@@1z@RUYp^!t33f?e(H|Q|FJkEYB!M zH|EUH{DkdWs25p4eG0?wSjl9vNab6gz1ssAaLxa&fQxqAj9^;{jv z4A8I4Wq;U9Dafu(TUOp3@S`kpMxlAv{*KmvIQrdyd%g4)#rgQ#b4l26^};E#J8Tb(1TdZ~lq2nI10aN;c9UO#F)pUQDB&&RGR#a^3E-R_z4 z)MI9Q*QF*Kh3DR07IEGT5MzfvM}m&l`fijT1fjYUypK!`5NZH`dlm6=nwISK8u*77 z!31E^Qopz$*-xT%7nv@Vx*x4M(UnC7bu;w~M2?r6VucMq-}lTP{va+LKNfl6lV;J2 z&!zdx>T7>oV0+ct$PEXkFG{;xD?H&hE{AuWh}YVC4Nggq;a3uB+s;hS7+M`jh?ctM{VR}8m0 z{4FA7&E+T=v#n><nzfm#?#YZa=jaOj2FLhX&5B*Wj& z2;t3J7xRnm^upr+zt|;<<4cnsg5^G^)rg|X4t%kvIT{B;YH#d_mL%X2d$-gl-DVqC zIL-@z*2tR{aqq1O@Q5E@O3wi6Xt09XFw3W(%uFltk}CK7IiQxC1F+k$X~_CB>qAwK z?tZwQLEKPLl&yIAF*8O%arit16Aic4dZmSbpyluL`xjRtS;ZgB(acuHWn4QYH*fa;>_E)$8C3D=DefsbrUyLH2HDbV4&{HWJt4=>>CCb*DY3 z`;1>65%-_Co}-**0)?>cc~tO)SB}7PX4vDnm4k$9IgmcfkBF809_kJI=UGc_5(bd= zk6^RyTTmZR^W=}iSeU{VwC~tr$EB3nRgR*xic!)MQk_I7OqsRylzsi9{u(OYtKwhM zmgPjvnJQ-W5haq3@KKvC`K*kHDeH&LbHuXKxX7}hI}^h|U=;OiE~<+_`2G08MS`z58}(`$=j-mE|Qn{db^&XZ{7|e^8Y) zemp8S1nOZ6+P0dd&jV8h2#KtHbWT)%8mD$if?M5_?z60Ordv+oa;FKiLDZ>z_@>ow z6Kc*;%_7`F+;M1r^wOpKr?^*LnJLLteU7uZb>UNgz7Gcrr0(S|MF)gB73q)PZc*vq z@7rMKO@Ry?Gt{YiCx%?T!Q+$?VxO6hOb61=uGQ=lU5L#;-t8pS>Aox^8gT7EY)s^S zaBFAU{tN~~*9Q}y6ZX9OBz?z;VTkbDNo9oSa}e;Fi~mM6irCxN*PH!p)#l_}X_(u2 zS+il?z6)zND{HaauRAnnO56M8+NAc_oV2QLWgBw8y zQY8cJ#A7mC>_b<|nGeLMmthnSu143kl=|o@bXI&in$PtAgoj^v$gP8WfQqM;LTJ{$ zC=aS?_xNm4e&RZw{pMj3AL2Pd5QGp4UB60Fx$>K{*6 z#_&pNh(a3wS|N7t>|>i)+f)k5iT(W|_IF7OYIQ}Hz4SKQk#&D1s@WPzArP3QhxeY> z(ApV7^oA-1IGZNMd_Td<4i8TUb9bm|RNED)<$jYAyY!_bb;G*m7(4q)BNpPzqhI+Z zZbnhCAfQoW{->qH3{SG{93K(9>5@2?6*aB2%$hXANnB|-K6Ws$D}W#wR9BEj220IB{!<* zRa5??3|K3b9WKUc_pqzQ-aCMLPsnAL0Jw=XLFUL{q@|L(lkCh_fs5y)EUBK@i$m|_ zpqe5GeAlB>$8LijG3X~XEbNc~ObTSDjG~fk>gmuv7}GytGiRgYf|Dq*L?l^WQIE@@ zqxI5hoN4_3hPsE^4Ij(Wk}`0`zi8H+2EPO7CK&7s1$g?{&h0;q*8&mMtcyYZhCm== zw^LuAY+H%-MrWwcR(+J(ewuc;@tzn0_&G2=2Iydr?8%_RDTMY-dTD&OodwrrS95}_ zm0FuPxWVKNfIVhw1AF~}pUIpo9yWO) 
z{cC+wWVP_uHnE5td0?mmVDjm@ohbQD$=_+_`bEgu$>7+59svkWGJ9HL`kwz29dj%s z-{t3SOxrlpFc_a<5h8T02>75#uT2D;bkoLDLcK8E_p5_`rzYw2vqjeDT4~+`a4E&< zSPv(=4O&pc_3EH|n1MMJxDuP3EjhnSip_c3*$jjRXs%42`r!&Uwk3y}+7sE$tTj~6adI$fqy z#@O5u4oZ-l7mIJQ0>9Jplm2?Mf6|xlMScK%9jb&<*Vo(owtp-7biUiDBJ*l=qH1!O zp+={I`>xfgp#{gR0U7BC;glD!fZyMkD&|T4&~yhQU-VIzWiyH`217r?auc~5S?qv_ z5>d&O=0gGO1O-jc%Ak4q|7tffkdp_Dj{aDn(cIe?iT;D@w|1bFRIIV{i$vV;S+-%9 z`43$E4ohh|W9H!@YHQ^uG-4=Ay|<${SC8KA&cq~+j`Us+Eapb#QcmSj{^SUf;=q9z z(M9wLO>XhhtYFH$YJRJ_`f+J_xw^WhsAx9dk?QKlXV_2Zix5H)ou&x&u<%bG0L$F>$wI#(~`=jUmjV%J|dAe~#c*-S{AU^YBG z<&Vmtup)OEd=rP}EhC(y?!`yJoX}Zh9L>|U__4pkBzMCs>#@dZnz=Rq{zEjh3A4)0 zK}q_U$M0bE6%DC7B0Qwj#P$S3Re$6V!5l?f_8lZj4xH#`A!4y*J}G%q=r-F|7m?o} zVvf0n;-AaHZm5LoPIDIcR%A5DaLgaNTFExNw-6Y2dp%6v>2|!!J zb6d5M>Wpb~_#bsHG)jgj7pT^@S)vnIAAr{Ni<;VJdHHB{5+8qm7^~*G{e{JIY9XB; z;+Yhe+}N@b`hO=`A(j^+M|$oORi=|Y4q|DE;)wPS;w5feO@Y9y8k}Vzwm(eB+>18 z5>-ahXmG&5Zyg_J3)>motf4g9G?LkJyw*z7P-i{WqMH~M{?zrR61DELe9=41`T`RnMW zyMh3M;x(@NTY4z1=Qr;@>YPg5lz96tLAH3|Qt!W|g}=ag)@zpRk9S>vJ#faNqQ&iJ z?@CDUbb&BE&2Hsph|Z*{S&+wvC%N}$QTg+2+E?o9cwJM*?AJ4x98{6x)-o9U@1oST z8la_DA^J^F8UOcJtYA&}GpqjjG|)rF($UfekB9VICQFn=B1hC{jCIXi9fN(VQTbQb zjFkRI!%3_uv_#^=df$3ahRogz69w)W{a8^6gF7?@$er0z&xjN7ni+&f*T9sZ%!~}5 z?ru?l8T_7HdqBGT@l&opV^H*Qj?6_}?AgJVa^d)1)L1)NO zyKJt?>sUfd&(x4m@Vu$w6h~1&o2P4==Yv$AT<=`-zpz43gHTiFs$>p_W@zwTEFf-5 zl6dAHY6`C{yh|c!_wK7j)ndN#i*Nb)2sG)OntStPh;M0F{9G%H>n`gQ8eyk3AZ@c4 zNb>8iRiAtD3x_Oe?`HTo2@W&9jQ)&@NJ~fOj!g@w5KV|cRJ=r+c;Y(g96>pq3bdqs zyn*}}-Le$a@!GJYL=5ZbX_-7EK7)V_vE$(uM%p=CvS#O_E*`HP%fl@+ry{JkD^&9= z>TS-+#k)NA_6WwBTx6B7k1Q}WrYKhD+_HCi3)A1-;vjtj57hV{PsKP3QMRAfu-AvP zr)dO-#;DJhxd#6fQNl~3Y-}rIqoYyZOI%QD4`YHSyg{WcGFJbv;QHeM^(=4r4~)+4 zpe~t=D9K_;urO@Gt zD)CW=sup&UgK(B6>8j2)B%~|(WLgtZy5d>$)%t-8(!65U#k_wd<1v0claE*ue_k~; z<8RTRU;{;Rbd-{DI2oN03s*r{=`}-fENmjG%;ui)V#W3P*`B!<{4@Cmzf0dNs@4oux$m>(lWNLg4t)^?jUkWh7iAS_!SxG+ zZa&U${!|iHCFhpNm7J#^F-vz8p{sTC_AxG)f+IVFU{BYL;rJRH%2kLCKI9p#pTGy= 
zIw(vgt2mFw4jrX=z|MMSRf~CoSTOvAsmS?lnw>xvnAiAS%?jdQ|cWexgiELQh_V_)0LGyOdi8k|* zU;NLCyz)gTg_i?w?MuH2{Iso~1WFA(}OFnrs;TV;*fXP|QI2{nJ=HjqoR8byAo4FDFDg3j8h5&5m8rlpv{61=9ejRBnr~?doG^NSakYSu#00Hq z^5Jsmd6~_I1?P;Y$|6J4i*a%ZfDqs?()-iwI@f~t8Oi_#N^a5j5!p*jK%e#z^ zLLH7a)c0?${EGYul{1@2krbm)2GVJ4r)TAI+;4sIAdLM|yBj7wPB(i@{3-{%?U@24 zOYLiFIX$>!U-pTB&ENJ*Bs`pf@5$qMT;6Zm%3jSU4Wy^=N8V7Ik-R1~rnmQIWvRpetT%dnjZ1{`5d$m@W(G`p# zj=ZOTYtDf}Svj8y<4mS!$sXqE5<}Fo1l78mvypIGQFkPZ5wECyE_}m)cikr0?R9b% zRHh9k*g!~P)1B~&^!=H52_6>gGeKd$GJ{he*{zpEon(k10L0Bu5(k|aTeNAc7mmcU zJ+D#$g?JcD2dyKz?q2ABRqVq#79T53(6zimX|cxe;Y8R>%MHPN z3lAcT*!}0Oar27nVoG}WguzDvFWw=Fn=$i~{)u9&`oE~che5?&lm&0QJR1;NjPIT# zXpbjL9X0s7r(gYkvcb-d`?+CZiX8A9-%&PI2O|p0v>xXm`mFt77~?f5S0YFnOPjSr z>@?oykGZ8#z!hENWItPC^s^Fy{asW&i6>k7*p}!7eq7B)wma!jnC>_$(U7Uq)6dp`U`mR*$EY`lz=aBIoh5ua~JA^9a zg0#9{Vq$#|WF#YrX0}ahi zaZX4AC;#hu&9JM{M`srMZm=$q7n zUEHKq+|}n=vCt^*%FF*`k9=!6?2gdUIY`gV)Wi?|2)SUVqnBhJYvSDR10navWT_$i z&Uu;CWm}51tSq%>0(0%CE@Qw}?o#nhro4qeh3j)Elnxkza1b)^N4qYng5eQsKk>6~ zgQcKu!JoF}QSsizx)0X1L`y$aclF=RX~XNbWrT;m+uyO}b%BQ2U(2;_Z#vgVHla)0 zy2I4)gnMnAP4vkps=}L3$oA3nCliI$%>q=EaX3V4u}y@VCwGUnc=ommTNl9@6;l4& z8yn>6dun39!fN?38iP z#rwMT&IEe1O_rGOWu`F+34Hm-A3qaVzfwza$)wR?_yFmCn7~P#&XEF(iRr#by{G<% zF$qG2lPNODINU%~?G1P64_TDBvf_C4oH3D4P5WF+IX!srQ|8>-vKXbBgO^5Ojn23b zp0#OZ&c==uI#`JCQy)BJnxRZ$5v5QVP2S|aDaK(UF4@>r?tz3SL6_)nj7*cojQ{fg z3KBE>4pM2HX{PcNm;?kJ@yXSt#w(D4NWm1mw3R(MhTw>ShC^l1zf;J{)=2*9AArwk zM@s_Km?nAOe6#e7w0SDdUBC)+6$?ivI&>Rqc6nV`2emGu&99(Iu^=B)LbzTEGfO9-bhhiY?kv4keW4<9SHgQX;ZLLg>~ zD*YSndRO7lO(E<8KX|iZQ0#s8DsY84c3+ERpfEAAQ?#}H*%wcrlpTtCUq_1B|5Y32 z>w2XVCo*lg*X~X1+#<*9Pnc}2$dO6ZU_Kxm{`jZzNWZ+ZFFL@1* z<`hc*=crShczYBW|Gn?Y34+^xn8Cx}yHvi)g1cu}(QuB3GgXfOO{+^*ScR*iih5CH znq_8HJ)UMQan3wnF_DSwdH@AjIZ57*`oz|ro+*W!HWaAg4h@diW8aulP~c_^JB98= zHFoM<@sn}8kB^Cu=V<4-3a^ImKZoX@d!ol-Ma}-DJ#7bUfnR~Z8T$inWv%_g!N{U;ztPrfh!B*i! 
zLqjGQm?pEO-`e-s&mFz=lm3?F^@cWA7Ib(I-L#8etq128= z;ny&!`qvOzt~%n%)R*I|Beg^VU1#?UsIS)GsYFWM}WT|eJeErHhjM^gQuvz z+5P@rEFq6pXI#3M%vlQhVznL>WF|oU0&v<|pjLD{UCC10k~V*PCuMHty@or<2B^5dLT!n=Fo^5?Sj^3+^-v(X#N z6c-^j9Udtnye^WXL?5A**hkOJDov{`n6FvBs;4)sVajZ$yZa413-?j6Z@SN&Ln%q$ zjUHY;`oN*1mdGi!@is7N=@yGjNrx2wjWJK7Wq|`jD zrWvm)66+4vc|@Vllb8;xb*I=i)kBvLHle=q>R}b9iI)&$BgDg(-gQG24Yfc2&zoe} z<95+PUJ1uleM8Zlh0$R>vQ#Q_qHZ6bGnj*R$YwoUmD=NO&VyrJ)wJ8gIX1&N8b9w9 z8alN3Xep3m{zHZF{*{A3a1Ta)=Gya<#1e_O*iF+l+IcofW~~o0&7iAm%5rJC=5eJC zN^>Yz$mLHLg@H0>4s;e**0Jbrq<*!+$G(STJKAS;p_8k!H-t2LJLl>M!5oY`*BmSc z8p?^^j~a)fqe4F&c-q2A8zTRr!}a@G@XlMhxbw=Zec|TyGsOnOt9HWDM9WFSg6_XI zdMv7chDy4}4Kj;c6<4aaO^&rOl7AQ2;J-oQVS#b~9{q#t{l>)~Tqt42Ux8(honA)6 zY0Kt=GwM>a>I-!)A(#x6uMS{~d=5(+Y6Fdzmu01+yUyiuquGWYn!=H*6+Xj;k4PSR z@S3W?c-LEFAfWFkv*=~)!q?}@rVduf^0jrKE(+_8*_PXD2KaeSd49_OTmTi|OOcFO z&>_)SY^*nRrTE&?dP@H>A8(&BR7I<}WA5POAg~W*%V-91{)on$s5;H_pXR9X4+p;M z)+JUd0j)zN7B&&3qrvgzmLoTjaG*#-k;exv$0Uwrw`8|$^m%2bln-2Ut`Dnggq8Tn zwufvT#UeN~C`?{2Se45o3+M@>n$$KWK~%wfw~Zn|K}wILnw@W?!`=^dQd zI0z(zZ|h%ygkRsPF?Kcf+blbq_}P>nUoO`9Gfmf3oq1SNIzuk014*2K!p#M4GUnDJ zy2tJP$n5QdlITie-+I+`I?L@|i0(|T-OOE3!UM?!*X@+;+014Xt9b&MHh3fr471dv z+0)ZbX(^puHY$zF-6<(oBMOd|`tJe9^!lgMaAo|aZ!dPJ7VpBz7fU@Lv(s@=v#ZE@ zC>*R{_G~!0s=4Dm2b@c91=w}ytcYxE*I}o84vX7O-rXzkMT?2(jhd@CXB1W#nFlp8 zzqjw{K5>=FQNRCLY4YT5i*4vwbinXOf3H_utJWK1Z3z+H*_+Qh4^mx4K}u7TZ$^uS z2u#e@$`vk{0)x0_vT<#+*;bPJ2|hgBe1ISg&V0!OAaZ=gi=a|_+HPt6(UG_l0har<*R*zYEc5O|~fGVBj|5WK--OcVha(9Ok z4XveUfCRm}nD3r5C3RqyDLaAPkh%8iX|uP?!|7;QvY@Z?B*q{xmrPz&u}f^9z0`b! 
z|3`Sb&TthA0*amG8wELqpH)NsK;S=DWlRzpDov~9LO|NKtnBMjaQI1}D|iPJ?(XYM zQCWUu_ewfh1;uxX736oh!>zxrqKehR&!BKV7R`)~j&`O!2oqXz4$n1GXC;5MdC0{7 zDeNrl+YMWcG{r(`svKE>62`zwcHoH6bN!UNK^Obu;mKN&lDMyK>s*5CKqz zQP`JkuVN%1wMf#D%{B+vl1&TEbnd)|joS6)-6s3aHwSnjzo$kB&bKr9lLxVE%_XJ< zbwO_#W^^}(iO9ij~CAsm>j<(zGs;&k;vn_ein@@Py1_$ zB`nc-giQZT`xe90H z`*XuR;RPRJG8b?5h-OpJMkRk52`q~)s9&ZH}+NktDyd{=8r1hZMZUDzPX zHa6-UyS|PTuQUH2H6P|HzSI#zgQim_Q@qDFlxebry90&+V*JB7w~44sCd#_Cw?-IG z55C6nA?+5dwwtLPZ@iTRjiHe(B!IlW%LsX5OnpK?;QRG`GM82n8@7 zwks`b{XKwFvq>q-N+b!ibbn7*gPo!JMrc+2y$$<(+mQ(R^7-|YhIQ7He~&R)?!=T= zBZ|2sL7g=T87IBJt9Na{F4Brp(;c(yOWs1VXz&m)_-*;pImJan5={ypdu-$P%!k{{ zc@4P_39P*3`_o>H?{B%?rH~i^-<=5|^ML2e04B|q9{l84>)@I}dj54i*NmXD_Hi@D z<;S@O%l4e{JjS`q@kN&Sqe{*;yRb7zNW>IJrxO=7ad%6V*x%-Evqd{D$p5-lYtbQC za8R6TYNd~-l(>}zoM^m$3F&-0b6Z(s0e=&3Hd&9HTs0I+-VJ)^WtiI0Pls#tiTCdtdwPttEbxP(?sJH)zaG) zQrguq45In&HHy=2gKck|Z(?!v`ucOrU0ngYh1;_ET51hmgx)Le= zzUDaLX@L*na;$d=k&0R+Y`Ue4uL&m`EOah>Dg_*J!0gul)gZC>+!Fkjc*XS2{_e*{qp2O}1_?DxH;mSD*}_u#p4 zl8NTaE~P!Pu&EdcwI|L|$@TfylwAd9ZS%Wc;*N zG~iFt4(+`c$39f#)0XWoiy%97@Qht-W828I>j#+%X1lu2{29l?_nCCtbKfoRhkeXs zk8p-@c5#*LJ7Ka?1{9Yr?(md6A?PiS|LN_+9{)uKPv0o3b!+)*x{nOrxXFkO(947K z&ka_B(#jqWYLIm6If$!Rua2+1Pw5cxyF8HU{j>5WzWC!4UNF?ric99%pQ8txE?#o* zP5CF_{grMiHMxz{Cy0>s>l{-@>}!XsN-|2dnUSTd`4~rO)$7gr!JaHtCHJ}jH-%|t zn?|>%Fo++QU5yQekTYlUwW+h&+!Y;%3jZ2lDWEJ!FRgAU%dVZ`i34}3cEHx z+^dOKw~_G)rWE)zN#UC>Yi>PE)&36T1h-#pp}=0c3ua2D7btaH6xQ3<%cCHWaOw^p zZ4h#Gcl9^hxKqcKRjhJA5H>WS<+Tzz9ai2Oqc*x?vF&@w!Jz1{O%!3T)prrwe$Wvl zgNFIK{v4hh>S@H3;&E~scZ}7R-*OsW$R!|+CF2&qcJc8;&ixaMa=yuic3CIMr2Eq zapl2#`Y2EJ{Rn^>hb2{8ClVA`N4H?b45VS@fUq zUC?~)#n2`|O31}YPoRnokf?dsikfIy# z(PK;`^Mkh2MLw%qVwwsoJ@PBwebpj&NT_B=@PE$6VbpVX!!fdwqBGX^ru~&dJj5_n zN@i66_hDM>$qYc!56_6uxhExIM}(E{>w~;M4MR>qGxsbgse@LbzG?5~hWT-scy+{irc2R#IGdDUVq=TLx9L>XYEG zEwxmz$#h+e-tOho^t&WZ*IYQozpc?DrcL&9C?Xy}L#Mwd#QzM=clo6r%3S&IO89M6 zNQSTP@s3MuChJuwXYH_|PaiYF9?$LMmuu7#Hy#dGxjkfUV>#dSkyDM9!pd*pLh1?T 
zRo_9@V|Nie-Ab}N^WPzZTd;qe?m}RcS4^HQ8Kn_E>qV`EAbbmYn?*vc=FOiJ-&y~a zpzKj!p!Gy|B4qI6E#Jw}!rNa*uO7?rD=JrR9%qm4&Qp#c>-En&15&w^@wM>DAjUYoyKUd+BDUA0Cp(GKX?` zy$?OLfAk1OB=^Qs;)!6Yc07Kf)2qHclK9Bkz8@B1D=-YwI*Wa^2=OJqpV%xIX5j9YyLfX=XYhp+$_Ps%{XJ~h&0%^% zPAW+@Ct+dVu1d=6qtJ4pg70<3ty&U=wZ`q&q2Jh+#8S{oDfQJ%7fO^COq7nov9lo_ zYkz{Ee(CFdgKL58tBkTmy{*5)Uhdyh4&Ks3v>ZJ^{%OM#OkYRw?IV3-1)$${>kiu5 zGJ=?_3gx_aU%bHQbJpf`SMhQRF_k~gh)YZg4xBbTzfN#Gv^vFE(XuhiaJiyv@sr+W zW{NEewJ~l$lp2AL)V`WrY=O6#GPP}lj>w1)5%s3U}u0!}r+ufunMhI99`i`F-P@ z(oCQe3m{-vm)p~=0=e$G*aY@k_IWZo0`-~(GNVLFUC@p-F2h-kynlDIwf;hjz_sQy zPLW)+YNtiIeb(e6) zTh0ECRHLQL>$Pc##1Ddz7L5^IS|RIws?P5a8B!}kqzXz(;n|5?Zmw)=B4-;z&kuxc z0j}Te{!dIwmF@I=%H}MJJEW2~(mLd&I*7HH*wAU=_TQ`>oE*oK!IGS)OwY5V@_H_7m*XSi;6Nx_!UvF7?HE+gzWY4)1m%&18)~mwZNF`Ddn?ZwWpo z_Qad^2zji>Ls8tNa%9f|N3e#0E8D;1(g@OFbHw!_cHT!hQ`UjO3@rgt{)Tsgii8rf ztxd@pX&o4miJL6%e!S8o53KN8SUDk@Rd7!edUT;9#*>{XxUMbIGi2Qz)o$8HdRz>Q zGG*ktoJ`v1qFjgIFshB*bt+mOD|yshEBREE9zDDCVoGKlHs?BX*>8$50v_DHF4M4sO zIZFcr_ex(U`Z#Fy;cBI1OkCXk?$K##dU_rIu^PRWXrT$`-^W-X~O+i0-R4t~QD&C< zNd!?Pc2s?+7nZx?;FrzK4UA1{bjzfqr6oOrO|V^0t5dm%E-cF3_6#v4jTY*zLzd%E ze(b)iqwt?4v?>ifm#)KC#_Ad4wA7){Sc@QATb|#ac$kvYiEjAcW9q-1c$q%y(UQPn z)@kFd=TzlXaI)@ajh7LYiz&#&xnZ46G7PoVA`TcM|J2w#ddbwUlI};4yu%AK?&sfX z9|%ES(Q~SLFPjYFwRB#Uvd=xRt^WNqKuPo}DcMkNN_XFp7I@P1L0`*bD|v3OW|b$; zo?Q3aDe61XP;iu5?0YyVJy_tm{UKAjBA1&-F|8HPO1Tl*$m zv8_%{>9sJJE2$HDfA|!kqGD{Yc}4MR94%GXUUOprtF@c}h3K{x&lAlK9X7eTasy>X zoJtJMQO5;78~iOFftYK!h^QlXNpA!z<90Iq#5ISk0kZb4G%^th;|H)hD$4)Uf!vQy z=(;rtB|E>cRE74J`bdWyLkcLMeNKWL=*5IX-{#EGxd}Z8Lc4;ohHh$Zq0rOWaT3-u z%)r15L|UHZ`LokTzDy_4@H?HM4Y8%^bcYBqR{ihqbr+e}Cj)kQdoM3GSae;5mS;bK z+_34nP)qz&5Ps_QZD)k~gO`alGg)7}HX#=OOf1P}de<@J+9~B6fz~nRHNmeb} zf$vmx@yD!i=$^E$VaCofu`=l7M-Q=>{_*j}!MzDE0glo#KKRs5>Qh*GvQtlW0`crg zGwxhxgT*FSm?sVBH%)3^O|YG2T3NB{;OO{QGe%`^_kx8 zmMlZji1}Lo8ndVqJWOegPI+1dxh{Td4LMH->KTNG`&i32OnpJZGM}Z&IwT(s>?QVE zR3&D-#v)@urE<`NOsp(+qPXn%6Ac6Y*H|tfe!zb)04buqo!(;a=Wz4z;@H80RBT?1 
z`MM|TLp%u7!7PVjq>K9r#}DU%O{@qULl#H(OHmx>e{iJY4=F&PJwuqr>f%&5u|z5< zXA6;~w6#*lU7+$rfy!e6Do>|r=Q{@-{0s|x_XBf+Im{s|FsHoN!X~aY=IrapS5;@> zLwC-T6rZo=`!VP6<$P(*$+~%tNB;kR5PX{OZR$62L5y^a2HPF}`)R{khs+g9ftU zMPYxlq5OqB(VPE3q08X*2Oc`-In&RvO+c!=Pocx>wAGG0qHw4=DDeVrJmdX_VLj>1 z-%K3TDDart6i&Q*^?}lMc6M&Acrh_Cpyq4EX4(HfMJn_-Sc)aiB26pz{_)|Sn`>m? z1yG`y2GmsjXnYN!{#WQ!$Q55tPfsQ}iQ&zctTA+FknmJ}Yar<1cmV+cFpxZAa|6^U zHv+}rEV5MV2}W$hZ<(56k-}%9KQy+qWFwAjF0Uc^_iD6;5i>J0NX<&az@TMfVv+#D zvABJDf(xdycVN5ZD3hi_V_h;bvYYz)yxiPsCXlmE!(FYNG6Z)64f{|~U|Xf2SX-S4 zxqbTEgGZ1oVD;0(QE!ryb5g~Bn-`SH^=zhEuP1|z#iaR_l}TXzWnpFYfb<;^GMu%I zFy+ptRvU3@Mn-KL8%_g*-b~BMo9EpHNjFOO=OP`;I-D0c3=J*3%oaLWvW^EnI+}(W JC2BSi{{g!a9I*fZ literal 0 HcmV?d00001 diff --git a/how-to-guides/debug-model-training-runs/tutorial-images/debugging_regex_search.png b/how-to-guides/debug-model-training-runs/tutorial-images/debugging_regex_search.png new file mode 100644 index 0000000000000000000000000000000000000000..9278ebbb811154423d71429ebc4e87fcf67dbb13 GIT binary patch literal 102276 zcmc$`WmFu^7B)Iag1ZKH2u^|q_uv77y9BpD(7~M$7#xC2a8Gb%a1HM6?#|$RlXK3S zbMCk9x_@skX3bP}cXidSZO`7jiTI!_i-Agl3IG5wc5Zqq z*cY6Onydt%Vwh|X)#=bsN;uVaxp0PvhICnf&T z!|-s)(_0bxBzihyH-}9f+(n`%f0X-G-kx4J%0`1|f0-GQ3~b#mOj{_0v}h46>ASY_ z9kWg8NtaI1)S%Sz$m`LVIwIrHq)E|GFW9w;4iA2RYnh~I(Ua15aO*J`bn7i*ne;Z^ zGwVVmOPb4WmeZ`gcG}8nIBSlEgXZsX{w^QvAO81XO_8Gid(Jsf?&IGE4defBr#cwr zCLIhGHY!qv&rSb3q1K(xxUOBNPyCk?mi%5;!Y?l)W3}>(R*v7_S-fgl_(_IGNk(1C z@b`vuTM9;1BR_k>j?OgN8XdgBENB9lgwp2*7ajot08pnOr~Q9kO|J@EB8aLmnBORM zbTb)g;a=n`6YWUlBRIH+9tB_?Evybyx|IuKZ`R9lgyte#}ltaKq$GMWh1rk(;3ufy#xl zb62Dd>3ZSKC&%!K@)1>47FH|l@Qlh9?Z%wB;B^M}9m4gB=690$YBWM%0SiSS@l9HyI{$b2Y3S}U3c%^i zG*eqe>YroNQAj@bcRuAK24J+(5$m(I-+b(Z>El^^2e=3UX)&Y)(hOH{s}}Kh^$&>) za+7e6X4p#tze^5hKbtI1ROg+DFNOz<9$ejAoxZLXVN-fv6#bPn*hKOk9)Ovzv;RML zN9oB;21k6Jz3gtDt$xfx#&Tp&$`WybOXeUlJ=_J3wmM>wz4J!pbJUEudeu9UTUCW< z){EZm(|JeB@ytA&*-ST$_jl#Dr1fz{GI?W{9NGRIB@d8Kws3&_{H>e&h0^YKE{}pI 
z&qRO^YR=@5$=-GvDu{pvFy@4*8IElW+s`(aMnw<8AP&#h;q0J!b}p)mMRiF@E!PxYPk}D_eu@WVYd=bd zrO>~F$}a9lpaS{@G3S-C;oG(bhVg#r>~IK{GdlKlHWkjL{bEO2jycRLoS{}(UgbTT znUVg_mP`^iX8ey*utER7^uXl1bpFp0hLfBLkD)8)TOsE8yrkcC&Lu=wD=Q=a^Y~rf z-;lU4N$WjMV)UYliXM)z@ejJXF9H0Y_qm5N1vyr_Y!?l+_uVeKJJe2E?%7+O?riNA z8*mJo-4eB$58FtqJ=TC15aPiVFok+G(!h##AR8m|2efP*DI@eO0m@3{<46nUlx=RM zw)0;FL#ayLzYQvltK_}4+vlX{gdj5MZ(S|}pW72ZJ=d`QHkW<)$qjzR__~O#eeH)- z*{v5=bOw^X6M5T^)bG|@yX*3HNS(%je}(4{z71k0X4X##cap;X+V9f z{=2zeGFe;hEKv*%M^csNW0CZ7d*x~WUHPmFHz`5EYo9df%kI_Sy%bRw?%Y8^xvVJH z5s~PvLN>kLhdtXeKi+w@Y|&^aMS4V~ydpHMdjs+VW%g}J%U?1VeZKMTl+kBnZ#M%8 zeJkzme*d^WK^s2HiW0htS6ZBIvCpg4drN`;Hd0e8>mAI z)%d~QeseZaW!`C`08ap5mUnu%hjO|+c(BSPCM*=S1>~!9x`ezAKwY~ce|{1kOh2cH zo;jNrDj4aR_Qaw!S$4L2oQe|**>hR=bw@z=Ej^+OuYjoVYtJQ?VZFSE(2}$t z7bo^2zUUfqHLW)b_I%qDMH)6^2vjmW$`P{DZzB7SiR(wd(&|qysAxT@if1gBr%B~G zVhaB;{Y(17Tt@6{G(TpJr!sbwFs6xP^) z>F4dqB2NZ30{FKe>KXaI(2Lw8U|_^OJGN*BVJ3KoY(?*ba|w}>o^TW}r_ac0d#f#y zHie}WJAi(a_Qf-nrnhP&d8zS)hbgXM$N5jNDD=9l)heGN|R6K~zqBS`zFLfnr7E=oDl0$No1 z8=GklaIy(=GS*%)2C$x_z|ryj!%A4%2^>J4cc|+ozqk8-&CIB`btNTu7d!iUU6b{$ z6Za6uNE6?};eE#5un;+ThN z`xoi}Q~>o@%H?!g3iDC8$2H$W0g}Jdw+KGQO-0E&`UCx~Bc(bz5{kKerg)o*&n;oz0m^Em?6Zk(d9>&srz~RAje6Z&1 zKmZiMV`rRFEPJ?d7PMAV0HKU1C?A16tzV(Ixj5Olb&;OyuihKIs=&|F_F9QNAE%q3 z2ZTRQ97w>#559xz1~fu#sAd$K6l!;7`1YIO4a0gT+ZFJMSuUG~OnoJh@h?yHt?(=5 z^2&^eK7(fmxM^tMpKnL}x|WN#^?bZ?=25ZZ0@O;}!oMaEn$wOXG@^kb!|0=Q12!oK zBSvr^5}5bk1Uzv=2PkKX?H(9(Zjh11-s-&Jpqlj(C>~^0@ET3cfFc4sJ>!CD{0nko zuyiS(3KLVZu+4N82jDcauO6{=eqIy)ej4(Ms7OdQ!y#p9#PIC)qAt21DvPVI;w7tm z;-QEWDq(tvfS$?wdA|O~Hs6vms2*uII8hfUehAzy6!v`@TF>mnJFo4RU|5SHqJw<n)u|34^_XGydhnY*>U0|-Be#+)O0(5;UNL3Q^F3ay{!#ixI$aaVX;#pf;V>%|? 
zepte59lYi$cXX5xDV10bm42V#7s6b>zN9e?(f|7lQP?xON-JBws)#<~8#MoJ#_`G} ztUXWMztW0H=3on#NuZk{60yN$eN=q=nW`Ky4C@d_Ayt@({QRWEYH>=ztCf%-ar|D) zKGC&xBE?}$6MBG8@S>E>A))hUR~*%a(L~ItGYzKx<%medn0vSOlBn;z@A_N8jfNp0 zLhy?g!>9K@qs-?6!74jCgc!8{_|mQ2bT;I7mwU7v=O1`^+4aIY=?i|C{LuxjI4yA& zt9MXo(LJ)9HK@)Nx3Kpm9@(s83sFcfWr-guL3l?n;+`Au6x8Y{UH3Xx7x@0s-q~6NYI=U-F>tkvc>p zNE0&Bp@q?35*u+w?7Iy44{oKr^-ZbbR9)QA{#n{p(!JZu*hff%+Uv7H?~>{JoJhY% zLIC2@6qX;_8VgTiFm3f~uOS}iE^B7R4^hbk6~{3$&($J=dQs3AFw%Y^0_iwdyw6Kh zraGNhc;JX1dvC*I!O?>%JH`)H+Fd=Eoi{Ixs29?WKVtMuR$nQOd$E$Z%dJezZN2AY zBd%qd`bc<&oSDWtCwFIOXK=$WgYovLP!+V1I3#h-?b1zIy_uaaYkz$wtkNXcQ;Nr6l$ zijub#0?4HxLpXrjVJKNkdDp$1dWu`p;;|(P({I7x4&9ronX{9S&ZRW0+KVp__m6VO z?x((RfD$J~uV-P!k_}f6BYXxS!_L-WEh`3Mg4YZ|e1Ux2yIg#{lP( z#fDKg*#x|astP#ZJ9!s#f_E$O$#G(Vs)47$U+Ym;C&DHSX~Aj<*n%%`%1uLY!43^G0Df;f=(`k69%}X!_C*e57 zQz;jER{GNH2`!H;tCabr%QLUW_(B8xYqVrz+wa<|^9TRz08Q2hbW+|yZKv{D@Zw2^ z=r3u*&3a5QP9#wa-%4cr!_Wbj!8qQSMy;S>dsVCa_Uog6+0)LMpw)kPHJk+RTFPFG znfYt^ac>vM0{$-u+$A)_om$Cff5V1M5P7BmQeeI_RTEM#`iHPm^(65{7$0x&F1V(4 z(7685ia0zX>?08*q7FlI zj>K-Jk_MAvPZQ-m^TV9(JzGCG=D+JZXHJeQqHMW;K7HTuc)qS@sxS`nIGgh8i?rwd zLC5^xXX?9R?m$1m3=f4|1LdC^HoO1MQ>s2>g3jilAxA_hJ5@N&Npu|sE{XmJVx>>e z?VH8-_C&zvVa)TYNtp9~J5_f?A2#FB-wORJ#5En3TbL(|s?QgetC6j6 zQ=i1bC7ABd{Q~0jJr(|$Q0_~W9ShTF?~$dE-_3H97qb2n_W^l=0lsi;KJ8+*`J+bH zrF>iE|79^D%r!^;#m6#mnv`&Z+)ce7U{nU72w&`3jEcCe+Mumg=$6pE3PBs6G;(&< zTH9SmExif3x;oMc77?rLWA8W1*wW^LN1@IMIm7(4dN zo(qxBsVg1K((u(tKGoFOn@{NS9J%btpvDeOOWN#u97h|MlDe`=@;y~kR@GHT-%XA- zz>%~jEr7jR?LC4>Ift32NzI3Ve~sGdSLe{&IJka-w!H5gpaNb zhw3K#y@Hx^(fj>|M;Bt_*eb=sac%2TJEZ81itq=g1_(NuhQh}*9>UWj)?SX|bthRJX9-zKM@lCbzw$hm7N1h8C|N6K63FV*E#{P&RGwM! 
z(iHrST2GZ7Kb|nCHe=$)R8C$D>$Dx( zm#;lcW)Oem=da}<1tj=Y;^FCVm=DRu6&D&mKsk(6nK>U$z z`_h;psj}p``Kmmv!CAwQ!6ldMrW3g=Z4RGbl?wez1!11;Ydegowd;%Ma|gE=JYT@2 zWbwnFDD8*JJugsn0?^Y0V6CrW%hMM(By4Af>&RsacA&LAEy(P_6kl$0H^9W}#%FqQA3(_V9tSwj zz=jYvH<0vi;R2f*KNqxpfAb>`d5`f6@wn0Sea$L9E)*_)2h_Ho&y}K|5UF7m*1z^{ zlOPqhl9S%FJF|>>8Wcb(_Kc$~+)7WQ@H^HpChF+^3-Niv9#>9JTRiRsFeOIy(p?%MyakQxy|^=E=T%g1Fh77i7YBo?Anml5 zaA_zme0cab;xN0cgdMA?nUNT(|6$}cu6Soa8KQbKBC9Bxxy2`hz8*(QHV`?F4LmF8 z*ow4~Ns4xCQh66HvdnH4NZ?CbiA%Kg$=PS#R1M$12q5)+z%+ox(E=4Bw*n3o+I5mX zm&tosS67o5VMBzd4++)UK9)7r48^3d{_LPr@)&}>K0vH zAVSfw!!pI}UW4!?p-?Ik_bY%JVHeHyo~zhmnukkho(kySY*LhY7h-~366<%!?E6}k zwj`>orHb%&)a1h1rBZcOO@y-wr2C$baG%rMTtO=HwWOB`+j%Df_G=(=_Uo5d2&24+ ztw)C}4Xz z59>BiMp*hkbff`jPDkIe1>lm03el5?I(i9P1r|90V7d&)|7~N3ncrswreYp?)ggO( z>1*q#_fk}E;KihnY2c*laAaC`i1z*CY$ZSwTQe~rn@ytk^}lMacwxFm5Q^Y6g2-+K z2T^4z9Y@$ngMYgmU*BRzKAQ+ng*U*M7BA$0zE`PE@$(lT#Va<|@!*I)kiAlJ9lCwX z7GhZ9>g<_@@LoUm^`#fj72PFJm3o(z!$o{~8lEiJnqo?tjhsR8=qtH#Do`UT$oAUo zPTz63b)PgT_(%MR(6LgE8?0e;>pTEd%*K&wwH8qzQ0I^k<0E0K8FBR5lv0uGH#3wMm_+CJazd`~H)iS#n zCN&KlZKU~;h1{{2UtpNev0`7VbQf*g$9IKl@Mb*D>Khp)ZB5n+ZmcD0Fg3J_&T>dt zRci#)9=9TmCj2qkNU}Fm>8$hzV^O!#W+FhWCfi0K zb?Vy|h&0TndEqZ>^deIfS*qnF69Yh46kzy-V>Knde2z(vzQxCwGKfMC4ZRe^3mH2b z?;V1L#xv;46ot){ASy2;9EI+M71xYO?{fuZb#R+~IY>a?v}hfB3Akdv*I!xA;HH%$ zjA8=LZ4PD@cu(+g>j_a@Du?ZuEA0V_E4FeRD$QB9TdvwQyjZGlr_nmE4NUjsy#5$@ zGk6(y&Z{O~zgCVD@pB!MujqiJ<@~u;xq|GfPu~OErUUp0*8rg#N`*uy|8Y1L-&Xml zZzk^^GKwuMM>Wz?Ujux6JPSA-En7L!Uyps?{t(sHCL-n=K?1#Y|HSng&=4T?8B^^* zKsn^~%V5Kth!F8`*2d7I;=-5)A$6+s#}VI7zV>s%2o7r}FDR_1 zq6nKpco<7eXRAzl0&UOdkPvr9>|%*-KmOZmuD`Dph^u!a!PNv@@Kq9pvUmO@#@nVh zc`Op{=ui~irnQ#j#x*nIyea`syg3wb#W5vn`{)LUG&K^t2GdQDfhJ;N-oX6{Im-VG z#|Nyodw6qSE1`qlwni)F6z;EtDa?19J%R05`C4UCh`#;|H%bcS+xP)uTPO%RO;y7V zx=ougyEL3|p?-O>BW-qzYEse9w?T)sABI`E2m&dmoD`qkt1mICtFe(E8Q`P4eimrkAyoLhKoa(x6oG(XcDhVA07|_*%j@Y~{hZ!ABlyL1 z-F0>lCxw)S_eyjf-jtB5;DoB{r18j7WNn?BuA&dW{?{G5_zgQMDB~N-I+WwpZfu02 zdamK!Qmk()+lb@S6P$;W$hIXL(YDDFCkZf9SVJ~F 
zSU1wR1alG`s&aCh?@dzu^(wgNRKqaIZCF`rTi-gm=f?f_;&b<>SSfm~#(1f#r}Y2g zrKs@0S9LEtVQ6E|VrIbc|H5gprIT>yxaV5OzM9JMMKFjNPjowYq<%11Y7B#?FqGPpi1=S<8e2Iz zhd0IQyZAwf*AqQ-c>$?iJmB00w0q@co-xw>)z2p)AwzVa^vG1@znSs#7s>HOY6^tU zaFFA#Nkq*@myUBDK4*TL-OB%qDqBA6IQaq%jUFhyn_W374B6tfrUs@;Ki#+;EPMSo zz8W6x-Cdqp^yDwB`WKV+5s}#8y3R!V^+)bvUf+$QozukW{H%%u|3?>uhl8`*8C2h5 zmd6u~k=qTI%5F*1{W@QQJO4}9b9jdTju)QpWNY&J@44Kb|F1;9OD--Bj%P%>u|Moj zvou7xFkb7=PQ>Eb)RK@yy_C<#m8ZY=#)M`Prd{<}cWlh(x%&Nbf(4fa^-8I^;6lP~vy*K=JY|w0KzUbu1g)_#c@!>;o z0;Ph}x*{8MDf+oheF!4USfM3g{RB&Pq3+tOK)Fy-rbjh6EG!QQJx0dFOyZ;dLjlH; zk`kxmMS+Bb1X#KQ?nCL+IRj1pSBM5FYEU~MXc^eMEme=^_gVk)V1bs)=2`~7kHsNA zki*h!vWV}j6x@Mv+)_^~`xcwvRm9xLa8=_{88`y`a3?~86MHW&|2yVV63$nt^4jK4 zo~so7h6h3-CyJl_!Zl&a$2XY-vpku z;V6@#(#0Poir06fF(`q#O}l@{VRB98UA>P|lc?p@myucY!f3ch{dw?By2JvVmM=s65MYxpIF&9ZxUt>ZfXc|ASN!Rc|SX{gf?X6ypdha!F z=Rh=tQtxUzl!~9fuO$5sYwZ`7l#qmBQFi#lA#7;?0JS}zu{6$&-j!eQAUJf_=)03> z6}-yRD^#Ud4e6rg+=erTV(kQXASIce*h0U$x=fdMOu9)V#w>!duw3s^Rz7{F%x|8R zDdJ03h|AXEUBBI92|((B7d`9{m~dHLO(VXVG1$f!eZ(?a-(Xz$DmTf6b=cuBu@U8I zOG$`PAWUi~n& z2?q<~e7S!WC6g}f4mTkBI$6te>0w7=EDuPOMz_D#8OG&x$yEC04;MI(WC{bhJWh>0 zxr`Yjye2zY44fmT^fK`pL&_T$$J# zLrDqv$duo{1T6UW^{TWcWrHPHDrp2I(yX0UVw{8?#D0K(%hMjJ_+TbCv9>Qm?oYcZ zn-D}S%j%1&2*$d@rysjZQnU#Gs-NuQm#4sdsm>xc(_G>XxVx!0TuOrEdp(P2D4j3(1@*lS&gAa=kf~1#(ARPQC`2B& zPcv5y>4n|h1GQHz2E!A%hfRi0G3?Er>H(yQPl;kL06}mKs>N+XX+nM#3UYF<;NcN) z=6C~9Cf`|c+ep<#iHR9!cUt$ch@IFy- zVcci%asBir@Uhuxc)wAr-^(Yu8=(`B&Gt@OPd=nkMfPhwAC^3CY?Y^X=?^D8MFSzE zrD`-ZS+*{RZ{6cF$BClB3aF@&oK|9@=p>^mhSE}B^j<}^H2yv*-T9*%Y>kX}9&4ywlQz;p_4^>o>(xsPyHW2Khpx;0h-m*32I>%|}M zbFAYT&XvN8Vp-UlqrngGn_CKuqBto73Ii1brC`%8h&?Ay&M)vRBl=5(I=eYo>5}r= z_E4JDSndml#fE@y-_X6GOYY~}e~g50&ZcfLxKGB%D|>rW(|CihSvYS0iht50QJNt-uwV^db0nZZ%mFiBR2I@@2;X zi{YstN7KooxpNDLU!L9%fqb&x|?-F&8q{hfNt17F}5hoS?m5#cG|QG)oy|HX)bsS?B36tc4#As zjx#Xl!ynN|Zgx`)^!2sX+RlvrvQ&9Ec5m&>1BrZHc|3OC9nVKQzqt6kJ^1RD;m>;O z7q2(T8)EE5Uuig(<1fU!i&eTira(SgXue|KOvqagCCs0({a}zMxzBMsYBQ%4zn{ro 
zx~cmb^T`<*$xk5os9z9!B2SQ)(T6cf&ar~-32ybg1X0TUh+%pAB&q&13c}$kd8u%} z+09;T+dB95APPr2nF)f?!Rndkm3q~KTn3cf<-b{d$k9&b$FGb^s&9JB-9cyLu7CHD z_?*4$L+i)!=7|KdM-w8qNS6fEE%hvNH1qN<^*oD>QWgoUv3#tQuo2OSL>-vrp2EZQ zszB}B6ZN%a9e&)AGii71o#KRHbE7wnskY4 z#~liPl-ahTgk}@vbx)KA-Hupfxk~Z#4=2eVAAf^>xI*+RDC1$N|3ae4x*y_{$NtkYVPY4>(X4*~H za`GV3w?^$NCwR>{(XjJTkwHGnY>@{P_JPtNXOZ@W+@7)-19y{wx)UdXnLGUo*zUyD zF`9|BwtOi5sOfqY~h3MSB7q5LeTzPX>lfS#) zLJRwibJ)ymzZ{SSVj#yIPZy?(FEcc(88^D$+ zh16^ATU>%px_#Vx*?3>QW(Sb*qK>t>emu|O@;M&y0D{0h1V?A{VYH{qZ_k~dbBN!4 zDXOmGmM}bqzWL*SP8F{X%z5mKwxzq4a!#pQ{(#9?LgP$9S|rC(zwhiXqT1Rz7padL zT4o=feGdcVrIhXGiX*wMGCI|Ae(-JVMt(Q>(bZ4EDv=1g&OhxS~a_QFo9&)Vo(bN*q^r0;4$|E{^tCO zF|yiv!4n`>h?$deZN(QHY*jY@<=FT;k;_OXgjmRx}xC(nM5xpWcJY1HR4y;Js5T+cV?N-b*Gjl{<6FAt3QIBbKRfA$E?vY4R8rJ7aqz%&74aMx zozze$-~`V23#GRZJ^ClnlU|mr`d(W~nY@cFJE2w~A`c7fX^a=wL&AYm1_PL zsC#=1@Qm#nuEY?3*OgT!#OZ|(GX}OUk$%SLctuheV!q3mwG|XD?r)!oeu8kb2~!b; zBUwo_=4VWuGE@`ohz$DO;FY~`bUfDhZuZV;QE0${V;s|9Fg|>Th90G1VL5$WAj^3D zLe77a>qn)@dWrrmp;%!Br|9HXhF$&Ms*{UrqWcuW*Ll-YdP!U$e@K*Nj+A zgH1)U&pJ(S+3}lC+ zZ9FzkRk-lX%>w@C$VsOyM0w-L?-=2oaM%$gBj9~o>V;eiTY+?(KZ;JDNGp0?nO93i z&yS&BhZJv*eXI%VlnvP~qnCb>yss0kyKWMmVnVVZ@!J@c}@SC;vkaUAqINk%N23| z1*ChJX6vOKycT;~0ai91J~2BL9a(x|?Ry)GLVh-Fn;G@a99Ls~rar{v8U{vwk+H5y ziG4KJOCu$UrLiIQeY(QNq?D8O6^DB>@@Wl*y2ylm;bFDXrq2Oc)sD};_1n{*H6p9F z&uK_2YCeY@!n5@(QHF^7bdJ4Y#il*B4>~rG_BHT%|bjl=GXv9Yl~)xqsSNZnE; znx);aLPX>Y<*?%=1AT8N=o>J(nMlMU!d@7^3YpW^DGsBqSkErrgxygcQi%&pD`rA2Ax+u?#RUZf;&BT!wS?{z&0z?(CC;H`8}QV_V#+b8o^V zP=9yH7B*(hh~-+hBCt~9_jMaYPm#mi!0DkWK>cPkU8p|3sBVhl*ABwlX1BLj$43lu zRRgp4ib_alH`+fdpX{ESEnJ@;Y`*Bd!*z*a@^=4m4-2{UHMu?bKBb3Igs;r;e?0c{)_(4x*QWB zr`NE07)VArqT-t?8i+SF+Aa^uS%z_wKW6&x8Uy2U7~ON!#b*%9I^MI^3*zp%m2}OG zz)=ysN_G`<0_1Kg(NUmd0ZJUG%&-9Ioh&ZUjl>a`*f33xd2YsozB)iE(wtI{2r&hK z3Xv91*4Q9-RDp+Xa#EufGs|8G>@ z52ojs8AXSA6f; zMD7pYerBI!(W>ys1ZRT9z~@UQWT!ceL^=FmU?>z&)ieD^qka82|_ZJz+Z_+P<68|mb;UhiOUQKc?e zW)k5=#BZlz*(~JgeJfs4xaZtQ5UiZ~G`^e_XP932%L)37f-=Ozbz-b+5YZ14!!ys- 
z+7O8U;6=kX*7Ode7#qjF)TNOxlyweuWH))Rt?$tMkrLk-canPb=+Mxt#i2;T=>0Ou zuun>A^8_`r!qjEl?Hjo;u4uv2=GpHZErpU8mqs!#h?Dn3t|wJ$u+?a;C*Yksw#;e` z$i~JccHzLQwp)AGsaDT+0Q+g2c~d-U`X}{c`H92VX`NeGu1cU`zKP&C8qlK6cFFzN z!LXz8t(0EuV7epsN4YF!M;gdIfv==~+T>c6DWuMh`e7l5W&ZsFL2?cm0e|Bs3{77! zIU|ezxotEsc;1e~tb&N5{zmP+upL0A9!n*=_z&hk`x zlUWFK^-Pv=ZjnKm7-RsvHhD}oGTV)5_jnkSTgUe6rK5GzH|JQ}a=6;840{AVz&)@Pz@?+7bd5p+o;yKBv-@oPxGK>Lvn zpgW0wO2l;J5RFc8$nmiEH4@?xAUS!uphNvyvZ-{?eIPfvnk|OjSXe)0f0}Lhf!Xn@ zk`@PND&MEsjrg58=OiX5<)z+umGh7j*+TV~`hCOg$3CLJ zBOwxAF(N+lMBpb=Q8FR@;B!)4vSFs4uNsyyS|467b=fA!(=(`Ke!D+urL+ik7PBb=g8UrVyoIsv z1nuyF<+Khjbd>SpmAAYDv8qWHEJee%cC-*i+$9&s9g;TF7Zf3)^}=Cul;$Y+=9hxI z3=H8%E?^@WQ?)_OADf4r(~2$}aMbNgm!UjMhgUT(U_7fHh*yw_5Z$`&24pw?lqpls zegyZe%lYhf@FG3B%ejBFN)-=xUnpkfegx(NTfVXwQI@dzV?i=PSh-%qs?p5Lt9F@r z9hai-Wf#L>ZvI+3%0|q2Ck4ziisy6fFR9zwC}<@Y}agL9q1r5Zo68c^4pn3wN~Mq+&F3pVj3n0a$_+v3vmY>K5U})g9rinPcZ(UAL9-u`V>{ zf+ZsBle1vyj4t4_P{g4E-=@<35R9eiBcH=DvLM#3_s&ZTDNi+!;YQdF6^yR4ACf25}M=K|7RXLJf z)DH%Ef@bT%pWok_@S&ojZKX}_%@&(JJQZhp_{wn9Sd#}{5LM&xnRvN=1m zIMx0@ZKZfE;Tm-$A!#_nB2yRhQC3K#7$w@C=ZcdJrE`K-Z*6jB=PY*i;A2!Zt(gy$u z$A$mnXuu+EeH^S^5%Gmir^%B1Hv`n^{GKF*Bj#VB3W^gH{5%$~)QBW@kLGGgbF)-% zW#wH{GJ<*HmGZvHv?8!B?tJ;o2xv<|RFuHAc-NHTaOK?rF5Ff|-KKuP)5kyz{ff(4ydHxE(ANK2o z*yJcqhnbGOC_HW24E)jlOyh=)9MAIYE~4*16MLm|r0MQxP7mxYJD)k)g}M$SZYzn(yT0h9QytH`ABcSrOg#_!HEvzgh;I4 zOxHau$XI24yrA4+`R=R(eqkgU=EY0=iT0>02f#hv&1WFgW>+B#x*fH5p={tdkjOiX zbPr7L+&Y4R$Q(i|SZi$XXM3S$B=74sO{66kskYF$NY(&81B6_HS={Y zND3qM!ocfyLhNd`tewGVm0l3Xl$ly<`xqh~`FG{^%Tm2U+|w^R2G$$ zNHq7=HTXivULw&pgkq5g7WMUTiEpgcpf~T(yBG@FxUR5dz6y4gq?T(~Zq#fM6f>kd z_rfIr_zJe5EVQnGk_ml-$rqlnW-7Ie_iI}F(maD<1p`fZMFZ|M*{hO84rHO?cqo#_ z?qWHC5=N+P$axH?^MvZzwtEuLx|d?}c(smM%cDgh2*%~gVH=Ff@BrQ5TCwEaJ6BH^ z`iQ&wHd4SETiZ2}B6>u-7+Gt#k2=<9?U^iPMTAu#ltALBHAKP6?9H z|H*ml=gl1Cs1IQE8`Va#Y-(U(4#R9CjbEO2p?1UN;$YmUSLwZ?(%Qe%W2XC zRgD5R2|nhk-xnYwR|$#67rFDo%5yE~XXIm(eDMPU0*D1n8^BFtCb(DAPDxT%&qky) zxq=BLrlRoYNfJ41GD@4aMPKE_H9%}d(NBmxM=QY|+_Q 
zypta+?_{Ky4eu{gPB?Io1`C5^W}!N5FS;f>c#4p18&T#leEEegxId3EHiE$MCSYGc z9W>#vZmL?}w&#w$Gt~_T1Dk8H9t)(MS1>^xb50U_@xc;)Dzg`_X}z%!xc28W^6M_! z^8x^i6LPvq0fXz&C7SD$M(Lzr;5${1{zJq*?()wUJs|QS#QmewTO374M3l)U4+vw4 zT5+3WZ)^L^%5jI$%KNVCp@Ez!8!pfNf%5_i>u_OJwD^^dy1rIZ%_Smj2gtW659_5@ zYtQ99>sujsLYatnyQxIgPhV6N?~MruJ%KrjH_rwWq6uKA#1B>`7ML~cZ&}{=EKnYH@bG(q3T6O6*E+IRB3UxgmQ(GYP&OY3KsVa+ zMWqsU-5b&Ryd5@0U$m1G!ulE^2k|${Pc0aRE;S&sqGuohnZX53G&*;^O7DL1^afws z^;@^g$M_?Jw&5Mer$KU`7UfpP@tKdk&_ed{ooX+#nM}zXHZiew*$-NW(nIj_fec7h zwy;FHkSM*wt#O*G*d&*WGT&&@3OJMW$e#;_^gEge7pw*S>k37L>GIH9mD}A^Xw32n z0W5$R@c!BHP1$mJMsLvT97dt#Fb_{fg_>_)YVHi*Y5K)P2XK6!zgFmF{Q0Pp0w!rZ zn{Z0jDE5I3N-Z${;MJS^{@jr4^W{xaZyZB3TI~6XK{)ox3b)kZvV~W)1U_MH*K9Hw z-CMM{v;Je=6pMKdN=3&YgO%oIi0ckyo>}VmxOolVkzeT_;&YgO+iMiP!KQqVdmeHd zVkNLmpzvn0f9GHq==^2K3qJz%jM_s;FZd+*xo2M(99S)VP(6Q1q3?y6hxt1d?Ze6q zPE!F?oRW8$0rtwIjK+7anxgx=yAx9ch8DZ zmgooO#G>M}Ws-MMe^R0em@WD_L##x!MZWvTF z6%>Y0TE3L=+TV{W#Mz_9on5Dz!z2KQ<9l3%>`cnS_pDwzqo)VA6t4z@g~!Yr52h+R z=(s|`56vE%f#;!-Muw%y$WszGpVd`?dF3|SpP8}Jw@HWzm$iDhjUgTPzy1$n-vLf# z{Qi9;BSf|&Ss}{E-b6%MWo3`-Y_d1WUWx2cGLj@hM)t_gE+c!(-n{pt-~7Mhde7B$ zuG8_H=lRb2{@kB?d=E!{6uDjHFz|D_!^M7$;xN4&YfKz((Fs%L!C~dc945vDx5t?) 
z;&@Vm5g}>5$IZpBBCNjt)S#C9kbdcI72^xpHePhhbIrZCf=B$Gc-TLsp)Qq`6Wj~K zYA13JM1J^G^~D`m!v}3WQ%P-U#jc}P-A&n&Wq+t-1}lSf8aqY1cs~w*&QXea`O4#2 zZ|J$5>PN8sN<8$`re0`_V6NjDSA}*Iwhr$WPT+(v%w&S3@ZRP5J}#w`R@idjPDE-h zVkcsIvAnm&X!9gz$%YzzJ$a9ML|?q-)ug-a0TrDHNlBnmpSB-u8BC0F!lmL6W9%@Oh_(EcE{+DOlk{7L+xI?5KkT*3iGaeKXYs}pLOi<1``C0TsYl49SiFD z{7}=iQNBHKlkue-29bJYNVkdnbkaeeO|#EjLuxc^$@GhpQw8p=*U7^h>vt8uS^jXx zk+hRnKCcV)pRzKEkfY^KcSrkP(~L2hZRQji z3^?k)Gs#@Pb<(~jyD<`YY`DP5%q*30^9mWMxdOM2gmuy9?298(8btCt*A$1?Cl6ci zHQQ8Sj+An;meE}%vX&rQ*b1Fbx}t4v-?_Pd|s8zUM_ ze(gV9>r>>^@>p~qCWw<5csTTDgD75FD-69gipqUwF`70f-hQA@6Oj<>xq#ldc9Uk` z+UpN(wn5c`z*UcTi-)=e)l2AT!@oQX{Zvm9sXR|nG~P)G?IgRAM>ciCb4%0iy(V>f z>r`r?vU*2jm5TCduiu+m8raif1|OQy^mfz`Tm8H@cAqwa8XXg#S`#96y-bN|qp=D7&_rdq+d4`R5VY*(!npCE1DhG*UnpYqn&i7>F;G&FEuYJZJrZ{GwuI+~9*cyW>P ziAkzVYsnHeuBE~5+VpQMy7h^nQm36fhR|*kN@4!eThv}O4|RX_vmqdRYm<+R&Uk_M^J9KoN9RlVgL9RUKI#+$;4Y2y~x9`8({@&{?28oDZ~KRJB3K{+wM{$@JT zYEFdg-Zd<>G`*LO<8q=wyc^{=(Ut@R3>FLaBm^--4wWmyy9d^%zKgN`?7G3HE0~t! zdTqG0*sTyz&Dyo+7k*{T?y{v3_iOL!XZ%UYk!oU5+8z|u4c{gqL31TF8gOMZTS@7s`I${eq}{QT%w&(0qo45u+Y z5@cfGq1u_1f-t69pT!YP5;E<(%61o6Y!2q=3QuU>$a2tW24>0s9KgoGw&j15W_?i= zVqwX~-|tj9&p&$9oS`*XG5A@;>EK*%uW@K~+3(2WYjNcL3nl|z$yQ`RzS&QtYkN{! 
zBB>&Mt&%by-@Uyv8YVI0%Q`P=OhZSzRrmAtwO~7RI<#qePgBv%rq_$M#E-?^q90Fn zUwk~|{o#f*-X)8hk*u!vNpoE?2a0@z4tma?7pQ~mrkad?m910s=*&lWa#@N+7^d5v zN|WuRt@w$Z9xJ-^^;!S&95q-i)3J4AQkXtZH!@~vIYKhJ%4NmHhCjNzwG*MDEiLCI z6gEFbf+6F4N9aWIX0pMUw9B=?R?9z+;!kU1#6&{urZlHw7D~p26T|$2@D1iVQ-;bn zQEVz?Qw^Qm6bB08NtMTC!=oF?b2~P7T_a^UKNb+(T`=w065U+7)lhWR^pcw%Rbq<6 zmP^c_?#F9-#!{2DE}Nnscd2os8TTJ3Mftug&L}&rUTyhVLka`SA<}$-L|yk=ZAv_+ zwKhYq^CHIyM1nZa9^YJjBqVzmCC-4^Hj?V_XRn~SMDlk8*Zjue>-ljNm(dQE9eR1f zAYZF4!*Q~!m>nEqdJH7xOLkJ6)aXA4)?H1GUldgM?m>WCDF$1CVvyl4ajCJ7C)Oe3 z3#)u7UhkHx2TMV01^nSNI7Ak32?pB=vy{++L&CipZ!;mK4MDn$meG)Qle($L$MXF9 z9=FycPDBW+O{0k!6RPR!PEM<7FzM+wi^R|=nB6^PY7Y3?I=l%yFG6}5lkeh|zyt4} zbjGN@L1z?)__!l>I!?(4KIH*zpKLRpFf`@vKoPvGrp-`YXO=46AivCqByqng(1X8L zEO35QJfFyYp-4895)bobTwI1KE87WDm}ffp{rgR`-ZvK#c`O+ViI}z5a^+5-K+_BQ zS2H9y7%Fnt6M4-;27Vhrv=1zmn z!fZKHV5-@+UV_XWQByO3D!*RM{@$H6IA&vh;Pu^tV!vmL>%|L_81d4=(av+K7=e?O zaaN2+s#FK3wOF9d@>3H`D4$c4UT8fhv%qg(!GyUTWWINDnITEf|B0TUyKr&V_}U$* z$`vJ7R;!;hlb?+x((1An#A|%bj3{|+TqDfO^7}O^k7{oUrPH`ouU6c;Ov!%jwiR`$6(yOC7qv|N zzSR%bn2aB@zw6cNLvGbKjf?SiyJ?8stbae4_Nqn(H{am!6Kz|s;O5pjwfX_A@W!DR zI`_zfZYB-m*L?YM?WR?VBA5Q!dcZNw<%-oX$F$aQp|PMp!Frxf?c-P58?7aP zmm^wlf`f~ztf`6PFTq}j#1qpoy_6$GA|ZLLmamBs6OJnA%4Z(Le3eyMNX{8p?#R6- z<%oOPBICU%x_oQAQv7uZv|DgjJgYtKocnx|j}CaY;#DUAQ9&obZHz`#PYL&a*}YZUfk{uPHsMt8jA zWB1E>OG0RYlohWcC6pa5$gPmhC*k|kre}Gu=J{vszY;dE!4^a8Btk+SeDG7sNln+_ zjU{KFA!|^al`l+}Ap+A^tQlE>@Hz=GUHSV7c~{{0&+K_A{}kfC$qMkxf4nL`Kz>5` z$A9Fdz+?8$1#SK-pMy}K_CM+aL=!(iQV#y{`X7I4OHZe9z5LHV5)!^@nj;{G*wfnM%rblDaD-Q>4%a9ORwn zd};%$o>CZ=qC@&&e$kg{y^VaeOh7p_H1sl5YiMC=XNrTzZ{!Jb>O!VOAqTuGOHRmEZUhRk1?Tl! 
zoLyx~f)l7!f8}!Fix(~N6!4xyYs;s*Uw1YBKkBV*F#cEKhCY&G@sC%_g~pa>`tW$1 z#P8pab(=DrHt0~_hKpZQaGOO{YB&~r##2@8-!KgF^Y`!mbT^?g;l+yw7A2ZTdp`=* zj0wUj6py@hy=WDVpD9$1GiH(=jCsbpj6ZU#q8?ux3RJi%ly0-8yJ3;WZ$FLgw)G;K zT&NLxJFOL_9;ObRMw^Orhi+4W*m!TAWaZ3BZ2!X82G_v|+}#^gdFdJI@Y->3=w<&k27nv@c%2i#APn~ycS z%y|CuLk*=W(e`YI+<8n))JFgHj@_BlbJ%!z5OrQhUd9$m&fb(P;Gpz)q3jq6U~-uE zGQfYOb!Va>%64G?*J{rD_YLsPN0)WO_0untu5S)!pi(A$zgU}?7N^|sC(DUjJf(?k#%c%_(z_ONhCdnat$Ck*-(;n0VokH~kX3P&xVmV;zEZBNp<(ugjhA;|)?x@CPD4dspNjai zxGoRXUJs{RJUsn*P))JYU#&nBE!)R_s_7ygweXO^�(NFt=FjHAlB$a>HQ-7_N+z z7z!<@%5FL|H#JQXc<-S@WI1iM^?sa)`&y4jKNlxw&tg#*1Wt$@FDUXm%;IG$q*##t zVbd<7p?-5yx00f~yxcHpa?((EqlKQgu$ZlPp$Euj!QX*bAmTz0pp z%0H*AO^$?&EMzSgLfKz{`i3l%MqXs-i%mMZ^VQjd4)H$i(%$0p)`LOvN7h|DyI!eV zN>NeKy$26S#J+3U1twvmG)v7QAbvjG(vitEE_?{hlcS}cH5)RkKCUNqPzal$FbODq zFg9l=<0Z}aOuq99pUg9W3s#|8)?Y>aC^|o1WGZgO-Er&Z&xA=;nAGay9WA6URqnf` zd0a-7nL-#-{RPVZ^HNgO&BvdlJURyow9vCDjGJJM;=sK2@5SWwvc6#(qmq@EznhmE z|9i1NKRPXqeV{<+D_LM*;63XD zQ>zs3`>R`$nwpf8lan9qrlgBbwibSvjeNc#=(MO}`tu5(wf^DGO2Tbp9FW;jiHR0J ztf44hOIMc+++5dvg-5&G{_Zc`ybrq?Dk=f6BpMnT^r1H0{BSW78M|)S&dN_M9gJIS zY)$){bB+Ea=1B@Eg1@&1^<+*@j-#P$$N1^Vu11kz;QiXXvN8%Nd?*MGr$761X;Tm_ zjLdzi%jAWZ@$pm8t{iS1W3R0q8M}imbC1K7FZX0+3CEkW<&zqkxz@e{?b>Z|?Q}iT zy8)uqm|abNi_0E(a(uP@?YDPYA4HG^;$yNlvC32J@niGNlhcj(niu2ozkX>8Z}&YK z`eck-S6`3Tx&);&=^*kaz;$!>_hy%vjEM;&w5VZ@X(Rj9Ep5GY4)LsL^73UjSAkM% z#inmwo}3@O^-$k3K0Bgv)O`v;v_;d)D5){Fx0-NJM5U%iR8|TXb+pDZMaRWmA8z|H zUDOhnL?5Q!Ii7X&euVI**w;=|W}hKwQ~i#Y4FW0BA9VqhIU zI61*0pvKGUVfNA+`C?<^G=XYeTJC)tx_A6^w1Vf3q-4PeArJSU-2Eaa_uB`*Mv0FP za71P!OsJX_vAlEDeXx8+zdBEWuHO3{bPa_9TlWrZg6h)~_r8GvI#yO(xP1d8pgnb~ z-Afc^6F3;W+{8M!rhM{q341E;+n8u59{|LZo_bZZ526 z46>UV`u*++>mfN_l$MnnQd##iDKBDDJWvLQ#YEP5jhgUW zAH{#R=*Ph&yZOxz0~Z;5NVYkGQhu1r+QyCpYI#H49yVi@u9Kuc5B|)$J3foD-(FOO zt?@lnXXAKx93A!W;X`ESgi5f1@{tU}ziVJXyR$mV+DYmN%K+rDMx|Twt5+sm4_YdZ zK`}+g#P}T@c{DUO2G{$&v4YtxXhWsnk7q;qBqvk-DzmF+6#ohUtuD*|PNvpx%>^gd zXi3c!1znYQlQEWi2R{qu2M?|9`-nZGa)0EV=M1Cj_JcyJDXFl3!`dD 
zqH6Xgao@aog9tpW(x-%lWvzv`1uZQVfiP+Gu^M=LpL}|5E~~dSDxzX6D}Ng6hJ9~- zBF9C$B663joOx+Sm-?^sK1aO)*A1NrtfW=*qUH zqF7Vg<$&WFR+-P5R|mZJc~-AdSzev+c4YsC{w+s2i^H%1yP>%m9fgTU;RE6XB7d$0 zY8FU0M_eVmb}bUoQMa^8U)ro{ZIn&V>oHL^=1)xMzpI%EQzvt_sLaEiA{~{bD75Bu z)wbU%Bo0%L7|+UIopv!+NIzN9JN@*SCUiYyl_5<1B){jna(H%rI=;=b5q=_a@*uzg z!kg_}lTjCoFLRkd{_V+RcVJ!J9ZnRlPF)x6LF8dOSJy;QGqofnn<|@|<^{M?`B1N$8Xv`}o(Y(CQbB4nhi=c+!1cv}sAjsPq!iu=4qXAS z$O!INE~<|o2eVi2or8`}P66^o@RD#%rV##A-08!=Hqp233zo-s|A%=&fwC3gBOoL! zNLOMGV9j7VvV`Pr6_qgjm8@xkIWCY(dy}E;e?ZwO@g0sJwqvg!$dJ|6Qrv^tu)cB=m|Td1z2cZom7 z?wpRGGZfm##LV>U|<*!kh*fn1?(RRgORz&1qFJ$b?a1c22CScX@w zT(R6vIGF$(%I~(7Qx7{6HmM;*=1%s@Y9Kb)LR zw<$0*zR|~8?OD~G_ej1ZFX8s#DtY|Ni7)LAdmBqO(XOkN;~0BdGYTQ<7#J95X4sdj z0OO#IVibjoyL|Vj3jt)u54vS`dOV{eVjSu6NZVhbBRgGV%E|3-%l|cNt=KnEP(Rb? zRN+=KU8;NII^BkIZ*UhnI*%EhkjtxqFHeTfyT#WH`POl1PPHzVHY~-YL&HPsXt`KR+usvF%|v zSW~r5WzSCjJ67>aYd-u+;8isj`M#@q(ez4JWz0P%reF&IJ+3>#oqA>Gt8@wcduVLv z9K1`Fx(CV8g$W4-rWB$~D*>768Nqwz^KU6GGq}3p zx^i#>Z>c2?;r#{;PI0KWO2@ax0^zR=T6?JvbKmcVgLyxQ0I@3w6>x^Nke|2Y6F#Wu zHOyBtaIs(1_-xjLSLODs)I#Q8v`U|n3r}#K67!{z$_uF;hBtu-#zg%ZWu>&K6n!-I z@}DdK#Ap4{B~s1sIfuh-GjLu};cDo_gt#j%H1zK)6SLX<-(@=>QY8vHE=?cCG3nMQ z@3G&{rd^@(g1u%WF!?^l&{l`8tK~m`NtcOP3qSo~d$O^_w?5OPQ+pw%XPID%49DJ^rxr=;TV1ji>-4`>y-tpn=v<6w0*A#IQSE0y+O7 zQ)~8Y2h&UIF8}3mDF5;}s|b&i`RReKG#g}A!oLkel#ha`xTx(-N=@{Vd|C%8w#czh+sdg&*VX^)61gX*H0=MXoJ&VX{ND#| zysS!dQ`43HqG^Dpq)f{WMIbCFKE$Ins?WTI=bQv0JEQWR>qe;JjIm?rWku?s^&zZVAA@7rKi>6QUO#v|n>lFrw1A7`UJ8Z8omDVJl|Hh4KWp_TTAARt+HUBIazJFAE?lfDCx>-Q&JpY; zH1Bd3mK`Mrek21dVDT|2D$CICRRGS4LU|;8?2O9wcxm=WK*~= ziO_2b&C$g0OCER<8{RDZLdnio3dzqOA_g`LZl$94!#vRYa(nQLoQ&S3TkB!aTfGG* zpI2w2=MR79BJiH(xCezEeiRWi6+OL6_p0DtLw1Mkh6U@eK>SC#b))66c$Px@YwYKV zfT}|V4#wnH_Ww+8MMHI_b~L0*_)je zS?BfTu|s-l_i2OA*tqr3y!o*L+=K0fTQ(t!ixvKy~-1}E7Vm=;tgsrUD9>$=7kPhBu|YT8aia}yy^6hqJ9*;tg0oWnia7hSuf zbKK*OSau*f&&B3g+waJ%ZQa*V#-Ey%_mM_#eALxq+1h|?GL9Vg&S43G^6a8zNoT*x zuj`9L!Ld*#gOO1xGKD|b;W0UfvAthq?$F_9RhRquR_)22qMeIjmV+o1iK6hu^CQ7d 
z;o7RI7r7}fdV7yH9M6dss3qiUWaSFImL3S0a=zu~59LUlN%WdnZS2Wnllz0$Ef#-> zUz&=e>LM4ehwD?doP>F9*( zd5MI%2jNLaeQ_VtxUMWPNCk{??uh31Q&)Ug=Vw>TqL-DcLwTX-P|d(EpDXlqb@9Wc zjpA+=S2+**Xsi)hv9K*}`(ox4WYTtDp`uMueC}uGg_o@tp1CLOEd97*B;C{+I%*W5 z<0~NysDkd)I6_@QLU;|tVxc}!{~(?l4TsX6n7;rB7jBRi578`mrYK;<*sFA3s9Kab z6(R+d_@LamrQp@{9*q+?5~eR|-m&gNu3RG`r%vv_BQ_*O1ikA8=!gmati?t5KbHuV z>$3*Y<$lQCaNoHEN7>wtOjgQ8T|K)l0+9<#Xm5#>Hm74joX{-D-eAWhf~$z8;u3Lb z;gdQBdlOUwu6J#^53P%$3iUV+)6;vf*3ak}iu4!J5msdG@DdRhSLXLndwQ>S`> zd_d2NZ{jSdg!+_}ua}eWYeesHBJyNpTQ9SsLUZgq%67Ry7RgjIW+f^zcZkto8}%9v zk$SJaeM9bh+xt?dQ2uaAp+ab|^FQ!Tc+|iB7saMroMzoLu0=T&6_k3O#Tbp(2jQro z^*@s9FIM}M$M85XwyehA(y)BGmHj~BW8M#o!1HkI%qJEs#7Fc`K}6v#f<1pxT$Yoh zY8A8MSv+5c;Qyy6%B=t##((HRHE{flvMF>}T>nycCI-ndR+|>L8$?mz?WP^N98HUw zHm^Ao*)-?Fg7hdhzS0y{aSd5myVWWX3?FT$w(`k=?RUH$fsuWkB+93-df3m)(>+Ts zZLFoUJb+bw!*=|mdqHV53wzDNpW@uSyi%=q?ZW7dGoc)qnCKlz_1(@sm9kmtIy&g# z(&%v^HD6XIP8`xY`!XIBm{jJmSB;?x)I!tK#a&hX)0HSP)beK)MW`Qyu_p7DM`*Ft1MKXT*?Cg4A`w6It{(O=w_o_Ev)gQ6ur zUG@+|`YmN!FVVb^vu~=b%GWJ7gE2by!+7mSj~?NcXrjl-(H7E?_xJUU91AfH@@#qL z^yJt)jw)mYJXtT5TU_yoNx%K;x$_ukwDRc`+!|w5cQ|~{CaiOCNh5Ku`q&-d!Hu?3Vg3w`1TTt$o-70>K>;Mp?o6jpaIxUus;g51!PFr;{(Fqp-^wk^dL%hub+jTK zhFopN1U!d=Db$qVD?x>WS2FqsMvfn{5@gp=P@QWmF-+INqFAla8Vf*~mDELDL3&Jo zejCN8ETW{U>OZ0Xv&P7NH02|Kj_h<9@pMWftDao~vy+;$FnZHuzPJ)cx|*b2O+m0w&q6@#!#ErcjtG!ACLq_bvAbXFe*X|Jnbkw1!K+HiaG~$n zO6GgFL`1f7Ws3wuZvhfWYHZH(rfB~%gf_NfcmhBZ0_PYHKQlHkwv)$z5zI<71D-$E zEMaA5mw`lWJ7(`@{FTNX>K{wxatvp{4rFhlTjmBpt{hHWOX`r!{6@pB9^7F-c|9Ym zZp3L`CxN3G2e5&H9p1cp6_>Gm;FPq<^<{qh4!er(7U(8+a}`+?2M)z9etZ<9&74C? 
zl@_HEml_4_04qWg^hd8E8TVf4?h9RloIWZwfPoid&G~EY6ku*uQrwg3ODF+slc2n7 z^war zp@aKFzgRa=>p_Y-NwOE5a#i_8`PNO0R*o3p3ZaIZ%U*_yl=I>X5YG38%df0YHNDvV z;u(*uHsG#=_xGOUxAvsS2P{9$)SD{3rL;AQiyu>#PX({is^$a?6t6o0bKmYoC>E z9!zmv1Zcb_oAwDg1Ku>7=Yr+pI4S_Bw_FSUkz+RvwWM6JK<{P)=1u)OsnNPvIsCUS z!aW2wlXK)q2Q$IY24%nfOz8$1kD@4m547IbZkeAYQGO7`+*J6zc2lTSNE;(W3H6u?_YMI|x*o$x~F;R5K|l z(0ar4R`ZcI{pG+)pT)q#E>Gzn;lJwF#;)EiBo_RTqmnEQc6a3+PRDsABLnjQaO)dW zX)>{&LnVtO1xxs2wuV+IOT=e97b+lg~kYfDzK1`q5fa{3@aX z0F#C@qAYj{JJ-IXW

i%@ z>bF_0xyy~c>B%qXmncw^Y)#YqVtZQNrGJSiNA7y8@3t`qN-zZ<4Qi8jTjC$ZkZo&JWRDa|XuaPh~06?P7?ODOt8=H}CrwFfnJGK3_?9#^)(ToN-W z(*Dl4FnNqX{&UE=I__WAR;tTk{Oht|bm zS7}xIPA*)&Q>a3;zB`C$&Z+g-Ln`W;af8aMg&vK{5By>wtO6tGTiL!Pb80gF`5|4Z zr|ti3s88yBWq%#C7V5BR%MXDq!z>^UtqP2DJTx|yKt!qxU+%S`E2fp6DjJ3)b|fiv!gT^nMrp0rRE_GL4K`-Vad(P+~IsqO2V!P%O(-V?Q+ zVQEPO@{IkOTR&>Qyf-sg4^zo7U|ymrEarfjnzGLQiG5b1-&M-(VH*YlrWXm!TgD=eo74A|4>H`cPzpHpNSUw>2rKCPfySfUX5G9_#xgvWW z10&=dbt0$|E`0JZl4MRTT*gsMvoUx7XLK9Gmt zZco2q(cFCCGMvh1X2o@F5$sF;XOj&2iNqy2^DN-TY82drWcN?EU+UDb4V23Z-U=5; zz3lQ3&dKnSwyF5Wv=V&Q30K761~XeCxt<185EK_Opx|2cdV0u)0b6vkElwD(W|tcE z4Y9QC``j7$&)GMbpHP!)oI|l!Z+fmMCU1*L0EL%V?PKs zrV@~ll1epz;lgwKZcDUTLxm|TCG{*}PEd!#<^%uDYuN?|4kluu&=71KoVqY`MZ?<4 zat#g}wx+N!;xF-ovgFD3Nn@Xxg9M(NbDt&boHTp*Dq4l8qxqf@71V%L;vEI=6R5m3 zALL3P1`5ES`j4sT4OP2nU>Y_gH&SyfNY7Xlqci!{OWZFUPjzO_kZfc`v5@L>Y%74> zvq8S$@>b=qTn>X_x_xg}isy_Pd48wxi}v{da}BY-Hb~EsuMg_GC#gFGJ4MeS>uRCy zq0gnEzbr=geQA|%9uM0>eL;kqPS5EeF;NH$12dcA#G!s0X(PJf+MB*g7c5wqHQnDk zzXHL4=;uhVq9moO#y(MC{UvND;IlYgyF=z}z#GwNJ;UYm&1V|n5=itnKQ2@$c2FSP z32o2!$pp=g@+yS+X&KC%a0O7{3S~5_pG0j>{QgJ;oXfo}%p^im>C~5q9Tna6yFjhJ zCDaQVNec#&EOomo%gzKXutSof&B-x#K*StHdECN@OGL`qww#TfJjdMGoy4c@9u%Cz z!!!q~GV^EG*qs+=<{|;K{{Dvkhgni(cpQJ&27mVCXKr|acYw?Ft7tjXXN^L}ws$Eb zN{4XfOrhyh;62NJYj9>dXT=xXtvzcK1~ON=HpMhpXEQ~y@%xC18kWwH-%e&oln z!f+{dh)TfDvK;gdMgRveK~LtiWBAZePXRr{H;q46J##l0x0&>!{F%}atF(m;lSPH) zAM}}YAvu{C_oHGa*xjxQ{||L@_unQ-5Wo^dRwN|~6v@vXJr7WxuInXam9+ zYB^@8S*Aa{F|9hCnV6`VRAZEv_Pm;ahmxnSm+(A3m4N+u5x0hx(3CtdS0J+VFA;^u zpTo6!N{EgBoNk9YV`58|!hv0q)7nqq9`}o9Q&}D(xh_aVnB&c=Baqbgf?TCDOZLI zb8ViEQS-P~7Ys$@dK(0&eNoRkuq*^J$JqF-ix@LAHq=7~Pnz0FJ7Q`g%jvKDi&d8_ zPy<~(@=JvULxm+!zPv6yh(*cDoIKsPPXD8Mb5ieTo0T=aG3~Dc2h1J! 
z*%2Yg4zM2VIEF&Z2jn<|A5I@Wbp{T_HTHmO-pYOFOp6Ol$W{88uj>^yu~tzl-Jgup zyf_cFJ{wDj&q%+lzhvDra~0hCwT=(>p&*iyw*5t@QcWO65(yOsqf;C%+z@i6_xeSF z*ak*S3L%nvT7h30`kJYWEc!cR!Qv#{h*u^k!PD0B4hbZ~5~Vj0b`p-^`X&|c<#&G7a}L>3|Lq3d(Nvf2qh zJR``STiw$ZnB8o4DRED;?c_0elH9>-0;$#Fw^CxYGPASEe0|Z>j3tDs{Y%xa%?n|N zVzrRQ)WV7d0%0yf`2@ZsZK^`K>hIjN;#P>PdlIyksZfdWIw2X$Bze8R?|H1%g! zyDbHrSrFZ`c&jtU<5_Hu=B+S25B$dIW?z$zfKQ>r9MmG`8mDhS?o3~icPlXQZXb#m zP&RLyJ`s#w*jR1KhtbR|o2En*?poUS3ZEOXR}1-b$sz1mE?vlur1G+;8--4{P4z|V3}|5r6e z$YrjA5uRtldVkJHtLin;ulw0TQ9=;7njqyATP)`GRc~>Tg`(!*T*wW@bF;3G9oTF4 zzDs0;z32Z~Gfj!<)HN)`{O-UUF#>7UtSe)7hJ0^GSQN(Rmah~|F|9=8h+j>MtP-43 zzmiTtjrEy0CWpBSbBZ_hW!rzC*JVJjzDGQ|uLUfO*co3B=W}_LpoJW+Vr7n8z!?0l z|Lfou$qz46kz4Zg|1Dq0Pe(i(g`>+It*J@LUm?CP)AkmAWVh3=UU?2okE^+_)NpbR zN5?)?lbJ@_+_Kokzr4)Rx_H~Q^7d};4C1_T1*xdViS=|Jw%j^+Zrq@Hb?>4VM4Y6A zUXv~j_L0t#h#-{ekCXoQEe)q`@vZhGZtVJ(oKt~Ui}X$ z>lVyV?&m(llLEd?|NI^gWebK)g)=dk+ihTD>VXLjvD&~5P4BlLd^*=qGWMPgGtqqq zdy@kqyI+?R73wFl@)*xqBbN9YgHp%)n4B|MmI^pbeOYJwCH@K%IBO)pHbSBuMRmTS zfs8okQ`WlLOnJTqG5;6B-f`M?+=IL zP}Cf2GUJ*hu=zN_A;jtf^WQO+2&b8)h*Z-!Ei@VvAUz+Ph!P5^IZhEM%NIN-+QTZ zNhN&f$;Lo=j~Ma+>}f9ag=8)RCF3gGegNu-5YVq zh)+$p4)_3yWjL({*Pa8pYnp>&tzyBatpg~GT`@}4$WTJbK{1WEDA|xHgBCEbJP1~i zVai6lmH>5A^zdhYMThC^9FwYQiS`W}^ zDV?mz^+^bu1`QZWA|}l?h&rPiP88mK(RqV@)(f~Y(BY=Q+un60Z}1wB4+&h!)T*6l z4sJ8J4gl>E1yy*?1ac%{fCg~jWbt^_wSFJ zA(Yb3bfIem0I*pFu|!<>-v=NrNf~nVI2MD?7yz5ax&`iK+Eenttq|BeIUmA&b|=XR z<_J3l@Lp^%VPdVz$3pa}L#23TqKG@9swuYJkTzv%!RZWoc8(jlDt3P0y4~;=#LB;# zuaf)b_aQP(Cp_-#|W=T;Og|j(Y z@+VFt*UOzH9YjgZoN+;aLl3$D3g(WB1VPi+X3 zpA59qSs$J8-7@dIeS$2~(+mgOp2#!L31S#D!ac~R!S2HpFC>1x`u)t~dlW1|5ZK#k z{^4P3;9KZ1{j*HmQKAS&e)W6-WcZKLibH+RCcMU$q>%p@4@B9${};jrhu+>|(k<}u zVU}oyBTPTYj}4Pd(8;zXW#<(|MK6QcXs|C(!~RAYZsjwufY)tdNk~P-@ai((`#{5G z7n;m)Rjs2luZL2D*b|+x76DKLiFtVdd*N&dtJvM?#3a7_?l-a@A*3LqXt|zVCISMo zC|rvkIW^`ZSYSs7$5HSq`1=r8A)yp?{X@c4fH+@wv(KRfpP5D8h$Ti1@1QPGQ9UI_ zFadmE9*c)lD2nnPokZa|F5*c>rmC%wl*gwF+hZTdK6>AFr5F;Bkp|?FY_Qm)fs*{J z`Fl+#tBC96>*U_0aHJC?Pfq!YXl-W_*3=*j!uFXkZ6OzyPZ}o 
z`V~~82(~l);b7eMyh=`RlS5pi0a3R+kV+7a5Z8S9a7nR{@6Nj)n#Zd=43yyqr7(8U zOmc6hotjp(GRpFqx(k9-g-E9B%bdwc6h(}5#$NniT`-1sK7$?k4^;AVOZpb{sC4b& zpKl4uNO_$x*-TOH3r$MjjZi)tlNiih$pO}W-3~9#1C|0$Wf#xZ`E)U}`NKcYt}7RV ztwrK11Q$ zjWf*ia8We%CE|A44-saRZC#>j%Fn-hhrJ(=(f2EyCu431;0#k=HL9$rqU_gmS9Rh6 zqoVN;41sPlZf17&FBVup#%5>gjgOfT4ruFMf0(b@ZT8g0q6EjHr1zl-_x1DW=bnz_ zo#TOEn@3|qHg%$(Z_RaVCutT;i*EhNekgkq;p(t7(7l_%dZAA%qq35!q(pPw>mO8{ zsyaYGLNZ21B`^(@!4>^#jv{hB`jjdZk|8OoLb8SEU=r!o$Q55F#>R?l1!AE}lI`2l<$0v)6VUtLTufHe7y?`|yHqD#$VhZtgyd551q3 zW?&GpOS!zi2>6bC(0SS4eGKfU5zzmHBqRX{VP!e?YqbS8et~LhnN#s{UF<^;uvqvq z3JHc3T@aVkB9E9=)Efz@c2uY+J&p6I7f+2un3lo8iV$36?Mb2KU0qV(l8-kja-tFpP}rjfB45R5Q=KSCN+ z$1MVWhv+LL5O>Z)_DXZN;VJf6^?5W=ZIRw#3wz%Dw>r1{>`ZS1tY)&od8rp<_nq%f zN>%I+>9%1AQL!z#*VU@-{`TO^yVFnaNm`g}x3jO&zp%~m;S?`KHu79Y{rfA6pE<1e zs6tvF`It~|Iac}q`8Of`G-7;BDV$MPWP{?8#aHon*woJ4(ug~DeAB~tQ0z*w^(kuiLWq3`HQ%~H<1EM!8n_; zHxMV$sEHv_+*u4%Bw&c{rCaqoGu(ivmTLhSp0s(?UA{z9Nci#0L4ywW-KV;*(1`HV z<$tfI+8yKkv4I3n9I5CqCCgoxQMm+dHBBY%CyWa@RXu|I*A>< zZws@lYSzqC&_SSiGJ4L77tEFrIgeI84rG+~n`u-b085_wU%c3cYNdVC@SKrqd>YnN zqg$NTf~zzX6q>Uw~%{5D?(R{9SFZt7%2=bnJ%FhaZ#0!&8EdN0ObS zOiD^KF^Hbyw#Vtpg%Y0_yQV)+bZ1T{oIAIC%6Rbgn|OHrxWf%3j256EqbXJ{FO~t~Qm@8ooX47$M?*EqU&)}jO)}rD ztlMvfKpV=`0JqM^e!<#5{(5lK*V`^bJfy15sxgJP0OALm=|G{H*1kye6bN-su|ZoJ z#`Vq^ux7>8VgaujNmrr&ifA}tuc2<54hU*sxN%0IA^`~C8Xl_R>t_je15Sq|Ub|Sf zQm&s}_iBJ)8}Wt-3N_XFgn7gn<+lAwokvbpl?P*P%6j|?+GM0Uq(NIr?!^^%-{lMG+8tcJkikb&9ttWD=db^81uP%-`6cXGxwvj7 zFhp9eEjZnIZ!h(B^4AzVh8K9UIn&N>#ayYJ+s6-DH=fTy zK4nga5;4~3zMS1O)x#bMQQGJ9)cMhya~b?2lo`-RW{~bXR8@bad%>(^!FMo`C;#cl zi2zp5m1}T*yK^c|uCbF9=>sBO3ei;*{K{hxckfYxuAU6lquY?g-G}{ubIxOAseL#g zd24L^4{z<%kCH343-)Y8V!Hk%OEps9-h-ONK@isS5q$QD!48XG+>wco{8xb`CC|UC zD_n?#==3Ys#Gs!;dglH12bvsH?CB;FkI)mcx^+5yxI4kz=?=-syT4@odawh^C>XwJ z<6#=%4Kb2G4V#*vAU29^2dILJs_vM>KQ7dU4$BYj2bZ5E5% z0Z(Ol1|Wvl4Gb2dZ*-D^EWuNwy#A1$aB3l%_sSKpkyTtm&JFc`{yMY(3XG96IFaD> z`;K~qplxIsP0R^1f=lpvZ;H_)Jc}}Zf>cM4BfWC@Hwcy^E#|g+Uz`e*FfeN{1@irq?TH~SKGoAYUw?UikLetZs}6fDJwvW@?4>XI 
zU*G@A$a2<^^CWk*3>itSZ-VVA@R1EF9GsfvXx?OZ6FXinHY7x)*VJ5toU%)y@3a{* z&!OHAw7SmUlY-L1->f5f`Pq|$K5tp;H$7NaksrURZ+oeC^m}7sh_R#R4X#f&>n%#^ z45J%Z+LZWj*8lz(+W0b1uKfAv=xBn-SB0CrPk41pN}MN-2}J7%s=r)`6xnd{Sf4Qt znBkc1C{W~IYsfVz8)d8fQ#G2vXTvsjIzfN)@h|Kz-${j-NGO9E4a1uX8jL)Isq*W$ zEtO2$18%!_zS*QBB>i*flln7iO_wwG15xC<-Q^et3Ds@WbJW9Bm0q(1kJT;(i%JGN z35OBf4Hkbzs&Vs`tkYZD9E<*=yrE=Q5>pqJ=Mu(q91DO- zdPUiEMO9H(bj3F5o~|WjkM2%w$Q!pksBFWkHrk~9U$nh-SXN)OF8a}u(n@!TAl;qP z-3`*MARvt>-6_(IbR&|I0s_({Eg}uledpr$+h?DB?mjo3``q=%_dLq?t(a@hG2Zcx zcZ|6Uzwwg}ICKt9F0OZU5hQBcC-AUjR)g@;Y5kt@s>{rz8PLWa)^Xi*Qe08;*55@| zh=nFT*QSu{Ch$$X`g*gJ{CVW)!Sxk$}YdN78OtiU8A?vw& z=!uOLf`j%RcVJ^A?eY8Zh4IoP)I6_ZwknH z(vW!Of4w44C4d&Od)j7{QFjh%Zhb20BfCwr@M5i5y;4 zE3#ldex!HYCF8ptMisjLLH~KR@UOLWSbmn;*ZbM`gJV{giIfl$rE_%BZn=?AOp6dZ zFe8<|j{+pgfb7DK9%^MsaIk6Pd;-1f3F6i%LxY*ziy+;MraBS@y;(-UTy<*v;@W){ zg!mM!6=KC)WnSMQvOHV5pOc`!0mrXH&mZo2WFqYpFqaekr(ispGJ}sE8Ms_$;CbE( zHosB;-8R#$qWJTWq2ITCtKku&BM=T7h!zDS_?5ErM`CPVzgk{z(GaqWBo?@*j`&_= zhq)^9N>VQZ6f7P%7$|y1fPpo_h(*DVCY4YM+U~e7Ld(X#nr_A3l6m1+wly!7>@8T- zvT>m=`!YpJ-#W5_v*)bF7w4!x2=O~2hF03w!mOO~R!JeG(CFn+k__%Kon^mM7!-g@U`&q(kEcx9OmL;7=@*P{Z@ zlmMiAPdnI~;pasL$zM}VjX+V!?Wk(KuJN(o<(hR5HB8LkUli1eQWEyHyxeh|m>&6x zzBC_odC7nNXKh^9?`-TH8O!{f;K6JY`pi+#{>rw=LS*T*L5@UE%V(n~8XSUGZi%+E zx%8yrxPi=1t8|%FpUapvxg-7Ex}wi0qOfE9aGEYJXrU(#zSG(!#39(*XDJ(xsI|_` zKgScx~B_uon#ISakuNxSzw)L~kQaL~#cwPq+Cb+eL}Z{B}eHIFF} zIUqLMSL&K2@x_{ZSsAC27WK0y`B=Dj0IhwP4)LP1YuC0l`C8JONS*guYWuprT$>1I zODM1p6iVUlXUFvEjrrAZOLu#&sA+D;Cl6`U6RO=>b3#eJcG<>)7g`pXTrj!~NfU~H zL<)$WxDyqDHYRbX$T)@9h>}!@@PRY-DT7GO3(-K-_wESRv2nJxp4I*q0VS=oAZ@RN+%?ngA)edSoqC25E^%p zJ}lL`68vp;3r6j+s6vkUW}5S3$Jk<4y*%eaVCWN_t38v%cdUviz&lq}lbdju@M`ru z>=W$-{%d8cerj_*65){KRn;L#d`rtX+YmcWzgs|=Excn1o^DECj=mt zZZ<3qHyB~TN24Oe>RLObO=r+=bNF>F*P4 zi?VtK;sD`lAumm*cEYVJ3U=HKH9*%gA<^wa8+UA2KX5mqp zkChsG($P114D|+$A{yRHR zcBlnP;df+;w}6-rbb2wzef`B!puF1($pWhQVd6;bWp@YacQvXf@>Vozjy%Qp)G8`u zL8XTs5FZwagFmvzCw-qfZKzQL{sW2DdS=tQwi>#}LU4}@97cFa;}B*1*s^85TH2r! 
zFwn)=9F87(WrmOlQ_<}HnwatS{oroc)f2z-B-cQi6E!$#@U52zXy6eNphw7+qI^jV z(%DH&bX}X*JS5mw&^@Ekj(hf4P5{tNORrXBz%x$SLGA&jjO6 zBMNZv0O3D$>y3)jzpGybLJufWlRoR_7rg^0bBTef1-h54pp68_lOS!Jb=ov?wfjNf zi3^Y`DWfpw!gv5QI!=!f-N6i9fC=j>ZW|XK>_}f;(v?g z#x=8OWx!BQqP) zW+wkxeN)^j9NQpFHKj?Lx2=$hiY!?*PgXTMqhmhx#|C?H(y97Gjp~!>gPqx0&^@3) z*WnwZ@x`^@^-9!>nT8V_W?P3)Zl*@zFLax4(s;`HChf~-4u9dfG#QF$v^8l2y}D!-`hrz$ zmt7Kocd*K=H{rN&Fx~RVn^|>yf*R7`@lEVf`L_#BK`cR7m7k(@WWd^XS^eehxtCf5 zaM9{PT%K;5!xP|n6I>R-=<)gIAf1&N2Cl@#$yH;4W(j>LL!lF%R7>7jp7QYa8%i_J z{DP|)Ji@gILeA!Y9$cpOd0LXMX_>Ky*~P2Hvr>g!&Q($Y>W0lf&{IE`RU~9?7H%_) z0f12p2#hM`{;p8R4H*BVUXhbZ>5N|q-B0zp>yD3n1S)v%kh`3zKl-E_k1tjxK38Q! zrJU}dESsCzKJ&KiHZVU*ZQ;NL+o@3lj16MY3zz1Ms`N{ARh<~wkF+Hd?qp*dkFaN} z70OEZa(+DCaYrH1qCdP$eS~FS`*G*=I0{cbpOL9^<>hC?{{655$^`2+XiU2K+yF+e zFt+d6Gs5dv$Xbed;Y`*yuHyK-Z)iC2<liODU5YA+=2B zzW(Utmeq*7r!ZH3PmM*MYSFt~i%hBYV4!=kkU%b{=1_V#oVtT+TW^c01$a`=MdF8E z`^P;p(G3j?V!k)&WyF~wF{DvS1;-aR2M@72*A7FA3Q~YUfC_WN|9T49G>>9sEj@&N z22OG?g@r&RO(1nR{hO&I9!UCIe2~ge#&PUxij>KngNzTgQu%a*qXEU&>c;3b;PKpE zFLdfq_+f`u5S;<*6F9Q}9C$@6o*N7ZwUY4)jvm*VEdOvemSn{5jAqCZ)0t+0?3NBz zYq%20)C+*H&rJ##tY;YO=OA#yXeIXofyYrd2KoZPE&+d-QKbYg?*Z?$vam}PDuXi5 z+Vy+F-KA0Gjxvk!O%&Kl739nVco6br`21a|c%$zgrKp2=o4I<> zLco%__MVFDTWL{*kQ4y3N#m8`(T3`A2lRXB)RwNt1wU_BAVFb>(9MLq&+*}=9I867K<%w}aDL}O`{n=| zfkL78?f%Ddo%}G$K{N|~j!jj3g792w+~genvV^S!@{)Ytq3VNQU&4hF6Q@(2TZP<; z;6pH1iu!|QFHLhEX@!~epgV2J+7WlUaiF^Q?%(GcfR&)uDuvA zF@ZEardN_`?h23$29tdN`%94Tt~&K$bC&Z>(mD$Gk9Cqs_Sbc>MPh=^Dc;%q*n zge|6BReYopVmUA0Hx};i^6VuKi9d=0yF-hbFaT~n@shhvpH7!!Cbg}(nWWZwI;===Y|}nK7ow9v2!eD;2nqxgy((t2K4FHINcJ<&0gL(CmdYmhP49o~)~(=Gi`k z3H)fJ&Tn>Og5=}XyYstG{vz={^4{!%XZ@8R*6T=mno?~h`FDBq=<4N8`I_hDr`_Tc zFE|)5L_L2k6{?+fi4TuISZuuO(jO^U5NT@ENTzuZ(zoK&jCEoqnD6Wd7T=ST#`d>$ zCofGGeSjB#Y-BQ^$oBZ{!d}2oZ->Qy2PSg!Sny;HnM}Ys&dU?m43WucdyPMq>5pHU zpdLdyMO+n;5~@Y;{%q;qDQ_?wnUCx{rRZvD5AYZxJ2#>!|?hCD8e*`IjbA>bj4))2nQ`atS z@CVC6jgWamIRj^KAbEaWLQ6|)=QSg~1RrT|-tiSJ5TvKhy-rwXULz0$N?}HC0&d*N^|4?RGcXwV2zcawO#@IKL~ZPV{|A3B 
zn5l+HO&UE;EBX(v0-$Ds09_i6dA5iB9#FSLNYyxdNK>1G@F^}QTp{;&5^jIRVSg0R zZ4`g7Xs*V;w{le9W}p3omMvrCCN{h4q00Y6OToOi89>Lb`9RP4-8Ul8G$2tJ;QIt- z$pMZJr4CR*ZS}VXH9IqFP|ZXj)^Iprwe}GNlZo8mTN({tf7-pc4|XuQ=lQH^;efOT zJYIX9XVL|RSeSjTdd1#xr~7)1uDq0GG`K2$m<4afQZEjG2%1vAn2U}Kn&#QTi34(N z96u@Ql?Qxxf0SBed`UXhP-@POlLRVzixNKdGC&e>g0RrAIN==(fbV@0)^~W0*FP2~ zmd-E41_z)2l+-kX9PzBvH#0LR3O*h-arg?^Q}Fu0q9Q1LaeM?KUBXfh8f*THEXcJp zAxifRmqr}7vub0vUTf71tiEx`&b<3ZOt6$TAJ6#?~8l0&z3(^%@ z?=&lqvuRXuG8}k?xN^W}=r4d~yA~wYWqz2LKjE4M-v%dZK=G)QDH;N6h(S~{yI3<0 zJ5Sl|-MB)x>>70y^@p9`L(CW9AiPJGHUw zs|IW-PZ}oSqMs7LXsozua4^CMT5UDrxCfMj!2bO@>o(tTo6}evSzG5cR-((>2pb)x zED^avbkPBzS*}dE;zVE~89mm057(XvZ62N8s15PtC_X(D&z3-RfL5 zQ7{k1eL%6mP?A%8(V<1azZ%K+2BA9t4+tP2m<6Kx$c(lBG>(5h&!)Qg3FR$~fFMcB zS_EE*Ty~rDTBOoA(CY#>pKJ-BV|g|{zt4UWGylA2kNczbWo6d|mOY zpL@F7AL%k1riDPpDIUyjLTEJ=!)l^*w4cr!Stzr?Ffied2*pmO#Qrk4JK?Flc>x9W zwb>~d!k}4)MVZrS*}G{;Nf1)*x-=D<$*VL(si$Fv-b3z8OOndZIMk_=3z*+Opla&S zsB54{M%x7yg%(p=#I59&{Si)^jg+dNtD}Q~l3DeG{}uQujRzxJ5f_lo0w{S=$T(Zy z%$Jd+sP626aI|56m$^BL77~K1LY-UFL%YQlM~$;fp~o%40%0NxoBs+~VX*C~3OE?4 z$SZuXHj!ZIe3Ov?$-1msa1NUQUevdQf5pClZcrb^q_9jox)(4l*9o;hBRX+~Qyf_b zrgoiVUF&I?V8xll9eq!3q>|nO(~=v*I8^w2!W>X4ufQ8>04@}wWlwi~QK8wCw7*!- zdFll=H8tP()7S94vD-HZLki!qwph_}JOE_7J~D;J`xa5GT0O+Iq{fEH1P3;`G7Q*R zi(BF*6`KQ!MTu58tmN#K)!Tm)!xM94HEnUogh^LZO;BPK;_Z?%0Gc zvF;AnbY@vTqDz;NT7McLE2$S&JomiWs7x;EpA&>xG|`tVGOk~O+-0K5Y(?0BvM7i6 z?k>{rAqD2+DR_jNFiR=T24L2nf&Hn;&xd@Tfihst1WVT_^?OG7f=eXRs3=S1$9n`_ zMv_Kn@sGUK0kKkA3r-zYD9AUc>r$GT;KKCr)YgsBq^m{ZGTp==*ts6V%bDepKN*&+ zU9kg##qg|m{yXm2D|6oJsk7#;{aw5CycRb zzE-z0A5rOfi=F*`N2ReZng89v3&U1-vZp%CK`j01Y#_0=JURVvtt&?5Us%e&c_ey$ z&NXag^R=cqrDOM1kBEnZVl%5vdr>zH>4VOfC%Dw5QiLh%$eWt6yTRB~ha z0_43Rxi-@y=5l^2%rpKJJey%+k#F5gk{Ce9XEjuE_pj}kRsOo=o zN9lVt`%Bo6pc^v2Z@*##R>OgZC;vM!8D{R6ITk_5nsTF=eb} zG~(#h;Q#&WZEksa;=1@5&EA`&m;B1Ab3@fm-tCk{@TR7SPOZyDK*4o6W5G#bZj_z_ zukvr8oMu8Kxw<9)+i#PsX6^1sW8)*Clfe^`?E%LV%GnP%_b%=kCx>^xhZ(<0rG84Z zx1s;G@y~=q^txIW@xvcGx_eSOi?g-e9$07jRP}3=`hSkh3tVpQJkLo06(d-(UTkQn 
z^cwpgAKLEBUEmwOq>&pjaS%7T6Ju`j`XAJhAj2fx3;fErt*tI({&-^#{ff(#QmIb9 znaOTbzzA6hN|})N8;QnH1bw${vz?U7_}p(Uy>l|JOHaQ`5t;qNA$c~Y)5qiX`x%i- zP5%vZxlcngKfzu2#Wt^NRsZV66Y5+vc97OQJapNqZ|n_n;fu_!(s%>zq|-j;xgaaP zaAo5e7Q{~glA=eZ)=F_ng*pO}DZ4Uk!2OZsvU26AUzkER{lSC{iAq#-D7TcR6=b7k z_iV~)XYc9P&T*20*|Drfse(voT&_J?lW518BA~)nP-*1Ox zk&vZ8AldkMLcgcFKE~xmYAxijK(xgaMfEC7_t!W!V%Dj5+hVCeBR@Ag1M+XRM{; zhEQB9hJd?8qJM&B%6|C4be$OVzDUXiC=sl$0xTg?vyE?y`YUPgI!kvh_6bgJ8GZx7m z^IbCzdq>nASxo7j#iZz@=UU#>&aIU`0+D@_iXRIhoUMC|R9#VR!&HQ0jFViGw*?@r z_HPf6A%F_FtZK(?LXhG@1)WH>0nq?bX?bq=((m1VOU_I@pnmADRK{b$e@kEGgGB97 ziCxMh&sTPW#?aiBn5t@n;ogj(T?#O2@Y}EK5MTpDIMy^<0-62L4teS`Xtasc0cYhnF#qui5UF1mqVFw|hagW(Gg*=L_2Cu=sfppc^H zXvG!5b!G{=TI`6kfxS~bjn>GWM}=s>XF5z|2H{nQ!ig- z!A4wrPic_EsA3cKl=`5CtiP+@S$)#lT6Kv`S#C(DO=RB=$Ct4=jrH`!zv>M$hGZfD zjHN-Aro&Fje<@cA;xcG2U}3M+qYs(J=Fv-+P;A$Q&6BDtlsl-!zu4zM&a-_;Y!C_9 znz4V4hYt9VDQv&zpGQ=FzqC-X>mje~HF(y! z_!0pk%S$mnQTW3cab!*Uoqn70uiWqm|4Wxp0&=d!Z;Qeo#r6i-w0~js6RqigT26IZ zb0V7Tj0ERqF85ANUa8z9_u3C~qh8cZH4bgY0f!zl3|6ba$j;ZmaWe!W(TiKlL2S3o|HlBfvn` znQo;8JXSj21$n0TZ(GPJrO`Fidq8He?KCL_PlRD{d6~`OiOjyPh%jav^$VYID_|;a z)X|x5;UXQCuZ^GfpMST*Rm>mAWMQgkB+=|EO9fQ7HfPy@&x5MU9;a-j)~3w zC;ozz7yS$kXG3ef>hD1tH#r&q;~LVy^&!#Dnmstves%EpmzzYUo{z_940=k!&7tg9 z|62jezCVE(_q@DfEN{3DfK{Jo{_?wvoLKCfJg3O&t;kU6NiGz3!?*kI(8ci;>fMcV zl^$~}k9E_|$i}0b#jUr;QM(`$^Z>aJXnWq=afboQ(?yr$#-g!ErBIE!T#Yz*eiy%V zU_i64)Q3G%XP>vrs!`nQ&r@J4>~*sJ-k^>}J4bL61ns_`0Es08km1Y(tX&T+c+_ed zNGziLGr{E#%l}^A^(ygq2)wPpPN;! 
z)dM|l1{y*4+7rwtR%;)W3=DxV zfsw)jDL$2!iB}>Ye}Yw?a-mkAc~~*((E;5sK6UP2Gh45deUKS>?=tiA0eM;!h~bJm z-~4Z}NZX&fYM@c8_DO9ZJOf113ypJMKhf);AP~)?W;x5U&oZtb0|H}!p_WFCsFE>t zk=!(!Lf7_{()gU77~6wLx@TXdq}IQ}YeradX?;g@MW2obaLWzQ8x4-Wh>(p3Yy1b4 zXZAf8r~L9}Vo=l{30VLCm#8D3@wWg%6Fo@YtS8{IPw&;}*9Wpny_~2ms#nS}?C0fB zu>Y;Ndtn=prxrF4x86fhhBwIgJYFPzLUV1rlN@xP==`7g(K;WggnUR30s@N{m1iL2 zzdCJux6n-Q9{VrNS}h>l^pRbfy|JOOr;~^Pylme1_%7l_sY5R(1P?E&as$>Wiz0VI zwr@iPq2P+^I-=O%iY1GAw(K|`9No)#fd-;Dpe{;4u`JB)9}4t&xOv4wfd_ugT1@T) zYddVu80)iAw8Z43XAaQoY9=ezMKhAgY1R9G*j)J4s}6hs#`kDIumIWw7;PDKnt&mT zW^~i<)I$esfQcI0^6vRqCdbVkFv0*?Zd2UX0*s?^#a2NV=O2KVkxJBwH-D9fb>lii z1w6GHdHWo67Art6I*@YU^krfu%Z><-@OT&NZ!u#@c=#K-oe%h^~!Q=u$Jm z&$ySV##-<mQ_~g7PQkKmTZaS<8T27`b)YZMDDFy~QuVXbX!v-+HvZKNc zw6l^&1=PXs*ib;P48-wdEJ-m(aLYsDXhxOq-A~C9iV0L&kCF1VnkwIa%m#zcMVpMP z76wQy0Y>^FvA4}qfPThd#~a)kEpT|1nj{X!0JU|2nKluQR51Jhngqgx^5DxUc1MP_YHa2WQGyXl>W|FRK$!zLK?23rWBw|i^~4y7s8oKuF`5{-^68w+ zJH;}(T-?MLg)#B!i9emXc?5&rsVKSS(FFNj!9 zSh1^=YTE=mVf9DWE}I}u#X`}HLD}-%&be(RdQ6Nj8Wd_HQGuXR))1b_Z|2w@cg4*t zzV)0!O!%+&a@$p4iCwkV*3hSr<3Bvb6#fqkTbZYD)lh-LNq`YeQvoYw$}UZJDdg`9 z2aGL2fB4R|*tQE6mY2()0zsy)n4-}K?_Xlu%ay8zGLV_Tbd$CH(kHwXJ-9x2d!rw&o1D9B|B zzj*yCyK+e5YgA<%j&2M^5M9hT*+har(pzy^0|4Ih(!Ak1gi8aFJj5MEv;;6$2Tde= zw;7r9XsT}49XmGgz&zk6WDHn%mTWbw-86nNIZpBQc4f&_*bn3Jam3`s=sZm_eo7op zvA-AIpY3s;1sg-^OgLC<+h1<6FtM9oG}YmuvUp))Lj?<3Irl$DFv@r!dd)2x4u1l6 z*OLkE88009#s*>|@tcLxjsn*1xy_dsxD&ifKc=269AdE9Cxho}_U6wIX@@^qXeZnw z3JiVK304hII^)=B)2Te&+zq^(@)s-5K-!#3QC;$P_NshOIwg}mz>)7PjQM$Cl_-|{ z5-t0is_2Dk8Pl^I?1{6bAg@zASp)K(8Q#oyUrebcJnrBzFvG0za0&pMHMHl3;rrqf z>GRs;k1mh5!aM@@oFbi<0>3H7Bs1=`JjvwutuP7lI_EC#TY7uB*gQ@mIuvV}U8-SU zCYS+G3F-tfmpK_0IR9kwWRtse869oEYZ=WoZG{*j!2@8EkL~7KG;IpkFZwG=qOIEa%_Uxb_K&PB_&ln&p@uq^#(0}POoD}pt`Wds2%y_mkHIv9W3!RN%k{6WQFH~2h!;I z&4eFK*cJmW8_xoA#AQ`Pkob(u>wB$&G({isxe~{Z>!r7+Oxdg6y&?@j+&p9mcymNG z$iWb)83xb_u;>bv6Y^dwCnqQ`$2QQ=n#9WWHnbISyF+#wGqy}ZtH`wm9+#94+jiD zbk!i{;MjVV6#jDFLlAtid1RjJX>7=lOe8P_obt$$P`s_%^IINd4VbbYJtyMrqF%Bf 
z`tup?;tb`sPYyp`okJziD~gWp{9?U=SO~Mk(N0NBp3b87Z=0{+HcN zXlE@#SkwBg4)!g3xCL?rrF#VS9OJWax@ED1Kk23t=}y)j1`cD%&S%2xtZjFZ(h#Pa zQ*2QuVnp9@c`P26;d$+%O|)mg_x4c<-y-IFYfI`^sWzWJ5u{-GrgJ;$93MmO$d{osiO@xxJfUv_&$X1pB~u{Bm?Nbyu%R7;{d;B zRBB{WN-)*DAIwEKRybeE1d>NRD%{Vp?8g~po>K`QT3k~80z(qcvzojp%z|oEI}sxS z6A?t;{2D^#c+0dr7XQ^twegu@G1Ah=u!R0Ztbm?kLcWh_QRMm+aj*o{^~r(;q%QQ? zPK$Zb^SAkM>d-^gzyQ5*FpM|4H45z5)k$_En95}J)~9VXDK4=+D-ce~X(_nJA+K%? znKlLeLy5OLI|<3S%(X$8zR;(VkDu?J>B2s_6Hagz)&E-QM5avP9+z%x&S_i1@EL_? zHbCCoj)8aGfZt=1^n&#RlMW)G)k^On?GQtoBcuOy!ptSt|XiZ6pPeInsSUwJF8 z=vCzxeS4F7BNP$m(K+uuQWU?RHj{H+)0N`-jpSpJX|4uNtncNZ*WD>ScjmrN%#!-^ z$AkX8fM4ziUnzeqp&b82{vr?aJNeGgku(_FAOz1mfjg2=;q38f;!GLN^|#pNCCzV{$;fCJ8JQ|V z-rNMLSXyV6w1no(AB1J)hxtVwVOkfw92h4P&HB!Jc`V> zk7sztcueIu1&Bcr+DAk|qJcARAhWw?Ab?m21m)F#!6r3JH!GMx(cfWJyRuo-&}Fdp z)xKTytts2%rv^X~0Y&jQ$E5Iqu**x7A3)FvN-a_k?m~w%g(C=_YKQj(|0Sr5%)h=d zY)30tr!HGkUNlRM1_0y(yX%;hxTUp?YTB)?P7xhjUiTHNk4QPE%Wp+snM4jRtS~R@ zUrRJQ!$qXnki|}NcGtpM6KdAqo;K^cX6XvQ-aCd5R0G9Ob~;6;mlf~l{)`6qmYMJ7 zZ==B0)>M~7N_JhYCs_`QG}7fpR1(?sqA5wqL*|9+qdGJnSJotO7#p?1uMe`C`d$)M z)4$l79q-t96nM2aEmN1g66$d>&e@T~T94_rmaHb{)!yoYwbi6T<-5M!QL~P>KKqy$ zQNUT^1k(ofOLK~(e*0HX?etum@yq!N`>;ia=TXQAsw4h|bg?^FEFz1wOqM?BXmXV% zZ5R)2<($Pxss)7d=}h3tTLroDlja?O6Old4_^e!%OdG=}W|HNeSppBLKPKbT9v|Dw zeQp!kJyCpERSc)DMNs8}v)u29PYsw*z(!j$ba+|B=19dLJ`ODRNY#A)nHRAj>S6{7T%L>3n* z!|H1(LA0fvM0OH%R=Va(!Pyz3=duXQ6Zrz@?|~c zmtyfWt1mv5+ED_O0&h?L;FvDLJ8~(B9G}{xxnN-0i3Ty-^Y*V$nXWVK2GaJA#4#6_ zjxNe?k}M(q*6FP)Fub>Q1gpfEAk%#GNFN59Z*cnhrG75a?GSGHkAZ*Z9QT9uKUg{f_-z?DKJ+Nu#<<6z+rNP0Ag#ZkRTY*b(NnMJ?XBzGn9+3; z@Lff)AJ~36s49A%)}Yq6Bf#QG^CdOqGh^cr*2JVO?@4e^D$%s z9r@3_9q(U02>K(cj1SIlR8f!v4*0Oql#Xe|=M53+^;P2X&+wZJgU21zkzSwd*e-8b zI|VaU zB?s&HNB%Nd)i|LTJEGIV5&!q>sAnT~-nrxSmj=LCUf$VIghY;PXICeq8`S^0GeS+L zs3C0PZ_-?ylz7P%EA|3TQ@NIc-&US5uCi)mTbzR0lQ~I0Zakt|YC)Ypbq?b$?3O90_>3}! 
zKvKBVmh9o{E9Qc4XYkae6n#u%X$qGb9!TF!vKkDPY*t%SCepoFWZu}tR`HGRTsCeu-8nI#J?Ar_3!^W+2bW3Bf~_69dtnwoXFg0-ko|cy zlFn9StWYCE;Sms4=%%Pe0OnA4W-L6M=#Q*PTNXLqN6Ukxdck04krCGM{!Ku+{_WHM0DlV0@?)i^`dm4l; zG>1AqH@e%cP5uex@oH*+XvbG}vlJJ4%I+^R5T?o1C;PWmVWFh{Bm>t-Rh7*qWdor+ z0!PWGeSFQz3Lhtj$B3B=2)~_l6qe{$TF}D3_SC()*l{m^&eeZOx>7~>IKc>PKAJSB z8U(l7i)6$5+}9$?k8?bvS|y}mTeZ@7lsIASh52qbrXcR(N+M(I$?6-wbbsvj%;VL?c0Bn0A&1W8q_$d)B=1bR}%tw)$BR`0re6he}jGHM|IOSjE@Bfogz%PGwEtMdBM@~Qoua~^y zCP42OQz9@Ls|aMMvf8CnO^(M`lSD$(?8d2)Ivb)vKj+csdE1^$eefvK@m3}$dw=|G zYL(oLa&XSu%@ly6h1ncB32ZVt8xOI$W77fa%gDXP_Z)l=PQLfyp{AenL<~EjBtLsP zgNq^UdxNGV+XnZNeQf_ZZ83|qsa%bAOG(Mxk+xFQsUm*kcQFZ}5n5YRm|uHx3U)Vc zj+}ACYih0JdTU9oj1O@MEUiP8eIy|WV><%7KzRk%GHT5lIJNmGcs`@=?B<{<*+vWh zozqfpTtv#jj~!x90wQPk6VvDQJXe2)MqloHZicIEV8H7BrgLvO6F`pVyo;o-%5j?p zQlR1$eT1VlD}KpWMu|_X@FleFrndHRmM}^@kd!h9;>hN~>G~juvB3tjA0~elmtA|I z>gZl<7kli$MdFP`uVQuyTFFp!Xd_{j zE+wlG7PoY~hKY^9>Waoy0ADG@@{#f6X{m3oOt#wdh!1J+376G@(pIlI!E@}JMOs!t$jL3`W7>j zH|aHSDkFiE4I^tLW}(?#`J-JP2_)3)pK#jFF#bl<6`TB%MLX4s=yAZwl+w%>fq?3l z#r5@aOOm19cVUskN~iq&i;Z8-UlG8lnabL3-=X+T+DYfT^H9R!#5^i4k;r!^q~;V> z#pIz?^tfZ;Dy^i5P3k_!_umA%hHC27awVdoqJ-IPYa+5>F1`nY*`nHoB}}x^fs+(u z;`ULA<9~Kfih=b{8jYepc&DdM&fg-X>a4c#kOKTVC! 
zzCRN^hp9OF!*gd6V_;#fX0*3OFpy6t*N)v8k%TSl9$kF~!Q0QHA6-ac5Eiw=HBW9S zZ$+T+^&xI{7a<^tvg0zcn=3@vY$bacA*&)^qjH;jJu%F&2RAjsOC7z4^L&riJL7ljNA+Nnr})G{gX`(V|1s|vyval@ zX|yx0_cbQl{V8pI3&G^O*_MybKiSeR|LvdoXZy#@XLn4E35Mo9_RX!6IGav>M+84o z`wQGxZohss5P%YZuR`xSIs7~-kZw&Ms8lwg7&qco`kwN?U0X{a6cGhVDnxkkZ#6=W z=vgY#EgZP*^f3M6ulem;E6QCnehG*F@n*aAt>6^ko!#i34%5_n zpC4n2y$vku%?j6}zM_*`130G3%gsE!_*RC5g^&{g8+IkErvbD|hRvEAMPDBE`IaYE zYWVJHwkS(qlaKpiRxA2M_0z-@?Oq6{{!uJ%$TTMPzN7gzN_)_NdG=zVrr<$T%$mnO zPnRf1cv1T$b_=J~ay{NY6(3KpXVN=0qhVx}yg?`1ks5PFxknzjndP~wq71h!a@9my`CI+=YYyl*uufyVj*w6+7b+Q% za1u&7lTeVUfp@fCXs z&5M@1-@)MYBBH4};UoKzI>dPb9KHXg9`Ae_^a1(zumF4T(F&12B#jQ^?zj%R*VtND zRe;8aNev)C&^w7IqygHaEj!0ulZ=s#=LuLwIL0LgI>1q;4snE7?lOHc` zN&?9nEW3Ued24&4drA`IN0{6}heW=sO@7KR%u1BlVqLlKS8c(ik?uDeD*y^tT2TGw zFn{?Vy7e_FkY?pTdF;!Bp!;u0E0`$kfnqaA^;6dDtkkL82Y%ZV0QHxWN8(N9ctWa; zjS&nQ7Ys{2rxv571{I&6N}o!4)O}Lf{ht;e=G9+26>L5-AMNw$6!>9gD0|B+wWq!x zIRfq+F&`?qCi)B$1z|o-56m)ffRhspJV9@yL1_v?dtK4*$|McwWUFOCxlLLgY2A!~aX2Y{tlbp9$EF$s_ZM2kbkz{n@LPz~l0&+U;eYCsPkU(9CbJ z)-4B~Y_o}oWV*C2@}2U43aT(8!K}A~>)i}RBj)SjmcV(Un&gdT0Ke2Y|Nar^&H}2J zd#)&apuQ%cB+C<%6V#g>{GRvk&5QBDT)O8lqVBaNq$L2ebQefXZSvsSw6WZ|kpCu9keC{u!1q1XVBZV0laMSDM)_ z3)2Wjnl&lBc0QQ$%*AAS1d2}gzSxU9Afnqs4L-d6;W}(6!8N+ijDwHcmtLPK8A=|s z>TsNkj9{&4?cEc*1DN2=f@D-Uq&XSas03BPpr#9NI7jNADghs6${yDyL)3Kukkvo~ zg~Sv?eFRBGw?9fhjPoU)%gdR zab&$!KOZlUtZqx>V0(G*n{LMURJ+Q#f|d@@FP9eYV0W7r``r~_oHsW$KH9f(qppJ* z1mw1n+Ahx|lHSsv0NM6=YWV>5CNiYMkEg}^mJU|T!Pn>=eX;0F?iP z>POHt8_j?8TezqxsBT~Jylz%j+NV-FyItjGCTp=)Br_g7R$m}Ft{(%I%_XXi7HazXo^~=gNl4INvUQylq z;ji|(>YgJX1X_|e3%>CFiF(_-rypAdho;2i?<5S#Me$|Z&eXTJDa87Qlp)npVb%?# zT|6w+@ecvfS-^g4eCOH4#7ey)n1vpITsCWAQvRuH7yeFys^#+14C>7S7$WS_CsCP; z#)xmfzLOqSNw@2rw4XlA%Dls}8vYRHA_DcCL1I|)hStAM=Y-z@v6Ym9*d2F6$3;@< zG#lG;m#_W)`qJxQyH;*bn{i|Z&^dz#1XF>g*!J2>K|>E6OFr1!SgjmM^gN|$vHsBY zI9WK)I|$f>O8+ZwsOwD{#ayE5^SP6Y8(`5SyCgmvJ^Q)y<$ZK?ny&Z0-HZcI+u4}z zm(T2kl~a`l5t)mEo>m);Y+$B^d+xIvYw`4!?%dA+XfEumX^F_D@j7dJ`xG<=P5&eI zgjEMDid<9rorUn? 
z5r$Y0JmDyVBYV#iX>KWCXRV^kjXP$kc)PF3k%f(jxjIiExmCZ{2sAv-N?PX7U zjBpo zH+OIVLRceaT6C9SO)5QIUAb8~eyyPLavZbgN1nfyOr zzTan9Ui{gATP|Sw`#aHRu=VrhRKYN)#%XA}Sf2m* z`QOtu6E&Sz7`^_V7F-_1zc@VqvUXUVChPC5Z4E}uD{hQ)HPJ-CU7 z%&MV403UD*_}5C!K1!hs^dExcMiUh|xnfi#J`7elNk8D@;QA--4(OH-$NRc=dt*Gu7C zA*4)dM**qi7x6b|BKXbEPESe$v^&ly15Ef#YF~6P`w~Y>?->pa7u3|y^+^3DK0M4@ z=QO|j__2_v^Ej;}DU$?8#

7KNjGV=7LC7?5@vyw-X~ClQi17!g0PL<8mT~;Sx>m z9(_>eeD-Ifsde`Q!^j7Z%ccNl=6$sPz%?5%#46`38A?#?H`@zuyzMBqbOT$u^AdQ; zFVIW2c_&^j^-Ek`cvz2q)B=n#93DoWjU5ZocyKit{4^JSnJjDI6`5l)a5#h2m^fWi zgTZ23zVbIkzJB2Ur0*=gK>an}bcoFHbk4@Y&QibOdovt*1_rleeX= zX-hEXf~Hn-k;+S^SLp-^Knr|6gP5~f$15grur0gOZa?>$@)MheURVvhy$tAqp!QV| zdg5dWMtP*#jXmSZFS?zb9gbpkQS9v1rhUbtcVK?Vd=7R@RsD9%pOzSd2|67#p8aN6G~>^RY67x_lwXP7>me1n*tBf8gNo_M$OL7 zUbZ?juw z;EhX%z1GJ+eoGcA_1Rrc7$$D%A%F0$=>hbtQop|mRX_P{-SVf#ZB|KGM5N9Cb${aJ)oeqNFYUVyrPn%``=_VL>rX@2M=BmlLkti<-v=$qnQ0$cCdFTF zNF6!x7>VD$gbI&q1Je^FV5)gar#Iny^M>it>BiYfQ}xk$U8(ktpf<~;?j~Fn2uL0GOr#lbhkY_*xVEtdS?;=Eih=xU4I5Xj9H{q5^)|ayHL_} z>Qi(Y!QTk#XN10wjJ(+!kBud-Klqh8(%>t$yt2|eJ}$LX|0(23HfV3?h-T@KIr}Y> zU08T??_jI%B(kBQ!PeQiy)*IV*5OKt#UNIHqzORt`0l^?etB$oIAmqGq=Q*^rpM0F zv9;;=Ytzruqm7PaA)C!BXPZ~Vy*JI2K4MEsuAFW2pFIN)_|u^2kowNis?dGt*f^T} z1Hdvbz)^x5WXu1%yyp&h(0$y1<5?EB5<|7l8LIu-QpJx$Nib7R2vO{ zOvlfp+3m_Iqf(LZFnFdDYU^GmWe4r?Kd_kyJ#qjOtAM{@BIY#H}9gN#C2irXpfuv*R_7_n}1*p%0s-PNZuuv`gZ+6IK#s zPV*ms4+m&rdhp=DIR)uTFlho{D+9a$7I3=oQkImTvj)tzM~y7DfE7O;aCTB!J2Z4R z?T#P4@dw9`duu;>M@PfJrh~z(yVK|{+k&H(_T|f$t_$7iBMa6m6JArA5`!FcCm`+( z-`U$U|C+=L?f%K9zcs<`XC=G%wz87?_d<^{*wx@XD}7|O&D0AP0W?t%&ZOq?LU?8h z>*+N`s*CvFCnlmg(0aG_R!8Hv++TwyI$miyy$O!-P?popkj3TYmUKxkbD93SjmC(* zJx|Cx0B1%^6oc4z@R(~qsw}|oi~Ai48yOp0Jm5ba>&ld2IoezM1QtOkuknYUH47Q) z`%NCuyMYz;-hHW0wV{{AfB)g?f_%6G=jk!{x@SNr;?LKX;LD{x9y88Q4*%4EzczC8 z7#smm{q+KQVhT>?m9wp1neX=|^$oqI=wZz^3w#hawG-Zq*BnO41x@BLIF)f=Pq)8t zK_}*<|B+K85YJ!QD`|fJ*8Ur!f2K?CMu#^wuyjABPHDh{ zt1m7_l-Sg>b!xS>6>@BS-emt_5NUuMZM})G_`0~reR?u=CY{te5MKtMW}}4_-Xhe1 zwU=_`qZygBwDhHw%Cy^$34Hff$+f#wOiY--e1`ZP?L7oDekohA)l@0_@$ByI?sPPu zRqU-#FszhzDY25hDa-6#yf)vRE=fyIPZ1%sz4yZf4+b3$4^Ea1N?8rSyaBdCc78q~ zmv%;EG4;UMSh!}YXq!)TPuguEyOfyI1OBGtThJ#!p_$`o*>zD~Zt9ai*@O#V)*>S# zHHf|^(=s!6FJ;BpN)Yc8w4oDP%6MpU2Zv<>r_Q!y-aB_SP~w~(Ur2NGmsx>bfsux= zFDa*X5k$06_>#c(7=1h=K(5lWQ_!@ss%mz7{%h&fr&O25FtMqpLVLTwdW{R1CUSxc zQG!K1E*ZlUV*`V$>f_6*_u6kl5s9_}01CBF*?kh%pk=i+)28Zsd%{^v)MYXf9DiB- 
zwWr;uCr1`~_%kkj8;wVNci-KoTe&{alrgFB(pL6#%_SgWwj+8Ln1W0FEp5;!#^0;= zcH=QyN#ZjJ2d|7zoaeq6bZfBZ&5bRvvZo5e7O7ra28mHObQnA}9I3FQ1lv1U=Io%S zlrR$VU|#b-Q1bPag#8RS+c-;q!Qk%Mfp+E{lIRYL<>^4%TH@%)nbn$z#m0%7cQqe1&Qo+h&^Yx< zR1@c!6)QPmzTz_WhW6)_(AA$fW-CL*`-j6#ufWd1gAJ8Dh*oCtqw2YH!~^0w^z`)a z{f;~#)AmNhA!&c6ZBHlbYF2JEL;vWRz4+oCRb`7 z2f(d8nNLdKu<@mp=T+@ORyfSP8NJ{WAD7&5VH~P0;nX)N@~lLJh2J(d-dQi-j@VNB zC)L;!b%pxx2n3n~d`SY064h#q<-d_V=88pETiTtnmOj^#4GtVf` z7S4`Y2pr&-@fzjQq`k=1uL417@pLVo`vUxTS%`yO3ym3~rhi}gxWLj1-G%OEb_XZ% zyVm6sf^{9b;^ZWLFnQ!XeXwEX+#p(3N1Jxu56|&niif(raBkt2sb;&kqOb^U32ljc zfjQZ%K8PuVf*@{U4Y(&$i&0ck6`M9l49!*nKhANgkqMZt4zN48c5mfLctOYJgK}EP z(a7}MyVqI3S%8m=Q@=kGQJP_P^4Xm}{PbX}Vc=i~_%$}dgD)_sY7-+Dio!`(L00!QFH#978K$8q23oe12tv z6M*xoa7GE;oh)R`qqWkVe)?<=Xv0$1?FaXyYe`~*Ll*WA?^TLi@>r_)P-)?O!iX|h z_!A_>VC|zu2lT+VqoMB%#ZU>DTCtRDXQ7qQwG({ABvG~w)2+Pr89Npg{JB?;o{Iyf=iyr!)|VdjkD>o#&AWFasc!(hM-& zA>nFhHu}Z#HW)}KDE$wa2GsqXGDDtwEZ`P@z}b%00Jv!MQ%VaYCxDhBZdw z{A)bs|8>{_BDDV7KmS+0;$N=s0Cp%$%R~~cD5&IN$wNylm&9%^XccQ0ha$8*Gaf`9tu-a0fAtx z%oAdnvEO(QR)tIx8xGhGN>0UBq3Ev3)MQYJ=W_(A^m;kxUj%As8w_S=OBTn&e#_y5p<=It zP5J1<%158@g#dt4kIHFSvoP^Q>o4_G>&etjPtQ6M(Ns;IjJf|{u{zd=M=ON3mqg#W z6ZJRGLeV{xBVlvScJgYekpMG71M+NDl4j*w84s22J!tAql{I1dnUc}O>z8j)HvtC> zrZPr3aVgR&$pgQN`w)}pdiEPjlyZCLp}*wBGr7pAMv|{+B~uy7e=ify1~e;(6;Xuz zPpk;a?v=)g=Lf%k;le-$;NNO-KqH>{0`V0<+moEV;~*~}o>ZnK@mnxwOAyhB_Pdh6 z4N}z;>t+<+Il%I0c}N$^IlwImNPwVj^=smBqXE&aCjz3Bb6xF)j`0`>*#T(rjsfPz z@l_jxx~u!p%uH7+dhF%F{u$;10IJP@A+LZEhb;+Uov!Sdl@n)F;-*sL9;U=VEzX6BU78y5#ym#ZlA{j| z^WiaUojH$~#~y-yccWa@c>pZ;K6!Mzs8@k!A$FZRd!QV#9?+^oG=n{&pvwX{q zl`N2+7%(XiC^f+J0*D7or?(f#^5p zzks&8mEoYoLU2A{Wi`-B)NP-c70*%2eMS)R^luYm!h6p@5Kg`XaPa6Pm1SDHD8je{ zKsD|fi-n^*C3#}eiUCeOWnZ6F;I}5iGaT-sdt;GH^H{pKKik-rkiBgc#z9 z@ZIg}DmOQ9#(TTIwLILSVuJ(C@Aez};CFkKvZ2!q{CyHzRTLy4fl6`qa!6I+D^AcLVF;JyzjT!vy+ z_x(WUP4jfhV9oUH%0tU*YUzBxRM#aAhV>6yrojgrWb2=TX*RP~Nr| zHn`SS*HDoo3u=7s{m_ADd~Fj!hQWcrWZR3CEa)8*GlTq9L`24#CY+u5zEZ~Itc2QssrX( 
z+>~d({*L4PLsy+t0#NDN;8`08QwD&iL|iaSyg~L=5&D6jaKeNxhr&S@a~s>8F#74S z^MSPbxIwtHtAnHBEJij1R2|T0F)94<<&^p?RKl_=?(DVFH=&)h&}aOKniyZ}KmmMo zT<)P?pbc9X(QAf_kD&Ees@GJH|1DLflriG-LqhpksB;ov$RZS<&Y*tP!l4-hKLmO* zhFVva?O9u6fuO0PW)f&>xK^xDmX*-G^nwE@S)f$G9Mc@Cpi-k4)S=z^CNK{6r^=Tte6{sc_03V{^dQ@8xU7ty*PYfP67;T%aQ+yZD zRKQDiQW441-^?ypS5O%HtY=V>MPe%{^}|s%G|Cx_l86Lubzy5CK6dg(`*ovGH~|r;cH^90XuN)s@v&`|;r5g@b+%We4G4p0;+ z>7w|w7%zVjG>*{bdCzkcUXE9voJFOf?-plcH7eZ*h!U=OhGt}@T0q7bC ztRvE$H8&e zI@Y5ryqiR{{Z91Fc<|?cOa&EN=NXzj8W20f>)WOdd|y}YvyHE#hqn|Ib!oolZ!%t_ zdc zLJTE;4edETV($?6DXV}QH(kH}L;4NNi7%)g0V!-=h@Am(*wcli7+?HgG6~QnBYu@z z#BT8y7#@#7n~@E%mna z2?tdy5VNTRLO8{2PW~r5$~b3HY8$m~8G>X26f^rovr4u2&YPzfqi|u4A{k#i+(HK7 za*t|RgFCnew^Y0gvqKEK*(Z>s0adw06-`G#0svO1Z+tcJ@0QLDdq+-Ruq6)<(Uf?4 zr+EpVQB^6ty0BvRK^S))loy`59dwRIEv#3@*NeT3Pptfw)Ae?cz%VdvR2pcS%b@?7 z$BMU*gc?d6-lDh&BjrMQQ4g5WMIv@dKto8YZdCuJgik#xmT#RsXO04FZ2|pT+U^-j z`>iwZJz9kQ(a$d82QJ%}3f~xqMzOlWW$`)r{f^DOd-j&q*qFWFfX9+RZg_4ht6{sIqN3Z$YLMWrEv&8`37 z9E(7%Pa{J6UCFlgeoSYUoP_ur9}aBe5OaQ@ZnE)zcn1J+;Mv{@xdy%szQf%kKm&`* zIE={LovJVL+YmyZiO)=m2e1T^&B2#*-!dzgD>J%%URnj$W`$MXA6v{UrI^aG@K!lm zEFgE_6lW~$ssQFU6u*4Luyg6B3hI2IOGAsp?E6oFiuw5f0K>zkbu~vPjO!{PL51b} zPuGStT$8?(T>wVj+FkfJ2li7IZ&!z?)Zdr-460M@^$xB6p3y`?q68>?Io{`*62xMO zefs*3cOb?r9bNC8We0=&EQz!+88s(zJ4b|DxH4=@fYiUrR;M#E;g*N zg+<`k3xjk{)k)j67zKrxK44&Kpb-a;z=R{kTsrV(qB{yV42};OP*I9O0&eosB*-Os z8l|Lsy2$P`uAiH3wnG9Fiei|i04tmgekX`WG_mitNwYW*oYBXHtP&3%O@t_r*^a0L zV+Qb#R0DUE^0l#AHcOnylRIk`Q3ge-D3dOc9f10bq`zqEaM$J&u)>g4gFX*Lxt!ou zl>p@cj0=|MqnUa3;QUahTC5A~jiBC-(ovNgXOZB0?JdP;HFM3=_`ndLuARSbk>-4r zVFj`P`rSk}T8)ub1&^AyNsHW?gByf>#UVsa76l^&oM0vyLF=f#&25Uor8%V_1U0BI z1fGcz#h^8%1GG&$jRoFbr}kLKhTS;LwYI&8Nxzu$bEx9rzg73fmHYDzkUYGNue)wzts?HigjoL9#OUq*_&7^iox}t^uz!B z=x1=!85U8NKqVJ&IRQ-6ohV(C^**Mo+u|3!Z>gZ=SsGejijgCcw9I{X+S{@Fu!4&fd?VL!8=Gf^R?1 z{{kqE^S3nzGENK8h*qO>*Nuu8wq6~TJHFn>I4JbJBuaizvaR2(PTgx1mks@hlD5lV z8U=HeQ1jZuC$|a)gC)ScR#^u5QowOweF%i^ratzK-B)7*%{fAh!9-4vZ>%qBAW$67 
zIENH2zzCtWK?LSD2yM&4+xsA7u=DRn2^4!Xd}rb@huyo^lMul@gah7feFNkAj16Fp zI0j;;i-r|0pjV7L zY576%@qk`#>p0BeYS$Yx zaeB}3s7~D>A#W<*@0&RX{Qi0$gOfS}_3jxCXfi4l7V?yQZF3C^1NH#V4bX(5`VYW_ zV{SzT$Cbz}xXbb8B2)Zj1b|auUlb1ASEw)CF6#2f+!Caq9={)s(G>_YBDTC&x3tVd zBl`f{8Y8u7F%UYAMP?Yl#cg zq^b^HVuxqqPts2 zeU5!503nVD1PT^7*y*3|2pDi|2zboo0esE_7;cgGwgBzo>V01{YL!Ccg*2lG;D6e+C;D9?OmSN zoA2iI-1wlt#1uUL`?AiEG_1`&73JctQant)<&))Y&oyY}`?>6yim>(nruLTrR9Ft; zDoCiUKp1lG}%>ux^sFlKu*J=|K%>I?-j^mgn<;MfM|DR z4Q5|8ixuVP^j#d=UmT^&i_`a=6;(%xO(s#rj;G-Qi)72r{$+TfJK-zIW=#ZURS+!S z)}`ZfeMi(6=xgpUziQYUI#&Rsio-yKm4FsS(K)lbHyNh6B;$s?R|NFbf+n0beP{=_ z1nxMBUUvNOWgf6%`xY-c3SrGlc2{gt3p5KWZqCNv$sZwISrp)K>B`yGAT7YsCzXEB zc_b9lmB(a$J9;O?b4~3}V}6f3D_cBnc8v9#%x?bM>8oVye>bFJFGapYD#^vikj3fL z6lZMLK6Sg|U$H<4#7|ySCsG+Nb|?PU&BqT3i=Z4P~0B!{_dO4FZ!tBZ)JURq9VmE&?hRraSRYuryu1&LClsg zx`ikh7Q*i4wC~+bL6FqPx%?>=uwjyEWpS5chLeNP9{;PV%)iKSi%6 zpNDwd6lFwuzn%6InP{*7K*&>-kVT38t`O-ke(m)Ujsy}6U>P`>UpM!BPee8Z% z1KX^VsSj#A#ep@0T1?L3Oo{W({B82Uzwzkvo8&BsSTRE}=q)E$`q9%CK2wtEbIc1# z04)zVoY9PfPF*olg~Q3qW1mMIJ-p;je)z^T9^sX8I!wP8jsM~tZS0VFi|rrzE1``d zxMDwf$@a(dxS(KrAtuNl<8n%vBYnm_M5|Wq|NObm+3Xbvju&>6uA}=`HRCRjs)K6! z|2D>c8Cm%!;HpMD0klug!NDXEG&8MVe!bvW1{dnZ6_z&%-cdGGpW_D$z{L(0!+aWk zce8ClM^chiI?FFJ=h8Xcj&4qK2H@}+O|nOvwKFxc5k@grKCOwy8ggGd*HY?yZ7HZ~ z105mUT`gC6btK2GIR(J)Cd42B66o8GtkHv!CTfLrz;N2Z$(fj9FcaQ#mky~3qo7asiGU4v_D{>&IGh4yuvzkE@Bq}SJ{Yo2K$ zigGQRvW3A8XwKXy0*-QUPRFAOlRy1oCcmElQhv;R4>i6%^MHRq3^rGWdfCYASbCNQ zJU=)Y%zExUGaTK2KyH%vGPDyKvHZ@24bLL(Fo&Rf#bYTvkPA4^Lxqpkjb@EkGt)#3 zikSf?2Doa9?s`DGs8dozAjOx0y03a2}3t-YhiL5L~yze6N(zGVOhIQTtPY~f*WpOV^*p35! 
zUc!j_g&P!hBjyV#vAKKq`RTY4nk@wZSfD^Cl$I%2n0n=ngh?|k@Qf16K|W{5rvZYH zK;5w*genhoke=>N69z55ME5vnRKyhd50X`eF_bb68;r@#Z2f)nbFE#DYvH+~Grns`q&SMk4s{s4|v}oKG4;!gS z#p{MWN5i37ff9tg^zSf3jf4iX@7`A}LiCOdi&TB5U^*u0)jaWZ&Q%}2(14=Rsl|&r z(t12y8DB4Wq(Q%04k~PDuHVCPbh|PIMV66*3@n805yAzExeD6!^sf+6L)1$sufYX8 zdz*;1(Z>dE(EaJ!;#M0*fnQJS^&YqvSVaK$^zG*`cI51^G6;;pMIX^#3~AFvuP*iv zM~VJ^KU%RA+a^Kb@s{OH;;auQ@B__*Sk7?ZCXcDOe|d956%8=a=tm&JEvQ4)Oy9(5 z?~Z~qfZ!u4(lS{v{LxinTas?~as9Eeg0{~K&l6VibVFa6645O6s31_?xY@G1R7D{~ zvX8{W#Y{ozEx6gza>ssg0F!_J{As{BL7&Kd)#BPL5+h*{>s-qbQrYh1^eR;iComOc zu%OF+zO~*BgfjXiupXE+$aYfQc2mHW&N}y+!BqYln@FfL6Da4v_Ft%^rHu4%+UwP0 zAF}K*Y4dmqJU&?_re4vDX+&CAq2&d1L?VKCxZXX-o~4oi69-oU0hBzDDxi0P=N3M4 z$i5rV)eQTY{tS@vf5;ZnDvyJ!N_lVWH;J8F)8@dSje|$v;ZYqrU%lYt(*j8$pt8r zp(5vOxmYBRz$jRZz1vx;4tH(bcl7w=>D~dcOq`Ew8hoTAS`#W@rz`TdkXL~P z!Al0i=5dEDhj+t&Cip`V4fa_}*-u(yF`^969Tq*)&HQZy!EY{Ycm|42&9merb(RX@Gd=l6%}T?8cS@#Hs7i5I%4 z<;#Dl0O0*Q!q}=w?*qUp$A;q8Nq>pEZk2ORM21A3)c8!7+S>zrK>k0+>Z=fgs^LSB zLJdU2GSh=Ae7bTF%Q-9gN`Vk;vyR~XMIkSuc`g3$fE)ahu#Nt=^As){xr!GrmBX#} z^PH9a|5wLY$mHxNt2N4C+PBW@;ctah2*+K$OYVov`Yz5xJ0&x?jH@_ z!7~E*N}vl&cSYp$A8COCzIgW0-SP_4;9Cx)pf6T+;rAN=43OiXFK<0p2S5O}HV@6$ zoLXIRlnAc+)AgGd{s|olH?RR@WN%0)ZA2n;eq;7qONP6|6|IBO?0ki{Xv3EKyx#Gn zktvNq1?ip9i@$W+svU7+aR5nud0_y^Ipe{gMxw>F1|d7)WzZc39i{X%n(G{(qN{~g z>}C$G4jv9hDBo2H2R3LOBHRtKJM$nQF0Vcas~qK|C;~>OO+K?_!~KzjOmaS#;SPQQ zc`C_!BQA;(MOmgdha6axew)V*W{wzSg{7P+nOvd>0UKNHT39X6Z-rEEvUp6-k*}wq z5xvdcu$X3KZ7An&DAT&U**f*U>5UL4o{M(vk*f2_%wOJR&Ut>qeva>+kM$mL9J`kE zTP47D_?`QP?;+0%-Sa&Y{=qr?v-Rvd&=iS!y3;L_5Ax)<_-XO6WrkXvYkcpJDGJYH zth;E`AL}{Fup^m+{|8?EwUkH8i`+Wtiaeb!`Y$CinV%oC^6~Snl{y~hTH9?ZNjM>T zE2|b1iQk9_I6VcsU(|h8vLihV0wEA#v2{4ed0g)Ry1hl2YoK{)!a1`duOJrWt^XUW z9wg87T-+^}hdsT-BV2?KLbY|BvkR;#=Bj002?pdB-d`WB{$wBY_bg<%U36V%ZLf6~ z8CFJ4tTm?78S{gr(vTwZ(g1Tin4$ub%0a29U^!p^) z${)8ao@wYQWUay(J>8y9{sNyMaeldZbp^w5Cjs74O1lZ^^QI-#iPuD-(a?(F7pwH3b*CWvrA~6wXvLC ztTc6L0nydWmA_)d7V}CB;O1YTbMcK}r7B9?M5-b{5bv^PY&;jz z>8?`(h!7zKd&LlQK(8vo45mxNemlZ$!N$X@D<>860JOzGA 
zQbIELD{rf@E>D%hVf$2P4-X(rMI8mU86axE!~h{j=sStPbu9(=BkX7Q)2>$ZSk0si z%&s;LQ*Ny=$gl@QIQ^7fRxZ#1#quvv{r9A{IwRj{6@Pjs>dqVM!JfO`2F_$uIQweD~)d&~9u;DMj+i zVht}1C6J?Gpu@4&`qXg$K}s${>=MeQ0>_IN*U?i2bbbO-hh8Y0J|1%+FVB!tABvS} zGF9diCXw#3QS?&9RK{ZBCc2l7EnlH+Q=Kj2K_s?v0WIIO@Dy)Mc|*7;j0m>-IA}5> z&gm&gbQA5~y(-|+XqYBx699j%Ajkn01a-et|C%7P=d z)F@R{ibN2M!x^Q7(386htJC}ei)B!cr3+dV?pXXKMCYbFtj+754cHwF!6}Y6@(KVz zX63%k^TbeFpNZu@r8DhH-ARu38?R_=uJZMn{#Y|Y*Bm8zb0XV7H2pi9+BTtEL~u5l z?ed}|eKe$lff&6MQdw{e3NNUVvrDL7cS*RDh9un|n7tQ-@t@jpg6F21KypQh{w2?< zJt-k3b|ksxomYvbVCgjB2n*YP6;)J?pnvN*sUaSXi31*oZ5i%}ZY08LHX*MwG)XG) zW%a!a>z>NXZ|5F zax3Me%2#)QOKcq#9Ejx^QW&fm51zOWXCyExanM`Ok44G6h73%eo=fxxyFPa}iOWsL z8Xpdi0%IRK=@ocq(63bwCL3y@vOrk6f{H=i&H)WAfS`v?I|UM6&Z|>ExcbXOr4=N`yAea8{Us?<&eqdQiT)zc9)urAf9Z*=YB1CArLV#gVPs-}#fs$P<&c1fB4j z>S)46^g5Q{H5}M6e>}qhj7J=(EPUY;5BS3X_CO9`5q7a`5y9+8JNw92xOZ=|l`p`a zKL=u4NY5E%G}{=RNy(LC5XrezNf&|w~UPm(Z{EqPh9p4CBwI)Ug zYWenIX_AKY?^169a-^zG-S+ouzuc3C;*|05nl}Yv;O$)x-b!ywtkXDz!b#&X!AbUD z`tBNunZixk*+FJ_IWjp$sgeXlv^3&BfjdA4T_Lx+#WfYqAw*)D)?1za1H!{f@{0c3 zCql1KGF8>|g%6&->&gJUznF|eQP*seWnZqga^P1^b&;y{`|&}csa-oBuyqFWaJ@oUg|JxN1R1PE>#y^C zakA3LUAOZI3dhH#udwORg~jozZ<$gSG` zLf*?lT@J?Te6K|r-lN8Cg9oY+0FA)w2m>WEU<>}FQe^Oe_fZx)rs=*@QVs&-g*n4r z;&%~hh+kiND{FTv>zzck7V=j?{F^V{0Zn6T8SPWf)Q6_(XUtdFCFZqTy?LypwcNpC zuR3wyrAal^{cNW=y*b?!!*wR%=EBMbHwWCm4^JMH$!X|)ndV-kpS-uFn7^GL;dc2d zuEO)&L`KfPJ1EUwBA_C21;`!BEpNs^^Jl&DO1h5yu(!MWPUovhjC(S; zjm{5w>UUT5k~=k2D@K`j?W9Ovwbu?BR$N=dajuZ_G0Qg0j#Ba4z5%#{%aSfav1*96 zl0wS9XVs)xOEa!i&c6^z0}M{hX@@&Pyiy{~8dqEk)r+oSxxrpG>RdYRMc&Z>Poh+@ zbPRjzQ?CUWw$Cm-kg2FxV`yG8 zZVrBIPEC${=rQ>sA-dqW8DaDAj^xR=>Ry5Zw^BoU$B!nUbmrV|=u_UvQA3xxsnrk> zQ2Nje(lM`{ByH@@g<_;WPlwH0ztvnY0IA`!w*;I+OQLN{__zn4)u*aV>2eQIec9!_Me5wOj>~U8f?c#7!+YOKt z|CXE`1!Gyf>X4`0)mKcAEqN=C3TVbV?XF z`uMGA=8WI7P$C;g;_Cj?CXgZ5;u0{;VMn@n0G2YS&;t}f8d2G`RVwu`I4&?B9Qh1k z_Nn3_pCe^qeUZtR6yt^qivtu83D-r^-XIcJJzpVmS0i6(tdDIsZOHce<%0PYUz`zqCynZ 
zTeSXItEV$Huhxz?4j1S?DA80-?W}?Nk-fIM|>7n6iJY4rE6Vv??vU+2`F!@>!bU%~m64U_r@Pv)s-@0z%l@swt`7V!d z>;lek8FgO1XP(EfJm77=0IcwEpDKGcxX|p&i;H#HB=$FG%7_6)*KW`r zg!B8t6O9NN34iH8rI|5Q*LR^X-{!Om>efx#fk}mK4iYrm8r^j?8y9fZfu}fnF;Z)N zL@4{Okb_pbEA3*V{+p%Df^X?pcD14nB}4!=?hmiZpSeklXoU}nz~j86_t)7vLyXgb zF`HYYdrDylGB?yaz6zF;t`CVZh2IMD!xx z`n)v6f&?L{wgp~7XlFc+L#pc1>fX%6lcexh^cQnpso#sSukV+vY$((-7B$mqFw_(1 z8?TY=+2KGAs%PRw&{vWU*>O%(k?wCt%-rXS3GI9}_N1Iu@4$los=`|RjU<{#pQVWL zA$e*bdM)1o?%Vo6ll8JHGV{IX3DFTrkHxHtm%kzjOe7qXq z?*I1A6A8mkI`V%T&}QHLT|vj8m;RLaLXd+&B}wk~LnOP7KFRL*Jy2Y1Yrj-ZL{u#t zQQ;aZI^RH>5${}K_vct&&%5ctqpG1_b3Vf;Ij}l8uA57N3XMUfLW<MW`l^=Wa43fP^=@?=Aicjnv0gf|mONGOPhkJ|*{dP3<*eZW> z<|vYw+dX_DWz=EY<5a5jRw?=$=7DLS==T&!?V=RXzC*&mo3A6>nHq#Qq}kw1(i3B* zWaZWoA_sb6gsukAO)sThPp0W&(S575fX_*vaK;;m;UFe?Ar`d8747YZ+tljj{X=K? zuXT;_(J&3n(J|Y=J1rG$sT&ytFdZ4m%fRvjR&6Pk?ZQ7y>2D`C^3n$eKqe&Thy~WV zGCWMUyOtfZcbvTs(z@bByoCUDJ*}V|9P^JLB_B}y{oBL?l|6#m^#4_WQdN8d2w0^x zeShYA_g(Ym=ZlbfR2DM~Wb7WQ3$gD{%=TcDwmp|#_{*iQHd)h<)*>1@E>)c_@*;C?aeTRPkV#j-j>&?5956F-A zoH1KW1d7cEU#7BZ@6$H%@A|P*^ip7G)DJD+f)wPR)PUBJU%Wco^{0iuUAVG_yom#n zo<<;GfK&OOm8bYKx9s6o}CP!0ykVJZq(-j>c<4s2q)FRGKXAluv>T+MlPK|@x1;C@ix>1 z01DjRf+GJUDrpefF(|1t|ML!X&;y(ngf7#Tsbm7Q9m?+L*KYOUx(@=V`;wFc4=68P z-TltbDt!|9p$3LCbCP5}Fm)`mmTUvm?cs8q!`fi(n~g8q`RxTLC=LA1#A#@VY24B* z#cl#Oarcl~MKXij!MrTo;A*!VI07qXe*n+AZh~aDRWc^yi0`MG|CLU$_QHX0?S%yi zJe*6B%byK7|NPL+%{ainc;TAg8hNRGr=|M|@W4;@@l>XK<%+&A?7D2=#9qu%{t&LO zC+`Qclu0G9b(5QSa})II(fE-D!B(5Yrcbt|rNK=I0Aw^Xd&yY`D^ycWQjKm&@eGk6 zV*zGJ#d~e*Jjow!e6tk9z%tGPeE%>p4JR%Tp86v7WpP*o<;^%@&u%}Lqk2x;#;Qg& znxk7-XCs`k_sy}Y-Z`_MVO6MOulpVNnuZ}}2dA!8jXK}r^}9E9(1`Hx92Mj@s>cK> zYbdP9+ju0a79Cu>ak9Q~>M!$zx<)j0s^;2bhY!EA z1KvHVIgTc`tGSP*dHVL2(8tbh90|z^Wd#ERJVEyM<*dWG1-$5!fK!EQ8{^Lmt5tl( zZ{=xR*xtV8a)0<08F{d>SC!9X*ay3QoDc=&+evPH!-Oa)+Tk8wK`&+o4P>7gr+t&H zV|2y+Cfw=wpK5&4_k=+QkQ|*Zw(#djrv_?qd{X&DirMn%@txHlo*#c?+F={~hc+66 zY?j@tf3;37YZYCx_T@O3-TT}e*=^8xL2Bw)52+vZcmdEnCMVt-g_Ni`2pNRX^ZMH4 
z2p;T}gjl`*`sWN*`Y_%{g;mzYTCHr!HspAY-Nee$-K2?dK1QrW;rOTGTBw^A*IZrG z^Zk#PWwqX1Wod;V<~*elqtFkL+e9Ec}yk>aC0=Ka3jO zqcVmoTH$yxx|03VVy%C)@gQE=tG7kYgNZR$yA{RWizl@!$4osXDYc|~UxAXu9%Z5Y zx5-;qZ#TzTjb(+#bwuM76!aETk3?U|4WdggX|Wqg6K=J&5SB2>ta4NH-y{t6(8dvJ zFG0RtU-4R?nVXPYW`?)S9R&m$))2iB+MY44{_GNsmq#@*Pr1`=Ak4T>xJW;nFZ~2z z<(tzX?c*gCAI21YvbZ6dx^s9qko4mehMfsGL3oOOpIrBf#3f%HVW2o&eW~Z^DGG-d zejhsfZR~$`?N@hlt~BrQ(|IeN)2=+TQW#(UnIKznqp6vE(jLaGQKPq`c#Qm$rJuBb zRmz^LW3to*^A*p}H(#^z2b3@k<~NcquH*hvI{S!7cn4!nD5L;cjp>2mtCLT-sDc$` z4Q%Gr3-qO6Iaj%piHKfaMxXavRmfquCd+C zYqocvm6yQt;@mur5Yat`#L$+nVqz}}Nxd+fmIb;hB)em=Zr3*^k(aHruR8~S9aw69 zajE(F(HJ@UMGLEa)3c{%?^*!xye`_Svl4AC|_S1PEi(Q$Ip6S9Z2KT;k&(vN*C z%FiAynXCT#=j$eKrl#Yz75^Xm55}6je*XFK!I56P;}5-^dXw||sceC0sD0_?;}`85 zzCMAG0t84LR!N4kVol$HH|x&3-%ZH4uD|2deCh4XzxngdoEKL`c2Fg_Kv$AQ&^Rcg zm3VkBbgsV?W7R@@DXue~C@~c7638yV(3X8L@^wF!a=ws~%KD4{`>RL>K^x{dKhZbn z)^_j`7{_S$I##92=oAh3cQq=G(SD)5P6y8bO@%uZWU3N zUwmI~i&9e&hvX@-vRk-1M-q;XlKt?6k?exq;h?O{!5?ttTM83pR2qRs8oow4>A}f` zwTtAjf)f)KGM5$KaP#2WJFtD&|$W&-NBh@j^PSI_nEw$#=(PL_SD^7mBs?l^ejZ%I4 z@_@OCU$H00>fD!ES#7aaN^Sd)b^{**9Yg$k%y^@ihqW@Sedc5yp`k9(J3cu57w<{m z$-ZD1rqth0A}H5^weTe6NL43~WoE9jH_W#ZEia{tD$X~UnlTl4^cxlb%GGjLctB1g z>bVbwJy`%}W^^H_(e%wLzf1j9%Ga(05I@<3&D99WI^1j>up>+TAb-5ch2?ut^rag| z&AL<+P97Zm&g4li^fW?^#)tD&Q!rj$vYz!YH49!ai#l5ZK9^Vb?o_&Z?^_HmhI47});{n14!nJqFE3kU zMDk>h>ic+(bObjCF$43HP5xXBA>EVczA<(>I$W${8v3c&njO77U)eC z%)EooY)jW{+Wd1o7fSUSTHqm>NXob~t5pIk)}>HJ=qWdCgYLP;fLfKj?z1O#c2kZurx0VJeD=^VNw1tg?Fx&@@WJG?Kyd;eeVT6eAc z6GCTi(VcSL9QLrLUo*C!n)?;eSVZaY>Uqb{N5bEa z=DzAu7RUaoRDG4{)X@^9bAcI^koU2$Q9I|quhW?Ng-OQSOBa>;Wb`s~d1xY!QA1rF zBqPPdblxrx&uSI~!Y6x1>;q|^3HH~b5V@>%Xy>$3ojZ!co}F<_s{NgM0U|byq#Fj# zRqLXP;X8gLN3IeMs<0|JyulTx)`1Ab^|yZ?>{vKu=Sp%K;<>Lhue?3 zvJBB3S0zM`W|95v?U&(xQ|Rv3i7v=4Qf$?%keLfh(c@zXt#FF#`S-d{^;FSF@-z|n zyMocteg4yo_OXe3x4CLJpSbQ>kQiOLT8DWoaI^o8P@zONqNi7DdM5q_!CC0wSBl+Y zxX?UUy&GKBhT*ja6>(4#K`hBxkhC|a-s-!1qy9maM z&7tPyj8~-=vq0F^+~ZE5*x1n8QIQ1mZpc^r33kodQ&%5DqRg8x`lyi)CoaDQOA|%S 
z`()MoP(0~bdtVWKZxDm~-W}e_Aj5LLnVx}>KtG*#r+sdE0`2-C5@mV@JKY|MXwRul zE2U$SV7yVg@q9(IR(J;jsWiu)xm@ct*4mMLcrVEC%njFZtxpmReDhh;Z*ZWPD4B2n zcyaW5Y%cu0p5R3|23%VDULTsv{&NPU?40mMjNVZdoJFTr{WqD=`w*o(db3K}N5U^` zmjVk)s0${Vq~{%%{?*wYU+PBs5D$L7UrWt7P8=><`$opnPO4{#TF2HP;Hdk={vNfp zav1Gu501#W-!Lbm2)*EYI-ZlGi|&(3VrtS9iixe->3{muG2cjHbnOhy-JjrJud6RkxoOH~ ze)951k9;}f!&%yvEmggHLAU_Qf!MAk*dDrmJKJ7S>&qT~|3xe|wek^_R~l6k1x%~> zRD^_!D=H28(QPO1wlhX2eQ?HvHIfO#MsV&(p=?6A>BgE0Q{cCZtj|@rhoSgEwhKFV z$BoyD6DSxwkvBY%wOo#(4o&!d>t4<~zZ9#gUNGW@8h{(Ta>n#G=f)Ffz|z{TbCJdH z?clC{eo4WJh%fujSN>6pcZai<>7~l=opX&o-ZX|2xY|l+X~9LW8bL<-Y#{D_!n>a@ zA>dCJHbXQz2G`OqBphF5(=>S03^CsIN;io=6)_(CHWQV=BWdG9m#B*SiPvzhq42iI zn+I#2wBnF?xtoHt;SBx4pJK`w3eY0{Zr-=n&YBa4B3!Rf@X9`V=KbU6SZZ$%_Vm=7 zP~@+LQ}|~M>#5dZKK`IoTjXcLouU2i9^NLdiVVh&pm|2xDtY>2Z+$*qM$TCC=qrFD z!OY)nm#dcYHz`D=H_MvK)4vK*+|r;lhi9sPV}AS~GyH@X4^n^opYfWv+_->`c#`xiLd?h;`1gV6^mpCFP zUZfo2o&9rZgYzSSl;Ib+q!-605qgTi+0R8%(GS-j9j6|n*EFf_anBBDFnZ*~@lI7PMABeDI4P-T_;CeYq#P!gIVj-@Mt+0^7X&K0A1& z_xEE>6PaX!F^nH%S-6N*Mv)&w`VfkwSQodff?#HvQMu0B>R0vzsNUz3#owZ(&Ytc0 z;8HyGWnM0(kE#}c+cLH%JClmLk^QYsL)nb1JobNn{8c9kjsncV);HZ?(8P~FTFl_#|+5`F=)BW(%=?oGAGC)^C zIiD5uH6x-_lD@zC1pWw@DE(dTq*2QK7-?cs~>VrsU^k z-fOckd=FB~lT%c1^Z|cHv%k!fBOS+>OS;ZEcM2_!p!&$CYMK7VN+3mmd6x#AJiZ6Q z26x5Ex%|}`@wLxPyduWhxxroKa+pWM?_{tEKKGH6Yhme<*q%yP;|Ttuj|&Nmby=DT zGEr)!j0Wi%nMc#rMKwx1nEIeeG_luYT0wSkclt$bz5Thv0wpwtOY=G>6{WPOA8cik z4_SO2Xi3U1pf<}K?z;5o6ldu&L9cN1dz&mTAXMOy|1M55bG^%4UjrY*kV~U7$yqh# zrE5aH_R!H+#UXP=?G-{n+n`2I`or~vTipTyO}Hjjd;RaNbh--rr%#;gtt4NLMoNrY z9Jp!3Z|Eqn$zZF>mhN)5&|SyN8u3lW1|XxuSmAhd@*MQ?Lr4>4WfQBBl~sxF8)==( zS=i%J;im5HQN=}Ja?FQgn*%+J-VIM!!SuBS4=EKJcFvD(K1>S-aK;z7vvowrPEx7Q zbLeDErAxJnCTnRy!h6L@7Lc#Hd;rOXkYq_E_4VyC&zY3|{LBQ7li_h1(t>6!$|m(o zV!sXNOO~w-?BUL4ikh;iV;=0w&5Q`w_X4~M%lDp8#7UahU}zPhy8DvzyE7&hN<>4O zJ`={py{;!eyrk+PnaBBX1Xh<;o4<;mA}sfQxS!6DUoak_MsEDSZ@XTX0-NpH!y6Az ze+3L$aY6cpGQOD4$XBaZHn>&a=395JDd8ZBzQSa*cwYwx+x*tMlIg~@2GPOw>t0}S 
z(6n+*QsOs2WfU=nhhe%D0*|`>{P`(7MVYqRA3Rw}k|;oLeDeyRNRu_V%_O1w<%8cH zm-*ch|J`?V^JBse7zb8X4Nj{J8*0x?%8r_ZjP;%&sQ{OvNTv>F6k+DD%q1cBF4kia zJ+aC4TJvb46Vw%VM+xVj1QnWR3>*|=Jqn}QLvvU^6-Lh!zTlM`Iog;~4JH#Qq@rKU zIj|8wcn;!G$Iehfh?QVrk>-6d56OItb}&VPvP(xOjGBON%F;w%{|jE`>S{r$2RK=y z65u7?ew>IaI)1PxONkso%hqKc9!VPe%30sw?-Vd7iFik<{2r4*scVnNBA7ko36hS> zz*|DYh&TLZU+&W_EV#``6q%etuc1y1B8KhR`ZrrH>TyKQqF4Bv;Q%`~3uI~s+%pZo zPcbRZxGM!ABzqWCB-L554?s;v zEo7akmUwc4D#fl5!v&}1=c*clA3Z@HOAg~{652yPRoSdOJ#PUuvtPI=L?4RSLcrg? z%H+}(k$n1!k`hu}Jl%%4oVvoc%KRYWOK$&c)UooXIrDw}LjsuA3pDpd&X~DqN^mys z1C^|`I8PcxZ>wuwYCUcOQ;MOkHzlg#dI&2H`N7}z-rm1lrD`aQ5)1gf)1{ux2q33yA9$qb@jDzo#;u#D(WrG;--+%HWNTSxB?v-Q7AylBmq3 z!<7C;_Xpk{o_>9#j9QhI{=3}c0uM45e@%-SnJH?}N!am3k^OuhH}xFu=|`6gDmW4* zOf@x_oVJDZBkGII*@!rO1@Np2dP2^vOg^N_*@)?XnYn;#=n(4m!mN0Q$vUze77y}w zBHuktN(zy?{PbEztTVUC#mPb%$v(A#3h4<}KYYldpE6*k?zs5-%-9@f`QTktUUPuk zUs)uyp*BuDb@?+n%|p7dHsWkuO*W3p%B4pRC&vh?sMTtwV0ZY9f0;%=)#!WKF(J{!2nJcVf(E3kKd=b(>H1o|t1=Ik z`i7v~A#8e6*+F`F1*5qC?oiaup$aI|shtu-k|#kx0OqwETn^5lqN3l7IhdW`%4)?f zc~$#o2b6_RoPrxKh0X3xxRR~VTSn7?ifmL#Ekf$#zrdlr#8Y#Lh?Y;HfWK`)V=imLMUvGP@6$ z?pFF9L}ZDInK5lcSu$(knxlvHyWB9Ac5~*;w|(rG*~{^_58Cchy^~pH?mCd~ceDz( zj3x&n2b9djDZYr;cpj7UH*sptOFjNFQa~sAvVx5RqL55TnP3sA*2tT3b{l;3tkS@`Z6q|FsI^=chE6(w9BQwBSeqijz!EML5%q zpZi!a{_xw9kqb$9W*)uF;BQ8OU-1OpcX4J^XXaw4d1t&I#&NQ>V?@mnDD8u`pD3XE zNQStK;J>s`iR-68&ib!)nnnbiKBYyRYTGvqb;j;a(QB3$1-q$@2|K_e`%fz85? 
zaE%yG&el@v#Nh7_Jgl}qLPh123=tXTD(GLt#1`e*neoSC3#`gZGn7Om@sJ=p(H1s+ zrDjsFUmqOqKcu!=IeVt~M6F)TpV3*VX7~tBj<=K}{`3VaC$c8=BC!Gd7 z*4~0rNM7kZP$RLw=A=$9!;PNGmo=LGm=ESZUz&Gy8|g>;z2Wv2P#u~t3xGyGA`OKW`Gi=c^lr*1fNQ6`DFnJp>5ciTtZ0{EGett||;{TETK4|td=067xN{v>#=w49Rn zl5go--z!BKF)=vyygae*RKZtcS2{5ZyK~ygTW1hOJC4LkRk+UNeU=0KnUfDP` z)hw>|Q|C(E_rV*+VD{4Hd{5-9=IKybIVDj?az5p&%yfv`Y^In3Wn$Z|AqCZq-sKgr zFsT}^gD4zY(Rx2_ww>z+k7rwA6gs25G|_x1pQ!NYJjM6S@%n&iXE;@|2}f;&1~8*$ zd<&+eap)HBRlixcJ?g}+sn90T#EY&YEdjlWhFUx6`1P_I+NMbacUhY8I=k|V)rB$ok)0~Bl8@>I_ z$1~M=Y(j=w1+r@^LP=;s0r~vA7qtNUsEcW&71_uL+QaT2Pe=tu1d5L%SeXoQw(WPK zU_CurXysaFS*&XcNOv--QS!C2%jPpLRNoJQG7hv*;Nvy;hs>TusW>+-;3OuG^UbT$fr zIlDv|(+df@2<%9y1nGbO1f(t4u5PGIWLKFap`qsCnHVUyn2^%Bs+Jv@B|f8}M=_TC zBJq)ONLE9bBXI}zeEcmwh({(yM43uS!iVw3l~bRJ@QT6&S`sv^7_(%9Bd0`Z+*s*0 z9sac;lpUjk`FB*##zzoPf%TO7k*Vm5hj4Pp_>x zZobnDV|po_+sYfd+{uK#a#qEHpd_}f!HP06^WHmE_hno%1_^a`0^3z;3j1M+Z<&41Q86fJ;(DfCf3z$>ZOndu=V7m~8kg4p7 zcI4$H3a4G{0(vidpTWu^*yLwye_ zn}0U!LC1*6E=5o&A8U`6J(vNP#-{Yq?L%?BLozQ_ zzE{3j3{vueKDT#LK6UR~J`y?GA3xy2e5#gSNcSWEiUt5o0=B*3)3~Me|FpD< zSS0*z;n;8@u(AI%Ndu~xnZZ~C(bc{DNdL;jypXUem1c%_4b;}6Xv@oR(I4*?$`Wrw)zlb4#cc4<^{L#ywYTpZH1rul+)t}cZE|X~#R$SRn*7<5a}|S&O2+`6dSWY~K^HhdKkMQX6OxqA z|Jmj}ZF4U0Rw`=iWlS>y8BdP!3g5mCmO8yGt|Cg|HroTf%;M_!6yZ=!dfPx}cX4U6 zWmp^_+rK6%w0w1s>U)&~XCOwxC8|FY?j|E&lJqc(|D#OgCHi~8bZ#0Ss;P`F2{(AQ zcE$zwWsw5lj^;t`AcRriNkohvK$6(X_DT7zHfGDe9FW6(){VHQCHfNQ9IrNPDh&n%bgr;6PgT}Zf z@(oBJ&Z;{%VPcdN%Ylw}Xj-S5?r%Z(v{8#qkWMpNttWkc@d4A;F7~i4Nu2TTcG*qq zC6*oBj=Q2w!Y=O}lVE)2mnt#rUm~EOBA{jI&snY;fqO&SOV<4R@TQF*et-i!T*TNh z%B@#{r!Gi%ASerjTM#t1tW>!ax9LMI9q8}Il4G}tB>kM8OKCXDD*q! 
z4lAt6oRA|CSG6Afu+J6sMoN5Y4$Gv%i?AbKHimhtHb;C2>AW@G{pitcl_llYRKg8@UoPk^dZ@%N5g1>TH$M}osmQ{>JdVaaBA`^G1)Ggng2 zGmP(VcJht56JEEM={R9VcSX+Z1oo~VJTCVKrw|_i4Fop)7<#&CWc33}*v|R>QXAOW z!Y3CRX21O@Ir97a1D-_AijJ$l{%8Z>~f%;-{jf!W_6HBHhoQe4N`>T=f3cfsBuzzMGsP~o^ zZ%3t7{%V_aRzCixI={L|wjQrxvx>W@0mSc7B6hArkA^lzZ-=A(wl^BD>Gs8xnhQ>@ zwRTnTjoXK#1>G95KhFqzbU4itogY|!@@xu?El({4LeBg@SEPBWdbmFSvga@qCo`|HCn7Wgad_T1vlv(BcEiO=wKzQ6IbLLg2qJLG;YA94~g~y4kWSel%MlMEL;D=Lnbd3^e1PlO$y!eLJTuCcMn7 zfh3w1HV|NaxT}`Yd7Wt!~}oK@(yXJiQ>(odGvkO=w0{mp@-=|sW|sZo%_vw(DyQRjuggiY}twG z!QorptXn4po$_)pR#q(PFw;q$%pae)NFJHG8daFVO#fyZElC;P$xL+~R3GNByDq|D z=Kavq!@jQ=Rf26FuCUeEln`=CUfvOCk`rPEwQGekv|ATeZ~J!8OAIy7N?2Iw_=V7v za1g2sbx)JPWXQyi-Y;+|+}8x7z7ajO{_esKduqyIzsP`aiI7XYW0_wuN6~+Sk`6Wc z91Lv5!864Tb8^wOd89bs6xnGfC~aQ!w5=7g%c2xU!MiY9>rUY5DlW2r^?y8BK2!M< zwzCNNttJ1@Y1Trvnb)^cAO!4{m49(^a>9Cp=kn}pSG_03@Ji4Yp7-PQ-1^tqe;PkF znV(t&M~9AV1Wnnzl0)|$*feeO#>gF~ekO?0)Dipqra89BKu7!v(|d3yV6;w>K`FC= ze%!9@a@KO!%;=d{Dps^!B$5 zuXl&PC6KK0{<@A;d4w$Dmk{GItFd*i5LRom&_*Y+_1%w?*Htd;?A;zWt;~;jJ%j3f zs47ru&WFUyssTM(GiC0hgKUOoktYL+3#-n+f&dwMgJ68Pm|A-O<)9ZWKR)&9<-rVmG)1B{p2-qL12fm; zhcTiirB|hC*KKxV7zb4w(-{XV2XG%3+^zw^-Ot}4UxxWlIaW)_cC4?xZ;S!I zEZ@X?yUMwvq7r>_87MW}ukHOKim9=1^HGGxZeA9~VejDR+As~8YV!pOr6y4M^#$~A zf#S~-UZlhS+!~0*dw1hI4U{f8nJ-0&uAvESG^%Fs9C z@-x4X#I`S9SI>AVn26Efg9p=Hee8b956iKNpl0)G3gy17qF?w>w|`IEo5=TaI#N|t zHxL}*okv+mZV+`+p?7Q)4bYh8aV_M>eN5a=lU}SlGi2vCQAhlJ59!_E=gFT5HQO61 zKov(RR8d)Z^PTIvt5d6SIpJd_^{BmO@8U}JZ|2pnRl$^hgRAbsve(63)l+YKyCuxc z^X4z=-DWS>IPWTvXifRbO*gT3Ph^z&cZd1>qIILrURgXDo4~nPF>g7heTS_Q{3sRg zVD<>OXqMN4HF4}iNtA-E7^#o~(Df~Vx?~DV)w_wYfvyM}7ZW)5BF}+vd|@=6c1lm} z&mosp!%}^>nlZXWRKgZCwLK9S75#Oj!1ng=u$pCwMd%*yOEFU954Uyc8{#2{QYBwb%!?D;ffVely4mpV&Q-6^;yruZ>ElZ1gN2_Od# z;#U5Wr|0PjkibHv>ICV*j>yk#(oHRQ_^H0$L$vx-Hr==I-~4Tx9avx+@qhdC{J>5y z2xL&afx_AOOi()VcaeXW)hlKeS2MdS2d&7pcM@`Avx!Bqo$ zGf5q#^!Jn$BVX#yOJy@zw3)5-Vw6~&@Qce;7g3Ivm)^~eHz;mmb6=fQJ{M@cp+;t! 
z4x4m&?3&3?%2FX-7Kz4sh~lDN*6(3QO^IQ`zkmN!Y!uCXr`ce5FZotJ0=Mk@U@`6w zcYlD2^NmU4*dH`?i2U%!9Y-k2Ra8hizgiuj4UPXx-V&Rc+^?aT9~4WQG+imlXDtXk zB~cHR^I@b!`S~aSmT$!VxE*md-dwpFk(BA*{y$)}K-8o$viJF=i%+Glx7JAow^dbs z@wIwYa0q#PqA-y~zPac_iT{Ix$?A1~)fV3K$rHnDmp$wHOkr3^b@cqGstsUl=6O$? zzd72PBVQkM>;{Jd<1w;B{#^_%hCkAhGMbT+c3G^D#1V1hH0 z$;XRk=DY1HS%)F`ROOvT=0CXh{|R>c&{mm)+d``Ns{_SP{_4tD3%`+1uvVsjdvpC{ zHm7$nRVyDVC;XqQ=6U{gs!9&@iu_Nsu6=)UAv9EEsMaT>bt?|y7fYD|m6M@mY9_e~ zD&D777$wH)-?}Rz0GPg2`0Q+H8Sj_2CPkZ6df#7S)Zxjvl;y9X!LhbiA*f1Wwi<%N zZ}X^R9@#gdDI}UV;p9I5=9y2oF+SYTr-!q~ZaowiAp{2WC$0=H+GlOkQRIimvYHK? zqn9#LUGEIhfIVisWP%urHHk@1D(5PLe}Fb=dC91$r|JyO_#E?KTDiK7rNM#daEJw1 z1TCba<=BCjIj?{WKxb0%>w|S@c|erL1W#YIN7IKozj(Gby-xv2rFWi3DQJ6yK3SnE z6F10T94M5-(kj>X+|72&-r%=pubD_NwM*w}Lk_P<4u)@kwEcR; zyXrM2&KBuYP|-xqGnOg{_qlxpjMhea=OO0a-o!eb@m=13PERE6Jk|FMA{x@ZeL-Ab zvXUmqo0`5pj{h9$c$iY^-{^&0d0Z_ep?(wx%-~tE+_I2YtA#QP%AWjgvwn||@_W|n zKacG;*HsJy2-AaXB&nAJuHf#vZdOP2`Yml&QJs;JO=q56N9N30#l+SSDh$2L$JN0h zGCYye?}sKH3uxYWsSig{fdLUZs`~Mh&Td3h9v7 zfSp_&MKSr}zG)Vw`90RVfyo#v<>N>R&WL2HFdiIU~#xd9$!b>8|C zLlz)Ebw*%UIurtQCi;q#F>J`ct;e)*;QSNJsRg*Lm_(cM5$z|7#08lQG2(< zhu6-h0#>WH<3@9}pIXenNwz_7(gg%gpgFz~m^#0b<#UdxDI|Z!baA=I@Unr0(7Mw8W ze~z0NvvHtB>M`TvS4p!Jmf`*_xb@pajcqP2_6`C&jll9f>Cw`?*-u`hkf~up6B9|m zCzV?gv_BSCgF4EaVcko_^Jjmgsuzr8cjM0M;U4R?F=MkL{}M-{Y4*~}cw+fJewVTT zWF0|%vH$gLqYM9|$qyAnqqDqG&o9d|$z#|Qj|+oCZ{J$q*=wd}&>+B_X1vQV#snsC zOJV4p&rHt@{TcGN$x>|isswv~Jovz55?0DPB|7o6=QB0#i-^u}ZGvD|xh z!k0z5zM(~{j-dRS&Znx5hEwSo{YsOy=8!_MOD+8AUIIpqos~3&K}mnmdp)70{VzOs zOio5g38QkcH~G39<4MiGeflbZr?P8t3$=?|?Ed)R*fm~IL()O2hO`_F?jrQ{|I209 zt2?e`9%J~il=m)f#OjYv?S`1F4+gEUE-$O3Ex&dLc>wSQU78POmk8P5OPq~?08WhG z5XL`wN5=ylv0;GtlJhgQEhAx(`f?V1S>3}vrG@AcW2GMQx*Ti{m zw8wnd4ZK62@a%j^$82E8O}P?!-eC)Cb!Uw}LN(0ymXFHbk}Zd1!R;blVv*=k66Tqw zzAv8A3oLe{amsnK^%<;tj!|Qqi*}K=?GBvSYQG+#P%11WFYv zhK7Gmwe4E!geiMrC?uhmpJDxn1gG&2wvR|lZ!wKe=-#$s-a#CYi7ytfIP^}oc$iBK zuuBZFP#lmF<6$=fAg+6@n1lFhQjrU6o|M-=2vy0+tjDE_^hw-z`9U_FHliaTZn&kp 
zDX=UGqvNQd2L?&Fd7g_&Z9N@wBM?OFZu1@uIK*u<9h&hB*xs_;e@wGnCaP(?7nV~J z6GQ!a%gCoHWmXQinZo9O@lo`o*v093@(RYI|LJ<)8{#+lkGDPf1a6l5&%MC1WxI~F zR~wbSDPJlnT1}Q&hMUNbXa5WastDjyyST`UvinDOxMPS>V|)JZIbw0aMGBLiolg&3 zffKNC$eCG2l7AU*#Nv2dU5|g*`h15yFv&^y1V^=4%XW=KQ%X!lo8aPP4K5w*;-q4Z z^Lvg+Ate{eET^$N)V#S6Zm(>ag5yUhJgtAuXQ zXX0~_gkSN(EB(e6Kd9}gj+Pnc#s-ID9AS)rCNP4x#9BtB`QlfJji+Xgj-l7pU2cw% zv!bc$;A&`p$aDCLZ%ry-Eh&6El(zlL6No(L=WWeDrJ1fhuSa|j$=MW*`M#ntD%+o| zcqy>8+~rHw-d(hjTx$|_c6@x8%L@Ycm1z9jh~WCyCs9r-o=B=f*lBaoW?GOs7${PS zX*}rjcG{_{wE51=`g^2UEQZ}p$$Ve$J6C~U*pVjta-96M)Fp89Fy2%&8_hXr-Y#8g zc3ErtHPrb3l7>q+oHjVcxY}Wi5Cr%dlCoCzDIoP5n8C*$^noo!9z^X`8>H&Fs1CI- ztix8_E{aa$LNqhc^0R{HGLgqlGG>`Uxw&-UoB$o5<#;1%oPEXtpe90ze1k zS1Of(r6mfpW%map{bP+bk7Hnrmi&Df1sK!KJ-=c%Z-&JlI%+4TZCS@~6e0&V| z@q(K={NYijb^H0@8LZSG=z2lFW!Dez-l*MzM_n)tcHj&9E}ms;W?eWIpko?%%6Z@W z4fhR@_^HFn)0oh`pr8BAx>B$=8O~18&w63wc9`*8+CP(sQf?~e4LqpW6LEEI)8DyG z>7VS0#5wOEf4yAA1O)=zW(LoFDf#vOTMF4505bH=Nyk7kPCT%VEh^AzAP*w7p1_F7 zgZrEWa@?Sg(cEKf561Q$?Z9ww6`={t4t@dGqBHoCdBZ zLo~p8BY3p>2(Nof+dnW2LcfJCjqt!Ovv}GbR+_J%1oFcI&J%A3nq0dkIvdE-;}Za* zgGnS)MTyJbGPe`(bZW%Mv+JYv=|S8N8eQ1gx(L=ZrZ8S~wX@~%o#Uiw*sci`86UWghN&y`>mR3=EAgU-=Lf%-vvz9do7bzn!eL4lhBYyabUoI7M5c1@{YAm z4XZgf@59Xnt)}#0e2(ca=JFNTKzC`Er)xC}S~t!LM+&>+erhE0{@YD{P?4&d3V*y2 zRbnF!zC_hvDkg(hH5^rd4O#|a!jSS6%Q1!OES9%ramY(xoTTjkBzi!!u>Fsd5A}oS zaEXK&AO-gYu`WmRQQM$#>YXGL6`hay-}G-!OiB#6q!kq@Zf}}zXmgd%_7HP9&xJPJ ze*CiaQviJef}r(m>@&MmTu(at)0D+;4Up6h)J4E4tDJVe;a0yF65Cq1J-NDk#G+?I zV}=5ZTuc3fb|<%E5imDD%e3u##D;6=Vqc~|*>8reN))ZA=p&(0?uNI_1?@LwZ~-WW zIi7PQ0`mcIIM6X|^m^>O`Gw-l_9#;-j?f_K;_L?SbJ&<-pkC0a)YcCr zBb(%Z16N~o9RW7l&kWG(sS9G8J*nZZc6%}B%z--IpPB8 z+A*FE<(my+4Ht1l(J6wib|FIyGm7?ICPK4nt~Z3`ru556d$3adA)k9M1MK8|q9ina z!cYz;)ud7wtd47ipFcfKZUe8mf7R&1_i1x;4D~#zKR>ddv?VBVQgrrI(AjW5l!&;& z!+>||Yvu6B*n~=DQ?#NT&+lP$RgEGj?SP3I9y5YpFb41M1{CsOc_U_e=xI0Tqz2%y zyo%EJXaxQ*LKYbP53WgI23L>T9X**t&C$i?K*3RvW{&?FJm)m8599`=4CgmHB5}{I zH(c$7ubDFcTrDKd@5OupCUY&zG5`_Y*}wewH=@x;(^N=l;kzt8Q@IC`{Z>#@U**B? 
zu^cUFE^4H?3&}jTpeYCV5dVf6;L$LJarb<(qY@S9;@le5SbDtPy?yYmy4uM?(X^7* z_~-qul!@TfjlmuC-9JG(7oS{A6pU}Z78Y-w{`;|f!~0f?6z%o>Oc{b)MvkDq_uei$ zGHT^tZ-_G+^gN%H)xXvhX5%pgogf*!Da_w@FhNyWq9HaY$(^vt7Pft*k-gKk}tK@cd*X@lqv9V=0#P&3iQ&l0?KhPM? z$F4Suy~a?9We>Jl|K7A39n2Rza{YNfy%HlIKPvpS3rSfn#!Ku9M3@=sDlC|!iY1`F zeslm?2aYr1X`c~^&kzp_UtNm4rdErF9)`ma%H;vQj z;ecPJdo8myLZPTqgsTv!dr&Svl#EBTP8vI)eiD2jo=t^NBpj69Zfe(h=Y4=Z)|3Nm zm%#bfzkNL~`vv-)ng^^K#LN`VpB2dD2oEncL8oe5|4T|U;|n=W*D92n2)``?xRw3>0idQIA#?3$EBGM_7F@5kE z{-=pn4MP%i37P0Ffc8|0IxAx1USR4W>i`?!YZWw(eVa~M_gu6t(i#2$rvu|39ARhf z*AwIL_55Ht1TzrEjj#Afd4AwO^~PSl`OxL+n}?1%(}a(Ws^q1{eE0xMoxw0Mwf+}7 zl#?d~Is)D_w3-*g0=bZ(x6}Dxk@Y{~(%WUjCEts+e^la^S^tRs^@{&1t+ii_KON0x z6}4?)fMHmJ8K-pm{fbdIf1pKV&kVrxJ{3d;h5_H+;+IrF=m*|$9wuH@{46IarGC1V z%z`$<5!ms`_~kdW;C4~@c`q1xrrd8?p-ZJI2o4cY4On$TR}4qv!v)kZ2RvQ>y4H|b z29jYCGVm==4zG&eS~~X7lhkq5Dw3S6gL1=(n1_O&+Pa|5>IW)PtZJK(EIjQq;oG100j(V1b|KyJiL;Bq+(>wzm@}^li<&EFvVLyzMmXIk zUS1mxyk5I}vI%9_s@`9f?)B_VwAeB+uRNP_Cp5cr>1%Y;PmK2eqUjftWaas)i8UGU z1MdO;M_@tlCZ4O$fZ{U+V}Kg_?d2wJ)(Kd2;yo`P*AbP-%_2be;NX6e2``S z?18mQBpJvyBd`ASh__f_tque?7Vvhh^wX2d%ZK04Tdqv{b*uYbnFe%jyQHI{iE@za zFP=e3CB_j{1qE1 zugB*dy?W7F5};qVYz4Y4t{imdr#6!s_X+~0E1y%8&XD`wTRh%E9I4gE_vA0vhv#y7 zIxzEtg=2LZF~LLz(G#u(2MnVkO%AUWbT8d@(Bd-n{H}XaJp^eS*+%B|yy!tcBJQl$ z|8LT5cx{nZT)4;0=5KWzaOeb-9n8^HjX5~c<2eIFkD#ux;Xv+?IdUT$N1T{u9uh%i z52vY+ra7Bz1b&G^hGIHFF*8{18y`1I7$WYjfcIg3$-KD|7??4wI=4K;Gv?_wM2`G6 zh@uNy%E_lO?)Lyh{*;YXV;vFrk7Df=+^82sCw^So0wgr@tC-c3r~Cd^a0?Sv!Kfrw zl#xsa@OE+Xypwu#Ka`0$*ou9{azr5W4xlpb$_ah5`k;ec9 zx+PtKh2pIl9+>$7YrZ9Sn?;G4nn0mZ_XedvXR_uyXHeX$BCC~8lKHevvs1-;KaVL{nS7e`%y8p@wY9Tp#wEBlaDx|5eEnV6ogCMs;^#D-%Y~=?r@)*Q)vxP2crWCK ztu#f@qGea4G(}vp-8++s%e`_?N#YvX2^{8vuXb$jhx!`NVW;QP9sYGsuapu|09_BM zQP_N{-hkuh{i@U@e{ZY}B{*V!%FvbJyd=5!p`hO7@h_t-zAs9~9UP$51%d{;UQ|!) 
zw_SED<$y=Ez>Zn`g7nT3Qb}UGb?R@kg9Hr(ZdNSI7nMvrT#;UqIK*mCz8~gejW0fxAboXEO~oJnJY^H`sw!wM!9tje(LHf(T)23LN~Vd2y?%9YS5<# zoWgHR2UT^l-K4q@0x6?XI=oh1n7!S0Fz(c`2*S0;f0!Omi6v2J>{sQ`8vkJ@nL;3* zBF?FIJ{E^;&fKPPI)kam{<*J^LoT$53Z(LK%)w3@@d9IT)$(~uL~IM2vcKD@I|w@d z1$P4bCs5{BcQHm7&^dr!JQ+uN8%`|S$j>x+6t&)b_U{<NQSBsiF@&ArcPmz*SM zh%$3?*>=2G0kL3yNSThpXKyHkp0aFh8JCQd>(ndsJMx8ANr6j|J?E7T7L*%qv(YMj z(0%vfkKO3Ge0S-ZzsA~hU*CP-VuS-o5y}rrFgf8EHYw3^Y|vXIY4ZpF*>;qnPcAwN zD7}i|(|a=b;3y{Ud-zcXB+Qv;&AC~=>E7eXk7&=LRK6!4A2|C~&Y(H7BJw<4uH3>b z1Yd!A5L1+~ny$`O{E5JK^cTRp4G0Fw#$%4CxSa17zK3M?S8O5*bSnph<4eTxtg-=M zk_eblQ>#@P@TQXb@G1+lFcx8v(eS<)Yk+;q+iX+|BlT(u zUkM(-SU}seI*?tTwnyiHBP-?O1<5ZRNtJy^YI`rr%D#y=MH6S?0Sk{l=IQ{>@CdW{ zfR;d2q<68H;>HfvG~?0FSR^~5b%W67akFx=!1M1%6B>no=se_+{e}<|GgJrMEQ9=u zvWsAu2`#{|q*sPPI%T95wraGfy=DyeU4>R04O&F_14F^sp`B|piiHEKNVed3>TGWh z(nqegR?@9<1`Q}%N+tVL`cgb zZc{;Q zuOq!CJFzks3DcBMCQv1{J_L)I_VjiVH82(_a<}Qv+I*2*; z=7TS?&DPg5aq)-Ww+fdQB`?^ly#i$12Y`%=G|n*J%~Kr<;$mcs){*0Cm(r8shX9(x z>t}5)6o>-&qi>eXEQM_Jcb7l7>Dc9RD<&?N*-oP!!*PCrMI^fL#m8a#SFs!{L5 z1t3bN7Q4>-!sh_$IeD(-7RsMQAcgijh^Vz*#FO00A~AuQk#YEJod&98gc)^>>@+#5 zc?xkg095*5g*ziL0S#;ZF}$aLJ!G4{0>JtFS6236o&u6^cpmN*%^An52KCd?rsw+7 zNukP6*ijyd^^>U4vwY_xE;RoD!)>5k;wIUzi>|2UFQE8=pec^ukc>=5#4<2O_x|h+ zASN4lLuxXV*_?8KfPi@|DKMUG1l9D#w@t0r<2o9KGucl2{K`SBmB6a7x{|I+ZjENg z0pqKBF6;HvKs!padHyX(6_mp-XIdK~RGJs45R+M1jzgea+kh&B7XX-g^z(s;%;av; zMezU_${;BbB*G#COD6K7!v!+O)g45VGH5{n{Qc`4AsZ=@uKgbjr5FS~6JtCm#sd7`10cqL%=l36h z)YN(EFumYBwkuMv_io5#yR^xsJTK&1I(#_mzPv`aZ70>boq^c5g})^jOF)$YKEg2j zP!&cXv;1s;W83S_pfK^g?fbY&p^_w5{k|}x-fz*nr0dOJc*v=lMpYO_ENujV)Wr!_ z(Lk8Lmcn!0?>!s^cIh$i@0EC@Em=NUU_as1&Bz!M*5X*lhfoF>p020(oM&Dq#SUEU z#$Mc$DGd$V@jTVx>6p)X4X0x|Y>>jrdo)>NBfK3ij#k7vSNjUPGvfIUv7F)BU=|^B z4UK}g)<{69USL-O(15vc$2Zi)y0`8Vd|G8`Gu>va{tl_e)B!`4&t$egR2=HDX5jA0Mu z3^u0}DtU+rtUa2MTcy}yA!RKM4J%0mLsE11Lf|Q$aG@8X6z2?PB|d?}0#fbMguOD6 zts2s%;!gFwvIC5k0(%_5&xu~j3-r_R4<-tnb?-&s`)F+D1gn_|32*_3j1=X=vmn={ 
z4wa__OzXO5JUQO8lUoed3@rJM1@E?!Rs%C#a1PifZrl!tVdxfSJ+G|<LvN5cNAUgHcdNsQ1rVv*|^K_fJ6q6e}6xd zx75@pFo@!3WMpdvi>rDn`#vD2G2C9&-TL7&)gQwbL$MRM_ar=5#_?(>a8uz}XPemrj=MB8@7(Ps78b9#%IG z(N5R~8JFr@)jc)1p>GvX(ua<&XA7njx!g5Byb#WPShBa3y45tOpA>jQ zgZZ{uasN^_BAhmDnAF5K%~RkVNR2^K(|9sxfZEwbz$Yyi-FQtO@Ua$A%ez;pJ>5VK zYIkwmsvwB8tt>>3_DkuJKoW8xZBjhq=|Frbbdj%9JO)C~oyV7wR#Bi8GItlxdp<`m zg_?$jP|)F02emCb!1h`6LrEcJ6&2PKWfByfLEj%%$EV6SvvvX&QBU+WlMmN`?}c>^ zPhq`39IzUR3?&sWK3&&125~$sUQ+5RV&OQXrt!kttM+K3l$d;pbmw{ zDXh-@{<>M^{fiouu!ftN#%z~;KsY$4pyIxUlqgy#hB`Y005d6AM-j_u$%UY5QHJj> z-UZ%1C=WBIZ|%hdV4dL>t{cpt=<%FLeq0iL;HEwNa5+>S5?~C($da-uv*)&?Vm6c9 z)6t}nzjh09IjX#_-nR&S_ELtc%9BOy2Ft4`Mu1Gu{`P7rV*{(2^a=>4RTnT19J~*s z7LfOeLYMOjGSAf>h+G~X)Q5*{j*ne%k&3#x<Mu>rn%HLT#96GTLNBN*110QpL?K51g9RfuEWQ#{K88-YbO6#y|ESUp zCOLf=v{S+^hcrOoUcM&{Okj*o(+B`-@#-o7?T`Xm5It&XT_*s?3nV@n`f%_^D=icS zo>0bk)pT@os|)?iX3}khP-_AIIkW{i&}4fZKQ{9A0zUcz2V2AanWr2I<`PopOHuX2_f9YVd z3S0GpRrm5$68E>mRoBpoo*rHcy;GL})h6n3A-!tNwUNcIf5##Uh25 z(vIgL&j_oP{!L!FeS1ZDh6eD3@paer!ym$Op7Xp*OB^2@=6+^PY8l?JdD4JDJguzf z5}b1mwJ%XI9s+d;R2HF5d7$1l7DdI(S|I;T(B>e@-c}{CJ4oPO_D49`?0;IR$r^lw z@2TCNdU#Ay9bnCodwX*eb6^g(zwN(%Ubq@~Kniw&mgj!*%*w9nEz9E|dB>66qTkoBk-PmCe6kIowt1BP{7|LwB$160w^(QsUu1h!* zGP&y^p<{h=IveepPoCedygRUgmu$c7MffMl7brj)L!X;`ww0m(a^ zlqhs8SI3>@(b;PI3Y+O~U*-I^VRlK(uWNHD#$3NOR}`hKf0LSY-(OLbmX@&|?Y72Q z$ToHIa_?`#z3_{|SqI2hb*FwVhfsPwShAd|CG*)hA>q|fV8x|%P)AFP&H}R>%vl!W zTYW}7(Q8VmsKi~G@(Vmn34Hmy^a&=S=^;L0S}H&8$6@*)Kw}B0(@lD+h?vNT`78f_ zLqfo=*=#~DCQ5r=mLAk+h6~==x@;2x*dPFr4;&hz#A1Es~jG_u;Mm{Lq*M|4`H z`}TpRS-mHjPBIP6VqeJAoQj17_h>PS6f*m;w%tth@};hh-^d^CEx06!=obI1&h8Vv zSs?-`EW5aH9g6;Mrwm{Q+NZ{fjTFu`NBy5=-#|B=IlU&nm+f!rZ{ASw%x_z|f0%Oa zA!$lV!2PF$7rJHA>eJP<=?XyW400z|4S*%08_^4!T>pDmP7;OeERZ-;+8NL|Y>$e>i2eQ0b8Ntra6q9UU3*(&@Vx%4nubnLk=t0Z17 zWB`1sbSVhs6zyqhea28|SjZ8aKlGU3RWk+E(dPlT3HL5ei|$?Ec1H&DnFlX6O&KzvHcw% z4E2q5`?}IQOyPUy@kbA@LTTVYk8_w3=}(eZaNXga9%}}JeQ@?Z;+hH5TFUy@n zXu0>~qDI77{L%#;2-<7*69&PIv{yXXZ9nmbb8^^fz1u78NSRs*0D6%L&$8IG 
z*7n#i535Pe3DFy__b$Y{Ri}Tr+p+q{HShBhJ11eAcYAe;)-;9|$Ay#I*fknI#Oi=o zDHrVs4GyET>BX8D*LX_`6*{@i&F_rY?wi062q?aM1ku(JrX2lc_eN}Hrjo54f}OC% z@xa-v>YfUm#n$7Z?wuko_{CBEJ&ce_k5Rp%iqY8YmC?sdc6V&ZnRph(A$+#9F;}ly zWO1?H6zaB9I(PQdvVHoOx2ZO!B33S^l23ed5jPX#9~`<+`udAe-|sd*-#DkzT}30v zns2+D+ZqFqk`4h35Z!-e#~mD;*^fI0PT2PQ$I^|#kAHSzjB9Kq0gE)?NQ8SPt<}El z8M3V8!}3ZWou)tL^R|o%w?@GSFAQ zG;tTXyCqAuIr5Hn`GfbInj9w`5{Z|3?CCL=!gc$Cf-nsQJr}sNKJ2S=yOctv)<6_n zUU5-d*B$%*G5gr%UFyR}1|Hg?61LM&M45fwKzn<}m$#luDx2NIV+v=bFQ-5(WZ>q| zIQA5kdMHwflDO|9!Sd}h8n`bIK9Ou1wbB(i+nIVQ#Txn6)O+gMGNScq`{Xs(!a=%N zA$C;*u=fXt#aQA}N66^kcIc#^fHYF>IWX-KRmpYqkNviUI`@H>-__y3rW~@aR;w%a z5n z%c()jziraD&-UXnWAtW0yoBv7Dj}wF@yfHJo1U{%{@XCftgDhzY$3b{5-=tFZUJqv zzUsDLdG*VhSI1`xl(M8SE&9EvmT4B1uTc+F4pm#hLg)yy*VElDUx^xau+34!YG{k+^|MmUdKmcVi`+J8uYL3R~OyCi}s1@7Vne1}ZQbgKW!S8u%X%2Uv!0 z`|f<#Bx@{oSUF4iinJ+S*aI)iJ#!XcZ>Uv3<$BbG@UA<(nqCv^e?UMzL%ZTBZiOA~ zmIz5nF(r#xkVA;o(3O>EG()7ya>00MvS5VI6I;$2l~DLnXDV!MxeslCk0xmf+#JED zsmqp!kw6H)VhybZey0b{?2O;TyUu)T>hs^aoT}hU(v!tqh!oBt7F8343gpO+eHq_Q+MRRCP}gBb9gZ9%U9VEBGl3esds@K!J~N z5INYW4k4S|B}@r_SwD#Q)kJA}P)^-PxZ&%dF)-1KzG`H2MxS&z(xY~&CgSIyn@c5! zpx|PBszd@E7|fEjYIk$-*{$|fjQQPE%`yMlcpP6k`t+Iq^a{vjrZ~8xIe|zU>JgD}5vd)Y(mS+hQ zA5*>Ys_)mQ87^UN?`Ye}dBF*Qa}!(Q2_U0*hA96|)K#>HB!!1t8}_RXe+<=07jkOe z6t+8cLf5~+Z@LLDTM3=(`nro>vVhSFUOd`%NSV3Gac!#3?GmaC^=?M;*s#aOHD-Lw zzyfg@E?cnRu8Mde7UoF_S@W_;e!X3SV7;8W^@x97`N-idNY?&sdfgJt8sq_Ak{;l{ z>=fs8%#G4zmv;nKHi!*D+p*V8h_3_*;J>l4Hl}PABON(MIPve-xu4NLv3U^8dVuz? 
zB1I=o@Nib&#q8{J7YVno9(6sbsy}@*(ivZg7P;zTS1@zs&x;fPEYW&zTB##bp*;xY z*6T@C9s*IY^F3*DMY=O$Fo!J#rM4F_F10j-?H~Hi_uiylt>3fInd5fNjOMzTOPB7o zZY6w#T!MoeoQjQRiO1y7)L=Ck@gY;50| z^p7sIo*CmHZaM43pU)H4b-u;a{)b&Xfi_`cV{`{xS3Hu#4x$4O{|AWRIMJfFwLaGe zLO3LLrk2CdiCxUB?kUl~cb0Lw{q~4H*lEnw`bUJmT!v0}stHV@-RVkA#kMEX4=ZPm zTBq=SvZ>a*1Y6$*03NU=ls-JM75Ak6{U$)r1k%ZFnv#npGK$qpueW%ZfT~!aa z?_}x#_WIMIxu`VZVFB zVC*LR;7>H)@76#K^oYk#4|<{?_&qJMhK7NFD+#4;8!k*JJTU%hY%&>STUk=wSH8~t z83DhL^iomqrb1#I*0Xj7aZ@k5hR@HlcJY=Pf!71Kr;YtuPvhu#^aJWGy5;ku&5x|` z4pHbD>`suT7YS3!swP(DcP0Jy$^uzb^^f&T56K~l_4PkXCwHfZx^^~AYn<2EgI4RS zKJECp$ytQ_v8NiaZo|4Cl(W|;`=5?SDDsf&HG=*UcDa(S?6s9YIT#|K7`>L|4|;v< zzOKQ)d`;k+RX*aqLYi+_=rK!n22$XJh>wq-^NKq8W5U@R$B&Nj_VX6)c;Qq^>nutV z0C;VZUcK+FY&>hZw6%L)!Y@6VHPipA6g!h4AFTXj>qp>n`_A+@(zNyAgDa?x-RVer zWEMr(Cfwnl?#^?}U_Q+uS=j&S?zoLyxV&@#HH1RUU_W~QAOW`-7cT?jQb-}qR|3Xs zO?h;t?tF<^W8|dn z)jKdrCgmBK+rDS7eN-t}4YqRNu{{3b1}bw?vxyxZ1;0J%#-I)9Z^K;)mL_$=uicph|Tw zY(Nw)-)lr1k+jwtk7Zk%w1)Z8%T7nPd>xw5M@Z2V5T4eil1O^fb#UBU-e5G=OOIR? 
zhAupN4r^Q`3up^ra$j;$gi3p4uN|NPXSL>7(M2W7GZuD?oGcyI;0MNw0W69(j=3V+R&Uu0Ugd=gxbEU zI|<|_HO<->+ND+5*YhGZ&B-*iLyY*!d}XdsD6->~@{BwM`T^&um!v9Pt(@Ri61g?s znXKFY9A3=ALDV@q4t2DUs(X_WljTL7UDsN&T+D;Uyu`r0IARBTlN1xKpW4ET$k5s} zd{%uCzsPL*iUmI5CPo8i_K%SNbSTG|(Ex~#|4c7A%{63%pbrztUAgqpN!*E^qBS)S z0N}z=!`L~Ze-+#Gz8*Rb5z}sh79xY0UevnYsVKY5K(@1mzZ0~EU`1vW`y?YmE zO_a5O4q4fh80+fl1~?AA%PuT5Re%Zg8iAn^Oh<=@3qXOsy`~zN*Z+35S#Yjmlj*N} z!x+L83`X*AdwnfM=0PeDocyP4-Td|QXFv49zeYg`*4G228$e(G`~KmS5dE5(nz+n& z#Ml4ZAaR4>7CoobVd%e8?7Riv(inTO;iamlDNo_)?wrHsp zBM7245o#nMA$gWuI{761iYKJj?x9S- z)RKiBAh6oqlm-WB)PdG4JP9_}Q1IH@MAT^PFx28y#S6Wk&PG?K_@>mS&f6Hh1tu)5 z$NM?U`Z=FCarONGc5CBzy>iopXe;;mq`s@E=_mZLBDzdUxmW)+#@Urg<$bUHedg1Vr_VD3D=zj5A>gpiY z>|=O(eh9c0c}vG_SV;TKV(}6zqSvA$V+)P=gWjR$Ej{mup^Daiu7V4fE?_hlvqMwq zJ5i^}=6pJ>99w$(PrnT(7Ltj$mA-iWD&&7OTA5CprXxGfrtx{)9T(L>QC-rOU;c`# zt1YYyJe$Nf-@`>J+~qUg3!;1ou+>} zY}rsA=+x?}g9(gc1+BLn9sf1ptb?gNN=23bho>8MX_{_sX)mNR-t3fCS{7e6oz%yt zQ)$`WP>x;P%HN#l~Qv7~qcG70VbiWaT(;ZsNGe_#(9SekBeoS86Wsn2X!qdA&p?KA>)wjyvI6+57fNO?UVTWBGuCsac7Ivz>+| zzf5`idHjHd&kTBTDDX$V<)s0hgJt=sw5xOS zZk?6&KjA(`Z7eMf_qAsK1H)A)UjxIKZlPmBBYO;^ zZCwE2jrV;SU*e@3%j+w@Egf^klFyP>f-4aX6Q|a^#@!!vo%!?M2`Slzi2Z)hb~yQh zh+gQB931h*(jlTCtBI_eNN-tUTX8Wc&!Eu-L#yFI#85v3AcUTRihpxAXr#U4i(@ow;bkf_9Ed%Q=A~np#7y_9O z{Vg=qs#VZFxkbQ#JBo)`_EXXf<^Jxeqa(mx!jA}Q17tgokBbEOqjZU7yBdDsmXC$| z(#!7qICGxUHtDLmWj=2C89x8GZXqs40vUT)H{>2brV27d={JqrL0w^>A+wPxR}RO)XmC#_uc%??E|zrb zR&OvKE3`IBR&`Befm_|{yC6>2A#637b!4%p3hesBHv)R`fW-Zhr`Aq3n>e|I zuuZ2qqZj#qu{}r>I%5Lz{VDTw$h`swg6nf0Y)OIBcogRX$oBsr?(2n7Gg#7**{}}8ga932L+mFS}$SC(EsZ1)+jz{iu zi-6%INt5?gr?8WCoX?LlBOp?e^qVVyOmu5;y zUta&7@0Zw1n*Q9#tWs$xl~3^EKHs9){0JU{ceBi00mdkhm-k9hTpB-10m!hbjg^R zg@xP9*SF(!9vK)6W~kVZhb9zmQo22$qg(s#M?AF3u60~eQ^N0V83|f6EEby*<=%0d zLezc>b+~7Hv`pB%X3s~dW%X!Em@U7cdfp^**kh0fQ&{$;oX6X=5t2f$eU>6#pIoy2 zSpV34Nk!EJ^u?60#XIL>PH#bd^ZduI@{4$%YvJz|?q7YFXok2PDB!xL+5P$N4V#vb zL`4C#H@@?4g)|Up?#_Hq^zhJG0;=U6uG8KQ082b4T;u=69om;C<>TM 
zup><9>?zI=q|xRpElHrO{`h5c%Tm{QhZJaPaem}NNLnZ8smP*stWb&iIrI(Inas*OU@>-8uc5N>l)asuxjZ&SznO#!9Io-1 z@3>4Q?6r4I&Fui*8f`zXgi1pSshP8z=i?XaTt;Y_QDh%E3s&y9+9>=zL$G&dU<&6T zX~!XT<>;cGJxNn1jN>{$CMJv!JFSB? z(qD76aor_~l?&q)L9_M1)#;}J0@CyI6{Dd0pGEi$U!CaCgfpJgM^u3{4ksDDr~vR= zOhiWR#+j;~=lOi*a9_(h%xwYJA5|A{Cm#*ZckQXYoY7Q4)MVaD`_#f`&VAS1*Z|7S zla|{SC}v_DsmU4z)XuLMEUZbrq&~mfRb6ZTG&3i@hrLXa^yVNsfn%aaP3M)0Cn( z4rJps=f@6-r9$Q?K~ul=IMdHLTXsiY=)x=7lDc zD{4AB?mYSBp$k|C9?gE$z({o$F&Q|Iy`>}6OZ!9&>BUV0-607qBTcDa*=itKe()cI zvof~@=fM&m#kQcMHLCIPD08OW51r#_@9P;ZjU=1kQ_3?-Up3$)!4Hd~GhaZiVGV!@ zRSwz%9zLg`>E=0S87igE=FKKUD0WWJ7ZADdqv8v0VX7~=z4FPfEc{}zmY*l#T>-nX zca6zsE~Xd}yq>`}7blC_PCIVz=+zNEqD!hKP>`oSYVd@%lzQ>8CwmTZgM5_ z!o@37X^K3;t-H7MDN{|Ki{&keHhIam>tH*IvN_U*H8pe2z_@k`rW&stv(ebE0_GJ_+^kc6BVgPz1G_Pr97v4p_$1cG%iO0Q^ac$I9H43{+At$XY}JaP+n{r=Hmd}ry-a2gT_)a24a$lD5strjBmzdhf8 zUIGdK2-buX2*H|b73jeqNi(k{AS~a~oGXmO0j<`JJgVnC+8{eI$+8*HWm_Y6Arh76B=1lZl0ii5J>CJB6jCM`l4kKHMkA zV{>eExq;KeaXzc(SJpWafDhcVbK|JEO60=hn``S8Vv%iWQn`tw`dM>Lh+8inPHsJAK^?vh*k;-3lQ`0uPi6J4eq zH76BYd>4cV)P4!NbpfOrF=sHfL);TAhC{rmb=@JJ%^vxV@E{$O?ejco4TklsautUE zag)6qve(?#%4za&XHmJI6HN8QqrsW85u71=0i`}4aGf~bE8obNRNPg6Jbt!V z1zwqYef4JAS3*K(N7DiuzD09vB&e#jZt(HBW>&+eXyN$G;yF1aaQ{SgOOld*0a3^p zPMQGZz#6%a^tLmx?Awj>LY;I{qq^?-S$9w_ms|Fh#ow8#gKwtjk%}CX$+=?-B(%+- zdsgI%lzv$jwpDET?PtkfYr7#H6}&Lx7ZUguuP)MpIK_CvI8fBW0x_E{=fLZs5)2c@ z%I2t3Yx+~^we{c0f{iJULu3@sEN|IqgfnM_X zMp3)hRGqla4FqlZR)lQ64gn(;!~)rDqWBf_vA#bb-+C}J3LaP+aAm3}CCtBENLWqM zJ9<8|J6J!SufZYSCS!T^eeBix3HS9~Uj>dYi`eUD*!+ql`8)RoT+oeTV)~{Lw9^i| zFE#-jgM;9!`O1N~@vnOfj^a&o8yh6+?_ltk3bJjV7Y`Z8cPLhCnA`8Z!r6^rh=XU*(XXjm-8CNE_cde|I$# z?Vt$#5cfp)!PBLMh05Tv^XFxLh6n9|L$59=dU%4J}&%30$#^!A10B>1XR&Rbf6&vB)!vB~vzkh8b z-qTS?9Y*tNCB+Uheatcp_FK=|sU#!aGPA>))% zH&K49Zg6vi8IycGVIL(>IR)VkGl)McB@lj&)p#hYT5?K6n>(-4(7Jc>{k`_SovayR z8CU60d}=)mWcghxnXB}xTp!TH{e#?q``@Zf0-&`VV%yNI`Jq&S(icp>v1FIRYBndw zJwC#ej+!p|5azko_QV{WMM6OR+-aVYWOf%}Wh<80YjCYpC*b}#P#jTYk}}TbzZ4w( z@dx?Z1#Z|*=%O15MG7gie*sa;Df7r1`yA#`eGIm>bvFhrQC_!|;*R 
z(?(W87e;(cOHBwbNZwp;SR+jOCP7|FV;sPy=g}kK(^Jc=2O6#C8_1i7lmvyAA^=Ug ziMM!pQ?jx=t~D-DZCb(__h(6S*@;?}OtrelYnJH0``}znJc|6Gsw3kRn6*AlLy+nl znl6*KqA~<|Rmr!Uc!vS|y7B(yH>q5{`HG&J=B@io*-5yfAHcTCRN=_Q+my=Hw9rqw zlTog@w)ad~V&2xqnp6xG+d`CX!Kh(r6(zJ-js*u3o`=}#^{|U+YpXBqc#;DH~EqU_m*KLhy z$}5gMI<1flJ%RQ*5pres(aS3RYLAtQ;;-!?5?a9-6@?qO#Nhk7WrMvQo&}W!Pn9NQ z)GkUXxS)MgVRz(;7M9Nls-7nw1wauSM`x1XOWF%YgxkTp#Zv%A`MR&Ilr7mRu_*ic zVGZh#_42KGv_thB|4)*WQY8y$UzL(zOmVaJt|;XmmUY<~{N$B9ZrzwzaA|VVVnOB! zM=%ooMMub{p=M1Wk8e5VDK_{Dp9cMDZWJx-17gWVxMI&t13d9et#8ABAPP9D?-}Us zLCcSe&3d**;Sm%niZdvDN+EbZo_IhZ@b9pYDs!q|Yiu&IekTdTw7l_#@&T(QFuk-= z+j>tSLBWB8AcRXfY_tU$5v2Wn&}FF>kwiXDu?)+s&AkZ^p2&K__IHq}IbvowCfU~| z@xsh1{B2Q})WpOv-X=!i7x*Ekk9!PWJdiY_#-86NXN~92$u4 zEpHMh5!}plvCrCJD`w=|T%$cEY$*t7djy*Z&6q*N41SOs6C(9@nwF9GrTY1EtxNUW zoso+pY}+qjfv(fTM~5Sx!)#&lhQVu^?HqrX4~7wv38QaAzR@%68dYN; z*U+tte)Y77@1JoKeY8=l*x-TiRQ9sM7iJB2b?h+OmFCKrM2E@Sl3$2^oz3(`gxqRG@zysu-XryOcs|?4MYh?Danc^1DVI_ zRa8w!#w|(%-Ll`n?`jWwL|&9!sFZZ_5;AX8%6OO4SiDeK$a?310mJEu#VGaVR;~iG z(8m#(!A`h8UpLnnhfkmeh=v7N8;PizM&sTc<5>1K-ecyK4>^XpA&&eL%5 z#Y>I&r;-~RN0~HuKK|DPrMkLTUN3?{!-JP$X>MB!J>#h1EBv04Y=XfndExM2j_DZ5 z$xttp?jFAQCm@aPDd@C$CRSQV3sj%$WSX zxAtIIMni0W?2CE}_v<^p+fS8b#+3$MQ`Q5TzSHp|_{E3+n_PV2UG z-6+$imYqG|sqE|X6JAH+4lyt=z2SN!HMP?xn= zR^a273xE+|`JHn*-se6fdw#Qomzvdlx5+rg-w}F-Xes11QUQfN(M$MCLI;VySC{%J zyfB{_{F%1{)EL!xMX`muTHj(>U%k+l6>M%P<^2~#Vt8y#8`f=iDzR{ce5A=2LR1@J z@lth+fa&NYu$+!F8=61gt8;~4r!9{qW~pGO$+RYDULGwysfO@NVmfaJRLR!3Qx%22 zYhUfBlv$8%FBVIBU$8e9-SCxIN-nLr(HE+lHEl$?CSzHKcR-dqZAk@o1o7jO%jX)D zqkr8S^8y-V+d|W9+U?3f6#`nzw>^aWEWdwE7z1WYj0m&v2|l8vNKqIrP&2L*oHKu zzN*PaeA!V!27P1{Xwb!L9&A1fgvfJKTjA!OU8V_?b^(WVy5~j46)&MGec!vw z+Syl>WX*N3?Z6-9m%GI3Cm)sxr_@Uurmz@W{dmxhm)D8k`*~hz^BL3v*@NLdYG;W> z^bs1gMo9yo2<7WrDXofl@oVJ-?LB2nTZKbgDfN7P;F(dviPAVh14BR?MAAp|>ApM$ zr7ko+(q;!uTinmK10eR!dCC|dgkpyokC)C45<;KBD9r)|M2iE zY4{6zi%*4eltSpwtI`h6Xcj#%K+l~uyj60UX6E8`WMgft9R%2H`>W>87Wy`ape0<5 zEcYs~VjyEgN0IFcXgE`2?cr3%ij?|W(SpTI^O{Z5G`o)Tr?X8rlkYZJ&N|f8f~@iv 
zTB(1Zp#JsZ+J}3XD`FMljd=h`D|7s98MJUiom^5{a|50{HC{NE8x9qPwCY6=s zu4Ex%ps7ewAqReAr?9cq@w)5aNTVcxjV=n{l)l>3l|9`Lss+ zvh?mjko>7t-VH}&3K)uCO%b+}zc_Ppyh+kPEYGKPP%=#?njmiUR%UFCfn#5u!i$(p ziR!uRCg`S=>kohByddebiH4>V2Dxlu-P}V%PN{x~D)ln@J)3y;QS3X*2->cV7D znO)w{;${ymX_qr#aki_vVx;ia8^N(UhZJi;4#uLnP{3ZCPHE{~b2>o+Z2LJ#A5F7| zVKF!VaW7v_hu_#pP81L|#@5xRSd%#N)kFOw9E+wDT!pt{wuZ*&0A}P}Z7?Gj_Kv%6 zsXU>;{o%wrcyugRfxli??%$$oKZ}1XcLX@wXwB&{{SPy@_Lq`+556>#9l&iJsTN1`VAsh4)r_j%yu4DIoYJ>nPF}@b`>Pno-EG#W^oip-@+-kk>z&qi z<7f1m?V#lSw$YL24Y=g0D)lE%JVA}}CUCXX`uS^uy56g4#n@pV-K$c@@nY35B#Fei3dJ97o}thzMcy4)F)x?gWst3RbBQlG*2wh z*R*|!)xZ5nCeDwi9k4f}zDPfNRoNI=7yRH>$0Tfr9QTRt+G7U_z;gevq&hk#UvjYz zt*qp@r>K!$H#PD1;r&T@2w_Iv7~yfjn5MmV;P!Ad2Q-ptpvUgKIhW)Jck8A7_N2dt z>_J?CpOg2ZfZPfh=w+1!(faQ}-PmH_N{Q^^I%qXlmYE}Et`2^b!;eH;Ve7sqXU%{82n&bg0h0QX2BIHoWHk(C52zo4u55;@tp>c zdFY3)+}o*zx11J2cD|qoKKpyTWaAZ&nkoXe{`%`{EnzmhDqq?-(j6XL-dr@d5p^mS z;tr(_h2NOCgzPc)fgFLS;bBo1R@|jD-l6otF6~~Q4x9%Kzl7_eHa4KoWB;|3V3vVTg4M~_J;jiK-8WKQ zj1hZ|B2ZNJ-rT*Lk=&TopCnnUC%jTLdx^VYbAFeJ<%?)VXXt`qiJ{flvy3~~E(l-v z2P)gIO)m6_(g>vhY(e*|WcixylkO3xa&8Xu_%}=97S+XM_$+0r2xSHGjV3&bWKX#F zXdm4x?8p*_srkOa-O?IA6Sf2JUGk#=zyWTrCgpVsL~GKdE8ZU-lwQji?c zGyLr;KGMM8q$O}6DOmHtkAL?Z9bT~x@gj?W~<9d5mz-2S_0 zEax#WAdh3G6Dt+rUl{cX-owvZyFXt4go45CktANihm#$b zF2EBEoq@Yq#^4;z^Nv!K?>6HSvX8zSDlamvB|XUn8`2s$;8gC6B{y+rms?ZF$fn+K z_R<#-K>(oX385cp1m3XRBJ>W*xpR2-(80#_gNrvBcV?%QJ8!gBGT2-aFVz#VpJVE&P ze$+cYnowW+a+hfYeC4{lQME&ae*DCEv2(`Fyyyl+V?nXGI|R0)Xq85v?^Ysxx~5&f zl%+1-k9sAvxl#$GQEfw>y~i4&N~rhE(#|d+@8B&0jnh*^I?o=aj~n=%8498CG54N( zyG?ra;$6e+cr-E{8x@X43Ej<)pIJ8;n89L&K~q+wjgb>=2^_SisaFhj%yPWFgry>< zjm~A#p(9QrSl$AfFvf;`3zc3sEmaXArLj*OKFeylcXq2fu(QnY6#*?imwAfxmJ_LE z-!(vGza8v#hNrsl1cLXyq)E#C>>`R4ceU%M+>pDZwVo0oBRGWxt`kd7Y9)HINwF=^N@@LaFMBh{m`%nLm;Dp2uYd@miinx8ZrGeXc0?pM} z$j@bEWI4TKP%w|THpn9yJ3u#2;kVfu4N}T>)JOgW0}7`JH-1&YZAj15!goN7jHanW zP^+~^?FN1Rg3-__o9@owm8-vNHER6dO>_#vHXp!oU(@xo6Hkgff?FuLoT%dmJ}Ae2 zs^fe8N>eDf0iy2flEu#4lpBqP+U1UL9w?1Nhe!{-c@87YmJ1v&bAp;@xPR0fr(_87 
zv)R1Akg{6U3}~{7M~WEFpVv!Q`|WZOHeMrmt%u9vsWlXAfL#pK{{IYR!+os51Qu^# zQVLN%64fx)6)Zjw8XlPWvNT!A@wv&!csJy7(cE#b<%GkVg8?hO4~Y)+amE5d=J{`%#R{Y{% z*NfAc8|(WIK;qRY^?BMkJa^3-A*yfs?`gaWu-G21jMl$e#7WZ-`dWd<{Hx;*>A#s) zKENu|WaaEyYr|YThrfqP=!Q-L?P1Pt($?mb3+kG3~$v>}M-I!Y*ZEbDj{3`|g z*mM8BeqOX7)xh?rkIn7>Kt6Qr>i@a_kn1_{U10TfVhy2Mh6#o)MbE2-z0tLo%0vCB z0Ab8N**DuyGWh1`mtFPpQlWI4hyTJgE`JMLH8JLQrkgo~{__M{tamw80J1w74)H5g zH6J!F1~B)Z4VGS&mX>YzB(NO(!c|89-f~dodeA=gK?NEZQ!yA@IOeUX@Xr45&&bkJ zL=}5Z-N9d&Pvocn^gQ%DM+)N2Xo|#S80xGm3Zw7o%lk~UXg-kN{t|eMtW;8pz~u3? zRR2fwy^`ff8dBxv{&vB}dOSYCo}L7WeZIXPA4Sx-Hz57*sQooi(FTS^Y0bWxAj1Q@ zIgwUqIMR@MJU8c?$ogfEKMd3_1HPm+$>?1>16y2AddW{xQHyprtqyBng%0c;Uh&UV z#9G$;p}Ay5(dji8MbM*c+LwBaxr_Et^ zCvK;w?vn#UW+qv5FsU_o;EERmCPE%AkEX-mt@~N2m&e$L`hNdVASd2(?O({ft9Dcx z9KdEmUD9-}i??2VfPyF$yqW`eYwE1}wj8smDkt z6XRmof7IpV>Xxp+G#`(VT*qgSoD5J4`=Qm{T7+klbsCX%e+n04=NQxB%K^(@eU?{m zEZ@=-;*Qc&d*>zM^9EoDWS`jldqIWq?%<*abswh&J>e=?CBO=17&`|qg-LZ{EXSN0 zZBUOBQWugNFGZv$A%f3fQiwKo^~jb46dCh6aR~UHxkpysl2|D26k!K%;S~TRNeq;Uo^E`PIt?mKLbzk^`<3CC;Hj04;kc6PKLDWi^Mg8KV;U z5SuyPb)4s`Di%nfabWe)-=%L%CqmC>hqg{?J-16;;N+bG03ddpn`deQ@!^Z|uX8*x zF6}sx95;J9+ghM(=bo2H#@b(ZvwVs!nRUw-=SElUC(09EZV~kq<#HkHlaBAGIbpDx zpX=bFZFo>%igKJbh9@UNb1Nr8ol^3N-;2>1J-oM=GoWOtcXBGI^VAIczFvIGfHmcV zA^U_MNr2jvZW*CBju*kjq)1z}{9yZyL?E$Dn>gU&B(7PtsJ-!)UC#yVAD1Yjxhdvb zGz_n_M%s3gj+WhR#o{5f}lh^#s$i+2ga_nf~aO?{;meiGL3< zYu69jgZ*dE=voZid*tx7^u$6oIwfzDPDz*w+7dB0H(#!vA$IMq(`=GzEgZtF@t64M zKN(BZb~KEY3zlOrnl>bRQ*gm1bXjou6KA}FR=Wzd<1j#KjB-hy9lYYYH~Trp|3x9h zsn*bmHs5m8WZ6xLio-9z4TMe2M!pJ*JT2Ob{ojo)p5;Z^uePi&A`^Oe{qEllHSG_k7^-EoiiUkgsW_91*X zqXnw$lvrfgXboYThdx~GVQ$}Pg0IVcw;J(Ft74PWq@}@U`mLjzYmySO1rXK#12-Je z32DQMmp=ki7Klrpo}q~|V&ljxuqQ6RToo@@=fwG%F^^3_H54ZxTxO|C(f;nBE+g<> z=_8b)b)a2%s%TtB;d1Zx!&KtR`xb@35^JPd={*HtbG=qF;!&LE+Y&6 z>mQPPQ%|2h?X$anZ6N|z<5|HhC0-2Gal1IfAESS9E5f_g93=RY8(uTs^3{8_p@#9n zfBzUQ(KJzNk$xa)4a#Jxy<*A^bb_u~tg1hrVPnKa$zHxKi zdpzm?MT}p^lG3XCw)JX$W#Bur3~7%ck&RvKkJ$NwSW}C<^*T0&rI3zc(!o 
z@b)ZeR5Y_}1Yh_^Iu4cSYis{awYySK=h63`@A&cKf(w{CS&)F9$2iY~w35N&KT{2* zkW3p_&yu>Pn-abo2<(Gl>Krw58C!aV31YfOwZ>I^sLNY={94*PVv?Aj<>R(VLY>|( z$Q66OrxaFg6V4E zoE9S6b(hqmjN|M-*KdbLtIWU`dap!XHOsQq>9q!o;1bD+e>~MJ?TZP zWa^`NGY;Cs_YM1A`KB6>H6j(UqiLzHW8tv4cVuDlUwn@pdr3Q|?%f&yW(FVKX>!Y5 z{YGrN1kUgGuZdk8{?{hTo`&8Q=>y)-en`Ic!A298EJ)*t+1|V@h!Kf5oDDnh3*=}b z!5jbjlDWb6eOq6&?0L2_y};G>_m?hQ&p*?*=KfDLHgn>gx7Y;Q@bK_&XLpGBLqeWO z*?fBgQN5k$ zVgHcDMBDrCUJtx6d-kL?v&X3AAHR4f9eWzg4893FRpztsEo{m2lfEo3yF~9s5UuhBB{|u2C z;HKa0IE?g4_H4XwuQnS|wAHJi_3)v7h6DZU+ly2x!K+r9#*5x?u49q8g0qBQ2L5+! zs8}c`l4jwTvlFp0RZ)!|MFv1x90Xe#8R$`$W0XJ06qm{mZU${`@6C^scEdv}@vGFZ z*6i%o{}JmDhrBQT=I)yu&@_W&jEzx%g9(pmGa+&Tk!x*#ko^QNefU4nK7Cj7=J^-L=ZF6PdVg_9M1J=b`j^>9X;2>echNo*=jWzJ zc>kmRse8q%-}Ym9V7Qn+ESKfK=llPTHiOSDuGD$qEhcaAuC^x@0&>=|0|hGMG5gV{ z!T)3P?K5i(l33EEVZzhUNp-yUsi)mdCt|tKBVo z#Q(r%q@pkF+iLDDj7II%>*T%XXr zdGn^*B&rs2-XPzxGY(!_synQl&3pZKZ8mevbHkX~gsBzmGhRx(B@g!PtY+)O{?qW3 zmwY+`Kb!po2BjPDnM%U0R~_OK#0dW)^Z*d{R*G6>U|Jawm~@;X){tAE z+Ur`t6JCOeH&P^5$_Ppbz<=bJbayuc{ zBlb45Ak^jJ@hYd7aO$!E)BAv1BJEL|UBZ_+INTh#{(<1ZnUA3k=1IpW{DOD1S3(j% z0oNkhc{8GG!^626@r#>lEUePfV~tz8sPe$VPRqsKMZLuF8em2DQ`A`9ksZt6;6{sb ztGrlFnU&(4@I<~WSs&H*_Vy5BEw(0b-JbSGsKH=E1RZg>hBQ8`NrX*xP6B+j%ro^U8;8m&gaMmCwyRi{(+V zub=RZ$uB4Xua4eF)7F}V#Kh_&F8zzoq21+CAL5vUppFnywzLZQ?%jQry`V?1lP&cX zlm1=8Edx^!8uLJ4ctsB#IWoVn{3V{HMw-^z>q{%UxyNBGHr}kKvS;NaMqXixp;=9E z8d~R=*g$epTCHvQ1Px+U~-^y-va0$X`fjlm#_Vl;P7Z=O;4Q@@dDs`cbaUii+> zlUWh;UFy781pJI>!-ERjdJ8M74=2uX6lN=hmZDHLySuyO_>qdcdVVG*$unUvcz=t5 zgj|DjoCz~1^Ag+BiZ-D`VUgtHtek^x4pX16+esA1&H5WzN9Y@mYlh1GgR2b+Q?*W_ zIN6f=EbEv1pYTR@F7$P!Q1^;MmPthNaK_gckq*l_%VLDu2qXr?u!o^(WZ~ei_hOrO zHHnk;cx%syig1@$h$(oJnIP*A#1rzO*q0-lZ|z$-NoET$$p8a0M1~>-`&Rk`d$5i( zD(t||HGDdl-abm>-S5`bHYV@F8H^KpO@Y8-K99+#d)OSPFRu7Kvp!e{lVWKPbl|h@ zq7ZS#Of59Vj?Fg$ZcUl)EHy$Euwk*O_6F$%<>EBdi!!{|0<+@pG)ujh>yeY#9|M_E z`Z)c+l-=Jf^l~A?<6g!B^)Z6~ctFn=u%vvW*hW~IZg&e7#YQIVY+MYP@5y3OL88~_ z=3cZF{h61)w!r)EWdU2&RtFbYcIy!Y1+4bn8Y%md?jqk!M3&Jy$7Rd~TA7Xjv!q#E 
zNot)x7Bzc*GN}7(JQAnXXR`9rvNq6R@Wdu`=sh35-guRKx43V3PQVqtXdR2EmO6lS z46Z4l^chyMy)kT*jRWn`Ie1dXrV7`xI<)bHO+`DgS8nk-?UoK^%q6E{dipV!O1NDV z8&=u+O-$_T@hj5myi}VmY8-Z=Q6jnjyGekJu$hf67cavwv?J+At&0A?8p1&%;M}m& zz>{9e&6{m2criPdD3n1aE?JZSfuFG3k~r6};KIS7V&ySjW6X@XBqhT?RyTKRWMwh9 zy?grnA2!@K^ne@##~66N0Um|oJK`}!4bi>(Wj?(}l_bNR-i7C&Z7!m8$I)iIGmLx@ z8FX%9@rew$-`Ys&ho}_V2w>$_w|rcz>ax1IN7iE)`2uwu8HRM40gUy2 zYd3we${nYr6;ID?*TeV|pM$omt*`^XnCY5iN^88N&*z|85O&1LT1_e-U}byTpG(eH z9qSN*7Hyonz&z6_@2`z)-i+p3mTBFMRaFgZ{?=V3>0+Np)klhaa44xsPOI$+!`@#z z+;er|>d^@1I)Lb|%bDy)of|mXdw#Q{Y9I1u>2Yspskf-#WcJgw{t5h5$9^_MdOvHwiltK_4whBCa)dtoptL60@fY*?_VBCkIAYjWp6g2B8*RHv9pSCT zEIKANYga|k?g-Acy5=}Y-B9Z<9>I_?MHg5c$J3F%T1A^cE3L^47DhsT`5CvC#c8Nq zNNGt9Oi6@h=ZveruBFdKbOMDszU^F3v2DT*(Q>=%h&hz->+_>UIG3KfB$DF&dPkb# zZzg|&&&8lyieE5F_e!oX{9;Puxh#u+NtoLNSg{AP4zEE{R;EaIC*`OnG%bG}c5KjN z$Jqq1AaX}@9nmwA=5%tX%q1}PYU*PLn8gTn7cxg*=JT{lEUd60Arhc2>rG`f8YY|f zcxBf+I5P3Iru<`UWt6U_*u6cb5bw+A*fiS6B^H8hYE49O*j|zG8SUNKo-T7twSsvS zC@Y+Yw*uvY@(UeY#aC2>%yNAQaDK6Kh2Mxh%~YGO@Am18z{_Qw-$-Lb*hZ%T4B z=TO_tTw5i;{o%|bDnijLStPNE2K|B4*(MvKrsqkfKlOV5s%>mG+AqrJE@n&c#r%-e zkjgrhy@Fqt@gqI3(*nN>&0ScIiUo%3e)>!4v3}^>&DRF4Ed!;pMiR^4=tnP`1n04E z(Hbg^a8EBx$UpfU)zd#QFreuTL4dUWt|l(_4s+edDTX7HswYKW?B|CgNCDYA4$yR0!#IQmr&jq_}04M26n2^}-s|PiB^j1+0Tpk&IuwH0Kp&pm^WTWNX!QVWlE5 zF?sc>px5k`y>)2r+z06M@c5FeItsOZ^@S7*P|DLGg`(S}_=LXJsE{FP483T1L3=M| zh*wt%C*ikEiCcM6zPPhC10ZF;r?^yFy7ku{Wv<$DLOah2d{wNF=XQBO%vTf4RH?hA zozLr+FJEpCTRJRf1x^jLMV>sc!YOHKph*;Xyk+u(_AEwkGL*RTbk3{(*rKV4+h)Dj z-J!Y<3U?3Bdh_$CgKSgJ&k-pZjj88}n$K-cvZDO|YB)C9&0>BBEBRnQT*7j;!Uq64 z)a0e2LC9qd`NqH_y=v+_%HfM`4GNO#agTQ%aw+y3XDKPgPNznfSgY$`3f_I zpH(F?>GKc4&AHV?wc6@Bbg|>?XV~m?E=x?|&DOf>O)b2V>g&tISaAi@aw5)f|LHsV zk$r18-iy2c{ZTPcufyQ>)bYTEOtpU$c5zd+D$aGMA0aPX6Klgv`P}(-XMK$5-VU&B z-7!+*QVKD1hyWm%mN+ylCCzf_hXk&Ssji3Xh}U{I-`r@x--%2KOL8>9WsO&vWAt_G z{ifINN^i!Lql%+$HBpS7QQDlT*Le@&=c&&~(poF)`(eeGlFHi$nd`kQxe@nc(V?3R z8m?^$(H=K(H)w5A&?~Fcjb8CAJX2acDop#$U0Pen<*+K_NCa2zsPLrkr%eARPo7YB z`yxbj##E&4{K|D-zw3VOMj~Le?%WxA 
zDEfS&Bo$*y9RVhONv>Zy7td3E{#gAd_`)9eMf#tMVZ)>Sg&>R6n0}Ql!cE;;G4B4y z%D)5oZw`|{$gu0wCfTw7JXK+MEvrKy%ullDWx9-`b(wTuEi>S&G_713H3oCkH?|Qw zt*l;?lST>E&p&cg4;HdUtEo{$CKL$1QOr^Lz;7np>q)sgt8V6HS(zl5G7=;9lBl-dI3xq z5qGK7h>U_9KhVO^?KNIw9`;#86~rlpT~E_Ymf>&jx8Tcr9CDFG%I!;!4B_sfA1yE{ zanXN;yR-M*%?`m77su7oqgb@$9401Hi61fn{16=~FLn0~`%qKaBSshT*TgCKbKen| zf$!PL5YTl?>KSj+;Pw(tfANfFZ-CZRMz{H9vP^;1XPy@L=A3j~LojLSU2R4)X+m*j zDT15doVM#V)Fe#L!vAKMEK1gSbLTF$gfOroGpIfU)(1kBgYjKc;iu$KKa%@Sql8Ss=aatd8&7L*OH$DWcE64l{oXUr-!|CXc;p>y!fQ@-m0K3 z0AgY7RPQc#oWq;yZ9Id2+8M|f7x!EB?UOJXi(-=4aIO=Yz>FWk62R6MF=v;h2n25MbPIUcPW$!qUh5xjg5^JA@MS zW}3DGf9$5SRRVVB_qy_>Tm|fY_>~zUw1t%rD*7yRXsSzM6iF~s8Q3o)LM90r`P=iS z*=bXs$;$;p2^8|;_`xcw)T(VIaicCj|BmwrLW7{V_w2CGd~oc#K!Cah__HFwh0$Ce zBhf+IpUA7&5)vnDHc*^foW6;sB&xUB9uh5m7+%9wyy{r<0KUDtu19rpPd&u~5#LY! zY|WEUOJe8RFR~$FEaYES;otiztKMkq6q#CGx(P@PO-)VV`_gO;34jcc06)J9pb=CI z=aA2L#^TRZ$nsgy`KY9A=xf%j`nG=(Zb)tI)#!tDZp1-C;<<3oiah$H{birNffBX7 zYQbw0F(T8latr)%D}G%MiNR~{fO5pLNtT=4}zF+KFg!W|^Kd4p~+x zrAb&@v1{d0xks`mnc06qu@2(B$c(7K)UtSy-T=VSQuEa5i?FQ1%8}i*jAMY8x=rpt zs$ORiRyq9ru;&!^a=*no8QJt(JbU~ z+JJ7^kl*fw&dlok;q{)8{q-H^XD18UCba2 z!|$eOKG3|#E4Lsh#sxx@28e$-N!071Z$Lv_9f-D@l+)3X$^e7OCI^t2$r?+MLN|qo zw96590n2hYb#oFW2%+Nvj04%>J@x@_r-NY0P=g#HrX7i?l1i8c&fu?WH|@F|J*PoF z4H+y6goUAjDrYkbzqyzbWY+fU>@Iph6oN{UCYnh_T;(ES_BY^ob=KdQ*tYdpu03td zsQx%fC|dG+6D`G)`lUbY_UIV^tb<8`!U!K4RZYOiK_9_RPi;{zTb4W;Q| zJYke|RfD1F;<-qKB|}XUbYSC8CVIp))7v>w=hGo!CKh4 z{>u#j#0vXO_W@3Sc`C!j{Ve%AHWylF2s^uR`lQMVwo*4|O+tq22R*CY`}W$1YeiWL zHGdX2-0=2G>(4w^)qwh~&kbh;!-U(~;~;mdHcsjmUCfj9Y9#Iw%-kY*md zy7>?F=>O93`MBidqPfmE0a;nITmOEwv&wJN_7D`5@m>3(K-qCRmc zR{;2BTp5Ib{#R7)OP2pavd`9_mSSx*fIVvcyNF>ARK1?7aK$MEls@^l1x>So$=zo; z`{duIyOygQ!9l*2^M@TphbmZTF{QQ20a*4ZvVCbYlExzMl8xNmWu@L~9Q{9Sjy;w! 
zP6$DG>&HwKspEqWWUb&F_2nTJezg^sDeN7Tr(i=d2uK7B;{Jb)xYNv$1D-BAB{A;P%D*~V=} z>dSx6ADp5DcO9IFKe#upu;G9BiYp^=UQ>zek;({H`-A8mIg*P@=tZ89;p_KA2u%&D9NO)3g%4b_k^H7 zbLz+|76HRTZ5Rv*NGfov$N-W2#^xrCb&TH8=E8_8ptJ|5w-sBYHsoPlMF-ZC0Q{E8 z{8DxSm0}ojF?IyzoU!+xwTxFHFM|&DY2=qbasMtUy$WJrV#0t}u7axO}FaY|2(2C{g>nyQ7e9!T)Pf-8-m57A9{|wn92}*yM zq;FaN51r36HN_60I%5b^@x6{MtY82@cK+pu{?lNl6+j3eeL(9H?X9_6eu#0V_e}Xu z)yCsTW?oWnMN}~crJcQcP&gpmSSIW!_TNU0;Mso`XdZl77q8L!cZ46i@Xx6u|Gzp` z|A)8x3n6#Q32W5w+`2iegSVi-({my#O*UD1#-X>B|*@1MzvTx=wLj( z*&y<@Y_8RiI17nT2bt-W;r~%qG`h97BCtzcNj5L;ei#&J6mjulX-U4|c^5QPKp<15 zxSUSw0y^yA*=`Hl+}=LGuKwNON#94ST^IH!!0ebQ>jL~+&bwxibALcy6r_39sIqPp z$p9;zx$2x7sZUB%a3_M+Gw8)-UA|6F^V+w-sQ0&9Qj8$I?1Potioeu;=nGyFOa)+XXV6*XS9O=1_I~psV~ZEUf4F@te^0``;6-$V7Yg+>dt~Mcp+>G_ZJhog@_y8@phz1MWCGW!m z_^3?JsJNj50#Il1mc8Vzt`NKH?-p6R`kI>adh!xv*{6D*2530uy(ATM$UB1a=57ZD zW!+)9G<%l{dRtY~s+Ne^X%^=LCwr77Nb&SeeNYN#QMT~5xB6?nBqdmRu>)?qo8)aF zzPQihPw3`B2xJF$^5L_rlp#Ye8YmsI9`M1E{U_jk3tOQ-UW+AS5_VO}V=H$cwjuNQ zY5Gwn>6I~3ttD)Q?w1#K{CW{u(jOD4pIktfc=vpMM|7PV0_GVkf|nP1@%L)L%d9Hj z&>KP3PX%npi~p`^`oJplh$8nXZ9Mji|A@fVuoZP*1Dn_gXW#E*`&T*942CiMl8`bN z_drVDYFF9MqZm6hk_%b8a*nyw z_>?NF4@po#DEX9ux$OZYx_5P?=z=npH>?kOUeW#3W~XKRmm1c2$6#)UkZTd_g*7!J z^8r3+2}Sc7W6Ebp%@n3Qy0opTOqf(ysZrGF4W?kry_8?Os|Se+G(S5{GAO<_Ci{J4 zA}BDR8Zt9rRBO8Ttpw7njj_^27bZ)*z+0#I$e!D`qpn_p*gr7`MS7u1#K;~hiLrMY z`flg@2ILuX#QH~2V|GU{B_SUuOoV+<;tH#z@mWbt^s<3j(n7>cV9ia^RSZ3cMS)@~ zouT>I1sf?t@?7B8lxj1@{^x` z(MgH#jjFZ1@|i8f#G?p&^XPhN?QVBiSb|QwLVgU0`gSOhp-LKw_ zibjJ6I zq$PnFVTCT(wbs&5Gl1H4Rf|#xoop^B6w5HC=OZZM5o(u}W3PQ$ zg^^hm+M=+s;9@}XQ+KsyuH8F;Rma=ayTu#Iyskjrp+-#Hjm}IU_Wazf&fZgxyhsS* z#5Q859@`Wae;j)Fm}959tjq6Y3KG@sjGUX%8dEwI2f2>#O5UenWmQBP2ZmnALr4`o z9j4&b&g6MyVJ9jC{>`q%wCLV(LUQDS6oty!xcEgFEI~AQJX(E!-o2sH8!&DAd3EVw z16pN@`foBdtA%wmcb_neujIX7B1A`t>KWC!{21TA$g-g|L&fAe`d^`+Jl%^KnGOgV#27gUZJe4J>ytl*I~Z(XR@Gg?hr6b^eL%2+U=zmr^YTfi>4 zY#;~U@m%H3;M-&6iPn0>_wR65iVm{Q73!fs@c6rpTJ0`$J#<`twX?e@H7z1f#XiI{ zywuDz+ju__04=rEI;gy|`niZQMxDj;Sg)0cRh4H_K-#08C5|?4hL*@62kq`B3na9A 
z`;&20Nsh~Ixu1qL%}jJA+)yAODA-$V0M!i9JFOCOkU{@}EwylfO7P%O2|h)y`z*Kz zd;iVF6gzXy`Kyv|h^*|eo6Yl5CH;~qYW>k|u}l3;-F{+!rn;5}gGsA2u!4D*b>6uR zAUS?UhXVpR+@K8h+%FLAbt++f%a;$WQ+l$Cf2qLmwYELUTykG1eC5ULHToqvbE~NE z=guQtT^I_h$#hbg1S8q}IEIe}5blpzV zA%fn|b5r!6eXSLK{5Ij?=J*}Y<*lCX`&T|+7#MGCXsyLrgwRp=p{mWJ)zva5)XpvC zrv-Ok4Czpsa&FM*`W{40+s*;6$gXs^sfVwb{xaxYoP{IibsEZqV@a#gUn1 zB(lH1y4s;JuK><%$WCH|T}mUT{(kTj8or1jznf4}$hd+zzItssY? znS#m$+IGh&xxAMsy+%a9$ISahf$~P@I5O*8U1#rh-Lw$*n7E?B+}AI4@L0yN{UbQc zh6i-LO?X??LN+rvvw62SsC!6`5gfCQdGOwLcm7V z#@yK}&Vj+i%l}k{h@A!$P;+N$7aV_lB`{i_+Q!W98LeQe2kDxxpr*P+R|+I8pT0qg z9E@yEYeshH&$rj@r!os29SDe(9Ux07m{+SRPq2U>yIHHdPnA_!UKC~eNq%J%B-m%7zP|uik za)IgIj|=0TooB`=2tmmp&$AbGLNoyjFq!Cu;o98lrFMIQ_0rCb>kbY@cy1xXbf}M6 zWIvC8;=)POw>)p5y(oeTTI_6sa)WuqwOV#7l}@84(-6hUd)^b|dT4@3sTb+Rim&2h z<`QA_atQBYEGsYg4HWqYSJ-B6qVZn4b}gIrt%b0PBc`e-@-4ky$VH7y!*O1|Ga*Ws zgnd5(Ra%iUd4nzU-$7j>1JlDGH6`YZJm{s-CW#MqyL_}o)0cw}jkNsCtukb8|5)#z zyokRps!uOuvIxCzp<>|*GZ&qaz4K$I=@R5!y{^>(LY;oUYt!DNK=6Fq5^|_=6uKiO z8q+SLhF}dv!<`M5+!}&w_nmRsgE}uPbsG+hoxNSm$5bwp-@q^rjc^C9HbwW8J0h+Z zKh$C8HgM?tp5@9rZ`pzji0D|2Rlh0Or19Z1^#)quEQ;-l8D#cNqx${# zk6bM6GZQ*Ya*`BTrO6gstoa}*-A$(=-6`eya(>0e*1+i&tp(>0@YZA}5g)!Vzu50! 
zB_8#;;ZGTIs)I&@Y-mwD1t%zqmArs7UhC${dbSQ3m<910N2i-rI2W zQD>R!T7jArN?(x<$cw#Z_&I9U(1C)eMI6vGkFH2O@(Do~V_&G>F}28I?~KFCtLXBz zJYmf!mW46aMITUT38@mHb#^aEDkKgTDps?Eu2ls*aP0p6Fb{Eqaer-u?vI7xZ8*TtK zuFV@Hjm2IfakxdTA{3tnecn^s44@|{M*zb}yV7haDyWE>JywXzoOi6=d7^62!e@%; zVmph=sP@NDRWACl!xU7n5;;)YI@Hl(tt39{cMNy)@*mY6i0Tn*7K>i|Oa=mj`Sh4w zoPS2MYdcCDlLYF)5(^^K^JmQO0kY@A~ih6oiyUQ2d3`8|Q{w|Pn^9Q`U`lH9P zi(TZEl@_{c&(c-5%)#sa4KDF1#q6labY8~^YtfGac{8M|;v%ZLq>EWvB`WTliOQl| zKX;ib95bMS&PH!wdmpd7Wg(1jzK(Ct#z~D2HJkHze>@c*l~w%tW#$=`8lO6$`i?SY z6&ZhHoW6>GP}O<^8bjBJ<^+vQ^BghG`cT5XqJois?3}80J&mgCX`lmMD~3!kHyu6yezd{IxGvU8K zk0-Ge)<~5XhxfTZ`JmI=4A?=;yhxo*>_ox6(dVPi;p`@s)gP+umLdui`a6_^ide6N z&BWiEGl)^aOTEl|yDM!#4G8+0EbAUD9?+0EsckV_n~`~E#Zp|Yv9_t7V=%_@E`Loy ztHNY+I|Ousf^Nrxi^!clK$p ze4+109T1r{YI{CkH)vuX+GOJ2ZA+RGZ$fwnZ%w{_&tk1PnSW7UeB&nronoD>Kx8Jo znH1tDS)23yQ)Y8r40X?1zP3m2Qfc)n)HYwPr_%&tY-ZR!Fl*y7vT0%NM*)eiBZ^yp zRG!nxA@JfysC;;%sJB>NSGs=Gpk-gi&xW~nnW3AG(J3&eOxLDy|LPL@QM!R`hv1>S zqnxA;J^9BnxfJ1S{76vG5GtGl=oa|T%`NonJeDhiP0}|#+DCvCv-&)#*8v_a(tV>Q z0>xu zyWauN7x%+8jnuD9Dd!|$(JsP|L<@H9i&uSb8!4tzsMXoo8^NnrBk`C~rf?jzEa9;&s~=OaC(CtHD?}nK z_T<|#3dT{nQs1&{y!|htHpJM5F{ZT<8-j>1JoP+(AVuCbGG#mM1o=rUoN#I!+T(G@QgW2fD7zVsHWRKk~~C3l=^R_o*zGC_PFNn??el7 zBDf)oxy43wnCO!y$bZS4^wI7$;~exo_J^RJ!>#nI?)dQ8`yTxl!nGd5{|6AY|Av0S z7+FRi9xe>UfnbM=?n~S2Gh_pRNuFm=U?@Ws4$z zGH6i3kV;-jI)Kf`+nL9j0Ge4}CMON=2niW_c~y*77C1N^IWjrYpP^jojFou$^l2Uf zL4Gd(p9^Fv02hcYIb3@Y>jVC{%D1COUisT&ijx7>D`_qcG|E&v<@0L*x!rdffj${b zIe2uOh%3AtbO$YPKr_u>z*TeuLNvz#X5G72KsSbb) zg$6hW?crh3N%!FukCwE(ab9_}h#By&4a!bRFHwF-BKI_~#>i8Yaq#KJZ8DY^_Nw{)Ri(bB?{SocBxo#>MwZ21Ir433 zG0~NNBvhXiMSW%#FcP6?xK#HzM#v`T9D!IEoP*QTBJTW>RD3mmH8*I`|J-u5m|)}E zqxj)cM|MT`?`LE_A3*8@{et$k-PvW8IP{J@2zQC|>NkK2V}O?tMGY~`R8VK*CrRI_ z#nT2isN3`Qr^=JaDZl_U%<62tEg@l8TE4LyswwMN{*sZEwLqj(btRO0&cW8H@TFDT z?QzJO#reaH6j~L!x+;aEqGboA&>4es4)kVaqY*%nxFBcH;4WtqE&Gc8rj{$$X+ogu z2(ax#O3B;`Y@kPBzO|jxzbCBIaz1hl$>oaHx4EOUc|I0g*w$Db?ulDb7%HS3 z3ijkZD9Cxj?}?y&o^zPrQg_%6WULo6yo`RTX-Q5ynY%K4}+Px|X 
zK2hxe)y~0$3xG;8xAGuGVU$o)(X6X!rqaszjjs@x6dPZVdvDje_0Mm(j%H zr`=wG;07|3LBu8Uib3HJkGL7XMOE$;B|P@~U@72Has&^ja)q0P3@G=oqiYwEtU$9U z;D;Wu_r@hD-Iv3`mRTJ=cVpkuZ?buLW~SM9=#&TB7JT{gCAR>Gewu zCe(tX`7z+OJNqRk?-)yK==4NB=@!1N-Sqb}Pet>RP;SwN-VOK59C-XpY2FNS28R1; zK@`HcbnnEh=0QE|MqiJJL*CbL>*|(P#pL#UtC>#Gf7s-xK`_vZmMVF0@?;dl63>wp zQ#@vywp;Xn_`l5=jh1j zSDJ@BpgK_z*iDmk*3tPiFgQp`J=jxLq*?FD*8z*sMaHF%T#-B3z@9JMDlsuJ{?wIf z(tw)q5+E#J?aXuU*A{6)z@9zDWAtbOl^9{))?+bR+qs>Z1aMsorW0MaY(|bk&A0Qo z9S-YL>~JsxLk=|6@1We7zYmzV8cGQG&dHN^Zu{zXba|O%{Mv)%w#4xVaJF9Zk)Tpl ze>sORM-b3#9YgxU8L)9+W1!DV@k&B#$G9+M(Roj+zS&Gf>N8RjvQ>h^D1a@rcT|eb zE)v6w2i0VTJ*|2U)o6%~2)*aw0DJ^>@}%vn0^>YM)QRE^TONK$9oMkI7e1R4PaD>Y zoWppb1Hk`DBbyHmkl?5RyiNN3BWG zw;clVBRMwo8~yKlB4kI?pIfF!i=Zx;UL4BguhOZ3-$DG zIJn?o{P3@H=WJP1Ykx}dIl}FZ6IKO8ZPcb4$b6C?Vi2Cj-7r@zRPg)`SW)@hJdeMW zvU#r_-@<;6QF>PC6o$&cgF9^-j*xrgxG zJ>uT6=9cr!_?LRIDJcnbr0nM?XSRLweG+#@g=e(NLbv30=R%i6g~+OoIOQr=#9pu} z&Wu`FvcmIWzo4=xYU|5gOrc-0tnazk%9hg(^ntk3y_CC~Flf}uu&p}ee3xBEJx3uW z#eX%f4X`LjxHpL-bgUAK?Vq>+ePJDy_v?zs-n;?$kXTp9+T=r^7DFXc-`u=#U_ckp z9X4i_aQf=Z4~#q4N94<|NzkK5-0oL6qEy*sl9mF4R%=_WKH8;14K?C->}RFzFk$IZ z3rV{Xx%L;RL|Z5ba$k81JsSt_8xFWCDDaufBqsNTt+xCvm(S_kHpP{vJnuT(Mf?C8 z3xgBh$$WnJ)b5^R-#s4HI};A<#Y+kw#@m0g=RFg>A?{|MZuY7(%Mz~7*%K{u`NMKr z!h>us$os70P3?9o;U&&iXHWUO*Ut?6IRa8XcS{QqhT3?2tXqOGZK08P{ozBkx?OWc z`emH!sd=`xe*46iML$J?C(v7b@XbEIz;1mmax`0IRmkOgBpH)JPO14oH#QTxZ>M8tRoaWRgo31LN)z}Q}&Ue(52E1vRTjphaLj+)k}OoOUHnVxQm{mhtK zrkcB@ByXIRmEtv9TD7a*`@mY_uyk>E!etViFz&9PQjBqdj^?~&-XiA%!cEfH(- z0V)P3(+>ms&8~z-yIzq4Z+SxD;gQ4voTPuv{ui-9vJqjG`St=wrn{BW(sxa!XOXbW zipI@X-s0zHFYlTx$3$j+R?F}rbpKS{Yq`V|aorZ%k=Jv1`M$35kJi>ZOHJW{qL-$fGvzS>y_qU|# zifi57JSX302(oo)rUF}E#K_3lbv;kz+IAXeni(Ftmc*VrB9O!H2u$dtwhI2i4Y4oH zSyj*7t$IKCyPeQ%2*pGR7=cMv$HB)od-_*wZ=f%K_VG3z@+$P3jdbkvy_MZWj=hd^ zmPZeUf7&ljeic5q_+(vZzoekjKdWEj@=R>Om)MJy@7y~9+qC}UO@?5gX4a7QJgq{4 zG;&(D?Md5eU_B0)6OFYy^NA3k?hlVPb2aewt>Iom5Q_Bz1MA*1v+P!C5`9HUMHjNY0Gee(+7C0pRyjkS zrgnN+8hG9%H`=| 
zw!fs6JLnHvz5UbcA3Pc#J6X_Y?^~q)a8r0Vxm`GC0PgD#H2r3bQKt>)v$G++YHIR$ zjF)*xYJ+)M8JTK--|AwJOSur(VIa;F_+hwf z?0KL8G>2Ksj~}@}Uer@&SY(<95UUW2>7nN6@OSUDODr1_Xhf5sL%!I9^FBa66Pd{w zwAYVp7idF}b}l%v@w?sKDG$HAb`j3N7H>Lp|7J1tmZ_Tm1<^D3>r>0IE=_Fl#AsPP zmGieOj?W4ml?7C~J!X#*r`oB@k8cb&#asSRmz z)O1I)v~JsM6`cWL-~ZXk*`)Rgv_!0ExY~QQHZ|vD`)fTHFJP};fK2!-%VjzMajFQ+ zSGQUGp3BNBkHV?Xiz77C0=-prdabCTfu92pg!nt0wA!Dc`C^wLd5V-FE8C7t#Htb9 zWZvL0AT)(0#Cz$vju9ppPa4I5YNhe%Dkp zgUL&}TT1*M3&F8oci$%v&-BqJIBU8%z`H;Kajv(+!>Yp0Rn@aax%xl*$rW>_EJQ?i zd`@)Ybz)>`$-H%rl4Hc_tbiPT)fQD~`*!hP5zJg+$yCRJf@D``y}G}yI*tZ)l?|9G zy_4n566Z}2y!(FW>y)^b(2sK^SR?YMJHFv!)3+EHS1Fn@WuuJ59@eWNgOI0tO#})FG zaNjU0iISGEbD~TXJn;DEz|pzLPMp!aPkYjH39bHC?%p2H(a**s?#tuFv{R+0G4c@Y zZJoGMQy0w%3(wv}+&16su^LWRK)Y@C9R6DJawn+zsNFA`R^uqL!Pv+G64Nm3fF)ae zsLu@bn*N)CA@eGq=Iz_Jb1jLD!wp*f4G63x-4Us71_l?P5*n7{DR>o~63}CU=NLiR zH1swOXMAu)dLj?zvBaU3NFf(OvJ)`mOZ`b3vB+29kh;pE)8lC zV!y0WbVy*%wS3eF)w6O@HLxx}JxAcu61B=voxs;fS^)3G;Kx_}5}50r^$TI5-Dz1a zS{If#hw6KWWzE8<&z>(Qy37Cc?iKBMlYJH;c~!XW6fi88z4j8TO8*;sNh8ys;6}2P zj%dC%x&Ji;DOq*&$OEA6j%PE-iLHbaR#;Y&ouRn5SO)Jh56-#OMl7Tfb4rNEr+mpK|q|nO$0ISc`kvsoi^sQQDOhES?QhR$+)u~7M66+bf&-Wb1QYmR{!v2 z$%66wCG<~tg^L8&S{_eB*9zh{xs_L0Z2Z-DX4KsoeK%}Taw{)g%M+weu>_<@@^A^p ztc$&B5FHFu<+pO|zN&pRv+7sA)q0q3eWKFoW8Qs-Qt5%cS|jl3@6Ko4;A$s2&wim& z4M(dR^__d|b3$c6&v9Z$CdDcWtgAP1z;?ih+tZKU{9WO0l9P|;TNx*^#)s(3()YMV2iDNZeU*!= zofqWTWWDS6e@4L70BZZKNkGYwXJf$mp1XLl>X*P?j@TiH zbQd6fl4zdSNqvWM3xF&^)lE`hCEZQn3J=(~lTWe;19c6+qHO_2ydU_)FEsVR`3Ju! 
zmQvv-PoFNZsH-|hw=y|+c<>AVeRb{Czs%mFepZn`YhGZ;vz1Cd zne#R&1SLBEqNqGBpxiD#(n`r;iG%t&yVRwX5%0Hof2CY_4bPxxxfZdTLJ=JODZfAn z3ja-^O$t0g(JF&FWDJSUpQB(@&UpZ6?7UCm|r8JD!-yh{& z&2t{8%3-u~W{Pko06S5x)VRDQQK1f0TRF>X#U!tERk=VNr*Sr7A&MlgZe_e6Vs5*f`ch@|+T4knt%EmI^j@Pcnskz)aVN=o}jjKhwf;o2= zn>An6bszi++W-WO}yPc9%1T6Y)Z z#nIS2^{76otrG0cNXJ0CO;L);en~5$qaMZ=Zk!smO zhoS4}a|D02q{fiQ5-nlZ(?h$CK*q3*6-kN|Fvi;K6|Q-Q_z6p4#_`0JQ7Zi_hvWyO zs?C>8vS)v$g5P=urJ(3~-jJLGsu9lys0oxIiSe}qIbt({*~tEN;lfkNZWXcMB^PXg z70z*-$;h&oo|71H#vJG-6~0Wvh3YEdU-RwPR5Fz$!K>nSZkdYm?tHA1Su*Jk^{lVe z9OQh5MpxrRk68!AF2$qz6Fxbfg;~gH7A!YuMHHqI3hVZ9ZoM|K4>k}_wZPzka@!*) z1`3m)O5{Yx{o@r1XovCioUGIHOU0q=JgYd2+p4pyFP!UvfJ$=Ay<4WEmebOGi`r(` z06ky`J0+U7jOQ@-7eDNSf^q%mOlnw_SDVSge9zx%h!<7Kq~L)WLc5;kxTsiski zLP=i_DrM6u=X!~?8#U4622Q={+00qBvcw#AMUL47@$eh9Y^N!FPJ#Ia;lkJ=wG|vV zNw^2B0*5C~?Iw>nh!FW}lNB4wErTN4HgV;I=xFYhJ0h~FT|`7vR^gL6F>@6(*?X82 zUzMMTkfkihbc;sl(Xbu4p`EZ<>qS9VT!USFiiy0rnt4wsRgB9&(XeD3$R!GFK!J0E zw97|Vw*N}yR=MdDf@S)(OgPEdM7y@)={#a9y!l4_TMrA}cGy$HI~6tc3N zT_v-EDP9?%JG*+cr0Hxk_}H8R>gO;To%6OXyD7Kr40+UVf*yP>q9zBgU|(6)z*Grq z&8f-@?HAFxdRI(|TQ(xJnporeE;5~U<8PJlWIz)1W57)0{no+tWkwUi!b;$dCRM`6 z1;<;2>?Mt<2AY})658VsquYpa#1OfGiH?@_@@_J_Qd_s1r?u~UZ->NrZqq@edD__m z3yGNu$-K5_q549u9@>wtYFCY^Pn!>dIjZe1n;-iYFfr&@3pZDk4l|ztFlhYdlh>&&#(Ei2 zppt})c~(ozXtfy=Cz3LKQ`JKuz7%BQ+9`1k_vu_KIUf>T`>C}9f!uTmO>6XmXM^Hm zUYy98c}sZfOuP+_D4J;xx2o&SuN%>^qYJ~-6`7N1&>~BVD8~*iWmO?;r-%m6qF3GLn!k4FRK@EX0_UwTIn@97zy>tgGSF6_d2qdj^5@NFkcqf8LRvz zEK?95+`Ghswy~*!rxcfKPE?~9$VE_Pvz0sx%~%<1>uIKlL1JXsjYVW7@frHl%D6Bo zY4=B0C$6q8t1x9Duk)F`tmvDL$Wf1a$p=NarKicM4$mzJV=fkfyOvxA{bnY_M0Uy8 zoO?oWeV^+3t?=2v^cmm@Ru(hsHC0rw6*icK(X#G1cqc)ucln-Yg;w)u$A-SD=G%#l ztrH)Dt2f8;A5!O$!pL=)mS>&fEkH@dK5Xm&Hha-vfwSt5{{(NY@1ebgZjoHZwa1{WDWfe6qP`0i~DWH?lDjBRV>cyV|3z2@_S5gGYX`bi&$io@_~=N zv;@sIy1w+JZT#og-?6uE*N4Nej{V|MZoivUs9!8xZE)JDPK~s+j-08raok%UjgwH7 zmsba_@C}&0@Y+Z2T9CoW(T#@VQv7h@j!sdJYsQyNGto4m!=d{PDDR&_O-chHwIQLC z@<1d3WEFvVIs+0{Fj`oIok$98b3mcBDl>X^t{oApzKY|FXL8u3XwD7okjl5r1G5 
zr3cqEn@Pb>fKfMaL?I4WpSvm83qX?Jb9)le;jL4Hu?siW98r8j~{HjK6a&8}glRI!H( zz-&2e;z^*2RPY;+vMC|AKMRD7q3V{w?b803+7!Vypy(5$42C%-c>=aq6(jr%Zw`y%m%7s%!YO0DdiYvQN&wIx$?E+ZL8{3@Pex<$&4LnOl zKVv->yul@PWAMs2&kj@D7tRY1(PHi1pUj8(142*>`xVpey&5oPl=|6|PfC$sZ7i1v{>zyjiv3Hyg)-K%VprdXfl0icc^YXsR` z=VL_`DEm}8Sk2cdp4AM5s)zVH4QUlLy2qY;Q;l9PVXpl>md*b6qC;3EG@!H?fxi>r z-ms#^npKm7aQKOCwEt>->ci@w*#~r(x^n*c4E6J1U1%YYJbtp9P>(MKM&k6ue%5A# z^{JREIGspO*~2kDl`u#!;gbIem=;YSs>}daam6>4{AAXRtW7D`^bo+39=tUSCz|W= zNyjGQDm4&ICPak7%=T)pAk}52N{gdv>aRAk`*M!W!8kkwOBosMt4&4E%ylk|t*8YU zn>;?4Mu1|imSN_138)-85;D8@aMq z2dND~5hc?IBJBXvHDM;XDlz=(>y>YjZ`{=VlPY$yvz}I(y>oi9Y@S38P;G4Dkv}@3 zXy&H-3FpwO(z}C};eW?=I!WTk?BBDQ?t`PHUl}*m%=6M9<27QuRd4RPRNd`DDUEqy zM@1VIZOId&<-((gn}1F8wmqir>q;0A1s5@^q?&A+G|jA4m`qZ)paOlZO*T!N^lWBi z&u$@Kxu4+HoiIs`4UEoOc@lxOx6=JJAHmYvhGu$gwoh}(y3r-rd10si8;m#c`00z< zhTdjwrCLIYcJxTH%;}&VnN->kAOCGPU2LY=H3s3$0?vjgQ2nmjY5|fjYk8;m{NG(4 zT}`xuf}-o|FUzs{meW&Lt**<4ruievmxGdbo?W)`Vz+SX%L zzu*|Mo+I)3`q0H0E$4gtL#M7j7FziT`mD8o=`?h1LvrOABO@d4ej@xAhRHLG2c>jBx7+)nF)dHrN3XB=EI-Xr@g<9Sjm8%b50E3#TJG?JtRlkknFBCQ%{ z_y_j*e1=h4-u|(}RWy1CxkwGaY%7b55KAQr&iF6zu2|s0B(AGA~IQx#g{KNIjvwV{jFoOQ&7xon&r>_RCf7m2{%8Z5wrI z72O-}xp$BtEw=n%nISocBp8EjH=C4pfMM+^1Y?Dinv(K9(S?_rf*S3%Pf!eK3=#qE zTZBpZ!e9rR5~#9jSXD&hK@|p*dj{QFUA;+B!EI1;*C6J8Jny8;`L6%000!Z@Z$Wvd z@5D8&tUl*_rV1(tHh)+VRTU5lnKC-fCt}9?-u9o-qVly42sSR7`UfDZ@+Sh1A4eXY z^fjOMg&HdM@uiA}JH@aU;8T}*W;Jp}xias5x2FBp>MzM6JVpFXzwmV0&h}OehsEkb zONMf9yn~IyJ8rxq?gW3-9slg1-Eqq< zaXoQkzRYkeW04_eZe3kiXIiI5+(vF$8Dy*qWx{0VEU!T1?htBA85tu8WXqpbV z>9ItSvXiZJe50hc?Szc1sfsFFYy)0l<|tCXeb`ah)4NoYuT8)m&bU?kjE6*0b9qb5 zi9J2ku@3$gl&j49ruw0cM`c~u$*cVQU^8ck6r9|EKhM|qAif{M6Y!CdDmKJRW}`y& zgx=KEF(!zIh48(LmdBy24EA~;cKxt~lsq|Ec*gVbPOQi)nVGeeRy$%xVGYr!-4{*4 zD=~{=3D22$NeQco1FKTVY)ibiQq8~{MYkvvK|wCRuia14{6}??d961f8?V~Loc!Bh zQu(Sh}RlVn4+H!cUB(n(Yn}=9? 
zb)4W;+;!@4-Zd~czWLenkjBQ1@h5w?RB?FoZ0l|{Y^AstPFPGsSw-6-1p55*@RwV# zuaOy!5frRVL}n#3Ime~M^?JuB)1!^ z;2v**y%L%crR#JZP}#`-{2J!HUrXgkpK1hmveUG+?`7-q(25lpo%eR_Yhu}!Wh45Dq9TJ5Y5T2K^ zbq?bsSSI-CAo1!^35u<95JEZsMKMCU+kDwI7&|6(zRH>e3$_ zYU>`gl03Fq`B_EFF*bSSaZAlV%J6FF&+|qns@=IJu>!+C<0kWf?D}DsMvorDw@;0< zjhSkP8nn;5%ICJc@+_x2J7-}nzkIAEn4VB_8eFpr6kcE}1BQlw?8Q3KTbGUWvFfyU znvRsemhDER%DWEdGP3RWM{YPl#9fkDnrf2+fKy#Ld<7;D8B{{Gt*3nIMI-8=wWL3qd5HE7 zIFf??TxPk=bX}95PXaayP%FxyjFXQln(hR#Maz?vf_~f)2gGsqJE2qGiqeZ`W(P6$ zr3HID#M?zC+j44K>#qXyppnJPYaA=BxygB^w6UqDGt+(6yjjK6e$EHcA@gJNFRD3o z`=A3gR*Ew|ks5lb>ErXCPw0zK{qvB&ZF*~|h$T9Mcqg_C6*XejW)6Vaj zEQo?ebwmpkGG(6EaQL2`X4`Hr>($;$lHtm=0jHwO&jVsQCkIPD>yt9bubKT7J1dcc zF?TF}Hl?i^q*N=GH(eRt+C->Y=O59s5{wBvKx=pKDjvSd&SKXq-C|1ukL!|$nY)GM zN#fPpM*xS6(Hd)ajvDaNJ*?)OQ*Pl1Y<;?6 zVvq0b0HlX<+&6V9Q0PI^mcA1=as}4^iMz;jGSHr6%cpQ%VK-AX(izY2SHy8iCx!Vl%H3tN8 z(goi#+D39e_JtWp0vaeIB@0a7`N^%rS8(a4Mp@?_A@PZl-~zxWcw}B)>wWl7C>!&2 zL*15TxZ|C@0rzdD+k*bG4@3UHY)nC3-s9bKAiahj^RQZK3L;F7MuV?ey(xamg--ff zr+DS}(AYajW`Q>Kr@5e0cOyold3;58_cqF5Jk-1KbC`ux?45~by#*_eqUfF;r*Gjt zCM7OpUYXO{T9=Fcc`KNdItRmVYiD{O&Ga0}o1QY)gT~}rh`LF&oOC*_HL#VLImZC= zHC^>Kr(;;fpbQV7=d-|to)C(4329vV;yh9Jh%fIZR!*~Xpe z$T6ytb64g*uWN=Nik!ey_1e0buSpr0{j&&*KiIWJ+u*-GQ91bmzQ~N{PrlXl12&)3 z9`5TXSu}q z-p`&Ct+Qz^D&*{438x=8z4hD|x6@^-!T#hJJBgU76c>`e0XDleWC0KK zAz0Ux`XuHEAMVCJ7jX4F`}YZHI}&L8gRVS*3k@2j(6mTWcO-nP6u-D3ukiIhCbXD$ zeFLXc#keT5(IDSCphDf^&a4toDD^)$8%~OSOcx(c{P)`}GNER5T_;pW2&D`tpV9%^N zDD?dM8bwj1IVRNK2U^L;aSu316TLV1a1)&F>*Rj9a{4pyGft_hY>GN2>4o;ikd?o1 zz}k)__6n#h&k8U(4g#WG8Ie_2aW*v6`$b5@W-i7ks_5=aJSi)%n~^xy_;@&c40WBw z&aANjeoZ`Wg9y3Wtp!I9>1Q%a)}^hIi#f|8)F1Oz!!c*usn=UXCnI>oLcc|!FB<;k zqyL|uw4=a2IJS>LpN-Pix>y}PZINu$zqj)TPZ6D_0f&iQ_PlZ{=H^$YL);iGP9^V$ zdj<6#W6`zU0kz}NwY?a0x3RM$k^*iK2Wt+58c`osESQgNNK&vyN@|}Z&9Xqm&vj9o z`i5m6;~;Xc>T4%Pe#LS~Hb3z`Z>kI6@1m+{I*2|tHuL`dksn-1u` zbToEx)Ld)eUF#gvg29^&h%%|=nqeB-`lDU`uD*xU_IU^q_vZUUlEE0f6U~NMzSsEJ z0h*n-#$P5mTW8zFai6z$VbwKM#Fz!)(>>6Kn=sO*!!lbbZjoW+C3yFub-OPdd?7Ad 
zKJR-GTj{k2_X71v9FIP#(9QnFqf#2%+kmp`Rc|@qMgb!pWmj6;6Q7#xVna@A+BC-%<4oRomz~OqsP_z zd%gOsz+1A04mD1P#Hct>M%5i7DWXk*ra@zySBgK~N|F0(Gnn`15I3#siu`3~q!X*f zAeaFZ7M3{ulc!s-&8`hnZIgMnzB}j}O1@PUX0FmxH?7+N3rh{WBeFehBlVqJU~clQ z_M{+t07)2)jrjOeGM8nQA~%-h*XoQVNOo6`aJL;-Qc`O0Zm?t6BWT`B6)kBI%e_E#z$;QB`@QBEMWUpNiI5qT$;eJsb zD5F^X7%bPc8K+S{Nv6C#(T3{Wi*fRCjr?jYegxYXXnRv0mjVNKJ@nsS<*Gq5uur90 z7>dvJ-$Y4_%XKum)pk`v@Zg9USr-vH35FXp7Gi_%nKtcSR6X{9#ztJsN8T|zC2DQJ0-Uul?eA-Y#N$us$DWz(_o)?B&KR0<7> zS&Psdt{^`({k5#qEi9&ZO3tQq&S*2Wb8hoKV3szVRHx|cZEz}tNiv16#?6PWq-_F* zB=htl29WW;JHzO<%m0rbH@tiIV^&Cd@6IDf`0)tB>RYknaPNaX;00hUW{kS2 z(OEQt5%TK9)G?WZJhW3cmT3! z7f}&*IE1nl5+BH8q;YgWVgkJ>eR}2ng-`RG28cm&zkK9m;6J!kIr_`N*@*ogR;P>v z`m?3^f2!M7wv#F-cB~KTUK-Ot3sUyI>F^Pvt`S;UHl{!ryAa*ZPyNWlk{Tz^^fwNa z0K$pb=vyqp@2nA9T){(afQN1b&Wz@L3rx~VKstEB?B&8dI{-Q9>>=7tUK$0zkA%Ds zOf6~X6biOgN)hVlef(Y|jN9Vz(sY&js%qq|p3RqgO$allUmLKnr@saut zYl;l%P|BO5k_TfXo$4z`xia~MtxAHrSxX1ycr4^gRZ%Q)iAW|Yy5c_BK^9Y&uJJPsnUH3nJQRacK)V$S0GRT~i z2Bq711GvcuE*!Pu^Mj4KQn%#ZF5!lX&yBRv<}}*j&pc^9d+e+-uT99ApF$Y@QUa9o z2>PrnXIrg7TP{MFJ_q>;ZeVAywDo`T=$!oEY;f}V04t=rI;_yLp}I%t=3WBTpVkhZ z-tO7FYUS~%VjGx{Xn)`4Y;rN3I%CUP^r91&Fh-JLlrhkiG)iGmQs_MoOX#LTpJ~4v zPzbwk?-}A37Xlc4n%YZ8hnhO1cYLDt<9L9!?^)wqhWanq8M7PA!@^An_iRLz;B?VgEv?Jk(-sHM>uY(y(1IzO5{ zrb#zU^ylG{*_Yqe*jYB&8-suNHP)7G=tckD|NI@nP&1eWiF{cZGXuU?Q(VH-{RZ$UxS%SydSsJo2=A<27vl zfR~I>mBPTK;&nir-2p*@Og47ZDJuFOO*2cUC=Y78OxaTU-LSs@A9 z2r@5$#Hve6{6O{Zhb3t{Sb58!B1}hD`mguQ`7A|Ou)6mr26Y0Nk%`pui+kUHck7SS zYw&Pv5^(>q;Q|$uH&d8;MrcJQXhx##xH&njOLBc>^)hwR&h;B_fT|NB5>9T2ikN%D>$D!@~Hzx7?dRHam{ zKJ#>t{qJyRz5Ms?B3HcDv!dg=65?rQs)-LxRBsxmY;+}%Y3VUs_;LytUlCQTdz`yYgy(DzZ`gJTIq(@az25+HiL|e4Ty;3i(jXs|K zMB~zwVE0Sg*>D#d0z}yi13|vePMgKiQJ19RW*hC%=@dK{vojQW1x=~%BZr>P^!C}@ z)g-D4|;5y-Wciz*n2y{?FcH-~(mw-gF^8+jEgb=c~PZ@gnk_E^}jJAN+pU;T$~NuOdA&iIg=>&b`ml_Gk-+sB(gegyH|>^$3_ zhMu2K5A48IO;1&Bo_5~#74e&|3CSe*4Qg3v%_Flpm>J;x0_yzoRt#@ZD-# z^R=%#zBA7TeLVs*KwS6t5@`cu&8ar;WnN|?0;Y`r!@fhMzPGkN*4m8hgI6-e+Bg_m zd0>ORBgH*xIYYAfVaiO>!c1nUa*ULCqF(XwD7{2( 
z;wDr-h}Un7LvB-B@V>biy{L;=*K8ON)f;V|V??YOVNX`Sg}9#vgB(vc!;Q*#jML~_ zAd3xKx;uuT^xwX48qS3qv{XVLcz)m+g!;{kQ~htVR-xi;m~A@-7)9@H8 z9^?L7#x3)M_@C8XgqPhg#4xl*VlKxqD4M;B4~ zc-9wv3Wx0aKp_&wD|6o3l-!M{xB9kfQvCg|Lfu?;!(PKzu&c>&^L&;sEO;{l0Me6A z+hvyX_!3_9rh+>1o~vk-Zid$&{y1|25?Pu_G1lV)1cTYjm)&>w&qmiRUd>@p#ic2HOZU^8Gz`A}Ky^Y(0lYEfBO~k0 z4G&u#5XvYm+@HwRLW*#r_5RJyh8z1+6D*ljPWLAeF>#ms@ag3;2ADG6@xsXo&Jj;B2IV%81Zo{Q50Mw_+nOP;Gv$kf9{qNbcxrKL> z;>B^I)iCeuPuw-jnZ5%f8KG;mQw3q0f&mAwnr}M{f~!l;0a{Pe}F{mBhKuoF1Wq+Y&C?3g&3SzB>tfVF^{SDLwb_ zgjPrK$#PQ%#~P#QM^KN^)O$yik3?Hr(hB{kM#tLT+%3}Jib2_bOpF?(-W$_UZGEVn zq|#6Zkm$Z}ElS|883WqbjF*+Qb#nzCjwlPyo#+YFt_ymGO%Vmg9=BoS# zgAsU#IXLgCT;xd5@f{D5Gw4A+uUof=N0p$NJIU2A^witl7;Sg%PoE4YWaX>*CPmw_ zsY65!;5Yck9t=t)C_X#`J2iyQBGvQ}swuDR?EfO_!_!{6m%z(9B*eFbAZa4~av=FQYhBUIb`pX2hgWTMRkil> zCfXYewt2RU+m=h;@!MMVIY!t z95ksTy{K5-j|DU!!Ov?Lz!zjCg%0dD~;i^5Zziew(ky}8kG$&wUu_&klt?U$qTNyOA9!Y*Z}8+_`&D<0!3_2Ev;Qmhm`K2BcI*iY&B z;5(}@atUkw{n2DH-p)=Acdfb@s)XV+4*i8b$^n?fj*Og*$+9nhdQPp8bQEAk6Ekx? z2*dsW5|69#e{!$`H(>%GV{oQaN}yD%JT|Kph*~`RYIYEA_O+q>Q{A3(-<0L4)xDp; zJ%VsbGkoNeTHeg~&{RE}9T&f{2t)vSL|%vH#B(eCUJabNu1+G@WU5v4lcZ>61V3RR z;~qq#>s|Ou=y1pG^$u~JG`8odZxCDb8(eUiifwmBB;te{-VT+S#Xq@LYBX!wByvxy zsK~!cnoi-7|DMVyvFX|Squqy6j1)y)?1^NODVc$m=Acp0kz+v#n8vb9cm5yp!YIiYH?J~m~!i=zV(s!4#}46hQ) z90spS3kn`9e#>){7XjIt96(1KW?*+|V)dPppHyVn?vOU-n^Tlk(uFgqkp*oCL)C$K zx2%SUa;ZGLoy8E+H6&d*-0lgY*V0r^(|<+ygVteR+*VgWCn>%^yply;#9}Gsw-B!3 zTJ)1NzZq1Ns%jVI65jN<1)(PX8mZCl9d_Z)p-PapFz=xq%S0L>l~`t%c8N3YQ2>Kl z!P9l8%33mE7XQ;Wu*O$2t_UrmGKqJos_z?5wv<7cOD*yDyAfs{hbZDFTH6DXtiQj>@j37+l7Azlz>-8@6B`9GF8!5h|lqf1^e=T0k{TDD7OeTw~ppn#*4sL;EINRGf$0q);2WL{-$6emdE0( zJDVX58P_uhIVMbMw2J0+H~|5#-*ViX-U>SCd@DqxGF46FD4bgOA}yeAPfPg`mRn8= z(j=+vER9DHB@uI00CWydgY~+IRR-{ap>0Xc-OBl(ZQQozVwq39O}7K`!t8)x_$^Qh z?4CHmUB`1vrKke8_0mJ)4Q8v6ih4LK@6Hk@(Hb3LtF(e1C{r7uLmgYx8sqMEefA^dQghpiT{8d6sRv=#*R6GCDH4uqJe2IoNTMI54ER_( zDKka)?S8{@*Om_*DfKoLxgs?CN4Vpm)7pI5=L#ZFLaW8qln;t}pvZpUb%sXz>e75-afwK=Fi8tx9zMBnyMtJ3q*}k_Np%-ngTfE7 
zIc*yLO&Pt|h+3vS36TPAdf?T`Wn`mMfE32$$7{C*SSJlj$Ma>=vqkr74D=n9;^fJ0 zrTgotbST>lk-pz6qn^5Q=3cMv|C}f3qN0Gt)F&y-YoB8J{| z56zOSP#ln8R1;WSm$pZ5TMr28N#X;7lA+FpqXyizg&ETZJ9rImWE}p?Js+|Ieac$0 zRc0br#_k7LONDobX)9CJ40igYsrd=sN`v@g$k5p*s{kUD7fs_O$NNtN2;aj{cgJQYAn2(ezN#fO$pXRmY1!uRn)%*-gnj*q`ZW2l5gNN!x(&I?Xv zW~*zF`S{_r_h3mp+1m3H^L{55_~v(>w;u)mg3|{}XXDw}Cnih;yW)rqIyCXLgyL^F z89~+J1adz98&Tf*6H$^kPCOK8G`#l?+)*d*{WhfK;kxpu%62D>DL7b*Ep)!%Iz39gpZ z&eIcdVLTstoErhbPZ0x4bp%-NFB??<6S$f6>$0T0KXs7XI>ZdRl@TBQAQ!6nS5O8_ zD&R?@%1j$aB7pogx`8ZGXhq&%77ysks{2n*Bx^dgF(4S|rkFYJv?>})oillF zmU@;D#v_pRS8Z_h0+oi+uiM@=eaa9uI1|JpMrh2Wb8UT7a>EILHWb&iuNZm0Jf`9U zEYZKhgUer|M)w061t6D-3EmfdS|$V`MW<)Ww!Xrls`zo+k?|HFh5idg-d~PO7m!F) zH!^YAH#NJlrT=Xfr`^Znqt2&M@!cV`oL}(28UBomc2AVna|KPczgcHS$*HYGp-aTB zU{dAbZ|9DYt-o-ns%xf`1jj`FN(}x)rZO^}ALvPb98MZdCUo@79KR`(mH^~I!L^U2 zklmOCZ^DrlPAH2nMy2)Fg!#A@izEWPw3G8NKj{`xSd1-x$Fn_TV=%NH?{^N zjHU%Y5bg;Lk`z;5Ltr3jg4NCdfAbjClEL+~Irq*Sh&`I6#B@GP*r+yPehxqDe<<4y z+qlA^b1T%t!zf^OlG5N(gc2Qd*&8jc0cd9;qkfgNFI_$$oW-0Q=??)&Md5$FtZH-! 
zz3Yt`6T|FFx?Wx9x_*4IW|;wHCHAMz_uw&wt^lHyhJ7}V9~?P+n4LhLJ_6v+mEoKf z9)t)9KH&PEtid0m)?$tRJTob2qS;@DbE&t&l3k(*sn!rKDFTXJk)x^g;PToS#+yKX z#8mOd`&1PWzIN`7SW16|DFqj@eH3?#==ZTm)nygA4P0QSpY!^e#zOXSF3DhK_f;>t6bx zW^ZZ)h?Ie3ngeEUu7DE@gkJH(rrRslKL>EUR!ux9Pu~=aU%VM-m9O7Pm^YK*(y z`XiAaln^T?)NwZ*8{Z5P(+NM(owRH1 zkOcI{5@`8>JVP0wQH@nHf=Dvr@OgI1no6@)gb$fl!Ow{jK z`2?N0w46;cg}dWLKaqmj(YkvkP1;$(xKpxSR@OH)r)K{nua*?YtL^5IIZ3C~>>4-0 z1fz9eF;)Mqr@#0^Pdr-hM-PA9yl1L&01xgf7wVft3- z9|bx0^}|56_^)k7cl@d=d-J{K*%+2iZdDR-Clt+{St@uv3V<{lBm<(aC(BNUgx}J5 zKeb}$P|2QZMUKUZD_k-5wtK}p`4clhe}=2;tGB_91Y4gK1IFw{?f0->VgP=kHds%( zH@<$r8E^Q(@mK_H(l;pV+tf#yAa&-7&nTt;iaEH{+O|yEs2v5if4mQ?PVSCU)1$HdGwuW7Y@UxGV8!U z~G)^}3I5jerx*hUaiC`pO@E=+F^Kj^z|3ijkhOn5V=f z0!B&Em{Bb(Q>;1lbO7?6hMj;$$e%;5M`!zd$|I1Fq7v@u z`d}*7djfryHDiQ^5_V}sTbmG5{shZ3u7ALKB%N=4 zKm`J3I`m`o$%$ntl-LND%7`LYHILTDWq3P5=}U8^tfZUiREy*jOND=;;EBPydSCx; z>wh-S(28(ME89X7?Ya4x&nM~O5D;$fB>I4mgfQ9dg&KKbUjT<0@7m_3`MFf1jP9xl zg^v+oYR)wDL!FFW=?DVIP#qWnr}i**lN2i!{`Te0`Ms^kWB*wg{dWUY0aA{i_zLPr zF^cr*8-`N9Esu_ZW>>h_mi5=f(fRK0S;WMrT{etWrUu{1!SLi$EEs?zoJwY<@a(4Z z`j-;|x>h*S?3Y}>s;&Vizm9Jn#=LXh@@a8`PRjGr%O8K2AjcQ|2uk{4M z#%zLp%DFgi?}Q{1X7B;`+ z)8D}pLCE+{4@Mt)p-3Bw#qH+SOeFw>}(z;C#)bUi`P}OT5 zPRlKS_4~>^wq590^=_p9fnzC)qz&mo;euZwX0&*!bD^-vBHgcI36wJ9sw+Imx0WjLTM+4?!!-MaB>a@wxd_ic!+D~k+?7x>GK9iY> zd}ynd%-9fo0EmT;c5NS7Y|9A+X&)yzskT^G`_Qt_&cQlOQ|~@dO;pxP6>iVmU9_~z z2)ENDo;P@j9h0vNDUwl-?xj8jf*w1&jN+M|FrSHdxelTyU1;O=r%6JKt4<Cb2h1{tLHAusFB{!=9*RGM{y5<7fF>AK8*kYWHCnY7xX)%SW;_L?C=gt z39uy5dwmhVBv=BzeBYSC>cv(W?i2oUv=)-akooz*FL53;4Jpn-?`6q}9CIW05~>4N zEM`5687q70qesyzG zg|ps$_r6EejgNjnuz=0s1TacU>#i>F5NzP;>_#2TLPzD^*3J3YkfeV$Q%r5&^R1I{ z-aei)!KoW(WI8dRL}e!+wJs$+SYO_dNo#xm8Vr!PBdm=+kXGb za$FO1A2zCKG*N~KnXI}Tk+*aF&OBgzVCP+PRE(zqpiD*Rqh ztJXR^!*d4kqmB*`|P5>~oimB-;3g0;(%1YyF83#s$Xl$*I_JU+g&YHeCCw@4#$ ztV({7c=QsM87gc2t0R}ZYCm`Oe^rrJw4X4Z?odmfl|_VG88Nmf(y2$$A{^% zriZQJ>7x5Rk<<*Pv*j!?KldjEK#@#9)+cHva`W1V8i`>TR;ag3af8-&c*r)KA>;cg 
zgI3Np_mP{UZjab)AVN1k-QL*GA<3T+{n8_?=1@rw6oY^hN=?jv1u@VZWsUmsg!Fy= zj!6Km{_Ytg1{MYcZFYRep6F2s?XZhLGd`5=NOZ{BmpPpG;}|> zVSkSU>kPtSNN}$rWyOYcHSSNB`TIkb{5-o`&CMrDo}QlWWyco4hh!GkY3rK?j*FvI zptLc{$)@j(jiAVg8Q`=5+(_3YpS+A4_0vB@hqm<@7%?EjGKjB0{)MEbI}7Mhm;0&V z?dJ!YN$MQ|LAl*Gy*uP@mgKpONLSYTNnB6qOo(p!?XXTvK_ZwjNhIkx!Y%+B>rWr| zx7n?u`R=7E3N&?0#c(uob0CiM4>}y$2kLE9&~dW%Saevo=25_x)1{~C;<7jJDGsc1 zrhXJWD(_dq37U5ZLxt=DRWQofMD?g-gCF3n9?30npFX)s0y?^y)->p0w_Z%%oXCp6 zoB%0KKAV-Dy+ezIl$|jM`PH^+n)MfPK(R~`+}fqLow{QS_iCR)SLWYBA!Z?<=n1zv z&RkWE??FiXQi4_+k?^*%ilNhfjns~J7We^Empt=#Rfzgv<|CC97Y4>nb%ZLz!HR8u zkk5(uH@~|=OpBpkNpgt)NmZK>TWH$6t@@Hs8$k1(&5U2LRWb&y(-#0lcNr4q{4V>rvHQgC6Ec=Oq6 zeChuz-Yn%u^=VV-;d{<3pNf3j({5#ELsM)kO-a5d%7xV*&+1oiJ;zH+s?Ji7n6EaP z&lqjlWOdl{MXzT$x!Lh{{Iq=M6op7?WG=j;ho5L`HJg{p=m+Rt%Z=@$<*BN^zQEewgafU!|044$fI<#taD5gWtTd_+{AI zF4vo9*Smy8f8$CttJfpivkL=&Tx4}@1ZynxbL^`Pj3-x^c^l*Rw5>Yl6U!$(uQie4 zXjh@K@s#J|KsBB9X|y12N`19i8o%ft^tig@35wh`P75|sCBVI{7b%)(eN_o80IeRl(s%pJ%hA3K|xG?X4tR zBkoK|7?D_UpR?cYG}G6VbR!v1vq;da*yC}1<+XNB>h4^6!l<0&={z+n_Sr*(NG!VR zwVpbGAwdq80$#RKK>o)9w;f9_$MbdCLT`+9JDRZo=r(8b`r?(rZbxg z6YAK_R{^QhktZLOg;BMvOCq)Ip&zfe{tDpbxE1_8N43D_QMcP!w8aI;0R*=_Smlmz zuzm^O{~p?zB+lur%|P}uV1o}P zWx@TXp?`l>BOnA^9=m${5+7HpKFB36(7%N9-g%TqunVC68UnwT*-H8NE|afLD(`R* zmcCsw_;|6hnb=d1F7y1nA&`ed=E&{s%Sw)};KmZu$*HHs#Q&3o=f8*y9et|)EzMEB zZ{=7U5d~~Eb4mmM+#jzd+wdw5`|P#-(#Q~p_0;-iPV%!Z2tuuoZ>QJXlfj3eumWB^ z)5ppE;SIsMWeKR3l(CkPr)==Ah6HPQnher_#AavwIT|3ZhHZdlAMJFijrfd(uaDzV zxNUvDkM55bW2g74-QRWdgQWMy8P{heaYB#J6r8x+GG>03--kPv?Ch=H-O@?mEr&z4 z#aSQQmj1$n$)nA0FAjg)aQYb1EnosrU7#w# zJ+zK8=9AJFytCCis9Gwy^kChM@G7?s*zZ;Yhy+QqhY|zuT(qWdPv)A)zn8-PmS05m zC<6>tl`A(Ls7+`z%w(8wE6OABhCXMxk|G1dUz$ze+Rlz`fx4}SJ)%Q{v0hW9+{BUR zzs37y{pQZn8J>~%oCZsi`VzGKt|(KJpRa?6C|vMJ$Mg$E#HzUTqq;R=U9AV#sBto& z)_G^nXMY=>iq6YPu-W5;i8wn zD}IR#Wl^iFRb*AH3VV?lXqZ2LH__#`Q(Kq^pMQWiS=nX3}Ok^G>Z z2GwFKKSfjDm~dVeSgg$_mepo>1fYGCkb!Ki$J< z0Tkxsd2cZ!>63_Fi32K@KyP9=u{-=9<;|>h15mL;^_5;j)jl8lsLnK2ppX4k6nhA(M7{}&yZ;-{$NE2XVLdI=BDypObONLDswo6G}vR%8-R 
z29pFE7A~*mpeI$cnRTYUMv!JU$%mP#PQo{8!a+xWd>2I^sBKyK4g z@j7nt`TVHH&U}imZ>o9X8sBoA;_EcklhgD3)kOa^`Wa?B_WU_8|2JEVS)^xZwd|ZI z?M6ezn8r>LQL08SDgcsJO^mJE%lLdEXgR9~yjz*~K8l)!q{)T%vxn0}^&?cD{WEgt z^V9Sj*BNh689^OTc81|kM^Po0#d!)(TQTp*83znv1s$SwX%7mM^1^fXCUs@k$Mn>U zEC3q-jDUS&O9hifzVZzrOQ2&h=Q9iYm`m5+hodpF^~2)XeLVdJdUNRwb8BY zs|UgQB+rK`n3}YgN;#3?#5MiGt4}BU$Dbj#@L3PC@M+%6W}$+ivnre_8zU-Wcel@} z(XQd+DhB4{g;p6uxCTf%eurt$^hVS{+w7MNpica&k+OX zXWJ)o(`zp1(awty!PFe{yO_tM?rh`ZTQMv>wH)*8oloU$K*twik24Td*Cr<4l3+Qs zQN=S&agrO>XF(e$MY}OVG$mZeaniG;X=~Gh-=GkLgQ9q z(L)9RygYE#{7ju0=$1U`M0V)g0HP8#GMQf8I^m|T3Ha+6gu^dv9ehd% zAe;#meFJX_PFW8Z@i-FVJdA)sJyke!TcP21CBC&3-DcKRP~C0&UCtlR@wI z(ky-uy(4qG0EqFV8{?PuW^UnIiAm$Hc%7A$ym55mWj4eEFoyqd0&dP%mhLo<%G@^E zJQbHL2^Gbd8bA+~9emJqmrT|36C~D5Z4dy(1JtqG7~m}emP8c$cvbfzr_7hiRqu*y zc-|Om)vr+T)-v ziuBAKB)@#A&wJzszd@Z-glQLWm%uBoe`;2oOSi?=*5ups>$X}St`X|;D zSx<`H@gP-_$V8C6gx%Y*ge`=_xO2=6qb(jZknN~U#`oQ<@zj_U&X?7K-p{;Bmh`bcaoMU}$k+mKf zY9awt4CQoCr;dseK(v7aKP(iGJT)c^-cv-%cuvVAvQxqDQcPW@9!WDo`S+4As-g^_ zf_O*`*`d9CZvyI}jbVSbWB>_|XgJ?*oOeP3(Bj-cS4rIn0j5FA!xd(f=ek=$7pZro zW7aGmop`)xNFh)IIKlftlLw2IrY3y>!JgZSaZpL0^FW=)F_T}P%BQdveVhQ>V*0BISQpmqz#zbJgr``lz(g zJ8E5vc-bH>j>F25Xi6z_$2ZT>xBt}WY@PLn zmE*nHHWI`P<|Wz{IOmyv)HWoAzffb0+qr#mJ95u!t?Zwlhvhv}YM%@uYJ0TQ0$>_)yJrtG2PG*x zTJS(PFh|F&2s7T86)_CZ7%C=h6s310+FuhIl|8x_+cpJHALi|S_OsgwXjhv5gy=~WLGsg(vHy^phef3JM0ayCz4NE1?GGc*p%lc$H;N>w)dtdAf6EgILgP%0~IlCK57_g}VGQ(@{pA$4Xj4k$Uw8>r1k=8^jypYWX>O!^m9hJHeY z)$Ny1*(JAb8_)F$A)5nhl76~9)gkXJBYZ`+1Uh&;2n zfh$Ly->rR<8Tnc3?9AL!5^$b+t`o1yXjjp@z8~T1+@mQ++J^vaO?f1u5E3Dd9M^1d z%u7plx!>Q4uQ)B8;f|pOry;1mKMXzRyUbbN);BA^g56IuwT*ukKC6Cr-|v&$ag(OL z;P(e!o44P9p!cg#$0*^r|J z10sgb9Y9J|WnuumyR&uktSvCfltw?1A}mH~7rcs~byIr_ z?9S-)0kXMxJl~K_&VSMgTKhT>1Dh@u`&TEGsK_gzu%UOtrS(3Lxg;@rt3xC^HagOQ zx#i56l!SflM=r1y>OV>5?pKVvpAPtI^Gbn9HpWF*{@Mj0S zMRrE(nI-fXNa=sZ~Q96!itjCwvGmwh@f=_f50PuMt>Ut$_mj}Z3uJ$TtW-vp!to|H*dApl!aFa z$10jM#EE(Cmau33D8S=ic%$1U!fX=$b20 zGrcvj$W@L8=VEUV&d^1r={x;KLZAIC(PMulsNUC&0lE0zBQ#sVaeG0H9h?831wqK- 
ziQ1D*EQVF`)Pv<^4{vOiKND|KA`Qo}srpMwwI%XgL#p>>z|hnh=;uizsr=%kQG00S z?^uAhA&w}K5=i)aJO3qi;qE?f)ixb6-r$%1EZXo^)3BCS+kbdRX*uOIM`0u~I13%R zn8L%u^Uxw%#g6JhrQG7-MPq@gNC%)2n%>)y1h_}vosw>2Ya}^kZvcWAGTC5#9d=bc z%vbvo6Vu+fPJ9W9kScV=QU=HI-9Fg)n5w)0iBdV4>Yfg2-5%_ zu)C}B$7J}{j|SNLqucOyj@fGrUKbtiJaoW9JcpsfAVO+9vVfv^(<|+tBH{;TbCCVb z;NkO3haK$r>LwmZS3yGr>H<3*|0+>a8+=7Q6Rg^3>ja>(4W?g_af$IEYGm+u@7z2O z-17p%20(P0Xqkh~80J`X1J<^X*}7i0CEq_zb+r1_xOuC(Mtuthg(0kw8p;~Lfo6Xt ztj6mRl)BnhV6d^=;zR9`d9M-7!J_L3`(b50_}Sw3oz~a?FM;aC!3o5lRk8ib!ro5t z#Cxylte*o_zXI$hrrb(;iOtUXg*{wmX0QI~w3qzBe0qgr?$Djs{S4uz9uP3~10to~ z1l_CLOP5|7n3X*}X&w7VnF6RW=1ldG<2fqI*_k28l$1Ie%Lne7W~H4~Q0rlhpC19z zX!0JnhjXAc4TL0Mm#rs)DGr3| znLND8`EAn=!ydm8{2$viX=);){!-qd8kwtfJJ6l0 zh>WnPDU$0r>efA_TI9c523V!@<0&O(?;LJM{eUqadFSmUA0t%Gkhi~>&IBs%LcQ*w zXbK=8TzUK4@(VK)zgMgAoA31H|{{kkw%{e6I2^-lhAFz)s6gNlS7S5JTPT{^b? zrhNQ1_~nS7I!#jGa_5!CA9h1zc+np{#z4)2s@YVb>hY1w??WGWs+5rBzYU)4M3x?{ zFv>eL3%IQShEC|-zI&=hpRT2-wh}ggwc5zhhHwWJzE{5SBUSU%aWqyKK==gw2jLSL z@h`%saUbFH&w#Tp_fp?s4NKbe_L-Jhv^w%F@?UaKc17T~y69a6D&ijdfd6QPGiC;i zJ?DxUni;!&N)H3B1`qST>GUf_$e0{^RCN1Wa*Fz+(h8wQgxc2%%j?`r0C{4K>`v2s zRAyfSL3TMye}BP7->JlhEKk4c_y7Qv?z>ygShWGDF0&>9fy$m!66q=ZE*G-82E*;= z-7LBy^;I@+z-zLI1R&KJQX5;nE;bMpNsN`(T(f`tzPL`zK1_o|mim57*+wKQm$QGd0tqHwr6F}4x9Prm2y@G@0cv@*$KT^H@fri& z%A@O+e=@bn9a;eA$#qjtzw|TB6QytpdJ1&7F0$^IN-+K02?4+xx1~y|Z6GY3vPP`H zDhU@&wIdagL9|59G&`n!h9=Sf~y&^FCGI~n)QPu1oID4A`~Evq|lRT zjtC*8G@#sb-?g~oMPCrW`+0|V$v*A44^AYFp0;sd2lzv-n2-qCnW8*4t09;OTnKp8 zxPQgwimrTqh&ez&qH#j{a^lpgbUV3$OhCm0n(?h;MXqDOA69WeAB=VKH)=s@fBr*F zBB4WoC}k*B($W{uoriE3eyv=7v6c1W#z=UdqMF0WqR$<@3dINBt7UGb(3~ehfsuWk z&QE|dKEfN$VxgV!5PmFrq}6oicfo-$q`B!MEfqG$q@SaAIi;pw*`yRNL*4xPmF4#_ zuV(Y}k6zg8T{5pr3^ecj=YLYdWOw+KFp*%HuiK=h=QK;lJtIhSG0*o7-^!4|9L00g z`eY1UN{jrwFX)_62Fm|Y4*;K8gTn!eC*_nKf8OVNw~~xrU0x5#S@OFOyr2M+7YiM~ z`bbsjlzt@__0tdAez@YM*6w-(WZhiP_dAikG4zx`XCXqRX{KDL4SWYkSJt+{C8X_Y z)0vTT@$w~XOevOyZQlUokgyIXwadrjIKWL+Msx$_uBOFQ@Sy<1hbt-vj8ws?vn7C; 
z_G1;X_!UI|v#aeJ?~D<@e@K9|X8E7xOt>d#zDPe){rqYBpkv1FZJEEZs?o!(rg?6C zN(IOYz*B=U|BATqLU&hrHLt0|0b`m3t}EJ9UUa)G`oO!Bq1cDGI%kWOK?90gtH7NT zugAPp4j(>DizR3x3YzfNU3n2;^Z~9|K2DeAhT3yL?!Yzl*QuewmaBxDT>)IKfRjMf z@9fTt8~bZlG1Q*GTVQeGL^h9DhZObEs!w#aB@HnYvY_y_;QZ}p1zSgg18!9Xfx}fi ze!LykDr5%oWvPPBBS5~seuTAf%J~|DH6nSAwd15z8cyr8o8jzFzow}VAHk#5kuC1^ zfj>_p&)yT*LDuGu#&iZq&uBNuzpUh(sg?nx6Y9ha(vyW*W15#7QHLeHN|kg3$c)Ty zml|)r?|Ec5H3pzD{}MB4B3Yl>zT32DZkro3vptewW!;$&@|z#-(Zxc~8C1tyZimxa zCQ^2e?jpQSH8MHABP_u}#K46ImIn(e;11RrGkYyo8GI}nF$RqM?b{-9A6vA)7VO^k ziU>h}H{fb_(2)2`R?WxdVzg-@2cFAUwX;$<5CL-i8nXXndxX*Ske#coS6k6Oi-Bv6 zGYXUqS-Q3-qTM5GH(uwvDJLhFfO=Y7@Npy%9O<(aPp`~sY2vb#H5(_PQ6 z7a56fa>zAK=(3+Z&;h`_7i@d6s@x;}6E|bCD*Vn+8b9Q+zlQ5+d_{dfuBnD`w*BiywcP9##RY z>_r|$Q$4;d?s`-kGI-&imnl`sDR@V{xT%@7V|sT+?-@6ZYZmW530*yfnJESTBHN4# z?jb4wM{?_z;Y%YKnIcN?N0M&xPezV;U?M9m-S3fCR~56YP_MhL2OEJ-LE5W9J|8p45@F|Mo1_>6-ZR8fUhJX;%7JaBe?a!aId42Vfiq|< zOUK_C-$14ym365^^kH5n9_Xd|z+4rX!eZZEFw;TlQ!t)J9-nAjYWV;8!Ni{bJ&>@; zWc_^c_W*KX+Ja#)o~9U&vb@oiJbn+Wp3di$9mEd-Mwq|(K@G-2<3CFJX)CzggV54+Z^_PA|DqL z(5cD)jOCHKKR9OT<~PZV@qk;qx%6YZitIyZNAm!v4|m_lYc4BwHIuWV<+Wi$0%{$j zl(eG1mJfI$phg%>1?qM{+)cyAv9l;S-KB{aUxX1dW1?)3apv{CB+ z;y{4|sX+yx3VdzMLkvv9U7e!N0W4BcxW20k(IiS;_3552LE7;tKfoK^pstiR*f~Fa zeUeM1KN5@%cnf%C{EXL44g&4nd>E-oejFYK!~n$);4|WaD>>!x8OQiE{_T###lh7= z_GZs6=%;z@_#cy*_0T?A0^=Wjcy|oxpB@gaZU1%y)SAx4QoOYgG$mhWuADwl5wAVV zOrO5>BSyLtCa;!qQ_}66{r@w8l?U4=hF(*>oFpTviMz`|H`n|AYTufi3&I ztjw5AkeHE^1{nj6^X&!2bC7@VG$Pd^5m^PLB@fZ>K-CbIr_k@5fG==TrGU;R$MAsp zl>CAxm8yl25sBb;xBVz=EHvq-zOOCJ*vwt%u ztOqHfdb|(A;yd&8XWutDKm9YOrPHQN@IYu|qd+`-p_1lFM< z|EzDS0}JO{i#%JocDCU30J9h%lDX4AAq+4yruHq6ti{WMdXvVJ3h2Hv!I)VsbgTjR z8ttc8WB(f1QpW7LR7RKfv`)w#83Ug)Is49Iq(&3EEv$77j|@Grx4;0H?4qg6vYq{;F5xOxwgF=73J?s9U#1VR% zoPP&N_F$0Giz>4$h^_IQwDt35A=2#TK$qQaW~a1A2W#=Em)X=k1>?|~D^+~uML-tD z^!g)Pb-DrbLJqn=!=yR0o3hLM=Wb@21KZd?Sf8pdI?qy^Pc6~o4*CLU((ozsjxUMK zkWxLZ1?o~?!-E-eLFObAR*uQ4=D5WH?`}}!;8reVW7PeMlI}iD5f(4Q-+z^@5fVem 
zDs0%EJThhgNClmKno}h=^7E;H*^cf=jdS4Nt2i)gN}B>~az2}D^EW8Tq!4SBql^kW z)PQaHRwN6>hklF1xUsA}Z7vLWo7m(n57jFVV9oyytN6hsEL~@F6`<>rvl|cf2%}I# zUbnvykmgkpUA%Q;KJiQ=@=6vWIt9@~+G?R^vhs@{l~w*VfIj^9nc_lO*|3VA?gq$K zVTjuoaKbM4fzdb-3{MBtkzo-tC0-)>2N6JRc@i+&`RA0y2>+gI6io62Z2ZWWclJyeOVX%jn`jF%4{Uv3>o z*y`5{n(9HKbYy5qbmt|rGUc|=dn0=q87n%Yq~&rtz}nf+-EN=bGk)=)ra-&`;p}CZE=ggv65!xn)kMS>CJ9zoYtkR^0 zdUH&uk4%;0^+h#L*f2Es83%ba*z=ck)*J_An{yJFQPa*y5M-O&vWh6#mJ?b#kz;Zy3Vs4=8XePAQ*259;<>Ot|%vjh-ft!yRY%;G^uM`MMITQ zdFPlHDE7o;)8-G<5vN$8A8OLps|BpE9#Rm>E>LXcyVdemD)*lmk-U@y99JIQtmC#& z2flo+d-g7#X@-_x+XU#yN8OV%_u<9-{Zc?p$TV_@R9KgIE2{V-hw9AyfnY>CV`=Pd zh+TxsUqrchS%eC2?@q$Gy*{9Q3# zn~x}?EU@?`-NX#)bD7GqmUr=YVgRNl*T4kl;*#L{J{U~QEu+@9S({Z}&Pga107x)Jor=z#BNtpI`ldJ;& zxEYPB<^IS)UcA3cC7J>o1J7PsS5j;)x)zjvp^`^4Z@${>Bun&p1s$-F~rTSU>uXUgR(hHoVV@m)!;7JMD z3xIN>fhHl#DHeA*25~w>q50XOds+N7A639sX+Fe{wo;}3_Oi?&Fb&_kHlVehzHalE zJ7l%bfSP`pJo_@&h>Xo})b#TNIWLMBS_VLne{kS{G9?eNwRE8Gq=D4?DBfJ73No^a zhUSNEy?0W>JgHm8c960*^)kVoDLI*Qkg13^F6v%Ac5Yqa)~ZG1ro0L8?i=o?%jtqh zH9+j??#l4SWMu@)iPC7VZcKuDMjgIFoA@G*RP-?`zk4<+iN1Gt+hUF=!afX^SNT(A z8iAJv^MvwMN}Sy3foO7~l$V#RLe*RG{xG`JS65WTcY3YA$!*5rcW-eJo~B8j^MBL6 zNmcL0fN`%#QzD_$%qx8kmQxuXVrd~WDo(fK7j#u_)zh^RL0HkuQS|PV$jRX4vjue3 zHQSjhg6}qL{VjC`Ks$36^k#YH@zWj%zh|qU$~C83wVh~!lW)usjlGlI3tRSZ2%LtLm2)&^1AJ_KxZda_Y1yM9kE`aFh@v4@aW+=E)DU=rpC)yA(OfPp@UT1?TUu zXp9FuTLp5xsvFbX&Wk9~A&xP`5I+v%j1O;EYSJLyK5#JhHiw*P+7zmF6!Ajqk6`QC z%#s5wf1HQU_-y=KemWB}r}Z2W0Z|{^ z|0G>c!MOc|(*kJ$NFzyNLY}RD%Agxsaj5az->-^=0H$X=9`CLYY%BDg!^5VDcff0^ zxn>W*5z+zqDLXWEhaOw=6}c0v2CO9vwBp$8z)jqp4~x#U?5!JJ2VfNgc=4Z@J^|R; z&C}@OOUF*GfBp*#J2vyygI1)1kp*dVJetz$1e@fQvQX>Pa&!!yqiQrXt6XrLinD=Q04c1!_K}KRLJ3#zRm2 zc6qtE)hIS(HhX9u5b#vj0=8t_1uUhf;)$oeWdqE3w-HT1?YdNl1whlSU-KVYE(5JtT5%z`zD+&wM{kWm z=cdUVXnY!yaO`ttF6>Pz+E_y|MM{5p7LVMFTI1E7COXo4SWRYV=EtdLgMTtRsta5y z5bLz6YAithYJ2%9BY1O*(*ifms-o}NZW*#(?+|Knx89g)b5dh(l;~W|!HX3m-qN#&q!e=|@g=aCcdR?OT^!Fv)`?$fsIpLK4zwvsJ#?>I^i^m{ znsMCv&bqkMTGQh7*CsN7bk&k&gl8QqA$GurH)RCxoP4rbAMl#$Cds`%2f<|RgmvR{ 
zR779UEBVDD!5pj_>XAQQ*!gSxLkbZez$Q!BttO+@0ru5B* z3#ycu@+W&JX`i<3)EeL-XZ37+@ylz|Ufcx*vkNi%Fgcp#Q>@t6X;@4Dc^1uGzB8)U z;o~CZIyJAQX!3`2nECNi%=2#4m5DlQ=8_d2+Qw|qWZ-XZodcls-z2n+23fpWx~HX| z!#%3#o@mo`jJwb;V$}=C7x+*cxFky+9X`FHfpYx)*`E{McC;RQW*shY6j+%@4^Qb9l_E%k zU77^<-ad+KKLpI2<%HS(x(Rl5Uc$uSe$ByKC|T9wkMfX)OYCp}GL?CU@@c@g3*4_> zVi15Fl97Y{lmB*v*`-DQ(l=tK#<+?v>$^sFvuA^k04(r#7azHLGoYQ9X!Qtt&~vU8 zsk;?43+v?k8h5O{2hdsp&G-f|h>0zE{UjuOE?#>DZs_s*cwU?78(;->eMV4tWk~$~ za74nyc-kd~2i|YEBjzXeTcJDPRZ(68iSE&6XJFXX2)Zloyu$j_@1qkjYn+)iKJjEC(kH20i%twJ=!A;-LdwVMPI@i zkbYVq0=YdAIZ!YAIo8j-Yi4do1=Q8kNEfw-NF7Hwr>zs8Xq7V7L7Ymd8kZ*!`Vup# zXid6IV-m}D2UK#Bcu{-1V?{;dXJ~M<+#+Ua$XQt3rfIaWb}RMf-j4X3H%45&UH>s} zDW?rSn6iMZ>C{k9(tti<%ArdHE%|9}alD@xEGQEJ>5->oOR77qLg1OxQ2`tNd-Qs@ z6s77M7;Oi}6q886C9y+Ie&~NHp!1xEv+n5v5;$b=C8*UxVJ6-x``IAem}p9I#n2rF zWp%q)n@r$VG`iAJ^97J69+5yR`dvqBUw$LE~~+@XGF41kl-lQe>VF z!L(uT1!z}HAc&z=u;1OY&S~dLmVs_NE&jR&Utiz)Y`cOG)aoG>uiOVlJNJs`iq6K) zLH%ZgW0rF6hTRC=Mn6J#CCv~jZN1Yh@;(83R4OO?NQJ=7Ch9MjANpx8kj}zDi4(|M z1R{GghCH_M{IsKfojK;PD^B>|xodp)?o|T=u9&1lS~sg{Kn{N-wGtMz5<<)1$M)>e zQTNu7V!hkJ_JDUul!OMn&0nZwOUgoCS^z-f%`ZZ}W}`~!@SES_=rIfBxD?qSBv!ac1p%*#%Qo zLreC-k2rLur089b)CWad>TjLh?L4Lkz*3!t93PEgGXLD0kLc(=?Z*E~TP;A`57nH; z(tew3Hm%buVPcPCbKTyi<5mG-vRO!`IYv9{_J!#=BfV*L?$F?gkihV99v0QVsvUlXsXLIbB4#BfzR4WVcAnItuh;CNX+xyJubxcJ;WH zOn7UH7cw+GE_GMX@#s&1AxV6Ha%duB&h~Pq$;&N0Oy}orq2PCu(ySDSfT5!AnthmX zC;zYpALN+z-jgGeeuNFKFk-+iufpg(K>NkS%BV9i=b#vZ0*NopeIYnUkYWO#v_^~@qt5+^p>E4Vy|}9Q zrq3F0r`gM#Zp_DYhrHT&@k(lPVB%{PP1gH?dMEUYJ|!{v!D2{HCtSbCDnrD^+H|`1 z+_dgdG5aQkdcc%)v%_44IgEk?x7Ek-(w*NOu*(-R)0!OAcqY?Ls{e@&g@z3wt#8i^ zqnjmAxf5tu&Et&);)09P!ptf)nZTsmK&|9MvC}b`XxX^z$M!CLmZ``N09t&GVUxxK z|3d-Zj>J$U%02$bW5~^qC^U@qLSF%QVKB{%{gt@GJHPBJdqp#sQBZ|)(%Pi*HDJQM zYg9c{Nij=Sm=R?_eO%I1K8hZETNuh<)L(PK^mkSOo$TSA^4J2lb!eV`q1-r3aOBE_ z?#=u7%YbQfaYld>&@Y>iUr^nmZL>UP?cEp@qOai@E7)m-sQ-DFh%dX zcHohL)mhw`{I@H0K^l}fmEalXv2Wuh?Sm_s-It&;DWr%Nrm`9T=i4T#SJ26wIK&Ym zc|dTEiM4u8(pnR1J=&$Haq=B@Oc=4Mp|ahcR82SH788sxx4!N);X)OM^^n!wYcB76 
z0biI^S;89J?h1KpH--4Fcx<{h5h~ZGfzqMc)a`U!AF4^b zH1>XXvxL;y-6rDvj(%ILf^x}Xl)Az%MN{{?Xx=1(YqfNZbw z=&swFrlcJpy89JBdKvD(z>wETp))AWi#a2AvL*vOg#E-g;x!v3s0m5p1JoD}%r&U| zkB&Yk)u1=qOtFp~Ri?FPLw2)h^*BO4oL@9U9Xd2>(}$8@aEKU%6d`K(&O3SypLUke zBAML!hTJz(cIz$>9@fwd-{`u3!~uD9(R*bBg&2ZRGXNHVUt5>B;H%a2Z1Jd~5LeOo z&i#Q6(HmE02*`7dG2u5#tT#U7#`XkDE19Wk&bG0fKmDUmL#+v~IBDPM@lWChbc?_k z4j+9E=__YVZnnVJ*o&$0eog8ZDt=lyXF#{t-RZQ)-No>;g$T86(ly-1gCr*qNodF0 zXu1nhb8K_1rbQ$=*zT;usV#rVMdS;1WcGsX&G_+0@eu+ZHe;aiJJ_w^qRtp&>N}u6 z9IB0%|Eh*=jLsbD%@pZPM$R1lk1qlD8DfrYdWJ?L_eM<*^PJrpZo`wob#cDMB!KGwj;*d^nQ|WwN>q@0BZY@&&}hj zLOw zWeBr$1}NeH9p~gqOX9+zH4&ReAOK2MK`J$+O{|CV5(ln(?Mpwvt1D}o4WVjvbv~9= z?TDP(biSd@W<`GpUJYFv?hLFI7Pa%JY5xRi00;$_jPa}czj^;Lmu1%8JKg(D^Cx)} z*%{k4Zyn99#bn9LcebOOHLKQSV~?7RoqoI;`r6Sds)i7+!4~S%6ul z^N-r$CHjb*lt5t=MU^bF?3D+m6k%6_Q34ze%1NC#N}T_8vr3D#1znMJ67}WA@u0syws52z&EEYUZAN0y_D`8t*bWqs=mk5gQeX*iq^Ub&LD#7!Y z(!f4#>};;JVBL(F053yzYx44$Im>xlV%ZT)(PbM_lT*N%$DWuU8GDrYP`^qaf>eQP z_3<{4YR&{Pah%+izFVg*{cZv^Y(WtfzjFIioP6*s57I;-f#e}Ozg*RIquZeRq;3I0 z|9J+Jno>c3b{D+y9vd&WQ z{7f)h8DuB7O{kf6TTadg<<~edr{!6Je!_!TNq9XJb8=wT2P@te`Dj4=)BVY90$y@< zE5wKOyXx!tmGNmiA|w9nkSTt|xZz*9q|-MjiUHu8{o7GOOv~wmIg=;QKjNL{E7b&D z@iOW_0HOIvhKZq0*Q{ZwN9@&-rv_GG7#047Idg5?_y71XsHlFKNLQ_GZo zYlN(Kms+=Rp@vSH{mAgdNw2T0{CpyeBc)}cOj_Yb)7+%pa2J?(qGjAl-2{RjAF00F z6(=K3LWi--2sF`+$lI9G1K2$>+PI$SI%-5AdNy?6J~t_vvsNXn$Hg{=4FCxG7t;x_ z=sM-7y{^eh5K@qKjFy%(HDLn2B~SuaTCQKt@eJU^15MQGPPRG-jFif-+!zXo?lY3R z?s-dO>x4k)FuM#Rlf-#>=`&0gWo1t3(xQhvh&TG98#%R_>$6!LbHcCKH0W>_5AhjG z@YRD6*BMpp>e)qbH2KMMLqn{-g_H zqk^qZ#Gk9#sEnAPF6v-U)r)zB*eH+i+cK(vs$_5guer-jin|eY<)MEYOgT}B+*j|J zh4BEYrQMI*Xs%ZWPkbZua5u@9 zURQ{AwwcK=31FIo(_0vS6lCv)@r!M=z8zHi@G-$QLhalkV>_VKJk+NfFK2*z z5o@ay(!mlgqsC^)jtdn)9aCzveyFq=)WX!{w^+e}>m%#U&JIgoj!Jg2$w{ataWtuf z@G*95l{-aAvk{fgOP5J0`{MeO!)=F5`fjANp7HEy#N+%&hL#0(+?&vBU^joqdgnF< zVs&HPaE6}g^z&H@!^2EGQLvR<6fv6KFKN~lv|@i)Y@>!)aDl1yFqZOAUruH>D+tc@ zJuB3C1mL1ayV=~Tmyj;A$IG-PQ8js*m~;dHUQsY(E+$&w#*mzDyeN5y5nt9?kBalx 
z8UerVl!><*f!`O+HzWdG12UU_df4X(hn0rgnhfPHjHcAs>5*dCp0tGsxJJiLj*rBK zCwthP$e>W<&K;6reb3Y-FXI?fiUEa%PF(MTSL8pPywe39FBHK+42ao|*ZtY1qdB`` zuf7W>yYrx`$l~Kf8R_dbXb$^YCos89UV5F~)xY@EbVO<(G?A}^q}3dCrj&&(9`YKZ zDg$%zuaABfE7KHp=_F8{)q1yJamW^?Fdy4c^rBCHYSVCL0J*%OJKZWhAkRJR!y{=f z{j2#ri@oJDyNy%QV|C$HU!^685W?v0LQ=ywPM&l=Ap@tJfM`h0dFj)b`YyRriHSP; zf{=nJX%;q|x`3#o%4THvSWk@F#5kU14~X43W^9pT%xWldbZ99n4o4P8lT8&>4msKD zy{UgjD!<5)o)#nIy(^Bj|2eRNb2mC7`dEc{Xe7h|+qSNmz1$lggRBMRY!g1yw9?1CE2Vy(@5Lni9Mbdf0Z`A!<9?(a` zs-1|ZFD3PE_-%M;K(Bd2_qJ=7b6zMcoS_F&D;QC%hf?;?QOht-JMY^~RaB--W`BBb@GmL?rp#%Om$>f6e zuwkyM84AkolpYHjczi56SpPWsk%df=rX9=EEL(>=;fRx3(oAehJuz9#l+s1hrCrtMkFW_SX4_FMv+rccir3LIU!_mOmnFl4wzFwQx@d!>AhoD zTB45Z5lVz+r@je)3iVdOIuX}6kXuYha_&7xi;M49wBmAt=XBM-Ej9sx+~TudR$i&~ zX(~p4dy~3kKN`4nW?htv#z-S-{O$yfiK%`9CI3^?qXfU7B78j0x6;lyK=o|E*(wOTa!6={O&0e?jIxJHvx6GOrvcet@#9JD2k_iC zH}DhB9Er_W#d?P7;y*QvRz~Rs)N=SGR+9Hd0UW}Br+nYepMcKjMU(KJlKWF~gI zHxtEC$3guekzG?4a~N~G$md;;AZv{ajlZWhYg!tw}X#b*QtT#-|eVm6@>{1XPeKqcSoECMZE(sDA1~Q+3!t3?z zyX(u=B~uOL%>14m>yr<=Dy61UJEgAMuR*p}vc0mu#cO$jCZ-a(HBqL366!F-?+~dd z#j!IzMS1WkpaDR&6D^GHU%&Gb%J>3acefAsGUeL+t>0ZyT3wh76p8*h_#TYe^Q}IO^j>@^#tn8)i@eI;)Soq$3t#aDhjyuoDf{f=vj)re~A!ftRF9MaA9?+ZGY+htim(9iyb#8Ebw5w;%J%p1T?x z@1-ql@wH5{MH;JRQ=#$|e~&F!DFUnkLfwq5Q@lFQxWlIyU8b)Ebp2aaxrhncjq|d9 zoFnx7l5t?V?75w`TarL0U%R$InSz)sE#jt3Pa zd76sMxgFLd`d#CR&x8@=oTAF6MX~Nj?CYktpaM>Zawg*i}yqEGed#J{**G}s6`5nR1j5KW1nt#9W_ssBilqbl$J?x*u=1R0YT$vSVD-{V*HDoa01V3Dp;x#!vZ`6;~%V;|)L z)us{`6Vs#IpW9gS)hqM*{+61H*HZv)tBCL2*E2Y)xFM-x8+-e4PA{#C)f{#Ss(r)M z8Zzf$LhF&OE@>D})UV@=yf_<^$qKuYN2BKDcm{5JqeDw}uYTf!%H7gjlmP$M z({#Dr((_R#%4F=B28QXK&O(#>3UD9g7Bzx{xXKJenvZQ>>u2a|m3V$N9Fn7m)s%L6 z%KxNJAgJC?eZ>2gq&;vEJW-{RZEscPYQ2xkNZ&v``v+nrY=jAu`>JSXb4tPQ{7h|+ zPiQ@V$0>zU3?8!RWYiqc)K($lXv3o<8dgV+Im0c=WP~KlJp+qJkr9XMP8tn{5$1k; zD1OW>COX+0wcjTFTCsov!M%<@ru3rxvlDz#ysFfzuZP6l^LhOHC82&4VR=gomBQ+I5W zz!JIpbo%wHDL38`23`RNo4y-EZM$XT5G@aZ!})|Q&E{s|O|v>}bC_8lvM@}G4Wt7lv_cD8z6<8!QAIX_{+Mfmet 
z27IMu`qfC4n=r*foT@Ysap%|M`1{d)#xx=t&Z=IXs&(f6P9q;9t>GF$uCCVuvMCUV zu%p+Zr37I>r85KexN*ubOY!2e`lUQ7-_3l2qfxWZ58syKn~@GG z3Slh5>d|o({-^1wA9Hkwppgn0k3^+S$YZ+QF~u_kHmv_w3ZG*#fiQrdk0}igi0sqI zN+#LZU+dzJ6-H7yy)mCUIcz1n+NzF=?i%IvNnJA+aH+zKxs+&Zve&xJO$$YQ;g* z4OdJb{~{c&1ggO_F2*Z+^Z=Apxer5{gR#3*h`^_DO1@z}7 zBxWEva5T~1Rt&zUs{(k>FI7R=)4LkT9l>~GV}S7U$YlHeKqZGaHc=R9(XQb}It_|L z&Zz;c7H`-vW^tCf+kbg)#=o5T`pqf8F#1R$6qOk0sSzV9yVZY;CMO3}<=p=RioH-M z`elT8)?d^(i*9Uu%s!~^BLm4EAR6=>!R6Q!ImRcosneDSpVkSYI)w0n0|$Z@rbL5; zrgJe1dBpre)lmOM4(;Sv$AO}TTQ+|*K+m38ToMa-a%bl8h`t+*;2@lff4bHkaQOyT zE__Fpy+8*~>bF3U*52g_S4n0q%xP=jr?(TH?#<}C+s1~zX}@B#m7w@@Y)sd~!^3X4 zC62cvPV>Cr*@E}`A8!EyDD52}vIbPT$biJ1&?qCDNNYzOnIdty+xwImpxnCBeYRRQ ze5}i#8hDgZ<;V8q%HtWS`bPcNGOL5yRePx%_X@9(yc5pD9q5g#gPsspxjd~~f&t5r ziC^1tq+a@gkwvy=Un`i>NNXrL>V%&LNWSkT*TlECvR4d^s?!}n)oLd9_p1;O8}}7g zVhN^lPU|h3IR!v_Nty|}#%Ry6^x(;~UuiJ8k7zw6Su0%oejov6>BPU-SK>tNwMtrrftLj%{VaAdu-?F&djYd4K1dBUCG#>(SKdZ^5fT-fqoD! z4t`zF}~UX5&M29G<&;wF)Xn9!&Kwm&-Y)yOplhxya{~J z7gjE!`+>9u&zP~tgWn&Ub?Hej3Ga}#b^%Sl;}KlHxcTr_&0;5vegDG^^^HP%E+Qe5 z_gAM@guNv1d@3%^HLLb1gsi1+Dt<8Z7Eul)ri{Ws1Lq&2j7a5nPKR-s)`{(77G9Pj)&AR8^YbSa8R<_g< zi+4|Rj*%O?>SVxO5Sxqety5zbfrhKthIb<54Q$fV#zNomwH(eeyS=!!y2YS6%$nqd zmFI_de=MIsweUrnV6S?OUgFchViBAEz2k$s-~6fc35khj<06Al4e8Lim1{S|W16M} zmQ`)N({pYOK$&6KmYJ=9Ik9Y{bK51AAiq_rKW@kt;Kj~_wmvdU$aB@-{>MEis6nWk zh3YHzw#4-FbG1)PuaqkS?%ND}OuVZ&CIR9Zbt)CB$+JRsDl{ea{P3!^bpUc`ig(RN z>X37fh!qnshASFz)a~6&qnJTOj!f2iS?lo5HKO8|ik5a$#!=Ik9mA6F9C5772$5Ua zAHU4C#r@OF{C&j5oU=ItV7tXpJ(J+6$y)a!Y?s&ag(`j0*8wv*CX|PKRA@!Zz^r;{ zTWFk_<(8X$Ht8hcO?lB$?Xjk*sVY=A=|WYu*C>k6i~06@bLa+)sQ(0Jv0}-Kf(WR- zzKU^O`Tpx0M~mujU%q7%9JX> z{XEFtZWA$3K>H4NYu`-TchbJQx;m(z@-*7pQFNyT`SI zWWoB~f3yv47{e8sBxc4LjwqEJoI-mc%XEGHfgqi3MQIpbZoEY{5*g>de(-}jyp-e#U5@c!$77}UqGgq#O@quPeDmpRq2jh#(uzKfD1J$Vwzt_O|NNm zLq0yK4tnA*;+Q0Yam=Cuo|1@o=*lw~!phe@Vjo1;KI?7Sx}lpKdMq+tI#n*$KJ+Nd z{^k`}*a6K_v8r$<+JAgAS4e{iF>^^}8;X0IXsSm>9~p4WE|VQzZG-UHE*aDjP`VcK z5zoNyN=l*1_fpQV_0Tw$!2R+HZgzIb`wt&K%JcQ`s5=;vrZY~2?z{-}*{Gclz(hkp 
z0wPF$XpZ-Mpt?79d@|nq1Jb|sj++Y$d}pVUl8Wa>)X(&^w328uztF~Lh9nkZl8iBS z=`%O%v`qzZYr*g0fR_C&e_DFl8+<>eB3ZjyDp2bg-f*q6@7x)x)WLvQJV7_Q%*g+d zkg>7Ab7z{e%SioX;3!(ct~3w(2e+T&zS{rC2&5ze%yE?zXF6#houqP#{OpH2gTd`q z`coQD_teGH90iXL*&UM#kZ6`GmNJh%oXH3^HghV5PZaIR`xoVpG+tAF22SDP%FCQ@ z6WfwtaVbz(hFG_3#3)bK%rf@}*|)A02Ly@j4}tp9Ku6S2kRV zH8M7_F%ii|+twIgVpm3Gvl=fSsTn8iX65{9V9SO^Ou4C*$l&BmW1Ab>4vkf2Fr|aP zX+CY)iKXiHHya0J6U(O1jUE9&(4vz}hfj~Q zZk$QnL6|<>V?auOB~{NLktQ%bT9*vOI)G^;*@Iv=P$J?)U?t(T*rJ|OdAFwSWiLfO%czeb&nKue| z3WFNhzm?+FvB3A|88`23h((VjeS2J?v?wmp$9mN)(d807Vnch^NmM~vVbO~F!y+0P zY&#jU&>|l}C^oaAk+T2v$rGRceGn*SIXl8QDu4Bpc>kVydy>Yx7lKTJ|3nQKY;e3C z`l7gM-psjx9W~63F_M4?6=b!lGTA#-r%dH!sATR&u z;f}m~RELT;UP7D@daWy{4j6hZQsqpHMuRHM9s8?&QKXxihgFLjOqUN^<|}T#9voZF zI(=V5wD=2SnDtRDvouj)eJc-E4arL!9n7KKSBPFX#LB#!=sFfuAYIkj=rN~>jooEG zW)5?sx9d{;Mhm}YE43!XNRZ&GJ>OtPh=~xC8fLno+Ul;%S^A5+z2_78>oIVB!f5C| zjPTFGH~BqEt|%L)Q10q@h3e-1;oW=?GFwxjbAkm-8*uH?C4i0mw(5rvH?x}VmzzxZ6pfQtd0K8D`(;Hr&hbQ4rI#MTdCscgt&S#%?!z36_33A(Yo zZsb0W&?q^{6i%@BM~h(>+zw-`b^-G(&7kIW(H@<5@k%R->l>FD>E=m{1F!e~ef^%g zNh_-bL6ek8G_i6;IKt%;o^E*3YZa);ogTJxje-HKEOT{AUQ(SlS4A$|$Tz%j#X;lM zyZl%5Ct33qBsN8nRvw8*>tYKhs-LbpFcOcQlEmNdF)EZBjAe<5cOo9^txuwV5%xTa zIaN-1wr~4@A+~;0ELG~=r|yNqY^GC@Cuz;59(g!H9c@p5Tf0#qYW2f7BKLJ=@V5Sj zbeA_n2%N6#6PO*w;k^%g98-H#tH+2W=?{JQ`p!_ei9?xy%agv>Mb5JH`#k>u9r%rM z;wwWvO+FB@?9r967N}Io1V0EAq0PFl+>zx&9Ss<<2RqO+6|SjX!5$`<_9Bl>%q?D{2Wmg<*(4u716xO6km$r=dU3W1o1DQsPl&4f&btpDrPeD$tRCU~Qb?V>(-}U*t7@&WD`XUqY4(U8-2DYRHB~0h`Sdw*Bylb zz>)1_bgDM4jSqRE5jP)zaFRM(l7@*I9Kl}gwH%|FggxE$Lku%DuveP!S25W8$_G{) zh6Mhw<~p5!?hfYqSuz$!7|}SDeKJnm=!flyaD*FC6vvrBT=EThm_Ny)mU3CX8%NbU z!eiHZo|IIV&6vC@nrwMGLv3S_=@VT6y!#|SOInA1D$fETZUWPxi~!Y*O);iN)YO*G z1qF1nav}Eg$^J};L^szT=ctX*ObRr83Z&m9f=9XahsGZoRIX@PF4gC^V#I|DVFD)s zc)7}hNx!usSUaLwZ4YBEemIoh2Qtk+sU!kX zl@^gseCFri(`bLHejupjn_yqtIZ_ZNfWCh^YgitW$jb$ruuGZsOWB}{Y+x(xK`jgr zH=eZrs?#V?g$(`U&9|EK?YYE_l3-GhUul+&&WN*eL#}PsQ?EX4`0y8ZL^=yMx@6(W zTF_OpaX=iCoWkw{D72k_l~W!$b>Lozq9MN3ob57?0>u(?cC`q&^;<1dNqft7Smmeu 
z3Kby|?qZ*(wmDJk_Z&al<9^j6uqw%omthh$6RkpTH$^Jmip7+Bh;?mq*XtDm@~nOG z#D8LKuTQ!N51%S#olpM^!AXj$c1C?0U9eJ+eGiXm)!rtHw_8Tw=(6Wi+D9#%+)&#D zc!MjK&#rg$V=7i1y-5vTlg6L*|ArpXc|GZlf?t`X7^NzyB+Uzzbx*bLEnllE`4 z-gqW~IY-M?C5w%&)USzs6>0BEzi31VGYt7w}qKZ+O63G-gWavd2G; zrPM;2UKV0Q!6*+!0E|nzDv=iv^B&lCcWoivee705F~?(zBFtovz*klOM)xNx^Tag1N4VeY+3 zK=4pxH{e6ee|lngwdBQB1OzG?EQf!GCiwaA|K|56GbtiYse_qP2;*D$r#{wCMfFwq8e14jIJ{73LsPvFJ3 z|J`@;jVO(3wlOIoKV6%l2mVAL%v?`Nr!v zLb*1=erWp8K!PVT`xq>(l)svd{E&Fs`Lmwfz&q?}Ruwm{5D4DHd7ba%XGV@bA^9GT zK97@Jp+Px-Xf4xJgPwdZ6eX>FrO{R4pp7xkmgAK)T1Li+29MSBlJl`1a(-1!`wGzC zB-KB>Xud_uxWW2qVRyY4r%;D%tx?)>a=Edpr+$s*g=>xW?ln( zAiDh`_VrL^889GzR)h6<=cN4X1?-a2<~gz1#vQ6gQ|^jiIUlkhydLG=e?CwZ`t0g79*`XlACNmOsjoglyxXbT-36jMrqk0k!8oX8 zH;GZ2&e1TyUU?1K*>Um~GCMUMdCzLIut8A3^<-m@P zPY-nO1}}EgruLvFYdD9y!d1i}CSMAk&+zA*(EW=?#B4K)*iSMAyDm?+TQTLWr_1~Z z82k*PmLb0E$Vl#y2FJ`FrmHTF%+8h~DxjuAlKT-goL`w0qSlnnPGSK#8;r}^T*TAm zv-HY9My{{N>e{=Te6Awvz~Ac-W@)=q8(u(YSq;Vqi3|su_;@he^+)t0(ltxM_S~ncS){j@S^wiX%BpIV?i15q0eFZNd z-aM+Y>LJ6POhPBf)g`l)`CG=OCE1{7s0#ea2|O7^E0uI1{Nb2PQ=dOqb#!&b({jW? z8}Vn0GH0?`08Qd16!4?1uZA5bapQWqABl=iPTw7#ot=FT_mF;o=U~BD50;>u3jO{V zr1u;sX{!;zX9C8e8qZD;6BgKE1%y4GiQ zRy`khIQRPmPr`RN5O<$tVwpg_gOkVBsILm1+NJ@!a)D?2S$bYxULBUl^M|bk{DJGr zCxhOt}+mB}#y$h&7*Y0HicvTyF~h#F8BC91K^-05wkMNd?Bs9JSRvwb0&`rucI`kEyf5mvl49fegIIL!Eg|D)mX@9GHyeT3 z*Sf%^jVv(0Z!ZG_11np;ewD{FhjZgPo75?gh}F|G46J@toD27X%JjOjIrrzK^5WtV zRnfUTDpQ}Qq%xmTWyUjnBm2#YX- zLDt!3uA^78z^1@iYX|jLOF5HX?uVo~H!9J}9@9DC?)u!`L73qTl@obl=mVoxfbWfX zJ-!YczB-naw85$|%n@v|$%jpff2TBUlh zVdiIZGP;X85_JM2Ki&eAFUE3{`Fy_f=?`gMv`L()Q@LR>niJwu?7d-O; zoYd|@&a%OT+hszX?hnCU%tq)*#$n_)JOECWKHH5}PV5O7aDO^6A~8GzP>(<*Q^wN^ zp~pK^jEk%X7J-89>H@NDAXQkov$Hc*)Wy_w8qag%I(l~P@2)~XGN>YTJT(FAQOP@W zm*ueF9ei`d%js8ClgzTClHyqc`jAxeAo58AbszUj`%ZcRXOnJ`2WO&(uz4FMUR3$H%LN~U|zVbHHRrlP^>fQq1tUqZqt3wOFY4u@An zMUim63|c8~+M@N=oHm&6PPawlH60+%a&$^`KOlb?a&gSZO2+7uZ&dZ}c95wnF*Y1b z2V_kr*<|{%Ip&ypXoG9d&uSbHSG}V&qQDfm?QRyDP}3w}s*a>6|GO>1T;@F_p`?5c2ie63G#W_85E)~rFcQ4_Q* zDG2v2bEZD>Fd? 
zH3A$2h~{I3x|W3y!v*1??n_d((&hrnjML?G{-qDhJQg?k-$X3Yhi0ugJkZwFy~aM= z0@w+47M(Xtyr!+&Vh$hhW+x=(m(aMc0#5#K(15oD;_Db`t~nQ*Slo39=2?M zK>2otQJier$k6eKc>>@Bk3ibV6jz>iiOzXyjh*j!G8Z1_srH-IbE5xC0)g8zHLzz_ zgYB*PU$Vd^{H!;l-v6`*!9cSY;nhiDG(Kx|ji@3cRJ?)avgjdVhFEiPJ2GY#uiJ_( z>7F6+8cfG?WBy6gTupJy7Cf|NV=XWy3hw0pdSOa)#rk))3w6)@Hry5i zK6H9Ml{D&--p%39)TOPtN-X8M%D0RgD|*5j+iN*Y8#?K7_YA!d+yeJr3fdvf#MzSU4_*;~i!eAN&7WtXvu z7Hz7FU2g z7z0MC5PN5&^Vu9cv0GDmwf!A&b__=Ew9d=%USpdfR{NbwKa9bY+B@xvvQ5?vQa!|N zF-`TNjs3v~gjBB{sHW2;`-nWAMzSP>52I0IALpOkDxa*Q^Q-Gg`z@QV{{Thxq$@?h z@*^eH3L=qzj%BDfjofc;FF&pJ+e0%z$GbXH;+Oj-qWyiNp3K)!Mg&(qzQK#ny&wN)-_^5AOLZWa7GuwzjqAV~QDZaN=NkrYVgvt5Z;PZbEH&AoStz>c4rTUqF<`e;AR zXol+fHTSOmAG$z2G2UK2M^~1Yj5m!P$%n+_oDO~|Wt0c$FiB`GIHqcj2%uMQ{%cwG zw-5ilUNyE3>HnT@2Ke;a?t;+yTH=RYS)N;U(dp+CdUH#`oe4O_VSA(*byzC=93O(t zsgj&?$}aDOJKuN}hI2pMR}P%{y#}Tue*9(T8~5-IjE8a`diO5K_@?ON()nYSW!$=s zw(kj_d#lZh&ppHs|xM|z8WzVN_V=qgZ>IB?-t_j-W z|NdAVHBrEG?()OuOaiWmulI00zib0>M6nkmj$e%UuK)d6?NzFuGS4qqGua;RhaymU z`uZ7o=DUG-_l0IncK`cxdkp57>IHLl59r(>t99Y|;ryBY+e z-#gCQpSa^j-!29Hw!tr@ZS2XvrcJFg(M;3Zo580fF8VXC@qD2Pf2VaSYZJM2)=bki z6miafZ#X|(5NnBvUT0z!xQ&?Q@QU!!f1ZVmr>;T6Xo7GzN8ch-&foR3{%if_Usle5 z80h&dw4b}<0w(cEUL|z}L(>mwfCxntS(!bptH|?le74_=$m#+R>WA+-l<{f+35Ofr zBIcKACap^ycsM6tu_+F>xX*V`B|kZ9A2wIYES;sfaJ`y`-EYXUz^az$yfj|$Q4M+k z_%c*@&tB#7M6*~AWbpk5{egw56UwEHjR2{uzY*uDs`0m+iaGf-RKq>Q*mU|RDek&x zlz23R14*kO8Qr3*gV$8P92;Wvn;H>`SCbAz%U0;_lD~F@|GsoP11H5C!$jX8<9+Qf z7Xj12b6aOJt55YxD)XVuLp;?1HlPMllJ1pyM%R$kiIFv1hYxRweE()0ej}`;PH$wM z%TfY;=d`OZ$b;!mt}PN-j&VH?)ri0rXtU{RplmhJJ1=O&>d76TUMsd1i^&5{)B-i# z4^Go=$5@`z?L148BI&*SP8w*ys6UyeZ*C(B@QB%yj)g^&k&8NGt~043XCXCk`6eOC z-AJG4YNu}DrP)N^O>}a{e%Ys=t3T(x7jbo`ZKK@Vc}*lkrzX@;wD0 zLa8osXKUkLNHIRsF1D90(rG*?B{HMf=#SQihG_X{amctnnJ2J!<%yTK5nHyCTi=mf zHzlrxx4tejtTGAb*`A)#DQi?)4fAq4oHQ!CTmxl5+E4S0msPS+z_j5y3|t^j^SIgp z^2CVnd`qL?+R<4Bjo!2^{XeV!`sl3c$D;9ne;2o7j5XhEz zPUM8cQy_~m;P#&~-GmKU1U@UVAxibY!^$Rp&2HNGE?1Uy3)Et>32Il@i!QsBFn6P4 
zAc&z$db`}MIwSblFvtj>|K%>}WBY49N41z!sQyk9e8NHcfacZkB#%q>kXI{O_v%(l zbDECE80YpV8j*wgqF%i7{*d?IDVQRVKl<2;i3nW49nUdX-_NS=&8=lO|ItrBly z>e|hTF<6v=;a5N9rWnt7KRbIIGJ)hb&Rav;~M`aCYh4XG3k`# zMBG(TI~c^v11(eB*r?mCv5lh*RvJ4t^sngqaU*;;6L?)L##sw1Qsh;zoWAlmp-(D? z9@VT67Pm;x^UXRxm-LGGw+gM!`nl4pp$8JK^US>^+AYF%CMy~Z;;0bDtCr?aJ|Imr zYImQLbChfKGR3n!zT!z-?0#{z@ogr1W>u4-w+C z6J#s`t-_z5bB5~xDG}baCD!|H{!+Nd_$>$X&6CS~aochRF-{j&=jT=i=ou-)=RN z(CUFx=}nIc8tJ!Pv?br3heiK*Cmel^tQ@T=p>duQS`~yF#jy5qEGq_X zfc>E2VTo&8eS$XAxt1owYCcO&+}{EfIOAD9L5A{rf%L1;^k9w$3?5Tr*P~MF!I)SP1sq*NGU5Cu$;?m7u6}|=yZsVg%zuly?+{VeuCDC&(tx_ov>Nw0{TRhs<+wI_BHmE{ms z$7w?Z2mDXGBDiH763P?ll%~|NLneh)6;|TzBUd%9GOT^xDPEpQt#tCRd{>}e>WfsJ zGx_&k+om64J+|LQmKl*!Q>DR}yUB`^=ORMRO~zJen2D4C3{LO*tIIZ=4{Tj@3MnDpzMCVD-45$>p3*fv>#i9@3~xpKS}Ms05p&g#QB`Py z`UBbJB&%+)t&?+SFi#$7E3d3=@*z6<<&|x)&{0z{UoJxbJBd{XD`3uvNvD)jsF@gme`@E3K-LNL_#+JX(k&s*3*aPNV1^ z^1=)k-s5+nlW_l8U4N+@%B=j_I}aJ(`tqm483`WR0k9xJc#@-&S!t9ni^iDboCRos zbQz0UN4NM%Tdjc`Ni)aEt~VznX}c9qFCl$@6)X&l3vA?GakI61X;Pp-cl?p$v*1~r zO@c7LcA`RT^=tLqm2<1bDjAlT00+_}JcG&%s4bmOlU0!)ZK0zD(sde!tes z`YL@!Tgy1-UGZYE)a}$OSK5F+cY*Q-4N*&HvowG0>svKGZcAiOSPlgQF6x^a%lOGK zcKGaVF&V{J>*8%&J@4+^09iuC`?w;HSS#*6W~B=Y>H?#DR&s9Ml^@93mcEyCVX9k1 z4GyIu1l_^Fn>g^R{%qHZuB*Vuzpf{)0WqjyV$snpUxbROmfP~ut_ zoy*uvokN}5qI{W2lP(bAV?Gap7IpBnkn1M0dpDj2$1meW_nV@37Y`(J!!Jm@`~3x$ zu)e?&&vIA~g)LMd=Scr&zy%t(U2~u|L9bwtsT^Si#UX_t%lH|->3@C!PKcsrp;JIY z2$kX%;mn140N}`R9^Oz^)_&P+=znq!7%0XdVw2zRP8g;ctTxv{o?rETR05wiAOAP7 z2*p>VeS3bjleHKqV^jNQ@yulW(Qiirf@AdvF2o|>2n{CR%(VO~8pss4viR|L{O1}r zbH^F&8jB`MVMs|+t_jL|(2F41_$+(a`~keTw0-C=Mgy2Ci{CG*Q1;mHuFA&H zxJi9~E2AdMzJdi%;1ng*EBzEm9^ME7s?pMk<8oAmcOyEs5jxRfo~Q>lveg~F^aQtm z%lL{o3Yy?^f&hwehFs8gK}O)k1%w#90+`BAD=CA1Thn3b3nN|Z>Z5-XM;Vz7A<;}nV+)%qIuhMtB-V?P>P3?!USn*yn(Rb_`jhF?1bUup&9 z??KQii+~=mOKM~}tcf(koXU<}P`2pJQcxJa&IeCRp!6u9nD>zp5To1#?0MNQWhQfk zB1HwgDKAB0zt1nPh|mJL;wCz8K`#(9<8x~g;Xro0I7d_@y zr$npe#?-_;WbK&wwnJk5FPrIl1^<%Em_a}nF-VV7Ds`qsf-;ST%)&6qWya+x^3j15 
zE_KD)Oko-^>IfdC?K8HlauU;M$LHMutRekVK1DE5D8zj?i7lC8__ zdDAvUN%Yf151J`?1GmuLcay$Hz&7pzdvu}-^T1ca0lAG{BFeF^2h-0JK~pgkgl3{i z^LHI_or_%|!PAKu8D)X$*fi^#DcyJz$M;W9pi=M2k;-uhH6CUIk%y=Me2zAt^KCT8yvvVr+?f8E#+slN-L%e0l6f{C63?^=ch$UkTR${KYy^*S$g zTQx-;0t_M&8#G~v9Xy0M60wNz!T>;COrcOaoY>|`FRLOj;mxR^Zxn%TVyHLbaZ2xA z3X=XVHq_!F;l@}adtg~7*;WM!F`*+FL<@BGm_vgARWN%`N73Kf%nM`=YhUuL99Rx= zOFvZXeg)Gtu8ORbDX2v7jbP~eFsXH1I}f=w&>7ot4@_+!t=*#vs||HhPuzGsLOCDm z{#(rg30n~8X;>x(x!<^7Qm-5vblhP&XN;EPJ@s4*wgW2lqWupM9yruBnEUAOUoWRH zvxzi)>h>qKJ4*ItX9opBjlbWe40gs1f;;b6V-M98$CDnXd6w5I7)dGEL@2BvX zSb(J;{EuEr6|YVH^uTPs^pvTyh2u1@<_E;)9zf-Rd{r!oo11FuhuVomTZL?8vPrbG z&9jl)i4ixI7aCZO8I8#@o9R5<;W0eU(_|dPcguZhl$nAfI#x{kVr>!i6N%K!wcPYQ zC?E8;*Q`52z1GrQhDnnn#eu=vl##cAX-GigdGqVbn>~APD$i1g<_CX??>&yi{BgU{ zv@)v?2jHm=U0zp)W`eJjMEQ>?qT?L|zveARSQt0NSqVfnh>#g^6CaA+54_XB5YmTQ zkR;qa7+yAR%uuMeZu9Bb4}bXpHz=?6lhkhe>n#| z5C#;!9MqUeXU95JX!O{W6O~Iyp1yT==Yup$7)v5l9^smDX-yCO2j+eA`Hoh)GX_q6 z^G9(Y7SLrYc`)_0?f93%JrXC}8B@71Dg;PsUFjd=(y&xZ zOiMtV@dlSR2qnKf_rHF<9*Zf8uYVbSIaEzlkAkwu5}sWd%7X#wOQLdE8+)q)X2lN? zUvd-u!P{<_%Sr&|X3`qEqcGq>6`oi)WsQlsYG2`kmSYk|+P6+fQ!2jj50sDGe}Bj{ z;`fVP$46Uy#nL`*k~=}Pi#8ki@S|mv=RikUXF*gc?LYi!gZV{OcIqT30!%jEG1Av{{m>i8Z0iIP{NDyoFtN2<#=h?Q4AnIFVB zEMOBX3si!LLv%{ixa%Z$DlOZ;2Ur@|v{K6X5ya{hMN$C%MhD!F=n79P8-)bh8)6A{ zw@#7M=FVNdo?k$>Qz96kq*AvM=n%D{_7O8 zOSVCxRmuiX^SebHpUg>;;HBiaYZ>qsk`oK`W! 
zJu-9fc8=xgUNK=bO)S8#mk{E_qP8gYnmfti>x<$~hkiXJ+PO%-gQD-g#LqQ#%Vr*A zO;98iww=7`C<5w#drX7aEBlW=KDPqww;7W=Qz5GgfydH4pn~2%D#4oW3xMJ8-(5=@Y;<)kPe^S{eT>6{V0h@w4ijqsABd0 zWY7CcdCw)|NLlx`9rK7lFtZ>ze8u=Ox1Q~vg8g@K%Y#|CqTjY>K#5HPOW?BCBGH$9 z8!gE=@y?pT=a^BPpHTr6R**i4Q!xTPL@8Dt4ANu@%2%eUr*t?0QHiAq>56xp z1<0L8;Q#F+z0<`oQ+guU;wF#x*IS8Hqw-*{drMA+uaMK7q$cSVB2`#ctfRF`2tP|GM&RZ z&Nkdz4SRM6)6ymBi}B34E+S@ElR*bH?w0Y(DH6e{7s0Em7s&4Nsce_+X|lENib@iW zB>j>5 z;51OgK|t{H-UV2EWA2V47s^l-S^0V95x;VdIV0pRkm3$$rp|<16}V1F0i8y^b6(yc zsI>_t0IvFsJ`K+8?Ts{z1wgxdWDMqYWDtJbC$G;@?!&7c8MC4aTS7&A_+PCa{cDCh zj+L1~kNyUf?Eg~XC`3Lu6k~fHRr_}oZHF|9%fXbs2kW>vDTDhPL%avnG63K$`Eo=m zrmAmGQFDUbM({7S>HkITKMmjHx0E#}(%1L0ST3yCln6X2o_yTL?MAS0a*Gb);q+gO zeG&M)_v1Ju;(4GX`36_=tjz_JSqYxcn`TgDtlP)~OnafOe)^oxUoaia@p(4df9cHg zBJ%d+BXdJEr#;a8gk!k)>m?i6lQqDeT^OGkr8;WBgR=U<@v0f$b{c&2gbN=Hvux>E zkjzvPhB(yY5q2Grm>a5O4Yl0zcXU%SDr)T#SWXT-`4)!(#1-lJz7eE^E(OurmGO#! z2`)Xr%z@|torA44%E3sS&9xR+e&03 z#5ct1!-myuPUoVggQ9*^KwYCe3}KXdEde&C56WFI#@A&SSG{IW?&?H1{M=M79isHd z*dy27N^eJVbT2TD?zqf7mjJJb^n-FbxdOmlt>0ymu3%gnx^ja|A7f}IMANr{sPAHV zJ^%kgF+yJAV#GmVj!JA|QBIB)sL6uG`G4_`j~-^h)1= z_39K!*Q$L`N0cgkYklo|Gq4@KB_EB}6^r|D0T1#1EZu|`8!mh68^7JILg=Qv?J}wu zdUEFy4?Fz(MSx~KI>u$01KF02)9Efo`Ic!4^UI0UAZNV<;i`JXCy`VvV&|e6NGj-G z%|Ocj)n9}VjR9g?5LEC&nAu%EEW){8Z!YY~2>*FueJ`dqUxqL+wzMyTP@(g~v3!J{ z<7AF`L|`%Rt=92!M~9tMF49t$L{fl?*e(2Z`%N1P(go9BAQ#C0SfNs}+_;s``Rn8MO4P>@#8_de&PbC(k_EW*G+6E1w_#pn_74qE+xk=iCee~b|Vx zoWv571~eR~WDQL$YIROwDS{R~_2;H_tV>s(bkVHZr@I7n$gt9}GYRgsE;`QAEAn#? 
zECEaujp|-c;PC^X1SI5MTa-C=iI2*{6(^ zUiW=;n6SKu(K5o`CTB?Z#&-@g3ma9+kf|5W+Q;bwuNI_MK3_t4iY=@>Xpa6hew?l!jlLmhpj#tesajOCx##;bi@*cp-yppk{SZ_&{SsI&whB~72Y zlJMNZS(QXN7U(~a%|q({C~E&U86uNNh9tbz6!r7;>&TIuG$r%2i{$mVveT6Kwqp-; zrvVA?5i&Hu#L4%eD=^SoYkIp}#S1XDJQ1&JzmO_@crJ-Xb2S8Wi@ z+Y=))G1Q?yssp*_`jnr?L|CdR-rIHN?HjJ1cH%giir@_*s+PQt58-*j1`I!Y*Kl2C zPBbDtaqL+d5m#1K<(TjWJnoWqVOS7MmVoy*_OwMf;R| zg>o)>OBymGsf{+#8?@vq<@FbB7}%N@;*JF9m}6yg-N`5MA@j$*$t*+Ei&S?Tz-5Gi ziNhUz+8Xy!2axsO(pLvFVyrgTowU^T3R`^?f49F}r0(_OF(fm+A)oEg9hm38CSIR7 zuB7e56NMlh)R>6l)a0?a4jqh)nZ6@8qdu0LRtmpGi}O{Sms;E#$jfBbXe}b=E4S>y zhmpH~@YnEa8h5|KSH{o!KJPXRm~lvibM6;5?|+Iy(Q7EYkCha$Ho8J;(tigRBXg60 z8|R;!N=Y5bQ~~%RF08UA*O(i!UPSdUIL+G27}Yt3J|_AaWA3i@VCHLTWvE3NxgCk^ z&_`tA9@8IIKSPH_4(=MD2?DS1*L?GW*U$zlx5nrFNQ*lCA;8lTWaKQTyeBu2aZ ze~i6#T+`p%KW@EMEEH6bR4J7P>6l4McPmJDON@!2h;%no=^E0~iiFgV8Vw2p!sr@o zz<%d7@O|Ik`*DB1zwaL^ZfrZR^Ex}{x~^wjgQXZ<6BF|erf2nNljfTd@nR?{CaD>V z)z#F1)lvSd9x=n(Hx>(uw_2v1Z@zgE1|11r>oLC8|%06D@Czu zw?`^eIcF?(fFG5|)2Qcp-J|dGILV&L-Wdl!FqZZuVlsaS9;0;At@vfYAgz;7BPU>| z`9A+q;F)*uT!+}{B8(6B1?JL8aWmRDT=b$I#f?<1wsI@?*BLWg0MJZkk$SLM^let0 zCz#hqlWUJXJJL-|{t9qo;Tp28JQXX#w!A<#qFGwZAf-B%ziG>QRUSLM=*YeWx%G{y zNGZo*1hPl@vL3|bQnH^?E7uqcVHgouHb{LcoA7p&aBal90N#;Isj0;Q*Wc)k}>w zdg3b!w{<=f-+oj4`}s4l`mbyExCRiAEyG()$uPk?tV3%4t{{2aJo380) zYXfg)gQpDPZz+e4seOr;UYb=L5IIZ|nn8keeXk4vq%bQ8AWdAKLxHlm&AJS95&gq} zu_M5YWc%`ROZ<=8>db2N@X;P5v5`88;B5Xcm+`wgNI}x;AIsCN^Qr@2^ki%iWGxe$ zi2LPia9sx!F=V#h92)aqiI)7^DI zjtoyAZ8Oh#h(9eHF8kaBcYQ))%7zp=JwI~zNiu(FQfQ%r1S}r0vM|`T_MEovRx zb`m)=;q8BYh_&G)3C;NZ1l$@hu6u88Rw5)VhCaTUMxfZByRT|1FdxP&v!eZd;@>N;9SBY zb{~sp(BZ@rhY8{%W?TeKcjP1xl%@L~ILU=9&M#SL!4pd8zdRQK#9L;|4klbyXVMMg zMPMTVO&VpysPr+YLbo`N(wni7N%)r?`GyS2X1q6?#3800$^29BNWzT;N$erMN$_;D z&6TxKhrKM~k)Mx`nqs|Ujds~l`kNVFB4x(sOcmdV&HVORI(JaGfTjcK)cgnY>Sw!3 zXfub3Nsj%Z=2TS~rYdA*>-}jKdz-W#dcv6yP4dDxdYn%Si!v7W8L7O7$LJF1j?8p{ zdJ$7g-$J3Q7iXvLl_IXXB4V*^it4Oe%#bm~#l>C!CUGak=%Wxvq{nEe?eNUw(?PSU1f298{`*+T`Y%4I(GGhC-%<0oAfCxnaF&pL0cc>-to> 
zS(dgknoT%Ya#3*#FtK|4cNwnU7n*M;8knEFdCg%!>0*UWuA zMar^Yc06!`HQ|%ItMD&XDyjf5UCj<~-`MO`%dwL=yq^?-Cl)v?*b-`9*ZmZ%qLmod zyK*B(15nhw*M=!<|BJyEsyjReVT`&1=Sdzw21$L!L(7jW3O2J*pKc7+$9C?ZH5O|6 zb5-Ggwu8KN&1GvywyNdci-KYQ!i7@#!A_4jlY-Je-b2`CyfjW`4DiQO4pc6xkG-R@ z%o;+TjPc6*78wHB8Gb&2D*N>OgA(?FXRzxrxJ?14V`F>PFv`m70WZdJOTb#gvCLC2 z0Q^{eko8Hr*lB5KH;i|Ba{}5PJ>kO8a?(}gR$JOTKHQ$W5CpNl=n;9wHF9AS1mgz% zAK(mw%(m&l^Jaakr79a?ZpmAw;lq|U`zSR<9YvmF^X*o}( z>=b}^i!?IOF>U2Ed*SL!s&Oz)gdT+De%QY@+vYJ=ZIo9uVSc4+XHI?0Z)T7F50=`P zdcYiB+J%ZojtLtjY_63!weW&wCkJ_Scp#{D3_?bNWrOl@Ug>BiQ8NSC$GKzIX>H5RBw0^}By$+soy!H2R?>GG$m!=Q|q$)&Uh@GV)gw z3(LfWay~oPUr3bgrEVFjvaS}TBama=ANx(?c~QNe{NX3gV$}0SJ;fs-2&tx0H{;S) zA>^TSgHX$J(hl4c+l2W7XZ7D>`R{J#WDP`oR?1%PMty)>6Bj+z4KoCF@zC3rR*qA6 z%}Vf*ecY^oyb&1|%bN@S8aIX{jg--J5%MR+VAHr^*zp#MhqIeYcpb*g682_uA++8; zr7x%!Co2zlBZ(>eXjW*ux|aw7!jm?p)VbbhGg&~(xV>Xt9P^f^?b)N^pRqA_%9%ev zK4_t5!Xj!qK^Xe(US-IwG-Z|?2qEywo4Nz%b(4CPXOh%X#{8Ncd;C-uNLJ$0Dx+9K z*D3G5*>4oD;G#_8(*0;muN5|3qaF42jkf`P?01(9$WssEah4!Fgz`RqUk$%8=}6Q1 zXKlKh&G-VW?h%LDe<)k;HpH!gJcAdb;Mf`BmsxT0xG_=9j@O$I8`e1fnj8MC~me{yB*Mb}EYEk5a>| z;NzZO!eEU*NUPWL9@bN#-=Vz})hIKgDUadPdWzCJJTyC_<@xQ2tNuTxcID^A`c zMV+_ZNW`$*-te>R3Vccg7TClig4`^UtakKSLjsKgo4F`eODD(dx5HD|1ALuS-?MJp z>FsSjDr?2M#+n8;QSVgao+hba?sGAf9Cw|m9&4)>j7N4;^I|ovl z$aaf9$motaWNd;87Kg(DI)F36xN)gV8(r`Lm?|zYEfqi&7Ryk(u&3P=UOV(^KWm%c z&SJo_)*sa ze*4^c9iwQe7NVE)Cc8mCuM|@iW9N8lJ$n?}o0^4_$Fx!t5;Z7IZz|DV=0x?8E=OE_Q;|18EmK zV9j0oyPUXUZ423x){?(LD6z-WBvENc#L5c&vYn1vxJ`NT6O^m!fGWH2e5agg#T1#2&v1cblr2W~9-;=%nSsxc@u(qghS3r6DKvDd6-vRiyDO~7>sxBNGzk^HIuNi zSeKYmG2*oRi&n3~6%W#eiGPKLfmS7R-V@7*r_=9GdwpOKOin zNj(<0q25}l6T8WxHFeP)F)2~;IM-JHBmIn^7O>(CKn5M!sqN(UvX(XSMhc4?HdkJV z@d%s}Uca;X`zS%wlo0y}u+{q~d&jlOZypn9EwiomJVrTpvT|W!QJ1~ITG_+=&#roo zA^1>Q0Akvf*h9=w+{rdQfHlhXJI28S@3aYA_NkLo$4rxLme}MSoZD91+K!K|kd0Qa z7NO588!P!s3xG1ldX6SW5@J?NCBKo~g_}si0-JemBR4a@gL4Q52yRxxqtAm!*$z_^ z*8iBAq(KCY6j0jx+@<%|BmL1G&Zx{u(Ito~vl_rXmky)|fNwTQ9QW&6 zfQI(x!OB6_vHcW!hW&dYB2;2jBaJ1B#kGh<`NY-NVU}AkP8Y$Z#ox)ZuFn>;?pn(S 
zcMaZ+eK+{+-v1S5gBU?vPEFtU)-~TNdVb>C+2KUdY|12<7`&4dx_4yuk{Tm&?9)25 zJ=7G%O{+XrH8>0xXKqv?Qs?chNU{h=&Qttl0BEzH<4^MW9=M1F()s6)^NcykcFdm%C*%bk(f!ZF7_J>)yool;C4 zm&*MZrRhu9aVE-A3{BarmmonnEIx62;muiMwbX<0{3IA8g}Qby_1E4e0wB|_?B&rT z-!HHBu|26km`~*psJld4U6uyljvL{Ni-{se{PmuJB_ei=K0DdFjH$g%^}3TbZPZcp zUjF|I1pPiAa$_NAlJCYANs~z;w8b|-XdEHGa*DnZ29Wo|OfHioL<;u>usqO0CXFIV z0y+RGABNk{6<;OAh>ZT3W~1%9|_v-20u&jnNy zA44{-z-`e5SOU)8!8c8fKlOqrc)-to_KxLkVGvML&eJU85HAjqJQ))x8M-G0e${X< zb5Udr*yLlMKRmR88~`SQe*cG~`5OCISI7Z9Qr&sN6Bwxjx2U0ETwlFTd1PIrrZa5C z;bjlWXf9mm|52d$f1^mH+NMBu!H_p-qJXvLU~w-K)3aq-;8Cq-#sSNj>-`v4ztf9E z0~;qZQ7R>B;zS+io|B9Y;w?ja{Dm5BVwI06eMj6oZkKX1|JG262PEYtL|VQ-3Q0N- z*vz6)^n|nKM%ic0S}S$*R~)smlUWK=$xjD~p957F_k+M)G1}WV5RlX2g$r}Q zChF0eR8?yZr<=zE0#k7R`v__uG2x7cPu_1SJx zhb=)7j{S_@Z#jh@lphdHqTs_XFJm7nWpNExYxCQ+If8Cb|9PAV7YWet3SUGKqNinx zAmzkpcfODLLtS84DkfK&Yx!j;JpL~{Sn{=PQ0u;JQppVw2uOQTw&fpZ%8)Z*Jw!hi1nxVc&aBSz-P@DFt3xae;aZKn$ zB$ux>MU?T9ep;U*Gl|jSyxY2}zDgbfGS%&{3A(-IfBDh=(4gWYxoV%MLjFh*5fML& zfBkx9z~h{0)|HqsDk25n`4IK(>lRo`_}&EmwpLl<@cKTbEKMbBWZO0Tz$YMQD`6qUX@QyJvK!P}K^c0@3!B3mFeVvJMIhTtf6)lZ@hz0Z8vNNA==H;nDm zc|8vOa#~(Z-a^KfvRNAL7FW|s@`csVWx9V}k2$xbSObk<4-Q%?>O0IkrKr!1j-wG1 zR3*z~%@aGM&W~KzAgdIzqh_Aj2C#%8L;* z54}J{Qu!^5^sNO0gZLtajwICXG63h%+tv`wZpwdHd%5-o=z7*P+pB8;cDd;3)Vcem zL9t)rmVg`JzMAC89VdGas{j2x&+Ea<4{LPb9|A0ahN0}Y_Dt1hWZiQ#f|(@ zD=5$Fh|tDD^xeA{vY1J-ThMwt86$s(oX%)aRP{QU2B!}G6U2>u)tW8XKZyUMX8XaJ zmiI-TMh5-|0i@Jaw`Jp@8b3h)BHsxS=w`pmY$wg44qFSD%62q~n{yp72l^}s3JXwM(iSMI~ zN7+@nbg0u#Xn~SK&aI2~de^QaF}6Ig_2A7dO&!z(X@UUZ(0UXk@$>Oy`zu`op!I{7 zy4K*u;5?5}4B`5W#FeSU(mc&e{kF4M#>)TT(+ zxoznscAgxr!>ShtU<{~qv-W>xe%-cQ##%S5UayclW~mc{cZaC23^J4o>) z!&S=Ivqlcv;h)CbIj}bJE*)j)<&^!=b5sCL0Br;Yh5~@DW)!jzX9kd&G^ElS5(P;* zMLwlrscgvC6MNnTwF68Z@MlbsQ^sN#)7&Bb^iX)fEApGhDJ4Ac&f#C2XS}+iR4CDZ z-Ty;&fcrn7XBKkGYNrn$k9AJa%8?r3ZSg>vtXa4_F#N;BW!K((g&o|@R1bS?#tshc z5)JG1bQ6OV)n5%ieA>&U6njJ)ox7T9GMU~I;3znGSlI|4*2DeErs1_&k+UZCOMtal zZczWXRe&pl(uvTn0jOO7=WzN}tT^K{-2Iz4b(?M!%rar#w#BLB&O6YFfSTMe|4`i^ 
zj`<-tRTs+lC10lmvhmk&p1^;KOZcR9{VQ?Nb>dt(XjBtrD|Uu?yK}3}el!P=1x2)E z#mvZ3p7)K$rCY9>DIX{wfwk$L%X0;elRA`xW~$YoMz)Z_jJf7_h2?y9mi!$f$@`SO zqH=N3wr7c=_Z&ZnP`EAtz*BDA%F81VNCuvv!lB#dE8Iv^L>p*b)52%)u+BiO!&>y!iwUaQ<*VJ3jIyO(rpdD$`rBpKj2nkJLaul> zZA%4R9uEMsf2VG7=93R6Nt#Hw{=w@`>EWUYO92~*$yqC9j_48VbV8J z5!-pKy`msEQW`Kebbs>+$>5Eo-v%VS?Mh5ajsOQe6!^XDc<2OEgR)lPw)_5Z--J;x z|Bz#geT<&@Fj^*gra4Hum~6_y`d#C-WAvGb2d(+d!fj6Ja$r(|^f#nID9cDFVjC@vi^-9$k?F9hk1@$s$c27p5-WMm3i)Zo55ftZQ;t8hR|Yb(DT|AZN0(jymK0_od{D;}hf6j6Og{3^`XugprlL75oORET%iJk+sD8gJ)k#!(>eRIR}}jwkyEb}O?&5BWQ}u5 z_6y@H1biS+PMIKi`Ul;t3!Hm?g#(I*O5HVz2??t8#Mt!5^6L3h^F_(^pU2}=SCdN| zo(*Ux%f$t>cyb_5fdU&Xd)wh>&cB?N=@zxQg!*XhO$}0&EnokuZ5*o> zuW5V;*c=g~XY$oDJQpu?KBh+5wWj|?1q;UB=M4l%MdGwa za?{76lw+CH<0NV_UGMUC>BIx0gm|U0GbzYd=T%Mm$XqiHxW7a5$U-&0Dn@5T!G-Q6 zuQ2~|gK0Gn6EBNChaR-+q;bogP%Ilo95xp?v40soiGh3muI;3X-|nN-YXVQ#bszOIL|bwGS+>;zrTGoPMa!U_~V&&=CW0tOguP&^{_t*0qNy< zEaCc+*B!KV&$NExfRSa&o_4OfL}r(bwLTEns|R@`a}ml7qygBz=#pe~WR5j{6d9GC zNbn6fvG0x+PJ&XRBbNUe**gw_ViHl$>F0?*vx}Xg_^ph56A8uMOr&cI-v9b0V66OL|B`-Hu#W zJ3#k%T2qy^B8$0FZom%tsV0X{?2~uh#zq3s%T#0xV5;9>?Ss5yUOpkc^>x2(;tQ$b zC$Ew?bezq3J{%B4fkzSem1~W32FQ*?LS%G|SjwTY^8Nubc(H`b2Q$h%Fxr&@e3zi9 z4gdQL|54;d1Y!M9G{Fb%KmFENa|rnze$XtJI_U+c6@K z>j6oaGJLd!dbRWAd+^0ydZBK;o_s!{6tMRm32>sPDLHb|+SU33f@3-era+8=A+_~; zbE|oc;mBN@Ju40tJNmnBhI(ZEU&`vsgXC-#z@^xp-?6_VGCIhkNG{{|%&tSo-RVC7 zx#KM!rn4Obg_PLM2B{}v!&(CL?)g7io9he)bP6QuZU1FSrheJK875;_P*ykLxbRhU z(KqqujA!WZ&#yFpIQ14!2d8Vg(q4eMUAf&Ti6{bGL{-a}*FR{E*FCJRjs;A3xh298fu0$&JJE zeByXP{y2!BJR_}R$o2x}DLk4+C-Z%i4--&v3Xvj7ZfBn{y2LBi2&?G7lB9I}>v zN?1Xe5MNhaK`Kl87?y}3Fgfhy)+#0zVzkw{8UvOiGV$?*C5W4AwP~~U&w%AnGB-T; zUwY#(5CcxsIjR|z)V%(Ha~JX~t!PVldI+|8aDbwsW&qp8+WtGje~RdW)GHb)IgywI ztT-^T@jYQRM8fVqe1kAIy*1eOIFKQBY?4@w4HT*LBQ&pCbdAicBaNHNZ1TEcveUln zrgmw7BZcjI8mLhxPdt~=E)h9ku4IM-w8IjBVfZ71dPJGFBH1nm6x@k9)~W{@Lvj)% zKFLaG*aeQiyane$XRy2pVQ{s{E84sf;q%5Ncj^8*!%7Y)xrR@EWw9#aDlz2e8YnE{ zwl=O)5I|eke7cXV`OtYQ+=hFkg6~tIa`2?w%52kTqniFTE}?R34tq7YR@tJrMG{+c 
zopFnVem<^|iuOoU7sA-(K_tQKJi@ub5c&?@^8_f6@mY*bS-%xGaA*+?`xEcVas~K% zL}bUNgI*^ZT*w;V=SC1KG27x3OQJkB__^`jqAgpy0Z_NEW6P|O>(`@9IE3elP|Ptx zy4DzOHF%o*T9rkVE{DK35K?Q|s$|tD=|%kjv>^K&_VGh+6rQLdzWK1_(ez35o~VVq zk0_=cl(A=Sx-N)4N1Rqxp%*xbrWTo+&pm{^Fps$N+~$yFoG#~OR3U2frK!PWsA<{2 zrGdA5p=us!EN{?~%)?#Ru@XE+*2|lpxl8o(mUZ1v)o1w`5+4MM$|{egW1 zl;D(mDFg11#Sr>4KnOz?Zvfwc5p)aCgGPFKrZurijWWI6O5HX-e*MbsL(1rXx8uu^ z1j{d)uRyYC6B}Ll_!4C~1U9zCh1q`W_)cJ$aI>{x@e6k1a)S~dlkSf!cQB5d+EL4| z0UW6z+vCl7Ct9>BpXDuG5kv@tI$H%BK`K-y$>*oT)?dBec^0@c+2Gx-v~;sY;1vL~ zPRDle8cE%za$hgoEx#%64#fPq+vQ30jxkq9i&dM77-T*>iNtA=;rJ6* z)~`_P;5orqSuk1-UHl4gD2fQ&) zN)NHAVOq@P4z<9+_nlxF!6#A+My1nQDtk^OB*TIFF#BnfkH(4iKI&=xOq63|nT6UP ziCC@lNNGsZW$c{pIdQ{dn(k<7CdGoeCr|q|5kt(SEm>o6>fG2@rsc+nv!}l7G{J;v zl6M7%in$V5iPsO4&=Y{onM0DWxxW|Oec>N>N(2RaM7v1qJkgxJy|T#q5d_dBzZJa! zI=L)4InMeC9@jl|KPdKd^aNRdR+ylZ4urL)p9#xIS31W#MRPxWDwwrUm6koYM|@9d zIj)(f4WIh}wQHWdXWU^ZAnN|4bWy*oGlTivP39r3ta>U zprDO=j?!|xGO)nHG{;nv)c#i%8M95Oy(~=8IQP@XSg3ns6 z#@Byvu5P2F?AEl!s_)cxGbn%FxWQnuFsuF9puSn;d}q5dl*5Tt%Y6!u1qD+tu{4pXUa9U#vNr&K@7^&C;= zUUAhi)3;S^rzjSAtb+>&xoo#y$64lB{Afn~D93n6439laxiw%wipaIGHC`anomEwB zN*lv5s3cSx0(c>0F)-wPC>zT66d%3sXKHfOLus6s1+j@lY^z0pJ;|#g5A|c`eZ%9G z`_iC<*!gG^xQg7(7I$C>*`_s({)ihXAYY|6fweSD<2eD<&Rx4scCuJ8X8s#%$ocv= z5YiM#?;JFU3;FGup%Z#g!f{PLTRa+#9%1L|1RJl|mm^d^7zqlV9HL(b2p z35iMlXmtYv=`xo&)mdX^uz`_u)4qQ)yhz&j^IT**%%CZc%x<%fcDNxRx@C7UNL(Xl zZtSa=So5YT;Mt z-_rVjG74GL{|yvk_gK&NZHMV5o|Z`zo~b;-^P$6YTgRC#`(NP{6cX}b4!LY(w=9nX z_Ct&-*U9t$a^1wA98+zEUB1;GC3YXgTv`A6mOVcfNw{}%9<nwynjeUQ@(hD?#^R)IBgkY>VE zuRrDf<`z}Hhx!;jB^_n*EMVTJjeSOUPzA!TPrss^_wmjwxqZ8M=Gw&R6(ABiX#Wsy zb`X7@hC+L;)OZyruK$w1NYc>B7#+~i{1FZ=4XnK}uD>M@!?R&hPm=H(B)OQoF?h}t z*kDf{9GQ5MLJB28&NF z=ttl2@daoDG2jwDLEcXEXsK+sPdI`=Gn7YPA}vc$OqnbyhE zp`%@?gGLh__9vCyNuWf}NaMciX^=&vL%J|5(5wYTaap5OdXZHJvowA<4(LwJDB8#} z=twUi(+`Xi)ApKY#({)LXx{(KfbLLd{Vk!i5Y}8hX5Z4 zZorKWKf22%?Uy#s2KeA1Rr^n}Njd-AA(;_~`l5XjZL(#FUd|Pk-ANK>WdMwch 
zK4sux6WO-bk#cFb99mdMu1k<6TgKUJkC5*Xr510*_!>j5)b`oTrjcQ09fQE-;Et!a8gZsY|_p0{$c0-*EiTvih{=(~xWGPkE z(u`jF{E3>v?L*0K|NrCu2;G)1mw%CrH(RfjakmSd*k#K;kjT!xBjsO->*NYfP7@sM ze@Qasq=l9)LJU*^wwFu#;?FuF2(yQhOG}ct^j_jRQk<4f0d@^&BTtb58th-v6p0EO zKEql&v)=@a#I7G^&ISrEoQO+uVRx3R`YWk{uJ6HrgM*7}hmCw#CPg6w7F#BRWUuun z4tMvSw^q5FJP3MZOej-F>y5v%xinotxD z&`R?@cyIuE612-M|BVn`K}xd8Mu|EZ`O3QPEk_=BxVXf`7?rjy>&Kr1?Sx8qx%Y$W z^c|yiBU4w(=f3z7e|-OrOwLn$`+slW^?X-M40MaEC6H%(PoL@R>~s~(gW7xV_psx^ zMSg|yOtZv){W7L-m}@)s!mN8golM8BjfVwJ64$JI7C21zn7f9e z-B>xc-Uj}!XB7WXQN6>~>naB`W5Qo6bZ}T%fxLyWy;6p_JTZ}0BTjg`H&s}dQ@=OD zIOj*MG!m;s649n-z3{mt*IZu$^-iuJ?$p>7YFoGQ1 zaQYlM;(c;*kRPl%&^)C`c*Iec(R=@jDRKKwXN@hY#G}D`yPs=kLNYA~Svd@i{La?f zkpFe$e{HycB?at;sYhK|`Av=@ZrL3zLsT`p1iA@)k0@sxm-nA4>;c{Rc`imey5fR3 z)Gk|ak8qS>toiu4ol{++R3sX7&m-_%qS6;u6B^f^)TeVD<;wg5^L`{!Q3MO$K32YBJ%LuK}DvH5;v~IiBJtaNfK1h(qAQmPS}^} zZ7ES{UKsV0Y(G*!hhcyg(NMp52>gH&#oX8C2f_jnKj9Yi_im%!rKttI);IE+1cqUzrrxGSMS6}|vH1;ST?%-7CmL;;U zt|N)T;H`;xqe5sWJtC6}nq{j#`+9Rve!V_--`4;iH-pds|JO@M6|48GhWueri*pT- zO>dk6kh<&4e2264lj>fj(S5+aec$`aRsCQ3bTh+U9IA#4X`p8GrLn4VgO$|G9{NF# zFgmIa_ZkyoD$hC&j+4f}}pLXeX9XENR!bb7=OQo<*jxgm_`~ zNN;}`(L>y>9;3PTEFJ9M_dWtC`t56#+_E}UN{56jOREP_P*_0CNSt$SOBC8+!dRnSzwUUlbDme5J zBiV$*)pqzQm@)-W5Ir{aI5z!qHw!J21U~4?#EQTpzbUae=_(B;vJ30nfHtb@K94aIB z{&z#^gwpN>*-@>7qBc-g+;~@1b4pea_*q0-X!O1;d3H=eYp6#@x5OnDqtCAv7Zy6t zrj(-mwb`z}9-ZCWcTHUlUU7BzAwso|tjNLCR|=t>&d8~7y&AbEyJtb52-zPy#_3Xi z)RJQw_%Qs979Tq?e!0x2CV=DU3tQ63W*qa1F&tNUza+-3zLcN>uW-D_o$8DVYh7h4 zqoeQF%i2SqJi@#ESDxY^?F2jSAG73$VYfffq1X-?mrei<5P`u$X^E@7n6N#)=@LKxB_|Uqb_HM-DU~<`P=l;q0dA3D~uQ;8lr*zgY#FOb(i()kS z@bpE9qg(61$@#{5mZk;9wCQcfd3S*{?^)UWyz4u~bg2A>T~pS4`P=xU_eQhw4h%hUs^Vb>|^e1z-)9?R0n zOCf|nX&hH-!__mMyKi2iqry3glkNmBda3W`i<1IgNnf1%+vuI~N+KTLso1E{L8^(mt}%dBLCFc8|`d!H!9-FMu*tG|ogOm>X%Pcx%%TXS%-=Hn?#&fOk;32*Fc zT_-zr1@iNM>a-}XeZ~7G`mDq*iFs;PuA5_ht0gl>PAScIAw!*M*XB2`xkvG9#iw5!*%FDY$36Uz&>lg{|r)M2uHGFK77dg<{wq zJ(uFMS6Ww9rlgcWesr8xd58SXW)#e9&e>=?-p2>~&-`XxUOjxEYQ02s?NQ+w;6fxV 
zFPVv)kkBH(EFT8jEVo;27V)*)r`y|I3fd5nZ6j+Wgoe$n{XD}sXHSG@`jnXjN)eIl zrOG|aNP=>0oKbqfa%vtD;StLP+li>FURT)r9qszwb_QPnwQSnoJ^R*;|nT1w}W~&UQC6re!6)9 z=7En!5!`&Gh*}@4B&TgcP&gq(pH`LmpHm2rB0D5|t*eXn6&USZ8Y2&b0dW`IdZcag z6jG}L!={uPu~~E?^NsodprGW-o0CcppljwMCm21D7bUnhmL}X?w#HnrkFX-Q(Xe!w z-qEdg26aT>UdEZDn?AhRo`04?b=+b|n&j{`4Ih?~#2xyY-#3DM%_r4WY>{f$3spBU zjrIxB)AN0dqf)3-0BSJ*bEEl{K>wXMrSa_6i9gGhV`T>PXdlcqvU!URoKDmLfG7Hg zYwj()6U@m#JJ}t0UIC%gKMbLzIX+;Arn5NE@cc2b0`(} zt%aGh$H_1+=)^wM!xa);JN0Xwh%vgv*Z<&K2(W^Hv&rmIE8XYzx`=hk*!oZRV8fAl zdMqI|(*uhdT62h40~B#5r4o<7E88Ny@sXJxA&sm39d~v1@YZuP!?bZdAJyKm> zRr8bwfAiBsZtQd+#2wBX<(Sv6=X?%3oA4FU^Wt=TTd_D0Xk!`Qt@|=Vk<`;3D6&A4 zxpqe24v9@EQP>$sOl7_%W)zf+Z7Pt_;&$RXD}w32?U858qIyPCSSp6?{pTNa{dUjT z$xNa*0_`~VLbg(Y0fg*yJ0<)c#U30uPiO$Yzh@Ka+79hlIn3F>xnSt$V{BaNLruQKu2eG{CiB=O9{V)Q64$YeNXa#8Wz}VN#vHAl z0T_;~eLuY&m6X8}8G0@`cB9S*-p4)npg>1ZthA?zDrZ5pJt;I9Gc}tYF*|X-*|ND} z90NNiPa~Np#F-a5_8PydvBkoK@YdQSryoEP6|%0{oJ&diO8u<|rp3d(@oo2-uCWL% zqO)(dkgu!B?Hi(pniIJOI00tgVT}hC!ZJWJe2J>0 z^xfWOHvGgsHFW5H(WGd;HRtDGmcT@47oCK?1N@t799e1G zrFa}0x0!U@QLLba92s%XcWo!d9u}&(p(^oBF+*}6%#Ke`lzB+vjB}=5TiYXd zy!v|?{1dN9S7uGLNTQBT;*@^u#d;UT$l|-~!}Z5}9~!MSg#|R_)Gs+bhBF9g+HYW+ z>tP?386xcV*7Xu0b-+?jVQ;hKJovWZ;nLg$%Nyn&)Vh?y<>Avq>j2^+b;%Njz;Y3Be&9Gg>!yU{Of8Kzk+mTjn`YPr8$R&Pq=YRPB_{M9HYa0|=EP5>e|MthUw z?BBM0KH7*6+d9H}!cHwD?YH9G+R79lCs@5A*s3XLx2A*xu^Q!tmQlC&m(|Bk5}4b% z%Pl4E{`vDx<(5PTpN4-E)8*bj=gFmup5&XHMyL4q^>#g_r@HhAR z*L2$Yb@mpC18E>XQwxLPV%_#OKBI_n72nF;2PRyx)k?J)2IQ{TL}Y42W@jG}l((2I zWxxKD$TFA1hJS_S5c?zZl5;N}V@{i4dxpW=0A}f7;J!DOgpcFLcpQoRlPJG=ukP+z z+k4c`e%|n={?9>#n^e_{^71jOzU|7*=ApJ2qiUbQv_i?%Oa{cf#@IG_)`nZCsh%^p zMyIPz4VZ)aZvoh)Kk8MnKk&`)%^Zmg3wG}H`} zhppD;BE_NW`12V+ zD=8o_>i#rFqr|7ObH3L*;GP%N#kB)+0bV z9JZs*PWas=b|x<^_QaX~9quBD-M1{_3@mrBYmKY-u=%}5%WraY>+4;hy(&y6YEcY5Qq*Y|$O z%TE0v^0P7wkB-4t=^Q5xE6va8arc>;OKA`r-|zpb)xkFn&`k|x8RVWS@90l#PaoHk z{9#ZpUJ#Li?E8jH$_|?dDw`2IyRpvQ(YF9g#lXqW&)3a%%@b&AP zy`+O*=qT~r&A9>J9GG=!k%*fgDgRclTruwadMg&(<0!_tY8ZW=7Nd`m!m`r9EM=Qr 
zms+Pm$>z}6JuAy=Jri}j3ZXKm4BXn;Hdo4efykq`3j6E6fI)nok+b^hq}Kh#$`Konq zw2u{-HKg1i5&~iVBv*Tb%KvrjsZ+JH z#>uord#m4wEo*cuZL&htyb^JvT#<;AoZc|Z%J;~D|F|eyY!bG8>2J5Y|5eO3G!*gX z@o#2cZd~uVCXr_Vyd$VZ$*Wc^~@?o-pzwYK65Q$q$5|X*| zEw;-Ro^d{q?xwkbOA#qC#Df~b@qDF%sWDKmuiFg)(MKYZ#o zr*4y&a)$Q3cefPo9Y1}N>`}Obw1v9N>61=OQ6Bs8@@8(Vt%`i#p8Q_-sa@;Q@#^eI zonSEA+QkglZ|+Klycknqg#YUCt1^=c+}B|;@UpJq+rE6kwlW}Qd*S}x0Ia_IU4Eg! zYCUcae_pVaCPU}i2Z^X71<2ZT-DmW#s^d|~XXoXNRY=`TPV=V69Emf8pTnMc(w!S| zHnFnq=uZk11APX!6a8j)mYGKGCj`U(1VZXLRrwfU%qsZTozAr_Kn(SD(C%k?#MmsXPJ`Evf*8c zIRuMLCADPwtr+IpGk(pQZshridspr#jIm(bC9bTlhc@~@s-t?uGfz`;j+bzZ`utT4u^WsZ0Nk+0?8X)3Yo$<|_T>YMrnFXm4M(YWIUJ5hL_C&=4`p>kB&F zJ$NHl$w+mx6*UDwyW0YlFq=Oi_^=$4=OKZS%o-Xgy>Zp#a~pH~OOi!Y zkFgc?l}dg-P6aIz+%_$S^l6QCXI(aj#TQ2m2)MQHexfUWshmFgg`*K4RSFq|?2a7y z`Rb|@KTgbFK61fSp!uLRstuAg=6Sq|!ajC7w_9bEK_1tGe z562L^BJ!il6su9OnP=Q$<7&2?jo0_LMnSlO_`w}LOTptl=6n(jYj z@ej^;BR-E|gg;EfU+8x>lC7S-P_SMz9yYeSCn%{n8LSj+ka$UJU>)z-dE`iG@}5f> z2bDB+k{_v$HhLsX?D{t>r|#}kB}As%*KuymuTSm6YwV01 zLQw;4g`U>G3k-tw=7qQBsaYi5l=;Qwa5c#rulQ)>c)$?YqcDB3_?=$m0?&@mJPR}u zqwXP6Rrq}PgG8}D^N*wLZMQw2tS5)nO7VLZc7IC^Tu-yNP@lLh$}_gU<27IT#QbC3 z$csG_98KpR&P~^?otERF!gOA)!F-zJYOyVn^e;N%f7*N3mzbvu7rt|7vI#M}{A5*b zTu1`dve`zjR%-S8*y4yp%jo-aM~)~w=SI1)^*ua#0Og*R@R47tut!7;EVeMzs=1Fv zeBfT&nnM`Akbd^-nOyZMO`&9*!Eh^f>vc|+r+}{KEM;dBAx3Jqwfcz?-udj^2iB!M zv-xh)$Ka{_&$wjUsBIHD5? 
zETK;Kq1^lB&viYg`rO>|)H?Y&ocZSNx_xeOv78cY=RJO7X=&{1Bz(Q$xpDVsT7b*F zhXQ=xKL28)5m6O&$oAFQlAic+%e^(>wyx>ul8M6y>#)=wEw9pv>LESVu;Jfc^ewLM z@6kBwjh%Rbw|$uL>ft6i9i2q_alBlYsb}oEZ1*?z5PI67<>OwZ)i|CpGdkfpL2V{4 zu~Wi{)B`c~POMwRCEgXVha4GH(qQL^Hd|`pJ!LSU0`_C&BZJJRwToo7-C2+~ z>HVjf_n~)xPzcA*;er6q9S+L%KuuJ%=jMt=$wX17wo`NP0{MUpQ$YFqRBx(HIlkC# z$M5?N3g1fhvG9VB_2FehxOm(03kQ_5gKOcN)OIa>FYDioadXoV)*BABo1j743OV8S z|JT@e$FteC{rA4Rr?!fsxZ4^<(bg)84ob`#5nD@%t@a)r+S(PXu}5u^5<5m~M2%2X zY^g0qVkU_2Ug5r<=l4Fp&-3}b*I!r0bzbLr%+;_JO|>Omnpqx27N-Ty^b3QSn81c84^+wao0z>(HtZ+Tabpy;in)OnlA}&C zf!-ZmV$~@tpfD!tGLD>ho4@@#S*-+vzgNaX;}RtUGpJ7f=%0FtpguCUdRc>z<&rc2uII}o}tCF%7NZmCAhiT@Gl`MuQ zaf2#MEQy*U}VT4unr|yc93yX@hM=GLC{X5SrvU9i`f7T<^C}ak^4H0RM{k+7a)mSCG^o(ch++vZePo+_M$7=dUi@erMtU)^8oQ7#~dk?)@UVF{Ze* zj6u@ThRJZPUa8~%ADTQ^J2RVvc;@Y0&294v^B2-!x890~>>kgXYTYkR!+WDMGc#i= z1oFLwc!SVHI;P5qTiigGi`~NcaRCX1O;QG zRfc3$wTG5Umv6_k+tw1DDu*mbw0Prun~N~QMyBwkE`9VnGxnPuiZ5U-lYzf>n_&&$M!*Yh)Ii8f@N*jHtsP+Sdfihm3@_*}j$zakTk%zlK|#(TvDX`spxrt@$o7%{AyD z?&;-k6J1nETsOB7BG9yu71|vy$eZ(8;X|64k?UYZN_1WCc0gfKv;zdLYm+bruY$yx zklWc>|FTw{WM-PJIwLGyM3&`vq2(oC_zUMea@lH?P0VeTP1(&fWX@YLb-@1yN0fn8 zp;*dGTQ%&s%aA6{RBE=G;oedqI?DB;0@wqeb}7}N%fCV)SbX<$>PTkemDFn{)N7j? 
zaZy!17{uN4%TdvVPL`H&nVIbRZ}}Tn>megYYDt1AZ#9|3O-WHZWW1@~)GbwS1QDpj z(G;3fM{1&)afhwTI{2x9w2i{1(02>=2AoErzSZmaoG6RsHV=g11dHO9pC6?p{DSK) zI;@Dg8(+a3OmW(gy4;y&h5)oeCs)O7U_RO%M#aGQQcpn76-ZV$Y>YA0I6vI?v7@37 z^ubEr6GW8M$>ZTTa>QEbZ(WBos)_p3mBQ;|ol}KKS9<{N&SHQPywRs@Q&{QOo8E_3 z=A<~UVvlnQd6`c=ppBTt_9KC-aeg*u%tFw!F0%1T=7^rw3aOL!Y^>a;=^T5WAlgP3 z*VF4M56;Kjyfp2Nw?$igD;&M~TF+qWR);%I-%7P~hIQP`nyC>P72U!n94v$U_4U2f zUPGG2GtA`|TKc76aWiS?8d}$j@5VBqmF>bKEaMfihkrQ&XV7&o!!q-Vf*{jA9BzMYGO92uvm`n zpMhmb^o2z4xJCIMXOLy>ctfmm82)^FkVc?;o(wi$9n^fcRE#B4r1mq@Fl+RdH4JIc z${9jf&**_uIgje6E-Y2PFw(Iy8=&ZW%+=hQ;O}~!#HEtY1Jx$kgWv>i9g9X}C@kCc zU5QlQ64bt6$b@*+%kKFJuRCHp-cts{ZQZxK-udOVO#9i0k-G|y*m}gb1#~IxcBFuu zy9-;BI|F9L;_7OjR&a104HQx*${^LgimkU_^o?-+#7ipr^-WuojFK3tNE4Sprx0~B z`WgJQx16#zQ7w`9Xhg;0o;!TUzD_?^25e1}rHXqI;%ozZwL}6toa%LKxRQLG$DSxf z+WO;8jCoT}fqHvAA-DKT5>DhY!2EvAJAH)X%Y-b+t_jltqQQo}e;f(IHFkzPqi1QP zswMkQ{_UxOv$hj`qZ-JI^sqYsW4d#HTR^~M!jJgQ#f zPDJO%Bf854-f@`o`Cl+0IciWc`&g970sI=84p>JN(vj4`_2|jr)?VPlt4H$}S#m!F z0P91nmOlVBvj6_ek+ORS>6(L&)|LmUqJ#UN?fO1sPD@FNy^BkOJHTnf$TMfi0^;KR zgFp<+77Z)|{qm(S9BcY`*_W*5U2WSn1bna;mk9n4%%0@T`QQ5sl>CTrEUdV62{U^F zPWSkFc4Z}Pd@w5ZiY*9$a)xIC;q>c1$hkg_K46DLwdUcHJ9^IyS^qDc|AkIRl>2*H z=qU5c_zw0x$AKSpb^(_(>ap z_@DYxOzHt0H z>c#@!Ss!91PdsPxU1k0OZnRU9tb{dNca@5^o10r*+A^tFKR*kwGw1PZp<&M%RHYhLICE$X?aH%>YCaQ1Cmvp^^bP{M7{FM~M8_tz$qr+#nkvB11 zW|?$>LrOq=VlbVcx9^%#q*s`uo>Bz-EC}cz9X0}FGo>9sbO5tYe^Mcr=^>hDvGUbg zeK{Jq=@5pzqM}X?sB;(N9YD8m+qFYLH)fU#)X8D4sX-lM^-n+$vbKW4J9Keo=3_d< zKTf@14Zt=O#9)I4m2d4m5xXI;vLwM?|zKI{&gRKeC3O{ zMa&;GI1lGvbXHFHsn^svIi-o_PpgrW?;CufVra=PqqyK9b;)h1>P8j(u0Nh%mJQG~ z^m&>uuj1S)HUSG`vWUQafAT?{i(PCiA9lIL^P|p!`#{Zz4|yJevYZjvCChk@>?}nH z2nzPCq@8!J%&?>*Bpp6Yw{GUB&`|9J;e_f+*Dl8UZ~IPq^DHt7SL-`3k9coRB=db; z^Oyl53biq`Z>t2fLl4yZj*2NTks(?9x&LFR6nyE@B^BkW=BkkH9o~aI2tUsOlDPr! 
z)6Y~Q;@e2QMXq^KPmRE;$`tPw2#`7q-dFQ&NOG(l4F2fq5i4_%Ikl#)6lvJfDk{nG zS~@$Tp=+=RkW%_+Og>cIAA`x|by1~mWJbsgc?01TK<8W8X7?cZ$36*mg+R!$_vfUL zWa0=C7^&&e+2r+OT>uRnI%tpOs|iob9Y%0=BfmEu8DC;K(4dSaC9(TiL<(@^qWYoP zDppZc45mkHbaJ&g3HQ`)pA~o7Ym`p-dQ|=;5V`5P?34-DO`BuQwqUbZv%KxIW-bYl zulKi9DkgS+0HOfU3VrfoRDeHFS=5?|Ibd;bDR62z+r6Ef?u~;w{Q?>+ry~=<{&KH( zGeWi#yw_Q5@q2Gg-_rYwBXh_w4b=OmJW@PnN+feE^w7?vF)C~oIUU+hYjC@ufLle8 zF}i*<%a_QxoYw@utIRGpq+rg-5)SLb1N<>Q$-seYK8dj#{?$eopkk9sQ+m)j)}w}S z;g-BGPZIs(^*84@+FEkIH()CO|G{*Lbpml-1X>(WHVP0ZQNq2HSnXRzH#~sIv!VYw zVhlN^o+NDm5P5?wZzh@V6 zagGU|?g@&afJCY^yiB=o(3~F6<^D0OFXHIV;?`=Qs1EsoYBjGx0*~>6yId393W>R1 zd#wY+ueB-L5%HrFsPP8}R@)k!nUx9nKDMOSXH|@;9--kM??^Oh(%%l|D(VOHJi2+$ zyF$)4>kOG=1wKgnb$5JGV&zFI&}Om#e@{q^V*3c3Bup)xF!Z zYdR>T&X4>-3Si1Yw}^;G_eb}Za@B+mwq%2_c<27%K&KWJ&ykFqO74>{qX8KfrW-AU zVrH&-%F-UYtZmh0;W~vFKDlquz$0{5kfV`0&qV#s`u5lI8X{5JI}@-45rFoVZY;^R zJKFDFA1v7vvWPKby0QeewJj)NJ#s|s25^2M2n1nGsw;~p4Q&pw$5o<;ovHKl)@h;_ zCd>dYK@gVj%~59uQU+~rtOMeKICT-VfHU@JkpT?UX5|cN@ugNtNmz}^IM_3S)5iWi zD59mlm1#^@OKXBDP|kO^X&u#~lN-6Zvz=0DN2;eqa2{GSA%D-{_KuE-q}X1OA(Pba z?qEeAjBkcU;IGQNKClq(^8T~Eb2}U^XAP$Vt`6vkn9uYbMMXvUp4#J%ndSC@dfLf4 zxsE3^)Oe@Ua6BNC5$bZ$!0Z?f<_1g@{rW6*=|6mRoB!cHJ-Yw?*?zd@kba*YcQB1b zzdiC$@ZX>(NH53W>_U92=fF@GB>x`Sy#HOLxT9nA9i!~w_<-EcjJ`}JeuGs*V)QAh zzr^4MejiB&2aVjiM|YBcR-`*>A>C5cYQZzk2~=5NZ;tqc1DMmZT7!4IAk&n90WxZb zm+?xXm}^5Gq$WVZJ`Xao)_sN>^ny8zcibf#C{rD1(9k{j0}CAn)3&F{P~}vz^oYnP^_d@{$B(Vi-!^uT0J2Kd`fG3#6Lhaz7Jsq4&p>0 z5J59L-A|V@8D8u$imBa0kcN7uqHWbXtF%8f2=*3qGu)ESjG#K31j2N21eLf}Pswqn zzdIXrHl8rOH`Fzrb%|-NU^A+l;#&^Z8zzNdEWW6{Uuv@c|j5PjbVPMjtRzQ@eDN z`veWp8}nxJ+GHYfY#B2L>{UCsem}O7t|nD`5%Wcv43j*6*r9X#Eax`TNYeuQ`XSCb zbs_$mxm|h-B3v^BG^vBT$@O5H6Y%V-4AGQPN|~SCSc`LnMr!`AvdRiOrwIyCLKJw0 zPqiaL{rb(@x78$)um@Q6Icn$%Gj5A;5|5(4Ip~=MS#o*1w@7x|3o=%-Rqy9pW8631 z9>-0YtWob-gy~!8`~XsxDWPFu2N}~pT3T9cfzKt?7wVx0D}XXEOHRY&gn*A{YKtw!IW@e3KJEM#c5Z@=$wGAvxZ zMj#`_JZrd$i?r6gXS>v!&S8YVC$SCo^7>3BjuiqYDGSKSnNCbhV1UzdzO`AQ@~f)! 
zTO&F8zz`Fjq+-lq%h`WM(*x8d6S6FS%AVwB>*8Vtb5vO%|Uw|NDc`}@5Ot>q=!{Kr@7?88cD`9{;Z+)3#iN*XrLNMW0kB2Lfjbsa(uHYU?s z_-zzpM>NCV9IwicZ<+KTA$nOu#&$RA6XscLWJi-&0L}cIcZ#;#IyUN%3W&oW;s+-N z(H~_whZ3hGC65Nae3i*rC*D?N(1#yFG^<*O6PT6G(|SW)eb&YvST67BrI?T4jFR@F zzlTV#jIsKc75=PhJNqx{ePc)g*cJwq#OzIz7yhY)K4TeiE`gi=puDe~ssXwoj@zAd z#iHs%W%U%IPZJ5cBk_+49pN%e9^GGWJ-*Z7qoht7PbeSQN9<`g_^i9eu`&1f|DH89 zt&C7!`RD)7HNs%j)DVBr4!$3~sn_)aDJQ%iJ!C`^COx+&KYsHMU4k&;GSyCMORBx& zjifq9b{f8?4U;4N?}mPUwc?@p^N_JsfCB7wuM|dL-K#F|`lH>%91kKsI&I%T4<|R~ zI|k$flwlh`emDCtQlrKbU)sAE)lLK<=C5QmtBw%tXQU|R=%{lVVjgiV=F!|K`LW}> zUmn8TwxNOI8@7W7lOJ@F)2~xD2XSUDFCT?{ZQ#Snyg*IT5E-CSk5L+Duk2B@-Hyyi zU4{5`4JS)sW_!+9YuI7I8~VdRXM+G*quSR#$LYM%M(xG(fp!SurxRs}(iRCmW8)3P z@BsS%=-1Ei>3}rhRSa*MoV@-QyfF1KRIqT&!HRKLa-&8UbebdlUrr;Pzl{d?B5uM$ zhYX=IE3havp-j@ld1TW)BmgyiHZB&-DhXFv!ON})AV(1YGMz`%m5G%?G!59xN>y5o zI}SzJRcK&KEO1O2_TC&Z+Nw+S@mj- zt-J26*h~c%q4WX^2$oY9SLmdfz>dagnZ4QPY%22aUkBD!9A3tGw`F~!O4->Lb$$t& zjOhf}+k2qJgbmU4!nhDpN9{0@)bk2pD+7}%Y{!FOJ^H1|J1u|ewY`OFmWRE6$KYW_ zQ@}|pqYk{iIDfSW^i4xiUmRvO=xgdIzNoqW-zf;g>!>#WK1*+7z(eZg`f~}^OfLrA zc4N0H)OBkcSSTY+S@Dtou?Jh%?bbSJxHpFrNkD&taG0e|Al;?@%)lTcGxJ7VoHvsH zpOI9((l6F?UmDbM7>u$Dr~-V!CRP$Z zzt$l_@qL2_%vPH?MMM`sGu;2`L1Kw3fnzT9a#og4tEj3Lt|xd?xv{3vNbRImPF@b9%+oeDDym88JgwpE zA___3BdjAsY0^m=MYxS{v207Vfd$6*#8a2bTd@?py#sm4QBaRKf)cl}e(&o0{vIi* z;P}3PZX8rTOA|^BI5-*{(8>qbtGS-@X(az5%KAoN10X&(%ob-gv}eEYNRN&llV_`t zgd`PmgB|o7A2Q?%KEgg!eHb@uJMMo|%zZ+7Ce0&ztx(tJ4n#GzR@GCk!65rR`fp}r zyqOLNTIL_jEs97J03z9`oOn=jCdI`iHYO%3Il{njbCRZVCZP=anlmlUX^BI-prW(y z<0GuHDVGrf#QJ_{W3CUJ&vuoc-z8ledCyTHk9Dnf9sgkfufui=RM0yctZBD>2{Zw! 
z*>3^2x3_6&-~_N-Y1|+I)9oVD{oa-I!;XCOF7sG6q)jjcu51IF_P#kP*J(IAFPGnR zN~j2^AFCRYZ@kGAIHrINyk`Q_tu+rR?EohEm5A?W1ROE_$AhH`TOp_xj0-T7Ar+ZF8ts4|^#c z_T%^NP(xrtNm~R!7DdJK{*;$#C0 zh*&oqNB56kU+_YZ0Dso$zHP&`5p-;Hd`{UTc;X7WUwSE8*K>fjKXaMMXVhVxlpM7$ z6D2SW4sK7C0o^q2mc6Be8o&s+_ueSR8Wfe7$Z~#*Dc0R~>h7)MEdR-OTliU{L$`H5 z=9;W~3^$W)n+Qjg%mi>+a93J-=OdfMl?n@_N`os$&>s>v9PFimYEqpn`ZbNo1 zfJ?2{rK#rp@i3ycpsOo(xg*vQ z^vBU~bw_SW_tEZS`?*PSI(tM}jPTwiycS2wzR{~@_$Z142W#`R!Ic_wc+4CtREgW4 zc%aTEsg$(Xxq{RtzabcY{a&4(SFC%StE;C@vnbvo)m7(;-6kXOOsF3dI(0;^ptnGK zMmQ0Xz+Uy~df?uDb;HlaO{wKSRkIrzpf)+KnDX4-I8D2S(l^7$gFS_RRw>Qdj3s23 zpUsNr)FYM}u&ugPw3_FGhk@OiSVO=vKrINVJc&CaJUf+W&7+Bz@K{%_fol*ypmz=S z1ygw863aEf`h2vfRw`5_(QaZEzc&4Zm6xRv~>ebFXMjZyQ(JWzgE(K(6lz+kL zb5`sA3T#&Pj%o*`%470tX<8a0fhIQe^ujntQKN(pWrj~t#Q;Y}P&VypV_!U!st$${@Ma_B)I?V=)z73MzC3BB$PG6qk zbmqDf7f`LS&Kud6EaZ+_caI$Kj+8v)#x7}R5eXSU)I*hHM&&g#Glv% zw6R$-1uzD@yrsF?E@k+&RSV)bX=l*h)~1Ckg{wrx>DC&G)apF1ntA~?p@!>OO4)9U zq)z9_BbJc_SqKbQ)W}fohvx2=^8`$WI{ROs?BO!()~Ik&xvgT!7ya|z!v<&lE#haR zK%cm0oT#HWrAW`N$|$Tuq*5(b>rVIfQx-#BfY|Q)my}9Kn{_TUU4O(l|1`q)bPop;#|U04B#S2W5sUfQoUC_mF}uF|>_KIt(8=bT%86q}qq7 z^twlxMF4BJ>d7R#n_3DO)^htq?Ixc5XGxXPClfua?A%MZ3sJRN8j+>rW<~J6v>qAF z+(N?o^po&>it!sq_8H~zIb=rx$DNUwV)tO!<@dMhqP{S8h(j}`K69(P$eH4N6TMxF zEPgIAAltl}7p@_>d}-i;}eMHeWuCNmQgycCU)e^b%G8PqCmR#bY zpn>MLqhm-k_3K1UJ-KJ0zjljPP{LUIrRVv!SG<9$s$fDtGGvEgU z-l^jT6V@HNXtmB&@69GgySLoN0zExJ+7r2lga)xbiGAsg*=B}(Ibpl?nibIQWj%pZewE_$RvT-*p)svrTwvOmun^06Yr02dqtmS3&3+kUgaDq zGH&{#(6u&^>2LG&jtxlBZ@9Jk6bcgv1i(lojQNrmVv@0;5p~L0N*8o=b&V_%;su4Z zqt^pt5Xzr)^BzMsqZx92d6fBTCpynbL${K&42PRHTpM4h*9w_xzc$^;MtKBaPGLtT zKNIA3nU&0|5wF=~_(9tFI9b_71FHNJodhP8(1Dds_6X0s4!we1CX_hA15&ppmK|a6 z3|fIIFc~M`-Gw(BSLTf;smu>UEoPPsPdHNQFh=re{a^KSC|ft?772We>Nsi=fbPEN+cV42g?FJaWVCdBSo z`|wu2aA=5g>}*J+gvV-F?b{hQF&_BCF%R zf0krDdjyYalbz}2$_2R&5Pp>RUCZ`x0ZKfMd09{S6bZ!P@}uxq>GY8(D9piT^#d1(LxNK#}ZZeFX&O-n7jM73k6 zK)060qfYa$;N}n5(9f2EW>6MMMYz3HRUaGQ64&Ta0gk$G5<#4@aUlS33W9lu~mr{EyrxXZj_CTGh2A0A; 
zVB4>%Y2(%X{_@6gAKG`c82b4;X8)z4{~@KsfF*JeYw$?g0y^6 zvCs+8sCgPGW>;ZqSY^-x?%MMEYM91O-TAq(b>hhv1c{_AvX zxd^Xaccww^O?W0tt~6scY*IAWT7?T#@n_whmqSRBZ<=xOs3r@6G!oI)K@1%uzq@xr zYDZmv`nG_}n239Vt6s?tzQ}FPJYSX5J$9tc>+8dWO^Vl5P{IAIOaTzPl}zfuy2!d| zYo~RS$+ZGqx5vHF1L7|}O$4yQ6U8KrTF&!g8DnFYKDrZ>P_YCbAv2RK8MKP_Yvyox zF&1`3Ucn+ITlEY?ZQ$cMs2?sqF|loYV=M7uT6#z0%xJe*cD59y{P?6&m)jE5reZeS zl@oOdRfZV6^p+3vE5(fH)FKE4V34s;;F_tW)%{2)CQDWK7JtCBe(PlYocfpEeOJVL z&v@N9GvB3qPpuaw{lFlV#9eb5B*aba6>r;sjnD8gAt~HDN;0=WpZhu%X1`eW8+kku z_CV72+S{#(#7X1Mn%@!mkrYeA#CsTQ@4|9Y`TW?ZbeIZzu-@Np>$ej((W4275|=y_>h|`n8E5af|O=)a~Z~^5o_aCS~K}5QXe7rVPkM89Y>6Pg_i8J7q`fYyFb{3y} z*M?-WXS0MVEZZ92!M%I+3za6t+qS*ua%Vr))n2oqzxq@t7R?={O7UeO)ZwGR3ryuF v`N*NV5wztIAE=LQ*6NvRufve~8*)!xU@VjGB@_HB-9UpBH53q!pTGGZPi}Nb literal 0 HcmV?d00001 From 82f4d8b68c6cc735d5108b0fc0d537cf83debe51 Mon Sep 17 00:00:00 2001 From: LeoRoccoBreedt Date: Tue, 8 Apr 2025 09:43:06 +0200 Subject: [PATCH 090/125] chore: pre-commit changes --- .../debug_training_runs.ipynb | 130 +++-------------- .../debug-model-training-runs/setup.py | 136 ++++++++++-------- 2 files changed, 97 insertions(+), 169 deletions(-) diff --git a/how-to-guides/debug-model-training-runs/debug_training_runs.ipynb b/how-to-guides/debug-model-training-runs/debug_training_runs.ipynb index 337b0ac..ff1d152 100644 --- a/how-to-guides/debug-model-training-runs/debug_training_runs.ipynb +++ b/how-to-guides/debug-model-training-runs/debug_training_runs.ipynb @@ -55,7 +55,7 @@ }, { "cell_type": "code", - "execution_count": 20, + "execution_count": null, "metadata": {}, "outputs": [], "source": [ @@ -73,7 +73,7 @@ }, { "cell_type": "code", - "execution_count": 19, + "execution_count": null, "metadata": {}, "outputs": [], "source": [ @@ -92,7 +92,7 @@ }, { "cell_type": "code", - "execution_count": 21, + "execution_count": null, "metadata": {}, "outputs": [], "source": [ @@ -121,27 +121,16 @@ }, { "cell_type": "code", - "execution_count": 22, + "execution_count": null, "metadata": {}, - 
"outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Training samples: 81926\n", - "Validation samples: 935\n", - "Vocabulary size: 128257\n", - "Model created: MultilayerModel\n", - "Optimizer: Adam\n", - "Criterion: CrossEntropyLoss\n" - ] - } - ], + "outputs": [], "source": [ "from setup import setup_training\n", - " \n", + "\n", "# Setup complete training environment\n", - "model, optimizer, criterion, train_dataloader, val_dataloader, vocab_size = setup_training(params, use_multilayer=True)" + "model, optimizer, criterion, train_dataloader, val_dataloader, vocab_size = setup_training(\n", + " params, use_multilayer=True\n", + ")" ] }, { @@ -162,22 +151,14 @@ }, { "cell_type": "code", - "execution_count": 27, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "https://scale.neptune.ai/leo/pytorch-tutorial/runs/details?runIdentificationKey=pytorch-text&type=experiment\n" - ] - } - ], + "outputs": [], "source": [ "from neptune_scale import Run\n", "\n", "run = Run(\n", - " experiment_name=\"pytorch-text\", # Create a run that is the head of an experiment. This is also used for forking.\n", + " experiment_name=\"pytorch-text\", # Create a run that is the head of an experiment. 
This is also used for forking.\n", ")\n", "\n", "print(run.get_experiment_url())" @@ -192,18 +173,9 @@ }, { "cell_type": "code", - "execution_count": 31, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "See configuration parameters:\n", - "https://scale.neptune.ai/leo/pytorch-tutorial/runs/details?runIdentificationKey=pytorch-text&type=experiment&detailsTab=metadata\n" - ] - } - ], + "outputs": [], "source": [ "run.log_configs(\n", " {\n", @@ -239,73 +211,7 @@ "cell_type": "code", "execution_count": null, "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "View charts in near real-time:\n", - "https://scale.neptune.ai/leo/pytorch-tutorial/runs/details?runIdentificationKey=pytorch-text&type=experiment&detailsTab=charts\n" - ] - }, - { - "name": "stderr", - "output_type": "stream", - "text": [ - "2025-04-08 09:18:43,794 neptune:ERROR: \n", - "\n", - "NeptuneSeriesStepNonIncreasing: Subsequent steps of a series must be increasing.\n", - "\n", - "This can be caused by either:\n", - "- The step of a series value is smaller than the most recently logged step for this series\n", - "- the step is exactly the same but the value is different\n", - "\n", - "For help, see https://docs-beta.neptune.ai/log_metrics\n", - "\n" - ] - }, - { - "ename": "KeyboardInterrupt", - "evalue": "", - "output_type": "error", - "traceback": [ - "\u001b[1;31m---------------------------------------------------------------------------\u001b[0m", - "\u001b[1;31mKeyboardInterrupt\u001b[0m Traceback (most recent call last)", - "Cell \u001b[1;32mIn[32], line 29\u001b[0m\n\u001b[0;32m 27\u001b[0m logits \u001b[38;5;241m=\u001b[39m model(input_ids)\n\u001b[0;32m 28\u001b[0m loss \u001b[38;5;241m=\u001b[39m criterion(logits\u001b[38;5;241m.\u001b[39mview(\u001b[38;5;241m-\u001b[39m\u001b[38;5;241m1\u001b[39m, vocab_size), 
labels\u001b[38;5;241m.\u001b[39mview(\u001b[38;5;241m-\u001b[39m\u001b[38;5;241m1\u001b[39m))\n\u001b[1;32m---> 29\u001b[0m \u001b[43mloss\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mbackward\u001b[49m\u001b[43m(\u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m 30\u001b[0m optimizer\u001b[38;5;241m.\u001b[39mstep()\n\u001b[0;32m 32\u001b[0m \u001b[38;5;66;03m# print(f\"Step {step_counter} / {len(train_dataloader)}, Loss: {loss.item()}\")\u001b[39;00m\n\u001b[0;32m 33\u001b[0m \n\u001b[0;32m 34\u001b[0m \u001b[38;5;66;03m# Log global training loss\u001b[39;00m\n", - "File \u001b[1;32mc:\\Users\\leo.breedt\\miniconda3\\envs\\neptune_scale_py_312_base\\Lib\\site-packages\\torch\\_tensor.py:626\u001b[0m, in \u001b[0;36mTensor.backward\u001b[1;34m(self, gradient, retain_graph, create_graph, inputs)\u001b[0m\n\u001b[0;32m 616\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m has_torch_function_unary(\u001b[38;5;28mself\u001b[39m):\n\u001b[0;32m 617\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m handle_torch_function(\n\u001b[0;32m 618\u001b[0m Tensor\u001b[38;5;241m.\u001b[39mbackward,\n\u001b[0;32m 619\u001b[0m (\u001b[38;5;28mself\u001b[39m,),\n\u001b[1;32m (...)\u001b[0m\n\u001b[0;32m 624\u001b[0m inputs\u001b[38;5;241m=\u001b[39minputs,\n\u001b[0;32m 625\u001b[0m )\n\u001b[1;32m--> 626\u001b[0m \u001b[43mtorch\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mautograd\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mbackward\u001b[49m\u001b[43m(\u001b[49m\n\u001b[0;32m 627\u001b[0m \u001b[43m \u001b[49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mgradient\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mretain_graph\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mcreate_graph\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43minputs\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43minputs\u001b[49m\n\u001b[0;32m 628\u001b[0m \u001b[43m\u001b[49m\u001b[43m)\u001b[49m\n", - "File 
\u001b[1;32mc:\\Users\\leo.breedt\\miniconda3\\envs\\neptune_scale_py_312_base\\Lib\\site-packages\\torch\\autograd\\__init__.py:347\u001b[0m, in \u001b[0;36mbackward\u001b[1;34m(tensors, grad_tensors, retain_graph, create_graph, grad_variables, inputs)\u001b[0m\n\u001b[0;32m 342\u001b[0m retain_graph \u001b[38;5;241m=\u001b[39m create_graph\n\u001b[0;32m 344\u001b[0m \u001b[38;5;66;03m# The reason we repeat the same comment below is that\u001b[39;00m\n\u001b[0;32m 345\u001b[0m \u001b[38;5;66;03m# some Python versions print out the first line of a multi-line function\u001b[39;00m\n\u001b[0;32m 346\u001b[0m \u001b[38;5;66;03m# calls in the traceback and some print out the last line\u001b[39;00m\n\u001b[1;32m--> 347\u001b[0m \u001b[43m_engine_run_backward\u001b[49m\u001b[43m(\u001b[49m\n\u001b[0;32m 348\u001b[0m \u001b[43m \u001b[49m\u001b[43mtensors\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 349\u001b[0m \u001b[43m \u001b[49m\u001b[43mgrad_tensors_\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 350\u001b[0m \u001b[43m \u001b[49m\u001b[43mretain_graph\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 351\u001b[0m \u001b[43m \u001b[49m\u001b[43mcreate_graph\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 352\u001b[0m \u001b[43m \u001b[49m\u001b[43minputs\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 353\u001b[0m \u001b[43m \u001b[49m\u001b[43mallow_unreachable\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[38;5;28;43;01mTrue\u001b[39;49;00m\u001b[43m,\u001b[49m\n\u001b[0;32m 354\u001b[0m \u001b[43m \u001b[49m\u001b[43maccumulate_grad\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[38;5;28;43;01mTrue\u001b[39;49;00m\u001b[43m,\u001b[49m\n\u001b[0;32m 355\u001b[0m \u001b[43m\u001b[49m\u001b[43m)\u001b[49m\n", - "File \u001b[1;32mc:\\Users\\leo.breedt\\miniconda3\\envs\\neptune_scale_py_312_base\\Lib\\site-packages\\torch\\autograd\\graph.py:823\u001b[0m, in \u001b[0;36m_engine_run_backward\u001b[1;34m(t_outputs, *args, **kwargs)\u001b[0m\n\u001b[0;32m 821\u001b[0m 
unregister_hooks \u001b[38;5;241m=\u001b[39m _register_logging_hooks_on_whole_graph(t_outputs)\n\u001b[0;32m 822\u001b[0m \u001b[38;5;28;01mtry\u001b[39;00m:\n\u001b[1;32m--> 823\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[43mVariable\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_execution_engine\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mrun_backward\u001b[49m\u001b[43m(\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;66;43;03m# Calls into the C++ engine to run the backward pass\u001b[39;49;00m\n\u001b[0;32m 824\u001b[0m \u001b[43m \u001b[49m\u001b[43mt_outputs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\n\u001b[0;32m 825\u001b[0m \u001b[43m \u001b[49m\u001b[43m)\u001b[49m \u001b[38;5;66;03m# Calls into the C++ engine to run the backward pass\u001b[39;00m\n\u001b[0;32m 826\u001b[0m \u001b[38;5;28;01mfinally\u001b[39;00m:\n\u001b[0;32m 827\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m attach_logging_hooks:\n", - "File \u001b[1;32mc:\\Users\\leo.breedt\\miniconda3\\envs\\neptune_scale_py_312_base\\Lib\\site-packages\\torch\\autograd\\function.py:292\u001b[0m, in \u001b[0;36mBackwardCFunction.apply\u001b[1;34m(self, *args)\u001b[0m\n\u001b[0;32m 287\u001b[0m \u001b[38;5;28;01mclass\u001b[39;00m \u001b[38;5;21;01mBackwardCFunction\u001b[39;00m(_C\u001b[38;5;241m.\u001b[39m_FunctionBase, FunctionCtx, _HookMixin):\n\u001b[0;32m 288\u001b[0m \u001b[38;5;250m \u001b[39m\u001b[38;5;124mr\u001b[39m\u001b[38;5;124;03m\"\"\"\u001b[39;00m\n\u001b[0;32m 289\u001b[0m \u001b[38;5;124;03m This class is used for internal autograd work. 
Do not use.\u001b[39;00m\n\u001b[0;32m 290\u001b[0m \u001b[38;5;124;03m \"\"\"\u001b[39;00m\n\u001b[1;32m--> 292\u001b[0m \u001b[38;5;28;01mdef\u001b[39;00m \u001b[38;5;21mapply\u001b[39m(\u001b[38;5;28mself\u001b[39m, \u001b[38;5;241m*\u001b[39margs):\n\u001b[0;32m 293\u001b[0m \u001b[38;5;250m \u001b[39m\u001b[38;5;124mr\u001b[39m\u001b[38;5;124;03m\"\"\"\u001b[39;00m\n\u001b[0;32m 294\u001b[0m \u001b[38;5;124;03m Apply method used when executing this Node during the backward\u001b[39;00m\n\u001b[0;32m 295\u001b[0m \u001b[38;5;124;03m \"\"\"\u001b[39;00m\n\u001b[0;32m 296\u001b[0m \u001b[38;5;66;03m# _forward_cls is defined by derived class\u001b[39;00m\n\u001b[0;32m 297\u001b[0m \u001b[38;5;66;03m# The user should define either backward or vjp but never both.\u001b[39;00m\n", - "\u001b[1;31mKeyboardInterrupt\u001b[0m: " - ] - }, - { - "name": "stderr", - "output_type": "stream", - "text": [ - "2025-04-08 09:18:49,998 neptune:ERROR: \n", - "\n", - "NeptuneSeriesStepNonIncreasing: Subsequent steps of a series must be increasing.\n", - "\n", - "This can be caused by either:\n", - "- The step of a series value is smaller than the most recently logged step for this series\n", - "- the step is exactly the same but the value is different\n", - "\n", - "For help, see https://docs-beta.neptune.ai/log_metrics\n", - "\n", - "2025-04-08 09:18:50,001 neptune:ERROR: \n", - "\n", - "NeptuneSeriesStepNonIncreasing: Subsequent steps of a series must be increasing.\n", - "\n", - "This can be caused by either:\n", - "- The step of a series value is smaller than the most recently logged step for this series\n", - "- the step is exactly the same but the value is different\n", - "\n", - "For help, see https://docs-beta.neptune.ai/log_metrics\n", - "\n" - ] - } - ], + "outputs": [], "source": [ "# Register hooks to track gradients for each layer\n", "def hook_fn(module, grad_input, grad_output):\n", @@ -314,6 +220,7 @@ " grad_norm = grad_input[0].norm().item()\n", " 
debugging_gradient_norms[f\"debug/gradient/{layer_name}/norm\"] = grad_norm\n", "\n", + "\n", "# Define dictionary of metrics to log to Neptune\n", "debugging_gradient_norms = {}\n", "# Register hooks once before training\n", @@ -340,10 +247,7 @@ "\n", " # Log global training loss and layer-wise gradient norms\n", " run.log_metrics(\n", - " data={\n", - " \"metrics/train/loss\": loss.item(), \n", - " **debugging_gradient_norms\n", - " },\n", + " data={\"metrics/train/loss\": loss.item(), **debugging_gradient_norms},\n", " step=step_counter,\n", " )\n", "\n", diff --git a/how-to-guides/debug-model-training-runs/setup.py b/how-to-guides/debug-model-training-runs/setup.py index c66a431..4607715 100644 --- a/how-to-guides/debug-model-training-runs/setup.py +++ b/how-to-guides/debug-model-training-runs/setup.py @@ -1,13 +1,16 @@ +from typing import Dict, Tuple + import torch import torch.nn as nn import torch.optim as optim -from torch.utils.data import DataLoader from datasets import load_dataset -from typing import Dict, Tuple +from torch.utils.data import DataLoader + # Model Classes class SimpleLLM(nn.Module): """A simple language model with a single LSTM layer.""" + def __init__(self, vocab_size, embed_size, hidden_size, num_layers): super(SimpleLLM, self).__init__() self.embedding = nn.Embedding(vocab_size, embed_size) @@ -20,65 +23,73 @@ def forward(self, x): out = self.fc1(lstm_out) # Use the last output from the LSTM return out + class MultilayerModel(nn.Module): """A larger language model with multiple LSTM and fully connected layers.""" + def __init__(self, vocab_size, embed_size, hidden_size, num_layers): super(MultilayerModel, self).__init__() self.embedding = nn.Embedding(vocab_size, embed_size) - + # Create multiple LSTM layers - self.lstm_layers = nn.ModuleList([ - nn.LSTM(hidden_size if i > 0 else embed_size, - hidden_size, - num_layers=1, - batch_first=True) - for i in range(10) # 10 LSTM layers - ]) - + self.lstm_layers = nn.ModuleList( + [ + nn.LSTM( 
+ hidden_size if i > 0 else embed_size, + hidden_size, + num_layers=1, + batch_first=True, + ) + for i in range(10) # 10 LSTM layers + ] + ) + # Create multiple fully connected layers - self.fc_layers = nn.ModuleList([ - nn.Linear(hidden_size, hidden_size) - for _ in range(9) # 9 FC layers - ]) - + self.fc_layers = nn.ModuleList( + [nn.Linear(hidden_size, hidden_size) for _ in range(9)] # 9 FC layers + ) + # Final layer to project back to vocab size self.final_layer = nn.Linear(hidden_size, vocab_size) - + # Add dropout for regularization self.dropout = nn.Dropout(0.1) def forward(self, x): # Embedding layer x = self.embedding(x) - + # Process through LSTM layers for lstm in self.lstm_layers: x, _ = lstm(x) x = self.dropout(x) - + # Process through FC layers for fc in self.fc_layers: x = fc(x) x = self.dropout(x) x = torch.relu(x) - + # Final projection out = self.final_layer(x) return out + # Data Loading Function def load_and_preprocess_data(params: Dict) -> Tuple[DataLoader, DataLoader, int]: """ Load and preprocess the next token prediction dataset from HuggingFace. 
- + Args: params (Dict): Dictionary containing parameters for data loading and model configuration - + Returns: Tuple[DataLoader, DataLoader, int]: Training dataloader, validation dataloader, and vocabulary size """ # Download dataset - base_url = "https://huggingface.co/datasets/Na0s/Next_Token_Prediction_dataset/resolve/main/data/" + base_url = ( + "https://huggingface.co/datasets/Na0s/Next_Token_Prediction_dataset/resolve/main/data/" + ) data_files = { "train": base_url + "train-00001-of-00067.parquet", "validation": base_url + "validation-00000-of-00001.parquet", @@ -104,47 +115,51 @@ def load_and_preprocess_data(params: Dict) -> Tuple[DataLoader, DataLoader, int] val_dataloader = DataLoader(validation_subset, batch_size=params["batch_size"], shuffle=True) # Calculate vocabulary size - vocab_size = max([token for sentence in data_subset["train"]["input_ids"] for token in sentence]) + 1 + vocab_size = ( + max([token for sentence in data_subset["train"]["input_ids"] for token in sentence]) + 1 + ) print(f"Vocabulary size: {vocab_size}") return train_dataloader, val_dataloader, vocab_size + # Helper function to create model def create_model(params: Dict, use_multilayer: bool = True) -> nn.Module: """ Create a model based on the specified parameters. 
- + Args: params (Dict): Dictionary containing model parameters use_multilayer (bool): Whether to use the multilayer model - + Returns: nn.Module: The created model """ if use_multilayer: return MultilayerModel( - params["vocab_size"], - params["embed_size"], - params["hidden_size"], - params["num_lstm_layers"] + params["vocab_size"], + params["embed_size"], + params["hidden_size"], + params["num_lstm_layers"], ) else: return SimpleLLM( - params["vocab_size"], - params["embed_size"], - params["hidden_size"], - params["num_lstm_layers"] + params["vocab_size"], + params["embed_size"], + params["hidden_size"], + params["num_lstm_layers"], ) + # Helper function to create optimizer def create_optimizer(model: nn.Module, params: Dict) -> optim.Optimizer: """ Create an optimizer for the model. - + Args: model (nn.Module): The model to optimize params (Dict): Dictionary containing optimizer parameters - + Returns: optim.Optimizer: The created optimizer """ @@ -155,58 +170,65 @@ def create_optimizer(model: nn.Module, params: Dict) -> optim.Optimizer: else: raise ValueError(f"Unsupported optimizer: {params['optimizer']}") + # Helper function to create criterion def create_criterion() -> nn.Module: """ Create a loss function for the model. - + Returns: nn.Module: The created criterion """ - return nn.CrossEntropyLoss(ignore_index=-100) # Ignore the buffering index of -100 in the dataset + return nn.CrossEntropyLoss( + ignore_index=-100 + ) # Ignore the buffering index of -100 in the dataset + # Helper function to setup training -def setup_training(params: Dict, use_multilayer: bool = True) -> Tuple[nn.Module, optim.Optimizer, nn.Module, DataLoader, DataLoader, int]: +def setup_training( + params: Dict, use_multilayer: bool = True +) -> Tuple[nn.Module, optim.Optimizer, nn.Module, DataLoader, DataLoader, int]: """ Setup the complete training environment. 
- + Args: params (Dict): Dictionary containing all parameters use_multilayer (bool): Whether to use the multilayer model - + Returns: - Tuple[nn.Module, optim.Optimizer, nn.Module, DataLoader, DataLoader, int]: + Tuple[nn.Module, optim.Optimizer, nn.Module, DataLoader, DataLoader, int]: Model, optimizer, criterion, train dataloader, validation dataloader, and vocabulary size """ # Load data train_dataloader, val_dataloader, vocab_size = load_and_preprocess_data(params) params["vocab_size"] = vocab_size - + # Create model model = create_model(params, use_multilayer=use_multilayer) # Move model to device model.to(params["device"]) print(f"Model created: {model.__class__.__name__}") - + # Create optimizer optimizer = create_optimizer(model, params) print(f"Optimizer: {optimizer.__class__.__name__}") - + # Create criterion criterion = create_criterion() print(f"Criterion: {criterion.__class__.__name__}") - + return model, optimizer, criterion, train_dataloader, val_dataloader, vocab_size + # Explicitly define what should be importable __all__ = [ - 'SimpleLLM', - 'MultilayerModel', - 'load_and_preprocess_data', - 'create_model', - 'create_optimizer', - 'create_criterion', - 'setup_training' + "SimpleLLM", + "MultilayerModel", + "load_and_preprocess_data", + "create_model", + "create_optimizer", + "create_criterion", + "setup_training", ] if __name__ == "__main__": @@ -219,8 +241,10 @@ def setup_training(params: Dict, use_multilayer: bool = True) -> Tuple[nn.Module "num_lstm_layers": 3, "optimizer": "Adam", "learning_rate": 0.01, - "vocab_size": 50000 # This will be updated by load_and_preprocess_data + "vocab_size": 50000, # This will be updated by load_and_preprocess_data } - + # Setup complete training environment - model, optimizer, criterion, train_dataloader, val_dataloader, vocab_size = setup_training(params, use_multilayer=True) \ No newline at end of file + model, optimizer, criterion, train_dataloader, val_dataloader, vocab_size = setup_training( + params, 
use_multilayer=True + ) From 8c562965233e3b17afebb3b9e0aa66acecae2126 Mon Sep 17 00:00:00 2001 From: LeoRoccoBreedt Date: Tue, 8 Apr 2025 09:48:13 +0200 Subject: [PATCH 091/125] chore: update colab link for branch --- .../debug-model-training-runs/debug_training_runs.ipynb | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/how-to-guides/debug-model-training-runs/debug_training_runs.ipynb b/how-to-guides/debug-model-training-runs/debug_training_runs.ipynb index ff1d152..7d18660 100644 --- a/how-to-guides/debug-model-training-runs/debug_training_runs.ipynb +++ b/how-to-guides/debug-model-training-runs/debug_training_runs.ipynb @@ -6,7 +6,7 @@ "source": [ "# Debug Model Training with Neptune\n", "\n", - " \n", + " \n", " \"Open \n", "" ] From 247679a1dbedff4ca7decb97495da43fb74e4d27 Mon Sep 17 00:00:00 2001 From: LeoRoccoBreedt Date: Tue, 8 Apr 2025 11:55:31 +0200 Subject: [PATCH 092/125] refactor: keep notebook more self-contained for use in Colab --- .../debug_training_runs.ipynb | 120 ++++++++++++++++-- 1 file changed, 112 insertions(+), 8 deletions(-) diff --git a/how-to-guides/debug-model-training-runs/debug_training_runs.ipynb b/how-to-guides/debug-model-training-runs/debug_training_runs.ipynb index 7d18660..49ca6b9 100644 --- a/how-to-guides/debug-model-training-runs/debug_training_runs.ipynb +++ b/how-to-guides/debug-model-training-runs/debug_training_runs.ipynb @@ -114,9 +114,16 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "### Setup data, model and other PyTorch required functions\n", + "### Setup data, model and other dependencies\n", "\n", - "The `setup.py` script wraps the data and model creation for use in this tutorial. You can use your own data and model setup if required. " + "In the next cell, we'll:\n", + "1. Import required PyTorch and HuggingFace libraries\n", + "2. Download and load a [next token prediction dataset](https://huggingface.co/datasets/Na0s/Next_Token_Prediction_dataset) from HuggingFace\n", + "3. 
Create PyTorch DataLoaders for training\n", + "4. Calculate vocabulary size for the model\n", + "5. Define a multilayer model for training\n", + "\n", + "You can modify this setup to use your own data and model architecture if needed." ] }, { @@ -125,12 +132,109 @@ "metadata": {}, "outputs": [], "source": [ - "from setup import setup_training\n", + "import torch\n", + "import torch.nn as nn\n", + "import torch.optim as optim\n", + "from datasets import load_dataset\n", + "from torch.utils.data import DataLoader\n", + "\n", + "# Download dataset\n", + "base_url = \"https://huggingface.co/datasets/Na0s/Next_Token_Prediction_dataset/resolve/main/data/\"\n", + "data_files = {\n", + " \"train\": base_url + \"train-00001-of-00067.parquet\",\n", + " \"validation\": base_url + \"validation-00000-of-00001.parquet\",\n", + "}\n", + "\n", + "# Load dataset\n", + "data_subset = load_dataset(\"parquet\", data_files=data_files, num_proc=4)\n", + "# validation_subset = data_subset.get(\"validation\").train_test_split(test_size=0.1)\n", + "\n", + "print(f\"Training samples: {data_subset['train'].num_rows}\")\n", + "# print(f\"Validation samples: {validation_subset['test'].num_rows}\")\n", + "\n", + "# Convert to PyTorch format\n", + "train_subset = data_subset[\"train\"].with_format(\n", + " type=\"torch\", columns=[\"text\", \"input_ids\", \"labels\"]\n", + ")\n", + "\"\"\"validation_subset = validation_subset[\"test\"].with_format(\n", + " type=\"torch\", columns=[\"text\", \"input_ids\", \"labels\"]\n", + ")\"\"\"\n", + "\n", + "# Create dataloaders\n", + "train_dataloader = DataLoader(train_subset, batch_size=params[\"batch_size\"], shuffle=True)\n", + "# val_dataloader = DataLoader(validation_subset, batch_size=params[\"batch_size\"], shuffle=True)\n", + "\n", + "# Calculate vocabulary size\n", + "params[\"vocab_size\"] = (\n", + " max([token for sentence in data_subset[\"train\"][\"input_ids\"] for token in sentence]) + 1\n", + ")\n", + "print(f\"Vocabulary size: 
{params[\"vocab_size\"]}\")\n", + "\n", + "\n", + "# Create model\n", + "class MultilayerModel(nn.Module):\n", + " \"\"\"A larger language model with multiple LSTM and fully connected layers.\"\"\"\n", + "\n", + " def __init__(self, vocab_size, embed_size, hidden_size):\n", + " super(MultilayerModel, self).__init__()\n", + " self.embedding = nn.Embedding(vocab_size, embed_size)\n", "\n", - "# Setup complete training environment\n", - "model, optimizer, criterion, train_dataloader, val_dataloader, vocab_size = setup_training(\n", - " params, use_multilayer=True\n", - ")" + " # Create multiple LSTM layers\n", + " self.lstm_layers = nn.ModuleList(\n", + " [\n", + " nn.LSTM(\n", + " hidden_size if i > 0 else embed_size,\n", + " hidden_size,\n", + " num_layers=1,\n", + " batch_first=True,\n", + " )\n", + " for i in range(10) # 10 LSTM layers\n", + " ]\n", + " )\n", + "\n", + " # Create multiple fully connected layers\n", + " self.fc_layers = nn.ModuleList(\n", + " [nn.Linear(hidden_size, hidden_size) for _ in range(9)] # 9 FC layers\n", + " )\n", + "\n", + " # Final layer to project back to vocab size\n", + " self.final_layer = nn.Linear(hidden_size, vocab_size)\n", + "\n", + " # Add dropout for regularization\n", + " self.dropout = nn.Dropout(0.1)\n", + "\n", + " def forward(self, x):\n", + " # Embedding layer\n", + " x = self.embedding(x)\n", + "\n", + " # Process through LSTM layers\n", + " for lstm in self.lstm_layers:\n", + " x, _ = lstm(x)\n", + " x = self.dropout(x)\n", + "\n", + " # Process through FC layers\n", + " for fc in self.fc_layers:\n", + " x = fc(x)\n", + " x = self.dropout(x)\n", + " x = torch.relu(x)\n", + "\n", + " # Final projection\n", + " out = self.final_layer(x)\n", + " return out\n", + "\n", + "\n", + "model = MultilayerModel(\n", + " params[\"vocab_size\"],\n", + " params[\"embed_size\"],\n", + " params[\"hidden_size\"],\n", + ")\n", + "print(f\"Model created: {model.__class__.__name__}\")\n", + "optimizer = optim.Adam(model.parameters(), 
lr=params[\"learning_rate\"])\n", + "print(f\"Optimizer: {optimizer.__class__.__name__}\")\n", + "criterion = nn.CrossEntropyLoss(\n", + " ignore_index=-100\n", + ") # Ignore the buffering index of -100 in the dataset\n", + "print(f\"Criterion: {criterion.__class__.__name__}\")" ] }, { @@ -241,7 +345,7 @@ " labels = batch[\"labels\"].to(params[\"device\"])\n", " optimizer.zero_grad()\n", " logits = model(input_ids)\n", - " loss = criterion(logits.view(-1, vocab_size), labels.view(-1))\n", + " loss = criterion(logits.view(-1, params[\"vocab_size\"]), labels.view(-1))\n", " loss.backward()\n", " optimizer.step()\n", "\n", From 408ba02dff58b1c5ef048af5ba0951e5c27fd5a6 Mon Sep 17 00:00:00 2001 From: LeoRoccoBreedt Date: Tue, 8 Apr 2025 12:00:42 +0200 Subject: [PATCH 093/125] fix: f string compatibility in Colab --- .../debug_training_runs.ipynb | 18 +++++++++++++++--- 1 file changed, 15 insertions(+), 3 deletions(-) diff --git a/how-to-guides/debug-model-training-runs/debug_training_runs.ipynb b/how-to-guides/debug-model-training-runs/debug_training_runs.ipynb index 49ca6b9..48edb9d 100644 --- a/how-to-guides/debug-model-training-runs/debug_training_runs.ipynb +++ b/how-to-guides/debug-model-training-runs/debug_training_runs.ipynb @@ -128,9 +128,21 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 24, "metadata": {}, - "outputs": [], + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Training samples: 81926\n", + "Vocabulary size: 128257\n", + "Model created: MultilayerModel\n", + "Optimizer: Adam\n", + "Criterion: CrossEntropyLoss\n" + ] + } + ], "source": [ "import torch\n", "import torch.nn as nn\n", @@ -168,7 +180,7 @@ "params[\"vocab_size\"] = (\n", " max([token for sentence in data_subset[\"train\"][\"input_ids\"] for token in sentence]) + 1\n", ")\n", - "print(f\"Vocabulary size: {params[\"vocab_size\"]}\")\n", + "print(f\"Vocabulary size: {params['vocab_size']}\")\n", "\n", "\n", "# Create model\n", From 
314e8a2ede60c6baa20cc30806a400d4ca33f627 Mon Sep 17 00:00:00 2001 From: LeoRoccoBreedt Date: Tue, 8 Apr 2025 12:03:22 +0200 Subject: [PATCH 094/125] fix: ensure model object is on correct device --- .../debug-model-training-runs/debug_training_runs.ipynb | 1 + 1 file changed, 1 insertion(+) diff --git a/how-to-guides/debug-model-training-runs/debug_training_runs.ipynb b/how-to-guides/debug-model-training-runs/debug_training_runs.ipynb index 48edb9d..e101a59 100644 --- a/how-to-guides/debug-model-training-runs/debug_training_runs.ipynb +++ b/how-to-guides/debug-model-training-runs/debug_training_runs.ipynb @@ -240,6 +240,7 @@ " params[\"embed_size\"],\n", " params[\"hidden_size\"],\n", ")\n", + "model.to(params[\"device\"])\n", "print(f\"Model created: {model.__class__.__name__}\")\n", "optimizer = optim.Adam(model.parameters(), lr=params[\"learning_rate\"])\n", "print(f\"Optimizer: {optimizer.__class__.__name__}\")\n", From 62227bc6985a733aea940b7c60b8aad8f519b552 Mon Sep 17 00:00:00 2001 From: LeoRoccoBreedt Date: Wed, 9 Apr 2025 10:13:00 +0200 Subject: [PATCH 095/125] chore: Add header links to GH, Neptune and docs These need to be updated the the final branch when merged --- .../debug-model-training-runs/debug_training_runs.ipynb | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/how-to-guides/debug-model-training-runs/debug_training_runs.ipynb b/how-to-guides/debug-model-training-runs/debug_training_runs.ipynb index e101a59..4c76655 100644 --- a/how-to-guides/debug-model-training-runs/debug_training_runs.ipynb +++ b/how-to-guides/debug-model-training-runs/debug_training_runs.ipynb @@ -8,6 +8,15 @@ "\n", " \n", " \"Open \n", + "\n", + "\n", + " \"Open\n", + "\n", + "\n", + " \"Explore\n", + "\n", + "\n", + " \"View\n", "" ] }, From 505f41c5bae3e4dd35ebc2aebcd1233156f14a6f Mon Sep 17 00:00:00 2001 From: LeoRoccoBreedt Date: Mon, 14 Apr 2025 11:26:55 +0200 Subject: [PATCH 096/125] style: minor updates to markdown --- .../debug_training_runs.ipynb | 15 
+++++++-------- 1 file changed, 7 insertions(+), 8 deletions(-) diff --git a/how-to-guides/debug-model-training-runs/debug_training_runs.ipynb b/how-to-guides/debug-model-training-runs/debug_training_runs.ipynb index 4c76655..b2d5a11 100644 --- a/how-to-guides/debug-model-training-runs/debug_training_runs.ipynb +++ b/how-to-guides/debug-model-training-runs/debug_training_runs.ipynb @@ -12,9 +12,6 @@ "\n", " \"Open\n", "\n", - "\n", - " \"Explore\n", - "\n", "\n", " \"View\n", "" @@ -34,7 +31,10 @@ "2. Track **layer-wise gradient norms** during training \n", "3. Analyze the metrics in Neptune's UI to **debug training issues**\n", "\n", - "Step through a pre-configured report [here](https://scale.neptune.ai/leo/pytorch-tutorial/reports/9e79d952-272a-4a38-83e5-27df4dd225ec) to see a finalized version.\n", + "Step through a pre-configured report:\n", + "\n", + " \"Explore\n", + "\n", "\n", "_Note: This is a code recipe that you can adapt for your own model training needs._\n", "\n", @@ -132,7 +132,7 @@ "4. Calculate vocabulary size for the model\n", "5. Define a multilayer model for training\n", "\n", - "You can modify this setup to use your own data and model architecture if needed." + "_You can modify this setup to use your own data and model architecture._" ] }, { @@ -191,7 +191,6 @@ ")\n", "print(f\"Vocabulary size: {params['vocab_size']}\")\n", "\n", - "\n", "# Create model\n", "class MultilayerModel(nn.Module):\n", " \"\"\"A larger language model with multiple LSTM and fully connected layers.\"\"\"\n", @@ -284,7 +283,7 @@ "from neptune_scale import Run\n", "\n", "run = Run(\n", - " experiment_name=\"pytorch-text\", # Create a run that is the head of an experiment. 
This is also used for forking.\n", + " experiment_name=\"pytorch-text\", # Create a run that is the head of an experiment.\n", ")\n", "\n", "print(run.get_experiment_url())" @@ -314,7 +313,7 @@ " }\n", ")\n", "\n", - "run.add_tags(tags=[\"text\", \"LLM\", \"Simple\", params[\"optimizer\"]])\n", + "run.add_tags(tags=[\"text\", \"LLM\", \"multi-layer\", params[\"optimizer\"]])\n", "\n", "print(f\"See configuration parameters:\\n{run.get_experiment_url() + '&detailsTab=metadata'}\")" ] From bed489922715d058545a94357dfba23364b5ebae Mon Sep 17 00:00:00 2001 From: LeoRoccoBreedt Date: Mon, 14 Apr 2025 11:38:03 +0200 Subject: [PATCH 097/125] chore: remove unused code --- .../debug-model-training-runs/setup.py | 250 ------------------ 1 file changed, 250 deletions(-) delete mode 100644 how-to-guides/debug-model-training-runs/setup.py diff --git a/how-to-guides/debug-model-training-runs/setup.py b/how-to-guides/debug-model-training-runs/setup.py deleted file mode 100644 index 4607715..0000000 --- a/how-to-guides/debug-model-training-runs/setup.py +++ /dev/null @@ -1,250 +0,0 @@ -from typing import Dict, Tuple - -import torch -import torch.nn as nn -import torch.optim as optim -from datasets import load_dataset -from torch.utils.data import DataLoader - - -# Model Classes -class SimpleLLM(nn.Module): - """A simple language model with a single LSTM layer.""" - - def __init__(self, vocab_size, embed_size, hidden_size, num_layers): - super(SimpleLLM, self).__init__() - self.embedding = nn.Embedding(vocab_size, embed_size) - self.lstm = nn.LSTM(embed_size, hidden_size, num_layers=num_layers, batch_first=True) - self.fc1 = nn.Linear(hidden_size, vocab_size) - - def forward(self, x): - x = self.embedding(x) - lstm_out, _ = self.lstm(x) # LSTM returns output and hidden/cell state tuple - out = self.fc1(lstm_out) # Use the last output from the LSTM - return out - - -class MultilayerModel(nn.Module): - """A larger language model with multiple LSTM and fully connected layers.""" - - 
def __init__(self, vocab_size, embed_size, hidden_size, num_layers): - super(MultilayerModel, self).__init__() - self.embedding = nn.Embedding(vocab_size, embed_size) - - # Create multiple LSTM layers - self.lstm_layers = nn.ModuleList( - [ - nn.LSTM( - hidden_size if i > 0 else embed_size, - hidden_size, - num_layers=1, - batch_first=True, - ) - for i in range(10) # 10 LSTM layers - ] - ) - - # Create multiple fully connected layers - self.fc_layers = nn.ModuleList( - [nn.Linear(hidden_size, hidden_size) for _ in range(9)] # 9 FC layers - ) - - # Final layer to project back to vocab size - self.final_layer = nn.Linear(hidden_size, vocab_size) - - # Add dropout for regularization - self.dropout = nn.Dropout(0.1) - - def forward(self, x): - # Embedding layer - x = self.embedding(x) - - # Process through LSTM layers - for lstm in self.lstm_layers: - x, _ = lstm(x) - x = self.dropout(x) - - # Process through FC layers - for fc in self.fc_layers: - x = fc(x) - x = self.dropout(x) - x = torch.relu(x) - - # Final projection - out = self.final_layer(x) - return out - - -# Data Loading Function -def load_and_preprocess_data(params: Dict) -> Tuple[DataLoader, DataLoader, int]: - """ - Load and preprocess the next token prediction dataset from HuggingFace. 
- - Args: - params (Dict): Dictionary containing parameters for data loading and model configuration - - Returns: - Tuple[DataLoader, DataLoader, int]: Training dataloader, validation dataloader, and vocabulary size - """ - # Download dataset - base_url = ( - "https://huggingface.co/datasets/Na0s/Next_Token_Prediction_dataset/resolve/main/data/" - ) - data_files = { - "train": base_url + "train-00001-of-00067.parquet", - "validation": base_url + "validation-00000-of-00001.parquet", - } - - # Load dataset - data_subset = load_dataset("parquet", data_files=data_files, num_proc=4) - validation_subset = data_subset.get("validation").train_test_split(test_size=0.1) - - print(f"Training samples: {data_subset['train'].num_rows}") - print(f"Validation samples: {validation_subset['test'].num_rows}") - - # Convert to PyTorch format - train_subset = data_subset["train"].with_format( - type="torch", columns=["text", "input_ids", "labels"] - ) - validation_subset = validation_subset["test"].with_format( - type="torch", columns=["text", "input_ids", "labels"] - ) - - # Create dataloaders - train_dataloader = DataLoader(train_subset, batch_size=params["batch_size"], shuffle=True) - val_dataloader = DataLoader(validation_subset, batch_size=params["batch_size"], shuffle=True) - - # Calculate vocabulary size - vocab_size = ( - max([token for sentence in data_subset["train"]["input_ids"] for token in sentence]) + 1 - ) - print(f"Vocabulary size: {vocab_size}") - - return train_dataloader, val_dataloader, vocab_size - - -# Helper function to create model -def create_model(params: Dict, use_multilayer: bool = True) -> nn.Module: - """ - Create a model based on the specified parameters. 
- - Args: - params (Dict): Dictionary containing model parameters - use_multilayer (bool): Whether to use the multilayer model - - Returns: - nn.Module: The created model - """ - if use_multilayer: - return MultilayerModel( - params["vocab_size"], - params["embed_size"], - params["hidden_size"], - params["num_lstm_layers"], - ) - else: - return SimpleLLM( - params["vocab_size"], - params["embed_size"], - params["hidden_size"], - params["num_lstm_layers"], - ) - - -# Helper function to create optimizer -def create_optimizer(model: nn.Module, params: Dict) -> optim.Optimizer: - """ - Create an optimizer for the model. - - Args: - model (nn.Module): The model to optimize - params (Dict): Dictionary containing optimizer parameters - - Returns: - optim.Optimizer: The created optimizer - """ - if params["optimizer"] == "Adam": - return optim.Adam(model.parameters(), lr=params["learning_rate"]) - elif params["optimizer"] == "SGD": - return optim.SGD(model.parameters(), lr=params["learning_rate"]) - else: - raise ValueError(f"Unsupported optimizer: {params['optimizer']}") - - -# Helper function to create criterion -def create_criterion() -> nn.Module: - """ - Create a loss function for the model. - - Returns: - nn.Module: The created criterion - """ - return nn.CrossEntropyLoss( - ignore_index=-100 - ) # Ignore the buffering index of -100 in the dataset - - -# Helper function to setup training -def setup_training( - params: Dict, use_multilayer: bool = True -) -> Tuple[nn.Module, optim.Optimizer, nn.Module, DataLoader, DataLoader, int]: - """ - Setup the complete training environment. 
- - Args: - params (Dict): Dictionary containing all parameters - use_multilayer (bool): Whether to use the multilayer model - - Returns: - Tuple[nn.Module, optim.Optimizer, nn.Module, DataLoader, DataLoader, int]: - Model, optimizer, criterion, train dataloader, validation dataloader, and vocabulary size - """ - # Load data - train_dataloader, val_dataloader, vocab_size = load_and_preprocess_data(params) - params["vocab_size"] = vocab_size - - # Create model - model = create_model(params, use_multilayer=use_multilayer) - # Move model to device - model.to(params["device"]) - print(f"Model created: {model.__class__.__name__}") - - # Create optimizer - optimizer = create_optimizer(model, params) - print(f"Optimizer: {optimizer.__class__.__name__}") - - # Create criterion - criterion = create_criterion() - print(f"Criterion: {criterion.__class__.__name__}") - - return model, optimizer, criterion, train_dataloader, val_dataloader, vocab_size - - -# Explicitly define what should be importable -__all__ = [ - "SimpleLLM", - "MultilayerModel", - "load_and_preprocess_data", - "create_model", - "create_optimizer", - "create_criterion", - "setup_training", -] - -if __name__ == "__main__": - # Example usage with all required parameters - params = { - "batch_size": 8, - "device": torch.device("cuda" if torch.cuda.is_available() else "cpu"), - "embed_size": 1000, - "hidden_size": 256, - "num_lstm_layers": 3, - "optimizer": "Adam", - "learning_rate": 0.01, - "vocab_size": 50000, # This will be updated by load_and_preprocess_data - } - - # Setup complete training environment - model, optimizer, criterion, train_dataloader, val_dataloader, vocab_size = setup_training( - params, use_multilayer=True - ) From 75885bbf9499890992246f23a6278c02e57f039a Mon Sep 17 00:00:00 2001 From: LeoRoccoBreedt Date: Tue, 15 Apr 2025 13:42:14 +0200 Subject: [PATCH 098/125] style: remove image reference --- .../debug_training_runs.ipynb | 21 ++++++++++-------- 
.../tutorial-images/debugging_dashboard.png | Bin 151345 -> 0 bytes .../debugging_regex_search.png | Bin 102276 -> 0 bytes .../tutorial-images/debugging_report.png | Bin 124967 -> 0 bytes 4 files changed, 12 insertions(+), 9 deletions(-) delete mode 100644 how-to-guides/debug-model-training-runs/tutorial-images/debugging_dashboard.png delete mode 100644 how-to-guides/debug-model-training-runs/tutorial-images/debugging_regex_search.png delete mode 100644 how-to-guides/debug-model-training-runs/tutorial-images/debugging_report.png diff --git a/how-to-guides/debug-model-training-runs/debug_training_runs.ipynb b/how-to-guides/debug-model-training-runs/debug_training_runs.ipynb index b2d5a11..8728019 100644 --- a/how-to-guides/debug-model-training-runs/debug_training_runs.ipynb +++ b/how-to-guides/debug-model-training-runs/debug_training_runs.ipynb @@ -36,9 +36,7 @@ " \"Explore\n", "\n", "\n", - "_Note: This is a code recipe that you can adapt for your own model training needs._\n", - "\n", - " ![Layer-wise gradient norms visualization in Neptune](tutorial-images/debugging_report.png)" + "_Note: This is a code recipe that you can adapt for your own model training needs._" ] }, { @@ -387,20 +385,25 @@ "### Step 4: _Analyze and debug training_\n", "While the model is training, you can start using the Neptune web app to browse your metrics and create custom analyses and visualizations:\n", "1. To visualize the large number of metrics being logged in near real time, navigate to the **Charts** tab of the active run (_or select link above_).\n", - "2. Filter the metrics using the [advanced regex searching capabilities](https://docs-beta.neptune.ai/charts#filtering-charts). For example, enter `gradient & fc & layers.[0-5] & norm` in the search bar. This query filters the metrics for the first 6 layers of the gradients norms of the fully connected layers. 
You can specify down to exactly the metrics name you want.\n", "\n", - "![Alt text](tutorial-images/debugging_regex_search.png)\n", + "2. Filter the metrics using the [advanced regex searching capabilities](https://docs-beta.neptune.ai/charts#filtering-charts). For example, enter `gradient & fc & layers.[0-5] & norm` in the search bar. This query filters the metrics for the first 6 layers of the gradients norms of the fully connected layers. You can specify down to exactly the metrics name you want.\n", "\n", "3. Export the filter to a [dashboard](https://docs-beta.neptune.ai/custom_dashboard). The saved dashboard will now only display these metrics during training. This is useful if you know that a certain set of layers can be troublesome during training. \n", + "\n", "4. Alternatively, use the [dynamic metric selection](https://docs-beta.neptune.ai/chart_widget#dynamic-metric-selection) and create a new chart widget to display all LSTM layers gradient norms in one chart. Again, use the `(.*gradient)(.*lstm)(.*norm)` query. This makes it easy to have an automatically updating chart that allows you to view all layers on a single chart for rapid debugging in case vanishing or exploding gradients appear. \n", "\n", - "![Alt text](tutorial-images/debugging_dashboard.png)\n", + "**Explore pre-configured dashboard:**\n", + "\n", + " \"Explore\n", + "\n", "\n", - "5. To document this behavior, create a [custom report](https://docs-beta.neptune.ai/reports) to outline the model training, global metrics, debugging metrics for the model you're training. This allows you to keep track of any anomalies but also to see what worked or did not work during training.\n", "\n", - "![Alt text](tutorial-images/debugging_report.png)\n", + "5. To document this behavior, create a [custom report](https://docs-beta.neptune.ai/reports) to outline the model training, global metrics, debugging metrics for the model you're training. 
This allows you to keep track of any anomalies but also to see what worked or did not work during training.\n", "\n", - "See the pre-configured [example of the training report](https://scale.neptune.ai/leo/pytorch-tutorial/reports/9e79d952-272a-4a38-83e5-27df4dd225ec).\n", + "**Explore pre-configured report:**\n", + "\n", + " \"Explore\n", + "\n", "\n", "See also: PyTorch layer-wise tracking package [here](TODO:Link to integration for tracking layer-wise metrics)\n" ] diff --git a/how-to-guides/debug-model-training-runs/tutorial-images/debugging_dashboard.png b/how-to-guides/debug-model-training-runs/tutorial-images/debugging_dashboard.png deleted file mode 100644 index 719ef1fca137cd6f44136e4f69db0579e897ecc8..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 151345 zcmce8WmFwa(B=Ra_dsxW*C4?m1cD|I+$BhGcey|a5Ii^p3GVLh5Fog_ySuYD-<#xp z_niH)KlbdLbGb9j?dk67s_L$%o}Qq0@{*`X1V{h?ph~?FR|Ei9Jph0KA;3ZY!!8z5 z0QrKlSCkY3iU)~yAQvDLQ8`fnC<{lv*N25%Big)Cw+8^Uj$a?B9_!o>0PvJ0B`&Ju zqO-r?s`dQhQTP~_pM~rVMS3>fm%A?si8{_BDCGs!ri;4SG2_Y{jErYa{e5!=vrVdu zam`a~G7SkzzMJh6<4PbNB@FS`7@`ic|!BPA@%F*CT|K1blZU<;u93ppF)_Cp=@C4zs1Ti{~uNM0?j~5j|_UEB&R)+ z#fkDy1?3_|pi(q6G`xEe)=KLo4XwXT6d%`9V+L1MRCJU(Z?4S$C<=U8i3z36AE*rg zmzPf0SNT)N2i~#zJ#PzSFaJJ|BLAomr&UPEktoH(1NvKX^7773SW6R^>OM5!;?~`P z$79R}q z~$^91nT|>j;@T;dUzP|k1!Y*)qefdp{1V6UCo+TC12n&;*ZT{@IIom4J z?|?hs8I#;se7%F<&49<4C5={2R`^Bzvwqjk^j*nS>E@QDPri$2cIR5WGvE)??t`Z(5*ujf zc<}(F@m`QsTcGq@`sG{lH?;BS;bjkb;*7!HwatiX?UBWTke2Gq@hF;;5tmjS_O;7E zPt&eQGkqsV?3mwWHpw$9lH5WWq?UML+EO*2Y4=U}O>1d{L{n;(dL7^p$-a^FDjVP5 zUkSE)@MAmcdWpB7ATWJ1eW${vGndz_2$awl9G_`+R8C=ku|0l}EY;Jj&i{!l%IMWv zj+uiJo`jfP{AVcBclkWl@z6+L!twL-3-H{r_73#5R|o1s0q=7_9b+9Zdyhwi(Q$Ex z(-CcW0;Xx#m?|bQn5Ba54$wenY7+v$i^UoWzOdRrJW@%i_iA##g#lLO-;j^xjo|{o z&m^ALTk;2v4-yab7~ZaK9Qy|c4wjBmIyxjN-jA-|guO|c+%CjKM8MNsx2NGN89BZu zvo&_tNI=$2ue{LbO`d-Z*4>eL`T6n;<1$Sf}PsERU5NWZaQsywV z_F1-92cJcH@gXs8z3u4DQCW=-3TKP(#yAor2$}MV=uqulT06&Syt-Cmnz7!2pXtpy 
zP&@8@C!5srOd;oDg%QBs%3d39Ms=~=eJAU}h%~di2eQIzy0!!%PDx)@d+RNzTM`Zy zcL={&VO-|=NUuYx@Nk^)+hO(+d`r$+`G9tLX=CFs+yoEM($ezs@$GU*;!bdAmP-Q# zvenNCX;C+t;_aS0A7VoTw-+((Qt|`}!p^~{dPPVUM#8Uw_n?MuJm*U5OqsOpgs{B4us6lr7Va49!(tdKK-j+qTQ% zZ`i$dw37DYVXS|#4uqA0+3r>|;^KK?stf~U8g`u8WbQ4R7~X9WDjh-9!meDX2KbyX z{&s~E&tYya+Aj?Y7SR(4l?tD7y^9HrT^pKcT?Pd`U5_WiZEp8hE;L%Zz&eSN04hBcv2B(kcCML8%>&`V5Afm4Ra#dbhUnBg` z3oP(GE+gYy>w2N#b*_w^o!#+%#n9?%SMc%KSx20%SJ39qq~HaMWikq9Q%MqnsHOu_ zwHyX*Hq+eW2)B>e6#hQN0Jx%M%~{TcmmniRRnUuBsPfs7Mrn)NOyKRDu+4GUv=yEH z98P>ey=LPRshVPfrHOUhSJna8dZ5^{%Z)sZ`&~V*rjv?6gGyA`F^?+6_jU~RjeTjR z8y&2ycFXoTHE!uK&^PU%7`uwB34LRm!$xFvf6v9aXk`@@uWG<7$LGhx zRSQea#Iscsaj(u#&4R$iqfxh9ZDvsWOG~=J?P3q{9Z@-%r_c|#PO!lF*M4-Zp#f2+ zOJ>iOC`E(Z5HRYtn=p`8S&0pqCiZS8VDV2|c~Ek*N@T&d!Mf za?%?e^6Tw8PhM;+zj(@k(zD>1ZNrGjY|338<#=8T3g1rzIq>0l-EoL_>6Mg(jvMfw z_0?rj3=9k~*mapIFuhouqSNnK<ZUS38iX?#@v;j0Mg2v5SPjo%=l2DDJgUO%6>j2wp`;+u#Z#Q*CN>K% zy3T;hNGBK`CF?PsK{kXW3x2B zjJK;jvNycFA$+LkvyyU>pHey4?~i^4G8k6?m)q=5qjm>zf<>qs69tg`nA~Y`G+e;Qh+Yw5@bBuJP1%gH z{8^%M6l*~3Uzee;wk}pp*RCAZ2i0P$^H@>Ps}pcvP3+bT>!yU5Kl!N1wd_q**j_eb z<3FBNObxfBjvp*s)SnHhU;BQtrSvp4>11>6skt8niUdh)Gb9-EVieTdZSWy4{*ij7O*IaO<(LyW$3ne zt*#P$4Hqbb_Vo)M26)j?pBdLI1sl==RNp%D6qV0%$pn-_Pho)Nxsg%{^FHO16=$hP zw>bb@UHkT%a^bTO%em?1uo^}5cE@nq zU4!c(w!;*JTI6((8AP36J>>!wGD^76B&WP^1~8i+jR96 za~txLiY5OUp30E7G7^T&S!C%Ny;$bDnu;%6!1n15m+qT0hfbfHkRuLrd%3$ujR zsZODXJ~T}uFFNb}Lz+GvDiFuLyiigV^tkO`aNN}|wlFo?blr2>)Gh4yX9{_N*E)%8 zWz)YHHbp|}|J}z2fk*%wOJ^GV9u9`b6Y~X&hy^GlhCEO`C}x*0^?5X@jjcVd#rhW(FfzMg;1(QV5%^}wi>fvhKbeV|Iqz{5tKEo9zFV@iIJ7-XYCkEl> z+m77tc28}u*mqB+Fi0qg&-RItkK4Pk5BRd^r;j(yaw%NNjx8I7+E526b$a&N5q9sB zk5)+ULL^ILb^pf1sH8B{Zu)pM^7*yA`BTwT?2?6u`KGrR5`@V!I*JN6P$l{q!;%&+ zvUKrc<%?XrFB&n3GD_OO4{uah1-1u225wUOgvnVUetB9NS)IWpT zoD5I?;ZbtBl9NCaH~e#)-J|qa*~UZ6n|E&qHrNpKS@A#LKn*p9k*&BSFuZ?z79Q5aZkYf6V-Et`@!og4{)Ho=TVkoPuC&-LcKd%0!Xk5e+uvZ zVoUf3DZy06m2Bmv$@=)_E!Hd(y|Bn1KP|4@4pe`iYY|WJ6vFWF%R$6qR@dc zh9L_?-)v^SFLI83$(nJUG^nv;5iryK3I+uOxD`ASWa9(tI)5Lg2@6gY 
zckXc)Vv7TI4gN(0sTiC4mt&|lvPLi7Y5%8J>*X*gx>7}rO7$%H8H!wbcUQ=*siefA z@bFjN4+okZZEd3a2Fh7kS+!r?h`otkhCV%3;@Z+a>tr!5gZm3As^@QW!>Dn5*aB)_ zU}-X&*a8jv$*wM^Cx{02p|kf7@=UHKq0<)6{>3ZHmGo1l#i(o@aW$Dt6cBw&rXGtX zJ5+&RWv<;0mO739^q(S~FRvYNn&CfVTmAlMU|)oP^3I#d-6UmN>xfv3Ah5Mn1O&&@ zCaSM~+l`Nag3|f*p?hfx9lnf;4Z}yK%UY7+p-Rt1ZhXjI_bSs`I3(da{HG~>i?_J$ zDhh4_I>>_H1arks>T>KtG{~uUa6lEeI#01y zw8cHnY{KySp13Ig9c>PlRb26B1al2L=U+4pg&&$dHgjKxZn-j9ayrsL6svD+O6o zl33uPel$JDkm=xm%z}XVC1@P3FyZ=Q4JN*V7p-h6*jBD8v%XlH5j%;Xh(H*}F5W7* zb;<&`+me(FmUEQQUaXOpSqSbGC)4x7^995Ysy~iPI8HoHs3jnP2A=RZ=lU%VObp)t z3ilw(li}pTex6SFS5QQ^2^pgQgspETYnZcLpr+(acKjwonC>G{EAeM=U~Vsa>Nh%0 zbRue;ni$NmUV!#u(R=2bSHB{uxl>sYzuPcWfKN#NWbV9fcI|lnJZbM!w9F13Kf98} zWG-uzO_udHpXDK+Qp=>}8g2|^Cy;&&gH2Q{Et%nEuN}b>* zen?EvXao30|M<>>Iz9Tr7DEOacl+{flia?pZ>#?;_3aRMg**%yvH97Ol=I(~V)Krne$Sx|29ml#2dy9HkSYDczkp^tGyFYCJx7UNM-MNm+|0P3UUlmj<(6 z$q`Oq!-`&lm2)okZsZYKCCP7}@)EUDNW!OLH^`ctWn}w%+a{Sl)_J+ps}Z|B576N1 zd566m(MZ%ZNA8i5dU#fuTC&6{!eU>_B>5bNr^k2VfgOSE^oFK6s@&T&DEx5|s~K+Y z#r@zv3W=IfCRJPfnz(yzq&2#f++|wQCETU4i31)FpDdvCmvVqC`eRiO7t!_}R|BS> zh+20CVqK0VQcV4@3$;C-OEy*>m4u+Qx?7I2nbP3qTKMhyVO6I?^Fh`E9T6X*GTwPIk$K=3yjI`o^ z+GhWgz1AHn?3eQk6+IWGzWut}%1vF5IsV({L^ID|$^!-?+c3;kUZ%dsGG1!Gg&`u53 zk6#e4l$40KXDW{xp0XEUe4=A=VbH>bBi3T@FA-(T9JpzoI(7>^? 
zVZ!* z(}3@Etqf(c1&3!@$I3G%ONdaTZhl`JSJ;>yZJzq|Nfny;JbL_u(C?>~h@nVAgxfd(51V4h9DEV08dKXWu;K4kG|Ght8 zttm3BwXT47@$C$p%?H&+4}rB~r%Fb#jU(dOCZIq#HVz6v0(+g_h$2=W$U=Fztv1mn z!PrHZo82iJaf2OgY%r2`L+Tr?W74BZ5eQIqf#wVvidF(61RA+$NObaDQx!mH7k8~J z#6G22_CNxN#MjZbq4fYfZf*n-Xo{-@X>AYoGfnDTKQF?dE%FBiWg$+K@2A|JCjJy} zo}ln9txyagxrC2wsjT<+P-L+5#%2*JM#;CYwBc(FL|J=#zBiHH17!5DJ2AWxqK1t8 zVaftqp(1+T+BS$lv!yl^5H~6-Fi3t551d~{E-|iHk&jMz7i!cQMx5UG&95LY50_yB z^t?{9)2Gjf1Y!|e3CX=liwT(LP%Jf}dKn}V4?C*%U=nz7G@;X8X46w@z|0wFFe-aT zmL2`0CgaoBmF<4@9H5%W6x7O{1tiN(uQzQc_Q0Ta*S9WsHc65l*MnQjv=Vr@4A9P$ zk2&qBO_93{WYk(uqU?&^WBITE4;-o4wYplyu{DEL` z+89NhO2^84h?M-MQqo z8A1B=_6-M@UjxU-sCnUcx@N5gc`NbXHFfFi^`w<2^LV05D-#QyLO$!yK<4S(5@gM{ zAeB9iNS6C-o4X}6J>%T_*|b~I=fF|~x!N!xZ>8wcR@DJb`%GVNt>5`Me=SvY|LB;t zjBsb?&K2_uQO>Vb`4*SS|IFyST-GSt2Xy`}&qGyWNOj{RTKh^`uAz=QS`S+zu}%U7 zwSC(j+|om99-B@?rig?GJj?5Z2IjTE+?4#t)S!@%ugcfw(URTO9eUJ-_u*wkxph`l zdFLF8*=b)LGORY@sw9)3fkus8$;V#?B|Y<51G4N^q2GXy?rF@5JLj!^-iM(khA&FL z?|14_uK?+J8Jl#KJgB|?OY%S@skJ1A%8xk>E~$OW{64;m;JnD5oxE%bir_+z5C<%9 zUZ3oX?Pn0piOnMAaRJa(mFYh;;Us@ntE0XY4sbMrnyqvK4Q@cvWG|rY zMOrpb-Iu*CFyw6H&XT3|WsNH*7wb&QFpID;Pm>~9t$xt(0u0Q0WM{o1RJ~^@OWyn|G%p2d!fzo3 z7e9VL;(VI#_uzIjBLpT66f9b(HzA!m8|)X5gmUSJ%8ZuO#wB{*5`3rvk4@=96|wb) zS%OjkT)dEZT*DE*qzbWc197_cdb7PV3NE4&dBo!R?e_&l84jO>p9kU-`mg_-&)s%o z@pOkngTlKFe1RX+iD|jPv-5HHNr9g7oqu96ov4OUdtaddQPf=yY5yxe@OQt=r4Y1T zON%4kFm*40I;p2)=;52I^+J!el>U?U5a;DOCGGs7x+->g=K>upO= zPgRuq^{cb^X3A`Vh4*(&>-D+2U!ksG0$(FdvdBm5Ha~W~C6#BL4EQOMazAtH1?C~~ zsv?Fn+3GdId{+9C5S$NJsC<6F8qhAi<1aQJK9^`SLwDmrG{Z#Fvs>{`V5vlvdM9O( z|DMm;TghTHckhl>&Wl=3sY(+ds7QMK2sqSr~t?~5<9VbHH^*)*q_pf z6#^Awk95-+9RU!TMVGiZOEj(LSM&JYE2%|i&b>|#{`XcTPo4Hq6YT5)&Zl(>xA89} zFqZ~FD3huPTP`d%Ho8rgND&HwAWkKSJDJO;sat0!H%b@zjk4P#3I$h*9$w!}@ z@NnFpYgS}>HSe6fd3q^%w*53b(}78V@bS|whwE3SI{5-G#UjYcF-EvS*&gLJ6dHZ% z_uUd*N!OeO;R9#WFxUzoWkRI&A@KbT^U284girZ#Lf<9%g-kH}*>AHU@M#w)%L?Pe z4|tj6^KK&JVk#YN?O^QC4-JAh{=*)b$N_NZ)fnHN zW^rHtSc%u%8L6D?1SaDXk@c=TQ0MDlWp#k*nIM^h|8PgtWgZaq^0Jj_!9@uKr8OKr 
zsX|a*54fc)D0Hk??ti%v6ATdC&_DNf>h(`hsjvqy&-m3#=#jDXhPVv=PeiUzWascq zGceyf&iBZ4tnf(oK0Yk1o?#+9 zCTUR_l>%3M7-t2wIRCb%tO8yu5lX%o#ek5JAAaY);YAcAg#L|v{G+1fvM1VlqPic; zixvk(_`knv{~<=!guTrPhwz}-Q26Y({+>Sm#T_u!eV%h@QI29|9as4$>qBzq3&~l` zIo~ki{t63|3$Gec57{p3$%*Y*Gc>qDaCnxacPQ3ZCQqSaEuM@NV+tP$rq)pc;_;D^`9%VPe5D7kH=a)9{}f~<>tqG^BpR}Qiw5T3BA zme@a2KjiPY-ZTvu7R){bG>eLeVZLJt`4q|um)(qheKA)3Z%??$Afh}V2AiltqkF?@ z?Vx>QjclI|CidCUh3Uu+HvhLtTy(#HjCiK;M2|z-vOM{n`3HqEDZ{Hl?_fD`rJkmy z`}Aq?&{p?g{vf&Etw8lXkoXU=iOUYv4YVJWv10IV{R*Kg2>)d}u@kn&#>PUhAxMP# zH~B$9p`_%i_kRdDOC=C}?|tWbhlY9}31m7}9@pH!T%U4CGB%2a#m3&=PxH)p)!>iB zD+b_7`xg@eXh~DaxTg@4T9$&Sj|wr9xbg{Gjg1#XPMV%wyVQsU<5-o}@bK%!2NI-} zmvhY5$ApIrF!A!gY;TzGG{F7yw5S1^FHa92OQw}frrnmTHa0SHOYxwz`P#-RU^`UP zCHfW@Nm5dnF^03Sz90qLGP3IK{~4VRUuTrq55=Lmy<6T}6g&QENG%qkLB&LLObj^> zE9qj9Cc6tYg$SBKlV**Wm5xw<|&6`jccrcyYmjmN;3cXxU|N=B&dg4b7tK! zR^dM_D;^r3V}d+C*uwl5CQDG#2atiGfd}_wJj8~ZyJQ#R!=*0c0!Bv0S5i`lM7*~# zfl5UC2MUUc?U;WkY9H8xvAQABX&a~2?b$qXreNGN4h3OsY;5%Q)d&4tI_iYnD2a?L z`BKg?=PgW;?|tnI^V{l_KmQ^kDNADs-7c4G&*v3QTPXj)9Xp;)dcsa(c3L&9o&FH{ z_3N{5L2o;c14uA%F%>ACm#;Fm#~KLtUj5dM$k+Mb$UN%Ju)eZp2mMru@?b|zEuN*~ z#+pEnn=-UB@xK01QYvTw&k@uU_S-B7A{gLHr%JY#t9YJ*LX#*WA!Vqkvkc;-8R_X? 
zpLV^ln8yy9TD+$%)*ZrZ7*~HCmr>?4^5Xn}99~ioPH$dUNPK*QqO1V*HKi;{5l6Lw z6T0{HMZDh;=chv@7~eATmxzo#fd^F|hQwY@YXy3)me;3uR9sWPB(T23!2M$>6T-?Y zqVGt@l@NU!1+De=8;g255=CpodR6($>_Qr9TjA0%CtcC1VGwSl(H<_DJ}uc}g6n@u zTr!{b>|2Tqb&XsbS@GN2uGzT?(u6-^oFH_6UVn~9%~HZ#zxO@tAkL>i?XLHYht1^W zl%-J_2fA;n8tgVj%41s&mkaHdn(G?F72pcjn{ z#6{wbzSny5*IQI~0@aDDJWqrH(ohZvT~mxXZA%NV!1C%Met3yicpM2)5Kqek56Kng zR3W+0Sm}zzGsVH;iW8xA@`^{L5VGG!Vdk)!!EMmvS~hVDIazJ!P&cs8=mM%WzLr@} zDFfiAN{n*{3>^ORR>q`BeXF$k`&Q*bPx1UQK37X3v?kJ!X6*-d*P`>6y}=z4j?6Q~ zcFgLwAj^+9>9M05QsO9bXb5@uYNwV;kb9|46 zcJ0oUazt+D?oofcUPmYVM>4$L$x~*{Nxx)bs(_Ssz>6a`Ar}YmdGix!;EqLoTkF#} zF{wefOWoV8lx==Ta_;Ut=ka^?g_<{yVZrHpMW$@7phVd&KH95Mqb?($3)`dhLz^QWEzsC2IC(c zjmJ$4Ya@@9D%9ev2}cfI1#Z~d1ykQyK7LbH_40!9n#S=dEF^?v0|yETuk|%Sr^ra1 zuYV6xITO)rJ*IgLiD-6hZ}eG1DK~?tMG;K74dW6#aedIisgQS*f}8SYL*g)}DNYLB zsI*1#CrFuFN;IwrlsG7hs!R%ch1(xp3IsPA_^wI1-65zZkvX5VvTU#QR$ts(Y7(7B z*EL9=MV#Nc4G^ACq#x3mJ7|g9>qDk2^tS*hAU+Szs&lfc6a?|V9o|lN}5I` zI$+xJXnAODsa0sf3Hx=R4j)#qv-a!AQRj}a_$zJ4C+tjvg~pt+%gRP9*B^my<}{Bu z8Ap+oP6QJ7j)G{^drsJ{wwTv*UVR;(6ZUaeGBqy#B!Y02e^?AD`*8$(| z?Cu$02VHD%z_jv>u4ngTB5=yng*vOa1Np3d!xQQY3u$+Y@!Qht2G+;a$R7&^!kzmy zP81Wp6cq+Sozp!_ZI`tIJ9r{|xB=Xaf$;54(w4+DWSQ_lr~Vzug{yFIZfXg%X+T84 z9gOm{b;VZU7bGMk8|n*&YOA@hZ$urL_YzLrUendYs){A6mzI9WDgU^gH+Dj3CssnH znl{L>*<=Yk$ivl!;At)RbuwILJ&a&u;x^|1pYKBh?eiwa`nj6Wmy@^K6GZFj%=f5H zlTND{g;+v_9l#`xLk8>xt0Co`;(}k<4tk#PKCAs46xzed=u-Cgt$63-{vE2k*!>~o z5iCI3nPS)3wW|=JFsftdl5=updxk2Ls(Y=5{tySYRDFrL8$M?PvtGEL%lyhKkjxoS z*F33V9YQjvT2pzN;7y4C6kWl8B5j>_F+4Ofsw>o9cp-=@Nl88PH7KgM-BbF-AT2F9 z_2v@7M_54pdPmij%1DA&N7oAn%OAjmpfiaVa|vOST(2b`wiPFOr?2Vi0+pRRQGeP3r1(<28-fD`?R?!X;#eszJF`J76x zq1fBks-dr1{}3vEdvcR+0o-DsBVg%7O_E3ZPkz>7pFzg9ZB`-{;78yxZ^_;PsM4epFa5BjPu_dr6Y=q3%G2Vi7XLsw?tC?&m@>$Pj+?;)!A^-IjmAN++GCK&GbE6 zQFtuWTCdsxrvPqna^gdqLZhamX}|}PN6b0vi_Tx3;gAz+g!3s7n*`!8KBC0VndnhZS4DM2ia}E0Tjh|5S>;<-CFcw-!X+`n?Dq9C^AWiv|?yh-VWC?A8bp z@g@V#l$7>O4Y1p)2f51DaXGMc_kJ&(yHDLOo6@{*Dl z`(mb)n{{f?j>+waaOqP z7K8rI@CqZ~ 
zKzKuH1D-|-*Ddt02G-31z4f{*!{+Xi*y7>H$>L^(7Q+06U+W1o_>*HcrXt()ut!1t z`gky%|Jr_iA^KHqVQ_7_TeL7IJ*w%*Xm2?al{Ty!n$qLL4ASGl`bgGHO@#MBpx3%- z4R8*^@TP~cbnnk#+c;p@Ksmol3mHI-kfy@MCVRwnt=wd-8Fru5b0M5@Lay07KK{By zVDE)H`S=yMA`jQk6@5(f*Sz^TVqfl+$ux_iGT}PmU7Zbjb;-PLe!J79+XrsQ8BGEA zPbFsf->Wn+o!JPCM)23?HBRl*I*?*)cEwv3ag3_^H(o`j!E7OV+W?vfvTJT;oT)b0WIXRqeZ~`vpJf5GzeW zY*kd4X)}W3Cmr#(@IajET0-5PEu9QWvKuYFf^}1fcTjjxQX~OxK|xWT?5?td6K#4l zX2BN{m~}-M{*#?k(+|&~=$OSu(KRCr5xLnKOK}5X65>Nh?gsd(`WYY~BxJ?6CGdzGCZx0w|7mXalnBQPSvKX>o=i z5mT3WdSCIXOUPlqUvP#4bLY{QuVH0r%EvE2P_1Ph*E?6HmN!u2>t3qzznp7=048}9 zt?l}6?s-C-BglP-XG%^wOW-oan-MC+g=EmX6y-7d-36pPa!Ej!DJe&UIh zBFucu^oUm~+6mCgh;Wv81xVkZ^q-Xyt?!~4;-J0WlYzlYVq!Y@s<@=Ai#+w!FgIHe z4sjY2HG~_X7)nI>h-VK$zF^Q=nH^kIj5cJjVX%XFQTXH;h>w05O;UF#2zA!XC>C*V zT1`EBMFKbjy}_-GB0D1oR^i((9Y75;;Mu1{u*8ka*&mBQLQThmAltc9 zCW5+IvpzzX3Lfxi#r0GO`Szr`&Qpn|t?3YwhkY$Wo&jU!5#0lJiwD&`-<`~z2C)$5 zNqCnauui^Pgt$F9LFy&=S#PnL2`Iuiog^CbW>=FMAthbS+u+Bny8kFJtm*Yl=oob$ z2E52>AfHzI*`M#jN|OF))#yPRZkZr5prJphOY>&qxrSUvQi=4G{C#3hmi_z;=|Xo) z;L%fHch1vGrf}$Dsw1aUu4g_o$$n**cKql2tf8xecQ>!|&R51;}UAJ=om7 zpL;^3?SMd`@b3tG(1rLJV@TuE>botgmQ;r8qDc9izp^U+jvh0fm9E8sYSA&<-<06k z^bftJ0glK8M=obEKd!H5;cU+czyz!ZC6-%BVcAG!u*0x@M5qiOd`pO8O+~ijAW+SC zP(Kn^#8&BSul>@e$uzjSx0T$;E>DHTrX?24cyPOSRW#><;wUrhIk|&_g}F^_$zQ7J zV>K7232H5BEsg!?F^-uDHJYh!Oq1N#s1ER8QR>7f0=JmCeL~2T?<(vfRU^O$3W%qV zqL0P!U?-toVN&|3o|wBIS+I=(D3U`SH!j}9LGz0pE7qrY`3W!K>Me7dot1nY&?Kt6 zo;ymx+qYHv@AEk#)8i5ne!-IL=%NXZj*ecI^`Q7Hd=9Sd&1{UfVbX4fRA zMKErM3f*jc+_GGcFADbR8EKk@QjzV(!ky$L2sTF#Op_KwzRV9>di+WcEG0ebBcv20g2$UW z6KhMaRWUDq5MG}LbiC<{qA@rnf+Qb#HiHNh@8Q>k@_cz#>hY#I-m{Rxm~8p_)$@W6kP2yA-9X z>L*a+=_G$VHGIHkcwW#@QKvIweIr&Xi?+`ZIxS@hHrSmJ#buqi(8C;hHYt%``syaT zd9B6vG*V6Ac{L;=1j_`F$KTa>uf+)GnGb(i(ylKi@jfJOd{I?_J(+atMYFLoe87f$e#U2Fzb}RJlJ>rzCOfck_J3XQp8(QK51| z#>BE1fffwV6-5eR-&v&OzZ&C;qkjsi5xsVUpg+#B?W(_RgJRFjKSeY=kA@}5ziw!n z#RC@dSi+s(tE&5a8A{%Q{ZlvD!+rD?Nma_{0Cdipf~KtRBt|&#Z-9tP140nX9o6EK zZ9PRdCP)yB{EeFhj;p~;B1}SfXYbWu{s8U_eu%z^PEJmA%*D|6yqd)bdCqfOECos> 
z4UI3sam8t`gK>yp|1k7$3;}cBfE<1;Ep6N5VfM@{6ShEVvrK6jnaJ1_AHRUGwyufi zBq@jgL0@?h;}EH-sp(i)B#fO{I5;b8t}cnA=NTo{UGWXg)4S79N^2?tGNOnRUzj6V zAE_K)o`NeXIEjdfyB7bzUJ~qJu8hn~5Ves}t*QfhRu(lKqj_n-mq6Llni@YpKgw@f z-7O;K&SsfYoae)M+S(}XJEx$MJDj2cXz*gChRoF*efg=TT^!up^6seu-J(35R#Bqjze0TmTkZkMTOKw$Ws zZ+g1HImtChTBg5Kx$q0+U!Hm(V&4lH%gQ?TIJb>6w`ohkfT-O|?%t52Uya8pM*2B!GhB2JXCsmzU7tBD0ulo!nC# zB4qpHi;t_&JUl$LT-G!!|4<{--2x$`a`PJNrM9*X?`6APWm`-jO?@mIVS6{+=ix30 zLYZTTPH==l{84wkcCD=?#V9J3H{DTBL=3*0<6CX>k;53Cfmtj%#A^5ll2s5i9D4>j zn5+3*P{4UOPv5!FPBCAn8OZO9JkmJ<+ia;3wD6;kqd6&9Aw$Ao9Ns%Hm>^^?LA{re zuC+sSEwmZB{+4p%^tmk;MLN<#Uw_X{Cy_-^Z%-FvHM%ZTS|)a_OBP6zBGYoIuOIi{ zIxB!`^$Hrbrd>y%HI~$Ctm?^mP6ue+N@W%s%Py|JggYAM2qEBlHsl#c&XU(K+T3H$ zjcmhNYIOJ}grEvVvh#I}4d)e2Vr_kUO=$RtDj_N9d}9J*v4r>s6?JudyR%bUx=X82 zP)Yh%#pQSV+UuJebB+7I(cqa_Icba+mb+h$nq5gDJ4d^pE_)2DwmB9uHPO&6)t)vK zQAj7zbB3r^^3-K)wyeHGHkm5qNMjIbd{(eNkciuA=}>)p9VlZ-vDQ%Ma@vpFTDviQ zwduFI`X$RVe9O4F%|E}`S_Q6pz!Td1Yq!BoGc_I)OZD(cEmm)BN>G4bcW^KO>`ZYS zn~$>$cpAsn1_t(KLb55}-oEMHwMKfUfegh2@bUuR-||RaSv{6Qv=JAA{41aq{HQ-x za#YUC8?x85H@mwZ?>Ttip%(>QKC}d^2JNib(g@+d9dtu>J~V}ygzOG9IO93WkmM1* z-%8RwK7`+U(O(2H(hg+3v(vLkH!f6v|6B7fkbUy%nvh(_U`hj+Yo+=I_IuXmPlwkf zDNqnYpMm*B^#0%j!CdK@RiR0bR7^+OL!YdRJNo+iZ1z-yI5;Yn_t(|**dF4xvs7lgA1U#%2{TAsRAb~ z46Qc)>*qqkZqg?!$5eQ}o2u8gCxR$9V;1Y18ZCp`7cgqnRG{twb}Hg(b&n>e=XeJq82T4P}x$hN5X^$93&q_)Jrt*}HeS;X!r+!?0&K zPOXX0Zw>Rbml_}7(aAaY;C|^`P8ClpMQiUjr^OVS#h8GFP@tO}#Dk>|NQ*F=t_O zDKdhH$;C(~&dk4bvdI@faV_P6_+$V`zMnNXPD_!{g`7YGkT(FZxx8atdB2+?h!^a6 zmtJPQ3kGFES?c>@my zzzg2EOJ_#Xg(4$oM|=e@42EdGq280%pTuK_@Y$jtk`#nw_Yw+!sdvLK z^}aY@blU%+cdY@By~NlNCTJk9`ckFk%i%(SN11@6WWmwriBSAJ&?lkz#}I7U!=OhP zFuFv@4)ij#6X`qoZqn-m9^lcCU92x5l)qA*fS&JA{bdBKM~Vw%PSv`$OJ;kij2l8+ z%%B}{DL5HChgUC-J8Tw0Sm562ef;L4StjFyWGF$Nn=%`H?8pJZ=V8C4Xy{2~-R4ju z(!w4RKs&|z?Ump^gyCJkxtp4mof$EUY`0cOmeXo$2`9-BGRCTqF|Kd+1bE|2A9A|# z%<|n?3R65tgzWMaqs$f{uC_x`KKSRwt3IO-z%jlMjy9k*eI^WftpfRu?GuaPVG)tf*m*|WYWSH`hEn}V`;YQ@%NuwM5xHE*(is+*W(X@)1 
zt+Dj*a4>=kX?nc66LmPp$EC3J+U-aY@4twuI7E-oAf9^LS6i2r!v_G!7`;;HR2)p@zK zt)sWUz@;<2&N5!pn3<~#HJ`AMq!n63Y}IH$_Zrdd!kZ|$LD=kk!o3yCW#eKfBKjLc zu?{5OjF7C|?Z=?fiS0+v?L&%RwQs45OduX^o4+Nl7s+SP?2xtOVSYtenhkOJ>py;M ze%x)-(CtAxII$Ua5g>X&B2=W=k>hB-a7T^~YM+@=*YXeCO=xcyD|~Rv*vyJ`Ay==k zczgczqTe!43QH0Y`CrVvbyU<**ET$KNOz-xARUsDBAwDmhk(){ok~lIV1a<7(jeV6 zbc2L+ch>+zd}q+-eeQd$Z@qtfe|@tSE*NLd@5Da)?6dc^uU(ow9KgowWhkOdH?C?2 z&ii!qhxZPUiMU=`y~y@v6GLLW+I|;QIH%{XbX=l@?A`g_r zn&JyyRG$XpShg14_=L%v#)1yA^S#xmy#((Z*WOrZd$>>vEwl2q+sm(7M3UcvdlFwy zX=CJ4P8{|Vdur%P2%s3uJ_l~?cF`lD?7Yop*0~WK-a-?(_LD91M}(uU!h%_Gz>4sl zpK8YdIrF3Ju!95H4m{Y_Clh5J-NmfaGkEnv1dIO{vS~uVfO4xAY$SngOIU4-f8S$A z_Ek<+Y@sYeeyIew&@r+xV)(4$ie5;#(1$S z%y_!iL(MSLb#GQ|d%B(+ya|=Tl_&Qt^A>7p8`sEqx|aK7w*N;xkAlP3($_*Qy7lr~>F9_vj1G?qSdHaK`L#*ZU@Ial6WhtH z#16VoDZWBPNo^en-yl^LNV^2si>{l3%NL_lx?@3uyexDFkoWHmpTb$uhWKd}D^49k z4lHCBnlX?M=270foXw|HO%V?Q2WFaAsa1{p0l(+7?`P`SJI^s2!KK>2`*9{Ch@lB% zQD;Ly4p zFAX}+0|0jNP0frR?I=bG)ReZq*%8}KZ(O`E=}bv??nGtu<#gI1>{;d4v}@lJf6Cu6 zqxQNdep*w5FkL@5eTw%fPS{lB1;Mi;dG?r(9b*wN7 zLJ)>zuB{$3-9TqKrhyLeZGj+G<=An^#BD9Yt@WZYXhMcN$}6u$-57fo^l}ygP94|PQ;HN zZY^~;Xp8*)}vb0}05*U<{& zAd*sL@Pl}fWLpJi620gAy%$y!8#fSFk+nUdYE&Cz)hM?ZR0{ktvzWaff5g|*nEiVy zNJ8lyTK4Ctuc*884jEp|B#PYgJ#j)+}8&R*#n4auN2E!^aKlPb`C7WU=jj0F*~@Z#U$-m*-uE%t6z8uTm8C@L z&WkY;tzN7vF|L0?+L{#$bR<@SB+PH)V{jiCy>eRmmetBLEaw2y`POmdj=$(v69_cU z(^c3t25;P>C8Wv3H~WF%0sSra*tF+VsgsM_NNysLqA4*e;M=U-PyGvSKY+EzpRpR~9U2-Mw|LoVq48cv%&oj^6&jw`?*%{kQiM6)_z)W0=e)qnkNZxw4~)}cx>I&@lrN)b zdz*&Oc8p|asyrkwFS2mPT#>`b^;FtaBrv6!c-aMihk#@nMW~imeRC_LgW6L@+<(~3 ziR?fjcSZI@veVP=yMvRT8KV?mnm<~5zNF$Wf34E;KU0*kzre|cqK1#JjFo~DOQu1| z9g^dg+x_SH1?|zRuRi=N)b=9uT2)3;I@*3JT9j+6KsO z=4`N5G7R}P53}f7J6@LI8=8D%k2oK4Nn-0W8*%A^J2e-ODuX~`pR7-cJF$w;el5Fe z9<-4lk;R3Qo&9ACe@r}^*glEuyXMw?T1DsxFDi1Nu~?18acbyNi5&f#$kIo?`aTxJ z!OmYqossJdW-xSh$(6lc@XFr2LiGlmO@+n`!$p?iDomuL3bE6_Lz(H1T2q-lnr?hr z=;M5{sP}qw9hZJ|vP}e>)l49;*HqyR^*&|`m(OOlvdC9AKV7-a8utAH=a%gle_^sH 
z_9?sgnRT$0g2r3E{MT^TO}X>5J4RXa8dL{~-aJ_Agm3xHnqBlfNENWM={@NA)v3H{ z{bK8qS?-iDRfW+sJh*qA{}?qt6EArI|E~BGRn(7!6-dcL#@BXtkxM_y{gDgMYmN7% z9M4oB?)5ZJT&2l1i50uRECk{9wza&4nOst<8%uPue*CsU40-~IZlRl$b(RXO%(#wr zJ)&Zr7#Bav4^RD`z^e%-xMlprCM!_iK`Nrl{7DM|5F%k#{C9Dw=<@G=I5XlN)OP!Jo+I+}6vSy|nQCV12x@vZ`jM+)Y-q zq~Kj*jUZtXj`Csd-537e-*XcJ>Akg%<1&mEkiaRtpYUszV1xSkFNt&$50kc4dQHSa zdV1Q4Al9~Vw_=9h1_!Z{2#%h|9*0lwcz75Y7|aR0TpH1b&5n$qB#XHHt}jv${N;-9 z92v@wjc4+&RMx|beN1+uKVWWep+U|Vn3Qh$2DOAObWDRp51ndrpHuz$>Ah?K`yfZ# z&7ibI><;!-KFOCDELqHcbGU@ADg61#x@_|o(_n~vO%>f#rNULFq#*JXxkARP(ToRd z){Hv-q&Rj-8TfiWb#JaxqfqB#Q^fA7NN1g^j(%P4x%lPkV7_;QHxE>8%{kHI**UsL zDPKZg#fIEHTeurE^qG`MmV3)cCYQ#uV(*2}s@^b)m3}+L@IXUFNu_n zxGfu8zMzu^vMd$)Z`m>?T4gdwrW5M6)E|3rPNnIx~*;aX7K3nuuTU2 zUF4p&O<8_n|w2~fL-t_q&6MI}z!jSU)( zI=Mc%iK6+RNXW?CDDao8EYt91r(~YaA zhu~pGG6T24qqINrQ4}1hy22FWqidrdgD{`GA39QZ3myM#C^rH9`RdhME@utCt=L|W zr!u3YE7h04KALHus;as^Rr}e>t91;Gq4qJdhb!S#^zU@kOgiI3F16(kNa()PC-D~N z{AkbKD*a%`WNYjKDLUCZVmw%bYMl?pz0e@jtO-5Dd~f7Jg7moV@}Nk@ude_2?MpjD zZIZ)$n)^1@!>DaQkz<>x9k|ba7Cz((ve+$h>c12;ENqBiVksSDTzOSx=ug={euLFf zCkE+;mQy7!IWs$PK7ktyynQ<*MCXCrv84KP(pKZhDrSAQ9Bq5&P2NW3P`&M)QZ7pk z1jCWd7jS;PmYAB_3MecC5$Ha1GimcM>w`$%gHM;8uxa}CnL4Osyl zg2|1G4P?P;f&A>g_lFO?FAw8}{p(5a1NsU*LIYw;&rHj%)!jB8gm~<%$*wcaewbV{ zg(nc4F?YR*5N-Z)d~QB(Ednl|h+E{PGave&Kf;(agZRYliaB6mES$hXfw$Y&d(68T zpC`JqM!Q^F7Lt4~QZ^2IUrKKttl^IAXc}NCi(%k>iBni4JRxpLt0As#gU3c(uR!d& z^HiGdl4us*Vw;!~Mw+PoKAra9nGPy^Q7b`&@9Faq@rQ2I zq4!e-k!o$Gq2o^hBIWEl%)SN&0uxU=UtHas#&a4&8*Z)&edfA~P}-v|<9Gcm0hMQHk4OJB@PC88#;#xWUu)I9}nUmR^CPD@?v&@W`LIhUL8%XrYP ze=xzWe~dk7fiC`4Omf^^?9k7*j=AIQ-hE$>pD>llxx6Q-=Bg4Wk3XjQTCezd^q#%I zW4)^V8YS<$uRz6=I~F~)k1epVDL_f5Im7ebEs4c9mUyf9;pMknBy{+XsnE?c8;No0 zfVkDmFCwd>jJ`iUSs1q_5szA%h|Z6bjhDgHKH@gcXQQhU{nl0#k&_T(CB*{? 
zjFxR|>BW4SGF6jSF`#BN)9{wTbc-w|upm!0zcn4V#N3wFiB`R5PA=i&YFIVhxwv?E z9jCyZOm249UfZ+fc(sEr=2Y`gn1gSfC=qGF^HtlYdOzvHG^^Q$kbv@SGDjKfBr8ldD;moWxK3;t+@O)vS-QE)| ziML}>yU%~Ch?a%XKAN)oP{tyT@)~blvp>be3{|_9$*uBy#QPW6t)QUg1@{FxN5i zmoCm^+lsF0yMt1|t>4s-=YkyR!t9v9zZI5_mkN0p^z_DVCPnO2tI zc2;1)sKXlXm}4SaCvp%H`XM5ei@f`j{wRaE ztva84qFkf++?)}r4-w&y!Q#cbo`?1J$j3*A+r`txw6A}O-*!7=XxujImVsEh}Q)q z-Ec2(ngZc2PTJG%&v~{ZKbQu<85KpDkXIf^VHCL3WFAs9>HPZC zWf8rOnyK{fHbt4scE<7ksxlwW*~y$b_U6eO_P{7TD0T-L zWLv4QbzzsKX%;O@#XEhvWbwGBxBQM`k_4poaH^O~HPmQBrQ{9*1@b(izFthf?ByE- z0;4RU@vKk{_@SK-hDpmn%ya!A*!7c$0B5123D2u2wQ@OQjT2#!KNiCjnoq(lX+-z7 zPGcq}p4D-~cuI0UI52F$f`l&Qu$X)g(XV;^k=uQk7bCgMhion}abR4nZ<5CcPtrJF z;z6on=5DAXRc{;xLXYX+9L(lSX!9ba)?MFlOKNXDs@Svj+-vLL_P<0u?wbhoz~k#U zyS9fIKHVL#b zc}s+w9tmFWvlzG3sfoj`n?c`8NLjO+G{+6HSFGQ=w$jX{XB9_x?9>CsfugUUK_fPy zc_UC2ojGUm#-D+8miZ2cei=6CpIMEBGM${})|=WX6KDBOt|WW9T3K=Il=J85p2KZ9YymJhau)Ocko?N zi4nDlRYYH?8_pE2jf*7m^~tM}UqhTBL=Bh7bGwrvMz!t>?_j7K1JXyF6ygn}h1(w* zeQpFK+p(l+YeEH{malO#IFNss4yCNFC|ZKu?1mmWtqgpHUh6$aw9q%r2f7AI+6Z2=T74JdS+7)s) zZ5?2yvP$@j;*RIkeMNd25>d5w!Z<(Ad75s+3i1)HbOd5jE3W%=EKyJ0rMJdlU+Y;A z_?0OZF0QcnNMeV%>d56~C8yUe(xurd%44>flez#9N1O+u; zkqp@H{M8!RX)=q+I|BZtk1*dB|Z~JR&rzfQZ+R&d>&YokfCG+Ou*NlBl*= zek$pT`)xl?Snf-^f3qCnhdA?oR$O#_LHsg-bfNGjO4*4q#GqSRe=~KZZPB{3(ArSW zHkB{vdYoadqhp*&@#@k2gJ$O1G-Zp-)Q9pDjzKWRq>I{~1s?tT9R}CFS=P&Ny;$Lz zL*&V>cx4q|M!#7H$E!>Qgf==rnv9DEzdU(C?fjM@VzriP#^gXm`?I*Dpg&DpS6V_u z2K3~&S*d?@+xXXJmn@a+bGwM4D_(aoDqGE=abNxh!^u;(K4C$`lq_Hx3^_x%Hx?!5sl@FrEhi_Ckx8<&i2Ck zi!i3X#FnXA(z<%l`R#2SIXSr!!6sLOQbd5f72rQwAY<$n_dW|asbf4sM~b6Jp~8LF zi&2~%M31?p$gv7%gGzGgeBm%wd*oUhL7)I4B-vQ-WyvvlFH$2X_Fhr^pH}sPLKTZ1 zWkh}M?9-uflE1l4`8|@ep1b=;g(R<1&^$W$Mqy!RQVx~C2u;hT(8or~kK3Y>6(6_r zk?Ek3ZKzGGwpEe4aE;G_w}tjXQ;99$uS{MOa(*@mweaV|7Ab5OE26w0?RBa-;5X!I zjQT*fr2hMwA))#;N$oR!Fb8+f7cK3sR+xqhMZzO(rKiTSz6kswHoCmJP2L~<1@d7N z5pw30s$m}Gua;`}t1s-w3oH- z-aOLXE6dHKN?q(dOMZ6Y-)7an@NL7wfEu?lLSl#N{fK70Ssb zniS1JUW?RrnG$gYMO5Wx@n68u-1nZTQISO8YY~fWFjXcBVLYnyu<*023@}e_W>j+) 
zacB6^0Ws1;m~7u-iJ0NM{5%PK_dccH*ztVx#kKf2sG*zQymd>gk)KBFL~ zB2hS*#5AA36TRbwEJ*4@4(!(0Urc@r3H4Z~G780PjTt)`=DIo%a5oTIy?!^m+NwTj zM`60g=+3})u>V@FvJhr={NDVmtVWmNNq&%@u&@41>-xR6^|2FiPtRWsTwq%pnOR$7 z007^_)YOr7he>z!FQ9Mx8W0<{5^UHeB2=rcSau(ej`Py)rA}F!#O_$O$k?DQ+v41D zLb~#C(J{NrA={C1LtMCl&Ra=sVy$w&*2w~mFXXU*N&SMF4``bmH?0FXHYtgBeeql6 zYH;bVb_I_3MHfe?3jCm!96au;5NPOLXr<>g(zy^dOR3YG-bRV*d1-ZMUROuT5oL>k!qH0RcsH@3fa?A3K4?b)Ib@LupIc5`9?~4$U?hD04qY-!5ZDRhU5k0> zVS^i1%e?T+i6o|)5VEblD2^UNdyP9Ws%k&n4xQvrWo=1(2H-B>g^Q1(SIU0Jl!Mky zQTm037t6QOv8H=_5E^W}rov;q?&MvR?Qf>KA0k|8tsdH+LKJ60zgM|Yw2o708FOAa zYMd7pu*5PPl>AF+3vHgnR~L8;4yHIdqf{use&#m!BefMssZX!+lfXG;6(SG7snBf} zyQnje(Ft>Wc*%3sXTH^_GL4M9u-8;)213O0Q zyYpT1{~)$M=@tPWM0!noKb9pYt@`e?s<-fL$N!*Bf2RBg_oejzwHglJx$H5B!}a#J^gvfGdD7iM@pg^?%>||5ltwOU4Jb+Xg7oi#_&?oe2@>x&L|n zv_UXm$1|(lfAol$SuF+n^^rA{^FN@w;jCNz$s*w~F=J&ia&k-H_e6nO)fuU7evZ-C zH&+EEi%aKDuU`|8Z)9d!nsp5qIXx0IF?^xT91|L;LgnDlhj8c^*1F?*d3il#WBUtz zRYx&(bi@br+hwmt(7`Z4lX2iv=QD|6M#???b*ekSZrB2~kw)!^o{uLkN?ND0{t{7Dh1-508-3e4thJ zGeq<{uG}NRS|O_g3nnftHVX5-VaXIE9kZDx!&w4ySIy`X)(Vh8?)!?rgb^FlNU6kddt>fd@o<5aix+C*` z_ksASw`$J7rQIO#qJXPAu-%yvc3oE&mlh?iden(@bUUxF32UFaZunOx4vx;o6K~Qj z!UR0`99MDhCZ!U#VAcG)$ zv%Eqnp7mhy-!sh%;L;(Djg1qTX~qsGkTDG(7;Q(pi0-%^^_N0bd;7ddQ-Uj!nTy{(LwLAxN9}wlmDEZ{NP%KkVU3@AmZI-#Ge{n?2rCU-f{EB=aow6y&!v%XlM_FLV1 z0WyPU&0n}3iYsn&0XpAZPF1~Ac}!bp^p~jChSBCod}tFXKZmM#<#6f=O0C(S+JOue z>YS#6vS&98h%K$ItwK4eVTTM6eGDa@rBel%DDdJZD)&l#2?pD5Om$%c9FzUYQ;R=t zI6KZtG&bJ2BxGdi-Z|df2+LN{?)t^8U%^=&=STdR)1<8}f$gKWQH?LD;4jB_^l}Jc z68ODz&2+Z-K;n3B8V52z)Js^}J{<cg4APNE-T<)9I9!z_q*h{%udK9%*P$&+G2An zZZmSCp-@yD8lh))V~8#R;O_p*1w06-uoMQ;AKGsE>OY2k_@FVpw!-4ihZz3H2R{tk z+mvz=D^zKxGD=M_RFWnJ+S(|^kT0E6KOf9p8}u}{%FWMJ6YKj_5|NMoY+2;3dQ2T7 zDCB=BRO60Iki`FY3@34(HyE?bRNA9(D9|F|Dr7vtSivv1|@cqFi$5Jzb#al>Xa zLPT6#9dq;%B9Xgi<&ZKFyy)}5JpPG+x;hWQ8`k)uN$iD7>0j^z`Qmlg zwCC7_OS{5?vS*LP5vy(Y1{U$DWYuqRukj`ZaN{qP4v4p2oNOJy_ZlOOKnz_|LxXij zE6?~R6b^8`AW!KXJtAM@1!?l*ijF4#dY)nv$Je_P#)eV-xhu(0?3ESuL-4pbXWCe0 
z?3V9HS0z`U+g1h3r&a{W{w0vQ}?FKgV*oktIR5?`le_(P-qRf#H5G zOPyL-!Cb%%?W{9=nW%!*{>b+>{>Iue>c*tX_*Z zNwPYBmu$!Bw}gYJ;5FkYdpDnrpnOm(T(@J}L?h$Lw|geNmjdX7M_Dy4PuR=}j&`|A z;+k=G}6F5&}#Bc52IV1o1wI&|VT%6GOPY7Ezr^b3fympR5gE&34r6>td5cUoEEQ z+d+9*j<4OBlx6om9lyd!2v}3bZc{#wm^5>3B?v^7-YvD~^K;r!WK_m}s;BFWMX27l z;Ah^a-i`Mq9dQ_(F6S_2W*c{$IpfbqY}}5^4+8XHYec-lV#jaFO?7N-FYVZW9HUJB z2DUusvXYc&Y|;L>QiF^o7NnWGGB55E5b#kw>Oe;dm&)XqdqR<*|IA;+zRovDuchgD zNv=CXTd8=N<%HMo4|=N`m;f6S+dcT;pQ5K(bd6BxY#~vMIeseT$;+N$e|BDUg<>h4v$}P9SWxdXuvp^993beaOY(FSOteAG;e5ryc3!k-m@;eVL zT%TPFQ?L;uZR>rXvyLARy0(12fxQHE&6|rUaY0)=v0Sa&IQ5B`?$D~{zdCq!+O`gh zlS9ThgGVU6Cn(pYpk?2&-e1Flq!&vbeYHCM^W{n@Q@zqvE>a214apwyR=Zz%=O&V! z)vC%R|Mnx%i?ueg`WCW6sj+Co0}FY9?HO%M4F?jAbNWUF|1alK1n;Yeia3qlj*0{Z zo~M0z=$`lX*e_0A62cU7h;~_XuPN6ObC~e@0*HZr7Dw~GmeBA5n;)~vybnQK?wqH= z@tL|gcFh$7pe9b@F7*Q{O+WDAXoi0t(*x6$Ep4$~pm%ZOOOk9LqjW>hbaX-)*Q)+)$?!>g`FI9Q+?BR`PNOA3MELqb9_-xcKy$sf# z@2|(+-OX6RN>2}rwJ;|r<-kD-Z$U1x+K7>#r{5&P*d%bgB}$g6=2A9{FkSJ?47F${ z8TDHNB&m#r()+_f6sLuU>y5r94<#-fuZ<4?IdU92hdeBVUP;99DJdN_aP2W|JIG@-Wi;&dQZC#3cpUD-Z!$16RS3CYY+sVC;Fanq`vKjZUfyWz| zH}`Y91KfcbNCghhU*3?Z!EgprR(P&>)^gw`6Yt>R;sq6h6yX@D`~+bLYTuGYdqg9T zpnqw00{xfpp&g_$`sfIwg6E-<{$(n<^gZ}9HOTU!u_O=*9>OP^O}cc+gnaH_uIUN8 z8;}s>zy$OX8XSUF6SckxAQer%4^TltfSny{)t9(2>jO$KRoB*TP3o7um$jy_%~M$P zGqE)JL~PTfztsTU_DlFwGu!Aat7sjO%5}&4$?VOk|9((hVpa`-k|$NKDakGZl+WTa+(kn2`?8@ohTn<|_S=?UCZwr&4ryETnL4@uaImkT{Nh0lV;NBk5D?b=7-B&6v8fy?XmP+ zzS!Emy?zOjSbNG!&0c+CiHdGg(xeacl+Hx;JRGY)kdqN`H^2Yz!v8)Figt}|zFGn= zWO7eOJF%HkGq}rl)ZIj2t1(es(dZF4!tpqm9?fQJ%Mx*j~h~wEFHqPkK z*$+qm=I5PEQ`BhXu`na~?sX|YB}0`%5Yl;d*m-BAa;>`)X{ILkbKKmgtsGg}4ry>y zDiIJ6s?QJwTqFBW!9)tPt-~XgYZ6n`H{7K8oi%YGg@uLdLm3$Q6?QqcrInRojg1nM z<+eXIwzjq+!Q8YA4XKY96GPz*-3^<_@i6uKUVM2^aQlA0KCiz__`q}RuXEnkNR*G( z8#_2V-UZ9?zeZYQUDZA}9zwMw%HQ-2B?YwTxQ2rXxIfd6qaPKUhHXytz~J7Sgz3Yb zfCT-e%vv9#C911(R0Gb}2N~}|hN=T|IqlH+qe!!#PD5T3lXpF75H}semw(>$hDKWJ zj%Vsm-6UAVee`0tuZE@)dYYxb_tkT{sRViyGfdhJQLytq-+MM!MNMgR?nELNM(%+9V^;$11~QChZ$ZK 
z9M!@?kIm_UMSL_n6j#jreQp?`>lRM}DlVLuqGF(%cliqkYD%D4W@kS6NP+bRR8AY5 zsZ@w<&O=9{_?n=Y_trBnT9-A}GBtPqD!)hE;ACTqbJn z4@oE6r1!$tS8smUp-35O*^R1tDCWAqCiz7oGq4QUn3LHxv6@WI!=t#kqiPOpb8?A| zx(W0_x-&oI6g5q><49Q112|amQ7KX1VH5)U6Rk@3uR5zV~f z{oVB#`$=CbuZc4(wt{U9K+%PPNkZ(G|?=xPH&-RC4ueq)u_T>w@5?;7KM=0_lbO0 zaUM#4a5<~1qX8N1+829SJ-R~$w$=w3U6ZF=ahv)o`r2EuKHxU@M_+mR)pTz?ni~F9 zFmejr?)vullaW1;zusBxr3AaS+U0n?IWhY^^+8NKbkWLz^P10>0+E9Q(wLs9Ip)UF zOT-i!^+^=Y{@f+@qL4RW%t{1n|fVU0wdMPJQ)>@(5h% zQD~5CQ|jw!yL*r@8OYfKxctyZY4XGtK?O9eF#cDI*qwsi-Fd}7^|nK6Lu3-g-)~uI z%ecGaVPiWi-8B4(ny89*vYo9np4BplbV+^bd1Ji+uklFjgg^WhqbRxwCP}9Q4y^&O zkG?3oszi6%5E+l5u}a+S?M!Mr=jFE>H?`i~8b62BWwjV;Q>k7hDlvQ!U>$=L&? z#qi;q%X3&Mz->;RSG*}Ljn7sC39OmfuWT80EVWP|P-u#zk{bS5nM1*OUx1(1P$xQH zB3yb_F0Pyg`k@Ie`~&mU;<_%tEVaR*xq0j2CAkcylue1%C7diQPox%QG7VK_oYNK@ z@ANvmyM>yBono#}=BV~o+!lcQ@XjH>`S$XdE}N4Mmqss^)e;O0@pU!Q(9k(I$F&U1 z$PoMM4ch$j-=Br%q8ERxH?n&T(oBIP2S;~JvB5CeOA|p-`#bR9_AVUG-uxfly8j=S z;|8|--xsL;ZAYu4p>bb;vN$vLAvgCkyL9iLtZONo5-G6u$hC%p#cIcIhlkiuCX`Rd zX`#{Ml1uGJb5#eI#6@o|LjJFX!=s-mD@Xl@k5r%uQbB0WAq=kWx3BcXX7GmheltcJ z!W;JY^$Q8TIJ$FXJyPiK4-!OD-8ilaz`*{!@}rRK7@X}!pInliK2p75COa+eP*d}b zd~17de#`P}^WA}Lh6JGM5P3x0f%d4_!$(eswpUPtfsehC{@!$|S98uP5{O6HRIOf# z;}1wnFx)cS#zpghk2_?|4%czL;ltA1&7x5e0XvzH-w;&maJ~-@{!3XQ&<^_i^3NZo zBmd+jD@dSjrB0&z-}YSIx#)!=3RL|hLp(ZhdYlA57zmEOU*P5zlL|5*MNm7uF@8>ehCz}10Fl4$x0ne|ERJ9kQ)M?7i< zG>v`IVYXxC72-Es1bu!prWj3xF`n})Ve`||GTXO8j;o~_r{%xt-evEMcp~8+0+b2t z?TFdfva;8~;)2w7*22=kjqL1v{2>V$**hbC(75xdA+q=DI|Ss-U0qn3QcM;I^tx?t zC!x7uUMb)~77i4Cpq9REobz`zsD4i0zk8|$?M9VuyL`uIcR=-!QYub+6SfxKu;Am zA3uI(%r>fjgodouBLub1bGMI; zgDWgDK7PV(>|m*5s620ht80|CMy;&8t`51XR**c2pM4{v>Wc@VSmh}ygyyRYs-ONT zVeK<9t*^F$RCFmf?a^zgp12dT|8J@l$Fy%wZw@5Z1_WGCTF7205P!NZ=0HwKp<|LO z$Tnq zoV6TK`_?W?tY7?mO{{vNY1uEv4txt>`zQ(cMC+PYjy7n=&pNi zW1YEen+lkUIqer0UT?Ll%xUWlH&Okz#8BM(%bBHa5oeT80DqqImr#X22r2&~LJB>0 zr<;pGssQ=Mbb~8-Um|~N%Xu26pMwAd;Mk3TglJ0XS_`F>ec``l{Ghtyw(ecJo^3^W zD}7`rdXGf)t6ICd4&|;9qIUS3@_1+D4zN)d&6BND{55nN0I-YH4}J{p#dg!ZqiQjx 
zce*hr{yb3op8uC$Ufm)sC75l?;nRP#iC`B1m{Z_Quc-jZ!qe5R7%-5OaEumcJN_un zhYjE7bZ7h{`9!Is1XvA_`Oqi@63{?tWp1i|0ozGbm8dd0Y*i)2=K}C=o_A z*zZ(|SZ}Rj`uJ8b5o)CK6u2;fzJFI??}zBQQS?|)01X-h_4NV9@&^>fs!brXz4(53 zU{`G#TgXN?UF}Sn`LDa~Z#gw)OK4?SO;w!mQ}ya=I{Oo17p__OZCE9c={8hwASP#2 zO*lk&-{qvmuMl>x_Wi9oRaY^G7`)+Oko#_LV3L1rgQDZxKn1cd4mw+()D$1fy5oTy zbiRxieH&A?kikhj^#kVQ>Sb9sJnoN~w!4f%MTWfsGA2(1(M= zSr0^wME~_96sTB6MeR3R_R~*ZVf~wKGQmiPP1z>i+b`ul$1$ajaM7N>h+Yal(1w8m4M+pVHF=ha5dU=*|D}-t!(Mvsx-) zt<5D+2#{hY|A*~SH1{|w9zO`Q-xwxJ>DPcH86>bBBf4?lpya<$;Cw{H#N1Bvk|ovs zAT#VuMn=X&zL^3&L>|6`ecxmP9nvX>giN;{qhKP& zinFjY=Hw%~B>D?F2UUkJb&f;=_&0bNe(m8#IVmN_RNq4*)*79c&Y6u-NL>NT4*8F6 z+KhL9Lw@jc;Oie@f3&*t*#w=UNypd1D2e?Xy z8x`VObush*R@VhZ>D0D*o~c!!j}EiFZ6I~1rXrz7x7d2_W`7N2XZ;8Z%Wm|B+Re?@ z^Yuu3d7(LZ2$~3Ve-MWhZ*6i0Z{svKzYlu3I)QU_J%xWGczCn#xz^EEs5PMHjr*(> z`p;BBn)->b6p`4K2>6Jy+AfGt2^5JCyIp-{rtW!l$5|0bJ_JZGr=3YRqrQpgq=u(v zW@!_nxd}w5bp3QS3RMmyzi3DiUr;fFOvxwYGA~HECj1jxb&zJ{#2|v9+OMi7Bteok z(1#8I{$8bR8}0631LHztu)~qH(r)V^{-Fa>DJ8b(Cfh~ww?tzulbV`+1d`J9Ttl+M zB8RT7ES4 zH!UkZXeOX=k^5mGqu&x^@kb)2BbQZp|3z2ELpHP9qL{xarrB$-@X7rmP`GrtUXWYa zULd3;ZU-24jK3L-9T&+IBmb65;YSBZ12uBBY1W3Uji(4bRMC2?HPDLZL}Wo0%C~Uf z-wXel*2B_0q}-Bq*~^QH_!Kx@?HJ7LhB@4fZVOk~cmmTEby>X#`#<`XMu)T&K^FLo z{C0FybPocxV*|BY^eq49XayQhnY2RO_`!iO(A$rls^XmuIb$^a?J%yfsu`ZBQRbnh4cLX>f^Qch z6tRZaBK$O47X?z2^t!L@{{kqeD!!%Dcve$NLc!{f*#Zmh#@!Kto#V+htG;?7!_biE-0>p6b!={V= z${4QP*bKWQ-45%a%SYKZEAqdjaliP%#G*1!Bea5zVFFcAJF*_Wbq=SUgL>g@=fnTr zp!WL3W(H;+qs?!o^;9Lt{ZE@=yfvfijD~+}+-g~XX_sOkrkB)OSChXusr%aiRd|w& zAQA*tJW4TtJKaH( z$XRWZ-}zh2KL7SFQXaW2MTm=daE}bD;P=&yMJ%Vk3M1MSGKHD&kpf@8C)o6__{VHZ zjqsZyeb%-o`DReIe0{^xah$B_&FlLKA&9J>18PP4Pj3LLofN_SlUQzI z0j%BrJ-JwSf71=6j+$QMDn0xcEgD$l~ZveXK<4MJ<)CxQk9;{wxiZV)N$E z@@%o>RUKCfyarBmvrBa2Pkb}WkjnxLwZsCXDWHBGNau(E1rb=$ZgA|6^i3Q3R)RD& zaUo1#1uuSmu6FN0iWg*mGr03RiJqd?TzG`W$H!N7=5{NVA#E=YmjuQQqe@qpvj;wF zgmPz;m1)7|-WdZL#@q1;ctr%vQTXKC^bxi2pG#znt?3k%kP6cbi`-na5tHgObo>p%9j4eCp$?o9!77 
zUlO;Arh@^yo@_yR%zIe%f-D-D7Eja(p2$d(XldoIdo|XM)E@2D9f^vGsjELxS1(C> z7*6se_juO!E>>Em!^P><6;GFyhkP2Ue9Mu4{p!cBdoYps_s^MtU3BD2i^u$Kar`w| zHtZ#`k>h4~Kg=PT8Dm%{AvSk9o|w^gZIzkfc~87uxd~@o+!h{# z&wliW4N#_}vemI;47qm%+`7br>Jah& zkdVT&QJs)B-=8jPNt1c?D)bjlKL&-6FKe2BdtI6$1EPPsZN$`%2>zo5%Bg!p{*qJD zSnT-_Rk{>p4CJ0KIfi>#ujW>=B3~qY^r4GGz^N8TwEkUmVL98dV8ig@MPg0|YWrYoJ6%35m5ijp%2)UBHrr;R5SO2sf1 z81y1OCTV1U=de9=OIfnL70y2tRH$dtf$kIhQK9zPx<Eh3yqScGJqi*QmERckZUaWj_Sg?vld2@0@W4wG@4=I=MWGQP z%N4;4Cwp(-HVqVfsl%OSjAx2vxI)mkj2Spsohw#z>@$~pmz?DCq}8amZ#Y2fSq4Nt z1Z{=P^W1tIQGb9_f1oAaI4UqGXsp`&Eh&$Mq=p8uNtNcOGKmnaS7ppZ|34QgCoGkTtcOAGqrNylbiYI&tB#>zW z>N}r8Jia`keM0Q9$v9;_Ge|a`cY3_XUu`q_lT5y5ePhk;1*=(QX>yj5Q_Q=Okurr% z6l!IU zNjVIS$D7b#%3aB4Jc@pkdQDN$@JqxtNl)0 zY|F0psnY4SX7dAIp2Fs5^Ks1s*x5y)01i z7$Vk2R;9bX>jtwL$Mpp? zq~_FWovhm`#R#x|4f4bAilpB8^j!}w>pu3>$0_!dZJo!?{GCgN>o*t; zCUH9gJO2Iw^sUeClr1(-J~woa7HAu`qLa~Be7dNZtL)lvcwYxa0X5%)Cjr>!dS+&f zAYdg7I8B{bSm8J+*_28_-<4PkI^*C_Dgw7*t^T>M(gfzuS1=<*{@Gz3`4L*n8?8^i zW62edP7ix^EE=+>+EzDQG+fZZFV<6%G2c~Eb?}tBX;o$|pdOpRxWnDE9~T}8>Gd`Y zO{XewvtVqKjxmM?%FijEo;Pp!orMCw`6HP%TD+^}A@lrH1X&~HTWj5SKiW#ih?s2| zgX$6H4ApG7LmxvSjpR_wFc0(4Ii2l+os;%DwQ4mOnxFyIs?(uuy3vy*_Z0GNZS>89IMAp%;px!1#=H zWqE~Gnbb(P?md^TfJ>(pZ>(j-(c)kf@O(U9aBze!jwFHq33DOgIZ<0B?cy{(dD`H( zQy_ecP&G3#SB*o@Gd{I_d~{y8w#QT+jZZ95|Lqn|T6b7f|U`2W2_e55!jI0e%8nBJ%aksM8zRQYR%NNqECU@-7hM-3;d(Mbd?Xr%JS3 z{R`upJjpS85EQ1IIQl;o8qby>O1(G9Z<(KGPd|^B*D6cyiS#^w`N!ZTyDSRqKXFFx zG0r@^sz2Lfo!?w=7VICuLsp5cPSOp_G7APyT2)2`ztu7uUUyf-+GKY`d;`yl=e68= zAyX^~mqoymh;cF9LEne~G#{MbFFsgl_Y^FFeO@AMcijKu> ztyLCmRugE>a!5?`#?MxiiCz>N$W(4U9{p`rS;C2u@J|?e0>Y55Z4ykHicH-MtB4ow zBOXx^+#td~f?j`ax?7ZAOyo_wG#UPII<>E1&AUJHIxDBMAB^O$8IqWA=bs`!T-Soo ze!z3B(p7?W+Ce(L+|w9Z7v62Y`vUXjKXDi3Z`@@OXD=PlCAdIQdHb~SHL`uh$;s)} zYLV@1y&E&+^9RFnC`Iv=weZK*2b}!z!iC&!3IT_JjD*`y`?yG+8_7%-$-Hp23UP`x{Vli`msfEN*jag# zlF=WQTD%4mIXW5#(*%i%CU^Ezcq|Fc%{O3TlWU##=j2*jffJNf5NuD1kk_g)8(#Li z8`++K62$g(_G;VG3p|Sv_P8{xgVr?a#oZxca`>!#aB!Fqi$i*_oB=05kAOwb)(Vf%%Yed3+|Nw 
zv%*D2jEaZTg;PLd0fdlMr#C_0yAS-?-e-g6bE2T2$R~4yVKLB~n&9Bz8*4Ngi})7q zwaJa|e{A1rn)@c3aFdd_L0Q0Ps?*a|{ic2XA3vVo8}kE?2t2GX#m{&m5&?+DU<-reur;8*lLf^-rjbg9E3_k{2K78KV~vNY?kh>eA)} zv)DURhDfQXe#NtB_YMr4B5DO=Q5~Ev41E16{wTLY84(uJk3miKa9_U^!qF)pi0_*dg5#dM*h6x$Zi z);7#;*o{6_TH<<=X4n(?zDDcndbccz!w_+MdmF5$hbcQBF?v8?ptw0<(fIaLQBnPm zaaCWxiap96^?mM~a+maxRk%odx<(m!gWl%lXvvdy<^6kdA;(01{a?m&XC#AZ zd>MG#3^1}H=|8)Nw5uC+;}o=OzvwHsmGCXe%F41Ezbd1|a%{~*E79UMUHi?lzBynd zm38q9ElWCiB6BBmOZv7>7Yi58|E{}6izjOHqUa&GztR}k&ws#^m9RRYffK^-*o8a1 z#l$IO-xxx&N#{Oaf*Os-BW(JMo`nU&cW_W1=<}H|3V1E2B2rVyV`3+J;s777KnR5)A+gIk(Pr zpxITK92!3%Wg*i>DBa`{UyTa)dN((bTQ9#T?3hDbdsC+;O}m+U|7(QZQoq zZ(Eb)=>=S@4y}o@c%PxQ#eXIEGq6H2C@Yvlf>7jvWq82j4?*TEh;`_Ds`F3$4;pLQ z?{ya}>Woy#uuQon7jTR|SZK6NvBbz94{gtk$0(3@GDKBeKfbzqpuXvTFmFVDF&esv zp_l!jUpDO=Dv;Um(T6|II2dq@LX=oGb!2tW&*R&j8xAXWFYE-;UXWw%IST^!GEKQ! zsabDF1IXEV_uUdzMBPAaCm9np!XmYY?o7WsGi=>cJO=_IqS)kkMJR7cwJy?M!Fhr96BDJo}LmNtGQ4yvGx4J{K`LIL3 zso!h4JXUZZ!okQm(s=(n#rFDK)o5p;kea0YJY!(xXZGnY2VrN)v}3|FyoA3v`q-sOxL!OmrCvi5$?#MvM4W}!Y$QGOdRVzkNq3uSU? 
z0F;r;o+*u>YK*(-DPE4h??9gYNb4*AlkPDvszj~NI5gqeQ84vy?!N5-FtNG=^1`sn zp+9db>@#Xe4B#)pb0M!KLBNllbk35~I@+3%X;SzLi2|^*?t4L2^cvd{E$Vm4#q<~_ z<2ylg-}s`I&E^Szh-6B3U#*EHfWVJGgOnPq7pwKYy~y4*A(gXN_{LH(H8n>JLXr2$ zMPn|!-y@QgZ2n?7aBkApjR4PC`0Djfbm@I|h(Lg(!PNFQZn)ZB%^V7lGJ*#!e8F1r z*HsC%eMjTYub z|3aYiROmL9uM6C1kElMzRZkgTo;rfRyxSkBly$)}%of{36rsw_X0WF?A%HLol^caj<=n&5k z!J+f}HSPIX=J(>?H!e``hb{k-Cp$b-Jm2I-(rS`%ap7@E$ORQytN$%d(m@dtc*e>ZkQfv{2Q7Y`?OOk9k_ z)Nd05WqezoBFkV7$JHgz3*Xq<51e~yPvmhRMsY@e&e8tbLO8?Y*J!|d_Nn9rm9Mh@mwI2Zvs}$edvW`}ISg0?_`i#=FG5mIFKiPP zo1ZVTZ+)7U^ZK8+*Zone^8wBfCNbST+QX&v_<~}tJ_HYmm*XPW)@-a)-R-7LMd-@} zq3@QjnM3ivKeNCb*SBQzmdoJ0UYJ$n`iR!?`sD4Qb6>sX!7HVDr&<`t=XS0-W-18x`sFK zqTl(~W5jmZ&~Q@Y`ifR>^`2oiH+u;OiFUFmHNBd<|GY?A4teDB-lrt^!T*cUNdAJE zjOFjc1wH8-||!`2QT_1}12-!aFg1NZL!B!w}#>$7nnFBA~?IllVFqxHPaRXCmAO|79P1y3NKkb+rFikj_&%c z{=)h#nj3}=>Er?fA_9W@^pfYPLTgKye3}3OVDD}CrUukYO*Tt~A@GQZoEO6?lF|hv z2n0!jwjzb*3=no=^m$%S;-MH^7*N;oJYB#tGQJUAoXq_w6NE{+JnhVCTmPM`>Liek zT+YVkXY9L;d#F|t374~5^I6N`~D@ta_q1eK{QP{NR zD+^^*4_p0ruAO679nU|=4(`NUR&O;>E+qcT_|1>$iedpv8{L>R6%2QI4l%QabE>X?&>;3a;tHP4$o4HDO;?HyX$Er-wcnmQ|G42O}h7`)yyY>A>=gw?& zk3F74XFJK4T9*x7jcL5D=$Bn}(f)KG+b?e_cU=>3;60wTP5+d zmOK_l9?3aS4pGge!(fevs-(LW@a-Hgudjs0j@-miA7PCBHRY00CR4m`JuLMBKP z->IPk%Z>tv7$^g_x3>%3OyK9qr^B}?g~z+!SjPCEz*5*mrM$vBklBod*}W*l`zJZy zNv}ko0lse0!iPoFkqXv#K})i~ln37^?TqZ9!xF2?XCDh9)L;tda;Ty3&NmrG+xhvw zF7a{4LXOxYy2DeP{xmF3gZTrpAV;u;cHSHuD>fMO{w+cR64zUNE_sy?CE-<6*7)(_ zAjuX6h)9Yk|B%KL#)KYjh{c-u`3mJ)2hX1HhH|iZO6wF^-gfg@j3x!W+W%IqXO%O$ z{$FhPNMc(Nzz&(nOVeImT`lq!P0HEZb6{jml1-cvkl@&>@x%HqXNtixo3(1RY#=xy z3JXa6+T+)keS}CM%sb29I>^5pNxY`%yc(#DH)%`GHfuTY5RnyzZ@XXeUf-(SNFE)z ziDWR39wGHj0G#rPrEO<1A6^008RfsR6CD&p+JTKyEhVNkUbtFT-`Bp!gIH_Tz#D8H z2+OC4_!}}6F3yz!hj>A`1_)$)f`sVAk@e!^I7CO6gSnTUo@=OZRYU`I@P3OqGAa{q zOZnn6+!K`2G-yh5UIs^S?ylY@7x0XO24S5dzODlD zQh~Jf%}qQKoOWOrG{Sc|E}u%A$auN_#s^8rNbXx1MBpHssC-tzsYwzsBor4HUyOn0 z8RC2NLn`>fQS!+Fiwtm6b@G1L>Tj~8IqDZ!0qF-*K4-PQu`yn6pXywYadgB^N$i7# 
zcSt7lE>6HNq}~Bmp8kze&LelID*;({_yMA9u71%yS)4t+|064=R;KsT-oC;)1Y14s zaJ|S@KIPz2+*8W=U~!GYgLUnfD&P`l+B8TLVPCK>J9l%-!Ddbu^!!*^@i7m^BOi@+ zzOzB19@6S$(T=a;y+jAJvMu|+lnOA9bbqiWN1)p73XbPvjlh_foJ;>Y-BMM6q#+y?1!@LU5KgD3CREMQN(l_ty_5BhB@A zNGIn$`eAXoysKeCmV9g*Nu^w+eG5pg&$xZw8VZ?$*S z2o!fCN6pSCUy3<6IMi&iPk(rS6*k$)6V_!?_fa81@wt9QvbJptPWEVT(-oR|a-3|G zjV^z(XX<0{iGVQ4h{~CZRm~GeiXIE$dA-hwD)y_uA6sB5TUD?j<5In1HHY=rHwsf*)muMtv;}q z3iTO1lwJWc^hy_zv(2VhkN*G;)X$ENAGWv6O{WKrBB>ny-(ChQ>*x%2#8SOqxITx> zi@t|tRxizwiK1+}pY!o-fo^Qa&aYRtY9VH1mq(H-(Ud%NKEL0LJ| z?>1?;C7j*QC!>4wzT;;-1dv!7vp11~L4b|-MtvWOzKB#N1J zx1d|y7@~Ec^Xo3xkqNfEuP4&O-{z5I1DcxYi>*8g#I{O(^)W!>*&<#C!W)b$rGFT* zC9A)yO#vboaKLWQPH}C>L?S|t><&3Pe;}-ZU^MnUB5+!lZ1grD!z6rjP>PM#gJnG7++3wa_w7mY9D=PN^U=Op)8} zW^bN4z%NSVaY<#+$3`%1G}&;~y=y+LmPTxLb)xTPc4G9n$%<(L4$}*Up;1w$zay|7 zMRtg0FfCWt%s@2K)IL=S**i>s5MJ_nXm+{rC*k^YS@iJ!jKT#d&z`9*eCF8%?Gv6G z77ia-yxYe?!~|v?Hd?pjJoQ%jZ`$zX3y$g6?EYVDPeC(YnZkgJd6(ah`8g`!h$Cs*|!~=8rKjAb6BaJQYYS)p$%_1J=tOV-_W}{ zT3ztiRb6H^xj4xqyBdp#k!26!)w)*i02}pU{=KDUp>Jh_TcaG6ek-wauz$p$ylvWM zlET*1XOK&&pEj zJ-aOC5565_HVpZ%PGDU%U7fT@}x%-@isyId~|AXVN*1 zFML75NbD(FT}zOHS1?@pUbbk}0d$NH@!~@{!PWl9{obAs^EpaiAM)9*w>P{v0DPMC z&C(eGJ;Ha@dJQPxsW!pj3w4wzsQ=^_nfEJ9lx|zzj@=;9zep;09m324IAF1=q;@$LJ`ZB-E$9R44G;7t-L^;(1;rH8eIQ;YX(4lg1xj|+ zTOx5G8cv+7n0H~DUlm;!n=osA(AvJRw6brq0a}WP78^=ry()vi&H4I8i@zZT`dm%_@0H*e$v0unM|FQ8LBag=(?T=h4VSQ7{N` z$3-s#9y9scq?>?{(EfLX$r~Ig7^(K`z6f-nj?3wk0~dt>S?G@QFFPzVi$)?L_nK-^o7EJ>uT)ZF^GQi4t@>8T3 zV8uUws@OuKz;KgoKQz^4Mrr}-;AgOpK7QlrPiNL3>@@CLeoM5>Wr>$V3YlW>WMcCEQcKcDS;u|Qq+J^&=+`LG5H zO#b$Q3kiOt)QOppMYG$3%k;Q!Q$HplK?&$-l6e%?tN8U#?}Ib2@riqRbOqtC@$n^F zTbdu!@bSD~23_BI9CzoEAhCUiGXU)b28|V{6!TG2F1?6UV@W=poOf$Fy_a`BT#zxy z^wFK^ueKVE2%taq6!O44Y~qV8-m_}ZGeE2^AoCo+iW`lm*_tS%eH9w&d9|T#6c^v9 zQC%;_5KZ4pFbXgN z6B_)r*`i1srP zsr6Kv&K#4`-!sD{l8?eEvaX%rx01q`Luz9Fn-lv1{MeV6TN9nwV7;;buD505j)Hp) zW^vZq4V8Bcq_|)bs54F`;`(&Pr7&XRU&+ZYue`!@wdgmZpBIdW+5-6_jCvAigF60` zjdeWJ5$kGjE4+w{cz1pA`&fo*6{tLZT;=U@Yz;hMJ*A^RI+8rE$5Kibd-LJbFPyHc 
zkcbe_?}W36QfJe0><)h9RO14uk)?a?y-#;NdvDXaPk2Dd71kSq4EP67Sfb{XNDIjW z02rv6*V7xNZ}vf&j&(e-7G<|dLHi4}Exh67;pu~tdi(Rk#>>6;vnLy!Jw151tD+Tc zLmffj?%$sLjA449d#qbZ529oHrJX0Xd+6g4+TwM6G_u~uZ8;jFN#*mL$3W`y|CGZn z@BAZ&&35_F^N$wRqOI!rpSe1hU!-U)utv~Im=Mg6-$#J#OdJV|?$tH65qy|s?8VVpsHnb3=D>xNdRVn ziWVs70foDNyD#7n^T)q2qK0KbG8yGc>KCW^CWmYW+~yhL5maBx$w9Wpeq{62KT7cs z(1^xgl;VJmG`Q-Y+?&shM>--PChnaPoW#oMPaPQSHa6r8b8 z@(9JE-LVjkey8Ndrm4OEw|tgji&zYVbtNy9@hPxG*pmZ%HEi76p4NGL=EDPJm4psZ zpwzbPd@9TG4*N$Q?amN$I%LJ-k(F&V)&>%|KMDfswK{w2QN|?Y&@e|wjZ<3lsVDKn ze`v{Ce6$7e2frhzc$6ZPsRa zBB=W1$j>t@WWv6~JNk}j(>PSt=DT<8t{6OtvzojBXvIkRJ!mwgP4h|0^^zF%h}D3PxItAE zL3DPK5K}>sc}Ct!siTQ41c@Q^710s=?haETMg?vzI@hRa1{E$KM4K{X&A^2h$S$ph z_jB(*&pXk~_kyeWTTIKc>8shmB_kgAoZ`I_g;a+#m&x%e?HUK0_!EOM*Vb9|Otxom zbP)E}fXzhGxVximzgzBoru&gBS|vVf8Aq{dz5?ce0)*nm*9^Yba?tFs6vPJE7rBaO zPo70T2$OH4c9FF^D?>fJ&oHQ0v`1w8xZXkmw?_a-GU4ABD8=7-41^iO(izfI1BNvy zea}k>{$>8+tt@!~X%UG}w0u^N5*m@aX&sA#Y6~e$q4_3AED!n&_YwM9{dzijO_xk( zvL9#`v<79&gHfx^2NT&&<6DjM(jTA4TwZZ(Dxiwc}irQ;Xr zt(hVto)pBB49KA1D)4AwI*LF&3S9zBxIp&AL970O)ug)=4*1U3K*neh{*!KJ8u6FwM6{37Q4)T4B15fKHY>7WAfI!$D<((q> z|CN2`f9Ei_jI02HQOHwFv+gz@^fqkVPB19B7tb)Sa{1mC60(omW1X>>JF7k6teSQ+ z(6y$*M)m*l(2DP}yXRh})`4W%Tla$bWhs?}tLv458Mr}$*+=)4v!Xe3pm0fHJIQ{tBQ-m|EDDegql$^Nzfz0 zPCT;hxaBB-FD0&m5BYyruHN$I{lfB+MSiuMvODR~NUQDLzEM4%cWJlJN+3{f19N(R zN*vflsUewS-quxCPIhoY1xIUqMnBOf8T+aiWz)4!&~&Z@t`GqH2aWxI%WXq(jZ{oICX{n=R~^)uZY$|J=j5* zR3W#qMt#-aIbX3&r#O^BD-Y-<0f?v~B^j^55tHnBQuUtw%dYn6w@N>gh-ylnI`0h_ zQd#Ahz7aHW-g%wHAN|}i-Z>F_)xklN`VQvY{tER|*;lzgs970fwR*Z?YK8h_t!OMR zx(y&o8t|Fb?o=Xy!0QwRBoG`+W3cYAlu^*YtO7svw2Iq_MlaYJi3wEzO;Q%5L6<&c z`eH|_?qH$1tNu#X>eEmuq7GRP zAMCQxwRu>0c!h?8`CK&<hFQ)uj0G3KWunk2yOrd?#y}M zc^|yjQ%EUAk?Y;T)5JcIm z7)`ua^3b<6nJw6NISqQ1ub|qkvr$DGkVp*J8;%;0qQWHScSC5Dkb@5|PGMgtnt23b z!-6Q-dZk#J%e8eBw`eDNvzHA-EE9r5$$Ye|tdVr#2}iT}8K*<)OVMBKH%#b;sx%TW z7lcIaw+v0ZUW8-p^10tcNaKOLF()s_?zbZ%42;PWa3QnZ3koAs)6*_fefHfBP0!1H zmq&HtfUp@xqcO2Hv<5EIFkdlIo}TczM%$i}=vohzcTTw&hOD~raOekjDH3xM9dF&g 
z3XiyYx$=hMGqlmSKrvZL#vO#Dr!OaI^~CBH-r@BCGXC>7aPTyIe%WVCSe7tW4}=bH zug)&{xPS=cq!CVg#u^QQy5|{CfY?jnKje9TzS<2$!d99`>k0OI>-A>myPf;9nG3Po z9-@1XM&(kSUN@q#gDHRy;AJ7T-lr@qnS6zG$dDkcR+uzEnlFigrH%m**`fy4wMt4i z_uQx`dwO?!v->1^cbwcd13S*zOs z;rjY-yFVi|xOkHhYLM)9Sw~0i%S9KqSjyzioM3tmo7)$y*OgyUkl9Cl#C*CJx#CeS zmt?^4M#7?;z!ZF4G2WrhAV*wDA5LRgIb*`~>)z>y&f-Uvi^+rXHRspAn?OE?NSK1) z8v;4^7xG0!v1U)54K^KJqN%FJ%qc{W6wX2f<8@V_Laaif_f&h@okt80Jta!bLj^OE z0!T$QL(iShXCy)fdY!RjZp6D^&!wN@od=NO%V!kt7y9(eVRklSu++U=hnvGmNavtj z&3kz)z^J_W9!COM|1l2VavFSgKPrHURev~*1TAp}(YzCXh2k;% zG2Z5^wOU-I$X1lk_NAwFoF3M_tz1u#gY*Bo?^5whuylO;tW=QPd2Ia7vO-!$28Wzn z!p4U6b4sTNwBu)oU89as+?107e|rBIU8@0G36b_!Rc7@Hb3?bhlK6p*53_6oNjLjC zVW{Cv`(-Yuo749gO|R=hPEdlt*Zz#j0K&YOpSNzJF<-v){k%fs_mrh~#+KNf*7w;6 zXKPsoe@{B2*!RUvTHL*0j{BApf6^W&@;v#ToO}rq2CLvv1UHoS?f3>;2*0>-LbDx? zmO76oecg#$*35sDymO$WY^7A%qA=CUvi7hJS3(DKmeA~gd5mm@4}&Ueta zt>mpO)$46I?7T2s^(Nqa2j(PIld=$Ak`-n*) zB$+>)#=KzLb&`w{Z5E|{aeB5jn#Pr{c)HC{m2|m)<^2%Ei9sqkp7|c`=g*%_)_#6| zj)${pPP^hU+r4#i+ps@)TowAl>rZg6&KfzFlJy5H3HEY6rXLk4}$ zjQe|dUqIel%+>VvR^&d|l)YzG*`(%0K-usNKzH(MnmC>iql$#i^?w-fWzWd9!z zHC<MC>%`&hMd3xo>^`a|7v$esze~;v_+DW1VaT)8 z(3VCl!|Y5~AKO(6&S09!07~wS+-WWCueB1_h06c6-a}kHf;n4uhQFHBqz6W z24SjgIQl?JVfS+HNdb}O17S!+JN?1KKF82vR&5cG2Cy1Cy%MIx0?`EMM}fQ0)Z%Sh zv0DaN^$YTuedjFF!dUkS0-)V6U9jD)8LGBkifr+^dySgP21=|a`tjr6oh*}i)ESm> zB_=Sw!Q4}c0r$yeIrW@8^5ld5GGpzs|=?M~CqL5tSYWD~4sonM@aHUev7o zx5hCbn)$u=Ym3t0U&dP!y2l{e8co1IK+@>>oXloPEMbdvBTYt*s?VAqJ8}s=a&4qP z|GB~St>LZ^Vj6$MY|^$+l@Hf&Z0lNy9vjDx1=XWJ4iwI;Wq2ww_|HF` z-AtnQLuy)lq=fWE5vM%kZdx;`Pq}II7vZEd8C_bk?|sU8N7J!KNRvIk?H77Dm`Y6T z8d#w!B$#mY?uYS;bDoeYRDF2Z?`c0FAtn`@FjQ_*6IQw=MkYaD zx%6d5OP$45K3~$eKd_jz%%m)Q)Va#COu~aq-S&S7T~lmPV4GEtc0Bm?+_&i-t%khX zdU{wpc+%y^48RVcOwF6}RocE{j+I7|qRMVXqIH?sw`0WsbZB^6OX#5faOQ#IeMTMD zT-3UD-k1{Oaa~Uu!i3h%osRi=5^E8LaWPX7KlXuwyA=~r2uxj})nP>%3OlKh`WfyG9sLj*R@?E5T*6+PYnMXu+ zCT1QxL6xDMd%=^*;0}A53*-HQIl417vkFy=7D8L!yV2-lA}7wyhT*Rzh(DEt(MTSY zp?kp-KKirp4#QUE6hXIc7P>mVw^Yk^+Oj^f3#>X>rKZg=Q)#(KL|ONLvp1lbP*G9| 
zy-w`TY!^1dWu+-Cudjb$L&pE9t?fkVR{*xtV;jT^`_+J25??$9#d71_!=gA7Saa!q^tnnj!Jh-vJih7X3GkB^@bs>kig*4QTT4|FMHaeCO%_IrzT+hi z3EndYVm&w2M}~2_Aq`=6aCE-XQq6_c4TFn$6GNnSI{hi;-i9>GzX3t?tA9PnF0bFX?l{^ zrrCgUQisqfrioA+NVmUlK(@eG<#nHGVFS#UNk=5y#aWo&Mcxd35}v>hQ?vB2#Bvca zYDT2Xr7|ETE1E6shIeYLoU5-%5+Nmh1ivo_(JZv5d?wpISQ|l+_zxwnJ9057dF)az zOBzT&h#obifcY}(_9W>q=eEY*fd+l-pE8^Jzdg<#yiuS~u(#>IIxbWe(b1Lf6ElO8m z>n(OCD7_DAEK%Rk{HJ6xhmKD;exLM3vL%1RA(q=jMWHA)84X$}wduK2y0gpb%g1(3 znNzFRV3}2^ezP#_hQ)c0C}k;!#VENC`rq6K`3fH8Ym6JGC2sQ;+9dx-riPb5Vw5Q0 zWRW`nK2$d%t7LPQBn2Sw(*80xUG?$#cIhCTq^>i{YE8vkjg z;=!Z?S^&DLzh8}?Q~tH9V1Z_o-vYHgW#_%NV5PUY6LvBy+)Hu|myXb53$f8!eZiA4 zJ7fPwWnEET;x{tlqh&kJx6gUDW`|*~5iL>*rLI~Mv<+NB5+N}AllwF36N9tmDm%`j za=or7kr>MURgf!DiN3rOLu3`SM`IW_X^Vdx{?~e6$PG7T`h@jAaw~qf3!xMLQq#5| zy2O`HkeOvw1Y_JKI=x0}t80y<_Lz1I^CA_nmjY#XNxUDdNo~<7UI+Vnf&tzkuXBqr zEmwt{3Ij|J-nzCt_dvfD3h2&4Pe+xRUQQBT6pb#j%|yJHFPZR#9`BaTuj>rUzZfGJ zzST@ieOAE6t*x7Wr?C`>uG%_xQ3%sv=ufi=SHrUDRYPboWeE!2<;$NYLQ!?i$?PA-KB*PjDv?EO>BtcXto&u0`F+_pNo$IeY&A zo(Br3nsd~U-dlTH?C)3yeTFd=hex*!-r_kYxku!sU%%NzE7UgKM4n)7m>_?qJ{Kfe z`2(?h&w=m`hnAAf{n|Y2q-UlJd4jv`GfYVeGo+_gojdVbiM5*2RQ-;4{zjppe$lVlV=dQ>a*LCxe za{fvPf-%6^7Mn64Ilv1?CLlC-G{LV>wyvHTC1ODPH{R0kBL#KTd<9X+RgZ}@Wg>qr zja9yrM`pOzZVwzA=bVk$gNdkCnA%3_6{&L*nyK@__5%)Q;g$Z6!X$#?DB617_)3GH zT0vLJgC6QYUjQYTGFxcD;si+vXODnQTN4D|=N`d~i#>vXM1tB0+xb1$tOW)?J+Ro) zBfVJD1H4%B{RA_iR6VF-QX!}r$r;aAKwUdynoTgH{vTM;khOCA!*05-4^Aq#3ymbg z@oeWdOzf6;VNv1JyuSLzh5LW}0l6^%(_@2)6^69H(hDgzC<#_*Qska*7Excf(Q1`< zT3m9!54M8-=XkvRB;P=2E?nH%z&ucKZ%{nNa#7Bs%vL4whDVAHF!NCqpAjThwLSl$ ztzJiWuXT^|YvoOG-muKa8Mr+uYMp{|#M^+BcA<1<5}_ zD}OfkxTch}$9mn-PIZ6(In1x^gx>I+VBYsu3@%2!rhxRgB%nVCkt3pAFx92+Gp&$g{$ODU>mayPfo{6<2lYSPN1|o_?jvCnH%3HZ!Yf+c~Qu~9+ z23VMGQkKoJ7=Rse{>|U=_80N<|2nT+G?VC2V#zZheVhlI&Ks%h_;d0>QGSAxjwk0s zV*J14q2e=Twiw&@?jR3ojH6%oM=~j(Xhr3yU(i1P#$Nl<#d5~AaFJQps63a8LXB?mQ}Lx;t4)#0e(1iWDf15PmqO@_l46apEf14kCfskj%8VO? 
z3~x9`e*djnEA5f&&KY4{zzXP?rJDS=E$-=ioq9MS`Rl6us?a95DcV$be|gyZuV;!l z`D_5z=N-kMr8ihrVL&QiB5#8sI2?ItGjTd3kO2BybFyKXSLYf)os6btJ9+$JD`dHLbWfJxl5Qw?Jm-mTsoe4Ho$y~+ z%n6}Y20d7KfueYVMwz35(Ac1q_Q2x&CgLkM3#a$oo}|Yym&V9S>F^k#YGFB>ATsCR zCAD~$xDaY!K>xF+;5hi{j8&dUt8P?}i@jP(7Tv^NFv%9pgplri`|h7kPpxkGXdr_P z{&-l;-|u_tibUzWZ;xI^$)#Yd6w~gXlIMwimYEecQNX_SK5<-Jr|)ZzEa%^GU@& za?9ilKO3)huK*qKnkmp~ULtb=7#;wzh_KSr9&G`r`Vfn$6GkNt!Txj6U6jlD@RE0| z#Tm1^bMqRhx2Za+gf^NwN@tUre;zGs6h03V_b9b^MFdqZ=aXE{v2tV^Yqb)%mX`%(@X+tqYe#>1n~CSeWb(e_YWwVh?yz!pUo+@Q(f>(Kv(t^!iHm(ZSI?{K)oAl zjx?(>RIWyZv&s*7%;?ZF8jyek3^U4QB|*uBv-`UwQ!9}b z?Fsh+*M)~cNC0VnKkseqS$G6D1u(sjN%YCYInw@8`z$mU8hg>=#Aot8&?&`+_Nm8k z?ESKcq$+vViwPqd){;e-JDa5&{Wj!*T-*t>-?encHJfE8hC|Wr8CyDAYN?!i_-e<5 z7jhWBC$M@k@*bY6*cUt`$Cw+H8hUxoo3O_r;!vY6-~$qGwB{aiI#LpUUT$ZkZS2Y^ zFEY<13EMr&Ig$F@Q>0l*McFaAwqtzR2IKbaOb^E?OBv7q`D~VOa?!qI+582{>z&IW zoljX}t!2H>GP7P_kd(oL>MDa4Ja8CY<1i4o!|*D9SG_2!sE8RB`_Z}?iW8s$nme_T z(ei`=TE`2sUgb0{ZM8oet*Ia{7856WT>j_3EOX_{9W@6mV~6d^*Wb~W+J4!VGLrZ- z9H!y{u?v>cK7HSGu*uFX^2FXF)PBwRwN_y#miTd(*ZK4 zJ4>i7FDk5Qsl$#r3ZRur+Y9*OF5F)k4Hc%5!#f{XIVGEiJ*t;%-t#3151UD)zz)G? z1IF&>7H^7@d3&?va=!QS{Nmm^aS?}*)YJ2}yM;g2x~m3$`Ur#2>_ zmM1o>{yjBQM%ep3RKN0+#>KL@7OM=<(2RGzd?)!TL=D_*D`IU(Y0F=Re_8VlL zlFu@_2_3arYKsIIUEh!|fUc(_y~lIIKHi-D;@F(77w6Z*ggY;_UYE;{b}|eF=){0e zA^gpLG#>v~Q$0NrpbZ9qe*mPU@9e=0hO`eJ6yQGJxoIos0ZR0flW0K6v;5j8chroE zH0N-BX(UMBi1UY3EaS@<+x&Vke+_X;nsrzvC>GXH-~$_122S(11GmoR&pA9r6*V2) z*A{8AVy8mfg*vqksv)@`9t?CD>1}q-y|-X?6V7lVN4ixeXnjO(p z!TRe9*B|zJO0pIiP-F|5ONH))CC#bzv{7+FB7q|d>PXR8D_CV9Hg@9p(4mN$C5%J| z_V}Y06xA;DqJw0C62P}{eHF(~5Ora2n)5O;cj7OR>3;kMaPXNjD55cY0W1akCfz)Yao>Pl>@Ro`>q{0E2? 
zjfjXyaX4GOZZF4pfQPqPY{~>63V`mNRUn`pI0*Qx>)S6>b^^3DpU0wjrlD1DzTI@_ z5{?68Bx=SnlK1a}#x#5%SJ6Kq&>q$q7a#1qcv40;$A_H}0b|k_9mFH=KiG4^vEH$- z9rl)hq!0MQbdbfg!s_ntwk}?UM&L3W{SFt$>1mV$3m;R^%JYCK9*_E@M@rt*Xj~`gTs1=9k%`WBKsisy=%#7_*6?!b5-{r~eY}UGfw^qI-EYpLQ}2*<Zq;tOVcf8)KG^;Mxd+E9{ z5pn(b%d&3%eI1|2RvvM%C|mC5yQsRSKzH94+AMYMO^<3&)?HY)pw3E%1Pk&KCGzFYoaFZCw)7s9b;$(Crm zF>SE>qa~e#5Hu{^-fz&ct>{2F{wyFoM0tX#q<_#&c5UXu=Z^Hz7vpDlr^;hinFYl! z1)6bbwmy*s3yW)gq;_pv6r#MOPe7f^E=&i5iLqElQ2@ezQfIHda5C{11*uhR)$X}G zt!&vb?2+$ND?9uRyiYTI>Hrjv zZb|%NJ;wxifMW7hpILzAl$n_sfZkTr+X$tr(L-~(-y=bZ=XF-JYtN4@5TnayergTX zhyJe^i~!JTSS3LGAWtCb9F8Ojux`xdMhvFQcfE>kvw9VcB6)s?)!Nm@y6fqx!xUL@ zRB>3!__y(s{)n;q9aqAD(sN_%Z9OJFez;Ht)h8yPua7S1ZG6+m1O96n5fM39rD5^? zmv64+AY(@k#}ua;_#?nhFg|uA>g9)?{MAE=f9)wO5{Qh7bUv5OhL83xd4ZaarY~on z#pWDJ3y9)btzd@c^TK+WVF?zvLH!v|(xs~$Xgdv{7l6}dt!z@aP>BsTXG|a$Y+%Ja zI+x$0qy?Csvui&MGk8?%f;>Q2CsElh0Jh2sb8ZF;t1ZA4S2AT=U`z?7@bw90#ErbglFTUG%_s>xuh(w( zQ2kJ{4a7Rs{B!(oJ_*%VVV?Nem#o}VB?z2~dw5a|t)22Hrp@NC55+@$#m30?x6E)}}% zSqzTUttND|e|M@O9W;|tiIt6O&xb?@23L@s`sFXziT_3V4}5lM?W{I9J+42Z zFbs!vO1?avJ8zynQXNoq;dvpqsDj=y7gF@A9nC5=yWx?M!N~M}cMC7#C_w!Q>_Fzn zpwR}Up48-n{3_iAWWfg$ZS(*XaKaT<(WusFaM*k;R=`6BQ#P9^vh?`(FeTS}`OAup z75dk%9p6&lqftMl+@w>qNErH5tCySYmB*_J{czbMApt?M$@z2u0Lv}9X#Hy*zZ4h{ zLBzO~t@-{qFXn)t%X);* z@Bn)Qqz-&%Ur3>M{;yvK|5M59p!NwyS_X3_^vK7j+T}=1tG>J?>jMz0UgT1K7yU1x z28{GI&jaIP=!d>XjuZ0GhM+}ll^F?zvsu?%%->U%DSpOCm2r()a#8xP3>EQ9c6~1d zBP&-J!Z6y*ZUc0=e4nhs_}`Dr`_p1%P<1Z6bADNjP8K_rWpkp-fz80LNFoULKt{ze z`C|qON*Hx#_eHJykYKK6Eul)iXTlu0=9AWym`q)(iiqhp}>#P$CzuivR$Ff4r;{y>kAXJjd_EQZ$!2iblFr7bh@gz&VsI-A6z>FFR?HJ zRt53;{sozSHk(0mq{qdj(;2bt2A1Zx0kc-7YtJFqr?c^9`QK#QfFHI!8A1bN{`yvZ zMlBWC3=9v!!nFK9MZsR$=5MFV%~Y?1fp6G-U-!?ITua%2+JPK@5E#581^^-Itf5Pr($%!R&r=M=}Q2FEN z#N}|0B!``rGhYG$$sxc376=cAl}y=G&aBr*0$UH?&eb6}xD@=U3XHpT7etJ8j--Lg z&$q-=-h{f)%X9Rc=oTb+P9^i0g~L01cxx5th^JG&z^uJ@>U~C3qOwI}aMUp=Xl{|L z!_uh^B_hIW#`D;~I|9w|-F$pH22&k&xbRSv0g6Ih>-iT!?apx|62q zg{>#xSq~xpNM_*_KdOWKG}8RcJEhF#k^(E5Eb>6DObR2 
z!^QW3qtqK^yieizDes4(-Mv7<^2;}1ouKA;8Eh9QE=)`1z2Qjc$NjnTr~r9MQKP;a z5hb|c9;y_`bG|~Jg9+~I$|-D(bR5-v0h8c0WX8F?L#OL?a%l8}46eGHt;Ot}kh$v} z1?;J0Z_wp0Hs6q4)p6f$RsR`-9^i!uu_9=e4nb8JCScVrM$m^&1HYknOE$j5pQcAyPYOMlb@ z;S3m+0meXJ5KQY0C9u%}V1!gWZYLKAKfKY1S^fJ-vLsD{AGe|+qsczBY;{ER=b3S- z)i$hVRWD`8C+k1FvNrm+FK>a(HZ%swb>Aga>G0;I8N-k=M^eN%I8k4RA$eUn3CD{8 z{k{Bd`U6mQK16leWBfn?{wTnx-PuQ8lygzDlA@ zOZ>L$<+N)@^%#;Pfpg|XubY6f#{Ly$?@nKxF;CVHEM5u<&KOWPCJ{}e>PWqYA*&#vcpCt?+==ukju=B3Aaf-h2f+n&(;e1;YJ;hb>vm6eQCt3X|Kbs zL=)>Lp+*R#W))A)@&avKv`toRT6>`C3s^F{NJ1wc8u`hP^8)G}ol<=n&unUw%9%e) zen8R>Tg8UIE3nz8M$%)?>3>&=xvE$~i;oJ8K2u=ge8pZ}8rkYtMGSvKJRtWZCo6B= ztJdvv=nI`mx+%k!{-z@ZiplRw^7#~MZNn{l3BrxO38Bm3#tBvj4ed3w6!k0BFQ4upf$=w~ZK1m{B`7!g?yGwm(LM zOAuVwxc#)Yq@QN4vQ?t=jlR5BPS~r@S>ZW4e(^FZf?i991a74hqd1u46T17J;quDp zh0f5wfL(%>6Y`TzDxdVj_||!2_GazeUuKnB)hw+Tz1mW95$^Qu!uB$WNEKAdP5Rv& z_SFX_XAEZH0~+%laY!EtYNLdvU~ZOBC#(dVuZy+?%fyx^RlcS-k^`V(*%r5Vz0SMQ z`s+K<7-|=WCC4a2ViqLftJF+g5r9-^%a*s?IXn@t*P>Gg@s|gPM3LAG-!m z8;mN6+Nv!!wr5y1!F6sL_b=8tVTe55?aZ?sHJmPy9IsTzhN9(x4;#x@8xp}`=y@FY z;SorCyFX#SHtW#o)+Vu}F7ChEDaC?Y0&fxZI!tds#GitG_kze&OA`N2vXkfzt2 z{hdig^>jWMIYdiu@a=FZeYPdn<*Cj4QeD8mt{YwwmOxtc^xLZ@im*vxhCBPI?~sI+ zSJV5)vffD_krEf``$KjxRs98hKg*Wgm4ClUA6;=^2jkzK;%BN+1yZHiI&YQ1n-W)8A&HyJNxP6(WF* zP|ExrIh?L~o5C0>wVsICHPN%}^13_UweD^5K4%Q{MU) zF7zGZtr#=6^P$>`IDF-s?c4;LfD!eX9LsB#xQE}5SE5ns)=Y;q4R+0uGf{d^N^B|C ztH=i!B0{HGA^oy?(=82yPeb$q_w8&%N48{fDM z6lOG&H%+hrZ~%z8DIFax(XC&~3l8fW^T@#vWvw{P8RSI3cv4&dTM3-qV8b+8Uyc`L zeXJYxivHNlxRz;6{`1%@W>f7V1=6t32KeySJj{D9O9OW2A~u)Flu+(|RXB1Efjc1E=fUZX*$?E8`< zVv3f5z{303?T_F)gzDzX6t8r&9Y(Y>^3DF_4oj8TX&2U^lZr%;+o&^d|6J9F4{@Is zucW^2T^ZHmd2FWP4fU&19SK+o&Y1>KW()VgQ9p~(PaB@TDSyw2i9tdeyoMmfpZe>d zdE;7-xA5*K>lxyS^zeKCG>SdPqD9}d!6F)E?wo5M{H6P6V%`?z zvIiYCJ`}UjW!+$C)i#9=y0!j3DStaC@gnPVG(6%20p)Q+ zw3q1_g0R$)QB-g*Z_ASx?75_E9Ax-y@+XEXiQZ)AiOxq(_qA`329Zen;nb_x9m?%3zh+aX!BXt1T&u;O?_o&=zSbwxsm)ubwC30aN;Amh}uzdqIF*1 zRf`%88bSmMkn(e`L7vp|H>i*fVIM6xXHbv(e~u!5?klPGK*WG<9skQqB+y&EZ-3~i 
zgP6dNA-uOd{4_2CAkWSO<{}|a@;)^j9#3!5^i^)K!vwOVwjGpkM-xo=c`nh0`N*Ne z#~!hPC)PWb!Cqx;+SL4PF67=GpN()^wPpw~6n2SBAVjpL?bblbIg^1;_iF<_|gojGn5MB62)cOwN;Ykdv3 zbrPpZS5i=@0|n zdW&D_$orJYmYoM0I}js^3A~kdkF#3f=8EHAwuBJ1f)jFh1z%HeLHb21o^rLfAw+@V1AT;p~5}nduz0u zbZ7bNWLeN%*zh(6RXT0 z`6+JBD5%UVKhSFT^-NH3qe9i`^&67q_UxU-!UrID0BMr;I>KtqNlyDx2~yusKIwvL zGZ7_e{LsE^R*jAGPPDK{K1z;I@+Uot*P!bzacZyj0WQDHvPj_s(vQGlSp2uqX+LIu zG9suz9kuzy_t`dKzfB+5V~qMp-2$x?t?jT;{{R;vVN8b-5vf(&)Q-LNVLuvk4S%aS z$C)^f-|9@nk^^;T37HzG*&aBCXQkI_qCv33;zPI`P zob8t|P^=c^dVXC*B7!^6bF{aFpVMYLEV`30_4Q~lXVx>B_bn4hwiU}8QSzrHk*2-z z_o6A8&K|b_Evobpw?BooFc1|@bE(Weab(1$M+nH zM)dqhGgTN%*pl4*5IIB29kI1#)cy?{z@9S^M9(7VGg8#&d4KqjDTI+g#cjXZgabeE zAltRWWBCuvH@L`y&0{^~yHDUi%mZ9qi!=0(6iS~@8k6C^Pq9k(y+IDUVv>G2-W}hN zt)dm09|K;#^d}Y~{DLlBCiB5EX)U+ID+`!WvdUB)eqTH4Du`yJEklLpVp{AXk+~C} z{p`Ab_kIO;w7iz%>-N<6gPRbPtu$a#T@FS-ql$B4CRmOwwDeqM%s9UEQaj{v{5?oy z!*lUY-)}PZt*w5KCnd>MmPVI6+|_lHk48I~d~W86tdX%?gnrS%k)H?S)JS1)eZhk_ zTzA>5a`&20En91}YbK=S4;aUc(Jyp7iGR#o24wyci0I+leYMiEZ{`V}i0p`kkpkPR zH;pvJu4VI%}tO2Db78@>YiESw_50+%=iSN(7fi9_MIy?{1hs!#_dOB^?I}T z+W$~`eDYgq4zD32zrQPYbN5&t<~5=Dd9ifXU^ob8r{~PFT{p7x0IMAi{zD!Bg1@|c z0j2mkvXJ{VrVo}-O^@6XD@cHhC*}03HgD0q7xygt*vh!A>i`d!V1Y~9_ZyCJ!(n3W zeS)<()#jLV+|>bHAU`g^R-m%;hm_86IUHO`yx{sxGNaVt42FNwYQ>#d*UR&1^}EEE z^u5+wOaSgSva|jS&U!OHKkkEKt+b|jbhP_|p{?`+UsiZRivdI#Wi`2h;VaZ#M~3DU zj@QGdeSPC8B0L$Wv2uURT*5997TK1oQ{f$P6E3Yei2dP&zGP8wpI33MxG66@)l}c~ zjDBOOL6~z=dLq=gi9uZtM%fv2Q#?&OT_?Z{WIiQN;8_!G+_4ARcXwy?I6`wuyb@|p z_47d6t5x#9nLma%eJlWV4LHX-AFR9u#1TOL6aaNH0}8Id+5~_-6Ynlqq(MBxBSQrq zXbNWAx2XZT_A?$JoIB-r*9EVIB?dq)0??boWr~}g`mVmGJmTQ5CWt1Cz4JpB4Hmy} z&&k5sRC=y6g09c<_^zNcetB2*&HK;2TO1prJb$3MJsvZ=Z_G{)F58Air6Qj{tpXZ5 z_s}}job;HA0d3~!T4IU(nraPJX|wbbOUEVGUq}w{HVlT1HuQtbV92cL{i+3aO9pH6 zQ5f(cd8~`e*Yrfc^6%cY(*|PTxJXIJcD?bwz*Ky;SU%jD=h7G*8 zm*v%#zw5w0HxGFZsv?IePCEl>q@VGt>#Q1Lf+Qv z70GT#^z>dgG{#uf=7WMWQP&CajKtTQl075dwpgs>_)!-9zLd%DN~_r>YKm}4*y&Q> zowJYf)-r#ezQg&x8#vLAHk6p-cilNQJUcjfPHRfzk38Lr84tbO`n~7QZA+#f#=bY2 
z)$NfFiRNuHA_KR(2a`pfZ5%x1?q5xcZY8&L(lV}(_++mOMp7`c*q*YUK0DHh_z?~z zi&MFsE((sQ>pr+0@vVG4H_zhxF{baSO;EdSxF)|gLPh^{(f7^c+387uE3P(a>$&#t zA~Uqbww{dr-5u}rF8a&L$#}~`PsfVpz{ATI_Wr2($8*qAiEQJ6qxMhx(d#tf^{9zbh z`i&zrV5BZs>qt6#oc9CCd>7-J*Kc*k>wP%**F0S7H<|6;La9T6 zKvc&1oG-X(ypdYJ;@E3;#xq~Q+*Z5d@0}~t5pcx2Y1^ImegM6%!*T(a%(on&tb6o5 zb@t#fuAKUedS7&%tgUa-*7v&~$v~_{Vb30UFkJIL3LyG(`L%U7zw}1dt2ew16=|GV z&)S1;@y`&I>=h1=`CjlCV`w0Y-gAELKFsLWObKmCG>>i%WquE!pSXLdP1iz?S;N}`Y(#e3 z^tk%|zFdi2Xv}rm@96^gJ8q~-&HuDR0jgba1ECb!LQ_T4lHSaQPp;aRyXZE$kPyx? zbld8A#H(zx;2XTH)60qywQ1jauqHdM$riWehgL)6)2_soKgH0E9Q>UkRK5m^n{NZR zqP!3AqwK)%Ngjx8s@aUzk3XNGE2EAjR^`>O9&`ju(2H}JX_#$Qt;DbJ^OVvs58JWl9ZYc z@0ms94J#x@gnvD0htCtEt^y!lq1Pf$#D-eV?qB$cJB7ga%g@B9C+{NYe`5v#K@*eU z`-(RDyOEcS$>}=m(&>0719CZTojpEru@w@IrYSBGnje|2aVqed!K`P}k5dH4e2uZ7 z$MwM!7yYR|yc6p8#cYSAk z*9{C>f4*e;?n-nQ>lunx(r&1*i`=xz`u*JJsM3S9-3Js1$S~~~DT|L3Bj^a=;6ja% zI3a0wbyKoK{u>T;jNY-+-%d2}09GyN~wnBB&Y{TW6bog^RY+XIUBMOatc#`r`I73{;Oio(8DA(>*XFP5C2?#J}N3=cat!@rG+sysm0TbZ(Gug?dCnjfS9< zzt5n`M%3jDz31pK?ew^8q^WcIn)ST8_@v+Aianl}VfQZj>hD#LDPsJh3t@w!MZ*=B zJX=K9#C__0QvTlR6o%2G? 
zW7oM7RjTs0RqCCDJrJ)qKQ@4NRCPo$sQe^iVa0FJ(?UhmMdwY&E30+iW%Y10x}5rH z%$$(2JHe69Q&k@C@_ZV4K_OS`zMQl-X3_GnSW>ac`W%-ziE(va^XWbQdSchcXHW0S zh!E^-($0o&c4rlv&0%^S1V!e@JbvcOVAusb_NeCd7h11Aw_Xw~`T9HcYKN)#*_d^K zpF9*oWN8;AsdpRMm52lcPq*s}z7VL}V2tf7OU5d5Z#?Nhk%Na$EnIoeK*|DoovThl z-;`^KV1X)gBUk zf`MvczaSUo^OO(N$}h7ar*4q3n5V~Tj%TyJwkM9;=p$h%N&I>5%A{Tl*$WcCwH^W0 z$E$>B{p$|n9s>5MtL_`$?Rtg%v%9BH%!pIQefH0+2U1wk=?%N!S#QWhBqxju@Ym1Q zvLRWPex`Rm^9C5mg->238s`%#9|>pH_uMer!>3ElkC*+7mtkRLVj)WWFAyN;laKL5;|ctQ-|q|llw zo)FZ-P~SO;Tra~cu6j}H+OEenY*7oK`n=7Y>|ND|d~2=O8=i~vY6PdNb2&M&s;N-h z?jCRMd1d7i3rvDrXT3R1xg!QgMeOAbw?#NADQtLS+b%vgle8pZIZ?N+>5gH<@=5&o zlXZGlwv{fS-(~CQG_LJuekCHpiRnnb{c`uAndC5po-{aX+Rt=$+izoUO~HcE&727_ z9hqyKOZ}fJG+`yM#f9UTW82YLbNIqe>ElBaNum-|BKJtibYI&E2bYXz$VkI2(kVnG zAg63TVmm{hrEMO4aT~c*@drp-|E(b&?|lrFm(ia<{&^bXZTMb=D&}gVNDG#!Br@C zV2HO<<>>Pn34Jm^*3^xerY@&VkngIsTYXcRnz9PAN-F-(RZ zS>09b!P6Obo#ZS0K(MSjh-(igUaH}+?T}JE2X~!Ht4yu!;S}-DA15dzR8*7W%shbI zIDw9HBl_Qs$gg0AOde(cYu*0xS||okwP&tk#;;}UheT^(u>;7N&PM|_SLFM0Fu*zJd8L0BR-?32oXTBclTHCO#n(;AXCkv8KB$_0Win^ z1Ze!9#AjWSsJAZjjT!ollr#FKo$~?Ur`MeG6d;qMx3?FtS*-Jz0a*V32xTr_k?R1r z`!!{}DnYf~_*>JcRqY>ph|p5~4aS?SVK(6z5};jDxH5L>|9h>7{PX9bX7E8_wh=2r z-rXmmjUQNW|J<}%t@2)BRTH??=3)glXjc2VgxoDG`OLv{BHF_?K=$y;Id?wym#$6) z$}pDGzu{k_Pk>9G0|4C7K~i(*I4+7Vd+O!LH`)N(6o?pKOdqvkPY2kn#~cMYIdP}+ z&KG99{k?sdK(fMR)wb`p=<4lqs2q?215|gwOC`*m9bL=Q&a6q>jm4LP5NxUR#(t`8@ z??{E*jL*MWyFD}@iqolLauFJ&F>&#s?Y$eJf3Bf=GH}I#_VmWhNUdpkaM zvI9%lpBF`3!I0T3Y(N?uBBVu=C4#8L?W)#p-i+>kdm1`jI0KFnPY4j10gJC9Z*F3i zzc5`Nub|uE4ed88*%2f6_PXC5m%ezxh>9lMMH9zP-5jUCx+c}b%9dR*-;w~~An*xo zO5?8uhw6eQ1|^J);eXzq)c`{KHKewpEwg`tN8-!F&ymh<2_U(+xYFcXQF@)-vQTw_ z)ZVq~{& za9#0^bFrkdq-Hek@!zS$owg8zKp28U-&=V(YdB!riTnHsL^8p`s?wU8235Jm>iUhV zMO9ceBSWgObPO!8fk8jDT+VOARO(6o?!o31uS3a!b4y{G(ViCJiZrodN(|alVA}X6 ze%DTc*5IP}N=@5_4?IwBj;#cYV~Hi;Ig} z?zbYn?>ED)4`(9*K0Wuk6A;@uILz2CtChwq6%0u8wtBwVthZ&>)UdvSNY*@$=v|K& zIL#KxGaxdg4cJ- ztBXlhH9-RU_Ws^oJPKdvY^~MfVJBHiULJvziwg)5?Q=L?;etXZ=I%MFXqPuLqhw-Y 
z5)~6;0=P3~qiJHmlO&gj`P=gidkhTZ%QS-#F}{Afar~yqv^A!psMrD0uAg2xaa7&p z8*=jA;p^nNIamz3FxKDvW*C&(?(m6-y_r(SagSQt(LQ;=5&lTDw;r-V2B4_-=klwY zN>VOcB*K@0@~@)?jG`f`d^5E=^dQiL-nMpz9)=i8+;mOj zAE=L1B6=ig++qaVt~?Ep2aPi6bhQ#VndS8p)U+k+aiK&~q_WMr0HUDDTHpL9I7yjq zey>pF2!mCt0VNn9W>Q`r9TpMYWV%4E<)V-1`Metg@QxF*S&0HrC4ia^Pf9@%0w4^v z>h0uet!7jBeYmszUbu9dT}@w}AL#VkGRQw74i`;v8b zt}Ijh`1R|rQG98pJm+vmHm(D3KWWVdB8KvEa{K`>@FWmV;qBuyyGk;2MM7rRZXG2b z9x;{&7Z7WX9O!Yi3vGak`d3+;`x_6I_Ngc}Pk92L>cr3DMZB=RSV^90!0W-@JJP zq>PRAPyaEM(ev2B*^`nL6oisXXBW$x#3Uh+2ku~%Mgn80vWf};s|DuUw{Jy7ML(O3 zq&UtNv^pPPK|@0ahtb|NF4bB~0Q4bXNI+emuBLZ&3G+jqoet+K6!ykU{qpF8F;CfT zVs;lU;;}5=DqeSby>#URTLDpicdwuS+>eIp*IQsuI}czN@1)OXBQq2lCB@BfiP%3; zP*cjK4u#51X7D8sBot3`W#;rayxtEQ1mq6SPbGti7r$q%br-z_fxK5JB)f%_{YGsK zo>O(T_LhX>9!H(u^gU(HT(h+a>WMTKp}eOwC=NKRcQ>NbkncZO%$@#_?F1?d3#!bl zS26{2^!hpo+-C1~986E-C{-y#-?9`6Rj53wvVswE4sQILG?k`&novq%YP)lyqDGfT z&8iJ)-&ob#EdBsy!yi9>04UL)K*q}9CWRv$<8b%-dbWZ9I03k4#mxZetj2mC9Rvco z;6Z8C8-oZ5ft=pq@aSj?cJ?G-{$w?aZ_4j^ zs_8!o?GED&HEi!&zrvlgsvYYv1Ewd7X`s%H$Jl4Xp8RzA*wG>AGR;<{LHC~1!5!&NVFV%{Gh7!Xk;vguw87z8q#m#sBQz=(NYpVi$s$OBX{>slr&u zlZs3kGBpiH|6iW?0@~eMN^RssngZ}2%XY=uN{ZiS0bAHoSiSP(9 z>^0^gYZU*}$kY`lq`SMjdFQFub=Px`_kG4Uz8?$&*yrARtu^a7<~;IW zy;e2R>#I7Woj>EitP#=@zCJ{KnZMc;0p|*bnAp4Yw*R;Y1I6l>K#hZew*m5IZkUB^ z^%inNdDS5#yB@0!+z#cV;fuz3%5ZMKv>DRjV;JUx8t+WS{qm6!X04WV)W{%T*E+7t z8Q#gZ1RwZ6opzuampM3Ie8k+HXElaUC5-_3#p@eA`RgCwc`_TXtzSGtX};y|&cf@= z>1cKCN`K3j%tD%QaUhXB(WH;02W7zX*TDXdgvIVQn0Qoq#pYYr&KkzMiv#Rdo=2FS zc0MnBsVDhX^U7~?5=vQ7nDtdT-ShN}TXfj2-sG`gk`Hfu#L5~14e8OF(~Y=^XWQl{+ZJG7~0U?h@hal@$)ms7`G#nyZ7#K+AZK{ zx$fRPKkRS^O+~Wd+1c4m_^pU@sZof%U7>nePK4j0u?o$Se?aITJwASr#rZCTzU3KY}<>5 zlXx0Wr1c9mj|oCSJ021cQ0n=|2>G0T?&{1=W0v+-V_%WycgEH$rPS3)+9GL{&_a?E zVyXTJ-4W+xZa;B%=bwknur+L;auaGANLfPU{_TQgwZ!tndjYz zvv4nN{4z`rP6OTvXk;@~9ZX@wxSu&0ZG!<3yp;NMmeze@tXrVJ6qB;Vweq^sU9`DZ8qoJd3f#&=6pi!*#xaIhvJ7z5CPSeWRJ50Z|;*-&Zw%&u)OKhddk9Sl%!b zHwSG#8~SpygJDWg{RRTn=o&?dJ)){gLMkqZN~Zg(IPTL!&P*2rR)Rem@{>zv96m#i 
zF6bYh)GHv*B#k_JX31i65N3EpMDy`_bqAD0@M<~C!3uMPJ$&FlsnKvtQInn@q4~Jz@^;4@~;f;rT+qBym$;{$(i&o<6RVl@pp3Z3fT7a3C}KL zVj2t;m?Rlio3ap%In9&$N2L6t=E5F?$PD`yu92Uxe2t6663)0M8e(Ojv+SbwXujd! zZwOh_>6byqvAn2?W}WfzU&TuVj9yftWm-N~LxQ${6@7l}A}?ptVc1JpKt`DBr&8ZF z@R$7jpC9=UDTcgCWlTojg38-Cox1mkfjrt>9q$sOw%_~yuM1_zA|J{KJ<|xW2iw6u zTM{Wo_}4c*aoAxZa%Hd!uTmBbA`;eDy}$Io+%M7qXIt*zAM4JgNAi$xagKYjRc8+`D|2@95+%NJDUX~e>bq2mzm zbL&{_$+QAJciO?#=3%c<%SOv$J(wXnXs%3$4?!2$Uhn$1gXRtTOxIO;pHzbUajM;g z3m!dEv!S0q!#M9BFtVwC8ANHJu^mKdRkQH36um_fA1BJ4S(&j!T-|hAx0srmG9IgV zS2idl%>S?3;_S?=eHTWHyX=JMkCa%0)cIJ;^;Z{p-sV6B>c~v)(%&lGXmHsKg?8WH zE3$^Cm~k^}Z_ZP~s$H|*!hqh+Rk`zc0j1;qJeU0vVq#uR%Jh$@Wbo|=4?chyw9o(j zjyIl}n3$kl)Hv?7RJx0z%%GoNav@FC18XIQFc%Jxel{Hl$QErHqN?|1;UH-~_7|gmo-w_lN z@~&-;X8r!}=dAEj%%Fe=$jyCpa(dd--Y$o2UDk)J-R^!zn6K57xUC(dB2Wr15LRuO z6j!OtSr94)Rlx5zHCbC^?F-kgH;SPhh%;7kReY13A3{iTxZDO(Pl zP!J&%L}5jB(itb_soqq0?@Ro>9`_Q@ky4gwQl>G>!nN$M*S&P%r2pKdf+J#Z4~D{} zZA4i{m;({4W;Cm>`?An8|9+zm~A`6inda-y(>t8C; ze|G+q2&mK$l4oN*cQkNu^_r&#xzGh-n{po6H!cWB|GEue-MPy^mOa zH-efb;oJXgP9^A?vfM|{-t2l@A@bR-GuO@CO*zMLo13ef>I{*+g5PUbd#nbpRMRl2 zMEv6liDHO843$ySQd3{9`KP;BptFoB;DMesd|#an=Hkig9v)uIR4)$*w?x8=aa%S_mCESa5SdRW&#krTgagZ)sAU$w$iSTunVipmC$QRv1PEz@pt7i>IL<~1c z<3sbpO6h$&E_xZ?Tg}Y(yySAEZ&7vJpU`7riqj$|TOALZh@;^ds%DVEB!3zkGa5bI zmwvPP9<)$IhPBjq9Zf9j-Df6DpFP>bScSy?ng28z_FKxrjkY_)6K2SVK(E9Fh zN#`FMy97OABUCO)Jy!E@^}hwTnmLwxvfW=i+ftOz=d2JcE4`j%=l!VUvA2!fp+YlQ zVto%^l20YiC^hgoaUH5nleNL~?!4+=lEGewa?lE=8sWYx^gyBeqifVD z^Lsy6QQe)*>5aW{Xi+X$WEA6-zl?&0R(6)6nFap!&mcdTR#K}i0D40G%aHCE7pg)& z4twZR63Cy9Oh`0fnUr1`IFHK2+w^Bkz)Tc7c}jAcke>7 zS#Qc3aNQ*;uC?se5qkORV|jF>0nEC4QJY?o7?ccWW){(|Vn`j_^4+PbCT2C0uby&X zr$s~t1k^XstBH5C+Q8^QRkp-o-s^}TKh~gB^7&|Y3CDfvKK5Q2cC>V3>DTR(szvNf zMJ4L%T3T9UP6sII=r^%z%4q3W@@O3x#GB@+S;Es)SwCEIaiDtiNPxGeu4;V#d8H-p zQt>CW+q|=MjgC3lljD}CSwZO7ks*`AAjb91x6eDDT8fB(m#-^OQy4+3)% zUguyaIBL_=Y%2wtfe7-I3k)-S;h}xX6=-ifL=x-q%LnJ)O_iT#=yh-Hsw8b zm99gUhrfRS+31?ikKD&R%r>`bA1iP;pL}&zW;C>oFKcSjE#&lnH=WXC9{Ln!GW2u1 
zpH=*pUb=jzqBZu4Q^nybVOgd9hc~#>;nfs35&Zo8szv5ahui&1ez`2*Yy{cBZ3GkL z#;{qATz)N*c<)gz<1)W;$56k!!SuLdhSybVNm736h$PxR8*maWN}1E@-A;X$A1K+{ zS#?4J@sevTw@-9+&TH#HC@-m}M%!dqBd2@M%=-B54|BGl;HarCtxDC)Z_duXCK){| zwzM#E>7`M~Rkir4{_N~Z6qwv0OtnWhd%Y_I+teI$&GyOc^MjyJeA>XToNITnwA876 zud_@_jH`;kR)-JtUuULWHnG$FY8zt*N?bEFi^Dx_Khhmr&=W9dfLz}$3BUD_-nii9 zNsDjazTH2dL3*l5;TsR}w|3^KRI1s7eAH=E81Oc_I>cWcMh1Rfn4cF_R19^_Clq;R2t=$eR! z?Niwhnw~2eZQnx7!oh7RW)BL)H$B--p8P_uXn0=mL4}|G?%4tLP@##!k^?cDmHnOu z_O2VLKh@Nl!`H9s`g0=}=`^c|JmL>#>*9NbQm?CmnaN&JhdMKsnBWtwG4*mAo!IfTPVFw4S=rv0jv0SD!1?8*=w1Z=7sPgP~G7YXud|s9C zZZlLGEz-#^>3z*=oHxwd2x{6lv($Jc@Hk8x)7OPUKb5BXy9%p-4QZUZ+^i^)rOIO4 zr(>^`&3#@uzQip@~nt|HM#Ib)Tr7 zaj|4BCR&!oBRi5{0@9aO!+DBhyf%@w^>vx0Sr-31tkLyV!TAqZv7)3TpCAAw%9?+wIl~sXJQ+>0?!B}xxqFY1haEVUtJ`DP$2qr;eJ3t|ok~Sfvm;FO zNIXFZqo`g&a_!)W<=oQ3esKnU{+ag6D9053BDOSZx4ZUxH~x_7DSJacUeWiZJQ59o zLlS3X7c0x>bks~CXoYsAX6(r#At7=Dx|BcRlDvVKnVZGM!Kr})=0>w*2THE)E>|&q zj!fP)Wq(cZk&dIe!(B@n45*PwHkvdKk)L2qo1`fX%`9E1I=n>e=`t^;(P2ZyHVE_Q z*pk0z-BYB>@c)6 zM+(Pw+(cp|xXAqYI-Z{> z(wSm}{eZ@#6Cj3%Nt%?r_v-XU3LzzbET5(6H8z3&;^JE6zGkr+QPbPsIaY}T^cJHj zVSZTrf&TZ^?yB1Z7auw1c?4_Md5(i*)z~Oso*c zjaqso#LiY`Jq@H$B11;cQ*)QC_o`(AK41^7;F{ z)8p%do0g$D%5CAE`Aa&QfSxH$EH6s}YBS(I@i|{)xhR?uQDEGM2e=G2qrsA$G(n}Y zG*y&ATNx$~MAmSp;?wXD@Y6UT^+C<}$c28yBb7^t&}+a_beXMz*9AHC9!D@((EgKs zl289Lt1#<`P|(Z#!l?J*!-oe&tuef6ycJ)1dgReDiGINU!uz0gI97ckDoWr&x3-P( zlDl!;J*(PQyhH2B6EqKW%)3z71R$}q+|dfkh^b>c5pa$2o<^5@n%Kob)1}Y&!)6bM zU+^#}<@-XtNu*aW{TcmN8j(b~(RW?Oc6;OIYG(rtRj6LZ>R4>HF_VzTOlF_Uhq^_>cyK%0=Y;>wev_>5i z>3fLy-i*zI&yS%*_6_h!HglV3G}crt|9B7b-CNk&dCHy3IqDbUzcNNrWzPO+PDJdS z^b4`Lr806w`{$4AGjX0DWa8sukcUgQJ5jWqeC+MGeoL{QQ&C}kD&eK%MxdTgc~4~6HHRa?L)F1sFKz1RwyPYctIrbIw+m}W198yE(E~^s`L;9 zOLd_|5cwtxR*btuf`mf^yHz}-6n#!pUDGMX_+zr+D?OIg$+3+xG4TfZ-RBK}xV~J@ zD&}xM;~959=Z%k#U%I0d423uhh6(||J&_0}=pU06^Uhs-;5C?U#B9)v*WgFmve1EFGtx(OEyc;vQ1>qCBgRZsI(pPnGSXzaaV`-KV7XuZ*}u6waB3`>fvh8q%d zzD_LfZnVDc<@C_UA{&1-u(~iJfm0I3@W;cvP0$_%jJoge2N1T%We0O1tt4f&z3k_ 
z!!8cHwUNP6%SOLGiyIFkpMIzK?GPR=MPw_LtLFqhREp)WPT02DqiyQwFlzn6Sl=KW z#St)8>0&$c^Sb}07dANn`T-!`qsjOvwW?tFwA)d19F*mw!~Dv}tEcy!=A#{XBp6E8 z>#PSW5@lQuD~`U|iJk-APss1aqoSe$HRALE+qqjgXQQM0{cwD-NS|eYDp%=gEXbU{ z);sNA(%!Pz+YvimB|)P!@gN|cEYWSvn%go_#8(&6#rAoXO5fw{^#}d3s+{kcjoMQq zvs(_g=V4~F=f>4S)A!w9t?M#q=w<1@E-6?&4_)Qf<8a%;L%xrtUO|NMSmqVbAb+!b zxrz)>27s!*xt%(L{x1R~&gqnMUmLW9284#93qT_%tzzaxO03&dPuj57oB$M2#iCI% z_*&T_U80r#Xf}AJ{=9*UyV#$j_3=%X+3fs$BM>RXo<5z~n5>hgVfg05xqNa{emi(=Lxo*xvxu2OM5VQ4t^Dk`PgM044Mq{V2G&A^HT6fwan~^DqwUVs5gKy`N?>26%Ji$V6!?@pDG7<@n_BDqX9s$v z=WI|uyn=zD2j&0{zy(0zTGmF(dxgNOse1*!&8e|Bsv0t90~K6h$j4J;V&`@ie<4;z z%fmo$9x5seCt5(=k+HF10tg9!Tgk!{HDuBJC+~-J)(3?%5_$~Hf%Y%3$NnHZ-D%9K zIS$l0KzZRrm1p1D+JW)?&vP_+k-3|tN_M5oz9Kd~^Y9~hVjxk-!gqkQfl0A4uJhx= z^@HQ%=ZPYia5_I-93Vl_c6WC-?M)Yk)eC#5K@<9pn21Qi3yU=BaV=mjP%xc+b&{o8 zs0*ZuBtu!Q3+?o(1#vn;Nn;vNkpX6zE!`CzI^xun!h+N?C2=R*z z2naABBfhjhUv?kNsa;i7%X7awa9Tl^krVZ-+;QCj-L*Ho7S>U8(!2x0p9>LgZANCVG3M$?kEd|3>uXRio#K8WahhO z@K>y>3$`N%YJrlA(CZJr=11oHdk^^{Qhe zeF}>2DKHff!D2NQ1-MdozLCt)-fG5BOBNMaRrw0^l@P4_zqq0$_)9*085%Ti!CD4E zHQwBuj@w}+4u0M9{rhw;&U}?#+?z?s$siR!-x^9M4x7VLOuOYenxc_E7n4|BU0rXa zdceI%7AB@ZIJf?%8q_X@EPg(1%Y%)*N9*qA$g{!Q%%`3KfzKoBsVDQ>lMhfPCm=uR z&(`?p?@t9V3cPDhk5B{B$Zq)1Z-CCjvx7HH26{@p1dckrXs09C2?IVqh_%4NJOMaw zVZ6G!OzrX=nk0UIGNaSvtoxK&vW_7%jECP9tP&k)0rRM;sw%J?jezaWIShLONGeIN zS;+L3d(wn}`qQ<%OSW2E7joA&A@ExSUAz7MQ8rdg&(%E~%o_uRW=j4M-;`)fUeHTiz0eWEJ50FX z;{z?w4}Pj_3Gl{;sEUU*k#pL3$|Z`x)>^8lrjQK%tje+&QWhmBjDv^w#FvP5f=CiO zmGiS^{miK9CTb#->^`1unNe7>J`lq(N{&e~=DP=mPk2ih#tm+t1gI~mV za8f77Mnfq)EO-Z7+^anH2KIxIXycx3F9o%cXB9AE^DL3zSlgRWA8 z8dbpE4 zGvP)tI`yA!0#g781q{ebf|rgCa^q~q$yCn!o)}}M(wKZ7UQvO?aGz5J(zrqdaR`o$ z_ia2yNG>EFdIuYmWYx!DdrZZiq*eX|RxpIB50z>lt9i2cPz&Z57UF;w z$s5WFgPz^kQw7=oNEwj$CpEKFSr6ko2f9cm@r-DJHmp_NG9c2eWTKie&F8LqWJRfy zVhiNB?Y};I2|cFjnACQ;%V(QC4r^k9U#DI3m(!Db-ch%84fx)VJ_;i9(0r27 zo4;C2ekE1aSGv+mJ&%F=^v1ATq zIUf<8?u!=Za{55#p;fc6OhajzuR2g=gI`Eo^ws3plI*xjLmK!ty^R~Dq;N8kd_jNr 
zlj-;B+0GP@2_RXmNUlczPAx60?f$rXtUuq;!h6Fuj<-2}5e~(_vN+~K0(zZSwG=7u z--n6kNaS|c8F(CA(O6KrSIiL3P@wFvdm5M4WlZzqxU%@8w|An`Hf`*Buzk$Sodaqj z*3=9V+Hz7Bq7{c@g;Bof&tffx#phOZTwG-%wX}B(J7U^`sh{8Cb})Z;paqfd_ve3Q z4L4RIUees>_(18)OZAw?evZy$F}!_vkucnoJzdLO-3e;#cX;o49Ir1ol>WFsg2W9j3)rqJd->wjRY{`b7|1noY;&ztEXagFW!aeqk zl@+-}fBvkyN7|2$5`HbG;kpqFE)uk?l;0SCO;b>nf?b88fDq$#M}Pj9tdN+-KgU@a zmF(pdCX^WEEYifdoGy2E!qW6;+*CC+r2$UJU2?aBJ7CRqA;0+1NUOC0(#e5gT5jQh zGC#Pgiwp*V_Zbu#+5igSKh&%$>-|W%Urv3b`ta$K?xrq<-hgFszH!&xqupgu2vEXV zOahsS8v>e+dX=;owQCS$FDFpINY9+Sy$!+MWS!S9prQZ+xHg8@CCZ;b`+vzCZsZ^T z|AjeBgO#O-f%LZ(S-pyx&o_w`XarvJi>|7JOTXzK8_R%qQJz%talAOM&&gXse@47s zrAquThLnJypq5rn{(Pf#w{$dg#b(HlIPR1Ym}90cn!YyE5W?^Fz33)0 zVvm=d86UOs!p~f$J~xB*er&AcwWIpa+AZuo3Duc7Z$PHiR-WE*jVz|S6f$*wjCmk z)+o-P*_JTw>cQ5grb)1ghTlD}n2(eM!1mbQ?bp&{3Qv8Wf(5CLVS5y(zM&zLUc;wa1NmUDy1`}n6!l{!os3!mj=%#PY5Y~Iu-`XoQiLy@tbsKXOv|q zYlVC!x^~pUPsnF^!@v5F5L3Ppe%0U*P3)U(!4v1Q#sMgQy-{lx zM0gq(raP(@W|wqsD!zE}2$ClBIH-Zl%*_P`1$juPBwCGE!AfFf^fsTJ9z(Eu5xB@p z8hEx-Kdx{&tcYoABd0f8j+W&)u4@{(1A`c{0Am&+)4oh`_#Wt(0V)84jH3NhAhoU7 zYNle45s)~gs8htMOFrd3_l_~V1%hvr0wtR?uAt>gFn%7xCj8HyJA_FWy{ zEYGW~X)j(xR^T-%ht--P#Q~!2bz)-Lq)Si21%-rEip&Y~^75WJ3qbH$0*l2JwjqM~ zh{&6YiqIF`-@5;qbIqLuy-VMT*|$n~v|b{~7&h1mrn?4G-;)BwOFWe}b8fWOC7z2Y zCx$L%GZ}oP%s=-ZH+CPthvwxRieB<8R#2!i*&+`Q|JmmZv>}rASU#WHMz7A?sd28* z`-?Moh{o-)tIiZUjSE>UNyQzUO+Vt=E1>%g)!>Qg0)JeG0zLN5+GFpz$!Eh#zl+LP zcaW7EYWZ))2s^%d)LfCouIVEEnyXg&`Kr09txI#ykWXye%cCmHClI|+-rO5zP5rU1 z+-<)kRX%Nbo@Q1jUzNj_!w&$$xZaR;kD@{OILq`N5?l-v`3yy0IP|$MH+5SLZA%$) zu&-T{XV<{>uKx~s&I7ug)@M}#z`qBAr^^RFKL>QHFnYDQs8m`iDlaX!!*@W3e&o2O zZq$&4lduKx|G^Q_CadP`lv(f}p4<;o5CwC*y)SOKGH@lGP;7P2KGoHc^u%oJ%WW?l zc0sW$=k_EzaB`i*K2iL(Xs;#3Q<^&3zDnd~XlA6{4zuT%(ye3Aat^-ov!m(0;p(gJ z!(&2;eb?Lfz8w{Cp`9oio7XPZ zF@Cwal{;)6#a9tYb0_|mD{7Ar?N_KANXPPF)&$%Wy?pud#^t!Z#a~{)KKKO-rc0Q_ z72VG16j}}z_?uA&nT?GNa5>C}i+s!1tJHWiZvm(d5YF}{67`0h^O@|6)5n&L=DU8a zieAw2AuB68QPBb2AR9UEr`~VhURZJ9mJFqfhlT|tyY=AUAlG_Y#D8_yxf0FbFRtHr 
zZ{W7HC|^}4AKG<8_kYTBX`kplTV!}{?(L(h69lR|iAcWaC#`R^PYfVfuyO2o95qlK zEez91R%wm8E9zX)(94&WRay;!GeS0uLgIHA-P%IHQL*fZ%qLo^4qy9XR5i$dcCW9B zGi5cg-akFbJ8{96KBP7BTS*qi?m;OY^*G#*a`zUw5q++Zu?{LCh!#H{JemQsse5WNee}4o_n3)gG^Mg`{Ef43#@Kz#J-ISDN0rbH{FdESf6zRbXoB14@~gYr^}7Vbv$^{Q1wmKsLJB zL>2o;|0fP?TFzGokGLJz^kFO|Fo7Y#sP9gagcc=-wPHvV%m=@{16#^$Gb0AI%}IwG zWT66Crlld4dfAdXl_sL8tJSJ*ALOOiIJMZ57F1UH9X5f1W=t}fUh_l!J=bx>1nx}T z^GK~HiS{A4N?uLUVPn4X85mGTT$LL9;-J;gPv#TvGKf3*;ggW-iq$32VABu`uv0+q1)GXsisl+qA%lz;IH!S7TiK}gV*Nw z1V=0vsXw+0zr0jF+78Vpi$jGOe=#QDEsk6VCu(3$sf193@;Z?@8*O?2x!_|>HiSr} z-!JzD9P(nq*FW-`G7tA9`l`yJJo2ZahB5B^=)L-%oLOX@W$icb2DvsBfxtiY>0L=b zgu>pSL7I39b!&%xQ8i-)F@oU66iO$dhKjLbF(Ugl{O^h1LVtj*U8uQSpAcrf~<(@t1^Xk9%z@Lq954u|j_la^rORVjvEov1=$^X2WA8klC zFs=DkAmwV!1>*u${I=NDF3Nvv&K@B_^23^0mo$z4%gzNiW1oL%_lvDsNA6zXETX46 zhtZXCUpQy~VvYP~(V=(wGcz~43+!Fy*$A-JC;Gdp5wiIj7G%-9BZE{l4|nsbNmm?Z zSL=X(TRfaMXg*-h2E7kJbP?r0Q7=Z=+Qg>&YY*p1`*+83lnCIx9Zwpw{(og_ysPuf@ zPM>>KyEueI-JU^VLi0NW8SrBxWGX70OUb&;%pxz@oiIlRXAZ{U3&ROh{vp&fnbrt% zm&8lV9VifLS*oo54q0l+nNpOK|Ja8}TpGFFn*jBoNMLdz%G<6ILBn2jejBUwoqt~x z4&((Dy)#;0_r)a|V`0AtzMWgwCFG*+`|m8%a6`d~qN@=h_*7wTy|4P;Vv+YxvwOK7$FOxj(2i$|_ zLhf3SeQE#a-*EO{M?{?*d9@6+D=1l-8gB)zWS(JYxFus$4)+^^1=e zy_Fc(!)8w^v@eFM3?f0k@Q}7{t^U7RtpVnT)9bIch4kFMj7E?f7SA1r(|=(srza$& z`d!EWV_U65kY*v%(A!oc8bz(p`Bp9K(r1A1jW{oqYc%70xXO^}fJsK)9s34*zoEZB zb**ZaW@x%pGb;jn{;ItguUHNPrFshBnT{ZlV- zQ){zM{gUrV!2~;-#o!erPgA8tpEciIwV|PnTRB(%W{Z;LZT@NjR|2Ja)BHSULw;L# z&GyVvN##mQZ`;^vtD*L_5|8%0D@H*odCtthO@IA$2G32UA`m|EoBp9{cQ;d%UVo;) zVN2gFH_ei_$p4=8zm+NM-Vq)?h`gaeetNyWiX^gq0DL1cp@X_sxOj!+W2{W#du(kM zMLAzQ!CEtw*N+do1X@B)t2gdQ(#G;HSZr{K}018H2 z7me4*9NFu_;VvX!pK9wy!Z!|=H6CbROc8m)c+~W`Cqpd~fWs+0mhSWTW5R5)ZYX&!SlxxTqA_HgU+~)^b8YwC=6diN@{tOPQs+gU0_c9AUAZG+eYj;%WHsKtGMFFE@DmVuISM3g1DT6 zr3gDeW0^)Q$26~ypU_djz5sP8pmafCho3<^;Y3fwmRgsHdNeUV?=78?{RJ1*VhGu8Ze`VLmJbV&8}ns@70? 
zHK>#mgfVELPe9G}$B!Smwm>4bvFVBY>(q&II*K1UQR61(JnPdUL+ls6OGjy54PVx7 zv+QRZ6S-$09FEs^>$tJom$p*g{N`Utq_O+%ROkLKrR z>69xzd53lP)LDkN0>dwde4fs*VMTbwXK;Om`PQ|bbd~7*`9N{;QXhYR?Alrdlt*fG z>e7-%V+_?ydV6>A8Pp^--B6&3&SL!`VDXPz=A;nDzsfF(B)%C}w9?;`X;H4SLFMFh zT9T45U^1}f(xInUySD$OM&`0yS)!VG8U(N%g3g-d({Pl_JrhV6D z2jzrbTJFfMt=bfey1$%0`f8wWbMk|bs0TwR<@uN&rZaDE#=A#02Z=oF&JQ|#$(n=w z`2~C9zAfh+9o#BeAG;iLi|cil3|4|^h`^M8>@#dy1>G&h);V?4exDbhSjRS>_u_;l zye-Dysb`5tG>_A>(V5AK$4Zq(f84fN>8I>*1@ zlpLKVMVi|Yj})%rOhwU#43Wgyk5r-wzlft;6*I&oz}`+Cd_)V)If>pTc(^&->4j;X#C#QADYb<**IRRs*sQ2GKE}y!!y&oS(-4oJXu?vrdH{J3=>^83&;xm^Hs|sv9e)kc~?L?MisEiJ=waEB; z<^Ic1yb@K#3}4L1;$QHtfd`nfhA$|ut&m=FCJGZT1`~7cPdAia)u8x|1cWr5?=;W2 z{}Tu3P3SWI^lcf^)K43KcX5sEQfP=|n z`)iIkzHHq4@hWMp)PcEAVB5L#l&uw0qt7X!x;obO)RYe+I0}oJhCb2--CUgFa)D}9 zjM3D<~Jaevm?BL@N8>Eztp&qIZRy`Y2k z=Zrpi*Jn?sKU?b&J&|bCQ`v9SK$IJ*u94dvh*-h=91~Dmu@+*-71P@Dj>5-46eZ9pMJlBIGC|VqPB;q=Z<+CHwk3;k8T2Dcx2v* z50NqSyJ=UsQ@1oXetWu`yQftNbnguFkEro~Kbvq7$?FQigA<0%1mmNCYAi1Y;cZqK zRWyZ@klj57A=RR9eDjX8^JN?_x3rp%)PyEib()Xb@D)i7PZdU0ox~A)TtIz=u>|B= zTKxt;fQ17b<1NX{H9#PHKgR2l8VO_?MfUiA(21j#ftVLLNRNR2&wg55mjk6ViCr#C z##_EDmOVHtZ2qnbKH26f8BL$0Q|KMEzR73SpAIN}6;Ao(MCT`N0Gr~$|6mm}-s+Lb z`)xlSd~+f{vGp?=e(tKiENb|=+W*^F8likfLNNYI`%m5Y_6#T0RO-`KA$^#)d9L|U zd=JO7x0<~t7qA>06BBP)Mw5cuZbQAxH0ev6R-XOQ1wGS6Rw0FbVwxCHYq;Mk z)wiGEK_5Qr%DY47C;p zo~NTW%ZEgh2Xwav8X0-d!@pUSiBIld6j{yD+i1nxnm; z^suF+Nvz(S=Kfbh4dCb<*w>mv;n7bqrO`0|#`d8iR@?9;fiTCll(wH(?XI6bi6e_a z9gH-DB<1Lp|0RpQjLks9bdM(Kd5UxsV@2IhMuza-rK1F!t(hh=R|-P* zc2*aGf2VPB07@@`3l#ijktx0eloK3 zy^L07V^_5_47-z1-K$^zi+}x$N>CpoV<2cz|22K=_o30$R`A*SMGPDgReGL&nNihJ zF*pZED(pzbD~NnvCrm7Xr2(ArCn)X z5etS2O2g0@9!B#=!;Wi)V02G7j;SKp%><8&ED%3`wthEfPg> zn6A7ZJ@9U_x&mqa_5GnN)ynz!!eR4vpMGF5M)YhOJ%EbYp4sS9{ZMQ6Q`I|`Dcb~l z90Y$m55eHIVfJ_vQu~bkRK<&sD*4aBTjcwFu~vm>xl5LWxek%N;V2l|%^h{7jGR{g z+96Kk5j5XR?^u*LA6%n9DqB*XR@PshIfiNUyeI1>vbFq!%UpVUoGk;2cblb<8q1Y_ z4ad=>f>})J3&a^JCn$TxTcKYmYhhSa(|yb8eva-{bc``5tWId)V>yc2yL9 
zd)lvM-Xj$07}YxV@nZTS`kFUF`QfZNm)^ZoX=n3JdaYolHCY|xf&B5gvlB#VX~+g` zmDvj*$|8IIx3(H%E4!^)~KYOjH$BS6~P+xBNQ<&CFP zY7Y|p<%Ydwcx{EuM>0-GY{;*zC1)CyrPGcDeA|lC`pUE!np%Q<0p{uAB_u0sNg8VM zs1Cq@0V4N`L&xhZ5B-`?&8fMMyzPk_x)#^M4u=ou-d{U<7tI({`sy&%*_d&4$Vc~N zd7kk6$Y~8^Gl+VYMOx-OqE8jg?r`f-T043XT)BefSfo|9L)E2r-c*O})->K=v?;JI zqf)F>AsSZ*7ti886MmAS#8Y(fXGe8wjO%#U>Dd0p<`tWwb-&|4$u#$^23jL0vlU@G zXRWh=y}ZVv-_JPJ9vqF$owItZcgOOR56SGj5knob>m+&p3!@x;+#!M9Y8)e53{xgbf5ovUMVf~3N;2Re!+wpvTXtQ&@D5?PQIC+Mj*(I8{10-q_Bhcw930t}L0tC}rB368 z&OH@>`Wr!xvPlx85v-&LfuIKL>o~GO1qFKTj4u1`UeacCu=FvmztR-)d8LT>7rNG# ziN9BBoPF|_!Evr+Xp6*}u~DsP{PKwgcr654uL7|7Pd)=6!DzQ2J2f5B=(aT@uIYAY z(B0jQ$W_gH`hRC)Ox4QYpQ7bjW#K$j{RKad~fLGpfhpf5*eX zgZUZ`e!v+t0%9w2Y!s3yGBcx(0Mr?nJJ$f5N1f=57pgcrS_a%6_){?U4KLrYO&3lY za-7>=;K@i1^4$Ii$Jl6f*J8eKvs-l0sXH1D8VimZ zOg6!$ypbp~ z=AFj#@@t*n2_Fw+9=t?5jbCEpMH7t2ABp+F#Qk7%X>guIi=1`PC{FliT#fMjEal#klUcY{gfIhP>V;1CW9~j5=?AbHurmQh7g$q-_(+2Jgp09||%a%)u}QP{n_;6Olk>h*2ya-XKR0Kf7cwsHTH(diqCm~PHWD%H=KbY$+E9h#Pw zDagws3mDlu^bGk7hS_CJb$NQ5c1u0tqv|>Up6)m+^_hx~G~Qq3Xjx*EL`ikD5;}6* zy{wvr^<0Nf#e^EotM9SZqYEz%UtQ2naaO*Ec(@{ESf4Ku5IW@8uQTCa7qx$fCV8uZ z&yuZ?V(kAm1YzcySV;dNMz&;qovGu_!gEPU?uPFMb+_1z@ARg#UecZnykobraaZb7 z^NrxJz)fc#H`i+Y$?kYhcIVg9j^`I&+kPA#D49LqJkXQe+|FnpI~~urZb8fJ6ZCm6 z0H6i|IOI9?DK^C{mv@ciYJy@fa3SOn+9`71`q4Swx%le9z#4CVY};=;kCcPOi{HQ1 z=tpiyd!zMc_hWnp@>*2hbALmrOI*WNVURY2I9+@G{JGQl={}MK4~$%T2X1AR}-6bGOD%~mF-7PM<8y205?uI*Y z@BKUH+HF~f z;_#@|y16`1kKDlv=(hprPw=(x7QF7RNPq~yE;Pe${=|`bfTsna|H3O`rs9dMM&ZH@eeYgj9_0@21br+fk@N79p+K$<7d917bJJ)QVEhnux-rg9k zndac@7){90omJ3YRaUc056l}qpSn636Bc2yWedIzV;Gr#ooab<=wDQJ$U1u$N^&$$ zmnsY^BT+Y~&`n7m$1G5I!V0UJcxSMeT>(=jE^mo2j?Jf&mH$$fOE9-_olEc9mSZa{K{VNc-+BD*;(BYbnD)OmlU$XeG zlC2}kE*Gf@^|=9_mXNSQ@&vR`=?7v7*uG)lKFPBCHOY)v;$?mpTc>-5rKJ!w#tY%L z^3+zVaxNH!6ttIOH~&?Arv3#}%TH^sr|YAcO2xKKui=CkP)Rs3x&9J?tTDLy5ySXi zvq9Xz3X8ry#nI$$ma1pZNW7{^>c$TKh)Mxg*ae`R-mkEndl1WgHR9li0d?YNw|>tR zNlgNygv4-3E$(9yK1o1`1iK#&3W(d~u}PLpN3&S2s2-bRD*>wy@g>X2sZSG;X9mE|O%l&dSsdsKe7Id_tEG-g5-4 
zDWH+IWY9d6J#hoso~^9^1u_5oyi06h`k(VInE76N7``li@>vOUw6SdlR@8h~S-Ky_ zS#m>7*(BZvTIu3h+hdoP${ToSQf;HW6f}&j(TMx9e)4rKa*r3)GbR9#)L-{Ok?ppLFc`FCcDmg~}l! zUFpsM*@}jqD{P&!MI^bdRMz6b%s@&Vs4w^%O>n&&yIxvK>Kvs*&|!g%%^Q@r&Re>2=q$*Q9XkzcPA zM^+`aBaMH3C`UjBQdS;u?yzb%3){Uw$98YT2Nfcb&jUa3K^`bFNGNsEk1mfl+u>vR zCct-63dxktwnI6Eu~|$<0;|DyAY1Iv(*;AgS&uUqT(<*QVADr76T{Yl^HVa3>*UL* zx;;|&?3M{4Fa!7kiPXBsy%o>vW=s2CstABU zVB|I5+*B@lp&1T+^f6GE#N9>O@ZoU&9WC4vPcaa@-R1p zG{vZwYu`R?T;Q%m+faLQV$5C1sd}9Z9inOr#N}+IJv6xK5CgXdQ*sTzGb8FPv0TO_ z?QT~~1P%GDA2kL_(3V-uZdXszZTYO>g`?&c>(z{m93gNP3EB4k{Y4~$P)ihOT^xuk z8;4MR!)Q$Rv{=Ufys;PnzjJvSCpkN3b$>*}Wgw9Lls)kf&G8vJbHWiQ;M&892s+4pBJSXEWI$3!*~ir*0VHEH!J zja%wj_T&ybUy6Lhn-TnvjjlB79+oo)UK--Oz2ii8hQl$sl9$ZEQ~W3CsV_Af6VTuM zKM8`tBAM4{?~ILK)2UaHGcynCb9>(lLdOfbQU!ef=)#veAiyLM4HS?p;4}jyIYD5w z_k=I!6Rq~Wy!3yUYemccgj4(b`3(YN&=HUN7a;>l3+km4gog$Vh zQ%ro{{eC@CFC$n(jispbIfk-5Oqt`x2TOT}zqy4#n`Y~#*ajUj0$JqL$2*+n_^sj- z`Ohgf&{1CH^<30Q2z};gDzA&yg$om6AiXm=Iac&_TERRFCksbv`{ao7`BhYz=Q%Bh z#e)WiQt^f7sUnO&jcB&?iG7-T{JW##0mp55xNIhuCx$SZHOsok0Ns8Q$-yibaA1I| zVrnxc2YU`bkXfhs=2Sh*q70km!XWjKO(x1!@kP_pi!e{QgGl>rtI9xy&+lg4S8E9y zD|!xLMBs7&9=Jom$V?m2tZhoGY5b%7lK-3vZivIo-7d+_l@t$g>29LFf7iJ@R(MbX zda;1JW23pkX1)GlK<>f&T|(&Hx)fsxhC*$qM4|AiUHZM0LXx#CE~n!j$D z2Iblzn0qX&akGHyLe@AwBCJlZN+pwj{&C( ztXBkeR|>Ee!Cc+ien_BcZy(uiF1Dw&&nSrkbsKc`s<$Tql{ES~QBr;&#imLOkO!BP zK##OQvpyQAGR5~5AIfe1(}fgOjVLIf1wa+g4$86a1$nr^KNQ(GfS8epDPFkt$H|gT zX}$Fg;Q0-hfbV_K)c8>`GRAlV|7rv{8_Q(fJB2cSj}Q*}^Ez(g(`3K&y@VutwXdA$ zs8Qlt9KD0Y;?~b}hF`r^sJPo)Zjrm$zN!{~()3{Tu^i}PrtqLLV9JiqU*c9hyTJ(K zTfq0qdtG1Ef|T;+C@JCxqHY+?)-4~S?w7K;2I64XcdEri@=Xi*Iqvt(ivmMsBEh6q z2V*dX7QOIE;&#$2S7TaT*R6ijs+2SYICUVUSn#VEHlPy&f4F#`=a?~T!%;sspm!<* zhzTH%`d*%N_VrNR*2q(n14qgZx6{bqplq>$P91<72234R(CG|zIZ(ZCFV;Q^#s8qP zqtgSyvADz9{f~Z{cvfvJhqFm-H!A?MRnlP`!*2y<>gr3_pBvStP z>{0g+zEsKB$|Zf{8I{J46B9WZH1h`q*LW3grtg-|v}edQ$MW7O8n`}LlaXZ}L?FJK zR2)xr84~Vj5;j zDbPQBUZ~zOzJ}ZRw>pUe1MTq|04q#2*>T*1f}vCVLk+6fMvQD^EEwfe?otsBXrxHO 
z*mty3=$?GaO}{R#4xYCB#WDWgOSw#L%U@7*qTx5k=#TP}BgVw$M>6gYL{chtO`{LD0GRsXy}mYImhBFcqt>-*q9J7hiQC_T7p zMpd9mWLjPPzOSm1_cuCF3y$q>+^Yq@27XAIg6Hg^57y`+0&a7-Xo!jvU^nahTJ9$I z0Oz6bkL!!RaR#VagaWDRyb|^MT2`2JE=zd?xr&G?7vI8vxSpdLt7`M6+nT6BEBVU8 z+`_PcW{i3lL8WkxE@NyL06PKN(Kv`%&jKjY5)#~g1SGqE_uMWk6?~8PP9N+KTD=9oZ9`c1i z2l27eRgRcvA9#G8^>{lNcB*Awd;imkowbyxqAstFnK*sJxG3h3U!!EEpSBM2XCs+e zZjuD^eOw4e3xST20>~_uwCWYtL7y7m-F=9_BeA;L5(Y>AA6kq$+Jj1tdH3>3at+TWQuEjFISCDYKa4+zpv}-hSazJ zH@@B+TbvR&ZJOV#qlD6PBE36%ayfX&WPmbPfnjuTQvBIV_8|lS+J9|Ncli4`=Y9F| z7Z%QEnB{Uds^`l)TVe+dB7U3r2g+H(*BeG@5;&=*!nlH1qOi8+Lu}`zaQ>V79;Gj9nNDBqmXElLTI3O zYZoh2&!-X9Mn75(sAVI4qLDY;2;sVY!C7>fKXoD7A;T-EpuhoVd&v{!ki4X?(C8wd-Of{q-4tsQ3C60N~8-kAKaEb*)QdIE=T3om^9@Isf( zy;%)L-MuV7^iv%>*&f);L2FytZNkFpu=9%XMA<;|bFtto54aFsQAo9|<{ZEiKVh5$ zesq9HMw)BVmvdw#hpKv13z%>1e=dNKl^*ez1rjt4G8g1ppSi}y3e$ORZ+U|gvMl|9 zGUAF7=z&T;y*fS~UMRzQ_U+F+0#hYP4lRqzw80eipd^9A3Z(_g0ZE#J0EnBgnRSRw z%AmwQll)(W2ZQsv(^lZ+UsPBhc@N+&9BMX&so#yI(e`x$?z4e z&n=`kdapL|u#>7Yk$nEWf{CXtO3t4cG@5$Xoxzb%TlSOhzc>mTDFvo2#wahnR9mVE z(WKW?dczzyny@=MAC@l4G ze=2P7J-+41*v;i}f3$x??U0YPWC#excyE{x$C>Ug@6Q8#SCS?p@I*E}f#pZj>F8_! zQi^M8@kjkaQs!!06G{3%FOuY|`>PdKFJ_0p$HT`k{ry}9^L^{Gm+Y=X!S*aHMez(= z*6e@AS7ivHwhvCzA^C1-pMn342vR7rg_|n;A6f6$8J#Yr^A##k05_+I`CO!7)T56j z14Qfc)FqS)(akv_EE8ZBmJHsOK@$THs4f_ggXY$f(w|;+%G(~Mv}m$+#VuU#8(7I(^kSK;3v>V-kMaN!;Hli)D%*i}wxxRB#A`#8x9jTYenRKhJe5 zHXq^3?6(xYAFPDE-a?hGr^LIm#&PEz*U!OP!o9al*$k*Td@8@ZM3o0K6VOYa`QR_I z)$NT8pnz=#Acji_8CxkIw>=}>d8M@pI8<`#t!e{Lor^@_a~R}zHlch+sE7OAq$;+3 zq-lOTsx5V>2Y6bXMM9mb@HUGTA&k0`L(FT&hK7i4AqT!I~eo)gsj2(Fj-qLGm&Xesgq${s$j!Ii z1^yV5`%seW!!6tBLe2M{0!^3nMS&fL`bHXrcDQt(#u<)OD{Xx3QnoTOzD?B+aI%v! 
z{dl%Dq0gRx@S)B+`J$O#f9wH%X+DSLr=HNA>jrZ)7QE}WXrh@?oEsve zPllzR#?TGvG+KI*o1JIZqgE)_WHa>Xf(7`DF}d1Y*5WnQ$4+9Trnb>$ge1SoH)vPAg z)%EkImfl8-PcY%>TeXnrc4#QVAP-MsP;N{j!i3*1p*GX8UpkE6wZ9MSdR*01>yr=g z@Fn6(#h2>K5aI^qlfTo|C2c1r`Zb7vDAv=vvXmJFA>?&QRAVzeJT8H{m~Jq-2lJ6@ zey{;*-UZyT&bPKNJ}Qw`3DV%*ejwj&a{Y&QyF?j#z#c4b`^mDV#MGJ(({mcnQQ zA`6bUt3w+w?5@-rl#LfFKWS8w(0K=G5+PwOFGpb9I9b8N2~S7}(8|x0IooBm811ap zH2JKqL{*_rr}RXnt+;V5<7h}OKPa*C&$T}**}Uhor|jDt1{jKlaTO17Z_+Fq?J5hN z^xUBlCFHslNpGPHl1p}F(bcq`xD|go)QfG@Qcbyd%*xQ6;9L>`Wh50szW15P50h)G zwYm3ZJ*bx32ggc;)S({b zVBaSqkapv98lq zty4Oc2yHtU`NZxv8uqvHOnKclI$WR@j_oke_aolL>~rHYO;bwR$#ly`M~C-|D})>4 z;5Mvpuqu#%_i`~keDl&|=$9IB=Dxq*HXAx%w7?_Zo0U%`W}*;+*^V-+NPx7Jb(I9B zicLKczeAvb*@X3doth|GoxM0Jm?`t0^l(ek2a=-H)}p|>9e(%N6Lt2O|0sd`fD-uo zlsUc9$12wk4-q_Xe;OFda}-pS^X7=ZR7wa5iizBYfjh>y#Z+)^j@EXR`BZ6=rrTLg zLQ(-c4Y>2F)+}+@+8rhCkQNGZl6X8;PkUBBCf(hCwBe3v9HORowqpP$orzN7pDyCr zG#V1S15dWVuR9Xj^k-!VT_n~c5ovvSFRU6|-In+K?Z?wmh}M=$dbj|W4eRxmi^JYY zt`ai)UI1rvzFX1!kexN02kD}nVeicug$e5rRk0Pf-4FCvcO1=r+FPWRNJ~Sn?C}fFi!)MVSdz9 z5H2&gZbFlI#OlWj#lgmZoFL*s>Rj>ifl!5+Om}9`eHoKbDs0mk%;%t}P-7F!SEPD9 z=s>x;z9lMj(+3Jk2^oEKy;{Y-^JDRmCI^D>ekvBT#Rs6uyc2O`r;BHGIGwR@ z1orEoC|qNlF`UMfWJN^0^7Oq2%wd@$r+w1KCu+ecne;^hR`0SW(#MR4Hjg)Y4xIlK zY$2NyvzBD5R4IM1h*&NKp7SF~CkIHx!L|0zMns%1o8jXGM(|>IG=cuaU(R-UWuDsV z6Xfb(5sChZ)ycp9@8ogCS@E&^RGlX0!QV-)l8bY!+a`vrFyQ9Q?e4IHGF@I`YqYba z^#?wqdH&Y=rBYB_&-#EY#ZO94A4Sf$W>4xVX~w2z_+UTWe#6#hMHG18`d-{`;o;Le zj^J3to`4CCMZj8STB*mL0Mds*+71{}kL4?T+)Eeawq1HV!943>+e1ceZ;XlUXylo%)p6VNU_t1tBYYy`@*d07huMD+g9Ic zwy^g4kFV`Ss&mX8_xmFiEB#9Sk;y@0Knlbu)q77FQm48jZQ8=Wy#P%Vaj@>iuVMb)K$tA`X=V!NV| zS~irZRXSJf0YGiHgV3LI*4*yCYGyOKa#;C>U!d=g)DLzjN=ZFjL>3S2{U%Pu%@lFZS4fOmJD6GD8rr%^ z%UR3FdOZkdv|e}zG13{2L!gPU{)hbk3$t2>h3C?z?&S2M=UwbUXc-^}bt5Ux{k%#j zd98*PSY?^B+sLWLp#ZO-d}Fuypk(K}+0ETrOv7#GmH3no!kKs1k|~~`Gafhypq&LP zCJ6+IAhIVs6&lN9)`3Z28CL*Uj6q_5A(D@xIM%=CJxnGWR@w_$uz^)P@Ew(;I2yJ^ zuMvDfnN?xE>H`a+ zONody<6SLc50b;6&!yS@H2hYP$ArcwiHFW=9I~F->U89?Z%AZmglZeNNyK%Yy)@A6 
z+LEB8ZnY-0q;$)A%2M`wRj-*gcf1-{FnaDW;9_E3_ie{})c`;)E>=gGftiQw6J#Br zG5=S7>3c~bV`0Mh{5c`pcaZXEL+{~wD*L3Kn!UQ1)K)S^Kqe>2wYZKK(BCHkB0mt(NC z*Pe4c2ayfdOq}3hGM9j30ODl0BwTlivcAts>Y2KL00Q`FE376^ z03QHS=dk(c-Sq)>-jMj(77({?j^slfP!$9&q6hU#HEc?!wp9w1X#K(E=Zjkp(=sp6 zN0{&&GA>mu1j>dyj(maUI}Y!F=I25I%|f^{($RsHljfNu%QLm$X<;4rsdsrSw0>9* z*00hpZ{%-0;VRVdD}cd9?=w942>#<)+uDuz_*OQ9V(25N)tmyQr+CvNk3e{P9d|_f z^{mbFxeIAklB0*$>%w8W9js^{Jy9-OC-kEnk{)|HtE=*R2%xPzNR;tKV+mxZ%{~`$ zKG*E*4=^I3*2Yr(%Kgnn48QL2tues`R!ITpZ>Gw@akd9K12F?2KmY)+6oxt=b4LHu zg*^2Jw5mILJTqj9uS|e|zuaSJSR34r=~+6}hHJnD;Hh5wMHJ8JB)^E9P1O2{P5D$%5k=YGf@|4Ud7Q6o|7E}b26rIHcOs?!(Ib|<`g~58 zo+^owhereE4lP&H)OP)k6D7p1i0c2uq`LQ_02wfiDl5K}zjN~$J3xE@T%aIqJFVd8 zR{|XkO+adD`qoDYxxf7=sY^iQlx)?0MMetOa@Y&8DG$BtC$nCAz%zsqN+nQUC;q+S zD&e~VtIKE{jw?2mDJ0HWkz?}N{%*i9e@o!}}Ix}-Q zV}g~mMKFKWj`g@#60enKE(KRT=6=Fd-A z;m1a=azz8Fv%q}iPp_$l+Q$=%cXJI{+bWB`Fml1nll0LgBwRkKU^vcFwUmSG9DU!~ zh~iv)=vmqsp488iG23%{pR(lm+-nWI{Lu+yia`%5P?}x`?U<8J{e{ZZC#%N|U!d_$ z^J0vjMyaU8gwxw#3lZ>d-0e67w0#mMSf?vKdGo7f8V!q;o zvd6#(YF~t^SS)1>v_HL@C`xy36lQZ5A%!Fs)-(UM4TW5IE*N4@3C`>31q&jt-530C zWm%0_p3E%;Js{phe;-6odBRc$&uu!P>(zR4Hn&P`_0n(uQ%H~h1ARu{8qs4Ea_?U&iN_*A!j@q4F6LY73fG@lZD3I`*5E5Xt`wa~cSp|I!IPY1h-12oFWP;| z@fAs@xMg4hFQfGx2x}-W1l9@wk_909qvz^cU|`gb);NoNezmA9=;l(3DoNy{;3kJs zK-Qv$TiD9hZx}7u`_C^;P7C^UamB@W>>b+Ez=RAhqa31i%V&a+l}6{lSp-pp#1rkF z_joTpL`_zm-)0a){I#`OqRr)>)a!2N;%?UpuQFbdWG-ln!AZ5U-G9__M-~?|0BE=_ z@cH!PgBk-5^$3)$RFDNLnZ)M{q>#4T=VHV!1_xfyN;M{e zeXNaP7f`V5Bx-E^SLkKjDJ%@w9R`~$2zHzIv_PI^>;0yH%LC>1_O?G9X(I3EBb5Z$ z7bWFug0SYB9jktagucK&({{`9`}yeK?}mnNFyRJXu4Dt44OYkVjs#LF$mrOcxzFWh z>^4!yMDwDHPI_EzZS}sMs(O!gfSxGx@eHVo`;4X{g&h-aRGijQ z(VCc!o|`0G=(!hBLh9&!&ZW$PZ4_QZPVF(qVYMntwg`;HAE=yKg%5wv{D{3H!!BK(Iv2^O z&(ID<1?F*kvEJ_>>aEn}?Eu{KXH-H4KRj1nnGvV7gSE9k(>@qR_7|Al>0R(*Tkw)i*LQO)j*ktGv+21r<9O`lL7o$%rFFMD zp5J66KrZdzz&oN^2T>G5B&9~Qb0l@+he_U`Ca^d!Ya5@r za-2#9Qf}E_eEh4wkY8l3gD^g5B&*8!XE< zM%u-j?}t#IGZ{R4%InA(ML|kxN&&yTwz=;F*SSK6Vi`!l^bup7;-B9T$L7PTVKoYCBSzGNfWj_#E&D{GQ4))kcMR2 
z7y?dNetmR0d35MMQ51`X#|D=eW&z~+?L;LA;{XR-r9CCY@mFj*!mE03WYHZt+-s08 zA!H7FR-d&)-r4romLU|A?+LOW*qsjA+Ecmb(^MLRupD#J0`>R1SFh0T)O_H(H0YN2 z;>M^H#%AS{A8waf9bDO4i(S24js5Ko6)5k{l?jjVl2`hgE!T^&9ZQY&C?HE=M*FRz zr5YdPK6_I}L=7~*ZGk%&ak@}P9C*#Q7Pvq=dzlaB_??d6HHH0>vgIAWhv)nE*R-pT zF4bqw*RI-E)~F+p*Vfg~2Ua|;%Qncpl1hR&=B9pkvl*4*88DG>CH_Vw={N*WZ{TM7 z;u{dDCJf=@* zal|s8HbHZgmq%U9&rhp|iOW=x1ylTCuTR1pccmA7!}P>zV+RhS&>FZbX;2?)uC=7~ z^cRdBe%ja#7F+C7p4mTH*>&fw^<+t#v{O0lR2^ zfU|W9gAVY60qgAtUZIC_-!wk>&O7F)g8^Q|ju3)>3Z=e;QV~ys>h=Y@5z$EC?_@Jy z=s@_LR>J>5gCbK#(1g6NFCu{>jn}M61V!FA~{kl zP#ZiHGRPV-8@i~uSswC+sf99NZ)5Ruhqinn9o-v|gh{|}KSiJ3exW$?=<7cl*XeiE zB(~~*^QqkqLuK?tVw{gD=9o;pq_@}=LMJNf%tstmZnmuIJb)8@ocQ~ep_=P%4SxJE zyG!2!p_4C3WbHr!pGWrEMp2=*7B^3UX+Onl?)`+DX{q7+SKu>%R6zbxcAylyo8oHy zsa>zw7RxN%^E$Xu4vx%)P(FLWh>8x6omvc6fmLBiJR4t*ObC znHfX1VVl*%b$oF|rf#rI_+c|G*SD@unI;i=_L~|(QD*%hR*QIxgZyC7_asMgB&?}% z>;Cx_rhf_Mel)9*=o3PjY zh>~Ym5eCLuaff%V(VxR2Qa3WRTB*cTJe!#eS_^Dyi&C>vq7$!f$@34iUd!(GI+p1n zAzO$Zm8P)iZ>+~>bgEB&?8R}n4s93`|HfyP269IfMT%rrEB`m$`{l4NM@`7o#U}25 zHZs>}*RQ`BxV2_2J{3tBxs6{seIQ`%xX zU6MZJvGk0<d)>-vM+u=>-u*I&Aao3DA$?GYW0ulRppfYwfXiM*3YENT5 zU24C-!?~0DPC#cn;3wb6sn$VbqHu6CGX4ouDW2--%!@gfa$Zu)*0xm|>>6v*u$UAp z#cfB=CjU3{gy|C=4jY7W5y!`Zd^dXh?=$c}R_6VvJ(Ag<*4|BbWz&|Vq!A4@#3Cm4 z>o8v)x!W-sBnwHL@wn(C6?tqW4qi;R=WTULFYW4EHe;;L$>xgI0U6H6@cA);8}59o z^)I)~f!zOJ28{g-|lRtKXyt>jA!FwsrY&rXO-SvOkgi}X00V(c3X)hrn7YQH_ z``Oa}Xn7tcQtk3Pf4xC{{C9K>wYF=_s8T$T=;G39iGagPB6b}Y*t4e7WJE@Gefn*D zy!*npBMKlEPF+^s^Y2<%tlnWZ*S>I8eg%jNN~B?T4UcMi@VGQC?&clp&HZsODo%YK zaG&-=N#{citv)^+ZM}Z9eRiHS!2;u-NCxHRvPYCj+0P~i$yG}N;Hi0!`D(m9TyUou;Cg#6yx&w} z>2Fjftt&fk*{0^XXMQ*?((>KimZaaTR+)?}`fBT&nf=Z<(NkOJ~23FWrZ*!u=F^z=D zg``kv%l3{n*@F^1`NQ*FXvU+fyKR_5tb)N+%fa5UW83L)(yNy!FF(J0I;{8V`6TQc zQE_0?wR@>>8wBl#h#V66&w71=DmNoX_!)G&zn{<$19!59f5L&@G5IP^cQKLIZ7um~ zb|7c%x{4>+(}S!wy?)uz#q>48miWb4J+)Lxbc}c9)b`41T`vJKWv}!L{0Jgv5)Y%-)5^X?`r4K1nd?C?^eRn`54&O|Q~s!ongE zZP|_#O=W-_LH$h8QCI3&G4@1(Ch)4W>x;rx2)SS24a7I!8uoTBBB3g3M3uSi@v&RZ 
zb<-`2B(ZUDaHM#92dYmh7=-4`uRO1@x5XeOea6N9QG}j&>JC%gV&*PuU_i&6l=wz7 z?6~EM=hNGh!-HaGEd<~_b9;0iop2WcpBxTfj z4}IZ<22wZBq8Y*B!}5MS2A@XG2-8uG@Eu50+W3O1FY#K0Xp7UvOorv-^?NRr)_r^O zXnwob&mE9peQD?Jum^5M&!V)DwbmWWYaP7)8ZW#sc0EtuTzNm#PW2`^BJ^xDYhw>B zkl^tlZLM~;?mxM}L8c_xsyYl)tvkK0(_$xe&3s6>YK zEp3RZ5U;<|aD*NikkZS=Owq9yP-5U)C6T%4qDEyMjwHfNmp5d%FIKxvXyJFroAnV% zrOOy+;pf@w>Lg{4vuqb;yh`J#Z#h4Lum-}!(x_mMXT`g*4vf!OsoRRaDt>_%{0?a_ z!GU+=@rReFm{*u6M%OgRZabDlxvR)(m>ZlWKRP@l;`OCTSIU3*XcU{!{N>xpcivvA zhn36-x2{G6{?*!BT}LvSB08zdg7j>KU%l9q(o|Wzl&+H(whh!Higt)vSHyObx+V;0 zmMo(GpIgZVOm3*x9)5{IKb9+*xbB$^6 z&q|-2n!lF0#ULA0?7xkqX&SQG+UqhP%k^S7!CdL?-JC1~eJzgNNNNLkEAX{YK~yPa zl50<^ursI>0^?nrgk^#z4zDXZWI5s6inbTlkvp^w>x)swzUDM8Y+nbu6&c}-_3z`^XPSPJ)}RGatIeJOt8v#N27o6tScl%Es8+{7IJaShhAyDkbYXcFi;J)gUU- zhH?N#UvRgAklLKj)YM!l?39o}5qG=G_=rJ>6R+O%VIVQTaBD2t3C-x?HDmWen{y94pTA?zm%Swd5?{wm^X77%;4ERy!=}FqD=vS4YDU$%{AxIp>Ai;`NP>@hh~rw%NIV2MY!cAC}_8fj>mrcsLnM z#!LpdW$Wc$ue)ErJZgEteEM@i{je^Qw&_{*HP&CsY=ziCR<&g++k{-G_{9%Hcr%g>{M_{9r9m}nw|xX@nGoKpKpQTXNS;6XWW(H&= z1bqD0wP)J0=!~B8yhPESFH;YT|8@BnR%*6-%wRdSxOz4EPpPeUSsqPc%5$^baDJkc z94Gj5!Rj9TgDxXCmz$)W^Q%qZcE&!Va>II_YP6lRWL|1)M~it{i`R^2uZtUMqd56FsWW{aU=udIlYyi z_VF>kLIK4ff`8*>bjVn9R^{#+^5O|TNfORoSp|QE<|LoGLJqCsj#PqWw$a!^WHg_D}C}ys!Zzj&Cs0hwZV?!QY+rKUKh_~k2eJG$ibbek- zm3r?F|0xzR`&*oj@C-3dbOhn~P0x;B1z+`8b%(!l)bg@mz7(%chrbg&uMY2236sAA zfy~jlZxZZj`O>V%L7%`dOYr%H%k5onFY8dg(ewAH%E=Xu6yU)T#3c^e291q5OAhJT z$kjX{il1)N=N~?PtoN?D$#Pk7b4w|TGu<oH?6Ez3lGFN**DIke)gXbSfgK zl?(T%MbD)rdVqw@BMs{4c~HmQkmfTj25R+c^Qh~kBZ*5e6Z=+i6)TlOV7tx*r?LoF z&388gJ2q=^NM-UwiiVjt`f>_m=%hkYC2Bj{uoU5hXrFNRLV@aFv##$1dL}UD!DaT9 zFO#NJ_w#qNrv&c>bpHwnlX{nR~M?Vq7Me$EVo_R%yx9+z?Huf;d z7Cu!|Q)we8_~5(8RCKJzutQYX6|~UIcvbv(M?P0sLBJ;G!=Ym8&T?2?Cf>LOa<^=f zjiaOVs8}~~$!WM^y<%Rs6VUm#8rntLn%Ns16|TY0h;Rg1&FR#U4tEKZBh_#6hH{+u zn;r)fQ7A^Sy?HG=uo}arx3NG;TuoD)b17Vgl@9;x`?0zTjBVj$A^a4 z2hk?n-QvUcg%bJ~;x?|l7h>$ENbDetk{56tC&uAI+olSf3H z0d^*ckkrEo9d65?Ivb=$f)m&?XxPXcL|OZ-5weFor2F#AXHKY(WVmqO-oa79fxbr 
zfsAck$33^l*+0=SpBX-X2g$liMrlb_yTj$8bEQvSiZcOa=KO93Ek(6Z_9Dsal>%%S zqxcPzv8zH!XNk0bRwdpX35TMH5bvix$7AfGz~N8si%HPex73O+icb3b!B1o|8EiA# zxQ{^D6pGUD)cmSGi>$wBhwg>BjJ(V~+lDpi$r@`TB%5C*KB2zxzH( zwT+o&(TskWf{r@}nw##h)q$Wsm!o>u4nkfzf9B=oRxyk?f6UZ8 z6HO%sU1g#QO_yZc?)vMhI@xf?(%4 z9!K6NYyQR%97WzV?8IiUwQi90=jq9LsrDx>s@x%vH}Ah1-!^ZUj;3=o5R+IwKSAU? zy1kl|T0PS^y{&lR*<3BU`mtI*-eIf$8Gv(_=5OaH;6IwX^%G$eUe+C3VQ9V{-YFH& ze4}0=f%`AhS_3JFLvXA9CG5u;!a}#G?&Z@7RXkA8qMdPea(Sd zVs5y&#!yKPzR7|52*Tc;8ufd{^~~Q+as=RVusfAxf0!M6_*LaJ0oxqU2eyWnG6V)dH*-);g#|3JbK{=x4VjVcM&JV$HIi9$wa<|G* z@hh&BC<%08&a{EwG;lzfj!=NW*!5C(YHedi7pCgH>HBHhh+6x3@39HG3z3t)1hoik zyk5GPp1~APT>#<5ya#k>>(Xw`_wM`AmfmV4`h}JQ1zijS+3{|DR2rAmSUT)$_^A3# z`PMvVHz@~TXl1#b?RGD!0lQS{$)I41;$|Y}goi&<{zGr>+xTE9{a-4PG9R1^4&NBP z>Fgq>T4w#xH*++=?PX)!v^?@{uJ||`Hre03nI4_fePEL|wG%oL0JDSwu6_Ab$&@(| zo)jq20Csm!$LA^YteTqGPVbtB=onfH2%lKOJz4=t6ZW1&cz6L?#iH3G zX=A2CoAv;~2a*v@EAcDULc}u#GvyId<#SCtE}ELAnmo9HH~xh1XhmL|YHe)|8*3q( z(DCD+eM}Fu#pkVPy-%jq`+54^QW@NqgHPw%a%TteC7Td?%g3H`jOW7V*EYt)X^Dy) zqpLUC2{^pAPmexJy5AY^N!pJiC!aQNW_${^qR@4IOIYlb05**orrR4*`3j6njLx|S z7kS8eEg+vpXlG^fc@8EzyA`G968W)I{NP4fz(bIq$C)`D=(zK0XXNhbNy?MKFPf7L zf(8@c`1saOwNf2DD#-fd%<3T_TJLR2|B@(OpJM~r&b2f8QPVchfdT!2-u1U_9N}LE zKb-wXleGE%zO{a>%Xt6sLp(-IwUP*7kd5Zo6`Gc_{Kfom;$oWl{Q53sIlBfv^T55O zDhKKdHR5&_x$~PL;v3#jB*md1w3W*F5Qm@)!767FeBrQ^2Q5a167K6&(JT3H8OCQd zY&~zrEPBtnX}-nX@~ahTDUx+5LnPF=gzFt`+&g87Jzo66jlz6f&cwRqMcioh_a%XqPKdhHGCw2t z4KC*TI5_cvnG+4!`r00EDF1vj-tLQn;J`1PJp9;4;j{{KrmV`NP1J36nVXvdb(tB} zS(ZYK_eZm8MjZ(zZrm5#Yp+M;k-K1QOS7xjrvwL1^X)%qTz_1*1>rzDYCAx}sFRy} z(>1D=Owel2zUf4Cq_Q@hlTXpK3Fi!KYaOqm}i z)&IO$yv`bq#SHDp41?>2H2seoYKUy4+2spGPjsMi4^JcbiXRkQfr7IBcCACTTedNs zfH?RJuTPF`JS}u|5melHgPq6`znrSiAqlcq&e$ids1>kMB9D=*?l)}Gc+Z4KIPHuN zcQ~9K>}e&HV-)D#Q^5(r^s??}rT4HeU;z-?QqwEgP*NqWB%uKbJ9Ya%)zxP1)C*%E z_+x1F!Eigb6Zg}s+^KHkv71KJSD$sS){|$5zJ3uK8A2__%K$Ap$rpJO$4~rd0)%*# zD)dn|NBF5M+cXnhu868tc84T+DHY zIXn_XjFcO~nkuJ@<@PQ?AeWz#@jE8#QDSjd2}rbGCoYbuMlB(7)rNsdEkQ?aAl1==LY 
z%X$+~65j`|GuF%hi>|K#%Cc*_r355J=@cZSr5ltKknTp1hKFthm2M?OT0pwHn}>Li z?(XhxIGgu-zkkk|`Ol2Zz#y>id+&Yay4EUdDji=goA_74At>Rn3(L(W%VD&9g0AI3 zD4n!W*F{*|zb%4wu}mG7W1fR{z#nlmM-4VVmXdOt^d$Jjr~Y`cH*e%j^1UJ_cEowdR3MYy)re;&0 z5(`Z;v*Zmp2FQ=n7X=RwBjxp(GFLxo1&Y$M!N43X3K6&k z;X=|gCPBjE(8Z1Bm>|I#^0F(#hTA3bvduAH}XDze%i1_Tk-KD zWMwSPb{NUV_yVS7zyG-N$;+h(KHJ=xrc362CBV`J>#k&`-W8 zvNKX}c{aui8M||^s?b4mR5^viDZO7YcUI>~Ow}_~+i7Hq=C^V2CbS=Z{RZnFHmsJ$ z)>QJ82$Ztx3K|L{=R{&HmR*2rsQN6jVIl#acP24e&FTNDFD;XNz7MY6A^j1B(s5O5 z+){hy^rYrN<suw?ye7TdtK51X}_u@E^QZF>< zO@g9dnyj&hgh{}f)3uqn8A+;a)@BU6mv>Tq7OT1PlZ$JO<_xT-wbZZe*(twD1b1x{jy|9IIx#h^BT~n}Qr1~?{_>o7 zukO@})ZHcHDc$J@-Sxwvs`^Skd3E~|itr@r>*GZi@6)O$nZJie3xmBM6Ax?)%8$+X z9*b>zw|IH&uS3d=>Q_R>p?qt@z+lUz&nm^(E~;zyb5u0~v+nEaMrs#?du8O#VGVca z%X@?>^4G;r-no0<9!ae|W2i(yQ#osI{6)i@+XeWf+ggr^>ox;}^* zD43M5Uw#?#4R~Ymw-006vGAl8=i|q)hI`~=baW#7JHrM{$WF6tyy2jx=H^x|Zr?h^_mBVm(ER)9DpBZDm= z6h0^-2=3(;h?Yapdj9rhNfa2z2JJQoRuD7)pvjlx4i4#5)%}nrOJi03ixB+OeN&Sg z3C3#7Y3i6oB`#4o7%%*-W>u3mP+4Iy-geb+Jc;6#am|9lUT9d6cFj*vbdF3B>j1)8 za0U6S7Lc-18%ZT9y=pN68kNu8=Ex5(0@eX?`MhC4A&%NL}R+tRmG2b zR8}8Ou%{e1lt?Ky@D*-WUo6wkuFZzH9JquAHXkB1Y3Ofp(-PByd<#)MRQ5@MI-&~@ zDO@|Z_1ezOjsbVmQtC;;cqYR5nlor-XmeXhI=LaqK3pf;`dkI`g%UW>3hD9frT4&k zKJFhcTrOu%dEKutaa+%9hiy6YQxWs5s~QPrj|{)rY(&LELuj#Pi%yB{aYbK0zKSfr zwCE|u#l`zr?Ro!|eBS3JnT`N)v8&Alj<0-I+k(bnUjxMzxX{OFq))fd!cPMs`Z+8j z{@XA&UVV&=yO{ES&QZUX7lYsl4Ry-K+Eh(biABQ7jvr^uo*wUF3N{XAl7%JOxh`Yf z5T$w1cPnir=T5P}?j(*b8>ZddZ1Ym5r-cvtvNOp7hX)G_%=4E=UFQ5&S&k=7y3-h_ zk6lje*Nld;FSr;gH|9KNY^v!4QG?Az6*JzCg7S;5fksehD?s*h@-bA)K_e`cLdTYO z?X__VwZ)2y;vBnz*k(EQ)u$`1q5Zx$F(WJqWGLs)hj_zKAdS%|wux^SQW|HM3lq8M zgy<5*x*~@J`H{n?kIJ)0w#{)#+vbcrPZn)nxE)qK{Xp!<@$McTGH}shIOsSRJMre@ zs^)&<75BG0L|(VuAcyIxQ-xgP%i1{uq0`!Z~Rxn`!aS38Q)CyGmfN zyxOhw$NLheDjha4^xDLH1UZ$l$PSY5b|5qjeTff*mCx|h_xL@BNet!GEirEwe{Bmzd%>gOXfM_{e392fGh7tjw(Y*e#y)%qp0id zLf3Jn6-*A}=S9Pb+Wb!4xivF);s9X1e$5MHQYhFR%YuVL93oJzZj39Zc#~6*Y0#aR zb#AiD4Xh=`t*2W$$!22O<<%2o@pkT)yJ$kYFI0)@XHW&g-!v2`8}{~-Fe=wLIx*qjfZHIGmF9Ip1E(vM3 
z!TkihhBDN+nwSdy!VK(u`9R71cZJFs>bXNZPW^mez3)53#mfxG*iD&!buEjie`H|E z`rP|dvp~J+Zp{pQ&|#5@NLHUG!#>Y%pZN$FZ-`yhWiBWbv&``(dju43+;=yu$W3icW_i7qmBjb<$9@X*azY^U= zqGID@Z`5s)>=XMUhRly=Q>YiPC`{55a8bQYD(NHc(j+wM6h;|1Et$2GGem)AI(SPA z5N~w#*atrQO1~$q`^CSdrc5Ht7p=2DOh$zcIh}UyK+R1wgV6ki?L>wi?qagaO^wXw zw2AQ3C3CbZNS3w=-5N`F3@+x$Wd7f(z?gc{YKgW@lBV~sW&=IUw>wLFwbx@!e`r!( z=1HKeIl(=Oy;s@nYlygOS>Ud1EK0mu$4q_t_O10Z9@g4ko0J@(JO?jb^UOV#|35to z6bcnb_M7 zp3hIFuoF@(i;h64>tsDQJ{36TEU*Cb_pJ)f4^Z0lwrzD~?Z-`A);+ak?a{FRYOa=( zNOJg?!r`>_BqYoA7dJZ;Jisj(&1iK%hZ-Rp35lL(sZ+552I9N;4%b_Ct%fp9|$7m+WWXQ!C;xP zb@vG^fTFWZScB*}=Jz@CKZYxk%Gus+xrtrTd@G*SZrvUA=KP-AecsnN#quUr{;2k) zF{zmX3it3$4G;akv(6jO5D@QB(N2w*lX;H!1+bSl81W3adbpJGh6J>q<*1qjt7iJ6 z6EFlOF{A!L_~&YXXg*q7s=ZP#CPOrd#4~{iPOX4^jycWz2I+Nn5b|r!R?+2yrogb` zrZ#=uF#f^2n#28RJP;YrcKX%)V$no6zxRZsOGlT_U{#po_vpO{cpV6JS3Li6c78_3 z%LKELHyO1I&p;_qZTS~%Z`m!o(6&er3;tf>H_E{7y$Tf1ub6jx;fGgc{thALaAoWL zG;5Xg5M%F&vsdd6Swp71rU?&qLBEGG=9WcuEeXKZ@oa7am1KuWi?{Kl!_N7_+cm`yTnX0Ce5-u`OQaw=h1N}HExj&Uz&eoXVFeajO8ebVEZGJ}iA@?r`)1HaH)wkK8?|9#r{wFvSrB~~8D}op%a!SnHh)n~fVpm& zW2Yj;o`qFraG58s;E!BA4CDT137mrQ?uJYOza(KnyarjM84jCKY)wDk>UYge%E!Yg zN^P^T_KD(cyBWm0Ug?7*yU0!bs$d4agv|c-CQRk|8`l0dm$swnCiI!bGkK=G-(?pC z=}Yh34&QwU#Cm>qf75Y$p#qGX_CNJaVphl9=}bJ^549S6yX#~02IZ~>V+w5>Mib4B zF=CJiQoav>P${;(5{!|&%P+hnkliA6)FsdJRSy#i$HiY?S)uSV_`boI3`nt}C-go0 zZ2p>7KK5!s<&&d+AP#p!z`Ip$@(|9W`8eMKnyK>GoGqUr%f_ysivYnWh&e#*Q!S=4z7&<1~^;C@p!AXKj1t9|Nq+(QRpQ+VWb+?W7kZa`Sn ziJuHuuVmnnGT!Bn0X&dW{upq^stjFZ^bE0i>PZJXiF-5EBUy4fYbA-YpOb$jCMvxB zlIk!i_m3f~qChNv_#!3ZTSM`3P=g3>gTX>p&_}rX8|_`9<6g`=Sc#b=NtQ)6n;?O| zP+yq468<*Uyt(HMqnUahXrX*q__KTmVVZ{jiYw%T2G@R~l#?+>FjuvqL+`OjU9}>4 z%kA8jVoXy!0vX+FJkpsXhnrcV8nq$Y>G&lq*UC$SbBDO(<}-){E}EKgt><|3^!|ugFr&hU3vO@?-#!z%})PdH!&=b9<-iX4# znga2yr`}6_RtaObS$mB_Y6B7$SxMwA1Me;F^|3TxY6FR6=XcLbH1x)__?&*~H}W$C zZvO#YY`|WNqhT-={fMTKegRqO9$ML*uUFv1`x0$`XU zrTaWjEkijs*m4LyR)!hf{@(%c{+T85APl955XT=SOSsKpNzf(=m!bQ0mQ#;1>?`S7 z+$@6E*&ct;#+L8AU#L=QSKJ{UiA>@hC?xP1y(4xK`=-d_ID2J2^<8Qn@8)&gjD1(0 
zvl%7SAhlJ0IB*Q0_Pl*6q&z>LdD`S5a1oM+x9n6B!i_HRH;C8-oNIT5oFx&hz{uXWhjb8*zBSLdlN^vS~J#Q8{q2c3rw` z*70l%U1g&zPl>y1mR!1+LsP0W^xfAAN{9O(uGIFjQ-9T&@OQLMKGmNl_L+)U6>wXt z=a1Pi+Q@=_&RMt>VKPMu7FPT@W)FvP4qK9+y87Bpfr{CT=hTIuls!a{l zY7YhyjNPb)k1p9M8k)@`?#a9AnCp~UFDVWJ?(Y+P9*WRL(o#nW?>=e0$|_=*ZX{W` zzk?Wq;{yGmui>y;xAWd>^CMcdt!tM-G@8QkoFa2cF*>29$_b5Ea+u)`B zlG@s3%^=9&+o|r}l8{erq}0;toAo6~NJ<9i%*Fz;+lvLC=9ALXFNI_bJ9`qCdUPp~ zd8f|#FjA%F9yY~hngWp?k%M~)Zy9nSipGSZi&Ll2f$f_or4yVhbr;C>;c*N}UI-_% z7+wSweA$+2NlwcK) zUP?l(@UsP8KKUJHVO&JoLWB_G7230L=BSaG-UCD{H(rE;$V8cAwNc)acvaAw6{R4N zy<}v4aX(uQu|Mc#$r$p*ett4#R-t_osB?_>awj`g`j?l8)XnKy_t=CXze5MniRA>a zed+oPBC;A{?8A=0VBnDnaMCgN4OQrj7Ll6V(i2GFw#g$6h+4IdW6_HNy$PiQyadB_%U@^p>{JYn9-hz9JO3_C9*dp2 zty*@n)J1piryRVo8Rbj*HtKa8OFzpg$c9D(shfp3HSQFaigH^diQ`CqY9C zZ@4@rXy9M73hS&hILgB%^@3DiO;G2{BB&b=ew)+(_{7>>)so9H{O9cl4l@Ui_tOb) z?VF_e2=Dr(au}X!Vn72S-Pcmb!sC;ywUp0M(6AZLV{f=OwmHS6L3I;1gl^*4?WkIL3KB zSO^Ni5AT<1;K8(eU21}E?@_wL=iUKu?yoqFLWv-NjX37tcY)OYm|EWxEMlycOTld4A&Q1eFblJ%W?7X8N|4XfC`3L>gS)n zDA=I0AC;2Q54v3MJv{2zo%Bun5}=^d5!0TIj}Eo~NFJEB`8^f9T8*A{=w;S*?Bnta zoVYcj0mqEs?+pnznPNo9@ha`7a;IT0z#RI%x<})U%My}N(Ycqae}2o))=SQFXW5o* zN#vC?8O{KySF`JAX)*j~FXY3U{l(&(S2{{Q{)#3$P?6z!Fd?_$w)rzg(G1GqN^ML)zziBTdN-iNQp6(Z9M{+_ImGyskUaS1%OwXd9}8d5 zUbgOu)tI*`#_sU((YU>Eh3@QWR)~EYJjaAZ0hs zb9mDzbbnL7T;D*J(CanF;xhJw-|Jy_(QQvpZx9PZd}Ipc%ksgqHeguA*(li%&CF66Q~ar1sq%^YK`IZh>H7pHxTtOr*JFYv0GNj2P=FeP@T*1!Jt zY^T)Jv@SdzivnR6DV$iMgQPg)+|l3xI#_q+u2G9d344!lF8BoHiG6R4lJ#suZ0<-F zhw*r1g~eP{QL+I1_vGl%DQg#&@imX3M$ZA^-TO< z#5Pw?6XWzfr1_A($Yi!c$}>QgJI!R$qes~fh01VFmNR`flW@#jImeSju^laJEXh_a z^%%ukcPIKqV74SwNtE}tYv8Mi8rF4iI9VhY1HcNEWaApMzP_m~2etpxZu$oOP)Wyl zR=7Mv<^Y0pr6&UFEY;*JZ7HVjMtYC#v|Lv+8re`!8C07r0UWf1j72B6t&9&_^*4#v z)=ZMBl!piZ#hfdJO4f&^x}759-N{l?;axWrm)lGG{pJU6c3GBt0E>8}dMDw5P;+oZ z_e9g3oQ^1zTa3^`YS-{3!w7-s6gnIr?Eaf7lG^HvGSckhlO>GgzxkI>1+Ko`VS6_-NT>(|jrm*brCgguy%EHm!H+PFY}V>>U1j7i)9$mb_(>@#%! 
zMyID!Jt}9>$|rRL3i?Mul2?*pb~^AUhfD3V9y^7n2Meh{Mj~}JD&!kbuOjz4eE0nM zbGXTBTZ%~G>(_jIe7E4+VT5eo;yKLN9oIiRu63_^p}*8P0{c?@&Y`qFe7Ow<>j#1f z`1WV9Lhjq8fxkGHxFbk~`u7`eVn7oD45D=Q=@4-lX~u%%rXd#(7a6G>fu_KYmHFc{ zRWx~|o}6{+I?OJUs!k$Jz&h=6MwLme;YOAd17Nk-o34JY_(EPTHTA4AX>H?2-W|&( z^d-x7#7v-)i;&BGVQOLS@c&~u1QBo4v#|JIlRjr^TbOb6OuUFCH(xeswQS^#Gs4$C zhWnU2sYr6`nJPKH50G^tCuCdh#w%<(bv|{<-G@}l3c^usKDAK1n%}r0nQWMDs%IR{N)2f9{5?4me(Y*H5T*OA)CW07PLO81hDtWG!dCaf7#N#B`TW zy<+0Y;+Z59!-DKh{9_hsK9uecQbur8ANN&3=nNM~O?}v2SHK^7n>}*$mt(+gC7g%ddh*Tr!NNd=#mMD=&;81+H^ZX~$uDk#fdxtc zov@@8pfTBSIxPDkMKE4Ing3`b)u+s~k2vc?{5Sn#v$O3nu&Lo-MZX^P#lSTg0bYIM zM0N%n=ytX8x#X&vr1FTX@6;}kIc$&8H#9a5Sypa>9+NgRkfIEi^aX{)LUoW!B|X)S zl~H5tzc}kV-)M%en>xfe@-?&A-F?s znN-^EV=pgr98|TFdhyo2l&G=k7JE+OPJ<-!tCo+sQvOoJJ-*3%x018QvyKP%B%DxN zsU4NZ&sw_HoxfrRVD0c&Pn!&2GI$N52`RT=^HCemiF z=l4nxI@!G{`Uu>t0+`|IUCjr!OOwN5aHpOd$l92(2CJ1yY7vp>a`9jWVh_|c{*k4I z*2e$CAt#r>`7R^(vhPp6qjZbM=fRkCvz0^d=BLo|;k=|T%qFZQO1w*i*Y;_fkf+dF zM`URE$YJ~(TEiG&YM9vE6iR!0$b5v$o6q*hJL_IvO&0KVW!v1I5wUI3T}K`q$Z~X2 z%B7s}R#%J!&K2a&B&=o#l5&=}Xz zAr9wpzT5LisCC;E^DwSVr;xG=4(KwVfroZ=Z4^8phC&~g*8TfFR zuG)ReKm6E#E76QYO4rW^Vr^yuKwAde8$*f;ybeN5}IFdgW*{3 z9BWzKlU<@0KC_M+OP(5&jg>%Df7jC!jvDgRR_~WUU}AU$??$S2#r0sGmMRzuQXtTX zGs|e6Y25p|XNPlsTX%J$sZE4A2=Wtl4i0GeSoZnGB>>w> z)fsCUHJ-aph)0|?;>&6$aAcjIL_dz_1yLLBz)w);N*1?T=JNJ%nSb9<&``7NLeM9+ znfkbtFpHNiSR{=!(FH@YwzpqHHyR@>C-YPq^B^P`_~4jd<(#3hbf(S(FFiM;V7i+`PUMrkJS;Ppm4!(|Qr8F76bWwdh2CMeh!?NKcUk39ht)U@)I>#NmX# z#7xa~SJboR@84Y_`~4oTXKJH)s;MNG^89J0 zab_s39x4d*F0D!+E~+xx*tj1RUc2mcSq@Yyzd3&6UrpvL+n8b$ zn^Lsy(l>0s)xl5bkJy$0FFT}JpLa^EFtgvo=PhuMY&Bu4U21Am{{ck4LZO{0~f6Rz}4Xim4ar0%)$6@_v=Wn#{&*I#rjbC(O}!>sOBFH(y3uwnb@m z7j6#I2XQB-QLRE0znpx2&aZ0+Z?m_HUyZ%YqhiEo_dofOtB5Y zDos-w!<(=uK`>dNDrJH2PHRd{8(vFM(N4ed1!MZL`pTUPQ`UfdG)cYhH+!i;k>_6- z+A^1_dLo2Qb~}DToKUyz$lRG-qb3~az-M(sCN{`m_~ioybw)GYU%-?BfxLY%N4gTj z{y|ckyi8=_)=0FYzGbPO5}s9Gd~*R024j(JxoRJOg9`z`-J9Q_i1u7$?CmM&$IY#B z&jf=(prv>Ko|`cB_V#AdtBV0~O~k|fMf0rVfY9cdN$?>sG(*5b0cWvwF8BqXS^F72 
zekV{3Bm!X)IzAKJZ^w=PNAcKF<1Tjd!IywC7ZC+ZRrrnx{6fL&%?z*8A*s`?k-+MC zk5G^_ydd1XY!;>fs+f;IGy|y*e`LeCdAPi~I%+}0J_NDYV@7ZULajHZe+e5J8hGs% z;eaxODUdD&y@vgU6M3)*aR-O86X)Xkeg6CV8`}U3qPG7;^j*65>Pj=JRX(~a@$vCy z%oS@1L9x`ZtXke?-Uwc9L%bS!+v))Kj2#K$HS z{MGW~6==detBhu9Q|nnqi&Iu3n7dprd*E+wa-CTDloaubv+amJhi;9zP0bRNUw3b} z0a>Jo6W$!HL0S@(<<~rN3W9`e{~8dnsORJ5k9mw6=>j$|HSLcnmMm>FEDd=~-Kz6# zszv9!Q|;*zp>9C@Gq3V>V$Ee<*#HUE1a{P;KL8BRbIV0QqD?H4TnMm{15wzd%FCai zL6b3*6dMDjq14dS+gz z4?0Ltd^gi1b}AT0{Q&`IZIwhN!9p3q^qbF7->MAY{FBxC@0!=A);^KT8_(Lh(x*~w z!vS!=ME!*!V|Uj=X7_Kq9d2cPPfP%Q?YPLxKdfCwPMPtR8I_m1DGj1{90I7jW-^T<4CLCZP8?wNkqIU(@;Fc-_8# zrqKilcBV_Muq7lU3a+fkRI=qz6HA{I)k!7vXu3o!vA(jN*`D9%NGcT&`*f( zERC@esbA$BdGHbMaos~!e#~X`1P^rn+B0{3wT)~gj|Grsu%&&WpynIM@V$<@?vurl za+F87?Q-SBAqDSXyceG6s1Tmnws@f#SKJx-=;o+mS(}5z^~FIkDFO_CNx8n_<#!OD z)af$T#PkFV%M^!&VXT2+oS_`0SHcgsCl2`rY7BTlG{d?!6dp~90F#U#H@UyDFGxPy zcylmKvFtEGCeuN^&O@e6rmb=x4GV3#^@jFn)EN8mqqY*ye7@poq!4F(X$Azr5DZ!N zyBE!XK{u+J5Z6FZ?Dc9StrPl9iWfWe&@TT?C*ZlWL;)E8>AluHE}n|qTb&9GmUN0l zeraNr&Q5K~CTI9^y+iz)ZF9J($b)!jrfjSVXiJDg{>I9gc?x z;?WLi=$DY`9;a2*bw8x)()Cb$oTKf0Hv^aWnHm5v_<>|iwvq61Y?CtrZBj=2S%FTq zJSf>{>m7ULe!&}4O%PP1#f|CFn4q=>S?JXj%|&6Xm}7jg;|tNC=>nj<>a;f<`}_CH zI+s0}Z29C8?Lulgx_TM|Mwzy{6j}dg>D;nCejCjJw^TKGzU&MP4(6@;aN6^l-af6z zZV$zL!sXiZfyq>LQqg5{42D)2>2sd%Yi!x+AjI$13#MQt&(h&@cva{ug%SZWqq1MA zH81balXnuklbI+5IXHMUA1k$)JsML+M^$tEz?mWea|LEVQxjB3C_qcs$cXA6!fTz6 z$7{C3B=H-S*RSSF#H*Y;C245kxaRqc78q46V5Dmpbv zsx>|&lvMm|jY+%Xhy*$MbXDb(*BMtQ;Pz61snysBWOqhp^Rh(bTr9l0U5kqWTaMXM z`*^4gnX7o4$2Tj%%vHmk(CM?@KInay-}GTi=?R(&pyomc0Qr?|BeAYyfXUh=FW&mY;oJobtRh>N|K;7oyt5 z15EU+xAohVZ)^^i?}gvlHw8yjs~xC2g3RnT#gpyx_X3RC;Jp!yFEPN4do~RVU*TFo zQcg-~sc}D-XW>HTWVT0haSL3V^DE3ZRHA z68sctKZZ-b0&4A_%*E zI=ruF3V8~$&qPR2i3P^@+<|WrzZJT0xIEq`ZOr27L`0cgzMsIh^JMZfcLSiruG%V8 zI~%e^T4p>?)2Y0ZNLH7E0`tg|(cgMt8XL-YqS~5S;KwW{3UZK*RB(hE^%eB>$-x0% z23xVZzHY=xOhd`&ms1JgQl{ncv$MLOqVd>0J4ssih&WM&&lgmvyQc?Uaq57gr5nJP 
z)7fjEPElL)N+jZt#iq$vcw8p>A|5f?~@wF?6+)d_Mvl`n1D1mW81#2r>9c;J6?xbbaFjwpZhf3<}+=s4|EC4BxY zZ!*g+`G(HEmMFVN$l7R4tQp~%qq7RKLfj0sWkwz;MI30*xX6^w>_I>SR=}!UyO5j$ z5wa5X>zU{%zDxZab37Wah@rCg;_CGz*VT3|LRG_wu8q*bgvm^`ZMIxu2tiBj=tOXB z?G+NzC4J!BKO{CR>uiTJwB5{p0)M_ABw61bi0r16h6_A2QHU8%c~-(eE%@tP1murr zTcpGJ!`D>Taxl9;do;0Xc@b~3xr>kf^kPnG(s5vAtSOmYcgJn|A^qbIFIH1GMYo&fDtP)!f@nN;L-w)iille@qDaQY8mV4)qO@%=%1C$K4?l zMX+4;BUz_frMb&iyVu5>z2=3OPJKp)3)&@WA1v;w>BFfM1BDjZ%ra_g_a4oOU&roi zjoxBlmM-Zj#ZXCiQ*%X3kCMsrO3u=}4^?%vdIL09GRz_i6Kb(~nkwi;FY=GG@)#V& z?_Ag-zs_|ntr3-++L(x|LKS?QUlpkEA_^1dVZ;s~yX(35>W4p^_y4|BTe5gCBb6~8 zVip;G@FlUZ7bB_um17{d=L7y8o83o{_P>6riA^knpsr`wRh9dt0iOuhLe39bA)c=> zn-Ud=@&IW|iTd05=TlLW1ybI5imlJ1+n{mf%d>2+mt^-^?}O2&<3ElE(yPVzsnHtg zy(ojZgHl33k{dIPbWMQl`R6^wgx6}c%6czKVSf499ad`5#iL(OKI^GY`Eyk+@|QB$ z=H;uoPQotyS5eZADswn4aL_K4*L4LiHW{ku9fw5j~!@hJX^a z(+6suyZKKG1_^y5?%C-lkq)iQCV1A)(ZzrsHy4^MniyPiR>Dq|w|!WAbA)-4?{bXy zmPc^X__&g4DNZ7)ObmZH@56EVzz@5JJ77HXFhHj`{N#J?3uo!JX_dd54^I}F-$auc zkW%pqX5FIEhZ21W%=!SIFG1w2Y-NJOqQns+9gs(Bl%HU(Oe^^*T8?|l(nBT1RnP`@ z5_f!?CHUaZF9p~XY{sZD)ElvddRdX59$BUWyz;cc9ye!t4w+_pvr!@HKoEQfl6tq%LhUbjT+kbWBiztdpV;f zPgQyF(Ln-P?}=?LxB%1q9vv)caP9?)Sy1=m80#F%=T)3II!dDIda z&l*(bucX}d9p&mg_$eM&cs{|x>rU+Po^i<0<+K_RuA(WokzF+nbhb)p5|yfRuD|JJ zrh9b4eJ_*CGZZSt!&M|Dkos%(Aaz(+{WvOT|L)PDILdvubRynW9eg!n1fQ(Ww=J2e z&iA~Q#v1w<9x22-DMDa=5*E+J;wKL)=wG}l>!D4w=BlJ0;7Q@0aQvCwk-xLPmGt`2 z%--`}I^+T$(siHe@?uDcxEa;QBu7-=wrlxDg;DkE8t#PZZ~JW2hHs60jY3L9&%ZQD zztAqcLVSQnjG?6Tt1*0Z3sX!cLfT^g+Ph*UB1b+j;*N6PY21TcrMOtY>f$n z9)>#85nF$fUUu*u#e4xSF4hO;a!&iegXtRm*}#)$tyb3ETP5WJpf`)$J|Q*u6oW;n zwFRgE$j;!=cyCM^n%4Nnx1oS4pWp1NCOC8Hyp6B4_-0=cT^!Jz+{8nSEg!aNI{cq4 zdr5;gRGy&AXh>#zEm$`7HUqx!Cu5zft7^Lk7_#kVm08=Co~t?T+}Cx9JZ;C}l8p!J zVUT%2SR{>MQYQAeeS6Bbu#@Ox!SyXcI=^SmJzy9W%w{Mlkp&X$w+nOxu1{1e2Qa9* zZP}{}w6{r)Ht>STi>DC*Gbr5m^Ecf(wILkLf%+AG?-uURLB*Cbb`GaakfTziT$?Co~ z{^z(<+NPM@z(u)N@cvQ7rNugMM)Ml$f$0_6$n`UV-N{Nvi8POqk z1XJ|IIolVg&^cBFsDKk0jB+%5-m6NS4zK}zL*V0KC`Ek+o2TWEqAF5QWDOjT3%l^_2mvD 
z(G&wq+?<_lYod@Rsy{K zq%qy|UagFLLsxC4-(&EHV-NQmnR_#{m=3D#MOvdzJf+HYTHC+bm z)Mu_g1ha!;k9z>1hI~g;73A~!M0wu<_F1Em1BS7^>o>*i~zYrBeIGTU$ZVV{-6bX zO@jujl!#{IJ3jPS8Q(dG>kgfj^Oe9qvnC)Z(kY<`+b<-=Q*$=I7e4hyPQWafU)IrA zNeAo+$d;QNP2G$}@Vw<5T3`uLDIOsOwT>32hhi1CE3TaIYQ?-oe>*S6k0(MIwS?U% zYoGFg96?m|u>!?A@mTE!L(gJmqoz;l$tog{c0sRe&uupi8;FPia5pG>(-_+PEC-?1 ze~^Y}1Fnoq*BdXhlj^XP*i?#wqnjx*=YGkXV@6yUsAcX^1VMEB=GZC5`4Gf@6VT6W zpNs0Q$Kaw>Ludde2mR#+MYZN7I_se2o&l%#snV#vKBUyqgr2?pbpML+a6#@BKa3{4WPq*I94Gd-#nBMco)B z^_JzWpYI)|(<}cxX$6JAm&|x&>~kBj6)zR@xpMTTiQYf?mkk}Wy>FuJv)D_;`Lg6_ z9}z9=w=+W^!gQG8U|Mya^IbbgX#AWKC0vs2I)0B-0N~ub!YRu&ywyjuYmB@udOpkh zNpFwQS)-cIZL4e2Y;>G(a3IDT8E+cD;9=jxw#cM)uXZa>y`ABByxGtap`B$n_}^co z7J7w%hv|7D-Jd{HvKRMGn&#**o(JHewbKNtjGsV^Yge%;9KH2)uiNVAP-#X;PoU-E^+wz)lb2C^}F zc?7YyaU&EKsvEf+k_z_PVI~$iF455?AbPjh{ll%&YJ*#UMEN4cbgS{C6jzH-@G~_a zvZ{xQY4a?17E2nlX=w_(e{9sB{D5lzanu}vTTsx?lNQrac)r4f;^KpMWSqfWRPaSZ zyUxMi-;@A1LwsG`zFpL_(R?xZ_IA+=WRzt9L5WSakC z`Gc+x0?6&0XgP~(1?our#L&-=4zfwiMT$5+XXzG$An-WQ*q+YD7I0Hbbp1Vn8vn|)G;xJwnT$XRD)U)QFOPGlPy-BmAF_>V zfeoWWo;ODQ%;HX=%JT3pOQfHPHId3GnNrHq{(W^~UI(TyL}T(LUzB7(wnOXwz@9JO z#P`xOun*lNuvA#$wV#s(YTb1;_t(5DHa^7E;HL=PcHA4tqc29Wmd9G%;G>WSy=NMD zouAKGja`es#3KEB>CW^`$;AU+g9=Opvq(ok_4wd&0JrXmgO5p#XqA`*ZW!OUtvhs` zWMXeEYI>a)vF%*2pK)FBuJdc)n9>Wo5AzkBUt~jvGb==#zMGz`KR>GT4E-v**Ij&b zb=Q+AZZPHtpoKI!?$E+lqSKtqodZ(JF=`#hJ?NQq1<%r9mYndOP`v$AQ|GPOD4kA& z0S<+^PCo%;M7!&-n>K|;k1AJBro3z!W>hBp4Fp{{&=<8k^;OF!lS_%&tzm6jQ#SeU zn7WS(sXuLX>v%gX==DH<@x95OY<|$*NYXvUYz~G7h^xgOA%LsEM@vdLh0@Zogr@%$ z1z)TdX+ulD`gSJ?C4ynmcQZAmIYI!M8vyUa*^9b!p?}Mh~)an4@d;~(_a4R2qJ z`GIjxOOcC)w>A|=bIp?io+!z5y8T}w0~p$4(I7;yaD(tpZEF+g9XhL~gAS^a54by3 zHJ7_HQzs7Smy5nrzdk@4?ze@29@5{>-vYnMDLI>Iv@p2jp6w#-0`wfI!Q>vfXUjLT zN`QCt_;(CNsTsgZ-6n2{70-r$7+jQJwL4c34SglKKcKA$*rroN2a`AroU(jg??6jU zawEUE)W0U?v;JK%#oJv1gWsXy&LqX*AtxW08a;(k*7ufRW;eq-xxz4dHT+5HpfrQX z)?a!Z#npEo)^WC;GWxwX0PlNbxPRZrs7f<92z4#(MrtJUWU87xNvVSR*_g4T1@`ap z+@aTudVOYeh5}BnE%1KCKC#H{x7Q$!lc@QF^4|-f;ehPbgrsh(A?Y`qD-tPSt8;ME 
z`PQUHhKaGfw4hf4Pw_luM>KR7$8jF7#3efTbl|>g$m&L^!Z8~p>T4Y)rG~qt>{!`D z2gxLxP9ScMu|$X9!4|h(v275PFSkmpCe`suJ5!3!sboBz^r z!ecvCO!|DxZk$S2Mw{8q(>IIB1t6;Un+7dfEuB@{>Xd3VT4ca1*FQ&NyD&#llLrq{ zCqsl&^qf0sIWn6RojbYLW+$`4*Y{AwiDyF)!>?t*7d&3v-_*rs=(Cf`JCmshxemBFJurLsb>8Yb6g?W9 zQELR3tB~+tvWzUUr2M1+c+z$0Izw4)50_E;YmJeIK2U<|wa&97XfL|x+|4d%U5_65 zXZ>L~FW^RUxj@ql1>Mho)-L|}Y$f%(!5>n*BCNkN?Rir8FUD!2zNTVm1myoMV<*vZ zVB794lZavs2!ccim(b7jm!hd>wwW@}Uav$31Kd?b_;#l@i+fM)E%!&3orO!pW;+qd zzqS5B5G9z!MxO}77fOS00MIKx{wlFbBmG<*b*Ts#%y=z#ulqHs>Qbws{kBKz0-%R? zvX;|k&EMnb`tk;(GVw~DJTVlm=V67bi!!*zzoVLO=eJ~FAPMoR;R_qufe#Q#jJ z1qHMWkTkgy2gu&QM=p=281D7vn@<-%X>TG6oges}q-?THC=`Iw_C^pd7eg(ck1zg3 zdw@!b{VQm_ixTJs!|2!QjzX1IJ50*Am!N+rcs-e)0fc-mh0os~jKx%2!F2SqWl-j# zrx;G2x;kqBO?!Lqe|vR{=0$SD1}Zic#LAA$C=AbM8DfT-XXTxWX|3-MZO0qW1+tRL zW`mxkFXbH}F|`EvoF5Lpq5#kdpK{j&wqx`VKtgD?QS|EXoe3tk@G*{nYuqd{Ykz28 zCcj!?!I#RPEG=u^p0WUMLuQbp3JGNyiSXwmB*aKBsoyl>m?tfb^O@5##i^H%cGR@VHq1rc(@anP&)3Mfi7ce{9F z-x31ftmY39B498z`%uP94bLrG03Mt1B#Ohvbz-Be2kcaRFyH-u=z8m@AiwC{la!D~ zxxGo$gz33L*c-Wmax(B;_16(!Q3{2> ziO%rjpmuOdibr#LldrP&b07~zV_Rp#%64c?^G0sEckXBow^9m8L0zFhfoSe*YEEp? 
z53+qQF|(goM8x-bZfIqRMhEm5{^y?XWx__hUoH&M$J5>^>wJo3cVf*1iTH@UFz{jd zo~b!9kC3181&RXfi{XDLdim<|#?AWf&t=!GTKqPI1HXFg#i;ZCl74TM#Pq}D5S@MH zl>Lu;|2(=s-!-A7^U+(dS6>YDhpttcz*6DOTNR@iPTLJu|Jb=z?qlt6zbTGlaxyog z+w$R!7~IK1s{Pz)m(kW6QW0poHUI_b0|KN3pu+{-yPfkAi%5AW(l5 z($#MEzf#w=zh#W|HFgTP6+gbKyZc^kaA%*>nddsw7pk9^8LNz<4c zU$YC~J4dptv3`FQ!tmnM^sF!%P9UcXjZxMj^|JBK`l>YA>y`p`?)#@>Ob8QUVNI<+ zN3%TkaOYyf?@aA^m3ilq!L#>pv}{Dawmh>#Tj zokXjW-q5@w89Dj+=w`!m{j z@NkY(vM2uHvgzT<@?AyP6O5LWEbJ?&nmR+o62;C7!xnyqO+w@^V#5|vqnf`UQ`D9INXri}3mWy=Pjl?5*zN0RRHRV5A8O=TKaC1A zOWD>IRjxvB$x4+-({=HEhMIKS?;)5KH=3dC;hc&7^x#_Fq^WQQQd9Z4`>Fb@4^0s* zS}JXEveTFgxp*614I%W-j><9xSZ*!cK5Y{q5mfDc7^~o_%qSF>gdbyk=>U9*b1g7- zdgKw~+T8Aq>~J{NdP0~tVP&>%D{(j+4+hHZSe|V&@SY3|4OxKein!;e==%^a_3qdv zw{ZaUtl6PX9Ku$h-pb=qFGIep|7h1PoBiYL{ae;{Jb5gvOy=Y=*xDLgPRG4BL!bE( z6ceRP%9+ao+1rb04zbLQp9eK99$P{o98xF!j>5G9hA4t(=R1umvXzwWe7?JhUAcFV z-#4#Cd1^BiEQPhXBFnS=*A%Gk*X00>a4nPnvn?=ujjK|Q;L()f&0#3=VDmTwQv+|< ztDFsrG)$geuMQQx09HUTkUHjUeBcF#HOn%B=61~2&p6ro1Xr3sTj;T?JdXNn{C?}H z65q)dyMS9c?i~x&LNsR+OQN6swOopI)=swa#ARAq|DUR_x^9r({0%7+Jkft`>~V@+ z>QU7?=Jv}KK1m;Vf2ryFufo2&v2plt7cI+g8{&Gl87&J;wlvM9!~MfXZ6%VpHcM4j zedD(wUhg?Md-bRCb7#Sxn{6^C;Rwf*h#c#EVZ8D?Ft+{h;`rQ)t|9nr^tMZkUKerY_+N=vmU0emeNEO%;l-K-C{9*R`1GDoI54m zqntZNjz746smc!(9JD7!mYn~c}gF>4rBV^W;C z9efh_!}LN<4Wuy`8tO_(@K+AH>W`3@KJ>QFOvFN^C374xE%quSA?kKkl&RZ{dX|3s zbR12nR*NbbiRWVktv0sbw|vv1&{EZrQ~lSk>fqUw!wdaMW6gbB$66@2pt{FCG)V=ktsql13D4!-6M8uGj%ge174sT|kGjyp#m)Tb3XH`v%Z2YEfzGt&4 zL8s~-*lOSdScI!oE9K<>1S9uH;V8sL_1C4(zfJ z5;m=po(x2)XZly>O4wX2^xKm|8xqg!l7=@~W4Npu_tS6HkWG8;IG%L&y`R6x)U7gy z|Eh4lLQ&!?Pq@uCDm$zQ1?Hd$J8_<3cWXuthUDHUhO6v3AEn(#9~e7YNO~elMC}Lq8|qHazjSI${G4pcRpr=nHoAWF6-d((T~zDI zvi``*I_{g{t3E)l@EXdRfdR;{7S{2uX=OA{rW>?IxBjK;U*pH*5$@u4@%M@QDLgVZ}L*FEk2H z9^daI{_k*jPTf%T6YSos+HRVr>T}k>0dAGW!Lru^N7gM~(MT=dyzjQ(wUheu%UZW3BdyiL4e}73S=Xm`MwSRh7 zj``^v6;GBQ(#LtF8K&tDf3G!P*+DDsvBk7Mo$SFeYFeKMz;S6}*QjZv=rZ1N%^cc# zMGb7Q+`b{>!3L%M;cUh#2%p*gq={N}GIYMIlY(rl;;Gn5(zKW%gkMMEqykKE5TWsL 
z-J0*eAFiZqb5wuf-|_|V#EVG`hw$yuwf<%oPjmbziZ1IB{npwMHB0uj<-0->*Ie8ZqP2H53XOr=*#)E{cj*kF(%axzB@FIHUxw$ zmV}6q!ZApr%%AR(ncfE}&mON#*lQ~0l&C9i*WR7pSu<|o)oqmAkKHO94}H%|&38wN zans}O%x!O)Q|z8P=6Te7lA%6>so^q z4vq3-b1g%hX@yhnvdSQHP4U+4PUG_dX%)Nu!yTS z{qG+x+Yw((bKei`y&km>@vs##)k@X+?#<}{?JKIq_PynFi7@8ZxEMZ?oeJDr#G&@U+fto1TbP{6GS;cQ-qhx3~M1fp!9AcGpJx$J#159aJ- zBw>qOAq7n^foz%aHp=k9XQjY`UsI({mH6Xf2?Yf;y_%l3nIn+MiogUN<#U-qfl(Tx z@%DImV|Q!o@bGY&I)jycwZ3`y$1Hg3w5rt@l6~z;#Y~^6wPqd^s|AIlXK}-ER$`mh zJ_|(jg8~tZ;6~UQQaT@sqiL178~1D>8url>na<708t@wZvS>B&zhH%b4c(NjDK!7b zrF_~@ZgY6SF55LGG*?kj8@>Xp6noC??)DA#rSs&xG1WIY@{934j zLCFm^my$D7x!<|-xO)WuOj^LxJdaw8#w|7Y01IO`L;5cy#b6-bJ`Hej;0VfU>K=Z+a|dB9i7!{Qy1Va`*YKx?dP7SV!c+8w%_&Ks~ziDOJr#2{e}A~v77`% z%M!nic>I_uEYMFfwn;^0{(?G|m)vshh{wSAkA88|B92xCRn7t~+Pil!jb}^65+Eh#)$~`$aqO57d zGz^LS<$u1wZy+%FY-n8zzLim5r#1u_+4nR|X<4d0^l~o5s ze}A8F1_+7d4b{`ucMw_bY-`;NVzEOwRUZv|g7VT0T3!}40sY)HTS@tA^L&@oU8-O; zbzz>GsB~q5t{JS>A6QIF+6EO%kY4mMGFkQgaS3w zH0|N>LlE}CdvJk^p|zT}6IY)P-F@O;1H^E$hHztdj2HO^pPMALwU$LbOSWJs&E z6CG_cBl>4k+2b6J+0%u9l0Ad@9t(^ z>U-(2k9J)CQA<$>xgB}7qRrFh_ZR_;nP1lERhE#Eos7Jc8rXpJx z>na^H-0Y+ZY+a}3v4QS&kgDY8V@OaW97Zbaf=a~GVUL$~TATLDh*8>)^HFRd(~*>X z_}7*cNu(%adh-U`_9joiE$aWr+iX~rOCJE%F(b|!Odw6IkX;Ur6^oe=fi68Jv*3r1 zw8N<*DCp}y-3l^g`$e>zsYt6w70Ks;OsP%$5%4>*#`jN1=nwfib8u{3S>pvpLkI*& zx}+<5Sag^CkkVaQHoQ_>^Xh8G%v5;7Uu#YoJ~4gMxRugAdbsxzeR6pX8cMbfeL~nj z6FZ`UTWr7|2VuMVa}jLEfUE<>{xD=+t?Wk^Qb2$XLqUWi79QRoLOU`YO^oU=|6;$~ zCz^lRIx(308X4(qe2eKGw&D35OwPE+8|flvTboSp0rlRowK)3zQB^zTVwl#n{hQ?n z;(Z~V5hm9AQ#iF^4WZGYjW;7`>*Eu+tp3iSWTkTfnkog=CX=Ko`^yA_MBacYmwT_n zrh0QfHT9B=f@+|g490RJ0>oY1Mctx`pQA`b}At&e4j>A)JuI}&_*itGur~%YITuhOZ;R4&cY)ukw z>k`bzPoWR|_hmS9z{pN!cD3N5?PcYu4~9sEO=sksr^d6K<*L`MRiMMLzuvy4jI%Kd z-5ACqrGJ5A>TW5wstM-!j>Fcy86O{y5nkOv{q}7*s2HoXxXI|hb-!fiX3RX+q?a15 z@6G}J)>b}cfcDFZa@vYD0;yU0?D7!q)sjk9iA6h!EQ9{PYYfnIM6KXDCYW0* zRY|tL34wJ;E6^S=tPm3VR^c{X1qZIO|L$3N$-@USZC?#2%Ef-#7k&w1gwlhSn0;#r zj~_KXc5;T)%kWi{J~bpxdId9Z<~}Un78sBayNNP)$9{wxgFz 
zL}!adKMwHL>;!`PD^o`cY57bpOc+7%%Dk0}_wH^kt$!X*qp(LUs6JN`EF>>vF2I7k)Ru(aYWl`_2myWU8jp?-h89!vgWtg7N7l>b^2OX%9; z`}B?KF$*ry&2-|6AFxW1+b&1izH!2%2GTvlc^PW<(Bn0X^(M{;bY58Unh;q|LWzZC z^f|ALJMFiKcwdkP#?dXMML!(a8;m52**3bj7%b(XfvI-F9PBXmpAosV>9TdsjcT|O zS4K8xTZLYr=P~hG6xLJJ5K?s%VYK`{+Ud>y0?)jW$Boj1&(iNtD8v>bzRv$NvC#fo zM#Kh{f#^Wxh5m7pt4aIs$P~kc{t7COs?!P-?I)b#o=UWwm(W~yW#njyC#ZvVa~I2} zWU^RWW|v2+YYyfe(eUZ~Tbp~oHj9G&9fJjTHoTk_2HB*khFQ>`YiLEklH}~PwgiCz zF}zS;iS}Ek5VtoCWR`i;4#@4tyox$b^eWXg4v%G9OLeXEw=64=HI-i6y$4` zx3L+lY`8UZ#q-f0K+&cD z@O#Qr7(S6TGJ{DUSSipzVHNP%7@K4L!DK+tQhynIbbr20?+s=L((l_yD&IA-`@|Fl z)8aqB2PF(3QqTMH@{ujDfjLwy9;UE{1DEhIk{6HDCc~~*r~tNL<6~DB}8;6&7qp66nJYY zErPXMyz)Ffm(rXKPQBV2L@}nxG2EN6&VN`~h!H9lch>+{Qp``saQ!lFOKq_hRZ@}1 zF((H~t0ca(_Mac)mRPi-O4oh!Pij(S!R&x0HV0xF9chZ*K^XH}iHX8OI~i&ta5`od3XsSw~z=?-76;D@-f#b0nXBu(tPCF510PoaS$!ia3qvqA!s(Q9n?mIZ&GiKVen#4o(^@L$sX9m8~b3C&}KR8O$aSF%PvF ziRviSUoGzCS2Mm~-ow4U@jd}Ks7o{E{8~YJ^I~)w0Rq;4$3<5zj`a_Jo zM1PQGI1iD90L@YkHohoaSK#miC)eW`x|LLIN~yFK#BNW}`nkM0(g^n*c|p6QV*W6& zA9#lHd}1^^vV>tr0PWOxTaPVt(2f{Reh~fBVL=~a^G8zs6$l`k1@#Y__kc!06GVvg z>g3dxe-c+CUq|{Y>_7d%mw1q4iga!rfk}#HK-LU_!7i>VtXb{Fu=p6cu@4achtOb- z6p_6h^WW#>BM*zYm~d*Tpqb<{t8@A{(#9kYN>ySLOE~V|LL0Au6WQK>-2)TunuK%% z|201e8>}Fh8?_YiRspiSH4Y6qQn{4ly?1>qNcT@?UenB8&56GJpCxZqflbHNLP)2~ zXV^0mHfMEJXDh<51{L5V^fu;|r`5_iv&@(4fCu`FiA)B(pi8UJzgR(VabASXrAj&GLF}$|L{V`O?gSF&S!mF^=vCK@&sGF z9%F=2oOm|31DKk+roE&p&6fp6%^7!Cxe3)U|V*{*I)VT(Z3L$(yJu92KlVU#sPfUu}BXU%lSJI|&x_!v~#jen-ye!6f%ZS|l z9aIJZf$wp-lhrwOGytJM+dEePA0TamHDtxTS6fNVQ|EU0^b55w`RSa)KFDhXsDmP0 z$uw^r!a=qc6`vrS%QGxP4Fr^bP&k}*TZ4pq`4ju{gK+ej`ZU|tAO3yq1RrDsSnK0* ziK(0^-!5Co-%bVpOME1v8QI7d`Q7U1QQNcG>f5#P&|){fBYY1~^LB=U$WFCVev9Ve%L)d(GL&jgbiMA0fJnAu?3*J0mf@* zdb7cLNe?2F#I>_7Kq_e$djbY-!f6LZ?YSuqTQdG7Vq4jIMpO99q#^lim1rPZ@dA|8 zR^K`(aiGx6_GmK{Z}#lB=sxYvS$J5}Q7vGcBA)%;uw2@G+=J+*OKWXvAT|afpi2Dd zuxv@x@{?_no6G6+mr_?;*4cA0wEeXX{6*GQs3n-);cxR&hbe{(*!1(cps z?|=+bi3jgO2rd!JvG`3!YXze!aXwVI%OeQEr=x&Ne1W+g8uGYtFh2ddLv 
zt!b733xq07Ng_2dqO+tvZTGg?+J1c|)qx*-XS*rAH7Rw-Q+V|cQ&`!`G(H&WF@~Zy zhn=9_W>{PB%VAe(981T9p*H82AD9@@1z@RUYp^!t33f?e(H|Q|FJkEYB!M zH|EUH{DkdWs25p4eG0?wSjl9vNab6gz1ssAaLxa&fQxqAj9^;{jv z4A8I4Wq;U9Dafu(TUOp3@S`kpMxlAv{*KmvIQrdyd%g4)#rgQ#b4l26^};E#J8Tb(1TdZ~lq2nI10aN;c9UO#F)pUQDB&&RGR#a^3E-R_z4 z)MI9Q*QF*Kh3DR07IEGT5MzfvM}m&l`fijT1fjYUypK!`5NZH`dlm6=nwISK8u*77 z!31E^Qopz$*-xT%7nv@Vx*x4M(UnC7bu;w~M2?r6VucMq-}lTP{va+LKNfl6lV;J2 z&!zdx>T7>oV0+ct$PEXkFG{;xD?H&hE{AuWh}YVC4Nggq;a3uB+s;hS7+M`jh?ctM{VR}8m0 z{4FA7&E+T=v#n><nzfm#?#YZa=jaOj2FLhX&5B*Wj& z2;t3J7xRnm^upr+zt|;<<4cnsg5^G^)rg|X4t%kvIT{B;YH#d_mL%X2d$-gl-DVqC zIL-@z*2tR{aqq1O@Q5E@O3wi6Xt09XFw3W(%uFltk}CK7IiQxC1F+k$X~_CB>qAwK z?tZwQLEKPLl&yIAF*8O%arit16Aic4dZmSbpyluL`xjRtS;ZgB(acuHWn4QYH*fa;>_E)$8C3D=DefsbrUyLH2HDbV4&{HWJt4=>>CCb*DY3 z`;1>65%-_Co}-**0)?>cc~tO)SB}7PX4vDnm4k$9IgmcfkBF809_kJI=UGc_5(bd= zk6^RyTTmZR^W=}iSeU{VwC~tr$EB3nRgR*xic!)MQk_I7OqsRylzsi9{u(OYtKwhM zmgPjvnJQ-W5haq3@KKvC`K*kHDeH&LbHuXKxX7}hI}^h|U=;OiE~<+_`2G08MS`z58}(`$=j-mE|Qn{db^&XZ{7|e^8Y) zemp8S1nOZ6+P0dd&jV8h2#KtHbWT)%8mD$if?M5_?z60Ordv+oa;FKiLDZ>z_@>ow z6Kc*;%_7`F+;M1r^wOpKr?^*LnJLLteU7uZb>UNgz7Gcrr0(S|MF)gB73q)PZc*vq z@7rMKO@Ry?Gt{YiCx%?T!Q+$?VxO6hOb61=uGQ=lU5L#;-t8pS>Aox^8gT7EY)s^S zaBFAU{tN~~*9Q}y6ZX9OBz?z;VTkbDNo9oSa}e;Fi~mM6irCxN*PH!p)#l_}X_(u2 zS+il?z6)zND{HaauRAnnO56M8+NAc_oV2QLWgBw8y zQY8cJ#A7mC>_b<|nGeLMmthnSu143kl=|o@bXI&in$PtAgoj^v$gP8WfQqM;LTJ{$ zC=aS?_xNm4e&RZw{pMj3AL2Pd5QGp4UB60Fx$>K{*6 z#_&pNh(a3wS|N7t>|>i)+f)k5iT(W|_IF7OYIQ}Hz4SKQk#&D1s@WPzArP3QhxeY> z(ApV7^oA-1IGZNMd_Td<4i8TUb9bm|RNED)<$jYAyY!_bb;G*m7(4q)BNpPzqhI+Z zZbnhCAfQoW{->qH3{SG{93K(9>5@2?6*aB2%$hXANnB|-K6Ws$D}W#wR9BEj220IB{!<* zRa5??3|K3b9WKUc_pqzQ-aCMLPsnAL0Jw=XLFUL{q@|L(lkCh_fs5y)EUBK@i$m|_ zpqe5GeAlB>$8LijG3X~XEbNc~ObTSDjG~fk>gmuv7}GytGiRgYf|Dq*L?l^WQIE@@ zqxI5hoN4_3hPsE^4Ij(Wk}`0`zi8H+2EPO7CK&7s1$g?{&h0;q*8&mMtcyYZhCm== zw^LuAY+H%-MrWwcR(+J(ewuc;@tzn0_&G2=2Iydr?8%_RDTMY-dTD&OodwrrS95}_ zm0FuPxWVKNfIVhw1AF~}pUIpo9yWO) 
z{cC+wWVP_uHnE5td0?mmVDjm@ohbQD$=_+_`bEgu$>7+59svkWGJ9HL`kwz29dj%s z-{t3SOxrlpFc_a<5h8T02>75#uT2D;bkoLDLcK8E_p5_`rzYw2vqjeDT4~+`a4E&< zSPv(=4O&pc_3EH|n1MMJxDuP3EjhnSip_c3*$jjRXs%42`r!&Uwk3y}+7sE$tTj~6adI$fqy z#@O5u4oZ-l7mIJQ0>9Jplm2?Mf6|xlMScK%9jb&<*Vo(owtp-7biUiDBJ*l=qH1!O zp+={I`>xfgp#{gR0U7BC;glD!fZyMkD&|T4&~yhQU-VIzWiyH`217r?auc~5S?qv_ z5>d&O=0gGO1O-jc%Ak4q|7tffkdp_Dj{aDn(cIe?iT;D@w|1bFRIIV{i$vV;S+-%9 z`43$E4ohh|W9H!@YHQ^uG-4=Ay|<${SC8KA&cq~+j`Us+Eapb#QcmSj{^SUf;=q9z z(M9wLO>XhhtYFH$YJRJ_`f+J_xw^WhsAx9dk?QKlXV_2Zix5H)ou&x&u<%bG0L$F>$wI#(~`=jUmjV%J|dAe~#c*-S{AU^YBG z<&Vmtup)OEd=rP}EhC(y?!`yJoX}Zh9L>|U__4pkBzMCs>#@dZnz=Rq{zEjh3A4)0 zK}q_U$M0bE6%DC7B0Qwj#P$S3Re$6V!5l?f_8lZj4xH#`A!4y*J}G%q=r-F|7m?o} zVvf0n;-AaHZm5LoPIDIcR%A5DaLgaNTFExNw-6Y2dp%6v>2|!!J zb6d5M>Wpb~_#bsHG)jgj7pT^@S)vnIAAr{Ni<;VJdHHB{5+8qm7^~*G{e{JIY9XB; z;+Yhe+}N@b`hO=`A(j^+M|$oORi=|Y4q|DE;)wPS;w5feO@Y9y8k}Vzwm(eB+>18 z5>-ahXmG&5Zyg_J3)>motf4g9G?LkJyw*z7P-i{WqMH~M{?zrR61DELe9=41`T`RnMW zyMh3M;x(@NTY4z1=Qr;@>YPg5lz96tLAH3|Qt!W|g}=ag)@zpRk9S>vJ#faNqQ&iJ z?@CDUbb&BE&2Hsph|Z*{S&+wvC%N}$QTg+2+E?o9cwJM*?AJ4x98{6x)-o9U@1oST z8la_DA^J^F8UOcJtYA&}GpqjjG|)rF($UfekB9VICQFn=B1hC{jCIXi9fN(VQTbQb zjFkRI!%3_uv_#^=df$3ahRogz69w)W{a8^6gF7?@$er0z&xjN7ni+&f*T9sZ%!~}5 z?ru?l8T_7HdqBGT@l&opV^H*Qj?6_}?AgJVa^d)1)L1)NO zyKJt?>sUfd&(x4m@Vu$w6h~1&o2P4==Yv$AT<=`-zpz43gHTiFs$>p_W@zwTEFf-5 zl6dAHY6`C{yh|c!_wK7j)ndN#i*Nb)2sG)OntStPh;M0F{9G%H>n`gQ8eyk3AZ@c4 zNb>8iRiAtD3x_Oe?`HTo2@W&9jQ)&@NJ~fOj!g@w5KV|cRJ=r+c;Y(g96>pq3bdqs zyn*}}-Le$a@!GJYL=5ZbX_-7EK7)V_vE$(uM%p=CvS#O_E*`HP%fl@+ry{JkD^&9= z>TS-+#k)NA_6WwBTx6B7k1Q}WrYKhD+_HCi3)A1-;vjtj57hV{PsKP3QMRAfu-AvP zr)dO-#;DJhxd#6fQNl~3Y-}rIqoYyZOI%QD4`YHSyg{WcGFJbv;QHeM^(=4r4~)+4 zpe~t=D9K_;urO@Gt zD)CW=sup&UgK(B6>8j2)B%~|(WLgtZy5d>$)%t-8(!65U#k_wd<1v0claE*ue_k~; z<8RTRU;{;Rbd-{DI2oN03s*r{=`}-fENmjG%;ui)V#W3P*`B!<{4@Cmzf0dNs@4oux$m>(lWNLg4t)^?jUkWh7iAS_!SxG+ zZa&U${!|iHCFhpNm7J#^F-vz8p{sTC_AxG)f+IVFU{BYL;rJRH%2kLCKI9p#pTGy= 
zIw(vgt2mFw4jrX=z|MMSRf~CoSTOvAsmS?lnw>xvnAiAS%?jdQ|cWexgiELQh_V_)0LGyOdi8k|* zU;NLCyz)gTg_i?w?MuH2{Iso~1WFA(}OFnrs;TV;*fXP|QI2{nJ=HjqoR8byAo4FDFDg3j8h5&5m8rlpv{61=9ejRBnr~?doG^NSakYSu#00Hq z^5Jsmd6~_I1?P;Y$|6J4i*a%ZfDqs?()-iwI@f~t8Oi_#N^a5j5!p*jK%e#z^ zLLH7a)c0?${EGYul{1@2krbm)2GVJ4r)TAI+;4sIAdLM|yBj7wPB(i@{3-{%?U@24 zOYLiFIX$>!U-pTB&ENJ*Bs`pf@5$qMT;6Zm%3jSU4Wy^=N8V7Ik-R1~rnmQIWvRpetT%dnjZ1{`5d$m@W(G`p# zj=ZOTYtDf}Svj8y<4mS!$sXqE5<}Fo1l78mvypIGQFkPZ5wECyE_}m)cikr0?R9b% zRHh9k*g!~P)1B~&^!=H52_6>gGeKd$GJ{he*{zpEon(k10L0Bu5(k|aTeNAc7mmcU zJ+D#$g?JcD2dyKz?q2ABRqVq#79T53(6zimX|cxe;Y8R>%MHPN z3lAcT*!}0Oar27nVoG}WguzDvFWw=Fn=$i~{)u9&`oE~che5?&lm&0QJR1;NjPIT# zXpbjL9X0s7r(gYkvcb-d`?+CZiX8A9-%&PI2O|p0v>xXm`mFt77~?f5S0YFnOPjSr z>@?oykGZ8#z!hENWItPC^s^Fy{asW&i6>k7*p}!7eq7B)wma!jnC>_$(U7Uq)6dp`U`mR*$EY`lz=aBIoh5ua~JA^9a zg0#9{Vq$#|WF#YrX0}ahi zaZX4AC;#hu&9JM{M`srMZm=$q7n zUEHKq+|}n=vCt^*%FF*`k9=!6?2gdUIY`gV)Wi?|2)SUVqnBhJYvSDR10navWT_$i z&Uu;CWm}51tSq%>0(0%CE@Qw}?o#nhro4qeh3j)Elnxkza1b)^N4qYng5eQsKk>6~ zgQcKu!JoF}QSsizx)0X1L`y$aclF=RX~XNbWrT;m+uyO}b%BQ2U(2;_Z#vgVHla)0 zy2I4)gnMnAP4vkps=}L3$oA3nCliI$%>q=EaX3V4u}y@VCwGUnc=ommTNl9@6;l4& z8yn>6dun39!fN?38iP z#rwMT&IEe1O_rGOWu`F+34Hm-A3qaVzfwza$)wR?_yFmCn7~P#&XEF(iRr#by{G<% zF$qG2lPNODINU%~?G1P64_TDBvf_C4oH3D4P5WF+IX!srQ|8>-vKXbBgO^5Ojn23b zp0#OZ&c==uI#`JCQy)BJnxRZ$5v5QVP2S|aDaK(UF4@>r?tz3SL6_)nj7*cojQ{fg z3KBE>4pM2HX{PcNm;?kJ@yXSt#w(D4NWm1mw3R(MhTw>ShC^l1zf;J{)=2*9AArwk zM@s_Km?nAOe6#e7w0SDdUBC)+6$?ivI&>Rqc6nV`2emGu&99(Iu^=B)LbzTEGfO9-bhhiY?kv4keW4<9SHgQX;ZLLg>~ zD*YSndRO7lO(E<8KX|iZQ0#s8DsY84c3+ERpfEAAQ?#}H*%wcrlpTtCUq_1B|5Y32 z>w2XVCo*lg*X~X1+#<*9Pnc}2$dO6ZU_Kxm{`jZzNWZ+ZFFL@1* z<`hc*=crShczYBW|Gn?Y34+^xn8Cx}yHvi)g1cu}(QuB3GgXfOO{+^*ScR*iih5CH znq_8HJ)UMQan3wnF_DSwdH@AjIZ57*`oz|ro+*W!HWaAg4h@diW8aulP~c_^JB98= zHFoM<@sn}8kB^Cu=V<4-3a^ImKZoX@d!ol-Ma}-DJ#7bUfnR~Z8T$inWv%_g!N{U;ztPrfh!B*i! 
zLqjGQm?pEO-`e-s&mFz=lm3?F^@cWA7Ib(I-L#8etq128= z;ny&!`qvOzt~%n%)R*I|Beg^VU1#?UsIS)GsYFWM}WT|eJeErHhjM^gQuvz z+5P@rEFq6pXI#3M%vlQhVznL>WF|oU0&v<|pjLD{UCC10k~V*PCuMHty@or<2B^5dLT!n=Fo^5?Sj^3+^-v(X#N z6c-^j9Udtnye^WXL?5A**hkOJDov{`n6FvBs;4)sVajZ$yZa413-?j6Z@SN&Ln%q$ zjUHY;`oN*1mdGi!@is7N=@yGjNrx2wjWJK7Wq|`jD zrWvm)66+4vc|@Vllb8;xb*I=i)kBvLHle=q>R}b9iI)&$BgDg(-gQG24Yfc2&zoe} z<95+PUJ1uleM8Zlh0$R>vQ#Q_qHZ6bGnj*R$YwoUmD=NO&VyrJ)wJ8gIX1&N8b9w9 z8alN3Xep3m{zHZF{*{A3a1Ta)=Gya<#1e_O*iF+l+IcofW~~o0&7iAm%5rJC=5eJC zN^>Yz$mLHLg@H0>4s;e**0Jbrq<*!+$G(STJKAS;p_8k!H-t2LJLl>M!5oY`*BmSc z8p?^^j~a)fqe4F&c-q2A8zTRr!}a@G@XlMhxbw=Zec|TyGsOnOt9HWDM9WFSg6_XI zdMv7chDy4}4Kj;c6<4aaO^&rOl7AQ2;J-oQVS#b~9{q#t{l>)~Tqt42Ux8(honA)6 zY0Kt=GwM>a>I-!)A(#x6uMS{~d=5(+Y6Fdzmu01+yUyiuquGWYn!=H*6+Xj;k4PSR z@S3W?c-LEFAfWFkv*=~)!q?}@rVduf^0jrKE(+_8*_PXD2KaeSd49_OTmTi|OOcFO z&>_)SY^*nRrTE&?dP@H>A8(&BR7I<}WA5POAg~W*%V-91{)on$s5;H_pXR9X4+p;M z)+JUd0j)zN7B&&3qrvgzmLoTjaG*#-k;exv$0Uwrw`8|$^m%2bln-2Ut`Dnggq8Tn zwufvT#UeN~C`?{2Se45o3+M@>n$$KWK~%wfw~Zn|K}wILnw@W?!`=^dQd zI0z(zZ|h%ygkRsPF?Kcf+blbq_}P>nUoO`9Gfmf3oq1SNIzuk014*2K!p#M4GUnDJ zy2tJP$n5QdlITie-+I+`I?L@|i0(|T-OOE3!UM?!*X@+;+014Xt9b&MHh3fr471dv z+0)ZbX(^puHY$zF-6<(oBMOd|`tJe9^!lgMaAo|aZ!dPJ7VpBz7fU@Lv(s@=v#ZE@ zC>*R{_G~!0s=4Dm2b@c91=w}ytcYxE*I}o84vX7O-rXzkMT?2(jhd@CXB1W#nFlp8 zzqjw{K5>=FQNRCLY4YT5i*4vwbinXOf3H_utJWK1Z3z+H*_+Qh4^mx4K}u7TZ$^uS z2u#e@$`vk{0)x0_vT<#+*;bPJ2|hgBe1ISg&V0!OAaZ=gi=a|_+HPt6(UG_l0har<*R*zYEc5O|~fGVBj|5WK--OcVha(9Ok z4XveUfCRm}nD3r5C3RqyDLaAPkh%8iX|uP?!|7;QvY@Z?B*q{xmrPz&u}f^9z0`b! 
z|3`Sb&TthA0*amG8wELqpH)NsK;S=DWlRzpDov~9LO|NKtnBMjaQI1}D|iPJ?(XYM zQCWUu_ewfh1;uxX736oh!>zxrqKehR&!BKV7R`)~j&`O!2oqXz4$n1GXC;5MdC0{7 zDeNrl+YMWcG{r(`svKE>62`zwcHoH6bN!UNK^Obu;mKN&lDMyK>s*5CKqz zQP`JkuVN%1wMf#D%{B+vl1&TEbnd)|joS6)-6s3aHwSnjzo$kB&bKr9lLxVE%_XJ< zbwO_#W^^}(iO9ij~CAsm>j<(zGs;&k;vn_ein@@Py1_$ zB`nc-giQZT`xe90H z`*XuR;RPRJG8b?5h-OpJMkRk52`q~)s9&ZH}+NktDyd{=8r1hZMZUDzPX zHa6-UyS|PTuQUH2H6P|HzSI#zgQim_Q@qDFlxebry90&+V*JB7w~44sCd#_Cw?-IG z55C6nA?+5dwwtLPZ@iTRjiHe(B!IlW%LsX5OnpK?;QRG`GM82n8@7 zwks`b{XKwFvq>q-N+b!ibbn7*gPo!JMrc+2y$$<(+mQ(R^7-|YhIQ7He~&R)?!=T= zBZ|2sL7g=T87IBJt9Na{F4Brp(;c(yOWs1VXz&m)_-*;pImJan5={ypdu-$P%!k{{ zc@4P_39P*3`_o>H?{B%?rH~i^-<=5|^ML2e04B|q9{l84>)@I}dj54i*NmXD_Hi@D z<;S@O%l4e{JjS`q@kN&Sqe{*;yRb7zNW>IJrxO=7ad%6V*x%-Evqd{D$p5-lYtbQC za8R6TYNd~-l(>}zoM^m$3F&-0b6Z(s0e=&3Hd&9HTs0I+-VJ)^WtiI0Pls#tiTCdtdwPttEbxP(?sJH)zaG) zQrguq45In&HHy=2gKck|Z(?!v`ucOrU0ngYh1;_ET51hmgx)Le= zzUDaLX@L*na;$d=k&0R+Y`Ue4uL&m`EOah>Dg_*J!0gul)gZC>+!Fkjc*XS2{_e*{qp2O}1_?DxH;mSD*}_u#p4 zl8NTaE~P!Pu&EdcwI|L|$@TfylwAd9ZS%Wc;*N zG~iFt4(+`c$39f#)0XWoiy%97@Qht-W828I>j#+%X1lu2{29l?_nCCtbKfoRhkeXs zk8p-@c5#*LJ7Ka?1{9Yr?(md6A?PiS|LN_+9{)uKPv0o3b!+)*x{nOrxXFkO(947K z&ka_B(#jqWYLIm6If$!Rua2+1Pw5cxyF8HU{j>5WzWC!4UNF?ric99%pQ8txE?#o* zP5CF_{grMiHMxz{Cy0>s>l{-@>}!XsN-|2dnUSTd`4~rO)$7gr!JaHtCHJ}jH-%|t zn?|>%Fo++QU5yQekTYlUwW+h&+!Y;%3jZ2lDWEJ!FRgAU%dVZ`i34}3cEHx z+^dOKw~_G)rWE)zN#UC>Yi>PE)&36T1h-#pp}=0c3ua2D7btaH6xQ3<%cCHWaOw^p zZ4h#Gcl9^hxKqcKRjhJA5H>WS<+Tzz9ai2Oqc*x?vF&@w!Jz1{O%!3T)prrwe$Wvl zgNFIK{v4hh>S@H3;&E~scZ}7R-*OsW$R!|+CF2&qcJc8;&ixaMa=yuic3CIMr2Eq zapl2#`Y2EJ{Rn^>hb2{8ClVA`N4H?b45VS@fUq zUC?~)#n2`|O31}YPoRnokf?dsikfIy# z(PK;`^Mkh2MLw%qVwwsoJ@PBwebpj&NT_B=@PE$6VbpVX!!fdwqBGX^ru~&dJj5_n zN@i66_hDM>$qYc!56_6uxhExIM}(E{>w~;M4MR>qGxsbgse@LbzG?5~hWT-scy+{irc2R#IGdDUVq=TLx9L>XYEG zEwxmz$#h+e-tOho^t&WZ*IYQozpc?DrcL&9C?Xy}L#Mwd#QzM=clo6r%3S&IO89M6 zNQSTP@s3MuChJuwXYH_|PaiYF9?$LMmuu7#Hy#dGxjkfUV>#dSkyDM9!pd*pLh1?T 
zRo_9@V|Nie-Ab}N^WPzZTd;qe?m}RcS4^HQ8Kn_E>qV`EAbbmYn?*vc=FOiJ-&y~a zpzKj!p!Gy|B4qI6E#Jw}!rNa*uO7?rD=JrR9%qm4&Qp#c>-En&15&w^@wM>DAjUYoyKUd+BDUA0Cp(GKX?` zy$?OLfAk1OB=^Qs;)!6Yc07Kf)2qHclK9Bkz8@B1D=-YwI*Wa^2=OJqpV%xIX5j9YyLfX=XYhp+$_Ps%{XJ~h&0%^% zPAW+@Ct+dVu1d=6qtJ4pg70<3ty&U=wZ`q&q2Jh+#8S{oDfQJ%7fO^COq7nov9lo_ zYkz{Ee(CFdgKL58tBkTmy{*5)Uhdyh4&Ks3v>ZJ^{%OM#OkYRw?IV3-1)$${>kiu5 zGJ=?_3gx_aU%bHQbJpf`SMhQRF_k~gh)YZg4xBbTzfN#Gv^vFE(XuhiaJiyv@sr+W zW{NEewJ~l$lp2AL)V`WrY=O6#GPP}lj>w1)5%s3U}u0!}r+ufunMhI99`i`F-P@ z(oCQe3m{-vm)p~=0=e$G*aY@k_IWZo0`-~(GNVLFUC@p-F2h-kynlDIwf;hjz_sQy zPLW)+YNtiIeb(e6) zTh0ECRHLQL>$Pc##1Ddz7L5^IS|RIws?P5a8B!}kqzXz(;n|5?Zmw)=B4-;z&kuxc z0j}Te{!dIwmF@I=%H}MJJEW2~(mLd&I*7HH*wAU=_TQ`>oE*oK!IGS)OwY5V@_H_7m*XSi;6Nx_!UvF7?HE+gzWY4)1m%&18)~mwZNF`Ddn?ZwWpo z_Qad^2zji>Ls8tNa%9f|N3e#0E8D;1(g@OFbHw!_cHT!hQ`UjO3@rgt{)Tsgii8rf ztxd@pX&o4miJL6%e!S8o53KN8SUDk@Rd7!edUT;9#*>{XxUMbIGi2Qz)o$8HdRz>Q zGG*ktoJ`v1qFjgIFshB*bt+mOD|yshEBREE9zDDCVoGKlHs?BX*>8$50v_DHF4M4sO zIZFcr_ex(U`Z#Fy;cBI1OkCXk?$K##dU_rIu^PRWXrT$`-^W-X~O+i0-R4t~QD&C< zNd!?Pc2s?+7nZx?;FrzK4UA1{bjzfqr6oOrO|V^0t5dm%E-cF3_6#v4jTY*zLzd%E ze(b)iqwt?4v?>ifm#)KC#_Ad4wA7){Sc@QATb|#ac$kvYiEjAcW9q-1c$q%y(UQPn z)@kFd=TzlXaI)@ajh7LYiz&#&xnZ46G7PoVA`TcM|J2w#ddbwUlI};4yu%AK?&sfX z9|%ES(Q~SLFPjYFwRB#Uvd=xRt^WNqKuPo}DcMkNN_XFp7I@P1L0`*bD|v3OW|b$; zo?Q3aDe61XP;iu5?0YyVJy_tm{UKAjBA1&-F|8HPO1Tl*$m zv8_%{>9sJJE2$HDfA|!kqGD{Yc}4MR94%GXUUOprtF@c}h3K{x&lAlK9X7eTasy>X zoJtJMQO5;78~iOFftYK!h^QlXNpA!z<90Iq#5ISk0kZb4G%^th;|H)hD$4)Uf!vQy z=(;rtB|E>cRE74J`bdWyLkcLMeNKWL=*5IX-{#EGxd}Z8Lc4;ohHh$Zq0rOWaT3-u z%)r15L|UHZ`LokTzDy_4@H?HM4Y8%^bcYBqR{ihqbr+e}Cj)kQdoM3GSae;5mS;bK z+_34nP)qz&5Ps_QZD)k~gO`alGg)7}HX#=OOf1P}de<@J+9~B6fz~nRHNmeb} zf$vmx@yD!i=$^E$VaCofu`=l7M-Q=>{_*j}!MzDE0glo#KKRs5>Qh*GvQtlW0`crg zGwxhxgT*FSm?sVBH%)3^O|YG2T3NB{;OO{QGe%`^_kx8 zmMlZji1}Lo8ndVqJWOegPI+1dxh{Td4LMH->KTNG`&i32OnpJZGM}Z&IwT(s>?QVE zR3&D-#v)@urE<`NOsp(+qPXn%6Ac6Y*H|tfe!zb)04buqo!(;a=Wz4z;@H80RBT?1 
z`MM|TLp%u7!7PVjq>K9r#}DU%O{@qULl#H(OHmx>e{iJY4=F&PJwuqr>f%&5u|z5< zXA6;~w6#*lU7+$rfy!e6Do>|r=Q{@-{0s|x_XBf+Im{s|FsHoN!X~aY=IrapS5;@> zLwC-T6rZo=`!VP6<$P(*$+~%tNB;kR5PX{OZR$62L5y^a2HPF}`)R{khs+g9ftU zMPYxlq5OqB(VPE3q08X*2Oc`-In&RvO+c!=Pocx>wAGG0qHw4=DDeVrJmdX_VLj>1 z-%K3TDDart6i&Q*^?}lMc6M&Acrh_Cpyq4EX4(HfMJn_-Sc)aiB26pz{_)|Sn`>m? z1yG`y2GmsjXnYN!{#WQ!$Q55tPfsQ}iQ&zctTA+FknmJ}Yar<1cmV+cFpxZAa|6^U zHv+}rEV5MV2}W$hZ<(56k-}%9KQy+qWFwAjF0Uc^_iD6;5i>J0NX<&az@TMfVv+#D zvABJDf(xdycVN5ZD3hi_V_h;bvYYz)yxiPsCXlmE!(FYNG6Z)64f{|~U|Xf2SX-S4 zxqbTEgGZ1oVD;0(QE!ryb5g~Bn-`SH^=zhEuP1|z#iaR_l}TXzWnpFYfb<;^GMu%I zFy+ptRvU3@Mn-KL8%_g*-b~BMo9EpHNjFOO=OP`;I-D0c3=J*3%oaLWvW^EnI+}(W JC2BSi{{g!a9I*fZ diff --git a/how-to-guides/debug-model-training-runs/tutorial-images/debugging_regex_search.png b/how-to-guides/debug-model-training-runs/tutorial-images/debugging_regex_search.png deleted file mode 100644 index 9278ebbb811154423d71429ebc4e87fcf67dbb13..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 102276 zcmc$`WmFu^7B)Iag1ZKH2u^|q_uv77y9BpD(7~M$7#xC2a8Gb%a1HM6?#|$RlXK3S zbMCk9x_@skX3bP}cXidSZO`7jiTI!_i-Agl3IG5wc5Zqq z*cY6Onydt%Vwh|X)#=bsN;uVaxp0PvhICnf&T z!|-s)(_0bxBzihyH-}9f+(n`%f0X-G-kx4J%0`1|f0-GQ3~b#mOj{_0v}h46>ASY_ z9kWg8NtaI1)S%Sz$m`LVIwIrHq)E|GFW9w;4iA2RYnh~I(Ua15aO*J`bn7i*ne;Z^ zGwVVmOPb4WmeZ`gcG}8nIBSlEgXZsX{w^QvAO81XO_8Gid(Jsf?&IGE4defBr#cwr zCLIhGHY!qv&rSb3q1K(xxUOBNPyCk?mi%5;!Y?l)W3}>(R*v7_S-fgl_(_IGNk(1C z@b`vuTM9;1BR_k>j?OgN8XdgBENB9lgwp2*7ajot08pnOr~Q9kO|J@EB8aLmnBORM zbTb)g;a=n`6YWUlBRIH+9tB_?Evybyx|IuKZ`R9lgyte#}ltaKq$GMWh1rk(;3ufy#xl zb62Dd>3ZSKC&%!K@)1>47FH|l@Qlh9?Z%wB;B^M}9m4gB=690$YBWM%0SiSS@l9HyI{$b2Y3S}U3c%^i zG*eqe>YroNQAj@bcRuAK24J+(5$m(I-+b(Z>El^^2e=3UX)&Y)(hOH{s}}Kh^$&>) za+7e6X4p#tze^5hKbtI1ROg+DFNOz<9$ejAoxZLXVN-fv6#bPn*hKOk9)Ovzv;RML zN9oB;21k6Jz3gtDt$xfx#&Tp&$`WybOXeUlJ=_J3wmM>wz4J!pbJUEudeu9UTUCW< z){EZm(|JeB@ytA&*-ST$_jl#Dr1fz{GI?W{9NGRIB@d8Kws3&_{H>e&h0^YKE{}pI 
z&qRO^YR=@5$=-GvDu{pvFy@4*8IElW+s`(aMnw<8AP&#h;q0J!b}p)mMRiF@E!PxYPk}D_eu@WVYd=bd zrO>~F$}a9lpaS{@G3S-C;oG(bhVg#r>~IK{GdlKlHWkjL{bEO2jycRLoS{}(UgbTT znUVg_mP`^iX8ey*utER7^uXl1bpFp0hLfBLkD)8)TOsE8yrkcC&Lu=wD=Q=a^Y~rf z-;lU4N$WjMV)UYliXM)z@ejJXF9H0Y_qm5N1vyr_Y!?l+_uVeKJJe2E?%7+O?riNA z8*mJo-4eB$58FtqJ=TC15aPiVFok+G(!h##AR8m|2efP*DI@eO0m@3{<46nUlx=RM zw)0;FL#ayLzYQvltK_}4+vlX{gdj5MZ(S|}pW72ZJ=d`QHkW<)$qjzR__~O#eeH)- z*{v5=bOw^X6M5T^)bG|@yX*3HNS(%je}(4{z71k0X4X##cap;X+V9f z{=2zeGFe;hEKv*%M^csNW0CZ7d*x~WUHPmFHz`5EYo9df%kI_Sy%bRw?%Y8^xvVJH z5s~PvLN>kLhdtXeKi+w@Y|&^aMS4V~ydpHMdjs+VW%g}J%U?1VeZKMTl+kBnZ#M%8 zeJkzme*d^WK^s2HiW0htS6ZBIvCpg4drN`;Hd0e8>mAI z)%d~QeseZaW!`C`08ap5mUnu%hjO|+c(BSPCM*=S1>~!9x`ezAKwY~ce|{1kOh2cH zo;jNrDj4aR_Qaw!S$4L2oQe|**>hR=bw@z=Ej^+OuYjoVYtJQ?VZFSE(2}$t z7bo^2zUUfqHLW)b_I%qDMH)6^2vjmW$`P{DZzB7SiR(wd(&|qysAxT@if1gBr%B~G zVhaB;{Y(17Tt@6{G(TpJr!sbwFs6xP^) z>F4dqB2NZ30{FKe>KXaI(2Lw8U|_^OJGN*BVJ3KoY(?*ba|w}>o^TW}r_ac0d#f#y zHie}WJAi(a_Qf-nrnhP&d8zS)hbgXM$N5jNDD=9l)heGN|R6K~zqBS`zFLfnr7E=oDl0$No1 z8=GklaIy(=GS*%)2C$x_z|ryj!%A4%2^>J4cc|+ozqk8-&CIB`btNTu7d!iUU6b{$ z6Za6uNE6?};eE#5un;+ThN z`xoi}Q~>o@%H?!g3iDC8$2H$W0g}Jdw+KGQO-0E&`UCx~Bc(bz5{kKerg)o*&n;oz0m^Em?6Zk(d9>&srz~RAje6Z&1 zKmZiMV`rRFEPJ?d7PMAV0HKU1C?A16tzV(Ixj5Olb&;OyuihKIs=&|F_F9QNAE%q3 z2ZTRQ97w>#559xz1~fu#sAd$K6l!;7`1YIO4a0gT+ZFJMSuUG~OnoJh@h?yHt?(=5 z^2&^eK7(fmxM^tMpKnL}x|WN#^?bZ?=25ZZ0@O;}!oMaEn$wOXG@^kb!|0=Q12!oK zBSvr^5}5bk1Uzv=2PkKX?H(9(Zjh11-s-&Jpqlj(C>~^0@ET3cfFc4sJ>!CD{0nko zuyiS(3KLVZu+4N82jDcauO6{=eqIy)ej4(Ms7OdQ!y#p9#PIC)qAt21DvPVI;w7tm z;-QEWDq(tvfS$?wdA|O~Hs6vms2*uII8hfUehAzy6!v`@TF>mnJFo4RU|5SHqJw<n)u|34^_XGydhnY*>U0|-Be#+)O0(5;UNL3Q^F3ay{!#ixI$aaVX;#pf;V>%|? 
zepte59lYi$cXX5xDV10bm42V#7s6b>zN9e?(f|7lQP?xON-JBws)#<~8#MoJ#_`G} ztUXWMztW0H=3on#NuZk{60yN$eN=q=nW`Ky4C@d_Ayt@({QRWEYH>=ztCf%-ar|D) zKGC&xBE?}$6MBG8@S>E>A))hUR~*%a(L~ItGYzKx<%medn0vSOlBn;z@A_N8jfNp0 zLhy?g!>9K@qs-?6!74jCgc!8{_|mQ2bT;I7mwU7v=O1`^+4aIY=?i|C{LuxjI4yA& zt9MXo(LJ)9HK@)Nx3Kpm9@(s83sFcfWr-guL3l?n;+`Au6x8Y{UH3Xx7x@0s-q~6NYI=U-F>tkvc>p zNE0&Bp@q?35*u+w?7Iy44{oKr^-ZbbR9)QA{#n{p(!JZu*hff%+Uv7H?~>{JoJhY% zLIC2@6qX;_8VgTiFm3f~uOS}iE^B7R4^hbk6~{3$&($J=dQs3AFw%Y^0_iwdyw6Kh zraGNhc;JX1dvC*I!O?>%JH`)H+Fd=Eoi{Ixs29?WKVtMuR$nQOd$E$Z%dJezZN2AY zBd%qd`bc<&oSDWtCwFIOXK=$WgYovLP!+V1I3#h-?b1zIy_uaaYkz$wtkNXcQ;Nr6l$ zijub#0?4HxLpXrjVJKNkdDp$1dWu`p;;|(P({I7x4&9ronX{9S&ZRW0+KVp__m6VO z?x((RfD$J~uV-P!k_}f6BYXxS!_L-WEh`3Mg4YZ|e1Ux2yIg#{lP( z#fDKg*#x|astP#ZJ9!s#f_E$O$#G(Vs)47$U+Ym;C&DHSX~Aj<*n%%`%1uLY!43^G0Df;f=(`k69%}X!_C*e57 zQz;jER{GNH2`!H;tCabr%QLUW_(B8xYqVrz+wa<|^9TRz08Q2hbW+|yZKv{D@Zw2^ z=r3u*&3a5QP9#wa-%4cr!_Wbj!8qQSMy;S>dsVCa_Uog6+0)LMpw)kPHJk+RTFPFG znfYt^ac>vM0{$-u+$A)_om$Cff5V1M5P7BmQeeI_RTEM#`iHPm^(65{7$0x&F1V(4 z(7685ia0zX>?08*q7FlI zj>K-Jk_MAvPZQ-m^TV9(JzGCG=D+JZXHJeQqHMW;K7HTuc)qS@sxS`nIGgh8i?rwd zLC5^xXX?9R?m$1m3=f4|1LdC^HoO1MQ>s2>g3jilAxA_hJ5@N&Npu|sE{XmJVx>>e z?VH8-_C&zvVa)TYNtp9~J5_f?A2#FB-wORJ#5En3TbL(|s?QgetC6j6 zQ=i1bC7ABd{Q~0jJr(|$Q0_~W9ShTF?~$dE-_3H97qb2n_W^l=0lsi;KJ8+*`J+bH zrF>iE|79^D%r!^;#m6#mnv`&Z+)ce7U{nU72w&`3jEcCe+Mumg=$6pE3PBs6G;(&< zTH9SmExif3x;oMc77?rLWA8W1*wW^LN1@IMIm7(4dN zo(qxBsVg1K((u(tKGoFOn@{NS9J%btpvDeOOWN#u97h|MlDe`=@;y~kR@GHT-%XA- zz>%~jEr7jR?LC4>Ift32NzI3Ve~sGdSLe{&IJka-w!H5gpaNb zhw3K#y@Hx^(fj>|M;Bt_*eb=sac%2TJEZ81itq=g1_(NuhQh}*9>UWj)?SX|bthRJX9-zKM@lCbzw$hm7N1h8C|N6K63FV*E#{P&RGwM! 
z(iHrST2GZ7Kb|nCHe=$)R8C$D>$Dx( zm#;lcW)Oem=da}<1tj=Y;^FCVm=DRu6&D&mKsk(6nK>U$z z`_h;psj}p``Kmmv!CAwQ!6ldMrW3g=Z4RGbl?wez1!11;Ydegowd;%Ma|gE=JYT@2 zWbwnFDD8*JJugsn0?^Y0V6CrW%hMM(By4Af>&RsacA&LAEy(P_6kl$0H^9W}#%FqQA3(_V9tSwj zz=jYvH<0vi;R2f*KNqxpfAb>`d5`f6@wn0Sea$L9E)*_)2h_Ho&y}K|5UF7m*1z^{ zlOPqhl9S%FJF|>>8Wcb(_Kc$~+)7WQ@H^HpChF+^3-Niv9#>9JTRiRsFeOIy(p?%MyakQxy|^=E=T%g1Fh77i7YBo?Anml5 zaA_zme0cab;xN0cgdMA?nUNT(|6$}cu6Soa8KQbKBC9Bxxy2`hz8*(QHV`?F4LmF8 z*ow4~Ns4xCQh66HvdnH4NZ?CbiA%Kg$=PS#R1M$12q5)+z%+ox(E=4Bw*n3o+I5mX zm&tosS67o5VMBzd4++)UK9)7r48^3d{_LPr@)&}>K0vH zAVSfw!!pI}UW4!?p-?Ik_bY%JVHeHyo~zhmnukkho(kySY*LhY7h-~366<%!?E6}k zwj`>orHb%&)a1h1rBZcOO@y-wr2C$baG%rMTtO=HwWOB`+j%Df_G=(=_Uo5d2&24+ ztw)C}4Xz z59>BiMp*hkbff`jPDkIe1>lm03el5?I(i9P1r|90V7d&)|7~N3ncrswreYp?)ggO( z>1*q#_fk}E;KihnY2c*laAaC`i1z*CY$ZSwTQe~rn@ytk^}lMacwxFm5Q^Y6g2-+K z2T^4z9Y@$ngMYgmU*BRzKAQ+ng*U*M7BA$0zE`PE@$(lT#Va<|@!*I)kiAlJ9lCwX z7GhZ9>g<_@@LoUm^`#fj72PFJm3o(z!$o{~8lEiJnqo?tjhsR8=qtH#Do`UT$oAUo zPTz63b)PgT_(%MR(6LgE8?0e;>pTEd%*K&wwH8qzQ0I^k<0E0K8FBR5lv0uGH#3wMm_+CJazd`~H)iS#n zCN&KlZKU~;h1{{2UtpNev0`7VbQf*g$9IKl@Mb*D>Khp)ZB5n+ZmcD0Fg3J_&T>dt zRci#)9=9TmCj2qkNU}Fm>8$hzV^O!#W+FhWCfi0K zb?Vy|h&0TndEqZ>^deIfS*qnF69Yh46kzy-V>Knde2z(vzQxCwGKfMC4ZRe^3mH2b z?;V1L#xv;46ot){ASy2;9EI+M71xYO?{fuZb#R+~IY>a?v}hfB3Akdv*I!xA;HH%$ zjA8=LZ4PD@cu(+g>j_a@Du?ZuEA0V_E4FeRD$QB9TdvwQyjZGlr_nmE4NUjsy#5$@ zGk6(y&Z{O~zgCVD@pB!MujqiJ<@~u;xq|GfPu~OErUUp0*8rg#N`*uy|8Y1L-&Xml zZzk^^GKwuMM>Wz?Ujux6JPSA-En7L!Uyps?{t(sHCL-n=K?1#Y|HSng&=4T?8B^^* zKsn^~%V5Kth!F8`*2d7I;=-5)A$6+s#}VI7zV>s%2o7r}FDR_1 zq6nKpco<7eXRAzl0&UOdkPvr9>|%*-KmOZmuD`Dph^u!a!PNv@@Kq9pvUmO@#@nVh zc`Op{=ui~irnQ#j#x*nIyea`syg3wb#W5vn`{)LUG&K^t2GdQDfhJ;N-oX6{Im-VG z#|Nyodw6qSE1`qlwni)F6z;EtDa?19J%R05`C4UCh`#;|H%bcS+xP)uTPO%RO;y7V zx=ougyEL3|p?-O>BW-qzYEse9w?T)sABI`E2m&dmoD`qkt1mICtFe(E8Q`P4eimrkAyoLhKoa(x6oG(XcDhVA07|_*%j@Y~{hZ!ABlyL1 z-F0>lCxw)S_eyjf-jtB5;DoB{r18j7WNn?BuA&dW{?{G5_zgQMDB~N-I+WwpZfu02 zdamK!Qmk()+lb@S6P$;W$hIXL(YDDFCkZf9SVJ~F 
zSU1wR1alG`s&aCh?@dzu^(wgNRKqaIZCF`rTi-gm=f?f_;&b<>SSfm~#(1f#r}Y2g zrKs@0S9LEtVQ6E|VrIbc|H5gprIT>yxaV5OzM9JMMKFjNPjowYq<%11Y7B#?FqGPpi1=S<8e2Iz zhd0IQyZAwf*AqQ-c>$?iJmB00w0q@co-xw>)z2p)AwzVa^vG1@znSs#7s>HOY6^tU zaFFA#Nkq*@myUBDK4*TL-OB%qDqBA6IQaq%jUFhyn_W374B6tfrUs@;Ki#+;EPMSo zz8W6x-Cdqp^yDwB`WKV+5s}#8y3R!V^+)bvUf+$QozukW{H%%u|3?>uhl8`*8C2h5 zmd6u~k=qTI%5F*1{W@QQJO4}9b9jdTju)QpWNY&J@44Kb|F1;9OD--Bj%P%>u|Moj zvou7xFkb7=PQ>Eb)RK@yy_C<#m8ZY=#)M`Prd{<}cWlh(x%&Nbf(4fa^-8I^;6lP~vy*K=JY|w0KzUbu1g)_#c@!>;o z0;Ph}x*{8MDf+oheF!4USfM3g{RB&Pq3+tOK)Fy-rbjh6EG!QQJx0dFOyZ;dLjlH; zk`kxmMS+Bb1X#KQ?nCL+IRj1pSBM5FYEU~MXc^eMEme=^_gVk)V1bs)=2`~7kHsNA zki*h!vWV}j6x@Mv+)_^~`xcwvRm9xLa8=_{88`y`a3?~86MHW&|2yVV63$nt^4jK4 zo~so7h6h3-CyJl_!Zl&a$2XY-vpku z;V6@#(#0Poir06fF(`q#O}l@{VRB98UA>P|lc?p@myucY!f3ch{dw?By2JvVmM=s65MYxpIF&9ZxUt>ZfXc|ASN!Rc|SX{gf?X6ypdha!F z=Rh=tQtxUzl!~9fuO$5sYwZ`7l#qmBQFi#lA#7;?0JS}zu{6$&-j!eQAUJf_=)03> z6}-yRD^#Ud4e6rg+=erTV(kQXASIce*h0U$x=fdMOu9)V#w>!duw3s^Rz7{F%x|8R zDdJ03h|AXEUBBI92|((B7d`9{m~dHLO(VXVG1$f!eZ(?a-(Xz$DmTf6b=cuBu@U8I zOG$`PAWUi~n& z2?q<~e7S!WC6g}f4mTkBI$6te>0w7=EDuPOMz_D#8OG&x$yEC04;MI(WC{bhJWh>0 zxr`Yjye2zY44fmT^fK`pL&_T$$J# zLrDqv$duo{1T6UW^{TWcWrHPHDrp2I(yX0UVw{8?#D0K(%hMjJ_+TbCv9>Qm?oYcZ zn-D}S%j%1&2*$d@rysjZQnU#Gs-NuQm#4sdsm>xc(_G>XxVx!0TuOrEdp(P2D4j3(1@*lS&gAa=kf~1#(ARPQC`2B& zPcv5y>4n|h1GQHz2E!A%hfRi0G3?Er>H(yQPl;kL06}mKs>N+XX+nM#3UYF<;NcN) z=6C~9Cf`|c+ep<#iHR9!cUt$ch@IFy- zVcci%asBir@Uhuxc)wAr-^(Yu8=(`B&Gt@OPd=nkMfPhwAC^3CY?Y^X=?^D8MFSzE zrD`-ZS+*{RZ{6cF$BClB3aF@&oK|9@=p>^mhSE}B^j<}^H2yv*-T9*%Y>kX}9&4ywlQz;p_4^>o>(xsPyHW2Khpx;0h-m*32I>%|}M zbFAYT&XvN8Vp-UlqrngGn_CKuqBto73Ii1brC`%8h&?Ay&M)vRBl=5(I=eYo>5}r= z_E4JDSndml#fE@y-_X6GOYY~}e~g50&ZcfLxKGB%D|>rW(|CihSvYS0iht50QJNt-uwV^db0nZZ%mFiBR2I@@2;X zi{YstN7KooxpNDLU!L9%fqb&x|?-F&8q{hfNt17F}5hoS?m5#cG|QG)oy|HX)bsS?B36tc4#As zjx#Xl!ynN|Zgx`)^!2sX+RlvrvQ&9Ec5m&>1BrZHc|3OC9nVKQzqt6kJ^1RD;m>;O z7q2(T8)EE5Uuig(<1fU!i&eTira(SgXue|KOvqagCCs0({a}zMxzBMsYBQ%4zn{ro 
zx~cmb^T`<*$xk5os9z9!B2SQ)(T6cf&ar~-32ybg1X0TUh+%pAB&q&13c}$kd8u%} z+09;T+dB95APPr2nF)f?!Rndkm3q~KTn3cf<-b{d$k9&b$FGb^s&9JB-9cyLu7CHD z_?*4$L+i)!=7|KdM-w8qNS6fEE%hvNH1qN<^*oD>QWgoUv3#tQuo2OSL>-vrp2EZQ zszB}B6ZN%a9e&)AGii71o#KRHbE7wnskY4 z#~liPl-ahTgk}@vbx)KA-Hupfxk~Z#4=2eVAAf^>xI*+RDC1$N|3ae4x*y_{$NtkYVPY4>(X4*~H za`GV3w?^$NCwR>{(XjJTkwHGnY>@{P_JPtNXOZ@W+@7)-19y{wx)UdXnLGUo*zUyD zF`9|BwtOi5sOfqY~h3MSB7q5LeTzPX>lfS#) zLJRwibJ)ymzZ{SSVj#yIPZy?(FEcc(88^D$+ zh16^ATU>%px_#Vx*?3>QW(Sb*qK>t>emu|O@;M&y0D{0h1V?A{VYH{qZ_k~dbBN!4 zDXOmGmM}bqzWL*SP8F{X%z5mKwxzq4a!#pQ{(#9?LgP$9S|rC(zwhiXqT1Rz7padL zT4o=feGdcVrIhXGiX*wMGCI|Ae(-JVMt(Q>(bZ4EDv=1g&OhxS~a_QFo9&)Vo(bN*q^r0;4$|E{^tCO zF|yiv!4n`>h?$deZN(QHY*jY@<=FT;k;_OXgjmRx}xC(nM5xpWcJY1HR4y;Js5T+cV?N-b*Gjl{<6FAt3QIBbKRfA$E?vY4R8rJ7aqz%&74aMx zozze$-~`V23#GRZJ^ClnlU|mr`d(W~nY@cFJE2w~A`c7fX^a=wL&AYm1_PL zsC#=1@Qm#nuEY?3*OgT!#OZ|(GX}OUk$%SLctuheV!q3mwG|XD?r)!oeu8kb2~!b; zBUwo_=4VWuGE@`ohz$DO;FY~`bUfDhZuZV;QE0${V;s|9Fg|>Th90G1VL5$WAj^3D zLe77a>qn)@dWrrmp;%!Br|9HXhF$&Ms*{UrqWcuW*Ll-YdP!U$e@K*Nj+A zgH1)U&pJ(S+3}lC+ zZ9FzkRk-lX%>w@C$VsOyM0w-L?-=2oaM%$gBj9~o>V;eiTY+?(KZ;JDNGp0?nO93i z&yS&BhZJv*eXI%VlnvP~qnCb>yss0kyKWMmVnVVZ@!J@c}@SC;vkaUAqINk%N23| z1*ChJX6vOKycT;~0ai91J~2BL9a(x|?Ry)GLVh-Fn;G@a99Ls~rar{v8U{vwk+H5y ziG4KJOCu$UrLiIQeY(QNq?D8O6^DB>@@Wl*y2ylm;bFDXrq2Oc)sD};_1n{*H6p9F z&uK_2YCeY@!n5@(QHF^7bdJ4Y#il*B4>~rG_BHT%|bjl=GXv9Yl~)xqsSNZnE; znx);aLPX>Y<*?%=1AT8N=o>J(nMlMU!d@7^3YpW^DGsBqSkErrgxygcQi%&pD`rA2Ax+u?#RUZf;&BT!wS?{z&0z?(CC;H`8}QV_V#+b8o^V zP=9yH7B*(hh~-+hBCt~9_jMaYPm#mi!0DkWK>cPkU8p|3sBVhl*ABwlX1BLj$43lu zRRgp4ib_alH`+fdpX{ESEnJ@;Y`*Bd!*z*a@^=4m4-2{UHMu?bKBb3Igs;r;e?0c{)_(4x*QWB zr`NE07)VArqT-t?8i+SF+Aa^uS%z_wKW6&x8Uy2U7~ON!#b*%9I^MI^3*zp%m2}OG zz)=ysN_G`<0_1Kg(NUmd0ZJUG%&-9Ioh&ZUjl>a`*f33xd2YsozB)iE(wtI{2r&hK z3Xv91*4Q9-RDp+Xa#EufGs|8G>@ z52ojs8AXSA6f; zMD7pYerBI!(W>ys1ZRT9z~@UQWT!ceL^=FmU?>z&)ieD^qka82|_ZJz+Z_+P<68|mb;UhiOUQKc?e zW)k5=#BZlz*(~JgeJfs4xaZtQ5UiZ~G`^e_XP932%L)37f-=Ozbz-b+5YZ14!!ys- 
z+7O8U;6=kX*7Ode7#qjF)TNOxlyweuWH))Rt?$tMkrLk-canPb=+Mxt#i2;T=>0Ou zuun>A^8_`r!qjEl?Hjo;u4uv2=GpHZErpU8mqs!#h?Dn3t|wJ$u+?a;C*Yksw#;e` z$i~JccHzLQwp)AGsaDT+0Q+g2c~d-U`X}{c`H92VX`NeGu1cU`zKP&C8qlK6cFFzN z!LXz8t(0EuV7epsN4YF!M;gdIfv==~+T>c6DWuMh`e7l5W&ZsFL2?cm0e|Bs3{77! zIU|ezxotEsc;1e~tb&N5{zmP+upL0A9!n*=_z&hk`x zlUWFK^-Pv=ZjnKm7-RsvHhD}oGTV)5_jnkSTgUe6rK5GzH|JQ}a=6;840{AVz&)@Pz@?+7bd5p+o;yKBv-@oPxGK>Lvn zpgW0wO2l;J5RFc8$nmiEH4@?xAUS!uphNvyvZ-{?eIPfvnk|OjSXe)0f0}Lhf!Xn@ zk`@PND&MEsjrg58=OiX5<)z+umGh7j*+TV~`hCOg$3CLJ zBOwxAF(N+lMBpb=Q8FR@;B!)4vSFs4uNsyyS|467b=fA!(=(`Ke!D+urL+ik7PBb=g8UrVyoIsv z1nuyF<+Khjbd>SpmAAYDv8qWHEJee%cC-*i+$9&s9g;TF7Zf3)^}=Cul;$Y+=9hxI z3=H8%E?^@WQ?)_OADf4r(~2$}aMbNgm!UjMhgUT(U_7fHh*yw_5Z$`&24pw?lqpls zegyZe%lYhf@FG3B%ejBFN)-=xUnpkfegx(NTfVXwQI@dzV?i=PSh-%qs?p5Lt9F@r z9hai-Wf#L>ZvI+3%0|q2Ck4ziisy6fFR9zwC}<@Y}agL9q1r5Zo68c^4pn3wN~Mq+&F3pVj3n0a$_+v3vmY>K5U})g9rinPcZ(UAL9-u`V>{ zf+ZsBle1vyj4t4_P{g4E-=@<35R9eiBcH=DvLM#3_s&ZTDNi+!;YQdF6^yR4ACf25}M=K|7RXLJf z)DH%Ef@bT%pWok_@S&ojZKX}_%@&(JJQZhp_{wn9Sd#}{5LM&xnRvN=1m zIMx0@ZKZfE;Tm-$A!#_nB2yRhQC3K#7$w@C=ZcdJrE`K-Z*6jB=PY*i;A2!Zt(gy$u z$A$mnXuu+EeH^S^5%Gmir^%B1Hv`n^{GKF*Bj#VB3W^gH{5%$~)QBW@kLGGgbF)-% zW#wH{GJ<*HmGZvHv?8!B?tJ;o2xv<|RFuHAc-NHTaOK?rF5Ff|-KKuP)5kyz{ff(4ydHxE(ANK2o z*yJcqhnbGOC_HW24E)jlOyh=)9MAIYE~4*16MLm|r0MQxP7mxYJD)k)g}M$SZYzn(yT0h9QytH`ABcSrOg#_!HEvzgh;I4 zOxHau$XI24yrA4+`R=R(eqkgU=EY0=iT0>02f#hv&1WFgW>+B#x*fH5p={tdkjOiX zbPr7L+&Y4R$Q(i|SZi$XXM3S$B=74sO{66kskYF$NY(&81B6_HS={Y zND3qM!ocfyLhNd`tewGVm0l3Xl$ly<`xqh~`FG{^%Tm2U+|w^R2G$$ zNHq7=HTXivULw&pgkq5g7WMUTiEpgcpf~T(yBG@FxUR5dz6y4gq?T(~Zq#fM6f>kd z_rfIr_zJe5EVQnGk_ml-$rqlnW-7Ie_iI}F(maD<1p`fZMFZ|M*{hO84rHO?cqo#_ z?qWHC5=N+P$axH?^MvZzwtEuLx|d?}c(smM%cDgh2*%~gVH=Ff@BrQ5TCwEaJ6BH^ z`iQ&wHd4SETiZ2}B6>u-7+Gt#k2=<9?U^iPMTAu#ltALBHAKP6?9H z|H*ml=gl1Cs1IQE8`Va#Y-(U(4#R9CjbEO2p?1UN;$YmUSLwZ?(%Qe%W2XC zRgD5R2|nhk-xnYwR|$#67rFDo%5yE~XXIm(eDMPU0*D1n8^BFtCb(DAPDxT%&qky) zxq=BLrlRoYNfJ41GD@4aMPKE_H9%}d(NBmxM=QY|+_Q 
zypta+?_{Ky4eu{gPB?Io1`C5^W}!N5FS;f>c#4p18&T#leEEegxId3EHiE$MCSYGc z9W>#vZmL?}w&#w$Gt~_T1Dk8H9t)(MS1>^xb50U_@xc;)Dzg`_X}z%!xc28W^6M_! z^8x^i6LPvq0fXz&C7SD$M(Lzr;5${1{zJq*?()wUJs|QS#QmewTO374M3l)U4+vw4 zT5+3WZ)^L^%5jI$%KNVCp@Ez!8!pfNf%5_i>u_OJwD^^dy1rIZ%_Smj2gtW659_5@ zYtQ99>sujsLYatnyQxIgPhV6N?~MruJ%KrjH_rwWq6uKA#1B>`7ML~cZ&}{=EKnYH@bG(q3T6O6*E+IRB3UxgmQ(GYP&OY3KsVa+ zMWqsU-5b&Ryd5@0U$m1G!ulE^2k|${Pc0aRE;S&sqGuohnZX53G&*;^O7DL1^afws z^;@^g$M_?Jw&5Mer$KU`7UfpP@tKdk&_ed{ooX+#nM}zXHZiew*$-NW(nIj_fec7h zwy;FHkSM*wt#O*G*d&*WGT&&@3OJMW$e#;_^gEge7pw*S>k37L>GIH9mD}A^Xw32n z0W5$R@c!BHP1$mJMsLvT97dt#Fb_{fg_>_)YVHi*Y5K)P2XK6!zgFmF{Q0Pp0w!rZ zn{Z0jDE5I3N-Z${;MJS^{@jr4^W{xaZyZB3TI~6XK{)ox3b)kZvV~W)1U_MH*K9Hw z-CMM{v;Je=6pMKdN=3&YgO%oIi0ckyo>}VmxOolVkzeT_;&YgO+iMiP!KQqVdmeHd zVkNLmpzvn0f9GHq==^2K3qJz%jM_s;FZd+*xo2M(99S)VP(6Q1q3?y6hxt1d?Ze6q zPE!F?oRW8$0rtwIjK+7anxgx=yAx9ch8DZ zmgooO#G>M}Ws-MMe^R0em@WD_L##x!MZWvTF z6%>Y0TE3L=+TV{W#Mz_9on5Dz!z2KQ<9l3%>`cnS_pDwzqo)VA6t4z@g~!Yr52h+R z=(s|`56vE%f#;!-Muw%y$WszGpVd`?dF3|SpP8}Jw@HWzm$iDhjUgTPzy1$n-vLf# z{Qi9;BSf|&Ss}{E-b6%MWo3`-Y_d1WUWx2cGLj@hM)t_gE+c!(-n{pt-~7Mhde7B$ zuG8_H=lRb2{@kB?d=E!{6uDjHFz|D_!^M7$;xN4&YfKz((Fs%L!C~dc945vDx5t?) 
z;&@Vm5g}>5$IZpBBCNjt)S#C9kbdcI72^xpHePhhbIrZCf=B$Gc-TLsp)Qq`6Wj~K zYA13JM1J^G^~D`m!v}3WQ%P-U#jc}P-A&n&Wq+t-1}lSf8aqY1cs~w*&QXea`O4#2 zZ|J$5>PN8sN<8$`re0`_V6NjDSA}*Iwhr$WPT+(v%w&S3@ZRP5J}#w`R@idjPDE-h zVkcsIvAnm&X!9gz$%YzzJ$a9ML|?q-)ug-a0TrDHNlBnmpSB-u8BC0F!lmL6W9%@Oh_(EcE{+DOlk{7L+xI?5KkT*3iGaeKXYs}pLOi<1``C0TsYl49SiFD z{7}=iQNBHKlkue-29bJYNVkdnbkaeeO|#EjLuxc^$@GhpQw8p=*U7^h>vt8uS^jXx zk+hRnKCcV)pRzKEkfY^KcSrkP(~L2hZRQji z3^?k)Gs#@Pb<(~jyD<`YY`DP5%q*30^9mWMxdOM2gmuy9?298(8btCt*A$1?Cl6ci zHQQ8Sj+An;meE}%vX&rQ*b1Fbx}t4v-?_Pd|s8zUM_ ze(gV9>r>>^@>p~qCWw<5csTTDgD75FD-69gipqUwF`70f-hQA@6Oj<>xq#ldc9Uk` z+UpN(wn5c`z*UcTi-)=e)l2AT!@oQX{Zvm9sXR|nG~P)G?IgRAM>ciCb4%0iy(V>f z>r`r?vU*2jm5TCduiu+m8raif1|OQy^mfz`Tm8H@cAqwa8XXg#S`#96y-bN|qp=D7&_rdq+d4`R5VY*(!npCE1DhG*UnpYqn&i7>F;G&FEuYJZJrZ{GwuI+~9*cyW>P ziAkzVYsnHeuBE~5+VpQMy7h^nQm36fhR|*kN@4!eThv}O4|RX_vmqdRYm<+R&Uk_M^J9KoN9RlVgL9RUKI#+$;4Y2y~x9`8({@&{?28oDZ~KRJB3K{+wM{$@JT zYEFdg-Zd<>G`*LO<8q=wyc^{=(Ut@R3>FLaBm^--4wWmyy9d^%zKgN`?7G3HE0~t! zdTqG0*sTyz&Dyo+7k*{T?y{v3_iOL!XZ%UYk!oU5+8z|u4c{gqL31TF8gOMZTS@7s`I${eq}{QT%w&(0qo45u+Y z5@cfGq1u_1f-t69pT!YP5;E<(%61o6Y!2q=3QuU>$a2tW24>0s9KgoGw&j15W_?i= zVqwX~-|tj9&p&$9oS`*XG5A@;>EK*%uW@K~+3(2WYjNcL3nl|z$yQ`RzS&QtYkN{! 
zBB>&Mt&%by-@Uyv8YVI0%Q`P=OhZSzRrmAtwO~7RI<#qePgBv%rq_$M#E-?^q90Fn zUwk~|{o#f*-X)8hk*u!vNpoE?2a0@z4tma?7pQ~mrkad?m910s=*&lWa#@N+7^d5v zN|WuRt@w$Z9xJ-^^;!S&95q-i)3J4AQkXtZH!@~vIYKhJ%4NmHhCjNzwG*MDEiLCI z6gEFbf+6F4N9aWIX0pMUw9B=?R?9z+;!kU1#6&{urZlHw7D~p26T|$2@D1iVQ-;bn zQEVz?Qw^Qm6bB08NtMTC!=oF?b2~P7T_a^UKNb+(T`=w065U+7)lhWR^pcw%Rbq<6 zmP^c_?#F9-#!{2DE}Nnscd2os8TTJ3Mftug&L}&rUTyhVLka`SA<}$-L|yk=ZAv_+ zwKhYq^CHIyM1nZa9^YJjBqVzmCC-4^Hj?V_XRn~SMDlk8*Zjue>-ljNm(dQE9eR1f zAYZF4!*Q~!m>nEqdJH7xOLkJ6)aXA4)?H1GUldgM?m>WCDF$1CVvyl4ajCJ7C)Oe3 z3#)u7UhkHx2TMV01^nSNI7Ak32?pB=vy{++L&CipZ!;mK4MDn$meG)Qle($L$MXF9 z9=FycPDBW+O{0k!6RPR!PEM<7FzM+wi^R|=nB6^PY7Y3?I=l%yFG6}5lkeh|zyt4} zbjGN@L1z?)__!l>I!?(4KIH*zpKLRpFf`@vKoPvGrp-`YXO=46AivCqByqng(1X8L zEO35QJfFyYp-4895)bobTwI1KE87WDm}ffp{rgR`-ZvK#c`O+ViI}z5a^+5-K+_BQ zS2H9y7%Fnt6M4-;27Vhrv=1zmn z!fZKHV5-@+UV_XWQByO3D!*RM{@$H6IA&vh;Pu^tV!vmL>%|L_81d4=(av+K7=e?O zaaN2+s#FK3wOF9d@>3H`D4$c4UT8fhv%qg(!GyUTWWINDnITEf|B0TUyKr&V_}U$* z$`vJ7R;!;hlb?+x((1An#A|%bj3{|+TqDfO^7}O^k7{oUrPH`ouU6c;Ov!%jwiR`$6(yOC7qv|N zzSR%bn2aB@zw6cNLvGbKjf?SiyJ?8stbae4_Nqn(H{am!6Kz|s;O5pjwfX_A@W!DR zI`_zfZYB-m*L?YM?WR?VBA5Q!dcZNw<%-oX$F$aQp|PMp!Frxf?c-P58?7aP zmm^wlf`f~ztf`6PFTq}j#1qpoy_6$GA|ZLLmamBs6OJnA%4Z(Le3eyMNX{8p?#R6- z<%oOPBICU%x_oQAQv7uZv|DgjJgYtKocnx|j}CaY;#DUAQ9&obZHz`#PYL&a*}YZUfk{uPHsMt8jA zWB1E>OG0RYlohWcC6pa5$gPmhC*k|kre}Gu=J{vszY;dE!4^a8Btk+SeDG7sNln+_ zjU{KFA!|^al`l+}Ap+A^tQlE>@Hz=GUHSV7c~{{0&+K_A{}kfC$qMkxf4nL`Kz>5` z$A9Fdz+?8$1#SK-pMy}K_CM+aL=!(iQV#y{`X7I4OHZe9z5LHV5)!^@nj;{G*wfnM%rblDaD-Q>4%a9ORwn zd};%$o>CZ=qC@&&e$kg{y^VaeOh7p_H1sl5YiMC=XNrTzZ{!Jb>O!VOAqTuGOHRmEZUhRk1?Tl! 
zoLyx~f)l7!f8}!Fix(~N6!4xyYs;s*Uw1YBKkBV*F#cEKhCY&G@sC%_g~pa>`tW$1 z#P8pab(=DrHt0~_hKpZQaGOO{YB&~r##2@8-!KgF^Y`!mbT^?g;l+yw7A2ZTdp`=* zj0wUj6py@hy=WDVpD9$1GiH(=jCsbpj6ZU#q8?ux3RJi%ly0-8yJ3;WZ$FLgw)G;K zT&NLxJFOL_9;ObRMw^Orhi+4W*m!TAWaZ3BZ2!X82G_v|+}#^gdFdJI@Y->3=w<&k27nv@c%2i#APn~ycS z%y|CuLk*=W(e`YI+<8n))JFgHj@_BlbJ%!z5OrQhUd9$m&fb(P;Gpz)q3jq6U~-uE zGQfYOb!Va>%64G?*J{rD_YLsPN0)WO_0untu5S)!pi(A$zgU}?7N^|sC(DUjJf(?k#%c%_(z_ONhCdnat$Ck*-(;n0VokH~kX3P&xVmV;zEZBNp<(ugjhA;|)?x@CPD4dspNjai zxGoRXUJs{RJUsn*P))JYU#&nBE!)R_s_7ygweXO^�(NFt=FjHAlB$a>HQ-7_N+z z7z!<@%5FL|H#JQXc<-S@WI1iM^?sa)`&y4jKNlxw&tg#*1Wt$@FDUXm%;IG$q*##t zVbd<7p?-5yx00f~yxcHpa?((EqlKQgu$ZlPp$Euj!QX*bAmTz0pp z%0H*AO^$?&EMzSgLfKz{`i3l%MqXs-i%mMZ^VQjd4)H$i(%$0p)`LOvN7h|DyI!eV zN>NeKy$26S#J+3U1twvmG)v7QAbvjG(vitEE_?{hlcS}cH5)RkKCUNqPzal$FbODq zFg9l=<0Z}aOuq99pUg9W3s#|8)?Y>aC^|o1WGZgO-Er&Z&xA=;nAGay9WA6URqnf` zd0a-7nL-#-{RPVZ^HNgO&BvdlJURyow9vCDjGJJM;=sK2@5SWwvc6#(qmq@EznhmE z|9i1NKRPXqeV{<+D_LM*;63XD zQ>zs3`>R`$nwpf8lan9qrlgBbwibSvjeNc#=(MO}`tu5(wf^DGO2Tbp9FW;jiHR0J ztf44hOIMc+++5dvg-5&G{_Zc`ybrq?Dk=f6BpMnT^r1H0{BSW78M|)S&dN_M9gJIS zY)$){bB+Ea=1B@Eg1@&1^<+*@j-#P$$N1^Vu11kz;QiXXvN8%Nd?*MGr$761X;Tm_ zjLdzi%jAWZ@$pm8t{iS1W3R0q8M}imbC1K7FZX0+3CEkW<&zqkxz@e{?b>Z|?Q}iT zy8)uqm|abNi_0E(a(uP@?YDPYA4HG^;$yNlvC32J@niGNlhcj(niu2ozkX>8Z}&YK z`eck-S6`3Tx&);&=^*kaz;$!>_hy%vjEM;&w5VZ@X(Rj9Ep5GY4)LsL^73UjSAkM% z#inmwo}3@O^-$k3K0Bgv)O`v;v_;d)D5){Fx0-NJM5U%iR8|TXb+pDZMaRWmA8z|H zUDOhnL?5Q!Ii7X&euVI**w;=|W}hKwQ~i#Y4FW0BA9VqhIU zI61*0pvKGUVfNA+`C?<^G=XYeTJC)tx_A6^w1Vf3q-4PeArJSU-2Eaa_uB`*Mv0FP za71P!OsJX_vAlEDeXx8+zdBEWuHO3{bPa_9TlWrZg6h)~_r8GvI#yO(xP1d8pgnb~ z-Afc^6F3;W+{8M!rhM{q341E;+n8u59{|LZo_bZZ526 z46>UV`u*++>mfN_l$MnnQd##iDKBDDJWvLQ#YEP5jhgUW zAH{#R=*Ph&yZOxz0~Z;5NVYkGQhu1r+QyCpYI#H49yVi@u9Kuc5B|)$J3foD-(FOO zt?@lnXXAKx93A!W;X`ESgi5f1@{tU}ziVJXyR$mV+DYmN%K+rDMx|Twt5+sm4_YdZ zK`}+g#P}T@c{DUO2G{$&v4YtxXhWsnk7q;qBqvk-DzmF+6#ohUtuD*|PNvpx%>^gd zXi3c!1znYQlQEWi2R{qu2M?|9`-nZGa)0EV=M1Cj_JcyJDXFl3!`dD 
zqH6Xgao@aog9tpW(x-%lWvzv`1uZQVfiP+Gu^M=LpL}|5E~~dSDxzX6D}Ng6hJ9~- zBF9C$B663joOx+Sm-?^sK1aO)*A1NrtfW=*qUH zqF7Vg<$&WFR+-P5R|mZJc~-AdSzev+c4YsC{w+s2i^H%1yP>%m9fgTU;RE6XB7d$0 zY8FU0M_eVmb}bUoQMa^8U)ro{ZIn&V>oHL^=1)xMzpI%EQzvt_sLaEiA{~{bD75Bu z)wbU%Bo0%L7|+UIopv!+NIzN9JN@*SCUiYyl_5<1B){jna(H%rI=;=b5q=_a@*uzg z!kg_}lTjCoFLRkd{_V+RcVJ!J9ZnRlPF)x6LF8dOSJy;QGqofnn<|@|<^{M?`B1N$8Xv`}o(Y(CQbB4nhi=c+!1cv}sAjsPq!iu=4qXAS z$O!INE~<|o2eVi2or8`}P66^o@RD#%rV##A-08!=Hqp233zo-s|A%=&fwC3gBOoL! zNLOMGV9j7VvV`Pr6_qgjm8@xkIWCY(dy}E;e?ZwO@g0sJwqvg!$dJ|6Qrv^tu)cB=m|Td1z2cZom7 z?wpRGGZfm##LV>U|<*!kh*fn1?(RRgORz&1qFJ$b?a1c22CScX@w zT(R6vIGF$(%I~(7Qx7{6HmM;*=1%s@Y9Kb)LR zw<$0*zR|~8?OD~G_ej1ZFX8s#DtY|Ni7)LAdmBqO(XOkN;~0BdGYTQ<7#J95X4sdj z0OO#IVibjoyL|Vj3jt)u54vS`dOV{eVjSu6NZVhbBRgGV%E|3-%l|cNt=KnEP(Rb? zRN+=KU8;NII^BkIZ*UhnI*%EhkjtxqFHeTfyT#WH`POl1PPHzVHY~-YL&HPsXt`KR+usvF%|v zSW~r5WzSCjJ67>aYd-u+;8isj`M#@q(ez4JWz0P%reF&IJ+3>#oqA>Gt8@wcduVLv z9K1`Fx(CV8g$W4-rWB$~D*>768Nqwz^KU6GGq}3p zx^i#>Z>c2?;r#{;PI0KWO2@ax0^zR=T6?JvbKmcVgLyxQ0I@3w6>x^Nke|2Y6F#Wu zHOyBtaIs(1_-xjLSLODs)I#Q8v`U|n3r}#K67!{z$_uF;hBtu-#zg%ZWu>&K6n!-I z@}DdK#Ap4{B~s1sIfuh-GjLu};cDo_gt#j%H1zK)6SLX<-(@=>QY8vHE=?cCG3nMQ z@3G&{rd^@(g1u%WF!?^l&{l`8tK~m`NtcOP3qSo~d$O^_w?5OPQ+pw%XPID%49DJ^rxr=;TV1ji>-4`>y-tpn=v<6w0*A#IQSE0y+O7 zQ)~8Y2h&UIF8}3mDF5;}s|b&i`RReKG#g}A!oLkel#ha`xTx(-N=@{Vd|C%8w#czh+sdg&*VX^)61gX*H0=MXoJ&VX{ND#| zysS!dQ`43HqG^Dpq)f{WMIbCFKE$Ins?WTI=bQv0JEQWR>qe;JjIm?rWku?s^&zZVAA@7rKi>6QUO#v|n>lFrw1A7`UJ8Z8omDVJl|Hh4KWp_TTAARt+HUBIazJFAE?lfDCx>-Q&JpY; zH1Bd3mK`Mrek21dVDT|2D$CICRRGS4LU|;8?2O9wcxm=WK*~= ziO_2b&C$g0OCER<8{RDZLdnio3dzqOA_g`LZl$94!#vRYa(nQLoQ&S3TkB!aTfGG* zpI2w2=MR79BJiH(xCezEeiRWi6+OL6_p0DtLw1Mkh6U@eK>SC#b))66c$Px@YwYKV zfT}|V4#wnH_Ww+8MMHI_b~L0*_)je zS?BfTu|s-l_i2OA*tqr3y!o*L+=K0fTQ(t!ixvKy~-1}E7Vm=;tgsrUD9>$=7kPhBu|YT8aia}yy^6hqJ9*;tg0oWnia7hSuf zbKK*OSau*f&&B3g+waJ%ZQa*V#-Ey%_mM_#eALxq+1h|?GL9Vg&S43G^6a8zNoT*x zuj`9L!Ld*#gOO1xGKD|b;W0UfvAthq?$F_9RhRquR_)22qMeIjmV+o1iK6hu^CQ7d 
z;o7RI7r7}fdV7yH9M6dss3qiUWaSFImL3S0a=zu~59LUlN%WdnZS2Wnllz0$Ef#-> zUz&=e>LM4ehwD?doP>F9*( zd5MI%2jNLaeQ_VtxUMWPNCk{??uh31Q&)Ug=Vw>TqL-DcLwTX-P|d(EpDXlqb@9Wc zjpA+=S2+**Xsi)hv9K*}`(ox4WYTtDp`uMueC}uGg_o@tp1CLOEd97*B;C{+I%*W5 z<0~NysDkd)I6_@QLU;|tVxc}!{~(?l4TsX6n7;rB7jBRi578`mrYK;<*sFA3s9Kab z6(R+d_@LamrQp@{9*q+?5~eR|-m&gNu3RG`r%vv_BQ_*O1ikA8=!gmati?t5KbHuV z>$3*Y<$lQCaNoHEN7>wtOjgQ8T|K)l0+9<#Xm5#>Hm74joX{-D-eAWhf~$z8;u3Lb z;gdQBdlOUwu6J#^53P%$3iUV+)6;vf*3ak}iu4!J5msdG@DdRhSLXLndwQ>S`> zd_d2NZ{jSdg!+_}ua}eWYeesHBJyNpTQ9SsLUZgq%67Ry7RgjIW+f^zcZkto8}%9v zk$SJaeM9bh+xt?dQ2uaAp+ab|^FQ!Tc+|iB7saMroMzoLu0=T&6_k3O#Tbp(2jQro z^*@s9FIM}M$M85XwyehA(y)BGmHj~BW8M#o!1HkI%qJEs#7Fc`K}6v#f<1pxT$Yoh zY8A8MSv+5c;Qyy6%B=t##((HRHE{flvMF>}T>nycCI-ndR+|>L8$?mz?WP^N98HUw zHm^Ao*)-?Fg7hdhzS0y{aSd5myVWWX3?FT$w(`k=?RUH$fsuWkB+93-df3m)(>+Ts zZLFoUJb+bw!*=|mdqHV53wzDNpW@uSyi%=q?ZW7dGoc)qnCKlz_1(@sm9kmtIy&g# z(&%v^HD6XIP8`xY`!XIBm{jJmSB;?x)I!tK#a&hX)0HSP)beK)MW`Qyu_p7DM`*Ft1MKXT*?Cg4A`w6It{(O=w_o_Ev)gQ6ur zUG@+|`YmN!FVVb^vu~=b%GWJ7gE2by!+7mSj~?NcXrjl-(H7E?_xJUU91AfH@@#qL z^yJt)jw)mYJXtT5TU_yoNx%K;x$_ukwDRc`+!|w5cQ|~{CaiOCNh5Ku`q&-d!Hu?3Vg3w`1TTt$o-70>K>;Mp?o6jpaIxUus;g51!PFr;{(Fqp-^wk^dL%hub+jTK zhFopN1U!d=Db$qVD?x>WS2FqsMvfn{5@gp=P@QWmF-+INqFAla8Vf*~mDELDL3&Jo zejCN8ETW{U>OZ0Xv&P7NH02|Kj_h<9@pMWftDao~vy+;$FnZHuzPJ)cx|*b2O+m0w&q6@#!#ErcjtG!ACLq_bvAbXFe*X|Jnbkw1!K+HiaG~$n zO6GgFL`1f7Ws3wuZvhfWYHZH(rfB~%gf_NfcmhBZ0_PYHKQlHkwv)$z5zI<71D-$E zEMaA5mw`lWJ7(`@{FTNX>K{wxatvp{4rFhlTjmBpt{hHWOX`r!{6@pB9^7F-c|9Ym zZp3L`CxN3G2e5&H9p1cp6_>Gm;FPq<^<{qh4!er(7U(8+a}`+?2M)z9etZ<9&74C? 
zl@_HEml_4_04qWg^hd8E8TVf4?h9RloIWZwfPoid&G~EY6ku*uQrwg3ODF+slc2n7 z^war zp@aKFzgRa=>p_Y-NwOE5a#i_8`PNO0R*o3p3ZaIZ%U*_yl=I>X5YG38%df0YHNDvV z;u(*uHsG#=_xGOUxAvsS2P{9$)SD{3rL;AQiyu>#PX({is^$a?6t6o0bKmYoC>E z9!zmv1Zcb_oAwDg1Ku>7=Yr+pI4S_Bw_FSUkz+RvwWM6JK<{P)=1u)OsnNPvIsCUS z!aW2wlXK)q2Q$IY24%nfOz8$1kD@4m547IbZkeAYQGO7`+*J6zc2lTSNE;(W3H6u?_YMI|x*o$x~F;R5K|l z(0ar4R`ZcI{pG+)pT)q#E>Gzn;lJwF#;)EiBo_RTqmnEQc6a3+PRDsABLnjQaO)dW zX)>{&LnVtO1xxs2wuV+IOT=e97b+lg~kYfDzK1`q5fa{3@aX z0F#C@qAYj{JJ-IXW

i%@ z>bF_0xyy~c>B%qXmncw^Y)#YqVtZQNrGJSiNA7y8@3t`qN-zZ<4Qi8jTjC$ZkZo&JWRDa|XuaPh~06?P7?ODOt8=H}CrwFfnJGK3_?9#^)(ToN-W z(*Dl4FnNqX{&UE=I__WAR;tTk{Oht|bm zS7}xIPA*)&Q>a3;zB`C$&Z+g-Ln`W;af8aMg&vK{5By>wtO6tGTiL!Pb80gF`5|4Z zr|ti3s88yBWq%#C7V5BR%MXDq!z>^UtqP2DJTx|yKt!qxU+%S`E2fp6DjJ3)b|fiv!gT^nMrp0rRE_GL4K`-Vad(P+~IsqO2V!P%O(-V?Q+ zVQEPO@{IkOTR&>Qyf-sg4^zo7U|ymrEarfjnzGLQiG5b1-&M-(VH*YlrWXm!TgD=eo74A|4>H`cPzpHpNSUw>2rKCPfySfUX5G9_#xgvWW z10&=dbt0$|E`0JZl4MRTT*gsMvoUx7XLK9Gmt zZco2q(cFCCGMvh1X2o@F5$sF;XOj&2iNqy2^DN-TY82drWcN?EU+UDb4V23Z-U=5; zz3lQ3&dKnSwyF5Wv=V&Q30K761~XeCxt<185EK_Opx|2cdV0u)0b6vkElwD(W|tcE z4Y9QC``j7$&)GMbpHP!)oI|l!Z+fmMCU1*L0EL%V?PKs zrV@~ll1epz;lgwKZcDUTLxm|TCG{*}PEd!#<^%uDYuN?|4kluu&=71KoVqY`MZ?<4 zat#g}wx+N!;xF-ovgFD3Nn@Xxg9M(NbDt&boHTp*Dq4l8qxqf@71V%L;vEI=6R5m3 zALL3P1`5ES`j4sT4OP2nU>Y_gH&SyfNY7Xlqci!{OWZFUPjzO_kZfc`v5@L>Y%74> zvq8S$@>b=qTn>X_x_xg}isy_Pd48wxi}v{da}BY-Hb~EsuMg_GC#gFGJ4MeS>uRCy zq0gnEzbr=geQA|%9uM0>eL;kqPS5EeF;NH$12dcA#G!s0X(PJf+MB*g7c5wqHQnDk zzXHL4=;uhVq9moO#y(MC{UvND;IlYgyF=z}z#GwNJ;UYm&1V|n5=itnKQ2@$c2FSP z32o2!$pp=g@+yS+X&KC%a0O7{3S~5_pG0j>{QgJ;oXfo}%p^im>C~5q9Tna6yFjhJ zCDaQVNec#&EOomo%gzKXutSof&B-x#K*StHdECN@OGL`qww#TfJjdMGoy4c@9u%Cz z!!!q~GV^EG*qs+=<{|;K{{Dvkhgni(cpQJ&27mVCXKr|acYw?Ft7tjXXN^L}ws$Eb zN{4XfOrhyh;62NJYj9>dXT=xXtvzcK1~ON=HpMhpXEQ~y@%xC18kWwH-%e&oln z!f+{dh)TfDvK;gdMgRveK~LtiWBAZePXRr{H;q46J##l0x0&>!{F%}atF(m;lSPH) zAM}}YAvu{C_oHGa*xjxQ{||L@_unQ-5Wo^dRwN|~6v@vXJr7WxuInXam9+ zYB^@8S*Aa{F|9hCnV6`VRAZEv_Pm;ahmxnSm+(A3m4N+u5x0hx(3CtdS0J+VFA;^u zpTo6!N{EgBoNk9YV`58|!hv0q)7nqq9`}o9Q&}D(xh_aVnB&c=Baqbgf?TCDOZLI zb8ViEQS-P~7Ys$@dK(0&eNoRkuq*^J$JqF-ix@LAHq=7~Pnz0FJ7Q`g%jvKDi&d8_ zPy<~(@=JvULxm+!zPv6yh(*cDoIKsPPXD8Mb5ieTo0T=aG3~Dc2h1J! 
z*%2Yg4zM2VIEF&Z2jn<|A5I@Wbp{T_HTHmO-pYOFOp6Ol$W{88uj>^yu~tzl-Jgup zyf_cFJ{wDj&q%+lzhvDra~0hCwT=(>p&*iyw*5t@QcWO65(yOsqf;C%+z@i6_xeSF z*ak*S3L%nvT7h30`kJYWEc!cR!Qv#{h*u^k!PD0B4hbZ~5~Vj0b`p-^`X&|c<#&G7a}L>3|Lq3d(Nvf2qh zJR``STiw$ZnB8o4DRED;?c_0elH9>-0;$#Fw^CxYGPASEe0|Z>j3tDs{Y%xa%?n|N zVzrRQ)WV7d0%0yf`2@ZsZK^`K>hIjN;#P>PdlIyksZfdWIw2X$Bze8R?|H1%g! zyDbHrSrFZ`c&jtU<5_Hu=B+S25B$dIW?z$zfKQ>r9MmG`8mDhS?o3~icPlXQZXb#m zP&RLyJ`s#w*jR1KhtbR|o2En*?poUS3ZEOXR}1-b$sz1mE?vlur1G+;8--4{P4z|V3}|5r6e z$YrjA5uRtldVkJHtLin;ulw0TQ9=;7njqyATP)`GRc~>Tg`(!*T*wW@bF;3G9oTF4 zzDs0;z32Z~Gfj!<)HN)`{O-UUF#>7UtSe)7hJ0^GSQN(Rmah~|F|9=8h+j>MtP-43 zzmiTtjrEy0CWpBSbBZ_hW!rzC*JVJjzDGQ|uLUfO*co3B=W}_LpoJW+Vr7n8z!?0l z|Lfou$qz46kz4Zg|1Dq0Pe(i(g`>+It*J@LUm?CP)AkmAWVh3=UU?2okE^+_)NpbR zN5?)?lbJ@_+_Kokzr4)Rx_H~Q^7d};4C1_T1*xdViS=|Jw%j^+Zrq@Hb?>4VM4Y6A zUXv~j_L0t#h#-{ekCXoQEe)q`@vZhGZtVJ(oKt~Ui}X$ z>lVyV?&m(llLEd?|NI^gWebK)g)=dk+ihTD>VXLjvD&~5P4BlLd^*=qGWMPgGtqqq zdy@kqyI+?R73wFl@)*xqBbN9YgHp%)n4B|MmI^pbeOYJwCH@K%IBO)pHbSBuMRmTS zfs8okQ`WlLOnJTqG5;6B-f`M?+=IL zP}Cf2GUJ*hu=zN_A;jtf^WQO+2&b8)h*Z-!Ei@VvAUz+Ph!P5^IZhEM%NIN-+QTZ zNhN&f$;Lo=j~Ma+>}f9ag=8)RCF3gGegNu-5YVq zh)+$p4)_3yWjL({*Pa8pYnp>&tzyBatpg~GT`@}4$WTJbK{1WEDA|xHgBCEbJP1~i zVai6lmH>5A^zdhYMThC^9FwYQiS`W}^ zDV?mz^+^bu1`QZWA|}l?h&rPiP88mK(RqV@)(f~Y(BY=Q+un60Z}1wB4+&h!)T*6l z4sJ8J4gl>E1yy*?1ac%{fCg~jWbt^_wSFJ zA(Yb3bfIem0I*pFu|!<>-v=NrNf~nVI2MD?7yz5ax&`iK+Eenttq|BeIUmA&b|=XR z<_J3l@Lp^%VPdVz$3pa}L#23TqKG@9swuYJkTzv%!RZWoc8(jlDt3P0y4~;=#LB;# zuaf)b_aQP(Cp_-#|W=T;Og|j(Y z@+VFt*UOzH9YjgZoN+;aLl3$D3g(WB1VPi+X3 zpA59qSs$J8-7@dIeS$2~(+mgOp2#!L31S#D!ac~R!S2HpFC>1x`u)t~dlW1|5ZK#k z{^4P3;9KZ1{j*HmQKAS&e)W6-WcZKLibH+RCcMU$q>%p@4@B9${};jrhu+>|(k<}u zVU}oyBTPTYj}4Pd(8;zXW#<(|MK6QcXs|C(!~RAYZsjwufY)tdNk~P-@ai((`#{5G z7n;m)Rjs2luZL2D*b|+x76DKLiFtVdd*N&dtJvM?#3a7_?l-a@A*3LqXt|zVCISMo zC|rvkIW^`ZSYSs7$5HSq`1=r8A)yp?{X@c4fH+@wv(KRfpP5D8h$Ti1@1QPGQ9UI_ zFadmE9*c)lD2nnPokZa|F5*c>rmC%wl*gwF+hZTdK6>AFr5F;Bkp|?FY_Qm)fs*{J z`Fl+#tBC96>*U_0aHJC?Pfq!YXl-W_*3=*j!uFXkZ6OzyPZ}o 
z`V~~82(~l);b7eMyh=`RlS5pi0a3R+kV+7a5Z8S9a7nR{@6Nj)n#Zd=43yyqr7(8U zOmc6hotjp(GRpFqx(k9-g-E9B%bdwc6h(}5#$NniT`-1sK7$?k4^;AVOZpb{sC4b& zpKl4uNO_$x*-TOH3r$MjjZi)tlNiih$pO}W-3~9#1C|0$Wf#xZ`E)U}`NKcYt}7RV ztwrK11Q$ zjWf*ia8We%CE|A44-saRZC#>j%Fn-hhrJ(=(f2EyCu431;0#k=HL9$rqU_gmS9Rh6 zqoVN;41sPlZf17&FBVup#%5>gjgOfT4ruFMf0(b@ZT8g0q6EjHr1zl-_x1DW=bnz_ zo#TOEn@3|qHg%$(Z_RaVCutT;i*EhNekgkq;p(t7(7l_%dZAA%qq35!q(pPw>mO8{ zsyaYGLNZ21B`^(@!4>^#jv{hB`jjdZk|8OoLb8SEU=r!o$Q55F#>R?l1!AE}lI`2l<$0v)6VUtLTufHe7y?`|yHqD#$VhZtgyd551q3 zW?&GpOS!zi2>6bC(0SS4eGKfU5zzmHBqRX{VP!e?YqbS8et~LhnN#s{UF<^;uvqvq z3JHc3T@aVkB9E9=)Efz@c2uY+J&p6I7f+2un3lo8iV$36?Mb2KU0qV(l8-kja-tFpP}rjfB45R5Q=KSCN+ z$1MVWhv+LL5O>Z)_DXZN;VJf6^?5W=ZIRw#3wz%Dw>r1{>`ZS1tY)&od8rp<_nq%f zN>%I+>9%1AQL!z#*VU@-{`TO^yVFnaNm`g}x3jO&zp%~m;S?`KHu79Y{rfA6pE<1e zs6tvF`It~|Iac}q`8Of`G-7;BDV$MPWP{?8#aHon*woJ4(ug~DeAB~tQ0z*w^(kuiLWq3`HQ%~H<1EM!8n_; zHxMV$sEHv_+*u4%Bw&c{rCaqoGu(ivmTLhSp0s(?UA{z9Nci#0L4ywW-KV;*(1`HV z<$tfI+8yKkv4I3n9I5CqCCgoxQMm+dHBBY%CyWa@RXu|I*A>< zZws@lYSzqC&_SSiGJ4L77tEFrIgeI84rG+~n`u-b085_wU%c3cYNdVC@SKrqd>YnN zqg$NTf~zzX6q>Uw~%{5D?(R{9SFZt7%2=bnJ%FhaZ#0!&8EdN0ObS zOiD^KF^Hbyw#Vtpg%Y0_yQV)+bZ1T{oIAIC%6Rbgn|OHrxWf%3j256EqbXJ{FO~t~Qm@8ooX47$M?*EqU&)}jO)}rD ztlMvfKpV=`0JqM^e!<#5{(5lK*V`^bJfy15sxgJP0OALm=|G{H*1kye6bN-su|ZoJ z#`Vq^ux7>8VgaujNmrr&ifA}tuc2<54hU*sxN%0IA^`~C8Xl_R>t_je15Sq|Ub|Sf zQm&s}_iBJ)8}Wt-3N_XFgn7gn<+lAwokvbpl?P*P%6j|?+GM0Uq(NIr?!^^%-{lMG+8tcJkikb&9ttWD=db^81uP%-`6cXGxwvj7 zFhp9eEjZnIZ!h(B^4AzVh8K9UIn&N>#ayYJ+s6-DH=fTy zK4nga5;4~3zMS1O)x#bMQQGJ9)cMhya~b?2lo`-RW{~bXR8@bad%>(^!FMo`C;#cl zi2zp5m1}T*yK^c|uCbF9=>sBO3ei;*{K{hxckfYxuAU6lquY?g-G}{ubIxOAseL#g zd24L^4{z<%kCH343-)Y8V!Hk%OEps9-h-ONK@isS5q$QD!48XG+>wco{8xb`CC|UC zD_n?#==3Ys#Gs!;dglH12bvsH?CB;FkI)mcx^+5yxI4kz=?=-syT4@odawh^C>XwJ z<6#=%4Kb2G4V#*vAU29^2dILJs_vM>KQ7dU4$BYj2bZ5E5% z0Z(Ol1|Wvl4Gb2dZ*-D^EWuNwy#A1$aB3l%_sSKpkyTtm&JFc`{yMY(3XG96IFaD> z`;K~qplxIsP0R^1f=lpvZ;H_)Jc}}Zf>cM4BfWC@Hwcy^E#|g+Uz`e*FfeN{1@irq?TH~SKGoAYUw?UikLetZs}6fDJwvW@?4>XI 
zU*G@A$a2<^^CWk*3>itSZ-VVA@R1EF9GsfvXx?OZ6FXinHY7x)*VJ5toU%)y@3a{* z&!OHAw7SmUlY-L1->f5f`Pq|$K5tp;H$7NaksrURZ+oeC^m}7sh_R#R4X#f&>n%#^ z45J%Z+LZWj*8lz(+W0b1uKfAv=xBn-SB0CrPk41pN}MN-2}J7%s=r)`6xnd{Sf4Qt znBkc1C{W~IYsfVz8)d8fQ#G2vXTvsjIzfN)@h|Kz-${j-NGO9E4a1uX8jL)Isq*W$ zEtO2$18%!_zS*QBB>i*flln7iO_wwG15xC<-Q^et3Ds@WbJW9Bm0q(1kJT;(i%JGN z35OBf4Hkbzs&Vs`tkYZD9E<*=yrE=Q5>pqJ=Mu(q91DO- zdPUiEMO9H(bj3F5o~|WjkM2%w$Q!pksBFWkHrk~9U$nh-SXN)OF8a}u(n@!TAl;qP z-3`*MARvt>-6_(IbR&|I0s_({Eg}uledpr$+h?DB?mjo3``q=%_dLq?t(a@hG2Zcx zcZ|6Uzwwg}ICKt9F0OZU5hQBcC-AUjR)g@;Y5kt@s>{rz8PLWa)^Xi*Qe08;*55@| zh=nFT*QSu{Ch$$X`g*gJ{CVW)!Sxk$}YdN78OtiU8A?vw& z=!uOLf`j%RcVJ^A?eY8Zh4IoP)I6_ZwknH z(vW!Of4w44C4d&Od)j7{QFjh%Zhb20BfCwr@M5i5y;4 zE3#ldex!HYCF8ptMisjLLH~KR@UOLWSbmn;*ZbM`gJV{giIfl$rE_%BZn=?AOp6dZ zFe8<|j{+pgfb7DK9%^MsaIk6Pd;-1f3F6i%LxY*ziy+;MraBS@y;(-UTy<*v;@W){ zg!mM!6=KC)WnSMQvOHV5pOc`!0mrXH&mZo2WFqYpFqaekr(ispGJ}sE8Ms_$;CbE( zHosB;-8R#$qWJTWq2ITCtKku&BM=T7h!zDS_?5ErM`CPVzgk{z(GaqWBo?@*j`&_= zhq)^9N>VQZ6f7P%7$|y1fPpo_h(*DVCY4YM+U~e7Ld(X#nr_A3l6m1+wly!7>@8T- zvT>m=`!YpJ-#W5_v*)bF7w4!x2=O~2hF03w!mOO~R!JeG(CFn+k__%Kon^mM7!-g@U`&q(kEcx9OmL;7=@*P{Z@ zlmMiAPdnI~;pasL$zM}VjX+V!?Wk(KuJN(o<(hR5HB8LkUli1eQWEyHyxeh|m>&6x zzBC_odC7nNXKh^9?`-TH8O!{f;K6JY`pi+#{>rw=LS*T*L5@UE%V(n~8XSUGZi%+E zx%8yrxPi=1t8|%FpUapvxg-7Ex}wi0qOfE9aGEYJXrU(#zSG(!#39(*XDJ(xsI|_` zKgScx~B_uon#ISakuNxSzw)L~kQaL~#cwPq+Cb+eL}Z{B}eHIFF} zIUqLMSL&K2@x_{ZSsAC27WK0y`B=Dj0IhwP4)LP1YuC0l`C8JONS*guYWuprT$>1I zODM1p6iVUlXUFvEjrrAZOLu#&sA+D;Cl6`U6RO=>b3#eJcG<>)7g`pXTrj!~NfU~H zL<)$WxDyqDHYRbX$T)@9h>}!@@PRY-DT7GO3(-K-_wESRv2nJxp4I*q0VS=oAZ@RN+%?ngA)edSoqC25E^%p zJ}lL`68vp;3r6j+s6vkUW}5S3$Jk<4y*%eaVCWN_t38v%cdUviz&lq}lbdju@M`ru z>=W$-{%d8cerj_*65){KRn;L#d`rtX+YmcWzgs|=Excn1o^DECj=mt zZZ<3qHyB~TN24Oe>RLObO=r+=bNF>F*P4 zi?VtK;sD`lAumm*cEYVJ3U=HKH9*%gA<^wa8+UA2KX5mqp zkChsG($P114D|+$A{yRHR zcBlnP;df+;w}6-rbb2wzef`B!puF1($pWhQVd6;bWp@YacQvXf@>Vozjy%Qp)G8`u zL8XTs5FZwagFmvzCw-qfZKzQL{sW2DdS=tQwi>#}LU4}@97cFa;}B*1*s^85TH2r! 
zFwn)=9F87(WrmOlQ_<}HnwatS{oroc)f2z-B-cQi6E!$#@U52zXy6eNphw7+qI^jV z(%DH&bX}X*JS5mw&^@Ekj(hf4P5{tNORrXBz%x$SLGA&jjO6 zBMNZv0O3D$>y3)jzpGybLJufWlRoR_7rg^0bBTef1-h54pp68_lOS!Jb=ov?wfjNf zi3^Y`DWfpw!gv5QI!=!f-N6i9fC=j>ZW|XK>_}f;(v?g z#x=8OWx!BQqP) zW+wkxeN)^j9NQpFHKj?Lx2=$hiY!?*PgXTMqhmhx#|C?H(y97Gjp~!>gPqx0&^@3) z*WnwZ@x`^@^-9!>nT8V_W?P3)Zl*@zFLax4(s;`HChf~-4u9dfG#QF$v^8l2y}D!-`hrz$ zmt7Kocd*K=H{rN&Fx~RVn^|>yf*R7`@lEVf`L_#BK`cR7m7k(@WWd^XS^eehxtCf5 zaM9{PT%K;5!xP|n6I>R-=<)gIAf1&N2Cl@#$yH;4W(j>LL!lF%R7>7jp7QYa8%i_J z{DP|)Ji@gILeA!Y9$cpOd0LXMX_>Ky*~P2Hvr>g!&Q($Y>W0lf&{IE`RU~9?7H%_) z0f12p2#hM`{;p8R4H*BVUXhbZ>5N|q-B0zp>yD3n1S)v%kh`3zKl-E_k1tjxK38Q! zrJU}dESsCzKJ&KiHZVU*ZQ;NL+o@3lj16MY3zz1Ms`N{ARh<~wkF+Hd?qp*dkFaN} z70OEZa(+DCaYrH1qCdP$eS~FS`*G*=I0{cbpOL9^<>hC?{{655$^`2+XiU2K+yF+e zFt+d6Gs5dv$Xbed;Y`*yuHyK-Z)iC2<liODU5YA+=2B zzW(Utmeq*7r!ZH3PmM*MYSFt~i%hBYV4!=kkU%b{=1_V#oVtT+TW^c01$a`=MdF8E z`^P;p(G3j?V!k)&WyF~wF{DvS1;-aR2M@72*A7FA3Q~YUfC_WN|9T49G>>9sEj@&N z22OG?g@r&RO(1nR{hO&I9!UCIe2~ge#&PUxij>KngNzTgQu%a*qXEU&>c;3b;PKpE zFLdfq_+f`u5S;<*6F9Q}9C$@6o*N7ZwUY4)jvm*VEdOvemSn{5jAqCZ)0t+0?3NBz zYq%20)C+*H&rJ##tY;YO=OA#yXeIXofyYrd2KoZPE&+d-QKbYg?*Z?$vam}PDuXi5 z+Vy+F-KA0Gjxvk!O%&Kl739nVco6br`21a|c%$zgrKp2=o4I<> zLco%__MVFDTWL{*kQ4y3N#m8`(T3`A2lRXB)RwNt1wU_BAVFb>(9MLq&+*}=9I867K<%w}aDL}O`{n=| zfkL78?f%Ddo%}G$K{N|~j!jj3g792w+~genvV^S!@{)Ytq3VNQU&4hF6Q@(2TZP<; z;6pH1iu!|QFHLhEX@!~epgV2J+7WlUaiF^Q?%(GcfR&)uDuvA zF@ZEardN_`?h23$29tdN`%94Tt~&K$bC&Z>(mD$Gk9Cqs_Sbc>MPh=^Dc;%q*n zge|6BReYopVmUA0Hx};i^6VuKi9d=0yF-hbFaT~n@shhvpH7!!Cbg}(nWWZwI;===Y|}nK7ow9v2!eD;2nqxgy((t2K4FHINcJ<&0gL(CmdYmhP49o~)~(=Gi`k z3H)fJ&Tn>Og5=}XyYstG{vz={^4{!%XZ@8R*6T=mno?~h`FDBq=<4N8`I_hDr`_Tc zFE|)5L_L2k6{?+fi4TuISZuuO(jO^U5NT@ENTzuZ(zoK&jCEoqnD6Wd7T=ST#`d>$ zCofGGeSjB#Y-BQ^$oBZ{!d}2oZ->Qy2PSg!Sny;HnM}Ys&dU?m43WucdyPMq>5pHU zpdLdyMO+n;5~@Y;{%q;qDQ_?wnUCx{rRZvD5AYZxJ2#>!|?hCD8e*`IjbA>bj4))2nQ`atS z@CVC6jgWamIRj^KAbEaWLQ6|)=QSg~1RrT|-tiSJ5TvKhy-rwXULz0$N?}HC0&d*N^|4?RGcXwV2zcawO#@IKL~ZPV{|A3B 
zn5l+HO&UE;EBX(v0-$Ds09_i6dA5iB9#FSLNYyxdNK>1G@F^}QTp{;&5^jIRVSg0R zZ4`g7Xs*V;w{le9W}p3omMvrCCN{h4q00Y6OToOi89>Lb`9RP4-8Ul8G$2tJ;QIt- z$pMZJr4CR*ZS}VXH9IqFP|ZXj)^Iprwe}GNlZo8mTN({tf7-pc4|XuQ=lQH^;efOT zJYIX9XVL|RSeSjTdd1#xr~7)1uDq0GG`K2$m<4afQZEjG2%1vAn2U}Kn&#QTi34(N z96u@Ql?Qxxf0SBed`UXhP-@POlLRVzixNKdGC&e>g0RrAIN==(fbV@0)^~W0*FP2~ zmd-E41_z)2l+-kX9PzBvH#0LR3O*h-arg?^Q}Fu0q9Q1LaeM?KUBXfh8f*THEXcJp zAxifRmqr}7vub0vUTf71tiEx`&b<3ZOt6$TAJ6#?~8l0&z3(^%@ z?=&lqvuRXuG8}k?xN^W}=r4d~yA~wYWqz2LKjE4M-v%dZK=G)QDH;N6h(S~{yI3<0 zJ5Sl|-MB)x>>70y^@p9`L(CW9AiPJGHUw zs|IW-PZ}oSqMs7LXsozua4^CMT5UDrxCfMj!2bO@>o(tTo6}evSzG5cR-((>2pb)x zED^avbkPBzS*}dE;zVE~89mm057(XvZ62N8s15PtC_X(D&z3-RfL5 zQ7{k1eL%6mP?A%8(V<1azZ%K+2BA9t4+tP2m<6Kx$c(lBG>(5h&!)Qg3FR$~fFMcB zS_EE*Ty~rDTBOoA(CY#>pKJ-BV|g|{zt4UWGylA2kNczbWo6d|mOY zpL@F7AL%k1riDPpDIUyjLTEJ=!)l^*w4cr!Stzr?Ffied2*pmO#Qrk4JK?Flc>x9W zwb>~d!k}4)MVZrS*}G{;Nf1)*x-=D<$*VL(si$Fv-b3z8OOndZIMk_=3z*+Opla&S zsB54{M%x7yg%(p=#I59&{Si)^jg+dNtD}Q~l3DeG{}uQujRzxJ5f_lo0w{S=$T(Zy z%$Jd+sP626aI|56m$^BL77~K1LY-UFL%YQlM~$;fp~o%40%0NxoBs+~VX*C~3OE?4 z$SZuXHj!ZIe3Ov?$-1msa1NUQUevdQf5pClZcrb^q_9jox)(4l*9o;hBRX+~Qyf_b zrgoiVUF&I?V8xll9eq!3q>|nO(~=v*I8^w2!W>X4ufQ8>04@}wWlwi~QK8wCw7*!- zdFll=H8tP()7S94vD-HZLki!qwph_}JOE_7J~D;J`xa5GT0O+Iq{fEH1P3;`G7Q*R zi(BF*6`KQ!MTu58tmN#K)!Tm)!xM94HEnUogh^LZO;BPK;_Z?%0Gc zvF;AnbY@vTqDz;NT7McLE2$S&JomiWs7x;EpA&>xG|`tVGOk~O+-0K5Y(?0BvM7i6 z?k>{rAqD2+DR_jNFiR=T24L2nf&Hn;&xd@Tfihst1WVT_^?OG7f=eXRs3=S1$9n`_ zMv_Kn@sGUK0kKkA3r-zYD9AUc>r$GT;KKCr)YgsBq^m{ZGTp==*ts6V%bDepKN*&+ zU9kg##qg|m{yXm2D|6oJsk7#;{aw5CycRb zzE-z0A5rOfi=F*`N2ReZng89v3&U1-vZp%CK`j01Y#_0=JURVvtt&?5Us%e&c_ey$ z&NXag^R=cqrDOM1kBEnZVl%5vdr>zH>4VOfC%Dw5QiLh%$eWt6yTRB~ha z0_43Rxi-@y=5l^2%rpKJJey%+k#F5gk{Ce9XEjuE_pj}kRsOo=o zN9lVt`%Bo6pc^v2Z@*##R>OgZC;vM!8D{R6ITk_5nsTF=eb} zG~(#h;Q#&WZEksa;=1@5&EA`&m;B1Ab3@fm-tCk{@TR7SPOZyDK*4o6W5G#bZj_z_ zukvr8oMu8Kxw<9)+i#PsX6^1sW8)*Clfe^`?E%LV%GnP%_b%=kCx>^xhZ(<0rG84Z zx1s;G@y~=q^txIW@xvcGx_eSOi?g-e9$07jRP}3=`hSkh3tVpQJkLo06(d-(UTkQn 
z^cwpgAKLEBUEmwOq>&pjaS%7T6Ju`j`XAJhAj2fx3;fErt*tI({&-^#{ff(#QmIb9 znaOTbzzA6hN|})N8;QnH1bw${vz?U7_}p(Uy>l|JOHaQ`5t;qNA$c~Y)5qiX`x%i- zP5%vZxlcngKfzu2#Wt^NRsZV66Y5+vc97OQJapNqZ|n_n;fu_!(s%>zq|-j;xgaaP zaAo5e7Q{~glA=eZ)=F_ng*pO}DZ4Uk!2OZsvU26AUzkER{lSC{iAq#-D7TcR6=b7k z_iV~)XYc9P&T*20*|Drfse(voT&_J?lW518BA~)nP-*1Ox zk&vZ8AldkMLcgcFKE~xmYAxijK(xgaMfEC7_t!W!V%Dj5+hVCeBR@Ag1M+XRM{; zhEQB9hJd?8qJM&B%6|C4be$OVzDUXiC=sl$0xTg?vyE?y`YUPgI!kvh_6bgJ8GZx7m z^IbCzdq>nASxo7j#iZz@=UU#>&aIU`0+D@_iXRIhoUMC|R9#VR!&HQ0jFViGw*?@r z_HPf6A%F_FtZK(?LXhG@1)WH>0nq?bX?bq=((m1VOU_I@pnmADRK{b$e@kEGgGB97 ziCxMh&sTPW#?aiBn5t@n;ogj(T?#O2@Y}EK5MTpDIMy^<0-62L4teS`Xtasc0cYhnF#qui5UF1mqVFw|hagW(Gg*=L_2Cu=sfppc^H zXvG!5b!G{=TI`6kfxS~bjn>GWM}=s>XF5z|2H{nQ!ig- z!A4wrPic_EsA3cKl=`5CtiP+@S$)#lT6Kv`S#C(DO=RB=$Ct4=jrH`!zv>M$hGZfD zjHN-Aro&Fje<@cA;xcG2U}3M+qYs(J=Fv-+P;A$Q&6BDtlsl-!zu4zM&a-_;Y!C_9 znz4V4hYt9VDQv&zpGQ=FzqC-X>mje~HF(y! z_!0pk%S$mnQTW3cab!*Uoqn70uiWqm|4Wxp0&=d!Z;Qeo#r6i-w0~js6RqigT26IZ zb0V7Tj0ERqF85ANUa8z9_u3C~qh8cZH4bgY0f!zl3|6ba$j;ZmaWe!W(TiKlL2S3o|HlBfvn` znQo;8JXSj21$n0TZ(GPJrO`Fidq8He?KCL_PlRD{d6~`OiOjyPh%jav^$VYID_|;a z)X|x5;UXQCuZ^GfpMST*Rm>mAWMQgkB+=|EO9fQ7HfPy@&x5MU9;a-j)~3w zC;ozz7yS$kXG3ef>hD1tH#r&q;~LVy^&!#Dnmstves%EpmzzYUo{z_940=k!&7tg9 z|62jezCVE(_q@DfEN{3DfK{Jo{_?wvoLKCfJg3O&t;kU6NiGz3!?*kI(8ci;>fMcV zl^$~}k9E_|$i}0b#jUr;QM(`$^Z>aJXnWq=afboQ(?yr$#-g!ErBIE!T#Yz*eiy%V zU_i64)Q3G%XP>vrs!`nQ&r@J4>~*sJ-k^>}J4bL61ns_`0Es08km1Y(tX&T+c+_ed zNGziLGr{E#%l}^A^(ygq2)wPpPN;! 
z)dM|l1{y*4+7rwtR%;)W3=DxV zfsw)jDL$2!iB}>Ye}Yw?a-mkAc~~*((E;5sK6UP2Gh45deUKS>?=tiA0eM;!h~bJm z-~4Z}NZX&fYM@c8_DO9ZJOf113ypJMKhf);AP~)?W;x5U&oZtb0|H}!p_WFCsFE>t zk=!(!Lf7_{()gU77~6wLx@TXdq}IQ}YeradX?;g@MW2obaLWzQ8x4-Wh>(p3Yy1b4 zXZAf8r~L9}Vo=l{30VLCm#8D3@wWg%6Fo@YtS8{IPw&;}*9Wpny_~2ms#nS}?C0fB zu>Y;Ndtn=prxrF4x86fhhBwIgJYFPzLUV1rlN@xP==`7g(K;WggnUR30s@N{m1iL2 zzdCJux6n-Q9{VrNS}h>l^pRbfy|JOOr;~^Pylme1_%7l_sY5R(1P?E&as$>Wiz0VI zwr@iPq2P+^I-=O%iY1GAw(K|`9No)#fd-;Dpe{;4u`JB)9}4t&xOv4wfd_ugT1@T) zYddVu80)iAw8Z43XAaQoY9=ezMKhAgY1R9G*j)J4s}6hs#`kDIumIWw7;PDKnt&mT zW^~i<)I$esfQcI0^6vRqCdbVkFv0*?Zd2UX0*s?^#a2NV=O2KVkxJBwH-D9fb>lii z1w6GHdHWo67Art6I*@YU^krfu%Z><-@OT&NZ!u#@c=#K-oe%h^~!Q=u$Jm z&$ySV##-<mQ_~g7PQkKmTZaS<8T27`b)YZMDDFy~QuVXbX!v-+HvZKNc zw6l^&1=PXs*ib;P48-wdEJ-m(aLYsDXhxOq-A~C9iV0L&kCF1VnkwIa%m#zcMVpMP z76wQy0Y>^FvA4}qfPThd#~a)kEpT|1nj{X!0JU|2nKluQR51Jhngqgx^5DxUc1MP_YHa2WQGyXl>W|FRK$!zLK?23rWBw|i^~4y7s8oKuF`5{-^68w+ zJH;}(T-?MLg)#B!i9emXc?5&rsVKSS(FFNj!9 zSh1^=YTE=mVf9DWE}I}u#X`}HLD}-%&be(RdQ6Nj8Wd_HQGuXR))1b_Z|2w@cg4*t zzV)0!O!%+&a@$p4iCwkV*3hSr<3Bvb6#fqkTbZYD)lh-LNq`YeQvoYw$}UZJDdg`9 z2aGL2fB4R|*tQE6mY2()0zsy)n4-}K?_Xlu%ay8zGLV_Tbd$CH(kHwXJ-9x2d!rw&o1D9B|B zzj*yCyK+e5YgA<%j&2M^5M9hT*+har(pzy^0|4Ih(!Ak1gi8aFJj5MEv;;6$2Tde= zw;7r9XsT}49XmGgz&zk6WDHn%mTWbw-86nNIZpBQc4f&_*bn3Jam3`s=sZm_eo7op zvA-AIpY3s;1sg-^OgLC<+h1<6FtM9oG}YmuvUp))Lj?<3Irl$DFv@r!dd)2x4u1l6 z*OLkE88009#s*>|@tcLxjsn*1xy_dsxD&ifKc=269AdE9Cxho}_U6wIX@@^qXeZnw z3JiVK304hII^)=B)2Te&+zq^(@)s-5K-!#3QC;$P_NshOIwg}mz>)7PjQM$Cl_-|{ z5-t0is_2Dk8Pl^I?1{6bAg@zASp)K(8Q#oyUrebcJnrBzFvG0za0&pMHMHl3;rrqf z>GRs;k1mh5!aM@@oFbi<0>3H7Bs1=`JjvwutuP7lI_EC#TY7uB*gQ@mIuvV}U8-SU zCYS+G3F-tfmpK_0IR9kwWRtse869oEYZ=WoZG{*j!2@8EkL~7KG;IpkFZwG=qOIEa%_Uxb_K&PB_&ln&p@uq^#(0}POoD}pt`Wds2%y_mkHIv9W3!RN%k{6WQFH~2h!;I z&4eFK*cJmW8_xoA#AQ`Pkob(u>wB$&G({isxe~{Z>!r7+Oxdg6y&?@j+&p9mcymNG z$iWb)83xb_u;>bv6Y^dwCnqQ`$2QQ=n#9WWHnbISyF+#wGqy}ZtH`wm9+#94+jiD zbk!i{;MjVV6#jDFLlAtid1RjJX>7=lOe8P_obt$$P`s_%^IINd4VbbYJtyMrqF%Bf 
z`tup?;tb`sPYyp`okJziD~gWp{9?U=SO~Mk(N0NBp3b87Z=0{+HcN zXlE@#SkwBg4)!g3xCL?rrF#VS9OJWax@ED1Kk23t=}y)j1`cD%&S%2xtZjFZ(h#Pa zQ*2QuVnp9@c`P26;d$+%O|)mg_x4c<-y-IFYfI`^sWzWJ5u{-GrgJ;$93MmO$d{osiO@xxJfUv_&$X1pB~u{Bm?Nbyu%R7;{d;B zRBB{WN-)*DAIwEKRybeE1d>NRD%{Vp?8g~po>K`QT3k~80z(qcvzojp%z|oEI}sxS z6A?t;{2D^#c+0dr7XQ^twegu@G1Ah=u!R0Ztbm?kLcWh_QRMm+aj*o{^~r(;q%QQ? zPK$Zb^SAkM>d-^gzyQ5*FpM|4H45z5)k$_En95}J)~9VXDK4=+D-ce~X(_nJA+K%? znKlLeLy5OLI|<3S%(X$8zR;(VkDu?J>B2s_6Hagz)&E-QM5avP9+z%x&S_i1@EL_? zHbCCoj)8aGfZt=1^n&#RlMW)G)k^On?GQtoBcuOy!ptSt|XiZ6pPeInsSUwJF8 z=vCzxeS4F7BNP$m(K+uuQWU?RHj{H+)0N`-jpSpJX|4uNtncNZ*WD>ScjmrN%#!-^ z$AkX8fM4ziUnzeqp&b82{vr?aJNeGgku(_FAOz1mfjg2=;q38f;!GLN^|#pNCCzV{$;fCJ8JQ|V z-rNMLSXyV6w1no(AB1J)hxtVwVOkfw92h4P&HB!Jc`V> zk7sztcueIu1&Bcr+DAk|qJcARAhWw?Ab?m21m)F#!6r3JH!GMx(cfWJyRuo-&}Fdp z)xKTytts2%rv^X~0Y&jQ$E5Iqu**x7A3)FvN-a_k?m~w%g(C=_YKQj(|0Sr5%)h=d zY)30tr!HGkUNlRM1_0y(yX%;hxTUp?YTB)?P7xhjUiTHNk4QPE%Wp+snM4jRtS~R@ zUrRJQ!$qXnki|}NcGtpM6KdAqo;K^cX6XvQ-aCd5R0G9Ob~;6;mlf~l{)`6qmYMJ7 zZ==B0)>M~7N_JhYCs_`QG}7fpR1(?sqA5wqL*|9+qdGJnSJotO7#p?1uMe`C`d$)M z)4$l79q-t96nM2aEmN1g66$d>&e@T~T94_rmaHb{)!yoYwbi6T<-5M!QL~P>KKqy$ zQNUT^1k(ofOLK~(e*0HX?etum@yq!N`>;ia=TXQAsw4h|bg?^FEFz1wOqM?BXmXV% zZ5R)2<($Pxss)7d=}h3tTLroDlja?O6Old4_^e!%OdG=}W|HNeSppBLKPKbT9v|Dw zeQp!kJyCpERSc)DMNs8}v)u29PYsw*z(!j$ba+|B=19dLJ`ODRNY#A)nHRAj>S6{7T%L>3n* z!|H1(LA0fvM0OH%R=Va(!Pyz3=duXQ6Zrz@?|~c zmtyfWt1mv5+ED_O0&h?L;FvDLJ8~(B9G}{xxnN-0i3Ty-^Y*V$nXWVK2GaJA#4#6_ zjxNe?k}M(q*6FP)Fub>Q1gpfEAk%#GNFN59Z*cnhrG75a?GSGHkAZ*Z9QT9uKUg{f_-z?DKJ+Nu#<<6z+rNP0Ag#ZkRTY*b(NnMJ?XBzGn9+3; z@Lff)AJ~36s49A%)}Yq6Bf#QG^CdOqGh^cr*2JVO?@4e^D$%s z9r@3_9q(U02>K(cj1SIlR8f!v4*0Oql#Xe|=M53+^;P2X&+wZJgU21zkzSwd*e-8b zI|VaU zB?s&HNB%Nd)i|LTJEGIV5&!q>sAnT~-nrxSmj=LCUf$VIghY;PXICeq8`S^0GeS+L zs3C0PZ_-?ylz7P%EA|3TQ@NIc-&US5uCi)mTbzR0lQ~I0Zakt|YC)Ypbq?b$?3O90_>3}! 
zKvKBVmh9o{E9Qc4XYkae6n#u%X$qGb9!TF!vKkDPY*t%SCepoFWZu}tR`HGRTsCeu-8nI#J?Ar_3!^W+2bW3Bf~_69dtnwoXFg0-ko|cy zlFn9StWYCE;Sms4=%%Pe0OnA4W-L6M=#Q*PTNXLqN6Ukxdck04krCGM{!Ku+{_WHM0DlV0@?)i^`dm4l; zG>1AqH@e%cP5uex@oH*+XvbG}vlJJ4%I+^R5T?o1C;PWmVWFh{Bm>t-Rh7*qWdor+ z0!PWGeSFQz3Lhtj$B3B=2)~_l6qe{$TF}D3_SC()*l{m^&eeZOx>7~>IKc>PKAJSB z8U(l7i)6$5+}9$?k8?bvS|y}mTeZ@7lsIASh52qbrXcR(N+M(I$?6-wbbsvj%;VL?c0Bn0A&1W8q_$d)B=1bR}%tw)$BR`0re6he}jGHM|IOSjE@Bfogz%PGwEtMdBM@~Qoua~^y zCP42OQz9@Ls|aMMvf8CnO^(M`lSD$(?8d2)Ivb)vKj+csdE1^$eefvK@m3}$dw=|G zYL(oLa&XSu%@ly6h1ncB32ZVt8xOI$W77fa%gDXP_Z)l=PQLfyp{AenL<~EjBtLsP zgNq^UdxNGV+XnZNeQf_ZZ83|qsa%bAOG(Mxk+xFQsUm*kcQFZ}5n5YRm|uHx3U)Vc zj+}ACYih0JdTU9oj1O@MEUiP8eIy|WV><%7KzRk%GHT5lIJNmGcs`@=?B<{<*+vWh zozqfpTtv#jj~!x90wQPk6VvDQJXe2)MqloHZicIEV8H7BrgLvO6F`pVyo;o-%5j?p zQlR1$eT1VlD}KpWMu|_X@FleFrndHRmM}^@kd!h9;>hN~>G~juvB3tjA0~elmtA|I z>gZl<7kli$MdFP`uVQuyTFFp!Xd_{j zE+wlG7PoY~hKY^9>Waoy0ADG@@{#f6X{m3oOt#wdh!1J+376G@(pIlI!E@}JMOs!t$jL3`W7>j zH|aHSDkFiE4I^tLW}(?#`J-JP2_)3)pK#jFF#bl<6`TB%MLX4s=yAZwl+w%>fq?3l z#r5@aOOm19cVUskN~iq&i;Z8-UlG8lnabL3-=X+T+DYfT^H9R!#5^i4k;r!^q~;V> z#pIz?^tfZ;Dy^i5P3k_!_umA%hHC27awVdoqJ-IPYa+5>F1`nY*`nHoB}}x^fs+(u z;`ULA<9~Kfih=b{8jYepc&DdM&fg-X>a4c#kOKTVC! 
zzCRN^hp9OF!*gd6V_;#fX0*3OFpy6t*N)v8k%TSl9$kF~!Q0QHA6-ac5Eiw=HBW9S zZ$+T+^&xI{7a<^tvg0zcn=3@vY$bacA*&)^qjH;jJu%F&2RAjsOC7z4^L&riJL7ljNA+Nnr})G{gX`(V|1s|vyval@ zX|yx0_cbQl{V8pI3&G^O*_MybKiSeR|LvdoXZy#@XLn4E35Mo9_RX!6IGav>M+84o z`wQGxZohss5P%YZuR`xSIs7~-kZw&Ms8lwg7&qco`kwN?U0X{a6cGhVDnxkkZ#6=W z=vgY#EgZP*^f3M6ulem;E6QCnehG*F@n*aAt>6^ko!#i34%5_n zpC4n2y$vku%?j6}zM_*`130G3%gsE!_*RC5g^&{g8+IkErvbD|hRvEAMPDBE`IaYE zYWVJHwkS(qlaKpiRxA2M_0z-@?Oq6{{!uJ%$TTMPzN7gzN_)_NdG=zVrr<$T%$mnO zPnRf1cv1T$b_=J~ay{NY6(3KpXVN=0qhVx}yg?`1ks5PFxknzjndP~wq71h!a@9my`CI+=YYyl*uufyVj*w6+7b+Q% za1u&7lTeVUfp@fCXs z&5M@1-@)MYBBH4};UoKzI>dPb9KHXg9`Ae_^a1(zumF4T(F&12B#jQ^?zj%R*VtND zRe;8aNev)C&^w7IqygHaEj!0ulZ=s#=LuLwIL0LgI>1q;4snE7?lOHc` zN&?9nEW3Ued24&4drA`IN0{6}heW=sO@7KR%u1BlVqLlKS8c(ik?uDeD*y^tT2TGw zFn{?Vy7e_FkY?pTdF;!Bp!;u0E0`$kfnqaA^;6dDtkkL82Y%ZV0QHxWN8(N9ctWa; zjS&nQ7Ys{2rxv571{I&6N}o!4)O}Lf{ht;e=G9+26>L5-AMNw$6!>9gD0|B+wWq!x zIRfq+F&`?qCi)B$1z|o-56m)ffRhspJV9@yL1_v?dtK4*$|McwWUFOCxlLLgY2A!~aX2Y{tlbp9$EF$s_ZM2kbkz{n@LPz~l0&+U;eYCsPkU(9CbJ z)-4B~Y_o}oWV*C2@}2U43aT(8!K}A~>)i}RBj)SjmcV(Un&gdT0Ke2Y|Nar^&H}2J zd#)&apuQ%cB+C<%6V#g>{GRvk&5QBDT)O8lqVBaNq$L2ebQefXZSvsSw6WZ|kpCu9keC{u!1q1XVBZV0laMSDM)_ z3)2Wjnl&lBc0QQ$%*AAS1d2}gzSxU9Afnqs4L-d6;W}(6!8N+ijDwHcmtLPK8A=|s z>TsNkj9{&4?cEc*1DN2=f@D-Uq&XSas03BPpr#9NI7jNADghs6${yDyL)3Kukkvo~ zg~Sv?eFRBGw?9fhjPoU)%gdR zab&$!KOZlUtZqx>V0(G*n{LMURJ+Q#f|d@@FP9eYV0W7r``r~_oHsW$KH9f(qppJ* z1mw1n+Ahx|lHSsv0NM6=YWV>5CNiYMkEg}^mJU|T!Pn>=eX;0F?iP z>POHt8_j?8TezqxsBT~Jylz%j+NV-FyItjGCTp=)Br_g7R$m}Ft{(%I%_XXi7HazXo^~=gNl4INvUQylq z;ji|(>YgJX1X_|e3%>CFiF(_-rypAdho;2i?<5S#Me$|Z&eXTJDa87Qlp)npVb%?# zT|6w+@ecvfS-^g4eCOH4#7ey)n1vpITsCWAQvRuH7yeFys^#+14C>7S7$WS_CsCP; z#)xmfzLOqSNw@2rw4XlA%Dls}8vYRHA_DcCL1I|)hStAM=Y-z@v6Ym9*d2F6$3;@< zG#lG;m#_W)`qJxQyH;*bn{i|Z&^dz#1XF>g*!J2>K|>E6OFr1!SgjmM^gN|$vHsBY zI9WK)I|$f>O8+ZwsOwD{#ayE5^SP6Y8(`5SyCgmvJ^Q)y<$ZK?ny&Z0-HZcI+u4}z zm(T2kl~a`l5t)mEo>m);Y+$B^d+xIvYw`4!?%dA+XfEumX^F_D@j7dJ`xG<=P5&eI zgjEMDid<9rorUn? 
z5r$Y0JmDyVBYV#iX>KWCXRV^kjXP$kc)PF3k%f(jxjIiExmCZ{2sAv-N?PX7U zjBpo zH+OIVLRceaT6C9SO)5QIUAb8~eyyPLavZbgN1nfyOr zzTan9Ui{gATP|Sw`#aHRu=VrhRKYN)#%XA}Sf2m* z`QOtu6E&Sz7`^_V7F-_1zc@VqvUXUVChPC5Z4E}uD{hQ)HPJ-CU7 z%&MV403UD*_}5C!K1!hs^dExcMiUh|xnfi#J`7elNk8D@;QA--4(OH-$NRc=dt*Gu7C zA*4)dM**qi7x6b|BKXbEPESe$v^&ly15Ef#YF~6P`w~Y>?->pa7u3|y^+^3DK0M4@ z=QO|j__2_v^Ej;}DU$?8#

7KNjGV=7LC7?5@vyw-X~ClQi17!g0PL<8mT~;Sx>m z9(_>eeD-Ifsde`Q!^j7Z%ccNl=6$sPz%?5%#46`38A?#?H`@zuyzMBqbOT$u^AdQ; zFVIW2c_&^j^-Ek`cvz2q)B=n#93DoWjU5ZocyKit{4^JSnJjDI6`5l)a5#h2m^fWi zgTZ23zVbIkzJB2Ur0*=gK>an}bcoFHbk4@Y&QibOdovt*1_rleeX= zX-hEXf~Hn-k;+S^SLp-^Knr|6gP5~f$15grur0gOZa?>$@)MheURVvhy$tAqp!QV| zdg5dWMtP*#jXmSZFS?zb9gbpkQS9v1rhUbtcVK?Vd=7R@RsD9%pOzSd2|67#p8aN6G~>^RY67x_lwXP7>me1n*tBf8gNo_M$OL7 zUbZ?juw z;EhX%z1GJ+eoGcA_1Rrc7$$D%A%F0$=>hbtQop|mRX_P{-SVf#ZB|KGM5N9Cb${aJ)oeqNFYUVyrPn%``=_VL>rX@2M=BmlLkti<-v=$qnQ0$cCdFTF zNF6!x7>VD$gbI&q1Je^FV5)gar#Iny^M>it>BiYfQ}xk$U8(ktpf<~;?j~Fn2uL0GOr#lbhkY_*xVEtdS?;=Eih=xU4I5Xj9H{q5^)|ayHL_} z>Qi(Y!QTk#XN10wjJ(+!kBud-Klqh8(%>t$yt2|eJ}$LX|0(23HfV3?h-T@KIr}Y> zU08T??_jI%B(kBQ!PeQiy)*IV*5OKt#UNIHqzORt`0l^?etB$oIAmqGq=Q*^rpM0F zv9;;=Ytzruqm7PaA)C!BXPZ~Vy*JI2K4MEsuAFW2pFIN)_|u^2kowNis?dGt*f^T} z1Hdvbz)^x5WXu1%yyp&h(0$y1<5?EB5<|7l8LIu-QpJx$Nib7R2vO{ zOvlfp+3m_Iqf(LZFnFdDYU^GmWe4r?Kd_kyJ#qjOtAM{@BIY#H}9gN#C2irXpfuv*R_7_n}1*p%0s-PNZuuv`gZ+6IK#s zPV*ms4+m&rdhp=DIR)uTFlho{D+9a$7I3=oQkImTvj)tzM~y7DfE7O;aCTB!J2Z4R z?T#P4@dw9`duu;>M@PfJrh~z(yVK|{+k&H(_T|f$t_$7iBMa6m6JArA5`!FcCm`+( z-`U$U|C+=L?f%K9zcs<`XC=G%wz87?_d<^{*wx@XD}7|O&D0AP0W?t%&ZOq?LU?8h z>*+N`s*CvFCnlmg(0aG_R!8Hv++TwyI$miyy$O!-P?popkj3TYmUKxkbD93SjmC(* zJx|Cx0B1%^6oc4z@R(~qsw}|oi~Ai48yOp0Jm5ba>&ld2IoezM1QtOkuknYUH47Q) z`%NCuyMYz;-hHW0wV{{AfB)g?f_%6G=jk!{x@SNr;?LKX;LD{x9y88Q4*%4EzczC8 z7#smm{q+KQVhT>?m9wp1neX=|^$oqI=wZz^3w#hawG-Zq*BnO41x@BLIF)f=Pq)8t zK_}*<|B+K85YJ!QD`|fJ*8Ur!f2K?CMu#^wuyjABPHDh{ zt1m7_l-Sg>b!xS>6>@BS-emt_5NUuMZM})G_`0~reR?u=CY{te5MKtMW}}4_-Xhe1 zwU=_`qZygBwDhHw%Cy^$34Hff$+f#wOiY--e1`ZP?L7oDekohA)l@0_@$ByI?sPPu zRqU-#FszhzDY25hDa-6#yf)vRE=fyIPZ1%sz4yZf4+b3$4^Ea1N?8rSyaBdCc78q~ zmv%;EG4;UMSh!}YXq!)TPuguEyOfyI1OBGtThJ#!p_$`o*>zD~Zt9ai*@O#V)*>S# zHHf|^(=s!6FJ;BpN)Yc8w4oDP%6MpU2Zv<>r_Q!y-aB_SP~w~(Ur2NGmsx>bfsux= zFDa*X5k$06_>#c(7=1h=K(5lWQ_!@ss%mz7{%h&fr&O25FtMqpLVLTwdW{R1CUSxc zQG!K1E*ZlUV*`V$>f_6*_u6kl5s9_}01CBF*?kh%pk=i+)28Zsd%{^v)MYXf9DiB- 
zwWr;uCr1`~_%kkj8;wVNci-KoTe&{alrgFB(pL6#%_SgWwj+8Ln1W0FEp5;!#^0;= zcH=QyN#ZjJ2d|7zoaeq6bZfBZ&5bRvvZo5e7O7ra28mHObQnA}9I3FQ1lv1U=Io%S zlrR$VU|#b-Q1bPag#8RS+c-;q!Qk%Mfp+E{lIRYL<>^4%TH@%)nbn$z#m0%7cQqe1&Qo+h&^Yx< zR1@c!6)QPmzTz_WhW6)_(AA$fW-CL*`-j6#ufWd1gAJ8Dh*oCtqw2YH!~^0w^z`)a z{f;~#)AmNhA!&c6ZBHlbYF2JEL;vWRz4+oCRb`7 z2f(d8nNLdKu<@mp=T+@ORyfSP8NJ{WAD7&5VH~P0;nX)N@~lLJh2J(d-dQi-j@VNB zC)L;!b%pxx2n3n~d`SY064h#q<-d_V=88pETiTtnmOj^#4GtVf` z7S4`Y2pr&-@fzjQq`k=1uL417@pLVo`vUxTS%`yO3ym3~rhi}gxWLj1-G%OEb_XZ% zyVm6sf^{9b;^ZWLFnQ!XeXwEX+#p(3N1Jxu56|&niif(raBkt2sb;&kqOb^U32ljc zfjQZ%K8PuVf*@{U4Y(&$i&0ck6`M9l49!*nKhANgkqMZt4zN48c5mfLctOYJgK}EP z(a7}MyVqI3S%8m=Q@=kGQJP_P^4Xm}{PbX}Vc=i~_%$}dgD)_sY7-+Dio!`(L00!QFH#978K$8q23oe12tv z6M*xoa7GE;oh)R`qqWkVe)?<=Xv0$1?FaXyYe`~*Ll*WA?^TLi@>r_)P-)?O!iX|h z_!A_>VC|zu2lT+VqoMB%#ZU>DTCtRDXQ7qQwG({ABvG~w)2+Pr89Npg{JB?;o{Iyf=iyr!)|VdjkD>o#&AWFasc!(hM-& zA>nFhHu}Z#HW)}KDE$wa2GsqXGDDtwEZ`P@z}b%00Jv!MQ%VaYCxDhBZdw z{A)bs|8>{_BDDV7KmS+0;$N=s0Cp%$%R~~cD5&IN$wNylm&9%^XccQ0ha$8*Gaf`9tu-a0fAtx z%oAdnvEO(QR)tIx8xGhGN>0UBq3Ev3)MQYJ=W_(A^m;kxUj%As8w_S=OBTn&e#_y5p<=It zP5J1<%158@g#dt4kIHFSvoP^Q>o4_G>&etjPtQ6M(Ns;IjJf|{u{zd=M=ON3mqg#W z6ZJRGLeV{xBVlvScJgYekpMG71M+NDl4j*w84s22J!tAql{I1dnUc}O>z8j)HvtC> zrZPr3aVgR&$pgQN`w)}pdiEPjlyZCLp}*wBGr7pAMv|{+B~uy7e=ify1~e;(6;Xuz zPpk;a?v=)g=Lf%k;le-$;NNO-KqH>{0`V0<+moEV;~*~}o>ZnK@mnxwOAyhB_Pdh6 z4N}z;>t+<+Il%I0c}N$^IlwImNPwVj^=smBqXE&aCjz3Bb6xF)j`0`>*#T(rjsfPz z@l_jxx~u!p%uH7+dhF%F{u$;10IJP@A+LZEhb;+Uov!Sdl@n)F;-*sL9;U=VEzX6BU78y5#ym#ZlA{j| z^WiaUojH$~#~y-yccWa@c>pZ;K6!Mzs8@k!A$FZRd!QV#9?+^oG=n{&pvwX{q zl`N2+7%(XiC^f+J0*D7or?(f#^5p zzks&8mEoYoLU2A{Wi`-B)NP-c70*%2eMS)R^luYm!h6p@5Kg`XaPa6Pm1SDHD8je{ zKsD|fi-n^*C3#}eiUCeOWnZ6F;I}5iGaT-sdt;GH^H{pKKik-rkiBgc#z9 z@ZIg}DmOQ9#(TTIwLILSVuJ(C@Aez};CFkKvZ2!q{CyHzRTLy4fl6`qa!6I+D^AcLVF;JyzjT!vy+ z_x(WUP4jfhV9oUH%0tU*YUzBxRM#aAhV>6yrojgrWb2=TX*RP~Nr| zHn`SS*HDoo3u=7s{m_ADd~Fj!hQWcrWZR3CEa)8*GlTq9L`24#CY+u5zEZ~Itc2QssrX( 
z+>~d({*L4PLsy+t0#NDN;8`08QwD&iL|iaSyg~L=5&D6jaKeNxhr&S@a~s>8F#74S z^MSPbxIwtHtAnHBEJij1R2|T0F)94<<&^p?RKl_=?(DVFH=&)h&}aOKniyZ}KmmMo zT<)P?pbc9X(QAf_kD&Ees@GJH|1DLflriG-LqhpksB;ov$RZS<&Y*tP!l4-hKLmO* zhFVva?O9u6fuO0PW)f&>xK^xDmX*-G^nwE@S)f$G9Mc@Cpi-k4)S=z^CNK{6r^=Tte6{sc_03V{^dQ@8xU7ty*PYfP67;T%aQ+yZD zRKQDiQW441-^?ypS5O%HtY=V>MPe%{^}|s%G|Cx_l86Lubzy5CK6dg(`*ovGH~|r;cH^90XuN)s@v&`|;r5g@b+%We4G4p0;+ z>7w|w7%zVjG>*{bdCzkcUXE9voJFOf?-plcH7eZ*h!U=OhGt}@T0q7bC ztRvE$H8&e zI@Y5ryqiR{{Z91Fc<|?cOa&EN=NXzj8W20f>)WOdd|y}YvyHE#hqn|Ib!oolZ!%t_ zdc zLJTE;4edETV($?6DXV}QH(kH}L;4NNi7%)g0V!-=h@Am(*wcli7+?HgG6~QnBYu@z z#BT8y7#@#7n~@E%mna z2?tdy5VNTRLO8{2PW~r5$~b3HY8$m~8G>X26f^rovr4u2&YPzfqi|u4A{k#i+(HK7 za*t|RgFCnew^Y0gvqKEK*(Z>s0adw06-`G#0svO1Z+tcJ@0QLDdq+-Ruq6)<(Uf?4 zr+EpVQB^6ty0BvRK^S))loy`59dwRIEv#3@*NeT3Pptfw)Ae?cz%VdvR2pcS%b@?7 z$BMU*gc?d6-lDh&BjrMQQ4g5WMIv@dKto8YZdCuJgik#xmT#RsXO04FZ2|pT+U^-j z`>iwZJz9kQ(a$d82QJ%}3f~xqMzOlWW$`)r{f^DOd-j&q*qFWFfX9+RZg_4ht6{sIqN3Z$YLMWrEv&8`37 z9E(7%Pa{J6UCFlgeoSYUoP_ur9}aBe5OaQ@ZnE)zcn1J+;Mv{@xdy%szQf%kKm&`* zIE={LovJVL+YmyZiO)=m2e1T^&B2#*-!dzgD>J%%URnj$W`$MXA6v{UrI^aG@K!lm zEFgE_6lW~$ssQFU6u*4Luyg6B3hI2IOGAsp?E6oFiuw5f0K>zkbu~vPjO!{PL51b} zPuGStT$8?(T>wVj+FkfJ2li7IZ&!z?)Zdr-460M@^$xB6p3y`?q68>?Io{`*62xMO zefs*3cOb?r9bNC8We0=&EQz!+88s(zJ4b|DxH4=@fYiUrR;M#E;g*N zg+<`k3xjk{)k)j67zKrxK44&Kpb-a;z=R{kTsrV(qB{yV42};OP*I9O0&eosB*-Os z8l|Lsy2$P`uAiH3wnG9Fiei|i04tmgekX`WG_mitNwYW*oYBXHtP&3%O@t_r*^a0L zV+Qb#R0DUE^0l#AHcOnylRIk`Q3ge-D3dOc9f10bq`zqEaM$J&u)>g4gFX*Lxt!ou zl>p@cj0=|MqnUa3;QUahTC5A~jiBC-(ovNgXOZB0?JdP;HFM3=_`ndLuARSbk>-4r zVFj`P`rSk}T8)ub1&^AyNsHW?gByf>#UVsa76l^&oM0vyLF=f#&25Uor8%V_1U0BI z1fGcz#h^8%1GG&$jRoFbr}kLKhTS;LwYI&8Nxzu$bEx9rzg73fmHYDzkUYGNue)wzts?HigjoL9#OUq*_&7^iox}t^uz!B z=x1=!85U8NKqVJ&IRQ-6ohV(C^**Mo+u|3!Z>gZ=SsGejijgCcw9I{X+S{@Fu!4&fd?VL!8=Gf^R?1 z{{kqE^S3nzGENK8h*qO>*Nuu8wq6~TJHFn>I4JbJBuaizvaR2(PTgx1mks@hlD5lV z8U=HeQ1jZuC$|a)gC)ScR#^u5QowOweF%i^ratzK-B)7*%{fAh!9-4vZ>%qBAW$67 
zIENH2zzCtWK?LSD2yM&4+xsA7u=DRn2^4!Xd}rb@huyo^lMul@gah7feFNkAj16Fp zI0j;;i-r|0pjV7L zY576%@qk`#>p0BeYS$Yx zaeB}3s7~D>A#W<*@0&RX{Qi0$gOfS}_3jxCXfi4l7V?yQZF3C^1NH#V4bX(5`VYW_ zV{SzT$Cbz}xXbb8B2)Zj1b|auUlb1ASEw)CF6#2f+!Caq9={)s(G>_YBDTC&x3tVd zBl`f{8Y8u7F%UYAMP?Yl#cg zq^b^HVuxqqPts2 zeU5!503nVD1PT^7*y*3|2pDi|2zboo0esE_7;cgGwgBzo>V01{YL!Ccg*2lG;D6e+C;D9?OmSN zoA2iI-1wlt#1uUL`?AiEG_1`&73JctQant)<&))Y&oyY}`?>6yim>(nruLTrR9Ft; zDoCiUKp1lG}%>ux^sFlKu*J=|K%>I?-j^mgn<;MfM|DR z4Q5|8ixuVP^j#d=UmT^&i_`a=6;(%xO(s#rj;G-Qi)72r{$+TfJK-zIW=#ZURS+!S z)}`ZfeMi(6=xgpUziQYUI#&Rsio-yKm4FsS(K)lbHyNh6B;$s?R|NFbf+n0beP{=_ z1nxMBUUvNOWgf6%`xY-c3SrGlc2{gt3p5KWZqCNv$sZwISrp)K>B`yGAT7YsCzXEB zc_b9lmB(a$J9;O?b4~3}V}6f3D_cBnc8v9#%x?bM>8oVye>bFJFGapYD#^vikj3fL z6lZMLK6Sg|U$H<4#7|ySCsG+Nb|?PU&BqT3i=Z4P~0B!{_dO4FZ!tBZ)JURq9VmE&?hRraSRYuryu1&LClsg zx`ikh7Q*i4wC~+bL6FqPx%?>=uwjyEWpS5chLeNP9{;PV%)iKSi%6 zpNDwd6lFwuzn%6InP{*7K*&>-kVT38t`O-ke(m)Ujsy}6U>P`>UpM!BPee8Z% z1KX^VsSj#A#ep@0T1?L3Oo{W({B82Uzwzkvo8&BsSTRE}=q)E$`q9%CK2wtEbIc1# z04)zVoY9PfPF*olg~Q3qW1mMIJ-p;je)z^T9^sX8I!wP8jsM~tZS0VFi|rrzE1``d zxMDwf$@a(dxS(KrAtuNl<8n%vBYnm_M5|Wq|NObm+3Xbvju&>6uA}=`HRCRjs)K6! z|2D>c8Cm%!;HpMD0klug!NDXEG&8MVe!bvW1{dnZ6_z&%-cdGGpW_D$z{L(0!+aWk zce8ClM^chiI?FFJ=h8Xcj&4qK2H@}+O|nOvwKFxc5k@grKCOwy8ggGd*HY?yZ7HZ~ z105mUT`gC6btK2GIR(J)Cd42B66o8GtkHv!CTfLrz;N2Z$(fj9FcaQ#mky~3qo7asiGU4v_D{>&IGh4yuvzkE@Bq}SJ{Yo2K$ zigGQRvW3A8XwKXy0*-QUPRFAOlRy1oCcmElQhv;R4>i6%^MHRq3^rGWdfCYASbCNQ zJU=)Y%zExUGaTK2KyH%vGPDyKvHZ@24bLL(Fo&Rf#bYTvkPA4^Lxqpkjb@EkGt)#3 zikSf?2Doa9?s`DGs8dozAjOx0y03a2}3t-YhiL5L~yze6N(zGVOhIQTtPY~f*WpOV^*p35! 
zUc!j_g&P!hBjyV#vAKKq`RTY4nk@wZSfD^Cl$I%2n0n=ngh?|k@Qf16K|W{5rvZYH zK;5w*genhoke=>N69z55ME5vnRKyhd50X`eF_bb68;r@#Z2f)nbFE#DYvH+~Grns`q&SMk4s{s4|v}oKG4;!gS z#p{MWN5i37ff9tg^zSf3jf4iX@7`A}LiCOdi&TB5U^*u0)jaWZ&Q%}2(14=Rsl|&r z(t12y8DB4Wq(Q%04k~PDuHVCPbh|PIMV66*3@n805yAzExeD6!^sf+6L)1$sufYX8 zdz*;1(Z>dE(EaJ!;#M0*fnQJS^&YqvSVaK$^zG*`cI51^G6;;pMIX^#3~AFvuP*iv zM~VJ^KU%RA+a^Kb@s{OH;;auQ@B__*Sk7?ZCXcDOe|d956%8=a=tm&JEvQ4)Oy9(5 z?~Z~qfZ!u4(lS{v{LxinTas?~as9Eeg0{~K&l6VibVFa6645O6s31_?xY@G1R7D{~ zvX8{W#Y{ozEx6gza>ssg0F!_J{As{BL7&Kd)#BPL5+h*{>s-qbQrYh1^eR;iComOc zu%OF+zO~*BgfjXiupXE+$aYfQc2mHW&N}y+!BqYln@FfL6Da4v_Ft%^rHu4%+UwP0 zAF}K*Y4dmqJU&?_re4vDX+&CAq2&d1L?VKCxZXX-o~4oi69-oU0hBzDDxi0P=N3M4 z$i5rV)eQTY{tS@vf5;ZnDvyJ!N_lVWH;J8F)8@dSje|$v;ZYqrU%lYt(*j8$pt8r zp(5vOxmYBRz$jRZz1vx;4tH(bcl7w=>D~dcOq`Ew8hoTAS`#W@rz`TdkXL~P z!Al0i=5dEDhj+t&Cip`V4fa_}*-u(yF`^969Tq*)&HQZy!EY{Ycm|42&9merb(RX@Gd=l6%}T?8cS@#Hs7i5I%4 z<;#Dl0O0*Q!q}=w?*qUp$A;q8Nq>pEZk2ORM21A3)c8!7+S>zrK>k0+>Z=fgs^LSB zLJdU2GSh=Ae7bTF%Q-9gN`Vk;vyR~XMIkSuc`g3$fE)ahu#Nt=^As){xr!GrmBX#} z^PH9a|5wLY$mHxNt2N4C+PBW@;ctah2*+K$OYVov`Yz5xJ0&x?jH@_ z!7~E*N}vl&cSYp$A8COCzIgW0-SP_4;9Cx)pf6T+;rAN=43OiXFK<0p2S5O}HV@6$ zoLXIRlnAc+)AgGd{s|olH?RR@WN%0)ZA2n;eq;7qONP6|6|IBO?0ki{Xv3EKyx#Gn zktvNq1?ip9i@$W+svU7+aR5nud0_y^Ipe{gMxw>F1|d7)WzZc39i{X%n(G{(qN{~g z>}C$G4jv9hDBo2H2R3LOBHRtKJM$nQF0Vcas~qK|C;~>OO+K?_!~KzjOmaS#;SPQQ zc`C_!BQA;(MOmgdha6axew)V*W{wzSg{7P+nOvd>0UKNHT39X6Z-rEEvUp6-k*}wq z5xvdcu$X3KZ7An&DAT&U**f*U>5UL4o{M(vk*f2_%wOJR&Ut>qeva>+kM$mL9J`kE zTP47D_?`QP?;+0%-Sa&Y{=qr?v-Rvd&=iS!y3;L_5Ax)<_-XO6WrkXvYkcpJDGJYH zth;E`AL}{Fup^m+{|8?EwUkH8i`+Wtiaeb!`Y$CinV%oC^6~Snl{y~hTH9?ZNjM>T zE2|b1iQk9_I6VcsU(|h8vLihV0wEA#v2{4ed0g)Ry1hl2YoK{)!a1`duOJrWt^XUW z9wg87T-+^}hdsT-BV2?KLbY|BvkR;#=Bj002?pdB-d`WB{$wBY_bg<%U36V%ZLf6~ z8CFJ4tTm?78S{gr(vTwZ(g1Tin4$ub%0a29U^!p^) z${)8ao@wYQWUay(J>8y9{sNyMaeldZbp^w5Cjs74O1lZ^^QI-#iPuD-(a?(F7pwH3b*CWvrA~6wXvLC ztTc6L0nydWmA_)d7V}CB;O1YTbMcK}r7B9?M5-b{5bv^PY&;jz z>8?`(h!7zKd&LlQK(8vo45mxNemlZ$!N$X@D<>860JOzGA 
zQbIELD{rf@E>D%hVf$2P4-X(rMI8mU86axE!~h{j=sStPbu9(=BkX7Q)2>$ZSk0si z%&s;LQ*Ny=$gl@QIQ^7fRxZ#1#quvv{r9A{IwRj{6@Pjs>dqVM!JfO`2F_$uIQweD~)d&~9u;DMj+i zVht}1C6J?Gpu@4&`qXg$K}s${>=MeQ0>_IN*U?i2bbbO-hh8Y0J|1%+FVB!tABvS} zGF9diCXw#3QS?&9RK{ZBCc2l7EnlH+Q=Kj2K_s?v0WIIO@Dy)Mc|*7;j0m>-IA}5> z&gm&gbQA5~y(-|+XqYBx699j%Ajkn01a-et|C%7P=d z)F@R{ibN2M!x^Q7(386htJC}ei)B!cr3+dV?pXXKMCYbFtj+754cHwF!6}Y6@(KVz zX63%k^TbeFpNZu@r8DhH-ARu38?R_=uJZMn{#Y|Y*Bm8zb0XV7H2pi9+BTtEL~u5l z?ed}|eKe$lff&6MQdw{e3NNUVvrDL7cS*RDh9un|n7tQ-@t@jpg6F21KypQh{w2?< zJt-k3b|ksxomYvbVCgjB2n*YP6;)J?pnvN*sUaSXi31*oZ5i%}ZY08LHX*MwG)XG) zW%a!a>z>NXZ|5F zax3Me%2#)QOKcq#9Ejx^QW&fm51zOWXCyExanM`Ok44G6h73%eo=fxxyFPa}iOWsL z8Xpdi0%IRK=@ocq(63bwCL3y@vOrk6f{H=i&H)WAfS`v?I|UM6&Z|>ExcbXOr4=N`yAea8{Us?<&eqdQiT)zc9)urAf9Z*=YB1CArLV#gVPs-}#fs$P<&c1fB4j z>S)46^g5Q{H5}M6e>}qhj7J=(EPUY;5BS3X_CO9`5q7a`5y9+8JNw92xOZ=|l`p`a zKL=u4NY5E%G}{=RNy(LC5XrezNf&|w~UPm(Z{EqPh9p4CBwI)Ug zYWenIX_AKY?^169a-^zG-S+ouzuc3C;*|05nl}Yv;O$)x-b!ywtkXDz!b#&X!AbUD z`tBNunZixk*+FJ_IWjp$sgeXlv^3&BfjdA4T_Lx+#WfYqAw*)D)?1za1H!{f@{0c3 zCql1KGF8>|g%6&->&gJUznF|eQP*seWnZqga^P1^b&;y{`|&}csa-oBuyqFWaJ@oUg|JxN1R1PE>#y^C zakA3LUAOZI3dhH#udwORg~jozZ<$gSG` zLf*?lT@J?Te6K|r-lN8Cg9oY+0FA)w2m>WEU<>}FQe^Oe_fZx)rs=*@QVs&-g*n4r z;&%~hh+kiND{FTv>zzck7V=j?{F^V{0Zn6T8SPWf)Q6_(XUtdFCFZqTy?LypwcNpC zuR3wyrAal^{cNW=y*b?!!*wR%=EBMbHwWCm4^JMH$!X|)ndV-kpS-uFn7^GL;dc2d zuEO)&L`KfPJ1EUwBA_C21;`!BEpNs^^Jl&DO1h5yu(!MWPUovhjC(S; zjm{5w>UUT5k~=k2D@K`j?W9Ovwbu?BR$N=dajuZ_G0Qg0j#Ba4z5%#{%aSfav1*96 zl0wS9XVs)xOEa!i&c6^z0}M{hX@@&Pyiy{~8dqEk)r+oSxxrpG>RdYRMc&Z>Poh+@ zbPRjzQ?CUWw$Cm-kg2FxV`yG8 zZVrBIPEC${=rQ>sA-dqW8DaDAj^xR=>Ry5Zw^BoU$B!nUbmrV|=u_UvQA3xxsnrk> zQ2Nje(lM`{ByH@@g<_;WPlwH0ztvnY0IA`!w*;I+OQLN{__zn4)u*aV>2eQIec9!_Me5wOj>~U8f?c#7!+YOKt z|CXE`1!Gyf>X4`0)mKcAEqN=C3TVbV?XF z`uMGA=8WI7P$C;g;_Cj?CXgZ5;u0{;VMn@n0G2YS&;t}f8d2G`RVwu`I4&?B9Qh1k z_Nn3_pCe^qeUZtR6yt^qivtu83D-r^-XIcJJzpVmS0i6(tdDIsZOHce<%0PYUz`zqCynZ 
zTeSXItEV$Huhxz?4j1S?DA80-?W}?Nk-fIM|>7n6iJY4rE6Vv??vU+2`F!@>!bU%~m64U_r@Pv)s-@0z%l@swt`7V!d z>;lek8FgO1XP(EfJm77=0IcwEpDKGcxX|p&i;H#HB=$FG%7_6)*KW`r zg!B8t6O9NN34iH8rI|5Q*LR^X-{!Om>efx#fk}mK4iYrm8r^j?8y9fZfu}fnF;Z)N zL@4{Okb_pbEA3*V{+p%Df^X?pcD14nB}4!=?hmiZpSeklXoU}nz~j86_t)7vLyXgb zF`HYYdrDylGB?yaz6zF;t`CVZh2IMD!xx z`n)v6f&?L{wgp~7XlFc+L#pc1>fX%6lcexh^cQnpso#sSukV+vY$((-7B$mqFw_(1 z8?TY=+2KGAs%PRw&{vWU*>O%(k?wCt%-rXS3GI9}_N1Iu@4$los=`|RjU<{#pQVWL zA$e*bdM)1o?%Vo6ll8JHGV{IX3DFTrkHxHtm%kzjOe7qXq z?*I1A6A8mkI`V%T&}QHLT|vj8m;RLaLXd+&B}wk~LnOP7KFRL*Jy2Y1Yrj-ZL{u#t zQQ;aZI^RH>5${}K_vct&&%5ctqpG1_b3Vf;Ij}l8uA57N3XMUfLW<MW`l^=Wa43fP^=@?=Aicjnv0gf|mONGOPhkJ|*{dP3<*eZW> z<|vYw+dX_DWz=EY<5a5jRw?=$=7DLS==T&!?V=RXzC*&mo3A6>nHq#Qq}kw1(i3B* zWaZWoA_sb6gsukAO)sThPp0W&(S575fX_*vaK;;m;UFe?Ar`d8747YZ+tljj{X=K? zuXT;_(J&3n(J|Y=J1rG$sT&ytFdZ4m%fRvjR&6Pk?ZQ7y>2D`C^3n$eKqe&Thy~WV zGCWMUyOtfZcbvTs(z@bByoCUDJ*}V|9P^JLB_B}y{oBL?l|6#m^#4_WQdN8d2w0^x zeShYA_g(Ym=ZlbfR2DM~Wb7WQ3$gD{%=TcDwmp|#_{*iQHd)h<)*>1@E>)c_@*;C?aeTRPkV#j-j>&?5956F-A zoH1KW1d7cEU#7BZ@6$H%@A|P*^ip7G)DJD+f)wPR)PUBJU%Wco^{0iuUAVG_yom#n zo<<;GfK&OOm8bYKx9s6o}CP!0ykVJZq(-j>c<4s2q)FRGKXAluv>T+MlPK|@x1;C@ix>1 z01DjRf+GJUDrpefF(|1t|ML!X&;y(ngf7#Tsbm7Q9m?+L*KYOUx(@=V`;wFc4=68P z-TltbDt!|9p$3LCbCP5}Fm)`mmTUvm?cs8q!`fi(n~g8q`RxTLC=LA1#A#@VY24B* z#cl#Oarcl~MKXij!MrTo;A*!VI07qXe*n+AZh~aDRWc^yi0`MG|CLU$_QHX0?S%yi zJe*6B%byK7|NPL+%{ainc;TAg8hNRGr=|M|@W4;@@l>XK<%+&A?7D2=#9qu%{t&LO zC+`Qclu0G9b(5QSa})II(fE-D!B(5Yrcbt|rNK=I0Aw^Xd&yY`D^ycWQjKm&@eGk6 zV*zGJ#d~e*Jjow!e6tk9z%tGPeE%>p4JR%Tp86v7WpP*o<;^%@&u%}Lqk2x;#;Qg& znxk7-XCs`k_sy}Y-Z`_MVO6MOulpVNnuZ}}2dA!8jXK}r^}9E9(1`Hx92Mj@s>cK> zYbdP9+ju0a79Cu>ak9Q~>M!$zx<)j0s^;2bhY!EA z1KvHVIgTc`tGSP*dHVL2(8tbh90|z^Wd#ERJVEyM<*dWG1-$5!fK!EQ8{^Lmt5tl( zZ{=xR*xtV8a)0<08F{d>SC!9X*ay3QoDc=&+evPH!-Oa)+Tk8wK`&+o4P>7gr+t&H zV|2y+Cfw=wpK5&4_k=+QkQ|*Zw(#djrv_?qd{X&DirMn%@txHlo*#c?+F={~hc+66 zY?j@tf3;37YZYCx_T@O3-TT}e*=^8xL2Bw)52+vZcmdEnCMVt-g_Ni`2pNRX^ZMH4 
z2p;T}gjl`*`sWN*`Y_%{g;mzYTCHr!HspAY-Nee$-K2?dK1QrW;rOTGTBw^A*IZrG z^Zk#PWwqX1Wod;V<~*elqtFkL+e9Ec}yk>aC0=Ka3jO zqcVmoTH$yxx|03VVy%C)@gQE=tG7kYgNZR$yA{RWizl@!$4osXDYc|~UxAXu9%Z5Y zx5-;qZ#TzTjb(+#bwuM76!aETk3?U|4WdggX|Wqg6K=J&5SB2>ta4NH-y{t6(8dvJ zFG0RtU-4R?nVXPYW`?)S9R&m$))2iB+MY44{_GNsmq#@*Pr1`=Ak4T>xJW;nFZ~2z z<(tzX?c*gCAI21YvbZ6dx^s9qko4mehMfsGL3oOOpIrBf#3f%HVW2o&eW~Z^DGG-d zejhsfZR~$`?N@hlt~BrQ(|IeN)2=+TQW#(UnIKznqp6vE(jLaGQKPq`c#Qm$rJuBb zRmz^LW3to*^A*p}H(#^z2b3@k<~NcquH*hvI{S!7cn4!nD5L;cjp>2mtCLT-sDc$` z4Q%Gr3-qO6Iaj%piHKfaMxXavRmfquCd+C zYqocvm6yQt;@mur5Yat`#L$+nVqz}}Nxd+fmIb;hB)em=Zr3*^k(aHruR8~S9aw69 zajE(F(HJ@UMGLEa)3c{%?^*!xye`_Svl4AC|_S1PEi(Q$Ip6S9Z2KT;k&(vN*C z%FiAynXCT#=j$eKrl#Yz75^Xm55}6je*XFK!I56P;}5-^dXw||sceC0sD0_?;}`85 zzCMAG0t84LR!N4kVol$HH|x&3-%ZH4uD|2deCh4XzxngdoEKL`c2Fg_Kv$AQ&^Rcg zm3VkBbgsV?W7R@@DXue~C@~c7638yV(3X8L@^wF!a=ws~%KD4{`>RL>K^x{dKhZbn z)^_j`7{_S$I##92=oAh3cQq=G(SD)5P6y8bO@%uZWU3N zUwmI~i&9e&hvX@-vRk-1M-q;XlKt?6k?exq;h?O{!5?ttTM83pR2qRs8oow4>A}f` zwTtAjf)f)KGM5$KaP#2WJFtD&|$W&-NBh@j^PSI_nEw$#=(PL_SD^7mBs?l^ejZ%I4 z@_@OCU$H00>fD!ES#7aaN^Sd)b^{**9Yg$k%y^@ihqW@Sedc5yp`k9(J3cu57w<{m z$-ZD1rqth0A}H5^weTe6NL43~WoE9jH_W#ZEia{tD$X~UnlTl4^cxlb%GGjLctB1g z>bVbwJy`%}W^^H_(e%wLzf1j9%Ga(05I@<3&D99WI^1j>up>+TAb-5ch2?ut^rag| z&AL<+P97Zm&g4li^fW?^#)tD&Q!rj$vYz!YH49!ai#l5ZK9^Vb?o_&Z?^_HmhI47});{n14!nJqFE3kU zMDk>h>ic+(bObjCF$43HP5xXBA>EVczA<(>I$W${8v3c&njO77U)eC z%)EooY)jW{+Wd1o7fSUSTHqm>NXob~t5pIk)}>HJ=qWdCgYLP;fLfKj?z1O#c2kZurx0VJeD=^VNw1tg?Fx&@@WJG?Kyd;eeVT6eAc z6GCTi(VcSL9QLrLUo*C!n)?;eSVZaY>Uqb{N5bEa z=DzAu7RUaoRDG4{)X@^9bAcI^koU2$Q9I|quhW?Ng-OQSOBa>;Wb`s~d1xY!QA1rF zBqPPdblxrx&uSI~!Y6x1>;q|^3HH~b5V@>%Xy>$3ojZ!co}F<_s{NgM0U|byq#Fj# zRqLXP;X8gLN3IeMs<0|JyulTx)`1Ab^|yZ?>{vKu=Sp%K;<>Lhue?3 zvJBB3S0zM`W|95v?U&(xQ|Rv3i7v=4Qf$?%keLfh(c@zXt#FF#`S-d{^;FSF@-z|n zyMocteg4yo_OXe3x4CLJpSbQ>kQiOLT8DWoaI^o8P@zONqNi7DdM5q_!CC0wSBl+Y zxX?UUy&GKBhT*ja6>(4#K`hBxkhC|a-s-!1qy9maM z&7tPyj8~-=vq0F^+~ZE5*x1n8QIQ1mZpc^r33kodQ&%5DqRg8x`lyi)CoaDQOA|%S 
z`()MoP(0~bdtVWKZxDm~-W}e_Aj5LLnVx}>KtG*#r+sdE0`2-C5@mV@JKY|MXwRul zE2U$SV7yVg@q9(IR(J;jsWiu)xm@ct*4mMLcrVEC%njFZtxpmReDhh;Z*ZWPD4B2n zcyaW5Y%cu0p5R3|23%VDULTsv{&NPU?40mMjNVZdoJFTr{WqD=`w*o(db3K}N5U^` zmjVk)s0${Vq~{%%{?*wYU+PBs5D$L7UrWt7P8=><`$opnPO4{#TF2HP;Hdk={vNfp zav1Gu501#W-!Lbm2)*EYI-ZlGi|&(3VrtS9iixe->3{muG2cjHbnOhy-JjrJud6RkxoOH~ ze)951k9;}f!&%yvEmggHLAU_Qf!MAk*dDrmJKJ7S>&qT~|3xe|wek^_R~l6k1x%~> zRD^_!D=H28(QPO1wlhX2eQ?HvHIfO#MsV&(p=?6A>BgE0Q{cCZtj|@rhoSgEwhKFV z$BoyD6DSxwkvBY%wOo#(4o&!d>t4<~zZ9#gUNGW@8h{(Ta>n#G=f)Ffz|z{TbCJdH z?clC{eo4WJh%fujSN>6pcZai<>7~l=opX&o-ZX|2xY|l+X~9LW8bL<-Y#{D_!n>a@ zA>dCJHbXQz2G`OqBphF5(=>S03^CsIN;io=6)_(CHWQV=BWdG9m#B*SiPvzhq42iI zn+I#2wBnF?xtoHt;SBx4pJK`w3eY0{Zr-=n&YBa4B3!Rf@X9`V=KbU6SZZ$%_Vm=7 zP~@+LQ}|~M>#5dZKK`IoTjXcLouU2i9^NLdiVVh&pm|2xDtY>2Z+$*qM$TCC=qrFD z!OY)nm#dcYHz`D=H_MvK)4vK*+|r;lhi9sPV}AS~GyH@X4^n^opYfWv+_->`c#`xiLd?h;`1gV6^mpCFP zUZfo2o&9rZgYzSSl;Ib+q!-605qgTi+0R8%(GS-j9j6|n*EFf_anBBDFnZ*~@lI7PMABeDI4P-T_;CeYq#P!gIVj-@Mt+0^7X&K0A1& z_xEE>6PaX!F^nH%S-6N*Mv)&w`VfkwSQodff?#HvQMu0B>R0vzsNUz3#owZ(&Ytc0 z;8HyGWnM0(kE#}c+cLH%JClmLk^QYsL)nb1JobNn{8c9kjsncV);HZ?(8P~FTFl_#|+5`F=)BW(%=?oGAGC)^C zIiD5uH6x-_lD@zC1pWw@DE(dTq*2QK7-?cs~>VrsU^k z-fOckd=FB~lT%c1^Z|cHv%k!fBOS+>OS;ZEcM2_!p!&$CYMK7VN+3mmd6x#AJiZ6Q z26x5Ex%|}`@wLxPyduWhxxroKa+pWM?_{tEKKGH6Yhme<*q%yP;|Ttuj|&Nmby=DT zGEr)!j0Wi%nMc#rMKwx1nEIeeG_luYT0wSkclt$bz5Thv0wpwtOY=G>6{WPOA8cik z4_SO2Xi3U1pf<}K?z;5o6ldu&L9cN1dz&mTAXMOy|1M55bG^%4UjrY*kV~U7$yqh# zrE5aH_R!H+#UXP=?G-{n+n`2I`or~vTipTyO}Hjjd;RaNbh--rr%#;gtt4NLMoNrY z9Jp!3Z|Eqn$zZF>mhN)5&|SyN8u3lW1|XxuSmAhd@*MQ?Lr4>4WfQBBl~sxF8)==( zS=i%J;im5HQN=}Ja?FQgn*%+J-VIM!!SuBS4=EKJcFvD(K1>S-aK;z7vvowrPEx7Q zbLeDErAxJnCTnRy!h6L@7Lc#Hd;rOXkYq_E_4VyC&zY3|{LBQ7li_h1(t>6!$|m(o zV!sXNOO~w-?BUL4ikh;iV;=0w&5Q`w_X4~M%lDp8#7UahU}zPhy8DvzyE7&hN<>4O zJ`={py{;!eyrk+PnaBBX1Xh<;o4<;mA}sfQxS!6DUoak_MsEDSZ@XTX0-NpH!y6Az ze+3L$aY6cpGQOD4$XBaZHn>&a=395JDd8ZBzQSa*cwYwx+x*tMlIg~@2GPOw>t0}S 
z(6n+*QsOs2WfU=nhhe%D0*|`>{P`(7MVYqRA3Rw}k|;oLeDeyRNRu_V%_O1w<%8cH zm-*ch|J`?V^JBse7zb8X4Nj{J8*0x?%8r_ZjP;%&sQ{OvNTv>F6k+DD%q1cBF4kia zJ+aC4TJvb46Vw%VM+xVj1QnWR3>*|=Jqn}QLvvU^6-Lh!zTlM`Iog;~4JH#Qq@rKU zIj|8wcn;!G$Iehfh?QVrk>-6d56OItb}&VPvP(xOjGBON%F;w%{|jE`>S{r$2RK=y z65u7?ew>IaI)1PxONkso%hqKc9!VPe%30sw?-Vd7iFik<{2r4*scVnNBA7ko36hS> zz*|DYh&TLZU+&W_EV#``6q%etuc1y1B8KhR`ZrrH>TyKQqF4Bv;Q%`~3uI~s+%pZo zPcbRZxGM!ABzqWCB-L554?s;v zEo7akmUwc4D#fl5!v&}1=c*clA3Z@HOAg~{652yPRoSdOJ#PUuvtPI=L?4RSLcrg? z%H+}(k$n1!k`hu}Jl%%4oVvoc%KRYWOK$&c)UooXIrDw}LjsuA3pDpd&X~DqN^mys z1C^|`I8PcxZ>wuwYCUcOQ;MOkHzlg#dI&2H`N7}z-rm1lrD`aQ5)1gf)1{ux2q33yA9$qb@jDzo#;u#D(WrG;--+%HWNTSxB?v-Q7AylBmq3 z!<7C;_Xpk{o_>9#j9QhI{=3}c0uM45e@%-SnJH?}N!am3k^OuhH}xFu=|`6gDmW4* zOf@x_oVJDZBkGII*@!rO1@Np2dP2^vOg^N_*@)?XnYn;#=n(4m!mN0Q$vUze77y}w zBHuktN(zy?{PbEztTVUC#mPb%$v(A#3h4<}KYYldpE6*k?zs5-%-9@f`QTktUUPuk zUs)uyp*BuDb@?+n%|p7dHsWkuO*W3p%B4pRC&vh?sMTtwV0ZY9f0;%=)#!WKF(J{!2nJcVf(E3kKd=b(>H1o|t1=Ik z`i7v~A#8e6*+F`F1*5qC?oiaup$aI|shtu-k|#kx0OqwETn^5lqN3l7IhdW`%4)?f zc~$#o2b6_RoPrxKh0X3xxRR~VTSn7?ifmL#Ekf$#zrdlr#8Y#Lh?Y;HfWK`)V=imLMUvGP@6$ z?pFF9L}ZDInK5lcSu$(knxlvHyWB9Ac5~*;w|(rG*~{^_58Cchy^~pH?mCd~ceDz( zj3x&n2b9djDZYr;cpj7UH*sptOFjNFQa~sAvVx5RqL55TnP3sA*2tT3b{l;3tkS@`Z6q|FsI^=chE6(w9BQwBSeqijz!EML5%q zpZi!a{_xw9kqb$9W*)uF;BQ8OU-1OpcX4J^XXaw4d1t&I#&NQ>V?@mnDD8u`pD3XE zNQStK;J>s`iR-68&ib!)nnnbiKBYyRYTGvqb;j;a(QB3$1-q$@2|K_e`%fz85? 
zaE%yG&el@v#Nh7_Jgl}qLPh123=tXTD(GLt#1`e*neoSC3#`gZGn7Om@sJ=p(H1s+ zrDjsFUmqOqKcu!=IeVt~M6F)TpV3*VX7~tBj<=K}{`3VaC$c8=BC!Gd7 z*4~0rNM7kZP$RLw=A=$9!;PNGmo=LGm=ESZUz&Gy8|g>;z2Wv2P#u~t3xGyGA`OKW`Gi=c^lr*1fNQ6`DFnJp>5ciTtZ0{EGett||;{TETK4|td=067xN{v>#=w49Rn zl5go--z!BKF)=vyygae*RKZtcS2{5ZyK~ygTW1hOJC4LkRk+UNeU=0KnUfDP` z)hw>|Q|C(E_rV*+VD{4Hd{5-9=IKybIVDj?az5p&%yfv`Y^In3Wn$Z|AqCZq-sKgr zFsT}^gD4zY(Rx2_ww>z+k7rwA6gs25G|_x1pQ!NYJjM6S@%n&iXE;@|2}f;&1~8*$ zd<&+eap)HBRlixcJ?g}+sn90T#EY&YEdjlWhFUx6`1P_I+NMbacUhY8I=k|V)rB$ok)0~Bl8@>I_ z$1~M=Y(j=w1+r@^LP=;s0r~vA7qtNUsEcW&71_uL+QaT2Pe=tu1d5L%SeXoQw(WPK zU_CurXysaFS*&XcNOv--QS!C2%jPpLRNoJQG7hv*;Nvy;hs>TusW>+-;3OuG^UbT$fr zIlDv|(+df@2<%9y1nGbO1f(t4u5PGIWLKFap`qsCnHVUyn2^%Bs+Jv@B|f8}M=_TC zBJq)ONLE9bBXI}zeEcmwh({(yM43uS!iVw3l~bRJ@QT6&S`sv^7_(%9Bd0`Z+*s*0 z9sac;lpUjk`FB*##zzoPf%TO7k*Vm5hj4Pp_>x zZobnDV|po_+sYfd+{uK#a#qEHpd_}f!HP06^WHmE_hno%1_^a`0^3z;3j1M+Z<&41Q86fJ;(DfCf3z$>ZOndu=V7m~8kg4p7 zcI4$H3a4G{0(vidpTWu^*yLwye_ zn}0U!LC1*6E=5o&A8U`6J(vNP#-{Yq?L%?BLozQ_ zzE{3j3{vueKDT#LK6UR~J`y?GA3xy2e5#gSNcSWEiUt5o0=B*3)3~Me|FpD< zSS0*z;n;8@u(AI%Ndu~xnZZ~C(bc{DNdL;jypXUem1c%_4b;}6Xv@oR(I4*?$`Wrw)zlb4#cc4<^{L#ywYTpZH1rul+)t}cZE|X~#R$SRn*7<5a}|S&O2+`6dSWY~K^HhdKkMQX6OxqA z|Jmj}ZF4U0Rw`=iWlS>y8BdP!3g5mCmO8yGt|Cg|HroTf%;M_!6yZ=!dfPx}cX4U6 zWmp^_+rK6%w0w1s>U)&~XCOwxC8|FY?j|E&lJqc(|D#OgCHi~8bZ#0Ss;P`F2{(AQ zcE$zwWsw5lj^;t`AcRriNkohvK$6(X_DT7zHfGDe9FW6(){VHQCHfNQ9IrNPDh&n%bgr;6PgT}Zf z@(oBJ&Z;{%VPcdN%Ylw}Xj-S5?r%Z(v{8#qkWMpNttWkc@d4A;F7~i4Nu2TTcG*qq zC6*oBj=Q2w!Y=O}lVE)2mnt#rUm~EOBA{jI&snY;fqO&SOV<4R@TQF*et-i!T*TNh z%B@#{r!Gi%ASerjTM#t1tW>!ax9LMI9q8}Il4G}tB>kM8OKCXDD*q! 
z4lAt6oRA|CSG6Afu+J6sMoN5Y4$Gv%i?AbKHimhtHb;C2>AW@G{pitcl_llYRKg8@UoPk^dZ@%N5g1>TH$M}osmQ{>JdVaaBA`^G1)Ggng2 zGmP(VcJht56JEEM={R9VcSX+Z1oo~VJTCVKrw|_i4Fop)7<#&CWc33}*v|R>QXAOW z!Y3CRX21O@Ir97a1D-_AijJ$l{%8Z>~f%;-{jf!W_6HBHhoQe4N`>T=f3cfsBuzzMGsP~o^ zZ%3t7{%V_aRzCixI={L|wjQrxvx>W@0mSc7B6hArkA^lzZ-=A(wl^BD>Gs8xnhQ>@ zwRTnTjoXK#1>G95KhFqzbU4itogY|!@@xu?El({4LeBg@SEPBWdbmFSvga@qCo`|HCn7Wgad_T1vlv(BcEiO=wKzQ6IbLLg2qJLG;YA94~g~y4kWSel%MlMEL;D=Lnbd3^e1PlO$y!eLJTuCcMn7 zfh3w1HV|NaxT}`Yd7Wt!~}oK@(yXJiQ>(odGvkO=w0{mp@-=|sW|sZo%_vw(DyQRjuggiY}twG z!QorptXn4po$_)pR#q(PFw;q$%pae)NFJHG8daFVO#fyZElC;P$xL+~R3GNByDq|D z=Kavq!@jQ=Rf26FuCUeEln`=CUfvOCk`rPEwQGekv|ATeZ~J!8OAIy7N?2Iw_=V7v za1g2sbx)JPWXQyi-Y;+|+}8x7z7ajO{_esKduqyIzsP`aiI7XYW0_wuN6~+Sk`6Wc z91Lv5!864Tb8^wOd89bs6xnGfC~aQ!w5=7g%c2xU!MiY9>rUY5DlW2r^?y8BK2!M< zwzCNNttJ1@Y1Trvnb)^cAO!4{m49(^a>9Cp=kn}pSG_03@Ji4Yp7-PQ-1^tqe;PkF znV(t&M~9AV1Wnnzl0)|$*feeO#>gF~ekO?0)Dipqra89BKu7!v(|d3yV6;w>K`FC= ze%!9@a@KO!%;=d{Dps^!B$5 zuXl&PC6KK0{<@A;d4w$Dmk{GItFd*i5LRom&_*Y+_1%w?*Htd;?A;zWt;~;jJ%j3f zs47ru&WFUyssTM(GiC0hgKUOoktYL+3#-n+f&dwMgJ68Pm|A-O<)9ZWKR)&9<-rVmG)1B{p2-qL12fm; zhcTiirB|hC*KKxV7zb4w(-{XV2XG%3+^zw^-Ot}4UxxWlIaW)_cC4?xZ;S!I zEZ@X?yUMwvq7r>_87MW}ukHOKim9=1^HGGxZeA9~VejDR+As~8YV!pOr6y4M^#$~A zf#S~-UZlhS+!~0*dw1hI4U{f8nJ-0&uAvESG^%Fs9C z@-x4X#I`S9SI>AVn26Efg9p=Hee8b956iKNpl0)G3gy17qF?w>w|`IEo5=TaI#N|t zHxL}*okv+mZV+`+p?7Q)4bYh8aV_M>eN5a=lU}SlGi2vCQAhlJ59!_E=gFT5HQO61 zKov(RR8d)Z^PTIvt5d6SIpJd_^{BmO@8U}JZ|2pnRl$^hgRAbsve(63)l+YKyCuxc z^X4z=-DWS>IPWTvXifRbO*gT3Ph^z&cZd1>qIILrURgXDo4~nPF>g7heTS_Q{3sRg zVD<>OXqMN4HF4}iNtA-E7^#o~(Df~Vx?~DV)w_wYfvyM}7ZW)5BF}+vd|@=6c1lm} z&mosp!%}^>nlZXWRKgZCwLK9S75#Oj!1ng=u$pCwMd%*yOEFU954Uyc8{#2{QYBwb%!?D;ffVely4mpV&Q-6^;yruZ>ElZ1gN2_Od# z;#U5Wr|0PjkibHv>ICV*j>yk#(oHRQ_^H0$L$vx-Hr==I-~4Tx9avx+@qhdC{J>5y z2xL&afx_AOOi()VcaeXW)hlKeS2MdS2d&7pcM@`Avx!Bqo$ zGf5q#^!Jn$BVX#yOJy@zw3)5-Vw6~&@Qce;7g3Ivm)^~eHz;mmb6=fQJ{M@cp+;t! 
z4x4m&?3&3?%2FX-7Kz4sh~lDN*6(3QO^IQ`zkmN!Y!uCXr`ce5FZotJ0=Mk@U@`6w zcYlD2^NmU4*dH`?i2U%!9Y-k2Ra8hizgiuj4UPXx-V&Rc+^?aT9~4WQG+imlXDtXk zB~cHR^I@b!`S~aSmT$!VxE*md-dwpFk(BA*{y$)}K-8o$viJF=i%+Glx7JAow^dbs z@wIwYa0q#PqA-y~zPac_iT{Ix$?A1~)fV3K$rHnDmp$wHOkr3^b@cqGstsUl=6O$? zzd72PBVQkM>;{Jd<1w;B{#^_%hCkAhGMbT+c3G^D#1V1hH0 z$;XRk=DY1HS%)F`ROOvT=0CXh{|R>c&{mm)+d``Ns{_SP{_4tD3%`+1uvVsjdvpC{ zHm7$nRVyDVC;XqQ=6U{gs!9&@iu_Nsu6=)UAv9EEsMaT>bt?|y7fYD|m6M@mY9_e~ zD&D777$wH)-?}Rz0GPg2`0Q+H8Sj_2CPkZ6df#7S)Zxjvl;y9X!LhbiA*f1Wwi<%N zZ}X^R9@#gdDI}UV;p9I5=9y2oF+SYTr-!q~ZaowiAp{2WC$0=H+GlOkQRIimvYHK? zqn9#LUGEIhfIVisWP%urHHk@1D(5PLe}Fb=dC91$r|JyO_#E?KTDiK7rNM#daEJw1 z1TCba<=BCjIj?{WKxb0%>w|S@c|erL1W#YIN7IKozj(Gby-xv2rFWi3DQJ6yK3SnE z6F10T94M5-(kj>X+|72&-r%=pubD_NwM*w}Lk_P<4u)@kwEcR; zyXrM2&KBuYP|-xqGnOg{_qlxpjMhea=OO0a-o!eb@m=13PERE6Jk|FMA{x@ZeL-Ab zvXUmqo0`5pj{h9$c$iY^-{^&0d0Z_ep?(wx%-~tE+_I2YtA#QP%AWjgvwn||@_W|n zKacG;*HsJy2-AaXB&nAJuHf#vZdOP2`Yml&QJs;JO=q56N9N30#l+SSDh$2L$JN0h zGCYye?}sKH3uxYWsSig{fdLUZs`~Mh&Td3h9v7 zfSp_&MKSr}zG)Vw`90RVfyo#v<>N>R&WL2HFdiIU~#xd9$!b>8|C zLlz)Ebw*%UIurtQCi;q#F>J`ct;e)*;QSNJsRg*Lm_(cM5$z|7#08lQG2(< zhu6-h0#>WH<3@9}pIXenNwz_7(gg%gpgFz~m^#0b<#UdxDI|Z!baA=I@Unr0(7Mw8W ze~z0NvvHtB>M`TvS4p!Jmf`*_xb@pajcqP2_6`C&jll9f>Cw`?*-u`hkf~up6B9|m zCzV?gv_BSCgF4EaVcko_^Jjmgsuzr8cjM0M;U4R?F=MkL{}M-{Y4*~}cw+fJewVTT zWF0|%vH$gLqYM9|$qyAnqqDqG&o9d|$z#|Qj|+oCZ{J$q*=wd}&>+B_X1vQV#snsC zOJV4p&rHt@{TcGN$x>|isswv~Jovz55?0DPB|7o6=QB0#i-^u}ZGvD|xh z!k0z5zM(~{j-dRS&Znx5hEwSo{YsOy=8!_MOD+8AUIIpqos~3&K}mnmdp)70{VzOs zOio5g38QkcH~G39<4MiGeflbZr?P8t3$=?|?Ed)R*fm~IL()O2hO`_F?jrQ{|I209 zt2?e`9%J~il=m)f#OjYv?S`1F4+gEUE-$O3Ex&dLc>wSQU78POmk8P5OPq~?08WhG z5XL`wN5=ylv0;GtlJhgQEhAx(`f?V1S>3}vrG@AcW2GMQx*Ti{m zw8wnd4ZK62@a%j^$82E8O}P?!-eC)Cb!Uw}LN(0ymXFHbk}Zd1!R;blVv*=k66Tqw zzAv8A3oLe{amsnK^%<;tj!|Qqi*}K=?GBvSYQG+#P%11WFYv zhK7Gmwe4E!geiMrC?uhmpJDxn1gG&2wvR|lZ!wKe=-#$s-a#CYi7ytfIP^}oc$iBK zuuBZFP#lmF<6$=fAg+6@n1lFhQjrU6o|M-=2vy0+tjDE_^hw-z`9U_FHliaTZn&kp 
zDX=UGqvNQd2L?&Fd7g_&Z9N@wBM?OFZu1@uIK*u<9h&hB*xs_;e@wGnCaP(?7nV~J z6GQ!a%gCoHWmXQinZo9O@lo`o*v093@(RYI|LJ<)8{#+lkGDPf1a6l5&%MC1WxI~F zR~wbSDPJlnT1}Q&hMUNbXa5WastDjyyST`UvinDOxMPS>V|)JZIbw0aMGBLiolg&3 zffKNC$eCG2l7AU*#Nv2dU5|g*`h15yFv&^y1V^=4%XW=KQ%X!lo8aPP4K5w*;-q4Z z^Lvg+Ate{eET^$N)V#S6Zm(>ag5yUhJgtAuXQ zXX0~_gkSN(EB(e6Kd9}gj+Pnc#s-ID9AS)rCNP4x#9BtB`QlfJji+Xgj-l7pU2cw% zv!bc$;A&`p$aDCLZ%ry-Eh&6El(zlL6No(L=WWeDrJ1fhuSa|j$=MW*`M#ntD%+o| zcqy>8+~rHw-d(hjTx$|_c6@x8%L@Ycm1z9jh~WCyCs9r-o=B=f*lBaoW?GOs7${PS zX*}rjcG{_{wE51=`g^2UEQZ}p$$Ve$J6C~U*pVjta-96M)Fp89Fy2%&8_hXr-Y#8g zc3ErtHPrb3l7>q+oHjVcxY}Wi5Cr%dlCoCzDIoP5n8C*$^noo!9z^X`8>H&Fs1CI- ztix8_E{aa$LNqhc^0R{HGLgqlGG>`Uxw&-UoB$o5<#;1%oPEXtpe90ze1k zS1Of(r6mfpW%map{bP+bk7Hnrmi&Df1sK!KJ-=c%Z-&JlI%+4TZCS@~6e0&V| z@q(K={NYijb^H0@8LZSG=z2lFW!Dez-l*MzM_n)tcHj&9E}ms;W?eWIpko?%%6Z@W z4fhR@_^HFn)0oh`pr8BAx>B$=8O~18&w63wc9`*8+CP(sQf?~e4LqpW6LEEI)8DyG z>7VS0#5wOEf4yAA1O)=zW(LoFDf#vOTMF4505bH=Nyk7kPCT%VEh^AzAP*w7p1_F7 zgZrEWa@?Sg(cEKf561Q$?Z9ww6`={t4t@dGqBHoCdBZ zLo~p8BY3p>2(Nof+dnW2LcfJCjqt!Ovv}GbR+_J%1oFcI&J%A3nq0dkIvdE-;}Za* zgGnS)MTyJbGPe`(bZW%Mv+JYv=|S8N8eQ1gx(L=ZrZ8S~wX@~%o#Uiw*sci`86UWghN&y`>mR3=EAgU-=Lf%-vvz9do7bzn!eL4lhBYyabUoI7M5c1@{YAm z4XZgf@59Xnt)}#0e2(ca=JFNTKzC`Er)xC}S~t!LM+&>+erhE0{@YD{P?4&d3V*y2 zRbnF!zC_hvDkg(hH5^rd4O#|a!jSS6%Q1!OES9%ramY(xoTTjkBzi!!u>Fsd5A}oS zaEXK&AO-gYu`WmRQQM$#>YXGL6`hay-}G-!OiB#6q!kq@Zf}}zXmgd%_7HP9&xJPJ ze*CiaQviJef}r(m>@&MmTu(at)0D+;4Up6h)J4E4tDJVe;a0yF65Cq1J-NDk#G+?I zV}=5ZTuc3fb|<%E5imDD%e3u##D;6=Vqc~|*>8reN))ZA=p&(0?uNI_1?@LwZ~-WW zIi7PQ0`mcIIM6X|^m^>O`Gw-l_9#;-j?f_K;_L?SbJ&<-pkC0a)YcCr zBb(%Z16N~o9RW7l&kWG(sS9G8J*nZZc6%}B%z--IpPB8 z+A*FE<(my+4Ht1l(J6wib|FIyGm7?ICPK4nt~Z3`ru556d$3adA)k9M1MK8|q9ina z!cYz;)ud7wtd47ipFcfKZUe8mf7R&1_i1x;4D~#zKR>ddv?VBVQgrrI(AjW5l!&;& z!+>||Yvu6B*n~=DQ?#NT&+lP$RgEGj?SP3I9y5YpFb41M1{CsOc_U_e=xI0Tqz2%y zyo%EJXaxQ*LKYbP53WgI23L>T9X**t&C$i?K*3RvW{&?FJm)m8599`=4CgmHB5}{I zH(c$7ubDFcTrDKd@5OupCUY&zG5`_Y*}wewH=@x;(^N=l;kzt8Q@IC`{Z>#@U**B? 
zu^cUFE^4H?3&}jTpeYCV5dVf6;L$LJarb<(qY@S9;@le5SbDtPy?yYmy4uM?(X^7* z_~-qul!@TfjlmuC-9JG(7oS{A6pU}Z78Y-w{`;|f!~0f?6z%o>Oc{b)MvkDq_uei$ zGHT^tZ-_G+^gN%H)xXvhX5%pgogf*!Da_w@FhNyWq9HaY$(^vt7Pft*k-gKk}tK@cd*X@lqv9V=0#P&3iQ&l0?KhPM? z$F4Suy~a?9We>Jl|K7A39n2Rza{YNfy%HlIKPvpS3rSfn#!Ku9M3@=sDlC|!iY1`F zeslm?2aYr1X`c~^&kzp_UtNm4rdErF9)`ma%H;vQj z;ecPJdo8myLZPTqgsTv!dr&Svl#EBTP8vI)eiD2jo=t^NBpj69Zfe(h=Y4=Z)|3Nm zm%#bfzkNL~`vv-)ng^^K#LN`VpB2dD2oEncL8oe5|4T|U;|n=W*D92n2)``?xRw3>0idQIA#?3$EBGM_7F@5kE z{-=pn4MP%i37P0Ffc8|0IxAx1USR4W>i`?!YZWw(eVa~M_gu6t(i#2$rvu|39ARhf z*AwIL_55Ht1TzrEjj#Afd4AwO^~PSl`OxL+n}?1%(}a(Ws^q1{eE0xMoxw0Mwf+}7 zl#?d~Is)D_w3-*g0=bZ(x6}Dxk@Y{~(%WUjCEts+e^la^S^tRs^@{&1t+ii_KON0x z6}4?)fMHmJ8K-pm{fbdIf1pKV&kVrxJ{3d;h5_H+;+IrF=m*|$9wuH@{46IarGC1V z%z`$<5!ms`_~kdW;C4~@c`q1xrrd8?p-ZJI2o4cY4On$TR}4qv!v)kZ2RvQ>y4H|b z29jYCGVm==4zG&eS~~X7lhkq5Dw3S6gL1=(n1_O&+Pa|5>IW)PtZJK(EIjQq;oG100j(V1b|KyJiL;Bq+(>wzm@}^li<&EFvVLyzMmXIk zUS1mxyk5I}vI%9_s@`9f?)B_VwAeB+uRNP_Cp5cr>1%Y;PmK2eqUjftWaas)i8UGU z1MdO;M_@tlCZ4O$fZ{U+V}Kg_?d2wJ)(Kd2;yo`P*AbP-%_2be;NX6e2``S z?18mQBpJvyBd`ASh__f_tque?7Vvhh^wX2d%ZK04Tdqv{b*uYbnFe%jyQHI{iE@za zFP=e3CB_j{1qE1 zugB*dy?W7F5};qVYz4Y4t{imdr#6!s_X+~0E1y%8&XD`wTRh%E9I4gE_vA0vhv#y7 zIxzEtg=2LZF~LLz(G#u(2MnVkO%AUWbT8d@(Bd-n{H}XaJp^eS*+%B|yy!tcBJQl$ z|8LT5cx{nZT)4;0=5KWzaOeb-9n8^HjX5~c<2eIFkD#ux;Xv+?IdUT$N1T{u9uh%i z52vY+ra7Bz1b&G^hGIHFF*8{18y`1I7$WYjfcIg3$-KD|7??4wI=4K;Gv?_wM2`G6 zh@uNy%E_lO?)Lyh{*;YXV;vFrk7Df=+^82sCw^So0wgr@tC-c3r~Cd^a0?Sv!Kfrw zl#xsa@OE+Xypwu#Ka`0$*ou9{azr5W4xlpb$_ah5`k;ec9 zx+PtKh2pIl9+>$7YrZ9Sn?;G4nn0mZ_XedvXR_uyXHeX$BCC~8lKHevvs1-;KaVL{nS7e`%y8p@wY9Tp#wEBlaDx|5eEnV6ogCMs;^#D-%Y~=?r@)*Q)vxP2crWCK ztu#f@qGea4G(}vp-8++s%e`_?N#YvX2^{8vuXb$jhx!`NVW;QP9sYGsuapu|09_BM zQP_N{-hkuh{i@U@e{ZY}B{*V!%FvbJyd=5!p`hO7@h_t-zAs9~9UP$51%d{;UQ|!) 
zw_SED<$y=Ez>Zn`g7nT3Qb}UGb?R@kg9Hr(ZdNSI7nMvrT#;UqIK*mCz8~gejW0fxAboXEO~oJnJY^H`sw!wM!9tje(LHf(T)23LN~Vd2y?%9YS5<# zoWgHR2UT^l-K4q@0x6?XI=oh1n7!S0Fz(c`2*S0;f0!Omi6v2J>{sQ`8vkJ@nL;3* zBF?FIJ{E^;&fKPPI)kam{<*J^LoT$53Z(LK%)w3@@d9IT)$(~uL~IM2vcKD@I|w@d z1$P4bCs5{BcQHm7&^dr!JQ+uN8%`|S$j>x+6t&)b_U{<NQSBsiF@&ArcPmz*SM zh%$3?*>=2G0kL3yNSThpXKyHkp0aFh8JCQd>(ndsJMx8ANr6j|J?E7T7L*%qv(YMj z(0%vfkKO3Ge0S-ZzsA~hU*CP-VuS-o5y}rrFgf8EHYw3^Y|vXIY4ZpF*>;qnPcAwN zD7}i|(|a=b;3y{Ud-zcXB+Qv;&AC~=>E7eXk7&=LRK6!4A2|C~&Y(H7BJw<4uH3>b z1Yd!A5L1+~ny$`O{E5JK^cTRp4G0Fw#$%4CxSa17zK3M?S8O5*bSnph<4eTxtg-=M zk_eblQ>#@P@TQXb@G1+lFcx8v(eS<)Yk+;q+iX+|BlT(u zUkM(-SU}seI*?tTwnyiHBP-?O1<5ZRNtJy^YI`rr%D#y=MH6S?0Sk{l=IQ{>@CdW{ zfR;d2q<68H;>HfvG~?0FSR^~5b%W67akFx=!1M1%6B>no=se_+{e}<|GgJrMEQ9=u zvWsAu2`#{|q*sPPI%T95wraGfy=DyeU4>R04O&F_14F^sp`B|piiHEKNVed3>TGWh z(nqegR?@9<1`Q}%N+tVL`cgb zZc{;Q zuOq!CJFzks3DcBMCQv1{J_L)I_VjiVH82(_a<}Qv+I*2*; z=7TS?&DPg5aq)-Ww+fdQB`?^ly#i$12Y`%=G|n*J%~Kr<;$mcs){*0Cm(r8shX9(x z>t}5)6o>-&qi>eXEQM_Jcb7l7>Dc9RD<&?N*-oP!!*PCrMI^fL#m8a#SFs!{L5 z1t3bN7Q4>-!sh_$IeD(-7RsMQAcgijh^Vz*#FO00A~AuQk#YEJod&98gc)^>>@+#5 zc?xkg095*5g*ziL0S#;ZF}$aLJ!G4{0>JtFS6236o&u6^cpmN*%^An52KCd?rsw+7 zNukP6*ijyd^^>U4vwY_xE;RoD!)>5k;wIUzi>|2UFQE8=pec^ukc>=5#4<2O_x|h+ zASN4lLuxXV*_?8KfPi@|DKMUG1l9D#w@t0r<2o9KGucl2{K`SBmB6a7x{|I+ZjENg z0pqKBF6;HvKs!padHyX(6_mp-XIdK~RGJs45R+M1jzgea+kh&B7XX-g^z(s;%;av; zMezU_${;BbB*G#COD6K7!v!+O)g45VGH5{n{Qc`4AsZ=@uKgbjr5FS~6JtCm#sd7`10cqL%=l36h z)YN(EFumYBwkuMv_io5#yR^xsJTK&1I(#_mzPv`aZ70>boq^c5g})^jOF)$YKEg2j zP!&cXv;1s;W83S_pfK^g?fbY&p^_w5{k|}x-fz*nr0dOJc*v=lMpYO_ENujV)Wr!_ z(Lk8Lmcn!0?>!s^cIh$i@0EC@Em=NUU_as1&Bz!M*5X*lhfoF>p020(oM&Dq#SUEU z#$Mc$DGd$V@jTVx>6p)X4X0x|Y>>jrdo)>NBfK3ij#k7vSNjUPGvfIUv7F)BU=|^B z4UK}g)<{69USL-O(15vc$2Zi)y0`8Vd|G8`Gu>va{tl_e)B!`4&t$egR2=HDX5jA0Mu z3^u0}DtU+rtUa2MTcy}yA!RKM4J%0mLsE11Lf|Q$aG@8X6z2?PB|d?}0#fbMguOD6 zts2s%;!gFwvIC5k0(%_5&xu~j3-r_R4<-tnb?-&s`)F+D1gn_|32*_3j1=X=vmn={ 
z4wa__OzXO5JUQO8lUoed3@rJM1@E?!Rs%C#a1PifZrl!tVdxfSJ+G|<LvN5cNAUgHcdNsQ1rVv*|^K_fJ6q6e}6xd zx75@pFo@!3WMpdvi>rDn`#vD2G2C9&-TL7&)gQwbL$MRM_ar=5#_?(>a8uz}XPemrj=MB8@7(Ps78b9#%IG z(N5R~8JFr@)jc)1p>GvX(ua<&XA7njx!g5Byb#WPShBa3y45tOpA>jQ zgZZ{uasN^_BAhmDnAF5K%~RkVNR2^K(|9sxfZEwbz$Yyi-FQtO@Ua$A%ez;pJ>5VK zYIkwmsvwB8tt>>3_DkuJKoW8xZBjhq=|Frbbdj%9JO)C~oyV7wR#Bi8GItlxdp<`m zg_?$jP|)F02emCb!1h`6LrEcJ6&2PKWfByfLEj%%$EV6SvvvX&QBU+WlMmN`?}c>^ zPhq`39IzUR3?&sWK3&&125~$sUQ+5RV&OQXrt!kttM+K3l$d;pbmw{ zDXh-@{<>M^{fiouu!ftN#%z~;KsY$4pyIxUlqgy#hB`Y005d6AM-j_u$%UY5QHJj> z-UZ%1C=WBIZ|%hdV4dL>t{cpt=<%FLeq0iL;HEwNa5+>S5?~C($da-uv*)&?Vm6c9 z)6t}nzjh09IjX#_-nR&S_ELtc%9BOy2Ft4`Mu1Gu{`P7rV*{(2^a=>4RTnT19J~*s z7LfOeLYMOjGSAf>h+G~X)Q5*{j*ne%k&3#x<Mu>rn%HLT#96GTLNBN*110QpL?K51g9RfuEWQ#{K88-YbO6#y|ESUp zCOLf=v{S+^hcrOoUcM&{Okj*o(+B`-@#-o7?T`Xm5It&XT_*s?3nV@n`f%_^D=icS zo>0bk)pT@os|)?iX3}khP-_AIIkW{i&}4fZKQ{9A0zUcz2V2AanWr2I<`PopOHuX2_f9YVd z3S0GpRrm5$68E>mRoBpoo*rHcy;GL})h6n3A-!tNwUNcIf5##Uh25 z(vIgL&j_oP{!L!FeS1ZDh6eD3@paer!ym$Op7Xp*OB^2@=6+^PY8l?JdD4JDJguzf z5}b1mwJ%XI9s+d;R2HF5d7$1l7DdI(S|I;T(B>e@-c}{CJ4oPO_D49`?0;IR$r^lw z@2TCNdU#Ay9bnCodwX*eb6^g(zwN(%Ubq@~Kniw&mgj!*%*w9nEz9E|dB>66qTkoBk-PmCe6kIowt1BP{7|LwB$160w^(QsUu1h!* zGP&y^p<{h=IveepPoCedygRUgmu$c7MffMl7brj)L!X;`ww0m(a^ zlqhs8SI3>@(b;PI3Y+O~U*-I^VRlK(uWNHD#$3NOR}`hKf0LSY-(OLbmX@&|?Y72Q z$ToHIa_?`#z3_{|SqI2hb*FwVhfsPwShAd|CG*)hA>q|fV8x|%P)AFP&H}R>%vl!W zTYW}7(Q8VmsKi~G@(Vmn34Hmy^a&=S=^;L0S}H&8$6@*)Kw}B0(@lD+h?vNT`78f_ zLqfo=*=#~DCQ5r=mLAk+h6~==x@;2x*dPFr4;&hz#A1Es~jG_u;Mm{Lq*M|4`H z`}TpRS-mHjPBIP6VqeJAoQj17_h>PS6f*m;w%tth@};hh-^d^CEx06!=obI1&h8Vv zSs?-`EW5aH9g6;Mrwm{Q+NZ{fjTFu`NBy5=-#|B=IlU&nm+f!rZ{ASw%x_z|f0%Oa zA!$lV!2PF$7rJHA>eJP<=?XyW400z|4S*%08_^4!T>pDmP7;OeERZ-;+8NL|Y>$e>i2eQ0b8Ntra6q9UU3*(&@Vx%4nubnLk=t0Z17 zWB`1sbSVhs6zyqhea28|SjZ8aKlGU3RWk+E(dPlT3HL5ei|$?Ec1H&DnFlX6O&KzvHcw% z4E2q5`?}IQOyPUy@kbA@LTTVYk8_w3=}(eZaNXga9%}}JeQ@?Z;+hH5TFUy@n zXu0>~qDI77{L%#;2-<7*69&PIv{yXXZ9nmbb8^^fz1u78NSRs*0D6%L&$8IG 
z*7n#i535Pe3DFy__b$Y{Ri}Tr+p+q{HShBhJ11eAcYAe;)-;9|$Ay#I*fknI#Oi=o zDHrVs4GyET>BX8D*LX_`6*{@i&F_rY?wi062q?aM1ku(JrX2lc_eN}Hrjo54f}OC% z@xa-v>YfUm#n$7Z?wuko_{CBEJ&ce_k5Rp%iqY8YmC?sdc6V&ZnRph(A$+#9F;}ly zWO1?H6zaB9I(PQdvVHoOx2ZO!B33S^l23ed5jPX#9~`<+`udAe-|sd*-#DkzT}30v zns2+D+ZqFqk`4h35Z!-e#~mD;*^fI0PT2PQ$I^|#kAHSzjB9Kq0gE)?NQ8SPt<}El z8M3V8!}3ZWou)tL^R|o%w?@GSFAQ zG;tTXyCqAuIr5Hn`GfbInj9w`5{Z|3?CCL=!gc$Cf-nsQJr}sNKJ2S=yOctv)<6_n zUU5-d*B$%*G5gr%UFyR}1|Hg?61LM&M45fwKzn<}m$#luDx2NIV+v=bFQ-5(WZ>q| zIQA5kdMHwflDO|9!Sd}h8n`bIK9Ou1wbB(i+nIVQ#Txn6)O+gMGNScq`{Xs(!a=%N zA$C;*u=fXt#aQA}N66^kcIc#^fHYF>IWX-KRmpYqkNviUI`@H>-__y3rW~@aR;w%a z5n z%c()jziraD&-UXnWAtW0yoBv7Dj}wF@yfHJo1U{%{@XCftgDhzY$3b{5-=tFZUJqv zzUsDLdG*VhSI1`xl(M8SE&9EvmT4B1uTc+F4pm#hLg)yy*VElDUx^xau+34!YG{k+^|MmUdKmcVi`+J8uYL3R~OyCi}s1@7Vne1}ZQbgKW!S8u%X%2Uv!0 z`|f<#Bx@{oSUF4iinJ+S*aI)iJ#!XcZ>Uv3<$BbG@UA<(nqCv^e?UMzL%ZTBZiOA~ zmIz5nF(r#xkVA;o(3O>EG()7ya>00MvS5VI6I;$2l~DLnXDV!MxeslCk0xmf+#JED zsmqp!kw6H)VhybZey0b{?2O;TyUu)T>hs^aoT}hU(v!tqh!oBt7F8343gpO+eHq_Q+MRRCP}gBb9gZ9%U9VEBGl3esds@K!J~N z5INYW4k4S|B}@r_SwD#Q)kJA}P)^-PxZ&%dF)-1KzG`H2MxS&z(xY~&CgSIyn@c5! zpx|PBszd@E7|fEjYIk$-*{$|fjQQPE%`yMlcpP6k`t+Iq^a{vjrZ~8xIe|zU>JgD}5vd)Y(mS+hQ zA5*>Ys_)mQ87^UN?`Ye}dBF*Qa}!(Q2_U0*hA96|)K#>HB!!1t8}_RXe+<=07jkOe z6t+8cLf5~+Z@LLDTM3=(`nro>vVhSFUOd`%NSV3Gac!#3?GmaC^=?M;*s#aOHD-Lw zzyfg@E?cnRu8Mde7UoF_S@W_;e!X3SV7;8W^@x97`N-idNY?&sdfgJt8sq_Ak{;l{ z>=fs8%#G4zmv;nKHi!*D+p*V8h_3_*;J>l4Hl}PABON(MIPve-xu4NLv3U^8dVuz? 
zB1I=o@Nib&#q8{J7YVno9(6sbsy}@*(ivZg7P;zTS1@zs&x;fPEYW&zTB##bp*;xY z*6T@C9s*IY^F3*DMY=O$Fo!J#rM4F_F10j-?H~Hi_uiylt>3fInd5fNjOMzTOPB7o zZY6w#T!MoeoQjQRiO1y7)L=Ck@gY;50| z^p7sIo*CmHZaM43pU)H4b-u;a{)b&Xfi_`cV{`{xS3Hu#4x$4O{|AWRIMJfFwLaGe zLO3LLrk2CdiCxUB?kUl~cb0Lw{q~4H*lEnw`bUJmT!v0}stHV@-RVkA#kMEX4=ZPm zTBq=SvZ>a*1Y6$*03NU=ls-JM75Ak6{U$)r1k%ZFnv#npGK$qpueW%ZfT~!aa z?_}x#_WIMIxu`VZVFB zVC*LR;7>H)@76#K^oYk#4|<{?_&qJMhK7NFD+#4;8!k*JJTU%hY%&>STUk=wSH8~t z83DhL^iomqrb1#I*0Xj7aZ@k5hR@HlcJY=Pf!71Kr;YtuPvhu#^aJWGy5;ku&5x|` z4pHbD>`suT7YS3!swP(DcP0Jy$^uzb^^f&T56K~l_4PkXCwHfZx^^~AYn<2EgI4RS zKJECp$ytQ_v8NiaZo|4Cl(W|;`=5?SDDsf&HG=*UcDa(S?6s9YIT#|K7`>L|4|;v< zzOKQ)d`;k+RX*aqLYi+_=rK!n22$XJh>wq-^NKq8W5U@R$B&Nj_VX6)c;Qq^>nutV z0C;VZUcK+FY&>hZw6%L)!Y@6VHPipA6g!h4AFTXj>qp>n`_A+@(zNyAgDa?x-RVer zWEMr(Cfwnl?#^?}U_Q+uS=j&S?zoLyxV&@#HH1RUU_W~QAOW`-7cT?jQb-}qR|3Xs zO?h;t?tF<^W8|dn z)jKdrCgmBK+rDS7eN-t}4YqRNu{{3b1}bw?vxyxZ1;0J%#-I)9Z^K;)mL_$=uicph|Tw zY(Nw)-)lr1k+jwtk7Zk%w1)Z8%T7nPd>xw5M@Z2V5T4eil1O^fb#UBU-e5G=OOIR? 
zhAupN4r^Q`3up^ra$j;$gi3p4uN|NPXSL>7(M2W7GZuD?oGcyI;0MNw0W69(j=3V+R&Uu0Ugd=gxbEU zI|<|_HO<->+ND+5*YhGZ&B-*iLyY*!d}XdsD6->~@{BwM`T^&um!v9Pt(@Ri61g?s znXKFY9A3=ALDV@q4t2DUs(X_WljTL7UDsN&T+D;Uyu`r0IARBTlN1xKpW4ET$k5s} zd{%uCzsPL*iUmI5CPo8i_K%SNbSTG|(Ex~#|4c7A%{63%pbrztUAgqpN!*E^qBS)S z0N}z=!`L~Ze-+#Gz8*Rb5z}sh79xY0UevnYsVKY5K(@1mzZ0~EU`1vW`y?YmE zO_a5O4q4fh80+fl1~?AA%PuT5Re%Zg8iAn^Oh<=@3qXOsy`~zN*Z+35S#Yjmlj*N} z!x+L83`X*AdwnfM=0PeDocyP4-Td|QXFv49zeYg`*4G228$e(G`~KmS5dE5(nz+n& z#Ml4ZAaR4>7CoobVd%e8?7Riv(inTO;iamlDNo_)?wrHsp zBM7245o#nMA$gWuI{761iYKJj?x9S- z)RKiBAh6oqlm-WB)PdG4JP9_}Q1IH@MAT^PFx28y#S6Wk&PG?K_@>mS&f6Hh1tu)5 z$NM?U`Z=FCarONGc5CBzy>iopXe;;mq`s@E=_mZLBDzdUxmW)+#@Urg<$bUHedg1Vr_VD3D=zj5A>gpiY z>|=O(eh9c0c}vG_SV;TKV(}6zqSvA$V+)P=gWjR$Ej{mup^Daiu7V4fE?_hlvqMwq zJ5i^}=6pJ>99w$(PrnT(7Ltj$mA-iWD&&7OTA5CprXxGfrtx{)9T(L>QC-rOU;c`# zt1YYyJe$Nf-@`>J+~qUg3!;1ou+>} zY}rsA=+x?}g9(gc1+BLn9sf1ptb?gNN=23bho>8MX_{_sX)mNR-t3fCS{7e6oz%yt zQ)$`WP>x;P%HN#l~Qv7~qcG70VbiWaT(;ZsNGe_#(9SekBeoS86Wsn2X!qdA&p?KA>)wjyvI6+57fNO?UVTWBGuCsac7Ivz>+| zzf5`idHjHd&kTBTDDX$V<)s0hgJt=sw5xOS zZk?6&KjA(`Z7eMf_qAsK1H)A)UjxIKZlPmBBYO;^ zZCwE2jrV;SU*e@3%j+w@Egf^klFyP>f-4aX6Q|a^#@!!vo%!?M2`Slzi2Z)hb~yQh zh+gQB931h*(jlTCtBI_eNN-tUTX8Wc&!Eu-L#yFI#85v3AcUTRihpxAXr#U4i(@ow;bkf_9Ed%Q=A~np#7y_9O z{Vg=qs#VZFxkbQ#JBo)`_EXXf<^Jxeqa(mx!jA}Q17tgokBbEOqjZU7yBdDsmXC$| z(#!7qICGxUHtDLmWj=2C89x8GZXqs40vUT)H{>2brV27d={JqrL0w^>A+wPxR}RO)XmC#_uc%??E|zrb zR&OvKE3`IBR&`Befm_|{yC6>2A#637b!4%p3hesBHv)R`fW-Zhr`Aq3n>e|I zuuZ2qqZj#qu{}r>I%5Lz{VDTw$h`swg6nf0Y)OIBcogRX$oBsr?(2n7Gg#7**{}}8ga932L+mFS}$SC(EsZ1)+jz{iu zi-6%INt5?gr?8WCoX?LlBOp?e^qVVyOmu5;y zUta&7@0Zw1n*Q9#tWs$xl~3^EKHs9){0JU{ceBi00mdkhm-k9hTpB-10m!hbjg^R zg@xP9*SF(!9vK)6W~kVZhb9zmQo22$qg(s#M?AF3u60~eQ^N0V83|f6EEby*<=%0d zLezc>b+~7Hv`pB%X3s~dW%X!Em@U7cdfp^**kh0fQ&{$;oX6X=5t2f$eU>6#pIoy2 zSpV34Nk!EJ^u?60#XIL>PH#bd^ZduI@{4$%YvJz|?q7YFXok2PDB!xL+5P$N4V#vb zL`4C#H@@?4g)|Up?#_Hq^zhJG0;=U6uG8KQ082b4T;u=69om;C<>TM 
zup><9>?zI=q|xRpElHrO{`h5c%Tm{QhZJaPaem}NNLnZ8smP*stWb&iIrI(Inas*OU@>-8uc5N>l)asuxjZ&SznO#!9Io-1 z@3>4Q?6r4I&Fui*8f`zXgi1pSshP8z=i?XaTt;Y_QDh%E3s&y9+9>=zL$G&dU<&6T zX~!XT<>;cGJxNn1jN>{$CMJv!JFSB? z(qD76aor_~l?&q)L9_M1)#;}J0@CyI6{Dd0pGEi$U!CaCgfpJgM^u3{4ksDDr~vR= zOhiWR#+j;~=lOi*a9_(h%xwYJA5|A{Cm#*ZckQXYoY7Q4)MVaD`_#f`&VAS1*Z|7S zla|{SC}v_DsmU4z)XuLMEUZbrq&~mfRb6ZTG&3i@hrLXa^yVNsfn%aaP3M)0Cn( z4rJps=f@6-r9$Q?K~ul=IMdHLTXsiY=)x=7lDc zD{4AB?mYSBp$k|C9?gE$z({o$F&Q|Iy`>}6OZ!9&>BUV0-607qBTcDa*=itKe()cI zvof~@=fM&m#kQcMHLCIPD08OW51r#_@9P;ZjU=1kQ_3?-Up3$)!4Hd~GhaZiVGV!@ zRSwz%9zLg`>E=0S87igE=FKKUD0WWJ7ZADdqv8v0VX7~=z4FPfEc{}zmY*l#T>-nX zca6zsE~Xd}yq>`}7blC_PCIVz=+zNEqD!hKP>`oSYVd@%lzQ>8CwmTZgM5_ z!o@37X^K3;t-H7MDN{|Ki{&keHhIam>tH*IvN_U*H8pe2z_@k`rW&stv(ebE0_GJ_+^kc6BVgPz1G_Pr97v4p_$1cG%iO0Q^ac$I9H43{+At$XY}JaP+n{r=Hmd}ry-a2gT_)a24a$lD5strjBmzdhf8 zUIGdK2-buX2*H|b73jeqNi(k{AS~a~oGXmO0j<`JJgVnC+8{eI$+8*HWm_Y6Arh76B=1lZl0ii5J>CJB6jCM`l4kKHMkA zV{>eExq;KeaXzc(SJpWafDhcVbK|JEO60=hn``S8Vv%iWQn`tw`dM>Lh+8inPHsJAK^?vh*k;-3lQ`0uPi6J4eq zH76BYd>4cV)P4!NbpfOrF=sHfL);TAhC{rmb=@JJ%^vxV@E{$O?ejco4TklsautUE zag)6qve(?#%4za&XHmJI6HN8QqrsW85u71=0i`}4aGf~bE8obNRNPg6Jbt!V z1zwqYef4JAS3*K(N7DiuzD09vB&e#jZt(HBW>&+eXyN$G;yF1aaQ{SgOOld*0a3^p zPMQGZz#6%a^tLmx?Awj>LY;I{qq^?-S$9w_ms|Fh#ow8#gKwtjk%}CX$+=?-B(%+- zdsgI%lzv$jwpDET?PtkfYr7#H6}&Lx7ZUguuP)MpIK_CvI8fBW0x_E{=fLZs5)2c@ z%I2t3Yx+~^we{c0f{iJULu3@sEN|IqgfnM_X zMp3)hRGqla4FqlZR)lQ64gn(;!~)rDqWBf_vA#bb-+C}J3LaP+aAm3}CCtBENLWqM zJ9<8|J6J!SufZYSCS!T^eeBix3HS9~Uj>dYi`eUD*!+ql`8)RoT+oeTV)~{Lw9^i| zFE#-jgM;9!`O1N~@vnOfj^a&o8yh6+?_ltk3bJjV7Y`Z8cPLhCnA`8Z!r6^rh=XU*(XXjm-8CNE_cde|I$# z?Vt$#5cfp)!PBLMh05Tv^XFxLh6n9|L$59=dU%4J}&%30$#^!A10B>1XR&Rbf6&vB)!vB~vzkh8b z-qTS?9Y*tNCB+Uheatcp_FK=|sU#!aGPA>))% zH&K49Zg6vi8IycGVIL(>IR)VkGl)McB@lj&)p#hYT5?K6n>(-4(7Jc>{k`_SovayR z8CU60d}=)mWcghxnXB}xTp!TH{e#?q``@Zf0-&`VV%yNI`Jq&S(icp>v1FIRYBndw zJwC#ej+!p|5azko_QV{WMM6OR+-aVYWOf%}Wh<80YjCYpC*b}#P#jTYk}}TbzZ4w( z@dx?Z1#Z|*=%O15MG7gie*sa;Df7r1`yA#`eGIm>bvFhrQC_!|;*R 
z(?(W87e;(cOHBwbNZwp;SR+jOCP7|FV;sPy=g}kK(^Jc=2O6#C8_1i7lmvyAA^=Ug ziMM!pQ?jx=t~D-DZCb(__h(6S*@;?}OtrelYnJH0``}znJc|6Gsw3kRn6*AlLy+nl znl6*KqA~<|Rmr!Uc!vS|y7B(yH>q5{`HG&J=B@io*-5yfAHcTCRN=_Q+my=Hw9rqw zlTog@w)ad~V&2xqnp6xG+d`CX!Kh(r6(zJ-js*u3o`=}#^{|U+YpXBqc#;DH~EqU_m*KLhy z$}5gMI<1flJ%RQ*5pres(aS3RYLAtQ;;-!?5?a9-6@?qO#Nhk7WrMvQo&}W!Pn9NQ z)GkUXxS)MgVRz(;7M9Nls-7nw1wauSM`x1XOWF%YgxkTp#Zv%A`MR&Ilr7mRu_*ic zVGZh#_42KGv_thB|4)*WQY8y$UzL(zOmVaJt|;XmmUY<~{N$B9ZrzwzaA|VVVnOB! zM=%ooMMub{p=M1Wk8e5VDK_{Dp9cMDZWJx-17gWVxMI&t13d9et#8ABAPP9D?-}Us zLCcSe&3d**;Sm%niZdvDN+EbZo_IhZ@b9pYDs!q|Yiu&IekTdTw7l_#@&T(QFuk-= z+j>tSLBWB8AcRXfY_tU$5v2Wn&}FF>kwiXDu?)+s&AkZ^p2&K__IHq}IbvowCfU~| z@xsh1{B2Q})WpOv-X=!i7x*Ekk9!PWJdiY_#-86NXN~92$u4 zEpHMh5!}plvCrCJD`w=|T%$cEY$*t7djy*Z&6q*N41SOs6C(9@nwF9GrTY1EtxNUW zoso+pY}+qjfv(fTM~5Sx!)#&lhQVu^?HqrX4~7wv38QaAzR@%68dYN; z*U+tte)Y77@1JoKeY8=l*x-TiRQ9sM7iJB2b?h+OmFCKrM2E@Sl3$2^oz3(`gxqRG@zysu-XryOcs|?4MYh?Danc^1DVI_ zRa8w!#w|(%-Ll`n?`jWwL|&9!sFZZ_5;AX8%6OO4SiDeK$a?310mJEu#VGaVR;~iG z(8m#(!A`h8UpLnnhfkmeh=v7N8;PizM&sTc<5>1K-ecyK4>^XpA&&eL%5 z#Y>I&r;-~RN0~HuKK|DPrMkLTUN3?{!-JP$X>MB!J>#h1EBv04Y=XfndExM2j_DZ5 z$xttp?jFAQCm@aPDd@C$CRSQV3sj%$WSX zxAtIIMni0W?2CE}_v<^p+fS8b#+3$MQ`Q5TzSHp|_{E3+n_PV2UG z-6+$imYqG|sqE|X6JAH+4lyt=z2SN!HMP?xn= zR^a273xE+|`JHn*-se6fdw#Qomzvdlx5+rg-w}F-Xes11QUQfN(M$MCLI;VySC{%J zyfB{_{F%1{)EL!xMX`muTHj(>U%k+l6>M%P<^2~#Vt8y#8`f=iDzR{ce5A=2LR1@J z@lth+fa&NYu$+!F8=61gt8;~4r!9{qW~pGO$+RYDULGwysfO@NVmfaJRLR!3Qx%22 zYhUfBlv$8%FBVIBU$8e9-SCxIN-nLr(HE+lHEl$?CSzHKcR-dqZAk@o1o7jO%jX)D zqkr8S^8y-V+d|W9+U?3f6#`nzw>^aWEWdwE7z1WYj0m&v2|l8vNKqIrP&2L*oHKu zzN*PaeA!V!27P1{Xwb!L9&A1fgvfJKTjA!OU8V_?b^(WVy5~j46)&MGec!vw z+Syl>WX*N3?Z6-9m%GI3Cm)sxr_@Uurmz@W{dmxhm)D8k`*~hz^BL3v*@NLdYG;W> z^bs1gMo9yo2<7WrDXofl@oVJ-?LB2nTZKbgDfN7P;F(dviPAVh14BR?MAAp|>ApM$ zr7ko+(q;!uTinmK10eR!dCC|dgkpyokC)C45<;KBD9r)|M2iE zY4{6zi%*4eltSpwtI`h6Xcj#%K+l~uyj60UX6E8`WMgft9R%2H`>W>87Wy`ape0<5 zEcYs~VjyEgN0IFcXgE`2?cr3%ij?|W(SpTI^O{Z5G`o)Tr?X8rlkYZJ&N|f8f~@iv 
zTB(1Zp#JsZ+J}3XD`FMljd=h`D|7s98MJUiom^5{a|50{HC{NE8x9qPwCY6=s zu4Ex%ps7ewAqReAr?9cq@w)5aNTVcxjV=n{l)l>3l|9`Lss+ zvh?mjko>7t-VH}&3K)uCO%b+}zc_Ppyh+kPEYGKPP%=#?njmiUR%UFCfn#5u!i$(p ziR!uRCg`S=>kohByddebiH4>V2Dxlu-P}V%PN{x~D)ln@J)3y;QS3X*2->cV7D znO)w{;${ymX_qr#aki_vVx;ia8^N(UhZJi;4#uLnP{3ZCPHE{~b2>o+Z2LJ#A5F7| zVKF!VaW7v_hu_#pP81L|#@5xRSd%#N)kFOw9E+wDT!pt{wuZ*&0A}P}Z7?Gj_Kv%6 zsXU>;{o%wrcyugRfxli??%$$oKZ}1XcLX@wXwB&{{SPy@_Lq`+556>#9l&iJsTN1`VAsh4)r_j%yu4DIoYJ>nPF}@b`>Pno-EG#W^oip-@+-kk>z&qi z<7f1m?V#lSw$YL24Y=g0D)lE%JVA}}CUCXX`uS^uy56g4#n@pV-K$c@@nY35B#Fei3dJ97o}thzMcy4)F)x?gWst3RbBQlG*2wh z*R*|!)xZ5nCeDwi9k4f}zDPfNRoNI=7yRH>$0Tfr9QTRt+G7U_z;gevq&hk#UvjYz zt*qp@r>K!$H#PD1;r&T@2w_Iv7~yfjn5MmV;P!Ad2Q-ptpvUgKIhW)Jck8A7_N2dt z>_J?CpOg2ZfZPfh=w+1!(faQ}-PmH_N{Q^^I%qXlmYE}Et`2^b!;eH;Ve7sqXU%{82n&bg0h0QX2BIHoWHk(C52zo4u55;@tp>c zdFY3)+}o*zx11J2cD|qoKKpyTWaAZ&nkoXe{`%`{EnzmhDqq?-(j6XL-dr@d5p^mS z;tr(_h2NOCgzPc)fgFLS;bBo1R@|jD-l6otF6~~Q4x9%Kzl7_eHa4KoWB;|3V3vVTg4M~_J;jiK-8WKQ zj1hZ|B2ZNJ-rT*Lk=&TopCnnUC%jTLdx^VYbAFeJ<%?)VXXt`qiJ{flvy3~~E(l-v z2P)gIO)m6_(g>vhY(e*|WcixylkO3xa&8Xu_%}=97S+XM_$+0r2xSHGjV3&bWKX#F zXdm4x?8p*_srkOa-O?IA6Sf2JUGk#=zyWTrCgpVsL~GKdE8ZU-lwQji?c zGyLr;KGMM8q$O}6DOmHtkAL?Z9bT~x@gj?W~<9d5mz-2S_0 zEax#WAdh3G6Dt+rUl{cX-owvZyFXt4go45CktANihm#$b zF2EBEoq@Yq#^4;z^Nv!K?>6HSvX8zSDlamvB|XUn8`2s$;8gC6B{y+rms?ZF$fn+K z_R<#-K>(oX385cp1m3XRBJ>W*xpR2-(80#_gNrvBcV?%QJ8!gBGT2-aFVz#VpJVE&P ze$+cYnowW+a+hfYeC4{lQME&ae*DCEv2(`Fyyyl+V?nXGI|R0)Xq85v?^Ysxx~5&f zl%+1-k9sAvxl#$GQEfw>y~i4&N~rhE(#|d+@8B&0jnh*^I?o=aj~n=%8498CG54N( zyG?ra;$6e+cr-E{8x@X43Ej<)pIJ8;n89L&K~q+wjgb>=2^_SisaFhj%yPWFgry>< zjm~A#p(9QrSl$AfFvf;`3zc3sEmaXArLj*OKFeylcXq2fu(QnY6#*?imwAfxmJ_LE z-!(vGza8v#hNrsl1cLXyq)E#C>>`R4ceU%M+>pDZwVo0oBRGWxt`kd7Y9)HINwF=^N@@LaFMBh{m`%nLm;Dp2uYd@miinx8ZrGeXc0?pM} z$j@bEWI4TKP%w|THpn9yJ3u#2;kVfu4N}T>)JOgW0}7`JH-1&YZAj15!goN7jHanW zP^+~^?FN1Rg3-__o9@owm8-vNHER6dO>_#vHXp!oU(@xo6Hkgff?FuLoT%dmJ}Ae2 zs^fe8N>eDf0iy2flEu#4lpBqP+U1UL9w?1Nhe!{-c@87YmJ1v&bAp;@xPR0fr(_87 
zv)R1Akg{6U3}~{7M~WEFpVv!Q`|WZOHeMrmt%u9vsWlXAfL#pK{{IYR!+os51Qu^# zQVLN%64fx)6)Zjw8XlPWvNT!A@wv&!csJy7(cE#b<%GkVg8?hO4~Y)+amE5d=J{`%#R{Y{% z*NfAc8|(WIK;qRY^?BMkJa^3-A*yfs?`gaWu-G21jMl$e#7WZ-`dWd<{Hx;*>A#s) zKENu|WaaEyYr|YThrfqP=!Q-L?P1Pt($?mb3+kG3~$v>}M-I!Y*ZEbDj{3`|g z*mM8BeqOX7)xh?rkIn7>Kt6Qr>i@a_kn1_{U10TfVhy2Mh6#o)MbE2-z0tLo%0vCB z0Ab8N**DuyGWh1`mtFPpQlWI4hyTJgE`JMLH8JLQrkgo~{__M{tamw80J1w74)H5g zH6J!F1~B)Z4VGS&mX>YzB(NO(!c|89-f~dodeA=gK?NEZQ!yA@IOeUX@Xr45&&bkJ zL=}5Z-N9d&Pvocn^gQ%DM+)N2Xo|#S80xGm3Zw7o%lk~UXg-kN{t|eMtW;8pz~u3? zRR2fwy^`ff8dBxv{&vB}dOSYCo}L7WeZIXPA4Sx-Hz57*sQooi(FTS^Y0bWxAj1Q@ zIgwUqIMR@MJU8c?$ogfEKMd3_1HPm+$>?1>16y2AddW{xQHyprtqyBng%0c;Uh&UV z#9G$;p}Ay5(dji8MbM*c+LwBaxr_Et^ zCvK;w?vn#UW+qv5FsU_o;EERmCPE%AkEX-mt@~N2m&e$L`hNdVASd2(?O({ft9Dcx z9KdEmUD9-}i??2VfPyF$yqW`eYwE1}wj8smDkt z6XRmof7IpV>Xxp+G#`(VT*qgSoD5J4`=Qm{T7+klbsCX%e+n04=NQxB%K^(@eU?{m zEZ@=-;*Qc&d*>zM^9EoDWS`jldqIWq?%<*abswh&J>e=?CBO=17&`|qg-LZ{EXSN0 zZBUOBQWugNFGZv$A%f3fQiwKo^~jb46dCh6aR~UHxkpysl2|D26k!K%;S~TRNeq;Uo^E`PIt?mKLbzk^`<3CC;Hj04;kc6PKLDWi^Mg8KV;U z5SuyPb)4s`Di%nfabWe)-=%L%CqmC>hqg{?J-16;;N+bG03ddpn`deQ@!^Z|uX8*x zF6}sx95;J9+ghM(=bo2H#@b(ZvwVs!nRUw-=SElUC(09EZV~kq<#HkHlaBAGIbpDx zpX=bFZFo>%igKJbh9@UNb1Nr8ol^3N-;2>1J-oM=GoWOtcXBGI^VAIczFvIGfHmcV zA^U_MNr2jvZW*CBju*kjq)1z}{9yZyL?E$Dn>gU&B(7PtsJ-!)UC#yVAD1Yjxhdvb zGz_n_M%s3gj+WhR#o{5f}lh^#s$i+2ga_nf~aO?{;meiGL3< zYu69jgZ*dE=voZid*tx7^u$6oIwfzDPDz*w+7dB0H(#!vA$IMq(`=GzEgZtF@t64M zKN(BZb~KEY3zlOrnl>bRQ*gm1bXjou6KA}FR=Wzd<1j#KjB-hy9lYYYH~Trp|3x9h zsn*bmHs5m8WZ6xLio-9z4TMe2M!pJ*JT2Ob{ojo)p5;Z^uePi&A`^Oe{qEllHSG_k7^-EoiiUkgsW_91*X zqXnw$lvrfgXboYThdx~GVQ$}Pg0IVcw;J(Ft74PWq@}@U`mLjzYmySO1rXK#12-Je z32DQMmp=ki7Klrpo}q~|V&ljxuqQ6RToo@@=fwG%F^^3_H54ZxTxO|C(f;nBE+g<> z=_8b)b)a2%s%TtB;d1Zx!&KtR`xb@35^JPd={*HtbG=qF;!&LE+Y&6 z>mQPPQ%|2h?X$anZ6N|z<5|HhC0-2Gal1IfAESS9E5f_g93=RY8(uTs^3{8_p@#9n zfBzUQ(KJzNk$xa)4a#Jxy<*A^bb_u~tg1hrVPnKa$zHxKi zdpzm?MT}p^lG3XCw)JX$W#Bur3~7%ck&RvKkJ$NwSW}C<^*T0&rI3zc(!o 
z@b)ZeR5Y_}1Yh_^Iu4cSYis{awYySK=h63`@A&cKf(w{CS&)F9$2iY~w35N&KT{2* zkW3p_&yu>Pn-abo2<(Gl>Krw58C!aV31YfOwZ>I^sLNY={94*PVv?Aj<>R(VLY>|( z$Q66OrxaFg6V4E zoE9S6b(hqmjN|M-*KdbLtIWU`dap!XHOsQq>9q!o;1bD+e>~MJ?TZP zWa^`NGY;Cs_YM1A`KB6>H6j(UqiLzHW8tv4cVuDlUwn@pdr3Q|?%f&yW(FVKX>!Y5 z{YGrN1kUgGuZdk8{?{hTo`&8Q=>y)-en`Ic!A298EJ)*t+1|V@h!Kf5oDDnh3*=}b z!5jbjlDWb6eOq6&?0L2_y};G>_m?hQ&p*?*=KfDLHgn>gx7Y;Q@bK_&XLpGBLqeWO z*?fBgQN5k$ zVgHcDMBDrCUJtx6d-kL?v&X3AAHR4f9eWzg4893FRpztsEo{m2lfEo3yF~9s5UuhBB{|u2C z;HKa0IE?g4_H4XwuQnS|wAHJi_3)v7h6DZU+ly2x!K+r9#*5x?u49q8g0qBQ2L5+! zs8}c`l4jwTvlFp0RZ)!|MFv1x90Xe#8R$`$W0XJ06qm{mZU${`@6C^scEdv}@vGFZ z*6i%o{}JmDhrBQT=I)yu&@_W&jEzx%g9(pmGa+&Tk!x*#ko^QNefU4nK7Cj7=J^-L=ZF6PdVg_9M1J=b`j^>9X;2>echNo*=jWzJ zc>kmRse8q%-}Ym9V7Qn+ESKfK=llPTHiOSDuGD$qEhcaAuC^x@0&>=|0|hGMG5gV{ z!T)3P?K5i(l33EEVZzhUNp-yUsi)mdCt|tKBVo z#Q(r%q@pkF+iLDDj7II%>*T%XXr zdGn^*B&rs2-XPzxGY(!_synQl&3pZKZ8mevbHkX~gsBzmGhRx(B@g!PtY+)O{?qW3 zmwY+`Kb!po2BjPDnM%U0R~_OK#0dW)^Z*d{R*G6>U|Jawm~@;X){tAE z+Ur`t6JCOeH&P^5$_Ppbz<=bJbayuc{ zBlb45Ak^jJ@hYd7aO$!E)BAv1BJEL|UBZ_+INTh#{(<1ZnUA3k=1IpW{DOD1S3(j% z0oNkhc{8GG!^626@r#>lEUePfV~tz8sPe$VPRqsKMZLuF8em2DQ`A`9ksZt6;6{sb ztGrlFnU&(4@I<~WSs&H*_Vy5BEw(0b-JbSGsKH=E1RZg>hBQ8`NrX*xP6B+j%ro^U8;8m&gaMmCwyRi{(+V zub=RZ$uB4Xua4eF)7F}V#Kh_&F8zzoq21+CAL5vUppFnywzLZQ?%jQry`V?1lP&cX zlm1=8Edx^!8uLJ4ctsB#IWoVn{3V{HMw-^z>q{%UxyNBGHr}kKvS;NaMqXixp;=9E z8d~R=*g$epTCHvQ1Px+U~-^y-va0$X`fjlm#_Vl;P7Z=O;4Q@@dDs`cbaUii+> zlUWh;UFy781pJI>!-ERjdJ8M74=2uX6lN=hmZDHLySuyO_>qdcdVVG*$unUvcz=t5 zgj|DjoCz~1^Ag+BiZ-D`VUgtHtek^x4pX16+esA1&H5WzN9Y@mYlh1GgR2b+Q?*W_ zIN6f=EbEv1pYTR@F7$P!Q1^;MmPthNaK_gckq*l_%VLDu2qXr?u!o^(WZ~ei_hOrO zHHnk;cx%syig1@$h$(oJnIP*A#1rzO*q0-lZ|z$-NoET$$p8a0M1~>-`&Rk`d$5i( zD(t||HGDdl-abm>-S5`bHYV@F8H^KpO@Y8-K99+#d)OSPFRu7Kvp!e{lVWKPbl|h@ zq7ZS#Of59Vj?Fg$ZcUl)EHy$Euwk*O_6F$%<>EBdi!!{|0<+@pG)ujh>yeY#9|M_E z`Z)c+l-=Jf^l~A?<6g!B^)Z6~ctFn=u%vvW*hW~IZg&e7#YQIVY+MYP@5y3OL88~_ z=3cZF{h61)w!r)EWdU2&RtFbYcIy!Y1+4bn8Y%md?jqk!M3&Jy$7Rd~TA7Xjv!q#E 
zNot)x7Bzc*GN}7(JQAnXXR`9rvNq6R@Wdu`=sh35-guRKx43V3PQVqtXdR2EmO6lS z46Z4l^chyMy)kT*jRWn`Ie1dXrV7`xI<)bHO+`DgS8nk-?UoK^%q6E{dipV!O1NDV z8&=u+O-$_T@hj5myi}VmY8-Z=Q6jnjyGekJu$hf67cavwv?J+At&0A?8p1&%;M}m& zz>{9e&6{m2criPdD3n1aE?JZSfuFG3k~r6};KIS7V&ySjW6X@XBqhT?RyTKRWMwh9 zy?grnA2!@K^ne@##~66N0Um|oJK`}!4bi>(Wj?(}l_bNR-i7C&Z7!m8$I)iIGmLx@ z8FX%9@rew$-`Ys&ho}_V2w>$_w|rcz>ax1IN7iE)`2uwu8HRM40gUy2 zYd3we${nYr6;ID?*TeV|pM$omt*`^XnCY5iN^88N&*z|85O&1LT1_e-U}byTpG(eH z9qSN*7Hyonz&z6_@2`z)-i+p3mTBFMRaFgZ{?=V3>0+Np)klhaa44xsPOI$+!`@#z z+;er|>d^@1I)Lb|%bDy)of|mXdw#Q{Y9I1u>2Yspskf-#WcJgw{t5h5$9^_MdOvHwiltK_4whBCa)dtoptL60@fY*?_VBCkIAYjWp6g2B8*RHv9pSCT zEIKANYga|k?g-Acy5=}Y-B9Z<9>I_?MHg5c$J3F%T1A^cE3L^47DhsT`5CvC#c8Nq zNNGt9Oi6@h=ZveruBFdKbOMDszU^F3v2DT*(Q>=%h&hz->+_>UIG3KfB$DF&dPkb# zZzg|&&&8lyieE5F_e!oX{9;Puxh#u+NtoLNSg{AP4zEE{R;EaIC*`OnG%bG}c5KjN z$Jqq1AaX}@9nmwA=5%tX%q1}PYU*PLn8gTn7cxg*=JT{lEUd60Arhc2>rG`f8YY|f zcxBf+I5P3Iru<`UWt6U_*u6cb5bw+A*fiS6B^H8hYE49O*j|zG8SUNKo-T7twSsvS zC@Y+Yw*uvY@(UeY#aC2>%yNAQaDK6Kh2Mxh%~YGO@Am18z{_Qw-$-Lb*hZ%T4B z=TO_tTw5i;{o%|bDnijLStPNE2K|B4*(MvKrsqkfKlOV5s%>mG+AqrJE@n&c#r%-e zkjgrhy@Fqt@gqI3(*nN>&0ScIiUo%3e)>!4v3}^>&DRF4Ed!;pMiR^4=tnP`1n04E z(Hbg^a8EBx$UpfU)zd#QFreuTL4dUWt|l(_4s+edDTX7HswYKW?B|CgNCDYA4$yR0!#IQmr&jq_}04M26n2^}-s|PiB^j1+0Tpk&IuwH0Kp&pm^WTWNX!QVWlE5 zF?sc>px5k`y>)2r+z06M@c5FeItsOZ^@S7*P|DLGg`(S}_=LXJsE{FP483T1L3=M| zh*wt%C*ikEiCcM6zPPhC10ZF;r?^yFy7ku{Wv<$DLOah2d{wNF=XQBO%vTf4RH?hA zozLr+FJEpCTRJRf1x^jLMV>sc!YOHKph*;Xyk+u(_AEwkGL*RTbk3{(*rKV4+h)Dj z-J!Y<3U?3Bdh_$CgKSgJ&k-pZjj88}n$K-cvZDO|YB)C9&0>BBEBRnQT*7j;!Uq64 z)a0e2LC9qd`NqH_y=v+_%HfM`4GNO#agTQ%aw+y3XDKPgPNznfSgY$`3f_I zpH(F?>GKc4&AHV?wc6@Bbg|>?XV~m?E=x?|&DOf>O)b2V>g&tISaAi@aw5)f|LHsV zk$r18-iy2c{ZTPcufyQ>)bYTEOtpU$c5zd+D$aGMA0aPX6Klgv`P}(-XMK$5-VU&B z-7!+*QVKD1hyWm%mN+ylCCzf_hXk&Ssji3Xh}U{I-`r@x--%2KOL8>9WsO&vWAt_G z{ifINN^i!Lql%+$HBpS7QQDlT*Le@&=c&&~(poF)`(eeGlFHi$nd`kQxe@nc(V?3R z8m?^$(H=K(H)w5A&?~Fcjb8CAJX2acDop#$U0Pen<*+K_NCa2zsPLrkr%eARPo7YB z`yxbj##E&4{K|D-zw3VOMj~Le?%WxA 
zDEfS&Bo$*y9RVhONv>Zy7td3E{#gAd_`)9eMf#tMVZ)>Sg&>R6n0}Ql!cE;;G4B4y z%D)5oZw`|{$gu0wCfTw7JXK+MEvrKy%ullDWx9-`b(wTuEi>S&G_713H3oCkH?|Qw zt*l;?lST>E&p&cg4;HdUtEo{$CKL$1QOr^Lz;7np>q)sgt8V6HS(zl5G7=;9lBl-dI3xq z5qGK7h>U_9KhVO^?KNIw9`;#86~rlpT~E_Ymf>&jx8Tcr9CDFG%I!;!4B_sfA1yE{ zanXN;yR-M*%?`m77su7oqgb@$9401Hi61fn{16=~FLn0~`%qKaBSshT*TgCKbKen| zf$!PL5YTl?>KSj+;Pw(tfANfFZ-CZRMz{H9vP^;1XPy@L=A3j~LojLSU2R4)X+m*j zDT15doVM#V)Fe#L!vAKMEK1gSbLTF$gfOroGpIfU)(1kBgYjKc;iu$KKa%@Sql8Ss=aatd8&7L*OH$DWcE64l{oXUr-!|CXc;p>y!fQ@-m0K3 z0AgY7RPQc#oWq;yZ9Id2+8M|f7x!EB?UOJXi(-=4aIO=Yz>FWk62R6MF=v;h2n25MbPIUcPW$!qUh5xjg5^JA@MS zW}3DGf9$5SRRVVB_qy_>Tm|fY_>~zUw1t%rD*7yRXsSzM6iF~s8Q3o)LM90r`P=iS z*=bXs$;$;p2^8|;_`xcw)T(VIaicCj|BmwrLW7{V_w2CGd~oc#K!Cah__HFwh0$Ce zBhf+IpUA7&5)vnDHc*^foW6;sB&xUB9uh5m7+%9wyy{r<0KUDtu19rpPd&u~5#LY! zY|WEUOJe8RFR~$FEaYES;otiztKMkq6q#CGx(P@PO-)VV`_gO;34jcc06)J9pb=CI z=aA2L#^TRZ$nsgy`KY9A=xf%j`nG=(Zb)tI)#!tDZp1-C;<<3oiah$H{birNffBX7 zYQbw0F(T8latr)%D}G%MiNR~{fO5pLNtT=4}zF+KFg!W|^Kd4p~+x zrAb&@v1{d0xks`mnc06qu@2(B$c(7K)UtSy-T=VSQuEa5i?FQ1%8}i*jAMY8x=rpt zs$ORiRyq9ru;&!^a=*no8QJt(JbU~ z+JJ7^kl*fw&dlok;q{)8{q-H^XD18UCba2 z!|$eOKG3|#E4Lsh#sxx@28e$-N!071Z$Lv_9f-D@l+)3X$^e7OCI^t2$r?+MLN|qo zw96590n2hYb#oFW2%+Nvj04%>J@x@_r-NY0P=g#HrX7i?l1i8c&fu?WH|@F|J*PoF z4H+y6goUAjDrYkbzqyzbWY+fU>@Iph6oN{UCYnh_T;(ES_BY^ob=KdQ*tYdpu03td zsQx%fC|dG+6D`G)`lUbY_UIV^tb<8`!U!K4RZYOiK_9_RPi;{zTb4W;Q| zJYke|RfD1F;<-qKB|}XUbYSC8CVIp))7v>w=hGo!CKh4 z{>u#j#0vXO_W@3Sc`C!j{Ve%AHWylF2s^uR`lQMVwo*4|O+tq22R*CY`}W$1YeiWL zHGdX2-0=2G>(4w^)qwh~&kbh;!-U(~;~;mdHcsjmUCfj9Y9#Iw%-kY*md zy7>?F=>O93`MBidqPfmE0a;nITmOEwv&wJN_7D`5@m>3(K-qCRmc zR{;2BTp5Ib{#R7)OP2pavd`9_mSSx*fIVvcyNF>ARK1?7aK$MEls@^l1x>So$=zo; z`{duIyOygQ!9l*2^M@TphbmZTF{QQ20a*4ZvVCbYlExzMl8xNmWu@L~9Q{9Sjy;w! 
zP6$DG>&HwKspEqWWUb&F_2nTJezg^sDeN7Tr(i=d2uK7B;{Jb)xYNv$1D-BAB{A;P%D*~V=} z>dSx6ADp5DcO9IFKe#upu;G9BiYp^=UQ>zek;({H`-A8mIg*P@=tZ89;p_KA2u%&D9NO)3g%4b_k^H7 zbLz+|76HRTZ5Rv*NGfov$N-W2#^xrCb&TH8=E8_8ptJ|5w-sBYHsoPlMF-ZC0Q{E8 z{8DxSm0}ojF?IyzoU!+xwTxFHFM|&DY2=qbasMtUy$WJrV#0t}u7axO}FaY|2(2C{g>nyQ7e9!T)Pf-8-m57A9{|wn92}*yM zq;FaN51r36HN_60I%5b^@x6{MtY82@cK+pu{?lNl6+j3eeL(9H?X9_6eu#0V_e}Xu z)yCsTW?oWnMN}~crJcQcP&gpmSSIW!_TNU0;Mso`XdZl77q8L!cZ46i@Xx6u|Gzp` z|A)8x3n6#Q32W5w+`2iegSVi-({my#O*UD1#-X>B|*@1MzvTx=wLj( z*&y<@Y_8RiI17nT2bt-W;r~%qG`h97BCtzcNj5L;ei#&J6mjulX-U4|c^5QPKp<15 zxSUSw0y^yA*=`Hl+}=LGuKwNON#94ST^IH!!0ebQ>jL~+&bwxibALcy6r_39sIqPp z$p9;zx$2x7sZUB%a3_M+Gw8)-UA|6F^V+w-sQ0&9Qj8$I?1Potioeu;=nGyFOa)+XXV6*XS9O=1_I~psV~ZEUf4F@te^0``;6-$V7Yg+>dt~Mcp+>G_ZJhog@_y8@phz1MWCGW!m z_^3?JsJNj50#Il1mc8Vzt`NKH?-p6R`kI>adh!xv*{6D*2530uy(ATM$UB1a=57ZD zW!+)9G<%l{dRtY~s+Ne^X%^=LCwr77Nb&SeeNYN#QMT~5xB6?nBqdmRu>)?qo8)aF zzPQihPw3`B2xJF$^5L_rlp#Ye8YmsI9`M1E{U_jk3tOQ-UW+AS5_VO}V=H$cwjuNQ zY5Gwn>6I~3ttD)Q?w1#K{CW{u(jOD4pIktfc=vpMM|7PV0_GVkf|nP1@%L)L%d9Hj z&>KP3PX%npi~p`^`oJplh$8nXZ9Mji|A@fVuoZP*1Dn_gXW#E*`&T*942CiMl8`bN z_drVDYFF9MqZm6hk_%b8a*nyw z_>?NF4@po#DEX9ux$OZYx_5P?=z=npH>?kOUeW#3W~XKRmm1c2$6#)UkZTd_g*7!J z^8r3+2}Sc7W6Ebp%@n3Qy0opTOqf(ysZrGF4W?kry_8?Os|Se+G(S5{GAO<_Ci{J4 zA}BDR8Zt9rRBO8Ttpw7njj_^27bZ)*z+0#I$e!D`qpn_p*gr7`MS7u1#K;~hiLrMY z`flg@2ILuX#QH~2V|GU{B_SUuOoV+<;tH#z@mWbt^s<3j(n7>cV9ia^RSZ3cMS)@~ zouT>I1sf?t@?7B8lxj1@{^x` z(MgH#jjFZ1@|i8f#G?p&^XPhN?QVBiSb|QwLVgU0`gSOhp-LKw_ zibjJ6I zq$PnFVTCT(wbs&5Gl1H4Rf|#xoop^B6w5HC=OZZM5o(u}W3PQ$ zg^^hm+M=+s;9@}XQ+KsyuH8F;Rma=ayTu#Iyskjrp+-#Hjm}IU_Wazf&fZgxyhsS* z#5Q859@`Wae;j)Fm}959tjq6Y3KG@sjGUX%8dEwI2f2>#O5UenWmQBP2ZmnALr4`o z9j4&b&g6MyVJ9jC{>`q%wCLV(LUQDS6oty!xcEgFEI~AQJX(E!-o2sH8!&DAd3EVw z16pN@`foBdtA%wmcb_neujIX7B1A`t>KWC!{21TA$g-g|L&fAe`d^`+Jl%^KnGOgV#27gUZJe4J>ytl*I~Z(XR@Gg?hr6b^eL%2+U=zmr^YTfi>4 zY#;~U@m%H3;M-&6iPn0>_wR65iVm{Q73!fs@c6rpTJ0`$J#<`twX?e@H7z1f#XiI{ zywuDz+ju__04=rEI;gy|`niZQMxDj;Sg)0cRh4H_K-#08C5|?4hL*@62kq`B3na9A 
z`;&20Nsh~Ixu1qL%}jJA+)yAODA-$V0M!i9JFOCOkU{@}EwylfO7P%O2|h)y`z*Kz zd;iVF6gzXy`Kyv|h^*|eo6Yl5CH;~qYW>k|u}l3;-F{+!rn;5}gGsA2u!4D*b>6uR zAUS?UhXVpR+@K8h+%FLAbt++f%a;$WQ+l$Cf2qLmwYELUTykG1eC5ULHToqvbE~NE z=guQtT^I_h$#hbg1S8q}IEIe}5blpzV zA%fn|b5r!6eXSLK{5Ij?=J*}Y<*lCX`&T|+7#MGCXsyLrgwRp=p{mWJ)zva5)XpvC zrv-Ok4Czpsa&FM*`W{40+s*;6$gXs^sfVwb{xaxYoP{IibsEZqV@a#gUn1 zB(lH1y4s;JuK><%$WCH|T}mUT{(kTj8or1jznf4}$hd+zzItssY? znS#m$+IGh&xxAMsy+%a9$ISahf$~P@I5O*8U1#rh-Lw$*n7E?B+}AI4@L0yN{UbQc zh6i-LO?X??LN+rvvw62SsC!6`5gfCQdGOwLcm7V z#@yK}&Vj+i%l}k{h@A!$P;+N$7aV_lB`{i_+Q!W98LeQe2kDxxpr*P+R|+I8pT0qg z9E@yEYeshH&$rj@r!os29SDe(9Ux07m{+SRPq2U>yIHHdPnA_!UKC~eNq%J%B-m%7zP|uik za)IgIj|=0TooB`=2tmmp&$AbGLNoyjFq!Cu;o98lrFMIQ_0rCb>kbY@cy1xXbf}M6 zWIvC8;=)POw>)p5y(oeTTI_6sa)WuqwOV#7l}@84(-6hUd)^b|dT4@3sTb+Rim&2h z<`QA_atQBYEGsYg4HWqYSJ-B6qVZn4b}gIrt%b0PBc`e-@-4ky$VH7y!*O1|Ga*Ws zgnd5(Ra%iUd4nzU-$7j>1JlDGH6`YZJm{s-CW#MqyL_}o)0cw}jkNsCtukb8|5)#z zyokRps!uOuvIxCzp<>|*GZ&qaz4K$I=@R5!y{^>(LY;oUYt!DNK=6Fq5^|_=6uKiO z8q+SLhF}dv!<`M5+!}&w_nmRsgE}uPbsG+hoxNSm$5bwp-@q^rjc^C9HbwW8J0h+Z zKh$C8HgM?tp5@9rZ`pzji0D|2Rlh0Or19Z1^#)quEQ;-l8D#cNqx${# zk6bM6GZQ*Ya*`BTrO6gstoa}*-A$(=-6`eya(>0e*1+i&tp(>0@YZA}5g)!Vzu50! 
zB_8#;;ZGTIs)I&@Y-mwD1t%zqmArs7UhC${dbSQ3m<910N2i-rI2W zQD>R!T7jArN?(x<$cw#Z_&I9U(1C)eMI6vGkFH2O@(Do~V_&G>F}28I?~KFCtLXBz zJYmf!mW46aMITUT38@mHb#^aEDkKgTDps?Eu2ls*aP0p6Fb{Eqaer-u?vI7xZ8*TtK zuFV@Hjm2IfakxdTA{3tnecn^s44@|{M*zb}yV7haDyWE>JywXzoOi6=d7^62!e@%; zVmph=sP@NDRWACl!xU7n5;;)YI@Hl(tt39{cMNy)@*mY6i0Tn*7K>i|Oa=mj`Sh4w zoPS2MYdcCDlLYF)5(^^K^JmQO0kY@A~ih6oiyUQ2d3`8|Q{w|Pn^9Q`U`lH9P zi(TZEl@_{c&(c-5%)#sa4KDF1#q6labY8~^YtfGac{8M|;v%ZLq>EWvB`WTliOQl| zKX;ib95bMS&PH!wdmpd7Wg(1jzK(Ct#z~D2HJkHze>@c*l~w%tW#$=`8lO6$`i?SY z6&ZhHoW6>GP}O<^8bjBJ<^+vQ^BghG`cT5XqJois?3}80J&mgCX`lmMD~3!kHyu6yezd{IxGvU8K zk0-Ge)<~5XhxfTZ`JmI=4A?=;yhxo*>_ox6(dVPi;p`@s)gP+umLdui`a6_^ide6N z&BWiEGl)^aOTEl|yDM!#4G8+0EbAUD9?+0EsckV_n~`~E#Zp|Yv9_t7V=%_@E`Loy ztHNY+I|Ousf^Nrxi^!clK$p ze4+109T1r{YI{CkH)vuX+GOJ2ZA+RGZ$fwnZ%w{_&tk1PnSW7UeB&nronoD>Kx8Jo znH1tDS)23yQ)Y8r40X?1zP3m2Qfc)n)HYwPr_%&tY-ZR!Fl*y7vT0%NM*)eiBZ^yp zRG!nxA@JfysC;;%sJB>NSGs=Gpk-gi&xW~nnW3AG(J3&eOxLDy|LPL@QM!R`hv1>S zqnxA;J^9BnxfJ1S{76vG5GtGl=oa|T%`NonJeDhiP0}|#+DCvCv-&)#*8v_a(tV>Q z0>xu zyWauN7x%+8jnuD9Dd!|$(JsP|L<@H9i&uSb8!4tzsMXoo8^NnrBk`C~rf?jzEa9;&s~=OaC(CtHD?}nK z_T<|#3dT{nQs1&{y!|htHpJM5F{ZT<8-j>1JoP+(AVuCbGG#mM1o=rUoN#I!+T(G@QgW2fD7zVsHWRKk~~C3l=^R_o*zGC_PFNn??el7 zBDf)oxy43wnCO!y$bZS4^wI7$;~exo_J^RJ!>#nI?)dQ8`yTxl!nGd5{|6AY|Av0S z7+FRi9xe>UfnbM=?n~S2Gh_pRNuFm=U?@Ws4$z zGH6i3kV;-jI)Kf`+nL9j0Ge4}CMON=2niW_c~y*77C1N^IWjrYpP^jojFou$^l2Uf zL4Gd(p9^Fv02hcYIb3@Y>jVC{%D1COUisT&ijx7>D`_qcG|E&v<@0L*x!rdffj${b zIe2uOh%3AtbO$YPKr_u>z*TeuLNvz#X5G72KsSbb) zg$6hW?crh3N%!FukCwE(ab9_}h#By&4a!bRFHwF-BKI_~#>i8Yaq#KJZ8DY^_Nw{)Ri(bB?{SocBxo#>MwZ21Ir433 zG0~NNBvhXiMSW%#FcP6?xK#HzM#v`T9D!IEoP*QTBJTW>RD3mmH8*I`|J-u5m|)}E zqxj)cM|MT`?`LE_A3*8@{et$k-PvW8IP{J@2zQC|>NkK2V}O?tMGY~`R8VK*CrRI_ z#nT2isN3`Qr^=JaDZl_U%<62tEg@l8TE4LyswwMN{*sZEwLqj(btRO0&cW8H@TFDT z?QzJO#reaH6j~L!x+;aEqGboA&>4es4)kVaqY*%nxFBcH;4WtqE&Gc8rj{$$X+ogu z2(ax#O3B;`Y@kPBzO|jxzbCBIaz1hl$>oaHx4EOUc|I0g*w$Db?ulDb7%HS3 z3ijkZD9Cxj?}?y&o^zPrQg_%6WULo6yo`RTX-Q5ynY%K4}+Px|X 
zK2hxe)y~0$3xG;8xAGuGVU$o)(X6X!rqaszjjs@x6dPZVdvDje_0Mm(j%H zr`=wG;07|3LBu8Uib3HJkGL7XMOE$;B|P@~U@72Has&^ja)q0P3@G=oqiYwEtU$9U z;D;Wu_r@hD-Iv3`mRTJ=cVpkuZ?buLW~SM9=#&TB7JT{gCAR>Gewu zCe(tX`7z+OJNqRk?-)yK==4NB=@!1N-Sqb}Pet>RP;SwN-VOK59C-XpY2FNS28R1; zK@`HcbnnEh=0QE|MqiJJL*CbL>*|(P#pL#UtC>#Gf7s-xK`_vZmMVF0@?;dl63>wp zQ#@vywp;Xn_`l5=jh1j zSDJ@BpgK_z*iDmk*3tPiFgQp`J=jxLq*?FD*8z*sMaHF%T#-B3z@9JMDlsuJ{?wIf z(tw)q5+E#J?aXuU*A{6)z@9zDWAtbOl^9{))?+bR+qs>Z1aMsorW0MaY(|bk&A0Qo z9S-YL>~JsxLk=|6@1We7zYmzV8cGQG&dHN^Zu{zXba|O%{Mv)%w#4xVaJF9Zk)Tpl ze>sORM-b3#9YgxU8L)9+W1!DV@k&B#$G9+M(Roj+zS&Gf>N8RjvQ>h^D1a@rcT|eb zE)v6w2i0VTJ*|2U)o6%~2)*aw0DJ^>@}%vn0^>YM)QRE^TONK$9oMkI7e1R4PaD>Y zoWppb1Hk`DBbyHmkl?5RyiNN3BWG zw;clVBRMwo8~yKlB4kI?pIfF!i=Zx;UL4BguhOZ3-$DG zIJn?o{P3@H=WJP1Ykx}dIl}FZ6IKO8ZPcb4$b6C?Vi2Cj-7r@zRPg)`SW)@hJdeMW zvU#r_-@<;6QF>PC6o$&cgF9^-j*xrgxG zJ>uT6=9cr!_?LRIDJcnbr0nM?XSRLweG+#@g=e(NLbv30=R%i6g~+OoIOQr=#9pu} z&Wu`FvcmIWzo4=xYU|5gOrc-0tnazk%9hg(^ntk3y_CC~Flf}uu&p}ee3xBEJx3uW z#eX%f4X`LjxHpL-bgUAK?Vq>+ePJDy_v?zs-n;?$kXTp9+T=r^7DFXc-`u=#U_ckp z9X4i_aQf=Z4~#q4N94<|NzkK5-0oL6qEy*sl9mF4R%=_WKH8;14K?C->}RFzFk$IZ z3rV{Xx%L;RL|Z5ba$k81JsSt_8xFWCDDaufBqsNTt+xCvm(S_kHpP{vJnuT(Mf?C8 z3xgBh$$WnJ)b5^R-#s4HI};A<#Y+kw#@m0g=RFg>A?{|MZuY7(%Mz~7*%K{u`NMKr z!h>us$os70P3?9o;U&&iXHWUO*Ut?6IRa8XcS{QqhT3?2tXqOGZK08P{ozBkx?OWc z`emH!sd=`xe*46iML$J?C(v7b@XbEIz;1mmax`0IRmkOgBpH)JPO14oH#QTxZ>M8tRoaWRgo31LN)z}Q}&Ue(52E1vRTjphaLj+)k}OoOUHnVxQm{mhtK zrkcB@ByXIRmEtv9TD7a*`@mY_uyk>E!etViFz&9PQjBqdj^?~&-XiA%!cEfH(- z0V)P3(+>ms&8~z-yIzq4Z+SxD;gQ4voTPuv{ui-9vJqjG`St=wrn{BW(sxa!XOXbW zipI@X-s0zHFYlTx$3$j+R?F}rbpKS{Yq`V|aorZ%k=Jv1`M$35kJi>ZOHJW{qL-$fGvzS>y_qU|# zifi57JSX302(oo)rUF}E#K_3lbv;kz+IAXeni(Ftmc*VrB9O!H2u$dtwhI2i4Y4oH zSyj*7t$IKCyPeQ%2*pGR7=cMv$HB)od-_*wZ=f%K_VG3z@+$P3jdbkvy_MZWj=hd^ zmPZeUf7&ljeic5q_+(vZzoekjKdWEj@=R>Om)MJy@7y~9+qC}UO@?5gX4a7QJgq{4 zG;&(D?Md5eU_B0)6OFYy^NA3k?hlVPb2aewt>Iom5Q_Bz1MA*1v+P!C5`9HUMHjNY0Gee(+7C0pRyjkS zrgnN+8hG9%H`=| 
zw!fs6JLnHvz5UbcA3Pc#J6X_Y?^~q)a8r0Vxm`GC0PgD#H2r3bQKt>)v$G++YHIR$ zjF)*xYJ+)M8JTK--|AwJOSur(VIa;F_+hwf z?0KL8G>2Ksj~}@}Uer@&SY(<95UUW2>7nN6@OSUDODr1_Xhf5sL%!I9^FBa66Pd{w zwAYVp7idF}b}l%v@w?sKDG$HAb`j3N7H>Lp|7J1tmZ_Tm1<^D3>r>0IE=_Fl#AsPP zmGieOj?W4ml?7C~J!X#*r`oB@k8cb&#asSRmz z)O1I)v~JsM6`cWL-~ZXk*`)Rgv_!0ExY~QQHZ|vD`)fTHFJP};fK2!-%VjzMajFQ+ zSGQUGp3BNBkHV?Xiz77C0=-prdabCTfu92pg!nt0wA!Dc`C^wLd5V-FE8C7t#Htb9 zWZvL0AT)(0#Cz$vju9ppPa4I5YNhe%Dkp zgUL&}TT1*M3&F8oci$%v&-BqJIBU8%z`H;Kajv(+!>Yp0Rn@aax%xl*$rW>_EJQ?i zd`@)Ybz)>`$-H%rl4Hc_tbiPT)fQD~`*!hP5zJg+$yCRJf@D``y}G}yI*tZ)l?|9G zy_4n566Z}2y!(FW>y)^b(2sK^SR?YMJHFv!)3+EHS1Fn@WuuJ59@eWNgOI0tO#})FG zaNjU0iISGEbD~TXJn;DEz|pzLPMp!aPkYjH39bHC?%p2H(a**s?#tuFv{R+0G4c@Y zZJoGMQy0w%3(wv}+&16su^LWRK)Y@C9R6DJawn+zsNFA`R^uqL!Pv+G64Nm3fF)ae zsLu@bn*N)CA@eGq=Iz_Jb1jLD!wp*f4G63x-4Us71_l?P5*n7{DR>o~63}CU=NLiR zH1swOXMAu)dLj?zvBaU3NFf(OvJ)`mOZ`b3vB+29kh;pE)8lC zV!y0WbVy*%wS3eF)w6O@HLxx}JxAcu61B=voxs;fS^)3G;Kx_}5}50r^$TI5-Dz1a zS{If#hw6KWWzE8<&z>(Qy37Cc?iKBMlYJH;c~!XW6fi88z4j8TO8*;sNh8ys;6}2P zj%dC%x&Ji;DOq*&$OEA6j%PE-iLHbaR#;Y&ouRn5SO)Jh56-#OMl7Tfb4rNEr+mpK|q|nO$0ISc`kvsoi^sQQDOhES?QhR$+)u~7M66+bf&-Wb1QYmR{!v2 z$%66wCG<~tg^L8&S{_eB*9zh{xs_L0Z2Z-DX4KsoeK%}Taw{)g%M+weu>_<@@^A^p ztc$&B5FHFu<+pO|zN&pRv+7sA)q0q3eWKFoW8Qs-Qt5%cS|jl3@6Ko4;A$s2&wim& z4M(dR^__d|b3$c6&v9Z$CdDcWtgAP1z;?ih+tZKU{9WO0l9P|;TNx*^#)s(3()YMV2iDNZeU*!= zofqWTWWDS6e@4L70BZZKNkGYwXJf$mp1XLl>X*P?j@TiH zbQd6fl4zdSNqvWM3xF&^)lE`hCEZQn3J=(~lTWe;19c6+qHO_2ydU_)FEsVR`3Ju! 
zmQvv-PoFNZsH-|hw=y|+c<>AVeRb{Czs%mFepZn`YhGZ;vz1Cd zne#R&1SLBEqNqGBpxiD#(n`r;iG%t&yVRwX5%0Hof2CY_4bPxxxfZdTLJ=JODZfAn z3ja-^O$t0g(JF&FWDJSUpQB(@&UpZ6?7UCm|r8JD!-yh{& z&2t{8%3-u~W{Pko06S5x)VRDQQK1f0TRF>X#U!tERk=VNr*Sr7A&MlgZe_e6Vs5*f`ch@|+T4knt%EmI^j@Pcnskz)aVN=o}jjKhwf;o2= zn>An6bszi++W-WO}yPc9%1T6Y)Z z#nIS2^{76otrG0cNXJ0CO;L);en~5$qaMZ=Zk!smO zhoS4}a|D02q{fiQ5-nlZ(?h$CK*q3*6-kN|Fvi;K6|Q-Q_z6p4#_`0JQ7Zi_hvWyO zs?C>8vS)v$g5P=urJ(3~-jJLGsu9lys0oxIiSe}qIbt({*~tEN;lfkNZWXcMB^PXg z70z*-$;h&oo|71H#vJG-6~0Wvh3YEdU-RwPR5Fz$!K>nSZkdYm?tHA1Su*Jk^{lVe z9OQh5MpxrRk68!AF2$qz6Fxbfg;~gH7A!YuMHHqI3hVZ9ZoM|K4>k}_wZPzka@!*) z1`3m)O5{Yx{o@r1XovCioUGIHOU0q=JgYd2+p4pyFP!UvfJ$=Ay<4WEmebOGi`r(` z06ky`J0+U7jOQ@-7eDNSf^q%mOlnw_SDVSge9zx%h!<7Kq~L)WLc5;kxTsiski zLP=i_DrM6u=X!~?8#U4622Q={+00qBvcw#AMUL47@$eh9Y^N!FPJ#Ia;lkJ=wG|vV zNw^2B0*5C~?Iw>nh!FW}lNB4wErTN4HgV;I=xFYhJ0h~FT|`7vR^gL6F>@6(*?X82 zUzMMTkfkihbc;sl(Xbu4p`EZ<>qS9VT!USFiiy0rnt4wsRgB9&(XeD3$R!GFK!J0E zw97|Vw*N}yR=MdDf@S)(OgPEdM7y@)={#a9y!l4_TMrA}cGy$HI~6tc3N zT_v-EDP9?%JG*+cr0Hxk_}H8R>gO;To%6OXyD7Kr40+UVf*yP>q9zBgU|(6)z*Grq z&8f-@?HAFxdRI(|TQ(xJnporeE;5~U<8PJlWIz)1W57)0{no+tWkwUi!b;$dCRM`6 z1;<;2>?Mt<2AY})658VsquYpa#1OfGiH?@_@@_J_Qd_s1r?u~UZ->NrZqq@edD__m z3yGNu$-K5_q549u9@>wtYFCY^Pn!>dIjZe1n;-iYFfr&@3pZDk4l|ztFlhYdlh>&&#(Ei2 zppt})c~(ozXtfy=Cz3LKQ`JKuz7%BQ+9`1k_vu_KIUf>T`>C}9f!uTmO>6XmXM^Hm zUYy98c}sZfOuP+_D4J;xx2o&SuN%>^qYJ~-6`7N1&>~BVD8~*iWmO?;r-%m6qF3GLn!k4FRK@EX0_UwTIn@97zy>tgGSF6_d2qdj^5@NFkcqf8LRvz zEK?95+`Ghswy~*!rxcfKPE?~9$VE_Pvz0sx%~%<1>uIKlL1JXsjYVW7@frHl%D6Bo zY4=B0C$6q8t1x9Duk)F`tmvDL$Wf1a$p=NarKicM4$mzJV=fkfyOvxA{bnY_M0Uy8 zoO?oWeV^+3t?=2v^cmm@Ru(hsHC0rw6*icK(X#G1cqc)ucln-Yg;w)u$A-SD=G%#l ztrH)Dt2f8;A5!O$!pL=)mS>&fEkH@dK5Xm&Hha-vfwSt5{{(NY@1ebgZjoHZwa1{WDWfe6qP`0i~DWH?lDjBRV>cyV|3z2@_S5gGYX`bi&$io@_~=N zv;@sIy1w+JZT#og-?6uE*N4Nej{V|MZoivUs9!8xZE)JDPK~s+j-08raok%UjgwH7 zmsba_@C}&0@Y+Z2T9CoW(T#@VQv7h@j!sdJYsQyNGto4m!=d{PDDR&_O-chHwIQLC z@<1d3WEFvVIs+0{Fj`oIok$98b3mcBDl>X^t{oApzKY|FXL8u3XwD7okjl5r1G5 
zr3cqEn@Pb>fKfMaL?I4WpSvm83qX?Jb9)le;jL4Hu?siW98r8j~{HjK6a&8}glRI!H( zz-&2e;z^*2RPY;+vMC|AKMRD7q3V{w?b803+7!Vypy(5$42C%-c>=aq6(jr%Zw`y%m%7s%!YO0DdiYvQN&wIx$?E+ZL8{3@Pex<$&4LnOl zKVv->yul@PWAMs2&kj@D7tRY1(PHi1pUj8(142*>`xVpey&5oPl=|6|PfC$sZ7i1v{>zyjiv3Hyg)-K%VprdXfl0icc^YXsR` z=VL_`DEm}8Sk2cdp4AM5s)zVH4QUlLy2qY;Q;l9PVXpl>md*b6qC;3EG@!H?fxi>r z-ms#^npKm7aQKOCwEt>->ci@w*#~r(x^n*c4E6J1U1%YYJbtp9P>(MKM&k6ue%5A# z^{JREIGspO*~2kDl`u#!;gbIem=;YSs>}daam6>4{AAXRtW7D`^bo+39=tUSCz|W= zNyjGQDm4&ICPak7%=T)pAk}52N{gdv>aRAk`*M!W!8kkwOBosMt4&4E%ylk|t*8YU zn>;?4Mu1|imSN_138)-85;D8@aMq z2dND~5hc?IBJBXvHDM;XDlz=(>y>YjZ`{=VlPY$yvz}I(y>oi9Y@S38P;G4Dkv}@3 zXy&H-3FpwO(z}C};eW?=I!WTk?BBDQ?t`PHUl}*m%=6M9<27QuRd4RPRNd`DDUEqy zM@1VIZOId&<-((gn}1F8wmqir>q;0A1s5@^q?&A+G|jA4m`qZ)paOlZO*T!N^lWBi z&u$@Kxu4+HoiIs`4UEoOc@lxOx6=JJAHmYvhGu$gwoh}(y3r-rd10si8;m#c`00z< zhTdjwrCLIYcJxTH%;}&VnN->kAOCGPU2LY=H3s3$0?vjgQ2nmjY5|fjYk8;m{NG(4 zT}`xuf}-o|FUzs{meW&Lt**<4ruievmxGdbo?W)`Vz+SX%L zzu*|Mo+I)3`q0H0E$4gtL#M7j7FziT`mD8o=`?h1LvrOABO@d4ej@xAhRHLG2c>jBx7+)nF)dHrN3XB=EI-Xr@g<9Sjm8%b50E3#TJG?JtRlkknFBCQ%{ z_y_j*e1=h4-u|(}RWy1CxkwGaY%7b55KAQr&iF6zu2|s0B(AGA~IQx#g{KNIjvwV{jFoOQ&7xon&r>_RCf7m2{%8Z5wrI z72O-}xp$BtEw=n%nISocBp8EjH=C4pfMM+^1Y?Dinv(K9(S?_rf*S3%Pf!eK3=#qE zTZBpZ!e9rR5~#9jSXD&hK@|p*dj{QFUA;+B!EI1;*C6J8Jny8;`L6%000!Z@Z$Wvd z@5D8&tUl*_rV1(tHh)+VRTU5lnKC-fCt}9?-u9o-qVly42sSR7`UfDZ@+Sh1A4eXY z^fjOMg&HdM@uiA}JH@aU;8T}*W;Jp}xias5x2FBp>MzM6JVpFXzwmV0&h}OehsEkb zONMf9yn~IyJ8rxq?gW3-9slg1-Eqq< zaXoQkzRYkeW04_eZe3kiXIiI5+(vF$8Dy*qWx{0VEU!T1?htBA85tu8WXqpbV z>9ItSvXiZJe50hc?Szc1sfsFFYy)0l<|tCXeb`ah)4NoYuT8)m&bU?kjE6*0b9qb5 zi9J2ku@3$gl&j49ruw0cM`c~u$*cVQU^8ck6r9|EKhM|qAif{M6Y!CdDmKJRW}`y& zgx=KEF(!zIh48(LmdBy24EA~;cKxt~lsq|Ec*gVbPOQi)nVGeeRy$%xVGYr!-4{*4 zD=~{=3D22$NeQco1FKTVY)ibiQq8~{MYkvvK|wCRuia14{6}??d961f8?V~Loc!Bh zQu(Sh}RlVn4+H!cUB(n(Yn}=9? 
zb)4W;+;!@4-Zd~czWLenkjBQ1@h5w?RB?FoZ0l|{Y^AstPFPGsSw-6-1p55*@RwV# zuaOy!5frRVL}n#3Ime~M^?JuB)1!^ z;2v**y%L%crR#JZP}#`-{2J!HUrXgkpK1hmveUG+?`7-q(25lpo%eR_Yhu}!Wh45Dq9TJ5Y5T2K^ zbq?bsSSI-CAo1!^35u<95JEZsMKMCU+kDwI7&|6(zRH>e3$_ zYU>`gl03Fq`B_EFF*bSSaZAlV%J6FF&+|qns@=IJu>!+C<0kWf?D}DsMvorDw@;0< zjhSkP8nn;5%ICJc@+_x2J7-}nzkIAEn4VB_8eFpr6kcE}1BQlw?8Q3KTbGUWvFfyU znvRsemhDER%DWEdGP3RWM{YPl#9fkDnrf2+fKy#Ld<7;D8B{{Gt*3nIMI-8=wWL3qd5HE7 zIFf??TxPk=bX}95PXaayP%FxyjFXQln(hR#Maz?vf_~f)2gGsqJE2qGiqeZ`W(P6$ zr3HID#M?zC+j44K>#qXyppnJPYaA=BxygB^w6UqDGt+(6yjjK6e$EHcA@gJNFRD3o z`=A3gR*Ew|ks5lb>ErXCPw0zK{qvB&ZF*~|h$T9Mcqg_C6*XejW)6Vaj zEQo?ebwmpkGG(6EaQL2`X4`Hr>($;$lHtm=0jHwO&jVsQCkIPD>yt9bubKT7J1dcc zF?TF}Hl?i^q*N=GH(eRt+C->Y=O59s5{wBvKx=pKDjvSd&SKXq-C|1ukL!|$nY)GM zN#fPpM*xS6(Hd)ajvDaNJ*?)OQ*Pl1Y<;?6 zVvq0b0HlX<+&6V9Q0PI^mcA1=as}4^iMz;jGSHr6%cpQ%VK-AX(izY2SHy8iCx!Vl%H3tN8 z(goi#+D39e_JtWp0vaeIB@0a7`N^%rS8(a4Mp@?_A@PZl-~zxWcw}B)>wWl7C>!&2 zL*15TxZ|C@0rzdD+k*bG4@3UHY)nC3-s9bKAiahj^RQZK3L;F7MuV?ey(xamg--ff zr+DS}(AYajW`Q>Kr@5e0cOyold3;58_cqF5Jk-1KbC`ux?45~by#*_eqUfF;r*Gjt zCM7OpUYXO{T9=Fcc`KNdItRmVYiD{O&Ga0}o1QY)gT~}rh`LF&oOC*_HL#VLImZC= zHC^>Kr(;;fpbQV7=d-|to)C(4329vV;yh9Jh%fIZR!*~Xpe z$T6ytb64g*uWN=Nik!ey_1e0buSpr0{j&&*KiIWJ+u*-GQ91bmzQ~N{PrlXl12&)3 z9`5TXSu}q z-p`&Ct+Qz^D&*{438x=8z4hD|x6@^-!T#hJJBgU76c>`e0XDleWC0KK zAz0Ux`XuHEAMVCJ7jX4F`}YZHI}&L8gRVS*3k@2j(6mTWcO-nP6u-D3ukiIhCbXD$ zeFLXc#keT5(IDSCphDf^&a4toDD^)$8%~OSOcx(c{P)`}GNER5T_;pW2&D`tpV9%^N zDD?dM8bwj1IVRNK2U^L;aSu316TLV1a1)&F>*Rj9a{4pyGft_hY>GN2>4o;ikd?o1 zz}k)__6n#h&k8U(4g#WG8Ie_2aW*v6`$b5@W-i7ks_5=aJSi)%n~^xy_;@&c40WBw z&aANjeoZ`Wg9y3Wtp!I9>1Q%a)}^hIi#f|8)F1Oz!!c*usn=UXCnI>oLcc|!FB<;k zqyL|uw4=a2IJS>LpN-Pix>y}PZINu$zqj)TPZ6D_0f&iQ_PlZ{=H^$YL);iGP9^V$ zdj<6#W6`zU0kz}NwY?a0x3RM$k^*iK2Wt+58c`osESQgNNK&vyN@|}Z&9Xqm&vj9o z`i5m6;~;Xc>T4%Pe#LS~Hb3z`Z>kI6@1m+{I*2|tHuL`dksn-1u` zbToEx)Ld)eUF#gvg29^&h%%|=nqeB-`lDU`uD*xU_IU^q_vZUUlEE0f6U~NMzSsEJ z0h*n-#$P5mTW8zFai6z$VbwKM#Fz!)(>>6Kn=sO*!!lbbZjoW+C3yFub-OPdd?7Ad 
zKJR-GTj{k2_X71v9FIP#(9QnFqf#2%+kmp`Rc|@qMgb!pWmj6;6Q7#xVna@A+BC-%<4oRomz~OqsP_z zd%gOsz+1A04mD1P#Hct>M%5i7DWXk*ra@zySBgK~N|F0(Gnn`15I3#siu`3~q!X*f zAeaFZ7M3{ulc!s-&8`hnZIgMnzB}j}O1@PUX0FmxH?7+N3rh{WBeFehBlVqJU~clQ z_M{+t07)2)jrjOeGM8nQA~%-h*XoQVNOo6`aJL;-Qc`O0Zm?t6BWT`B6)kBI%e_E#z$;QB`@QBEMWUpNiI5qT$;eJsb zD5F^X7%bPc8K+S{Nv6C#(T3{Wi*fRCjr?jYegxYXXnRv0mjVNKJ@nsS<*Gq5uur90 z7>dvJ-$Y4_%XKum)pk`v@Zg9USr-vH35FXp7Gi_%nKtcSR6X{9#ztJsN8T|zC2DQJ0-Uul?eA-Y#N$us$DWz(_o)?B&KR0<7> zS&Psdt{^`({k5#qEi9&ZO3tQq&S*2Wb8hoKV3szVRHx|cZEz}tNiv16#?6PWq-_F* zB=htl29WW;JHzO<%m0rbH@tiIV^&Cd@6IDf`0)tB>RYknaPNaX;00hUW{kS2 z(OEQt5%TK9)G?WZJhW3cmT3! z7f}&*IE1nl5+BH8q;YgWVgkJ>eR}2ng-`RG28cm&zkK9m;6J!kIr_`N*@*ogR;P>v z`m?3^f2!M7wv#F-cB~KTUK-Ot3sUyI>F^Pvt`S;UHl{!ryAa*ZPyNWlk{Tz^^fwNa z0K$pb=vyqp@2nA9T){(afQN1b&Wz@L3rx~VKstEB?B&8dI{-Q9>>=7tUK$0zkA%Ds zOf6~X6biOgN)hVlef(Y|jN9Vz(sY&js%qq|p3RqgO$allUmLKnr@saut zYl;l%P|BO5k_TfXo$4z`xia~MtxAHrSxX1ycr4^gRZ%Q)iAW|Yy5c_BK^9Y&uJJPsnUH3nJQRacK)V$S0GRT~i z2Bq711GvcuE*!Pu^Mj4KQn%#ZF5!lX&yBRv<}}*j&pc^9d+e+-uT99ApF$Y@QUa9o z2>PrnXIrg7TP{MFJ_q>;ZeVAywDo`T=$!oEY;f}V04t=rI;_yLp}I%t=3WBTpVkhZ z-tO7FYUS~%VjGx{Xn)`4Y;rN3I%CUP^r91&Fh-JLlrhkiG)iGmQs_MoOX#LTpJ~4v zPzbwk?-}A37Xlc4n%YZ8hnhO1cYLDt<9L9!?^)wqhWanq8M7PA!@^An_iRLz;B?VgEv?Jk(-sHM>uY(y(1IzO5{ zrb#zU^ylG{*_Yqe*jYB&8-suNHP)7G=tckD|NI@nP&1eWiF{cZGXuU?Q(VH-{RZ$UxS%SydSsJo2=A<27vl zfR~I>mBPTK;&nir-2p*@Og47ZDJuFOO*2cUC=Y78OxaTU-LSs@A9 z2r@5$#Hve6{6O{Zhb3t{Sb58!B1}hD`mguQ`7A|Ou)6mr26Y0Nk%`pui+kUHck7SS zYw&Pv5^(>q;Q|$uH&d8;MrcJQXhx##xH&njOLBc>^)hwR&h;B_fT|NB5>9T2ikN%D>$D!@~Hzx7?dRHam{ zKJ#>t{qJyRz5Ms?B3HcDv!dg=65?rQs)-LxRBsxmY;+}%Y3VUs_;LytUlCQTdz`yYgy(DzZ`gJTIq(@az25+HiL|e4Ty;3i(jXs|K zMB~zwVE0Sg*>D#d0z}yi13|vePMgKiQJ19RW*hC%=@dK{vojQW1x=~%BZr>P^!C}@ z)g-D4|;5y-Wciz*n2y{?FcH-~(mw-gF^8+jEgb=c~PZ@gnk_E^}jJAN+pU;T$~NuOdA&iIg=>&b`ml_Gk-+sB(gegyH|>^$3_ zhMu2K5A48IO;1&Bo_5~#74e&|3CSe*4Qg3v%_Flpm>J;x0_yzoRt#@ZD-# z^R=%#zBA7TeLVs*KwS6t5@`cu&8ar;WnN|?0;Y`r!@fhMzPGkN*4m8hgI6-e+Bg_m zd0>ORBgH*xIYYAfVaiO>!c1nUa*ULCqF(XwD7{2( 
z;wDr-h}Un7LvB-B@V>biy{L;=*K8ON)f;V|V??YOVNX`Sg}9#vgB(vc!;Q*#jML~_ zAd3xKx;uuT^xwX48qS3qv{XVLcz)m+g!;{kQ~htVR-xi;m~A@-7)9@H8 z9^?L7#x3)M_@C8XgqPhg#4xl*VlKxqD4M;B4~ zc-9wv3Wx0aKp_&wD|6o3l-!M{xB9kfQvCg|Lfu?;!(PKzu&c>&^L&;sEO;{l0Me6A z+hvyX_!3_9rh+>1o~vk-Zid$&{y1|25?Pu_G1lV)1cTYjm)&>w&qmiRUd>@p#ic2HOZU^8Gz`A}Ky^Y(0lYEfBO~k0 z4G&u#5XvYm+@HwRLW*#r_5RJyh8z1+6D*ljPWLAeF>#ms@ag3;2ADG6@xsXo&Jj;B2IV%81Zo{Q50Mw_+nOP;Gv$kf9{qNbcxrKL> z;>B^I)iCeuPuw-jnZ5%f8KG;mQw3q0f&mAwnr}M{f~!l;0a{Pe}F{mBhKuoF1Wq+Y&C?3g&3SzB>tfVF^{SDLwb_ zgjPrK$#PQ%#~P#QM^KN^)O$yik3?Hr(hB{kM#tLT+%3}Jib2_bOpF?(-W$_UZGEVn zq|#6Zkm$Z}ElS|883WqbjF*+Qb#nzCjwlPyo#+YFt_ymGO%Vmg9=BoS# zgAsU#IXLgCT;xd5@f{D5Gw4A+uUof=N0p$NJIU2A^witl7;Sg%PoE4YWaX>*CPmw_ zsY65!;5Yck9t=t)C_X#`J2iyQBGvQ}swuDR?EfO_!_!{6m%z(9B*eFbAZa4~av=FQYhBUIb`pX2hgWTMRkil> zCfXYewt2RU+m=h;@!MMVIY!t z95ksTy{K5-j|DU!!Ov?Lz!zjCg%0dD~;i^5Zziew(ky}8kG$&wUu_&klt?U$qTNyOA9!Y*Z}8+_`&D<0!3_2Ev;Qmhm`K2BcI*iY&B z;5(}@atUkw{n2DH-p)=Acdfb@s)XV+4*i8b$^n?fj*Og*$+9nhdQPp8bQEAk6Ekx? z2*dsW5|69#e{!$`H(>%GV{oQaN}yD%JT|Kph*~`RYIYEA_O+q>Q{A3(-<0L4)xDp; zJ%VsbGkoNeTHeg~&{RE}9T&f{2t)vSL|%vH#B(eCUJabNu1+G@WU5v4lcZ>61V3RR z;~qq#>s|Ou=y1pG^$u~JG`8odZxCDb8(eUiifwmBB;te{-VT+S#Xq@LYBX!wByvxy zsK~!cnoi-7|DMVyvFX|Squqy6j1)y)?1^NODVc$m=Acp0kz+v#n8vb9cm5yp!YIiYH?J~m~!i=zV(s!4#}46hQ) z90spS3kn`9e#>){7XjIt96(1KW?*+|V)dPppHyVn?vOU-n^Tlk(uFgqkp*oCL)C$K zx2%SUa;ZGLoy8E+H6&d*-0lgY*V0r^(|<+ygVteR+*VgWCn>%^yply;#9}Gsw-B!3 zTJ)1NzZq1Ns%jVI65jN<1)(PX8mZCl9d_Z)p-PapFz=xq%S0L>l~`t%c8N3YQ2>Kl z!P9l8%33mE7XQ;Wu*O$2t_UrmGKqJos_z?5wv<7cOD*yDyAfs{hbZDFTH6DXtiQj>@j37+l7Azlz>-8@6B`9GF8!5h|lqf1^e=T0k{TDD7OeTw~ppn#*4sL;EINRGf$0q);2WL{-$6emdE0( zJDVX58P_uhIVMbMw2J0+H~|5#-*ViX-U>SCd@DqxGF46FD4bgOA}yeAPfPg`mRn8= z(j=+vER9DHB@uI00CWydgY~+IRR-{ap>0Xc-OBl(ZQQozVwq39O}7K`!t8)x_$^Qh z?4CHmUB`1vrKke8_0mJ)4Q8v6ih4LK@6Hk@(Hb3LtF(e1C{r7uLmgYx8sqMEefA^dQghpiT{8d6sRv=#*R6GCDH4uqJe2IoNTMI54ER_( zDKka)?S8{@*Om_*DfKoLxgs?CN4Vpm)7pI5=L#ZFLaW8qln;t}pvZpUb%sXz>e75-afwK=Fi8tx9zMBnyMtJ3q*}k_Np%-ngTfE7 
zIc*yLO&Pt|h+3vS36TPAdf?T`Wn`mMfE32$$7{C*SSJlj$Ma>=vqkr74D=n9;^fJ0 zrTgotbST>lk-pz6qn^5Q=3cMv|C}f3qN0Gt)F&y-YoB8J{| z56zOSP#ln8R1;WSm$pZ5TMr28N#X;7lA+FpqXyizg&ETZJ9rImWE}p?Js+|Ieac$0 zRc0br#_k7LONDobX)9CJ40igYsrd=sN`v@g$k5p*s{kUD7fs_O$NNtN2;aj{cgJQYAn2(ezN#fO$pXRmY1!uRn)%*-gnj*q`ZW2l5gNN!x(&I?Xv zW~*zF`S{_r_h3mp+1m3H^L{55_~v(>w;u)mg3|{}XXDw}Cnih;yW)rqIyCXLgyL^F z89~+J1adz98&Tf*6H$^kPCOK8G`#l?+)*d*{WhfK;kxpu%62D>DL7b*Ep)!%Iz39gpZ z&eIcdVLTstoErhbPZ0x4bp%-NFB??<6S$f6>$0T0KXs7XI>ZdRl@TBQAQ!6nS5O8_ zD&R?@%1j$aB7pogx`8ZGXhq&%77ysks{2n*Bx^dgF(4S|rkFYJv?>})oillF zmU@;D#v_pRS8Z_h0+oi+uiM@=eaa9uI1|JpMrh2Wb8UT7a>EILHWb&iuNZm0Jf`9U zEYZKhgUer|M)w061t6D-3EmfdS|$V`MW<)Ww!Xrls`zo+k?|HFh5idg-d~PO7m!F) zH!^YAH#NJlrT=Xfr`^Znqt2&M@!cV`oL}(28UBomc2AVna|KPczgcHS$*HYGp-aTB zU{dAbZ|9DYt-o-ns%xf`1jj`FN(}x)rZO^}ALvPb98MZdCUo@79KR`(mH^~I!L^U2 zklmOCZ^DrlPAH2nMy2)Fg!#A@izEWPw3G8NKj{`xSd1-x$Fn_TV=%NH?{^N zjHU%Y5bg;Lk`z;5Ltr3jg4NCdfAbjClEL+~Irq*Sh&`I6#B@GP*r+yPehxqDe<<4y z+qlA^b1T%t!zf^OlG5N(gc2Qd*&8jc0cd9;qkfgNFI_$$oW-0Q=??)&Md5$FtZH-! 
zz3Yt`6T|FFx?Wx9x_*4IW|;wHCHAMz_uw&wt^lHyhJ7}V9~?P+n4LhLJ_6v+mEoKf z9)t)9KH&PEtid0m)?$tRJTob2qS;@DbE&t&l3k(*sn!rKDFTXJk)x^g;PToS#+yKX z#8mOd`&1PWzIN`7SW16|DFqj@eH3?#==ZTm)nygA4P0QSpY!^e#zOXSF3DhK_f;>t6bx zW^ZZ)h?Ie3ngeEUu7DE@gkJH(rrRslKL>EUR!ux9Pu~=aU%VM-m9O7Pm^YK*(y z`XiAaln^T?)NwZ*8{Z5P(+NM(owRH1 zkOcI{5@`8>JVP0wQH@nHf=Dvr@OgI1no6@)gb$fl!Ow{jK z`2?N0w46;cg}dWLKaqmj(YkvkP1;$(xKpxSR@OH)r)K{nua*?YtL^5IIZ3C~>>4-0 z1fz9eF;)Mqr@#0^Pdr-hM-PA9yl1L&01xgf7wVft3- z9|bx0^}|56_^)k7cl@d=d-J{K*%+2iZdDR-Clt+{St@uv3V<{lBm<(aC(BNUgx}J5 zKeb}$P|2QZMUKUZD_k-5wtK}p`4clhe}=2;tGB_91Y4gK1IFw{?f0->VgP=kHds%( zH@<$r8E^Q(@mK_H(l;pV+tf#yAa&-7&nTt;iaEH{+O|yEs2v5if4mQ?PVSCU)1$HdGwuW7Y@UxGV8!U z~G)^}3I5jerx*hUaiC`pO@E=+F^Kj^z|3ijkhOn5V=f z0!B&Em{Bb(Q>;1lbO7?6hMj;$$e%;5M`!zd$|I1Fq7v@u z`d}*7djfryHDiQ^5_V}sTbmG5{shZ3u7ALKB%N=4 zKm`J3I`m`o$%$ntl-LND%7`LYHILTDWq3P5=}U8^tfZUiREy*jOND=;;EBPydSCx; z>wh-S(28(ME89X7?Ya4x&nM~O5D;$fB>I4mgfQ9dg&KKbUjT<0@7m_3`MFf1jP9xl zg^v+oYR)wDL!FFW=?DVIP#qWnr}i**lN2i!{`Te0`Ms^kWB*wg{dWUY0aA{i_zLPr zF^cr*8-`N9Esu_ZW>>h_mi5=f(fRK0S;WMrT{etWrUu{1!SLi$EEs?zoJwY<@a(4Z z`j-;|x>h*S?3Y}>s;&Vizm9Jn#=LXh@@a8`PRjGr%O8K2AjcQ|2uk{4M z#%zLp%DFgi?}Q{1X7B;`+ z)8D}pLCE+{4@Mt)p-3Bw#qH+SOeFw>}(z;C#)bUi`P}OT5 zPRlKS_4~>^wq590^=_p9fnzC)qz&mo;euZwX0&*!bD^-vBHgcI36wJ9sw+Imx0WjLTM+4?!!-MaB>a@wxd_ic!+D~k+?7x>GK9iY> zd}ynd%-9fo0EmT;c5NS7Y|9A+X&)yzskT^G`_Qt_&cQlOQ|~@dO;pxP6>iVmU9_~z z2)ENDo;P@j9h0vNDUwl-?xj8jf*w1&jN+M|FrSHdxelTyU1;O=r%6JKt4<Cb2h1{tLHAusFB{!=9*RGM{y5<7fF>AK8*kYWHCnY7xX)%SW;_L?C=gt z39uy5dwmhVBv=BzeBYSC>cv(W?i2oUv=)-akooz*FL53;4Jpn-?`6q}9CIW05~>4N zEM`5687q70qesyzG zg|ps$_r6EejgNjnuz=0s1TacU>#i>F5NzP;>_#2TLPzD^*3J3YkfeV$Q%r5&^R1I{ z-aei)!KoW(WI8dRL}e!+wJs$+SYO_dNo#xm8Vr!PBdm=+kXGb za$FO1A2zCKG*N~KnXI}Tk+*aF&OBgzVCP+PRE(zqpiD*Rqh ztJXR^!*d4kqmB*`|P5>~oimB-;3g0;(%1YyF83#s$Xl$*I_JU+g&YHeCCw@4#$ ztV({7c=QsM87gc2t0R}ZYCm`Oe^rrJw4X4Z?odmfl|_VG88Nmf(y2$$A{^% zriZQJ>7x5Rk<<*Pv*j!?KldjEK#@#9)+cHva`W1V8i`>TR;ag3af8-&c*r)KA>;cg 
zgI3Np_mP{UZjab)AVN1k-QL*GA<3T+{n8_?=1@rw6oY^hN=?jv1u@VZWsUmsg!Fy= zj!6Km{_Ytg1{MYcZFYRep6F2s?XZhLGd`5=NOZ{BmpPpG;}|> zVSkSU>kPtSNN}$rWyOYcHSSNB`TIkb{5-o`&CMrDo}QlWWyco4hh!GkY3rK?j*FvI zptLc{$)@j(jiAVg8Q`=5+(_3YpS+A4_0vB@hqm<@7%?EjGKjB0{)MEbI}7Mhm;0&V z?dJ!YN$MQ|LAl*Gy*uP@mgKpONLSYTNnB6qOo(p!?XXTvK_ZwjNhIkx!Y%+B>rWr| zx7n?u`R=7E3N&?0#c(uob0CiM4>}y$2kLE9&~dW%Saevo=25_x)1{~C;<7jJDGsc1 zrhXJWD(_dq37U5ZLxt=DRWQofMD?g-gCF3n9?30npFX)s0y?^y)->p0w_Z%%oXCp6 zoB%0KKAV-Dy+ezIl$|jM`PH^+n)MfPK(R~`+}fqLow{QS_iCR)SLWYBA!Z?<=n1zv z&RkWE??FiXQi4_+k?^*%ilNhfjns~J7We^Empt=#Rfzgv<|CC97Y4>nb%ZLz!HR8u zkk5(uH@~|=OpBpkNpgt)NmZK>TWH$6t@@Hs8$k1(&5U2LRWb&y(-#0lcNr4q{4V>rvHQgC6Ec=Oq6 zeChuz-Yn%u^=VV-;d{<3pNf3j({5#ELsM)kO-a5d%7xV*&+1oiJ;zH+s?Ji7n6EaP z&lqjlWOdl{MXzT$x!Lh{{Iq=M6op7?WG=j;ho5L`HJg{p=m+Rt%Z=@$<*BN^zQEewgafU!|044$fI<#taD5gWtTd_+{AI zF4vo9*Smy8f8$CttJfpivkL=&Tx4}@1ZynxbL^`Pj3-x^c^l*Rw5>Yl6U!$(uQie4 zXjh@K@s#J|KsBB9X|y12N`19i8o%ft^tig@35wh`P75|sCBVI{7b%)(eN_o80IeRl(s%pJ%hA3K|xG?X4tR zBkoK|7?D_UpR?cYG}G6VbR!v1vq;da*yC}1<+XNB>h4^6!l<0&={z+n_Sr*(NG!VR zwVpbGAwdq80$#RKK>o)9w;f9_$MbdCLT`+9JDRZo=r(8b`r?(rZbxg z6YAK_R{^QhktZLOg;BMvOCq)Ip&zfe{tDpbxE1_8N43D_QMcP!w8aI;0R*=_Smlmz zuzm^O{~p?zB+lur%|P}uV1o}P zWx@TXp?`l>BOnA^9=m${5+7HpKFB36(7%N9-g%TqunVC68UnwT*-H8NE|afLD(`R* zmcCsw_;|6hnb=d1F7y1nA&`ed=E&{s%Sw)};KmZu$*HHs#Q&3o=f8*y9et|)EzMEB zZ{=7U5d~~Eb4mmM+#jzd+wdw5`|P#-(#Q~p_0;-iPV%!Z2tuuoZ>QJXlfj3eumWB^ z)5ppE;SIsMWeKR3l(CkPr)==Ah6HPQnher_#AavwIT|3ZhHZdlAMJFijrfd(uaDzV zxNUvDkM55bW2g74-QRWdgQWMy8P{heaYB#J6r8x+GG>03--kPv?Ch=H-O@?mEr&z4 z#aSQQmj1$n$)nA0FAjg)aQYb1EnosrU7#w# zJ+zK8=9AJFytCCis9Gwy^kChM@G7?s*zZ;Yhy+QqhY|zuT(qWdPv)A)zn8-PmS05m zC<6>tl`A(Ls7+`z%w(8wE6OABhCXMxk|G1dUz$ze+Rlz`fx4}SJ)%Q{v0hW9+{BUR zzs37y{pQZn8J>~%oCZsi`VzGKt|(KJpRa?6C|vMJ$Mg$E#HzUTqq;R=U9AV#sBto& z)_G^nXMY=>iq6YPu-W5;i8wn zD}IR#Wl^iFRb*AH3VV?lXqZ2LH__#`Q(Kq^pMQWiS=nX3}Ok^G>Z z2GwFKKSfjDm~dVeSgg$_mepo>1fYGCkb!Ki$J< z0Tkxsd2cZ!>63_Fi32K@KyP9=u{-=9<;|>h15mL;^_5;j)jl8lsLnK2ppX4k6nhA(M7{}&yZ;-{$NE2XVLdI=BDypObONLDswo6G}vR%8-R 
z29pFE7A~*mpeI$cnRTYUMv!JU$%mP#PQo{8!a+xWd>2I^sBKyK4g z@j7nt`TVHH&U}imZ>o9X8sBoA;_EcklhgD3)kOa^`Wa?B_WU_8|2JEVS)^xZwd|ZI z?M6ezn8r>LQL08SDgcsJO^mJE%lLdEXgR9~yjz*~K8l)!q{)T%vxn0}^&?cD{WEgt z^V9Sj*BNh689^OTc81|kM^Po0#d!)(TQTp*83znv1s$SwX%7mM^1^fXCUs@k$Mn>U zEC3q-jDUS&O9hifzVZzrOQ2&h=Q9iYm`m5+hodpF^~2)XeLVdJdUNRwb8BY zs|UgQB+rK`n3}YgN;#3?#5MiGt4}BU$Dbj#@L3PC@M+%6W}$+ivnre_8zU-Wcel@} z(XQd+DhB4{g;p6uxCTf%eurt$^hVS{+w7MNpica&k+OX zXWJ)o(`zp1(awty!PFe{yO_tM?rh`ZTQMv>wH)*8oloU$K*twik24Td*Cr<4l3+Qs zQN=S&agrO>XF(e$MY}OVG$mZeaniG;X=~Gh-=GkLgQ9q z(L)9RygYE#{7ju0=$1U`M0V)g0HP8#GMQf8I^m|T3Ha+6gu^dv9ehd% zAe;#meFJX_PFW8Z@i-FVJdA)sJyke!TcP21CBC&3-DcKRP~C0&UCtlR@wI z(ky-uy(4qG0EqFV8{?PuW^UnIiAm$Hc%7A$ym55mWj4eEFoyqd0&dP%mhLo<%G@^E zJQbHL2^Gbd8bA+~9emJqmrT|36C~D5Z4dy(1JtqG7~m}emP8c$cvbfzr_7hiRqu*y zc-|Om)vr+T)-v ziuBAKB)@#A&wJzszd@Z-glQLWm%uBoe`;2oOSi?=*5ups>$X}St`X|;D zSx<`H@gP-_$V8C6gx%Y*ge`=_xO2=6qb(jZknN~U#`oQ<@zj_U&X?7K-p{;Bmh`bcaoMU}$k+mKf zY9awt4CQoCr;dseK(v7aKP(iGJT)c^-cv-%cuvVAvQxqDQcPW@9!WDo`S+4As-g^_ zf_O*`*`d9CZvyI}jbVSbWB>_|XgJ?*oOeP3(Bj-cS4rIn0j5FA!xd(f=ek=$7pZro zW7aGmop`)xNFh)IIKlftlLw2IrY3y>!JgZSaZpL0^FW=)F_T}P%BQdveVhQ>V*0BISQpmqz#zbJgr``lz(g zJ8E5vc-bH>j>F25Xi6z_$2ZT>xBt}WY@PLn zmE*nHHWI`P<|Wz{IOmyv)HWoAzffb0+qr#mJ95u!t?Zwlhvhv}YM%@uYJ0TQ0$>_)yJrtG2PG*x zTJS(PFh|F&2s7T86)_CZ7%C=h6s310+FuhIl|8x_+cpJHALi|S_OsgwXjhv5gy=~WLGsg(vHy^phef3JM0ayCz4NE1?GGc*p%lc$H;N>w)dtdAf6EgILgP%0~IlCK57_g}VGQ(@{pA$4Xj4k$Uw8>r1k=8^jypYWX>O!^m9hJHeY z)$Ny1*(JAb8_)F$A)5nhl76~9)gkXJBYZ`+1Uh&;2n zfh$Ly->rR<8Tnc3?9AL!5^$b+t`o1yXjjp@z8~T1+@mQ++J^vaO?f1u5E3Dd9M^1d z%u7plx!>Q4uQ)B8;f|pOry;1mKMXzRyUbbN);BA^g56IuwT*ukKC6Cr-|v&$ag(OL z;P(e!o44P9p!cg#$0*^r|J z10sgb9Y9J|WnuumyR&uktSvCfltw?1A}mH~7rcs~byIr_ z?9S-)0kXMxJl~K_&VSMgTKhT>1Dh@u`&TEGsK_gzu%UOtrS(3Lxg;@rt3xC^HagOQ zx#i56l!SflM=r1y>OV>5?pKVvpAPtI^Gbn9HpWF*{@Mj0S zMRrE(nI-fXNa=sZ~Q96!itjCwvGmwh@f=_f50PuMt>Ut$_mj}Z3uJ$TtW-vp!to|H*dApl!aFa z$10jM#EE(Cmau33D8S=ic%$1U!fX=$b20 zGrcvj$W@L8=VEUV&d^1r={x;KLZAIC(PMulsNUC&0lE0zBQ#sVaeG0H9h?831wqK- 
ziQ1D*EQVF`)Pv<^4{vOiKND|KA`Qo}srpMwwI%XgL#p>>z|hnh=;uizsr=%kQG00S z?^uAhA&w}K5=i)aJO3qi;qE?f)ixb6-r$%1EZXo^)3BCS+kbdRX*uOIM`0u~I13%R zn8L%u^Uxw%#g6JhrQG7-MPq@gNC%)2n%>)y1h_}vosw>2Ya}^kZvcWAGTC5#9d=bc z%vbvo6Vu+fPJ9W9kScV=QU=HI-9Fg)n5w)0iBdV4>Yfg2-5%_ zu)C}B$7J}{j|SNLqucOyj@fGrUKbtiJaoW9JcpsfAVO+9vVfv^(<|+tBH{;TbCCVb z;NkO3haK$r>LwmZS3yGr>H<3*|0+>a8+=7Q6Rg^3>ja>(4W?g_af$IEYGm+u@7z2O z-17p%20(P0Xqkh~80J`X1J<^X*}7i0CEq_zb+r1_xOuC(Mtuthg(0kw8p;~Lfo6Xt ztj6mRl)BnhV6d^=;zR9`d9M-7!J_L3`(b50_}Sw3oz~a?FM;aC!3o5lRk8ib!ro5t z#Cxylte*o_zXI$hrrb(;iOtUXg*{wmX0QI~w3qzBe0qgr?$Djs{S4uz9uP3~10to~ z1l_CLOP5|7n3X*}X&w7VnF6RW=1ldG<2fqI*_k28l$1Ie%Lne7W~H4~Q0rlhpC19z zX!0JnhjXAc4TL0Mm#rs)DGr3| znLND8`EAn=!ydm8{2$viX=);){!-qd8kwtfJJ6l0 zh>WnPDU$0r>efA_TI9c523V!@<0&O(?;LJM{eUqadFSmUA0t%Gkhi~>&IBs%LcQ*w zXbK=8TzUK4@(VK)zgMgAoA31H|{{kkw%{e6I2^-lhAFz)s6gNlS7S5JTPT{^b? zrhNQ1_~nS7I!#jGa_5!CA9h1zc+np{#z4)2s@YVb>hY1w??WGWs+5rBzYU)4M3x?{ zFv>eL3%IQShEC|-zI&=hpRT2-wh}ggwc5zhhHwWJzE{5SBUSU%aWqyKK==gw2jLSL z@h`%saUbFH&w#Tp_fp?s4NKbe_L-Jhv^w%F@?UaKc17T~y69a6D&ijdfd6QPGiC;i zJ?DxUni;!&N)H3B1`qST>GUf_$e0{^RCN1Wa*Fz+(h8wQgxc2%%j?`r0C{4K>`v2s zRAyfSL3TMye}BP7->JlhEKk4c_y7Qv?z>ygShWGDF0&>9fy$m!66q=ZE*G-82E*;= z-7LBy^;I@+z-zLI1R&KJQX5;nE;bMpNsN`(T(f`tzPL`zK1_o|mim57*+wKQm$QGd0tqHwr6F}4x9Prm2y@G@0cv@*$KT^H@fri& z%A@O+e=@bn9a;eA$#qjtzw|TB6QytpdJ1&7F0$^IN-+K02?4+xx1~y|Z6GY3vPP`H zDhU@&wIdagL9|59G&`n!h9=Sf~y&^FCGI~n)QPu1oID4A`~Evq|lRT zjtC*8G@#sb-?g~oMPCrW`+0|V$v*A44^AYFp0;sd2lzv-n2-qCnW8*4t09;OTnKp8 zxPQgwimrTqh&ez&qH#j{a^lpgbUV3$OhCm0n(?h;MXqDOA69WeAB=VKH)=s@fBr*F zBB4WoC}k*B($W{uoriE3eyv=7v6c1W#z=UdqMF0WqR$<@3dINBt7UGb(3~ehfsuWk z&QE|dKEfN$VxgV!5PmFrq}6oicfo-$q`B!MEfqG$q@SaAIi;pw*`yRNL*4xPmF4#_ zuV(Y}k6zg8T{5pr3^ecj=YLYdWOw+KFp*%HuiK=h=QK;lJtIhSG0*o7-^!4|9L00g z`eY1UN{jrwFX)_62Fm|Y4*;K8gTn!eC*_nKf8OVNw~~xrU0x5#S@OFOyr2M+7YiM~ z`bbsjlzt@__0tdAez@YM*6w-(WZhiP_dAikG4zx`XCXqRX{KDL4SWYkSJt+{C8X_Y z)0vTT@$w~XOevOyZQlUokgyIXwadrjIKWL+Msx$_uBOFQ@Sy<1hbt-vj8ws?vn7C; 
z_G1;X_!UI|v#aeJ?~D<@e@K9|X8E7xOt>d#zDPe){rqYBpkv1FZJEEZs?o!(rg?6C zN(IOYz*B=U|BATqLU&hrHLt0|0b`m3t}EJ9UUa)G`oO!Bq1cDGI%kWOK?90gtH7NT zugAPp4j(>DizR3x3YzfNU3n2;^Z~9|K2DeAhT3yL?!Yzl*QuewmaBxDT>)IKfRjMf z@9fTt8~bZlG1Q*GTVQeGL^h9DhZObEs!w#aB@HnYvY_y_;QZ}p1zSgg18!9Xfx}fi ze!LykDr5%oWvPPBBS5~seuTAf%J~|DH6nSAwd15z8cyr8o8jzFzow}VAHk#5kuC1^ zfj>_p&)yT*LDuGu#&iZq&uBNuzpUh(sg?nx6Y9ha(vyW*W15#7QHLeHN|kg3$c)Ty zml|)r?|Ec5H3pzD{}MB4B3Yl>zT32DZkro3vptewW!;$&@|z#-(Zxc~8C1tyZimxa zCQ^2e?jpQSH8MHABP_u}#K46ImIn(e;11RrGkYyo8GI}nF$RqM?b{-9A6vA)7VO^k ziU>h}H{fb_(2)2`R?WxdVzg-@2cFAUwX;$<5CL-i8nXXndxX*Ske#coS6k6Oi-Bv6 zGYXUqS-Q3-qTM5GH(uwvDJLhFfO=Y7@Npy%9O<(aPp`~sY2vb#H5(_PQ6 z7a56fa>zAK=(3+Z&;h`_7i@d6s@x;}6E|bCD*Vn+8b9Q+zlQ5+d_{dfuBnD`w*BiywcP9##RY z>_r|$Q$4;d?s`-kGI-&imnl`sDR@V{xT%@7V|sT+?-@6ZYZmW530*yfnJESTBHN4# z?jb4wM{?_z;Y%YKnIcN?N0M&xPezV;U?M9m-S3fCR~56YP_MhL2OEJ-LE5W9J|8p45@F|Mo1_>6-ZR8fUhJX;%7JaBe?a!aId42Vfiq|< zOUK_C-$14ym365^^kH5n9_Xd|z+4rX!eZZEFw;TlQ!t)J9-nAjYWV;8!Ni{bJ&>@; zWc_^c_W*KX+Ja#)o~9U&vb@oiJbn+Wp3di$9mEd-Mwq|(K@G-2<3CFJX)CzggV54+Z^_PA|DqL z(5cD)jOCHKKR9OT<~PZV@qk;qx%6YZitIyZNAm!v4|m_lYc4BwHIuWV<+Wi$0%{$j zl(eG1mJfI$phg%>1?qM{+)cyAv9l;S-KB{aUxX1dW1?)3apv{CB+ z;y{4|sX+yx3VdzMLkvv9U7e!N0W4BcxW20k(IiS;_3552LE7;tKfoK^pstiR*f~Fa zeUeM1KN5@%cnf%C{EXL44g&4nd>E-oejFYK!~n$);4|WaD>>!x8OQiE{_T###lh7= z_GZs6=%;z@_#cy*_0T?A0^=Wjcy|oxpB@gaZU1%y)SAx4QoOYgG$mhWuADwl5wAVV zOrO5>BSyLtCa;!qQ_}66{r@w8l?U4=hF(*>oFpTviMz`|H`n|AYTufi3&I ztjw5AkeHE^1{nj6^X&!2bC7@VG$Pd^5m^PLB@fZ>K-CbIr_k@5fG==TrGU;R$MAsp zl>CAxm8yl25sBb;xBVz=EHvq-zOOCJ*vwt%u ztOqHfdb|(A;yd&8XWutDKm9YOrPHQN@IYu|qd+`-p_1lFM< z|EzDS0}JO{i#%JocDCU30J9h%lDX4AAq+4yruHq6ti{WMdXvVJ3h2Hv!I)VsbgTjR z8ttc8WB(f1QpW7LR7RKfv`)w#83Ug)Is49Iq(&3EEv$77j|@Grx4;0H?4qg6vYq{;F5xOxwgF=73J?s9U#1VR% zoPP&N_F$0Giz>4$h^_IQwDt35A=2#TK$qQaW~a1A2W#=Em)X=k1>?|~D^+~uML-tD z^!g)Pb-DrbLJqn=!=yR0o3hLM=Wb@21KZd?Sf8pdI?qy^Pc6~o4*CLU((ozsjxUMK zkWxLZ1?o~?!-E-eLFObAR*uQ4=D5WH?`}}!;8reVW7PeMlI}iD5f(4Q-+z^@5fVem 
zDs0%EJThhgNClmKno}h=^7E;H*^cf=jdS4Nt2i)gN}B>~az2}D^EW8Tq!4SBql^kW z)PQaHRwN6>hklF1xUsA}Z7vLWo7m(n57jFVV9oyytN6hsEL~@F6`<>rvl|cf2%}I# zUbnvykmgkpUA%Q;KJiQ=@=6vWIt9@~+G?R^vhs@{l~w*VfIj^9nc_lO*|3VA?gq$K zVTjuoaKbM4fzdb-3{MBtkzo-tC0-)>2N6JRc@i+&`RA0y2>+gI6io62Z2ZWWclJyeOVX%jn`jF%4{Uv3>o z*y`5{n(9HKbYy5qbmt|rGUc|=dn0=q87n%Yq~&rtz}nf+-EN=bGk)=)ra-&`;p}CZE=ggv65!xn)kMS>CJ9zoYtkR^0 zdUH&uk4%;0^+h#L*f2Es83%ba*z=ck)*J_An{yJFQPa*y5M-O&vWh6#mJ?b#kz;Zy3Vs4=8XePAQ*259;<>Ot|%vjh-ft!yRY%;G^uM`MMITQ zdFPlHDE7o;)8-G<5vN$8A8OLps|BpE9#Rm>E>LXcyVdemD)*lmk-U@y99JIQtmC#& z2flo+d-g7#X@-_x+XU#yN8OV%_u<9-{Zc?p$TV_@R9KgIE2{V-hw9AyfnY>CV`=Pd zh+TxsUqrchS%eC2?@q$Gy*{9Q3# zn~x}?EU@?`-NX#)bD7GqmUr=YVgRNl*T4kl;*#L{J{U~QEu+@9S({Z}&Pga107x)Jor=z#BNtpI`ldJ;& zxEYPB<^IS)UcA3cC7J>o1J7PsS5j;)x)zjvp^`^4Z@${>Bun&p1s$-F~rTSU>uXUgR(hHoVV@m)!;7JMD z3xIN>fhHl#DHeA*25~w>q50XOds+N7A639sX+Fe{wo;}3_Oi?&Fb&_kHlVehzHalE zJ7l%bfSP`pJo_@&h>Xo})b#TNIWLMBS_VLne{kS{G9?eNwRE8Gq=D4?DBfJ73No^a zhUSNEy?0W>JgHm8c960*^)kVoDLI*Qkg13^F6v%Ac5Yqa)~ZG1ro0L8?i=o?%jtqh zH9+j??#l4SWMu@)iPC7VZcKuDMjgIFoA@G*RP-?`zk4<+iN1Gt+hUF=!afX^SNT(A z8iAJv^MvwMN}Sy3foO7~l$V#RLe*RG{xG`JS65WTcY3YA$!*5rcW-eJo~B8j^MBL6 zNmcL0fN`%#QzD_$%qx8kmQxuXVrd~WDo(fK7j#u_)zh^RL0HkuQS|PV$jRX4vjue3 zHQSjhg6}qL{VjC`Ks$36^k#YH@zWj%zh|qU$~C83wVh~!lW)usjlGlI3tRSZ2%LtLm2)&^1AJ_KxZda_Y1yM9kE`aFh@v4@aW+=E)DU=rpC)yA(OfPp@UT1?TUu zXp9FuTLp5xsvFbX&Wk9~A&xP`5I+v%j1O;EYSJLyK5#JhHiw*P+7zmF6!Ajqk6`QC z%#s5wf1HQU_-y=KemWB}r}Z2W0Z|{^ z|0G>c!MOc|(*kJ$NFzyNLY}RD%Agxsaj5az->-^=0H$X=9`CLYY%BDg!^5VDcff0^ zxn>W*5z+zqDLXWEhaOw=6}c0v2CO9vwBp$8z)jqp4~x#U?5!JJ2VfNgc=4Z@J^|R; z&C}@OOUF*GfBp*#J2vyygI1)1kp*dVJetz$1e@fQvQX>Pa&!!yqiQrXt6XrLinD=Q04c1!_K}KRLJ3#zRm2 zc6qtE)hIS(HhX9u5b#vj0=8t_1uUhf;)$oeWdqE3w-HT1?YdNl1whlSU-KVYE(5JtT5%z`zD+&wM{kWm z=cdUVXnY!yaO`ttF6>Pz+E_y|MM{5p7LVMFTI1E7COXo4SWRYV=EtdLgMTtRsta5y z5bLz6YAithYJ2%9BY1O*(*ifms-o}NZW*#(?+|Knx89g)b5dh(l;~W|!HX3m-qN#&q!e=|@g=aCcdR?OT^!Fv)`?$fsIpLK4zwvsJ#?>I^i^m{ znsMCv&bqkMTGQh7*CsN7bk&k&gl8QqA$GurH)RCxoP4rbAMl#$Cds`%2f<|RgmvR{ 
zR779UEBVDD!5pj_>XAQQ*!gSxLkbZez$Q!BttO+@0ru5B* z3#ycu@+W&JX`i<3)EeL-XZ37+@ylz|Ufcx*vkNi%Fgcp#Q>@t6X;@4Dc^1uGzB8)U z;o~CZIyJAQX!3`2nECNi%=2#4m5DlQ=8_d2+Qw|qWZ-XZodcls-z2n+23fpWx~HX| z!#%3#o@mo`jJwb;V$}=C7x+*cxFky+9X`FHfpYx)*`E{McC;RQW*shY6j+%@4^Qb9l_E%k zU77^<-ad+KKLpI2<%HS(x(Rl5Uc$uSe$ByKC|T9wkMfX)OYCp}GL?CU@@c@g3*4_> zVi15Fl97Y{lmB*v*`-DQ(l=tK#<+?v>$^sFvuA^k04(r#7azHLGoYQ9X!Qtt&~vU8 zsk;?43+v?k8h5O{2hdsp&G-f|h>0zE{UjuOE?#>DZs_s*cwU?78(;->eMV4tWk~$~ za74nyc-kd~2i|YEBjzXeTcJDPRZ(68iSE&6XJFXX2)Zloyu$j_@1qkjYn+)iKJjEC(kH20i%twJ=!A;-LdwVMPI@i zkbYVq0=YdAIZ!YAIo8j-Yi4do1=Q8kNEfw-NF7Hwr>zs8Xq7V7L7Ymd8kZ*!`Vup# zXid6IV-m}D2UK#Bcu{-1V?{;dXJ~M<+#+Ua$XQt3rfIaWb}RMf-j4X3H%45&UH>s} zDW?rSn6iMZ>C{k9(tti<%ArdHE%|9}alD@xEGQEJ>5->oOR77qLg1OxQ2`tNd-Qs@ z6s77M7;Oi}6q886C9y+Ie&~NHp!1xEv+n5v5;$b=C8*UxVJ6-x``IAem}p9I#n2rF zWp%q)n@r$VG`iAJ^97J69+5yR`dvqBUw$LE~~+@XGF41kl-lQe>VF z!L(uT1!z}HAc&z=u;1OY&S~dLmVs_NE&jR&Utiz)Y`cOG)aoG>uiOVlJNJs`iq6K) zLH%ZgW0rF6hTRC=Mn6J#CCv~jZN1Yh@;(83R4OO?NQJ=7Ch9MjANpx8kj}zDi4(|M z1R{GghCH_M{IsKfojK;PD^B>|xodp)?o|T=u9&1lS~sg{Kn{N-wGtMz5<<)1$M)>e zQTNu7V!hkJ_JDUul!OMn&0nZwOUgoCS^z-f%`ZZ}W}`~!@SES_=rIfBxD?qSBv!ac1p%*#%Qo zLreC-k2rLur089b)CWad>TjLh?L4Lkz*3!t93PEgGXLD0kLc(=?Z*E~TP;A`57nH; z(tew3Hm%buVPcPCbKTyi<5mG-vRO!`IYv9{_J!#=BfV*L?$F?gkihV99v0QVsvUlXsXLIbB4#BfzR4WVcAnItuh;CNX+xyJubxcJ;WH zOn7UH7cw+GE_GMX@#s&1AxV6Ha%duB&h~Pq$;&N0Oy}orq2PCu(ySDSfT5!AnthmX zC;zYpALN+z-jgGeeuNFKFk-+iufpg(K>NkS%BV9i=b#vZ0*NopeIYnUkYWO#v_^~@qt5+^p>E4Vy|}9Q zrq3F0r`gM#Zp_DYhrHT&@k(lPVB%{PP1gH?dMEUYJ|!{v!D2{HCtSbCDnrD^+H|`1 z+_dgdG5aQkdcc%)v%_44IgEk?x7Ek-(w*NOu*(-R)0!OAcqY?Ls{e@&g@z3wt#8i^ zqnjmAxf5tu&Et&);)09P!ptf)nZTsmK&|9MvC}b`XxX^z$M!CLmZ``N09t&GVUxxK z|3d-Zj>J$U%02$bW5~^qC^U@qLSF%QVKB{%{gt@GJHPBJdqp#sQBZ|)(%Pi*HDJQM zYg9c{Nij=Sm=R?_eO%I1K8hZETNuh<)L(PK^mkSOo$TSA^4J2lb!eV`q1-r3aOBE_ z?#=u7%YbQfaYld>&@Y>iUr^nmZL>UP?cEp@qOai@E7)m-sQ-DFh%dX zcHohL)mhw`{I@H0K^l}fmEalXv2Wuh?Sm_s-It&;DWr%Nrm`9T=i4T#SJ26wIK&Ym zc|dTEiM4u8(pnR1J=&$Haq=B@Oc=4Mp|ahcR82SH788sxx4!N);X)OM^^n!wYcB76 
z0biI^S;89J?h1KpH--4Fcx<{h5h~ZGfzqMc)a`U!AF4^b zH1>XXvxL;y-6rDvj(%ILf^x}Xl)Az%MN{{?Xx=1(YqfNZbw z=&swFrlcJpy89JBdKvD(z>wETp))AWi#a2AvL*vOg#E-g;x!v3s0m5p1JoD}%r&U| zkB&Yk)u1=qOtFp~Ri?FPLw2)h^*BO4oL@9U9Xd2>(}$8@aEKU%6d`K(&O3SypLUke zBAML!hTJz(cIz$>9@fwd-{`u3!~uD9(R*bBg&2ZRGXNHVUt5>B;H%a2Z1Jd~5LeOo z&i#Q6(HmE02*`7dG2u5#tT#U7#`XkDE19Wk&bG0fKmDUmL#+v~IBDPM@lWChbc?_k z4j+9E=__YVZnnVJ*o&$0eog8ZDt=lyXF#{t-RZQ)-No>;g$T86(ly-1gCr*qNodF0 zXu1nhb8K_1rbQ$=*zT;usV#rVMdS;1WcGsX&G_+0@eu+ZHe;aiJJ_w^qRtp&>N}u6 z9IB0%|Eh*=jLsbD%@pZPM$R1lk1qlD8DfrYdWJ?L_eM<*^PJrpZo`wob#cDMB!KGwj;*d^nQ|WwN>q@0BZY@&&}hj zLOw zWeBr$1}NeH9p~gqOX9+zH4&ReAOK2MK`J$+O{|CV5(ln(?Mpwvt1D}o4WVjvbv~9= z?TDP(biSd@W<`GpUJYFv?hLFI7Pa%JY5xRi00;$_jPa}czj^;Lmu1%8JKg(D^Cx)} z*%{k4Zyn99#bn9LcebOOHLKQSV~?7RoqoI;`r6Sds)i7+!4~S%6ul z^N-r$CHjb*lt5t=MU^bF?3D+m6k%6_Q34ze%1NC#N}T_8vr3D#1znMJ67}WA@u0syws52z&EEYUZAN0y_D`8t*bWqs=mk5gQeX*iq^Ub&LD#7!Y z(!f4#>};;JVBL(F053yzYx44$Im>xlV%ZT)(PbM_lT*N%$DWuU8GDrYP`^qaf>eQP z_3<{4YR&{Pah%+izFVg*{cZv^Y(WtfzjFIioP6*s57I;-f#e}Ozg*RIquZeRq;3I0 z|9J+Jno>c3b{D+y9vd&WQ z{7f)h8DuB7O{kf6TTadg<<~edr{!6Je!_!TNq9XJb8=wT2P@te`Dj4=)BVY90$y@< zE5wKOyXx!tmGNmiA|w9nkSTt|xZz*9q|-MjiUHu8{o7GOOv~wmIg=;QKjNL{E7b&D z@iOW_0HOIvhKZq0*Q{ZwN9@&-rv_GG7#047Idg5?_y71XsHlFKNLQ_GZo zYlN(Kms+=Rp@vSH{mAgdNw2T0{CpyeBc)}cOj_Yb)7+%pa2J?(qGjAl-2{RjAF00F z6(=K3LWi--2sF`+$lI9G1K2$>+PI$SI%-5AdNy?6J~t_vvsNXn$Hg{=4FCxG7t;x_ z=sM-7y{^eh5K@qKjFy%(HDLn2B~SuaTCQKt@eJU^15MQGPPRG-jFif-+!zXo?lY3R z?s-dO>x4k)FuM#Rlf-#>=`&0gWo1t3(xQhvh&TG98#%R_>$6!LbHcCKH0W>_5AhjG z@YRD6*BMpp>e)qbH2KMMLqn{-g_H zqk^qZ#Gk9#sEnAPF6v-U)r)zB*eH+i+cK(vs$_5guer-jin|eY<)MEYOgT}B+*j|J zh4BEYrQMI*Xs%ZWPkbZua5u@9 zURQ{AwwcK=31FIo(_0vS6lCv)@r!M=z8zHi@G-$QLhalkV>_VKJk+NfFK2*z z5o@ay(!mlgqsC^)jtdn)9aCzveyFq=)WX!{w^+e}>m%#U&JIgoj!Jg2$w{ataWtuf z@G*95l{-aAvk{fgOP5J0`{MeO!)=F5`fjANp7HEy#N+%&hL#0(+?&vBU^joqdgnF< zVs&HPaE6}g^z&H@!^2EGQLvR<6fv6KFKN~lv|@i)Y@>!)aDl1yFqZOAUruH>D+tc@ zJuB3C1mL1ayV=~Tmyj;A$IG-PQ8js*m~;dHUQsY(E+$&w#*mzDyeN5y5nt9?kBalx 
z8UerVl!><*f!`O+HzWdG12UU_df4X(hn0rgnhfPHjHcAs>5*dCp0tGsxJJiLj*rBK zCwthP$e>W<&K;6reb3Y-FXI?fiUEa%PF(MTSL8pPywe39FBHK+42ao|*ZtY1qdB`` zuf7W>yYrx`$l~Kf8R_dbXb$^YCos89UV5F~)xY@EbVO<(G?A}^q}3dCrj&&(9`YKZ zDg$%zuaABfE7KHp=_F8{)q1yJamW^?Fdy4c^rBCHYSVCL0J*%OJKZWhAkRJR!y{=f z{j2#ri@oJDyNy%QV|C$HU!^685W?v0LQ=ywPM&l=Ap@tJfM`h0dFj)b`YyRriHSP; zf{=nJX%;q|x`3#o%4THvSWk@F#5kU14~X43W^9pT%xWldbZ99n4o4P8lT8&>4msKD zy{UgjD!<5)o)#nIy(^Bj|2eRNb2mC7`dEc{Xe7h|+qSNmz1$lggRBMRY!g1yw9?1CE2Vy(@5Lni9Mbdf0Z`A!<9?(a` zs-1|ZFD3PE_-%M;K(Bd2_qJ=7b6zMcoS_F&D;QC%hf?;?QOht-JMY^~RaB--W`BBb@GmL?rp#%Om$>f6e zuwkyM84AkolpYHjczi56SpPWsk%df=rX9=EEL(>=;fRx3(oAehJuz9#l+s1hrCrtMkFW_SX4_FMv+rccir3LIU!_mOmnFl4wzFwQx@d!>AhoD zTB45Z5lVz+r@je)3iVdOIuX}6kXuYha_&7xi;M49wBmAt=XBM-Ej9sx+~TudR$i&~ zX(~p4dy~3kKN`4nW?htv#z-S-{O$yfiK%`9CI3^?qXfU7B78j0x6;lyK=o|E*(wOTa!6={O&0e?jIxJHvx6GOrvcet@#9JD2k_iC zH}DhB9Er_W#d?P7;y*QvRz~Rs)N=SGR+9Hd0UW}Br+nYepMcKjMU(KJlKWF~gI zHxtEC$3guekzG?4a~N~G$md;;AZv{ajlZWhYg!tw}X#b*QtT#-|eVm6@>{1XPeKqcSoECMZE(sDA1~Q+3!t3?z zyX(u=B~uOL%>14m>yr<=Dy61UJEgAMuR*p}vc0mu#cO$jCZ-a(HBqL366!F-?+~dd z#j!IzMS1WkpaDR&6D^GHU%&Gb%J>3acefAsGUeL+t>0ZyT3wh76p8*h_#TYe^Q}IO^j>@^#tn8)i@eI;)Soq$3t#aDhjyuoDf{f=vj)re~A!ftRF9MaA9?+ZGY+htim(9iyb#8Ebw5w;%J%p1T?x z@1-ql@wH5{MH;JRQ=#$|e~&F!DFUnkLfwq5Q@lFQxWlIyU8b)Ebp2aaxrhncjq|d9 zoFnx7l5t?V?75w`TarL0U%R$InSz)sE#jt3Pa zd76sMxgFLd`d#CR&x8@=oTAF6MX~Nj?CYktpaM>Zawg*i}yqEGed#J{**G}s6`5nR1j5KW1nt#9W_ssBilqbl$J?x*u=1R0YT$vSVD-{V*HDoa01V3Dp;x#!vZ`6;~%V;|)L z)us{`6Vs#IpW9gS)hqM*{+61H*HZv)tBCL2*E2Y)xFM-x8+-e4PA{#C)f{#Ss(r)M z8Zzf$LhF&OE@>D})UV@=yf_<^$qKuYN2BKDcm{5JqeDw}uYTf!%H7gjlmP$M z({#Dr((_R#%4F=B28QXK&O(#>3UD9g7Bzx{xXKJenvZQ>>u2a|m3V$N9Fn7m)s%L6 z%KxNJAgJC?eZ>2gq&;vEJW-{RZEscPYQ2xkNZ&v``v+nrY=jAu`>JSXb4tPQ{7h|+ zPiQ@V$0>zU3?8!RWYiqc)K($lXv3o<8dgV+Im0c=WP~KlJp+qJkr9XMP8tn{5$1k; zD1OW>COX+0wcjTFTCsov!M%<@ru3rxvlDz#ysFfzuZP6l^LhOHC82&4VR=gomBQ+I5W zz!JIpbo%wHDL38`23`RNo4y-EZM$XT5G@aZ!})|Q&E{s|O|v>}bC_8lvM@}G4Wt7lv_cD8z6<8!QAIX_{+Mfmet 
z27IMu`qfC4n=r*foT@Ysap%|M`1{d)#xx=t&Z=IXs&(f6P9q;9t>GF$uCCVuvMCUV zu%p+Zr37I>r85KexN*ubOY!2e`lUQ7-_3l2qfxWZ58syKn~@GG z3Slh5>d|o({-^1wA9Hkwppgn0k3^+S$YZ+QF~u_kHmv_w3ZG*#fiQrdk0}igi0sqI zN+#LZU+dzJ6-H7yy)mCUIcz1n+NzF=?i%IvNnJA+aH+zKxs+&Zve&xJO$$YQ;g* z4OdJb{~{c&1ggO_F2*Z+^Z=Apxer5{gR#3*h`^_DO1@z}7 zBxWEva5T~1Rt&zUs{(k>FI7R=)4LkT9l>~GV}S7U$YlHeKqZGaHc=R9(XQb}It_|L z&Zz;c7H`-vW^tCf+kbg)#=o5T`pqf8F#1R$6qOk0sSzV9yVZY;CMO3}<=p=RioH-M z`elT8)?d^(i*9Uu%s!~^BLm4EAR6=>!R6Q!ImRcosneDSpVkSYI)w0n0|$Z@rbL5; zrgJe1dBpre)lmOM4(;Sv$AO}TTQ+|*K+m38ToMa-a%bl8h`t+*;2@lff4bHkaQOyT zE__Fpy+8*~>bF3U*52g_S4n0q%xP=jr?(TH?#<}C+s1~zX}@B#m7w@@Y)sd~!^3X4 zC62cvPV>Cr*@E}`A8!EyDD52}vIbPT$biJ1&?qCDNNYzOnIdty+xwImpxnCBeYRRQ ze5}i#8hDgZ<;V8q%HtWS`bPcNGOL5yRePx%_X@9(yc5pD9q5g#gPsspxjd~~f&t5r ziC^1tq+a@gkwvy=Un`i>NNXrL>V%&LNWSkT*TlECvR4d^s?!}n)oLd9_p1;O8}}7g zVhN^lPU|h3IR!v_Nty|}#%Ry6^x(;~UuiJ8k7zw6Su0%oejov6>BPU-SK>tNwMtrrftLj%{VaAdu-?F&djYd4K1dBUCG#>(SKdZ^5fT-fqoD! z4t`zF}~UX5&M29G<&;wF)Xn9!&Kwm&-Y)yOplhxya{~J z7gjE!`+>9u&zP~tgWn&Ub?Hej3Ga}#b^%Sl;}KlHxcTr_&0;5vegDG^^^HP%E+Qe5 z_gAM@guNv1d@3%^HLLb1gsi1+Dt<8Z7Eul)ri{Ws1Lq&2j7a5nPKR-s)`{(77G9Pj)&AR8^YbSa8R<_g< zi+4|Rj*%O?>SVxO5Sxqety5zbfrhKthIb<54Q$fV#zNomwH(eeyS=!!y2YS6%$nqd zmFI_de=MIsweUrnV6S?OUgFchViBAEz2k$s-~6fc35khj<06Al4e8Lim1{S|W16M} zmQ`)N({pYOK$&6KmYJ=9Ik9Y{bK51AAiq_rKW@kt;Kj~_wmvdU$aB@-{>MEis6nWk zh3YHzw#4-FbG1)PuaqkS?%ND}OuVZ&CIR9Zbt)CB$+JRsDl{ea{P3!^bpUc`ig(RN z>X37fh!qnshASFz)a~6&qnJTOj!f2iS?lo5HKO8|ik5a$#!=Ik9mA6F9C5772$5Ua zAHU4C#r@OF{C&j5oU=ItV7tXpJ(J+6$y)a!Y?s&ag(`j0*8wv*CX|PKRA@!Zz^r;{ zTWFk_<(8X$Ht8hcO?lB$?Xjk*sVY=A=|WYu*C>k6i~06@bLa+)sQ(0Jv0}-Kf(WR- zzKU^O`Tpx0M~mujU%q7%9JX> z{XEFtZWA$3K>H4NYu`-TchbJQx;m(z@-*7pQFNyT`SI zWWoB~f3yv47{e8sBxc4LjwqEJoI-mc%XEGHfgqi3MQIpbZoEY{5*g>de(-}jyp-e#U5@c!$77}UqGgq#O@quPeDmpRq2jh#(uzKfD1J$Vwzt_O|NNm zLq0yK4tnA*;+Q0Yam=Cuo|1@o=*lw~!phe@Vjo1;KI?7Sx}lpKdMq+tI#n*$KJ+Nd z{^k`}*a6K_v8r$<+JAgAS4e{iF>^^}8;X0IXsSm>9~p4WE|VQzZG-UHE*aDjP`VcK z5zoNyN=l*1_fpQV_0Tw$!2R+HZgzIb`wt&K%JcQ`s5=;vrZY~2?z{-}*{Gclz(hkp 
z0wPF$XpZ-Mpt?79d@|nq1Jb|sj++Y$d}pVUl8Wa>)X(&^w328uztF~Lh9nkZl8iBS z=`%O%v`qzZYr*g0fR_C&e_DFl8+<>eB3ZjyDp2bg-f*q6@7x)x)WLvQJV7_Q%*g+d zkg>7Ab7z{e%SioX;3!(ct~3w(2e+T&zS{rC2&5ze%yE?zXF6#houqP#{OpH2gTd`q z`coQD_teGH90iXL*&UM#kZ6`GmNJh%oXH3^HghV5PZaIR`xoVpG+tAF22SDP%FCQ@ z6WfwtaVbz(hFG_3#3)bK%rf@}*|)A02Ly@j4}tp9Ku6S2kRV zH8M7_F%ii|+twIgVpm3Gvl=fSsTn8iX65{9V9SO^Ou4C*$l&BmW1Ab>4vkf2Fr|aP zX+CY)iKXiHHya0J6U(O1jUE9&(4vz}hfj~Q zZk$QnL6|<>V?auOB~{NLktQ%bT9*vOI)G^;*@Iv=P$J?)U?t(T*rJ|OdAFwSWiLfO%czeb&nKue| z3WFNhzm?+FvB3A|88`23h((VjeS2J?v?wmp$9mN)(d807Vnch^NmM~vVbO~F!y+0P zY&#jU&>|l}C^oaAk+T2v$rGRceGn*SIXl8QDu4Bpc>kVydy>Yx7lKTJ|3nQKY;e3C z`l7gM-psjx9W~63F_M4?6=b!lGTA#-r%dH!sATR&u z;f}m~RELT;UP7D@daWy{4j6hZQsqpHMuRHM9s8?&QKXxihgFLjOqUN^<|}T#9voZF zI(=V5wD=2SnDtRDvouj)eJc-E4arL!9n7KKSBPFX#LB#!=sFfuAYIkj=rN~>jooEG zW)5?sx9d{;Mhm}YE43!XNRZ&GJ>OtPh=~xC8fLno+Ul;%S^A5+z2_78>oIVB!f5C| zjPTFGH~BqEt|%L)Q10q@h3e-1;oW=?GFwxjbAkm-8*uH?C4i0mw(5rvH?x}VmzzxZ6pfQtd0K8D`(;Hr&hbQ4rI#MTdCscgt&S#%?!z36_33A(Yo zZsb0W&?q^{6i%@BM~h(>+zw-`b^-G(&7kIW(H@<5@k%R->l>FD>E=m{1F!e~ef^%g zNh_-bL6ek8G_i6;IKt%;o^E*3YZa);ogTJxje-HKEOT{AUQ(SlS4A$|$Tz%j#X;lM zyZl%5Ct33qBsN8nRvw8*>tYKhs-LbpFcOcQlEmNdF)EZBjAe<5cOo9^txuwV5%xTa zIaN-1wr~4@A+~;0ELG~=r|yNqY^GC@Cuz;59(g!H9c@p5Tf0#qYW2f7BKLJ=@V5Sj zbeA_n2%N6#6PO*w;k^%g98-H#tH+2W=?{JQ`p!_ei9?xy%agv>Mb5JH`#k>u9r%rM z;wwWvO+FB@?9r967N}Io1V0EAq0PFl+>zx&9Ss<<2RqO+6|SjX!5$`<_9Bl>%q?D{2Wmg<*(4u716xO6km$r=dU3W1o1DQsPl&4f&btpDrPeD$tRCU~Qb?V>(-}U*t7@&WD`XUqY4(U8-2DYRHB~0h`Sdw*Bylb zz>)1_bgDM4jSqRE5jP)zaFRM(l7@*I9Kl}gwH%|FggxE$Lku%DuveP!S25W8$_G{) zh6Mhw<~p5!?hfYqSuz$!7|}SDeKJnm=!flyaD*FC6vvrBT=EThm_Ny)mU3CX8%NbU z!eiHZo|IIV&6vC@nrwMGLv3S_=@VT6y!#|SOInA1D$fETZUWPxi~!Y*O);iN)YO*G z1qF1nav}Eg$^J};L^szT=ctX*ObRr83Z&m9f=9XahsGZoRIX@PF4gC^V#I|DVFD)s zc)7}hNx!usSUaLwZ4YBEemIoh2Qtk+sU!kX zl@^gseCFri(`bLHejupjn_yqtIZ_ZNfWCh^YgitW$jb$ruuGZsOWB}{Y+x(xK`jgr zH=eZrs?#V?g$(`U&9|EK?YYE_l3-GhUul+&&WN*eL#}PsQ?EX4`0y8ZL^=yMx@6(W zTF_OpaX=iCoWkw{D72k_l~W!$b>Lozq9MN3ob57?0>u(?cC`q&^;<1dNqft7Smmeu 
z3Kby|?qZ*(wmDJk_Z&al<9^j6uqw%omthh$6RkpTH$^Jmip7+Bh;?mq*XtDm@~nOG z#D8LKuTQ!N51%S#olpM^!AXj$c1C?0U9eJ+eGiXm)!rtHw_8Tw=(6Wi+D9#%+)&#D zc!MjK&#rg$V=7i1y-5vTlg6L*|ArpXc|GZlf?t`X7^NzyB+Uzzbx*bLEnllE`4 z-gqW~IY-M?C5w%&)USzs6>0BEzi31VGYt7w}qKZ+O63G-gWavd2G; zrPM;2UKV0Q!6*+!0E|nzDv=iv^B&lCcWoivee705F~?(zBFtovz*klOM)xNx^Tag1N4VeY+3 zK=4pxH{e6ee|lngwdBQB1OzG?EQf!GCiwaA|K|56GbtiYse_qP2;*D$r#{wCMfFwq8e14jIJ{73LsPvFJ3 z|J`@;jVO(3wlOIoKV6%l2mVAL%v?`Nr!v zLb*1=erWp8K!PVT`xq>(l)svd{E&Fs`Lmwfz&q?}Ruwm{5D4DHd7ba%XGV@bA^9GT zK97@Jp+Px-Xf4xJgPwdZ6eX>FrO{R4pp7xkmgAK)T1Li+29MSBlJl`1a(-1!`wGzC zB-KB>Xud_uxWW2qVRyY4r%;D%tx?)>a=Edpr+$s*g=>xW?ln( zAiDh`_VrL^889GzR)h6<=cN4X1?-a2<~gz1#vQ6gQ|^jiIUlkhydLG=e?CwZ`t0g79*`XlACNmOsjoglyxXbT-36jMrqk0k!8oX8 zH;GZ2&e1TyUU?1K*>Um~GCMUMdCzLIut8A3^<-m@P zPY-nO1}}EgruLvFYdD9y!d1i}CSMAk&+zA*(EW=?#B4K)*iSMAyDm?+TQTLWr_1~Z z82k*PmLb0E$Vl#y2FJ`FrmHTF%+8h~DxjuAlKT-goL`w0qSlnnPGSK#8;r}^T*TAm zv-HY9My{{N>e{=Te6Awvz~Ac-W@)=q8(u(YSq;Vqi3|su_;@he^+)t0(ltxM_S~ncS){j@S^wiX%BpIV?i15q0eFZNd z-aM+Y>LJ6POhPBf)g`l)`CG=OCE1{7s0#ea2|O7^E0uI1{Nb2PQ=dOqb#!&b({jW? z8}Vn0GH0?`08Qd16!4?1uZA5bapQWqABl=iPTw7#ot=FT_mF;o=U~BD50;>u3jO{V zr1u;sX{!;zX9C8e8qZD;6BgKE1%y4GiQ zRy`khIQRPmPr`RN5O<$tVwpg_gOkVBsILm1+NJ@!a)D?2S$bYxULBUl^M|bk{DJGr zCxhOt}+mB}#y$h&7*Y0HicvTyF~h#F8BC91K^-05wkMNd?Bs9JSRvwb0&`rucI`kEyf5mvl49fegIIL!Eg|D)mX@9GHyeT3 z*Sf%^jVv(0Z!ZG_11np;ewD{FhjZgPo75?gh}F|G46J@toD27X%JjOjIrrzK^5WtV zRnfUTDpQ}Qq%xmTWyUjnBm2#YX- zLDt!3uA^78z^1@iYX|jLOF5HX?uVo~H!9J}9@9DC?)u!`L73qTl@obl=mVoxfbWfX zJ-!YczB-naw85$|%n@v|$%jpff2TBUlh zVdiIZGP;X85_JM2Ki&eAFUE3{`Fy_f=?`gMv`L()Q@LR>niJwu?7d-O; zoYd|@&a%OT+hszX?hnCU%tq)*#$n_)JOECWKHH5}PV5O7aDO^6A~8GzP>(<*Q^wN^ zp~pK^jEk%X7J-89>H@NDAXQkov$Hc*)Wy_w8qag%I(l~P@2)~XGN>YTJT(FAQOP@W zm*ueF9ei`d%js8ClgzTClHyqc`jAxeAo58AbszUj`%ZcRXOnJ`2WO&(uz4FMUR3$H%LN~U|zVbHHRrlP^>fQq1tUqZqt3wOFY4u@An zMUim63|c8~+M@N=oHm&6PPawlH60+%a&$^`KOlb?a&gSZO2+7uZ&dZ}c95wnF*Y1b z2V_kr*<|{%Ip&ypXoG9d&uSbHSG}V&qQDfm?QRyDP}3w}s*a>6|GO>1T;@F_p`?5c2ie63G#W_85E)~rFcQ4_Q* zDG2v2bEZD>Fd? 
zH3A$2h~{I3x|W3y!v*1??n_d((&hrnjML?G{-qDhJQg?k-$X3Yhi0ugJkZwFy~aM= z0@w+47M(Xtyr!+&Vh$hhW+x=(m(aMc0#5#K(15oD;_Db`t~nQ*Slo39=2?M zK>2otQJier$k6eKc>>@Bk3ibV6jz>iiOzXyjh*j!G8Z1_srH-IbE5xC0)g8zHLzz_ zgYB*PU$Vd^{H!;l-v6`*!9cSY;nhiDG(Kx|ji@3cRJ?)avgjdVhFEiPJ2GY#uiJ_( z>7F6+8cfG?WBy6gTupJy7Cf|NV=XWy3hw0pdSOa)#rk))3w6)@Hry5i zK6H9Ml{D&--p%39)TOPtN-X8M%D0RgD|*5j+iN*Y8#?K7_YA!d+yeJr3fdvf#MzSU4_*;~i!eAN&7WtXvu z7Hz7FU2g z7z0MC5PN5&^Vu9cv0GDmwf!A&b__=Ew9d=%USpdfR{NbwKa9bY+B@xvvQ5?vQa!|N zF-`TNjs3v~gjBB{sHW2;`-nWAMzSP>52I0IALpOkDxa*Q^Q-Gg`z@QV{{Thxq$@?h z@*^eH3L=qzj%BDfjofc;FF&pJ+e0%z$GbXH;+Oj-qWyiNp3K)!Mg&(qzQK#ny&wN)-_^5AOLZWa7GuwzjqAV~QDZaN=NkrYVgvt5Z;PZbEH&AoStz>c4rTUqF<`e;AR zXol+fHTSOmAG$z2G2UK2M^~1Yj5m!P$%n+_oDO~|Wt0c$FiB`GIHqcj2%uMQ{%cwG zw-5ilUNyE3>HnT@2Ke;a?t;+yTH=RYS)N;U(dp+CdUH#`oe4O_VSA(*byzC=93O(t zsgj&?$}aDOJKuN}hI2pMR}P%{y#}Tue*9(T8~5-IjE8a`diO5K_@?ON()nYSW!$=s zw(kj_d#lZh&ppHs|xM|z8WzVN_V=qgZ>IB?-t_j-W z|NdAVHBrEG?()OuOaiWmulI00zib0>M6nkmj$e%UuK)d6?NzFuGS4qqGua;RhaymU z`uZ7o=DUG-_l0IncK`cxdkp57>IHLl59r(>t99Y|;ryBY+e z-#gCQpSa^j-!29Hw!tr@ZS2XvrcJFg(M;3Zo580fF8VXC@qD2Pf2VaSYZJM2)=bki z6miafZ#X|(5NnBvUT0z!xQ&?Q@QU!!f1ZVmr>;T6Xo7GzN8ch-&foR3{%if_Usle5 z80h&dw4b}<0w(cEUL|z}L(>mwfCxntS(!bptH|?le74_=$m#+R>WA+-l<{f+35Ofr zBIcKACap^ycsM6tu_+F>xX*V`B|kZ9A2wIYES;sfaJ`y`-EYXUz^az$yfj|$Q4M+k z_%c*@&tB#7M6*~AWbpk5{egw56UwEHjR2{uzY*uDs`0m+iaGf-RKq>Q*mU|RDek&x zlz23R14*kO8Qr3*gV$8P92;Wvn;H>`SCbAz%U0;_lD~F@|GsoP11H5C!$jX8<9+Qf z7Xj12b6aOJt55YxD)XVuLp;?1HlPMllJ1pyM%R$kiIFv1hYxRweE()0ej}`;PH$wM z%TfY;=d`OZ$b;!mt}PN-j&VH?)ri0rXtU{RplmhJJ1=O&>d76TUMsd1i^&5{)B-i# z4^Go=$5@`z?L148BI&*SP8w*ys6UyeZ*C(B@QB%yj)g^&k&8NGt~043XCXCk`6eOC z-AJG4YNu}DrP)N^O>}a{e%Ys=t3T(x7jbo`ZKK@Vc}*lkrzX@;wD0 zLa8osXKUkLNHIRsF1D90(rG*?B{HMf=#SQihG_X{amctnnJ2J!<%yTK5nHyCTi=mf zHzlrxx4tejtTGAb*`A)#DQi?)4fAq4oHQ!CTmxl5+E4S0msPS+z_j5y3|t^j^SIgp z^2CVnd`qL?+R<4Bjo!2^{XeV!`sl3c$D;9ne;2o7j5XhEz zPUM8cQy_~m;P#&~-GmKU1U@UVAxibY!^$Rp&2HNGE?1Uy3)Et>32Il@i!QsBFn6P4 
zAc&z$db`}MIwSblFvtj>|K%>}WBY49N41z!sQyk9e8NHcfacZkB#%q>kXI{O_v%(l zbDECE80YpV8j*wgqF%i7{*d?IDVQRVKl<2;i3nW49nUdX-_NS=&8=lO|ItrBly z>e|hTF<6v=;a5N9rWnt7KRbIIGJ)hb&Rav;~M`aCYh4XG3k`# zMBG(TI~c^v11(eB*r?mCv5lh*RvJ4t^sngqaU*;;6L?)L##sw1Qsh;zoWAlmp-(D? z9@VT67Pm;x^UXRxm-LGGw+gM!`nl4pp$8JK^US>^+AYF%CMy~Z;;0bDtCr?aJ|Imr zYImQLbChfKGR3n!zT!z-?0#{z@ogr1W>u4-w+C z6J#s`t-_z5bB5~xDG}baCD!|H{!+Nd_$>$X&6CS~aochRF-{j&=jT=i=ou-)=RN z(CUFx=}nIc8tJ!Pv?br3heiK*Cmel^tQ@T=p>duQS`~yF#jy5qEGq_X zfc>E2VTo&8eS$XAxt1owYCcO&+}{EfIOAD9L5A{rf%L1;^k9w$3?5Tr*P~MF!I)SP1sq*NGU5Cu$;?m7u6}|=yZsVg%zuly?+{VeuCDC&(tx_ov>Nw0{TRhs<+wI_BHmE{ms z$7w?Z2mDXGBDiH763P?ll%~|NLneh)6;|TzBUd%9GOT^xDPEpQt#tCRd{>}e>WfsJ zGx_&k+om64J+|LQmKl*!Q>DR}yUB`^=ORMRO~zJen2D4C3{LO*tIIZ=4{Tj@3MnDpzMCVD-45$>p3*fv>#i9@3~xpKS}Ms05p&g#QB`Py z`UBbJB&%+)t&?+SFi#$7E3d3=@*z6<<&|x)&{0z{UoJxbJBd{XD`3uvNvD)jsF@gme`@E3K-LNL_#+JX(k&s*3*aPNV1^ z^1=)k-s5+nlW_l8U4N+@%B=j_I}aJ(`tqm483`WR0k9xJc#@-&S!t9ni^iDboCRos zbQz0UN4NM%Tdjc`Ni)aEt~VznX}c9qFCl$@6)X&l3vA?GakI61X;Pp-cl?p$v*1~r zO@c7LcA`RT^=tLqm2<1bDjAlT00+_}JcG&%s4bmOlU0!)ZK0zD(sde!tes z`YL@!Tgy1-UGZYE)a}$OSK5F+cY*Q-4N*&HvowG0>svKGZcAiOSPlgQF6x^a%lOGK zcKGaVF&V{J>*8%&J@4+^09iuC`?w;HSS#*6W~B=Y>H?#DR&s9Ml^@93mcEyCVX9k1 z4GyIu1l_^Fn>g^R{%qHZuB*Vuzpf{)0WqjyV$snpUxbROmfP~ut_ zoy*uvokN}5qI{W2lP(bAV?Gap7IpBnkn1M0dpDj2$1meW_nV@37Y`(J!!Jm@`~3x$ zu)e?&&vIA~g)LMd=Scr&zy%t(U2~u|L9bwtsT^Si#UX_t%lH|->3@C!PKcsrp;JIY z2$kX%;mn140N}`R9^Oz^)_&P+=znq!7%0XdVw2zRP8g;ctTxv{o?rETR05wiAOAP7 z2*p>VeS3bjleHKqV^jNQ@yulW(Qiirf@AdvF2o|>2n{CR%(VO~8pss4viR|L{O1}r zbH^F&8jB`MVMs|+t_jL|(2F41_$+(a`~keTw0-C=Mgy2Ci{CG*Q1;mHuFA&H zxJi9~E2AdMzJdi%;1ng*EBzEm9^ME7s?pMk<8oAmcOyEs5jxRfo~Q>lveg~F^aQtm z%lL{o3Yy?^f&hwehFs8gK}O)k1%w#90+`BAD=CA1Thn3b3nN|Z>Z5-XM;Vz7A<;}nV+)%qIuhMtB-V?P>P3?!USn*yn(Rb_`jhF?1bUup&9 z??KQii+~=mOKM~}tcf(koXU<}P`2pJQcxJa&IeCRp!6u9nD>zp5To1#?0MNQWhQfk zB1HwgDKAB0zt1nPh|mJL;wCz8K`#(9<8x~g;Xro0I7d_@y zr$npe#?-_;WbK&wwnJk5FPrIl1^<%Em_a}nF-VV7Ds`qsf-;ST%)&6qWya+x^3j15 
zE_KD)Oko-^>IfdC?K8HlauU;M$LHMutRekVK1DE5D8zj?i7lC8__ zdDAvUN%Yf151J`?1GmuLcay$Hz&7pzdvu}-^T1ca0lAG{BFeF^2h-0JK~pgkgl3{i z^LHI_or_%|!PAKu8D)X$*fi^#DcyJz$M;W9pi=M2k;-uhH6CUIk%y=Me2zAt^KCT8yvvVr+?f8E#+slN-L%e0l6f{C63?^=ch$UkTR${KYy^*S$g zTQx-;0t_M&8#G~v9Xy0M60wNz!T>;COrcOaoY>|`FRLOj;mxR^Zxn%TVyHLbaZ2xA z3X=XVHq_!F;l@}adtg~7*;WM!F`*+FL<@BGm_vgARWN%`N73Kf%nM`=YhUuL99Rx= zOFvZXeg)Gtu8ORbDX2v7jbP~eFsXH1I}f=w&>7ot4@_+!t=*#vs||HhPuzGsLOCDm z{#(rg30n~8X;>x(x!<^7Qm-5vblhP&XN;EPJ@s4*wgW2lqWupM9yruBnEUAOUoWRH zvxzi)>h>qKJ4*ItX9opBjlbWe40gs1f;;b6V-M98$CDnXd6w5I7)dGEL@2BvX zSb(J;{EuEr6|YVH^uTPs^pvTyh2u1@<_E;)9zf-Rd{r!oo11FuhuVomTZL?8vPrbG z&9jl)i4ixI7aCZO8I8#@o9R5<;W0eU(_|dPcguZhl$nAfI#x{kVr>!i6N%K!wcPYQ zC?E8;*Q`52z1GrQhDnnn#eu=vl##cAX-GigdGqVbn>~APD$i1g<_CX??>&yi{BgU{ zv@)v?2jHm=U0zp)W`eJjMEQ>?qT?L|zveARSQt0NSqVfnh>#g^6CaA+54_XB5YmTQ zkR;qa7+yAR%uuMeZu9Bb4}bXpHz=?6lhkhe>n#| z5C#;!9MqUeXU95JX!O{W6O~Iyp1yT==Yup$7)v5l9^smDX-yCO2j+eA`Hoh)GX_q6 z^G9(Y7SLrYc`)_0?f93%JrXC}8B@71Dg;PsUFjd=(y&xZ zOiMtV@dlSR2qnKf_rHF<9*Zf8uYVbSIaEzlkAkwu5}sWd%7X#wOQLdE8+)q)X2lN? zUvd-u!P{<_%Sr&|X3`qEqcGq>6`oi)WsQlsYG2`kmSYk|+P6+fQ!2jj50sDGe}Bj{ z;`fVP$46Uy#nL`*k~=}Pi#8ki@S|mv=RikUXF*gc?LYi!gZV{OcIqT30!%jEG1Av{{m>i8Z0iIP{NDyoFtN2<#=h?Q4AnIFVB zEMOBX3si!LLv%{ixa%Z$DlOZ;2Ur@|v{K6X5ya{hMN$C%MhD!F=n79P8-)bh8)6A{ zw@#7M=FVNdo?k$>Qz96kq*AvM=n%D{_7O8 zOSVCxRmuiX^SebHpUg>;;HBiaYZ>qsk`oK`W! 
zJu-9fc8=xgUNK=bO)S8#mk{E_qP8gYnmfti>x<$~hkiXJ+PO%-gQD-g#LqQ#%Vr*A zO;98iww=7`C<5w#drX7aEBlW=KDPqww;7W=Qz5GgfydH4pn~2%D#4oW3xMJ8-(5=@Y;<)kPe^S{eT>6{V0h@w4ijqsABd0 zWY7CcdCw)|NLlx`9rK7lFtZ>ze8u=Ox1Q~vg8g@K%Y#|CqTjY>K#5HPOW?BCBGH$9 z8!gE=@y?pT=a^BPpHTr6R**i4Q!xTPL@8Dt4ANu@%2%eUr*t?0QHiAq>56xp z1<0L8;Q#F+z0<`oQ+guU;wF#x*IS8Hqw-*{drMA+uaMK7q$cSVB2`#ctfRF`2tP|GM&RZ z&Nkdz4SRM6)6ymBi}B34E+S@ElR*bH?w0Y(DH6e{7s0Em7s&4Nsce_+X|lENib@iW zB>j>5 z;51OgK|t{H-UV2EWA2V47s^l-S^0V95x;VdIV0pRkm3$$rp|<16}V1F0i8y^b6(yc zsI>_t0IvFsJ`K+8?Ts{z1wgxdWDMqYWDtJbC$G;@?!&7c8MC4aTS7&A_+PCa{cDCh zj+L1~kNyUf?Eg~XC`3Lu6k~fHRr_}oZHF|9%fXbs2kW>vDTDhPL%avnG63K$`Eo=m zrmAmGQFDUbM({7S>HkITKMmjHx0E#}(%1L0ST3yCln6X2o_yTL?MAS0a*Gb);q+gO zeG&M)_v1Ju;(4GX`36_=tjz_JSqYxcn`TgDtlP)~OnafOe)^oxUoaia@p(4df9cHg zBJ%d+BXdJEr#;a8gk!k)>m?i6lQqDeT^OGkr8;WBgR=U<@v0f$b{c&2gbN=Hvux>E zkjzvPhB(yY5q2Grm>a5O4Yl0zcXU%SDr)T#SWXT-`4)!(#1-lJz7eE^E(OurmGO#! z2`)Xr%z@|torA44%E3sS&9xR+e&03 z#5ct1!-myuPUoVggQ9*^KwYCe3}KXdEde&C56WFI#@A&SSG{IW?&?H1{M=M79isHd z*dy27N^eJVbT2TD?zqf7mjJJb^n-FbxdOmlt>0ymu3%gnx^ja|A7f}IMANr{sPAHV zJ^%kgF+yJAV#GmVj!JA|QBIB)sL6uG`G4_`j~-^h)1= z_39K!*Q$L`N0cgkYklo|Gq4@KB_EB}6^r|D0T1#1EZu|`8!mh68^7JILg=Qv?J}wu zdUEFy4?Fz(MSx~KI>u$01KF02)9Efo`Ic!4^UI0UAZNV<;i`JXCy`VvV&|e6NGj-G z%|Ocj)n9}VjR9g?5LEC&nAu%EEW){8Z!YY~2>*FueJ`dqUxqL+wzMyTP@(g~v3!J{ z<7AF`L|`%Rt=92!M~9tMF49t$L{fl?*e(2Z`%N1P(go9BAQ#C0SfNs}+_;s``Rn8MO4P>@#8_de&PbC(k_EW*G+6E1w_#pn_74qE+xk=iCee~b|Vx zoWv571~eR~WDQL$YIROwDS{R~_2;H_tV>s(bkVHZr@I7n$gt9}GYRgsE;`QAEAn#? 
zECEaujp|-c;PC^X1SI5MTa-C=iI2*{6(^ zUiW=;n6SKu(K5o`CTB?Z#&-@g3ma9+kf|5W+Q;bwuNI_MK3_t4iY=@>Xpa6hew?l!jlLmhpj#tesajOCx##;bi@*cp-yppk{SZ_&{SsI&whB~72Y zlJMNZS(QXN7U(~a%|q({C~E&U86uNNh9tbz6!r7;>&TIuG$r%2i{$mVveT6Kwqp-; zrvVA?5i&Hu#L4%eD=^SoYkIp}#S1XDJQ1&JzmO_@crJ-Xb2S8Wi@ z+Y=))G1Q?yssp*_`jnr?L|CdR-rIHN?HjJ1cH%giir@_*s+PQt58-*j1`I!Y*Kl2C zPBbDtaqL+d5m#1K<(TjWJnoWqVOS7MmVoy*_OwMf;R| zg>o)>OBymGsf{+#8?@vq<@FbB7}%N@;*JF9m}6yg-N`5MA@j$*$t*+Ei&S?Tz-5Gi ziNhUz+8Xy!2axsO(pLvFVyrgTowU^T3R`^?f49F}r0(_OF(fm+A)oEg9hm38CSIR7 zuB7e56NMlh)R>6l)a0?a4jqh)nZ6@8qdu0LRtmpGi}O{Sms;E#$jfBbXe}b=E4S>y zhmpH~@YnEa8h5|KSH{o!KJPXRm~lvibM6;5?|+Iy(Q7EYkCha$Ho8J;(tigRBXg60 z8|R;!N=Y5bQ~~%RF08UA*O(i!UPSdUIL+G27}Yt3J|_AaWA3i@VCHLTWvE3NxgCk^ z&_`tA9@8IIKSPH_4(=MD2?DS1*L?GW*U$zlx5nrFNQ*lCA;8lTWaKQTyeBu2aZ ze~i6#T+`p%KW@EMEEH6bR4J7P>6l4McPmJDON@!2h;%no=^E0~iiFgV8Vw2p!sr@o zz<%d7@O|Ik`*DB1zwaL^ZfrZR^Ex}{x~^wjgQXZ<6BF|erf2nNljfTd@nR?{CaD>V z)z#F1)lvSd9x=n(Hx>(uw_2v1Z@zgE1|11r>oLC8|%06D@Czu zw?`^eIcF?(fFG5|)2Qcp-J|dGILV&L-Wdl!FqZZuVlsaS9;0;At@vfYAgz;7BPU>| z`9A+q;F)*uT!+}{B8(6B1?JL8aWmRDT=b$I#f?<1wsI@?*BLWg0MJZkk$SLM^let0 zCz#hqlWUJXJJL-|{t9qo;Tp28JQXX#w!A<#qFGwZAf-B%ziG>QRUSLM=*YeWx%G{y zNGZo*1hPl@vL3|bQnH^?E7uqcVHgouHb{LcoA7p&aBal90N#;Isj0;Q*Wc)k}>w zdg3b!w{<=f-+oj4`}s4l`mbyExCRiAEyG()$uPk?tV3%4t{{2aJo380) zYXfg)gQpDPZz+e4seOr;UYb=L5IIZ|nn8keeXk4vq%bQ8AWdAKLxHlm&AJS95&gq} zu_M5YWc%`ROZ<=8>db2N@X;P5v5`88;B5Xcm+`wgNI}x;AIsCN^Qr@2^ki%iWGxe$ zi2LPia9sx!F=V#h92)aqiI)7^DI zjtoyAZ8Oh#h(9eHF8kaBcYQ))%7zp=JwI~zNiu(FQfQ%r1S}r0vM|`T_MEovRx zb`m)=;q8BYh_&G)3C;NZ1l$@hu6u88Rw5)VhCaTUMxfZByRT|1FdxP&v!eZd;@>N;9SBY zb{~sp(BZ@rhY8{%W?TeKcjP1xl%@L~ILU=9&M#SL!4pd8zdRQK#9L;|4klbyXVMMg zMPMTVO&VpysPr+YLbo`N(wni7N%)r?`GyS2X1q6?#3800$^29BNWzT;N$erMN$_;D z&6TxKhrKM~k)Mx`nqs|Ujds~l`kNVFB4x(sOcmdV&HVORI(JaGfTjcK)cgnY>Sw!3 zXfub3Nsj%Z=2TS~rYdA*>-}jKdz-W#dcv6yP4dDxdYn%Si!v7W8L7O7$LJF1j?8p{ zdJ$7g-$J3Q7iXvLl_IXXB4V*^it4Oe%#bm~#l>C!CUGak=%Wxvq{nEe?eNUw(?PSU1f298{`*+T`Y%4I(GGhC-%<0oAfCxnaF&pL0cc>-to> 
zS(dgknoT%Ya#3*#FtK|4cNwnU7n*M;8knEFdCg%!>0*UWuA zMar^Yc06!`HQ|%ItMD&XDyjf5UCj<~-`MO`%dwL=yq^?-Cl)v?*b-`9*ZmZ%qLmod zyK*B(15nhw*M=!<|BJyEsyjReVT`&1=Sdzw21$L!L(7jW3O2J*pKc7+$9C?ZH5O|6 zb5-Ggwu8KN&1GvywyNdci-KYQ!i7@#!A_4jlY-Je-b2`CyfjW`4DiQO4pc6xkG-R@ z%o;+TjPc6*78wHB8Gb&2D*N>OgA(?FXRzxrxJ?14V`F>PFv`m70WZdJOTb#gvCLC2 z0Q^{eko8Hr*lB5KH;i|Ba{}5PJ>kO8a?(}gR$JOTKHQ$W5CpNl=n;9wHF9AS1mgz% zAK(mw%(m&l^Jaakr79a?ZpmAw;lq|U`zSR<9YvmF^X*o}( z>=b}^i!?IOF>U2Ed*SL!s&Oz)gdT+De%QY@+vYJ=ZIo9uVSc4+XHI?0Z)T7F50=`P zdcYiB+J%ZojtLtjY_63!weW&wCkJ_Scp#{D3_?bNWrOl@Ug>BiQ8NSC$GKzIX>H5RBw0^}By$+soy!H2R?>GG$m!=Q|q$)&Uh@GV)gw z3(LfWay~oPUr3bgrEVFjvaS}TBama=ANx(?c~QNe{NX3gV$}0SJ;fs-2&tx0H{;S) zA>^TSgHX$J(hl4c+l2W7XZ7D>`R{J#WDP`oR?1%PMty)>6Bj+z4KoCF@zC3rR*qA6 z%}Vf*ecY^oyb&1|%bN@S8aIX{jg--J5%MR+VAHr^*zp#MhqIeYcpb*g682_uA++8; zr7x%!Co2zlBZ(>eXjW*ux|aw7!jm?p)VbbhGg&~(xV>Xt9P^f^?b)N^pRqA_%9%ev zK4_t5!Xj!qK^Xe(US-IwG-Z|?2qEywo4Nz%b(4CPXOh%X#{8Ncd;C-uNLJ$0Dx+9K z*D3G5*>4oD;G#_8(*0;muN5|3qaF42jkf`P?01(9$WssEah4!Fgz`RqUk$%8=}6Q1 zXKlKh&G-VW?h%LDe<)k;HpH!gJcAdb;Mf`BmsxT0xG_=9j@O$I8`e1fnj8MC~me{yB*Mb}EYEk5a>| z;NzZO!eEU*NUPWL9@bN#-=Vz})hIKgDUadPdWzCJJTyC_<@xQ2tNuTxcID^A`c zMV+_ZNW`$*-te>R3Vccg7TClig4`^UtakKSLjsKgo4F`eODD(dx5HD|1ALuS-?MJp z>FsSjDr?2M#+n8;QSVgao+hba?sGAf9Cw|m9&4)>j7N4;^I|ovl z$aaf9$motaWNd;87Kg(DI)F36xN)gV8(r`Lm?|zYEfqi&7Ryk(u&3P=UOV(^KWm%c z&SJo_)*sa ze*4^c9iwQe7NVE)Cc8mCuM|@iW9N8lJ$n?}o0^4_$Fx!t5;Z7IZz|DV=0x?8E=OE_Q;|18EmK zV9j0oyPUXUZ423x){?(LD6z-WBvENc#L5c&vYn1vxJ`NT6O^m!fGWH2e5agg#T1#2&v1cblr2W~9-;=%nSsxc@u(qghS3r6DKvDd6-vRiyDO~7>sxBNGzk^HIuNi zSeKYmG2*oRi&n3~6%W#eiGPKLfmS7R-V@7*r_=9GdwpOKOin zNj(<0q25}l6T8WxHFeP)F)2~;IM-JHBmIn^7O>(CKn5M!sqN(UvX(XSMhc4?HdkJV z@d%s}Uca;X`zS%wlo0y}u+{q~d&jlOZypn9EwiomJVrTpvT|W!QJ1~ITG_+=&#roo zA^1>Q0Akvf*h9=w+{rdQfHlhXJI28S@3aYA_NkLo$4rxLme}MSoZD91+K!K|kd0Qa z7NO588!P!s3xG1ldX6SW5@J?NCBKo~g_}si0-JemBR4a@gL4Q52yRxxqtAm!*$z_^ z*8iBAq(KCY6j0jx+@<%|BmL1G&Zx{u(Ito~vl_rXmky)|fNwTQ9QW&6 zfQI(x!OB6_vHcW!hW&dYB2;2jBaJ1B#kGh<`NY-NVU}AkP8Y$Z#ox)ZuFn>;?pn(S 
zcMaZ+eK+{+-v1S5gBU?vPEFtU)-~TNdVb>C+2KUdY|12<7`&4dx_4yuk{Tm&?9)25 zJ=7G%O{+XrH8>0xXKqv?Qs?chNU{h=&Qttl0BEzH<4^MW9=M1F()s6)^NcykcFdm%C*%bk(f!ZF7_J>)yool;C4 zm&*MZrRhu9aVE-A3{BarmmonnEIx62;muiMwbX<0{3IA8g}Qby_1E4e0wB|_?B&rT z-!HHBu|26km`~*psJld4U6uyljvL{Ni-{se{PmuJB_ei=K0DdFjH$g%^}3TbZPZcp zUjF|I1pPiAa$_NAlJCYANs~z;w8b|-XdEHGa*DnZ29Wo|OfHioL<;u>usqO0CXFIV z0y+RGABNk{6<;OAh>ZT3W~1%9|_v-20u&jnNy zA44{-z-`e5SOU)8!8c8fKlOqrc)-to_KxLkVGvML&eJU85HAjqJQ))x8M-G0e${X< zb5Udr*yLlMKRmR88~`SQe*cG~`5OCISI7Z9Qr&sN6Bwxjx2U0ETwlFTd1PIrrZa5C z;bjlWXf9mm|52d$f1^mH+NMBu!H_p-qJXvLU~w-K)3aq-;8Cq-#sSNj>-`v4ztf9E z0~;qZQ7R>B;zS+io|B9Y;w?ja{Dm5BVwI06eMj6oZkKX1|JG262PEYtL|VQ-3Q0N- z*vz6)^n|nKM%ic0S}S$*R~)smlUWK=$xjD~p957F_k+M)G1}WV5RlX2g$r}Q zChF0eR8?yZr<=zE0#k7R`v__uG2x7cPu_1SJx zhb=)7j{S_@Z#jh@lphdHqTs_XFJm7nWpNExYxCQ+If8Cb|9PAV7YWet3SUGKqNinx zAmzkpcfODLLtS84DkfK&Yx!j;JpL~{Sn{=PQ0u;JQppVw2uOQTw&fpZ%8)Z*Jw!hi1nxVc&aBSz-P@DFt3xae;aZKn$ zB$ux>MU?T9ep;U*Gl|jSyxY2}zDgbfGS%&{3A(-IfBDh=(4gWYxoV%MLjFh*5fML& zfBkx9z~h{0)|HqsDk25n`4IK(>lRo`_}&EmwpLl<@cKTbEKMbBWZO0Tz$YMQD`6qUX@QyJvK!P}K^c0@3!B3mFeVvJMIhTtf6)lZ@hz0Z8vNNA==H;nDm zc|8vOa#~(Z-a^KfvRNAL7FW|s@`csVWx9V}k2$xbSObk<4-Q%?>O0IkrKr!1j-wG1 zR3*z~%@aGM&W~KzAgdIzqh_Aj2C#%8L;* z54}J{Qu!^5^sNO0gZLtajwICXG63h%+tv`wZpwdHd%5-o=z7*P+pB8;cDd;3)Vcem zL9t)rmVg`JzMAC89VdGas{j2x&+Ea<4{LPb9|A0ahN0}Y_Dt1hWZiQ#f|(@ zD=5$Fh|tDD^xeA{vY1J-ThMwt86$s(oX%)aRP{QU2B!}G6U2>u)tW8XKZyUMX8XaJ zmiI-TMh5-|0i@Jaw`Jp@8b3h)BHsxS=w`pmY$wg44qFSD%62q~n{yp72l^}s3JXwM(iSMI~ zN7+@nbg0u#Xn~SK&aI2~de^QaF}6Ig_2A7dO&!z(X@UUZ(0UXk@$>Oy`zu`op!I{7 zy4K*u;5?5}4B`5W#FeSU(mc&e{kF4M#>)TT(+ zxoznscAgxr!>ShtU<{~qv-W>xe%-cQ##%S5UayclW~mc{cZaC23^J4o>) z!&S=Ivqlcv;h)CbIj}bJE*)j)<&^!=b5sCL0Br;Yh5~@DW)!jzX9kd&G^ElS5(P;* zMLwlrscgvC6MNnTwF68Z@MlbsQ^sN#)7&Bb^iX)fEApGhDJ4Ac&f#C2XS}+iR4CDZ z-Ty;&fcrn7XBKkGYNrn$k9AJa%8?r3ZSg>vtXa4_F#N;BW!K((g&o|@R1bS?#tshc z5)JG1bQ6OV)n5%ieA>&U6njJ)ox7T9GMU~I;3znGSlI|4*2DeErs1_&k+UZCOMtal zZczWXRe&pl(uvTn0jOO7=WzN}tT^K{-2Iz4b(?M!%rar#w#BLB&O6YFfSTMe|4`i^ 
zj`<-tRTs+lC10lmvhmk&p1^;KOZcR9{VQ?Nb>dt(XjBtrD|Uu?yK}3}el!P=1x2)E z#mvZ3p7)K$rCY9>DIX{wfwk$L%X0;elRA`xW~$YoMz)Z_jJf7_h2?y9mi!$f$@`SO zqH=N3wr7c=_Z&ZnP`EAtz*BDA%F81VNCuvv!lB#dE8Iv^L>p*b)52%)u+BiO!&>y!iwUaQ<*VJ3jIyO(rpdD$`rBpKj2nkJLaul> zZA%4R9uEMsf2VG7=93R6Nt#Hw{=w@`>EWUYO92~*$yqC9j_48VbV8J z5!-pKy`msEQW`Kebbs>+$>5Eo-v%VS?Mh5ajsOQe6!^XDc<2OEgR)lPw)_5Z--J;x z|Bz#geT<&@Fj^*gra4Hum~6_y`d#C-WAvGb2d(+d!fj6Ja$r(|^f#nID9cDFVjC@vi^-9$k?F9hk1@$s$c27p5-WMm3i)Zo55ftZQ;t8hR|Yb(DT|AZN0(jymK0_od{D;}hf6j6Og{3^`XugprlL75oORET%iJk+sD8gJ)k#!(>eRIR}}jwkyEb}O?&5BWQ}u5 z_6y@H1biS+PMIKi`Ul;t3!Hm?g#(I*O5HVz2??t8#Mt!5^6L3h^F_(^pU2}=SCdN| zo(*Ux%f$t>cyb_5fdU&Xd)wh>&cB?N=@zxQg!*XhO$}0&EnokuZ5*o> zuW5V;*c=g~XY$oDJQpu?KBh+5wWj|?1q;UB=M4l%MdGwa za?{76lw+CH<0NV_UGMUC>BIx0gm|U0GbzYd=T%Mm$XqiHxW7a5$U-&0Dn@5T!G-Q6 zuQ2~|gK0Gn6EBNChaR-+q;bogP%Ilo95xp?v40soiGh3muI;3X-|nN-YXVQ#bszOIL|bwGS+>;zrTGoPMa!U_~V&&=CW0tOguP&^{_t*0qNy< zEaCc+*B!KV&$NExfRSa&o_4OfL}r(bwLTEns|R@`a}ml7qygBz=#pe~WR5j{6d9GC zNbn6fvG0x+PJ&XRBbNUe**gw_ViHl$>F0?*vx}Xg_^ph56A8uMOr&cI-v9b0V66OL|B`-Hu#W zJ3#k%T2qy^B8$0FZom%tsV0X{?2~uh#zq3s%T#0xV5;9>?Ss5yUOpkc^>x2(;tQ$b zC$Ew?bezq3J{%B4fkzSem1~W32FQ*?LS%G|SjwTY^8Nubc(H`b2Q$h%Fxr&@e3zi9 z4gdQL|54;d1Y!M9G{Fb%KmFENa|rnze$XtJI_U+c6@K z>j6oaGJLd!dbRWAd+^0ydZBK;o_s!{6tMRm32>sPDLHb|+SU33f@3-era+8=A+_~; zbE|oc;mBN@Ju40tJNmnBhI(ZEU&`vsgXC-#z@^xp-?6_VGCIhkNG{{|%&tSo-RVC7 zx#KM!rn4Obg_PLM2B{}v!&(CL?)g7io9he)bP6QuZU1FSrheJK875;_P*ykLxbRhU z(KqqujA!WZ&#yFpIQ14!2d8Vg(q4eMUAf&Ti6{bGL{-a}*FR{E*FCJRjs;A3xh298fu0$&JJE zeByXP{y2!BJR_}R$o2x}DLk4+C-Z%i4--&v3Xvj7ZfBn{y2LBi2&?G7lB9I}>v zN?1Xe5MNhaK`Kl87?y}3Fgfhy)+#0zVzkw{8UvOiGV$?*C5W4AwP~~U&w%AnGB-T; zUwY#(5CcxsIjR|z)V%(Ha~JX~t!PVldI+|8aDbwsW&qp8+WtGje~RdW)GHb)IgywI ztT-^T@jYQRM8fVqe1kAIy*1eOIFKQBY?4@w4HT*LBQ&pCbdAicBaNHNZ1TEcveUln zrgmw7BZcjI8mLhxPdt~=E)h9ku4IM-w8IjBVfZ71dPJGFBH1nm6x@k9)~W{@Lvj)% zKFLaG*aeQiyane$XRy2pVQ{s{E84sf;q%5Ncj^8*!%7Y)xrR@EWw9#aDlz2e8YnE{ zwl=O)5I|eke7cXV`OtYQ+=hFkg6~tIa`2?w%52kTqniFTE}?R34tq7YR@tJrMG{+c 
zopFnVem<^|iuOoU7sA-(K_tQKJi@ub5c&?@^8_f6@mY*bS-%xGaA*+?`xEcVas~K% zL}bUNgI*^ZT*w;V=SC1KG27x3OQJkB__^`jqAgpy0Z_NEW6P|O>(`@9IE3elP|Ptx zy4DzOHF%o*T9rkVE{DK35K?Q|s$|tD=|%kjv>^K&_VGh+6rQLdzWK1_(ez35o~VVq zk0_=cl(A=Sx-N)4N1Rqxp%*xbrWTo+&pm{^Fps$N+~$yFoG#~OR3U2frK!PWsA<{2 zrGdA5p=us!EN{?~%)?#Ru@XE+*2|lpxl8o(mUZ1v)o1w`5+4MM$|{egW1 zl;D(mDFg11#Sr>4KnOz?Zvfwc5p)aCgGPFKrZurijWWI6O5HX-e*MbsL(1rXx8uu^ z1j{d)uRyYC6B}Ll_!4C~1U9zCh1q`W_)cJ$aI>{x@e6k1a)S~dlkSf!cQB5d+EL4| z0UW6z+vCl7Ct9>BpXDuG5kv@tI$H%BK`K-y$>*oT)?dBec^0@c+2Gx-v~;sY;1vL~ zPRDle8cE%za$hgoEx#%64#fPq+vQ30jxkq9i&dM77-T*>iNtA=;rJ6* z)~`_P;5orqSuk1-UHl4gD2fQ&) zN)NHAVOq@P4z<9+_nlxF!6#A+My1nQDtk^OB*TIFF#BnfkH(4iKI&=xOq63|nT6UP ziCC@lNNGsZW$c{pIdQ{dn(k<7CdGoeCr|q|5kt(SEm>o6>fG2@rsc+nv!}l7G{J;v zl6M7%in$V5iPsO4&=Y{onM0DWxxW|Oec>N>N(2RaM7v1qJkgxJy|T#q5d_dBzZJa! zI=L)4InMeC9@jl|KPdKd^aNRdR+ylZ4urL)p9#xIS31W#MRPxWDwwrUm6koYM|@9d zIj)(f4WIh}wQHWdXWU^ZAnN|4bWy*oGlTivP39r3ta>U zprDO=j?!|xGO)nHG{;nv)c#i%8M95Oy(~=8IQP@XSg3ns6 z#@Byvu5P2F?AEl!s_)cxGbn%FxWQnuFsuF9puSn;d}q5dl*5Tt%Y6!u1qD+tu{4pXUa9U#vNr&K@7^&C;= zUUAhi)3;S^rzjSAtb+>&xoo#y$64lB{Afn~D93n6439laxiw%wipaIGHC`anomEwB zN*lv5s3cSx0(c>0F)-wPC>zT66d%3sXKHfOLus6s1+j@lY^z0pJ;|#g5A|c`eZ%9G z`_iC<*!gG^xQg7(7I$C>*`_s({)ihXAYY|6fweSD<2eD<&Rx4scCuJ8X8s#%$ocv= z5YiM#?;JFU3;FGup%Z#g!f{PLTRa+#9%1L|1RJl|mm^d^7zqlV9HL(b2p z35iMlXmtYv=`xo&)mdX^uz`_u)4qQ)yhz&j^IT**%%CZc%x<%fcDNxRx@C7UNL(Xl zZtSa=So5YT;Mt z-_rVjG74GL{|yvk_gK&NZHMV5o|Z`zo~b;-^P$6YTgRC#`(NP{6cX}b4!LY(w=9nX z_Ct&-*U9t$a^1wA98+zEUB1;GC3YXgTv`A6mOVcfNw{}%9<nwynjeUQ@(hD?#^R)IBgkY>VE zuRrDf<`z}Hhx!;jB^_n*EMVTJjeSOUPzA!TPrss^_wmjwxqZ8M=Gw&R6(ABiX#Wsy zb`X7@hC+L;)OZyruK$w1NYc>B7#+~i{1FZ=4XnK}uD>M@!?R&hPm=H(B)OQoF?h}t z*kDf{9GQ5MLJB28&NF z=ttl2@daoDG2jwDLEcXEXsK+sPdI`=Gn7YPA}vc$OqnbyhE zp`%@?gGLh__9vCyNuWf}NaMciX^=&vL%J|5(5wYTaap5OdXZHJvowA<4(LwJDB8#} z=twUi(+`Xi)ApKY#({)LXx{(KfbLLd{Vk!i5Y}8hX5Z4 zZorKWKf22%?Uy#s2KeA1Rr^n}Njd-AA(;_~`l5XjZL(#FUd|Pk-ANK>WdMwch 
zK4sux6WO-bk#cFb99mdMu1k<6TgKUJkC5*Xr510*_!>j5)b`oTrjcQ09fQE-;Et!a8gZsY|_p0{$c0-*EiTvih{=(~xWGPkE z(u`jF{E3>v?L*0K|NrCu2;G)1mw%CrH(RfjakmSd*k#K;kjT!xBjsO->*NYfP7@sM ze@Qasq=l9)LJU*^wwFu#;?FuF2(yQhOG}ct^j_jRQk<4f0d@^&BTtb58th-v6p0EO zKEql&v)=@a#I7G^&ISrEoQO+uVRx3R`YWk{uJ6HrgM*7}hmCw#CPg6w7F#BRWUuun z4tMvSw^q5FJP3MZOej-F>y5v%xinotxD z&`R?@cyIuE612-M|BVn`K}xd8Mu|EZ`O3QPEk_=BxVXf`7?rjy>&Kr1?Sx8qx%Y$W z^c|yiBU4w(=f3z7e|-OrOwLn$`+slW^?X-M40MaEC6H%(PoL@R>~s~(gW7xV_psx^ zMSg|yOtZv){W7L-m}@)s!mN8golM8BjfVwJ64$JI7C21zn7f9e z-B>xc-Uj}!XB7WXQN6>~>naB`W5Qo6bZ}T%fxLyWy;6p_JTZ}0BTjg`H&s}dQ@=OD zIOj*MG!m;s649n-z3{mt*IZu$^-iuJ?$p>7YFoGQ1 zaQYlM;(c;*kRPl%&^)C`c*Iec(R=@jDRKKwXN@hY#G}D`yPs=kLNYA~Svd@i{La?f zkpFe$e{HycB?at;sYhK|`Av=@ZrL3zLsT`p1iA@)k0@sxm-nA4>;c{Rc`imey5fR3 z)Gk|ak8qS>toiu4ol{++R3sX7&m-_%qS6;u6B^f^)TeVD<;wg5^L`{!Q3MO$K32YBJ%LuK}DvH5;v~IiBJtaNfK1h(qAQmPS}^} zZ7ES{UKsV0Y(G*!hhcyg(NMp52>gH&#oX8C2f_jnKj9Yi_im%!rKttI);IE+1cqUzrrxGSMS6}|vH1;ST?%-7CmL;;U zt|N)T;H`;xqe5sWJtC6}nq{j#`+9Rve!V_--`4;iH-pds|JO@M6|48GhWueri*pT- zO>dk6kh<&4e2264lj>fj(S5+aec$`aRsCQ3bTh+U9IA#4X`p8GrLn4VgO$|G9{NF# zFgmIa_ZkyoD$hC&j+4f}}pLXeX9XENR!bb7=OQo<*jxgm_`~ zNN;}`(L>y>9;3PTEFJ9M_dWtC`t56#+_E}UN{56jOREP_P*_0CNSt$SOBC8+!dRnSzwUUlbDme5J zBiV$*)pqzQm@)-W5Ir{aI5z!qHw!J21U~4?#EQTpzbUae=_(B;vJ30nfHtb@K94aIB z{&z#^gwpN>*-@>7qBc-g+;~@1b4pea_*q0-X!O1;d3H=eYp6#@x5OnDqtCAv7Zy6t zrj(-mwb`z}9-ZCWcTHUlUU7BzAwso|tjNLCR|=t>&d8~7y&AbEyJtb52-zPy#_3Xi z)RJQw_%Qs979Tq?e!0x2CV=DU3tQ63W*qa1F&tNUza+-3zLcN>uW-D_o$8DVYh7h4 zqoeQF%i2SqJi@#ESDxY^?F2jSAG73$VYfffq1X-?mrei<5P`u$X^E@7n6N#)=@LKxB_|Uqb_HM-DU~<`P=l;q0dA3D~uQ;8lr*zgY#FOb(i()kS z@bpE9qg(61$@#{5mZk;9wCQcfd3S*{?^)UWyz4u~bg2A>T~pS4`P=xU_eQhw4h%hUs^Vb>|^e1z-)9?R0n zOCf|nX&hH-!__mMyKi2iqry3glkNmBda3W`i<1IgNnf1%+vuI~N+KTLso1E{L8^(mt}%dBLCFc8|`d!H!9-FMu*tG|ogOm>X%Pcx%%TXS%-=Hn?#&fOk;32*Fc zT_-zr1@iNM>a-}XeZ~7G`mDq*iFs;PuA5_ht0gl>PAScIAw!*M*XB2`xkvG9#iw5!*%FDY$36Uz&>lg{|r)M2uHGFK77dg<{wq zJ(uFMS6Ww9rlgcWesr8xd58SXW)#e9&e>=?-p2>~&-`XxUOjxEYQ02s?NQ+w;6fxV 
zFPVv)kkBH(EFT8jEVo;27V)*)r`y|I3fd5nZ6j+Wgoe$n{XD}sXHSG@`jnXjN)eIl zrOG|aNP=>0oKbqfa%vtD;StLP+li>FURT)r9qszwb_QPnwQSnoJ^R*;|nT1w}W~&UQC6re!6)9 z=7En!5!`&Gh*}@4B&TgcP&gq(pH`LmpHm2rB0D5|t*eXn6&USZ8Y2&b0dW`IdZcag z6jG}L!={uPu~~E?^NsodprGW-o0CcppljwMCm21D7bUnhmL}X?w#HnrkFX-Q(Xe!w z-qEdg26aT>UdEZDn?AhRo`04?b=+b|n&j{`4Ih?~#2xyY-#3DM%_r4WY>{f$3spBU zjrIxB)AN0dqf)3-0BSJ*bEEl{K>wXMrSa_6i9gGhV`T>PXdlcqvU!URoKDmLfG7Hg zYwj()6U@m#JJ}t0UIC%gKMbLzIX+;Arn5NE@cc2b0`(} zt%aGh$H_1+=)^wM!xa);JN0Xwh%vgv*Z<&K2(W^Hv&rmIE8XYzx`=hk*!oZRV8fAl zdMqI|(*uhdT62h40~B#5r4o<7E88Ny@sXJxA&sm39d~v1@YZuP!?bZdAJyKm> zRr8bwfAiBsZtQd+#2wBX<(Sv6=X?%3oA4FU^Wt=TTd_D0Xk!`Qt@|=Vk<`;3D6&A4 zxpqe24v9@EQP>$sOl7_%W)zf+Z7Pt_;&$RXD}w32?U858qIyPCSSp6?{pTNa{dUjT z$xNa*0_`~VLbg(Y0fg*yJ0<)c#U30uPiO$Yzh@Ka+79hlIn3F>xnSt$V{BaNLruQKu2eG{CiB=O9{V)Q64$YeNXa#8Wz}VN#vHAl z0T_;~eLuY&m6X8}8G0@`cB9S*-p4)npg>1ZthA?zDrZ5pJt;I9Gc}tYF*|X-*|ND} z90NNiPa~Np#F-a5_8PydvBkoK@YdQSryoEP6|%0{oJ&diO8u<|rp3d(@oo2-uCWL% zqO)(dkgu!B?Hi(pniIJOI00tgVT}hC!ZJWJe2J>0 z^xfWOHvGgsHFW5H(WGd;HRtDGmcT@47oCK?1N@t799e1G zrFa}0x0!U@QLLba92s%XcWo!d9u}&(p(^oBF+*}6%#Ke`lzB+vjB}=5TiYXd zy!v|?{1dN9S7uGLNTQBT;*@^u#d;UT$l|-~!}Z5}9~!MSg#|R_)Gs+bhBF9g+HYW+ z>tP?386xcV*7Xu0b-+?jVQ;hKJovWZ;nLg$%Nyn&)Vh?y<>Avq>j2^+b;%Njz;Y3Be&9Gg>!yU{Of8Kzk+mTjn`YPr8$R&Pq=YRPB_{M9HYa0|=EP5>e|MthUw z?BBM0KH7*6+d9H}!cHwD?YH9G+R79lCs@5A*s3XLx2A*xu^Q!tmQlC&m(|Bk5}4b% z%Pl4E{`vDx<(5PTpN4-E)8*bj=gFmup5&XHMyL4q^>#g_r@HhAR z*L2$Yb@mpC18E>XQwxLPV%_#OKBI_n72nF;2PRyx)k?J)2IQ{TL}Y42W@jG}l((2I zWxxKD$TFA1hJS_S5c?zZl5;N}V@{i4dxpW=0A}f7;J!DOgpcFLcpQoRlPJG=ukP+z z+k4c`e%|n={?9>#n^e_{^71jOzU|7*=ApJ2qiUbQv_i?%Oa{cf#@IG_)`nZCsh%^p zMyIPz4VZ)aZvoh)Kk8MnKk&`)%^Zmg3wG}H`} zhppD;BE_NW`12V+ zD=8o_>i#rFqr|7ObH3L*;GP%N#kB)+0bV z9JZs*PWas=b|x<^_QaX~9quBD-M1{_3@mrBYmKY-u=%}5%WraY>+4;hy(&y6YEcY5Qq*Y|$O z%TE0v^0P7wkB-4t=^Q5xE6va8arc>;OKA`r-|zpb)xkFn&`k|x8RVWS@90l#PaoHk z{9#ZpUJ#Li?E8jH$_|?dDw`2IyRpvQ(YF9g#lXqW&)3a%%@b&AP zy`+O*=qT~r&A9>J9GG=!k%*fgDgRclTruwadMg&(<0!_tY8ZW=7Nd`m!m`r9EM=Qr 
zms+Pm$>z}6JuAy=Jri}j3ZXKm4BXn;Hdo4efykq`3j6E6fI)nok+b^hq}Kh#$`Konq zw2u{-HKg1i5&~iVBv*Tb%KvrjsZ+JH z#>uord#m4wEo*cuZL&htyb^JvT#<;AoZc|Z%J;~D|F|eyY!bG8>2J5Y|5eO3G!*gX z@o#2cZd~uVCXr_Vyd$VZ$*Wc^~@?o-pzwYK65Q$q$5|X*| zEw;-Ro^d{q?xwkbOA#qC#Df~b@qDF%sWDKmuiFg)(MKYZ#o zr*4y&a)$Q3cefPo9Y1}N>`}Obw1v9N>61=OQ6Bs8@@8(Vt%`i#p8Q_-sa@;Q@#^eI zonSEA+QkglZ|+Klycknqg#YUCt1^=c+}B|;@UpJq+rE6kwlW}Qd*S}x0Ia_IU4Eg! zYCUcae_pVaCPU}i2Z^X71<2ZT-DmW#s^d|~XXoXNRY=`TPV=V69Emf8pTnMc(w!S| zHnFnq=uZk11APX!6a8j)mYGKGCj`U(1VZXLRrwfU%qsZTozAr_Kn(SD(C%k?#MmsXPJ`Evf*8c zIRuMLCADPwtr+IpGk(pQZshridspr#jIm(bC9bTlhc@~@s-t?uGfz`;j+bzZ`utT4u^WsZ0Nk+0?8X)3Yo$<|_T>YMrnFXm4M(YWIUJ5hL_C&=4`p>kB&F zJ$NHl$w+mx6*UDwyW0YlFq=Oi_^=$4=OKZS%o-Xgy>Zp#a~pH~OOi!Y zkFgc?l}dg-P6aIz+%_$S^l6QCXI(aj#TQ2m2)MQHexfUWshmFgg`*K4RSFq|?2a7y z`Rb|@KTgbFK61fSp!uLRstuAg=6Sq|!ajC7w_9bEK_1tGe z562L^BJ!il6su9OnP=Q$<7&2?jo0_LMnSlO_`w}LOTptl=6n(jYj z@ej^;BR-E|gg;EfU+8x>lC7S-P_SMz9yYeSCn%{n8LSj+ka$UJU>)z-dE`iG@}5f> z2bDB+k{_v$HhLsX?D{t>r|#}kB}As%*KuymuTSm6YwV01 zLQw;4g`U>G3k-tw=7qQBsaYi5l=;Qwa5c#rulQ)>c)$?YqcDB3_?=$m0?&@mJPR}u zqwXP6Rrq}PgG8}D^N*wLZMQw2tS5)nO7VLZc7IC^Tu-yNP@lLh$}_gU<27IT#QbC3 z$csG_98KpR&P~^?otERF!gOA)!F-zJYOyVn^e;N%f7*N3mzbvu7rt|7vI#M}{A5*b zTu1`dve`zjR%-S8*y4yp%jo-aM~)~w=SI1)^*ua#0Og*R@R47tut!7;EVeMzs=1Fv zeBfT&nnM`Akbd^-nOyZMO`&9*!Eh^f>vc|+r+}{KEM;dBAx3Jqwfcz?-udj^2iB!M zv-xh)$Ka{_&$wjUsBIHD5? 
zETK;Kq1^lB&viYg`rO>|)H?Y&ocZSNx_xeOv78cY=RJO7X=&{1Bz(Q$xpDVsT7b*F zhXQ=xKL28)5m6O&$oAFQlAic+%e^(>wyx>ul8M6y>#)=wEw9pv>LESVu;Jfc^ewLM z@6kBwjh%Rbw|$uL>ft6i9i2q_alBlYsb}oEZ1*?z5PI67<>OwZ)i|CpGdkfpL2V{4 zu~Wi{)B`c~POMwRCEgXVha4GH(qQL^Hd|`pJ!LSU0`_C&BZJJRwToo7-C2+~ z>HVjf_n~)xPzcA*;er6q9S+L%KuuJ%=jMt=$wX17wo`NP0{MUpQ$YFqRBx(HIlkC# z$M5?N3g1fhvG9VB_2FehxOm(03kQ_5gKOcN)OIa>FYDioadXoV)*BABo1j743OV8S z|JT@e$FteC{rA4Rr?!fsxZ4^<(bg)84ob`#5nD@%t@a)r+S(PXu}5u^5<5m~M2%2X zY^g0qVkU_2Ug5r<=l4Fp&-3}b*I!r0bzbLr%+;_JO|>Omnpqx27N-Ty^b3QSn81c84^+wao0z>(HtZ+Tabpy;in)OnlA}&C zf!-ZmV$~@tpfD!tGLD>ho4@@#S*-+vzgNaX;}RtUGpJ7f=%0FtpguCUdRc>z<&rc2uII}o}tCF%7NZmCAhiT@Gl`MuQ zaf2#MEQy*U}VT4unr|yc93yX@hM=GLC{X5SrvU9i`f7T<^C}ak^4H0RM{k+7a)mSCG^o(ch++vZePo+_M$7=dUi@erMtU)^8oQ7#~dk?)@UVF{Ze* zj6u@ThRJZPUa8~%ADTQ^J2RVvc;@Y0&294v^B2-!x890~>>kgXYTYkR!+WDMGc#i= z1oFLwc!SVHI;P5qTiigGi`~NcaRCX1O;QG zRfc3$wTG5Umv6_k+tw1DDu*mbw0Prun~N~QMyBwkE`9VnGxnPuiZ5U-lYzf>n_&&$M!*Yh)Ii8f@N*jHtsP+Sdfihm3@_*}j$zakTk%zlK|#(TvDX`spxrt@$o7%{AyD z?&;-k6J1nETsOB7BG9yu71|vy$eZ(8;X|64k?UYZN_1WCc0gfKv;zdLYm+bruY$yx zklWc>|FTw{WM-PJIwLGyM3&`vq2(oC_zUMea@lH?P0VeTP1(&fWX@YLb-@1yN0fn8 zp;*dGTQ%&s%aA6{RBE=G;oedqI?DB;0@wqeb}7}N%fCV)SbX<$>PTkemDFn{)N7j? 
zaZy!17{uN4%TdvVPL`H&nVIbRZ}}Tn>megYYDt1AZ#9|3O-WHZWW1@~)GbwS1QDpj z(G;3fM{1&)afhwTI{2x9w2i{1(02>=2AoErzSZmaoG6RsHV=g11dHO9pC6?p{DSK) zI;@Dg8(+a3OmW(gy4;y&h5)oeCs)O7U_RO%M#aGQQcpn76-ZV$Y>YA0I6vI?v7@37 z^ubEr6GW8M$>ZTTa>QEbZ(WBos)_p3mBQ;|ol}KKS9<{N&SHQPywRs@Q&{QOo8E_3 z=A<~UVvlnQd6`c=ppBTt_9KC-aeg*u%tFw!F0%1T=7^rw3aOL!Y^>a;=^T5WAlgP3 z*VF4M56;Kjyfp2Nw?$igD;&M~TF+qWR);%I-%7P~hIQP`nyC>P72U!n94v$U_4U2f zUPGG2GtA`|TKc76aWiS?8d}$j@5VBqmF>bKEaMfihkrQ&XV7&o!!q-Vf*{jA9BzMYGO92uvm`n zpMhmb^o2z4xJCIMXOLy>ctfmm82)^FkVc?;o(wi$9n^fcRE#B4r1mq@Fl+RdH4JIc z${9jf&**_uIgje6E-Y2PFw(Iy8=&ZW%+=hQ;O}~!#HEtY1Jx$kgWv>i9g9X}C@kCc zU5QlQ64bt6$b@*+%kKFJuRCHp-cts{ZQZxK-udOVO#9i0k-G|y*m}gb1#~IxcBFuu zy9-;BI|F9L;_7OjR&a104HQx*${^LgimkU_^o?-+#7ipr^-WuojFK3tNE4Sprx0~B z`WgJQx16#zQ7w`9Xhg;0o;!TUzD_?^25e1}rHXqI;%ozZwL}6toa%LKxRQLG$DSxf z+WO;8jCoT}fqHvAA-DKT5>DhY!2EvAJAH)X%Y-b+t_jltqQQo}e;f(IHFkzPqi1QP zswMkQ{_UxOv$hj`qZ-JI^sqYsW4d#HTR^~M!jJgQ#f zPDJO%Bf854-f@`o`Cl+0IciWc`&g970sI=84p>JN(vj4`_2|jr)?VPlt4H$}S#m!F z0P91nmOlVBvj6_ek+ORS>6(L&)|LmUqJ#UN?fO1sPD@FNy^BkOJHTnf$TMfi0^;KR zgFp<+77Z)|{qm(S9BcY`*_W*5U2WSn1bna;mk9n4%%0@T`QQ5sl>CTrEUdV62{U^F zPWSkFc4Z}Pd@w5ZiY*9$a)xIC;q>c1$hkg_K46DLwdUcHJ9^IyS^qDc|AkIRl>2*H z=qU5c_zw0x$AKSpb^(_(>ap z_@DYxOzHt0H z>c#@!Ss!91PdsPxU1k0OZnRU9tb{dNca@5^o10r*+A^tFKR*kwGw1PZp<&M%RHYhLICE$X?aH%>YCaQ1Cmvp^^bP{M7{FM~M8_tz$qr+#nkvB11 zW|?$>LrOq=VlbVcx9^%#q*s`uo>Bz-EC}cz9X0}FGo>9sbO5tYe^Mcr=^>hDvGUbg zeK{Jq=@5pzqM}X?sB;(N9YD8m+qFYLH)fU#)X8D4sX-lM^-n+$vbKW4J9Keo=3_d< zKTf@14Zt=O#9)I4m2d4m5xXI;vLwM?|zKI{&gRKeC3O{ zMa&;GI1lGvbXHFHsn^svIi-o_PpgrW?;CufVra=PqqyK9b;)h1>P8j(u0Nh%mJQG~ z^m&>uuj1S)HUSG`vWUQafAT?{i(PCiA9lIL^P|p!`#{Zz4|yJevYZjvCChk@>?}nH z2nzPCq@8!J%&?>*Bpp6Yw{GUB&`|9J;e_f+*Dl8UZ~IPq^DHt7SL-`3k9coRB=db; z^Oyl53biq`Z>t2fLl4yZj*2NTks(?9x&LFR6nyE@B^BkW=BkkH9o~aI2tUsOlDPr! 
z)6Y~Q;@e2QMXq^KPmRE;$`tPw2#`7q-dFQ&NOG(l4F2fq5i4_%Ikl#)6lvJfDk{nG zS~@$Tp=+=RkW%_+Og>cIAA`x|by1~mWJbsgc?01TK<8W8X7?cZ$36*mg+R!$_vfUL zWa0=C7^&&e+2r+OT>uRnI%tpOs|iob9Y%0=BfmEu8DC;K(4dSaC9(TiL<(@^qWYoP zDppZc45mkHbaJ&g3HQ`)pA~o7Ym`p-dQ|=;5V`5P?34-DO`BuQwqUbZv%KxIW-bYl zulKi9DkgS+0HOfU3VrfoRDeHFS=5?|Ibd;bDR62z+r6Ef?u~;w{Q?>+ry~=<{&KH( zGeWi#yw_Q5@q2Gg-_rYwBXh_w4b=OmJW@PnN+feE^w7?vF)C~oIUU+hYjC@ufLle8 zF}i*<%a_QxoYw@utIRGpq+rg-5)SLb1N<>Q$-seYK8dj#{?$eopkk9sQ+m)j)}w}S z;g-BGPZIs(^*84@+FEkIH()CO|G{*Lbpml-1X>(WHVP0ZQNq2HSnXRzH#~sIv!VYw zVhlN^o+NDm5P5?wZzh@V6 zagGU|?g@&afJCY^yiB=o(3~F6<^D0OFXHIV;?`=Qs1EsoYBjGx0*~>6yId393W>R1 zd#wY+ueB-L5%HrFsPP8}R@)k!nUx9nKDMOSXH|@;9--kM??^Oh(%%l|D(VOHJi2+$ zyF$)4>kOG=1wKgnb$5JGV&zFI&}Om#e@{q^V*3c3Bup)xF!Z zYdR>T&X4>-3Si1Yw}^;G_eb}Za@B+mwq%2_c<27%K&KWJ&ykFqO74>{qX8KfrW-AU zVrH&-%F-UYtZmh0;W~vFKDlquz$0{5kfV`0&qV#s`u5lI8X{5JI}@-45rFoVZY;^R zJKFDFA1v7vvWPKby0QeewJj)NJ#s|s25^2M2n1nGsw;~p4Q&pw$5o<;ovHKl)@h;_ zCd>dYK@gVj%~59uQU+~rtOMeKICT-VfHU@JkpT?UX5|cN@ugNtNmz}^IM_3S)5iWi zD59mlm1#^@OKXBDP|kO^X&u#~lN-6Zvz=0DN2;eqa2{GSA%D-{_KuE-q}X1OA(Pba z?qEeAjBkcU;IGQNKClq(^8T~Eb2}U^XAP$Vt`6vkn9uYbMMXvUp4#J%ndSC@dfLf4 zxsE3^)Oe@Ua6BNC5$bZ$!0Z?f<_1g@{rW6*=|6mRoB!cHJ-Yw?*?zd@kba*YcQB1b zzdiC$@ZX>(NH53W>_U92=fF@GB>x`Sy#HOLxT9nA9i!~w_<-EcjJ`}JeuGs*V)QAh zzr^4MejiB&2aVjiM|YBcR-`*>A>C5cYQZzk2~=5NZ;tqc1DMmZT7!4IAk&n90WxZb zm+?xXm}^5Gq$WVZJ`Xao)_sN>^ny8zcibf#C{rD1(9k{j0}CAn)3&F{P~}vz^oYnP^_d@{$B(Vi-!^uT0J2Kd`fG3#6Lhaz7Jsq4&p>0 z5J59L-A|V@8D8u$imBa0kcN7uqHWbXtF%8f2=*3qGu)ESjG#K31j2N21eLf}Pswqn zzdIXrHl8rOH`Fzrb%|-NU^A+l;#&^Z8zzNdEWW6{Uuv@c|j5PjbVPMjtRzQ@eDN z`veWp8}nxJ+GHYfY#B2L>{UCsem}O7t|nD`5%Wcv43j*6*r9X#Eax`TNYeuQ`XSCb zbs_$mxm|h-B3v^BG^vBT$@O5H6Y%V-4AGQPN|~SCSc`LnMr!`AvdRiOrwIyCLKJw0 zPqiaL{rb(@x78$)um@Q6Icn$%Gj5A;5|5(4Ip~=MS#o*1w@7x|3o=%-Rqy9pW8631 z9>-0YtWob-gy~!8`~XsxDWPFu2N}~pT3T9cfzKt?7wVx0D}XXEOHRY&gn*A{YKtw!IW@e3KJEM#c5Z@=$wGAvxZ zMj#`_JZrd$i?r6gXS>v!&S8YVC$SCo^7>3BjuiqYDGSKSnNCbhV1UzdzO`AQ@~f)! 
zTO&F8zz`Fjq+-lq%h`WM(*x8d6S6FS%AVwB>*8Vtb5vO%|Uw|NDc`}@5Ot>q=!{Kr@7?88cD`9{;Z+)3#iN*XrLNMW0kB2Lfjbsa(uHYU?s z_-zzpM>NCV9IwicZ<+KTA$nOu#&$RA6XscLWJi-&0L}cIcZ#;#IyUN%3W&oW;s+-N z(H~_whZ3hGC65Nae3i*rC*D?N(1#yFG^<*O6PT6G(|SW)eb&YvST67BrI?T4jFR@F zzlTV#jIsKc75=PhJNqx{ePc)g*cJwq#OzIz7yhY)K4TeiE`gi=puDe~ssXwoj@zAd z#iHs%W%U%IPZJ5cBk_+49pN%e9^GGWJ-*Z7qoht7PbeSQN9<`g_^i9eu`&1f|DH89 zt&C7!`RD)7HNs%j)DVBr4!$3~sn_)aDJQ%iJ!C`^COx+&KYsHMU4k&;GSyCMORBx& zjifq9b{f8?4U;4N?}mPUwc?@p^N_JsfCB7wuM|dL-K#F|`lH>%91kKsI&I%T4<|R~ zI|k$flwlh`emDCtQlrKbU)sAE)lLK<=C5QmtBw%tXQU|R=%{lVVjgiV=F!|K`LW}> zUmn8TwxNOI8@7W7lOJ@F)2~xD2XSUDFCT?{ZQ#Snyg*IT5E-CSk5L+Duk2B@-Hyyi zU4{5`4JS)sW_!+9YuI7I8~VdRXM+G*quSR#$LYM%M(xG(fp!SurxRs}(iRCmW8)3P z@BsS%=-1Ei>3}rhRSa*MoV@-QyfF1KRIqT&!HRKLa-&8UbebdlUrr;Pzl{d?B5uM$ zhYX=IE3havp-j@ld1TW)BmgyiHZB&-DhXFv!ON})AV(1YGMz`%m5G%?G!59xN>y5o zI}SzJRcK&KEO1O2_TC&Z+Nw+S@mj- zt-J26*h~c%q4WX^2$oY9SLmdfz>dagnZ4QPY%22aUkBD!9A3tGw`F~!O4->Lb$$t& zjOhf}+k2qJgbmU4!nhDpN9{0@)bk2pD+7}%Y{!FOJ^H1|J1u|ewY`OFmWRE6$KYW_ zQ@}|pqYk{iIDfSW^i4xiUmRvO=xgdIzNoqW-zf;g>!>#WK1*+7z(eZg`f~}^OfLrA zc4N0H)OBkcSSTY+S@Dtou?Jh%?bbSJxHpFrNkD&taG0e|Al;?@%)lTcGxJ7VoHvsH zpOI9((l6F?UmDbM7>u$Dr~-V!CRP$Z zzt$l_@qL2_%vPH?MMM`sGu;2`L1Kw3fnzT9a#og4tEj3Lt|xd?xv{3vNbRImPF@b9%+oeDDym88JgwpE zA___3BdjAsY0^m=MYxS{v207Vfd$6*#8a2bTd@?py#sm4QBaRKf)cl}e(&o0{vIi* z;P}3PZX8rTOA|^BI5-*{(8>qbtGS-@X(az5%KAoN10X&(%ob-gv}eEYNRN&llV_`t zgd`PmgB|o7A2Q?%KEgg!eHb@uJMMo|%zZ+7Ce0&ztx(tJ4n#GzR@GCk!65rR`fp}r zyqOLNTIL_jEs97J03z9`oOn=jCdI`iHYO%3Il{njbCRZVCZP=anlmlUX^BI-prW(y z<0GuHDVGrf#QJ_{W3CUJ&vuoc-z8ledCyTHk9Dnf9sgkfufui=RM0yctZBD>2{Zw! 
z*>3^2x3_6&-~_N-Y1|+I)9oVD{oa-I!;XCOF7sG6q)jjcu51IF_P#kP*J(IAFPGnR zN~j2^AFCRYZ@kGAIHrINyk`Q_tu+rR?EohEm5A?W1ROE_$AhH`TOp_xj0-T7Ar+ZF8ts4|^#c z_T%^NP(xrtNm~R!7DdJK{*;$#C0 zh*&oqNB56kU+_YZ0Dso$zHP&`5p-;Hd`{UTc;X7WUwSE8*K>fjKXaMMXVhVxlpM7$ z6D2SW4sK7C0o^q2mc6Be8o&s+_ueSR8Wfe7$Z~#*Dc0R~>h7)MEdR-OTliU{L$`H5 z=9;W~3^$W)n+Qjg%mi>+a93J-=OdfMl?n@_N`os$&>s>v9PFimYEqpn`ZbNo1 zfJ?2{rK#rp@i3ycpsOo(xg*vQ z^vBU~bw_SW_tEZS`?*PSI(tM}jPTwiycS2wzR{~@_$Z142W#`R!Ic_wc+4CtREgW4 zc%aTEsg$(Xxq{RtzabcY{a&4(SFC%StE;C@vnbvo)m7(;-6kXOOsF3dI(0;^ptnGK zMmQ0Xz+Uy~df?uDb;HlaO{wKSRkIrzpf)+KnDX4-I8D2S(l^7$gFS_RRw>Qdj3s23 zpUsNr)FYM}u&ugPw3_FGhk@OiSVO=vKrINVJc&CaJUf+W&7+Bz@K{%_fol*ypmz=S z1ygw863aEf`h2vfRw`5_(QaZEzc&4Zm6xRv~>ebFXMjZyQ(JWzgE(K(6lz+kL zb5`sA3T#&Pj%o*`%470tX<8a0fhIQe^ujntQKN(pWrj~t#Q;Y}P&VypV_!U!st$${@Ma_B)I?V=)z73MzC3BB$PG6qk zbmqDf7f`LS&Kud6EaZ+_caI$Kj+8v)#x7}R5eXSU)I*hHM&&g#Glv% zw6R$-1uzD@yrsF?E@k+&RSV)bX=l*h)~1Ckg{wrx>DC&G)apF1ntA~?p@!>OO4)9U zq)z9_BbJc_SqKbQ)W}fohvx2=^8`$WI{ROs?BO!()~Ik&xvgT!7ya|z!v<&lE#haR zK%cm0oT#HWrAW`N$|$Tuq*5(b>rVIfQx-#BfY|Q)my}9Kn{_TUU4O(l|1`q)bPop;#|U04B#S2W5sUfQoUC_mF}uF|>_KIt(8=bT%86q}qq7 z^twlxMF4BJ>d7R#n_3DO)^htq?Ixc5XGxXPClfua?A%MZ3sJRN8j+>rW<~J6v>qAF z+(N?o^po&>it!sq_8H~zIb=rx$DNUwV)tO!<@dMhqP{S8h(j}`K69(P$eH4N6TMxF zEPgIAAltl}7p@_>d}-i;}eMHeWuCNmQgycCU)e^b%G8PqCmR#bY zpn>MLqhm-k_3K1UJ-KJ0zjljPP{LUIrRVv!SG<9$s$fDtGGvEgU z-l^jT6V@HNXtmB&@69GgySLoN0zExJ+7r2lga)xbiGAsg*=B}(Ibpl?nibIQWj%pZewE_$RvT-*p)svrTwvOmun^06Yr02dqtmS3&3+kUgaDq zGH&{#(6u&^>2LG&jtxlBZ@9Jk6bcgv1i(lojQNrmVv@0;5p~L0N*8o=b&V_%;su4Z zqt^pt5Xzr)^BzMsqZx92d6fBTCpynbL${K&42PRHTpM4h*9w_xzc$^;MtKBaPGLtT zKNIA3nU&0|5wF=~_(9tFI9b_71FHNJodhP8(1Dds_6X0s4!we1CX_hA15&ppmK|a6 z3|fIIFc~M`-Gw(BSLTf;smu>UEoPPsPdHNQFh=re{a^KSC|ft?772We>Nsi=fbPEN+cV42g?FJaWVCdBSo z`|wu2aA=5g>}*J+gvV-F?b{hQF&_BCF%R zf0krDdjyYalbz}2$_2R&5Pp>RUCZ`x0ZKfMd09{S6bZ!P@}uxq>GY8(D9piT^#d1(LxNK#}ZZeFX&O-n7jM73k6 zK)060qfYa$;N}n5(9f2EW>6MMMYz3HRUaGQ64&Ta0gk$G5<#4@aUlS33W9lu~mr{EyrxXZj_CTGh2A0A; 
zVB4>%Y2(%X{_@6gAKG`c82b4;X8)z4{~@KsfF*JeYw$?g0y^6 zvCs+8sCgPGW>;ZqSY^-x?%MMEYM91O-TAq(b>hhv1c{_AvX zxd^Xaccww^O?W0tt~6scY*IAWT7?T#@n_whmqSRBZ<=xOs3r@6G!oI)K@1%uzq@xr zYDZmv`nG_}n239Vt6s?tzQ}FPJYSX5J$9tc>+8dWO^Vl5P{IAIOaTzPl}zfuy2!d| zYo~RS$+ZGqx5vHF1L7|}O$4yQ6U8KrTF&!g8DnFYKDrZ>P_YCbAv2RK8MKP_Yvyox zF&1`3Ucn+ITlEY?ZQ$cMs2?sqF|loYV=M7uT6#z0%xJe*cD59y{P?6&m)jE5reZeS zl@oOdRfZV6^p+3vE5(fH)FKE4V34s;;F_tW)%{2)CQDWK7JtCBe(PlYocfpEeOJVL z&v@N9GvB3qPpuaw{lFlV#9eb5B*aba6>r;sjnD8gAt~HDN;0=WpZhu%X1`eW8+kku z_CV72+S{#(#7X1Mn%@!mkrYeA#CsTQ@4|9Y`TW?ZbeIZzu-@Np>$ej((W4275|=y_>h|`n8E5af|O=)a~Z~^5o_aCS~K}5QXe7rVPkM89Y>6Pg_i8J7q`fYyFb{3y} z*M?-WXS0MVEZZ92!M%I+3za6t+qS*ua%Vr))n2oqzxq@t7R?={O7UeO)ZwGR3ryuF v`N*NV5wztIAE=LQ*6NvRufve~8*)!xU@VjGB@_HB-9UpBH53q!pTGGZPi}Nb From 33d3305445b59577b3b70ea6bab48f73f1af5a64 Mon Sep 17 00:00:00 2001 From: LeoRoccoBreedt Date: Tue, 15 Apr 2025 17:10:14 +0200 Subject: [PATCH 099/125] style: update ending steps and add button links to examples --- .../debug_training_runs.ipynb | 54 ++++++++++++++----- 1 file changed, 40 insertions(+), 14 deletions(-) diff --git a/how-to-guides/debug-model-training-runs/debug_training_runs.ipynb b/how-to-guides/debug-model-training-runs/debug_training_runs.ipynb index 8728019..7759514 100644 --- a/how-to-guides/debug-model-training-runs/debug_training_runs.ipynb +++ b/how-to-guides/debug-model-training-runs/debug_training_runs.ipynb @@ -382,30 +382,56 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "### Step 4: _Analyze and debug training_\n", - "While the model is training, you can start using the Neptune web app to browse your metrics and create custom analyses and visualizations:\n", - "1. To visualize the large number of metrics being logged in near real time, navigate to the **Charts** tab of the active run (_or select link above_).\n", - "\n", - "2. Filter the metrics using the [advanced regex searching capabilities](https://docs-beta.neptune.ai/charts#filtering-charts). 
For example, enter `gradient & fc & layers.[0-5] & norm` in the search bar. This query filters the metrics for the first 6 layers of the gradients norms of the fully connected layers. You can specify down to exactly the metrics name you want.\n", - "\n", - "3. Export the filter to a [dashboard](https://docs-beta.neptune.ai/custom_dashboard). The saved dashboard will now only display these metrics during training. This is useful if you know that a certain set of layers can be troublesome during training. \n", + "### Step 4: Analyze and Debug Training\n", + "While your model trains, use Neptune's web interface to monitor and analyze metrics in near real-time:\n", + "\n", + "**1. Real-time Metric Visualization**\n", + "- Navigate to the _Charts_ tab to view live training metrics\n", + "- Monitor multiple metrics simultaneously\n", + "- Track training progress in real-time\n", + "\n", + "**2. Advanced Metric Filtering**\n", + "- Focus on specific metrics using [advanced regex search](https://docs.neptune.ai/charts#filtering-charts)\n", + "- Example: `gradient & fc & layers.[0-5] & norm` filters gradients for the first 6 fully connected layers\n", + "- Perfect for isolating problematic layers or components\n", + "\n", + "**3. Create Custom Dashboards**\n", + "- Save filtered metrics to a [custom dashboard](https://docs.neptune.ai/custom_dashboard) for continuous monitoring\n", + "- Automatically updates during training\n", + "- Share with team members\n", + "- Ideal for tracking known problematic layers\n", + "\n", + "_Example dashboard:_\n", + "\n", + " \"Explore\n", + "\n", "\n", - "4. Alternatively, use the [dynamic metric selection](https://docs-beta.neptune.ai/chart_widget#dynamic-metric-selection) and create a new chart widget to display all LSTM layers gradient norms in one chart. Again, use the `(.*gradient)(.*lstm)(.*norm)` query. 
This makes it easy to have an automatically updating chart that allows you to view all layers on a single chart for rapid debugging in case vanishing or exploding gradients appear. \n", + "**4. Dynamic Metric Analysis**\n", + "- Group related metrics (e.g., all LSTM layer gradients)\n", + "- Create automatically updating charts by [selecting metrics dynamically](https://docs.neptune.ai/chart_widget/#dynamic-metric-selection)\n", + "- Quickly identify vanishing/exploding gradients\n", + "- Example query: `(.*gradient)(.*lstm)(.*norm)`\n", "\n", - "**Explore pre-configured dashboard:**\n", - "\n", + "_Example dashboard:_\n", + "\n", " \"Explore\n", "\n", "\n", + "#### 5. Document Training Insights\n", + "- Create [custom reports](https://docs.neptune.ai/reports) to track training progress\n", + "- Document anomalies and successful configurations\n", + "- Maintain training history\n", + "- Share insights with team members\n", "\n", - "5. To document this behavior, create a [custom report](https://docs-beta.neptune.ai/reports) to outline the model training, global metrics, debugging metrics for the model you're training. 
This allows you to keep track of any anomalies but also to see what worked or did not work during training.\n", - "\n", - "**Explore pre-configured report:**\n", + "_Example report:_\n", "\n", " \"Explore\n", "\n", "\n", - "See also: PyTorch layer-wise tracking package [here](TODO:Link to integration for tracking layer-wise metrics)\n" + "---\n", + "\n", + "**Additional Resources:**\n", + "- [PyTorch Layer-wise Tracking Package](TODO:Link to integration for tracking layer-wise metrics)\n" ] } ], From 494379f350cfef2b500e658c58558e72ad91f239 Mon Sep 17 00:00:00 2001 From: LeoRoccoBreedt Date: Wed, 16 Apr 2025 12:21:34 +0200 Subject: [PATCH 100/125] style: update text for better readability --- .../debug_training_runs.ipynb | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/how-to-guides/debug-model-training-runs/debug_training_runs.ipynb b/how-to-guides/debug-model-training-runs/debug_training_runs.ipynb index 7759514..0509f9f 100644 --- a/how-to-guides/debug-model-training-runs/debug_training_runs.ipynb +++ b/how-to-guides/debug-model-training-runs/debug_training_runs.ipynb @@ -382,20 +382,20 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "### Step 4: Analyze and Debug Training\n", + "### Step 4: Analyze and debug training\n", "While your model trains, use Neptune's web interface to monitor and analyze metrics in near real-time:\n", "\n", - "**1. Real-time Metric Visualization**\n", + "**1. Real-time metric visualization**\n", "- Navigate to the _Charts_ tab to view live training metrics\n", "- Monitor multiple metrics simultaneously\n", "- Track training progress in real-time\n", "\n", - "**2. Advanced Metric Filtering**\n", + "**2. 
Advanced metric filtering**\n", "- Focus on specific metrics using [advanced regex search](https://docs.neptune.ai/charts#filtering-charts)\n", "- Example: `gradient & fc & layers.[0-5] & norm` filters gradients for the first 6 fully connected layers\n", "- Perfect for isolating problematic layers or components\n", "\n", - "**3. Create Custom Dashboards**\n", + "**3. Create custom dashboards**\n", "- Save filtered metrics to a [custom dashboard](https://docs.neptune.ai/custom_dashboard) for continuous monitoring\n", "- Automatically updates during training\n", "- Share with team members\n", @@ -406,7 +406,7 @@ " \"Explore\n", "\n", "\n", - "**4. Dynamic Metric Analysis**\n", + "**4. Dynamic metric analysis**\n", "- Group related metrics (e.g., all LSTM layer gradients)\n", "- Create automatically updating charts by [selecting metrics dynamically](https://docs.neptune.ai/chart_widget/#dynamic-metric-selection)\n", "- Quickly identify vanishing/exploding gradients\n", @@ -417,7 +417,7 @@ " \"Explore\n", "\n", "\n", - "#### 5. Document Training Insights\n", + "**5. 
Document training insights**\n", "- Create [custom reports](https://docs.neptune.ai/reports) to track training progress\n", "- Document anomalies and successful configurations\n", "- Maintain training history\n", @@ -430,7 +430,7 @@ "\n", "---\n", "\n", - "**Additional Resources:**\n", + "**Additional resources:**\n", "- [PyTorch Layer-wise Tracking Package](TODO:Link to integration for tracking layer-wise metrics)\n" ] } From 0b8847b407428f45f42c5c1816e742c6e57c0ee6 Mon Sep 17 00:00:00 2001 From: LeoRoccoBreedt Date: Wed, 16 Apr 2025 14:28:30 +0200 Subject: [PATCH 101/125] style: Update markdown section of explanations for better readability and flow --- .../debug_training_runs.ipynb | 32 ++++++++++++------- 1 file changed, 20 insertions(+), 12 deletions(-) diff --git a/how-to-guides/debug-model-training-runs/debug_training_runs.ipynb b/how-to-guides/debug-model-training-runs/debug_training_runs.ipynb index 0509f9f..73f72da 100644 --- a/how-to-guides/debug-model-training-runs/debug_training_runs.ipynb +++ b/how-to-guides/debug-model-training-runs/debug_training_runs.ipynb @@ -4,7 +4,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "# Debug Model Training with Neptune\n", + "# Debug Model Training Runs with Neptune\n", "\n", " \n", " \"Open \n", @@ -36,7 +36,9 @@ " \"Explore\n", "\n", "\n", - "_Note: This is a code recipe that you can adapt for your own model training needs._" + "_Note: This is a code recipe that you can adapt for your own model training needs._\n", + "\n", + "If you are using Google Colab to run this example - please ensure that you update the runtime to use the free available GPU." ] }, { @@ -75,7 +77,9 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "### Install dependencies and import libraries" + "### Install dependencies and import libraries\n", + "\n", + "This tutorial uses a datset from Hugging Face which is loaded using the `datasets` package from Hugging Face." 
] }, { @@ -105,15 +109,13 @@ "source": [ "params = {\n", " \"optimizer\": \"Adam\",\n", - " \"batch_size\": 8,\n", - " \"learning_rate\": 0.01,\n", + " \"batch_size\": 8, # Can be increased to 32 when training via Google Colab\n", + " \"learning_rate\": 0.01, # A smaller lr can be used \n", " \"epochs\": 5,\n", " \"device\": torch.device(\"cuda\" if torch.cuda.is_available() else \"cpu\"),\n", " \"input_features\": 256,\n", " \"embed_size\": 1000,\n", " \"hidden_size\": 256, # hidden size for the LSTM\n", - " \"dropout_prob\": 0.3,\n", - " \"num_lstm_layers\": 3,\n", "}" ] }, @@ -260,7 +262,14 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "## Debug model training run with Neptune" + "## Debug model training run with Neptune\n", + "\n", + "In this section, we'll walk through 4 key steps to effectively debug your training runs:\n", + "\n", + "1. **Initialize Neptune** - set up the Neptune environment to track your training metrics\n", + "2. **Log configuration parameters** - record your model's hyperparameters and setup\n", + "3. **Track layer-wise metrics** - monitor gradient norms across all model layers\n", + "4. **Analyze training behavior** - Use Neptune's visualization tools to identify and diagnose training issues" ] }, { @@ -291,7 +300,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "### Step 2: _Log configuration parameters and tags_" + "### Step 2: _Log configuration parameters_" ] }, { @@ -306,7 +315,6 @@ " \"config/optimizer\": params[\"optimizer\"],\n", " \"config/batch_size\": params[\"batch_size\"],\n", " \"config/epochs\": params[\"epochs\"],\n", - " \"config/num_lstm_layers\": params[\"num_lstm_layers\"],\n", " \"data/embed_size\": params[\"embed_size\"],\n", " }\n", ")\n", @@ -320,7 +328,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "### Step 3: _Execute model training loop_\n", + "### Step 3: _Track gradient norms during training_\n", "\n", "In this training loop, we:\n", "1. 
Register backward hooks to capture gradient norms from all model layers with `register_full_backward_hook()`\n", @@ -382,7 +390,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "### Step 4: Analyze and debug training\n", + "### Step 4: _Analyze training behavior_\n", "While your model trains, use Neptune's web interface to monitor and analyze metrics in near real-time:\n", "\n", "**1. Real-time metric visualization**\n", From 69bdb16a697663fa37ab55ae0acb1798ad1ab610 Mon Sep 17 00:00:00 2001 From: LeoRoccoBreedt Date: Wed, 16 Apr 2025 14:30:49 +0200 Subject: [PATCH 102/125] chore: pre-commit cleanup --- .../debug_training_runs.ipynb | 21 +++++-------------- 1 file changed, 5 insertions(+), 16 deletions(-) diff --git a/how-to-guides/debug-model-training-runs/debug_training_runs.ipynb b/how-to-guides/debug-model-training-runs/debug_training_runs.ipynb index 73f72da..67874ba 100644 --- a/how-to-guides/debug-model-training-runs/debug_training_runs.ipynb +++ b/how-to-guides/debug-model-training-runs/debug_training_runs.ipynb @@ -109,8 +109,8 @@ "source": [ "params = {\n", " \"optimizer\": \"Adam\",\n", - " \"batch_size\": 8, # Can be increased to 32 when training via Google Colab\n", - " \"learning_rate\": 0.01, # A smaller lr can be used \n", + " \"batch_size\": 8, # Can be increased to 32 when training via Google Colab\n", + " \"learning_rate\": 0.01, # A smaller lr can be used\n", " \"epochs\": 5,\n", " \"device\": torch.device(\"cuda\" if torch.cuda.is_available() else \"cpu\"),\n", " \"input_features\": 256,\n", @@ -137,21 +137,9 @@ }, { "cell_type": "code", - "execution_count": 24, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Training samples: 81926\n", - "Vocabulary size: 128257\n", - "Model created: MultilayerModel\n", - "Optimizer: Adam\n", - "Criterion: CrossEntropyLoss\n" - ] - } - ], + "outputs": [], "source": [ "import torch\n", "import torch.nn as nn\n", @@ -191,6 
+179,7 @@ ")\n", "print(f\"Vocabulary size: {params['vocab_size']}\")\n", "\n", + "\n", "# Create model\n", "class MultilayerModel(nn.Module):\n", " \"\"\"A larger language model with multiple LSTM and fully connected layers.\"\"\"\n", From 0fed41210864bd173b0502ec8f7333a382b4d8d5 Mon Sep 17 00:00:00 2001 From: LeoRoccoBreedt Date: Wed, 16 Apr 2025 14:49:54 +0200 Subject: [PATCH 103/125] chore: removed unused code for this example --- .../pytorch/notebooks/README.md | 181 ------- .../pytorch/notebooks/TorchWatcher.py | 358 ------------- .../pytorch_text_model_debugging.ipynb | 487 ------------------ .../notebooks/torch_watcher_example.py | 144 ------ 4 files changed, 1170 deletions(-) delete mode 100644 integrations-and-supported-tools/pytorch/notebooks/README.md delete mode 100644 integrations-and-supported-tools/pytorch/notebooks/TorchWatcher.py delete mode 100644 integrations-and-supported-tools/pytorch/notebooks/pytorch_text_model_debugging.ipynb delete mode 100644 integrations-and-supported-tools/pytorch/notebooks/torch_watcher_example.py diff --git a/integrations-and-supported-tools/pytorch/notebooks/README.md b/integrations-and-supported-tools/pytorch/notebooks/README.md deleted file mode 100644 index 67e6bd8..0000000 --- a/integrations-and-supported-tools/pytorch/notebooks/README.md +++ /dev/null @@ -1,181 +0,0 @@ -# TorchWatcher - -A lightweight PyTorch model monitoring tool that automatically tracks layer activations, gradients, and parameters during training. Built for seamless integration with Neptune.ai. 
- -## Features - -- **Automatic Layer Tracking**: Monitors activations, gradients, and parameters of specified PyTorch layers -- **Flexible Layer Selection**: Track all layers or specify which layer types to monitor -- **Comprehensive Statistics**: Predefined tensor statistics including mean, standard deviation, norm, min, max, variance, and absolute mean -- **Configurable Tracking**: Enable/disable tracking of activations, gradients, and parameters as needed -- **Organized Logging**: Structured metric namespacing for better organization in Neptune -- **Memory Efficient**: Clears stored tensors after each logging step -- **Error Handling**: Robust error handling with informative warnings - -## Installation - -```bash -pip install neptune_scale -``` - -## Usage - -### Basic Usage - -```python -from neptune_scale import Run -from TorchWatcher import TorchWatcher - -# Initialize Neptune run -run = Run(experiment_name="my-experiment") - -# Create your PyTorch model -model = YourModel() - -# Initialize TorchWatcher -watcher = TorchWatcher( - model, - run, - track_layers=[nn.Linear, nn.ReLU], # Specify which layer types to track (default is all layers) - tensor_stats=['mean', 'norm'], # Choose which statistics to compute (default = "mean" only) - base_namespace="model_metrics" # Set base namespace for all metrics (default = "debug") -) - -# Training loop -for epoch in range(n_epochs): - for batch in train_loader: - # Forward pass - output = model(batch) - - # Backward pass - loss.backward() - - # Track metrics with default namespace - watcher.watch(step=current_step) -``` - -### Controlling Metric Logging - -The `watch()` method provides flexible control over what metrics to track and how to organize them: - -```python -# Track all metrics (default behavior) -watcher.watch(step=current_step) - -# Track specific metrics -watcher.watch( - step=current_step, - track_activations=True, - track_gradients=False, - track_parameters=True -) - -# Use a namespace to organize 
metrics -watcher.watch( - step=current_step, - namespace="train" # Metrics will be under "train/model_metrics/..." -) -``` - -### Namespace Organization - -TorchWatcher provides a hierarchical namespace structure for organizing metrics: - -1. **Base Namespace**: Set during initialization -```python -watcher = TorchWatcher( - model, - run, - base_namespace="model_metrics" # All metrics will be under "model_metrics/" -) -``` - -2. **Per-Call Namespace**: Prefix for specific tracking calls -```python -# During training -watcher.watch(step=step, namespace="train") # Metrics under "train/model_metrics/" - -# During validation -watcher.watch(step=step, namespace="validation") # Metrics under "validation/model_metrics/" -``` - -3. **Metric Structure**: Metrics are organized as: -``` -{namespace}/{base_namespace}/{metric_type}/{layer_name}_{statistic} -``` - -Example metric names: -- `train/model_metrics/activation/fc1_mean` -- `validation/model_metrics/gradient/fc2_norm` -- `train/model_metrics/parameters/fc1_weight_mean` - -### Example Use Cases - -1. **Training with Full Tracking**: -```python -# Track everything during initial training -watcher.watch(step=step, namespace="train") -``` - -2. **Validation with Limited Tracking**: -```python -# Track only activations during validation -watcher.watch( - step=step, - track_activations=True, - track_gradients=False, - track_parameters=False, - namespace="validation" -) -``` - -3. **Efficient Training**: -```python -# Track only gradients during later training phases -watcher.watch( - step=step, - track_activations=False, - track_parameters=False, - track_gradients=True, - namespace="train" -) -``` - -## Supported Layer Types - -TorchWatcher supports tracking of all common PyTorch layer types, including: -- Linear layers -- Convolutional layers -- Recurrent layers -- Normalization layers -- Activation layers -- Pooling layers -- Dropout layers -- Embedding layers -- Transformer layers -- Attention layers -- And more... 
- -## Available Statistics - -Predefined tensor statistics include: -- `mean`: Mean value -- `std`: Standard deviation -- `norm`: L2 norm -- `min`: Minimum value -- `max`: Maximum value -- `var`: Variance -- `abs_mean`: Mean of absolute values - -## Example - -See `torch_watcher_example.py` for a complete example demonstrating: -- Model definition -- Data generation -- Training loop with different tracking configurations -- Namespace organization -- Integration with Neptune - -## License - -MIT License diff --git a/integrations-and-supported-tools/pytorch/notebooks/TorchWatcher.py b/integrations-and-supported-tools/pytorch/notebooks/TorchWatcher.py deleted file mode 100644 index 6693859..0000000 --- a/integrations-and-supported-tools/pytorch/notebooks/TorchWatcher.py +++ /dev/null @@ -1,358 +0,0 @@ -import warnings -from typing import Any, Dict, List, Literal, Optional, Type, Union - -import torch -import torch.nn as nn - -# Predefined tensor statistics -TENSOR_STATS = { - "mean": lambda x: x.mean().item(), - "std": lambda x: x.std().item(), - "norm": lambda x: x.norm().item(), - "min": lambda x: x.min().item(), - "max": lambda x: x.max().item(), - "var": lambda x: x.var().item(), - "abs_mean": lambda x: x.abs().mean().item(), -} - -# Common PyTorch layer types for validation -PYTORCH_LAYERS = { - # Linear layers - nn.Linear, - # Convolutional layers - nn.Conv1d, - nn.Conv2d, - nn.Conv3d, - nn.ConvTranspose1d, - nn.ConvTranspose2d, - nn.ConvTranspose3d, - # Recurrent layers - nn.LSTM, - nn.GRU, - nn.RNN, - # Normalization layers - nn.BatchNorm1d, - nn.BatchNorm2d, - nn.BatchNorm3d, - nn.LayerNorm, - nn.InstanceNorm1d, - nn.InstanceNorm2d, - nn.InstanceNorm3d, - # Activation layers - nn.ReLU, - nn.LeakyReLU, - nn.ELU, - nn.SELU, - nn.GELU, - # Pooling layers - nn.MaxPool1d, - nn.MaxPool2d, - nn.MaxPool3d, - nn.AvgPool1d, - nn.AvgPool2d, - nn.AvgPool3d, - # Dropout layers - nn.Dropout, - nn.Dropout2d, - nn.Dropout3d, - # Embedding layers - nn.Embedding, - 
nn.EmbeddingBag, - # Transformer layers - nn.TransformerEncoderLayer, - nn.TransformerDecoderLayer, - # Attention layers - nn.MultiheadAttention, - # Flatten layers - nn.Flatten, - nn.Unflatten, - # Other common layers - nn.Sequential, - nn.ModuleList, - nn.ModuleDict, -} - - -class HookManager: - """ - A robust hook management class for PyTorch models to track activations, gradients, and parameters. - - Improvements: - - More comprehensive error handling - - Flexible hook registration - - Support for more layer types - - Configurable tracking - """ - - def __init__(self, model: nn.Module, track_layers: Optional[List[Type[nn.Module]]] = None): - """ - Initialize HookManager with layer types to track. - - Args: - model (nn.Module): The PyTorch model to track - track_layers (Optional[List[Type[nn.Module]]]): List of PyTorch layer types to track. - If None, tracks all layers in the model. - If specified, must contain valid PyTorch layer types. - - Raises: - TypeError: If model is not a PyTorch model - ValueError: If track_layers contains invalid layer types - """ - if not isinstance(model, nn.Module): - raise TypeError("The model must be a PyTorch model") - - # Validate that all specified layers are valid PyTorch layers if track_layers is provided - if track_layers is not None: - invalid_layers = [layer for layer in track_layers if layer not in PYTORCH_LAYERS] - if invalid_layers: - raise ValueError( - f"Invalid layer types specified: {invalid_layers}. " - f"Please use valid PyTorch layer types from torch.nn." 
- ) - - self.model = model - self.hooks: List[torch.utils.hooks.RemovableHandle] = [] - self.activations: Dict[str, torch.Tensor] = {} - self.gradients: Dict[str, torch.Tensor] = {} - self.track_layers = track_layers - - def save_activation(self, name: str): - """Create a forward hook to save layer activations.""" - - def hook(module, input, output): - try: - # Handle different output types (tensor or tuple) - activation = output[0] if isinstance(output, tuple) else output - self.activations[name] = activation.detach() - except Exception as e: - warnings.warn(f"Could not save activation for {name}: {e}") - - return hook - - def save_gradient(self, name: str): - """Create a backward hook to save layer gradients.""" - - def hook(module, grad_input, grad_output): - try: - # Save the first gradient output - self.gradients[name] = grad_output[0].detach() - except Exception as e: - warnings.warn(f"Could not save gradient for {name}: {e}") - - return hook - - def register_hooks(self, track_activations: bool = True, track_gradients: bool = True): - """ - Register hooks for the model with configurable tracking. 
- - Args: - track_activations (bool): Whether to track layer activations - track_gradients (bool): Whether to track layer gradients - """ - # Clear existing hooks - self.remove_hooks() - - # Register forward hooks for activations - if track_activations: - for name, module in self.model.named_modules(): - # Skip the model itself - if name == "": - continue - # Track all layers if track_layers is None, otherwise only specified types - if self.track_layers is None or any( - isinstance(module, layer_type) for layer_type in self.track_layers - ): - hook = module.register_forward_hook(self.save_activation(name)) - self.hooks.append(hook) - - # Register backward hooks for gradients - if track_gradients: - for name, module in self.model.named_modules(): - # Skip the model itself - if name == "": - continue - # Track all layers if track_layers is None, otherwise only specified types - if self.track_layers is None or any( - isinstance(module, layer_type) for layer_type in self.track_layers - ): - hook = module.register_full_backward_hook(self.save_gradient(name)) - self.hooks.append(hook) - - def remove_hooks(self): - """Remove all registered hooks.""" - for hook in self.hooks: - hook.remove() - self.hooks.clear() - - def clear(self): - """Clear stored activations and gradients.""" - self.activations.clear() - self.gradients.clear() - - def get_activations(self) -> Dict[str, torch.Tensor]: - """Get stored activations.""" - return self.activations - - def get_gradients(self) -> Dict[str, torch.Tensor]: - """Get stored gradients.""" - return self.gradients - - def __del__(self): - """Ensure hooks are removed when the object is deleted.""" - self.remove_hooks() - - -class TorchWatcher: - """ - A comprehensive tracking mechanism for PyTorch models with enhanced logging. 
- """ - - def __init__( - self, - model: nn.Module, - run: Any, # Made more flexible to support different logging mechanisms - track_layers: Optional[List[Type[nn.Module]]] = None, - tensor_stats: Optional[List[str]] = None, - base_namespace: str = "debug", # Default namespace for all metrics - ) -> None: - """ - Initialize TorchWatcher with configuration options. - - Args: - model (nn.Module): The PyTorch model to watch - run: Logging mechanism from Neptune - track_layers (Optional[List[Type[nn.Module]]]): List of PyTorch layer types to track. - If None, tracks all layers in the model. - If specified, must contain valid PyTorch layer types. - tensor_stats (Optional[List[str]]): List of statistics to compute. - Available options: mean, std, norm, min, max, var, abs_mean. - Defaults to ['mean'] if not specified. - base_namespace (str): Base namespace for all logged metrics. Defaults to "debug". - - Raises: - TypeError: If model is not a PyTorch model - ValueError: If track_layers contains invalid layer types - """ - if not isinstance(model, nn.Module): - raise TypeError("The model must be a PyTorch model") - - self.model = model - self.run = run - self.hm = HookManager(model, track_layers) - self.debug_metrics: Dict[str, float] = {} - self.base_namespace = base_namespace - - # Validate and set tensor statistics - if tensor_stats is None: - tensor_stats = ["mean"] - - # Validate that all requested statistics exist - # TODO: Add ability to set custom statistics - invalid_stats = [stat for stat in tensor_stats if stat not in TENSOR_STATS] - if invalid_stats: - raise ValueError( - f"Invalid statistics requested: {invalid_stats}. " - f"Available statistics are: {list(TENSOR_STATS.keys())}" - ) - - self.tensor_stats = {stat: TENSOR_STATS[stat] for stat in tensor_stats} - - # Default hook registration - self.hm.register_hooks() - - def _safe_tensor_stats(self, tensor: torch.Tensor) -> Dict[str, float]: - """ - Safely compute tensor statistics with error handling. 
- - Args: - tensor (torch.Tensor): Input tensor - - Returns: - Dict of statistical metrics - """ - stats = {} - for stat_name, stat_func in self.tensor_stats.items(): - try: - stats[stat_name] = stat_func(tensor) - except Exception as e: - warnings.warn(f"Could not compute {stat_name} statistic: {e}") - return stats - - def _track_metric( - self, metric_type: str, data: Dict[str, torch.Tensor], namespace: Optional[str] = None - ): - """Track metrics with enhanced statistics for a given metric type. - - Args: - metric_type (str): Type of metric being tracked (activation/gradient/parameters) - data (Dict[str, torch.Tensor]): Dictionary mapping layer names to tensors - namespace (Optional[str]): Optional namespace to prefix the base namespace - """ - # Construct the full namespace - full_namespace = f"{namespace}/{self.base_namespace}" if namespace else self.base_namespace - - for layer, tensor in data.items(): - if tensor is not None: - stats = self._safe_tensor_stats(tensor) - for stat_name, stat_value in stats.items(): - self.debug_metrics[f"{full_namespace}/{metric_type}/{layer}_{stat_name}"] = ( - stat_value - ) - - def track_activations(self, namespace: Optional[str] = None): - """Track layer activations with enhanced statistics.""" - activations = self.hm.get_activations() - self._track_metric("activation", activations, namespace) - - def track_gradients(self, namespace: Optional[str] = None): - """Track layer gradients with enhanced statistics.""" - gradients = self.hm.get_gradients() - self._track_metric("gradient", gradients, namespace) - - def track_parameters(self, namespace: Optional[str] = None): - """Track model parameters with enhanced statistics.""" - parameters = { - name: param.grad - for name, param in self.model.named_parameters() - if param is not None and param.grad is not None - } - self._track_metric("parameters", parameters, namespace) - - def watch( - self, - step: Union[int, float], - track_gradients: bool = True, - track_parameters: bool = 
True, - track_activations: bool = True, - namespace: Optional[str] = None, - ): - """ - Log debug metrics with flexible configuration. - - Args: - step (int|float): Logging step - track_gradients (bool): Whether to track gradients. Defaults to True. - track_parameters (bool): Whether to track parameters. Defaults to True. - track_activations (bool): Whether to track activations. Defaults to True. - namespace (Optional[str]): Optional namespace to prefix the base namespace. - If provided, metrics will be logged under {namespace}/{base_namespace}/... - """ - # Reset metrics - self.debug_metrics.clear() - - # Track metrics based on boolean flags - if track_gradients: - self.track_gradients(namespace) - if track_parameters: - self.track_parameters(namespace) - if track_activations: - self.track_activations(namespace) - - # Log metrics - try: - self.run.log_metrics(data=self.debug_metrics, step=step) - except Exception as e: - warnings.warn(f"Logging failed: {e}") - - # Clear hooks - self.hm.clear() diff --git a/integrations-and-supported-tools/pytorch/notebooks/pytorch_text_model_debugging.ipynb b/integrations-and-supported-tools/pytorch/notebooks/pytorch_text_model_debugging.ipynb deleted file mode 100644 index 1b1b03c..0000000 --- a/integrations-and-supported-tools/pytorch/notebooks/pytorch_text_model_debugging.ipynb +++ /dev/null @@ -1,487 +0,0 @@ -{ - "cells": [ - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "# Neptune + PyTorch\n", - "\n", - " \n", - " \"Open \n", - "" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Introduction\n", - "Global metrics such as loss or accuracy provide a high-level performance snapshot and ensure training is on course.\n", - "\n", - "However, for large or foundation models, monitoring layer-wise metrics—such as gradients and activations—delivers critical insights into how each layer learns. 
This level of detail helps identify issues and fine-tune individual layers for better overall performance.\n", - "The main challenge is the volume of data generated by layer-wise logging.\n", - "\n", - "Fortunately, Neptune is built for hyperscale tracking. It enables you to capture, organize, and analyze metrics from every layer without disrupting the training process. No matter how large is your model." - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "This guide will show you how to:\n", - "- Initialize the **Neptune Run** object and log configuration parameters\n", - "- Create a **reusable class** to hook and log layer-wise metrics (`TorchWatcher`)\n", - "- Log **aggregated metrics** such as loss and accuracy\n", - "- Log **layer-wise metrics** to debug model training such as:\n", - "\n", - "| **Metric** | **Demonstrated** | **What it shows** | **How to capture** |\n", - "|-----------------------------------|--------------------------------------|--------------------------------------------------------------------------------------------------------------|---------------------------------------------------------------|\n", - "| **Activations** | Yes | Dead or exploding activations can indicate issues with training stability. | `TorchWatcher` |\n", - "| **Gradients** | Yes | Essential for diagnosing vanishing or exploding gradients. Small gradients may indicate vanishing gradients, while large ones can signal instability. | `TorchWatcher` |\n", - "| **Parameters** | Yes | Tracks how the model’s parameters evolve during training. Large or small weights may indicate the need for better regularization or adjustments in learning rate. | Extract directly from the model’s parameters. |\n", - "| **Loss** | No | Identifies which parts of the network contribute more to the overall loss, aiding debugging and optimization. | Monitor outputs from each layer and compare with the target. 
|\n", - "| **Learning rate** | No | Helpful if using techniques like Layer-wise Learning Rate Decay (L2LRD). Tracking this can provide insight into the layer-specific learning rate. | Manually track based on optimizer settings. |\n", - "| **Output norms** | No | The L2-norm of layer outputs can highlight issues like gradient explosion or vanishing gradients. | Compute the L2-norm for each layer’s output. |" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "Use this notebook as a code recipe. Add your own code and adapt the sections to your own model training needs." - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Before you start\n", - "\n", - " 1. Create a Neptune Scale account. [Register →](https://neptune.ai/early-access)\n", - " 2. Create a Neptune project for tracking metadata. For instructions, see [Projects](https://docs-beta.neptune.ai/projects/) in the Neptune Scale docs.\n", - " 3. Install and configure Neptune Scale for logging metadata. For instructions, see [Get started](https://docs-beta.neptune.ai/setup) in the Neptune Scale docs." 
- ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### Set environment variables\n", - "By setting your project name and API token as environment variables, you can use them throughout this notebook.\n", - "\n", - "Uncomment the code block below and replace placeholder values with your own credentials:" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "# Set Neptune credentials as environment variables\n", - "# %env NEPTUNE_API_TOKEN = \"your_api_token\"\n", - "# %env NEPTUNE_PROJECT = \"your_workspace_name_here/your_project_name\"" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### Install dependencies and import libraries" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "# Install dependencies\n", - "! pip install -qU neptune_scale torch datasets" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "# Import libraries\n", - "import torch\n", - "import torch.nn as nn\n", - "import torch.optim as optim\n", - "from torch.utils.data import Dataset, DataLoader\n", - "import numpy as np\n", - "from collections import Counter\n", - "from datasets import load_dataset\n", - "\n", - "from neptune_scale import Run\n", - "import os" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### Initialize parameters" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "# Initialize model, loss function, and optimizer\n", - "\n", - "params = {\n", - " \"optimizer\": \"Adam\",\n", - " \"batch_size\": 8,\n", - " \"learning_rate\": 0.01,\n", - " \"epochs\": 5,\n", - " \"device\": torch.device(\"cuda\" if torch.cuda.is_available() else \"cpu\"),\n", - " \"input_features\": 256,\n", - " \"embed_size\": 1000,\n", - " \"hidden_size\": 256, # hidden size for the 
LSTM\n", - " \"dropout_prob\": 0.3,\n", - " \"num_lstm_layers\": 3,\n", - "}" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Download or use next token prediction dataset\n", - "This example uses the dataset from [HuggingFace](https://huggingface.co/datasets/Na0s/Next_Token_Prediction_dataset) (HF).\n", - "\n", - "You can increase the size of the dataset to test the logging capabilities of Neptune. Note that increasing the size will increase the time needed for the dataset to download. The current setup only downloads the first parquet file from the Hugging Face public dataset.\n", - "\n", - "The validation dataset is also reduced to decrease the training loop execution time. To increase the validation size, change the `test_size` key-value pair in the `train_test_split()` method from HuggingFace." - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "# For the example, download a random subset of 10% of the original dataset\n", - "base_url = \"https://huggingface.co/datasets/Na0s/Next_Token_Prediction_dataset/resolve/main/data/\"\n", - "data_files = {\n", - " \"train\": base_url\n", - " + \"train-00001-of-00067.parquet\", # download only the first 10 files from the HF dataset\n", - " \"validation\": base_url + \"validation-00000-of-00001.parquet\",\n", - "} # download the complete validation dataset\n", - "\n", - "data_subset = load_dataset(\"parquet\", data_files=data_files, num_proc=4)\n", - "# validation_subset = load_dataset(\"parquet\", data_files = {\"validation\": base_url + \"validation-00000-of-00001.parquet\"}, num_proc=4, split=[\"validation[:5%]\"])\n", - "validation_subset = data_subset.get(\"validation\").train_test_split(test_size=0.1)\n", - "print(\n", - " f\"Training samples: {data_subset['train'].num_rows} \\nValidation samples: {validation_subset['test'].num_rows}\"\n", - ")" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 
Create `DataLoader` objects\n", - "To execute the models with PyTorch, convert the training and validation datasets to tensors. Then, set up `DataLoader` for easier batching in the training loop.\n", - "\n", - "The model architecture requires the vocabulary size as an input and this is why we calculate the max token from the dataset." - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "train_subset = data_subset[\"train\"].with_format(\n", - " type=\"torch\", columns=[\"text\", \"input_ids\", \"labels\"]\n", - ") # HF provides methods to convert data types to tensors\n", - "validation_subset = validation_subset[\"test\"].with_format(\n", - " type=\"torch\", columns=[\"text\", \"input_ids\", \"labels\"]\n", - ") # HF provides methods to convert data types to tensors\n", - "\n", - "train_dataloader = DataLoader(train_subset, batch_size=params[\"batch_size\"], shuffle=True)\n", - "val_dataloader = DataLoader(validation_subset, batch_size=params[\"batch_size\"], shuffle=True)\n", - "\n", - "# Determine the vocab size of the dataset\n", - "# Flatten the list of tokenized sentences into one long list of token IDs\n", - "vocab_size = (\n", - " max([token for sentence in data_subset[\"train\"][\"input_ids\"] for token in sentence]) + 1\n", - ")\n", - "params[\"vocab_size\"] = vocab_size\n", - "print(f\"Vocabulary size: {vocab_size}\")" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### Define PyTorch model architecture and helpers\n", - "Define a simple LLM model architecture using PyTorch. 
Since this is a text-based example, we use an embedding layer, a LSTM layer, and a fully connected layer.\n", - "\n", - "You can adjust this architecture to your needs and increase its size when testing the workflow:\n", - "- To increase the size of the LSTM layers, change the `num_layers` parameter in the parameters dictionary.\n", - "- To increase the number of fully connected layers, update the mode architecture itself." - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "# Define the simple LLM model with LSTM\n", - "class SimpleLLM(nn.Module):\n", - " def __init__(self, vocab_size, embed_size, hidden_size, num_layers):\n", - " super(SimpleLLM, self).__init__()\n", - " self.embedding = nn.Embedding(vocab_size, embed_size)\n", - " self.lstm = nn.LSTM(embed_size, hidden_size, num_layers=num_layers, batch_first=True)\n", - " self.fc1 = nn.Linear(hidden_size, vocab_size)\n", - "\n", - " def forward(self, x):\n", - " x = self.embedding(x)\n", - " lstm_out, _ = self.lstm(x) # LSTM returns output and hidden/cell state tuple\n", - " out = self.fc1(lstm_out) # Use the last output from the LSTM\n", - " return out\n", - "\n", - "\n", - "# Function to evaluate the model after each epoch/step\n", - "def evaluate(model, val_dataloader, criterion, device, vocab_size):\n", - " model.eval() # Set the model to evaluation mode\n", - " total_loss = 0\n", - " with torch.no_grad(): # Disable gradient calculation for validation\n", - " for batch in val_dataloader:\n", - " input_ids = batch[\"input_ids\"].to(device)\n", - " labels = batch[\"labels\"].to(device)\n", - "\n", - " # Forward pass for validation\n", - " logits = model(input_ids) # Shape: (batch_size, seq_len, vocab_size)\n", - "\n", - " # Calculate the loss\n", - " loss = criterion(logits.view(-1, vocab_size), labels.view(-1))\n", - " total_loss += loss.item()\n", - "\n", - " avg_val_loss = total_loss / len(val_dataloader)\n", - " return avg_val_loss" - ] - }, - 
{ - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### Setup tracking class\n", - "\n", - "This section intializes the `TorchWatcher` class:\n", - "- It accepts a PyTorch model object as an input. \n", - "- It allows you to capture the **parameters**, **activations** and **gradients** from each layer.\n", - "- Specify which tensor statistics to capture.\n", - "\n", - "See a pseudo implementation:\n", - "\n", - "```python\n", - "from TorchWatcher import TorchWatcher\n", - "\n", - "model = YourModel()\n", - "watcher = TorchWatcher(model, run) # Uses default mean() statistic\n", - "\n", - "# Training loop\n", - "for epoch in range(3):\n", - " # Forward pass\n", - " output = model(x_batch)\n", - " loss = criterion(output, y_batch)\n", - "\n", - " # Backward pass\n", - " optimizer.zero_grad()\n", - " loss.backward()\n", - " optimizer.step()\n", - "\n", - " # Track metrics after the forward and backward passes\n", - " watcher.watch(step=epoch)\n", - "```" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "from TorchWatcher import TorchWatcher" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Set up model training" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### Initialize Neptune run object and log hyperparameters" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "from neptune_scale import Run\n", - "from uuid import uuid4\n", - "\n", - "custom_run_id = f\"pytorch-text-{uuid4()}\" # Create your own custom run_id\n", - "experiment_name = \"pytorch-text\" # Create a run that is the head of an experiment. 
This will also be used for forking.\n", - "\n", - "run = Run(\n", - " run_id=custom_run_id,\n", - " experiment_name=experiment_name,\n", - ")\n", - "\n", - "run.log_configs(\n", - " {\n", - " \"config/learning_rate\": params[\"learning_rate\"],\n", - " \"config/optimizer\": params[\"optimizer\"],\n", - " \"config/batch_size\": params[\"batch_size\"],\n", - " \"config/epochs\": params[\"epochs\"],\n", - " \"config/num_lstm_layers\": params[\"num_lstm_layers\"],\n", - " \"data/vocab_size\": params[\"vocab_size\"],\n", - " \"data/embed_size\": params[\"embed_size\"],\n", - " }\n", - ")\n", - "\n", - "run.add_tags(tags=[params[\"optimizer\"]], group_tags=True)\n", - "run.add_tags(tags=[\"text\", \"LLM\", \"Simple\"])\n", - "\n", - "print(run.get_experiment_url())" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### Execute model training loop\n", - "\n", - "In the training loop we call the `watch()` method from the `TorchWatcher` package after the backward and forward passes to track our parameters, gradients and activations with a combination of tensor statistics. 
" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "# Initialize model and optimizer\n", - "model = SimpleLLM(\n", - " params[\"vocab_size\"], params[\"embed_size\"], params[\"hidden_size\"], params[\"num_lstm_layers\"]\n", - ")\n", - "optimizer = optim.Adam(model.parameters(), lr=params[\"learning_rate\"])\n", - "criterion = nn.CrossEntropyLoss(\n", - " ignore_index=-100\n", - ") # Ignore the buffering index of -100 in the dataset\n", - "\n", - "# Define watcher class\n", - "watcher = TorchWatcher(\n", - " model=model, run=run, tensor_stats=[\"mean\", \"norm\"], base_namespace=\"debug_metrics\"\n", - ")\n", - "\n", - "device = torch.device(\"cuda\" if torch.cuda.is_available() else \"cpu\")\n", - "model.to(device)\n", - "step_counter = 0\n", - "\n", - "# Training loop\n", - "for epoch in range(params[\"epochs\"]):\n", - " total_loss = 0\n", - " for batch in train_dataloader:\n", - " model.train()\n", - " step_counter += 1\n", - "\n", - " input_ids = batch[\"input_ids\"].to(device)\n", - " labels = batch[\"labels\"].to(device)\n", - "\n", - " optimizer.zero_grad()\n", - "\n", - " # Forward pass\n", - " logits = model(input_ids)\n", - "\n", - " # Compute the loss (ignore padding tokens by masking labels)\n", - " loss = criterion(logits.view(-1, vocab_size), labels.view(-1))\n", - "\n", - " # Backward pass and optimization\n", - " loss.backward()\n", - " optimizer.step()\n", - "\n", - " total_loss += loss.item()\n", - " print(f\"Step {step_counter} / {len(train_dataloader)}, Loss: {loss.item()}\")\n", - "\n", - " # Call watch() method in loop determing which layer-wise metrics to watch\n", - " watcher.watch(step=step_counter, track_activations=False, track_parameters=False)\n", - "\n", - " if step_counter % 50 == 0: # Log validation loss at every 50 steps\n", - " val_loss = evaluate(model, val_dataloader, criterion, device, vocab_size)\n", - "\n", - " run.log_metrics(\n", - " data={\n", - " 
\"metrics/train/loss\": loss.item(),\n", - " \"metrics/validation/loss\": val_loss,\n", - " \"epoch/value\": epoch,\n", - " },\n", - " step=step_counter,\n", - " )\n", - " else: # Log training loss and debugging metrics for each step\n", - " run.log_metrics(\n", - " data={\"metrics/train/loss\": loss.item(), \"epoch/value\": epoch},\n", - " step=step_counter,\n", - " )\n", - "\n", - " print(f\"Epoch {epoch + 1}, Loss: {total_loss / len(train_dataloader)}\")\n", - "\n", - "# Close run to ensure all operations are processed\n", - "run.close()" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## What's next?\n", - "While the model is training, you can start using the Neptune web app to browse your metrics and create custom analyses and visualizations:\n", - "1. To visualize the large number of metrics being logged in near real time, navigate to the **Charts** tab of the active run.\n", - "2. Filter the metrics using the [advanced regex searching capabilities](https://docs-beta.neptune.ai/charts#filtering-charts). For example, enter `.*gradient+.*fc\\d` in the search bar. This query filters all metrics for the gradients of the fully connected layers. The more FC layers, the more charts will appear.\n", - "3. Export the filter to a [dashboard](https://docs-beta.neptune.ai/custom_dashboard). The saved dashboard will now only display these metrics during training.\n", - "4. Use the [dynamic metric selection](https://docs-beta.neptune.ai/chart_widget#dynamic-metric-selection) and update the chart widget to display all fully connected layers gradients in one chart. Again, use the `.*gradient+.*fc\\d` query.\n", - "5. 
Create a [custom report](https://docs-beta.neptune.ai/reports) to outline the model training, global metrics, debugging metrics, and more.\n", - "\n", - "See also [a generic example of the training result](https://scale.neptune.ai/o/examples/org/LLM-Pretraining/reports/9e6a2cad-77e7-42df-9d64-28f07d37e908).\n" - ] - } - ], - "metadata": { - "kernelspec": { - "display_name": "neptune_scale_py_312_base", - "language": "python", - "name": "python3" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.12.3" - } - }, - "nbformat": 4, - "nbformat_minor": 2 -} diff --git a/integrations-and-supported-tools/pytorch/notebooks/torch_watcher_example.py b/integrations-and-supported-tools/pytorch/notebooks/torch_watcher_example.py deleted file mode 100644 index 1addce3..0000000 --- a/integrations-and-supported-tools/pytorch/notebooks/torch_watcher_example.py +++ /dev/null @@ -1,144 +0,0 @@ -import numpy as np -import torch -import torch.nn as nn -from neptune_scale import Run -from TorchWatcher import TorchWatcher - - -# Define a simple neural network -class SimpleNet(nn.Module): - def __init__(self): - super().__init__() - self.fc1 = nn.Linear(10, 5) - self.fc2 = nn.Linear(5, 1) - self.relu = nn.ReLU() - - def forward(self, x): - x = self.relu(self.fc1(x)) - x = self.fc2(x) - return x - - -def generate_data(n_samples=1000): - """Generate synthetic data for a regression task.""" - # Generate random input features - X = torch.randn(n_samples, 10) - - # Create target values with some non-linear relationship - y = (X[:, 0] ** 2 + 0.5 * X[:, 1] + 0.1 * torch.sum(X[:, 2:], dim=1)).unsqueeze(1) - - # Add some noise - y += 0.1 * torch.randn_like(y) - - return X, y - - -def train_model(model, X_train, y_train, X_val, y_val, watcher, n_epochs=50, batch_size=32): - """Training function 
demonstrating different ways to use TorchWatcher.""" - optimizer = torch.optim.Adam(model.parameters(), lr=0.01) - criterion = nn.MSELoss() - n_batches = len(X_train) // batch_size - - for epoch in range(n_epochs): - model.train() - train_loss = 0.0 - - # Training batches - for i in range(n_batches): - start_idx = i * batch_size - end_idx = start_idx + batch_size - - x_batch = X_train[start_idx:end_idx] - y_batch = y_train[start_idx:end_idx] - - # Forward pass - output = model(x_batch) - loss = criterion(output, y_batch) - - # Backward pass - optimizer.zero_grad() - loss.backward() - optimizer.step() - - # Track metrics during training - if epoch < 5: # First 5 epochs: track everything - watcher.watch(step=epoch * n_batches + i, namespace="train") - else: # Later epochs: track only gradients for efficiency - watcher.watch( - step=epoch * n_batches + i, - track_activations=False, - track_parameters=False, - track_gradients=True, - namespace="train", - ) - - train_loss += loss.item() - - # Average training loss - train_loss /= n_batches - - # Validation - model.eval() - with torch.no_grad(): - val_output = model(X_val) - val_loss = criterion(val_output, y_val) - - # Track metrics during validation - watcher.watch( - step=epoch, - track_activations=True, - track_gradients=False, - track_parameters=False, - namespace="validation", - ) - - # Log metrics - watcher.run.log_metrics( - data={"train/loss": train_loss, "val/loss": val_loss.item()}, step=epoch - ) - - if (epoch + 1) % 10 == 0: - print( - f"Epoch [{epoch+1}/{n_epochs}], " - f"Train Loss: {train_loss:.4f}, " - f"Val Loss: {val_loss.item():.4f}" - ) - - -def main(): - # Initialize Neptune run - run = Run( - experiment_name="torch-watcher-example", - ) - - # Generate data - X_train, y_train = generate_data(n_samples=1000) - X_val, y_val = generate_data(n_samples=200) - - # Create model and watcher - model = SimpleNet() - - # Initialize watcher with specific layer types to track - watcher = TorchWatcher( - model, - 
run, - track_layers=[nn.Linear, nn.ReLU], # Track only Linear and ReLU layers - tensor_stats=["mean", "norm"], # Track mean and norm statistics - base_namespace="model_metrics", # Default namespace for all metrics - ) - - # Train the model - print("\nTraining with TorchWatcher:") - print("- Tracking Linear and ReLU layers") - print("- Computing mean and norm statistics") - print("- Using 'model_metrics' as base namespace") - print("- Full tracking during first 5 epochs with 'train/model_metrics' namespace") - print("- Gradient-only tracking during later epochs with 'train/model_metrics' namespace") - print("- Activation-only tracking during validation with 'validation/model_metrics' namespace") - - train_model(model, X_train, y_train, X_val, y_val, watcher) - watcher.run.close() - - -if __name__ == "__main__": - main() From ba14f8f4d1aba47ffc51ae1509d40f908a1f2e1e Mon Sep 17 00:00:00 2001 From: LeoRoccoBreedt Date: Wed, 16 Apr 2025 14:51:49 +0200 Subject: [PATCH 104/125] chore: update test yaml --- .github/workflows/test-notebooks.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/test-notebooks.yml b/.github/workflows/test-notebooks.yml index 06d5be7..ebf686b 100644 --- a/.github/workflows/test-notebooks.yml +++ b/.github/workflows/test-notebooks.yml @@ -21,7 +21,7 @@ jobs: python-version: ["3.9", "3.10", "3.11", "3.12", "3.13"] notebooks: # Add in alphabetical order - how-to-guides/hpo/notebooks/Neptune_HPO.ipynb - - integrations-and-supported-tools/pytorch/notebooks/pytorch_text_model_debugging.ipynb + - how-to-guides/debug-model-training-runs/debug_training_runs.ipynb os: ["${{ inputs.os }}"] steps: - uses: actions/checkout@main From 1ab2ed0eeb5c72f464c468a3440e42e25dc53532 Mon Sep 17 00:00:00 2001 From: LeoRoccoBreedt Date: Fri, 2 May 2025 11:35:47 +0200 Subject: [PATCH 105/125] refactor: use synthetic simulation data in example to illustrate the point of analyzing debugging metrics --- .../debug_training_runs.ipynb | 301 
+++++++----------- 1 file changed, 111 insertions(+), 190 deletions(-) diff --git a/how-to-guides/debug-model-training-runs/debug_training_runs.ipynb b/how-to-guides/debug-model-training-runs/debug_training_runs.ipynb index 67874ba..79b5857 100644 --- a/how-to-guides/debug-model-training-runs/debug_training_runs.ipynb +++ b/how-to-guides/debug-model-training-runs/debug_training_runs.ipynb @@ -32,13 +32,11 @@ "3. Analyze the metrics in Neptune's UI to **debug training issues**\n", "\n", "Step through a pre-configured report:\n", - "\n", + "\n", " \"Explore\n", "\n", "\n", - "_Note: This is a code recipe that you can adapt for your own model training needs._\n", - "\n", - "If you are using Google Colab to run this example - please ensure that you update the runtime to use the free available GPU." + "_Note: This is a code recipe that you can adapt for your own model training needs._" ] }, { @@ -89,50 +87,27 @@ "outputs": [], "source": [ "# Install dependencies\n", - "! pip install -qU neptune_scale torch datasets\n", - "\n", - "import torch" + "! pip install -qU neptune_scale" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ - "### Initialize parameters" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "params = {\n", - " \"optimizer\": \"Adam\",\n", - " \"batch_size\": 8, # Can be increased to 32 when training via Google Colab\n", - " \"learning_rate\": 0.01, # A smaller lr can be used\n", - " \"epochs\": 5,\n", - " \"device\": torch.device(\"cuda\" if torch.cuda.is_available() else \"cpu\"),\n", - " \"input_features\": 256,\n", - " \"embed_size\": 1000,\n", - " \"hidden_size\": 256, # hidden size for the LSTM\n", - "}" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### Setup data, model and other dependencies\n", + "### Setup training simulation\n", + "\n", + "This tutorial uses a simulated training scenario to demonstrate Neptune's debugging capabilities. 
The simulation generates synthetic training metrics that mimic real model training, including:\n", + "- Gradually decreasing loss\n", + "- Increasing accuracy\n", + "- Random noise and occasional spikes to mimic gradient norm explosions\n", + "- Per-layer gradient norms that fluctuate naturally and spike during loss anomalies\n", "\n", "In the next cell, we'll:\n", - "1. Import required PyTorch and HuggingFace libraries\n", - "2. Download and load a [next token prediction dataset](https://huggingface.co/datasets/Na0s/Next_Token_Prediction_dataset) from HuggingFace\n", - "3. Create PyTorch DataLoaders for training\n", - "4. Calculate vocabulary size for the model\n", - "5. Define a multilayer model for training\n", + "1. Import required libraries (numpy, json, etc.) \n", + "2. Create a `TrainingMetrics` class that simulates training progression\n", + "3. Track gradient norms for 20 layers to help identify optimization issues\n", "\n", - "_You can modify this setup to use your own data and model architecture._" + "_You can replace this simulation with your actual model training code._" ] }, { @@ -141,110 +116,84 @@ "metadata": {}, "outputs": [], "source": [ - "import torch\n", - "import torch.nn as nn\n", - "import torch.optim as optim\n", - "from datasets import load_dataset\n", - "from torch.utils.data import DataLoader\n", - "\n", - "# Download dataset\n", - "base_url = \"https://huggingface.co/datasets/Na0s/Next_Token_Prediction_dataset/resolve/main/data/\"\n", - "data_files = {\n", - " \"train\": base_url + \"train-00001-of-00067.parquet\",\n", - " \"validation\": base_url + \"validation-00000-of-00001.parquet\",\n", + "import numpy as np\n", + "import json\n", + "from typing import Dict, Any\n", + "import math\n", + "\n", + "\n", + "class TrainingMetrics:\n", + " def __init__(\n", + " self,\n", + " initial_loss: float = 1.0,\n", + " initial_accuracy: float = 0.0,\n", + " noise_scale: float = 0.1,\n", + " loss_trend: float = -0.1,\n", + " accuracy_trend: float = 
0.1,\n", + " ):\n", + " self.previous_loss = initial_loss\n", + " self.previous_accuracy = initial_accuracy\n", + " self.noise_scale = noise_scale\n", + " self.loss_trend = loss_trend\n", + " self.accuracy_trend = accuracy_trend\n", + " self.step_count = 1\n", + "\n", + " # Random convergence points\n", + " self.target_loss = np.random.uniform(0.0, 1.0)\n", + " self.target_accuracy = np.random.uniform(0.0, 1.0)\n", + "\n", + " def update_metrics(self, spikes=True) -> tuple[float, float, dict]:\n", + " \"\"\"Update loss and accuracy using a random walk process with logarithmic trends\"\"\"\n", + " self.step_count += 1\n", + "\n", + " # Base loss update with normal progression\n", + " decay_factor = math.log(1 + abs(self.previous_loss - self.target_loss))\n", + " loss_step = self.noise_scale * np.random.randn() + self.loss_trend * decay_factor\n", + " loss_step *= 1 + 0.1 * math.log(self.step_count)\n", + "\n", + " # Initialize gradient norms for 20 layers with small random values\n", + " gradient_norms = {\n", + " f\"debug/grad_norm/layer_{i}\": np.random.uniform(0.01, 0.1) for i in range(20)\n", + " }\n", + "\n", + " # Check for spike/anomaly (0.01% chance) after step 10k\n", + " if spikes and np.random.random() < 0.0001: # and self.step_count > 10000:\n", + " # Generate a sudden spike, independent of current loss\n", + " spike_magnitude = np.random.uniform(1, 10) # Random spike between 1x and 5x\n", + " current_loss = spike_magnitude\n", + "\n", + " # When there's a loss spike, create a corresponding spike in a random layer\n", + " spike_layer = np.random.randint(0, 20)\n", + " gradient_norms[f\"debug/grad_norm/layer_{spike_layer}\"] = (\n", + " spike_magnitude * np.random.uniform(0.8, 1.2)\n", + " )\n", + " else:\n", + " # Normal progression\n", + " current_loss = max(0.0, self.previous_loss + loss_step)\n", + " self.previous_loss = current_loss\n", + "\n", + " current_accuracy = 1 - current_loss\n", + "\n", + " return current_loss, current_accuracy, 
gradient_norms\n", + "\n", + "\n", + "# Training parameters\n", + "params = {\n", + " \"batch_size\": 32,\n", + " \"epochs\": 100,\n", + " \"noise_scale\": np.random.uniform(0.0005, 0.002),\n", + " \"loss_trend\": -np.random.uniform(0, 0.0002),\n", + " \"accuracy_trend\": np.random.uniform(0, 0.0002),\n", "}\n", "\n", - "# Load dataset\n", - "data_subset = load_dataset(\"parquet\", data_files=data_files, num_proc=4)\n", - "# validation_subset = data_subset.get(\"validation\").train_test_split(test_size=0.1)\n", - "\n", - "print(f\"Training samples: {data_subset['train'].num_rows}\")\n", - "# print(f\"Validation samples: {validation_subset['test'].num_rows}\")\n", - "\n", - "# Convert to PyTorch format\n", - "train_subset = data_subset[\"train\"].with_format(\n", - " type=\"torch\", columns=[\"text\", \"input_ids\", \"labels\"]\n", - ")\n", - "\"\"\"validation_subset = validation_subset[\"test\"].with_format(\n", - " type=\"torch\", columns=[\"text\", \"input_ids\", \"labels\"]\n", - ")\"\"\"\n", - "\n", - "# Create dataloaders\n", - "train_dataloader = DataLoader(train_subset, batch_size=params[\"batch_size\"], shuffle=True)\n", - "# val_dataloader = DataLoader(validation_subset, batch_size=params[\"batch_size\"], shuffle=True)\n", - "\n", - "# Calculate vocabulary size\n", - "params[\"vocab_size\"] = (\n", - " max([token for sentence in data_subset[\"train\"][\"input_ids\"] for token in sentence]) + 1\n", - ")\n", - "print(f\"Vocabulary size: {params['vocab_size']}\")\n", - "\n", - "\n", - "# Create model\n", - "class MultilayerModel(nn.Module):\n", - " \"\"\"A larger language model with multiple LSTM and fully connected layers.\"\"\"\n", - "\n", - " def __init__(self, vocab_size, embed_size, hidden_size):\n", - " super(MultilayerModel, self).__init__()\n", - " self.embedding = nn.Embedding(vocab_size, embed_size)\n", - "\n", - " # Create multiple LSTM layers\n", - " self.lstm_layers = nn.ModuleList(\n", - " [\n", - " nn.LSTM(\n", - " hidden_size if i > 0 else 
embed_size,\n", - " hidden_size,\n", - " num_layers=1,\n", - " batch_first=True,\n", - " )\n", - " for i in range(10) # 10 LSTM layers\n", - " ]\n", - " )\n", - "\n", - " # Create multiple fully connected layers\n", - " self.fc_layers = nn.ModuleList(\n", - " [nn.Linear(hidden_size, hidden_size) for _ in range(9)] # 9 FC layers\n", - " )\n", - "\n", - " # Final layer to project back to vocab size\n", - " self.final_layer = nn.Linear(hidden_size, vocab_size)\n", - "\n", - " # Add dropout for regularization\n", - " self.dropout = nn.Dropout(0.1)\n", - "\n", - " def forward(self, x):\n", - " # Embedding layer\n", - " x = self.embedding(x)\n", - "\n", - " # Process through LSTM layers\n", - " for lstm in self.lstm_layers:\n", - " x, _ = lstm(x)\n", - " x = self.dropout(x)\n", - "\n", - " # Process through FC layers\n", - " for fc in self.fc_layers:\n", - " x = fc(x)\n", - " x = self.dropout(x)\n", - " x = torch.relu(x)\n", - "\n", - " # Final projection\n", - " out = self.final_layer(x)\n", - " return out\n", - "\n", - "\n", - "model = MultilayerModel(\n", - " params[\"vocab_size\"],\n", - " params[\"embed_size\"],\n", - " params[\"hidden_size\"],\n", - ")\n", - "model.to(params[\"device\"])\n", - "print(f\"Model created: {model.__class__.__name__}\")\n", - "optimizer = optim.Adam(model.parameters(), lr=params[\"learning_rate\"])\n", - "print(f\"Optimizer: {optimizer.__class__.__name__}\")\n", - "criterion = nn.CrossEntropyLoss(\n", - " ignore_index=-100\n", - ") # Ignore the buffering index of -100 in the dataset\n", - "print(f\"Criterion: {criterion.__class__.__name__}\")" + "# Initialize metrics\n", + "metrics = TrainingMetrics(\n", + " initial_loss=np.random.uniform(3, 7),\n", + " initial_accuracy=0,\n", + " noise_scale=params[\"noise_scale\"],\n", + " loss_trend=params[\"loss_trend\"],\n", + " accuracy_trend=params[\"accuracy_trend\"],\n", + ")" ] }, { @@ -279,7 +228,7 @@ "from neptune_scale import Run\n", "\n", "run = Run(\n", - " 
experiment_name=\"pytorch-text\", # Create a run that is the head of an experiment.\n", + " experiment_name=\"debugging-gradient-norms\", # Create a run that is the head of an experiment.\n", ")\n", "\n", "print(run.get_experiment_url())" @@ -300,15 +249,12 @@ "source": [ "run.log_configs(\n", " {\n", - " \"config/learning_rate\": params[\"learning_rate\"],\n", - " \"config/optimizer\": params[\"optimizer\"],\n", " \"config/batch_size\": params[\"batch_size\"],\n", " \"config/epochs\": params[\"epochs\"],\n", - " \"data/embed_size\": params[\"embed_size\"],\n", " }\n", ")\n", "\n", - "run.add_tags(tags=[\"text\", \"LLM\", \"multi-layer\", params[\"optimizer\"]])\n", + "run.add_tags(tags=[\"debug\", \"gradient-norm\"])\n", "\n", "print(f\"See configuration parameters:\\n{run.get_experiment_url() + '&detailsTab=metadata'}\")" ] @@ -320,8 +266,8 @@ "### Step 3: _Track gradient norms during training_\n", "\n", "In this training loop, we:\n", - "1. Register backward hooks to capture gradient norms from all model layers with `register_full_backward_hook()`\n", - "2. Track these norms during training to identify potential issues like vanishing/exploding gradients in a dictionary called `debugging_gradient_norms`\n", + "1. Simulate the calculation of `loss`, `accuracy` and `gradient norms`\n", + "2. Track gradient norms during training to identify potential issues like vanishing/exploding gradients in a dictionary called `gradient_norms`\n", "3. Log the gradient norms to Neptune for visualization and analysis using the `log_metrics` method\n", "\n", "This approach allows you to monitor the learning dynamics across your entire model architecture in near real-time." 
@@ -333,45 +279,25 @@ "metadata": {}, "outputs": [], "source": [ - "# Register hooks to track gradients for each layer\n", - "def hook_fn(module, grad_input, grad_output):\n", - " layer_name = next(name for name, mod in model.named_modules() if mod is module)\n", - " if grad_input[0] is not None: # Check if gradients exist\n", - " grad_norm = grad_input[0].norm().item()\n", - " debugging_gradient_norms[f\"debug/gradient/{layer_name}/norm\"] = grad_norm\n", - "\n", - "\n", - "# Define dictionary of metrics to log to Neptune\n", - "debugging_gradient_norms = {}\n", - "# Register hooks once before training\n", - "for name, module in model.named_modules():\n", - " module.register_full_backward_hook(hook_fn)\n", - "\n", - "# Create custom Neptune URLS for tutorial steps\n", - "print(f\"View charts in near real-time:\\n{run.get_experiment_url() + '&detailsTab=charts'}\")\n", - "\n", "step_counter = 0\n", "# Training loop\n", - "for epoch in range(params[\"epochs\"]):\n", - " total_loss = 0\n", - " for batch in train_dataloader:\n", - " model.train()\n", - " step_counter += 1\n", - " input_ids = batch[\"input_ids\"].to(params[\"device\"])\n", - " labels = batch[\"labels\"].to(params[\"device\"])\n", - " optimizer.zero_grad()\n", - " logits = model(input_ids)\n", - " loss = criterion(logits.view(-1, params[\"vocab_size\"]), labels.view(-1))\n", - " loss.backward()\n", - " optimizer.step()\n", - "\n", - " # Log global training loss and layer-wise gradient norms\n", + "for epoch in range(1, params[\"epochs\"]):\n", + " # Process batches\n", + " for batch in range(0, 10000, params[\"batch_size\"]):\n", + " # Update metrics for this batch\n", + " batch_loss, batch_accuracy, gradient_norms = metrics.update_metrics()\n", + "\n", " run.log_metrics(\n", - " data={\"metrics/train/loss\": loss.item(), **debugging_gradient_norms},\n", + " data={\n", + " \"metrics/train/loss\": batch_loss,\n", + " \"metrics/train/accuracy\": batch_accuracy,\n", + " \"epoch\": epoch,\n", + " 
**gradient_norms,\n", + " },\n", " step=step_counter,\n", " )\n", + " step_counter += 1\n", "\n", - "# Close run to ensure all operations are processed\n", "run.close()" ] }, @@ -389,7 +315,7 @@ "\n", "**2. Advanced metric filtering**\n", "- Focus on specific metrics using [advanced regex search](https://docs.neptune.ai/charts#filtering-charts)\n", - "- Example: `gradient & fc & layers.[0-5] & norm` filters gradients for the first 6 fully connected layers\n", + "- Example: `norm & layer_\\d` filter all gradient norms layers\n", "- Perfect for isolating problematic layers or components\n", "\n", "**3. Create custom dashboards**\n", @@ -398,19 +324,14 @@ "- Share with team members\n", "- Ideal for tracking known problematic layers\n", "\n", - "_Example dashboard:_\n", - "\n", - " \"Explore\n", - "\n", - "\n", "**4. Dynamic metric analysis**\n", - "- Group related metrics (e.g., all LSTM layer gradients)\n", + "- Group related metrics (e.g., all layer gradients)\n", "- Create automatically updating charts by [selecting metrics dynamically](https://docs.neptune.ai/chart_widget/#dynamic-metric-selection)\n", "- Quickly identify vanishing/exploding gradients\n", - "- Example query: `(.*gradient)(.*lstm)(.*norm)`\n", + "- Example query: `layer_\\d`\n", "\n", "_Example dashboard:_\n", - "\n", + "\n", " \"Explore\n", "\n", "\n", From 3e423f8e7af38748a417df4c64bc7cc10fad5ff7 Mon Sep 17 00:00:00 2001 From: LeoRoccoBreedt Date: Fri, 2 May 2025 11:50:05 +0200 Subject: [PATCH 106/125] fix: add numpy to be installed as dependency --- .../debug-model-training-runs/debug_training_runs.ipynb | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/how-to-guides/debug-model-training-runs/debug_training_runs.ipynb b/how-to-guides/debug-model-training-runs/debug_training_runs.ipynb index 79b5857..4e4c429 100644 --- a/how-to-guides/debug-model-training-runs/debug_training_runs.ipynb +++ b/how-to-guides/debug-model-training-runs/debug_training_runs.ipynb @@ -87,7 +87,7 @@ 
"outputs": [], "source": [ "# Install dependencies\n", - "! pip install -qU neptune_scale" + "! pip install -qU neptune_scale numpy" ] }, { From 90fa8d98adb860e7743e4fbbc5de12ed91a9da21 Mon Sep 17 00:00:00 2001 From: LeoRoccoBreedt Date: Fri, 2 May 2025 15:00:13 +0200 Subject: [PATCH 107/125] chore: remove unneeded comments --- .../debug-model-training-runs/debug_training_runs.ipynb | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/how-to-guides/debug-model-training-runs/debug_training_runs.ipynb b/how-to-guides/debug-model-training-runs/debug_training_runs.ipynb index 4e4c429..198ae26 100644 --- a/how-to-guides/debug-model-training-runs/debug_training_runs.ipynb +++ b/how-to-guides/debug-model-training-runs/debug_training_runs.ipynb @@ -156,10 +156,10 @@ " f\"debug/grad_norm/layer_{i}\": np.random.uniform(0.01, 0.1) for i in range(20)\n", " }\n", "\n", - " # Check for spike/anomaly (0.01% chance) after step 10k\n", - " if spikes and np.random.random() < 0.0001: # and self.step_count > 10000:\n", + " # Check for spike/anomaly (0.01% chance)\n", + " if spikes and np.random.random() < 0.0001:\n", " # Generate a sudden spike, independent of current loss\n", - " spike_magnitude = np.random.uniform(1, 10) # Random spike between 1x and 5x\n", + " spike_magnitude = np.random.uniform(1, 10)\n", " current_loss = spike_magnitude\n", "\n", " # When there's a loss spike, create a corresponding spike in a random layer\n", From 7e4f3fff0f7f88415e1d239e18a2c0aa4e5a0890 Mon Sep 17 00:00:00 2001 From: LeoRoccoBreedt Date: Thu, 22 May 2025 16:08:39 +0200 Subject: [PATCH 108/125] refactor: update model to PyTorch based --- .../debug_training_runs.ipynb | 259 +++++++++++------- 1 file changed, 159 insertions(+), 100 deletions(-) diff --git a/how-to-guides/debug-model-training-runs/debug_training_runs.ipynb b/how-to-guides/debug-model-training-runs/debug_training_runs.ipynb index 198ae26..cc229b5 100644 --- 
a/how-to-guides/debug-model-training-runs/debug_training_runs.ipynb +++ b/how-to-guides/debug-model-training-runs/debug_training_runs.ipynb @@ -96,104 +96,111 @@ "source": [ "### Setup training simulation\n", "\n", - "This tutorial uses a simulated training scenario to demonstrate Neptune's debugging capabilities. The simulation generates synthetic training metrics that mimic real model training, including:\n", - "- Gradually decreasing loss\n", - "- Increasing accuracy\n", - "- Random noise and occasional spikes to mimic gradient norm explosions\n", - "- Per-layer gradient norms that fluctuate naturally and spike during loss anomalies\n", + "This tutorial uses a simple PyTorch model trained on the MNIST dataset to demonstrate Neptune's debugging capabilities. The training code tracks real training metrics including:\n", + "- Loss values from the CrossEntropyLoss function\n", + "- Gradient norms for each layer to monitor optimization\n", "\n", "In the next cell, we'll:\n", - "1. Import required libraries (numpy, json, etc.) \n", - "2. Create a `TrainingMetrics` class that simulates training progression\n", - "3. Track gradient norms for 20 layers to help identify optimization issues\n", + "1. Import PyTorch libraries and utilities\n", + "2. Create a SimpleModel class with 20 layers\n", + "3. 
Add gradient norm tracking to identify potential issues during training\n", + "\n", + "The setup is not optimized for the best model, but for illustration of using Neptune for debugging the training runs that we create.\n", "\n", "_You can replace this simulation with your actual model training code._" ] }, { "cell_type": "code", - "execution_count": null, + "execution_count": 28, "metadata": {}, - "outputs": [], + "outputs": [ + { + "data": { + "text/plain": [ + "'\\n# Create training step method\\ndef train_step(model, device, data, target, optimizer):\\n\\n data, target = data.to(device), target.to(device)\\n data = data.view(data.size(0), -1) # Flatten the images\\n \\n # Forward pass\\n optimizer.zero_grad()\\n output = model(data)\\n loss = criterion(output, target)\\n \\n # Backward pass\\n loss.backward()\\n \\n # Get gradient norms for monitoring\\n gradient_norms = model.get_gradient_norms()\\n \\n # Update weights\\n optimizer.step()\\n \\n # Track statistics\\n batch_loss = loss.item()'" + ] + }, + "execution_count": 28, + "metadata": {}, + "output_type": "execute_result" + } + ], "source": [ - "import numpy as np\n", - "import json\n", - "from typing import Dict, Any\n", - "import math\n", - "\n", - "\n", - "class TrainingMetrics:\n", - " def __init__(\n", - " self,\n", - " initial_loss: float = 1.0,\n", - " initial_accuracy: float = 0.0,\n", - " noise_scale: float = 0.1,\n", - " loss_trend: float = -0.1,\n", - " accuracy_trend: float = 0.1,\n", - " ):\n", - " self.previous_loss = initial_loss\n", - " self.previous_accuracy = initial_accuracy\n", - " self.noise_scale = noise_scale\n", - " self.loss_trend = loss_trend\n", - " self.accuracy_trend = accuracy_trend\n", - " self.step_count = 1\n", - "\n", - " # Random convergence points\n", - " self.target_loss = np.random.uniform(0.0, 1.0)\n", - " self.target_accuracy = np.random.uniform(0.0, 1.0)\n", - "\n", - " def update_metrics(self, spikes=True) -> tuple[float, float, dict]:\n", - " \"\"\"Update loss 
and accuracy using a random walk process with logarithmic trends\"\"\"\n", - " self.step_count += 1\n", - "\n", - " # Base loss update with normal progression\n", - " decay_factor = math.log(1 + abs(self.previous_loss - self.target_loss))\n", - " loss_step = self.noise_scale * np.random.randn() + self.loss_trend * decay_factor\n", - " loss_step *= 1 + 0.1 * math.log(self.step_count)\n", - "\n", - " # Initialize gradient norms for 20 layers with small random values\n", - " gradient_norms = {\n", - " f\"debug/grad_norm/layer_{i}\": np.random.uniform(0.01, 0.1) for i in range(20)\n", - " }\n", - "\n", - " # Check for spike/anomaly (0.01% chance)\n", - " if spikes and np.random.random() < 0.0001:\n", - " # Generate a sudden spike, independent of current loss\n", - " spike_magnitude = np.random.uniform(1, 10)\n", - " current_loss = spike_magnitude\n", - "\n", - " # When there's a loss spike, create a corresponding spike in a random layer\n", - " spike_layer = np.random.randint(0, 20)\n", - " gradient_norms[f\"debug/grad_norm/layer_{spike_layer}\"] = (\n", - " spike_magnitude * np.random.uniform(0.8, 1.2)\n", - " )\n", - " else:\n", - " # Normal progression\n", - " current_loss = max(0.0, self.previous_loss + loss_step)\n", - " self.previous_loss = current_loss\n", - "\n", - " current_accuracy = 1 - current_loss\n", - "\n", - " return current_loss, current_accuracy, gradient_norms\n", - "\n", + "# Pure PyTorch\n", + "import torch\n", + "import torch.nn as nn\n", + "import torch.optim as optim\n", + "from torch.utils.data import DataLoader\n", + "from torchvision import datasets, transforms\n", + "import time\n", + "\n", + "class SimpleModel(nn.Module):\n", + " def __init__(self, input_size: int = 784, hidden_size: int = 128, output_size: int = 10):\n", + " super().__init__()\n", + " \n", + " layers = []\n", + " layers.append(nn.Linear(input_size, hidden_size))\n", + " layers.append(nn.ReLU())\n", + " \n", + " for _ in range(18):\n", + " 
layers.append(nn.Linear(hidden_size, hidden_size))\n", + " layers.append(nn.ReLU())\n", + "\n", + " layers.append(nn.Linear(hidden_size, output_size))\n", + " \n", + " # Combine all layers into a sequential model\n", + " self.model = nn.Sequential(*layers)\n", + " \n", + " def forward(self, x: torch.Tensor) -> torch.Tensor:\n", + " return self.model(x)\n", + " \n", + " def get_gradient_norms(self) -> dict:\n", + " \"\"\"\n", + " Calculate the L2 norm of gradients for each layer.\n", + " \n", + " Returns:\n", + " dict: Dictionary containing gradient norms for each layer\n", + " \"\"\"\n", + " gradient_norms = {}\n", + " \n", + " # Iterate through all named parameters\n", + " for name, param in self.named_parameters():\n", + " if param.grad is not None:\n", + " # Calculate L2 norm of gradients\n", + " norm = param.grad.norm(2).item()\n", + " # Store in dictionary with a descriptive key\n", + " gradient_norms[f\"debug/L2_grad_norm/{name}\"] = norm\n", + " \n", + " return gradient_norms\n", + " \n", "\n", "# Training parameters\n", "params = {\n", - " \"batch_size\": 32,\n", - " \"epochs\": 100,\n", - " \"noise_scale\": np.random.uniform(0.0005, 0.002),\n", - " \"loss_trend\": -np.random.uniform(0, 0.0002),\n", - " \"accuracy_trend\": np.random.uniform(0, 0.0002),\n", + " \"batch_size\": 512,\n", + " \"epochs\": 10,\n", + " \"lr\": 0.001,\n", + " \"num_layers\": 20, # Configurable number of layers\n", "}\n", "\n", - "# Initialize metrics\n", - "metrics = TrainingMetrics(\n", - " initial_loss=np.random.uniform(3, 7),\n", - " initial_accuracy=0,\n", - " noise_scale=params[\"noise_scale\"],\n", - " loss_trend=params[\"loss_trend\"],\n", - " accuracy_trend=params[\"accuracy_trend\"],\n", - ")" + "# Data transformations\n", + "transform = transforms.Compose([\n", + " transforms.ToTensor(),\n", + " transforms.Normalize((0.1307,), (0.3081,))\n", + "])\n", + "\n", + "# Load MNIST dataset\n", + "train_dataset = datasets.MNIST('./data', train=True, download=True, 
transform=transform)\n", + "# test_dataset = datasets.MNIST('./data', train=False, transform=transform)\n", + "\n", + "train_loader = DataLoader(train_dataset, batch_size=params[\"batch_size\"], shuffle=True)\n", + "# test_loader = DataLoader(test_dataset, batch_size=batch_size)\n", + "\n", + "device = \"cuda\" if torch.cuda.is_available() else \"cpu\"\n", + "# Initialize model, loss function, and optimizer\n", + "model = SimpleModel().to(device)\n", + "criterion = nn.CrossEntropyLoss()\n", + "optimizer = optim.Adam(model.parameters(), lr=params[\"lr\"])" ] }, { @@ -221,9 +228,17 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 29, "metadata": {}, - "outputs": [], + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "https://scale.neptune.ai/leo/pytorch-tutorial/runs/details?runIdentificationKey=debugging-gradient-norms&type=experiment\n" + ] + } + ], "source": [ "from neptune_scale import Run\n", "\n", @@ -243,14 +258,24 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 30, "metadata": {}, - "outputs": [], + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "See configuration parameters:\n", + "https://scale.neptune.ai/leo/pytorch-tutorial/runs/details?runIdentificationKey=debugging-gradient-norms&type=experiment&detailsTab=metadata\n" + ] + } + ], "source": [ "run.log_configs(\n", " {\n", " \"config/batch_size\": params[\"batch_size\"],\n", " \"config/epochs\": params[\"epochs\"],\n", + " \"config/lr\": params[\"lr\"]\n", " }\n", ")\n", "\n", @@ -266,31 +291,65 @@ "### Step 3: _Track gradient norms during training_\n", "\n", "In this training loop, we:\n", - "1. Simulate the calculation of `loss`, `accuracy` and `gradient norms`\n", + "1. Calculate `loss` and ` L2 gradient norms` from the `named_parameters`.\n", "2. 
Track gradient norms during training to identify potential issues like vanishing/exploding gradients in a dictionary called `gradient_norms`\n", "3. Log the gradient norms to Neptune for visualization and analysis using the `log_metrics` method\n", "\n", - "This approach allows you to monitor the learning dynamics across your entire model architecture in near real-time." + "This approach allows you to monitor the learning dynamics across your entire model architecture in near real-time.\n", + "\n", + "_You can also use hooks to capture layer-wise training dynamics and log to Neptune_. " ] }, { "cell_type": "code", - "execution_count": null, + "execution_count": 32, "metadata": {}, - "outputs": [], + "outputs": [ + { + "ename": "KeyboardInterrupt", + "evalue": "", + "output_type": "error", + "traceback": [ + "\u001b[1;31m---------------------------------------------------------------------------\u001b[0m", + "\u001b[1;31mKeyboardInterrupt\u001b[0m Traceback (most recent call last)", + "Cell \u001b[1;32mIn[32], line 6\u001b[0m\n\u001b[0;32m 3\u001b[0m model\u001b[38;5;241m.\u001b[39mtrain()\n\u001b[0;32m 4\u001b[0m running_loss \u001b[38;5;241m=\u001b[39m \u001b[38;5;241m0.0\u001b[39m\n\u001b[1;32m----> 6\u001b[0m \u001b[43m\u001b[49m\u001b[38;5;28;43;01mfor\u001b[39;49;00m\u001b[43m \u001b[49m\u001b[43mbatch_idx\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43m(\u001b[49m\u001b[43mdata\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mtarget\u001b[49m\u001b[43m)\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;129;43;01min\u001b[39;49;00m\u001b[43m \u001b[49m\u001b[38;5;28;43menumerate\u001b[39;49m\u001b[43m(\u001b[49m\u001b[43mtrain_loader\u001b[49m\u001b[43m)\u001b[49m\u001b[43m:\u001b[49m\n\u001b[0;32m 7\u001b[0m \u001b[43m \u001b[49m\u001b[38;5;66;43;03m# Move data to device\u001b[39;49;00m\n\u001b[0;32m 8\u001b[0m \u001b[43m \u001b[49m\u001b[43mdata\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mtarget\u001b[49m\u001b[43m 
\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43m \u001b[49m\u001b[43mdata\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mto\u001b[49m\u001b[43m(\u001b[49m\u001b[43mdevice\u001b[49m\u001b[43m)\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mtarget\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mto\u001b[49m\u001b[43m(\u001b[49m\u001b[43mdevice\u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m 9\u001b[0m \u001b[43m \u001b[49m\u001b[43mdata\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43m \u001b[49m\u001b[43mdata\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mview\u001b[49m\u001b[43m(\u001b[49m\u001b[43mdata\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43msize\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;241;43m0\u001b[39;49m\u001b[43m)\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m-\u001b[39;49m\u001b[38;5;241;43m1\u001b[39;49m\u001b[43m)\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;66;43;03m# Flatten the images\u001b[39;49;00m\n", + "File \u001b[1;32mc:\\Users\\leo.breedt\\miniconda3\\envs\\neptune_scale_py_312_base\\Lib\\site-packages\\torch\\utils\\data\\dataloader.py:708\u001b[0m, in \u001b[0;36m_BaseDataLoaderIter.__next__\u001b[1;34m(self)\u001b[0m\n\u001b[0;32m 705\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_sampler_iter \u001b[38;5;129;01mis\u001b[39;00m \u001b[38;5;28;01mNone\u001b[39;00m:\n\u001b[0;32m 706\u001b[0m \u001b[38;5;66;03m# TODO(https://github.com/pytorch/pytorch/issues/76750)\u001b[39;00m\n\u001b[0;32m 707\u001b[0m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_reset() \u001b[38;5;66;03m# type: ignore[call-arg]\u001b[39;00m\n\u001b[1;32m--> 708\u001b[0m data \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_next_data\u001b[49m\u001b[43m(\u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m 709\u001b[0m 
\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_num_yielded \u001b[38;5;241m+\u001b[39m\u001b[38;5;241m=\u001b[39m \u001b[38;5;241m1\u001b[39m\n\u001b[0;32m 710\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m (\n\u001b[0;32m 711\u001b[0m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_dataset_kind \u001b[38;5;241m==\u001b[39m _DatasetKind\u001b[38;5;241m.\u001b[39mIterable\n\u001b[0;32m 712\u001b[0m \u001b[38;5;129;01mand\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_IterableDataset_len_called \u001b[38;5;129;01mis\u001b[39;00m \u001b[38;5;129;01mnot\u001b[39;00m \u001b[38;5;28;01mNone\u001b[39;00m\n\u001b[0;32m 713\u001b[0m \u001b[38;5;129;01mand\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_num_yielded \u001b[38;5;241m>\u001b[39m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_IterableDataset_len_called\n\u001b[0;32m 714\u001b[0m ):\n", + "File \u001b[1;32mc:\\Users\\leo.breedt\\miniconda3\\envs\\neptune_scale_py_312_base\\Lib\\site-packages\\torch\\utils\\data\\dataloader.py:764\u001b[0m, in \u001b[0;36m_SingleProcessDataLoaderIter._next_data\u001b[1;34m(self)\u001b[0m\n\u001b[0;32m 762\u001b[0m \u001b[38;5;28;01mdef\u001b[39;00m \u001b[38;5;21m_next_data\u001b[39m(\u001b[38;5;28mself\u001b[39m):\n\u001b[0;32m 763\u001b[0m index \u001b[38;5;241m=\u001b[39m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_next_index() \u001b[38;5;66;03m# may raise StopIteration\u001b[39;00m\n\u001b[1;32m--> 764\u001b[0m data \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_dataset_fetcher\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mfetch\u001b[49m\u001b[43m(\u001b[49m\u001b[43mindex\u001b[49m\u001b[43m)\u001b[49m \u001b[38;5;66;03m# may raise StopIteration\u001b[39;00m\n\u001b[0;32m 765\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_pin_memory:\n\u001b[0;32m 766\u001b[0m data 
\u001b[38;5;241m=\u001b[39m _utils\u001b[38;5;241m.\u001b[39mpin_memory\u001b[38;5;241m.\u001b[39mpin_memory(data, \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_pin_memory_device)\n", + "File \u001b[1;32mc:\\Users\\leo.breedt\\miniconda3\\envs\\neptune_scale_py_312_base\\Lib\\site-packages\\torch\\utils\\data\\_utils\\fetch.py:52\u001b[0m, in \u001b[0;36m_MapDatasetFetcher.fetch\u001b[1;34m(self, possibly_batched_index)\u001b[0m\n\u001b[0;32m 50\u001b[0m data \u001b[38;5;241m=\u001b[39m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mdataset\u001b[38;5;241m.\u001b[39m__getitems__(possibly_batched_index)\n\u001b[0;32m 51\u001b[0m \u001b[38;5;28;01melse\u001b[39;00m:\n\u001b[1;32m---> 52\u001b[0m data \u001b[38;5;241m=\u001b[39m [\u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mdataset\u001b[49m\u001b[43m[\u001b[49m\u001b[43midx\u001b[49m\u001b[43m]\u001b[49m \u001b[38;5;28;01mfor\u001b[39;00m idx \u001b[38;5;129;01min\u001b[39;00m possibly_batched_index]\n\u001b[0;32m 53\u001b[0m \u001b[38;5;28;01melse\u001b[39;00m:\n\u001b[0;32m 54\u001b[0m data \u001b[38;5;241m=\u001b[39m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mdataset[possibly_batched_index]\n", + "File \u001b[1;32mc:\\Users\\leo.breedt\\miniconda3\\envs\\neptune_scale_py_312_base\\Lib\\site-packages\\torchvision\\datasets\\mnist.py:146\u001b[0m, in \u001b[0;36mMNIST.__getitem__\u001b[1;34m(self, index)\u001b[0m\n\u001b[0;32m 143\u001b[0m img \u001b[38;5;241m=\u001b[39m Image\u001b[38;5;241m.\u001b[39mfromarray(img\u001b[38;5;241m.\u001b[39mnumpy(), mode\u001b[38;5;241m=\u001b[39m\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mL\u001b[39m\u001b[38;5;124m\"\u001b[39m)\n\u001b[0;32m 145\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mtransform \u001b[38;5;129;01mis\u001b[39;00m \u001b[38;5;129;01mnot\u001b[39;00m \u001b[38;5;28;01mNone\u001b[39;00m:\n\u001b[1;32m--> 146\u001b[0m img 
\u001b[38;5;241m=\u001b[39m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mtransform\u001b[49m\u001b[43m(\u001b[49m\u001b[43mimg\u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m 148\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mtarget_transform \u001b[38;5;129;01mis\u001b[39;00m \u001b[38;5;129;01mnot\u001b[39;00m \u001b[38;5;28;01mNone\u001b[39;00m:\n\u001b[0;32m 149\u001b[0m target \u001b[38;5;241m=\u001b[39m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mtarget_transform(target)\n", + "File \u001b[1;32mc:\\Users\\leo.breedt\\miniconda3\\envs\\neptune_scale_py_312_base\\Lib\\site-packages\\torchvision\\transforms\\transforms.py:95\u001b[0m, in \u001b[0;36mCompose.__call__\u001b[1;34m(self, img)\u001b[0m\n\u001b[0;32m 93\u001b[0m \u001b[38;5;28;01mdef\u001b[39;00m \u001b[38;5;21m__call__\u001b[39m(\u001b[38;5;28mself\u001b[39m, img):\n\u001b[0;32m 94\u001b[0m \u001b[38;5;28;01mfor\u001b[39;00m t \u001b[38;5;129;01min\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mtransforms:\n\u001b[1;32m---> 95\u001b[0m img \u001b[38;5;241m=\u001b[39m \u001b[43mt\u001b[49m\u001b[43m(\u001b[49m\u001b[43mimg\u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m 96\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m img\n", + "File \u001b[1;32mc:\\Users\\leo.breedt\\miniconda3\\envs\\neptune_scale_py_312_base\\Lib\\site-packages\\torch\\nn\\modules\\module.py:1739\u001b[0m, in \u001b[0;36mModule._wrapped_call_impl\u001b[1;34m(self, *args, **kwargs)\u001b[0m\n\u001b[0;32m 1737\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_compiled_call_impl(\u001b[38;5;241m*\u001b[39margs, \u001b[38;5;241m*\u001b[39m\u001b[38;5;241m*\u001b[39mkwargs) \u001b[38;5;66;03m# type: ignore[misc]\u001b[39;00m\n\u001b[0;32m 1738\u001b[0m \u001b[38;5;28;01melse\u001b[39;00m:\n\u001b[1;32m-> 1739\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m 
\u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_call_impl\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n", + "File \u001b[1;32mc:\\Users\\leo.breedt\\miniconda3\\envs\\neptune_scale_py_312_base\\Lib\\site-packages\\torch\\nn\\modules\\module.py:1750\u001b[0m, in \u001b[0;36mModule._call_impl\u001b[1;34m(self, *args, **kwargs)\u001b[0m\n\u001b[0;32m 1745\u001b[0m \u001b[38;5;66;03m# If we don't have any hooks, we want to skip the rest of the logic in\u001b[39;00m\n\u001b[0;32m 1746\u001b[0m \u001b[38;5;66;03m# this function, and just call forward.\u001b[39;00m\n\u001b[0;32m 1747\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;129;01mnot\u001b[39;00m (\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_backward_hooks \u001b[38;5;129;01mor\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_backward_pre_hooks \u001b[38;5;129;01mor\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_forward_hooks \u001b[38;5;129;01mor\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_forward_pre_hooks\n\u001b[0;32m 1748\u001b[0m \u001b[38;5;129;01mor\u001b[39;00m _global_backward_pre_hooks \u001b[38;5;129;01mor\u001b[39;00m _global_backward_hooks\n\u001b[0;32m 1749\u001b[0m \u001b[38;5;129;01mor\u001b[39;00m _global_forward_hooks \u001b[38;5;129;01mor\u001b[39;00m _global_forward_pre_hooks):\n\u001b[1;32m-> 1750\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[43mforward_call\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m 1752\u001b[0m result \u001b[38;5;241m=\u001b[39m 
\u001b[38;5;28;01mNone\u001b[39;00m\n\u001b[0;32m 1753\u001b[0m called_always_called_hooks \u001b[38;5;241m=\u001b[39m \u001b[38;5;28mset\u001b[39m()\n", + "File \u001b[1;32mc:\\Users\\leo.breedt\\miniconda3\\envs\\neptune_scale_py_312_base\\Lib\\site-packages\\torchvision\\transforms\\transforms.py:277\u001b[0m, in \u001b[0;36mNormalize.forward\u001b[1;34m(self, tensor)\u001b[0m\n\u001b[0;32m 269\u001b[0m \u001b[38;5;28;01mdef\u001b[39;00m \u001b[38;5;21mforward\u001b[39m(\u001b[38;5;28mself\u001b[39m, tensor: Tensor) \u001b[38;5;241m-\u001b[39m\u001b[38;5;241m>\u001b[39m Tensor:\n\u001b[0;32m 270\u001b[0m \u001b[38;5;250m \u001b[39m\u001b[38;5;124;03m\"\"\"\u001b[39;00m\n\u001b[0;32m 271\u001b[0m \u001b[38;5;124;03m Args:\u001b[39;00m\n\u001b[0;32m 272\u001b[0m \u001b[38;5;124;03m tensor (Tensor): Tensor image to be normalized.\u001b[39;00m\n\u001b[1;32m (...)\u001b[0m\n\u001b[0;32m 275\u001b[0m \u001b[38;5;124;03m Tensor: Normalized Tensor image.\u001b[39;00m\n\u001b[0;32m 276\u001b[0m \u001b[38;5;124;03m \"\"\"\u001b[39;00m\n\u001b[1;32m--> 277\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[43mF\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mnormalize\u001b[49m\u001b[43m(\u001b[49m\u001b[43mtensor\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mmean\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mstd\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43minplace\u001b[49m\u001b[43m)\u001b[49m\n", + "File \u001b[1;32mc:\\Users\\leo.breedt\\miniconda3\\envs\\neptune_scale_py_312_base\\Lib\\site-packages\\torchvision\\transforms\\functional.py:350\u001b[0m, in \u001b[0;36mnormalize\u001b[1;34m(tensor, mean, std, inplace)\u001b[0m\n\u001b[0;32m 347\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m 
\u001b[38;5;129;01mnot\u001b[39;00m \u001b[38;5;28misinstance\u001b[39m(tensor, torch\u001b[38;5;241m.\u001b[39mTensor):\n\u001b[0;32m 348\u001b[0m \u001b[38;5;28;01mraise\u001b[39;00m \u001b[38;5;167;01mTypeError\u001b[39;00m(\u001b[38;5;124mf\u001b[39m\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mimg should be Tensor Image. Got \u001b[39m\u001b[38;5;132;01m{\u001b[39;00m\u001b[38;5;28mtype\u001b[39m(tensor)\u001b[38;5;132;01m}\u001b[39;00m\u001b[38;5;124m\"\u001b[39m)\n\u001b[1;32m--> 350\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[43mF_t\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mnormalize\u001b[49m\u001b[43m(\u001b[49m\u001b[43mtensor\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mmean\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mmean\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mstd\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mstd\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43minplace\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43minplace\u001b[49m\u001b[43m)\u001b[49m\n", + "File \u001b[1;32mc:\\Users\\leo.breedt\\miniconda3\\envs\\neptune_scale_py_312_base\\Lib\\site-packages\\torchvision\\transforms\\_functional_tensor.py:922\u001b[0m, in \u001b[0;36mnormalize\u001b[1;34m(tensor, mean, std, inplace)\u001b[0m\n\u001b[0;32m 920\u001b[0m mean \u001b[38;5;241m=\u001b[39m torch\u001b[38;5;241m.\u001b[39mas_tensor(mean, dtype\u001b[38;5;241m=\u001b[39mdtype, device\u001b[38;5;241m=\u001b[39mtensor\u001b[38;5;241m.\u001b[39mdevice)\n\u001b[0;32m 921\u001b[0m std \u001b[38;5;241m=\u001b[39m torch\u001b[38;5;241m.\u001b[39mas_tensor(std, dtype\u001b[38;5;241m=\u001b[39mdtype, device\u001b[38;5;241m=\u001b[39mtensor\u001b[38;5;241m.\u001b[39mdevice)\n\u001b[1;32m--> 922\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m \u001b[43m(\u001b[49m\u001b[43mstd\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m==\u001b[39;49m\u001b[43m 
\u001b[49m\u001b[38;5;241;43m0\u001b[39;49m\u001b[43m)\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43many\u001b[49m\u001b[43m(\u001b[49m\u001b[43m)\u001b[49m:\n\u001b[0;32m 923\u001b[0m \u001b[38;5;28;01mraise\u001b[39;00m \u001b[38;5;167;01mValueError\u001b[39;00m(\u001b[38;5;124mf\u001b[39m\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mstd evaluated to zero after conversion to \u001b[39m\u001b[38;5;132;01m{\u001b[39;00mdtype\u001b[38;5;132;01m}\u001b[39;00m\u001b[38;5;124m, leading to division by zero.\u001b[39m\u001b[38;5;124m\"\u001b[39m)\n\u001b[0;32m 924\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m mean\u001b[38;5;241m.\u001b[39mndim \u001b[38;5;241m==\u001b[39m \u001b[38;5;241m1\u001b[39m:\n", + "\u001b[1;31mKeyboardInterrupt\u001b[0m: " + ] + } + ], "source": [ "step_counter = 0\n", - "# Training loop\n", "for epoch in range(1, params[\"epochs\"]):\n", - " # Process batches\n", - " for batch in range(0, 10000, params[\"batch_size\"]):\n", - " # Update metrics for this batch\n", - " batch_loss, batch_accuracy, gradient_norms = metrics.update_metrics()\n", - "\n", + " model.train()\n", + " running_loss = 0.0\n", + " \n", + " for batch_idx, (data, target) in enumerate(train_loader):\n", + " # Move data to device\n", + " data, target = data.to(device), target.to(device)\n", + " data = data.view(data.size(0), -1) # Flatten the images\n", + " \n", + " optimizer.zero_grad()\n", + " output = model(data)\n", + " loss = criterion(output, target)\n", + " loss.backward()\n", + " optimizer.step()\n", + "\n", + " gradient_norms = model.get_gradient_norms()\n", + " batch_loss = loss.item()\n", + " \n", " run.log_metrics(\n", " data={\n", " \"metrics/train/loss\": batch_loss,\n", - " \"metrics/train/accuracy\": batch_accuracy,\n", " \"epoch\": epoch,\n", " **gradient_norms,\n", " },\n", @@ -331,7 +390,7 @@ "- Example query: `layer_\\d`\n", "\n", "_Example dashboard:_\n", - "\n", + "\n", " \"Explore\n", "\n", "\n", @@ -342,7 +401,7 @@ "- Share insights with team members\n", 
"\n", "_Example report:_\n", - "\n", + "\n", " \"Explore\n", "\n", "\n", From b0b3f564783dfc256f5f20db554794b29157a728 Mon Sep 17 00:00:00 2001 From: LeoRoccoBreedt Date: Thu, 22 May 2025 16:21:53 +0200 Subject: [PATCH 109/125] chore: update readme and pre-commit accept --- README.md | 5 + .../debug_training_runs.ipynb | 100 +++++------------- utils/migration_tools/from_wandb/README.md | 2 +- 3 files changed, 30 insertions(+), 77 deletions(-) diff --git a/README.md b/README.md index 2e0ddca..ff95c3b 100644 --- a/README.md +++ b/README.md @@ -21,6 +21,7 @@ This repo contains tutorials and examples of how to use Neptune. | Organize and filter runs | [![docs-icon]][runs-table] | [![neptune-icon]][runs-table-example] | | | | Resume run or other object | [![docs-icon]][resume-run] | | | | | Use Neptune in HPO jobs | [![docs-icon]][hpo] | [![neptune-icon]][hpo-example] | [![github-icon]][hpo-notebook] | [![colab-icon]][hpo-colab] | +| Debug training runs | [![docs-icon]][debug] | [![neptune-icon]][debug-example] | [![github-icon]][debug-notebook] | [![colab-icon]][debug-colab] | ### Migration tools @@ -43,6 +44,8 @@ This repo contains tutorials and examples of how to use Neptune. [hpo-colab]: https://colab.research.google.com/github/neptune-ai/scale-examples/blob/master/how-to-guides/hpo/notebooks/Neptune_HPO.ipynb [qs-notebook]: how-to-guides/quickstart/notebooks/neptune_quickstart.ipynb [qs-colab]: https://colab.research.google.com/github/neptune-ai/scale-examples/blob/master/how-to-guides/quickstart/notebooks/neptune_quickstart.ipynb +[debug-notebook]: how-to-guides/debug-model-training-runs/debug_training_runs.ipynb +[debug-colab]: https://colab.research.google.com/github/neptune-ai/scale-examples/blob/master/how-to-guides/debug-model-training-runs/debug_training_runs.ipynb [blog]: https://neptune.ai/blog @@ -58,6 +61,8 @@ This repo contains tutorials and examples of how to use Neptune. 
[resume-run]: https://docs.neptune.ai/resume_run [runs-table]: https://docs.neptune.ai/runs_table [runs-table-example]: https://scale.neptune.ai/o/examples/org/LLM-Pretraining/runs/table?viewId=9e746462-f045-4ff2-9ac4-e41fa349b04d&detailsTab=dashboard&dash=table&type=run&compare=auto-5 +[debug]: TODO - Add link to docs +[debug-example]: TODO - Add link to final Neptune project "examples/debug-training-runs" [docs-icon]: https://neptune.ai/wp-content/uploads/2023/06/file_icon.svg "Read the documentation" diff --git a/how-to-guides/debug-model-training-runs/debug_training_runs.ipynb b/how-to-guides/debug-model-training-runs/debug_training_runs.ipynb index cc229b5..ea6d619 100644 --- a/how-to-guides/debug-model-training-runs/debug_training_runs.ipynb +++ b/how-to-guides/debug-model-training-runs/debug_training_runs.ipynb @@ -112,20 +112,9 @@ }, { "cell_type": "code", - "execution_count": 28, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "'\\n# Create training step method\\ndef train_step(model, device, data, target, optimizer):\\n\\n data, target = data.to(device), target.to(device)\\n data = data.view(data.size(0), -1) # Flatten the images\\n \\n # Forward pass\\n optimizer.zero_grad()\\n output = model(data)\\n loss = criterion(output, target)\\n \\n # Backward pass\\n loss.backward()\\n \\n # Get gradient norms for monitoring\\n gradient_norms = model.get_gradient_norms()\\n \\n # Update weights\\n optimizer.step()\\n \\n # Track statistics\\n batch_loss = loss.item()'" - ] - }, - "execution_count": 28, - "metadata": {}, - "output_type": "execute_result" - } - ], + "outputs": [], "source": [ "# Pure PyTorch\n", "import torch\n", @@ -135,35 +124,36 @@ "from torchvision import datasets, transforms\n", "import time\n", "\n", + "\n", "class SimpleModel(nn.Module):\n", " def __init__(self, input_size: int = 784, hidden_size: int = 128, output_size: int = 10):\n", " super().__init__()\n", - " \n", + "\n", " layers = []\n", 
" layers.append(nn.Linear(input_size, hidden_size))\n", " layers.append(nn.ReLU())\n", - " \n", + "\n", " for _ in range(18):\n", " layers.append(nn.Linear(hidden_size, hidden_size))\n", " layers.append(nn.ReLU())\n", "\n", " layers.append(nn.Linear(hidden_size, output_size))\n", - " \n", + "\n", " # Combine all layers into a sequential model\n", " self.model = nn.Sequential(*layers)\n", - " \n", + "\n", " def forward(self, x: torch.Tensor) -> torch.Tensor:\n", " return self.model(x)\n", - " \n", + "\n", " def get_gradient_norms(self) -> dict:\n", " \"\"\"\n", " Calculate the L2 norm of gradients for each layer.\n", - " \n", + "\n", " Returns:\n", " dict: Dictionary containing gradient norms for each layer\n", " \"\"\"\n", " gradient_norms = {}\n", - " \n", + "\n", " # Iterate through all named parameters\n", " for name, param in self.named_parameters():\n", " if param.grad is not None:\n", @@ -171,9 +161,9 @@ " norm = param.grad.norm(2).item()\n", " # Store in dictionary with a descriptive key\n", " gradient_norms[f\"debug/L2_grad_norm/{name}\"] = norm\n", - " \n", + "\n", " return gradient_norms\n", - " \n", + "\n", "\n", "# Training parameters\n", "params = {\n", @@ -184,13 +174,10 @@ "}\n", "\n", "# Data transformations\n", - "transform = transforms.Compose([\n", - " transforms.ToTensor(),\n", - " transforms.Normalize((0.1307,), (0.3081,))\n", - "])\n", + "transform = transforms.Compose([transforms.ToTensor(), transforms.Normalize((0.1307,), (0.3081,))])\n", "\n", "# Load MNIST dataset\n", - "train_dataset = datasets.MNIST('./data', train=True, download=True, transform=transform)\n", + "train_dataset = datasets.MNIST(\"./data\", train=True, download=True, transform=transform)\n", "# test_dataset = datasets.MNIST('./data', train=False, transform=transform)\n", "\n", "train_loader = DataLoader(train_dataset, batch_size=params[\"batch_size\"], shuffle=True)\n", @@ -228,17 +215,9 @@ }, { "cell_type": "code", - "execution_count": 29, + "execution_count": null, 
"metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "https://scale.neptune.ai/leo/pytorch-tutorial/runs/details?runIdentificationKey=debugging-gradient-norms&type=experiment\n" - ] - } - ], + "outputs": [], "source": [ "from neptune_scale import Run\n", "\n", @@ -258,24 +237,15 @@ }, { "cell_type": "code", - "execution_count": 30, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "See configuration parameters:\n", - "https://scale.neptune.ai/leo/pytorch-tutorial/runs/details?runIdentificationKey=debugging-gradient-norms&type=experiment&detailsTab=metadata\n" - ] - } - ], + "outputs": [], "source": [ "run.log_configs(\n", " {\n", " \"config/batch_size\": params[\"batch_size\"],\n", " \"config/epochs\": params[\"epochs\"],\n", - " \"config/lr\": params[\"lr\"]\n", + " \"config/lr\": params[\"lr\"],\n", " }\n", ")\n", "\n", @@ -302,42 +272,20 @@ }, { "cell_type": "code", - "execution_count": 32, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "ename": "KeyboardInterrupt", - "evalue": "", - "output_type": "error", - "traceback": [ - "\u001b[1;31m---------------------------------------------------------------------------\u001b[0m", - "\u001b[1;31mKeyboardInterrupt\u001b[0m Traceback (most recent call last)", - "Cell \u001b[1;32mIn[32], line 6\u001b[0m\n\u001b[0;32m 3\u001b[0m model\u001b[38;5;241m.\u001b[39mtrain()\n\u001b[0;32m 4\u001b[0m running_loss \u001b[38;5;241m=\u001b[39m \u001b[38;5;241m0.0\u001b[39m\n\u001b[1;32m----> 6\u001b[0m \u001b[43m\u001b[49m\u001b[38;5;28;43;01mfor\u001b[39;49;00m\u001b[43m \u001b[49m\u001b[43mbatch_idx\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43m(\u001b[49m\u001b[43mdata\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mtarget\u001b[49m\u001b[43m)\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;129;43;01min\u001b[39;49;00m\u001b[43m 
\u001b[49m\u001b[38;5;28;43menumerate\u001b[39;49m\u001b[43m(\u001b[49m\u001b[43mtrain_loader\u001b[49m\u001b[43m)\u001b[49m\u001b[43m:\u001b[49m\n\u001b[0;32m 7\u001b[0m \u001b[43m \u001b[49m\u001b[38;5;66;43;03m# Move data to device\u001b[39;49;00m\n\u001b[0;32m 8\u001b[0m \u001b[43m \u001b[49m\u001b[43mdata\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mtarget\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43m \u001b[49m\u001b[43mdata\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mto\u001b[49m\u001b[43m(\u001b[49m\u001b[43mdevice\u001b[49m\u001b[43m)\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mtarget\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mto\u001b[49m\u001b[43m(\u001b[49m\u001b[43mdevice\u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m 9\u001b[0m \u001b[43m \u001b[49m\u001b[43mdata\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43m \u001b[49m\u001b[43mdata\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mview\u001b[49m\u001b[43m(\u001b[49m\u001b[43mdata\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43msize\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;241;43m0\u001b[39;49m\u001b[43m)\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m-\u001b[39;49m\u001b[38;5;241;43m1\u001b[39;49m\u001b[43m)\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;66;43;03m# Flatten the images\u001b[39;49;00m\n", - "File \u001b[1;32mc:\\Users\\leo.breedt\\miniconda3\\envs\\neptune_scale_py_312_base\\Lib\\site-packages\\torch\\utils\\data\\dataloader.py:708\u001b[0m, in \u001b[0;36m_BaseDataLoaderIter.__next__\u001b[1;34m(self)\u001b[0m\n\u001b[0;32m 705\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_sampler_iter \u001b[38;5;129;01mis\u001b[39;00m \u001b[38;5;28;01mNone\u001b[39;00m:\n\u001b[0;32m 706\u001b[0m \u001b[38;5;66;03m# TODO(https://github.com/pytorch/pytorch/issues/76750)\u001b[39;00m\n\u001b[0;32m 707\u001b[0m 
\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_reset() \u001b[38;5;66;03m# type: ignore[call-arg]\u001b[39;00m\n\u001b[1;32m--> 708\u001b[0m data \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_next_data\u001b[49m\u001b[43m(\u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m 709\u001b[0m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_num_yielded \u001b[38;5;241m+\u001b[39m\u001b[38;5;241m=\u001b[39m \u001b[38;5;241m1\u001b[39m\n\u001b[0;32m 710\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m (\n\u001b[0;32m 711\u001b[0m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_dataset_kind \u001b[38;5;241m==\u001b[39m _DatasetKind\u001b[38;5;241m.\u001b[39mIterable\n\u001b[0;32m 712\u001b[0m \u001b[38;5;129;01mand\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_IterableDataset_len_called \u001b[38;5;129;01mis\u001b[39;00m \u001b[38;5;129;01mnot\u001b[39;00m \u001b[38;5;28;01mNone\u001b[39;00m\n\u001b[0;32m 713\u001b[0m \u001b[38;5;129;01mand\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_num_yielded \u001b[38;5;241m>\u001b[39m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_IterableDataset_len_called\n\u001b[0;32m 714\u001b[0m ):\n", - "File \u001b[1;32mc:\\Users\\leo.breedt\\miniconda3\\envs\\neptune_scale_py_312_base\\Lib\\site-packages\\torch\\utils\\data\\dataloader.py:764\u001b[0m, in \u001b[0;36m_SingleProcessDataLoaderIter._next_data\u001b[1;34m(self)\u001b[0m\n\u001b[0;32m 762\u001b[0m \u001b[38;5;28;01mdef\u001b[39;00m \u001b[38;5;21m_next_data\u001b[39m(\u001b[38;5;28mself\u001b[39m):\n\u001b[0;32m 763\u001b[0m index \u001b[38;5;241m=\u001b[39m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_next_index() \u001b[38;5;66;03m# may raise StopIteration\u001b[39;00m\n\u001b[1;32m--> 764\u001b[0m data \u001b[38;5;241m=\u001b[39m 
\u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_dataset_fetcher\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mfetch\u001b[49m\u001b[43m(\u001b[49m\u001b[43mindex\u001b[49m\u001b[43m)\u001b[49m \u001b[38;5;66;03m# may raise StopIteration\u001b[39;00m\n\u001b[0;32m 765\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_pin_memory:\n\u001b[0;32m 766\u001b[0m data \u001b[38;5;241m=\u001b[39m _utils\u001b[38;5;241m.\u001b[39mpin_memory\u001b[38;5;241m.\u001b[39mpin_memory(data, \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_pin_memory_device)\n", - "File \u001b[1;32mc:\\Users\\leo.breedt\\miniconda3\\envs\\neptune_scale_py_312_base\\Lib\\site-packages\\torch\\utils\\data\\_utils\\fetch.py:52\u001b[0m, in \u001b[0;36m_MapDatasetFetcher.fetch\u001b[1;34m(self, possibly_batched_index)\u001b[0m\n\u001b[0;32m 50\u001b[0m data \u001b[38;5;241m=\u001b[39m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mdataset\u001b[38;5;241m.\u001b[39m__getitems__(possibly_batched_index)\n\u001b[0;32m 51\u001b[0m \u001b[38;5;28;01melse\u001b[39;00m:\n\u001b[1;32m---> 52\u001b[0m data \u001b[38;5;241m=\u001b[39m [\u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mdataset\u001b[49m\u001b[43m[\u001b[49m\u001b[43midx\u001b[49m\u001b[43m]\u001b[49m \u001b[38;5;28;01mfor\u001b[39;00m idx \u001b[38;5;129;01min\u001b[39;00m possibly_batched_index]\n\u001b[0;32m 53\u001b[0m \u001b[38;5;28;01melse\u001b[39;00m:\n\u001b[0;32m 54\u001b[0m data \u001b[38;5;241m=\u001b[39m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mdataset[possibly_batched_index]\n", - "File \u001b[1;32mc:\\Users\\leo.breedt\\miniconda3\\envs\\neptune_scale_py_312_base\\Lib\\site-packages\\torchvision\\datasets\\mnist.py:146\u001b[0m, in \u001b[0;36mMNIST.__getitem__\u001b[1;34m(self, index)\u001b[0m\n\u001b[0;32m 143\u001b[0m img \u001b[38;5;241m=\u001b[39m 
Image\u001b[38;5;241m.\u001b[39mfromarray(img\u001b[38;5;241m.\u001b[39mnumpy(), mode\u001b[38;5;241m=\u001b[39m\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mL\u001b[39m\u001b[38;5;124m\"\u001b[39m)\n\u001b[0;32m 145\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mtransform \u001b[38;5;129;01mis\u001b[39;00m \u001b[38;5;129;01mnot\u001b[39;00m \u001b[38;5;28;01mNone\u001b[39;00m:\n\u001b[1;32m--> 146\u001b[0m img \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mtransform\u001b[49m\u001b[43m(\u001b[49m\u001b[43mimg\u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m 148\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mtarget_transform \u001b[38;5;129;01mis\u001b[39;00m \u001b[38;5;129;01mnot\u001b[39;00m \u001b[38;5;28;01mNone\u001b[39;00m:\n\u001b[0;32m 149\u001b[0m target \u001b[38;5;241m=\u001b[39m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mtarget_transform(target)\n", - "File \u001b[1;32mc:\\Users\\leo.breedt\\miniconda3\\envs\\neptune_scale_py_312_base\\Lib\\site-packages\\torchvision\\transforms\\transforms.py:95\u001b[0m, in \u001b[0;36mCompose.__call__\u001b[1;34m(self, img)\u001b[0m\n\u001b[0;32m 93\u001b[0m \u001b[38;5;28;01mdef\u001b[39;00m \u001b[38;5;21m__call__\u001b[39m(\u001b[38;5;28mself\u001b[39m, img):\n\u001b[0;32m 94\u001b[0m \u001b[38;5;28;01mfor\u001b[39;00m t \u001b[38;5;129;01min\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mtransforms:\n\u001b[1;32m---> 95\u001b[0m img \u001b[38;5;241m=\u001b[39m \u001b[43mt\u001b[49m\u001b[43m(\u001b[49m\u001b[43mimg\u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m 96\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m img\n", - "File \u001b[1;32mc:\\Users\\leo.breedt\\miniconda3\\envs\\neptune_scale_py_312_base\\Lib\\site-packages\\torch\\nn\\modules\\module.py:1739\u001b[0m, in 
\u001b[0;36mModule._wrapped_call_impl\u001b[1;34m(self, *args, **kwargs)\u001b[0m\n\u001b[0;32m 1737\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_compiled_call_impl(\u001b[38;5;241m*\u001b[39margs, \u001b[38;5;241m*\u001b[39m\u001b[38;5;241m*\u001b[39mkwargs) \u001b[38;5;66;03m# type: ignore[misc]\u001b[39;00m\n\u001b[0;32m 1738\u001b[0m \u001b[38;5;28;01melse\u001b[39;00m:\n\u001b[1;32m-> 1739\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_call_impl\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n", - "File \u001b[1;32mc:\\Users\\leo.breedt\\miniconda3\\envs\\neptune_scale_py_312_base\\Lib\\site-packages\\torch\\nn\\modules\\module.py:1750\u001b[0m, in \u001b[0;36mModule._call_impl\u001b[1;34m(self, *args, **kwargs)\u001b[0m\n\u001b[0;32m 1745\u001b[0m \u001b[38;5;66;03m# If we don't have any hooks, we want to skip the rest of the logic in\u001b[39;00m\n\u001b[0;32m 1746\u001b[0m \u001b[38;5;66;03m# this function, and just call forward.\u001b[39;00m\n\u001b[0;32m 1747\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;129;01mnot\u001b[39;00m (\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_backward_hooks \u001b[38;5;129;01mor\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_backward_pre_hooks \u001b[38;5;129;01mor\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_forward_hooks \u001b[38;5;129;01mor\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_forward_pre_hooks\n\u001b[0;32m 1748\u001b[0m \u001b[38;5;129;01mor\u001b[39;00m _global_backward_pre_hooks \u001b[38;5;129;01mor\u001b[39;00m _global_backward_hooks\n\u001b[0;32m 1749\u001b[0m 
\u001b[38;5;129;01mor\u001b[39;00m _global_forward_hooks \u001b[38;5;129;01mor\u001b[39;00m _global_forward_pre_hooks):\n\u001b[1;32m-> 1750\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[43mforward_call\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m 1752\u001b[0m result \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;01mNone\u001b[39;00m\n\u001b[0;32m 1753\u001b[0m called_always_called_hooks \u001b[38;5;241m=\u001b[39m \u001b[38;5;28mset\u001b[39m()\n", - "File \u001b[1;32mc:\\Users\\leo.breedt\\miniconda3\\envs\\neptune_scale_py_312_base\\Lib\\site-packages\\torchvision\\transforms\\transforms.py:277\u001b[0m, in \u001b[0;36mNormalize.forward\u001b[1;34m(self, tensor)\u001b[0m\n\u001b[0;32m 269\u001b[0m \u001b[38;5;28;01mdef\u001b[39;00m \u001b[38;5;21mforward\u001b[39m(\u001b[38;5;28mself\u001b[39m, tensor: Tensor) \u001b[38;5;241m-\u001b[39m\u001b[38;5;241m>\u001b[39m Tensor:\n\u001b[0;32m 270\u001b[0m \u001b[38;5;250m \u001b[39m\u001b[38;5;124;03m\"\"\"\u001b[39;00m\n\u001b[0;32m 271\u001b[0m \u001b[38;5;124;03m Args:\u001b[39;00m\n\u001b[0;32m 272\u001b[0m \u001b[38;5;124;03m tensor (Tensor): Tensor image to be normalized.\u001b[39;00m\n\u001b[1;32m (...)\u001b[0m\n\u001b[0;32m 275\u001b[0m \u001b[38;5;124;03m Tensor: Normalized Tensor image.\u001b[39;00m\n\u001b[0;32m 276\u001b[0m \u001b[38;5;124;03m \"\"\"\u001b[39;00m\n\u001b[1;32m--> 277\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[43mF\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mnormalize\u001b[49m\u001b[43m(\u001b[49m\u001b[43mtensor\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mmean\u001b[49m\u001b[43m,\u001b[49m\u001b[43m 
\u001b[49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mstd\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43minplace\u001b[49m\u001b[43m)\u001b[49m\n", - "File \u001b[1;32mc:\\Users\\leo.breedt\\miniconda3\\envs\\neptune_scale_py_312_base\\Lib\\site-packages\\torchvision\\transforms\\functional.py:350\u001b[0m, in \u001b[0;36mnormalize\u001b[1;34m(tensor, mean, std, inplace)\u001b[0m\n\u001b[0;32m 347\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;129;01mnot\u001b[39;00m \u001b[38;5;28misinstance\u001b[39m(tensor, torch\u001b[38;5;241m.\u001b[39mTensor):\n\u001b[0;32m 348\u001b[0m \u001b[38;5;28;01mraise\u001b[39;00m \u001b[38;5;167;01mTypeError\u001b[39;00m(\u001b[38;5;124mf\u001b[39m\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mimg should be Tensor Image. Got \u001b[39m\u001b[38;5;132;01m{\u001b[39;00m\u001b[38;5;28mtype\u001b[39m(tensor)\u001b[38;5;132;01m}\u001b[39;00m\u001b[38;5;124m\"\u001b[39m)\n\u001b[1;32m--> 350\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[43mF_t\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mnormalize\u001b[49m\u001b[43m(\u001b[49m\u001b[43mtensor\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mmean\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mmean\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mstd\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mstd\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43minplace\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43minplace\u001b[49m\u001b[43m)\u001b[49m\n", - "File \u001b[1;32mc:\\Users\\leo.breedt\\miniconda3\\envs\\neptune_scale_py_312_base\\Lib\\site-packages\\torchvision\\transforms\\_functional_tensor.py:922\u001b[0m, in \u001b[0;36mnormalize\u001b[1;34m(tensor, mean, std, inplace)\u001b[0m\n\u001b[0;32m 920\u001b[0m mean \u001b[38;5;241m=\u001b[39m torch\u001b[38;5;241m.\u001b[39mas_tensor(mean, 
dtype\u001b[38;5;241m=\u001b[39mdtype, device\u001b[38;5;241m=\u001b[39mtensor\u001b[38;5;241m.\u001b[39mdevice)\n\u001b[0;32m 921\u001b[0m std \u001b[38;5;241m=\u001b[39m torch\u001b[38;5;241m.\u001b[39mas_tensor(std, dtype\u001b[38;5;241m=\u001b[39mdtype, device\u001b[38;5;241m=\u001b[39mtensor\u001b[38;5;241m.\u001b[39mdevice)\n\u001b[1;32m--> 922\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m \u001b[43m(\u001b[49m\u001b[43mstd\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m==\u001b[39;49m\u001b[43m \u001b[49m\u001b[38;5;241;43m0\u001b[39;49m\u001b[43m)\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43many\u001b[49m\u001b[43m(\u001b[49m\u001b[43m)\u001b[49m:\n\u001b[0;32m 923\u001b[0m \u001b[38;5;28;01mraise\u001b[39;00m \u001b[38;5;167;01mValueError\u001b[39;00m(\u001b[38;5;124mf\u001b[39m\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mstd evaluated to zero after conversion to \u001b[39m\u001b[38;5;132;01m{\u001b[39;00mdtype\u001b[38;5;132;01m}\u001b[39;00m\u001b[38;5;124m, leading to division by zero.\u001b[39m\u001b[38;5;124m\"\u001b[39m)\n\u001b[0;32m 924\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m mean\u001b[38;5;241m.\u001b[39mndim \u001b[38;5;241m==\u001b[39m \u001b[38;5;241m1\u001b[39m:\n", - "\u001b[1;31mKeyboardInterrupt\u001b[0m: " - ] - } - ], + "outputs": [], "source": [ "step_counter = 0\n", "for epoch in range(1, params[\"epochs\"]):\n", " model.train()\n", " running_loss = 0.0\n", - " \n", + "\n", " for batch_idx, (data, target) in enumerate(train_loader):\n", " # Move data to device\n", " data, target = data.to(device), target.to(device)\n", " data = data.view(data.size(0), -1) # Flatten the images\n", - " \n", + "\n", " optimizer.zero_grad()\n", " output = model(data)\n", " loss = criterion(output, target)\n", @@ -346,7 +294,7 @@ "\n", " gradient_norms = model.get_gradient_norms()\n", " batch_loss = loss.item()\n", - " \n", + "\n", " run.log_metrics(\n", " data={\n", " \"metrics/train/loss\": batch_loss,\n", diff --git 
a/utils/migration_tools/from_wandb/README.md b/utils/migration_tools/from_wandb/README.md index 0fd6f03..6544294 100644 --- a/utils/migration_tools/from_wandb/README.md +++ b/utils/migration_tools/from_wandb/README.md @@ -78,4 +78,4 @@ See the License for the specific language governing permissions and limitations [docs-custom-views]: https://docs.neptune.ai/runs_table#custom-views [docs-project-access]: https://docs.neptune.ai/project_access [docs-reports]: https://docs.neptune.ai/reports/ -[docs-setup]: https://docs.neptune.ai/setup \ No newline at end of file +[docs-setup]: https://docs.neptune.ai/setup From c35c43de15773df71988d2336e17f924b3cc0634 Mon Sep 17 00:00:00 2001 From: LeoRoccoBreedt Date: Mon, 2 Jun 2025 10:45:47 +0200 Subject: [PATCH 110/125] fix: add packages dependencies --- .../debug-model-training-runs/debug_training_runs.ipynb | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/how-to-guides/debug-model-training-runs/debug_training_runs.ipynb b/how-to-guides/debug-model-training-runs/debug_training_runs.ipynb index ea6d619..982fbd9 100644 --- a/how-to-guides/debug-model-training-runs/debug_training_runs.ipynb +++ b/how-to-guides/debug-model-training-runs/debug_training_runs.ipynb @@ -77,7 +77,7 @@ "source": [ "### Install dependencies and import libraries\n", "\n", - "This tutorial uses a datset from Hugging Face which is loaded using the `datasets` package from Hugging Face." + "This tutorial uses PyTorch and the MNIST dataset for model training. " ] }, { @@ -87,7 +87,7 @@ "outputs": [], "source": [ "# Install dependencies\n", - "! pip install -qU neptune_scale numpy" + "! 
pip install -qU neptune_scale numpy torch torchvision" ] }, { From b7fe196b02e32886a978adf1adefb8e679dc2be6 Mon Sep 17 00:00:00 2001 From: LeoRoccoBreedt Date: Mon, 2 Jun 2025 11:13:38 +0200 Subject: [PATCH 111/125] refactor: update notebooks --- .../{ => notebooks}/debug_training_runs.ipynb | 7 ++----- 1 file changed, 2 insertions(+), 5 deletions(-) rename how-to-guides/debug-model-training-runs/{ => notebooks}/debug_training_runs.ipynb (98%) diff --git a/how-to-guides/debug-model-training-runs/debug_training_runs.ipynb b/how-to-guides/debug-model-training-runs/notebooks/debug_training_runs.ipynb similarity index 98% rename from how-to-guides/debug-model-training-runs/debug_training_runs.ipynb rename to how-to-guides/debug-model-training-runs/notebooks/debug_training_runs.ipynb index 982fbd9..6302598 100644 --- a/how-to-guides/debug-model-training-runs/debug_training_runs.ipynb +++ b/how-to-guides/debug-model-training-runs/notebooks/debug_training_runs.ipynb @@ -4,7 +4,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "# Debug Model Training Runs with Neptune\n", + "# Log and Visualize Debugging Metrics in Neptune\n", "\n", " \n", " \"Open \n", @@ -87,7 +87,7 @@ "outputs": [], "source": [ "# Install dependencies\n", - "! pip install -qU neptune_scale numpy torch torchvision" + "! 
pip install -qU neptune_scale torch torchvision" ] }, { @@ -116,14 +116,11 @@ "metadata": {}, "outputs": [], "source": [ - "# Pure PyTorch\n", "import torch\n", "import torch.nn as nn\n", "import torch.optim as optim\n", "from torch.utils.data import DataLoader\n", "from torchvision import datasets, transforms\n", - "import time\n", - "\n", "\n", "class SimpleModel(nn.Module):\n", " def __init__(self, input_size: int = 784, hidden_size: int = 128, output_size: int = 10):\n", From ebc9aee95101391c36b8c2c3ab1de24b676efef7 Mon Sep 17 00:00:00 2001 From: LeoRoccoBreedt Date: Mon, 2 Jun 2025 11:13:59 +0200 Subject: [PATCH 112/125] feat: add script version of tutorial --- .../scripts/debug_training_runs.py | 134 ++++++++++++++++++ .../scripts/requirements.txt | 3 + .../scripts/run_examples.sh | 7 + 3 files changed, 144 insertions(+) create mode 100644 how-to-guides/debug-model-training-runs/scripts/debug_training_runs.py create mode 100644 how-to-guides/debug-model-training-runs/scripts/requirements.txt create mode 100644 how-to-guides/debug-model-training-runs/scripts/run_examples.sh diff --git a/how-to-guides/debug-model-training-runs/scripts/debug_training_runs.py b/how-to-guides/debug-model-training-runs/scripts/debug_training_runs.py new file mode 100644 index 0000000..17fb199 --- /dev/null +++ b/how-to-guides/debug-model-training-runs/scripts/debug_training_runs.py @@ -0,0 +1,134 @@ +import torch +import torch.nn as nn +import torch.optim as optim +from torch.utils.data import DataLoader +from torchvision import datasets, transforms +from neptune_scale import Run + +class SimpleModel(nn.Module): + def __init__(self, input_size: int = 784, hidden_size: int = 128, output_size: int = 10): + super().__init__() + + layers = [] + layers.append(nn.Linear(input_size, hidden_size)) + layers.append(nn.ReLU()) + + for _ in range(18): + layers.append(nn.Linear(hidden_size, hidden_size)) + layers.append(nn.ReLU()) + + layers.append(nn.Linear(hidden_size, output_size)) + + # 
Combine all layers into a sequential model + self.model = nn.Sequential(*layers) + + def forward(self, x: torch.Tensor) -> torch.Tensor: + return self.model(x) + + def get_gradient_norms(self) -> dict: + """ + Calculate the L2 norm of gradients for each layer. + + Returns: + dict: Dictionary containing gradient norms for each layer + """ + gradient_norms = {} + + # Iterate through all named parameters + for name, param in self.named_parameters(): + if param.grad is not None: + # Calculate L2 norm of gradients + norm = param.grad.norm(2).item() + # Store in dictionary with a descriptive key + gradient_norms[f"debug/L2_grad_norm/{name}"] = norm + + return gradient_norms + +def main(): + + # Training parameters + params = { + "batch_size": 512, + "epochs": 10, + "lr": 0.001, + "num_layers": 20, # Configurable number of layers + } + + # Data transformations + transform = transforms.Compose([transforms.ToTensor(), transforms.Normalize((0.1307,), (0.3081,))]) + + # Load MNIST dataset + train_dataset = datasets.MNIST("./data", train=True, download=True, transform=transform) + # test_dataset = datasets.MNIST('./data', train=False, transform=transform) + + train_loader = DataLoader(train_dataset, batch_size=params["batch_size"], shuffle=True) + # test_loader = DataLoader(test_dataset, batch_size=batch_size) + + device = "cuda" if torch.cuda.is_available() else "cpu" + # Initialize model, loss function, and optimizer + model = SimpleModel().to(device) + criterion = nn.CrossEntropyLoss() + optimizer = optim.Adam(model.parameters(), lr=params["lr"]) + + + # Step 1: Initialize Neptune Run object + run = Run( + experiment_name="debugging-gradient-norms", # Create a run that is the head of an experiment. 
+ ) + + print(run.get_experiment_url()) + + # Step 2: Log configuration parameters + run.log_configs( + { + "config/batch_size": params["batch_size"], + "config/epochs": params["epochs"], + "config/lr": params["lr"], + } + ) + + run.add_tags(tags=["debug", "gradient-norm"]) + + print(f"See configuration parameters:\n{run.get_experiment_url() + '&detailsTab=metadata'}") + + # Step 3: Track gradient norms during training + step_counter = 0 + for epoch in range(1, params["epochs"]): + model.train() + running_loss = 0.0 + + for batch_idx, (data, target) in enumerate(train_loader): + # Move data to device + data, target = data.to(device), target.to(device) + data = data.view(data.size(0), -1) # Flatten the images + + optimizer.zero_grad() + output = model(data) + loss = criterion(output, target) + loss.backward() + optimizer.step() + + gradient_norms = model.get_gradient_norms() + batch_loss = loss.item() + + run.log_metrics( + data={ + "metrics/train/loss": batch_loss, + "epoch": epoch, + **gradient_norms, + }, + step=step_counter, + ) + step_counter += 1 + + run.close() + + # Step 4: Analyze training behavior + # While your model trains, use Neptune's web interface to monitor and analyze metrics in near real-time: + # 1. Real-time metric visualization + # 2. Advanced metric filtering + # 3. Create custom charts and dashboards + # 4. 
Dynamic metric analysis + +if __name__ == "__main__": + main() diff --git a/how-to-guides/debug-model-training-runs/scripts/requirements.txt b/how-to-guides/debug-model-training-runs/scripts/requirements.txt new file mode 100644 index 0000000..01c3673 --- /dev/null +++ b/how-to-guides/debug-model-training-runs/scripts/requirements.txt @@ -0,0 +1,3 @@ +neptune_scale +torch +torchvision diff --git a/how-to-guides/debug-model-training-runs/scripts/run_examples.sh b/how-to-guides/debug-model-training-runs/scripts/run_examples.sh new file mode 100644 index 0000000..aa15b05 --- /dev/null +++ b/how-to-guides/debug-model-training-runs/scripts/run_examples.sh @@ -0,0 +1,7 @@ +set -e + +echo "Installing requirements..." +pip install -Uq -r requirements.txt + +echo "Running debug_training_runs.py..." +python debug_training_runs.py From 75e88ad4a87455ca9471f7ce40974b032068d24f Mon Sep 17 00:00:00 2001 From: LeoRoccoBreedt Date: Mon, 2 Jun 2025 11:16:26 +0200 Subject: [PATCH 113/125] chore: Add tests to GH workflows --- .github/workflows/test-notebooks.yml | 2 +- .github/workflows/test-scripts.yml | 1 + 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/.github/workflows/test-notebooks.yml b/.github/workflows/test-notebooks.yml index 9207d9b..beb7391 100644 --- a/.github/workflows/test-notebooks.yml +++ b/.github/workflows/test-notebooks.yml @@ -20,8 +20,8 @@ jobs: matrix: python-version: ["3.9", "3.10", "3.11", "3.12", "3.13"] notebooks: # Add in alphabetical order + - how-to-guides/debug-model-training-runs/notebooks/debug_training_runs.ipynb - how-to-guides/hpo/notebooks/Neptune_HPO.ipynb - - how-to-guides/debug-model-training-runs/debug_training_runs.ipynb - how-to-guides/quickstart/notebooks/neptune_quickstart.ipynb os: ["${{ inputs.os }}"] steps: diff --git a/.github/workflows/test-scripts.yml b/.github/workflows/test-scripts.yml index 7a9344b..c30308a 100644 --- a/.github/workflows/test-scripts.yml +++ b/.github/workflows/test-scripts.yml @@ -20,6 +20,7 @@ 
jobs: matrix: python-version: ["3.9", "3.10", "3.11", "3.12", "3.13"] scripts: # Add in alphabetical order + - how-to-guides/debug-model-training-runs/scripts - how-to-guides/hpo/scripts - how-to-guides/quickstart/scripts os: ["${{ inputs.os }}"] From f0db1c2a95ed7ab02b2153af638cb2a58dec12da Mon Sep 17 00:00:00 2001 From: LeoRoccoBreedt Date: Mon, 2 Jun 2025 11:22:17 +0200 Subject: [PATCH 114/125] fix: constrain numpy versions <2 --- .../notebooks/debug_training_runs.ipynb | 2 +- .../debug-model-training-runs/scripts/requirements.txt | 1 + 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/how-to-guides/debug-model-training-runs/notebooks/debug_training_runs.ipynb b/how-to-guides/debug-model-training-runs/notebooks/debug_training_runs.ipynb index 6302598..b6e8dc4 100644 --- a/how-to-guides/debug-model-training-runs/notebooks/debug_training_runs.ipynb +++ b/how-to-guides/debug-model-training-runs/notebooks/debug_training_runs.ipynb @@ -87,7 +87,7 @@ "outputs": [], "source": [ "# Install dependencies\n", - "! pip install -qU neptune_scale torch torchvision" + "! 
pip install -qU neptune_scale torch torchvision \"numpy<2\"" ] }, { diff --git a/how-to-guides/debug-model-training-runs/scripts/requirements.txt b/how-to-guides/debug-model-training-runs/scripts/requirements.txt index 01c3673..531d023 100644 --- a/how-to-guides/debug-model-training-runs/scripts/requirements.txt +++ b/how-to-guides/debug-model-training-runs/scripts/requirements.txt @@ -1,3 +1,4 @@ neptune_scale torch torchvision +numpy<2 \ No newline at end of file From d3c820060176b46297037a83d52a12ece93dfd21 Mon Sep 17 00:00:00 2001 From: LeoRoccoBreedt Date: Mon, 2 Jun 2025 11:57:37 +0200 Subject: [PATCH 115/125] chore: update links for colab and GH --- .../notebooks/debug_training_runs.ipynb | 11 +++-------- 1 file changed, 3 insertions(+), 8 deletions(-) diff --git a/how-to-guides/debug-model-training-runs/notebooks/debug_training_runs.ipynb b/how-to-guides/debug-model-training-runs/notebooks/debug_training_runs.ipynb index b6e8dc4..1701ead 100644 --- a/how-to-guides/debug-model-training-runs/notebooks/debug_training_runs.ipynb +++ b/how-to-guides/debug-model-training-runs/notebooks/debug_training_runs.ipynb @@ -6,10 +6,10 @@ "source": [ "# Log and Visualize Debugging Metrics in Neptune\n", "\n", - " \n", + " \n", " \"Open \n", "\n", - "\n", + "\n", " \"Open\n", "\n", "\n", @@ -348,12 +348,7 @@ "_Example report:_\n", "\n", " \"Explore\n", - "\n", - "\n", - "---\n", - "\n", - "**Additional resources:**\n", - "- [PyTorch Layer-wise Tracking Package](TODO:Link to integration for tracking layer-wise metrics)\n" + "\n" ] } ], From 4925f6956073f2e0c7dc5691e4e8c01f045fb946 Mon Sep 17 00:00:00 2001 From: LeoRoccoBreedt Date: Mon, 2 Jun 2025 14:48:52 +0200 Subject: [PATCH 116/125] chore: minor tweaks and update links to project --- .../notebooks/debug_training_runs.ipynb | 14 ++++++++------ .../scripts/debug_training_runs.py | 12 ++++++++---- .../scripts/requirements.txt | 4 ++-- 3 files changed, 18 insertions(+), 12 deletions(-) diff --git 
a/how-to-guides/debug-model-training-runs/notebooks/debug_training_runs.ipynb b/how-to-guides/debug-model-training-runs/notebooks/debug_training_runs.ipynb index 1701ead..d1c7f95 100644 --- a/how-to-guides/debug-model-training-runs/notebooks/debug_training_runs.ipynb +++ b/how-to-guides/debug-model-training-runs/notebooks/debug_training_runs.ipynb @@ -32,7 +32,7 @@ "3. Analyze the metrics in Neptune's UI to **debug training issues**\n", "\n", "Step through a pre-configured report:\n", - "\n", + "\n", " \"Explore\n", "\n", "\n", @@ -122,6 +122,7 @@ "from torch.utils.data import DataLoader\n", "from torchvision import datasets, transforms\n", "\n", + "\n", "class SimpleModel(nn.Module):\n", " def __init__(self, input_size: int = 784, hidden_size: int = 128, output_size: int = 10):\n", " super().__init__()\n", @@ -219,7 +220,8 @@ "from neptune_scale import Run\n", "\n", "run = Run(\n", - " experiment_name=\"debugging-gradient-norms\", # Create a run that is the head of an experiment.\n", + " experiment_name=\"debugging-gradient-norms\",\n", + " project=\"examples/debug-training-metrics\", # Create a run that is the head of an experiment.\n", ")\n", "\n", "print(run.get_experiment_url())" @@ -319,7 +321,7 @@ "\n", "**2. Advanced metric filtering**\n", "- Focus on specific metrics using [advanced regex search](https://docs.neptune.ai/charts#filtering-charts)\n", - "- Example: `norm & layer_\\d` filter all gradient norms layers\n", + "- Example: `norm | \\d.weight` filter all gradient norms layers\n", "- Perfect for isolating problematic layers or components\n", "\n", "**3. 
Create custom dashboards**\n", @@ -332,10 +334,10 @@ "- Group related metrics (e.g., all layer gradients)\n", "- Create automatically updating charts by [selecting metrics dynamically](https://docs.neptune.ai/chart_widget/#dynamic-metric-selection)\n", "- Quickly identify vanishing/exploding gradients\n", - "- Example query: `layer_\\d`\n", + "- Example query: `\\d.weight$`\n", "\n", "_Example dashboard:_\n", - "\n", + "\n", " \"Explore\n", "\n", "\n", @@ -346,7 +348,7 @@ "- Share insights with team members\n", "\n", "_Example report:_\n", - "\n", + "\n", " \"Explore\n", "\n" ] diff --git a/how-to-guides/debug-model-training-runs/scripts/debug_training_runs.py b/how-to-guides/debug-model-training-runs/scripts/debug_training_runs.py index 17fb199..2703fb1 100644 --- a/how-to-guides/debug-model-training-runs/scripts/debug_training_runs.py +++ b/how-to-guides/debug-model-training-runs/scripts/debug_training_runs.py @@ -1,9 +1,10 @@ import torch import torch.nn as nn import torch.optim as optim +from neptune_scale import Run from torch.utils.data import DataLoader from torchvision import datasets, transforms -from neptune_scale import Run + class SimpleModel(nn.Module): def __init__(self, input_size: int = 784, hidden_size: int = 128, output_size: int = 10): @@ -44,6 +45,7 @@ def get_gradient_norms(self) -> dict: return gradient_norms + def main(): # Training parameters @@ -55,7 +57,9 @@ def main(): } # Data transformations - transform = transforms.Compose([transforms.ToTensor(), transforms.Normalize((0.1307,), (0.3081,))]) + transform = transforms.Compose( + [transforms.ToTensor(), transforms.Normalize((0.1307,), (0.3081,))] + ) # Load MNIST dataset train_dataset = datasets.MNIST("./data", train=True, download=True, transform=transform) @@ -70,7 +74,6 @@ def main(): criterion = nn.CrossEntropyLoss() optimizer = optim.Adam(model.parameters(), lr=params["lr"]) - # Step 1: Initialize Neptune Run object run = Run( experiment_name="debugging-gradient-norms", # Create a run 
that is the head of an experiment. @@ -85,7 +88,7 @@ def main(): "config/epochs": params["epochs"], "config/lr": params["lr"], } - ) + ) run.add_tags(tags=["debug", "gradient-norm"]) @@ -130,5 +133,6 @@ def main(): # 3. Create custom charts and dashboards # 4. Dynamic metric analysis + if __name__ == "__main__": main() diff --git a/how-to-guides/debug-model-training-runs/scripts/requirements.txt b/how-to-guides/debug-model-training-runs/scripts/requirements.txt index 531d023..eaa2f7d 100644 --- a/how-to-guides/debug-model-training-runs/scripts/requirements.txt +++ b/how-to-guides/debug-model-training-runs/scripts/requirements.txt @@ -1,4 +1,4 @@ neptune_scale -torch +numpy<2 +torch torchvision -numpy<2 \ No newline at end of file From 39aaa8116ff305aa1a4b03c4710d1a12f05b62f2 Mon Sep 17 00:00:00 2001 From: LeoRoccoBreedt Date: Mon, 2 Jun 2025 14:52:52 +0200 Subject: [PATCH 117/125] chore: readme links --- README.md | 4 ++-- .../notebooks/debug_training_runs.ipynb | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/README.md b/README.md index ff95c3b..c1434e2 100644 --- a/README.md +++ b/README.md @@ -45,7 +45,7 @@ This repo contains tutorials and examples of how to use Neptune. [qs-notebook]: how-to-guides/quickstart/notebooks/neptune_quickstart.ipynb [qs-colab]: https://colab.research.google.com/github/neptune-ai/scale-examples/blob/master/how-to-guides/quickstart/notebooks/neptune_quickstart.ipynb [debug-notebook]: how-to-guides/debug-model-training-runs/debug_training_runs.ipynb -[debug-colab]: https://colab.research.google.com/github/neptune-ai/scale-examples/blob/master/how-to-guides/debug-model-training-runs/debug_training_runs.ipynb +[debug-colab]: https://colab.research.google.com/github/neptune-ai/scale-examples/blob/master/how-to-guides/debug-model-training-runs/notebooks/debug_training_runs.ipynb [blog]: https://neptune.ai/blog @@ -62,7 +62,7 @@ This repo contains tutorials and examples of how to use Neptune. 
[runs-table]: https://docs.neptune.ai/runs_table [runs-table-example]: https://scale.neptune.ai/o/examples/org/LLM-Pretraining/runs/table?viewId=9e746462-f045-4ff2-9ac4-e41fa349b04d&detailsTab=dashboard&dash=table&type=run&compare=auto-5 [debug]: TODO - Add link to docs -[debug-example]: TODO - Add link to final Neptune project "examples/debug-training-runs" +[debug-example]: https://scale.neptune.ai/o/examples/org/debug-training-metrics/runs/table?viewId=standard-view&dash=table&compareChartsFilter-compound=udzSoRe3VmlvolZ8TbuB_zvfcAcgJmla8UuNku1rGWdg [docs-icon]: https://neptune.ai/wp-content/uploads/2023/06/file_icon.svg "Read the documentation" diff --git a/how-to-guides/debug-model-training-runs/notebooks/debug_training_runs.ipynb b/how-to-guides/debug-model-training-runs/notebooks/debug_training_runs.ipynb index d1c7f95..28efa27 100644 --- a/how-to-guides/debug-model-training-runs/notebooks/debug_training_runs.ipynb +++ b/how-to-guides/debug-model-training-runs/notebooks/debug_training_runs.ipynb @@ -6,7 +6,7 @@ "source": [ "# Log and Visualize Debugging Metrics in Neptune\n", "\n", - " \n", + " \n", " \"Open \n", "\n", "\n", From d9d6e773cec7b87eef53ef4d62ed9097c6addd65 Mon Sep 17 00:00:00 2001 From: LeoRoccoBreedt Date: Mon, 2 Jun 2025 15:10:17 +0200 Subject: [PATCH 118/125] small fixes --- .../notebooks/debug_training_runs.ipynb | 6 ++---- .../scripts/debug_training_runs.py | 3 +-- 2 files changed, 3 insertions(+), 6 deletions(-) diff --git a/how-to-guides/debug-model-training-runs/notebooks/debug_training_runs.ipynb b/how-to-guides/debug-model-training-runs/notebooks/debug_training_runs.ipynb index 28efa27..f160fa2 100644 --- a/how-to-guides/debug-model-training-runs/notebooks/debug_training_runs.ipynb +++ b/how-to-guides/debug-model-training-runs/notebooks/debug_training_runs.ipynb @@ -220,8 +220,7 @@ "from neptune_scale import Run\n", "\n", "run = Run(\n", - " experiment_name=\"debugging-gradient-norms\",\n", - " 
project=\"examples/debug-training-metrics\", # Create a run that is the head of an experiment.\n", + " experiment_name=\"debugging-gradient-norms\", # Create a run that is the head of an experiment.\n", ")\n", "\n", "print(run.get_experiment_url())" @@ -276,9 +275,8 @@ "outputs": [], "source": [ "step_counter = 0\n", - "for epoch in range(1, params[\"epochs\"]):\n", + "for epoch in range(params[\"epochs\"]):\n", " model.train()\n", - " running_loss = 0.0\n", "\n", " for batch_idx, (data, target) in enumerate(train_loader):\n", " # Move data to device\n", diff --git a/how-to-guides/debug-model-training-runs/scripts/debug_training_runs.py b/how-to-guides/debug-model-training-runs/scripts/debug_training_runs.py index 2703fb1..f918807 100644 --- a/how-to-guides/debug-model-training-runs/scripts/debug_training_runs.py +++ b/how-to-guides/debug-model-training-runs/scripts/debug_training_runs.py @@ -96,9 +96,8 @@ def main(): # Step 3: Track gradient norms during training step_counter = 0 - for epoch in range(1, params["epochs"]): + for epoch in range(params["epochs"]): model.train() - running_loss = 0.0 for batch_idx, (data, target) in enumerate(train_loader): # Move data to device From 0af184c1222f29b53d83fb9c780615951745590d Mon Sep 17 00:00:00 2001 From: Leo Breedt <101509998+LeoRoccoBreedt@users.noreply.github.com> Date: Fri, 6 Jun 2025 11:31:10 +0200 Subject: [PATCH 119/125] Apply suggestions from TW's review Co-authored-by: Edyta <142720610+szaganek@users.noreply.github.com> Signed-off-by: Leo Breedt <101509998+LeoRoccoBreedt@users.noreply.github.com> --- README.md | 2 +- .../notebooks/debug_training_runs.ipynb | 56 +++++++++---------- 2 files changed, 29 insertions(+), 29 deletions(-) diff --git a/README.md b/README.md index c1434e2..dfbcd11 100644 --- a/README.md +++ b/README.md @@ -61,7 +61,7 @@ This repo contains tutorials and examples of how to use Neptune. 
[resume-run]: https://docs.neptune.ai/resume_run [runs-table]: https://docs.neptune.ai/runs_table [runs-table-example]: https://scale.neptune.ai/o/examples/org/LLM-Pretraining/runs/table?viewId=9e746462-f045-4ff2-9ac4-e41fa349b04d&detailsTab=dashboard&dash=table&type=run&compare=auto-5 -[debug]: TODO - Add link to docs +[debug]: https://docs.neptune.ai/debug_runs_tutorial [debug-example]: https://scale.neptune.ai/o/examples/org/debug-training-metrics/runs/table?viewId=standard-view&dash=table&compareChartsFilter-compound=udzSoRe3VmlvolZ8TbuB_zvfcAcgJmla8UuNku1rGWdg diff --git a/how-to-guides/debug-model-training-runs/notebooks/debug_training_runs.ipynb b/how-to-guides/debug-model-training-runs/notebooks/debug_training_runs.ipynb index f160fa2..f1bef09 100644 --- a/how-to-guides/debug-model-training-runs/notebooks/debug_training_runs.ipynb +++ b/how-to-guides/debug-model-training-runs/notebooks/debug_training_runs.ipynb @@ -4,7 +4,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "# Log and Visualize Debugging Metrics in Neptune\n", + "# Log and visualize debugging metrics in Neptune\n", "\n", " \n", " \"Open \n", @@ -24,7 +24,7 @@ "## Introduction\n", "Training large models requires careful monitoring of layer-wise metrics to catch issues early. \n", "\n", - "Neptune makes it easy to track and visualize metrics like gradient norms across all layers of your model - helping you identify problems like vanishing/exploding gradients quickly.\n", + "Neptune makes it easy to track and visualize metrics like gradient norms across all layers of your model – and helps you quickly identify problems such as vanishing or exploding gradients.\n", "\n", "In this tutorial, you'll learn how to:\n", "1. 
**Initialize Neptune** and **log configuration parameters**\n", @@ -94,18 +94,18 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "### Setup training simulation\n", + "### Set up training simulation\n", "\n", "This tutorial uses a simple PyTorch model trained on the MNIST dataset to demonstrate Neptune's debugging capabilities. The training code tracks real training metrics including:\n", - "- Loss values from the CrossEntropyLoss function\n", + "- Loss values from the `CrossEntropyLoss` function\n", "- Gradient norms for each layer to monitor optimization\n", "\n", - "In the next cell, we'll:\n", - "1. Import PyTorch libraries and utilities\n", - "2. Create a SimpleModel class with 20 layers\n", - "3. Add gradient norm tracking to identify potential issues during training\n", + "In the next cell, we will:\n", + "1. Import PyTorch libraries and utilities.\n", + "2. Create a SimpleModel class with 20 layers.\n", + "3. Add gradient norm tracking to identify potential issues during training.\n", "\n", - "The setup is not optimized for the best model, but for illustration of using Neptune for debugging the training runs that we create.\n", + "Note that this setup is not optimized for the best model, but for illustrating how to use Neptune for debugging the training runs that we create.\n", "\n", "_You can replace this simulation with your actual model training code._" ] @@ -192,21 +192,21 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "## Debug model training run with Neptune\n", + "## Debug model training runs with Neptune\n", "\n", - "In this section, we'll walk through 4 key steps to effectively debug your training runs:\n", + "In this section, we'll walk through four key steps to effectively debug your training runs:\n", "\n", - "1. **Initialize Neptune** - set up the Neptune environment to track your training metrics\n", - "2. **Log configuration parameters** - record your model's hyperparameters and setup\n", - "3. 
**Track layer-wise metrics** - monitor gradient norms across all model layers\n", - "4. **Analyze training behavior** - Use Neptune's visualization tools to identify and diagnose training issues" + "1. **Initialize Neptune** – set up the Neptune environment to track your training metrics.\n", + "2. **Log configuration parameters** – record your model's hyperparameters and setup.\n", + "3. **Track layer-wise metrics** – monitor gradient norms across all model layers.\n", + "4. **Analyze training behavior** – Use Neptune's visualization tools to identify and diagnose training issues." ] }, { "cell_type": "markdown", "metadata": {}, "source": [ - "### Step 1: _Initialize Neptune Run object_\n", + "### Step 1: Initialize Neptune Run object\n", "\n", "The `Run` object is used to log configuration parameters and metrics. " ] @@ -220,7 +220,7 @@ "from neptune_scale import Run\n", "\n", "run = Run(\n", - " experiment_name=\"debugging-gradient-norms\", # Create a run that is the head of an experiment.\n", + " experiment_name=\"debugging-gradient-norms\", # Create a run that is the head of an experiment\n", ")\n", "\n", "print(run.get_experiment_url())" @@ -230,7 +230,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "### Step 2: _Log configuration parameters_" + "### Step 2: Log configuration parameters" ] }, { @@ -256,12 +256,12 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "### Step 3: _Track gradient norms during training_\n", + "### Step 3: Track gradient norms during training\n", "\n", - "In this training loop, we:\n", + "In this training loop, we will:\n", "1. Calculate `loss` and ` L2 gradient norms` from the `named_parameters`.\n", - "2. Track gradient norms during training to identify potential issues like vanishing/exploding gradients in a dictionary called `gradient_norms`\n", - "3. Log the gradient norms to Neptune for visualization and analysis using the `log_metrics` method\n", + "2. 
Track gradient norms during training to identify potential issues like vanishing or exploding gradients in a dictionary called `gradient_norms`.\n", + "3. Use the `log_metrics` method to log the gradient norms to Neptune for visualization and analysis.\n", "\n", "This approach allows you to monitor the learning dynamics across your entire model architecture in near real-time.\n", "\n", @@ -309,29 +309,29 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "### Step 4: _Analyze training behavior_\n", + "### Step 4: Analyze training behavior\n", "While your model trains, use Neptune's web interface to monitor and analyze metrics in near real-time:\n", "\n", "**1. Real-time metric visualization**\n", - "- Navigate to the _Charts_ tab to view live training metrics\n", + "- To view live training, navigate to the _Charts_ tab \n", "- Monitor multiple metrics simultaneously\n", "- Track training progress in real-time\n", "\n", "**2. Advanced metric filtering**\n", "- Focus on specific metrics using [advanced regex search](https://docs.neptune.ai/charts#filtering-charts)\n", - "- Example: `norm | \\d.weight` filter all gradient norms layers\n", + "- Example: `norm | \\d.weight` filters all gradient norms layers\n", "- Perfect for isolating problematic layers or components\n", "\n", "**3. Create custom dashboards**\n", "- Save filtered metrics to a [custom dashboard](https://docs.neptune.ai/custom_dashboard) for continuous monitoring\n", "- Automatically updates during training\n", - "- Share with team members\n", + "- You can share dashboards with team members\n", "- Ideal for tracking known problematic layers\n", "\n", "**4. 
Dynamic metric analysis**\n", - "- Group related metrics (e.g., all layer gradients)\n", + "- Group related metric, for example all layer gradients\n", "- Create automatically updating charts by [selecting metrics dynamically](https://docs.neptune.ai/chart_widget/#dynamic-metric-selection)\n", - "- Quickly identify vanishing/exploding gradients\n", + "- Quickly identify vanishing or exploding gradients\n", "- Example query: `\\d.weight$`\n", "\n", "_Example dashboard:_\n", From a1ef5e8b178069f4a636cac087cdaf9c0d4161ee Mon Sep 17 00:00:00 2001 From: Leo Breedt <101509998+LeoRoccoBreedt@users.noreply.github.com> Date: Fri, 6 Jun 2025 16:52:53 +0200 Subject: [PATCH 120/125] Apply suggestions from TW's review Co-authored-by: Edyta <142720610+szaganek@users.noreply.github.com> Signed-off-by: Leo Breedt <101509998+LeoRoccoBreedt@users.noreply.github.com> --- .../notebooks/debug_training_runs.ipynb | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/how-to-guides/debug-model-training-runs/notebooks/debug_training_runs.ipynb b/how-to-guides/debug-model-training-runs/notebooks/debug_training_runs.ipynb index f1bef09..af55b00 100644 --- a/how-to-guides/debug-model-training-runs/notebooks/debug_training_runs.ipynb +++ b/how-to-guides/debug-model-training-runs/notebooks/debug_training_runs.ipynb @@ -27,11 +27,11 @@ "Neptune makes it easy to track and visualize metrics like gradient norms across all layers of your model – and helps you quickly identify problems such as vanishing or exploding gradients.\n", "\n", "In this tutorial, you'll learn how to:\n", - "1. **Initialize Neptune** and **log configuration parameters**\n", - "2. Track **layer-wise gradient norms** during training \n", - "3. Analyze the metrics in Neptune's UI to **debug training issues**\n", + "1. **Initialize Neptune** and **log configuration parameters**.\n", + "2. Track **layer-wise gradient norms** during training.\n", + "3. 
Analyze the metrics in Neptune's UI to **debug training issues**.\n", "\n", - "Step through a pre-configured report:\n", + "See a pre-configured report in the Neptune app:\n", "\n", " \"Explore\n", "\n", @@ -102,7 +102,7 @@ "\n", "In the next cell, we will:\n", "1. Import PyTorch libraries and utilities.\n", - "2. Create a SimpleModel class with 20 layers.\n", + "2. Create a `SimpleModel` class with 20 layers.\n", "3. Add gradient norm tracking to identify potential issues during training.\n", "\n", "Note that this setup is not optimized for the best model, but for illustrating how to use Neptune for debugging the training runs that we create.\n", @@ -208,7 +208,7 @@ "source": [ "### Step 1: Initialize Neptune Run object\n", "\n", - "The `Run` object is used to log configuration parameters and metrics. " + "Use the `Run` object to log configuration parameters and metrics. " ] }, { @@ -259,7 +259,7 @@ "### Step 3: Track gradient norms during training\n", "\n", "In this training loop, we will:\n", - "1. Calculate `loss` and ` L2 gradient norms` from the `named_parameters`.\n", + "1. Calculate `loss` and `L2 gradient norms` from the `named_parameters`.\n", "2. Track gradient norms during training to identify potential issues like vanishing or exploding gradients in a dictionary called `gradient_norms`.\n", "3. 
Use the `log_metrics` method to log the gradient norms to Neptune for visualization and analysis.\n", "\n", From f3bae1a014a33b68582c3b80da9b8054da4df9dc Mon Sep 17 00:00:00 2001 From: LeoRoccoBreedt Date: Fri, 6 Jun 2025 17:19:43 +0200 Subject: [PATCH 121/125] refactor: updates from TW and sourcery review --- .../notebooks/debug_training_runs.ipynb | 18 ++++++++++-------- .../scripts/debug_training_runs.py | 18 ++++++++++-------- 2 files changed, 20 insertions(+), 16 deletions(-) diff --git a/how-to-guides/debug-model-training-runs/notebooks/debug_training_runs.ipynb b/how-to-guides/debug-model-training-runs/notebooks/debug_training_runs.ipynb index af55b00..522b2f6 100644 --- a/how-to-guides/debug-model-training-runs/notebooks/debug_training_runs.ipynb +++ b/how-to-guides/debug-model-training-runs/notebooks/debug_training_runs.ipynb @@ -124,14 +124,18 @@ "\n", "\n", "class SimpleModel(nn.Module):\n", - " def __init__(self, input_size: int = 784, hidden_size: int = 128, output_size: int = 10):\n", + " def __init__(\n", + " self,\n", + " input_size: int = 784,\n", + " hidden_size: int = 128,\n", + " output_size: int = 10,\n", + " num_layers: int = 10,\n", + " ):\n", " super().__init__()\n", "\n", - " layers = []\n", - " layers.append(nn.Linear(input_size, hidden_size))\n", - " layers.append(nn.ReLU())\n", + " layers = [nn.Linear(input_size, hidden_size), nn.ReLU()]\n", "\n", - " for _ in range(18):\n", + " for _ in range(num_layers):\n", " layers.append(nn.Linear(hidden_size, hidden_size))\n", " layers.append(nn.ReLU())\n", "\n", @@ -176,14 +180,12 @@ "\n", "# Load MNIST dataset\n", "train_dataset = datasets.MNIST(\"./data\", train=True, download=True, transform=transform)\n", - "# test_dataset = datasets.MNIST('./data', train=False, transform=transform)\n", "\n", "train_loader = DataLoader(train_dataset, batch_size=params[\"batch_size\"], shuffle=True)\n", - "# test_loader = DataLoader(test_dataset, batch_size=batch_size)\n", "\n", "device = \"cuda\" if 
torch.cuda.is_available() else \"cpu\"\n", "# Initialize model, loss function, and optimizer\n", - "model = SimpleModel().to(device)\n", + "model = SimpleModel(num_layers=params[\"num_layers\"]).to(device)\n", "criterion = nn.CrossEntropyLoss()\n", "optimizer = optim.Adam(model.parameters(), lr=params[\"lr\"])" ] diff --git a/how-to-guides/debug-model-training-runs/scripts/debug_training_runs.py b/how-to-guides/debug-model-training-runs/scripts/debug_training_runs.py index f918807..6ed9ecd 100644 --- a/how-to-guides/debug-model-training-runs/scripts/debug_training_runs.py +++ b/how-to-guides/debug-model-training-runs/scripts/debug_training_runs.py @@ -7,14 +7,18 @@ class SimpleModel(nn.Module): - def __init__(self, input_size: int = 784, hidden_size: int = 128, output_size: int = 10): + def __init__( + self, + input_size: int = 784, + hidden_size: int = 128, + output_size: int = 10, + num_layers: int = 10, + ): super().__init__() - layers = [] - layers.append(nn.Linear(input_size, hidden_size)) - layers.append(nn.ReLU()) + layers = [nn.Linear(input_size, hidden_size), nn.ReLU()] - for _ in range(18): + for _ in range(num_layers): layers.append(nn.Linear(hidden_size, hidden_size)) layers.append(nn.ReLU()) @@ -63,14 +67,12 @@ def main(): # Load MNIST dataset train_dataset = datasets.MNIST("./data", train=True, download=True, transform=transform) - # test_dataset = datasets.MNIST('./data', train=False, transform=transform) train_loader = DataLoader(train_dataset, batch_size=params["batch_size"], shuffle=True) - # test_loader = DataLoader(test_dataset, batch_size=batch_size) device = "cuda" if torch.cuda.is_available() else "cpu" # Initialize model, loss function, and optimizer - model = SimpleModel().to(device) + model = SimpleModel(num_layers=params["num_layers"]).to(device) criterion = nn.CrossEntropyLoss() optimizer = optim.Adam(model.parameters(), lr=params["lr"]) From 6c828f239d270a41606d5cb3367fa8ba7567b73a Mon Sep 17 00:00:00 2001 From: LeoRoccoBreedt Date: 
Tue, 24 Jun 2025 16:18:14 +0200 Subject: [PATCH 122/125] refactor: update notebook workflow and links from TW review --- .../notebooks/debug_training_runs.ipynb | 22 ++++++------------- 1 file changed, 7 insertions(+), 15 deletions(-) diff --git a/how-to-guides/debug-model-training-runs/notebooks/debug_training_runs.ipynb b/how-to-guides/debug-model-training-runs/notebooks/debug_training_runs.ipynb index 522b2f6..4f9218b 100644 --- a/how-to-guides/debug-model-training-runs/notebooks/debug_training_runs.ipynb +++ b/how-to-guides/debug-model-training-runs/notebooks/debug_training_runs.ipynb @@ -14,6 +14,9 @@ "\n", "\n", " \"View\n", + "\n", + "\n", + " \"Explore\n", "" ] }, @@ -31,10 +34,7 @@ "2. Track **layer-wise gradient norms** during training.\n", "3. Analyze the metrics in Neptune's UI to **debug training issues**.\n", "\n", - "See a pre-configured report in the Neptune app:\n", - "\n", - " \"Explore\n", - "\n", + "See a [pre-configured report](https://scale.neptune.ai/examples/debug-training-metrics/reports/Analyze-debugging-metrics-9f0f017e-c95a-4347-8bd4-2c50120f8315) in the Neptune app.\n", "\n", "_Note: This is a code recipe that you can adapt for your own model training needs._" ] @@ -223,9 +223,7 @@ "\n", "run = Run(\n", " experiment_name=\"debugging-gradient-norms\", # Create a run that is the head of an experiment\n", - ")\n", - "\n", - "print(run.get_experiment_url())" + ")" ] }, { @@ -336,10 +334,7 @@ "- Quickly identify vanishing or exploding gradients\n", "- Example query: `\\d.weight$`\n", "\n", - "_Example dashboard:_\n", - "\n", - " \"Explore\n", - "\n", + "[See an example dashboard](https://scale.neptune.ai/o/examples/org/debug-training-metrics/runs/compare?viewId=standard-view&dash=dashboard&dashboardId=9f0f002f-1852-448b-813d-b230de12b0b5&compare=uXbDuUl1H5I-cMeJj_n1iXy_roGF0tTL3IHVit0zU8Gs).\n", "\n", "**5. 
Document training insights**\n", "- Create [custom reports](https://docs.neptune.ai/reports) to track training progress\n", @@ -347,10 +342,7 @@ "- Maintain training history\n", "- Share insights with team members\n", "\n", - "_Example report:_\n", - "\n", - " \"Explore\n", - "\n" + "[See an example report](https://scale.neptune.ai/examples/debug-training-metrics/reports/Analyze-debugging-metrics-9f0f017e-c95a-4347-8bd4-2c50120f8315)." ] } ], From 6b97205ffa75327fe8ae9712b7a13ff6b18ed0da Mon Sep 17 00:00:00 2001 From: Leo Breedt <101509998+LeoRoccoBreedt@users.noreply.github.com> Date: Thu, 26 Jun 2025 10:20:26 +0200 Subject: [PATCH 123/125] Apply suggestions from code review Co-authored-by: Edyta <142720610+szaganek@users.noreply.github.com> Signed-off-by: Leo Breedt <101509998+LeoRoccoBreedt@users.noreply.github.com> --- .../notebooks/debug_training_runs.ipynb | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/how-to-guides/debug-model-training-runs/notebooks/debug_training_runs.ipynb b/how-to-guides/debug-model-training-runs/notebooks/debug_training_runs.ipynb index 4f9218b..7dcb10d 100644 --- a/how-to-guides/debug-model-training-runs/notebooks/debug_training_runs.ipynb +++ b/how-to-guides/debug-model-training-runs/notebooks/debug_training_runs.ipynb @@ -12,7 +12,7 @@ "\n", " \"Open\n", "\n", - "\n", + "\n", " \"View\n", "\n", "\n", @@ -318,7 +318,7 @@ "- Track training progress in real-time\n", "\n", "**2. 
Advanced metric filtering**\n", - "- Focus on specific metrics using [advanced regex search](https://docs.neptune.ai/charts#filtering-charts)\n", + "- Focus on specific metrics using [advanced regex search](https://docs.neptune.ai/regex)\n", "- Example: `norm | \\d.weight` filters all gradient norms layers\n", "- Perfect for isolating problematic layers or components\n", "\n", From f19dfb7ea929315cfc5cc14fa0a028ac806da3c7 Mon Sep 17 00:00:00 2001 From: LeoRoccoBreedt Date: Thu, 26 Jun 2025 10:29:44 +0200 Subject: [PATCH 124/125] refactor: updates from TW's review --- .../notebooks/debug_training_runs.ipynb | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/how-to-guides/debug-model-training-runs/notebooks/debug_training_runs.ipynb b/how-to-guides/debug-model-training-runs/notebooks/debug_training_runs.ipynb index 7dcb10d..57c3575 100644 --- a/how-to-guides/debug-model-training-runs/notebooks/debug_training_runs.ipynb +++ b/how-to-guides/debug-model-training-runs/notebooks/debug_training_runs.ipynb @@ -325,8 +325,8 @@ "**3. Create custom dashboards**\n", "- Save filtered metrics to a [custom dashboard](https://docs.neptune.ai/custom_dashboard) for continuous monitoring\n", "- Automatically updates during training\n", - "- You can share dashboards with team members\n", "- Ideal for tracking known problematic layers\n", + "- You can share dashboards with team members\n", "\n", "**4. 
Dynamic metric analysis**\n", "- Group related metric, for example all layer gradients\n", From 4abd1db95eeb242f75588efa6a1f70cacaef9bcc Mon Sep 17 00:00:00 2001 From: Leo Breedt <101509998+LeoRoccoBreedt@users.noreply.github.com> Date: Fri, 27 Jun 2025 16:34:35 +0200 Subject: [PATCH 125/125] Apply suggestions from code review Remove reference to Scale Co-authored-by: Edyta <142720610+szaganek@users.noreply.github.com> Signed-off-by: Leo Breedt <101509998+LeoRoccoBreedt@users.noreply.github.com> --- .../notebooks/debug_training_runs.ipynb | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/how-to-guides/debug-model-training-runs/notebooks/debug_training_runs.ipynb b/how-to-guides/debug-model-training-runs/notebooks/debug_training_runs.ipynb index 57c3575..2b9b6a4 100644 --- a/how-to-guides/debug-model-training-runs/notebooks/debug_training_runs.ipynb +++ b/how-to-guides/debug-model-training-runs/notebooks/debug_training_runs.ipynb @@ -45,9 +45,9 @@ "source": [ "## Before you start\n", "\n", - " 1. Create a Neptune Scale account. [Register →](https://neptune.ai/early-access)\n", - " 2. Create a Neptune project for tracking metadata. For instructions, see [Projects](https://docs-beta.neptune.ai/projects/) in the Neptune Scale docs.\n", - " 3. Install and configure Neptune Scale for logging metadata. For instructions, see [Get started](https://docs-beta.neptune.ai/setup) in the Neptune Scale docs." + " 1. Create a Neptune account. [Register →](https://neptune.ai/early-access)\n", + " 2. Create a Neptune project for tracking metadata. For instructions, see [Projects](https://docs-beta.neptune.ai/projects/) in the Neptune docs.\n", + " 3. Install and configure Neptune for logging metadata. For instructions, see [Get started](https://docs-beta.neptune.ai/setup) in the Neptune docs." 
] }, { @@ -55,7 +55,7 @@ "metadata": {}, "source": [ "### Set environment variables\n", - "Set your project name and API token as environment variables to log to your Neptune Scale project.\n", + "Set your project name and API token as environment variables to log to your Neptune project.\n", "\n", "Uncomment the code block below and replace placeholder values with your own credentials:" ]