I added a little code to include a vertical dashed line on the plot representing where total_weights = number of train observations. I also moved n_epoch to be an argument of fit_model() so learners can play around with the impact of the number of epochs more easily.
619 lines
66 KiB
Plaintext
619 lines
66 KiB
Plaintext
{
|
|
"nbformat": 4,
|
|
"nbformat_minor": 0,
|
|
"metadata": {
|
|
"colab": {
|
|
"provenance": [],
|
|
"gpuType": "T4"
|
|
},
|
|
"kernelspec": {
|
|
"name": "python3",
|
|
"display_name": "Python 3"
|
|
},
|
|
"language_info": {
|
|
"name": "python"
|
|
},
|
|
"accelerator": "GPU"
|
|
},
|
|
"cells": [
|
|
{
|
|
"cell_type": "markdown",
|
|
"source": [
|
|
"# **Notebook 8.3: Double Descent**\n",
|
|
"\n",
|
|
"This notebook investigates double descent as described in section 8.4 of the book.\n",
|
|
"\n",
|
|
"It uses the MNIST-1D database which can be found at https://github.com/greydanus/mnist1d\n",
|
|
"\n",
|
|
"Work through the cells below, running each cell in turn. In various places you will see the words \"TO DO\". Follow the instructions at these places and make predictions about what is going to happen or write code to complete the functions.\n",
|
|
"\n",
|
|
"Contact me at udlbookmail@gmail.com if you find any mistakes or have any suggestions."
|
|
],
|
|
"metadata": {
|
|
"id": "L6chybAVFJW2"
|
|
}
|
|
},
|
|
{
|
|
"cell_type": "code",
|
|
"source": [
|
|
"# Run this if you're in a Colab to install MNIST 1D repository\n",
|
|
"!pip install git+https://github.com/greydanus/mnist1d"
|
|
],
|
|
"metadata": {
|
|
"id": "fn9BP5N5TguP",
|
|
"outputId": "3ba15b8f-2395-4b1a-8a66-8ed80e9b5138",
|
|
"colab": {
|
|
"base_uri": "https://localhost:8080/"
|
|
}
|
|
},
|
|
"execution_count": 29,
|
|
"outputs": [
|
|
{
|
|
"output_type": "stream",
|
|
"name": "stdout",
|
|
"text": [
|
|
"Collecting git+https://github.com/greydanus/mnist1d\n",
|
|
" Cloning https://github.com/greydanus/mnist1d to /tmp/pip-req-build-cbhxd1j8\n",
|
|
" Running command git clone --filter=blob:none --quiet https://github.com/greydanus/mnist1d /tmp/pip-req-build-cbhxd1j8\n",
|
|
" Resolved https://github.com/greydanus/mnist1d to commit 350929d12f4c9a4b7355e0c96604e41b9239bdb4\n",
|
|
" Installing build dependencies ... \u001b[?25l\u001b[?25hdone\n",
|
|
" Getting requirements to build wheel ... \u001b[?25l\u001b[?25hdone\n",
|
|
" Preparing metadata (pyproject.toml) ... \u001b[?25l\u001b[?25hdone\n",
|
|
"Requirement already satisfied: requests in /usr/local/lib/python3.10/dist-packages (from mnist1d==0.0.2.post9) (2.31.0)\n",
|
|
"Requirement already satisfied: numpy in /usr/local/lib/python3.10/dist-packages (from mnist1d==0.0.2.post9) (1.25.2)\n",
|
|
"Requirement already satisfied: matplotlib in /usr/local/lib/python3.10/dist-packages (from mnist1d==0.0.2.post9) (3.7.1)\n",
|
|
"Requirement already satisfied: scipy in /usr/local/lib/python3.10/dist-packages (from mnist1d==0.0.2.post9) (1.11.4)\n",
|
|
"Requirement already satisfied: contourpy>=1.0.1 in /usr/local/lib/python3.10/dist-packages (from matplotlib->mnist1d==0.0.2.post9) (1.2.1)\n",
|
|
"Requirement already satisfied: cycler>=0.10 in /usr/local/lib/python3.10/dist-packages (from matplotlib->mnist1d==0.0.2.post9) (0.12.1)\n",
|
|
"Requirement already satisfied: fonttools>=4.22.0 in /usr/local/lib/python3.10/dist-packages (from matplotlib->mnist1d==0.0.2.post9) (4.53.0)\n",
|
|
"Requirement already satisfied: kiwisolver>=1.0.1 in /usr/local/lib/python3.10/dist-packages (from matplotlib->mnist1d==0.0.2.post9) (1.4.5)\n",
|
|
"Requirement already satisfied: packaging>=20.0 in /usr/local/lib/python3.10/dist-packages (from matplotlib->mnist1d==0.0.2.post9) (24.1)\n",
|
|
"Requirement already satisfied: pillow>=6.2.0 in /usr/local/lib/python3.10/dist-packages (from matplotlib->mnist1d==0.0.2.post9) (9.4.0)\n",
|
|
"Requirement already satisfied: pyparsing>=2.3.1 in /usr/local/lib/python3.10/dist-packages (from matplotlib->mnist1d==0.0.2.post9) (3.1.2)\n",
|
|
"Requirement already satisfied: python-dateutil>=2.7 in /usr/local/lib/python3.10/dist-packages (from matplotlib->mnist1d==0.0.2.post9) (2.8.2)\n",
|
|
"Requirement already satisfied: charset-normalizer<4,>=2 in /usr/local/lib/python3.10/dist-packages (from requests->mnist1d==0.0.2.post9) (3.3.2)\n",
|
|
"Requirement already satisfied: idna<4,>=2.5 in /usr/local/lib/python3.10/dist-packages (from requests->mnist1d==0.0.2.post9) (3.7)\n",
|
|
"Requirement already satisfied: urllib3<3,>=1.21.1 in /usr/local/lib/python3.10/dist-packages (from requests->mnist1d==0.0.2.post9) (2.0.7)\n",
|
|
"Requirement already satisfied: certifi>=2017.4.17 in /usr/local/lib/python3.10/dist-packages (from requests->mnist1d==0.0.2.post9) (2024.6.2)\n",
|
|
"Requirement already satisfied: six>=1.5 in /usr/local/lib/python3.10/dist-packages (from python-dateutil>=2.7->matplotlib->mnist1d==0.0.2.post9) (1.16.0)\n"
|
|
]
|
|
}
|
|
]
|
|
},
|
|
{
|
|
"cell_type": "code",
|
|
"source": [
|
|
"import torch, torch.nn as nn\n",
|
|
"from torch.utils.data import TensorDataset, DataLoader\n",
|
|
"from torch.optim.lr_scheduler import StepLR\n",
|
|
"import numpy as np\n",
|
|
"import matplotlib.pyplot as plt\n",
|
|
"import mnist1d\n",
|
|
"import random\n",
|
|
"random.seed(0)\n",
|
|
"\n",
|
|
"# Try attaching to GPU -- use \"Runtime > Change runtime type\" to select a GPU\n",
|
|
"DEVICE = str(torch.device('cuda' if torch.cuda.is_available() else 'cpu'))\n",
|
|
"print('Using:', DEVICE)"
|
|
],
|
|
"metadata": {
|
|
"id": "hFxuHpRqTgri",
|
|
"outputId": "2a246d1e-8568-4605-df0a-28dad08935ec",
|
|
"colab": {
|
|
"base_uri": "https://localhost:8080/"
|
|
}
|
|
},
|
|
"execution_count": 30,
|
|
"outputs": [
|
|
{
|
|
"output_type": "stream",
|
|
"name": "stdout",
|
|
"text": [
|
|
"Using: cuda\n"
|
|
]
|
|
}
|
|
]
|
|
},
|
|
{
|
|
"cell_type": "code",
|
|
"source": [
|
|
"# Configure and regenerate the MNIST-1D dataset (4000 train / 4000 test)\n",
|
|
"args = mnist1d.data.get_dataset_args()\n",
|
|
"args.num_samples = 8000\n",
|
|
"args.train_split = 0.5\n",
|
|
"args.corr_noise_scale = 0.25\n",
|
|
"args.iid_noise_scale = 2e-2\n",
|
|
"data = mnist1d.data.get_dataset(args, path='./mnist1d_data.pkl', download=False, regenerate=True)\n",
|
|
"\n",
|
|
"# Add 15% noise to training labels\n",
|
|
"# (each label has a 15% chance of being replaced by a random class in 0..9)\n",
|
|
"for c_y in range(len(data['y'])):\n",
|
|
"    if random.random() < 0.15:\n",
|
|
"        data['y'][c_y] = int(random.random() * 10)\n",
|
|
"\n",
|
|
"# The training and test input and outputs are in\n",
|
|
"# data['x'], data['y'], data['x_test'], and data['y_test']\n",
|
|
"print(\"Examples in training set: {}\".format(len(data['y'])))\n",
|
|
"print(\"Examples in test set: {}\".format(len(data['y_test'])))\n",
|
|
"print(\"Dimensionality of each example: {}\".format(data['x'].shape[-1]))"
|
|
],
|
|
"metadata": {
|
|
"id": "PW2gyXL5UkLU",
|
|
"outputId": "a4186965-3025-4d00-b780-441baab421de",
|
|
"colab": {
|
|
"base_uri": "https://localhost:8080/"
|
|
}
|
|
},
|
|
"execution_count": 31,
|
|
"outputs": [
|
|
{
|
|
"output_type": "stream",
|
|
"name": "stdout",
|
|
"text": [
|
|
"Did or could not load data from ./mnist1d_data.pkl. Rebuilding dataset...\n",
|
|
"Examples in training set: 4000\n",
|
|
"Examples in test set: 4000\n",
|
|
"Dimensionality of each example: 40\n"
|
|
]
|
|
}
|
|
]
|
|
},
|
|
{
|
|
"cell_type": "code",
|
|
"source": [
|
|
"# He initialization for every linear layer in the network\n",
|
|
"def weights_init(layer_in):\n",
|
|
"  if isinstance(layer_in, nn.Linear):\n",
|
|
"    nn.init.kaiming_uniform_(layer_in.weight)\n",
|
|
"    layer_in.bias.data.fill_(0.0)\n",
|
|
"\n",
|
|
"# Build a fresh two-hidden-layer ReLU network with n_hidden units per hidden layer\n",
|
|
"def get_model(n_hidden):\n",
|
|
"  D_i = 40    # input dimensionality (each MNIST-1D example has 40 values)\n",
|
|
"  D_o = 10    # output dimensionality (ten digit classes)\n",
|
|
"\n",
|
|
"  # Two hidden layers of width n_hidden, with a ReLU between each pair of layers\n",
|
|
"  model = nn.Sequential(\n",
|
|
"      nn.Linear(D_i, n_hidden),\n",
|
|
"      nn.ReLU(),\n",
|
|
"      nn.Linear(n_hidden, n_hidden),\n",
|
|
"      nn.ReLU(),\n",
|
|
"      nn.Linear(n_hidden, D_o))\n",
|
|
"\n",
|
|
"  # Apply the He initializer defined above to every layer\n",
|
|
"  model.apply(weights_init)\n",
|
|
"\n",
|
|
"  return model"
|
|
],
|
|
"metadata": {
|
|
"id": "hAIvZOAlTnk9"
|
|
},
|
|
"execution_count": 32,
|
|
"outputs": []
|
|
},
|
|
{
|
|
"cell_type": "code",
|
|
"source": [
|
|
"def fit_model(model, data, n_epoch):\n",
|
|
"\n",
|
|
"  # Train model on data['x']/data['y'] for n_epoch passes with SGD and\n",
|
|
"  # return (train error %, test error %) as measured after the final epoch.\n",
|
|
"\n",
|
|
"  # choose cross entropy loss function (equation 5.24)\n",
|
|
"  loss_function = torch.nn.CrossEntropyLoss()\n",
|
|
"  # construct SGD optimizer and initialize learning rate and momentum\n",
|
|
"  optimizer = torch.optim.SGD(model.parameters(), lr = 0.01, momentum=0.9)\n",
|
|
"\n",
|
|
"  # Convert data to tensors. Labels use 'int64' (CrossEntropyLoss needs long\n",
|
|
"  # targets) because the string alias 'long' was removed in NumPy 2.0.\n",
|
|
"  # The .transpose() previously applied to data['y'] was a no-op on a 1-D\n",
|
|
"  # array and has been dropped so train and test labels are handled alike.\n",
|
|
"  x_train = torch.tensor(data['x'].astype('float32'))\n",
|
|
"  y_train = torch.tensor(data['y'].astype('int64'))\n",
|
|
"  x_test = torch.tensor(data['x_test'].astype('float32'))\n",
|
|
"  y_test = torch.tensor(data['y_test'].astype('int64'))\n",
|
|
"\n",
|
|
"  # Seed before constructing the loader so batch order is reproducible.\n",
|
|
"  # Bug fix: the original passed worker_init_fn=np.random.seed(1), which\n",
|
|
"  # CALLS the seed function immediately and hands DataLoader a useless None.\n",
|
|
"  torch.manual_seed(1)\n",
|
|
"  np.random.seed(1)\n",
|
|
"  # load the data into a class that creates the batches\n",
|
|
"  data_loader = DataLoader(TensorDataset(x_train, y_train), batch_size=100, shuffle=True)\n",
|
|
"\n",
|
|
"  for epoch in range(n_epoch):\n",
|
|
"    # loop over batches\n",
|
|
"    for i, batch in enumerate(data_loader):\n",
|
|
"      # retrieve inputs and labels for this batch\n",
|
|
"      x_batch, y_batch = batch\n",
|
|
"      # zero the parameter gradients\n",
|
|
"      optimizer.zero_grad()\n",
|
|
"      # forward pass -- calculate model output\n",
|
|
"      pred = model(x_batch)\n",
|
|
"      # compute the loss\n",
|
|
"      loss = loss_function(pred, y_batch)\n",
|
|
"      # backward pass\n",
|
|
"      loss.backward()\n",
|
|
"      # SGD update\n",
|
|
"      optimizer.step()\n",
|
|
"\n",
|
|
"    # Run whole dataset to get statistics -- normally wouldn't do this.\n",
|
|
"    # no_grad avoids building an unused autograd graph for these eval passes.\n",
|
|
"    with torch.no_grad():\n",
|
|
"      pred_train = model(x_train)\n",
|
|
"      pred_test = model(x_test)\n",
|
|
"    _, predicted_train_class = torch.max(pred_train.data, 1)\n",
|
|
"    _, predicted_test_class = torch.max(pred_test.data, 1)\n",
|
|
"    errors_train = 100 - 100 * (predicted_train_class == y_train).float().sum() / len(y_train)\n",
|
|
"    errors_test = 100 - 100 * (predicted_test_class == y_test).float().sum() / len(y_test)\n",
|
|
"    losses_train = loss_function(pred_train, y_train).item()\n",
|
|
"    losses_test = loss_function(pred_test, y_test).item()\n",
|
|
"    if epoch % 100 == 0:\n",
|
|
"      print(f'Epoch {epoch:5d}, train loss {losses_train:.6f}, train error {errors_train:3.2f}, test loss {losses_test:.6f}, test error {errors_test:3.2f}')\n",
|
|
"\n",
|
|
"  return errors_train, errors_test\n"
|
|
],
|
|
"metadata": {
|
|
"id": "AazlQhheWmHk"
|
|
},
|
|
"execution_count": 33,
|
|
"outputs": []
|
|
},
|
|
{
|
|
"cell_type": "code",
|
|
"source": [
|
|
"# Total number of trainable parameters in a model\n",
|
|
"def count_parameters(model):\n",
|
|
"  total = 0\n",
|
|
"  for p in model.parameters():\n",
|
|
"    if p.requires_grad:\n",
|
|
"      total += p.numel()\n",
|
|
"  return total"
|
|
],
|
|
"metadata": {
|
|
"id": "AQNCmFNV6JpV"
|
|
},
|
|
"execution_count": 34,
|
|
"outputs": []
|
|
},
|
|
{
|
|
"cell_type": "markdown",
|
|
"source": [
|
|
"The following code produces the double descent curve by training the model with different numbers of hidden units and plotting the test error.\n",
|
|
"\n",
|
|
"TO DO:\n",
|
|
"\n",
|
|
"*Before* you run the code, and considering that there are 4000 training examples predict:<br>\n",
|
|
"\n",
|
|
"1. At what capacity do you think the training error will become zero?\n",
|
|
"2. At what capacity do you expect the first minima of the double descent curve to appear?\n",
|
|
"3. At what capacity do you expect the maximum of the double descent curve to appear?"
|
|
],
|
|
"metadata": {
|
|
"id": "IcP4UPMudxPS"
|
|
}
|
|
},
|
|
{
|
|
"cell_type": "code",
|
|
"source": [
|
|
"# This code will take a while (~30 mins on GPU) to run! Go and make a cup of coffee!\n",
|
|
"# NOTE(review): the model and tensors are never moved to DEVICE, so as written\n",
|
|
"# this actually trains on the CPU -- confirm whether GPU transfer was intended.\n",
|
|
"\n",
|
|
"hidden_variables = np.array([2,4,6,8,10,14,18,22,26,30,35,40,45,50,55,60,70,80,90,100,120,140,160,180,200,250,300,400])\n",
|
|
"\n",
|
|
"# Bug fix: np.zeros_like(hidden_variables) inherits its INTEGER dtype, so the\n",
|
|
"# fractional error percentages were silently truncated to whole numbers.\n",
|
|
"errors_train_all = np.zeros(len(hidden_variables))\n",
|
|
"errors_test_all = np.zeros(len(hidden_variables))\n",
|
|
"# Parameter counts really are integers, so an int array is fine here\n",
|
|
"total_weights_all = np.zeros(len(hidden_variables), dtype=int)\n",
|
|
"\n",
|
|
"# loop over the dataset n_epoch times\n",
|
|
"n_epoch = 1000\n",
|
|
"\n",
|
|
"# For each hidden variable size\n",
|
|
"for c_hidden in range(len(hidden_variables)):\n",
|
|
"  print(f'Training model with {hidden_variables[c_hidden]:3d} hidden variables')\n",
|
|
"  # Get a model\n",
|
|
"  model = get_model(hidden_variables[c_hidden])\n",
|
|
"  # Count and store number of weights\n",
|
|
"  total_weights_all[c_hidden] = count_parameters(model)\n",
|
|
"  # Train the model\n",
|
|
"  errors_train, errors_test = fit_model(model, data, n_epoch)\n",
|
|
"  # Store the results\n",
|
|
"  errors_train_all[c_hidden] = errors_train\n",
|
|
"  errors_test_all[c_hidden] = errors_test\n"
|
|
],
|
|
"metadata": {
|
|
"id": "K4OmBZGHWXpk",
|
|
"outputId": "62a7aaf0-793a-4ab2-960e-c1127f15717c",
|
|
"colab": {
|
|
"base_uri": "https://localhost:8080/"
|
|
}
|
|
},
|
|
"execution_count": 35,
|
|
"outputs": [
|
|
{
|
|
"output_type": "stream",
|
|
"name": "stdout",
|
|
"text": [
|
|
"Training model with 2 hidden variables\n",
|
|
"Epoch 0, train loss 2.280202, train error 89.05, test loss 2.276196, test error 89.18\n",
|
|
"Epoch 100, train loss 1.925899, train error 71.75, test loss 1.761014, test error 70.57\n",
|
|
"Epoch 200, train loss 1.912995, train error 71.20, test loss 1.747572, test error 69.07\n",
|
|
"Epoch 300, train loss 1.908751, train error 71.32, test loss 1.741206, test error 69.07\n",
|
|
"Epoch 400, train loss 1.906859, train error 71.05, test loss 1.737478, test error 70.00\n",
|
|
"Epoch 500, train loss 1.906671, train error 71.43, test loss 1.732123, test error 68.95\n",
|
|
"Epoch 600, train loss 1.903380, train error 71.28, test loss 1.741086, test error 69.65\n",
|
|
"Epoch 700, train loss 1.902236, train error 70.88, test loss 1.733666, test error 69.22\n",
|
|
"Epoch 800, train loss 1.903633, train error 71.88, test loss 1.734591, test error 70.57\n",
|
|
"Epoch 900, train loss 1.901804, train error 71.03, test loss 1.742573, test error 68.10\n",
|
|
"Training model with 4 hidden variables\n",
|
|
"Epoch 0, train loss 2.267358, train error 86.97, test loss 2.269902, test error 87.62\n",
|
|
"Epoch 100, train loss 1.716064, train error 62.22, test loss 1.519437, test error 58.25\n",
|
|
"Epoch 200, train loss 1.705652, train error 61.88, test loss 1.507422, test error 58.40\n",
|
|
"Epoch 300, train loss 1.696660, train error 60.62, test loss 1.507704, test error 57.58\n",
|
|
"Epoch 400, train loss 1.691092, train error 61.60, test loss 1.499592, test error 56.85\n",
|
|
"Epoch 500, train loss 1.684060, train error 61.05, test loss 1.513196, test error 57.95\n",
|
|
"Epoch 600, train loss 1.677890, train error 61.10, test loss 1.504343, test error 56.85\n",
|
|
"Epoch 700, train loss 1.677930, train error 61.10, test loss 1.509796, test error 58.15\n",
|
|
"Epoch 800, train loss 1.676683, train error 61.17, test loss 1.506790, test error 58.28\n",
|
|
"Epoch 900, train loss 1.676809, train error 61.53, test loss 1.505417, test error 57.72\n",
|
|
"Training model with 6 hidden variables\n",
|
|
"Epoch 0, train loss 2.290775, train error 89.18, test loss 2.284365, test error 87.57\n",
|
|
"Epoch 100, train loss 1.634348, train error 58.42, test loss 1.467763, test error 55.67\n",
|
|
"Epoch 200, train loss 1.602023, train error 57.25, test loss 1.456007, test error 55.38\n",
|
|
"Epoch 300, train loss 1.587963, train error 56.50, test loss 1.453522, test error 55.33\n",
|
|
"Epoch 400, train loss 1.587952, train error 56.55, test loss 1.453863, test error 54.92\n",
|
|
"Epoch 500, train loss 1.579495, train error 56.42, test loss 1.452398, test error 55.40\n",
|
|
"Epoch 600, train loss 1.590881, train error 56.67, test loss 1.473001, test error 56.28\n",
|
|
"Epoch 700, train loss 1.581218, train error 56.58, test loss 1.470745, test error 55.90\n",
|
|
"Epoch 800, train loss 1.575105, train error 56.30, test loss 1.455353, test error 55.92\n",
|
|
"Epoch 900, train loss 1.579539, train error 56.15, test loss 1.449462, test error 55.67\n",
|
|
"Training model with 8 hidden variables\n",
|
|
"Epoch 0, train loss 2.278947, train error 85.18, test loss 2.266557, test error 84.68\n",
|
|
"Epoch 100, train loss 1.593652, train error 55.60, test loss 1.449531, test error 56.00\n",
|
|
"Epoch 200, train loss 1.557044, train error 53.92, test loss 1.453447, test error 55.03\n",
|
|
"Epoch 300, train loss 1.544829, train error 53.38, test loss 1.457726, test error 55.22\n",
|
|
"Epoch 400, train loss 1.538902, train error 54.05, test loss 1.456941, test error 55.33\n",
|
|
"Epoch 500, train loss 1.538077, train error 54.28, test loss 1.469433, test error 56.20\n",
|
|
"Epoch 600, train loss 1.529778, train error 53.60, test loss 1.470659, test error 55.72\n",
|
|
"Epoch 700, train loss 1.522470, train error 53.42, test loss 1.476947, test error 55.95\n",
|
|
"Epoch 800, train loss 1.520377, train error 53.22, test loss 1.481796, test error 56.55\n",
|
|
"Epoch 900, train loss 1.522042, train error 53.42, test loss 1.489359, test error 56.15\n",
|
|
"Training model with 10 hidden variables\n",
|
|
"Epoch 0, train loss 2.250067, train error 83.80, test loss 2.238750, test error 81.28\n",
|
|
"Epoch 100, train loss 1.570056, train error 55.80, test loss 1.444564, test error 54.62\n",
|
|
"Epoch 200, train loss 1.512308, train error 53.33, test loss 1.417375, test error 53.70\n",
|
|
"Epoch 300, train loss 1.485546, train error 52.62, test loss 1.415674, test error 53.62\n",
|
|
"Epoch 400, train loss 1.465965, train error 50.22, test loss 1.415631, test error 52.95\n",
|
|
"Epoch 500, train loss 1.450910, train error 50.50, test loss 1.412722, test error 52.70\n",
|
|
"Epoch 600, train loss 1.437163, train error 49.65, test loss 1.409063, test error 52.92\n",
|
|
"Epoch 700, train loss 1.430005, train error 49.38, test loss 1.402299, test error 52.83\n",
|
|
"Epoch 800, train loss 1.420938, train error 49.17, test loss 1.398151, test error 52.22\n",
|
|
"Epoch 900, train loss 1.417372, train error 49.25, test loss 1.407553, test error 52.58\n",
|
|
"Training model with 14 hidden variables\n",
|
|
"Epoch 0, train loss 2.250147, train error 84.45, test loss 2.224496, test error 83.50\n",
|
|
"Epoch 100, train loss 1.467325, train error 49.22, test loss 1.371005, test error 51.35\n",
|
|
"Epoch 200, train loss 1.371697, train error 46.47, test loss 1.341893, test error 49.60\n",
|
|
"Epoch 300, train loss 1.338310, train error 45.25, test loss 1.345958, test error 49.47\n",
|
|
"Epoch 400, train loss 1.329984, train error 44.95, test loss 1.364456, test error 49.85\n",
|
|
"Epoch 500, train loss 1.312067, train error 44.70, test loss 1.368620, test error 49.67\n",
|
|
"Epoch 600, train loss 1.307369, train error 44.00, test loss 1.383248, test error 49.97\n",
|
|
"Epoch 700, train loss 1.302358, train error 44.10, test loss 1.384645, test error 49.78\n",
|
|
"Epoch 800, train loss 1.309552, train error 44.42, test loss 1.399194, test error 50.60\n",
|
|
"Epoch 900, train loss 1.305113, train error 43.70, test loss 1.416064, test error 51.28\n",
|
|
"Training model with 18 hidden variables\n",
|
|
"Epoch 0, train loss 2.159715, train error 80.35, test loss 2.139665, test error 80.20\n",
|
|
"Epoch 100, train loss 1.385202, train error 47.75, test loss 1.362450, test error 51.58\n",
|
|
"Epoch 200, train loss 1.265389, train error 43.55, test loss 1.335479, test error 49.20\n",
|
|
"Epoch 300, train loss 1.224858, train error 41.67, test loss 1.384820, test error 48.47\n",
|
|
"Epoch 400, train loss 1.186068, train error 40.70, test loss 1.428581, test error 49.33\n",
|
|
"Epoch 500, train loss 1.167748, train error 39.30, test loss 1.472766, test error 49.95\n",
|
|
"Epoch 600, train loss 1.160923, train error 38.45, test loss 1.499211, test error 50.65\n",
|
|
"Epoch 700, train loss 1.145295, train error 38.40, test loss 1.528852, test error 50.67\n",
|
|
"Epoch 800, train loss 1.141608, train error 38.08, test loss 1.569496, test error 51.33\n",
|
|
"Epoch 900, train loss 1.132156, train error 38.22, test loss 1.602549, test error 51.35\n",
|
|
"Training model with 22 hidden variables\n",
|
|
"Epoch 0, train loss 2.211529, train error 79.80, test loss 2.206877, test error 79.95\n",
|
|
"Epoch 100, train loss 1.282252, train error 42.90, test loss 1.323739, test error 48.38\n",
|
|
"Epoch 200, train loss 1.161285, train error 38.65, test loss 1.423607, test error 50.78\n",
|
|
"Epoch 300, train loss 1.103865, train error 36.65, test loss 1.482603, test error 50.70\n",
|
|
"Epoch 400, train loss 1.070859, train error 36.42, test loss 1.543965, test error 51.38\n",
|
|
"Epoch 500, train loss 1.044615, train error 34.82, test loss 1.574064, test error 51.38\n",
|
|
"Epoch 600, train loss 1.030322, train error 34.82, test loss 1.633871, test error 51.47\n",
|
|
"Epoch 700, train loss 1.016668, train error 33.93, test loss 1.711600, test error 52.30\n",
|
|
"Epoch 800, train loss 0.997430, train error 32.82, test loss 1.763077, test error 52.95\n",
|
|
"Epoch 900, train loss 1.001320, train error 33.82, test loss 1.814692, test error 52.95\n",
|
|
"Training model with 26 hidden variables\n",
|
|
"Epoch 0, train loss 2.200904, train error 81.43, test loss 2.190686, test error 80.72\n",
|
|
"Epoch 100, train loss 1.189043, train error 39.80, test loss 1.398701, test error 51.97\n",
|
|
"Epoch 200, train loss 1.032893, train error 34.72, test loss 1.504426, test error 52.15\n",
|
|
"Epoch 300, train loss 0.975272, train error 33.55, test loss 1.621815, test error 53.30\n",
|
|
"Epoch 400, train loss 0.939880, train error 32.05, test loss 1.739801, test error 54.03\n",
|
|
"Epoch 500, train loss 0.907610, train error 30.85, test loss 1.822161, test error 54.30\n",
|
|
"Epoch 600, train loss 0.893433, train error 30.55, test loss 1.929679, test error 54.80\n",
|
|
"Epoch 700, train loss 0.864689, train error 29.20, test loss 1.968643, test error 54.75\n",
|
|
"Epoch 800, train loss 0.847545, train error 28.82, test loss 2.045258, test error 54.97\n",
|
|
"Epoch 900, train loss 0.843597, train error 28.97, test loss 2.113636, test error 55.55\n",
|
|
"Training model with 30 hidden variables\n",
|
|
"Epoch 0, train loss 2.105095, train error 78.75, test loss 2.046595, test error 77.50\n",
|
|
"Epoch 100, train loss 1.139368, train error 36.85, test loss 1.384092, test error 50.33\n",
|
|
"Epoch 200, train loss 0.948281, train error 31.70, test loss 1.521346, test error 50.53\n",
|
|
"Epoch 300, train loss 0.866517, train error 28.10, test loss 1.689052, test error 51.30\n",
|
|
"Epoch 400, train loss 0.822552, train error 26.95, test loss 1.871116, test error 53.58\n",
|
|
"Epoch 500, train loss 0.793155, train error 26.53, test loss 2.016083, test error 53.45\n",
|
|
"Epoch 600, train loss 0.747390, train error 25.15, test loss 2.175583, test error 54.60\n",
|
|
"Epoch 700, train loss 0.739243, train error 24.82, test loss 2.375611, test error 55.90\n",
|
|
"Epoch 800, train loss 0.705187, train error 23.97, test loss 2.521607, test error 56.12\n",
|
|
"Epoch 900, train loss 0.678345, train error 22.30, test loss 2.623145, test error 56.05\n",
|
|
"Training model with 35 hidden variables\n",
|
|
"Epoch 0, train loss 2.133684, train error 77.95, test loss 2.105965, test error 78.03\n",
|
|
"Epoch 100, train loss 1.025999, train error 34.07, test loss 1.387318, test error 49.42\n",
|
|
"Epoch 200, train loss 0.801432, train error 27.22, test loss 1.734866, test error 52.33\n",
|
|
"Epoch 300, train loss 0.665120, train error 22.30, test loss 2.111001, test error 54.28\n",
|
|
"Epoch 400, train loss 0.603412, train error 20.30, test loss 2.516824, test error 55.53\n",
|
|
"Epoch 500, train loss 0.557454, train error 19.50, test loss 2.875285, test error 55.90\n",
|
|
"Epoch 600, train loss 0.541823, train error 18.70, test loss 3.230060, test error 56.78\n",
|
|
"Epoch 700, train loss 0.489021, train error 16.43, test loss 3.542463, test error 56.67\n",
|
|
"Epoch 800, train loss 0.487397, train error 16.50, test loss 3.868127, test error 56.67\n",
|
|
"Epoch 900, train loss 0.452621, train error 15.62, test loss 4.107164, test error 57.90\n",
|
|
"Training model with 40 hidden variables\n",
|
|
"Epoch 0, train loss 2.131664, train error 78.45, test loss 2.104819, test error 79.57\n",
|
|
"Epoch 100, train loss 0.916092, train error 29.90, test loss 1.443106, test error 50.62\n",
|
|
"Epoch 200, train loss 0.637736, train error 20.70, test loss 1.931214, test error 52.22\n",
|
|
"Epoch 300, train loss 0.477490, train error 15.45, test loss 2.592646, test error 54.35\n",
|
|
"Epoch 400, train loss 0.375117, train error 11.97, test loss 3.317954, test error 55.75\n",
|
|
"Epoch 500, train loss 0.320583, train error 10.82, test loss 4.105999, test error 56.25\n",
|
|
"Epoch 600, train loss 0.282808, train error 9.62, test loss 4.787386, test error 56.17\n",
|
|
"Epoch 700, train loss 0.270115, train error 9.78, test loss 5.615366, test error 57.00\n",
|
|
"Epoch 800, train loss 0.271310, train error 10.05, test loss 6.246390, test error 56.17\n",
|
|
"Epoch 900, train loss 0.298141, train error 11.35, test loss 6.893893, test error 56.62\n",
|
|
"Training model with 45 hidden variables\n",
|
|
"Epoch 0, train loss 2.124081, train error 78.05, test loss 2.105962, test error 77.93\n",
|
|
"Epoch 100, train loss 0.826444, train error 26.68, test loss 1.503941, test error 49.47\n",
|
|
"Epoch 200, train loss 0.488764, train error 15.75, test loss 2.213665, test error 52.97\n",
|
|
"Epoch 300, train loss 0.339090, train error 11.00, test loss 3.221352, test error 54.25\n",
|
|
"Epoch 400, train loss 0.239335, train error 8.18, test loss 4.424680, test error 55.75\n",
|
|
"Epoch 500, train loss 0.163451, train error 4.93, test loss 5.617220, test error 55.38\n",
|
|
"Epoch 600, train loss 0.099984, train error 3.05, test loss 6.670816, test error 55.85\n",
|
|
"Epoch 700, train loss 0.015253, train error 0.00, test loss 7.652980, test error 55.70\n",
|
|
"Epoch 800, train loss 0.008259, train error 0.00, test loss 8.460521, test error 55.85\n",
|
|
"Epoch 900, train loss 0.005923, train error 0.00, test loss 8.991735, test error 55.92\n",
|
|
"Training model with 50 hidden variables\n",
|
|
"Epoch 0, train loss 2.164114, train error 78.28, test loss 2.153555, test error 78.55\n",
|
|
"Epoch 100, train loss 0.760776, train error 23.62, test loss 1.521621, test error 49.42\n",
|
|
"Epoch 200, train loss 0.388589, train error 11.07, test loss 2.433003, test error 52.42\n",
|
|
"Epoch 300, train loss 0.212128, train error 5.60, test loss 3.755425, test error 54.70\n",
|
|
"Epoch 400, train loss 0.101783, train error 2.12, test loss 5.258374, test error 55.00\n",
|
|
"Epoch 500, train loss 0.024106, train error 0.03, test loss 6.485045, test error 56.25\n",
|
|
"Epoch 600, train loss 0.010166, train error 0.00, test loss 7.247528, test error 56.03\n",
|
|
"Epoch 700, train loss 0.007013, train error 0.00, test loss 7.764050, test error 56.17\n",
|
|
"Epoch 800, train loss 0.005290, train error 0.00, test loss 8.180995, test error 56.25\n",
|
|
"Epoch 900, train loss 0.004218, train error 0.00, test loss 8.507399, test error 56.30\n",
|
|
"Training model with 60 hidden variables\n",
|
|
"Epoch 0, train loss 2.054841, train error 76.82, test loss 2.010036, test error 77.35\n",
|
|
"Epoch 100, train loss 0.533140, train error 15.78, test loss 1.685822, test error 50.55\n",
|
|
"Epoch 200, train loss 0.092306, train error 0.62, test loss 3.330975, test error 53.12\n",
|
|
"Epoch 300, train loss 0.018351, train error 0.00, test loss 4.631015, test error 54.05\n",
|
|
"Epoch 400, train loss 0.008906, train error 0.00, test loss 5.276160, test error 54.12\n",
|
|
"Epoch 500, train loss 0.005695, train error 0.00, test loss 5.702274, test error 54.15\n",
|
|
"Epoch 600, train loss 0.004078, train error 0.00, test loss 6.026574, test error 54.28\n",
|
|
"Epoch 700, train loss 0.003147, train error 0.00, test loss 6.281788, test error 54.22\n",
|
|
"Epoch 800, train loss 0.002537, train error 0.00, test loss 6.493565, test error 54.30\n",
|
|
"Epoch 900, train loss 0.002114, train error 0.00, test loss 6.669685, test error 54.45\n",
|
|
"Training model with 80 hidden variables\n",
|
|
"Epoch 0, train loss 2.028571, train error 74.40, test loss 1.985185, test error 75.28\n",
|
|
"Epoch 100, train loss 0.244489, train error 4.47, test loss 2.115000, test error 51.50\n",
|
|
"Epoch 200, train loss 0.020814, train error 0.00, test loss 3.415434, test error 51.40\n",
|
|
"Epoch 300, train loss 0.008094, train error 0.00, test loss 3.970142, test error 51.72\n",
|
|
"Epoch 400, train loss 0.004785, train error 0.00, test loss 4.299542, test error 51.83\n",
|
|
"Epoch 500, train loss 0.003310, train error 0.00, test loss 4.534029, test error 51.70\n",
|
|
"Epoch 600, train loss 0.002490, train error 0.00, test loss 4.716822, test error 51.78\n",
|
|
"Epoch 700, train loss 0.001978, train error 0.00, test loss 4.864742, test error 51.75\n",
|
|
"Epoch 800, train loss 0.001629, train error 0.00, test loss 4.988203, test error 51.80\n",
|
|
"Epoch 900, train loss 0.001378, train error 0.00, test loss 5.096081, test error 51.78\n",
|
|
"Training model with 120 hidden variables\n",
|
|
"Epoch 0, train loss 1.962679, train error 72.30, test loss 1.879106, test error 71.68\n",
|
|
"Epoch 100, train loss 0.048203, train error 0.00, test loss 2.235627, test error 49.70\n",
|
|
"Epoch 200, train loss 0.009333, train error 0.00, test loss 2.851372, test error 49.85\n",
|
|
"Epoch 300, train loss 0.004629, train error 0.00, test loss 3.129520, test error 49.95\n",
|
|
"Epoch 400, train loss 0.002970, train error 0.00, test loss 3.309089, test error 49.83\n",
|
|
"Epoch 500, train loss 0.002143, train error 0.00, test loss 3.442119, test error 49.92\n",
|
|
"Epoch 600, train loss 0.001658, train error 0.00, test loss 3.548133, test error 49.80\n",
|
|
"Epoch 700, train loss 0.001341, train error 0.00, test loss 3.635033, test error 49.72\n",
|
|
"Epoch 800, train loss 0.001120, train error 0.00, test loss 3.709674, test error 49.72\n",
|
|
"Epoch 900, train loss 0.000957, train error 0.00, test loss 3.774692, test error 49.65\n",
|
|
"Training model with 200 hidden variables\n",
|
|
"Epoch 0, train loss 1.913651, train error 70.00, test loss 1.842174, test error 72.20\n",
|
|
"Epoch 100, train loss 0.021130, train error 0.00, test loss 1.918857, test error 46.60\n",
|
|
"Epoch 200, train loss 0.006089, train error 0.00, test loss 2.209828, test error 46.88\n",
|
|
"Epoch 300, train loss 0.003314, train error 0.00, test loss 2.356240, test error 46.92\n",
|
|
"Epoch 400, train loss 0.002209, train error 0.00, test loss 2.455862, test error 46.85\n",
|
|
"Epoch 500, train loss 0.001633, train error 0.00, test loss 2.532098, test error 46.70\n",
|
|
"Epoch 600, train loss 0.001284, train error 0.00, test loss 2.592178, test error 46.67\n",
|
|
"Epoch 700, train loss 0.001051, train error 0.00, test loss 2.644237, test error 46.80\n",
|
|
"Epoch 800, train loss 0.000885, train error 0.00, test loss 2.687477, test error 46.78\n",
|
|
"Epoch 900, train loss 0.000762, train error 0.00, test loss 2.726535, test error 46.72\n",
|
|
"Training model with 360 hidden variables\n",
|
|
"Epoch 0, train loss 1.881302, train error 67.10, test loss 1.826783, test error 69.07\n",
|
|
"Epoch 100, train loss 0.012637, train error 0.00, test loss 1.789455, test error 46.08\n",
|
|
"Epoch 200, train loss 0.004398, train error 0.00, test loss 1.978857, test error 45.70\n",
|
|
"Epoch 300, train loss 0.002526, train error 0.00, test loss 2.081702, test error 45.45\n",
|
|
"Epoch 400, train loss 0.001732, train error 0.00, test loss 2.153348, test error 45.22\n",
|
|
"Epoch 500, train loss 0.001301, train error 0.00, test loss 2.208247, test error 45.28\n",
|
|
"Epoch 600, train loss 0.001034, train error 0.00, test loss 2.252916, test error 45.28\n",
|
|
"Epoch 700, train loss 0.000853, train error 0.00, test loss 2.290226, test error 45.40\n",
|
|
"Epoch 800, train loss 0.000724, train error 0.00, test loss 2.322630, test error 45.42\n",
|
|
"Epoch 900, train loss 0.000626, train error 0.00, test loss 2.351381, test error 45.33\n",
|
|
"Training model with 680 hidden variables\n",
|
|
"Epoch 0, train loss 1.808186, train error 63.28, test loss 1.783419, test error 66.82\n",
|
|
"Epoch 100, train loss 0.008044, train error 0.00, test loss 1.678780, test error 44.55\n",
|
|
"Epoch 200, train loss 0.003155, train error 0.00, test loss 1.810078, test error 44.47\n",
|
|
"Epoch 300, train loss 0.001885, train error 0.00, test loss 1.886664, test error 44.42\n",
|
|
"Epoch 400, train loss 0.001322, train error 0.00, test loss 1.939946, test error 44.35\n",
|
|
"Epoch 500, train loss 0.001008, train error 0.00, test loss 1.982137, test error 44.45\n",
|
|
"Epoch 600, train loss 0.000809, train error 0.00, test loss 2.015580, test error 44.45\n",
|
|
"Epoch 700, train loss 0.000673, train error 0.00, test loss 2.044045, test error 44.28\n",
|
|
"Epoch 800, train loss 0.000574, train error 0.00, test loss 2.068916, test error 44.28\n",
|
|
"Epoch 900, train loss 0.000499, train error 0.00, test loss 2.090607, test error 44.28\n"
|
|
]
|
|
}
|
|
]
|
|
},
|
|
{
|
|
"cell_type": "code",
|
|
"source": [
|
|
"import matplotlib.pyplot as plt\n",
|
|
"import numpy as np\n",
|
|
"\n",
|
|
"# data['y'] holds the training labels, so its length gives the number of training examples\n",
|
|
"num_training_examples = len(data['y'])\n",
|
|
"\n",
|
|
"# Find the index where total_weights_all is closest to num_training_examples\n",
|
|
"closest_index = np.argmin(np.abs(np.array(total_weights_all) - num_training_examples))\n",
|
|
"\n",
|
|
"# Get the corresponding value of hidden variables\n",
|
|
"hidden_variable_at_num_training_examples = hidden_variables[closest_index]\n",
|
|
"\n",
|
|
"# Plot the results\n",
|
|
"fig, ax = plt.subplots()\n",
|
|
"ax.plot(hidden_variables, errors_train_all, 'r-', label='train')\n",
|
|
"ax.plot(hidden_variables, errors_test_all, 'b-', label='test')\n",
|
|
"\n",
|
|
"# Add a vertical dashed line where the total number of weights most closely matches the number of training examples\n",
|
|
"ax.axvline(x=hidden_variable_at_num_training_examples, color='g', linestyle='--', label='N(weights) = N(train)')\n",
|
|
"\n",
|
|
"ax.set_ylim(0, 100)\n",
|
|
"ax.set_xlabel('No. hidden variables')\n",
|
|
"ax.set_ylabel('Error')\n",
|
|
"ax.legend()\n",
|
|
"plt.show()\n"
|
|
],
|
|
"metadata": {
|
|
"id": "Rw-iRboTXbck",
|
|
"outputId": "4b2ec111-9a74-4e10-821c-6f5129c4e109",
|
|
"colab": {
|
|
"base_uri": "https://localhost:8080/",
|
|
"height": 455
|
|
}
|
|
},
|
|
"execution_count": 36,
|
|
"outputs": [
|
|
{
|
|
"output_type": "display_data",
|
|
"data": {
|
|
"text/plain": [
|
|
"<Figure size 640x480 with 1 Axes>"
|
|
],
|
|
"image/png": "iVBORw0KGgoAAAANSUhEUgAAAj8AAAG2CAYAAACQ++e6AAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjcuMSwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy/bCgiHAAAACXBIWXMAAA9hAAAPYQGoP6dpAABTBklEQVR4nO3deVhU9f4H8PdhYBZAQJC12NwX3BAXstKSwiVSK8uim7TYr9JbZpp6TU1LcSnX0pZbYql52yRzzSXJzFBxSVxwyS0VUEmQHWa+vz9OMzoCss0w2/v1PPPMmXPOnPnMCeHddzlHEkIIEBERETkIJ0sXQERERNSQGH6IiIjIoTD8EBERkUNh+CEiIiKHwvBDREREDoXhh4iIiBwKww8RERE5FIYfIiIicigMP0RERORQGH6IiIjIoVg0/Pzyyy+Ii4tDUFAQJElCcnKy0XYhBCZPnozAwEBoNBrExMTgxIkTRvvk5OQgPj4eHh4e8PLywvPPP4/8/PwG/BZERERkSywafgoKCtCxY0d8+OGHlW6fPXs2Fi5ciI8++gipqalwc3NDbGwsiouLDfvEx8fj8OHD2Lx5M9auXYtffvkFL774YkN9BSIiIrIxkrXc2FSSJKxevRqDBg0CILf6BAUF4Y033sCYMWMAALm5ufD390dSUhKGDh2Ko0ePom3bttizZw+ioqIAABs3bkT//v3x119/ISgoyFJfh4iIiKyUs6ULqMrp06eRmZmJmJgYwzpPT090794du3btwtChQ7Fr1y54eXkZgg8AxMTEwMnJCampqRg8eHClxy4pKUFJSYnhtU6nQ05ODnx8fCBJkvm+FBEREZmMEALXr19HUFAQnJxq3pllteEnMzMTAODv72+03t/f37AtMzMTfn5+RtudnZ3h7e1t2KcyiYmJmDp1qokrJiIiIks4f/487rzzzhrvb7Xhx5wmTJiA0aNHG17n5uYiJCQE58+fh4eHhwUrq52C0gIEvS937V184yLclG4WroiIiKjh5OXlITg4GI0aNarV+6w2/AQEBAAAsrKyEBgYaFiflZWFTp06GfbJzs42el95eTlycnIM76+MSqWCSqWqsN7Dw8Omwo+iVAGo5WUPDw+GHyIicki1HbJitdf5CQ8PR0BAALZu3WpYl5eXh9TUVERHRwMAoqOjce3aNaSlpRn22bZtG3Q6Hbp3797gNRMREZH1s2jLT35+Pk6ePGl4ffr0aRw4cADe3t4ICQnBqFGj8O6776JFixYIDw/HpEmTEBQUZJgR1qZNG/Tt2xfDhw/HRx99hLKyMowcORJDhw7lTC8iIiKqlEXDz969e3HfffcZXuvH4QwbNgxJSUl48803UVBQgBdffBHXrl3D3XffjY0bN0KtVhves2LFCowcORJ9+vSBk5MTHn30USxcuLDBv4slODs5Y1jHYYZlIiIiqp7VXOfHkvLy8uDp6Ync3FybGvNDRPZHq9WirKzM0mUQWQUXFxcoFIoqt9f17zebC4iIrIAQApmZmbh27ZqlSyGyKl5eXggICDDpdfgYfmyYEAKFZYUAAFcXV16gkciG6YOPn58fXF3575lICIHCwkLDrO6bZ37XF8OPDSssK4R7ojsAIH9CPqe6E9korVZrCD4+Pj6WLofIamg0GgBAdnY2/Pz8btsFVhtWO9WdiMhR6Mf4uLq6WrgSIuuj/3dhyrFwDD9ERFaCXV1EFZnj3wXDDxERETkUhh8iIrIKYWFhmD9/vqXLIAfAAc9ERFRnvXv3RqdOnUwSWvbs2QM3N07cIPNj+CEiIrMRQkCr1cLZufo/N76+vg1QERG7vWyawkmBx9o+hsfaPgaFk2mm/xER1VRCQgJSUlKwYMECSJIESZKQlJQESZKwYcMGdOnSBSqVCr/++itOnTqFgQMHwt/fH+7u7ujatSu2bNlidLxbu70kScJ///tfDB48GK6urmjRogXWrFnTwN+S7BFbfmyY2lmNb4Z8Y+
kyiMgchAAKCy3z2a6uQA1m2CxYsADHjx9HREQEpk2bBgA4fPgwAGD8+PF477330LRpUzRu3Bjnz59H//79MX36dKhUKnzxxReIi4tDRkYGQkJCqvyMqVOnYvbs2ZgzZw4WLVqE+Ph4nD17Ft7e3qb5ruSQGH6IiKxRYSHg7m6Zz87PB2ow9sbT0xNKpRKurq4ICAgAABw7dgwAMG3aNDzwwAOGfb29vdGxY0fD63feeQerV6/GmjVrMHLkyCo/IyEhAU8++SQAYMaMGVi4cCF2796Nvn371umrEQHs9iIiIjOIiooyep2fn48xY8agTZs28PLygru7O44ePYpz587d9jgdOnQwLLu5ucHDw8NwuwOiumLLjw0rKC3g7S2I7JWrq9wCY6nPrqdbZ22NGTMGmzdvxnvvvYfmzZtDo9HgscceQ2lp6W2P4+LiYvRakiTodLp610eOjeGHiMgaSVKNup4sTalUQqvVVrvfzp07kZCQgMGDBwOQW4LOnDlj5uqIKsduLyIiqrOwsDCkpqbizJkzuHLlSpWtMi1atMD333+PAwcO4ODBg3jqqafYgkMWw/BDRER1NmbMGCgUCrRt2xa+vr5VjuGZO3cuGjdujLvuugtxcXGIjY1FZGRkA1dLJJOEEMLSRVhaXl4ePD09kZubCw8PD0uXU2Mc80NkH4qLi3H69GmEh4dDrVZbuhwiq3K7fx91/fvNlh8iIiJyKAw/RERE5FA428uGKZwU6N+iv2GZiIiIqsfwY8PUzmqse2qdpcsgIiKyKez2IiIiIofC8ENEREQOheHHhhWUFsBthhvcZrihoLTA0uUQERHZBI75sXGFZYWWLoGIiMimsOWHiIiIHArDDxERETkUhh8iIqqz3r17Y9SoUSY7XkJCAgYNGmSy4xFVhuGHiIiIHArDDxER1UlCQgJSUlKwYMECSJIESZJw5swZpKeno1+/fnB3d4e/vz/+9a9/4cqVK4b3ffvtt2jfvj00Gg18fHwQExODgoICvP3221i2bBl++OEHw/G2b99uuS9IdouzvWyYk+SEXqG9DMtEZD+EAAotNJnT1RWQpOr3W7BgAY4fP46IiAhMmzYNAODi4oJu3brhhRdewLx581BUVIRx48bh8ccfx7Zt23Dp0iU8+eSTmD17NgYPHozr169jx44dEEJgzJgxOHr0KPLy8rB06VIAgLe3tzm/Kjkohh8bpnHRYHvCdkuXQURmUFgIuLtb5rPz8wE3t+r38/T0hFKphKurKwICAgAA7777Ljp37owZM2YY9vv8888RHByM48ePIz8/H+Xl5XjkkUcQGhoKAGjfvr1hX41Gg5KSEsPxiMyB4YeIiEzm4MGD+Pnnn+FeSXI7deoUHnzwQfTp0wft27dHbGwsHnzwQTz22GNo3LixBaolR8XwQ0RkhVxd5RYYS312XeXn5yMuLg6zZs2qsC0wMBAKhQKbN2/Gb7/9hp9++gmLFi3CxIkTkZqaivDw8HpUTVRzDD82rKC0AGELwgAAZ147AzdlDdqpicgmSFLNup4sTalUQqvVGl5HRkbiu+++Q1hYGJydK/8TI0kSevbsiZ49e2Ly5MkIDQ3F6tWrMXr06ArHIzIHjpK1cVcKr+BK4ZXqdyQiMoOwsDCkpqbizJkzuHLlCkaMGIGcnBw8+eST2LNnD06dOoVNmzbh2WefhVarRWpqKmbMmIG9e/fi3Llz+P7773H58mW0adPGcLw//vgDGRkZuHLlCsrKyiz8DckeMfwQEVGdjRkzBgqFAm3btoWvry9KS0uxc+dOaLVaPPjgg2jfvj1GjRoFLy8vODk5wcPDA7/88gv69++Pli1b4q233sL777+Pfv36AQCGDx+OVq1aISoqCr6+vti5c6eFvyHZI3Z7ERFRnbVs2RK7du2qsP7777+vdP82bdpg48aNVR7P19cXP/30k8nqI6oMW36IiIjIoTD8EBERkUNh+CEiIiKHwjE/NsxJckJUUJRhmYiIiK
rH8GPDNC4a7Bm+x9JlEBER2RQ2FxAREZFDYfghIiIih8LwY8MKywoRNj8MYfPDUFhWaOlyiIiIbALH/NgwIQTO5p41LBMREVH12PJDREQN5l//+hdmzJhhtuNLkoTk5OQa7799+3ZIkoRr166ZtI4ePXrgu+++M+kx6+vq1avw8/PDmTNnzHL8pKQkeHl51eo9Q4cOxfvvv2+Wem6H4YeIiOosISEBkiRh5syZRuuTk5MhSZLRuoMHD2L9+vV49dVXzVbPpUuXDPcJM5W3334bnTp1qtV73nrrLYwfPx46nc6ktdxKkiSo1WqcPXvWaP2gQYOQkJBgtG769OkYOHAgwsLCAJg++D3xxBM4fvx4rd7z1ltvYfr06cjNzTVJDTXF8ENERPWiVqsxa9Ys/P3337fdb9GiRRgyZAjc3d3NVktAQABUKpXZjl9T/fr1w/Xr17Fhwwazf5YkSZg8efJt9yksLMRnn32G559/vtbHLy0trdF+Go0Gfn5+tTp2REQEmjVrhuXLl9e6rvpg+CEionqJiYlBQEAAEhMTq9xHq9Xi22+/RVxcnGHdBx98gIiICMNrfWvRRx99ZHTst956y/D6hx9+QGRkJNRqNZo2bYqpU6eivLzcsP3Wbq/ffvsNnTp1glqtRlRUlOEzDhw4YFRfWloaoqKi4OrqirvuugsZGRkA5K6cqVOn4uDBg5AkCZIkISkpCUIIvP322wgJCYFKpUJQUJBRi5ZCoUD//v2xatWqmp/IOho5ciSWL1+O9PT0KvdZv349VCoVevToAQA4c+YM7rvvPgBA48aNIUmSoaWod+/eGDlyJEaNGoUmTZogNjYWADB37ly0b98ebm5uCA4OxiuvvIL8/HzDZ9za7aVvMfvyyy8RFhYGT09PDB06FNevXzeqLS4urkHO080YfoiIrFhBaUGVj+Ly4hrvW1RWVKN960KhUGDGjBlYtGgR/vrrr0r3+eOPP5Cbm4uoqCjDul69euHIkSO4fPkyACAlJQVNmjTB9u3bAQBlZWXYtWsXevfuDQDYsWMHnnnmGbz22ms4cuQIPv74YyQlJWH69OmVfmZeXh7i4uLQvn177Nu3D++88w7GjRtX6b4TJ07E+++/j71798LZ2RnPPfccALkr54033kC7du1w6dIlXLp0CU888QS+++47zJs3Dx9//DFOnDiB5ORktG/f3uiY3bp1w44dO2577tq1awd3d/cqHzXpwuvZsyceeughjB8/vsp9duzYgS5duhheBwcHG8YkZWRk4NKlS1iwYIFh+7Jly6BUKrFz505DGHVycsLChQtx+PBhLFu2DNu2bcObb75529pOnTqF5ORkrF27FmvXrkVKSkqFLtJu3bph9+7dKCkpqfa7mgpne9kwSZLQ1retYZmI7I97YtVdRP1b9Me6p9YZXvu951flZS96hfbC9oTthtdhC8JwpfBKhf3ElLrNHB08eDA6deqEKVOm4LPPPquw/ezZs1AoFEbdIhEREfD29kZKSgoee+wxbN++HW+88Ybhj/Du3btRVlaGu+66CwAwdepUjB8/HsOGDQMANG3aFO+88w7efPNNTJkypcJnrly5EpIk4dNPP4VarUbbtm1x4cIFDB8+vMK+06dPR69evQAA48ePx4ABA1BcXAyNRgN3d3c4OzsjICDAsP+5c+cQEBCAmJgYuLi4ICQkBN26dTM6ZlBQEM6fPw+dTgcnp8rbGtavX4+ysrIqz6tGo6ly280SExPRoUMH7NixA/fcc0+F7WfPnkVQUJDhtUKhgLe3NwDAz8+vwkDlFi1aYPbs2UbrRo0aZVgOCwvDu+++i5deegmLFy+usi6dToekpCQ0atQIgDzgfevWrUaBNSgoCKWlpcjMzERoaGiNvm99MfzYMFcXVxx+5bClyyAiAgDMmjUL999/P8aMGVNhW1FREVQqldH/qEmShHvvvRfbt29HTEwMjhw5gldeeQWzZ8/GsWPHkJKSgq5du8LV1RWAPG
B6586dRn84tVotiouLUVhYaNhPLyMjAx06dIBarTasuzWg6HXo0MGwHBgYCADIzs5GSEhIpfsPGTIE8+fPR9OmTdG3b1/0798fcXFxcHa+8WdVo9FAp9OhpKSkyhBjqj/2bdu2xTPPPIPx48dj586dFbYXFRUZnYfq3NxKpLdlyxYkJibi2LFjyMvLQ3l5eZXnXi8sLMwQfAD53GZnZxvtoz83hYUNd706hh8iIiuWPyG/ym0KJ4XR6+wx2VXsWfHmx2deO1Ovuipz7733IjY2FhMmTKgw06hJkyYoLCxEaWkplEqlYX3v3r3xySefYMeOHejcuTM8PDwMgSglJcXQGgMA+fn5mDp1Kh555JEKn12bP+yVcXFxMSzrA9rtZmoFBwcjIyMDW7ZswebNm/HKK69gzpw5SElJMRwrJycHbm5ut229adeuXYWZWje75557ajxoeurUqWjZsmWlU/2bNGlS7YD0m7m5uRm9PnPmDB566CG8/PLLmD59Ory9vfHrr7/i+eefR2lpaZXh5+bzCsjn9tbzmpOTAwDw9fWtcX31xfBDRGTF3JRu1e9k5n1rY+bMmejUqRNatWpltF4/VfzIkSNG08Z79eqFUaNG4ZtvvjGM7enduze2bNmCnTt34o033jDsGxkZiYyMDDRv3rxGtbRq1QrLly9HSUmJYQbYnj21vxm0UqmEVqutsF6j0SAuLg5xcXEYMWIEWrdujUOHDiEyMhIAkJ6ejs6dO9/22Kbq9gLkQDZy5Ej85z//QbNmzYy2de7cucKMKn0Irey73SotLQ06nQ7vv/++oQvv66+/rnFtt5Oeno4777wTTZo0McnxaoIDnm1YYVkh2i1uh3aL2/H2FkRkFdq3b4/4+HgsXLjQaL2vry8iIyPx66+/Gq3v0KEDGjdujJUrVxqFn+TkZJSUlKBnz56GfSdPnowvvvgCU6dOxeHDh3H06FGsWrXKaDbYzZ566inodDq8+OKLOHr0KDZt2oT33nsPQO3GSYaFheH06dM4cOAArly5gpKSEiQlJeGzzz5Deno6/vzzTyxfvhwajcaoG2vHjh148MEHb3vs0NBQNG/evMrHHXfcUeM6AWDChAm4ePEitmzZYrQ+NjYWhw8fNmr9CQ0NhSRJWLt2LS5fvmw0c+tWzZs3R1lZGRYtWoQ///wTX375pdGsvPqoyXkyNYYfGyaEwJHLR3Dk8hHe3oKIrMa0adMq7TJ64YUXsGLFCqN1kiThnnvugSRJuPvuuwHIgcjDwwNRUVFG3S+xsbFYu3YtfvrpJ3Tt2hU9evTAvHnzqhw34+HhgR9//BEHDhxAp06dMHHiRMP1cGrTTfboo4+ib9++uO++++Dr64uvvvoKXl5e+PTTT9GzZ0906NABW7ZswY8//ggfHx8AwIULF/Dbb7/h2WefrfHnmIK3tzfGjRuH4mLjmYDt27dHZGSkUWvNHXfcYRhE7u/vj5EjR1Z53I4dO2Lu3LmYNWsWIiIisGLFitte2qCmiouLkZycXOkgdHOSBP9qIi8vD56ensjNzYWHh4ely6mxgtICw0yQ/An5ZmvGJiLzKi4uxunTpxEeHl7vsSvWrKioCK1atcL//vc/REdHW6SGFStW4Nlnn0Vubm6tupRqa9y4cfj777/xySefmO0zamvdunUYO3Ys0tPTq5x91tCWLFmC1atX46effqpyn9v9+6jr32+O+SEiogah0WjwxRdf4MqVilPszeWLL75A06ZNcccdd+DgwYMYN24cHn/8cbMGH0CePj569GizfkZtDRgwACdOnMCFCxcQHBxs6XIAyAOiFy1a1OCfy/BDREQNRj+up6FkZmZi8uTJyMzMRGBgIIYMGVLlRRFN6eaB2tbk5mv1WIMXXnjBIp/L8ENERHbrzTffrPYqxOR4rKPTrwparRaTJk1CeHg4NBoNmjVrhnfeecdocK8QApMnT0ZgYCA0Gg1iYmJw4sQJC1ZNRERE1syqw8+sWbOwZMkSfP
DBBzh69ChmzZqF2bNnG/UPzp49GwsXLsRHH32E1NRUuLm5ITY2tsJId3skSRJCPUMR6hnK21sQ2QHOPyGqyBz/Lqy62+u3337DwIEDMWDAAADytRa++uor7N69G4B8QubPn4+33noLAwcOBCAPbvP390dycjKGDh1qsdobgquLK86MOmPpMoionvRXwS0sLDT7QFwiW6O/7cWtV4uuD6sOP3fddRc++eQTHD9+HC1btsTBgwfx66+/Yu7cuQCA06dPIzMzEzExMYb3eHp6onv37ti1a1eV4aekpMTo7rF5eXnm/SJERLehUCjg5eVluOeRq6srW3PJ4QkhUFhYiOzsbHh5eUGhUFT/phqy6vAzfvx45OXloXXr1lAoFNBqtZg+fTri4+MByKP4AcDf39/off7+/oZtlUlMTMTUqVPNVzgRUS3p7xh+600fiRydl5eX4d+HqVh1+Pn666+xYsUKrFy5Eu3atcOBAwcwatQoBAUFYdiwYXU+7oQJE4yuv5CXl2c11zyojaKyItybdC8A4JeEX6BxYXM5ka2SJAmBgYHw8/O77b2eiByJi4uLSVt89Kw6/IwdOxbjx483dF+1b98eZ8+eRWJiIoYNG2ZIgllZWQgMDDS8Lysry+jGebdSqVSGm9zZMp3QYe/FvYZlIrJ9CoXCLL/siegGq57tVVhYWOES3AqFwnDPmPDwcAQEBGDr1q2G7Xl5eUhNTbXYpdOJiIjIull1y09cXBymT5+OkJAQtGvXDvv378fcuXPx3HPPAZCbiUeNGoV3330XLVq0QHh4OCZNmoSgoCAMGjTIssUTERGRVbLq8LNo0SJMmjQJr7zyCrKzsxEUFIT/+7//M9yVF5Cv3llQUIAXX3wR165dw913342NGzfa9c0BiYiIqO54V3fwru5ERES2qK5/v616zA8RERGRqVl1txdVr4lrE0uXQEREZFMYfmyYm9INl8detnQZRERENoXdXkRERORQGH6IiIjIoTD82LCisiL0TuqN3km9UVRWZOlyiIiIbALH/NgwndAh5WyKYZmIiIiqx5YfIiIicigMP0RERORQGH6IiIjIoTD8EBERkUNh+CEiIiKHwtleNs7VxdXSJRAREdkUhh8b5qZ0Q8F/CixdBhERkU1htxcRERE5FIYfIiIicigMPzasuLwYA1YOwICVA1BcXmzpcoiIiGwCx/zYMK1Oi/Un1huWiYiIqHps+SEiIiKHwvBDREREDoXhh4iIiBwKww8RERE5FIYfIiIicigMP0RERORQONXdhrkp3SCmCEuXQUREZFPY8mNOCQlAr17A4cOWroSIiIj+wZYfc0pNBY4dAy5ftnQlRERE9A+2/JiTRiM/FxWZ5fDF5cUY8s0QDPlmCG9vQUREVEMMP+bk6io/myn8aHVafHvkW3x75Fve3oKIiKiGGH7MycwtP0RERFR7DD/mxPBDRERkdRh+zEkffgoLLVsHERERGXC2lxltuNoNOXDGg5cBX0sXQ0RERADY8mNWr+19Gk9jBY5fcLN0KURERPQPhh8zUjnrAADFhZyJRUREZC3Y7WVGaqUceooLdGY5vquLK/In5BuWiYiIqHoMP2akVsqhp6TIPOFHkiS4KdmlRkREVBvs9jIjlVK+6WhxEW8+SkREZC0YfsxIrZKfzRV+SspLkJCcgITkBJSUl5jlM4iIiOwNw48ZqdVy6CkxUy4p15Vj2cFlWHZwGcp15eb5ECIiIjvD8GNGKpUEACjmPUeJiIisBsOPGak1/4SfEsnClRAREZEew48Z6cNPSSnDDxERkbVg+DEjlasCAFBcprBwJURERKTH8GNGalf59DL8EBERWQ+GHzNSu7Hlh4iIyNrwCs9mpHaXT29JuXnCj6uLK7LHZBuWiYiIqHoMP2akcpNPb7HOBdDpACfTNrRJkgRfN1+THpOIiMjesdvLjNQeLgCAYqh5sR8iIiIrwfBjRupGSgBACVRAYaHJj19SXoIR60ZgxLoRvL
0FERFRDTH8mJFK889sL6iBoiKTH79cV47Fexdj8d7FvL0FERFRDTH8mJFaLT+bK/wQERFR7TH8mJE+/JRAxfBDRERkJRh+zEilkp/Z8kNERGQ9GH7MyKjbywwDnomIiKj2GH7MiN1eRERE1ofhx4zY7UVERGR9eIVnMzL3bC+NiwanXzttWCYiIqLqMfyYkbm7vZwkJ4R5hZn8uERERPaM3V5mpO/2KocLyq+z24uIiMgaMPyYkb7lBwBK8stMfvxSbSnG/jQWY38ai1JtqcmPT0REZI8YfsxI3/IDmCf8lGnL8N6u9/DervdQpjX98YmIiOwRw48ZOTsDzk5aAEBxPu+9RUREZA2sPvxcuHABTz/9NHx8fKDRaNC+fXvs3bvXsF0IgcmTJyMwMBAajQYxMTE4ceKEBSs2plL8E35yedd1IiIia2DV4efvv/9Gz5494eLigg0bNuDIkSN4//330bhxY8M+s2fPxsKFC/HRRx8hNTUVbm5uiI2NRXFxsQUrv0GtlMNPyd+8wjMREZE1sOqp7rNmzUJwcDCWLl1qWBceHm5YFkJg/vz5eOuttzBw4EAAwBdffAF/f38kJydj6NChDV7zrdRKHVAAFOcw/BAREVkDq275WbNmDaKiojBkyBD4+fmhc+fO+PTTTw3bT58+jczMTMTExBjWeXp6onv37ti1a1eVxy0pKUFeXp7Rw1xUKgkAUHzNOlqiiIiIHJ1Vh58///wTS5YsQYsWLbBp0ya8/PLLePXVV7Fs2TIAQGZmJgDA39/f6H3+/v6GbZVJTEyEp6en4REcHGy276D+58LLJdd4nR8iIiJrYNXdXjqdDlFRUZgxYwYAoHPnzkhPT8dHH32EYcOG1fm4EyZMwOjRow2v8/LyzBaA1K5yvizOM/11eDQuGqS/nG5YJiIioupZdctPYGAg2rZta7SuTZs2OHfuHAAgICAAAJCVlWW0T1ZWlmFbZVQqFTw8PIwe5qJyVQAAiosFUGraAOQkOaGdXzu082sHJ8mq/1MSERFZDav+i9mzZ09kZGQYrTt+/DhCQ0MByIOfAwICsHXrVsP2vLw8pKamIjo6ukFrrYraTW5cK4YayMmxcDVERERk1d1er7/+Ou666y7MmDEDjz/+OHbv3o1PPvkEn3zyCQBAkiSMGjUK7777Llq0aIHw8HBMmjQJQUFBGDRokGWL/4daIw94LoFKDj+3aZGqrVJtKWbskLsE/3PPf6BUKE12bCIiIntl1eGna9euWL16NSZMmIBp06YhPDwc8+fPR3x8vGGfN998EwUFBXjxxRdx7do13H333di4cSPUN99Yy4L0t7gwR8tPmbYMU1OmAgDG3jWW4YeIiKgGrDr8AMBDDz2Ehx56qMrtkiRh2rRpmDZtWgNWVXP6DFYMNXD1qmWLISIiIuse82MP9OHH0O1FREREFsXwY2ZG3V5s+SEiIrI4hh8zM+r2YssPERGRxTH8mJmpw09JCfD444BCAbg3urH+q6/qfWgiIiKHwPBjZr6+8vNfuLPe3V46HfDMM8A338jLEDe2/d9LwNq19To8ERGRQ2D4MbMOHeTnP9ChXi0/QgCvvw58/TXg4gKsWQP8dUaNDYN3Y8CF3dCVqPH448Dvv5uocCIiIjtl9VPdbZ0+/JxAC1zMdELhSaB589ofZ84cYOFCeXnZMiAuDgAUuCOoK/p8CAz8C9iwARgwAFiyBHB1lfdt0wZo1swU34SIiMg+SEIIUf1u9i0vLw+enp7Izc01y32+/H3KkJ3jAgBwcgJOngTCw2v+/gMHgM6d5eW5c+UWoFsVFAD33Qfs2WO83skJ+O47wEoueE1ERGQydf37zW6vBtChndawrNMBqam1e/+yZfLz4MHGwadUW4o5O+dgzs45cFGXYt064Mknga5d5UerVvLnPfkk8OuvJvgiREREdoDhpwF0iFIZvb5+vebvLS+/MZPr+eeNt5Vpy/Dmljfx5pY3UaYtg68vsHIlsHu3/E
hPl7vHiovl58OH6/lFiIiI7ADDTwNoFyEZvc7Kqvl7t2yR92/SBHjwwdp9rrMzsGoVEB0NXLsG9O0LnD9fu2MQERHZG4afBtCtm/Hr2oSf5cvl56FD5VleteXqKk+Bb9MG+OsvOQDxWotEROTIGH4aQEQEsK7PXDyP/wKoefjJzwdWr5aXn3667p/v7Q1s3AgEBQFHjgAPPwwUFdX9eERERLaM4aeB9O+ZixhsAVDz8JOcDBQWylPjb209qq2QEDkAeXoCO3fKg6DLy+t3TCIiIlvE8NNQQkPhDzn11CT8CAF8/rm8/PTTgCTdfv+aaN9evjiiSgX88APw3HNAaWn9j0tERGRLGH4aSkhIrcLP558DP/8sD1r+179MV8a998ozwpycgC+/BB54ALhyxXTHJyIisna1Dj9lZWVwdnZGenq6OeqxXze1/Fy7Jt+gtCoZGcCrr8rL06cDTZtWvp/aWY2fh/2Mn4f9DLWzusalPPIIsG4d4OEB/PKL3KXGafBEROQoah1+XFxcEBISAq1WW/3OdENwMBrjbzijDACQnV35biUl8nicwkKgTx9gzJiqD6lwUqB3WG/0DusNhZOiVuX07SvfB6xpU+D0aXk6/Pr1tToEERGRTapTt9fEiRPxn//8BzmcM11zajWcAvzhBzn1VNX1NX8+sH8/4OMDfPGF3D1lLm3ayBdD7NVLvvBiXBwwb5483oiIiMhe1enGph988AFOnjyJoKAghIaGws3NzWj7vn37TFKc3QkKgn9mFi7ijkrDjxDAZ5/Jy7NmyVPTb6dMW4ZP0j4BALzY5UW4KGp/ISAfH+Cnn4CRI4FPPwVGj5a7wBYvBpTKWh+OiIjI6tUp/AziXTLrxt//toOe9+wBTpyQL0z4xBPVH65UW4qRG0YCABI6JdQp/AByyPn4Y6BtW+CNN+QAduKEfEPUJk3qdEgiIiKrVafwM2XKFFPX4RiqCT/6qzkPGgS4uzdcWYA8lX7UKPlmqEOH3hgI/eOPQLt2DVsLERGROdVrRElaWhqWL1+O5cuXY//+/aaqyX75++NO/AVAvt7OzRcZLCuT78MF1O9qzvXVrx+waxcHQhMRkf2qU/jJzs7G/fffj65du+LVV1/Fq6++ii5duqBPnz64fPmyqWu0H/7+eAH/hYdLIX7/HXj33RubNm8GLl8GfH3la+9YUtu2QGoqB0ITEZF9qlP4+fe//43r16/j8OHDyMnJQU5ODtLT05GXl4dX9ReooYr8/RGGs1jSfC4A4J13gF9/lTd9+aX8/OST8oUNLa1JE3kg9AsvADqdPBB6+HBeEZqIiGxfncLPxo0bsXjxYrRp08awrm3btvjwww+xYcMGkxVnd/z9AQBPYSX+9S85VDz0ENCxozy4GLBsl9etlErgk0/kVh8nJ3kgdPfuwOzZwMGDbAkiIiLbVKfwo9Pp4OJScWaRi4sLdDpdvYuyW/+EH2Rl4YMP5BuW5uYCf/whj/np2BGIirJsibfSD4ReuxZo1Ag4cAAYNw7o1Emeij9smHy7DPZ2EhGRrZCEqP3/vw8cOBDXrl3DV199haB/LkZz4cIFxMfHo3Hjxli9erXJCzWnvLw8eHp6Ijc3Fx4eHub7oMuXAT8/ebm0FDnXXbBvn9yCIklAly5A48Y1P1y5rhybTm4CAMQ2j4Wzk3n7yy5cAL7/Hti0Sb7vWGHhjW2SBERGyleOjo0FevQAKsnHREREJlPXv991Cj/nz5/Hww8/jMOHDyM4ONiwLiIiAmvWrMGdd95Z20NaVIOFH51O7kvSaoG//gLuuMN8n2VmJSXyeKVNm+THH38Yb/fwAO6/Xw5CsbFAeLhl6iQiIvvVoOEHAIQQ2LJlC44dOwYAaNOmDWJiYupyKItrsPADAIGBQGYmkJYmN5XYiUuX5AHSmzbJz1evGm9v2fJGEOrdG7jlouBERES11mDhp6ysDBqNBgcOHEBEREStC7VGDRp+OnWSRwuvXy9fVKceyrRlWH
FoBQAgvn18na/wbGpaLbBv341WoV275HV6SiVw9903wlCHDnK3GRERUW3U9e837+re0G4a9FxfpdpSPPvDs3j2h2dRqrWeOegKBdC1K/DWW8COHXIr0PffAy++CISGytPlt20zHjidkAB89RVw5YqlqyciInvHu7o3NBOGH1vh6QkMHizfP+z0aeDYMWDBAqB/f/k+ZpmZwLJlwFNPyePB9cFp0yaAP2JERGRqvKt7QwsIkJ8vXbJsHRYiSfL9w1q1Al599cbA6Y0b5bBz6BCwd6/80GvWTA5EXbvK9xvr3JljhoiIqO54V/eGpp/2dOqUZeuwEioV0KeP/JgzB7h4UR4wvWWLfIuNkyflU3Xq1I17nzk5ARERxoEoIoJT64mIqGZqHX7Ky8shSRKee+45m5vSbhVatJCfT5ywbB1WSj/+JyFBfp2TI7cC7dkjP3bvlhvN/vhDfnz2mbyfWi2PH+rW7UYoatFCDkpEREQ3q9NU90aNGuHQoUMICwszQ0kNr0Fne509C4SFyc0UhYX1upFXQWkB3BPdAQD5E/LhpnSMvqALF24EIX0oys2tuJ+np3zFbH3rUNeu8qWVOLOMiMg+1PXvd53+8t5///1ISUmxm/DToIKD5b6ekhLg/Hle/a8O7rhDfuh7X3U6uXvs5tah/fvlQLR1q/zQCwi4EYS6dZPDkbe3Rb4GERFZSJ3CT79+/TB+/HgcOnQIXbp0qTDg+eGHHzZJcXbJyQlo2hQ4elTu+qpH+FE5q/D1Y18blh2Vk5N8EcWWLYH4eHldWRmQnm4ciA4flmeWrVkjP/SaNTPuLouMlGehERGRfapTt5fTbQZSSJJkc9cAatBuLwAYOFD+6/vBB8CIEeb/PAIg9zLu32/cXXbyZMX9FAqgXTvj7jIOqCYisj4N2u3FO7fXEwc9W4SrK9Czp/zQu3lAtT4UVTWgunNn40DUvDkHVBMR2aJahZ/+/fvjq6++gqenJwBg5syZeOmll+Dl5QUAuHr1Ku655x4cOXLE5IXaFROFn3JdOVYfXQ0AGNxmsNnv6m6PvL2BBx+UH3oXLhi3DukHVO/aJT/0PD1vdJXpQ5EN36uWiMhh1KrbS6FQ4NKlS/Dz8wMAeHh44MCBA2jatCkAICsrC0FBQez2qs62bfKFbVq2BDIy6nwYR53t1dBuHlCtD0X79wPFxRX3DQw0bh3igGoiIvNpkG6vW3NSHW8IT/qWnz//BMrL6zXdncyvugHV+kCUni53md06oLp58xutQ6GhgEZz+4daze40IiJz4l9dS7jjDvkvXHGxfN2fZs0sXRHVkouLPAaoc2f5hq0AUFAgtwjdPMPs1Cm51ejkSfnGrTWlVFYfkGoSomq6j1rN6x8RkeOoVfiRJAnSLb8hb31NNeDkJAeew4flcT8MP3bBzQ24+275oacfUL17t/x8+TJQVHTjUVx8Y7ms7Mb7SkvlR2UXbzSXqsKSqYOW/qFUMnARkWXUutsrISEBKpV8TZni4mK89NJLhuv8lJSUmL5Ce9WixY3w07evpashM6lsQHVVtFrjYFRVSKrPPrfud/PwvOJi+fH33+Y7HzeTpIYLWvoHL1dAREAtw8+wYcOMXj/99NMV9nnmmWfqV5Gj4HR3uoVCAbi7y4+GUlZm2jBVk/30QwWFkK+9VFjYcN9XoahZiFKr5aDk7Cw/9MtVPZtqn9vtq1CwpYzIVGoVfpYuXWquOhyPPvxUdpU9ogbi4iI/GmKSIyAHntJS07dg3e5xc4O0Vgvk58sPW2SOwNUQwa02+3CwPzUEDni2lObN5ed6tPwoFUosHbjUsExk7SRJvrWdSgX8c3kws9Pp5ABUmyBVXCxPxCwrM36ubJ2p9ykvN+6OvJl+uz1zcrLOUGbKfdiKZ3kMP5aib/k5fVr+rVeHwQguChckdEowbV1EdsbJ6UZ3lq3Q6eQAZK6AZcqgVtf33zzA/9bvrh/wb88UCtsMbrUJndbcisfwYylBQfJv46
Ii4MyZG2GIiByek5P8sPcB2lqtbQS1+uxTVUudVlt1C5+9kCTjQLRnj3y9NGvA8GMp+ru7Hz4st/7UIfyU68qx6eQmAEBs81je3oKIbIpCIT/+mUBsl4SovBXPVsNcVc+V3fJTCONWPmtqCeJfS0sKDZXDz9mzdXp7SXkJHvrqIQDy7S2clfzPSURkTfStH/Z+IX+druqgpF8OCbF0lTfY+X8OKxcaKj/XMfwQERFZAycn+cKlShuZe2NFjVAOSB+DGX6IiIgaDMOPJelbfs6ds2wdREREDoThx5LY7UVERNTgGH4sSR9+/vrL/q9cRkREZCUYfiwpIECeAqDVAhcvWroaIiIih8DZXpakUADBwfJ1fs6dq/U8QKVCiQ/6fWBYJiIiouox/FhaaKgcfs6eBe6+u1ZvdVG4YES3EWYqjIiIyD6x28vSOOiZiIioQdlU+Jk5cyYkScKoUaMM64qLizFixAj4+PjA3d0djz76KLKysixXZG3V41o/Wp0W289sx/Yz26HV2flNYoiIiEzEZsLPnj178PHHH6NDhw5G619//XX8+OOP+Oabb5CSkoKLFy/ikUcesVCVdVCPa/0UlxfjvmX34b5l96G4vNjEhREREdknmwg/+fn5iI+Px6efforGjRsb1ufm5uKzzz7D3Llzcf/996NLly5YunQpfvvtN/z+++8WrLgW2O1FRETUoGwi/IwYMQIDBgxATEyM0fq0tDSUlZUZrW/dujVCQkKwa9euKo9XUlKCvLw8o4fF3NztJYTl6iAiInIQVj/ba9WqVdi3bx/27NlTYVtmZiaUSiW8vLyM1vv7+yMzM7PKYyYmJmLq1KmmLrVu9OGnsBC4ehVo0sSy9RAREdk5q275OX/+PF577TWsWLECarXaZMedMGECcnNzDY/z58+b7Ni1plYD/v7yMu/xRUREZHZWHX7S0tKQnZ2NyMhIODs7w9nZGSkpKVi4cCGcnZ3h7++P0tJSXLt2zeh9WVlZCAgIqPK4KpUKHh4eRg+L4rgfIiKiBmPV4adPnz44dOgQDhw4YHhERUUhPj7esOzi4oKtW7ca3pORkYFz584hOjragpXXUj2muxMREVHtWPWYn0aNGiEiIsJonZubG3x8fAzrn3/+eYwePRre3t7w8PDAv//9b0RHR6NHjx6WKLlu6jjd3UXhgtkxsw3LREREVD2rDj81MW/ePDg5OeHRRx9FSUkJYmNjsXjxYkuXVTt17PZSKpQY23OsGQoiIiKyX5IQnF+dl5cHT09P5ObmWmb8z5o1wMCBQJcuwN69Df/5RERENqiuf79tvuXHLtRxzI9Wp8W+S/sAAJGBkVA4KUxdGRERkd1h+LEG+m6vK1fk6/24utbobcXlxej2324AgPwJ+XBTupmrQiIiIrth1bO9HIaXF9CokbzMa/0QERGZFcOPNZAkTncnIiJqIAw/1qIed3cnIiKimmP4sRa8yjMREVGDYPixFgw/REREDYLhx1pwzA8REVGD4FR3a1GHMT8uChdM6TXFsExERETVY/ixFvrw89dfQHk54Fz9fxqlQom3e79t3rqIiIjsDLu9rEVAgBx4tFrg4kVLV0NERGS3GH6shUIBBAfLyzXs+tIJHQ5nH8bh7MPQCZ0ZiyMiIrIfDD/WpJYzvorKihCxJAIRSyJQVFZkxsKIiIjsB8OPNeF0dyIiIrNj+LEm+unuvMozERGR2TD8WBO2/BAREZkdw481YfghIiIyO4Yfa3LzVZ6FsGwtREREdorhx5row09hIZCTY9laiIiI7BSv8GxN1GrA3x/IypJbf3x8bru7i8IFY6LHGJaJiIioegw/1iYk5Eb4iYy87a5KhRJzHpzTQIURERHZB3Z7WZs63OCUiIiIao4tP9YmLEx+PnWq2l11QodzuXJICvEMgZPELEtERFQdhh9rExEhPx86VO2uRWVFCF8QDgDIn5APN6WbOSsjIiKyC2wqsDYdOsjPf/zB6e5ERERmwPBjbd
q0ke/wnpMDXLxo6WqIiIjsDsOPtVGrgVat5OU//rBsLURERHaI4cca3dz1RURERCbF8GONGH6IiIjMhuHHGjH8EBERmQ2nulsjffg5dgwoKQFUqkp3c3ZyxitRrxiWiYiIqHr8i2mN7rwT8PICrl0Djh4FOnWqdDeVswofDviwISsjIiKyeez2skaSxK4vIiIiM2H4sVY1CD9CCFwuuIzLBZcheEFEIiKiGmG3l7WqQfgpLCuE33t+AHh7CyIioppiy4+16thRfma3FxERkUkx/Firdu3ksT9ZWfKDiIiITILhx1q5uQHh8h3bkZFh2VqIiIjsCMOPNQsNlZ/PnrVsHURERHaE4cea6cPPuXOWrYOIiMiOMPxYM7b8EBERmRynuluzkBD5uYrw4+zkjGEdhxmWiYiIqHr8i2nNqun2UjmrkDQoqeHqISIisgPs9rJmN3d78QrOREREJsHwY82Cg+XnoiLgypUKm4UQKCgtQEFpAW9vQUREVEMMP9ZMpQICAuTlSrq+CssK4Z7oDvdEdxSWFTZwcURERLaJ4cfaccYXERGRSTH8WDuGHyIiIpNi+LF21Ux3JyIiotph+LF2vMozERGRSTH8WDt2exEREZkUw4+1Y/ghIiIyKV7h2drpx/xcvQoUFABuboZNCicFHmv7mGGZiIiIqsfwY+28vAAPDyAvTx7306aNYZPaWY1vhnxjudqIiIhsELu9bAG7voiIiEyG4ccWcLo7ERGRyTD82IIqprsXlBZAmipBmiqhoLTAAoURERHZHoYfW8BuLyIiIpNh+LEF7PYiIiIyGYYfW8CrPBMREZkMw48tCA6Wny9cALRay9ZCRERk4xh+bEFgIKBQyMEnM9PS1RAREdk0hh9boFAAQUHy8l9/WbYWIiIiG2fV4ScxMRFdu3ZFo0aN4Ofnh0GDBiEjI8Non+LiYowYMQI+Pj5wd3fHo48+iqysLAtVbEZ33ik/nz9vWKVwUqB/i/7o36I/b29BRERUQ1YdflJSUjBixAj8/vvv2Lx5M8rKyvDggw+ioODGNW1ef/11/Pjjj/jmm2+QkpKCixcv4pFHHrFg1WaiH/dzU/hRO6ux7ql1WPfUOqid1RYqjIiIyLZY9b29Nm7caPQ6KSkJfn5+SEtLw7333ovc3Fx89tlnWLlyJe6//34AwNKlS9GmTRv8/vvv6NGjhyXKNo9Kwg8RERHVnlW3/NwqNzcXAODt7Q0ASEtLQ1lZGWJiYgz7tG7dGiEhIdi1a1eVxykpKUFeXp7Rw+rpww/H/BAREdWLzYQfnU6HUaNGoWfPnoiIiAAAZGZmQqlUwsvLy2hff39/ZN5mVlRiYiI8PT0Nj2B9sLBmlYz5KSgtgNsMN7jNcOPtLYiIiGrIZsLPiBEjkJ6ejlWrVtX7WBMmTEBubq7hcd4WupKq6PYqLCtEYVmhBQoiIiKyTVY95kdv5MiRWLt2LX755RfcqW8BARAQEIDS0lJcu3bNqPUnKysLAQEBVR5PpVJBpVKZs2TT04efS5eA8nLA2Sb+0xEREVkdq275EUJg5MiRWL16NbZt24bw8HCj7V26dIGLiwu2bt1qWJeRkYFz584hOjq6ocs1L39/wMUF0OnkAERERER1YtXNByNGjMDKlSvxww8/oFGjRoZxPJ6entBoNPD09MTzzz+P0aNHw9vbGx4eHvj3v/+N6Oho+5rpBQBOTsAddwBnzshdX7YwTomIiMgKWXX4WbJkCQCgd+/eRuuXLl2KhIQEAMC8efPg5OSERx99FCUlJYiNjcXixYsbuNIGcuedN8IPERER1YlVhx8hRLX7qNVqfPjhh/jwww8boCIL47V+iIiI6s2qww/d4pbw4yQ5oVdoL8MyERERVY/hx5bccqFDjYsG2xO2W64eIiIiG8TmAlvCbi8iIqJ6Y/ixJZVc5ZmIiIhqh+HHluhbfrKygNJSFJQWwHeOL3zn+PL2FkRERDXE8GNLfH0BlQoQArh4EQBwpfAKrh
ResXBhREREtoPhx5ZIEru+iIiI6onhx9Zw0DMREVG9MPzYGrb8EBER1QvDj61hyw8REVG9MPzYmlsudEhERES1wys825qbWn6cJCdEBUUB4O0tiIiIaorhx9bcNOZH46LBnuF7LFsPERGRjWFzga3Rt/xcvgwUF1u2FiIiIhvE8GNrvL0BjUZe5rgfIiKiWmP4sTWSBDRvDgAoPHIAYfPDEDY/DIVlhRYujIiIyDYw/NiiDh0AAOJQOs7mnsXZ3LMQQli4KCIiItvA8GOL/gk/SE+3bB1EREQ2iOHHFjH8EBER1RnDjy3Sh58Txy1bBxERkQ1i+LFFgYGAjw+g4zgfIiKi2mL4sUWSdKP1h4iIiGqF4cdWdegACUBbrTfa+raFJEmWroiIiMgm8PYWtqpDB7iWAYd/6wxs2WLpaoiIiGwGW35slb7b6+BBgNf4ISIiqjGGH1vVti3g5ARcuQJkZVm6GiIiIpvB8GOrXF1R2LoZ2r0CtPuiO29vQUREVEMc82PDREQEjvidAIrO8fYWRERENcSWH1sWEWHpCoiIiGwOw48ta9/e0hUQERHZHIYfW3Zzy09pqeXqICIisiEMP7YsOPjGMu/zRUREVCMMP7bs5qs68w7vRERENcLZXjZMkiSEat2B6/mQjmVYuhwiIiKbwJYfG+bq4ooz6gk4Mx9wvXjZ0uUQERHZBIYfWxcUJD9fvGjZOoiIiGwEw4+tCwyUny9dsmwdRERENoLhx4YVlRWh67E30HU4UJR1wdLlEBER2QQOeLZhOqHD3muHgTsA3d85QEkJoFJZuiwiIiKrxpYfe5KZaekKiIiIrB7Djz3hoGciIqJqMfzYEw56JiIiqhbDjz1hyw8REVG1GH7sCVt+iIiIqsXwY+OauDZBE7jKL9jyQ0REVC2GHxvmpnTD5bGXcTn0Q7iVgS0/RERENcDwYw/0V3lmyw8REVG1GH7sAe/vRUREVGO8wrMNKyorQr8V/YCyMmxwBjRXr/Iqz0RERNVg+LFhOqFDytkUeVnpDJSXy1d5Dg21cGVERETWi91e9oJ3dyciIqoRhh97ERAgP3PcDxER0W0x/NiLAM74IiIiqgmGH3uhb/lhtxcREdFtMfzYC17rh4iIqEY428vGubr8c2uLAA54JiIiqgmGHxvmpnRDwX8K5BebNsnPbPkhIiK6LXZ72Qv9VZ7Z8kNERHRbDD/2Qj/m58oV+SrPREREVCmGHxtWXF6MASsHYMDKASj2dANcXOQNmZmWLYyIiMiKccyPDdPqtFh/Yr28LHRy68+5c3LXF29xQUREVCm2/NgT3t2diIioWgw/9oSDnomIiKplN+Hnww8/RFhYGNRqNbp3747du3dbuqSGxwsdEhERVcsuws///vc/jB49GlOmTMG+ffvQsWNHxMbGIjs729KlNSx2exEREVXLLgY8z507F8OHD8ezzz4LAPjoo4+wbt06fP755xg/fryFq2tA+pafP/4Avv/esrUQERHd7IEHgEaNLF0FADsIP6WlpUhLS8OECRMM65ycnBATE4Ndu3ZV+p6SkhKU3HQtnNzcXABAXl6eeYs1sYLSAqBYXs7Ly4PWy0t+sW8f8OijFquLiIiogrQ0oHlzkx5S/3dbCFGr99l8+Lly5Qq0Wi38/f2N1vv7++PYsWOVvicxMRFTp06tsD44ONgsNTaEoJlBli6BiIioal26mO3Q169fh6enZ433t/nwUxcTJkzA6NGjDa91Oh1ycnLg4+MDSZJM8hl5eXkIDg7G+fPn4eHhYZJj2iKehxt4LmQ8DzfwXMh4Hm7guZDV9DwIIXD9+nUEBdWuAcDmw0+TJk2gUCiQlZVltD4rKwsBAQGVvkelUkGlUhmt89J3GZmYh4eHQ/8A6/E83MBzIeN5uIHnQsbzcAPPhawm56E2LT56Nj/bS6lUokuXLti6dathnU6nw9atWxEdHW3ByoiIiMga2XzLDwCMHj0aw4YNQ1RUFLp164b58+ejoK
DAMPuLiIiISM8uws8TTzyBy5cvY/LkycjMzESnTp2wcePGCoOgG5JKpcKUKVMqdK85Gp6HG3guZDwPN/BcyHgebuC5kJn7PEiitvPDiIiIiGyYzY/5ISIiIqoNhh8iIiJyKAw/RERE5FAYfoiIiMihMPyYwYcffoiwsDCo1Wp0794du3fvtnRJJvfLL78gLi4OQUFBkCQJycnJRtuFEJg8eTICAwOh0WgQExODEydOGO2Tk5OD+Ph4eHh4wMvLC88//zzy8/Mb8FvUX2JiIrp27YpGjRrBz88PgwYNQkZGhtE+xcXFGDFiBHx8fODu7o5HH320wkU5z507hwEDBsDV1RV+fn4YO3YsysvLG/Kr1MuSJUvQoUMHwwXJoqOjsWHDBsN2RzgHlZk5cyYkScKoUaMM6xzlXLz99tuQJMno0bp1a8N2RzkPehcuXMDTTz8NHx8faDQatG/fHnv37jVsd4TfmWFhYRV+JiRJwogRIwA08M+EIJNatWqVUCqV4vPPPxeHDx8Ww4cPF15eXiIrK8vSpZnU+vXrxcSJE8X3338vAIjVq1cbbZ85c6bw9PQUycnJ4uDBg+Lhhx8W4eHhoqioyLBP3759RceOHcXvv/8uduzYIZo3by6efPLJBv4m9RMbGyuWLl0q0tPTxYEDB0T//v1FSEiIyM/PN+zz0ksvieDgYLF161axd+9e0aNHD3HXXXcZtpeXl4uIiAgRExMj9u/fL9avXy+aNGkiJkyYYImvVCdr1qwR69atE8ePHxcZGRniP//5j3BxcRHp6elCCMc4B7favXu3CAsLEx06dBCvvfaaYb2jnIspU6aIdu3aiUuXLhkely9fNmx3lPMghBA5OTkiNDRUJCQkiNTUVPHnn3+KTZs2iZMnTxr2cYTfmdnZ2UY/D5s3bxYAxM8//yyEaNifCYYfE+vWrZsYMWKE4bVWqxVBQUEiMTHRglWZ163hR6fTiYCAADFnzhzDumvXrgmVSiW++uorIYQQR44cEQDEnj17DPts2LBBSJIkLly40GC1m1p2drYAIFJSUoQQ8vd2cXER33zzjWGfo0ePCgBi165dQgg5SDo5OYnMzEzDPkuWLBEeHh6ipKSkYb+ACTVu3Fj897//dchzcP36ddGiRQuxefNm0atXL0P4caRzMWXKFNGxY8dKtznSeRBCiHHjxom77767yu2O+jvztddeE82aNRM6na7BfybY7WVCpaWlSEtLQ0xMjGGdk5MTYmJisGvXLgtW1rBOnz6NzMxMo/Pg6emJ7t27G87Drl274OXlhaioKMM+MTExcHJyQmpqaoPXbCq5ubkAAG9vbwBAWloaysrKjM5F69atERISYnQu2rdvb3RRztjYWOTl5eHw4cMNWL1paLVarFq1CgUFBYiOjnbIczBixAgMGDDA6DsDjvfzcOLECQQFBaFp06aIj4/HuXPnADjeeVizZg2ioqIwZMgQ+Pn5oXPnzvj0008N2x3xd2ZpaSmWL1+O5557DpIkNfjPBMOPCV25cgVarbbClaX9/f2RmZlpoaoanv673u48ZGZmws/Pz2i7s7MzvL29bfZc6XQ6jBo1Cj179kRERAQA+XsqlcoKN8699VxUdq7022zFoUOH4O7uDpVKhZdeegmrV69G27ZtHeocAMCqVauwb98+JCYmVtjmSOeie/fuSEpKwsaNG7FkyRKcPn0a99xzD65fv+5Q5wEA/vzzTyxZsgQtWrTApk2b8PLLL+PVV1/FsmXLADjm78zk5GRcu3YNCQkJABr+34Zd3N6CyBqMGDEC6enp+PXXXy1dikW0atUKBw4cQG5uLr799lsMGzYMKSkpli6rQZ0/fx6vvfYaNm/eDLVabelyLKpfv36G5Q4dOqB79+4IDQ3F119/DY1GY8HKGp5Op0NUVBRmzJgBAOjcuTPS09Px0UcfYdiwYRauzjI+++wz9OvXD0FBQRb5fLb8mFCTJk2gUCgqjE7PyspCQECAhapqePrver
vzEBAQgOzsbKPt5eXlyMnJsclzNXLkSKxduxY///wz7rzzTsP6gIAAlJaW4tq1a0b733ouKjtX+m22QqlUonnz5ujSpQsSExPRsWNHLFiwwKHOQVpaGrKzsxEZGQlnZ2c4OzsjJSUFCxcuhLOzM/z9/R3mXNzKy8sLLVu2xMmTJx3qZwIAAgMD0bZtW6N1bdq0MXQDOtrvzLNnz2LLli144YUXDOsa+meC4ceElEolunTpgq1btxrW6XQ6bN26FdHR0RasrGGFh4cjICDA6Dzk5eUhNTXVcB6io6Nx7do1pKWlGfbZtm0bdDodunfv3uA115UQAiNHjsTq1auxbds2hIeHG23v0qULXFxcjM5FRkYGzp07Z3QuDh06ZPSLbfPmzfDw8KjwC9OW6HQ6lJSUONQ56NOnDw4dOoQDBw4YHlFRUYiPjzcsO8q5uFV+fj5OnTqFwMBAh/qZAICePXtWuATG8ePHERoaCsCxfmcCwNKlS+Hn54cBAwYY1jX4z4RJhmyTwapVq4RKpRJJSUniyJEj4sUXXxReXl5Go9PtwfXr18X+/fvF/v37BQAxd+5csX//fnH27FkhhDxt08vLS/zwww/ijz/+EAMHDqx02mbnzp1Famqq+PXXX0WLFi1satqmEEK8/PLLwtPTU2zfvt1oCmdhYaFhn5deekmEhISIbdu2ib1794ro6GgRHR1t2K6fvvnggw+KAwcOiI0bNwpfX1+bmtI7fvx4kZKSIk6fPi3++OMPMX78eCFJkvjpp5+EEI5xDqpy82wvIRznXLzxxhti+/bt4vTp02Lnzp0iJiZGNGnSRGRnZwshHOc8CCFf9sDZ2VlMnz5dnDhxQqxYsUK4urqK5cuXG/ZxlN+ZWq1WhISEiHHjxlXY1pA/Eww/ZrBo0SIREhIilEql6Natm/j9998tXZLJ/fzzzwJAhcewYcOEEPLUzUmTJgl/f3+hUqlEnz59REZGhtExrl69Kp588knh7u4uPDw8xLPPPiuuX79ugW9Td5WdAwBi6dKlhn2KiorEK6+8Iho3bixcXV3F4MGDxaVLl4yOc+bMGdGvXz+h0WhEkyZNxBtvvCHKysoa+NvU3XPPPSdCQ0OFUqkUvr6+ok+fPobgI4RjnIOq3Bp+HOVcPPHEEyIwMFAolUpxxx13iCeeeMLoujaOch70fvzxRxERESFUKpVo3bq1+OSTT4y2O8rvzE2bNgkAFb6bEA37MyEJIUSt26yIiIiIbBTH/BAREZFDYfghIiIih8LwQ0RERA6F4YeIiIgcCsMPERERORSGHyIiInIoDD9ERETkUBh+iKjWkpKSKtx9+VYJCQkYNGjQbffp3bs3Ro0addt9wsLCMH/+/FrVZ01q8h1vtn37dkiSVOEeRzeryfknoqox/BDZiYSEBEiShJkzZxqtT05OhiRJDV7PggULkJSU1OCfa22+//57vPPOO5Yug4huwvBDZEfUajVmzZqFv//+29KlwNPT06FbJ0pLSwEA3t7eaNSokYWrIaKbMfwQ2ZGYmBgEBAQgMTHxtvt99913aNeuHVQqFcLCwvD+++/X6fM2bdqENm3awN3dHX379sWlS5cM227t9iooKMAzzzwDd3d3BAYGVvqZ2dnZiIuLg0ajQXh4OFasWFFhn2vXruGFF16Ar68vPDw8cP/99+PgwYOG7W+//TY6deqEL7/8EmFhYfD09MTQoUNx/fr1Sr9DXl4eNBoNNmzYYLR+9erVaNSoEQoLCwEA48aNQ8uWLeHq6oqmTZti0qRJKCsrq/C5//3vfxEeHg61Wg2gYrfXl19+iaioKDRq1AgBAQF46qmnjO5Srbdz50506NABarUaPXr0QHp6eqX16/3www+IjIyEWq1G06ZNMXXqVJSXlwMAhBB4++23ERISApVKhaCgILz66qu3PR6RPWP4IbIjCoUCM2bMwKJFi/DXX39Vuk9aWhoef/xxDB06FIcOHcLbb7+NSZMm1bqLqrCwEO+99x
6+/PJL/PLLLzh37hzGjBlT5f5jx45FSkoKfvjhB/z000/Yvn079u3bZ7RPQkICzp8/j59//hnffvstFi9eXCEYDBkyBNnZ2diwYQPS0tIQGRmJPn36ICcnx7DPqVOnkJycjLVr12Lt2rVISUmp0B2o5+HhgYceeggrV640Wr9ixQoMGjQIrq6uAIBGjRohKSkJR44cwYIFC/Dpp59i3rx5Ru85efIkvvvuO3z//fc4cOBApZ9XVlaGd955BwcPHkRycjLOnDmDhISESs/X+++/jz179sDX1xdxcXFGYetmO3bswDPPPIPXXnsNR44cwccff4ykpCRMnz4dgBx2582bh48//hgnTpxAcnIy2rdvX+mxiBxC3e7LSkTWZtiwYWLgwIFCCCF69OghnnvuOSGEEKtXrxY3/1N/6qmnxAMPPGD03rFjx4q2bdvW+LOWLl0qABjdpfvDDz8U/v7+ldZz/fp1oVQqxddff23YfvXqVaHRaAx3PM/IyBAAxO7duw37HD16VAAQ8+bNE0IIsWPHDuHh4SGKi4uN6mnWrJn4+OOPhRBCTJkyRbi6uoq8vDyj79e9e/cqv8/q1auFu7u7KCgoEEIIkZubK9RqtdiwYUOV75kzZ47o0qWL4fWUKVOEi4uLyM7ONtrv1ru632rPnj0CgOHu3D///LMAIFatWmXYR3+u/ve//wkh5PPv6elp2N6nTx8xY8YMo+N++eWXIjAwUAghxPvvvy9atmwpSktLq6yDyJGw5YfIDs2aNQvLli3D0aNHK2w7evQoevbsabSuZ8+eOHHiBLRabY0/w9XVFc2aNTO8DgwMrLT7BpBbYkpLS9G9e3fDOm9vb7Rq1cqoLmdnZ3Tp0sWwrnXr1kbjhg4ePIj8/Hz4+PjA3d3d8Dh9+jROnTpl2C8sLMxonM3tagOA/v37w8XFBWvWrAEgt5R4eHggJibGsM///vc/9OzZEwEBAXB3d8dbb72Fc+fOGR0nNDQUvr6+VX4OILe8xcXFISQkBI0aNUKvXr0AoMKxoqOjK5yryv576s/LtGnTjM7J8OHDcenSJRQWFmLIkCEoKipC06ZNMXz4cKxevdrQJUbkiBh+iOzQvffei9jYWEyYMMFsn+Hi4mL0WpIkCCHM9nkAkJ+fj8DAQBw4cMDokZGRgbFjx962Np1OV+VxlUolHnvsMUPX18qVK/HEE0/A2dkZALBr1y7Ex8ejf//+WLt2Lfbv34+JEycaBjXrubm53bb+goICxMbGwsPDAytWrMCePXuwevVqAKhwrNrIz8/H1KlTjc7JoUOHcOLECajVagQHByMjIwOLFy+GRqPBK6+8gnvvvbfKbjQie+ds6QKIyDxmzpyJTp06GbWuAECbNm2wc+dOo3U7d+5Ey5YtoVAozFJLs2bN4OLigtTUVISEhAAA/v77bxw/ftzQ8tG6dWuUl5cjLS0NXbt2BQBkZGQYXe8mMjISmZmZcHZ2RlhYmElrjI+PxwMPPIDDhw9j27ZtePfddw3bfvvtN4SGhmLixImGdWfPnq31Zxw7dgxXr17FzJkzERwcDADYu3dvpfv+/vvvFc5VmzZtKt03MjISGRkZaN68eZWfrdFoEBcXh7i4OIwYMQKtW7fGoUOHEBkZWevvQWTrGH6I7FT79u0RHx+PhQsXGq1/44030LVrV7zzzjt44oknsGvXLnzwwQdYvHixYZ8+ffpg8ODBGDlypElqcXd3x/PPP4+xY8fCx8cHfn5+mDhxIpycbjQ+t2rVCn379sX//d//YcmSJXB2dsaoUaOg0WgM+8TExCA6OhqDBg3C7Nmz0bJlS1y8eBHr1q3D4MGDERUVVeca7733XgQEBCA+Ph7h4eFGXXQtWrTAuXPnsGrVKnTt2hXr1q0ztNjURkhICJRKJRYtWoSXXnoJ6enpVV4DaNq0afDx8YG/vz8mTpyIJk2aVHnRyMmTJ+Ohhx5CSEgIHnvsMTg5OeHgwYNIT0/Hu+++i6SkJGi1Wn
Tv3h2urq5Yvnw5NBoNQkNDa/0diOwBu72I7Ni0adMqdPdERkbi66+/xqpVqxAREYHJkydj2rRpRjOOTp06hStXrpi0ljlz5uCee+5BXFwcYmJicPfddxuN7wGApUuXIigoCL169cIjjzyCF198EX5+fobtkiRh/fr1uPfee/Hss8+iZcuWGDp0KM6ePQt/f/961SdJEp588kkcPHgQ8fHxRtsefvhhvP766xg5ciQ6deqE3377DZMmTar1Z/j6+iIpKQnffPMN2rZti5kzZ+K9996rdN+ZM2fitddeQ5cuXZCZmYkff/wRSqWy0n1jY2Oxdu1a/PTTT+jatSt69OiBefPmGcKNl5cXPv30U/Ts2RMdOnTAli1b8OOPP8LHx6fW34HIHkjC3J30RERERFaELT9ERETkUBh+iIiIyKEw/BAREZFDYfghIiIih8LwQ0RERA6F4YeIiIgcCsMPERERORSGHyIiInIoDD9ERETkUBh+iIiIyKEw/BAREZFDYfghIiIih/L/zJRiKv/VRnAAAAAASUVORK5CYII=\n"
|
|
},
|
|
"metadata": {}
|
|
}
|
|
]
|
|
},
|
|
{
|
|
"cell_type": "code",
|
|
"source": [],
|
|
"metadata": {
|
|
"id": "KT4X8_hE5NFb"
|
|
},
|
|
"execution_count": 36,
|
|
"outputs": []
|
|
},
|
|
{
|
|
"cell_type": "code",
|
|
"source": [],
|
|
"metadata": {
|
|
"id": "iGKZSfVF2r4z"
|
|
},
|
|
"execution_count": 36,
|
|
"outputs": []
|
|
}
|
|
]
|
|
} |