From decd7a5d37b406046f75f079b07a4fc16aee6449 Mon Sep 17 00:00:00 2001 From: AdrianLsk Date: Wed, 15 Jun 2016 10:40:26 +0200 Subject: [PATCH 01/10] initial commit for ladder nets --- examples/ladder_nets/LadderNets.ipynb | 592 ++++++++++++++++++++++ examples/ladder_nets/ladder_nets.py | 333 ++++++++++++ examples/ladder_nets/train_ladder_nets.py | 203 ++++++++ examples/ladder_nets/utils.py | 118 +++++ 4 files changed, 1246 insertions(+) create mode 100644 examples/ladder_nets/LadderNets.ipynb create mode 100644 examples/ladder_nets/ladder_nets.py create mode 100644 examples/ladder_nets/train_ladder_nets.py create mode 100644 examples/ladder_nets/utils.py diff --git a/examples/ladder_nets/LadderNets.ipynb b/examples/ladder_nets/LadderNets.ipynb new file mode 100644 index 0000000..14aa391 --- /dev/null +++ b/examples/ladder_nets/LadderNets.ipynb @@ -0,0 +1,592 @@ +{ + "cells": [ + { + "cell_type": "code", + "execution_count": 1, + "metadata": { + "collapsed": false, + "scrolled": true + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "/home/adrian/code/sideprojects\n" + ] + } + ], + "source": [ + "cd /home/adrian/code/sideprojects/" + ] + }, + { + "cell_type": "code", + "execution_count": 88, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [ + "import sys\n", + "sys.path.append('/home/adrian/code/sideprojects/')" + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "Using gpu device 2: Tesla K80 (CNMeM is disabled, cuDNN 4007)\n" + ] + } + ], + "source": [ + "# import ladder_nets\n", + "# reload(ladder_nets)\n", + "from ladder_nets import build_model, build_cost" + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "metadata": { + "collapsed": false + }, + "outputs": [], + "source": [ + "num_encoder = [500, 10]\n", + "num_decoder = [500, 784]\n", + "\n", + "[train_output_l, 
eval_output_l], dirty_net, clean_net = build_model(num_encoder, num_decoder, 0.3, 0.3, \n", + " batch_size=50, inp_size=784,\n", + " combinator_type='milaUDEM')" + ] + }, + { + "cell_type": "code", + "execution_count": 4, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "data": { + "text/plain": [ + "[['input', (50, 784)],\n", + " ['input_corr', (50, 784)],\n", + " ['dirty_enc_dense_0', (50, 500)],\n", + " ['dirty_enc_batchn_0_norm', (50, 500)],\n", + " ['dirty_enc_noise_0_', (50, 500)],\n", + " ['dirty_enc_batchn_0_learn', (50, 500)],\n", + " ['dirty_enc_activation_0', (50, 500)],\n", + " ['dirty_enc_dense_1', (50, 10)],\n", + " ['dirty_enc_batchn_1_norm', (50, 10)],\n", + " ['dirty_enc_noise_1_', (50, 10)],\n", + " ['dirty_enc_batchn_1_learn', (50, 10)],\n", + " ['dirty_enc_softmax', (50, 10)],\n", + " ['dec_batchn_softmax', (50, 10)],\n", + " ['dec_combinator_0', (50, 10)],\n", + " ['dec_batchn_0', (50, 10)],\n", + " ['dec_dense_1', (50, 500)],\n", + " ['dec_batchn_dense_1', (50, 500)],\n", + " ['dec_combinator_1', (50, 500)],\n", + " ['dec_batchn_1', (50, 500)],\n", + " ['dec_dense_2', (50, 784)],\n", + " ['dec_combinator_2', (50, 784)]]" + ] + }, + "execution_count": 4, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "map(lambda x: [x.name, x.output_shape], dirty_net.values())" + ] + }, + { + "cell_type": "code", + "execution_count": 5, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "data": { + "text/plain": [ + "[['input', (50, 784)],\n", + " ['clean_enc_dense_0', (50, 500)],\n", + " ['clean_enc_batchn_0_norm', (50, 500)],\n", + " ['clean_enc_batchn_0_learn', (50, 500)],\n", + " ['clean_enc_activation_0', (50, 500)],\n", + " ['clean_enc_dense_1', (50, 10)],\n", + " ['clean_enc_batchn_1_norm', (50, 10)],\n", + " ['clean_enc_batchn_1_learn', (50, 10)],\n", + " ['clean_enc_softmax', (50, 10)]]" + ] + }, + "execution_count": 5, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + 
"map(lambda x: [x.name, x.output_shape], clean_net.values())" + ] + }, + { + "cell_type": "code", + "execution_count": 6, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [ + "import theano.tensor as T\n", + "import lasagne\n", + "import theano" + ] + }, + { + "cell_type": "code", + "execution_count": 7, + "metadata": { + "collapsed": false + }, + "outputs": [], + "source": [ + "# set up input/output variables\n", + "X = T.fmatrix('X')\n", + "y = T.imatrix('y')\n", + "\n", + "# training output\n", + "output_train = lasagne.layers.get_output(train_output_l, X, deterministic=False)\n", + "\n", + "# evaluation output. Also includes output of transform for plotting\n", + "output_eval = lasagne.layers.get_output(eval_output_l, X, deterministic=True)\n", + "\n", + "# set up (possibly amortizable) lr, cost and updates\n", + "sh_lr = theano.shared(lasagne.utils.floatX(0.0005))\n", + "\n", + "cost, rec_costs = build_cost(X, y, num_decoder, dirty_net, clean_net, output_train, [0.1]*3)\n", + "\n", + "net_params = lasagne.layers.get_all_params(train_output_l, trainable=True)\n", + "updates = lasagne.updates.adam(cost, net_params, learning_rate=sh_lr)\n", + "\n", + "# # get training and evaluation functions\n", + "train = theano.function([X, y], [cost + T.sum(rec_costs)] + rec_costs, updates=updates)\n", + "eval = theano.function([X], [output_eval])" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [ + "network_dump = {'output_layer': output_layer,\n", + " 'net': net,\n", + " 'x': X,\n", + " 'y': y,\n", + " 'output_eval': output_eval\n", + " }" + ] + }, + { + "cell_type": "code", + "execution_count": 8, + "metadata": { + "collapsed": false + }, + "outputs": [], + "source": [ + "from fuel.datasets import MNIST\n", + "from fuel.streams import DataStream\n", + "from fuel.schemes import ShuffledScheme\n", + "from fuel.transformers import Flatten, OneHotEncoding, Duplicate, 
Mapping, \\\n", + " FilterSources, Merge\n", + "\n", + "mnist = MNIST(which_sets=('train',), # sources='features',\n", + " subset=slice(0, 50000), load_in_memory=True)\n", + "mnist_val = MNIST(which_sets=('train',), # sources='features',\n", + " subset=slice(50000, 60000), load_in_memory=True)\n", + "mnist_test = MNIST(which_sets=('test',), # sources='features',\n", + " load_in_memory=True)\n", + "\n", + "batch_size = 100\n", + "\n", + "data_stream = DataStream(mnist,\n", + " iteration_scheme=ShuffledScheme(mnist.num_examples,\n", + " batch_size=batch_size))\n", + "\n", + "data_stream_val = DataStream(mnist_val,\n", + " iteration_scheme=ShuffledScheme(mnist_val.num_examples,\n", + " batch_size=batch_size))\n", + "\n", + "data_stream_test = DataStream(mnist_test,\n", + " iteration_scheme=ShuffledScheme(mnist_test.num_examples,\n", + " batch_size=batch_size))\n", + "\n", + "data_stream = Flatten(data_stream, which_sources=('features',))\n", + "data_stream_val = Flatten(data_stream_val, which_sources=('features',))\n", + "data_stream_test = Flatten(data_stream_test, which_sources=('features',))\n", + "\n", + "num_classes = 10\n", + "\n", + "data_stream = OneHotEncoding(data_stream=data_stream,\n", + " which_sources=('targets',),\n", + " num_classes=num_classes)\n", + "\n", + "data_stream_val = OneHotEncoding(data_stream=data_stream_val,\n", + " which_sources=('targets',),\n", + " num_classes=num_classes)\n", + "\n", + "data_stream_test = OneHotEncoding(data_stream=data_stream_test,\n", + " which_sources=('targets',),\n", + " num_classes=num_classes)" + ] + }, + { + "cell_type": "code", + "execution_count": 9, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [ + "batch = next(data_stream.get_epoch_iterator())" + ] + }, + { + "cell_type": "code", + "execution_count": 10, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [ + "import matplotlib.pyplot as plt\n", + "%matplotlib inline" + ] + }, + { + "cell_type": "code", + 
"execution_count": 18, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "[array(0.7222740358114242), array(0.008365500718355179, dtype=float32), array(0.013544824905693531, dtype=float32), array(0.05210600048303604, dtype=float32)]\n" + ] + }, + { + "data": { + "text/plain": [ + "(array([ 114., 74., 19., 29., 11., 16., 12., 42., 67., 116.]),\n", + " array([ 0.99610198, 0.99688592, 0.99766986, 0.99845381, 0.99923775,\n", + " 1.0000217 , 1.00080564, 1.00158958, 1.00237353, 1.00315747,\n", + " 1.00394142]),\n", + " )" + ] + }, + "execution_count": 18, + "metadata": {}, + "output_type": "execute_result" + }, + { + "data": { + "image/png": "iVBORw0KGgoAAAANSUhEUgAAAYIAAAENCAYAAAACHGKEAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAE31JREFUeJzt3X/wZXV93/HnCxeqiey3q5W91VU2JIjEiSGUCJ3GuiOo\naDpC0w4TmjoiqekIUUZb465xhm0mjaCttk2GSa2Uro7EQW0KtibgzvJ1xjaIERYQcLutupCd3a81\n2F2dtsDiu3/cA16+7u736/2x97Cf52Pmzt577jn3vPZ+v9/7uuecez43VYUkqV0nzDuAJGm+LAJJ\napxFIEmNswgkqXEWgSQ1ziKQpMatWARJrk+ylOTekWkfTPJgkp1JPptk7ch9W5Ls7u5/3ayCS5Km\nYzVbBDcAr1827Tbg5VV1FrAb2AKQ5GeBS4AzgTcA1yXJ9OJKkqZtxSKoqi8B3102bXtV/aC7eQew\nobv+JuBTVXWoqr7FsCReOb24kqRpm8YxgsuBz3fXXwQ8PHLf3m6aJKmnJiqCJL8NPF5VfzSlPJKk\nY2zNuAsmuQx4I/Cakcl7gReP3N7QTTvc8g5yJEljqKqpHntd7RZBusvwRnIh8B7gTVX16Mh8twC/\nmuSkJD8F/Axw55EetKp6f7n66qvnnsGc5nwm53wmZHwm5ZyFFbcIktwIbAKen+Qh4GrgfcBJwBe6\nDwXdUVVXVNUDSW4CHgAeB66oWSWXJE3FikVQVf/gMJNvOMr8HwA+MEkoSdKx45nFK9i0adO8I6yK\nOafLnNPzTMgIz5ycs5B57blJ4l4jSfoxJaHmdLBYknScsggkaQWDwUaSzPUyGGyc2f/PXUOStILh\npyPn/XoVqspdQ5Kk6Rv7zOJpe/vb/yl33nn3XDOce+7ZXHfdh+aaQZKOtd7sGlpYGHDw4O8Dz5tL\nHvhL1q69igMH9s1p/ZL66njfNdSbLYKhVwGDOa3bApDUJo8RSFLjLAJJapxFIEmNswgkqXEWgSQ1\nziKQpMZZBJLUOItAkhpnEUhS4ywCSWqcRSBJjbMIJKlxFoEkNc4ikKTGWQSS1DiLQJIaZxFIUuMs\nAklqnEUgSY1bsQiSXJ9kKcm9I9PWJbktya4ktyZZGLlvS5LdSR5M8rpZBZckTcdqtghuAF6/bNpm\nYHtVnQHsALYAJPlZ4BLgTOANwHVJMr24kqRpW7EIqupLwHeXTb4I2NZd3wZc3F1/E/CpqjpUVd8C\ndgOvnE5USdIsjHuM4JSqWgKoqv3AKd30FwE
Pj8y3t5smSeqpaR0srik9jiTpGFsz5nJLSdZX1VKS\nAfDtbvpe4MUj823oph3W1q1bn7p+6NBjY0aRpOPZYnd5+mvmNKVq5TfzSTYCn6uqn+tuXws8UlXX\nJnkvsK6qNncHiz8JnMtwl9AXgNPrMCtJ8rTJCwsDDh7cCQwm/k+NZx9r157NgQP75rR+SX01/MzL\nvHd8hKoiCVU11Q/hrLhFkORGYBPw/CQPAVcD1wCfTnI5sIfhJ4WoqgeS3AQ8ADwOXHG4EpAk9ceq\ntghmsmK3CCQ9QxzvWwSeWSxJjbMIJKlxFoEkNc4ikKTGWQSS1DiLQJIaZxFIUuMsAklqnEUgSY2z\nCCSpcRaBJDXOIpCkxlkEktQ4i0CSGmcRSFLjLAJJapxFIEmNswgkqXEWgSQ1ziKQpMZZBJLUOItA\nkhpnEUhS4ywCSWqcRSBJjbMIJKlxFoEkNc4ikKTGWQSS1LiJiiDJliT3J7k3ySeTnJRkXZLbkuxK\ncmuShWmFlSRN39hFkORU4G3AL1TVK4A1wKXAZmB7VZ0B7AC2TCOoJGk2JtkiOAg8BvxkkjXAc4C9\nwEXAtm6ebcDFEyWUJM3U2EVQVd8F/iXwEMMCOFBV24H1VbXUzbMfOGUaQSVJs7Fm3AWTnAa8CzgV\nOAB8OsmvAbVs1uW3n7J169anrh869Ni4USTpOLbYXZ7+mjlNqTri6/TRF0wuAV5bVW/rbr8ZOA94\nDbCpqpaSDIDbq+rMwyxfo+teWBhw8OBOYDBWnsntY+3aszlwYN+c1i+pr5JwlPe0xyoFVUUSqirT\nfORJjhHsAs5L8uwMn6XzgQeAW4DLunneAtw8UUJJ0kyNvWuoqu5J8nHgq8ATwN3AR4GTgZuSXA7s\nAS6ZRlBJ0myMXQQAVfUh4EPLJj8CXDDJ40qSjh3PLJakxlkEktQ4i0CSGmcRSFLjLAJJapxFIEmN\nm+jjo8eb733vYHcG4fysX38q+/d/a64ZJLXFIhhR9X+Y92nkS0vzLSJJ7XHXkCQ1ziKQpMZZBJLU\nOItAkhpnEUhS4ywCSWqcRSBJjbMIJKlxFoEkNc4ikKTGWQSS1DjHGpLUa4PBRpaW9sw7xnHNIpDU\na8MSmO9gkHB8DwbpriFJapxFIEmNswgkqXEWgSQ1ziKQpMZZBJLUOItAkho3UREkWUjy6SQPJrk/\nyblJ1iW5LcmuJLcmWZhWWEnS9E26RfCvgc9X1ZnAzwNfBzYD26vqDGAHsGXCdUiSZmjsIkiyFnhV\nVd0AUFWHquoAcBGwrZttG3DxxCklSTMzyRbBTwHfSXJDkruSfDTJTwDrq2oJoKr2A6dMI6gkaTYm\nGWtoDXA2cGVV/XmSjzDcLbR8UJAjDhKydevWp64fOvTYBFEk6Xi12F2e/po5TakabzCnJOuBP6uq\n07rbv8SwCH4a2FRVS0kGwO3dMYTly9fouhcWBhw8uBMYjJVncvuAF9KHwa3G/ZlIx6Mk9OHvsg8Z\nqookVNVUR8Ebe9dQt/vn4SQv7SadD9wP3AJc1k17C3DzJAElSbM16TDU7wQ+meRE4BvAW4FnATcl\nuRzYA1wy4TokSTM0URFU1T3ALx7mrgsmeVxJ0rHjmcWS1DiLQJIaZxFIUuMsAklqnEUgSY2zCCSp\ncRaBJDXOIpCkxlkEktQ4i0CSGmcRSFLjLAJJapxFIEmNswgkqXEWgSQ1ziKQpMZZBJLUOItAkhpn\nEUhS4ywCSWqcRSBJjbMIJKlxFoEkNc4ikKTGWQSS1DiLQJIaZxFIUuMsAklq3MRFkOSEJHcluaW7\nvS7JbUl2Jbk1ycLkMSVJszKNLYKrgAdGbm8GtlfVGcAOYMsU1iFpDgaDjSSZ60WzN1ERJNkAvBH4\n2Mjki4Bt3fVtwMWTrEPS/Cwt7QFqzhfN2qRbBB8B3sPTf1rrq2oJoKr2A6dMuA5J0gytGXfBJL8M\nLFXVziS
bjjLrESt969atT10/dOixcaNI0nFssbs8/TVzmlI13qZXkt8D/iFwCHgOcDLwx8A5wKaq\nWkoyAG6vqjMPs3yNrnthYcDBgzuBwVh5JrcPeCHz3xQN4/5MpGkb7qOf9++jGZ7MUFUkoaqmevBk\n7F1DVfW+qnpJVZ0G/Cqwo6reDHwOuKyb7S3AzROnlCTNzCzOI7gGeG2SXcD53W1JUk+NfYxgVFV9\nEfhid/0R4IJpPK4kafY8s1iSGmcRSFLjLAJJapxFIEmNswgkqXEWgSQ1ziKQpMZZBJLUOItAkhpn\nEUhS4ywCSWqcRSBJjbMIJKlxFoEkNc4ikKTGWQSS1DiLQD9iMNhIkrleBoON834apGZM5RvKdHxZ\nWtrDvL+oe2lpqt/NLeko3CKQpMZZBJLUOItAkhpnEUhS4ywCSWqcRSBJjbMIJKlxFoEkNc4ikKTG\nWQSS1LixiyDJhiQ7ktyf5L4k7+ymr0tyW5JdSW5NsjC9uJKkaZtki+AQ8O6qejnwN4Erk7wM2Axs\nr6ozgB3AlsljSpJmZewiqKr9VbWzu/594EFgA3ARsK2bbRtw8aQhJUmzM5VjBEk2AmcBdwDrq2oJ\nhmUBnDKNdUiSZmPiIkjyXOAzwFXdlsHy8YvnO56xJOmoJvo+giRrGJbAJ6rq5m7yUpL1VbWUZAB8\n+0jLb9269anrhw49NkkUSTpOLXaXp79mTlOqxn/DnuTjwHeq6t0j064FHqmqa5O8F1hXVZsPs2yN\nrnthYcDBgzuBwdh5JrMPeCHz34B5NvDonDPA/J+HMMnvpqYjCX34XTDDMENVkYSqmuo3N429RZDk\nbwG/BtyX5G6Gz9L7gGuBm5JcDuwBLplG0HY8Sh9+4SS1Y+wiqKr/CjzrCHdfMO7jSpKOLc8slqTG\nWQSS1DiLQJIaZxFIRzAYbCTJ3C6DwcZ5PwVqxETnEUjHs6WlPczzE1xLS356S8eGWwSS1DiLQJIa\nZxFIUuMsAklqnEUgSY3zU0NSb/2VbtA3abYsAqm3HIBQx4ZFoJ7y3bB0rFgE6infDUvHigeLJalx\nFoEkNc4ikKTGWQSS1DiLQJIaZxFIUuMsAklqnEUgSY2zCCSpcRaBJDXOIpCkxlkEktQ4i0CSGmcR\nSFLjZlYESS5M8vUk/z3Je2e1HknSZGZSBElOAP4AeD3wcuDSJC+bxbpmb3HeAVZpcd4BVmlx3gFW\naXHeAVZpcd4BVmFx3gFWaXHeAeZmVlsErwR2V9Weqnoc+BRw0YzWNWOL8w6wSovzDrBKi/MOsEqL\n8w6wSovzDrAKi/MOsEqL8w4wN7MqghcBD4/c/otumiSpZ3rzVZUnnngiJ5/8ZpJnz2X9Vf+P731v\nLquWpLlK1fS/FzbJecDWqrqwu70ZqKq6dmSeeX8hrSQ9I1XVVL9Qe1ZF8CxgF3A+sA+4E7i0qh6c\n+sokSROZya6hqnoiyW8CtzE8DnG9JSBJ/TSTLQJJ0jPH1D41tJoTyJL8myS7k+xMctZKyyb5+0m+\nluSJJGf3NOMHkzzYzf/ZJGt7mvN3ktzTzb89yYY+5hy5/58k+UGS5/UxZ5Krk/xFkru6y4V9zNnd\n947ud/S+JNf0MWeST408l99McldPc/5ikjuT3N39e04PM74iyX/r/t5vTvLcFYNU1cQXhoXyP4BT\ngROBncDLls3zBuC/dNfPBe5YaVngDOB0YAdwdk8zXgCc0F2/BvhAT3M+d2T5dwAf62PO7v4NwJ8C\n3wSe18ecwNXAu6fx9zPjnJsY7qJd093+a33MuWz5fwG8v485gduB140sf3sPM94J/FJ3/TLgd1bK\nMq0tgtWcQHYR8HGAqvoysJBk/dGWrapdVbUbmMYR8lll3F5VP+iWv4Phi1gfc35/ZPmfBL7Tx5yd\njwDvmTDfscg5zU9uzCrn24FrqupQt1yff+5PugT4o57m3AcsdNf/KrC3h
xlfWlVf6q5vB/7eSkGm\nVQSrOYHsSPMcq5PPjkXGy4E/6WvOJL+b5CGG7xI+0MecSd4EPFxV902Yb6Y5O7/Zba5/LMkCk5lV\nzpcCfzvJHUlun3RXxgxzApDkVcD+qvqfPc25Gfhw93f0QWBLDzN+rfs7gmGprvjmdJ6jj071c7Az\nsuqMSX4beLyqbpxhniOufjUzVdX7q+olwA3Av5ptpMM6as4kzwHex3C3y6qWmZHVrPM64LSqOgvY\nD3x4tpEOazU51wDrquo84LeAm2Yb6bB+nJ/hpUy+NTCu1eS8HnhH93f0LuDfzzbSj1hNxl8Hrkzy\nFYZb/4+ttMC0Pj66F3jJyO0N/Ogm017gxYeZ56RVLNvrjEkuA94IvKbPOUfcCHy+hzl/GtgI3JMk\n3fSvJnllVX27Rzmpqv81Mv3fAZ8bM99MczJ8p/gfu8xf6Q7AP7+q/rJnOZ88/+hXgIk/GDLDnOdW\n1WsBquozSa7vW8aq2sVwwE+SnA788opJJjkgM3JA41n88MDFSQwPXJy5bJ438sODHufxw4Meq1n2\nduBv9DEjcCFwP/D8Pj+XwM+MLP8O4BN9zLls+W8yfDfbu5zAYGT5dwE39jTnPwb+WXf9pcCePuYc\n+Vu6vad/R08eiP0q8Oru+vnAV3qU8cmf+Qu6f08AtgGXrZhlGk/6yA9xF7Ab2DzyS/gbI/P8QRf+\nHkY+BXS4ZbvpFzPcD/Z/GR6k+ZMeZtwN7AHu6i7X9fS5/AxwL3A38FnglD7mXPb432DCTw3N8Pn8\nePd87gT+E7C+pzlPBD4B3Af8Od2LWN9ydvfdMPoYfcwJnAN8ufs7+jPgF3qY8Z3d9K8Dv7eaHJ5Q\nJkmN86sqJalxFoEkNc4ikKTGWQSS1DiLQFLzVjtQW5KrusH77kty1cj0WQxCeGU32NwTmcLgi0dj\nEUhqSpJXJ7lh2eSPAb9VVT8P/DHDs7CXL/dyhmftngOcBfydJKeNzPLhqjq7u/zpFKJ+ieG5Cnum\n8FhHZRFIatHyz82fXisP1HYm8OWqerSqngC+yPBM6Cf9yPAPSU7IcKj6L3fjUr1t1QGr7qmqhw73\nuNNmEUhq0fIX1/tXMVDb14BXJVmX5CcYnvU7OvzD4QYh/HXgf1fVuQxHDP2NJKdO778xHZ5QJqkJ\nSe5gOBzDycA64KHurvcy3P3y+8DzgFuAd1bVCw7zGG8FrgS+z3BomUer6t1JXgB8p6oqye8yHILk\nHyX5NPBzDEdHAFjL8MzhHQzPFB59AU53+61V9dWRdX6T4RA7j0zhaTgsi0BSU5K8GnhLVV1+hPtP\nZzgW13krPM4/Zzhk+h8um34q8LmqekWSzwD/tqq+MEHebwDnzLII3DUkqXndO3qSnAC8H/jDFeZ7\nCfB3GY7kS5LByGy/wnA3EsCtwBVJ1nTznd4Nt/5jxWPGxwksAkmCS5PsAh4A9lbVfwBI8teT/OeR\n+T6b5GvAzcAVVXWwm/7BJPcm2Qm8muGItDD8NNIDwF1J7mNYMKsa/j/D75p+mOEXztyT5KOT/ReP\nsi53DUlS29wikKTGWQSS1DiLQJIaZxFIUuMsAklqnEUgSY2zCCSpcRaBJDXu/wMNUnqS3U06OQAA\nAABJRU5ErkJggg==\n", + "text/plain": [ + "" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "print train(*batch)\n", + "plt.hist(dirty_net['enc_batchn_0_learn'].gamma.get_value(borrow=True).ravel())" + ] + }, + { + "cell_type": "code", + "execution_count": 69, + "metadata": { + "collapsed": false + }, + 
"outputs": [], + "source": [ + "train_mean = theano.function([X, y], [cost, dirty_net['enc_batchn_0_norm'].mean, dirty_net['enc_batchn_0_norm'].inv_std,\n", + " dirty_net['enc_batchn_0_learn'].mean, dirty_net['enc_batchn_0_learn'].inv_std],\n", + " updates=updates)" + ] + }, + { + "cell_type": "code", + "execution_count": 71, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [ + "import numpy as np" + ] + }, + { + "cell_type": "code", + "execution_count": 75, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "0.485343794748\n" + ] + }, + { + "data": { + "text/plain": [ + "(array([ 0., 0., 0., 0., 0., 500., 0., 0., 0., 0.]),\n", + " array([ 0.5, 0.6, 0.7, 0.8, 0.9, 1. , 1.1, 1.2, 1.3, 1.4, 1.5]),\n", + " )" + ] + }, + "execution_count": 75, + "metadata": {}, + "output_type": "execute_result" + }, + { + "data": { + "image/png": "iVBORw0KGgoAAAANSUhEUgAAAlsAAAE4CAYAAABsTTYYAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAIABJREFUeJzt3X+wXWV97/H3J8TwQyAN2JxTCSQggsFRfhQCc7F1VzT8\ncG7COB0GaS0/qu1cLGXUUhLsncTbTiHMeKmOpR0EY6BQDFgFLJoQw75erCH8SAiSkMYfCSElB/kh\njuMVE/jeP9ZzwuZk73N29l5rr/3j85rZk7WfvfZzvmfvtZ7zzfM861mKCMzMzMysGJPKDsDMzMys\nnznZMjMzMyuQky0zMzOzAjnZMjMzMyuQky0zMzOzAjnZMjMzMyvQhMmWpFskjUjaUFN2vaRNktZL\n+rqkQ2teWyhpS3p9blGBm5k1Q9JWSU9IWidpbSqbJmmlpM2SVkiaWrO/2zAzy1UzPVtLgbPHlK0E\n3h0RJwFbgIUAkk4ALgBmA+cCN0pSfuGame2z14FKRJwcEXNS2QJgVUQcD6zGbZiZFWjCZCsiHgJe\nHlO2KiJeT0/XADPS9jzgzojYHRFbyRKxOZiZlUfs3dbNB5al7WXA+WnbbZiZ5S6POVuXAfen7SOA\n7TWv7UhlZmZlCeABSY9I+ngqG4qIEYCI2AlMT+Vuw8wsd5PbebOkzwK7IuJfc4rHzCxvZ0bEc5J+\nG1gpaTNZAlbL9y0zs8K0nGxJugQ4D/hATfEO4Mia5zNSWb33u3EzG0AR0dE5UBHxXPr3Z5K+STYs\nOCJpKCJGJA0Dz6fd3YaZWUOttl/NDiMqPbIn0jnAVcC8iHi1Zr97gQslTZF0NHAssLZRpRHRE49F\nixaVHoPjdJz9EGunSTpI0sFp+63AXOBJsrbqkrTbxcA9abvv2rBeOTZ6Jc43juPI+ZH/8dQrn2mv\nxNmOCXu2JN0BVIDDJT0DLAKuAaaQzYMAWBMRl0fERknLgY3ALuDyaDdCM7PWDQHfSL1Qk4HbI2Kl\npEeB5ZIuA7aRXYGI2zAzK8KEyVZEXFSneOk4+18LXNtOUGZmeYiInwI
n1Sl/Cfhgg/e4DTOzXHkF\n+SZUKpWyQ2iK48xXr8QJvRWrdVavHBu9Emcv6ZXPtFfibIfK6iGX5N55swEjiejwBPmiuA0bXNn0\nmby/e7U9L8iK1U775Z4tMzMzswI52TIzMzMrkJMtMzMzswI52TIzMzMrkJMtMzMzswI52TIzMzMr\nkJMtMzMzswI52TIzMzMrkJMtMzMzswI52TIzMzMr0ITJlqRbJI1I2lBTNk3SSkmbJa2QNLXmtYWS\ntkjaJGluUYFb7xsenoWkQh7Dw7PK/vXMzMyA5nq2lgJnjylbAKyKiOOB1cBCAEknABcAs4FzgRuV\n3UTKelSRCdHIyDay+4vl/8jqNjMzK9+EyVZEPAS8PKZ4PrAsbS8Dzk/b84A7I2J3RGwFtgBz8gnV\nylBkQmRmZjYIWp2zNT0iRgAiYicwPZUfAWyv2W9HKjMzMzMbSHlNkHc3hZmZmVkdk1t834ikoYgY\nkTQMPJ/KdwBH1uw3I5XVtXjx4j3blUqFSqXSYjhm1o2q1SrVarXsMJA0CXgUeDYi5kmaBnwNmAls\nBS6IiFfSvguBy4DdwJURsbKcqM2sXyhi4k4pSbOA+yLiPen5EuCliFgi6WpgWkQsSBPkbwdOJxs+\nfAB4Z9T5IZLqFVuXya5vKOp7KrZuH1/dRxIR0fGLZiR9Cvhd4NCUbC0BXoyI6xu0YaeR/WdxFW7D\nbIxi2kW3Wd2unfarmaUf7gD+AzhO0jOSLgWuAz4kaTNwVnpORGwElgMbgfuBy90amVmZJM0AzgNu\nrin2RT5m1jETDiNGxEUNXvpgg/2vBa5tJygzsxzdAFwFTK0pG6q9yEdS7UU+P6jZzxf5mFnbWp2z\nZWbW9SR9GBiJiPWSKuPs2lIPvOedmvWvPOecNjVnqwie79AbPGfL8tTpOVuS/h74Y7LJ7gcChwDf\nAE4FKjUX+TwYEbMlLQAiIpak938HWBQRD9ep223YgPKcrcFU6JwtM7NeFRHXRMRREXEMcCGwOiI+\nBtwHXJJ2uxi4J23fC1woaYqko4FjgbUdDtvM+oyHEc1sEF0HLJd0GbCN7DZjRMRGSaMX+ezCF/mY\nWQ48jGjj8jCi5amspR+K4DZscHkYcTB5GNHMzMysSznZMjMzMyuQky0zMzOzAjnZMjMzMyuQky0z\nMzOzAjnZMjMzMyuQky0zMzOzArWVbElaKOkpSRsk3Z5WXZ4maaWkzZJWSJo6cU1mZmZm/anlZEvS\nTOATwMkR8V6y1eg/CiwAVkXE8cBqYGEegZqZmZn1onZ6tn4B/AZ4q6TJZDd53QHMB5alfZYB57cV\noZmZmVkPaznZioiXgc8Dz5AlWa9ExCpgKCJG0j47gel5BGpmZmbWi9oZRjwG+BQwE3g7WQ/XH7H3\nDaN8syczMzMbWJPbeO+pwPcj4iUASd8A/hswImkoIkYkDQPPN6pg8eLFe7YrlQqVSqWNcMys21Sr\nVarVatlhmJmVSq3eZVzSicC/AKcBrwJLgUeAo4CXImKJpKuBaRGxoM77w3c4737F3N1+T+2F1u3j\nq/tIIiJUdhx5cBs2uIppF91mdbt22q+We7Yi4glJtwKPAa8B64CbgEOA5ZIuA7YBF7T6M8zMzMx6\nXcs9W23/YP+vsCe4Z8vy5J4t6wfu2RpM7bRfXkG+DwwPz0JSIQ+zXiZpf0kPS1qXFmD++1TecPHl\ntFjzFkmbJM0tL3oz6xfu2eoDvdz75J6twVJGz5akgyLiV5L2A74PfAaYB7wYEdfXzi2VdAJwO9lc\n1BnAKuCd9Rort2GDyz1bg8k9W2ZmDUTEr9Lm/mRt3ss0Xnx5HnBnROyOiK3AFmBO56I1s37kZMvM\n+pqkSZLWATuBakRspPHiy0cA22veviOVmZm1rJ11tszMul5EvA6cLOlQYIWkCl582cw6yMmWmQ2E\niPiFpPvJFmRutPjyDuDImrfNSGV
1eWFms/6V56LMniDfBzxBvn7dPr66T6cnyEt6G7ArIl6RdCCw\nAvgcMJc6iy/XTJA/nWz48AE8Qd7G8AT5wVTKoqZmZj3gd4Blyv46TgJui4jvpjlcey2+HBEbJS0H\nNgK7gMudUZlZu9yz1Qfcs1W/bh9f3ceLmlo/cM/WYPLSD2ZmZmZdysmWmZmZWYGcbJmZmZkVyMmW\nmZmZWYHaSrYkTZV0V7ph61OSTh/vBq9mZmZmg6bdnq0vAPdHxGzgROBpYAGwKiKOB1YDC9v8GWYt\n2B9JhTyGh2eV/cuZmVkPaXnph3Tri3UR8Y4x5U8D769ZmbkaEe+q835fNp0TL/3Q+bp97LbGSz9Y\nP/DSD4OprKUfjgZekLRU0uOSbpJ0EI1v8GpmZmY2cNpZQX4ycArwyYh4VNINZEOITd/g1fcVM+tv\ned5bzMysV7UzjDgE/CAijknP30eWbL0DqNQMIz6Y5nSNfb+74HPiYcTO1+1jtzUeRrR+4GHEwVTK\nMGIaKtwu6bhUdBbwFHAvcEkquxi4p9WfYWZmZtbr2ro3oqQTgZuBtwA/AS4F9gOWA0eSbvAaET+v\n817/rzAn7tnqfN0+dlvjni3rB+7ZGkzttF++EXUfcLLV+bp97LbGyZb1Aydbg8k3ojYzMzPrUk62\nzMzMzArkZMvMzMysQE62zKxvSZohaXW6d+uTkv4ylTe8h6ukhZK2pHu+zi0vejPrF54g3wc8Qb7z\ndfvYbU2nJ8intf6GI2K9pIOBx4D5ZFdOvxgR10u6GpgWEQsknQDcDpwGzABWAe+s11i5DRtcniA/\nmDxB3sysjojYGRHr0/YvgU1kSdR8YFnabRlwftqeB9wZEbsjYiuwBZjT0aDNrO842TKzgSBpFnAS\nsIbG93A9Athe87YdqczMrGVOtsys76UhxLuBK1MPV9P3cDUza1c7N6I2M+t6kiaTJVq3RcTo7cNG\nJA3V3MP1+VS+g+zuF6NmpLK6Fi9evGe7UqlQqVRyjNzMylStVqlWq7nU5QnyHTI8PIuRkW0F/oTe\nnGjem3UfALxaSM1DQzPZuXNrIXV3gzJWkJd0K/BCRHy6pmwJ8FJELGkwQf50suHDB/AEeRvDE+QH\nk2/X0wN8xaDrbrbufj4vSrga8Uzge8CTZF9aANcAa2lwD1dJC4E/BXaRDTuubFD3QLVh9gYnW4PJ\nyVYPcLLluputu5/PC98b0fqBk63BVOrSD5ImSXpc0r3pecPFAs3MzMwGTR5XI14JbKx5vgBYFRHH\nA6uBhTn8DDMzM7Oe1FayJWkGcB5wc01xo8UCzczMzAZOuz1bNwBX8ebB60aLBZqZmZkNnJbX2ZL0\nYWAk3XOsMs6uDWf8eY0as/6W5zo1Zma9quWrESX9PfDHwG7gQOAQ4BvAqUClZrHAByNidp33D9SV\nPL4a0XU3W3c/nxe+GtH6ga9GHEylXI0YEddExFERcQxwIbA6Ij4G3Adckna7GLinQRVmZmZmfa+I\neyNeB3xI0mbgrPTczMzMbCB5UdMO8TCi62627n4+LzyMaP3Aw4iDqdRFTc3MzMysMSdbZmZmZgVy\nsmVmZmZWICdbZmZmZgVysmVmZmZWICdbZmZmZgVysmVmZmZWICdbZmZmZgVysmVmfU3SLZJGJG2o\nKZsmaaWkzZJWSJpa89pCSVskbZI0t5yozayfONkys363FDh7TNkCYFVEHA+sBhYCSDoBuACYDZwL\n3KhsuXAzs5Y52TKzvhYRDwEvjymeDyxL28uA89P2PODOiNgdEVuBLcCcTsRpZv3LyZaZDaLpETEC\nEBE7gemp/Ahge81+O1KZmVnLJrf6RkkzgFuBIeB14MsR8UVJ04CvATOBrcAFEfFKDrGaDYD9KWrU\namhoJjt3bi2k7j7Q0h2AFy9evGe7UqlQqVRyCsfMylatVqlWq7nUpVbvMi5pGBiOiPWSDgYeI+ua\
nvxR4MSKul3Q1MC0iFtR5fwzSHc6LuUv8ntpdt+tuqu6yzzlJRETH50BJmgncFxHvTc83AZWIGElt\n2YMRMVvSAiAiYkna7zvAooh4uE6dA9WG2RuKac/LPz9tfO20Xy0PI0bEzohYn7Z/CWwCZtB4LoSZ\nWVmUHqPuBS5J2xcD99SUXyhpiqSjgWOBtZ0K0sz6U8vDiLUkzQJOAtYAQ7VzISRNH+etZmaFknQH\nUAEOl/QMsAi4DrhL0mXANrIrEImIjZKWAxuBXcDl7r4ys3a1nWylIcS7gSsj4peSxjZMDRsqz3cw\n6295znloVURc1OClDzbY/1rg2uIiMrNB0/KcLQBJk4FvAd+OiC+ksrpzIeq8d6D+w+g5W667G+ou\n+5wra85WEQatDbM3eM7WYCplzlbyFWDjaKKVNJoLYWZmZjZw2rka8Uzge8CTZCl+ANeQTSZdDhxJ\nmgsRET+v8/6B+l+he7ZcdzfUXfY5554t6wfu2RpM7bRfbQ0jtqMbG6rh4VmMjGwr8Cf05h9o190/\ndZd9zjnZsn7gZGswOdnKiXufXHe/1132Oedky/qBk63BVOacLTMzMzMbh5MtMzMzswI52TIzMzMr\nkJMtMzMzswLlcrueTnn++ee57bbbyg7DzMzMrGk9lWwtXbqUv/mbu5F+P/e6X3vtB7nXaWZmZtZT\nyRZAxAfYvXtJATX/LeCEy8zMzPLlOVtmZmZmBXKyZTYw9kdSIY/h4Vll/3JmZl2r54YRzaxVr1LU\n6vQjI32xKLyZWSHcs2VmZmZWoMKSLUnnSHpa0n9Kurqon2Nmlje3X2aWp0KSLUmTgC8BZwPvBj4q\n6V1F/KzOqJYdQJOqZQfQpGrZATSpWnYA+6BadgB9o9/ar2q1WnYITemVOHtJr3ymvRJnO4rq2ZoD\nbImIbRGxC7gTmF/Qz+qAatkBNKladgBNqpYdQJOqZQewD6plB9BP+qr96pU/ZL0SZy/plc+0V+Js\nR1HJ1hHA9prnz6YyM7Nu5/bLzHLVU1cjHnbYYUya9HcccMC/5l73b37zCrt25V6tmVlTnn76aebO\nnZtrnQcccAAXXXRRrnWa2b5TRP6Xgks6A1gcEeek5wuAiIglNfsUcw26mXW1iOjqdSKaab9Sudsw\nswHTavtVVLK1H7AZOAt4DlgLfDQiNuX+w8zMcuT2y8zyVsgwYkS8JukvgJVk88JucUNlZr3A7ZeZ\n5a2Qni0zMzMzy3RkBXlJfyjph5Jek3RKTflMSb+S9Hh63Fjz2imSNqRFBf+hzDjTawslbZG0SdLc\nmvKOxzkmrkWSnq35DM+ZKOYydfNikZK2SnpC0jpJa1PZNEkrJW2WtELS1BLiukXSiKQNNWUN4yrz\ne28Qa08do400eyxImirprvQ7PSXp9G6MM+07KX0n93YyxvSzJ4xT0gxJq9Pn+KSkv+xgfBO2VZK+\nmI7f9ZJO6lRsY2IYN05JF6V27QlJD0l6Txlxpliaav8lnSZpl6SPdDK+mp/fzHdfSX8rfijpwQkr\njYjCH8DxwDuB1cApNeUzgQ0N3vMwcFravh84u8Q4ZwPryIZdZwE/4o1ewY7HOSbmRcCn65Q3jLms\nB1ly/6P0vb8FWA+8q8yYxsT3E2DamLIlwF+n7auB60qI633ASbXnSqO4gBPK/N4bxNozx+gEv1tT\nxwLwVeDStD0ZOLQb40yvfwr4F+Debvw8gWHgpLR9MNlcusLbjGbaKuBc4N/T9unAmhI+w2biPAOY\nmrbPKSPOZmOt2e+7wLeAj3RjnMBU4CngiPT8bRPV25GerYjYHBFbgHqz+PcqkzQMHBIRj6SiW4Hz\nCwwRGDfO+cCdEbE7IrYCW4A5ZcVZR73PtW7MHY1qb92+WKTYu7d3PrAsbS+jhO83Ih4CXh5T3Ciu\neZT4vTeIFXrnGB3PhMeCpEOB34uIpQDpd/tF50IEmjxmJc0Az
gNu7lBcY00YZ0TsjIj1afuXwCY6\ns+ZZM23VfLI2n4h4GJgqaagDsdWaMM6IWBMRr6Snayhvzbhm2/8rgLuB5zsZXI1m4rwI+HpE7ACI\niBcmqrQbbkQ9K3VjPyjpfansCLKFBEeVvajg2EUOd6SybonzL1I39s01XfGNYi5Tty8WGcADkh6R\n9PFUNhQRI5A1/MD00qJ7s+kN4urG7x165xgdT6PPvNbRwAuSlqZ27SZJB3Y0yubiBLgBuIrsuC9D\ns3ECIGkWWa/pw4VH1lxb1Q3H7762qR8Hvl1oRI1NGKuktwPnR8Q/Uf8/aJ3QzGd6HHBYylsekfSx\niSrN7WpESQ8AtVm9yE7iz0bEfQ3e9l/AURHxsrI5Ut+UdEJeMeUYZ6nGixm4EfhfERGS/g74PNkJ\nZfvuzIh4TtJvAyslbWbvP0TdekVJt8YFPXSMjnOu/U2d3et95pOBU4BPRsSjyuZxLiAbSu2aOCV9\nGBiJiPWSKhT0hy2Hz3O0noPJejuuTD1cto8k/QFwKdlQf7f6B7Ih5VHduibf6Hn+AeCtwA8k/SAi\nfjTeG3IRER9q4T27SEMOEfG4pB+TZYw7gCNrdp2RykqJc5x4Couz1j7E/GVgNGHsSGz7aAdwVM3z\nbohpj4h4Lv37M0nfJOtOHpE0FBEjadi4rK7tsRrF1XXfe0T8rOZpVx+j451raeL/RMfCs8D2iHg0\nPb+bN//x6JY4zwTmSToPOBA4RNKtEfEnXRYnkiaTfY63RcQ9ecY3jmbaqm44fptqUyW9F7gJOCci\n6g3zd0IzsZ4K3ClJwNuAcyXtiohOXsDRTJzPAi9ExK+BX0v6HnAi2VyvusoYRtyTqUp6m6RJafsY\n4FjgJ6lL+RVJc9KH/idAp06yveIE7gUulDRF0tEpzrXdEGdqpEZ9BPjheDF3MrY6HgGOVXYV6hTg\nQrI4SyfpoPS/ZyS9FZgLPEkW3yVpt4vp/HE4Sux9TF6Stmvj6obv/U2x9tgxOp4Jj4U0LLZd0nGp\n6CxgY0eie0MzcV4TEUdFxDFk5+HqvBOtJjR7bn0F2BgRX+hEUEkzbdW9ZG3+6F0Hfj46LNpBE8Yp\n6Sjg68DHIuLHHY6v1oSxRsQx6XE0WYJ9eYcTrabiJDtW3ydpP0kHkV0gMf5afHnP5K/3IJv4uB34\nf2QrMn87lY82vI8DjwLn1bznd8n+2G0BvlBmnOm1hWRZ6yZgbplxjon5VmAD2RUT3ySbYzRuzGU+\nyK6G2Zw+rwVlx1MT19HpM1yXvs8FqfwwYFWKeSXwWyXEdgfZkPurwDNkQwHTGsVV5vfeINaeOkbH\n+d3qHgvA7wDfqtnvRLIGez3wb6Qrwbotzpr93085VyNOGCdZD9xrNefm42S9M52Ib6+2Cvhz4M9q\n9vlSOn6foOYK9g5/juPGSdab/GL67NaRdRR0PM5mP9Oafb9CCVcj7sN3/1dkVyRuAK6YqE4vampm\nZmZWoG64GtHMzMysbznZMjMzMyuQky0zMzOzAjnZMjMzMyuQky0zMzOzAjnZMjMzMyuQky0zMzOz\nAk2YbEm6Jd1WYUNN2fWSNqUby3493el+9LWFkrak1+cWFbiZWTMkbZX0hKR1ktamsmmSVkraLGlF\nzc2x3YaZWe6a6dlaCpw9pmwl8O6IOIlshdWFAOkm0hcAs4FzgRvTbWzMzMryOlCJiJMjYk4qWwCs\niojjgdW4DTOzAk2YbEXEQ6SbRdeUrYqI19PTNWQ3agSYB9wZEbsjYitZIjYHM7PyiL3buvnAsrS9\njOxWXeA2zMwKkMecrcuA+9P2EWT3Fhy1I5WZmZUlgAckPSLp46lsKNJNgyO7ofz0VO42zMxyN7md\nN0v6LLArIv41p3jMzPJ2ZkQ8J+m3gZWSNpMlYLV8k1gzK0zLyZakS4DzgA/UFO8Ajqx5PiOV1Xu/\nGzezARQRHZ0DFRHPpX9/J
umbZMOCI5KGImJE0jDwfNrdbZiZNdRq+9XsMKLSI3sinQNcBcyLiFdr\n9rsXuFDSFElHA8cCaxtVGhFd9Vi0aFHpMXRLPENDM/f9aGpgaGhm330+vRBPN8bUaZIOknRw2n4r\nMBd4kqytuiTtdjFwT9ru6TasF46BXo/zjeM4cn7kfzz1ymfaK3G2Y8KeLUl3ABXgcEnPAIuAa4Ap\nZPMgANZExOURsVHScmAjsAu4PNqN0EoxMrKNvUdWFqfHvtbli7msNEPAN1Iv1GTg9ohYKelRYLmk\ny4BtZFcg4jbMzIowYbIVERfVKV46zv7XAte2E5SZWR4i4qfASXXKXwI+2OA9bsPMLFdeQb5GpVIp\nO4Q36bZ4sg7O7tFtn0+3xQPdGZN1Vq8cA70SZy/plc+0V+Jsh8rqIZfk3vkulg0P5/X9qO3xbusP\nkogOT5AvituwwZVv+7inVreTXa6d9ss9W31keHgWknJ5mJmZWT7cs9VH8u6Ncs+W5c09W9YP3LM1\nmNyzZWZmZtalnGyZmZmZFcjJlpmZmVmBnGyZmZmZFcjJlpmZmVmBnGyZmZmZFcjJlpmZmVmBnGyZ\nmZmZFcjJlpmZmVmBJky2JN0iaUTShpqyaZJWStosaYWkqTWvLZS0RdImSXOLCtzMzMysFzTTs7UU\nOHtM2QJgVUQcD6wGFgJIOgG4AJgNnAvcKN9oz8zMzAbYhMlWRDwEvDymeD6wLG0vA85P2/OAOyNi\nd0RsBbYAc/IJ1czMzKz3tDpna3pEjABExE5geio/Athes9+OVGZmZmY2kCbnVE9LtypfvHjxnu1K\npUKlUskpHDPrBtVqlWq1WnYYSJoEPAo8GxHzJE0DvgbMBLYCF0TEK2nfhcBlwG7gyohYWU7UZtYv\nFDFxniRpJnBfRLw3Pd8EVCJiRNIw8GBEzJa0AIiIWJL2+w6wKCIerlNnNPOzrXnZ9Li8PtN86/J3\nbZAdoxHR8Xmckj4F/C5waEq2lgAvRsT1kq4GpkXEgjTv9HbgNGAGsAp4Z73Gym3Y4Mq3rd1Tq9vJ\nLtdO+9XsMKLSY9S9wCVp+2LgnpryCyVNkXQ0cCywtpXAzMzyIGkGcB5wc02x552aWcc0s/TDHcB/\nAMdJekbSpcB1wIckbQbOSs+JiI3AcmAjcD9wuf/rN77h4VlIyuVhZnXdAFzFm7sihjzv1Mw6ZcI5\nWxFxUYOXPthg/2uBa9sJapCMjGwj36E/Mxsl6cPASESsl1QZZ1fPOzWzN8lzzmlTc7aK4PkOmW6e\nZ+U5W5a3Ts/ZkvT3wB+TTXY/EDgE+AZwKp53ai3ynK3B1Ik5W2ZmPSciromIoyLiGOBCYHVEfAy4\nD887NbMOyWvpBzOzXnIdsFzSZcA2sjtfEBEbJY3OO92F552aWQ48jFgyDyPaIClr6YciuA0bXB5G\nHEweRjQzMzPrUk62zMzMzArkZMvMzMysQE62zMzMzArkZMvMzMysQE62zMzMzArkZMvMzMysQE62\nzMzMzArUVrIlaaGkpyRtkHR7usXFNEkrJW2WtELS1LyCNTMzM+s1LSdbkmYCnwBOjoj3kt3656PA\nAmBVRBwPrAYW5hGomZmZWS9qp2frF8BvgLdKmgwcCOwA5gPL0j7LgPPbitDMzMysh7WcbEXEy8Dn\ngWfIkqxXImIVMBQRI2mfncD0PAI1MzMz60WTW32jpGOATwEzgVeAuyT9EXvfnbPhnTUXL168Z7tS\nqVCpVFoNx8y6ULVapVqtlh2GmVmp1OpdxiVdAHwoIj6Rnn8MOAP4AFCJiBFJw8CDETG7zvvDdzjP\n++7x3VuXv2uD7HiPCJUdRx7chg2ufNvtPbW6nexy7bRf7czZ2gycIekAZUfeWcBG4F7gkrTPxcA9\nbfwMMzMzs57W8jBiRDwh6VbgMeA1YB1wE3AIsFzSZcA24II8AjUzMzPrRS0PI7b9g90FD3g
Y0QZL\np4cRJe0PfA+Ykh73RMQ1kqYBXyObc7oVuCAiXknvWQhcBuwGroyIlQ3qdhs2oDyMOJjaab+cbJXM\nyZYNkjLmbEk6KCJ+JWk/4PvAZ4B5wIsRcb2kq4FpEbFA0gnA7cBpwAxgFfDOeo2V27DB5WRrMJU1\nZ8vMrOtFxK/S5v5kbd7LNF4PcB5wZ0TsjoitwBZgTueiNbN+5GTLzPqapEmS1gE7gWpEbKTxeoBH\nANtr3r46TSTeAAAQBUlEQVQjlZmZtazlCfJmZr0gIl4HTpZ0KLBCUoV9WA/QzKxdTrbMbCBExC8k\n3Q+cCoxIGqpZD/D5tNsO4Miat81IZXV5YWaz/pXnosyeIF8yT5C3QVLC1YhvA3ZFxCuSDgRWAJ8D\n5gIvRcSSBhPkTycbPnwAT5C3MTxBfjC10365Z8vM+tnvAMvSwsuTgNsi4rtpDtde6wFGxEZJy8kW\naN4FXO6Mysza5Z6tkrlnywaJb9dj/cA9W4PJSz+YmZmZdSknW2ZmZmYFcrJlZmZmViAnW2ZmZmYF\naivZkjRV0l2SNkl6StLpkqZJWilps6QVkqbmFayZmZlZr2m3Z+sLwP0RMRs4EXgaWACsiojjgdXA\nwjZ/hpmZmVnPannph3Tri3UR8Y4x5U8D769ZmbkaEe+q835fNo2XfrDB4qUfrB946YfBVNbSD0cD\nL0haKulxSTdJOojGN3g1MzMzGzjtrCA/GTgF+GREPCrpBrIhxKZv8Or7ipn1tzzvLWZm1qvaGUYc\nAn4QEcek5+8jS7beAVRqhhEfTHO6xr7fXfB4GNEGi4cRrR94GHEwlTKMmIYKt0s6LhWdBTwF3Atc\nksouBu5p9WeYmZmZ9bq27o0o6UTgZuAtwE+AS4H9gOXAkaQbvEbEz+u81/8rxD1bNljcs2X9wD1b\ng6md9ss3oi6Zky0bJE62rB842RpMvhF1hw0Pz0JSLg8zMzPrb+7ZasGg9Ea5Z8vy5p4t6wfu2RpM\n7tkyMzMz61JOtsysb0maIWl1unfrk5L+MpU3vIerpIWStqR7vs4tL3oz6xceRmyBhxH3va5e/a4t\nX50eRkxr/Q1HxHpJBwOPAfPJrpx+MSKul3Q1MC0iFkg6AbgdOA2YAawC3lmvserlNsza42HEweRh\nRDOzOiJiZ0SsT9u/BDaRJVHzgWVpt2XA+Wl7HnBnROyOiK3AFmBOR4M2s77jZMs6YP/crt4cHp5V\n9i9jPUrSLOAkYA2N7+F6BLC95m07UpmZWcvauTeiWZNeJa8u95GRvriQzTosDSHeDVwZEb+U1PQ9\nXM3M2uVky8z6mqTJZInWbRExevuwEUlDNfdwfT6V7yC7+8WoGamsrsWLF+/ZrlQqVCqVHCM3szJV\nq1Wq1WoudXmCfAs8Qb7cunr1uLFy1tmSdCvwQkR8uqZsCfBSRCxpMEH+dLLhwwfwBHkbwxPkB5Nv\n19NhTrbKratXjxsr5WrEM4HvAU+SHYQBXAOspcE9XCUtBP4U2EU27LiyQd0924ZZe5xsDSYnWx3m\nZKvcunr1uDGvIG/9wcnWYCp16QdJkyQ9Lune9LzhYoFmZmZmgyaPpR+uBDbWPF8ArIqI44HVwMIc\nfoaZmZlZT2or2ZI0AzgPuLmmuNFigWZmZmYDp92erRuAq3jz4HWjxQLNzMzMBk7LyZakDwMj6VYY\n400Y84w/MzMzG1jtLGp6JjBP0nnAgcAhkm4DdjZYLHAvXhDQrL/luSigmVmvymXpB0nvBz4TEfMk\nXQ+8OHaxwDrv6dnLpr30Q7l19epxY176wfqDl34YTKUu/VDHdcCHJG0GzkrPzczMzAaSFzVtgXu2\nyq2rV48bc8+W9Qf3bA2mbuvZMjMzM7PEyZb1mP2RlNtjeHhW2b+QmZn1uXauRjQrwavk2X0/MtIX\nI1pmZtbF3LNlZmZmViAnW2ZmZmYFcrJlZmZmViAnW2Z
mZmYFcrJlZmZmViAnW2bW1yTdImlE0oaa\nsmmSVkraLGmFpKk1ry2UtEXSJklzy4nazPqJky0z63dLgbPHlC0AVkXE8cBqYCGApBOAC4DZwLnA\njcqWCzcza5mTLTPraxHxEPDymOL5wLK0vQw4P23PA+6MiN0RsRXYAszpRJxm1r+cbJnZIJoeESMA\nEbETmJ7KjwC21+y3I5WZmbWs5RXkJc0AbgWGgNeBL0fEFyVNA74GzAS2AhdExCs5xGpmVpSWbkuw\nePHiPduVSoVKpZJTOGZWtmq1SrVazaUutXqXcUnDwHBErJd0MPAYWdf8pcCLEXG9pKuBaRGxoM77\no1fvcJ7vHd9dV3l1ZfX16nHYiyQRER2fAyVpJnBfRLw3Pd8EVCJiJLVlD0bEbEkLgIiIJWm/7wCL\nIuLhOnX2bBtm7cn3b8CeWt0Wdbl22q+WhxEjYmdErE/bvwQ2ATNoPBfCzKwsSo9R9wKXpO2LgXtq\nyi+UNEXS0cCxwNpOBWlm/SmXG1FLmgWcBKwBhmrnQkiaPs5bzcwKJekOoAIcLukZYBFwHXCXpMuA\nbWRXIBIRGyUtBzYCu4DL3X1lZu1qeRhxTwXZEGIV+NuIuEfSSxFxWM3rL0bE4XXe17NtmIcR+6Wu\nrL5ePQ57UVnDiEXo5TbM2uNhxMHUTvvVVs+WpMnA3cBtETHaDT8iaahmLsTzjd7vyaVm/S3PCaZm\nZr2qrZ4tSbcCL0TEp2vKlgAvRcQST5BvqjbXVVpdWX29ehz2IvdsWT9wz9Zgaqf9audqxDOB7wFP\nkh11AVxDNpl0OXAkaS5ERPy8zvt7tqFystUvdWX19epx2IucbFk/cLI1mEpJttrVyw2Vk61+qSur\nr1ePw17kZMv6gZOtwVTK0g9mZmZmNjEnW2ZmZmYFymWdrW737LPPcvfdd5cdhpmZmQ2ggUi2vvCF\nL3HDDf+X/fY7re26XnvtsRwiMjMzs0ExEMlWBLz22n/ntdf2WoGiBZ8HHsqhHjMzMxsEnrNlZmZm\nViAnW2ZmZmYFcrJlA25/JOXyGB6eVfYvY2ZmXWgg5myZNfYqeS1OODLSF2t1mplZztyzZWZmZlYg\nJ1tmZmZmBXKyZWZmZlagwpItSedIelrSf0q6uqifY9Y9PNm+X7j9MrM8FZJsSZoEfAk4G3g38FFJ\n7yriZ+WrWnYAY1TLDmCMatkBjFEtO4AxRifbt/8YGdmWS0TVajWXegZJ77Zf9fXKMdArcfaSXvlM\neyXOdhTVszUH2BIR2yJiF3AnML+gn5WjatkBjFEtO4AxqmUHMEa17AC63iA0YgXo0farvl45Bnol\nzl7SK59pr8TZjqKWfjgC2F7z/FmyBszMmpINSebhn//5q+zcuTWXugaE2y8zy9VArLM1Zcpb2H//\nr7L//t8fd79f/3ozBxww/o2mf/ObH/PrX+cZnVk9ea3/tZiRkc/lUI8VbevWrVxxxRW51jllyhTe\n85735Fqnme07ReSzoOObKpXOABZHxDnp+QIgImJJzT75/2Az63oR0dWrvzbTfqVyt2FmA6bV9quo\nZGs/YDNwFvAcsBb4aERsyv2HmZnlyO2XmeWtkGHEiHhN0l8AK8km4d/ihsrMeoHbLzPLWyE9W2Zm\nZmaWyW3ph2YWAZT0RUlbJK2XdHJN+VZJT0haJ2ltTfk0SSslbZa0QtLUkuO5XtKmtP/XJR1aZjw1\nr39G0uuSDis7HklXpM/oSUnXNRtPUTFJOk3S2tFySacWGM9JNeVTJd2VPounJJ2eyjt5TDcTTyeP\n6QnjqXl9n4/pIjTzO6b9TpO0S9JHOhnfmBia+T4q6Vz4oaQHOx1jimHcOCUdLunb6Zh5UtIlJYSJ\npFskjUjaMM4+dY/vTpooTkkXpbbxCUkPSSrliolmPs+0X6nnUpPf+76fRxHR9oMsafsRMBN4C7Ae\neNeYfc4F/j1tnw6
sqXntJ8C0OvUuAf46bV8NXFdyPB8EJqXt64Bry4wnvTYD+A7wU+Cwkj+fCtnQ\ny+T0/G1dcAw9CMytef+DHYrnq8ClaXsycGjJx3SjeMo6puvG0+oxXcSjmd+xZr/vAt8CPtKtsQJT\ngaeAI9Lzps/PDse5aPQ4BN4GvEhqUzoc6/uAk4ANDV5veHx3WZxnAFPT9jndGmfN8VH2uTTR59nS\neZRXz1YziwDOB24FiIiHgamShtJron4v23xgWdpeBpxfZjwRsSoiXk9P15D9USgtnuQG4Kom4yg6\nnv9BljzsTu97oQtieo7s5AD4LWBH0fGk3qHfi4il6bXdEfGLmvd09JgeL54yjukJPh9o7ZguQrOL\nm14B3A0838ngxmgm1ouAr0fEDtjn8zMvzcS5EzgkbR8CvDjapnRSRDwEvDzOLuO1Rx0zUZwRsSYi\nXklP15CtI9dxTXye0AXnUhNxtnQe5ZVs1VsEcOwXOnafHTX7BPCApEckfaJmn+kRMQIQETuB6SXH\nU+sy4NtlxiNpHrA9Ip5sMo5C4wGOA35f0hpJD+7LkF2BMS0A/rekZ4DrgYUdiOdo4AVJSyU9Lukm\nSQemfco4pseLp1anjumG8bRxTBdhwt9R0tuB8yPin8gS/rI0830cBxyWzs1HJH2sY9G9oZk4vwy8\nW9J/AU8AV3Yotn01XnvUrT5O8+d4R3XRuTSRls6jwm5EvY/OjIhTgPOAT0p6X4P9OjWbf9x4JH0W\n2BURd5QVT/rjdA1Zl/ue0MqKJ5VPJhvKOwP4a2B5h+IZL6ZbgCsi4ijgU8BXOhDLZOAU4B9TTL8i\nS/pg7++oE8f0ePFkQXX2mK4bT8nHdKv+gWw4eFQ3xzv6uZ9LNpz0PyUdW25IdS0EnoiItwMnA/8o\n6eCSY+p5kv4AuJQ3H6/dpFfOpZbOo7ySrR3AUTXPZ7D3cM0O4Mh6+0TEc+nfnwHf4I1bY4yMdstK\nGqb5rsWi4iFN1jyPrCuxWUXE8w5gFvCEpJ+m/R+T1ExPSVGfz7PAv6XXHgFel3R4E/EUGdPpEfHN\n9NrdNH/blXbieZasd+bRVH432ckJsLOEY3q8eMo4phvF084xXYRmfsdTgTtTvH9IlhjM61B8tZqJ\n9VlgRUT8OiJeBL4HnNih+EY1E+eZwF0AEfFjsrl73Xgj8IbtUbeR9F7gJmBeREw0lFeWbjmXJtLa\nedTKBLI6E8b2441Jj1PIJj3OHrPPebwxmfAM0iQ94CDg4LT9VuD7vDGheQlwddrel8nERcVzDtnE\nuMO74fMZ8/6f0mASfQc/nz8HPpe2jwO2lfgZfSg9fwx4f9o+C3ik6HjS8/8DHJe2FwFLyjqmJ4in\n48f0ePG0ekwX8Wjmdxyz/1LKm9TbzPfxLuCBtO9BwJPACV0Y5+eBRWl7iGyorpQLJciS/ycbvNbw\n+O6yOI8CtgBnlBVfM3GO2a+0c6mJz7Ol8yjP4M4hW3V5C7Aglf058Gc1+3wpnWhPAKeksqPTCbcu\nBb2gZv/DgFWp3pXAb5UczxZgG/B4etxYZjxj6v/JvjRIBX0+bwFuS+WPkpKckmM6FXg4vfYD4OSi\n40nlJwKPpLj+jTeuBur4MT1BPB0/pseLp51juohHM79jzb5fodw/EM18H39FllxvIBte77o4ya5A\nvC8dMxvIVu8vI847gP8iu1HpM2RDcE0d390UJ9kcuBfT+b0OWNuNcY7Zt7RzqcnvfZ/PIy9qamZm\nZlagbpkgb2ZmZtaXnGyZmZmZFcjJlpmZmVmBnGyZmZmZFcjJlpmZmVmBnGyZmZmZFcjJlpmZmVmB\nnGyZmZmZFej/A1Bv+vRapYP7AAAAAElFTkSuQmCC\n", + "text/plain": [ + "" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + 
"source": [ + "# batchnorm working!\n", + "cost_val, mean_norm, inv_std_norm, mean_learn, inv_std_learn = train_mean(*batch)\n", + "print cost_val\n", + "fig, ax = plt.subplots(2,2, figsize=[10,5])\n", + "ax[0,0].hist(np.asarray(mean_norm).ravel())\n", + "ax[0,1].hist(mean_learn.ravel(1))\n", + "ax[1,0].hist(np.asarray(inv_std_norm).ravel())\n", + "ax[1,1].hist(inv_std_learn.ravel(1))" + ] + }, + { + "cell_type": "code", + "execution_count": 20, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "data": { + "text/plain": [ + "[['input', (50, 784)],\n", + " ['input_corr', (50, 784)],\n", + " ['dirty_enc_dense_0', (50, 500)],\n", + " ['dirty_enc_batchn_0_norm', (50, 500)],\n", + " ['dirty_enc_noise_0_', (50, 500)],\n", + " ['dirty_enc_batchn_0_learn', (50, 500)],\n", + " ['dirty_enc_activation_0', (50, 500)],\n", + " ['dirty_enc_dense_1', (50, 10)],\n", + " ['dirty_enc_batchn_1_norm', (50, 10)],\n", + " ['dirty_enc_noise_1_', (50, 10)],\n", + " ['dirty_enc_batchn_1_learn', (50, 10)],\n", + " ['dirty_enc_softmax', (50, 10)],\n", + " ['dec_batchn_softmax', (50, 10)],\n", + " ['dec_combinator_0', (50, 10)],\n", + " ['dec_batchn_0', (50, 10)],\n", + " ['dec_dense_1', (50, 500)],\n", + " ['dec_batchn_dense_1', (50, 500)],\n", + " ['dec_combinator_1', (50, 500)],\n", + " ['dec_batchn_1', (50, 500)],\n", + " ['dec_dense_2', (50, 784)],\n", + " ['dec_combinator_2', (50, 784)]]" + ] + }, + "execution_count": 20, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "map(lambda x: [x.name, x.output_shape], dirty_net.values())" + ] + }, + { + "cell_type": "code", + "execution_count": 21, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "data": { + "text/plain": [ + "[['input', (50, 784)],\n", + " ['clean_enc_dense_0', (50, 500)],\n", + " ['clean_enc_batchn_0_norm', (50, 500)],\n", + " ['clean_enc_batchn_0_learn', (50, 500)],\n", + " ['clean_enc_activation_0', (50, 500)],\n", + " ['clean_enc_dense_1', (50, 10)],\n", + " 
['clean_enc_batchn_1_norm', (50, 10)],\n", + " ['clean_enc_batchn_1_learn', (50, 10)],\n", + " ['clean_enc_softmax', (50, 10)]]" + ] + }, + "execution_count": 21, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "map(lambda x: [x.name, x.output_shape], clean_net.values())" + ] + }, + { + "cell_type": "code", + "execution_count": 42, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [ + "from lasagne.layers import get_all_layers" + ] + }, + { + "cell_type": "code", + "execution_count": 44, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [ + "W = lasagne.init.GlorotUniform()\n", + "beta=lasagne.init.Constant(0)\n", + "gamma=lasagne.init.Constant(1)\n", + "\n", + "shp = (500,)\n", + "zero_const = T.zeros(shp, np.float32)\n", + "one_const = T.ones(shp, np.float32)" + ] + }, + { + "cell_type": "code", + "execution_count": 45, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "data": { + "text/plain": [ + "['InputLayer',\n", + " 'DenseLayer',\n", + " 'BatchNormLayer',\n", + " 'NonlinearityLayer',\n", + " 'BatchNormLayer',\n", + " 'NonlinearityLayer']" + ] + }, + "execution_count": 45, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "l1b = InputLayer((64, 768))\n", + "l2b = batch_norm(batch_norm(DenseLayer(l1, num_units=500, nonlinearity=tanh), \n", + " beta=None, gamma=None), alpha=1., \n", + " beta=beta, gamma=gamma, mean=zero_const, inv_std=one_const)\n", + "[l.__class__.__name__ for l in get_all_layers(l2b)]" + ] + }, + { + "cell_type": "code", + "execution_count": 46, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "data": { + "text/plain": [ + "['InputLayer',\n", + " 'DenseLayer',\n", + " 'BatchNormLayer',\n", + " 'BatchNormLayer',\n", + " 'NonlinearityLayer']" + ] + }, + "execution_count": 46, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "l1 = InputLayer((64, 768))\n", + "l2 = DenseLayer(l1, num_units=500, 
nonlinearity=linear)\n", + "lb1 = BatchNormLayer(l2, alpha=0.1, beta=None, gamma=None)\n", + "lb2 = BatchNormLayer(lb1, alpha=1., beta=beta, gamma=gamma, \n", + " mean=zero_const, inv_std=one_const)\n", + "l3 = NonlinearityLayer(lb2, nonlinearity=tanh)\n", + "[l.__class__.__name__ for l in get_all_layers(l3)]" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "References: \n", + "http://arxiv.org/pdf/1411.7783.pdf \n", + "http://arxiv.org/pdf/1507.02672v2.pdf \n", + "http://arxiv.org/pdf/1511.06430.pdf" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 2", + "language": "python", + "name": "python2" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 2 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython2", + "version": "2.7.6" + } + }, + "nbformat": 4, + "nbformat_minor": 0 +} diff --git a/examples/ladder_nets/ladder_nets.py b/examples/ladder_nets/ladder_nets.py new file mode 100644 index 0000000..31961da --- /dev/null +++ b/examples/ladder_nets/ladder_nets.py @@ -0,0 +1,333 @@ +from lasagne.layers import InputLayer, MergeLayer, DenseLayer, DropoutLayer, \ + GaussianNoiseLayer, NonlinearityLayer +from lasagne.layers.normalization import BatchNormLayer +from lasagne.nonlinearities import softmax, linear, tanh, rectify +import lasagne +import theano +from theano import tensor as T +import numpy as np +from collections import OrderedDict + + +def _milaUDEM_params(shape, name): + values = np.zeros((6,) + shape, dtype=theano.config.floatX) + + b_lin = theano.shared(values[0], + name='bias_lin_{}'.format(name)) + b_sigm = theano.shared(values[1], + name='bias_sigm_{}'.format(name)) + + w_u_lin = theano.shared(values[2], + name='weight_u_lin_{}'.format(name)) + w_u_sigm = theano.shared(values[3], + name='weight_u_sigm_{}'.format(name)) + w_zu_lin = theano.shared(values[4], + 
name='weight_zu_lin_{}'.format(name)) + w_zu_sigm = theano.shared(values[5], + name='weight_zu_sigm_{}'.format(name)) + + values = np.ones((3,) + shape, dtype=theano.config.floatX) + w_z_lin = theano.shared(values[0], + name='weight_z_lin_{}'.format(name)) + w_z_sigm = theano.shared(values[1], + name='weight_z_sigm_{}'.format(name)) + w_sigm = theano.shared(values[2], + name='weight_sigm_{}'.format(name)) + + # combinator params used in combinator calculations + return [w_u_lin, w_z_lin, w_zu_lin, w_u_sigm, w_z_sigm, + w_zu_sigm, w_sigm, b_lin, b_sigm] + + +def _curiousAI_params(shape, name): + values = np.zeros((8,) + shape, dtype=theano.config.floatX) + + b_mu_sig = theano.shared(values[0], + name='b_mu_sig_{}'.format(name)) + b_mu_lin = theano.shared(values[1], + name='b_mu_lin_{}'.format(name)) + b_v_sig = theano.shared(values[2], + name='b_v_sig_{}'.format(name)) + b_v_lin = theano.shared(values[3], + name='b_v_lin_{}'.format(name)) + + w_mu_lin = theano.shared(values[4], + name='w_mu_lin_{}'.format(name)) + w_v_lin = theano.shared(values[5], + name='w_v_lin_{}'.format(name)) + w_mu = theano.shared(values[6], + name='w_mu_{}'.format(name)) + w_v = theano.shared(values[7], + name='w_v_{}'.format(name)) + + values = np.ones((2,) + shape, dtype=theano.config.floatX) + w_mu_sig = theano.shared(values[0], + name='w_mu_sig_{}'.format(name)) + w_v_sig = theano.shared(values[1], + name='w_v_sig_{}'.format(name)) + + # combinator params used in combinator calculations + return [w_mu_lin, w_v_lin, w_mu_sig, w_v_sig, w_mu, w_v, + b_mu_lin, b_v_lin, b_mu_sig, b_v_sig] + + +def _get_combinator_params(combinator_type, shape, name): + if combinator_type == 'milaUDEM': + return _milaUDEM_params(shape, name) + elif combinator_type == 'curiousAI': + return _curiousAI_params(shape, name) + + +def _combinator_MILAudem(z, u, combinator_params, bc_pttrn): + w_u_lin, w_z_lin, w_zu_lin, w_u_sigm, w_z_sigm, w_zu_sigm, w_sigm, \ + b_lin, b_sigm = combinator_params + + lin_out = 
w_z_lin.dimshuffle(*bc_pttrn) * z + \ + w_u_lin.dimshuffle(*bc_pttrn) * u + \ + w_zu_lin.dimshuffle(*bc_pttrn) * z * u + \ + b_lin.dimshuffle(*bc_pttrn) + sigm_pre = w_z_sigm.dimshuffle(*bc_pttrn) * z + \ + w_u_sigm.dimshuffle(*bc_pttrn) * u + \ + w_zu_sigm.dimshuffle(*bc_pttrn) * z * u + \ + b_sigm.dimshuffle(*bc_pttrn) + sigm_out = T.nnet.sigmoid(sigm_pre) + output = lin_out + w_sigm.dimshuffle(*bc_pttrn) * sigm_out + + return output + + +def _combinator_curiousAI(z, u, combinator_params, bc_pttrn): + w_mu_lin, w_v_lin, w_mu_sig, w_v_sig, w_mu, w_v, \ + b_mu_lin, b_v_lin, b_mu_sig, b_v_sig = combinator_params + + mu_sig_pre = w_mu_sig.dimshuffle(*bc_pttrn) * u + \ + b_mu_sig.dimshuffle(*bc_pttrn) + + mu_lin_out = w_mu_lin.dimshuffle(*bc_pttrn) * u + \ + b_mu_lin.dimshuffle(*bc_pttrn) + + mu_u = w_mu.dimshuffle(*bc_pttrn) * T.nnet.sigmoid(mu_sig_pre) + \ + mu_lin_out + + v_sig_pre = w_v_sig.dimshuffle(*bc_pttrn) * u + \ + b_v_sig.dimshuffle(*bc_pttrn) + v_lin_out = w_v_lin.dimshuffle(*bc_pttrn) * u + \ + b_v_lin.dimshuffle(*bc_pttrn) + + v_u = w_v * T.nnet.sigmoid(v_sig_pre) + v_lin_out + + output = (z - mu_u) * v_u + mu_u + + return output + + +def _combinator(z, u, combinator_type, combinator_params): + if u.ndim == 2: + bc_pttrn = ('x', 0) + elif u.ndim == 4: + bc_pttrn = ('x', 0, 'x', 'x') + elif u.ndim == 5: + bc_pttrn = ('x', 0, 'x', 'x', 'x') + + if combinator_type == 'milaUDEM': + return _combinator_MILAudem(z, u, combinator_params, bc_pttrn) + elif combinator_type == 'curiousAI': + return _combinator_curiousAI(z, u, combinator_params, bc_pttrn) + + +class CombinatorLayer(MergeLayer): + """ + """ + def __init__(self, incoming_z, incoming_u, combinator_type, **kwargs): + super(CombinatorLayer, self).__init__( + [incoming_z, incoming_u], **kwargs) + self.combinator_type = combinator_type + z_shp, u_shp = self.input_shapes + + if len(z_shp) != 2: + raise ValueError("The input network must have a 2-dimensional " + "output shape: (batch_size, num_hidden)") + + 
if len(u_shp) != 2: + raise ValueError("The input network must have a 2-dimensional " + "output shape: (batch_size, num_hidden)") + + self.combinator_params = _get_combinator_params(combinator_type, u_shp[1:], + self.name) + + def get_output_shape_for(self, input_shapes): + return input_shapes[0] + + def get_output_for(self, inputs, **kwargs): + z, u = inputs + assert z.ndim == u.ndim + return _combinator(z, u, self.combinator_type, self.combinator_params) + + +def build_encoder(net, num_hidden, activation, name, + p_drop_hidden=0., shared_net=None): + for i, num_nodes in enumerate(num_hidden): + dense_lname = 'enc_dense_{}'.format(i) + nbatchn_lname = 'enc_batchn_{}_norm'.format(i) + noise_lname = 'enc_noise_{}'.format(i) + lbatchn_lname = 'enc_batchn_{}_learn'.format(i) + + if shared_net is None: + # dense pars + W = lasagne.init.GlorotUniform() + # batchnorm pars + beta = lasagne.init.Constant(0) + gamma = None if activation == rectify else lasagne.init.Constant(1) + else: + # dense pars + W = shared_net[dense_lname].get_params()[0] + # batchnorm pars + if activation==rectify: + beta = shared_net[lbatchn_lname].get_params()[0] + gamma = None + else: + beta, gamma = shared_net[lbatchn_lname].get_params() + + net[dense_lname] = DenseLayer(net.values()[-1], num_units=num_nodes, W=W, + nonlinearity=linear, + name='{}_{}'.format(name, dense_lname)) + + shp = net[dense_lname].output_shape[1] + zero_const = T.zeros(shp, np.float32) + one_const = T.ones(shp, np.float32) + + # 1. batchnormalize without learning -> goes to combinator layer + l_name = '{}_{}'.format(name, nbatchn_lname) + net[nbatchn_lname] = BatchNormLayer(net.values()[-1], alpha=0.1, + beta=None, gamma=None, name=l_name) + + if shared_net is None: + # add noise in dirty encoder + net[noise_lname] = GaussianNoiseLayer(net.values()[-1], + sigma=p_drop_hidden, + name='{}_{}_'.format(name, + noise_lname)) + + # 2. 
batchnormalize learning, alpha one in order to depenend only on the batch mean + l_name = '{}_{}'.format(name, lbatchn_lname) + net[lbatchn_lname] = BatchNormLayer(net.values()[-1], alpha=1., + beta=beta, gamma=gamma, name=l_name, + mean=zero_const, inv_std=one_const) + + if i < len(num_hidden) - 1: + act_name = 'enc_activation_{}'.format(i) + net[act_name] = NonlinearityLayer(net.values()[-1], + nonlinearity=activation, + name='{}_{}'.format(name, act_name)) + + net['enc_softmax'] = NonlinearityLayer(net.values()[-1], nonlinearity=softmax, + name='{}_enc_softmax'.format(name)) + + return net['enc_softmax'], net + + +def build_decoder(dirty_net, clean_net, num_nodes, sigma, + combinator_type='milaUDEM'): + L = len(num_nodes) - 1 + + # dirty_enc_dense_1 ... z_L, dirty_enc_softmax .. u_0 + z_L = dirty_net['enc_noise_{}'.format(L)] + dirty_net['u_0'] = BatchNormLayer(dirty_net.values()[-1], beta=None, + gamma=None, name='dec_batchn_softmax') + + comb_name = 'dec_combinator_0' + dirty_net[comb_name] = CombinatorLayer(*[z_L, dirty_net['u_0']], + combinator_type=combinator_type, + name=comb_name) + enc_bname = 'enc_batchn_{}_norm'.format(L) + mu, inv_std = clean_net[enc_bname].get_params() + bname = 'dec_batchn_0' + dirty_net[bname] = BatchNormLayer(dirty_net.values()[-1], alpha=1., + beta=None, gamma=None, name=bname, + mean=mu, inv_std=inv_std) + + for i in range(L): + d_name = 'dec_dense_{}'.format(L-i) + dirty_net[d_name] = DenseLayer(dirty_net.values()[-1], + num_units=num_nodes[i], + nonlinearity=linear, name=d_name) + # dirty_enc_dense_L-l ... z_l, dec_dense_l ... 
u_l + z_l = dirty_net['enc_noise_{}'.format(i)] + dirty_net['u_l'] = BatchNormLayer(dirty_net.values()[-1], beta=None, + gamma=None, + name='dec_batchn_dense_{}'.format(L-i)) + + comb_name = 'dec_combinator_{}'.format(i+1) + dirty_net[comb_name] = CombinatorLayer(*[z_l, dirty_net['u_l']], + combinator_type=combinator_type, + name=comb_name) + enc_bname = 'enc_batchn_{}_norm'.format(L-i-1) + mu, inv_std = clean_net[enc_bname].get_params() + bname = 'dec_batchn_{}'.format(L-i) + dirty_net[bname] = BatchNormLayer(dirty_net.values()[-1], alpha=1., + beta=None, gamma=None, name=bname, + mean=mu, inv_std=inv_std) + + d_name = 'dec_dense_{}'.format(L+1) + dirty_net[d_name] = DenseLayer(dirty_net.values()[-1], nonlinearity=linear, + num_units=num_nodes[i+1], name=d_name) + # input ... z_0, dec_dense_L ... u_L + z_0 = dirty_net['inp_corr'] + dirty_net['u_L'] = BatchNormLayer(dirty_net.values()[-1], beta=None, gamma=None) + + comb_name = 'dec_combinator_{}'.format(L+1) + dirty_net[comb_name] = CombinatorLayer(*[z_0, dirty_net['u_L']], name=comb_name, + combinator_type=combinator_type) + + return dirty_net + + +def build_model(num_encoder, num_decoder, p_drop_input, p_drop_hidden, + activation=rectify, batch_size=None, inp_size=None, + combinator_type='MILAudem'): + net = OrderedDict() + net['input'] = InputLayer((batch_size, inp_size), name='input') + net['inp_corr'] = GaussianNoiseLayer(net['input'], sigma=p_drop_input, + name='input_corr') + + # dirty encoder + train_output_l, dirty_encoder = build_encoder(net, num_encoder, activation, + 'dirty', p_drop_hidden) + + # clean encoder + clean_net = OrderedDict(net.items()[:1]) + eval_output_l, clean_net = build_encoder(clean_net, num_encoder, activation, + 'clean', 0., shared_net=dirty_encoder) + + # decoders + dirty_net = build_decoder(dirty_encoder, clean_net, num_decoder, + p_drop_hidden, combinator_type) + + return [train_output_l, eval_output_l], dirty_net, clean_net + + +def build_cost(X, y, num_decoder, dirty_net, 
clean_net, output_train, lambdas): + class_cost = T.nnet.categorical_crossentropy(T.clip(output_train, 1e-15, 1), + y).mean() + L = len(num_decoder) + + z_clean_l = clean_net['input'] + z_dirty_l = dirty_net['dec_combinator_{}'.format(L)] + + z_clean = lasagne.layers.get_output(z_clean_l, X, deterministic=False) + z_dirty = lasagne.layers.get_output(z_dirty_l, X, deterministic=False) + + rec_costs = [lambdas[L] * T.sqr(z_clean - z_dirty).mean()] + + for l in range(L): + z_clean_l = clean_net['enc_batchn_{}_norm'.format(l)] + z_dirty_l = dirty_net['dec_batchn_{}'.format(L-l-1)] + + z_clean = lasagne.layers.get_output(z_clean_l, X, deterministic=False) + z_dirty = lasagne.layers.get_output(z_dirty_l, X, deterministic=False) + + rec_costs.append(lambdas[l] * T.sqr(z_clean - z_dirty).mean()) + + # cost = class_cost + T.sum(rec_costs) + # return cost + return class_cost, rec_costs \ No newline at end of file diff --git a/examples/ladder_nets/train_ladder_nets.py b/examples/ladder_nets/train_ladder_nets.py new file mode 100644 index 0000000..7e958f5 --- /dev/null +++ b/examples/ladder_nets/train_ladder_nets.py @@ -0,0 +1,203 @@ +from fuel.datasets import MNIST +from fuel.streams import DataStream +from fuel.schemes import ShuffledScheme +from fuel.transformers import Flatten, OneHotEncoding + +import lasagne + +from ladder_nets import build_cost, build_model + +import numpy as np + +import theano +import theano.tensor as T +import theano.misc.pkl_utils + +import argparse +import cPickle + + +arg_parser = argparse.ArgumentParser() +arg_parser.add_argument('-lr', '--learning_rate', type=float, default=0.1) +arg_parser.add_argument('-dlr', '--decrease_lr', type=float, default=1.) 
+arg_parser.add_argument('-bs', '--batch_size', type=int, default=100) +arg_parser.add_argument('-ep', '--max_epochs', type=int, default=15) +arg_parser.add_argument('-ctype', '--combinator', type=str, default='milaUDEM') +arg_parser.add_argument('-l', '--lambdas', type=str, default='0.1,0.1,0.1') +arg_parser.add_argument('-hdrop', '--hid_dropout', type=float, default=0.3) +args = arg_parser.parse_args() + +NUM_EPOCHS = args.max_epochs +BATCH_SIZE = args.batch_size +LEARNING_RATE = args.learning_rate + +mnist = MNIST(which_sets=('train',), # sources='features', + subset=slice(0, 50000), load_in_memory=True) +mnist_val = MNIST(which_sets=('train',), # sources='features', + subset=slice(50000, 60000), load_in_memory=True) +mnist_test = MNIST(which_sets=('test',), # sources='features', + load_in_memory=True) + +data_stream = DataStream(mnist, + iteration_scheme=ShuffledScheme(mnist.num_examples, + batch_size=BATCH_SIZE)) +data_stream_val = DataStream(mnist_val, + iteration_scheme=ShuffledScheme( + mnist_val.num_examples, batch_size=BATCH_SIZE)) +data_stream_test = DataStream(mnist_test, + iteration_scheme=ShuffledScheme( + mnist_test.num_examples, batch_size=BATCH_SIZE)) + +data_stream = Flatten(data_stream, which_sources=('features',)) +data_stream_val = Flatten(data_stream_val, which_sources=('features',)) +data_stream_test = Flatten(data_stream_test, which_sources=('features',)) + +num_classes = 10 + +data_stream = OneHotEncoding(data_stream=data_stream, + which_sources=('targets',), + num_classes=num_classes) + +data_stream_val = OneHotEncoding(data_stream=data_stream_val, + which_sources=('targets',), + num_classes=num_classes) + +data_stream_test = OneHotEncoding(data_stream=data_stream_test, + which_sources=('targets',), + num_classes=num_classes) + +# build network +num_encoder = [500, 10] +num_decoder = [500, 784] + +[train_output_l, eval_output_l], dirty_net, clean_net = build_model( + num_encoder, num_decoder, args.hid_dropout, args.hid_dropout, + 
batch_size=None, inp_size=784, combinator_type=args.combinator) + +# print map(lambda x: [x.name, x.output_shape], dirty_net.values()) +# print map(lambda x: [x.name, x.output_shape], clean_net.values()) + +# set up input/output variables +X = T.fmatrix('X') +y = T.imatrix('y') + +# training output +output_train = lasagne.layers.get_output(train_output_l, X, deterministic=False) + +# evaluation output. Also includes output of transform for plotting +output_eval = lasagne.layers.get_output(eval_output_l, X, deterministic=True) + +# set up (possibly amortizable) lr, cost and updates +sh_lr = theano.shared(lasagne.utils.floatX(LEARNING_RATE)) + +cost, rec_costs = build_cost(X, y, num_decoder, dirty_net, clean_net, + output_train, [float(x) for x in args.lambdas.split(',')]) + +net_params = lasagne.layers.get_all_params(train_output_l, trainable=True) +updates = lasagne.updates.adam(cost, net_params, learning_rate=sh_lr) + +# get training and evaluation functions +train = theano.function([X, y], [cost + T.sum(rec_costs)] + rec_costs, + updates=updates) +eval = theano.function([X], [output_eval]) + +bl_name = 'enc_batchn_{}_learn' +means = [dirty_net[bl_name.format(i)].mean.ravel().mean() for i + in range(len(num_encoder))] +means = T.stack(means, axis=1) +stds = [dirty_net[bl_name.format(i)].inv_std.ravel().mean() for i + in range(len(num_encoder))] +stds = T.stack(stds, axis=1) +get_stats = theano.function([], [means, stds]) + # , on_unused_input='ignore') + +network_dump = {'train_output_layer': train_output_l, + 'eval_output_layer': eval_output_l, + 'dirty_net': dirty_net, + 'clean_net': clean_net, + 'x': X, + 'y': y, + 'output_eval': output_eval + } + + +def save_dump(filename,param_values): + f = file(filename, 'wb') + cPickle.dump(param_values,f,protocol=cPickle.HIGHEST_PROTOCOL) + f.close() + + +def train_epoch(stream): + costs = [] + rec_costs = [] + stats = [] + for batch in stream.get_epoch_iterator(): + train_out = train(*batch) + 
stats.append(np.vstack(get_stats())) + cur_cost = train_out[0] + rec_cost = train_out[1:] + + costs.append(cur_cost) + rec_costs.append(rec_cost) + + print '\n'.join(['Layer #{} rec cost: {}'.format(i, c) for i, c + in enumerate(np.mean(rec_costs, axis=0))]) + stats = np.stack(stats, axis=0).mean(axis=0) + means, inv_stds = stat + for i in range(len(num_encoder)): + print '{}: mean {}, inv_std {}'.format(bl_name.format(i), + np.allclose(means[i], 0.), + np.allclose(inv_stds[i], 1.)) + return np.mean(costs) + + +def eval_epoch(stream, acc_only=True): + preds = [] + targets = [] + for batch in stream.get_epoch_iterator(): + preds.extend(eval(batch[0])) + targets.extend(batch[1]) + + preds = np.vstack(preds) + targets = np.vstack(targets) + + acc = np.mean(preds.argmax(1) == targets.argmax(1)) # accuracy + if not acc_only: + nloglik = (np.log(preds) * targets).sum(1).mean() + # confm = conf_mat(preds, targets)[0].astype(int) + # CONF_MATS['iter_{}'.format(n)] = confm + # save_dump('conf_mats_{}.pkl'.format(experiment_name), CONF_MATS) + # print confm + # return acc, nloglik, confm + else: + return acc + +train_costs, train_accs, valid_accs = [], [], [] +print 'Start training...' 
+try: + for n in range(NUM_EPOCHS): + train_costs.append(train_epoch(data_stream)) + train_accs.append(eval_epoch(data_stream)) + valid_accs.append(eval_epoch(data_stream_val)) + if (n+1) % 10 == 0: + new_lr = sh_lr.get_value() * args.decrease_lr + print "New LR:", new_lr + sh_lr.set_value(lasagne.utils.floatX(new_lr)) + save_dump('accs_{}_ladder_net_mnist.pkl'.format(n), + zip(train_accs, valid_accs)) + # theano.misc.pkl_utils.dump(network_dump, + # 'iter_{}_ladder_nets_mnist.zip'.format(n)) + print "Epoch {}: Train cost {}, train acc {}, val acc {}".format( + n, train_costs[-1], train_accs[-1], valid_accs[-1]) + # print 'TIMES: \ttrain {:10.2f}s, \tval {:10.2f}s'.format(t1-t0, + # t2-t1) + + # TODO: needs an early stopping +except KeyboardInterrupt: + pass + +# save_dump('final_iter_{}_{}'.format(n, experiment_name), +# lasagne.layers.get_all_param_values(output_layer)) + +theano.misc.pkl_utils.dump(network_dump, + 'final_iter_{}_ladder_net_mnist.pkl'.format(n)) \ No newline at end of file diff --git a/examples/ladder_nets/utils.py b/examples/ladder_nets/utils.py new file mode 100644 index 0000000..63121e2 --- /dev/null +++ b/examples/ladder_nets/utils.py @@ -0,0 +1,118 @@ +import numpy as np +from PIL import Image + +import theano +th_rng = theano.tensor.shared_randomstreams.RandomStreams(9999) + +np.random.seed(9999) + + +def scale_to_unit_interval(ndar, eps=1e-8): + """ Scales all values in the ndarray ndar to be between 0 and 1 """ + ndar = ndar.copy() + ndar -= ndar.min() + ndar *= 1.0 / (ndar.max() + eps) + return ndar + + +def tile_raster_images(X, img_shape, tile_shape, tile_spacing=(0, 0), + scale_rows_to_unit_interval=True, + output_pixel_vals=True): + """ + Transform an array with one flattened image per row, into an array in + which images are reshaped and layed out like tiles on a floor. 
+ + This function is useful for visualizing datasets whose rows are images, + and also columns of matrices for transforming those rows + (such as the first layer of a neural net). + + Parameters + ---------- + X: a 2-D ndarray or a tuple of 4 channels + A 2-D array in which every row is a flattened image, the elements of + which can be 2-D ndarrays or None. + img_shape: tuple (height, width) + The original shape of each image. + tile_shape: tuple (nrows, ncols) + The number of images to tile (rows, cols). + tile_spacing: tuple, default (0, 0) + Spacing of the tiles. + scale_rows_to_unit_interval: bool, default True + If True, if the values need to be scaled before being plotted to [0,1]. + output_pixel_vals: bool, default True + If True, output should be pixel values (i.e. int8 values), otherwise + floats. + + Returns + ------- + array suitable for viewing as an image. (See:`Image.fromarray`.) + """ + assert len(img_shape) == 2 + assert len(tile_shape) == 2 + assert len(tile_spacing) == 2 + + out_shape = [(ishp + tsp) * tshp - tsp + for ishp, tshp, tsp in + zip(img_shape, tile_shape, tile_spacing)] + + # if we are dealing with only one channel + height, width = img_shape + height_s, width_s = tile_spacing + + # generate a matrix to store the output + dt = X.dtype + if output_pixel_vals: + dt = 'uint8' + out_array = np.zeros(out_shape, dtype=dt) + + for tile_row in xrange(tile_shape[0]): + for tile_col in xrange(tile_shape[1]): + if tile_row * tile_shape[1] + tile_col < X.shape[0]: + this_x = X[tile_row * tile_shape[1] + tile_col] + if scale_rows_to_unit_interval: + # if we should scale values to be between 0 and 1 + # do this by calling the `scale_to_unit_interval` + # function + this_img = scale_to_unit_interval( + this_x.reshape(img_shape)) + else: + this_img = this_x.reshape(img_shape) + + # add the slice to the corresponding position in the + # output array + c = 1 + if output_pixel_vals: + c = 255 + + tile_h = tile_row * (height + height_s) + tile_w = tile_col 
* (width + width_s) + + out_array[tile_h:tile_h + height, + tile_w: tile_w + width] = this_img * c + return out_array + + +def binarize(X, err=1e-15): + X_mean = X.min(axis=1, keepdims=True) + X_ptp = X.ptp(axis=1) + err + X_norm = (X - X_mean) / X_ptp + return th_rng.binomial(pvals=X_norm, dtype=X.dtype) + + +def half_linear(x): + return 0.5 * x + + +def z_vals(dist, shape): + if dist == 'Gaussian': + return np.random.randn(*shape).astype(np.float32) + elif dist == 'Laplacian': + return np.random.laplace(loc=0.0, scale=np.sqrt(0.5), + size=shape).astype(np.float32) + + +def visualize (it, images, shape=[30,30], name='samples_', p=0): + image_data = tile_raster_images(images, img_shape=[28,28], tile_shape=shape, + tile_spacing=(2,2)) + im_new = Image.fromarray(np.uint8(image_data)) + im_new.save(name+str(it)+'.png') \ No newline at end of file From 5e941217775d5a75a40144fe0b57a6ec7c0db439 Mon Sep 17 00:00:00 2001 From: AdrianLsk Date: Sat, 18 Jun 2016 20:50:12 +0200 Subject: [PATCH 02/10] trained model --- examples/ladder_nets/LadderNets.ipynb | 635 ++++------------------ examples/ladder_nets/train_ladder_nets.py | 2 +- 2 files changed, 95 insertions(+), 542 deletions(-) diff --git a/examples/ladder_nets/LadderNets.ipynb b/examples/ladder_nets/LadderNets.ipynb index 14aa391..e63d907 100644 --- a/examples/ladder_nets/LadderNets.ipynb +++ b/examples/ladder_nets/LadderNets.ipynb @@ -1,567 +1,120 @@ { "cells": [ - { - "cell_type": "code", - "execution_count": 1, - "metadata": { - "collapsed": false, - "scrolled": true - }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "/home/adrian/code/sideprojects\n" - ] - } - ], - "source": [ - "cd /home/adrian/code/sideprojects/" - ] - }, - { - "cell_type": "code", - "execution_count": 88, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [ - "import sys\n", - "sys.path.append('/home/adrian/code/sideprojects/')" - ] - }, - { - "cell_type": "code", - "execution_count": 2, - 
"metadata": { - "collapsed": false - }, - "outputs": [ - { - "name": "stderr", - "output_type": "stream", - "text": [ - "Using gpu device 2: Tesla K80 (CNMeM is disabled, cuDNN 4007)\n" - ] - } - ], - "source": [ - "# import ladder_nets\n", - "# reload(ladder_nets)\n", - "from ladder_nets import build_model, build_cost" - ] - }, - { - "cell_type": "code", - "execution_count": 3, - "metadata": { - "collapsed": false - }, - "outputs": [], - "source": [ - "num_encoder = [500, 10]\n", - "num_decoder = [500, 784]\n", - "\n", - "[train_output_l, eval_output_l], dirty_net, clean_net = build_model(num_encoder, num_decoder, 0.3, 0.3, \n", - " batch_size=50, inp_size=784,\n", - " combinator_type='milaUDEM')" - ] - }, - { - "cell_type": "code", - "execution_count": 4, - "metadata": { - "collapsed": false - }, - "outputs": [ - { - "data": { - "text/plain": [ - "[['input', (50, 784)],\n", - " ['input_corr', (50, 784)],\n", - " ['dirty_enc_dense_0', (50, 500)],\n", - " ['dirty_enc_batchn_0_norm', (50, 500)],\n", - " ['dirty_enc_noise_0_', (50, 500)],\n", - " ['dirty_enc_batchn_0_learn', (50, 500)],\n", - " ['dirty_enc_activation_0', (50, 500)],\n", - " ['dirty_enc_dense_1', (50, 10)],\n", - " ['dirty_enc_batchn_1_norm', (50, 10)],\n", - " ['dirty_enc_noise_1_', (50, 10)],\n", - " ['dirty_enc_batchn_1_learn', (50, 10)],\n", - " ['dirty_enc_softmax', (50, 10)],\n", - " ['dec_batchn_softmax', (50, 10)],\n", - " ['dec_combinator_0', (50, 10)],\n", - " ['dec_batchn_0', (50, 10)],\n", - " ['dec_dense_1', (50, 500)],\n", - " ['dec_batchn_dense_1', (50, 500)],\n", - " ['dec_combinator_1', (50, 500)],\n", - " ['dec_batchn_1', (50, 500)],\n", - " ['dec_dense_2', (50, 784)],\n", - " ['dec_combinator_2', (50, 784)]]" - ] - }, - "execution_count": 4, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "map(lambda x: [x.name, x.output_shape], dirty_net.values())" - ] - }, { "cell_type": "code", "execution_count": 5, "metadata": { "collapsed": false }, - "outputs": [ - 
{ - "data": { - "text/plain": [ - "[['input', (50, 784)],\n", - " ['clean_enc_dense_0', (50, 500)],\n", - " ['clean_enc_batchn_0_norm', (50, 500)],\n", - " ['clean_enc_batchn_0_learn', (50, 500)],\n", - " ['clean_enc_activation_0', (50, 500)],\n", - " ['clean_enc_dense_1', (50, 10)],\n", - " ['clean_enc_batchn_1_norm', (50, 10)],\n", - " ['clean_enc_batchn_1_learn', (50, 10)],\n", - " ['clean_enc_softmax', (50, 10)]]" - ] - }, - "execution_count": 5, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "map(lambda x: [x.name, x.output_shape], clean_net.values())" - ] - }, - { - "cell_type": "code", - "execution_count": 6, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [ - "import theano.tensor as T\n", - "import lasagne\n", - "import theano" - ] - }, - { - "cell_type": "code", - "execution_count": 7, - "metadata": { - "collapsed": false - }, - "outputs": [], - "source": [ - "# set up input/output variables\n", - "X = T.fmatrix('X')\n", - "y = T.imatrix('y')\n", - "\n", - "# training output\n", - "output_train = lasagne.layers.get_output(train_output_l, X, deterministic=False)\n", - "\n", - "# evaluation output. 
Also includes output of transform for plotting\n", - "output_eval = lasagne.layers.get_output(eval_output_l, X, deterministic=True)\n", - "\n", - "# set up (possibly amortizable) lr, cost and updates\n", - "sh_lr = theano.shared(lasagne.utils.floatX(0.0005))\n", - "\n", - "cost, rec_costs = build_cost(X, y, num_decoder, dirty_net, clean_net, output_train, [0.1]*3)\n", - "\n", - "net_params = lasagne.layers.get_all_params(train_output_l, trainable=True)\n", - "updates = lasagne.updates.adam(cost, net_params, learning_rate=sh_lr)\n", - "\n", - "# # get training and evaluation functions\n", - "train = theano.function([X, y], [cost + T.sum(rec_costs)] + rec_costs, updates=updates)\n", - "eval = theano.function([X], [output_eval])" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [ - "network_dump = {'output_layer': output_layer,\n", - " 'net': net,\n", - " 'x': X,\n", - " 'y': y,\n", - " 'output_eval': output_eval\n", - " }" - ] - }, - { - "cell_type": "code", - "execution_count": 8, - "metadata": { - "collapsed": false - }, - "outputs": [], - "source": [ - "from fuel.datasets import MNIST\n", - "from fuel.streams import DataStream\n", - "from fuel.schemes import ShuffledScheme\n", - "from fuel.transformers import Flatten, OneHotEncoding, Duplicate, Mapping, \\\n", - " FilterSources, Merge\n", - "\n", - "mnist = MNIST(which_sets=('train',), # sources='features',\n", - " subset=slice(0, 50000), load_in_memory=True)\n", - "mnist_val = MNIST(which_sets=('train',), # sources='features',\n", - " subset=slice(50000, 60000), load_in_memory=True)\n", - "mnist_test = MNIST(which_sets=('test',), # sources='features',\n", - " load_in_memory=True)\n", - "\n", - "batch_size = 100\n", - "\n", - "data_stream = DataStream(mnist,\n", - " iteration_scheme=ShuffledScheme(mnist.num_examples,\n", - " batch_size=batch_size))\n", - "\n", - "data_stream_val = DataStream(mnist_val,\n", - " 
iteration_scheme=ShuffledScheme(mnist_val.num_examples,\n", - " batch_size=batch_size))\n", - "\n", - "data_stream_test = DataStream(mnist_test,\n", - " iteration_scheme=ShuffledScheme(mnist_test.num_examples,\n", - " batch_size=batch_size))\n", - "\n", - "data_stream = Flatten(data_stream, which_sources=('features',))\n", - "data_stream_val = Flatten(data_stream_val, which_sources=('features',))\n", - "data_stream_test = Flatten(data_stream_test, which_sources=('features',))\n", - "\n", - "num_classes = 10\n", - "\n", - "data_stream = OneHotEncoding(data_stream=data_stream,\n", - " which_sources=('targets',),\n", - " num_classes=num_classes)\n", - "\n", - "data_stream_val = OneHotEncoding(data_stream=data_stream_val,\n", - " which_sources=('targets',),\n", - " num_classes=num_classes)\n", - "\n", - "data_stream_test = OneHotEncoding(data_stream=data_stream_test,\n", - " which_sources=('targets',),\n", - " num_classes=num_classes)" - ] - }, - { - "cell_type": "code", - "execution_count": 9, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [ - "batch = next(data_stream.get_epoch_iterator())" - ] - }, - { - "cell_type": "code", - "execution_count": 10, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [ - "import matplotlib.pyplot as plt\n", - "%matplotlib inline" - ] - }, - { - "cell_type": "code", - "execution_count": 18, - "metadata": { - "collapsed": false - }, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ - "[array(0.7222740358114242), array(0.008365500718355179, dtype=float32), array(0.013544824905693531, dtype=float32), array(0.05210600048303604, dtype=float32)]\n" + "Start training...\n", + "Layer #0 rec cost: 0.0523031353951\n", + "Layer #1 rec cost: 0.00836394913495\n", + "Layer #2 rec cost: 0.117911726236\n", + "enc_batchn_0_learn: mean True, inv_std True\n", + "enc_batchn_1_learn: mean True, inv_std True\n", + "Epoch 0: Train cost 0.85103150726, train acc 0.93518, val acc 0.9366\n", + "Layer 
#0 rec cost: 0.0523090846837\n", + "Layer #1 rec cost: 0.00838615372777\n", + "Layer #2 rec cost: 0.115602619946\n", + "enc_batchn_0_learn: mean True, inv_std True\n", + "enc_batchn_1_learn: mean True, inv_std True\n", + "Epoch 1: Train cost 0.742657381409, train acc 0.94888, val acc 0.9453\n", + "Layer #0 rec cost: 0.0523090697825\n", + "Layer #1 rec cost: 0.00840173009783\n", + "Layer #2 rec cost: 0.114492356777\n", + "enc_batchn_0_learn: mean True, inv_std True\n", + "enc_batchn_1_learn: mean True, inv_std True\n", + "Epoch 2: Train cost 0.719053864464, train acc 0.95816, val acc 0.9541\n", + "Layer #0 rec cost: 0.0522957369685\n", + "Layer #1 rec cost: 0.00840636808425\n", + "Layer #2 rec cost: 0.114234052598\n", + "enc_batchn_0_learn: mean True, inv_std True\n", + "enc_batchn_1_learn: mean True, inv_std True\n", + "Epoch 3: Train cost 0.70547588383, train acc 0.96916, val acc 0.9609\n", + "Layer #0 rec cost: 0.0523124374449\n", + "Layer #1 rec cost: 0.00841217115521\n", + "Layer #2 rec cost: 0.113789305091\n", + "enc_batchn_0_learn: mean True, inv_std True\n", + "enc_batchn_1_learn: mean True, inv_std True\n", + "Epoch 4: Train cost 0.693144676127, train acc 0.97076, val acc 0.9607\n", + "Layer #0 rec cost: 0.0523018464446\n", + "Layer #1 rec cost: 0.0084175830707\n", + "Layer #2 rec cost: 0.11399256438\n", + "enc_batchn_0_learn: mean True, inv_std True\n", + "enc_batchn_1_learn: mean True, inv_std True\n", + "Epoch 5: Train cost 0.687760960555, train acc 0.97554, val acc 0.9645\n", + "Layer #0 rec cost: 0.0523033551872\n", + "Layer #1 rec cost: 0.00842206645757\n", + "Layer #2 rec cost: 0.113653443754\n", + "enc_batchn_0_learn: mean True, inv_std True\n", + "enc_batchn_1_learn: mean True, inv_std True\n", + "Epoch 6: Train cost 0.681915052639, train acc 0.97504, val acc 0.9613\n", + "Layer #0 rec cost: 0.052292086184\n", + "Layer #1 rec cost: 0.00842817965895\n", + "Layer #2 rec cost: 0.113471724093\n", + "enc_batchn_0_learn: mean True, inv_std True\n", + 
"enc_batchn_1_learn: mean True, inv_std True\n", + "Epoch 7: Train cost 0.676880990575, train acc 0.98124, val acc 0.9688\n", + "Layer #0 rec cost: 0.0523066557944\n", + "Layer #1 rec cost: 0.0084327859804\n", + "Layer #2 rec cost: 0.113498724997\n", + "enc_batchn_0_learn: mean True, inv_std True\n", + "enc_batchn_1_learn: mean True, inv_std True\n", + "Epoch 8: Train cost 0.673917843942, train acc 0.98184, val acc 0.9668\n", + "Layer #0 rec cost: 0.0522929169238\n", + "Layer #1 rec cost: 0.00842999480665\n", + "Layer #2 rec cost: 0.113631747663\n", + "enc_batchn_0_learn: mean True, inv_std True\n", + "enc_batchn_1_learn: mean True, inv_std True\n", + "New LR: 0.10000000149\n", + "Epoch 9: Train cost 0.671246978571, train acc 0.98178, val acc 0.9683\n", + "Layer #0 rec cost: 0.0523014776409\n", + "Layer #1 rec cost: 0.00842962320894\n", + "Layer #2 rec cost: 0.112782202661\n", + "enc_batchn_0_learn: mean True, inv_std True\n", + "enc_batchn_1_learn: mean True, inv_std True\n", + "Epoch 10: Train cost 0.668746202706, train acc 0.9863, val acc 0.9723\n", + "Layer #0 rec cost: 0.0523017942905\n", + "Layer #1 rec cost: 0.00843219831586\n", + "Layer #2 rec cost: 0.112931199372\n", + "enc_batchn_0_learn: mean True, inv_std True\n", + "enc_batchn_1_learn: mean True, inv_std True\n", + "Epoch 11: Train cost 0.667900841003, train acc 0.9857, val acc 0.9729\n", + "Layer #0 rec cost: 0.052298579365\n", + "Layer #1 rec cost: 0.00843434035778\n", + "Layer #2 rec cost: 0.112664811313\n", + "enc_batchn_0_learn: mean True, inv_std True\n", + "enc_batchn_1_learn: mean True, inv_std True\n", + "Epoch 12: Train cost 0.662375827058, train acc 0.98466, val acc 0.9703\n", + "Layer #0 rec cost: 0.0522987246513\n", + "Layer #1 rec cost: 0.0084279216826\n", + "Layer #2 rec cost: 0.112784132361\n", + "enc_batchn_0_learn: mean True, inv_std True\n", + "enc_batchn_1_learn: mean True, inv_std True\n", + "Epoch 13: Train cost 0.66105420699, train acc 0.98562, val acc 0.9704\n", + "Layer #0 rec 
cost: 0.0522985383868\n", + "Layer #1 rec cost: 0.00843381509185\n", + "Layer #2 rec cost: 0.112974517047\n", + "enc_batchn_0_learn: mean True, inv_std True\n", + "enc_batchn_1_learn: mean True, inv_std True\n", + "Epoch 14: Train cost 0.661718979622, train acc 0.9877, val acc 0.9725\n" ] - }, - { - "data": { - "text/plain": [ - "(array([ 114., 74., 19., 29., 11., 16., 12., 42., 67., 116.]),\n", - " array([ 0.99610198, 0.99688592, 0.99766986, 0.99845381, 0.99923775,\n", - " 1.0000217 , 1.00080564, 1.00158958, 1.00237353, 1.00315747,\n", - " 1.00394142]),\n", - " )" - ] - }, - "execution_count": 18, - "metadata": {}, - "output_type": "execute_result" - }, - { - "data": { - "image/png": "iVBORw0KGgoAAAANSUhEUgAAAYIAAAENCAYAAAACHGKEAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAE31JREFUeJzt3X/wZXV93/HnCxeqiey3q5W91VU2JIjEiSGUCJ3GuiOo\naDpC0w4TmjoiqekIUUZb465xhm0mjaCttk2GSa2Uro7EQW0KtibgzvJ1xjaIERYQcLutupCd3a81\n2F2dtsDiu3/cA16+7u736/2x97Cf52Pmzt577jn3vPZ+v9/7uuecez43VYUkqV0nzDuAJGm+LAJJ\napxFIEmNswgkqXEWgSQ1ziKQpMatWARJrk+ylOTekWkfTPJgkp1JPptk7ch9W5Ls7u5/3ayCS5Km\nYzVbBDcAr1827Tbg5VV1FrAb2AKQ5GeBS4AzgTcA1yXJ9OJKkqZtxSKoqi8B3102bXtV/aC7eQew\nobv+JuBTVXWoqr7FsCReOb24kqRpm8YxgsuBz3fXXwQ8PHLf3m6aJKmnJiqCJL8NPF5VfzSlPJKk\nY2zNuAsmuQx4I/Cakcl7gReP3N7QTTvc8g5yJEljqKqpHntd7RZBusvwRnIh8B7gTVX16Mh8twC/\nmuSkJD8F/Axw55EetKp6f7n66qvnnsGc5nwm53wmZHwm5ZyFFbcIktwIbAKen+Qh4GrgfcBJwBe6\nDwXdUVVXVNUDSW4CHgAeB66oWSWXJE3FikVQVf/gMJNvOMr8HwA+MEkoSdKx45nFK9i0adO8I6yK\nOafLnNPzTMgIz5ycs5B57blJ4l4jSfoxJaHmdLBYknScsggkaQWDwUaSzPUyGGyc2f/PXUOStILh\npyPn/XoVqspdQ5Kk6Rv7zOJpe/vb/yl33nn3XDOce+7ZXHfdh+aaQZKOtd7sGlpYGHDw4O8Dz5tL\nHvhL1q69igMH9s1p/ZL66njfNdSbLYKhVwGDOa3bApDUJo8RSFLjLAJJapxFIEmNswgkqXEWgSQ1\nziKQpMZZBJLUOItAkhpnEUhS4ywCSWqcRSBJjbMIJKlxFoEkNc4ikKTGWQSS1DiLQJIaZxFIUuMs\nAklqnEUgSY1bsQiSXJ9kKcm9I9PWJbktya4ktyZZGLlvS5LdSR5M8rpZBZckTcdqtghuAF6/bNpm\nYHtVnQHsALYAJPlZ4BLgTOANwHVJMr24kqRpW7EIqupLwHeXTb4I2NZd3wZc3F1/E/CpqjpUVd8C\ndgOvnE5USdIsjHuM4JSqWgKoqv3AKd30FwEPj8y3t5smSeqpaR0srik9j
iTpGFsz5nJLSdZX1VKS\nAfDtbvpe4MUj823oph3W1q1bn7p+6NBjY0aRpOPZYnd5+mvmNKVq5TfzSTYCn6uqn+tuXws8UlXX\nJnkvsK6qNncHiz8JnMtwl9AXgNPrMCtJ8rTJCwsDDh7cCQwm/k+NZx9r157NgQP75rR+SX01/MzL\nvHd8hKoiCVU11Q/hrLhFkORGYBPw/CQPAVcD1wCfTnI5sIfhJ4WoqgeS3AQ8ADwOXHG4EpAk9ceq\ntghmsmK3CCQ9QxzvWwSeWSxJjbMIJKlxFoEkNc4ikKTGWQSS1DiLQJIaZxFIUuMsAklqnEUgSY2z\nCCSpcRaBJDXOIpCkxlkEktQ4i0CSGmcRSFLjLAJJapxFIEmNswgkqXEWgSQ1ziKQpMZZBJLUOItA\nkhpnEUhS4ywCSWqcRSBJjbMIJKlxFoEkNc4ikKTGWQSS1LiJiiDJliT3J7k3ySeTnJRkXZLbkuxK\ncmuShWmFlSRN39hFkORU4G3AL1TVK4A1wKXAZmB7VZ0B7AC2TCOoJGk2JtkiOAg8BvxkkjXAc4C9\nwEXAtm6ebcDFEyWUJM3U2EVQVd8F/iXwEMMCOFBV24H1VbXUzbMfOGUaQSVJs7Fm3AWTnAa8CzgV\nOAB8OsmvAbVs1uW3n7J169anrh869Ni4USTpOLbYXZ7+mjlNqTri6/TRF0wuAV5bVW/rbr8ZOA94\nDbCpqpaSDIDbq+rMwyxfo+teWBhw8OBOYDBWnsntY+3aszlwYN+c1i+pr5JwlPe0xyoFVUUSqirT\nfORJjhHsAs5L8uwMn6XzgQeAW4DLunneAtw8UUJJ0kyNvWuoqu5J8nHgq8ATwN3AR4GTgZuSXA7s\nAS6ZRlBJ0myMXQQAVfUh4EPLJj8CXDDJ40qSjh3PLJakxlkEktQ4i0CSGmcRSFLjLAJJapxFIEmN\nm+jjo8eb733vYHcG4fysX38q+/d/a64ZJLXFIhhR9X+Y92nkS0vzLSJJ7XHXkCQ1ziKQpMZZBJLU\nOItAkhpnEUhS4ywCSWqcRSBJjbMIJKlxFoEkNc4ikKTGWQSS1DjHGpLUa4PBRpaW9sw7xnHNIpDU\na8MSmO9gkHB8DwbpriFJapxFIEmNswgkqXEWgSQ1ziKQpMZZBJLUOItAkho3UREkWUjy6SQPJrk/\nyblJ1iW5LcmuJLcmWZhWWEnS9E26RfCvgc9X1ZnAzwNfBzYD26vqDGAHsGXCdUiSZmjsIkiyFnhV\nVd0AUFWHquoAcBGwrZttG3DxxCklSTMzyRbBTwHfSXJDkruSfDTJTwDrq2oJoKr2A6dMI6gkaTYm\nGWtoDXA2cGVV/XmSjzDcLbR8UJAjDhKydevWp64fOvTYBFEk6Xi12F2e/po5TakabzCnJOuBP6uq\n07rbv8SwCH4a2FRVS0kGwO3dMYTly9fouhcWBhw8uBMYjJVncvuAF9KHwa3G/ZlIx6Mk9OHvsg8Z\nqookVNVUR8Ebe9dQt/vn4SQv7SadD9wP3AJc1k17C3DzJAElSbM16TDU7wQ+meRE4BvAW4FnATcl\nuRzYA1wy4TokSTM0URFU1T3ALx7mrgsmeVxJ0rHjmcWS1DiLQJIaZxFIUuMsAklqnEUgSY2zCCSp\ncRaBJDXOIpCkxlkEktQ4i0CSGmcRSFLjLAJJapxFIEmNswgkqXEWgSQ1ziKQpMZZBJLUOItAkhpn\nEUhS4ywCSWqcRSBJjbMIJKlxFoEkNc4ikKTGWQSS1DiLQJIaZxFIUuMsAklq3MRFkOSEJHcluaW7\nvS7JbUl2Jbk1ycLkMSVJszKNLYKrgAdGbm8GtlfVGcAOYMsU1iFpDgaDjSSZ60WzN1ERJNkAvBH4\n2Mjki4Bt3fVtwMWTrEPS/Cwt7QFqzhfN2qRbBB8B3sPTf1rrq2oJoKr2A6dMuA5J0gytGXfBJL8M\nLFXVziSbjjLrESt969atT10/dOixc
aNI0nFssbs8/TVzmlI13qZXkt8D/iFwCHgOcDLwx8A5wKaq\nWkoyAG6vqjMPs3yNrnthYcDBgzuBwVh5JrcPeCHz3xQN4/5MpGkb7qOf9++jGZ7MUFUkoaqmevBk\n7F1DVfW+qnpJVZ0G/Cqwo6reDHwOuKyb7S3AzROnlCTNzCzOI7gGeG2SXcD53W1JUk+NfYxgVFV9\nEfhid/0R4IJpPK4kafY8s1iSGmcRSFLjLAJJapxFIEmNswgkqXEWgSQ1ziKQpMZZBJLUOItAkhpn\nEUhS4ywCSWqcRSBJjbMIJKlxFoEkNc4ikKTGWQSS1DiLQD9iMNhIkrleBoON834apGZM5RvKdHxZ\nWtrDvL+oe2lpqt/NLeko3CKQpMZZBJLUOItAkhpnEUhS4ywCSWqcRSBJjbMIJKlxFoEkNc4ikKTG\nWQSS1LixiyDJhiQ7ktyf5L4k7+ymr0tyW5JdSW5NsjC9uJKkaZtki+AQ8O6qejnwN4Erk7wM2Axs\nr6ozgB3AlsljSpJmZewiqKr9VbWzu/594EFgA3ARsK2bbRtw8aQhJUmzM5VjBEk2AmcBdwDrq2oJ\nhmUBnDKNdUiSZmPiIkjyXOAzwFXdlsHy8YvnO56xJOmoJvo+giRrGJbAJ6rq5m7yUpL1VbWUZAB8\n+0jLb9269anrhw49NkkUSTpOLXaXp79mTlOqxn/DnuTjwHeq6t0j064FHqmqa5O8F1hXVZsPs2yN\nrnthYcDBgzuBwdh5JrMPeCHz34B5NvDonDPA/J+HMMnvpqYjCX34XTDDMENVkYSqmuo3N429RZDk\nbwG/BtyX5G6Gz9L7gGuBm5JcDuwBLplG0HY8Sh9+4SS1Y+wiqKr/CjzrCHdfMO7jSpKOLc8slqTG\nWQSS1DiLQJIaZxFIRzAYbCTJ3C6DwcZ5PwVqxETnEUjHs6WlPczzE1xLS356S8eGWwSS1DiLQJIa\nZxFIUuMsAklqnEUgSY3zU0NSb/2VbtA3abYsAqm3HIBQx4ZFoJ7y3bB0rFgE6infDUvHigeLJalx\nFoEkNc4ikKTGWQSS1DiLQJIaZxFIUuMsAklqnEUgSY2zCCSpcRaBJDXOIpCkxlkEktQ4i0CSGmcR\nSFLjZlYESS5M8vUk/z3Je2e1HknSZGZSBElOAP4AeD3wcuDSJC+bxbpmb3HeAVZpcd4BVmlx3gFW\naXHeAVZpcd4BVmFx3gFWaXHeAeZmVlsErwR2V9Weqnoc+BRw0YzWNWOL8w6wSovzDrBKi/MOsEqL\n8w6wSovzDrAKi/MOsEqL8w4wN7MqghcBD4/c/otumiSpZ3rzVZUnnngiJ5/8ZpJnz2X9Vf+P731v\nLquWpLlK1fS/FzbJecDWqrqwu70ZqKq6dmSeeX8hrSQ9I1XVVL9Qe1ZF8CxgF3A+sA+4E7i0qh6c\n+sokSROZya6hqnoiyW8CtzE8DnG9JSBJ/TSTLQJJ0jPH1D41tJoTyJL8myS7k+xMctZKyyb5+0m+\nluSJJGf3NOMHkzzYzf/ZJGt7mvN3ktzTzb89yYY+5hy5/58k+UGS5/UxZ5Krk/xFkru6y4V9zNnd\n947ud/S+JNf0MWeST408l99McldPc/5ikjuT3N39e04PM74iyX/r/t5vTvLcFYNU1cQXhoXyP4BT\ngROBncDLls3zBuC/dNfPBe5YaVngDOB0YAdwdk8zXgCc0F2/BvhAT3M+d2T5dwAf62PO7v4NwJ8C\n3wSe18ecwNXAu6fx9zPjnJsY7qJd093+a33MuWz5fwG8v485gduB140sf3sPM94J/FJ3/TLgd1bK\nMq0tgtWcQHYR8HGAqvoysJBk/dGWrapdVbUbmMYR8lll3F5VP+iWv4Phi1gfc35/ZPmfBL7Tx5yd\njwDvmTDfscg5zU9uzCrn24FrqupQt1yff+5PugT4o57m3AcsdNf/KrC3hxlfWlVf6q5vB/7eSkGm\nV
QSrOYHsSPMcq5PPjkXGy4E/6WvOJL+b5CGG7xI+0MecSd4EPFxV902Yb6Y5O7/Zba5/LMkCk5lV\nzpcCfzvJHUlun3RXxgxzApDkVcD+qvqfPc25Gfhw93f0QWBLDzN+rfs7gmGprvjmdJ6jj071c7Az\nsuqMSX4beLyqbpxhniOufjUzVdX7q+olwA3Av5ptpMM6as4kzwHex3C3y6qWmZHVrPM64LSqOgvY\nD3x4tpEOazU51wDrquo84LeAm2Yb6bB+nJ/hpUy+NTCu1eS8HnhH93f0LuDfzzbSj1hNxl8Hrkzy\nFYZb/4+ttMC0Pj66F3jJyO0N/Ogm017gxYeZ56RVLNvrjEkuA94IvKbPOUfcCHy+hzl/GtgI3JMk\n3fSvJnllVX27Rzmpqv81Mv3fAZ8bM99MczJ8p/gfu8xf6Q7AP7+q/rJnOZ88/+hXgIk/GDLDnOdW\n1WsBquozSa7vW8aq2sVwwE+SnA788opJJjkgM3JA41n88MDFSQwPXJy5bJ438sODHufxw4Meq1n2\nduBv9DEjcCFwP/D8Pj+XwM+MLP8O4BN9zLls+W8yfDfbu5zAYGT5dwE39jTnPwb+WXf9pcCePuYc\n+Vu6vad/R08eiP0q8Oru+vnAV3qU8cmf+Qu6f08AtgGXrZhlGk/6yA9xF7Ab2DzyS/gbI/P8QRf+\nHkY+BXS4ZbvpFzPcD/Z/GR6k+ZMeZtwN7AHu6i7X9fS5/AxwL3A38FnglD7mXPb432DCTw3N8Pn8\nePd87gT+E7C+pzlPBD4B3Af8Od2LWN9ydvfdMPoYfcwJnAN8ufs7+jPgF3qY8Z3d9K8Dv7eaHJ5Q\nJkmN86sqJalxFoEkNc4ikKTGWQSS1DiLQFLzVjtQW5KrusH77kty1cj0WQxCeGU32NwTmcLgi0dj\nEUhqSpJXJ7lh2eSPAb9VVT8P/DHDs7CXL/dyhmftngOcBfydJKeNzPLhqjq7u/zpFKJ+ieG5Cnum\n8FhHZRFIatHyz82fXisP1HYm8OWqerSqngC+yPBM6Cf9yPAPSU7IcKj6L3fjUr1t1QGr7qmqhw73\nuNNmEUhq0fIX1/tXMVDb14BXJVmX5CcYnvU7OvzD4QYh/HXgf1fVuQxHDP2NJKdO778xHZ5QJqkJ\nSe5gOBzDycA64KHurvcy3P3y+8DzgFuAd1bVCw7zGG8FrgS+z3BomUer6t1JXgB8p6oqye8yHILk\nHyX5NPBzDEdHAFjL8MzhHQzPFB59AU53+61V9dWRdX6T4RA7j0zhaTgsi0BSU5K8GnhLVV1+hPtP\nZzgW13krPM4/Zzhk+h8um34q8LmqekWSzwD/tqq+MEHebwDnzLII3DUkqXndO3qSnAC8H/jDFeZ7\nCfB3GY7kS5LByGy/wnA3EsCtwBVJ1nTznd4Nt/5jxWPGxwksAkmCS5PsAh4A9lbVfwBI8teT/OeR\n+T6b5GvAzcAVVXWwm/7BJPcm2Qm8muGItDD8NNIDwF1J7mNYMKsa/j/D75p+mOEXztyT5KOT/ReP\nsi53DUlS29wikKTGWQSS1DiLQJIaZxFIUuMsAklqnEUgSY2zCCSpcRaBJDXu/wMNUnqS3U06OQAA\nAABJRU5ErkJggg==\n", - "text/plain": [ - "" - ] - }, - "metadata": {}, - "output_type": "display_data" - } - ], - "source": [ - "print train(*batch)\n", - "plt.hist(dirty_net['enc_batchn_0_learn'].gamma.get_value(borrow=True).ravel())" - ] - }, - { - "cell_type": "code", - "execution_count": 69, - "metadata": { - "collapsed": false - }, - "outputs": [], - 
"source": [ - "train_mean = theano.function([X, y], [cost, dirty_net['enc_batchn_0_norm'].mean, dirty_net['enc_batchn_0_norm'].inv_std,\n", - " dirty_net['enc_batchn_0_learn'].mean, dirty_net['enc_batchn_0_learn'].inv_std],\n", - " updates=updates)" - ] - }, - { - "cell_type": "code", - "execution_count": 71, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [ - "import numpy as np" - ] - }, - { - "cell_type": "code", - "execution_count": 75, - "metadata": { - "collapsed": false - }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "0.485343794748\n" - ] - }, - { - "data": { - "text/plain": [ - "(array([ 0., 0., 0., 0., 0., 500., 0., 0., 0., 0.]),\n", - " array([ 0.5, 0.6, 0.7, 0.8, 0.9, 1. , 1.1, 1.2, 1.3, 1.4, 1.5]),\n", - " )" - ] - }, - "execution_count": 75, - "metadata": {}, - "output_type": "execute_result" - }, - { - "data": { - "image/png": "iVBORw0KGgoAAAANSUhEUgAAAlsAAAE4CAYAAABsTTYYAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAIABJREFUeJzt3X+wXWV97/H3J8TwQyAN2JxTCSQggsFRfhQCc7F1VzT8\ncG7COB0GaS0/qu1cLGXUUhLsncTbTiHMeKmOpR0EY6BQDFgFLJoQw75erCH8SAiSkMYfCSElB/kh\njuMVE/jeP9ZzwuZk73N29l5rr/3j85rZk7WfvfZzvmfvtZ7zzfM861mKCMzMzMysGJPKDsDMzMys\nnznZMjMzMyuQky0zMzOzAjnZMjMzMyuQky0zMzOzAjnZMjMzMyvQhMmWpFskjUjaUFN2vaRNktZL\n+rqkQ2teWyhpS3p9blGBm5k1Q9JWSU9IWidpbSqbJmmlpM2SVkiaWrO/2zAzy1UzPVtLgbPHlK0E\n3h0RJwFbgIUAkk4ALgBmA+cCN0pSfuGame2z14FKRJwcEXNS2QJgVUQcD6zGbZiZFWjCZCsiHgJe\nHlO2KiJeT0/XADPS9jzgzojYHRFbyRKxOZiZlUfs3dbNB5al7WXA+WnbbZiZ5S6POVuXAfen7SOA\n7TWv7UhlZmZlCeABSY9I+ngqG4qIEYCI2AlMT+Vuw8wsd5PbebOkzwK7IuJfc4rHzCxvZ0bEc5J+\nG1gpaTNZAlbL9y0zs8K0nGxJugQ4D/hATfEO4Mia5zNSWb33u3EzG0AR0dE5UBHxXPr3Z5K+STYs\nOCJpKCJGJA0Dz6fd3YaZWUOttl/NDiMqPbIn0jnAVcC8iHi1Zr97gQslTZF0NHAssLZRpRHRE49F\nixaVHoPjdJz9EGunSTpI0sFp+63AXOBJsrbqkrTbxcA9abvv2rBeOTZ6Jc43juPI+ZH/8dQrn2mv\nxNmOCXu2JN0BVIDDJT0DLAKuAaaQzYMAWBMRl0fERknLgY3ALuDyaDdCM7PWDQHfSL1Qk4HbI2Kl\npEeB5ZIuA7aRXYGI2zAzK8KEyVZEXFSneOk4+18LXNtOUGZmeYiInwIn1Sl/Cfhgg/e4DTOz
XHkF\n+SZUKpWyQ2iK48xXr8QJvRWrdVavHBu9Emcv6ZXPtFfibIfK6iGX5N55swEjiejwBPmiuA0bXNn0\nmby/e7U9L8iK1U775Z4tMzMzswI52TIzMzMrkJMtMzMzswI52TIzMzMrkJMtMzMzswI52TIzMzMr\nkJMtMzMzswI52TIzMzMrkJMtMzMzswI52TIzMzMr0ITJlqRbJI1I2lBTNk3SSkmbJa2QNLXmtYWS\ntkjaJGluUYFb7xsenoWkQh7Dw7PK/vXMzMyA5nq2lgJnjylbAKyKiOOB1cBCAEknABcAs4FzgRuV\n3UTKelSRCdHIyDay+4vl/8jqNjMzK9+EyVZEPAS8PKZ4PrAsbS8Dzk/b84A7I2J3RGwFtgBz8gnV\nylBkQmRmZjYIWp2zNT0iRgAiYicwPZUfAWyv2W9HKjMzMzMbSHlNkHc3hZmZmVkdk1t834ikoYgY\nkTQMPJ/KdwBH1uw3I5XVtXjx4j3blUqFSqXSYjhm1o2q1SrVarXsMJA0CXgUeDYi5kmaBnwNmAls\nBS6IiFfSvguBy4DdwJURsbKcqM2sXyhi4k4pSbOA+yLiPen5EuCliFgi6WpgWkQsSBPkbwdOJxs+\nfAB4Z9T5IZLqFVuXya5vKOp7KrZuH1/dRxIR0fGLZiR9Cvhd4NCUbC0BXoyI6xu0YaeR/WdxFW7D\nbIxi2kW3Wd2unfarmaUf7gD+AzhO0jOSLgWuAz4kaTNwVnpORGwElgMbgfuBy90amVmZJM0AzgNu\nrin2RT5m1jETDiNGxEUNXvpgg/2vBa5tJygzsxzdAFwFTK0pG6q9yEdS7UU+P6jZzxf5mFnbWp2z\nZWbW9SR9GBiJiPWSKuPs2lIPvOedmvWvPOecNjVnqwie79AbPGfL8tTpOVuS/h74Y7LJ7gcChwDf\nAE4FKjUX+TwYEbMlLQAiIpak938HWBQRD9ep223YgPKcrcFU6JwtM7NeFRHXRMRREXEMcCGwOiI+\nBtwHXJJ2uxi4J23fC1woaYqko4FjgbUdDtvM+oyHEc1sEF0HLJd0GbCN7DZjRMRGSaMX+ezCF/mY\nWQ48jGjj8jCi5amspR+K4DZscHkYcTB5GNHMzMysSznZMjMzMyuQky0zMzOzAjnZMjMzMyuQky0z\nMzOzAjnZMjMzMyuQky0zMzOzArWVbElaKOkpSRsk3Z5WXZ4maaWkzZJWSJo6cU1mZmZm/anlZEvS\nTOATwMkR8V6y1eg/CiwAVkXE8cBqYGEegZqZmZn1onZ6tn4B/AZ4q6TJZDd53QHMB5alfZYB57cV\noZmZmVkPaznZioiXgc8Dz5AlWa9ExCpgKCJG0j47gel5BGpmZmbWi9oZRjwG+BQwE3g7WQ/XH7H3\nDaN8syczMzMbWJPbeO+pwPcj4iUASd8A/hswImkoIkYkDQPPN6pg8eLFe7YrlQqVSqWNcMys21Sr\nVarVatlhmJmVSq3eZVzSicC/AKcBrwJLgUeAo4CXImKJpKuBaRGxoM77w3c4737F3N1+T+2F1u3j\nq/tIIiJUdhx5cBs2uIppF91mdbt22q+We7Yi4glJtwKPAa8B64CbgEOA5ZIuA7YBF7T6M8zMzMx6\nXcs9W23/YP+vsCe4Z8vy5J4t6wfu2RpM7bRfXkG+DwwPz0JSIQ+zXiZpf0kPS1qXFmD++1TecPHl\ntFjzFkmbJM0tL3oz6xfu2eoDvdz75J6twVJGz5akgyLiV5L2A74PfAaYB7wYEdfXzi2VdAJwO9lc\n1BnAKuCd9Rort2GDyz1bg8k9W2ZmDUTEr9Lm/mRt3ss0Xnx5HnBnROyOiK3AFmBO56I1s37kZMvM\n+pqkSZLWATuBakRspPHiy0cA22veviOVmZm1rJ11tszMul5EvA6cLOlQYIWkCl582cw6yMmWmQ2E\niPiFpPvJFmRutPjyDuDImrfNSGV1eWFms/6V56LMniDf
BzxBvn7dPr66T6cnyEt6G7ArIl6RdCCw\nAvgcMJc6iy/XTJA/nWz48AE8Qd7G8AT5wVTKoqZmZj3gd4Blyv46TgJui4jvpjlcey2+HBEbJS0H\nNgK7gMudUZlZu9yz1Qfcs1W/bh9f3ceLmlo/cM/WYPLSD2ZmZmZdysmWmZmZWYGcbJmZmZkVyMmW\nmZmZWYHaSrYkTZV0V7ph61OSTh/vBq9mZmZmg6bdnq0vAPdHxGzgROBpYAGwKiKOB1YDC9v8GWYt\n2B9JhTyGh2eV/cuZmVkPaXnph3Tri3UR8Y4x5U8D769ZmbkaEe+q835fNp0TL/3Q+bp97LbGSz9Y\nP/DSD4OprKUfjgZekLRU0uOSbpJ0EI1v8GpmZmY2cNpZQX4ycArwyYh4VNINZEOITd/g1fcVM+tv\ned5bzMysV7UzjDgE/CAijknP30eWbL0DqNQMIz6Y5nSNfb+74HPiYcTO1+1jtzUeRrR+4GHEwVTK\nMGIaKtwu6bhUdBbwFHAvcEkquxi4p9WfYWZmZtbr2ro3oqQTgZuBtwA/AS4F9gOWA0eSbvAaET+v\n817/rzAn7tnqfN0+dlvjni3rB+7ZGkzttF++EXUfcLLV+bp97LbGyZb1Aydbg8k3ojYzMzPrUk62\nzMzMzArkZMvMzMysQE62zKxvSZohaXW6d+uTkv4ylTe8h6ukhZK2pHu+zi0vejPrF54g3wc8Qb7z\ndfvYbU2nJ8intf6GI2K9pIOBx4D5ZFdOvxgR10u6GpgWEQsknQDcDpwGzABWAe+s11i5DRtcniA/\nmDxB3sysjojYGRHr0/YvgU1kSdR8YFnabRlwftqeB9wZEbsjYiuwBZjT0aDNrO842TKzgSBpFnAS\nsIbG93A9Athe87YdqczMrGVOtsys76UhxLuBK1MPV9P3cDUza1c7N6I2M+t6kiaTJVq3RcTo7cNG\nJA3V3MP1+VS+g+zuF6NmpLK6Fi9evGe7UqlQqVRyjNzMylStVqlWq7nU5QnyHTI8PIuRkW0F/oTe\nnGjem3UfALxaSM1DQzPZuXNrIXV3gzJWkJd0K/BCRHy6pmwJ8FJELGkwQf50suHDB/AEeRvDE+QH\nk2/X0wN8xaDrbrbufj4vSrga8Uzge8CTZF9aANcAa2lwD1dJC4E/BXaRDTuubFD3QLVh9gYnW4PJ\nyVYPcLLluputu5/PC98b0fqBk63BVOrSD5ImSXpc0r3pecPFAs3MzMwGTR5XI14JbKx5vgBYFRHH\nA6uBhTn8DDMzM7Oe1FayJWkGcB5wc01xo8UCzczMzAZOuz1bNwBX8ebB60aLBZqZmZkNnJbX2ZL0\nYWAk3XOsMs6uDWf8eY0as/6W5zo1Zma9quWrESX9PfDHwG7gQOAQ4BvAqUClZrHAByNidp33D9SV\nPL4a0XU3W3c/nxe+GtH6ga9GHEylXI0YEddExFERcQxwIbA6Ij4G3Adckna7GLinQRVmZmZmfa+I\neyNeB3xI0mbgrPTczMzMbCB5UdMO8TCi62627n4+LzyMaP3Aw4iDqdRFTc3MzMysMSdbZmZmZgVy\nsmVmZmZWICdbZmZmZgVysmVmZmZWICdbZmZmZgVysmVmZmZWICdbZmZmZgVysmVmfU3SLZJGJG2o\nKZsmaaWkzZJWSJpa89pCSVskbZI0t5yozayfONkys363FDh7TNkCYFVEHA+sBhYCSDoBuACYDZwL\n3KhsuXAzs5Y52TKzvhYRDwEvjymeDyxL28uA89P2PODOiNgdEVuBLcCcTsRpZv3LyZaZDaLpETEC\nEBE7gemp/Ahge81+O1KZmVnLJrf6RkkzgFuBIeB14MsR8UVJ04CvATOBrcAFEfFKDrGaDYD9KWrU\namhoJjt3bi2k7j7Q0h2AFy9evGe7UqlQqVRyCsfMylatVqlWq7nUpVbvMi5pGBiOiPWSDgYeI+ua\nvxR4MSKul3Q1MC0i
FtR5fwzSHc6LuUv8ntpdt+tuqu6yzzlJRETH50BJmgncFxHvTc83AZWIGElt\n2YMRMVvSAiAiYkna7zvAooh4uE6dA9WG2RuKac/LPz9tfO20Xy0PI0bEzohYn7Z/CWwCZtB4LoSZ\nWVmUHqPuBS5J2xcD99SUXyhpiqSjgWOBtZ0K0sz6U8vDiLUkzQJOAtYAQ7VzISRNH+etZmaFknQH\nUAEOl/QMsAi4DrhL0mXANrIrEImIjZKWAxuBXcDl7r4ys3a1nWylIcS7gSsj4peSxjZMDRsqz3cw\n6295znloVURc1OClDzbY/1rg2uIiMrNB0/KcLQBJk4FvAd+OiC+ksrpzIeq8d6D+w+g5W667G+ou\n+5wra85WEQatDbM3eM7WYCplzlbyFWDjaKKVNJoLYWZmZjZw2rka8Uzge8CTZCl+ANeQTSZdDhxJ\nmgsRET+v8/6B+l+he7ZcdzfUXfY5554t6wfu2RpM7bRfbQ0jtqMbG6rh4VmMjGwr8Cf05h9o190/\ndZd9zjnZsn7gZGswOdnKiXufXHe/1132Oedky/qBk63BVOacLTMzMzMbh5MtMzMzswI52TIzMzMr\nkJMtMzMzswLlcrueTnn++ee57bbbyg7DzMzMrGk9lWwtXbqUv/mbu5F+P/e6X3vtB7nXaWZmZtZT\nyRZAxAfYvXtJATX/LeCEy8zMzPLlOVtmZmZmBXKyZTYw9kdSIY/h4Vll/3JmZl2r54YRzaxVr1LU\n6vQjI32xKLyZWSHcs2VmZmZWoMKSLUnnSHpa0n9Kurqon2Nmlje3X2aWp0KSLUmTgC8BZwPvBj4q\n6V1F/KzOqJYdQJOqZQfQpGrZATSpWnYA+6BadgB9o9/ar2q1WnYITemVOHtJr3ymvRJnO4rq2ZoD\nbImIbRGxC7gTmF/Qz+qAatkBNKladgBNqpYdQJOqZQewD6plB9BP+qr96pU/ZL0SZy/plc+0V+Js\nR1HJ1hHA9prnz6YyM7Nu5/bLzHLVU1cjHnbYYUya9HcccMC/5l73b37zCrt25V6tmVlTnn76aebO\nnZtrnQcccAAXXXRRrnWa2b5TRP6Xgks6A1gcEeek5wuAiIglNfsUcw26mXW1iOjqdSKaab9Sudsw\nswHTavtVVLK1H7AZOAt4DlgLfDQiNuX+w8zMcuT2y8zyVsgwYkS8JukvgJVk88JucUNlZr3A7ZeZ\n5a2Qni0zMzMzy3RkBXlJfyjph5Jek3RKTflMSb+S9Hh63Fjz2imSNqRFBf+hzDjTawslbZG0SdLc\nmvKOxzkmrkWSnq35DM+ZKOYydfNikZK2SnpC0jpJa1PZNEkrJW2WtELS1BLiukXSiKQNNWUN4yrz\ne28Qa08do400eyxImirprvQ7PSXp9G6MM+07KX0n93YyxvSzJ4xT0gxJq9Pn+KSkv+xgfBO2VZK+\nmI7f9ZJO6lRsY2IYN05JF6V27QlJD0l6Txlxpliaav8lnSZpl6SPdDK+mp/fzHdfSX8rfijpwQkr\njYjCH8DxwDuB1cApNeUzgQ0N3vMwcFravh84u8Q4ZwPryIZdZwE/4o1ewY7HOSbmRcCn65Q3jLms\nB1ly/6P0vb8FWA+8q8yYxsT3E2DamLIlwF+n7auB60qI633ASbXnSqO4gBPK/N4bxNozx+gEv1tT\nxwLwVeDStD0ZOLQb40yvfwr4F+Debvw8gWHgpLR9MNlcusLbjGbaKuBc4N/T9unAmhI+w2biPAOY\nmrbPKSPOZmOt2e+7wLeAj3RjnMBU4CngiPT8bRPV25GerYjYHBFbgHqz+PcqkzQMHBIRj6SiW4Hz\nCwwRGDfO+cCdEbE7IrYCW4A5ZcVZR73PtW7MHY1qb92+WKTYu7d3PrAsbS+jhO83Ih4CXh5T3Ciu\neZT4vTeIFXrnGB3PhMeCpEOB34uIpQDpd/tF50IEmjxmJc0AzgNu7lBcY00YZ0TsjI
j1afuXwCY6\ns+ZZM23VfLI2n4h4GJgqaagDsdWaMM6IWBMRr6Snayhvzbhm2/8rgLuB5zsZXI1m4rwI+HpE7ACI\niBcmqrQbbkQ9K3VjPyjpfansCLKFBEeVvajg2EUOd6SybonzL1I39s01XfGNYi5Tty8WGcADkh6R\n9PFUNhQRI5A1/MD00qJ7s+kN4urG7x165xgdT6PPvNbRwAuSlqZ27SZJB3Y0yubiBLgBuIrsuC9D\ns3ECIGkWWa/pw4VH1lxb1Q3H7762qR8Hvl1oRI1NGKuktwPnR8Q/Uf8/aJ3QzGd6HHBYylsekfSx\niSrN7WpESQ8AtVm9yE7iz0bEfQ3e9l/AURHxsrI5Ut+UdEJeMeUYZ6nGixm4EfhfERGS/g74PNkJ\nZfvuzIh4TtJvAyslbWbvP0TdekVJt8YFPXSMjnOu/U2d3et95pOBU4BPRsSjyuZxLiAbSu2aOCV9\nGBiJiPWSKhT0hy2Hz3O0noPJejuuTD1cto8k/QFwKdlQf7f6B7Ih5VHduibf6Hn+AeCtwA8k/SAi\nfjTeG3IRER9q4T27SEMOEfG4pB+TZYw7gCNrdp2RykqJc5x4Couz1j7E/GVgNGHsSGz7aAdwVM3z\nbohpj4h4Lv37M0nfJOtOHpE0FBEjadi4rK7tsRrF1XXfe0T8rOZpVx+j451raeL/RMfCs8D2iHg0\nPb+bN//x6JY4zwTmSToPOBA4RNKtEfEnXRYnkiaTfY63RcQ9ecY3jmbaqm44fptqUyW9F7gJOCci\n6g3zd0IzsZ4K3ClJwNuAcyXtiohOXsDRTJzPAi9ExK+BX0v6HnAi2VyvusoYRtyTqUp6m6RJafsY\n4FjgJ6lL+RVJc9KH/idAp06yveIE7gUulDRF0tEpzrXdEGdqpEZ9BPjheDF3MrY6HgGOVXYV6hTg\nQrI4SyfpoPS/ZyS9FZgLPEkW3yVpt4vp/HE4Sux9TF6Stmvj6obv/U2x9tgxOp4Jj4U0LLZd0nGp\n6CxgY0eie0MzcV4TEUdFxDFk5+HqvBOtJjR7bn0F2BgRX+hEUEkzbdW9ZG3+6F0Hfj46LNpBE8Yp\n6Sjg68DHIuLHHY6v1oSxRsQx6XE0WYJ9eYcTrabiJDtW3ydpP0kHkV0gMf5afHnP5K/3IJv4uB34\nf2QrMn87lY82vI8DjwLn1bznd8n+2G0BvlBmnOm1hWRZ6yZgbplxjon5VmAD2RUT3ySbYzRuzGU+\nyK6G2Zw+rwVlx1MT19HpM1yXvs8FqfwwYFWKeSXwWyXEdgfZkPurwDNkQwHTGsVV5vfeINaeOkbH\n+d3qHgvA7wDfqtnvRLIGez3wb6Qrwbotzpr93085VyNOGCdZD9xrNefm42S9M52Ib6+2Cvhz4M9q\n9vlSOn6foOYK9g5/juPGSdab/GL67NaRdRR0PM5mP9Oafb9CCVcj7sN3/1dkVyRuAK6YqE4vampm\nZmZWoG64GtHMzMysbznZMjMzMyuQky0zMzOzAjnZMjMzMyuQky0zMzOzAjnZMjMzMyuQky0zMzOz\nAk2YbEm6Jd1WYUNN2fWSNqUby3493el+9LWFkrak1+cWFbiZWTMkbZX0hKR1ktamsmmSVkraLGlF\nzc2x3YaZWe6a6dlaCpw9pmwl8O6IOIlshdWFAOkm0hcAs4FzgRvTbWzMzMryOlCJiJMjYk4qWwCs\niojjgdW4DTOzAk2YbEXEQ6SbRdeUrYqI19PTNWQ3agSYB9wZEbsjYitZIjYHM7PyiL3buvnAsrS9\njOxWXeA2zMwKkMecrcuA+9P2EWT3Fhy1I5WZmZUlgAckPSLp46lsKNJNgyO7ofz0VO42zMxyN7md\nN0v6LLArIv41p3jMzPJ2ZkQ8J+m3gZWSNpMlYLV8k1gzK0zLyZakS4DzgA/UFO8Ajqx5PiOV1Xu/\nGzezARQRHZ0DFRHPpX9/JumbZMOCI5KGImJE0j
DwfNrdbZiZNdRq+9XsMKLSI3sinQNcBcyLiFdr\n9rsXuFDSFElHA8cCaxtVGhFd9Vi0aFHpMXRLPENDM/f9aGpgaGhm330+vRBPN8bUaZIOknRw2n4r\nMBd4kqytuiTtdjFwT9ru6TasF46BXo/zjeM4cn7kfzz1ymfaK3G2Y8KeLUl3ABXgcEnPAIuAa4Ap\nZPMgANZExOURsVHScmAjsAu4PNqN0EoxMrKNvUdWFqfHvtbli7msNEPAN1Iv1GTg9ohYKelRYLmk\ny4BtZFcg4jbMzIowYbIVERfVKV46zv7XAte2E5SZWR4i4qfASXXKXwI+2OA9bsPMLFdeQb5GpVIp\nO4Q36bZ4sg7O7tFtn0+3xQPdGZN1Vq8cA70SZy/plc+0V+Jsh8rqIZfk3vkulg0P5/X9qO3xbusP\nkogOT5AvituwwZVv+7inVreTXa6d9ss9W31keHgWknJ5mJmZWT7cs9VH8u6Ncs+W5c09W9YP3LM1\nmNyzZWZmZtalnGyZmZmZFcjJlpmZmVmBnGyZmZmZFcjJlpmZmVmBnGyZmZmZFcjJlpmZmVmBnGyZ\nmZmZFcjJlpmZmVmBJky2JN0iaUTShpqyaZJWStosaYWkqTWvLZS0RdImSXOLCtzMzMysFzTTs7UU\nOHtM2QJgVUQcD6wGFgJIOgG4AJgNnAvcKN9oz8zMzAbYhMlWRDwEvDymeD6wLG0vA85P2/OAOyNi\nd0RsBbYAc/IJ1czMzKz3tDpna3pEjABExE5geio/Athes9+OVGZmZmY2kCbnVE9LtypfvHjxnu1K\npUKlUskpHDPrBtVqlWq1WnYYSJoEPAo8GxHzJE0DvgbMBLYCF0TEK2nfhcBlwG7gyohYWU7UZtYv\nFDFxniRpJnBfRLw3Pd8EVCJiRNIw8GBEzJa0AIiIWJL2+w6wKCIerlNnNPOzrXnZ9Li8PtN86/J3\nbZAdoxHR8Xmckj4F/C5waEq2lgAvRsT1kq4GpkXEgjTv9HbgNGAGsAp4Z73Gym3Y4Mq3rd1Tq9vJ\nLtdO+9XsMKLSY9S9wCVp+2LgnpryCyVNkXQ0cCywtpXAzMzyIGkGcB5wc02x552aWcc0s/TDHcB/\nAMdJekbSpcB1wIckbQbOSs+JiI3AcmAjcD9wuf/rN77h4VlIyuVhZnXdAFzFm7sihjzv1Mw6ZcI5\nWxFxUYOXPthg/2uBa9sJapCMjGwj36E/Mxsl6cPASESsl1QZZ1fPOzWzN8lzzmlTc7aK4PkOmW6e\nZ+U5W5a3Ts/ZkvT3wB+TTXY/EDgE+AZwKp53ai3ynK3B1Ik5W2ZmPSciromIoyLiGOBCYHVEfAy4\nD887NbMOyWvpBzOzXnIdsFzSZcA2sjtfEBEbJY3OO92F552aWQ48jFgyDyPaIClr6YciuA0bXB5G\nHEweRjQzMzPrUk62zMzMzArkZMvMzMysQE62zMzMzArkZMvMzMysQE62zMzMzArkZMvMzMysQE62\nzMzMzArUVrIlaaGkpyRtkHR7usXFNEkrJW2WtELS1LyCNTMzM+s1LSdbkmYCnwBOjoj3kt3656PA\nAmBVRBwPrAYW5hGomZmZWS9qp2frF8BvgLdKmgwcCOwA5gPL0j7LgPPbitDMzMysh7WcbEXEy8Dn\ngWfIkqxXImIVMBQRI2mfncD0PAI1MzMz60WTW32jpGOATwEzgVeAuyT9EXvfnbPhnTUXL168Z7tS\nqVCpVFoNx8y6ULVapVqtlh2GmVmp1OpdxiVdAHwoIj6Rnn8MOAP4AFCJiBFJw8CDETG7zvvDdzjP\n++7x3VuXv2uD7HiPCJUdRx7chg2ufNvtPbW6nexy7bRf7czZ2gycIekAZUfeWcBG4F7gkrTPxcA9\nbfwMMzMzs57W8jBiRDwh6VbgMeA1YB1wE3AIsFzSZcA24II8AjUzMzPrRS0PI7b9g90FD3gY0QZL\np4cRJe0PfA
+Ykh73RMQ1kqYBXyObc7oVuCAiXknvWQhcBuwGroyIlQ3qdhs2oDyMOJjaab+cbJXM\nyZYNkjLmbEk6KCJ+JWk/4PvAZ4B5wIsRcb2kq4FpEbFA0gnA7cBpwAxgFfDOeo2V27DB5WRrMJU1\nZ8vMrOtFxK/S5v5kbd7LNF4PcB5wZ0TsjoitwBZgTueiNbN+5GTLzPqapEmS1gE7gWpEbKTxeoBH\nANtr3r46TSTeAAAQBUlEQVQjlZmZtazlCfJmZr0gIl4HTpZ0KLBCUoV9WA/QzKxdTrbMbCBExC8k\n3Q+cCoxIGqpZD/D5tNsO4Miat81IZXV5YWaz/pXnosyeIF8yT5C3QVLC1YhvA3ZFxCuSDgRWAJ8D\n5gIvRcSSBhPkTycbPnwAT5C3MTxBfjC10365Z8vM+tnvAMvSwsuTgNsi4rtpDtde6wFGxEZJy8kW\naN4FXO6Mysza5Z6tkrlnywaJb9dj/cA9W4PJSz+YmZmZdSknW2ZmZmYFcrJlZmZmViAnW2ZmZmYF\naivZkjRV0l2SNkl6StLpkqZJWilps6QVkqbmFayZmZlZr2m3Z+sLwP0RMRs4EXgaWACsiojjgdXA\nwjZ/hpmZmVnPannph3Tri3UR8Y4x5U8D769ZmbkaEe+q835fNo2XfrDB4qUfrB946YfBVNbSD0cD\nL0haKulxSTdJOojGN3g1MzMzGzjtrCA/GTgF+GREPCrpBrIhxKZv8Or7ipn1tzzvLWZm1qvaGUYc\nAn4QEcek5+8jS7beAVRqhhEfTHO6xr7fXfB4GNEGi4cRrR94GHEwlTKMmIYKt0s6LhWdBTwF3Atc\nksouBu5p9WeYmZmZ9bq27o0o6UTgZuAtwE+AS4H9gOXAkaQbvEbEz+u81/8rxD1bNljcs2X9wD1b\ng6md9ss3oi6Zky0bJE62rB842RpMvhF1hw0Pz0JSLg8zMzPrb+7ZasGg9Ea5Z8vy5p4t6wfu2RpM\n7tkyMzMz61JOtsysb0maIWl1unfrk5L+MpU3vIerpIWStqR7vs4tL3oz6xceRmyBhxH3va5e/a4t\nX50eRkxr/Q1HxHpJBwOPAfPJrpx+MSKul3Q1MC0iFkg6AbgdOA2YAawC3lmvserlNsza42HEweRh\nRDOzOiJiZ0SsT9u/BDaRJVHzgWVpt2XA+Wl7HnBnROyOiK3AFmBOR4M2s77jZMs6YP/crt4cHp5V\n9i9jPUrSLOAkYA2N7+F6BLC95m07UpmZWcvauTeiWZNeJa8u95GRvriQzTosDSHeDVwZEb+U1PQ9\nXM3M2uVky8z6mqTJZInWbRExevuwEUlDNfdwfT6V7yC7+8WoGamsrsWLF+/ZrlQqVCqVHCM3szJV\nq1Wq1WoudXmCfAs8Qb7cunr1uLFy1tmSdCvwQkR8uqZsCfBSRCxpMEH+dLLhwwfwBHkbwxPkB5Nv\n19NhTrbKratXjxsr5WrEM4HvAU+SHYQBXAOspcE9XCUtBP4U2EU27LiyQd0924ZZe5xsDSYnWx3m\nZKvcunr1uDGvIG/9wcnWYCp16QdJkyQ9Lune9LzhYoFmZmZmgyaPpR+uBDbWPF8ArIqI44HVwMIc\nfoaZmZlZT2or2ZI0AzgPuLmmuNFigWZmZmYDp92erRuAq3jz4HWjxQLNzMzMBk7LyZakDwMj6VYY\n400Y84w/MzMzG1jtLGp6JjBP0nnAgcAhkm4DdjZYLHAvXhDQrL/luSigmVmvymXpB0nvBz4TEfMk\nXQ+8OHaxwDrv6dnLpr30Q7l19epxY176wfqDl34YTKUu/VDHdcCHJG0GzkrPzczMzAaSFzVtgXu2\nyq2rV48bc8+W9Qf3bA2mbuvZMjMzM7PEyZb1mP2RlNtjeHhW2b+QmZn1uXauRjQrwavk2X0/MtIX\nI1pmZtbF3LNlZmZmViAnW2ZmZmYFcrJlZmZmViAnW2ZmZmYFcrJlZmZmViAn
W2bW1yTdImlE0oaa\nsmmSVkraLGmFpKk1ry2UtEXSJklzy4nazPqJky0z63dLgbPHlC0AVkXE8cBqYCGApBOAC4DZwLnA\njcqWCzcza5mTLTPraxHxEPDymOL5wLK0vQw4P23PA+6MiN0RsRXYAszpRJxm1r+cbJnZIJoeESMA\nEbETmJ7KjwC21+y3I5WZmbWs5RXkJc0AbgWGgNeBL0fEFyVNA74GzAS2AhdExCs5xGpmVpSWbkuw\nePHiPduVSoVKpZJTOGZWtmq1SrVazaUutXqXcUnDwHBErJd0MPAYWdf8pcCLEXG9pKuBaRGxoM77\no1fvcJ7vHd9dV3l1ZfX16nHYiyQRER2fAyVpJnBfRLw3Pd8EVCJiJLVlD0bEbEkLgIiIJWm/7wCL\nIuLhOnX2bBtm7cn3b8CeWt0Wdbl22q+WhxEjYmdErE/bvwQ2ATNoPBfCzKwsSo9R9wKXpO2LgXtq\nyi+UNEXS0cCxwNpOBWlm/SmXG1FLmgWcBKwBhmrnQkiaPs5bzcwKJekOoAIcLukZYBFwHXCXpMuA\nbWRXIBIRGyUtBzYCu4DL3X1lZu1qeRhxTwXZEGIV+NuIuEfSSxFxWM3rL0bE4XXe17NtmIcR+6Wu\nrL5ePQ57UVnDiEXo5TbM2uNhxMHUTvvVVs+WpMnA3cBtETHaDT8iaahmLsTzjd7vyaVm/S3PCaZm\nZr2qrZ4tSbcCL0TEp2vKlgAvRcQST5BvqjbXVVpdWX29ehz2IvdsWT9wz9Zgaqf9audqxDOB7wFP\nkh11AVxDNpl0OXAkaS5ERPy8zvt7tqFystUvdWX19epx2IucbFk/cLI1mEpJttrVyw2Vk61+qSur\nr1ePw17kZMv6gZOtwVTK0g9mZmZmNjEnW2ZmZmYFymWdrW737LPPcvfdd5cdhpmZmQ2ggUi2vvCF\nL3HDDf+X/fY7re26XnvtsRwiMjMzs0ExEMlWBLz22n/ntdf2WoGiBZ8HHsqhHjMzMxsEnrNlZmZm\nViAnW2ZmZmYFcrJlA25/JOXyGB6eVfYvY2ZmXWgg5myZNfYqeS1OODLSF2t1mplZztyzZWZmZlYg\nJ1tmZmZmBXKyZWZmZlagwpItSedIelrSf0q6uqifY9Y9PNm+X7j9MrM8FZJsSZoEfAk4G3g38FFJ\n7yriZ+WrWnYAY1TLDmCMatkBjFEtO4AxRifbt/8YGdmWS0TVajWXegZJ77Zf9fXKMdArcfaSXvlM\neyXOdhTVszUH2BIR2yJiF3AnML+gn5WjatkBjFEtO4AxqmUHMEa17AC63iA0YgXo0farvl45Bnol\nzl7SK59pr8TZjqKWfjgC2F7z/FmyBszMmpINSebhn//5q+zcuTWXugaE2y8zy9VArLM1Zcpb2H//\nr7L//t8fd79f/3ozBxww/o2mf/ObH/PrX+cZnVk9ea3/tZiRkc/lUI8VbevWrVxxxRW51jllyhTe\n85735Fqnme07ReSzoOObKpXOABZHxDnp+QIgImJJzT75/2Az63oR0dWrvzbTfqVyt2FmA6bV9quo\nZGs/YDNwFvAcsBb4aERsyv2HmZnlyO2XmeWtkGHEiHhN0l8AK8km4d/ihsrMeoHbLzPLWyE9W2Zm\nZmaWyW3ph2YWAZT0RUlbJK2XdHJN+VZJT0haJ2ltTfk0SSslbZa0QtLUkuO5XtKmtP/XJR1aZjw1\nr39G0uuSDis7HklXpM/oSUnXNRtPUTFJOk3S2tFySacWGM9JNeVTJd2VPounJJ2eyjt5TDcTTyeP\n6QnjqXl9n4/pIjTzO6b9TpO0S9JHOhnfmBia+T4q6Vz4oaQHOx1jimHcOCUdLunb6Zh5UtIlJYSJ\npFskjUjaMM4+dY/vTpooTkkXpbbxCUkPSSrliolmPs+0X6nnUpPf+76fRxHR9oMsafsRMBN4C7Ae\neNeYfc4F/j1tnw6sqXntJ8C0OvUuAf46
bV8NXFdyPB8EJqXt64Bry4wnvTYD+A7wU+Cwkj+fCtnQ\ny+T0/G1dcAw9CMytef+DHYrnq8ClaXsycGjJx3SjeMo6puvG0+oxXcSjmd+xZr/vAt8CPtKtsQJT\ngaeAI9Lzps/PDse5aPQ4BN4GvEhqUzoc6/uAk4ANDV5veHx3WZxnAFPT9jndGmfN8VH2uTTR59nS\neZRXz1YziwDOB24FiIiHgamShtJron4v23xgWdpeBpxfZjwRsSoiXk9P15D9USgtnuQG4Kom4yg6\nnv9BljzsTu97oQtieo7s5AD4LWBH0fGk3qHfi4il6bXdEfGLmvd09JgeL54yjukJPh9o7ZguQrOL\nm14B3A0838ngxmgm1ouAr0fEDtjn8zMvzcS5EzgkbR8CvDjapnRSRDwEvDzOLuO1Rx0zUZwRsSYi\nXklP15CtI9dxTXye0AXnUhNxtnQe5ZVs1VsEcOwXOnafHTX7BPCApEckfaJmn+kRMQIQETuB6SXH\nU+sy4NtlxiNpHrA9Ip5sMo5C4wGOA35f0hpJD+7LkF2BMS0A/rekZ4DrgYUdiOdo4AVJSyU9Lukm\nSQemfco4pseLp1anjumG8bRxTBdhwt9R0tuB8yPin8gS/rI0830cBxyWzs1HJH2sY9G9oZk4vwy8\nW9J/AU8AV3Yotn01XnvUrT5O8+d4R3XRuTSRls6jwm5EvY/OjIhTgPOAT0p6X4P9OjWbf9x4JH0W\n2BURd5QVT/rjdA1Zl/ue0MqKJ5VPJhvKOwP4a2B5h+IZL6ZbgCsi4ijgU8BXOhDLZOAU4B9TTL8i\nS/pg7++oE8f0ePFkQXX2mK4bT8nHdKv+gWw4eFQ3xzv6uZ9LNpz0PyUdW25IdS0EnoiItwMnA/8o\n6eCSY+p5kv4AuJQ3H6/dpFfOpZbOo7ySrR3AUTXPZ7D3cM0O4Mh6+0TEc+nfnwHf4I1bY4yMdstK\nGqb5rsWi4iFN1jyPrCuxWUXE8w5gFvCEpJ+m/R+T1ExPSVGfz7PAv6XXHgFel3R4E/EUGdPpEfHN\n9NrdNH/blXbieZasd+bRVH432ckJsLOEY3q8eMo4phvF084xXYRmfsdTgTtTvH9IlhjM61B8tZqJ\n9VlgRUT8OiJeBL4HnNih+EY1E+eZwF0AEfFjsrl73Xgj8IbtUbeR9F7gJmBeREw0lFeWbjmXJtLa\nedTKBLI6E8b2441Jj1PIJj3OHrPPebwxmfAM0iQ94CDg4LT9VuD7vDGheQlwddrel8nERcVzDtnE\nuMO74fMZ8/6f0mASfQc/nz8HPpe2jwO2lfgZfSg9fwx4f9o+C3ik6HjS8/8DHJe2FwFLyjqmJ4in\n48f0ePG0ekwX8Wjmdxyz/1LKm9TbzPfxLuCBtO9BwJPACV0Y5+eBRWl7iGyorpQLJciS/ycbvNbw\n+O6yOI8CtgBnlBVfM3GO2a+0c6mJz7Ol8yjP4M4hW3V5C7Aglf058Gc1+3wpnWhPAKeksqPTCbcu\nBb2gZv/DgFWp3pXAb5UczxZgG/B4etxYZjxj6v/JvjRIBX0+bwFuS+WPkpKckmM6FXg4vfYD4OSi\n40nlJwKPpLj+jTeuBur4MT1BPB0/pseLp51juohHM79jzb5fodw/EM18H39FllxvIBte77o4ya5A\nvC8dMxvIVu8vI847gP8iu1HpM2RDcE0d390UJ9kcuBfT+b0OWNuNcY7Zt7RzqcnvfZ/PIy9qamZm\nZlagbpkgb2ZmZtaXnGyZmZmZFcjJlpmZmVmBnGyZmZmZFcjJlpmZmVmBnGyZmZmZFcjJlpmZmVmB\nnGyZmZmZFej/A1Bv+vRapYP7AAAAAElFTkSuQmCC\n", - "text/plain": [ - "" - ] - }, - "metadata": {}, - "output_type": "display_data" - } - ], - "source": [ - "# 
batchnorm working!\n", - "cost_val, mean_norm, inv_std_norm, mean_learn, inv_std_learn = train_mean(*batch)\n", - "print cost_val\n", - "fig, ax = plt.subplots(2,2, figsize=[10,5])\n", - "ax[0,0].hist(np.asarray(mean_norm).ravel())\n", - "ax[0,1].hist(mean_learn.ravel(1))\n", - "ax[1,0].hist(np.asarray(inv_std_norm).ravel())\n", - "ax[1,1].hist(inv_std_learn.ravel(1))" - ] - }, - { - "cell_type": "code", - "execution_count": 20, - "metadata": { - "collapsed": false - }, - "outputs": [ - { - "data": { - "text/plain": [ - "[['input', (50, 784)],\n", - " ['input_corr', (50, 784)],\n", - " ['dirty_enc_dense_0', (50, 500)],\n", - " ['dirty_enc_batchn_0_norm', (50, 500)],\n", - " ['dirty_enc_noise_0_', (50, 500)],\n", - " ['dirty_enc_batchn_0_learn', (50, 500)],\n", - " ['dirty_enc_activation_0', (50, 500)],\n", - " ['dirty_enc_dense_1', (50, 10)],\n", - " ['dirty_enc_batchn_1_norm', (50, 10)],\n", - " ['dirty_enc_noise_1_', (50, 10)],\n", - " ['dirty_enc_batchn_1_learn', (50, 10)],\n", - " ['dirty_enc_softmax', (50, 10)],\n", - " ['dec_batchn_softmax', (50, 10)],\n", - " ['dec_combinator_0', (50, 10)],\n", - " ['dec_batchn_0', (50, 10)],\n", - " ['dec_dense_1', (50, 500)],\n", - " ['dec_batchn_dense_1', (50, 500)],\n", - " ['dec_combinator_1', (50, 500)],\n", - " ['dec_batchn_1', (50, 500)],\n", - " ['dec_dense_2', (50, 784)],\n", - " ['dec_combinator_2', (50, 784)]]" - ] - }, - "execution_count": 20, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "map(lambda x: [x.name, x.output_shape], dirty_net.values())" - ] - }, - { - "cell_type": "code", - "execution_count": 21, - "metadata": { - "collapsed": false - }, - "outputs": [ - { - "data": { - "text/plain": [ - "[['input', (50, 784)],\n", - " ['clean_enc_dense_0', (50, 500)],\n", - " ['clean_enc_batchn_0_norm', (50, 500)],\n", - " ['clean_enc_batchn_0_learn', (50, 500)],\n", - " ['clean_enc_activation_0', (50, 500)],\n", - " ['clean_enc_dense_1', (50, 10)],\n", - " ['clean_enc_batchn_1_norm', 
(50, 10)],\n", - " ['clean_enc_batchn_1_learn', (50, 10)],\n", - " ['clean_enc_softmax', (50, 10)]]" - ] - }, - "execution_count": 21, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "map(lambda x: [x.name, x.output_shape], clean_net.values())" - ] - }, - { - "cell_type": "code", - "execution_count": 42, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [ - "from lasagne.layers import get_all_layers" - ] - }, - { - "cell_type": "code", - "execution_count": 44, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [ - "W = lasagne.init.GlorotUniform()\n", - "beta=lasagne.init.Constant(0)\n", - "gamma=lasagne.init.Constant(1)\n", - "\n", - "shp = (500,)\n", - "zero_const = T.zeros(shp, np.float32)\n", - "one_const = T.ones(shp, np.float32)" - ] - }, - { - "cell_type": "code", - "execution_count": 45, - "metadata": { - "collapsed": false - }, - "outputs": [ - { - "data": { - "text/plain": [ - "['InputLayer',\n", - " 'DenseLayer',\n", - " 'BatchNormLayer',\n", - " 'NonlinearityLayer',\n", - " 'BatchNormLayer',\n", - " 'NonlinearityLayer']" - ] - }, - "execution_count": 45, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "l1b = InputLayer((64, 768))\n", - "l2b = batch_norm(batch_norm(DenseLayer(l1, num_units=500, nonlinearity=tanh), \n", - " beta=None, gamma=None), alpha=1., \n", - " beta=beta, gamma=gamma, mean=zero_const, inv_std=one_const)\n", - "[l.__class__.__name__ for l in get_all_layers(l2b)]" - ] - }, - { - "cell_type": "code", - "execution_count": 46, - "metadata": { - "collapsed": false - }, - "outputs": [ - { - "data": { - "text/plain": [ - "['InputLayer',\n", - " 'DenseLayer',\n", - " 'BatchNormLayer',\n", - " 'BatchNormLayer',\n", - " 'NonlinearityLayer']" - ] - }, - "execution_count": 46, - "metadata": {}, - "output_type": "execute_result" } ], "source": [ - "l1 = InputLayer((64, 768))\n", - "l2 = DenseLayer(l1, num_units=500, nonlinearity=linear)\n", - "lb1 = 
BatchNormLayer(l2, alpha=0.1, beta=None, gamma=None)\n", - "lb2 = BatchNormLayer(lb1, alpha=1., beta=beta, gamma=gamma, \n", - " mean=zero_const, inv_std=one_const)\n", - "l3 = NonlinearityLayer(lb2, nonlinearity=tanh)\n", - "[l.__class__.__name__ for l in get_all_layers(l3)]" + "%run train_ladder_nets.py -dlr 1. -l 0.1,1.,0.1 -ep 15" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ - "References: \n", + "### References: \n", "http://arxiv.org/pdf/1411.7783.pdf \n", "http://arxiv.org/pdf/1507.02672v2.pdf \n", "http://arxiv.org/pdf/1511.06430.pdf" diff --git a/examples/ladder_nets/train_ladder_nets.py b/examples/ladder_nets/train_ladder_nets.py index 7e958f5..eb42119 100644 --- a/examples/ladder_nets/train_ladder_nets.py +++ b/examples/ladder_nets/train_ladder_nets.py @@ -143,7 +143,7 @@ def train_epoch(stream): print '\n'.join(['Layer #{} rec cost: {}'.format(i, c) for i, c in enumerate(np.mean(rec_costs, axis=0))]) stats = np.stack(stats, axis=0).mean(axis=0) - means, inv_stds = stat + means, inv_stds = stats for i in range(len(num_encoder)): print '{}: mean {}, inv_std {}'.format(bl_name.format(i), np.allclose(means[i], 0.), From 9b9ddccea6cb5414339edb1e04d1406bfa9186ee Mon Sep 17 00:00:00 2001 From: AdrianLsk Date: Wed, 22 Jun 2016 09:23:44 +0200 Subject: [PATCH 03/10] working example --- examples/ladder_nets/LadderNets.ipynb | 939 +++++++++++++++++++++++--- 1 file changed, 844 insertions(+), 95 deletions(-) diff --git a/examples/ladder_nets/LadderNets.ipynb b/examples/ladder_nets/LadderNets.ipynb index e63d907..0400c2a 100644 --- a/examples/ladder_nets/LadderNets.ipynb +++ b/examples/ladder_nets/LadderNets.ipynb @@ -1,8 +1,533 @@ { "cells": [ + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# Ladder Networks" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Bit of description" + ] + }, + { + "cell_type": "code", + "execution_count": 34, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": 
[ + "from PIL import Image" + ] + }, { "cell_type": "code", - "execution_count": 5, + "execution_count": 42, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "data": { + "image/png": "iVBORw0KGgoAAAANSUhEUgAAAcUAAAFLCAYAAACne5csAADI3klEQVR4nOydd1hUR9fAf7ssHaRZ\nKKKCnQ4qakyMJrZojIktJqaY3uub90tiiqaYvOm9mB4TuybGbqKxxK70KqIgCAhI78vu3u+PZS8s\nsLAYEJX5PU+eyC1zZ3fPPWfOmTNnFJIkSQgEAoFAIEDZ2R0QCAQCgeBSQRhFgUAgEAjqEEZRIBAI\nBII6hFEUCAQCgaAOYRQFAoFAIKhDGEWBQCAQCOoQRlEgEAgEgjqEURTIVCZ/y7wB1igc+hA26Q6e\neuk5bpt4P3/k6jq7awKBQHBRUIjF+wIAdHlsfvYp/u4zFo/zkRzYd4DIcz2Y/f6vvHuzN6rO7p9A\nIBBcBIRRFAgEAoFpyg/z6pyH+X5fArm1Dgy++R1+/+VBBmmOs2jyDbwTbcXgG5ew4dcF+FwBo2dh\nFAUCgUDQMroCNs8fxPRVNdywPIVNt3tigZqk9ybxsO4L/nzeH+vO7mM7IeYUBZQffpUbQrywt1Rh\n5eLP3G9SqAEqji/i6h42OHgFM++nNDSd3VGBQNA5KN2Y8MLjDKGCbW9+Q0I1ULyfT9a48+SCoVeM\nQQThKQrq0BVsZv6g6ayquYHlKZu43dMC1Em8N+lhdF/8yfP+V5LYCwSCNqPNZsX0QczfZs1tmxNZ\nkr2A29PfYNeS4dh1dt/aEeEpCgBQuk3ghceHQMU23vwmAf1A8BPWuD/JgqHCIAoEXR4LT256+T68\nKWTNyy/z0lILHnwo9IoyiCCMokDGhoCHXuEGe0j67H/syk1j7XsxTHruBnoJKREIBIBD+FO8OMYS\nbfR37Bn2HDP7WHR2l9odET4VNKCcg0/5MebTbELun4tFzkR+++MerkC5FwgEF0jRjgUMnZHA/8Uc\n4NnBVp3dnXZH+ACCBjgQ/tSLjLHUEv3dHoY9N1MYRIGgy6OjMvsE6SVaQIekU+E95xXuHHjlGUQQ\nRlHQCJXvPF65vRfWw5/j2aucOrs7AoGgsyn5i3v9h+Az9kOSi6P45M2TzHx+Ej2uUOtxhX4sQZvQ\nVZJ9Ih39QFBCp/Jmzit3coUOBAUCQVuwHcSU8R44qA/xxt2LyH/0B54NsOnsXnUYYk5RQMmOefSb\nspo+7yax97qVTHvWhqU7XuQKlnuBQNAWdNUUnM2DXr1xs76yfSlhFAWo035i3pgnSBp1E4PUTsz9\n5CPm9xfLMAQCQddDGEUBALrqAvQDQTeu8IGgQCAQmEQYRYFAIBAI6hA+gaAJGRkZaDSi0qlAIGie\n559/nvLy8s7uRocgjKKgCS+++CK//vprZ3dDIBBcgqSlpfHhhx/y2WefdXZXOgQRPhUYceLECfz8\n/PD19SU5ORkLC7F6XyAQ1HPffffxww8/4ObmRnp6Og4ODp3dpXZFeIoCI15//XV0Oh2pqanCWxQI\nBEakpaWxbNkyAAoKCvj88887uUftj/AUBTLJycn4+/uj0+kAGDBggPAWBQKBjMFLNNC9e3fS0tKu\nKG9ReIoCGYOXaCA1NZXly5d3Yo8EAsGlQkMv0cD58+evOG9ReIoCoKmXaGDgwIEkJSUJb1Eg6OI0\n9hINdO/enfT0dOzt7TuhV+2P8BQFQFMv0cDJkyeFtygQdHFOnz7dxEs0cKV5i8JTFJj0Eg0Ib1Eg\n6Nrce++9/PjjjybPX0neovAUBSa9RAMnT55kxYoVF7FHAoHgUuH06dP88ssvLV5z/vx5vvjii4vU\no45FeIpd
nNa8RAODBg0iMTFReIsCQRejNS/RQI8ePUhLS7vsvUXhKXZxWvMSDaSkpAhvUSDoYpjj\nJRrIz8+/IrxF4Sl2Ycz1Eg0Ib1Eg6FqY6yUauBK8RVVnd0DQeZjrJRoweIt33nlnB/ZKIBBcCqjV\napydnXn88cflY1qtlq+++kr++/rrr2fw4MFG9yUmJjJixIiL1s/2RniKXZi0tDS0Wq3RsSlTpnDq\n1CkA7rnnHl588UWj846Ojri7u1+0PgoEgksHtVqNtXX9BuQ///wzd911Vyf2qP0RnmIXxsfHp8kx\nKysr+d8uLi4MHDjwYnZJIBAIOhWRaCMwQgQOBAJBV0YYRYFJFApFZ3dBIBBcQnSFQbMwigKBQCC4\nIK7EgbOYUxQY0TAbVavVolarAdMjxI4+3pnPvtyPt2dbbm5u2NjYmHyOQHClIIyiwIiioiL53x9/\n/DEff/xx53VGcMnw4Ycf8swzz3R2NwSCDkeETwVGdIU5A0Hbabx0R9A1aawfrsTwqTCKAoGgVYRR\nFHQVRPhUYIS9vT3nz58H4IYbbmDevHnyOVOjwo4+3pnPvtyPX2hbOTk53HffffIxsV5V0FUQRlFg\nRMPF+wEBAVdctQqBeaSnpxv97ejo2DkdEVzSiPCpQCDoEjQu+i7CpwLoGjkHwigKjGgo9FfiKFBg\nHiqVcRBJGEVBV0EYRYFA0AThKQq6KsIoCgSCJjQ2ihqNppN6IriUuRKjScIoCowQ4VMBiPCpoHnE\nnKJAIOiSiPCpoKsijKJAIGhCY09RhE8FzXElRpOEURQYIcKnAhCeoqDrIoyiQCBogjCKguYQc4qC\nLkdXEPqORFNwiuxK0+d1JRlkl+tMX3CJoFQqjSIFwigKmuNKjCYJoygwyZUo8B2JOvVnnl60l7I6\nJ0udn8DeLZvZHZ1Ddd01SrJZ/n+vsSPv0jeMDb1FMaco6CoIoygQtAfqeP43fy0jFy5gsLWO4iNf\n8c6KZCzcncj8YTajbl9GmhpwGsVTj1qw+L6V5FzidrGhURSeoqCrIIyiwAgRPr0wijc9wQf9nmWm\npxIo5fBnX3DYNoTwYddw1+svMfDvN/gyRh9XtQp4mEfz/8vrR1uIs14CNMxAFUZRAF1DPwijKDCJ\nCJ+aSwn7lx4hfP4w7AHoxtWvfc+SGd5YAbrKAgp1zni5GIxMTybcac+a9amom2lNVxzBj5/v4lw7\n2KGSiO/5ctc5LqQpET4VtMaVqCOEURQI/jXFxByoJqi/fd3fShz6jySklxVo8/nz7a+wfvYL7vGt\n35bLcYAf5XsSKGvcVHUin96xkPxrx+Bu0fhk23EKmYTrsrt45WBJm+8V4VNBV0QYRYERYp3iBaCp\norRKibWq0eukK+TAhy+yMfBr1vxfOE4NT1sqUZdUNvLg1Jz44iFWX/spTwfatE/fLLyZ+84Coh9e\nzOE2Rmsbhk+FpyjoKgijKBD8W1Qu9OutJfN8df0xXSGHv/mUY8Pe5NMHg6jZ9SZL4+vP1+ZnofL1\nwLphO8V/89IH1jx110CsaD+U7jN4MXwNT6/Ioi25PcJTFDRGzCkKBBcbTSa/vfE+B4pNXVBN7NeL\n+SW52tQFnYAb4+5wZtfB/DqjU03M29O47pHXeOZ6DywVCrrf9A9uHgbvr4bUvekE3xqGU4NWCv/+\ngL8GPcykXu39WtoTctsY4t9bR2YbrOIlk2hzWcpE16DTokkdKBPCKAqM6NzwaQ1Ri29lfdi9jHHW\nkLHuGWbe9iiPzh3L0EHX8NiqNGqwIejOSRy74wn2lF/k3qX9zuInn+etTz7j48+X8dnCt9lToANU\nDLxvEZ7LfiVFA2BD8EuHqJQkJMN/VTuY7VbXUGUUP+0M56Wb3Ru0XkHcb4dwviHUyFC2F46BNzHk\n5CoOF5t/z6WRaHNpy4SgM+hgmZAEggb06dNHAiRAevnllzvmIbVlUlG5pu
4PjVRr+GfBCmlkj2el\nmFpJkqQCacP0blK3ab9J56Vy6fAzfSRcbpf+KtZfmv5uX6n/Rxkd079mqEn9Rrpp4LXSOzEVkiRJ\n0vnfb5LsbadI688brqiQji+aIM39KV2qbbGlainhwxulmd+dbnRdlvTVAKTrfi/sgN5LklRxSHrA\nzln6b2y12bf4+vrKsvDCCy90TL+MaCALBi5hmeiKlJSUyDIBSKtXr774nehgmRCeouCiUnP6V+4J\n9cbNwRrXAcMZe811PLw+Ew1QcuB7YsbPYJAKwJUZGwoo2HgLbroyMtJKsR0yAm9bfTu9b5xD1tKd\n5F+MTmszWf3kc+wf/iIPBtkBWirzc1H63USYs+EiO4YtXsszlRtb9MY0OfvY5fwGP9zng9E+FOoi\nMoqs6OlsaXxDdRzv3RDIhDejaD1Pppq4924gcMKbRDW+WOWEh10pp/OaWwTSPBczfKorPsq7E1yw\n9HuI9Rn1fbxkZUIAdE74tKNlQtX6JYKuhNTB4VOFfQj/tyef76xziDtynDN2I5ky2hMVUHU6imof\nL2SzoFSh0pVw9IMH+cDyFbb/Op++dRJr4eyOKuWMGYbi36PN2MjHWxVM2j4SZwBKiNx0AvepV+Fh\ntGzCmVGPPNFiWyqPiTxxTzMnJB06SYFK2eg715ZyJimeJM8SM9Yaaik9k0R8kiclTS6WkLQ6dCby\nJB5//HFOnDiBJEnodDokSSIzM1M+v3z5cvbv3y+Hg3U6HY888gj33NPch2kj6iQ+mnk/hyat4aD3\nVv7v5oex+es7prkpL1mZEHQeHS0TwigKLipWvQIYCoA3Idd7E9LgnGU3N4gvRgNYAGhy2PrSHSzJ\nmsgTc2xZ8cBrSBs+51pHQF1KuZVdu2ZpmqL6XBLnVIN4LLCb/kB5Ilsj7bj62f7G2aP/BksnPB1r\niCqtNT5uP4bP0yU+N6sRe8Z8no7U3MWaUnKqHfBysWzmJIwcOZIvvvjCZMvZ2dlkZ2fXP8nenunT\np5vVq1axGsozf0XzHwslMIV/btOhU+qDWJeqTAg6j46WCRE+FRjR0Z5i+YHnGD1kODfe+yT//c89\njB/qy8Cxr3C0Apyvmovrvgj0y8xL2PPU1Ux7928OLn+RO+Y+ytKiIfSx07dTErkPp9vH4druPWyK\njVcwvi4O2FsqAQ2Zf3zAmqIApvo7tN9DlE4MDrHiZGpJq8sm1GlreG7+E/xyqsb89qszSKjyZbh3\n8+sfb7/9dgYPHmx2c4888gjdu3c3//ktUH7gOcb4h9fLhP8ABl/iMmEaDZm/vcH7clqkloqSinov\nvzqWrxf/wuWaKCtdpCUZnaon2m/2U3Al0Lt3b3kS/dVXX2339s+vnSYNf+GoVK4tlg6+GCApcJZm\n/JQm1UiSJGnSpM9Gj5a+SG+cbdGYAmntNH9pYYz5SSP/jnIp5vN7pdv/+6H07XefSa/O9ZQswz6X\nTrfWzTaS89MIyXHqJqmolevK9j0i+WIpBS6KlqrMbLvi0IOSs/cb0okW+rx8+XKjJApT/9na2kq5\nublmPrl1Lk+ZaJ7qyJek0bdvlgokSarN2Ci9/cZH0tKvXpPuHDddeutYqSRJklR+4Alp2P27pbJO\n7emFUVxcbCQLa9eu7ZDndKZMCKMoMKKjjWJp5C/SqsQyKeePeyQvkPo9tE3K09afr07+Urr7wZ+l\nUyZTODVS7qYnpNs/ipc6R/2dl9bf4CQNeNl8g2Q251ZI1/dZIO0xR1uWHpA++jbWzD5UScee8pKG\nvHtSakmNaLVayc/Pr1Wj+Mwzz5j1VHO5/GXCQIG0YmQP6dmYWkmStNKZr66Wwt/Q9ynnpxGS24wt\nkj4pMl16t29/6XJMlL1YRrEzZUIYRYERXl5essAvWrSoQ55Rc3KpNMURSTVssXSsrEw68uH/pL1F\n9eers+Ol9HJTd1dLOYln298gmaIqUf
r+mcelL2L1T9Rk/Srd2DNAejW6I3pQIR357zDp5pU5kraV\nK4sPfiJ9fKjUvGaLtkhzB90mbTFjtceqVataNIg2NjZSTk6Oec9tA5eVTJiieKN0vc1caa+hI9pa\nqVYrSZJUJh14zEcKXZKg93QkjZT4nI005Ie8zurpBXOxjKIkdZ5MiDlFwcWl/Dj/m/cY25nCl6tf\nZLginrWrj1HcYKrC2sOfvvamGrDGfagX7VQZtFV0RZGs+nkbkbHH+WfDZzz98K/0fn89C4M7ogd2\nhC/8EN+vnmV1Vgu5ppXxbDjmy8zhjq03qStkxwtLsHn7Qya7tH753LlzCQgIMHn+gQcewN3d3eT5\nC+Iykwk0xaTHHGTf0RQK1ABqinPLoeo0UdU+eBlymZQqVEoN2Rtf5UPLD/jj//zqEj4scHZXkXLm\n8suTlS5WmbdOlAlhFAUmaf9EmxriP7mPRREaHAY7ceTNO5k4/Hrez+yBa/NJkZ2O0mM+27OOsDDA\nAkvf6byxbhtf3Tmo/bJOG+M8lreXzSZ1+W5yTdlFuwDufvJGvM3IHS859jP7Rn3DVzPdMWfTDYVC\nwaJFi5o9Z21tzfPPP29GK23hMpIJXRFHv3yImfOf59u/0ynNO8ayJa/x5QcPcsf3p8GyG24UUCwX\n/9GQs/MTlubM49t3RnJqcwIVdWfUpeVY2V7+ebIds06xk2XiArxLwRWMp6enHBpZvHhxZ3dH0Ano\ndDopODi4Sej00Ucf7eyudR6lx6UPbgmTJr68XcppOI9VvFu638tDumd3mSRpkqWXXAdJS3MlSZK0\nUsFfD0l9sJZce3SX3Lp1kwLfiK8LnxZLv41zku493LkzoBdCUVGRkUysW7eus7vU7ghPUSAQGNGc\nt2hlZcULL7zQST3qZLR5bHz0Jv7Hf/lp8WTcG3roTv5Mumo80/wdwGIA97/mxk/rzqBFieuErzkj\nVVOQl8/5khJiX/bXh08L/+L7/Md4KqTD4g2Cf4EwigKTiP0Uuy633HILYWFh8t8LFizA29u7E3vU\neVRGvMtTKx15/LVb8GwSg3Yk7JFnGOsKYEG/B37kzqNvsfy0iQLq2jw2L/6dCatfJegytImS2DpK\n0NXoCkIvMI/FixcDYGlpycKFCzu3M51GJbHLV5Le/w5mDW7OitnQf/xwehiMpfVgHlm6mGEWJgor\naHT0f/57nva/DC1iM1yJA2dR5k0gEDTL9OnTGT58OMHBwfTt27ezu9NJVJEZk0+34OH0biYvRlt+\njkJVT3rYNPAvrD3wN/V1Wbsz1KtDOipoJ4RR7ATUajVVVdXodG3ZB/3i0LBP1dXVFBUVd15nWsDS\nUoWdnR1K5eUb7NDpdFRWVlFbW9v6xZ3E88+/QP/+/S9ZOVAoFdja2GJt3VGZnCocujtgY2/dTFit\nkoQ1aymb+QQ9GuT+a/MOsXpNDM7T72Vq37b3Sy8XldTWdtYelqYpKS3F1dVN/lutrr0kZUOpVGJr\na4OVVdu/f4Uk4mUXBUmSOJORSURkNCknT12yYcoNv62lqqoKgMCgYAICgzu5R6axt7dneFgIoSFB\n2NnZdnZ3zKaiopLo2Dgio2IoKxO74rYHgwb2J3zEMPp4927nlnUU//0YIx6x5afjHzLGsf540ZFl\nrCyayINTvIy9i5oEFvuOpXRLNh+2IZmmqqqKyOhYjkdEU1FR0foNghZRKBQMGtifsNAQ+vX1NjvU\nK4ziRUCSJPbs28+hw8c6uytXJN26OXLn/Ftx6tats7vSKvn551mxeh0VFZffwu3LgevGjWXUyOHt\n26iuhGOfPMbCg32Zf/+NDLXKIT7qFDWDbmHBjQOwM7pWi7Z4Ezf4r+L5lFVcb0Z9BYDy8nJ+/nUV\nJSWl7dt3AQDhw8O4/rprzTKMwiheBHbv/Uc2iL169iB8RBienp5YWOgDMvnnz5OTkytfb2Vlha9P\n3z
a5/hUVFaSnZ6LV6Vd8K5VK+vbxxtHR/J0cams1pKWfobq6voR/r5496Nmzh9mjLEmSOHs2m6Li\nYvmYg4M9ffv0kT+vORQWFpGVlYOEXjwtLS3x8emLjbU1SFBVXU1CQhIRUTFotVpcXJy5+87bsLO9\ndD3G/PMF/Lp8NVXV1ahUKsJCgvAbOhg7OztQ6MPqp9POoFbXb7Lr4dGLHm3YjUKn05GZmUVJab1y\n7dbNkT7evdsUaj5/voDsnHPy3xcmk5Wkp2fIMqlQKOjbx5tu3cy0FOhlMj39DFUNZLJnj+706tUT\nhUKBTqsj+9w5Dh8+Rl7+eQBunDqZoEB/s59hLpridGKikil19mdEkDcODTNRdcXsX/II30ljCcp4\nj//EL+H84dtwM9laPWq1mp9/XUV+/nmUSiXDw0Lw9x+KrY0NKPTTGGlpGdRq9GF2BdDbywsXV2ez\n+67T6khLz6Cist4DdXFxpreXZ5ve7dzcPPl7BrC1scGnX19UlubPxJWWlpGRkYmuzvQolUp8+/XF\nzt6ulTvrUdeoSUs/Q02Dd8XTw53u3fXfuFarIzs7m2PHoziXmwdAaEgQN0ye0GrbV4ZR1FZQUm2D\nk705NTsuLsXFJXy59HsA/IYO5qYbb0CpVKLVaolPSCIyOpacBsrHgKWlJQH+QxkWFkLPHqaVYlp6\nBpFRpkOy/fv7MCw0mP6+PiaFv6CwiIiIKOISEqmpaboze88ePQgLDSIwwA9Ly+ZLSlRVVxMdHUdU\ndCzFJSVNztvb2xEcFMCw0GAcHZtXijqdjsTkE0RFxZJ5NqvJeQsLC/yGDmZ4WAgeHvpSY+lnMli1\n5jd0Oh3jrr2aq0aFN9v2pcD63zdxIuUkNjY23H7rLNzdewGQl3+eiMho4hOSmp1f9HDvRVhoMAH+\nQ7GwaF7GKyoriYqOJTomjtLSsibnHR0dCA0OIjQkCHsTyker1ZKQmExkVIyRQTRgaanC328ow4eF\n0LNHD5OfM/1MBhGRMaScTG1eJn19CAsNZkB/0zJZWFhERGQ0sfGJ1NQ0zeTs2aM7oaHBBNXJpEaj\nYdWa38jIPIuDgz2PP/LARZ1vLt99Fz7PjSH62D2UvT6QGc77SHq6r1np/VHRsWzbsROAubNvZkB/\nXwCys3M4HhlNUnIKWm3T8kZ9vHsTGhLE0CGDTH7W0tIyIqJiiI2Np6KyaXTCxdmZ0JAgQoIDsLFp\nvihabW0tsfGJREXFGBlEA9bW1gQF+DEsLARX1+ZrCUqSROqpNCKjYjh1Oq3JeX2ocwDDwoLp17dP\ns20A5OXnczwimoTEpGbnXD09PQgLCcLfbwgWFhbodDq27dhJTGw8APcuuAP3Xj1Ntg8dYhR1VObn\nonbywLnDqxhpyNz0Pj+fcqV76X5WHx/Ke8tfxJySkG3l77//xtXVlZCQkDbd99euPRw7HomDgz2P\nPnQfKpWKqupq1v+2kYzMs/J1Dg722FhbI0kSRcUlcsKLUqnkxqmTCfAfatSuJEns3vsPh48cl49Z\nWVnRrc4zLC+voLqBMgkLDWLShOuavDwpJ1PZsHErGo1ewBQKBa4uzigUCmrUaqM5L08Pd+bMurmJ\nUi0sLGL12t+NvENnJydUKgu0Op3RRLytrQ1zZ92Ml5enURtqdS1/bNrCydTT8jE7O1vsbG2RgJKS\nUrmPAJMmXsfwsBAAdvz1NxGR0Tg7O/HIg/d2aJp4dXU1v/zyCwsWLDA5QGiOouJivlr6AwDTp00h\nMMAPgLiERLZs/dPo93ZxdkKhUFBdU0N5ef3Ivm8fb2bdMr2J8srLz2f12t+NfitnZydUFhZotbpG\nXrsDt865mV49jRVDdU0Nv/2+kfQzmfIxe3t7bG2skdAP7gyKWalUMu2GiQQGGHtjkiSxd98BDh4+\nKh9rSSZDggOZMun6JjJ5MvU0v/+x2UgmXVycUTYjk+7uvZg762Yc
HOwpKirmq2/03/GcWTMYOKA/\nFwc1CS96MtH+MJkL7fg2LJzUz3dw98CBBPZqXQn+9MtKsrNz8PcbwozpUwE4eiyCnX/vla9RqVQ4\nOXVDAVRWVVFZWSWfGzRwADOm39BEHjPPZrF2/R9GkR9XF2eUSiW1Go1RqNbVxZlb587ExdnZqI3y\n8grWrN/AuXP1kSxHRwesrazQSRJFRcXywEelUnHLjBsZOMDXqA2dTsf2P3cRHRMnH7OxtsbBQV+4\ntLSs3Cg6ctXocK69ZkyT9zg2LoEt2/6Un2dhYYGzsxMKoKq6xmgetl/fPsy8ZTo21tbodDq+/f5n\nCgqLjL5jU1yAUdRRmZtDbXcvnBoNWqsTPmPejf/hj7zrWJ66lds9lEANKas/JSL0SW4b1M5rc3QZ\nfH3tHM5/vY+X/Yv4OTyA3xefYsOkCrYt3YnX3XcR1E77wP7vf/9j4cKF3HzzzSxevJigoCCz7vvu\nh2Xk5Z/nqtHhjBt7NbW1tfy6cq3sHQ4ZPIhhYcH08e4tC0FVVRWx8YkcOx4pj/pvmn4DAX71htFg\nbAHce/UkfMQwhgweiEqlD2NotVpSTp7i6PEIsrJyAAgK9GfaDZPk55xMPcXa9X8AegUYPjyUoMAA\n2ehJkkRWdg6RkTHEJyYB0L27G3fdMU8fxkRvrH76ZQUVFZUolUpCgwMJDQ028m6LS0qIjo7jeGQ0\narUalUrFnfNvxaPOU9Jqtaxe+zvpZzIAvXc7IiwUH5++cl9ramqIT0zm6LEI2chOnDCeEcNCycrO\n4edfVgLw+CMPtCk811bKysro1q0bPj4+LFy4kLvvvtss4xgTG8+WbX9iY23NU088jIWFBfGJSWzc\ntA3QhzhHDA8jKMAP27oQsCRJZGSeJSIyhuQTKQB4eLhzx21z5GcWFBTy8y8rqa6pkUOyoSFBuLnV\nb6taWFhEVHQsEVExaDQarK2tueuOefSoCzXV1tayfNU6srP1cjJ40ECGDwsxlsnqauLiEzl6LEKW\nyenTJhsZxl2793LkaARgmCYYxtAhg4xk8mTqKY4ej+Ts2WwAAvz9mD5tsvyc1FOnWbv+DyRJwt7O\njhEjwggK9MfB3l7+TrJzzhERGUN8QiIAbq4u3HXnbdja2MgGZmT4MK4ff20bftl/R9k/T3HTd31Z\nMPI8a979jR6z5zH3v68ytVfLvqJOp+N/730MwOyZMxg0sD9HjkWwq84gurq6ED48DH+/oXKGrU6n\nIy3tDMciozh9Oh0AH5++zJ11sxxJyMrOYfnKtfLvPTwshJDgQJyc6ufd8/LziYzSRxd0Oh0ODvYs\nuPN2+f2pqq5m2S8rKSgsAvS/1bCwYDw93OXfq7yigti4BI4di6SishKFQsHcWTfTv78PoP+9Nm3Z\nTnyCXn/07u1J+PAwBg7oL/e1traW5BMnOXosgty8fABGhg/n+vFj5b7GxSewacsOQP+uhI8YRmCA\nnz7EjHEi44mUVEDvNc6fNxtLS0uORUTx187d2Nna8vSTj7T4m7TZKOrOrWTykBfw25bIJ6PtG55g\n0wvvcmrkDK4fPRx/T3uUQPHfTzBn8xzWfjgWZwB1Jjt/2kS6lSWVRbaMWXA7w1wuPMyh02hApUJZ\nfpDHgx7Hc/NhFvpZURP/PvPe78uXP8zBox2iKG+//ba8gFmhUDBz5kwWL17c4o4CAF989R0lpaXc\nMHkCoSFBbN66g9i4BKD1uY+qqirWrN9AVlYOSqWS+xbcQY8e3UlITOaPTVsBvaGbOmWiyfCJJEns\n3LWHYxFRAEyZdD1hocEUl5TwzXc/o9Fo6NmzB/PmzJRHbs2RfCKF3//YgiRJDB40gFm33IQkSXz/\n06/k5eVjZWXFvDkz6d3b02QbBQWFrFi9jrKycuzt7XjkwfuwsrJk1+59HDmq93gnXHct4SOGmWxD\nrVazfsMm0tLOAHDX/Hk4OTny
2ZffAvDg/Qvo3sAgtDcGo2jAx8eHl156ibvvvltW/s1hGPl37+7G\ng/fdTf75Ar7/8Rd0Oh1eXh7MnXWzbAybIzYugc1b9UohKNCfG6dORqfTsfTbnygqLsbO1pZ5t85q\nMTSUm5fHytXrqayswsmpGw8/cA8WFhZs3f6XPIqfOmUiIcGBJtuoqq5m7foNnD2bjVKp5N4F8+nZ\noweJSSfYsHELoFee026YaDLUK0kSu3bv5egx/aBu0oTxDB8WSmlpGUu/+5HaWg09e3Rn3tyZODiY\nHtWmnEzltw2b0el0DBzgy5xZN7P+942cSEk1e/6oPdGWF1Jp5YqjRTnFNXY427WueKqqq/noky8B\nuOfu+fpB84o1gH5wOHPGjSYHXZIkceRoBH/v2QfA6FHhjL/2ampq1Hz1zff637lbN26bNwtXF9Nb\npGRknmXNut9Rq2txd+/FPXfdjkKhYO36DZxMPY1SqWTmzTcyaOAAk22Ul5ezas1v5OWfx9JSxUP3\n30O3bo4cj4jiz527ARg5YhjXjR9rMpKj1WrZsu0vebBzy4wbGTpkEHn5+Xz/469IkkTv3p7MnXWz\nyVAvQHRMHFu3/wXooxFTp0wk9dRp1qzbgFKp5IX/Pm3yXriAijZV6ZGc05xlw6pYGiYN6/KPcaTv\nPTw861oC6wwilcd47bE0Hn6pziBSybHFt/KZ9XTuWfAA949L4ckFP5D+L5bjKFUqlJpsNr76IZYf\n/MH/+elHU9YBD/O0xcs882fxhTfegIZjB0mSWL9+PUFBQcydO5eEhAST9ymU9QJQUVEpj5jGjR3T\najKAra0tc2fdjKOjAzqdjoioGACOHNOPxvv17dOiQQS9AZ9w/ThZoI8ei0CSJKKi49BoNNja2rRq\nEEHv0U6aMB6AEympFBeXcCYjk7y6kd0tM6a1aBAB3NxcmTdnJkqlkoqKShKTklGr1URF6z9X+Iiw\nFg0i6MNxs26+SfaEjkVEGr9kF3mKPC0tjfvvv59Bgwbx/fffG4V4G6Ko+42kujBpRGQ0Op0OR0cH\n5s6+pUWDCHpDeO3YMQDEJyRRUVHJydTTcmh09swZrc6V9OrZkzmzbgb0Hn7KyVNUVlbJg7SxV1/V\nokEEfWLF3Fk3062bo14mI/W/3dG6qEXfPt7cOHWSSYMIepm8fvy1DBk8SL5XkiSiYmKprdVgY2PD\nrXNaNoigDxtOnngdoA+5FhYVdWqFFQsHVxytAAsHswwigLaBvKgsLOToT48e3Vs0iKD/HkeNHM7w\nYaEAREbFUFtbS0JiEpWVVVhYWHDrnFtaNIign5ucMX0aAOfO5XI2K5vCwiJ5KmPyxOtaNIhgCMvP\nxMbGhtpaDdExcUiSJA98hg4Z1KJBBH049Mapk+RlNUfr9NzxiGgkScKpW7dWDSLoDeE1V48GIC4+\nkcrKqjbJRZuNYnmOjlueHUPxhlXENrCKRdHHcR3V32j/qpK9S1jh9xSTDSlYJQd47/sapk3wwgKw\nGzyF4OgP+CWlaXKH2Why2PnJUnLmfcs7I0+xOcHQKQdGPTKRPa/+QW6LDZhHcw61JEmsXbuWoKAg\nbr31VhITE5tcYzBYOp2O2Lh4dDod1tZWjBge1uTa5rC1tWXEMP218QmJZGSeleP7V48ZZVYygUKh\n4JqrRwFQWFTM6bQzxMTqPYOwkOBWDaKBkOBA7OvCWFEx+qQa0Icp+vv6mNVGjx7dGTpErwwjo2NJ\nTDqBWl2LUqnkqlEjzWrDysqS0SNHAHoDXdlMAsHFpjXjaFH3O2l1OtRqtTwaHjE8TA4BtcaIYWFY\nW1vJshRZN5jw8enb6oDEgFeD3yoqOobYuAR0Oh1WVpaEjzBPJm1sbAgfbpDJJDLPZsuh1zFXjTRb\nJq8eo5fJ4uISTp1OJzpGnwwRGhJkdtZ0cFCAfG1UdFwrV196aBok0NTUqEk5eQqA0SNHmD1nfd
Xo\ncH0OQE0NiUkniIzSv5d+QwfL2ZitMXCArzyoioyKISpG34ajowPBQS1Hwww4OjoQGqIfVEXFxHHq\ndJqcdHf1VaPMMkxKpZIxV+nlIis7h8yzWSTUTd2MGBHWqkE0ED58GFZWlmi1WmLjTTstzdHGijbl\nnEpyZNz8hzj46UJWxb7F6NH2QAmJx6wZ+VTDBIxK4lbtoPu0bzCIt+ZcBNFlzsyzrXtpVC54WWWw\nN6UC/FqYkK45w5ZP3+Xb3w9wqlSDBCi6jeGd39/GatEoJv5UTg/HL/g/tSf/PXicm+tusx4wiYD4\nTzhUfDc3OzdtdvHixaSmpqJUKrGwsJD/3/jfFhYWHD16tGkDdeh0OtasWcO6deuYO3cuixYtYsiQ\nIYCxUYyr8xID/E1ncTZHUKA/e/btR62u5eAhfT/cXF3w7m1+vahePXvi4eFOTs45Dh89Jk/Uh4S0\n7Bk0xMLCguAgfw4eOkpsXIJsjEJb8S4aExIcSEJiMufO5RJZZ1iHDB7YpgX4Q4cM4q9du+sUSX1y\nzo4dO8jI0IdWJUmS/2vp79b+3fBYc5mQDTEYxyVLlhiFVRvKQUrqKXkgEBRg/tIBKytLAvyGEhEV\nQ0xcAoV1cz2hwebNbxsIDQnk1Ok00s9kyok8/n5D27TcIjDAj91791NbW8vBQ4cBfSZj3z7mFw3v\n2aM7Xl4eZGXlcOTocTlRoi3ypFQqCQ4KYP+Bw8THJ+LtfXnVUNNq6o3iqdPpSJKEjbU1QwYPNLsN\nB3t7Bg8aQPKJk0RGxZCXr4/etF0ugti2YydJySnY2OhzBkKCAtuUxRsSHMihw8eoqKiQ55d7e3nS\no4UM+sb06+uNs7MTxcUlHDh0hNpaDRYWFnJymjlYW1vh7zeUqOhY4uOTGD/uarPvbZtRrDlDbGUo\nN/cfyb3hJbywPJolo8fgUJlGZE0ItxvlN5QSt6eW0AfqR3ya8gIqscTCMGBQWmKlqKSgtIUyVxUR\nvD5pLvuuXczzrw/j57n3kf3uMb6Y0BffXq5Yf30G6WsT99r5EKg9StR5HTc7N/1ht2/fzpEjR9r0\nFbSETqdj1apVrFmzhnnz5vHqq6+iVBiUYb1CNSSYmIudnS1OTt0oKiqW23B379XmUJGHey9ycs7J\nyy7sbG3bvODdo27X9ZqaGtlguLu3HLYz1QZATXXd5+nVtu/E0tKS7m5uZGXnGGWu/f7776xfv65N\nbXUEjY1j2DD9gnKtVid//05O3dpcicfDwx2iYoyMc1vlqeH11XV9aWsbtra2ODs7UVBQSLXhN3Tv\neQEy6U5WVo6clWpjbY2zs1Ob2wCMMlsBDhw6Ujev5iiH5ROTT8iJZ9eNuwYLCwvO5eYRF6/33EOC\nA+nR3Y0atZp9/xwE9CHhQQP717V5lMrKSro5OjIyXN9mUnIKZ7P0iUPjr70alUpFbm4esS222ZtB\nAweg0dZHFDR16xB79Oje4vx0899BL5JPnDSSi7a+l4YlQjpdvYy6t1EuXJydsbG2prqmxkhXtQWF\nQoGHey+Ki0tk/eDs7GR2RMWAh3svoqDVgWxj2vTNa3LiyAu7hh6qnkx+bAJPPfQl/7wxhomF0WQP\nGY9ReoOuivwCBT2d6x+hsLRGha7BtI8WrWSBpaWpF6mG+Hdv44uAZZx+awz2VOIw6ylm57kwqF+P\n1ncSt3LFy66c5MJaaGav9ObW/rQHOp2OTZs24evrSx+fgfKxf0N7zJQ0aeMCGm1e57WtoWbbuJAP\neBkU6E9LS2PdunX06KlX3Dpdvcy1W/fb3FD7PLljZLLtrZq6JTomjvMFhXh5eshGMS3tDMcjowH9\nvL6FhQXnCwrkZST9+vahR3c3atW1RktLDEYxJjaO/PMFeHl6yEYxLf2MnMg29pqrUAHnCwrl+/v2\n8aZHdzc0tfVtSpLEoIEDjDxFRRs8sma+haZH2vhdNvtaXt
B7aXxTh8hJB9IGo6gl63ASA0fNxgIl\nbtc9yUzVDD7Zkkmw4wm8g+YZGymlCltLDWer6+fiLF1646T5k3LD4EhTSaHakT7dTYRtalL45bNs\nZmwNxR5AV0T84XJ8J7u0bhABdGoqNJbY2zT/lQ4bNgx7e3u0Wi06nQ6tViv/1/jv/Px8ioqKWn2k\no6MjTzzxBM8++yxubm78uGyFviuScaHttiBJUpNRsGF03hYat6FW16LValtMimjSRjPPbevnae76\ntrYBUF3V9B5HR0fc3NxkhaBQKOT/Wvq7tX8b/i9JEmlpTRcfN8eUKVNYvHgxI0eOJPlECvGJJ4wL\nrtd5221RXqa+u7Z4/M230TZ5kiSJqup/L5ON26itbbtMVpmQnccevr/JsWk3TGLaDZOMjgX4DTVa\n7gT6dcOLXvq/Jvc/+tB9TY5NnTKRqVMmGh3z9xuCv98Qo2P29k3bbDinqKyTg8bvqTk09x1UV9eY\nLNTQfBtNn2vquzWFRqNpUoCiuXZbo7EsVVVfyLvS9udCWxJt1GnsiR/EqN51dtRxJE8+6sO+V19n\n5UE7Qvs1dm0d8PW34kxO/Zeq7DmGmweVkpRT19mK08Rpw5kZ5AjVafz1+wHOGeUn6NBY9sLfS//D\n1iR9zyfq//DhVDNT7qtzSK/tg5+JBbRff/01e/bs4Z9//uHAgQMcPnyYY8eOERkZSXR0NHFxcSQm\nJnLixAmefvrpFh/VrVs3Xn75ZdLT01myZAlubvoJbmVd9qlOp5PnABMSk83rfx3pZzLkOUBf336A\nfnTalvqZDSfxfX30bWg0GvmYuRgmvb29vXB1cQYgMelEG9vQf34rKyv69vWW22jL6qBz53Ll9VOe\nnvXh2Pfee4/z58+Tn59Pfn4+eXl55Obmcu7cOc6dO0dOTg7Z2dlkZWWRlZXF2bNnyczMJDMzk4yM\nDM6cOcOZM2dIT08nLS2NtLQ0Tp8+zenTpzl16hQxMTGt9m3KlCkcPnyYbdu2MXKkPnlIqdQrea1W\nR++6wgWVlVXy2kxzMXx3fbx7ywlSbZUnw/X29nZypl9CYlKbvv+MzLPyHKBvXeJO+pkMo2IDraFW\n15JyUr+mrL+Pfq8lrVYrrzMzl/rv5PKaTwTQNPAUvet+i/z88/K8oDnodDoSk/TfgU+/vnK+gmFt\nsbkY3u0e3d0uWFedSEmVI3AGPZNyMhW12vydYMrLy+vXLNfJVkVFhVGxk9aQJMlIV7WF1o1i2V4e\nCwtl4uxnOD56In1l39Ia/0c/5O7a73jrZCBDmqyXdmL4TT05fjgH2c5Z+/PkJzcR8/H3HEiN5/cP\nf8HpxY+5pZcS9amVPD3zRh5vuITCeiiPLhnJjne+ZM2yt3ji9SJe3rqEEeYOfoqOscfhFq5qOSP5\nX+Hk5MQrr7xCeno6b7zxBq6uxgbbMKco6SRCQ/QT3znncpst7WaKiLpwj5eXByNHDJczEA0ZpOYQ\nn5BIbW0tFhYWjBwRJgtbZFSM2cqwoKCQtHS9sA4LDSE0NLiu7SSzPT2dTidnxwUG+DF8WAigz0A8\nnZZu9ucxLE9xdXXBy9PD7Ps6ksmTJ3Po0CEjY2jAokGiTa+ePfDy0vfZsKTBHLJzzpFTl3kcFhos\nL5+IjU0we/spjUYjl7wKDgpgWJj+N8zNy5czSM3BIJOeHu6MCh8mV2NqWLWkNRISk1Cr1SiVSkaM\nGCaXN4uIjDZbJguLiuQF7KEhl+6OLqbQ1s0pqlQqfH36yvOpbZGLU6fT5IIKw4aFytWvIqNizJ62\nqayqkge3oSFBhIXqddXp0+lmbw0lSRIRUdGAPpt1xIgwlEolarVaNlDmEFW3nMPGxoaR4cPkko5t\n+U6ysnPkQgBhbZSL1o2ilTfjbxxGv1FP8tLknsY3OI/l3V1H2Pfd9GYK3yrxvvW/eK1aSf2KCyVu\n17/Lhg+uh1P5+D62ih
WPDcUasPJfSGzGT4yxa/gyWNH//hVsevNGhk9+lq9Xf8S8/uZWxdGR8ccv\nWD12D4Pbodxc45fUycmJRYsWkZ6ezuuvv46LibVADbMO+3j3xq2uNuDGzduorKpq9p6GRMfEyd5c\nWEiwPgPRX5+F9c+Bw2TVTfC3RG5ePrv37gdg6JDB2Nraygb6TEamvO6xJWpq1GyoKxjg6OjAgP4+\nBAX4YWFhQW1tLRs3b2t1jlaSJP7auVtO0w4NCaJnjx6y57R1+1+UlTWt29mY5BMnZcUeFhLU6bt/\nG4zh9u3bGTVqVLPXKC3q5UCSJPlFTTmZapYhqayqYuNmffUbN1cX+nj3JiQoUD63ZdufrSpASZLY\nuu0vuQZmaHAQvb085bT9jVu2G5UPM0VsXALJJ04CeuNsaWlJYN2a2wOHjjRbt7Yx+fnn5UXnQ4cM\nwt7OTlbEmWezjMoXmkKtVrNho14m7e3tm5QXuxwweIoWFhYoFArC6t7LqOhYs6I4paVlbNuxC9BH\nD7q7ucrvdlFRMbv+3tvqAEOr1bJx0zY0Gg0qlYqAAD8GDugvh143bNxilMxmisNHjsmVisJCgrG3\ns5OzaP/es4/8ZuqmNibzbJacYW+oa2v4TpJPpMhraluisrKKTVu2A3qv1zAANZfWjaK1L7Nf/45v\nF07EvckMpBLHQeH4uzYf/1f2vZfPJ2/hw73FRsdVroMZM3k8wZ42DTpQTfKeDEL8m2aeWTj3wbeX\nTdsWVVZG8tUvQ/noaT/aswSrs7MzixcvJj09ncWLF+PcqFZgYwzV49VqNQqFgimTJ6BUKikoLOKX\nX1c1W3hZf30t+w8clisz+PTrg9/QwYB+LZiTUze0Wi0rVq8nPjGpWYUoSRInUk6yfMUa1Go19vb2\nXDv2KkA/kjMsyP179z7+3vOPySyt3Lx8fl2xmty6avNTJk2o28TTluuv05fSSj2Vxpp1GygubloM\nHPTloDZt2SF7eOHDw+RScJMmjMfSUkVZmX77nDMZmc2+yBqNhqPHI/n9j82APhMzNCTIqDBwW+ai\n/i3mGEMDlqr6JTi1tbX4DR2MTz994eOt2/9i/4HDJkNM2Tnn+OXXVRQWFqFUKpkyeQIKhYJu3Rzl\nBf2JSSf4bcOmZouBg74Sz28bNskhtWuuHq2vpalQyPVHi4qKWfbrSrJMeIxqdS0HDh2RK+v07eMt\nz5tdNTocZ2cntFotK1evJy4+0aRMppxM5ZcVa6ipUWNvZyd/hv6+PvKC/t17/2HX7r0m59fy8vP5\ndcUaec3uDZOv1w/Q6taHWvyrpJWLh2FAp9Vq9YOl0BB5veD63zdyLCKq2YIQkiSRfiaDn39dSXl5\nOZaWlkycMA7Ql3001AU+FhHFlm1/NlsMHPQ1eVev/U2O0ky8fhw21tZYWFgwZZK+IlDOuVx+XbGm\n2WLgoJ8D3bV7rzzwHjJ4kDzNM+7aq7G3s6OmRs0vK9aYLBKv0+mIi09k5er1aLVanJ2dGD1aX9w/\nwH+ovNRn89YdHDh0xOS7kpWVzbJfV1JUVIxSqWTypOtRKBTy9eboh47fJaP8KG8sWE7Ylx8xrWcL\ngqrO5HiKPaEBruYl0bRIGQdfe4St137Gm+PaJ3b6ySefUFJSwlNPPYWTk/kp4zv+3EVEVAyuLs48\n9MA9KBQKkpJT+GPTVllpuLv3wt9vCLY2NkiSRM65c8QnJMujMy9PD+bNnYm1db2X3LBkGui9t8AA\nP7l6RUlJKbHxCXLRXzs7W26bO4teDaqe1NbWsnb9BrkItKWlCr+hQ+TtZGpqaozSzUGfqNB4Me++\n/QfZf+Cw/Levbz8GDeiPSqVCq9WSfiaDEymp8udtXO8S4HRaOut+2ygrgB49uhPoPxQ7OzskSSIv\n/zxx8YlymLZHj+7cdussHOzt5V0GlEolzz716AXttm0u5eXlzJo1i8WLFzN69Giz7yst
LePzr/Sl\n6Azlq6prali95jfZCFlZWRHgPwQPd31tyarqank9J+ijDjOmT5WLH0BdGb+/98qVUAAGDuhPf99+\n8vd/6nQaJ1NPy8poWFgIkyaMN/r+k0+cZMPGLfUy2asn/n5DsbU1yGQu8QlJskx6eLhz29yZRoup\nCwuLWLF6nWyYHRwcCArwk3dOKCktJS4uUY4U2NracNvcWUYp+xqNhrXr/yAtXb/WVKVS4Td0ML29\nPFEqldSo1SQln5A9EkAuoajRaPj086VU19Rw3fixjApv530VO4Bz53L54eflANw5/1a8e3tRXl7B\nitXrOH++ANB/T4EBfvTo3h2FQkFFRQVxCUnyeUtLFbNnzsCnX1+5XZ1Ox6Yt2+U5QaVSyZDBA+nX\nt49cEDzlZKpcLhH0mbNXX2U8uGu4gwfoa5cOHTJYXxBcp+NsVjaJSSfk99bHpy9zZs4wWlKScy6X\nVWvWU1WXGOfs5ERQoL9cY7WwsIjY+AR5PrpbN0duv3W20Y4b1dXVrFzzmzztZG1tRYDfUHlpWlVV\nNQmJSfI2UUqlkptvmiZ7qn9s2kpCYjI9e3Tn/nvvavE3uThbR9XkkVbuio9bG2sFXCiaYjIKrOjT\ny/zMq44iNzeP73/6FTCuK3kmI5Ptf+6ioKDQ5L1KpZLAAL86T6rpYv+ysjI2b/vTSLCbo493b6ZN\nndSkAj7oR6i7du8jKjq2xfCns7MTkyaMl+d9GhMTG8+efftbTP6xtrZiVPgIuQJHY3JyzrFl218t\nJhkoFAqGDhnElMkT5PVQP/28nMKiYrMq4Hcmq9f+zqnTafTo0Z275s/D2tqK2tpa/ty526RnZcDN\n1YUpkyc0uzhekiSOR0az/8AhWfE0h62NDWPGjGLEsNBmv/+MzLNs37GT863IZID/UCZPvK5ZmSwv\nL2fz1j9bnR/27u3FtKmTmi1BptVq+XvPP0RFx5osmwd65Tpxwjh5N4zDR47z9559KBQKnnj0gVbL\nxF0KSJLEDz8vJzc3jz7evZk3dyYqlYrq6mq27dhJ8omTLYY/e/XswbQbJjW7FlCSJPYfPMyRoxEt\nhj/t7e25btzVTXY9MXAy9RR/7tzd4gbIhmL04+vWfjamsKiILVv/bDW07uvTjxunTmr2t6utrWXH\nX38Tn9B8ZMxAdzdXpkyeICeRpZ/JYMUq/drl1mr7wpWyn+IlzopV6+RsqmuvGcOwsGBs6rzCzLNZ\nREbFGAmLjbU1gQF+BAb6Y2/XumEvLCoiKjqOEydO1m8yrFAyaGB/QkKC5N0QWsKwC0JsXAJVDeY6\nPT3cCQ0Jxqdfn1bn7gy7IERFxxop1m6OjoQEB+I3dHCrlXwMuyBERsUYZWZaWVri7zfUqKxXTs45\ntv+5S04+ueuOefL85KVIWnoGK1frX04P915MmjAeT08P/ei/spK4uAS9N9wgZOjd24uw0GC8e3u1\n+v1rNBpOpKQSFR3bZBuv0NAghgwa2OqicINMRkXHGmX7WVtbE+jvR1Cgv1lp/oVFRUTHxJGcfNJo\nk+FBA/oTGhJkVoWT6upq4hKSiImNN5JJD3d3wkKD8OnXV45oHI+IZu8/BwD9HOUtM25stf1LhYYF\n/vv19eb6666Vt/YqKysjOjaehMRko2Qqn359CQsNxsOMIh5qdS2JSclEx8Ybzdl3d3MjLDSIAf19\nWw0r6pcindHv/3qufsrH1taW4KAAAv2HmlWCLT//vH6+NLV+/1cLpQVDhgwkNDgIl7qM9paoqKgk\nNi6hbv/X+nelj3dvwkKD5UhXdU2NfrC+dz9arZZu3Rx5+IF7Wn0HhFG8CFRVVbFyzW9yGMzCwgI3\nVxeUF3H+64pCkqiqrjYauTYX1r0UaVjBH/ShIjtb2wtcJS3QaXUUFhXJHqV+qmGWvM3S5cKBQ0fY\nu++A/Lezs5N+ezYhFxeETquloLBIjn65uDhz262z
cDZj6ksYxYtEdU0Nf+/eR3xCUoshIUHb6O7m\nyjVXX2U0z3apcyLlJP/sP2QycUHQdlQqFf5+Q7j+umvlvT4vN2Ji4zl4+KjZSyAEraNSqfQ7dIwb\na3YhA2EULzLVNTWcTD1FRXmlUZWbS4mKuglvezN3z+gMLC0t6dmju9FGuJcTkiSRlZXDudxc1Gau\nMbzYHDl8mKqqKiQJ+vbtg2//i7WTvfkoFQrs7OwYOMC31e23LgcMm+Xm55+XM2kvRfJy8+jZylZl\nnYlSocTewY6B/X3N3lnDgDCKgib85z//wcrKirfffruzuyLoRHx9feWSdi+88IKQBwEAmzZt4q23\n3uLQoUOd3ZUO4fJYzCO4aOTk5PDVV1/xxRdfmFXrVXDl0nDLIDF2FoBeDl599VUOHz7Mrl27Ors7\nHYIwigIjlixZQlVVFWVlZXz66aed3R1BJ9IwLP1vd3kRXBmsX7+e6OhoAN58883O7UwHIYyiQCYj\nI4Nvv/1W/vvTTz81q+ya4MqkoVEUnqJAp9OxaNEi+e89e/Zw4MCBFu64PBFGUSDz+uuvGy3yLSws\n5Msvv+zEHgk6ExE+FTRkxYoVJCYmGh27Er1FYRQFAKSmpvLzzz83Of7hhx8aLZwWdB1E+FRgQKPR\n8NprrzU5vn37diIiWt9Q4HJCGEUBAIsXL252/WReXh7ffPNNJ/RI0NmI8KnAwE8//URqavP7XF5p\n3qIwigISExNZuXKlyfPvvfeeWVvHCK4shFEUgH6HnzfeeMPk+T/++IP4+PiL2KOORRjFTkNDwals\nTJfP1lGSkU35RYhavfrqqy2Gx7Kysvjpp586viOCSwoxp9jJaAo4lW1aQ1wsHfHNN9+QkZFh8rwk\nSSxZsqRjO3EREUaxU1CT+vPTLNpbJm+Tpc5PYO+WzeyOzkG/z4ESspfzf6/tIK8DhT46Oprffvut\n1ev+97//ifJ0XQwxp9iJqFP5+elF7C2TNQT5CXvZsnk30TmGnVA6XkdUVVXx1ltvtXrdmjVrSElJ\n6ZhOXGSEUewE1PH/Y/7akSxcMBhrdBQf+Yp3ViRj4e5E5g+zGXX7MtLU4DTqKR61WMx9K3PoKJX0\nyiuvmOUFpKWlsWLFig7qheBSRIRPOws18f+bz9qRC1kw2Bp0xRz56h1WJFvg7pTJD7NHcfuyNNR0\nvI748ssvyclpftPphuh0uiun4pEk6HA01dWSRv6rSFo3rps0d1e5/Pe2+f7S1KWpUo0kSVLRFml2\nrwHSc0cr9Kdzl0kjPR6WDlU017JWKjr+g/TZzpwG7Teg+Lj03Rc7pZxmT0rS4cOHJcDs/4YMGSJp\ntdoL/RoElxkhISHyb//oo492dne6DkXrpHHd5kr1KmKbNN9/qrQ0tUb/55bZUq8Bz0kGFdGyjrhw\nysrKpB49epitH1QqlZSent6+negEhKfYkajTWP7YfF74YCGzZywhqhIo2c/SI+HMH2Yott2Nq1/7\nniUzvLECdJUFFOqc8XKp2/Or5wTutF/D+tSmiS7ViZ9yx8J8rh3jTrObUDmFMMl1GXe9cpCSZk6/\n/PLLbfo4ycnJrFu3rk33CC5fRPj0YqGlpqZ+g++S/Us5Ej6fehVxNa99v4QZ3laAjsqCQnTOXhhU\nREs6AnQUR/zI57vO0dwW4iUR3/OliXOffPIJ+S1s+N0YjUbD//73P7Ovv1QRRrEDKdz5Is8fCuFm\n72SicipRS0BxDAeqg+gvb0ChxKH/SEJ6WYE2nz/f/grrZ7/gHl/DfnCODPArZ09Co8oy6hN88dBq\nrv30aQJNFoG3wHvuOyyIfpjFh5tO2O/YsQO1Wk1VVRXl5eWUlJQwaFD9FkwPPPAAZ86c4fTp05w8\neZLk5GSuvvrqf/elCC4bRPi041GnLeex+S/wwcLZzFgSRSVQHHOA6qD+1KsIB/qPDEGvIv7k7a+s\nefaLe5BVhCkd
QesDZ6eQSbguu4tXDjYdNi9cuJDa2lqqq6upqKigtLSUe++9Vz4fEBBARkYGaWlp\npKamkpyczLPPPvsvv5HOp+UtiAX/gnIS/jiAxagnCLnzv6TfqT+qySylSmmNqvFwRFfIgQ9fZGPg\n16y5PwiHBuctlWpKKo3HcsV/v8QH1k8RNbCVzVSV7sx4MZz/Pr2C5w7ej1eDdpVKJUqlEktLS/lY\nQ0Xo5OREnz592vKhBVcQwih2NIXsfPF5Dg1bzTN75rMhRI2EhqrSKpTWqiYei67wAB++uJHAr9dw\nf5CD0fnmdIQ8cP5ur+mBs4U3c99ZwLJJizl8+CNGNdhyUKFQoFKpjHaqt7KyMvq3t7f3BX3ySxnh\nKXYUmlyOHSqm75gBNNzlTeXSj97aTM5XNzioK+TwN59ybNibfPpgEDW73mRpvOGCWvKzVPh6NNw4\ntZC/P/iLQQ9PopcZv6B9yG2MiX+PdZmth8Aahskux30KBe2HWJLRwZQn8McBC0ZdFcKdW9I5umQk\n9qhw6dcbbeZ5jFXEYb759BjD3vyUB4Nq2PXmUmQV0ayOqB8439XKwFnpPoMXw9fw9IqsVpN1GsrB\nlaofhFHsAMoivuHlp5/j0zgtlTve5sV3t5BpWM3gNo47nHdxMN8gftXEvD2N6x55jWeu98BSoaD7\nTf/g5lE3tKtJZW96MLeGOdU/oCKO3w45c0OoE2bhGMhNQ06y6nBxq5c2FPqGSlHQ9RBzih2LJvcY\nh4r7MmaA8ebIbuPuwHnXQepVRAxvT7uOR157hus9LFEounPTP24YVESzOqJNA2d7Qm4bQ/x762ht\n3CyMouCCcBz2IK88FIy18zhe/vhj3vm/aXgbIhCqgdy3yJNlv6agt5M2BL90iEpJQjL8V7WD2W76\nyyujfmJn+Evc7N7gASVJHKkYwGDXZtNrmsGBAWG2JEdkUdPKlQ2VnzCKXRsRPu0oyoj45mWefu5T\n4rSV7Hj7Rd7dkolh3KwaeB+LPJfxa0rdEZtgXjpUWa8fJImqHbOpUxHN64g2DpwdA29iyMlVtDZu\nFkZRcMGUJ+8n1/MaBjs0PqOi/4NLmbVnEcvPtLIYviaRpUvO8fxn03FpcFhdlEGRVU+cLRtdXx3H\nezcEMuHNqEaVclQ4edhRejqP1oq1ifCpwIAIn3YUjgx78BUeCrbGedzLfPzxO/zfNO/6BA9Vfx5c\nOos9i5bTmoowpSPaPHB2GECYbTIRWS0Pm4VRFFwglZw6dBrrwOF4NhfOtxvG4rXPULnxMMUm29CQ\ns28Xzm/8wH0+xvlQkk6HpFChbCyT2lLOJMWTlFbSJMVakrTodK0rNhE+FRgQ4dOOpJzk/bl4XjOY\nJuNmwG7YYtY+U8nGFl030zrC1MC5Ou49bgicwJtRjbLRVU542JVyOq/lYXNXMIoi+7Qj0BWTcCiP\n3ncOalbgAXAexSNPtNSICo+JT3BPM2csnTxxrImitLbRCfsxfJ4u8XmTOzSU5lTj4OVCY+eySddF\n+FRQhwifdiCVpzh02prA5z0xlQbjPOoRWlQRLegIUwNnbekZkuKT8CxpMmxG0upobdzcFYyi0Hod\nQeVJDmS6EB7UvflF9f8SpdNgQqxOklpi7ui9moyEKnyHe2NySWMdInwqMCCMYsehK07gUF5vxg4y\nOWz+V+gHznlNBs72Yz4nXcri+3GOxic0peRUO+Dl0vKwWRhFwQWhzY8homIw4wfbt37xheAUzC3B\n5/krrtS86ytT2JXkzZzRbq1eKsKnAgNiTrHjqDx5gEyXcIK6d8Sw+QIGztUZJFT5Mty75WGzMIqC\nC6IsaR/5g28h3LWjnuDOlGfCiV8fQ7kZV1cnrmKL+xPM69/6CyjCpwIDYk6xo9CSHxNBxeDxdNS4\nua0D58qUXSR5z6G1cbMwigLz0eWz47nJzHjnCPF70wl4cDreHTMIBKDXjLe4Ne
5jNp9rTVkV8/d7\n2xj7wf0MMKM/InwqMCDCpx1FGUn78hl8SzgdNm5u08C5msRVW3B/Yh6tjZuFURSYj66MtLh87KTd\nLMtbwAe39+mQ+UQZu3AWfujLV8+uJqu5ar76TlG44wWW2LzNh5NdTF1khAifCgx0BQV4MdHl7+C5\nyTN450g8e9MDeHC6d4fqCLMHzsV/8962sXxw/4BW+9MVZEJovfZC5cvDW3byxpSbWbL0cfxby2hp\nB5zHvs2y2aks353bbJV7So7x875RfPPVTNzNfPtE+FRgoCsowIuJriyNuHw7pN3LyFvwAbf36dBh\ns3kDZ10hO15Ygs3bH2LOuLkryIRYktGeqFwZENJxAZGmWNF35iv8n6nTTiN5ZsnINrUowqcCA11B\nAV5MVL4Ps2XnBOLznHh2SA+TSzHaE+exb7Ps/Dss353Lfyb0auIJlhz7mX2jvuGrmSa2n2tEV5AJ\nYRQFRojwqcBAV1CAFxuV6wAu6ri5lYGz08hnaMu4uSvIhNB6AiNE+FRgoCsoQEHb6AoyIbSewAgR\nPhUY6AoKUNA2uoJMCKMoMEKETwUGuoICFLSNriATQusJjBDhU4GBrqAABW2jK8iE0HoCI0T4VGCg\nKyhAQdvoCjIhjKLACBE+FRjoCgpQ0Da6gkwIrScwQniKAgNdQQEKLpwrVSaEURQYITxFgQFhFAWN\n6Qo1cIXWExghPEWBAWEUBY3pCjIhjKLAJFeq0AvMoysoQMGFc6V6jcIoCkxypQp9R6IpOEV2penz\nupIMsssvj70JhVEUNKYrbCcmjKLACKH8Lhx16s88vWgvZQ0qK1dnHONIllr+W0k2y//vNXbkXfqG\nURhFQWOEURR0aa5Uoe8Q1PH8b/5aRi5cwGBr0ObtY+lbL/PAdaO5f0sBsgl0GsVTj1qw+L6V5Fzi\ndlEYRUFjGibfXan6QRhFgRFdYSTYERRveoIP+j3LTE/9K2XRcywPLXyee0a6NdmKxirgYR7N/y+v\nH20hznoJIIyioDEN5aBhUt6VhDCKAiOEUbwQSti/9Ajh84dhb9b1PZlwpz1r1qeibuasrjiCHz/f\nxTlTG8O2pWcR3/PlrnPNb0LdCsIoChojPEVBl6PTlZ8mk9/eeJ8DxaYuqCb268X8klx9ETvVGsXE\nHKgmqL95JhHAcYAf5XsSKGt8ojqRT+9YSP61Y3Bvh43ZnUIm4brsLl45WNLmey8Zo3hZysSVySUz\naO5AmRBGUWCSiy/0NUQtvpX1YfcyxllH8fHvWPTcg0zx6cHkdYV119gQdOckjt3xBHvKL3L3TKGp\norRKibWqDa+TpRJ1SWUjD07NiS8eYvW1n/J0oE379M3Cm7nvLCD64cUcbmO09tIwipepTFyhXBrh\n046VCWEUBUZ06kiw8Dce+WY0z092BZQ4Dp3JswvvwLuqlCptg77YX8V/bv2L+7/LvLj9M4XKhX69\ntWSeN39UWpufhcrXA+uGB4v/5qUPrHnqroFYtWP3lO4zeDF8DU+vyOJC1VinGcXLVSauUC6J8GkH\ny4QwigIjLory05RTXGHwkbRo6v5ZcuB7YsbPYFBdZoqFvStOVs33p/eNc8haupP8ju+tGbgx7g5n\ndh3Ml42OriSaZW+9zpcH8ji18g1e/2IHmRrD9TWk7k0n+NYwnBq0Uvj3B/w16GEm9Wrv19KekNvG\nEP/eOjLbYBUvvqdYLwsGLl+ZuDK5FMKnHS0TwigKTNIRQl9z+lfuCfXGzcEa1wHDGXvNdTy8PhMN\nUHU6imofLyzNaMfC2R1VyhkuZv5mTdrvLH7yed765DM+/nwZny18mz0FOkDFwPsW4bnsV1LqDJ/S\nKYS7Fr7HunSJ0t1fsvixyXgb0lAro/hpZzgv3ezeoPUK4n47hPMNoUaGsr1wDLyJISdXcbjY/Hsu\nplHUFR/l3QkuWPo9xPqM+vSjS10muhqXQv
i0o2VCGEWBER09ElTYh/B/e/JRl6Xx99KX+M+7K/li\nrjcqwLKbGxQUo2m1FUBdSrmVbbuGGVt83KlvmTv5E+zuX8TCp57gzt7refHjfRTWvUKq/g+ydNYe\nFi0/00r/a0hcuoRzz3/GdJeGx0tIOlLBgMGutEN+TVMcBhBmm0xEVk2zp2fPno2bmxs9e/bEw8OD\n3r17c/LkSfn8Tz/9REBAACEhIQwbNozw8HC+++679umbOomPZt7PoQlrOPiqNR/f/DBbCvQK91KW\nia7IpRA+7XCZkASCBtja2kqABEiffvrpRX22JvklyXXQUim37u+KmC+k/z48R/K3RvK66RHpvx/u\nlwq0+nPFv42TnO49LFVflI5lSMumdpNcb9suFekPSBnfjJQch30ppWkaXlgkHfryU+mfItNN1Wb/\nKX36Q5RU3PhETbz0opuVNG93mfHxqljp3SkB0vVvREoVrXa0Sop9d4oUcP0bUmTji2sSpVe7K6VZ\nO0ubvXP37t3y727Of/b29tK5c+da7ZG5aDXaBn9oJcNfl6xMdFHuvfdeWQZGjRrVYc8p2/8fadTg\nYdK0e56Qnnt2gTRuiI804JqXpSPlHS8TwigKjGhoFD/55JN2b78lYZc0adJno0dLX6RrWmmlQFo7\nzV9aGHNx1J/m9OdSGE7SvO1F8vM3THeWBr4S3X4KuDpWet7VWrpjb7nx8fL90mN9kTzv3S01b86M\nLpb2P9ZXwvNeaXfji2sSpFdckG75y3QrV199tdlG8fnnn7+AD9k8l6NMdFXuu+8+WQbCw8M77Dnn\n106Thr9wVCrXFksHXwyQFDhLM35Kk2okqcNlQhhFgRF2dnay0H/88cft3n6Lwi5JUnXyl9LdD/4s\nnao11YJGyt30hHT7R/EXzSMoP/iY5KkaIX2fVTf8LPtHetDLU7qnsVf3b9CekT7pizRpU1H7tdmQ\nikPS/bYO0uPHq0xesm3bNrMMYrdu3aSCgoJ269rlKBPNUpshrX/9PWl/keGAVqour5a93qqYr6RF\ny5Ik07/Apc8DDzwgy8GIESM67Dmlkb9IqxLLpJw/7pG8QOr30DYpr0EwoSNlQhhFgRH29vYdahRb\nE3ZJkqTq7Hgpvbz5+yWpWspJPHtRFYvmzDfS1T3GS6vyJEmSaqWMX2+WnO0mSWvz2vMpxdL2GVaS\nz0fpkraVK2tOr5b+c/vj0rLUNrzuBaul0QRJP+W2fNnw4cNbNYqLFi0y/7lmcDnKRHN9iHxptHT7\nZv1goerkWumzxbdI3fu9ISXLDk25dOCJYdL97TmYusg8+OCDshwMHz68Q59Vc3KpNMURSTVssXSs\nrEw68uH/pL1F9ec7SiaEURQY0dAofvTRRx3yjNaE/dKjXIr5/F7p9v9+KH373WfSq3M9Jcuwz6XT\nrUVv2kjOTyMkx6mb6uYtTVO27xHJF0spcFG02S99xaEHJWfvN6QTrfT5999/b9Egurq6SiUlJWY+\n1XwuP5loRMEKaWSPZ6WYhp5LzleSn89rUnJNg2Pp70p9+38kZVzs/rUTDz/8sCwLYWFhHfegsmPS\na8NUEo5TpG9SaySp/JD03MhZ0h+FHfdIAyL7VGBEh69DKj/O/+Y9xnam8OXqFxmuiGft6mMUX9Jl\nFO0Jeux7lr/7DPffdxvBZRX0nXo1Hu2cJuo+5RnC49cT00oFDodrvuRU6R7u7W3u61tN4qotuD8x\nj/6t9HnGjBkEBgaaPP/f//6Xbt26mflcM7mMZEJTnE7MwX0cTSnQ161VF5Nbrm2yds4kvW9kTtZS\ndl6miykvzjrFGuI/uY9FERocBjtx5M07mTj8et7P7IGrOesw/iXCKAqM6Nj1aJ0r7BdEdRI/PPsE\nX8bpq9Vos7fzY4Q3t88eTDsVYqun1wzeujWOjzefa7XyTEn8cRQB/czrQ/HfvLdtLB/cP6DV5R4K\nhYKFCx
c2e65nz5488cQT5jyxDVweMqErOsqXD81k/vPf8nd6KXnHlrHktS/54ME7+P60xvy1cxbO\nuKtSOHOZLqa8OEbRmoCXYpAkibJjq/jux9X8lVSBlPUVVzt00CMb0Nq4RtCFaX+hrxP2l9q52Q5E\nVxTJqp+30WfYrfxzKoo1P2yl9/vrWRjc7iYRsCN84Yf4zniW1df8wm1eJkxYZTwbjvky81HH1pvU\nFbLjhSXYvL2eyS6tXw4wd+5cFi1aREpKitHxF154AXt784uem8elLxNlER9y94Pb8X9rGcsnu8tK\n85o9D+C/tCe/fm6NZYQbxOvXzrU88FBTWm6F3WW6mLLhOkWxdZSgS3AplHG6lFB6zGd71hEWBlhg\n6TudN9Zt46s7BxnXLG1PnMfy9rLZpC7fTa6p/Z7sArj7yRvrK+S0QMmxn9k36hu+muludlEApVLJ\niy++aHTMy8uLRx55xMwWrhy0eRt59Kb/wX9/YnEDgwjg5D+Jq8ZPw98BnK+ai+u+CAx7kdSkb+bz\nj9ZzMm0rX3y2jlRDWdySSPY53c4414v8QdqJLqEfOn7aUnA54eTkJE+kv//++53dHUEnUVtbK/Xr\n10+WhS+//LKzu9QJVEhH/tNPshj8mhTbXKJvVar097E8SSNJZq+dK1g7TfJfGHPZFhh48sknZZkI\nCAjo7O50CMJTFJhEulJHgoJWUalUPP/88wD069eP+++/v5N71AlUxrJ8ZTr975jF4OZCAzb9GT+8\nh94Dt+jHAz/eydG3lnPaRP0xbd5mFv8+gdWvBnVcpKGDEeFTQZejS4RHBGZxzz334OXlxauvvoql\n5SWU9XKxqMokJr8bwcN7N1M7U0v5uXyqG9gF68GPsHTxMCyaLy+LRtef579/Gv/L1SLSNfSDSLS5\nyNTW1lJYWESNWt36xZ2AQqFAqVSiUCgoLikhI/NsZ3epCUqFEls7W1xdnDt3R/h/SXFxCeUVFZf0\niPu1115n3PjrLkk5ALCytMTZxRkb6w6wNCoHujvYYG/djO9QmcCatWXMfKJHgwxgLSXpMWyKOcP0\ne6fSt5EltXYfilcrj5QkiaKiYiorq9BJl55cWFvb4OPbH4VCgYeH56UrF1ZWuLq4YGXV9sGcQrpS\nzf0lRllZGcciooiOjqO6xsRQUtAmPDzcuWrUCAYNHHDZGEdJkkg+cZJjxyM5m5Xd2d25IlCpVAQF\n+jFi+DDcXM1MsTUHXTF/PzaCR2x/4viHY5BzfXVFHFm2kqKJDzLFy9ivqElYjO/YUrZkf0hIG+y0\nJEmknEzl0OFjZOeca7eP0JWxtrYmJDiQEcNC6dbNjEztOoRRvAjk5Z9nxcq1VFZVdXZXrkjGXDWS\na68Z09ndaBWdTsfW7X8RG5fQ2V25IlGpVNw65xb69vFutzZ1Jcf45LGFHOw7n/tvHIpVTjxRp2oY\ndMsCbhxgZ3ytVkvxphvwX/U8Kauux3w1DHv3HeDAoSPt1m9BPXZ2tsyfN4cePbqbdb0wih1MQUEh\nvyxfTWVVFVZWlowcMZygQH8cHPTrvbRaLWlpZ6iqrpbv8fRwx83N/JxtSZLIzjlHYWGRfKxbN0f6\nePdukwdVUlJqFA6xsrLE16dfm+aTampqOJ12Bo1Gn22gUCjo17eP/HnNQafTkX4mg4qK+hXOvXr2\noGfPHvL5rOwc/tl/iMyzWQBMnTKRkGDTlVguBTZv3SEbxH59+zBq5Ah6e3liYaEPzxUWFpGVnSNf\nb2NjjU+/vqhU5s9yVFVVk5Z+Bq1Wv55DqVTi068PdnZ2rdxZj1arJS39DFVV9TLp4d6L7t3dzG5D\nkiRycnIpKCyUj3VzdKRPn7bLZObZLHn+qrFMVlVVkZB4gsNHjlFRWYlKpWLe3Jn08e5t9jNaR0Nx\negxRyaU4+48gyNvBaHmLrng/Sx75DmlsEBnv/Yf4Jec5fJv531VkVAzb
/9wFgHdvL64eM5reXh5y\nUkte/nlyc/Pk6+3t7OjXr49R0ktrlJdXkJ6RgaTTf48WFhb4+vZrU9i5traW02lnUDeY+und2wsX\nZ/O3xZYkicyzWZSUlMrHXF1c8PR0b5NcFBQWkW30rtjg69MXCwv9L1NeXkFsfAJHjh5Hra7Fzs6W\nu+bPw9WMSMKVYRS1FZRU2+Bk3yHbs/4rNmzcQmLSCWysrbn9tjm49+oJ6F/2yOhYYmLjqKxs6kH2\n8e5NaEgQQwYPlH/oxqjVauLiE4mMjiU//3yT8y7OzoSGBBEc5I+trW2zbUiSxMnU00RGx3D6dHqT\n89bW1gQF+BEWGtyioc7NzSMiKoaExCRqa43T7xQKBYMGDiAsNIh+ffuYFP7y8nKiYuKIjomjrKxp\nrTNPTw/CQoLwGzoYlUqFVqtlzbrfSUvPwN7ejicefbBNiuJikp2dw0+/rARg+LBQJl4/DoVCgU6n\n40RKKlHRMaSfyWxyn62tDcFBAYSFBOPcgvLJys4hMiqGxKQTskE0oFQqGTpkEGGhwXj3Nj2rVVpa\nRmR0DDGx8UYDEgPevb0ICw1uVSbjE5KIjIolL79pLTNnZyfCQoIICgrArgWZTD2VRmR0DKdOpTU5\nb21tRWCAP2GhwXSvk8mSklJ+XbGGktJSPNx7seCu2y9SSL2c3Xf58NyYaI7dU8brA2fgvC+Jp/ua\nJ4darZbPvviGyqoqfH36MWfWDCwsLNBoNCQlnyAyKtZooGTAwcGBkOAAQoODcHQ0XeYl/UwGkVGx\nnEg52SQxRqVS4e83hGGhwbi79zLZRmFhEZFRMcTGJTQ79ePj05ewkGAGDexv8juvqq4mNjaBqOgY\nCouKm5zv3t2NsJAgAgP8sDZhqHU6HcknThIVHcuZjKbvip2tLUFBAYSFBuHspH9Xzp3LZfmqddTU\n1BDg78dNN04x+TkNXOZGUUPmpvf5+ZQr3Uv3s/r4UN5b/iLD2xK3MIOamhreeustnnnmGZydnc2+\nr7S0jC++/g5Jkph2wySCgwIAOH06nfUbNlFbWytfa29vj0KhQK1WG43EfPr1YebN05sISnFJCavX\n/k5BQf1I3NbGBguVCp1Oa2RoHR0dmDdnZpPwgUajYdOW7SQl11cusbS0rHuWRHl5hXxcqVQyfdoU\n/P2GNPmcx45H8teuPfLfCoUCe3s7QEF1dbXsNQIMCwth4vXjmhivs2ezWbt+g5HHbG9vh0KhRFNb\na/Qyenl6MHvWDOzt7CgqLuarpT8AMHf2zQzo79ukf+3Jjz/+iL+/P+Hh4W267/c/NpOUnEKP7m7c\nf+9d8m/9+8YtRorfysoKKysrJEmioqL++1epVNwyYxoDB/Q3aleSJA4cOsK+fw7Kx5RKpewZVlVV\nGRnJ0aPCGTd2TBPldTrtDL9t2Iha3VQma2vV1NTUy2TfPt7MmnlTEy+jpLSU1Wt/5/z5AvlYSzJ5\n65xb6Nmjh1EbGo2GLdv+JCExWT5maanC2tqG5mRy2tRJBPr7AXAmI5PlK9cCcMftc9vZWzSBOoEX\nPSdifziThXbfEhaeyuc77mbgwEB6mVG15kTKSdb/vgmAxx65H6du3aisqmLd+j+M5pytra2xtLRE\nknRGAxYbGxtmz7ypyWfV6XTs+nsvxyKi5GMqlQobG/33WFlZZZTgdf111xI+PKyJXCQlp7Bx8zYj\nGdJHfRTU1NQY6bAhgwcyfdqUJpGl8+cLWLX2N0pLy+Rjdna2KJUWaLUao4iEq6sLt865BZdGeram\nRs3vf2zmdFq6fMzUu2JpqeKWGTfKusDgiSuVSh5/5IFWo1YdYBR1VObnonbywLmjSxnpMvj62jmc\n/3ofL/sX8XN4AL8vPsWGqea78+ZQWVmJvb09Tk5OPP300zzzzDM4ObX+jNi4BDZv3YGVlRVPP/Ew\nKpWK06fTWb3udyRJwtbGhrDQYEKC
A3Fy0hdZ1ul0pKWd4XhUtKwsPT09mD9vtixsZWVl/PTLSsrK\nylEoFAT4DyUsNBhPj/oQRG5ePlHRscTExqPVarGxtuauO2+TR9Y6nY61v/0hP6OPd2+GDwth4ID+\nxiGIuHiOR0bLyuimG28gwH+o/BkPHz3O37v3AXovYPiwUAID/LC10efk1dbWknwihWMR0Zw7lwtA\nYIAfN06dLPc1KyubX1euRavVYmVlRWhIIKHBQXKoQ5IkzmRkEhkVQ/KJk4B+ZHnX/FuxsbHhp2Ur\nyM45x8jw4Vw/fmzbf+A2cO+99/Ljjz9yww03sHjxYrON4/sffY5arWbKpOsJCw1Go9GwcvV6Ofzr\n69OPYWHB9Pf1kQcMpaVlRMfGERkVIxuU2TNnMGhgvWFsOBfl5ubK8GGhBPgNkQdRarWaxKQTHI+I\nIq8umjBieBgTrx8nt5GWfoZVa35DkiRsbGwICwkiJCRQHm1LksTptHQio2I4mXoa0IdT5982V87u\nKy8v56dfVlJaWoZCocDfbyhhoUF4eXrIv3Ne/nlZJjUaDdbW1tx1xzx61IVldTod63/fKD/Du7cX\nw4eFMmhgA5msqCA2LoHjEdGUl+ujCTdOnUxQoD+SJPHdD8vIP1/A6FEjGH/tNeb9qP+KMv556ia+\n67uAkefX8O5vPZg9by7/fXUqvcxwFv/862+OR0bj3duLO+ffSnVNDb8uXy3/VoMHDSAsNIR+fb3l\n77GwqIjomDiiomOpqVFjYWHB7fNmy1EASZLYtmMn0TFxAPTq1ZMRw0IZOmSQrEOqq6uJS0jieEQU\nRXWe2/hrr2b0qHp5Tkw6wYaNWwD9AGl4WAjBQf44OOg9U61WS+qp0xyPiJY9Nx+fvsyddbP8exUW\nFvHzLyupqq7GwsKCoEB/wkKC6FUXMdOH2c8RGR1LXHwikiTh4GDPgjtvl5NjNBoNK1atkwcJvr79\nGB4Wgq9PP+N3JSaOiKhoqqqqUSgUzJk1gwH9famtreWjT79Co9EwY/rUZgf2DbkAo6ijMjeH2u5e\nODWKoFQnfMa8G//DH3nXsTx1K7d7KIEaUlZ/SkTok9w2qP3TpnUaDahUKMsP8njQ43huPszCQefZ\ntnQnXnffRVA7FJA1GEUDzs7OPPPMMzz99NMt7hhw9HgkO3ftobubKw/ev4CS0lK+/f5n1Opaund3\nY96cmS1mRUXHxLF1+1+A3pBMnzYFSZL4cdkKzp3LxdJSxZxZN9Ovbx+TbeScy2X1mt+orKrC2cmJ\nB++/G5VKxe69+zl0+CgA144dw1Wjwk2HPqqqWLN+A1lZOSiVSu6563Z69epJWnoGK1evA6B/fx9u\nuelGkynQkiTx1649HK8buU68fhwjhodRVVXF0m9/orKqCqdu3bjt1lktxv2TklPYsHELkiQxoL8P\nc2ffwvrfN3Ei5SShIUHcMHmCyXvbA4NRNGCOcZQkibff/QiA226djU+/Pmz/cyeRUbEATJp4HcPD\nQkzeX15ewaq1v5GXl49KpeL+e+/E1cWF5BMn+W2D3ssYOmQQ06dNMTn/qNVq2bZjpzynOX3aZAID\n/CktLeOb739GrVbj5urCbbfOblEmDQM9gAC/odw0/QYkSeLnX1eRnZ2DSqVizqwZ+PTra7KN3Nw8\nVq5ZT2VlFd26OfLwA/egUqnY+88BDhzUG/ixV1/FmKtGthiOW7t+A2fPZqNQKFhw1+14uPdi3W8b\nSTmZelFkoR4t5YWVWLk6YlFeTI2dM3ZmRvE3bt5OfEKi/F0a+q9QKJgxfSp+QwebvLeoqJiVq9dT\nXFKCrY0NDz6wAHs7OyKiYthRN0cZFhrMpAnjTU4r1NbWsmHjFnkgMm/OTHx9+5GXf54ffvoVnU6H\np4c7c2ffgp2d6XD3kaPH+XvPPwCMDB/G9eOvRavV8s33P1NUVIytjQ23zp2Jp4e7yc+TkXmWNet+\n
R62upVfPHty74A4UCgVbt/8lG3jDoNIUZWXlrFr7G/n557G0VPHAvXfj7OzEl0u/p7i4pNX74QIW\n7+vOrWbG4Kt49WhF4xP89XMa497/i9iT65nnoW+6+O/neOzISG4wGER1Jju/+ZLvfvqWTz/6lYii\nf7cWR6lSodRks/HVD7H84A/+z88KVJ5cd20eix5fS047LPVpPG4oLi5m0aJF9OvXjzfeeIPS0tJm\n7zMIoq7u/qioWNTqWmxtbFo1iAAhwYFcO1afVRmfkER5eTkZmWdlj+vmm6a1aBBBP6KfM2uGvt8l\nJZxISUWtriUiMhrQz2+NGW1a+QDY2toyd/YtOHXrhk6nk0MyR44dB/RJMDNnmDaIoA+pTrx+HEMG\nD6q7NwKdTkdsfCKVVVVygkRrE+FDhwxiyqTrAUg9lcb5gkK5750xE7Bt2zZGjhzJ1KlTOXbsWLPX\nKBSK+j7qdFRWVhETqzdOY64a2aJBBH24at6cmdjb2aHRaIiMigHgyFH99+/d24ubbryhxYQcCwsL\npk6ZiI+P3lgdPhqBJElEx8ShVquxsbZm3q2zWpXJoEB/2QNLSEqmtLSMs1nZctLDjOlTWzSIoPdc\n5s6+BYVCQWlpGUknUqitreV4RDSgV+RXjxnVskza2DB31s04OzshSRLHjutlUqnsjKU5Fji4OmIF\nWDiYbxABdDp9WNJCZUFhUREpJ1MBmDRhfIsGEcDFxZl5t87E0tKSqupq4uIS9AbqiF4uBg0cwOSJ\n17U4z25packtM27Eo25O0fBOH4+IQqfT4ejowNw5pg0i6OV71MgRhI8IAyAySu/Bppw8JXuhs2fN\naNEggj5adctNNwL6SJch2c4wkLvm6tGtGjTDVJGdrS21tRoio/XvitLw/rV4t542G8Wq9EjOac6y\nYVUsDc2iLv8YR/rew8OzriXQ017fcOUxXnssjYdfGoszAJUcW3wrn1lP554FD3D/uBSeXPAD6SbK\nIpmFJoednywlZ963vDPyFJsT9L2yDniYpy1e5pk/i/9F4y1TVFTEq6++Sr9+/XjzzTcpKyszOm9R\nJ4xarRatVktMbDygf+nNXTcTPjwMGxsbJEkiJjZBVoieHu5N5pdM4eXlKV8bFR1LYlIyarUapVLJ\n1VeNMqsNWxsbRo4cDujDKrm5eXJizpirRpmVoapQKBh7zVWAPtxx+nQ6UXWfJzDAz+yM2+CgAPn7\ni46ONeuejmbbtm2Eh4czbdq0Zo2jSqUPq2i0GmLjE+pCxZaMHjnCrPYdHOwZPiwUgNi4RLKysuUE\njGuuHm0y8aUhSqWSa6/Wf//5+efJzMwiqm4EHhoahJOZ+ySOGB6Kra1BJuNlmXTv1ZPBgwaY1Yan\nhzuDBuqvjYqKJSk5hZqaGhQKBVePMU8mbWxsGBWul8mk5BNUXYZLnrRa/ahdqVQSFa3/LRwdHQgN\nCTLrflcXF4IC/QGIjI4lLf0MxSX6suTXXnOVWclGKpWKMXV6IC09g3O5uSQkJgEwMny4yYSoxowZ\nPRILCwtqa2tJSEwiqs4g9e/v02KCV0P69/eht5en/vNExRIbF49Op8Pa2kr+rVvD0dGBYXUDTUOo\nvi20uaJNeY6OW54dwyc/ryL2rdGMrosqFkUfx3XU9Ub7u5XsXcIKv6d4w5CdXHKA976vYVqkFxaA\n3eApBEffxy8pd/GK3wVMQOoK2fn4KCb+VE4Pxy/4P7Un/z14nJsBcGDUIxO59dE/yJ1yN83lVn32\n2WdkZmai1WrR6XTN/l+r1VLTymL7oqIiXnnlFT766CP+85//8MQTT+Do6CgrKq1Wx+m0dCoq9RPk\nISHmLx2wtLQkKMCPo8cjiYmNp7TO8Jr70hgICwniZOopMjLPoq6bHB8yeGCLI8DGBPgP5e/d+9Bo\nNOytS+ywt7dj4ADzk1u6u7nSx7s3GZlnOXI8Qs5ECws1//MolU
pCg4PY+88BYuMS6FvnLRcXF/HF\nF18Y/YaG/xr+be655v69f//+Fvu2detWtm7dytSpU1m8eDEjRuiNnspCRW2tBo1GS1zdyDfAfyhW\nVubLfXCQP/v2H6S6upr9dfOILi7ObVqX5+HhTs+ePcjLy+fg4aNygkJblrPoF8sHcOTocWJi4ymv\na6OtMhkaEsSJlJOczcqWkz4GDxqIQxu2p/L3G8qu3XuprdUYJYxdLsieolIpy0VIUGCbsqhDQ4KI\niIymuLiEw3VeYm8vT7PX5QEM6O+Dg4MD5eXl7PvnELW1GiwsLAgM8DO7DVtbW4YOGUR8QhJRMXHy\nMpKwC5CLs1nZpJxMlTOYA/392rQ0LCQ4gH8OHKKqqppTp5tmMLdEG41iOaeSHBk3/yEOfrqQVbFv\nMXq0PVBC4jFrRj7VcC1UJXGrdtB92jcYpvU05yKILnNmnm3dD65ywcsqg70pFdCSUaw5w5ZP3+Xb\n3w9wqlSDBCi6jeGdP5Yy9eszSF83f5v1gEkExH/CoeK7udm56flly5Zx/Pjxtn0FLVBYWMhLL73E\nhx9+yH/+8x8mTJgE6Bf1GjKsHBzszR6RG/D09AD0c3sG5WE4Zn4b9aELw4i6rW3YWFvj5uZKbm6e\nnCXq7t7LLC/FuC8eZGSepaoueUSpVNKrZ882twEYZaXm5+fz9JOPt6mdjqKxcbRQGQZIWvm78/Ro\n2/fv4OCAk1M3iotL6n9Dj7at71IoFHh5epCXly+3YWdn2yTbrzW86uSpqvrCZdLLSCbrvhPPlkNs\njbG2tqJ79+7k5JwzymIE2Lr9L0rLynFzdZETi45HRJFaF+GYfct0VCoVGZlnOXhY791fe/VoPDzc\nqaqu5o9N2wD94DGkLnN8246dlJSWGbcZGU1qXcLarFumY6lSkZGZxcG6OfuxV4/G08Od6uoaNmza\nqm9z0ABCggPrPUULZb1ctPE76NmjO5aW+kHXhX6PSqUST49epJwsl+XC1dVFTpgzFy9PD+ITkowy\njdsq5151ciRJEtXy52lbG46OjnTr5khpaVkTuWiNtoVPa84QWxmKX//ruTe8hN+XR1MOUJlGZE0I\ng4wigqXE7akldFB9poumvIBKLLEwvMNKS6wUlRSU1mKSighev+46PioaxWOvP0lwdgI9n/6JNSve\n5Poepm8DwM6HQO1Ros43P7HYVmVuLsXFxSQnJ1NcUgyAVlefznwha6eau6et7bRHG83do6Ad2rig\nfjQ9dimuUczIyODMmTOyrDUM5VzIMrr2+e7av40LaaejZNJAYWER+fn5FBcXy8fKyivIz88nPz9f\nnl+qrq6Rj9XULUmRdJJ8rGG6f0Fdm4VF9YUyyhu0Sd28dk1NgzbrBm46nU4+VlaXzS17ig100aXy\nm7bHu30hfelouWiNNnmKmpw48sKuoYeqJ5Mfm8BTD33JP2+MYWJhNNlDxmM0I6SrIr9AQU/n+kco\nLK1RoaM+H0KLVrLA0tJU52uIf/c2vghYxum3xmBPJQ6znmJ2nguD+vVofdNUK1e87MpJLqyFZjZr\n8fb2pqCgAAsLCywsLFAqlSiVSqO/LSws0Ol0HDnSegkmCwsL7rjjDl5++WUGDBjAydRTHIuIkUeD\noE+RlySpTT9Yc+FbdRvrpzZXgFxd0/ai5I2fq76AwuaN29BqtWg0mjZVbmnuuRZKC6ysrIx+R8O/\nTf2tUCia/f1N/fvkyZPk5eU10yNjAgICePXVV5k9ezYKhYKl3/4of1YDNW38/iVJavKbtbUN/T2N\nf8NadDpdmwYVzT23rfLUHm3o72n+Xbjj9rlNjo2/9mrGX3u10bFBA/sbLXMBvff8xKMPNm3ztjlN\njo0bO4ZxY43LDA4c4MvAAcb3N9dmwzlFA239TbVabZOCGRcmF41kS932Gs3NPbdGXdOmaZrm9F1r\nU1iNae5dMZc2GEUtWYeTGD
hqNhYocbvuSWaqZvDJlkyCHU/gHTTP2EgpVdhaajhbXZ/vY+nSGyfN\nn5Qbfj9NJYVqR/p0NxE6rUnhl8+ymbE1FHsAXRHxh8vxnexi3i7iOjUVGkvsbZo3QGvXrjWnFcrK\nylpceqFSqbjjjjt46aWXGDCgPtHAQlkfMjNkXtXUqDmdlk5/Xx+zng36JALQhxAKCgspLS0jMflE\nm0IKSUn6NqysrOjt5UVJSSmJySe4arTppRiNOZebJ88B9vXuTVZWNplnsygrK8PR0bzEIa1WS3KK\nfq1hH+/e5J8vkItkN1z/2BqJdZ/Hw72X3P9+Pj5tfnnaSuMlGY1pbAwNWNQZfE2dLKScPEVScoqc\nPGMOGZln5Xnpvn28yc4+x+m0dKqqq80Oc6nVtXL6fR/v3uScy60r35XepsIHiQaZ9PCguKSEkpJS\nkpJP0Lu3Z5vbsLS0pLeXJ0XFxSQln2g187Qhefn5nK8rYOHp6U6uGQOWSwltXejZQmmBp4c7Wdk5\nJCWnMHTIILPbOJGSKmde9/H2Ijcvj5SUVCZPvM7saFh5eYW81rBvX2+ysnMoKSklJ+ccHq1kjRqQ\nJInEJH3RBW8vT06lpVNTU0NS0gmuGj3S7M9jkAsHBwd69ezBqdNpJCWntJp52pD0MxkXHI42f2io\nTmNP/CBG9a6zo44jefJRH/a9+jorD9oR2q/xS+mAr78VZ3Lq47nKnmO4eVApSTl1iqviNHHacGYG\nOUJ1Gn/9foBzRgMeHRrLXvh76ecqa5K+5xP1f/hwqpl1QatzSK/tg585pSVawFSqv0qlYsGCBSQn\nJ/Pjjz8aGUTQzxMY7nd1dZEzsCIiY8x+dkFhEWnpGQAMCwuWEyJi4xKNqkm0hE6nI7IuSzPQfyjD\nh4UA+gzEtuzUYMgydHVxZsxVI+UMREMGozmknDwlV+QIDx8mj9AjoqLNXlJRXlEhL+Jvy4vSkQQE\nBLBmzRpiY2OZM2dOE6WusqgfIIWF6PuceTar2VJopjDIjX53kHAsLfWl7tpSYDwhMUnOPB41aoSc\npNMWmSwqLpYzj4eFBcsJNrHxiUYVcVpCkiQ58zjAfyjDh+sHB+cLCtu0HZFBJl2cnVtdnnQpotMa\nwqdKOdnsRMrJZsscmsKwvGrgAF9G1mVoVlZVye+IOUTHxumLN1hbc9WocLnIR0SU+XKRnXOO3Dy9\nPA8bFkJQXZJOZHSs2duj1dbWyln6IcEBDAvTvytnMjKNKiW1hkGevbw8mlRNao3WjWLZXh4LC2Xi\n7Gc4PnoifWXf0hr/Rz/k7trveOtkIEOaOApODL+pJ8cP5yDbOWt/nvzkJmI+/p4DqfH8/uEvOL34\nMbf0UqI+tZKnZ97I4w2XUFgP5dElI9nxzpesWfYWT7xexMtblzDC3NrGRcfY43ALV7XjbjKgN4b3\n3HMPJ06c4Mcff6R//+aXRjQcpel0OlmBp546bZYiq62tZdNm/WS/g4MDA/r7EhIUgEKhL5+2bfvO\nVg2JJEns3vMPxcX6NO3Q0CA86zIQAbZu+9OsVPbUU6flBbShocH6rNhAffLBocPHyDLDuJaUlvLn\nzt2AviqFs5MToXUGIisrhyPHIlptQ6vVsmnzdnQ6HVZWVgwd0vJaro6mNWNooOGcoo9PX7mC0abN\n280KdSUkJpN8Qp9dGRYShLW1Nf5+es/6n/0HZWXUEgWFRezeq8+eHTJ4IPZ2drJMnjqdJv++LVFb\nW8vGOpnUZx73JyjQH6VSSU1NDVu3/2nW4Gb33n/kqENoSBAe7r3k+ptbt//VbD3gxpw6nSYvYwgN\nCbxstg9riMFYKJUWDBk8CBtrayRJYtMW49Jqpjh6PFKuihQWol/qZfD4d+7aIy/PaInsnHMcPKRP\nCgoK9NdXlaqTi9i4BHntZEtUVVezpa6oQ4/ubvT28pQHS6WlZez8e69Zumr7n7vkxJiQoEB8
ffrJ\ny682btlu1qArLiFR7rNhANoWWjeKVt6Mv3EY/UY9yUuTexrf4DyWd3cdYd9302laE16J963/xWvV\nSlLU9cfcrn+XDR9cD6fy8X1sFSseG4o1YOW/kNiMnxhj1/CLs6L//SvY9OaNDJ/8LF+v/oh5/c2t\niqMj449fsHrsHgb/y3Jzhh9TpVJx7733cuLECX744Qd8fVsONzVMIa5RqxkyeCD9+upH5pu37uDQ\n4WMm19AUFhWxfNU6eW81Q1UKBwcHrhkzGoD4xCQ2bNwih9QaU11Tw/Y/d8nGJiw0mJ49eqBQKOT2\nCgqL+GX5apNKVafTERUdy7rfNgL6hfqhdd7qqJHDcXLqhlarZcXq9SQlp5gU/MyzWSz7dRUVFRVY\nWVly3Tj9AnCffn0YPGggAH/v3seefftNesClpWWsXvs7aelnAJhw3bVYWVnK36HFRUy0MdcYGrCs\nK2ygVteiUCiYPPE6QL9IefnKNUY1bBui1Wo5ejxSNkS9e3vKZaquvmoU9vb2qNW1LF+5ltRTp5v9\n/iVJIi39DL8sX0V1XajVsF508KAB+PTTe1hbt//FwcNHTcpkUXExK1avIytLvz5y0gR9eM7B3p6x\nV+tlMjHpBL9t2NxsQXHQzw3t+HOXvHQgJDhQLpJvqIlbVFTML8tXGe0M0RCdTkdMbDzrftuIJEn0\n7NFdHlwZlhtdTFn4NxjkRqvTYmlpyYS6jNb0M5msWvOb0W4SDamtrWXfPwfZWVdzeOCA/vj69gNg\n/LhrsLa2oqKykmW/riYj86xJuUg+cZIVq9ah0Wjo1s2RUXXrZkOCAuRBym8bNhMZFWPSSOfln+fX\n5avlQhoTJ4xHoVDQvbsbI+qmB45HRLF1+19UVzefCVpZWcUfm7YSF58I6GW7WzdHlEolkybo35Vz\n53L170qD3YAaotFoOHz0OJs2bwf00wOGAgi1bdARHVwQvJx9j45j2S07+W6icyvXVpPwy7fkTXmM\n8T3aQaArj/Pi9Z9wzdZfmPovPcWysjKeeeYZXnrpJXx8zJ8LrKio5JPP9etFbpp+AwF+Q6muqWHl\nqnXk1FWlsbW1ISjQX64RWVOjJiEpmbS0M3I7kydeJy9GBb0w//nX33JoQ6lUMmTwQAYO6I9KZYFW\nqyMt/QwJicmyghs6ZBAzpk81mtBPTDrBH5u2yi+Md28vAvyHYmdniyRBbl4eMbEJco1JV1cX5s+b\nY1SVv6CgkOWr1snXODs5ERwcgJurKwqFvuxSbFwC5+oUnKWlitkzb5YVMehf8DXrNshzGtbWVgT4\n+9HHuzdKpQK1upbkEyc5mXpKvmfsNVdx9VWjjHYZGHft1Vw1yrxapBfK+++/T9++fZvMGbaGoVRV\n9+5uPFBXEDwmNp4t2/6Ur+nXtw/+fkOwsbGu2w4sl9i4eNlr6tmjB7ffNttoMXVubh4rVq+TR9du\nri4EBwXi4qKvW1pSUkpMXIK8i4q1tTXz5tyCl1f93F9NTQ0rV6+XB2C2NnUy6VUvk4nJJ4x2UZk4\nYbys8KBpGT9TMpmYlCwnhQweNIBbZtxoJJPJJ1LYsHGr7EH17u1JoL+fLJN5efnExMXL4UUXF2fm\nz5tDt26OVFVX8/mX31JbW8v1469lZPgws3+fzmLt+g2cTD1Nv77e3D5Pn8Rz8PBR9uytXw87oL8v\nQ4cMwsrKEp1Ov/VSXHyiPH/ex7s3c2ffYlRR6kxGJmvWbZAHmL169iAoKIBujg5Ikn7QHRMTT1Fd\nZq69vT23z5st16EF/Tzj8lVr5QGbg4M9QYH+uPfqhUKhX0YTn5Akh7sVCgXTb5xCgF99boBOp2PT\nlu1ygXeVSoXf0MH4+vTDwkKJRqOvn5qUnCL/5iHBgdwweYLR+xUVHcu2HTvlv3369cFvaP27kpV9\njri4BHnP2l69enL7vNnY2tiQm5fP9z/+AsBtt85qteJS
x++SUX6UNxYsJ+zLj5jWswVjp87keIo9\noQGu5iXRtEgZB197hK3Xfsab49o5dtpG1v32ByknT+Hq6sLdd8zD1tYWtbqW7Tt2El9XNcIU9vb2\nTJ44Xi6N1hB9vcEI9h881GJIQaVSMWJ4WLM7I4B+QnrL1j8pMVGqzsCA/r5Mnza52S2oSkvL2Lh5\nW6tzQW5urtx04w1ySamGaLVa/ty5m5jY+BbnH2xtbbhu3Fh5x5EjxyLY9fdeAB5/5IE27bB9McnJ\nOcePy1YAxns/nkw9xbYdu+RBhSmGDhnE1CmTsLZuGvYoLCpi46Ztre7Y3rNnD2ZMn2qk+AzU1tay\nbccuEhKTWgxz2dvbMfH65kuQSZLE0eOR/LP/UItZyRYWFowYHsq4sVc3m/F6JiOTLVv/bDX019/X\nh+nTpmBnZ4tOp2PLtj+Ji0/E0tKSxx+53+R2aZcSJ1NPs3b9BgDmzJohV56Ki09g19/7WtyYXKFQ\nEBwUwKQJ45vN3M7NzeOPzdtanYvz7u3FTdNvaHb9dFV1NZu37DAakDZHt26OTJ0yCV+fpgZHkiT2\n/XOQI8ciWqwuY2VlyVWjRzJ65IhmddWJlFR2/LXLaKeU5vAbOpipUyZiZWVFbW0tq9f+TkbmWVxd\nXXjo/gWtDmYvztZRNXmklbvi49bmAjoXhqaYjAIr+vQyf2PVjiLzbBa/LF8N6Efx48ddQ39fHyws\nLORdEGLjEozCCl6e+nj8wAG+rWaPGXZBiIyOpbDhhq7duhEaHEhAgx0rTFG/C4JhnzLDhq5W+PsN\nJTQkEFeX1gcXefn5cskujbZ+k+H+vj7yXn6tCaRhF4SYmHgqKuuFv1fPnoSFBjN40ABUKhXl5RUc\nj4iSF0gPHjSAWbfc1GofO5PlK9fK3vCokcMZFhYi15M17B94tm5+CAx7Wfob7aLSEjnncomKjuXE\niZPy2lilQsnAgf2b7KJiirKyMqJj44mNTaCq2ngBdlhokNEuKqZQq2vr9gKMabLJcEjdnnnmyGRa\n+hkio2JJP5OBQSYtLa0I8B8i76IiSZJ+8f2ho3JYffSo8CbLLi5VJEni2x+Wcf58gb7s4phRhAQH\n4mBvj0ajqdtrM5ZzubnyPXZ2doQEBRAUGNDqNkiSJHE2K5vIqBhST6UhSXXZrhYWDB0ymNCQIHr1\nbD0RpaiomKiYOOITklA3WKrRt483YSHB+Pj0bXVJT3V1tVztpqTBgMfFxYWwkCD8/Ya0WuFJp9PJ\n+782zGOwsbGR35Vu3RzR6fRVxPbtPyTXip4+bYpZFXou8/0ULw8SEpPZuHmbPAK3sLDAxsbmghZv\nC/QvRsNEDC8vD+bNmWlyc9JLhaqqKlav22C0Y7itrU2HFZG40pEkUKtrjNbojRgWyvXXXXtJFnIw\nRVlZGStXr5eXloB+w1xD9rqgbUiSfkqgoVc64bprCR9hXjhdGMWLRPqZDA4eOlo36hW0B3pPyo+x\n11x1yRtEA2q1mn8OHCIuLrHF0Jigbbi792LEsBACA/w7uysXRGVVFfv+OUhCYtIFLbwXNE8f796M\nDB/epvrMwiheZIpLSsjNzTN7PVdncOJEMt7e3tjZmV+Y+WKiUCqwtbGhj3fvNhUJvpTQarXyYnxJ\nd+m9gvn5+fz99y59QXStjunTp9PNjI21LzaWliq6u7nRvZl50suR2tpafV3g6upLUi5A74Xl5p6j\nT5+WE1Y6E0tLS3r16tHmmr4gjKKgGW644QbGjRvH888/39ldEXQS+/fv55pr6neuT0pKYsiQlncs\nF3QN3nnnHfbs2cO2bds6uysdgghaC4w4fPgw27dv5/333zcqhCzoWjSekzNnIbngyqe8vJz333+f\n7du3c/To0c7uTocgjKLAiMWLFwNw/vx5vvjii87tjKDTaJz8I4yiAODzzz/n/Hn9mleDrrjSEEZR\nIHP48GF27Ngh//3B
Bx9QaaJajuDKprFRNLd2peDKxeAlGti2bRvHjh3rxB51DMIoCmQWLVpk9Hde\nXh5fffVVJ/VG0JmI8KmgMZ999hkFBcaFAF577bVO6k3HIYyiAIBDhw7x559/Njn+3nvvCW+xCyLC\np4KGlJWV8cEHHzQ5vmXLFo4fP94JPeo4hFEUAE29RAO5ubl8/fXXF7k3gs5GGEVBQ5rzEg1cad6i\nMIoCDh48yF9//WXy/LvvvmvW9lKCKwdhFAUGysrK+PDDD02e37x5MxERrW/7drkgjKKg1Swy4S12\nPYRRFBhoyUs0cCV5i8IodnEOHDjQopdoQHiLXQthFAVgei6xMZs2bSIyMvIi9KjjEUax09BQcCob\n0yksOkoysinv4Ex4c9canTt3jqVLl3ZsZwSXDMIoXgJoCjiV3VKSW8friE8//dRo952WuFK8RWEU\nOwU1qT8/zaK9ZUZ7R1ZnHONIlqEYsBKyl/N/r+0gr4OE/sCBA+zcubP1C+t45513hLfYRRBGsZNR\np/Lz04vYW2akIcg4dgRZRXSwjigtLW1xLrExGzduJCoqqv07cpERRrETUMf/j/lrR7JwwWCsAW3e\nPpa+9TIPXDea+7cUYJBvp1FP8ajFYu5bmUNH2MW3334bKysrrKyssLS0bFJcW6FQYGFhgVKpRKlU\nkpuby7ffftsBPRFcagij2Jmoif/ffNaOXMiCwdaAlrx9S3nr5Qe4bvT9bCmo1wYdqSO+/vprysvL\njfRD4/04G+oHhULBG2+80c696AQkwUWmSFo3rps0d1d5o+Ol0q7be0ohS7MlbcPDucukkR4PS4cq\nLk7v/Pz8JPQ7ukrPPPPMxXmo4JLj3LlzshwA0h9//NHZXeo6FK2TxnWbKzVVEbuk23uGSEuztcbH\nL6KOePTRR2WZCA0N7fgHdgLCU7wIaGtqkMfZJftZeiSc+cPM3Jap5wTutF/D+tTm9ljTURzxI5/v\nOkez4/iSCL7/chfnxCBf0EaEp3gx0VJTU//9luxfypHw+ZirIv6NjiiJ+J4vTemPVpCu0A2WhFHs\nSNRpLH9sPi98sJDZM5YQVQkUx3CgOoj+Zm9V6MgAv3L2JJQ1OVOd+Cl3LMzn2jHuNLt3u1MIk1yX\ncdcrBym58E8h6IIIo3hxUKct57H5L/DBwtnMWBKFXkUcoDqoP+bvZnrhOsIpZBKuy+7ilYPmaYjG\n4dMrEWEUO5DCnS/y/KEQbvZOJiqnErUEmqpSqpTWqNrwzVsq1ZRUNlJK6hN88dBqrv30aQJtTN1p\ngffcd1gQ/TCLD4tSbQLzEUbxYlDIzhef51DIzXgnR5FTqUZCQ1VpFUprVZuU8wXrCAtv5r6zgOiH\nF9NWFSE8RUEbKSfhjwNYjLqKkDu3kH50CSPtQeXSj97aTM5Xm9tOLflZKnw9rI2OFv/9Eh9YP8Vd\nA61avl3pzowXw1nz9AqyxEYHAjMRRvEiUJ7AHwcsGHVVCHduSefokpHYo8KlX2+0mecxW0X8Sx2h\ndJ/Bi+FreHpFVqvJOg09RWEUBW1Dk8uxQ8X0HTMA24bH3cZxh/MuDubXi5+uJJplb73OlwfyOLXy\nDV7/YgeZmrqTNansTQ/m1jCnBo0U8vcHfzHo4Un0MuMXtA+5jTHx77EuU1hFgXkIo9jxaHKPcai4\nL2MGGGkI3MbdgfOug9SrCB0l0ct46/UvOZB3ipVvvM4XOzIxqIh/ryPsCbltDPHvrUOoCGEUO4Sy\niG94+enn+DROS+WOt3nx3S31Rk41kPsWebLs1xRZqJVOIdy18D3WpUuU7v6SxY9NxlulP1cZ9RM7\nw1/iZvcGD6iI47dDztwQ2vAlaAHHQG4acpJVh4vb5wMKrniEUexIyoj45mWefu5T4rSV7Hj7Rd7d\nUm/kVAPvY5HnMn5NkTUETiF3sfC9daRLpez+cjGPTfamTkW0i45wDLyJISdX0ZqKEJ
6i4IJwHPYg\nrzwUjLXzOF7++GPe+b9pspEDFf0fXMqsPYtYfkbTUjNQk8jSJed4/rPpuDQ8XpLEkYoBDHZtNr2m\nGRwYEGZLckQWNW3+NIKuiNhkuCNxZNiDr/BQsDXO417m44/f4f+m1Rs5VP15cOks9ixaTmsqot10\nhMMAwmyTichqWUOIRBvBBVOevJ9cz2sY7NDMSbthLF77DJUbD1NssgUNOft24fzGD9znozI6oy7K\noMiqJ86WjW6pjuO9GwKZ8GZUo/JxKpw87Cg9nUdzSdsCQWPEJsMdTTnJ+3PxvGYwzauIxax9ppKN\nLbpubdcR1XHvcUPgBN6MapRVo3LCw66U03nma4gr1VNUtX6JoO1UcurQaawDn8fT1By38ygeeaKl\nNlR4THyCe5o5I+l0SAoVysaDNm0pZ5LiSfIsabLuSJK06HRXphALOgYLCwvZGAqj2M5UnuLQaWsC\nn/fEtIp4hBZVxAXoCG3pGZLik/AsaaIhkLQ6WlMRXSF8KoxiR6ArJuFQHr3vHNTsKPDfYunkiWNN\nFKW1jU7Yj+HzdInPm9yhoTSnGgcvFxo7lwKBKYRR7Dh0xQkcyuvNnYM6QkOY1hH2Yz4nXWqqIdCU\nklPtgJeL0BAifNoRVJ7kQKYL4UHdm19U/y9ROg0mxOokqSXmzvNUk5FQhe9wb0wuaRQIGtFwXlEY\nxfal8uQBMl3CCereERriAnREdQYJVb4M925ZQ3QFT1EYxQ5Amx9DRMVgxg82vyZFm3AK5pbg8/wV\nV2re9ZUp7EryZs5ot47pj+CKRBjFjkJLfkwEFYPH01Eqoq06ojJlF0nec2hNRQijKLggypL2kT/4\nFsJdO+oJ7kx5Jpz49TGUm3F1deIqtrg/wbz+HTMqFVyZCKPYUZSRtC+fwbeE02Eqok06oprEVVtw\nf2IeQkUIo9h+6PLZ8dxkZrxzhPi96QQ8OB3vDhSwXjPe4ta4j9l8rrXwSDF/v7eNsR/czwAh8II2\nIIxi+6LL38Fzk2fwzpF49qYH8OB07w6ZXjFgto4o/pv3to3lg/sHtNof4SkKzEdXRlpcPnbSbpbl\nLeCD2/t0qMBjF87CD3356tnVZJnUVzoKd7zAEpu3+XCyi6mLBIJmabgs40pVgBcTXVkacfl2SLuX\nkbfgA27v08GjVHN0hK6QHS8swebtD2mrirhSZUJkn7YXKl8e3rKTCfF5OD07hB6tlCRtD5zHvs2y\n8++wfHcu/5nQq6kRLjnGz/tG8c1XM3EXXqLgX3ClKsCLicr3YbbsnEB8nhPPDulhcilGe9Kajig5\n9jP7Rn3DVzNN7LTTiK6weF8YxfZE5cqAkI6bJWiKFX1nvsL/mTrtNJJnloy8iP0RXEl0hVDZxUbl\nOoCLqiJa0RFOI5+hLSqiK8iECJ8KBIJm6QpegUDQGGEUBQJBq1ypXoGgbQhPUSAQdFm6ggIUXDhX\nqkwIoygQCJpFGEVBY7pCSF0YRYFA0CxdQQEK2kZXGCgJoygQCFrlSlWAggvnSpUJYRQFAkGzdAWv\nQNA2ukL0QBhFgUDQLF1BAQounCt1oCSMokAgaJUrVQEK2kZXiB4IoygQCJqlKyhAQdvoCtEDYRQF\nJhGKsO1oCk6RXWn6vK4kg+xyczeH7lyEURQ0pqFM6HSXhxy3FWEUBUaInREuHHXqzzy9aC9ldZWV\ndWWn+GfTH+xOyENdd42SbJb/32vsyLv0FUpX8AoEbaOhfhBGUdAl6ApC3yGo4/nf/LWMXLiAwdag\nSf+F++78nsIBwTgeXsjM/2wnTwc4jeKpRy1YfN9Kci6jr1cMkARgvMfmlaofhFEUGCGM4oVRvOkJ\nPuj3LDM9lUAFxz9YRPzUR5g+tB/D73iS0K3/5dsTen/RKuBhHs3/L68fbSHOegkgwqeCxnSFjaeF\nURQYIYzihVDC/qVHCJ8/DHsAXQFRe/Jx7eWgf8
Gse9LPKpWdieV11/dkwp32rFmfKodVG6Irjvj/\n9s48PqrqbPzfmclkDwlhzb6SkJ2w7/uigiKbgIpotdWqrdW+v7cVrbu2xbq29a2tWkVBFtlBUUB2\nCEkge0JCNggkISFkXyaZmfv7Y2ZuZkhCJpgQkPP9fPgwmbn33DN3nnue8zznOc/Df/+xn9JuGHOq\nT33KR/tLuZ6mhPtUcDXm44NQioLbgttB6LufKlKONREd5GT4U9/AlSawUZqUihIbmqis18pnuASH\nU3cwg9qrm2rK5MMHV1E+ZUK3FIZ2HTYb9zUP8afj1T+pHWEpCkBYioLbEGEpXgfaRmoaldjZGO+d\n0pF+9hLNWqMi0WvRSA64O5vV9FYraa5uuMqCayb7n4+zYcqH/C7Kvnv6pvLhvr8+TPITrxDXRW+t\ncJ8KrkasKQpuO4RSvA5s+uLvraPocpPhb+VAhs8cxOWiaoPSay4mRxPGXZF95FNayi9iE+iBnXk7\nVT/ywjt2PPPQEGy7sXvKwfN5fvRGfrfuIl35RYVSFFyNsBQFtx23w0yw++nH1Afd2H+83Kh07Bnx\n3F8Z88Of+ff3x9j5jw84c+/b/CLYZClqyD1USMzS4biatXLlx3fYG/IEswd192PpxLDlE0h/+xuK\nuvCTijVFwdXcDkrRpvNDBLcTwlLsGE3BVv78Xhy2Qd44qlxRFV8k6vd/YGo/G4Y8+jKe935FzrMv\nMNQGVD6L+deWKWSdPkPtvR+yLbhv68PWkMTn+0bzwpuDzVqvJ23LCdzu/JeFouwuXKLuYejZfxJX\n9Qx+7l0/X1iKArg9lKKwFAUWiECb9mnO+w/3zfkAx8deZtUzv2GF92aef/8wV4yPkE3Qr/h40UFe\nXnsOOZzGdgBhYycx2lwhoiHz4zcp/cPfubuv+RWqyTpZT3CoO90QX9MW52CGO5zh1EWN1acI96ng\nam6HSbNQigILbgeh7zK6Ijb89n84OvJ5fhXtCOhoKL+EMvwehruZDnJkxCubeLZhB3FVHTelLTnM\nfrfX+ezRAEs3TXMl5yttGeimtjyhKY2374xi5htJdB4n00Ta23cSNfMNkq4+2MYVD8ca8sva2wTS\nPsJ9Kria28FSFO5TgQVCKbZFd34H73+rYPaeMbgBUM3pndkMvms8HhZmnRtjf/2ba7Zl4zGL3zzS\nzgeSHr2kMNvGYbp4Deey0snyrLZir6GOmnNZpGd5Ut3mYAlJp0d/nQafsBQFYKkUJUlCkqSf3eRJ\nKEWBBUIptqWpNItSmxCeijJGj9Zl8u1pRyY+F2QZPfpTULvi6aIhqabF8n2nCfyjUOIfVjXixIR/\nFCK1d7C2hpImZ7z6qtv5sH2E+1RwNeZKEQzWoo3Nz0uN/Ly+jeAnI9YU22LvFUNg30yc1EpAS9H2\nd9hYGcl/Ipy77yJKV0KH2fJ+bjV63K65rtFcsJFVLx4h5rW/sSLISrXcdJ6MxkAe92l//2NaWhpa\nrdZCEWo0reuPZWVlpKamWnzet29fvL29rbu+4GeBUIqC247e25Khpa5Kg4ObEypAp9WhsumRkJMu\no/K9n3++HMdf336P2lA1RT/EUz90FSOuI4qzY1yJWRDD5Y1p1PzOz+imbZ/mCwfZuu4TfhjyGEte\nicGabf4NOfvJ8lnCuH7tf/7OO+/wxRdfdHj+mjVrWLNmjcV769atY/ny5VZc/XrQodWpuElEQGCk\nPaX4c0ME2ggs6BWlqMnnq0di8ennjJ17MCMnT2L6E5sp0nZ+6o3BieinPmXt6md57NHlxNTW43fX\nxKvWE386g+94ltHpm0mpu/ZxzpM+Iq/mIL/wtvbxbSJz/W4G/2YZQR30+cUXX2wz4F2LsLAwli5d\navXxXUFfFc/qmX1Rhz/O5vPWBwYJeh5zTxIIpSi4DegV96nCiWH/e5Dy5loKfvyYF36/mq//eR8+\nN4MfoymLz5
77DR+lGbLV6Ir38N9TPty/ONQqC61LDJrPW0vTeH9XaaeZZ6rTE1FE+lvXh6ofefu7\nybzzWHCH2z2Cg4N54IEHrO7qSy+91GaA7Baas3hv4WOcmLmR4y/Z8f69T7C74hZZ29YWseX1v3Gs\nyvi3roHqhtZnqCn1X7zy5RmaeqVz3cPtYCkiCQRmLFiwQAIkQLrzzjt7uzu9jq74K2mWe5D06FdH\npMNbP5SevvsO6Yk12VJTT12w8pD03OTl0roL2o6PqU+TPv9gp3S+xYr2dBXSnsfHSw9tLpGu0aIk\nSZKUk5MjqVQq+ffv6F94eLik0+m68q26hE5r1rZOJ/XclbqTJun0C+Ok+3dVSJIkSZUHn5cWP/E3\n6YtPX5VW/uJf0pkmSZKkOunYb0ZIjx2o7dWe/hQ2b95sIQsVFRU37uJ18dJfH7lbGh8dJkVMflba\nV9Ek5X56vzQyarQ0c8nz0g9l3SMpQikKLFi0aJEs8HPmzOnx69XF/1V65O7xUnRYhDT52X1SRVOu\n9On9I6Wo0TOlJc//IHWTnP8kdI2Xpbzk49KJlAKpUtPz19MUbpZe++teqbQzLWYFVXHvSqv+my7V\nW3n8gw8+2KlSXL9+/U/vWDvcCrLQIRXrpDEDnpNSWiRJ0l2Q/hnpK/0pUyNJUpX07UI/6f79NYbj\nCldLfkHvSed7s68/gW3btlnIQllZ2Y3tgK5SOvJ8tKRWx0p/OlYg7V81V1r+fxlWy7c1CKUosGDJ\nkiWywM+aNeuGXFNXeUR6PlotqWP/JB0r2C+tmrtc+r+M7hRzgbVkZ2df01qMiIjoWSvxFpWFqh0z\nJPv7DkmNkiRJtfulRTZ+0v+VSpIkNUpxjzlJPh9cMByozZT+x36o9NkN1iXdxc6dOy3koaSk5MZ3\nQlMgfbl4oIRygDTij4elym4WR7GmKLCgNzJWKN0m8sr2z5h/8XUmjf5f9H/4iF+FO96QawssCQkJ\nYdmyZR1+3mNriUZudlnQVhWScvww8TkVhgLRzVVcqtPRmJ9EU4AXagCphRZT2TAA9DQ3GAOGVG4M\ntsnhXBfLeN0s3BRrirbeTJo/jUDHclJ3f0t6TfeuOQulKLCgtzbv23pPYv60QBzLU9n9bTrdLOeC\nLvCnP/2pXcUXERHBkiVLevz6N6Ms6Cvj+ejxhTzwh//wY2ENZQlrePPVj3jnVw/yab4WdZ9+UFFl\nyHvrFML4AC2lVTpAw5XLfRgZadq/00xNnS0O3Vkb7AbS+9GneioOvMRTe2awI2kdi8r/wvxHviC/\nG4OUhVIUWNArSlFfwYGXnmLPjB0krVtE+V/m88gX+Yhg/N4hNDS0XWvx5Zdf7vmUXjehLNSeepfF\nM1+kZOFHrP34TZ69fx7zHniW56Yk8d4PAxkfaIfb+PtwP3yKagClLyv+PIPDH3zFge8+4b91j/Gn\nqcbaJ9WnOex6P1O7dY/rjeNqS/FGTpz15ft4cck0Rs/9OzUjJhIcfAcrZ3twZdvjzLjjQd482cle\nJmvpXm+s4FbnoYcektcLJk6c2MNX00lle1+QFk8OlBycJ0nvZjZJ0pXvpIc8kFD7S9MeeEOKu3UD\n9W5psrKyJKVSKctCZGSkpNfre/CKN6csaC9tlx70HCAt+Ppi2+jdso3Skvu/kcokSZK0BdLfx42T\n/lnYelTjxVTp+MlsqdLsxIpNc6WIVSk9F73cw/z4448Wa4q5ubm93aVuRyhFgQUPP/ywLPDjxo3r\n7e4IepHly5fLsrBx48be7k4vUC+d/L2/pAp9VUptT4s15ko/JpTJyrLpzEfSyl99IeV1sFVGe2mn\n9Jv735PSb1WNKEnSoUOHLJRidnZ2b3ep2xHuU4EFvZfmTXCzYVpbjIqKYvHixb3dnRtPQyprvy4k\n6MFFhLaXYtY+iGkjB8gJEexCf83Hr4xA1UHJSq0+iD98+jsiui2L/I3npgi0
6WFuhpwhgpsIUSVD\nYCIsLIwlS5awePHin115IKtoLCKlvA8xI71pGxejo670CjYDB2BvZlrYeUTg10FzdoPD8OqZnt4w\nhFIUdDs1NbVkZmVTX19/Uyodta0do0aPQalQ4O3tzd59B3q7S+2itlXj6TGYIcFBt+SArdfrOZub\nR0npJVqaWzo/oZe4Z/5C+ri637RyoFQqcXJyIiJ8KC4u3Vi1BMDGmf7O9jjZteNQa8hg46ZaFv5m\ngFmqPR1lJzawMcWNu39xF37XEWGq0+nIys6hvLwCbcvNJxeVlZU8+fRvUSiUKBUKsnPyKCkt7+1u\ntUGpUuLi4kJURBgODg5dOlchSaJQ2o2gvqGB/fsPkZF1RtSm60bc3fsybcokQkOCe7srVpOWnsGR\no3FUVVf3dld+NiiVSqKjIpg5fQq2tt2030FfxY9PjeLXDp+T+O4EXOT3Kzm55msqZ/2KO7ws7QpN\nxisETq5hd/G7DOuCm1SSJOITThMXn0B9/S26ifEmRK1WM2J4DFMmTbA64b1QijeAxsZG1n79DWXl\nhhmVg709gwcPQqUSS7rXgwTU1zdQWnoJMBTDXbzwHoYEB/Vux6wgIfE0e/cflP/2GDwIRydHbj1b\n9+ZAp9NTUlJKk7H2Y0CAH/cturdLFT+uhb46gQ+eWsVxvwd4bF4YtiXpJOVpCFnwMPOCLZMK6HU6\nqnbeScT6P5CzfkarEu0ESZLY9+MhEhJPy+95e3liZ28n5OI60Wp1XCwuocVobcdER3LXHbOs8ioJ\npdjDSJLEl+s2cOFCMQqFghnTpxAbE4VabaiA3tjURFp6JsXFJfI59vb2REeG4+Ex2KofUafTcTY3\nj5yzebJLVqlUEjIkmCHBgVYNEJIkUVxSSlp6Bk1NrZECXp4eREWGY29vXU2I6poaklPSqKyskt/r\n06cPw2Iice/b16o2NJpmMjKzOF90QX7P1taWyIgwfLy95HtyqayMHbv2UF5+GZVKxaOPrKB/v5t3\nA9iZ7LNs2bYTAF8fb+bMnsGA/oYCh5IkUXThIukZWTQ3t+7K8/XxJiI8DDs766yfK5WVJKekU1NT\nI7/Xt68bw2KicO3Tx6o2mowyedFCJu2IiozAs0symU/O2dxWmVQoCQkJYkhwkNUyWVJSSmp6Jk1N\nrbUlPI0y6WCUyebmFuLiEzh6LA6AkSNimT1zmlXf1Tq0VBWmkHSmBreIUUT7OFtUG9FXHeXNX3+C\nNDma82//nvQ3LxO3vIPCle1wPC6eg4eOAjA0dAgzpk3B1dXwW0mSREHBOTLPZKPVGmqpKRQKAgP8\nGBoaIo8jnVFWfpnU1HTq6uvl9wYM6E9MVCTOzk5WtVFf30BqWjqXylrdpU6OjkRHRzJo4ACr2tBq\ntZzJPktefoHsMVOpVIQNDSEwwN+qbEmSJHG+6AIZmWcsnhU/Xx8iwofKnoLGpiYOHDxCckoaAHNm\nTWfE8GGdtv/zUIq6eqqb7HF1uvkqkp4vusBX6zYCcPe8O4iKCAegurqGw0ePk5mV3eFi9aCBAxg7\nZhQR4UPb/Vyr1XLsxEmSU9I6dLk4OTkROyyK8WNHd1ghOz0ji7j4RMrK2l8bsLGxITwslEkTx3U4\nsBaXlHL02Aly8wra/RwgwN+XCePH4uvTfrX2uvp6jhw9QXpGljzDu5p+/dwZPXI4w2KiUCgU1NXX\n89nnX1FXV98Dg2FbTI9LV9cxJUniv1+spfRSGV6eHty/bDFqtRpJkkhJTSc+4RSXK660e65arSYy\nIoxJE8fh7NT+AHa+6ALHjsdRUHi+wz4EBQUwafxYPD092v28pqZWlknTAHw1AwcMYMyYEbIcX41W\nq+V4XDxJyWnUmw3A5jg5OTIsJooJ48Z0KJMZmWeIO5lgMQCbo1KpCA8LZfLE8bIC2X/gMCfjE7Gx\nseGZp5+weiLx06jjwEMB/M+EZBIeqeW1
IfNxO5zF7/ys8wK1tLTw94/+Q1NTE2FDQ5h/910olUr0\nej0JiUkknk6iurqm3XPt7eyIjopg4sRx2Nu176vNzcvneFw8Fy4Ut/u5QqEgNGQIkyaMZcCA/u0e\nU1FxhSPHTnAm+2yHcRBenh6MGzuakCHte2s0mmaOHj9BamoGjU3tF8/q08eFEbHDGDN6RLvKUZIk\nkpJTSUg8TcWVynbbsLU1PisTxuPk5IgkSWzeuoOcs3m4u/fl8cce7vTZ7QGlqKeh/BLNrh649bhM\naina+Te+yHOnf81RNiSG8fba5xlprd+iCyQmJhIUFERfK60dE1u37yLrTA6DBw/ikYfuR6FQUHqp\njA2btsiKTK22ISgwEDs7W9liu3y5Qm5jzOgRTJ862eLHbGxqYtPmbRbC7u/nKw8Q1dXVFJ4rkj/z\n8fZiyaL5FhafJEns//EQ8WZumwH9+8kWqkajITevQB4gnZycWLZkAYMGDbT4jmeyc9i+8ztZuTs5\nOhLg74fKRoVOp6fw3Hnq6gzZJhQKBXPvnE10VIRFGxUVV1i/cQvVRgtHpVIRFOiPg4MDkiRRVlZO\n6aUy+fiY6EjunDMTpVIpz7Tt7ez47dOPdzjQdge1tbVMnjyZl156iXvvvddq5XjxYjFffLUegBUP\nLMXH2wu9Xs933+8jJTVdPm7QoIEMGjgAhUJBY2MjefmF8n117dOHZfctpN9V1nBqWga7v/tBVtjO\nzs74+/miUinRaXUUFJ6jvsEgayqVivl338nQ0BCLNi6VlbF+41ZZkdnY2BAcFICdnZ1ssZWbyeSo\nkcOZOX2Kxfdvamrimy07LCx8fz8fXF0N2Vyqq2soPNeqtL29PVmycL5FIIQkSfx48Agn4xPl9/r3\n7ydbqBpNM3n5+bS0GGXS0ZGlSxYwePAgGpua+PAfH6PT6bjrjlkMi4my6rf5STRn8LznLJziiljl\n+B+Gj87lH9+vZMiQKAZZMf4lJafy3ff7UCqVPP3rx3B2dkar1bJtx7fknM2Vj/Py8qCfu7s8EczP\nL5R/7wH9+7F0yUL69LEc+OLiE/nxwGH5bzdXV3x8vFAqlbS0tJCfXyi7nW1tbVm88B78/Xwt2jhf\ndIFNm7ejMR5nZ2dLUGAAarUavV7PhYvFFl6hqZMnMH7cGIs26urqWL9pqzzpVigUBAT44eLsjCRJ\nVFZWUXThonx8cFAAC+bPs7CC9Xo9u7/7gbT0TPm9wYMGMrCDZ8XN1ZVl9y3E3b0vJaWX+O8XawF4\neMXyDieFJq5j9NDTcKmElv5euF5lmDVl/J1l837P9rLprM39lvs9lICGnA0fcir2tywP6eYNOvpi\ndq/eCv86zBMR9+AwOpI3jjzJttn1fPfxPrxWPkR0NwWkbd68mY8++oinn36a5557jn79rHOPlJdf\nBiB8aAgKhYIrlZV8veEbGhubsLOzZcqkCURFhmNnNtOTJImLF0s4fPQYheeKOBl/CrWNmsmTxgOG\n2bi5Qhw1IpZRo4bjZhx8TFRVVROfcIrE08kUXbjIN1t2sOy+hbLSOHTkmKwQA/x9mTRxPF6eHpYD\nnUZDWnomhw4fo76+nq83bGblQ8vp6+YGQH7BObZu340kSbi5ujJ1ykRCQ4Lb7HfMzSvgwKEjVFRc\nYde332Nrq5YH5traOtZt+Iba2jpsbGyYPHEcMdGRbaLGSkovcfRYHGdz80hJTUelUjFn1nSGBAdx\n8NBRmjQa6usb5IlBT5GcnMzChQsZNmyY1crRZPE4Ozvj7eWJJEn8sO+ArBCHBAcycfxYPDwGW5zX\n2NhISmo6h4+eoLqmhnUbvuHhFffLkZZnss+y69vvAYMVPW3KRIKDAtsUi87OyeXgoaNUVVezdftu\nli6xIzDAsHmgsqqKrzdspqGhEVtbNVMmTzS4zK+WyeISjhw9TkHheRIST6O2sWHqlImAQSbNFeLI\n4cMY
NWq4LCcmqqqrSUhMIiHxNBcuFLNpy3buX7pYlskjR0/ICtHfz4dJE8fj7eVpcX81Jpk8coz6\nhga+3riZlSuW4963Lz7enhSeK7KYVPYotr7c9UAon+z4in9d3kmV8wC2bSri/71knUI2yYW/ny/O\nRiWxY9ceWSFGRoQxbuxo2c1uoq6untNJKRw7cZLyyxWs37SFhx5YKk96TyelyArRY/AgpkyeSIC/\nr8V9bGlpIetMDgcOHaG+voGN32xjxf33yTJ46VIZGzZtpaWlBUcHB6ZOmUhE+FALZSVJEoXnijh0\n5BjFxSUcPHwMtVrNqJHDAcNvtX7jVjmeYvzY0YwYPqxNpPDliivEnUwgNS2D3LwCtu34lkUL7kap\nVCJJEnt+2C8rxJAhQUwcP5bBgwdZtNHQ2EhKShqHj56gqrpaflYGDxqIo6MDDQ2NXK640qlS7HKk\nh750A/NDx/NSfP3VH7D3iwKm/m0vqWc3s8zD0HTVj//DUyfHcKdJITYXse/fH/HJ5//hw/e+4lTl\nT9iWoPTlVweOsSrCDurySbjsy2h/B7DxZPqUMl5+ehMl3bTrQalUUlNTw1tvvYW/vz9/+MMfKC/v\nPBTZNKM1CevOXXtobGzC3t6eFQ8sY+SIWAuFCIaZlLe3J8vuW0RkRBgAR4/HyUrw+IlWd8i8u+Yw\na+a0NgoRwM3NldmzpjP3ztmAYdZ34mSC/Pr4iXgAoiLDWbpkYZvBBwwumlEjYlnxwFLs7exoaGxk\n5+49xu/Wwrbtu5AkiQH9+7HyoeWEh4W2WS8yrG8GsXLFcjyND9yOXXtoaGwE4Lvv91FbW4dabcMD\nyxYzdsyodsOoPQYPYvHCexg5IhYwPPi5efkWg3dHbteewKQchw8fztatW68ZVdxitLYd7O1QKBTk\n5hVwOikFMCiQxQvnt1GIAA4ODowdM4oHli1GrbahtraO777fCxgU5o5d3wHg4TGYlQ8uI2RIcBvX\nk8nVuPKh5QwY0B9Jkti2fZd8r3bt/p6Ghkbs7exY8cAyRo2IbeOOUygUeHt5snTJQqIiDa7T43Hx\nshKMO5kov55752xmz5reRiGCYQY/a8ZU7p47B4ALF4o5duKk4fXFYo4eN6wLRoaHsey+RRZryCbs\n7OwYOSKWFQ8sw8HensbGJnbs2mP8zN7ifvc8Lkz6YB//fHghj721m7zE93ntpbsYZOXIavLC2Nsb\n7ndKajpnsnMAmDJpAvfMu7ONQgRwdnZi8qTx3LfIMCG7fLmC/UYlWFlVxfd7fwQgMMCfB++/j8AA\nvzb3Ua1WEx0VwcMr7qdPHxe0Wi1btu1Cr9cjSRJbd+ympaUFZ2dnVj60nGFmsRAmFAoFAf6+PLh8\nCcFBgQDs3X9QnpQcOHSUsvJyYzDcfKZOmdju1pn+/dyZd9ccpk+dDMDZ3DySklMByDmbJ68Ljh45\nnEUL7mmjEAEcHRwYN3Y09y8zTLJqamrZ88M+FAqFPMZaIxddVoqNhacp1V5g2/pUzNWivjyBk36P\n8MSiKUR5Ohkabkjg1acKeOKFybgB0EDCK0v5u93dPPLwL3lsag6/ffgzCn+C/CptbFBqi9nx0ruo\n39nO/4YbfBZ2kU/wO9WLPPtD1fU3bn4ds4Gmrq6O1atX4+/vz+9//3tKS0s7PM8kiJIkUXqpTA5e\nuGfeHQzswIdvfs25d86WH4rTySnodDqSUgzCMmpEbBs3ZHvEREfKC8xJyWno9Xp5QB44oD933TGr\n0wXuQQMHMG/uHYBhICsrLyczK5smjQalUsmSRffi5HjtEj/2dnYsXjQftVqNVqslLT2T6uoacvPy\nAZgzawZeXp7XbEOhUDBrxlS8vQ3HnUpKobe3KZorx23btrWrHE33V2/87FRSMmBwIc6aOa1TS9PL\ny5M7Zs8AIDevgOrqGlLTM9FqtajV6jau8fZwcnRkyaL5KJVKmjQaMr
OyKSsvl11X8+bO6TRgQqlU\nctcdsxg4wHDc6aQUgzwZB7ARsTHEREdesw2AqMgIRhknN8kpaeh0Olkm+/fvx9y7ZncqkwMH9Ofu\neQaZLC4uofRSWS/JggpndxdsAZWzG45dGFVNE0iTIjp1OhmA0JBgxo8b3en5QUEBTJ1ssNYzMs/Q\n1NREUnIakiTh5OTUxg3ZHq6ufVi84B7AECiXX1BI4bkirhjX7RYtuLvdCY45NjY2LJg/V1Z4SSlp\naDTNsnU3aeK4DtcbzRk7ZiThYaGAQbYkSeK08Vnx9fFmxlUu+/bw8fZizqzpgEGh1tTUdimKt8tK\nsa5Ez4LnJlC1bT2pZlqxMjkR97FBmD+W1YfeZF34M8wxTXSqj/H2pxrmzvRCBTiG3kFM8jt8mfMT\ncuBrS9j3wceULPsPfx2Tx64MU6ecGfvrWRx8aTuXrr91mfYe0IaGBt59910CAwN55plnKC5uu5it\nUBp+Dr1xkRgMe+uCAgOsuq5KpZIto6wzOaRnZslrkaNGDbe6/6ONx9bV1ZGekcWZ7LOGNkYOtzp8\nfUhwIH37ugGQlJQqD4RDQ4fg5tbWUm0PZycn2fpNSkqR74mTk2OHAUVXo1AoGD1yBAD5+YXyOmRv\nk5yczIIFC2TlaI6sFPV6qqqryc8vBGD0yBFWr0uGhw3Fyckw8Tid3HrvIiPCOgzAuRo3V1eGhg4x\ntpFKUpKhjb5ublZvaVGpVIwaaZDJM9lnSc/IkteMR48aYVUb0Cq/9fUNpGdmkXXGYCGNGhFrtUwG\nBQbg7m5Y5zfdj1sJ0/fUanWUlF6S3aljRo20Wi5iY6OxsbFBq9WSkpZBqtElP3xYtNXBRoMHD8LP\n1weA00mpJCUbJijeXp54deJuNKFWqxkRGwMY1rlT09JpaWlBpVLJ71vDGKMMlV+uIPNMthw8NnqU\n9c9KZEQYjkZvU3JqmtXXhi6vKdaRl+XC1Ace5/iHq1if+hbjxjkB1WQm2DHmGXNLoYG09d/Tf+6/\nMRnL2tJTJNe6sczBqGBs+uJle55DOfUQfo0fT3OO3R+u5j9bj5FXo0UCFH0m8Netf8b25bHM+ryO\nAS7/5H+bPfl/xxO513iaXfBsItM/4ETVSu51a9tsRkYGtbW1tLS0oNVq5f/be52UlNRh9xobG/nw\nww/5+OOPefTRR/njH/+Ij49BwJQmS1Gvl9cJYqIiuxS9GBE+lB/2HUCn05GSYhB4fz/fdl2mHdHX\nzQ0/Xx/OnS8iOcVgLdrY2BA2NNTqNhQKBTFRERw8fIys7BwaGgzuz+iozi0Dc4ZFR5KUnMqVyip5\nIIyMCO/S3rIhwYE4OBhcZwUF5+T3X3/9dXJzz6LT6eR/er2+3dfX+uxa53SGSTnGxsbKa44qM6V4\nNtdgGTvY2zMkONDq76xSqYiKDCfuZCJZZ3KoqjJs/rfGMjMnJjqSzKxsSkpK5a0b0dERXZLJ8LBQ\nvt/7I1qtVnZt+fp4y5Mma3BzdSXA35eCwvOytWhw9Vo3OQKDTA6LjuTHg0fIzjnbYWTzzYppr7JO\nryMnxzA+uLv3xcvLOkUEBg/M0NAQ0jMySU/PlJcluioX0dERnDtfRG5eviwLXW8jkoOHj6HRaEjP\nyAIMVm9Xssp4eAxmwID+lJdflidtjo4OBAdZZ0iA4VmJjAwjPuG0fF+tpWtKUXOO1IZY7g0awy9G\nV/PHtcm8OW4Czg0FnNYM436L4Kca0g62EPvLVv+xtq6CBtSoTM+eUo2tooGKmmusA9Wf4rXZ93F4\nyiv84bURfHHfoxSvTuCfM/0IHOSO3b/OIf2rg3MdA4jSxZN0Wc+9bm0tvRUrVlxT2XUVjUbDRx99\nxCeffMLKlStZtWoVCjO3mV5nWODsaiCIra0tjo4O1NbWodNfXxvm55jCqh0dHbC1tW6fU2sbBkVs\n+i4Abm5d64urmVWp0xujxay0NE
2oVCr6uLjQ2Ngk3xOAxMQEjh8/3qW2eoqkpCRZOb7wwp8Aw703\n3TsXF5cubzI3TYTMQ+O7Kgvmk6nrlUm1Wo2TkyPV1TVyX65PJi2/j6OjQ5e3UrQnkwB/Xv0eZeWX\n8fP14Xe/eQKATVu2y2vpf33zZWxtbUlKTmXN2g0A/PLRhwgfGkptbR0vvfZnAKZPncTdxqWDv/zt\nfS5dKsfXx5tnf/trADZv3cHR44Z10b+88RJ2dnYkp6TJ0ca//MVDhIeFUltXx0uvGtqcOmUi8+fd\niUpp+P11Wp3Fs93VLT+mZ9D0PAFtolE7bcNMLkxLAF39TZ2dnGSr9aeMVW6ufSgvvyy30cfFxao9\njJZtGL6ProNtJB3RJaWoLUmjbPgkBtgMZM5TM3nm8Y848voEZl1JpnjoNCwCxfWNlFcoGOjWegmF\n2g4b9LQuuejQSSrU6o4EQEP66uX8M3IN+W9NwIkGnBc9w+KyvoT4D6DT4cTWHS/HOs5caQHaRr5a\nu/G1qzQ3N/Pll1/i6urKkFDDmp+kv/W3g94KKJU3117VUaNG8fLLL+PrH0B2bsFNme/258q8u+bQ\n1KTBybnVgzVqRCz+voZtBypjxKuvrw/3LzVUAfEcbAh2sre3k98z34I0947ZhjadWtscMTwWX6Nn\nyBRF6+vj3dqmMYDK3q5tmyobo1LU//wSa9+qdEEp6rgYl8WQsYtRoaTf9N+y0GY+H+wuIsYlG5/o\nZZZKSmmDg1rLhaZWZaDu642r9gfqTIE12gauNLvg27+DmaEmhy//Xsz8b2NxAtBXkh5XR+Ccvp0r\nRAB9M/VaNU727SvdntjPZmtryy9/+UtWrVqFp6cnn/73SwAkSY/S6CrpaDNuRzQ3N8uuSpMbruY6\n1tFM1zXNuBoaGmlpaenS5KDamK9TaZairrq6xupsNQDVVa05P00z5eou5gHV6XTU1NYa22jty5gx\no+nXzx2VSiX/UyqVHf5t7Wem1y0tLaxatarT/pmU4dy5cwFDEAQYLCLTvautrZVdhtZiypdqPmuu\nrq7pNMipvTag9Xes6aJMtrS0yGvbSlkma7vUBpjJk5lMNjc3dyl/aXsyCchRsub4+/m22YvXz70v\n/dwt5dewrSC2zfntt+mDv5+PxXvu7n3ltc5rtWn67XU6vSzH1dU1SJLUJWvR9GyrzCaFNTW1XbIW\nzeVCoVAgSVKXx6q6+no5otb8+3SVKvn7GGWrttbw7HTBWjR9H5VSibYLk1HrtUJzAQfTQ5i82HiK\nyxh++2QAo156ja+X+DJ2+tWRb84ERtiyr6QJk5WmHDiBe0PeJ6tEAwPtoD6fNN1oVka7QFMBe78r\nJuruCQyWe6VHqx5EhJfhgddkfcoHzb/nv3dZmcqrqYTCFl/GdrCL9osvvkCj0WBjY2PxT61WW/xv\nY2PD22+/zQsvvNDhpdRqNY888ggvvviivJ4IWLhPhwQHkZySRkpaOmPHWL+QnpF5Rh48Y6IjuXCx\nmILC81RX11jtmqisquLcecNm/piYKIpLStFqtWRmZVu9biBJEilpGQAMDQ2hpKSUktJLpKSmE+Df\nUcGctqSkGdZF3fu6ERoawom4eNLSM7uUtDc3L5/GRkNmjAB/Pw4fPQHAH//4PP3bCWHvLmpra6+p\nFK9WhiZaA20khgQFsnffARqbmjibmy8HvnSGTqeTo/nCQkPIzjnLlcoqUlLTZWvEGkx7Iz0GD8LD\nYzCnk1JISctg3NjRVsukKeuNUqlkWEwUFy4Wc+58EZVVVZ1GKpqorq6RgyhioqIoLS1Dp9ORkXmG\n2GHRVrVhyggEEDIk2CId3K1Aq1LUETIkiONx8Vy5UsnFiyVyhHVnNGk08jaOyIgwampraWxsIiU1\nnUkTx1ndl9RUw7MdHBSASmVDds5ZUlLTu7SuaArysbOzIzIijJLSS2Tn5NLY2Gj1umJJSam8vzs2
\nNprzRRdoaGgkN6/AqghWMNxP05pmyJBgMrPOWP0dOle7tYd4angssxY/S+K4WfjJCsuOiCffZWXL\nJ7x1NoqhbSYkroy8ZyCJcSXIOy7sIvjtB/eQ8v6nHMtNZ+u7X+L6/PssGKSkOe9rfrdwHk+bb6Gw\nC+PJN8fw/V8/YuOat/jNa5W8+O2bjLJ2UlyZwEHnBYzvwIgJDg4mIiKC0NBQgoKC8PPzw8vLi4ED\nB+Lu7o6LiwsODg6o1eoOB2sbGxseeeQRsrOz+fjjjy0UIpgH2kgMN0ZgXblSSV5+x+nQzNHpdCQa\nw7TDQkOIjAiTXTfmmWg6IyHBcKyzsxNREWHyQJyQmGR1TbTcvHw5e0XssGj5+5zJPmt1xYf6+gbS\n0rOMbcQQOyxKft9awTVVFADDPqye3qxvDaNGjWLXrl3Ex8e3UYjQGlCh1+txc3MlMMAfMCQItzap\nlKHkmME6i42NJnaY4f6nZ2RZXVmhurpGjjweHhsjK5/Kyio5AKgz9Ho9CacMa/FDQ4cYol+N+TNN\nv4s1mBJgOzk5EhUZRpgxmUPiKetlMj+/UE75NbwLEY43C+ZK0cNjsLwl5mTCKavlIjk5jZYWLSqV\niujoSDnwLSkl1SI36LUovVQmT5qHx8YwPNYgFxcuFlvkZb4WLS0tckR6dGQ40VGRqNVq43Yb6yOD\nTyacAgxbc8KHhsqWfXwX7kl6RpbsXetqZqPOlaKtD9PmjcB/7G95Yc5AyxPcJrN6/0kOf3I3befm\nSnyW/j+81n9N644LJf1mrGbbOzMgr5zAp9az7qkw7ADbiFWknv+cCY7mX9qWoMfWsfONeYyc8xz/\n2vAey4KszYqj5/z2L7F96hFCuyHd3NVmu1KpZMWKFWRlZfHZZ58RENB+ZJS8JUOvZ/CggfKMfqcx\nkfU1v4Fez+7v9srHDY+NQaVSMSza8CMnJJ4mLT2j076npmXIijU2JhqlUikPIGXl5Xz3/b5O17rK\nysvZuduQOcXby5NBAwcQNjQUezs79Ho9mzZvl4WwI5o0GjZt2U5LSws2NjZERYXj5upKkDGqbM8P\n+y2SULeHqaKAaW/d8NgYrHxOeoTOlKEJ8y0Z0DqAF124yL79Bzt92C8Wl7Dnh32AYW+am6srUVHh\n2NjY0NLSwqbN2+SUXR3R0NDIps3b0Ov12NnZETY0lEEDB+Bt3Bu6a/ceOfNIR+j1er7ds1dO2TU8\nNgalUklsjGEQPXU6mdS0zmUyLSNTntQNi45CpVLJ96T8cgW7v9vbqUyWX66Qkxd4egxm8KCBvSoL\n14ONqjXQRqFQyPcgO+csJ+LiOz0/P7+Qg4cNycQjwofiYG8vTzTr6urZsn1Xh3lsTVTX1PDNlu2A\nIZVgYIA//n6+uBsjiTdv2ylHOneEITXdbtmFPsy4HcS0/erw0eOczc3r9PvExSeSmZUNGGTL/J6c\nL7rA/gOHOn1WLlwslpMXDAkOpE8fF7oiFp0rRbtAFr/2Cf9ZNcvMrdl6ukvIaCLc27eilH6/4B9z\ndvPuoSqL923cQ5kwZxoxnvZmHWjizMHzDItoG4WocvMlcJB91zZVNpzm/74M473fhbdTNbvrmAY1\npVLJsmXLyMjIYM2aNQQHX7uOn61ZNQyAe+bdacjC0dTEmrXrSTydjEZjOZuTJIkLF4vZsGkL6RkG\nd9mEcWNkd8qE8WPkgWzn7u/Zu/9gu377qupq9u47IKcB8/H2YtzYUYAhEGDcWMPm4NS0DDZs2srF\ni8VtBE6j0ZB4Kok1X22gqakJRwcHORLP1lbNvffMRaFQUF5+mc/XrCPrTE6bWb6poO6aL7+WZ513\nz71D3kd015xZuLg409KiZe3Xm4g7mUhjY1sFW3qpjM1bd8gWxvDYaIYEB1oog54KnrqakSNHsnPn\nzk6VoQlTv3Q6Hc3NLQwJDpRn4wmnkvhmyw65FJY5jY2NxJ1M
ZO3Xm2hp0eLi4sxdc2YBhgwe95g2\nr5eUsubLry0qpZjQ6XRkncnh8y/XUVZ+GYVCwYJ75sqRx6bfokmjYc1XG0g8lSTnujRhSD1YzIZv\ntspKb9zYUfIWCPPXu779nh/2HWjXe1BdXcO+/QfZacxA4+3lyYTxhlyZ3t6eTDDmzUzPyGT9xi1c\nuNCeTDaTeDqZNV99TWNTEw729twz707jZ4Z+q3sw/2130lrRoRFJkhgWE0VoiMGLc/DwMXbs2tNu\nyrq6+noOHz3Ohm+2otfr6d/PnRnTpwDg3rcvc4yJHvLzC/ly3UYKCs+1uY8tLS2kpmXw+Zp11NTU\nGjbg3zsPpVKJQqHgXuPG/9raOj7/ch0pqeltMkYZ0rydZ+3Xm2RPw8wZU+WEI9OnTmKgMYvSN1t2\ncPDwUXlPqzkVFVfY/d0Pcmq6IcGBDDd6MUJDgmVrLz7hNFu27eSSWR5kE42NjZyIS2Dt15vQarX0\n6ePCnXNmIkkSzcYx1hq56PkqGXXxvP7wWoZ/9B5zB15DrTUXkZjjRGyku3VBNNekluOv/ppvp/yd\nN6Z2LYF3R7z//vscPXqUV155hchI633s+/YfJD7xNH36uPDk44+iVCopLb3Ehk1b5STNarXaIvly\nsZlPHQybWadPuyoheGMjmzZv58LF1oQBAf6+cnh6VVW1RfJlH28vFi+aL5fbgfbruA0c0P+qhOBm\nyZedHFm6ZCGDr0oInnUmhx27zBKCOzkSGOCPSqVCp9NReO48tbWtCcHvumNWm3WKyxVX2HBVQvDg\noAB5HaL0UpmF0oiOipAz8dzIhOAajYa9e/cyb968Lp1XV1fP3z/6N5IkydVSTFaXuWU1eNBAOYVV\nY2MTuXn58n3t08eFZfctalMe6+qE4C4upoTghvufX1Aou1dVKhX3zLuTsKFXJQS/VMb6q5LUBwcF\nWiQELzOTyVEjYpk5Y2qbhOCbNm+3SO7s7+crb7eprq62qOLh7eXJkkXtJAQ/cFh2oYEh4bWnMSev\nKUm9aXB2cnTkviUL8OithOA/kfLyy/znszUALF+6iAB/P1paWti+81tyzrZaVt7envQ35luuq6u3\nKL3Uv38/lrWXEPxkIj8ebE0I3tfNTU4Irm3RkptfIK/B2tqqWbRgPgH+lkFI584X8c2WHWYJwe0I\nDgqQK7wUFV3gillC8CmTJ8gTGxN1dXUW+U8VCgWBgf64OBu261VUXLGQmaCgABbcM89iu1h7CcE9\nBg+So3gbGw1rjnLyfNc+LLtvEf3c+1JaeonPjAnBV65Y3mkyghtTOkpTRkGdOwH9btDsTVvF+Qpb\nfAdZH5HXGV2N0jRxueIK//7kcwBmz5rOSGO6tarqao4cPXHN0lEDBwxg7JiRsgviarRaLceOG0tH\nNXRUOqrzMj1p6ZmcjE+0GPTMkcv0TBrfYemoi8UlHD0eR941Skf5+/kyYfwYOXPG1dTV1XPk2AnS\nMzJlRXw1/dz7MmrkcGKHRaNQKKhvaODT/35FXV0dI4YPk9M73Yxs3rqD7Jxc+vVzZ+WK5dgbFU5y\nShrxiaep6LB0lA0R4WFMnji+w9p3584Xcez4SYuJ0NUEBQYwccLYDgeF6poaDh+5djmzgQP6M2b0\nCKIi208vqNVqOX4inqSU1I7LmTk6EhMTycTxY69dzuxkYofuXEMNvlAmTxon70c7cOgoJ+LijaWj\nHm+TU/hmZc1X67lwsRgvTw8eWL4EGxsb9Ho98QmnOXU6ucOMTXZ2dkRHhjNp4rgOU/zl5uUbciVf\nbJttC0ylo4KZOGFch2knL1dc4cjR42Tn5Hbo0vb09GD82FGEDGnfc6bRNHP02AlS0jI6DIYylI6K\nYczokR2WjjqdnEpCwikLRWyOqcza5ImtpaO2bt/FmeyzuPd14/FfPtIbpaMEV7Pxm61yncFZM6bK\na4NgmOG0LehqT3RU1wq6
5pzNsyzoqlQSGhLcpYKuxcUlbQq6enl5WhR07Yyq6uo2RYZdXfswLDqq\nTYh6R2g0GjIyz8gL/9BaZNjXx1u+JxUVV9i281suXSpDpVLxi5UPdFgT7mag6MJFvjRuEvfy9ODu\nuXfI98RUZDgtPbPdwqnWDvBXrlSSnJpm4U43FRm2NgOSqfD1RbOB1FT42vOqKiodYSp8bT6Qmgpf\nhwzpgkyWlJJ61UBqKnxtsjC1Wi3xiaflQr3DY2PkPLG3Atk5uWzeugOAwEB/7pozS7b6JEkiv6DQ\nosalociwP2FDu1JkuJyUlLZFhodFR+LsbF0pobr6elJT2xYZjomObFNOriO0Wi1Z2Tnk5ZkXGbYh\nPKzrRYavLshteFZaC3I3Nzdz8PAxEo1BYbNmTpPz7V4LoRRvABqNhrVfb5LrATo5OeHt5dHlbCYC\nA5IkUV/fYFG3b+G989rUCLwZSUlNZ/d3P8h/+/p44+Tk2OUMJgIDWq2Oi8XFslXq5+vD0iULetSF\n3hOciIvngFGpKxQK/Hx9cHCwF3JxnbS0aCm6cFGeTEVFhjPvrjlW3U+hFG8QDQ2NfL93v5zrU9A9\n9OnjwvSpk+XM+rcCaekZHDx8TF5nFfx0FAoFEWFDmT17eodV6G92Ek4lcex4XKdR3ALrUalUxMZE\nMWP6FKuNEKEUbzCVVVWkp2dRV19/06b8qqurw87OFrW6O+J2ewZbW1s8PAYRFhpyS1rcpojQktJL\nVu8lE7RFqVTi7OREVGR4l/Pn3oy0tLSQkXmGsvLLN7Q2aFdpbGjAoQtZlG40KpWSPi4uREdFdrgO\n3xFCKQra8MILL+Dl5cWTTz7Z210RCAQ3GXFxcbz66qt89913vd2VHkEoRYEFly9fJiAggL59+5Kb\nm9ulHJQCgeDnz8yZM9m/fz+JiYmMGGF9/cxbhS4XGRb8vFm9ejV1dXUUFRXxxRdf9HZ3BALBTcTB\ngwfZv38/AG+88UYv96ZnEJaiQObSpUsEBgbSYNzzGBAQQE5Ozi0XyScQCHqGSZMmcfRoa5Rsampq\nl5KZ3AoIS1Eg85e//EVWiAAFBQWsXbu2F3skEAhuFvbs2SMrRDBsjXrrrbd6sUc9g7AUBQAUFxcT\nFBTUJttESEgIWVlZXa56LRAIfl6MHDmSU6dOWbynUqnIyspiyBDryp/dCoiRTgDAW2+91W76pZyc\nHNavX98LPRIIBDcLW7dubaMQwbC16M9//nMv9KjnEJaigKKiIoKDgzvcLxceHk56errIriEQ3Ibo\n9XpiYmJIT09v93O1Ws3Zs2fx87O+0PjNjLAUBbzxxhvX3ECemZnJ5s2bb2CPBALBzcKGDRs6VIhg\nSDiwevXqG9ijnkVYirc5BQUFhIaGdpo9IyYmhqSkJGEtCgS3ETqdjvDwcHJyrp2e0t7envz8fDw8\nrl2W6VZAWIq3Oa+//rpV6aRSUlLYuXPnDeiRQCC4WVizZk2nChEMtTT/9re/3YAe9TzCUryNyc3N\nZejQoR3WzruakSNHkpCQ0MO9EggENwMtLS2EhIRQWFho1fFOTk4UFhbSv//NW77NGoSleBvz6quv\nWq0QARITE3+2+Q4FAoEln3zyidUKEaC+vp7333+/x/pzoxCW4m1MZmYmLS0t6PV6+d/SpUspKDAU\nRF6yZAlPPvmkxedeXl5ERLRfdV0gEPx8KC4upra2Fq1Wi06nQ6fT8dZbb/HNN98AEBwczMcffyx/\nptPpcHR0ZNq0ab3c85+GyN91GxMeHt7mPfMq3B4eHkydOvUG9kggENwseHp6tnlv8ODB8msnJyem\nT59+I7t0QxDuU4EF5plruuJaFQgEP3/Mo89v1nqwPxWhFAUWmBfs/bkKvUAguD7MJ80/15U3oRQF\nFpgLvVCKAoHAHGEpCm47hPtUIBB0xO0waRZKUWCBcJ8KBIKOMLcUhftUcFtwO8wEBQLB9X
E7jA9C\nKQosEO5TgUDQEcJSFNx23A4zQYFAcH3cDuODUIoCC8SaokAg6IjbwVIUad4EFly4cAGNRoNKpcLZ\n2fmWT+4rEAi6j5ycHAoKClAqlTg6OjJhwoTe7lK3I5SiQCAQCARGhPtUIBAIBAIjQikKBAKBQGBE\nKEWBQCAQCIwIpSgQCAQCgRGhFAUCgUAgMCKUokAgEAgERoRSFEB9Aqt/cQ8TYsKJnPIc+69oyPvs\nAUZFj2HWfavYWy428QsEtzW30Rgh9ikKDOirOPriFKb/TcUfD25h6u6n+cRnNZ88EY5jb/dNIBD0\nPrfJGCGUoqCV5kK+emAMK7ZIjPjfzex7cxJuwpcgEAhM3AZjxM/s6wh+ErbeTJo/jUDHclJ3f0t6\nzc/HJSIQCLqB22CMEEpRYERPxYGXeGrPDHYkrWNR+V+Y/8gX5Df3dr8EAsHNwe0xRgilKEBfvo8X\nl0xj9Ny/UzNiIsHBd7BytgdXtj3OjDse5M2Tdb3dRYFA0IvcTmOEWFMUCAQCgcCIsBQFAoFAIDAi\nlKJAIBAIBEaEUhQIBAKBwIhQigKBQCAQGBFKUSAQCAQCI0IpCgQCgUBg5P8Dxxn+0UMlxz8AAAAA\nSUVORK5CYII=\n", + "text/plain": [ + "" + ] + }, + "execution_count": 42, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "Image.open('./ladder_net_arch.png') # the ladder net architecture diagram from the Curious AI paper" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Ladder net building blocks:\n", + " * dirty encoder - the most left, built first\n", + " * clean encoder - the most right, built second, sharing weights and BN params of the dirty one \n", + " * decoder - the middle one, built on top of the dirty encoder, sharing its transposed weights\n", + " * batch normalization divided between normalization part (mean, inv_std) and learning part (beta, gamma)\n", + " * Gaussian noise injection in between the batchnorm parts in the dirty encode \n", + " * ReLUs (no gamma needed to learn in BN) used in encoder's latent layers, softmax as a classifier\n", + " * denoising (combinator) function g used as a nonlinearity in the decoder (instance of Merge layer):\n", + " 1. used in Curious AI paper:\n", + "$$ \\hat{z} = g(\\tilde{z}, u) = (\\tilde{z} - \\mu(u)) \\odot \\sigma(u) + \\mu(u) $$\n", + "$$ \\mu(u) = w_0 \\odot \\text{sigmoid}(w_2 \\odot u + b_0) + w_3 \\odot u + b_1 $$\n", + "$$ \\sigma(u) = w_4 \\odot \\text{sigmoid}(w_5 \\odot u + b_2) + w_6 \\odot u + b_3 $$\n", + " 2. 
used in MILA UDEM paper:\n", + "$$ g(\\tilde{z}(l), u(l+1)) = b_0 + w_0^z \\odot \\tilde{z}(l) + w_0^u \\odot u(l+1) + w_0^{zu} \\odot \\tilde{z}(l) \\odot u(l+1) + \\\\\n", + "w^\\sigma \\odot \\text{sigmoid}(b_1 + w_1^z \\odot \\tilde{z}(l) + w_1^u \\odot u(l+1) + w_1^{zu} \\odot \\tilde{z}(l) \\cdot u(l+1)) $$ \n", + "$$ \\mu(u) = w_0 \\odot \\text{sigmoid}(w_2 \\odot u + b_0) + w_3 \\odot u + b_1 $$\n", + "$$ \\sigma(u) = w_4 \\odot \\text{sigmoid}(w_5 \\odot u + b_2) + w_6 \\odot u + b_3 $$\n", + "Cost terms:\n", + " * classification cost\n", + " * reconstruction cost(s)\n", + "$$ \\text{Cost} = - \\sum\\limits^N_{n=1} \\log P\\left(\\tilde{y}(n) = t(n)|x(n) \\right) - \\sum\\limits^L_{l=0} \\lambda_l \\sum\\limits^N_{n=1} \\| z^{(l)}(n) - \\tilde{z}^{(l)}_{BN}(n) \\|^2 $$" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Implementation" + ] + }, + { + "cell_type": "code", + "execution_count": 1, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "Using gpu device 2: Tesla K80 (CNMeM is disabled, cuDNN 4007)\n" + ] + } + ], + "source": [ + "from lasagne.layers import InputLayer, MergeLayer, DenseLayer, DropoutLayer, \\\n", + " GaussianNoiseLayer, NonlinearityLayer\n", + "from lasagne.layers.normalization import BatchNormLayer\n", + "from lasagne.nonlinearities import softmax, linear, tanh, rectify\n", + "import lasagne\n", + "\n", + "import theano\n", + "import theano.tensor as T\n", + "\n", + "import numpy as np\n", + "from collections import OrderedDict\n", + "\n", + "\n", + "def _create_milaUDEM_params(shape, name):\n", + " values = np.zeros((6,) + shape, dtype=theano.config.floatX)\n", + "\n", + " b_lin = theano.shared(values[0],\n", + " name='bias_lin_{}'.format(name))\n", + " b_sigm = theano.shared(values[1],\n", + " name='bias_sigm_{}'.format(name))\n", + "\n", + " w_u_lin = theano.shared(values[2],\n", + " name='weight_u_lin_{}'.format(name))\n", + " 
w_u_sigm = theano.shared(values[3],\n", + " name='weight_u_sigm_{}'.format(name))\n", + " w_zu_lin = theano.shared(values[4],\n", + " name='weight_zu_lin_{}'.format(name))\n", + " w_zu_sigm = theano.shared(values[5],\n", + " name='weight_zu_sigm_{}'.format(name))\n", + "\n", + " values = np.ones((3,) + shape, dtype=theano.config.floatX)\n", + " w_z_lin = theano.shared(values[0],\n", + " name='weight_z_lin_{}'.format(name))\n", + " w_z_sigm = theano.shared(values[1],\n", + " name='weight_z_sigm_{}'.format(name))\n", + " w_sigm = theano.shared(values[2],\n", + " name='weight_sigm_{}'.format(name))\n", + "\n", + " # combinator params used in combinator calculations\n", + " return [w_u_lin, w_z_lin, w_zu_lin, w_u_sigm, w_z_sigm,\n", + " w_zu_sigm, w_sigm, b_lin, b_sigm]\n", + "\n", + "\n", + "def _create_curiousAI_params(shape, name):\n", + " values = np.zeros((8,) + shape, dtype=theano.config.floatX)\n", + "\n", + " b_mu_sig = theano.shared(values[0],\n", + " name='b_mu_sig_{}'.format(name))\n", + " b_mu_lin = theano.shared(values[1],\n", + " name='b_mu_lin_{}'.format(name))\n", + " b_v_sig = theano.shared(values[2],\n", + " name='b_v_sig_{}'.format(name))\n", + " b_v_lin = theano.shared(values[3],\n", + " name='b_v_lin_{}'.format(name))\n", + "\n", + " w_mu_lin = theano.shared(values[4],\n", + " name='w_mu_lin_{}'.format(name))\n", + " w_v_lin = theano.shared(values[5],\n", + " name='w_v_lin_{}'.format(name))\n", + " w_mu = theano.shared(values[6],\n", + " name='w_mu_{}'.format(name))\n", + " w_v = theano.shared(values[7],\n", + " name='w_v_{}'.format(name))\n", + "\n", + " values = np.ones((2,) + shape, dtype=theano.config.floatX)\n", + " w_mu_sig = theano.shared(values[0],\n", + " name='w_mu_sig_{}'.format(name))\n", + " w_v_sig = theano.shared(values[1],\n", + " name='w_v_sig_{}'.format(name))\n", + "\n", + " # combinator params used in combinator calculations\n", + " return [w_mu_lin, w_v_lin, w_mu_sig, w_v_sig, w_mu, w_v,\n", + " b_mu_lin, b_v_lin, b_mu_sig, 
b_v_sig]\n", + "\n", + "\n", + "def _create_combinator_params(combinator_type, shape, name):\n", + " if combinator_type == 'milaUDEM':\n", + " return _create_milaUDEM_params(shape, name)\n", + " elif combinator_type == 'curiousAI':\n", + " return _create_curiousAI_params(shape, name)\n", + "\n", + "\n", + "def _combinator_MILAudem(z, u, combinator_params, bc_pttrn):\n", + " w_u_lin, w_z_lin, w_zu_lin, w_u_sigm, w_z_sigm, w_zu_sigm, w_sigm, \\\n", + " b_lin, b_sigm = combinator_params\n", + "\n", + " lin_out = w_z_lin.dimshuffle(*bc_pttrn) * z + \\\n", + " w_u_lin.dimshuffle(*bc_pttrn) * u + \\\n", + " w_zu_lin.dimshuffle(*bc_pttrn) * z * u + \\\n", + " b_lin.dimshuffle(*bc_pttrn)\n", + " \n", + " sigm_pre = w_z_sigm.dimshuffle(*bc_pttrn) * z + \\\n", + " w_u_sigm.dimshuffle(*bc_pttrn) * u + \\\n", + " w_zu_sigm.dimshuffle(*bc_pttrn) * z * u + \\\n", + " b_sigm.dimshuffle(*bc_pttrn)\n", + " \n", + " sigm_out = T.nnet.sigmoid(sigm_pre)\n", + " \n", + " output = w_sigm.dimshuffle(*bc_pttrn) * sigm_out + lin_out\n", + "\n", + " return output\n", + "\n", + "\n", + "def _combinator_curiousAI(z, u, combinator_params, bc_pttrn):\n", + " w_mu_lin, w_v_lin, w_mu_sig, w_v_sig, w_mu, w_v, \\\n", + " b_mu_lin, b_v_lin, b_mu_sig, b_v_sig = combinator_params\n", + "\n", + " mu_sig_pre = w_mu_sig.dimshuffle(*bc_pttrn) * u + \\\n", + " b_mu_sig.dimshuffle(*bc_pttrn)\n", + "\n", + " mu_lin_out = w_mu_lin.dimshuffle(*bc_pttrn) * u + \\\n", + " b_mu_lin.dimshuffle(*bc_pttrn)\n", + "\n", + " mu_u = w_mu.dimshuffle(*bc_pttrn) * T.nnet.sigmoid(mu_sig_pre) + \\\n", + " mu_lin_out\n", + "\n", + " v_sig_pre = w_v_sig.dimshuffle(*bc_pttrn) * u + \\\n", + " b_v_sig.dimshuffle(*bc_pttrn)\n", + " \n", + " v_lin_out = w_v_lin.dimshuffle(*bc_pttrn) * u + \\\n", + " b_v_lin.dimshuffle(*bc_pttrn)\n", + "\n", + " v_u = w_v * T.nnet.sigmoid(v_sig_pre) + v_lin_out\n", + "\n", + " output = (z - mu_u) * v_u + mu_u\n", + "\n", + " return output\n", + "\n", + "\n", + "def _combinator(z, u, 
combinator_type, combinator_params):\n", + " if u.ndim == 2:\n", + " bc_pttrn = ('x', 0)\n", + " elif u.ndim == 4:\n", + " bc_pttrn = ('x', 0, 'x', 'x')\n", + " \n", + " if combinator_type == 'milaUDEM':\n", + " return _combinator_MILAudem(z, u, combinator_params, bc_pttrn)\n", + " elif combinator_type == 'curiousAI':\n", + " return _combinator_curiousAI(z, u, combinator_params, bc_pttrn)\n", + "\n", + "\n", + "class CombinatorLayer(MergeLayer):\n", + " \"\"\"\n", + " A layer that combines the terms from dirty and clean encoders,\n", + " in a following way:\n", + " $$ \\hat{z} = g(\\tilde{z}, u) = $$\n", + " \"\"\"\n", + " def __init__(self, incoming_z, incoming_u, combinator_type, **kwargs):\n", + " super(CombinatorLayer, self).__init__(\n", + " [incoming_z, incoming_u], **kwargs)\n", + " self.combinator_type = combinator_type\n", + " z_shp, u_shp = self.input_shapes\n", + "\n", + " if len(z_shp) != len(u_shp):\n", + " raise ValueError(\"The inputs must have the same shape: \"\n", + " \"(batch_size, num_hidden) in case of dense layer or \\n\"\n", + " \"(batch_size, num_feature_maps, height, width) \"\n", + " \"in case of conv layer.\")\n", + "\n", + " self.combinator_params = _create_combinator_params(combinator_type, \n", + " u_shp[1:],\n", + " self.name)\n", + "\n", + " def get_output_shape_for(self, input_shapes):\n", + " return input_shapes[0]\n", + "\n", + " def get_output_for(self, inputs, **kwargs):\n", + " z, u = inputs\n", + " assert z.ndim == u.ndim\n", + " return _combinator(z, u, self.combinator_type, self.combinator_params)\n", + "\n", + "\n", + "def build_encoder(net, num_hidden, activation, name,\n", + " p_drop_hidden=0., shared_net=None):\n", + " for i, num_nodes in enumerate(num_hidden):\n", + " dense_lname = 'enc_dense_{}'.format(i)\n", + " nbatchn_lname = 'enc_batchn_{}_norm'.format(i)\n", + " noise_lname = 'enc_noise_{}'.format(i)\n", + " lbatchn_lname = 'enc_batchn_{}_learn'.format(i)\n", + "\n", + " if shared_net is None:\n", + " # dense 
pars\n", + " W = lasagne.init.GlorotUniform()\n", + " # batchnorm pars\n", + " beta = lasagne.init.Constant(0)\n", + " gamma = None if activation == rectify else lasagne.init.Constant(1)\n", + " else:\n", + " # dense pars\n", + " W = shared_net[dense_lname].get_params()[0]\n", + " # batchnorm pars\n", + " if activation==rectify:\n", + " beta = shared_net[lbatchn_lname].get_params()[0]\n", + " gamma = None\n", + " else:\n", + " beta, gamma = shared_net[lbatchn_lname].get_params()\n", + "\n", + " net[dense_lname] = DenseLayer(net.values()[-1], num_units=num_nodes, W=W,\n", + " nonlinearity=linear,\n", + " name='{}_{}'.format(name, dense_lname))\n", + "\n", + " shp = net[dense_lname].output_shape[1]\n", + " zero_const = T.zeros(shp, np.float32)\n", + " one_const = T.ones(shp, np.float32)\n", + "\n", + " # 1. batchnormalize without learning -> goes to combinator layer\n", + " l_name = '{}_{}'.format(name, nbatchn_lname)\n", + " net[nbatchn_lname] = BatchNormLayer(net.values()[-1], alpha=0.1,\n", + " beta=None, gamma=None, name=l_name)\n", + "\n", + " if shared_net is None:\n", + " # add noise in dirty encoder\n", + " net[noise_lname] = GaussianNoiseLayer(net.values()[-1],\n", + " sigma=p_drop_hidden,\n", + " name='{}_{}_'.format(name,\n", + " noise_lname))\n", + "\n", + " # 2. 
batchnormalization learning, \n", + " # alpha set to one in order to depenend only on the given batch mean and inv_std\n", + " l_name = '{}_{}'.format(name, lbatchn_lname)\n", + " net[lbatchn_lname] = BatchNormLayer(net.values()[-1], alpha=1.,\n", + " beta=beta, gamma=gamma, name=l_name,\n", + " mean=zero_const, inv_std=one_const)\n", + "\n", + " if i < len(num_hidden) - 1:\n", + " act_name = 'enc_activation_{}'.format(i)\n", + " net[act_name] = NonlinearityLayer(net.values()[-1],\n", + " nonlinearity=activation,\n", + " name='{}_{}'.format(name, act_name))\n", + "\n", + " net['enc_softmax'] = NonlinearityLayer(net.values()[-1], nonlinearity=softmax,\n", + " name='{}_enc_softmax'.format(name))\n", + "\n", + " return net['enc_softmax'], net\n", + "\n", + "\n", + "def build_decoder(dirty_net, clean_net, num_nodes, sigma,\n", + " combinator_type='milaUDEM'):\n", + " L = len(num_nodes) - 1\n", + "\n", + " # dirty_enc_dense_1 ... z_L\n", + " z_L = dirty_net['enc_noise_{}'.format(L)]\n", + " \n", + " # batchnormalized softmax output .. u_0 without learning bn beta, gamma\n", + " dirty_net['u_0'] = BatchNormLayer(dirty_net.values()[-1], beta=None,\n", + " gamma=None, name='dec_batchn_softmax')\n", + " \n", + " # denoised latent \\hat{z}_L = g(\\tilde{z}_L, u_L)\n", + " comb_name = 'dec_combinator_0'\n", + " dirty_net[comb_name] = CombinatorLayer(*[z_L, dirty_net['u_0']],\n", + " combinator_type=combinator_type,\n", + " name=comb_name)\n", + " \n", + " # batchnormalize denoised latent using clean encoder's bn mean/inv_std without learning\n", + " enc_bname = 'enc_batchn_{}_norm'.format(L)\n", + " mu, inv_std = clean_net[enc_bname].get_params()\n", + " bname = 'dec_batchn_0'\n", + " dirty_net[bname] = BatchNormLayer(dirty_net.values()[-1], alpha=1.,\n", + " beta=None, gamma=None, name=bname,\n", + " mean=mu, inv_std=inv_std)\n", + "\n", + " for i in range(L):\n", + " # dirty_enc_dense_L-i ... 
z_l\n", + " z_l = dirty_net['enc_noise_{}'.format(i)]\n", + " \n", + " # affine transformation\n", + " d_name = 'dec_dense_{}'.format(L-i)\n", + " dirty_net[d_name] = DenseLayer(dirty_net.values()[-1],\n", + " num_units=num_nodes[i],\n", + " nonlinearity=linear, name=d_name)\n", + " \n", + " # batchnormalization ... u_l\n", + " dirty_net['u_l'] = BatchNormLayer(dirty_net.values()[-1], beta=None,\n", + " gamma=None,\n", + " name='dec_batchn_dense_{}'.format(L-i))\n", + " \n", + " # denoised latent \\hat{z}_L-i\n", + " comb_name = 'dec_combinator_{}'.format(i+1)\n", + " dirty_net[comb_name] = CombinatorLayer(*[z_l, dirty_net['u_l']],\n", + " combinator_type=combinator_type,\n", + " name=comb_name)\n", + " \n", + " # batchnormalized latent \\hat{z}_L-i^{BN}\n", + " enc_bname = 'enc_batchn_{}_norm'.format(L-i-1)\n", + " mu, inv_std = clean_net[enc_bname].get_params()\n", + " bname = 'dec_batchn_{}'.format(L-i)\n", + " dirty_net[bname] = BatchNormLayer(dirty_net.values()[-1], alpha=1.,\n", + " beta=None, gamma=None, name=bname,\n", + " mean=mu, inv_std=inv_std)\n", + " \n", + " # corrupted input ... z_0\n", + " z_0 = dirty_net['inp_corr']\n", + "\n", + " # affine transformation\n", + " d_name = 'dec_dense_{}'.format(L+1)\n", + " dirty_net[d_name] = DenseLayer(dirty_net.values()[-1], nonlinearity=linear,\n", + " num_units=num_nodes[i+1], name=d_name)\n", + " \n", + " # batchnormalization ... 
u_L\n", + " dirty_net['u_L'] = BatchNormLayer(dirty_net.values()[-1], beta=None, gamma=None)\n", + " \n", + " # denoised input reconstruction\n", + " comb_name = 'dec_combinator_{}'.format(L+1)\n", + " dirty_net[comb_name] = CombinatorLayer(*[z_0, dirty_net['u_L']], name=comb_name,\n", + " combinator_type=combinator_type)\n", + "\n", + " return dirty_net\n", + "\n", + "\n", + "def build_model(num_encoder, num_decoder, p_drop_input, p_drop_hidden,\n", + " activation=rectify, batch_size=None, inp_size=None,\n", + " combinator_type='MILAudem'):\n", + " net = OrderedDict()\n", + " net['input'] = InputLayer((batch_size, inp_size), name='input')\n", + " # corrupted input\n", + " net['inp_corr'] = GaussianNoiseLayer(net['input'], sigma=p_drop_input,\n", + " name='input_corr')\n", + "\n", + " # dirty encoder\n", + " train_output_l, dirty_encoder = build_encoder(net, num_encoder, activation,\n", + " 'dirty', p_drop_hidden)\n", + "\n", + " # clean encoder\n", + " clean_net = OrderedDict(net.items()[:1])\n", + " eval_output_l, clean_net = build_encoder(clean_net, num_encoder, activation,\n", + " 'clean', 0., shared_net=dirty_encoder)\n", + "\n", + " # dirty decoder\n", + " dirty_net = build_decoder(dirty_encoder, clean_net, num_decoder,\n", + " p_drop_hidden, combinator_type)\n", + "\n", + " return [train_output_l, eval_output_l], dirty_net, clean_net\n", + "\n", + "\n", + "def build_cost(X, y, num_decoder, dirty_net, clean_net, output_train, lambdas):\n", + " class_cost = T.nnet.categorical_crossentropy(T.clip(output_train, 1e-15, 1),\n", + " y).mean()\n", + " L = len(num_decoder)\n", + " \n", + " # get clean and corresponding dirty latent layer output\n", + " z_clean_l = clean_net['input']\n", + " z_dirty_l = dirty_net['dec_combinator_{}'.format(L)]\n", + " \n", + " z_clean = lasagne.layers.get_output(z_clean_l, X, deterministic=False)\n", + " z_dirty = lasagne.layers.get_output(z_dirty_l, X, deterministic=False)\n", + "\n", + " # squared error\n", + " rec_costs = 
[lambdas[L] * T.sqr(z_clean - z_dirty).mean()]\n", + "\n", + " for l in range(L):\n", + " z_clean_l = clean_net['enc_batchn_{}_norm'.format(l)]\n", + " z_dirty_l = dirty_net['dec_batchn_{}'.format(L-l-1)]\n", + "\n", + " z_clean = lasagne.layers.get_output(z_clean_l, X, deterministic=False)\n", + " z_dirty = lasagne.layers.get_output(z_dirty_l, X, deterministic=False)\n", + "\n", + " rec_costs.append(lambdas[l] * T.sqr(z_clean - z_dirty).mean())\n", + "\n", + " return class_cost, rec_costs" + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [ + "import gzip\n", + "import cPickle as pickle\n", + "import sys\n", + "\n", + "def pickle_load(f, encoding):\n", + " return pickle.load(f)\n", + "\n", + "def load_data():\n", + " \"\"\"Get data with labels, split into training, validation and test set.\"\"\"\n", + " with gzip.open('mnist.pkl.gz', 'rb') as f:\n", + " data = pickle_load(f, encoding='latin-1')\n", + " X_train, y_train = data[0]\n", + " X_valid, y_valid = data[1]\n", + " X_test, y_test = data[2]\n", + "\n", + " return dict(\n", + " X_train=theano.shared(lasagne.utils.floatX(X_train)),\n", + " y_train=T.cast(theano.shared(y_train), 'int32'),\n", + " X_valid=theano.shared(lasagne.utils.floatX(X_valid)),\n", + " y_valid=T.cast(theano.shared(y_valid), 'int32'),\n", + " X_test=theano.shared(lasagne.utils.floatX(X_test)),\n", + " y_test=T.cast(theano.shared(y_test), 'int32'),\n", + " num_examples_train=X_train.shape[0],\n", + " num_examples_valid=X_valid.shape[0],\n", + " num_examples_test=X_test.shape[0],\n", + " input_dim=X_train.shape[1],\n", + " output_dim=10,\n", + " )" + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "--2016-06-21 12:04:46-- http://deeplearning.net/data/mnist/mnist.pkl.gz\n", + "Resolving deeplearning.net (deeplearning.net)... 
132.204.26.28\n", + "Connecting to deeplearning.net (deeplearning.net)|132.204.26.28|:80... connected.\n", + "HTTP request sent, awaiting response... 200 OK\n", + "Length: 16168813 (15M) [application/x-gzip]\n", + "Saving to: 'mnist.pkl.gz.1'\n", + "\n", + "100%[======================================>] 16,168,813 4.61MB/s in 3.3s \n", + "\n", + "2016-06-21 12:04:54 (4.61 MB/s) - 'mnist.pkl.gz.1' saved [16168813/16168813]\n", + "\n" + ] + } + ], + "source": [ + "!wget http://deeplearning.net/data/mnist/mnist.pkl.gz" + ] + }, + { + "cell_type": "code", + "execution_count": 6, "metadata": { "collapsed": false }, @@ -11,103 +536,327 @@ "name": "stdout", "output_type": "stream", "text": [ - "Start training...\n", - "Layer #0 rec cost: 0.0523031353951\n", - "Layer #1 rec cost: 0.00836394913495\n", - "Layer #2 rec cost: 0.117911726236\n", - "enc_batchn_0_learn: mean True, inv_std True\n", - "enc_batchn_1_learn: mean True, inv_std True\n", - "Epoch 0: Train cost 0.85103150726, train acc 0.93518, val acc 0.9366\n", - "Layer #0 rec cost: 0.0523090846837\n", - "Layer #1 rec cost: 0.00838615372777\n", - "Layer #2 rec cost: 0.115602619946\n", - "enc_batchn_0_learn: mean True, inv_std True\n", - "enc_batchn_1_learn: mean True, inv_std True\n", - "Epoch 1: Train cost 0.742657381409, train acc 0.94888, val acc 0.9453\n", - "Layer #0 rec cost: 0.0523090697825\n", - "Layer #1 rec cost: 0.00840173009783\n", - "Layer #2 rec cost: 0.114492356777\n", - "enc_batchn_0_learn: mean True, inv_std True\n", - "enc_batchn_1_learn: mean True, inv_std True\n", - "Epoch 2: Train cost 0.719053864464, train acc 0.95816, val acc 0.9541\n", - "Layer #0 rec cost: 0.0522957369685\n", - "Layer #1 rec cost: 0.00840636808425\n", - "Layer #2 rec cost: 0.114234052598\n", - "enc_batchn_0_learn: mean True, inv_std True\n", - "enc_batchn_1_learn: mean True, inv_std True\n", - "Epoch 3: Train cost 0.70547588383, train acc 0.96916, val acc 0.9609\n", - "Layer #0 rec cost: 0.0523124374449\n", - "Layer #1 rec cost: 
0.00841217115521\n", - "Layer #2 rec cost: 0.113789305091\n", - "enc_batchn_0_learn: mean True, inv_std True\n", - "enc_batchn_1_learn: mean True, inv_std True\n", - "Epoch 4: Train cost 0.693144676127, train acc 0.97076, val acc 0.9607\n", - "Layer #0 rec cost: 0.0523018464446\n", - "Layer #1 rec cost: 0.0084175830707\n", - "Layer #2 rec cost: 0.11399256438\n", - "enc_batchn_0_learn: mean True, inv_std True\n", - "enc_batchn_1_learn: mean True, inv_std True\n", - "Epoch 5: Train cost 0.687760960555, train acc 0.97554, val acc 0.9645\n", - "Layer #0 rec cost: 0.0523033551872\n", - "Layer #1 rec cost: 0.00842206645757\n", - "Layer #2 rec cost: 0.113653443754\n", - "enc_batchn_0_learn: mean True, inv_std True\n", - "enc_batchn_1_learn: mean True, inv_std True\n", - "Epoch 6: Train cost 0.681915052639, train acc 0.97504, val acc 0.9613\n", - "Layer #0 rec cost: 0.052292086184\n", - "Layer #1 rec cost: 0.00842817965895\n", - "Layer #2 rec cost: 0.113471724093\n", - "enc_batchn_0_learn: mean True, inv_std True\n", - "enc_batchn_1_learn: mean True, inv_std True\n", - "Epoch 7: Train cost 0.676880990575, train acc 0.98124, val acc 0.9688\n", - "Layer #0 rec cost: 0.0523066557944\n", - "Layer #1 rec cost: 0.0084327859804\n", - "Layer #2 rec cost: 0.113498724997\n", - "enc_batchn_0_learn: mean True, inv_std True\n", - "enc_batchn_1_learn: mean True, inv_std True\n", - "Epoch 8: Train cost 0.673917843942, train acc 0.98184, val acc 0.9668\n", - "Layer #0 rec cost: 0.0522929169238\n", - "Layer #1 rec cost: 0.00842999480665\n", - "Layer #2 rec cost: 0.113631747663\n", - "enc_batchn_0_learn: mean True, inv_std True\n", - "enc_batchn_1_learn: mean True, inv_std True\n", + "Loading data...\n", + "Building model and compiling functions...\n", + "Starting training...\n", + "Epoch 1 took 26.803 s\n", + "Train cost 0.843797385693, val cost 0.735326230526, val acc 0.959999978542\n", + "Layer #0 rec cost: 0.0424227267504\n", + "Layer #1 rec cost: 0.0209430493414\n", + "Layer #2 rec 
cost: 0.018017789349\n", + "enc_batchn_0_learn: mean == 0. True, inv_std == 1. True\n", + "enc_batchn_1_learn: mean == 0. True, inv_std == 1. True\n", + "Epoch 2 took 26.748 s\n", + "Train cost 0.737002074718, val cost 0.744888663292, val acc 0.949999988079\n", + "Layer #0 rec cost: 0.0424131974578\n", + "Layer #1 rec cost: 0.0197453834116\n", + "Layer #2 rec cost: 0.0170130133629\n", + "enc_batchn_0_learn: mean == 0. True, inv_std == 1. True\n", + "enc_batchn_1_learn: mean == 0. True, inv_std == 1. True\n", + "Epoch 3 took 26.716 s\n", + "Train cost 0.708538234234, val cost 0.734336614609, val acc 0.959999978542\n", + "Layer #0 rec cost: 0.0424265153706\n", + "Layer #1 rec cost: 0.019520400092\n", + "Layer #2 rec cost: 0.0167733673006\n", + "enc_batchn_0_learn: mean == 0. True, inv_std == 1. True\n", + "enc_batchn_1_learn: mean == 0. True, inv_std == 1. True\n", + "Epoch 4 took 26.713 s\n", + "Train cost 0.692537605762, val cost 0.735073328018, val acc 0.959999978542\n", + "Layer #0 rec cost: 0.042408503592\n", + "Layer #1 rec cost: 0.01941104047\n", + "Layer #2 rec cost: 0.0166323203593\n", + "enc_batchn_0_learn: mean == 0. True, inv_std == 1. True\n", + "enc_batchn_1_learn: mean == 0. True, inv_std == 1. True\n", + "Epoch 5 took 26.132 s\n", + "Train cost 0.681930422783, val cost 0.719261467457, val acc 0.990000009537\n", + "Layer #0 rec cost: 0.0424162633717\n", + "Layer #1 rec cost: 0.0193498600274\n", + "Layer #2 rec cost: 0.0165363010019\n", + "enc_batchn_0_learn: mean == 0. True, inv_std == 1. True\n", + "enc_batchn_1_learn: mean == 0. True, inv_std == 1. True\n", + "Epoch 6 took 26.017 s\n", + "Train cost 0.673169791698, val cost 0.745770931244, val acc 0.949999988079\n", + "Layer #0 rec cost: 0.0424140915275\n", + "Layer #1 rec cost: 0.0192885622382\n", + "Layer #2 rec cost: 0.0165096893907\n", + "enc_batchn_0_learn: mean == 0. True, inv_std == 1. True\n", + "enc_batchn_1_learn: mean == 0. True, inv_std == 1. 
True\n", + "Epoch 7 took 26.034 s\n", + "Train cost 0.668693244457, val cost 0.695348381996, val acc 0.959999978542\n", + "Layer #0 rec cost: 0.0424080193043\n", + "Layer #1 rec cost: 0.0192632935941\n", + "Layer #2 rec cost: 0.0163964517415\n", + "enc_batchn_0_learn: mean == 0. True, inv_std == 1. True\n", + "enc_batchn_1_learn: mean == 0. True, inv_std == 1. True\n", + "Epoch 8 took 26.017 s\n", + "Train cost 0.665378808975, val cost 0.685513734818, val acc 0.949999988079\n", + "Layer #0 rec cost: 0.0424257256091\n", + "Layer #1 rec cost: 0.0192788075656\n", + "Layer #2 rec cost: 0.016430022195\n", + "enc_batchn_0_learn: mean == 0. True, inv_std == 1. True\n", + "enc_batchn_1_learn: mean == 0. True, inv_std == 1. True\n", + "Epoch 9 took 26.167 s\n", + "Train cost 0.660260021687, val cost 0.650075316429, val acc 0.949999988079\n", + "Layer #0 rec cost: 0.0424268692732\n", + "Layer #1 rec cost: 0.019260045141\n", + "Layer #2 rec cost: 0.0163774229586\n", + "enc_batchn_0_learn: mean == 0. True, inv_std == 1. True\n", + "enc_batchn_1_learn: mean == 0. True, inv_std == 1. True\n", + "Epoch 10 took 26.650 s\n", + "Train cost 0.657784998417, val cost 0.696567416191, val acc 0.97000002861\n", + "Layer #0 rec cost: 0.0424044318497\n", + "Layer #1 rec cost: 0.0192706119269\n", + "Layer #2 rec cost: 0.0163583438843\n", + "enc_batchn_0_learn: mean == 0. True, inv_std == 1. True\n", + "enc_batchn_1_learn: mean == 0. True, inv_std == 1. 
True\n", "New LR: 0.10000000149\n", - "Epoch 9: Train cost 0.671246978571, train acc 0.98178, val acc 0.9683\n", - "Layer #0 rec cost: 0.0523014776409\n", - "Layer #1 rec cost: 0.00842962320894\n", - "Layer #2 rec cost: 0.112782202661\n", - "enc_batchn_0_learn: mean True, inv_std True\n", - "enc_batchn_1_learn: mean True, inv_std True\n", - "Epoch 10: Train cost 0.668746202706, train acc 0.9863, val acc 0.9723\n", - "Layer #0 rec cost: 0.0523017942905\n", - "Layer #1 rec cost: 0.00843219831586\n", - "Layer #2 rec cost: 0.112931199372\n", - "enc_batchn_0_learn: mean True, inv_std True\n", - "enc_batchn_1_learn: mean True, inv_std True\n", - "Epoch 11: Train cost 0.667900841003, train acc 0.9857, val acc 0.9729\n", - "Layer #0 rec cost: 0.052298579365\n", - "Layer #1 rec cost: 0.00843434035778\n", - "Layer #2 rec cost: 0.112664811313\n", - "enc_batchn_0_learn: mean True, inv_std True\n", - "enc_batchn_1_learn: mean True, inv_std True\n", - "Epoch 12: Train cost 0.662375827058, train acc 0.98466, val acc 0.9703\n", - "Layer #0 rec cost: 0.0522987246513\n", - "Layer #1 rec cost: 0.0084279216826\n", - "Layer #2 rec cost: 0.112784132361\n", - "enc_batchn_0_learn: mean True, inv_std True\n", - "enc_batchn_1_learn: mean True, inv_std True\n", - "Epoch 13: Train cost 0.66105420699, train acc 0.98562, val acc 0.9704\n", - "Layer #0 rec cost: 0.0522985383868\n", - "Layer #1 rec cost: 0.00843381509185\n", - "Layer #2 rec cost: 0.112974517047\n", - "enc_batchn_0_learn: mean True, inv_std True\n", - "enc_batchn_1_learn: mean True, inv_std True\n", - "Epoch 14: Train cost 0.661718979622, train acc 0.9877, val acc 0.9725\n" + "Epoch 11 took 26.544 s\n", + "Train cost 0.654187440872, val cost 0.653413057327, val acc 0.980000019073\n", + "Layer #0 rec cost: 0.0424147695303\n", + "Layer #1 rec cost: 0.0192655138671\n", + "Layer #2 rec cost: 0.0163049418479\n", + "enc_batchn_0_learn: mean == 0. True, inv_std == 1. True\n", + "enc_batchn_1_learn: mean == 0. True, inv_std == 1. 
True\n", + "Epoch 12 took 26.329 s\n", + "Train cost 0.651110231876, val cost 0.662563860416, val acc 0.97000002861\n", + "Layer #0 rec cost: 0.0424222685397\n", + "Layer #1 rec cost: 0.0192585140467\n", + "Layer #2 rec cost: 0.0163124445826\n", + "enc_batchn_0_learn: mean == 0. True, inv_std == 1. True\n", + "enc_batchn_1_learn: mean == 0. True, inv_std == 1. True\n", + "Epoch 13 took 26.731 s\n", + "Train cost 0.647521734238, val cost 0.689861774445, val acc 0.959999978542\n", + "Layer #0 rec cost: 0.0424127243459\n", + "Layer #1 rec cost: 0.0192261245102\n", + "Layer #2 rec cost: 0.0163582395762\n", + "enc_batchn_0_learn: mean == 0. True, inv_std == 1. True\n", + "enc_batchn_1_learn: mean == 0. True, inv_std == 1. True\n", + "Epoch 14 took 26.702 s\n", + "Train cost 0.645864963531, val cost 0.670720160007, val acc 0.980000019073\n", + "Layer #0 rec cost: 0.0424097888172\n", + "Layer #1 rec cost: 0.0191815402359\n", + "Layer #2 rec cost: 0.016245925799\n", + "enc_batchn_0_learn: mean == 0. True, inv_std == 1. True\n", + "enc_batchn_1_learn: mean == 0. True, inv_std == 1. True\n", + "Epoch 15 took 26.710 s\n", + "Train cost 0.644159853458, val cost 0.64711368084, val acc 0.959999978542\n", + "Layer #0 rec cost: 0.0424187406898\n", + "Layer #1 rec cost: 0.019159225747\n", + "Layer #2 rec cost: 0.0162756647915\n", + "enc_batchn_0_learn: mean == 0. True, inv_std == 1. True\n", + "enc_batchn_1_learn: mean == 0. True, inv_std == 1. True\n" ] } ], "source": [ - "%run train_ladder_nets.py -dlr 1. 
-l 0.1,1.,0.1 -ep 15" + "import time\n", + "import theano.misc.pkl_utils\n", + "\n", + "LEARNING_RATE = 0.1\n", + "LR_DECREASE = 1.\n", + "BATCH_SIZE = 100\n", + "NUM_EPOCHS = 15\n", + "COMBINATOR_TYPE = 'milaUDEM'\n", + "LAMBDAS = [0.1, 0.1, 0.1]\n", + "DROPOUT = 0.3\n", + "\n", + "print \"Loading data...\"\n", + "dataset = load_data()\n", + "\n", + "# build network\n", + "num_encoder = [500, 10]\n", + "num_decoder = [500, 784]\n", + "\n", + "print \"Building model and compiling functions...\"\n", + "[train_output_l, eval_output_l], dirty_net, clean_net = build_model(\n", + " num_encoder, num_decoder, DROPOUT, DROPOUT, batch_size=None, \n", + " inp_size=784, combinator_type=COMBINATOR_TYPE)\n", + "\n", + "# set up input/output variables\n", + "X = T.fmatrix('X')\n", + "y = T.ivector('y')\n", + "\n", + "# training output\n", + "output_train = lasagne.layers.get_output(train_output_l, X, deterministic=False)\n", + "\n", + "# evaluation output. Also includes output of transform for plotting\n", + "output_eval = lasagne.layers.get_output(eval_output_l, X, deterministic=True)\n", + "\n", + "# set up (possibly amortizable) lr, cost and updates\n", + "sh_lr = theano.shared(lasagne.utils.floatX(LEARNING_RATE))\n", + "\n", + "class_cost, rec_costs = build_cost(X, lasagne.utils.one_hot(y), num_decoder, \n", + " dirty_net, clean_net, output_train, LAMBDAS)\n", + "cost = class_cost + T.sum(rec_costs)\n", + "\n", + "net_params = lasagne.layers.get_all_params(train_output_l, trainable=True)\n", + "updates = lasagne.updates.adam(cost, net_params, learning_rate=sh_lr)\n", + "\n", + "# get training and evaluation functions, cost = class_cost + T.sum(rec_costs)\n", + "batch_index = T.iscalar('batch_index')\n", + "batch_slice = slice(batch_index * BATCH_SIZE, (batch_index + 1) * BATCH_SIZE)\n", + "\n", + "pred = T.argmax(output_eval, axis=1)\n", + "accuracy = T.mean(T.eq(pred, y), dtype=theano.config.floatX)\n", + "\n", + "train = theano.function([batch_index], [cost] + 
rec_costs,\n", + " updates=updates, givens={\n", + " X: dataset['X_train'][batch_slice],\n", + " y: dataset['y_train'][batch_slice],\n", + " })\n", + "\n", + "eval = theano.function([batch_index], [cost, accuracy], givens={\n", + " X: dataset['X_valid'][batch_slice],\n", + " y: dataset['y_valid'][batch_slice],\n", + " })\n", + "\n", + "# checking for constants in means and inv_stds during training\n", + "bl_name = 'enc_batchn_{}_learn'\n", + "means = [abs(dirty_net[bl_name.format(i)].mean.ravel()).mean() for i\n", + " in range(len(num_encoder))]\n", + "means = T.stack(means, axis=1)\n", + "stds = [abs(dirty_net[bl_name.format(i)].inv_std.ravel()).mean() for i\n", + " in range(len(num_encoder))]\n", + "stds = T.stack(stds, axis=1)\n", + "get_stats = theano.function([], [means, stds])\n", + "\n", + "network_dump = {'train_output_layer': train_output_l,\n", + " 'eval_output_layer': eval_output_l,\n", + " 'dirty_net': dirty_net,\n", + " 'clean_net': clean_net,\n", + " 'x': X,\n", + " 'y': y,\n", + " 'output_eval': output_eval\n", + " }\n", + "\n", + "def save_dump(filename,param_values):\n", + " f = file(filename, 'wb')\n", + " cPickle.dump(param_values,f,protocol=cPickle.HIGHEST_PROTOCOL)\n", + " f.close()\n", + "\n", + "\n", + "def train_epoch():\n", + " costs = []\n", + " rec_costs = []\n", + " stats = []\n", + " for b in range(num_batches_train):\n", + " train_out = train(b)\n", + " train_cost = train_out[0]\n", + " rec_cost = train_out[1:]\n", + "\n", + " costs.append(train_cost)\n", + " rec_costs.append(rec_cost)\n", + " stats.append(np.vstack(get_stats()))\n", + "\n", + " return (np.mean(costs), np.mean(rec_costs, axis=0),\n", + " np.stack(stats, axis=0).mean(axis=0))\n", + " \n", + "\n", + "def eval_epoch():\n", + " costs = []\n", + " accs = []\n", + " preds = []\n", + " targets = []\n", + " for b in range(num_batches_valid):\n", + " eval_cost, eval_acc = eval(b)\n", + " costs.append(eval_cost)\n", + " accs.append(eval_acc)\n", + "\n", + " return 
np.mean(eval_cost), np.mean(eval_acc)\n", + "\n", + "num_batches_train = dataset['num_examples_train'] // BATCH_SIZE\n", + "num_batches_valid = dataset['num_examples_valid'] // BATCH_SIZE\n", + "\n", + "train_costs, valid_costs, valid_accs = [], [], []\n", + "\n", + "print \"Starting training...\"\n", + "now = time.time()\n", + "\n", + "try:\n", + " for n in range(NUM_EPOCHS):\n", + " train_cost, rec_costs, stats = train_epoch()\n", + " eval_cost, acc = eval_epoch()\n", + " \n", + " train_costs.append(train_cost)\n", + " valid_costs.append(eval_cost)\n", + " valid_accs.append(acc)\n", + "\n", + " print \"Epoch %d took %.3f s\" % (n + 1, time.time() - now)\n", + " now = time.time()\n", + " print \"Train cost {}, val cost {}, val acc {}\".format(train_costs[-1], \n", + " valid_costs[-1], \n", + " valid_accs[-1])\n", + " print '\\n'.join(['Layer #{} rec cost: {}'.format(i, c) for i, c\n", + " in enumerate(rec_costs)])\n", + " means, inv_stds = stats\n", + " for i in range(len(num_encoder)):\n", + " print '{}: mean == 0. {}, inv_std == 1. 
{}'.format(bl_name.format(i),\n", + " np.allclose(means[i], 0.),\n", + " np.allclose(inv_stds[i], 1.))\n", + " \n", + " if (n+1) % 10 == 0:\n", + " new_lr = sh_lr.get_value() * LR_DECREASE\n", + " print \"New LR:\", new_lr\n", + " sh_lr.set_value(lasagne.utils.floatX(new_lr))\n", + "except KeyboardInterrupt:\n", + " pass\n", + "\n", + "# uncomment if to save the learning curve\n", + "# save_dump('final_epoch_{}_accs_ladder_net_mnist.pkl'.format(n),\n", + "# zip(train_cost, valid_cost))\n", + "\n", + "# uncomment if to save the params only\n", + "# save_dump('final_epoch_{}_ladder_net_mnist'.format(n),\n", + "# lasagne.layers.get_all_param_values(output_layer))\n", + "\n", + "# uncomment if to save the whole network\n", + "# theano.misc.pkl_utils.dump(network_dump,\n", + "# 'final_epoch_{}_ladder_net_mnist.pkl'.format(n))" + ] + }, + { + "cell_type": "code", + "execution_count": 7, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [ + "import matplotlib.pyplot as plt\n", + "%matplotlib inline" + ] + }, + { + "cell_type": "code", + "execution_count": 11, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "data": { + "text/plain": [ + "" + ] + }, + "execution_count": 11, + "metadata": {}, + "output_type": "execute_result" + }, + { + "data": { + "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAX0AAAEACAYAAABfxaZOAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAIABJREFUeJzt3Xd8FFX3+PHPDQSkSK8CCU1qaEERQSGAIkXEAhoIUpTy\nYEEsXwUryE8fREEU0EcQBaQKiAE1FOlioRNKAkjvvUP6+f0xAQKEZJPsZrac9+u1r+zOztw5oZw7\nc+bOHSMiKKWU8g1+dgeglFIq+2jSV0opH6JJXymlfIgmfaWU8iGa9JVSyodo0ldKKR/iUNI3xrQy\nxkQbY3YYY95K5fuixpgIY8xGY8xmY0z3FN/tNcZsMsZsMMasdmLsSimlMsikN07fGOMH7ABaAIeB\nNUCoiESnWOcD4A4RGWiMKQZsB0qKSIIxZjdQX0TOuOqXUEop5RhHjvQbADtFZJ+IxAPTgfY3rXMU\nuDP5/Z3AKRFJSP5sHNyPUkopF3MkGZcBDqT4fDB5WUrjgJrGmMPAJuCVFN8JsMgYs8YY0ysrwSql\nlMqanE5qZyCwSUSaGWMqYSX52iJyEWgsIkeMMcWTl0eJyB9O2q9SSqkMcCTpHwICUnwum7wspcbA\nRwAisssYsweoBqwVkSPJy08YY+ZglYtuSfrGGJ0ESCmlMkhETEbWd6S8swaobIwJNMbkAkKBuTet\nEwU8BGCMKQlUAXYbY/IaY/InL88HtAS2pBG8R74++OAD22PQ+O2PQ+P3zJcnx58Z6R7pi0iiMeYl\nYCFWJzFeRKKMMX2sr2Us8F/ge2PMJqwLt2+KyGljTAVgTvJRfE5giogszFSkSimlssyhmr6IzAeq\n3rTsmxTvTwLtUtluD1A3izEqpZRyEh1K6QQhISF2h5AlGr+9NH57eXr8GZXuzVnZxRgj7hKLUkp5\nAmMMksELuc4asqmUUplWvnx59u3bZ3cYbiswMJC9e/c6pS090ldK2S75iNXuMNzW7f58MnOk71Y1\n/aQkuyNQSinv5lZJf/NmuyNQSinv5lZJf9kyuyNQSinvpklfKaVcrG/fvnz00Ud2hwG42YXcIkWE\nEyfAz626IqWUq7n7hdwKFSowfvx4mjdvbsv+vfZCbvHiEBlpdxRKKeW4xMREu0PIELdK+iEhWuJR\nSrmXrl27sn//fh599FEKFCjAp59+ip+fH9999x2BgYG0aNECgKeffprSpUtTuHBhQkJC2LZt27U2\nevTowfvvvw/A8uXLKVeuHCNGjKBkyZKUKVOGCRMmZNvvo0lfKaXSMGnSJAICAvj11185f/48Tz/9\nNAArVqwgOjqaBQsWANCmTRt27drF8ePHCQ4OJiws7LZtHj16lAsXLnD48GG+/fZbXnzxRc6dO5ct\nv49bJf2mTWHFCh2vr5S6lTFZf2VFypq6MYbBgweTJ08ecufODUD37t3Jmzcv/v7+vP/++2zatIkL\nFy6k2lauXLl47733yJEjB61btyZ//vxs3749awE6yK2SfunSUKKE1vWVUrcSyfrLmcqWLXvtfVJS\nEgMGDKBy5coUKlSIChUqYIzh5MmTqW5btGhR/FKMWMmbNy8XL150boC34VZJH6BZM1i61O4olFLq\nOpPKaULKZVOnTmXevHksWbKEs2fPsnfv3iw96MSV3C7pa11fKeVuSpUqxe7duwFSTeYXLlwgd+7c\nFC5cmEuXLjFw4MBUOwp34HZJ/2pd38NGQSmlvNiAAQMYMmQIRYoUYfbs2bck9K5duxIQEECZMmUI\nCgqiUaNGGWo/OzsIt7o562os1avD1KlQr57NQSmlsoW735xlN6+9OesqLfEopZRruGXS14u5Sinl\nGm5Z3jl2DKpVg5MnIUcOmwNTSrmclnfS5vXlnZIlrTH7mzbZHYlSSnkXt0z6oHV9pZRyBbdN+s2a\nadJXSilnc8uaPsDx41C1qtb1lfIFWtNPm9fX9MGag+euu2DjR
rsjUUop7+G2SR+0rq+U8lxX582/\nKigoiBUrVji0ritp0ldKKRdJOb3Cli1baNKkiUPrupJbJ/2mTWHlSkhIsDsSpZTyDm6d9EuUgLJl\nta6vlLLPsGHD6Nix4w3L+vfvT//+/ZkwYQI1atSgQIECVK5cmbFjx962nQoVKrBkyRIAYmJi6N69\nO0WKFCEoKIg1a9a49HdIKWe27SmTrpZ47rnH7kiUUr4oNDSUDz/8kEuXLpEvXz6SkpL48ccf+fnn\nnzl16hS//vorFSpUYOXKlbRq1YoGDRpQt27dNNscNGgQe/bsYc+ePVy8eJFWrVpl02/jIUl/wgR4\n4w27I1FK2ckMznrNWz7I+LDQgIAAgoODmTNnDl26dGHx4sXky5ePBg0a3LDegw8+SMuWLVm5cmW6\nSX/mzJn873//o2DBghQsWJB+/foxZMiQDMeWGW6f9Js2hZ49rbp+TrePVinlKplJ2M7SqVMnpk2b\nRpcuXZg2bRqdO3cGICIigg8//JAdO3aQlJTElStXqF27drrtHT58+IbHLQYGBros9pu5dU0foHhx\nKFcONmywOxKllK/q2LEjy5Yt49ChQ8yZM4ewsDDi4uLo0KEDb775JidOnODMmTO0bt3aoZvMSpcu\nzYEDB6593rdvnyvDv4HbJ33QoZtKKXsVK1aMpk2b0qNHDypWrEiVKlWIi4sjLi6OYsWK4efnR0RE\nBAsXLnSovaeffpr//ve/nD17loMHDzJ69GgX/wbXadJXSikHdO7cmcWLFxMWFgZA/vz5+fLLL+nY\nsSNFihRh+vTptG/f/rbbpxyH/8EHHxAQEECFChVo1aoVXbt2dXn81+Jw5FTEGNMKGInVSYwXkU9u\n+r4oMBkoDeQAhovIBEe2TdGG3C6WEyegcmU4dUrr+kp5I517J23ZOveOMcYPGA08AtQEOhljqt20\n2kvARhGpCzQDhhtjcjq4bbqKF4eAAK3rK6VUVjlS3mkA7BSRfSISD0wHbj6HOQrcmfz+TuCUiCQ4\nuK1D9BGKSimVdY4k/TLAgRSfDyYvS2kcUNMYcxjYBLySgW0donV9pZTKOmdVyAcCm0SkmTGmErDI\nGJP+YNWbDBo06Nr7kJAQQkJCrn1u0gR69NDx+kop37Vs2TKWZfHoN90LucaYhsAgEWmV/HkAICkv\nyBpjfgM+EpFVyZ8XA29hdSppbpuijdteyL2qdm349lu46UY4pZSH0wu5acvuh6isASobYwKNMbmA\nUGDuTetEAQ8lB1ESqALsdnBbh2mJRymlsibdpC8iiVijcxYCW4HpIhJljOljjOmdvNp/gXuMMZuA\nRcCbInL6dttmNli9mKuUdwoMDMQYo6/bvJw5TYPbPiM3NadOQYUK1k9//2wKTCml3JRXPSM3NUWL\nWkl//Xq7I1FKKc/kUUkftK6vlFJZoUlfKaV8iEfV9AFOn4by5bWur5RSXl/TByhSBCpWhHXr7I5E\nKaU8j8clfdASj1JKZZYmfaWU8iEeV9MHresrpRT4SE0frLp+pUqwdq3dkSillGfxyKQPWuJRSqnM\n0KSvlFI+xCNr+gBnzkBgoNb1lVK+y2dq+gCFC1sPS9e6vlJKOc5jkz5YJR6dalkppRzn8Ulf6/pK\nKeU4j63pg1XXDwiw6vq5crkoMKWUclM+VdMHq65/991a11dKKUd5dNIH6xGKWuJRSinHeHzS14u5\nSinlOI+u6QOcPQvlymldXynle3yupg9QqBBUqQJr1tgdiVJKuT+PT/qgQzeVUspRXpH09WKuUko5\nxuNr+nC9rn/yJOTO7eTAlFLKTflkTR+sun7VqlrXV0qp9HhF0get6yullCM06SullA/xipo+wLlz\nULas1vWVUr7DZ2v6AAULQrVqsHq13ZEopZT78pqkD1riUUqp9GjSV0opH+I1NX3Qur5Syrf4dE0f\nrLp+9erwzz92R6KUUu7Jq
5I+aIlHKaXSoklfKaV8iEM1fWNMK2AkVicxXkQ+uen7N4AwQAB/oDpQ\nTETOGmP2AueAJCBeRBrcZh9ZrukDnD8PZcrAiRNwxx1Zbk4ppdxWZmr66SZ9Y4wfsANoARwG1gCh\nIhJ9m/UfBfqLyEPJn3cD9UXkTDr7cUrSB7jvPvj0U2jSxCnNKaWUW3LVhdwGwE4R2Sci8cB0oH0a\n63cCpqWMy8H9OI0+QlEppVLnSDIuAxxI8flg8rJbGGPyAK2A2SkWC7DIGLPGGNMrs4FmhNb1lVIq\ndTmd3F474A8ROZtiWWMROWKMKY6V/KNE5A8n7/cGjRtb0yzHxGhdXymlUnIk6R8CAlJ8Lpu8LDWh\n3FjaQUSOJP88YYyZg1UuSjXpDxo06Nr7kJAQQkJCHAjvVgUKQM2a1nj9pk0z1YRSSrmdZcuWsSyL\nZQxHLuTmALZjXcg9AqwGOolI1E3rFQR2A2VF5ErysryAn4hcNMbkAxYCg0VkYSr7cdqFXIABAyBP\nHvjgA6c1qZRSbsUlF3JFJBF4CSthbwWmi0iUMaaPMaZ3ilUfBxZcTfjJSgJ/GGM2AH8D81JL+K6g\nF3OVUupWXjX3TkoXLkDp0tY8PP65Esnhl8NpbSvHfbP2G/ac3cPQh4baHYpSXsfn595JKcn/HCVa\nfUejb1qQ56M8tJvWjrnb55KQlGB3aD5j1D+jGLpqKN9v/J4tx7fYHY5SCi9L+nGJcYRHh9NxZkcC\nRgaQs8Y8Kp95gSOvH+HJak8y9I+hBI4M5N0l77LnzB67w/VqI/8eyed/f87SbksZ0HgA7yx5x+6Q\nlFJ4QXknSZJYtX8VkyMnMztqNjVL1CSsVhgdanRgzYoi/Pe/N47Z33p8K+PWj2Ny5GTqla5Hr+Be\ntK/antw5dS5mZxnx1wjGrBnD0m5LCSgYQExCDFVHV2XaU9NoVK6R3eEp5TVcMg1Ddslo0t96fCuT\nIyczdctU7sx1J11qd6FTUCcCCwVeW+fiRShVypqHJ0+eG7ePSYhhTtQcxq0fx5bjW+hapys9g3tS\nrVg1Z/1KPunTVZ/yzbpvWNptKeUKlru2/PsN3/P9xu9Z3n05xmTo36hS6ja8PukfOn+IaVumMTly\nMicvn6Rzrc6E1Qqjdsnat00k998PH38MzZrdvt2dp3YyfsN4JmycQJWiVegV3IsONTqQxz/P7TdS\ntxj6x1C+2/AdS7otoWyBsjd8l5CUQO2vazO85XBa393apgiV8i5emfTPxZxjdtRsJkdOZuPRjTxZ\n/Um61O5Ck8Am+Jn0L0kMHAi5csHgwenHEJ8Yzy87fmHc+nH8c+gfOgV1oldwL+qUqpOZX8mnfLzy\nYyZumsiSrksoUyDVWTqYEzWHwcsHs77Peof+7pRSafOapB+bEEvEvxFM2TyFhbsW0qJCC8JqhdG2\nSlvuyJmxeRUWLLCO9Jcvz1g8+8/t57sN3/Hdhu8olb8UvYJ7ERoUyp2578xYQz5gyPIhTNk8haXd\nllL6ztK3XU9EaDi+If3v60+nWp2yMUKlvJPHJ/3le5czJXIKs6JmUatErWsXZAvnKZzpdi9ehIAA\nmDsXHngg49snJiWyYNcCvl3/LUv3LqVD9Q70qt+Le++6V2vTwOBlg5mxdQZLui2hVP5S6a6/dM9S\nes3rxbYXt5ErR65siFAp7+XxST/oqyC61OpCp1qdCCgYkP5GDlq4EJ59FubPh3r1Mt/O0YtHmbBx\nAt+u/5a8/nnpFdyLLrW7ZKpTEhHiEuOITYwlNiE2zZ9F8xSl/l31Mx+4C4gIg5YNYlbULJZ0XULJ\n/CUd3vaRyY/weNXH6XtvXxdGqJT38/ik78pYfvoJXnzRmpqhWhYH6CRJEsv2LmPc+nFE7Iygafmm\n5MqRK83kHZMQc8OyuMQ4/P38yZ0zN7lz5E7z585TO6l/V32GtxxO+ULlnfLnkRUiwntL3yN
8eziL\nuy6mRL4SGdp+3eF1tJvWjp0v7yRfrnwuilIp76dJPx0TJ8J778GKFVC+vHPaPHn5JIt3L8bP+DmU\nwK/+zJUjl8MXM2MSYvjsz88Y+fdIXm7wMm82ftO2kUUiwtuL3+a3f3/j92d/p3i+4plq55lZz1C3\nZF0GPjjQyREq5Ts06Ttg9GgYORJWrrTm5vEk+8/t5/WFr7P28FpGtBzB49Uez9brCiLCgN8HsGDX\nAn7v+jvF8hbLdFs7Tu2g8XeN2f7SdorkKeLEKJXyHZr0HfTRRzB9ujWip4gH5pvFuxfTb34/ytxZ\nhi9bf5ktN5SJCP+36P9YsmcJi55dRNG8RbPcZp95fSh0RyE+efgTJ0SolO/RpO8gEXjrLSvp//47\n3OmBozDjE+MZs2YMH638iG51uvF+0/cpkLuAS/YlIry+8HVW7FvBwmcXOu3I/ND5Q9T+X20i/xN5\n27H9Sqnb01k2HWQMfPIJ1K0Ljz0GV66kv4278c/hT/+G/dnSdwunr5ym2uhqTNo0iSRJcup+RIRX\nF7zKyv0rWfTsIqeWYsoUKEPPej35cPmHTmtTKZU2nzzSvyoxEbp0gUuXYPZs8PfP1t071d8H/+bl\niJfx9/NndJvRBJcOznKbIkK/iH6sPryaBV0WUOiOQk6I9Eanr5ym6uiqrHpuFVWKVnF6+0p5Mz3S\nz6AcOWDSJKvc062b1Ql4qoZlG/JPz394rt5ztJnShj7z+nDy8slMt5ckSbz424usPbKWhV0WuiTh\nAxTJU4TXGr7Ge0vfc0n7Sqkb+XTSB+vo/scf4ehReOEFqwPwVH7Gj57BPYl6MYrcOXNTY0wNxqwe\nk+EHxyRJEi/8+gIbj25kQZcFFLyjoIsitvS7rx8r961k3eF1Lt2PUsrHyzspXbgADz0ETZta9X5v\nmGFh87HNvBzxMmdjzjKq9SgeDHww3W2SJIk+8/oQdTKKiLCIbJtr6Os1X/Pz9p9Z0GVBtuxPKW+g\no3ey6PRpK+mHhsI7XvKgJxHhx60/8saiN2gS2IRhDw277UiZJEmi19xe7Dy9k187/5qtk8vFJ8ZT\nfUx1xrYbS/MKzbNtv0p5Mq3pZ1GRItY8PRMmwKhRdkfjHMYYngl6hugXoylfsDx1/leHT/74hNiE\n2BvWS0xK5Pm5z7PrzC5+C/st22cT9c/hz5BmQxi4eCB2d/5KeTNN+jcpXRoWLYJPP7WmbfAW+XLl\n46MWH/F3z79ZuX8ltb6uRcTOCMBK+D3Ce7Dv7D5+7fwr+XPltyXGZ4KeITYhlp+jf7Zl/0r5Ai3v\n3EZ0tPW0rTFj4Mkn7Y7G+X7d8Sv9F/SnerHq5PXPy8nLJ5nbaS55/fPaGlfEzgheX/g6kX0jyemX\n09ZYlHJ3Wt5xomrV4LffoG9fq+TjbdpWacuWvlu4v+z9+Bk/5nWaZ3vCB2hVuRXF8xXnh00/2B2K\nUl5Jj/TTsWoVPPEEzJkDjRvbHY1v+PPAn4TOCmXHyzsy/KQ0pXyJHum7QOPGMHmyVeLZsMHuaHxD\no3KNqFe6Hl+v+druUJTyOnqk7yBnPoRFpW/L8S20mNSCnS/vdNlEckp5Oj3Sd6Enn4ShQ6FlS9i3\nz+5ovF9QiSBaVW7F8D+H2x2KUl5Fj/QzaNQo+PJL6yEspdJ/DrjKgr1n91J/bH2iXozK8CMZlfIF\nekduNvl//w9mzPDch7B4klciXgHgi9Zf2ByJUu5Hk342EYE337SeteupD2HxFMcvHaf6mOqs673O\nLR4Kr5Q70aSfjUTgP/+BnTut8fx36MhCl/lg6QfsPbeXiY970S3SSjmBJv1sdvUhLGfPWvP1lCxp\nd0Te6Xzsee4edTeLuy4mqESQ3eEo5TZ09E42u/oQltq1ISgIvvgCEjI2db1yQIHcBRjQeADvLPGS\nqU+VspEm/Szy97fm31+xAubNg+Bg671yrr739mXj0Y3
8eeBPu0NRyqNpeceJRGDWLHj9dWjSBIYN\ng7vusjsq7/H9hu/5fuP3LO++HOMNT7lRKotcVt4xxrQyxkQbY3YYY95K5fs3jDEbjDHrjTGbjTEJ\nxphCjmzrTYyBjh0hKgrKlbPKPsOHQ3y83ZF5h2frPMvJyyeZ/+98u0NRymOle6RvjPEDdgAtgMPA\nGiBURKJvs/6jQH8ReSgj23rDkf7Ntm+Hfv3g4EEYPdqaqlllzZyoOQxePpj1fdbjZ7Q6qXybq470\nGwA7RWSfiMQD04H2aazfCZiWyW29StWqMH++dTNXjx7WYxgPHbI7Ks/2eLXHyZ0zNzO2zLA7FKU8\nkiNJvwxwIMXng8nLbmGMyQO0AmZndFtvZYw1NfO2bXD33VCnjlXrj4uzOzLPZIxhaIuhvLf0PeIS\n9Q9RqYxy9vlxO+APETnr5HY9Xt68MGQI/P23NX1DnTrW3bwq45pVaEalIpUYv3683aEo5XEceR7d\nISAgxeeyyctSE8r10k5Gt2XQoEHX3oeEhBASEuJAeJ6lcmX45RdreGfv3lC/PowYYV34VY77uPnH\ntJvWjq51upIvVz67w1EqWyxbtoxly5ZlqQ1HLuTmALZjXYw9AqwGOolI1E3rFQR2A2VF5EpGtk1e\n1+su5KbnyhVrjP/o0dYwz9deg9y57Y7Kczwz6xnqlqzLwAcH2h2KUrZwyYVcEUkEXgIWAluB6SIS\nZYzpY4zpnWLVx4EFVxN+WttmJEBvlicPDBoEq1fDX39BrVqwYIHdUXmOIc2GMOLvEZy+ctruUNyS\niNDlpy78HP2z3aEoN6I3Z7mRX3+FV16xxvd//jkEBtodkfv7zy//YeuJrfRr0I92VdvpM3VTmLd9\nHn1/7YsgbHthGwXvKGh3SMrJdMI1LxATA599BiNHQv/+8MYbOoNnWmISYpi5dSaTIiex/sh6OlTv\nQNc6XWlUrpFP37UbmxBL0NdBjG49mp+ifsI/hz+j24y2OyzlZJr0vcjevVaNPzLS6gTatbMmeFO3\nd/D8QaZETmHiponEJsbStXZXnq3zLBULV7Q7tGz32Z+fsXzfcuZ1mseZK2eo8VUNwkPDaVCmgd2h\nKSfSpO+FFiyAd9+F48fh+efhueegbFm7o3JvIsL6I+uZtGkS07ZMo2qxqnSt3ZWONTtS6I5Cdofn\ncscuHqPmVzX58/k/qVK0CgCTIycz/K/hrOm1hpx+jgzaU55Ak74X27ABxo2D6dOhcWPo1QvatIGc\n+v83TfGJ8cz/dz6TIiexaNciHqn8CF1rd6VlpZb45/C3OzyX6DW3FwXvKMhnLT+7tkxEePiHh2l7\nd1tevf9VG6NTzqRJ3wdcugQzZ8LYsbB/vzW9w/PPQ/nydkfm/s5cOcOPW39kUuQk/j39L52DOtO1\nTlfqlqrrNfX/9UfW02ZKG7a/tP2WC7c7Tu2g0fhGbOizgXIF9caQLce30Hl2Z/6v0f/RpXYXj/w3\noEnfx2zZYh39T5kC99xjHf0/9pg1x79K285TO5kcOZlJkZPInys/XWt3Jax2GHfd6blzYYsITSY0\n4dnaz9K7fu9U1xm8bDAbj21kzjNzsjk69xJ1IooWk1rQ775+/BD5A3VK1uHrtl973AinzCR9RMQt\nXlYoKjMuXxaZPFmkSRORUqVE3npLZOdOu6PyDIlJibJi7wrpGd5TCg8tLC1/aCmTN02Wi7EX7Q4t\nw2ZsmSF1vq4jCYkJt10nJj5GqoyqIuHR4dkYmXvZfnK7lBleRiZtnCQiIpfjLssLv7wg5UeWlz/2\n/WFzdBmTnDczlmszuoGrXpr0nSM6WuSNN0SKFxdp3lxk2jSRmBi7o/IMl+Muy/TN06XNlDZSaGgh\n6f5zd1mye4kkJiXaHVq6LsddloDPA2TZnmXprrtk9xIJ+DxALsReyIbI3Mu/p/6VsiPKyvj142/5\nbm70XCn1WSl5f8n
7Ep8Yb0N0GZeZpK/lHS8VGwvh4Vb5Z9MmePZZq/xTrZrdkXmGoxePMm3zNMZv\nGE9w6WAmPj7RrWu+Q5YPIfJ4JDM7znRo/a5zulIiX4kbLvZ6uz1n9hAyMYR3HnzntuWvIxeO0D28\nO+djzzPlySluP9xXyzsqVf/+KzJwoFX6eeABkUmTrJKQSt+luEsS9FWQjF071u5QbuvAuQNS5JMi\nsufMHoe3OXbxmBQfVlw2HtnousDcyN4ze6X8yPIyZvWYdNdNTEqUEX+OkGLDiskPm37IhugyDy3v\nqLTExYn89JNI69YiRYuKvPyySGSk3VG5v6gTUVJsWDG3TZBhs8PkncXvZHi7cevGyX3j7vOI8lVW\nHDh3QCp+UVG++PuLDG234cgGqT66unSe3VnOXjnrouiyJjNJX58350P8/a0Huvz2G6xbB4ULW2P9\n773XerDLrl12R+ieqhWrxhetvqDjzI6cjz1vdzg3+OvAXyzbu4wBDwzI8LbP1XuOnH45GbturAsi\ncw+HLxym2cRmvHDPC/S7r1+Gtq1bqi5re6+lYO6C1P2mLqv2r3JRlNlLa/o+LjERli6F2bPhp5+g\nTBno0AGeesp63KO6rs+8PpyNPcv0p6a7RX0/SZJo+G1DXm7wMs/WeTZTbWw5voXmE5sT2TeSUvlL\nOTlCex29eJSQCSF0r9s9U51iSnO3z6X3vN70qd+H95q+5zZ3Nes4fZUliYnwxx9WBzB7NhQpYnUA\nHTpAjRrWox99WUxCDPePv59ewb144d4X7A6HiRsn8tXar/jr+b+y9JD4Ab8PYP+5/Ux9aqoTo7PX\n8UvHCZkQQudanXm3ybtOafPIhSN0+7kbF+MuMuXJKVQoXMEp7WaFXshVTpOYKLJqlcirr4oEBIhU\nrSryzjsiGzaIJCXZHZ19dp7aKcWGFZO1h9baGsf5mPNy1/C75K8Df2W5rUtxl6T8yPKy4N8FTojM\nficunZBaX9WS95e87/S2E5MSZfifw93mIi86ZFO5ggisXQuzZlkvuH4GcM89vncGMHPrTAYsHsC6\n3utsm8Dt7cVvc+D8AX544gentPfbzt/oF9GPzX03k8c/j1PatMPpK6dpMakFrSu35qPmH7msDLfx\n6EY6ze5EcOlgvmrzlW138mp5R7mciDXu/2oHcOWKVf/v0AEaNgQ/Hxka8PJvL3PowiFmPz072+v7\nu8/s5t5x9xL5n0jKFCjjtHY7zuxItaLVGNJ8iNPazE5nY87y0KSHaFa+GcMeHubyv5fL8Zd5fcHr\nzN81n8lDuh+/AAASNElEQVRPTKZxQGOX7i81mvRVthKBbduudwCnT1sdwFNPwQMPePf8/7EJsTzw\n/QOE1Qqjf8P+2brvp358inql6jmtVn3V4QuHqfO/OqzovoLqxas7tW1XOxdzjpaTW3J/2fv5/JHP\ns7UjDo8Op88vffjPPf/h3SbvZutFXk36ylbR0dcvAh8+bA0PfeopCAnxzimg95zZQ8PxDQkPDadh\n2YbZss+le5bSI7wHUS9GuaQMM+qfUcyOms3SbkvdYoSSIy7EXuCRyY8QXDqYUa1H2RL34QuH6fZz\nNy7HX2byE5Oz7SKvJn3lNnbtspL/rFnWbKBVq0LNmtarRg3rZ4UKnn82EB4dzivzX2Fd73UUzVvU\npftKTEokeGww7z74Lh1rdnTZPhqOb8iL975I97rdXbIPZ7oYd5HWU1pTs3hNvm77ta0dVZIk8flf\nnzN01VBGPjKSsNphLt+nJn3lli5csM4Ctm69/tq2DU6cuN4ZXO0IrnYGnnRt4I2FbxB9Mpq5neZm\naehker5Z+w1Tt0xlWbdlLk1uV+fk3/LCForlLeay/WTV5fjLtJ3alkqFKzG23ViX/tlnxIYjG+j8\nU2fql67PmDZjXHqRV5O+8igXLkBU1PVO4GqHcPLkjWcGV1/ly7tnZxCfGE/TCU15v
NrjvNn4TZfs\n42zMWaqOrsr8sPnUK13PJftIqf/8/lyIvcD49uNdvq/MuBJ/hXbT2lGmQBm+b/+92yT8qy7HX+a1\nBa+xcNdCfun8CzWK13DJfjTpK6+QsjNIeWZw8qQ1S+jNZSJ36AwOnDvAvePuZdbTs3gg4AGnt//a\ngte4GHeRse2yZ8qE87HnqflVTaY+OZUHAx/Mln06KiYhhvbT21MsbzEmPT6JHH7uWyOcuHEib/7+\nJjM7zqRJYBOnt69JX3m18+dTPzM4fhzKlYPAQKsDKF/+xvd33ZU91w4idkbQ+5ferO+9nuL5ijut\n3eiT0Tzw3QNse3EbJfKVcFq76Zm9bTbvLX2Pjf/ZSK4cubJtv2mJTYjlyR+f5M5cdzL5ycluMx1C\nWhbtWkTYT2GMbjOap2s+7dS2Nekrn3TlivW84H37YO9e65Xy/cmT1pxCqXUIgYFQtqzzRhe9vfht\n1h1ZR0RYhNNKDm2ntqV5+ea83uh1p7TnKBGh3bR2NCrXiLcffDtb952auMQ4Os7siL+fP9OemuZR\nD7bfdHQTbae25dWGr/La/a857ZqMJn2lUhEbCwcOpN4h7NsHx45B6dI3dgYpO4eAAMc7hYSkBFpM\nasHDFR92yjj6iJ0RvDL/Fba8sMWWo+29Z/dyz9h7+KfnP1QqUinb939VfGI8obNDSUhKYGbHmW5z\n5pER+8/tp82UNrSo0IIRj4xwSllKk75SmRAXBwcPpt4h7NljnSnUrAn16l1/1aoFefOm3t7hC4e5\nZ+w9THlyCs0qNMt0XPGJ8dT6uhaftfyMR6s8mul2smrYqmEs2bOEiLAIW4ZEJiQlEPZTGBfjLvLT\n0z+RO2fubI/BWc7GnOWJGU9QJE8RJj8xOcv3WmjSV8oFLlyAyEjYsOH6KzraOgtI2RHUq2fNTArw\n++7f6fZzN9b1XpfpKYtH/j2SiH8jmB8239bx5/GJ8dQfW593HnyHZ4KeydZ9JyYl0vXnrpy6fIqf\nQ3/mjpx3ZOv+XSE2IZYe4T3Yd24f4aHhWRoWq0lfqWwSF2ddTE7ZEWzaZD2Y5moHEF1qEPtYwYqe\ni8iZwSvJJy6doMZXNVjefbnLhvtlxJ8H/qTjzI5sfWFrtkwyJyKsO7KOoX8M5VzsOeaGzvXoieBu\nliRJvL34beZEzyEiLCLTz+LVpK+UjZKSYPfu653A+g2JLC37CDkON6JR7Ic3nBFUqZL2iKK+v/TF\nP4c/X7b+Mvt+gXT0mdeHnH45GdN2jEvaFxE2H9/MjC0zmLF1BgCdgjox8MGB5PW/TS3Nw41ZPYaP\nVn5EeGg495a5N8Pba9JXys0cu3iMul8H82LZ7zG7W17rEI4ds64L1KsHlSpBqVJQsqT181TOSDrO\ne4jol6IpkqeI3b/CNWeunKHGVzUIDw2nQZkGTmt3+8ntTN8ynRlbZ3Ap/hJP13ia0KBQgksHe8z8\nP1kRHh1Oz3k9mdB+Am2rtM3Qtpr0lXJDy/cuJ3R2KGt7rb02FfK5c7Bxo/Xatw+OHk1+HRN23t+C\npK1PUWr/izd0Binfp1xWsGD2PdNgSuQUPvvrM9b0WpOlMfJ7zuxhxtYZTN8yneOXjtOxRkdCg0K5\nr+x9bnd3bXb4++DfPDHjCQaHDKZ3/d4Ob6dJXyk39fHKj4n4N4Kl3ZammSznRM3hvaXvsfq5jZw+\nmfNaZ3DsGLd9Hxubemdw9X1AgHU2Ubhw1jsHEeHhHx6m7d1tefX+VzO07cHzB/lx64/M2DqD3Wd2\n06F6B54JeoYHAx5067tqs8vOUztpPaU1oUGhDGk2xKGzHE36SrmpJEmi7dS21ClZh6EPDU11nZiE\nGGqMqcHYdmN5qOJDDrd9+bKV/FPrGI4csc4kdu2ypqqoVAkqVrReV99XqmTd0ezovQg7Tu2g0fhG\nbOizgXIFy6W57rGLx5i1bRbTt05n24lttK/an
tCgUJpXaO4Rd9Nmt+OXjtNuWjuqFq3Kt499m+79\nCJr0lXJjJy+fJPibYL5q+1Wq4+6H/jGUvw7+RXhouNP3LWI95GbXLuti89WfV98fO2bdmZyyI0jZ\nORQocGN7g5cNZuOxjcx5Zs4t+zp1+RQ/Rf3EjK0zWHt4LY9WeZTQoFBaVmrpkTdVZbfL8ZcJnRXK\n5fjLzH56dpqzdGrSV8rNrdq/iid/fJLVPVcTWCjw2vIjF44Q9HUQ//T8h8pFKmd7XLGx188Ibu4U\ndu+GO+64sSMIqBjLR6dq80HjT+ne8DEuxp8jfHs407dMZ9WBVTxS6RGeqfkMbe5u41VDLbNLQlIC\n/SL6serAKn7r/NttH4vpsqRvjGkFjAT8gPEi8kkq64QAnwP+wAkRaZa8fC9wDkgC4kUk1cv+mvSV\nr/jsz8+YtW0WK3qsuHbk2yO8B8XzFmfYw8Nsju5WItakdik7g127YP2ZpWyr0g05XB8qLKHw2WZU\nTXiG+wq1o1K5/AQEWGWjgADrpjUfGIjjVCLCJ6s+4as1X/Fb2G8ElQi6ZR2XJH1jjB+wA2gBHAbW\nAKEiEp1inYLAn0BLETlkjCkmIieTv9sN1BeRM+nsR5O+8gkiQvvp7alcpDIjHhnBmkNreGz6Y2x/\naTsFchdIvwE38uU/X5IvR0HuubM9544VYv9+a56jqz+vvo+Nvd4BpPyZ8n2+fHb/Nu5pSuQUXl3w\nKjM6zLhlWg9XJf2GwAci0jr58wBAUh7tG2P6AqVF5P1Utt8D3CMip9LZjyZ95TNOXzlN/bH1Gd5y\nOJ/9+RnP13ue54Oftzssl7lw4XonkLJTSNk55Mlza6cQEGA9Sa1iRShRwnfPFpbsWULorFBGthpJ\n51qdry13VdJ/CnhERHonf+4CNBCRfinWuVrWqQnkB74UkR+Sv9sNnAUSgbEiMu42+9Gkr3zK6kOr\naT6xOVWLVWV1z9U+PWxRxJrY7uaO4Oqkd7t3Q0zMrSOPrr4PDITcnjsPm0M2H9tM26lteeHeF3ir\n8VsYYzKV9J01ZionEAw0B/IBfxlj/hKRf4HGInLEGFMcWGSMiRKRP1JrZNCgQdfeh4SEEBIS4qTw\nlHI/Dco04IcnfqBSkUo+nfDBOoIvXtx6BQenvs65czdeXN68GcLDrfcHDlj3JaTWIVSsCEWLev5Z\nQq2StRheZTgvDX2J2QVm0/ru1plqx9HyziARaZX8ObXyzlvAHSIyOPnzt0CEiMy+qa0PgAsiMiKV\n/eiRvlIqUxISrMSfchhqyvdJSdc7gpSdQfnyUKyYNSTVWQ/ScbVzMed46senyJcrH3M7zXVJeScH\nsB3rQu4RYDXQSUSiUqxTDRgFtAJyA/8AzwB7AT8RuWiMyQcsBAaLyMJU9qNJXynlEmfOpD4Udc8e\n67vz561rCoUK3foqWDD9ZQULQq5svAUhLjGOnnN78sOTP7h0yOYXXB+yOdQY0wfriH9s8jpvAD2w\navfjRGSUMaYCMAcQrBLQFBFJ9XZETfpKKbskJcHFi3D27I2vc+duXXa75blypd5ppHwVLpz6+4IF\nwT+DT38UEfz8/PTmLKWUym4icOnSrZ3BmTPXl505c+PymzuRlGcaKTuEtDqLevX0jlyllPI4Kc80\nbu4QbtdZnDkDmzdr0ldKKZ+RmSGbvjdxtVJK+TBN+kop5UM06SullA/RpK+UUj5Ek75SSvkQTfpK\nKeVDNOkrpZQP0aSvlFI+RJO+Ukr5EE36SinlQzTpK6WUD9Gkr5RSPkSTvlJK+RBN+kop5UM06Sul\nlA/RpK+UUj5Ek75SSvkQTfpKKeVDNOkrpZQP0aSvlFI+RJO+Ukr5EE36SinlQzTpK6WUD9Gkr5RS\nPkSTvlJK+RBN+kop5UM06SullA/RpK+UUj5Ek75SSvkQTfpKKeVDNOkrpZQP0aSvlFI+RJO+Ukr5\nEIeSvjGml
TEm2hizwxjz1m3WCTHGbDDGbDHGLM3ItkoppbJHuknfGOMHjAYeAWoCnYwx1W5apyAw\nBnhURIKAjo5u6w2WLVtmdwhZovHbS+O3l6fHn1GOHOk3AHaKyD4RiQemA+1vWqczMFtEDgGIyMkM\nbOvxPP0fjcZvL43fXp4ef0Y5kvTLAAdSfD6YvCylKkARY8xSY8waY8yzGdhWKaVUNsnpxHaCgeZA\nPuAvY8xfTmpbKaWUkxgRSXsFYxoCg0SkVfLnAYCIyCcp1nkLuENEBid//haIAA6lt22KNtIORCml\n1C1ExGRkfUeO9NcAlY0xgcARIBTodNM64cAoY0wOIDdwHzAC2O7AtpkKXCmlVMalm/RFJNEY8xKw\nEOsawHgRiTLG9LG+lrEiEm2MWQBEAonAWBHZBpDatq76ZZRSSqUt3fKOUkop72H7HbmefPOWMaas\nMWaJMWarMWazMaaf3TFllDHGzxiz3hgz1+5YMsMYU9AYM9MYE5X893Cf3TE5yhgzMDnmSGPMFGNM\nLrtjSosxZrwx5pgxJjLFssLGmIXGmO3GmAXJ9+y4pdvEPyz5385GY8xsY0wBO2NMS2rxp/judWNM\nkjGmSHrt2Jr0veDmrQTgNRGpCdwPvOhh8QO8AmyzO4gs+AL4TUSqA3UAjygfJl/n6gXUE5HaWKXW\nUHujStf3WP9XUxoA/C4iVYElwMBsj8pxqcW/EKgpInWBnXhe/BhjygIPA/scacTuI32PvnlLRI6K\nyMbk9xexEo7H3IeQ/I+lDfCt3bFkRvJR2YMi8j2AiCSIyHmbw3LUeSAOyGeMyQnkBQ7bG1LaROQP\n4MxNi9sDE5PfTwQez9agMiC1+EXkdxFJSv74N1A22wNz0G3+/AE+B/7P0XbsTvpec/OWMaY8UBf4\nx95IMuTqPxZPvbBTAThpjPk+uUQ11hiTx+6gHCEiZ4DhwH6soc1nReR3e6PKlBIicgysgyCghM3x\nZMVzWEPNPYYx5jHggIhsdnQbu5O+VzDG5AdmAa8kH/G7PWNMW+BY8pmKSX55mqs3BY4RkWDgMla5\nwe0ZYyoCrwKBwF1AfmNMZ3ujcgqPPIAwxrwDxIvIVLtjcVTyAc7bwAcpF6e3nd1J/xAQkOJz2eRl\nHiP51HwW8IOIhNsdTwY0Bh4zxuwGpgHNjDGTbI4pow5iHeWsTf48C6sT8AT3AKtE5LSIJAI/AY1s\njikzjhljSgIYY0oBx22OJ8OMMd2xypye1ulWAsoDm4wxe7Dy5zpjTJpnW3Yn/Ws3fiWPXAgFPG0U\nyXfANhH5wu5AMkJE3haRABGpiPXnvkREutodV0YklxUOGGOqJC9qgedclN4ONDTG3GGMMVixe8JF\n6JvPCucC3ZPfd8O6UdOd3RC/MaYVVonzMRGJtS0qx12LX0S2iEgpEakoIhWwDoLqiUiaHa+tST/5\nCOfqzVtbgemedPOWMaYxEAY0T36WwPrkf0Qq+/QDphhjNmKN3vnY5ngcIiKbgEnAOmAT1n/ksbYG\nlQ5jzFTgT6CKMWa/MaYHMBR42BizHavjGmpnjGm5TfyjgPzAouT/v1/ZGmQabhN/SoID5R29OUsp\npXyI3eUdpZRS2UiTvlJK+RBN+kop5UM06SullA/RpK+UUj5Ek75SSvkQTfpKKeVDNOkrpZQP+f+Q\nm5ZOw+MtFwAAAABJRU5ErkJggg==\n", + "text/plain": [ + "" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "plt.plot(zip(train_costs, valid_costs))\n", + "plt.legend(['train', 'valid'])" ] }, { @@ -116,8 +865,8 @@ "source": [ "### References: \n", 
"http://arxiv.org/pdf/1411.7783.pdf \n", - "http://arxiv.org/pdf/1507.02672v2.pdf \n", - "http://arxiv.org/pdf/1511.06430.pdf" + "Curious AI combinator - http://arxiv.org/pdf/1507.02672v2.pdf \n", + "MILA UDEM combinator - http://arxiv.org/pdf/1511.06430.pdf" ] } ], From 3430199230b7ac984e5c8287c576bf353e6bf35d Mon Sep 17 00:00:00 2001 From: AdrianLsk Date: Wed, 22 Jun 2016 09:50:07 +0200 Subject: [PATCH 04/10] all --- examples/ladder_nets/LadderNets.ipynb | 178 ++++++++--------- examples/ladder_nets/ladder_nets.py | 95 +++++---- examples/ladder_nets/train_ladder_nets.py | 229 +++++++++------------- examples/ladder_nets/utils.py | 146 +++----------- 4 files changed, 270 insertions(+), 378 deletions(-) diff --git a/examples/ladder_nets/LadderNets.ipynb b/examples/ladder_nets/LadderNets.ipynb index 0400c2a..c6d9eb9 100644 --- a/examples/ladder_nets/LadderNets.ipynb +++ b/examples/ladder_nets/LadderNets.ipynb @@ -84,24 +84,17 @@ }, { "cell_type": "code", - "execution_count": 1, + "execution_count": null, "metadata": { "collapsed": false }, - "outputs": [ - { - "name": "stderr", - "output_type": "stream", - "text": [ - "Using gpu device 2: Tesla K80 (CNMeM is disabled, cuDNN 4007)\n" - ] - } - ], + "outputs": [], "source": [ + "# %load ladder_nets.py\n", "from lasagne.layers import InputLayer, MergeLayer, DenseLayer, DropoutLayer, \\\n", " GaussianNoiseLayer, NonlinearityLayer\n", "from lasagne.layers.normalization import BatchNormLayer\n", - "from lasagne.nonlinearities import softmax, linear, tanh, rectify\n", + "from lasagne.nonlinearities import *\n", "import lasagne\n", "\n", "import theano\n", @@ -242,8 +235,8 @@ "class CombinatorLayer(MergeLayer):\n", " \"\"\"\n", " A layer that combines the terms from dirty and clean encoders,\n", - " in a following way:\n", - " $$ \\hat{z} = g(\\tilde{z}, u) = $$\n", + " and outputs denoised variable:\n", + " $$ \\hat{z} = g(\\tilde{z}, u)$$\n", " \"\"\"\n", " def __init__(self, incoming_z, incoming_u, combinator_type, 
**kwargs):\n", " super(CombinatorLayer, self).__init__(\n", @@ -460,12 +453,13 @@ }, { "cell_type": "code", - "execution_count": 2, + "execution_count": null, "metadata": { - "collapsed": true + "collapsed": false }, "outputs": [], "source": [ + "# %load utils.py\n", "import gzip\n", "import cPickle as pickle\n", "import sys\n", @@ -527,7 +521,7 @@ }, { "cell_type": "code", - "execution_count": 6, + "execution_count": 22, "metadata": { "collapsed": false }, @@ -539,116 +533,118 @@ "Loading data...\n", "Building model and compiling functions...\n", "Starting training...\n", - "Epoch 1 took 26.803 s\n", - "Train cost 0.843797385693, val cost 0.735326230526, val acc 0.959999978542\n", - "Layer #0 rec cost: 0.0424227267504\n", - "Layer #1 rec cost: 0.0209430493414\n", - "Layer #2 rec cost: 0.018017789349\n", + "Epoch 1 took 26.439 s\n", + "Train cost 0.845667004585, val cost 0.739950656891, val acc 0.949999988079\n", + "Layer #0 rec cost: 0.0424115508795\n", + "Layer #1 rec cost: 0.0210689622909\n", + "Layer #2 rec cost: 0.017903175205\n", "enc_batchn_0_learn: mean == 0. True, inv_std == 1. True\n", "enc_batchn_1_learn: mean == 0. True, inv_std == 1. True\n", - "Epoch 2 took 26.748 s\n", - "Train cost 0.737002074718, val cost 0.744888663292, val acc 0.949999988079\n", - "Layer #0 rec cost: 0.0424131974578\n", - "Layer #1 rec cost: 0.0197453834116\n", - "Layer #2 rec cost: 0.0170130133629\n", + "Epoch 2 took 26.815 s\n", + "Train cost 0.737154126167, val cost 0.728129386902, val acc 0.949999988079\n", + "Layer #0 rec cost: 0.0424207411706\n", + "Layer #1 rec cost: 0.0197576656938\n", + "Layer #2 rec cost: 0.0171148162335\n", "enc_batchn_0_learn: mean == 0. True, inv_std == 1. True\n", "enc_batchn_1_learn: mean == 0. True, inv_std == 1. 
True\n", - "Epoch 3 took 26.716 s\n", - "Train cost 0.708538234234, val cost 0.734336614609, val acc 0.959999978542\n", - "Layer #0 rec cost: 0.0424265153706\n", - "Layer #1 rec cost: 0.019520400092\n", - "Layer #2 rec cost: 0.0167733673006\n", + "Epoch 3 took 26.899 s\n", + "Train cost 0.708354771137, val cost 0.662552952766, val acc 0.959999978542\n", + "Layer #0 rec cost: 0.0424175597727\n", + "Layer #1 rec cost: 0.0194249469787\n", + "Layer #2 rec cost: 0.0167284980416\n", "enc_batchn_0_learn: mean == 0. True, inv_std == 1. True\n", "enc_batchn_1_learn: mean == 0. True, inv_std == 1. True\n", - "Epoch 4 took 26.713 s\n", - "Train cost 0.692537605762, val cost 0.735073328018, val acc 0.959999978542\n", - "Layer #0 rec cost: 0.042408503592\n", - "Layer #1 rec cost: 0.01941104047\n", - "Layer #2 rec cost: 0.0166323203593\n", + "Epoch 4 took 26.708 s\n", + "Train cost 0.693259179592, val cost 0.734445691109, val acc 0.949999988079\n", + "Layer #0 rec cost: 0.0424144156277\n", + "Layer #1 rec cost: 0.0193312242627\n", + "Layer #2 rec cost: 0.0166478361934\n", "enc_batchn_0_learn: mean == 0. True, inv_std == 1. True\n", "enc_batchn_1_learn: mean == 0. True, inv_std == 1. True\n", - "Epoch 5 took 26.132 s\n", - "Train cost 0.681930422783, val cost 0.719261467457, val acc 0.990000009537\n", - "Layer #0 rec cost: 0.0424162633717\n", - "Layer #1 rec cost: 0.0193498600274\n", - "Layer #2 rec cost: 0.0165363010019\n", + "Epoch 5 took 26.818 s\n", + "Train cost 0.682324171066, val cost 0.704281806946, val acc 0.959999978542\n", + "Layer #0 rec cost: 0.0424068495631\n", + "Layer #1 rec cost: 0.0193558614701\n", + "Layer #2 rec cost: 0.0165634099394\n", "enc_batchn_0_learn: mean == 0. True, inv_std == 1. True\n", "enc_batchn_1_learn: mean == 0. True, inv_std == 1. 
True\n", - "Epoch 6 took 26.017 s\n", - "Train cost 0.673169791698, val cost 0.745770931244, val acc 0.949999988079\n", - "Layer #0 rec cost: 0.0424140915275\n", - "Layer #1 rec cost: 0.0192885622382\n", - "Layer #2 rec cost: 0.0165096893907\n", + "Epoch 6 took 26.665 s\n", + "Train cost 0.673853754997, val cost 0.664340019226, val acc 0.959999978542\n", + "Layer #0 rec cost: 0.0424051769078\n", + "Layer #1 rec cost: 0.0193317253143\n", + "Layer #2 rec cost: 0.016635145992\n", "enc_batchn_0_learn: mean == 0. True, inv_std == 1. True\n", "enc_batchn_1_learn: mean == 0. True, inv_std == 1. True\n", - "Epoch 7 took 26.034 s\n", - "Train cost 0.668693244457, val cost 0.695348381996, val acc 0.959999978542\n", - "Layer #0 rec cost: 0.0424080193043\n", - "Layer #1 rec cost: 0.0192632935941\n", - "Layer #2 rec cost: 0.0163964517415\n", + "Epoch 7 took 26.801 s\n", + "Train cost 0.668162584305, val cost 0.692064702511, val acc 0.959999978542\n", + "Layer #0 rec cost: 0.0424223914742\n", + "Layer #1 rec cost: 0.019355526194\n", + "Layer #2 rec cost: 0.0165152177215\n", "enc_batchn_0_learn: mean == 0. True, inv_std == 1. True\n", "enc_batchn_1_learn: mean == 0. True, inv_std == 1. True\n", - "Epoch 8 took 26.017 s\n", - "Train cost 0.665378808975, val cost 0.685513734818, val acc 0.949999988079\n", - "Layer #0 rec cost: 0.0424257256091\n", - "Layer #1 rec cost: 0.0192788075656\n", - "Layer #2 rec cost: 0.016430022195\n", + "Epoch 8 took 26.760 s\n", + "Train cost 0.662031590939, val cost 0.68827599287, val acc 0.959999978542\n", + "Layer #0 rec cost: 0.0424108207226\n", + "Layer #1 rec cost: 0.0192943159491\n", + "Layer #2 rec cost: 0.0163748040795\n", "enc_batchn_0_learn: mean == 0. True, inv_std == 1. True\n", "enc_batchn_1_learn: mean == 0. True, inv_std == 1. 
True\n", - "Epoch 9 took 26.167 s\n", - "Train cost 0.660260021687, val cost 0.650075316429, val acc 0.949999988079\n", - "Layer #0 rec cost: 0.0424268692732\n", - "Layer #1 rec cost: 0.019260045141\n", - "Layer #2 rec cost: 0.0163774229586\n", + "Epoch 9 took 26.680 s\n", + "Train cost 0.659080207348, val cost 0.661842346191, val acc 0.959999978542\n", + "Layer #0 rec cost: 0.0424109250307\n", + "Layer #1 rec cost: 0.0192462466657\n", + "Layer #2 rec cost: 0.0164167992771\n", "enc_batchn_0_learn: mean == 0. True, inv_std == 1. True\n", "enc_batchn_1_learn: mean == 0. True, inv_std == 1. True\n", - "Epoch 10 took 26.650 s\n", - "Train cost 0.657784998417, val cost 0.696567416191, val acc 0.97000002861\n", - "Layer #0 rec cost: 0.0424044318497\n", - "Layer #1 rec cost: 0.0192706119269\n", - "Layer #2 rec cost: 0.0163583438843\n", + "Epoch 10 took 26.697 s\n", + "Train cost 0.657592475414, val cost 0.702867507935, val acc 0.97000002861\n", + "Layer #0 rec cost: 0.0424037612975\n", + "Layer #1 rec cost: 0.0192435141653\n", + "Layer #2 rec cost: 0.0163965579122\n", "enc_batchn_0_learn: mean == 0. True, inv_std == 1. True\n", "enc_batchn_1_learn: mean == 0. True, inv_std == 1. True\n", "New LR: 0.10000000149\n", - "Epoch 11 took 26.544 s\n", - "Train cost 0.654187440872, val cost 0.653413057327, val acc 0.980000019073\n", - "Layer #0 rec cost: 0.0424147695303\n", - "Layer #1 rec cost: 0.0192655138671\n", - "Layer #2 rec cost: 0.0163049418479\n", + "Epoch 11 took 26.767 s\n", + "Train cost 0.651364922523, val cost 0.676774442196, val acc 0.980000019073\n", + "Layer #0 rec cost: 0.0424225926399\n", + "Layer #1 rec cost: 0.0192226637155\n", + "Layer #2 rec cost: 0.0163397565484\n", "enc_batchn_0_learn: mean == 0. True, inv_std == 1. True\n", "enc_batchn_1_learn: mean == 0. True, inv_std == 1. 
True\n", - "Epoch 12 took 26.329 s\n", - "Train cost 0.651110231876, val cost 0.662563860416, val acc 0.97000002861\n", - "Layer #0 rec cost: 0.0424222685397\n", - "Layer #1 rec cost: 0.0192585140467\n", - "Layer #2 rec cost: 0.0163124445826\n", + "Epoch 12 took 26.645 s\n", + "Train cost 0.652854502201, val cost 0.678124725819, val acc 0.97000002861\n", + "Layer #0 rec cost: 0.042415548116\n", + "Layer #1 rec cost: 0.0192005801946\n", + "Layer #2 rec cost: 0.0163279604167\n", "enc_batchn_0_learn: mean == 0. True, inv_std == 1. True\n", "enc_batchn_1_learn: mean == 0. True, inv_std == 1. True\n", - "Epoch 13 took 26.731 s\n", - "Train cost 0.647521734238, val cost 0.689861774445, val acc 0.959999978542\n", - "Layer #0 rec cost: 0.0424127243459\n", - "Layer #1 rec cost: 0.0192261245102\n", - "Layer #2 rec cost: 0.0163582395762\n", + "Epoch 13 took 26.839 s\n", + "Train cost 0.647919416428, val cost 0.678905069828, val acc 0.980000019073\n", + "Layer #0 rec cost: 0.0424238294363\n", + "Layer #1 rec cost: 0.0191765259951\n", + "Layer #2 rec cost: 0.0162966195494\n", "enc_batchn_0_learn: mean == 0. True, inv_std == 1. True\n", "enc_batchn_1_learn: mean == 0. True, inv_std == 1. True\n", - "Epoch 14 took 26.702 s\n", - "Train cost 0.645864963531, val cost 0.670720160007, val acc 0.980000019073\n", - "Layer #0 rec cost: 0.0424097888172\n", - "Layer #1 rec cost: 0.0191815402359\n", - "Layer #2 rec cost: 0.016245925799\n", + "Epoch 14 took 26.501 s\n", + "Train cost 0.646459579468, val cost 0.650809168816, val acc 0.949999988079\n", + "Layer #0 rec cost: 0.0424147360027\n", + "Layer #1 rec cost: 0.0191764775664\n", + "Layer #2 rec cost: 0.0163504853845\n", "enc_batchn_0_learn: mean == 0. True, inv_std == 1. True\n", "enc_batchn_1_learn: mean == 0. True, inv_std == 1. 
True\n", - "Epoch 15 took 26.710 s\n", - "Train cost 0.644159853458, val cost 0.64711368084, val acc 0.959999978542\n", - "Layer #0 rec cost: 0.0424187406898\n", - "Layer #1 rec cost: 0.019159225747\n", - "Layer #2 rec cost: 0.0162756647915\n", + "Epoch 15 took 26.767 s\n", + "Train cost 0.643889009953, val cost 0.675578832626, val acc 0.97000002861\n", + "Layer #0 rec cost: 0.0424266718328\n", + "Layer #1 rec cost: 0.0191710442305\n", + "Layer #2 rec cost: 0.0163805447519\n", "enc_batchn_0_learn: mean == 0. True, inv_std == 1. True\n", "enc_batchn_1_learn: mean == 0. True, inv_std == 1. True\n" ] } ], "source": [ + "# %load train_ladder_nets.py\n", + "\n", "import time\n", "import theano.misc.pkl_utils\n", "\n", diff --git a/examples/ladder_nets/ladder_nets.py b/examples/ladder_nets/ladder_nets.py index 31961da..a2be597 100644 --- a/examples/ladder_nets/ladder_nets.py +++ b/examples/ladder_nets/ladder_nets.py @@ -1,15 +1,17 @@ from lasagne.layers import InputLayer, MergeLayer, DenseLayer, DropoutLayer, \ GaussianNoiseLayer, NonlinearityLayer from lasagne.layers.normalization import BatchNormLayer -from lasagne.nonlinearities import softmax, linear, tanh, rectify +from lasagne.nonlinearities import * import lasagne + import theano -from theano import tensor as T +import theano.tensor as T + import numpy as np from collections import OrderedDict -def _milaUDEM_params(shape, name): +def _create_milaUDEM_params(shape, name): values = np.zeros((6,) + shape, dtype=theano.config.floatX) b_lin = theano.shared(values[0], @@ -39,7 +41,7 @@ def _milaUDEM_params(shape, name): w_zu_sigm, w_sigm, b_lin, b_sigm] -def _curiousAI_params(shape, name): +def _create_curiousAI_params(shape, name): values = np.zeros((8,) + shape, dtype=theano.config.floatX) b_mu_sig = theano.shared(values[0], @@ -71,11 +73,11 @@ def _curiousAI_params(shape, name): b_mu_lin, b_v_lin, b_mu_sig, b_v_sig] -def _get_combinator_params(combinator_type, shape, name): +def 
_create_combinator_params(combinator_type, shape, name): if combinator_type == 'milaUDEM': - return _milaUDEM_params(shape, name) + return _create_milaUDEM_params(shape, name) elif combinator_type == 'curiousAI': - return _curiousAI_params(shape, name) + return _create_curiousAI_params(shape, name) def _combinator_MILAudem(z, u, combinator_params, bc_pttrn): @@ -86,12 +88,15 @@ def _combinator_MILAudem(z, u, combinator_params, bc_pttrn): w_u_lin.dimshuffle(*bc_pttrn) * u + \ w_zu_lin.dimshuffle(*bc_pttrn) * z * u + \ b_lin.dimshuffle(*bc_pttrn) + sigm_pre = w_z_sigm.dimshuffle(*bc_pttrn) * z + \ w_u_sigm.dimshuffle(*bc_pttrn) * u + \ w_zu_sigm.dimshuffle(*bc_pttrn) * z * u + \ b_sigm.dimshuffle(*bc_pttrn) + sigm_out = T.nnet.sigmoid(sigm_pre) - output = lin_out + w_sigm.dimshuffle(*bc_pttrn) * sigm_out + + output = w_sigm.dimshuffle(*bc_pttrn) * sigm_out + lin_out return output @@ -111,6 +116,7 @@ def _combinator_curiousAI(z, u, combinator_params, bc_pttrn): v_sig_pre = w_v_sig.dimshuffle(*bc_pttrn) * u + \ b_v_sig.dimshuffle(*bc_pttrn) + v_lin_out = w_v_lin.dimshuffle(*bc_pttrn) * u + \ b_v_lin.dimshuffle(*bc_pttrn) @@ -126,9 +132,7 @@ def _combinator(z, u, combinator_type, combinator_params): bc_pttrn = ('x', 0) elif u.ndim == 4: bc_pttrn = ('x', 0, 'x', 'x') - elif u.ndim == 5: - bc_pttrn = ('x', 0, 'x', 'x', 'x') - + if combinator_type == 'milaUDEM': return _combinator_MILAudem(z, u, combinator_params, bc_pttrn) elif combinator_type == 'curiousAI': @@ -137,6 +141,9 @@ def _combinator(z, u, combinator_type, combinator_params): class CombinatorLayer(MergeLayer): """ + A layer that combines the terms from dirty and clean encoders, + and outputs denoised variable: + $$ \hat{z} = g(\tilde{z}, u)$$ """ def __init__(self, incoming_z, incoming_u, combinator_type, **kwargs): super(CombinatorLayer, self).__init__( @@ -144,16 +151,15 @@ def __init__(self, incoming_z, incoming_u, combinator_type, **kwargs): self.combinator_type = combinator_type z_shp, u_shp = 
self.input_shapes - if len(z_shp) != 2: - raise ValueError("The input network must have a 2-dimensional " - "output shape: (batch_size, num_hidden)") - - if len(u_shp) != 2: - raise ValueError("The input network must have a 2-dimensional " - "output shape: (batch_size, num_hidden)") + if len(z_shp) != len(u_shp): + raise ValueError("The inputs must have the same shape: " + "(batch_size, num_hidden) in case of dense layer or \n" + "(batch_size, num_feature_maps, height, width) " + "in case of conv layer.") - self.combinator_params = _get_combinator_params(combinator_type, u_shp[1:], - self.name) + self.combinator_params = _create_combinator_params(combinator_type, + u_shp[1:], + self.name) def get_output_shape_for(self, input_shapes): return input_shapes[0] @@ -208,7 +214,8 @@ def build_encoder(net, num_hidden, activation, name, name='{}_{}_'.format(name, noise_lname)) - # 2. batchnormalize learning, alpha one in order to depenend only on the batch mean + # 2. batchnormalization learning, + # alpha set to one in order to depenend only on the given batch mean and inv_std l_name = '{}_{}'.format(name, lbatchn_lname) net[lbatchn_lname] = BatchNormLayer(net.values()[-1], alpha=1., beta=beta, gamma=gamma, name=l_name, @@ -230,15 +237,20 @@ def build_decoder(dirty_net, clean_net, num_nodes, sigma, combinator_type='milaUDEM'): L = len(num_nodes) - 1 - # dirty_enc_dense_1 ... z_L, dirty_enc_softmax .. u_0 + # dirty_enc_dense_1 ... z_L z_L = dirty_net['enc_noise_{}'.format(L)] + + # batchnormalized softmax output .. 
u_0 without learning bn beta, gamma dirty_net['u_0'] = BatchNormLayer(dirty_net.values()[-1], beta=None, gamma=None, name='dec_batchn_softmax') - + + # denoised latent \hat{z}_L = g(\tilde{z}_L, u_L) comb_name = 'dec_combinator_0' dirty_net[comb_name] = CombinatorLayer(*[z_L, dirty_net['u_0']], combinator_type=combinator_type, name=comb_name) + + # batchnormalize denoised latent using clean encoder's bn mean/inv_std without learning enc_bname = 'enc_batchn_{}_norm'.format(L) mu, inv_std = clean_net[enc_bname].get_params() bname = 'dec_batchn_0' @@ -247,34 +259,46 @@ def build_decoder(dirty_net, clean_net, num_nodes, sigma, mean=mu, inv_std=inv_std) for i in range(L): + # dirty_enc_dense_L-i ... z_l + z_l = dirty_net['enc_noise_{}'.format(i)] + + # affine transformation d_name = 'dec_dense_{}'.format(L-i) dirty_net[d_name] = DenseLayer(dirty_net.values()[-1], num_units=num_nodes[i], nonlinearity=linear, name=d_name) - # dirty_enc_dense_L-l ... z_l, dec_dense_l ... u_l - z_l = dirty_net['enc_noise_{}'.format(i)] + + # batchnormalization ... u_l dirty_net['u_l'] = BatchNormLayer(dirty_net.values()[-1], beta=None, gamma=None, name='dec_batchn_dense_{}'.format(L-i)) - + + # denoised latent \hat{z}_L-i comb_name = 'dec_combinator_{}'.format(i+1) dirty_net[comb_name] = CombinatorLayer(*[z_l, dirty_net['u_l']], combinator_type=combinator_type, name=comb_name) + + # batchnormalized latent \hat{z}_L-i^{BN} enc_bname = 'enc_batchn_{}_norm'.format(L-i-1) mu, inv_std = clean_net[enc_bname].get_params() bname = 'dec_batchn_{}'.format(L-i) dirty_net[bname] = BatchNormLayer(dirty_net.values()[-1], alpha=1., beta=None, gamma=None, name=bname, mean=mu, inv_std=inv_std) + + # corrupted input ... z_0 + z_0 = dirty_net['inp_corr'] + # affine transformation d_name = 'dec_dense_{}'.format(L+1) dirty_net[d_name] = DenseLayer(dirty_net.values()[-1], nonlinearity=linear, num_units=num_nodes[i+1], name=d_name) - # input ... z_0, dec_dense_L ... 
u_L - z_0 = dirty_net['inp_corr'] + + # batchnormalization ... u_L dirty_net['u_L'] = BatchNormLayer(dirty_net.values()[-1], beta=None, gamma=None) - + + # denoised input reconstruction comb_name = 'dec_combinator_{}'.format(L+1) dirty_net[comb_name] = CombinatorLayer(*[z_0, dirty_net['u_L']], name=comb_name, combinator_type=combinator_type) @@ -287,6 +311,7 @@ def build_model(num_encoder, num_decoder, p_drop_input, p_drop_hidden, combinator_type='MILAudem'): net = OrderedDict() net['input'] = InputLayer((batch_size, inp_size), name='input') + # corrupted input net['inp_corr'] = GaussianNoiseLayer(net['input'], sigma=p_drop_input, name='input_corr') @@ -299,9 +324,9 @@ def build_model(num_encoder, num_decoder, p_drop_input, p_drop_hidden, eval_output_l, clean_net = build_encoder(clean_net, num_encoder, activation, 'clean', 0., shared_net=dirty_encoder) - # decoders + # dirty decoder dirty_net = build_decoder(dirty_encoder, clean_net, num_decoder, - p_drop_hidden, combinator_type) + p_drop_hidden, combinator_type) return [train_output_l, eval_output_l], dirty_net, clean_net @@ -310,13 +335,15 @@ def build_cost(X, y, num_decoder, dirty_net, clean_net, output_train, lambdas): class_cost = T.nnet.categorical_crossentropy(T.clip(output_train, 1e-15, 1), y).mean() L = len(num_decoder) - + + # get clean and corresponding dirty latent layer output z_clean_l = clean_net['input'] z_dirty_l = dirty_net['dec_combinator_{}'.format(L)] - + z_clean = lasagne.layers.get_output(z_clean_l, X, deterministic=False) z_dirty = lasagne.layers.get_output(z_dirty_l, X, deterministic=False) + # squared error rec_costs = [lambdas[L] * T.sqr(z_clean - z_dirty).mean()] for l in range(L): @@ -328,6 +355,4 @@ def build_cost(X, y, num_decoder, dirty_net, clean_net, output_train, lambdas): rec_costs.append(lambdas[l] * T.sqr(z_clean - z_dirty).mean()) - # cost = class_cost + T.sum(rec_costs) - # return cost return class_cost, rec_costs \ No newline at end of file diff --git 
a/examples/ladder_nets/train_ladder_nets.py b/examples/ladder_nets/train_ladder_nets.py index eb42119..2797f54 100644 --- a/examples/ladder_nets/train_ladder_nets.py +++ b/examples/ladder_nets/train_ladder_nets.py @@ -1,85 +1,30 @@ -from fuel.datasets import MNIST -from fuel.streams import DataStream -from fuel.schemes import ShuffledScheme -from fuel.transformers import Flatten, OneHotEncoding -import lasagne - -from ladder_nets import build_cost, build_model - -import numpy as np - -import theano -import theano.tensor as T +import time import theano.misc.pkl_utils -import argparse -import cPickle - - -arg_parser = argparse.ArgumentParser() -arg_parser.add_argument('-lr', '--learning_rate', type=float, default=0.1) -arg_parser.add_argument('-dlr', '--decrease_lr', type=float, default=1.) -arg_parser.add_argument('-bs', '--batch_size', type=int, default=100) -arg_parser.add_argument('-ep', '--max_epochs', type=int, default=15) -arg_parser.add_argument('-ctype', '--combinator', type=str, default='milaUDEM') -arg_parser.add_argument('-l', '--lambdas', type=str, default='0.1,0.1,0.1') -arg_parser.add_argument('-hdrop', '--hid_dropout', type=float, default=0.3) -args = arg_parser.parse_args() - -NUM_EPOCHS = args.max_epochs -BATCH_SIZE = args.batch_size -LEARNING_RATE = args.learning_rate - -mnist = MNIST(which_sets=('train',), # sources='features', - subset=slice(0, 50000), load_in_memory=True) -mnist_val = MNIST(which_sets=('train',), # sources='features', - subset=slice(50000, 60000), load_in_memory=True) -mnist_test = MNIST(which_sets=('test',), # sources='features', - load_in_memory=True) - -data_stream = DataStream(mnist, - iteration_scheme=ShuffledScheme(mnist.num_examples, - batch_size=BATCH_SIZE)) -data_stream_val = DataStream(mnist_val, - iteration_scheme=ShuffledScheme( - mnist_val.num_examples, batch_size=BATCH_SIZE)) -data_stream_test = DataStream(mnist_test, - iteration_scheme=ShuffledScheme( - mnist_test.num_examples, batch_size=BATCH_SIZE)) - 
-data_stream = Flatten(data_stream, which_sources=('features',)) -data_stream_val = Flatten(data_stream_val, which_sources=('features',)) -data_stream_test = Flatten(data_stream_test, which_sources=('features',)) - -num_classes = 10 - -data_stream = OneHotEncoding(data_stream=data_stream, - which_sources=('targets',), - num_classes=num_classes) - -data_stream_val = OneHotEncoding(data_stream=data_stream_val, - which_sources=('targets',), - num_classes=num_classes) - -data_stream_test = OneHotEncoding(data_stream=data_stream_test, - which_sources=('targets',), - num_classes=num_classes) +LEARNING_RATE = 0.1 +LR_DECREASE = 1. +BATCH_SIZE = 100 +NUM_EPOCHS = 15 +COMBINATOR_TYPE = 'milaUDEM' +LAMBDAS = [0.1, 0.1, 0.1] +DROPOUT = 0.3 + +print "Loading data..." +dataset = load_data() # build network num_encoder = [500, 10] num_decoder = [500, 784] +print "Building model and compiling functions..." [train_output_l, eval_output_l], dirty_net, clean_net = build_model( - num_encoder, num_decoder, args.hid_dropout, args.hid_dropout, - batch_size=None, inp_size=784, combinator_type=args.combinator) - -# print map(lambda x: [x.name, x.output_shape], dirty_net.values()) -# print map(lambda x: [x.name, x.output_shape], clean_net.values()) + num_encoder, num_decoder, DROPOUT, DROPOUT, batch_size=None, + inp_size=784, combinator_type=COMBINATOR_TYPE) # set up input/output variables X = T.fmatrix('X') -y = T.imatrix('y') +y = T.ivector('y') # training output output_train = lasagne.layers.get_output(train_output_l, X, deterministic=False) @@ -90,26 +35,40 @@ # set up (possibly amortizable) lr, cost and updates sh_lr = theano.shared(lasagne.utils.floatX(LEARNING_RATE)) -cost, rec_costs = build_cost(X, y, num_decoder, dirty_net, clean_net, - output_train, [float(x) for x in args.lambdas.split(',')]) +class_cost, rec_costs = build_cost(X, lasagne.utils.one_hot(y), num_decoder, + dirty_net, clean_net, output_train, LAMBDAS) +cost = class_cost + T.sum(rec_costs) net_params = 
lasagne.layers.get_all_params(train_output_l, trainable=True) updates = lasagne.updates.adam(cost, net_params, learning_rate=sh_lr) -# get training and evaluation functions -train = theano.function([X, y], [cost + T.sum(rec_costs)] + rec_costs, - updates=updates) -eval = theano.function([X], [output_eval]) +# get training and evaluation functions, cost = class_cost + T.sum(rec_costs) +batch_index = T.iscalar('batch_index') +batch_slice = slice(batch_index * BATCH_SIZE, (batch_index + 1) * BATCH_SIZE) + +pred = T.argmax(output_eval, axis=1) +accuracy = T.mean(T.eq(pred, y), dtype=theano.config.floatX) +train = theano.function([batch_index], [cost] + rec_costs, + updates=updates, givens={ + X: dataset['X_train'][batch_slice], + y: dataset['y_train'][batch_slice], + }) + +eval = theano.function([batch_index], [cost, accuracy], givens={ + X: dataset['X_valid'][batch_slice], + y: dataset['y_valid'][batch_slice], + }) + +# checking for constants in means and inv_stds during training bl_name = 'enc_batchn_{}_learn' -means = [dirty_net[bl_name.format(i)].mean.ravel().mean() for i +means = [abs(dirty_net[bl_name.format(i)].mean.ravel()).mean() for i in range(len(num_encoder))] means = T.stack(means, axis=1) -stds = [dirty_net[bl_name.format(i)].inv_std.ravel().mean() for i +stds = [abs(dirty_net[bl_name.format(i)].inv_std.ravel()).mean() for i in range(len(num_encoder))] stds = T.stack(stds, axis=1) get_stats = theano.function([], [means, stds]) - # , on_unused_input='ignore') network_dump = {'train_output_layer': train_output_l, 'eval_output_layer': eval_output_l, @@ -120,84 +79,86 @@ 'output_eval': output_eval } - def save_dump(filename,param_values): f = file(filename, 'wb') cPickle.dump(param_values,f,protocol=cPickle.HIGHEST_PROTOCOL) f.close() -def train_epoch(stream): +def train_epoch(): costs = [] rec_costs = [] stats = [] - for batch in stream.get_epoch_iterator(): - train_out = train(*batch) - stats.append(np.vstack(get_stats())) - cur_cost = train_out[0] + for b 
in range(num_batches_train): + train_out = train(b) + train_cost = train_out[0] rec_cost = train_out[1:] - costs.append(cur_cost) + costs.append(train_cost) rec_costs.append(rec_cost) + stats.append(np.vstack(get_stats())) - print '\n'.join(['Layer #{} rec cost: {}'.format(i, c) for i, c - in enumerate(np.mean(rec_costs, axis=0))]) - stats = np.stack(stats, axis=0).mean(axis=0) - means, inv_stds = stats - for i in range(len(num_encoder)): - print '{}: mean {}, inv_std {}'.format(bl_name.format(i), - np.allclose(means[i], 0.), - np.allclose(inv_stds[i], 1.)) - return np.mean(costs) - + return (np.mean(costs), np.mean(rec_costs, axis=0), + np.stack(stats, axis=0).mean(axis=0)) + -def eval_epoch(stream, acc_only=True): +def eval_epoch(): + costs = [] + accs = [] preds = [] targets = [] - for batch in stream.get_epoch_iterator(): - preds.extend(eval(batch[0])) - targets.extend(batch[1]) - - preds = np.vstack(preds) - targets = np.vstack(targets) - - acc = np.mean(preds.argmax(1) == targets.argmax(1)) # accuracy - if not acc_only: - nloglik = (np.log(preds) * targets).sum(1).mean() - # confm = conf_mat(preds, targets)[0].astype(int) - # CONF_MATS['iter_{}'.format(n)] = confm - # save_dump('conf_mats_{}.pkl'.format(experiment_name), CONF_MATS) - # print confm - # return acc, nloglik, confm - else: - return acc - -train_costs, train_accs, valid_accs = [], [], [] -print 'Start training...' + for b in range(num_batches_valid): + eval_cost, eval_acc = eval(b) + costs.append(eval_cost) + accs.append(eval_acc) + + return np.mean(eval_cost), np.mean(eval_acc) + +num_batches_train = dataset['num_examples_train'] // BATCH_SIZE +num_batches_valid = dataset['num_examples_valid'] // BATCH_SIZE + +train_costs, valid_costs, valid_accs = [], [], [] + +print "Starting training..." 
+now = time.time() + try: for n in range(NUM_EPOCHS): - train_costs.append(train_epoch(data_stream)) - train_accs.append(eval_epoch(data_stream)) - valid_accs.append(eval_epoch(data_stream_val)) + train_cost, rec_costs, stats = train_epoch() + eval_cost, acc = eval_epoch() + + train_costs.append(train_cost) + valid_costs.append(eval_cost) + valid_accs.append(acc) + + print "Epoch %d took %.3f s" % (n + 1, time.time() - now) + now = time.time() + print "Train cost {}, val cost {}, val acc {}".format(train_costs[-1], + valid_costs[-1], + valid_accs[-1]) + print '\n'.join(['Layer #{} rec cost: {}'.format(i, c) for i, c + in enumerate(rec_costs)]) + means, inv_stds = stats + for i in range(len(num_encoder)): + print '{}: mean == 0. {}, inv_std == 1. {}'.format(bl_name.format(i), + np.allclose(means[i], 0.), + np.allclose(inv_stds[i], 1.)) + if (n+1) % 10 == 0: - new_lr = sh_lr.get_value() * args.decrease_lr + new_lr = sh_lr.get_value() * LR_DECREASE print "New LR:", new_lr sh_lr.set_value(lasagne.utils.floatX(new_lr)) - save_dump('accs_{}_ladder_net_mnist.pkl'.format(n), - zip(train_accs, valid_accs)) - # theano.misc.pkl_utils.dump(network_dump, - # 'iter_{}_ladder_nets_mnist.zip'.format(n)) - print "Epoch {}: Train cost {}, train acc {}, val acc {}".format( - n, train_costs[-1], train_accs[-1], valid_accs[-1]) - # print 'TIMES: \ttrain {:10.2f}s, \tval {:10.2f}s'.format(t1-t0, - # t2-t1) - - # TODO: needs an early stopping except KeyboardInterrupt: pass -# save_dump('final_iter_{}_{}'.format(n, experiment_name), +# uncomment if to save the learning curve +# save_dump('final_epoch_{}_accs_ladder_net_mnist.pkl'.format(n), +# zip(train_cost, valid_cost)) + +# uncomment if to save the params only +# save_dump('final_epoch_{}_ladder_net_mnist'.format(n), # lasagne.layers.get_all_param_values(output_layer)) -theano.misc.pkl_utils.dump(network_dump, - 'final_iter_{}_ladder_net_mnist.pkl'.format(n)) \ No newline at end of file +# uncomment if to save the whole network +# 
theano.misc.pkl_utils.dump(network_dump, +# 'final_epoch_{}_ladder_net_mnist.pkl'.format(n)) \ No newline at end of file diff --git a/examples/ladder_nets/utils.py b/examples/ladder_nets/utils.py index 63121e2..56adb90 100644 --- a/examples/ladder_nets/utils.py +++ b/examples/ladder_nets/utils.py @@ -1,118 +1,28 @@ -import numpy as np -from PIL import Image - -import theano -th_rng = theano.tensor.shared_randomstreams.RandomStreams(9999) - -np.random.seed(9999) - - -def scale_to_unit_interval(ndar, eps=1e-8): - """ Scales all values in the ndarray ndar to be between 0 and 1 """ - ndar = ndar.copy() - ndar -= ndar.min() - ndar *= 1.0 / (ndar.max() + eps) - return ndar - - -def tile_raster_images(X, img_shape, tile_shape, tile_spacing=(0, 0), - scale_rows_to_unit_interval=True, - output_pixel_vals=True): - """ - Transform an array with one flattened image per row, into an array in - which images are reshaped and layed out like tiles on a floor. - - This function is useful for visualizing datasets whose rows are images, - and also columns of matrices for transforming those rows - (such as the first layer of a neural net). - - Parameters - ---------- - X: a 2-D ndarray or a tuple of 4 channels - A 2-D array in which every row is a flattened image, the elements of - which can be 2-D ndarrays or None. - img_shape: tuple (height, width) - The original shape of each image. - tile_shape: tuple (nrows, ncols) - The number of images to tile (rows, cols). - tile_spacing: tuple, default (0, 0) - Spacing of the tiles. - scale_rows_to_unit_interval: bool, default True - If True, if the values need to be scaled before being plotted to [0,1]. - output_pixel_vals: bool, default True - If True, output should be pixel values (i.e. int8 values), otherwise - floats. - - Returns - ------- - array suitable for viewing as an image. (See:`Image.fromarray`.) 
- """ - assert len(img_shape) == 2 - assert len(tile_shape) == 2 - assert len(tile_spacing) == 2 - - out_shape = [(ishp + tsp) * tshp - tsp - for ishp, tshp, tsp in - zip(img_shape, tile_shape, tile_spacing)] - - # if we are dealing with only one channel - height, width = img_shape - height_s, width_s = tile_spacing - - # generate a matrix to store the output - dt = X.dtype - if output_pixel_vals: - dt = 'uint8' - out_array = np.zeros(out_shape, dtype=dt) - - for tile_row in xrange(tile_shape[0]): - for tile_col in xrange(tile_shape[1]): - if tile_row * tile_shape[1] + tile_col < X.shape[0]: - this_x = X[tile_row * tile_shape[1] + tile_col] - if scale_rows_to_unit_interval: - # if we should scale values to be between 0 and 1 - # do this by calling the `scale_to_unit_interval` - # function - this_img = scale_to_unit_interval( - this_x.reshape(img_shape)) - else: - this_img = this_x.reshape(img_shape) - - # add the slice to the corresponding position in the - # output array - c = 1 - if output_pixel_vals: - c = 255 - - tile_h = tile_row * (height + height_s) - tile_w = tile_col * (width + width_s) - - out_array[tile_h:tile_h + height, - tile_w: tile_w + width] = this_img * c - return out_array - - -def binarize(X, err=1e-15): - X_mean = X.min(axis=1, keepdims=True) - X_ptp = X.ptp(axis=1) + err - X_norm = (X - X_mean) / X_ptp - return th_rng.binomial(pvals=X_norm, dtype=X.dtype) - - -def half_linear(x): - return 0.5 * x - - -def z_vals(dist, shape): - if dist == 'Gaussian': - return np.random.randn(*shape).astype(np.float32) - elif dist == 'Laplacian': - return np.random.laplace(loc=0.0, scale=np.sqrt(0.5), - size=shape).astype(np.float32) - - -def visualize (it, images, shape=[30,30], name='samples_', p=0): - image_data = tile_raster_images(images, img_shape=[28,28], tile_shape=shape, - tile_spacing=(2,2)) - im_new = Image.fromarray(np.uint8(image_data)) - im_new.save(name+str(it)+'.png') \ No newline at end of file +import gzip +import cPickle as pickle +import sys 
+ +def pickle_load(f, encoding): + return pickle.load(f) + +def load_data(): + """Get data with labels, split into training, validation and test set.""" + with gzip.open('mnist.pkl.gz', 'rb') as f: + data = pickle_load(f, encoding='latin-1') + X_train, y_train = data[0] + X_valid, y_valid = data[1] + X_test, y_test = data[2] + + return dict( + X_train=theano.shared(lasagne.utils.floatX(X_train)), + y_train=T.cast(theano.shared(y_train), 'int32'), + X_valid=theano.shared(lasagne.utils.floatX(X_valid)), + y_valid=T.cast(theano.shared(y_valid), 'int32'), + X_test=theano.shared(lasagne.utils.floatX(X_test)), + y_test=T.cast(theano.shared(y_test), 'int32'), + num_examples_train=X_train.shape[0], + num_examples_valid=X_valid.shape[0], + num_examples_test=X_test.shape[0], + input_dim=X_train.shape[1], + output_dim=10, + ) \ No newline at end of file From f87238056db65708bdf6023885909c6a94ddeed1 Mon Sep 17 00:00:00 2001 From: AdrianLsk Date: Wed, 22 Jun 2016 12:37:04 +0200 Subject: [PATCH 05/10] changed references and added name --- examples/ladder_nets/LadderNets.ipynb | 37 ++++++++++++++++++--------- 1 file changed, 25 insertions(+), 12 deletions(-) diff --git a/examples/ladder_nets/LadderNets.ipynb b/examples/ladder_nets/LadderNets.ipynb index c6d9eb9..8f43335 100644 --- a/examples/ladder_nets/LadderNets.ipynb +++ b/examples/ladder_nets/LadderNets.ipynb @@ -7,6 +7,19 @@ "# Ladder Networks" ] }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## References: \n", + "[1] original (Gaussian) denoising function - called here Curious AI combinator \n", + "Rasmus, Antti et al.: \"Semi-Supervised Learning with Ladder Networks\" - http://arxiv.org/pdf/1507.02672v2.pdf \n", + "[2] \"vanilla\" denoising function - called here MILA UDEM combinator \n", + " Pezeshki, et al.: \"Deconstructing the Ladder Network Architecture\" - http://arxiv.org/pdf/1511.06430.pdf \n", + "[3] for a deeper dive into theory behind: \n", + " Valpola, Harri: \"From Neural PCA to Deep 
Unsupervised Learning\" - http://arxiv.org/pdf/1411.7783.pdf " + ] + }, { "cell_type": "markdown", "metadata": {}, @@ -16,7 +29,7 @@ }, { "cell_type": "code", - "execution_count": 34, + "execution_count": 2, "metadata": { "collapsed": true }, @@ -27,25 +40,26 @@ }, { "cell_type": "code", - "execution_count": 42, + "execution_count": 3, "metadata": { - "collapsed": false + "collapsed": false, + "scrolled": true }, "outputs": [ { "data": { "image/png": "iVBORw0KGgoAAAANSUhEUgAAAcUAAAFLCAYAAACne5csAADI3klEQVR4nOydd1hUR9fAf7ssHaRZ\nKKKCnQ4qakyMJrZojIktJqaY3uub90tiiqaYvOm9mB4TuybGbqKxxK70KqIgCAhI78vu3u+PZS8s\nsLAYEJX5PU+eyC1zZ3fPPWfOmTNnFJIkSQgEAoFAIEDZ2R0QCAQCgeBSQRhFgUAgEAjqEEZRIBAI\nBII6hFEUCAQCgaAOYRQFAoFAIKhDGEWBQCAQCOoQRlEgEAgEgjqEURTIVCZ/y7wB1igc+hA26Q6e\neuk5bpt4P3/k6jq7awKBQHBRUIjF+wIAdHlsfvYp/u4zFo/zkRzYd4DIcz2Y/f6vvHuzN6rO7p9A\nIBBcBIRRFAgEAoFpyg/z6pyH+X5fArm1Dgy++R1+/+VBBmmOs2jyDbwTbcXgG5ew4dcF+FwBo2dh\nFAUCgUDQMroCNs8fxPRVNdywPIVNt3tigZqk9ybxsO4L/nzeH+vO7mM7IeYUBZQffpUbQrywt1Rh\n5eLP3G9SqAEqji/i6h42OHgFM++nNDSd3VGBQNA5KN2Y8MLjDKGCbW9+Q0I1ULyfT9a48+SCoVeM\nQQThKQrq0BVsZv6g6ayquYHlKZu43dMC1Em8N+lhdF/8yfP+V5LYCwSCNqPNZsX0QczfZs1tmxNZ\nkr2A29PfYNeS4dh1dt/aEeEpCgBQuk3ghceHQMU23vwmAf1A8BPWuD/JgqHCIAoEXR4LT256+T68\nKWTNyy/z0lILHnwo9IoyiCCMokDGhoCHXuEGe0j67H/syk1j7XsxTHruBnoJKREIBIBD+FO8OMYS\nbfR37Bn2HDP7WHR2l9odET4VNKCcg0/5MebTbELun4tFzkR+++MerkC5FwgEF0jRjgUMnZHA/8Uc\n4NnBVp3dnXZH+ACCBjgQ/tSLjLHUEv3dHoY9N1MYRIGgy6OjMvsE6SVaQIekU+E95xXuHHjlGUQQ\nRlHQCJXvPF65vRfWw5/j2aucOrs7AoGgsyn5i3v9h+Az9kOSi6P45M2TzHx+Ej2uUOtxhX4sQZvQ\nVZJ9Ih39QFBCp/Jmzit3coUOBAUCQVuwHcSU8R44qA/xxt2LyH/0B54NsOnsXnUYYk5RQMmOefSb\nspo+7yax97qVTHvWhqU7XuQKlnuBQNAWdNUUnM2DXr1xs76yfSlhFAWo035i3pgnSBp1E4PUTsz9\n5CPm9xfLMAQCQddDGEUBALrqAvQDQTeu8IGgQCAQmEQYRYFAIBAI6hA+gaAJGRkZaDSi0qlAIGie\n559/nvLy8s7uRocgjKKgCS+++CK//vprZ3dDIBBcgqSlpfHhhx/y2WefdXZXOgQRPhUYceLECfz8\n/PD19SU5ORkLC7F6XyAQ1HPffffxww8/4ObmRnp6Og4ODp3dpXZFeIoCI15//XV0Oh2pqanCWxQI\nBEakpaWxbNkyAAoKCvj88887uUftj/AUBTLJycn4+/uj0+kAGDBggPAWBQKBjM
FLNNC9e3fS0tKu\nKG9ReIoCGYOXaCA1NZXly5d3Yo8EAsGlQkMv0cD58+evOG9ReIoCoKmXaGDgwIEkJSUJb1Eg6OI0\n9hINdO/enfT0dOzt7TuhV+2P8BQFQFMv0cDJkyeFtygQdHFOnz7dxEs0cKV5i8JTFJj0Eg0Ib1Eg\n6Nrce++9/PjjjybPX0neovAUBSa9RAMnT55kxYoVF7FHAoHgUuH06dP88ssvLV5z/vx5vvjii4vU\no45FeIpdnNa8RAODBg0iMTFReIsCQRejNS/RQI8ePUhLS7vsvUXhKXZxWvMSDaSkpAhvUSDoYpjj\nJRrIz8+/IrxF4Sl2Ycz1Eg0Ib1Eg6FqY6yUauBK8RVVnd0DQeZjrJRoweIt33nlnB/ZKIBBcCqjV\napydnXn88cflY1qtlq+++kr++/rrr2fw4MFG9yUmJjJixIiL1s/2RniKXZi0tDS0Wq3RsSlTpnDq\n1CkA7rnnHl588UWj846Ojri7u1+0PgoEgksHtVqNtXX9BuQ///wzd911Vyf2qP0RnmIXxsfHp8kx\nKysr+d8uLi4MHDjwYnZJIBAIOhWRaCMwQgQOBAJBV0YYRYFJFApFZ3dBIBBcQnSFQbMwigKBQCC4\nIK7EgbOYUxQY0TAbVavVolarAdMjxI4+3pnPvtyPt2dbbm5u2NjYmHyOQHClIIyiwIiioiL53x9/\n/DEff/xx53VGcMnw4Ycf8swzz3R2NwSCDkeETwVGdIU5A0Hbabx0R9A1aawfrsTwqTCKAoGgVYRR\nFHQVRPhUYIS9vT3nz58H4IYbbmDevHnyOVOjwo4+3pnPvtyPX2hbOTk53HffffIxsV5V0FUQRlFg\nRMPF+wEBAVdctQqBeaSnpxv97ejo2DkdEVzSiPCpQCDoEjQu+i7CpwLoGjkHwigKjGgo9FfiKFBg\nHiqVcRBJGEVBV0EYRYFA0AThKQq6KsIoCgSCJjQ2ihqNppN6IriUuRKjScIoCowQ4VMBiPCpoHnE\nnKJAIOiSiPCpoKsijKJAIGhCY09RhE8FzXElRpOEURQYIcKnAhCeoqDrIoyiQCBogjCKguYQc4qC\nLkdXEPqORFNwiuxK0+d1JRlkl+tMX3CJoFQqjSIFwigKmuNKjCYJoygwyZUo8B2JOvVnnl60l7I6\nJ0udn8DeLZvZHZ1Ddd01SrJZ/n+vsSPv0jeMDb1FMaco6CoIoygQtAfqeP43fy0jFy5gsLWO4iNf\n8c6KZCzcncj8YTajbl9GmhpwGsVTj1qw+L6V5FzidrGhURSeoqCrIIyiwAgRPr0wijc9wQf9nmWm\npxIo5fBnX3DYNoTwYddw1+svMfDvN/gyRh9XtQp4mEfz/8vrR1uIs14CNMxAFUZRAF1DPwijKDCJ\nCJ+aSwn7lx4hfP4w7AHoxtWvfc+SGd5YAbrKAgp1zni5GIxMTybcac+a9amom2lNVxzBj5/v4lw7\n2KGSiO/5ctc5LqQpET4VtMaVqCOEURQI/jXFxByoJqi/fd3fShz6jySklxVo8/nz7a+wfvYL7vGt\n35bLcYAf5XsSKGvcVHUin96xkPxrx+Bu0fhk23EKmYTrsrt45WBJm+8V4VNBV0QYRYERYp3iBaCp\norRKibWq0eukK+TAhy+yMfBr1vxfOE4NT1sqUZdUNvLg1Jz44iFWX/spTwfatE/fLLyZ+84Coh9e\nzOE2Rmsbhk+FpyjoKgijKBD8W1Qu9OutJfN8df0xXSGHv/mUY8Pe5NMHg6jZ9SZL4+vP1+ZnofL1\nwLphO8V/89IH1jx110CsaD+U7jN4MXwNT6/Ioi25PcJTFDRGzCkKBBcbTSa/vfE+B4pNXVBN7NeL\n+SW52tQFnYAb4+5wZtfB/DqjU03M29O47pHXeOZ6DywVCrrf9A9uHgbvr4bUvekE3xqGU4NWCv/+\ngL8GPcykXu39WtoTctsY4t9bR2YbrOIlk2
hzWcpE16DTokkdKBPCKAqM6NzwaQ1Ri29lfdi9jHHW\nkLHuGWbe9iiPzh3L0EHX8NiqNGqwIejOSRy74wn2lF/k3qX9zuInn+etTz7j48+X8dnCt9lToANU\nDLxvEZ7LfiVFA2BD8EuHqJQkJMN/VTuY7VbXUGUUP+0M56Wb3Ru0XkHcb4dwviHUyFC2F46BNzHk\n5CoOF5t/z6WRaHNpy4SgM+hgmZAEggb06dNHAiRAevnllzvmIbVlUlG5pu4PjVRr+GfBCmlkj2el\nmFpJkqQCacP0blK3ab9J56Vy6fAzfSRcbpf+KtZfmv5uX6n/Rxkd079mqEn9Rrpp4LXSOzEVkiRJ\n0vnfb5LsbadI688brqiQji+aIM39KV2qbbGlainhwxulmd+dbnRdlvTVAKTrfi/sgN5LklRxSHrA\nzln6b2y12bf4+vrKsvDCCy90TL+MaCALBi5hmeiKlJSUyDIBSKtXr774nehgmRCeouCiUnP6V+4J\n9cbNwRrXAcMZe811PLw+Ew1QcuB7YsbPYJAKwJUZGwoo2HgLbroyMtJKsR0yAm9bfTu9b5xD1tKd\n5F+MTmszWf3kc+wf/iIPBtkBWirzc1H63USYs+EiO4YtXsszlRtb9MY0OfvY5fwGP9zng9E+FOoi\nMoqs6OlsaXxDdRzv3RDIhDejaD1Pppq4924gcMKbRDW+WOWEh10pp/OaWwTSPBczfKorPsq7E1yw\n9HuI9Rn1fbxkZUIAdE74tKNlQtX6JYKuhNTB4VOFfQj/tyef76xziDtynDN2I5ky2hMVUHU6imof\nL2SzoFSh0pVw9IMH+cDyFbb/Op++dRJr4eyOKuWMGYbi36PN2MjHWxVM2j4SZwBKiNx0AvepV+Fh\ntGzCmVGPPNFiWyqPiTxxTzMnJB06SYFK2eg715ZyJimeJM8SM9Yaaik9k0R8kiclTS6WkLQ6dCby\nJB5//HFOnDiBJEnodDokSSIzM1M+v3z5cvbv3y+Hg3U6HY888gj33NPch2kj6iQ+mnk/hyat4aD3\nVv7v5oex+es7prkpL1mZEHQeHS0TwigKLipWvQIYCoA3Idd7E9LgnGU3N4gvRgNYAGhy2PrSHSzJ\nmsgTc2xZ8cBrSBs+51pHQF1KuZVdu2ZpmqL6XBLnVIN4LLCb/kB5Ilsj7bj62f7G2aP/BksnPB1r\niCqtNT5uP4bP0yU+N6sRe8Z8no7U3MWaUnKqHfBysWzmJIwcOZIvvvjCZMvZ2dlkZ2fXP8nenunT\np5vVq1axGsozf0XzHwslMIV/btOhU+qDWJeqTAg6j46WCRE+FRjR0Z5i+YHnGD1kODfe+yT//c89\njB/qy8Cxr3C0Apyvmovrvgj0y8xL2PPU1Ux7928OLn+RO+Y+ytKiIfSx07dTErkPp9vH4druPWyK\njVcwvi4O2FsqAQ2Zf3zAmqIApvo7tN9DlE4MDrHiZGpJq8sm1GlreG7+E/xyqsb89qszSKjyZbh3\n8+sfb7/9dgYPHmx2c4888gjdu3c3//ktUH7gOcb4h9fLhP8ABl/iMmEaDZm/vcH7clqkloqSinov\nvzqWrxf/wuWaKCtdpCUZnaon2m/2U3Al0Lt3b3kS/dVXX2339s+vnSYNf+GoVK4tlg6+GCApcJZm\n/JQm1UiSJGnSpM9Gj5a+SG+cbdGYAmntNH9pYYz5SSP/jnIp5vN7pdv/+6H07XefSa/O9ZQswz6X\nTrfWzTaS89MIyXHqJqmolevK9j0i+WIpBS6KlqrMbLvi0IOSs/cb0okW+rx8+XKjJApT/9na2kq5\nublmPrl1Lk+ZaJ7qyJek0bdvlgokSarN2Ci9/cZH0tKvXpPuHDddeutYqSRJklR+4Alp2P27pbJO\n7emFUVxcbCQLa9eu7ZDndKZMCKMoMKKjjWJp5C/SqsQyKeePeyQvkPo9tE3K09afr07+Urr7wZ+l\nUyZTOD
VS7qYnpNs/ipc6R/2dl9bf4CQNeNl8g2Q251ZI1/dZIO0xR1uWHpA++jbWzD5UScee8pKG\nvHtSakmNaLVayc/Pr1Wj+Mwzz5j1VHO5/GXCQIG0YmQP6dmYWkmStNKZr66Wwt/Q9ynnpxGS24wt\nkj4pMl16t29/6XJMlL1YRrEzZUIYRYERXl5essAvWrSoQ55Rc3KpNMURSTVssXSsrEw68uH/pL1F\n9eers+Ol9HJTd1dLOYln298gmaIqUfr+mcelL2L1T9Rk/Srd2DNAejW6I3pQIR357zDp5pU5kraV\nK4sPfiJ9fKjUvGaLtkhzB90mbTFjtceqVataNIg2NjZSTk6Oec9tA5eVTJiieKN0vc1caa+hI9pa\nqVYrSZJUJh14zEcKXZKg93QkjZT4nI005Ie8zurpBXOxjKIkdZ5MiDlFwcWl/Dj/m/cY25nCl6tf\nZLginrWrj1HcYKrC2sOfvvamGrDGfagX7VQZtFV0RZGs+nkbkbHH+WfDZzz98K/0fn89C4M7ogd2\nhC/8EN+vnmV1Vgu5ppXxbDjmy8zhjq03qStkxwtLsHn7Qya7tH753LlzCQgIMHn+gQcewN3d3eT5\nC+Iykwk0xaTHHGTf0RQK1ABqinPLoeo0UdU+eBlymZQqVEoN2Rtf5UPLD/jj//zqEj4scHZXkXLm\n8suTlS5WmbdOlAlhFAUmaf9EmxriP7mPRREaHAY7ceTNO5k4/Hrez+yBa/NJkZ2O0mM+27OOsDDA\nAkvf6byxbhtf3Tmo/bJOG+M8lreXzSZ1+W5yTdlFuwDufvJGvM3IHS859jP7Rn3DVzPdMWfTDYVC\nwaJFi5o9Z21tzfPPP29GK23hMpIJXRFHv3yImfOf59u/0ynNO8ayJa/x5QcPcsf3p8GyG24UUCwX\n/9GQs/MTlubM49t3RnJqcwIVdWfUpeVY2V7+ebIds06xk2XiArxLwRWMp6enHBpZvHhxZ3dH0Ano\ndDopODi4Sej00Ucf7eyudR6lx6UPbgmTJr68XcppOI9VvFu638tDumd3mSRpkqWXXAdJS3MlSZK0\nUsFfD0l9sJZce3SX3Lp1kwLfiK8LnxZLv41zku493LkzoBdCUVGRkUysW7eus7vU7ghPUSAQGNGc\nt2hlZcULL7zQST3qZLR5bHz0Jv7Hf/lp8WTcG3roTv5Mumo80/wdwGIA97/mxk/rzqBFieuErzkj\nVVOQl8/5khJiX/bXh08L/+L7/Md4KqTD4g2Cf4EwigKTiP0Uuy633HILYWFh8t8LFizA29u7E3vU\neVRGvMtTKx15/LVb8GwSg3Yk7JFnGOsKYEG/B37kzqNvsfy0iQLq2jw2L/6dCatfJegytImS2DpK\n0NXoCkIvMI/FixcDYGlpycKFCzu3M51GJbHLV5Le/w5mDW7OitnQf/xwehiMpfVgHlm6mGEWJgor\naHT0f/57nva/DC1iM1yJA2dR5k0gEDTL9OnTGT58OMHBwfTt27ezu9NJVJEZk0+34OH0biYvRlt+\njkJVT3rYNPAvrD3wN/V1Wbsz1KtDOipoJ4RR7ATUajVVVdXodG3ZB/3i0LBP1dXVFBUVd15nWsDS\nUoWdnR1K5eUb7NDpdFRWVlFbW9v6xZ3E88+/QP/+/S9ZOVAoFdja2GJt3VGZnCocujtgY2/dTFit\nkoQ1aymb+QQ9GuT+a/MOsXpNDM7T72Vq37b3Sy8XldTWdtYelqYpKS3F1dVN/lutrr0kZUOpVGJr\na4OVVdu/f4Uk4mUXBUmSOJORSURkNCknT12yYcoNv62lqqoKgMCgYAICgzu5R6axt7dneFgIoSFB\n2NnZdnZ3zKaiopLo2Dgio2IoKxO74rYHgwb2J3zEMPp4927nlnUU//0YIx6x5afjHzLGsf540ZFl\nrCyayINTvIy9i5oEFvuOpXRLNh+2IZmmqqqKyOhYjkdEU1FR0foNghZR
KBQMGtifsNAQ+vX1NjvU\nK4ziRUCSJPbs28+hw8c6uytXJN26OXLn/Ftx6tats7vSKvn551mxeh0VFZffwu3LgevGjWXUyOHt\n26iuhGOfPMbCg32Zf/+NDLXKIT7qFDWDbmHBjQOwM7pWi7Z4Ezf4r+L5lFVcb0Z9BYDy8nJ+/nUV\nJSWl7dt3AQDhw8O4/rprzTKMwiheBHbv/Uc2iL169iB8RBienp5YWOgDMvnnz5OTkytfb2Vlha9P\n3za5/hUVFaSnZ6LV6Vd8K5VK+vbxxtHR/J0cams1pKWfobq6voR/r5496Nmzh9mjLEmSOHs2m6Li\nYvmYg4M9ffv0kT+vORQWFpGVlYOEXjwtLS3x8emLjbU1SFBVXU1CQhIRUTFotVpcXJy5+87bsLO9\ndD3G/PMF/Lp8NVXV1ahUKsJCgvAbOhg7OztQ6MPqp9POoFbXb7Lr4dGLHm3YjUKn05GZmUVJab1y\n7dbNkT7evdsUaj5/voDsnHPy3xcmk5Wkp2fIMqlQKOjbx5tu3cy0FOhlMj39DFUNZLJnj+706tUT\nhUKBTqsj+9w5Dh8+Rl7+eQBunDqZoEB/s59hLpridGKikil19mdEkDcODTNRdcXsX/II30ljCcp4\nj//EL+H84dtwM9laPWq1mp9/XUV+/nmUSiXDw0Lw9x+KrY0NKPTTGGlpGdRq9GF2BdDbywsXV2ez\n+67T6khLz6Cist4DdXFxpreXZ5ve7dzcPPl7BrC1scGnX19UlubPxJWWlpGRkYmuzvQolUp8+/XF\nzt6ulTvrUdeoSUs/Q02Dd8XTw53u3fXfuFarIzs7m2PHoziXmwdAaEgQN0ye0GrbV4ZR1FZQUm2D\nk705NTsuLsXFJXy59HsA/IYO5qYbb0CpVKLVaolPSCIyOpacBsrHgKWlJQH+QxkWFkLPHqaVYlp6\nBpFRpkOy/fv7MCw0mP6+PiaFv6CwiIiIKOISEqmpaboze88ePQgLDSIwwA9Ly+ZLSlRVVxMdHUdU\ndCzFJSVNztvb2xEcFMCw0GAcHZtXijqdjsTkE0RFxZJ5NqvJeQsLC/yGDmZ4WAgeHvpSY+lnMli1\n5jd0Oh3jrr2aq0aFN9v2pcD63zdxIuUkNjY23H7rLNzdewGQl3+eiMho4hOSmp1f9HDvRVhoMAH+\nQ7GwaF7GKyoriYqOJTomjtLSsibnHR0dCA0OIjQkCHsTyker1ZKQmExkVIyRQTRgaanC328ow4eF\n0LNHD5OfM/1MBhGRMaScTG1eJn19CAsNZkB/0zJZWFhERGQ0sfGJ1NQ0zeTs2aM7oaHBBNXJpEaj\nYdWa38jIPIuDgz2PP/LARZ1vLt99Fz7PjSH62D2UvT6QGc77SHq6r1np/VHRsWzbsROAubNvZkB/\nXwCys3M4HhlNUnIKWm3T8kZ9vHsTGhLE0CGDTH7W0tIyIqJiiI2Np6KyaXTCxdmZ0JAgQoIDsLFp\nvihabW0tsfGJREXFGBlEA9bW1gQF+DEsLARX1+ZrCUqSROqpNCKjYjh1Oq3JeX2ocwDDwoLp17dP\ns20A5OXnczwimoTEpGbnXD09PQgLCcLfbwgWFhbodDq27dhJTGw8APcuuAP3Xj1Ntg8dYhR1VObn\nonbywLnDqxhpyNz0Pj+fcqV76X5WHx/Ke8tfxJySkG3l77//xtXVlZCQkDbd99euPRw7HomDgz2P\nPnQfKpWKqupq1v+2kYzMs/J1Dg722FhbI0kSRcUlcsKLUqnkxqmTCfAfatSuJEns3vsPh48cl49Z\nWVnRrc4zLC+voLqBMgkLDWLShOuavDwpJ1PZsHErGo1ewBQKBa4uzigUCmrUaqM5L08Pd+bMurmJ\nUi0sLGL12t+NvENnJydUKgu0Op3RRLytrQ1zZ92Ml5enURtqdS1/bNrCydTT8jE7O1vsbG2RgJKS\nUrmPAJMmXsfwsBAAdvz1NxGR0Tg7
O/HIg/d2aJp4dXU1v/zyCwsWLDA5QGiOouJivlr6AwDTp00h\nMMAPgLiERLZs/dPo93ZxdkKhUFBdU0N5ef3Ivm8fb2bdMr2J8srLz2f12t+NfitnZydUFhZotbpG\nXrsDt865mV49jRVDdU0Nv/2+kfQzmfIxe3t7bG2skdAP7gyKWalUMu2GiQQGGHtjkiSxd98BDh4+\nKh9rSSZDggOZMun6JjJ5MvU0v/+x2UgmXVycUTYjk+7uvZg762YcHOwpKirmq2/03/GcWTMYOKA/\nFwc1CS96MtH+MJkL7fg2LJzUz3dw98CBBPZqXQn+9MtKsrNz8PcbwozpUwE4eiyCnX/vla9RqVQ4\nOXVDAVRWVVFZWSWfGzRwADOm39BEHjPPZrF2/R9GkR9XF2eUSiW1Go1RqNbVxZlb587ExdnZqI3y\n8grWrN/AuXP1kSxHRwesrazQSRJFRcXywEelUnHLjBsZOMDXqA2dTsf2P3cRHRMnH7OxtsbBQV+4\ntLSs3Cg6ctXocK69ZkyT9zg2LoEt2/6Un2dhYYGzsxMKoKq6xmgetl/fPsy8ZTo21tbodDq+/f5n\nCgqLjL5jU1yAUdRRmZtDbXcvnBoNWqsTPmPejf/hj7zrWJ66lds9lEANKas/JSL0SW4b1M5rc3QZ\nfH3tHM5/vY+X/Yv4OTyA3xefYsOkCrYt3YnX3XcR1E77wP7vf/9j4cKF3HzzzSxevJigoCCz7vvu\nh2Xk5Z/nqtHhjBt7NbW1tfy6cq3sHQ4ZPIhhYcH08e4tC0FVVRWx8YkcOx4pj/pvmn4DAX71htFg\nbAHce/UkfMQwhgweiEqlD2NotVpSTp7i6PEIsrJyAAgK9GfaDZPk55xMPcXa9X8AegUYPjyUoMAA\n2ehJkkRWdg6RkTHEJyYB0L27G3fdMU8fxkRvrH76ZQUVFZUolUpCgwMJDQ028m6LS0qIjo7jeGQ0\narUalUrFnfNvxaPOU9Jqtaxe+zvpZzIAvXc7IiwUH5++cl9ramqIT0zm6LEI2chOnDCeEcNCycrO\n4edfVgLw+CMPtCk811bKysro1q0bPj4+LFy4kLvvvtss4xgTG8+WbX9iY23NU088jIWFBfGJSWzc\ntA3QhzhHDA8jKMAP27oQsCRJZGSeJSIyhuQTKQB4eLhzx21z5GcWFBTy8y8rqa6pkUOyoSFBuLnV\nb6taWFhEVHQsEVExaDQarK2tueuOefSoCzXV1tayfNU6srP1cjJ40ECGDwsxlsnqauLiEzl6LEKW\nyenTJhsZxl2793LkaARgmCYYxtAhg4xk8mTqKY4ej+Ts2WwAAvz9mD5tsvyc1FOnWbv+DyRJwt7O\njhEjwggK9MfB3l7+TrJzzhERGUN8QiIAbq4u3HXnbdja2MgGZmT4MK4ff20bftl/R9k/T3HTd31Z\nMPI8a979jR6z5zH3v68ytVfLvqJOp+N/730MwOyZMxg0sD9HjkWwq84gurq6ED48DH+/oXKGrU6n\nIy3tDMciozh9Oh0AH5++zJ11sxxJyMrOYfnKtfLvPTwshJDgQJyc6ufd8/LziYzSRxd0Oh0ODvYs\nuPN2+f2pqq5m2S8rKSgsAvS/1bCwYDw93OXfq7yigti4BI4di6SishKFQsHcWTfTv78PoP+9Nm3Z\nTnyCXn/07u1J+PAwBg7oL/e1traW5BMnOXosgty8fABGhg/n+vFj5b7GxSewacsOQP+uhI8YRmCA\nnz7EjHEi44mUVEDvNc6fNxtLS0uORUTx187d2Nna8vSTj7T4m7TZKOrOrWTykBfw25bIJ6PtG55g\n0wvvcmrkDK4fPRx/T3uUQPHfTzBn8xzWfjgWZwB1Jjt/2kS6lSWVRbaMWXA7w1wuPMyh02hApUJZ\nfpDHgx7Hc/NhFvpZURP/PvPe78uXP8zBox2iKG+//ba8gFmhUDBz5kwWL17c4o4CAF989R0lpaXc\n
MHkCoSFBbN66g9i4BKD1uY+qqirWrN9AVlYOSqWS+xbcQY8e3UlITOaPTVsBvaGbOmWiyfCJJEns\n3LWHYxFRAEyZdD1hocEUl5TwzXc/o9Fo6NmzB/PmzJRHbs2RfCKF3//YgiRJDB40gFm33IQkSXz/\n06/k5eVjZWXFvDkz6d3b02QbBQWFrFi9jrKycuzt7XjkwfuwsrJk1+59HDmq93gnXHct4SOGmWxD\nrVazfsMm0tLOAHDX/Hk4OTny2ZffAvDg/Qvo3sAgtDcGo2jAx8eHl156ibvvvltW/s1hGPl37+7G\ng/fdTf75Ar7/8Rd0Oh1eXh7MnXWzbAybIzYugc1b9UohKNCfG6dORqfTsfTbnygqLsbO1pZ5t85q\nMTSUm5fHytXrqayswsmpGw8/cA8WFhZs3f6XPIqfOmUiIcGBJtuoqq5m7foNnD2bjVKp5N4F8+nZ\noweJSSfYsHELoFee026YaDLUK0kSu3bv5egx/aBu0oTxDB8WSmlpGUu/+5HaWg09e3Rn3tyZODiY\nHtWmnEzltw2b0el0DBzgy5xZN7P+942cSEk1e/6oPdGWF1Jp5YqjRTnFNXY427WueKqqq/noky8B\nuOfu+fpB84o1gH5wOHPGjSYHXZIkceRoBH/v2QfA6FHhjL/2ampq1Hz1zff637lbN26bNwtXF9Nb\npGRknmXNut9Rq2txd+/FPXfdjkKhYO36DZxMPY1SqWTmzTcyaOAAk22Ul5ezas1v5OWfx9JSxUP3\n30O3bo4cj4jiz527ARg5YhjXjR9rMpKj1WrZsu0vebBzy4wbGTpkEHn5+Xz/469IkkTv3p7MnXWz\nyVAvQHRMHFu3/wXooxFTp0wk9dRp1qzbgFKp5IX/Pm3yXriAijZV6ZGc05xlw6pYGiYN6/KPcaTv\nPTw861oC6wwilcd47bE0Hn6pziBSybHFt/KZ9XTuWfAA949L4ckFP5D+L5bjKFUqlJpsNr76IZYf\n/MH/+elHU9YBD/O0xcs882fxhTfegIZjB0mSWL9+PUFBQcydO5eEhAST9ymU9QJQUVEpj5jGjR3T\najKAra0tc2fdjKOjAzqdjoioGACOHNOPxvv17dOiQQS9AZ9w/ThZoI8ei0CSJKKi49BoNNja2rRq\nEEHv0U6aMB6AEympFBeXcCYjk7y6kd0tM6a1aBAB3NxcmTdnJkqlkoqKShKTklGr1URF6z9X+Iiw\nFg0i6MNxs26+SfaEjkVEGr9kF3mKPC0tjfvvv59Bgwbx/fffG4V4G6Ko+42kujBpRGQ0Op0OR0cH\n5s6+pUWDCHpDeO3YMQDEJyRRUVHJydTTcmh09swZrc6V9OrZkzmzbgb0Hn7KyVNUVlbJg7SxV1/V\nokEEfWLF3Fk3062bo14mI/W/3dG6qEXfPt7cOHWSSYMIepm8fvy1DBk8SL5XkiSiYmKprdVgY2PD\nrXNaNoigDxtOnngdoA+5FhYVdWqFFQsHVxytAAsHswwigLaBvKgsLOToT48e3Vs0iKD/HkeNHM7w\nYaEAREbFUFtbS0JiEpWVVVhYWHDrnFtaNIign5ucMX0aAOfO5XI2K5vCwiJ5KmPyxOtaNIhgCMvP\nxMbGhtpaDdExcUiSJA98hg4Z1KJBBH049Mapk+RlNUfr9NzxiGgkScKpW7dWDSLoDeE1V48GIC4+\nkcrKqjbJRZuNYnmOjlueHUPxhlXENrCKRdHHcR3V32j/qpK9S1jh9xSTDSlYJQd47/sapk3wwgKw\nGzyF4OgP+CWlaXKH2Why2PnJUnLmfcs7I0+xOcHQKQdGPTKRPa/+QW6LDZhHcw61JEmsXbuWoKAg\nbr31VhITE5tcYzBYOp2O2Lh4dDod1tZWjBge1uTa5rC1tWXEMP218QmJZGSeleP7V48ZZVYygUKh\n4JqrRwFQWFTM6bQzxMTqPYOwkOBWDaKBkOBA7OvCWFEx+qQa0I
cp+vv6mNVGjx7dGTpErwwjo2NJ\nTDqBWl2LUqnkqlEjzWrDysqS0SNHAHoDXdlMAsHFpjXjaFH3O2l1OtRqtTwaHjE8TA4BtcaIYWFY\nW1vJshRZN5jw8enb6oDEgFeD3yoqOobYuAR0Oh1WVpaEjzBPJm1sbAgfbpDJJDLPZsuh1zFXjTRb\nJq8eo5fJ4uISTp1OJzpGnwwRGhJkdtZ0cFCAfG1UdFwrV196aBok0NTUqEk5eQqA0SNHmD1nfdXo\ncH0OQE0NiUkniIzSv5d+QwfL2ZitMXCArzyoioyKISpG34ajowPBQS1Hwww4OjoQGqIfVEXFxHHq\ndJqcdHf1VaPMMkxKpZIxV+nlIis7h8yzWSTUTd2MGBHWqkE0ED58GFZWlmi1WmLjTTstzdHGijbl\nnEpyZNz8hzj46UJWxb7F6NH2QAmJx6wZ+VTDBIxK4lbtoPu0bzCIt+ZcBNFlzsyzrXtpVC54WWWw\nN6UC/FqYkK45w5ZP3+Xb3w9wqlSDBCi6jeGd39/GatEoJv5UTg/HL/g/tSf/PXicm+tusx4wiYD4\nTzhUfDc3OzdtdvHixaSmpqJUKrGwsJD/3/jfFhYWHD16tGkDdeh0OtasWcO6deuYO3cuixYtYsiQ\nIYCxUYyr8xID/E1ncTZHUKA/e/btR62u5eAhfT/cXF3w7m1+vahePXvi4eFOTs45Dh89Jk/Uh4S0\n7Bk0xMLCguAgfw4eOkpsXIJsjEJb8S4aExIcSEJiMufO5RJZZ1iHDB7YpgX4Q4cM4q9du+sUSX1y\nzo4dO8jI0IdWJUmS/2vp79b+3fBYc5mQDTEYxyVLlhiFVRvKQUrqKXkgEBRg/tIBKytLAvyGEhEV\nQ0xcAoV1cz2hwebNbxsIDQnk1Ok00s9kyok8/n5D27TcIjDAj91791NbW8vBQ4cBfSZj3z7mFw3v\n2aM7Xl4eZGXlcOTocTlRoi3ypFQqCQ4KYP+Bw8THJ+LtfXnVUNNq6o3iqdPpSJKEjbU1QwYPNLsN\nB3t7Bg8aQPKJk0RGxZCXr4/etF0ugti2YydJySnY2OhzBkKCAtuUxRsSHMihw8eoqKiQ55d7e3nS\no4UM+sb06+uNs7MTxcUlHDh0hNpaDRYWFnJymjlYW1vh7zeUqOhY4uOTGD/uarPvbZtRrDlDbGUo\nN/cfyb3hJbywPJolo8fgUJlGZE0ItxvlN5QSt6eW0AfqR3ya8gIqscTCMGBQWmKlqKSgtIUyVxUR\nvD5pLvuuXczzrw/j57n3kf3uMb6Y0BffXq5Yf30G6WsT99r5EKg9StR5HTc7N/1ht2/fzpEjR9r0\nFbSETqdj1apVrFmzhnnz5vHqq6+iVBiUYb1CNSSYmIudnS1OTt0oKiqW23B379XmUJGHey9ycs7J\nyy7sbG3bvODdo27X9ZqaGtlguLu3HLYz1QZATXXd5+nVtu/E0tKS7m5uZGXnGGWu/f7776xfv65N\nbXUEjY1j2DD9gnKtVid//05O3dpcicfDwx2iYoyMc1vlqeH11XV9aWsbtra2ODs7UVBQSLXhN3Tv\neQEy6U5WVo6clWpjbY2zs1Ob2wCMMlsBDhw6Ujev5iiH5ROTT8iJZ9eNuwYLCwvO5eYRF6/33EOC\nA+nR3Y0atZp9/xwE9CHhQQP717V5lMrKSro5OjIyXN9mUnIKZ7P0iUPjr70alUpFbm4esS222ZtB\nAweg0dZHFDR16xB79Oje4vx0899BL5JPnDSSi7a+l4YlQjpdvYy6t1EuXJydsbG2prqmxkhXtQWF\nQoGHey+Ki0tk/eDs7GR2RMWAh3svoqDVgWxj2vTNa3LiyAu7hh6qnkx+bAJPPfQl/7wxhomF0WQP\nGY9ReoOuivwCBT2d6x+hsLRGha7BtI8WrWSBpaWpF6mG+Hdv44uAZZx+awz2VOIw6ylm57kwqF+P\n1ncSt3LFy66c5MJaaGav9O
bW/rQHOp2OTZs24evrSx+fgfKxf0N7zJQ0aeMCGm1e57WtoWbbuJAP\neBkU6E9LS2PdunX06KlX3Dpdvcy1W/fb3FD7PLljZLLtrZq6JTomjvMFhXh5eshGMS3tDMcjowH9\nvL6FhQXnCwrkZST9+vahR3c3atW1RktLDEYxJjaO/PMFeHl6yEYxLf2MnMg29pqrUAHnCwrl+/v2\n8aZHdzc0tfVtSpLEoIEDjDxFRRs8sma+haZH2vhdNvtaXtB7aXxTh8hJB9IGo6gl63ASA0fNxgIl\nbtc9yUzVDD7Zkkmw4wm8g+YZGymlCltLDWer6+fiLF1646T5k3LD4EhTSaHakT7dTYRtalL45bNs\nZmwNxR5AV0T84XJ8J7u0bhABdGoqNJbY2zT/lQ4bNgx7e3u0Wi06nQ6tViv/1/jv/Px8ioqKWn2k\no6MjTzzxBM8++yxubm78uGyFviuScaHttiBJUpNRsGF03hYat6FW16LValtMimjSRjPPbevnae76\ntrYBUF3V9B5HR0fc3NxkhaBQKOT/Wvq7tX8b/i9JEmlpTRcfN8eUKVNYvHgxI0eOJPlECvGJJ4wL\nrtd5221RXqa+u7Z4/M230TZ5kiSJqup/L5ON26itbbtMVpmQnccevr/JsWk3TGLaDZOMjgX4DTVa\n7gT6dcOLXvq/Jvc/+tB9TY5NnTKRqVMmGh3z9xuCv98Qo2P29k3bbDinqKyTg8bvqTk09x1UV9eY\nLNTQfBtNn2vquzWFRqNpUoCiuXZbo7EsVVVfyLvS9udCWxJt1GnsiR/EqN51dtRxJE8+6sO+V19n\n5UE7Qvs1dm0d8PW34kxO/Zeq7DmGmweVkpRT19mK08Rpw5kZ5AjVafz1+wHOGeUn6NBY9sLfS//D\n1iR9zyfq//DhVDNT7qtzSK/tg5+JBbRff/01e/bs4Z9//uHAgQMcPnyYY8eOERkZSXR0NHFxcSQm\nJnLixAmefvrpFh/VrVs3Xn75ZdLT01myZAlubvoJbmVd9qlOp5PnABMSk83rfx3pZzLkOUBf336A\nfnTalvqZDSfxfX30bWg0GvmYuRgmvb29vXB1cQYgMelEG9vQf34rKyv69vWW22jL6qBz53Ll9VOe\nnvXh2Pfee4/z58+Tn59Pfn4+eXl55Obmcu7cOc6dO0dOTg7Z2dlkZWWRlZXF2bNnyczMJDMzk4yM\nDM6cOcOZM2dIT08nLS2NtLQ0Tp8+zenTpzl16hQxMTGt9m3KlCkcPnyYbdu2MXKkPnlIqdQrea1W\nR++6wgWVlVXy2kxzMXx3fbx7ywlSbZUnw/X29nZypl9CYlKbvv+MzLPyHKBvXeJO+pkMo2IDraFW\n15JyUr+mrL+Pfq8lrVYrrzMzl/rv5PKaTwTQNPAUvet+i/z88/K8oDnodDoSk/TfgU+/vnK+gmFt\nsbkY3u0e3d0uWFedSEmVI3AGPZNyMhW12vydYMrLy+vXLNfJVkVFhVGxk9aQJMlIV7WF1o1i2V4e\nCwtl4uxnOD56In1l39Ia/0c/5O7a73jrZCBDmqyXdmL4TT05fjgH2c5Z+/PkJzcR8/H3HEiN5/cP\nf8HpxY+5pZcS9amVPD3zRh5vuITCeiiPLhnJjne+ZM2yt3ji9SJe3rqEEeYOfoqOscfhFq5qOSP5\nX+Hk5MQrr7xCeno6b7zxBq6uxgbbMKco6SRCQ/QT3znncpst7WaKiLpwj5eXByNHDJczEA0ZpOYQ\nn5BIbW0tFhYWjBwRJgtbZFSM2cqwoKCQtHS9sA4LDSE0NLiu7SSzPT2dTidnxwUG+DF8WAigz0A8\nnZZu9ucxLE9xdXXBy9PD7Ps6ksmTJ3Po0CEjY2jAokGiTa+ePfDy0vfZsKTBHLJzzpFTl3kcFhos\nL5+IjU0we/spjUYjl7wKDgpgWJj+N8zNy5czSM3BIJOeHu6MCh8mV2NqWLWkNRISk1Cr1SiV
SkaM\nGCaXN4uIjDZbJguLiuQF7KEhl+6OLqbQ1s0pqlQqfH36yvOpbZGLU6fT5IIKw4aFytWvIqNizJ62\nqayqkge3oSFBhIXqddXp0+lmbw0lSRIRUdGAPpt1xIgwlEolarVaNlDmEFW3nMPGxoaR4cPkko5t\n+U6ysnPkQgBhbZSL1o2ilTfjbxxGv1FP8tLknsY3OI/l3V1H2Pfd9GYK3yrxvvW/eK1aSf2KCyVu\n17/Lhg+uh1P5+D62ihWPDcUasPJfSGzGT4yxa/gyWNH//hVsevNGhk9+lq9Xf8S8/uZWxdGR8ccv\nWD12D4Pbodxc45fUycmJRYsWkZ6ezuuvv46LibVADbMO+3j3xq2uNuDGzduorKpq9p6GRMfEyd5c\nWEiwPgPRX5+F9c+Bw2TVTfC3RG5ePrv37gdg6JDB2Nraygb6TEamvO6xJWpq1GyoKxjg6OjAgP4+\nBAX4YWFhQW1tLRs3b2t1jlaSJP7auVtO0w4NCaJnjx6y57R1+1+UlTWt29mY5BMnZcUeFhLU6bt/\nG4zh9u3bGTVqVLPXKC3q5UCSJPlFTTmZapYhqayqYuNmffUbN1cX+nj3JiQoUD63ZdufrSpASZLY\nuu0vuQZmaHAQvb085bT9jVu2G5UPM0VsXALJJ04CeuNsaWlJYN2a2wOHjjRbt7Yx+fnn5UXnQ4cM\nwt7OTlbEmWezjMoXmkKtVrNho14m7e3tm5QXuxwweIoWFhYoFArC6t7LqOhYs6I4paVlbNuxC9BH\nD7q7ucrvdlFRMbv+3tvqAEOr1bJx0zY0Gg0qlYqAAD8GDugvh143bNxilMxmisNHjsmVisJCgrG3\ns5OzaP/es4/8ZuqmNibzbJacYW+oa2v4TpJPpMhraluisrKKTVu2A3qv1zAANZfWjaK1L7Nf/45v\nF07EvckMpBLHQeH4uzYf/1f2vZfPJ2/hw73FRsdVroMZM3k8wZ42DTpQTfKeDEL8m2aeWTj3wbeX\nTdsWVVZG8tUvQ/noaT/aswSrs7MzixcvJj09ncWLF+PcqFZgYwzV49VqNQqFgimTJ6BUKikoLOKX\nX1c1W3hZf30t+w8clisz+PTrg9/QwYB+LZiTUze0Wi0rVq8nPjGpWYUoSRInUk6yfMUa1Go19vb2\nXDv2KkA/kjMsyP179z7+3vOPySyt3Lx8fl2xmty6avNTJk2o28TTluuv05fSSj2Vxpp1GygubloM\nHPTloDZt2SF7eOHDw+RScJMmjMfSUkVZmX77nDMZmc2+yBqNhqPHI/n9j82APhMzNCTIqDBwW+ai\n/i3mGEMDlqr6JTi1tbX4DR2MTz994eOt2/9i/4HDJkNM2Tnn+OXXVRQWFqFUKpkyeQIKhYJu3Rzl\nBf2JSSf4bcOmZouBg74Sz28bNskhtWuuHq2vpalQyPVHi4qKWfbrSrJMeIxqdS0HDh2RK+v07eMt\nz5tdNTocZ2cntFotK1evJy4+0aRMppxM5ZcVa6ipUWNvZyd/hv6+PvKC/t17/2HX7r0m59fy8vP5\ndcUaec3uDZOv1w/Q6taHWvyrpJWLh2FAp9Vq9YOl0BB5veD63zdyLCKq2YIQkiSRfiaDn39dSXl5\nOZaWlkycMA7Ql3001AU+FhHFlm1/NlsMHPQ1eVev/U2O0ky8fhw21tZYWFgwZZK+IlDOuVx+XbGm\n2WLgoJ8D3bV7rzzwHjJ4kDzNM+7aq7G3s6OmRs0vK9aYLBKv0+mIi09k5er1aLVanJ2dGD1aX9w/\nwH+ovNRn89YdHDh0xOS7kpWVzbJfV1JUVIxSqWTypOtRKBTy9eboh47fJaP8KG8sWE7Ylx8xrWcL\ngqrO5HiKPaEBruYl0bRIGQdfe4St137Gm+PaJ3b6ySefUFJSwlNPPYWTk/kp4zv+3EVEVAyuLs48\n9MA9KBQKkpJT+GPTVllpuLv3wt9vCLY2NkiSRM65c8Qn
JMujMy9PD+bNnYm1db2X3LBkGui9t8AA\nP7l6RUlJKbHxCXLRXzs7W26bO4teDaqe1NbWsnb9BrkItKWlCr+hQ+TtZGpqaozSzUGfqNB4Me++\n/QfZf+Cw/Levbz8GDeiPSqVCq9WSfiaDEymp8udtXO8S4HRaOut+2ygrgB49uhPoPxQ7OzskSSIv\n/zxx8YlymLZHj+7cdussHOzt5V0GlEolzz716AXttm0u5eXlzJo1i8WLFzN69Giz7ystLePzr/Sl\n6Azlq6prali95jfZCFlZWRHgPwQPd31tyarqank9J+ijDjOmT5WLH0BdGb+/98qVUAAGDuhPf99+\n8vd/6nQaJ1NPy8poWFgIkyaMN/r+k0+cZMPGLfUy2asn/n5DsbU1yGQu8QlJskx6eLhz29yZRoup\nCwuLWLF6nWyYHRwcCArwk3dOKCktJS4uUY4U2NracNvcWUYp+xqNhrXr/yAtXb/WVKVS4Td0ML29\nPFEqldSo1SQln5A9EkAuoajRaPj086VU19Rw3fixjApv530VO4Bz53L54eflANw5/1a8e3tRXl7B\nitXrOH++ANB/T4EBfvTo3h2FQkFFRQVxCUnyeUtLFbNnzsCnX1+5XZ1Ox6Yt2+U5QaVSyZDBA+nX\nt49cEDzlZKpcLhH0mbNXX2U8uGu4gwfoa5cOHTJYXxBcp+NsVjaJSSfk99bHpy9zZs4wWlKScy6X\nVWvWU1WXGOfs5ERQoL9cY7WwsIjY+AR5PrpbN0duv3W20Y4b1dXVrFzzmzztZG1tRYDfUHlpWlVV\nNQmJSfI2UUqlkptvmiZ7qn9s2kpCYjI9e3Tn/nvvavE3uThbR9XkkVbuio9bG2sFXCiaYjIKrOjT\ny/zMq44iNzeP73/6FTCuK3kmI5Ptf+6ioKDQ5L1KpZLAAL86T6rpYv+ysjI2b/vTSLCbo493b6ZN\nndSkAj7oR6i7du8jKjq2xfCns7MTkyaMl+d9GhMTG8+efftbTP6xtrZiVPgIuQJHY3JyzrFl218t\nJhkoFAqGDhnElMkT5PVQP/28nMKiYrMq4Hcmq9f+zqnTafTo0Z275s/D2tqK2tpa/ty526RnZcDN\n1YUpkyc0uzhekiSOR0az/8AhWfE0h62NDWPGjGLEsNBmv/+MzLNs37GT863IZID/UCZPvK5ZmSwv\nL2fz1j9bnR/27u3FtKmTmi1BptVq+XvPP0RFx5osmwd65Tpxwjh5N4zDR47z9559KBQKnnj0gVbL\nxF0KSJLEDz8vJzc3jz7evZk3dyYqlYrq6mq27dhJ8omTLYY/e/XswbQbJjW7FlCSJPYfPMyRoxEt\nhj/t7e25btzVTXY9MXAy9RR/7tzd4gbIhmL04+vWfjamsKiILVv/bDW07uvTjxunTmr2t6utrWXH\nX38Tn9B8ZMxAdzdXpkyeICeRpZ/JYMUq/drl1mr7wpWyn+IlzopV6+RsqmuvGcOwsGBs6rzCzLNZ\nREbFGAmLjbU1gQF+BAb6Y2/XumEvLCoiKjqOEydO1m8yrFAyaGB/QkKC5N0QWsKwC0JsXAJVDeY6\nPT3cCQ0Jxqdfn1bn7gy7IERFxxop1m6OjoQEB+I3dHCrlXwMuyBERsUYZWZaWVri7zfUqKxXTs45\ntv+5S04+ueuOefL85KVIWnoGK1frX04P915MmjAeT08P/ei/spK4uAS9N9wgZOjd24uw0GC8e3u1\n+v1rNBpOpKQSFR3bZBuv0NAghgwa2OqicINMRkXHGmX7WVtbE+jvR1Cgv1lp/oVFRUTHxJGcfNJo\nk+FBA/oTGhJkVoWT6upq4hKSiImNN5JJD3d3wkKD8OnXV45oHI+IZu8/BwD9HOUtM25stf1LhYYF\n/vv19eb6666Vt/YqKysjOjaehMRko2Qqn359CQsNxsOMIh5qdS2JSclEx8Ybzdl3d3MjLDSIAf19\nWw0r6pcindHv/3qu
fsrH1taW4KAAAv2HmlWCLT//vH6+NLV+/1cLpQVDhgwkNDgIl7qM9paoqKgk\nNi6hbv/X+nelj3dvwkKD5UhXdU2NfrC+dz9arZZu3Rx5+IF7Wn0HhFG8CFRVVbFyzW9yGMzCwgI3\nVxeUF3H+64pCkqiqrjYauTYX1r0UaVjBH/ShIjtb2wtcJS3QaXUUFhXJHqV+qmGWvM3S5cKBQ0fY\nu++A/Lezs5N+ezYhFxeETquloLBIjn65uDhz262zcDZj6ksYxYtEdU0Nf+/eR3xCUoshIUHb6O7m\nyjVXX2U0z3apcyLlJP/sP2QycUHQdlQqFf5+Q7j+umvlvT4vN2Ji4zl4+KjZSyAEraNSqfQ7dIwb\na3YhA2EULzLVNTWcTD1FRXmlUZWbS4mKuglvezN3z+gMLC0t6dmju9FGuJcTkiSRlZXDudxc1Gau\nMbzYHDl8mKqqKiQJ+vbtg2//i7WTvfkoFQrs7OwYOMC31e23LgcMm+Xm55+XM2kvRfJy8+jZylZl\nnYlSocTewY6B/X3N3lnDgDCKgib85z//wcrKirfffruzuyLoRHx9feWSdi+88IKQBwEAmzZt4q23\n3uLQoUOd3ZUO4fJYzCO4aOTk5PDVV1/xxRdfmFXrVXDl0nDLIDF2FoBeDl599VUOHz7Mrl27Ors7\nHYIwigIjlixZQlVVFWVlZXz66aed3R1BJ9IwLP1vd3kRXBmsX7+e6OhoAN58883O7UwHIYyiQCYj\nI4Nvv/1W/vvTTz81q+ya4MqkoVEUnqJAp9OxaNEi+e89e/Zw4MCBFu64PBFGUSDz+uuvGy3yLSws\n5Msvv+zEHgk6ExE+FTRkxYoVJCYmGh27Er1FYRQFAKSmpvLzzz83Of7hhx8aLZwWdB1E+FRgQKPR\n8NprrzU5vn37diIiWt9Q4HJCGEUBAIsXL252/WReXh7ffPNNJ/RI0NmI8KnAwE8//URqavP7XF5p\n3qIwigISExNZuXKlyfPvvfeeWVvHCK4shFEUgH6HnzfeeMPk+T/++IP4+PiL2KOORRjFTkNDwals\nTJfP1lGSkU35RYhavfrqqy2Gx7Kysvjpp586viOCSwoxp9jJaAo4lW1aQ1wsHfHNN9+QkZFh8rwk\nSSxZsqRjO3EREUaxU1CT+vPTLNpbJm+Tpc5PYO+WzeyOzkG/z4ESspfzf6/tIK8DhT46Oprffvut\n1ev+97//ifJ0XQwxp9iJqFP5+elF7C2TNQT5CXvZsnk30TmGnVA6XkdUVVXx1ltvtXrdmjVrSElJ\n6ZhOXGSEUewE1PH/Y/7akSxcMBhrdBQf+Yp3ViRj4e5E5g+zGXX7MtLU4DTqKR61WMx9K3PoKJX0\nyiuvmOUFpKWlsWLFig7qheBSRIRPOws18f+bz9qRC1kw2Bp0xRz56h1WJFvg7pTJD7NHcfuyNNR0\nvI748ssvyclpftPphuh0uiun4pEk6HA01dWSRv6rSFo3rps0d1e5/Pe2+f7S1KWpUo0kSVLRFml2\nrwHSc0cr9Kdzl0kjPR6WDlU017JWKjr+g/TZzpwG7Teg+Lj03Rc7pZxmT0rS4cOHJcDs/4YMGSJp\ntdoL/RoElxkhISHyb//oo492dne6DkXrpHHd5kr1KmKbNN9/qrQ0tUb/55bZUq8Bz0kGFdGyjrhw\nysrKpB49epitH1QqlZSent6+negEhKfYkajTWP7YfF74YCGzZywhqhIo2c/SI+HMH2Yott2Nq1/7\nniUzvLECdJUFFOqc8XKp2/Or5wTutF/D+tSmiS7ViZ9yx8J8rh3jTrObUDmFMMl1GXe9cpCSZk6/\n/PLLbfo4ycnJrFu3rk33CC5fRPj0YqGlpqZ+g++S/Us5Ej6fehVxNa99v4QZ3laAjsqCQnTOXhhU\nREs6AnQUR/zI57vO0dwW4iUR3/OliXOffPIJ+S1s+N0YjUbD//73P7Ovv1QRRrEDKd
z5Is8fCuFm\n72SicipRS0BxDAeqg+gvb0ChxKH/SEJ6WYE2nz/f/grrZ7/gHl/DfnCODPArZ09Co8oy6hN88dBq\nrv30aQJNFoG3wHvuOyyIfpjFh5tO2O/YsQO1Wk1VVRXl5eWUlJQwaFD9FkwPPPAAZ86c4fTp05w8\neZLk5GSuvvrqf/elCC4bRPi041GnLeex+S/wwcLZzFgSRSVQHHOA6qD+1KsIB/qPDEGvIv7k7a+s\nefaLe5BVhCkdQesDZ6eQSbguu4tXDjYdNi9cuJDa2lqqq6upqKigtLSUe++9Vz4fEBBARkYGaWlp\npKamkpyczLPPPvsvv5HOp+UtiAX/gnIS/jiAxagnCLnzv6TfqT+qySylSmmNqvFwRFfIgQ9fZGPg\n16y5PwiHBuctlWpKKo3HcsV/v8QH1k8RNbCVzVSV7sx4MZz/Pr2C5w7ej1eDdpVKJUqlEktLS/lY\nQ0Xo5OREnz592vKhBVcQwih2NIXsfPF5Dg1bzTN75rMhRI2EhqrSKpTWqiYei67wAB++uJHAr9dw\nf5CD0fnmdIQ8cP5ur+mBs4U3c99ZwLJJizl8+CNGNdhyUKFQoFKpjHaqt7KyMvq3t7f3BX3ySxnh\nKXYUmlyOHSqm75gBNNzlTeXSj97aTM5XNzioK+TwN59ybNibfPpgEDW73mRpvOGCWvKzVPh6NNw4\ntZC/P/iLQQ9PopcZv6B9yG2MiX+PdZmth8Aahskux30KBe2HWJLRwZQn8McBC0ZdFcKdW9I5umQk\n9qhw6dcbbeZ5jFXEYb759BjD3vyUB4Nq2PXmUmQV0ayOqB8439XKwFnpPoMXw9fw9IqsVpN1GsrB\nlaofhFHsAMoivuHlp5/j0zgtlTve5sV3t5BpWM3gNo47nHdxMN8gftXEvD2N6x55jWeu98BSoaD7\nTf/g5lE3tKtJZW96MLeGOdU/oCKO3w45c0OoE2bhGMhNQ06y6nBxq5c2FPqGSlHQ9RBzih2LJvcY\nh4r7MmaA8ebIbuPuwHnXQepVRAxvT7uOR157hus9LFEounPTP24YVESzOqJNA2d7Qm4bQ/x762ht\n3CyMouCCcBz2IK88FIy18zhe/vhj3vm/aXgbIhCqgdy3yJNlv6agt5M2BL90iEpJQjL8V7WD2W76\nyyujfmJn+Evc7N7gASVJHKkYwGDXZtNrmsGBAWG2JEdkUdPKlQ2VnzCKXRsRPu0oyoj45mWefu5T\n4rSV7Hj7Rd7dkolh3KwaeB+LPJfxa0rdEZtgXjpUWa8fJImqHbOpUxHN64g2DpwdA29iyMlVtDZu\nFkZRcMGUJ+8n1/MaBjs0PqOi/4NLmbVnEcvPtLIYviaRpUvO8fxn03FpcFhdlEGRVU+cLRtdXx3H\nezcEMuHNqEaVclQ4edhRejqP1oq1ifCpwIAIn3YUjgx78BUeCrbGedzLfPzxO/zfNO/6BA9Vfx5c\nOos9i5bTmoowpSPaPHB2GECYbTIRWS0Pm4VRFFwglZw6dBrrwOF4NhfOtxvG4rXPULnxMMUm29CQ\ns28Xzm/8wH0+xvlQkk6HpFChbCyT2lLOJMWTlFbSJMVakrTodK0rNhE+FRgQ4dOOpJzk/bl4XjOY\nJuNmwG7YYtY+U8nGFl030zrC1MC5Ou49bgicwJtRjbLRVU542JVyOq/lYXNXMIoi+7Qj0BWTcCiP\n3ncOalbgAXAexSNPtNSICo+JT3BPM2csnTxxrImitLbRCfsxfJ4u8XmTOzSU5lTj4OVCY+eySddF\n+FRQhwifdiCVpzh02prA5z0xlQbjPOoRWlQRLegIUwNnbekZkuKT8CxpMmxG0upobdzcFYyi0Hod\nQeVJDmS6EB7UvflF9f8SpdNgQqxOklpi7ui9moyEKnyHe2NySWMdInwqMCCMYsehK07gUF5vxg4y\nOWz+V+gHznlNBs72Yz4nXcri+3GOxic0peRUO+
Dl0vKwWRhFwQWhzY8homIw4wfbt37xheAUzC3B\n5/krrtS86ytT2JXkzZzRbq1eKsKnAgNiTrHjqDx5gEyXcIK6d8Sw+QIGztUZJFT5Mty75WGzMIqC\nC6IsaR/5g28h3LWjnuDOlGfCiV8fQ7kZV1cnrmKL+xPM69/6CyjCpwIDYk6xo9CSHxNBxeDxdNS4\nua0D58qUXSR5z6G1cbMwigLz0eWz47nJzHjnCPF70wl4cDreHTMIBKDXjLe4Ne5jNp9rTVkV8/d7\n2xj7wf0MMKM/InwqMCDCpx1FGUn78hl8SzgdNm5u08C5msRVW3B/Yh6tjZuFURSYj66MtLh87KTd\nLMtbwAe39+mQ+UQZu3AWfujLV8+uJqu5ar76TlG44wWW2LzNh5NdTF1khAifCgx0BQV4MdHl7+C5\nyTN450g8e9MDeHC6d4fqCLMHzsV/8962sXxw/4BW+9MVZEJovfZC5cvDW3byxpSbWbL0cfxby2hp\nB5zHvs2y2aks353bbJV7So7x875RfPPVTNzNfPtE+FRgoCsowIuJriyNuHw7pN3LyFvwAbf36dBh\ns3kDZ10hO15Ygs3bH2LOuLkryIRYktGeqFwZENJxAZGmWNF35iv8n6nTTiN5ZsnINrUowqcCA11B\nAV5MVL4Ps2XnBOLznHh2SA+TSzHaE+exb7Ps/Dss353Lfyb0auIJlhz7mX2jvuGrmSa2n2tEV5AJ\nYRQFRojwqcBAV1CAFxuV6wAu6ri5lYGz08hnaMu4uSvIhNB6AiNE+FRgoCsoQEHb6AoyIbSewAgR\nPhUY6AoKUNA2uoJMCKMoMEKETwUGuoICFLSNriATQusJjBDhU4GBrqAABW2jK8iE0HoCI0T4VGCg\nKyhAQdvoCjIhjKLACBE+FRjoCgpQ0Da6gkwIrScwQniKAgNdQQEKLpwrVSaEURQYITxFgQFhFAWN\n6Qo1cIXWExghPEWBAWEUBY3pCjIhjKLAJFeq0AvMoysoQMGFc6V6jcIoCkxypQp9R6IpOEV2penz\nupIMsssvj70JhVEUNKYrbCcmjKLACKH8Lhx16s88vWgvZQ0qK1dnHONIllr+W0k2y//vNXbkXfqG\nURhFQWOEURR0aa5Uoe8Q1PH8b/5aRi5cwGBr0ObtY+lbL/PAdaO5f0sBsgl0GsVTj1qw+L6V5Fzi\ndlEYRUFjGibfXan6QRhFgRFdYSTYERRveoIP+j3LTE/9K2XRcywPLXyee0a6NdmKxirgYR7N/y+v\nH20hznoJIIyioDEN5aBhUt6VhDCKAiOEUbwQSti/9Ajh84dhb9b1PZlwpz1r1qeibuasrjiCHz/f\nxTlTG8O2pWcR3/PlrnPNb0LdCsIoChojPEVBl6PTlZ8mk9/eeJ8DxaYuqCb268X8klx9ETvVGsXE\nHKgmqL95JhHAcYAf5XsSKGt8ojqRT+9YSP61Y3Bvh43ZnUIm4brsLl45WNLmey8Zo3hZysSVySUz\naO5AmRBGUWCSiy/0NUQtvpX1YfcyxllH8fHvWPTcg0zx6cHkdYV119gQdOckjt3xBHvKL3L3TKGp\norRKibWqDa+TpRJ1SWUjD07NiS8eYvW1n/J0oE379M3Cm7nvLCD64cUcbmO09tIwipepTFyhXBrh\n046VCWEUBUZ06kiw8Dce+WY0z092BZQ4Dp3JswvvwLuqlCptg77YX8V/bv2L+7/LvLj9M4XKhX69\ntWSeN39UWpufhcrXA+uGB4v/5qUPrHnqroFYtWP3lO4zeDF8DU+vyOJC1VinGcXLVSauUC6J8GkH\ny4QwigIjLory05RTXGHwkbRo6v5ZcuB7YsbPYFBdZoqFvStOVs33p/eNc8haupP8ju+tGbgx7g5n\ndh3Ml42OriSaZW+9zpcH8ji18g1e/2IHmRrD9TWk7k0n+NYwnBq0Uvj3B/w16GEm9Wrv19KekNvG\nEP/eOjLbYB
UvvqdYLwsGLl+ZuDK5FMKnHS0TwigKTNIRQl9z+lfuCfXGzcEa1wHDGXvNdTy8PhMN\nUHU6imofLyzNaMfC2R1VyhkuZv5mTdrvLH7yed765DM+/nwZny18mz0FOkDFwPsW4bnsV1LqDJ/S\nKYS7Fr7HunSJ0t1fsvixyXgb0lAro/hpZzgv3ezeoPUK4n47hPMNoUaGsr1wDLyJISdXcbjY/Hsu\nplHUFR/l3QkuWPo9xPqM+vSjS10muhqXQvi0o2VCGEWBER09ElTYh/B/e/JRl6Xx99KX+M+7K/li\nrjcqwLKbGxQUo2m1FUBdSrmVbbuGGVt83KlvmTv5E+zuX8TCp57gzt7refHjfRTWvUKq/g+ydNYe\nFi0/00r/a0hcuoRzz3/GdJeGx0tIOlLBgMGutEN+TVMcBhBmm0xEVk2zp2fPno2bmxs9e/bEw8OD\n3r17c/LkSfn8Tz/9REBAACEhIQwbNozw8HC+++679umbOomPZt7PoQlrOPiqNR/f/DBbCvQK91KW\nia7IpRA+7XCZkASCBtja2kqABEiffvrpRX22JvklyXXQUim37u+KmC+k/z48R/K3RvK66RHpvx/u\nlwq0+nPFv42TnO49LFVflI5lSMumdpNcb9suFekPSBnfjJQch30ppWkaXlgkHfryU+mfItNN1Wb/\nKX36Q5RU3PhETbz0opuVNG93mfHxqljp3SkB0vVvREoVrXa0Sop9d4oUcP0bUmTji2sSpVe7K6VZ\nO0ubvXP37t3y727Of/b29tK5c+da7ZG5aDXaBn9oJcNfl6xMdFHuvfdeWQZGjRrVYc8p2/8fadTg\nYdK0e56Qnnt2gTRuiI804JqXpSPlHS8TwigKjGhoFD/55JN2b78lYZc0adJno0dLX6RrWmmlQFo7\nzV9aGHNx1J/m9OdSGE7SvO1F8vM3THeWBr4S3X4KuDpWet7VWrpjb7nx8fL90mN9kTzv3S01b86M\nLpb2P9ZXwvNeaXfji2sSpFdckG75y3QrV199tdlG8fnnn7+AD9k8l6NMdFXuu+8+WQbCw8M77Dnn\n106Thr9wVCrXFksHXwyQFDhLM35Kk2okqcNlQhhFgRF2dnay0H/88cft3n6Lwi5JUnXyl9LdD/4s\nnao11YJGyt30hHT7R/EXzSMoP/iY5KkaIX2fVTf8LPtHetDLU7qnsVf3b9CekT7pizRpU1H7tdmQ\nikPS/bYO0uPHq0xesm3bNrMMYrdu3aSCgoJ269rlKBPNUpshrX/9PWl/keGAVqour5a93qqYr6RF\ny5Ik07/Apc8DDzwgy8GIESM67Dmlkb9IqxLLpJw/7pG8QOr30DYpr0EwoSNlQhhFgRH29vYdahRb\nE3ZJkqTq7Hgpvbz5+yWpWspJPHtRFYvmzDfS1T3GS6vyJEmSaqWMX2+WnO0mSWvz2vMpxdL2GVaS\nz0fpkraVK2tOr5b+c/vj0rLUNrzuBaul0QRJP+W2fNnw4cNbNYqLFi0y/7lmcDnKRHN9iHxptHT7\nZv1goerkWumzxbdI3fu9ISXLDk25dOCJYdL97TmYusg8+OCDshwMHz68Q59Vc3KpNMURSTVssXSs\nrEw68uH/pL1F9ec7SiaEURQY0dAofvTRRx3yjNaE/dKjXIr5/F7p9v9+KH373WfSq3M9Jcuwz6XT\nrUVv2kjOTyMkx6mb6uYtTVO27xHJF0spcFG02S99xaEHJWfvN6QTrfT5999/b9Egurq6SiUlJWY+\n1XwuP5loRMEKaWSPZ6WYhp5LzleSn89rUnJNg2Pp70p9+38kZVzs/rUTDz/8sCwLYWFhHfegsmPS\na8NUEo5TpG9SaySp/JD03MhZ0h+FHfdIAyL7VGBEh69DKj/O/+Y9xnam8OXqFxmuiGft6mMUX9Jl\nFO0Jeux7lr/7DPffdxvBZRX0nXo1Hu2cJuo+5RnC49cT00oFDodrvuRU6R7u
7W3u61tN4qotuD8x\nj/6t9HnGjBkEBgaaPP/f//6Xbt26mflcM7mMZEJTnE7MwX0cTSnQ161VF5Nbrm2yds4kvW9kTtZS\ndl6miykvzjrFGuI/uY9FERocBjtx5M07mTj8et7P7IGrOesw/iXCKAqM6Nj1aJ0r7BdEdRI/PPsE\nX8bpq9Vos7fzY4Q3t88eTDsVYqun1wzeujWOjzefa7XyTEn8cRQB/czrQ/HfvLdtLB/cP6DV5R4K\nhYKFCxc2e65nz5488cQT5jyxDVweMqErOsqXD81k/vPf8nd6KXnHlrHktS/54ME7+P60xvy1cxbO\nuKtSOHOZLqa8OEbRmoCXYpAkibJjq/jux9X8lVSBlPUVVzt00CMb0Nq4RtCFaX+hrxP2l9q52Q5E\nVxTJqp+30WfYrfxzKoo1P2yl9/vrWRjc7iYRsCN84Yf4zniW1df8wm1eJkxYZTwbjvky81HH1pvU\nFbLjhSXYvL2eyS6tXw4wd+5cFi1aREpKitHxF154AXt784uem8elLxNlER9y94Pb8X9rGcsnu8tK\n85o9D+C/tCe/fm6NZYQbxOvXzrU88FBTWm6F3WW6mLLhOkWxdZSgS3AplHG6lFB6zGd71hEWBlhg\n6TudN9Zt46s7BxnXLG1PnMfy9rLZpC7fTa6p/Z7sArj7yRvrK+S0QMmxn9k36hu+muludlEApVLJ\niy++aHTMy8uLRx55xMwWrhy0eRt59Kb/wX9/YnEDgwjg5D+Jq8ZPw98BnK+ai+u+CAx7kdSkb+bz\nj9ZzMm0rX3y2jlRDWdySSPY53c4414v8QdqJLqEfOn7aUnA54eTkJE+kv//++53dHUEnUVtbK/Xr\n10+WhS+//LKzu9QJVEhH/tNPshj8mhTbXKJvVar097E8SSNJZq+dK1g7TfJfGHPZFhh48sknZZkI\nCAjo7O50CMJTFJhEulJHgoJWUalUPP/88wD069eP+++/v5N71AlUxrJ8ZTr975jF4OZCAzb9GT+8\nh94Dt+jHAz/eydG3lnPaRP0xbd5mFv8+gdWvBnVcpKGDEeFTQZejS4RHBGZxzz334OXlxauvvoql\n5SWU9XKxqMokJr8bwcN7N1M7U0v5uXyqG9gF68GPsHTxMCyaLy+LRtef579/Gv/L1SLSNfSDSLS5\nyNTW1lJYWESNWt36xZ2AQqFAqVSiUCgoLikhI/NsZ3epCUqFEls7W1xdnDt3R/h/SXFxCeUVFZf0\niPu1115n3PjrLkk5ALCytMTZxRkb6w6wNCoHujvYYG/djO9QmcCatWXMfKJHgwxgLSXpMWyKOcP0\ne6fSt5EltXYfilcrj5QkiaKiYiorq9BJl55cWFvb4OPbH4VCgYeH56UrF1ZWuLq4YGXV9sGcQrpS\nzf0lRllZGcciooiOjqO6xsRQUtAmPDzcuWrUCAYNHHDZGEdJkkg+cZJjxyM5m5Xd2d25IlCpVAQF\n+jFi+DDcXM1MsTUHXTF/PzaCR2x/4viHY5BzfXVFHFm2kqKJDzLFy9ivqElYjO/YUrZkf0hIG+y0\nJEmknEzl0OFjZOeca7eP0JWxtrYmJDiQEcNC6dbNjEztOoRRvAjk5Z9nxcq1VFZVdXZXrkjGXDWS\na68Z09ndaBWdTsfW7X8RG5fQ2V25IlGpVNw65xb69vFutzZ1Jcf45LGFHOw7n/tvHIpVTjxRp2oY\ndMsCbhxgZ3ytVkvxphvwX/U8Kauux3w1DHv3HeDAoSPt1m9BPXZ2tsyfN4cePbqbdb0wih1MQUEh\nvyxfTWVVFVZWlowcMZygQH8cHPTrvbRaLWlpZ6iqrpbv8fRwx83N/JxtSZLIzjlHYWGRfKxbN0f6\nePdukwdVUlJqFA6xsrLE16dfm+aTampqOJ12Bo1Gn22gUCjo17eP/HnNQafTkX4mg4qK+hXOvXr2\noGfPHvL5rOwc/tl/iMyzWQBMnTKRkGDT
lVguBTZv3SEbxH59+zBq5Ah6e3liYaEPzxUWFpGVnSNf\nb2NjjU+/vqhU5s9yVFVVk5Z+Bq1Wv55DqVTi068PdnZ2rdxZj1arJS39DFVV9TLp4d6L7t3dzG5D\nkiRycnIpKCyUj3VzdKRPn7bLZObZLHn+qrFMVlVVkZB4gsNHjlFRWYlKpWLe3Jn08e5t9jNaR0Nx\negxRyaU4+48gyNvBaHmLrng/Sx75DmlsEBnv/Yf4Jec5fJv531VkVAzb/9wFgHdvL64eM5reXh5y\nUkte/nlyc/Pk6+3t7OjXr49R0ktrlJdXkJ6RgaTTf48WFhb4+vZrU9i5traW02lnUDeY+und2wsX\nZ/O3xZYkicyzWZSUlMrHXF1c8PR0b5NcFBQWkW30rtjg69MXCwv9L1NeXkFsfAJHjh5Hra7Fzs6W\nu+bPw9WMSMKVYRS1FZRU2+Bk3yHbs/4rNmzcQmLSCWysrbn9tjm49+oJ6F/2yOhYYmLjqKxs6kH2\n8e5NaEgQQwYPlH/oxqjVauLiE4mMjiU//3yT8y7OzoSGBBEc5I+trW2zbUiSxMnU00RGx3D6dHqT\n89bW1gQF+BEWGtyioc7NzSMiKoaExCRqa43T7xQKBYMGDiAsNIh+ffuYFP7y8nKiYuKIjomjrKxp\nrTNPTw/CQoLwGzoYlUqFVqtlzbrfSUvPwN7ejicefbBNiuJikp2dw0+/rARg+LBQJl4/DoVCgU6n\n40RKKlHRMaSfyWxyn62tDcFBAYSFBOPcgvLJys4hMiqGxKQTskE0oFQqGTpkEGGhwXj3Nj2rVVpa\nRmR0DDGx8UYDEgPevb0ICw1uVSbjE5KIjIolL79pLTNnZyfCQoIICgrArgWZTD2VRmR0DKdOpTU5\nb21tRWCAP2GhwXSvk8mSklJ+XbGGktJSPNx7seCu2y9SSL2c3Xf58NyYaI7dU8brA2fgvC+Jp/ua\nJ4darZbPvviGyqoqfH36MWfWDCwsLNBoNCQlnyAyKtZooGTAwcGBkOAAQoODcHQ0XeYl/UwGkVGx\nnEg52SQxRqVS4e83hGGhwbi79zLZRmFhEZFRMcTGJTQ79ePj05ewkGAGDexv8juvqq4mNjaBqOgY\nCouKm5zv3t2NsJAgAgP8sDZhqHU6HcknThIVHcuZjKbvip2tLUFBAYSFBuHspH9Xzp3LZfmqddTU\n1BDg78dNN04x+TkNXOZGUUPmpvf5+ZQr3Uv3s/r4UN5b/iLD2xK3MIOamhreeustnnnmGZydnc2+\nr7S0jC++/g5Jkph2wySCgwIAOH06nfUbNlFbWytfa29vj0KhQK1WG43EfPr1YebN05sISnFJCavX\n/k5BQf1I3NbGBguVCp1Oa2RoHR0dmDdnZpPwgUajYdOW7SQl11cusbS0rHuWRHl5hXxcqVQyfdoU\n/P2GNPmcx45H8teuPfLfCoUCe3s7QEF1dbXsNQIMCwth4vXjmhivs2ezWbt+g5HHbG9vh0KhRFNb\na/Qyenl6MHvWDOzt7CgqLuarpT8AMHf2zQzo79ukf+3Jjz/+iL+/P+Hh4W267/c/NpOUnEKP7m7c\nf+9d8m/9+8YtRorfysoKKysrJEmioqL++1epVNwyYxoDB/Q3aleSJA4cOsK+fw7Kx5RKpewZVlVV\nGRnJ0aPCGTd2TBPldTrtDL9t2Iha3VQma2vV1NTUy2TfPt7MmnlTEy+jpLSU1Wt/5/z5AvlYSzJ5\n65xb6Nmjh1EbGo2GLdv+JCExWT5maanC2tqG5mRy2tRJBPr7AXAmI5PlK9cCcMftc9vZWzSBOoEX\nPSdifziThXbfEhaeyuc77mbgwEB6mVG15kTKSdb/vgmAxx65H6du3aisqmLd+j+M5pytra2xtLRE\nknRGAxYbGxtmz7ypyWfV6XTs+nsvxyKi5GMqlQobG/33WFlZZZTgdf111xI+PKyJXCQlp7Bx8zYj\nGdJH
fRTU1NQY6bAhgwcyfdqUJpGl8+cLWLX2N0pLy+Rjdna2KJUWaLUao4iEq6sLt865BZdGeram\nRs3vf2zmdFq6fMzUu2JpqeKWGTfKusDgiSuVSh5/5IFWo1YdYBR1VObnonbywLmjSxnpMvj62jmc\n/3ofL/sX8XN4AL8vPsWGqea78+ZQWVmJvb09Tk5OPP300zzzzDM4ObX+jNi4BDZv3YGVlRVPP/Ew\nKpWK06fTWb3udyRJwtbGhrDQYEKCA3Fy0hdZ1ul0pKWd4XhUtKwsPT09mD9vtixsZWVl/PTLSsrK\nylEoFAT4DyUsNBhPj/oQRG5ePlHRscTExqPVarGxtuauO2+TR9Y6nY61v/0hP6OPd2+GDwth4ID+\nxiGIuHiOR0bLyuimG28gwH+o/BkPHz3O37v3AXovYPiwUAID/LC10efk1dbWknwihWMR0Zw7lwtA\nYIAfN06dLPc1KyubX1euRavVYmVlRWhIIKHBQXKoQ5IkzmRkEhkVQ/KJk4B+ZHnX/FuxsbHhp2Ur\nyM45x8jw4Vw/fmzbf+A2cO+99/Ljjz9yww03sHjxYrON4/sffY5arWbKpOsJCw1Go9GwcvV6Ofzr\n69OPYWHB9Pf1kQcMpaVlRMfGERkVIxuU2TNnMGhgvWFsOBfl5ubK8GGhBPgNkQdRarWaxKQTHI+I\nIq8umjBieBgTrx8nt5GWfoZVa35DkiRsbGwICwkiJCRQHm1LksTptHQio2I4mXoa0IdT5982V87u\nKy8v56dfVlJaWoZCocDfbyhhoUF4eXrIv3Ne/nlZJjUaDdbW1tx1xzx61IVldTod63/fKD/Du7cX\nw4eFMmhgA5msqCA2LoHjEdGUl+ujCTdOnUxQoD+SJPHdD8vIP1/A6FEjGH/tNeb9qP+KMv556ia+\n67uAkefX8O5vPZg9by7/fXUqvcxwFv/862+OR0bj3duLO+ffSnVNDb8uXy3/VoMHDSAsNIR+fb3l\n77GwqIjomDiiomOpqVFjYWHB7fNmy1EASZLYtmMn0TFxAPTq1ZMRw0IZOmSQrEOqq6uJS0jieEQU\nRXWe2/hrr2b0qHp5Tkw6wYaNWwD9AGl4WAjBQf44OOg9U61WS+qp0xyPiJY9Nx+fvsyddbP8exUW\nFvHzLyupqq7GwsKCoEB/wkKC6FUXMdOH2c8RGR1LXHwikiTh4GDPgjtvl5NjNBoNK1atkwcJvr79\nGB4Wgq9PP+N3JSaOiKhoqqqqUSgUzJk1gwH9famtreWjT79Co9EwY/rUZgf2DbkAo6ijMjeH2u5e\nODWKoFQnfMa8G//DH3nXsTx1K7d7KIEaUlZ/SkTok9w2qP3TpnUaDahUKMsP8njQ43huPszCQefZ\ntnQnXnffRVA7FJA1GEUDzs7OPPPMMzz99NMt7hhw9HgkO3ftobubKw/ev4CS0lK+/f5n1Opaund3\nY96cmS1mRUXHxLF1+1+A3pBMnzYFSZL4cdkKzp3LxdJSxZxZN9Ovbx+TbeScy2X1mt+orKrC2cmJ\nB++/G5VKxe69+zl0+CgA144dw1Wjwk2HPqqqWLN+A1lZOSiVSu6563Z69epJWnoGK1evA6B/fx9u\nuelGkynQkiTx1649HK8buU68fhwjhodRVVXF0m9/orKqCqdu3bjt1lktxv2TklPYsHELkiQxoL8P\nc2ffwvrfN3Ei5SShIUHcMHmCyXvbA4NRNGCOcZQkibff/QiA226djU+/Pmz/cyeRUbEATJp4HcPD\nQkzeX15ewaq1v5GXl49KpeL+e+/E1cWF5BMn+W2D3ssYOmQQ06dNMTn/qNVq2bZjpzynOX3aZAID\n/CktLeOb739GrVbj5urCbbfOblEmDQM9gAC/odw0/QYkSeLnX1eRnZ2DSqVizqwZ+PTra7KN3Nw8\nVq5ZT2VlFd26OfLwA/egUqnY+88BDhzUG/ixV1/FmKtGthiOW7t+A2
fPZqNQKFhw1+14uPdi3W8b\nSTmZelFkoR4t5YWVWLk6YlFeTI2dM3ZmRvE3bt5OfEKi/F0a+q9QKJgxfSp+QwebvLeoqJiVq9dT\nXFKCrY0NDz6wAHs7OyKiYthRN0cZFhrMpAnjTU4r1NbWsmHjFnkgMm/OTHx9+5GXf54ffvoVnU6H\np4c7c2ffgp2d6XD3kaPH+XvPPwCMDB/G9eOvRavV8s33P1NUVIytjQ23zp2Jp4e7yc+TkXmWNet+\nR62upVfPHty74A4UCgVbt/8lG3jDoNIUZWXlrFr7G/n557G0VPHAvXfj7OzEl0u/p7i4pNX74QIW\n7+vOrWbG4Kt49WhF4xP89XMa497/i9iT65nnoW+6+O/neOzISG4wGER1Jju/+ZLvfvqWTz/6lYii\nf7cWR6lSodRks/HVD7H84A/+z88KVJ5cd20eix5fS047LPVpPG4oLi5m0aJF9OvXjzfeeIPS0tJm\n7zMIoq7u/qioWNTqWmxtbFo1iAAhwYFcO1afVRmfkER5eTkZmWdlj+vmm6a1aBBBP6KfM2uGvt8l\nJZxISUWtriUiMhrQz2+NGW1a+QDY2toyd/YtOHXrhk6nk0MyR44dB/RJMDNnmDaIoA+pTrx+HEMG\nD6q7NwKdTkdsfCKVVVVygkRrE+FDhwxiyqTrAUg9lcb5gkK5750xE7Bt2zZGjhzJ1KlTOXbsWLPX\nKBSK+j7qdFRWVhETqzdOY64a2aJBBH24at6cmdjb2aHRaIiMigHgyFH99+/d24ubbryhxYQcCwsL\npk6ZiI+P3lgdPhqBJElEx8ShVquxsbZm3q2zWpXJoEB/2QNLSEqmtLSMs1nZctLDjOlTWzSIoPdc\n5s6+BYVCQWlpGUknUqitreV4RDSgV+RXjxnVskza2DB31s04OzshSRLHjutlUqnsjKU5Fji4OmIF\nWDiYbxABdDp9WNJCZUFhUREpJ1MBmDRhfIsGEcDFxZl5t87E0tKSqupq4uIS9AbqiF4uBg0cwOSJ\n17U4z25packtM27Eo25O0fBOH4+IQqfT4ejowNw5pg0i6OV71MgRhI8IAyAySu/Bppw8JXuhs2fN\naNEggj5adctNNwL6SJch2c4wkLvm6tGtGjTDVJGdrS21tRoio/XvitLw/rV4t542G8Wq9EjOac6y\nYVUsDc2iLv8YR/rew8OzriXQ017fcOUxXnssjYdfGoszAJUcW3wrn1lP554FD3D/uBSeXPAD6SbK\nIpmFJoednywlZ963vDPyFJsT9L2yDniYpy1e5pk/i/9F4y1TVFTEq6++Sr9+/XjzzTcpKyszOm9R\nJ4xarRatVktMbDygf+nNXTcTPjwMGxsbJEkiJjZBVoieHu5N5pdM4eXlKV8bFR1LYlIyarUapVLJ\n1VeNMqsNWxsbRo4cDujDKrm5eXJizpirRpmVoapQKBh7zVWAPtxx+nQ6UXWfJzDAz+yM2+CgAPn7\ni46ONeuejmbbtm2Eh4czbdq0Zo2jSqUPq2i0GmLjE+pCxZaMHjnCrPYdHOwZPiwUgNi4RLKysuUE\njGuuHm0y8aUhSqWSa6/Wf//5+efJzMwiqm4EHhoahJOZ+ySOGB6Kra1BJuNlmXTv1ZPBgwaY1Yan\nhzuDBuqvjYqKJSk5hZqaGhQKBVePMU8mbWxsGBWul8mk5BNUXYZLnrRa/ahdqVQSFa3/LRwdHQgN\nCTLrflcXF4IC/QGIjI4lLf0MxSX6suTXXnOVWclGKpWKMXV6IC09g3O5uSQkJgEwMny4yYSoxowZ\nPRILCwtqa2tJSEwiqs4g9e/v02KCV0P69/eht5en/vNExRIbF49Op8Pa2kr+rVvD0dGBYXUDTUOo\nvi20uaJNeY6OW54dwyc/ryL2rdGMrosqFkUfx3XU9Ub7u5XsXcIKv6d4w5CdXHKA976vYVqkFxaA\n3eApBEffxy8pd/GK3wVMQOoK2f
n4KCb+VE4Pxy/4P7Un/z14nJsBcGDUIxO59dE/yJ1yN83lVn32\n2WdkZmai1WrR6XTN/l+r1VLTymL7oqIiXnnlFT766CP+85//8MQTT+Do6CgrKq1Wx+m0dCoq9RPk\nISHmLx2wtLQkKMCPo8cjiYmNp7TO8Jr70hgICwniZOopMjLPoq6bHB8yeGCLI8DGBPgP5e/d+9Bo\nNOytS+ywt7dj4ADzk1u6u7nSx7s3GZlnOXI8Qs5ECws1//MolUpCg4PY+88BYuMS6FvnLRcXF/HF\nF18Y/YaG/xr+be655v69f//+Fvu2detWtm7dytSpU1m8eDEjRuiNnspCRW2tBo1GS1zdyDfAfyhW\nVubLfXCQP/v2H6S6upr9dfOILi7ObVqX5+HhTs+ePcjLy+fg4aNygkJblrPoF8sHcOTocWJi4ymv\na6OtMhkaEsSJlJOczcqWkz4GDxqIQxu2p/L3G8qu3XuprdUYJYxdLsieolIpy0VIUGCbsqhDQ4KI\niIymuLiEw3VeYm8vT7PX5QEM6O+Dg4MD5eXl7PvnELW1GiwsLAgM8DO7DVtbW4YOGUR8QhJRMXHy\nMpKwC5CLs1nZpJxMlTOYA/392rQ0LCQ4gH8OHKKqqppTp5tmMLdEG41iOaeSHBk3/yEOfrqQVbFv\nMXq0PVBC4jFrRj7VcC1UJXGrdtB92jcYpvU05yKILnNmnm3dD65ywcsqg70pFdCSUaw5w5ZP3+Xb\n3w9wqlSDBCi6jeGdP5Yy9eszSF83f5v1gEkExH/CoeK7udm56flly5Zx/Pjxtn0FLVBYWMhLL73E\nhx9+yH/+8x8mTJgE6Bf1GjKsHBzszR6RG/D09AD0c3sG5WE4Zn4b9aELw4i6rW3YWFvj5uZKbm6e\nnCXq7t7LLC/FuC8eZGSepaoueUSpVNKrZ882twEYZaXm5+fz9JOPt6mdjqKxcbRQGQZIWvm78/Ro\n2/fv4OCAk1M3iotL6n9Dj7at71IoFHh5epCXly+3YWdn2yTbrzW86uSpqvrCZdLLSCbrvhPPlkNs\njbG2tqJ79+7k5JwzymIE2Lr9L0rLynFzdZETi45HRJFaF+GYfct0VCoVGZlnOXhY791fe/VoPDzc\nqaqu5o9N2wD94DGkLnN8246dlJSWGbcZGU1qXcLarFumY6lSkZGZxcG6OfuxV4/G08Od6uoaNmza\nqm9z0ABCggPrPUULZb1ctPE76NmjO5aW+kHXhX6PSqUST49epJwsl+XC1dVFTpgzFy9PD+ITkowy\njdsq5151ciRJEtXy52lbG46OjnTr5khpaVkTuWiNtoVPa84QWxmKX//ruTe8hN+XR1MOUJlGZE0I\ng4wigqXE7akldFB9poumvIBKLLEwvMNKS6wUlRSU1mKSighev+46PioaxWOvP0lwdgI9n/6JNSve\n5Poepm8DwM6HQO1Ros43P7HYVmVuLsXFxSQnJ1NcUgyAVlefznwha6eau6et7bRHG83do6Ad2rig\nfjQ9dimuUczIyODMmTOyrDUM5VzIMrr2+e7av40LaaejZNJAYWER+fn5FBcXy8fKyivIz88nPz9f\nnl+qrq6Rj9XULUmRdJJ8rGG6f0Fdm4VF9YUyyhu0Sd28dk1NgzbrBm46nU4+VlaXzS17ig100aXy\nm7bHu30hfelouWiNNnmKmpw48sKuoYeqJ5Mfm8BTD33JP2+MYWJhNNlDxmM0I6SrIr9AQU/n+kco\nLK1RoaM+H0KLVrLA0tJU52uIf/c2vghYxum3xmBPJQ6znmJ2nguD+vVofdNUK1e87MpJLqyFZjZr\n8fb2pqCgAAsLCywsLFAqlSiVSqO/LSws0Ol0HDnSegkmCwsL7rjjDl5++WUGDBjAydRTHIuIkUeD\noE+RlySpTT9Yc+FbdRvrpzZXgFxd0/ai5I2fq76AwuaN29BqtWg0mjZVbmnuuRZKC6ysrIx+R8O/
\nTf2tUCia/f1N/fvkyZPk5eU10yNjAgICePXVV5k9ezYKhYKl3/4of1YDNW38/iVJavKbtbUN/T2N\nf8NadDpdmwYVzT23rfLUHm3o72n+Xbjj9rlNjo2/9mrGX3u10bFBA/sbLXMBvff8xKMPNm3ztjlN\njo0bO4ZxY43LDA4c4MvAAcb3N9dmwzlFA239TbVabZOCGRcmF41kS932Gs3NPbdGXdOmaZrm9F1r\nU1iNae5dMZc2GEUtWYeTGDhqNhYocbvuSWaqZvDJlkyCHU/gHTTP2EgpVdhaajhbXZ/vY+nSGyfN\nn5Qbfj9NJYVqR/p0NxE6rUnhl8+ymbE1FHsAXRHxh8vxnexi3i7iOjUVGkvsbZo3QGvXrjWnFcrK\nylpceqFSqbjjjjt46aWXGDCgPtHAQlkfMjNkXtXUqDmdlk5/Xx+zng36JALQhxAKCgspLS0jMflE\nm0IKSUn6NqysrOjt5UVJSSmJySe4arTppRiNOZebJ88B9vXuTVZWNplnsygrK8PR0bzEIa1WS3KK\nfq1hH+/e5J8vkItkN1z/2BqJdZ/Hw72X3P9+Pj5tfnnaSuMlGY1pbAwNWNQZfE2dLKScPEVScoqc\nPGMOGZln5Xnpvn28yc4+x+m0dKqqq80Oc6nVtXL6fR/v3uScy60r35XepsIHiQaZ9PCguKSEkpJS\nkpJP0Lu3Z5vbsLS0pLeXJ0XFxSQln2g187Qhefn5nK8rYOHp6U6uGQOWSwltXejZQmmBp4c7Wdk5\nJCWnMHTIILPbOJGSKmde9/H2Ijcvj5SUVCZPvM7saFh5eYW81rBvX2+ysnMoKSklJ+ccHq1kjRqQ\nJInEJH3RBW8vT06lpVNTU0NS0gmuGj3S7M9jkAsHBwd69ezBqdNpJCWntJp52pD0MxkXHI42f2io\nTmNP/CBG9a6zo44jefJRH/a9+jorD9oR2q/xS+mAr78VZ3Lq47nKnmO4eVApSTl1iqviNHHacGYG\nOUJ1Gn/9foBzRgMeHRrLXvh76ecqa5K+5xP1f/hwqpl1QatzSK/tg585pSVawFSqv0qlYsGCBSQn\nJ/Pjjz8aGUTQzxMY7nd1dZEzsCIiY8x+dkFhEWnpGQAMCwuWEyJi4xKNqkm0hE6nI7IuSzPQfyjD\nh4UA+gzEtuzUYMgydHVxZsxVI+UMREMGozmknDwlV+QIDx8mj9AjoqLNXlJRXlEhL+Jvy4vSkQQE\nBLBmzRpiY2OZM2dOE6WusqgfIIWF6PuceTar2VJopjDIjX53kHAsLfWl7tpSYDwhMUnOPB41aoSc\npNMWmSwqLpYzj4eFBcsJNrHxiUYVcVpCkiQ58zjAfyjDh+sHB+cLCtu0HZFBJl2cnVtdnnQpotMa\nwqdKOdnsRMrJZsscmsKwvGrgAF9G1mVoVlZVye+IOUTHxumLN1hbc9WocLnIR0SU+XKRnXOO3Dy9\nPA8bFkJQXZJOZHSs2duj1dbWyln6IcEBDAvTvytnMjKNKiW1hkGevbw8mlRNao3WjWLZXh4LC2Xi\n7Gc4PnoifWXf0hr/Rz/k7trveOtkIEOaOApODL+pJ8cP5yDbOWt/nvzkJmI+/p4DqfH8/uEvOL34\nMbf0UqI+tZKnZ97I4w2XUFgP5dElI9nxzpesWfYWT7xexMtblzDC3NrGRcfY43ALV7XjbjKgN4b3\n3HMPJ06c4Mcff6R//+aXRjQcpel0OlmBp546bZYiq62tZdNm/WS/g4MDA/r7EhIUgEKhL5+2bfvO\nVg2JJEns3vMPxcX6NO3Q0CA86zIQAbZu+9OsVPbUU6flBbShocH6rNhAffLBocPHyDLDuJaUlvLn\nzt2AviqFs5MToXUGIisrhyPHIlptQ6vVsmnzdnQ6HVZWVgwd0vJaro6mNWNooOGcoo9PX7mC0abN\n280KdSUkJpN8Qp9dGRYShLW1Nf5+es/6n/0HZWXUEgWFReze
q8+eHTJ4IPZ2drJMnjqdJv++LVFb\nW8vGOpnUZx73JyjQH6VSSU1NDVu3/2nW4Gb33n/kqENoSBAe7r3k+ptbt//VbD3gxpw6nSYvYwgN\nCbxstg9riMFYKJUWDBk8CBtrayRJYtMW49Jqpjh6PFKuihQWol/qZfD4d+7aIy/PaInsnHMcPKRP\nCgoK9NdXlaqTi9i4BHntZEtUVVezpa6oQ4/ubvT28pQHS6WlZez8e69Zumr7n7vkxJiQoEB8ffrJ\ny682btlu1qArLiFR7rNhANoWWjeKVt6Mv3EY/UY9yUuTexrf4DyWd3cdYd9302laE16J963/xWvV\nSlLU9cfcrn+XDR9cD6fy8X1sFSseG4o1YOW/kNiMnxhj1/CLs6L//SvY9OaNDJ/8LF+v/oh5/c2t\niqMj449fsHrsHgb/y3Jzhh9TpVJx7733cuLECX744Qd8fVsONzVMIa5RqxkyeCD9+upH5pu37uDQ\n4WMm19AUFhWxfNU6eW81Q1UKBwcHrhkzGoD4xCQ2bNwih9QaU11Tw/Y/d8nGJiw0mJ49eqBQKOT2\nCgqL+GX5apNKVafTERUdy7rfNgL6hfqhdd7qqJHDcXLqhlarZcXq9SQlp5gU/MyzWSz7dRUVFRVY\nWVly3Tj9AnCffn0YPGggAH/v3seefftNesClpWWsXvs7aelnAJhw3bVYWVnK36HFRUy0MdcYGrCs\nK2ygVteiUCiYPPE6QL9IefnKNUY1bBui1Wo5ejxSNkS9e3vKZaquvmoU9vb2qNW1LF+5ltRTp5v9\n/iVJIi39DL8sX0V1XajVsF508KAB+PTTe1hbt//FwcNHTcpkUXExK1avIytLvz5y0gR9eM7B3p6x\nV+tlMjHpBL9t2NxsQXHQzw3t+HOXvHQgJDhQLpJvqIlbVFTML8tXGe0M0RCdTkdMbDzrftuIJEn0\n7NFdHlwZlhtdTFn4NxjkRqvTYmlpyYS6jNb0M5msWvOb0W4SDamtrWXfPwfZWVdzeOCA/vj69gNg\n/LhrsLa2oqKykmW/riYj86xJuUg+cZIVq9ah0Wjo1s2RUXXrZkOCAuRBym8bNhMZFWPSSOfln+fX\n5avlQhoTJ4xHoVDQvbsbI+qmB45HRLF1+19UVzefCVpZWcUfm7YSF58I6GW7WzdHlEolkybo35Vz\n53L170qD3YAaotFoOHz0OJs2bwf00wOGAgi1bdARHVwQvJx9j45j2S07+W6icyvXVpPwy7fkTXmM\n8T3aQaArj/Pi9Z9wzdZfmPovPcWysjKeeeYZXnrpJXx8zJ8LrKio5JPP9etFbpp+AwF+Q6muqWHl\nqnXk1FWlsbW1ISjQX64RWVOjJiEpmbS0M3I7kydeJy9GBb0w//nX33JoQ6lUMmTwQAYO6I9KZYFW\nqyMt/QwJicmyghs6ZBAzpk81mtBPTDrBH5u2yi+Md28vAvyHYmdniyRBbl4eMbEJco1JV1cX5s+b\nY1SVv6CgkOWr1snXODs5ERwcgJurKwqFvuxSbFwC5+oUnKWlitkzb5YVMehf8DXrNshzGtbWVgT4\n+9HHuzdKpQK1upbkEyc5mXpKvmfsNVdx9VWjjHYZGHft1Vw1yrxapBfK+++/T9++fZvMGbaGoVRV\n9+5uPFBXEDwmNp4t2/6Ur+nXtw/+fkOwsbGu2w4sl9i4eNlr6tmjB7ffNttoMXVubh4rVq+TR9du\nri4EBwXi4qKvW1pSUkpMXIK8i4q1tTXz5tyCl1f93F9NTQ0rV6+XB2C2NnUy6VUvk4nJJ4x2UZk4\nYbys8KBpGT9TMpmYlCwnhQweNIBbZtxoJJPJJ1LYsHGr7EH17u1JoL+fLJN5efnExMXL4UUXF2fm\nz5tDt26OVFVX8/mX31JbW8v1469lZPgws3+fzmLt+g2cTD1Nv77e3D5Pn8Rz8PBR9uytXw87oL8v\nQ4cMwsrKEp1Ov/VSXHyi
PH/ex7s3c2ffYlRR6kxGJmvWbZAHmL169iAoKIBujg5Ikn7QHRMTT1Fd\nZq69vT23z5st16EF/Tzj8lVr5QGbg4M9QYH+uPfqhUKhX0YTn5Akh7sVCgXTb5xCgF99boBOp2PT\nlu1ygXeVSoXf0MH4+vTDwkKJRqOvn5qUnCL/5iHBgdwweYLR+xUVHcu2HTvlv3369cFvaP27kpV9\njri4BHnP2l69enL7vNnY2tiQm5fP9z/+AsBtt85qteJSx++SUX6UNxYsJ+zLj5jWswVjp87keIo9\noQGu5iXRtEgZB197hK3Xfsab49o5dtpG1v32ByknT+Hq6sLdd8zD1tYWtbqW7Tt2El9XNcIU9vb2\nTJ44Xi6N1hB9vcEI9h881GJIQaVSMWJ4WLM7I4B+QnrL1j8pMVGqzsCA/r5Mnza52S2oSkvL2Lh5\nW6tzQW5urtx04w1ySamGaLVa/ty5m5jY+BbnH2xtbbhu3Fh5x5EjxyLY9fdeAB5/5IE27bB9McnJ\nOcePy1YAxns/nkw9xbYdu+RBhSmGDhnE1CmTsLZuGvYoLCpi46Ztre7Y3rNnD2ZMn2qk+AzU1tay\nbccuEhKTWgxz2dvbMfH65kuQSZLE0eOR/LP/UItZyRYWFowYHsq4sVc3m/F6JiOTLVv/bDX019/X\nh+nTpmBnZ4tOp2PLtj+Ji0/E0tKSxx+53+R2aZcSJ1NPs3b9BgDmzJohV56Ki09g19/7WtyYXKFQ\nEBwUwKQJ45vN3M7NzeOPzdtanYvz7u3FTdNvaHb9dFV1NZu37DAakDZHt26OTJ0yCV+fpgZHkiT2\n/XOQI8ciWqwuY2VlyVWjRzJ65IhmddWJlFR2/LXLaKeU5vAbOpipUyZiZWVFbW0tq9f+TkbmWVxd\nXXjo/gWtDmYvztZRNXmklbvi49bmAjoXhqaYjAIr+vQyf2PVjiLzbBa/LF8N6Efx48ddQ39fHyws\nLORdEGLjEozCCl6e+nj8wAG+rWaPGXZBiIyOpbDhhq7duhEaHEhAgx0rTFG/C4JhnzLDhq5W+PsN\nJTQkEFeX1gcXefn5cskujbZ+k+H+vj7yXn6tCaRhF4SYmHgqKuuFv1fPnoSFBjN40ABUKhXl5RUc\nj4iSF0gPHjSAWbfc1GofO5PlK9fK3vCokcMZFhYi15M17B94tm5+CAx7Wfob7aLSEjnncomKjuXE\niZPy2lilQsnAgf2b7KJiirKyMqJj44mNTaCq2ngBdlhokNEuKqZQq2vr9gKMabLJcEjdnnnmyGRa\n+hkio2JJP5OBQSYtLa0I8B8i76IiSZJ+8f2ho3JYffSo8CbLLi5VJEni2x+Wcf58gb7s4phRhAQH\n4mBvj0ajqdtrM5ZzubnyPXZ2doQEBRAUGNDqNkiSJHE2K5vIqBhST6UhSXXZrhYWDB0ymNCQIHr1\nbD0RpaiomKiYOOITklA3WKrRt483YSHB+Pj0bXVJT3V1tVztpqTBgMfFxYWwkCD8/Ya0WuFJp9PJ\n+782zGOwsbGR35Vu3RzR6fRVxPbtPyTXip4+bYpZFXou8/0ULw8SEpPZuHmbPAK3sLDAxsbmghZv\nC/QvRsNEDC8vD+bNmWlyc9JLhaqqKlav22C0Y7itrU2HFZG40pEkUKtrjNbojRgWyvXXXXtJFnIw\nRVlZGStXr5eXloB+w1xD9rqgbUiSfkqgoVc64bprCR9hXjhdGMWLRPqZDA4eOlo36hW0B3pPyo+x\n11x1yRtEA2q1mn8OHCIuLrHF0Jigbbi792LEsBACA/w7uysXRGVVFfv+OUhCYtIFLbwXNE8f796M\nDB/epvrMwiheZIpLSsjNzTN7PVdncOJEMt7e3tjZmV+Y+WKiUCqwtbGhj3fvNhUJvpTQarXyYnxJ\nd+m9gvn5+fz99y59QXStjunTp9PNjI21LzaWliq6u7nRvZl50suR2tpafV3g6upLUi5A74
Xl5p6j\nT5+WE1Y6E0tLS3r16tHmmr4gjKKgGW644QbGjRvH888/39ldEXQS+/fv55pr6neuT0pKYsiQlncs\nF3QN3nnnHfbs2cO2bds6uysdgghaC4w4fPgw27dv5/333zcqhCzoWjSekzNnIbngyqe8vJz333+f\n7du3c/To0c7uTocgjKLAiMWLFwNw/vx5vvjii87tjKDTaJz8I4yiAODzzz/n/Hn9mleDrrjSEEZR\nIHP48GF27Ngh//3BBx9QaaJajuDKprFRNLd2peDKxeAlGti2bRvHjh3rxB51DMIoCmQWLVpk9Hde\nXh5fffVVJ/VG0JmI8KmgMZ999hkFBcaFAF577bVO6k3HIYyiAIBDhw7x559/Njn+3nvvCW+xCyLC\np4KGlJWV8cEHHzQ5vmXLFo4fP94JPeo4hFEUAE29RAO5ubl8/fXXF7k3gs5GGEVBQ5rzEg1cad6i\nMIoCDh48yF9//WXy/LvvvmvW9lKCKwdhFAUGysrK+PDDD02e37x5MxERrW/7drkgjKKg1Swy4S12\nPYRRFBhoyUs0cCV5i8IodnEOHDjQopdoQHiLXQthFAVgei6xMZs2bSIyMvIi9KjjEUax09BQcCob\n0yksOkoysinv4Ex4c9canTt3jqVLl3ZsZwSXDMIoXgJoCjiV3VKSW8friE8//dRo952WuFK8RWEU\nOwU1qT8/zaK9ZUZ7R1ZnHONIlqEYsBKyl/N/r+0gr4OE/sCBA+zcubP1C+t45513hLfYRRBGsZNR\np/Lz04vYW2akIcg4dgRZRXSwjigtLW1xLrExGzduJCoqqv07cpERRrETUMf/j/lrR7JwwWCsAW3e\nPpa+9TIPXDea+7cUYJBvp1FP8ajFYu5bmUNH2MW3334bKysrrKyssLS0bFJcW6FQYGFhgVKpRKlU\nkpuby7ffftsBPRFcagij2Jmoif/ffNaOXMiCwdaAlrx9S3nr5Qe4bvT9bCmo1wYdqSO+/vprysvL\njfRD4/04G+oHhULBG2+80c696AQkwUWmSFo3rps0d1d5o+Ol0q7be0ohS7MlbcPDucukkR4PS4cq\nLk7v/Pz8JPQ7ukrPPPPMxXmo4JLj3LlzshwA0h9//NHZXeo6FK2TxnWbKzVVEbuk23uGSEuztcbH\nL6KOePTRR2WZCA0N7fgHdgLCU7wIaGtqkMfZJftZeiSc+cPM3Jap5wTutF/D+tTm9ljTURzxI5/v\nOkez4/iSCL7/chfnxCBf0EaEp3gx0VJTU//9luxfypHw+ZirIv6NjiiJ+J4vTemPVpCu0A2WhFHs\nSNRpLH9sPi98sJDZM5YQVQkUx3CgOoj+Zm9V6MgAv3L2JJQ1OVOd+Cl3LMzn2jHuNLt3u1MIk1yX\ncdcrBym58E8h6IIIo3hxUKct57H5L/DBwtnMWBKFXkUcoDqoP+bvZnrhOsIpZBKuy+7ilYPmaYjG\n4dMrEWEUO5DCnS/y/KEQbvZOJiqnErUEmqpSqpTWqNrwzVsq1ZRUNlJK6hN88dBqrv30aQJtTN1p\ngffcd1gQ/TCLD4tSbQLzEUbxYlDIzhef51DIzXgnR5FTqUZCQ1VpFUprVZuU8wXrCAtv5r6zgOiH\nF9NWFSE8RUEbKSfhjwNYjLqKkDu3kH50CSPtQeXSj97aTM5Xm9tOLflZKnw9rI2OFv/9Eh9YP8Vd\nA61avl3pzowXw1nz9AqyxEYHAjMRRvEiUJ7AHwcsGHVVCHduSefokpHYo8KlX2+0mecxW0X8Sx2h\ndJ/Bi+FreHpFVqvJOg09RWEUBW1Dk8uxQ8X0HTMA24bH3cZxh/MuDubXi5+uJJplb73OlwfyOLXy\nDV7/YgeZmrqTNansTQ/m1jCnBo0U8vcHfzHo4Un0MuMXtA+5jTHx77EuU1hFgXkIo9jxaHKPcai4\nL2MGGGkI3MbdgfOug9SrCB0l0ct46/UvOZB3ipVvvM
4XOzIxqIh/ryPsCbltDPHvrUOoCGEUO4Sy\niG94+enn+DROS+WOt3nx3S31Rk41kPsWebLs1xRZqJVOIdy18D3WpUuU7v6SxY9NxlulP1cZ9RM7\nw1/iZvcGD6iI47dDztwQ2vAlaAHHQG4acpJVh4vb5wMKrniEUexIyoj45mWefu5T4rSV7Hj7Rd7d\nUm/kVAPvY5HnMn5NkTUETiF3sfC9daRLpez+cjGPTfamTkW0i45wDLyJISdX0ZqKEJ6i4IJwHPYg\nrzwUjLXzOF7++GPe+b9pspEDFf0fXMqsPYtYfkbTUjNQk8jSJed4/rPpuDQ8XpLEkYoBDHZtNr2m\nGRwYEGZLckQWNW3+NIKuiNhkuCNxZNiDr/BQsDXO417m44/f4f+m1Rs5VP15cOks9ixaTmsqot10\nhMMAwmyTichqWUOIRBvBBVOevJ9cz2sY7NDMSbthLF77DJUbD1NssgUNOft24fzGD9znozI6oy7K\noMiqJ86WjW6pjuO9GwKZ8GZUo/JxKpw87Cg9nUdzSdsCQWPEJsMdTTnJ+3PxvGYwzauIxax9ppKN\nLbpubdcR1XHvcUPgBN6MapRVo3LCw66U03nma4gr1VNUtX6JoO1UcurQaawDn8fT1By38ygeeaKl\nNlR4THyCe5o5I+l0SAoVysaDNm0pZ5LiSfIsabLuSJK06HRXphALOgYLCwvZGAqj2M5UnuLQaWsC\nn/fEtIp4hBZVxAXoCG3pGZLik/AsaaIhkLQ6WlMRXSF8KoxiR6ArJuFQHr3vHNTsKPDfYunkiWNN\nFKW1jU7Yj+HzdInPm9yhoTSnGgcvFxo7lwKBKYRR7Dh0xQkcyuvNnYM6QkOY1hH2Yz4nXWqqIdCU\nklPtgJeL0BAifNoRVJ7kQKYL4UHdm19U/y9ROg0mxOokqSXmzvNUk5FQhe9wb0wuaRQIGtFwXlEY\nxfal8uQBMl3CCereERriAnREdQYJVb4M925ZQ3QFT1EYxQ5Amx9DRMVgxg82vyZFm3AK5pbg8/wV\nV2re9ZUp7EryZs5ot47pj+CKRBjFjkJLfkwEFYPH01Eqoq06ojJlF0nec2hNRQijKLggypL2kT/4\nFsJdO+oJ7kx5Jpz49TGUm3F1deIqtrg/wbz+HTMqFVyZCKPYUZSRtC+fwbeE02Eqok06oprEVVtw\nf2IeQkUIo9h+6PLZ8dxkZrxzhPi96QQ8OB3vDhSwXjPe4ta4j9l8rrXwSDF/v7eNsR/czwAh8II2\nIIxi+6LL38Fzk2fwzpF49qYH8OB07w6ZXjFgto4o/pv3to3lg/sHtNof4SkKzEdXRlpcPnbSbpbl\nLeCD2/t0qMBjF87CD3356tnVZJnUVzoKd7zAEpu3+XCyi6mLBIJmabgs40pVgBcTXVkacfl2SLuX\nkbfgA27v08GjVHN0hK6QHS8swebtD2mrirhSZUJkn7YXKl8e3rKTCfF5OD07hB6tlCRtD5zHvs2y\n8++wfHcu/5nQq6kRLjnGz/tG8c1XM3EXXqLgX3ClKsCLicr3YbbsnEB8nhPPDulhcilGe9Kajig5\n9jP7Rn3DVzNN7LTTiK6weF8YxfZE5cqAkI6bJWiKFX1nvsL/mTrtNJJnloy8iP0RXEl0hVDZxUbl\nOoCLqiJa0RFOI5+hLSqiK8iECJ8KBIJm6QpegUDQGGEUBQJBq1ypXoGgbQhPUSAQdFm6ggIUXDhX\nqkwIoygQCJpFGEVBY7pCSF0YRYFA0CxdQQEK2kZXGCgJoygQCFrlSlWAggvnSpUJYRQFAkGzdAWv\nQNA2ukL0QBhFgUDQLF1BAQounCt1oCSMokAgaJUrVQEK2kZXiB4IoygQCJqlKyhAQdvoCtEDYRQF\nJhGKsO1oCk6RXWn6vK4kg+xyczeH7lyEURQ0pqFM6HSXhxy3FWEUBUaInREuHHXqzzy9aC9ldZWV\ndWWn+GfTH+xOyE
Ndd42SbJb/32vsyLv0FUpX8AoEbaOhfhBGUdAl6ApC3yGo4/nf/LWMXLiAwdag\nSf+F++78nsIBwTgeXsjM/2wnTwc4jeKpRy1YfN9Kci6jr1cMkARgvMfmlaofhFEUGCGM4oVRvOkJ\nPuj3LDM9lUAFxz9YRPzUR5g+tB/D73iS0K3/5dsTen/RKuBhHs3/L68fbSHOegkgwqeCxnSFjaeF\nURQYIYzihVDC/qVHCJ8/DHsAXQFRe/Jx7eWgf8Gse9LPKpWdieV11/dkwp32rFmfKodVG6Irjvj/\n9s48PqrqbPzfmclkDwlhzb6SkJ2w7/uigiKbgIpotdWqrdW+v7cVrbu2xbq29a2tWkVBFtlBUUB2\nCEkge0JCNggkISFkXyaZmfv7Y2ZuZkhCJpgQkPP9fPgwmbn33DN3nnue8zznOc/Df/+xn9JuGHOq\nT33KR/tLuZ6mhPtUcDXm44NQioLbgttB6LufKlKONREd5GT4U9/AlSawUZqUihIbmqis18pnuASH\nU3cwg9qrm2rK5MMHV1E+ZUK3FIZ2HTYb9zUP8afj1T+pHWEpCkBYioLbEGEpXgfaRmoaldjZGO+d\n0pF+9hLNWqMi0WvRSA64O5vV9FYraa5uuMqCayb7n4+zYcqH/C7Kvnv6pvLhvr8+TPITrxDXRW+t\ncJ8KrkasKQpuO4RSvA5s+uLvraPocpPhb+VAhs8cxOWiaoPSay4mRxPGXZF95FNayi9iE+iBnXk7\nVT/ywjt2PPPQEGy7sXvKwfN5fvRGfrfuIl35RYVSFFyNsBQFtx23w0yw++nH1Afd2H+83Kh07Bnx\n3F8Z88Of+ff3x9j5jw84c+/b/CLYZClqyD1USMzS4biatXLlx3fYG/IEswd192PpxLDlE0h/+xuK\nuvCTijVFwdXcDkrRpvNDBLcTwlLsGE3BVv78Xhy2Qd44qlxRFV8k6vd/YGo/G4Y8+jKe935FzrMv\nMNQGVD6L+deWKWSdPkPtvR+yLbhv68PWkMTn+0bzwpuDzVqvJ23LCdzu/JeFouwuXKLuYejZfxJX\n9Qx+7l0/X1iKArg9lKKwFAUWiECb9mnO+w/3zfkAx8deZtUzv2GF92aef/8wV4yPkE3Qr/h40UFe\nXnsOOZzGdgBhYycx2lwhoiHz4zcp/cPfubuv+RWqyTpZT3CoO90QX9MW52CGO5zh1EWN1acI96ng\nam6HSbNQigILbgeh7zK6Ijb89n84OvJ5fhXtCOhoKL+EMvwehruZDnJkxCubeLZhB3FVHTelLTnM\nfrfX+ezRAEs3TXMl5yttGeimtjyhKY2374xi5htJdB4n00Ta23cSNfMNkq4+2MYVD8ca8sva2wTS\nPsJ9Kria28FSFO5TgQVCKbZFd34H73+rYPaeMbgBUM3pndkMvms8HhZmnRtjf/2ba7Zl4zGL3zzS\nzgeSHr2kMNvGYbp4Deey0snyrLZir6GOmnNZpGd5Ut3mYAlJp0d/nQafsBQFYKkUJUlCkqSf3eRJ\nKEWBBUIptqWpNItSmxCeijJGj9Zl8u1pRyY+F2QZPfpTULvi6aIhqabF8n2nCfyjUOIfVjXixIR/\nFCK1d7C2hpImZ7z6qtv5sH2E+1RwNeZKEQzWoo3Nz0uN/Ly+jeAnI9YU22LvFUNg30yc1EpAS9H2\nd9hYGcl/Ipy77yJKV0KH2fJ+bjV63K65rtFcsJFVLx4h5rW/sSLISrXcdJ6MxkAe92l//2NaWhpa\nrdZCEWo0reuPZWVlpKamWnzet29fvL29rbu+4GeBUIqC247e25Khpa5Kg4ObEypAp9WhsumRkJMu\no/K9n3++HMdf336P2lA1RT/EUz90FSOuI4qzY1yJWRDD5Y1p1PzOz+imbZ/mCwfZuu4TfhjyGEte\nicGabf4NOfvJ8lnCuH7tf/7OO+/wxRdfdHj+mjVrWLNmjcV769atY/ny5VZc/XrQ
odWpuElEQGCk\nPaX4c0ME2ggs6BWlqMnnq0di8ennjJ17MCMnT2L6E5sp0nZ+6o3BieinPmXt6md57NHlxNTW43fX\nxKvWE386g+94ltHpm0mpu/ZxzpM+Iq/mIL/wtvbxbSJz/W4G/2YZQR30+cUXX2wz4F2LsLAwli5d\navXxXUFfFc/qmX1Rhz/O5vPWBwYJeh5zTxIIpSi4DegV96nCiWH/e5Dy5loKfvyYF36/mq//eR8+\nN4MfoymLz577DR+lGbLV6Ir38N9TPty/ONQqC61LDJrPW0vTeH9XaaeZZ6rTE1FE+lvXh6ofefu7\nybzzWHCH2z2Cg4N54IEHrO7qSy+91GaA7Baas3hv4WOcmLmR4y/Z8f69T7C74hZZ29YWseX1v3Gs\nyvi3roHqhtZnqCn1X7zy5RmaeqVz3cPtYCkiCQRmLFiwQAIkQLrzzjt7uzu9jq74K2mWe5D06FdH\npMNbP5SevvsO6Yk12VJTT12w8pD03OTl0roL2o6PqU+TPv9gp3S+xYr2dBXSnsfHSw9tLpGu0aIk\nSZKUk5MjqVQq+ffv6F94eLik0+m68q26hE5r1rZOJ/XclbqTJun0C+Ok+3dVSJIkSZUHn5cWP/E3\n6YtPX5VW/uJf0pkmSZKkOunYb0ZIjx2o7dWe/hQ2b95sIQsVFRU37uJ18dJfH7lbGh8dJkVMflba\nV9Ek5X56vzQyarQ0c8nz0g9l3SMpQikKLFi0aJEs8HPmzOnx69XF/1V65O7xUnRYhDT52X1SRVOu\n9On9I6Wo0TOlJc//IHWTnP8kdI2Xpbzk49KJlAKpUtPz19MUbpZe++teqbQzLWYFVXHvSqv+my7V\nW3n8gw8+2KlSXL9+/U/vWDvcCrLQIRXrpDEDnpNSWiRJ0l2Q/hnpK/0pUyNJUpX07UI/6f79NYbj\nCldLfkHvSed7s68/gW3btlnIQllZ2Y3tgK5SOvJ8tKRWx0p/OlYg7V81V1r+fxlWy7c1CKUosGDJ\nkiWywM+aNeuGXFNXeUR6PlotqWP/JB0r2C+tmrtc+r+M7hRzgbVkZ2df01qMiIjoWSvxFpWFqh0z\nJPv7DkmNkiRJtfulRTZ+0v+VSpIkNUpxjzlJPh9cMByozZT+x36o9NkN1iXdxc6dOy3koaSk5MZ3\nQlMgfbl4oIRygDTij4elym4WR7GmKLCgNzJWKN0m8sr2z5h/8XUmjf5f9H/4iF+FO96QawssCQkJ\nYdmyZR1+3mNriUZudlnQVhWScvww8TkVhgLRzVVcqtPRmJ9EU4AXagCphRZT2TAA9DQ3GAOGVG4M\ntsnhXBfLeN0s3BRrirbeTJo/jUDHclJ3f0t6TfeuOQulKLCgtzbv23pPYv60QBzLU9n9bTrdLOeC\nLvCnP/2pXcUXERHBkiVLevz6N6Ms6Cvj+ejxhTzwh//wY2ENZQlrePPVj3jnVw/yab4WdZ9+UFFl\nyHvrFML4AC2lVTpAw5XLfRgZadq/00xNnS0O3Vkb7AbS+9GneioOvMRTe2awI2kdi8r/wvxHviC/\nG4OUhVIUWNArSlFfwYGXnmLPjB0krVtE+V/m88gX+Yhg/N4hNDS0XWvx5Zdf7vmUXjehLNSeepfF\nM1+kZOFHrP34TZ69fx7zHniW56Yk8d4PAxkfaIfb+PtwP3yKagClLyv+PIPDH3zFge8+4b91j/Gn\nqcbaJ9WnOex6P1O7dY/rjeNqS/FGTpz15ft4cck0Rs/9OzUjJhIcfAcrZ3twZdvjzLjjQd482cle\nJmvpXm+s4FbnoYcektcLJk6c2MNX00lle1+QFk8OlBycJ0nvZjZJ0pXvpIc8kFD7S9MeeEOKu3UD\n9W5psrKyJKVSKctCZGSkpNfre/CKN6csaC9tlx70HCAt+Ppi2+jdso3Skvu/kcokSZK0BdLfx42T\n/lnYelTjxVTp+MlsqdLsxIpNc6WIVSk9F73c
w/z4448Wa4q5ubm93aVuRyhFgQUPP/ywLPDjxo3r\n7e4IepHly5fLsrBx48be7k4vUC+d/L2/pAp9VUptT4s15ko/JpTJyrLpzEfSyl99IeV1sFVGe2mn\n9Jv735PSb1WNKEnSoUOHLJRidnZ2b3ep2xHuU4EFvZfmTXCzYVpbjIqKYvHixb3dnRtPQyprvy4k\n6MFFhLaXYtY+iGkjB8gJEexCf83Hr4xA1UHJSq0+iD98+jsiui2L/I3npgi06WFuhpwhgpsIUSVD\nYCIsLIwlS5awePHin115IKtoLCKlvA8xI71pGxejo670CjYDB2BvZlrYeUTg10FzdoPD8OqZnt4w\nhFIUdDs1NbVkZmVTX19/Uyodta0do0aPQalQ4O3tzd59B3q7S+2itlXj6TGYIcFBt+SArdfrOZub\nR0npJVqaWzo/oZe4Z/5C+ri637RyoFQqcXJyIiJ8KC4u3Vi1BMDGmf7O9jjZteNQa8hg46ZaFv5m\ngFmqPR1lJzawMcWNu39xF37XEWGq0+nIys6hvLwCbcvNJxeVlZU8+fRvUSiUKBUKsnPyKCkt7+1u\ntUGpUuLi4kJURBgODg5dOlchSaJQ2o2gvqGB/fsPkZF1RtSm60bc3fsybcokQkOCe7srVpOWnsGR\no3FUVVf3dld+NiiVSqKjIpg5fQq2tt2030FfxY9PjeLXDp+T+O4EXOT3Kzm55msqZ/2KO7ws7QpN\nxisETq5hd/G7DOuCm1SSJOITThMXn0B9/S26ifEmRK1WM2J4DFMmTbA64b1QijeAxsZG1n79DWXl\nhhmVg709gwcPQqUSS7rXgwTU1zdQWnoJMBTDXbzwHoYEB/Vux6wgIfE0e/cflP/2GDwIRydHbj1b\n9+ZAp9NTUlJKk7H2Y0CAH/cturdLFT+uhb46gQ+eWsVxvwd4bF4YtiXpJOVpCFnwMPOCLZMK6HU6\nqnbeScT6P5CzfkarEu0ESZLY9+MhEhJPy+95e3liZ28n5OI60Wp1XCwuocVobcdER3LXHbOs8ioJ\npdjDSJLEl+s2cOFCMQqFghnTpxAbE4VabaiA3tjURFp6JsXFJfI59vb2REeG4+Ex2KofUafTcTY3\nj5yzebJLVqlUEjIkmCHBgVYNEJIkUVxSSlp6Bk1NrZECXp4eREWGY29vXU2I6poaklPSqKyskt/r\n06cPw2Iice/b16o2NJpmMjKzOF90QX7P1taWyIgwfLy95HtyqayMHbv2UF5+GZVKxaOPrKB/v5t3\nA9iZ7LNs2bYTAF8fb+bMnsGA/oYCh5IkUXThIukZWTQ3t+7K8/XxJiI8DDs766yfK5WVJKekU1NT\nI7/Xt68bw2KicO3Tx6o2mowyedFCJu2IiozAs0symU/O2dxWmVQoCQkJYkhwkNUyWVJSSmp6Jk1N\nrbUlPI0y6WCUyebmFuLiEzh6LA6AkSNimT1zmlXf1Tq0VBWmkHSmBreIUUT7OFtUG9FXHeXNX3+C\nNDma82//nvQ3LxO3vIPCle1wPC6eg4eOAjA0dAgzpk3B1dXwW0mSREHBOTLPZKPVGmqpKRQKAgP8\nGBoaIo8jnVFWfpnU1HTq6uvl9wYM6E9MVCTOzk5WtVFf30BqWjqXylrdpU6OjkRHRzJo4ACr2tBq\ntZzJPktefoHsMVOpVIQNDSEwwN+qbEmSJHG+6AIZmWcsnhU/Xx8iwofKnoLGpiYOHDxCckoaAHNm\nTWfE8GGdtv/zUIq6eqqb7HF1uvkqkp4vusBX6zYCcPe8O4iKCAegurqGw0ePk5mV3eFi9aCBAxg7\nZhQR4UPb/Vyr1XLsxEmSU9I6dLk4OTkROyyK8WNHd1ghOz0ji7j4RMrK2l8bsLGxITwslEkTx3U4\nsBaXlHL02Aly8wra/RwgwN+XCePH4uvTfrX2uvp6jhw9QXpGljzDu5p+/dwZPXI4w2KiUCgU1NXX\n89nnX1FX
V98Dg2FbTI9LV9cxJUniv1+spfRSGV6eHty/bDFqtRpJkkhJTSc+4RSXK660e65arSYy\nIoxJE8fh7NT+AHa+6ALHjsdRUHi+wz4EBQUwafxYPD092v28pqZWlknTAHw1AwcMYMyYEbIcX41W\nq+V4XDxJyWnUmw3A5jg5OTIsJooJ48Z0KJMZmWeIO5lgMQCbo1KpCA8LZfLE8bIC2X/gMCfjE7Gx\nseGZp5+weiLx06jjwEMB/M+EZBIeqeW1IfNxO5zF7/ys8wK1tLTw94/+Q1NTE2FDQ5h/910olUr0\nej0JiUkknk6iurqm3XPt7eyIjopg4sRx2Nu176vNzcvneFw8Fy4Ut/u5QqEgNGQIkyaMZcCA/u0e\nU1FxhSPHTnAm+2yHcRBenh6MGzuakCHte2s0mmaOHj9BamoGjU3tF8/q08eFEbHDGDN6RLvKUZIk\nkpJTSUg8TcWVynbbsLU1PisTxuPk5IgkSWzeuoOcs3m4u/fl8cce7vTZ7QGlqKeh/BLNrh649bhM\naina+Te+yHOnf81RNiSG8fba5xlprd+iCyQmJhIUFERfK60dE1u37yLrTA6DBw/ikYfuR6FQUHqp\njA2btsiKTK22ISgwEDs7W9liu3y5Qm5jzOgRTJ862eLHbGxqYtPmbRbC7u/nKw8Q1dXVFJ4rkj/z\n8fZiyaL5FhafJEns//EQ8WZumwH9+8kWqkajITevQB4gnZycWLZkAYMGDbT4jmeyc9i+8ztZuTs5\nOhLg74fKRoVOp6fw3Hnq6gzZJhQKBXPvnE10VIRFGxUVV1i/cQvVRgtHpVIRFOiPg4MDkiRRVlZO\n6aUy+fiY6EjunDMTpVIpz7Tt7ez47dOPdzjQdge1tbVMnjyZl156iXvvvddq5XjxYjFffLUegBUP\nLMXH2wu9Xs933+8jJTVdPm7QoIEMGjgAhUJBY2MjefmF8n117dOHZfctpN9V1nBqWga7v/tBVtjO\nzs74+/miUinRaXUUFJ6jvsEgayqVivl338nQ0BCLNi6VlbF+41ZZkdnY2BAcFICdnZ1ssZWbyeSo\nkcOZOX2Kxfdvamrimy07LCx8fz8fXF0N2Vyqq2soPNeqtL29PVmycL5FIIQkSfx48Agn4xPl9/r3\n7ydbqBpNM3n5+bS0GGXS0ZGlSxYwePAgGpua+PAfH6PT6bjrjlkMi4my6rf5STRn8LznLJziiljl\n+B+Gj87lH9+vZMiQKAZZMf4lJafy3ff7UCqVPP3rx3B2dkar1bJtx7fknM2Vj/Py8qCfu7s8EczP\nL5R/7wH9+7F0yUL69LEc+OLiE/nxwGH5bzdXV3x8vFAqlbS0tJCfXyi7nW1tbVm88B78/Xwt2jhf\ndIFNm7ejMR5nZ2dLUGAAarUavV7PhYvFFl6hqZMnMH7cGIs26urqWL9pqzzpVigUBAT44eLsjCRJ\nVFZWUXThonx8cFAAC+bPs7CC9Xo9u7/7gbT0TPm9wYMGMrCDZ8XN1ZVl9y3E3b0vJaWX+O8XawF4\neMXyDieFJq5j9NDTcKmElv5euF5lmDVl/J1l837P9rLprM39lvs9lICGnA0fcir2tywP6eYNOvpi\ndq/eCv86zBMR9+AwOpI3jjzJttn1fPfxPrxWPkR0NwWkbd68mY8++oinn36a5557jn79rHOPlJdf\nBiB8aAgKhYIrlZV8veEbGhubsLOzZcqkCURFhmNnNtOTJImLF0s4fPQYheeKOBl/CrWNmsmTxgOG\n2bi5Qhw1IpZRo4bjZhx8TFRVVROfcIrE08kUXbjIN1t2sOy+hbLSOHTkmKwQA/x9mTRxPF6eHpYD\nnUZDWnomhw4fo76+nq83bGblQ8vp6+YGQH7BObZu340kSbi5ujJ1ykRCQ4Lb7HfMzSvgwKEjVFRc\nYde332Nrq5YH5traOtZt+Iba2jpsbGyYPHEcMdGRbaLGSkovcfRYHGdz80
hJTUelUjFn1nSGBAdx\n8NBRmjQa6usb5IlBT5GcnMzChQsZNmyY1crRZPE4Ozvj7eWJJEn8sO+ArBCHBAcycfxYPDwGW5zX\n2NhISmo6h4+eoLqmhnUbvuHhFffLkZZnss+y69vvAYMVPW3KRIKDAtsUi87OyeXgoaNUVVezdftu\nli6xIzDAsHmgsqqKrzdspqGhEVtbNVMmTzS4zK+WyeISjhw9TkHheRIST6O2sWHqlImAQSbNFeLI\n4cMYNWq4LCcmqqqrSUhMIiHxNBcuFLNpy3buX7pYlskjR0/ICtHfz4dJE8fj7eVpcX81Jpk8coz6\nhga+3riZlSuW4963Lz7enhSeK7KYVPYotr7c9UAon+z4in9d3kmV8wC2bSri/71knUI2yYW/ny/O\nRiWxY9ceWSFGRoQxbuxo2c1uoq6untNJKRw7cZLyyxWs37SFhx5YKk96TyelyArRY/AgpkyeSIC/\nr8V9bGlpIetMDgcOHaG+voGN32xjxf33yTJ46VIZGzZtpaWlBUcHB6ZOmUhE+FALZSVJEoXnijh0\n5BjFxSUcPHwMtVrNqJHDAcNvtX7jVjmeYvzY0YwYPqxNpPDliivEnUwgNS2D3LwCtu34lkUL7kap\nVCJJEnt+2C8rxJAhQUwcP5bBgwdZtNHQ2EhKShqHj56gqrpaflYGDxqIo6MDDQ2NXK640qlS7HKk\nh750A/NDx/NSfP3VH7D3iwKm/m0vqWc3s8zD0HTVj//DUyfHcKdJITYXse/fH/HJ5//hw/e+4lTl\nT9iWoPTlVweOsSrCDurySbjsy2h/B7DxZPqUMl5+ehMl3bTrQalUUlNTw1tvvYW/vz9/+MMfKC/v\nPBTZNKM1CevOXXtobGzC3t6eFQ8sY+SIWAuFCIaZlLe3J8vuW0RkRBgAR4/HyUrw+IlWd8i8u+Yw\na+a0NgoRwM3NldmzpjP3ztmAYdZ34mSC/Pr4iXgAoiLDWbpkYZvBBwwumlEjYlnxwFLs7exoaGxk\n5+49xu/Wwrbtu5AkiQH9+7HyoeWEh4W2WS8yrG8GsXLFcjyND9yOXXtoaGwE4Lvv91FbW4dabcMD\nyxYzdsyodsOoPQYPYvHCexg5IhYwPPi5efkWg3dHbteewKQchw8fztatW68ZVdxitLYd7O1QKBTk\n5hVwOikFMCiQxQvnt1GIAA4ODowdM4oHli1GrbahtraO777fCxgU5o5d3wHg4TGYlQ8uI2RIcBvX\nk8nVuPKh5QwY0B9Jkti2fZd8r3bt/p6Ghkbs7exY8cAyRo2IbeOOUygUeHt5snTJQqIiDa7T43Hx\nshKMO5kov55752xmz5reRiGCYQY/a8ZU7p47B4ALF4o5duKk4fXFYo4eN6wLRoaHsey+RRZryCbs\n7OwYOSKWFQ8sw8HensbGJnbs2mP8zN7ifvc8Lkz6YB//fHghj721m7zE93ntpbsYZOXIavLC2Nsb\n7ndKajpnsnMAmDJpAvfMu7ONQgRwdnZi8qTx3LfIMCG7fLmC/UYlWFlVxfd7fwQgMMCfB++/j8AA\nvzb3Ua1WEx0VwcMr7qdPHxe0Wi1btu1Cr9cjSRJbd+ympaUFZ2dnVj60nGFmsRAmFAoFAf6+PLh8\nCcFBgQDs3X9QnpQcOHSUsvJyYzDcfKZOmdju1pn+/dyZd9ccpk+dDMDZ3DySklMByDmbJ68Ljh45\nnEUL7mmjEAEcHRwYN3Y09y8zTLJqamrZ88M+FAqFPMZaIxddVoqNhacp1V5g2/pUzNWivjyBk36P\n8MSiKUR5Ohkabkjg1acKeOKFybgB0EDCK0v5u93dPPLwL3lsag6/ffgzCn+C/CptbFBqi9nx0ruo\n39nO/4YbfBZ2kU/wO9WLPPtD1fU3bn4ds4Gmrq6O1atX4+/vz+9//3tKS0s7PM8kiJIkUXqpTA5e\nuGfeHQzswIdvfs25d86WH4rTySnodD
qSUgzCMmpEbBs3ZHvEREfKC8xJyWno9Xp5QB44oD933TGr\n0wXuQQMHMG/uHYBhICsrLyczK5smjQalUsmSRffi5HjtEj/2dnYsXjQftVqNVqslLT2T6uoacvPy\nAZgzawZeXp7XbEOhUDBrxlS8vQ3HnUpKobe3KZorx23btrWrHE33V2/87FRSMmBwIc6aOa1TS9PL\ny5M7Zs8AIDevgOrqGlLTM9FqtajV6jau8fZwcnRkyaL5KJVKmjQaMrOyKSsvl11X8+bO6TRgQqlU\nctcdsxg4wHDc6aQUgzwZB7ARsTHEREdesw2AqMgIRhknN8kpaeh0Olkm+/fvx9y7ZncqkwMH9Ofu\neQaZLC4uofRSWS/JggpndxdsAZWzG45dGFVNE0iTIjp1OhmA0JBgxo8b3en5QUEBTJ1ssNYzMs/Q\n1NREUnIakiTh5OTUxg3ZHq6ufVi84B7AECiXX1BI4bkirhjX7RYtuLvdCY45NjY2LJg/V1Z4SSlp\naDTNsnU3aeK4DtcbzRk7ZiThYaGAQbYkSeK08Vnx9fFmxlUu+/bw8fZizqzpgEGh1tTUdimKt8tK\nsa5Ez4LnJlC1bT2pZlqxMjkR97FBmD+W1YfeZF34M8wxTXSqj/H2pxrmzvRCBTiG3kFM8jt8mfMT\ncuBrS9j3wceULPsPfx2Tx64MU6ecGfvrWRx8aTuXrr91mfYe0IaGBt59910CAwN55plnKC5uu5it\nUBp+Dr1xkRgMe+uCAgOsuq5KpZIto6wzOaRnZslrkaNGDbe6/6ONx9bV1ZGekcWZ7LOGNkYOtzp8\nfUhwIH37ugGQlJQqD4RDQ4fg5tbWUm0PZycn2fpNSkqR74mTk2OHAUVXo1AoGD1yBAD5+YXyOmRv\nk5yczIIFC2TlaI6sFPV6qqqryc8vBGD0yBFWr0uGhw3Fyckw8Tid3HrvIiPCOgzAuRo3V1eGhg4x\ntpFKUpKhjb5ublZvaVGpVIwaaZDJM9lnSc/IkteMR48aYVUb0Cq/9fUNpGdmkXXGYCGNGhFrtUwG\nBQbg7m5Y5zfdj1sJ0/fUanWUlF6S3aljRo20Wi5iY6OxsbFBq9WSkpZBqtElP3xYtNXBRoMHD8LP\n1weA00mpJCUbJijeXp54deJuNKFWqxkRGwMY1rlT09JpaWlBpVLJ71vDGKMMlV+uIPNMthw8NnqU\n9c9KZEQYjkZvU3JqmtXXhi6vKdaRl+XC1Ace5/iHq1if+hbjxjkB1WQm2DHmGXNLoYG09d/Tf+6/\nMRnL2tJTJNe6sczBqGBs+uJle55DOfUQfo0fT3OO3R+u5j9bj5FXo0UCFH0m8Netf8b25bHM+ryO\nAS7/5H+bPfl/xxO513iaXfBsItM/4ETVSu51a9tsRkYGtbW1tLS0oNVq5f/be52UlNRh9xobG/nw\nww/5+OOPefTRR/njH/+Ij49BwJQmS1Gvl9cJYqIiuxS9GBE+lB/2HUCn05GSYhB4fz/fdl2mHdHX\nzQ0/Xx/OnS8iOcVgLdrY2BA2NNTqNhQKBTFRERw8fIys7BwaGgzuz+iozi0Dc4ZFR5KUnMqVyip5\nIIyMCO/S3rIhwYE4OBhcZwUF5+T3X3/9dXJzz6LT6eR/er2+3dfX+uxa53SGSTnGxsbKa44qM6V4\nNtdgGTvY2zMkONDq76xSqYiKDCfuZCJZZ3KoqjJs/rfGMjMnJjqSzKxsSkpK5a0b0dERXZLJ8LBQ\nvt/7I1qtVnZt+fp4y5Mma3BzdSXA35eCwvOytWhw9Vo3OQKDTA6LjuTHg0fIzjnbYWTzzYppr7JO\nryMnxzA+uLv3xcvLOkUEBg/M0NAQ0jMySU/PlJcluioX0dERnDtfRG5eviwLXW8jkoOHj6HRaEjP\nyAIMVm9Xssp4eAxmwID+lJdflidtjo4OBAdZZ0iA4VmJjAwjPuG0fF+tpWtKUXOO1IZY7g0awy9G\nV/
PHtcm8OW4Czg0FnNYM436L4Kca0g62EPvLVv+xtq6CBtSoTM+eUo2tooGKmmusA9Wf4rXZ93F4\nyiv84bURfHHfoxSvTuCfM/0IHOSO3b/OIf2rg3MdA4jSxZN0Wc+9bm0tvRUrVlxT2XUVjUbDRx99\nxCeffMLKlStZtWoVCjO3mV5nWODsaiCIra0tjo4O1NbWodNfXxvm55jCqh0dHbC1tW6fU2sbBkVs\n+i4Abm5d64urmVWp0xujxay0NE2oVCr6uLjQ2Ngk3xOAxMQEjh8/3qW2eoqkpCRZOb7wwp8Aw703\n3TsXF5cubzI3TYTMQ+O7Kgvmk6nrlUm1Wo2TkyPV1TVyX65PJi2/j6OjQ5e3UrQnkwB/Xv0eZeWX\n8fP14Xe/eQKATVu2y2vpf33zZWxtbUlKTmXN2g0A/PLRhwgfGkptbR0vvfZnAKZPncTdxqWDv/zt\nfS5dKsfXx5tnf/trADZv3cHR44Z10b+88RJ2dnYkp6TJ0ca//MVDhIeFUltXx0uvGtqcOmUi8+fd\niUpp+P11Wp3Fs93VLT+mZ9D0PAFtolE7bcNMLkxLAF39TZ2dnGSr9aeMVW6ufSgvvyy30cfFxao9\njJZtGL6ProNtJB3RJaWoLUmjbPgkBtgMZM5TM3nm8Y848voEZl1JpnjoNCwCxfWNlFcoGOjWegmF\n2g4b9LQuuejQSSrU6o4EQEP66uX8M3IN+W9NwIkGnBc9w+KyvoT4D6DT4cTWHS/HOs5caQHaRr5a\nu/G1qzQ3N/Pll1/i6urKkFDDmp+kv/W3g94KKJU3117VUaNG8fLLL+PrH0B2bsFNme/258q8u+bQ\n1KTBybnVgzVqRCz+voZtBypjxKuvrw/3LzVUAfEcbAh2sre3k98z34I0947ZhjadWtscMTwWX6Nn\nyBRF6+vj3dqmMYDK3q5tmyobo1LU//wSa9+qdEEp6rgYl8WQsYtRoaTf9N+y0GY+H+wuIsYlG5/o\nZZZKSmmDg1rLhaZWZaDu642r9gfqTIE12gauNLvg27+DmaEmhy//Xsz8b2NxAtBXkh5XR+Ccvp0r\nRAB9M/VaNU727SvdntjPZmtryy9/+UtWrVqFp6cnn/73SwAkSY/S6CrpaDNuRzQ3N8uuSpMbruY6\n1tFM1zXNuBoaGmlpaenS5KDamK9TaZairrq6xupsNQDVVa05P00z5eou5gHV6XTU1NYa22jty5gx\no+nXzx2VSiX/UyqVHf5t7Wem1y0tLaxatarT/pmU4dy5cwFDEAQYLCLTvautrZVdhtZiypdqPmuu\nrq7pNMipvTag9Xes6aJMtrS0yGvbSlkma7vUBpjJk5lMNjc3dyl/aXsyCchRsub4+/m22YvXz70v\n/dwt5dewrSC2zfntt+mDv5+PxXvu7n3ltc5rtWn67XU6vSzH1dU1SJLUJWvR9GyrzCaFNTW1XbIW\nzeVCoVAgSVKXx6q6+no5otb8+3SVKvn7GGWrttbw7HTBWjR9H5VSibYLk1HrtUJzAQfTQ5i82HiK\nyxh++2QAo156ja+X+DJ2+tWRb84ERtiyr6QJk5WmHDiBe0PeJ6tEAwPtoD6fNN1oVka7QFMBe78r\nJuruCQyWe6VHqx5EhJfhgddkfcoHzb/nv3dZmcqrqYTCFl/GdrCL9osvvkCj0WBjY2PxT61WW/xv\nY2PD22+/zQsvvNDhpdRqNY888ggvvviivJ4IWLhPhwQHkZySRkpaOmPHWL+QnpF5Rh48Y6IjuXCx\nmILC81RX11jtmqisquLcecNm/piYKIpLStFqtWRmZVu9biBJEilpGQAMDQ2hpKSUktJLpKSmE+Df\nUcGctqSkGdZF3fu6ERoawom4eNLSM7uUtDc3L5/GRkNmjAB/Pw4fPQHAH//4PP3bCWHvLmpra6+p\nFK9WhiZaA20khgQFsnffARqbmjibmy8HvnSGTqeTo/nCQkPIzjnL
lcoqUlLTZWvEGkx7Iz0GD8LD\nYzCnk1JISctg3NjRVsukKeuNUqlkWEwUFy4Wc+58EZVVVZ1GKpqorq6RgyhioqIoLS1Dp9ORkXmG\n2GHRVrVhyggEEDIk2CId3K1Aq1LUETIkiONx8Vy5UsnFiyVyhHVnNGk08jaOyIgwampraWxsIiU1\nnUkTx1ndl9RUw7MdHBSASmVDds5ZUlLTu7SuaArysbOzIzIijJLSS2Tn5NLY2Gj1umJJSam8vzs2\nNprzRRdoaGgkN6/AqghWMNxP05pmyJBgMrPOWP0dOle7tYd4angssxY/S+K4WfjJCsuOiCffZWXL\nJ7x1NoqhbSYkroy8ZyCJcSXIOy7sIvjtB/eQ8v6nHMtNZ+u7X+L6/PssGKSkOe9rfrdwHk+bb6Gw\nC+PJN8fw/V8/YuOat/jNa5W8+O2bjLJ2UlyZwEHnBYzvwIgJDg4mIiKC0NBQgoKC8PPzw8vLi4ED\nB+Lu7o6LiwsODg6o1eoOB2sbGxseeeQRsrOz+fjjjy0UIpgH2kgMN0ZgXblSSV5+x+nQzNHpdCQa\nw7TDQkOIjAiTXTfmmWg6IyHBcKyzsxNREWHyQJyQmGR1TbTcvHw5e0XssGj5+5zJPmt1xYf6+gbS\n0rOMbcQQOyxKft9awTVVFADDPqye3qxvDaNGjWLXrl3Ex8e3UYjQGlCh1+txc3MlMMAfMCQItzap\nlKHkmME6i42NJnaY4f6nZ2RZXVmhurpGjjweHhsjK5/Kyio5AKgz9Ho9CacMa/FDQ4cYol+N+TNN\nv4s1mBJgOzk5EhUZRpgxmUPiKetlMj+/UE75NbwLEY43C+ZK0cNjsLwl5mTCKavlIjk5jZYWLSqV\niujoSDnwLSkl1SI36LUovVQmT5qHx8YwPNYgFxcuFlvkZb4WLS0tckR6dGQ40VGRqNVq43Yb6yOD\nTyacAgxbc8KHhsqWfXwX7kl6RpbsXetqZqPOlaKtD9PmjcB/7G95Yc5AyxPcJrN6/0kOf3I3befm\nSnyW/j+81n9N644LJf1mrGbbOzMgr5zAp9az7qkw7ADbiFWknv+cCY7mX9qWoMfWsfONeYyc8xz/\n2vAey4KszYqj5/z2L7F96hFCuyHd3NVmu1KpZMWKFWRlZfHZZ58RENB+ZJS8JUOvZ/CggfKMfqcx\nkfU1v4Fez+7v9srHDY+NQaVSMSza8CMnJJ4mLT2j076npmXIijU2JhqlUikPIGXl5Xz3/b5O17rK\nysvZuduQOcXby5NBAwcQNjQUezs79Ho9mzZvl4WwI5o0GjZt2U5LSws2NjZERYXj5upKkDGqbM8P\n+y2SULeHqaKAaW/d8NgYrHxOeoTOlKEJ8y0Z0DqAF124yL79Bzt92C8Wl7Dnh32AYW+am6srUVHh\n2NjY0NLSwqbN2+SUXR3R0NDIps3b0Ov12NnZETY0lEEDB+Bt3Bu6a/ceOfNIR+j1er7ds1dO2TU8\nNgalUklsjGEQPXU6mdS0zmUyLSNTntQNi45CpVLJ96T8cgW7v9vbqUyWX66Qkxd4egxm8KCBvSoL\n14ONqjXQRqFQyPcgO+csJ+LiOz0/P7+Qg4cNycQjwofiYG8vTzTr6urZsn1Xh3lsTVTX1PDNlu2A\nIZVgYIA//n6+uBsjiTdv2ylHOneEITXdbtmFPsy4HcS0/erw0eOczc3r9PvExSeSmZUNGGTL/J6c\nL7rA/gOHOn1WLlwslpMXDAkOpE8fF7oiFp0rRbtAFr/2Cf9ZNcvMrdl6ukvIaCLc27eilH6/4B9z\ndvPuoSqL923cQ5kwZxoxnvZmHWjizMHzDItoG4WocvMlcJB91zZVNpzm/74M473fhbdTNbvrmAY1\npVLJsmXLyMjIYM2aNQQHX7uOn61ZNQyAe+bdacjC0dTEmrXrSTydjEZjOZuTJIkLF4vZsGkL6RkG\nd9mEcWNkd8qE8WPkgWzn7u/Z
u/9gu377qupq9u47IKcB8/H2YtzYUYAhEGDcWMPm4NS0DDZs2srF\ni8VtBE6j0ZB4Kok1X22gqakJRwcHORLP1lbNvffMRaFQUF5+mc/XrCPrTE6bWb6poO6aL7+WZ513\nz71D3kd015xZuLg409KiZe3Xm4g7mUhjY1sFW3qpjM1bd8gWxvDYaIYEB1oog54KnrqakSNHsnPn\nzk6VoQlTv3Q6Hc3NLQwJDpRn4wmnkvhmyw65FJY5jY2NxJ1MZO3Xm2hp0eLi4sxdc2YBhgwe95g2\nr5eUsubLry0qpZjQ6XRkncnh8y/XUVZ+GYVCwYJ75sqRx6bfokmjYc1XG0g8lSTnujRhSD1YzIZv\ntspKb9zYUfIWCPPXu779nh/2HWjXe1BdXcO+/QfZacxA4+3lyYTxhlyZ3t6eTDDmzUzPyGT9xi1c\nuNCeTDaTeDqZNV99TWNTEw729twz707jZ4Z+q3sw/2130lrRoRFJkhgWE0VoiMGLc/DwMXbs2tNu\nyrq6+noOHz3Ohm+2otfr6d/PnRnTpwDg3rcvc4yJHvLzC/ly3UYKCs+1uY8tLS2kpmXw+Zp11NTU\nGjbg3zsPpVKJQqHgXuPG/9raOj7/ch0pqeltMkYZ0rydZ+3Xm2RPw8wZU+WEI9OnTmKgMYvSN1t2\ncPDwUXlPqzkVFVfY/d0Pcmq6IcGBDDd6MUJDgmVrLz7hNFu27eSSWR5kE42NjZyIS2Dt15vQarX0\n6ePCnXNmIkkSzcYx1hq56PkqGXXxvP7wWoZ/9B5zB15DrTUXkZjjRGyku3VBNNekluOv/ppvp/yd\nN6Z2LYF3R7z//vscPXqUV155hchI633s+/YfJD7xNH36uPDk44+iVCopLb3Ehk1b5STNarXaIvly\nsZlPHQybWadPuyoheGMjmzZv58LF1oQBAf6+cnh6VVW1RfJlH28vFi+aL5fbgfbruA0c0P+qhOBm\nyZedHFm6ZCGDr0oInnUmhx27zBKCOzkSGOCPSqVCp9NReO48tbWtCcHvumNWm3WKyxVX2HBVQvDg\noAB5HaL0UpmF0oiOipAz8dzIhOAajYa9e/cyb968Lp1XV1fP3z/6N5IkydVSTFaXuWU1eNBAOYVV\nY2MTuXn58n3t08eFZfctalMe6+qE4C4upoTghvufX1Aou1dVKhX3zLuTsKFXJQS/VMb6q5LUBwcF\nWiQELzOTyVEjYpk5Y2qbhOCbNm+3SO7s7+crb7eprq62qOLh7eXJkkXtJAQ/cFh2oYEh4bWnMSev\nKUm9aXB2cnTkviUL8OithOA/kfLyy/znszUALF+6iAB/P1paWti+81tyzrZaVt7envQ35luuq6u3\nKL3Uv38/lrWXEPxkIj8ebE0I3tfNTU4Irm3RkptfIK/B2tqqWbRgPgH+lkFI584X8c2WHWYJwe0I\nDgqQK7wUFV3gillC8CmTJ8gTGxN1dXUW+U8VCgWBgf64OBu261VUXLGQmaCgABbcM89iu1h7CcE9\nBg+So3gbGw1rjnLyfNc+LLtvEf3c+1JaeonPjAnBV65Y3mkyghtTOkpTRkGdOwH9btDsTVvF+Qpb\nfAdZH5HXGV2N0jRxueIK//7kcwBmz5rOSGO6tarqao4cPXHN0lEDBwxg7JiRsgviarRaLceOG0tH\nNXRUOqrzMj1p6ZmcjE+0GPTMkcv0TBrfYemoi8UlHD0eR941Skf5+/kyYfwYOXPG1dTV1XPk2AnS\nMzJlRXw1/dz7MmrkcGKHRaNQKKhvaODT/35FXV0dI4YPk9M73Yxs3rqD7Jxc+vVzZ+WK5dgbFU5y\nShrxiaep6LB0lA0R4WFMnji+w9p3584Xcez4SYuJ0NUEBQYwccLYDgeF6poaDh+5djmzgQP6M2b0\nCKIi208vqNVqOX4inqSU1I7LmTk6EhMTycTxY69dzuxkYofuXEMNvlAmTxon70c7cOgoJ+Lija
Wj\nHm+TU/hmZc1X67lwsRgvTw8eWL4EGxsb9Ho98QmnOXU6ucOMTXZ2dkRHhjNp4rgOU/zl5uUbciVf\nbJttC0ylo4KZOGFch2knL1dc4cjR42Tn5Hbo0vb09GD82FGEDGnfc6bRNHP02AlS0jI6DIYylI6K\nYczokR2WjjqdnEpCwikLRWyOqcza5ImtpaO2bt/FmeyzuPd14/FfPtIbpaMEV7Pxm61yncFZM6bK\na4NgmOG0LehqT3RU1wq65pzNsyzoqlQSGhLcpYKuxcUlbQq6enl5WhR07Yyq6uo2RYZdXfswLDqq\nTYh6R2g0GjIyz8gL/9BaZNjXx1u+JxUVV9i281suXSpDpVLxi5UPdFgT7mag6MJFvjRuEvfy9ODu\nuXfI98RUZDgtPbPdwqnWDvBXrlSSnJpm4U43FRm2NgOSqfD1RbOB1FT42vOqKiodYSp8bT6Qmgpf\nhwzpgkyWlJJ61UBqKnxtsjC1Wi3xiaflQr3DY2PkPLG3Atk5uWzeugOAwEB/7pozS7b6JEkiv6DQ\nosalociwP2FDu1JkuJyUlLZFhodFR+LsbF0pobr6elJT2xYZjomObFNOriO0Wi1Z2Tnk5ZkXGbYh\nPKzrRYavLshteFZaC3I3Nzdz8PAxEo1BYbNmTpPz7V4LoRRvABqNhrVfb5LrATo5OeHt5dHlbCYC\nA5IkUV/fYFG3b+G989rUCLwZSUlNZ/d3P8h/+/p44+Tk2OUMJgIDWq2Oi8XFslXq5+vD0iULetSF\n3hOciIvngFGpKxQK/Hx9cHCwF3JxnbS0aCm6cFGeTEVFhjPvrjlW3U+hFG8QDQ2NfL93v5zrU9A9\n9OnjwvSpk+XM+rcCaekZHDx8TF5nFfx0FAoFEWFDmT17eodV6G92Ek4lcex4XKdR3ALrUalUxMZE\nMWP6FKuNEKEUbzCVVVWkp2dRV19/06b8qqurw87OFrW6O+J2ewZbW1s8PAYRFhpyS1rcpojQktJL\nVu8lE7RFqVTi7OREVGR4l/Pn3oy0tLSQkXmGsvLLN7Q2aFdpbGjAoQtZlG40KpWSPi4uREdFdrgO\n3xFCKQra8MILL+Dl5cWTTz7Z210RCAQ3GXFxcbz66qt89913vd2VHkEoRYEFly9fJiAggL59+5Kb\nm9ulHJQCgeDnz8yZM9m/fz+JiYmMGGF9/cxbhS4XGRb8vFm9ejV1dXUUFRXxxRdf9HZ3BALBTcTB\ngwfZv38/AG+88UYv96ZnEJaiQObSpUsEBgbSYNzzGBAQQE5Ozi0XyScQCHqGSZMmcfRoa5Rsampq\nl5KZ3AoIS1Eg85e//EVWiAAFBQWsXbu2F3skEAhuFvbs2SMrRDBsjXrrrbd6sUc9g7AUBQAUFxcT\nFBTUJttESEgIWVlZXa56LRAIfl6MHDmSU6dOWbynUqnIyspiyBDryp/dCoiRTgDAW2+91W76pZyc\nHNavX98LPRIIBDcLW7dubaMQwbC16M9//nMv9KjnEJaigKKiIoKDgzvcLxceHk56errIriEQ3Ibo\n9XpiYmJIT09v93O1Ws3Zs2fx87O+0PjNjLAUBbzxxhvX3ECemZnJ5s2bb2CPBALBzcKGDRs6VIhg\nSDiwevXqG9ijnkVYirc5BQUFhIaGdpo9IyYmhqSkJGEtCgS3ETqdjvDwcHJyrp2e0t7envz8fDw8\nrl2W6VZAWIq3Oa+//rpV6aRSUlLYuXPnDeiRQCC4WVizZk2nChEMtTT/9re/3YAe9TzCUryNyc3N\nZejQoR3WzruakSNHkpCQ0MO9EggENwMtLS2EhIRQWFho1fFOTk4UFhbSv//NW77NGoSleBvz6quv\nWq0QARITE3+2+Q4FAoEln3zyidUKEaC+vp7333+/x/pzoxCW4m1MZmYmLS0t6PV6+d/SpUspKDAU\nRF6yZAlPPvmkxedeXl5ERLRfdV0gEPx8KC4upra2Fq1Wi0
6nQ6fT8dZbb/HNN98AEBwczMcffyx/\nptPpcHR0ZNq0ab3c85+GyN91GxMeHt7mPfMq3B4eHkydOvUG9kggENwseHp6tnlv8ODB8msnJyem\nT59+I7t0QxDuU4EF5plruuJaFQgEP3/Mo89v1nqwPxWhFAUWmBfs/bkKvUAguD7MJ80/15U3oRQF\nFpgLvVCKAoHAHGEpCm47hPtUIBB0xO0waRZKUWCBcJ8KBIKOMLcUhftUcFtwO8wEBQLB9XE7jA9C\nKQosEO5TgUDQEcJSFNx23A4zQYFAcH3cDuODUIoCC8SaokAg6IjbwVIUad4EFly4cAGNRoNKpcLZ\n2fmWT+4rEAi6j5ycHAoKClAqlTg6OjJhwoTe7lK3I5SiQCAQCARGhPtUIBAIBAIjQikKBAKBQGBE\nKEWBQCAQCIwIpSgQCAQCgRGhFAUCgUAgMCKUokAgEAgERoRSFEB9Aqt/cQ8TYsKJnPIc+69oyPvs\nAUZFj2HWfavYWy428QsEtzW30Rgh9ikKDOirOPriFKb/TcUfD25h6u6n+cRnNZ88EY5jb/dNIBD0\nPrfJGCGUoqCV5kK+emAMK7ZIjPjfzex7cxJuwpcgEAhM3AZjxM/s6wh+ErbeTJo/jUDHclJ3f0t6\nzc/HJSIQCLqB22CMEEpRYERPxYGXeGrPDHYkrWNR+V+Y/8gX5Df3dr8EAsHNwe0xRgilKEBfvo8X\nl0xj9Ny/UzNiIsHBd7BytgdXtj3OjDse5M2Tdb3dRYFA0IvcTmOEWFMUCAQCgcCIsBQFAoFAIDAi\nlKJAIBAIBEaEUhQIBAKBwIhQigKBQCAQGBFKUSAQCAQCI0IpCgQCgUBg5P8Dxxn+0UMlxz8AAAAA\nSUVORK5CYII=\n", "text/plain": [ - "" + "" ] }, - "execution_count": 42, + "execution_count": 3, "metadata": {}, "output_type": "execute_result" } ], "source": [ - "Image.open('./ladder_net_arch.png') # the ladder net architecture diagram from the Curious AI paper" + "Image.open('./ladder_net_arch.png') # the ladder net architecture diagram from [1]" ] }, { @@ -516,7 +530,7 @@ } ], "source": [ - "!wget http://deeplearning.net/data/mnist/mnist.pkl.gz" + "!wget -N http://deeplearning.net/data/mnist/mnist.pkl.gz" ] }, { @@ -826,7 +840,8 @@ "cell_type": "code", "execution_count": 11, "metadata": { - "collapsed": false + "collapsed": false, + "scrolled": true }, "outputs": [ { @@ -859,10 +874,8 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "### References: \n", - "http://arxiv.org/pdf/1411.7783.pdf \n", - "Curious AI combinator - http://arxiv.org/pdf/1507.02672v2.pdf \n", - "MILA UDEM combinator - http://arxiv.org/pdf/1511.06430.pdf" + "## Author: \n", + "Adrian Lisko - https://github.com/AdrianLsk" ] } ], From 098b334a268be15c8221d0b599c6e144743469a1 Mon Sep 17 00:00:00 2001 From: 
AdrianLsk Date: Fri, 24 Jun 2016 15:26:52 +0200 Subject: [PATCH 06/10] added semisupervised functionality, pseudolabels and fixed the double batchnorm --- examples/ladder_nets/LadderNets.ipynb | 484 ++++++++++++---------- examples/ladder_nets/ladder_nets.py | 214 +++++++--- examples/ladder_nets/train_ladder_nets.py | 52 +-- examples/ladder_nets/utils.py | 5 + 4 files changed, 442 insertions(+), 313 deletions(-) diff --git a/examples/ladder_nets/LadderNets.ipynb b/examples/ladder_nets/LadderNets.ipynb index 8f43335..845dd48 100644 --- a/examples/ladder_nets/LadderNets.ipynb +++ b/examples/ladder_nets/LadderNets.ipynb @@ -106,11 +106,14 @@ "source": [ "# %load ladder_nets.py\n", "from lasagne.layers import InputLayer, MergeLayer, DenseLayer, DropoutLayer, \\\n", - " GaussianNoiseLayer, NonlinearityLayer\n", + " GaussianNoiseLayer, NonlinearityLayer, standardize, BiasLayer, ScaleLayer, \\\n", + " ExpressionLayer\n", "from lasagne.layers.normalization import BatchNormLayer\n", "from lasagne.nonlinearities import *\n", "import lasagne\n", "\n", + "# from ladder_net_layers import CombinatorLayer, SharedNormLayer\n", + "\n", "import theano\n", "import theano.tensor as T\n", "\n", @@ -195,14 +198,14 @@ " w_u_lin.dimshuffle(*bc_pttrn) * u + \\\n", " w_zu_lin.dimshuffle(*bc_pttrn) * z * u + \\\n", " b_lin.dimshuffle(*bc_pttrn)\n", - " \n", + "\n", " sigm_pre = w_z_sigm.dimshuffle(*bc_pttrn) * z + \\\n", " w_u_sigm.dimshuffle(*bc_pttrn) * u + \\\n", " w_zu_sigm.dimshuffle(*bc_pttrn) * z * u + \\\n", " b_sigm.dimshuffle(*bc_pttrn)\n", - " \n", + "\n", " sigm_out = T.nnet.sigmoid(sigm_pre)\n", - " \n", + "\n", " output = w_sigm.dimshuffle(*bc_pttrn) * sigm_out + lin_out\n", "\n", " return output\n", @@ -223,7 +226,7 @@ "\n", " v_sig_pre = w_v_sig.dimshuffle(*bc_pttrn) * u + \\\n", " b_v_sig.dimshuffle(*bc_pttrn)\n", - " \n", + "\n", " v_lin_out = w_v_lin.dimshuffle(*bc_pttrn) * u + \\\n", " b_v_lin.dimshuffle(*bc_pttrn)\n", "\n", @@ -239,7 +242,7 @@ " bc_pttrn = ('x', 0)\n", " 
elif u.ndim == 4:\n", " bc_pttrn = ('x', 0, 'x', 'x')\n", - " \n", + "\n", " if combinator_type == 'milaUDEM':\n", " return _combinator_MILAudem(z, u, combinator_params, bc_pttrn)\n", " elif combinator_type == 'curiousAI':\n", @@ -260,11 +263,11 @@ "\n", " if len(z_shp) != len(u_shp):\n", " raise ValueError(\"The inputs must have the same shape: \"\n", - " \"(batch_size, num_hidden) in case of dense layer or \\n\"\n", - " \"(batch_size, num_feature_maps, height, width) \"\n", - " \"in case of conv layer.\")\n", + " \"(batch_size, num_hidden) in case of dense layer \"\n", + " \"or \\n (batch_size, num_feature_maps, height, \"\n", + " \"width) in case of conv layer.\")\n", "\n", - " self.combinator_params = _create_combinator_params(combinator_type, \n", + " self.combinator_params = _create_combinator_params(combinator_type,\n", " u_shp[1:],\n", " self.name)\n", "\n", @@ -277,8 +280,47 @@ " return _combinator(z, u, self.combinator_type, self.combinator_params)\n", "\n", "\n", - "def build_encoder(net, num_hidden, activation, name,\n", - " p_drop_hidden=0., shared_net=None):\n", + "class SharedNormLayer(MergeLayer):\n", + " \"\"\"\n", + " A layer that calculates mean and standard deviation statistics from the \n", + " incoming2stats layer output and uses them for normalizing the output of\n", + " the incoming2norm layer.\n", + " \n", + " \"\"\"\n", + " def __init__(self, incoming2stats, incoming2norm, axes='auto', epsilon=1e-4,\n", + " **kwargs):\n", + " super(SharedNormLayer, self).__init__(\n", + " [incoming2stats, incoming2norm], **kwargs)\n", + " stats_shp, norm_shp = self.input_shapes\n", + "\n", + " if len(stats_shp) != len(norm_shp):\n", + " raise ValueError(\"The inputs must have the same shape: \"\n", + " \"(batch_size, num_hidden) in case of dense layer \"\n", + " \"or \\n (batch_size, num_feature_maps, height, \"\n", + " \"width) in case of conv layer.\")\n", + "\n", + " if axes == 'auto':\n", + " # default: normalize over all but the second axis\n", + " 
axes = (0,) + tuple(range(2, len(stats_shp)))\n", + " elif isinstance(axes, int):\n", + " axes = (axes,)\n", + " self.axes = axes\n", + " self.epsilon = epsilon\n", + "\n", + " def get_output_shape_for(self, input_shapes):\n", + " return input_shapes[0]\n", + "\n", + " def get_output_for(self, inputs, **kwargs):\n", + " to_stats, to_norm = inputs\n", + " assert to_stats.ndim == to_norm.ndim\n", + "\n", + " mean = to_stats.mean(self.axes)\n", + " inv_std = T.inv(T.sqrt(to_stats.var(self.axes) + self.epsilon))\n", + "\n", + " return (to_norm - mean) * inv_std\n", + "\n", + "\n", + "def build_encoder(net, num_hidden, activation, name, p_drop_hidden, shared_net):\n", " for i, num_nodes in enumerate(num_hidden):\n", " dense_lname = 'enc_dense_{}'.format(i)\n", " nbatchn_lname = 'enc_batchn_{}_norm'.format(i)\n", @@ -295,105 +337,107 @@ " # dense pars\n", " W = shared_net[dense_lname].get_params()[0]\n", " # batchnorm pars\n", - " if activation==rectify:\n", - " beta = shared_net[lbatchn_lname].get_params()[0]\n", - " gamma = None\n", - " else:\n", - " beta, gamma = shared_net[lbatchn_lname].get_params()\n", - "\n", - " net[dense_lname] = DenseLayer(net.values()[-1], num_units=num_nodes, W=W,\n", - " nonlinearity=linear,\n", - " name='{}_{}'.format(name, dense_lname))\n", + " beta = shared_net[lbatchn_lname + '_beta'].get_params()[0]\n", + " gamma = None if activation==rectify else \\\n", + " shared_net[lbatchn_lname + '_gamma'].get_params()[0]\n", "\n", - " shp = net[dense_lname].output_shape[1]\n", - " zero_const = T.zeros(shp, np.float32)\n", - " one_const = T.ones(shp, np.float32)\n", + " # affine transformation: $W \\hat{h}$\n", + " net[dense_lname] = DenseLayer(net.values()[-1], num_units=num_nodes,\n", + " W=W, nonlinearity=linear,\n", + " name='{}_{}'.format(name, dense_lname))\n", "\n", " # 1. 
batchnormalize without learning -> goes to combinator layer\n", " l_name = '{}_{}'.format(name, nbatchn_lname)\n", - " net[nbatchn_lname] = BatchNormLayer(net.values()[-1], alpha=0.1,\n", - " beta=None, gamma=None, name=l_name)\n", + " net[nbatchn_lname] = BatchNormLayer(net.values()[-1], alpha=0.1, name=l_name,\n", + " beta=None, gamma=None)\n", "\n", " if shared_net is None:\n", - " # add noise in dirty encoder\n", + " # for dirty encoder -> add noise\n", " net[noise_lname] = GaussianNoiseLayer(net.values()[-1],\n", " sigma=p_drop_hidden,\n", " name='{}_{}_'.format(name,\n", " noise_lname))\n", "\n", - " # 2. batchnormalization learning, \n", - " # alpha set to one in order to depenend only on the given batch mean and inv_std\n", + " # 2. scaling & offsetting batchnormalization + noise\n", " l_name = '{}_{}'.format(name, lbatchn_lname)\n", - " net[lbatchn_lname] = BatchNormLayer(net.values()[-1], alpha=1.,\n", - " beta=beta, gamma=gamma, name=l_name,\n", - " mean=zero_const, inv_std=one_const)\n", - "\n", + " # offset by beta\n", + " net[lbatchn_lname + '_beta'] = BiasLayer(net.values()[-1], b=beta,\n", + " name=l_name + '_beta')\n", + "\n", + " if gamma is not None:\n", + " # if not rectify, scale by gamma\n", + " net[lbatchn_lname + '_gamma'] = ScaleLayer(net.values()[-1],\n", + " scales=gamma,\n", + " name=l_name + '_gamma')\n", + " # apply activation\n", " if i < len(num_hidden) - 1:\n", " act_name = 'enc_activation_{}'.format(i)\n", " net[act_name] = NonlinearityLayer(net.values()[-1],\n", " nonlinearity=activation,\n", " name='{}_{}'.format(name, act_name))\n", "\n", + " # classfication layer activation -> softmax\n", " net['enc_softmax'] = NonlinearityLayer(net.values()[-1], nonlinearity=softmax,\n", " name='{}_enc_softmax'.format(name))\n", "\n", " return net['enc_softmax'], net\n", "\n", "\n", - "def build_decoder(dirty_net, clean_net, num_nodes, sigma,\n", - " combinator_type='milaUDEM'):\n", + "def build_decoder(dirty_net, clean_net, num_nodes, sigma, 
combinator_type):\n", " L = len(num_nodes) - 1\n", "\n", " # dirty_enc_dense_1 ... z_L\n", " z_L = dirty_net['enc_noise_{}'.format(L)]\n", - " \n", + "\n", " # batchnormalized softmax output .. u_0 without learning bn beta, gamma\n", " dirty_net['u_0'] = BatchNormLayer(dirty_net.values()[-1], beta=None,\n", " gamma=None, name='dec_batchn_softmax')\n", - " \n", + "\n", " # denoised latent \\hat{z}_L = g(\\tilde{z}_L, u_L)\n", " comb_name = 'dec_combinator_0'\n", - " dirty_net[comb_name] = CombinatorLayer(*[z_L, dirty_net['u_0']],\n", + " dirty_net[comb_name] = CombinatorLayer(z_L, dirty_net['u_0'],\n", " combinator_type=combinator_type,\n", " name=comb_name)\n", - " \n", + "\n", " # batchnormalize denoised latent using clean encoder's bn mean/inv_std without learning\n", " enc_bname = 'enc_batchn_{}_norm'.format(L)\n", - " mu, inv_std = clean_net[enc_bname].get_params()\n", " bname = 'dec_batchn_0'\n", - " dirty_net[bname] = BatchNormLayer(dirty_net.values()[-1], alpha=1.,\n", - " beta=None, gamma=None, name=bname,\n", - " mean=mu, inv_std=inv_std)\n", + "\n", + " to_stats_l = clean_net[enc_bname]\n", + " to_norm_l = dirty_net[comb_name]\n", + " dirty_net[bname] = SharedNormLayer(to_stats_l, to_norm_l)\n", "\n", " for i in range(L):\n", " # dirty_enc_dense_L-i ... z_l\n", " z_l = dirty_net['enc_noise_{}'.format(i)]\n", - " \n", + "\n", " # affine transformation\n", " d_name = 'dec_dense_{}'.format(L-i)\n", " dirty_net[d_name] = DenseLayer(dirty_net.values()[-1],\n", " num_units=num_nodes[i],\n", " nonlinearity=linear, name=d_name)\n", - " \n", + "\n", " # batchnormalization ... 
u_l\n", - " dirty_net['u_l'] = BatchNormLayer(dirty_net.values()[-1], beta=None,\n", - " gamma=None,\n", - " name='dec_batchn_dense_{}'.format(L-i))\n", - " \n", + " dirty_net['u_{}'.format(i+1)] = BatchNormLayer(dirty_net.values()[-1],\n", + " alpha=1.,\n", + " beta=None,gamma=None,\n", + " name='dec_batchn_dense_'\n", + " '{}'.format(L-i))\n", + "\n", " # denoised latent \\hat{z}_L-i\n", " comb_name = 'dec_combinator_{}'.format(i+1)\n", - " dirty_net[comb_name] = CombinatorLayer(*[z_l, dirty_net['u_l']],\n", + " dirty_net[comb_name] = CombinatorLayer(z_l, dirty_net['u_{}'.format(i+1)],\n", " combinator_type=combinator_type,\n", " name=comb_name)\n", - " \n", + "\n", " # batchnormalized latent \\hat{z}_L-i^{BN}\n", " enc_bname = 'enc_batchn_{}_norm'.format(L-i-1)\n", - " mu, inv_std = clean_net[enc_bname].get_params()\n", " bname = 'dec_batchn_{}'.format(L-i)\n", - " dirty_net[bname] = BatchNormLayer(dirty_net.values()[-1], alpha=1.,\n", - " beta=None, gamma=None, name=bname,\n", - " mean=mu, inv_std=inv_std)\n", - " \n", + "\n", + " to_stats_l = clean_net[enc_bname]\n", + " to_norm_l = dirty_net[comb_name]\n", + " dirty_net[bname] = SharedNormLayer(to_stats_l, to_norm_l)\n", + "\n", " # corrupted input ... z_0\n", " z_0 = dirty_net['inp_corr']\n", "\n", @@ -401,13 +445,15 @@ " d_name = 'dec_dense_{}'.format(L+1)\n", " dirty_net[d_name] = DenseLayer(dirty_net.values()[-1], nonlinearity=linear,\n", " num_units=num_nodes[i+1], name=d_name)\n", - " \n", + "\n", " # batchnormalization ... 
u_L\n", - " dirty_net['u_L'] = BatchNormLayer(dirty_net.values()[-1], beta=None, gamma=None)\n", - " \n", + " dirty_net['u_{}'.format(L+1)] = BatchNormLayer(dirty_net.values()[-1], alpha=1.,\n", + " beta=None, gamma=None)\n", + "\n", " # denoised input reconstruction\n", " comb_name = 'dec_combinator_{}'.format(L+1)\n", - " dirty_net[comb_name] = CombinatorLayer(*[z_0, dirty_net['u_L']], name=comb_name,\n", + " dirty_net[comb_name] = CombinatorLayer(*[z_0, dirty_net['u_{}'.format(L+1)]],\n", + " name=comb_name,\n", " combinator_type=combinator_type)\n", "\n", " return dirty_net\n", @@ -424,7 +470,7 @@ "\n", " # dirty encoder\n", " train_output_l, dirty_encoder = build_encoder(net, num_encoder, activation,\n", - " 'dirty', p_drop_hidden)\n", + " 'dirty', p_drop_hidden, None)\n", "\n", " # clean encoder\n", " clean_net = OrderedDict(net.items()[:1])\n", @@ -438,20 +484,49 @@ " return [train_output_l, eval_output_l], dirty_net, clean_net\n", "\n", "\n", - "def build_cost(X, y, num_decoder, dirty_net, clean_net, output_train, lambdas):\n", - " class_cost = T.nnet.categorical_crossentropy(T.clip(output_train, 1e-15, 1),\n", - " y).mean()\n", + "def get_mu_sigma_costs(hid):\n", + " shp = hid.shape\n", + " mu = hid.mean(0)\n", + " sigma = T.dot(hid.T, hid) / shp[0]\n", + "\n", + " C_mu = T.sum(mu ** 2)\n", + " C_sigma = T.diagonal(sigma - T.log(T.clip(sigma, 1e-15, 1)))\n", + " C_sigma -= - T.ones_like(C_sigma)\n", + " return C_mu, C_sigma.sum() # trace(C_sigma)\n", + "\n", + "\n", + "def build_cost(X, y, num_decoder, dirty_net, clean_net, output_train,\n", + " lambdas, use_extra_costs=False, alphas=None, betas=None,\n", + " num_labeled=None, pseudo_labels=None):\n", + " xe = T.nnet.categorical_crossentropy\n", + " pred = T.clip(output_train, 1e-15, 1)\n", + " N = num_labeled if num_labeled else pred.shape[0]\n", + " class_cost = xe(pred[:N], y[:N]).mean()\n", + "\n", + " if pseudo_labels == 'soft':\n", + " n = 0 if num_labeled else N\n", + " class_cost += xe(pred[n:], 
pred[n:]).mean()\n", + " elif pseudo_labels == 'hard':\n", + " M = y.shape[1]\n", + " n = 0 if num_labeled else N\n", + " pseudo_target = T.eye(M)[pred[n:].argmax(axis=1)]\n", + " class_cost += xe(pred[n:], pseudo_target).mean()\n", + "\n", " L = len(num_decoder)\n", - " \n", + "\n", " # get clean and corresponding dirty latent layer output\n", " z_clean_l = clean_net['input']\n", " z_dirty_l = dirty_net['dec_combinator_{}'.format(L)]\n", - " \n", + "\n", " z_clean = lasagne.layers.get_output(z_clean_l, X, deterministic=False)\n", " z_dirty = lasagne.layers.get_output(z_dirty_l, X, deterministic=False)\n", "\n", " # squared error\n", - " rec_costs = [lambdas[L] * T.sqr(z_clean - z_dirty).mean()]\n", + " cost = lambdas[L] * T.sqr(z_clean - z_dirty).mean()\n", + " if use_extra_costs:\n", + " C_mu, C_sigma = get_mu_sigma_costs(z_clean)\n", + " cost += alphas[L] * C_mu + betas[L] * C_sigma\n", + " rec_costs = [cost]\n", "\n", " for l in range(L):\n", " z_clean_l = clean_net['enc_batchn_{}_norm'.format(l)]\n", @@ -460,20 +535,28 @@ " z_clean = lasagne.layers.get_output(z_clean_l, X, deterministic=False)\n", " z_dirty = lasagne.layers.get_output(z_dirty_l, X, deterministic=False)\n", "\n", - " rec_costs.append(lambdas[l] * T.sqr(z_clean - z_dirty).mean())\n", + " cost = lambdas[l] * T.sqr(z_clean - z_dirty).mean()\n", + " if use_extra_costs:\n", + " C_mu, C_sigma = get_mu_sigma_costs(z_clean)\n", + " cost += alphas[l] * C_mu + betas[l] * C_sigma\n", + " rec_costs.append(cost)\n", "\n", " return class_cost, rec_costs" ] }, { "cell_type": "code", - "execution_count": null, + "execution_count": 2, "metadata": { "collapsed": false }, "outputs": [], "source": [ "# %load utils.py\n", + "import theano\n", + "import theano.tensor as T\n", + "import lasagne\n", + "\n", "import gzip\n", "import cPickle as pickle\n", "import sys\n", @@ -506,7 +589,7 @@ }, { "cell_type": "code", - "execution_count": 3, + "execution_count": 9, "metadata": { "collapsed": false }, @@ -515,16 +598,16 @@ 
"name": "stdout", "output_type": "stream", "text": [ - "--2016-06-21 12:04:46-- http://deeplearning.net/data/mnist/mnist.pkl.gz\n", + "--2016-06-22 23:23:59-- http://deeplearning.net/data/mnist/mnist.pkl.gz\n", "Resolving deeplearning.net (deeplearning.net)... 132.204.26.28\n", "Connecting to deeplearning.net (deeplearning.net)|132.204.26.28|:80... connected.\n", "HTTP request sent, awaiting response... 200 OK\n", "Length: 16168813 (15M) [application/x-gzip]\n", - "Saving to: 'mnist.pkl.gz.1'\n", + "Saving to: 'mnist.pkl.gz.2'\n", "\n", - "100%[======================================>] 16,168,813 4.61MB/s in 3.3s \n", + "100%[======================================>] 16,168,813 4.80MB/s in 3.2s \n", "\n", - "2016-06-21 12:04:54 (4.61 MB/s) - 'mnist.pkl.gz.1' saved [16168813/16168813]\n", + "2016-06-22 23:24:08 (4.80 MB/s) - 'mnist.pkl.gz.2' saved [16168813/16168813]\n", "\n" ] } @@ -535,7 +618,7 @@ }, { "cell_type": "code", - "execution_count": 22, + "execution_count": 9, "metadata": { "collapsed": false }, @@ -547,128 +630,109 @@ "Loading data...\n", "Building model and compiling functions...\n", "Starting training...\n", - "Epoch 1 took 26.439 s\n", - "Train cost 0.845667004585, val cost 0.739950656891, val acc 0.949999988079\n", - "Layer #0 rec cost: 0.0424115508795\n", - "Layer #1 rec cost: 0.0210689622909\n", - "Layer #2 rec cost: 0.017903175205\n", - "enc_batchn_0_learn: mean == 0. True, inv_std == 1. True\n", - "enc_batchn_1_learn: mean == 0. True, inv_std == 1. True\n", - "Epoch 2 took 26.815 s\n", - "Train cost 0.737154126167, val cost 0.728129386902, val acc 0.949999988079\n", - "Layer #0 rec cost: 0.0424207411706\n", - "Layer #1 rec cost: 0.0197576656938\n", - "Layer #2 rec cost: 0.0171148162335\n", - "enc_batchn_0_learn: mean == 0. True, inv_std == 1. True\n", - "enc_batchn_1_learn: mean == 0. True, inv_std == 1. 
True\n", - "Epoch 3 took 26.899 s\n", - "Train cost 0.708354771137, val cost 0.662552952766, val acc 0.959999978542\n", - "Layer #0 rec cost: 0.0424175597727\n", - "Layer #1 rec cost: 0.0194249469787\n", - "Layer #2 rec cost: 0.0167284980416\n", - "enc_batchn_0_learn: mean == 0. True, inv_std == 1. True\n", - "enc_batchn_1_learn: mean == 0. True, inv_std == 1. True\n", - "Epoch 4 took 26.708 s\n", - "Train cost 0.693259179592, val cost 0.734445691109, val acc 0.949999988079\n", - "Layer #0 rec cost: 0.0424144156277\n", - "Layer #1 rec cost: 0.0193312242627\n", - "Layer #2 rec cost: 0.0166478361934\n", - "enc_batchn_0_learn: mean == 0. True, inv_std == 1. True\n", - "enc_batchn_1_learn: mean == 0. True, inv_std == 1. True\n", - "Epoch 5 took 26.818 s\n", - "Train cost 0.682324171066, val cost 0.704281806946, val acc 0.959999978542\n", - "Layer #0 rec cost: 0.0424068495631\n", - "Layer #1 rec cost: 0.0193558614701\n", - "Layer #2 rec cost: 0.0165634099394\n", - "enc_batchn_0_learn: mean == 0. True, inv_std == 1. True\n", - "enc_batchn_1_learn: mean == 0. True, inv_std == 1. True\n", - "Epoch 6 took 26.665 s\n", - "Train cost 0.673853754997, val cost 0.664340019226, val acc 0.959999978542\n", - "Layer #0 rec cost: 0.0424051769078\n", - "Layer #1 rec cost: 0.0193317253143\n", - "Layer #2 rec cost: 0.016635145992\n", - "enc_batchn_0_learn: mean == 0. True, inv_std == 1. True\n", - "enc_batchn_1_learn: mean == 0. True, inv_std == 1. True\n", - "Epoch 7 took 26.801 s\n", - "Train cost 0.668162584305, val cost 0.692064702511, val acc 0.959999978542\n", - "Layer #0 rec cost: 0.0424223914742\n", - "Layer #1 rec cost: 0.019355526194\n", - "Layer #2 rec cost: 0.0165152177215\n", - "enc_batchn_0_learn: mean == 0. True, inv_std == 1. True\n", - "enc_batchn_1_learn: mean == 0. True, inv_std == 1. 
True\n", - "Epoch 8 took 26.760 s\n", - "Train cost 0.662031590939, val cost 0.68827599287, val acc 0.959999978542\n", - "Layer #0 rec cost: 0.0424108207226\n", - "Layer #1 rec cost: 0.0192943159491\n", - "Layer #2 rec cost: 0.0163748040795\n", - "enc_batchn_0_learn: mean == 0. True, inv_std == 1. True\n", - "enc_batchn_1_learn: mean == 0. True, inv_std == 1. True\n", - "Epoch 9 took 26.680 s\n", - "Train cost 0.659080207348, val cost 0.661842346191, val acc 0.959999978542\n", - "Layer #0 rec cost: 0.0424109250307\n", - "Layer #1 rec cost: 0.0192462466657\n", - "Layer #2 rec cost: 0.0164167992771\n", - "enc_batchn_0_learn: mean == 0. True, inv_std == 1. True\n", - "enc_batchn_1_learn: mean == 0. True, inv_std == 1. True\n", - "Epoch 10 took 26.697 s\n", - "Train cost 0.657592475414, val cost 0.702867507935, val acc 0.97000002861\n", - "Layer #0 rec cost: 0.0424037612975\n", - "Layer #1 rec cost: 0.0192435141653\n", - "Layer #2 rec cost: 0.0163965579122\n", - "enc_batchn_0_learn: mean == 0. True, inv_std == 1. True\n", - "enc_batchn_1_learn: mean == 0. True, inv_std == 1. 
True\n", + "Epoch 1 took 22.822 s\n", + "Train cost 2.42561721802, val cost 1.87803590298, val acc 0.810000002384\n", + "Layer #0 rec cost: 0.424141585827\n", + "Layer #1 rec cost: 0.572997391224\n", + "Layer #2 rec cost: 0.489471495152\n", + "Epoch 2 took 22.866 s\n", + "Train cost 2.18242812157, val cost 1.8710668087, val acc 0.829999983311\n", + "Layer #0 rec cost: 0.424109280109\n", + "Layer #1 rec cost: 0.517513096333\n", + "Layer #2 rec cost: 0.471111416817\n", + "Epoch 3 took 22.901 s\n", + "Train cost 2.11269044876, val cost 1.88646125793, val acc 0.910000026226\n", + "Layer #0 rec cost: 0.424222230911\n", + "Layer #1 rec cost: 0.507005512714\n", + "Layer #2 rec cost: 0.466582417488\n", + "Epoch 4 took 22.896 s\n", + "Train cost 2.09363341331, val cost 1.88520228863, val acc 0.899999976158\n", + "Layer #0 rec cost: 0.424132347107\n", + "Layer #1 rec cost: 0.501805484295\n", + "Layer #2 rec cost: 0.465649992228\n", + "Epoch 5 took 22.898 s\n", + "Train cost 2.05789875984, val cost 1.98522758484, val acc 0.870000004768\n", + "Layer #0 rec cost: 0.424120694399\n", + "Layer #1 rec cost: 0.499489545822\n", + "Layer #2 rec cost: 0.465319514275\n", + "Epoch 6 took 22.880 s\n", + "Train cost 2.05530428886, val cost 1.94931507111, val acc 0.870000004768\n", + "Layer #0 rec cost: 0.424116581678\n", + "Layer #1 rec cost: 0.496793806553\n", + "Layer #2 rec cost: 0.466006070375\n", + "Epoch 7 took 22.849 s\n", + "Train cost 2.04769968987, val cost 1.97700107098, val acc 0.899999976158\n", + "Layer #0 rec cost: 0.424246698618\n", + "Layer #1 rec cost: 0.494832396507\n", + "Layer #2 rec cost: 0.465624034405\n", + "Epoch 8 took 22.739 s\n", + "Train cost 2.03270721436, val cost 1.76881194115, val acc 0.920000016689\n", + "Layer #0 rec cost: 0.424224495888\n", + "Layer #1 rec cost: 0.493043601513\n", + "Layer #2 rec cost: 0.464083582163\n", + "Epoch 9 took 22.830 s\n", + "Train cost 2.02665328979, val cost 1.9300942421, val acc 0.920000016689\n", + "Layer #0 rec cost: 
0.424160569906\n", + "Layer #1 rec cost: 0.492138564587\n", + "Layer #2 rec cost: 0.465402901173\n", + "Epoch 10 took 22.917 s\n", + "Train cost 2.00475525856, val cost 2.03754162788, val acc 0.930000007153\n", + "Layer #0 rec cost: 0.424184262753\n", + "Layer #1 rec cost: 0.49082672596\n", + "Layer #2 rec cost: 0.464062064886\n", "New LR: 0.10000000149\n", - "Epoch 11 took 26.767 s\n", - "Train cost 0.651364922523, val cost 0.676774442196, val acc 0.980000019073\n", - "Layer #0 rec cost: 0.0424225926399\n", - "Layer #1 rec cost: 0.0192226637155\n", - "Layer #2 rec cost: 0.0163397565484\n", - "enc_batchn_0_learn: mean == 0. True, inv_std == 1. True\n", - "enc_batchn_1_learn: mean == 0. True, inv_std == 1. True\n", - "Epoch 12 took 26.645 s\n", - "Train cost 0.652854502201, val cost 0.678124725819, val acc 0.97000002861\n", - "Layer #0 rec cost: 0.042415548116\n", - "Layer #1 rec cost: 0.0192005801946\n", - "Layer #2 rec cost: 0.0163279604167\n", - "enc_batchn_0_learn: mean == 0. True, inv_std == 1. True\n", - "enc_batchn_1_learn: mean == 0. True, inv_std == 1. True\n", - "Epoch 13 took 26.839 s\n", - "Train cost 0.647919416428, val cost 0.678905069828, val acc 0.980000019073\n", - "Layer #0 rec cost: 0.0424238294363\n", - "Layer #1 rec cost: 0.0191765259951\n", - "Layer #2 rec cost: 0.0162966195494\n", - "enc_batchn_0_learn: mean == 0. True, inv_std == 1. True\n", - "enc_batchn_1_learn: mean == 0. True, inv_std == 1. True\n", - "Epoch 14 took 26.501 s\n", - "Train cost 0.646459579468, val cost 0.650809168816, val acc 0.949999988079\n", - "Layer #0 rec cost: 0.0424147360027\n", - "Layer #1 rec cost: 0.0191764775664\n", - "Layer #2 rec cost: 0.0163504853845\n", - "enc_batchn_0_learn: mean == 0. True, inv_std == 1. True\n", - "enc_batchn_1_learn: mean == 0. True, inv_std == 1. 
True\n", - "Epoch 15 took 26.767 s\n", - "Train cost 0.643889009953, val cost 0.675578832626, val acc 0.97000002861\n", - "Layer #0 rec cost: 0.0424266718328\n", - "Layer #1 rec cost: 0.0191710442305\n", - "Layer #2 rec cost: 0.0163805447519\n", - "enc_batchn_0_learn: mean == 0. True, inv_std == 1. True\n", - "enc_batchn_1_learn: mean == 0. True, inv_std == 1. True\n" + "Epoch 11 took 23.146 s\n", + "Train cost 2.01219749451, val cost 2.06196784973, val acc 0.920000016689\n", + "Layer #0 rec cost: 0.424222916365\n", + "Layer #1 rec cost: 0.489564478397\n", + "Layer #2 rec cost: 0.464669078588\n", + "Epoch 12 took 23.039 s\n", + "Train cost 1.9962041378, val cost 1.93116974831, val acc 0.910000026226\n", + "Layer #0 rec cost: 0.424142956734\n", + "Layer #1 rec cost: 0.488685041666\n", + "Layer #2 rec cost: 0.464961528778\n", + "Epoch 13 took 22.913 s\n", + "Train cost 1.98841309547, val cost 1.86944377422, val acc 0.870000004768\n", + "Layer #0 rec cost: 0.424248784781\n", + "Layer #1 rec cost: 0.488236039877\n", + "Layer #2 rec cost: 0.465781867504\n", + "Epoch 14 took 23.036 s\n", + "Train cost 1.98689389229, val cost 1.96216869354, val acc 0.889999985695\n", + "Layer #0 rec cost: 0.424197643995\n", + "Layer #1 rec cost: 0.486736387014\n", + "Layer #2 rec cost: 0.465528964996\n", + "Epoch 15 took 23.210 s\n", + "Train cost 1.98063921928, val cost 1.94275975227, val acc 0.930000007153\n", + "Layer #0 rec cost: 0.424104750156\n", + "Layer #1 rec cost: 0.486671447754\n", + "Layer #2 rec cost: 0.465840518475\n" ] } ], "source": [ "# %load train_ladder_nets.py\n", - "\n", + "from utils import load_data\n", + "from ladder_nets import *\n", "import time\n", "import theano.misc.pkl_utils\n", + "import lasagne\n", + "import cPickle\n", "\n", "LEARNING_RATE = 0.1\n", "LR_DECREASE = 1.\n", "BATCH_SIZE = 100\n", "NUM_EPOCHS = 15\n", "COMBINATOR_TYPE = 'milaUDEM'\n", - "LAMBDAS = [0.1, 0.1, 0.1]\n", + "LAMBDAS = [1, 1, 1]\n", "DROPOUT = 0.3\n", + "# add extra cost to enforce 
zero mean and unity covariance mat for each layer\n", + "EXTRA_COST = False # True\n", + "ALPHAS = None, # [0.1]*3\n", + "BETAS = None, #[0.1]*3\n", + "# use only limited number of labeled data\n", + "NUM_LABELED = 10\n", + "# class entropy regularization using pseudolabels predicted for unlabeled data \n", + "PSEUDO_LABELS = None\n", "\n", "print \"Loading data...\"\n", "dataset = load_data()\n", @@ -695,10 +759,11 @@ "# set up (possibly amortizable) lr, cost and updates\n", "sh_lr = theano.shared(lasagne.utils.floatX(LEARNING_RATE))\n", "\n", - "class_cost, rec_costs = build_cost(X, lasagne.utils.one_hot(y), num_decoder, \n", - " dirty_net, clean_net, output_train, LAMBDAS)\n", + "class_cost, rec_costs = build_cost(X, lasagne.utils.one_hot(y), num_decoder,\n", + " dirty_net, clean_net, output_train, LAMBDAS,\n", + " use_extra_costs=EXTRA_COST, alphas=ALPHAS, betas=BETAS,\n", + " num_labeled=NUM_LABELED, pseudo_labels=PSEUDO_LABELS)\n", "cost = class_cost + T.sum(rec_costs)\n", - "\n", "net_params = lasagne.layers.get_all_params(train_output_l, trainable=True)\n", "updates = lasagne.updates.adam(cost, net_params, learning_rate=sh_lr)\n", "\n", @@ -720,16 +785,6 @@ " y: dataset['y_valid'][batch_slice],\n", " })\n", "\n", - "# checking for constants in means and inv_stds during training\n", - "bl_name = 'enc_batchn_{}_learn'\n", - "means = [abs(dirty_net[bl_name.format(i)].mean.ravel()).mean() for i\n", - " in range(len(num_encoder))]\n", - "means = T.stack(means, axis=1)\n", - "stds = [abs(dirty_net[bl_name.format(i)].inv_std.ravel()).mean() for i\n", - " in range(len(num_encoder))]\n", - "stds = T.stack(stds, axis=1)\n", - "get_stats = theano.function([], [means, stds])\n", - "\n", "network_dump = {'train_output_layer': train_output_l,\n", " 'eval_output_layer': eval_output_l,\n", " 'dirty_net': dirty_net,\n", @@ -748,7 +803,6 @@ "def train_epoch():\n", " costs = []\n", " rec_costs = []\n", - " stats = []\n", " for b in range(num_batches_train):\n", " train_out = 
train(b)\n", " train_cost = train_out[0]\n", @@ -756,17 +810,13 @@ "\n", " costs.append(train_cost)\n", " rec_costs.append(rec_cost)\n", - " stats.append(np.vstack(get_stats()))\n", "\n", - " return (np.mean(costs), np.mean(rec_costs, axis=0),\n", - " np.stack(stats, axis=0).mean(axis=0))\n", + " return (np.mean(costs), np.mean(rec_costs, axis=0))\n", " \n", "\n", "def eval_epoch():\n", " costs = []\n", " accs = []\n", - " preds = []\n", - " targets = []\n", " for b in range(num_batches_valid):\n", " eval_cost, eval_acc = eval(b)\n", " costs.append(eval_cost)\n", @@ -784,7 +834,7 @@ "\n", "try:\n", " for n in range(NUM_EPOCHS):\n", - " train_cost, rec_costs, stats = train_epoch()\n", + " train_cost, rec_costs = train_epoch()\n", " eval_cost, acc = eval_epoch()\n", " \n", " train_costs.append(train_cost)\n", @@ -794,16 +844,11 @@ " print \"Epoch %d took %.3f s\" % (n + 1, time.time() - now)\n", " now = time.time()\n", " print \"Train cost {}, val cost {}, val acc {}\".format(train_costs[-1], \n", - " valid_costs[-1], \n", - " valid_accs[-1])\n", + " valid_costs[-1], \n", + " valid_accs[-1])\n", " print '\\n'.join(['Layer #{} rec cost: {}'.format(i, c) for i, c\n", " in enumerate(rec_costs)])\n", - " means, inv_stds = stats\n", - " for i in range(len(num_encoder)):\n", - " print '{}: mean == 0. {}, inv_std == 1. 
{}'.format(bl_name.format(i),\n", - " np.allclose(means[i], 0.),\n", - " np.allclose(inv_stds[i], 1.))\n", - " \n", + "\n", " if (n+1) % 10 == 0:\n", " new_lr = sh_lr.get_value() * LR_DECREASE\n", " print \"New LR:\", new_lr\n", @@ -826,7 +871,7 @@ }, { "cell_type": "code", - "execution_count": 7, + "execution_count": 10, "metadata": { "collapsed": true }, @@ -847,7 +892,7 @@ { "data": { "text/plain": [ - "" + "" ] }, "execution_count": 11, @@ -856,9 +901,9 @@ }, { "data": { - "image/png": "iVBORw0KGgoAAAANSUhEUgAAAX0AAAEACAYAAABfxaZOAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAIABJREFUeJzt3Xd8FFX3+PHPDQSkSK8CCU1qaEERQSGAIkXEAhoIUpTy\nYEEsXwUryE8fREEU0EcQBaQKiAE1FOlioRNKAkjvvUP6+f0xAQKEZJPsZrac9+u1r+zOztw5oZw7\nc+bOHSMiKKWU8g1+dgeglFIq+2jSV0opH6JJXymlfIgmfaWU8iGa9JVSyodo0ldKKR/iUNI3xrQy\nxkQbY3YYY95K5fuixpgIY8xGY8xmY0z3FN/tNcZsMsZsMMasdmLsSimlMsikN07fGOMH7ABaAIeB\nNUCoiESnWOcD4A4RGWiMKQZsB0qKSIIxZjdQX0TOuOqXUEop5RhHjvQbADtFZJ+IxAPTgfY3rXMU\nuDP5/Z3AKRFJSP5sHNyPUkopF3MkGZcBDqT4fDB5WUrjgJrGmMPAJuCVFN8JsMgYs8YY0ysrwSql\nlMqanE5qZyCwSUSaGWMqYSX52iJyEWgsIkeMMcWTl0eJyB9O2q9SSqkMcCTpHwICUnwum7wspcbA\nRwAisssYsweoBqwVkSPJy08YY+ZglYtuSfrGGJ0ESCmlMkhETEbWd6S8swaobIwJNMbkAkKBuTet\nEwU8BGCMKQlUAXYbY/IaY/InL88HtAS2pBG8R74++OAD22PQ+O2PQ+P3zJcnx58Z6R7pi0iiMeYl\nYCFWJzFeRKKMMX2sr2Us8F/ge2PMJqwLt2+KyGljTAVgTvJRfE5giogszFSkSimlssyhmr6IzAeq\n3rTsmxTvTwLtUtluD1A3izEqpZRyEh1K6QQhISF2h5AlGr+9NH57eXr8GZXuzVnZxRgj7hKLUkp5\nAmMMksELuc4asqmUUplWvnx59u3bZ3cYbiswMJC9e/c6pS090ldK2S75iNXuMNzW7f58MnOk71Y1\n/aQkuyNQSinv5lZJf/NmuyNQSinv5lZJf9kyuyNQSinvpklfKaVcrG/fvnz00Ud2hwG42YXcIkWE\nEyfAz626IqWUq7n7hdwKFSowfvx4mjdvbsv+vfZCbvHiEBlpdxRKKeW4xMREu0PIELdK+iEhWuJR\nSrmXrl27sn//fh599FEKFCjAp59+ip+fH9999x2BgYG0aNECgKeffprSpUtTuHBhQkJC2LZt27U2\nevTowfvvvw/A8uXLKVeuHCNGjKBkyZKUKVOGCRMmZNvvo0lfKaXSMGnSJAICAvj11185f/48Tz/9\nNAArVqwgOjqaBQsWANCmTRt27drF8ePHCQ4OJiws7LZtHj16lAsXLnD48GG+/fZbXnzxRc6dO5ct\nv49bJf2mTWHFCh2vr5S6lTFZf2VFypq6MYbBgweTJ08ecufODUD37t3Jmzcv/v7+vP/++2zatIkL\nFy6k2lauXLl47733yJEjB6
1btyZ//vxs3749awE6yK2SfunSUKKE1vWVUrcSyfrLmcqWLXvtfVJS\nEgMGDKBy5coUKlSIChUqYIzh5MmTqW5btGhR/FKMWMmbNy8XL150boC34VZJH6BZM1i61O4olFLq\nOpPKaULKZVOnTmXevHksWbKEs2fPsnfv3iw96MSV3C7pa11fKeVuSpUqxe7duwFSTeYXLlwgd+7c\nFC5cmEuXLjFw4MBUOwp34HZJ/2pd38NGQSmlvNiAAQMYMmQIRYoUYfbs2bck9K5duxIQEECZMmUI\nCgqiUaNGGWo/OzsIt7o562os1avD1KlQr57NQSmlsoW735xlN6+9OesqLfEopZRruGXS14u5Sinl\nGm5Z3jl2DKpVg5MnIUcOmwNTSrmclnfS5vXlnZIlrTH7mzbZHYlSSnkXt0z6oHV9pZRyBbdN+s2a\nadJXSilnc8uaPsDx41C1qtb1lfIFWtNPm9fX9MGag+euu2DjRrsjUUop7+G2SR+0rq+U8lxX582/\nKigoiBUrVji0ritp0ldKKRdJOb3Cli1baNKkiUPrupJbJ/2mTWHlSkhIsDsSpZTyDm6d9EuUgLJl\nta6vlLLPsGHD6Nix4w3L+vfvT//+/ZkwYQI1atSgQIECVK5cmbFjx962nQoVKrBkyRIAYmJi6N69\nO0WKFCEoKIg1a9a49HdIKWe27SmTrpZ47rnH7kiUUr4oNDSUDz/8kEuXLpEvXz6SkpL48ccf+fnn\nnzl16hS//vorFSpUYOXKlbRq1YoGDRpQt27dNNscNGgQe/bsYc+ePVy8eJFWrVpl02/jIUl/wgR4\n4w27I1FK2ckMznrNWz7I+LDQgIAAgoODmTNnDl26dGHx4sXky5ePBg0a3LDegw8+SMuWLVm5cmW6\nSX/mzJn873//o2DBghQsWJB+/foxZMiQDMeWGW6f9Js2hZ49rbp+TrePVinlKplJ2M7SqVMnpk2b\nRpcuXZg2bRqdO3cGICIigg8//JAdO3aQlJTElStXqF27drrtHT58+IbHLQYGBros9pu5dU0foHhx\nKFcONmywOxKllK/q2LEjy5Yt49ChQ8yZM4ewsDDi4uLo0KEDb775JidOnODMmTO0bt3aoZvMSpcu\nzYEDB6593rdvnyvDv4HbJ33QoZtKKXsVK1aMpk2b0qNHDypWrEiVKlWIi4sjLi6OYsWK4efnR0RE\nBAsXLnSovaeffpr//ve/nD17loMHDzJ69GgX/wbXadJXSikHdO7cmcWLFxMWFgZA/vz5+fLLL+nY\nsSNFihRh+vTptG/f/rbbpxyH/8EHHxAQEECFChVo1aoVXbt2dXn81+Jw5FTEGNMKGInVSYwXkU9u\n+r4oMBkoDeQAhovIBEe2TdGG3C6WEyegcmU4dUrr+kp5I517J23ZOveOMcYPGA08AtQEOhljqt20\n2kvARhGpCzQDhhtjcjq4bbqKF4eAAK3rK6VUVjlS3mkA7BSRfSISD0wHbj6HOQrcmfz+TuCUiCQ4\nuK1D9BGKSimVdY4k/TLAgRSfDyYvS2kcUNMYcxjYBLySgW0donV9pZTKOmdVyAcCm0SkmTGmErDI\nGJP+YNWbDBo06Nr7kJAQQkJCrn1u0gR69NDx+kop37Vs2TKWZfHoN90LucaYhsAgEWmV/HkAICkv\nyBpjfgM+EpFVyZ8XA29hdSppbpuijdteyL2qdm349lu46UY4pZSH0wu5acvuh6isASobYwKNMbmA\nUGDuTetEAQ8lB1ESqALsdnBbh2mJRymlsibdpC8iiVijcxYCW4HpIhJljOljjOmdvNp/gXuMMZuA\nRcCbInL6dttmNli9mKuUdwoMDMQYo6/bvJw5TYPbPiM3NadOQYUK1k9//2wKTCml3JRXPSM3NUWL\nWkl//Xq7I1FKKc/kUUkftK6vlFJZoUlfKaV8iEfV9AFOn4by5bWur5RSXl/TByhSBCpWhHXr
7I5E\nKaU8j8clfdASj1JKZZYmfaWU8iEeV9MHresrpRT4SE0frLp+pUqwdq3dkSillGfxyKQPWuJRSqnM\n0KSvlFI+xCNr+gBnzkBgoNb1lVK+y2dq+gCFC1sPS9e6vlJKOc5jkz5YJR6dalkppRzn8Ulf6/pK\nKeU4j63pg1XXDwiw6vq5crkoMKWUclM+VdMHq65/991a11dKKUd5dNIH6xGKWuJRSinHeHzS14u5\nSinlOI+u6QOcPQvlymldXynle3yupg9QqBBUqQJr1tgdiVJKuT+PT/qgQzeVUspRXpH09WKuUko5\nxuNr+nC9rn/yJOTO7eTAlFLKTflkTR+sun7VqlrXV0qp9HhF0get6yullCM06SullA/xipo+wLlz\nULas1vWVUr7DZ2v6AAULQrVqsHq13ZEopZT78pqkD1riUUqp9GjSV0opH+I1NX3Qur5Syrf4dE0f\nrLp+9erwzz92R6KUUu7Jq5I+aIlHKaXSoklfKaV8iEM1fWNMK2AkVicxXkQ+uen7N4AwQAB/oDpQ\nTETOGmP2AueAJCBeRBrcZh9ZrukDnD8PZcrAiRNwxx1Zbk4ppdxWZmr66SZ9Y4wfsANoARwG1gCh\nIhJ9m/UfBfqLyEPJn3cD9UXkTDr7cUrSB7jvPvj0U2jSxCnNKaWUW3LVhdwGwE4R2Sci8cB0oH0a\n63cCpqWMy8H9OI0+QlEppVLnSDIuAxxI8flg8rJbGGPyAK2A2SkWC7DIGLPGGNMrs4FmhNb1lVIq\ndTmd3F474A8ROZtiWWMROWKMKY6V/KNE5A8n7/cGjRtb0yzHxGhdXymlUnIk6R8CAlJ8Lpu8LDWh\n3FjaQUSOJP88YYyZg1UuSjXpDxo06Nr7kJAQQkJCHAjvVgUKQM2a1nj9pk0z1YRSSrmdZcuWsSyL\nZQxHLuTmALZjXcg9AqwGOolI1E3rFQR2A2VF5ErysryAn4hcNMbkAxYCg0VkYSr7cdqFXIABAyBP\nHvjgA6c1qZRSbsUlF3JFJBF4CSthbwWmi0iUMaaPMaZ3ilUfBxZcTfjJSgJ/GGM2AH8D81JL+K6g\nF3OVUupWXjX3TkoXLkDp0tY8PP65Esnhl8NpbSvHfbP2G/ac3cPQh4baHYpSXsfn595JKcn/HCVa\nfUejb1qQ56M8tJvWjrnb55KQlGB3aD5j1D+jGLpqKN9v/J4tx7fYHY5SCi9L+nGJcYRHh9NxZkcC\nRgaQs8Y8Kp95gSOvH+HJak8y9I+hBI4M5N0l77LnzB67w/VqI/8eyed/f87SbksZ0HgA7yx5x+6Q\nlFJ4QXknSZJYtX8VkyMnMztqNjVL1CSsVhgdanRgzYoi/Pe/N47Z33p8K+PWj2Ny5GTqla5Hr+Be\ntK/antw5dS5mZxnx1wjGrBnD0m5LCSgYQExCDFVHV2XaU9NoVK6R3eEp5TVcMg1Ddslo0t96fCuT\nIyczdctU7sx1J11qd6FTUCcCCwVeW+fiRShVypqHJ0+eG7ePSYhhTtQcxq0fx5bjW+hapys9g3tS\nrVg1Z/1KPunTVZ/yzbpvWNptKeUKlru2/PsN3/P9xu9Z3n05xmTo36hS6ja8PukfOn+IaVumMTly\nMicvn6Rzrc6E1Qqjdsnat00k998PH38MzZrdvt2dp3YyfsN4JmycQJWiVegV3IsONTqQxz/P7TdS\ntxj6x1C+2/AdS7otoWyBsjd8l5CUQO2vazO85XBa393apgiV8i5emfTPxZxjdtRsJkdOZuPRjTxZ\n/Um61O5Ck8Am+Jn0L0kMHAi5csHgwenHEJ8Yzy87fmHc+nH8c+gfOgV1oldwL+qUqpOZX8mnfLzy\nYyZumsiSrksoUyDVWTqYEzWHwcsHs77Peof+7pRSafOapB+bEEvEvxFM2TyFhbsW0qJCC8JqhdG2\nSlvuyJmxeRUWLLCO9Jcvz1g8+8/t57sN3/Hdhu8olb8U
vYJ7ERoUyp2578xYQz5gyPIhTNk8haXd\nllL6ztK3XU9EaDi+If3v60+nWp2yMUKlvJPHJ/3le5czJXIKs6JmUatErWsXZAvnKZzpdi9ehIAA\nmDsXHngg49snJiWyYNcCvl3/LUv3LqVD9Q70qt+Le++6V2vTwOBlg5mxdQZLui2hVP5S6a6/dM9S\nes3rxbYXt5ErR65siFAp7+XxST/oqyC61OpCp1qdCCgYkP5GDlq4EJ59FubPh3r1Mt/O0YtHmbBx\nAt+u/5a8/nnpFdyLLrW7ZKpTEhHiEuOITYwlNiE2zZ9F8xSl/l31Mx+4C4gIg5YNYlbULJZ0XULJ\n/CUd3vaRyY/weNXH6XtvXxdGqJT38/ik78pYfvoJXnzRmpqhWhYH6CRJEsv2LmPc+nFE7Iygafmm\n5MqRK83kHZMQc8OyuMQ4/P38yZ0zN7lz5E7z585TO6l/V32GtxxO+ULlnfLnkRUiwntL3yN8eziL\nuy6mRL4SGdp+3eF1tJvWjp0v7yRfrnwuilIp76dJPx0TJ8J778GKFVC+vHPaPHn5JIt3L8bP+DmU\nwK/+zJUjl8MXM2MSYvjsz88Y+fdIXm7wMm82ftO2kUUiwtuL3+a3f3/j92d/p3i+4plq55lZz1C3\nZF0GPjjQyREq5Ts06Ttg9GgYORJWrrTm5vEk+8/t5/WFr7P28FpGtBzB49Uez9brCiLCgN8HsGDX\nAn7v+jvF8hbLdFs7Tu2g8XeN2f7SdorkKeLEKJXyHZr0HfTRRzB9ujWip4gH5pvFuxfTb34/ytxZ\nhi9bf5ktN5SJCP+36P9YsmcJi55dRNG8RbPcZp95fSh0RyE+efgTJ0SolO/RpO8gEXjrLSvp//47\n3OmBozDjE+MZs2YMH638iG51uvF+0/cpkLuAS/YlIry+8HVW7FvBwmcXOu3I/ND5Q9T+X20i/xN5\n27H9Sqnb01k2HWQMfPIJ1K0Ljz0GV66kv4278c/hT/+G/dnSdwunr5ym2uhqTNo0iSRJcup+RIRX\nF7zKyv0rWfTsIqeWYsoUKEPPej35cPmHTmtTKZU2nzzSvyoxEbp0gUuXYPZs8PfP1t071d8H/+bl\niJfx9/NndJvRBJcOznKbIkK/iH6sPryaBV0WUOiOQk6I9Eanr5ym6uiqrHpuFVWKVnF6+0p5Mz3S\nz6AcOWDSJKvc062b1Ql4qoZlG/JPz394rt5ztJnShj7z+nDy8slMt5ckSbz424usPbKWhV0WuiTh\nAxTJU4TXGr7Ge0vfc0n7Sqkb+XTSB+vo/scf4ehReOEFqwPwVH7Gj57BPYl6MYrcOXNTY0wNxqwe\nk+EHxyRJEi/8+gIbj25kQZcFFLyjoIsitvS7rx8r961k3eF1Lt2PUsrHyzspXbgADz0ETZta9X5v\nmGFh87HNvBzxMmdjzjKq9SgeDHww3W2SJIk+8/oQdTKKiLCIbJtr6Os1X/Pz9p9Z0GVBtuxPKW+g\no3ey6PRpK+mHhsI7XvKgJxHhx60/8saiN2gS2IRhDw277UiZJEmi19xe7Dy9k187/5qtk8vFJ8ZT\nfUx1xrYbS/MKzbNtv0p5Mq3pZ1GRItY8PRMmwKhRdkfjHMYYngl6hugXoylfsDx1/leHT/74hNiE\n2BvWS0xK5Pm5z7PrzC5+C/st22cT9c/hz5BmQxi4eCB2d/5KeTNN+jcpXRoWLYJPP7WmbfAW+XLl\n46MWH/F3z79ZuX8ltb6uRcTOCMBK+D3Ce7Dv7D5+7fwr+XPltyXGZ4KeITYhlp+jf7Zl/0r5Ai3v\n3EZ0tPW0rTFj4Mkn7Y7G+X7d8Sv9F/SnerHq5PXPy8nLJ5nbaS55/fPaGlfEzgheX/g6kX0jyemX\n09ZYlHJ3Wt5xomrV4LffoG9fq+TjbdpWacuWvlu4v+z9+Bk/5nWaZ3vCB2hVuRXF8xXnh00/2B2K\nUl5Jj/TTsWoVPPEE
zJkDjRvbHY1v+PPAn4TOCmXHyzsy/KQ0pXyJHum7QOPGMHmyVeLZsMHuaHxD\no3KNqFe6Hl+v+druUJTyOnqk7yBnPoRFpW/L8S20mNSCnS/vdNlEckp5Oj3Sd6Enn4ShQ6FlS9i3\nz+5ovF9QiSBaVW7F8D+H2x2KUl5Fj/QzaNQo+PJL6yEspdJ/DrjKgr1n91J/bH2iXozK8CMZlfIF\nekduNvl//w9mzPDch7B4klciXgHgi9Zf2ByJUu5Hk342EYE337SeteupD2HxFMcvHaf6mOqs673O\nLR4Kr5Q70aSfjUTgP/+BnTut8fx36MhCl/lg6QfsPbeXiY970S3SSjmBJv1sdvUhLGfPWvP1lCxp\nd0Te6Xzsee4edTeLuy4mqESQ3eEo5TZ09E42u/oQltq1ISgIvvgCEjI2db1yQIHcBRjQeADvLPGS\nqU+VspEm/Szy97fm31+xAubNg+Bg671yrr739mXj0Y38eeBPu0NRyqNpeceJRGDWLHj9dWjSBIYN\ng7vusjsq7/H9hu/5fuP3LO++HOMNT7lRKotcVt4xxrQyxkQbY3YYY95K5fs3jDEbjDHrjTGbjTEJ\nxphCjmzrTYyBjh0hKgrKlbPKPsOHQ3y83ZF5h2frPMvJyyeZ/+98u0NRymOle6RvjPEDdgAtgMPA\nGiBURKJvs/6jQH8ReSgj23rDkf7Ntm+Hfv3g4EEYPdqaqlllzZyoOQxePpj1fdbjZ7Q6qXybq470\nGwA7RWSfiMQD04H2aazfCZiWyW29StWqMH++dTNXjx7WYxgPHbI7Ks/2eLXHyZ0zNzO2zLA7FKU8\nkiNJvwxwIMXng8nLbmGMyQO0AmZndFtvZYw1NfO2bXD33VCnjlXrj4uzOzLPZIxhaIuhvLf0PeIS\n9Q9RqYxy9vlxO+APETnr5HY9Xt68MGQI/P23NX1DnTrW3bwq45pVaEalIpUYv3683aEo5XEceR7d\nISAgxeeyyctSE8r10k5Gt2XQoEHX3oeEhBASEuJAeJ6lcmX45RdreGfv3lC/PowYYV34VY77uPnH\ntJvWjq51upIvVz67w1EqWyxbtoxly5ZlqQ1HLuTmALZjXYw9AqwGOolI1E3rFQR2A2VF5EpGtk1e\n1+su5KbnyhVrjP/o0dYwz9deg9y57Y7Kczwz6xnqlqzLwAcH2h2KUrZwyYVcEUkEXgIWAluB6SIS\nZYzpY4zpnWLVx4EFVxN+WttmJEBvlicPDBoEq1fDX39BrVqwYIHdUXmOIc2GMOLvEZy+ctruUNyS\niNDlpy78HP2z3aEoN6I3Z7mRX3+FV16xxvd//jkEBtodkfv7zy//YeuJrfRr0I92VdvpM3VTmLd9\nHn1/7YsgbHthGwXvKGh3SMrJdMI1LxATA599BiNHQv/+8MYbOoNnWmISYpi5dSaTIiex/sh6OlTv\nQNc6XWlUrpFP37UbmxBL0NdBjG49mp+ifsI/hz+j24y2OyzlZJr0vcjevVaNPzLS6gTatbMmeFO3\nd/D8QaZETmHiponEJsbStXZXnq3zLBULV7Q7tGz32Z+fsXzfcuZ1mseZK2eo8VUNwkPDaVCmgd2h\nKSfSpO+FFiyAd9+F48fh+efhueegbFm7o3JvIsL6I+uZtGkS07ZMo2qxqnSt3ZWONTtS6I5Cdofn\ncscuHqPmVzX58/k/qVK0CgCTIycz/K/hrOm1hpx+jgzaU55Ak74X27ABxo2D6dOhcWPo1QvatIGc\n+v83TfGJ8cz/dz6TIiexaNciHqn8CF1rd6VlpZb45/C3OzyX6DW3FwXvKMhnLT+7tkxEePiHh2l7\nd1tevf9VG6NTzqRJ3wdcugQzZ8LYsbB/vzW9w/PPQ/nydkfm/s5cOcOPW39kUuQk/j39L52DOtO1\nTlfqlqrrNfX/9UfW02ZKG7a/tP2WC7c7Tu2g0fhGbOizgXIF9caQLce30Hl2Z/6v0f
/RpXYXj/w3\noEnfx2zZYh39T5kC99xjHf0/9pg1x79K285TO5kcOZlJkZPInys/XWt3Jax2GHfd6blzYYsITSY0\n4dnaz9K7fu9U1xm8bDAbj21kzjNzsjk69xJ1IooWk1rQ775+/BD5A3VK1uHrtl973AinzCR9RMQt\nXlYoKjMuXxaZPFmkSRORUqVE3npLZOdOu6PyDIlJibJi7wrpGd5TCg8tLC1/aCmTN02Wi7EX7Q4t\nw2ZsmSF1vq4jCYkJt10nJj5GqoyqIuHR4dkYmXvZfnK7lBleRiZtnCQiIpfjLssLv7wg5UeWlz/2\n/WFzdBmTnDczlmszuoGrXpr0nSM6WuSNN0SKFxdp3lxk2jSRmBi7o/IMl+Muy/TN06XNlDZSaGgh\n6f5zd1mye4kkJiXaHVq6LsddloDPA2TZnmXprrtk9xIJ+DxALsReyIbI3Mu/p/6VsiPKyvj142/5\nbm70XCn1WSl5f8n7Ep8Yb0N0GZeZpK/lHS8VGwvh4Vb5Z9MmePZZq/xTrZrdkXmGoxePMm3zNMZv\nGE9w6WAmPj7RrWu+Q5YPIfJ4JDM7znRo/a5zulIiX4kbLvZ6uz1n9hAyMYR3HnzntuWvIxeO0D28\nO+djzzPlySluP9xXyzsqVf/+KzJwoFX6eeABkUmTrJKQSt+luEsS9FWQjF071u5QbuvAuQNS5JMi\nsufMHoe3OXbxmBQfVlw2HtnousDcyN4ze6X8yPIyZvWYdNdNTEqUEX+OkGLDiskPm37IhugyDy3v\nqLTExYn89JNI69YiRYuKvPyySGSk3VG5v6gTUVJsWDG3TZBhs8PkncXvZHi7cevGyX3j7vOI8lVW\nHDh3QCp+UVG++PuLDG234cgGqT66unSe3VnOXjnrouiyJjNJX58350P8/a0Huvz2G6xbB4ULW2P9\n773XerDLrl12R+ieqhWrxhetvqDjzI6cjz1vdzg3+OvAXyzbu4wBDwzI8LbP1XuOnH45GbturAsi\ncw+HLxym2cRmvHDPC/S7r1+Gtq1bqi5re6+lYO6C1P2mLqv2r3JRlNlLa/o+LjERli6F2bPhp5+g\nTBno0AGeesp63KO6rs+8PpyNPcv0p6a7RX0/SZJo+G1DXm7wMs/WeTZTbWw5voXmE5sT2TeSUvlL\nOTlCex29eJSQCSF0r9s9U51iSnO3z6X3vN70qd+H95q+5zZ3Nes4fZUliYnwxx9WBzB7NhQpYnUA\nHTpAjRrWox99WUxCDPePv59ewb144d4X7A6HiRsn8tXar/jr+b+y9JD4Ab8PYP+5/Ux9aqoTo7PX\n8UvHCZkQQudanXm3ybtOafPIhSN0+7kbF+MuMuXJKVQoXMEp7WaFXshVTpOYKLJqlcirr4oEBIhU\nrSryzjsiGzaIJCXZHZ19dp7aKcWGFZO1h9baGsf5mPNy1/C75K8Df2W5rUtxl6T8yPKy4N8FTojM\nficunZBaX9WS95e87/S2E5MSZfifw93mIi86ZFO5ggisXQuzZlkvuH4GcM89vncGMHPrTAYsHsC6\n3utsm8Dt7cVvc+D8AX544gentPfbzt/oF9GPzX03k8c/j1PatMPpK6dpMakFrSu35qPmH7msDLfx\n6EY6ze5EcOlgvmrzlW138mp5R7mciDXu/2oHcOWKVf/v0AEaNgQ/Hxka8PJvL3PowiFmPz072+v7\nu8/s5t5x9xL5n0jKFCjjtHY7zuxItaLVGNJ8iNPazE5nY87y0KSHaFa+GcMeHubyv5fL8Zd5fcHr\nzN81n8lDuh+/AAASNElEQVRPTKZxQGOX7i81mvRVthKBbduudwCnT1sdwFNPwQMPePf8/7EJsTzw\n/QOE1Qqjf8P+2brvp358inql6jmtVn3V4QuHqfO/OqzovoLqxas7tW1XOxdzjpaTW3J/2fv5/JHP\ns7UjDo8Op88vffjPPf/h3SbvZutFXk36ylbR0d
cvAh8+bA0PfeopCAnxzimg95zZQ8PxDQkPDadh\n2YbZss+le5bSI7wHUS9GuaQMM+qfUcyOms3SbkvdYoSSIy7EXuCRyY8QXDqYUa1H2RL34QuH6fZz\nNy7HX2byE5Oz7SKvJn3lNnbtspL/rFnWbKBVq0LNmtarRg3rZ4UKnn82EB4dzivzX2Fd73UUzVvU\npftKTEokeGww7z74Lh1rdnTZPhqOb8iL975I97rdXbIPZ7oYd5HWU1pTs3hNvm77ta0dVZIk8flf\nnzN01VBGPjKSsNphLt+nJn3lli5csM4Ctm69/tq2DU6cuN4ZXO0IrnYGnnRt4I2FbxB9Mpq5neZm\naehker5Z+w1Tt0xlWbdlLk1uV+fk3/LCForlLeay/WTV5fjLtJ3alkqFKzG23ViX/tlnxIYjG+j8\nU2fql67PmDZjXHqRV5O+8igXLkBU1PVO4GqHcPLkjWcGV1/ly7tnZxCfGE/TCU15vNrjvNn4TZfs\n42zMWaqOrsr8sPnUK13PJftIqf/8/lyIvcD49uNdvq/MuBJ/hXbT2lGmQBm+b/+92yT8qy7HX+a1\nBa+xcNdCfun8CzWK13DJfjTpK6+QsjNIeWZw8qQ1S+jNZSJ36AwOnDvAvePuZdbTs3gg4AGnt//a\ngte4GHeRse2yZ8qE87HnqflVTaY+OZUHAx/Mln06KiYhhvbT21MsbzEmPT6JHH7uWyOcuHEib/7+\nJjM7zqRJYBOnt69JX3m18+dTPzM4fhzKlYPAQKsDKF/+xvd33ZU91w4idkbQ+5ferO+9nuL5ijut\n3eiT0Tzw3QNse3EbJfKVcFq76Zm9bTbvLX2Pjf/ZSK4cubJtv2mJTYjlyR+f5M5cdzL5ycluMx1C\nWhbtWkTYT2GMbjOap2s+7dS2Nekrn3TlivW84H37YO9e65Xy/cmT1pxCqXUIgYFQtqzzRhe9vfht\n1h1ZR0RYhNNKDm2ntqV5+ea83uh1p7TnKBGh3bR2NCrXiLcffDtb952auMQ4Os7siL+fP9OemuZR\nD7bfdHQTbae25dWGr/La/a857ZqMJn2lUhEbCwcOpN4h7NsHx45B6dI3dgYpO4eAAMc7hYSkBFpM\nasHDFR92yjj6iJ0RvDL/Fba8sMWWo+29Z/dyz9h7+KfnP1QqUinb939VfGI8obNDSUhKYGbHmW5z\n5pER+8/tp82UNrSo0IIRj4xwSllKk75SmRAXBwcPpt4h7NljnSnUrAn16l1/1aoFefOm3t7hC4e5\nZ+w9THlyCs0qNMt0XPGJ8dT6uhaftfyMR6s8mul2smrYqmEs2bOEiLAIW4ZEJiQlEPZTGBfjLvLT\n0z+RO2fubI/BWc7GnOWJGU9QJE8RJj8xOcv3WmjSV8oFLlyAyEjYsOH6KzraOgtI2RHUq2fNTArw\n++7f6fZzN9b1XpfpKYtH/j2SiH8jmB8239bx5/GJ8dQfW593HnyHZ4KeydZ9JyYl0vXnrpy6fIqf\nQ3/mjpx3ZOv+XSE2IZYe4T3Yd24f4aHhWRoWq0lfqWwSF2ddTE7ZEWzaZD2Y5moHEF1qEPtYwYqe\ni8iZwSvJJy6doMZXNVjefbnLhvtlxJ8H/qTjzI5sfWFrtkwyJyKsO7KOoX8M5VzsOeaGzvXoieBu\nliRJvL34beZEzyEiLCLTz+LVpK+UjZKSYPfu653A+g2JLC37CDkON6JR7Ic3nBFUqZL2iKK+v/TF\nP4c/X7b+Mvt+gXT0mdeHnH45GdN2jEvaFxE2H9/MjC0zmLF1BgCdgjox8MGB5PW/TS3Nw41ZPYaP\nVn5EeGg495a5N8Pba9JXys0cu3iMul8H82LZ7zG7W17rEI4ds64L1KsHlSpBqVJQsqT181TOSDrO\ne4jol6IpkqeI3b/CNWeunKHGVzUIDw2nQZkGTmt3+8ntTN8ynRlbZ3Ap/hJP13ia0KBQgksHe8z8\nP1kRHh1Oz3
k9mdB+Am2rtM3Qtpr0lXJDy/cuJ3R2KGt7rb02FfK5c7Bxo/Xatw+OHk1+HRN23t+C\npK1PUWr/izd0Binfp1xWsGD2PdNgSuQUPvvrM9b0WpOlMfJ7zuxhxtYZTN8yneOXjtOxRkdCg0K5\nr+x9bnd3bXb4++DfPDHjCQaHDKZ3/d4Ob6dJXyk39fHKj4n4N4Kl3ZammSznRM3hvaXvsfq5jZw+\nmfNaZ3DsGLd9Hxubemdw9X1AgHU2Ubhw1jsHEeHhHx6m7d1tefX+VzO07cHzB/lx64/M2DqD3Wd2\n06F6B54JeoYHAx5067tqs8vOUztpPaU1oUGhDGk2xKGzHE36SrmpJEmi7dS21ClZh6EPDU11nZiE\nGGqMqcHYdmN5qOJDDrd9+bKV/FPrGI4csc4kdu2ypqqoVAkqVrReV99XqmTd0ezovQg7Tu2g0fhG\nbOizgXIFy6W57rGLx5i1bRbTt05n24lttK/antCgUJpXaO4Rd9Nmt+OXjtNuWjuqFq3Kt499m+79\nCJr0lXJjJy+fJPibYL5q+1Wq4+6H/jGUvw7+RXhouNP3LWI95GbXLuti89WfV98fO2bdmZyyI0jZ\nORQocGN7g5cNZuOxjcx5Zs4t+zp1+RQ/Rf3EjK0zWHt4LY9WeZTQoFBaVmrpkTdVZbfL8ZcJnRXK\n5fjLzH56dpqzdGrSV8rNrdq/iid/fJLVPVcTWCjw2vIjF44Q9HUQ//T8h8pFKmd7XLGx188Ibu4U\ndu+GO+64sSMIqBjLR6dq80HjT+ne8DEuxp8jfHs407dMZ9WBVTxS6RGeqfkMbe5u41VDLbNLQlIC\n/SL6serAKn7r/NttH4vpsqRvjGkFjAT8gPEi8kkq64QAnwP+wAkRaZa8fC9wDkgC4kUk1cv+mvSV\nr/jsz8+YtW0WK3qsuHbk2yO8B8XzFmfYw8Nsju5WItakdik7g127YP2ZpWyr0g05XB8qLKHw2WZU\nTXiG+wq1o1K5/AQEWGWjgADrpjUfGIjjVCLCJ6s+4as1X/Fb2G8ElQi6ZR2XJH1jjB+wA2gBHAbW\nAKEiEp1inYLAn0BLETlkjCkmIieTv9sN1BeRM+nsR5O+8gkiQvvp7alcpDIjHhnBmkNreGz6Y2x/\naTsFchdIvwE38uU/X5IvR0HuubM9544VYv9+a56jqz+vvo+Nvd4BpPyZ8n2+fHb/Nu5pSuQUXl3w\nKjM6zLhlWg9XJf2GwAci0jr58wBAUh7tG2P6AqVF5P1Utt8D3CMip9LZjyZ95TNOXzlN/bH1Gd5y\nOJ/9+RnP13ue54Oftzssl7lw4XonkLJTSNk55Mlza6cQEGA9Sa1iRShRwnfPFpbsWULorFBGthpJ\n51qdry13VdJ/CnhERHonf+4CNBCRfinWuVrWqQnkB74UkR+Sv9sNnAUSgbEiMu42+9Gkr3zK6kOr\naT6xOVWLVWV1z9U+PWxRxJrY7uaO4Oqkd7t3Q0zMrSOPrr4PDITcnjsPm0M2H9tM26lteeHeF3ir\n8VsYYzKV9J01ZionEAw0B/IBfxlj/hKRf4HGInLEGFMcWGSMiRKRP1JrZNCgQdfeh4SEEBIS4qTw\nlHI/Dco04IcnfqBSkUo+nfDBOoIvXtx6BQenvs65czdeXN68GcLDrfcHDlj3JaTWIVSsCEWLev5Z\nQq2StRheZTgvDX2J2QVm0/ru1plqx9HyziARaZX8ObXyzlvAHSIyOPnzt0CEiMy+qa0PgAsiMiKV\n/eiRvlIqUxISrMSfchhqyvdJSdc7gpSdQfnyUKyYNSTVWQ/ScbVzMed46senyJcrH3M7zXVJeScH\nsB3rQu4RYDXQSUSiUqxTDRgFtAJyA/8AzwB7AT8RuWiMyQcsBAaLyMJU9qNJXynlEmfOpD4Udc8e\n67vz561rCoUK3foqWDD9ZQULQq5svAUhLjGOnnN78sOTP7h0yOYXXB+yOdQY
0wfriH9s8jpvAD2w\navfjRGSUMaYCMAcQrBLQFBFJ9XZETfpKKbskJcHFi3D27I2vc+duXXa75blypd5ppHwVLpz6+4IF\nwT+DT38UEfz8/PTmLKWUym4icOnSrZ3BmTPXl505c+PymzuRlGcaKTuEtDqLevX0jlyllPI4Kc80\nbu4QbtdZnDkDmzdr0ldKKZ+RmSGbvjdxtVJK+TBN+kop5UM06SullA/RpK+UUj5Ek75SSvkQTfpK\nKeVDNOkrpZQP0aSvlFI+RJO+Ukr5EE36SinlQzTpK6WUD9Gkr5RSPkSTvlJK+RBN+kop5UM06Sul\nlA/RpK+UUj5Ek75SSvkQTfpKKeVDNOkrpZQP0aSvlFI+RJO+Ukr5EE36SinlQzTpK6WUD9Gkr5RS\nPkSTvlJK+RBN+kop5UM06SullA/RpK+UUj5Ek75SSvkQTfpKKeVDNOkrpZQP0aSvlFI+RJO+Ukr5\nEIeSvjGmlTEm2hizwxjz1m3WCTHGbDDGbDHGLM3ItkoppbJHuknfGOMHjAYeAWoCnYwx1W5apyAw\nBnhURIKAjo5u6w2WLVtmdwhZovHbS+O3l6fHn1GOHOk3AHaKyD4RiQemA+1vWqczMFtEDgGIyMkM\nbOvxPP0fjcZvL43fXp4ef0Y5kvTLAAdSfD6YvCylKkARY8xSY8waY8yzGdhWKaVUNsnpxHaCgeZA\nPuAvY8xfTmpbKaWUkxgRSXsFYxoCg0SkVfLnAYCIyCcp1nkLuENEBid//haIAA6lt22KNtIORCml\n1C1ExGRkfUeO9NcAlY0xgcARIBTodNM64cAoY0wOIDdwHzAC2O7AtpkKXCmlVMalm/RFJNEY8xKw\nEOsawHgRiTLG9LG+lrEiEm2MWQBEAonAWBHZBpDatq76ZZRSSqUt3fKOUkop72H7HbmefPOWMaas\nMWaJMWarMWazMaaf3TFllDHGzxiz3hgz1+5YMsMYU9AYM9MYE5X893Cf3TE5yhgzMDnmSGPMFGNM\nLrtjSosxZrwx5pgxJjLFssLGmIXGmO3GmAXJ9+y4pdvEPyz5385GY8xsY0wBO2NMS2rxp/judWNM\nkjGmSHrt2Jr0veDmrQTgNRGpCdwPvOhh8QO8AmyzO4gs+AL4TUSqA3UAjygfJl/n6gXUE5HaWKXW\nUHujStf3WP9XUxoA/C4iVYElwMBsj8pxqcW/EKgpInWBnXhe/BhjygIPA/scacTuI32PvnlLRI6K\nyMbk9xexEo7H3IeQ/I+lDfCt3bFkRvJR2YMi8j2AiCSIyHmbw3LUeSAOyGeMyQnkBQ7bG1LaROQP\n4MxNi9sDE5PfTwQez9agMiC1+EXkdxFJSv74N1A22wNz0G3+/AE+B/7P0XbsTvpec/OWMaY8UBf4\nx95IMuTqPxZPvbBTAThpjPk+uUQ11hiTx+6gHCEiZ4DhwH6soc1nReR3e6PKlBIicgysgyCghM3x\nZMVzWEPNPYYx5jHggIhsdnQbu5O+VzDG5AdmAa8kH/G7PWNMW+BY8pmKSX55mqs3BY4RkWDgMla5\nwe0ZYyoCrwKBwF1AfmNMZ3ujcgqPPIAwxrwDxIvIVLtjcVTyAc7bwAcpF6e3nd1J/xAQkOJz2eRl\nHiP51HwW8IOIhNsdTwY0Bh4zxuwGpgHNjDGTbI4pow5iHeWsTf48C6sT8AT3AKtE5LSIJAI/AY1s\njikzjhljSgIYY0oBx22OJ8OMMd2xypye1ulWAsoDm4wxe7Dy5zpjTJpnW3Yn/Ws3fiWPXAgFPG0U\nyXfANhH5wu5AMkJE3haRABGpiPXnvkREutodV0YklxUOGGOqJC9qgedclN4ONDTG3GGMMVixe8JF\n6JvPCucC3ZPfd8O6UdOd3RC/MaYVVonzMRGJtS0qx12LX0S2iEgpEakoIhWwDoLqiUiaHa+tST/5\nCOfqzVtbgemedPOWMaYxEAY0T36WwPrk
f0Qq+/QDphhjNmKN3vnY5ngcIiKbgEnAOmAT1n/ksbYG\nlQ5jzFTgT6CKMWa/MaYHMBR42BizHavjGmpnjGm5TfyjgPzAouT/v1/ZGmQabhN/SoID5R29OUsp\npXyI3eUdpZRS2UiTvlJK+RBN+kop5UM06SullA/RpK+UUj5Ek75SSvkQTfpKKeVDNOkrpZQP+f+Q\nm5ZOw+MtFwAAAABJRU5ErkJggg==\n", + "image/png": "iVBORw0KGgoAAAANSUhEUgAAAXcAAAEACAYAAABI5zaHAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAIABJREFUeJzt3Xd8VFX6+PHPEzqE3iItNEGqAQEhgAzwZQWVIk0pi7Ai\niij63aKIq7CyfnX1p7sqCIIUcQEXpChgAAUCIoIUA6F3aUsPJRAISc7vjzsphJSZZCZ3ZvK8X6/7\nyp2ZM+c+E8Jzz5x77jlijEEppVRgCbI7AKWUUp6nyV0ppQKQJnellApAmtyVUioAaXJXSqkApMld\nKaUCULbJXUSqicgaEdktItEiMjqDMh1E5LKIbHduf/VOuEoppVxR0IUyCcAfjTFRIhIMbBORVcaY\nfenKrTfG9PB8iEoppdyVbcvdGHPGGBPl3I8F9gJVMygqHo5NKaVUDrnV5y4iNYEwYHMGL7cRkSgR\nWS4iDT0Qm1JKqRxypVsGAGeXzNfAS84WfFrbgBrGmBsi0g1YAtTzXJhKKaXcIa7MLSMiBYFlQIQx\n5iMXyh8FHjDGXEr3vE5ko5RSOWCMcavr29VumRnAnswSu4hUTrPfCuukcSmjssYYv93GjRtnewwa\nv/1x5Mf4/Tn2QIg/J7LtlhGRtsAgIFpEfgUMMBYItXK1mQr0FZGRwG0gDngiR9EopZTyiGyTuzHm\nJ6BANmUmAZM8FZRSSqnc0TtU3eBwOOwOIVc0fnv5c/z+HDv4f/w54dIFVY8dTMTk5fGUUioQiAjG\nzQuqLg+FVEqp3KpZsya//fab3WH4rNDQUI4dO+aRurTlrpTKM84WqN1h+KzMfj85ablrn7tSSgUg\nTe5KKRWANLkrpVQA0uSulFIeMnLkSN5++227wwD0gqpSKg/5+gXVWrVqMX36dDp16mTL8fWCqlJK\n5bHExES7Q3CLJnellAKGDBnC8ePHeeyxxyhVqhTvv/8+QUFBzJgxg9DQUDp37gxA//79ueeeeyhb\ntiwOh4M9e/ak1DFs2DDefPNNANatW0f16tX58MMPqVy5MlWrVmXWrFl59nk0uSulFDB79mxq1KjB\n8uXLuXr1Kv379wdg/fr17Nu3j5UrVwLwyCOPcPjwYc6dO0fz5s0ZNGhQpnWeOXOGa9eucfr0aT7/\n/HNGjRrFlStX8uTzaHJXSvkMEc9suZG2z1tE+Nvf/kaxYsUoUqQIAEOHDqV48eIUKlSIN998kx07\ndnDt2rUM6ypcuDBvvPEGBQoUoFu3bgQHB7N///7cBegiTe5KKZ9hjGc2T6pWrVrKflJSEmPGjKFu\n3bqUKVOGWrVqISJcuHAhw/eWL1+eoKDUNFu8eHFiY9MvZOcdmtyVUspJMmj2p31u7ty5LF26lDVr\n1nD58mWOHTuWqwU1vEmTu1JKOYWEhHDkyBGADJP2tWvXKFKkCGXLluX69eu89tprGZ4QfIEmd6WU\nchozZgwTJkygXLlyLFy48K7EPWTIEGrUqEHVqlVp3Lgx4eHhbtWflyeCPL+J6dAhQ506eXZIpZQP\n8fWbmOzm1zcxzZuX10dUSqn8J9vkLiLVRGSNiOwWkWgRGZ1F2ZYicltEemdWZs4cz1/NVkopdSdX\nWu4JwB+NMY2ANsAoEbkvfSERCQLeBVZmVdnNm7BjR05CVUop5apsk7sx5owxJsq5HwvsBapmUPRF\n4GvgXFb1DRxotd6VUkp5j1t97iJSEwgDN
qd7vgrQyxgzGciy03/QIKvfPSnJvUCVUkq5zuUFskUk\nGKtl/pKzBZ/Wv4BX0xbPrJ7588eTkAB/+AMMHerA4XC4E69SSgW8yMhIIiMjc1WHS0MhRaQgsAyI\nMMZ8lMHrR5J3gQrAdWCEMebbdOWMMYb33oPDh+Gzz3IVu1LKz+hQyKx5ciikq8l9NnDBGPNHF8rO\nBJYaYxZl8JoxxnD8ODRvDqdPQ+HC7oSrlPJnmtyzlqfj3EWkLTAI6CQiv4rIdhHpKiLPisiIDN6S\n7b9cjRrQqBGsWOFOqEop5XuS521P1rhxY9avX+9SWW/Kts/dGPMTUMDVCo0xf3Cl3MCBMHcu9Ojh\nas1KKeWb0k4rsGvXLpfLepNtc8v07QsREZDJNMhKKaVywbbkXr48PPQQLFliVwRKKZXqvffeo1+/\nfnc89/LLL/Pyyy8za9YsGjZsSKlSpahbty5Tp07NtJ5atWqxZs0aAG7evMnQoUMpV64cjRs3ZsuW\nLV79DGnZOivkoEFW14xSStntySefJCIiguvXrwPWwhzz589n4MCBVK5cOWX5vZkzZ/K///u/REVF\nZVvn+PHjOXr0KEePHmXlypV88cUX3v4YKVwe5+4N3bvDc8/BuXNQqZKdkSilfIH8zTP90Wac+yNy\natSoQfPmzVm8eDGDBw9m9erVlChRglatWt1Rrn379vzud7/jxx9/JCwsLMs6FyxYwJQpUyhdujSl\nS5dm9OjRTJgwwe3YcsLW5F6iBDz2GCxYAKNG2RmJUsoX5CQpe9KAAQOYN28egwcPZt68eQwcOBCA\niIgI3nrrLQ4cOEBSUhJxcXE0bdo02/pOnz59xzJ9oaGhXos9PdsX60geNaOUUnbr168fkZGRnDp1\nisWLFzNo0CDi4+Pp27cvr7zyCufPnycmJoZu3bq5NF7/nnvu4cSJEymPf/vtN2+Gfwfbk3uXLnDw\nIBw9anckSqn8rkKFCnTo0IFhw4ZRu3Zt6tWrR3x8PPHx8VSoUIGgoCAiIiJYtWqVS/X179+fd955\nh8uXL3Py5EkmTpzo5U+QyvbkXqgQ9Ouni3gopXzDwIEDWb16NYMGDQIgODiYjz/+mH79+lGuXDm+\n+uorevbsmen7045jHzduHDVq1KBWrVp07dqVIUOGeD3+lDjyepm9jI73008wYgTs2gU+utasUsoD\ndPqBrPn1MnsZadMGrl+H6Gi7I1FKqcDgE8k9KAgGDNBFPJRSylN8olsGrC6ZRx6BY8esZK+UCjza\nLZO1gOuWAWjcGMqUsfrflVJK5Y7PJHfQMe9KKeUpPtMtA/Dbb9CiBZw6pYt4KBWItFsma57slrF1\n+oH0QkPhvvtg1SprWgKlVGAJDQ3Ns/nM/ZEnpyfwqZY7wOTJ8OOP2j2jlFLJvLaGqqe4ktwvXIC6\ndeHkSQgOzqPAlFLKh/n1aJlkFSpA27bwzTd2R6KUUv7LlQWyq4nIGhHZLSLRIjI6gzI9RGSHcwHt\nrSLSKTdB6SIeSimVO9l2y4hICBBijIkSkWBgG9DTGLMvTZnixpgbzv0mwGJjTN0M6sq2WwYgNhaq\nVbNmi6xY0b0PpJRSgcYr3TLGmDPGmCjnfiywF6iarsyNNA+DgQvuBJFecLB1t+rXX+emFqWUyr/c\n6nMXkZpAGLA5g9d6iche4Dvgrq4bdw0cqHPNKKVUTrk8zt3ZJfM18JKzBX8HY8wSYImItAO+BOpn\nVM/48eNT9h0OBw6HI8PjPfwwDBtmzTVTs6arUSqllP+LjIwkMjIyV3W4NBRSRAoCy4AIY8xHLpQ/\nDLQyxlxM97xLfe7JRo60bmwaM8bltyilVMDx5lDIGcCezBK7iNRJs98cIH1izwmda0YppXIm224Z\nEWkLDAKiReRXwABjgVDAGGOmAn1EZAgQD1wHnvBEcG3bwuXL1iIeTZp4okallMoffO4O1fRefdWa\n3/2dd
7wUlFJK+biAmH4gvZ07oUcPOHJEF/FQSuVPATH9QHpNmljj3n/+2e5IlFLKf/h8chfRC6tK\nKeUun++WATh6FB580FrEo1AhLwSmlFI+LCC7ZQBq1YJ774Xvv7c7EqWU8g9+kdxBu2aUUsodftEt\nA3DuHNSrZ3XNlCjh4cCUUsqHBWy3DEClStCmDXz7rd2RKKWU7/Ob5A66iIdSSrnKb7plAK5dg+rV\n4fBhKF/eg4EppZQPC+huGYCSJaFrV13EQymlsuNXyR10EQ+llHKFX3XLAMTHQ5UqsH071KjhocCU\nUsqHBXy3DEDhwtCnD3z1ld2RKKWU7/K75A56Q5NSSmXHL5N7+/Zw8SLs3m13JEop5Zv8MrkHBcGA\nAdp6V0qpzPjdBdVkUVHw+OPWIh7i1mUGpZTyL/nigmqy+++HYsVg0ya7I1FKKd+TbXIXkWoiskZE\ndotItIiMzqDMQBHZ4dw2iIjXl7PWRTyUUipz2XbLiEgIEGKMiRKRYGAb0NMYsy9NmdbAXmPMFRHp\nCow3xrTOoC6PdcuANQ1BeDicPKmLeCilApdXumWMMWeMMVHO/VhgL1A1XZlNxpgrzoeb0r/uLXXq\nQO3asHp1XhxNKaX8h1t97iJSEwgDNmdRbDgQkfOQ3KNdM0opdbeCrhZ0dsl8DbzkbMFnVKYjMAxo\nl1k948ePT9l3OBw4HA5XQ8hQ//7w5ptw4wYUL56rqpRSyidERkYSGRmZqzpcGgopIgWBZUCEMeaj\nTMo0BRYCXY0xhzMp49E+92QPPwx/+AM88YTHq1ZKKdt5cyjkDGBPFom9BlZi/31mid2bdBEPpZS6\nkyujZdoC64FowDi3sUAoYIwxU0VkGtAb+A0Q4LYxplUGdXml5X71qrWIx9GjUK6cx6tXSilb5aTl\n7rd3qKbXvz907gzPPuuV6pVSyjY5Se4uX1D1daNHW9MRVKhgTQmslFL5WcC03AG2bbMS/LBhMG6c\nNcGYUkr5u3zdLZPszBmr5V6pEsyeba27qpRS/ixfTRyWmZAQWLPG6p5p08aaokAppfKbgEvuAEWK\nwNSpMHKkNffMDz/YHZFSSuWtgOuWSS8yEp58EsaMgZde0rnflVL+R/vcM3HsGPTsCc2bw+TJULRo\nnoeglFI5pn3umahZEzZuhNhYcDjg9Gm7I1JKKe/KF8kdoEQJmD8fHnsMWrWCzVnNa6mUUn4uX3TL\npPfNNzB8OHzwAQwZYnc0SimVNe1zd8Pu3dCrF3TvDu+9BwUD5l5dpVSg0eTupkuXrJE0AF99pZOO\nKaV8k15QdVO5cvDdd9CkidUPv3u33REppZRn5OvkDlZ3zAcfWKs5ORxWf7xSSvm7fN0tk94vv0Dv\n3ta0wX/9q97wpJTyDdrn7gGnT1sJvnp1mDkTgoPtjkgpld9pn7sHVKliTVkQHAxt21qrOymllL/R\n5J6BokVhxgxr0e02bWDtWrsjUkop92hyz4SINdHYv/9tDZecNAl8vEdJKaVSuLJAdjVgNlAZSAKm\nGWM+TlemPjATaA6MNcZ8mEldPt/nnpHDh62Jx0qXhvbtrdZ8mzbWgiBKKeVtXrmgKiIhQIgxJkpE\ngoFtQE9jzL40ZSoAoUAvICbQkjtAXBysXw8//2xtmzdD+fKpib5NG2jaVO90VUp5Xp6MlhGRJcAn\nxpjVGbw2DrgWiMk9vaQk2LcvNdn//DMcPw4PPACtW2vrXinlOV5P7iJSE4gEGhtjYjN4Pd8k94xc\nvmy16LV1r5TypJwkd5fTjLNL5mvgpYwSu6vGjx+fsu9wOHA4HDmtyueUKQMPP2xtcHfr/tNPU1v3\naRN+xYr2xq2U8i2RkZFERkbmqg6XWu4iUhBYBkQYYz7Koly+brm7IqPWffJi3t27w+OPQ6FCdkep\nlPIlXuuWEZHZwAVjzB+zKTcOiDXGfJDJ6/k+uaeX3Lr/6SeYMwcOHIA
RI+CZZ6BqVbujU0r5Am+N\nlmkLrAeiAePcxmKNjjHGmKkiUhnYCpTEGi4ZCzRM332jyT17u3db3Tfz5kHnzjBqFHTooPPcKJWf\n6dwyAeTqVfjySyvRAzz/PPz+91CqlL1xKaXynib3AGQMrFtn3SG7ejUMGGAl+kaN7I5MKZVXdOKw\nACRizTO/YAFER1sja7p0SX3u9m27I1RK+SJtufuh27dh8WKry+bgQevi64gR1oyWSqnAoy33fKJQ\nIejf35qaeOVKOHcOGje2nlu3Tic4U0ppyz1gJF+AnTQJgoJSL8CWLGl3ZEqp3NILqgpjrBb9pEmw\nZo1egFU5Z4xhx9kdrDi0gtj4WCZ0nIDomFxbeHX6AeUfRKBjR2s7dQqmTrUuwNavD61aQZEi1mIk\nyVtOHhcqpOPuA9Xlm5f5/vD3RByKYMWhFZQoXIJudbvx4/EfqVmmJsObD7c7ROUibbnnA/HxsHQp\nHDoEt27BzZupW04eJyRkfAIoVw4qV7ZmwqxcOXVL+7hUKT0x+BJjDFFnoog4FEHEoQh2nNlBuxrt\n6Fa3G93u7UbdcnUBiD4bTafZnfj12V+pVqqazVHnP9oto/JEYmJq0k/+GRcHly7B2bPWdu5c6n7a\nx/Hxdyb7zPYrV7ZOFgUK2P1pA09MXAzfH0ltnZcsXDIlmXcI7UCxQsUyfN/fIv/GL6d/YdmAZdo9\nk8c0uSufFxeXdfJPu3/lijVlcnKyr1/fGhXUpIn1U+/WdU2SSeLX//6a0jqPPhtN+9D2VkKv2406\n5eq4VE98YjwtprbglbavMLjpYC9HrdLS5K4Cyu3bcP68lez/+19rgrXoaGvbs8e6oSs50TdpYm31\n60PhwnZHbr9LcZdYdXgVEYciWHloJWWKlqFb3W50rduVDjU7ULRg0RzVu/X0Vh6d+yg7n9tJ5eDK\nHo5aZUaTu8o3EhPh6NHUZJ+8/fYb1KmTmuyTk39oqDVENFAlmSS2/3c7EQet1vmuc7voULNDSkKv\nXba2x4415ocxHI45zIJ+CzxWp8qaJneV7928CXv3pib7Xbusn1evWsNB07f0K1SwO+Lc23N+D12+\n7EKpIqVSulrah7bPces8O3G342j2WTP+3unv9G3Y1yvHUHfS5K5UJi5dSk30yT+jo6FYsdSEHxJi\nXcTNaCte3DdH+Vy+eZlW01oxtv1YhoYNzbPj/nT8J/ou6MuukbsoX7x8nh03v9LkrpQbjIETJ6xk\nv2eP1b9/8aJ1Iki/JSbemezLl8/8RJB2K1nSeyeFJJNEj3k9qF22Nh93+9g7B8nCSxEvEXMzhtmP\nz87zY+c3mtyV8pK4OIiJSU32mZ0E0m83b0LZstZonzZtUm8wCwnJfUzj1o5j7bG1rB6ymkIF8n5t\nxuvx12kyuQmfdPuER+s9mufHz080uSvlY27dsk4Kp0/Djz/C2rWwfr2V3Dt1shK9w2F9E3DHkn1L\nGB0xmi3PbLF11MrqI6sZ+s1Qdo3cRemipW2LI9BpclfKDyQmQlSUlejXrLHWz61Vy0r0nTrBQw9B\n6Szy5N7ze+kwqwPLBy6nZdWWeRd4Jp5d+iwGw9TuU+0OJWBpclfKD92+Ddu2WYl+7VrYtAnuuy+1\nZd+uHQQHW2Wv3LzCg58/yKttX2VYs2H2Bu505eYVGk9uzKyes+hcu7Pd4QQkby2QXQ2YDVTGWvx6\nmjHmrqs3IvIx0A24Dgw1xkRlUEaTu1LZuHULNm9Obdlv2wb33w+OjklEVn6cxqHV+KzHJI8d78YN\n60ax5DuDk/fTPr550xpKGhZmbU2apJ5wAJYfWM6LES+yc+ROggsHZ34wlSPeSu4hQIgxJkpEgoFt\nQE9jzL40ZboBLxhjHhWRB4GPjDGtM6hLk7tSbrpxAzZuhLfWvUXUtVUkzlhDqwcKp3TjtGp15125\niYmp8/ykTdSZJe/ExNS5fZK39I8
LFYLdu63upKgoa3RR1aqpyT4sDGbEDKFKubJ83O0j+35ZASpP\numVEZAnwiTFmdZrnpgBrjTH/cT7eCziMMWfTvVeTu1I5sHT/Up7/7nm2PLOFEiaEDRtSu3EOHLDG\n6V+7ZiXsS5egTJnME3X6x8HB7g/XTEiA/fthx47UhL997yViBjSm+aEFOOq0JSzM+sZRv751clA5\n5/XkLiI1gUigsTEmNs3zS4F3jDEbnY9/AF4xxmxP935N7kq5af+F/bSf2Z5vB3xL62p3fSEmJsa6\nISs5oVeoAAVtWKnBGJi+cRHjN7zGiMQodu8oRlSUdS9Bw4akJPuwMGjaNOuLxupOXl2sw9kl8zXw\nUtrE7q7x48en7DscDhwOR06rUirgXb11lV7/6cX/df6/DBM7WOPoH3oojwPLgAgMb9ublafncaPs\n3/jP2HcBuH7dOvkkt/DnzLEeh4SkJvuGDa3HISHWN4ucfJsIJJGRkURGRuaqDpda7iJSEFgGRBhj\n7upQy6BbZh/QQbtllMq5JJNEn/l9qFyiMlMem2J3OC47G3uWplOasmzAskyHaiYmWovHJCf8fftS\np3w+c8b6FlC5cmqyT94yepwfTgRe65YRkdnABWPMHzN5/RFglPOCamvgX3pBVanceXv92yw/uJy1\nT62lSMEidofjljk75/CPn/7B1hFbKVzA/TmYY2PvTPZp5/5P+zjtiSCrk0HZsla/f6FCVpdV2p9p\n9331JOGt0TJtgfVANGCc21ggFDDGmKnOchOBrlhDIYel7293ltHkbpMzsWcoIAWoWKKi3aEoFyw/\nsJxnlz3Llme2cE/Je+wOx23GGHp81YMW97RgnGOcV4/lyong8mXrfoLbt62LwZn9DArK/gSQ0c8S\nJe6cc6h8+bv3y5fP+TKTehOTytCRmCO0ndGWmwk36V6vO6NajqJV1Va6VJqPOnjxIG1ntGXJk0sI\nrx5udzg5durqKcI+C2P1kNU0rdzU7nCyZYzVXZTdCSCjk8T166nzDV28mPl+XJz1LSKrk0BG+yVK\naHJX6cTExRA+I5xRLUcxoPEAZkbNZPLWyZQtWpYXWr3AE42eyHTNTJX3rt26RuvprRndajTPtnjW\n7nBy7fPtnzNl6xQ2Dd9EwSAbhvD4mPh4a3RTVieAjB7fuqXJXaURnxhPtzndaFKpCf/q+q+U55NM\nEisOrWDiLxPZcnoLw8KGMbLFSGqVrWVjtHeLux3HqsOrWLh3IasOr2LpgKU+MZeKtxhj6LegH2WL\nlmVq96kB8c3KGEOXL7vQpXYXXm33qt3h+CVjIChIk7tyMsbw9LdPc+HGBRY/sZgCQQUyLHf40mEm\nb53MrKhZtK7WmlEtR/Fw3YcJEnvWpLt26xrfHfyOhXsXsvLwSprf05w+Dfpw6uopTsee5oteX9gS\nV154d8O7LNm3hHVD1/ndBdSsHI05SstpLfnpDz9Rv0J9u8PxS9rnrlK88+M7LNizgPXD1rs018eN\n2zeYFz2PSVsmcS3+GiNbjGRY2DDKFivr9Vhj4mL4dv+3LNy7kMhjkbSt0ZY+DfrQo34PKpWoBMDF\nGxep83EdDo8+HJAr/6w4tIKnv32aX4b/QtVSVe0Ox+Mm/jKRebvmsX7o+kwbGipzmtwVAPN3z+fP\nq/7Mz0//7HaiMMaw6eQmJm6ZyHcHv6Nvg76MajWKsJAwj8Z4NvYsS/YtYdG+Rfx84mc61+5MnwZ9\neKzeY5QpWibD9zy15CmaVGrCn8P/7NFY7Hb40mHCZ4SzsP9C2tVoZ3c4XpFkkugwqwP9GvZj9IOj\n7Q7H72hyV/x84md6fNWD73//fa4T8tnYs9YFsW1TqFG6BqNajqJvw745GrcMcOLKCRbtXcTCvQvZ\neXYn3e7tRu/7etPt3m4ufbvYdHITgxcN5sCLB2zrNvK02PhY2kxvw8gWI3m+5fN2h+NVBy4eIHx6\
nOL888wu1y9a2OxyPmfjLRObvnk/raq0Jrx5Om2ptPL6Aiib3fC55yOPn3T/36LJnCUkJfLv/WyZt\nmcSe83sY3mw4z7Z4lmqlqmX73kOXDrFwz0IW7VvEoUuH6F6vO30a9KFLnS4ULVjUrTiMMTSf2px3\nO7/Lw3UfzunH8RnGGJ5c+CQlCpVgeo/pAXEBNTvv//Q+Kw6v4Iff/xAQn3fGrzN4a91bfNT1I3ae\n3cnGkxvZdHIT5YuVJ7x6eMrWqGKjXHVHaXLPx9IOeXyh1QteO87e83v5dMunzImeQ8daHXmh5Qs4\najpS/qMaY9h9fndKQj8be5Ze9/WiT4M+OGo6cr3W57Rt01h+cDlLnlziiY9jq/d/ej/luoi7Jzp/\nlZCUQPj0cJ5p/gzPPPCM3eHkyqK9i3jhuxeIHBpJvfL1Up5PMknsPb+Xn0/+zMYTG9l4YiP/jf0v\nD1Z9MCXZP1j1QbeWJdTknk8lD3lsXLExH+XRXNrXbl3jy51fMmmLtWjEiOYjOHv9LAv3LiTudhy9\nG/SmT4M+hFcP9+gFtOvx16n+z+pEPRdFjdI1PFZvXvv+8Pc8teQpNg/fTPXS1e0OJ0/tOreLjl90\nZPuI7X772dccXcOTXz/JisEraH5P82zLX7hxgU0nN6Uk+62nt1K7bO07Wvd1ytbJ9NuMJvd8yBjD\n8G+Hc/7G+SyHPHrz+Ot+W8f0X6dTtWRVejfoTcsqLb36lXt0xGhKFynNhE4TvHYMbzoSc4Q209uw\noN8CHgr1gekcbfDWurfYfGozywYs87vumS2ntvDo3EdZ0G8BHWp2yFEdtxNvs+PsjpRkv/HERm4m\n3Lwj2T9wzwMpNxhqcs+H3B3yGAj2nN9D59md+e3l33J8cdcu1+OvEz4jnOHNhvPigy/aHY5t4hPj\naTmtJX8J/wuDmw62OxyX7T2/l45fdGRq96n0qN/Do3WfuHLijq6c3ed306RSE8Krh/PPrv/U5J6f\n5GbIo79zzHLwfMvn6d+ov92huMwYw6BFgyhUoBCzes7yuxarp207vY1H5j7Cjud2EBIcYnc42Tp+\n5TjtZrTj753+zpD7h3j9eDdu32Dr6a1sPLGR19q/psk9v/DkkEd/NH/3fCZvnczap9baHYrLPvz5\nQ+ZGz+XHYT/qfD5OY1eP5cDFA3zd/2u7Q8nS+evnaT+zPc+1eI6XW7+c58fPSbdMYAwWzmeOxByh\n9/zezOo5K18mdoBe9/Vi34V97Dm/x+5QXLL6yGre3/g+i55YpIk9jTc7vMnu87tZsHuB3aFk6uqt\nq3Sd05W+DfvakthzSpO7n7l88zKPzn2U19u/7tGx7P6mcIHCDG82nClbfX+Fopi4GAYvHszc3nP9\neoSPNxQtWJSZPWfy/HfPM+mXSfjaN/ubCTfp+VVPWlVpxYSO/nUBX7tl/IgdQx592YkrJwj7LIzj\nLx+nROESdoeTqTE/jOFS3CWmdp9qdyg+68DFAwxcOJAqJaswo+cMKhSvYHdIJCQl0Hd+X4oWLMqc\n3nNsnROuV7LrAAAQQ0lEQVRHu2UCmDGGkctGUrxQcT58+EO7w/EJ1UtXp12NdsyNnmt3KJk6dfUU\n07ZP480Ob9odik+rV74eG5/eyH0V7iNsShg/HPnB1niSTBLPLH2Gmwk3mf34bL+c7EyTu5/4x0//\n4NczvzKvzzy//EPzludbPM+nWz/1ua/zySasn8DTzZ52aaqG/K5wgcK81+U9ZvWaxVNLnuKV718h\nPjE+z+MwxvCXVX9h/4X9LOy/0O+G2ybT5O4H5u+ez6Qtk1g6YGm+Gcvuqi51unDt1jU2n9psdyh3\nOXjxIF/v+Zox7cbYHYpf+Z/a/0PUs1Hsu7CP8OnhHLh4IE+P/+6Gd1l1ZBXLBi7z6e6+7GSb3EVk\nuoicFZGdmbxeRkQWicgOEdkkIg09H2b+tenkJkZ9N4qlA5bmu
7HsrgiSIJ5r8RyTt062O5S7vLH2\nDf7Y5o+UK1bO7lD8TsUSFfnmyW8YFjaMtjPaMvPXmXny7eyzrZ8xbfs0Vg5e6f//bsaYLDegHRAG\n7Mzk9feAN5z79YEfsqjLKNcduXTEhPy/ELN0/1K7Q/FpF65fMGXeLWMuXL9gdygptp3eZu75f/eY\n2Fuxdofi96LPRpvGnzY2Tyx4wsTExXjtOPN3zTdVPqhiDl486LVj5JQzd2abr9Nu2bbcjTEbgJgs\nijQE1jjL7gdqikjFHJ9tFGANeXxk7iOMbTeWx+o9Znc4Pq188fJ0r9edmVEz7Q4lxdjVY/nrQ3/1\n66/1vqJxpcb8MvwXKhSvQNiUMDYc3+DxY6w6vIoXIl7gu4HfUbdcXY/XbwdP9LnvAHoDiEgroAag\nV49y4XbibfrO78vvav8uX88/4o7nWz7PlK1TSDJJdofC2qNrOXjpIMObD7c7lIBRrFAxJj4ykU+6\nfULf+X0ZHzmehKQEj9S96eQmBi0axML+C7k/5H6P1OkLCnqgjneBj0RkOxAN/AokZlZ4/PjxKfsO\nhwOHw+GBEAKHMYaRy0dSrFAxHfLohgerPkjJIiX5/vD3ti7kYYzhtdWvMaHjBL8dZeHLutfvzvYq\n23lqyVM4ZjmY03sOoWVCc1zf7nO76fVVL77o9YVPLXEYGRlJZGRk7ipxpe8GCCWTPvcMyh4FgjN5\nzXudUgHinR/fMWFTwsy1W9fsDsXvTN061fSc19PWGBbvXWyaTm5qEpMSbY0j0CUmJZr3NrxnKr5X\n0XwV/VWO6jgac9RU+7CambNzjoej8zxy0OfuanKvCURn8lppoJBz/xlgVhb1ePt34Nfm75pvqn1Y\nzZy8ctLuUPxS7K1YU+4f5czxy8dtOX5CYoJpMLGBWX5guS3Hz4+2nNpi7v34XjNsyTC3GkRnrp0x\ndT+uaz7Z/IkXo/OcnCT3bKcfEJG5gAMoD5wFxgGFnQebKiKtgS+AJGA38LQx5komdZnsjucrjDGc\niT3DzrM7re3cTs5fP0/BoIIUDCpIgaACKfsFgwpSQLJ+nN174hLimLB+Qr6d5dFTXvzuRcoULWPL\nQh6zomYx49cZrBu6Lt9P55uXYuNjGR0xmg3HNzC3z1xaVGmRZfkrN6/g+MJBr/q9GOcYl0dR5o4u\n1pFDcbfj2HN+zx2JfOfZnRhjuD/kfppWakrTyk0JCQ4h0SSSkJRAQlICiUmp+wlJCXe8ltHrWZYx\nCQxuMpjOtTvb/evwa8kLeRx/+Xiu12t1x82Em9SfWJ95feYRXj08z46rUv1n1394MeJF/hL+F/4U\n/ieC5O7xIjdu36Drv7sSFhLGR10/8puTsF8k95WHVlKxeEUqlqhIxeIVKVKwSJ4d3xjD8SvH70ri\nxy4fo175ejSt3DQlkScnc3/5x1epHLMcjGo5in6N+uXZMf+16V+sPbaWb578Js+Oqe527PIxBi8a\nTLFCxZjdazb3lLwn5bXbibfpPb83pYuUZvbjszNM/r7KL5J75y86c/7Gec5fP8+FGxcoWrAoFUtU\npELxCnck/eT99M8HFw52KeFeu3WNXed23ZXIgwsH35XE61eoryMbAkheL+Rx9dZV7v3kXlYPWU3j\nSo3z5JgqcwlJCby9/m0mb53MtO7T6F6/O0kmiaeWPEVMXAyLn1icp9/qPMEvknva4xljuHrrakqy\nT5v0z984n+HzCUkJmZ4MbiXeIvpcNDvP7uRM7BkaVmx4RxJvUrmJT0wlqrwrPjGe0H+FsmbIGhpU\nbOD1441bO45jV47xRa8vvH4s5boNxzcweNFgHqv3GIIQdTaKlYNXUrxQcbtDc5vfJfeciLsdd0fS\nv3DjQsp+ASmQksjrlqursyfmY39d81eu3rrKx90+9upxzl0/R4NJDdg2Yhs1y9T06rGU+y7fvMxz\ny57j4KWDrB6ymjJFy9gdU
o7ki+SulCuOXzlOs8+aeX0hj5ciXgLQxVN8nDHGr6+f6WIdSjnVKF2D\ndjXaMW/XPK8d49jlY/w7+t+8/tDrXjuG8gx/Tuw5pcldBayRLUby6RbvLeQxLnIcL7R8gUolKnml\nfqVyQ5O7Cli/q/M7rty6wi+nfvF43bvO7WLFoRX8KfxPHq9bKU/Q5K4CVpAE8dwDz/Hp1k89Xvfr\na15nTNsxlCpSyuN1K+UJmtxVQBvWbBjf7v+WizcueqzOjSc2EnUmipEtR3qsTqU8TZO7CmgVileg\ne73uzIqa5ZH6jDGM+WEM4zuMp2jBoh6pUylv0OSuAt7IFiOZvHWyRxbyWHFoBRduXOD39//eA5Ep\n5T2a3FXAa12tNSWLlOSHIz/kqp4kk8Rrq1/j7U5vUzDIE+vcKOU9mtxVwBORlGGRufGfXf+hSMEi\n9Lqvl4ciU8p7NLmrfGFgk4H8ePxHTlw5kaP3xyfG88baN3i387v58oYY5X80uat8IbhwMAMbD2Ta\n9mk5ev/07dOpU64OHWt19HBkSnmHzi2j8o3d53bzP1/+j9sLeVyPv869n9zL0gFLeaDKA16MUKmM\n6dwySmWhUaVG1C9fnyX7lrj1vo83f0z70Paa2JVfyTa5i8h0ETkrIjszeb28iESISJSIRIvIUI9H\nqZSHjGwx0q07Vi/FXeLDTR8yoWPer8mqVG640nKfCTycxesvAFHGmDCgI/CBiOg4MeWTHm/wOPsu\n7GPv+b0ulf/Hhn/Q+77e1Ctfz8uRKeVZ2SZ3Y8wGICaLImeAks79ksBFY0yCB2JTyuMKFyjM082e\nZsrWKdmWPXn1JJ//+jlvdngzDyJTyrM80ec+DWgkIqeBHcBLHqhTKa8Z8cAI/h39b67HX8+y3Fvr\n3mJ4s+FULVU1jyJTynM8kdxfA3YYY6oAzYBJIhLsgXqV8gpXFvLYf2E/i/Yu4tV2r+ZhZEp5jif6\nxtsCbwMYYw6LyFHgPmBrRoXHjx+fsu9wOHA4HB4IQSn3jGwxkrGrx/J0s6czvCnpjbVv8Kc2f6Jc\nsXI2RKfyu8jISCIjI3NVh0vj3EWkJrDUGNMkg9c+AK4aY/4mIpWxkvr9xphLGZTVce7KJySZJO79\n5F7m9p7Lg9UevOO1rae30mNeDw6+eNCr668q5SqvjHMXkbnARqCeiBwXkWEi8qyIjHAWeQdoISI7\ngO+BVzJK7Er5kuSFPCZvnXzXa2NXj+XNDm9qYld+Te9QVfnWhRsXqPtxXQ6PPkz54uUBWH1kNc8u\ne5a9o/a6dRerUt6kd6gq5YYKxSvQo36PlIU8jDG8tvo1/t7p75rYld/T5K7ytZEtRjJl2xSSTBKL\n9y3mdtJt+jfqb3dYSuWa3kmq8rXW1VoTXDiYlYdW8vqa1/nnw/8kSLTNo/yf/hWrfC15IY+nljxF\n5RKVebhOVjNtKOU/NLmrfG9gk4EUCCrAO53f0YU4VMDQ0TJKAbcTb+tFVOWzdLSMUjmkiV0FGk3u\nSikVgDS5K6VUANLkrpRSAUiTu1JKBSBN7kopFYA0uSulVADS5K6UUgFIk7tSSgUgTe5KKRWANLkr\npVQA0uSulFIBSJO7UkoFIFcWyJ4uImdFZGcmr/9ZRH4Vke0iEi0iCSJSxvOhKqWUcpUrLfeZQKYr\nGBhj/p8xppkxpjnwGhBpjLnsqQB9SWRkpN0h5IrGby9/jt+fYwf/jz8nsk3uxpgNQIyL9Q0A5uUq\nIh/m738gGr+9/Dl+f44d/D/+nPBYn7uIFAO6Ags9VadSSqmc8eQF1e7AhkDtklFKKX/i0jJ7IhIK\nLDXGNM2izCJgvjHmqyzK6Bp7SimVA+4us1fQxXLi3DJ+UaQ00AEYlFUl7ganlFIqZ7JN7iIyF3AA\n5UXkODAOKAwYY8xUZ7FewEpjTJy3AlVKKeU6l7pllFJK+Zc8u0NVRLqKyD4ROSAir+bVcT1
BRKqJ\nyBoR2e28UWu03TG5S0SCnDeafWt3LO4SkdIiskBE9jr/DR60OyZ3iMhrzrh3isgcESlsd0xZyejG\nRREpKyKrRGS/iKx0dsX6pEzif8/59xMlIgtFpJSdMWYlqxtHReRPIpIkIuWyqydPkruIBAETsW6G\nagQMEJH78uLYHpIA/NEY0whoA4zys/gBXgL22B1EDn0EfGeMaQDcD+y1OR6XOQcjPAM0cw5IKAg8\naW9U2croxsUxwA/GmPrAGqwbFn1VRvGvAhoZY8KAg/hf/IhINaAL8JsrleRVy70VcNAY85sx5jbw\nFdAzj46da8aYM8aYKOd+LFZyqWpvVK5z/lE8Anxudyzucraw2htjZgIYYxKMMVdtDssdV4F4oISI\nFASKA6ftDSlrmdy42BP4wrn/BdZ1Np+UUfzGmB+MMUnOh5uAankemIuyuHH0n8BfXK0nr5J7VeBE\nmscn8aPkmJaI1ATCgM32RuKW5D8Kf7zAUgu4ICIznd1KU503zPkFY0wM8AFwHDgFXDbG/GBvVDlS\nyRhzFqzGDlDJ5nhy4w9AhN1BuENEegAnjDHRrr5HZ4V0g4gEA18DLzlb8D5PRB4Fzjq/eWQ5pNVH\nFQSaA5Oc8xfdwOoi8AsiUhv4XyAUqAIEi8hAe6PyCH9sKCAirwO3jTFz7Y7FVc7GzFiskYopT2f3\nvrxK7qeAGmkeV3M+5zecX6m/Br40xnxjdzxuaAv0EJEjWPP+dBSR2TbH5I6TWC2Wrc7HX2Mle3/R\nAvjJGHPJGJMILALCbY4pJ86KSGUAEQkBztkcj9tEZChW96S/nVzrADWBHSJyFCt/bhORLL895VVy\n3wLUFZFQ50iBJwF/G7UxA9hjjPnI7kDcYYwZa4ypYYypjfV7X2OMGWJ3XK5ydgWcEJF6zqc6418X\nhvcDrUWkqIgIVvz+cEE4/be8b4Ghzv2nAF9v4NwRv4h0xeqa7GGMuWVbVK5Lid8Ys8sYE2KMqW2M\nqYXV4GlmjMnyBJsnyd3ZYnkB64r1buArY4w//IEDICJtse6+7ZRm7vqudseVj4wG5ohIFNZomf+z\nOR6XGWN2ALOBbcAOrP+wU7N8k82cNy5uBOqJyHERGQa8C3QRkf1YJ6h37YwxK5nE/wkQDHzv/P/7\nqa1BZiGT+NMyuNAtozcxKaVUANILqkopFYA0uSulVADS5K6UUgFIk7tSSgUgTe5KKRWANLkrpVQA\n0uSulFIBSJO7UkoFoP8P2H7H0OSmu1EAAAAASUVORK5CYII=\n", "text/plain": [ - "" + "" ] }, "metadata": {}, @@ -870,6 +915,17 @@ "plt.legend(['train', 'valid'])" ] }, + { + "cell_type": "markdown", + "metadata": { + "collapsed": true + }, + "source": [ + "TODOs:\n", + " 1. modify for semi-supervised learning (using pseudolabels)\n", + " 2. 
include additional costs to enforce unit variance-covariance matrix and zero mean" + ] + }, { "cell_type": "markdown", "metadata": {}, diff --git a/examples/ladder_nets/ladder_nets.py b/examples/ladder_nets/ladder_nets.py index a2be597..e3c7e34 100644 --- a/examples/ladder_nets/ladder_nets.py +++ b/examples/ladder_nets/ladder_nets.py @@ -1,9 +1,12 @@ from lasagne.layers import InputLayer, MergeLayer, DenseLayer, DropoutLayer, \ - GaussianNoiseLayer, NonlinearityLayer + GaussianNoiseLayer, NonlinearityLayer, standardize, BiasLayer, ScaleLayer, \ + ExpressionLayer from lasagne.layers.normalization import BatchNormLayer from lasagne.nonlinearities import * import lasagne +# from ladder_net_layers import CombinatorLayer, SharedNormLayer + import theano import theano.tensor as T @@ -88,14 +91,14 @@ def _combinator_MILAudem(z, u, combinator_params, bc_pttrn): w_u_lin.dimshuffle(*bc_pttrn) * u + \ w_zu_lin.dimshuffle(*bc_pttrn) * z * u + \ b_lin.dimshuffle(*bc_pttrn) - + sigm_pre = w_z_sigm.dimshuffle(*bc_pttrn) * z + \ w_u_sigm.dimshuffle(*bc_pttrn) * u + \ w_zu_sigm.dimshuffle(*bc_pttrn) * z * u + \ b_sigm.dimshuffle(*bc_pttrn) - + sigm_out = T.nnet.sigmoid(sigm_pre) - + output = w_sigm.dimshuffle(*bc_pttrn) * sigm_out + lin_out return output @@ -116,7 +119,7 @@ def _combinator_curiousAI(z, u, combinator_params, bc_pttrn): v_sig_pre = w_v_sig.dimshuffle(*bc_pttrn) * u + \ b_v_sig.dimshuffle(*bc_pttrn) - + v_lin_out = w_v_lin.dimshuffle(*bc_pttrn) * u + \ b_v_lin.dimshuffle(*bc_pttrn) @@ -132,7 +135,7 @@ def _combinator(z, u, combinator_type, combinator_params): bc_pttrn = ('x', 0) elif u.ndim == 4: bc_pttrn = ('x', 0, 'x', 'x') - + if combinator_type == 'milaUDEM': return _combinator_MILAudem(z, u, combinator_params, bc_pttrn) elif combinator_type == 'curiousAI': @@ -153,11 +156,11 @@ def __init__(self, incoming_z, incoming_u, combinator_type, **kwargs): if len(z_shp) != len(u_shp): raise ValueError("The inputs must have the same shape: " - "(batch_size, num_hidden) in 
case of dense layer or \n" - "(batch_size, num_feature_maps, height, width) " - "in case of conv layer.") + "(batch_size, num_hidden) in case of dense layer " + "or \n (batch_size, num_feature_maps, height, " + "width) in case of conv layer.") - self.combinator_params = _create_combinator_params(combinator_type, + self.combinator_params = _create_combinator_params(combinator_type, u_shp[1:], self.name) @@ -170,8 +173,46 @@ def get_output_for(self, inputs, **kwargs): return _combinator(z, u, self.combinator_type, self.combinator_params) -def build_encoder(net, num_hidden, activation, name, - p_drop_hidden=0., shared_net=None): +class SharedNormLayer(MergeLayer): + """ + A layer that combines the terms from dirty and clean encoders, + and outputs denoised variable: + $$ \hat{z} = g(\tilde{z}, u)$$ + """ + def __init__(self, incoming2stats, incoming2norm, axes='auto', epsilon=1e-4, + **kwargs): + super(SharedNormLayer, self).__init__( + [incoming2stats, incoming2norm], **kwargs) + stats_shp, norm_shp = self.input_shapes + + if len(stats_shp) != len(norm_shp): + raise ValueError("The inputs must have the same shape: " + "(batch_size, num_hidden) in case of dense layer " + "or \n (batch_size, num_feature_maps, height, " + "width) in case of conv layer.") + + if axes == 'auto': + # default: normalize over all but the second axis + axes = (0,) + tuple(range(2, len(stats_shp))) + elif isinstance(axes, int): + axes = (axes,) + self.axes = axes + self.epsilon = epsilon + + def get_output_shape_for(self, input_shapes): + return input_shapes[0] + + def get_output_for(self, inputs, **kwargs): + to_stats, to_norm = inputs + assert to_stats.ndim == to_norm.ndim + + mean = to_stats.mean(self.axes) + inv_std = T.inv(T.sqrt(to_stats.var(self.axes) + self.epsilon)) + + return (to_norm - mean) * inv_std + + +def build_encoder(net, num_hidden, activation, name, p_drop_hidden, shared_net): for i, num_nodes in enumerate(num_hidden): dense_lname = 'enc_dense_{}'.format(i) nbatchn_lname = 
'enc_batchn_{}_norm'.format(i) @@ -188,105 +229,107 @@ def build_encoder(net, num_hidden, activation, name, # dense pars W = shared_net[dense_lname].get_params()[0] # batchnorm pars - if activation==rectify: - beta = shared_net[lbatchn_lname].get_params()[0] - gamma = None - else: - beta, gamma = shared_net[lbatchn_lname].get_params() - - net[dense_lname] = DenseLayer(net.values()[-1], num_units=num_nodes, W=W, - nonlinearity=linear, - name='{}_{}'.format(name, dense_lname)) + beta = shared_net[lbatchn_lname + '_beta'].get_params()[0] + gamma = None if activation==rectify else \ + shared_net[lbatchn_lname + '_gamma'].get_params()[0] - shp = net[dense_lname].output_shape[1] - zero_const = T.zeros(shp, np.float32) - one_const = T.ones(shp, np.float32) + # affine transformation: $W \hat{h}$ + net[dense_lname] = DenseLayer(net.values()[-1], num_units=num_nodes, + W=W, nonlinearity=linear, + name='{}_{}'.format(name, dense_lname)) # 1. batchnormalize without learning -> goes to combinator layer l_name = '{}_{}'.format(name, nbatchn_lname) - net[nbatchn_lname] = BatchNormLayer(net.values()[-1], alpha=0.1, - beta=None, gamma=None, name=l_name) + net[nbatchn_lname] = BatchNormLayer(net.values()[-1], alpha=0.1, name=l_name, + beta=None, gamma=None) if shared_net is None: - # add noise in dirty encoder + # for dirty encoder -> add noise net[noise_lname] = GaussianNoiseLayer(net.values()[-1], sigma=p_drop_hidden, name='{}_{}_'.format(name, noise_lname)) - # 2. batchnormalization learning, - # alpha set to one in order to depenend only on the given batch mean and inv_std + # 2. 
scaling & offsetting batchnormalization + noise l_name = '{}_{}'.format(name, lbatchn_lname) - net[lbatchn_lname] = BatchNormLayer(net.values()[-1], alpha=1., - beta=beta, gamma=gamma, name=l_name, - mean=zero_const, inv_std=one_const) - + # offset by beta + net[lbatchn_lname + '_beta'] = BiasLayer(net.values()[-1], b=beta, + name=l_name + '_beta') + + if gamma is not None: + # if not rectify, scale by gamma + net[lbatchn_lname + '_gamma'] = ScaleLayer(net.values()[-1], + scales=gamma, + name=l_name + '_gamma') + # apply activation if i < len(num_hidden) - 1: act_name = 'enc_activation_{}'.format(i) net[act_name] = NonlinearityLayer(net.values()[-1], nonlinearity=activation, name='{}_{}'.format(name, act_name)) + # classfication layer activation -> softmax net['enc_softmax'] = NonlinearityLayer(net.values()[-1], nonlinearity=softmax, name='{}_enc_softmax'.format(name)) return net['enc_softmax'], net -def build_decoder(dirty_net, clean_net, num_nodes, sigma, - combinator_type='milaUDEM'): +def build_decoder(dirty_net, clean_net, num_nodes, sigma, combinator_type): L = len(num_nodes) - 1 # dirty_enc_dense_1 ... z_L z_L = dirty_net['enc_noise_{}'.format(L)] - + # batchnormalized softmax output .. 
u_0 without learning bn beta, gamma dirty_net['u_0'] = BatchNormLayer(dirty_net.values()[-1], beta=None, gamma=None, name='dec_batchn_softmax') - + # denoised latent \hat{z}_L = g(\tilde{z}_L, u_L) comb_name = 'dec_combinator_0' - dirty_net[comb_name] = CombinatorLayer(*[z_L, dirty_net['u_0']], + dirty_net[comb_name] = CombinatorLayer(z_L, dirty_net['u_0'], combinator_type=combinator_type, name=comb_name) - + # batchnormalize denoised latent using clean encoder's bn mean/inv_std without learning enc_bname = 'enc_batchn_{}_norm'.format(L) - mu, inv_std = clean_net[enc_bname].get_params() bname = 'dec_batchn_0' - dirty_net[bname] = BatchNormLayer(dirty_net.values()[-1], alpha=1., - beta=None, gamma=None, name=bname, - mean=mu, inv_std=inv_std) + + to_stats_l = clean_net[enc_bname] + to_norm_l = dirty_net[comb_name] + dirty_net[bname] = SharedNormLayer(to_stats_l, to_norm_l) for i in range(L): # dirty_enc_dense_L-i ... z_l z_l = dirty_net['enc_noise_{}'.format(i)] - + # affine transformation d_name = 'dec_dense_{}'.format(L-i) dirty_net[d_name] = DenseLayer(dirty_net.values()[-1], num_units=num_nodes[i], nonlinearity=linear, name=d_name) - + # batchnormalization ... 
u_l - dirty_net['u_l'] = BatchNormLayer(dirty_net.values()[-1], beta=None, - gamma=None, - name='dec_batchn_dense_{}'.format(L-i)) - + dirty_net['u_{}'.format(i+1)] = BatchNormLayer(dirty_net.values()[-1], + alpha=1., + beta=None,gamma=None, + name='dec_batchn_dense_' + '{}'.format(L-i)) + # denoised latent \hat{z}_L-i comb_name = 'dec_combinator_{}'.format(i+1) - dirty_net[comb_name] = CombinatorLayer(*[z_l, dirty_net['u_l']], + dirty_net[comb_name] = CombinatorLayer(z_l, dirty_net['u_{}'.format(i+1)], combinator_type=combinator_type, name=comb_name) - + # batchnormalized latent \hat{z}_L-i^{BN} enc_bname = 'enc_batchn_{}_norm'.format(L-i-1) - mu, inv_std = clean_net[enc_bname].get_params() bname = 'dec_batchn_{}'.format(L-i) - dirty_net[bname] = BatchNormLayer(dirty_net.values()[-1], alpha=1., - beta=None, gamma=None, name=bname, - mean=mu, inv_std=inv_std) - + + to_stats_l = clean_net[enc_bname] + to_norm_l = dirty_net[comb_name] + dirty_net[bname] = SharedNormLayer(to_stats_l, to_norm_l) + # corrupted input ... z_0 z_0 = dirty_net['inp_corr'] @@ -294,13 +337,15 @@ def build_decoder(dirty_net, clean_net, num_nodes, sigma, d_name = 'dec_dense_{}'.format(L+1) dirty_net[d_name] = DenseLayer(dirty_net.values()[-1], nonlinearity=linear, num_units=num_nodes[i+1], name=d_name) - + # batchnormalization ... 
u_L - dirty_net['u_L'] = BatchNormLayer(dirty_net.values()[-1], beta=None, gamma=None) - + dirty_net['u_{}'.format(L+1)] = BatchNormLayer(dirty_net.values()[-1], alpha=1., + beta=None, gamma=None) + # denoised input reconstruction comb_name = 'dec_combinator_{}'.format(L+1) - dirty_net[comb_name] = CombinatorLayer(*[z_0, dirty_net['u_L']], name=comb_name, + dirty_net[comb_name] = CombinatorLayer(*[z_0, dirty_net['u_{}'.format(L+1)]], + name=comb_name, combinator_type=combinator_type) return dirty_net @@ -317,7 +362,7 @@ def build_model(num_encoder, num_decoder, p_drop_input, p_drop_hidden, # dirty encoder train_output_l, dirty_encoder = build_encoder(net, num_encoder, activation, - 'dirty', p_drop_hidden) + 'dirty', p_drop_hidden, None) # clean encoder clean_net = OrderedDict(net.items()[:1]) @@ -331,20 +376,49 @@ def build_model(num_encoder, num_decoder, p_drop_input, p_drop_hidden, return [train_output_l, eval_output_l], dirty_net, clean_net -def build_cost(X, y, num_decoder, dirty_net, clean_net, output_train, lambdas): - class_cost = T.nnet.categorical_crossentropy(T.clip(output_train, 1e-15, 1), - y).mean() +def get_mu_sigma_costs(hid): + shp = hid.shape + mu = hid.mean(0) + sigma = T.dot(hid.T, hid) / shp[0] + + C_mu = T.sum(mu ** 2) + C_sigma = T.diagonal(sigma - T.log(T.clip(sigma, 1e-15, 1))) + C_sigma -= - T.ones_like(C_sigma) + return C_mu, C_sigma.sum() # trace(C_sigma) + + +def build_cost(X, y, num_decoder, dirty_net, clean_net, output_train, + lambdas, use_extra_costs=False, alphas=None, betas=None, + num_labeled=None, pseudo_labels=None): + xe = T.nnet.categorical_crossentropy + pred = T.clip(output_train, 1e-15, 1) + N = num_labeled if num_labeled else pred.shape[0] + class_cost = xe(pred[:N], y[:N]).mean() + + if pseudo_labels == 'soft': + n = 0 if num_labeled else N + class_cost += xe(pred[n:], pred[n:]).mean() + elif pseudo_labels == 'hard': + M = y.shape[1] + n = 0 if num_labeled else N + pseudo_target = T.eye(M)[pred[n:].argmax(axis=1)] + 
class_cost += xe(pred[n:], pseudo_target).mean() + L = len(num_decoder) - + # get clean and corresponding dirty latent layer output z_clean_l = clean_net['input'] z_dirty_l = dirty_net['dec_combinator_{}'.format(L)] - + z_clean = lasagne.layers.get_output(z_clean_l, X, deterministic=False) z_dirty = lasagne.layers.get_output(z_dirty_l, X, deterministic=False) # squared error - rec_costs = [lambdas[L] * T.sqr(z_clean - z_dirty).mean()] + cost = lambdas[L] * T.sqr(z_clean - z_dirty).mean() + if use_extra_costs: + C_mu, C_sigma = get_mu_sigma_costs(z_clean) + cost += alphas[L] * C_mu + betas[L] * C_sigma + rec_costs = [cost] for l in range(L): z_clean_l = clean_net['enc_batchn_{}_norm'.format(l)] @@ -353,6 +427,10 @@ def build_cost(X, y, num_decoder, dirty_net, clean_net, output_train, lambdas): z_clean = lasagne.layers.get_output(z_clean_l, X, deterministic=False) z_dirty = lasagne.layers.get_output(z_dirty_l, X, deterministic=False) - rec_costs.append(lambdas[l] * T.sqr(z_clean - z_dirty).mean()) + cost = lambdas[l] * T.sqr(z_clean - z_dirty).mean() + if use_extra_costs: + C_mu, C_sigma = get_mu_sigma_costs(z_clean) + cost += alphas[l] * C_mu + betas[l] * C_sigma + rec_costs.append(cost) return class_cost, rec_costs \ No newline at end of file diff --git a/examples/ladder_nets/train_ladder_nets.py b/examples/ladder_nets/train_ladder_nets.py index 2797f54..caf8b82 100644 --- a/examples/ladder_nets/train_ladder_nets.py +++ b/examples/ladder_nets/train_ladder_nets.py @@ -1,14 +1,22 @@ - +from utils import load_data +from ladder_nets import * import time import theano.misc.pkl_utils +import lasagne +import cPickle LEARNING_RATE = 0.1 LR_DECREASE = 1. BATCH_SIZE = 100 NUM_EPOCHS = 15 COMBINATOR_TYPE = 'milaUDEM' -LAMBDAS = [0.1, 0.1, 0.1] +LAMBDAS = [1, 1, 1] DROPOUT = 0.3 +EXTRA_COST = False # True +ALPHAS = None, # [0.1]*3 +BETAS = None, #[0.1]*3 +NUM_LABELED = None +PSEUDO_LABELS = None print "Loading data..." 
dataset = load_data() @@ -35,10 +43,12 @@ # set up (possibly amortizable) lr, cost and updates sh_lr = theano.shared(lasagne.utils.floatX(LEARNING_RATE)) -class_cost, rec_costs = build_cost(X, lasagne.utils.one_hot(y), num_decoder, - dirty_net, clean_net, output_train, LAMBDAS) +class_cost, rec_costs, \ +z_cleans, z_dirties = build_cost(X, lasagne.utils.one_hot(y), num_decoder, + dirty_net, clean_net, output_train, LAMBDAS, + use_extra_costs=EXTRA_COST, alphas=ALPHAS, betas=BETAS, + num_labeled=NUM_LABELED, pseudo_labels=PSEUDO_LABELS) cost = class_cost + T.sum(rec_costs) - net_params = lasagne.layers.get_all_params(train_output_l, trainable=True) updates = lasagne.updates.adam(cost, net_params, learning_rate=sh_lr) @@ -47,7 +57,7 @@ batch_slice = slice(batch_index * BATCH_SIZE, (batch_index + 1) * BATCH_SIZE) pred = T.argmax(output_eval, axis=1) -accuracy = T.mean(T.eq(pred, y), dtype=theano.config.floatX) +accuracy = T.mean(T.eq(pred, y[:NUM_LABELED]), dtype=theano.config.floatX) train = theano.function([batch_index], [cost] + rec_costs, updates=updates, givens={ @@ -60,16 +70,6 @@ y: dataset['y_valid'][batch_slice], }) -# checking for constants in means and inv_stds during training -bl_name = 'enc_batchn_{}_learn' -means = [abs(dirty_net[bl_name.format(i)].mean.ravel()).mean() for i - in range(len(num_encoder))] -means = T.stack(means, axis=1) -stds = [abs(dirty_net[bl_name.format(i)].inv_std.ravel()).mean() for i - in range(len(num_encoder))] -stds = T.stack(stds, axis=1) -get_stats = theano.function([], [means, stds]) - network_dump = {'train_output_layer': train_output_l, 'eval_output_layer': eval_output_l, 'dirty_net': dirty_net, @@ -88,7 +88,6 @@ def save_dump(filename,param_values): def train_epoch(): costs = [] rec_costs = [] - stats = [] for b in range(num_batches_train): train_out = train(b) train_cost = train_out[0] @@ -96,17 +95,13 @@ def train_epoch(): costs.append(train_cost) rec_costs.append(rec_cost) - stats.append(np.vstack(get_stats())) - return 
(np.mean(costs), np.mean(rec_costs, axis=0), - np.stack(stats, axis=0).mean(axis=0)) + return (np.mean(costs), np.mean(rec_costs, axis=0)) def eval_epoch(): costs = [] accs = [] - preds = [] - targets = [] for b in range(num_batches_valid): eval_cost, eval_acc = eval(b) costs.append(eval_cost) @@ -124,7 +119,7 @@ def eval_epoch(): try: for n in range(NUM_EPOCHS): - train_cost, rec_costs, stats = train_epoch() + train_cost, rec_costs = train_epoch() eval_cost, acc = eval_epoch() train_costs.append(train_cost) @@ -134,16 +129,11 @@ def eval_epoch(): print "Epoch %d took %.3f s" % (n + 1, time.time() - now) now = time.time() print "Train cost {}, val cost {}, val acc {}".format(train_costs[-1], - valid_costs[-1], - valid_accs[-1]) + valid_costs[-1], + valid_accs[-1]) print '\n'.join(['Layer #{} rec cost: {}'.format(i, c) for i, c in enumerate(rec_costs)]) - means, inv_stds = stats - for i in range(len(num_encoder)): - print '{}: mean == 0. {}, inv_std == 1. {}'.format(bl_name.format(i), - np.allclose(means[i], 0.), - np.allclose(inv_stds[i], 1.)) - + if (n+1) % 10 == 0: new_lr = sh_lr.get_value() * LR_DECREASE print "New LR:", new_lr diff --git a/examples/ladder_nets/utils.py b/examples/ladder_nets/utils.py index 56adb90..5145a1a 100644 --- a/examples/ladder_nets/utils.py +++ b/examples/ladder_nets/utils.py @@ -1,3 +1,8 @@ +# %load utils.py +import theano +import theano.tensor as T +import lasagne + import gzip import cPickle as pickle import sys From c83ae279a7c33b3336b275a186dabf3aa83d62e2 Mon Sep 17 00:00:00 2001 From: AdrianLsk Date: Wed, 6 Jul 2016 12:34:26 +0200 Subject: [PATCH 07/10] added option for convolution and pooling layers --- examples/ladder_nets/LadderNets.ipynb | 490 +++++++++++----------- examples/ladder_nets/ladder_nets.py | 366 ++++++---------- examples/ladder_nets/train_ladder_nets.py | 56 ++- examples/ladder_nets/utils.py | 20 +- 4 files changed, 405 insertions(+), 527 deletions(-) diff --git a/examples/ladder_nets/LadderNets.ipynb 
b/examples/ladder_nets/LadderNets.ipynb index 845dd48..fc7ccc8 100644 --- a/examples/ladder_nets/LadderNets.ipynb +++ b/examples/ladder_nets/LadderNets.ipynb @@ -100,51 +100,34 @@ "cell_type": "code", "execution_count": null, "metadata": { - "collapsed": false + "collapsed": true }, "outputs": [], "source": [ - "# %load ladder_nets.py\n", - "from lasagne.layers import InputLayer, MergeLayer, DenseLayer, DropoutLayer, \\\n", - " GaussianNoiseLayer, NonlinearityLayer, standardize, BiasLayer, ScaleLayer, \\\n", - " ExpressionLayer\n", - "from lasagne.layers.normalization import BatchNormLayer\n", - "from lasagne.nonlinearities import *\n", - "import lasagne\n", - "\n", - "# from ladder_net_layers import CombinatorLayer, SharedNormLayer\n", + "# %load ladder_net_layers.py\n", + "from lasagne.layers import MergeLayer\n", "\n", "import theano\n", "import theano.tensor as T\n", "\n", "import numpy as np\n", - "from collections import OrderedDict\n", "\n", "\n", "def _create_milaUDEM_params(shape, name):\n", " values = np.zeros((6,) + shape, dtype=theano.config.floatX)\n", "\n", - " b_lin = theano.shared(values[0],\n", - " name='bias_lin_{}'.format(name))\n", - " b_sigm = theano.shared(values[1],\n", - " name='bias_sigm_{}'.format(name))\n", + " b_lin = theano.shared(values[0], name='bias_lin_{}'.format(name))\n", + " b_sigm = theano.shared(values[1], name='bias_sigm_{}'.format(name))\n", "\n", - " w_u_lin = theano.shared(values[2],\n", - " name='weight_u_lin_{}'.format(name))\n", - " w_u_sigm = theano.shared(values[3],\n", - " name='weight_u_sigm_{}'.format(name))\n", - " w_zu_lin = theano.shared(values[4],\n", - " name='weight_zu_lin_{}'.format(name))\n", - " w_zu_sigm = theano.shared(values[5],\n", - " name='weight_zu_sigm_{}'.format(name))\n", + " w_u_lin = theano.shared(values[2], name='weight_u_lin_{}'.format(name))\n", + " w_u_sigm = theano.shared(values[3], name='weight_u_sigm_{}'.format(name))\n", + " w_zu_lin = theano.shared(values[4], 
name='weight_zu_lin_{}'.format(name))\n", + " w_zu_sigm = theano.shared(values[5], name='weight_zu_sigm_{}'.format(name))\n", "\n", " values = np.ones((3,) + shape, dtype=theano.config.floatX)\n", - " w_z_lin = theano.shared(values[0],\n", - " name='weight_z_lin_{}'.format(name))\n", - " w_z_sigm = theano.shared(values[1],\n", - " name='weight_z_sigm_{}'.format(name))\n", - " w_sigm = theano.shared(values[2],\n", - " name='weight_sigm_{}'.format(name))\n", + " w_z_lin = theano.shared(values[0], name='weight_z_lin_{}'.format(name))\n", + " w_z_sigm = theano.shared(values[1], name='weight_z_sigm_{}'.format(name))\n", + " w_sigm = theano.shared(values[2], name='weight_sigm_{}'.format(name))\n", "\n", " # combinator params used in combinator calculations\n", " return [w_u_lin, w_z_lin, w_zu_lin, w_u_sigm, w_z_sigm,\n", @@ -154,29 +137,19 @@ "def _create_curiousAI_params(shape, name):\n", " values = np.zeros((8,) + shape, dtype=theano.config.floatX)\n", "\n", - " b_mu_sig = theano.shared(values[0],\n", - " name='b_mu_sig_{}'.format(name))\n", - " b_mu_lin = theano.shared(values[1],\n", - " name='b_mu_lin_{}'.format(name))\n", - " b_v_sig = theano.shared(values[2],\n", - " name='b_v_sig_{}'.format(name))\n", - " b_v_lin = theano.shared(values[3],\n", - " name='b_v_lin_{}'.format(name))\n", - "\n", - " w_mu_lin = theano.shared(values[4],\n", - " name='w_mu_lin_{}'.format(name))\n", - " w_v_lin = theano.shared(values[5],\n", - " name='w_v_lin_{}'.format(name))\n", - " w_mu = theano.shared(values[6],\n", - " name='w_mu_{}'.format(name))\n", - " w_v = theano.shared(values[7],\n", - " name='w_v_{}'.format(name))\n", + " b_mu_sig = theano.shared(values[0], name='b_mu_sig_{}'.format(name))\n", + " b_mu_lin = theano.shared(values[1], name='b_mu_lin_{}'.format(name))\n", + " b_v_sig = theano.shared(values[2], name='b_v_sig_{}'.format(name))\n", + " b_v_lin = theano.shared(values[3], name='b_v_lin_{}'.format(name))\n", + "\n", + " w_mu_lin = theano.shared(values[4], 
name='w_mu_lin_{}'.format(name))\n", + " w_v_lin = theano.shared(values[5], name='w_v_lin_{}'.format(name))\n", + " w_mu = theano.shared(values[6], name='w_mu_{}'.format(name))\n", + " w_v = theano.shared(values[7], name='w_v_{}'.format(name))\n", "\n", " values = np.ones((2,) + shape, dtype=theano.config.floatX)\n", - " w_mu_sig = theano.shared(values[0],\n", - " name='w_mu_sig_{}'.format(name))\n", - " w_v_sig = theano.shared(values[1],\n", - " name='w_v_sig_{}'.format(name))\n", + " w_mu_sig = theano.shared(values[0], name='w_mu_sig_{}'.format(name))\n", + " w_v_sig = theano.shared(values[1], name='w_v_sig_{}'.format(name))\n", "\n", " # combinator params used in combinator calculations\n", " return [w_mu_lin, w_v_lin, w_mu_sig, w_v_sig, w_mu, w_v,\n", @@ -190,7 +163,7 @@ " return _create_curiousAI_params(shape, name)\n", "\n", "\n", - "def _combinator_MILAudem(z, u, combinator_params, bc_pttrn):\n", + "def _combinator_milaUDEM(z, u, combinator_params, bc_pttrn):\n", " w_u_lin, w_z_lin, w_zu_lin, w_u_sigm, w_z_sigm, w_zu_sigm, w_sigm, \\\n", " b_lin, b_sigm = combinator_params\n", "\n", @@ -241,10 +214,11 @@ " if u.ndim == 2:\n", " bc_pttrn = ('x', 0)\n", " elif u.ndim == 4:\n", - " bc_pttrn = ('x', 0, 'x', 'x')\n", + " bc_pttrn = ('x', 0, 1, 2)\n", + " # bc_pttrn = ('x', 0, 'x', 'x')\n", "\n", " if combinator_type == 'milaUDEM':\n", - " return _combinator_MILAudem(z, u, combinator_params, bc_pttrn)\n", + " return _combinator_milaUDEM(z, u, combinator_params, bc_pttrn)\n", " elif combinator_type == 'curiousAI':\n", " return _combinator_curiousAI(z, u, combinator_params, bc_pttrn)\n", "\n", @@ -282,10 +256,9 @@ "\n", "class SharedNormLayer(MergeLayer):\n", " \"\"\"\n", - " A layer that calculates mean and standard deviation statistics from the \n", - " incoming2stats layer output and uses them for normalizing the output of\n", - " the incoming2norm layer.\n", - " \n", + " A layer that combines the terms from dirty and clean encoders,\n", + " and outputs denoised 
variable:\n", + " $$ \\hat{z} = g(\\tilde{z}, u)$$\n", " \"\"\"\n", " def __init__(self, incoming2stats, incoming2norm, axes='auto', epsilon=1e-4,\n", " **kwargs):\n", @@ -314,49 +287,91 @@ " to_stats, to_norm = inputs\n", " assert to_stats.ndim == to_norm.ndim\n", "\n", - " mean = to_stats.mean(self.axes)\n", - " inv_std = T.inv(T.sqrt(to_stats.var(self.axes) + self.epsilon))\n", + " mean = to_stats.mean(self.axes, keepdims=True)\n", + " inv_std = T.inv(T.sqrt(to_stats.var(self.axes,\n", + " keepdims=True) + self.epsilon))\n", + "\n", + " return (to_norm - mean) * inv_std" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": false + }, + "outputs": [], + "source": [ + "# %load ladder_nets.py\n", + "from lasagne.layers import InputLayer, DenseLayer\n", + "from lasagne.layers import Conv2DLayer as conv\n", + "from lasagne.layers import Deconv2DLayer as deconv\n", + "from lasagne.layers import MaxPool2DLayer as pool\n", + "from lasagne.layers.special import InverseLayer as unpool\n", + "from lasagne.layers.special import BiasLayer, ScaleLayer, NonlinearityLayer\n", + "from lasagne.layers.noise import GaussianNoiseLayer\n", + "from lasagne.layers.normalization import BatchNormLayer\n", + "from lasagne.nonlinearities import rectify, linear\n", + "import lasagne\n", + "\n", + "from ladder_net_layers import CombinatorLayer, SharedNormLayer\n", + "from utils import softmax\n", "\n", - " return (to_norm - mean) * inv_std\n", + "import theano.tensor as T\n", + "\n", + "from collections import OrderedDict\n", "\n", "\n", - "def build_encoder(net, num_hidden, activation, name, p_drop_hidden, shared_net):\n", + "def build_encoder(net, num_hidden, activation, name, p_drop_hidden,\n", + " convolution, pooling, shared_net):\n", " for i, num_nodes in enumerate(num_hidden):\n", - " dense_lname = 'enc_dense_{}'.format(i)\n", + " affine_lname = 'enc_affine_{}'.format(i)\n", " nbatchn_lname = 'enc_batchn_{}_norm'.format(i)\n", " noise_lname = 
'enc_noise_{}'.format(i)\n", " lbatchn_lname = 'enc_batchn_{}_learn'.format(i)\n", "\n", " if shared_net is None:\n", - " # dense pars\n", + " # affine pars\n", " W = lasagne.init.GlorotUniform()\n", " # batchnorm pars\n", " beta = lasagne.init.Constant(0)\n", " gamma = None if activation == rectify else lasagne.init.Constant(1)\n", " else:\n", - " # dense pars\n", - " W = shared_net[dense_lname].get_params()[0]\n", + " # affine weights\n", + " W = shared_net[affine_lname].get_params()[0]\n", " # batchnorm pars\n", " beta = shared_net[lbatchn_lname + '_beta'].get_params()[0]\n", " gamma = None if activation==rectify else \\\n", " shared_net[lbatchn_lname + '_gamma'].get_params()[0]\n", "\n", " # affine transformation: $W \\hat{h}$\n", - " net[dense_lname] = DenseLayer(net.values()[-1], num_units=num_nodes,\n", - " W=W, nonlinearity=linear,\n", - " name='{}_{}'.format(name, dense_lname))\n", + " if convolution:\n", + " net[affine_lname] = conv(net.values()[-1],\n", + " num_filters=num_nodes[0],\n", + " filter_size=num_nodes[1],\n", + " pad=num_nodes[2], stride=num_nodes[3],\n", + " W=W, nonlinearity=linear,\n", + " name='{}_{}_{}'.format(name,\n", + " affine_lname, 'conv'))\n", + " else:\n", + " net[affine_lname] = DenseLayer(net.values()[-1],\n", + " num_units=num_nodes,\n", + " W=W, nonlinearity=linear,\n", + " name='{}_{}_{}'.format(name,\n", + " affine_lname, 'affine'))\n", "\n", " # 1. 
batchnormalize without learning -> goes to combinator layer\n", " l_name = '{}_{}'.format(name, nbatchn_lname)\n", - " net[nbatchn_lname] = BatchNormLayer(net.values()[-1], alpha=0.1, name=l_name,\n", - " beta=None, gamma=None)\n", + " net[nbatchn_lname] = BatchNormLayer(net.values()[-1], alpha=0.1,\n", + " name=l_name, beta=None,\n", + " gamma=None)\n", "\n", " if shared_net is None:\n", " # for dirty encoder -> add noise\n", " net[noise_lname] = GaussianNoiseLayer(net.values()[-1],\n", " sigma=p_drop_hidden,\n", - " name='{}_{}_'.format(name,\n", - " noise_lname))\n", + " name='{}_{}'.format(\n", + " name, noise_lname))\n", "\n", " # 2. scaling & offsetting batchnormalization + noise\n", " l_name = '{}_{}'.format(name, lbatchn_lname)\n", @@ -368,34 +383,51 @@ " # if not rectify, scale by gamma\n", " net[lbatchn_lname + '_gamma'] = ScaleLayer(net.values()[-1],\n", " scales=gamma,\n", - " name=l_name + '_gamma')\n", + " name=l_name+'_gamma')\n", + "\n", + " if pooling:\n", + " pool_name = 'enc_pool_{}'.format(i)\n", + " net[pool_name] = pool(net.values()[-1], pool_size=num_nodes[4],\n", + " stride=num_nodes[5],\n", + " name='{}_{}'.format(name, pool_name))\n", + "\n", " # apply activation\n", " if i < len(num_hidden) - 1:\n", " act_name = 'enc_activation_{}'.format(i)\n", " net[act_name] = NonlinearityLayer(net.values()[-1],\n", " nonlinearity=activation,\n", - " name='{}_{}'.format(name, act_name))\n", + " name='{}_{}'.format(\n", + " name, act_name))\n", "\n", " # classfication layer activation -> softmax\n", - " net['enc_softmax'] = NonlinearityLayer(net.values()[-1], nonlinearity=softmax,\n", - " name='{}_enc_softmax'.format(name))\n", + " net['enc_softmax'] = NonlinearityLayer(net.values()[-1],\n", + " nonlinearity=softmax,\n", + " name='{}_enc_softmax'.format(\n", + " name))\n", "\n", " return net['enc_softmax'], net\n", "\n", "\n", - "def build_decoder(dirty_net, clean_net, num_nodes, sigma, combinator_type):\n", + "def build_decoder(dirty_net, clean_net, 
num_nodes, combinator_type,\n", + " convolution, pooling):\n", " L = len(num_nodes) - 1\n", "\n", - " # dirty_enc_dense_1 ... z_L\n", + " # dirty_enc_affine_1 ... z_L\n", " z_L = dirty_net['enc_noise_{}'.format(L)]\n", "\n", " # batchnormalized softmax output .. u_0 without learning bn beta, gamma\n", " dirty_net['u_0'] = BatchNormLayer(dirty_net.values()[-1], beta=None,\n", " gamma=None, name='dec_batchn_softmax')\n", "\n", + " if pooling:\n", + " unpool_name = 'dec_unpool_{}'.format(L)\n", + " dirty_net[unpool_name] = unpool(dirty_net.values()[-1],\n", + " dirty_net['enc_pool_{}'.format(L)],\n", + " name=unpool_name)\n", + "\n", " # denoised latent \\hat{z}_L = g(\\tilde{z}_L, u_L)\n", " comb_name = 'dec_combinator_0'\n", - " dirty_net[comb_name] = CombinatorLayer(z_L, dirty_net['u_0'],\n", + " dirty_net[comb_name] = CombinatorLayer(z_L, dirty_net.values()[-1],\n", " combinator_type=combinator_type,\n", " name=comb_name)\n", "\n", @@ -408,25 +440,42 @@ " dirty_net[bname] = SharedNormLayer(to_stats_l, to_norm_l)\n", "\n", " for i in range(L):\n", - " # dirty_enc_dense_L-i ... z_l\n", + " # dirty_enc_affine_L-i ... z_l\n", " z_l = dirty_net['enc_noise_{}'.format(i)]\n", "\n", " # affine transformation\n", - " d_name = 'dec_dense_{}'.format(L-i)\n", - " dirty_net[d_name] = DenseLayer(dirty_net.values()[-1],\n", - " num_units=num_nodes[i],\n", - " nonlinearity=linear, name=d_name)\n", - "\n", - " # batchnormalization ... 
u_l\n", - " dirty_net['u_{}'.format(i+1)] = BatchNormLayer(dirty_net.values()[-1],\n", - " alpha=1.,\n", - " beta=None,gamma=None,\n", - " name='dec_batchn_dense_'\n", - " '{}'.format(L-i))\n", + " d_name = 'dec_affine_{}'.format(L-i)\n", + " if convolution:\n", + " dirty_net[d_name] = deconv(dirty_net.values()[-1],\n", + " num_filters=num_nodes[i][0],\n", + " filter_size=num_nodes[i][1],\n", + " crop=num_nodes[i][2],\n", + " stride=num_nodes[i][3],\n", + " nonlinearity=linear, name=d_name +\n", + " '_conv')\n", + " else:\n", + " dirty_net[d_name] = DenseLayer(dirty_net.values()[-1],\n", + " num_units=num_nodes[i],\n", + " nonlinearity=linear,\n", + " name=d_name+'_affine')\n", + "\n", + " # batchnormalization ... u_l\\\n", + " ul_name = 'u_{}'.format(i+1)\n", + " dirty_net[ul_name] = BatchNormLayer(dirty_net.values()[-1], alpha=1.,\n", + " beta=None, gamma=None,\n", + " name='dec_batchn_affine_'\n", + " '{}'.format(L-i))\n", + "\n", + " if pooling:\n", + " unpool_name = 'dec_unpool_{}'.format(L-i-1)\n", + " dirty_net[unpool_name] = unpool(dirty_net.values()[-1],\n", + " dirty_net['enc_pool_{}'\n", + " ''.format(L-i-1)],\n", + " name=unpool_name)\n", "\n", " # denoised latent \\hat{z}_L-i\n", " comb_name = 'dec_combinator_{}'.format(i+1)\n", - " dirty_net[comb_name] = CombinatorLayer(z_l, dirty_net['u_{}'.format(i+1)],\n", + " dirty_net[comb_name] = CombinatorLayer(z_l, dirty_net.values()[-1],\n", " combinator_type=combinator_type,\n", " name=comb_name)\n", "\n", @@ -442,9 +491,18 @@ " z_0 = dirty_net['inp_corr']\n", "\n", " # affine transformation\n", - " d_name = 'dec_dense_{}'.format(L+1)\n", - " dirty_net[d_name] = DenseLayer(dirty_net.values()[-1], nonlinearity=linear,\n", - " num_units=num_nodes[i+1], name=d_name)\n", + " d_name = 'dec_affine_{}'.format(L+1)\n", + " if convolution:\n", + " dirty_net[d_name] = deconv(dirty_net.values()[-1],\n", + " num_filters=num_nodes[i+1][0],\n", + " filter_size=num_nodes[i+1][1],\n", + " crop=num_nodes[i+1][2],\n", + " 
stride=num_nodes[i+1][3],\n", + " nonlinearity=linear,name=d_name+'_conv')\n", + " else:\n", + " dirty_net[d_name] = DenseLayer(dirty_net.values()[-1],\n", + " nonlinearity=linear, name=d_name,\n", + " num_units=num_nodes[i+1])\n", "\n", " # batchnormalization ... u_L\n", " dirty_net['u_{}'.format(L+1)] = BatchNormLayer(dirty_net.values()[-1], alpha=1.,\n", @@ -452,7 +510,7 @@ "\n", " # denoised input reconstruction\n", " comb_name = 'dec_combinator_{}'.format(L+1)\n", - " dirty_net[comb_name] = CombinatorLayer(*[z_0, dirty_net['u_{}'.format(L+1)]],\n", + " dirty_net[comb_name] = CombinatorLayer(z_0, dirty_net['u_{}'.format(L+1)],\n", " name=comb_name,\n", " combinator_type=combinator_type)\n", "\n", @@ -460,26 +518,33 @@ "\n", "\n", "def build_model(num_encoder, num_decoder, p_drop_input, p_drop_hidden,\n", - " activation=rectify, batch_size=None, inp_size=None,\n", - " combinator_type='MILAudem'):\n", + " input_shape, batch_size=None, activation=rectify,\n", + " combinator_type='MILAudem', convolution=False,\n", + " pooling=False):\n", " net = OrderedDict()\n", - " net['input'] = InputLayer((batch_size, inp_size), name='input')\n", + " net['input'] = InputLayer((batch_size, ) + tuple(input_shape), # inp_size),\n", + " name='input')\n", " # corrupted input\n", " net['inp_corr'] = GaussianNoiseLayer(net['input'], sigma=p_drop_input,\n", " name='input_corr')\n", "\n", " # dirty encoder\n", - " train_output_l, dirty_encoder = build_encoder(net, num_encoder, activation,\n", - " 'dirty', p_drop_hidden, None)\n", + " train_output_l, dirty_encoder = build_encoder(net, num_encoder,\n", + " activation, 'dirty',\n", + " p_drop_hidden,\n", + " convolution, pooling,\n", + " None)\n", "\n", " # clean encoder\n", " clean_net = OrderedDict(net.items()[:1])\n", - " eval_output_l, clean_net = build_encoder(clean_net, num_encoder, activation,\n", - " 'clean', 0., shared_net=dirty_encoder)\n", + " eval_output_l, clean_net = build_encoder(clean_net, num_encoder,\n", + " activation, 
'clean', 0.,\n", + " convolution, pooling,\n", + " shared_net=dirty_encoder)\n", "\n", " # dirty decoder\n", " dirty_net = build_decoder(dirty_encoder, clean_net, num_decoder,\n", - " p_drop_hidden, combinator_type)\n", + " combinator_type, convolution, pooling)\n", "\n", " return [train_output_l, eval_output_l], dirty_net, clean_net\n", "\n", @@ -546,24 +611,23 @@ }, { "cell_type": "code", - "execution_count": 2, + "execution_count": null, "metadata": { "collapsed": false }, "outputs": [], "source": [ "# %load utils.py\n", + "import gzip\n", + "import cPickle as pickle\n", "import theano\n", "import theano.tensor as T\n", "import lasagne\n", "\n", - "import gzip\n", - "import cPickle as pickle\n", - "import sys\n", - "\n", "def pickle_load(f, encoding):\n", " return pickle.load(f)\n", "\n", + "\n", "def load_data():\n", " \"\"\"Get data with labels, split into training, validation and test set.\"\"\"\n", " with gzip.open('mnist.pkl.gz', 'rb') as f:\n", @@ -584,12 +648,22 @@ " num_examples_test=X_test.shape[0],\n", " input_dim=X_train.shape[1],\n", " output_dim=10,\n", - " )" + " )\n", + "\n", + "\n", + "def softmax(vec, axis=1):\n", + " \"\"\"\n", + " The ND implementation of softmax nonlinearity applied over a specified\n", + " axis, which is by default the second dimension.\n", + " \"\"\"\n", + " xdev = vec - vec.max(axis, keepdims=True)\n", + " rval = T.exp(xdev)/(T.exp(xdev).sum(axis, keepdims=True))\n", + " return rval" ] }, { "cell_type": "code", - "execution_count": 9, + "execution_count": 1, "metadata": { "collapsed": false }, @@ -598,16 +672,16 @@ "name": "stdout", "output_type": "stream", "text": [ - "--2016-06-22 23:23:59-- http://deeplearning.net/data/mnist/mnist.pkl.gz\n", + "--2016-07-03 19:39:16-- http://deeplearning.net/data/mnist/mnist.pkl.gz\n", "Resolving deeplearning.net (deeplearning.net)... 132.204.26.28\n", "Connecting to deeplearning.net (deeplearning.net)|132.204.26.28|:80... connected.\n", "HTTP request sent, awaiting response... 
200 OK\n", "Length: 16168813 (15M) [application/x-gzip]\n", - "Saving to: 'mnist.pkl.gz.2'\n", + "Saving to: ‘mnist.pkl.gz’\n", "\n", - "100%[======================================>] 16,168,813 4.80MB/s in 3.2s \n", + "100%[======================================>] 16.168.813 5,71MB/s in 2,7s \n", "\n", - "2016-06-22 23:24:08 (4.80 MB/s) - 'mnist.pkl.gz.2' saved [16168813/16168813]\n", + "2016-07-03 19:39:19 (5,71 MB/s) - ‘mnist.pkl.gz’ saved [16168813/16168813]\n", "\n" ] } @@ -618,97 +692,11 @@ }, { "cell_type": "code", - "execution_count": 9, + "execution_count": null, "metadata": { "collapsed": false }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Loading data...\n", - "Building model and compiling functions...\n", - "Starting training...\n", - "Epoch 1 took 22.822 s\n", - "Train cost 2.42561721802, val cost 1.87803590298, val acc 0.810000002384\n", - "Layer #0 rec cost: 0.424141585827\n", - "Layer #1 rec cost: 0.572997391224\n", - "Layer #2 rec cost: 0.489471495152\n", - "Epoch 2 took 22.866 s\n", - "Train cost 2.18242812157, val cost 1.8710668087, val acc 0.829999983311\n", - "Layer #0 rec cost: 0.424109280109\n", - "Layer #1 rec cost: 0.517513096333\n", - "Layer #2 rec cost: 0.471111416817\n", - "Epoch 3 took 22.901 s\n", - "Train cost 2.11269044876, val cost 1.88646125793, val acc 0.910000026226\n", - "Layer #0 rec cost: 0.424222230911\n", - "Layer #1 rec cost: 0.507005512714\n", - "Layer #2 rec cost: 0.466582417488\n", - "Epoch 4 took 22.896 s\n", - "Train cost 2.09363341331, val cost 1.88520228863, val acc 0.899999976158\n", - "Layer #0 rec cost: 0.424132347107\n", - "Layer #1 rec cost: 0.501805484295\n", - "Layer #2 rec cost: 0.465649992228\n", - "Epoch 5 took 22.898 s\n", - "Train cost 2.05789875984, val cost 1.98522758484, val acc 0.870000004768\n", - "Layer #0 rec cost: 0.424120694399\n", - "Layer #1 rec cost: 0.499489545822\n", - "Layer #2 rec cost: 0.465319514275\n", - "Epoch 6 took 22.880 s\n", - "Train cost 
2.05530428886, val cost 1.94931507111, val acc 0.870000004768\n", - "Layer #0 rec cost: 0.424116581678\n", - "Layer #1 rec cost: 0.496793806553\n", - "Layer #2 rec cost: 0.466006070375\n", - "Epoch 7 took 22.849 s\n", - "Train cost 2.04769968987, val cost 1.97700107098, val acc 0.899999976158\n", - "Layer #0 rec cost: 0.424246698618\n", - "Layer #1 rec cost: 0.494832396507\n", - "Layer #2 rec cost: 0.465624034405\n", - "Epoch 8 took 22.739 s\n", - "Train cost 2.03270721436, val cost 1.76881194115, val acc 0.920000016689\n", - "Layer #0 rec cost: 0.424224495888\n", - "Layer #1 rec cost: 0.493043601513\n", - "Layer #2 rec cost: 0.464083582163\n", - "Epoch 9 took 22.830 s\n", - "Train cost 2.02665328979, val cost 1.9300942421, val acc 0.920000016689\n", - "Layer #0 rec cost: 0.424160569906\n", - "Layer #1 rec cost: 0.492138564587\n", - "Layer #2 rec cost: 0.465402901173\n", - "Epoch 10 took 22.917 s\n", - "Train cost 2.00475525856, val cost 2.03754162788, val acc 0.930000007153\n", - "Layer #0 rec cost: 0.424184262753\n", - "Layer #1 rec cost: 0.49082672596\n", - "Layer #2 rec cost: 0.464062064886\n", - "New LR: 0.10000000149\n", - "Epoch 11 took 23.146 s\n", - "Train cost 2.01219749451, val cost 2.06196784973, val acc 0.920000016689\n", - "Layer #0 rec cost: 0.424222916365\n", - "Layer #1 rec cost: 0.489564478397\n", - "Layer #2 rec cost: 0.464669078588\n", - "Epoch 12 took 23.039 s\n", - "Train cost 1.9962041378, val cost 1.93116974831, val acc 0.910000026226\n", - "Layer #0 rec cost: 0.424142956734\n", - "Layer #1 rec cost: 0.488685041666\n", - "Layer #2 rec cost: 0.464961528778\n", - "Epoch 13 took 22.913 s\n", - "Train cost 1.98841309547, val cost 1.86944377422, val acc 0.870000004768\n", - "Layer #0 rec cost: 0.424248784781\n", - "Layer #1 rec cost: 0.488236039877\n", - "Layer #2 rec cost: 0.465781867504\n", - "Epoch 14 took 23.036 s\n", - "Train cost 1.98689389229, val cost 1.96216869354, val acc 0.889999985695\n", - "Layer #0 rec cost: 0.424197643995\n", - 
"Layer #1 rec cost: 0.486736387014\n", - "Layer #2 rec cost: 0.465528964996\n", - "Epoch 15 took 23.210 s\n", - "Train cost 1.98063921928, val cost 1.94275975227, val acc 0.930000007153\n", - "Layer #0 rec cost: 0.424104750156\n", - "Layer #1 rec cost: 0.486671447754\n", - "Layer #2 rec cost: 0.465840518475\n" - ] - } - ], + "outputs": [], "source": [ "# %load train_ladder_nets.py\n", "from utils import load_data\n", @@ -717,52 +705,69 @@ "import theano.misc.pkl_utils\n", "import lasagne\n", "import cPickle\n", + "import numpy as np\n", "\n", "LEARNING_RATE = 0.1\n", "LR_DECREASE = 1.\n", "BATCH_SIZE = 100\n", + "INPUT_SHAPE = [1, 28, 28]\n", "NUM_EPOCHS = 15\n", "COMBINATOR_TYPE = 'milaUDEM'\n", "LAMBDAS = [1, 1, 1]\n", "DROPOUT = 0.3\n", - "# add extra cost to enforce zero mean and unity covariance mat for each layer\n", "EXTRA_COST = False # True\n", "ALPHAS = None, # [0.1]*3\n", - "BETAS = None, #[0.1]*3\n", - "# use only limited number of labeled data\n", - "NUM_LABELED = 10\n", - "# class entropy regularization using pseudolabels predicted for unlabeled data \n", + "BETAS = None, # [0.1]*3\n", + "NUM_LABELED = None\n", "PSEUDO_LABELS = None\n", + "CONV = True # False\n", + "POOL = True # False\n", "\n", "print \"Loading data...\"\n", "dataset = load_data()\n", "\n", - "# build network\n", - "num_encoder = [500, 10]\n", - "num_decoder = [500, 784]\n", + "# build model\n", + "if CONV:\n", + " input_shape = INPUT_SHAPE\n", + " if POOL:\n", + " num_encoder = [[40, 8, 0, 1, 2, 2], [10, 8, 0, 1, 2, 2]]\n", + " num_decoder = [[40, 8, 0, 1, 2, 2], [1, 8, 0, 1, 2, 2]]\n", + " else:\n", + " num_encoder = [[40, 15, 0, 1], [10, 14, 0, 1]]\n", + " num_decoder = [[40, 14, 0, 1], [1, 15, 0, 1]]\n", + "else:\n", + " input_shape = np.prod(INPUT_SHAPE)\n", + " num_encoder = [500, 10]\n", + " num_decoder = [500, input_shape]\n", "\n", "print \"Building model and compiling functions...\"\n", "[train_output_l, eval_output_l], dirty_net, clean_net = build_model(\n", - " 
num_encoder, num_decoder, DROPOUT, DROPOUT, batch_size=None, \n", - " inp_size=784, combinator_type=COMBINATOR_TYPE)\n", + " num_encoder, num_decoder, DROPOUT, DROPOUT, input_shape=input_shape,\n", + " combinator_type=COMBINATOR_TYPE, convolution=CONV, pooling=POOL)\n", + "\n", + "print map(lambda x: (x.name, x.output_shape), dirty_net.values())\n", "\n", "# set up input/output variables\n", - "X = T.fmatrix('X')\n", + "X = T.fmatrix('X') if not CONV else T.ftensor4('x')\n", "y = T.ivector('y')\n", "\n", "# training output\n", - "output_train = lasagne.layers.get_output(train_output_l, X, deterministic=False)\n", + "output_train = lasagne.layers.get_output(train_output_l, X,\n", + " deterministic=False).flatten(2)\n", "\n", "# evaluation output. Also includes output of transform for plotting\n", - "output_eval = lasagne.layers.get_output(eval_output_l, X, deterministic=True)\n", + "output_eval = lasagne.layers.get_output(eval_output_l, X,\n", + " deterministic=True).flatten(2)\n", "\n", "# set up (possibly amortizable) lr, cost and updates\n", "sh_lr = theano.shared(lasagne.utils.floatX(LEARNING_RATE))\n", "\n", "class_cost, rec_costs = build_cost(X, lasagne.utils.one_hot(y), num_decoder,\n", - " dirty_net, clean_net, output_train, LAMBDAS,\n", - " use_extra_costs=EXTRA_COST, alphas=ALPHAS, betas=BETAS,\n", - " num_labeled=NUM_LABELED, pseudo_labels=PSEUDO_LABELS)\n", + " dirty_net, clean_net, output_train,\n", + " LAMBDAS, use_extra_costs=EXTRA_COST,\n", + " alphas=ALPHAS, betas=BETAS,\n", + " num_labeled=NUM_LABELED,\n", + " pseudo_labels=PSEUDO_LABELS)\n", "cost = class_cost + T.sum(rec_costs)\n", "net_params = lasagne.layers.get_all_params(train_output_l, trainable=True)\n", "updates = lasagne.updates.adam(cost, net_params, learning_rate=sh_lr)\n", @@ -772,16 +777,20 @@ "batch_slice = slice(batch_index * BATCH_SIZE, (batch_index + 1) * BATCH_SIZE)\n", "\n", "pred = T.argmax(output_eval, axis=1)\n", - "accuracy = T.mean(T.eq(pred, y), 
dtype=theano.config.floatX)\n", + "accuracy = T.mean(T.eq(pred, y[:NUM_LABELED]), dtype=theano.config.floatX)\n", "\n", "train = theano.function([batch_index], [cost] + rec_costs,\n", " updates=updates, givens={\n", - " X: dataset['X_train'][batch_slice],\n", + " X: dataset['X_train'][batch_slice].reshape(\n", + " (-1, 1, 28, 28)\n", + " ),\n", " y: dataset['y_train'][batch_slice],\n", " })\n", "\n", "eval = theano.function([batch_index], [cost, accuracy], givens={\n", - " X: dataset['X_valid'][batch_slice],\n", + " X: dataset['X_valid'][batch_slice].reshape(\n", + " (-1, 1, 28, 28)\n", + " ),\n", " y: dataset['y_valid'][batch_slice],\n", " })\n", "\n", @@ -824,6 +833,7 @@ "\n", " return np.mean(eval_cost), np.mean(eval_acc)\n", "\n", + "\n", "num_batches_train = dataset['num_examples_train'] // BATCH_SIZE\n", "num_batches_valid = dataset['num_examples_valid'] // BATCH_SIZE\n", "\n", @@ -883,49 +893,17 @@ }, { "cell_type": "code", - "execution_count": 11, + "execution_count": 12, "metadata": { "collapsed": false, "scrolled": true }, - "outputs": [ - { - "data": { - "text/plain": [ - "" - ] - }, - "execution_count": 11, - "metadata": {}, - "output_type": "execute_result" - }, - { - "data": { - "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAXcAAAEACAYAAABI5zaHAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAIABJREFUeJzt3Xd8VFX6+PHPEzqE3iItNEGqAQEhgAzwZQWVIk0pi7Ai\niij63aKIq7CyfnX1p7sqCIIUcQEXpChgAAUCIoIUA6F3aUsPJRAISc7vjzsphJSZZCZ3ZvK8X6/7\nyp2ZM+c+E8Jzz5x77jlijEEppVRgCbI7AKWUUp6nyV0ppQKQJnellApAmtyVUioAaXJXSqkApMld\nKaUCULbJXUSqicgaEdktItEiMjqDMh1E5LKIbHduf/VOuEoppVxR0IUyCcAfjTFRIhIMbBORVcaY\nfenKrTfG9PB8iEoppdyVbcvdGHPGGBPl3I8F9gJVMygqHo5NKaVUDrnV5y4iNYEwYHMGL7cRkSgR\nWS4iDT0Qm1JKqRxypVsGAGeXzNfAS84WfFrbgBrGmBsi0g1YAtTzXJhKKaXcIa7MLSMiBYFlQIQx\n5iMXyh8FHjDGXEr3vE5ko5RSOWCMcavr29VumRnAnswSu4hUTrPfCuukcSmjssYYv93GjRtnewwa\nv/1x5Mf4/Tn2QIg/J7LtlhGRtsAgIFpEfgUMMBYItXK1mQr0FZGRwG0gDngiR9EopZTyiGyTuzHm\nJ6BANmUmAZM8FZRSSqnc0TtU3eBwOOwOIVc0fnv5c/z+HDv4f/w54dIFVY8dTMTk5fGUUioQiAjG\nzQuqLg+FVEqp3KpZsya//fab3WH4rNDQUI4dO+aRurTlrpTKM84WqN1h+KzMfj85ablrn7tSSgUg\nTe5KKRWANLkrpVQA0uSulFIeMnLkSN5++227wwD0gqpSKg/5+gXVWrVqMX36dDp16mTL8fWCqlJK\n5bHExES7Q3CLJnellAKGDBnC8ePHeeyxxyhVqhTvv/8+QUFBzJgxg9DQUDp37gxA//79ueeeeyhb\ntiwOh4M9e/ak1DFs2DDefPNNANatW0f16tX58MMPqVy5MlWrVmXWrFl59nk0uSulFDB79mxq1KjB\n8uXLuXr1Kv379wdg/fr17Nu3j5UrVwLwyCOPcPjwYc6dO0fz5s0ZNGhQpnWeOXOGa9eucfr0aT7/\n/HNGjRrFlStX8uTzaHJXSvkMEc9suZG2z1tE+Nvf/kaxYsUoUqQIAEOHDqV48eIUKlSIN998kx07\ndnDt2rUM6ypcuDBvvPEGBQoUoFu3bgQHB7N///7cBegiTe5KKZ9hjGc2T6pWrVrKflJSEmPGjKFu\n3bqUKVOGWrVqISJcuHAhw/eWL1+eoKDUNFu8eHFiY9MvZOcdmtyVUspJMmj2p31u7ty5LF26lDVr\n1nD58mWOHTuWqwU1vEmTu1JKOYWEhHDkyBGADJP2tWvXKFKkCGXLluX69eu89tprGZ4QfIEmd6WU\nchozZgwTJkygXLlyLFy48K7EPWTIEGrUqEHVqlVp3Lgx4eHhbtWflyeCPL+J6dAhQ506eXZIpZQP\n8fWbmOzm1zcxzZuX10dUSqn8J9vkLiLVRGSNiOwWkWgRGZ1F2ZYicltEemdWZs4cz1/NVkopdSdX\nWu4JwB+NMY2ANsAoEbkvfSERCQLeBVZmVdnNm7BjR05CVUop5apsk7sx5owxJsq5HwvsBapmUPRF\n4GvgXFb1DRxotd6VUkp5j1t97iJSEwgDNqd7vgrQyxgzGciy03/QIKvfPSnJvUCVUkq5zuUFskUk\nGKtl/pKzBZ/Wv4BX0xbPrJ7588eTkAB/+AMMHerA4XC4E69SSgW8yMhIIiMjc1WHS0MhRaQgsAyI\nMMZ8lMHrR5J3gQrAdWCEMebbdOWMMYb33oPDh+Gzz3IVu1LKz+hQyKx5ciikq8l9NnDBGPNHF8rO\nBJYaYxZl8JoxxnD8ODRvDqdPQ+HC7oSrlPJnmtyzlqfj3EWkL
TAI6CQiv4rIdhHpKiLPisiIDN6S\n7b9cjRrQqBGsWOFOqEop5XuS521P1rhxY9avX+9SWW/Kts/dGPMTUMDVCo0xf3Cl3MCBMHcu9Ojh\nas1KKeWb0k4rsGvXLpfLepNtc8v07QsREZDJNMhKKaVywbbkXr48PPQQLFliVwRKKZXqvffeo1+/\nfnc89/LLL/Pyyy8za9YsGjZsSKlSpahbty5Tp07NtJ5atWqxZs0aAG7evMnQoUMpV64cjRs3ZsuW\nLV79DGnZOivkoEFW14xSStntySefJCIiguvXrwPWwhzz589n4MCBVK5cOWX5vZkzZ/K///u/REVF\nZVvn+PHjOXr0KEePHmXlypV88cUX3v4YKVwe5+4N3bvDc8/BuXNQqZKdkSilfIH8zTP90Wac+yNy\natSoQfPmzVm8eDGDBw9m9erVlChRglatWt1Rrn379vzud7/jxx9/JCwsLMs6FyxYwJQpUyhdujSl\nS5dm9OjRTJgwwe3YcsLW5F6iBDz2GCxYAKNG2RmJUsoX5CQpe9KAAQOYN28egwcPZt68eQwcOBCA\niIgI3nrrLQ4cOEBSUhJxcXE0bdo02/pOnz59xzJ9oaGhXos9PdsX60geNaOUUnbr168fkZGRnDp1\nisWLFzNo0CDi4+Pp27cvr7zyCufPnycmJoZu3bq5NF7/nnvu4cSJEymPf/vtN2+Gfwfbk3uXLnDw\nIBw9anckSqn8rkKFCnTo0IFhw4ZRu3Zt6tWrR3x8PPHx8VSoUIGgoCAiIiJYtWqVS/X179+fd955\nh8uXL3Py5EkmTpzo5U+QyvbkXqgQ9Ouni3gopXzDwIEDWb16NYMGDQIgODiYjz/+mH79+lGuXDm+\n+uorevbsmen7045jHzduHDVq1KBWrVp07dqVIUOGeD3+lDjyepm9jI73008wYgTs2gU+utasUsoD\ndPqBrPn1MnsZadMGrl+H6Gi7I1FKqcDgE8k9KAgGDNBFPJRSylN8olsGrC6ZRx6BY8esZK+UCjza\nLZO1gOuWAWjcGMqUsfrflVJK5Y7PJHfQMe9KKeUpPtMtA/Dbb9CiBZw6pYt4KBWItFsma57slrF1\n+oH0QkPhvvtg1SprWgKlVGAJDQ3Ns/nM/ZEnpyfwqZY7wOTJ8OOP2j2jlFLJvLaGqqe4ktwvXIC6\ndeHkSQgOzqPAlFLKh/n1aJlkFSpA27bwzTd2R6KUUv7LlQWyq4nIGhHZLSLRIjI6gzI9RGSHcwHt\nrSLSKTdB6SIeSimVO9l2y4hICBBijIkSkWBgG9DTGLMvTZnixpgbzv0mwGJjTN0M6sq2WwYgNhaq\nVbNmi6xY0b0PpJRSgcYr3TLGmDPGmCjnfiywF6iarsyNNA+DgQvuBJFecLB1t+rXX+emFqWUyr/c\n6nMXkZpAGLA5g9d6iche4Dvgrq4bdw0cqHPNKKVUTrk8zt3ZJfM18JKzBX8HY8wSYImItAO+BOpn\nVM/48eNT9h0OBw6HI8PjPfwwDBtmzTVTs6arUSqllP+LjIwkMjIyV3W4NBRSRAoCy4AIY8xHLpQ/\nDLQyxlxM97xLfe7JRo60bmwaM8bltyilVMDx5lDIGcCezBK7iNRJs98cIH1izwmda0YppXIm224Z\nEWkLDAKiReRXwABjgVDAGGOmAn1EZAgQD1wHnvBEcG3bwuXL1iIeTZp4okallMoffO4O1fRefdWa\n3/2dd7wUlFJK+biAmH4gvZ07oUcPOHJEF/FQSuVPATH9QHpNmljj3n/+2e5IlFLKf/h8chfRC6tK\nKeUun++WATh6FB580FrEo1AhLwSmlFI+LCC7ZQBq1YJ774Xvv7c7EqWU8g9+kdxBu2aUUsodftEt\nA3DuHNSrZ3XNlCjh4cCUUsqHBWy3DEClStCmDXz7rd2RKKWU7/Ob5A66iIdSSrnKb7plAK5dg+rV\n4fBhKF/eg4EppZQPC+huG
YCSJaFrV13EQymlsuNXyR10EQ+llHKFX3XLAMTHQ5UqsH071KjhocCU\nUsqHBXy3DEDhwtCnD3z1ld2RKKWU7/K75A56Q5NSSmXHL5N7+/Zw8SLs3m13JEop5Zv8MrkHBcGA\nAdp6V0qpzPjdBdVkUVHw+OPWIh7i1mUGpZTyL/nigmqy+++HYsVg0ya7I1FKKd+TbXIXkWoiskZE\ndotItIiMzqDMQBHZ4dw2iIjXl7PWRTyUUipz2XbLiEgIEGKMiRKRYGAb0NMYsy9NmdbAXmPMFRHp\nCow3xrTOoC6PdcuANQ1BeDicPKmLeCilApdXumWMMWeMMVHO/VhgL1A1XZlNxpgrzoeb0r/uLXXq\nQO3asHp1XhxNKaX8h1t97iJSEwgDNmdRbDgQkfOQ3KNdM0opdbeCrhZ0dsl8DbzkbMFnVKYjMAxo\nl1k948ePT9l3OBw4HA5XQ8hQ//7w5ptw4wYUL56rqpRSyidERkYSGRmZqzpcGgopIgWBZUCEMeaj\nTMo0BRYCXY0xhzMp49E+92QPPwx/+AM88YTHq1ZKKdt5cyjkDGBPFom9BlZi/31mid2bdBEPpZS6\nkyujZdoC64FowDi3sUAoYIwxU0VkGtAb+A0Q4LYxplUGdXml5X71qrWIx9GjUK6cx6tXSilb5aTl\n7rd3qKbXvz907gzPPuuV6pVSyjY5Se4uX1D1daNHW9MRVKhgTQmslFL5WcC03AG2bbMS/LBhMG6c\nNcGYUkr5u3zdLZPszBmr5V6pEsyeba27qpRS/ixfTRyWmZAQWLPG6p5p08aaokAppfKbgEvuAEWK\nwNSpMHKkNffMDz/YHZFSSuWtgOuWSS8yEp58EsaMgZde0rnflVL+R/vcM3HsGPTsCc2bw+TJULRo\nnoeglFI5pn3umahZEzZuhNhYcDjg9Gm7I1JKKe/KF8kdoEQJmD8fHnsMWrWCzVnNa6mUUn4uX3TL\npPfNNzB8OHzwAQwZYnc0SimVNe1zd8Pu3dCrF3TvDu+9BwUD5l5dpVSg0eTupkuXrJE0AF99pZOO\nKaV8k15QdVO5cvDdd9CkidUPv3u33REppZRn5OvkDlZ3zAcfWKs5ORxWf7xSSvm7fN0tk94vv0Dv\n3ta0wX/9q97wpJTyDdrn7gGnT1sJvnp1mDkTgoPtjkgpld9pn7sHVKliTVkQHAxt21qrOymllL/R\n5J6BokVhxgxr0e02bWDtWrsjUkop92hyz4SINdHYv/9tDZecNAl8vEdJKaVSuLJAdjVgNlAZSAKm\nGWM+TlemPjATaA6MNcZ8mEldPt/nnpHDh62Jx0qXhvbtrdZ8mzbWgiBKKeVtXrmgKiIhQIgxJkpE\ngoFtQE9jzL40ZSoAoUAvICbQkjtAXBysXw8//2xtmzdD+fKpib5NG2jaVO90VUp5Xp6MlhGRJcAn\nxpjVGbw2DrgWiMk9vaQk2LcvNdn//DMcPw4PPACtW2vrXinlOV5P7iJSE4gEGhtjYjN4Pd8k94xc\nvmy16LV1r5TypJwkd5fTjLNL5mvgpYwSu6vGjx+fsu9wOHA4HDmtyueUKQMPP2xtcHfr/tNPU1v3\naRN+xYr2xq2U8i2RkZFERkbmqg6XWu4iUhBYBkQYYz7Koly+brm7IqPWffJi3t27w+OPQ6FCdkep\nlPIlXuuWEZHZwAVjzB+zKTcOiDXGfJDJ6/k+uaeX3Lr/6SeYMwcOHIARI+CZZ6BqVbujU0r5Am+N\nlmkLrAeiAePcxmKNjjHGmKkiUhnYCpTEGi4ZCzRM332jyT17u3db3Tfz5kHnzjBqFHTooPPcKJWf\n6dwyAeTqVfjySyvRAzz/PPz+91CqlL1xKaXynib3AGQMrFtn3SG7ejUMGGAl+kaN7I5MKZVXdOKw\nACRizTO/YAFER1sja7p0SX3u9m27I1RK+SJtufuh27dh8WKry+bgQevi64gR1oyWSqnAoy3
3fKJQ\nIejf35qaeOVKOHcOGje2nlu3Tic4U0ppyz1gJF+AnTQJgoJSL8CWLGl3ZEqp3NILqgpjrBb9pEmw\nZo1egFU5Z4xhx9kdrDi0gtj4WCZ0nIDomFxbeHX6AeUfRKBjR2s7dQqmTrUuwNavD61aQZEi1mIk\nyVtOHhcqpOPuA9Xlm5f5/vD3RByKYMWhFZQoXIJudbvx4/EfqVmmJsObD7c7ROUibbnnA/HxsHQp\nHDoEt27BzZupW04eJyRkfAIoVw4qV7ZmwqxcOXVL+7hUKT0x+BJjDFFnoog4FEHEoQh2nNlBuxrt\n6Fa3G93u7UbdcnUBiD4bTafZnfj12V+pVqqazVHnP9oto/JEYmJq0k/+GRcHly7B2bPWdu5c6n7a\nx/Hxdyb7zPYrV7ZOFgUK2P1pA09MXAzfH0ltnZcsXDIlmXcI7UCxQsUyfN/fIv/GL6d/YdmAZdo9\nk8c0uSufFxeXdfJPu3/lijVlcnKyr1/fGhXUpIn1U+/WdU2SSeLX//6a0jqPPhtN+9D2VkKv2406\n5eq4VE98YjwtprbglbavMLjpYC9HrdLS5K4Cyu3bcP68lez/+19rgrXoaGvbs8e6oSs50TdpYm31\n60PhwnZHbr9LcZdYdXgVEYciWHloJWWKlqFb3W50rduVDjU7ULRg0RzVu/X0Vh6d+yg7n9tJ5eDK\nHo5aZUaTu8o3EhPh6NHUZJ+8/fYb1KmTmuyTk39oqDVENFAlmSS2/3c7EQet1vmuc7voULNDSkKv\nXba2x4415ocxHI45zIJ+CzxWp8qaJneV7928CXv3pib7Xbusn1evWsNB07f0K1SwO+Lc23N+D12+\n7EKpIqVSulrah7bPces8O3G342j2WTP+3unv9G3Y1yvHUHfS5K5UJi5dSk30yT+jo6FYsdSEHxJi\nXcTNaCte3DdH+Vy+eZlW01oxtv1YhoYNzbPj/nT8J/ou6MuukbsoX7x8nh03v9LkrpQbjIETJ6xk\nv2eP1b9/8aJ1Iki/JSbemezLl8/8RJB2K1nSeyeFJJNEj3k9qF22Nh93+9g7B8nCSxEvEXMzhtmP\nz87zY+c3mtyV8pK4OIiJSU32mZ0E0m83b0LZstZonzZtUm8wCwnJfUzj1o5j7bG1rB6ymkIF8n5t\nxuvx12kyuQmfdPuER+s9mufHz080uSvlY27dsk4Kp0/Djz/C2rWwfr2V3Dt1shK9w2F9E3DHkn1L\nGB0xmi3PbLF11MrqI6sZ+s1Qdo3cRemipW2LI9BpclfKDyQmQlSUlejXrLHWz61Vy0r0nTrBQw9B\n6Szy5N7ze+kwqwPLBy6nZdWWeRd4Jp5d+iwGw9TuU+0OJWBpclfKD92+Ddu2WYl+7VrYtAnuuy+1\nZd+uHQQHW2Wv3LzCg58/yKttX2VYs2H2Bu505eYVGk9uzKyes+hcu7Pd4QQkby2QXQ2YDVTGWvx6\nmjHmrqs3IvIx0A24Dgw1xkRlUEaTu1LZuHULNm9Obdlv2wb33w+OjklEVn6cxqHV+KzHJI8d78YN\n60ax5DuDk/fTPr550xpKGhZmbU2apJ5wAJYfWM6LES+yc+ROggsHZ34wlSPeSu4hQIgxJkpEgoFt\nQE9jzL40ZboBLxhjHhWRB4GPjDGtM6hLk7tSbrpxAzZuhLfWvUXUtVUkzlhDqwcKp3TjtGp15125\niYmp8/ykTdSZJe/ExNS5fZK39I8LFYLdu63upKgoa3RR1aqpyT4sDGbEDKFKubJ83O0j+35ZASpP\numVEZAnwiTFmdZrnpgBrjTH/cT7eCziMMWfTvVeTu1I5sHT/Up7/7nm2PLOFEiaEDRtSu3EOHLDG\n6V+7ZiXsS5egTJnME3X6x8HB7g/XTEiA/fthx47UhL997yViBjSm+aEFOOq0JSzM+sZRv751clA5\n5/XkLiI1gUigsTEmNs3zS4F3jDEbnY9/AF4xxmxP935
N7kq5af+F/bSf2Z5vB3xL62p3fSEmJsa6\nISs5oVeoAAVtWKnBGJi+cRHjN7zGiMQodu8oRlSUdS9Bw4akJPuwMGjaNOuLxupOXl2sw9kl8zXw\nUtrE7q7x48en7DscDhwOR06rUirgXb11lV7/6cX/df6/DBM7WOPoH3oojwPLgAgMb9ublafncaPs\n3/jP2HcBuH7dOvkkt/DnzLEeh4SkJvuGDa3HISHWN4ucfJsIJJGRkURGRuaqDpda7iJSEFgGRBhj\n7upQy6BbZh/QQbtllMq5JJNEn/l9qFyiMlMem2J3OC47G3uWplOasmzAskyHaiYmWovHJCf8fftS\np3w+c8b6FlC5cmqyT94yepwfTgRe65YRkdnABWPMHzN5/RFglPOCamvgX3pBVanceXv92yw/uJy1\nT62lSMEidofjljk75/CPn/7B1hFbKVzA/TmYY2PvTPZp5/5P+zjtiSCrk0HZsla/f6FCVpdV2p9p\n9331JOGt0TJtgfVANGCc21ggFDDGmKnOchOBrlhDIYel7293ltHkbpMzsWcoIAWoWKKi3aEoFyw/\nsJxnlz3Llme2cE/Je+wOx23GGHp81YMW97RgnGOcV4/lyong8mXrfoLbt62LwZn9DArK/gSQ0c8S\nJe6cc6h8+bv3y5fP+TKTehOTytCRmCO0ndGWmwk36V6vO6NajqJV1Va6VJqPOnjxIG1ntGXJk0sI\nrx5udzg5durqKcI+C2P1kNU0rdzU7nCyZYzVXZTdCSCjk8T166nzDV28mPl+XJz1LSKrk0BG+yVK\naHJX6cTExRA+I5xRLUcxoPEAZkbNZPLWyZQtWpYXWr3AE42eyHTNTJX3rt26RuvprRndajTPtnjW\n7nBy7fPtnzNl6xQ2Dd9EwSAbhvD4mPh4a3RTVieAjB7fuqXJXaURnxhPtzndaFKpCf/q+q+U55NM\nEisOrWDiLxPZcnoLw8KGMbLFSGqVrWVjtHeLux3HqsOrWLh3IasOr2LpgKU+MZeKtxhj6LegH2WL\nlmVq96kB8c3KGEOXL7vQpXYXXm33qt3h+CVjIChIk7tyMsbw9LdPc+HGBRY/sZgCQQUyLHf40mEm\nb53MrKhZtK7WmlEtR/Fw3YcJEnvWpLt26xrfHfyOhXsXsvLwSprf05w+Dfpw6uopTsee5oteX9gS\nV154d8O7LNm3hHVD1/ndBdSsHI05SstpLfnpDz9Rv0J9u8PxS9rnrlK88+M7LNizgPXD1rs018eN\n2zeYFz2PSVsmcS3+GiNbjGRY2DDKFivr9Vhj4mL4dv+3LNy7kMhjkbSt0ZY+DfrQo34PKpWoBMDF\nGxep83EdDo8+HJAr/6w4tIKnv32aX4b/QtVSVe0Ox+Mm/jKRebvmsX7o+kwbGipzmtwVAPN3z+fP\nq/7Mz0//7HaiMMaw6eQmJm6ZyHcHv6Nvg76MajWKsJAwj8Z4NvYsS/YtYdG+Rfx84mc61+5MnwZ9\neKzeY5QpWibD9zy15CmaVGrCn8P/7NFY7Hb40mHCZ4SzsP9C2tVoZ3c4XpFkkugwqwP9GvZj9IOj\n7Q7H72hyV/x84md6fNWD73//fa4T8tnYs9YFsW1TqFG6BqNajqJvw745GrcMcOLKCRbtXcTCvQvZ\neXYn3e7tRu/7etPt3m4ufbvYdHITgxcN5sCLB2zrNvK02PhY2kxvw8gWI3m+5fN2h+NVBy4eIHx6\nOL888wu1y9a2OxyPmfjLRObvnk/raq0Jrx5Om2ptPL6Aiib3fC55yOPn3T/36LJnCUkJfLv/WyZt\nmcSe83sY3mw4z7Z4lmqlqmX73kOXDrFwz0IW7VvEoUuH6F6vO30a9KFLnS4ULVjUrTiMMTSf2px3\nO7/Lw3UfzunH8RnGGJ5c+CQlCpVgeo/pAXEBNTvv//Q+Kw6v4Iff/xAQn3fGrzN4a91bfNT1I3ae\n3cnGkxvZdHIT5Yu
VJ7x6eMrWqGKjXHVHaXLPx9IOeXyh1QteO87e83v5dMunzImeQ8daHXmh5Qs4\najpS/qMaY9h9fndKQj8be5Ze9/WiT4M+OGo6cr3W57Rt01h+cDlLnlziiY9jq/d/ej/luoi7Jzp/\nlZCUQPj0cJ5p/gzPPPCM3eHkyqK9i3jhuxeIHBpJvfL1Up5PMknsPb+Xn0/+zMYTG9l4YiP/jf0v\nD1Z9MCXZP1j1QbeWJdTknk8lD3lsXLExH+XRXNrXbl3jy51fMmmLtWjEiOYjOHv9LAv3LiTudhy9\nG/SmT4M+hFcP9+gFtOvx16n+z+pEPRdFjdI1PFZvXvv+8Pc8teQpNg/fTPXS1e0OJ0/tOreLjl90\nZPuI7X772dccXcOTXz/JisEraH5P82zLX7hxgU0nN6Uk+62nt1K7bO07Wvd1ytbJ9NuMJvd8yBjD\n8G+Hc/7G+SyHPHrz+Ot+W8f0X6dTtWRVejfoTcsqLb36lXt0xGhKFynNhE4TvHYMbzoSc4Q209uw\noN8CHgr1gekcbfDWurfYfGozywYs87vumS2ntvDo3EdZ0G8BHWp2yFEdtxNvs+PsjpRkv/HERm4m\n3Lwj2T9wzwMpNxhqcs+H3B3yGAj2nN9D59md+e3l33J8cdcu1+OvEz4jnOHNhvPigy/aHY5t4hPj\naTmtJX8J/wuDmw62OxyX7T2/l45fdGRq96n0qN/Do3WfuHLijq6c3ed306RSE8Krh/PPrv/U5J6f\n5GbIo79zzHLwfMvn6d+ov92huMwYw6BFgyhUoBCzes7yuxarp207vY1H5j7Cjud2EBIcYnc42Tp+\n5TjtZrTj753+zpD7h3j9eDdu32Dr6a1sPLGR19q/psk9v/DkkEd/NH/3fCZvnczap9baHYrLPvz5\nQ+ZGz+XHYT/qfD5OY1eP5cDFA3zd/2u7Q8nS+evnaT+zPc+1eI6XW7+c58fPSbdMYAwWzmeOxByh\n9/zezOo5K18mdoBe9/Vi34V97Dm/x+5QXLL6yGre3/g+i55YpIk9jTc7vMnu87tZsHuB3aFk6uqt\nq3Sd05W+DfvakthzSpO7n7l88zKPzn2U19u/7tGx7P6mcIHCDG82nClbfX+Fopi4GAYvHszc3nP9\neoSPNxQtWJSZPWfy/HfPM+mXSfjaN/ubCTfp+VVPWlVpxYSO/nUBX7tl/IgdQx592YkrJwj7LIzj\nLx+nROESdoeTqTE/jOFS3CWmdp9qdyg+68DFAwxcOJAqJaswo+cMKhSvYHdIJCQl0Hd+X4oWLMqc\n3nNsnROuV7LrAAAQQ0lEQVRHu2UCmDGGkctGUrxQcT58+EO7w/EJ1UtXp12NdsyNnmt3KJk6dfUU\n07ZP480Ob9odik+rV74eG5/eyH0V7iNsShg/HPnB1niSTBLPLH2Gmwk3mf34bL+c7EyTu5/4x0//\n4NczvzKvzzy//EPzludbPM+nWz/1ua/zySasn8DTzZ52aaqG/K5wgcK81+U9ZvWaxVNLnuKV718h\nPjE+z+MwxvCXVX9h/4X9LOy/0O+G2ybT5O4H5u+ez6Qtk1g6YGm+Gcvuqi51unDt1jU2n9psdyh3\nOXjxIF/v+Zox7cbYHYpf+Z/a/0PUs1Hsu7CP8OnhHLh4IE+P/+6Gd1l1ZBXLBi7z6e6+7GSb3EVk\nuoicFZGdmbxeRkQWicgOEdkkIg09H2b+tenkJkZ9N4qlA5bmu7HsrgiSIJ5r8RyTt062O5S7vLH2\nDf7Y5o+UK1bO7lD8TsUSFfnmyW8YFjaMtjPaMvPXmXny7eyzrZ8xbfs0Vg5e6f//bsaYLDegHRAG\n7Mzk9feAN5z79YEfsqjLKNcduXTEhPy/ELN0/1K7Q/FpF65fMGXeLWMuXL9gdygptp3eZu75f/eY\n2Fuxdofi96LPRpvGnzY2Tyx4wsTExXjtOPN3zTdVPqhiDl486LVj5JQzd2abr9Nu2
bbcjTEbgJgs\nijQE1jjL7gdqikjFHJ9tFGANeXxk7iOMbTeWx+o9Znc4Pq188fJ0r9edmVEz7Q4lxdjVY/nrQ3/1\n66/1vqJxpcb8MvwXKhSvQNiUMDYc3+DxY6w6vIoXIl7gu4HfUbdcXY/XbwdP9LnvAHoDiEgroAag\nV49y4XbibfrO78vvav8uX88/4o7nWz7PlK1TSDJJdofC2qNrOXjpIMObD7c7lIBRrFAxJj4ykU+6\nfULf+X0ZHzmehKQEj9S96eQmBi0axML+C7k/5H6P1OkLCnqgjneBj0RkOxAN/AokZlZ4/PjxKfsO\nhwOHw+GBEAKHMYaRy0dSrFAxHfLohgerPkjJIiX5/vD3ti7kYYzhtdWvMaHjBL8dZeHLutfvzvYq\n23lqyVM4ZjmY03sOoWVCc1zf7nO76fVVL77o9YVPLXEYGRlJZGRk7ipxpe8GCCWTPvcMyh4FgjN5\nzXudUgHinR/fMWFTwsy1W9fsDsXvTN061fSc19PWGBbvXWyaTm5qEpMSbY0j0CUmJZr3NrxnKr5X\n0XwV/VWO6jgac9RU+7CambNzjoej8zxy0OfuanKvCURn8lppoJBz/xlgVhb1ePt34Nfm75pvqn1Y\nzZy8ctLuUPxS7K1YU+4f5czxy8dtOX5CYoJpMLGBWX5guS3Hz4+2nNpi7v34XjNsyTC3GkRnrp0x\ndT+uaz7Z/IkXo/OcnCT3bKcfEJG5gAMoD5wFxgGFnQebKiKtgS+AJGA38LQx5komdZnsjucrjDGc\niT3DzrM7re3cTs5fP0/BoIIUDCpIgaACKfsFgwpSQLJ+nN174hLimLB+Qr6d5dFTXvzuRcoULWPL\nQh6zomYx49cZrBu6Lt9P55uXYuNjGR0xmg3HNzC3z1xaVGmRZfkrN6/g+MJBr/q9GOcYl0dR5o4u\n1pFDcbfj2HN+zx2JfOfZnRhjuD/kfppWakrTyk0JCQ4h0SSSkJRAQlICiUmp+wlJCXe8ltHrWZYx\nCQxuMpjOtTvb/evwa8kLeRx/+Xiu12t1x82Em9SfWJ95feYRXj08z46rUv1n1394MeJF/hL+F/4U\n/ieC5O7xIjdu36Drv7sSFhLGR10/8puTsF8k95WHVlKxeEUqlqhIxeIVKVKwSJ4d3xjD8SvH70ri\nxy4fo175ejSt3DQlkScnc3/5x1epHLMcjGo5in6N+uXZMf+16V+sPbaWb578Js+Oqe527PIxBi8a\nTLFCxZjdazb3lLwn5bXbibfpPb83pYuUZvbjszNM/r7KL5J75y86c/7Gec5fP8+FGxcoWrAoFUtU\npELxCnck/eT99M8HFw52KeFeu3WNXed23ZXIgwsH35XE61eoryMbAkheL+Rx9dZV7v3kXlYPWU3j\nSo3z5JgqcwlJCby9/m0mb53MtO7T6F6/O0kmiaeWPEVMXAyLn1icp9/qPMEvknva4xljuHrrakqy\nT5v0z984n+HzCUkJmZ4MbiXeIvpcNDvP7uRM7BkaVmx4RxJvUrmJT0wlqrwrPjGe0H+FsmbIGhpU\nbOD1441bO45jV47xRa8vvH4s5boNxzcweNFgHqv3GIIQdTaKlYNXUrxQcbtDc5vfJfeciLsdd0fS\nv3DjQsp+ASmQksjrlqursyfmY39d81eu3rrKx90+9upxzl0/R4NJDdg2Yhs1y9T06rGU+y7fvMxz\ny57j4KWDrB6ymjJFy9gdUo7ki+SulCuOXzlOs8+aeX0hj5ciXgLQxVN8nDHGr6+f6WIdSjnVKF2D\ndjXaMW/XPK8d49jlY/w7+t+8/tDrXjuG8gx/Tuw5pcldBayRLUby6RbvLeQxLnIcL7R8gUolKnml\nfqVyQ5O7Cli/q/M7rty6wi+nfvF43bvO7WLFoRX8KfxPHq9bKU/Q5K4CVpAE8dwDz/Hp1k89Xvfr\na15nTNsxlCpSyuN1K+UJmtxVQBvWbBjf7v+Wi
zcueqzOjSc2EnUmipEtR3qsTqU8TZO7CmgVileg\ne73uzIqa5ZH6jDGM+WEM4zuMp2jBoh6pUylv0OSuAt7IFiOZvHWyRxbyWHFoBRduXOD39//eA5Ep\n5T2a3FXAa12tNSWLlOSHIz/kqp4kk8Rrq1/j7U5vUzDIE+vcKOU9mtxVwBORlGGRufGfXf+hSMEi\n9Lqvl4ciU8p7NLmrfGFgk4H8ePxHTlw5kaP3xyfG88baN3i387v58oYY5X80uat8IbhwMAMbD2Ta\n9mk5ev/07dOpU64OHWt19HBkSnmHzi2j8o3d53bzP1/+j9sLeVyPv869n9zL0gFLeaDKA16MUKmM\n6dwySmWhUaVG1C9fnyX7lrj1vo83f0z70Paa2JVfyTa5i8h0ETkrIjszeb28iESISJSIRIvIUI9H\nqZSHjGwx0q07Vi/FXeLDTR8yoWPer8mqVG640nKfCTycxesvAFHGmDCgI/CBiOg4MeWTHm/wOPsu\n7GPv+b0ulf/Hhn/Q+77e1Ctfz8uRKeVZ2SZ3Y8wGICaLImeAks79ksBFY0yCB2JTyuMKFyjM082e\nZsrWKdmWPXn1JJ//+jlvdngzDyJTyrM80ec+DWgkIqeBHcBLHqhTKa8Z8cAI/h39b67HX8+y3Fvr\n3mJ4s+FULVU1jyJTynM8kdxfA3YYY6oAzYBJIhLsgXqV8gpXFvLYf2E/i/Yu4tV2r+ZhZEp5jif6\nxtsCbwMYYw6LyFHgPmBrRoXHjx+fsu9wOHA4HB4IQSn3jGwxkrGrx/J0s6czvCnpjbVv8Kc2f6Jc\nsXI2RKfyu8jISCIjI3NVh0vj3EWkJrDUGNMkg9c+AK4aY/4mIpWxkvr9xphLGZTVce7KJySZJO79\n5F7m9p7Lg9UevOO1rae30mNeDw6+eNCr668q5SqvjHMXkbnARqCeiBwXkWEi8qyIjHAWeQdoISI7\ngO+BVzJK7Er5kuSFPCZvnXzXa2NXj+XNDm9qYld+Te9QVfnWhRsXqPtxXQ6PPkz54uUBWH1kNc8u\ne5a9o/a6dRerUt6kd6gq5YYKxSvQo36PlIU8jDG8tvo1/t7p75rYld/T5K7ytZEtRjJl2xSSTBKL\n9y3mdtJt+jfqb3dYSuWa3kmq8rXW1VoTXDiYlYdW8vqa1/nnw/8kSLTNo/yf/hWrfC15IY+nljxF\n5RKVebhOVjNtKOU/NLmrfG9gk4EUCCrAO53f0YU4VMDQ0TJKAbcTb+tFVOWzdLSMUjmkiV0FGk3u\nSikVgDS5K6VUANLkrpRSAUiTu1JKBSBN7kopFYA0uSulVADS5K6UUgFIk7tSSgUgTe5KKRWANLkr\npVQA0uSulFIBSJO7UkoFIFcWyJ4uImdFZGcmr/9ZRH4Vke0iEi0iCSJSxvOhKqWUcpUrLfeZQKYr\nGBhj/p8xppkxpjnwGhBpjLnsqQB9SWRkpN0h5IrGby9/jt+fYwf/jz8nsk3uxpgNQIyL9Q0A5uUq\nIh/m738gGr+9/Dl+f44d/D/+nPBYn7uIFAO6Ags9VadSSqmc8eQF1e7AhkDtklFKKX/i0jJ7IhIK\nLDXGNM2izCJgvjHmqyzK6Bp7SimVA+4us1fQxXLi3DJ+UaQ00AEYlFUl7ganlFIqZ7JN7iIyF3AA\n5UXkODAOKAwYY8xUZ7FewEpjTJy3AlVKKeU6l7pllFJK+Zc8u0NVRLqKyD4ROSAir+bVcT1BRKqJ\nyBoR2e28UWu03TG5S0SCnDeafWt3LO4SkdIiskBE9jr/DR60OyZ3iMhrzrh3isgcESlsd0xZyejG\nRREpKyKrRGS/iKx0dsX6pEzif8/59xMlIgtFpJSdMWYlqxtHReRPIpIkIuWyqydPkruIBAETsW6G\nagQMEJH78uLYHpIA/NEY0whoA4zys/gBXgL22B1EDn0EfGeMaQDcD+y1OR6XOQcjPAM0cw5IKAg8\naW9U2crox
sUxwA/GmPrAGqwbFn1VRvGvAhoZY8KAg/hf/IhINaAL8JsrleRVy70VcNAY85sx5jbw\nFdAzj46da8aYM8aYKOd+LFZyqWpvVK5z/lE8Anxudyzucraw2htjZgIYYxKMMVdtDssdV4F4oISI\nFASKA6ftDSlrmdy42BP4wrn/BdZ1Np+UUfzGmB+MMUnOh5uAankemIuyuHH0n8BfXK0nr5J7VeBE\nmscn8aPkmJaI1ATCgM32RuKW5D8Kf7zAUgu4ICIznd1KU503zPkFY0wM8AFwHDgFXDbG/GBvVDlS\nyRhzFqzGDlDJ5nhy4w9AhN1BuENEegAnjDHRrr5HZ4V0g4gEA18DLzlb8D5PRB4Fzjq/eWQ5pNVH\nFQSaA5Oc8xfdwOoi8AsiUhv4XyAUqAIEi8hAe6PyCH9sKCAirwO3jTFz7Y7FVc7GzFiskYopT2f3\nvrxK7qeAGmkeV3M+5zecX6m/Br40xnxjdzxuaAv0EJEjWPP+dBSR2TbH5I6TWC2Wrc7HX2Mle3/R\nAvjJGHPJGJMILALCbY4pJ86KSGUAEQkBztkcj9tEZChW96S/nVzrADWBHSJyFCt/bhORLL895VVy\n3wLUFZFQ50iBJwF/G7UxA9hjjPnI7kDcYYwZa4ypYYypjfV7X2OMGWJ3XK5ydgWcEJF6zqc6418X\nhvcDrUWkqIgIVvz+cEE4/be8b4Ghzv2nAF9v4NwRv4h0xeqa7GGMuWVbVK5Lid8Ys8sYE2KMqW2M\nqYXV4GlmjMnyBJsnyd3ZYnkB64r1buArY4w//IEDICJtse6+7ZRm7vqudseVj4wG5ohIFNZomf+z\nOR6XGWN2ALOBbcAOrP+wU7N8k82cNy5uBOqJyHERGQa8C3QRkf1YJ6h37YwxK5nE/wkQDHzv/P/7\nqa1BZiGT+NMyuNAtozcxKaVUANILqkopFYA0uSulVADS5K6UUgFIk7tSSgUgTe5KKRWANLkrpVQA\n0uSulFIBSJO7UkoFoP8P2H7H0OSmu1EAAAAASUVORK5CYII=\n", - "text/plain": [ - "" - ] - }, - "metadata": {}, - "output_type": "display_data" - } - ], + "outputs": [], "source": [ "plt.plot(zip(train_costs, valid_costs))\n", "plt.legend(['train', 'valid'])" ] }, - { - "cell_type": "markdown", - "metadata": { - "collapsed": true - }, - "source": [ - "TODOs:\n", - " 1. modify for semi-supervised learning (using pseudolabels)\n", - " 2. 
include additional costs to enforce unit variance-covariance matrix and zero mean" - ] - }, { "cell_type": "markdown", "metadata": {}, diff --git a/examples/ladder_nets/ladder_nets.py b/examples/ladder_nets/ladder_nets.py index e3c7e34..9ae7c48 100644 --- a/examples/ladder_nets/ladder_nets.py +++ b/examples/ladder_nets/ladder_nets.py @@ -1,254 +1,72 @@ -from lasagne.layers import InputLayer, MergeLayer, DenseLayer, DropoutLayer, \ - GaussianNoiseLayer, NonlinearityLayer, standardize, BiasLayer, ScaleLayer, \ - ExpressionLayer +from lasagne.layers import InputLayer, DenseLayer +from lasagne.layers import Conv2DLayer as conv +from lasagne.layers import Deconv2DLayer as deconv +from lasagne.layers import MaxPool2DLayer as pool +from lasagne.layers.special import InverseLayer as unpool +from lasagne.layers.special import BiasLayer, ScaleLayer, NonlinearityLayer +from lasagne.layers.noise import GaussianNoiseLayer from lasagne.layers.normalization import BatchNormLayer -from lasagne.nonlinearities import * +from lasagne.nonlinearities import rectify, linear import lasagne -# from ladder_net_layers import CombinatorLayer, SharedNormLayer +from ladder_net_layers import CombinatorLayer, SharedNormLayer +from utils import softmax -import theano import theano.tensor as T -import numpy as np from collections import OrderedDict -def _create_milaUDEM_params(shape, name): - values = np.zeros((6,) + shape, dtype=theano.config.floatX) - - b_lin = theano.shared(values[0], - name='bias_lin_{}'.format(name)) - b_sigm = theano.shared(values[1], - name='bias_sigm_{}'.format(name)) - - w_u_lin = theano.shared(values[2], - name='weight_u_lin_{}'.format(name)) - w_u_sigm = theano.shared(values[3], - name='weight_u_sigm_{}'.format(name)) - w_zu_lin = theano.shared(values[4], - name='weight_zu_lin_{}'.format(name)) - w_zu_sigm = theano.shared(values[5], - name='weight_zu_sigm_{}'.format(name)) - - values = np.ones((3,) + shape, dtype=theano.config.floatX) - w_z_lin = 
theano.shared(values[0], - name='weight_z_lin_{}'.format(name)) - w_z_sigm = theano.shared(values[1], - name='weight_z_sigm_{}'.format(name)) - w_sigm = theano.shared(values[2], - name='weight_sigm_{}'.format(name)) - - # combinator params used in combinator calculations - return [w_u_lin, w_z_lin, w_zu_lin, w_u_sigm, w_z_sigm, - w_zu_sigm, w_sigm, b_lin, b_sigm] - - -def _create_curiousAI_params(shape, name): - values = np.zeros((8,) + shape, dtype=theano.config.floatX) - - b_mu_sig = theano.shared(values[0], - name='b_mu_sig_{}'.format(name)) - b_mu_lin = theano.shared(values[1], - name='b_mu_lin_{}'.format(name)) - b_v_sig = theano.shared(values[2], - name='b_v_sig_{}'.format(name)) - b_v_lin = theano.shared(values[3], - name='b_v_lin_{}'.format(name)) - - w_mu_lin = theano.shared(values[4], - name='w_mu_lin_{}'.format(name)) - w_v_lin = theano.shared(values[5], - name='w_v_lin_{}'.format(name)) - w_mu = theano.shared(values[6], - name='w_mu_{}'.format(name)) - w_v = theano.shared(values[7], - name='w_v_{}'.format(name)) - - values = np.ones((2,) + shape, dtype=theano.config.floatX) - w_mu_sig = theano.shared(values[0], - name='w_mu_sig_{}'.format(name)) - w_v_sig = theano.shared(values[1], - name='w_v_sig_{}'.format(name)) - - # combinator params used in combinator calculations - return [w_mu_lin, w_v_lin, w_mu_sig, w_v_sig, w_mu, w_v, - b_mu_lin, b_v_lin, b_mu_sig, b_v_sig] - - -def _create_combinator_params(combinator_type, shape, name): - if combinator_type == 'milaUDEM': - return _create_milaUDEM_params(shape, name) - elif combinator_type == 'curiousAI': - return _create_curiousAI_params(shape, name) - - -def _combinator_MILAudem(z, u, combinator_params, bc_pttrn): - w_u_lin, w_z_lin, w_zu_lin, w_u_sigm, w_z_sigm, w_zu_sigm, w_sigm, \ - b_lin, b_sigm = combinator_params - - lin_out = w_z_lin.dimshuffle(*bc_pttrn) * z + \ - w_u_lin.dimshuffle(*bc_pttrn) * u + \ - w_zu_lin.dimshuffle(*bc_pttrn) * z * u + \ - b_lin.dimshuffle(*bc_pttrn) - - sigm_pre = 
w_z_sigm.dimshuffle(*bc_pttrn) * z + \ - w_u_sigm.dimshuffle(*bc_pttrn) * u + \ - w_zu_sigm.dimshuffle(*bc_pttrn) * z * u + \ - b_sigm.dimshuffle(*bc_pttrn) - - sigm_out = T.nnet.sigmoid(sigm_pre) - - output = w_sigm.dimshuffle(*bc_pttrn) * sigm_out + lin_out - - return output - - -def _combinator_curiousAI(z, u, combinator_params, bc_pttrn): - w_mu_lin, w_v_lin, w_mu_sig, w_v_sig, w_mu, w_v, \ - b_mu_lin, b_v_lin, b_mu_sig, b_v_sig = combinator_params - - mu_sig_pre = w_mu_sig.dimshuffle(*bc_pttrn) * u + \ - b_mu_sig.dimshuffle(*bc_pttrn) - - mu_lin_out = w_mu_lin.dimshuffle(*bc_pttrn) * u + \ - b_mu_lin.dimshuffle(*bc_pttrn) - - mu_u = w_mu.dimshuffle(*bc_pttrn) * T.nnet.sigmoid(mu_sig_pre) + \ - mu_lin_out - - v_sig_pre = w_v_sig.dimshuffle(*bc_pttrn) * u + \ - b_v_sig.dimshuffle(*bc_pttrn) - - v_lin_out = w_v_lin.dimshuffle(*bc_pttrn) * u + \ - b_v_lin.dimshuffle(*bc_pttrn) - - v_u = w_v * T.nnet.sigmoid(v_sig_pre) + v_lin_out - - output = (z - mu_u) * v_u + mu_u - - return output - - -def _combinator(z, u, combinator_type, combinator_params): - if u.ndim == 2: - bc_pttrn = ('x', 0) - elif u.ndim == 4: - bc_pttrn = ('x', 0, 'x', 'x') - - if combinator_type == 'milaUDEM': - return _combinator_MILAudem(z, u, combinator_params, bc_pttrn) - elif combinator_type == 'curiousAI': - return _combinator_curiousAI(z, u, combinator_params, bc_pttrn) - - -class CombinatorLayer(MergeLayer): - """ - A layer that combines the terms from dirty and clean encoders, - and outputs denoised variable: - $$ \hat{z} = g(\tilde{z}, u)$$ - """ - def __init__(self, incoming_z, incoming_u, combinator_type, **kwargs): - super(CombinatorLayer, self).__init__( - [incoming_z, incoming_u], **kwargs) - self.combinator_type = combinator_type - z_shp, u_shp = self.input_shapes - - if len(z_shp) != len(u_shp): - raise ValueError("The inputs must have the same shape: " - "(batch_size, num_hidden) in case of dense layer " - "or \n (batch_size, num_feature_maps, height, " - "width) in case of conv 
layer.") - - self.combinator_params = _create_combinator_params(combinator_type, - u_shp[1:], - self.name) - - def get_output_shape_for(self, input_shapes): - return input_shapes[0] - - def get_output_for(self, inputs, **kwargs): - z, u = inputs - assert z.ndim == u.ndim - return _combinator(z, u, self.combinator_type, self.combinator_params) - - -class SharedNormLayer(MergeLayer): - """ - A layer that combines the terms from dirty and clean encoders, - and outputs denoised variable: - $$ \hat{z} = g(\tilde{z}, u)$$ - """ - def __init__(self, incoming2stats, incoming2norm, axes='auto', epsilon=1e-4, - **kwargs): - super(SharedNormLayer, self).__init__( - [incoming2stats, incoming2norm], **kwargs) - stats_shp, norm_shp = self.input_shapes - - if len(stats_shp) != len(norm_shp): - raise ValueError("The inputs must have the same shape: " - "(batch_size, num_hidden) in case of dense layer " - "or \n (batch_size, num_feature_maps, height, " - "width) in case of conv layer.") - - if axes == 'auto': - # default: normalize over all but the second axis - axes = (0,) + tuple(range(2, len(stats_shp))) - elif isinstance(axes, int): - axes = (axes,) - self.axes = axes - self.epsilon = epsilon - - def get_output_shape_for(self, input_shapes): - return input_shapes[0] - - def get_output_for(self, inputs, **kwargs): - to_stats, to_norm = inputs - assert to_stats.ndim == to_norm.ndim - - mean = to_stats.mean(self.axes) - inv_std = T.inv(T.sqrt(to_stats.var(self.axes) + self.epsilon)) - - return (to_norm - mean) * inv_std - - -def build_encoder(net, num_hidden, activation, name, p_drop_hidden, shared_net): +def build_encoder(net, num_hidden, activation, name, p_drop_hidden, + convolution, pooling, shared_net): for i, num_nodes in enumerate(num_hidden): - dense_lname = 'enc_dense_{}'.format(i) + affine_lname = 'enc_affine_{}'.format(i) nbatchn_lname = 'enc_batchn_{}_norm'.format(i) noise_lname = 'enc_noise_{}'.format(i) lbatchn_lname = 'enc_batchn_{}_learn'.format(i) if shared_net is 
None: - # dense pars + # affine pars W = lasagne.init.GlorotUniform() # batchnorm pars beta = lasagne.init.Constant(0) gamma = None if activation == rectify else lasagne.init.Constant(1) else: - # dense pars - W = shared_net[dense_lname].get_params()[0] + # affine weights + W = shared_net[affine_lname].get_params()[0] # batchnorm pars beta = shared_net[lbatchn_lname + '_beta'].get_params()[0] gamma = None if activation==rectify else \ shared_net[lbatchn_lname + '_gamma'].get_params()[0] # affine transformation: $W \hat{h}$ - net[dense_lname] = DenseLayer(net.values()[-1], num_units=num_nodes, - W=W, nonlinearity=linear, - name='{}_{}'.format(name, dense_lname)) + if convolution: + net[affine_lname] = conv(net.values()[-1], + num_filters=num_nodes[0], + filter_size=num_nodes[1], + pad=num_nodes[2], stride=num_nodes[3], + W=W, nonlinearity=linear, + name='{}_{}_{}'.format(name, + affine_lname, 'conv')) + else: + net[affine_lname] = DenseLayer(net.values()[-1], + num_units=num_nodes, + W=W, nonlinearity=linear, + name='{}_{}_{}'.format(name, + affine_lname, 'affine')) # 1. batchnormalize without learning -> goes to combinator layer l_name = '{}_{}'.format(name, nbatchn_lname) - net[nbatchn_lname] = BatchNormLayer(net.values()[-1], alpha=0.1, name=l_name, - beta=None, gamma=None) + net[nbatchn_lname] = BatchNormLayer(net.values()[-1], alpha=0.1, + name=l_name, beta=None, + gamma=None) if shared_net is None: # for dirty encoder -> add noise net[noise_lname] = GaussianNoiseLayer(net.values()[-1], sigma=p_drop_hidden, - name='{}_{}_'.format(name, - noise_lname)) + name='{}_{}'.format( + name, noise_lname)) # 2. 
scaling & offsetting batchnormalization + noise l_name = '{}_{}'.format(name, lbatchn_lname) @@ -260,34 +78,51 @@ def build_encoder(net, num_hidden, activation, name, p_drop_hidden, shared_net): # if not rectify, scale by gamma net[lbatchn_lname + '_gamma'] = ScaleLayer(net.values()[-1], scales=gamma, - name=l_name + '_gamma') + name=l_name+'_gamma') + + if pooling: + pool_name = 'enc_pool_{}'.format(i) + net[pool_name] = pool(net.values()[-1], pool_size=num_nodes[4], + stride=num_nodes[5], + name='{}_{}'.format(name, pool_name)) + # apply activation if i < len(num_hidden) - 1: act_name = 'enc_activation_{}'.format(i) net[act_name] = NonlinearityLayer(net.values()[-1], nonlinearity=activation, - name='{}_{}'.format(name, act_name)) + name='{}_{}'.format( + name, act_name)) # classfication layer activation -> softmax - net['enc_softmax'] = NonlinearityLayer(net.values()[-1], nonlinearity=softmax, - name='{}_enc_softmax'.format(name)) + net['enc_softmax'] = NonlinearityLayer(net.values()[-1], + nonlinearity=softmax, + name='{}_enc_softmax'.format( + name)) return net['enc_softmax'], net -def build_decoder(dirty_net, clean_net, num_nodes, sigma, combinator_type): +def build_decoder(dirty_net, clean_net, num_nodes, combinator_type, + convolution, pooling): L = len(num_nodes) - 1 - # dirty_enc_dense_1 ... z_L + # dirty_enc_affine_1 ... z_L z_L = dirty_net['enc_noise_{}'.format(L)] # batchnormalized softmax output .. 
u_0 without learning bn beta, gamma dirty_net['u_0'] = BatchNormLayer(dirty_net.values()[-1], beta=None, gamma=None, name='dec_batchn_softmax') + if pooling: + unpool_name = 'dec_unpool_{}'.format(L) + dirty_net[unpool_name] = unpool(dirty_net.values()[-1], + dirty_net['enc_pool_{}'.format(L)], + name=unpool_name) + # denoised latent \hat{z}_L = g(\tilde{z}_L, u_L) comb_name = 'dec_combinator_0' - dirty_net[comb_name] = CombinatorLayer(z_L, dirty_net['u_0'], + dirty_net[comb_name] = CombinatorLayer(z_L, dirty_net.values()[-1], combinator_type=combinator_type, name=comb_name) @@ -300,25 +135,42 @@ def build_decoder(dirty_net, clean_net, num_nodes, sigma, combinator_type): dirty_net[bname] = SharedNormLayer(to_stats_l, to_norm_l) for i in range(L): - # dirty_enc_dense_L-i ... z_l + # dirty_enc_affine_L-i ... z_l z_l = dirty_net['enc_noise_{}'.format(i)] # affine transformation - d_name = 'dec_dense_{}'.format(L-i) - dirty_net[d_name] = DenseLayer(dirty_net.values()[-1], - num_units=num_nodes[i], - nonlinearity=linear, name=d_name) - - # batchnormalization ... u_l - dirty_net['u_{}'.format(i+1)] = BatchNormLayer(dirty_net.values()[-1], - alpha=1., - beta=None,gamma=None, - name='dec_batchn_dense_' - '{}'.format(L-i)) + d_name = 'dec_affine_{}'.format(L-i) + if convolution: + dirty_net[d_name] = deconv(dirty_net.values()[-1], + num_filters=num_nodes[i][0], + filter_size=num_nodes[i][1], + crop=num_nodes[i][2], + stride=num_nodes[i][3], + nonlinearity=linear, name=d_name + + '_conv') + else: + dirty_net[d_name] = DenseLayer(dirty_net.values()[-1], + num_units=num_nodes[i], + nonlinearity=linear, + name=d_name+'_affine') + + # batchnormalization ... 
u_l\ + ul_name = 'u_{}'.format(i+1) + dirty_net[ul_name] = BatchNormLayer(dirty_net.values()[-1], alpha=1., + beta=None, gamma=None, + name='dec_batchn_affine_' + '{}'.format(L-i)) + + if pooling: + unpool_name = 'dec_unpool_{}'.format(L-i-1) + dirty_net[unpool_name] = unpool(dirty_net.values()[-1], + dirty_net['enc_pool_{}' + ''.format(L-i-1)], + name=unpool_name) # denoised latent \hat{z}_L-i comb_name = 'dec_combinator_{}'.format(i+1) - dirty_net[comb_name] = CombinatorLayer(z_l, dirty_net['u_{}'.format(i+1)], + dirty_net[comb_name] = CombinatorLayer(z_l, dirty_net.values()[-1], combinator_type=combinator_type, name=comb_name) @@ -334,9 +186,18 @@ def build_decoder(dirty_net, clean_net, num_nodes, sigma, combinator_type): z_0 = dirty_net['inp_corr'] # affine transformation - d_name = 'dec_dense_{}'.format(L+1) - dirty_net[d_name] = DenseLayer(dirty_net.values()[-1], nonlinearity=linear, - num_units=num_nodes[i+1], name=d_name) + d_name = 'dec_affine_{}'.format(L+1) + if convolution: + dirty_net[d_name] = deconv(dirty_net.values()[-1], + num_filters=num_nodes[i+1][0], + filter_size=num_nodes[i+1][1], + crop=num_nodes[i+1][2], + stride=num_nodes[i+1][3], + nonlinearity=linear,name=d_name+'_conv') + else: + dirty_net[d_name] = DenseLayer(dirty_net.values()[-1], + nonlinearity=linear, name=d_name, + num_units=num_nodes[i+1]) # batchnormalization ... 
u_L dirty_net['u_{}'.format(L+1)] = BatchNormLayer(dirty_net.values()[-1], alpha=1., @@ -344,7 +205,7 @@ def build_decoder(dirty_net, clean_net, num_nodes, sigma, combinator_type): # denoised input reconstruction comb_name = 'dec_combinator_{}'.format(L+1) - dirty_net[comb_name] = CombinatorLayer(*[z_0, dirty_net['u_{}'.format(L+1)]], + dirty_net[comb_name] = CombinatorLayer(z_0, dirty_net['u_{}'.format(L+1)], name=comb_name, combinator_type=combinator_type) @@ -352,26 +213,33 @@ def build_decoder(dirty_net, clean_net, num_nodes, sigma, combinator_type): def build_model(num_encoder, num_decoder, p_drop_input, p_drop_hidden, - activation=rectify, batch_size=None, inp_size=None, - combinator_type='MILAudem'): + input_shape, batch_size=None, activation=rectify, + combinator_type='MILAudem', convolution=False, + pooling=False): net = OrderedDict() - net['input'] = InputLayer((batch_size, inp_size), name='input') + net['input'] = InputLayer((batch_size, ) + tuple(input_shape), # inp_size), + name='input') # corrupted input net['inp_corr'] = GaussianNoiseLayer(net['input'], sigma=p_drop_input, name='input_corr') # dirty encoder - train_output_l, dirty_encoder = build_encoder(net, num_encoder, activation, - 'dirty', p_drop_hidden, None) + train_output_l, dirty_encoder = build_encoder(net, num_encoder, + activation, 'dirty', + p_drop_hidden, + convolution, pooling, + None) # clean encoder clean_net = OrderedDict(net.items()[:1]) - eval_output_l, clean_net = build_encoder(clean_net, num_encoder, activation, - 'clean', 0., shared_net=dirty_encoder) + eval_output_l, clean_net = build_encoder(clean_net, num_encoder, + activation, 'clean', 0., + convolution, pooling, + shared_net=dirty_encoder) # dirty decoder dirty_net = build_decoder(dirty_encoder, clean_net, num_decoder, - p_drop_hidden, combinator_type) + combinator_type, convolution, pooling) return [train_output_l, eval_output_l], dirty_net, clean_net diff --git a/examples/ladder_nets/train_ladder_nets.py 
b/examples/ladder_nets/train_ladder_nets.py index caf8b82..988d5fa 100644 --- a/examples/ladder_nets/train_ladder_nets.py +++ b/examples/ladder_nets/train_ladder_nets.py @@ -4,50 +4,69 @@ import theano.misc.pkl_utils import lasagne import cPickle +import numpy as np LEARNING_RATE = 0.1 LR_DECREASE = 1. BATCH_SIZE = 100 +INPUT_SHAPE = [1, 28, 28] NUM_EPOCHS = 15 COMBINATOR_TYPE = 'milaUDEM' LAMBDAS = [1, 1, 1] DROPOUT = 0.3 EXTRA_COST = False # True ALPHAS = None, # [0.1]*3 -BETAS = None, #[0.1]*3 +BETAS = None, # [0.1]*3 NUM_LABELED = None PSEUDO_LABELS = None +CONV = True # False +POOL = True # False print "Loading data..." dataset = load_data() -# build network -num_encoder = [500, 10] -num_decoder = [500, 784] +# build model +if CONV: + input_shape = INPUT_SHAPE + if POOL: + num_encoder = [[40, 8, 0, 1, 2, 2], [10, 8, 0, 1, 2, 2]] + num_decoder = [[40, 8, 0, 1, 2, 2], [1, 8, 0, 1, 2, 2]] + else: + num_encoder = [[40, 15, 0, 1], [10, 14, 0, 1]] + num_decoder = [[40, 14, 0, 1], [1, 15, 0, 1]] +else: + input_shape = np.prod(INPUT_SHAPE) + num_encoder = [500, 10] + num_decoder = [500, input_shape] print "Building model and compiling functions..." [train_output_l, eval_output_l], dirty_net, clean_net = build_model( - num_encoder, num_decoder, DROPOUT, DROPOUT, batch_size=None, - inp_size=784, combinator_type=COMBINATOR_TYPE) + num_encoder, num_decoder, DROPOUT, DROPOUT, input_shape=input_shape, + combinator_type=COMBINATOR_TYPE, convolution=CONV, pooling=POOL) + +print map(lambda x: (x.name, x.output_shape), dirty_net.values()) # set up input/output variables -X = T.fmatrix('X') +X = T.fmatrix('X') if not CONV else T.ftensor4('x') y = T.ivector('y') # training output -output_train = lasagne.layers.get_output(train_output_l, X, deterministic=False) +output_train = lasagne.layers.get_output(train_output_l, X, + deterministic=False).flatten(2) # evaluation output. 
Also includes output of transform for plotting -output_eval = lasagne.layers.get_output(eval_output_l, X, deterministic=True) +output_eval = lasagne.layers.get_output(eval_output_l, X, + deterministic=True).flatten(2) # set up (possibly amortizable) lr, cost and updates sh_lr = theano.shared(lasagne.utils.floatX(LEARNING_RATE)) -class_cost, rec_costs, \ -z_cleans, z_dirties = build_cost(X, lasagne.utils.one_hot(y), num_decoder, - dirty_net, clean_net, output_train, LAMBDAS, - use_extra_costs=EXTRA_COST, alphas=ALPHAS, betas=BETAS, - num_labeled=NUM_LABELED, pseudo_labels=PSEUDO_LABELS) +class_cost, rec_costs = build_cost(X, lasagne.utils.one_hot(y), num_decoder, + dirty_net, clean_net, output_train, + LAMBDAS, use_extra_costs=EXTRA_COST, + alphas=ALPHAS, betas=BETAS, + num_labeled=NUM_LABELED, + pseudo_labels=PSEUDO_LABELS) cost = class_cost + T.sum(rec_costs) net_params = lasagne.layers.get_all_params(train_output_l, trainable=True) updates = lasagne.updates.adam(cost, net_params, learning_rate=sh_lr) @@ -61,12 +80,16 @@ train = theano.function([batch_index], [cost] + rec_costs, updates=updates, givens={ - X: dataset['X_train'][batch_slice], + X: dataset['X_train'][batch_slice].reshape( + (-1, 1, 28, 28) + ), y: dataset['y_train'][batch_slice], }) eval = theano.function([batch_index], [cost, accuracy], givens={ - X: dataset['X_valid'][batch_slice], + X: dataset['X_valid'][batch_slice].reshape( + (-1, 1, 28, 28) + ), y: dataset['y_valid'][batch_slice], }) @@ -109,6 +132,7 @@ def eval_epoch(): return np.mean(eval_cost), np.mean(eval_acc) + num_batches_train = dataset['num_examples_train'] // BATCH_SIZE num_batches_valid = dataset['num_examples_valid'] // BATCH_SIZE diff --git a/examples/ladder_nets/utils.py b/examples/ladder_nets/utils.py index 5145a1a..ab49253 100644 --- a/examples/ladder_nets/utils.py +++ b/examples/ladder_nets/utils.py @@ -1,15 +1,13 @@ -# %load utils.py +import gzip +import cPickle as pickle import theano import theano.tensor as T import lasagne 
-import gzip -import cPickle as pickle -import sys - def pickle_load(f, encoding): return pickle.load(f) + def load_data(): """Get data with labels, split into training, validation and test set.""" with gzip.open('mnist.pkl.gz', 'rb') as f: @@ -30,4 +28,14 @@ def load_data(): num_examples_test=X_test.shape[0], input_dim=X_train.shape[1], output_dim=10, - ) \ No newline at end of file + ) + + +def softmax(vec, axis=1): + """ + The ND implementation of softmax nonlinearity applied over a specified + axis, which is by default the second dimension. + """ + xdev = vec - vec.max(axis, keepdims=True) + rval = T.exp(xdev)/(T.exp(xdev).sum(axis, keepdims=True)) + return rval \ No newline at end of file From a9760aaf2014df452b6209cdef422683ef6a98df Mon Sep 17 00:00:00 2001 From: AdrianLsk Date: Wed, 6 Jul 2016 13:03:50 +0200 Subject: [PATCH 08/10] fixed hardoded reshape in theano.function --- examples/ladder_nets/train_ladder_nets.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/examples/ladder_nets/train_ladder_nets.py b/examples/ladder_nets/train_ladder_nets.py index 988d5fa..934c821 100644 --- a/examples/ladder_nets/train_ladder_nets.py +++ b/examples/ladder_nets/train_ladder_nets.py @@ -81,14 +81,14 @@ train = theano.function([batch_index], [cost] + rec_costs, updates=updates, givens={ X: dataset['X_train'][batch_slice].reshape( - (-1, 1, 28, 28) + (-1,) + tuple(input_shape) ), y: dataset['y_train'][batch_slice], }) eval = theano.function([batch_index], [cost, accuracy], givens={ X: dataset['X_valid'][batch_slice].reshape( - (-1, 1, 28, 28) + (-1,) + tuple(input_shape) ), y: dataset['y_valid'][batch_slice], }) From 0d15e2a1d063d74874eeffd83393cbe85fc76a86 Mon Sep 17 00:00:00 2001 From: AdrianLsk Date: Wed, 6 Jul 2016 13:10:03 +0200 Subject: [PATCH 09/10] same as before but in ipnb --- examples/ladder_nets/LadderNets.ipynb | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/examples/ladder_nets/LadderNets.ipynb 
b/examples/ladder_nets/LadderNets.ipynb index fc7ccc8..6420aa2 100644 --- a/examples/ladder_nets/LadderNets.ipynb +++ b/examples/ladder_nets/LadderNets.ipynb @@ -694,7 +694,8 @@ "cell_type": "code", "execution_count": null, "metadata": { - "collapsed": false + "collapsed": false, + "scrolled": true }, "outputs": [], "source": [ @@ -782,14 +783,14 @@ "train = theano.function([batch_index], [cost] + rec_costs,\n", " updates=updates, givens={\n", " X: dataset['X_train'][batch_slice].reshape(\n", - " (-1, 1, 28, 28)\n", + " (-1,) + tuple(input_shape)\n", " ),\n", " y: dataset['y_train'][batch_slice],\n", " })\n", "\n", "eval = theano.function([batch_index], [cost, accuracy], givens={\n", " X: dataset['X_valid'][batch_slice].reshape(\n", - " (-1, 1, 28, 28)\n", + " (-1,) + tuple(input_shape)\n", " ),\n", " y: dataset['y_valid'][batch_slice],\n", " })\n", From bb3a6133685fa1ef093d78dcdb88c286f30263e3 Mon Sep 17 00:00:00 2001 From: AdrianLsk Date: Sun, 4 Sep 2016 20:04:01 +0200 Subject: [PATCH 10/10] refactored code, corrected pooling --- examples/ladder_nets/LadderNets.ipynb | 691 +++++++++++++--------- examples/ladder_nets/ladder_net_layers.py | 186 ++++++ examples/ladder_nets/ladder_nets.py | 449 +++++++------- examples/ladder_nets/train_ladder_nets.py | 109 ++-- examples/ladder_nets/utils.py | 55 +- 5 files changed, 931 insertions(+), 559 deletions(-) create mode 100644 examples/ladder_nets/ladder_net_layers.py diff --git a/examples/ladder_nets/LadderNets.ipynb b/examples/ladder_nets/LadderNets.ipynb index 6420aa2..8cc3830 100644 --- a/examples/ladder_nets/LadderNets.ipynb +++ b/examples/ladder_nets/LadderNets.ipynb @@ -98,11 +98,19 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 1, "metadata": { - "collapsed": true + "collapsed": false }, - "outputs": [], + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "Using gpu device 0: GeForce GTX 970M (CNMeM is disabled, cuDNN 5005)\n" + ] + } + ], "source": [ "# %load 
ladder_net_layers.py\n", "from lasagne.layers import MergeLayer\n", @@ -112,7 +120,6 @@ "\n", "import numpy as np\n", "\n", - "\n", "def _create_milaUDEM_params(shape, name):\n", " values = np.zeros((6,) + shape, dtype=theano.config.floatX)\n", "\n", @@ -215,7 +222,6 @@ " bc_pttrn = ('x', 0)\n", " elif u.ndim == 4:\n", " bc_pttrn = ('x', 0, 1, 2)\n", - " # bc_pttrn = ('x', 0, 'x', 'x')\n", "\n", " if combinator_type == 'milaUDEM':\n", " return _combinator_milaUDEM(z, u, combinator_params, bc_pttrn)\n", @@ -235,11 +241,11 @@ " self.combinator_type = combinator_type\n", " z_shp, u_shp = self.input_shapes\n", "\n", - " if len(z_shp) != len(u_shp):\n", - " raise ValueError(\"The inputs must have the same shape: \"\n", - " \"(batch_size, num_hidden) in case of dense layer \"\n", - " \"or \\n (batch_size, num_feature_maps, height, \"\n", - " \"width) in case of conv layer.\")\n", + " if z_shp != u_shp:\n", + " raise ValueError(\"Mismatch: input shapes must be the same. \"\n", + " \"Got dirty z ({0}) of shape {1} and clean u ({\"\n", + " \"2}) of shape {3}\".format(incoming_z.name, z_shp,\n", + " incoming_u.name, u_shp))\n", "\n", " self.combinator_params = _create_combinator_params(combinator_type,\n", " u_shp[1:],\n", @@ -266,11 +272,12 @@ " [incoming2stats, incoming2norm], **kwargs)\n", " stats_shp, norm_shp = self.input_shapes\n", "\n", - " if len(stats_shp) != len(norm_shp):\n", - " raise ValueError(\"The inputs must have the same shape: \"\n", - " \"(batch_size, num_hidden) in case of dense layer \"\n", - " \"or \\n (batch_size, num_feature_maps, height, \"\n", - " \"width) in case of conv layer.\")\n", + " if stats_shp != norm_shp:\n", + " raise ValueError(\"Mismatch: input shapes must be the same. 
\"\n", + " \"Got dirty z ({0}) of shape {1} and clean u ({\"\n", + " \"2}) of shape {3}\"\n", + " .format(incoming2stats.name, stats_shp,\n", + " incoming2norm.name, norm_shp))\n", "\n", " if axes == 'auto':\n", " # default: normalize over all but the second axis\n", @@ -296,7 +303,7 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 2, "metadata": { "collapsed": false }, @@ -310,243 +317,275 @@ "from lasagne.layers.special import InverseLayer as unpool\n", "from lasagne.layers.special import BiasLayer, ScaleLayer, NonlinearityLayer\n", "from lasagne.layers.noise import GaussianNoiseLayer\n", - "from lasagne.layers.normalization import BatchNormLayer\n", + "from lasagne.layers.normalization import BatchNormLayer, batch_norm\n", "from lasagne.nonlinearities import rectify, linear\n", "import lasagne\n", "\n", "from ladder_net_layers import CombinatorLayer, SharedNormLayer\n", - "from utils import softmax\n", + "from utils import softmax, unzip\n", "\n", "import theano.tensor as T\n", - "\n", "from collections import OrderedDict\n", "\n", - "\n", - "def build_encoder(net, num_hidden, activation, name, p_drop_hidden,\n", - " convolution, pooling, shared_net):\n", - " for i, num_nodes in enumerate(num_hidden):\n", - " affine_lname = 'enc_affine_{}'.format(i)\n", - " nbatchn_lname = 'enc_batchn_{}_norm'.format(i)\n", - " noise_lname = 'enc_noise_{}'.format(i)\n", - " lbatchn_lname = 'enc_batchn_{}_learn'.format(i)\n", - "\n", - " if shared_net is None:\n", - " # affine pars\n", - " W = lasagne.init.GlorotUniform()\n", - " # batchnorm pars\n", - " beta = lasagne.init.Constant(0)\n", - " gamma = None if activation == rectify else lasagne.init.Constant(1)\n", - " else:\n", - " # affine weights\n", - " W = shared_net[affine_lname].get_params()[0]\n", - " # batchnorm pars\n", - " beta = shared_net[lbatchn_lname + '_beta'].get_params()[0]\n", - " gamma = None if activation==rectify else \\\n", - " shared_net[lbatchn_lname + 
'_gamma'].get_params()[0]\n", - "\n", - " # affine transformation: $W \\hat{h}$\n", - " if convolution:\n", - " net[affine_lname] = conv(net.values()[-1],\n", - " num_filters=num_nodes[0],\n", - " filter_size=num_nodes[1],\n", - " pad=num_nodes[2], stride=num_nodes[3],\n", - " W=W, nonlinearity=linear,\n", - " name='{}_{}_{}'.format(name,\n", - " affine_lname, 'conv'))\n", - " else:\n", - " net[affine_lname] = DenseLayer(net.values()[-1],\n", - " num_units=num_nodes,\n", - " W=W, nonlinearity=linear,\n", - " name='{}_{}_{}'.format(name,\n", - " affine_lname, 'affine'))\n", - "\n", - " # 1. batchnormalize without learning -> goes to combinator layer\n", - " l_name = '{}_{}'.format(name, nbatchn_lname)\n", - " net[nbatchn_lname] = BatchNormLayer(net.values()[-1], alpha=0.1,\n", - " name=l_name, beta=None,\n", - " gamma=None)\n", - "\n", - " if shared_net is None:\n", - " # for dirty encoder -> add noise\n", - " net[noise_lname] = GaussianNoiseLayer(net.values()[-1],\n", - " sigma=p_drop_hidden,\n", - " name='{}_{}'.format(\n", - " name, noise_lname))\n", - "\n", - " # 2. 
scaling & offsetting batchnormalization + noise\n", - " l_name = '{}_{}'.format(name, lbatchn_lname)\n", - " # offset by beta\n", - " net[lbatchn_lname + '_beta'] = BiasLayer(net.values()[-1], b=beta,\n", - " name=l_name + '_beta')\n", - "\n", - " if gamma is not None:\n", - " # if not rectify, scale by gamma\n", - " net[lbatchn_lname + '_gamma'] = ScaleLayer(net.values()[-1],\n", - " scales=gamma,\n", - " name=l_name+'_gamma')\n", - "\n", - " if pooling:\n", - " pool_name = 'enc_pool_{}'.format(i)\n", - " net[pool_name] = pool(net.values()[-1], pool_size=num_nodes[4],\n", - " stride=num_nodes[5],\n", - " name='{}_{}'.format(name, pool_name))\n", - "\n", + "get_items = lambda zipped: unzip(zipped)\n", + "xe = T.nnet.categorical_crossentropy\n", + "\n", + "def build_encoder(net, encoder_specs, activation, name, p_drop_hidden,\n", + " shared_net):\n", + " # encoder specs is a tuple of string and tuple of integers\n", + " for i, (transform, specs) in enumerate(encoder_specs):\n", + " if transform == 'unpool':\n", + " specs = net.get(specs)\n", + " # if specs have already the name of the corresponding pool layer\n", + " update = build_enc_layer(\n", + " net.values()[-1], name, transform, specs, activation, i,\n", + " p_drop_hidden, shared_net\n", + " )\n", + " net.update(update)\n", " # apply activation\n", - " if i < len(num_hidden) - 1:\n", + " if i < len(encoder_specs) - 1:\n", " act_name = 'enc_activation_{}'.format(i)\n", - " net[act_name] = NonlinearityLayer(net.values()[-1],\n", - " nonlinearity=activation,\n", - " name='{}_{}'.format(\n", - " name, act_name))\n", + " net[act_name] = NonlinearityLayer(\n", + " net.values()[-1], nonlinearity=activation,\n", + " name='{}_{}'.format(name, act_name)\n", + " )\n", "\n", " # classfication layer activation -> softmax\n", - " net['enc_softmax'] = NonlinearityLayer(net.values()[-1],\n", - " nonlinearity=softmax,\n", - " name='{}_enc_softmax'.format(\n", - " name))\n", + " net['enc_softmax'] = NonlinearityLayer(\n", + " 
net.values()[-1], nonlinearity=softmax, name=name+'_enc_softmax'\n", + " )\n", "\n", " return net['enc_softmax'], net\n", "\n", "\n", - "def build_decoder(dirty_net, clean_net, num_nodes, combinator_type,\n", - " convolution, pooling):\n", - " L = len(num_nodes) - 1\n", + "def build_enc_layer(incoming, name, transform, specs, activation, i,\n", + " p_drop_hidden, shared_net):\n", + " net = OrderedDict()\n", + " lname = 'enc_{}_{}'.format(i, transform if 'pool' in transform else 'affine')\n", + " nbatchn_lname = 'enc_batchn_{}_norm'.format(i)\n", + " noise_lname = 'enc_noise_{}'.format(i)\n", + " lbatchn_lname = 'enc_batchn_{}_learn'.format(i)\n", + "\n", + " if shared_net is None:\n", + " # affine pars\n", + " W = lasagne.init.GlorotUniform()\n", + " # batchnorm pars\n", + " beta = lasagne.init.Constant(0)\n", + " gamma = None if activation == rectify else lasagne.init.Constant(1)\n", + " else:\n", + " # batchnorm pars\n", + " beta = shared_net[lbatchn_lname + '_beta'].get_params()[0]\n", + " gamma = None if activation == rectify else \\\n", + " shared_net[lbatchn_lname + '_gamma'].get_params()[0]\n", + " if not isinstance(shared_net[lname], (pool, unpool)):\n", + " # affine weights\n", + " W = shared_net[lname].get_params()[0]\n", + " else:\n", + " W = None\n", "\n", - " # dirty_enc_affine_1 ... z_L\n", - " z_L = dirty_net['enc_noise_{}'.format(L)]\n", + " # affine (conv/dense/deconv) or (un)pooling transformation: $W \\hat{h}$\n", + " net[lname] = get_transform_layer(\n", + " incoming, name+'_'+lname, transform, specs, W\n", + " )\n", "\n", - " # batchnormalized softmax output .. u_0 without learning bn beta, gamma\n", - " dirty_net['u_0'] = BatchNormLayer(dirty_net.values()[-1], beta=None,\n", - " gamma=None, name='dec_batchn_softmax')\n", + " # 1. 
batchnormalize without learning -> goes to combinator layer\n", + " layer2bn = net.values()[-1]\n", + " l_name = '{}_{}'.format(name, nbatchn_lname)\n", + " bn_broadcast_cond = layer2bn.output_shape[1] == 1\n", + " if len(layer2bn.output_shape) == 4 and bn_broadcast_cond:\n", + " ax = (0, 1, 2, 3)\n", + " elif len(layer2bn.output_shape) == 2 and bn_broadcast_cond:\n", + " ax = (0, 1)\n", + " else:\n", + " ax = 'auto'\n", + " net[nbatchn_lname] = BatchNormLayer(\n", + " layer2bn, axes=ax, alpha=0.1, beta=None, gamma=None, name=l_name\n", + " )\n", + " if shared_net is None:\n", + " # for dirty encoder -> add noise\n", + " net[noise_lname] = GaussianNoiseLayer(\n", + " net.values()[-1], sigma=p_drop_hidden,\n", + " name='{}_{}'.format(name, noise_lname)\n", + " )\n", + "\n", + " # 2. scaling & offsetting batchnormalization + noise\n", + " l_name = '{}_{}'.format(name, lbatchn_lname)\n", + " # offset by beta\n", + " net[lbatchn_lname + '_beta'] = BiasLayer(\n", + " net.values()[-1], b=beta, name=l_name+'_beta'\n", + " )\n", + " if gamma is not None:\n", + " # if not rectify, scale by gamma\n", + " net[lbatchn_lname + '_gamma'] = ScaleLayer(\n", + " net.values()[-1], scales=gamma, name=l_name+'_gamma'\n", + " )\n", + "\n", + " return net\n", + "\n", + "\n", + "def get_transform_layer(incoming, name, transform, specs, W):\n", + " if transform == 'conv':\n", + " layer = conv(\n", + " incoming, num_filters=specs[0], filter_size=specs[1],\n", + " stride=specs[2], pad=specs[3], nonlinearity=linear, W=W, b=None,\n", + " name=name+'_conv'\n", + " )\n", + " elif transform == 'dense':\n", + " layer = DenseLayer(\n", + " incoming, num_units=specs, nonlinearity=linear, W=W, b=None,\n", + " name=name+'_dense'\n", + " )\n", + " elif transform == 'pool':\n", + " if len(specs) == 4:\n", + " psize, pstride = specs[1:3]\n", + " else:\n", + " psize, pstride = specs\n", + " layer = pool(\n", + " incoming, pool_size=psize, stride=pstride, name=name\n", + " )\n", + " elif transform == 
'deconv':\n", + " layer = deconv(\n", + " incoming, num_filters=specs[0], filter_size=specs[1],\n", + " stride=specs[2], crop=specs[3], nonlinearity=linear, W=W, b=None,\n", + " name=name+'_deconv'\n", + " )\n", + " elif transform == 'unpool':\n", + " pl = specs\n", + " # print(pl.name, pl.output_shape)\n", + " layer = unpool(incoming, pl, name=name)\n", + "\n", + " return layer\n", + "\n", + "\n", + "def build_dec_layer(incoming, z_l, name, transform, specs, l,\n", + " combinator_type, layer2stats=None, last=False):\n", + " dirty_net = OrderedDict()\n", + "\n", + " if l > 0:\n", + " # transformation layer: dense, deconv, unpool\n", + " lname = 'dec_{}_{}'.format(l, transform if 'pool' in transform\n", + " else 'affine')\n", + " if transform in ['pool', 'unpool']:\n", + " W = None\n", + " else:\n", + " W = lasagne.init.GlorotUniform()\n", + " dirty_net[lname] = get_transform_layer(incoming, name+'_'+lname,\n", + " transform, specs, W)\n", + " layer2bn = dirty_net.values()[-1]\n", + " else:\n", + " layer2bn = incoming\n", + "\n", + " # batchnormalization ... 
u_l\n", + " ul_name = 'dec_batchn_u_{}'.format(l)\n", + " bn_broadcast_cond = layer2bn.output_shape[1] == 1\n", + " if len(layer2bn.output_shape) == 4 and bn_broadcast_cond:\n", + " ax = (0, 1, 2, 3)\n", + " elif len(layer2bn.output_shape) == 2 and bn_broadcast_cond:\n", + " ax = (0, 1)\n", + " else:\n", + " ax = 'auto'\n", + " dirty_net[ul_name] = BatchNormLayer(\n", + " layer2bn, axes=ax, alpha=1., beta=None, gamma=None,\n", + " name=name+'_'+ul_name\n", + " )\n", "\n", - " if pooling:\n", - " unpool_name = 'dec_unpool_{}'.format(L)\n", - " dirty_net[unpool_name] = unpool(dirty_net.values()[-1],\n", - " dirty_net['enc_pool_{}'.format(L)],\n", - " name=unpool_name)\n", + " # denoised latent \\hat{z}_L-i\n", + " comb_name = 'dec_combinator_{}'.format(l)\n", + " dirty_net[comb_name] = CombinatorLayer(\n", + " z_l, dirty_net.values()[-1], combinator_type=combinator_type,\n", + " name=name+'_'+comb_name\n", + " )\n", "\n", - " # denoised latent \\hat{z}_L = g(\\tilde{z}_L, u_L)\n", - " comb_name = 'dec_combinator_0'\n", - " dirty_net[comb_name] = CombinatorLayer(z_L, dirty_net.values()[-1],\n", - " combinator_type=combinator_type,\n", - " name=comb_name)\n", + " if not last:\n", + " # batchnormalized latent \\hat{z}_L-i^{BN}\n", + " layer2norm = dirty_net[comb_name]\n", + " bname = 'dec_batchn_z_{}'.format(l)\n", + " dirty_net[bname] = SharedNormLayer(\n", + " layer2stats, layer2norm, name=name+'_'+bname\n", + " )\n", "\n", - " # batchnormalize denoised latent using clean encoder's bn mean/inv_std without learning\n", + " return dirty_net\n", + "\n", + "\n", + "def build_decoder(dirty_net, clean_net, name, decoder_specs, combinator_type):\n", + " L = len(decoder_specs) - 1\n", + " net = OrderedDict()\n", + "\n", + " # dirty_enc_affine_1 ... 
z_L\n", + " z_L = dirty_net['enc_noise_{}'.format(L)]\n", + "\n", + " # batchnormalize denoised latent using clean encoder's bn mean/inv_std\n", + " # without learning\n", " enc_bname = 'enc_batchn_{}_norm'.format(L)\n", - " bname = 'dec_batchn_0'\n", + " layer2stats = clean_net[enc_bname]\n", "\n", - " to_stats_l = clean_net[enc_bname]\n", - " to_norm_l = dirty_net[comb_name]\n", - " dirty_net[bname] = SharedNormLayer(to_stats_l, to_norm_l)\n", + " # batchnorm and combinator\n", + " update = build_dec_layer(\n", + " dirty_net.values()[-1], z_L, name, 'N/A', None, 0, combinator_type,\n", + " layer2stats\n", + " )\n", + " net.update(update)\n", "\n", - " for i in range(L):\n", + " for i, (transform, specs) in enumerate(decoder_specs[:-1]):\n", " # dirty_enc_affine_L-i ... z_l\n", - " z_l = dirty_net['enc_noise_{}'.format(i)]\n", - "\n", - " # affine transformation\n", - " d_name = 'dec_affine_{}'.format(L-i)\n", - " if convolution:\n", - " dirty_net[d_name] = deconv(dirty_net.values()[-1],\n", - " num_filters=num_nodes[i][0],\n", - " filter_size=num_nodes[i][1],\n", - " crop=num_nodes[i][2],\n", - " stride=num_nodes[i][3],\n", - " nonlinearity=linear, name=d_name +\n", - " '_conv')\n", - " else:\n", - " dirty_net[d_name] = DenseLayer(dirty_net.values()[-1],\n", - " num_units=num_nodes[i],\n", - " nonlinearity=linear,\n", - " name=d_name+'_affine')\n", - "\n", - " # batchnormalization ... 
u_l\\\n", - " ul_name = 'u_{}'.format(i+1)\n", - " dirty_net[ul_name] = BatchNormLayer(dirty_net.values()[-1], alpha=1.,\n", - " beta=None, gamma=None,\n", - " name='dec_batchn_affine_'\n", - " '{}'.format(L-i))\n", - "\n", - " if pooling:\n", - " unpool_name = 'dec_unpool_{}'.format(L-i-1)\n", - " dirty_net[unpool_name] = unpool(dirty_net.values()[-1],\n", - " dirty_net['enc_pool_{}'\n", - " ''.format(L-i-1)],\n", - " name=unpool_name)\n", - "\n", - " # denoised latent \\hat{z}_L-i\n", - " comb_name = 'dec_combinator_{}'.format(i+1)\n", - " dirty_net[comb_name] = CombinatorLayer(z_l, dirty_net.values()[-1],\n", - " combinator_type=combinator_type,\n", - " name=comb_name)\n", - "\n", - " # batchnormalized latent \\hat{z}_L-i^{BN}\n", + " z_l = dirty_net['enc_noise_{}'.format(L-i-1)]\n", " enc_bname = 'enc_batchn_{}_norm'.format(L-i-1)\n", - " bname = 'dec_batchn_{}'.format(L-i)\n", + " layer2stats = clean_net[enc_bname]\n", "\n", - " to_stats_l = clean_net[enc_bname]\n", - " to_norm_l = dirty_net[comb_name]\n", - " dirty_net[bname] = SharedNormLayer(to_stats_l, to_norm_l)\n", + " if transform == 'unpool':\n", + " # print(dirty_net.keys(), specs)\n", + " specs = dirty_net.get(specs)\n", + " update = build_dec_layer(\n", + " net.values()[-1], z_l, name, transform, specs, i+1,\n", + " combinator_type, layer2stats\n", + " )\n", + " net.update(update)\n", "\n", " # corrupted input ... z_0\n", - " z_0 = dirty_net['inp_corr']\n", - "\n", - " # affine transformation\n", - " d_name = 'dec_affine_{}'.format(L+1)\n", - " if convolution:\n", - " dirty_net[d_name] = deconv(dirty_net.values()[-1],\n", - " num_filters=num_nodes[i+1][0],\n", - " filter_size=num_nodes[i+1][1],\n", - " crop=num_nodes[i+1][2],\n", - " stride=num_nodes[i+1][3],\n", - " nonlinearity=linear,name=d_name+'_conv')\n", - " else:\n", - " dirty_net[d_name] = DenseLayer(dirty_net.values()[-1],\n", - " nonlinearity=linear, name=d_name,\n", - " num_units=num_nodes[i+1])\n", - "\n", - " # batchnormalization ... 
u_L\n", - " dirty_net['u_{}'.format(L+1)] = BatchNormLayer(dirty_net.values()[-1], alpha=1.,\n", - " beta=None, gamma=None)\n", - "\n", - " # denoised input reconstruction\n", - " comb_name = 'dec_combinator_{}'.format(L+1)\n", - " dirty_net[comb_name] = CombinatorLayer(z_0, dirty_net['u_{}'.format(L+1)],\n", - " name=comb_name,\n", - " combinator_type=combinator_type)\n", + " z_0 = dirty_net['input_corr']\n", + " transform, specs = decoder_specs[-1]\n", + "\n", + " if transform == 'unpool':\n", + " specs = dirty_net.get(specs)\n", + " update = build_dec_layer(\n", + " net.values()[-1], z_0, name, transform, specs, i+2,\n", + " combinator_type, None, True\n", + " )\n", + " net.update(update)\n", "\n", - " return dirty_net\n", + " return net\n", "\n", "\n", - "def build_model(num_encoder, num_decoder, p_drop_input, p_drop_hidden,\n", + "def build_model(encoder_specs, decoder_specs, p_drop_input, p_drop_hidden,\n", " input_shape, batch_size=None, activation=rectify,\n", - " combinator_type='MILAudem', convolution=False,\n", - " pooling=False):\n", + " combinator_type='MILAudem'):\n", " net = OrderedDict()\n", - " net['input'] = InputLayer((batch_size, ) + tuple(input_shape), # inp_size),\n", - " name='input')\n", + " net['input'] = InputLayer(\n", + " (batch_size, ) + tuple(input_shape), name='input'\n", + " )\n", " # corrupted input\n", - " net['inp_corr'] = GaussianNoiseLayer(net['input'], sigma=p_drop_input,\n", - " name='input_corr')\n", + " net['input_corr'] = GaussianNoiseLayer(\n", + " net['input'], sigma=p_drop_input, name='input_corr'\n", + " )\n", "\n", " # dirty encoder\n", - " train_output_l, dirty_encoder = build_encoder(net, num_encoder,\n", - " activation, 'dirty',\n", - " p_drop_hidden,\n", - " convolution, pooling,\n", - " None)\n", + " train_output_l, dirty_encoder = build_encoder(\n", + " net, encoder_specs, activation, 'dirty', p_drop_hidden, None\n", + " )\n", "\n", " # clean encoder\n", - " clean_net = OrderedDict(net.items()[:1])\n", - " 
eval_output_l, clean_net = build_encoder(clean_net, num_encoder,\n", - " activation, 'clean', 0.,\n", - " convolution, pooling,\n", - " shared_net=dirty_encoder)\n", + " clean_encoder = OrderedDict(net.items()[:1])\n", + " eval_output_l, clean_net = build_encoder(\n", + " clean_encoder, encoder_specs, activation, 'clean', 0., dirty_encoder\n", + " )\n", "\n", " # dirty decoder\n", - " dirty_net = build_decoder(dirty_encoder, clean_net, num_decoder,\n", - " combinator_type, convolution, pooling)\n", + " dirty_decoder = build_decoder(\n", + " dirty_encoder, clean_net, 'dirty', decoder_specs, combinator_type\n", + " )\n", "\n", - " return [train_output_l, eval_output_l], dirty_net, clean_net\n", + " return (train_output_l, eval_output_l, dirty_encoder, dirty_decoder,\n", + " clean_encoder)\n", "\n", "\n", "def get_mu_sigma_costs(hid):\n", @@ -560,13 +599,11 @@ " return C_mu, C_sigma.sum() # trace(C_sigma)\n", "\n", "\n", - "def build_cost(X, y, num_decoder, dirty_net, clean_net, output_train,\n", - " lambdas, use_extra_costs=False, alphas=None, betas=None,\n", - " num_labeled=None, pseudo_labels=None):\n", - " xe = T.nnet.categorical_crossentropy\n", + "def build_costNstats(y_onehot, output_train, output_eval, num_labeled=None,\n", + " pseudo_labels=None):\n", " pred = T.clip(output_train, 1e-15, 1)\n", " N = num_labeled if num_labeled else pred.shape[0]\n", - " class_cost = xe(pred[:N], y[:N]).mean()\n", + " class_cost = xe(pred[:N], y_onehot[:N]).mean()\n", "\n", " if pseudo_labels == 'soft':\n", " n = 0 if num_labeled else N\n", @@ -577,7 +614,16 @@ " pseudo_target = T.eye(M)[pred[n:].argmax(axis=1)]\n", " class_cost += xe(pred[n:], pseudo_target).mean()\n", "\n", - " L = len(num_decoder)\n", + " pred = T.argmax(output_eval[:N], axis=1)\n", + " y = T.argmax(y_onehot[:N], axis=1)\n", + " accuracy = T.mean(T.eq(pred, y), dtype='float32')\n", + "\n", + " return class_cost, [accuracy]\n", + "\n", + "\n", + "def build_rec_costs(X, clean_net, dirty_net, decoder_specs, 
lambdas,\n", + " alphas=None, betas=None, use_extra_costs=False):\n", + " L = len(decoder_specs)\n", "\n", " # get clean and corresponding dirty latent layer output\n", " z_clean_l = clean_net['input']\n", @@ -591,11 +637,14 @@ " if use_extra_costs:\n", " C_mu, C_sigma = get_mu_sigma_costs(z_clean)\n", " cost += alphas[L] * C_mu + betas[L] * C_sigma\n", + "\n", " rec_costs = [cost]\n", "\n", - " for l in range(L):\n", + " dec_batchns = [x for x in dirty_net.keys() if 'dec_batchn_z' in x][::-1]\n", + "\n", + " for l, name in enumerate(dec_batchns):\n", " z_clean_l = clean_net['enc_batchn_{}_norm'.format(l)]\n", - " z_dirty_l = dirty_net['dec_batchn_{}'.format(L-l-1)]\n", + " z_dirty_l = dirty_net[name]\n", "\n", " z_clean = lasagne.layers.get_output(z_clean_l, X, deterministic=False)\n", " z_dirty = lasagne.layers.get_output(z_dirty_l, X, deterministic=False)\n", @@ -604,51 +653,69 @@ " if use_extra_costs:\n", " C_mu, C_sigma = get_mu_sigma_costs(z_clean)\n", " cost += alphas[l] * C_mu + betas[l] * C_sigma\n", + "\n", " rec_costs.append(cost)\n", "\n", - " return class_cost, rec_costs" + " return rec_costs\n" ] }, { "cell_type": "code", - "execution_count": null, + "execution_count": 3, "metadata": { "collapsed": false }, "outputs": [], "source": [ "# %load utils.py\n", + "import numpy as np\n", "import gzip\n", "import cPickle as pickle\n", - "import theano\n", - "import theano.tensor as T\n", + "\n", "import lasagne\n", + "import theano.misc.pkl_utils\n", + "import theano.tensor as T\n", "\n", "def pickle_load(f, encoding):\n", " return pickle.load(f)\n", "\n", "\n", - "def load_data():\n", + "def load_data(shared_var=True):\n", " \"\"\"Get data with labels, split into training, validation and test set.\"\"\"\n", - " with gzip.open('mnist.pkl.gz', 'rb') as f:\n", + " with gzip.open('./mnist.pkl.gz', 'rb') as f:\n", " data = pickle_load(f, encoding='latin-1')\n", " X_train, y_train = data[0]\n", " X_valid, y_valid = data[1]\n", " X_test, y_test = data[2]\n", "\n", 
- " return dict(\n", - " X_train=theano.shared(lasagne.utils.floatX(X_train)),\n", - " y_train=T.cast(theano.shared(y_train), 'int32'),\n", - " X_valid=theano.shared(lasagne.utils.floatX(X_valid)),\n", - " y_valid=T.cast(theano.shared(y_valid), 'int32'),\n", - " X_test=theano.shared(lasagne.utils.floatX(X_test)),\n", - " y_test=T.cast(theano.shared(y_test), 'int32'),\n", - " num_examples_train=X_train.shape[0],\n", - " num_examples_valid=X_valid.shape[0],\n", - " num_examples_test=X_test.shape[0],\n", - " input_dim=X_train.shape[1],\n", - " output_dim=10,\n", - " )\n", + " if shared_var:\n", + " return dict(\n", + " X_train=theano.shared(lasagne.utils.floatX(X_train)),\n", + " y_train=T.cast(theano.shared(y_train), 'int32'),\n", + " X_valid=theano.shared(lasagne.utils.floatX(X_valid)),\n", + " y_valid=T.cast(theano.shared(y_valid), 'int32'),\n", + " X_test=theano.shared(lasagne.utils.floatX(X_test)),\n", + " y_test=T.cast(theano.shared(y_test), 'int32'),\n", + " num_examples_train=X_train.shape[0],\n", + " num_examples_valid=X_valid.shape[0],\n", + " num_examples_test=X_test.shape[0],\n", + " input_dm=X_train.shape[1],\n", + " output_dim=10,\n", + " )\n", + " else:\n", + " return dict(\n", + " X_train=np.float32(X_train),\n", + " y_train=np.int32(y_train),\n", + " X_valid=np.float32(X_valid),\n", + " y_valid=np.int32(y_valid),\n", + " X_test=np.float32(X_test),\n", + " y_test=np.int32(y_test),\n", + " num_examples_train=X_train.shape[0],\n", + " num_examples_valid=X_valid.shape[0],\n", + " num_examples_test=X_test.shape[0],\n", + " input_dm=X_train.shape[1],\n", + " output_dim=10,\n", + " )\n", "\n", "\n", "def softmax(vec, axis=1):\n", @@ -658,7 +725,9 @@ " \"\"\"\n", " xdev = vec - vec.max(axis, keepdims=True)\n", " rval = T.exp(xdev)/(T.exp(xdev).sum(axis, keepdims=True))\n", - " return rval" + " return rval\n", + "\n", + "unzip = lambda zipped: zip(*zipped)" ] }, { @@ -700,6 +769,7 @@ "outputs": [], "source": [ "# %load train_ladder_nets.py\n", + "from 
__future__ import print_function\n", "from utils import load_data\n", "from ladder_nets import *\n", "import time\n", @@ -713,44 +783,59 @@ "BATCH_SIZE = 100\n", "INPUT_SHAPE = [1, 28, 28]\n", "NUM_EPOCHS = 15\n", - "COMBINATOR_TYPE = 'milaUDEM'\n", - "LAMBDAS = [1, 1, 1]\n", + "COMBINATOR_TYPE = 'milaUDEM' # or 'curiousAI'\n", "DROPOUT = 0.3\n", "EXTRA_COST = False # True\n", - "ALPHAS = None, # [0.1]*3\n", - "BETAS = None, # [0.1]*3\n", + "ALPHAS = None # [0.1]*3\n", + "BETAS = None # [0.1]*3\n", "NUM_LABELED = None\n", "PSEUDO_LABELS = None\n", "CONV = True # False\n", "POOL = True # False\n", "\n", - "print \"Loading data...\"\n", + "print (\"Loading data...\")\n", "dataset = load_data()\n", "\n", - "# build model\n", - "if CONV:\n", - " input_shape = INPUT_SHAPE\n", - " if POOL:\n", - " num_encoder = [[40, 8, 0, 1, 2, 2], [10, 8, 0, 1, 2, 2]]\n", - " num_decoder = [[40, 8, 0, 1, 2, 2], [1, 8, 0, 1, 2, 2]]\n", + "def get_encoder_settings(convolution, pooling):\n", + " if convolution and pooling:\n", + " settings = [('conv', (40, 8, 1, 0)), ('pool', (0, 2, 2, 0)),\n", + " ('conv', (10, 8, 1, 0)), ('pool', (0, 2, 2, 0))]\n", + " elif convolution:\n", + " settings = [('conv', (40, 15, 1, 0)), ('conv', (10, 14, 1, 0))]\n", " else:\n", - " num_encoder = [[40, 15, 0, 1], [10, 14, 0, 1]]\n", - " num_decoder = [[40, 14, 0, 1], [1, 15, 0, 1]]\n", - "else:\n", - " input_shape = np.prod(INPUT_SHAPE)\n", - " num_encoder = [500, 10]\n", - " num_decoder = [500, input_shape]\n", + " settings = [('dense', 500), ('dense', 10)]\n", "\n", - "print \"Building model and compiling functions...\"\n", - "[train_output_l, eval_output_l], dirty_net, clean_net = build_model(\n", - " num_encoder, num_decoder, DROPOUT, DROPOUT, input_shape=input_shape,\n", - " combinator_type=COMBINATOR_TYPE, convolution=CONV, pooling=POOL)\n", + " return settings\n", "\n", - "print map(lambda x: (x.name, x.output_shape), dirty_net.values())\n", + "def get_decoder_settings(convolution, pooling):\n", + " if 
convolution and pooling:\n", + " settings = [('unpool', 'enc_3_pool'), ('deconv', (40, 8, 1, 0)),\n", + " ('unpool', 'enc_1_pool'), ('deconv', (1, 8, 1, 0))]\n", + " elif convolution:\n", + " settings = [('deconv', (40, 14, 1, 0)), ('deconv', (1, 15, 1, 0))]\n", + " else:\n", + " settings = [('dense', 10), ('dense', 784)]\n", + "\n", + " return settings\n", + "\n", + "# build model\n", + "encoder_specs = get_encoder_settings(convolution=CONV, pooling=POOL)\n", + "decoder_specs = get_decoder_settings(convolution=CONV, pooling=POOL)\n", + "LAMBDAS = [1] * (len(decoder_specs) + 1)\n", + "input_shape = INPUT_SHAPE if CONV else np.prod(INPUT_SHAPE)\n", + "\n", + "print (\"Building model ...\")\n", + "train_output_l, eval_output_l, dirty_encoder, dirty_decoder, clean_encoder = \\\n", + " build_model(encoder_specs, decoder_specs, DROPOUT, DROPOUT,\n", + " input_shape=input_shape, combinator_type=COMBINATOR_TYPE)\n", + "\n", + "print (map(lambda x: (x.name, x.output_shape), dirty_encoder.values()))\n", + "print (map(lambda x: (x.name, x.output_shape), dirty_decoder.values()))\n", "\n", "# set up input/output variables\n", - "X = T.fmatrix('X') if not CONV else T.ftensor4('x')\n", + "X = T.ftensor4('x') if CONV else T.fmatrix('X')\n", "y = T.ivector('y')\n", + "y_onehot = lasagne.utils.one_hot(y, 10)\n", "\n", "# training output\n", "output_train = lasagne.layers.get_output(train_output_l, X,\n", @@ -763,12 +848,14 @@ "# set up (possibly amortizable) lr, cost and updates\n", "sh_lr = theano.shared(lasagne.utils.floatX(LEARNING_RATE))\n", "\n", - "class_cost, rec_costs = build_cost(X, lasagne.utils.one_hot(y), num_decoder,\n", - " dirty_net, clean_net, output_train,\n", - " LAMBDAS, use_extra_costs=EXTRA_COST,\n", - " alphas=ALPHAS, betas=BETAS,\n", - " num_labeled=NUM_LABELED,\n", - " pseudo_labels=PSEUDO_LABELS)\n", + "print (\"Building costs and updates ...\")\n", + "class_cost, stats = build_costNstats(y_onehot, output_train, output_eval,\n", + " NUM_LABELED, 
PSEUDO_LABELS)\n", + "\n", + "rec_costs = build_rec_costs(X, clean_encoder, dirty_decoder, decoder_specs,\n", + " lambdas=LAMBDAS, alphas=ALPHAS, betas=BETAS,\n", + " use_extra_costs=EXTRA_COST)\n", + "\n", "cost = class_cost + T.sum(rec_costs)\n", "net_params = lasagne.layers.get_all_params(train_output_l, trainable=True)\n", "updates = lasagne.updates.adam(cost, net_params, learning_rate=sh_lr)\n", @@ -777,9 +864,7 @@ "batch_index = T.iscalar('batch_index')\n", "batch_slice = slice(batch_index * BATCH_SIZE, (batch_index + 1) * BATCH_SIZE)\n", "\n", - "pred = T.argmax(output_eval, axis=1)\n", - "accuracy = T.mean(T.eq(pred, y[:NUM_LABELED]), dtype=theano.config.floatX)\n", - "\n", + "print (\"Compiling functions...\")\n", "train = theano.function([batch_index], [cost] + rec_costs,\n", " updates=updates, givens={\n", " X: dataset['X_train'][batch_slice].reshape(\n", @@ -788,7 +873,7 @@ " y: dataset['y_train'][batch_slice],\n", " })\n", "\n", - "eval = theano.function([batch_index], [cost, accuracy], givens={\n", + "eval = theano.function([batch_index], [cost] + stats, givens={\n", " X: dataset['X_valid'][batch_slice].reshape(\n", " (-1,) + tuple(input_shape)\n", " ),\n", @@ -797,8 +882,8 @@ "\n", "network_dump = {'train_output_layer': train_output_l,\n", " 'eval_output_layer': eval_output_l,\n", - " 'dirty_net': dirty_net,\n", - " 'clean_net': clean_net,\n", + " 'dirty_net': dirty_decoder,\n", + " 'clean_net': clean_encoder,\n", " 'x': X,\n", " 'y': y,\n", " 'output_eval': output_eval\n", @@ -840,7 +925,7 @@ "\n", "train_costs, valid_costs, valid_accs = [], [], []\n", "\n", - "print \"Starting training...\"\n", + "print (\"Starting training...\")\n", "now = time.time()\n", "\n", "try:\n", @@ -852,17 +937,17 @@ " valid_costs.append(eval_cost)\n", " valid_accs.append(acc)\n", "\n", - " print \"Epoch %d took %.3f s\" % (n + 1, time.time() - now)\n", + " print (\"Epoch %d took %.3f s\" % (n + 1, time.time() - now))\n", " now = time.time()\n", - " print \"Train cost {}, 
val cost {}, val acc {}\".format(train_costs[-1], \n", - " valid_costs[-1], \n", - " valid_accs[-1])\n", - " print '\\n'.join(['Layer #{} rec cost: {}'.format(i, c) for i, c\n", - " in enumerate(rec_costs)])\n", + " print (\"Train cost {}, val cost {}, val acc {}\".format(train_costs[-1],\n", + " valid_costs[-1],\n", + " valid_accs[-1]))\n", + " print ('\\n'.join(['Layer #{} rec cost: {}'.format(i, c) for i, c\n", + " in enumerate(rec_costs)]))\n", "\n", " if (n+1) % 10 == 0:\n", " new_lr = sh_lr.get_value() * LR_DECREASE\n", - " print \"New LR:\", new_lr\n", + " print (\"New LR:\", new_lr)\n", " sh_lr.set_value(lasagne.utils.floatX(new_lr))\n", "except KeyboardInterrupt:\n", " pass\n", @@ -872,17 +957,17 @@ "# zip(train_cost, valid_cost))\n", "\n", "# uncomment if to save the params only\n", - "# save_dump('final_epoch_{}_ladder_net_mnist'.format(n),\n", + "# save_dump('final_epoch_{}_ladder_net_mnist.pkl'.format(n),\n", "# lasagne.layers.get_all_param_values(output_layer))\n", "\n", "# uncomment if to save the whole network\n", - "# theano.misc.pkl_utils.dump(network_dump,\n", - "# 'final_epoch_{}_ladder_net_mnist.pkl'.format(n))" + "# save_dump('final_epoch_{}_ladder_net_mnist.pkl'.format(n),\n", + "# network_dump)" ] }, { "cell_type": "code", - "execution_count": 10, + "execution_count": 5, "metadata": { "collapsed": true }, @@ -894,15 +979,39 @@ }, { "cell_type": "code", - "execution_count": 12, + "execution_count": 14, "metadata": { "collapsed": false, - "scrolled": true + "scrolled": false }, - "outputs": [], + "outputs": [ + { + "data": { + "text/plain": [ + "" + ] + }, + "execution_count": 14, + "metadata": {}, + "output_type": "execute_result" + }, + { + "data": { + "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAA3QAAAGoCAYAAAAO86fGAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAIABJREFUeJzs3Xl4VeW5///3HSZlCBDCZCBhSEUGFajiLEFkqEerx1aP\noIdqnS5bW7XtaS3Wiu3POtb28LWDtIqodahWe7QKYTI4IEIVEVAQmWeQKcxD8vz+WHvHnZCQae+9\nhnxe15WLvdd4L4iuda/7Gcw5h4iIiIiIiIRPht8BiIiIiIiISN0ooRMREREREQkpJXQiIiIiIiIh\npYROREREREQkpJTQiYiIiIiIhJQSOhERERERkZCqNqEzs2Zm9oGZzTezxWb2m0q2GW1mC2I/75rZ\nKQnrVsWWzzezucm+ABERkVQzs5FmtsTMPjezn1Wyvo2ZvRK7380xsz4J624zs4Wxn9sSlt9jZuvM\n7KPYz8h0XY+IiERH4+o2cM4dNLMhzrl9ZtYIeM/MznHOvZew2QrgfOfcrtgNaQJwZmxdKVDgnNuR\n9OhFRERSzMwygMeAocAGYJ6Z/Z9zbknCZmOB+c65y82sF/AH4EIz6wtcD5wGHAGmmNnrzrkVsf0e\ndc49mraLERGRyKlRk0vn3L7Yx2axfXZUWD/HObcr9nUOkJOw2mp6HhERkQAaBCxzzq12zh0GXgAu\nrbBNH2AmgHNuKdDNzNoDvYEPnHMHnXMlwCzg8oT9LOXRi4hIpNUo0TKzDDObD2wCipxznx5j8xuA\nyQnfHTDNzOaZ2Y11D1VERMQXOcDahO/rKP/iEmABsUTNzAYBuUAXYBFwnpm1NbPmwEVA14T9bjWz\nj83sr2bWOlUXICIi0VXTCl2pc24A3s3pfDMbXNl2ZjYEuA5I7F9wjnNuIN5N7Ptmdm49YxYREQma\nB4C2ZvYR8H1gPlASa5b5IDANeDO+PLbPH4Eezrn+eC9M1fRSRERqrdo+dImcc8Vm9gZeX4BZieti\nA6FMAEYm9pdzzm2M/bnVzF7Fa7rybsVjm5mrffgiIhJGzrkwNTVcj1dxi+sSW1bGObcb+G78u5mt\nxOtfjnNuIjAxtvw+YtU+59zWhEP8BXi9spPr/igi0rDU9h5Zk1Eus+PNQMzseGAY8HGFbXKBfwD/\n7ZxbnrC8uZm1jH1uAQzHa35SVfCR+7nnnnt8j0HXpWvTdYXzJ6rXFkLzgHwzyzOzpsBVwGuJG5hZ\nazNrEvt8IzDLObcn9r197M9c4D+B52LfOyUc4nJ0f4zMT1SvLarXFeVr03WF76cualKh6wxMMrP4\n4CbPOOdmmNnN3j3GTQDuBrKAP8a2O+ycGwR0BF6NvV1sDPzNOTe1TpGKiIj4wDlXYma3AlPx7oNP\nOOc+q3Af7I13rywFFuONbBn3DzPLAg4D33POFceWP2Rm/fFGg14F3JyeKxIRkSipybQFC4GBlSx/\nPOHzjcBRA54451YC/esZo4iIiK+cc1OAXhWWJd4H51Rcn7Du/CqWj0lmjCIi0jBpOoEUKygo8DuE\nlIjqdUF0r03XFT5RvjaRKP9+R/XaonpdEN1r03U1DFbXtprJZmYuKLGIiEjqmBkuXIOi+Er3RxGR\nhqMu98hajXIpIlKdbt26sXr1ar/DkADIy8tj1apVfochInIU3avEb8m8R6pCJyJJFXuz5HcYEgBV\n/S6oQlc7uj+KJJ/uVeK3ZN4j1YdOREREREQkpJTQiYiIiIiIhJQSOhERERERkZBSQiciUgu33HIL\n9913X9K3FRERqY9Zs2bRtWvXsu/9+vXj7bffrtG2taX7W7BolEsRaTC6d+/OE088wQUXXFDnY/zp\nT39KybZ+y8jI4IsvvqBHjx5+hyIiInVk9tVYGosWLarxtscyadIk/vrXv/LOO++ULQvT/a0hUIVO\nRCSmpKTE7xB8U9Mbu4iINCzOuQZzjwjrc4ASOhFpEMaMGcOaN
Wu45JJLyMzM5JFHHmH16tVkZGTw\n5JNPkpeXx9ChQwG48sor6dy5M23btqWgoIBPP/207DjXXXcdv/zlL4Gvmqw8+uijdOzYkZycHJ56\n6qk6bbt9+3YuueQSWrduzRlnnMHdd9/NeeedV+X1vPvuu5xzzjm0bduWvLw8nn76aQCKi4sZM2YM\nHTp0oHv37uWaxCxfvpyCggLatGlDhw4dGDVqFACDBw/GOccpp5xCZmYmL730Uv3+skVEpE4eeugh\nrrjiinLLbrvtNm6//XYAnnrqKfr06UNmZib5+flMmDChymN1796dmTNnAnDgwAGuvfZasrKy6Nev\nH/PmzSu37YMPPkh+fj6ZmZn069ePf/7znwAsWbKEW265hffff59WrVqRlZUFlL+/AfzlL3/ha1/7\nGtnZ2Vx22WVs3LixbF1GRgaPP/44J554IllZWdx6661Vxjxv3jzOPvts2rZtS05ODj/4wQ84cuRI\n2frFixczfPhw2rVrR+fOnXnggQcAKC0t5Te/+U3ZNZx++umsX7++7D5fWlpadowhQ4bw5JNPAl71\n8dxzz+VHP/oR2dnZ3HvvvaxYsYKhQ4eSnZ1Nhw4duOaaayguLi7bf926dXzrW9+iQ4cOtG/fnh/+\n8IccPnyYdu3asXjx4rLttm7dSosWLdi2bVuV15ssSuhEpEF4+umnyc3N5V//+hfFxcX85Cc/KVv3\n9ttvs2TJEgoLCwG46KKLWL58OVu2bGHgwIFcffXVVR5306ZN7N69mw0bNvDXv/6V73//++zatavW\n237ve9+jVatWbNmyhaeeeopJkyZV+UZ0zZo1XHTRRdx22218+eWXfPzxx/Tv3x+AW2+9ld27d7Nq\n1SqKiop4+umnmThxIgB33303I0aMYOfOnaxbt44f/OAHgJdsAixcuJDi4uKjHiZERCQ9rrrqKiZP\nnszevXsBL1F56aWXyu5DHTt25M0336S4uJiJEydyxx138PHHH1d73HHjxrFy5UpWrlxJYWEhkyZN\nKrc+Pz+f9957j+LiYu655x6uueYaNm/ezEknncSf//xnzjrrLHbv3s327duPOvbMmTMZO3YsL7/8\nMhs3biQ3N5errrqq3DZvvPEGH374IQsWLODvf/87U6dOrTTORo0a8fvf/57t27fz/vvvM3PmTP74\nxz8CsGfPHoYNG8ZFF13Exo0b+eKLL8pexP72t7/lxRdfZMqUKRQXF/Pkk0/SvHlzoPoWKB988AH5\n+fls2bKFu+66C+ccY8eOZdOmTXz22WesW7eOcePGAd6/x8UXX0z37t1Zs2YN69ev56qrrqJJkyaM\nGjWKZ599tuy4zz//PBdeeCHt2rU75vmTwjkXiB8vFBEJuyD/t9ytWzc3Y8aMsu+rVq1yGRkZbtWq\nVVXus2PHDmdmrri42Dnn3LXXXuvuvvtu55xzRUVFrnnz5q6kpKRs+w4dOrgPPvigVtuWlJS4Jk2a\nuGXLlpWt+8UvfuHOO++8SmO6//773eWXX37U8pKSEte0aVO3ZMmSsmWPP/64GzJkiHPOuTFjxrib\nb77ZrVu37qh9zcwtX768yr+HuqjqdyG23Pf7Tlh+gvzflEhYBfm/q/POO88988wzzjnnpk6d6vLz\n86vc9rLLLnPjx493znn3ma5du5atS7zn9ejRw02dOrVs3YQJE8ptW1H//v3da6+95pxz7qmnnjrq\nfpR4f7v++uvdz372s7J1e/bscU2aNHGrV692znn3l9mzZ5etv/LKK92DDz54jL+Br/z+978vu989\n//zzbuDAgZVu16tXL/f6668ftTx+n0+89xYUFLgnnnii7Nry8vKOGcM///nPsvPOnj3bdejQodzx\n4ubMmeNyc3PLvp922mnupZdeqvK4ybxHqkInImlllpyfZOrSpUvZ59LSUu68807y8/Np06YN3bt3\nx8z48ssvK923Xbt2ZGR89
b/S5s2bs2fPnlptu3XrVkpKSsrFcazRx9auXUvPnj2PWv7ll19y5MgR\ncnNzy5bl5eWxfv16wGvKU1payqBBgzj55JPLKnciIlKen/eqUaNG8fzzzwNelWf06NFl6yZPnsxZ\nZ51Fu3btaNu2LZMnT67y/pRow4YN5e4xeXl55dY//fTTDBgwgLZt29K2bVsWL15co+PGj514vBYt\nWtCuXbuyew94lcW4Y90nly1bxiWXXELnzp1p06YNd911V1kcVd374uvqOqhXxfvtli1bGDVqFF26\ndKFNmzZcc801ZTGsW7eOvLy8cvfyuDPOOIPmzZsza9Ysli5dyvLly/nmN79Zp5hqSwmdiKSVc8n5\nqYuqml0kLn/uued4/fXXmTlzJjt37mTVqlWJlZKUaN++PY0bN2bdunVly9auXVvl9l27duWLL744\nanl2djZNmjRh9erVZctWr15NTk4O4N1QJ0yYwPr16/nzn//M9773PVasWJHEKxERiQY/71VXXHEF\nRUVFrF+/nldffbUsoTt06BDf/va3+elPf8rWrVvZsWMH3/jGN2p0f+rcuXO5+0rifWLNmjXcdNNN\n/PGPf2THjh3s2LGDvn37lh23uiaLJ5xwQrnj7d27l23btpVLIGvqlltuoXfv3ixfvpydO3dy3333\nlcXRtWtXli9fXul+ubm5la5r0aIFAPv27StbtmnTpnLbVLy+sWPHkpGRweLFi9m5cyfPPvtsuRjW\nrFlTrk9eou985zs888wzPPPMM3z729+madOmNbzy+lFCJyINRqdOnY5KYCreCHfv3k2zZs1o27Yt\ne/fu5ec//3nKR/fKyMjg8ssvZ9y4cezfv58lS5aUDXJSmauvvpoZM2bw8ssvU1JSwvbt21mwYAEZ\nGRlceeWV3HXXXezZs4fVq1fzu9/9jv/+7/8G4OWXXy57Y9qmTRsyMjLK3jJW9ncjIiLpl52dzeDB\ng7nuuuvo0aMHvXr1AryE7tChQ2RnZ5ORkcHkyZOr7ItW0ZVXXsn9999f1of6scceK1u3d+9eMjIy\nyM7OprS0lIkTJ5ab8qBjx46sW7eOw4cPV3rsUaNGMXHiRD755BMOHjzI2LFjOfPMM+s0z93u3bvJ\nzMykefPmLFmypNz0CBdffDGbNm1i/PjxHDp0iD179jB37lwArr/+eu6+++6yl50LFy5kx44dZGdn\nk5OTw7PPPktpaSlPPvlklUlhYgwtW7akVatWrF+/nocffrhs3aBBg+jcuTN33nkn+/bt4+DBg8ye\nPbts/dVXX82rr77K3/72N8aMGVPr668rJXQi0mDceeed/PrXvyYrK4tHH30UOPrN3JgxY8jNzSUn\nJ4d+/fpx9tln1+octUn+Erf9f//v/7Fz5046d+7Md77zHUaPHk2zZs0q3a9r1668+eabPPLII2Rl\nZTFgwAA++eQTAMaPH0/z5s3p0aMH559/Ptdccw3XXXcd4I0edsYZZ5CZmclll13G+PHj6datG+B1\nmB8zZgxZWVm8/PLLtbpmERFJrtGjRzNjxoxyg3K1bNmS8ePHc8UVV5CVlcULL7zApZdeWuUxEu8x\n99xzD7m5uXTv3p2RI0eWSzZ69+7Nj3/8Y84880w6derE4sWLOffcc8vWX3DBBfTt25dOnTrRoUOH\no84zdOhQfv3rX3P55ZeTk5PDypUreeGFFyqNo7LviR555BH+9re/kZmZyc0331xucJWWLVsybdo0\nXnvtNTp16sSJJ55IUVERAD/60Y+48sorGT58OK1bt+aGG25g//79AEyYMIGHHnqI7OxsPvvsM845\n55wqzx//u/rwww9p06YNl1xyCd/61rfK1mVkZPD666+zbNkycnNz6dq1K3//+9/L1nft2pUBAwZg\nZuX+DlPNUtmMqDbMzAUlFhGpOzNLafPEhuLOO+9k8+bNoe7nVtXvQmx5w5jUKAl0fxRJPt2
rJFVu\nuOEGTjjhBH71q18dc7tk3iMb1y5EERFJhaVLl3Lo0CFOPvlk5s6dyxNPPFE2T46IiIgE3+rVq3nl\nlVeYP39+Ws+rJpciIgGwe/duLr/8clq2bMmoUaP4n//5Hy655BK/wxIREZEa+OUvf8nJJ5/MT3/6\n06NGEU01NbkUkaRSMxaJU5PL5ND9UST5dK8SvyXzHqkKnYiIiIiISEgpoRMREREREQkpJXQiIiIi\nIiIhpYROREREREQkpDRtgYiIiIg0KHl5ecec4Fok1ZI5EqYqdCIi1Zg1axZdu3Yt+96vXz/efvvt\nGm1bW7fccgv33XdfnfcXEZHqrVq1CuecfvTj28+qVauS9vushE5EpAYS3+QuWrSI888/v0bbHsuk\nSZM477zzyi3705/+xF133VW3INOosthFREQk/ZTQiYj4xDkX2iY/YY5dREQkSpTQiUiD8NBDD3HF\nFVeUW3bbbbdx++23A/DUU0/Rp08fMjMzyc/PZ8KECVUeq3v37sycOROAAwcOcO2115KVlUW/fv2Y\nN29euW0ffPBB8vPzyczMpF+/fvzzn/8EYMmSJdxyyy28//77tGrViqysLACuu+46fvnLX5bt/5e/\n/IWvfe1rZGdnc9lll7Fx48aydRkZGTz++OOceOKJZGVlceutt1YZc2lpKb/5zW/Iz8+ndevWnH76\n6axfvx6A2bNnM2jQINq2bcsZZ5zB+++/X7bfU089Rc+ePcnMzKRnz548//zzVcYuIiIi6aeETkQa\nhKuuuorJkyezd+9ewEtwXnrpJa6++moAOnbsyJtvvklxcTETJ07kjjvu4OOPP672uOPGjWPlypWs\nXLmSwsJCJk2aVG59fn4+7733HsXFxdxzzz1cc801bN68mZNOOok///nPnHXWWezevZvt27cfdeyZ\nM2cyduxYXn75ZTZu3Ehubi5XXXVVuW3eeOMNPvzwQxYsWMDf//53pk6dWmmcv/3tb3nxxReZMmUK\nu3bt4sknn6R58+bs2LGDiy++mNtvv51t27Zxxx138B//8R/s2LGDffv2cdttt1FYWEhxcTGzZ8+m\nf//+NYpdRERE0iNQCd3+/X5HICJRlZuby8CBA3n11VcBmDFjBi1atOD0008H4Bvf+AbdunUD4Lzz\nzmP48OG888471R73pZde4he/+AWtW7cmJyeHH/7wh+XWf+tb36Jjx44AXHHFFXzta19j7ty5NYr5\nueee4/rrr+fUU0+lSZMm3H///bz//vusWbOmbJuf//zntGrViq5duzJkyJAqk9AnnniC++67j/z8\nfABOPvlk2rZtyxtvvMGJJ57I6NGjycjI4KqrruKkk07i9ddfB6BRo0YsXLiQAwcO0LFjR3r37l2j\n2EVERCQ9AjVtwfr1EHvWEJGIsnuT0+/K3eNqvc+oUaN4/vnnueaaa3j++ecZPXp02brJkyfzq1/9\nis8//5zS0lL279/PKaecUu0xN2zYQJcuXcq+VxyG+Omnn+Z3v/td2WhWe/fu5csvv6xRvBs2bODr\nX/962fcWLVrQrl071q9fT25uLkBZsgjQvHlz9uzZU+mx1q5dS48ePSo9R8WY8/LyWL9+Pc2bN+fF\nF1/k4Ycf5rvf/S7nnnsujzzyCL169apR/CIiIpJ6gUro1q1TQicSdXVJxJLliiuu4Cc/+Qnr16/n\n1VdfZc6cOQAcOnSIb3/72zz77LNceumlZGRk8J//+Z84V32snTt3Zu3atWWVq9WrV5etW7NmDTfd\ndBNvvfUWZ511FgADBgwoO251g4qccMIJ5Y63d+9etm3bVi6BrKnc3FyWL19Onz59jjrHP/7xj3LL\n1qxZwze+8Q0Ahg0bxrBhwzh48CB33XUXN910E7NmzdKAKCIiIgERqCaXsf75IiIpkZ2dzeDBg7nu\nuuvo0aNHWaXp0KFDHDp0iOzsbDIyMpg8eXKVfdEquvL
KK7n//vvZuXMn69at47HHHitbt3fvXjIy\nMsjOzqa0tJSJEyeyaNGisvUdO3Zk3bp1HD58uNJjjxo1iokTJ/LJJ59w8OBBxo4dy5lnnlmnee6u\nv/567r77br744gsAFi5cyI4dO7joootYtmwZL7zwAiUlJbz44ot89tlnXHzxxWzZsoXXXnuNffv2\n0aRJE1q2bElGRkaNYhcREZH0CFRCt26d3xGISNSNHj2aGTNmlA2GAtCyZUvGjx/PFVdcQVZWFi+8\n8AKXXnpplcdIrE7dc8895Obm0r17d0aOHMmYMWPK1vXu3Zsf//jHnHnmmXTq1InFixdz7rnnlq2/\n4IIL6Nu3L506daJDhw5HnWfo0KH8+te/5vLLLycnJ4eVK1fywgsvVBpHZd8T/ehHP+LKK69k+PDh\ntG7dmhtuuIH9+/eTlZXFv/71Lx555BGys7N55JFHeOONN8jKyqK0tJRHH32UnJwcsrOzefvtt/nT\nn/5Uo9hFREQkPawmTYrSwczcD37gGD/e70hEpD7MrEZNFSX6qvpdiC1Xm80aMjOn/6ZERBqGutwj\nVaETEREREREJKSV0IiIiIiIiIRWohE6DooiIiIiIiNRcoBK6rVvhyBG/oxARESnPzEaa2RIz+9zM\nflbJ+jZm9oqZLTCzOWbWJ2HdbWa2MPbzw4Tlbc1sqpktNbNCM2udrusREZHoCFRCl50Nmzb5HYWI\niMhXzCwDeAwYAfQFRpnZSRU2GwvMd86dCnwHGB/bty9wPXAa0B+4xMziM7zfCUx3zvUCZgI/T/W1\niIhI9AQqoevSRf3oREQkcAYBy5xzq51zh4EXgIrzWvTBS8pwzi0FuplZe6A38IFz7qBzrgSYBVwe\n2+dSYFLs8yTgstRehoiIRFGgErqcHCV0IiISODnA2oTv62LLEi0glqiZ2SAgF+gCLALOizWvbA5c\nBMRnhu/onNsM4JzbBGhCPxERqbXGfgeQqEsXDYwiEnZ5eXnHnOBaGo68vDy/Q0inB4D/NbOPgIXA\nfKDEObfEzB4EpgF74surOIYmmxORpJkzx3u27tLF70gk1QKX0KlCJxJuq1at8jsEkWRbj1dxi+sS\nW1bGObcb+G78u5mtBFbE1k0EJsaW38dX1b5NZtbRObfZzDoBW6oKYNy4cWWfCwoKKCgoqPvViEiD\ncPvt8F//BXfc4XckcixFRUUUFRXV6xjmXDBeCJqZe+YZx5tvwnPP+R2NiIikipnhnAtNGdfMGgFL\ngaHARmAuMMo591nCNq2Bfc65w2Z2I3COc+7a2Lr2zrmtZpYLTAHOdM4Vxyp3251zD8ZGzmzrnLuz\nkvO7oNyrRSQctm2D9u3hllvgD3/wOxqpjbrcI1WhExEROQbnXImZ3QpMxet7/oRz7jMzu9lb7Sbg\nDX4yycxKgcV4I1vG/cPMsoDDwPecc8Wx5Q8Cfzez7wKrgSvTdEkiEnHTp0PLlrB8ud+RSDoEqkK3\nbJlj+HBYscLvaEREJFXCVqHzmyp0IlJb3/0utGgBkyfDF1/4HY3URl3ukYFK6Pbtc7RtC/v3g8ZU\nEBGJJiV0taOETkRqwzmv1VthIXz967BnDzRp4ndUUlN1uUcGatqC44/33iZ8+aXfkYiIiIiIhM/i\nxdCsGfTtCyecAGvW+B2RpFqgEjrw5qLT1AUiIiIiIrU3ZQqMGOG1dsvPV5PLhiBwCZ0GRhERERER\nqZvCQi+hA+jZUwOjNARK6EREREREImDfPm9C8Qsu8L6rQtcwBC6hU5NLEREREZHamzULBg6EzEzv\ne8+eSugagsAldKrQiYiIiIjUXrz/XFx+vppcNgTVJnRm1szMPjCz+Wa22Mx+U8k2o81sQeznXTM7\nJWHdSDNbYmafm9nPqjtfTo4SOhERERGR2krsPwfQowesXAmlpf7FJKlXbULnnDsIDHHODQBOAS4w\ns3MqbLYCON85dyr
w/wETAMwsA3gMGAH0BUaZ2UnHOl+XLmpyKSIiIiJSG6tXw/btMGDAV8tatIA2\nbWDDBv/iktSrUZNL59y+2MdmsX12VFg/xzm3K/Z1DpAT+zwIWOacW+2cOwy8AFx6rHOpyaWIiIiI\nSO0UFsKwYZBR4eleA6NEX40SOjPLMLP5wCagyDn36TE2vwGYHPucA6xNWLeOr5K9SrVuDSUlUFxc\nk8hERERERGTKFBg58ujlSuiir6YVutJYk8suwPlmNriy7cxsCHAdUG1fuaqYqdmliIiIiEhNHT4M\nM2fC8OFHr9NcdNHXuDYbO+eKzewN4DRgVuK62EAoE4CRzrl4k8z1QG7CZl1iyyo1btw4AA4dgjfe\nKKB374LahCciIgFUVFREUVGR32GIiETWBx9A9+7QsePR6/Lz4eWX0x+TpI855469gVk2cNg5t8vM\njgcKgXudczMStskFZgD/7Zybk7C8EbAUGApsBOYCo5xzn1VyHhePZcwYb0LEa6+t59WJiEjgmBnO\nOfM7jrBIvD+KiFTm7ru9Kt0DDxy97t//hhtvhPnz0x+X1F5d7pE1qdB1BiaZmeE10XzGOTfDzG4G\nnHNuAnA3kAX8MbbdYefcIOdciZndCkyN7ftEZclcRRoYRURERESkZqZMgYcfrnxdvMmlc17XJome\nait06ZL4BvIPf4BFi+BPf/I5KBERSTpV6GpHFToROZYvv/SStq1boWnTyrfJzoZPP4UOHdIbm9Re\nXe6RNRoUJd1UoRMRERERqd60aTB4cNXJHGhglKgLZEKXk6OETkRERESkOoWFMGLEsbfR1AXRFsiE\nTtMWiIiIiIgcm3NeQlfZ/HOJlNBFWyATug4dYOdOOHjQ70hERERERILpk0+gRQuvSeWxqMlltAUy\nocvIgM6dYcMGvyMREREREQmmmjS3BFXooi6QCR1oYBQRERERkWOpaULXs6cSuigLbEKngVFERERE\nRCq3Zw/MnQtDhlS/bYcOXlemnTtTH5ekX2ATOg2MIiIiIiJSuaIiOO00aNWq+m3NvGaX6kcXTYFO\n6FShExERERE5Wk2bW8ZpYJToCmxCl5OjCp2IiIiISGVqm9BpYJToCmxCpwqdiIiIiMjRVqyA4mI4\n9dSa76OELrqU0ImIiIiIhEhhIQwf7k31VVNqchldgU3oOneGzZuhpMTvSEREREREgqO2zS1BFboo\nC2xC17QpZGV5SZ2IiIiIiMDhw94Il8OG1W6/nBzYvh327UtJWOKjwCZ0oIFRREREREQSvf++V23r\n0KF2+2UIIzozAAAgAElEQVRkQPfuXv87iZZAJ3TqRyciIiIi8pW6NLeMU7PLaFJCJyIiIiISElOm\n1D2h08Ao0RTohE5NLkVEREREPFu2eAnZWWfVbX9V6KIp0AmdKnQiIiIiIp5p02DIEGjSpG77K6GL\npkAndKrQiYiIiIh46tN/DtTkMqoCndCpQiciIiIiAqWl9U/o8vJgwwY4dCh5cYn/Ap3Q5eR4CZ1z\nfkciIiIiIuKfBQugdWtv6oG6atLEe75etSppYUkABDqha9kSmjWDHTv8jkRERERExD+FhTByZP2P\nk5+vZpdRE+iEDtTsUkRERESkvs0t4zQwSvQEPqGLN7sUERHxi5mNNLMlZva5mf2skvVtzOwVM1tg\nZnPMrE/Cup+b2WIz+8TM/mZmTWPL7zGzdWb2UewnCe/eRSSKdu+Gf/8bCgrqfywNjBI9gU/ounTR\nSJciIuIfM8sAHgNGAH2BUWZ2UoXNxgLznXOnAt8Bxsf2zQNuBAY4504BGgNXJez3qHNuYOxnSoov\nRURC6q23YNAgaNGi/sdShS56QpHQqUInIiI+GgQsc86tds4dBl4ALq2wTR9gJoBzbinQzczaA8XA\nIaCFmTUGmgMbEvazVAcvIuGXrP5z4FXolNBFS+ATOs1FJyIiPssB1iZ8XxdblmgBc
DmAmQ0CcoEu\nzrkdwG+BNcB6YKdzbnrCfrea2cdm9lcza52qCxCRcEtW/zmAHj1g9WooKUnO8cR/gU/oVKETEZEQ\neABoa2YfAd8H5gMlZtYDuAPIA04AWprZ6Ng+fwR6OOf6A5uAR9MftogE3RdfwL59cPLJyTne8cdD\ndraer6Oksd8BVEeDooiIiM/W41Xc4rrElpVxzu0Gvhv/bmYrgBXARcB7zrntseWvAGcDzznntiYc\n4i/A61UFMG7cuLLPBQUFFCRjZAQRCYXCQhg+HCyJDbTjzS7z8pJ3TKmboqIiioqK6nUMcwGZtdvM\nXGWxbNvmdd7UXHQiItFgZjjnQtN3zMwaAUuBocBGYC4wyjn3WcI2rYF9zrnDZnYjcI5z7lozOxV4\nFjgdOAhMBOY55/5gZp2cc5ti+98BnO6cG00FVd0fRaRh+OY3YfRouOqq6retqRtu8AZZuemm5B1T\nkqMu98jAV+iysuDAAdi7Nzkj+4iIiNSGc67EzG4FpuJ1VXjCOfeZmd3srXYTgN7AJDMrBRYD18f2\nXWBmTwMfAiV4TTEnxA79kJn1B0qBVcDNabwsEQmBQ4dg1iyYODG5x9VIl9ES+ITO7KuBUU480e9o\nRESkIYpNKdCrwrLHEz7Pqbg+Yd3DwMOVLB+T5DBFJGJmz4ZevaBdu+Qet2dPmDcvuccU/wR+UBTQ\nwCgiIiIi0vBMmZK80S0TqUIXLaFI6DQwioiIiIg0NMmcfy5Rz56wfDmoe240hCKh69JFc9GJiIiI\nSMOxeTOsWgVnnJH8Y2dmQvPm3jkk/EKT0KlCJyIiIiINxdSpcMEF0DhFI16o2WV0hCKhiw+KIiIi\nIiLSEKSq/1xcvNmlhF8oEjpV6ERERESkoSgthWnTUpvQqUIXHUroREREREQCZP58b6qCvLzUnUMJ\nXXSEIqHr2BG2b/cmVxQRERERibLCwtRW50BNLqMkFAldo0ZeUrdxo9+RiIiIiIikVqr7z4EqdFES\nioQONDCKiIiIiERfcbHX5HLw4NSep107KCnxWsFJuIUmoVM/OhERERGJupkz4ayzvHniUsnMq9Kp\n2WX4KaETEREREQmIdPSfi1Ozy2gITUKnJpciIiIiEmXOpaf/XJwGRomG0CR0qtCJiIiISJQtWwaH\nD0Pfvuk5nyp00RCahE4VOhERERGJsnhzS7P0nE8JXTSEJqFThU5EREREoiyd/edATS6jwpxzfscA\ngJm5Y8Vy4AC0bg3790NGaNJQERGpyMxwzqXp/XP4VXd/FJFoOHgQ2reHVasgKys95ywthZYtYcsW\n70/xX13ukaFJjY47DjIzYetWvyMREREREUmud9/1+s6lK5kDr0jSo4eqdGEXmoQO1OxSRERERKIp\n3c0t4zQXXfiFKqHTwCgiIiIiEkV+JnQaGCXcQpXQqUInIiIiIlGzYQOsXQunn57+c2tglPBTQici\nIiIi4qOpU2HoUGjcOP3nVoUu/EKV0KnJpYiIiIhETWEhjBzpz7mV0IVftQmdmTUzsw/MbL6ZLTaz\n31SyTS8zm21mB8zsRxXWrTKzBbH959YnWFXoRERERCRKSkpg2jR/+s8BdO0Kmzd70yZIOFVb2HXO\nHTSzIc65fWbWCHjPzM5xzr2XsNk24AfAZZUcohQocM7tqG+wOTlK6EREREQkOj76CDp29AoXfmjc\nGHJzYeVKOOkkf2KQ+qlRk0vn3L7Yx2axfXZUWP+lc+5D4Eglu1tNz1OdLl28JpeaX1VEREREomDK\nFP+qc3E9e6rZZZjVKNEyswwzmw9sAoqcc5/W4hwOmGZm88zsxroEGZeZ6U2AuGtXfY4iIiIiIhIM\nfvafi9NcdOFW0wpdqXNuANAFON/MBtfiHOc45wYCFwHfN7Nz6xBnGQ2MIiIiIiJRsGsXfPIJnHee\nv3FoYJRwq9XgqM65YjN7AzgNmFXDfTbG/txqZ
q8Cg4B3K9t23LhxZZ8LCgooKCg4apv4wCh9+9Ym\nchER8UtRURFFRUV+hyEiEjgzZsDZZ8Pxx/sbR8+eXqVQwqnahM7MsoHDzrldZnY8MAy491i7JOzb\nHMhwzu0xsxbA8GPtm5jQVUUDo4iIhEvFF3T33nusW4iISMNRWOh//zlQhS7salKh6wxMMrP44CbP\nOOdmmNnNgHPOTTCzjsC/gVZAqZndBvQB2gOvmpmLnetvzrmp9Qk4PjCKiIiIiEhYOecNiHL77X5H\nAt27w5o1cOSIP5ObS/3UZNqChcDASpY/nvB5M9C1kt33AP3rE2BFXbp4w7uKiIiIiITV0qVeUheE\nqQKaNfOmTli71kvuJFySMp1AOmlQFBEREREJu3hzS7Pqt00HNbsMr9AldPFBUUREREREwioo/efi\nNBddeCmhExERERFJowMH4N134cIL/Y7kK5qLLrxCl9BlZ8PevbB/v9+RiIiIiIjU3jvvwMknQ5s2\nfkfyFTW5DK/QJXRmcMIJ6kcnIiIiIuEUtOaW4DW5VIUunEKX0IEGRhERERGR8ApyQlda6nckUluh\nTOjUj05EREREwmjdOti4EU47ze9IymvZEjIzvdgkXJTQiYiIiIikydSp3mAojRr5HcnRNDBKOIUy\noVOTSxERSSczG2lmS8zsczP7WSXr25jZK2a2wMzmmFmfhHU/N7PFZvaJmf3NzJrGlrc1s6lmttTM\nCs2sdTqvSUT8EcTmlnEaGCWcQpnQqUInIiLpYmYZwGPACKAvMMrMTqqw2VhgvnPuVOA7wPjYvnnA\njcAA59wpQGPgqtg+dwLTnXO9gJnAz1N9LSLir5ISmD4dhg/3O5LKaS66cAplQqcKnYiIpNEgYJlz\nbrVz7jDwAnBphW364CVlOOeWAt3MrD1QDBwCWphZY6A5EL+DXQpMin2eBFyW0qsQEd/Nm+c9x+bk\n+B1J5dTkMpxCmdCpQiciImmUA6xN+L4utizRAuByADMbBOQCXZxzO4DfAmvwErmdzrkZsX06OOc2\nAzjnNgEdUnYFIhIIQW5uCWpyGVaN/Q6gLjp1gq1b4cgRaBzKKxARkYh5APhfM/sIWAjMB0rMrAdw\nB5AH7AJeNrPRzrnnKjmGS1u0Dcy118LDD0P79n5HIkFz5Ig3QMm2bek535o18Oqr6TlXXcSnLnDO\nm/s5iD7+GJ5/Hh580O9IgiOU6VCTJpCdDZs2edU6ERGRFFqPV3GL68JXzSYBcM7tBr4b/25mK4AV\nwEXAe8657bHlrwBnA88Bm82so3Nus5l1ArZUFcC4cePKPhcUFFBQUFC/K2pA1q2DSZPgmmu8B3eR\nRHPnwpdfeglCOjRqBL17p+dcdZGVBRkZXoKbne13NJV78UX4wx/g17+Gpk39jqb+ioqKKCoqqtcx\nQpnQwVfNLpXQiYhIis0D8mMDnGzEG9RkVOIGsREq9znnDpvZjcDbzrk9ZrYUuNvMjgMOAkNjxwN4\nDbgWeBBvIJX/qyqAxIROaqew0Ptz+XIldHK0KVPgP/4DTj7Z70iCIz4wSlATuilTvKTzvfdgyBC/\no6m/ii/p7r333lofI5R96EADo4iISHo450qAW4GpwGLgBefcZ2Z2s5ndFNusN7DIzD7DGw3ztti+\nC4CngQ/x+tkZMCG2z4PAsFjSNxSv2aYkWWEh9OqlfkFSucJCGDnS7yiCJcgDo2zeDKtWwfe//9XL\nGolAhU5ERCTVnHNTgF4Vlj2e8HlOxfUJ6x4GHq5k+XZANaMUig8RP24c1LNFk0TQtm2wZAmcc47f\nkQRLkAdGmToVLrjAq6r+4AfwgF6DASGu0CmhExERkWOJDxF//vnBfUAV/0yf7v1uRKEfVjLFB0YJ\novgooWecAStXeuNpSIgTOjW5FBERkWOZMsV7+EscuU8kLv77IeUFtUJXWupV6EaM8AZIvOAC77uE\nOKFThU5ER
ESOJd4/qlUraNlSb/PlK855yYD6zx0tqAnd/PnQrh3k5XnfR45UP7q40CZ0OTlK6ERE\nRKRyO3bA4sVw7rne96A+pIo/Fi2C447zfi+kvI4dYd8+KC72O5LyKk7KPmIETJvmVe4aulAndBs2\nqPmEiIiIHG36dC+ZO+4473t8KHYRODo5kK+YBbMfXcV/s7w8b968+fP9iykoQpvQNW8OLVp4k0GK\niIiIJKrYPyrIQ7FL+qn/3LEF7QVIcTF89BEMHlx++YgR3r9lQxfahA40MIqIiIgczbmj5xdTk0uJ\n27sXPvjAG1RDKhe0FyAzZ8KZZ3oFnUTqR+cJdUKngVFERESkok8/hcaN4cQTv1oWxCZk4o9Zs+Dr\nX/cGy5HKBe0FSFUTwA8e7DW5DFp/v3QLdUKngVFERESkonhfG7OvluXnw7Jl6nsv6j9XE0F6AeJc\n1U1kmzf3KnczZ6Y/riAJdULXpYuaXIqIiEh5lT2wZ2V5f27fnv54JFjUf656QarQLVsGhw9D376V\nr1c/uggkdKrQiYiISNy+fTB7NgwdWn65WfD6BUn6rVoFO3dC//5+RxJsXbrA1q2wf7/fkXgvaIYP\nL19xTxTvR9eQq++hTug0KIqIiIgkevtt72G9deuj1wWp6iD+iCcHGaF+Ak69Ro2gWzdYudLvSKru\nPxfXt69XwVu2LH0xBU2of51VoRMREZFEx+ofFaR+QeIP9Z+ruSC8ADl40HtJc+GFVW9j5iXpDXm0\nSyV0IiIiEhnHemAPwgOq+OfwYW/wjOHD/Y4kHIIwF92770KfPl/1ga3KiBFK6EKrdWsoKdFQpSIi\nIgJr1nj9fr7+9crXB+EBVfwzZ473O9Chg9+RhEMQ+pzWtKJ64YVeJe/gwdTHFEShTujMNNKliIiI\neAoLYdiwqvtHBeEBVfyj5pa1E4SKdnX95+LatfMqee++m/qYgijUCR1oYBQRERHxVPfA3rkz7N7t\n/UjDo4Sudvzuc7phA6xdC6efXrPtG3Kzy9AndOpHJyIiIkeOwIwZx+4fZQY9eqhK1xBt3Qqffw5n\nneV3JOHRrZv3jH34sD/nnzrVm36kceOaba+ELsSU0ImIiMgHH0BenleFOxY1u2yYpk2DIUOgaVO/\nIwmPpk29/55Wr/bn/LWtqA4a5FX0NmxIXUxBFfqETk0uRUREpKYPf0HoFyTpp+aWdePXC5CSEi8J\nr82/WePGXkVv6tTUxRVUoU/oVKETERGRmj6w+90vSNLPOe8hXwld7fn1AuSjj6BjR+jatXb7NdRm\nl6FP6FShExERadi2bYPPPoNzzql+W1XoGp5PPoGWLb3+k1I7fk31UdeK6ogRXmWvpCT5MQVZ6BM6\nVehEREQatmnTYPBgaNas+m01F13DM2VKzYa+l6P51eRyypS6JXRdu3qVvQ8/TH5MQRb6hK5DB9i5\ns+FOJCgiItLQ1eZtfteusGULHDiQ2pgkONR/ru78qGjv2gULFsD559dt/4bY7DL0CV1GhjcCT0Mc\n0UZERKShq23/qMaNvdEwV65MbVwSDHv2wLx5UFDgdyTh1KOH999KaWn6zjljhtd8+vjj67a/ErqQ\nUrNLERGRhmnRIq+pZX5+zfdRs8uGo6jIm5i6ZUu/Iwmn5s0hKyu941XUt6J6/vlehW/nzuTFFHSR\nSOg0MIqIiEjDFO8fZVbzfTQXXcOh/nP1l84XIM7Vvf9c3PHHexW+GTOSF1fQRSKhU4VORESkYarL\n23yNdNlwqP9c/aXzBcjSpV7zzt6963echtbsUgmdiIiIhNLevfDBBzBkSO3201x0DcOKFV4fulNO\n8TuScEvnC5DCwtpX3CsTT+icS05cQReJhE5NLkVERBqeWbNg4EDIzKzdfqrQNQyFhTB8eP2Tg4Yu\nnU0uk1VR7d3bq/QtXVr/Y4VBJBI6VehEREQanrr2j+rWDdasgSNHkh6SBIj
6zyVHuppcHjgA77wD\nQ4fW/1hm3r/9lCn1P1YYRCKhU4VORESk4anr2/xmzbwpj9asSX5MEgyHDnkjXA4b5nck4Rev0KW6\n+eI778DJJ0Pbtsk5XkPqRxeJhO6EE2DTJigp8TsSERERSYdVq7xhyfv3r9v+anYZbe+/DyeeCNnZ\nfkcSfm3aeC9Btm5N7Xni/eeSZehQePddr/IXdZFI6Jo29ebI2LzZ70hEREQkHQoLvepLRh2fZDQX\nXbRpdMvkSscLkGT/m7Vt61X83nkneccMqkgkdKBmlyIiIg1JfftHaS66aFP/ueRK9QuQdetgwwY4\n7bTkHreh9KOLTEKngVFEREQahsOH4a23vBEM60pNLqNr82ZYuRLOOMPvSKIj1S9Apk6FCy+ERo2S\ne9yG0o8uMgldTo4SOhERkYZgzhzo0QM6dKj7MTQXXXRNm+bNTdikid+RREeqX4Aku/9c3GmnwcaN\n0c8RIpPQdemiJpciIiINQTL62vTs6U08XVqanJgkONR/LvlS+QKkpASmT69fxb0qjRp5lb+pU5N/\n7CCJVEIX9exbREREktM/qkULaN3a67cj0VFaqoQuFVJZoZs3zxuxPicnNcdvCP3oqk3ozKyZmX1g\nZvPNbLGZ/aaSbXqZ2WwzO2BmP6qwbqSZLTGzz83sZ8kMPpEGRRERkVSp7l5mZm3M7BUzW2Bmc8ys\nT2z5ibH750exP3eZ2Q9j6+4xs3WxdR+ZmYZwqIGtW2HZMjjrrPofSwOjRM/HH3sjn3fr5nck0dK+\nvTe3386dyT92qhPw4cO9CmCUpzerNqFzzh0EhjjnBgCnABeY2TkVNtsG/AB4OHGhmWUAjwEjgL7A\nKDM7KRmBV6QKnYiIpEIN72VjgfnOuVOB7wDjAZxznzvnBjjnBgJfB/YCryTs96hzbmDsJ+LvkJNj\n2jQoKPCmLKovDYwSParOpYZZ6ppdpqr/XFxOjvczb17qzuG3GjW5dM7ti31sFttnR4X1XzrnPgSO\nVNh1ELDMObfaOXcYeAG4tH4hVy4+KEqqZ7EXEZEGpyb3sj7ATADn3FKgm5m1r7DNhcBy51zi60dL\nUcyRlcwHds1FFz1K6FInFS9AduyARYvg3HOTe9yKoj7aZY0SOjPLMLP5wCagyDn3aQ2PnwOsTfi+\nLrYs6Vq29Gax37Gj+m1FRERqoSb3sgXA5QBmNgjIBbpU2Oa/gOcrLLvVzD42s7+aWevkhRxN8f5R\nyXqbryaX0VJcDB9+CIMH+x1JNKXiBcj06V4yd9xxyT1uRVHvR1fTCl1prMllF+B8Mwvkfypqdiki\nIj55AGhrZh8B3wfmA2U9NsysCfBN4KWEff4I9HDO9cd7Yfpo+sINp08+gVatvCkLkkFNLqPlrbfg\nzDO9AW8k+VLxAiRdFdVzz4XFi6Nb+Glcm42dc8Vm9gZwGjCrBrusx3tLGdcltqxS48aNK/tcUFBA\nQUFBbcIrGxjllFNqtZuIiKRQUVERRUVFfodRH9Xey5xzu4Hvxr+b2UpgRcIm3wA+dM5tTdhna8L6\nvwCvVxVAfe+PUZHsh794nyDnvD5CEm5qbpla+fnw9NPJO55z3r/Z//xP8o5ZleOO85K66dPhiitS\nf77aSMY90lw1nc7MLBs47JzbZWbHA4XAvc65GZVsew+wxzn329j3RsBSYCiwEZgLjHLOfVbJvq66\nWKpz/fXem5kbb6zXYUREJIXMDOdcaB6fa3IvizWX3OecO2xmNwLnOOeuTVj/PDDFOTcpYVkn59ym\n2Oc7gNOdc6MrOX+9749RccEFcMcdcMklyTtmu3awZIk3ip+El3Negv5//wcnn+x3NNG0dq33nJ2s\nUeU//RQuughWrkzPC5X//V9YuBD++tfUn6s+6nKPrEmFrjMwycwMr4nmM865GWZ2M+CccxPMrCPw\nb6AVUGpmtwF9nHN7zOxWYGps3ycqS+a
SRU0uRUQk2ZxzJZXdyxLvg0BvvHtlKbAYuD6+v5k1xxsQ\n5aYKh37IzPoDpcAq4OaUX0yI7dnjjVI3ZEhyjxvvF6SELty++AIOHoR+/fyOJLpycmD7dti7NznN\nWqdM8Sqq6aqOjxwJDz8czYp8tQmdc24hMLCS5Y8nfN4MdK1i/ylAr3rEWGM5OTB3bjrOJCIiDUll\n97IK98E5FdcnrNsHHJUuOOfGJDnMSHvrLTj9dG8QtGSK9wtKxrx24p94c8uoPagHSUYGdO8OK1Yk\npwpaWAg3p/E11oknQuPGXmWwb9/0nTcdajQoSlioQiciIhJNqeofpYFRokH959IjWQOj7N8Ps2fD\n0KH1P1ZNmUV3+oJIJXTxQVFEREQkWlL1wJ6qyZIlfQ4ehLffhgsv9DuS6EvWC5C334b+/aF1midr\nUUIXAqrQiYiIRM/y5bB7d2pGsVaFLvzeew969/YGuJHUStZcdPH+c+k2dKhXGdy3L/3nTqVIJXRZ\nWXDggNdZU0RERKIhXp3LSMFTixK68FNzy/RJVpNLv/7NWrf2KoNvv53+c6dSpBI6MzW7FBERiZpU\nPvx16OC9DN61KzXHl9RTQpc+yXgBsnYtbNkCA48acjE9otjsMlIJHajZpYiISJQcOgRFRTBsWGqO\nb6Z+dGG2cSOsWQODBvkdScOQm+v9nR86VPdjFBbC8OHQqFHy4qoNJXQhoAqdiIhIdLz/Pnzta6md\nJy5Z/YIk/aZO9fpFNa7JzMpSb02aeMWTVavqfgy/+s/FDRzoVQjXrPEvhmSLXEKnCp2IiEh0TJni\nTQicSsnqFyTpp+aW6VefFyBHjsCMGV6Fzi+NGnnnj1KVTgmdiIiIBFY6Htg1MEo4lZbCtGlK6NKt\nPi9A5s71mm127pzcmGoras0uI5fQqcmliIhINGzeDCtWwJlnpvY86kMXTh995DXF7drV70galvq8\nACksTH3FvSaGD/cqhUeO+B1JckQuoVOFTkREJBqmTYMhQ7x+O6mkCl04BSU5aGjq0+TS7/5zcZ07\ne5XCuXP9jiQ5IpfQqUInIiISDenoPwfes8O2bbB/f+rPJckTlOSgoalrk8tt2+Czz+Ccc5IfU12M\nHOn9DkVB5BK6Tp28X5j6DKcqIiIi/iot9UYwTMcDe6NG0K2b17xTwmHXLvj4Yzj/fL8jaXh69PBG\nuSwpqd1+06d7/17NmqUkrFqLUj+6yCV0jRpBx47eHBkiIiISTh9/DG3beolWOqjZZbjMnAlnnw3H\nH+93JA3Pccd5fRdr28UpaE1kzznHqxhu2+Z3JPUXuYQO1OxSREQk7NI9HL3moguXoCUHDU1tX4A4\nF7wpJpo18yqG06f7HUn9RTKh08AoIiIi4Zau/nNxmosuPJxT/zm/1fYFyKJFXgKVn5+6mOoiKv3o\nIpnQqUInIiISXsXF3pD0gwen75xqchken3/u9d/q3dvvSBqu2r4AiVfnzFIXU12MGOH11XXO70jq\nJ5IJnSp0IiIi4fXWW3DGGdCiRfrOqbnowiOoyUFDUtsXIEFtIpuf71UOFy3yO5L6UUInIiIigeJH\nX5u8PK91j0bJDr6gJgcNSW2aXO7dC3PmeHNKBo1ZNEa7jGRCpyaXIiIi4RTvH5XuB/amTeGEE2D1\n6vSeV2rnwAF45x0YOtTvSBq2nj29aT5q0lRx1iwYOBAyM1MfV11EoR9dJBM6VehERETC6Ysv4OBB\n6Ncv/efWwCjB9+673u9G27Z+R9KwZWZ6TaI3bap+26CNblnRkCHwwQdeJTGsIpnQnXACbNjgTUoq\nIiIi4VFYCMOH+9M/SgOjBF/Qk4OGpKb9ToP+b5aZ6VUQZ83yO5K6i2RCd9xx0Lo1bN3qdyQiIiJS\nG34+/GkuuuBT/7ngqMkLkFWrYPt2GDAgLSHVWdj70UUyoQM1uxQREQmbgwe9t+TDhvlzfjW5DLb1\n672
f007zOxKBmr0AiVfcMwKecYS9H13A/3rrTgOjiIiIhMt773lzi7Vr58/51eQy2KZOhQsvhEaN\n/I5EoGYvQILe3DKuf3/YudOrKIZRZBM6VehERETCxe+Hvx49vAe6khL/YpCq+f37IeVV9wLk8GGY\nOdOr0AVdRobXMiCszS6V0ImIiEgg+P3AfvzxXnVQLXyCp6QEpk8PR3LQUFTX5HLOHO8lSceO6Yup\nPsLcjy6yCZ2aXIqIiITHxo3eHHBnnOFvHBoYJZj+/W/o3Nl7YS/B0K6dNw/d9u2Vr/f7BU1tDR/u\nVRQPH/Y7ktqLbEKnCp2IiEh4TJ3qTRbduLG/cWhglGAKW3LQEJgdu9ll2P7NOnb0Kopz5vgdSe1F\nNqFThU5ERCQ8gvLwp4FRgikovx9SXlVz0X35JXz+OZx9dvpjqo+wNruMbEIXr9A553ckIiIiciyl\npTBtWjAe2NXkMnh27ICFC+G88/yORCqq6gXItGlQUABNm6Y9pHpRQhcwmZneiDW7dvkdiYiIiBzL\nR2N5gCQAACAASURBVB9B+/aQm+t3JGpyGUQzZsC558Jxx/kdiVRU1QuQKVOC8YKmts4+26ssbt3q\ndyS1E9mEDtTsUkREJAyC9PAXb0KmFj7BoeaWwVXZCxDnvD6xYfw3a9rUqyxOm+Z3JLUT6YROA6OI\niIgEX5Ae2Fu39ipBW7b4HYmAlxwE6fdDyqusyeUnn0CLFt7LkTAKY7PLSCd0qtCJiIgE265d8PHH\ncP75fkfyFQ2MEhxLlnijKfbq5XckUpnOnWH3btiz56tlhYUwcqR/MdVXPKErLfU7kpqLdEKnCp2I\niEiwzZzp9Vtp3tzvSL6igVGCY8oULzkw8zsSqYyZN9R/YrPLIDWhrouePaFVK6/SGBZK6ERERMQ3\nQXz408AowaHmlsGX+AJkzx6YNw+GDPE3pvoKW7PLSCd0anIpIiISXEHtH6Uml8Gwfz/Mnu1NOC/B\nlfgCpKgITjsNWrb0NaR6U0IXIKrQiYiIBNfnn8ORI9Cnj9+RlFfVZMmSXu+8A6ec4g1UI8GV+AIk\n7P3n4oYM8SqNiX0DgyzSCZ0qdCIiIsEVf/gLWv8oVeiCId5/ToItscllEJtQ10XLlnD66fDWW35H\nUjORTuiys72Rd/bv9zsSEREJMzMbaWZLzOxzM/tZJevbmNkrZrbAzOaYWZ/Y8hPNbL6ZfRT7c5eZ\n/TC2rq2ZTTWzpWZWaGYNrg4R1Ie/7Gyvcrhjh9+RNGxBbI4rR4s3uVyxwnvuPuUUvyNKjjA1u4x0\nQpeRoSqdiIjUj5llAI8BI4C+wCgzO6nCZmOB+c65U4HvAOMBnHOfO+cGOOcGAl8H9gKvxPa5E5ju\nnOsFzAR+nvKLCZADB+Ddd+HCC/2O5Ghmanbpt7VrvbkABw70OxKpTteusHkzvPYaDB/uPX9HgRK6\nAFFCJyIi9TQIWOacW+2cOwy8AFxaYZs+eEkZzrmlQDcza19hmwuB5c65eO/uS4FJsc+TgMtSEXxQ\nvfsu9O0Lbdv6HUnl1OzSX1Onesl+o0Z+RyLVadwYcnPhz3+OVkX1lFO8imMYXuxEPqHTwCgiIlJP\nOcDahO/rYssSLQAuBzCzQUAu0KXCNv8FPJ/wvYNzbjOAc24T0CGJMQde0AdP0Fx0/lL/uXDJz/cG\nORo+3O9IkicjIzxVusZ+B5BqqtCJiEgaPAD8r5l9BCwE5gMl8ZVm1gT4Jl4zy6q4lEYYMFOmwF/+\n4ncUVcvP96qIQbZ3r9dvKWqcgxkzYPx4vyORmurZ02se275iu4SQGzECnnoK/n/27js8qmpr4PBv\npxB675BAQu89lAQICSAoCoKo4AUEC95L0U/lil1ERbherwXFCmJFmgUEQg0ldEjooUiABAi9EyBl\nf3+cBEJIyEwyM2fKep9nHjOn7XU05syavffaHTuaHcnduX1CV726e
/6xE0II4TBHMXrcMlXP2HaT\n1voSMCzzvVIqHsj69OkJbNFan8qy7YRSqpLW+oRSqjJwMrcA3nrrrZs/h4WFERYWZv1dOJFjx4wv\nW9u0MTuS3NWubXyQc2avvAJz50Lp0mZHYnvdu0OVKmZHISzVtSs0aWJ2FLZ3zz3wv//BwIH2a+Py\n5SiuXIkq0DWU1s7xhaBSStsjltmz4eefjT94QgghzKeUQmvtZIXqc6eU8gb2AhHAcWAjMEBrvSfL\nMaWAq1rrFKXUU0CI1vrxLPt/ARZpradn2TYROKu1nphRObOM1vqOHjx7PR/NNG0aLFwIM2eaHUnu\nEhMhONhIPp1V3brGv8Pmzc2ORAhhK/l5Rrp9D50MuRRCCFEQWus0pdRIYDHG3PNvtdZ7lFLDjd36\nK6ABMF0plQ7sAp7IPF8pVRSjIMrT2S49EZiplBoGHAYetv/dOAdnnz8HULUqnD9vDGssVszsaO50\n8CBcvOg+JeKFEPnn9j10CQnQrp0kdUII4SxcrYfObO7WQ5eWBhUrwrZtxrQIZ9aoEfzyi3MmTVOm\nwPr1MH163scKIVxHfp6Rbl/lsnJlOHXKWCBUCCGEEObavNmYG+XsyRzcWjDZGcmi20KITG6f0Pn6\nQvnykJRkdiRCCCGEcKVExFnXoktJgago6NbN7EiEEM7A7RM6kLXohBBCCGfhCvPnMjnrWnTr1hnJ\npruViBdC5I9HJHRSGEUIIYQw37lzsH2786/plMlZh1zKottCiKw8IqGTHjohhBDCfMuWQWgoFC5s\ndiSWcdYhl640bFUIYX+S0AkhhBDCIVwtEQkIMObgX79udiS3nDxp9Bq2a2d2JEIIZ+ERCZ0MubSt\nFfErGL1wtNlhCCGEcCFau9b8OQAfH/D3h0OHzI7kliVLoEsXo+ibEEKAhyR00kNnO4fPH2bg3IFM\njZnKjbQbZocjhBDCRezZA0pBvXpmR2IdZyuMIvPnhBDZ5ZnQKaX8lFIblFIxSqldSqn3cjnuE6XU\nfqVUrFKqRZbth5RS2zLO32jL4C0lPXS2kZySTN+Zffl3h39Tp1wdth7fanZIQgghXETmcEvlYkvK\nO1NhlPR0WLzYtYatCiHsL8+ETmt9HeiitW4BNAXClVIhWY9RSvUEammt6wDDgSlZdqcDYVrrFlrr\nYNuFbrnMhE5rM1p3D1prnvnrGeqXr89z7Z4jxD+E6CPRZoclhBDCRbja/LlMzlQYZds2KF0aatY0\nOxIhhDOxaMil1vpqxo9+Geecy3ZIb+D7jGM3AKWUUpUy9ilL27GXokWhWDE4fdrMKFzbZ5s+IzYp\nlq/v/xqllJHQJUhCJ4QQIm/JyRAdDRERZkdiPWcacumqSbEQwr4sSrSUUl5KqRggCYjSWu/Odkg1\nICHL+6MZ2wA0sEQptUkp9VRBA84vGXaZf6sPr+adVe/w2yO/UdS3KAChAaGsObIGLd2eQggh8rBq\nFTRrZvQuuRpnGnIp8+eEEDmxtIcuPWPIZXWgk1KqsxVthGitWwL3AiOUUqH5iLPApDBK/hy9eJRH\nZj/C9D7TCSoTdHO7fyl/CvsU5sBZJ/naUgghhNNy5Z6lwEA4fBjS0syN49Il2LIFOlvzCUwI4RF8\nrDlYa31RKfUX0BpYmWXXUcA/y/vqGdvQWh/P+OcppdRvQDCwJqfrv/XWWzd/DgsLIywszJrw7kp6\n6Kx3PfU6/Wb2Y1TwKO6pfeeTOLOXrk65OiZEJ4RwFVFRUURFRZkdhjBRZCRMm2Z2FPlTuDBUrAgJ\nCebOXVuxAtq2NaaQCCFEVnkmdEqp8kCK1vqCUqoI0A0Yl+2wP4ERwK9KqXbAea31CaVUUcBLa31Z\nKVUM6J7DuTdlTehsTXrorDd64Wiql6zO2NCxOe7PnEc3tMVQB0cmhHAl2b+gGzcu18eAcEMJCXDi\nBLRqZXYk+ZdZGMXMhM6VezmFE
PZlyZDLKsCKjDl064E/tdbLlFLDlVJPA2itFwDxSqkDwJfAvzLO\nrQSsyXLuPK31YpvfhQUkobPO11u+Zk3CGqb1nobKpcZ0Zg+dEEIIkZvISOjWDby9zY4k/5yhMMqi\nRZLQCSFylmcPndZ6B9Ayh+1fZns/Modj4oHmlgZz5caV295nTyQUdyYWlh5TsQokHIOUNIWvt6+l\nIXmkDYkbeHX5q6wZtoYSfiVyPa5xxcYkXU7i9NXTlC9a3oERCiGEcBWRkXDffWZHUTBmF0Y5cMCo\nFNqkiXkxCCGcl1Vz6Oyt4gcVb/6cvXqi5s5qinkdk3W/1pDaFgq/m8bywcvpXFNmFeck6XISD816\niG8f+Ja65ere9VhvL2/aVm/L2oS1PFDvAQdFKIQQwlWkpsKyZfDxx2ZHUjC1a8P69ea176qLsgsh\nHMOpErorr1zJ+6B8OnfOGPv+3O9vsvDAQknocpCSlsLDsx7miRZPcH+9+y06J9TfGHYpCZ0QQojs\nNm0Cf3+oWtXsSAqmVi1ze+giI2HgQPPaF0I4N1MX/Hak0qWNbwrbVgpnWfwys8NxSi8sfoFShUvx\nRuc3LD4nJEAWGBdCCJEzdynkkZnQmbH06o0bEBVlzEMUQoiceExCp5RRGKVaejviTsdxLvmc2SE5\nle+3fc+iA4v44cEf8FKW/1q0rdaW2KRYrqVes2N0QgghXJG7FPIoUcJ4HT/u+Lajo6FBAyhXzvFt\nCyFcg8ckdGCsRXcqyY8O/h1YeXhl3id4iK3Ht/LC4hf47ZHfKF24tFXnFitUjIYVGrL52GY7RSeE\nEMIVnT0Lu3dDaKjZkdiGWcMu3aWXUwhhPx6V0GUuXRBeM5xlB2XYJcDpq6fp+2tfptw3hUYVG+Xr\nGpnz6IQQQohMS5dCx47g52d2JLaRuRado0lCJ4TIi0cldNWqwdGjEBEUwfJDy80Ox3Sp6ak8OvtR\nBjQewEMNH8r3dWQenRBCiOwiI6FHD7OjsB0z1qJLSoJDh6BtW8e2K4RwLR6V0GX20LWo3ILjl45z\n/JIJg+GdyMtLX8bby5t3wt8p0HVC/ENYm7CWdJ1uo8iEEEK4Mq3dZ/5cJjPWolu8GCIiwMepapIL\nIZyNRyZ03l7edK7ZmeXxnttL9+vOX5mzZw4/9/0Zby/vAl2rSokqlC5cmrjTcTaKTgghhCvbtQsK\nFYI6dcyOxHbMGHIpwy2FEJbwqIQuc8glQERghMcmdNtPbGfkwpHMfWQu5YrapmxWiH8I0Udk2KUQ\nQgj3XAg7c8ilo5YuSE+HJUskoRNC5M2jErrMHjowErpl8cvQZiwqY6Jzyefo+2tfPu7xMc0rN7fZ\ndUMDQlmTIIVRhBBCuN/8OYCyZY0E9exZx7QXE2MsVRAQ4Jj2hBCuy6MSuooV4fx5uH4d6pevz420\nGxw8d9DssBwmLT2Nx+Y+xv1172dgk4E2vbb00AkhhAC4ehXWrYPwcLMjsS2lHDvsctEi90uKhRD2\n4VEJnZcXVKkCx46BUorwwHCPGnb5ZtSbXE25yqRuk2x+7QYVGnA2+SxJl5Nsfm0hhBCuY+VKaNEC\nSpY0OxLbc+RadDJ/TghhKY9K6ABq1ICtW42fM4ddeoLf437nh+0/MLP/THy9fW1+fS/lRQf/DtJL\nJ4QQHs6dExFH9dBdvGgMuezUyf5tCSFcn8cldK+9Bs89Z4yBz+yhc/dy+3tO7eHpeU8zu/9sKhar\naLd2QvxlPTohhPB07jh/LpOj1qJbvhzat4eiRe3flhDC9XlcQte9Ozz4IIwYATVK16CkX0l2ndxl\ndlh2c/H6RR789UHe7/o+baq1sWtboQGhrDkihVGEEMJTHT4MZ84YQy7dkaPWonO3NfyEEPblcQkd\nwPvvQ2ws/PKLew+7TNfpDP5tMOGB4QxrMczu7bWu2ppdp3Zx5cYVu7clhBDC+URGQrduxpx1d+S
I\nIZdau3cvpxDC9tz0T+7dFS0KP/4Izz4LTUuGu21C997q9zh19RQf9fjIIe0V8S1Cs0rN2Hh0o0Pa\nE0II4Vzcef4cQOXKcPkyXLpkvzb274eUFGjY0H5tCCHci0cmdACtWhkJ3YwJ4aw+vJrU9FSzQ7Kp\nBfsXMGXzFGb1n0Uh70IOa1fm0QkhhGdKSTHmfnXvbnYk9qOU/StduuOi7EII+/LYhA7gpZcg/VIF\niqbUYPOxzWaHYzMHzh5g6B9DmfnQTKqWqOrQtmUenRBCeKYNG6BmTaMXy53Ze9ilzJ8TQljLoxM6\nHx/4/ns4tzWcn9e7x7DLyzcu8+CvD/JW57cICQhxePsd/DuwPnE9aelpDm9bCCGEedx9uGUme/bQ\nXb8Oq1dD1672ub4Qwj15dEIHxh/mpyIimLZiOdevmx1NwWiteeLPJ2hTtQ3PtH7GlBgqFKtApeKV\n2HXKfSuHCiGEuJOnJHT27KFbswYaNYKyZe1zfSGEe/L4hA5g/JOdSC63gVfeTDY7lAL5YO0HHDx3\nkM/v+xxl4uD7UH8ZdimEEJ7k9GnYuxdCHD8wxOHsuRadpyTFQgjbkoQOKFW4JM2rNmHa0rWsWmV2\nNPkTfSSaD9d/yNyH51LYp7CpsYQESGEUIYR7UUr1UErFKaX2KaVeymF/aaXUXKXUNqXUeqVUwyz7\nSimlZiml9iildiml2mZsf1MplaiU2prxctlC9UuWQOfOUMhxNbhMY8+16GT+nBAiPyShy9CjbgRd\nn1rOkCFw8aLZ0VhvyuYpvNbxNfxL+ZsdihRGEUK4FaWUFzAZuAdoBAxQStXPdtgrQIzWuhkwBPgk\ny76PgQVa6wZAM2BPln0faq1bZrwW2e0m7MyTepb8/eHkSbh2zbbXPXYMEhOhTRvbXlcI4f4kocsQ\nERjBEe9ldO8Oo0ebHY11klOSmb9vPv0a9jM7FADqlK1DckoyCRcSzA5FCCFsIRjYr7U+rLVOAWYA\nvbMd0xBYDqC13gvUVEpVUEqVBDpqradl7EvVWmf92tDli9NrDYsXe05C5+0NNWpAfLxtr7t4sVEM\nxcfHttcVQrg/SegytPdvz65Tu3jjvQtER8OcOWZHZLnIvyNpWaUllYs7R61opZQMuxRCuJNqQNZv\nqBIztmW1DegLoJQKBgKA6kAgcFopNS1jWOVXSqkiWc4bqZSKVUp9o5QqZb9bsJ8dO6BoUWMooqew\nR2EUT+rlFELYliR0GQr7FKZttbZsPb2KH36Af/0Ljh83OyrLzNw1k4cbPWx2GLcJ8Q8h+ogkdEII\nj/E+UEYptRUYAcQAaYAP0BL4TGvdErgKjM0453MgSGvdHEgCPnR41DbgifO+bF0YJS3NmIfoaf8e\nhRC2IR37WUQERrAsfhkf9bif4cNh2DBYsABMLBiZp+SUZBbsX8BHPT4yO5TbhPiH8POOn80OQwgh\nbOEoRo9bpuoZ227SWl8ChmW+V0rFAweBYkCC1npzxq7ZwEsZ55zKcomvgXm5BfDWW2/d/DksLIyw\nsDDr78JOIiPh2WfNjsKxateGfftsd70tW6BSJahe3XbXFEK4hqioKKKiogp0DUnosggPDOfJeU8C\n8PrrRvnlKVOM3jpntfDAQlpXbU3FYhXNDuU2Lau0ZN+ZfVy6fokSfiXMDkcIIQpiE1BbKVUDOA48\nCgzIekDGcMmrWusUpdRTwEqt9WXgslIqQSlVV2u9D4gAdmecU1lrnZRxib7AztwCyJrQOZMrV2Dj\nRujSxexIHKt2beMLX1uJjIQeLlvjVAhRENm/pBs3bpzV15Ahl1m0qtqKhAsJnLh8Al9f+OEHeOMN\nY20dZ+WMwy0B/Hz8aFmlJesT15sdihBCFIjWOg0YCSwGdgEztNZ7lFLDlVJPZxzWANiplNqDUQ0z\na5/VaOAnpVQsRpXL9zK2T1JKbc/Y3hn4Pwfcjk1FRUHr1lD
Cw763s/WQS5k/J4QoCKW1NjsGAJRS\n2hli6T2jNwMaD+DRxo8C8PnnMG0arF0Lvr4mB5fN1ZSrVP1vVfaP2k+FYhXMDucOryx7BV8vX8Z1\nsf6bBiGE+1JKobV24sHszsVZno85GTUKqlWDsWPzPtadXL8OpUrBpUsF/2xw/vytpRCKFMn7eCGE\ne8vPM1J66LIJrxnOsoPLbr7/5z+hfHl45x0Tg8rFwv0LCa4W7JTJHGQURpFKl0II4bY8tWfJzw+q\nVIEjRwp+rWXLjCkekswJIfJLErpsIoIiWH5o+c33SsHUqfDll7DeyUYPztztnMMtM3Xw78DGoxtJ\nTU81OxQhhBA2Fh8PFy5As2ZmR2KOWrXg778Lfh2ZPyeEKChJ6LJpVKERl29c5tD5Qze3ValiDL0c\nNAguXzYvtqyu3LhC5IFIHqz/oNmh5KpMkTIElApgW9I2s0MRQghhY5m9c14e+knCFmvRae25vZxC\nCNvx0D/DuVNKER54+7BLgL59jSERL7xgUmDZLNi/gHbV21GuaDmzQ7krGXYphBDuyRPXn8vKFoVR\n4uKMpK5+fdvEJITwTJLQ5SAi8PZhl5k++QQWL4b5800IKpuZu2fSv2F/s8PIU2hAKGuOrDE7DCGE\nEDaUkmJUuOzWzexIzFO7dsGHXGb2zjnzerdCCOcnCV0OIgIjWB6/nOxVxUqWhO+/h6efNqpRmeXy\njcss/nsxfer3MS8IC4UEGD10zlqhTQghhPXWrTN6qCo61xKoDmWLIZcyf04IYQuS0OUgsEwghX0K\ns/vU7jv2dewIgwcbSZ1ZOcpf+/6ig38Hpx9uCRBYOhCt9W1zEoUQQrg2mfcFQUFGYZj09Pydn5wM\n0dEQEWHbuIQQnkcSulxk9tLlZNw4OHTIqH5phpm7Z/JwQ+etbpmVUupmL50QQgj3sGiR9CwVKwal\nS8OxY/k7f/VqaNLEuIYQQhSEJHS5iAiMYFn8shz3+fnBjz8aC6naomSxNS5dv8TSg0tdYrhlplB/\nmUcnhBDu4uRJ49nXvr3ZkZivIMMupZdTCGErktDloktgF1YeXpnrGmqNG8MrrxjDL1MduMza/H3z\nCQ0IpUyRMo5rtICkh04IIdzHkiUQFga+vmZHYr6CrEUn8+eEELYiCV0uKhevTLUS1Yg5HpPrMc8+\nC4ULw6RJjovLlYZbZmpWqRmHzh/iXPI5s0MRQghRQNKzdEt+e+gSEyEpCVq1sn1MQgjPIwndXdxt\n2CUYi6l+9x189BFs2WL/eC5ev8jy+OX0rt/b/o3ZkK+3L8HVglmXuM7sUIQQQhRAerqxfI8kdIb8\nrkUXGQldu4K3t+1jEkJ4Hkno7iI8MPyuCR2Avz98/DH84x9GxSp7mrd3Hp1qdKJ0YdebQR3iH0L0\nERl2KYQQrmzbNmMJn6AgsyNxDvldi056OYUQtiQJ3V10rtmZ9YnruZ56/a7HDRgAzZvDSy/ZNx5X\nHG6ZKTQglDUJUhhFCCFcmSQit8vsobNmGaO0NFi6FLp3t19cQgjPIgndXZQuXJqGFRpaNFTw88/h\n99+NoSj2cPH6RaIORfFAvQfs04Cdtaveji3HtnAj7YbZoXikC9cusP/Mfvn3L4QoEEnoblemjFEc\n5vRpy8/ZtAmqV4dq1ewXlxDCs/iYHYCzC68ZzrKDywirGXbX48qUgWnTYMgQ2L4dypa1bRx/7v2T\nsJphlCpcyrYXdpCSfiWpU64OW49vpV31dmaH41EW7F/Ak38+iZ+PH8cuHaNSsUoElQkisEwgQaUz\n/lkmiKAyQVQqVgmllNkhCyGc0KVLsHmzUeFS3JJZGKVCBcuOX7RIkmIhhG1JQpeHiKAI3ox6k/GM\nz/vYCOjfH555Bn79FWz5uXjmrpk80ugR213QBJnz6CShc4wrN67w4uIXWXhgIb/0+4XONTuTmp5K\n4sVEDp47SPy5eA6eO8h
f+/+6+fPlG5cJLBNIYOlbSV7mz4FlAileqLjZtyWEMMmKFRAcDMXlz8Bt\nMhM6S9fli4yE8Xl/pBBCCItJQpeHEP8QtiVt49L1S5TwK5Hn8e+9B61bw08/GYVSbOH8tfOsPLyS\nH/v+aJsLmiQ0IJSZu2byAi+YHYrb23h0I4N+G0Tbam3Z9sy2mz27Pl4+1Cxdk5qla0LgneddvnH5\nZnIXf97459KDS4k/H0/8uXiKFyp+q0cvS+9eYOlA/Ev54+Mlf1KEcFcy3DJn1qxFd+4c7NoFoaH2\njUkI4Vnk01ceivgWoU21Nqw6vIr76t6X9/FF4McfjcnOnTpBQEDBY/hz7590qdmFkn4lC34xE4X4\nhzB64Wi01jKsz05S01N5b/V7fLbpMyb3nEz/Rv2tOr94oeI0qdSEJpWa3LFPa82JKyc4eO7gzR6+\n6IRoftz+IwfPHeTElRNUL1mdwNKBDG0+lMeaPmar2xJCOIHISJgzx+wonE/t2pbPn1+6FDp2NNaw\nFUIIW5GEzgIRgREsj19uUUIH0KIFvPCCMZ9u2TJjvbqCmLlrJgObDCzYRZyAfyl/CvsU5sDZA9Qp\nV8fscO6QkpZCj5960LlGZ55v/7zLDS88cPYAg34bRIlCJdj69FaqlbTtjHulFJWLV6Zy8cp08O9w\nx/7rqdc5cuEIsUmxjFw4kv6N+lPIu5BNYxBCmOPAAbhyBZo2NTsS52PNWnQyf04IYQ9S5dICeS0w\nnpMxY4zSxE8+aUwkz69zyedYfWQ199e9P/8XcSKhAaGsOeKcyxf8vONnrqZcZe+ZvdT9tC5TNk0h\nJS3F7LDypLXm6y1f0+6bdgxoPIBF/1hk82TOEn4+ftQpV4f+jfpTv3x9/tr3l8NjEELYR+ZwSxlc\ncSdL16LTWoatCiHsQxI6C7Su2pr48/GcunLK4nO8vWHePOPh16wZREXlr+0/9v5BRGCERfP3XEGI\nfwjRCc63wHhqeirvrn6XCRET+KnvT8wfOJ/f4n6j0eeNmL17NtqaRYYc6OSVk/T5tQ+fb/6clY+v\nZHTb0Xgp8/+3HtZ8GN/GfGt2GEIIG5FEJHcVK8K1a3Dhwt2P270bfHygbl3HxCWE8Bzmf/JzAb7e\nvnQM6EjUoSirzitVCr79Fj75BB57DJ57Dq5eta7tmbtm8nAj11xMPCfO2kM3Y+cMKhevTOcanQFo\nWaUliwct5rN7P+O91e/R7tt2rDy00uQobzd/33yafdGMBuUbsOHJDTSq2MjskG56qOFDRCdEc+zS\nMbNDEUIU0I0bsHIldOtmdiTOSSnLCqNERkKPHtLLKYSwPUnoLJSfYZeZevUy1qY7edKYX7cu73XK\nATibfJbohGh61e2Vr3adUeOKjUm6nGRVb6e9paWn8c6qd3ij8xt3FGvpVqsbm5/ezHNtn2PoH0Pp\n9XMvdpzYYVKkhis3rjB83nBGLRzFrw/9yvtd33e6uWrFChXjoQYP8f22780ORQhRQGvXQr16UL68\n2ZE4r8ylC+5G5s8JIewlz4ROKeWnlNqglIpRSu1SSr2Xy3GfKKX2K6VilVLNs2zvoZSKU0rtU0q9\nZMvgHSk8MDzfCR1AuXLw88/GsgYPPghjx8L163c/5/e43+kW1M3linPcjbeXN+2qt2NtwlqzQ7lp\n1u5ZlCtajojAiBz3eykvBjQZwJ4Re+heqztdf+jK478/zpELRxwcKWxI3EDzL5tzLe0ascNjNPBV\nggAAIABJREFU6VSjk8NjsNQTLZ9gasxUpx2uKoSwjCQiecsrobt61fgyNzzccTEJITxHngmd1vo6\n0EVr3QJoCoQrpUKyHqOU6gnU0lrXAYYDX2Rs9wImA/cAjYABSqn6tr0Fx2hSqQnnr50v8If4fv2M\n3rp9+4z16rZuzf1YdxtumcmZ5tGl63TGrxrPG53u7J3Lzs/Hj9FtR7N/1H78S/rT4ssWj
Fk8hrPJ\nZ+0eZ2p6Km9FvcUDMx5gQsQEpveZfnNtOWfVtlpbfLx8nHKIrRDCcjJ/Lm95DblctQqaNzemYggh\nhK1ZNORSa50588sv45xz2Q7pDXyfcewGoJRSqhIQDOzXWh/WWqcAMzKOdTleyovwwHCWxy8v8LUq\nVjTW8hk71hhPP24cpGQrpnjm6hnWJa7jvjqWLZXgSkICnCehm7N7DsULFad7re4Wn1PSryTjw8ez\n8587uXTjEvUm12NS9CSSU5LtEuP+M/sJmRrCusR1xAyP4aGGD9mlHVtTSjGsxTCmxk41OxQhRD6d\nOAHx8dC2rdmROLe8eugkKRZC2JNFCZ1SykspFQMkAVFa693ZDqkGJGR5n5ixLbftLim8ZsGGXWal\nlFEoJSYGNmyAdu1g585b+3+L+417at1DsULFbNKeM2lbrS2xSbFcS71mahzW9M7lpEqJKnzR6wvW\nDF3DxqMbqTu5LlNjppKWnmaT+LTWfLXlKzpM7cA/mvyDhY8tpGqJqja5tqMMajqI3/b8xqXrBVi7\nwwNprXl56cumDOsVIqvFiyEiAnx9zY7EueW1Ft2iRcYXuEIIYQ+W9tClZwy5rA50Ukp1zuMUt6zh\nFBFkLDBuyzlB1arBX3/BP/8JXbrAxInG+nWzds9yy+GWYBTMaFihIZuPbTY1jj/i/qCQdyHurXNv\nga5Tr3w9Zj88m1n9Z/Fd7Hc0/aIp8/bOK9DvyYnLJ3hgxgN8sfkLVj2+ilFtRznFcgTWqlS8El0C\nuzBz10yzQ3Epiw4sYtLaSby+4nWzQxEeTubPWaZ6dTh7NudK1keOwOnT0LKl4+MSQngGqz4haq0v\nAn8BrbPtOgr4Z3lfPWPbUSAgh+05euutt26+ovK7cJsd1SpTCx8vH/ae2WvT6yplLEC+aZMxLKNt\nl9OsPbK+wImGMwv1N3f5Aq01b696O8fKlvnVrno7Vj6+koldJ/Lyspfp/F1n1iVYWNI0i3l759H8\ny+Y0qdiE9U+up0GFBjaJzyyyJp110nU6Y5eNZVrvaSw6sIidJ3fmfZKTi4qKuu3vu3AN6emwZIkk\ndJbw8oLAQDh48M59kZHGkg9ervednBDCRfjkdYBSqjyQorW+oJQqAnQDxmU77E9gBPCrUqodcF5r\nfUIpdRqorZSqARwHHgUG5NaWsz/olVJGtcuDy6hf3va1XWrWhKVL4bEPf2NHbA++/rwoo0a550Mg\nJCCE6dumm9b+vH1GD9r9de+36XWVUvSq24uetXvy/bbveXj2w7Sp2ob3It7L83fm8o3LPB/5PEsP\nLmVW/1mEBoTaNDaz9KzTk6fnP82eU3tcPjl1hJ93/Ewx32IMajqIM1fP8OryV/nj0T/MDqtAwsLC\nCAsLu/l+3LjsjxDhjGJioGxZqFHD7EhcQ2ZhlMaNb98eGQn32/ZRI4QQt7EkVagCrMiYQ7ce+FNr\nvUwpNVwp9TSA1noBEK+UOgB8CfwrY3saMBJYDOwCZmit99jhPhwmIjCC5YcKXhglN15ecLrSTP47\n9GFmzjRKHMfH260504T4hxB9JJp0ne7wtrXWvL3Str1z2Xl7eTO0xVD2jdxH++rt6TitI8PnDc91\noe31ietp8WULUtJTiH0m1m2SOQAfLx8GNx3MtNhpZofi9K6nXue15a/xftf3UUrxzzb/JDYplugj\nzlFESHiWzIWwhWVyKoySmgrLlkF3y+tuCSGE1SxZtmCH1rql1rqF1rqZ1vqDjO1faq2/ynLcSK11\n7YxjtmbZvkhrXU9rXUdr/b59bsNxwgPDiToUZbPCF9mdunKKTUc3MaxTT1atMhYlDw6Gr74Cd1rO\nq0qJKpQpUoa403EOb3vhgYXcSLtBn/p97N5WEd8ijAkZw96ReylVuBRNpjThteWvceHaBQBS0lJ4\nc8Wb9JnRh4ldJzKt9zRK+pW0e1yONqzFML7f9j0pa
Sl5H+zBpmyeQpNKTW6uL1jYpzDjwsYxdtlY\nWc9POJzMn7NOTgndhg1GD2eVKubEJITwDG44mM++qpaoSsViFYlNirXL9efumUvPOj0p6lsUb294\n8UVYuRK+/hp69oTERLs0a4rMXjpH0lozbuU4Xu/0ukOLjJQtUpZJ3SYRMzyGo5eOUndyXSasnkDI\n1BA2HttIzPAY+jbo67B4HK1e+XrULlubBfsXmB2K07pw7QIT1kxgQsSE27YPajqIs8lnWXhgoUmR\nCU908aIx5LJzXiXQxE05rUUnyxUIIRxBErp8iAiMsMl6dDmZuXsmDze8vbplw4awdi2EhhpVsr7/\n3j1660IDQlmT4NjCKIv/XszlG5fp17CfQ9vNFFAqgGm9p7F00FJ2nNzBkGZDWDBwAVVKuP/Xt7Im\n3d39Z+1/uLfOvTSuePsEHG8vb94Nf5eXl71syhBl4ZmWLzeW0yla1OxIXEdOPXSS0AkhHEESunyI\nCIyw2Xp0WZ24fIItx7bQo/adkxZ8feG114w1gf77X+jTB5KSbB6CQzm6h86s3rmcNKnUhJ/7/cyI\n4BF2m8fnbPo37M+qw6tIuuziv7h2cPzScaZsnsK4sJyLhfSu15siPkWYsXOGgyMTnkrmz1mvRg04\nehRu3DDenzkDcXEQEmJuXEII9ycJXT50rtmZtQlruZF2w6bXnbtnLvfVvY8ivkVyPaZ5c9i40aii\n1awZzHTh5b0aVGjA2eSzDvuAvzx+OWeTz9K/YX+HtCduV8KvBH3r9+WHbT+YHYrTGbdyHMOaDyOg\nVECO+5VSvN/1fV5f8brN/+4IkZ3WMn8uP3x9jfXoDh823i9ZAp06gZ+fuXEJIdyfJHT5ULZIWeqW\nq8uGxA02vW5Owy1z4ucH774Lf/4Jb7wBjzxiLFrqaryUFx38Ozikly6zd+61Tq/h7eVt9/ZEzoa1\nMNakkwIft+w9vZc5e+bwcseX73pcWM0w6pStwzdbv3FQZMJT7d8PKSnQqJHZkbierMMuZbilEMJR\nJKHLJ1sPu0y6nERsUiz31Lb8r3/btsak9erVoWlT+PFH4yHsSkIDHLPA+MrDKzl++TiPNn7U7m2J\n3HXw74BGsy7R+gXX3dWry1/lxfYvUrZI2TyPfS/iPd5Z9Q5XblxxQGTCU0VGGmX2PWQ0uE1lFkbR\n2pgiIQmdEMIRJKHLp/DAcJsWRpmzew696vaisE9hq84rUsSYU/frrzB1qrE4+fjxcOKEzUKzqxD/\nEKIT7N9D9/bKt3mt42v4ePnYvS2RO6UUw5oPY2qMFEcBY/3B9YnrGd12tEXHt6zSkk41OvHxho/t\nHJnwZNKzlH+ZPXQ7d0LhwsZ7IYSwN0no8ik0IJStx7fa7JtyS4db5qZjR6Mq2aJFxtIG9evD4MGw\naZNNwrOb1lVbs+vULrv2OKw+vJrDFw4zsMlAu7UhLDe42WDm7JnD5RuXzQ7FVFprxi4dy7iwcXed\nN5vd+C7j+XDdh5y5esaO0YnslFI9lFJxSql9SqmXcthfWik1Vym1TSm1XinVMMu+UkqpWUqpPUqp\nXUqpthnbyyilFiul9iqlIpVSpRx5Tzm5fh1WrYKuXc2OxDXVqmUkdJlzEKWXUwjhCJLQ5VOxQsVo\nVbUVq4+sLvC1jl86zo4TO+heq3uBr9WkCXz5pTHko2lT6N8f2reHX365VXnLmRTxLUKzSs3YeHSj\n3dp4e9XbvBL6Cr7evnZrQ1iuSokqdAzoyOzds80OxVSLDizixJUTDGk+xKrz6pSrw0MNH2Ji9EQ7\nRWZfaxPWmh2C1ZRSXsBk4B6gETBAKVU/22GvADFa62bAEOCTLPs+BhZorRsAzYA9GdvHAku11vWA\n5cDdJ1I6wJo1xlI55cqZHYlrql3beP5KL6cQwpEkoSuA8Jq2GXY5Z48x3NLPx3alsMqWNRYl//tv\nGDsWvvnGGI759
tvOt9yBPYddrk1Yy/4z+xnUbJBdri/yJ7M4iqdKS0/jpaUvMSFiQr6GAb/R+Q2+\njfmWxIuJdojOfs4mn2XAnAFmh5EfwcB+rfVhrXUKMAPone2YhhhJGVrrvUBNpVQFpVRJoKPWelrG\nvlSt9cWMc3oD0zN+ng70sfN95EkSkYIJCoL4eNiwAbp0MTsaIYSnkISuACKCbFMYZeaumTzcKP/D\nLe/G2xt694Zly4wJ2seOQYMGMGiQsfyBM7BnYZTxq8bzSsdXKORdyC7XF/lzX5372H9mP/vO7DM7\nFFP8vONnihcqTu962XMCy1QtUZWnWj7F2yvftnFk9qO1ZugfQ+nXoJ/ZoeRHNSAhy/vEjG1ZbQP6\nAiilgoEAoDoQCJxWSk1TSm1VSn2llMocY1tRa30CQGudBFS04z1YRBK6gilSBMqXh5YtoWRJs6MR\nQngKqRBRAMHVgtl/Zj9nk89aVKEuJ0cvHmXnyZ10C+pm4+ju1LgxfPEFTJhgFFB59FGoWBFGjTKG\nZhYyKefp4N+BIb8PIS09zaZLCmw8upFdJ3fx+yO/2+yawjZ8vX0Z1HQQ02KmMaHrBLPDcahrqdd4\nfcXr/PDgDwVaVP6lkJeoO7kuL7R/gXrl69kwQvuYvHEyRy8eZeZDM/kf/zM7HHt4H/hYKbUV2AHE\nAGmAL9ASGKG13qyU+ghjqOWbQPZfgFzX83jlFbvEfJvUVEhIgOBg+7flzmrXlqRYCOFYktAVQCHv\nQoQEhBB1KIq+Dfrm6xpz9szhgXoP2HS4ZV7KlIEXXoDnnoO//oJPPzWGZw4fbryqVHFYKABUKFaB\nysUrs/PkTppVbmaz67698m3Gho516L9bYbmhLYbS9fuujA8f71HVR6dsmkLTSk3pWKNjga5TpkgZ\nXmz/Iq+teI1Z/WfZKDr72Hp8K2+vept1T6xz1f8fj2L0uGWqnrHtJq31JWBY5nulVDxwECgGJGit\nN2fsmg1kFlVJUkpV0lqfUEpVBk7mFsCGDW/d/DkwMIygoLB83srdffst+HjO/452MX481K1rdhRC\nCFcRFRVFVFRUga6hnGWBX6WUdpZYrPHB2g+IPxfPZ/d9lq/zQ6eG8krHV7i3zr02jsw6u3fD5Mkw\nYwb07AmjRxvr3DnKE388QauqrfhXm3/Z5Hpbjm2h94ze/D36b1f9AOkR2n/bnlc7vkqvur3MDsUh\nLly7QJ1P67B8yHIaV2xc4OtdTblKnU/r8Psjv9OmWhsbRGh7l65fotVXrRgXNo4BTYz5c0optNYu\nU/9PKeUN7AUigOPARmCA1npPlmNKAVe11ilKqaeAEK314xn7VgJPaa33KaXeBIpqrV9SSk0Ezmqt\nJ2ZUziyjtR6bQ/su+XwUQghhvfw8I2UOXQEVZIHxxIuJ7Dm9h65B5teHbtgQPv8cDh6E1q1h4EBj\n2M2PPxplrO0tJMC2hVHGrxrPSyEvSTLn5J5o8YRHrUk3KXoSver2skkyB1DUtyhvdHqDV5Y7YDxe\nPmit+edf/6RTjU43kzlXpLVOA0YCi4FdwAyt9R6l1HCl1NMZhzUAdiql9mBUw3w2yyVGAz8ppWIx\nqly+l7F9ItBNKZWZLL5v/7sRQgjhbqSHroDSdToV/lOB7c9sp1rJ7HPk7+6j9R+x/cR2pvZ2vg+0\naWmwcCF88gls335rOGbVqvZpb9+ZfXT7oRuHnztc4GvFJsVy70/38vfov61a30s43sXrFwn4XwD7\nRu2jYjHT60HY1bFLx2gypQmxw2PxL+Vvs+umpKXQ8POGTLlvilN8OZTVd7HfMSl6Epuf3kxR36I3\nt7taD53ZXPX5KIQQwnrSQ2cCL+VFl5pd8rV8gT2rWxaUtzf06mVUxlyxAk6fhkaNjJ67devA1p8t\n6pStQ3JKMgkXEvI+OA/jV41nTIcxksy5gJJ+JelTvw8/bv/R7FDsblzUOJ5o8YR
NkzkwCsy80+Ud\nXl72Ms70oT/udBxjloxhZv+ZtyVzQgghhLAtSehsID/DLo9cOMK+M/uICIywU1S206ABfPaZsbZO\ncLCx5EHTpsY8u1mzbLOunVLKJsMud5zYwdqEtQxvPbzgQQmHyFyTzpmSEVuLOx3H3Li5jA29Y3qU\nTfRv1J+09DTm7plrl+tbKzklmUdmP8K74e/abHipEEIIIXImCZ0NhAcaC4xb84F09u7Z9KnfB19v\nXztGZlulSxuVMfftg6++gurV4fvvjfl3derAsGEwbRocOJC/HrwQ/xCijxQsoXtn9Tu80P4F6RFw\nIR0DOnIj7QYbjzrJwoh28OryV3mx/Yv5Xt4kL17KiwkRE3h1+aukpqfapQ1rvLD4BeqXr89TLZ8y\nOxQhhBDC7UlCZwN1y9UlXadz4OwBi89x5uGWefHygvbt4d//hnnzjOGYc+caxVQWL4awMKhWDR5+\n2FgSITbWmJOXl9CAUNYk5H+B8d2ndhN1KIpnWj+T72sIx1NKMaz5MLctjrI+cT0bEjcwuu1ou7bT\nvVZ3qpSowvTY6XZtJy9zds9h0YFFfNXrqwKtsyeEEEIIy0hRFBsZ8vsQ2ldvb1Eycej8Idp83YZj\nzx9zqR46S2kNhw7B6tW3XklJ0KEDdOxovNq0Ab9sBShvpN2g7MSyHH/hOCX8Sljd7sA5A2laqand\nhrUJ+zl68ShNpjQh8flEt+pd1VoTNj2MwU0H80TLJ+ze3vrE9fSf1Z99I/eZMof00PlDBH8dzPyB\n8wmulvvq1FIUxTqu/nwUQghhOSmKYqLwmuEWF0aZvXs2D9Z/0C2TOQClIDAQBg+Gr7+GuDhjmOaT\nT8LJk8awzXLloFMnePVVWLQILl40FmpvWaUl6xPXW91m3Ok4lh5cyog2I+xwR8LeqpWsRnv/9sze\nPdvsUGxq4YGFnLpyiiHNhzikvXbV29G6ams+3/S5Q9rLKiUthQFzBvDvkH/fNZkTQgghhG1JQmcj\nEUERrDi0gnSdnuexM3fNpH/D/g6IynlUrAh9+8L//gebN8Px4/Daa8bwzfffN5ZDaNkSkveG8lXk\nGk6csO76761+j2fbPpuvnj3hHNxtTbq09DTGLh3LhIgJ+Hj5OKzdd8PfZWL0RC5cu+CwNgFeX/E6\nZQqX4fn2zzu0XSGEEMLTSUJnI9VLVqdskbJsP7H9rsfFn4sn/nw8XQK7OCgy51SiBHTvDuPHQ1QU\nnDkDkydD45IhrDgQTf36ULcuPPEETJ0KW7ZAcnLO19p/Zj8LDyxkZPBIh96DsK1edXux+9Ruq+ai\nOrOfdvxECb8SPFDvAYe227BCQ3rV7cUHaz9wWJuRByL5cfuPTO8zHS8ljxUhhBDCkeTJa0OWDLuc\nvXs2fev3deg39q7Az8+YY/fh/3XgRoWNnDiVyuzZRq/dsmVGBc2yZY0kr29feOMNmDkTdu+Gd1a9\nx6jgUZQqXMrs2xAFUMi7EP9o+g++i/3O7FAK7FrqNV5f8ToTu040pTDIW2Fv8fnmzzlx2cqu7nw4\nfuk4j//xOD88+AMVilWwe3tCCCGEuJ0URbGh2btnMy12Gn8N/CvXY9p83Yb3I94nIsj5158zS+PP\nGzO9z3RaVW112/aUFNi/H3buvPWKOXSQQ92CabTsAM3qlaZxY26+atQwhnQK17Hz5E56/NiDw88d\nxtvL2+xw8u3DdR+y8vBK/nj0D9NieD7yeVLSUvj03k/t1kZaehr3/HgPIf4hjOsyzuLzpCiKddzh\n+SiEEMIy+XlGSkJnQ2euniHokyBOjzmdY8GTg+cO0u6bdhx74Zj00N3FM/OfoUH5Bjzb7tk8j33q\nz6coX7gK/cu/zY4dtyd7588ba+Q1acJtiV6lSkbhFuGc2n7Tlrc6v0XPOj3NDiVfzl87T91P67Ji\nyAoaVWxkWhynrpyiwWcN2PjURoLKBNmljXd
XvUvk35EsH7Lcqr9pktBZxx2ej0IIISyTn2ekZBU2\nVK5oOYLKBLHp2CY6+He4Y/+sXbPo16CfJHN5CPEPYd6+eXkmdIfOH2Ju3Fz2j9pP2SLG8Myszp+H\nXbtuJXi//w47dhjJXGZyl5nsNWpkLJwuzDes+TCmxk512YRuUvQk7q97v6nJHECFYhUY3XY0b6x4\ngx/7/mjz6685soZPN37K5qc3y980IYQQwkTSQ2djLy5+kVJ+pXi98+t37Gv1VSs+6PaBxxdEycvB\ncwfpOK0jif+XeNf5R8/Mf4ZyRcrxbsS7Fl9bazhx4vaevJ07jcSvdOlbyV2jRkbvXsOGRgEX4TgX\nrl2gxkc1ODD6AOWLljc7HKscvXiUpl80JXZ4LP6l/M0Oh0vXL1Hn0zpE/iOSZpWb2ey6Z5PP0uLL\nFnx272f0qtvL6vOlh8467vJ8FEIIkTcZcukEFu5fyMToiUQ9HnXb9gNnDxA6NZSjzx916blBjqC1\nptqH1YgeFk1gmcAcj0m4kEDzL5uzd+Rem3zoT0+Hw4dvJXe7dhkFV+LioHx5I7HLTPQaNYIGDSTR\ns6dBvw2iVZVWPNfuObNDscrT856mdOHSTOo2yexQbvpkwycs/nsx8wfOt8n1tNb0+bUPQaWD+F+P\n/+XrGpLQWcddno9CCCHyJgmdE7h84zKVP6jMyTEnKepb9Ob2CasnkHgxkc/u+8zE6FxH/1n96V2v\nN/9o+o8c94/4awTFCxVnYreJdo0jLQ0OHTKSu5wSvcyePEn0bCvqUBSjF45m2zPbTKkSmR9xp+Po\nOK0je0fupWyRsmaHc9P11OvUm1yPHx78gY41Ohb4ep9u+JTp26YTPSwaPx+/fF1DEjrruMvzUQgh\nRN5kDp0TKF6oOM0rNyf6SDTdanW7uX3m7pl8dM9HJkbmWkL9Q1lzZE2OCd3Ri0f5ZecvxI2Ms3sc\n3t5Qq5bxuv/+W9szE73MBG/5cvj0UyPRq1Dh9mGbmf8sXtzu4bqNTjU6cSXlCluOb6F11dZmh2OR\nV5e/ypgOY5wqmQPw8/Hj7S5v8/Kyl1k9dHWBEuStx7fy9qq3WffEunwnc0IIIYSwLSnqbgcRgREs\ni1928/2+M/tIupxEaECoiVG5lpCAEKITonPcNyl6EsNaDKNisYoOjuqWzETvgQdg7Fj44QfYuhUu\nXTKSu2eeMdbNy/y5YkVjGYV774UxY2DaNNi4Ea5dM+0WnJqX8mJo86FMjZlqdigWWZ+4no1HNzIq\neJTZoeTosSaPcf7aef7an/uSKnm5dP0Sj85+lE96fELtsrVtGJ0QQgghCkKGXNrBykMrGbNkDBuf\n2ggYpb2TLifZdT0od5OankrZiWU5/NxhyhQpc3P78UvHafR5I3aP2E3l4pVNjNA6WXv0Mnv1du6E\nffugeXMICYHQUGNx9fKuVQfEbjLnSSb+XyJFfIuYHU6utNZ0/q4zjzd/nGEthpkdTq7+3Psnry5/\nldjhsVbP49VaM/j3wfh5+/HNA98UOBYZcmkdd3o+CiGEuLv8PCOlh84O2lVvR9zpOM5fOw8Ywy0f\nbvSwyVG5Fh8vH9pUa8O6xHW3bf/P2v8wuNlgl0rm4PYevZdfNnr0YmLg5EkYP96Yd/fZZ8YxDRrA\nk0/Cd98ZC6m70uc4reHoUThypODX8i/lT3C1YObumVvwi9nRgv0LOJN8hsHNBpsdyl3dX/d+SvqV\n5Jedv1h97vRt09lybAsf9/jYDpEJIYQQoiBkDp0d+Pn40d6/PVGHoqhfvj6nrpwiJCDE7LBcToh/\nCNFHorm3zr0AnLh8gu9iv2Pnv3aaHJntFCsG4eHGC4yevB07IDoaIiPh9dfhxg2j9y6zF69FC/C9\nc916h0pLg/h42LPH6G3cs8d4xcVBkSJG1dBy5aBnT+PVqRP45WPK1bDmw/hiyxc81vQx29+EDaSl\npzF22Vg
mRExw+rXYlFK8H/E+g38fzMONHqaQdyGLzos7HceYJWNYMWQFxQoVs3OUQgghhLCWDLm0\nk4lrJnL00lEqFK3Aqaun+KTnJ2aH5HIW/72Yd1e/y8rHVwLw7yX/Jjkl2eOGrh45AmvWGEnemjVw\n8CC0bn0rwWvfHkqVsk/b168bw0KzJ24HDhjzAhs2NHoUs77KljUSuq1bYeFC47VzJ3TufCvBC8x5\nNYo720+9TvX/VWfjkxtzXcLCTNNjp/P11q8LXGzEke77+T561u7JyOCReR6bnJJMu2/bMaLNCJ5u\n9bTNYpAhl9Zxt+ejEEKI3MmyBU5k87HNDPl9CF7Kiyn3TZGCKPlw8fpFqv63KmdfOsuFaxeoN7ke\n2/+5neolq5sdmqkuXIB1624leJs3Q1DQrQQvJAQCAsCa/OLiRaN3LTNhy0zeEhKM5KtBg9uTt3r1\njN5FS505A0uWGMndokVQpsztvXeFC+d+7rMLn6V04dKM6zLO8gYd4FrqNep+Wpdf+v3iUj3w25K2\n0eOnHuwftZ/ihe5eevVff/2L01dP8+tDv9o0YZWEzjru9nwUQgiRO0nonEhaehoV/lOBIr5FSPi/\nBLyUTFfMjxZftmDKfVP4I+4Pzl87z5ReU8wOyemkpBjz8TITvOhoY0hm1gSvaVPw8oJTp24lbVkT\nt3PnjCQtM2HLTN5q1YJClo3Ms1h6uhFvZu/djh1GUpeZ4AUF3X78tqRt3P/L/cQ/G291MQ97+u/a\n/7L6yGp+f/R3s0Ox2mNzH6NB+Qa81um1XI+Zs3sOY5aMIWZ4DKUK27YLWBI667jb81EIIUTuJKFz\nMn1/7Yt/SX8+7imFBPJr5IKRlPQryZdbviRmeAwBpQLMDsnpaQ1//337MM1jx8DHx9iXfYhkw4ZG\nj56XSd85nD0Lixffvfeu9VeteS/iPbrX6m5OkNmcv3aeup/WJerxKBpWaGh2OFb7++yRN8HaAAAM\nl0lEQVTftP2mLXEj4yhf9M6yqofOHyL462DmD5xPcLVgm7cvCZ113PH5KIQQImeS0DmZY5eOUbxQ\ncUr6lTQ7FJc1Y+cMBv02iKHNh/LV/V+ZHY7LOnPG6MmrVMm6oZiOllvvXbGwz7lSfhXzH59hdogA\nvLz0ZU5eOcm3vb81O5R8G/HXCAr7FOa/9/z3tu0paSl0+q4T/Rr048UOL9qlbUnorOOOz0chhBA5\nk4ROuJ3Ei4nU/qQ2e0bsccqiGMK+zp415t79HnmOXysHEjTvIL0iytKzp1Fk5W5z7+zl6MWjNJnS\nhG3PbMO/lL9NrpmWZiTcWV9pacaahLYe8prp+KXjNJ7S+I6e77FLx7L9xHbmD5xvt6HiktBZR56P\nQgjhOSShE27pXPK52xYXF55p4JzHqE47SsWNYuFC2L4dOnY05gn6+hq9e1rfemV9b+k+S45bXuwp\nfFPL0vTkxDuSMEteqal3bgPjHrK+vL2NhLZSJaMwTU6vqlULNlT21WWvknQ56WZPY+SBSJ748wli\nhsdQoVgFG/xXy5kkdNaR56MQQngOSeiEEG5r2cFlvLjkRWKGxwBGIZclS2DjRmO/UsbLy+vWz9nf\nF3TfibQ4PrrYkQlV91Har8wdSZg1Lx+f25O3nKSmGpVG4+Nzfp07Z8x/zC3hK1fu7kNsM+cCrnx8\nJaULl6blVy35ue/PdAnsYuP/ereThM468nwUQgjPIQmdEMJtpet0gj4O4rdHfqNFlRYObz8lLYV+\nM/vRMaAjY0LGOLz9nCQnw6FDuSd8aWlQs2buCV/x4vCf6P+wNnEtl65fooN/B97u8rbd45aEzjry\nfBRCCM8hCZ0Qwq2NixrH6aunHbq4fNLlJL7e8jVfbPmC+uXrM3/AfIr4FnFY+wVx/nzuyd6hQ8Za\ngjVqJbO7a21KptWi+/Hl6DQf0tONZDAtjZs/W7rNknOOHJGEzhryfBRCC
M8hCZ0Qwq0dPn+YVl+1\nIvH5RAr72K8iitaa9YnrmbxpMgv2L6B/w/6MaDOCZpWb2a1NR9MaTpwwkruVcTsonFKZUr4V8PY2\nhoB6eXHHz5Zuy2t/zZqS0FlDno9CCOE5JKETQri97j90Z1iLYTza+FGbXzs5JZkZO2cwedNkLly7\nwIg2I3i8+eNSlMfGZMildeT5KIQQnkMSOiGE25uxcwZTY6ayeNBim13z0PlDTNk0hamxUwmuFszI\nNiO5p/Y9divb7+kkobOOPB+FEMJz5OcZKZ9WhBAupU/9Pmw9vpXD5w8X6DrpOp3Ffy/mgV8eoNVX\nrUhNT2XdE+v4a+Bf9KzTU5I5IYQQQrgE6aETQricUQtGUb5oed4Me9Pqcy9cu8D0bdP5bNNnFPYp\nzMg2IxnYZCDFChWzQ6QiJ9JDZx15PgohhOeQIZdCCI8QczyGB399kIPPHrS4J23XyV18tukzftn5\nC91rdWdkm5GEBoSi7rZQm7ALSeisI89HIYTwHPl5RvrYKxghhLCXFlVaUKZIGVbEryAiKCLX41LT\nU/lz759M3jiZPaf38HTLp9n5z51UK1nNgdEKIYQQQtiPJHRCCJc0rPkwpsZOzTGhO3nlJN9s/YYp\nm6dQo1QNRrQZQb+G/SjkXciESIUQQggh7EeGXAohXNLZ5LMEfRxE/LPxN5cV2Hh0I5M3Tmbevnn0\na9CPEW1G0KJKC5MjFdnJkEvryPNRCCE8h8yhE0J4lEdnP0pwtWDKFSnH5E2TOXP1DP9q8y+GNh9K\nuaLlzA5P5EISOuvI81EIITyHXRI6pVR14HugEpAOfK21/iTbMaWBqUAtIBkYprXenbHvEHAh49wU\nrXVwLu3IA0sIYZWlB5fS7Ydu3FPrHkYGj6Rn7Z54e3mbHZbIgysmdEqpHsBHGMv9fKu1nphtv9XP\nQaXUm8BTwMmMy7yitV6UQ9vyfBRCCA9hr3XoUoHntdaNgPbACKVU/WzHvALEaK2bAUOArAlfOhCm\ntW6RWzLnzqKioswOwS7c9b7Afe/NHe+ra1BXfgv+jUX/WESvur3cLplzx/9mrkgp5QVMBu4BGgED\nbPgc/FBr3TLjdUcy587c+ffbXe/NXe8L3Pfe5L48Q54JndY6SWsdm/HzZWAPkL1EXENgecYxe4Ga\nSqkKGfuUJe24K3f9hXPX+wL3vTd3va/YDbFmh2A37vrfzAUFA/u11oe11inADKB3tmPy+xx0qZ5K\nW3Ln3293vTd3vS9w33uT+/IMViVaSqmaQHNgQ7Zd24C+GccEAwFA9Yx9GliilNqklHqqIMEKIYQQ\nJqgGJGR5n8idX2zm9zk4UikVq5T6RilVyvahCyGEcHcWJ3RKqeLAbODZjJ66rN4HyiiltgIjgBgg\nLWNfiNa6JXAvxnDN0IKHLYQQQjiV/DwHPweCtNbNgSTgQwfHLIQQwg1YVOVSKeUDzAcWaq0/tuD4\neKBJ9sQvYwL4Ja31HQ8tpZTM+BZCCA/hSkVRlFLtgLe01j0y3o8FdPbCKNnOseo5qJSqAczTWjfN\n4VryfBRCCA9i7TPS0oXFpwK7c0vmMoaJXNVap2QMJ1mptb6slCoKeGX8XAzoDoyzReBCCCGEg2wC\namckXceBR4EBWQ/Iz3NQKVVZa52UcYm+wM6cGpfnoxBCiLvJM6FTSoUAjwE7lFIxGHMBXgFqYHxD\n+RXQAJiulEoHdgFPZJxeCfgt49tFH+AnrfVi29+GEEIIYR9a6zSl1EhgMbeWLdijlBpOwZ6Dk5RS\nzTGqYB4ChjvspoQQQrgNp1lYXAghhBBCCCGEdUxfTkAp1UMpFaeU2qeUesnseGxBKVVdKbVcKbVL\nKbVDKTXa7JhsTSnlpZTaqpT60+xYbEUpVUopNUsptSfjv11bs2OyBaXUyxn3s10p9ZNSqpDZMeWX\nUupbpdQJpdT2LNvKKKUWK6X2KqUiX
bFSYC73NSnjdzFWKTVHKVXSzBjzK6d7y7LvBaVUulKqrBmx\nuQJ5Rroed3w+gjwjXYE8I12LLZ+PpiZ0yrLFWl2RJYuxu7pngd1mB2FjHwMLtNYNgGYYay66tIw5\nP08BLTKKLfhgzP9xVdMw/l5kNRZYqrWuh7EO2MsOj6rgcrqvxUCjjAqI+3HN+4Kc7w2lVHWgG3DY\n4RG5CHlGuix3fD6CPCNdgTwjXYvNno9m99BZsliry7FwMXaXlfGLdi/wjdmx2ErGNzsdtdbT/r+9\n+3exq4qiOP5dYOGP1DFICGaE9IpF0EpFCAijpVho9A8QLGxikU7SiIVokUYSMVqEQNJYGKxULAw6\nmBRpLBIjjEgiKQR/Lot7EsfgRHnvJmf2YX1g4L1b7cubeevsO+feDWD7d9tXO5c1h6vAr8A9mp5W\nezfwfd+SFmf7U+DKDYefBo6010eAZ25rUTP4t/Oyfdr2n+3tF/w906yUTT4zgDeBV29zOdUkI4sZ\nMR8hGVlFMrKWOfOxd0P3f4a1lqbNh7FXdu0XbaQbMHcDP0p6t22VOSzprt5FLcv2FeAN4AJwCfjJ\n9um+Vc1uu+11mBaKwPbO9dwKLwEf9S5iLpJWgYu2v+ldyxaXjKxnxHyEZGRlychCFs3H3g3d0HTz\nYewlSXoKWG9XV9V+RnAH8BDwdhsA/DPTNoXSJK0ArzA9lfY+YJuk5/pWdcsNtZCS9Brwm+1jvWuZ\nQ1sEHgAObjzcqZzoaLSMHDgfIRk5kmTkFrVMPvZu6C4Buza839mOldf+dX8ceM/2yd71zOhRYFXS\nt8AHwGOSjnauaQ7fMV0R+bK9P84UXtU9DHxm+7LtP4ATwCOda5rbuqR7YZrrBfzQuZ7ZSNrPtH1r\npAXGA8D9wJqm4ds7gTOSRrxqvKxkZC2j5iMkIytLRtaxcD72buiuD2ttTxV6FhjlqVA3HcZele0D\ntnfZXmH6vD6x/XzvupbVtiNclLSnHXqCMW5qPw/slXSnJDGdV/Ub2W+88n0K2N9evwBUXRz+47wk\n7WPaurVq+5duVc3j+rnZPmt7h+0V27uZFooP2h5mkTGjZGQho+YjJCOLSUbWMks+dm3o2tWQa8Na\nzwEf2q7+h7RxGPvjkr5q+8339a4r/tPLwPuSvmZ6gtfrnetZmu014ChwBlhj+tI43LWoJUg6BnwO\n7JF0QdKLwCHgSUnnmcL4UM8aF7HJeb0FbAM+bt8h73QtckGbnNtGZqytabNJRsYWk4zc4pKRtcyZ\njxksHhERERERUVTvLZcRERERERGxoDR0ERERERERRaWhi4iIiIiIKCoNXURERERERFFp6CIiIiIi\nIopKQxcREREREVFUGrqIiIiIiIii0tBFREREREQU9Rev+TCFPnw4HwAAAABJRU5ErkJggg==\n", + "text/plain": [ + "" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], "source": [ - "plt.plot(zip(train_costs, valid_costs))\n", - "plt.legend(['train', 'valid'])" + "fig, (ax1, ax2) = plt.subplots(1, 2, figsize=[15, 7])\n", + "ax1.plot(zip(train_costs, valid_costs))\n", + "ax1.legend(['training cost', 'validation cost'])\n", + "ax2.plot(valid_accs)\n", + "ax2.legend(['validation accuracy'])" ] }, { diff --git a/examples/ladder_nets/ladder_net_layers.py b/examples/ladder_nets/ladder_net_layers.py new file mode 100644 
index 0000000..5101406 --- /dev/null +++ b/examples/ladder_nets/ladder_net_layers.py @@ -0,0 +1,186 @@ +from lasagne.layers import MergeLayer + +import theano +import theano.tensor as T + +import numpy as np + +def _create_milaUDEM_params(shape, name): + values = np.zeros((6,) + shape, dtype=theano.config.floatX) + + b_lin = theano.shared(values[0], name='bias_lin_{}'.format(name)) + b_sigm = theano.shared(values[1], name='bias_sigm_{}'.format(name)) + + w_u_lin = theano.shared(values[2], name='weight_u_lin_{}'.format(name)) + w_u_sigm = theano.shared(values[3], name='weight_u_sigm_{}'.format(name)) + w_zu_lin = theano.shared(values[4], name='weight_zu_lin_{}'.format(name)) + w_zu_sigm = theano.shared(values[5], name='weight_zu_sigm_{}'.format(name)) + + values = np.ones((3,) + shape, dtype=theano.config.floatX) + w_z_lin = theano.shared(values[0], name='weight_z_lin_{}'.format(name)) + w_z_sigm = theano.shared(values[1], name='weight_z_sigm_{}'.format(name)) + w_sigm = theano.shared(values[2], name='weight_sigm_{}'.format(name)) + + # combinator params used in combinator calculations + return [w_u_lin, w_z_lin, w_zu_lin, w_u_sigm, w_z_sigm, + w_zu_sigm, w_sigm, b_lin, b_sigm] + + +def _create_curiousAI_params(shape, name): + values = np.zeros((8,) + shape, dtype=theano.config.floatX) + + b_mu_sig = theano.shared(values[0], name='b_mu_sig_{}'.format(name)) + b_mu_lin = theano.shared(values[1], name='b_mu_lin_{}'.format(name)) + b_v_sig = theano.shared(values[2], name='b_v_sig_{}'.format(name)) + b_v_lin = theano.shared(values[3], name='b_v_lin_{}'.format(name)) + + w_mu_lin = theano.shared(values[4], name='w_mu_lin_{}'.format(name)) + w_v_lin = theano.shared(values[5], name='w_v_lin_{}'.format(name)) + w_mu = theano.shared(values[6], name='w_mu_{}'.format(name)) + w_v = theano.shared(values[7], name='w_v_{}'.format(name)) + + values = np.ones((2,) + shape, dtype=theano.config.floatX) + w_mu_sig = theano.shared(values[0], name='w_mu_sig_{}'.format(name)) + w_v_sig = 
theano.shared(values[1], name='w_v_sig_{}'.format(name)) + + # combinator params used in combinator calculations + return [w_mu_lin, w_v_lin, w_mu_sig, w_v_sig, w_mu, w_v, + b_mu_lin, b_v_lin, b_mu_sig, b_v_sig] + + +def _create_combinator_params(combinator_type, shape, name): + if combinator_type == 'milaUDEM': + return _create_milaUDEM_params(shape, name) + elif combinator_type == 'curiousAI': + return _create_curiousAI_params(shape, name) + + +def _combinator_milaUDEM(z, u, combinator_params, bc_pttrn): + w_u_lin, w_z_lin, w_zu_lin, w_u_sigm, w_z_sigm, w_zu_sigm, w_sigm, \ + b_lin, b_sigm = combinator_params + + lin_out = w_z_lin.dimshuffle(*bc_pttrn) * z + \ + w_u_lin.dimshuffle(*bc_pttrn) * u + \ + w_zu_lin.dimshuffle(*bc_pttrn) * z * u + \ + b_lin.dimshuffle(*bc_pttrn) + + sigm_pre = w_z_sigm.dimshuffle(*bc_pttrn) * z + \ + w_u_sigm.dimshuffle(*bc_pttrn) * u + \ + w_zu_sigm.dimshuffle(*bc_pttrn) * z * u + \ + b_sigm.dimshuffle(*bc_pttrn) + + sigm_out = T.nnet.sigmoid(sigm_pre) + + output = w_sigm.dimshuffle(*bc_pttrn) * sigm_out + lin_out + + return output + + +def _combinator_curiousAI(z, u, combinator_params, bc_pttrn): + w_mu_lin, w_v_lin, w_mu_sig, w_v_sig, w_mu, w_v, \ + b_mu_lin, b_v_lin, b_mu_sig, b_v_sig = combinator_params + + mu_sig_pre = w_mu_sig.dimshuffle(*bc_pttrn) * u + \ + b_mu_sig.dimshuffle(*bc_pttrn) + + mu_lin_out = w_mu_lin.dimshuffle(*bc_pttrn) * u + \ + b_mu_lin.dimshuffle(*bc_pttrn) + + mu_u = w_mu.dimshuffle(*bc_pttrn) * T.nnet.sigmoid(mu_sig_pre) + \ + mu_lin_out + + v_sig_pre = w_v_sig.dimshuffle(*bc_pttrn) * u + \ + b_v_sig.dimshuffle(*bc_pttrn) + + v_lin_out = w_v_lin.dimshuffle(*bc_pttrn) * u + \ + b_v_lin.dimshuffle(*bc_pttrn) + + v_u = w_v * T.nnet.sigmoid(v_sig_pre) + v_lin_out + + output = (z - mu_u) * v_u + mu_u + + return output + + +def _combinator(z, u, combinator_type, combinator_params): + if u.ndim == 2: + bc_pttrn = ('x', 0) + elif u.ndim == 4: + bc_pttrn = ('x', 0, 1, 2) + + if combinator_type == 'milaUDEM': + return 
_combinator_milaUDEM(z, u, combinator_params, bc_pttrn) + elif combinator_type == 'curiousAI': + return _combinator_curiousAI(z, u, combinator_params, bc_pttrn) + + +class CombinatorLayer(MergeLayer): + """ + A layer that combines the terms from dirty and clean encoders, + and outputs denoised variable: + $$ \hat{z} = g(\tilde{z}, u)$$ + """ + def __init__(self, incoming_z, incoming_u, combinator_type, **kwargs): + super(CombinatorLayer, self).__init__( + [incoming_z, incoming_u], **kwargs) + self.combinator_type = combinator_type + z_shp, u_shp = self.input_shapes + + if z_shp != u_shp: + raise ValueError("Mismatch: input shapes must be the same. " + "Got dirty z ({0}) of shape {1} and clean u ({" + "2}) of shape {3}".format(incoming_z.name, z_shp, + incoming_u.name, u_shp)) + + self.combinator_params = _create_combinator_params(combinator_type, + u_shp[1:], + self.name) + + def get_output_shape_for(self, input_shapes): + return input_shapes[0] + + def get_output_for(self, inputs, **kwargs): + z, u = inputs + assert z.ndim == u.ndim + return _combinator(z, u, self.combinator_type, self.combinator_params) + + +class SharedNormLayer(MergeLayer): + """ + A layer that combines the terms from dirty and clean encoders, + and outputs denoised variable: + $$ \hat{z} = g(\tilde{z}, u)$$ + """ + def __init__(self, incoming2stats, incoming2norm, axes='auto', epsilon=1e-4, + **kwargs): + super(SharedNormLayer, self).__init__( + [incoming2stats, incoming2norm], **kwargs) + stats_shp, norm_shp = self.input_shapes + + if stats_shp != norm_shp: + raise ValueError("Mismatch: input shapes must be the same. 
" + "Got dirty z ({0}) of shape {1} and clean u ({" + "2}) of shape {3}" + .format(incoming2stats.name, stats_shp, + incoming2norm.name, norm_shp)) + + if axes == 'auto': + # default: normalize over all but the second axis + axes = (0,) + tuple(range(2, len(stats_shp))) + elif isinstance(axes, int): + axes = (axes,) + self.axes = axes + self.epsilon = epsilon + + def get_output_shape_for(self, input_shapes): + return input_shapes[0] + + def get_output_for(self, inputs, **kwargs): + to_stats, to_norm = inputs + assert to_stats.ndim == to_norm.ndim + + mean = to_stats.mean(self.axes, keepdims=True) + inv_std = T.inv(T.sqrt(to_stats.var(self.axes, + keepdims=True) + self.epsilon)) + + return (to_norm - mean) * inv_std \ No newline at end of file diff --git a/examples/ladder_nets/ladder_nets.py b/examples/ladder_nets/ladder_nets.py index 9ae7c48..18985b1 100644 --- a/examples/ladder_nets/ladder_nets.py +++ b/examples/ladder_nets/ladder_nets.py @@ -5,243 +5,275 @@ from lasagne.layers.special import InverseLayer as unpool from lasagne.layers.special import BiasLayer, ScaleLayer, NonlinearityLayer from lasagne.layers.noise import GaussianNoiseLayer -from lasagne.layers.normalization import BatchNormLayer +from lasagne.layers.normalization import BatchNormLayer, batch_norm from lasagne.nonlinearities import rectify, linear import lasagne from ladder_net_layers import CombinatorLayer, SharedNormLayer -from utils import softmax +from utils import softmax, unzip import theano.tensor as T - from collections import OrderedDict - -def build_encoder(net, num_hidden, activation, name, p_drop_hidden, - convolution, pooling, shared_net): - for i, num_nodes in enumerate(num_hidden): - affine_lname = 'enc_affine_{}'.format(i) - nbatchn_lname = 'enc_batchn_{}_norm'.format(i) - noise_lname = 'enc_noise_{}'.format(i) - lbatchn_lname = 'enc_batchn_{}_learn'.format(i) - - if shared_net is None: - # affine pars - W = lasagne.init.GlorotUniform() - # batchnorm pars - beta = 
lasagne.init.Constant(0) - gamma = None if activation == rectify else lasagne.init.Constant(1) - else: - # affine weights - W = shared_net[affine_lname].get_params()[0] - # batchnorm pars - beta = shared_net[lbatchn_lname + '_beta'].get_params()[0] - gamma = None if activation==rectify else \ - shared_net[lbatchn_lname + '_gamma'].get_params()[0] - - # affine transformation: $W \hat{h}$ - if convolution: - net[affine_lname] = conv(net.values()[-1], - num_filters=num_nodes[0], - filter_size=num_nodes[1], - pad=num_nodes[2], stride=num_nodes[3], - W=W, nonlinearity=linear, - name='{}_{}_{}'.format(name, - affine_lname, 'conv')) - else: - net[affine_lname] = DenseLayer(net.values()[-1], - num_units=num_nodes, - W=W, nonlinearity=linear, - name='{}_{}_{}'.format(name, - affine_lname, 'affine')) - - # 1. batchnormalize without learning -> goes to combinator layer - l_name = '{}_{}'.format(name, nbatchn_lname) - net[nbatchn_lname] = BatchNormLayer(net.values()[-1], alpha=0.1, - name=l_name, beta=None, - gamma=None) - - if shared_net is None: - # for dirty encoder -> add noise - net[noise_lname] = GaussianNoiseLayer(net.values()[-1], - sigma=p_drop_hidden, - name='{}_{}'.format( - name, noise_lname)) - - # 2. 
scaling & offsetting batchnormalization + noise - l_name = '{}_{}'.format(name, lbatchn_lname) - # offset by beta - net[lbatchn_lname + '_beta'] = BiasLayer(net.values()[-1], b=beta, - name=l_name + '_beta') - - if gamma is not None: - # if not rectify, scale by gamma - net[lbatchn_lname + '_gamma'] = ScaleLayer(net.values()[-1], - scales=gamma, - name=l_name+'_gamma') - - if pooling: - pool_name = 'enc_pool_{}'.format(i) - net[pool_name] = pool(net.values()[-1], pool_size=num_nodes[4], - stride=num_nodes[5], - name='{}_{}'.format(name, pool_name)) - +get_items = lambda zipped: unzip(zipped) +xe = T.nnet.categorical_crossentropy + +def build_encoder(net, encoder_specs, activation, name, p_drop_hidden, + shared_net): + # encoder specs is a tuple of string and tuple of integers + for i, (transform, specs) in enumerate(encoder_specs): + if transform == 'unpool': + specs = net.get(specs) + # if specs have already the name of the corresponding pool layer + update = build_enc_layer( + net.values()[-1], name, transform, specs, activation, i, + p_drop_hidden, shared_net + ) + net.update(update) # apply activation - if i < len(num_hidden) - 1: + if i < len(encoder_specs) - 1: act_name = 'enc_activation_{}'.format(i) - net[act_name] = NonlinearityLayer(net.values()[-1], - nonlinearity=activation, - name='{}_{}'.format( - name, act_name)) + net[act_name] = NonlinearityLayer( + net.values()[-1], nonlinearity=activation, + name='{}_{}'.format(name, act_name) + ) # classfication layer activation -> softmax - net['enc_softmax'] = NonlinearityLayer(net.values()[-1], - nonlinearity=softmax, - name='{}_enc_softmax'.format( - name)) + net['enc_softmax'] = NonlinearityLayer( + net.values()[-1], nonlinearity=softmax, name=name+'_enc_softmax' + ) return net['enc_softmax'], net -def build_decoder(dirty_net, clean_net, num_nodes, combinator_type, - convolution, pooling): - L = len(num_nodes) - 1 +def build_enc_layer(incoming, name, transform, specs, activation, i, + p_drop_hidden, 
shared_net): + net = OrderedDict() + lname = 'enc_{}_{}'.format(i, transform if 'pool' in transform else 'affine') + nbatchn_lname = 'enc_batchn_{}_norm'.format(i) + noise_lname = 'enc_noise_{}'.format(i) + lbatchn_lname = 'enc_batchn_{}_learn'.format(i) + + if shared_net is None: + # affine pars + W = lasagne.init.GlorotUniform() + # batchnorm pars + beta = lasagne.init.Constant(0) + gamma = None if activation == rectify else lasagne.init.Constant(1) + else: + # batchnorm pars + beta = shared_net[lbatchn_lname + '_beta'].get_params()[0] + gamma = None if activation == rectify else \ + shared_net[lbatchn_lname + '_gamma'].get_params()[0] + if not isinstance(shared_net[lname], (pool, unpool)): + # affine weights + W = shared_net[lname].get_params()[0] + else: + W = None + + # affine (conv/dense/deconv) or (un)pooling transformation: $W \hat{h}$ + net[lname] = get_transform_layer( + incoming, name+'_'+lname, transform, specs, W + ) + + # 1. batchnormalize without learning -> goes to combinator layer + layer2bn = net.values()[-1] + l_name = '{}_{}'.format(name, nbatchn_lname) + bn_broadcast_cond = layer2bn.output_shape[1] == 1 + if len(layer2bn.output_shape) == 4 and bn_broadcast_cond: + ax = (0, 1, 2, 3) + elif len(layer2bn.output_shape) == 2 and bn_broadcast_cond: + ax = (0, 1) + else: + ax = 'auto' + net[nbatchn_lname] = BatchNormLayer( + layer2bn, axes=ax, alpha=0.1, beta=None, gamma=None, name=l_name + ) + if shared_net is None: + # for dirty encoder -> add noise + net[noise_lname] = GaussianNoiseLayer( + net.values()[-1], sigma=p_drop_hidden, + name='{}_{}'.format(name, noise_lname) + ) + + # 2. 
scaling & offsetting batchnormalization + noise + l_name = '{}_{}'.format(name, lbatchn_lname) + # offset by beta + net[lbatchn_lname + '_beta'] = BiasLayer( + net.values()[-1], b=beta, name=l_name+'_beta' + ) + if gamma is not None: + # if not rectify, scale by gamma + net[lbatchn_lname + '_gamma'] = ScaleLayer( + net.values()[-1], scales=gamma, name=l_name+'_gamma' + ) + + return net + + +def get_transform_layer(incoming, name, transform, specs, W): + if transform == 'conv': + layer = conv( + incoming, num_filters=specs[0], filter_size=specs[1], + stride=specs[2], pad=specs[3], nonlinearity=linear, W=W, b=None, + name=name+'_conv' + ) + elif transform == 'dense': + layer = DenseLayer( + incoming, num_units=specs, nonlinearity=linear, W=W, b=None, + name=name+'_dense' + ) + elif transform == 'pool': + if len(specs) == 4: + psize, pstride = specs[1:3] + else: + psize, pstride = specs + layer = pool( + incoming, pool_size=psize, stride=pstride, name=name + ) + elif transform == 'deconv': + layer = deconv( + incoming, num_filters=specs[0], filter_size=specs[1], + stride=specs[2], crop=specs[3], nonlinearity=linear, W=W, b=None, + name=name+'_deconv' + ) + elif transform == 'unpool': + pl = specs + # print(pl.name, pl.output_shape) + layer = unpool(incoming, pl, name=name) + + return layer + + +def build_dec_layer(incoming, z_l, name, transform, specs, l, + combinator_type, layer2stats=None, last=False): + dirty_net = OrderedDict() + + if l > 0: + # transformation layer: dense, deconv, unpool + lname = 'dec_{}_{}'.format(l, transform if 'pool' in transform + else 'affine') + if transform in ['pool', 'unpool']: + W = None + else: + W = lasagne.init.GlorotUniform() + dirty_net[lname] = get_transform_layer(incoming, name+'_'+lname, + transform, specs, W) + layer2bn = dirty_net.values()[-1] + else: + layer2bn = incoming + + # batchnormalization ... 
u_l + ul_name = 'dec_batchn_u_{}'.format(l) + bn_broadcast_cond = layer2bn.output_shape[1] == 1 + if len(layer2bn.output_shape) == 4 and bn_broadcast_cond: + ax = (0, 1, 2, 3) + elif len(layer2bn.output_shape) == 2 and bn_broadcast_cond: + ax = (0, 1) + else: + ax = 'auto' + dirty_net[ul_name] = BatchNormLayer( + layer2bn, axes=ax, alpha=1., beta=None, gamma=None, + name=name+'_'+ul_name + ) + + # denoised latent \hat{z}_L-i + comb_name = 'dec_combinator_{}'.format(l) + dirty_net[comb_name] = CombinatorLayer( + z_l, dirty_net.values()[-1], combinator_type=combinator_type, + name=name+'_'+comb_name + ) + + if not last: + # batchnormalized latent \hat{z}_L-i^{BN} + layer2norm = dirty_net[comb_name] + bname = 'dec_batchn_z_{}'.format(l) + dirty_net[bname] = SharedNormLayer( + layer2stats, layer2norm, name=name+'_'+bname + ) - # dirty_enc_affine_1 ... z_L - z_L = dirty_net['enc_noise_{}'.format(L)] + return dirty_net - # batchnormalized softmax output .. u_0 without learning bn beta, gamma - dirty_net['u_0'] = BatchNormLayer(dirty_net.values()[-1], beta=None, - gamma=None, name='dec_batchn_softmax') - if pooling: - unpool_name = 'dec_unpool_{}'.format(L) - dirty_net[unpool_name] = unpool(dirty_net.values()[-1], - dirty_net['enc_pool_{}'.format(L)], - name=unpool_name) +def build_decoder(dirty_net, clean_net, name, decoder_specs, combinator_type): + L = len(decoder_specs) - 1 + net = OrderedDict() - # denoised latent \hat{z}_L = g(\tilde{z}_L, u_L) - comb_name = 'dec_combinator_0' - dirty_net[comb_name] = CombinatorLayer(z_L, dirty_net.values()[-1], - combinator_type=combinator_type, - name=comb_name) + # dirty_enc_affine_1 ... 
z_L + z_L = dirty_net['enc_noise_{}'.format(L)] - # batchnormalize denoised latent using clean encoder's bn mean/inv_std without learning + # batchnormalize denoised latent using clean encoder's bn mean/inv_std + # without learning enc_bname = 'enc_batchn_{}_norm'.format(L) - bname = 'dec_batchn_0' + layer2stats = clean_net[enc_bname] - to_stats_l = clean_net[enc_bname] - to_norm_l = dirty_net[comb_name] - dirty_net[bname] = SharedNormLayer(to_stats_l, to_norm_l) + # batchnorm and combinator + update = build_dec_layer( + dirty_net.values()[-1], z_L, name, 'N/A', None, 0, combinator_type, + layer2stats + ) + net.update(update) - for i in range(L): + for i, (transform, specs) in enumerate(decoder_specs[:-1]): # dirty_enc_affine_L-i ... z_l - z_l = dirty_net['enc_noise_{}'.format(i)] - - # affine transformation - d_name = 'dec_affine_{}'.format(L-i) - if convolution: - dirty_net[d_name] = deconv(dirty_net.values()[-1], - num_filters=num_nodes[i][0], - filter_size=num_nodes[i][1], - crop=num_nodes[i][2], - stride=num_nodes[i][3], - nonlinearity=linear, name=d_name + - '_conv') - else: - dirty_net[d_name] = DenseLayer(dirty_net.values()[-1], - num_units=num_nodes[i], - nonlinearity=linear, - name=d_name+'_affine') - - # batchnormalization ... 
u_l\ - ul_name = 'u_{}'.format(i+1) - dirty_net[ul_name] = BatchNormLayer(dirty_net.values()[-1], alpha=1., - beta=None, gamma=None, - name='dec_batchn_affine_' - '{}'.format(L-i)) - - if pooling: - unpool_name = 'dec_unpool_{}'.format(L-i-1) - dirty_net[unpool_name] = unpool(dirty_net.values()[-1], - dirty_net['enc_pool_{}' - ''.format(L-i-1)], - name=unpool_name) - - # denoised latent \hat{z}_L-i - comb_name = 'dec_combinator_{}'.format(i+1) - dirty_net[comb_name] = CombinatorLayer(z_l, dirty_net.values()[-1], - combinator_type=combinator_type, - name=comb_name) - - # batchnormalized latent \hat{z}_L-i^{BN} + z_l = dirty_net['enc_noise_{}'.format(L-i-1)] enc_bname = 'enc_batchn_{}_norm'.format(L-i-1) - bname = 'dec_batchn_{}'.format(L-i) + layer2stats = clean_net[enc_bname] - to_stats_l = clean_net[enc_bname] - to_norm_l = dirty_net[comb_name] - dirty_net[bname] = SharedNormLayer(to_stats_l, to_norm_l) + if transform == 'unpool': + # print(dirty_net.keys(), specs) + specs = dirty_net.get(specs) + update = build_dec_layer( + net.values()[-1], z_l, name, transform, specs, i+1, + combinator_type, layer2stats + ) + net.update(update) # corrupted input ... z_0 - z_0 = dirty_net['inp_corr'] - - # affine transformation - d_name = 'dec_affine_{}'.format(L+1) - if convolution: - dirty_net[d_name] = deconv(dirty_net.values()[-1], - num_filters=num_nodes[i+1][0], - filter_size=num_nodes[i+1][1], - crop=num_nodes[i+1][2], - stride=num_nodes[i+1][3], - nonlinearity=linear,name=d_name+'_conv') - else: - dirty_net[d_name] = DenseLayer(dirty_net.values()[-1], - nonlinearity=linear, name=d_name, - num_units=num_nodes[i+1]) - - # batchnormalization ... 
u_L - dirty_net['u_{}'.format(L+1)] = BatchNormLayer(dirty_net.values()[-1], alpha=1., - beta=None, gamma=None) + z_0 = dirty_net['input_corr'] + transform, specs = decoder_specs[-1] - # denoised input reconstruction - comb_name = 'dec_combinator_{}'.format(L+1) - dirty_net[comb_name] = CombinatorLayer(z_0, dirty_net['u_{}'.format(L+1)], - name=comb_name, - combinator_type=combinator_type) + if transform == 'unpool': + specs = dirty_net.get(specs) + update = build_dec_layer( + net.values()[-1], z_0, name, transform, specs, i+2, + combinator_type, None, True + ) + net.update(update) - return dirty_net + return net -def build_model(num_encoder, num_decoder, p_drop_input, p_drop_hidden, +def build_model(encoder_specs, decoder_specs, p_drop_input, p_drop_hidden, input_shape, batch_size=None, activation=rectify, - combinator_type='MILAudem', convolution=False, - pooling=False): + combinator_type='MILAudem'): net = OrderedDict() - net['input'] = InputLayer((batch_size, ) + tuple(input_shape), # inp_size), - name='input') + net['input'] = InputLayer( + (batch_size, ) + tuple(input_shape), name='input' + ) # corrupted input - net['inp_corr'] = GaussianNoiseLayer(net['input'], sigma=p_drop_input, - name='input_corr') + net['input_corr'] = GaussianNoiseLayer( + net['input'], sigma=p_drop_input, name='input_corr' + ) # dirty encoder - train_output_l, dirty_encoder = build_encoder(net, num_encoder, - activation, 'dirty', - p_drop_hidden, - convolution, pooling, - None) + train_output_l, dirty_encoder = build_encoder( + net, encoder_specs, activation, 'dirty', p_drop_hidden, None + ) # clean encoder - clean_net = OrderedDict(net.items()[:1]) - eval_output_l, clean_net = build_encoder(clean_net, num_encoder, - activation, 'clean', 0., - convolution, pooling, - shared_net=dirty_encoder) + clean_encoder = OrderedDict(net.items()[:1]) + eval_output_l, clean_net = build_encoder( + clean_encoder, encoder_specs, activation, 'clean', 0., dirty_encoder + ) # dirty decoder - dirty_net = 
build_decoder(dirty_encoder, clean_net, num_decoder, - combinator_type, convolution, pooling) + dirty_decoder = build_decoder( + dirty_encoder, clean_net, 'dirty', decoder_specs, combinator_type + ) - return [train_output_l, eval_output_l], dirty_net, clean_net + return (train_output_l, eval_output_l, dirty_encoder, dirty_decoder, + clean_encoder) def get_mu_sigma_costs(hid): @@ -255,13 +287,11 @@ def get_mu_sigma_costs(hid): return C_mu, C_sigma.sum() # trace(C_sigma) -def build_cost(X, y, num_decoder, dirty_net, clean_net, output_train, - lambdas, use_extra_costs=False, alphas=None, betas=None, - num_labeled=None, pseudo_labels=None): - xe = T.nnet.categorical_crossentropy +def build_costNstats(y_onehot, output_train, output_eval, num_labeled=None, + pseudo_labels=None): pred = T.clip(output_train, 1e-15, 1) N = num_labeled if num_labeled else pred.shape[0] - class_cost = xe(pred[:N], y[:N]).mean() + class_cost = xe(pred[:N], y_onehot[:N]).mean() if pseudo_labels == 'soft': n = 0 if num_labeled else N @@ -272,7 +302,16 @@ def build_cost(X, y, num_decoder, dirty_net, clean_net, output_train, pseudo_target = T.eye(M)[pred[n:].argmax(axis=1)] class_cost += xe(pred[n:], pseudo_target).mean() - L = len(num_decoder) + pred = T.argmax(output_eval[:N], axis=1) + y = T.argmax(y_onehot[:N], axis=1) + accuracy = T.mean(T.eq(pred, y), dtype='float32') + + return class_cost, [accuracy] + + +def build_rec_costs(X, clean_net, dirty_net, decoder_specs, lambdas, + alphas=None, betas=None, use_extra_costs=False): + L = len(decoder_specs) # get clean and corresponding dirty latent layer output z_clean_l = clean_net['input'] @@ -286,11 +325,14 @@ def build_cost(X, y, num_decoder, dirty_net, clean_net, output_train, if use_extra_costs: C_mu, C_sigma = get_mu_sigma_costs(z_clean) cost += alphas[L] * C_mu + betas[L] * C_sigma + rec_costs = [cost] - for l in range(L): + dec_batchns = [x for x in dirty_net.keys() if 'dec_batchn_z' in x][::-1] + + for l, name in enumerate(dec_batchns): 
z_clean_l = clean_net['enc_batchn_{}_norm'.format(l)] - z_dirty_l = dirty_net['dec_batchn_{}'.format(L-l-1)] + z_dirty_l = dirty_net[name] z_clean = lasagne.layers.get_output(z_clean_l, X, deterministic=False) z_dirty = lasagne.layers.get_output(z_dirty_l, X, deterministic=False) @@ -299,6 +341,7 @@ def build_cost(X, y, num_decoder, dirty_net, clean_net, output_train, if use_extra_costs: C_mu, C_sigma = get_mu_sigma_costs(z_clean) cost += alphas[l] * C_mu + betas[l] * C_sigma + rec_costs.append(cost) - return class_cost, rec_costs \ No newline at end of file + return rec_costs diff --git a/examples/ladder_nets/train_ladder_nets.py b/examples/ladder_nets/train_ladder_nets.py index 934c821..04b3bd4 100644 --- a/examples/ladder_nets/train_ladder_nets.py +++ b/examples/ladder_nets/train_ladder_nets.py @@ -1,7 +1,7 @@ +from __future__ import print_function from utils import load_data from ladder_nets import * import time -import theano.misc.pkl_utils import lasagne import cPickle import numpy as np @@ -11,44 +11,59 @@ BATCH_SIZE = 100 INPUT_SHAPE = [1, 28, 28] NUM_EPOCHS = 15 -COMBINATOR_TYPE = 'milaUDEM' -LAMBDAS = [1, 1, 1] +COMBINATOR_TYPE = 'milaUDEM' # or 'curiousAI' DROPOUT = 0.3 EXTRA_COST = False # True -ALPHAS = None, # [0.1]*3 -BETAS = None, # [0.1]*3 +ALPHAS = None # [0.1]*3 +BETAS = None # [0.1]*3 NUM_LABELED = None PSEUDO_LABELS = None CONV = True # False POOL = True # False -print "Loading data..." 
+print ("Loading data...") dataset = load_data() -# build model -if CONV: - input_shape = INPUT_SHAPE - if POOL: - num_encoder = [[40, 8, 0, 1, 2, 2], [10, 8, 0, 1, 2, 2]] - num_decoder = [[40, 8, 0, 1, 2, 2], [1, 8, 0, 1, 2, 2]] +def get_encoder_settings(convolution, pooling): + if convolution and pooling: + settings = [('conv', (40, 8, 1, 0)), ('pool', (0, 2, 2, 0)), + ('conv', (10, 8, 1, 0)), ('pool', (0, 2, 2, 0))] + elif convolution: + settings = [('conv', (40, 15, 1, 0)), ('conv', (10, 14, 1, 0))] else: - num_encoder = [[40, 15, 0, 1], [10, 14, 0, 1]] - num_decoder = [[40, 14, 0, 1], [1, 15, 0, 1]] -else: - input_shape = np.prod(INPUT_SHAPE) - num_encoder = [500, 10] - num_decoder = [500, input_shape] + settings = [('dense', 500), ('dense', 10)] + + return settings + +def get_decoder_settings(convolution, pooling): + if convolution and pooling: + settings = [('unpool', 'enc_3_pool'), ('deconv', (40, 8, 1, 0)), + ('unpool', 'enc_1_pool'), ('deconv', (1, 8, 1, 0))] + elif convolution: + settings = [('deconv', (40, 14, 1, 0)), ('deconv', (1, 15, 1, 0))] + else: + settings = [('dense', 10), ('dense', 784)] + + return settings -print "Building model and compiling functions..." 
-[train_output_l, eval_output_l], dirty_net, clean_net = build_model( - num_encoder, num_decoder, DROPOUT, DROPOUT, input_shape=input_shape, - combinator_type=COMBINATOR_TYPE, convolution=CONV, pooling=POOL) +# build model +encoder_specs = get_encoder_settings(convolution=CONV, pooling=POOL) +decoder_specs = get_decoder_settings(convolution=CONV, pooling=POOL) +LAMBDAS = [1] * (len(decoder_specs) + 1) +input_shape = INPUT_SHAPE if CONV else np.prod(INPUT_SHAPE) + +print ("Building model ...") +train_output_l, eval_output_l, dirty_encoder, dirty_decoder, clean_encoder = \ + build_model(encoder_specs, decoder_specs, DROPOUT, DROPOUT, + input_shape=input_shape, combinator_type=COMBINATOR_TYPE) -print map(lambda x: (x.name, x.output_shape), dirty_net.values()) +print (map(lambda x: (x.name, x.output_shape), dirty_encoder.values())) +print (map(lambda x: (x.name, x.output_shape), dirty_decoder.values())) # set up input/output variables -X = T.fmatrix('X') if not CONV else T.ftensor4('x') +X = T.ftensor4('x') if CONV else T.fmatrix('X') y = T.ivector('y') +y_onehot = lasagne.utils.one_hot(y, 10) # training output output_train = lasagne.layers.get_output(train_output_l, X, @@ -61,12 +76,14 @@ # set up (possibly amortizable) lr, cost and updates sh_lr = theano.shared(lasagne.utils.floatX(LEARNING_RATE)) -class_cost, rec_costs = build_cost(X, lasagne.utils.one_hot(y), num_decoder, - dirty_net, clean_net, output_train, - LAMBDAS, use_extra_costs=EXTRA_COST, - alphas=ALPHAS, betas=BETAS, - num_labeled=NUM_LABELED, - pseudo_labels=PSEUDO_LABELS) +print ("Building costs and updates ...") +class_cost, stats = build_costNstats(y_onehot, output_train, output_eval, + NUM_LABELED, PSEUDO_LABELS) + +rec_costs = build_rec_costs(X, clean_encoder, dirty_decoder, decoder_specs, + lambdas=LAMBDAS, alphas=ALPHAS, betas=BETAS, + use_extra_costs=EXTRA_COST) + cost = class_cost + T.sum(rec_costs) net_params = lasagne.layers.get_all_params(train_output_l, trainable=True) updates = 
lasagne.updates.adam(cost, net_params, learning_rate=sh_lr) @@ -75,9 +92,7 @@ batch_index = T.iscalar('batch_index') batch_slice = slice(batch_index * BATCH_SIZE, (batch_index + 1) * BATCH_SIZE) -pred = T.argmax(output_eval, axis=1) -accuracy = T.mean(T.eq(pred, y[:NUM_LABELED]), dtype=theano.config.floatX) - +print ("Compiling functions...") train = theano.function([batch_index], [cost] + rec_costs, updates=updates, givens={ X: dataset['X_train'][batch_slice].reshape( @@ -86,7 +101,7 @@ y: dataset['y_train'][batch_slice], }) -eval = theano.function([batch_index], [cost, accuracy], givens={ +eval = theano.function([batch_index], [cost] + stats, givens={ X: dataset['X_valid'][batch_slice].reshape( (-1,) + tuple(input_shape) ), @@ -95,8 +110,8 @@ network_dump = {'train_output_layer': train_output_l, 'eval_output_layer': eval_output_l, - 'dirty_net': dirty_net, - 'clean_net': clean_net, + 'dirty_net': dirty_decoder, + 'clean_net': clean_encoder, 'x': X, 'y': y, 'output_eval': output_eval @@ -138,7 +153,7 @@ def eval_epoch(): train_costs, valid_costs, valid_accs = [], [], [] -print "Starting training..." 
+print ("Starting training...") now = time.time() try: @@ -150,17 +165,17 @@ def eval_epoch(): valid_costs.append(eval_cost) valid_accs.append(acc) - print "Epoch %d took %.3f s" % (n + 1, time.time() - now) + print ("Epoch %d took %.3f s" % (n + 1, time.time() - now)) now = time.time() - print "Train cost {}, val cost {}, val acc {}".format(train_costs[-1], - valid_costs[-1], - valid_accs[-1]) - print '\n'.join(['Layer #{} rec cost: {}'.format(i, c) for i, c - in enumerate(rec_costs)]) + print ("Train cost {}, val cost {}, val acc {}".format(train_costs[-1], + valid_costs[-1], + valid_accs[-1])) + print ('\n'.join(['Layer #{} rec cost: {}'.format(i, c) for i, c + in enumerate(rec_costs)])) if (n+1) % 10 == 0: new_lr = sh_lr.get_value() * LR_DECREASE - print "New LR:", new_lr + print ("New LR:", new_lr) sh_lr.set_value(lasagne.utils.floatX(new_lr)) except KeyboardInterrupt: pass @@ -170,9 +185,9 @@ def eval_epoch(): # zip(train_cost, valid_cost)) # uncomment if to save the params only -# save_dump('final_epoch_{}_ladder_net_mnist'.format(n), +# save_dump('final_epoch_{}_ladder_net_mnist.pkl'.format(n), # lasagne.layers.get_all_param_values(output_layer)) # uncomment if to save the whole network -# theano.misc.pkl_utils.dump(network_dump, -# 'final_epoch_{}_ladder_net_mnist.pkl'.format(n)) \ No newline at end of file +# save_dump('final_epoch_{}_ladder_net_mnist.pkl'.format(n), +# network_dump) \ No newline at end of file diff --git a/examples/ladder_nets/utils.py b/examples/ladder_nets/utils.py index ab49253..8e039a3 100644 --- a/examples/ladder_nets/utils.py +++ b/examples/ladder_nets/utils.py @@ -1,34 +1,51 @@ +import numpy as np import gzip import cPickle as pickle -import theano -import theano.tensor as T + import lasagne +import theano.misc.pkl_utils +import theano.tensor as T def pickle_load(f, encoding): return pickle.load(f) -def load_data(): +def load_data(shared_var=True): """Get data with labels, split into training, validation and test set.""" - with 
gzip.open('mnist.pkl.gz', 'rb') as f: + with gzip.open('./mnist.pkl.gz', 'rb') as f: data = pickle_load(f, encoding='latin-1') X_train, y_train = data[0] X_valid, y_valid = data[1] X_test, y_test = data[2] - return dict( - X_train=theano.shared(lasagne.utils.floatX(X_train)), - y_train=T.cast(theano.shared(y_train), 'int32'), - X_valid=theano.shared(lasagne.utils.floatX(X_valid)), - y_valid=T.cast(theano.shared(y_valid), 'int32'), - X_test=theano.shared(lasagne.utils.floatX(X_test)), - y_test=T.cast(theano.shared(y_test), 'int32'), - num_examples_train=X_train.shape[0], - num_examples_valid=X_valid.shape[0], - num_examples_test=X_test.shape[0], - input_dim=X_train.shape[1], - output_dim=10, - ) + if shared_var: + return dict( + X_train=theano.shared(lasagne.utils.floatX(X_train)), + y_train=T.cast(theano.shared(y_train), 'int32'), + X_valid=theano.shared(lasagne.utils.floatX(X_valid)), + y_valid=T.cast(theano.shared(y_valid), 'int32'), + X_test=theano.shared(lasagne.utils.floatX(X_test)), + y_test=T.cast(theano.shared(y_test), 'int32'), + num_examples_train=X_train.shape[0], + num_examples_valid=X_valid.shape[0], + num_examples_test=X_test.shape[0], + input_dim=X_train.shape[1], + output_dim=10, + ) + else: + return dict( + X_train=np.float32(X_train), + y_train=np.int32(y_train), + X_valid=np.float32(X_valid), + y_valid=np.int32(y_valid), + X_test=np.float32(X_test), + y_test=np.int32(y_test), + num_examples_train=X_train.shape[0], + num_examples_valid=X_valid.shape[0], + num_examples_test=X_test.shape[0], + input_dim=X_train.shape[1], + output_dim=10, + ) def softmax(vec, axis=1): @@ -38,4 +55,6 @@ def softmax(vec, axis=1): """ xdev = vec - vec.max(axis, keepdims=True) rval = T.exp(xdev)/(T.exp(xdev).sum(axis, keepdims=True)) - return rval \ No newline at end of file + return rval + +unzip = lambda zipped: zip(*zipped) \ No newline at end of file