diff --git a/Scripts/Code/gif_maker.py b/Scripts/Code/gif_maker.py
index 1428039b3d37b7acbb6cdc845ae37b03c365687c..009a5f53640f99959743fe4fde09a058c837c3e1 100644
--- a/Scripts/Code/gif_maker.py
+++ b/Scripts/Code/gif_maker.py
@@ -1,11 +1,39 @@
 import imageio
+import PIL.Image
+import numpy as np
+
+frames = 200
+time_steps = 6
 
 images_in = []
 images_out = []
+images_pred = []
+images_combi = []
 
-for number = 10 to 109:
+for number in range(frames):
     images_in.append(imageio.imread("in_" + str(number) + ".png"))
     images_out.append(imageio.imread("out_" + str(number) + ".png"))
 
 imageio.mimsave("./in.gif", images_in)
-imageio.mimsave("./out.gif", images_out)
\ No newline at end of file
+imageio.mimsave("./out.gif", images_out)
+
+for number in range(time_steps, frames):
+    images_pred.append(imageio.imread("pred_" + str(number) + ".png"))
+
+imageio.mimsave("./pred.gif", images_pred)
+
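+# Combine the in_, out_ and pred_ frames side by side: all three images are
+# resized to a common size and stacked horizontally.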
+for number in range(time_steps, frames):
+    list_im = ["in_" + str(number) + ".png", "out_" + str(number) + ".png", "pred_" + str(number) + ".png"]
+    imgs = [PIL.Image.open(i) for i in list_im]
+
+    # Pick the smallest image size (smallest width + height) as the common size.
+    min_shape = sorted([(np.sum(i.size), i.size) for i in imgs])[0][1]
+    imgs_comb = np.hstack([np.asarray(i.resize(min_shape)) for i in imgs])
+
+    imgs_comb = PIL.Image.fromarray(imgs_comb)
+    imgs_comb.save("combi_" + str(number) + ".png")
+
+for number in range(time_steps, frames):
+    images_combi.append(imageio.imread("combi_" + str(number) + ".png"))
+
+imageio.mimsave("./combi.gif", images_combi)
\ No newline at end of file
diff --git a/Scripts/Notebooks/Carga de datos.ipynb b/Scripts/Notebooks/Carga de datos.ipynb
new file mode 100644
index 0000000000000000000000000000000000000000..ae6a923a1ec950fffc031468be6f57823458bd3a
--- /dev/null
+++ b/Scripts/Notebooks/Carga de datos.ipynb	
@@ -0,0 +1,293 @@
+{
+ "cells": [
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "# Librerías"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "import os\n",
+    "import sys\n",
+    "import tensorflow as tf\n",
+    "import numpy as np\n",
+    "import scipy.misc\n",
+    "import matplotlib.pyplot as plt"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "sys.path.append(\"../tools\")  # Herramientas propias de MantaFlow\n",
+    "import uniio  # Lectura de ficheros .uni"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "# Hiperparámetros"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "num_sims = 2000  # num_sims - 1000 escenas. \n",
+    "frames = 200  # Frames por escena.\n",
+    "\n",
+    "epochs_autoencoder = 5\n",
+    "epochs_lstm = 50\n",
+    "epochs_pretraining = 1\n",
+    "\n",
+    "batch_size_autoencoder = 4\n",
+    "batch_size_lstm = 16\n",
+    "\n",
+    "time_steps_lstm = 6\n",
+    "out_time_steps_lstm = 1\n",
+    "\n",
+    "save_autoencoder = True\n",
+    "save_lstm = True"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "# Datos iniciales"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Inicializamos las seed para funciones random. Al ser inicializadas al mismo número, el resultado no cambiará en cada ejecución."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "np.random.seed(13)"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Ruta a los datos de simulación, donde también se guardan los resultados."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "base_path = \"../data\""
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "## Carga de datos"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Podemos elegir el número de escenas y los frames de cada una, dependiendo de la configuración de los simuladores clásicos."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "print(\"Cargamos {} escenas, con {} frames cada una.\".format(num_sims-1000, frames))\n",
+    "print(\"Trabajamos con un total de {} frames.\".format((num_sims-1000) * frames))"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Cargamos los datos desde los ficheros .uni en arrays de numpy. Los .uni son ficheros propios de MantaFlow, en los que se guarda los resultados de los simuladores clásicos. En este caso cargamos los datos de densidad de humo simulados previamente."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "densities = []\n",
+    "\n",
+    "for sim in range(1000, num_sims):\n",
+    "    \n",
+    "    if os.path.exists(\"%s/simSimple_%04d\" % (base_path, sim)):  # Comprueba la existencia de las carpetas (cada una 100 frames de datos).\n",
+    "        \n",
+    "        for i in range(0, frames):\n",
+    "            \n",
+    "            filename = \"%s/simSimple_%04d/density_%04d.uni\"  # Nombre de cada frame (densidad).\n",
+    "            uni_path = filename % (base_path, sim, i)  # 200 frames por sim, rellena parametros de la ruta.\n",
+    "            header, content = uniio.readUni(uni_path)  # Devuelve un array Numpy [Z, Y, X, C].\n",
+    "            \n",
+    "            h = header[\"dimX\"]\n",
+    "            w = header[\"dimY\"]\n",
+    "            \n",
+    "            arr = content[:, ::-1, :, :]  # Cambia el orden de Y.\n",
+    "            arr = np.reshape(arr, [w, h, 1])  # Deshecha Z.\n",
+    "            \n",
+    "            densities.append(arr)"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Devuelve los datos de cada frame (canal de grises, 0 a 255) en una lista de Python. En este caso las imagenes son de 64x64 pixels. (64, 64, 1)"
+   ]
+  },
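+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "As a quick sanity check (illustrative; not part of the original pipeline), we can print the shape and value range of the first loaded frame:"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# Illustrative check: shape and value range of the first loaded frame.\n",
+    "print(densities[0].shape)\n",
+    "print(densities[0].min(), densities[0].max())"
+   ]
+  },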
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Necesitamos al menos 2 simulaciones para trabajar de manera adecuada."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "load_num = len(densities)\n",
+    "\n",
+    "if load_num < 2 * frames:\n",
+    "    \n",
+    "    print(\"Error - Usa al menos dos simulaciones completas\")\n",
+    "    \n",
+    "    exit(True)"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Convertimos la lista \"densities\" en un array de Numpy."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "densities = np.reshape(densities, (len(densities), 64, 64, 1))\n",
+    "\n",
+    "print(\"Forma del array: {}\".format(densities.shape))\n",
+    "print(\"Dimensiones del array: {}\".format(densities.ndim))\n",
+    "print(\"Número de pixels en total: {}\".format(densities.size))"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "## Creación del set de validación"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Con el fin de entrenar correctamente a los modelos Deep Learning, separamos los datos de densidad en un set de entrenamiento y otro de validación. Creamos el set de validación de entre los datos de simulación generados, al menos una simulación completa o el 10% de los datos (el que sea mayor de los dos)."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "vali_set_size = max(200, int(load_num * 0.1))  # Al menos una simu completa o el 10% de los datos.\n",
+    "\n",
+    "vali_data = densities[load_num - vali_set_size : load_num, :]  # \"load_num\" datos del final de \"densities\".\n",
+    "train_data = densities[0 : load_num - vali_set_size, :]  # El resto de datos del principio de \"densities\".\n",
+    "\n",
+    "print(\"Separamos en {} frames de entrenamiento y {} frames de validación.\".format(train_data.shape[0], vali_data.shape[0]))"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Convertimos los datos de entrenamiento y validación en arrays."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "train_data = np.reshape(train_data, (len(train_data), 64, 64, 1))\n",
+    "vali_data = np.reshape(vali_data, (len(vali_data), 64, 64, 1))\n",
+    "\n",
+    "print(\"Forma del set de entrenamiento: {}\".format(train_data.shape))\n",
+    "print(\"Forma del set de validación: {}\".format(vali_data.shape))"
+   ]
+  }
+ ],
+ "metadata": {
+  "kernelspec": {
+   "display_name": "Python 3",
+   "language": "python",
+   "name": "python3"
+  },
+  "language_info": {
+   "codemirror_mode": {
+    "name": "ipython",
+    "version": 3
+   },
+   "file_extension": ".py",
+   "mimetype": "text/x-python",
+   "name": "python",
+   "nbconvert_exporter": "python",
+   "pygments_lexer": "ipython3",
+   "version": "3.6.8"
+  },
+  "toc": {
+   "base_numbering": 1,
+   "nav_menu": {},
+   "number_sections": true,
+   "sideBar": true,
+   "skip_h1_title": false,
+   "title_cell": "Table of Contents",
+   "title_sidebar": "Contents",
+   "toc_cell": false,
+   "toc_position": {},
+   "toc_section_display": true,
+   "toc_window_display": false
+  }
+ },
+ "nbformat": 4,
+ "nbformat_minor": 2
+}
diff --git a/Scripts/Notebooks/Entrenamiento Modelos Alt.ipynb b/Scripts/Notebooks/Entrenamiento Modelos Alt.ipynb
new file mode 100644
index 0000000000000000000000000000000000000000..e7b1006155a4ff7cbbdd74dc8d89f59d697cd1e9
--- /dev/null
+++ b/Scripts/Notebooks/Entrenamiento Modelos Alt.ipynb	
@@ -0,0 +1,2622 @@
+{
+ "cells": [
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "# Librerías"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 1,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "import os\n",
+    "import sys\n",
+    "import tensorflow as tf\n",
+    "import numpy as np\n",
+    "import scipy.misc\n",
+    "import matplotlib.pyplot as plt"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 2,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "sys.path.append(\"../tools\")  # Herramientas propias de MantaFlow\n",
+    "import uniio  # Lectura de ficheros .uni"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "# Hiperparámetros"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 3,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "num_sims = 2000  # num_sims - 1000 escenas. \n",
+    "frames = 200  # Frames por escena.\n",
+    "\n",
+    "epochs_autoencoder = 5\n",
+    "epochs_lstm = 5\n",
+    "epochs_pretraining = 1\n",
+    "\n",
+    "batch_size_autoencoder = 128\n",
+    "batch_size_lstm = 32\n",
+    "\n",
+    "time_steps_lstm = 6\n",
+    "out_time_steps_lstm = 1\n",
+    "\n",
+    "save_autoencoder = True\n",
+    "save_lstm = True"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "# Datos iniciales"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Inicializamos las seed para funciones random. Al ser inicializadas al mismo número, el resultado no cambiará en cada ejecución.\n"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 4,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "np.random.seed(13)"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Ruta a los datos de simulación, donde también se guardan los resultados.\n"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 5,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "base_path = \"../data\""
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "## Carga de datos"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Podemos elegir el número de escenas y los frames de cada una, dependiendo de la configuración de los simuladores clásicos."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 6,
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "Cargamos 1000 escenas, con 200 frames cada una.\n",
+      "Trabajamos con un total de 400000 frames.\n"
+     ]
+    }
+   ],
+   "source": [
+    "print(\"Cargamos {} escenas, con {} frames cada una.\".format(num_sims-1000, frames))\n",
+    "print(\"Trabajamos con un total de {} frames.\".format(num_sims * frames))"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Cargamos los datos desde los ficheros .uni en arrays de numpy. Los .uni son ficheros propios de MantaFlow, en los que se guarda los resultados de los simuladores clásicos. En este caso cargamos los datos de densidad de humo simulados previamente."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 7,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "densities = []\n",
+    "\n",
+    "for sim in range(1000, num_sims):\n",
+    "    \n",
+    "    if os.path.exists(\"%s/simSimple_%04d\" % (base_path, sim)):  # Comprueba la existencia de las carpetas (cada una 100 frames de datos).\n",
+    "        \n",
+    "        for i in range(0, frames):\n",
+    "            \n",
+    "            filename = \"%s/simSimple_%04d/density_%04d.uni\"  # Nombre de cada frame (densidad).\n",
+    "            uni_path = filename % (base_path, sim, i)  # 200 frames por sim, rellena parametros de la ruta.\n",
+    "            header, content = uniio.readUni(uni_path)  # Devuelve un array Numpy [Z, Y, X, C].\n",
+    "            \n",
+    "            h = header[\"dimX\"]\n",
+    "            w = header[\"dimY\"]\n",
+    "            \n",
+    "            arr = content[:, ::-1, :, :]  # Cambia el orden de Y.\n",
+    "            arr = np.reshape(arr, [w, h, 1])  # Deshecha Z.\n",
+    "            \n",
+    "            densities.append(arr)"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Devuelve los datos de cada frame (canal de grises, 0 a 255) en una lista de Python. En este caso las imagenes son de 64x64 pixels. (64, 64, 1)"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Necesitamos al menos 2 simulaciones para trabajar de manera adecuada."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 8,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "load_num = len(densities)\n",
+    "\n",
+    "if load_num < 2 * frames:\n",
+    "    \n",
+    "    print(\"Error - Usa al menos dos simulaciones completas\")\n",
+    "    \n",
+    "    exit(True)"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Convertimos la lista \"densities\" en un array de Numpy.\n"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 9,
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "Forma del array: (200000, 64, 64, 1)\n",
+      "Dimensiones del array: 4\n",
+      "Número de pixels en total: 819200000\n"
+     ]
+    }
+   ],
+   "source": [
+    "densities = np.reshape(densities, (len(densities), 64, 64, 1))\n",
+    "\n",
+    "print(\"Forma del array: {}\".format(densities.shape))\n",
+    "print(\"Dimensiones del array: {}\".format(densities.ndim))\n",
+    "print(\"Número de pixels en total: {}\".format(densities.size))"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "## Creación del set de validación"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Con el fin de entrenar correctamente a los modelos Deep Learning, separamos los datos de densidad en un set de entrenamiento y otro de validación. Creamos el set de validación de entre los datos de simulación generados, al menos una simulación completa o el 10% de los datos (el que sea mayor de los dos)."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 10,
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "Separamos en 180000 frames de entrenamiento y 20000 frames de validación.\n"
+     ]
+    }
+   ],
+   "source": [
+    "vali_set_size = max(200, int(load_num * 0.1))  # Al menos una simu completa o el 10% de los datos.\n",
+    "\n",
+    "vali_data = densities[load_num - vali_set_size : load_num, :]  # \"load_num\" datos del final de \"densities\".\n",
+    "train_data = densities[0 : load_num - vali_set_size, :]  # El resto de datos del principio de \"densities\".\n",
+    "\n",
+    "print(\"Separamos en {} frames de entrenamiento y {} frames de validación.\".format(train_data.shape[0], vali_data.shape[0]))"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Convertimos los datos de entrenamiento y validación en arrays."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 11,
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "Forma del set de entrenamiento: (180000, 64, 64, 1)\n",
+      "Forma del set de validación: (20000, 64, 64, 1)\n"
+     ]
+    }
+   ],
+   "source": [
+    "train_data = np.reshape(train_data, (len(train_data), 64, 64, 1))\n",
+    "vali_data = np.reshape(vali_data, (len(vali_data), 64, 64, 1))\n",
+    "\n",
+    "print(\"Forma del set de entrenamiento: {}\".format(train_data.shape))\n",
+    "print(\"Forma del set de validación: {}\".format(vali_data.shape))"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "# Autoencoder 2D"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "El modelo que vamos a utilizar es un autoencoder completamente convolucional. Las típicas capas de MaxPooling y UpSampling no aparecen en nuestro modelo, y en su lugar cambiamos las dimensiones mediante un Stride de 2.  "
+   ]
+  },
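+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "A minimal sketch of why a convolution with stride 2 can replace MaxPooling: it halves the spatial dimensions while learning its own downsampling filter. The \"demo_\" names below are illustrative only and are not used by the model."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "from keras.layers import Input, Conv2D\n",
+    "from keras.models import Model\n",
+    "\n",
+    "# Illustrative only: a 64x64x1 input downsampled to 32x32x8 by a stride-2 convolution.\n",
+    "demo_input = Input(shape = (64, 64, 1))\n",
+    "demo_output = Conv2D(filters = 8, kernel_size = 4, strides = 2, padding = \"same\")(demo_input)\n",
+    "Model(demo_input, demo_output).summary()  # Output shape: (None, 32, 32, 8)"
+   ]
+  },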
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "## Creacion de las capas del modelo"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "### Parametros de inicialización"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Regula la cantidad de filtros convolucionales:"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 12,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "feature_multiplier = 8 "
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Tamaño del kernel de la primera capa del encoder y la última del decoder (kernels exteriores):"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 13,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "surface_kernel_size = 4  # Matriz 4x4"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Tamaño de los kernels interiores:"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 14,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "kernel_size = 2  # Matriz 2x2"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "El valor de la capa Dropout:"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 15,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "dropout = 0.0"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "La función que utilizamos para inicializar los parametros de las capas:"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 16,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "init_func = \"glorot_normal\""
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "En la primera capa debemos definir las dimensiones del input esperado:"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 17,
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "(64, 64, 1)\n"
+     ]
+    }
+   ],
+   "source": [
+    "input_shape = (train_data.shape[1], \n",
+    "               train_data.shape[2], \n",
+    "               train_data.shape[3])\n",
+    "\n",
+    "print(input_shape)"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "### Librerías"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 18,
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stderr",
+     "output_type": "stream",
+     "text": [
+      "Using TensorFlow backend.\n"
+     ]
+    }
+   ],
+   "source": [
+    "from keras.layers import Input, Dropout, Conv2D, Conv2DTranspose, BatchNormalization, Flatten, Activation, Reshape\n",
+    "from keras.layers.advanced_activations import LeakyReLU"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "### Capas del Encoder"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 156,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "layer_conv = []"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "#### Convolución 1"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 94,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "### Conv 1 ###\n",
+    "\n",
+    "# Input #\n",
+    "\n",
+    "conv1_input_shape = input_shape\n",
+    "\n",
+    "conv1_input = Input(shape = conv1_input_shape)\n",
+    "\n",
+    "x = conv1_input\n",
+    "\n",
+    "# Layer 0 #\n",
+    "\n",
+    "x = Conv2D(filters = feature_multiplier * 1, \n",
+    "            kernel_size = surface_kernel_size,\n",
+    "            strides = 1,\n",
+    "            padding = \"same\",\n",
+    "            kernel_initializer = init_func)(x)\n",
+    "\n",
+    "x = LeakyReLU(alpha = 0.2)(x)\n",
+    "\n",
+    "x = BatchNormalization()(x)\n",
+    "\n",
+    "# Layer 1 #\n",
+    "\n",
+    "x = Conv2D(filters = feature_multiplier * 1, \n",
+    "            kernel_size = surface_kernel_size,\n",
+    "            strides = 1,\n",
+    "            kernel_initializer = init_func,\n",
+    "            padding = \"same\")(x)\n",
+    "\n",
+    "x = LeakyReLU(alpha = 0.2)(x)\n",
+    "\n",
+    "x = BatchNormalization()(x)\n",
+    "\n",
+    "# Layer 2 #\n",
+    "\n",
+    "x = Conv2D(filters = feature_multiplier * 1, \n",
+    "            kernel_size = surface_kernel_size,\n",
+    "            strides = 2,\n",
+    "            kernel_initializer = init_func, \n",
+    "            padding = \"same\")(x)\n",
+    "\n",
+    "x = LeakyReLU(alpha = 0.2)(x)\n",
+    "\n",
+    "x = BatchNormalization()(x)\n",
+    "\n",
+    "x = Dropout(dropout)(x) if dropout > 0.0 else x\n",
+    "\n",
+    "# Output #\n",
+    "\n",
+    "conv1_output = x"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 157,
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "Model: \"model_47\"\n",
+      "_________________________________________________________________\n",
+      "Layer (type)                 Output Shape              Param #   \n",
+      "=================================================================\n",
+      "input_21 (InputLayer)        (None, 64, 64, 1)         0         \n",
+      "_________________________________________________________________\n",
+      "conv2d_64 (Conv2D)           (None, 64, 64, 8)         136       \n",
+      "_________________________________________________________________\n",
+      "leaky_re_lu_131 (LeakyReLU)  (None, 64, 64, 8)         0         \n",
+      "_________________________________________________________________\n",
+      "batch_normalization_131 (Bat (None, 64, 64, 8)         32        \n",
+      "_________________________________________________________________\n",
+      "conv2d_65 (Conv2D)           (None, 64, 64, 8)         1032      \n",
+      "_________________________________________________________________\n",
+      "leaky_re_lu_132 (LeakyReLU)  (None, 64, 64, 8)         0         \n",
+      "_________________________________________________________________\n",
+      "batch_normalization_132 (Bat (None, 64, 64, 8)         32        \n",
+      "_________________________________________________________________\n",
+      "conv2d_66 (Conv2D)           (None, 32, 32, 8)         1032      \n",
+      "_________________________________________________________________\n",
+      "leaky_re_lu_133 (LeakyReLU)  (None, 32, 32, 8)         0         \n",
+      "_________________________________________________________________\n",
+      "batch_normalization_133 (Bat (None, 32, 32, 8)         32        \n",
+      "=================================================================\n",
+      "Total params: 2,296\n",
+      "Trainable params: 2,248\n",
+      "Non-trainable params: 48\n",
+      "_________________________________________________________________\n"
+     ]
+    }
+   ],
+   "source": [
+    "convolution_1 = Model(conv1_input, conv1_output)\n",
+    "layer_conv.append(convolution_1)\n",
+    "convolution_1.summary()"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 96,
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "(32, 32, 8)\n"
+     ]
+    }
+   ],
+   "source": [
+    "conv1_output_shape = (convolution_1.output_shape[1],\n",
+    "                      convolution_1.output_shape[2],\n",
+    "                      convolution_1.output_shape[3])\n",
+    "\n",
+    "print(conv1_output_shape)"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "#### Convolución 2"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 101,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "### Conv 2 ###\n",
+    "\n",
+    "# Input #\n",
+    "\n",
+    "conv2_input_shape = conv1_output_shape\n",
+    "\n",
+    "conv2_input = Input(shape = conv2_input_shape)\n",
+    "\n",
+    "x = conv2_input\n",
+    "\n",
+    "# Layer 0 #\n",
+    "\n",
+    "x = Conv2D(filters = feature_multiplier * 2, \n",
+    "            kernel_size = kernel_size,\n",
+    "            strides = 1,\n",
+    "            kernel_initializer = init_func, \n",
+    "            padding = \"same\")(x)\n",
+    "\n",
+    "x = LeakyReLU(alpha = 0.2)(x)\n",
+    "\n",
+    "x = BatchNormalization()(x)\n",
+    "\n",
+    "# Layer 1 #\n",
+    "\n",
+    "x = Conv2D(filters = feature_multiplier * 2, \n",
+    "            kernel_size = kernel_size,\n",
+    "            strides = 2,\n",
+    "            kernel_initializer = init_func, \n",
+    "            padding = \"same\")(x)\n",
+    "\n",
+    "x = LeakyReLU(alpha = 0.2)(x)\n",
+    "\n",
+    "x = BatchNormalization()(x)\n",
+    "\n",
+    "x = Dropout(dropout)(x) if dropout > 0.0 else x\n",
+    "\n",
+    "# Output #\n",
+    "\n",
+    "conv2_output = x"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 158,
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "Model: \"model_48\"\n",
+      "_________________________________________________________________\n",
+      "Layer (type)                 Output Shape              Param #   \n",
+      "=================================================================\n",
+      "input_24 (InputLayer)        (None, 32, 32, 8)         0         \n",
+      "_________________________________________________________________\n",
+      "conv2d_71 (Conv2D)           (None, 32, 32, 16)        528       \n",
+      "_________________________________________________________________\n",
+      "leaky_re_lu_138 (LeakyReLU)  (None, 32, 32, 16)        0         \n",
+      "_________________________________________________________________\n",
+      "batch_normalization_138 (Bat (None, 32, 32, 16)        64        \n",
+      "_________________________________________________________________\n",
+      "conv2d_72 (Conv2D)           (None, 16, 16, 16)        1040      \n",
+      "_________________________________________________________________\n",
+      "leaky_re_lu_139 (LeakyReLU)  (None, 16, 16, 16)        0         \n",
+      "_________________________________________________________________\n",
+      "batch_normalization_139 (Bat (None, 16, 16, 16)        64        \n",
+      "=================================================================\n",
+      "Total params: 1,696\n",
+      "Trainable params: 1,632\n",
+      "Non-trainable params: 64\n",
+      "_________________________________________________________________\n"
+     ]
+    }
+   ],
+   "source": [
+    "convolution_2 = Model(conv2_input, conv2_output)\n",
+    "layer_conv.append(convolution_2)\n",
+    "convolution_2.summary()"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 103,
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "(16, 16, 16)\n"
+     ]
+    }
+   ],
+   "source": [
+    "conv2_output_shape = (convolution_2.output_shape[1],\n",
+    "                      convolution_2.output_shape[2],\n",
+    "                      convolution_2.output_shape[3])\n",
+    "\n",
+    "print(conv2_output_shape)"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "#### Convolución 3"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 104,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "### Conv 3 ###\n",
+    "\n",
+    "# Input #\n",
+    "\n",
+    "conv3_input_shape = conv2_output_shape\n",
+    "\n",
+    "conv3_input = Input(shape = conv3_input_shape)\n",
+    "\n",
+    "x = conv3_input\n",
+    "\n",
+    "# Layer 0 #\n",
+    "\n",
+    "x = Conv2D(filters = feature_multiplier * 4, \n",
+    "            kernel_size = kernel_size,\n",
+    "            strides = 1,\n",
+    "            kernel_initializer = init_func, \n",
+    "            padding = \"same\")(x)\n",
+    "\n",
+    "x = LeakyReLU(alpha = 0.2)(x)\n",
+    "\n",
+    "x = BatchNormalization()(x)\n",
+    "\n",
+    "# Layer 1 #\n",
+    "\n",
+    "x = Conv2D(filters = feature_multiplier * 4, \n",
+    "            kernel_size = kernel_size,\n",
+    "            strides = 2,\n",
+    "            kernel_initializer = init_func, \n",
+    "            padding = \"same\")(x)\n",
+    "\n",
+    "x = LeakyReLU(alpha = 0.2)(x)\n",
+    "\n",
+    "x = BatchNormalization()(x)\n",
+    "\n",
+    "x = Dropout(dropout)(x) if dropout > 0.0 else x\n",
+    "\n",
+    "# Output #\n",
+    "\n",
+    "conv3_output = x"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 159,
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "Model: \"model_49\"\n",
+      "_________________________________________________________________\n",
+      "Layer (type)                 Output Shape              Param #   \n",
+      "=================================================================\n",
+      "input_25 (InputLayer)        (None, 16, 16, 16)        0         \n",
+      "_________________________________________________________________\n",
+      "conv2d_73 (Conv2D)           (None, 16, 16, 32)        2080      \n",
+      "_________________________________________________________________\n",
+      "leaky_re_lu_140 (LeakyReLU)  (None, 16, 16, 32)        0         \n",
+      "_________________________________________________________________\n",
+      "batch_normalization_140 (Bat (None, 16, 16, 32)        128       \n",
+      "_________________________________________________________________\n",
+      "conv2d_74 (Conv2D)           (None, 8, 8, 32)          4128      \n",
+      "_________________________________________________________________\n",
+      "leaky_re_lu_141 (LeakyReLU)  (None, 8, 8, 32)          0         \n",
+      "_________________________________________________________________\n",
+      "batch_normalization_141 (Bat (None, 8, 8, 32)          128       \n",
+      "=================================================================\n",
+      "Total params: 6,464\n",
+      "Trainable params: 6,336\n",
+      "Non-trainable params: 128\n",
+      "_________________________________________________________________\n"
+     ]
+    }
+   ],
+   "source": [
+    "convolution_3 = Model(conv3_input, conv3_output)\n",
+    "layer_conv.append(convolution_3)\n",
+    "convolution_3.summary()"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 106,
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "(8, 8, 32)\n"
+     ]
+    }
+   ],
+   "source": [
+    "conv3_output_shape = (convolution_3.output_shape[1],\n",
+    "                      convolution_3.output_shape[2],\n",
+    "                      convolution_3.output_shape[3])\n",
+    "\n",
+    "print(conv3_output_shape)"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "#### Convolución 4"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 107,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "### Conv 4 ###\n",
+    "\n",
+    "# Input #\n",
+    "\n",
+    "conv4_input_shape = conv3_output_shape\n",
+    "\n",
+    "conv4_input = Input(shape = conv4_input_shape)\n",
+    "\n",
+    "x = conv4_input\n",
+    "\n",
+    "# Layer 0 #\n",
+    "\n",
+    "x = Conv2D(filters = feature_multiplier * 8, \n",
+    "            kernel_size = kernel_size,\n",
+    "            strides = 1,\n",
+    "            kernel_initializer = init_func, \n",
+    "            padding = \"same\")(x)\n",
+    "\n",
+    "x = LeakyReLU(alpha = 0.2)(x)\n",
+    "\n",
+    "x = BatchNormalization()(x)\n",
+    "\n",
+    "# Layer 1 #\n",
+    "\n",
+    "x = Conv2D(filters = feature_multiplier * 8, \n",
+    "            kernel_size = kernel_size,\n",
+    "            strides = 2,\n",
+    "            kernel_initializer = init_func, \n",
+    "            padding = \"same\")(x)\n",
+    "\n",
+    "x = LeakyReLU(alpha = 0.2)(x)\n",
+    "\n",
+    "x = BatchNormalization()(x)\n",
+    "\n",
+    "x = Dropout(dropout)(x) if dropout > 0.0 else x\n",
+    "\n",
+    "# Output #\n",
+    "\n",
+    "conv4_output = x"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 160,
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "Model: \"model_50\"\n",
+      "_________________________________________________________________\n",
+      "Layer (type)                 Output Shape              Param #   \n",
+      "=================================================================\n",
+      "input_26 (InputLayer)        (None, 8, 8, 32)          0         \n",
+      "_________________________________________________________________\n",
+      "conv2d_75 (Conv2D)           (None, 8, 8, 64)          8256      \n",
+      "_________________________________________________________________\n",
+      "leaky_re_lu_142 (LeakyReLU)  (None, 8, 8, 64)          0         \n",
+      "_________________________________________________________________\n",
+      "batch_normalization_142 (Bat (None, 8, 8, 64)          256       \n",
+      "_________________________________________________________________\n",
+      "conv2d_76 (Conv2D)           (None, 4, 4, 64)          16448     \n",
+      "_________________________________________________________________\n",
+      "leaky_re_lu_143 (LeakyReLU)  (None, 4, 4, 64)          0         \n",
+      "_________________________________________________________________\n",
+      "batch_normalization_143 (Bat (None, 4, 4, 64)          256       \n",
+      "=================================================================\n",
+      "Total params: 25,216\n",
+      "Trainable params: 24,960\n",
+      "Non-trainable params: 256\n",
+      "_________________________________________________________________\n"
+     ]
+    }
+   ],
+   "source": [
+    "convolution_4 = Model(conv4_input, conv4_output)\n",
+    "layer_conv.append(convolution_4)\n",
+    "convolution_4.summary()"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 90,
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "(4, 4, 64)\n"
+     ]
+    }
+   ],
+   "source": [
+    "conv4_output_shape = (convolution_4.output_shape[1],\n",
+    "                      convolution_4.output_shape[2],\n",
+    "                      convolution_4.output_shape[3])\n",
+    "\n",
+    "print(conv4_output_shape)"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "#### Convolución 5"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 113,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "### Conv 5 ###\n",
+    "\n",
+    "# Input #\n",
+    "\n",
+    "conv5_input_shape = conv4_output_shape\n",
+    "\n",
+    "conv5_input = Input(shape = conv5_input_shape)\n",
+    "\n",
+    "x = conv5_input\n",
+    "\n",
+    "# Layer 0 #\n",
+    "\n",
+    "x = Conv2D(filters = feature_multiplier * 16, \n",
+    "            kernel_size = kernel_size,\n",
+    "            strides = 2,\n",
+    "            kernel_initializer = init_func, \n",
+    "            padding = \"same\")(x)\n",
+    "\n",
+    "x = LeakyReLU(alpha = 0.2)(x)\n",
+    "\n",
+    "x = BatchNormalization()(x)\n",
+    "\n",
+    "x = Dropout(dropout)(x) if dropout > 0.0 else x\n",
+    "\n",
+    "# Output #\n",
+    "\n",
+    "conv5_output = x"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 161,
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "Model: \"model_51\"\n",
+      "_________________________________________________________________\n",
+      "Layer (type)                 Output Shape              Param #   \n",
+      "=================================================================\n",
+      "input_28 (InputLayer)        (None, 4, 4, 64)          0         \n",
+      "_________________________________________________________________\n",
+      "conv2d_77 (Conv2D)           (None, 2, 2, 128)         32896     \n",
+      "_________________________________________________________________\n",
+      "leaky_re_lu_145 (LeakyReLU)  (None, 2, 2, 128)         0         \n",
+      "_________________________________________________________________\n",
+      "batch_normalization_145 (Bat (None, 2, 2, 128)         512       \n",
+      "=================================================================\n",
+      "Total params: 33,408\n",
+      "Trainable params: 33,152\n",
+      "Non-trainable params: 256\n",
+      "_________________________________________________________________\n"
+     ]
+    }
+   ],
+   "source": [
+    "convolution_5 = Model(conv5_input, conv5_output)\n",
+    "layer_conv.append(convolution_5)\n",
+    "convolution_5.summary()"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 133,
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "(2, 2, 128)\n"
+     ]
+    }
+   ],
+   "source": [
+    "conv5_output_shape = (convolution_5.output_shape[1],\n",
+    "                      convolution_5.output_shape[2],\n",
+    "                      convolution_5.output_shape[3])\n",
+    "\n",
+    "print(conv5_output_shape)"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "#### Convolución 6"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 134,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "### Conv 6 ###\n",
+    "\n",
+    "# Input #\n",
+    "\n",
+    "conv6_input_shape = conv5_output_shape\n",
+    "\n",
+    "conv6_input = Input(shape = conv6_input_shape)\n",
+    "\n",
+    "x = conv6_input\n",
+    "\n",
+    "# Layer 0 #\n",
+    "\n",
+    "x = Conv2D(filters = feature_multiplier * 32, \n",
+    "            kernel_size = kernel_size,\n",
+    "            strides = 2,\n",
+    "            kernel_initializer = init_func, \n",
+    "            padding = \"same\")(x)\n",
+    "\n",
+    "x = LeakyReLU(alpha = 0.2)(x)\n",
+    "\n",
+    "x = BatchNormalization()(x)\n",
+    "\n",
+    "x = Dropout(dropout)(x) if dropout > 0.0 else x\n",
+    "\n",
+    "# Output #\n",
+    "\n",
+    "conv6_output = x"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 162,
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "Model: \"model_52\"\n",
+      "_________________________________________________________________\n",
+      "Layer (type)                 Output Shape              Param #   \n",
+      "=================================================================\n",
+      "input_36 (InputLayer)        (None, 2, 2, 128)         0         \n",
+      "_________________________________________________________________\n",
+      "conv2d_79 (Conv2D)           (None, 1, 1, 256)         131328    \n",
+      "_________________________________________________________________\n",
+      "leaky_re_lu_153 (LeakyReLU)  (None, 1, 1, 256)         0         \n",
+      "_________________________________________________________________\n",
+      "batch_normalization_153 (Bat (None, 1, 1, 256)         1024      \n",
+      "=================================================================\n",
+      "Total params: 132,352\n",
+      "Trainable params: 131,840\n",
+      "Non-trainable params: 512\n",
+      "_________________________________________________________________\n"
+     ]
+    }
+   ],
+   "source": [
+    "convolution_6 = Model(conv6_input, conv6_output)\n",
+    "layer_conv.append(convolution_6)\n",
+    "convolution_6.summary()"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 136,
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "(1, 1, 256)\n"
+     ]
+    }
+   ],
+   "source": [
+    "conv6_output_shape = (convolution_6.output_shape[1],\n",
+    "                      convolution_6.output_shape[2],\n",
+    "                      convolution_6.output_shape[3])\n",
+    "\n",
+    "print(conv6_output_shape)"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "### Capas del Decoder"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 163,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "layer_deconv = []"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 164,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "### Deconv 6 ###\n",
+    "\n",
+    "# Input #\n",
+    "\n",
+    "deconv6_input_shape = conv6_output_shape\n",
+    "\n",
+    "deconv6_input = Input(shape = deconv6_input_shape)\n",
+    "\n",
+    "x = deconv6_input\n",
+    "\n",
+    "# Layer 0 #\n",
+    "\n",
+    "x = Conv2DTranspose(filters = feature_multiplier * 16, \n",
+    "                    kernel_size = kernel_size,\n",
+    "                    strides = 2,\n",
+    "                    kernel_initializer = init_func,\n",
+    "                    padding = \"same\")(x)\n",
+    "\n",
+    "x = LeakyReLU(alpha = 0.2)(x)\n",
+    "\n",
+    "x = BatchNormalization()(x)\n",
+    "\n",
+    "x = Dropout(dropout)(x) if dropout > 0.0 else x\n",
+    "\n",
+    "# Output #\n",
+    "\n",
+    "deconv6_output = x"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 165,
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "Model: \"model_53\"\n",
+      "_________________________________________________________________\n",
+      "Layer (type)                 Output Shape              Param #   \n",
+      "=================================================================\n",
+      "input_46 (InputLayer)        (None, 1, 1, 256)         0         \n",
+      "_________________________________________________________________\n",
+      "conv2d_transpose_102 (Conv2D (None, 2, 2, 128)         131200    \n",
+      "_________________________________________________________________\n",
+      "leaky_re_lu_166 (LeakyReLU)  (None, 2, 2, 128)         0         \n",
+      "_________________________________________________________________\n",
+      "batch_normalization_166 (Bat (None, 2, 2, 128)         512       \n",
+      "=================================================================\n",
+      "Total params: 131,712\n",
+      "Trainable params: 131,456\n",
+      "Non-trainable params: 256\n",
+      "_________________________________________________________________\n"
+     ]
+    }
+   ],
+   "source": [
+    "deconvolution_6 = Model(deconv6_input, deconv6_output)\n",
+    "layer_deconv.append(deconvolution_6)\n",
+    "deconvolution_6.summary()"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 166,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "### Deconv 5 ###\n",
+    "\n",
+    "# Input # \n",
+    "\n",
+    "deconv5_input_shape = conv5_output_shape\n",
+    "\n",
+    "deconv5_input = Input(shape = deconv5_input_shape)\n",
+    "\n",
+    "x = deconv5_input\n",
+    "\n",
+    "# Layer 0 #\n",
+    "\n",
+    "x = Conv2DTranspose(filters = feature_multiplier * 8, \n",
+    "                    kernel_size = kernel_size,\n",
+    "                    strides = 2,\n",
+    "                    kernel_initializer = init_func,\n",
+    "                    padding = \"same\")(x)\n",
+    "\n",
+    "x = LeakyReLU(alpha = 0.2)(x)\n",
+    "\n",
+    "x = BatchNormalization()(x)\n",
+    "\n",
+    "# Layer 1 #\n",
+    "\n",
+    "x = Conv2DTranspose(filters = feature_multiplier * 8, \n",
+    "                    kernel_size = kernel_size,\n",
+    "                    strides = 1,\n",
+    "                    kernel_initializer = init_func,\n",
+    "                    padding = \"same\")(x)\n",
+    "\n",
+    "x = LeakyReLU(alpha = 0.2)(x)\n",
+    "\n",
+    "x = BatchNormalization()(x)\n",
+    "\n",
+    "x = Dropout(dropout)(x) if dropout > 0.0 else x\n",
+    "\n",
+    "# Output #\n",
+    "\n",
+    "deconv5_output = x"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 167,
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "Model: \"model_54\"\n",
+      "_________________________________________________________________\n",
+      "Layer (type)                 Output Shape              Param #   \n",
+      "=================================================================\n",
+      "input_47 (InputLayer)        (None, 2, 2, 128)         0         \n",
+      "_________________________________________________________________\n",
+      "conv2d_transpose_103 (Conv2D (None, 4, 4, 64)          32832     \n",
+      "_________________________________________________________________\n",
+      "leaky_re_lu_167 (LeakyReLU)  (None, 4, 4, 64)          0         \n",
+      "_________________________________________________________________\n",
+      "batch_normalization_167 (Bat (None, 4, 4, 64)          256       \n",
+      "_________________________________________________________________\n",
+      "conv2d_transpose_104 (Conv2D (None, 4, 4, 64)          16448     \n",
+      "_________________________________________________________________\n",
+      "leaky_re_lu_168 (LeakyReLU)  (None, 4, 4, 64)          0         \n",
+      "_________________________________________________________________\n",
+      "batch_normalization_168 (Bat (None, 4, 4, 64)          256       \n",
+      "=================================================================\n",
+      "Total params: 49,792\n",
+      "Trainable params: 49,536\n",
+      "Non-trainable params: 256\n",
+      "_________________________________________________________________\n"
+     ]
+    }
+   ],
+   "source": [
+    "deconvolution_5 = Model(deconv5_input, deconv5_output)\n",
+    "layer_deconv.append(deconvolution_5)\n",
+    "deconvolution_5.summary()"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 168,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "### Deconv 4 ###\n",
+    "\n",
+    "# Input #\n",
+    "\n",
+    "deconv4_input_shape = conv4_output_shape\n",
+    "\n",
+    "deconv4_input = Input(shape = deconv4_input_shape)\n",
+    "\n",
+    "x = deconv4_input\n",
+    "\n",
+    "# Layer 0 #\n",
+    "\n",
+    "x = Conv2DTranspose(filters = feature_multiplier * 4, \n",
+    "                    kernel_size = kernel_size,\n",
+    "                    strides = 2,\n",
+    "                    kernel_initializer = init_func,\n",
+    "                    padding = \"same\")(x)\n",
+    "\n",
+    "x = LeakyReLU(alpha = 0.2)(x)\n",
+    "\n",
+    "x = BatchNormalization()(x)\n",
+    "\n",
+    "# Layer 1 #\n",
+    "\n",
+    "x = Conv2DTranspose(filters = feature_multiplier * 4, \n",
+    "                    kernel_size = kernel_size,\n",
+    "                    strides = 1,\n",
+    "                    kernel_initializer = init_func,\n",
+    "                    padding = \"same\")(x)\n",
+    "\n",
+    "x = LeakyReLU(alpha = 0.2)(x)\n",
+    "\n",
+    "x = BatchNormalization()(x)\n",
+    "\n",
+    "x = Dropout(dropout)(x) if dropout > 0.0  else x\n",
+    "\n",
+    "# Output #\n",
+    "\n",
+    "deconv4_output = x"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 169,
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "Model: \"model_55\"\n",
+      "_________________________________________________________________\n",
+      "Layer (type)                 Output Shape              Param #   \n",
+      "=================================================================\n",
+      "input_48 (InputLayer)        (None, 4, 4, 64)          0         \n",
+      "_________________________________________________________________\n",
+      "conv2d_transpose_105 (Conv2D (None, 8, 8, 32)          8224      \n",
+      "_________________________________________________________________\n",
+      "leaky_re_lu_169 (LeakyReLU)  (None, 8, 8, 32)          0         \n",
+      "_________________________________________________________________\n",
+      "batch_normalization_169 (Bat (None, 8, 8, 32)          128       \n",
+      "_________________________________________________________________\n",
+      "conv2d_transpose_106 (Conv2D (None, 8, 8, 32)          4128      \n",
+      "_________________________________________________________________\n",
+      "leaky_re_lu_170 (LeakyReLU)  (None, 8, 8, 32)          0         \n",
+      "_________________________________________________________________\n",
+      "batch_normalization_170 (Bat (None, 8, 8, 32)          128       \n",
+      "=================================================================\n",
+      "Total params: 12,608\n",
+      "Trainable params: 12,480\n",
+      "Non-trainable params: 128\n",
+      "_________________________________________________________________\n"
+     ]
+    }
+   ],
+   "source": [
+    "deconvolution_4 = Model(deconv4_input, deconv4_output)\n",
+    "layer_deconv.append(deconvolution_4)\n",
+    "deconvolution_4.summary()"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 170,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "### Deconv 3 ###\n",
+    "\n",
+    "# Input #\n",
+    "\n",
+    "deconv3_input_shape = conv3_output_shape\n",
+    "\n",
+    "deconv3_input = Input(shape = deconv3_input_shape)\n",
+    "\n",
+    "x = deconv3_input\n",
+    "\n",
+    "# Layer 0 #\n",
+    "\n",
+    "x = Conv2DTranspose(filters = feature_multiplier * 2, \n",
+    "                    kernel_size = kernel_size,\n",
+    "                    strides = 2,\n",
+    "                    kernel_initializer = init_func,\n",
+    "                    padding = \"same\")(x)\n",
+    "\n",
+    "x = LeakyReLU(alpha = 0.2)(x)\n",
+    "\n",
+    "x = BatchNormalization()(x)\n",
+    "\n",
+    "# Layer 1 #\n",
+    "\n",
+    "x = Conv2DTranspose(filters = feature_multiplier * 2, \n",
+    "                    kernel_size = kernel_size,\n",
+    "                    strides = 1,\n",
+    "                    kernel_initializer = init_func,\n",
+    "                    padding = \"same\")(x)\n",
+    "\n",
+    "x = LeakyReLU(alpha = 0.2)(x)\n",
+    "\n",
+    "x = BatchNormalization()(x)\n",
+    "\n",
+    "x = Dropout(dropout)(x) if dropout > 0.0 else x\n",
+    "\n",
+    "# Output #\n",
+    "\n",
+    "deconv3_output = x"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 171,
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "Model: \"model_56\"\n",
+      "_________________________________________________________________\n",
+      "Layer (type)                 Output Shape              Param #   \n",
+      "=================================================================\n",
+      "input_49 (InputLayer)        (None, 8, 8, 32)          0         \n",
+      "_________________________________________________________________\n",
+      "conv2d_transpose_107 (Conv2D (None, 16, 16, 16)        2064      \n",
+      "_________________________________________________________________\n",
+      "leaky_re_lu_171 (LeakyReLU)  (None, 16, 16, 16)        0         \n",
+      "_________________________________________________________________\n",
+      "batch_normalization_171 (Bat (None, 16, 16, 16)        64        \n",
+      "_________________________________________________________________\n",
+      "conv2d_transpose_108 (Conv2D (None, 16, 16, 16)        1040      \n",
+      "_________________________________________________________________\n",
+      "leaky_re_lu_172 (LeakyReLU)  (None, 16, 16, 16)        0         \n",
+      "_________________________________________________________________\n",
+      "batch_normalization_172 (Bat (None, 16, 16, 16)        64        \n",
+      "=================================================================\n",
+      "Total params: 3,232\n",
+      "Trainable params: 3,168\n",
+      "Non-trainable params: 64\n",
+      "_________________________________________________________________\n"
+     ]
+    }
+   ],
+   "source": [
+    "deconvolution_3 = Model(deconv3_input, deconv3_output)\n",
+    "layer_deconv.append(deconvolution_3)\n",
+    "deconvolution_3.summary()"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 172,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "### Deconv 2 ###\n",
+    "\n",
+    "# Input #\n",
+    "\n",
+    "deconv2_input_shape = conv2_output_shape\n",
+    "\n",
+    "deconv2_input = Input(shape = deconv2_input_shape)\n",
+    "\n",
+    "x = deconv2_input\n",
+    "\n",
+    "# Layer 0 #\n",
+    "\n",
+    "x = Conv2DTranspose(filters = feature_multiplier * 1, \n",
+    "                    kernel_size = kernel_size,\n",
+    "                    strides = 2,\n",
+    "                    kernel_initializer = init_func,\n",
+    "                    padding = \"same\")(x)\n",
+    "\n",
+    "x = LeakyReLU(alpha = 0.2)(x)\n",
+    "\n",
+    "x = BatchNormalization()(x) \n",
+    "\n",
+    "# Layer 1 #\n",
+    "\n",
+    "x = Conv2DTranspose(filters = feature_multiplier * 1, \n",
+    "                    kernel_size = kernel_size,\n",
+    "                    strides = 1,\n",
+    "                    kernel_initializer = init_func,\n",
+    "                    padding = \"same\")(x)\n",
+    "\n",
+    "x = LeakyReLU(alpha = 0.2)(x)\n",
+    "\n",
+    "x = BatchNormalization()(x)\n",
+    "\n",
+    "x = Dropout(dropout)(x) if dropout > 0.0 else x\n",
+    "\n",
+    "# Output # \n",
+    "\n",
+    "deconv2_output = x"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 173,
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "Model: \"model_57\"\n",
+      "_________________________________________________________________\n",
+      "Layer (type)                 Output Shape              Param #   \n",
+      "=================================================================\n",
+      "input_50 (InputLayer)        (None, 16, 16, 16)        0         \n",
+      "_________________________________________________________________\n",
+      "conv2d_transpose_109 (Conv2D (None, 32, 32, 8)         520       \n",
+      "_________________________________________________________________\n",
+      "leaky_re_lu_173 (LeakyReLU)  (None, 32, 32, 8)         0         \n",
+      "_________________________________________________________________\n",
+      "batch_normalization_173 (Bat (None, 32, 32, 8)         32        \n",
+      "_________________________________________________________________\n",
+      "conv2d_transpose_110 (Conv2D (None, 32, 32, 8)         264       \n",
+      "_________________________________________________________________\n",
+      "leaky_re_lu_174 (LeakyReLU)  (None, 32, 32, 8)         0         \n",
+      "_________________________________________________________________\n",
+      "batch_normalization_174 (Bat (None, 32, 32, 8)         32        \n",
+      "=================================================================\n",
+      "Total params: 848\n",
+      "Trainable params: 816\n",
+      "Non-trainable params: 32\n",
+      "_________________________________________________________________\n"
+     ]
+    }
+   ],
+   "source": [
+    "deconvolution_2 = Model(deconv2_input, deconv2_output)\n",
+    "layer_deconv.append(deconvolution_2)\n",
+    "deconvolution_2.summary()"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 174,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "### Deconv 1 ###\n",
+    "\n",
+    "# Input #\n",
+    "\n",
+    "deconv1_input_shape = conv1_output_shape\n",
+    "\n",
+    "deconv1_input = Input(shape = deconv1_input_shape)\n",
+    "\n",
+    "x = deconv1_input\n",
+    "\n",
+    "# Layer 0 #\n",
+    "\n",
+    "x = Conv2DTranspose(input_shape[-1],\n",
+    "                    kernel_size = surface_kernel_size,\n",
+    "                    strides = 2,\n",
+    "                    padding = \"same\",\n",
+    "                    kernel_initializer = init_func)(x)\n",
+    "\n",
+    "x = Activation(\"linear\")(x)\n",
+    "\n",
+    "# Output #\n",
+    "\n",
+    "deconv1_output = x"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 175,
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "Model: \"model_58\"\n",
+      "_________________________________________________________________\n",
+      "Layer (type)                 Output Shape              Param #   \n",
+      "=================================================================\n",
+      "input_51 (InputLayer)        (None, 32, 32, 8)         0         \n",
+      "_________________________________________________________________\n",
+      "conv2d_transpose_111 (Conv2D (None, 64, 64, 1)         129       \n",
+      "_________________________________________________________________\n",
+      "activation_12 (Activation)   (None, 64, 64, 1)         0         \n",
+      "=================================================================\n",
+      "Total params: 129\n",
+      "Trainable params: 129\n",
+      "Non-trainable params: 0\n",
+      "_________________________________________________________________\n"
+     ]
+    }
+   ],
+   "source": [
+    "deconvolution_1 = Model(deconv1_input, deconv1_output)\n",
+    "layer_deconv.append(deconvolution_1)\n",
+    "deconvolution_1.summary()"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "## Ensamblando el Autoencoder"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "### Optimizador"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Importamos el optimizador Adam:"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 176,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "from keras.optimizers import Adam"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Definimos los parametros del optimizador:"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 177,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "adam_learning_rate = 0.00015  # El learning rate de Adam (tamaño step)\n",
+    "adam_epsilon = 1e-8  # Previene problemas de división por 0.\n",
+    "adam_lr_decay = 1e-05  # Learning rate decay"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Definimos el optimizador:"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 178,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "optimizer = Adam(lr = adam_learning_rate, \n",
+    "                 epsilon = adam_epsilon, \n",
+    "                 decay = adam_lr_decay)"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "### Modelo por capas"
+   ]
+  },
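+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "For greedy layer-wise training, each stage i built below encodes through the first i + 1 convolutional blocks and decodes back through the matching deconvolutional blocks, so every stage maps a frame back to a frame and can be trained with the same reconstruction loss."
+   ]
+  },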
+  {
+   "cell_type": "code",
+   "execution_count": 213,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "from keras.models import Model"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 214,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "reverse_layer_deconv = list(reversed(layer_deconv))"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 215,
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "Model: \"model_58\"\n",
+      "_________________________________________________________________\n",
+      "Layer (type)                 Output Shape              Param #   \n",
+      "=================================================================\n",
+      "input_51 (InputLayer)        (None, 32, 32, 8)         0         \n",
+      "_________________________________________________________________\n",
+      "conv2d_transpose_111 (Conv2D (None, 64, 64, 1)         129       \n",
+      "_________________________________________________________________\n",
+      "activation_12 (Activation)   (None, 64, 64, 1)         0         \n",
+      "=================================================================\n",
+      "Total params: 129\n",
+      "Trainable params: 129\n",
+      "Non-trainable params: 0\n",
+      "_________________________________________________________________\n"
+     ]
+    }
+   ],
+   "source": [
+    "reverse_layer_deconv[0].summary()"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 212,
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "Model: \"model_53\"\n",
+      "_________________________________________________________________\n",
+      "Layer (type)                 Output Shape              Param #   \n",
+      "=================================================================\n",
+      "input_46 (InputLayer)        (None, 1, 1, 256)         0         \n",
+      "_________________________________________________________________\n",
+      "conv2d_transpose_102 (Conv2D (None, 2, 2, 128)         131200    \n",
+      "_________________________________________________________________\n",
+      "leaky_re_lu_166 (LeakyReLU)  (None, 2, 2, 128)         0         \n",
+      "_________________________________________________________________\n",
+      "batch_normalization_166 (Bat (None, 2, 2, 128)         512       \n",
+      "=================================================================\n",
+      "Total params: 131,712\n",
+      "Trainable params: 131,456\n",
+      "Non-trainable params: 256\n",
+      "_________________________________________________________________\n"
+     ]
+    }
+   ],
+   "source": [
+    "layer_deconv[0].summary()"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 216,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "stages = []\n",
+    "\n",
+    "for i in range(len(layer_conv)):\n",
+    "    \n",
+    "    if i == 0:\n",
+    "        \n",
+    "        stage_input = Input(shape = input_shape)\n",
+    "        x = stage_input\n",
+    "        x = layer_conv[i](x)\n",
+    "        x = reverse_layer_deconv[i](x)\n",
+    "        stage_output = x\n",
+    "        \n",
+    "        stages.append(Model(inputs = stage_input, outputs = stage_output))\n",
+    "        stages[i].compile(optimizer = optimizer, loss = \"mse\", metrics = [\"mae\"])\n",
+    "    \n",
+    "    else:\n",
+    "        \n",
+    "        stage_input = Input(shape = input_shape)\n",
+    "        x = stage_input\n",
+    "        for j in range(i):\n",
+    "            x = layer_conv[j](x) \n",
+    "        x = layer_conv[i](x)\n",
+    "        for j in range(i):\n",
+    "            x = reverse_layer_deconv[i-j](x) \n",
+    "        x = layer_deconv[i](x)\n",
+    "        stage_output = x\n",
+    "        \n",
+    "        stages.append(Model(inputs = stage_input, outputs = stage_output))   \n",
+    "        stages[i].compile(optimizer = optimizer, loss = \"mse\", metrics = [\"mae\"])"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 217,
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "Model: \"model_79\"\n",
+      "_________________________________________________________________\n",
+      "Layer (type)                 Output Shape              Param #   \n",
+      "=================================================================\n",
+      "input_72 (InputLayer)        (None, 64, 64, 1)         0         \n",
+      "_________________________________________________________________\n",
+      "model_47 (Model)             (None, 32, 32, 8)         2296      \n",
+      "_________________________________________________________________\n",
+      "model_58 (Model)             multiple                  129       \n",
+      "=================================================================\n",
+      "Total params: 2,425\n",
+      "Trainable params: 2,377\n",
+      "Non-trainable params: 48\n",
+      "_________________________________________________________________\n"
+     ]
+    }
+   ],
+   "source": [
+    "stages[0].summary()"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "### Pre-entrenamiento"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 218,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "pre_epochs = 1\n",
+    "pre_batch_size = 256"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "Train on 180000 samples, validate on 20000 samples\n",
+      "Epoch 1/1\n",
+      "142592/180000 [======================>.......] - ETA: 1:21 - loss: 2.6148e-04 - mae: 0.0090"
+     ]
+    }
+   ],
+   "source": [
+    "for stage in stages:\n",
+    "    autoencoder_layer = stage.fit(train_data, train_data,\n",
+    "                                  epochs = pre_epochs,\n",
+    "                                  batch_size = pre_batch_size,\n",
+    "                                  validation_data = (vali_data, vali_data),\n",
+    "                                  shuffle = True)"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "### Entrenamiento"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Parametros del entrenamiento:"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "training_epochs = epochs_autoencoder  # Número de vueltas completas al set de entrenamiento.\n",
+    "batch_size = batch_size_autoencoder  # Número de ejemplos antes de calcular el error de la función de coste."
+   ]
+  },
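+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "A minimal sketch, assuming the full autoencoder \"autoencoder_greedy\" trained below is the deepest stage assembled above (the one that chains every conv block with every deconv block):"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# Sketch (assumption): the last stage spans the whole conv/deconv chain,\n",
+    "# so we reuse it as the full autoencoder.\n",
+    "autoencoder_greedy = stages[-1]"
+   ]
+  },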
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Entrenamos el modelo autoencoder:"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "autoencoder_train = autoencoder_greedy.fit(train_data, train_data, \n",
+    "                                    epochs = training_epochs,\n",
+    "                                    batch_size = batch_size,\n",
+    "                                    verbose = 1,\n",
+    "                                    validation_data = (vali_data, vali_data),\n",
+    "                                    shuffle = True)"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Los datos del entrenamiento se guardan en \"autoencoder_train\"."
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "### Plot Errores"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Plot de Loss (MSE y MAE) y Validation Loss (MSE y MAE) respecto a las epochs."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "import matplotlib.pyplot as plt"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "plot_epochs = range(training_epochs)\n",
+    "plot_loss = autoencoder_train.history[\"loss\"]\n",
+    "plot_val_loss = autoencoder_train.history[\"val_loss\"]\n",
+    "plot_mae = autoencoder_train.history[\"mae\"]\n",
+    "plot_val_mae = autoencoder_train.history[\"val_mae\"]\n",
+    "\n",
+    "plt.figure(figsize = (15, 5))\n",
+    "\n",
+    "ax = plt.subplot(1, 2, 1)\n",
+    "plt.plot(plot_epochs, plot_loss, plot_val_loss)\n",
+    "\n",
+    "ax = plt.subplot(1, 2, 2)\n",
+    "plt.plot(plot_epochs, plot_mae, plot_val_mae)\n",
+    "\n",
+    "plt.show()"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "### Guardar Modelo Autoencoder"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "import h5py"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "if save_autoencoder:\n",
+    "    autoencoder_greedy.save(\"autoencoder_model.h5\")\n",
+    "    \n",
+    "else:\n",
+    "    print(\"Modelo no guardado.\")"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Guardamos el modelo del autoencoder con sus pesos / parametros."
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "# Encoder"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Utilizamos las capas iniciales entrenadas por el modelo autoencoder para el modelo Encoder."
+   ]
+  },
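+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "A minimal sketch of the encoder, assuming \"encoder_greedy\" simply chains the trained convolutional stages stored in \"layer_conv\":"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# Sketch (assumption): stack every trained conv block into a single encoder.\n",
+    "encoder_input = Input(shape = input_shape)\n",
+    "x = encoder_input\n",
+    "for conv in layer_conv:\n",
+    "    x = conv(x)\n",
+    "encoder_greedy = Model(inputs = encoder_input, outputs = x)"
+   ]
+  },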
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "### Guardar Modelo Encoder"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "if save_autoencoder:\n",
+    "    encoder_greedy.save(\"encoder_model.h5\")\n",
+    "    \n",
+    "else:\n",
+    "    print(\"Modelo no guardado.\")"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Guardamos el modelo encoder con sus pesos."
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "# Decoder"
+   ]
+  },
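+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "A minimal sketch of the decoder, assuming \"decoder_greedy\" chains the trained deconvolutional stages in \"layer_deconv\", which are stored deepest-first and therefore run from the latent space back to the frame:"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# Sketch (assumption): stack every trained deconv block into a single decoder.\n",
+    "decoder_input = Input(shape = layer_deconv[0].input_shape[1:])\n",
+    "x = decoder_input\n",
+    "for deconv in layer_deconv:\n",
+    "    x = deconv(x)\n",
+    "decoder_greedy = Model(inputs = decoder_input, outputs = x)"
+   ]
+  },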
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "### Guardar modelo Decoder"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "if save_autoencoder:\n",
+    "    decoder_greedy.save(\"decoder_model.h5\")\n",
+    "    \n",
+    "else:\n",
+    "    print(\"Modelo no guardado.\")"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Guardamos el modelo decoder con sus pesos."
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "# LSTM"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "El output del modelo encoder sirve como input para la red LSTM."
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "## Optimizador"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "from keras.optimizers import RMSprop"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "lstm_optimizer = RMSprop(lr = 0.000126, \n",
+    "                         rho = 0.9, \n",
+    "                         epsilon = 1e-08,\n",
+    "                         decay = 0.000334)"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "## Parametros LSTM"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "time_steps = time_steps_lstm\n",
+    "out_time_steps = out_time_steps_lstm\n",
+    "data_dimension = 256\n",
+    "\n",
+    "encoder_lstm_neurons = 256\n",
+    "decoder_lstm_neurons = 512\n",
+    "attention_neurons = 400\n",
+    "\n",
+    "activation = \"tanh\"\n",
+    "loss = \"mae\"\n",
+    "batch_size = batch_size_lstm\n",
+    "\n",
+    "dropout = 0.0132\n",
+    "recurrent_dropout = 0.385\n",
+    "use_bias = True\n",
+    "stateful = False"
+   ]
+  },
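+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "With these values the network reads time_steps = 6 consecutive encoded frames of dimension 256 and predicts the next out_time_steps = 1 encoded frame."
+   ]
+  },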
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "## Capas LSTM "
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Utilizamos 3 capas LSTM. RepeatVector repite el input para la segunda capa de LSTM out_time_steps veces."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "from keras.layers import RepeatVector, LSTM"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "input_frames = Input(shape = (time_steps, data_dimension))\n",
+    "\n",
+    "l0 = LSTM(units = encoder_lstm_neurons,\n",
+    "          activation = activation,\n",
+    "          use_bias = use_bias,\n",
+    "          recurrent_activation = \"hard_sigmoid\",\n",
+    "          kernel_initializer='glorot_uniform',\n",
+    "          recurrent_initializer='orthogonal',\n",
+    "          bias_initializer='zeros',\n",
+    "          unit_forget_bias = True,\n",
+    "          dropout = dropout,\n",
+    "          recurrent_dropout = recurrent_dropout,\n",
+    "          return_sequences = False,\n",
+    "          go_backwards = True, \n",
+    "          stateful = stateful)(input_frames)\n",
+    "\n",
+    "l1 = RepeatVector(out_time_steps)(l0)\n",
+    "\n",
+    "l2 = LSTM(units = decoder_lstm_neurons,\n",
+    "          activation = activation,\n",
+    "          use_bias = use_bias,\n",
+    "          recurrent_activation = \"hard_sigmoid\",\n",
+    "          kernel_initializer='glorot_uniform',\n",
+    "          recurrent_initializer='orthogonal',\n",
+    "          bias_initializer='zeros',\n",
+    "          unit_forget_bias = True,\n",
+    "          dropout = dropout,\n",
+    "          recurrent_dropout = recurrent_dropout,\n",
+    "          return_sequences = True,\n",
+    "          go_backwards = False, \n",
+    "          stateful = stateful)(l1)\n",
+    "\n",
+    "l3 = LSTM(units = data_dimension,\n",
+    "          activation = activation,\n",
+    "          use_bias = use_bias,\n",
+    "          recurrent_activation = \"hard_sigmoid\",\n",
+    "          kernel_initializer='glorot_uniform',\n",
+    "          recurrent_initializer='orthogonal',\n",
+    "          bias_initializer='zeros',\n",
+    "          unit_forget_bias = True,\n",
+    "          dropout = dropout,\n",
+    "          recurrent_dropout = recurrent_dropout,\n",
+    "          return_sequences = out_time_steps > 1,\n",
+    "          go_backwards = False, \n",
+    "          stateful = stateful)(l2)     \n",
+    "\n",
+    "output_frames = l3"
+   ]
+  },
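+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "The first LSTM compresses the input sequence into a single state vector (return_sequences = False, reading it back to front with go_backwards = True); RepeatVector copies that vector once per output step so the second LSTM can decode a sequence; the final LSTM projects back to data_dimension and only returns a sequence when out_time_steps > 1."
+   ]
+  },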
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "## Modelo"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "lstm = Model(inputs = input_frames, outputs = output_frames)"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "lstm.summary()"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "### Compilación"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "from keras.losses import mean_absolute_error, mean_squared_error"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "lstm.compile(loss = loss,\n",
+    "             optimizer = lstm_optimizer,\n",
+    "             metrics = ['mean_squared_error', 'mean_absolute_error'])"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "## Preparación de datos para LSTM"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Pasamos el set de entrenamiento y validación por el encoder para lograr el input de la red LSTM."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "encoded_train = encoder_greedy.predict(train_data)\n",
+    "encoded_vali = encoder_greedy.predict(vali_data)\n",
+    "\n",
+    "print(encoded_train.shape)\n",
+    "print(encoded_vali.shape)"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Definimos unas cuantas funciones útiles a la hora de preparar el input de la red LSTM:"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "from math import floor"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "def generator_count(encoded_data, batch_size, time_steps, out_time_steps, frames):\n",
+    "    scene_count = len(encoded_data) // frames\n",
+    "    sample_count = frames\n",
+    "    scene_iteration_count = floor((sample_count + 1 - (time_steps + out_time_steps)) / batch_size)\n",
+    "    return scene_count, sample_count, scene_iteration_count"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Cuenta cuantos batches entran en el set de entrenamiento:"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "def generator_batch_samples(encoded_data, batch_size, time_steps, out_time_steps, frames):\n",
+    "    scene_count, sample_count, scene_iteration_count = generator_count(encoded_data, batch_size, time_steps, out_time_steps, frames)\n",
+    "    batch_samples = scene_count * scene_iteration_count\n",
+    "    return batch_samples"
+   ]
+  },
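+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "For example, with frames = 200, time_steps = 6, out_time_steps = 1 and batch_size = 16, each scene yields floor((200 + 1 - 7) / 16) = 12 batches, so the 900 training scenes give 10800 batch samples per epoch."
+   ]
+  },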
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Función para aplicar el mismo Shuffle a varias arrays, manteniendo el orden:"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "def shuffle_in_unison(*np_arrays):\n",
+    "    rng = np.random.get_state()\n",
+    "    for array in np_arrays:\n",
+    "        np.random.set_state(rng)\n",
+    "        np.random.shuffle(array)"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Reestructuramos los datos codificados."
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Devuelve arrays con la forma (batch_size, time_steps, data_dimension) y (batch_size, out_time_steps, data_dimension)."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "def restructure_encoded_data(encoded_data, time_steps, out_time_steps, batch_size):\n",
+    "    \n",
+    "    content_shape = encoded_data[0].shape  # (256,)\n",
+    "    final_sample_count = encoded_data.shape[0] - time_steps - out_time_steps  # frames, frames - batch_size, frames - 2 * batch_size, ...\n",
+    "    final_sample_count = min(batch_size, final_sample_count)  # 8\n",
+    "        \n",
+    "    X_data = np.zeros((final_sample_count, time_steps) + content_shape)  # (8, 6, 256)\n",
+    "    y_data = np.zeros((final_sample_count, out_time_steps) + content_shape)  # (8, 1, 256)\n",
+    "        \n",
+    "    curTS = 0\n",
+    "            \n",
+    "    for z in range(time_steps, final_sample_count + time_steps):\n",
+    "        X_data[curTS] = np.array(encoded_data[curTS:z])\n",
+    "        y_data[curTS] = np.array(encoded_data[z:z+out_time_steps])\n",
+    "        curTS += 1\n",
+    "        \n",
+    "    return X_data, y_data"
+   ]
+  },
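+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "A quick shape check of the restructuring, a hypothetical sketch that uses random data in place of the encoded frames:"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# Hypothetical data: one scene of 200 encoded frames of dimension 256.\n",
+    "demo_scene = np.random.rand(frames, data_dimension)\n",
+    "demo_X, demo_y = restructure_encoded_data(demo_scene, time_steps, out_time_steps, batch_size)\n",
+    "print(demo_X.shape)  # (16, 6, 256) with the hyperparameters above.\n",
+    "print(demo_y.shape)  # (16, 1, 256)"
+   ]
+  },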
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Generador para entrenar a la red LSTM."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "def generator_scene(encoded_data, batch_size, time_steps, out_time_steps, frames):\n",
+    "    \n",
+    "    scene_count, sample_count, scene_iteration_count = generator_count(encoded_data, batch_size, time_steps, out_time_steps, frames)\n",
+    "    \n",
+    "    while True:\n",
+    "\n",
+    "        for i in range(scene_count):\n",
+    "            \n",
+    "            scene = encoded_train[(i * frames):((i + 1) * frames)]  # Selecciona escenas individualmente.\n",
+    "     \n",
+    "            for j in range(scene_iteration_count):  # Número de batches que entran en una escena individual.\n",
+    "                start = j * batch_size\n",
+    "                end = sample_count\n",
+    "                \n",
+    "                data = scene[start:end]\n",
+    "                X, Y  = restructure_encoded_data(data, time_steps, out_time_steps, batch_size)\n",
+    "            \n",
+    "                X = X.reshape(*X.shape[0:2], -1)\n",
+    "                Y = np.squeeze(Y.reshape(Y.shape[0], out_time_steps, -1))\n",
+    "                \n",
+    "                shuffle_in_unison(X, Y)\n",
+    "        \n",
+    "                yield X, Y"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "train_gen_samples = generator_batch_samples(encoded_train, batch_size, time_steps, out_time_steps, frames)\n",
+    "print (\"Number of train batch samples per epoch: {}\".format(train_gen_samples))\n",
+    "train_generator = generator_scene(encoded_train, batch_size, time_steps, out_time_steps, frames)"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "vali_gen_samples = generator_batch_samples(encoded_vali, batch_size, time_steps, out_time_steps, frames)\n",
+    "print (\"Number of validation batch samples per epoch: {}\".format(vali_gen_samples))\n",
+    "vali_generator = generator_scene(encoded_vali, batch_size, time_steps, out_time_steps, frames)"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "## Entrenamiento"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "training_epochs = epochs_lstm"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "lstm_train = lstm.fit_generator(generator = train_generator,\n",
+    "                                    steps_per_epoch = train_gen_samples,\n",
+    "                                    epochs = training_epochs,\n",
+    "                                    verbose = 1,\n",
+    "                                    callbacks = None,\n",
+    "                                    validation_data = vali_generator,\n",
+    "                                    validation_steps = vali_gen_samples,\n",
+    "                                    class_weight = None,\n",
+    "                                    workers = 1)"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "### Plot Errores"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "plot_epochs = range(training_epochs)\n",
+    "plot_loss = lstm_train.history[\"loss\"]\n",
+    "plot_val_loss = lstm_train.history[\"val_loss\"]\n",
+    "\n",
+    "plt.plot(plot_epochs, plot_loss, plot_val_loss)\n",
+    "plt.show()"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "## Guardar Modelo LSTM"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "if save_lstm:\n",
+    "    lstm.save(\"lstm_model.h5\")\n",
+    "    \n",
+    "else:\n",
+    "    print(\"Modelo no guardado.\")"
+   ]
+  }
+ ],
+ "metadata": {
+  "kernelspec": {
+   "display_name": "Python 3",
+   "language": "python",
+   "name": "python3"
+  },
+  "language_info": {
+   "codemirror_mode": {
+    "name": "ipython",
+    "version": 3
+   },
+   "file_extension": ".py",
+   "mimetype": "text/x-python",
+   "name": "python",
+   "nbconvert_exporter": "python",
+   "pygments_lexer": "ipython3",
+   "version": "3.6.8"
+  },
+  "toc": {
+   "base_numbering": 1,
+   "nav_menu": {},
+   "number_sections": true,
+   "sideBar": true,
+   "skip_h1_title": false,
+   "title_cell": "Table of Contents",
+   "title_sidebar": "Contents",
+   "toc_cell": false,
+   "toc_position": {},
+   "toc_section_display": true,
+   "toc_window_display": false
+  }
+ },
+ "nbformat": 4,
+ "nbformat_minor": 2
+}
diff --git a/Scripts/Notebooks/Entrenamiento Modelos Alternativos.ipynb b/Scripts/Notebooks/Entrenamiento Modelos Alternativos.ipynb
new file mode 100644
index 0000000000000000000000000000000000000000..bce516d4f9d22776161809f3ebd076187bc31c33
--- /dev/null
+++ b/Scripts/Notebooks/Entrenamiento Modelos Alternativos.ipynb	
@@ -0,0 +1,2515 @@
+{
+ "cells": [
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "# Librerías"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 1,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "import os\n",
+    "import sys\n",
+    "import tensorflow as tf\n",
+    "import numpy as np\n",
+    "import scipy.misc\n",
+    "import matplotlib.pyplot as plt"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 2,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "sys.path.append(\"../tools\")  # Herramientas propias de MantaFlow\n",
+    "import uniio  # Lectura de ficheros .uni"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "# Hiperparámetros"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 3,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "num_sims = 2000  # num_sims - 1000 escenas. \n",
+    "frames = 200  # Frames por escena.\n",
+    "\n",
+    "ae_epochs = 10\n",
+    "lstm_epochs = 10\n",
+    "pretrain_epochs = 1\n",
+    "\n",
+    "ae_batch_size = 512\n",
+    "lstm_batch_size = 512\n",
+    "\n",
+    "lstm_time_steps = 6\n",
+    "lstm_out_time_steps = 1\n",
+    "\n",
+    "save_autoencoder = True\n",
+    "save_lstm = True\n",
+    "\n",
+    "first_train_run = True"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "# Datos iniciales"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Inicializamos las seed para funciones random. Al ser inicializadas al mismo número, el resultado no cambiará en cada ejecución."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 4,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "np.random.seed(13)"
+   ]
+  },
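+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "This only seeds numpy; for fully repeatable runs the TensorFlow graph would also need a seed. A minimal sketch, assuming the TF 1.x API used by this notebook:"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# Sketch (assumption, TF 1.x API): also seed the graph-level TensorFlow RNG.\n",
+    "tf.set_random_seed(13)"
+   ]
+  },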
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Ruta a los datos de simulación, donde también se guardan los resultados."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 5,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "base_path = \"../data\""
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "## Carga de datos"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Podemos elegir el número de escenas y los frames de cada una, dependiendo de la configuración de los simuladores clásicos."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 6,
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "Cargamos 1000 escenas, con 200 frames cada una.\n",
+      "Trabajamos con un total de 200000 frames.\n"
+     ]
+    }
+   ],
+   "source": [
+    "print(\"Cargamos {} escenas, con {} frames cada una.\".format(num_sims-1000, frames))\n",
+    "print(\"Trabajamos con un total de {} frames.\".format((num_sims-1000) * frames))"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Cargamos los datos desde los ficheros .uni en arrays de numpy. Los .uni son ficheros propios de MantaFlow, en los que se guarda los resultados de los simuladores clásicos. En este caso cargamos los datos de densidad de humo simulados previamente."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 7,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "densities = []\n",
+    "\n",
+    "for sim in range(1000, num_sims):\n",
+    "    \n",
+    "    if os.path.exists(\"%s/simSimple_%04d\" % (base_path, sim)):  # Comprueba la existencia de las carpetas (cada una 100 frames de datos).\n",
+    "        \n",
+    "        for i in range(0, frames):\n",
+    "            \n",
+    "            filename = \"%s/simSimple_%04d/density_%04d.uni\"  # Nombre de cada frame (densidad).\n",
+    "            uni_path = filename % (base_path, sim, i)  # 200 frames por sim, rellena parametros de la ruta.\n",
+    "            header, content = uniio.readUni(uni_path)  # Devuelve un array Numpy [Z, Y, X, C].\n",
+    "            \n",
+    "            h = header[\"dimX\"]\n",
+    "            w = header[\"dimY\"]\n",
+    "            \n",
+    "            arr = content[:, ::-1, :, :]  # Cambia el orden de Y.\n",
+    "            arr = np.reshape(arr, [w, h, 1])  # Deshecha Z.\n",
+    "            \n",
+    "            densities.append(arr)"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Devuelve los datos de cada frame (canal de grises, 0 a 255) en una lista de Python. En este caso las imagenes son de 64x64 pixels. (64, 64, 1)"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Necesitamos al menos 2 simulaciones para trabajar de manera adecuada."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 8,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "load_num = len(densities)\n",
+    "\n",
+    "if load_num < 2 * frames:\n",
+    "    \n",
+    "    print(\"Error - Usa al menos dos simulaciones completas\")\n",
+    "    \n",
+    "    exit(True)"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Convertimos la lista \"densities\" en un array de Numpy."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 9,
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "Forma del array: (200000, 64, 64, 1)\n",
+      "Dimensiones del array: 4\n",
+      "Número de pixels en total: 819200000\n"
+     ]
+    }
+   ],
+   "source": [
+    "densities = np.reshape(densities, (len(densities), 64, 64, 1))\n",
+    "\n",
+    "print(\"Forma del array: {}\".format(densities.shape))\n",
+    "print(\"Dimensiones del array: {}\".format(densities.ndim))\n",
+    "print(\"Número de pixels en total: {}\".format(densities.size))"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "## Creación del set de validación"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Con el fin de entrenar correctamente a los modelos Deep Learning, separamos los datos de densidad en un set de entrenamiento y otro de validación. Creamos el set de validación de entre los datos de simulación generados, al menos una simulación completa o el 10% de los datos (el que sea mayor de los dos)."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 10,
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "Separamos en 180000 frames de entrenamiento y 20000 frames de validación.\n"
+     ]
+    }
+   ],
+   "source": [
+    "vali_set_size = max(200, int(load_num * 0.1))  # Al menos una simu completa o el 10% de los datos.\n",
+    "\n",
+    "vali_data = densities[load_num - vali_set_size : load_num, :]  # \"load_num\" datos del final de \"densities\".\n",
+    "train_data = densities[0 : load_num - vali_set_size, :]  # El resto de datos del principio de \"densities\".\n",
+    "\n",
+    "print(\"Separamos en {} frames de entrenamiento y {} frames de validación.\".format(train_data.shape[0], vali_data.shape[0]))"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Convertimos los datos de entrenamiento y validación en arrays."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 11,
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "Forma del set de entrenamiento: (180000, 64, 64, 1)\n",
+      "Forma del set de validación: (20000, 64, 64, 1)\n"
+     ]
+    }
+   ],
+   "source": [
+    "train_data = np.reshape(train_data, (len(train_data), 64, 64, 1))\n",
+    "vali_data = np.reshape(vali_data, (len(vali_data), 64, 64, 1))\n",
+    "\n",
+    "print(\"Forma del set de entrenamiento: {}\".format(train_data.shape))\n",
+    "print(\"Forma del set de validación: {}\".format(vali_data.shape))"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "# Modelos Alternativos Autoencoder"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Analizamos varias formas de componer un autoencoder, desde la más simple hasta otras formas alternativas."
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "## Parametros"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 12,
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stderr",
+     "output_type": "stream",
+     "text": [
+      "Using TensorFlow backend.\n"
+     ]
+    }
+   ],
+   "source": [
+    "from keras.layers import Dense, LSTM, Conv2D, MaxPooling2D, UpSampling2D \n",
+    "from keras.layers import Input, Flatten, Reshape, Activation, BatchNormalization, Dropout\n",
+    "from keras.models import Model, load_model\n",
+    "from keras.callbacks import ModelCheckpoint"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 13,
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "(64, 64, 1)\n"
+     ]
+    }
+   ],
+   "source": [
+    "input_shape = (train_data.shape[1], \n",
+    "               train_data.shape[2], \n",
+    "               train_data.shape[3])\n",
+    "\n",
+    "print(input_shape)"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 54,
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "Total pixels in a single frame: 4096\n",
+      "Size of encoded space: 256\n"
+     ]
+    }
+   ],
+   "source": [
+    "image_dim = input_shape[0] * input_shape[1]\n",
+    "print(\"Total pixels in a single frame: {}\".format(image_dim))\n",
+    "encoding_dim = 256\n",
+    "print(\"Size of encoded space: {}\".format(encoding_dim))"
+   ]
+  },
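+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "The encoder therefore compresses each 64x64 frame by a factor of 4096 / 256 = 16."
+   ]
+  },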
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "## Autoencoder Deep 1"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 55,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "batch_normalization = False\n",
+    "dropout = 0.0"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "### Encoder Deep 1"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 57,
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "Model: \"model_14\"\n",
+      "_________________________________________________________________\n",
+      "Layer (type)                 Output Shape              Param #   \n",
+      "=================================================================\n",
+      "input_15 (InputLayer)        (None, 64, 64, 1)         0         \n",
+      "_________________________________________________________________\n",
+      "flatten_7 (Flatten)          (None, 4096)              0         \n",
+      "_________________________________________________________________\n",
+      "dense_22 (Dense)             (None, 256)               1048832   \n",
+      "_________________________________________________________________\n",
+      "activation_22 (Activation)   (None, 256)               0         \n",
+      "=================================================================\n",
+      "Total params: 1,048,832\n",
+      "Trainable params: 1,048,832\n",
+      "Non-trainable params: 0\n",
+      "_________________________________________________________________\n"
+     ]
+    }
+   ],
+   "source": [
+    "encoder_input = Input(shape = input_shape)\n",
+    "\n",
+    "x = encoder_input\n",
+    "\n",
+    "x = Flatten()(x)\n",
+    "\n",
+    "x = Dense(units = encoding_dim)(x)\n",
+    "x = Activation(activation = \"relu\")(x)\n",
+    "x = BatchNormalization()(x) if batch_normalization else x\n",
+    "x = Dropout(dropout)(x) if dropout > 0.0 else x\n",
+    "\n",
+    "encoder_output = x\n",
+    "\n",
+    "encoder_deep1 = Model(inputs = encoder_input, outputs = encoder_output)\n",
+    "encoder_deep1.summary()"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "### Decoder Deep 1"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 58,
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "Model: \"model_15\"\n",
+      "_________________________________________________________________\n",
+      "Layer (type)                 Output Shape              Param #   \n",
+      "=================================================================\n",
+      "input_16 (InputLayer)        (None, 256)               0         \n",
+      "_________________________________________________________________\n",
+      "dense_23 (Dense)             (None, 4096)              1052672   \n",
+      "_________________________________________________________________\n",
+      "activation_23 (Activation)   (None, 4096)              0         \n",
+      "_________________________________________________________________\n",
+      "reshape_5 (Reshape)          (None, 64, 64, 1)         0         \n",
+      "=================================================================\n",
+      "Total params: 1,052,672\n",
+      "Trainable params: 1,052,672\n",
+      "Non-trainable params: 0\n",
+      "_________________________________________________________________\n"
+     ]
+    }
+   ],
+   "source": [
+    "decoder_input = Input(shape = (encoder_deep1.output_shape[1],))\n",
+    "\n",
+    "x = decoder_input\n",
+    "\n",
+    "x = Dense(units = image_dim)(x)\n",
+    "x = Activation(activation = \"sigmoid\")(x)\n",
+    "x = BatchNormalization()(x) if batch_normalization else x\n",
+    "x = Dropout(dropout)(x) if dropout > 0.0 else x\n",
+    "\n",
+    "x = Reshape(target_shape = input_shape)(x)\n",
+    "\n",
+    "decoder_output = x\n",
+    "\n",
+    "decoder_deep1 = Model(inputs = decoder_input, outputs = decoder_output)\n",
+    "decoder_deep1.summary()"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "### Optimizador Deep 1"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 18,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "from keras.optimizers import Adam"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 19,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "adam_learning_rate = 0.00015  # El learning rate de Adam (tamaño step)\n",
+    "adam_epsilon = 1e-8  # Previene problemas de división por 0.\n",
+    "adam_lr_decay = 1e-05  # Learning rate decay"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 20,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "ae_optimizer = Adam(lr = adam_learning_rate, \n",
+    "                    epsilon = adam_epsilon, \n",
+    "                    decay = adam_lr_decay)"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "### Autoencoder Deep 1"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 61,
+   "metadata": {
+    "scrolled": true
+   },
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "Model: \"model_18\"\n",
+      "_________________________________________________________________\n",
+      "Layer (type)                 Output Shape              Param #   \n",
+      "=================================================================\n",
+      "input_19 (InputLayer)        (None, 64, 64, 1)         0         \n",
+      "_________________________________________________________________\n",
+      "model_14 (Model)             (None, 256)               1048832   \n",
+      "_________________________________________________________________\n",
+      "model_15 (Model)             (None, 64, 64, 1)         1052672   \n",
+      "=================================================================\n",
+      "Total params: 2,101,504\n",
+      "Trainable params: 2,101,504\n",
+      "Non-trainable params: 0\n",
+      "_________________________________________________________________\n"
+     ]
+    }
+   ],
+   "source": [
+    "autoencoder_input = Input(shape = input_shape)\n",
+    "\n",
+    "x = autoencoder_input\n",
+    "\n",
+    "x = encoder_deep1(x)\n",
+    "x = decoder_deep1(x)\n",
+    "\n",
+    "autoencoder_output = x\n",
+    "\n",
+    "autoencoder_deep1 = Model(inputs = autoencoder_input, outputs = autoencoder_output)\n",
+    "autoencoder_deep1.compile(optimizer = ae_optimizer, loss = \"mse\", metrics = [\"mae\"])\n",
+    "autoencoder_deep1.summary()\n",
+    "\n",
+    "autoencoder_clean_weights = autoencoder_deep1.get_weights()"
+   ]
+  },
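+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "The get_weights() snapshot taken right after compilation lets us restore the model to the same untrained state with set_weights before each comparison run."
+   ]
+  },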
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "### Entrenamiento"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 77,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "ae_epochs_list = 25\n",
+    "ae_batch_multiple = True\n",
+    "ae_batch_list = [1024, 512, 256, 128, 64, 32, 16, 8, 4]  # Distintas batch size para comparación\n",
+    "\n",
+    "ae_epochs = 5\n",
+    "ae_batch_size = 512"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 78,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "mc = ModelCheckpoint(filepath = \"Modelos/model_autoencoder_deep1.h5\", \n",
+    "                     monitor = \"val_loss\", \n",
+    "                     mode = \"min\", \n",
+    "                     save_best_only = True,\n",
+    "                     verbose = 1)"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 79,
+   "metadata": {
+    "scrolled": true
+   },
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "Train on 180000 samples, validate on 20000 samples\n",
+      "Epoch 1/25\n",
+      "180000/180000 [==============================] - 16s 90us/step - loss: 0.0242 - mae: 0.0712 - val_loss: 0.0050 - val_mae: 0.0255\n",
+      "\n",
+      "Epoch 00001: val_loss improved from inf to 0.00496, saving model to Modelos/model_autoencoder_deep1.h5\n",
+      "Epoch 2/25\n",
+      "180000/180000 [==============================] - 16s 89us/step - loss: 0.0049 - mae: 0.0254 - val_loss: 0.0049 - val_mae: 0.0248\n",
+      "\n",
+      "Epoch 00002: val_loss improved from 0.00496 to 0.00493, saving model to Modelos/model_autoencoder_deep1.h5\n",
+      "Epoch 3/25\n",
+      "180000/180000 [==============================] - 16s 88us/step - loss: 0.0048 - mae: 0.0247 - val_loss: 0.0049 - val_mae: 0.0244\n",
+      "\n",
+      "Epoch 00003: val_loss improved from 0.00493 to 0.00491, saving model to Modelos/model_autoencoder_deep1.h5\n",
+      "Epoch 4/25\n",
+      "180000/180000 [==============================] - 16s 88us/step - loss: 0.0048 - mae: 0.0243 - val_loss: 0.0049 - val_mae: 0.0241\n",
+      "\n",
+      "Epoch 00004: val_loss improved from 0.00491 to 0.00490, saving model to Modelos/model_autoencoder_deep1.h5\n",
+      "Epoch 5/25\n",
+      "180000/180000 [==============================] - 16s 88us/step - loss: 0.0048 - mae: 0.0240 - val_loss: 0.0049 - val_mae: 0.0238\n",
+      "\n",
+      "Epoch 00005: val_loss improved from 0.00490 to 0.00489, saving model to Modelos/model_autoencoder_deep1.h5\n",
+      "Epoch 6/25\n",
+      "180000/180000 [==============================] - 16s 88us/step - loss: 0.0048 - mae: 0.0238 - val_loss: 0.0049 - val_mae: 0.0237\n",
+      "\n",
+      "Epoch 00006: val_loss improved from 0.00489 to 0.00488, saving model to Modelos/model_autoencoder_deep1.h5\n",
+      "Epoch 7/25\n",
+      "180000/180000 [==============================] - 16s 88us/step - loss: 0.0048 - mae: 0.0236 - val_loss: 0.0049 - val_mae: 0.0236\n",
+      "\n",
+      "Epoch 00007: val_loss improved from 0.00488 to 0.00487, saving model to Modelos/model_autoencoder_deep1.h5\n",
+      "Epoch 8/25\n",
+      "180000/180000 [==============================] - 16s 88us/step - loss: 0.0048 - mae: 0.0235 - val_loss: 0.0049 - val_mae: 0.0235\n",
+      "\n",
+      "Epoch 00008: val_loss improved from 0.00487 to 0.00487, saving model to Modelos/model_autoencoder_deep1.h5\n",
+      "Epoch 9/25\n",
+      "180000/180000 [==============================] - 16s 88us/step - loss: 0.0047 - mae: 0.0234 - val_loss: 0.0049 - val_mae: 0.0234\n",
+      "\n",
+      "Epoch 00009: val_loss improved from 0.00487 to 0.00486, saving model to Modelos/model_autoencoder_deep1.h5\n",
+      "Epoch 10/25\n",
+      "180000/180000 [==============================] - 16s 89us/step - loss: 0.0047 - mae: 0.0234 - val_loss: 0.0048 - val_mae: 0.0234\n",
+      "\n",
+      "Epoch 00010: val_loss improved from 0.00486 to 0.00484, saving model to Modelos/model_autoencoder_deep1.h5\n",
+      "Epoch 11/25\n",
+      "180000/180000 [==============================] - 16s 89us/step - loss: 0.0047 - mae: 0.0234 - val_loss: 0.0048 - val_mae: 0.0235\n",
+      "\n",
+      "Epoch 00011: val_loss improved from 0.00484 to 0.00480, saving model to Modelos/model_autoencoder_deep1.h5\n",
+      "Epoch 12/25\n",
+      "180000/180000 [==============================] - 16s 90us/step - loss: 0.0047 - mae: 0.0234 - val_loss: 0.0047 - val_mae: 0.0235\n",
+      "\n",
+      "Epoch 00012: val_loss improved from 0.00480 to 0.00475, saving model to Modelos/model_autoencoder_deep1.h5\n",
+      "Epoch 13/25\n",
+      "180000/180000 [==============================] - 16s 89us/step - loss: 0.0046 - mae: 0.0233 - val_loss: 0.0047 - val_mae: 0.0234\n",
+      "\n",
+      "Epoch 00013: val_loss improved from 0.00475 to 0.00472, saving model to Modelos/model_autoencoder_deep1.h5\n",
+      "Epoch 14/25\n",
+      "180000/180000 [==============================] - 16s 89us/step - loss: 0.0046 - mae: 0.0233 - val_loss: 0.0047 - val_mae: 0.0234\n",
+      "\n",
+      "Epoch 00014: val_loss improved from 0.00472 to 0.00468, saving model to Modelos/model_autoencoder_deep1.h5\n",
+      "Epoch 15/25\n",
+      "180000/180000 [==============================] - 16s 91us/step - loss: 0.0046 - mae: 0.0232 - val_loss: 0.0046 - val_mae: 0.0234\n",
+      "\n",
+      "Epoch 00015: val_loss improved from 0.00468 to 0.00464, saving model to Modelos/model_autoencoder_deep1.h5\n",
+      "Epoch 16/25\n",
+      "180000/180000 [==============================] - 16s 90us/step - loss: 0.0045 - mae: 0.0232 - val_loss: 0.0046 - val_mae: 0.0233\n",
+      "\n",
+      "Epoch 00016: val_loss improved from 0.00464 to 0.00456, saving model to Modelos/model_autoencoder_deep1.h5\n",
+      "Epoch 17/25\n",
+      "180000/180000 [==============================] - 16s 89us/step - loss: 0.0044 - mae: 0.0231 - val_loss: 0.0045 - val_mae: 0.0232\n",
+      "\n",
+      "Epoch 00017: val_loss improved from 0.00456 to 0.00447, saving model to Modelos/model_autoencoder_deep1.h5\n",
+      "Epoch 18/25\n",
+      "180000/180000 [==============================] - 16s 90us/step - loss: 0.0043 - mae: 0.0230 - val_loss: 0.0044 - val_mae: 0.0231\n",
+      "\n",
+      "Epoch 00018: val_loss improved from 0.00447 to 0.00439, saving model to Modelos/model_autoencoder_deep1.h5\n",
+      "Epoch 19/25\n",
+      "180000/180000 [==============================] - 16s 90us/step - loss: 0.0042 - mae: 0.0228 - val_loss: 0.0043 - val_mae: 0.0230\n",
+      "\n",
+      "Epoch 00019: val_loss improved from 0.00439 to 0.00430, saving model to Modelos/model_autoencoder_deep1.h5\n",
+      "Epoch 20/25\n",
+      "180000/180000 [==============================] - 16s 89us/step - loss: 0.0041 - mae: 0.0227 - val_loss: 0.0042 - val_mae: 0.0228\n",
+      "\n",
+      "Epoch 00020: val_loss improved from 0.00430 to 0.00419, saving model to Modelos/model_autoencoder_deep1.h5\n",
+      "Epoch 21/25\n",
+      "180000/180000 [==============================] - 16s 90us/step - loss: 0.0040 - mae: 0.0225 - val_loss: 0.0041 - val_mae: 0.0227\n",
+      "\n",
+      "Epoch 00021: val_loss improved from 0.00419 to 0.00406, saving model to Modelos/model_autoencoder_deep1.h5\n",
+      "Epoch 22/25\n",
+      "180000/180000 [==============================] - 16s 91us/step - loss: 0.0039 - mae: 0.0224 - val_loss: 0.0039 - val_mae: 0.0225\n",
+      "\n",
+      "Epoch 00022: val_loss improved from 0.00406 to 0.00391, saving model to Modelos/model_autoencoder_deep1.h5\n",
+      "Epoch 23/25\n",
+      "180000/180000 [==============================] - 16s 89us/step - loss: 0.0037 - mae: 0.0221 - val_loss: 0.0037 - val_mae: 0.0223\n",
+      "\n",
+      "Epoch 00023: val_loss improved from 0.00391 to 0.00374, saving model to Modelos/model_autoencoder_deep1.h5\n",
+      "Epoch 24/25\n",
+      "180000/180000 [==============================] - 16s 90us/step - loss: 0.0035 - mae: 0.0218 - val_loss: 0.0035 - val_mae: 0.0220\n",
+      "\n",
+      "Epoch 00024: val_loss improved from 0.00374 to 0.00355, saving model to Modelos/model_autoencoder_deep1.h5\n",
+      "Epoch 25/25\n",
+      "180000/180000 [==============================] - 16s 89us/step - loss: 0.0033 - mae: 0.0215 - val_loss: 0.0033 - val_mae: 0.0217\n",
+      "\n",
+      "Epoch 00025: val_loss improved from 0.00355 to 0.00333, saving model to Modelos/model_autoencoder_deep1.h5\n",
+      "Train on 180000 samples, validate on 20000 samples\n",
+      "Epoch 1/25\n",
+      "180000/180000 [==============================] - 19s 107us/step - loss: 0.0127 - mae: 0.0446 - val_loss: 0.0049 - val_mae: 0.0247\n",
+      "\n",
+      "Epoch 00001: val_loss did not improve from 0.00333\n",
+      "Epoch 2/25\n",
+      "180000/180000 [==============================] - 19s 107us/step - loss: 0.0048 - mae: 0.0245 - val_loss: 0.0049 - val_mae: 0.0240\n",
+      "\n",
+      "Epoch 00002: val_loss did not improve from 0.00333\n",
+      "Epoch 3/25\n",
+      "180000/180000 [==============================] - 20s 113us/step - loss: 0.0048 - mae: 0.0239 - val_loss: 0.0049 - val_mae: 0.0237\n",
+      "\n",
+      "Epoch 00003: val_loss did not improve from 0.00333\n",
+      "Epoch 4/25\n",
+      "180000/180000 [==============================] - 19s 107us/step - loss: 0.0048 - mae: 0.0236 - val_loss: 0.0049 - val_mae: 0.0235\n",
+      "\n",
+      "Epoch 00004: val_loss did not improve from 0.00333\n",
+      "Epoch 5/25\n",
+      "180000/180000 [==============================] - 19s 108us/step - loss: 0.0047 - mae: 0.0234 - val_loss: 0.0049 - val_mae: 0.0234\n",
+      "\n",
+      "Epoch 00005: val_loss did not improve from 0.00333\n",
+      "Epoch 6/25\n",
+      "180000/180000 [==============================] - 20s 110us/step - loss: 0.0047 - mae: 0.0233 - val_loss: 0.0048 - val_mae: 0.0234\n",
+      "\n",
+      "Epoch 00006: val_loss did not improve from 0.00333\n",
+      "Epoch 7/25\n",
+      "180000/180000 [==============================] - 19s 107us/step - loss: 0.0047 - mae: 0.0234 - val_loss: 0.0047 - val_mae: 0.0234\n",
+      "\n",
+      "Epoch 00007: val_loss did not improve from 0.00333\n",
+      "Epoch 8/25\n",
+      "180000/180000 [==============================] - 19s 108us/step - loss: 0.0046 - mae: 0.0232 - val_loss: 0.0046 - val_mae: 0.0233\n",
+      "\n",
+      "Epoch 00008: val_loss did not improve from 0.00333\n",
+      "Epoch 9/25\n",
+      "180000/180000 [==============================] - 19s 108us/step - loss: 0.0045 - mae: 0.0231 - val_loss: 0.0045 - val_mae: 0.0232\n",
+      "\n",
+      "Epoch 00009: val_loss did not improve from 0.00333\n",
+      "Epoch 10/25\n",
+      "180000/180000 [==============================] - 19s 106us/step - loss: 0.0043 - mae: 0.0229 - val_loss: 0.0043 - val_mae: 0.0230\n"
+     ]
+    },
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "\n",
+      "Epoch 00010: val_loss did not improve from 0.00333\n",
+      "Epoch 11/25\n",
+      "180000/180000 [==============================] - 19s 105us/step - loss: 0.0041 - mae: 0.0227 - val_loss: 0.0041 - val_mae: 0.0228\n",
+      "\n",
+      "Epoch 00011: val_loss did not improve from 0.00333\n",
+      "Epoch 12/25\n",
+      "180000/180000 [==============================] - 19s 105us/step - loss: 0.0038 - mae: 0.0223 - val_loss: 0.0037 - val_mae: 0.0223\n",
+      "\n",
+      "Epoch 00012: val_loss did not improve from 0.00333\n",
+      "Epoch 13/25\n",
+      "180000/180000 [==============================] - 19s 106us/step - loss: 0.0034 - mae: 0.0218 - val_loss: 0.0033 - val_mae: 0.0218\n",
+      "\n",
+      "Epoch 00013: val_loss did not improve from 0.00333\n",
+      "Epoch 14/25\n",
+      "180000/180000 [==============================] - 19s 105us/step - loss: 0.0030 - mae: 0.0210 - val_loss: 0.0030 - val_mae: 0.0209\n",
+      "\n",
+      "Epoch 00014: val_loss improved from 0.00333 to 0.00295, saving model to Modelos/model_autoencoder_deep1.h5\n",
+      "Epoch 15/25\n",
+      "180000/180000 [==============================] - 19s 105us/step - loss: 0.0027 - mae: 0.0201 - val_loss: 0.0026 - val_mae: 0.0202\n",
+      "\n",
+      "Epoch 00015: val_loss improved from 0.00295 to 0.00262, saving model to Modelos/model_autoencoder_deep1.h5\n",
+      "Epoch 16/25\n",
+      "180000/180000 [==============================] - 19s 105us/step - loss: 0.0024 - mae: 0.0193 - val_loss: 0.0024 - val_mae: 0.0194\n",
+      "\n",
+      "Epoch 00016: val_loss improved from 0.00262 to 0.00237, saving model to Modelos/model_autoencoder_deep1.h5\n",
+      "Epoch 17/25\n",
+      "180000/180000 [==============================] - 19s 106us/step - loss: 0.0022 - mae: 0.0187 - val_loss: 0.0022 - val_mae: 0.0190\n",
+      "\n",
+      "Epoch 00017: val_loss improved from 0.00237 to 0.00217, saving model to Modelos/model_autoencoder_deep1.h5\n",
+      "Epoch 18/25\n",
+      "180000/180000 [==============================] - 19s 107us/step - loss: 0.0020 - mae: 0.0182 - val_loss: 0.0020 - val_mae: 0.0185\n",
+      "\n",
+      "Epoch 00018: val_loss improved from 0.00217 to 0.00197, saving model to Modelos/model_autoencoder_deep1.h5\n",
+      "Epoch 19/25\n",
+      "180000/180000 [==============================] - 19s 108us/step - loss: 0.0018 - mae: 0.0177 - val_loss: 0.0018 - val_mae: 0.0180\n",
+      "\n",
+      "Epoch 00019: val_loss improved from 0.00197 to 0.00180, saving model to Modelos/model_autoencoder_deep1.h5\n",
+      "Epoch 20/25\n",
+      "180000/180000 [==============================] - 19s 107us/step - loss: 0.0016 - mae: 0.0172 - val_loss: 0.0017 - val_mae: 0.0176\n",
+      "\n",
+      "Epoch 00020: val_loss improved from 0.00180 to 0.00166, saving model to Modelos/model_autoencoder_deep1.h5\n",
+      "Epoch 21/25\n",
+      "180000/180000 [==============================] - 19s 107us/step - loss: 0.0015 - mae: 0.0167 - val_loss: 0.0015 - val_mae: 0.0171\n",
+      "\n",
+      "Epoch 00021: val_loss improved from 0.00166 to 0.00155, saving model to Modelos/model_autoencoder_deep1.h5\n",
+      "Epoch 22/25\n",
+      "180000/180000 [==============================] - 19s 106us/step - loss: 0.0013 - mae: 0.0163 - val_loss: 0.0015 - val_mae: 0.0167\n",
+      "\n",
+      "Epoch 00022: val_loss improved from 0.00155 to 0.00145, saving model to Modelos/model_autoencoder_deep1.h5\n",
+      "Epoch 23/25\n",
+      "180000/180000 [==============================] - 19s 104us/step - loss: 0.0012 - mae: 0.0158 - val_loss: 0.0014 - val_mae: 0.0163\n",
+      "\n",
+      "Epoch 00023: val_loss improved from 0.00145 to 0.00137, saving model to Modelos/model_autoencoder_deep1.h5\n",
+      "Epoch 24/25\n",
+      "180000/180000 [==============================] - 19s 108us/step - loss: 0.0012 - mae: 0.0154 - val_loss: 0.0013 - val_mae: 0.0160\n",
+      "\n",
+      "Epoch 00024: val_loss improved from 0.00137 to 0.00131, saving model to Modelos/model_autoencoder_deep1.h5\n",
+      "Epoch 25/25\n",
+      "180000/180000 [==============================] - 19s 108us/step - loss: 0.0011 - mae: 0.0150 - val_loss: 0.0012 - val_mae: 0.0157\n",
+      "\n",
+      "Epoch 00025: val_loss improved from 0.00131 to 0.00125, saving model to Modelos/model_autoencoder_deep1.h5\n",
+      "Train on 180000 samples, validate on 20000 samples\n",
+      "Epoch 1/25\n",
+      "180000/180000 [==============================] - 24s 135us/step - loss: 0.0086 - mae: 0.0347 - val_loss: 0.0049 - val_mae: 0.0242\n",
+      "\n",
+      "Epoch 00001: val_loss did not improve from 0.00125\n",
+      "Epoch 2/25\n",
+      "180000/180000 [==============================] - 24s 131us/step - loss: 0.0048 - mae: 0.0238 - val_loss: 0.0049 - val_mae: 0.0236\n",
+      "\n",
+      "Epoch 00002: val_loss did not improve from 0.00125\n",
+      "Epoch 3/25\n",
+      "180000/180000 [==============================] - 24s 131us/step - loss: 0.0047 - mae: 0.0235 - val_loss: 0.0047 - val_mae: 0.0235\n",
+      "\n",
+      "Epoch 00003: val_loss did not improve from 0.00125\n",
+      "Epoch 4/25\n",
+      "180000/180000 [==============================] - 24s 132us/step - loss: 0.0046 - mae: 0.0233 - val_loss: 0.0046 - val_mae: 0.0232\n",
+      "\n",
+      "Epoch 00004: val_loss did not improve from 0.00125\n",
+      "Epoch 5/25\n",
+      "180000/180000 [==============================] - 23s 129us/step - loss: 0.0043 - mae: 0.0230 - val_loss: 0.0043 - val_mae: 0.0230\n",
+      "\n",
+      "Epoch 00005: val_loss did not improve from 0.00125\n",
+      "Epoch 6/25\n",
+      "180000/180000 [==============================] - 24s 133us/step - loss: 0.0040 - mae: 0.0226 - val_loss: 0.0039 - val_mae: 0.0225\n",
+      "\n",
+      "Epoch 00006: val_loss did not improve from 0.00125\n",
+      "Epoch 7/25\n",
+      "180000/180000 [==============================] - 24s 131us/step - loss: 0.0034 - mae: 0.0219 - val_loss: 0.0032 - val_mae: 0.0216\n",
+      "\n",
+      "Epoch 00007: val_loss did not improve from 0.00125\n",
+      "Epoch 8/25\n",
+      "180000/180000 [==============================] - 24s 132us/step - loss: 0.0027 - mae: 0.0207 - val_loss: 0.0025 - val_mae: 0.0204\n",
+      "\n",
+      "Epoch 00008: val_loss did not improve from 0.00125\n",
+      "Epoch 9/25\n",
+      "180000/180000 [==============================] - 24s 131us/step - loss: 0.0021 - mae: 0.0194 - val_loss: 0.0019 - val_mae: 0.0192\n",
+      "\n",
+      "Epoch 00009: val_loss did not improve from 0.00125\n",
+      "Epoch 10/25\n",
+      "180000/180000 [==============================] - 24s 132us/step - loss: 0.0017 - mae: 0.0181 - val_loss: 0.0017 - val_mae: 0.0181\n",
+      "\n",
+      "Epoch 00010: val_loss did not improve from 0.00125\n",
+      "Epoch 11/25\n",
+      "180000/180000 [==============================] - 24s 132us/step - loss: 0.0014 - mae: 0.0170 - val_loss: 0.0015 - val_mae: 0.0172\n",
+      "\n",
+      "Epoch 00011: val_loss did not improve from 0.00125\n",
+      "Epoch 12/25\n",
+      "180000/180000 [==============================] - 24s 131us/step - loss: 0.0013 - mae: 0.0161 - val_loss: 0.0014 - val_mae: 0.0165\n",
+      "\n",
+      "Epoch 00012: val_loss did not improve from 0.00125\n",
+      "Epoch 13/25\n",
+      "180000/180000 [==============================] - 24s 131us/step - loss: 0.0011 - mae: 0.0154 - val_loss: 0.0013 - val_mae: 0.0159\n",
+      "\n",
+      "Epoch 00013: val_loss did not improve from 0.00125\n",
+      "Epoch 14/25\n",
+      "180000/180000 [==============================] - 24s 131us/step - loss: 0.0011 - mae: 0.0148 - val_loss: 0.0012 - val_mae: 0.0154\n",
+      "\n",
+      "Epoch 00014: val_loss improved from 0.00125 to 0.00118, saving model to Modelos/model_autoencoder_deep1.h5\n",
+      "Epoch 15/25\n",
+      "180000/180000 [==============================] - 24s 131us/step - loss: 9.7305e-04 - mae: 0.0143 - val_loss: 0.0011 - val_mae: 0.0150\n",
+      "\n",
+      "Epoch 00015: val_loss improved from 0.00118 to 0.00111, saving model to Modelos/model_autoencoder_deep1.h5\n",
+      "Epoch 16/25\n",
+      "180000/180000 [==============================] - 24s 131us/step - loss: 9.0389e-04 - mae: 0.0138 - val_loss: 0.0011 - val_mae: 0.0145\n",
+      "\n",
+      "Epoch 00016: val_loss improved from 0.00111 to 0.00105, saving model to Modelos/model_autoencoder_deep1.h5\n",
+      "Epoch 17/25\n",
+      "180000/180000 [==============================] - 24s 131us/step - loss: 8.4438e-04 - mae: 0.0134 - val_loss: 0.0010 - val_mae: 0.0142\n",
+      "\n",
+      "Epoch 00017: val_loss improved from 0.00105 to 0.00100, saving model to Modelos/model_autoencoder_deep1.h5\n",
+      "Epoch 18/25\n",
+      "180000/180000 [==============================] - 24s 131us/step - loss: 7.9221e-04 - mae: 0.0131 - val_loss: 9.5457e-04 - val_mae: 0.0139\n",
+      "\n",
+      "Epoch 00018: val_loss improved from 0.00100 to 0.00095, saving model to Modelos/model_autoencoder_deep1.h5\n",
+      "Epoch 19/25\n",
+      "180000/180000 [==============================] - 24s 132us/step - loss: 7.4917e-04 - mae: 0.0127 - val_loss: 9.1473e-04 - val_mae: 0.0136\n",
+      "\n",
+      "Epoch 00019: val_loss improved from 0.00095 to 0.00091, saving model to Modelos/model_autoencoder_deep1.h5\n",
+      "Epoch 20/25\n",
+      "180000/180000 [==============================] - 24s 132us/step - loss: 7.1077e-04 - mae: 0.0124 - val_loss: 8.8052e-04 - val_mae: 0.0133\n",
+      "\n",
+      "Epoch 00020: val_loss improved from 0.00091 to 0.00088, saving model to Modelos/model_autoencoder_deep1.h5\n",
+      "Epoch 21/25\n",
+      "180000/180000 [==============================] - 24s 133us/step - loss: 6.7721e-04 - mae: 0.0121 - val_loss: 8.4944e-04 - val_mae: 0.0131\n",
+      "\n",
+      "Epoch 00021: val_loss improved from 0.00088 to 0.00085, saving model to Modelos/model_autoencoder_deep1.h5\n",
+      "Epoch 22/25\n"
+     ]
+    },
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "180000/180000 [==============================] - 23s 129us/step - loss: 6.4841e-04 - mae: 0.0119 - val_loss: 8.2200e-04 - val_mae: 0.0128\n",
+      "\n",
+      "Epoch 00022: val_loss improved from 0.00085 to 0.00082, saving model to Modelos/model_autoencoder_deep1.h5\n",
+      "Epoch 23/25\n",
+      "180000/180000 [==============================] - 23s 128us/step - loss: 6.2076e-04 - mae: 0.0117 - val_loss: 7.9849e-04 - val_mae: 0.0127\n",
+      "\n",
+      "Epoch 00023: val_loss improved from 0.00082 to 0.00080, saving model to Modelos/model_autoencoder_deep1.h5\n",
+      "Epoch 24/25\n",
+      "180000/180000 [==============================] - 23s 130us/step - loss: 5.9638e-04 - mae: 0.0115 - val_loss: 7.7653e-04 - val_mae: 0.0125\n",
+      "\n",
+      "Epoch 00024: val_loss improved from 0.00080 to 0.00078, saving model to Modelos/model_autoencoder_deep1.h5\n",
+      "Epoch 25/25\n",
+      "180000/180000 [==============================] - 23s 129us/step - loss: 5.7506e-04 - mae: 0.0113 - val_loss: 7.5744e-04 - val_mae: 0.0123\n",
+      "\n",
+      "Epoch 00025: val_loss improved from 0.00078 to 0.00076, saving model to Modelos/model_autoencoder_deep1.h5\n",
+      "Train on 180000 samples, validate on 20000 samples\n",
+      "Epoch 1/25\n",
+      "180000/180000 [==============================] - 35s 197us/step - loss: 0.0069 - mae: 0.0305 - val_loss: 0.0049 - val_mae: 0.0238\n",
+      "\n",
+      "Epoch 00001: val_loss did not improve from 0.00076\n",
+      "Epoch 2/25\n",
+      "180000/180000 [==============================] - 33s 186us/step - loss: 0.0047 - mae: 0.0235 - val_loss: 0.0048 - val_mae: 0.0235\n",
+      "\n",
+      "Epoch 00002: val_loss did not improve from 0.00076\n",
+      "Epoch 3/25\n",
+      "180000/180000 [==============================] - 33s 184us/step - loss: 0.0045 - mae: 0.0232 - val_loss: 0.0043 - val_mae: 0.0231\n",
+      "\n",
+      "Epoch 00003: val_loss did not improve from 0.00076\n",
+      "Epoch 4/25\n",
+      "180000/180000 [==============================] - 34s 190us/step - loss: 0.0038 - mae: 0.0225 - val_loss: 0.0034 - val_mae: 0.0222\n",
+      "\n",
+      "Epoch 00004: val_loss did not improve from 0.00076\n",
+      "Epoch 5/25\n",
+      "180000/180000 [==============================] - 34s 186us/step - loss: 0.0025 - mae: 0.0210 - val_loss: 0.0020 - val_mae: 0.0201\n",
+      "\n",
+      "Epoch 00005: val_loss did not improve from 0.00076\n",
+      "Epoch 6/25\n",
+      "180000/180000 [==============================] - 34s 187us/step - loss: 0.0016 - mae: 0.0183 - val_loss: 0.0015 - val_mae: 0.0180\n",
+      "\n",
+      "Epoch 00006: val_loss did not improve from 0.00076\n",
+      "Epoch 7/25\n",
+      "180000/180000 [==============================] - 33s 185us/step - loss: 0.0012 - mae: 0.0166 - val_loss: 0.0013 - val_mae: 0.0166\n",
+      "\n",
+      "Epoch 00007: val_loss did not improve from 0.00076\n",
+      "Epoch 8/25\n",
+      "180000/180000 [==============================] - 34s 187us/step - loss: 0.0010 - mae: 0.0153 - val_loss: 0.0011 - val_mae: 0.0157\n",
+      "\n",
+      "Epoch 00008: val_loss did not improve from 0.00076\n",
+      "Epoch 9/25\n",
+      "180000/180000 [==============================] - 33s 183us/step - loss: 9.0662e-04 - mae: 0.0144 - val_loss: 0.0010 - val_mae: 0.0149\n",
+      "\n",
+      "Epoch 00009: val_loss did not improve from 0.00076\n",
+      "Epoch 10/25\n",
+      "180000/180000 [==============================] - 33s 184us/step - loss: 8.1321e-04 - mae: 0.0136 - val_loss: 9.5986e-04 - val_mae: 0.0143\n",
+      "\n",
+      "Epoch 00010: val_loss did not improve from 0.00076\n",
+      "Epoch 11/25\n",
+      "180000/180000 [==============================] - 33s 184us/step - loss: 7.4251e-04 - mae: 0.0129 - val_loss: 8.9890e-04 - val_mae: 0.0137\n",
+      "\n",
+      "Epoch 00011: val_loss did not improve from 0.00076\n",
+      "Epoch 12/25\n",
+      "180000/180000 [==============================] - 33s 183us/step - loss: 6.8652e-04 - mae: 0.0124 - val_loss: 8.5082e-04 - val_mae: 0.0132\n",
+      "\n",
+      "Epoch 00012: val_loss did not improve from 0.00076\n",
+      "Epoch 13/25\n",
+      "180000/180000 [==============================] - 33s 183us/step - loss: 6.3999e-04 - mae: 0.0120 - val_loss: 8.1068e-04 - val_mae: 0.0129\n",
+      "\n",
+      "Epoch 00013: val_loss did not improve from 0.00076\n",
+      "Epoch 14/25\n",
+      "180000/180000 [==============================] - 33s 183us/step - loss: 6.0179e-04 - mae: 0.0116 - val_loss: 7.7748e-04 - val_mae: 0.0125\n",
+      "\n",
+      "Epoch 00014: val_loss did not improve from 0.00076\n",
+      "Epoch 15/25\n",
+      "180000/180000 [==============================] - 33s 183us/step - loss: 5.6869e-04 - mae: 0.0112 - val_loss: 7.4882e-04 - val_mae: 0.0123\n",
+      "\n",
+      "Epoch 00015: val_loss improved from 0.00076 to 0.00075, saving model to Modelos/model_autoencoder_deep1.h5\n",
+      "Epoch 16/25\n",
+      "180000/180000 [==============================] - 33s 184us/step - loss: 5.4147e-04 - mae: 0.0109 - val_loss: 7.2564e-04 - val_mae: 0.0120\n",
+      "\n",
+      "Epoch 00016: val_loss improved from 0.00075 to 0.00073, saving model to Modelos/model_autoencoder_deep1.h5\n",
+      "Epoch 17/25\n",
+      "180000/180000 [==============================] - 34s 188us/step - loss: 5.1704e-04 - mae: 0.0107 - val_loss: 7.0404e-04 - val_mae: 0.0118\n",
+      "\n",
+      "Epoch 00017: val_loss improved from 0.00073 to 0.00070, saving model to Modelos/model_autoencoder_deep1.h5\n",
+      "Epoch 18/25\n",
+      "180000/180000 [==============================] - 33s 185us/step - loss: 4.9589e-04 - mae: 0.0105 - val_loss: 6.8656e-04 - val_mae: 0.0116\n",
+      "\n",
+      "Epoch 00018: val_loss improved from 0.00070 to 0.00069, saving model to Modelos/model_autoencoder_deep1.h5\n",
+      "Epoch 19/25\n",
+      "180000/180000 [==============================] - 33s 184us/step - loss: 4.7821e-04 - mae: 0.0103 - val_loss: 6.7161e-04 - val_mae: 0.0115\n",
+      "\n",
+      "Epoch 00019: val_loss improved from 0.00069 to 0.00067, saving model to Modelos/model_autoencoder_deep1.h5\n",
+      "Epoch 20/25\n",
+      "180000/180000 [==============================] - 33s 183us/step - loss: 4.6256e-04 - mae: 0.0101 - val_loss: 6.5816e-04 - val_mae: 0.0113\n",
+      "\n",
+      "Epoch 00020: val_loss improved from 0.00067 to 0.00066, saving model to Modelos/model_autoencoder_deep1.h5\n",
+      "Epoch 21/25\n",
+      "180000/180000 [==============================] - 34s 188us/step - loss: 4.4921e-04 - mae: 0.0099 - val_loss: 6.4625e-04 - val_mae: 0.0111\n",
+      "\n",
+      "Epoch 00021: val_loss improved from 0.00066 to 0.00065, saving model to Modelos/model_autoencoder_deep1.h5\n",
+      "Epoch 22/25\n",
+      "180000/180000 [==============================] - 33s 184us/step - loss: 4.3711e-04 - mae: 0.0098 - val_loss: 6.3627e-04 - val_mae: 0.0110\n",
+      "\n",
+      "Epoch 00022: val_loss improved from 0.00065 to 0.00064, saving model to Modelos/model_autoencoder_deep1.h5\n",
+      "Epoch 23/25\n",
+      "180000/180000 [==============================] - 33s 185us/step - loss: 4.2533e-04 - mae: 0.0096 - val_loss: 6.2610e-04 - val_mae: 0.0109\n",
+      "\n",
+      "Epoch 00023: val_loss improved from 0.00064 to 0.00063, saving model to Modelos/model_autoencoder_deep1.h5\n",
+      "Epoch 24/25\n",
+      "180000/180000 [==============================] - 34s 189us/step - loss: 4.1502e-04 - mae: 0.0095 - val_loss: 6.1808e-04 - val_mae: 0.0108\n",
+      "\n",
+      "Epoch 00024: val_loss improved from 0.00063 to 0.00062, saving model to Modelos/model_autoencoder_deep1.h5\n",
+      "Epoch 25/25\n",
+      "180000/180000 [==============================] - 34s 187us/step - loss: 4.0663e-04 - mae: 0.0094 - val_loss: 6.1150e-04 - val_mae: 0.0108\n",
+      "\n",
+      "Epoch 00025: val_loss improved from 0.00062 to 0.00061, saving model to Modelos/model_autoencoder_deep1.h5\n",
+      "Train on 180000 samples, validate on 20000 samples\n",
+      "Epoch 1/25\n",
+      "180000/180000 [==============================] - 57s 316us/step - loss: 0.0060 - mae: 0.0280 - val_loss: 0.0048 - val_mae: 0.0235\n",
+      "\n",
+      "Epoch 00001: val_loss did not improve from 0.00061\n",
+      "Epoch 2/25\n",
+      "180000/180000 [==============================] - 57s 314us/step - loss: 0.0043 - mae: 0.0231 - val_loss: 0.0039 - val_mae: 0.0228\n",
+      "\n",
+      "Epoch 00002: val_loss did not improve from 0.00061\n",
+      "Epoch 3/25\n",
+      "180000/180000 [==============================] - 57s 314us/step - loss: 0.0027 - mae: 0.0215 - val_loss: 0.0020 - val_mae: 0.0198\n",
+      "\n",
+      "Epoch 00003: val_loss did not improve from 0.00061\n",
+      "Epoch 4/25\n",
+      "180000/180000 [==============================] - 56s 313us/step - loss: 0.0015 - mae: 0.0180 - val_loss: 0.0014 - val_mae: 0.0176\n",
+      "\n",
+      "Epoch 00004: val_loss did not improve from 0.00061\n",
+      "Epoch 5/25\n",
+      "180000/180000 [==============================] - 57s 315us/step - loss: 0.0011 - mae: 0.0159 - val_loss: 0.0012 - val_mae: 0.0159\n",
+      "\n",
+      "Epoch 00005: val_loss did not improve from 0.00061\n",
+      "Epoch 6/25\n",
+      "180000/180000 [==============================] - 57s 314us/step - loss: 9.2617e-04 - mae: 0.0145 - val_loss: 0.0010 - val_mae: 0.0148\n",
+      "\n",
+      "Epoch 00006: val_loss did not improve from 0.00061\n",
+      "Epoch 7/25\n",
+      "180000/180000 [==============================] - 56s 313us/step - loss: 7.9364e-04 - mae: 0.0134 - val_loss: 9.2695e-04 - val_mae: 0.0140\n",
+      "\n",
+      "Epoch 00007: val_loss did not improve from 0.00061\n",
+      "Epoch 8/25\n",
+      "180000/180000 [==============================] - 56s 312us/step - loss: 7.0207e-04 - mae: 0.0126 - val_loss: 8.5265e-04 - val_mae: 0.0133\n"
+     ]
+    },
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "\n",
+      "Epoch 00008: val_loss did not improve from 0.00061\n",
+      "Epoch 9/25\n",
+      "180000/180000 [==============================] - 56s 312us/step - loss: 6.3486e-04 - mae: 0.0120 - val_loss: 7.9787e-04 - val_mae: 0.0128\n",
+      "\n",
+      "Epoch 00009: val_loss did not improve from 0.00061\n",
+      "Epoch 10/25\n",
+      "180000/180000 [==============================] - 56s 312us/step - loss: 5.8400e-04 - mae: 0.0114 - val_loss: 7.5453e-04 - val_mae: 0.0124\n",
+      "\n",
+      "Epoch 00010: val_loss did not improve from 0.00061\n",
+      "Epoch 11/25\n",
+      "180000/180000 [==============================] - 57s 319us/step - loss: 5.4413e-04 - mae: 0.0110 - val_loss: 7.1943e-04 - val_mae: 0.0120\n",
+      "\n",
+      "Epoch 00011: val_loss did not improve from 0.00061\n",
+      "Epoch 12/25\n",
+      "180000/180000 [==============================] - 57s 314us/step - loss: 5.1263e-04 - mae: 0.0106 - val_loss: 6.9418e-04 - val_mae: 0.0116\n",
+      "\n",
+      "Epoch 00012: val_loss did not improve from 0.00061\n",
+      "Epoch 13/25\n",
+      "180000/180000 [==============================] - 56s 312us/step - loss: 4.8769e-04 - mae: 0.0103 - val_loss: 6.7404e-04 - val_mae: 0.0114\n",
+      "\n",
+      "Epoch 00013: val_loss did not improve from 0.00061\n",
+      "Epoch 14/25\n",
+      "180000/180000 [==============================] - 57s 318us/step - loss: 4.6729e-04 - mae: 0.0101 - val_loss: 6.5521e-04 - val_mae: 0.0112\n",
+      "\n",
+      "Epoch 00014: val_loss did not improve from 0.00061\n",
+      "Epoch 15/25\n",
+      "180000/180000 [==============================] - 57s 314us/step - loss: 4.5009e-04 - mae: 0.0099 - val_loss: 6.4142e-04 - val_mae: 0.0111\n",
+      "\n",
+      "Epoch 00015: val_loss did not improve from 0.00061\n",
+      "Epoch 16/25\n",
+      "180000/180000 [==============================] - 56s 311us/step - loss: 4.3567e-04 - mae: 0.0097 - val_loss: 6.2873e-04 - val_mae: 0.0109\n",
+      "\n",
+      "Epoch 00016: val_loss did not improve from 0.00061\n",
+      "Epoch 17/25\n",
+      "180000/180000 [==============================] - 56s 312us/step - loss: 4.2337e-04 - mae: 0.0095 - val_loss: 6.1804e-04 - val_mae: 0.0108\n",
+      "\n",
+      "Epoch 00017: val_loss did not improve from 0.00061\n",
+      "Epoch 18/25\n",
+      "180000/180000 [==============================] - 56s 312us/step - loss: 4.1271e-04 - mae: 0.0094 - val_loss: 6.0977e-04 - val_mae: 0.0107\n",
+      "\n",
+      "Epoch 00018: val_loss improved from 0.00061 to 0.00061, saving model to Modelos/model_autoencoder_deep1.h5\n",
+      "Epoch 19/25\n",
+      "180000/180000 [==============================] - 56s 310us/step - loss: 4.0344e-04 - mae: 0.0092 - val_loss: 6.0220e-04 - val_mae: 0.0105\n",
+      "\n",
+      "Epoch 00019: val_loss improved from 0.00061 to 0.00060, saving model to Modelos/model_autoencoder_deep1.h5\n",
+      "Epoch 20/25\n",
+      "180000/180000 [==============================] - 56s 311us/step - loss: 3.9517e-04 - mae: 0.0091 - val_loss: 5.9500e-04 - val_mae: 0.0104\n",
+      "\n",
+      "Epoch 00020: val_loss improved from 0.00060 to 0.00059, saving model to Modelos/model_autoencoder_deep1.h5\n",
+      "Epoch 21/25\n",
+      "180000/180000 [==============================] - 63s 348us/step - loss: 3.8778e-04 - mae: 0.0090 - val_loss: 5.8874e-04 - val_mae: 0.0104\n",
+      "\n",
+      "Epoch 00021: val_loss improved from 0.00059 to 0.00059, saving model to Modelos/model_autoencoder_deep1.h5\n",
+      "Epoch 22/25\n",
+      "165440/180000 [==========================>...] - ETA: 4s - loss: 3.8127e-04 - mae: 0.0089"
+     ]
+    },
+    {
+     "ename": "KeyboardInterrupt",
+     "evalue": "",
+     "output_type": "error",
+     "traceback": [
+      "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m",
+      "\u001b[0;31mKeyboardInterrupt\u001b[0m                         Traceback (most recent call last)",
+      "\u001b[0;32m<ipython-input-79-e5a422a8bc97>\u001b[0m in \u001b[0;36m<module>\u001b[0;34m\u001b[0m\n\u001b[1;32m     10\u001b[0m                                                         \u001b[0mvalidation_data\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0;34m(\u001b[0m\u001b[0mvali_data\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mvali_data\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m     11\u001b[0m                                                         \u001b[0mshuffle\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0;32mTrue\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m---> 12\u001b[0;31m                                                         callbacks = [mc])\n\u001b[0m\u001b[1;32m     13\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m     14\u001b[0m         \u001b[0mplot_epochs\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mrange\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mae_epochs\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
+      "\u001b[0;32m~/PycharmProjects/TensorFlow/venv/lib/python3.6/site-packages/keras/engine/training.py\u001b[0m in \u001b[0;36mfit\u001b[0;34m(self, x, y, batch_size, epochs, verbose, callbacks, validation_split, validation_data, shuffle, class_weight, sample_weight, initial_epoch, steps_per_epoch, validation_steps, validation_freq, max_queue_size, workers, use_multiprocessing, **kwargs)\u001b[0m\n\u001b[1;32m   1237\u001b[0m                                         \u001b[0msteps_per_epoch\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0msteps_per_epoch\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m   1238\u001b[0m                                         \u001b[0mvalidation_steps\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0mvalidation_steps\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m-> 1239\u001b[0;31m                                         validation_freq=validation_freq)\n\u001b[0m\u001b[1;32m   1240\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m   1241\u001b[0m     def evaluate(self,\n",
+      "\u001b[0;32m~/PycharmProjects/TensorFlow/venv/lib/python3.6/site-packages/keras/engine/training_arrays.py\u001b[0m in \u001b[0;36mfit_loop\u001b[0;34m(model, fit_function, fit_inputs, out_labels, batch_size, epochs, verbose, callbacks, val_function, val_inputs, shuffle, initial_epoch, steps_per_epoch, validation_steps, validation_freq)\u001b[0m\n\u001b[1;32m    194\u001b[0m                     \u001b[0mins_batch\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0mi\u001b[0m\u001b[0;34m]\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mins_batch\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0mi\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mtoarray\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m    195\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 196\u001b[0;31m                 \u001b[0mouts\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mfit_function\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mins_batch\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m    197\u001b[0m                 \u001b[0mouts\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mto_list\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mouts\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m    198\u001b[0m                 \u001b[0;32mfor\u001b[0m \u001b[0ml\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mo\u001b[0m \u001b[0;32min\u001b[0m \u001b[0mzip\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mout_labels\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mouts\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
+      "\u001b[0;32m~/PycharmProjects/TensorFlow/venv/lib/python3.6/site-packages/tensorflow_core/python/keras/backend.py\u001b[0m in \u001b[0;36m__call__\u001b[0;34m(self, inputs)\u001b[0m\n\u001b[1;32m   3738\u001b[0m         \u001b[0mvalue\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mmath_ops\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mcast\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mvalue\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mtensor\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mdtype\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m   3739\u001b[0m       \u001b[0mconverted_inputs\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mappend\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mvalue\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m-> 3740\u001b[0;31m     \u001b[0moutputs\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_graph_fn\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m*\u001b[0m\u001b[0mconverted_inputs\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m   3741\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m   3742\u001b[0m     \u001b[0;31m# EagerTensor.numpy() will often make a copy to ensure memory safety.\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
+      "\u001b[0;32m~/PycharmProjects/TensorFlow/venv/lib/python3.6/site-packages/tensorflow_core/python/eager/function.py\u001b[0m in \u001b[0;36m__call__\u001b[0;34m(self, *args, **kwargs)\u001b[0m\n\u001b[1;32m   1079\u001b[0m       \u001b[0mTypeError\u001b[0m\u001b[0;34m:\u001b[0m \u001b[0mFor\u001b[0m \u001b[0minvalid\u001b[0m \u001b[0mpositional\u001b[0m\u001b[0;34m/\u001b[0m\u001b[0mkeyword\u001b[0m \u001b[0margument\u001b[0m \u001b[0mcombinations\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m   1080\u001b[0m     \"\"\"\n\u001b[0;32m-> 1081\u001b[0;31m     \u001b[0;32mreturn\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_call_impl\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0margs\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mkwargs\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m   1082\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m   1083\u001b[0m   \u001b[0;32mdef\u001b[0m \u001b[0m_call_impl\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0margs\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mkwargs\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mcancellation_manager\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0;32mNone\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
+      "\u001b[0;32m~/PycharmProjects/TensorFlow/venv/lib/python3.6/site-packages/tensorflow_core/python/eager/function.py\u001b[0m in \u001b[0;36m_call_impl\u001b[0;34m(self, args, kwargs, cancellation_manager)\u001b[0m\n\u001b[1;32m   1119\u001b[0m       raise TypeError(\"Keyword arguments {} unknown. Expected {}.\".format(\n\u001b[1;32m   1120\u001b[0m           list(kwargs.keys()), list(self._arg_keywords)))\n\u001b[0;32m-> 1121\u001b[0;31m     \u001b[0;32mreturn\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_call_flat\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0margs\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mcaptured_inputs\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mcancellation_manager\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m   1122\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m   1123\u001b[0m   \u001b[0;32mdef\u001b[0m \u001b[0m_filtered_call\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0margs\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mkwargs\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
+      "\u001b[0;32m~/PycharmProjects/TensorFlow/venv/lib/python3.6/site-packages/tensorflow_core/python/eager/function.py\u001b[0m in \u001b[0;36m_call_flat\u001b[0;34m(self, args, captured_inputs, cancellation_manager)\u001b[0m\n\u001b[1;32m   1222\u001b[0m     \u001b[0;32mif\u001b[0m \u001b[0mexecuting_eagerly\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m   1223\u001b[0m       flat_outputs = forward_function.call(\n\u001b[0;32m-> 1224\u001b[0;31m           ctx, args, cancellation_manager=cancellation_manager)\n\u001b[0m\u001b[1;32m   1225\u001b[0m     \u001b[0;32melse\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m   1226\u001b[0m       \u001b[0mgradient_name\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_delayed_rewrite_functions\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mregister\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
+      "\u001b[0;32m~/PycharmProjects/TensorFlow/venv/lib/python3.6/site-packages/tensorflow_core/python/eager/function.py\u001b[0m in \u001b[0;36mcall\u001b[0;34m(self, ctx, args, cancellation_manager)\u001b[0m\n\u001b[1;32m    509\u001b[0m               \u001b[0minputs\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0margs\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m    510\u001b[0m               \u001b[0mattrs\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m\"executor_type\"\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mexecutor_type\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m\"config_proto\"\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mconfig\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 511\u001b[0;31m               ctx=ctx)\n\u001b[0m\u001b[1;32m    512\u001b[0m         \u001b[0;32melse\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m    513\u001b[0m           outputs = execute.execute_with_cancellation(\n",
+      "\u001b[0;32m~/PycharmProjects/TensorFlow/venv/lib/python3.6/site-packages/tensorflow_core/python/eager/execute.py\u001b[0m in \u001b[0;36mquick_execute\u001b[0;34m(op_name, num_outputs, inputs, attrs, ctx, name)\u001b[0m\n\u001b[1;32m     59\u001b[0m     tensors = pywrap_tensorflow.TFE_Py_Execute(ctx._handle, device_name,\n\u001b[1;32m     60\u001b[0m                                                \u001b[0mop_name\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0minputs\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mattrs\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m---> 61\u001b[0;31m                                                num_outputs)\n\u001b[0m\u001b[1;32m     62\u001b[0m   \u001b[0;32mexcept\u001b[0m \u001b[0mcore\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_NotOkStatusException\u001b[0m \u001b[0;32mas\u001b[0m \u001b[0me\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m     63\u001b[0m     \u001b[0;32mif\u001b[0m \u001b[0mname\u001b[0m \u001b[0;32mis\u001b[0m \u001b[0;32mnot\u001b[0m \u001b[0;32mNone\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
+      "\u001b[0;31mKeyboardInterrupt\u001b[0m: "
+     ]
+    },
+    {
+     "data": {
+      "image/png": "iVBORw0KGgoAAAANSUhEUgAAA4gAAAE+CAYAAADPvD6PAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4xLjEsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy8QZhcZAAAgAElEQVR4nOzde5jcdXn//+c9p53ZzX42B8IhCTHhYAXCOQWtgsVUi1iNWOQgKloqP20pbb3kV+pVUfnaXqVVsB5+KFawUhEsiub7JUitUE9fS0kQ0YCUCIkk4ZDjnmfndP/++Hxmdnay2cxudnZmdl+P65prPud9b9C8c3/e7/d9m7sjIiIiIiIiEmt2A0RERERERKQ1KEAUERERERERQAGiiIiIiIiIRBQgioiIiIiICKAAUURERERERCIKEEVERERERARocIBoZueb2VNmttnMrhvnfIeZ3R2df9jMVkTHX29mG83sF9H366ru+c/omY9Fn8Mb+TuIiIiIiIjMFYlGPdjM4sDngdcD24BHzGyduz9RddmVwF53P87MLgVuBC4BdgFvdvcdZrYKeABYWnXf5e6+od62HHbYYb5ixYpD+4VERKTlbdy4cZe7L252O9qF+kcRkbmj3j6yYQEicBaw2d2fATCzu4C1QHWAuBb4WLR9D/A5MzN3/1nVNZuAjJl1uPvIVBqyYsUKNmyoO54UEZE2ZWZbm92GdqL+UURk7qi3j2zkFNOlwHNV+9sYOwo45hp3LwC9wKKaa/4QeLQmOLw9ml76ETOz6W22iIiIiIjI3NTSSWrM7CTCaaf/T9Xhy939ZOCc6POuA9x7lZltMLMNO3fubHxjRURERERE2lwjA8TtwNFV+8uiY+NeY2YJoAfYHe0vA+4F3u3uvy7f4O7bo+9+4E7Cqaz7cfdb3X21u69evFjLUURERERERA6mkWsQHwGON7OVhIHgpcA7aq5ZB1wB/BS4CHjQ3d3M5gP3Ade5+0/KF0dB5Hx332VmSeAPgP9o4O8gIrNcPp9n27ZtZLPZZjdFJiGdTrNs2TKSyWSzmyIiMiupf2xfh9pHNixAdPeCmV1NmIE0Dtzm7pvM7AZgg7uvA74M3GFmm4E9hEEkwNXAccD1ZnZ9dOwNwCDwQBQcxgmDwy816ncQkdlv27ZtdHd3s2LFCrSkuT24O7t372bbtm2sXLmy2c0REZmV1D+2p+noIxs5goi7rwfW1xy7vmo7C7x9nPs+AXziAI89czrbKCJzWzabVefXZsyMRYsWofXlIiKNo/6xPU1HH9nSSWpERGaCOr/2o/9mIiKNp79r29Oh/ndTgCgiIiIiIiKAAkQRkaabN29es5sgIiLSctQ/NocCxDp8/8kX+Y8nXmx2M0RERFrKzv4R7nz4N+zYN9zspoiIyDRRgFiHW3/4DLf+6JlmN0NE5pAtW7bwute9jlNOOYU1a9bwm9/8BoB/+7d/Y9WqVZx66qmce+65AGzatImzzjqL0047jVNOOYWnn366mU2XOWTHvmE+fO8vePL5vmY3RUTmCPWPjdfQLKazRZBJ8tyeoWY3Q0Qa7OP/exNP7Jjef+ieuCTgo28+adL3/dmf/RlXXHEFV1xxBbfddhvXXHMN3/72t7nhhht44IEHWLp0Kfv27QPgC1/4An/+53/O5ZdfTi6Xo1gsTuvvIHIgQSassdWXzTe5JSLSSOof5xaNINYhSCfpzxaa3QwRmUN++tOf8o53vAOAd73rXfz4xz8G4NWvfjXvec97+NKXvlTp6F71qlfxd3/3d9x4441s3bqVTCbTtHbL3NJTDhCH1UeKyMxQ/9h4GkGsQ08mSd+w3o6KzHZTeZM5077whS/w8MMPc99993HmmWeyceNG3vGOd3D22Wdz3333ccEFF/DFL36R173udc1uqkyRmZ0P/BMQB/7Z3f++5nwH8FXCusC7gUvcfYuZXQ5cW3XpKcAZ7v5Yo9ranQ7/GaE+UmR2U/84t2gEsQ5BJkH/SIFiyZvdFBGZI37nd36Hu+66C4Cvfe1rnHPOOQD8+te/5uyzz+aGG25g8eLFPPfcczzzzDMcc8wxXHPNNaxdu5bHH3+8mU2XQ2BmceDzwBuBE4HLzOzEmsuuBPa6+3HAzcCNAO7+NXc/zd1PA94FPNvI4BAgGY/RmYrTqwBRRGaI+sfG0whiHYJ0OIVmIFugpzPZ5NaIyGwzNDTEsmXLKvsf/OAH+exnP8t73/te/vEf/5HFixdz++23A3Dttdfy9NNP4+6sWbOGU089lRtvvJE77riDZDLJkUceyYc//OFm/Spy6M4CNrv7MwBmdhewFnii6pq1wMei7XuAz5mZuXv1W8zLgLsa39ywj9QaRBFpBPWPzaEAsQ7lRfi9w3kFiCIy7Uql0rjHH3zwwf2Ofetb39rv2HXXXcd111037e2SplgKPFe1vw04+0DXuHvBzHqBRcCuqmsuIQwk92NmVwFXASxfvvyQGxxkElqDKCINof6xOTTFtA5BeY2F3pCKiEiLM7OzgSF3/+V45939Vndf7e6rFy9efMg/ryejEUQRkdlEAWIdKmm8tcZCREQaaztwdNX+sujYuNeYWQLoIUxWU3Yp8PUGtnEMTTEVEZldFCDWobwGUR2giIg02CPA8Wa20sxShMHeuppr1gFXRNsXAQ+W1x+aWQy4mBlafwjhS1QlqRERmT20BrEO5XWHWmMhIiKNFK0pvBp4gLDMxW3uvsnMbgA2uPs64MvAHWa2GdhDGESWnQs8V05yMxOCtNYgiojMJgoQ66A1iCIiMlPcfT2wvubY9VXbWeDtB7j3P4FXNrJ9tYJMkv5snlLJicVsJn+0iIg0gKaY1qErlSBmWoMoIiJSqyeTpOQwmNMooojIbKAAsQ6xmNGd1hoLEZl+5513Hg888MCYY5/+9Kf5wAc+MOF98+bNA2DHjh1cdNFF417zu7/7u2zYsGHC53z6059maGiosn/BBRewb9++epo+oY997GN88pOfPOTnSOsbXaevAFFEpo/6x+ZRgFinIJNQ5yci0+6yyy7jrrvG5hO56667uOyyy+q6f8mSJdxzzz1T/vm1HeD69euZP3/+lJ8nc0+QCZdh9A7pJaqITB/1j82jALFOQTqpKaYiMu0uuugi7rvvPnK5HABbtmxhx44dnHPOOQwMDLBmzRrOOOMMTj75ZL7zne/sd/+WLVtYtWoVAMPDw1x66aWccMIJXHjhhQwPD1eu+8AHPsDq1as56aST+OhHPwrAZz7zGXbs2MF5553HeeedB8CKFSvYtSust37TTTexatUqVq1axac//enKzzvhhBN43/vex0knncQb3vCGMT/nYMZ75uDgIG9605s49dRTWbVqFXfffTcQFjg+8cQTOeWUU/jQhz40qT9XmTnK9C0ijaD+sXn9o5LU1El1nkTmgPuvgxd+Mb3PPPJkeOPfH/D0woULOeuss7j//vtZu3Ytd911FxdffDFmRjqd5t577yUIAnbt2sUrX/lK3vKWt2A2fiKQW2
65hc7OTp588kkef/xxzjjjjMq5v/3bv2XhwoUUi0XWrFnD448/zjXXXMNNN93EQw89xGGHHTbmWRs3buT222/n4Ycfxt05++yzee1rX8uCBQt4+umn+frXv86XvvQlLr74Yr75zW/yzne+86B/FAd65jPPPMOSJUu47777AOjt7WX37t3ce++9/OpXv8LMpmVajzSGagWLzAHqHyvmQv+oEcQ69WSSSuMtIg1RPY2mevqMu/PhD3+YU045hd/7vd9j+/btvPjiiwd8zg9/+MNKR3TKKadwyimnVM594xvf4IwzzuD0009n06ZNPPHEExO26cc//jEXXnghXV1dzJs3j7e97W386Ec/AmDlypWcdtppAJx55pls2bKlrt/zQM88+eST+d73vsdf/dVf8aMf/Yienh56enpIp9NceeWVfOtb36Kzs7OunyEzryejNYgi0hjqH5vTP2oEsU7hGkS9HRWZ1SZ4k9lIa9eu5S//8i959NFHGRoa4swzzwTga1/7Gjt37mTjxo0kk0lWrFhBNpud9POfffZZPvnJT/LII4+wYMEC3vOe90zpOWUdHR2V7Xg8PqkpNON5+ctfzqOPPsr69ev5m7/5G9asWcP111/Pf//3f/P973+fe+65h8997nM8+OCDh/RzpDEqU0w1gigye6l/rMts6R81glgnrUEUkUaZN28e5513Hn/0R380ZvF9b28vhx9+OMlkkoceeoitW7dO+Jxzzz2XO++8E4Bf/vKXPP744wD09fXR1dVFT08PL774Ivfff3/lnu7ubvr7+/d71jnnnMO3v/1thoaGGBwc5N577+Wcc845pN/zQM/csWMHnZ2dvPOd7+Taa6/l0UcfZWBggN7eXi644AJuvvlmfv7znx/Sz5bGmRfVClambxGZbuofm9M/agSxTkEmyWCuSL5YIhlXXC0i0+uyyy7jwgsvHJOx7fLLL+fNb34zJ598MqtXr+YVr3jFhM/4wAc+wHvf+15OOOEETjjhhMqb1lNPPZXTTz+dV7ziFRx99NG8+tWvrtxz1VVXcf7557NkyRIeeuihyvEzzjiD97znPZx11lkA/PEf/zGnn3563dNlAD7xiU9UFtoDbNu2bdxnPvDAA1x77bXEYjGSySS33HIL/f39rF27lmw2i7tz00031f1zZWbFY0Z3h2bZiEhjqH+c+f7R3H3aH9pqVq9e7QerdXIwX/nJs3zsfz/Box95PQu7UtPUMhFptieffJITTjih2c2QKRjvv52ZbXT31U1qUtuZjv4R4NV//yCvPGYRn7r41GlolYi0AvWP7e1Q+kgNhdVJWdpERETGF2SU6VtEZLZQgFgn1XkSEREZX5BO6AWqiMgsoQCxTj2d5RFEpfEWmW3mwlT72Ub/zVpLkEkqSY3ILKS/a9vTof53U4BYJ40gisxO6XSa3bt3qxNsI+7O7t27SafTzW6KRIJ0kn7VQRSZVdQ/tqfp6COVxbROQSb8o9IUGpHZZdmyZWzbto2dO3c2uykyCel0mmXLljW7GRIJMppiKjLbqH9sX4faRypArFN5BFFTaERml2QyycqVK5vdDJG21pNJ0j9SoFhy4jFrdnNEZBqof5y7NMW0Tp2pOPGYaYqpiIhIjfJL1AFNMxURaXsKEOtkZlGWNnV+IiIi1cqloDTLRkSk/SlAnATVeRIREdlfkI7W6auPFBFpewoQJ6Enk9QifBERkRrlEUT1kSIi7U8B4iQE6SR9Wl8hIiIyRk9GpaBERGYLBYiToDTeIiIi+xsdQdRLVBGRdqcAcRKCdFIL8EVERGqU1yCqjxQRaX8KECdBSWpERET215VKEDNNMRURmQ0UIE5CkE6QzZcYKRSb3RQREZGWEYsZ3WklchMRmQ0UIE5CeY1FvxLViIiIjNGTUSI3EZHZQAHiJARppfEWEREZjxK5iYjMDgoQJ2E0jbfekIqIiFRTIjcRkdlBAeIkBJkwS5vekIqIiIwV1gpW/ygi0u4UIE5CeYqp3pCKiIiMFU4x1QwbEZF2pwBxEiqFgPWGVEREZIwelYISEZkVFCBOwmiSGr0hFRERqRakkwzliuSLpWY3RUREDoECxElIJ2Mk46Y3pCIiIjUqs2y0DENEpK0pQJwEMwsX4avzExERGaOSyE2ZvkVE2lpDA0QzO9/MnjKzzWZ23TjnO8zs7uj8w2a2Ijr+ejPbaGa/iL5fV3XPmdHxzWb2GTOzRv4OtVQIWEREZH+qFSwiMjs0LEA0szjweeCNwInAZWZ2Ys1lVwJ73f044Gbgxuj4LuDN7n4ycAVwR9U9twDvA46PPuc36ncYT3dGI4giIiK1epTITURkVmjkCOJZwGZ3f8bdc8BdwNqaa9YC/xJt3wOsMTNz95+5+47o+CYgE402HgUE7v5f7u7AV4G3NvB32E+QTqjMhYiISI3RNYiaZSMi0s4aGSAuBZ6r2t8WHRv3GncvAL3Aoppr/hB41N1Houu3HeSZDRUojbeIiMh+VCtYRGR2SDS7ARMxs5MIp52+YQr3XgVcBbB8+fJpa1OYpEZvR0VERKqNJqlRgCgi0s4aOYK4HTi6an9ZdGzca8wsAfQAu6P9ZcC9wLvd/ddV1y87yDMBcPdb3X21u69evHjxIf4qo4JMQp2fiIhIjUwyTiJmWqcvItLmGhkgPgIcb2YrzSwFXAqsq7lmHWESGoCLgAfd3c1sPnAfcJ27/6R8sbs/D/SZ2Suj7KXvBr7TwN9hP0E6Sa5QIpsvzuSPFRERaWlmFmX6VoAoItLOGhYgRmsKrwYeAJ4EvuHum8zsBjN7S3TZl4FFZrYZ+CBQLoVxNXAccL2ZPRZ9Do/O/Qnwz8Bm4NfA/Y36HcajLG0iItJIUy0RFZ07xcx+amabopJQ6Zlse5DRMgwRkXbX0DWI7r4eWF9z7Pqq7Szw9nHu+wTwiQM8cwOwanpbWr/qLG2HdzerFSIiMhtVlYh6PWEitkfMbJ27P1F1WaVElJldSrhW/5Joqca/Au9y95+b2SJgRt9mKtO3iEj7a+QU01kpSIcxtTpAERFpgCmXiCJM6Pa4u/8cwN13u/uMrodQpm8RkfanAHGSAk0xFRGRxjmUElEvB9zMHjCzR83s/x3vB5jZVWa2wcw27Ny5c1obH2b6Vv8oItLOFCBOUrnOkzpAERFpMQngNcDl0feFZram9qJGZfmG8gii1iCKiLQzBYiTNFrnSR2giIhMu0MpEbUN+KG773L3IcIcAGc0vMVVgkxCL1BFRNqcAsRJ0giiiIg00JRLRBFmDT/ZzDqjwPG1wBPMoCCdZESloERE2lpDs5jORulknI5ETGsQRURk2rl7wczKJaLiwG3lElHABndfR1gi6o6oRNQewiASd99rZjcRBpkOrHf3+2ay/dXr9NPJ+Ez+aBERmSYKEKdAdZ5ERKRRploiKjr3r4SlLpqinOlbpaBERNqXpphOQZDWGgsREZFaPcr0LSLS9hQgToHqPImIiOyvMsVUL1FFRNqWAsQpUJ0nERGR/ZUTufWqjxQRaVsKEKdAdZ5ERET2p1JQIiLtTwHiFGgNo
oiIyP5UCkpEpP0pQJyCnmgNYlh2SkRERECloEREZgMFiFMQZJLki042X2p2U0RERFqKSkGJiLQ3BYhToEX4IiIi49MyDBGR9qYAcQpGF+GrAxQREammUlAiIu1NAeIUaBG+iIjI+FQKSkSkvSlAnIJKIWC9IRURERmjR6WgRETamgLEKQjS0RRTLcIXEREZI8hoDaKISDtTgDgFPRpBFBERGVeQTtI7rFJQIiLtSgHiFHRrDaKIiMi4gkySQskZzheb3RQREZkCBYhTkErEyCTjKnMhIiJSYzSRm5ZhiIi0IwWIUxSusVDnJyIiUk3LMERE2psCxCkK0qrzJCIiUqtSK1izbERE2pICxClSIWAREZH9laeYahmGiEh7UoA4RUFaU0xFRERqqVawiEh7U4A4RT0aQRQREdmPagWLiLQ3BYhTFGSSWl8hIiJSozKCqD5SRKQtKUCcojBJTUGFgEVERKok4zE6U3HNshERaVMKEKcoyCQolpzBnAoBi4iIVAvSSSWpERFpUwoQp2i0ELA6QBERkWqqFSwi0r4UIE6RsrSJiIiMT7WCRUTalwLEKRodQdQbUhERkWrK9C0i0r4UIE5Rj7K0iYiIjCvM9K0XqCIi7UgB4hQFmajOk96QioiIjBGkE0pSIyLSphQgTlF5iqk6QBERkbGCTJL+bJ5SSaWgRETajQLEKepORyOImkIjIiIyRpBOUnIYzKmPFBFpNwoQpygRj9GlQsAiIiL7GV2GoQBRRKTdKEA8BOEifAWIIiIi1ZTITUSkfSlAPASq8yQiIrI/rdMXEWlfChAPQY/SeIuIiOwn0AiiiEjbmlSAaGYxMwsa1Zh2E2QSGkEUERGpUR5B1BpEEZH2c9AA0czuNLPAzLqAXwJPmNm1jW9a6wvSSU2fERGR/ZjZ282sO9r+GzP7lpmd0ex2zZRKkhr1kSIibaeeEcQT3b0PeCtwP7ASeFdDW9UmlKRGREQO4CPu3m9mrwF+D/gycEuT2zRjuisjiOojRUTaTT0BYtLMkoQB4jp3zwOqfAsE6QT9IwUVAhYRkVrF6PtNwK3ufh+QamJ7ZlQ8ZnR3JDTLRkSkDdUTIH4R2AJ0AT80s5cBfY1sVLsIMkncYUCFgEVEZKztZvZF4BJgvZl1MMcSwwVK5CYi0pYO2lm5+2fcfam7X+ChrcB5M9C2lldZhK83pCIiMtbFwAPA77v7PmAhMKfW73enlchNRKQd1ZOk5ggz+7KZ3R/tnwhc0fCWtYHRNN56QyoiIqPcfQh4CXhNdKgAPF3PvWZ2vpk9ZWabzey6cc53mNnd0fmHzWxFdHyFmQ2b2WPR5wvT89tMjdbpi4i0p3qmu3yF8C3okmj/f4C/aFSD2kklS5vekIqISBUz+yjwV8BfR4eSwL/WcV8c+DzwRuBE4LLoxWy1K4G97n4ccDNwY9W5X7v7adHn/Yf4axySnkxSZS5ERNpQPQHiYe7+DaAE4O4FRhffz2nlKaZahC8iIjUuBN4CDAK4+w6gu477zgI2u/sz7p4D7gLW1lyzFviXaPseYI2Z2bS0ehoFaY0gioi0o3oCxEEzW0SUudTMXgn0NrRVbaInozWIIiIyrpy7O6N9Z1ed9y0Fnqva3xYdG/ea6KVtL7AoOrfSzH5mZj8ws3Om2vjpEGQS6h9FRNpQPQHiB4F1wLFm9hPgq8Cf1fPwQ1hHscjMHjKzATP7XM09/xk9s7zG4vB62tIIlSQ1mkIjIiJjfSPKYjrfzN4H/AfwpQb/zOeB5e5+OmHffaeZBbUXmdlVZrbBzDbs3LmzYY0J0kn6RwoUVQpKRKStJA52gbs/amavBX4LMOCpqBbihKrWUbye8A3oI2a2zt2fqLqsso7CzC4lXEdxCZAFPgKsij61Lnf3DQdrQ6PNS0drEPWGVEREqrj7J83s9YRloX4LuN7dv1fHrduBo6v2l0XHxrtmm5klgB5gdzRiORL9/I1m9mvg5cCY/tLdbwVuBVi9enXDordyIreBbIGezmSjfoyIiEyzerKYvh3IuPsm4K3A3WZ2Rh3PnvI6CncfdPcfEwaKLatcCFhJakREpFo0pfRBd7+WcOQwY2b1REmPAMeb2UozSwGXEs7iqbaO0WziF0U/x81scfRyFjM7BjgeeGYafp0pqSzDUB8pItJW6pli+hF37zez1wBrgC8Dt9Rx36Guo5jI7dH00o8caGH+jE2hUSFgERHZ3w+BDjNbCnwXeBdhVvAJRX3h1YTZw58EvuHum8zsBjN7S3TZl4FFZraZcCppeQnHucDjZvYY4UvX97v7nmn8nSYliGbZKJGbiEh7OegUU0Yzlr4J+JK732dmn2hgmw7mcnffbmbdwDcJO92v1l40k1No9HZURERqmLsPmdmVwC3u/g9R4HZQ7r4eWF9z7Pqq7Szw9nHu+yZhv9gSAiVyExFpS/WMIG6PFtpfAqw3s45676P+dRRUr6OY6KHuvj367gfuJJzK2jRBOqG3oyIiUsvM7FXA5cB90bF4E9sz40YTuamPFBFpJ/UEehcTTnX5fXffBywErq3jvimvozjQA80sYWaHRdtJ4A+AX9bRloYJp5iq8xMRkTH+Avhr4N5oiugxwENNbtOMCjLlRG5ahiEi0k7qmWJ6FHCfu4+Y2e8CpzDOlM5a7l4ws/I6ijhwW3kdBbDB3dcRrqO4I1pHsYcwiATAzLYAAZAys7cCbwC2Ag9EwWGcmUkbPqEgnaRfZS5ERKSKu/8A+AGAmcWAXe5+TXNbNbOUpEZEpD3VEyB+E1htZscRrun7DuHUzgsOduNU11FE51Yc4LFn1tHmGaNCwCIiUsvM7gTeT7iO/xEgMLN/cvd/bG7LZk5XKkHMlKRGRKTd1DPFtBRlVXsb8NkoZfdRjW1W+1AhYBERGceJ7t5HWB7qfmAlYVK1OSMWM7rTWoYhItJu6gkQ82Z2GfBu4P9Ex1TxNtJTVQhYREQkkoyWQ7wVWOfueWDOvUkMMgn61D+KiLSVegLE9wKvAv7W3Z81s5XAHY1tVvsItMZCRET290VgC9AF/NDMXgb0NbVFTRBoBFFEpO0cNEB09yeADwG/MLNVwDZ3v7HhLWsTKgQsIiK13P0z7r7U3S/w0FbgvGa3a6b1qFawiEjbOWiAGGUufRr4PPD/Af9jZuc2uF1tQ4WARUSklpn1mNlNZrYh+nyKcDRxTgnSSb1AFRFpM/VMMf0U8AZ3f627nwv8PnBzY5vVPlQIWERExnEb0E9YS/hiwumltze1RU0QZvrWGkQRkXZST5mLpLs/Vd5x9/+JFt4LKgQsIiLjOtbd/7Bq/+Nm9ljTWtMkQVpTTEVE2k09I4gbzOyfzex3o8+XgA2Nbli7UJIaEREZx7CZvaa8Y2avBoab2J6mCDJJhnJF8sVSs5siIiJ1qmcE8QPAnwLXRPs/IlyPKMC8qBCw1iCKiEiV9wNfNbOeaH8vcEUT29MU5VJQ/dkCC7tSTW6NiIjU46ABoruPADdFHwDM7G7gkga2q21UCgGrzpOIiETc/efAqWYWRPt9ZvaHwOPNbdnMKi/D6B3O
K0AUEWkT9UwxHc+rprUVbS7IJJSlTURE9uPufe5ern845xK8VRK5qY8UEWkbUw0QpYoKAYuISB2s2Q2YaVqnLyLSfg44xdTMzjjQKUBZTKsoS5uIiNTBm92AmTY6gqhlGCIi7WKiNYifmuDcr6a7Ie0syCTYsmuo2c0QEZEmM7NfMH4gaMARM9ycpuvRCKKISNs5YIDo7ufNZEPamUYQRUQk8gfNbkArqU5SIyIi7aGeMhdyED0ZrUEUERFw963NbkMrySTjJGKmPlJEpI0oSc00CDJJBnNFCioELCIiUmFmBBnNshERaScKEKdBkA4HYvtVC1FERGSMIJ1QkhoRkTZywADRzN5Ztf3qmnNXN7JR7aacxltrLERE5jYzCyY4t3wm29IqejSCKCLSViYaQfxg1fZna879UQPa0rYqabzVAYqIzHX/Wd4ws+/XnPv2zDalNQSZpF6gioi0kYkCRDvA9muezJ0AACAASURBVHj7c1qlELCm0IiIzHXV/ePCCc7NGUFaidxERNrJRAGiH2B7vP05rZzGWyOIIiJznvrOGkEmQZ/W6IuItI2Jyly8wsweJ3zjeWy0TbR/TMNb1kYqhYD1hlREZK473Mw+SNhXlreJ9hc3r1nNoxFEEZH2MlGAeMKMtaLNaQ2iiIhEvgR0j7MN8M8z35zmCzJJRgolsvki6WS82c0REZGDOGCAWFvs18wWAecCv3H3jY1uWDvpTMWJx0xrEEVE5jh3//iBzpnZb89kW1pFZZ1+Nq8AUUSkDUxU5uL/mNmqaPso4JeE2UvvMLO/mKH2tQUzI0gnlKVNRETGMLMTzex/mdlm4JZmt6cZyrWC9RJVRKQ9TDTFdKW7/zLafi/wPXd/t5l1Az8BPt3w1rWRQHWeREQEMLMVwGXRJw+8DFjt7lua16rmqR5BFBGR1jdRFtPqv8nXAOsB3L0fKDWyUe1Ii/BFRMTMfgrcR/gC9g/d/Uygf64Gh1C1Tl99pIhIW5hoBPE5M/szYBtwBvBdADPLAMkZaFtbURpvEREBXgSWAkcQZi19mjla3qKskulbfaSISFuYaATxSuAk4D3AJe6+Lzr+SuD2Brer7fRkNIIoIjLXuftbgZOBjcDHzOxZYIGZndXcljVPuVaw1umLiLSHibKYvgS8f5zjDwEPNbJR7ShIaw2iiIiAu/cSvki93cyOAC4Gbjaz5e5+dHNbN/M0xVREpL0cMEA0s3UT3ejub5n+5rSvIJNUhjYRERnD3V8EPgt81sxe1uz2NEM6GSeViOklqohIm5hoDeKrgOeArwMPAzYjLWpTQTrBcL5IrlAilZho5q6IiMxWB3u5CszJl6thIje9RBURaQcTBYhHAq8nTNP9DsKsbF93900z0bB2U53G+7B5HU1ujYiINIlero6jJ5PQCKKISJs44FCXuxfd/bvufgVhYprNwH+a2dUz1ro2ojUWIiJC+HL1w8Aq4J8IX7TucvcfuPsPmtqyJgqUyE1EpG1MOBfSzDrM7G3AvwJ/CnwGuHcmGtZuylnalMZbRGTu0svV8alWsIhI+5goSc1XCd+Argc+7u6/nLFWtaFKnSd1gCIic5qZdQBvIlyisQK9XCXIJPnNnqFmN0NEROow0QjiO4HjgT8H/q+Z9UWffjPrm5nmtY/KFFOtsRARmbOil6s/Bc4gfLn62+7+v9x9+ySecb6ZPWVmm83sunHOd5jZ3dH5h81sRc355WY2YGYfOsRfZ9oE6YReoIqItImJ6iAqFeckVJLUKEubiMhc9k5gkPDl6jVmlRw1Bri7BxPdbGZx4POEaxe3AY+Y2Tp3f6LqsiuBve5+nJldCtwIXFJ1/ibg/un4ZaZLTyasFezuVP2ZiIhIC5ooi6lMQnkEsVdvSEVE5qxpeLl6FrDZ3Z8BMLO7gLVAdYC4FvhYtH0P8DkzM3d3M3sr8CxhkNoygkySfNEZzhfpTOmfHiIirUyjhNMknYyRjJummIqIyKFYSlgmo2xbdGzca9y9APQCi8xsHvBXwMdnoJ2TMprpW7NsRERanQLEaWJmytImIiLN9DHgZncfmOgiM7vKzDaY2YadO3fOSMNGM32rjxQRaXWa5zGNgkxSZS5ERORQbAeOrtpfFh0b75ptZpYAeoDdwNnARWb2D8B8oGRmWXf/XPXN7n4rcCvA6tWrvSG/RQ3VChYRaR8KEKeRCgGLiMghegQ43sxWEgaClwLvqLlmHXAFYbbUi4AH3d2Bc8oXmNnHgIHa4LBZKqWgNIIoItLyFCBOoyCdUOcnIiJT5u4FM7saeACIA7e5+yYzuwHY4O7rgC8Dd5jZZmAPYRDZ0sqZvpXITUSk9SlAnEZBJsmOfcPNboaIiLQxd18PrK85dn3VdhZ4+0Ge8bGGNG6KgnS0BlFJakREWp6S1EyjIJ2kV52fiIjIGN1agygi0jYUIE6jIKMppiIiIrVSiRiZZFx9pIhIG1CAOI2CdJJcoUQ2X2x2U0RERFpKTyapKaYiIm1AAeI0CpSlTUREZFxBJqEkNSIibaChAaKZnW9mT5nZZjO7bpzzHWZ2d3T+YTNbER1fZGYPmdmAmX2u5p4zzewX0T2fMTNr5O8wGZU03npDKiIiMkaQTuoFqohIG2hYgGhmceDzwBuBE4HLzOzEmsuuBPa6+3HAzcCN0fEs8BHgQ+M8+hbgfcDx0ef86W/91FSytKkDFBERGSPIKEAUEWkHjRxBPAvY7O7PuHsOuAtYW3PNWuBfou17gDVmZu4+6O4/JgwUK8zsKCBw9/+KigJ/FXhrA3+HSalMMdUUGhERkTGCdEIzbERE2kAjA8SlwHNV+9uiY+Ne4+4FoBdYdJBnbjvIMwEws6vMbIOZbdi5c+ckmz41QVqFgEVERMbToxFEEZG2MGuT1Lj7re6+2t1XL168eEZ+ZpApTzHVG1IREZFqQSZJ33CeUsmb3RQREZlAIwPE7cDRVfvLomPjXmNmCaAH2H2QZy47yDObJlAhYBERkXEF6SQlh8GcXqKKiLSyRgaIjwDHm9lKM0sBlwLraq5ZB1wRbV8EPBitLRyXuz8P9JnZK6Pspe8GvjP9TZ+adDJOKhHTFBoREZEammUjItIeEo16sLsXzOxq4AEgDtzm7pvM7AZgg7uvA74M3GFmm4E9hEEkAGa2BQiAlJm9FXiDuz8B/AnwFSAD3B99WkaQViFgERGRWtWzbJbOzzS5NSIiciANCxAB3H09sL7m2PVV21ng7Qe4d8UBjm8AVk1fK6dXTyahEUQREZEaPcr0LSLSFmZtkppmKS/CFxERkVHlUlDK9C0i0toUIE6zcIqpOj8REZFqlSmmWoMoItLSFCBOsyCTVOcnIiJSo5KkRi9RRURamgLEaRakE+r8REREanRXRhDVR4qItDIFiNMsHEHMM0G1DhERkTknHjO6OxLK9C0i0uIUIE6zIJ0kX3Sy+VKzmyIiItJSgkxSSWpERFqcAsRpVknjrSk0IiIiY3SnVQpKRKTVKUCcZlqELyIiMj6VghIRaX0KEKdZOY23ptCIiIiMFaSV6VtEpNUpQJxmgaaYioi
IjKtHI4giIi1PAeI0C9LlKaZ6QyoiIlItyKgUlIhIq1OAOM00gigiIjK+IJ2kf6RAsaRSUCIirUoB4jQrr0HUG1IREZGxyi9RB7QOUUSkZSlAnGapRIxMMq5F+CIiIjUqyzA0y0ZEpGUpQGwArbEQERHZX7lWsDJ9i4i0LgWIDRCkk+r8REREalTW6auPFBFpWQoQGyDIJDV9RkREpEZlnb76SBGRlqUAsQGCdEJlLkRERGoEGZWCEhFpdQoQG0AjiCIiIvtTKSgRkdanALEBgnRS6ytERERqzEsliJnWIIqItDIFiA3Qk0nSly3grkLAIiIiZbGY0a1EbiIiLU0BYgMEmQTFkjOUKza7KSIiIi0lyCRUK1hEpIUpQGyAcpY2vSEVEREZS8swRERamwLEBtAifBERkfEFaSVyExFpZQoQG6BS50lpvEVERMboySTVP4qItDAFiA0wWudJb0hFRESqBZmElmCIiLQwBYgNUBlB1BQaERGRMTTFVESktSlAbICe8hpEvSEVEREZI8gkGcoVyRdLzW6KiIiMQwFiA3SnoymmSuMtIiKTZGbnm9lTZrbZzK4b53yHmd0dnX/YzFZEx88ys8eiz8/N7MKZbns9gqiP7FcfKSLSkhQgNkAiHqMrFdcaCxERmRQziwOfB94InAhcZmYn1lx2JbDX3Y8DbgZujI7/Eljt7qcB5wNfNLPEzLS8fj2dmmUjItLKFCA2SJBRnScREZm0s4DN7v6Mu+eAu4C1NdesBf4l2r4HWGNm5u5D7l4elksDPiMtniTVChYRaW0KEBtEi/BFRGQKlgLPVe1vi46Ne00UEPYCiwDM7Gwz2wT8Anh/VcBYYWZXmdkGM9uwc+fOBvwKE1OtYBGR1qYAsUGCTEJ1nkREZEa5+8PufhLw28Bfm1l6nGtudffV7r568eLFM95G1QoWEWltChAbRCOIIiIyBduBo6v2l0XHxr0mWmPYA+yuvsDdnwQGgFUNa+kUVWoFq48UEWlJChAbpCejAFFERCbtEeB4M1tpZingUmBdzTXrgCui7YuAB93do3sSAGb2MuAVwJaZaXb9VApKRKS1tVx2s9kiTFKj6TMiIlI/dy+Y2dXAA0AcuM3dN5nZDcAGd18HfBm4w8w2A3sIg0iA1wDXmVkeKAF/4u67Zv63mFgmGScRMyWpERFpUQoQGyRIJ+jL5imVnFjMmt0cERFpE+6+Hlhfc+z6qu0s8PZx7rsDuKPhDTxEZha+RNUsGxGRlqQppg0SZJK4w0BOo4giIiLVgrQSuYmItCoFiA0ymqVNb0hFRESqaQRRRKR1KUBskEqWNr0hFRERGaMnk9QLVBGRFqUAsUEqI4h6QyoiIjJGkE4qSY2ISItSgNgggdJ4i4iIjCvIJOjLaoaNiEgrUoDYIJU6T+oARURExgjSmmIqItKqFCA2SHmKqabQiIiIjBVkkowUSmTzxWY3RUREaihAbJB56XKSGgWIIiIi1crLMPo1y0ZEpOUoQGyQeMzo7kgoSY2IiEiNIHqJqlk2IiKtRwFiAwWZpMpciIiI1KgkctNLVBGRlqMAsYG60xpBFBERqVUpBaURRBGRlqMAsYFUCFhERGR/PZlonb7WIIqItBwFiA0UZJLq/ERERGqoVrCISOtqaIBoZueb2VNmttnMrhvnfIeZ3R2df9jMVlSd++vo+FNm9vtVx7eY2S/M7DEz29DI9h8q1XkSERHZn0pBiYi0rkSjHmxmceDzwOuBbcAjZrbO3Z+ouuxKYK+7H2dmlwI3ApeY2YnApcBJwBLgP8zs5e5eLph0nrvvalTbp0uQSShAFBERqZFOxkklYlqnLyLSgho5gngWsNndn3H3HHAXsLbmmrXAv0Tb9wBrzMyi43e5+4i7Pwtsjp7XVoJ0kv6RAsWSN7spIiIi06+Qgy0/hnx20reGs2y0DENEpNU0bAQRWAo8V7W/DTj7QNe4e8HMeoFF0fH/qrl3abTtwL+bmQNfdPdbx/vhZnYVcBXA8uXLD+03+drFsH0DxDsgnoRER7idSB3guwPiKd7wUpbORB/5f/9v4qkUxBIQi0ff1dvVx6r2LR5uWyzajo1uWyw6V96uPR47+Kdyne1/jtpjFn5ERETKdvwMvvKmsO9b9tuw4jWw4tXhdjIz4a1BJsHj2/bx0K9e4vTl85nfmZqhRouIyEQaGSA2ymvcfbuZHQ58z8x+5e4/rL0oChxvBVi9evWhDeEdex70LIPiSPi2tPIdfXJDUNxbc26E40eyHBvPEv+vB4BZ8pb0gMFjdWBp4xw7yHW114z5GbU/d7xnHyDQ3e9YtF8JwBNhcD1mPz4afNcG8uUXAMkMJNLhJxl9JzJV5zrG7sfiTftPJiLSMEecCJd+Hbb+BLb8CH74D/CDEsRTsHR1GCy+7NVw9NmQ6hxz62tfvpiv/nQr7/3KIwAcd/g8zly+gDNXLODMly3gmMO6ML2YFBGZcY0MELcDR1ftL4uOjXfNNjNLAD3A7onudffy90tmdi/h1NP9AsRp9coPTOm2fX1ZPvXv/0P/SJ5svkQunyefz1Mo5MjnC+TzOYqFAoVC+F0sFkhQJE6JZPRtlIhHH8OJUyJmTiw6FqN6O9wvX1s+F4vuieMkDOIxJ2lO3JxE5RsSsRIxIB4di0F4X3RvzIiOe3ScaLtEHMeAmIXPiOGYUWkjQNxKYaxXaSPEKEH5endi7hiOVZ0399FtA/NSdE35umLlWPg8x7wUPTN8vkV/luaOeTF6bjHc9uh+L4TfpcLY/crS10MQS4b/OEpGn1QnJLvC4DHVFR0/wHYiGrmuGp0+8LHU6HYsoVFfEWmsjm54xQXhB2B4H/zmv2Drj2HLT+BHn4If/mP4d+DSM8JgccVr4Oiz+eibT+La3/8tHt/Wy8ate9m4dS/f3fQCd28IJx/N70xy5vIFnPGyMGA8ddl8Mim9bBMRaTRzb8z6uCjg+x9gDWFw9wjwDnffVHXNnwInu/v7oyQ1b3P3i83sJOBOwuBvCfB94HggDcTcvd/MuoDvATe4+3cnasvq1at9w4aWTngKQKnk5Iolsvki2XyJfDH8FEpOrhB+V44Vy9seXVMiX3DypfBcoeSUSuF3sVSKvn30uxgeL/rofqHklKL98nexxDjHqq9jzLZXXVs+V3KnVBrdLpbC68r3eXSdQ+U+r7q/Qf8TnYQwoE1QJEWBDvKkLUcHOdLk6SBHh0Xf5ElH++mq/a5Yjq5YnnmxHPNshE7L0WkjZBghQ5Y0OdKeJeVZOkrZSuB8aK02iKewMYFj9F2ZEp0aG1SWv5OZ0UB1zPZExzrDfywmOqbhz1xkasxso7uvbnY72kXD+8dsHzz3cLhOcetPYPuj4MXwBdZRp4VB46Lj4bDjYNHxlLqX8MzuYR7dupcNW/ewcetefr1zEIBEzDhxScAZyxdw+vL5HH94NysP61LQKCJSp3r7yIaNIEZrCq8GHgDiwG3uvsnMbgA2uPs64MvAHWa2GdhDmLmU6LpvAE8Qzs38U3cvmtkRwL3RlJMEcOfBgsN2EosZ6VicdFKdXT
<base64 PNG data omitted: matplotlib figure output>\n",
+      "text/plain": [
+       "<Figure size 1080x360 with 2 Axes>"
+      ]
+     },
+     "metadata": {
+      "needs_background": "light"
+     },
+     "output_type": "display_data"
+    },
+    {
+     "data": {
+      "image/png": "iVBORw0KGgoAAAANSUhEUgAAA4gAAAE9CAYAAABJKEwhAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4xLjEsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy8QZhcZAAAgAElEQVR4nOzdeZxU9Zn3/c/VtTVdRbNUt+wIbhEEcemAxiUqicGVaIxxHU1MfCaJcebJZCGZSWJ84nPHGUfNdpsxGm/jHaOJifcwUUIWHY0Zo7aOooAKKkoDAt1AQ+9VXdf9R52Gom3oarqrq6v7+3696lVn+Z1T14GEn9f5bebuiIiIiIiIiJQVOwAREREREREZGpQgioiIiIiICKAEUURERERERAJKEEVERERERARQgigiIiIiIiIBJYgiIiIiIiICQLjYAQyGqqoqnzFjRrHDEBGRAnv++efr3b262HGUCtWPIiIjR7515IhIEGfMmEFtbW2xwxARkQIzs7eLHUMpUf0oIjJy5FtHqoupiIiIiIiIAEoQRUREREREJKAEUURERERERIARMgZRRGRfUqkUdXV1tLW1FTsU6YPy8nKmTp1KJBIpdigHxMwWAd8DQsBd7v7dbudjwM+A44EG4BPuvi7n/HRgFXCDu98SHFsH7AI6gbS71xT+SURkuFL9WLr6W0cqQRSREa2uro7Ro0czY8YMzKzY4Uge3J2Ghgbq6uqYOXNmscPpMzMLAT8CPgzUAc+Z2VJ3X5VT7Bpgu7sfZmaXADcDn8g5fyuwrIfbn+7u9QUKXURGENWPpWkg6kh1MRWREa2trY1kMqnKr4SYGclkspTfas8H1rr7m+7eATwALO5WZjFwb7D9ELDQgv+RmtlHgbeAlYMUr4iMQKofS9NA1JFKEEVkxFPlV3pK/O9sCrA+Z78uONZjGXdPA41A0swSwFeBb/dwXwd+b2bPm9m1Ax61iIw4Jf5v7YjV3783JYgiIkWWSCSKHYKUjhuA29y9qYdzJ7v7ccBZwOfN7NSebmBm15pZrZnVbt26tYChioj0j+rH4lCCKCIiMrg2ANNy9qcGx3osY2ZhYAzZyWoWAP8cTEjz98DXzew6AHffEHxvAR4m25X1Pdz9Tnevcfea6urqgXomEREZJpQg5uFPqzfzx1Wbix2GiIwg69at44wzzuDoo49m4cKFvPPOOwD86le/Ys6cOcybN49TT802EK1cuZL58+dzzDHHcPTRR7NmzZpihi69ew443MxmmlkUuARY2q3MUuCqYPsi4DHPOsXdZ7j7DOB24P939x+aWdzMRgOYWRw4E3il0A+ydVc79z/zDpsaWwv9UyIigOrHwaAEMQ8/+fOb3Pnkm8UOQ0RGkC984QtcddVVrFixgssvv5zrr78egBtvvJHly5fz0ksvsXRpNqf48Y9/zN/93d/x4osvUltby9SpU4sZuvQiGFN4HbAcWA380t1XmtmNZnZ+UOxusmMO1wJfBJb0ctsJwFNm9hLwLPCIu/+uME+wx6bGVr7+8Mu8smFnoX9KRARQ/TgYtMxFHpKJGK9uUuUnMtx9+z9WsmrjwP5/ffbkSr513lF9vu7pp5/mN7/5DQBXXnklX/nKVwA46aSTuPrqq7n44ou58MILATjxxBO56aabqKur48ILL+Twww8fuAeQgnD3R4FHux37Zs52G/DxXu5xQ872m8C8gY2yd8lEDIBtze2D/dMiMohUP44sakHMQ1U8SkNzR7HDEBHhxz/+Md/5zndYv349xx9/PA0NDVx22WUsXbqUUaNGcfbZZ/PYY48VO0wZIZLxKAD1TaojRaS4VD8OHLUg5mF8PMaOlhSpzgyRkHJqkeHqQN5kFsoHPvABHnjgAa688kp+/vOfc8oppwDwxhtvsGDBAhYsWMCyZctYv349jY2NHHLIIVx//fW88847rFixgjPOOKPITyAjQXkkRDwaokEJosiwpvpxZFGCmIdkIvuGdHtzBwdVlhc5GhEZblpaWvYaF/HFL36RH/zgB3zyk5/kX/7lX6iuruaee+4B4Mtf/jJr1qzB3Vm4cCHz5s3j5ptv5r777iMSiTBx4kS+/vWvF+tRZARKJmI0qIupiBSA6sfiUIKYh6rEni40ShBFZKBlMpkej/fUFaZr3EWuJUuWsGRJb3OYiBRGMhFVC6KIFITqx+JQf8k8dA3C1xtSERGRvSXjMeqbVD+KiAwXShDz0DUIX29IRURE9laV0ERuIiLDiRLEPHS1IOoNqYiIyN6SiSjbmjvIZLzYoYiIyABQgpiHyvIwkZDpDamIiEg3yXiMzozT2JoqdigiIjIAlCDmwcxIxmM0qAVRRERkL10zfWucvojI8KAEMU+apU1EROS9qnYPw1AdKSIyHBQ0QTSzRWb2mpmtNbP3zDFrZjEzezA4/4yZzQiOJ83scTNrMrMf5pSvMLNHzOxVM1tpZt8tZPy5kokY9epiKiID7PTTT2f58uV7Hbv99tv57Gc/u9/rEokEABs3buSiiy7qscxpp51GbW3tfu9z++2309LSsnv/7LPPZseOHfmEvl833HADt9xyS7/vI0Pf7hZEJYgiMoBUPxZPwRJEMwsBPwLOAmYDl5rZ7G7FrgG2u/thwG3AzcHxNuAbwJd6uPUt7n4kcCxwkpmdVYj4u6uKR9XFVEQG3KWXXsoDDzyw17EHHniASy+9NK/rJ0+ezEMPPXTAv9+9Anz00UcZO3bsAd9PRp5kXEtBicjAU/1YPIVsQZwPrHX3N929A3gAWNytzGLg3mD7IWChmZm7N7v7U2QTxd3cvcXdHw+2O4AXgKkFfIbd1MVURArhoosu4pFHHqGjI/vvy7p169i4cSOnnHIKTU1NLFy4kOOOO465c+fy7//+7++5ft26dcyZMweA1tZWLrnkEmbNmsUFF1xAa2vr7nKf/exnqamp4aijjuJb3/oWAN///vfZuHEjp59+OqeffjoAM2bMoL6+HoBbb72VOXPmMGfOHG6//fbdvzdr1iw+85nPcNRRR3HmmWfu9Tu96emezc3NnHPOOcybN485c+bw4IMPAtkFjmfPns3RRx/Nl77U0/tCGQrGVUQwUxdTERlYqh+LVz+GB/yOe0wB1ufs1wEL9lXG3dNm1ggkgfrebm5mY4HzgO8NSLS9SCZitKY6aelIUxEt5B+biIwk48ePZ/78+SxbtozFixfzwAMPcPHFF2NmlJeX8/DDD1NZWUl9fT0nnHAC559/PmbW473uuOMOKioqWL16NStWrOC4447bfe6mm25i/PjxdHZ2snDhQlasWMH111/PrbfeyuOPP05VVdVe93r++ee55557eOaZZ3B3FixYwAc/+EHGjRvHmjVr+MUvfsFPfvITLr74Yn79619zxRVX9Pqs+7rnm2++yeTJk3nkkUcAaGxspKGhgYcffphXX30VMxuQbj1SGOFQGeMqomxTC6KIDCDVj8WrH0sy0zGzMPAL4Pvu/uY+ylwLXAswffr0fv9mMr5njEXF+JL8Yx
OR3ixbAu++PLD3nDgXztr/cOmubjRdFeDdd98NgLvz9a9/nSeffJKysjI2bNjA5s2bmThxYo/3efLJJ7n++usBOProozn66KN3n/vlL3/JnXfeSTqdZtOmTaxatWqv89099dRTXHDBBcTjcQAuvPBC/vznP3P++eczc+ZMjjnmGACOP/541q1bl9cfxb7uuWjRIv7hH/6Br371q5x77rmccsoppNNpysvLueaaazj33HM599xz8/oNKY5kXL1sRIY11Y+7jYT6sZBdTDcA03L2pwbHeiwTJH1jgIY87n0nsMbdb99XAXe/091r3L2murq6T4H3ZM8sbXpDKiIDa/HixfzpT3/ihRdeoKWlheOPPx6An//852zdupXnn3+eF198kQkTJtDW1tbL3d7rrbfe4pZbbuFPf/oTK1as4Jxzzjmg+3SJxWK7t0OhEOl0+oDvBXDEEUfwwgsvMHfuXP7pn/6JG2+8kXA4zLPPPstFF13Eb3/7WxYtWtSv35DCGq8EUUQKQPVjcerHQjaFPQccbmYzySaClwCXdSuzFLgKeBq4CHjM3X1/NzWz75BNJD894BHvh2ZpExkBenmTWSiJRILTTz+dT33qU3sNvm9sbOSggw4iEonw+OOP8/bbb+/3Pqeeeir3338/Z5xxBq+88gorVqwAYOfOncTjccaMGcPmzZtZtmwZp512GgCjR49m165d7+lCc8opp3D11VezZMkS3J2HH36Y++67r1/Pua97bty4kfHjx3PFFVcwduxY7rrrLpqammhpaeHss8/mpJNO4pBDDunXb0thVSVirH53Z7HDEJFCUf2420ioHwuWIAZjCq8DlgMh4KfuvtLMbgRq3X0pcDdwn5mtBbaRTSIBMLN1QCUQNbOPAmcCO4F/BF4FXgj6Gf/Q3e8q1HN0SSY0S5uIFM6ll17KBRdcsNeMbZdffjnnnXcec+fOpaamhiOPPHK/9/jsZz/LJz/5SWbNmsWsWbN2v2mdN28exx57LEceeSTTpk3jpJNO2n3Ntddey6JFi5g8eTKPP/747uPHHXccV199NfPnzwfg05/+NMcee2ze3WUAvvOd7+weaA9QV1fX4z2XL1/Ol7/8ZcrKyohEItxxxx3s2rWLxYsX09bWhrtz66235v27Mvg0kZuIFIrqx8GvH62XBrthoaamxntb66Q3balOjvzG7/jyR97H508/bIAiE5FiW716NbNmzSp2GHIAevq7M7Pn3b2mSCGVnIGoHwG+98c13PbH13n9O2cRDRd0iWURGSSqH0tbf+pI/Suep/JIiHg0xLZmvSEVERHJ1TUMY3uL6kgRkVKnBLEPkokYDZqkRkREZC9VQYKoidxEREqfEsQ+SCaiNKgFUUREZC+7x+lrHKKISMlTgtgHyXiMelV+IsPOSBiLPdzo72xo2b1WsCZyExlW9G9taerv35sSxD6oSkTVxVRkmCkvL6ehoUGVYAlxdxoaGigvLy92KBJQC6LI8KP6sTQNRB1ZyHUQh51kIsq25g4yGaeszIodjogMgKlTp1JXV8fWrVuLHYr0QXl5OVOnTi12GBKoLA8TCZl62YgMI6ofS1d/60gliH2QjMdIZ5ydbSnGVkSLHY6IDIBIJMLMmTOLHYZISTMzknFN5CYynKh+HLnUxbQPkrtnadMbUhERkVyayE1EZHhQgtgHVbvHWOgNqYiISC4tBSUiMjwoQeyDrhZEvSEVERHZW1VcLYgiIsOBEsQ+SMbVgigiItKTZCKqWUxFRIYBJYh9MK4igpnGIIqIiHQ3Ph6jNdVJS0e62KGIiEg/KEHsg3CojHEVUS0ELCIi0s3uYRh6iSoiUtKUIPZRMq4uNCIiIt1V7Z7pWy9RRURKmRLEPtIYCxERGQhmtsjMXjOztWa2pIfzMTN7MDj/jJnN6HZ+upk1mdmX8r1nIe0Zp686UkSklClB7KNkIka9upiKiEg/mFkI+BFwFjAbuNTMZncrdg2w3d0PA24Dbu52/lZgWR/vWTB7ZvpWHSkiUsqUIPZRlbqYiohI/80H1rr7m+7eATwALO5WZjFwb7D9ELDQzAzAzD4KvAWs7OM9C6arBVETuYmIlDYliH00Ph6jsTVFRzpT7FBERKR0TQHW5+zXBcd6LOPuaaARSJpZAvgq8O0DuCdmdq2Z1ZpZ7datW/v1ELlGRUPEoyG9RBURKXFKEPuoqwvN9hZVgCIiUhQ3ALe5e9OBXOzud7p7jbvXVFdXD2hgyURMXUxFREpcuNgBlJrcWdomVJYXORoRESlRG4BpOftTg2M9lakzszAwBmgAFgAXmdk/A2OBjJm1Ac/ncc+C0kRuIiKlTwliHyUTmqVNRET67TngcDObSTaJuwS4rFuZpcBVwNPARcBj7u7AKV0FzOwGoMndfxgkkb3ds6CS8Rh121sG8ydFRGSAqYtpHyXj2RbEbc1KEEVE5MAEYwqvA5YDq4FfuvtKM7vRzM4Pit1NdszhWuCLwH6XrdjXPQv1DD2pSkRpUP0oIlLS1ILYR10tiFoIWERE+sPdHwUe7XbsmznbbcDHe7nHDb3dczAlE1G2NXeQyThlZVasMEREpB/UgthHleVhIiHTG1IREZFukvEYnRmnsTVV7FBEROQAKUHsIzMjGY/RoBZEERGRvXTN9K2XqCIipUsJ4gHQLG0iIiLvVbV7Ije9RBURKVVKEA9AMhGjXm9HRURE9jI+rhZEEZFSpwTxAFTFo3o7KiIi0s3uLqaqI0VESpYSxAOgLqYiIiLvNb4imyDWq44UESlZShAPQDIRozXVSUtHutihiIiIDBnhUBnjKiI0NKsFUUSkVClBPADJrjEWekMqIiKyl2QipvpRRKSEKUE8AF2ztNVrjIWIiMheknENwxARKWVKEA/AnkH4qgBFRERyVSVi1KuLqYhIySpogmhmi8zsNTNba2ZLejgfM7MHg/PPmNmM4HjSzB43syYz+2G3a443s5eDa75vZlbIZ+hJsmudJ1WAIiIie9FEbiIipa1gCaKZhYAfAWcBs4FLzWx2t2LXANvd/TDgNuDm4Hgb8A3gSz3c+g7gM8DhwWfRwEe/f11jEDVLm4iIyN6S8RiNrSk60plihyIiIgegkC2I84G17v6mu3cADwCLu5VZDNwbbD8ELDQzc/dmd3+KbKK4m5lNAird/a/u7sDPgI8W8Bl6VB4JEY+G9IZURESkm65hGNtbVEeKiJSiQiaIU4D1Oft1wbEey7h7GmgEkr3cs66Xew6KZCKmLqYiIiLdVCW6etmojhQRKUXDdpIaM7vWzGrNrHbr1q0Dfn+NsRAREXmv3eP0VUeKiJSkQiaIG4BpOftTg2M9ljGzMDAGaOjlnlN7uScA7n6nu9e4e011dXUfQ+9dMh7T21EREZFuusbpb2tWgigiUooKmSA+BxxuZjPNLApcAiztVmYpcFWwfRHwWDC2sEfuvgnYaWYnBLOX/g3w7wMfeu+qElEaVPmJiIjsJam1gkVESlq4UDd297SZXQcsB0LAT919pZndCNS6+1LgbuA+M1sLbCObRAJgZuuASiBqZh8Fz
nT3VcDngP8FjAKWBZ9Bl0xE2dbcQSbjlJUN+kobIiIiQ1JleZhIyPQSVUSkRBUsQQRw90eBR7sd+2bOdhvw8X1cO2Mfx2uBOQMX5YFJxmN0ZpydbSnGVkSLHY6IiMiQYGaMj0dpUAuiiEhJGraT1BRaMqG1EEVERHqSjMc0SY2ISIlSgniAqnbP0qY3pCIiIrmSiSj16mIqIlKSlCAeoK4WRI2xEBER2VtVIqYXqCIiJUoJ4gFKxtWCKCIi0pNkXGsFi4iUKiWIB2hcRQQzjUEUERHpLpmI0ZrqpKUjXexQRESkj5QgHqBwqIxxFVEamtWCKCIikmv3MAy9RBURKTlKEPtBXWhERETeq2r3TN96iSoiUmqUIPZDMqEEUUREpLs94/RVR4qIlBoliP2QTMSoVxdTERGRveyZ6Vt1pIhIqVGC2A9V6mIqIiLyHl0tiJrITUSk9ChB7IdkIkZja4qOdKbYoYiIiAwZo6Ih4tGQXqKKiJQgJYj9MD6e7UKzvUUVoIiISK5kIsY2dTEVESk5ShD7QbO0iYiI9CyZiNLQrBeoIiKlRgliPyQTmqVNRESkJ8l4TGMQRURKkBLEfkjGNUubiIj0nZktMrPXzGytmS3p4XzMzB4Mzj9jZjOC4/PN7MXg85KZXZBzzTozezk4Vzt4T9Oz7FrBqh9FREpNuNgBlDK1IIqISF+ZWQj4EfBhoA54zsyWuvuqnGLXANvd/TAzuwS4GfgE8ApQ4+5pM5sEvGRm/+Hu6eC60929fvCeZt+SiSjbmjvIZJyyMit2OCIikie1IPZDZXmYSMjUhUZERPpiPrDW3d909w7gAWBxtzKLgXuD7YeAhWZm7t6SkwyWAz4oER+AZCJGOuPsbEsVOxQREekDJYj9YGYk4zF1oRERkb6YAqzP2a8LjvVYJkgIG4EkgJktMLOVwMvA3+YkjA783syeN7Nr9/XjZnatmdWaWe3WrVsH5IF6smciN71EFREpJUoQ+0mztImIyGBy92fc/Sjg/cDXzKw8OHWyux8HnAV83sxO3cf1d7p7jbvXVFdXFyzOZLxrGIZeooqIlBIliP2UTMSUIIqISF9sAKbl7E8NjvVYxszCwBigIbeAu68GmoA5wf6G4HsL8DDZrqxFk0x0TeSmOlJEpJQoQeynKs3SJiIiffMccLiZzTSzKHAJsLRbmaXAVcH2RcBj7u7BNWEAMzsYOBJYZ2ZxMxsdHI8DZ5Kd0KZodieIqiNFREqKZjHtp2QiqllMRUQkb8EMpNcBy4EQ8FN3X2lmNwK17r4UuBu4z8zWAtvIJpEAJwNLzCwFZIDPuXu9mR0CPGxmkK3b73f33w3uk+1tfIXGIIqIlCIliP2UTMRoTXXS0pGmIqo/ThER6Z27Pwo82u3YN3O224CP93DdfcB9PRx/E5g38JEeuHCojHEVEa0VLCJSYtTFtJ+S8a4uNHpDKiIikiuZiKl+FBEpMUoQ+6kqkZ2lrV5jLERERPaSjGsYhohIqVGC2E97BuGrAhQREclVlYipi6mISIlRgthPyaAFURWgiIjI3rRWsIhI6VGC2E9dYxA1S5uIiMjekvEYO1pSpDozxQ5FRETypASxn8ojIeLRkLqYioiIdNM1DGO7WhFFREqGEsQBkNQYCxERkfdQLxsRkdKjBHEAJBOapU1ERKQ7jdMXESk9ShAHQDIe0zIXIiIi3WimbxGR0qMEcQBUaZY2ERGR96iKa61gEZFS06cE0czKzKyyUMGUqmQiyrbmDjIZL3YoIiIyiMzs42Y2Otj+JzP7jZkdV+y4horKUWHCZaaXqCIiJaTXBNHM7jezSjOLA68Aq8zsy/nc3MwWmdlrZrbWzJb0cD5mZg8G558xsxk5574WHH/NzD6Sc/z/NbOVZvaKmf3CzMrziaWQkvEYnRmnsTVV7FBERGRwfcPdd5nZycCHgLuBO4oc05BhZsE4fbUgioiUinxaEGe7+07go8AyYCZwZW8XmVkI+BFwFjAbuNTMZncrdg2w3d0PA24Dbg6unQ1cAhwFLAL+p5mFzGwKcD1Q4+5zgFBQrqh2j7HQIHwRkZGmM/g+B7jT3R8BokWMZ8hJxmMagygiUkLySRAjZhYhmyAudfcUkE9fyvnAWnd/0907gAeAxd3KLAbuDbYfAhaamQXHH3D3dnd/C1gb3A8gDIwyszBQAWzMI5aCqkp0jbFQBSgiMsJsMLN/Az4BPGpmMTS+fy/JRJR6dTEVESkZ+VRi/wasA+LAk2Z2MLAzj+umAOtz9uuCYz2Wcfc00Agk93Wtu28AbgHeATYBje7++zxiKSjN0iYiMmJdDCwHPuLuO4DxQF7DMEaKqkRMXUxFREpIrwmiu3/f3ae4+9me9TZw+iDE9h5mNo5s6+JMYDIQN7Mr9lH2WjOrNbParVu3FjSuZFzrPImIjETu3gJsAU4ODqWBNcWLaOhJxrVWsIhIKclnkpoJZna3mS0L9mcDV+Vx7w3AtJz9qcGxHssEXUbHAA37ufZDwFvuvjXo6vob4AM9/bi73+nuNe5eU11dnUe4B25cRQQzdTEVERlpzOxbwFeBrwWHIsD/Ll5EQ08yEaM11UlLR7rYoYiISB7y6WL6v8h2n5kc7L8O/H0e1z0HHG5mM80sSnYymaXdyixlT7J5EfCYu3tw/JJgltOZwOHAs2S7lp5gZhXBWMWFwOo8YimocKiMcRVRtqkFUURkpLkAOB9oBnD3jcDookY0xGgYhohIacknQaxy918CGdg9VrBz/5fsLncd2eRyNfBLd19pZjea2flBsbuBpJmtBb4ILAmuXQn8ElgF/A74vLt3uvszZCezeQF4OYj/znwftpDUhUZEZETqCF5sOkCwJJTkqNo907fqSBGRUhDOo0yzmSXZU/mdQHYymV65+6PAo92OfTNnuw34+D6uvQm4qYfj3wK+lc/vD6bsOk+q/ERERphfBrOYjjWzzwCfAn5S5JiGlN3j9DVRjYhIScgnQfwi2S6fh5rZX4Bqst1BJUcyEWP1pnwmdxURkeHC3W8xsw+Tnd37fcA33f0PRQ5rSFEXUxGR0tJrgujuL5jZB8lWfAa8FkwQIzmq1MVURGTECbqUPubufzCz9wHvM7OI6sk9uloQ6zVOX0SkJOQzi+nHgVHBuMCPAg+a2XEFj6zEJBMxGltTdKQzxQ5FREQGz5NAzMymkB0zfyXZyd0kMCoaoiIa0ktUEZESkc8kNd9w911mdjLZWUPvBu4obFilZ3w824Vme4sqQBGREcSCtRAvBO5w948DRxU5piEnO05fLYgiIqUgnwSxa8bSc4CfuPsjQLRwIZWmrlna6lUBioiMJGZmJwKXA48Ex0JFjGdISsZjmsVURKRE5JMgbghmaPsE8KiZxfK8bkRJJrpmaVMFKCIygvw98DXg4WApp0OAx4sc05BTlYhSr/pRRKQk5DOL6cXAIuAWd99hZpOALxc2rNKTjHet86QWRBGRkcLdnwCeADCzMqDe3a8vblRDTzIeY0VdXitkiYhIkeXTEjgJeMTd15jZaWTXLXy2oFGVILUgioiMPGZ2
v5lVBrOZvgKsMjO9RO0mmYiyrbmDTMaLHYqIiPQinwTx10CnmR0G3AlMA+4vaFQlqLI8TCRk6kIjIjKyzHb3nWRn+V4GzCQ7k6nkSCZipDPOzjat/iEiMtTlkyBm3D1Ndoa2H7j7l8m2KkoOM8sOwtckNSIiI0nEzCJkE8SlwfqHaibrZs9EbnqJKiIy1OWTIKbM7FLgb4DfBscihQupdCUTUc3SJiIysvwbsA6IA0+a2cHAzqJGNAQl413DMPQSVURkqMsnQfwkcCJwk7u/ZWYzgfsKG1ZpSibUgigiMpK4+/fdfYq7n+1ZbwOnFzuuoSYZtCBu00tUEZEhr9cE0d1XAV8CXjazOUCdu99c8MhKUFVc03iLiIwkZjbGzG41s9rg869kWxPzuXaRmb1mZmvNbEkP52Nm9mBw/hkzmxEcn29mLwafl8zsgnzvWSxdCWK9EkQRkSGv1wQxmLl0DfAj4LP448oAACAASURBVH8Cr5vZqQWOqyRlu5i2467hJyIiI8RPgV1kl4S6mGz30nt6u8jMQmTr1bOA2cClZja7W7FrgO3ufhhwG9D1cvYVoMbdjyG7DNW/mVk4z3sWxfiKYCko9bIRERny8lkH8V+BM939NQAzOwL4BXB8IQMrRclEjLZUhpaOTuKxfP5oRUSkxB3q7h/L2f+2mb2Yx3XzgbXu/iaAmT0ALAZW5ZRZDNwQbD8E/NDMzN1bcsqUs2dSnHzuWRThUBnjKiJaCkpEpATkMwYx0pUcArj762iSmh4l411vSFUBioiMEK1mdnLXjpmdBLTmcd0UYH3Ofl1wrMcywWzijUAy+J0FZrYSeBn42+B8PvcsmmQiRkOzWhBFRIa6fJq5as3sLuB/B/uXA7WFC6l0VSWys7TVN7czPVlR5GhERGQQ/C3wMzMbE+xvB64q9I+6+zPAUWY2C7jXzJble62ZXQtcCzB9+vQCRfhe4zVOX0SkJOTTgvhZst1Trg8+q8hWiNLN7lnaVAGKiIwI7v6Su88DjgaOdvdjgcPzuHQDMC1nf2pwrMcyZhYGxgAN3X5/NdAEzMnznrj7ne5e4+411dXVeYQ6MKoSUY1BFBEpAfnMYtru7re6+4XB5za0zEWPkkELorrQiIiMLO6+09271j+8LY9LngMON7OZZhYFLgGWdiuzlD2tkRcBj7m7B9eEAYJ1F48kuxZjPvcsmmQ8prWCRURKwIHOpHLigEYxTHSNQVQXGhGREc16K+DuaTO7DlgOhICfuvtKM7sRqHX3pcDdwH1mthbYRjbhAzgZWGJmKSADfM7d6wF6uucAP9sBSyai7GhJkerMEAnl04FJRESKQVNtDqDySIhELKxJakRERra81jpy90eBR7sd+2bOdhvw8R6uu4999OTp6Z5DRVcvm+3NHRxUWV7kaEREZF/2mSCa2XH7OoVmMd2n8fGoupiKiAxzZvYyPSeCBkwY5HBKQlVOLxsliCIiQ9f+WhD/dT/nXh3oQIaLZCKqFkQRkeHv3GIHUGo0Tl9EpDTsM0F099MHM5DhIhmPUbe9pfeCIiJSstz97WLHUGq6ZvrWS1QRkaFNo8QHWFUiqlnaREREuqmKB2sFa6kLEZEhTQniAEsmomxr7iCTyWuOAhERkRGhclSYcJnpJaqIyBCnBHGAJeMxOjNOY2uq2KGIiEiBmFnlfs5NH8xYSoWZZV+iqoupiMiQts8E0cyuyNk+qdu56woZVCnbPcZCg/BFRIaz/+zaMLM/dTv3fwY3lNKRjMdUP4qIDHH7a0H8Ys72D7qd+1QBYhkWqhJdYyz0hlREZBiznO3x+zknOZKJqOpHEZEhbn8Jou1ju6d9CWiWNhGREcH3sd3TvgSqEmpBFBEZ6va3DqIqvwOQjGudJxGREeAgM/si2RemXdsE+9XFC2toS8a1VrCIyFC3vwTxSDNbQbayOzTYJtg/pOCRlahxFRHM1MVURGSY+wkwuodtgLsGP5zSMD4RpaWjk5aONBXR/f0niIiIFMv+/nWeNWhRDCPhUBnjKqI0aJ0nEZFhy92/va9zZvb+wYyllHSthdjQ1EHFeCWIIiJD0T7HILr727kfoAk4DqgK9ntlZovM7DUzW2tmS3o4HzOzB4Pzz5jZjJxzXwuOv2ZmH8k5PtbMHjKzV81stZmd2IfnHRTqQiMiMrKY2Wwz+//MbC1wR7HjGar2zPStOlJEZKja5+s7M/stsMTdXzGzScALQC3Z7qZ3uvvt+7uxmYWAHwEfBuqA58xsqbuvyil2DbDd3Q8zs0uAm4FPmNls4BLgKGAy8EczO8LdO4HvAb9z94vMLApUHOCzF0wyEdUYRBGRYS54qXlp8EkBBwM17r6ueFENbclEVwui6kgRkaFqf7OYznT3V4LtTwJ/cPfzgAXkt8zFfGCtu7/p7h3AA8DibmUWA/cG2w8BC83MguMPuHu7u78FrAXmm9kY4FTgbgB373D3HXnEMqiSiZhaEEVEhjEzexp4hOyL1o+5+/HALiWH+5eMa6ZvEZGhbn8JYipneyHwKIC77wIyedx7CrA+Z78uONZjGXdPA41Acj/XzgS2AveY2X+b2V1mFs8jlkFVFY9Sr7ejIiLD2WayE9NMYM+spZrhuxddXUzr1ctGRGTI2l+CuN7MvmBmF5Ade/g7ADMbBUQGI7gehINY7nD3Y4Fm4D1jGwHM7FozqzWz2q1btw5mjCQTMXa2pelI55NHi4hIqXH3jwJzgeeBG8zsLWCcmc0vbmRDW0U0TEU0pBZEEZEhbH8J4jVkxwBeDXwipyvnCcA9edx7AzAtZ39qcKzHMmYWBsYADfu5tg6oc/dnguMPkU0Y38Pd73T3Gnevqa4e3CWpxgddaLa3qAIUERmu3L3R3e9x9zPJ1o3fBG4zs/W9XDqiJROa6VtEZCjb3yymW9z9b919sbv/Puf44+5+Sx73fg443MxmBpPJXAIs7VZmKXBVsH0R8Ji7e3D8kmCW05nA4cCz7v4u2ZbN9wXXLARWMcRUdXWhUQUoIjIiuPtmd/+Bu58EnFzseIayZDymWUxFRIaw/c1i2j2Z24u7n9/L+bSZXQcsB0LAT919pZndCNS6+1Kyk83cF0wLvo1sEklQ7pdkk7808PlgBlOALwA/D5LON8lOoDOk7JmlTRWgiMhw1FsdCey3jhzJqhJRNu5oK3YYIiKyD/tbpfZEshPF/AJ4BrC+3tzdHyWY3Cbn2DdzttuAj+/j2puAm3o4/iJQ09dYBtPuWdo0CF9EZLjqdx05UiXjMV7e0FjsMEREZB/2lyBOJLuG4aXAZWSn8/6Fu68cjMBKmVoQRUSGPdWRByg7BrEDdye7spWIiAwl+xuD2Onuv3P3q8gOvl8L/GfQbVT2o7I8TCRk1CtBFBEZllRHHrhkIkY64+xsTRc7FBER6cH+WhAxsxhwDtk3pDOA7wMPFz6s0mZm2UH4mqRGRGTYUh15YKpy1kIcU1GsVbNERGRf9jdJzc+AOWTHEH7b3V8ZtKiGgWQiqlnaRESGKdWRBy4Z3zMM49DBXYVKRETysL8WxCvILkT/d8D1OeM
EDHB3ryxwbCUtmVALoojIMKY68gB1rRWsOlJEZGjaZ4Lo7vscnyi9q4pHeWNLU7HDEBGRAlAdeeD2dDFVLxsRkaFIFVyBZLuYtuPuxQ5FRERkyBinFkQRkSFNCWKBJBMx2lIZWjo6ix2KiIjIkBEJlTG2IqKloEREhigliAWS3P2GVBWgiIhIrmQ828tGRESGHiWIBVKVyM7SVq8KUEREZC/JRExrBYuIDFFKEAskmVALooiISE+qElGNQRQRGaKUIBZIMtG1zpMqQBERkVzJeExrBYuIDFFKEAtk9xhEVYAiItKNmS0ys9fMbK2ZLenhfMzMHgzOP2NmM4LjHzaz583s5eD7jJxr/jO454vB56DBe6K+SSai7GhJkerMFDsUERHpRgligZRHQiRiYerVgigiIjnMLAT8CDgLmA1camazuxW7Btju7ocBtwE3B8frgfPcfS5wFXBft+sud/djgs+Wgj1EP3X1stneopeoIiJDjRLEAhofj2oMooiIdDcfWOvub7p7B/AAsLhbmcXAvcH2Q8BCMzN3/2933xgcXwmMMrPYoEQ9gKo007eIyJClBLGAkoko29TFVERE9jYFWJ+zXxcc67GMu6eBRiDZrczHgBfcPberyj1B99JvmJkNbNgDZ884fdWRIiJDjRLEAkrGY+piKiIiA87MjiLb7fT/yTl8edD19JTgc+U+rr3WzGrNrHbr1q2FD7YHu2f61lJQIiJDTrjYAZSEx/8HbH8LonGIVGS/d28nIBoci8SD7QREKpgyKsUr61uLHb2IiAwtG4BpOftTg2M9lakzszAwBmgAMLOpwMPA37j7G10XuPuG4HuXmd1Ptivrz7r/uLvfCdwJUFNT4/16kvYmWP41OO4qmHI85NloWRUP1gpWC6KIyJCjBDEf296A9c9ARwukgk8evh18/NtlWCgGoSiEo9nvUATecyz4hGPZ82WR4DuUsx3e8+n1XARCXce6b0d6LhvpSn5H5V3Ri4hInzwHHG5mM8kmgpcAl3Urs5TsJDRPAxcBj7m7m9lY4BFgibv/patwkESOdfd6M4sA5wJ/LPiTvPsyvPIbeOFnMGkevP8zMOdj2Zel+1E5Kky4zLQUlIjIEKQEMR8fu2vv/UwmmyR2NEOqOZs47t4O9lPNPPvaep5a9Q4XzqsmHs4QszRR0oQ9RcjTWGc7dKYg3Q6dHdnv9p17jmVS0JmGTDq7nUnvve8FnB7cyoLW0fie79joPa2n0UT2EwvOxath/CEw/lCIVym5FBHZB3dPm9l1wHIgBPzU3Vea2Y1ArbsvBe4G7jOztcA2skkkwHXAYcA3zeybwbEzgWZgeZAchsgmhz8p+MMcfCL8w6uw4kF49i5Yeh38/h/hmCvg/ddA8tAeLzMzTeQmIjJEmXv/epeUgpqaGq+trR303/3jqs18+mc9/24kZMRjYRK5n/Iw8ViY0bHs96hIiFi4jFikjFg4ux0N79mOhSFmGcpDTqwsk/2EMkTpJGKdhOkkYhnCpLHuyeVe213JZypIfJuyiW57U7Ad7Hc0Q/uuYLtpz3d3sUoYPzObLCYP3ZM4Jg+FiqSSRxEpGDN73t1rih1HqRjQ+tEd3v4veO4uWL00W68cega8/9NwxKJsj5cc5//wKV7dtItTj6jmvHmT+NCsCcRjem8tIlIo+daRShALyN1ZvWkXO1o62NWeprk9TVN7ml1te7ab2tM0taVp7sh+7y7XlqY11UlmgP56wmVGJFRGJNT1XUYkbETKcraD49FQNhGNhsqIhLv2LbvfdS6c3Y6FYBQdJH07kzo3UNWxgcqWd6hoWkdo+1uw4x3wzj2BxMZkk8fcxHHcwTD2YBg9Cco0b5KIHDgliH1TsPpx12Z44V6ovQd2bYQx0+D4q+G4v4HEQQCs3dLEL559h9+u2Mjmne2UR8pYeOQEzps3idPedxDlkdD+f0NERPpECWKOYiWIAyHdmaE93fXppKNrO5Xd39fxVKeT6syQzjgd6QzpTIZUZ852Ons+lXFSwbH2dCZ7LCjXEey3pzN0dGa3u46n88hcyyNlTKgoY1bFDo6IbGGmbWaqb2JCagPj29cTb92I5XaTDUWz/xEx7mAYOz2bNI47GMbOyH6r9VFEeqEEsW8KXj92puG1R7Otim89kR3vftRHs62K0xaAGZmM89y6bfx2xSYefXkTDc0dxKMhzjxqIufNm8TJh1UTDevloYhIfylBzFHKCeJQlck4HZ1B4pjO0NSepqG5g4amDrY1t9PQ3MG2pg62NXdkt5u7tttpS2WTwghpptpWptkWZoTqmVW+nUPC9UxhC8nUJkalG/f+0Uh8T2vjuINh4tEw4+TstogIShD7alDrx62vQ+1P4cX7ob0RJszNJouTj4GJ8yBRTbozw1/f3MZ/vLSR3618l8bWFGNGRVh01ETOmzeZEw4ZTzikZFFE5EAoQcyhBHFoaelIB4lkB1t3tbOxsZUN21up295K3fYWNuxopb6pgzitTAsSyIPLtnJEkEBO9i1UpTYRzWSXEPGx07EZp2aTxZmnwJipxX1AESkaJYh9U5T6saMZXv4VPHc3vLtiz/HKKdkXf5PmwaR5dBw0l6c2R/iPFe/yh1WbaWpPk4xHOXvuJM6aM5GjpoxhzKjI4MYuIlLClCDmUIJYelo7OtmwY0/CWLe9K4nM7m/Z2coRVscJZas5ObyKE8peZbTvAqBj9HRCh5xK6JAgaRwzpchPIyKDRQli3xS9fmzdnl0qY9NLez71a4Dgv00qqmDSPNIT5vJyZgb/590qHnwjRFsqe37ymHLeN3E075tYyZETR/O+iaM5tDqhLqkiIj1Qgpij6BWgDLi2VCdrtzSxcmMjKzfuZNWGHXS++wrHdL7CiWWrWFD2KmOsGYCdFdNpn3Iio2edQflhH4TKSUWOXkQKRQli3wzJ+rG9CTav3Dtp3Lo6Oysq4LFKGkcfzpayKtanx7K2JcHKpjgbO8eymXE02HimVo/bkzROyCaOU8eNwjSOXURGMCWIOYZkBSgDrjPjrGtoDhLGbTS9/RLjtjzD0emXWVD2KpXWAsDm6DR2TlhA5ZGncdDchVjl5CJHLiIDRQli35RM/Zhuhy2r9iSMW17Nzo66611It72neFPZaN718WxIj2Gzj+NdxtEYqiI8djIVY6ooT4wjMWY8leOSjBubpLpyFAeNjjG2IqIkUkSGLSWIOUqmApQB5+68u7ONleu3s3VtLaF3nmLS9lrmZVZRadkxjFsiU9k5cQGVR55O9ZwzMHVJFSlZShD7puTrR/dsN9Vd78KuTXs+OzfBrnfpbNxA585NhFu2Ukamx1tk3GhiFDupoIlRtJUlSIUTdEZHQ3klZeVjCMfHEhuVIBIOBUtAhYJloELZY+EQ4TLLJpdmgOXMum1QFoZQJLsWZFk4O5trWTi7H4rkHAvllA1nZ/eOVECkHMKjtBSUiPRLvnWkVqSVYc3MmDRmFJPGjII55wPn4+68tWUnf3npv2hb8wTJ+uc45p1lVK7/NfwhmzDuChLGKiWMIiJDlxlUjM9+Jsx+z+
lQ8CHTCU1bsslj63a8bSftzdtpbtxGa9MOOpp30Nmyg7K2nSTadxJObSfaup6K5mbi3kLEOt9z72JIWZRUWYzO0Cg6Q+V4eFQ2eYxUUBatIBQdRag8TiQWJzyqEsorIRZ8yishNjrYHw3lY7LfIU30IyJ7U4IoI46ZcciEMRxy5llw5lm4O29u2clTL/4XbWufIFlfyzHvLGNMkDBujUxh58QTGDPrdJLzzsbiyWI/goiI9EVZKDv+PBiDbkB58OlNKt3J5sZGGht30ppK055K09KRoa0jRXuqk9ZUmtaOTtpSnbQH2+2pNK2pTtpTnbSn04Q9Q7isk4hliJAhYp1EyRC2TsKWIUL2O0wnEeskTPZjnR2kO1qhowVPtWDpVizdRrijjXLaKaeDcjoYZQ2MYlN23zqooI0ErUTzSGw7Q+Vkogk8VklZrJLQ2MnY+ENg3AwYNzP7PXY6hKP9+AsQkVKiLqYi3bg7b2zeyasrnqZtzRNU1z/LMZlVjLEWUoRZN+5EYsdewrQTLsSiFcUOV0RyqItp36h+LE3uTnNHJztbU+xqS7OrLcXOtuz2zrY0O1tTNLZ00NzcTEfzDtKtO+hs3YW3NWLtu4h2NpGgldG0kLDs92hrpZIWJpVtY7ptoZyOPb+H0V4xCR83g0jVIYSruiWQFeOL9mchIvnTGMQcqgClP7IJYyOv/vdfYOWvqdn1GBNtO82MYm3ydEYdfymHzT+LsrC66YgUmxLEvlH9ODJ1pDPsbEuxoyVFY2uKna0pdrR2sKMlxead7Wzc3kLzto2EG9eRaFnPNNvC9OBzsG2m2hr3ul97eDRtiWmUjZ3GqOoZhMcfnG11HDMt+z1qXM6YTBEpFiWIOVQBykDatquVl576LaFXHuLYpicYba00MJbXqz9MvOYyjqo5jVBIEwmIFIMSxL5R/Si9SXVmeLexjY07WtnYmF2TeMu27aTr3yLcuI6K5jomZTYxzbYyxeqZYvUkbO+ZZdOhCtKVUwiPO5jw+JzEset79EQlkCKDYEgkiGa2CPge2THid7n7d7udjwE/A44HGoBPuPu64NzXgGuATuB6d1+ec10IqAU2uPu5vcWhClAKpXHXLl598ldEVv6aOc1/JWpp1jGZNRPOYsz8Szn2mOOJKFkUGTRKEPtG9aP0l7vT2Jpi/bZW1jU083Z9E5u3bKZt61v4jvVUtm/anThOsa1MK2tgLLv2ukd69BRCh52BHXIaHHIaxKsG/0FERoCiJ4hBEvc68GGgDngOuNTdV+WU+RxwtLv/rZldAlzg7p8ws9nAL4D5wGTgj8AR7t4ZXPdFoAaoVIIoQ0XzjnreeOJ+yl/9NYe1vESZOSs4nDcmnMVBH7iME+ceSVmZ3pCKFJISxL5R/SiF1tSe5u2GZtbVt2QTyIZmNm1toKPhbUa1bGCGbWZB2WpOCq2ikubsNeNmEzn8dGJHLITpJ4LG+4sMiKGQIJ4I3ODuHwn2vwbg7v8jp8zyoMzTZhYG3gWqgSW5ZbuVmwrcC9wEfFEJogxFbQ3v8M4T9xF/7TdMaV9Lu0dYHjmD1PzPceapJzG6XOMVRQpBCWLfqH6UYmrpSPPm1mZW1DXy4tv1NL/9AgfveIaTy17h+LLXiVmatEVoGH8sZYeewfi5ZxKackx2VloR6bOhsA7iFGB9zn4dsGBfZdw9bWaNQDI4/tdu13YtRnc78BVgdAFiFhkQ5cnpHHHhPwL/SGrjK2z6/fdZtO43hP/ye/74l/m8feRnWPihszikOlHsUEVERIqiIhpmzpQxzJkyhssWTAeOo7H1Kl5av4O73nqX1jeepGrL07x/6wqOavguPPtdmizBxnHzycz8IJOPP5vKyUcU+zFEhp2SWgfRzM4Ftrj782Z2Wi9lrwWuBZg+ffogRCfSs8jkOcy4+k5o+g6b/3A7J798L2e+9mn+unoWD0+4guMWfpwPHnGQup+KiMiIN2ZUhFOPqObUI6qBubh/jrfqm/ntmjdofu0xxm36C3MaXmDytsfg+W9RF57OtimnM7FmMQfN/iCESuo/bUWGpJLqYgqcD1wJpMmub1sJ/Mbdr9hfLOpCI0NK+y6anv4p/l8/YnTHZl7NTOM3oz7G5JMv52Pvn6nupyL9oC6mfaP6UUpRS3uK11e/SMOLjzK27jHmpl4map3ssjgbkieTOPocptSch2l9RpG9DIUxiGGyk9QsBDaQnaTmMndfmVPm88DcnElqLnT3i83sKOB+9kxS8yfg8K5JaoJrTwO+pDGIUrI6U6Rf+hUt/3krlTvXsMGT3OfnkDn2b7jk5FnqfipyAJQg9o3qRxkO1m3czOv/9e+E3/g9R7c8Q5XtpJMy6uJz4YiPMGX+RwlPnK2lNGTEK3qCGARxNtkxgyHgp+5+k5ndCNS6+1IzKwfuA44FtgGXuPubwbX/CHyKbGvh37v7sm73Pg0liDIcuMOaP9D02L+SePev7PA493V+mNcPvowLTz2W046oxlSpieRFCWLfqH6U4aZ+Vyv//dfHaFv5KIdsf4qjbB0ADZGJNE1fyISaj1J+2KkQKS9uoCJFMCQSxKFCFaCUjLpa2p64jdiaR+ggzEPpU/njQVfzmXM+wAcO1bpQIr1Rgtg3qh9lOGvpSPPMSyvZ+sJSqjf9Jyf4CkZZB202is1HXsH0c5dgWnNRRhAliDlUAUrJqV9L51++Dy/dT1smxPdTF/DazCv40llzmTNlTLGjExmylCD2jepHGSnSnRmeX7uJtc8to+qNh/lw5i+0W4x1h17OIYu/RqyyutghihScEsQcqgClZG17k85lXye0ZhnrmMQNHVeSmHMWXzrzfcyoihc7OpEhRwli36h+lJGoI53hib/8mchTt3Bqx59ptRirp13KYYu/xtiqCcUOT6RglCDmUAUoJW/NH+lc9hVC297gMT+e76Su5MT31/B3Cw/noEqNoxDpogSxb1Q/ykjm7vx37dOkHv8uC1qeoMlHUTvxYg49/6tMmzKl9xuIlJh868iywQhGRPrp8A8R+txf4cM3clr0VZbHvsLkF27hI/+yjH/+3as0tqaKHaGIiEhJMTOOe/8HWPCVpbx18R95Y8wCTtt8L2PuPJ7ffu8L/N/27jxOqvLO9/jn19X7Tq803TRrs4ggyOICqIgLGo06WdRksky8o5OY7TU3jprkzk1yJ5NksmiceDNjEjNqNCYT1xi3KDsC0somSwMNDXTTTW/0Sq9Vz/xxSigQClq6u3r5vl+vetWpU6eqn/NY1o9vnec8Z9Ouskg3USQiFBBFBovoWJj/NaK+8g4x02/hbt/zLI27hwMrf8dlP1rKf64opb3Lf+b3ERERkROMO28uF/zjC9R9ZilVmRdxw5HHGf/kJTz94y/xxsZd+ANDf8SdyPs0xFRksDqwDl6+B6q2sCPuAr7edDuNKZP4+lVFfHx2AdE+/f4jw4+GmPaM6qPIqbUd2EjNS9+lsHoZjS6RP8XeRMLCL3PzxVNIjI2OdPNEPhSdgxhCBVCGrIAf3n0M3vwerr2Jl+M/wv1HbiArO5d7l0zhmvNyd
Q1FGVYUEHtG9VEkPH/FJmpf+i65lUtpcEn8wfcRkhd8kY8tvID4GF+kmyfSIzoHUWQ4iPLBnC/AV97F5vwd17e/RHHavdzQ9TpffGIDn//tBvbVtka6lSIiIoOSL38muXc9B3cuxxVezF2BP/KxFdfw8g9u58/LVtHlD0S6iSK9TgFRZChIzICP/BS7ayWxuVP5x/aHeTv7+wT2r+XaB1by49d2crSzO9KtFBERGZxGzWLEHc/ivrSehqJbuDHwJh9ZfiNr//U6Vr75ks5RlCFFAVFkKBk5Hf7uZfjYb8iikSfsn/l95q95ZtnbXPXTFbyytZLhMKxcRESkL1jOFEb+7a+I/t/bKDvvi8wMbOOyVZ+m5PsXsfHVx3B+/Rgrg58CoshQYwbTPw5fKYbL7mF26yrWJP8Td9qzfP3JdXz20bcprWmJdCtFREQGLUsZyfhbf0DyvTvZesG3SQ80MmvdV6n6/jR2v/QzXIfqrAxeCogiQ1VsElz5bfjy2/iKruLz7b+jeMS3yDj4OkseXMEPX9lJa4d+6RSJBDNbYmYlZrbHzO47xfNxZvaH4PPrzWxscP3VZvaOmW0N3l8Z8prZwfV7zOwh0wxVIn0uKj6Z6bfcQ84332PN7Aeoc6kUFX+Xlh9OoeLZb0FLdaSbKNJjCogiQ92IsXDr7+CzL5KSksbP+Ql/Sf8pb65cwVU/W8FLWw5p2KlIPzIzH/AwcB1wHnC7mZ130mZ3AEeccxOBjOZLsAAAIABJREFUB4AfBdfXAjc656YDnwOeCHnNL4G/B4qCtyV9thMicoLomBjm3/gFJn1rPa/Oe4xiziNv88N0/uQ86p66C2pKIt1EkbOmgCgyXIy/HO5aBdf9mEn+Ul6Pv5/7+C3ffGoVf/ub9eypbo50C0WGi3nAHufcXudcJ/A0cNNJ29wEPBZc/hOw2MzMObfROXcouH4bkBA82pgHpDrn1jnvF5/HgZv7fldEJFRsdBRLrr+Zi+9/hT9e8hzPs4ikkmfg4Xk0/ef1uO0vgM5TlAFOAVFkOPFFw0V3wlc3YrM/z0c7/8LbKf/E1PL/5voHV/CvL++gRcNORfpaPnAw5HF5cN0pt3HOdQONQOZJ23wMeNc51xHcvvwM7yki/SQh1sdtSxZx3X1P8fjFL/NzPkXToRLsj5+l/cdT8S/9ATRVRrqZIqekgCgyHCVmwA0/w+5aRXz+dL7Nr1me+s9sXvUXrvzJcp5cv1/XdhIZwMxsGt6w07s+xGvvNLNiMyuuqanp/caJyDEp8THced08/v6b/86ya1/n/rhvsq41D9/KHxJ4YBpdv/8M7FsJOtVDBhAFRJHhbOT58Lk/wyceY1RcJ3+I+388GPUA//X8q1z1sxW8sKmCgK7tJNLbKoDRIY8LgutOuY2ZRQNpQF3wcQHwHPBZ51xpyPYFZ3hPAJxzjzjn5jjn5mRnZ5/jrojI2UiMjeYzl07gX+79Jzpv+yNfyXqUX3UtoXXnUnjsRroemgvr/gPaGiLdVBFsOExOMWfOHFdcXBzpZogMbF1tsOYh3JqfY12trI65hB+03IA/dwbfuGYyi6fmoEkRZaAzs3ecc3Mi3Y5wgoFvF7AYL8RtAD7lnNsWss3dwHTn3D+Y2W3A3zjnPmlm6cAK4LvOuWdPet+3ga8C64GXgX93zr0cri2qjyKRs/lgA4+v3InteJ5PR/2VWVF78Ecn4JvxSZj7vyBvRqSbKEPM2dZIBUQROdHRelj3S9z6/8A6mljrm82Pj94Io+dxz7VTuGTCyadBiQwcgyEgApjZ9cCDgA941Dn3fTP7HlDsnHvRzOLxZiidBdQDtznn9prZt4H7gd0hb3eNc67azOYA/wUkAK8AX3FnKPKqjyKRV9HQxmNvlbF5/XJu8b/KLdFriaMDVzAPm3sHTLkB4pIj3UwZAhQQQ6gAinwI7Y3w9q9wax/G2urZYNP5WcdHiR5/GfcsmcKMgvRIt1DkAwZLQBwoVB9FBo6Wjm7+uOEg/716K5c0v87nY9+k0B3C+WKxMZfCxKuh6GrImgQa0SMfggJiCBVAkXPQ2QrFv8WteQhrPcxGJvNg580kTLmGbyyZzMSclEi3UOQYBcSeUX0UGXi6/QFe336YX68sJaZ8HVfHbOT6+PcY1VnmbZBWCEVXeYFx3GU6uihnTQExhAqgSC/oaoeNTxBY/QBRTRVsc+N5qPsmUmbcxNeunszojMRIt1BEAbGHVB9FBrbNBxt4bmMFf958iLjWQyyJ38rHUncw5ei7+LqPgi8WdHRRzpICYggVQJFe1N0JW/6Af+VP8DWUUeJG80v/zaTP+QR3XjGJUekJkW6hDGMKiD2j+igyOHT7A6zeU8sLmw7x2rYqujvbuTZlH3+buYuZHcXEHdnlbaijixKGAmIIFUCRPuDvhm3P0bX834ip38Vel8cT/qs5Mv4mPnrpdC6flIMvSr9iSv9SQOwZ1UeRwedoZzd/3X6Y5zdWsHJ3Lf6AY2F2G3fklXJR97skHFwFXa0QFQNjLoGJV8GExZA7TUcXhzkFxBAqgCJ9KBCAnS/RufwnxFZvphsfS/0zWRZ/FQXzbuYTF40nJzU+0q2UYUIBsWdUH0UGt7qWDv6ytZLnN1bw7gHvGoqXjknmC4VVLGAz8fuXQ3XwCjopeV5QnHgljF8EiRmRa7hEhAJiCBVAkX5yeBv+jU/RvfFp4jpqqXfJvBS4lPLCW1h4+VXMn5hNlI4qSh9SQOwZ1UeRoeNA3VFe2FTB85sqKK1pJcpgRkE6149xXBP3HoVH1hG1dxm0N4BFQf7sYGC8CvIvhChfpHdB+pgCYggVQJF+5u+G0qW0bniCuD2vEu062RXIZ2ncYpLmforrLr2QrOS4SLdShiAFxJ5RfRQZepxzbDvUxOvbD7NyVw1byhsIOEiNj2bBhBHcnHOYiwMbSS1fARXvAA7i02HCouPDUVPzIr0b0gcUEEOoAIpEUFsDXVufoWndE2TWb8TvjLfcdEpH3cjURZ9i3qR8TOdESC9RQOwZ1UeRoa/haCer99SyalctK3fXUNnYDsD47CSuHRfLDcklTG5ZT/TeZdBS5b0oaxKMXeDdxiyAlNwI7oH0FgXEECqAIgNEXSn1bz1G1NanSe88TLNLYGXsfALTPsHMBdczOis10i2UQU4BsWdUH0WGF+cce6pbWLGrhpW7a1m/t46O7gCxvijmjk3n5lENXObbSk5dMbb/Lehs9l6owDgkKCCGUAEUGWACATr3ruLQikfJLX+VBNfOEZfMu3Fz6ZxwLZPm38KEgpGRbqUMQgqIPaP6KDK8tXf52VBWz8pdNazcVUvJYS8QjkyNZ/HkEdyYU8cst424g2/BKQPjQu8+OSeCeyFnSwExhAqgyADW2UrNxr9wZOML5B1eQYprpsNFszl6Bk1jrqLg4o8zuWiShqHKWVFA7BnVRxEJVdXYzspdNSwrqWbV7lpaOrqJ9UVx0fgMFk/K4JqMakY1FEPZati/NiQwTj5+hFGBccBSQAyhAigySPi7qdu5iqq3nyGz/A1G+isB2Bk1geq8
K8macwtTZlxClC8qwg2VgUoBsWdUH0XkdDq7AxSX1bOspJqlO6sprWkFYHxWEoum5HDlpAzmxZcTc3CNAuMgoYAYQgVQZBByjoYD77H/rf8mseyvTGjfQZQ5KsniQNblJM+8icnzriU6VtdYlOMUEHtG9VFEztaBuqMs3XmYpSU1rNtbR2d3gKRYHwuKsrhySg6LijLIaS3xwuK+VXBgLXS2eC9WYBwQFBBDqACKDH5NtRXsWf0MUbteYXJrMQnWSTOJ7E2Zh026hgmX3kxSZn6kmykRpoDYM6qPIvJhHO3s5q09dSwtqWbZzupjM6NOG5XKFZOzuWJyDrPyk4k+vBXKVnmhMTQwZk85cdKb5OwI7s3wMSACopktAX4O+IBfO+d+eNLzccDjwGygDrjVOVcWfO5+4A7AD3zVOfeamY0Obp8LOOAR59zPz9QOFUCRoeVoaxM7Vv+Zju1/YWLjW+RwBIB9sUU0FSwib+7N5Ey+WBf9HYYUEHtG9VFEzpVzjp1VzSzdWc2KkhreOXAEf8CRGh/NwqJsLp+czRWTsslJiobKTccD4/610OUNWyV7Cky40ruNmQ+xiZHdqSEq4gHRzHzALuBqoBzYANzunNsess2XgBnOuX8ws9uAW5xzt5rZecDvgXnAKOANYBKQA+Q55941sxTgHeDm0Pc8FRVAkaGru9vP9k1vUbfxz2RVruQ8/0585miwVCoy55M0bQmF8z5KVFJGpJsq/UABsWdUH0WktzW2dbFmTy3LS6pZXlJDdXMHAOflHT+6eGFhOtH4oXKzFxj3rvBmSfV3gC8WCi+BiYu9wJh7Pmiiul4xEALiJcB3nHPXBh/fD+Cc+0HINq8Ft1lrZtFAFZAN3Be6beh2J/2NF4BfOOf+Gq4tKoAiw0fZwYPsXfsi0fveYNrRDWRaM36iOJA4ja7xVzH6optIKJipYjNEKSD2jOqjiPQl5xw7KptZvssLi+/s944upsRHs7Aoiysm5XD55GxyU+Ohqw32r4HSZVC6FKqDx3+Sco4fXZywSOcvnoOzrZHRfdiGfOBgyONy4KLTbeOc6zazRiAzuH7dSa894eQiMxsLzALWn+qPm9mdwJ0AhYWFH3IXRGSwGTt6NGNH3w3cTUNLGyvWL6N12yuMqV/NtPcegPce4Igvk7rc+Yw470oypy2C9DEKjCIiIr3MzDhvVCrnjUrlS1dMpKm9izW7a1leUsPyXdW8vLUKgKl5qSyeksOiKbOZefVifNcaNB0KhsU3YffrsOVp701HTocJwaOLhRdDdFwE93Bo6suA2GfMLBl4Bvi6c67pVNs45x4BHgHvF9J+bJ6IDBDpyQlcvvh6WHw9Xf4AG7aXcKj4JdLKlzKjYikZh16EN6AxJpf2/IvJmLaImPGXQcZ4BUYREZFelhofw3XT87huet6xcxeXl3jXXfzlilJ+sWwPGUmxXDEpmyun5rBwyidJm/VpCASgajPsedMLjWt/AWsehJhEGH2RNyS18GIomAOxSZHezUGvLwNiBTA65HFBcN2ptikPDjFNw5us5rSvNbMYvHD4pHPu2b5puogMNTG+KOZOnwrTp+LcN9hX08zyd9fTvHM52fXFzNu3jJiyFwA4GpeNG7OApEmXebOrZRUpMIqIiPQiM2NqXipT81L54hUTaDzaxYrdNSzdcZilJdU8u7GC6ChjztgRLJ6Sy6IpRUxYOBO77BvQ0exNdFO61JvsZvkPAAfmg7wLYMylXmAcfbFmSP0Q+vIcxGi8SWoW44W7DcCnnHPbQra5G5geMknN3zjnPmlm04CnOD5JzZtAERAAHgPqnXNfP9u26BwLEQmnrdPP2tIa3ttSTOeeVUxq38xFUTvItQYAOuOz8I2bj2/cQq/oZE/RDKkDlM5B7BnVRxEZiPwBx8YDR1i6s5qlO6vZWdUMwJjMRBZNzmHx1BzmjcsgLjpYi9saoHyDdymNA+ugvNib8AYgc2LwCGPwKOMwHiUU8Ulqgo24HngQ7zIXjzrnvm9m3wOKnXMvmlk88ATeuYT1wG3Oub3B134L+ALQjTeU9BUzWwCsArbihUWAbzrnXg7XDhVAETlbzjn21bayfGc1O7ZvIq78LWaznYujdpJndQAEYpKwUTOx/NmQfyHkz4a00cO24AwkCog9o/ooIoNBRUMbS3d611xcs6eWju4ASbE+FhRlcfmkHBYWZTE6I+TSGN0d3gyp+9/yAuPBddDmXRKLpBwvKI651LukRu75EBUVmR3rZwMiIA4UKoAi8mG1dfpZu7eW5TurKdn5HgVNm5gRVcqF0fuYwn5i6ALAJWadGBhHXQhJmRFu/fCjgNgzqo8iMti8X5ff3OEFxkON7QCMzUxkQVEWC4uyuWRCJqnxMcdfFAhA7a7gEca13rDUxgPec/FpUHgpjJ3vBcaRM8A3KKdpOSMFxBAqgCLSG5xzHKxvY/2+Otbvq2fj3sMkNpRwQVQpc6L3MTd2H6O6DmAEv1fTx5wYGEee7xUi6TMKiD2j+igig5lzjtKaVlbtrmH17lrW7a2jtdOPL8qYOTqdBROzWFiUxQWj04nxnXSUsLEcytbA/tXefX2ptz42xTvCOHa+Nw/BqJngi/ngHx+EFBBDqACKSF+pbGzj7X31rN9Xz9v76qmsrmF61D4ujN7H5YkHmOr2kNpRefwFqfmQPRmyp0LOFO98xuzJCo69RAGxZ1QfRWQo6ewOsPHAEVbvqWXl7lq2ljcQcJASF83FEzJZGDzCODYzETv5tJCmSu86jPvXeIGxtsRbH5MEo+cdD4z5Fw7aS2soIIZQARSR/lLb0sGGkMC4o6qJDNfI7Oh9zE+r4YLYSsYEDpDWupeo7vbjL1Rw7BUKiD2j+igiQ1nD0U7eKq1j1e5aVu2uofxIGwD56QnHwuL8iZmkJ8Z+8MUtNScGxurgPJvR8d7s5lmTIGvy8eXMCRCT0I9713MKiCFUAEUkUhrbuigu88LixoMNvFfRyNFOP1EEmBx3hMWZ9cxJOkyRlZPdXkZM/W6su+34G6TmHy88GRO82dgyJ0B64ZAZ8tKbFBB7RvVRRIYL5xz7646yak8tq3bVsLa0juaObsxgRn4aC4uyWViUxazCEcRGn2LSmqP1wUlv1nrnM9aUQMMBeP+0EgxGjAkGx0knhsgBMieBAmIIFUARGSj8AUdpTQubDjawpbyBLeWN7KhsosvvfRfnJEVz5ch25qfVMC2mkvyu/cQd2Q31e6Gj6fgbmc8rRBkTQsLjeO8+vXDYXoZDAbFnVB9FZLjq9gfYXN7Ayl21rN5Ty6aDDfgDjsRYH5eM94ajLijKZkJ20geHo76vqw3qSr3AeMJtD4T+2JuQ4YXF3GlQMAcK5nr1up9nT1VADKECKCIDWXuXn51VzWw+2MDmYGgsrWnh/a/nghEJTMlNZlZWNzMS6pjoqyanqxzfkb3eSfV1e6Gr9fgb+mJhxNjjYTEtH9IKvEtxpBVAcu6QDZAKiD2j+igi4mlq72JtaR2rdtewanct++uOAjAqLZ6FRdksKMpi/sQsMpJOMRz1ZIEANJV
DzUnBsWrr8R9749O9SewK5nqhMX82JGb04R4qIJ5ABVBEBpvm9i62VjSypbyR9yoa2XW4mb01rXQHvO/sGJ8xPiuZySNTmJybzPS0dqbEHCars4KoI6XeL5r1+6Dx4IlHHgGioiF11PHAmFbgDWUNfRyfGoG9PncKiD2j+igicmoH6o6yao83O+qaPbU0tXvDUc8flcbF4zOYO9a7jTibwPi+9y+3Ub4BKoqhvBiqt4MLXt49cyLkzwkeZZzjXaOxF08nUUAMoQIoIkNBR7efvTWt7DrczM6qZnZVefcVDceHsSTF+pg0MoUpI1OYlJvC+OxkxiV3M8rqiG4+5AXGxvITb82HINB94h+LTYGUXEgeGf4+Ph1ON/QmAhQQe0b1UUTkzLr9AbZUNLJ6dy2rd9eyqbyBzm4v1BXlJDN3XAYXjfMC46j0Hk5U09EChzYGQ+M73n3LYe+56HjIm+mFxUvu9n7cPQcKiCFUAEVkKGtu72LX4RZKqpopqWqi5HAzJVXNHDnadWyb6ChjdEYiYzITGZuZ5N1nJTE2M4mCtFhi2mqCgfEgNByE5kporvKK1Pv3XUc/+Md9cR8MjknZkJwNSTknLscl93lfKCD2jOqjiEjPtXf52VLeyIbgJHTv7D9CS4f3Q2t+egLzgmFx3rgRTMhOPv05jKfinFeLy4NHGCuK4dAm+OpG75SRc6CAGEIFUESGG+ccNS0dlNUepayulf11rceWy2pbae30H9vWF2UUjEhgTGYSYzMTGZOZRGFGInlp8YxKT2BEYgwG0NF8YmBsroKWKmg+fOJ9e+OpGxWTCElZXlhMzvngcvYU7wT+c6CA2DOqjyIi584fcOyobOLtffVsKPNutS2dAGQmxTJn7Ajmjs1g9pgRTM1LJT6mh/MAdHdCdA+Gsp7G2dbI6HP+SyIiMuCYGTkp8eSkxDNv3IknvTvnqGvtpKy2lbK6o+yva2VfbSv7646ycf8RmjtOHG4aFx3FqPQE8tLiyUtLYFR6DiPTChmVkUDeeG9danz08V9Iuzuhteb4raX6g8sNB7yhNK214IJhddZn4KZf9Ef3RJyZLQF+DviAXzvnfnjS83HA48BsoA641TlXZmaZwJ+AucB/Oee+HPKa5UAe8P6Y42ucc9V9vS8iIsOdL8o4Pz+N8/PT+MKCcTjn2FfbGjzCeIQNZfW8ts0bNhrjM6aMTGVGQRoXFKQzY3QaRTkp+KLCHGXshXDYEwqIIiLDjJmRlRxHVnIcc8Z+MDzWt3ZSfqSNysY2DjW0e/eN7VQ2tLG2tJbDzR34AyeOPkmK9ZEXDJG5qfHkpMSRk5JGTmouOSPiyCmMJyc17oO/mgYC0Fbvhcbo+L7e9QHBzHzAw8DVQDmwwcxedM5tD9nsDuCIc26imd0G/Ai4FWgH/g9wfvB2sk8753RIUEQkgsyM8dnJjM9O5ta5hQBUNbaz6eARNh1sZEt5Ay9uOsST6w8AkBDjY3p+GjMK0pgxOp0LCtIozEjs2dDUXqSAKCIix5gZmclxZCbHccHo9FNu0+0PUNPScSw8Vja0cyh4X9nUTml1LdXNHcdmXA2VGh9NzrEAGXdsOTsllckjU5jS1zs4MMwD9jjn9gKY2dPATUBoQLwJ+E5w+U/AL8zMnHOtwGozm9iP7RURkXM0Mi2eJWl5LDk/D4BAwLGvrpUt5Q1sPtjI5vIGHl+3n87V+wBIT4xhRoEXFmcUpHPJhEyS4/onuikgiohIj0T7oshLSyAvLQEYccptAgHHkaOdVDd3eLem9hPvmzt458ARDjd1HJsJ7tY5o/nRx2f0455ETD5wMORxOXDR6bZxznWbWSOQCdSe4b1/a2Z+4BngX9xwmGhARGQQiooyJmQnMyE7mVtmFQDQ5Q9QUtXMlvLGY9dG/v/La/EHHG/842VMzEnpl7YpIIqISK+Lijp+JHJq3um3c87R1NZNdXM7cdE9PGlfTvZp51yFmaXgBcTP4J3HeAIzuxO4E6CwsLB/WygiIqcV44s6di7jpy7yvp/bOv1sO9TI+Ky+nwn8fVH99pdEREROYmakJcZQlJtCYWZipJvTXyqA0SGPC4LrTrmNmUUDaXiT1ZyWc64ieN8MPIU3lPVU2z3inJvjnJuTnZ39oXZARET6R0KsjzljM4gKN4lNL1NAFBER6V8bgCIzG2dmscBtwIsnbfMi8Lng8seBpeGGi5pZtJllBZdjgBuA93q95SIiMuRpiKmIiEg/Cp5T+GXgNbzLXDzqnNtmZt8Dip1zLwK/AZ4wsz1APV6IBMDMyoBUINbMbgauAfYDrwXDoQ94A/hVP+6WiIgMEQqIIiIi/cw59zLw8knr/jlkuR34xGleO/Y0bzu7t9onIiLDl4aYioiIiIiICKCAKCIiIiIiIkEKiCIiIiIiIgIoIIqIiIiIiEiQAqKIiIiIiIgACogiIiIiIiISpIAoIiIiIiIiAJhzLtJt6HNmVoN3EeFzkQXU9kJzhir1T3jqn/DUP+Gpf8IL7Z8xzrnsSDZmMOml+gj6jJ6J+ic89U946p/w1D/h9bhGDouA2BvMrNg5NyfS7Rio1D/hqX/CU/+Ep/4JT/0TefpvEJ76Jzz1T3jqn/DUP+F9mP7REFMREREREREBFBBFREREREQkSAHx7D0S6QYMcOqf8NQ/4al/wlP/hKf+iTz9NwhP/ROe+ic89U946p/wetw/OgdRREREREREAB1BFBERERERkSAFxDMwsyVmVmJme8zsvki3ZyAyszIz22pmm8ysONLtiTQze9TMqs3svZB1GWb2VzPbHbwfEck2RtJp+uc7ZlYR/AxtMrPrI9nGSDKz0Wa2zMy2m9k2M/tacL0+Q4TtH32GIkA1MjzVxxOpPoan+hie6mN4vVkfNcQ0DDPzAbuAq4FyYANwu3Nue0QbNsCYWRkwxzmna9AAZnYZ0AI87pw7P7ju34B659wPg/+IGuGcuzeS7YyU0/TPd4AW59xPItm2gcDM8oA859y7ZpYCvAPcDHwefYbC9c8n0WeoX6lGnpnq44lUH8NTfQxP9TG83qyPOoIY3jxgj3Nur3OuE3gauCnCbZIBzjm3Eqg/afVNwGPB5cfw/ocdlk7TPxLknKt0zr0bXG4GdgD56DMEhO0f6X+qkdIjqo/hqT6Gp/oYXm/WRwXE8PKBgyGPy9E/RE7FAa+b2TtmdmekGzNA5TrnKoPLVUBuJBszQH3ZzLYEh9gMy+EhJzOzscAsYD36DH3ASf0D+gz1N9XIM1N9PDN9t52ZvttOovoY3rnWRwVE6Q0LnHMXAtcBdweHSMhpOG9ct8Z2n+iXwARgJlAJ/DSyzYk8M0sGngG+7pxrCn1On6FT9o8+QzIQqT72gL7bTknfbSdRfQyvN+qjAmJ4FcDokMcFwXUSwjlXEbyvBp7DG3YkJzocHBv+/hjx6gi3Z0Bxzh12zvmdcwHgVwzzz5CZxeB9uT/pnHs2uFqfoaBT9Y8+QxGhGnkGqo9nRd9tYei77USqj+H1Vn1UQAxvA1BkZuPMLBa4DXgxwm0aUMwsKXgiLGaWBFwDvB
f+VcPSi8DngsufA16IYFsGnPe/2INuYRh/hszMgN8AO5xzPwt5Sp8hTt8/+gxFhGpkGKqPZ03fbWHou+041cfwerM+ahbTMwhOBfsg4AMedc59P8JNGlDMbDzer6IA0cBTw72PzOz3wBVAFnAY+L/A88AfgUJgP/BJ59ywPBH9NP1zBd7QBweUAXeFnE8wrJjZAmAVsBUIBFd/E+88gmH/GQrTP7ejz1C/U408PdXHD1J9DE/1MTzVx/B6sz4qIIqIiIiIiAigIaYiIiIiIiISpIAoIiIiIiIigAKiiIiIiIiIBCkgioiIiIiICKCAKCIiIiIiIkEKiCIDkJn5zWxTyO2+XnzvsWY2bK+jJCIig5tqpEjfio50A0TklNqcczMj3QgREZEBSDVSpA/pCKLIIGJmZWb2b2a21czeNrOJwfVjzWypmW0xszfNrDC4PtfMnjOzzcHbpcG38pnZr8xsm5m9bmYJwe2/ambbg+/zdIR2U0REpMdUI0V6hwKiyMCUcNLwmVtDnmt0zk0HfgE8GFz378BjzrkZwJPAQ8H1DwErnHMXABcC24Lri4CHnXPTgAbgY8H19wGzgu/zD321cyIiIudANVKkD5lzLtJtEJGTmFmLcy75FOvLgCudc3vNLAaocs5lmlktkOec6wqur3TOZZlZDVDgnOsIeY+xwF+dc0XBx/cCMc65fzGzV4EW4HngeedcSx/vqoiISI+oRor0LR1BFBl83GmWe6IjZNnP8fORPwI8jPdL6gYz03nKIiIymKhGipwjBUSRwefWkPu1weW3gNuCy58GVgWX3wS+CGBmPjNLO92bmlkUMNo5twy4F0gDPvALrYiIyACmGilyjvTLh8jAlGBmm0Iev+qce38a7xFmtgXvF87bg+u+AvzWzO4BaoC/C67/GvCImd2B9yvoF4HK0/xNH/C7YIE04CHnXEOv7ZGIiEjvUI0U6UM6B1FkEAmeXzHHOVcb6baIiIgMJKqRIr1DQ0xFREREREQE0BFEERERERHDBQMHAAAAQUlEQVQRCdIRRBEREREREQEUEEVERERERCRIAVFEREREREQABUQREREREREJUkAUERERERERQAFRREREREREgv4HpoGSSMNdUcsAAAAASUVORK5CYII=\n",
+      "text/plain": [
+       "<Figure size 1080x360 with 2 Axes>"
+      ]
+     },
+     "metadata": {
+      "needs_background": "light"
+     },
+     "output_type": "display_data"
+    },
+    {
+     "data": {
+      "image/png": "iVBORw0KGgoAAAANSUhEUgAAA4gAAAE9CAYAAABJKEwhAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4xLjEsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy8QZhcZAAAgAElEQVR4nOzdeXhV1fX/8ffKRAaGBAgquSCjA6NCxBErUhVHikUFJ1QsrVVpf7ZatK1aqv3WatVqrdaKVqmKVkWpgjjhjMggIggqAkKYCTMhhCTr98c9gRBDuJDc3Nzk83qePNy7zz4n68DzuF3n7L2XuTsiIiIiIiIiCbEOQEREREREROoGJYgiIiIiIiICKEEUERERERGRgBJEERERERERAZQgioiIiIiISEAJooiIiIiIiACQFOsAakPLli29Xbt2sQ5DRESibObMmevcPTvWccQLjY8iIg1HpGNkg0gQ27Vrx4wZM2IdhoiIRJmZfRfrGOKJxkcRkYYj0jFSU0xFREREREQEUIIoIiIiIiIiASWIIiIiIiIiAjSQNYgiInuzc+dO8vLyKCwsjHUosh9SU1MJhUIkJyfHOhQRkXpJ42P8qu4YqQRRRBq0vLw8mjRpQrt27TCzWIcjEXB38vPzycvLo3379rEO54CY2QDgb0Ai8Ji7/7nC8UbAU0BvIB+4yN2XmFkf4NGybsDt7j4+OGcJsAUoAYrdPbc27kVE6ieNj/GpJsZITTEVkQatsLCQFi1aaPCLI2ZGixYt4vaptpklAg8BZwJdgKFm1qVCt+HABnfvBNwH3BW0zwVy3f0oYADwTzMr/7C3n7sfpeRQRKpL42N8qokxUgmiiDR4GvziT5z/m/UBFrr7IncvAsYBAyv0GQg8GXx+AehvZubuBe5eHLSnAl4rEYtIgxTn/61tsKr776YEUUQkxho3bhzrEKR25QDLyn3PC9oq7RMkhJuAFgBmdqyZzQO+AH5WLmF04A0zm2lmI6IYv4hIrdD4GBtagygiIhJH3H0a0NXMjgSeNLNJ7l4InOTuy82sFfCmmS1w9/crnh8kjyMA2rZtW6uxi4hI3ac3iBGY8PkKpn6bH+swRKQBWbJkCaeeeio9evSgf//+LF26FID//ve/dOvWjZ49e3LyyScDMG/ePPr06cNRRx1Fjx49+Oabb2IZuuzbcqBNue+hoK3SPsEaw2aEN6vZxd3nA1uBbsH35cGfa4DxhKeyfo+7P+ruue6em52dXa0bWZpfwNipS9i2o3iffUVEaoLGx+hTghiBuyYtYNz0pbEOQ0QakOuvv55hw4YxZ84cLrnkEkaOHAnA6NGjmTx5Mp9//jkTJkwA4JFHHuEXv/gFs2fPZsaMGYRCoViGLvs2HehsZu3NLAUYAkyo0GcCMCz4PBh4x909OCcJwMwOBY4AlphZhpk1CdozgNMJb2gTVfNWbOL3r8xjSf62aP8qERFA42Nt0BTTCISy0li+YXuswxCRKPvD/+bx5YrNNXrNLq2bctu5Xff7vKlTp/LSSy8BcNlll3HTTTcBcOKJJ3LFFVdw4YUXcv755wNw/PHHc+edd5KXl8f5559P586da+4GpMa5e7GZXQdMJlzm4nF3n2dmo4EZ7j4BGAOMNbOFwHrCSSTAScAoM9sJlAI/d/d1ZtYBGB9sTJAEPOPur0f7XkJZ6QDkbdhO19bNov3rRCRGND42LHqDGIGcrDSWb1SCKCKx98gjj3DHHXewbNkyevfuTX5+PhdffDETJkwgLS2Ns846i3feeSfWYco+uPtEdz/M3Tu6+51B261Bcoi7F7r7Be7eyd37uPuioH2su3cNSln0cveXg/ZF7t4z+Olads1oC2WlAeEEUUQkljQ+1hy9QYxAKDONVZsLKSouJSVJObVIfXUgTzKj5YQTTmDcuHFcdtllPP300/Tt2xeAb7/9lmOPPZZjjz2WSZMmsWzZMjZt2kSHDh0YOXIkS5cuZc6cOZx66qkxvgNpCDLTk8lISSRvQ0GsQxGRKNL42LAoQYxAKCsdd1i1qZC2LdJjHY6I1DMFBQV7rIu44YYbePDBB7nyyiu5++67yc7O5oknngDgxhtv5JtvvsHd6d+/Pz179uSuu+5i7NixJCcnc/DBB3PLLbfE6lakgTEzQlnpeoMoIlGh8TE2lCBGIKdsCs3GAiWIIlLjSktLK22vbCpM2bqL8kaNGsWoUaNqPC6RSISy0pQgikhUaHyMDc2XjEBOptZYiIiIVCaUlUbe+gLcPdahiIhIDVCCGIFDMlMxQzuZioiIVNCmeTpbdhSzebtqIYqI1AdKECPQKCmRVk0aaSdTERGRCsp2Ml2mjWpEROoFJYgRyslM0y5tIiIiFZSvhSgiIvFPCWKEQlnpeoMoIiJSwe5aiHqIKiJSHyhBjFBOVhorNxZSUqpF+CIiImWapSXTuFGS3iCKiNQTShAjFMpKo7jUWb25MNahiEg90q9fPyZPnrxH2/33388111xT5XmNGzcGYMWKFQwePLjSPqeccgozZsyo8jr3338/BQW73/ycddZZbNy4MZLQq3T77bdzzz33VPs6UveFayGq1IWI1CyNj7GjBDFCZaUuNM1URGrS0KFDGTdu3B5t48aNY+jQoRGd37p1a1544YUD/v0VB8CJEyeSmZl5wNeThimcIGqKqYjUHI2PsaMEMUJaYyEi0TB48GBee+01ioqKAFiyZAkrVqygb9++bN26lf79+9OrVy+6d+/OK6+88r3zlyxZQrdu3QDYvn07Q4YM4cgjj2TQoEFs3777gdY111xDbm4uXbt25bbbbgPggQceYMWKFfTr149+/foB0K5dO9atWwfAvffeS7du3ejWrRv333//rt935JFH8pOf/ISuXbty+umn7/F79qWya27bto2zzz6bnj170q1bN5577jkgXOC4S5cu9OjRg1//+tf79fcqtSuUlc7yDdtVC1FEaozGx9iNj0k1fsVyzGwA8DcgEXjM3f9c4Xgj4CmgN5APXOTuS4JjNwPDgRJgpLtPDtr/H3A14MAXwJXuHvV5nzmZ4V3aVAtRRGpS8+bN6dOnD5MmTWLgwIGMGzeOCy+8EDMjNTWV8ePH07RpU9atW8dxxx3Heeedh5lVeq2HH36Y9PR05s+fz5w5c+jVq9euY3feeSfNmzenpKSE/v37M2fOHEaOHMm9997LlClTaNmy5R7XmjlzJk888QTTpk3D3Tn22GP5wQ9+QFZWFt988w3PPvss//rXv7jwwgt58cUXufTSS/d5r3u75qJFi2jdujWvvfYaAJs2bSI/P5/x48ezYMECzKxGpvVI9ISy0nbVQmyWnhzrcESkHtD4GLvxMWoJopklAg8BpwF5wHQzm+DuX5brNhzY4O6dzGwIcBdwkZl1AYYAXYHWwFtmdhhwMDAS6OLu283s+aDfv6N1H2XSUhJpkZGiKaYi9dmkUbDqi5q95sHd4cw/V9mlbBpN2QA4ZswYANydW265hffff5+EhASWL1/O6tWrOfjggy
u9zvvvv8/IkSMB6NGjBz169Nh17Pnnn+fRRx+luLiYlStX8uWXX+5xvKIPP/yQQYMGkZGRAcD555/PBx98wHnnnUf79u056qijAOjduzdLliyJ6K9ib9ccMGAAv/rVr/jNb37DOeecQ9++fSkuLiY1NZXhw4dzzjnncM4550T0OyQ2ytdCbJbeLMbRiEiN0/i4S0MYH6M5xbQPsNDdF7l7ETAOGFihz0DgyeDzC0B/C6f+A4Fx7r7D3RcDC4PrQTipTTOzJCAdWBHFe9hDjhbhi0gUDBw4kLfffptZs2ZRUFBA7969AXj66adZu3YtM2fOZPbs2Rx00EEUFu7/hInFixdzzz338PbbbzNnzhzOPvvsA7pOmUaNGu36nJiYSHFx8QFfC+Cwww5j1qxZdO/end/97neMHj2apKQkPv30UwYPHsyrr77KgAEDqvU7JLpUC1FEokHjY2zGx2hOMc0BlpX7ngccu7c+7l5sZpuAFkH7JxXOzXH3qWZ2D7AU2A684e5vVPbLzWwEMAKgbdu21b8bwk9IF6zcUiPXEpE6aB9PMqOlcePG9OvXj6uuumqPxfebNm2iVatWJCcnM2XKFL777rsqr3PyySfzzDPPcOqppzJ37lzmzJkDwObNm8nIyKBZs2asXr2aSZMmccoppwDQpEkTtmzZ8r0pNH379uWKK65g1KhRuDvjx49n7Nix1brPvV1zxYoVNG/enEsvvZTMzEwee+wxtm7dSkFBAWeddRYnnngiHTp0qNbvlujSOn2Rek7j4y4NYXyM6hrEmmZmWYTfLrYHNgL/NbNL3f0/Ffu6+6PAowC5ubk1smo+JzONt+evwd33OsdZRORADB06lEGDBu2xY9sll1zCueeeS/fu3cnNzeWII46o8hrXXHMNV155JUceeSRHHnnkrietPXv25Oijj+aII46gTZs2nHjiibvOGTFiBAMGDKB169ZMmTJlV3uvXr244oor6NMnPHnj6quv5uijj454ugzAHXfcsWuhPUBeXl6l15w8eTI33ngjCQkJJCcn8/DDD7NlyxYGDhxIYWEh7s69994b8e+V2qdaiCISLRofa398tGjtOGZmxwO3u/sZwfebAdz9/8r1mRz0mRpMGV0FZAOjyvct6weEgAHuPjxovxw4zt1/XlUsubm5vq9aJ5F48uMl3DZhHp/+tj+tmqRW+3oiEnvz58/nyCOPjHUYcgAq+7czs5nunhujkOJOTY2PAAPuf59QVhqPDTumRq4nIrGl8TG+VWeMjOYaxOlAZzNrb2YphDeTmVChzwRgWPB5MPCOhzPWCcAQM2tkZu2BzsCnhKeWHmdm6cFaxf7A/Cjewx521ULUE1IREZE9hLLS9QZRRKQeiFqC6O7FwHXAZMJJ3PPuPs/MRpvZeUG3MUALM1sI3MDuN4fzgOeBL4HXgWvdvcTdpxHezGYW4RIXCQTTSGtDTrDGQjuZioiI7CkUbOSmWogiIvEtqmsQ3X0iMLFC263lPhcCF+zl3DuBOytpvw24rWYjjUzOrkX4ShBFRETKC2WlsXVHMZu27yQzPSXW4YiIyAGK5hTTeqdpajJNU5M0xVSkntEbj/ijf7O6R6UuROof/bc2PlX3300J4n7KyUrXFFOReiQ1NZX8/HwNgnHE3cnPzyc1VZuF1SUqdSFSv2h8jE81MUbGVZmLuiCUlcZ3+dtiHYaI1JBQKEReXh5r166NdSiyH1JTUwmFQrEOQ8ppozeIIvWKxsf4Vd0xUgnifsrJTOPjhetUC1GknkhOTqZ9+/axDkMk7jVNS6KJaiGK1BsaHxsuTTHdT6GsNLYVlbCxYGesQxEREakzzIycrDRNMRURiXNKEPdTSKUuREREKqVaiCIi8U8J4n7KydQaCxERkcqoFqKISPxTgriftEubiIhI5crXQhQRkfikBHE/ZaYnk56SqCmmIiIiFagWoohI/FOCuJ/MjJzMNJZr8BMREdlDm+aaZSMiEu+UIB6AsjUWIiIislvZG8Rl6zVGiojEKyWIByAnK01TTEVERCpolpZMk9QkvUEUEYljShAPQE5mOpu272RLoRbhi4iIlKdSFyIi8U0J4gFQLUQREZHKaRmGiEh8U4J4AHLKEkQNgCIiInsIJ4gFqoUoIhKnlCAeAL1BFBERqVwoK51tRSVsLNAyDBGReKQE8QC0zGhESlKCptCIiIhUUPYQVWOkiEh8UoJ4ABISVAtRRESkMrsTRO1kKiISj5QgHqCyNRYiIiKyW1ktRL1BFBGJT0oQD1BOpmohioiIVKRaiCIi8U0J4gHKyUxj3dYiCneWxDoUERGROkW1EEVE4ldUE0QzG2BmX5nZQjMbVcnxRmb2XHB8mpm1K3fs5qD9KzM7I2g73Mxml/vZbGa/jOY97E2ouRbhi4iIVEa1EEVE4lfUEkQzSwQeAs4EugBDzaxLhW7DgQ3u3gm4D7grOLcLMAToCgwA/mFmie7+lbsf5e5HAb2BAmB8tO6hKjmZ4TUWmmYqIiL760AfoJpZn3IPST83s0GRXrM2tclKVy1EEZE4Fc03iH2Ahe6+yN2LgHHAwAp9BgJPBp9fAPqbmQXt49x9h7svBhYG1yuvP/Ctu38XtTuoQk5ZLUQ9IRURkf1QnQeowFwgN3hQOgD4p5klRXjNWhPKSlMtRBGROBXNBDEHWFbue17QVmkfdy8GNgEtIjx3CPBsDca7Xw5q0oikBNMifBER2V8H/ADV3QuC8RIgFSh7RRfJNWuNaiGKiMSvuNykxsxSgPOA/1bRZ4SZzTCzGWvXrq3xGJISEzi4WaqmmIqIyP6qzgNUzOxYM5sHfAH8LDgeyTVrTVmpi2V6iCoiEneimSAuB9qU+x4K2irtY2ZJQDMgP4JzzwRmufvqvf1yd3/U3XPdPTc7O/uAb6Iqoaw0TTEVEZFa5e7T3L0rcAxws5ml7s/50X6ACruXYWiWjYhI/Ilmgjgd6Gxm7YM3fkOACRX6TACGBZ8HA+94eEX7BGBIsEi/PdAZ+LTceUOJ4fTSMjmZ2sZbRET2W3UeoO7i7vOBrUC3CK9Zdl7UH6A2S0umaWqSxkgRkTgUtQQxmPJyHTAZmA887+7zzGy0mZ0XdBsDtDCzhcANwKjg3HnA88CXwOvAte5eAmBmGcBpwEvRij1SOVlprN5SSFFxaaxDERGR+HHAD1CDc5IAzOxQ4AhgSYTXrFWqhSgiEp+Sonlxd58ITKzQdmu5z4XABXs5907gzkratxGsw4i1UFYa7rBqUyFtW6THOhwREYkD7l5sZmUPUBOBx8seoAIz3H0C4QeoY4MHqOsJJ3wAJwGjzGwnUAr83N3XAVR2zVq9sQpCWWksyd8WyxBEROQARDVBrO9CmbvXWChBFBGRSB3oA1R3HwuMjfSasRTKSufDhetwd8IVrEREJB7E5S6mdcWuRfjayVRERGQPoaw0CopK2KBaiCIicUUJYjUc0iwNM7STqYiISAUh7WQqIhKXlCBWQ0pSAgc1SdUifBERkQrKaiFqjBQRiS9KEKspJyuN5Rv1dFRERKS8UHO9QRQRiUdKE
KsplJWmp6MiIiIVNE1NpllassZIEZE4owSxmnIy01i1qZCSUo91KCIiInWKHqKKiMQfJYjVFMpKp7jUWb25MNahiIiI1CnhBFFTTEVE4okSxGraVepCT0hFRET2EMpKJ2/Ddtw1y0ZEJF4oQaymnMxwgqiNakRERPZUVgtx/baiWIciIiIRUoJYTWV1nlQLUUREZE8qdSEiEn+UIFZTanIiLRunaPATERGpIKRlGCIicUcJYg3IyUxj+UYNfiIiIuXtXqevZRgiIvFCCWINCGWla4qpiIhIBaqFKCISf5Qg1oCcrDTyNm6nVLUQRURE9qBSFyIi8UUJYg0IZaVRVFzKum07Yh2KiIhInRJOEPUGUUQkXihBrAG7Sl1oABQREdmDaiGKiMQXJYg1IEe7tImIiFQqlJXG9p2qhSgiEi+UINaAXW8QtZOpiIjIHtqoFqKISFxRglgDmuzapU2L8EVERMoLNdcsGxGReKIEsYbkZKZpDaKIiEgFZbNs9BBVRCQ+KEGsIaGsNE0xFRERqaBJajKZ6aqFKCISL6KaIJrZADP7yswWmtmoSo43MrPnguPTzKxduWM3B+1fmdkZ5dozzewFM1tgZvPN7Pho3kOkcoJtvLVLm4iIyJ5UC1FEJH5ELUE0s0TgIeBMoAsw1My6VOg2HNjg7p2A+4C7gnO7AEOArsAA4B/B9QD+Brzu7kcAPYH50bqH/ZGTmUZBUQkbC3bGOhQREZE6JZSZzjK9QRQRiQvRfIPYB1jo7ovcvQgYBwys0Gcg8GTw+QWgv5lZ0D7O3Xe4+2JgIdDHzJoBJwNjANy9yN03RvEeIhYKdmnTNFMREZE9lb1B1CwbEZG6L5oJYg6wrNz3vKCt0j7uXgxsAlpUcW57YC3whJl9ZmaPmVlGdMLfP6EsLcIXERGpTCgrjcKdpeSrFqKISJ0Xb5vUJAG9gIfd/WhgG/C9tY0AZjbCzGaY2Yy1a9dGPbDdCaLeIIqIiJQXUi1EEZG4Ec0EcTnQptz3UNBWaR8zSwKaAflVnJsH5Ln7tKD9BcIJ4/e4+6PunuvuudnZ2dW8lX1rlpZMRkqippiKiIhUsLsWombZiIjUddFMEKcDnc2svZmlEN50ZkKFPhOAYcHnwcA7Hl6gMAEYEuxy2h7oDHzq7quAZWZ2eHBOf+DLKN5DxMxs106mIiIistvuWogaI0VE6rqkaF3Y3YvN7DpgMpAIPO7u88xsNDDD3ScQ3mxmrJktBNYTTiIJ+j1POPkrBq5195Lg0tcDTwdJ5yLgymjdw/4KZaWzXIOfiIjIHnbXQtQbRBGRui5qCSKAu08EJlZou7Xc50Lggr2ceydwZyXts4Hcmo20ZuRkpjHzuw2xDkNERKTOCWmWjYhIXIi3TWrqtJysNDZt38mWQtVCFBERKa9NVroSRBGROKAEsQaV7WSqjWpERET2pFqIIiLxQQliDdq1CH+9EkQREZHyQlnpqoUoIhIHlCDWoLI6T3qDKCIisifVCxYRiQ9KEGtQy8YpNEpKUIIoIiJSQdlDVO1kKiJStylBrEFmRk5mmgY/ERGRCnL0BlFEJC4oQaxhOVlpqoUoIiJSQeNGSWSlJ7NsvR6iiojUZUoQa1goK01TTEVERCoRUqkLEZE6TwliDcvJTGPd1iK2F5XEOhQREZE6pazUhYiI1F1KEGuYdjIVERGpXDhB3K5aiCIidZgSxBpWtghfCaKIiMieQlnp7CguZd1W1UIUEamr9itBNLMEM2sarWDqg5zMsl3aNIVGRKS+M7MLzKxJ8Pl3ZvaSmfWKdVx11e5aiBojRUTqqn0miGb2jJk1NbMMYC7wpZndGP3Q4tNBTVNJSjDtZCoi0jD83t23mNlJwA+BMcDDMY6pztpdC1FjpIhIXRXJG8Qu7r4Z+BEwCWgPXBbVqOJYYoJxSGaqppiKiDQMZTuSnQ086u6vASkxjKdOC6kWoohInRdJgphsZsmEE8QJ7r4T0OryKoQytY23iEgDsdzM/glcBEw0s0ZENjtngJl9ZWYLzWxUJccbmdlzwfFpZtYuaD/NzGaa2RfBn6eWO+fd4Jqzg59WNXaXNSSjURLNM1I0xVREpA6LJEH8J7AEyADeN7NDgc3RDCre5WSlaYqpiEjDcCEwGTjD3TcCzYEql2GYWSLwEHAm0AUYamZdKnQbDmxw907AfcBdQfs64Fx37w4MA8ZWOO8Sdz8q+FlTjfuKmrKdTEVEpG7aZ4Lo7g+4e467n+Vh3wH9aiG2uJWTmcbqLYUUFZfGOhQREYkidy8A1gAnBU3FwDf7OK0PsNDdF7l7ETAOGFihz0DgyeDzC0B/MzN3/8zdVwTt84C04K1l3FAtRBGRui2SaTAHmdkYM5sUfO9C+Kml7EUoKw13WLlJT0hFROozM7sN+A1wc9CUDPxnH6flAMvKfc8L2irt4+7FwCagRYU+PwZmufuOcm1PBNNLf29mFvGN1KJQVrpqIYqI1GGRTDH9N+HpM62D718Dv4xWQPXBrlqImkIjIlLfDQLOA7YBBG/3mkT7l5pZV8LTTn9arvmSYOpp3+Cn0g3lzGyEmc0wsxlr166NdqjfE8pKUy1EEZE6LJIEsaW7Pw+Uwq4nmSVVn9KwhTK1jbeISANR5OFXYQ4QlITal+VAm3LfQ0FbpX3MLAloBuQH30PAeOByd/+27AR3Xx78uQV4hvBU1u9x90fdPdfdc7OzsyMIt2apFqKISN0WSYK4zcxasHvwO47wVBfZi4ObpZJgkKdSFyIi9d3zwS6mmWb2E+At4F/7OGc60NnM2ptZCjAEmFChzwR2L+cYDLzj7m5mmcBrwCh3/6iss5klmVnL4HMycA7h2sV1TlktxGV6iCoiUiclRdDnBsIDVUcz+wjIJjxYyV6kJCVwUNNUTTEVEann3P0eMzuN8O7ehwO3uvub+zin2MyuI7x8IxF43N3nmdloYIa7TwDGAGPNbCGwnnASCXAd0Am41cxuDdpOJzzFdXKQHCYSWaIaEzmZeoMoIlKX7TNBdPdZZvYDwgOfAV8FtRD3ycwGAH8jPFg95u5/rnC8EfAU0Jvw1JmL3H1JcOxmwtt8lwAj3X1y0L4E2BK0F7t7biSx1LacTO3SJiJS3wVTSt9x9zfN7HDgcDNL3tc46e4TgYkV2m4t97kQuKCS8+4A7tjLZXvvb/yxsLsWoh6iiojURZHsYnoBkObu84AfAc+ZWa8IzjvgOk9BvyFAV2AA8I/gemX6BTWe6mRyCOE1Fss1xVREpL57H2hkZjnA64Q3hvl3TCOKA6qFKCJSd0WyBvH37r7FzE4C+hOe9vJwBOcdcJ2noH2cu+9w98XAQvay2L6uyslKY9WmQopLVAtRRKQes6AW4vnAw+5+AeGHm1IF1UIUEam7IkkQy3YsPRv4l7u/BqREcF516jxVda4Db5jZTDMbEUEcMRHKSqe41Fm9Zce+O4uISLwyMzseuITw5jEQXlbRMOzYCl9OgNL929w8lJXOctVCFBGpkyJJEJcHO7RdBEwM1g1Gcl60nOTuvQhPXb3WzE6urFOs6zyVLcLX
RjUiIvXaL4GbgfHBRjMdgCkxjqn2zH0Bnr8MHuwNn/4LirZFdFqboBbi2q16iCoiUtdEkuhdSHintTPcfSPQHLgxgvOqU+dpr+eWq/O0hnAdqDpZ5yknqPO0fKOm0IiI1Ffu/p67n+fud5lZArDO3UfGOq5ac/RlcMGTkN4CJv4a7usKb/8Rtqyu8rSyUhdahygiUvdEUubiEOA1d99hZqcAPQjvPLovu+o8EU7uhgAXV+hTVudpKnvWeZoAPGNm9wKtgc7Ap8FucQnBmsgMwlt7j44glur57xWwsxAyWkBGdvgnvSVktAy+twx/T9o983bXNt7rNfiJiNRXZvYM8DPCyzGmA03N7G/ufndsI6slCYnQ9UfQZSAsmwYfPwgf/BU+fgB6XATHXwetjvjeaaGsslIX2+nVNqu2oxYRkSpEkiC+COSaWSfgUeAV4BngrKpOqk6dp6Df88CXQDFwrbuXmIjxkfgAACAASURBVNlBwPjwPjYkAc+4++v7fdf7y0thcx6snA3b1kHpXnYvT20WJI7ZpGa05N60HaR/3Q4OuwhyeoUHUhERqU+6uPtmM7sEmASMAmYCDSNBLGMGbY8L/+R/C1MfgtlPw2djofPpcML10K5vuB+7Z9nMXrqRM7oeRKMkjY8iInWF7WuBuJnNcvdeZnYTsN3dHzSzz9z96NoJsfpyc3N9xowZNXMxdyjcFE4Ut62FguDPbd//c+O6FWT6pvB5qc2gwynQsT90PBUy21T1W0RE5ACY2czaLIFkZvOAowg/OP27u79nZp+7e8/aiqE6anR8rGhbPswYA9P+GR4rD+4BJ4wMv3FMTOaUu6ewJL+ARkkJ9GqbxXEdWnBch+Yc1TZTCaOISBREOkZG8gZxp5kNBS4Hzg3akqsTXFwzg7TM8E/LTlV2/e0zs8hbnscrZ+6Eb9+Bhe/Al6+ED7Y8LJwoduwP7U6ElIxaCF5ERGrYP4ElwOfA+2Z2KLA5phHVFRkt4Ac3hd8eznku/FbxpavhrdvhuJ8xYcQlTFu+k08W5fPJonzuf/tr/C2UMIqIxFgkbxC7EF5fMdXdnw3WFF7o7nfVRoA1IapPSKvwfxPnM+bDxVx4TBs6tMygY3YGhyeu4OC1H5GwaAos+QiKt0NiSnhaTlnCeFA3SIjlRrEiIvGptt8g7iWGpKB0U51Xq+NjaSl880Z4neJ3H0KjppDTO7zBTXoLCpMzWbw9lS83JjFjbQKz8xPJL21CQVJTurdtpYRRRKSaIh0j95kgBhdLAQ4Lvn7l7ntZhFc3xSpB/HzZRka/+iXfrt3KxoLdf2UpSQm0a5HOYc1T6Jv6DUcVzaLN+qmkb1gQ7pDRCjr2g66DoNNpkBjJi14REYnBFNNmwG1AWcml94DR7mXrC+q2WI2PLJ8VLouR/w0U5Id/Cvf+V1ZAGuu8Meu9CZtoSklGK7Y1Oxw/uCsZbXrSJtSGts3TSU1W4igisjc1liAGO5c+SXgKjREuPzHM3d+vfpi1I2YDYDnrtxWxaO1WFq3dxrfrwn8uWruV7/ILKC4N/xtks4Ez0+bzw0bz6LVzFo1LNlGa0YqEoy6Goy+Flp1jeg8iInVdDBLEF4G5hMdJgMuAnu5+fm3FUB11YXzcpWQnbN+wO2Hc42c9RZvXsnn9Koo2ryO9cCWZpRt3nbrKs1jgbclL7sCmZodTmt2VjJwjaN8qk3YtMwhlpZGcqJk5ItKw1WSCOBO42N2/Cr4fBjzr7r1rJNJaUKcGwAp2lpSybH1BOGHclThuY/HqDfTcMYOLEt/l1MTPSKSUrQflkt5nGAndzodGjWMduohInRODBHG2ux+1r7a6qi6Pj/u0dQ3bln3OpsWzKFk5l9T188natpgkwrN7d3gSCz2HBd6Wr70t6xofRnF2Fw4NteXoQ5tzVJtMsjJS9vFLRETqj5pMEOe4e499tdVl8TgAlpY6X67czLtfreGzL7+i06pXuTDhXTomrGSHpbGqzZk0P+kqmnQ+ade24SIiDV0MEsSpwI3u/mHw/UTgHnc/vrZiqI54HB+rVFwE+d/gq+ZSmPc5RSu+IGXdfNJ2rN3VZbOns9RbsdRbsSm1NYktOpKZ05m2HbvQoeMRpDRqFMMbEBGJnppMEB8HSoH/BE2XAInuflW1o6wl9WEA3LCtiPe/XsOS2e/SdulLnFb6EY2tkOWJIZa0HUSLEy7n8E6dMSWLItKAxSBB7Ak8BTQLmjYQXoYxp7ZiqI76MD5GZNs6WD0XVs9j59pv2bp6Iaz/jsbbl5PM7j0Cij2B/KRWFKSHSGjRnsycw2h6SGesebvw7uPacVxE4lhNJoiNgGuBk4KmD4CH3L2o2lHWkvo2AJaUOnMXL2fl1Odos+RFuhbPo9gT+DihF4vbDKL9CefT9/BDlCyKSIMTq11MzawpgLtvNrMfu/uLtR3Dgahv4+N+Ky3BN69gXd7XLF80ny0rv4ENS2iyPY8Qa2hpuyuWFCeksr3DGTQ+5mKsU39IbLgVv0QkPtXoLqaVXPw5d7/ogCKLgfo+AK7/7kvWfDCGgxePJ7MknzxvybjMEZx03nCO69gy1uGJiNSaOlLmYqm7t41lDJGq7+PjgSoqLmXBqs18sWg5yxcvYPOKrzhi2wzOTpxGlm1lW2Im+e3OIvuEy0nrcJyWeohIXIh2ghg3gx80oAGwpJjiryezZeIfyNryFZ+UHsn/Wv+SIeecSfdQs32fLyIS5+pIgrjM3dvEMoZINZjxsQYs37id9+cvJ3/2RDqumkg/ZpBqO1mdeAh5bc6h+XGX0u7wnpq9IyJ1lhLEchrcAFhaws5Pn6DkrdEkF2/mmeL+fN75Wn525jF0atUk1tGJiERNHUkQ42aMbHDjYw0pKi7ls6+/Y830F8hZ9j+O2jmHBHPmWye+PfgsMnIv5JhuR9K4keoYi0jdUe0E0cx67e0c4FV3P6Qa8dWqBjsAFqyn6K07SZr1BFs8lXuLL2B7z8sZedqRhLLSYx2diEiNq60E0cy+ACobQA04zN3jYivMBjs+1rCVeYtZ+eF/aLHoFQ4t+oYSNz727sxpfgYZPQYy6PgjaJamNYsiEls1kSBOqepEd+93gLHVugY/AK7+kp2v3UTy0g9Y4G25o+RyOvU5i2v7dSK7SVz8P4yISERqMUE8tKrj7v5dtGOoCQ1+fIyColXzWf3hUzT5ejyZRSvZ5o14yC6i8cnXMezEjmToraKIxEhUp5jGGw2AgDvMn0DxpFtI2pLHxNJj+atfxoCTjmFE3440S9eTTRGJf3Vhimk80fgYRe6wbBpb3rqLJkvfYVZpJ/6UfB1n9TuFi49tS2pyYqwjFJEGJtIxMqE2gpE6wAy6DCRp5Azo91sGpHzOpKQbSPngLk77yyQemrKQgqLiWEcpIiJSP5hB2+NocuVLcP5j9Ehbx7MlN7Ju0p/44d1v8eynS9lZUhrrKEVEvkcJYkOTnAY/uImE62aQ0uUcfpH0EpMSb+DLN5/kB3+ZwpSv1sQ6QhERkfrDDHpcQNL100nucjY3JT/Pk6U3M3b8//jhve/x8mf
LKSmt/7O5RCR+KEFsqDLbwAVPwBUTadHyYB5KeYAxfjs3P/E690z+SoOViEgVzKxpFcfiYgdTqWWNW8GFT8KFT9Gh0RZeTb2VEcXPctNz0znzb+/z+txVNIRlPyJS9+01QTSzS8t9PrHCseuiGZTUonYnwk/fg7PvpXviEl5v8kdef/ddLhszjbVbdsQ6OhGRuurdsg9m9naFYy/XbigSV7oMxK6dRkKPC7hkx3PMzB5Np6IF/Ow/Mxn40Ee89/VaJYoiElNVvUG8odznByscuyoKsUisJCTCMcOxKyeR2ciY2PgOfOknnP3AB0xblB/r6ERE6qLy1dCbV3FM5PvSm8OgR+Di/9LECnmocBSvd3mDrVu2MOzxT7no0U+YvmR9rKMUkQaqqgTR9vK5su9SHxzSA4a/QUqTbJ5O+RM/TJzFxY9N45H3vtXTTBGRPflePlf2XaRyh50OP/8E6zWMIxb9m7fSb+GRvoUsXreNCx6Zyg3Pz2ZHcUmsoxSRBqaqBFGDX0OU1Q6Gv0HCQV25c8ef+UNoJn+etICfPDWTTQU7Yx2diEhd0crMbjCzX5X7XPY9O9bBSRxJbQrn3g+XTyDBSxgw/Sqm9pjEDT84hJdmLWfY459q/BWRWlVVgniEmc0xsy/KfS77fngtxSexkNEShv0P63AKl665hxe7fsR7X6/mnL9/wBd5m2IdnYhIXfAvoAnQuNznsu+PxTAuiVcdfgA/nwrHXkPSzDGMXHA5Y/sXM/O7Dfz4kY/J21AQ6whFpIGwvU0dNLNDqzrR3b/b58XNBgB/AxKBx9z9zxWONwKeAnoD+cBF7r4kOHYzMBwoAUa6++Ry5yUCM4Dl7n7OvuJQIeADVFwEE66DOc+x5sjLOf/bc1mzrYTbzuvCxX3aYqaZxiJSt0RaBDjKMRzj7tNjGUOkND7WUUunwcvXwMalfH38Xfz4oxCpyYk8PuwYuoeaxTo6EYlTkY6Re32D6O7flf8BtgK9gJYRJoeJwEPAmUAXYKiZdanQbTiwwd07AfcBdwXndgGGAF2BAcA/guuV+QUwf18xSDUlpcCPHoETrqfV/Kd4p91T9O3QhN+On8v/e24223YUxzpCEZE6wcy6mNkfzWwh8HCs45E41/ZY+Mk70PY4DvvoBt45dhYpCcZFj05lygLVKxaR6KqqzMWrZtYt+HwIMJfw7qVjzeyXEVy7D7DQ3Re5exEwDhhYoc9A4Mng8wtAfwu/lhoIjHP3He6+GFgYXA8zCwFnoyk8tSMhAU6/A06/g5Sv/8djCf/Hzf1a88rnKxj40EcsXLMl1hGKiMSEmbUzs5vNbA4wFrgG+GGs32BKPZGWCZe+CN0Gkz3tz7x5+Ct0apnK8Cen8/S0fT6nFxE5YFWtQWzv7nODz1cCb7r7ucCxRFbmIgdYVu57XtBWaR93LwY2AS32ce79wE1AaQQxSE054Xo4/1/Ysk/46aLreW5oezYWFHHe3z/ildnLYx2diEitMrOpwGtAEvBjd+8NbClbJiFSI5Iawfn/ghN/SfqcJ3mp+cOc1qkxvx0/l7teX0BpqfYMFJGaV1WCWH7LrP7ARAB330KMkjMzOwdY4+4zI+g7wsxmmNmMtWvX1kJ0DUCPC+Hi52H9Ivq8cxGvX9aarq2b8otxs7l78oJYRyciUptWE96U5iB271qq/1uXmpeQAKf9Ac66h6Rv3+CRktv5Sa8mPPzut/ziOZXBEJGaV1WCuMzMrjezQYTXHr4OYGZpQHIE114OtCn3PRS0VdrHzJKAZoQ3q9nbuScC55nZEsJTVk81s/9U9svd/VF3z3X33Oxs7TheYzr1hyv+B0UFtHzuXJ49O4Uhx7ThoSnf8tz0pbGOTkSkVrj7j4DuwEzgdjNbDGSZWZ/YRib1Vp+fwEX/wdbM55aV1/Onk9P53+cruGzMp2wsKIp1dCJSj1SVIA4nvEnMFYR3F90YtB8HPBHBtacDnc2svZmlEN50ZkKFPhOAYcHnwcA7Ht5WdQIwxMwamVl7oDPwqbvf7O4hd28XXO8dd780glikJuX0huFvQEpjkp46lzu7raJv55b87uW5fLIoP9bRiYjUCnff5O5PuPvphMfGW4H7zGzZPk4VOTBHnA1XvIrt2MLFX1zFU6fD7KUb+fHDH7NsvcpgiEjNqGoX0zXu/jN3H+jub5Rrn+Lu9+zrwsGawuuAyYR3HH3e3eeZ2WgzOy/oNgZoEez6dgMwKjh3HvA88CXhN5fXurvmUNQlLTrC8DehRUcSnxvKI72W0aZ5Otf8ZyZL8zVIiUjD4u6r3f1Bdz8ROCnW8Ug9FsoNj79pmZz88VW8etoG1m7ZwaB/fMycvI37Pl9EZB+qqoNY8W3fHtz9vKqO1yWq8xRFhZvh6cGwai55Q97k7P8sp1WTRrz48xNomhrJTGQRkZpTW3UQ68sYqfExjm1bB88OgbwZrO07mkEzupG/tYi/X3w0/Y88KNbRiUgdVO06iMDxhNf+fQDcA/y1wo8IpDaFwY9DQhKhd/8fD1/ck8XrtnH9M59RXKKNZkWk3tIYKbGV0RIunwCHn0X2B79nctc36Zydzk+emsHYT1QGQ0QOXFUJ4sHALUA34G/AacA6d3/P3d+rjeAkTjQLwdn3wLJpnLDqP/xhYFfe+3otf5qonU1FpN7SGCmxl5IOF42FY35Cxox/8NJBj3Na50x+//Jc/vrGV+xtlpiISFWqWoNY4u6vu/swwovvFwLvmtl1tRadxI/uF0DXQTDlT1zSdiNXnNCOxz9azLOfamdTEal/NEZKnZGQCGfdDaeNJmn+eB7hDq44OpMH31nILeO/0GweEdlvSVUdNLNGwNnAUKAd8AAwPvphSdwxg7Pvhe+mwksj+N3VU1i0bhu/f3ku7VpkcHzHFrGOUESkRmmMlDrDDE78BTTNwV6+htuy/h+tj7+LP01dRv7WIh4YejSpyYmxjlJE4sRe3yCa2VPAVMI1EP/g7se4+x/dvWItQ5Gw9Obwo4dg7QKSptzB3y8+mnYtM7jm6ZksWbct1tGJiNQYjZFSJ3UfDJeNx7auYsTXI3jwB/Dm/NVc/vinbNq+M9bRiUicqGoN4qWE6w/+AvjYzDYHP1vMbHPthCdxp9MP4Zir4ZOHaLpyKmOGhTdKGv7kdDYXanASkXqjWmOkmQ0ws6/MbKGZjarkeCMzey44Ps3M2gXtp5nZTDP7Ivjz1HLn9A7aF5rZA2ZmNXa3Ej/anRQug5GUyrmzrubZUzbx2dINXPTPqazZXBjr6EQkDlS1BjHB3ZsEP03L/TRx96a1GaTEmdNGQ4tOMP4aDs0o5uFLevNdfgHXaWdTEaknqjNGmlki8BBwJtAFGGpmXSp0Gw5scPdOwH3AXUH7OuBcd+8ODAPGljvnYeAnhBPXzsCAat6mxKvsw+Hqt6DlYRz3ybVMOmEhy9YXcP7DH7NYM3pEZB+qeoMocmBSMmDQo7BlJUy8ieM7tuCPP+rG+1+v5c6J82MdnYhIrPUBFrr7IncvAsYBAyv0GQg8GXx+AehvZubun7n7iqB9HpAWvG08BG
jq7p94eOvKp4AfRf9WpM5qchBc8Rp0Pp1O03/PlKPepXDHTgY//DFz8jbGOjoRqcOUIEp0hHrDyb+GOeNg3ssM7dOWq05szxMfLeHpaarPJCINWg6wrNz3vKCt0j7uXgxsAiru9vVjYJa77wj65+3jmgCY2Qgzm2FmM9auXXvANyFxoFFjuOhpyL2KVnMe5t2OT9M0uZShj37CB9/o315EKqcEUaLn5Buh9dHw6i9hyypuOesITjk8m9temcfH366LdXQiInHLzLoSnnb60/09190fdfdcd8/Nzs6u+eCkbklMCu8y/sM/0PibV5jc4l6OzCzhqn9PZ8LnK/Z9vog0OEoQJXoSk8NTTXduh1euIynBeGBosLPpf2ZpHYSINFTLgTblvoeCtkr7mFkS0AzID76HCJfTuNzdvy3XP7SPa0pDZQYn/RJ+PIaUVbN4LulWTm+9g5HPfsYTHy2OdXQiUscoQZToyj4MTvsjLHwTZj5B09RkxgzLJcHCO5tq220RaYCmA53NrL2ZpQBDgAkV+kwgvAkNwGDgHXd3M8sEXgNGuftHZZ3dfSWw2cyOC3YvvRx4Jdo3InGm+2C47GUSC9bx94IbGdFxA3/435fcM/krwktXRUSUIEptOOZq6NAPJv8W8r/l0BYZPHxpb5bmF3DdM7MoKdWgJCINR7Cm8DpgMjAfeN7d55nZaDM7L+g2BmhhZguBG4CyUhjXAZ2AW81sdvDTKjj2c+AxYCHwLTCpdu5I4kq7E2H4m1hyGjevuZHRhy/l71MWMurFL7TTuIgAYA3hiVFubq7PmDEj1mE0bJtXwD+Oh5ad4crXITGJZ6Yt5ZbxX3DrOV246qT2sY5QROoBM5vp7rmxjiNeaHxswLaugWcuxFd+zjvtfsXw+UdzWpeDeHDo0aQmJ8Y6OhGJgkjHSL1BlNrRtDWc/VfImw4f3gfA0D5tOOXwbO6e/BVL8wtiHKCIiEgD0rgVXPEa1vkM+i++m9eOmMzb81cy/MnpbC8qiXV0IhJDShCl9nQfDN1+DO/9GVZ8hpnxp0HdSUwwbh4/R+sfREREalNKBgx5Go65mq5LnuT99mOZ8a2SRJGGTgmi1K6z7oGMVvDST2HndlpnpnHzWUfw0cJ8npu+bN/ni4iISM1JSAyPzaf9kdCKyXx88P0sWLRYSaJIA6YEUWpXenP40UOw7it46w8ADD2mLcd1aM6dr81n5abtMQ5QRESkgTGDE0fCBf+mxeb5vN/8TlYvnstV/55OQVFxrKMTkVqmBFFqX8dToc9PYdrDsOhdEhKMu37cg52lpfxu/FxNNRUREYmFroPgildp7AVMyhhN6ZKPlCSKNEBKECU2fng7tOgML/8cCjdxaIsMfn364by9YA0TPl8R6+hEREQapjZ94Oq3SGnaimdS/4+Dv/ufkkSRBkYJosRGSjqc/0/YshLe+D0AV57YnqPaZHL7hHms27ojxgGKiIg0UM07wPA3SGxzLPcnP8QxS8dw5eOfKkkUaSCUIErs5PSG46+DWU/CondJTDD+MrgHW3cUc/uEebGOTkREpOFKbw6XvQQ9hvCrpP9ywfL/Y/jjH7Nth5JEkfouqgmimQ0ws6/MbKGZjarkeCMzey44Ps3M2pU7dnPQ/pWZnRG0pZrZp2b2uZnNM7M/RDN+qQX9boHmHWHCSCjaxmEHNeH6Uzvz6pyVTJ63KtbRiYiINFxJjWDQI3DKzQxOfJ/rV4zi2jFTlCSK1HNRSxDNLBF4CDgT6AIMNbMuFboNBza4eyfgPuCu4NwuwBCgKzAA+EdwvR3Aqe7eEzgKGGBmx0XrHqQWJKfBeQ/Cxu/g7T8CcM0pHTni4Cb87uW5bCrYGeMARUREGjAzOGUUDPonxyV+zW9X/4Kb/vU/JYki9Vg03yD2ARa6+yJ3LwLGAQMr9BkIPBl8fgHob2YWtI9z9x3uvhhYCPTxsK1B/+TgR1texrt2J8IxP4Fpj8DSaSQnJnD34J6s31bEHa99GevoREREpOcQEi4fz6EpW/jD2pHc8c+n2KokUaReimaCmAOUr3yeF7RV2sfdi4FNQIuqzjWzRDObDawB3nT3aVGJXmrXD2+DZiGYcB3sLKR7qBkjTu7Af2fm8f7Xa2MdnYiIiLTvS8qIt0nPaMpt+Tfx8D/uU5IoUg/F3SY17l7i7kcBIaCPmXWrrJ+ZjTCzGWY2Y+1aJRh1XqMmcO7fYN3X8P5fAPhF/850yM7g5pe+0FQWERGRuiD7MNJ//i7bW3ThVxvv5MUHf8PWQi0HEalPopkgLgfalPseCtoq7WNmSUAzID+Sc919IzCF8BrF73H3R909191zs7Ozq3EbUms69YejLoEP74cVs0lNTuQvP+7Bik3b+cvrC2IdnYiIiAA0zibrmsmsDp3OsK2P8eHfhrGlYHusoxKRGhLNBHE60NnM2ptZCuFNZyZU6DMBGBZ8Hgy84+4etA8JdjltD3QGPjWzbDPLBDCzNOA0QJlDfXLGnZDRMjzVtGQnue2aM+z4djw59TumL1kf6+hEREQEIDmNQ4aPY9FhVzNg+2ssuu8M1i7RvgEi9UHUEsRgTeF1wGRgPvC8u88zs9Fmdl7QbQzQwswWAjcAo4Jz5wHPA18CrwPXunsJcAgwxczmEE5A33T3V6N1DxIDaVlw9l9h1Rfw0f0A3HjG4YSy0vjNC3Mo3FkS4wBFREQEgIQEOlz8V+bl/omORV/T7N99WfHCb2DH1n2fKyJ1loVf2NVvubm5PmPGjFiHIfvjv1fAgtfgpx9AqyP44Ju1XDbmU376gw7cfOaRsY5OROooM5vp7rmxjiNeaHyUmrJo8bd8/fSvGVD8DtsatSL97D9h3QeHy2SISJ0Q6RgZd5vUSANx5t2Q0jg81bS0hL6ds7kwN8S/3l/EnLyNsY5OREREyunQviMn/vp5/tz6ARZtT8deupqSx8+CVXNjHZqI7CcliFI3Nc6GM++CvOnh+ojAb8/uQsvGjbjphTkUFZfGOEAREREpr0lqMjddfTkfnPI8t+wczta8L/B/9oWJN8L2DbEOT0QipARR6q7uF0DnM+DtP8L6Rfz/9u47PK7qQP/492g06tKoy7KK5SLjhjvdpsaEDQFCIIAXspCyJIFAkt3kl7DlCZvy7KZCgDQ2yQKJKQECoYUWOhjjgquMmyzb6r2MujTn98e9ksa2PLZsSSNZ7+d57nPv3Lkan7nPeM6899xzji/Wyw+vPJWPKlv49Rt7wl06EREROUREhOGWC0/h4zfewWX8gsfsx7Brfwf3LoH1D0JAF3hFxjoFRBm7jIFP3gUeLzxzO1jLijlZXLZgMve9vosdlS3hLqGIiIgM4ryZGay67R94KOV2Lu38IWWRefDs7fC7C6FU/V5FxjIFRBnbfDlw8feh5G1Y/wAAd142h6QYL199eAOtnT3hLZ+IiIgMKi81jr/ccjazFp7NOdXf4v70Owg0l8PvLoKnbwV/dbiLKCKDUECUsW/xjTD1XHj5P6GplLSEaO5ZuYg9NX7u+MsWJsJIvCIiIuNRjNfDzz6zgO9fMY8fl
8/n0sDd1C38Cmx+zLntdPWvoLc73MUUkSAKiDL2GQOX3QO2F577BljLOTPS+ZcVM3lmUzkPrd4X7hKKiIjIERhj+OxZBTx685nUdUexbP35/P3Cv0Le6fDSHXDfabDlCfVPFBkjFBBlfEidChf+J+x6GTb/GYBbzp/BhbMy+cHzRWzYr9HRRERExrKlBak8d9sy5uUk8YXnGvmB73v0rvyzM63Vk1+A354Lu14B3RkkElYKiDJ+nPElyD0dXvw2+KuJiDDcdc1CspJiuHXVBur8neEuoYiIiISQmRTDqi+eyU1nF/C7d0u4/s0kyq97Ca76PXS1wKqr4YFLYf+acBdVZMJSQJTxI8IDV9wHXa3OnEqAL87Lb25YQl1rF197dCO9AV11FBERGcuiIiO48/K53HXtAjYdaGLFXW/zYMtSem/5AC79GdTthj9cDI+shKqicBdXZMJRQJTxJeMUOO/bUPS0018BmJfj43uXz+Wd3bX84tWdYS6giIiIHIsrF+Xy8jfOZfGUFL77zDau/t917Mi7Fm7/0OlWUvIO/PpseOrL0KDxBkRGiwKijD/nfA3yzoCnb4F97wFw7Wl5fGZJLve8tpvXPqoKcwFFRETkWOSlxvHQ50/nrmsXsK+ujUvveZufvl5Kx1nfgK9tgrNvg21POSOevvD/NDWGyChQQJTxx+OFlY9Ccj48ch1UFWGM4fufmsfs7CS+8dgmDtS3hbuUIiIicgyMMVy5KJdXhyU6HwAAIABJREFU/+U8Ll84mfte380nfvE271daZy7k2zbAwn+Etb+DXyyE134IHc3hLrbISUsBUcanuFT47F8gMhb+dBU0lRLj9fDr6xcTsJZbVm2go7s33KUUERGRY5QaH8XPr1nIH79wOt2BANfd/z7feXIzTd5MuPweuHUNFK6At34Mv1gA79wNnS3hLrbISUcBUcav5Hy44Qno8jshsb2BgvR4fvaZBWwpa+K/nlXHdhERkfFmeWEGL3/9PL507jQeX1/KRT9/k+c3V2DTZsA1D8I/vw6TF8Kr34W75sJrP4DWunAXW+SkoYAo49ukU+G6VVBf7Ix21t3OxXMn8eXzpvPIB/t5Yn1puEsoIiIiQxQb5eGOT8zmr7eewyRfNLc+vIEvPriO8sZ2yFkMn30K/vk1KFgOb/0E7p4Hf/sONKneFzlRCogy/k09F678Lex/H578IgR6+ebFMzlzWir//tQWisrVT0FERGQ8mpfj4+lbzuE/Lp3Ne3vqWPHzN3ng3b3OtFY5S5yLxLesgTlXwAf3O30Un74VaneFu+gi45YCopwc5n0aLvlv+Og5eOFbREYY7l25GF+sl6+sWk9Te3e4SygiIiLHIdITwReXT+Plb5zLkoJU7ny2iKt+/R5by5qcAzJnwZW/ga9thKWfg61PwH2nwZ//Cco3hrfwIuOQAqKcPM78ijMFxrrfw9s/JSMxml9ev5jShna+9fgmrLXhLqGIiIgcp7zUOB783Gncfe1C9te38cl73+HWVRvYVeUOVJOcD5/4CXx9Cyz7Bux5He4/D/74aWdORf0OEDkmCohycrnoTph/ndNhfcMfOa0glTv+YRYvF1Vx/1vF4S6diIiInABjDJ9alMPr/3o+t104gzd2VHPx3W/xjcc2UlLb6hyUkAkf+y58Yytc9F2o3AwPXAq/vxh2/E1BUeQozERoVVm6dKldt25duIsho6W3Gx6+BorfhJWPYAsv5taHN/Di1kpWffFMzpqeFu4SisgIMcast9YuDXc5xgvVjzLe1bd28ds39/Dg6hK6ey1XLc7htgsLyUuNGzioux0+/BO8ew807YeM2bDoejj1M5A4KWxlFxltx1pHKiDKyamzBR74JNTsgJueoyV9AVfc9y7NHT28cPsyMpNiwl1CERkBCohDo/pRThbVLR38+o09rFqzH2st1yzN46sXziDbFztwUG83bH0S1vwWyjeAiYBp5zt3Hs26FKITwlV8kVFxrHXkiN5iaoy5xBizwxiz2xjznUGejzbGPOY+v8YYUxD03B3u/h3GmI+7+/KMMa8bY4qMMduMMV8byfLLOBadCNc/7lwZXPUZEv0l/PqGJbR29vClP2nQGhERkZNJZmIM371sLm9+63yuWZrHn9cd4LyfvMF/PbuN6pYO5yCPFxZcBze/DreuheX/CnW74amb4acz4S83w+6/Q6A3vG9GJMxGrAXRGOMBdgIrgFJgLbDSWlsUdMwtwHxr7ZeNMdcBV1prrzXGzAEeAU4HJgOvAjOBTCDbWrvBGJMIrAc+Ffyag9EV0gmsbo/T5yAqDr7wCi/us9z2yIcUpMXzwOdPJyc59uivISLjhloQh0b1o5ysDtS3ce9ru3hyQxlej+HGswv40rnTSY2POvjAQAAOvA+bHoVtT0NnEyRMglOvhvnXOvMtGxOeNyEyzMZCC+LpwG5rbbG1tgt4FLjikGOuAB50t58ALjLGGHf/o9baTmvtXmA3cLq1tsJauwHAWtsCbAdyRvA9yHiXNt1pSWytgz9dzSUz4njwc6dT2dTBlb98l23lTeEuoYiIiAyzvNQ4fnz1Al79l/O4ZO4k7n+rmOU/eo2fvbzj4LuIIiJgytlw+T3wzZ1wzUPO/Iprfgu/XQ6/PhveuQuaysL3ZkRG2UgGxBzgQNDjUg4Pc/3HWGt7gCYg7Vj+1r0ddRGwZrB/3BhzszFmnTFmXU1NzXG/CTkJ5Cx2vvBrtsNjN3B2QSJPfOVsPBGGa3/7Pm/v0udDRETkZDQ1PZ67r1vES18/l/NOyeDe13az7H9e43vPFg2MetrHGwNzroCVDzth8dKfQVQCvHon3DUXHrwMPvhfaCgJx1sRGTXjcpoLY0wC8CTwdWtt82DHWGvvt9YutdYuzcjIGN0CythT+DG4/D7Y+xY8/jlOSeziqVvOITclls/931oeX3fg6K8hIjJMjrePvjEmze2L7zfG3HfI37zhvuZGd8kcnXcjMvbNzErkV9cv4fnbl3H+rEweWl3CBT97g88/sJY3d9YQCBzS5SouFU77InzxFbhtA5z3bWgqhRe+Cb9YAPcuhRfvcPosdneE5T2JjJTIEXztMiAv6HGuu2+wY0qNMZGAD6gL9bfGGC9OOFxlrf3LyBRdTkoLV0JHI7z8H3DvYiZd9J88fvMNfOXhTXzric2UN3Zw+0UzMOprICIjyO2j/0uC+ugbY545pD/9F4AGa+0Mt4/+j4BrgQ7gP4F57nKo66216lQocgRzJ/u4d+Uiqi6dzao1+3l4zT5u/MMHTMuI58azCrhqSS4J0Yf8PE6bDhfcAed/xxnbYPcrsOsVWPt7eP9X4I2DguVQuAJmfAxSp4bnzYkMk5EcpCYSZ5Cai3DC3VrgH62124KOuRU4NWiQmk9ba68xxswFHmZgkJq/A4VAAKfPYr219uvHWhZ1wpeDVG+HF74FJW9D9gK6P/4Tvv1BNH/ZUMa1S/P4wZXz8HrGZeO6yIQ3HgapMcacBdxpre0bofsOAGvtfwcd85J7zGq3Pq0EMqxbaRtjbgKWWmu/GvQ3bwDf
HEpAVP0oE11nTy8vbKnggff2selAIwnRkVy9JJd/OmsK0zKOMu1FVxuUvAO7XnZCY9+tp2mFA2FxyjnOrasiY8Cx1pEj1oJore0xxnwVeAnwAH+w1m4zxnwPWGetfQb4PfBHY8xuoB64zv3bbcaYPwNFQA9wq7W21xizDPgssMUYs9H9p/7NWvvCSL0POQllzoYbn3XmQnr5P/A+cDE/W3gDhcs/y4/ePkBlcwe/un4x8YdeQRQRGR6D9bM/40jHuPVpXx/92qO89v8ZY3px7rT5gR2pq8AiJ4noSA9XLsrlykW5fLi/gQffK2HVmn088F4J583M4KZzCjivMIOIiEHuLoqKg5kXO4u1oVsXZ14Msy6DxKzRf5MiQzRiLYhjia6QyhF1tsCbP3a+xKPiWTftVlZunMsp2T7+cNNpZCbqqp/IeDJOWhCvBi6x1n7RffxZ4IxDWgO3useUuo/3uMfUuo9v4vAWxBxrbZk7DdSTwJ+stQ8N8u/fDNwMkJ+fv2Tfvn0j9E5Fxqfqlg4eXrOfVWv2U9PSydT0eD575hSuXppLUoz32F6kr3WxLzA27AWMM2LqnCtg9uWQlD2i70PkUMdaRyogigDU7HBuO937Ji3Js/lS/Ur2xZ3Kg58/jRmZieEunYgco3ESEEfkFtND/o2Qz/dR/ShyZF09Af62tYIH3ivhw/2NxEd5uHzhZK5eksvi/JRjH7PAWqj5CIr+6sy1WLPd2Z93phMW51wOvtyReyMiLgXEIKoA5ZhYC0VPw4v/Bi3lPGMu4C57PT+68SJOn5oa7tKJyDEYJwHxuPvoBz1/E0EB0H3NZGttrTuY2yPAq9ba34Qqi+pHkWOzubSRB9/bxwtbKmjv7mVaRjxXL8nl04tymeQb4t1GNTug6BnnN0fVVmdf7mkDLYspU4b/DYiggHgQVYAyJJ1+eOsn2NW/pDUQxV29n2HJVd/kEwvzjv63IhJW4yEgAhhjPgHczUAf/R8G99E3xsQAf8SZ77ceuM5aW+z+bQmQBEQBjcDFwD7gLcDrvuarwL9Ya3tDlUP1o8jQ+Dt7eGFzBY+vP8DakgYiDCwvzOAzS3P52OwsYryeob1g7W7Y/lendbFik7Nv8mK3ZfEKjYgqw0oBMYgqQDkutbvofu6beEveYHsgn11L7+Syyz6taTBExrDxEhDHCtWPIsevpLaVJ9aX8uSGUiqaOvDFerl8wWQ+szSXU3N8Q/+9UF/stiz+Fco3OPsmneqMhDp5MeQshtTpEKGR1uX4KCAGUQUox81aurY+jf+v/4/Unmo+ippLzOk3UbD8HyH6KMNfi8ioU0AcGtWPIieuN2B5b08tj68r5aVtlXT2BDglK5Grl+TyqUU5ZCRGD/1FG/bB9mdhxwtQ/iF0tzn7o30wecFAYJy82Om/qIvXcgwUEIOoApQTFejws+HJn5Kx6zGmUE67iaV95hWkLvuC029AX8wiY4IC4tCofhQZXk3t3Ty3uZzH15Wy8UAjngjDBadkcNXiXC6YlTn0W1ABenugdgeUbXBaFss/hMqtEOh2no/PCAqMi5zthIzhfWNyUlBADKIKUIZLW2c3L774V8yHf+Ji+x7xppOu5BlEnfZPsGAlJGSGu4giE5oC4tCofhQZOburW3h8fSl/2VBGTUsn8VEeLpqdxaXzszlvZsbxhcU+PZ3OADdlbmAs2+CMlIr7u96XB3mnw9RznSVlqi5miwJiMFWAMtya2rt56I0tVK1+jE/xGksjdmKNBzPzElh0AxReDJ7IcBdTZMJRQBwa1Y8iI6+nN8D7xfU8v6WCF7dW0NDWTUJ0JB+bncml8ydz7sx0oiNPICz26fQ7A92Ub3AC4773wF/pPOfLGwiLBcvBl3Pi/56MOwqIQVQBykip83fymzf38M7q97jSvMHK6HdJ7KmHhCxYcB0svAEyZoa7mCIThgLi0Kh+FBld3b0B3i+u4/nNFby4rZLGtm4SoyNZMcdpWVxWOExhEZzpu+p2w943Ye9bsPdtaK93nkubcXBgjE8fnn9TxjQFxCCqAGWkVTZ1cN/ru3jig71c4NnE19M+YGbTuxjb60yEO+8qmH6B84WsWzxERowC4tCofhQJn+7eAO/tqeP5zeW8tK2KpvZuEmMiuXjOJD45P5tzZqQTFTmMI5YGAlC9zQ2Lb0HJu9DV4jyXNc8JilPPhSlnQ2zy8P27MmYoIAZRBSijZX9dG3f/fSdPf1hGnreF70/dxjn+v+Gp2+UckJQD0853lqnnQWJW+AorchJSQBwa1Y8iY0NXT4B399Ty/OYKXtpWSUtHD0kxkXx87iQ+PncS58xIJzZqmFoW+/T2QMXGgRbG/Wugpx0wkDkbcpc6A/Hlng7pMzW9xklAATGIKkAZbburW/j5Kzt5YUslvlgvty2K5PLEnWTWrHa+iNsbnAMz5wwExilnQ3Ri+AotchJQQBwa1Y8iY09XT4B3dtfw3OYKXimqoqWjhxhvBMsLM1gxO4sLZ2eSnnAcU2ccTU8nlK6DknegdK2zdDQ6z0UnQc4SNzCe5oTHuNThL4OMKAXEIKoAJVy2ljVx1ys7eW1HNdbC7OwkLp8/iU9PriOr5n0ofgP2r4aeDoiIhJylA4Exdyl4vGEtv8h4o4A4NKofRca2rp4AH+yt55WiSl7dXk1ZYzvGwOL8FD42O4sVc7KYnhGPGYnuK9ZC3Z6BsFi6Fqq2ge11nk+dPhAWc0+DrLn63TLGKSAGUQUo4VbV3MHzmyt4ZlM5Gw84V+MW5ydz+YLJfGJOCpkNm5ywWPyGM1w1FqISIP8sZ5jqvslwdbVOJCQFxKFR/SgyflhrKapo5tWial7ZXsnWsmYApqbHs2JOFh+bncWSKSl4IkZwrIOuVud3Sulap7XxwAfQWu08FxkL2fMhewFMctcZsyAyauTKI0OigBhEFaCMJQfq23hmUznPbirno8oWIgycNT2NyxdM5pK52fhocW7vKH7DWdfupH9eo9Rpzi0efcukU8EbG863IzKmKCAOjepHkfGroqmdV4uqeGV7Nav31NLda0mJ83LhrCxWzMlkeWEG8dEjPOWWtdB0wAmMB9Y602xUboYuv/O8J8rpz9gfGhc6LY1RcSNbLhmUAmIQVYAyVu2qaukPiyV1bXg9hvNmZnDZgsmsmJNFXFQkdDRB+UYoW+8uG6Cl3HmBiEjnizY4NKbPhIhh7sguMk4oIA6N6keRk0NLRzdv7azl1e1VvPZRNU3t3Xg9hkX5KSyfkc6ywnTm5yaPbOtin0AAGvY6A+BUbIKKzc66b4oNE+H8VsleEBQc50OMb+TLNsEpIAZRBShjnbWWLWVNPLupnGc3VVDZ3EGs18OFszJZVpjOmdPSKEiLG+hj0FzuBMW+0Fj+IXQ6t5oQlQCTFzlfullznSX9FPDGhO8NiowSBcShUf0ocvLp7g2wtqSet3bW8vauGraVO78PfLFezp6exrLCdJbPyCA/bRRb8ayFplKndbFi00Bw7LvgDZA8xbkzatJ8d30q+HI1PdgwUkAMogpQxpNAwLK2pJ5n3XmRalo
6AchKiubMaWmcOS2Ns6alMSU4MAYCzmS4/a2M66G6yBn8BsB4IL3QDYzz3GUuJE3WF6+cVBQQh0b1o8jJr87fybt76nhnVw3v7KqlvMn5bZCfGseywnTOLUznrOnp+GLDMMCMv9oJipWboHKLs9Ttob9rTWzK4aExfaYGwzlOCohBVAHKeGWtpbi2lfeL63i/uJ73i+v6A+OkpBjOnJbaHxoPCowAgV6oL3a+bKu2DSxN+weOiUkeCIt94TFzFkTFj/I7FRkeCohDo/pRZGLp+13x9s4a3tldy+o9dbR29RJhYH5uMssL01k2I52F+clER4apu0qn37nIXbl5IDRWbRu46N3Xr7EvNGbOhozZEJ+ui95HoYAYRBWgnCz6vthX76nrD421/mMMjH06mqCqCKq2DoTG6qKBDuUASbmQPgPSCp0rdX3bSTmaKFfGNAXEoVH9KDKxdfcG2Higkbd31fLOrho2HmgkYCE6MoLF+Smc4f6uWJiXTIw3jOMb9PY4d0pVbgkKjpuhrW7gmNhUNyye4gTGjFOcUVQTMhUcXQqIQVQBysnKWsuemr4WxoMDY1p8FKfm+jg1x1nm5yaTlRQ9eGgMBKBxnxsWt0PdLqh1l66WgeO8cZA23Q2OhUHrGRCdMErvWuTIFBCHRvWjiARrau9mTXEda/Y6dy0VVTRjLURFRrAoL5kzp6VxxrRUFuenhDcwgtOvsaUSarZDzQ7n90vNDudxR9PAcbEpTlDsWzLddULWhAuOCohBVAHKROEERj+ri+vZfKCRLWVN7KxqIeD+N89IjA4KjM46MynE4DXWgr/KCYrBobFuFzTuBxsYODZxMqROdTqZpxRAypSB7YQstTzKqFBAHBrVjyISSlNbN2tLnLC4Zm8928qbCFiI8kSwMC+ZM6elcsa0NBbnpxAbNUZGUO/77dIfGD9ylurt0NE4cJw3HpKyITHbGZNhsHVCFnhGeKqQUTQmAqIx5hLgF4AH+J219n8OeT4aeAhYAtQB11prS9zn7gC+APQCt1trX3L3/wH4JFBtrZ13LOVQBSgTWXtXL0UVzWwpbWRzWRNby5rYXe3vD41ZSdGcmpPcHxjn5fjISIw++gt3dzh9HPuCY91uaCiBhn3QUkF/B3MATzQk5x8eHPu2Y5OH/43LhKSAODSqH0VkKJo7ullXUs/7xfWsKa5jS5kTGL0ew4LcZJYWpLIoP5lF+clkJo6x0dOtdQbF6QuMDSXOqPAtFdBc4awD3Qf/jYmA+EwnMAaHx5S+3zFTnRbKcdISGfaAaIzxADuBFUApsBZYaa0tCjrmFmC+tfbLxpjrgCuttdcaY+YAjwCnA5OBV4GZ1tpeY8y5gB94SAFR5Pi0dva4obGJLWVNbC5tpLi2lb6vg7T4KE6ZlMgpkxKZPSmJUyYlMjMr8divDnZ3OBPnNuyDxpKB4Ni4z9kOvvUDINrnXMXr/wJ210k57v6ccfUFLOGjgDg0qh9F5ES0dHSzbl9DfzeXovImunudHxM5ybFuWExhYV4ycycnhf+21FACAWirDQqNweGxfGA92G+Y/sBY4NxN1bftyxtTI64eax05km2mpwO7rbXFboEeBa4AioKOuQK4091+ArjPOB2krgAetdZ2AnuNMbvd11ttrX3LGFMwguUWOenFR0dyWkEqpxWk9u/zd/awrayJbeXN7Khs4aPKZh794ADt3b2Ak82mpMa5wTGJWZMSmTUpkSlp8YdPvOuNcfomphcOXoD2xoGw2LDPmRupucz5Iq7e7vQp4JCLV5Ex7pW7nIEwmTgZEic5S0KWs/bGDt+JEhERkSNKjPFywSmZXHBKJgAd3b1sK2/mw/0NfHigkQ/3N/Lc5grAaWWcM9nHojynhXFRXgp5qbGDj40QDhERzoA2CZnAwiMf1+kf+A1Tv9f9LVPi/H7Z+SL0dg0cazzOXI59gTF4HIeUgjEVHoONZEDMAQ4EPS4FzjjSMdbaHmNME5Dm7n//kL/NGco/boy5GbgZID8/f0gFF5mIEqIjOWNaGmdMS+vfFwhY9te38VFlS39o3FHZwitFVf23qMZ4IyjMdFobp2ckMDU9nmkZ8eSnxh35SmFssrNkLxj8+d4ep/9Ac/lAcGwucx9XwIEPnH3BX8J9on2QmDUQGPvXk9z97jo6SS2SIiIiwyjG62HJlBSWTEnp31fV3MGH+xv58EADG/c38tjaAzzwXgng3LG0KD+ZhXnJzM9N5tQcHynxUWEq/TGKThiYHuxQgYDTytgXGoOXj553Wij7REQ6t6j2DfaX7o4cn1YI8WmHv/YoOnl6XR7CWns/cD84t9CEuTgi41JEhKEgPZ6C9HgumTepf39Hdy+7qvz9gfGjyhbe3FnDE+tL+48xxrm9ZGp6/EHLtPQEclJiD291DOaJBF+Os3Da4MdY6wxv3VIJ/kpoqTpkXekESX/VwNxJwSJjID7DmTcpLn1gOz4jaNt9HJfutIqKiIjIkGQlxXDJvEn9vyN6egPsqGpxQqMbHF/dXt1/fH5qHKfm+pjvjsA+LyeJxJix2dJ2mIgIp8XQlwsFyw5/vr3RGbOhdhfU7nTHcdgNu189+KJ3bMrhU40VroDIYxgjYhiMZEAsA/KCHue6+wY7ptQYEwn4cAarOZa/FZEwifF6nCk0cn0H7W/p6Kakto3iWj97a1v7l6c2lNHS2dN/XJQngvy0OArSnNbGKWlx5KXEkZsSS05K7LFNzmvMQIgjRHdka53+Av4qN0y669YaaK111zXOrSGtNdDbOfjrRCdBXJrz78WmQlyqs45NgbiUoH1B2944tVKKiIgEifREMHeyj7mTfdxw5hTAmV5ja1kTm0ub2FLWyMb9jTzv3poKMC0jngVuC+OCPB9zsn1jZ9TUoYhNhtylzhIs0Ovctlq72w2NO93g+Aps/BNg4N8rBn3JkTCSAXEtUGiMmYoT7q4D/vGQY54BbgRWA1cDr1lrrTHmGeBhY8zPcQapKQQ+GMGyisgwSIzxDhocrbXU+rvcwOinuLaVEjc8vrWrhq6egekyjIGsxBhyU2LJS40jLyWW3JQ4clNjyUuJI9sXQ6RnCFNmGDNwS2vGKaGPtRa6/IeHx9bagcdttW5fySJoq4fu1iO/nifaDZB9YTIZYpIPXsemHL4vxjdm+yWIiIgMN1+sl3NmpHPOjPT+fXX+TncgPWd5b08tT33otBdFGJiZlcj8XB9zspOYlZ3E7OwkfLHjtO6M8EDqNGfh4oOf62hyblEdxTEWRnqai08Ad+NMc/EHa+0PjTHfA9ZZa58xxsQAfwQWAfXAdUGD2vw78HmgB/i6tfZv7v5HgPOBdKAK+K619vehyqFR2kTGrt6Apaq5gwP1bZQ2tHOgoY0D9e2UNjiPK5ra+/s7AngiDNk+N0CmxJGdHEu2L4ZsXwyTk2OZ5IshaTRvRenphPYGJyy21w+sD9rX4O5rdOZgam+EnvbQrxuVcHBwjElyWjEHXfsO3x8VPyFbLzWK6dCofhSR8aSqucNpZSxtZJM7Ent968CtmTnJsczOTmTWJCcwzs4+wm
B6E1TYp7kYS1QBioxf3b0BKho7KG1oOyg8Hmhw1tUtnRz6NZYQHUm2L4ZJvhgm+5zQODk5hmyfGyaTY0mIDnMX7J7OgwNjqHVHE3Q0Q2ffuhlsIPTrGw9EJzpLVILTqb5/negEyP59wcfEO8/3b7vryJhxETgVEIdG9aOIjGfWWqpbOtle0cz2iha2VzTzUWUze2pa6XWvLsd6PcyclMicoOA4KztxdC8mjxFjYZoLEZET5nX7K+anxQ36fHdvgOqWTioa26lo6qCiqZ3yxg4q3e2PKluo9R8eIuOjPKQnRpOREE16QjQZicHrqIMej8i8TZHRzmiqiVlD/1troavVCYp9gfHQANkXKrtaoavFGZa7y+/0v+zyQ2eLsw70HP3fA2ey4KiEgcAYHB4PfeyNHVh749wleF/f2t3niRoX4VNERMYWYwxZSTFkJcVwvjvVBjiD6e2u9h8UHP+2tZJHPhiYYCEnOZbpmQkUusuMzAQKMxPxxU284HgoBUQRGde8nghykmPJST7yvfldPQGqmjv6A2RFUwfVzZ3U+jupaelkT42f9/fW0djWPejfJ8ZEOkHSDZSp8VGkxkeRlhA1sB3v7E+J8w6tj+TxMMZp4YtOcOaDPF7WOi2Zh4bIvnVXq7scabvVGfQn+HGXH2zvEN9PhBMaz/sWnPO1438/IiIiOIPpzcvxMS9nYEwEay1VzU5rY1FFM7uqWthd42fVmjo6ugfuyklPiHZCY5YTGvuCY3pC1NiZs3GEKSCKyEkvKjLCGfAmdfBWyD5dPQHqWp3Q2Bcea/1d1LQ42zX+TrZXNlPf2nXEMGmM09k+LSg0piZEkRYfRXKcEyCT47wkx0WRHOusfbHe8PSPMMaZvsMbM3xzLlkLvd3O4D3d7dDVBt1BS1ebs7//eXfd3QZZIUajFREROQHGGCa53U8umDXQ2hgIWMoa29lV3cLuaj+7qvzsqvYfNgK7L9bb39I4LSOe6RkJTMtIIC8lduQvDI8yBUQREVdUZITbT/EyZEi0AAAKMklEQVToI4X19AZoaOumvrWLutZO6lu7nG1/18B2ayfFtX7W7XMeB0J0+U6KiSQl3gmNvr4gGRQgk2K9JMVEkhjjJSk2kqQYZ19idCQRY6nzvTEQGeUssSlHP15ERCSMIiJM/0XkC2cNdPvoa3HsD47VfnZX+Xm5qOqggXG8HkN+ahzTMtzgmJ7A9Exn3ueU+KhwvKUTpoAoInIcIj0RZCQ6fRQh8ajHBwKWlo4eGtu7aGjrprHNaYVsbHMeN7V309C3r72bfXWtNLZ109zRfVj/yUMlRkc6YTGmLzg668SgQJkY9Ng5zn0uxkuMN2LC3DYjIiJyLIJbHJcXZhz0XGNbF3tqWimu8fevi2tbeWNHNd29A5V2SpzXCY7p8UzLSGBqujP/c35q3MiMbzBMFBBFREZBRITBF+fFF+dlyhDu5uwNWJrbu2np6KG5wwmMze3OdktHD83tA/ta3OfLGzv4qKOF5vZu/J09IVsuASIjzEHhMTEmkoRoZzs+2kNCtJeEaA/x0ZEk9C0xkQc97tvWUOIiInKyS46LYsmUKJZMOfhOmZ7eAKUN7RTX+imuaWVPTSt7avy8vqOGx9eX9h9nDEz2xVKQHkdBWjxT052lID2evJQ4oiLDe8uqAqKIyBjmiTCkxEcd920q1lpau3ppCQqUfWGzpaPHXboPe1za0Ia/s4fWzh5aO3vp6j3KtBquWK+H+GgPsVEe4qMiiYvyEOeu46Mj3f0D++KiI93HHmZmJTItI+G43qeIiEi4RXoiKHCD3oWzDn6uuaObktpW9rpLSW0re+vaeG5zBU3tA+MaRBjITYmjID2eqWnuOj2es6anER05Oq2OCogiIicxY0x/K1+27+jHH0lnTy+tnb20dvbgD1paO3vwd/Rt9+Lv7Katq9ddemjrcv6m1t/Zv6+1s5f27sNHOv3XFTO57aLCE3i3IiIiY1NSjJf5ucnMz00+7LmG1i6K3dBYUucGyLpWNuxrwO8OlLP9e5eMWlkVEEVE5KiiIz1ER3pIHaYO94GApb27l9auHto6nUCZljA+O/OLiIiciJT4KJbEH37LqrWWWn8XBxraiI0avT6LCogiIjLqIiIM8W7fxWMY40dERGTCMcYEDYg3ek6uSTtERERERETkuCkgioiIiIiICKCAKCIiIiIiIi4FRBEREREREQEUEEVERERERMSlgCgiIiIiIiKAAqKIiIiIiIi4FBBFREREREQEUEAUERERERERlwKiiIiIiIiIAGCsteEuw4gzxtQA+07wZdKB2mEozslK5yc0nZ/QdH5C0/kJLfj8TLHWZoSzMOOJ6sdRofNzdDpHoen8hKbzE9qQ68gJERCHgzFmnbV2abjLMVbp/ISm8xOazk9oOj+h6fyEl85/aDo/R6dzFJrOT2g6P6Edz/nRLaYiIiIiIiICKCCKiIiIiIiISwHx2N0f7gKMcTo/oen8hKbzE5rOT2g6P+Gl8x+azs/R6RyFpvMTms5PaEM+P+qDKCIiIiIiIoBaEEVERERERMSlgHgUxphLjDE7jDG7jTHfCXd5xiJjTIkxZosxZqMxZl24yxNuxpg/GGOqjTFbg/alGmNeMcbsctcp4SxjOB3h/NxpjClzP0MbjTGfCGcZw8kYk2eMed0YU2SM2WaM+Zq7X58hQp4ffYbCQHVkaKofD6b6MTTVj6GpfgxtOOtH3WIagjHGA+wEVgClwFpgpbW2KKwFG2OMMSXAUmut5qABjDHnAn7gIWvtPHffj4F6a+3/uD+iUqy13w5nOcPlCOfnTsBvrf1pOMs2FhhjsoFsa+0GY0wisB74FHAT+gyFOj/XoM/QqFIdeXSqHw+m+jE01Y+hqX4MbTjrR7UghnY6sNtaW2yt7QIeBa4Ic5lkjLPWvgXUH7L7CuBBd/tBnP+wE9IRzo+4rLUV1toN7nYLsB3IQZ8hIOT5kdGnOlKGRPVjaKofQ1P9GNpw1o8KiKHlAAeCHpeiHyKDscDLxpj1xpibw12YMSrLWlvhblcCWeEszBj1VWPMZvcWmwl5e8ihjDEFwCJgDfoMHeaQ8wP6DI021ZFHp/rx6PTddnT6bjuE6sfQTrR+VECU4bDMWrsY+AfgVvcWCTkC69zXrXu7D/ZrYDqwEKgAfhbe4oSfMSYBeBL4urW2Ofg5fYYGPT/6DMlYpPpxCPTdNih9tx1C9WNow1E/KiCGVgbkBT3OdfdJEGttmbuuBp7Cue1IDlbl3hved494dZjLM6ZYa6ustb3W2gDwv0zwz5Axxovz5b7KWvsXd7c+Q67Bzo8+Q2GhOvIoVD8eE323haDvtoOpfgxtuOpHBcTQ1gKFxpipxpgo4DrgmTCXaUwxxsS7HWExxsQDFwNbQ//VhPQMcKO7fSPw1zCWZczp+2J3XckE/gwZYwzwe2C7tfbnQU/pM8SRz48+Q2GhOjIE1Y/HTN9tIei7bYDqx9CGs37UKKZH4Q4FezfgAf5grf1hmIs0phhjpuFcFQWIBB6e6OfIGPMIcD6QDlQB3wWeBv4M5
AP7gGustROyI/oRzs/5OLc+WKAE+FJQf4IJxRizDHgb2AIE3N3/htOPYMJ/hkKcn5XoMzTqVEcemerHw6l+DE31Y2iqH0MbzvpRAVFEREREREQA3WIqIiIiIiIiLgVEERERERERARQQRURERERExKWAKCIiIiIiIoACooiIiIiIiLgUEEXGIGNMrzFmY9DynWF87QJjzISdR0lERMY31ZEiIysy3AUQkUG1W2sXhrsQIiIiY5DqSJERpBZEkXHEGFNijPmxMWaLMeYDY8wMd3+BMeY1Y8xmY8zfjTH57v4sY8xTxphN7nK2+1IeY8z/GmO2GWNeNsbEusffbowpcl/n0TC9TRERkSFTHSkyPBQQRcam2ENun7k26Lkma+2pwH3A3e6+e4EHrbXzgVXAPe7+e4A3rbULgMXANnd/IfBLa+1coBG4yt3/HWCR+zpfHqk3JyIicgJUR4qMIGOtDXcZROQQxhi/tTZhkP0lwIXW2mJjjBeotNamGWNqgWxrbbe7v8Jam26MqQFyrbWdQa9RALxirS10H38b8Fprf2CMeRHwA08DT1tr/SP8VkVERIZEdaTIyFILosj4Y4+wPRSdQdu9DPRHvhT4Jc6V1LXGGPVTFhGR8UR1pMgJUkAUGX+uDVqvdrffA65zt68H3na3/w58BcAY4zHG+I70osaYCCDPWvs68G3ABxx2hVZERGQMUx0pcoJ05UNkbIo1xmwMevyitbZvGO8UY8xmnCucK919twH/Z4z5FlADfM7d/zXgfmPMF3Cugn4FqDjCv+kB/uRWkAa4x1rbOGzvSEREZHiojhQZQeqDKDKOuP0rllpra8NdFhERkbFEdaTI8NAtpiIiIiIiIgKoBVFERERERERcakEUERERERERQAFRREREREREXAqIIiIiIiIiAiggioiIiIiIiEsBUURERERERAAFRBEREREREXH9f+minCG/8aa9AAAAAElFTkSuQmCC\n",
+      "text/plain": [
+       "<Figure size 1080x360 with 2 Axes>"
+      ]
+     },
+     "metadata": {
+      "needs_background": "light"
+     },
+     "output_type": "display_data"
+    },
+    {
+     "data": {
+      "image/png": "iVBORw0KGgoAAAANSUhEUgAAA4gAAAE9CAYAAABJKEwhAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4xLjEsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy8QZhcZAAAgAElEQVR4nOzdeXhV1fX/8ffKHEiYMwhBZhIGQSHiiAKpiopSrQM4VK3VOre12mK/rVa/+q3+aq3VWq1DHaiK1mpLFaQqOBaVQUSZBxkCCGEeAxnW7497E0MM4UJyOUnu5/U8eTh3n7NP1pU+7K5z9l7b3B0RERERERGRuKADEBERERERkYZBCaKIiIiIiIgAShBFREREREQkTAmiiIiIiIiIAEoQRUREREREJEwJooiIiIiIiACQEHQAh0K7du28c+fOQYchIiJRNmPGjPXunhF0HI2FxkcRkdgR6RgZEwli586dmT59etBhiIhIlJnZ8qBjaEw0PoqIxI5Ix0hNMRURERERERFACaKIiIiIiIiEKUEUERERERERIMprEM1sOPBHIB540t3vrXY+GXgOGAhsAC5092Xhc7cBVwJlwE3uPsnMcoGXqtyiK3C7uz8Yze8hIk1XSUkJhYWFFBcXBx2KHICUlBRycnJITEwMOhQRkSZJ42PjVdcxMmoJopnFA48ApwCFwDQzG+/uc6tcdiWwyd27m9ko4D7gQjPrDYwC+gDtgbfNrKe7LwCOrHL/VcBr0foOItL0FRYWkp6eTufOnTGzoMORCLg7GzZsoLCwkC5dugQdjohIk6TxsXGqjzEymlNMBwGL3X2pu+8BxgEjq10zEng2fPwKUGCh/wWOBMa5+253/wpYHL5fVQXAEndXxToROWjFxcW0bdtWg18jYma0bdtWT7VFRKJI42PjVB9jZDQTxA7AyiqfC8NtNV7j7qXAFqBthH1HAS/WY7wiEqM0+DU++jsTEYk+/VvbONX1761RFqkxsyTgbODvtVxztZlNN7PpRUVFhy44EZEDlJaWFnQIIiIiDY7Gx2BEM0FcBXSs8jkn3FbjNWaWALQkVKxmf31PB2a6+9p9/XJ3f9zd8909PyMj46C/hIiIiIiISKyIZoI4DehhZl3Cb/xGAeOrXTMeuCx8fB4w2d093D7KzJLNrAvQA/i0Sr/RHMLppf/+fDVTl2w4VL9ORIRly5YxbNgw+vXrR0FBAStWrADg73//O3379qV///6cdNJJAMyZM4dBgwZx5JFH0q9fPxYtWhRk6BJDVm7cyTMffcWuPWVBhyIiMULjY/RFLUEMrym8AZgEzANedvc5ZnaXmZ0dvuwpoK2ZLQZuBsaE+84BXgbmAm8C17t7GYCZNSdUGfXVaMVe3X1vzuf5T1QLR0QOnRtvvJHLLruM2bNnc/HFF3PTTTcBcNdddzFp0iQ+//xzxo8PPXN77LHH+PGPf8ysWbOYPn06OTk5QYYuMeTLVVv4zb/nsmjdtqBDEZEYofEx+qK6D6K7TwAmVGu7vcpxMXD+PvreA9xTQ/sOQoVsDpncrHQWrtXgJ9LU3fnvOcxdvbVe79m7fQvuOKvPAfebOnUqr74aeg526aWX8vOf/xyAE044gcsvv5wLLriAc889F4DjjjuOe+65h8LCQs4991x69OhRf19ApBa52ekAzP96G/1yWgUcjYhEi8bH2NIoi9QcarnZ6Swt2sGe0vKgQxGRGPfYY49x9913s3LlSgYOHMiGDRu46KKLGD9+PKmpqZxxxhlMnjw56DBlP8xsuJktMLPFZjamhvPJZvZS+PwnZtY53D7IzGaFfz43s3MivWc0dGrbnJTEOBZ8rYeoIhIsjY/1J6pvEJuK3Ox0SsudJUXb6XVYi6DDEZEoOZgnmdFy/PHHM27cOC699FKef/55Bg8eDMCSJUs45phjOOaYY5g4cSIrV65ky5YtdO3alZtuuokVK1Ywe/Zshg0bFvA3kH0xs3jgEULLJQqBaWY23t3nVrnsSmCTu3c3s1HAfcCFwJdAvruXmtlhwOdm9m/AI7hnvYuPM3pkpitBFGniND7GFiWIEaiYQrNw7TYliCJS73bu3LnXuoibb76Zhx9+mCuuuILf/e53ZGRk8PTTTwNw6623smjRItydgoIC+vfvz3333cfYsWNJTEwkOzubX/7yl0F9FYnMIGCxuy8FMLNxwEhC6+4rjAR+Ez5+BfiTmZm776xyTQqhxDDSe0ZFbnY67y7QdlIiUv80PgZDCWIEurZLIyHOmP/1NkYGHYyINDnl5TVPX69pKkzFuouqxowZw5gxh2RGodSPDsDKKp8LgWP2dU34beEWQuvv15vZMcBfgU7ApeHzkdwzKvKy03llRiEbtu+mbVryofiVIhIjND4GQ2sQI5CUEEfXjOYs1BQaEREJmLt/4u59gKOB28ws5UD6m9nVZjbdzKYXFdX9zV/FLBtNMxURaRqUIEYoN7sF8zX4iYhI3a0COlb5nBNuq/EaM0sAWgJ7bcjr7vOA7UDfCO9Z0e9xd8939/yMjIw6fI2QqpVMRUSk8VOCGKHcrDRWbd7FtuKSoEMREZHGbRrQw8y6mFkSMAoYX+2a8cBl4ePzgMnu7uE+CQBm1gnIA5ZFeM+oyEhLpk3zJG0HJSLSRGgNYoRys0PFaRau3c7ATq0DjkZERBqr8JrBG4BJQDzwV3efY2Z3AdPdfTzwFDDWzBYDGwklfAAnAmPMrAQoB65z9/UANd3zUHwfMyM3K11vEEVEmggliBHKzfqmkqkSRBERqQt3nwBMqNZ2e5XjYuD8GvqNBcZGes9DJTc7nZenr6S83ImLsyBCEBGReqIpphHKaZ1Ks6R4LcIXERGpJi87nZ17yijctCvoUEREpI6UIEYoLs7omZXO/K+3Bh2KiDQhQ4cOZdKkSXu1Pfjgg1x77bW19ktLSwNg9erVnHfeeTVeM2TIEKZPn17rfR588EF27vxma70zzjiDzZs3RxJ6rX7zm99w//331/k+0jh8U6hGY6SI1A+Nj8FRgngAcrPSWfD1Ntx9/xeLiERg9OjRjBs3bq+2cePGMXr06Ij6t2/fnldeeeWgf3/1AXDChAm0atXqoO8nsalnlra6EJH6pfExOEoQD0BudjqbdpZQtH130KGISBNx3nnn8cYbb7Bnzx4Ali1bxurVqxk8eDDbt2+noKCAAQMGcMQRR/Cvf/3rW/2XLVtG3759Adi1axejRo2iV69enHPOOeza9c10v2uvvZb8/Hz69OnDHXfcAcBDDz3E6tWrGTp0KEOHDgWgc+fOrF+/HoAHHniAvn370rdvXx588MHK39erVy+uuuoq+vTpw6mnnrrX79mfmu65Y8cOzjzzTPr370/fvn156aWXgNAGx71796Zfv37ccsstB/TfVQ6t5skJHN6mGfNVyVRE6onGx+DGRxWpOQAVU2gWfr2dzPQD2pdYRKRGbdq0YdCgQUycOJGRI0cybtw4LrjgAsyMlJQUXnvtNVq0aMH69es59thjOfvssz
GruQjIo48+SrNmzZg3bx6zZ89mwIABlefuuece2rRpQ1lZGQUFBcyePZubbrqJBx54gClTptCuXbu97jVjxgyefvppPvnkE9ydY445hpNPPpnWrVuzaNEiXnzxRZ544gkuuOAC/vGPf3DJJZfs97vu655Lly6lffv2vPHGGwBs2bKFDRs28NprrzF//nzMrF6m9Uh05Wan6w2iiNQbjY/BjY9KEA9A1TUWJ/Zot5+rRaTRmTgGvv6ifu+ZfQScfm+tl1RMo6kYAJ966ikA3J1f/vKXvP/++8TFxbFq1SrWrl1LdnZ2jfd5//33uemmmwDo168f/fr1qzz38ssv8/jjj1NaWsqaNWuYO3fuXuer+/DDDznnnHNo3rw5AOeeey4ffPABZ599Nl26dOHII48EYODAgSxbtiyi/xT7uufw4cP52c9+xi9+8QtGjBjB4MGDKS0tJSUlhSuvvJIRI0YwYsSIiH6HBCcvO53J89exu7SM5IT4oMMRkfqk8bFSLIyPmmJ6ANqlJdNWmwGLSD0bOXIk77zzDjNnzmTnzp0MHDgQgOeff56ioiJmzJjBrFmzyMrKori4+IDv/9VXX3H//ffzzjvvMHv2bM4888yDuk+F5OTkyuP4+HhKS0sP+l4APXv2ZObMmRxxxBH86le/4q677iIhIYFPP/2U8847j9dff53hw4fX6XdI9OVmp1NW7ixetz3oUESkidD4GMz4qDeIB0hTaESasP08yYyWtLQ0hg4dyg9+8IO9Ft9v2bKFzMxMEhMTmTJlCsuXL6/1PieddBIvvPACw4YN48svv2T27NkAbN26lebNm9OyZUvWrl3LxIkTGTJkCADp6els27btW1NoBg8ezOWXX86YMWNwd1577TXGjq1x+72I7eueq1evpk2bNlxyySW0atWKJ598ku3bt7Nz507OOOMMTjjhBLp27Vqn3y3Rl5f9TaGaPu1bBhyNiNQrjY+VYmF8VIJ4gHKz0xn3qTYDFpH6NXr0aM4555y9KrZdfPHFnHXWWRxxxBHk5+eTl5dX6z2uvfZarrjiCnr16kWvXr0qn7T279+fo446iry8PDp27MgJJ5xQ2efqq69m+PDhtG/fnilTplS2DxgwgMsvv5xBgwYB8MMf/pCjjjoq4ukyAHfffXflQnuAwsLCGu85adIkbr31VuLi4khMTOTRRx9l27ZtjBw5kuLiYtydBx54IOLfK8Ho3LY5SfFxeogqIvVK4+OhHx8tFrZsyM/P9/3tdRKpcZ+uYMyrX/DerUPo1LZ5vdxTRIIzb948evXqFXQYchBq+rszsxnunh9QSI1OfY6PAKf/8QMy05N59geD6u2eIhIMjY+NW13GSK1BPEDfFKrRE1IREZGq8rQMQ0Sk0VOCeIB6ZFVsdaEBUEREpKrc7HS+3lrMlp0lQYciIiIHSQniAUpLTqBjm1RtBiwiIlJN1e2gRESkcVKCeBBys9L1BlGkCYmFtdhNjf7OGqbKSqZ6iCrSJOjf2saprn9vShAPQm52OkvX72B3aVnQoYhIHaWkpLBhwwYNgo2Iu7NhwwZSUlKCDkWqyW6RQouUBK3TF2kCND42TvUxRmqbi4PQMyu0GfDSoh30OqxF0OGISB3k5ORQWFhIUVFR0KHIAUhJSSEnJyfoMKQaMyMvu4UK1Yg0ARofG6+6jpFKEA9CXnYoKVzw9TYliCKNXGJiIl26dAk6DJEmIzc7nX9+tgp3x0z7BYs0VhofY5emmB6ELu2akxhvmkIjIiJSTW52Ott2l7Jq866gQxERkYOgBPEgJCXE0bVdGgu1CF9ERGQvlYVq9BBVRKRRUoJ4kHK1GbCIiMi39Kzc6kJjpIhIYxTVBNHMhpvZAjNbbGZjajifbGYvhc9/Ymadq5y7Ldy+wMxOq9LeysxeMbP5ZjbPzI6L5nfYl9zsdFZt3sW2Ym0GLCIiUqFFSiIdWqXqIaqISCMVtQTRzOKBR4DTgd7AaDPrXe2yK4FN7t4d+ANwX7hvb2AU0AcYDvw5fD+APwJvunse0B+YF63vUJvcrNATUk0zFRER2Ztm2YiINF7RfIM4CFjs7kvdfQ8wDhhZ7ZqRwLPh41eAAguVPBsJjHP33e7+FbAYGGRmLYGTgKcA3H2Pu2+O4nfYp9zKNRbbg/j1IiIiDVZudjpLirazp7Q86FBEROQARTNB7ACsrPK5MNxW4zXuXgpsAdrW0rcLUAQ8bWafmdmTZtY8OuHXrkOrVJonxbPg661B/HoREZEGKy87ndJyZ+l6PUQVEWlsGluRmgRgAPCoux8F7AC+tbYRwMyuNrPpZjY9Ght8xsUZPbPTWaAppiIiInvJVSVTEZFGK5oJ4iqgY5XPOeG2Gq8xswSgJbChlr6FQKG7fxJuf4VQwvgt7v64u+e7e35GRkYdv0rNcrNCayzcPSr3FxERaYy6tksjIU77BYuINEbRTBCnAT3MrIuZJREqOjO+2jXjgcvCx+cBkz2UbY0HRoWrnHYBegCfuvvXwEozyw33KQDmRvE71Co3O51NO0so2rY7qBBEREQanKSEOLplpOkNoohII5QQrRu7e6mZ3QBMAuKBv7r7HDO7C5ju7uMJFZsZa2aLgY2EkkjC171MKPkrBa5397LwrW8Eng8nnUuBK6L1HfanopLpgrXbyGyRElQYIiIiDU5udjozlm8KOgwRETlAUUsQAdx9AjChWtvtVY6LgfP30fce4J4a2mcB+fUb6cGpusZicI/oTGMVERFpjHKz0xn/+Wq2FpfQIiUx6HBERCRCja1ITYPSNi2ZdmlJmkIjIiJSTV74IepCjZEiIo2KEsQ6ylUlUxERkW+pmGWjQjUiIo2LEsQ6ys1qwcK12ygvVyVTERGRCh1apZKenKBZNiIijYwSxDrKzU6juKScFRt3Bh2KiIhIg2EW3i9YCaKISKOiBLGOcrNbAGiaqYiISDW52enM/3qr9gsWEWlElCDWUY/MNAA9IRUREakmLzudrcWlfL21OOhQREQkQkoQ66h5cgKHt2mmBFFERKSaiv2CVahGRKTxUIJYD3pmqZKpiIhIdXkVyzCUIIqINBpKEOtBXnY6X63fwe7SsqBDERERaTBaNksku0WKEkQRkUZECWI96JmdTlm5s2TdjqBDERERaVBChWqUIIqINBZKEOtBXngz4AVrtwYciYiISMOSl53OknXbKSkrDzoUERGJgBLEetClXXMS440FX28POhQREZEGJTc7nT1l5Sxbr1k2IiKNgRLEepAYH0e3jDQWfK03iCIiIlXlZquSqYhIY6IEsZ7kZqdrEb6IiEg13TPTiI8zjZEiIo2EEsR60jMrndVbitlaXBJ0KCIiIg1GckI8Xdo11xtEEZFGQgliPakoVLNQA6CIiMhecrPTWaj9gkVEGgUliPWkZ1ZFJVMNgCIiIlXlZaWzYuNOduwuDToUERHZDyWI9SSndSppyQlaYyEiIlJNRaEavUUUEWn4lCDWEzOjZ1aaEkQREZFq8rJbAGiMFBFpBJQg1qPc7HQWrN2GuwcdioiISIOR0zqVZknxKlQjItIIK
EGsR7lZ6WzeWULRtt1BhyIiItJgxMUZPbO0HZSISGOgBLEe9dRmwCIiEgEzG25mC8xssZmNqeF8spm9FD7/iZl1DrefYmYzzOyL8J/DqvR5N3zPWeGfzEP3jfYvT7NsREQaBSWI9Si3opKpEkQREdkHM4sHHgFOB3oDo82sd7XLrgQ2uXt34A/AfeH29cBZ7n4EcBkwtlq/i939yPDPuqh9iYOQm53Oxh17KNquWTYiIg2ZEsR61DYtmXZpydrqQkREajMIWOzuS919DzAOGFntmpHAs+HjV4ACMzN3/8zdV4fb5wCpZpZ8SKKuo4pKpnqIKiLSsClBrGd52VpjISIiteoArKzyuTDcVuM17l4KbAHaVrvme8BMd6/6Su7p8PTSX5uZ1fTLzexqM5tuZtOLiorq8j0OiCqZiog0DkoQ61ludjqL1m2jrFxrLEREJDrMrA+haac/qtJ8cXjq6eDwz6U19XX3x909393zMzIyoh9sWJvmSWSkJ2udvohIA6cEsZ7lZqVTXFLOio07gw5FREQaplVAxyqfc8JtNV5jZglAS2BD+HMO8BrwfXdfUtHB3VeF/9wGvEBoKmuDolk2IiINnxLEeqY1FiIish/TgB5m1sXMkoBRwPhq14wnVIQG4Dxgsru7mbUC3gDGuPtHFRebWYKZtQsfJwIjgC+j/D0OWG5WOgvXapaNiEhDpgSxnvXISsNMCaKIiNQsvKbwBmASMA942d3nmNldZnZ2+LKngLZmthi4GajYCuMGoDtwe7XtLJKBSWY2G5hF6A3kE4fuW0WmZ3Y6u0vLWb5hR9ChiIjIPiRE8+ZmNhz4IxAPPOnu91Y7nww8BwwkNHXmQndfFj53G6Ey32XATe4+Kdy+DNgWbi919/xofocD1SwpgcPbNGOhKpmKiMg+uPsEYEK1tturHBcD59fQ727g7n3cdmB9xhgNeVVm2XTNSAs4GhERqUnU3iDWZZ+n8HWjgD7AcODP4ftVGBre46lBJYcVemalM//rrUGHISIi0qD0yEzHDBWqERFpwKI5xfSg93kKt49z993u/hWwmAa42H5f8rLTWbZhJ8UlZUGHIiIi0mCkJsXTuW1zLcMQEWnAopkg1mWfp9r6OvAfM5thZldHIe4665mVTlm5s6Roe9ChiIiINCi5Weks0DIMEZEGqzEWqTnR3QcQmrp6vZmdVNNFQW0EDHuvsRAREZFv5Gans2zDDnbt0SwbEZGGKJoJYl32edpn3yr7PK0jtA9UjVNPg9oIGKBzu+YkxcfpCamIiEg1ednpuMOidRojRUQaomgmiAe9z1O4fZSZJZtZF6AH8KmZNTezdAAzaw6cSgPc5ykxPo6uGVpjISIiUl3FfsEqVCMi0jBFbZsLdy81s4p9nuKBv1bs8wRMd/fxhPZ5Ghve52kjoSSS8HUvA3OBUuB6dy8zsyzgtVAdGxKAF9z9zWh9h0qF06FtN0htHXGXvOx0Pv1qYxSDEhERaXw6tW1OSmKcHqKKiDRQUd0H8WD3eQqfuwe4p1rbUqB//Udai/IyGHcR7NwAXYdA75GQNwKatam1W8/sdP45azVbdpXQMjXxkIQqIiJySJUUQ9luSGkZcZf4OKNHZroSRBGRBqoxFqk5tCwORo+D466HDYth/I3wu+7w3Hdh+tOwveYCOBWFahZpHaKIiDRVi9+GezvBoyfCGz+DL16BzSv32y03O11TTEVEGqiovkFsEsygw4DQz3fuhK9nw5x/wtx/wus/gTduhs4nht8sngXpWUBoqwsIrbHI71z720YREZFGKSMXhoyBFR/D5+Ng2pOh9hY5cPgxcPhxcPixkNkb4uIru+Vlp/PKjEI2bN9N27TkgIIXEZGaKEE8EGZwWP/QT8HtsHZOKFGc88/Qk9M3boFOx0PvkXTodRbpyQmaQiMiIk1Xux6hBBGgrBTWzQkliys+huX/hS//ETqX3AJyjg4li4cfS692nYDQdlDHd1eCKCLSkFioaGjTlp+f79OnT4/eL3CHovkw91+hZLFoHgDzEnvzXrNTuOYnd4aSSxERiSozm+Hu+UHH0VhEdXx0h80rYOUnsGIqrPgE1s0FHI9L4PPSw9nWsYCBZ/2IZtk9ohODiIhUinSM1BvE+mAGmb1CP0PGQNECmDueVlNf5Jotf6T4/daknPzToKMUERE5dMygdafQT78LQm27NkPhNFg+FfvvBE4ofJK4x55gbkIvFmefSdwR53BE9y4c3qYZpgerIiKB0BvEKJoy/2t2PH8ZI+I/ZtGQP9NjyMWHPAYRkViiN4gHJqjxEWBbcQlfzJ1L2ayX6Lz6dTqWLme3JzCl/CjeThzCjk7D6NcpiwGHt6JfTitSk+L3f1MREdmnSMdIJYhRNnPJGpKeH0n3sqW83PcxRp1zLkkJKh4rIhINShAPTJDj417cKVv9OZs/fo5mC/5J6p4NbCWN8aXH8GrZYGZbT3od1pKBnVpz1OGtOLZrW7JapAQdtYhIo6IEsYqgB8Adm76m+NGhlO/ezs9bPsBtFw+vrHIqIiL1RwnigQl6fKxRWSksnQKfj8Pnv4GV7mJTcg7vJA7hya1HM39PBglxxuXHd+bGgh7aa1hEJEJKEKtoEAPg+kWUPF7Ayj3pnF96J9cNH8gVx3cmLk5rLERE6osSxAPTIMbH2hRvhXn/htnj4KsPAGdn5kAmJJ3Cz5f0pXWzFG45LZcL8jsSr/FURKRWkY6Rmut4qLTrQeLo5+kSt5a/pf2Je1+fzaV//YQ1W3YFHZmIiEjDlNICjroYLvs3/PRL+M5vaOY7Oa/wXmZ3fYz8NsXc9uoXnPXwh3yydEPQ0YqINAlKEA+lLoOxsx+mV/FnvNn9NT5bsYnT/vA+4z9fHXRkIiIiDVvLHDjxp3DdVBjxIGnrZvLYtht45aS1bN65hwsf/5jrX5hJ4aadQUcqItKoKUE81I4cDSf/gm6Fr/HR8bPolpnGTS9+xo/HfcaWnSVBRyciItKwmUH+FfCjD7C23cj/9Ke83/Nlfj7kMN6Zt5aC37/HA28tZOee0qAjFRFplJQgBmHIbXDE+bT++F5eOWE1N5/Sk9dnr2H4H9/nv4vXBx2diIhIw9euO/xgEpz8CxK+fJnr5l3GB6NSOLVPNg+9s4iC37/Hv2atIhZqLYiI1CcliEEwg5GPwOHHEz/+em7qsYFXrz2e1KR4LnryE/739bkUl5QFHaWIiEjDFp8IQ38ZShTj4sj4+zk8nPEvXrlqIG3TkvjxuFmc99hUZhduDjpSEZFGQwliUBKSYdTzoTUVL46mf7MNvHHjYL5/XCee+vArzv7Th6zYoHUUIiIi+9VxEFzzIRx1CXz4B/LfOp9/XZDB//teP5Zv2MHIRz7i1r9/zrptxUFHKiLS4ClBDFKzNnDx30PHL1xAaukW7hrZl2euOJo1m4u5ffyXwcYnIiLSWCSnw8g/wYXPw5ZC4p84mQv8Tab87GSuHtyVf85axbD73+O9hUVBRyoi0qApQQxa224w6gXYvAJeugRKdzMkN5MbC7rz7oIirUkUERE5EL1GhCqd
dj4RJtxC+j9Gc9vg1vznpyeT0zqVG16YyZKi7UFHKSLSYClBbAg6HQfffRSWfwTjbwR3vn9cZzq0SuW3E+dTXq4F9iIiIhFLz4aLX4Ez7odlH8Kfj6NL0WSevCyfxPg4rnpuOluLVTlcRKQmB5QgmlmcmbWIVjAx7YjzYOivYPZL8N59pCTGc8tpPfli1Rb+PVv7JIqINERmdr6ZpYePf2Vmr5rZgKDjEkIF4QZdBT96H1p1hJcuIef9W3nswl6s2LCTn4ybRZkewIqIfMt+E0Qze8HMWphZc+BLYK6Z3Rr90GLQSbfAkRfDu7+Fz19iZP8O9D6sBb+btIDdpapqKiLSAP3a3beZ2YnAd4CngEcDjkmqysiFK9+GE2+Gz55n0OKHuP2s3kyev44H3loQdHQiIg1OJG8Qe7v7VuC7wESgC3BpVKOKVWYw4kHoPBj+dT1xq2dw2xl5FG7axdipy4OOTrEOnQoAACAASURBVEREvq3i6d2ZwOPu/gaQFGA8UpOEJPjOHZB/BUx7kku77mTU0R15ZMoS3pi9JujoREQalEgSxEQzSySUII539xJAczKiJSEJLhwLKS3hwz8wuEcGg3u0409TFrNll9ZLiIg0MKvM7C/AhcAEM0tG6/sbrqG/guQ0bNJt3Hl2bwYc3opb/v45c1dvDToyEZEGI5JB7C/AMqA58L6ZdQL0L2k0pbaGoy6GBRNh6xrGnJ7Hll0lPPrukqAjExGRvV0ATAJOc/fNQBtAyzAaquZtYcgvYem7JC9+k8cuGUjL1ESuem46G3fsCTo6EZEGYb8Jors/5O4d3P0MD1kODD0EscW2AZeBl8Gsv9GnfUvOObIDf/3oK1Zt3hV0ZCIiEubuO4F1wInhplJgUXARyX4dfSVk5MF//ofMVPjLpQMp2r6b65+fSUlZedDRiYgELpIiNVlm9pSZTQx/7g1cFvXIYl3bbtDlJJjxHJSXcfOpPQF44D8LAw5MREQqmNkdwC+A28JNicDfgotI9is+EYbfC5uWwceP0L9jK357zhFMXbqBe96YF3R0IiKBi2SK6TOEps+0D39eCPwkWgFJFQOvgC0rYMkUclo344rjO/PqZ4XMW6MZviIiDcQ5wNnADgB3Xw2kBxqR7F+3oZB7Jrz/e9i6mu8NzOHKE7vwzH+X8fK0lUFHJyISqEgSxHbu/jJQDuDupXxTtU2iKW8ENGsHM54G4Loh3WmRksi9E+cHHJiIiITtcXcnXLwtvCWUNAan3Q3lJfD2nQDcdnoeJ3Zvx6/++SUzV2wKODgRkeBEkiDuMLO2fDP4HQtsiWpUEpKQBEdeFCpWs+1rWjZL5Iah3XlvYREfLlofdHQiIgIvh6uYtjKzq4C3gScCjkki0aYrHHcDzB4HK6eREB/Hny46iuyWKVwzdgZrtxYHHaGISCAiSRBvBsYD3czsI+A54MaoRiXfGHh5qFjNZ2MBuPS4TnRolcpvJ86jvFy7jYiIBMnd7wdeAf4B5AK3u/vDwUYlERt8M6Rlw8SfQ3k5rZol8cT389m+u5QfjZ1BcYkmTIlI7ImkiulM4GTgeOBHQB93nx3Jzc1suJktMLPFZjamhvPJZvZS+PwnZta5yrnbwu0LzOy0av3izewzM3s9kjgatb2K1ZSTkhjPraflMmf1Vv49e3XQ0YmIxLTwlNLJ7n4roTeHqeG9g6UxSE6HU+6E1TPh8xcByM1O54ELjmTWys386p9fEppBLCISOyKpYno+kOruc4DvAi+Z2YAI+sUDjwCnA72B0eEKqFVdCWxy9+7AH4D7wn17A6OAPsBw4M/h+1X4MRA7pcYGXh4qVrN0MgBn929Pn/Yt+H9vLmB3qZ5uiogE6H0g2cw6AG8ClxIq7iaNxREXQM7R8PZvoDhUBG5432x+XNCDV2YU8sx/lwUanojIoRbJFNNfu/s2MzsRKACeAh6NoN8gYLG7L3X3PcA4YGS1a0YCz4aPXwEKzMzC7ePcfbe7fwUsDt8PM8sBzgSejCCGpiHvrFCxmumhYjVxccZtp/di1eZdjJ26PODgRERimoX3QjwXeNTdzyf0cFMai7g4GH4f7FgH7/+usvnHBT04tXcWd78xj48Wa92/iMSOSBLEildUZwJPuPsbQFIE/ToAVWtFF4bbarwmXB11C9B2P30fBH5OuKpqTKhWrAbgxB7tOKlnBg9PXsyWnSUBBygiErPMzI4DLgbeCLfF13K9NEQ5A+HIi+HjR2HDEiD0MPaBC4+kW0Zzrn9hJis37gw4SBGRQyOSBHFVuELbhcAEM0uOsF+9M7MRwDp3nxHBtVeb2XQzm15UVHQIoouyymI13+y/PGZ4HluLS/jze4uDi0tEJLb9BLgNeM3d55hZV2BKwDHJwSi4AxKSYdIvK5vSkhN4/NJ8ysudm8Z9pvWIIhITIkn0LgAmAae5+2agDXBrBP1WAR2rfM4Jt9V4jZklAC2BDbX0PQE428yWEZqyOszM/kYN3P1xd8939/yMjIwIwm3gKorVzHwWykMvT3u3b8E5R3Xg6Y+WsWrzroADFBGJPe7+nruf7e73mVkcsN7dbwo6LjkI6Vlw0q2w8E1Y9HZlc+d2zRlzei8+W7GZqUs2BBigiMihEUmCeBjwhrsvMrMhwPnApxH0mwb0MLMuZpZEqOjM+GrXjAcuCx+fR6gSnIfbR4WrnHYBegCfuvtt7p7j7p3D95vs7pdEEEvTMPBy2PxNsRqAn52aC8Dv/7MgoKBERGKXmb1gZi3C1Uy/BOaaWSQPUaUhOvba0P6Ib46Bsm+Wb5w7oAPt0pJ59L0lAQYnInJoRJIg/gMoM7PuwOOE3uy9sL9O4TWFNxB6+zgPeDk8/eYuMzs7fNlTQFszW0xov8Ux4b5zgJeBuYSqwl3v7irXmTcCmrWFGc9UNnVolcoVJ3Tmtc9WMXf11uBiExGJTb3dfSuhKt8TgS6EKplKY5SQDKf9FjYsgk8fr2xOSYznByd25oNF6/ly1ZYAAxQRib5IEsTycLJ3LvBweK+nwyK5ubtPcPee7t7N3e8Jt93u7uPDx8Xufr67d3f3Qe6+tErfe8L9ct19Yg33ftfdR0QSR5ORkBxaRF+lWA3AdSd3p0VKIve+OT/A4EREYlJieN/D7wLj3b0E0EK1xqznadCtAN69F7Z/U8PgkmM7kZ6cwGN6iygiTVwkCWKJmY0Gvg9UbEyvTYCDMuAyKC/dq1hNy2aJ3DisO+8vLOKDRU2gII+ISOPxF2AZ0Bx438w6AZrO0ZiZwfB7oWQnTP7fyuYWKYlcdOzhTPhiDcs37AgwQBGR6IokQbwCOA64x92/Cq8JHBvdsGSf2nWHzoP3KlYDcOlxnchpncq9E+dTXq6H1yIih4K7P+TuHdz9DA9ZDgwNOi6po4yeMOhHMPM5WD2rsvnKE7qQEBfHEx8sraWziEjjtt8E0d3nArcAX5hZX6DQ3e+LemSyb/lXhIvVfFNJPTkhnltPy2XO6q2M/3x1gMGJiMQOM2tpZg9UbKtkZr8n9DZRGruTfx5a9z/xFxDe3iKzRQrnDujA36cXUrR
td8ABiohEx34TxHDl0kXAI8CfgYVmdlKU45LaVBareXqv5rP6tadvhxY8NHlRQIGJiMScvwLbCG0JdQGh6aVP19oDMLPhZrbAzBab2Zgazieb2Uvh85+YWedw+ylmNsPMvgj/OaxKn4Hh9sVm9pCZWT19x9iU2goKfg0rP4Yv/1HZfPVJXdlTVs4z//0qwOBERKInkimmvwdOdfeT3f0k4DTgD9ENS2qVkAxHXvStYjVxccaF+R1ZWrSDpUXbAwxQRCRmdHP3O9x9afjnTqBrbR3MLJ7QQ9fTgd7AaDPrXe2yK4FN7t6d0JhbMXNnPXCWux9BaJuoqks+HgWuIrQ1VA9geN2+mnDUpZDdD966HfaE1h12zUhjeJ9sxk5dzvbdpQEHKCJS/yJJEBPdvXKTPXdfiIrUBG/A5aFiNbOe36t5aF4mAJPnrwsgKBGRmLPLzE6s+GBmJwC79tNnELA4nFDuAcYBI6tdMxJ4Nnz8ClBgZubun7l7xTqCOUBq+G3jYUALd/84vJ/wc4Qqq0pdxMXD6ffB1lXw4YOVzdec3I2txaW8+MmKAIMTEYmOSBLE6Wb2pJkNCf88AUyPdmCyHxXFambsXawmp3UzcrPSeWeeEkQRkUPgGuARM1tmZsuAPwE/2k+fDsDKKp8Lw201XhPeamoL0LbaNd8DZrr77vD1hfu5pxyMTsdD3+/BR3+EjaFppf07tuK4rm156sOv2FNavp8biIg0LpEkiNcS2rD+pvDPXEIDogRt4OWweflexWoAhvXKZNqyjWwtLgkmLhGRGOHun7t7f6Af0M/djyI0vTOqzKwPoWmn+0tGa+p7dUVRnaIibY0UkVP+F+ISYNL/VDZdM6QbX28t5p+zVgUYmIhI/Yukiulud3/A3c8N//wBbXPRMPQ6K1ys5pm9mgvyMiktdz5YuD6YuEREYoy7b3X3iv0P97dOfxXQscrnnHBbjdeYWQLQEtgQ/pwDvAZ8392XVLk+Zz/3rIj1cXfPd/f8jIyM/YQqALTsACffCgvegEVvAXBSj3b0PqwFj723RNtLiUiTEskbxJocV69RyMGpLFYzAbatrWw+6vDWtGqWyDvz19bSWUREomR/1UOnAT3MrIuZJQGjgPHVrhlPqAgNwHnAZHd3M2sFvAGMcfePKi529zXAVjM7Nly99PvAv+rhu0iFY6+DNt1C216U7sbMuGZIN5YW7eCteRpvRaTpONgEURqKymI1f6tsio8zhuZm8u6CIsr0VFNE5FCr9R/e8JrCG4BJwDzgZXefY2Z3mdnZ4cueAtqa2WLgZqBiK4wbgO7A7WY2K/yTGT53HfAksBhYAkyszy8V8xKS4fT/BxuXwMd/BuCMvtl0bJPKY+8twV3jrYg0DQn7OmFmA/Z1ClUxbTiqFqs54acQF8r5h+Zl8tpnq5i1cjMDO7UOOEgRkabFzL6g5kTQgKz99Xf3CcCEam23VzkuBs6vod/dwN37uOd0oO/+frfUQY/vQO6Z8N7v4IgLSGjZgasHd+XX/5rDp19t5Jiu1esIiYg0PvtMEAntf7gv8+s7EKmDgZfDP66Er96FbqE9k0/ukUF8nDFl/joliCIi9W9E0AFIQIb/H/xpELz1azjvr5yf35EH317EY+8tUYIoIk3CPhNEdx96KAOROqgoVjP96coEsWWzRPI7tead+eu45bTcgAMUEWla3H150DFIQFp3hhN/Cu/dCwOvIKXLYK44oTP3/2ch89ZspddhLYKOUESkTrQGsSnYR7Gagl6ZzFuzldWb97dns4iIiETsxJ9Aq8Nh4s+hrJRLj+1M86R4/vLekv33FRFp4JQgNhWVxWqer2walhdaBjN5/rqAghIREWmCElPhtP+DdXNh2pO0bJbI6EGH8+/Za1i5cWfQ0YmI1IkSxKaioljNzGehvByAbhnNObxNMyWIIiL1zMz2OY/QzA4/lLFIQPJGhJZ1TLkHtq/jysFdiDN46sOvgo5MRKRO9pkgmtklVY5PqHbuhmgGJQdp4OWwaVmoWA1gZgzLy+SjxevZtacsyMhERJqadysOzOydauf+eWhDkUCYhba9KNkFb9/JYS1TGXlkB8ZNW8HGHXuCjk5E5KDV9gbx5irHD1c794MoxCJ11essSG0TKlYTVtArk92l5Uxduj7AwEREmhyrctymlnPSlLXrAcddF9qLeOU0rjm5K8Ul5Tzz32VBRyYictBqSxBtH8c1fZaGICEZjjgPFr0FJcUADOrShuZJ8bwzT9NMRUTqke/juKbP0pSddCukHwYTbqF7u2ac0juL56YuY+ee0qAjExE5KLUliBr8GqPup0DpLlgxFYDkhHgG98hg8vx1uOuvTUSknmSa2c1m9rMqxxWfM4IOTg6h5HQ49W5YMws+G8s1J3dj884Sxn26MujIREQOSm0JYp6ZzTazL6ocV3zWxnoNVecTID4JlnyzJGZYXiZrthQzb822AAMTEWlSngDSgbQqxxWfnwwwLglC3+9BpxPg7TsZmOEM6tyGJz9YSklZedCRiYgcsIRazvU6ZFFI/UlqDocfC4snw6mhpiF5oYfZUxaso3d7beArIlJX7n7nvs6Z2dGHMhZpACoK1vxlMEy5h2uG/JwfPDOd8bNW872BOUFHJyJyQPb5BtHdl1f9AbYDA4B24c/SUHUrgHVzYOsaADLTU+if05J35q0NODARkabJzHqb2f+a2WLg0aDjkQBk94Wjr4Lpf2Voy6/JzUrnL+8vobxcyztEpHGpbZuL182sb/j4MOBLQtVLx5rZTw5RfHIwuheE/lw6pbJpWF4Wn63czIbtuwMKSkSkaTGzzmZ2m5nNBsYC1wLfcff8gEOToAz9JaS2wSb8nGtO7sLCtduZskBF4kSkcaltDWIXd/8yfHwF8Ja7nwUcg7a5aNgy+0DzTFj8zTrEgl6ZuMO7C4oCDExEpGkws6nAG4SWanzP3QcC29x9WaCBSbBSW8F3fgMrP+Ys+5AOrVJ57L0lQUclInJAaksQS6ocFwATANx9G6BV1w1ZXBx0GxZ6g1ge+qvq074FmenJTJ6vJ5kiIvVgLaGiNFl8U7VUcwkFjrwYOgwk4Z07uPa4DKYt28Tk+VriISKNR20J4kozu9HMziG09vBNADNLBRIPRXBSB90LYOeGUNltwMwYlpfJ+wuLVFVNRKSO3P27wBHADOA3ZvYV0NrMBgUbmQQuLg7O+B1sX8eonS+Sl53OL/7xBRt37Ak6MhGRiNSWIF4J9AEuBy50983h9mOBp6Mcl9RV16GhP6ttd7FtdynTlm0MKCgRkabD3be4+9PufiqhsfF24A9mpg3wYl2HgTDgUhKm/YVHTm3Olp0l3PbqbO1HLCKNQm1VTNe5+zXuPtLd/1OlfYq73x/Jzc1suJktMLPFZjamhvPJZvZS+PwnZta5yrnbwu0LzOy0cFuKmX1qZp+b2Rwz22eZ8ZiXlgGH9Ycl3xSqOaF7O5IS4pg8T9NMRUTqk7uvdfeH3f0E4MSg45EGoOAOSGpOt+l3ccupPZg0Zy1/n1EYdFQiIvu1z30QzWx8bR3d/e
zazptZPPAIcApQCEwzs/HuPrfKZVcCm9y9u5mNAu4DLjSz3sAoQm8w2wNvm1lPYDcwzN23m1ki8KGZTXT3j/f7TWNRt2Hw34eheCuktKB5cgLHdW3L5Pnr+NWI3kFHJyLSaO1vjARqHSMlBjRvB8N+DRNu4YfdJjK5az53jp/DcV3b0rFNs6CjExHZp9qmmB4H5AAfAPcDv6/2sz+DgMXuvtTd9wDjgJHVrhkJPBs+fgUoMDMLt49z993u/hWwGBjkIdvD1yeGfzRfY1+6FUB5KSz7oLJpWF4mS9fvYGnR9lo6iojIftR1jJRYkH8l9DqbuLd+zSNHriTOjJ++NIsy7Y0oIg1YbQliNvBLoC/wR0JvAte7+3vu/l4E9+4AVF2HURhuq/Eady8FtgBta+trZvFmNgtYR2jrjU8iiCU2dTwGktL22u5iWF4mgKqZiojUTV3HSIkFcXFw7uOQczRt/3MjfzqphOnLN2nrCxFp0Gpbg1jm7m+6+2WEFt8vBt41sxsOWXT7jutIQk9uB5lZ35quM7OrzWy6mU0vKorRvf8SkqDz4L0K1XRs04yeWWnauFdEpA4a6hgpDVBiKox+EdIP46QZN3F5Xjl/eGshX67aEnRkIiI1qu0NYkURmXOBvwHXAw8Br0V471VAxyqfc8JtNV5jZglAS2BDJH3DVVWnAMNr+uXu/ri757t7fkZGRk2XxIZuw2DTMti4tLJpWF4WnyzdyLbikn33ExGRWtVxjJRY0rwdXPIPzJ1fb7mdbs1385OXZlFcUhZ0ZCIi37LPBNHMngOmEtoD8U53P9rd/9fdqyd5+zIN6GFmXcwsiVDRmeqL+scDl4WPzwMme6gG9HhgVHjw7QL0AD41swwzaxWOL5XQlJ75EcYTm7oXhP6sMs20oFcmpeXOB4vWBxSUiEjjVg9jpMSatt1g9Djit67i5ZYPsXLdRu6dqP8LIyINT21vEC8hlJj9GPivmW0N/2wzs637u3F4TeENwCRgHvCyu88xs7vMrKK621NAWzNbDNwMjAn3nQO8DMwF3gSud/cy4DBgipnNJpSAvuXurx/4144hbbpCq06wZHJl01EdW9EyNZF3tN2FiMjBqtMYKTHq8GPg3L/Qcv1MXs1+jmf/u5T3F8boMhgRabD2uc2Fu9c6/TQS7j4BmFCt7fYqx8XA+fvoew9wT7W22cBRdY0rppiF3iLOfhlK90BCEgnxcQzJzeDdBesoL3fi4izoKEVEGpX6GCMlRvU5BzavpM9bv+a+Fi259ZVUJv3kJFo1Swo6MhERYD9rEKWJ6FYAe7ZD4aeVTcPyMtmwYw+fF24OMDAREZEYdPyNcPQPuWDPawzf9Tr/89qXhFbYiIgETwliLOgyGCx+r2mmJ/fMID7OtN2FiIjIoWYGw++DnqdzR/yzFM95ndc+0/JVEWkYlCDGgpSW0HHQXoVqWjVLYmCn1lqHKCIiEoT4BDjvKax9fx5J/hMv/Ws8hZt2Bh2ViIgSxJjRrQDWfA47vqlcOiwvk7lrtrJmy64AAxMREYlRSc2x0S+RkJbBI9zLb1+YRFm5ppqKSLCUIMaK7sMAhyVTKpsK8jIBNM1UREQkKOlZJHz/VVokOT9Z+z88N3lW0BGJSIxTghgrDjsSUlvDkm+mmXbPTKNjm1SmKEEUEREJTkYuiRe9QJe4tfR6/zrmrdTWFyISHCWIsSIuHroODRWqCVdKMzMK8rL4cPF6ikvKAg5QREQkdlmXwRSf+TDHxs1l1XM/pHhPadAhiUiMUoIYS7oXwPa1sHZOZdOwvEyKS8qZumRDgIGJiIhI2tEXsbTfT/lOybtMe/pnQYcjIjFKCWIs6TYs9GeVaabHdG1Ds6R43pm/NqCgREREpELXc+5gepuzGLzmGb4af2/lrB8RkUNFCWIsadEeMnvvtd1FckI8J3Zvx+R567RJr4iISNDM6HPVk3yYcCxdZv6Wzc9cCDs3Bh2ViMQQJYixptswWDEV9nyz11JBr0xWbylmwdptAQYmIiIiAKmpKXS9/lX+lHg5zZe/Tckjx8Oyj4IOS0RihBLEWNNtGJTtgeXfDDRDc0PbXbwzT9VMRUREGoL2rZvz3et+y4+S7mP1DsefHQFT/g/KVLxGRKJLCWKs6XQ8JKTsNc00s0UK/XJaaj9EERGRBiSndTN+86OL+UHS73mdwfDeffDsCNi8MujQRKQJU4IYaxJTodMJexWqgdBbxJkrNrFxx56AAhMREZHqDm/bjKeuHsrdiTfx67ibKF8zGx47EeaODzo0EWmilCDGom7DYP3CvZ5AntonC3f49+erAwxMREREquvcrjkvXHUsb8afzPf8PopbdIKXL4XXfwolu4IOT0SaGCWIsah7QejPJZMrm/q0b0n/nJaM/Xi5qpmKiIg0MN0y0njhh8ewwrMp2HQbW466Fqb/FR4fCmvnBh2eiDQhShBjUUYepLf/1jTTS4/rzOJ125m6ZENAgYmIiMi+9MhK54WrjmVnWTynzz2FopEvwM718MRQmPaU9kwUkXqhBDEWmUH3YbD03b2qoY3odxitmyXy3NTlwcUmIiIi+5Sbnc7ffngMO/aUcc5/Ullz0eRQbYE3bg5NO9WeiSJSR0oQY1W3AijeAqtnVjalJMZzwdEdeWveWtZs0ZoGERGRhqhP+5b87cpj2LKrhAufX8Kas8bCqXfDgjfhscGw/L9BhygijZgSxFjVdQhge213AXDJMZ0od+eFT1YEEZWIiIhE4Iicloy98hg27tjDRU9OY23fq+DK/0B8IjxzJrx1B5TuDjpMEWmElCDGqmZtoMOAvQrVAHRs04xhuZm8+OlK9pSWBxSciIiI7M+RHVvx7A+OZt3WYi564mOKWvSBaz6Aoy6Fjx6Ex4fA6llBhykijYwSxFjWrQBWTYddm/ZqvvS4TqzfvpuJX64JKDARERGJxMBObXj6ikGs3hxKEjeUJMHZD8HFr4TWIz5ZAO/eB2UlQYcqIo2EEsRY1r0AvByWvrdX80k9MujcthljVaxGRESkwRvUpQ1PXZ7Pyk07ufjJT9i0Yw/0OAWumwp9zoV3/w+e/A6smxd0qCLSCChBjGUdBkJyi29tdxEXZ1xybCemL9/E3NVbAwpORETk/7d33/FxVQfe/z9nmnqxim0VF7kBtjEYsIHY9FACZAlpQEhIAs+SkJCySXZTdlOezea3m/1tQiCFFFIoSQihxWFJgBBITDPGxhgXcJVtFcsqVm+jmfP8ce5II1kaS1jSSNb3/Xrd171z753xmcswR9855cpwvW1+AXfdsII9dW184K51VDV2uOEk7/kZvP8eaDoAPzkXnr8dopFkF1dEJjAFxKnMH4Syc2HXX4+4d9L7Tp9FatDHvS+VJ6dsIiLHMWPMZcaYN40xu4wxXxrkeIox5nfe8XXGmLne/nxjzDPGmFZjzA8GPOdZ7zU3ecv08Xk3MlGsXljAz244gwMN7Vxxx1rW7qx1BxZfBZ9YBwsvgae+Br98B9TvTm5hRWTCUkCc6hZcBM0VULez3+6c9CBXnVLCo69W0dShcQsiI
qPFGOMHfgi8A1gMXGeMWTzgtJuAw9baBcBtwLe9/Z3AV4EvDPHy11trT/WWQ6NfepnozltUyB9uXUVhVgo3/OJl7nh6J9GohcxCuOY+uPqncOgN+PFqePlnENWEdCLSnwLiVDf/Irce0M0U3GQ1HeEID26oGOdCiYgc11YCu6y1e6y13cD9wFUDzrkKuNvbfhC4yBhjrLVt1trncEFRZFDzCzN59JOruOqUYr771A5uvHs9je3dYAycco0bmzj7bHj8C3Dvu6DxQLKLLCITiALiVDdtDuQvOOJ+iABLS3I4bXYu9720z/36KCIio6EEiP+LvMLbN+g51toeoAnIH8Zr/9LrXvpVY4wZjcLK5JQeCnDbNafyzXct5flddVxxx3O8XtHkDuaUwAcfgnfeDpUb4Ednw6v3HTHcRESmJgVEgfkXQvlzED7yB+kbzp7L3ro21u6qS0LBRERkBK631p4MnOMtHxrsJGPMzcaYV4wxr9TW1o5rAWV8GWP40Flz+P3H34a1lvfc+QK/Wbcfa61rTTz9I3DL81B0Cvzhk/Dba+GwZjAXmerGNCC+1UH43rEve/vfNMZc6u2b5Q3O32aM2WqM+cxYln/KmH8R9HTA/hePOPSOk2eSnxHi3hfLx71YIiLHqUpgVtzjUm/foOcYYwJADlCf6EWttZXeugX4Da4r62Dn/dRae4a19ozCwsK39AZkcjl1Vi6PffoczpyXx1ceeZ0v/H4zHd3eTKbTLS3IfQAAIABJREFU5sKH/wiX/RfseRa+fxr84VZo2JvMIotIEo1ZQDyWQfjeedcCS4DLgB95r9cDfN5auxg4C/jkIK8pIzV3NfiCsPuvRxxKCfi5duUsnn7jEAca2pNQOBGR4856YKExpswYE8LVd2sGnLMG+LC3/V7gr9YO3f/PGBMwxhR420HgSmDLqJdcJq28jBC/+uhKPn3RQh5+tYKrf/Q85XVt7qDPB2fdAp/aCGfcBJsfgO+fDo/cotlORaagsWxBfMuD8L3991tru6y1e4FdwEprbbW1diP0/kK6nSPHbchIpWTC7LMGDYgAHzhzDgb49br941suEZHjkDem8FbgCVw99oC1dqsx5t+NMf/gnfZzIN8Yswv4HNDbC8cYUw58F/iIMabC+6E0BXjCGLMZ2IRrgfzZeL0nmRz8PsPnLl7ELz6ygoPNnbzz+8/xxNaDfSfklMDl/w2f3Qxnfhy2PgI/OAMevhlqdySv4CIyrsYyIB7LIPyjPtfrjrocWDeKZZ665l8INVugufqIQyW5abz9pBn8bv1+OsO6ua6IyLGy1j5urV1krZ1vrf2Wt+9r1to13nantfZ91toF1tqV1to9cc+da63Ns9ZmWmtLrbXbvNlNT7fWLrPWLrHWfsZaqy9sGdQFJ0znj7eupqwwg4/du4H//NN2eiJxt7vImgmX/X8uKJ59K2z/I/xwJTx4IxzanryCi8i4mJST1BhjMoGHgM9aa5uHOEeD8EfixCvBF4D//dyg90S64ey5HG4P87+bjwyQIiIiMrnMykvn9x8/m+vPnM1P/raHD/58HYdaBkxWlzkdLvkmfPZ1WP1PsOMJ+NFZ8MANcPD15BRcRMbcWAbEYxmEP+RzvbEVDwG/ttY+PNQ/rkH4I1S4CC75Frz5OKz9zhGHVy3IZ15hBve8pNnNREREjgcpAT/fuvpkvvO+U9h0oJEr73iOtTsH+VE9owDe/nUXFM/9F9j9DPx4Ndx/PVRtGv+Ci8iYGsuAeCyD8NcA13qznJYBC4GXvfGJPwe2W2u/O4Zln5rO/Bgsuwae+RbsfKrfodhU2a8daGRzRWOSCigiIiKj7T2nl/LIJ1aRmRrgQz9/mc8/8BqH27qPPDE9Dy78VxcUz/8KlK+Fn54Hv7kGKjaMf8FFZEyMWUA8lkH41tqtwAPANuDPwCe9sRSrcPd1utC7EfAmY8zlY/Uephxj4Mrvwcyl8NBN0LCn3+H3nF5KesjPPS+qFVFEROR4clJRNo9/+hw+ecF8/rCpkotv+xtrXqti0Mlz03Lh/C/CZ7fAhV+FA+vgrgvhnqtg799h6Al3RWQSMAlmzT5unHHGGfaVV15JdjEmj8Pl8JPzILsE/s9TEMroPfSVR17noQ0VvPTli5iWEUpeGUVEBmGM2WCtPSPZ5ZgsVD/KYLZXN/OlhzbzWkUTF544nW++aykluWlDP6GrBV75Jbz4A2itgdIVcM7nYdFl7sdnEZkQhltHTspJamSMTZsL7/05HNoGaz7d75fAG86eQ1dPlAdeOTD080VERGTSOqkom4c/sYp/u+IkXtxdzyXf/Rt3v1BONDpEo0JKFqz6NHxmM1zxXRcSf3st3LkKXn8QIj3j+wZE5JgoIMrgFrwdLvoqbHkQXrqzd/eJM7NZOTeP+9btIzJURSEiIiKTmt9n+D/nzOPJfzqX0+ZM4+trtvLeH7/AjpqWoZ8UTIUVN8GnXoWrfwo24oas/OAM2PAr6Okat/KLyFungChDW/05d/uLJ/8N9q7t3f2hs+dwoKGDv+04lMTCiYiIyFiblZfOPTeu5LZrTmFvXRtX3LGW257aQVdPgtts+gNwyjVwy4twzX2QmgN//Azcfiq8+CPobhu/NyAiI6aAKEMzBt51J+TPh99/BJrcXUouXTKTwqwUTVYjIiIyBRhjuHp5KX/53HlcfnIRtz+9kyvueI4N+xoSP9Hng5PeCTc/Cx96xP098cSX4bal8Lf/HzoOj0fxRWSEFBAlsdRsuObXrlvIAx+Cni5CAR/XrZzN33bUsq9evwKKiIhMBfmZKdx+7XJ++dEVdHRHeO+PX+Rrf9hCS2c48RONgfkXwkcegxufdJPYPPMfcNvJ8NTXjpg1XUSSSwFRjq5wEVx9J1RugMf/GYAPrJyNzxjue0mtiCIiIlPJBSdM58l/OpePvG0u9760j0tu+ztPbD04+C0xBpp9Jlz/AHxsLSy8GJ6/A+5YDnddDOvvgvajtEqKyJhTQJThOemdbsrqjXfDhl8xMyeVS5fM4IFXKujoTjAOQURERI47GSkBvv7OJTx8y9vITg3ysXs38O47X+CF3XXDe4GiZfC+X8I/bYG3/1/oboX//Tz8zyL47XWw9VEId47tmxCRQSkgyvBd8K8w/yLXiljxCh86ay5NHWH++FpVsksmIiIiSbB89jQe+/Rq/vPdJ1Pd2MkHfraO6+96iVf3D3N8YU4prP4s3PKCa1U882NQuRF+/2EXFtd8Csqfg2h0bN+IiPQyw+oOMMnpRsCjqL0Bfno+RMLYm5/l0rveIBTw8cdbV2N0M1wRSbLh3gRYHNWPMpo6wxF+vW4/P3pmF/Vt3Vy8eAafv2QRJ87MHtkLRSOw92+w+QHYtgbCbZAzC05+Hyy7BqafODZvQOQ4N9w6Ui2IMjLpeW7K6o7DmAdv5IaVxWypbObVA43JLpmIiIgkUWrQz02ry/j7v1zAFy5ZxEt76nnH7Wv5zP2vUl43gkntfH43qc3VP4Z/3gnvvgsKT4Dnvwc/OhN+ci68+ENorh67NyMyhakFUd6a134H
j9xM94pbOO3lC1hcnM2vPrqC9FAg2SUTkSlMLYgjo/pRxlJjezc/+fsefvV8Od2RKO8/o5RPXbiQ4ty0t/aCLTWw5SHY/Duo3uT2FS+HEy6HE94BM5a6GVNFZFDDrSMVEOWt+9MXYd2Pefm0b3Pti7M4dVYuv/jICnLTQ8kumYhMUQqII6P6UcbDoZZOfvTMbn69bh/GGD545hw+ccF8CjJT3vqL1r4JbzwGb/4JKl4BrOuGesI73DJnNQT094hIPAXEOKoAx0gkDHf/A1S9yvPn/YaP/rmLsoIM7r1pJdOzU5NdOhGZghQQR0b1o4ynisPt3PH0Th7cUEFq0M+Nq8r4x3PnkZMWPLYXbqmBnU+4sLj7GejpgJRsWHCRa11c8HY3REZkilNAjKMKcAy11LhJazoa2LfkE/zDq6eRnZnBfTedyZz8jGSXTkSmGAXEkVH9KMmwu7aV257awWObq8lODXD9WXO44ew5FOW8xa6n8brb3QQ3bz4Ob/4Z2g6B8cOct/W1LubNO/Z/R2QSUkCMowpwjDVXwZ+/DNsepTNnHp9pvp6NgVO596aVI5+5TETkGCggjozqR0mmrVVNfP/pXTy57SDGGC4/uYiPrprLabOnjc4/EI1C1UYvLP4JDm1z+wtOcJPgzL8A5qyClMzR+fdEJjgFxDiqAMfJrr+4eyQ27OFJ32q+Hf0g//3RSzl9jrp1iMj4UEAcGdWPMhEcaGjn7hfK+d36A7R09XDqrFxuXF3GO5bOJOgfxQn3G/bCjj/Djidg/4vQ0wm+IMxaCfMucIGxeLmbRVXkOKSAGEcV4DgKd8Lzt2PXfof2iI/bo+9n1Qe+zHknFiW7ZCIyBSggjozqR5lIWrt6eGhDBb96oZy9dW3MzE7lQ2fP4QMrZzMtY5QnnAl3upC45xnY8yxUv+b2p+ZA2bkuMM4733VH1cyocpxQQIyjCjAJGvbQvebzhMr/yjY7h8MXfJtV578j2aUSkeOcAuLIqH6UiSgatTy74xC/eK6c53bVkRr0cfXyUm5cNZeFM7LG5h9tq4e9z7pJbvY8C00H3P7c2X2ti2XnabIbmdQUEOOoAkwSa2l77RE613yB/Gg9u2a9hwXX/Y++XEVkzCggjozqR5no3jzYwq9e2MvDGyvp6olyzsICblxVxnmLCvH5xqhlz1qo3+1aF3c/A+VroasZMDB9McxaAaUroHQl5C8A3yh2gxUZQwqIcVQBJldHSyPP/uwLXNz0EOFgNmmX/wecer2+UEVk1CkgjozqR5ksGtq6+e3L+7nnxXJqmruYV5DB9WfN4R9OKaYw6xjupzgckR432c2eZ2H/S+6+i11N7lhqLpSe4QXGFVByOqTljm15RN4iBcQ4qgCTLxyJ8p17H+HCPd9mpe9N7KwzMVd8F2YuTXbRROQ4ooA4MqofZbIJR6I8/no1v3y+nE0HGvH7DOctKuTdp5Xw9pNmkBochwlmolGo3wkHXoaK9W45tB2wgIHCE7zQuNKFxsIT9aO4TAgKiHFUAU4M0ajl/655nbb19/GN1PvJiLZiVv4jrLwZ8ucnu3gichxQQBwZ1Y8yme2saeHhVyt59NVKqps6yUoNcOWyIq5eXsqKudMw4zm5TGczVG5wrYsVXnDsOOyOpWRD8akwcxnMWAozT4aCRRAY5Yl3RI5CATGOKsCJw1rLbU/t4O6/buIH09ewuuVxjI3C7LNdt9Ml74KUMRqALiLHPQXEkVH9KMeDSNTy0p56Ht5YyZ+2VNPeHWFWXhpXLy/l3ctLmFuQMf6Fio1jjIXFyo2ulTHS5Y77gq5lcebJrjdVLDhqngYZQwqIcVQBTjw/f24v33xsG5fPsfzHvC3k7fg91O+CYAYsvgqWXw+z36YuGSIyIgqII6P6UY437d09PLH1IA9vrOS5XXVYC6fPmca7TyvhypOLyUkPJq9wkR73t07NFji4GQ5ucdutNX3nZJd4YdELjDNOhrwy3ZtRRoUCYhxVgBPTQxsq+LdHt9AdifKe5cV8fnETM3Y/CFsege4WmDYXTvkAnHqdm2ZaROQoFBBHRvWjHM8ONnXy6KZKHt5YwY6aVkJ+HxedNJ2rl5dw7qLC8RmvOByth+Dg615w3OK263aAjbjjwXSYfhLMWOIC44wlbtFkODJCCohxVAFOXLUtXdz57G7uW7cPay3XrpjNrecUM6PiKdh0H+z9O2DcTWuXfxBOvBJC6ckutohMUAqII6P6UaYCay1bq5p5eGMla16rpK61m/SQn/NPKOTSJTO54MTpZKcmsWVxMOFOqH3DC45bXXis2dI3rhEgZ5YXFpe69cyTIW+eWhtlSAqIcVQBTnzVTR384K+7+N36A/h9hhvOnsPHz5tPfk8NbPotbPo1NO5zA72XXO3CYukKGM8B6CIy4SkgjozqR5lqwpEoL+6u54mtB3lyWw21LV0E/Yaz5uVz6ZKZXLJ4BtOzU5NdzMFZCy3VfYHx4Ba3Hd/aGEj1WhuXuns2Tj/RjXXMKtLfTKKAGE8V4OSxv76d25/eySOvVpAa9HPjqjL+8Zx55KT6Yd/zLihu+wOE2yFzBsw7v2/JLk5m0UVkAlBAHBnVjzKVRaOWVw808uTWgzyx9SDl9e0YA8tn5XLpkplcumRmcia4GameLtfaWLPVLbHuqu31feek5Ljbb8QCY2zJLlZwnEIUEOOoApx8dh1q5Xt/2cFjm6vJSg1w8znz+OjqMjJTAtDVAtv/CLuedjetba9zTypYBPMucGFx7ipIzUniOxCRZFBAHBnVjyKOtZYdNa084YXFrVXNAJwwI4tLlszg0iUzWVKcPb63zjhWrbUuOMaWQ9469ncTuJ5ZhSf0D42FiyCrGPyB5JVdxsSECIjGmMuA2wE/cJe19r8GHE8B7gFOB+qBa6y15d6xLwM3ARHg09baJ7z9vwCuBA5Za4d1l3VVgJPX9upmvvvUDp7aVsO09CC3nD+fD501l7SQ178+GoVD21xQ3POsa2UMt4PxQ8npfa2LpSt0vyGRKUABcWRUP4oM7kBDO09uq+HJrQdZX95A1EJJbhoXnjid1QsLOHt+/sQbtzhcbXVeYNwOtW/2Bci22r5zjN91S80pgZxSN7tqziz3OLadnqfWx0km6QHRGOMHdgAXAxXAeuA6a+22uHM+ASyz1n7cGHMtcLW19hpjzGLgt8BKoBj4C7DIWhsxxpwLtAL3KCBOHZsONPLdp3bw9x21FGalcOsFC7hmxawjZyDr6XL3G4oFxsoNYKNuBrA5q7zWxdVuMLd/kn6xi8iQFBBHRvWjyNHVt3bx9PZDPLntIC/srqe9O4LfZ1g+K5dzFhZyzqIClpXkEPBP8ltztdW7oFi3A5oroanCLc2V0FTZdw/HmEBa/8CYUwLTyiB/PuQv0D0dJ6CJEBDPBr5hrb3Ue/xlAGvtf8ad84R3zovGmABwECgEvhR/bvx53uO5wGMKiFPPy3sb+J8n3+TlvQ2kh/y8/aQZvPOUYs5dVEBKYJBZuzoaXatiLDDW7XD7/Slutq/iU6F4uVsKTlB3CpFJTgFxZFQ
/ioxMd0+UjfsPs3ZnLWt31vF6ZRPWQlZqgFXzCzhnUQHnLixkVt5xNuO6ta7lselAX2Ds3a5wj1sPuh/lY9KmuaCY5wXGWHDMmwcpmcl7L1PYcOvIsfxruAQ4EPe4AjhzqHOstT3GmCYg39v/0oDnloxdUWWyWFmWx+9uPot1exv4w6ZK/rTlIGteqyIrNcAli2dy5SlFrF5QQDD2K15aLpx4hVvAfYkdWAdVr0LVJnjtd7D+LncskAZFy/oCY/Fy90Wm6aJFREQECAV8nDUvn7Pm5fPPl8Lhtm6e313H2h11rN1Zy5+3HgRgTn465yws4JyFhZO7O2qMMZBZ6JaS0wY/JxKGxv1Qvytu2Q3lz8Hm+/ufm1XUFxrz5rvQmFMC2aWQng++Sd4aO8kdt80lxpibgZsBZs/WTdaPJ8aY3i/nf79qKc/vquOxzdU8sfUgD22sIDc9yGVLZnLlsmLOmpfXv8tHTqlblr7HPY5GoWG3Fxi9ZeM9sO7H7ngoE4pOcWGx6FQ3dXT+AghO0CmwRUREZNxMywhx5bJirlxWjLWWPXVtrN3hWhcf3ljJfS/tx+8znFKaw4qyPFbOzeOMOXnkpE/ywDgYf9BrJZwPXNr/WHc7NOzpC46x7e1/7D/bKoA/5GZXzY51Xy05cjs9X+Mfx5C6mMpxo6snwtoddfxxcxV/2VZDW3eEgswQly11YXHl3Dx8vmF8mUQjritqfGg8+Dr0dLrjxge5c7xZv05wXVMLT3CzqKZmj+2bFJGE1MV0ZFQ/ioyd7p4or+4/zN931vLSngY2VzQSjliMcbOjrpib1xsaZ+ZM4R+e2xvgcDk0V7kuq7EurLHt5mqIhvs/x5/iQmROqWuNzC5yM69mzXT7s4rctuab6GcijEEM4CapuQioxE1S8wFr7da4cz4JnBw3Sc27rbXvN8YsAX5D3yQ1TwMLrXV3AVVAlKPpDEd45o1DPLa5mqffqKEzHGVGdgqXn1zEZUtmcurs3MHHLA4lEu6b6atuh9uu2+F+/Yp0952XVeSCYmya6Fh4zCjUL10i40ABcWRUP4qMn85whFf3N7K+vIH15Q1s3HeYtm53g/tZeWmsmOvC4oqyPOYVZEyuW2qMpWgU2g7FBccqaK7oC5Et1dBysP/fYwAY9/dXdpEXGIv6wmN2kbufdnq+WwIpSXlr4y3pAdErxOXA93C3ufiFtfZbxph/B16x1q4xxqQC9wLLgQbgWmvtHu+5/wrcCPQAn7XW/snb/1vgfKAAqAG+bq39eaJyqAKc2tq6enj6jUM89loVz+6opbsnSkrAx/LZuZxZ5rqqLp+de+SMqMMR6XG/etW92RcaY+vu1r7zUrIhd7ZrecydDdO8dWyfWh5FRoUC4siofhRJnp5IlG3Vzby8t8ELjYdpaHMhpyAzxBlzXFg8dVYuS4qz39rfKVOFta6ranOVFxirXctjS5W3rnbHOhoGf34oy826mlHQFxoHLrFjaXnu77ZJ2Do5IQLiRKEKUGKaO8O8uLuedXsaWLe3nm3VzVgLIb+PU2flcua8PM4sy+e0Obmkh45hiK617osoFhwb9rqB24374PA+CLf1Pz81Ny40znHLtDlu2ujsYkjNUQukyDAoII6M6keRicNay+7aNhcW9zbwcnkDFYc7AAj4DItmZHHKrByWleayrDSHRTOy+iblk+EJd7rZVpurXatke727vUd7/FLnur2217t7aw8lkAYpWQOWbBceB9ufkuVmdo0FztTccZ+MRwExjipAGUpTR5hXyhtYt7eBdXvq2VLVTCRqCfgMy0pzOHNePmeW5XHG3DwyU0ZpTidr3RdP4z5v2e9CY+P+vqWno/9zgulHdo3IKu6/zpwxKX/NEhlNCogjo/pRZGI72NTJaxWNbK5oZHNFE5srmmjqcOPxUgI+Fhdnc4oXGJeV5jKvIGN48y3I8HS3u1bHtjovPHrBsasFupq99RDbnc3gRscNzvj6B8b0fNeKmT5YK2aeazw4xkCpgBhHFaAMV2tXT29gfGlPPa9XNNETtfh9hqXF2SwtyWFxcTaLi7I5cWY2aaEx6O5hLbTVutDYtD+ua0RlX3eJofraZ07vC5KZ0yFjuut/n1no1rElbZpaJOW4pIA4MqofRSYXay37G9p5raKJzQdcaNxS1US7N5YxKyXA0pIclpXmcHJpDouLspmTn4FfoXH8WQvhjrjg2OTuzx0LmYmWaM+Rr/eVaggd2/01FRDjqAKUt6q9u4cN+w6zbo8bH7CtqpmWLvc/rTFQVpDB4qJsFhdnc1JRNkuKsinMShn7geUD+9r3riv7AmWr13WCQf4f9wW8sFgweIhMy3O/VsXWSegGIfJWKCCOjOpHkckvErXsrm3ltQOxVsZGtle30B1xN61PC/o5sSiLxUXub5XFxdmcODPr2IbSyNix1rVCxrdYtjfAqdcd80srIMZRBSijxVpLxeEOtlU3s62qmW3VzWyvbu4dIwBuYPlJRdm9wXFxUTZlBRn978c4XqIRr399rQuMbXVuu+2Qt6/We1zn9sVu5TGQ8bmQGOvmkJbnbU/rC5Hp+W6sZGxJyXZrnwbVy/hRQBwZ1Y8ix6funig7D7WwraqZ7dUtbKtuYltVM82dcT9y52dwUnH/v1emj8eP3JI0CohxVAHKWGvqCLPdC4ux4LizprX317uAzzA7L52yggzKCjKYW5DBvIIMygozmJGVOjHGC1jrZl5tq4X2w67PfexXqyO2G/p+1Yp0JX7dUJYbsD1YeEzN8QZzxwZ157hB3L0DvLMhlKnWSxk2BcSRUf0oMnVYa6ls7HCBsarZhcbqZg409P3InZ/hfuReOCOTRTOyWDQjk4UzsshO1RwHx4Ph1pFqWxYZBTlpQc6a526ZEROORNld28q2qmZ2HWplb10be+vaeH53HZ3haO95aUE/c/LTmVeY4QXITMoK0ikryGRaenD8fskzpm+2rbxhPsdaN8NXLDh2NnlLc992V9x2Z5PrDtv1Rt95iQZwu4L1nwEsPjymZLkAGcqIWzJdH/3ebW9/MLZOV+AUEZEpxxhD6bR0Sqelc/HiGb37mzvDvFHdwraqJq9nVAv3v3yAjnBf/TwzO7VfaFww3a2zFByPSwqIImMk6Pdx4kw3mU28aNRysLmTvXVt7Klro9wLjturW3hiaw2RaF+rfnZqgNJp6ZRMS6N0WholuW5dOi2dktw0csczQA7GmL4Aljtr5M+PtVrGZvuKDeLufdw8+LH2Bnf/ya4WN8NYdyuDjrUcSjDDhchgmguM/dbD2BdIddsD18E0N+11MNWtFURFRGSCy04NsrIsj5Vlfb8OR6OutXFHTQs7alrZWdPCjkMt/Hrdvn4/chflpLJwRhaLprvwOH96BnPzM8jLCKmr6iSmgCgyznw+Q3FuGsW5aaxaUNDvWDgSpeJwB3vrWtlT20Z5fRuVhzvYV9/G87vqemcpi8kI+SnpDY79g2RRThoFmaHkjH0crvhWy+zit/46sZnCuttcWAy39213tw2ytLrzw+3e0uGWjsOuhTO2r7vd3bPSRo
9ehsH4Q3GBMRYmU9z2kOtB9vlD3mulxK2D4E/p2+cPQSAUt8877g8pqE5AxpjLgNsBP3CXtfa/BhxPAe4BTgfqgWusteXGmHzgQWAF8Ctr7a1xzzkd+BWQBjwOfMZOhXEkIjLqfD7DrLx0ZuWlc9FJfa2N0aibi2GHFxh31rSyo6aFe/fU09XTV1dmpwYoK8ykLN/1iCorzKAsP4O5BelqdZwEFBBFJpCg39c7TvHCE/sfs9bS2B6msrGDisPtVBzuoOJwB5WNHVQe7mDDvsO9g89jjIGCzBRmZKcwPSs1bp3K9KwUZmS7ffmZKZN7CmxjvG6l6UDh6L62tRAJxwXJdjeZT7jT3a9yyLW39HT2rXu6vMXb7qrt/zh+PdgU12+VLzggVIb6h8uhAqc/6C0hN/Nt73aw71j8duy82Lmx7eE+jnUPPs4ZY/zAD4GLgQpgvTFmjbV2W9xpNwGHrbULjDHXAt8GrgE6ga8CS70l3p3APwLrcAHxMuBPY/leRGRq8fkMs/PTmZ2fztvjuqlGopYDDe1H9I5aX36YRzdV9XuNwqwUyvK9YTWFrsWxrCCDWXlpmll1gtB/BZFJwhjDtIwQ0zJCLC3JGfSc5s4wlYddYDzY3Mmh5k4OtXRR09zJwaZONlc0Utc68P6J4OsNki445meGKMh0wbGgd9utp6WHJneYHCljXKAKhCAtd/z+3UiPmwAoFhgj3dDT7e3rdo97t7uOfnzI87r6gmtXS//zIj1uHQ27kBwJu+2xcsG/wnn/MnavP3GsBHZZa/cAGGPuB64C4gPiVcA3vO0HgR8YY4y1tg14zhizIP4FjTFFQLa19iXv8T3Au1BAFJFx4PcZ5nqT8F0w4FhnOMK++nbXOyouPD79Rg11r/T/m6QwK4XZeenM9lovY9tz8tMpzEyZGJP6TQEKiCLHkezUINlFQU4qyh7ynHAkSl1rFzXNLjjGh8ia5i6qmjp5vbKJ+rbufuMhY4yBvPT+oTG2zk0PMi09RG5akJzYdnouJCQiAAAM+0lEQVSQtKBfYxFGyh9wSygj2SXpz1rXuhkJe+HRC5GRcN92tMfb9tZR71g00ndeNDzgcQ8UL0/2uxsvJcCBuMcVwJlDnWOt7THGNAH5QF2C16wY8Jolg51ojLkZuBlg9uzZIy27iMiIpAb9nDAzixNmZh1xrLkz3BsYDzS0s99bXt7bwKObKonvJJ8S8PULjbHtktw0inNTyUlL8rwMxxEFRJEpJuj3UZTjxigmEo1amjrC1Ld1UdfaTV1rF/Wt3dS3dlHrrevbunmtopH61m5au4buEhny+8hND7olLdS37QXInDS3ZKd6a+9xVmqA4EQeQzkVGdPXpZTjvzvo8cha+1Pgp+Buc5Hk4ojIFJadGmRZaS7LSo/sodPVE6HycAf7G9r7hcd99e2s21NP24B5GdJDfopyUt08Dzluroei3FRv2+1PDerezMOhgCgig/L5+rq0Lph+9PM7wxEa28M0dnRzuC1MU0c3je1hDnv7Gtu8dXuYffXtvFbRzeH2MN09iSeAyQj5ewNjdqoLj9lpgd7HWakBslIDZKYEyfS2s1ICZKYGyEwJkBEKqEuKTDSVQPy0v6XevsHOqTDGBIAc3GQ1iV6z9CivKSIyaaQE/MwrzGReYeYRx6y1NLR1s7+hnarGTqqb3JwM1Y2dVDW5ez3WtR55n+a8jFBviCzKSWVmTiozs/uvNQ5SAVFERklq0M/MHD8zc1JH9LyO7ghNHWGaOsI0d4Zpanfr5o4wTR09bl9H7HHYu8mve9ySoNUyxhjIDPUFxqzUAJmpQTJT/KSHAmSE/KSnBEgPunXscUbIO57Sf50e8qtVU47VemChMaYMF+KuBT4w4Jw1wIeBF4H3An9NNCOptbbaGNNsjDkLN0nNDcD3x6LwIiLJZowh35srYfkQPeW7eiIcbOqkqrGTqsYOqps6qGpy2/vq21i3p/6Iyf3AzcBalJPGjJxUirJT3XpAkEz6bcbGmAKiiCRVWshPWmjkwRLcrGlt3T20dPbQ2tlDa1eYlk7vcZfb19LpgmSrt6+ls4em9m6qGiO0d/XQ1h2hoztCd2T4t7II+g2pQT/pXoiMbacF3XtJ8x4P3J8ajC0+UgNx2946ZcA+BdHjkzem8FbgCdxtLn5hrd1qjPl34BVr7Rrg58C9xphdQAMuRAJgjCkHsoGQMeZdwCXeDKifoO82F39CE9SIyBSWEvAzJz+DOflDj+Vv7+7hYJObyO9gcyfVTZ3UxK23VzdT19rFwJ/nQgEf07NS4maET6UwK+WIWeIn67hIBUQRmbT8PuO6nY7CPZW6e6J0dEdo6+6hvbuHti5vO7bujtDW5dYdYRcqO7ojtMe2w+55da1ddIYj7jzv+GCT/Qz3/aUGfL3BMiXgIxTwkeJtx/a5xU+KFzxTgnH7Aj7vcdy5cc8LDXFeyO9T19wxZK19HHcrivh9X4vb7gTeN8Rz5w6x/xWOvPWFiIgMIT0UGLIba0w4EuVQS1e/IHmopZND3mR/Ow+18tyuOloGaY0cGCQLs1IozEyhIH6d5WaMTwlMnPGRCogiIrgv8VDAR0766N/ANxY+O3sidIYjdIaj3jpCZ0/fdlc4Osg5bl+3d15XT9Qt4QjNHeHebbffvUZXT3RELaJDCfoNIb+v99qEvOAYCvgJ+c2AfbH9PkIBQ9DvI+DzEQy41+i/bQgGfAT9PoJ+4619LJyeuJIWEREZb0G/j5LcNEpyE0/u19Ed4VBLZ98s8S1dHGru7N1+s6aF53fVDdqtFVzX1oIBAbIvUIZYvaCQUGB8ehYpIIqIjLHe8Mnoh8+hRKKWLi9YdvVEvbULnL1hsifqBcr+wTN2fnfEWw943NW77VpJmyJhwj2293g4Elss4Yg7d+jRc30+f/EiPnXRwrG/OCIiIqMsLXT0Lq3gJvWrb+umtqWLupYualvj1q1d1LZ0sa2qmdqWrn4zxL/xzcvG+i30UkAUETkO+X3Gm1Qn2SVxIlHbLzj2eMExFiLDkSiFmSnJLqaIiMiYSg36h9UiCS5M1ra44Diet+hQQBQRkTHn9xn8Pr/uQSUiIjJMqUE/s/LSmZU3vvcd1hR5IiIiIiIiAiggioiIiIiIiEcBUURERERERAAFRBEREREREfEoIIqIiIiIiAiggCgiIiIiIiIeBUQREREREREBFBBFRERERETEo4AoIiIiIiIigAKiiIiIiIiIeIy1NtllGHPGmFpg3zG+TAFQNwrFOV7p+iSm65OYrk9iuj6JxV+fOdbawmQWZjJR/TgudH2OTtcoMV2fxHR9EhtxHTklAuJoMMa8Yq09I9nlmKh0fRLT9UlM1ycxXZ/EdH2SS9c/MV2fo9M1SkzXJzFdn8TeyvVRF1MREREREREBFBBFRERERETEo4A4fD9NdgEmOF2fxHR9EtP1SUzXJzFdn+TS9U9M1+fodI0S0/VJTNcnsRFfH41BFBEREREREUAtiCIiIiIiIuJRQDwKY8xlxpg3jTG7jDFfSnZ5JiJjTLkx5nVjzCZjzCvJLk+yG
WN+YYw5ZIzZErcvzxjzlDFmp7eelswyJtMQ1+cbxphK7zO0yRhzeTLLmEzGmFnGmGeMMduMMVuNMZ/x9uszRMLro89QEqiOTEz1Y3+qHxNT/ZiY6sfERrN+VBfTBIwxfmAHcDFQAawHrrPWbktqwSYYY0w5cIa1VvegAYwx5wKtwD3W2qXevv8GGqy1/+X9ETXNWvvFZJYzWYa4Pt8AWq21/5PMsk0ExpgioMhau9EYkwVsAN4FfAR9hhJdn/ejz9C4Uh15dKof+1P9mJjqx8RUPyY2mvWjWhATWwnsstbusdZ2A/cDVyW5TDLBWWv/DjQM2H0VcLe3fTfuf9gpaYjrIx5rbbW1dqO33QJsB0rQZwhIeH1k/KmOlBFR/ZiY6sfEVD8mNpr1owJiYiXAgbjHFegPkcFY4EljzAZjzM3JLswENcNaW+1tHwRmJLMwE9StxpjNXhebKdk9ZCBjzFxgObAOfYaOMOD6gD5D40115NGpfjw6fbcdnb7bBlD9mNix1o8KiDIaVltrTwPeAXzS6yIhQ7CuX7f6dvd3JzAfOBWoBr6T3OIknzEmE3gI+Ky1tjn+mD5Dg14ffYZkIlL9OAL6bhuUvtsGUP2Y2GjUjwqIiVUCs+Iel3r7JI61ttJbHwIewXU7kv5qvL7hsT7ih5JcngnFWltjrY1Ya6PAz5jinyFjTBD35f5ra+3D3m59hjyDXR99hpJCdeRRqH4cFn23JaDvtv5UPyY2WvWjAmJi64GFxpgyY0wIuBZYk+QyTSjGmAxvICzGmAzgEmBL4mdNSWuAD3vbHwb+kMSyTDixL3bP1Uzhz5AxxgA/B7Zba78bd0ifIYa+PvoMJYXqyARUPw6bvtsS0HdbH9WPiY1m/ahZTI/Cmwr2e4Af+IW19ltJLtKEYoyZh/tVFCAA/GaqXyNjzG+B84ECoAb4OvAo8AAwG9gHvN9aOyUHog9xfc7HdX2wQDnwsbjxBFOKMWY1sBZ4HYh6u7+CG0cw5T9DCa7PdegzNO5URw5N9eORVD8mpvoxMdWPiY1m/aiAKCIiIiIiIoC6mIqIiIiIiIhHAVFEREREREQABUQRERERERHxKCCKiIiIiIgIoIAoIiIiIiIiHgVEkQnIGBMxxmyKW740iq891xgzZe+jJCIik5vqSJGxFUh2AURkUB3W2lOTXQgREZEJSHWkyBhSC6LIJGKMKTfG/Lcx5nVjzMvGmAXe/rnGmL8aYzYbY542xsz29s8wxjxijHnNW97mvZTfGPMzY8xWY8yTxpg07/xPG2O2ea9zf5LepoiIyIipjhQZHQqIIhNT2oDuM9fEHWuy1p4M/AD4nrfv+8Dd1tplwK+BO7z9dwB/s9aeApwGbPX2LwR+aK1dAjQC7/H2fwlY7r3Ox8fqzYmIiBwD1ZEiY8hYa5NdBhEZwBjTaq3NHGR/OXChtXaPMSYIHLTW5htj6oAia23Y219trS0wxtQCpdbarrjXmAs8Za1d6D3+IhC01v6HMebPQCvwKPCotbZ1jN+qiIjIiKiOFBlbakEUmXzsENsj0RW3HaFvPPIVwA9xv6SuN8ZonLKIiEwmqiNFjpECosjkc03c+kVv+wXgWm/7emCtt/00cAuAMcZvjMkZ6kWNMT5glrX2GeCLQA5wxC+0IiIiE5jqSJFjpF8+RCamNGPMprjHf7bWxqbxnmaM2Yz7hfM6b9+ngF8aY/4ZqAU+6u3/DPBTY8xNuF9BbwGqh/g3/cB9XgVpgDustY2j9o5ERERGh+pIkTGkMYgik4g3vuIMa21dsssiIiIykaiOFBkd6mIqIiIiIiIigFoQRURERERExKMWRBEREREREQEUEEVERERERMSjgCgiIiIiIiKAAqKIiIiIiIh4FBBFREREREQEUEAUERERERERz/8DXdtmVXAzSiwAAAAASUVORK5CYII=\n",
+      "text/plain": [
+       "<Figure size 1080x360 with 2 Axes>"
+      ]
+     },
+     "metadata": {
+      "needs_background": "light"
+     },
+     "output_type": "display_data"
+    }
+   ],
+   "source": [
+    "if ae_batch_multiple:\n",
+    "\n",
+    "    for ae_batch_size_list in ae_batch_list:\n",
+    "\n",
+    "        autoencoder_deep1.set_weights(autoencoder_clean_weights)\n",
+    "\n",
+    "        autoencoder_deep1_train = autoencoder_deep1.fit(train_data, train_data,\n",
+    "                                                        epochs = ae_epochs_list,\n",
+    "                                                        batch_size = ae_batch_size_list,\n",
+    "                                                        validation_data = (vali_data, vali_data),\n",
+    "                                                        shuffle = True,\n",
+    "                                                        callbacks = [mc])\n",
+    "        \n",
+    "        plot_epochs = range(ae_epochs)\n",
+    "        plot_loss = autoencoder_deep1_train.history[\"loss\"]\n",
+    "        plot_val_loss = autoencoder_deep1_train.history[\"val_loss\"]\n",
+    "        plot_mae = autoencoder_deep1_train.history[\"mae\"]\n",
+    "        plot_val_mae = autoencoder_deep1_train.history[\"val_mae\"]\n",
+    "\n",
+    "        plt.figure(figsize = (15, 5))\n",
+    "\n",
+    "        ax = plt.subplot(1, 2, 1)\n",
+    "        plt.plot(plot_epochs, plot_loss, label = \"Loss\")\n",
+    "        plt.plot(plot_epochs, plot_val_loss, label = \"Validation Loss\")\n",
+    "        plt.legend()\n",
+    "        plt.xlabel(\"Epochs\")\n",
+    "        plt.ylabel(\"MSE Losses\")\n",
+    "\n",
+    "        ax = plt.subplot(1, 2, 2)\n",
+    "        plt.plot(plot_epochs, plot_mae, label = \"Loss\")\n",
+    "        plt.plot(plot_epochs, plot_val_mae, label = \"Validation Loss\")\n",
+    "        plt.legend()\n",
+    "        plt.xlabel(\"Epochs\")\n",
+    "        plt.ylabel(\"MAE Losses\")\n",
+    "\n",
+    "        if batch_normalization and dropout > 0.0:\n",
+    "            plt.savefig(\"Plots/model_ae_deep1_BN_DO\" + str(dropout + 100) + \"_BS\" + str(ae_batch_size_list) + \".png\")\n",
+    "        \n",
+    "        elif batch_normalization:\n",
+    "            plt.savefig(\"Plots/model_ae_deep1_BN_BS\" + str(ae_batch_size_list) + \".png\")\n",
+    "        \n",
+    "        elif dropout > 0.0:\n",
+    "            plt.savefig(\"Plots/model_ae_deep1_DO\" + str(dropout + 100) + \"_BS\" + str(ae_batch_size_list) + \".png\")\n",
+    "        \n",
+    "        else:\n",
+    "            plt.savefig(\"Plots/model_ae_deep1_BS\" + str(ae_batch_size_list) + \".png\")\n",
+    "        \n",
+    "else:\n",
+    "    \n",
+    "    autoencoder_deep1.set_weights(autoencoder_clean_weights)\n",
+    "\n",
+    "    autoencoder_deep1_train = autoencoder_deep1.fit(train_data, train_data,\n",
+    "                                                    epochs = ae_epochs,\n",
+    "                                                    batch_size = ae_batch_size,\n",
+    "                                                    validation_data = (vali_data, vali_data),\n",
+    "                                                    shuffle = True,\n",
+    "                                                    callbacks = [mc])\n",
+    "    plot_epochs = range(ae_epochs)\n",
+    "    plot_loss = autoencoder_deep1_train.history[\"loss\"]\n",
+    "    plot_val_loss = autoencoder_deep1_train.history[\"val_loss\"]\n",
+    "    plot_mae = autoencoder_deep1_train.history[\"mae\"]\n",
+    "    plot_val_mae = autoencoder_deep1_train.history[\"val_mae\"]\n",
+    "\n",
+    "    plt.figure(figsize = (15, 5))\n",
+    "\n",
+    "    ax = plt.subplot(1, 2, 1)\n",
+    "    plt.plot(plot_epochs, plot_loss, label = \"Loss\")\n",
+    "    plt.plot(plot_epochs, plot_val_loss, label = \"Validation Loss\")\n",
+    "    plt.legend()\n",
+    "    plt.xlabel(\"Epochs\")\n",
+    "    plt.ylabel(\"MSE Losses\")\n",
+    "\n",
+    "    ax = plt.subplot(1, 2, 2)\n",
+    "    plt.plot(plot_epochs, plot_mae, label = \"Loss\")\n",
+    "    plt.plot(plot_epochs, plot_val_mae, label = \"Validation Loss\")\n",
+    "    plt.legend()\n",
+    "    plt.xlabel(\"Epochs\")\n",
+    "    plt.ylabel(\"MAE Losses\")\n",
+    "\n",
+    "    plt.show()"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 26,
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "Model: \"model_4\"\n",
+      "_________________________________________________________________\n",
+      "Layer (type)                 Output Shape              Param #   \n",
+      "=================================================================\n",
+      "input_8 (InputLayer)         (None, 64, 64, 1)         0         \n",
+      "_________________________________________________________________\n",
+      "model_1 (Model)              (None, 256)               1048832   \n",
+      "_________________________________________________________________\n",
+      "model_3 (Model)              (None, 64, 64, 1)         1052672   \n",
+      "=================================================================\n",
+      "Total params: 2,101,504\n",
+      "Trainable params: 2,101,504\n",
+      "Non-trainable params: 0\n",
+      "_________________________________________________________________\n"
+     ]
+    }
+   ],
+   "source": [
+    "autoencoder_deep1 = load_model(\"Modelos/model_autoencoder_deep1.h5\")\n",
+    "autoencoder_simple.summary()"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 27,
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "Model: \"model_1\"\n",
+      "_________________________________________________________________\n",
+      "Layer (type)                 Output Shape              Param #   \n",
+      "=================================================================\n",
+      "input_3 (InputLayer)         (None, 64, 64, 1)         0         \n",
+      "_________________________________________________________________\n",
+      "flatten_2 (Flatten)          (None, 4096)              0         \n",
+      "_________________________________________________________________\n",
+      "dense_2 (Dense)              (None, 256)               1048832   \n",
+      "_________________________________________________________________\n",
+      "activation_1 (Activation)    (None, 256)               0         \n",
+      "=================================================================\n",
+      "Total params: 1,048,832\n",
+      "Trainable params: 1,048,832\n",
+      "Non-trainable params: 0\n",
+      "_________________________________________________________________\n"
+     ]
+    }
+   ],
+   "source": [
+    "encoder_deep1 = autoencoder_deep1.get_layer(index = 1)\n",
+    "encoder_deep1.save(\"Modelos/model_encoder_deep1.h5\")\n",
+    "encoder_deep1.summary()"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 28,
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "Model: \"model_3\"\n",
+      "_________________________________________________________________\n",
+      "Layer (type)                 Output Shape              Param #   \n",
+      "=================================================================\n",
+      "input_7 (InputLayer)         (None, None, 256)         0         \n",
+      "_________________________________________________________________\n",
+      "dense_6 (Dense)              (None, None, 4096)        1052672   \n",
+      "_________________________________________________________________\n",
+      "activation_5 (Activation)    (None, None, 4096)        0         \n",
+      "_________________________________________________________________\n",
+      "reshape_2 (Reshape)          (None, 64, 64, 1)         0         \n",
+      "=================================================================\n",
+      "Total params: 1,052,672\n",
+      "Trainable params: 1,052,672\n",
+      "Non-trainable params: 0\n",
+      "_________________________________________________________________\n"
+     ]
+    }
+   ],
+   "source": [
+    "decoder_deep1 = autoencoder_deep1.get_layer(index = -1)\n",
+    "decoder_deep1.save(\"Modelos/model_decoder_deep1.h5\")\n",
+    "decoder_deep1.summary()"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "## Autoencoder Deep 2"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Podemos añadir más capas al encoder y al decoder."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 34,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "batch_normalization = False\n",
+    "dropout = 0.0"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "### Encoder Deep 2"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 35,
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "Model: \"model_6\"\n",
+      "_________________________________________________________________\n",
+      "Layer (type)                 Output Shape              Param #   \n",
+      "=================================================================\n",
+      "input_6 (InputLayer)         (None, 64, 64, 1)         0         \n",
+      "_________________________________________________________________\n",
+      "flatten_3 (Flatten)          (None, 4096)              0         \n",
+      "_________________________________________________________________\n",
+      "dense_7 (Dense)              (None, 512)               2097664   \n",
+      "_________________________________________________________________\n",
+      "activation_7 (Activation)    (None, 512)               0         \n",
+      "_________________________________________________________________\n",
+      "dense_8 (Dense)              (None, 256)               131328    \n",
+      "_________________________________________________________________\n",
+      "activation_8 (Activation)    (None, 256)               0         \n",
+      "=================================================================\n",
+      "Total params: 2,228,992\n",
+      "Trainable params: 2,228,992\n",
+      "Non-trainable params: 0\n",
+      "_________________________________________________________________\n"
+     ]
+    }
+   ],
+   "source": [
+    "encoder_input = Input(shape = input_shape)\n",
+    "\n",
+    "x = encoder_input\n",
+    "\n",
+    "x = Flatten()(x)\n",
+    "\n",
+    "x = Dense(units = 2 * encoding_dim)(x)\n",
+    "x = Activation(activation = \"relu\")(x)\n",
+    "x = BatchNormalization()(x) if batch_normalization else x\n",
+    "x = Dropout(dropout)(x) if dropout > 0.0 else x\n",
+    "\n",
+    "x = Dense(units = encoding_dim)(x)\n",
+    "x = Activation(activation = \"relu\")(x)\n",
+    "x = BatchNormalization()(x) if batch_normalization else x\n",
+    "x = Dropout(dropout)(x) if dropout > 0.0 else x\n",
+    "\n",
+    "encoder_output = x\n",
+    "\n",
+    "encoder_deep2 = Model(inputs = encoder_input, outputs = encoder_output)\n",
+    "encoder_deep2.summary()"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "### Decoder Deep 2"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 36,
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "Model: \"model_7\"\n",
+      "_________________________________________________________________\n",
+      "Layer (type)                 Output Shape              Param #   \n",
+      "=================================================================\n",
+      "input_7 (InputLayer)         (None, 256)               0         \n",
+      "_________________________________________________________________\n",
+      "dense_9 (Dense)              (None, 512)               131584    \n",
+      "_________________________________________________________________\n",
+      "activation_9 (Activation)    (None, 512)               0         \n",
+      "_________________________________________________________________\n",
+      "dense_10 (Dense)             (None, 4096)              2101248   \n",
+      "_________________________________________________________________\n",
+      "activation_10 (Activation)   (None, 4096)              0         \n",
+      "_________________________________________________________________\n",
+      "reshape_3 (Reshape)          (None, 64, 64, 1)         0         \n",
+      "=================================================================\n",
+      "Total params: 2,232,832\n",
+      "Trainable params: 2,232,832\n",
+      "Non-trainable params: 0\n",
+      "_________________________________________________________________\n"
+     ]
+    }
+   ],
+   "source": [
+    "decoder_input = Input(shape = (encoder_deep_2.output_shape[1],))\n",
+    "\n",
+    "x = decoder_input\n",
+    "\n",
+    "x = Dense(units = 2 * encoding_dim)(x)\n",
+    "x = Activation(activation = \"relu\")(x)\n",
+    "x = BatchNormalization()(x) if batch_normalization else x\n",
+    "x = Dropout(dropout)(x) if dropout > 0.0 else x\n",
+    "\n",
+    "x = Dense(units = image_dim)(x)\n",
+    "x = Activation(activation = \"sigmoid\")(x)\n",
+    "x = BatchNormalization()(x) if batch_normalization else x\n",
+    "x = Dropout(dropout)(x) if dropout > 0.0 else x\n",
+    "\n",
+    "x = Reshape(target_shape = input_shape)(x)\n",
+    "\n",
+    "decoder_output = x\n",
+    "\n",
+    "decoder_deep2 = Model(inputs = decoder_input, outputs = decoder_output)\n",
+    "decoder_deep2.summary()"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "### Optimizador Deep 2"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 38,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "from keras.optimizers import Adam"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 39,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "adam_learning_rate = 0.00015  # El learning rate de Adam (tamaño step)\n",
+    "adam_epsilon = 1e-8  # Previene problemas de división por 0.\n",
+    "adam_lr_decay = 1e-05  # Learning rate decay"
+   ]
+  },
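+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "To see what the `decay` parameter does, a minimal sketch (assuming Keras' standard time-based rule, lr_t = lr / (1 + decay * t), where t counts parameter updates):"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# Illustrative only: effective learning rate after t parameter updates\n",
+    "# under time-based decay lr_t = lr / (1 + decay * t).\n",
+    "for t in [0, 1000, 10000, 100000]:\n",
+    "    lr_t = adam_learning_rate / (1.0 + adam_lr_decay * t)\n",
+    "    print(\"updates: {:6d} -> effective lr: {:.6f}\".format(t, lr_t))"
+   ]
+  },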
+  {
+   "cell_type": "code",
+   "execution_count": 40,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "ae_optimizer = Adam(lr = adam_learning_rate, \n",
+    "                    epsilon = adam_epsilon, \n",
+    "                    decay = adam_lr_decay)"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "### Autoencoder Deep 2"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 37,
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "Model: \"model_8\"\n",
+      "_________________________________________________________________\n",
+      "Layer (type)                 Output Shape              Param #   \n",
+      "=================================================================\n",
+      "input_8 (InputLayer)         (None, 64, 64, 1)         0         \n",
+      "_________________________________________________________________\n",
+      "model_6 (Model)              (None, 256)               2228992   \n",
+      "_________________________________________________________________\n",
+      "model_7 (Model)              (None, 64, 64, 1)         2232832   \n",
+      "=================================================================\n",
+      "Total params: 4,461,824\n",
+      "Trainable params: 4,461,824\n",
+      "Non-trainable params: 0\n",
+      "_________________________________________________________________\n"
+     ]
+    }
+   ],
+   "source": [
+    "autoencoder_input = Input(shape = input_shape)\n",
+    "\n",
+    "x = autoencoder_input\n",
+    "\n",
+    "x = encoder_deep2(x)\n",
+    "x = decoder_deep2(x)\n",
+    "\n",
+    "autoencoder_output = x\n",
+    "\n",
+    "autoencoder_deep2 = Model(inputs = autoencoder_input, outputs = autoencoder_output)\n",
+    "autoencoder_deep2.compile(optimizer = ae_optimizer, loss = \"mse\", metrics = [\"mae\"])\n",
+    "autoencoder_deep2.summary()\n",
+    "\n",
+    "autoencoder_clean_weights = autoencoder_deep_2.get_weights()"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "### Entrenamiento"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 41,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "ae_epochs = 5\n",
+    "ae_batch_size = 512"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 42,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "mc = ModelCheckpoint(filepath = \"Modelos/model_autoencoder_deep_2.h5\", \n",
+    "                     monitor = \"val_loss\", \n",
+    "                     mode = \"min\", \n",
+    "                     save_best_only = True,\n",
+    "                     verbose = 1)"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 43,
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "Train on 180000 samples, validate on 20000 samples\n",
+      "Epoch 1/5\n",
+      "180000/180000 [==============================] - 32s 177us/step - loss: 0.0093 - mae: 0.0323 - val_loss: 0.0049 - val_mae: 0.0225\n",
+      "\n",
+      "Epoch 00001: val_loss improved from inf to 0.00490, saving model to Modelos/model_autoencoder_deep_2.h5\n",
+      "Epoch 2/5\n",
+      "180000/180000 [==============================] - 32s 176us/step - loss: 0.0048 - mae: 0.0221 - val_loss: 0.0049 - val_mae: 0.0225\n",
+      "\n",
+      "Epoch 00002: val_loss did not improve from 0.00490\n",
+      "Epoch 3/5\n",
+      "180000/180000 [==============================] - 33s 181us/step - loss: 0.0048 - mae: 0.0221 - val_loss: 0.0049 - val_mae: 0.0225\n",
+      "\n",
+      "Epoch 00003: val_loss did not improve from 0.00490\n",
+      "Epoch 4/5\n",
+      "180000/180000 [==============================] - 33s 182us/step - loss: 0.0048 - mae: 0.0221 - val_loss: 0.0049 - val_mae: 0.0225\n",
+      "\n",
+      "Epoch 00004: val_loss did not improve from 0.00490\n",
+      "Epoch 5/5\n",
+      "180000/180000 [==============================] - 32s 178us/step - loss: 0.0048 - mae: 0.0221 - val_loss: 0.0049 - val_mae: 0.0225\n",
+      "\n",
+      "Epoch 00005: val_loss did not improve from 0.00490\n"
+     ]
+    }
+   ],
+   "source": [
+    "autoencoder_deep2.set_weights(autoencoder_clean_weights)\n",
+    "\n",
+    "autoencoder_train_deep_2 = autoencoder_deep_2.fit(train_data, train_data,\n",
+    "                                           epochs = ae_epochs,\n",
+    "                                           batch_size = ae_batch_size,\n",
+    "                                           validation_data = (vali_data, vali_data),\n",
+    "                                           shuffle = True,\n",
+    "                                           callbacks = [mc])\n",
+    "\n",
+    "first_run_train = False"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "## Autoencoder Deep 3 capas"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "### Encoder"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 44,
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "Model: \"model_9\"\n",
+      "_________________________________________________________________\n",
+      "Layer (type)                 Output Shape              Param #   \n",
+      "=================================================================\n",
+      "input_9 (InputLayer)         (None, 64, 64, 1)         0         \n",
+      "_________________________________________________________________\n",
+      "flatten_4 (Flatten)          (None, 4096)              0         \n",
+      "_________________________________________________________________\n",
+      "dense_11 (Dense)             (None, 1024)              4195328   \n",
+      "_________________________________________________________________\n",
+      "activation_11 (Activation)   (None, 1024)              0         \n",
+      "_________________________________________________________________\n",
+      "dense_12 (Dense)             (None, 512)               524800    \n",
+      "_________________________________________________________________\n",
+      "activation_12 (Activation)   (None, 512)               0         \n",
+      "_________________________________________________________________\n",
+      "dense_13 (Dense)             (None, 256)               131328    \n",
+      "_________________________________________________________________\n",
+      "activation_13 (Activation)   (None, 256)               0         \n",
+      "=================================================================\n",
+      "Total params: 4,851,456\n",
+      "Trainable params: 4,851,456\n",
+      "Non-trainable params: 0\n",
+      "_________________________________________________________________\n"
+     ]
+    }
+   ],
+   "source": [
+    "encoder_input = Input(shape = input_shape)\n",
+    "x = encoder_input\n",
+    "x = Flatten()(x)\n",
+    "x = Dense(units = 4 * encoding_dim)(x)\n",
+    "x = Activation(activation = \"relu\")(x)\n",
+    "x = Dense(units = 2 * encoding_dim)(x)\n",
+    "x = Activation(activation = \"relu\")(x)\n",
+    "x = Dense(units = encoding_dim)(x)\n",
+    "x = Activation(activation = \"relu\")(x)\n",
+    "encoder_output = x\n",
+    "\n",
+    "encoder = Model(inputs = encoder_input, outputs = encoder_output)\n",
+    "encoder.summary()"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "### Decoder"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 45,
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "Model: \"model_10\"\n",
+      "_________________________________________________________________\n",
+      "Layer (type)                 Output Shape              Param #   \n",
+      "=================================================================\n",
+      "input_10 (InputLayer)        (None, 256)               0         \n",
+      "_________________________________________________________________\n",
+      "dense_14 (Dense)             (None, 512)               131584    \n",
+      "_________________________________________________________________\n",
+      "activation_14 (Activation)   (None, 512)               0         \n",
+      "_________________________________________________________________\n",
+      "dense_15 (Dense)             (None, 1024)              525312    \n",
+      "_________________________________________________________________\n",
+      "activation_15 (Activation)   (None, 1024)              0         \n",
+      "_________________________________________________________________\n",
+      "dense_16 (Dense)             (None, 4096)              4198400   \n",
+      "_________________________________________________________________\n",
+      "activation_16 (Activation)   (None, 4096)              0         \n",
+      "_________________________________________________________________\n",
+      "reshape_4 (Reshape)          (None, 64, 64, 1)         0         \n",
+      "=================================================================\n",
+      "Total params: 4,855,296\n",
+      "Trainable params: 4,855,296\n",
+      "Non-trainable params: 0\n",
+      "_________________________________________________________________\n"
+     ]
+    }
+   ],
+   "source": [
+    "decoder_input = Input(shape = (encoder.output_shape[1],))\n",
+    "x = decoder_input\n",
+    "x = Dense(units = 2 * encoding_dim)(x)\n",
+    "x = Activation(activation = \"relu\")(x)\n",
+    "x = Dense(units = 4 * encoding_dim)(x)\n",
+    "x = Activation(activation = \"relu\")(x)\n",
+    "x = Dense(units = image_dim)(x)\n",
+    "x = Activation(activation = \"sigmoid\")(x)\n",
+    "x = Reshape(target_shape = input_shape)(x)\n",
+    "decoder_output = x\n",
+    "\n",
+    "decoder = Model(inputs = decoder_input, outputs = decoder_output)\n",
+    "decoder.summary()"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "### Autoencoder"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 47,
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "Model: \"model_12\"\n",
+      "_________________________________________________________________\n",
+      "Layer (type)                 Output Shape              Param #   \n",
+      "=================================================================\n",
+      "input_12 (InputLayer)        (None, 64, 64, 1)         0         \n",
+      "_________________________________________________________________\n",
+      "model_9 (Model)              (None, 256)               4851456   \n",
+      "_________________________________________________________________\n",
+      "model_10 (Model)             (None, 64, 64, 1)         4855296   \n",
+      "=================================================================\n",
+      "Total params: 9,706,752\n",
+      "Trainable params: 9,706,752\n",
+      "Non-trainable params: 0\n",
+      "_________________________________________________________________\n"
+     ]
+    }
+   ],
+   "source": [
+    "autoencoder_input = Input(shape = input_shape)\n",
+    "x = autoencoder_input\n",
+    "x = encoder(x)\n",
+    "x = decoder(x)\n",
+    "autoencoder_output = x\n",
+    "\n",
+    "autoencoder = Model(inputs = autoencoder_input, outputs = autoencoder_output)\n",
+    "autoencoder.compile(optimizer = ae_optimizer, loss = \"mse\", metrics = [\"mae\"])\n",
+    "autoencoder.summary()"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "### Entrenamiento"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 48,
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "Train on 180000 samples, validate on 20000 samples\n",
+      "Epoch 1/5\n",
+      "180000/180000 [==============================] - 62s 347us/step - loss: 0.0150 - mae: 0.0454 - val_loss: 0.0049 - val_mae: 0.0225\n",
+      "Epoch 2/5\n",
+      "180000/180000 [==============================] - 61s 337us/step - loss: 0.0048 - mae: 0.0222 - val_loss: 0.0049 - val_mae: 0.0225\n",
+      "Epoch 3/5\n",
+      "180000/180000 [==============================] - 60s 336us/step - loss: 0.0048 - mae: 0.0221 - val_loss: 0.0049 - val_mae: 0.0225\n",
+      "Epoch 4/5\n",
+      "180000/180000 [==============================] - 60s 336us/step - loss: 0.0048 - mae: 0.0221 - val_loss: 0.0049 - val_mae: 0.0225\n",
+      "Epoch 5/5\n",
+      "180000/180000 [==============================] - 61s 339us/step - loss: 0.0048 - mae: 0.0221 - val_loss: 0.0049 - val_mae: 0.0225\n"
+     ]
+    }
+   ],
+   "source": [
+    "autoencoder_train = autoencoder.fit(train_data, train_data,\n",
+    "                                    epochs = ae_epochs,\n",
+    "                                    batch_size = ae_batch_size,\n",
+    "                                    validation_data = (vali_data, vali_data),\n",
+    "                                    shuffle = True)"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "## Autoencoder Deep 4 capas"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "### Encoder"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "encoder_input = Input(shape = input_shape)\n",
+    "x = encoder_input\n",
+    "x = Flatten()(x)\n",
+    "x = Dense(units = 8 * encoding_dim)(x)\n",
+    "x = Activation(activation = \"relu\")(x)\n",
+    "x = Dense(units = 4 * encoding_dim)(x)\n",
+    "x = Activation(activation = \"relu\")(x)\n",
+    "x = Dense(units = 2 * encoding_dim)(x)\n",
+    "x = Activation(activation = \"relu\")(x)\n",
+    "x = Dense(units = encoding_dim)(x)\n",
+    "x = Activation(activation = \"relu\")(x)\n",
+    "encoder_output = x\n",
+    "\n",
+    "encoder = Model(inputs = encoder_input, outputs = encoder_output)\n",
+    "encoder.summary()"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "### Decoder"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "decoder_input = Input(shape = (encoder.output_shape[1],))\n",
+    "x = decoder_input\n",
+    "x = Dense(units = 2 * encoding_dim)(x)\n",
+    "x = Activation(activation = \"relu\")(x)\n",
+    "x = Dense(units = 4 * encoding_dim)(x)\n",
+    "x = Activation(activation = \"relu\")(x)\n",
+    "x = Dense(units = 8 * encoding_dim)(x)\n",
+    "x = Activation(activation = \"relu\")(x)\n",
+    "x = Dense(units = image_dim)(x)\n",
+    "x = Activation(activation = \"sigmoid\")(x)\n",
+    "x = Reshape(target_shape = input_shape)(x)\n",
+    "decoder_output = x\n",
+    "\n",
+    "decoder = Model(inputs = decoder_input, outputs = decoder_output)\n",
+    "decoder.summary()"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "### Autoencoder"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "autoencoder_input = Input(shape = input_shape)\n",
+    "x = autoencoder_input\n",
+    "x = encoder(x)\n",
+    "x = decoder(x)\n",
+    "autoencoder_output = x\n",
+    "\n",
+    "autoencoder = Model(inputs = autoencoder_input, outputs = autoencoder_output)\n",
+    "autoencoder.compile(optimizer = ae_optimizer, loss = \"mse\", metrics = [\"mae\"])\n",
+    "autoencoder.summary()"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "### Entrenamiento"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "autoencoder_train = autoencoder.fit(train_data, train_data,\n",
+    "                                    epochs = ae_epochs,\n",
+    "                                    batch_size = ae_batch_size,\n",
+    "                                    validation_data = (vali_data, vali_data),\n",
+    "                                    shuffle = True)"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "## Autoencoder Convolucional"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "A la hora de trabajr con imagenes, los autoencoders creados mediante redes neuronales convolucional funcionan mucho mejor y obtienen mejor rendimiento que el resto de las redes neuronales."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "feature_multiplier = 8"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "### Encoder"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "encoder_input = Input(shape = input_shape)\n",
+    "x = encoder_input\n",
+    "\n",
+    "x = Conv2D(filters = feature_multiplier * 1, kernel_size = 4, padding = \"same\")(x)\n",
+    "x = Activation(activation = \"relu\")(x)\n",
+    "x = MaxPooling2D(pool_size = 2, padding = \"same\")(x)\n",
+    "\n",
+    "x = Conv2D(filters = feature_multiplier * 2, kernel_size = 2, padding = \"same\")(x)\n",
+    "x = Activation(activation = \"relu\")(x)\n",
+    "x = MaxPooling2D(pool_size = 2, padding = \"same\")(x)\n",
+    "\n",
+    "x = Conv2D(filters = feature_multiplier * 4, kernel_size = 2, padding = \"same\")(x)\n",
+    "x = Activation(activation = \"relu\")(x)\n",
+    "x = MaxPooling2D(pool_size = 2, padding = \"same\")(x)\n",
+    "\n",
+    "x = Conv2D(filters = feature_multiplier * 8, kernel_size = 2, padding = \"same\")(x)\n",
+    "x = Activation(activation = \"relu\")(x)\n",
+    "x = MaxPooling2D(pool_size = 2, padding = \"same\")(x)\n",
+    "\n",
+    "x = Conv2D(filters = feature_multiplier * 16, kernel_size = 2, padding = \"same\")(x)\n",
+    "x = Activation(activation = \"relu\")(x)\n",
+    "x = MaxPooling2D(pool_size = 2, padding = \"same\")(x)\n",
+    "\n",
+    "x = Conv2D(filters = feature_multiplier * 32, kernel_size = 2, padding = \"same\")(x)\n",
+    "x = Activation(activation = \"relu\")(x)\n",
+    "x = MaxPooling2D(pool_size = 2, padding = \"same\")(x)\n",
+    "\n",
+    "x = Flatten()(x)\n",
+    "\n",
+    "encoder_output = x\n",
+    "\n",
+    "encoder = Model(inputs = encoder_input, outputs = encoder_output)\n",
+    "encoder.summary()"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "### Decoder"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "decoder_input = Input(shape = (encoder.output_shape[1],))\n",
+    "\n",
+    "x = decoder_input\n",
+    "x = Reshape(target_shape = (1, 1, encoder.output_shape[1]))(x)\n",
+    "\n",
+    "x = Conv2D(filters = feature_multiplier * 16, kernel_size = 2, padding = \"same\")(x)\n",
+    "x = Activation(activation = \"relu\")(x)\n",
+    "x = UpSampling2D(size = 2)(x)\n",
+    "\n",
+    "x = Conv2D(filters = feature_multiplier * 8, kernel_size = 2, padding = \"same\")(x)\n",
+    "x = Activation(activation = \"relu\")(x)\n",
+    "x = UpSampling2D(size = 2)(x)\n",
+    "\n",
+    "x = Conv2D(filters = feature_multiplier * 4, kernel_size = 2, padding = \"same\")(x)\n",
+    "x = Activation(activation = \"relu\")(x)\n",
+    "x = UpSampling2D(size = 2)(x)\n",
+    "\n",
+    "x = Conv2D(filters = feature_multiplier * 2, kernel_size = 2, padding = \"same\")(x)\n",
+    "x = Activation(activation = \"relu\")(x)\n",
+    "x = UpSampling2D(size = 2)(x)\n",
+    "\n",
+    "x = Conv2D(filters = feature_multiplier * 1, kernel_size = 2, padding = \"same\")(x)\n",
+    "x = Activation(activation = \"relu\")(x)\n",
+    "x = UpSampling2D(size = 2)(x)\n",
+    "\n",
+    "x = Conv2D(filters = 1, kernel_size = 4, padding = \"same\")(x)\n",
+    "x = Activation(activation = \"relu\")(x)\n",
+    "x = UpSampling2D(size = 2)(x)\n",
+    "\n",
+    "decoder_output = x\n",
+    "\n",
+    "decoder = Model(inputs = decoder_input, outputs = decoder_output)\n",
+    "decoder.summary()"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "autoencoder_input = Input(shape = input_shape)\n",
+    "x = autoencoder_input\n",
+    "x = encoder(x)\n",
+    "x = decoder(x)\n",
+    "autoencoder_output = x\n",
+    "\n",
+    "autoencoder = Model(inputs = autoencoder_input, outputs = autoencoder_output)\n",
+    "autoencoder.compile(optimizer = optimizer, loss = \"mse\", metrics = [\"mae\"])\n",
+    "autoencoder.summary()"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "mc = ModelCheckpoint(filepath = \"Modelos/model_autoencoder_simple.h5\", \n",
+    "                     monitor = \"val_loss\", \n",
+    "                     mode = \"min\", \n",
+    "                     save_best_only = True,\n",
+    "                     verbose = 1)"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "autoencoder_train = autoencoder.fit(train_data, train_data,\n",
+    "                                    epochs = ae_epochs,\n",
+    "                                    batch_size = ae_batch_size,\n",
+    "                                    validation_data = (vali_data, vali_data),\n",
+    "                                    shuffle = True,\n",
+    "                                    callbacks = [mc])"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "import matplotlib.pyplot as plt\n",
+    "\n",
+    "plot_epochs = range(ae_epochs)\n",
+    "plot_loss = autoencoder_train.history[\"loss\"]\n",
+    "plot_val_loss = autoencoder_train.history[\"val_loss\"]\n",
+    "plot_mae = autoencoder_train.history[\"mae\"]\n",
+    "plot_val_mae = autoencoder_train.history[\"val_mae\"]\n",
+    "\n",
+    "plt.figure(figsize = (15, 5))\n",
+    "\n",
+    "ax = plt.subplot(1, 2, 1)\n",
+    "plt.plot(plot_epochs, plot_loss, plot_val_loss)\n",
+    "plt.xlabel(\"Epochs\")\n",
+    "plt.ylabel(\"MSE Losses\")\n",
+    "\n",
+    "ax = plt.subplot(1, 2, 2)\n",
+    "plt.plot(plot_epochs, plot_mae, plot_val_mae)\n",
+    "plt.xlabel(\"Epochs\")\n",
+    "plt.ylabel(\"MAE Losses\")\n",
+    "\n",
+    "plt.savefig(\"Plots/model_ae_simple_loss.png\")\n",
+    "plt.show()\n"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "autoencoder = load_model(\"Modelos/model_ae_simple.h5\")\n",
+    "autoencoder.summary()"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "encoder = autoencoder.get_layer(index = 1)\n",
+    "encoder.save(\"Modelos/model_encoder_simple.h5\")\n",
+    "encoder.summary()"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "decoder = autoencoder.get_layer(index = -1)\n",
+    "decoder.save(\"Modelos/model_decoder_simple.h5\")\n",
+    "decoder.summary()"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "## LSTM simple"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "### Preparación de datos para LSTM"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "encoded_train_data = encoder.predict(train_data)\n",
+    "encoded_vali_data = encoder.predict(vali_data)"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "print(encoded_train_data.shape)\n",
+    "print(encoded_vali_data.shape)"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "from math import floor"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "def lstm_count(encoded_data, batch_size, time_steps, out_time_steps, frames):\n",
+    "    scene_count = len(encoded_data) // frames\n",
+    "    sample_count = frames\n",
+    "    scene_iteration_count = floor((sample_count + 1 - (time_steps + out_time_steps)) / batch_size)\n",
+    "    return scene_count, sample_count, scene_iteration_count"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "def lstm_batch_samples(encoded_data, batch_size, time_steps, out_time_steps, frames):\n",
+    "    scene_count, sample_count, scene_iteration_count = lstm_count(encoded_data, batch_size, time_steps, out_time_steps, frames)\n",
+    "    batch_samples = scene_count * scene_iteration_count\n",
+    "    return batch_samples"
+   ]
+  },
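+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "A small worked example of the counting logic (a sketch with dummy data, assuming frames = 200, time_steps = 6, out_time_steps = 1 and batch_size = 16): each 200-frame scene yields floor((200 + 1 - 7) / 16) = 12 batches."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# Two dummy scenes of 200 encoded frames each (encoding dimension 256).\n",
+    "dummy_encoded = np.zeros((2 * 200, 256))\n",
+    "\n",
+    "print(lstm_count(dummy_encoded, 16, 6, 1, 200))          # (2, 200, 12)\n",
+    "print(lstm_batch_samples(dummy_encoded, 16, 6, 1, 200))  # 2 * 12 = 24 batches per epoch"
+   ]
+  },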
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "def shuffle_in_unison(*np_arrays):\n",
+    "    rng = np.random.get_state()\n",
+    "    for array in np_arrays:\n",
+    "        np.random.set_state(rng)\n",
+    "        np.random.shuffle(array)"
+   ]
+  },
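+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Because every array is shuffled from the same saved random state, corresponding rows stay paired. A minimal sketch with toy arrays:"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# Toy check: a[i] must still correspond to b[i] after shuffling.\n",
+    "a = np.arange(6).reshape(3, 2)\n",
+    "b = np.arange(3)\n",
+    "shuffle_in_unison(a, b)\n",
+    "print(a)\n",
+    "print(b)  # Same permutation as the rows of a."
+   ]
+  },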
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "def restructure_encoded_data(encoded_data, time_steps, out_time_steps, batch_size):\n",
+    "    \n",
+    "    content_shape = encoded_data[0].shape  # (256,)\n",
+    "    final_sample_count = encoded_data.shape[0] - time_steps - out_time_steps  # frames, frames - batch_size, frames - 2 * batch_size, ...\n",
+    "    final_sample_count = min(batch_size, final_sample_count)  # 8\n",
+    "        \n",
+    "    X_data = np.zeros((final_sample_count, time_steps) + content_shape)  # (8, 6, 256)\n",
+    "    y_data = np.zeros((final_sample_count, out_time_steps) + content_shape)  # (8, 1, 256)\n",
+    "        \n",
+    "    curTS = 0\n",
+    "            \n",
+    "    for z in range(time_steps, final_sample_count + time_steps):\n",
+    "        X_data[curTS] = np.array(encoded_data[curTS:z])\n",
+    "        y_data[curTS] = np.array(encoded_data[z:z+out_time_steps])\n",
+    "        curTS += 1\n",
+    "        \n",
+    "    return X_data, y_data"
+   ]
+  },
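+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "To make the sliding-window layout concrete, a minimal sketch on dummy encoded frames (using the time_steps = 6, out_time_steps = 1 and batch_size = 8 values from the comments above):"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# A dummy scene fragment: 20 encoded frames of dimension 256.\n",
+    "dummy_scene = np.zeros((20, 256))\n",
+    "\n",
+    "X_demo, y_demo = restructure_encoded_data(dummy_scene, 6, 1, 8)\n",
+    "print(X_demo.shape)  # (8, 6, 256): 8 windows of 6 consecutive frames.\n",
+    "print(y_demo.shape)  # (8, 1, 256): the frame that follows each window."
+   ]
+  },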
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "def generator(encoded_data, batch_size, time_steps, out_time_steps, frames):\n",
+    "\n",
+    "    while True:\n",
+    "        \n",
+    "        scene_count, sample_count, scene_iteration_count = lstm_count(encoded_train_data, lstm_batch_size, lstm_time_steps, lstm_out_time_steps, frames)\n",
+    "\n",
+    "        for i in range(scene_count):\n",
+    "            \n",
+    "            scene = encoded_train_data[(i * frames):((i + 1) * frames)]  # Selecciona escenas individualmente.\n",
+    "     \n",
+    "            for j in range(scene_iteration_count):  # Número de batches que entran en una escena individual.\n",
+    "                start = j * lstm_batch_size\n",
+    "                end = sample_count\n",
+    "                \n",
+    "                data = scene[start:end]\n",
+    "                X, Y  = restructure_encoded_data(data, lstm_time_steps, lstm_out_time_steps, lstm_batch_size)\n",
+    "            \n",
+    "                X = X.reshape(*X.shape[0:2], -1)\n",
+    "                Y = np.squeeze(Y.reshape(Y.shape[0], lstm_out_time_steps, -1))\n",
+    "                \n",
+    "                shuffle_in_unison(X, Y)\n",
+    "        \n",
+    "                yield X, Y"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "train_steps = lstm_batch_samples(encoded_train_data, lstm_batch_size, lstm_time_steps, lstm_out_time_steps, frames)\n",
+    "print (\"Number of train batch samples per epoch: {}\".format(training_steps))\n",
+    "train_generator = generator(encoded_train_data, lstm_batch_size, lstm_time_steps, lstm_out_time_steps, frames)"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "vali_steps = lstm_batch_samples(encoded_vali_data, lstm_batch_size, lstm_time_steps, lstm_out_time_steps, frames)\n",
+    "print (\"Number of validation batch samples per epoch: {}\".format(validation_steps))\n",
+    "vali_generator = generator(encoded_vali_data, lstm_batch_size, lstm_time_steps, lstm_out_time_steps, frames)"
+   ]
+  },
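+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "As a sanity check (a minimal sketch, not part of the training pipeline; note that it consumes one batch from `train_generator`), we can draw a single batch and inspect its shapes:"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# Expect X: (lstm_batch_size, lstm_time_steps, encoding_dim)\n",
+    "# and    Y: (lstm_batch_size, encoding_dim) after the squeeze.\n",
+    "X_batch, Y_batch = next(train_generator)\n",
+    "print(X_batch.shape)\n",
+    "print(Y_batch.shape)"
+   ]
+  },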
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "### Modelo LSTM"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {
+    "scrolled": true
+   },
+   "outputs": [],
+   "source": [
+    "lstm_input = Input(shape = (lstm_time_steps, encoding_dim))\n",
+    "x = lstm_input\n",
+    "x = LSTM(units = 256)(x)\n",
+    "lstm_output = x\n",
+    "\n",
+    "lstm_simple = Model(inputs = lstm_input, outputs = lstm_output)\n",
+    "lstm_simple.compile(optimizer = \"adam\", loss = \"mse\", metrics = [\"mae\"])\n",
+    "lstm_clean_weights = lstm.get_weights()\n",
+    "lstm_simple.summary()"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "### Entrenamiento"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "mc = ModelCheckpoint(filepath = \"Modelos/model_lstm_simple.h5\", \n",
+    "                     monitor = \"val_loss\", \n",
+    "                     mode = \"min\", \n",
+    "                     save_best_only = True,\n",
+    "                     verbose = 1)"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {
+    "scrolled": true
+   },
+   "outputs": [],
+   "source": [
+    "lstm_simple.set_weights(lstm_clean_weights)\n",
+    "\n",
+    "lstm_train = lstm.fit_generator(generator = train_generator,\n",
+    "                                steps_per_epoch = train_steps,\n",
+    "                                epochs = lstm_epochs,\n",
+    "                                verbose = 1,\n",
+    "                                callbacks = None,\n",
+    "                                validation_data = vali_generator,\n",
+    "                                validation_steps = vali_steps,\n",
+    "                                class_weight = None,\n",
+    "                                workers = 1)"
+   ]
+  }
+ ],
+ "metadata": {
+  "kernelspec": {
+   "display_name": "Python 3",
+   "language": "python",
+   "name": "python3"
+  },
+  "language_info": {
+   "codemirror_mode": {
+    "name": "ipython",
+    "version": 3
+   },
+   "file_extension": ".py",
+   "mimetype": "text/x-python",
+   "name": "python",
+   "nbconvert_exporter": "python",
+   "pygments_lexer": "ipython3",
+   "version": "3.6.8"
+  },
+  "toc": {
+   "base_numbering": 1,
+   "nav_menu": {},
+   "number_sections": true,
+   "sideBar": true,
+   "skip_h1_title": false,
+   "title_cell": "Table of Contents",
+   "title_sidebar": "Contents",
+   "toc_cell": false,
+   "toc_position": {},
+   "toc_section_display": true,
+   "toc_window_display": false
+  }
+ },
+ "nbformat": 4,
+ "nbformat_minor": 2
+}
diff --git a/Scripts/Notebooks/Entrenamiento Modelos Autoencoder LSTM.ipynb b/Scripts/Notebooks/Entrenamiento Modelos Autoencoder LSTM.ipynb
new file mode 100644
index 0000000000000000000000000000000000000000..2b78139a6de2e2d1f76be560402cffe543f58170
--- /dev/null
+++ b/Scripts/Notebooks/Entrenamiento Modelos Autoencoder LSTM.ipynb	
@@ -0,0 +1,2934 @@
+{
+ "cells": [
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "# Librerías"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 1,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "import os\n",
+    "import sys\n",
+    "import tensorflow as tf\n",
+    "import numpy as np\n",
+    "import scipy.misc\n",
+    "import matplotlib.pyplot as plt"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 2,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "sys.path.append(\"../tools\")  # Herramientas propias de MantaFlow\n",
+    "import uniio  # Lectura de ficheros .uni"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "# Hiperparámetros"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 3,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "num_sims = 2000  # num_sims - 1000 escenas. \n",
+    "frames = 200  # Frames por escena.\n",
+    "\n",
+    "epochs_autoencoder = 5\n",
+    "epochs_lstm = 10\n",
+    "epochs_pretraining = 1\n",
+    "\n",
+    "batch_size_autoencoder = 4\n",
+    "batch_size_lstm = 16\n",
+    "\n",
+    "time_steps_lstm = 6\n",
+    "out_time_steps_lstm = 1\n",
+    "\n",
+    "save_autoencoder = True\n",
+    "save_lstm = True"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "# Datos iniciales"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Inicializamos las seed para funciones random. Al ser inicializadas al mismo número, el resultado no cambiará en cada ejecución.\n"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 4,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "np.random.seed(13)"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Ruta a los datos de simulación, donde también se guardan los resultados.\n"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 5,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "base_path = \"../data\""
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "## Carga de datos"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Podemos elegir el número de escenas y los frames de cada una, dependiendo de la configuración de los simuladores clásicos."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 6,
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "Cargamos 1000 escenas, con 200 frames cada una.\n",
+      "Trabajamos con un total de 200000 frames.\n"
+     ]
+    }
+   ],
+   "source": [
+    "print(\"Cargamos {} escenas, con {} frames cada una.\".format(num_sims-1000, frames))\n",
+    "print(\"Trabajamos con un total de {} frames.\".format((num_sims-1000) * frames))"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Cargamos los datos desde los ficheros .uni en arrays de numpy. Los .uni son ficheros propios de MantaFlow, en los que se guarda los resultados de los simuladores clásicos. En este caso cargamos los datos de densidad de humo simulados previamente."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 7,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "densities = []\n",
+    "\n",
+    "for sim in range(1000, num_sims):\n",
+    "    \n",
+    "    if os.path.exists(\"%s/simSimple_%04d\" % (base_path, sim)):  # Comprueba la existencia de las carpetas (cada una 100 frames de datos).\n",
+    "        \n",
+    "        for i in range(0, frames):\n",
+    "            \n",
+    "            filename = \"%s/simSimple_%04d/density_%04d.uni\"  # Nombre de cada frame (densidad).\n",
+    "            uni_path = filename % (base_path, sim, i)  # 200 frames por sim, rellena parametros de la ruta.\n",
+    "            header, content = uniio.readUni(uni_path)  # Devuelve un array Numpy [Z, Y, X, C].\n",
+    "            \n",
+    "            h = header[\"dimX\"]\n",
+    "            w = header[\"dimY\"]\n",
+    "            \n",
+    "            arr = content[:, ::-1, :, :]  # Cambia el orden de Y.\n",
+    "            arr = np.reshape(arr, [w, h, 1])  # Deshecha Z.\n",
+    "            \n",
+    "            densities.append(arr)"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Devuelve los datos de cada frame (canal de grises, 0 a 255) en una lista de Python. En este caso las imagenes son de 64x64 pixels. (64, 64, 1)"
+   ]
+  },
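+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "As a quick sanity check (a minimal sketch, not part of the original pipeline), we can display one of the loaded frames:"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# Show the first loaded density frame as a grayscale image.\n",
+    "plt.imshow(densities[0][:, :, 0], cmap = \"gray\")\n",
+    "plt.title(\"First loaded density frame\")\n",
+    "plt.show()"
+   ]
+  },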
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Necesitamos al menos 2 simulaciones para trabajar de manera adecuada."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 8,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "load_num = len(densities)\n",
+    "\n",
+    "if load_num < 2 * frames:\n",
+    "    \n",
+    "    print(\"Error - Usa al menos dos simulaciones completas\")\n",
+    "    \n",
+    "    exit(True)"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Convertimos la lista \"densities\" en un array de Numpy.\n"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 9,
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "Forma del array: (200000, 64, 64, 1)\n",
+      "Dimensiones del array: 4\n",
+      "Número de pixels en total: 819200000\n"
+     ]
+    }
+   ],
+   "source": [
+    "densities = np.reshape(densities, (len(densities), 64, 64, 1))\n",
+    "\n",
+    "print(\"Forma del array: {}\".format(densities.shape))\n",
+    "print(\"Dimensiones del array: {}\".format(densities.ndim))\n",
+    "print(\"Número de pixels en total: {}\".format(densities.size))"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "## Creación del set de validación"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Con el fin de entrenar correctamente a los modelos Deep Learning, separamos los datos de densidad en un set de entrenamiento y otro de validación. Creamos el set de validación de entre los datos de simulación generados, al menos una simulación completa o el 10% de los datos (el que sea mayor de los dos)."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 10,
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "Separamos en 180000 frames de entrenamiento y 20000 frames de validación.\n"
+     ]
+    }
+   ],
+   "source": [
+    "vali_set_size = max(200, int(load_num * 0.1))  # Al menos una simu completa o el 10% de los datos.\n",
+    "\n",
+    "vali_data = densities[load_num - vali_set_size : load_num, :]  # \"load_num\" datos del final de \"densities\".\n",
+    "train_data = densities[0 : load_num - vali_set_size, :]  # El resto de datos del principio de \"densities\".\n",
+    "\n",
+    "print(\"Separamos en {} frames de entrenamiento y {} frames de validación.\".format(train_data.shape[0], vali_data.shape[0]))"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Convertimos los datos de entrenamiento y validación en arrays."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 11,
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "Forma del set de entrenamiento: (180000, 64, 64, 1)\n",
+      "Forma del set de validación: (20000, 64, 64, 1)\n"
+     ]
+    }
+   ],
+   "source": [
+    "train_data = np.reshape(train_data, (len(train_data), 64, 64, 1))\n",
+    "vali_data = np.reshape(vali_data, (len(vali_data), 64, 64, 1))\n",
+    "\n",
+    "print(\"Forma del set de entrenamiento: {}\".format(train_data.shape))\n",
+    "print(\"Forma del set de validación: {}\".format(vali_data.shape))"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "# Autoencoder 2D"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "El modelo que vamos a utilizar es un autoencoder completamente convolucional. Las típicas capas de MaxPooling y UpSampling no aparecen en nuestro modelo, y en su lugar cambiamos las dimensiones mediante un Stride de 2.  "
+   ]
+  },
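+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "A minimal sketch of that idea (a hypothetical toy layer, separate from the model built below): a Conv2D with strides = 2 halves the spatial resolution by itself, with no pooling layer."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "from keras.layers import Input, Conv2D\n",
+    "from keras.models import Model\n",
+    "\n",
+    "# A single strided convolution maps 64x64 down to 32x32.\n",
+    "toy_input = Input(shape = (64, 64, 1))\n",
+    "toy_output = Conv2D(filters = 8, kernel_size = 4, strides = 2, padding = \"same\")(toy_input)\n",
+    "print(Model(toy_input, toy_output).output_shape)  # (None, 32, 32, 8)"
+   ]
+  },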
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "## Creacion de las capas del modelo"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "### Parametros de inicialización"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Regula la cantidad de filtros convolucionales:"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 12,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "feature_multiplier = 8 "
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Tamaño del kernel de la primera capa del encoder y la última del decoder (kernels exteriores):"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 13,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "surface_kernel_size = 4  # Matriz 4x4"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Tamaño de los kernels interiores:"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 14,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "kernel_size = 2  # Matriz 2x2"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "El valor de la capa Dropout:"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 15,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "dropout = 0.0"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "La función que utilizamos para inicializar los parametros de las capas:"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 16,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "init_func = \"glorot_normal\""
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "En la primera capa debemos definir las dimensiones del input esperado:"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 17,
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "(64, 64, 1)\n"
+     ]
+    }
+   ],
+   "source": [
+    "input_shape = (train_data.shape[1], \n",
+    "               train_data.shape[2], \n",
+    "               train_data.shape[3])\n",
+    "\n",
+    "print(input_shape)"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "### Librerías"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 18,
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stderr",
+     "output_type": "stream",
+     "text": [
+      "Using TensorFlow backend.\n"
+     ]
+    }
+   ],
+   "source": [
+    "from keras.layers import Input, Dropout, Conv2D, Conv2DTranspose, BatchNormalization, Flatten, Activation, Reshape\n",
+    "from keras.layers.advanced_activations import LeakyReLU\n",
+    "from keras.models import Model"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "### Capas del Encoder"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 19,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "layer_conv = []"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "#### Convolución 1"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 20,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "### Conv 1 ###\n",
+    "\n",
+    "# Input #\n",
+    "\n",
+    "conv1_input_shape = input_shape\n",
+    "\n",
+    "conv1_input = Input(shape = conv1_input_shape)\n",
+    "\n",
+    "x = conv1_input\n",
+    "\n",
+    "# Layer 0 #\n",
+    "\n",
+    "x = Conv2D(filters = feature_multiplier * 1, \n",
+    "            kernel_size = surface_kernel_size,\n",
+    "            strides = 1,\n",
+    "            padding = \"same\",\n",
+    "            kernel_initializer = init_func)(x)\n",
+    "\n",
+    "x = LeakyReLU(alpha = 0.2)(x)\n",
+    "\n",
+    "x = BatchNormalization()(x)\n",
+    "\n",
+    "# Layer 1 #\n",
+    "\n",
+    "x = Conv2D(filters = feature_multiplier * 1, \n",
+    "            kernel_size = surface_kernel_size,\n",
+    "            strides = 1,\n",
+    "            kernel_initializer = init_func,\n",
+    "            padding = \"same\")(x)\n",
+    "\n",
+    "x = LeakyReLU(alpha = 0.2)(x)\n",
+    "\n",
+    "x = BatchNormalization()(x)\n",
+    "\n",
+    "# Layer 2 #\n",
+    "\n",
+    "x = Conv2D(filters = feature_multiplier * 1, \n",
+    "            kernel_size = surface_kernel_size,\n",
+    "            strides = 2,\n",
+    "            kernel_initializer = init_func, \n",
+    "            padding = \"same\")(x)\n",
+    "\n",
+    "x = LeakyReLU(alpha = 0.2)(x)\n",
+    "\n",
+    "x = BatchNormalization()(x)\n",
+    "\n",
+    "x = Dropout(dropout)(x) if dropout > 0.0 else x\n",
+    "\n",
+    "# Output #\n",
+    "\n",
+    "conv1_output = x"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 21,
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "Model: \"model_1\"\n",
+      "_________________________________________________________________\n",
+      "Layer (type)                 Output Shape              Param #   \n",
+      "=================================================================\n",
+      "input_1 (InputLayer)         (None, 64, 64, 1)         0         \n",
+      "_________________________________________________________________\n",
+      "conv2d_1 (Conv2D)            (None, 64, 64, 8)         136       \n",
+      "_________________________________________________________________\n",
+      "leaky_re_lu_1 (LeakyReLU)    (None, 64, 64, 8)         0         \n",
+      "_________________________________________________________________\n",
+      "batch_normalization_1 (Batch (None, 64, 64, 8)         32        \n",
+      "_________________________________________________________________\n",
+      "conv2d_2 (Conv2D)            (None, 64, 64, 8)         1032      \n",
+      "_________________________________________________________________\n",
+      "leaky_re_lu_2 (LeakyReLU)    (None, 64, 64, 8)         0         \n",
+      "_________________________________________________________________\n",
+      "batch_normalization_2 (Batch (None, 64, 64, 8)         32        \n",
+      "_________________________________________________________________\n",
+      "conv2d_3 (Conv2D)            (None, 32, 32, 8)         1032      \n",
+      "_________________________________________________________________\n",
+      "leaky_re_lu_3 (LeakyReLU)    (None, 32, 32, 8)         0         \n",
+      "_________________________________________________________________\n",
+      "batch_normalization_3 (Batch (None, 32, 32, 8)         32        \n",
+      "=================================================================\n",
+      "Total params: 2,296\n",
+      "Trainable params: 2,248\n",
+      "Non-trainable params: 48\n",
+      "_________________________________________________________________\n"
+     ]
+    }
+   ],
+   "source": [
+    "convolution_1 = Model(conv1_input, conv1_output)\n",
+    "layer_conv.append(convolution_1)\n",
+    "convolution_1.summary()"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 22,
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "(32, 32, 8)\n"
+     ]
+    }
+   ],
+   "source": [
+    "conv1_output_shape = (convolution_1.output_shape[1],\n",
+    "                      convolution_1.output_shape[2],\n",
+    "                      convolution_1.output_shape[3])\n",
+    "\n",
+    "print(conv1_output_shape)"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "#### Convolución 2"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 23,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "### Conv 2 ###\n",
+    "\n",
+    "# Input #\n",
+    "\n",
+    "conv2_input_shape = conv1_output_shape\n",
+    "\n",
+    "conv2_input = Input(shape = conv2_input_shape)\n",
+    "\n",
+    "x = conv2_input\n",
+    "\n",
+    "# Layer 0 #\n",
+    "\n",
+    "x = Conv2D(filters = feature_multiplier * 2, \n",
+    "            kernel_size = kernel_size,\n",
+    "            strides = 1,\n",
+    "            kernel_initializer = init_func, \n",
+    "            padding = \"same\")(x)\n",
+    "\n",
+    "x = LeakyReLU(alpha = 0.2)(x)\n",
+    "\n",
+    "x = BatchNormalization()(x)\n",
+    "\n",
+    "# Layer 1 #\n",
+    "\n",
+    "x = Conv2D(filters = feature_multiplier * 2, \n",
+    "            kernel_size = kernel_size,\n",
+    "            strides = 2,\n",
+    "            kernel_initializer = init_func, \n",
+    "            padding = \"same\")(x)\n",
+    "\n",
+    "x = LeakyReLU(alpha = 0.2)(x)\n",
+    "\n",
+    "x = BatchNormalization()(x)\n",
+    "\n",
+    "x = Dropout(dropout)(x) if dropout > 0.0 else x\n",
+    "\n",
+    "# Output #\n",
+    "\n",
+    "conv2_output = x"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 24,
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "Model: \"model_2\"\n",
+      "_________________________________________________________________\n",
+      "Layer (type)                 Output Shape              Param #   \n",
+      "=================================================================\n",
+      "input_2 (InputLayer)         (None, 32, 32, 8)         0         \n",
+      "_________________________________________________________________\n",
+      "conv2d_4 (Conv2D)            (None, 32, 32, 16)        528       \n",
+      "_________________________________________________________________\n",
+      "leaky_re_lu_4 (LeakyReLU)    (None, 32, 32, 16)        0         \n",
+      "_________________________________________________________________\n",
+      "batch_normalization_4 (Batch (None, 32, 32, 16)        64        \n",
+      "_________________________________________________________________\n",
+      "conv2d_5 (Conv2D)            (None, 16, 16, 16)        1040      \n",
+      "_________________________________________________________________\n",
+      "leaky_re_lu_5 (LeakyReLU)    (None, 16, 16, 16)        0         \n",
+      "_________________________________________________________________\n",
+      "batch_normalization_5 (Batch (None, 16, 16, 16)        64        \n",
+      "=================================================================\n",
+      "Total params: 1,696\n",
+      "Trainable params: 1,632\n",
+      "Non-trainable params: 64\n",
+      "_________________________________________________________________\n"
+     ]
+    }
+   ],
+   "source": [
+    "convolution_2 = Model(conv2_input, conv2_output)\n",
+    "layer_conv.append(convolution_2)\n",
+    "convolution_2.summary()"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 25,
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "(16, 16, 16)\n"
+     ]
+    }
+   ],
+   "source": [
+    "conv2_output_shape = (convolution_2.output_shape[1],\n",
+    "                      convolution_2.output_shape[2],\n",
+    "                      convolution_2.output_shape[3])\n",
+    "\n",
+    "print(conv2_output_shape)"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "#### Convolución 3"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 26,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "### Conv 3 ###\n",
+    "\n",
+    "# Input #\n",
+    "\n",
+    "conv3_input_shape = conv2_output_shape\n",
+    "\n",
+    "conv3_input = Input(shape = conv3_input_shape)\n",
+    "\n",
+    "x = conv3_input\n",
+    "\n",
+    "# Layer 0 #\n",
+    "\n",
+    "x = Conv2D(filters = feature_multiplier * 4, \n",
+    "            kernel_size = kernel_size,\n",
+    "            strides = 1,\n",
+    "            kernel_initializer = init_func, \n",
+    "            padding = \"same\")(x)\n",
+    "\n",
+    "x = LeakyReLU(alpha = 0.2)(x)\n",
+    "\n",
+    "x = BatchNormalization()(x)\n",
+    "\n",
+    "# Layer 1 #\n",
+    "\n",
+    "x = Conv2D(filters = feature_multiplier * 4, \n",
+    "            kernel_size = kernel_size,\n",
+    "            strides = 2,\n",
+    "            kernel_initializer = init_func, \n",
+    "            padding = \"same\")(x)\n",
+    "\n",
+    "x = LeakyReLU(alpha = 0.2)(x)\n",
+    "\n",
+    "x = BatchNormalization()(x)\n",
+    "\n",
+    "x = Dropout(dropout)(x) if dropout > 0.0 else x\n",
+    "\n",
+    "# Output #\n",
+    "\n",
+    "conv3_output = x"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 27,
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "Model: \"model_3\"\n",
+      "_________________________________________________________________\n",
+      "Layer (type)                 Output Shape              Param #   \n",
+      "=================================================================\n",
+      "input_3 (InputLayer)         (None, 16, 16, 16)        0         \n",
+      "_________________________________________________________________\n",
+      "conv2d_6 (Conv2D)            (None, 16, 16, 32)        2080      \n",
+      "_________________________________________________________________\n",
+      "leaky_re_lu_6 (LeakyReLU)    (None, 16, 16, 32)        0         \n",
+      "_________________________________________________________________\n",
+      "batch_normalization_6 (Batch (None, 16, 16, 32)        128       \n",
+      "_________________________________________________________________\n",
+      "conv2d_7 (Conv2D)            (None, 8, 8, 32)          4128      \n",
+      "_________________________________________________________________\n",
+      "leaky_re_lu_7 (LeakyReLU)    (None, 8, 8, 32)          0         \n",
+      "_________________________________________________________________\n",
+      "batch_normalization_7 (Batch (None, 8, 8, 32)          128       \n",
+      "=================================================================\n",
+      "Total params: 6,464\n",
+      "Trainable params: 6,336\n",
+      "Non-trainable params: 128\n",
+      "_________________________________________________________________\n"
+     ]
+    }
+   ],
+   "source": [
+    "convolution_3 = Model(conv3_input, conv3_output)\n",
+    "layer_conv.append(convolution_3)\n",
+    "convolution_3.summary()"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 28,
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "(8, 8, 32)\n"
+     ]
+    }
+   ],
+   "source": [
+    "conv3_output_shape = (convolution_3.output_shape[1],\n",
+    "                      convolution_3.output_shape[2],\n",
+    "                      convolution_3.output_shape[3])\n",
+    "\n",
+    "print(conv3_output_shape)"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "#### Convolución 4"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 29,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "### Conv 4 ###\n",
+    "\n",
+    "# Input #\n",
+    "\n",
+    "conv4_input_shape = conv3_output_shape\n",
+    "\n",
+    "conv4_input = Input(shape = conv4_input_shape)\n",
+    "\n",
+    "x = conv4_input\n",
+    "\n",
+    "# Layer 0 #\n",
+    "\n",
+    "x = Conv2D(filters = feature_multiplier * 8, \n",
+    "            kernel_size = kernel_size,\n",
+    "            strides = 1,\n",
+    "            kernel_initializer = init_func, \n",
+    "            padding = \"same\")(x)\n",
+    "\n",
+    "x = LeakyReLU(alpha = 0.2)(x)\n",
+    "\n",
+    "x = BatchNormalization()(x)\n",
+    "\n",
+    "# Layer 1 #\n",
+    "\n",
+    "x = Conv2D(filters = feature_multiplier * 8, \n",
+    "            kernel_size = kernel_size,\n",
+    "            strides = 2,\n",
+    "            kernel_initializer = init_func, \n",
+    "            padding = \"same\")(x)\n",
+    "\n",
+    "x = LeakyReLU(alpha = 0.2)(x)\n",
+    "\n",
+    "x = BatchNormalization()(x)\n",
+    "\n",
+    "x = Dropout(dropout)(x) if dropout > 0.0 else x\n",
+    "\n",
+    "# Output #\n",
+    "\n",
+    "conv4_output = x"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 30,
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "Model: \"model_4\"\n",
+      "_________________________________________________________________\n",
+      "Layer (type)                 Output Shape              Param #   \n",
+      "=================================================================\n",
+      "input_4 (InputLayer)         (None, 8, 8, 32)          0         \n",
+      "_________________________________________________________________\n",
+      "conv2d_8 (Conv2D)            (None, 8, 8, 64)          8256      \n",
+      "_________________________________________________________________\n",
+      "leaky_re_lu_8 (LeakyReLU)    (None, 8, 8, 64)          0         \n",
+      "_________________________________________________________________\n",
+      "batch_normalization_8 (Batch (None, 8, 8, 64)          256       \n",
+      "_________________________________________________________________\n",
+      "conv2d_9 (Conv2D)            (None, 4, 4, 64)          16448     \n",
+      "_________________________________________________________________\n",
+      "leaky_re_lu_9 (LeakyReLU)    (None, 4, 4, 64)          0         \n",
+      "_________________________________________________________________\n",
+      "batch_normalization_9 (Batch (None, 4, 4, 64)          256       \n",
+      "=================================================================\n",
+      "Total params: 25,216\n",
+      "Trainable params: 24,960\n",
+      "Non-trainable params: 256\n",
+      "_________________________________________________________________\n"
+     ]
+    }
+   ],
+   "source": [
+    "convolution_4 = Model(conv4_input, conv4_output)\n",
+    "layer_conv.append(convolution_4)\n",
+    "convolution_4.summary()"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 31,
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "(4, 4, 64)\n"
+     ]
+    }
+   ],
+   "source": [
+    "conv4_output_shape = (convolution_4.output_shape[1],\n",
+    "                      convolution_4.output_shape[2],\n",
+    "                      convolution_4.output_shape[3])\n",
+    "\n",
+    "print(conv4_output_shape)"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "#### Convolución 5"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 32,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "### Conv 5 ###\n",
+    "\n",
+    "# Input #\n",
+    "\n",
+    "conv5_input_shape = conv4_output_shape\n",
+    "\n",
+    "conv5_input = Input(shape = conv5_input_shape)\n",
+    "\n",
+    "x = conv5_input\n",
+    "\n",
+    "# Layer 0 #\n",
+    "\n",
+    "x = Conv2D(filters = feature_multiplier * 16, \n",
+    "            kernel_size = kernel_size,\n",
+    "            strides = 2,\n",
+    "            kernel_initializer = init_func, \n",
+    "            padding = \"same\")(x)\n",
+    "\n",
+    "x = LeakyReLU(alpha = 0.2)(x)\n",
+    "\n",
+    "x = BatchNormalization()(x)\n",
+    "\n",
+    "x = Dropout(dropout)(x) if dropout > 0.0 else x\n",
+    "\n",
+    "# Output #\n",
+    "\n",
+    "conv5_output = x"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 33,
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "Model: \"model_5\"\n",
+      "_________________________________________________________________\n",
+      "Layer (type)                 Output Shape              Param #   \n",
+      "=================================================================\n",
+      "input_5 (InputLayer)         (None, 4, 4, 64)          0         \n",
+      "_________________________________________________________________\n",
+      "conv2d_10 (Conv2D)           (None, 2, 2, 128)         32896     \n",
+      "_________________________________________________________________\n",
+      "leaky_re_lu_10 (LeakyReLU)   (None, 2, 2, 128)         0         \n",
+      "_________________________________________________________________\n",
+      "batch_normalization_10 (Batc (None, 2, 2, 128)         512       \n",
+      "=================================================================\n",
+      "Total params: 33,408\n",
+      "Trainable params: 33,152\n",
+      "Non-trainable params: 256\n",
+      "_________________________________________________________________\n"
+     ]
+    }
+   ],
+   "source": [
+    "convolution_5 = Model(conv5_input, conv5_output)\n",
+    "layer_conv.append(convolution_5)\n",
+    "convolution_5.summary()"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 34,
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "(2, 2, 128)\n"
+     ]
+    }
+   ],
+   "source": [
+    "conv5_output_shape = (convolution_5.output_shape[1],\n",
+    "                      convolution_5.output_shape[2],\n",
+    "                      convolution_5.output_shape[3])\n",
+    "\n",
+    "print(conv5_output_shape)"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "#### Convolución 6"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 35,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "### Conv 6 ###\n",
+    "\n",
+    "# Input #\n",
+    "\n",
+    "conv6_input_shape = conv5_output_shape\n",
+    "\n",
+    "conv6_input = Input(shape = conv6_input_shape)\n",
+    "\n",
+    "x = conv6_input\n",
+    "\n",
+    "# Layer 0 #\n",
+    "\n",
+    "x = Conv2D(filters = feature_multiplier * 32, \n",
+    "            kernel_size = kernel_size,\n",
+    "            strides = 2,\n",
+    "            kernel_initializer = init_func, \n",
+    "            padding = \"same\")(x)\n",
+    "\n",
+    "x = LeakyReLU(alpha = 0.2)(x)\n",
+    "\n",
+    "x = BatchNormalization()(x)\n",
+    "\n",
+    "x = Dropout(dropout)(x) if dropout > 0.0 else x\n",
+    "\n",
+    "# Output #\n",
+    "\n",
+    "conv6_output = x"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 36,
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "Model: \"model_6\"\n",
+      "_________________________________________________________________\n",
+      "Layer (type)                 Output Shape              Param #   \n",
+      "=================================================================\n",
+      "input_6 (InputLayer)         (None, 2, 2, 128)         0         \n",
+      "_________________________________________________________________\n",
+      "conv2d_11 (Conv2D)           (None, 1, 1, 256)         131328    \n",
+      "_________________________________________________________________\n",
+      "leaky_re_lu_11 (LeakyReLU)   (None, 1, 1, 256)         0         \n",
+      "_________________________________________________________________\n",
+      "batch_normalization_11 (Batc (None, 1, 1, 256)         1024      \n",
+      "=================================================================\n",
+      "Total params: 132,352\n",
+      "Trainable params: 131,840\n",
+      "Non-trainable params: 512\n",
+      "_________________________________________________________________\n"
+     ]
+    }
+   ],
+   "source": [
+    "convolution_6 = Model(conv6_input, conv6_output)\n",
+    "layer_conv.append(convolution_6)\n",
+    "convolution_6.summary()"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 37,
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "(1, 1, 256)\n"
+     ]
+    }
+   ],
+   "source": [
+    "conv6_output_shape = (convolution_6.output_shape[1],\n",
+    "                      convolution_6.output_shape[2],\n",
+    "                      convolution_6.output_shape[3])\n",
+    "\n",
+    "print(conv6_output_shape)"
+   ]
+  },
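+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "The six convolution blocks above repeat one pattern: a few stride-1 convolutions followed by a single stride-2 (downsampling) convolution, each with LeakyReLU, BatchNormalization and optional Dropout. As an illustration only, a hypothetical helper (`encoder_block`, not part of the original code) could generate the same stages:"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "def encoder_block(in_shape, filters, n_pre, ksize):\n",
+    "    # Sketch: n_pre stride-1 convolutions, then one stride-2 convolution.\n",
+    "    block_input = Input(shape = in_shape)\n",
+    "    x = block_input\n",
+    "    for stride in [1] * n_pre + [2]:\n",
+    "        x = Conv2D(filters = filters,\n",
+    "                   kernel_size = ksize,\n",
+    "                   strides = stride,\n",
+    "                   kernel_initializer = init_func,\n",
+    "                   padding = \"same\")(x)\n",
+    "        x = LeakyReLU(alpha = 0.2)(x)\n",
+    "        x = BatchNormalization()(x)\n",
+    "    x = Dropout(dropout)(x) if dropout > 0.0 else x\n",
+    "    return Model(block_input, x)\n",
+    "\n",
+    "# Structurally equivalent to the cells above, e.g.:\n",
+    "# convolution_1 ~ encoder_block(input_shape, feature_multiplier * 1, 2, surface_kernel_size)\n",
+    "# convolution_5 ~ encoder_block(conv4_output_shape, feature_multiplier * 16, 0, kernel_size)"
+   ]
+  },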
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "### Capas del Decoder"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 38,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "layer_deconv = []"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "#### Deconvolución 6"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 39,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "### Deconv 6 ###\n",
+    "\n",
+    "# Input #\n",
+    "\n",
+    "deconv6_input_shape = conv6_output_shape\n",
+    "\n",
+    "deconv6_input = Input(shape = deconv6_input_shape)\n",
+    "\n",
+    "x = deconv6_input\n",
+    "\n",
+    "# Layer 0 #\n",
+    "\n",
+    "x = Conv2DTranspose(filters = feature_multiplier * 16, \n",
+    "                    kernel_size = kernel_size,\n",
+    "                    strides = 2,\n",
+    "                    kernel_initializer = init_func,\n",
+    "                    padding = \"same\")(x)\n",
+    "\n",
+    "x = LeakyReLU(alpha = 0.2)(x)\n",
+    "\n",
+    "x = BatchNormalization()(x)\n",
+    "\n",
+    "x = Dropout(dropout)(x) if dropout > 0.0 else x\n",
+    "\n",
+    "# Output #\n",
+    "\n",
+    "deconv6_output = x"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 40,
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "Model: \"model_7\"\n",
+      "_________________________________________________________________\n",
+      "Layer (type)                 Output Shape              Param #   \n",
+      "=================================================================\n",
+      "input_7 (InputLayer)         (None, 1, 1, 256)         0         \n",
+      "_________________________________________________________________\n",
+      "conv2d_transpose_1 (Conv2DTr (None, 2, 2, 128)         131200    \n",
+      "_________________________________________________________________\n",
+      "leaky_re_lu_12 (LeakyReLU)   (None, 2, 2, 128)         0         \n",
+      "_________________________________________________________________\n",
+      "batch_normalization_12 (Batc (None, 2, 2, 128)         512       \n",
+      "=================================================================\n",
+      "Total params: 131,712\n",
+      "Trainable params: 131,456\n",
+      "Non-trainable params: 256\n",
+      "_________________________________________________________________\n"
+     ]
+    }
+   ],
+   "source": [
+    "deconvolution_6 = Model(deconv6_input, deconv6_output)\n",
+    "layer_deconv.append(deconvolution_6)\n",
+    "deconvolution_6.summary()"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "#### Deconvolución 5"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 41,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "### Deconv 5 ###\n",
+    "\n",
+    "# Input # \n",
+    "\n",
+    "deconv5_input_shape = conv5_output_shape\n",
+    "\n",
+    "deconv5_input = Input(shape = deconv5_input_shape)\n",
+    "\n",
+    "x = deconv5_input\n",
+    "\n",
+    "# Layer 0 #\n",
+    "\n",
+    "x = Conv2DTranspose(filters = feature_multiplier * 8, \n",
+    "                    kernel_size = kernel_size,\n",
+    "                    strides = 2,\n",
+    "                    kernel_initializer = init_func,\n",
+    "                    padding = \"same\")(x)\n",
+    "\n",
+    "x = LeakyReLU(alpha = 0.2)(x)\n",
+    "\n",
+    "x = BatchNormalization()(x)\n",
+    "\n",
+    "# Layer 1 #\n",
+    "\n",
+    "x = Conv2DTranspose(filters = feature_multiplier * 8, \n",
+    "                    kernel_size = kernel_size,\n",
+    "                    strides = 1,\n",
+    "                    kernel_initializer = init_func,\n",
+    "                    padding = \"same\")(x)\n",
+    "\n",
+    "x = LeakyReLU(alpha = 0.2)(x)\n",
+    "\n",
+    "x = BatchNormalization()(x)\n",
+    "\n",
+    "x = Dropout(dropout)(x) if dropout > 0.0 else x\n",
+    "\n",
+    "# Output #\n",
+    "\n",
+    "deconv5_output = x"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 42,
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "Model: \"model_8\"\n",
+      "_________________________________________________________________\n",
+      "Layer (type)                 Output Shape              Param #   \n",
+      "=================================================================\n",
+      "input_8 (InputLayer)         (None, 2, 2, 128)         0         \n",
+      "_________________________________________________________________\n",
+      "conv2d_transpose_2 (Conv2DTr (None, 4, 4, 64)          32832     \n",
+      "_________________________________________________________________\n",
+      "leaky_re_lu_13 (LeakyReLU)   (None, 4, 4, 64)          0         \n",
+      "_________________________________________________________________\n",
+      "batch_normalization_13 (Batc (None, 4, 4, 64)          256       \n",
+      "_________________________________________________________________\n",
+      "conv2d_transpose_3 (Conv2DTr (None, 4, 4, 64)          16448     \n",
+      "_________________________________________________________________\n",
+      "leaky_re_lu_14 (LeakyReLU)   (None, 4, 4, 64)          0         \n",
+      "_________________________________________________________________\n",
+      "batch_normalization_14 (Batc (None, 4, 4, 64)          256       \n",
+      "=================================================================\n",
+      "Total params: 49,792\n",
+      "Trainable params: 49,536\n",
+      "Non-trainable params: 256\n",
+      "_________________________________________________________________\n"
+     ]
+    }
+   ],
+   "source": [
+    "deconvolution_5 = Model(deconv5_input, deconv5_output)\n",
+    "layer_deconv.append(deconvolution_5)\n",
+    "deconvolution_5.summary()"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "#### Deconvolución 4"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 43,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "### Deconv 4 ###\n",
+    "\n",
+    "# Input #\n",
+    "\n",
+    "deconv4_input_shape = conv4_output_shape\n",
+    "\n",
+    "deconv4_input = Input(shape = deconv4_input_shape)\n",
+    "\n",
+    "x = deconv4_input\n",
+    "\n",
+    "# Layer 0 #\n",
+    "\n",
+    "x = Conv2DTranspose(filters = feature_multiplier * 4, \n",
+    "                    kernel_size = kernel_size,\n",
+    "                    strides = 2,\n",
+    "                    kernel_initializer = init_func,\n",
+    "                    padding = \"same\")(x)\n",
+    "\n",
+    "x = LeakyReLU(alpha = 0.2)(x)\n",
+    "\n",
+    "x = BatchNormalization()(x)\n",
+    "\n",
+    "# Layer 1 #\n",
+    "\n",
+    "x = Conv2DTranspose(filters = feature_multiplier * 4, \n",
+    "                    kernel_size = kernel_size,\n",
+    "                    strides = 1,\n",
+    "                    kernel_initializer = init_func,\n",
+    "                    padding = \"same\")(x)\n",
+    "\n",
+    "x = LeakyReLU(alpha = 0.2)(x)\n",
+    "\n",
+    "x = BatchNormalization()(x)\n",
+    "\n",
+    "x = Dropout(dropout)(x) if dropout > 0.0  else x\n",
+    "\n",
+    "# Output #\n",
+    "\n",
+    "deconv4_output = x"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 44,
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "Model: \"model_9\"\n",
+      "_________________________________________________________________\n",
+      "Layer (type)                 Output Shape              Param #   \n",
+      "=================================================================\n",
+      "input_9 (InputLayer)         (None, 4, 4, 64)          0         \n",
+      "_________________________________________________________________\n",
+      "conv2d_transpose_4 (Conv2DTr (None, 8, 8, 32)          8224      \n",
+      "_________________________________________________________________\n",
+      "leaky_re_lu_15 (LeakyReLU)   (None, 8, 8, 32)          0         \n",
+      "_________________________________________________________________\n",
+      "batch_normalization_15 (Batc (None, 8, 8, 32)          128       \n",
+      "_________________________________________________________________\n",
+      "conv2d_transpose_5 (Conv2DTr (None, 8, 8, 32)          4128      \n",
+      "_________________________________________________________________\n",
+      "leaky_re_lu_16 (LeakyReLU)   (None, 8, 8, 32)          0         \n",
+      "_________________________________________________________________\n",
+      "batch_normalization_16 (Batc (None, 8, 8, 32)          128       \n",
+      "=================================================================\n",
+      "Total params: 12,608\n",
+      "Trainable params: 12,480\n",
+      "Non-trainable params: 128\n",
+      "_________________________________________________________________\n"
+     ]
+    }
+   ],
+   "source": [
+    "deconvolution_4 = Model(deconv4_input, deconv4_output)\n",
+    "layer_deconv.append(deconvolution_4)\n",
+    "deconvolution_4.summary()"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "#### Deconvolución 3"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 45,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "### Deconv 3 ###\n",
+    "\n",
+    "# Input #\n",
+    "\n",
+    "deconv3_input_shape = conv3_output_shape\n",
+    "\n",
+    "deconv3_input = Input(shape = deconv3_input_shape)\n",
+    "\n",
+    "x = deconv3_input\n",
+    "\n",
+    "# Layer 0 #\n",
+    "\n",
+    "x = Conv2DTranspose(filters = feature_multiplier * 2, \n",
+    "                    kernel_size = kernel_size,\n",
+    "                    strides = 2,\n",
+    "                    kernel_initializer = init_func,\n",
+    "                    padding = \"same\")(x)\n",
+    "\n",
+    "x = LeakyReLU(alpha = 0.2)(x)\n",
+    "\n",
+    "x = BatchNormalization()(x)\n",
+    "\n",
+    "# Layer 1 #\n",
+    "\n",
+    "x = Conv2DTranspose(filters = feature_multiplier * 2, \n",
+    "                    kernel_size = kernel_size,\n",
+    "                    strides = 1,\n",
+    "                    kernel_initializer = init_func,\n",
+    "                    padding = \"same\")(x)\n",
+    "\n",
+    "x = LeakyReLU(alpha = 0.2)(x)\n",
+    "\n",
+    "x = BatchNormalization()(x)\n",
+    "\n",
+    "x = Dropout(dropout)(x) if dropout > 0.0 else x\n",
+    "\n",
+    "# Output #\n",
+    "\n",
+    "deconv3_output = x"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 46,
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "Model: \"model_10\"\n",
+      "_________________________________________________________________\n",
+      "Layer (type)                 Output Shape              Param #   \n",
+      "=================================================================\n",
+      "input_10 (InputLayer)        (None, 8, 8, 32)          0         \n",
+      "_________________________________________________________________\n",
+      "conv2d_transpose_6 (Conv2DTr (None, 16, 16, 16)        2064      \n",
+      "_________________________________________________________________\n",
+      "leaky_re_lu_17 (LeakyReLU)   (None, 16, 16, 16)        0         \n",
+      "_________________________________________________________________\n",
+      "batch_normalization_17 (Batc (None, 16, 16, 16)        64        \n",
+      "_________________________________________________________________\n",
+      "conv2d_transpose_7 (Conv2DTr (None, 16, 16, 16)        1040      \n",
+      "_________________________________________________________________\n",
+      "leaky_re_lu_18 (LeakyReLU)   (None, 16, 16, 16)        0         \n",
+      "_________________________________________________________________\n",
+      "batch_normalization_18 (Batc (None, 16, 16, 16)        64        \n",
+      "=================================================================\n",
+      "Total params: 3,232\n",
+      "Trainable params: 3,168\n",
+      "Non-trainable params: 64\n",
+      "_________________________________________________________________\n"
+     ]
+    }
+   ],
+   "source": [
+    "deconvolution_3 = Model(deconv3_input, deconv3_output)\n",
+    "layer_deconv.append(deconvolution_3)\n",
+    "deconvolution_3.summary()"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "#### Deconvolución 2"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 47,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "### Deconv 2 ###\n",
+    "\n",
+    "# Input #\n",
+    "\n",
+    "deconv2_input_shape = conv2_output_shape\n",
+    "\n",
+    "deconv2_input = Input(shape = deconv2_input_shape)\n",
+    "\n",
+    "x = deconv2_input\n",
+    "\n",
+    "# Layer 0 #\n",
+    "\n",
+    "x = Conv2DTranspose(filters = feature_multiplier * 1, \n",
+    "                    kernel_size = kernel_size,\n",
+    "                    strides = 2,\n",
+    "                    kernel_initializer = init_func,\n",
+    "                    padding = \"same\")(x)\n",
+    "\n",
+    "x = LeakyReLU(alpha = 0.2)(x)\n",
+    "\n",
+    "x = BatchNormalization()(x) \n",
+    "\n",
+    "# Layer 1 #\n",
+    "\n",
+    "x = Conv2DTranspose(filters = feature_multiplier * 1, \n",
+    "                    kernel_size = kernel_size,\n",
+    "                    strides = 1,\n",
+    "                    kernel_initializer = init_func,\n",
+    "                    padding = \"same\")(x)\n",
+    "\n",
+    "x = LeakyReLU(alpha = 0.2)(x)\n",
+    "\n",
+    "x = BatchNormalization()(x)\n",
+    "\n",
+    "x = Dropout(dropout)(x) if dropout > 0.0 else x\n",
+    "\n",
+    "# Output # \n",
+    "\n",
+    "deconv2_output = x"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 48,
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "Model: \"model_11\"\n",
+      "_________________________________________________________________\n",
+      "Layer (type)                 Output Shape              Param #   \n",
+      "=================================================================\n",
+      "input_11 (InputLayer)        (None, 16, 16, 16)        0         \n",
+      "_________________________________________________________________\n",
+      "conv2d_transpose_8 (Conv2DTr (None, 32, 32, 8)         520       \n",
+      "_________________________________________________________________\n",
+      "leaky_re_lu_19 (LeakyReLU)   (None, 32, 32, 8)         0         \n",
+      "_________________________________________________________________\n",
+      "batch_normalization_19 (Batc (None, 32, 32, 8)         32        \n",
+      "_________________________________________________________________\n",
+      "conv2d_transpose_9 (Conv2DTr (None, 32, 32, 8)         264       \n",
+      "_________________________________________________________________\n",
+      "leaky_re_lu_20 (LeakyReLU)   (None, 32, 32, 8)         0         \n",
+      "_________________________________________________________________\n",
+      "batch_normalization_20 (Batc (None, 32, 32, 8)         32        \n",
+      "=================================================================\n",
+      "Total params: 848\n",
+      "Trainable params: 816\n",
+      "Non-trainable params: 32\n",
+      "_________________________________________________________________\n"
+     ]
+    }
+   ],
+   "source": [
+    "deconvolution_2 = Model(deconv2_input, deconv2_output)\n",
+    "layer_deconv.append(deconvolution_2)\n",
+    "deconvolution_2.summary()"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "#### Deconvolución 1"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 49,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "### Deconv 1 ###\n",
+    "\n",
+    "# Input #\n",
+    "\n",
+    "deconv1_input_shape = conv1_output_shape\n",
+    "\n",
+    "deconv1_input = Input(shape = deconv1_input_shape)\n",
+    "\n",
+    "x = deconv1_input\n",
+    "\n",
+    "# Layer 0 #\n",
+    "\n",
+    "x = Conv2DTranspose(input_shape[-1],\n",
+    "                    kernel_size = surface_kernel_size,\n",
+    "                    strides = 2,\n",
+    "                    padding = \"same\",\n",
+    "                    kernel_initializer = init_func)(x)\n",
+    "\n",
+    "x = Activation(\"linear\")(x)\n",
+    "\n",
+    "# Output #\n",
+    "\n",
+    "deconv1_output = x"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 50,
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "Model: \"model_12\"\n",
+      "_________________________________________________________________\n",
+      "Layer (type)                 Output Shape              Param #   \n",
+      "=================================================================\n",
+      "input_12 (InputLayer)        (None, 32, 32, 8)         0         \n",
+      "_________________________________________________________________\n",
+      "conv2d_transpose_10 (Conv2DT (None, 64, 64, 1)         129       \n",
+      "_________________________________________________________________\n",
+      "activation_1 (Activation)    (None, 64, 64, 1)         0         \n",
+      "=================================================================\n",
+      "Total params: 129\n",
+      "Trainable params: 129\n",
+      "Non-trainable params: 0\n",
+      "_________________________________________________________________\n"
+     ]
+    }
+   ],
+   "source": [
+    "deconvolution_1 = Model(deconv1_input, deconv1_output)\n",
+    "layer_deconv.append(deconvolution_1)\n",
+    "deconvolution_1.summary()"
+   ]
+  },
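+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "The deconvolution blocks mirror the encoder using `Conv2DTranspose` (one stride-2 upsampling transpose, optionally followed by a stride-1 one); only Deconvolution 1 differs, ending in a linear activation instead of LeakyReLU + BatchNormalization. A mirrored helper, again only a hypothetical sketch:"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "def decoder_block(in_shape, filters, n_post, ksize):\n",
+    "    # Sketch: one stride-2 transposed convolution, then n_post stride-1 ones.\n",
+    "    block_input = Input(shape = in_shape)\n",
+    "    x = block_input\n",
+    "    for stride in [2] + [1] * n_post:\n",
+    "        x = Conv2DTranspose(filters = filters,\n",
+    "                            kernel_size = ksize,\n",
+    "                            strides = stride,\n",
+    "                            kernel_initializer = init_func,\n",
+    "                            padding = \"same\")(x)\n",
+    "        x = LeakyReLU(alpha = 0.2)(x)\n",
+    "        x = BatchNormalization()(x)\n",
+    "    x = Dropout(dropout)(x) if dropout > 0.0 else x\n",
+    "    return Model(block_input, x)\n",
+    "\n",
+    "# e.g. deconvolution_5 ~ decoder_block(conv5_output_shape, feature_multiplier * 8, 1, kernel_size)"
+   ]
+  },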
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Damos la vuelta a la lista layer_deconv par aque su orden coincida con layer_conv:"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 51,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "layer_deconv.reverse()"
+   ]
+  },
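+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "After the reversal, `layer_deconv[i]` is the spatial inverse of `layer_conv[i]`. A quick sanity check (illustrative, not part of the original run):"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "for conv, deconv in zip(layer_conv, layer_deconv):\n",
+    "    # Each pair should downsample and then restore the same spatial resolution.\n",
+    "    print(conv.input_shape, \"->\", conv.output_shape, \"->\", deconv.output_shape)"
+   ]
+  },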
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "## Ensamblando el Autoencoder"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "### Optimizador"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Importamos el optimizador Adam:"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 52,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "from keras.optimizers import Adam"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Definimos los parametros del optimizador:"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 53,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "adam_learning_rate = 0.00015  # El learning rate de Adam (tamaño step)\n",
+    "adam_epsilon = 1e-8  # Previene problemas de división por 0.\n",
+    "adam_lr_decay = 1e-05  # Learning rate decay"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Definimos el optimizador:"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 54,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "optimizer = Adam(lr = adam_learning_rate, \n",
+    "                 epsilon = adam_epsilon, \n",
+    "                 decay = adam_lr_decay)"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "### Modelo por capas"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 55,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "stages = []"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 56,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "stage_input = Input(shape = input_shape)\n",
+    "x = stage_input\n",
+    "x = layer_conv[0](x)\n",
+    "x = layer_deconv[0](x)\n",
+    "stage_output = x\n",
+    "\n",
+    "stage_1 = Model(inputs = stage_input, outputs = stage_output)\n",
+    "stage_1.compile(optimizer = optimizer, loss = \"mse\", metrics = [\"mae\"])\n",
+    "stages.append(stage_1)"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 57,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "stage_input = Input(shape = input_shape)\n",
+    "x = stage_input\n",
+    "x = layer_conv[0](x)\n",
+    "x = layer_conv[1](x)\n",
+    "x = layer_deconv[1](x)\n",
+    "x = layer_deconv[0](x)\n",
+    "stage_output = x\n",
+    "\n",
+    "stage_2 = Model(inputs = stage_input, outputs = stage_output)\n",
+    "stage_2.compile(optimizer = optimizer, loss = \"mse\", metrics = [\"mae\"])\n",
+    "stages.append(stage_2)"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 58,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "stage_input = Input(shape = input_shape)\n",
+    "x = stage_input\n",
+    "x = layer_conv[0](x)\n",
+    "x = layer_conv[1](x)\n",
+    "x = layer_conv[2](x)\n",
+    "x = layer_deconv[2](x)\n",
+    "x = layer_deconv[1](x)\n",
+    "x = layer_deconv[0](x)\n",
+    "stage_output = x\n",
+    "\n",
+    "stage_3 = Model(inputs = stage_input, outputs = stage_output)\n",
+    "stage_3.compile(optimizer = optimizer, loss = \"mse\", metrics = [\"mae\"])\n",
+    "stages.append(stage_3)"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 59,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "stage_input = Input(shape = input_shape)\n",
+    "x = stage_input\n",
+    "x = layer_conv[0](x)\n",
+    "x = layer_conv[1](x)\n",
+    "x = layer_conv[2](x)\n",
+    "x = layer_conv[3](x)\n",
+    "x = layer_deconv[3](x)\n",
+    "x = layer_deconv[2](x)\n",
+    "x = layer_deconv[1](x)\n",
+    "x = layer_deconv[0](x)\n",
+    "stage_output = x\n",
+    "\n",
+    "stage_4 = Model(inputs = stage_input, outputs = stage_output)\n",
+    "stage_4.compile(optimizer = optimizer, loss = \"mse\", metrics = [\"mae\"])\n",
+    "stages.append(stage_4)"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 60,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "stage_input = Input(shape = input_shape)\n",
+    "x = stage_input\n",
+    "x = layer_conv[0](x)\n",
+    "x = layer_conv[1](x)\n",
+    "x = layer_conv[2](x)\n",
+    "x = layer_conv[3](x)\n",
+    "x = layer_conv[4](x)\n",
+    "x = layer_deconv[4](x)\n",
+    "x = layer_deconv[3](x)\n",
+    "x = layer_deconv[2](x)\n",
+    "x = layer_deconv[1](x)\n",
+    "x = layer_deconv[0](x)\n",
+    "stage_output = x\n",
+    "\n",
+    "stage_5 = Model(inputs = stage_input, outputs = stage_output)\n",
+    "stage_5.compile(optimizer = optimizer, loss = \"mse\", metrics = [\"mae\"])\n",
+    "stages.append(stage_5)"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 61,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "stage_input = Input(shape = input_shape)\n",
+    "x = stage_input\n",
+    "x = layer_conv[0](x)\n",
+    "x = layer_conv[1](x)\n",
+    "x = layer_conv[2](x)\n",
+    "x = layer_conv[3](x)\n",
+    "x = layer_conv[4](x)\n",
+    "x = layer_conv[5](x)\n",
+    "x = layer_deconv[5](x)\n",
+    "x = layer_deconv[4](x)\n",
+    "x = layer_deconv[3](x)\n",
+    "x = layer_deconv[2](x)\n",
+    "x = layer_deconv[1](x)\n",
+    "x = layer_deconv[0](x)\n",
+    "stage_output = x\n",
+    "\n",
+    "stage_6 = Model(inputs = stage_input, outputs = stage_output)\n",
+    "stage_6.compile(optimizer = optimizer, loss = \"mse\", metrics = [\"mae\"])\n",
+    "stages.append(stage_6)"
+   ]
+  },
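+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "The six stages differ only in how deep they encode before decoding. A minimal sketch (structurally equivalent to the six cells above, shown only as an illustration) builds the same list in a loop:"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "stages = []\n",
+    "\n",
+    "for depth in range(1, len(layer_conv) + 1):\n",
+    "    stage_input = Input(shape = input_shape)\n",
+    "    x = stage_input\n",
+    "    for conv in layer_conv[:depth]:  # Encode down to this depth.\n",
+    "        x = conv(x)\n",
+    "    for deconv in reversed(layer_deconv[:depth]):  # Decode back up, deepest first.\n",
+    "        x = deconv(x)\n",
+    "    stage = Model(inputs = stage_input, outputs = x)\n",
+    "    stage.compile(optimizer = optimizer, loss = \"mse\", metrics = [\"mae\"])\n",
+    "    stages.append(stage)"
+   ]
+  },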
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "### Pre-entrenamiento"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 63,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "pre_epochs = 1\n",
+    "pre_batch_size = 128"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 64,
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "Train on 180000 samples, validate on 20000 samples\n",
+      "Epoch 1/1\n",
+      "180000/180000 [==============================] - 403s 2ms/step - loss: 0.0222 - mae: 0.0630 - val_loss: 0.0021 - val_mae: 0.0301\n",
+      "Train on 180000 samples, validate on 20000 samples\n",
+      "Epoch 1/1\n",
+      "180000/180000 [==============================] - 488s 3ms/step - loss: 0.0070 - mae: 0.0379 - val_loss: 0.0013 - val_mae: 0.0204\n",
+      "Train on 180000 samples, validate on 20000 samples\n",
+      "Epoch 1/1\n",
+      "180000/180000 [==============================] - 539s 3ms/step - loss: 0.0021 - mae: 0.0212 - val_loss: 8.9808e-04 - val_mae: 0.0144\n",
+      "Train on 180000 samples, validate on 20000 samples\n",
+      "Epoch 1/1\n",
+      "180000/180000 [==============================] - 554s 3ms/step - loss: 9.2454e-04 - mae: 0.0128 - val_loss: 6.4195e-04 - val_mae: 0.0110\n",
+      "Train on 180000 samples, validate on 20000 samples\n",
+      "Epoch 1/1\n",
+      "180000/180000 [==============================] - 569s 3ms/step - loss: 9.6373e-04 - mae: 0.0124 - val_loss: 7.3874e-04 - val_mae: 0.0113\n",
+      "Train on 180000 samples, validate on 20000 samples\n",
+      "Epoch 1/1\n",
+      "180000/180000 [==============================] - 576s 3ms/step - loss: 9.5400e-04 - mae: 0.0124 - val_loss: 7.8057e-04 - val_mae: 0.0112\n"
+     ]
+    }
+   ],
+   "source": [
+    "for stage in stages:\n",
+    "    autoencoder_layer = stage.fit(train_data, train_data,\n",
+    "                                  epochs = pre_epochs,\n",
+    "                                  batch_size = pre_batch_size,\n",
+    "                                  validation_data = (vali_data, vali_data),\n",
+    "                                  shuffle = True)"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "### Entrenamiento"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Modelo autoencoder una vez finalizado el pre-entrenamiento por capas:"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 65,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "autoencoder = stages[-1]"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Parametros del entrenamiento:"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 66,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "training_epochs = epochs_autoencoder  # Número de vueltas completas al set de entrenamiento.\n",
+    "batch_size = batch_size_autoencoder  # Número de ejemplos antes de calcular el error de la función de coste."
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Entrenamos el modelo autoencoder:"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 67,
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "Train on 180000 samples, validate on 20000 samples\n",
+      "Epoch 1/5\n",
+      "180000/180000 [==============================] - 1005s 6ms/step - loss: 0.0012 - mae: 0.0143 - val_loss: 0.0150 - val_mae: 0.0641\n",
+      "Epoch 2/5\n",
+      "180000/180000 [==============================] - 972s 5ms/step - loss: 8.5839e-04 - mae: 0.0126 - val_loss: 0.0338 - val_mae: 0.1079\n",
+      "Epoch 3/5\n",
+      "180000/180000 [==============================] - 989s 5ms/step - loss: 7.6021e-04 - mae: 0.0122 - val_loss: 0.0157 - val_mae: 0.0649\n",
+      "Epoch 4/5\n",
+      "180000/180000 [==============================] - 990s 6ms/step - loss: 7.0600e-04 - mae: 0.0119 - val_loss: 0.0199 - val_mae: 0.0751\n",
+      "Epoch 5/5\n",
+      "180000/180000 [==============================] - 1012s 6ms/step - loss: 6.7178e-04 - mae: 0.0116 - val_loss: 0.0192 - val_mae: 0.0743\n"
+     ]
+    }
+   ],
+   "source": [
+    "autoencoder_train = autoencoder.fit(train_data, train_data, \n",
+    "                                    epochs = training_epochs,\n",
+    "                                    batch_size = batch_size,\n",
+    "                                    verbose = 1,\n",
+    "                                    validation_data = (vali_data, vali_data),\n",
+    "                                    shuffle = True)"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Los datos del entrenamiento se guardan en \"autoencoder_train\"."
+   ]
+  },
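+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "For example (a sketch; `autoencoder_train` is the Keras `History` object returned by `fit`), the per-epoch curves can be read from its `history` dictionary, whose keys match the training log above:"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "print(autoencoder_train.history.keys())  # loss, mae, val_loss, val_mae\n",
+    "print(autoencoder_train.history[\"val_mae\"])  # One value per epoch."
+   ]
+  },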
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "### Plot Errores"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Plot de Loss (MSE y MAE) y Validation Loss (MSE y MAE) respecto a las epochs."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 68,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "import matplotlib.pyplot as plt"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 69,
+   "metadata": {},
+   "outputs": [
+    {
+     "data": {
+      "image/png": "iVBORw0KGgoAAAANSUhEUgAAA3YAAAEvCAYAAAAJs1ObAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4xLjEsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy8QZhcZAAAgAElEQVR4nOzdd3hUZfrG8e+ThIQqKMVCEZSiYEGJYMVCERRBV8De1hXrqmt3Veyu3bWusjbE9SfFFqVZwK4IqIigKKBSLFTpLcnz++MdNMZAJiSZk5m5P9c1V2bOvOfMMxckM/c5bzF3R0RERERERJJXRtQFiIiIiIiISPko2ImIiIiIiCQ5BTsREREREZEkp2AnIiIiIiKS5BTsREREREREkpyCnYiIiIiISJLLiqeRmfUA7gcygcfd/fZiz+cAzwAdgMXAce7+vZl1BAZtbAbc4O4vxfb5HlgBFAD57p5bWh0NGjTw5s2bx1OyiIgkscmTJy9y94ZR15Es9PkoIpI+NvUZWWqwM7NM4GGgGzAPmGhmee4+vUizM4Gl7t7SzI4H7gCOA74Ect0938y2B6aY2avunh/b71B3XxTvm2jevDmTJk2Kt7mIiCQpM/sh6hqSiT4fRUTSx6Y+I+PpitkRmOnus919PfA80KdYmz7A4Nj9EUAXMzN3X10kxFUHtBq6iIiIiIhIBYsn2DUG5hZ5PC+2rcQ2sSC3DKgPYGadzGwaMBU4p0jQc+B1M5tsZgO2/C2IiIiIiIikt7jG2JWHu08A2pnZrsBgMxvt7muBA919vpk1At4ws6/d/d3i+8dC3wCAZs2aVXa5IiIiIiIiSSeeK3bzgaZFHjeJbSuxjZllAXUJk6j8xt2/AlYCu8Uez4/9XAC8ROjy+SfuPsjdc909t2FDjaMXEREREREpLp5gNxFoZWYtzCwbOB7IK9YmDzgtdr8vMM7dPbZPFoCZ7QjsAnxvZrXMrE5sey2gO2GiFRERERERESmjUrtixma0vAAYS1ju4El3n2ZmNwGT3D0PeAIYYmYzgSWE8AdwIHCVmW0ACoHz3H2Rme0EvGRmG2t4zt3HVPSbExERERERSQdxjbFz91HAqGLbBha5vxboV8J+Q4AhJWyfDexZ1mJFRERERETkz+LpiikiIiIiIiJVmIKdiIiIiIhIklOwk/Ir2ACz34HCgqgrERERqVoWfQsLv4m6ChFJAwp2Un5v3w7P9IaP/xN1JSIiIlXH0h/g8a7htnhW1NWISIpTsJPyWTgDPrgfMnNg3C2w5LuoKxIREYnehrUw7FRwBzMYegqsXxV1VSKSwhTsZMu5w8hLIbsmnDkWMjLhtX+E7SIiIulszFXw0+dwzH+g7xOwYDq8epE+I0Wk0ijYyZb7Yih8/x50vQF22Cv8nD0epjwfbV0iIiJRmvI8TH4KDrgIdjkSWnaFQ6+BqcPhk0FRVyciKUrBTrbM6iUw9hposg/sfXrYlnsmNOkIY6+GlQsjLU9ERCQSv0yHVy+GHQ+Ewwb+vv2gS6F1Txj7T/jho+jqE5GUpWAnW+atG2HNUuh1H2TE/htlZEDvB2HdytAFRUREJJ2sXQ7DToHqW0HfJyEz6/fnMjLgmEehXjMYfhqs+Dm6OkUkJSnYSdnNmQCTn4Z9z4Xtdv/jc412gc6XwZcj4JuxkZQnIiKScO6Q9/cwiVjfJ6HOtn9uU6MeHPdsCIDDzwjLBYmIVBAFOymbgnwYeQls1RgO2cRVuQP/AQ13gdcugXUrElufiIhIFCY8CtNfhi4DofmBm263bbvQu2XOh/D6dYmrT0RSnoKdlM2ER+GXL6HH7ZBTp+Q2WTnhQ2v5/LAEgoiISCqbMwFevxbaHBkmTCnNHv2g0zkw4T8wdUTl1yciaUHBTuK3bB6Mvw1aHQ67HrX5tk07QsezYMJjMHdiYuoTERFJtJULYfjpULcJHP1IWLMuHt1vgab7hu6bv0yr1BJFJD0o2En8Rl8JXghH3BnfB1eXgbDVDuFDK3995dcnIiKSSIUF8OLfYPVi6P9MGEMXr8xq0H9w6P0y9GRYu6zy6hSRtKBgJ/GZMRq+fg0OvgK2bh7fPjl14Mh7YeFX8MG/K7U8ERGRhHv7dpj9Nhx5N2y/Z9n3r7Md9BsMv86Bl86BwsIKL1FE0oeCnZRu/SoYdUWYEGW/C8q2b5sesNux8O5dsHBG5dQnIiKSaN++Ae/eCe1Phr1P3fLj7LgfdL8VZoyC9++puPpEJO0o2Enp3rkTls0JV9+yssu+f4/boVpNyLtQZyNFRCT5/ToHXjwLtt0Njrir/MfrdDbs3g/G3Qoz3yr/8UQkLSnYyeYt+Ao+egjanwTND9iyY9RuBIffBnM/hslPVmx9IiIiiZS/DoadFsbX9X8GsmuW/5hmcNT90KgtvHAmLP2h/McUkbSjYCebVlgIr/0jjJXrdlP5jtX+RGhxMLxxAyybXyHliYiIJNzYf8KPn4YZMOvvXHHHza4Fxw0Jn73DToENayru2CKSFhTsZNOmPAdzPgqhrlaD8h3LDI76NxTmw8hLwb1iahQREUmUL4bDxMfDePPSlv3ZEvV3hr88Bj9NgZGX6bNSRMpEwU5KtmoxvH5dWGOn/ckVc8xtdoJD/wnfjIbpL1fMMUVEqigz62FmM8xsppldVcLznc3sUzPLN7O+xZ47zcy+jd1OS1zVskkLvoZXL4Rm+0HXGyrvddr0hM6Xw+fPwuSnK+91RCTlKNhJyd4cCOuWQ697IaMC/5vse16YEnrU5bB6ScUdV0SkCjGzTOBhoCfQFjjBzNoWazYHOB14rti+2wDXA52AjsD1ZrZ1Zdcsm7FuRegemV0b+j4V1qCrTIdcDTt3gdFXwLzJlftaIpIyFOzkz374CD57FvY7H7ZtV7HHzsyC3g+GUPfGdRV7bBGRqqMjMNPdZ7v7euB5oE/RBu7+vbt/ARSfLvhw4A13X+LuS4E3gB6JKFpK4B5mdV48E/o+AVttX/mvmZEJxz4e1rkbdgqsWlT5rykiSU/BTv6oYEOYMKVuUzj4ysp5je33hP3/HsLj7Lcr5zVERKLVGJhb5PG82LbK3lcq2if/hWkvwmHXQovOiXvdmttA/yGwejGMOAMK8hP32iKSlBTs5I8+ehgWfgU97wwzdFWWQ66CrVvAqxfD+tWV9zoiIinKzAaY2SQzm7Rw4cKoy0lNcyeGWTBb94AD/pH419+hfVhD9rt3YVw5Z6cWkZSnYCe/W/oDvH07tDkSdjmicl+rWg3o/QAs/Q7eub1yX0tEJPHmA02LPG4S21Zh+7r7IHfPdffchg0bbnGhsgmrFsPw00PXy2Merdjx5mWx10nQ4Qz44H6Y/ko0NYhIUlCwk8A9DNI2g553JOY1W3SGvU6BDx+CHz9PzGuKiCTGRKCVmbUws2zgeCAvzn3HAt3NbOvYpCndY9skUQoL4MW/waoFYRHyGhHPXdPzDmjcAV4+DxZ+E20tIlJlKdhJ8PVI+GZMmImrXtPS21eU7jdDzfqQ93
eNHxCRlOHu+cAFhED2FTDM3aeZ2U1m1hvAzPYxs3lAP+AxM5sW23cJcDMhHE4Eboptk0R59y6YNS4MS9hhr6irgaycEDCzqsPQk8MsnSIixcQV7OJYiyfHzIbGnp9gZs1j2zua2eex2xQzOybeY0oCrVsZrtY1agf7npvY166xNRxxF/z8BXz8cGJfW0SkErn7KHdv7e47u/utsW0D3T0vdn+iuzdx91ruXt/d2xXZ90l3bxm7PRXVe0hLM98MwxL2PAE6nB51Nb+r2wT6PgmLv4VXztfi5SLyJ6UGuzjX4jkTWOruLYH7gI19+b4Ect29PWGq5sfMLCvOY0qivHM7LJ8f1qyr7LV5StK2TxjXN/42WDwr8a8vIiICsGwevHAWNNo1TFpiFnVFf7TTwWFx9OmvwIcPRl2NiFQx8VyxK3UtntjjwbH7I4AuZmbuvjrWHQWgOrDx9FI8x5RE+PlL+OgR2PtUaLZvNDWYwZF3Q2Y2vHaxzkKKiEji5a+HYaeFZX/6D4HsmlFXVLL9L4Rde8Ob14fZMkVEYuIJdvGsp/Nbm1iQWwbUBzCzTrFxA1OBc2LPa42eqqCwMKxZV6MedL0x2lq22iGchfzuXfj8f9HWIiIi6ef1a2H+JOjzEDRoGXU1m2YGRz8C9VvC8DNgWbyTrYpIqqv0yVPcfUJs3MA+wNVmVr0s+2udnkr02TMw7xPofktYCDVqHc6AZvvB2Gtg5YKoqxERkXQxdQR88hjsez60OzrqakqXUweOexby18KwUyF/XdQViUgVEE+wi2c9nd/amFkWUBdYXLSBu38FrAR2i/OYG/fTOj2VYeVCeON62PHAMEC8KsjIgKMegA2rYfSVUVcjIiLpYOEMyLsQmnaCbhH3XimLhm2gz8PhKuOYq6OuRkSqgHiCXTxr8eQBp8Xu9wXGubvH9skCMLMdgV2A7+M8plSmN66D9avChClVaXB4w9bQ+QqY9iLMGB11NSIiksrWrQxXvKrVgH5PRzOBWHm0OzqMuZv0BHz+XNTViEjESg128azFAzwB1DezmcAlwMblCw4EppjZ58BLwHnuvmhTx6zINyab8d17MOX/4IALwxm/quaAi6BRWxh5KaxdHnU1IiKSitzDhF0LZ0DfJ8JY72TU5XpoflAYM//TlKirEZEImSfRDIS5ubk+adKkqMtIbvnr4dEDQr/88yZU3Vm/5k2Cx7vCPn8LM2aKSFoxs8nunht1HclCn49bYOLj4QTiodfCwZdHXU35rFwIgw6GjEwY8E7VGDcvIpVmU5+RlT55ilQxHz4Ai76BI+6puqEOoEkudDonfPDO+TjqakREJJXMnxzGpbXqDgddGnU15Ve7IfR/Blb8DC+eBYUFUVckIhFQsEsnS76Dd+8K69+07h51NaU77Fqo2yQMateMXyIiUhFWLwnr1dXeDo55LEzclQqa5ELPO2Dmm/D27VFXIyIRSJG/ZlIqdxh1GWRkQY8k+YOfUxt63QeLZsB790ZdjYiIJLvCQnhxAKz8BfoPTr0uix3OgPYnwbt3wowxUVcjIgmmYJcupr8SzuIdeg3UTaK14Ft1g937w3v3wIKvoq5GRESS2Xv3wMw3oMe/oPHeUVdT8czgyHtg+z1DgF08K+qKRCSBFOzSwboVMOYq2G536Dgg6mrKrse/wmKseX/XuAEREdkys8bD+FvDycLcM6OupvJUqwH9h4QupkNPgfWro65IRBJEwS4djL8tDKju9W/IzIq6mrKr1SB0H503ESY+EXU1IiKSbJbNhxfOhIa7wFH/rlrrt1aGrXeEYx+HBdPh1YvCcAwRSXkKdqnupykw4VHIPSMMrE5We/SHnbvAWzfCr3OjrkZERJJFwQYYfnqYhKv/M5BdK+qKEqNl1zD8Yuow+GRQ1NWISAIo2KWywgJ49WKoWR+6DIy6mvIxg173ghfCyEt09lFEROLzxkCY9wn0fhAato66msQ66FJo3RPG/lNLB4mkAQW7VDb5KfjxUzj8NqixddTVlN/WzcMSCN++Dl++EHU1IiJS1U17CT5+JKyLuttfoq4m8TIy4JhHoV6zsMTDil+irkhEKpGCXapa8Qu8eRO06Ay794u6morT6RzYYW8YfWVYi0hERKQki76FVy6AJvtAt5ujriY6NerBcc/C2mWhS2rBhqgrEpFKomCXql6/BvLXwJH3ptYg8YxM6P0ArP0Vxl4TdTUiIlIVrV8Fw06FrBzo9zRkZUddUbS2bRe6os75MHRNFZGUpGCXima/DVOHwwEXQ4NWUVdT8bbbHQ64CKY8B7PGRV2NiIhUJe7w2iVh7dNjH4e6TaKuqGrYo1/o9fLxIzB1RNTViEglULBLNRvWhg+0rVvAQZdEXU3l6XwF1G8ZJodZvyrqakREpKqY/BR88TwccjXsfFjU1VQt3W+BpvuGdWF/mRZ1NSJSwRTsUs0H98OSWXDk3WGR0lRVrTocdT/8+kNYp09EROTHz8IY7J27QOfLo66m6smsBv0HQ04dGHpyGHcnIilDwS6VLJ4F790D7f4S1q9Jdc0PhA6nh24l8z+NuhoREYnS6iVhXF2tRvCX/4YZIeXP6mwH/QbDr3PgpXOgsDDqikSkguivXqpwh5GXhoHih6fRFayuN4YP8bwLNdOXiEi6KiwMIWX5T+GKVK36UVdUte24H3S/FWaMgvfvjboaEakgCnap4ssXYPZ4OOw62Gr7qKtJnBr1QrfTX6bChw9GXY2IiEThg/vg27HhxGaT3KirSQ6dzg7LIY27BWa+FXU1IlIBFOxSwdplMPafsH172OfMqKtJvF2PCre3bw/dUUVEJH18924IJ7sdCx3Pirqa5GEWxqo3agsvnAlLf4i6IhEpJwW7VDDuFli5AHrdF9Z5S0c974Ks6vDqRaFbqoiIpL7lP8KIv0L9VnDUA6m1bmsiZNeC44aErqzDTgkza4tI0lKwS3bzP4VP/hvOUjbeO+pqorPV9tD9Jvj+PfhsSNTViIhIZSvYEELd+tUhnOTUjrqi5FR/Z/jLY/DTFBh1qU6OiiQxBbtkVlgAr10MtRvBYddGXU309joVdjwQXr8WVvwcdTUiIlKZ3rwB5nwEvR+Ahm2iria5tekZlof47FmY/HTU1YjIFlKwS2YTHw9n2Hr8C6rXjbqa6GVkhPECG9bC6CuirkZERCrL9Dz46CHY5yzYvW/U1aSGQ64O6/+NvgLmTY66GhHZAgp2yWr5T/DWzbDzYWHdOgkatIRDroTpr8BXr0VdjYiIVLTFs+CV86FxBzj81qirSR0ZmXDs42Gdu2GnwqpFUVckImWkYJesxv4TCtbDEXdrsHhx+18I2+4Goy4LM4aKiEhqWL86hI6MTOj3dFi7VSpOzW2g/xBYvQhGnAEF+VFXJCJloGCXjGa+CdNehIMuDYOe5Y8yq4UxFyt/CWMwREQk+bmHE3a/TIO/PA71mkVdUWraoT0ceW9sGYmbo65GRMpAwS7ZbFgDIy+D+i3hwIujrqbqatwB9j0PJj0JP3wYdTUiIlJenz4Dn/8PDr4CWnWNuprUttdJ0OEM+
…<remainder of base64-encoded PNG omitted: two-panel plot of training/validation loss (left) and MAE (right) over the autoencoder epochs>\n",
+      "text/plain": [
+       "<Figure size 1080x360 with 2 Axes>"
+      ]
+     },
+     "metadata": {
+      "needs_background": "light"
+     },
+     "output_type": "display_data"
+    }
+   ],
+   "source": [
+    "plot_epochs = range(training_epochs)\n",
+    "plot_loss = autoencoder_train.history[\"loss\"]\n",
+    "plot_val_loss = autoencoder_train.history[\"val_loss\"]\n",
+    "plot_mae = autoencoder_train.history[\"mae\"]\n",
+    "plot_val_mae = autoencoder_train.history[\"val_mae\"]\n",
+    "\n",
+    "plt.figure(figsize = (15, 5))\n",
+    "\n",
+    "ax = plt.subplot(1, 2, 1)\n",
+    "plt.plot(plot_epochs, plot_loss, plot_val_loss)\n",
+    "\n",
+    "ax = plt.subplot(1, 2, 2)\n",
+    "plt.plot(plot_epochs, plot_mae, plot_val_mae)\n",
+    "\n",
+    "plt.show()"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "### Guardar Modelo Autoencoder"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 70,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "import h5py"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 71,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "if save_autoencoder:\n",
+    "    autoencoder.save(\"autoencoder_model.h5\")\n",
+    "    \n",
+    "else:\n",
+    "    print(\"Modelo Autoencoder no guardado.\")"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Guardamos el modelo del autoencoder con sus pesos / parametros."
+   ]
+  },
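+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "A minimal sketch (illustrative, not part of the pipeline): the saved .h5 file bundles architecture and weights, so it can be reloaded later with Keras' load_model."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "from keras.models import load_model\n",
+    "\n",
+    "# Reload the model saved above into a fresh object (assumes the file exists).\n",
+    "autoencoder_reloaded = load_model(\"autoencoder_model.h5\")\n",
+    "autoencoder_reloaded.summary()"
+   ]
+  },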
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "# Encoder"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Utilizamos las capas iniciales entrenadas por el modelo autoencoder para el modelo Encoder."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 72,
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "Model: \"model_19\"\n",
+      "_________________________________________________________________\n",
+      "Layer (type)                 Output Shape              Param #   \n",
+      "=================================================================\n",
+      "input_19 (InputLayer)        (None, 64, 64, 1)         0         \n",
+      "_________________________________________________________________\n",
+      "model_1 (Model)              (None, 32, 32, 8)         2296      \n",
+      "_________________________________________________________________\n",
+      "model_2 (Model)              (None, 16, 16, 16)        1696      \n",
+      "_________________________________________________________________\n",
+      "model_3 (Model)              (None, 8, 8, 32)          6464      \n",
+      "_________________________________________________________________\n",
+      "model_4 (Model)              (None, 4, 4, 64)          25216     \n",
+      "_________________________________________________________________\n",
+      "model_5 (Model)              (None, 2, 2, 128)         33408     \n",
+      "_________________________________________________________________\n",
+      "model_6 (Model)              (None, 1, 1, 256)         132352    \n",
+      "=================================================================\n",
+      "Total params: 201,432\n",
+      "Trainable params: 200,168\n",
+      "Non-trainable params: 1,264\n",
+      "_________________________________________________________________\n"
+     ]
+    }
+   ],
+   "source": [
+    "encoder_input = Input(shape = input_shape)\n",
+    "x = encoder_input\n",
+    "x = layer_conv[0](x)\n",
+    "x = layer_conv[1](x)\n",
+    "x = layer_conv[2](x)\n",
+    "x = layer_conv[3](x)\n",
+    "x = layer_conv[4](x)\n",
+    "x = layer_conv[5](x)\n",
+    "encoder_output = x\n",
+    "\n",
+    "encoder = Model(inputs = encoder_input, outputs = encoder_output)\n",
+    "encoder.summary()"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "### Guardar Modelo Encoder"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 73,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "if save_autoencoder:\n",
+    "    encoder.save(\"encoder_model.h5\")\n",
+    "    \n",
+    "else:\n",
+    "    print(\"Modelo Encoder no guardado.\")"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Guardamos el modelo encoder con sus pesos."
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "# Decoder"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Utilizamos las capas finales entrenadas por el modelo autoencoder para el modelo Encoder."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 75,
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "Model: \"model_20\"\n",
+      "_________________________________________________________________\n",
+      "Layer (type)                 Output Shape              Param #   \n",
+      "=================================================================\n",
+      "input_20 (InputLayer)        (None, 256)               0         \n",
+      "_________________________________________________________________\n",
+      "reshape_1 (Reshape)          (None, 1, 1, 256)         0         \n",
+      "_________________________________________________________________\n",
+      "model_7 (Model)              (None, 2, 2, 128)         131712    \n",
+      "_________________________________________________________________\n",
+      "model_8 (Model)              (None, 4, 4, 64)          49792     \n",
+      "_________________________________________________________________\n",
+      "model_9 (Model)              (None, 8, 8, 32)          12608     \n",
+      "_________________________________________________________________\n",
+      "model_10 (Model)             (None, 16, 16, 16)        3232      \n",
+      "_________________________________________________________________\n",
+      "model_11 (Model)             (None, 32, 32, 8)         848       \n",
+      "_________________________________________________________________\n",
+      "model_12 (Model)             (None, 64, 64, 1)         129       \n",
+      "=================================================================\n",
+      "Total params: 198,321\n",
+      "Trainable params: 197,585\n",
+      "Non-trainable params: 736\n",
+      "_________________________________________________________________\n"
+     ]
+    }
+   ],
+   "source": [
+    "decoder_input = Input(shape = (256,))\n",
+    "x = decoder_input\n",
+    "x = Reshape(target_shape = deconv6_input_shape)(x)\n",
+    "x = layer_deconv[-1](x)\n",
+    "x = layer_deconv[-2](x)\n",
+    "x = layer_deconv[-3](x)\n",
+    "x = layer_deconv[-4](x)\n",
+    "x = layer_deconv[-5](x)\n",
+    "x = layer_deconv[-6](x)\n",
+    "decoder_output = x\n",
+    "\n",
+    "decoder = Model(inputs = decoder_input, outputs = decoder_output)\n",
+    "decoder.summary()"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "### Guardar modelo Decoder"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 76,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "if save_autoencoder:\n",
+    "    decoder.save(\"decoder_model.h5\")\n",
+    "    \n",
+    "else:\n",
+    "    print(\"Modelo Decoder no guardado.\")"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Guardamos el modelo decoder con sus pesos."
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "# LSTM"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "El output del modelo encoder sirve como input para la red LSTM."
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "## Optimizador"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 77,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "from keras.optimizers import RMSprop"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 78,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "lstm_optimizer = RMSprop(lr = 0.000126, \n",
+    "                         rho = 0.9, \n",
+    "                         epsilon = 1e-08,\n",
+    "                         decay = 0.000334)"
+   ]
+  },
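+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Note (an assumption about this Keras version): decay applies the legacy schedule lr_t = lr / (1 + decay * iterations), so the effective learning rate shrinks gradually over the training updates; rho and epsilon are the usual RMSprop moving-average factor and numerical-stability term."
+   ]
+  },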
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "## Parametros LSTM"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 79,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "time_steps = time_steps_lstm  # 6 frames para alimentar al LSTM.\n",
+    "out_time_steps = out_time_steps_lstm  # Predicción de 1 frame por el LSTM.\n",
+    "data_dimension = convolution_6.output_shape[3]  # 256 features en el codificado.\n",
+    "\n",
+    "encoder_lstm_neurons = 256\n",
+    "decoder_lstm_neurons = 512\n",
+    "attention_neurons = 400\n",
+    "\n",
+    "activation = \"tanh\"\n",
+    "loss = \"mae\"\n",
+    "batch_size = batch_size_lstm\n",
+    "\n",
+    "dropout = 0.0132\n",
+    "recurrent_dropout = 0.385\n",
+    "use_bias = True\n",
+    "stateful = False"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "## Capas LSTM "
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Utilizamos 3 capas LSTM. RepeatVector repite el input para la segunda capa de LSTM out_time_steps veces."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 80,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "from keras.layers import RepeatVector, LSTM, Conv1D, Reshape"
+   ]
+  },
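+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "A minimal sketch (illustrative only, not part of the model) of what RepeatVector does to a tensor's shape before we build the real layers:"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# RepeatVector turns (batch, features) into (batch, n, features).\n",
+    "demo_input = Input(shape = (256,))\n",
+    "demo_model = Model(inputs = demo_input, outputs = RepeatVector(3)(demo_input))\n",
+    "print(demo_model.output_shape)  # (None, 3, 256)"
+   ]
+  },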
+  {
+   "cell_type": "code",
+   "execution_count": 81,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "lstm_input = Input(shape = (time_steps, data_dimension))\n",
+    "\n",
+    "x = lstm_input\n",
+    "\n",
+    "x = LSTM(units = encoder_lstm_neurons,\n",
+    "         activation = activation,\n",
+    "         use_bias = use_bias,\n",
+    "         recurrent_activation = \"hard_sigmoid\",\n",
+    "         kernel_initializer='glorot_uniform',\n",
+    "         recurrent_initializer='orthogonal',\n",
+    "         bias_initializer='zeros',\n",
+    "         unit_forget_bias = True,\n",
+    "         dropout = dropout,\n",
+    "         recurrent_dropout = recurrent_dropout,\n",
+    "         return_sequences = False,\n",
+    "         go_backwards = True, \n",
+    "         stateful = stateful)(x)\n",
+    "\n",
+    "x = RepeatVector(out_time_steps)(x)\n",
+    "\n",
+    "x = LSTM(units = decoder_lstm_neurons,\n",
+    "         activation = activation,\n",
+    "         use_bias = use_bias,\n",
+    "         recurrent_activation = \"hard_sigmoid\",\n",
+    "         kernel_initializer='glorot_uniform',\n",
+    "         recurrent_initializer='orthogonal',\n",
+    "         bias_initializer='zeros',\n",
+    "         unit_forget_bias = True,\n",
+    "         dropout = dropout,\n",
+    "         recurrent_dropout = recurrent_dropout,\n",
+    "         return_sequences = True,\n",
+    "         go_backwards = False, \n",
+    "         stateful = stateful)(x)\n",
+    "\n",
+    "x = Conv1D(filters = data_dimension, kernel_size = 1)(x)\n",
+    "\n",
+    "if out_time_steps == 1:\n",
+    "    x = Flatten()(x)\n",
+    "\n",
+    "lstm_output = x"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "l3 = LSTM(units = data_dimension,\n",
+    "          activation = activation,\n",
+    "          use_bias = use_bias,\n",
+    "          recurrent_activation = \"hard_sigmoid\",\n",
+    "          kernel_initializer='glorot_uniform',\n",
+    "          recurrent_initializer='orthogonal',\n",
+    "          bias_initializer='zeros',\n",
+    "          unit_forget_bias = True,\n",
+    "          dropout = dropout,\n",
+    "          recurrent_dropout = recurrent_dropout,\n",
+    "          return_sequences = out_time_steps > 1,\n",
+    "          go_backwards = False, \n",
+    "          stateful = stateful)(l2)     "
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "## Modelo"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 82,
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "Model: \"model_21\"\n",
+      "_________________________________________________________________\n",
+      "Layer (type)                 Output Shape              Param #   \n",
+      "=================================================================\n",
+      "input_21 (InputLayer)        (None, 6, 256)            0         \n",
+      "_________________________________________________________________\n",
+      "lstm_1 (LSTM)                (None, 256)               525312    \n",
+      "_________________________________________________________________\n",
+      "repeat_vector_1 (RepeatVecto (None, 1, 256)            0         \n",
+      "_________________________________________________________________\n",
+      "lstm_2 (LSTM)                (None, 1, 512)            1574912   \n",
+      "_________________________________________________________________\n",
+      "conv1d_1 (Conv1D)            (None, 1, 256)            131328    \n",
+      "_________________________________________________________________\n",
+      "flatten_1 (Flatten)          (None, 256)               0         \n",
+      "=================================================================\n",
+      "Total params: 2,231,552\n",
+      "Trainable params: 2,231,552\n",
+      "Non-trainable params: 0\n",
+      "_________________________________________________________________\n"
+     ]
+    }
+   ],
+   "source": [
+    "lstm = Model(inputs = lstm_input, outputs = lstm_output)\n",
+    "lstm.summary()"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "### Compilación"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 83,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "from keras.losses import mean_absolute_error, mean_squared_error"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 84,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "lstm.compile(loss = loss,\n",
+    "             optimizer = lstm_optimizer,\n",
+    "             metrics = ['mean_squared_error', 'mean_absolute_error'])"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "## Preparación de datos para LSTM"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Pasamos el set de entrenamiento y validación por el encoder para lograr el input de la red LSTM."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 85,
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "(180000, 1, 1, 256)\n",
+      "(20000, 1, 1, 256)\n"
+     ]
+    }
+   ],
+   "source": [
+    "encoded_train = encoder.predict(train_data)\n",
+    "encoded_vali = encoder.predict(vali_data)\n",
+    "\n",
+    "print(encoded_train.shape)\n",
+    "print(encoded_vali.shape)"
+   ]
+  },
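+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "That is 180000 encoded frames (900 scenes of 200 frames) for training and 20000 (100 scenes) for validation; each frame is a (1, 1, 256) code that the generator later flattens to a 256-dimensional vector."
+   ]
+  },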
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Definimos unas cuantas funciones útiles a la hora de preparar el input de la red LSTM:"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 86,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "from math import floor"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 87,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "def generator_count(encoded_data, batch_size, time_steps, out_time_steps, frames):\n",
+    "    scene_count = len(encoded_data) // frames\n",
+    "    sample_count = frames\n",
+    "    scene_iteration_count = floor((sample_count + 1 - (time_steps + out_time_steps)) / batch_size)\n",
+    "    return scene_count, sample_count, scene_iteration_count"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Cuenta cuantos batches entran en el set de entrenamiento:"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 88,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "def generator_batch_samples(encoded_data, batch_size, time_steps, out_time_steps, frames):\n",
+    "    scene_count, sample_count, scene_iteration_count = generator_count(encoded_data, batch_size, time_steps, out_time_steps, frames)\n",
+    "    batch_samples = scene_count * scene_iteration_count\n",
+    "    return batch_samples"
+   ]
+  },
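+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Worked example with the values used here: encoded_train holds 180000 frames, so scene_count = 180000 / 200 = 900 and scene_iteration_count = floor((200 + 1 - (6 + 1)) / 16) = 12, giving 900 * 12 = 10800 train batches per epoch (and 100 * 12 = 1200 for validation), matching the outputs below."
+   ]
+  },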
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Función para aplicar el mismo Shuffle a varias arrays, manteniendo el orden:"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 89,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "def shuffle_in_unison(*np_arrays):\n",
+    "    rng = np.random.get_state()\n",
+    "    for array in np_arrays:\n",
+    "        np.random.set_state(rng)\n",
+    "        np.random.shuffle(array)"
+   ]
+  },
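+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "A quick sanity check (illustrative only, not part of the pipeline): arrays shuffled this way stay paired row by row."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# Both arrays are shuffled from the same RNG state, so row i of a still matches b[i].\n",
+    "a = np.arange(6).reshape(3, 2)\n",
+    "b = np.arange(3)\n",
+    "shuffle_in_unison(a, b)\n",
+    "print(a)\n",
+    "print(b)"
+   ]
+  },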
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Reestructuramos los datos codificados."
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Devuelve arrays con la forma (batch_size, time_steps, data_dimension) y (batch_size, out_time_steps, data_dimension)."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 90,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "def restructure_encoded_data(encoded_data, time_steps, out_time_steps, batch_size):\n",
+    "    \n",
+    "    content_shape = encoded_data[0].shape  # (256,)\n",
+    "    final_sample_count = encoded_data.shape[0] - time_steps - out_time_steps  # frames, frames - batch_size, frames - 2 * batch_size, ...\n",
+    "    final_sample_count = min(batch_size, final_sample_count)  # 8\n",
+    "        \n",
+    "    X_data = np.zeros((final_sample_count, time_steps) + content_shape)  # (8, 6, 256)\n",
+    "    y_data = np.zeros((final_sample_count, out_time_steps) + content_shape)  # (8, 1, 256)\n",
+    "        \n",
+    "    curTS = 0\n",
+    "            \n",
+    "    for z in range(time_steps, final_sample_count + time_steps):\n",
+    "        X_data[curTS] = np.array(encoded_data[curTS:z])\n",
+    "        y_data[curTS] = np.array(encoded_data[z:z+out_time_steps])\n",
+    "        curTS += 1\n",
+    "        \n",
+    "    return X_data, y_data"
+   ]
+  },
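+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Shape trace with the values used here (batch_size = 16, time_steps = 6, out_time_steps = 1): each call returns X of shape (16, 6, 1, 1, 256) and y of shape (16, 1, 1, 1, 256); the generator below flattens these to (16, 6, 256) and (16, 256)."
+   ]
+  },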
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Generador para entrenar a la red LSTM."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 91,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "def generator_scene(encoded_data, batch_size, time_steps, out_time_steps, frames):\n",
+    "    \n",
+    "    scene_count, sample_count, scene_iteration_count = generator_count(encoded_data, batch_size, time_steps, out_time_steps, frames)\n",
+    "    \n",
+    "    while True:\n",
+    "\n",
+    "        for i in range(scene_count):\n",
+    "            \n",
+    "            scene = encoded_train[(i * frames):((i + 1) * frames)]  # Selecciona escenas individualmente.\n",
+    "     \n",
+    "            for j in range(scene_iteration_count):  # Número de batches que entran en una escena individual.\n",
+    "                start = j * batch_size\n",
+    "                end = sample_count\n",
+    "                \n",
+    "                data = scene[start:end]\n",
+    "                X, Y  = restructure_encoded_data(data, time_steps, out_time_steps, batch_size)\n",
+    "            \n",
+    "                X = X.reshape(*X.shape[0:2], -1)\n",
+    "                Y = np.squeeze(Y.reshape(Y.shape[0], out_time_steps, -1))\n",
+    "                \n",
+    "                shuffle_in_unison(X, Y)\n",
+    "        \n",
+    "                yield X, Y"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 92,
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "Number of train batch samples per epoch: 10800\n"
+     ]
+    }
+   ],
+   "source": [
+    "train_gen_samples = generator_batch_samples(encoded_train, batch_size, time_steps, out_time_steps, frames)\n",
+    "print (\"Number of train batch samples per epoch: {}\".format(train_gen_samples))\n",
+    "train_generator = generator_scene(encoded_train, batch_size, time_steps, out_time_steps, frames)"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 93,
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "Number of validation batch samples per epoch: 1200\n"
+     ]
+    }
+   ],
+   "source": [
+    "vali_gen_samples = generator_batch_samples(encoded_vali, batch_size, time_steps, out_time_steps, frames)\n",
+    "print (\"Number of validation batch samples per epoch: {}\".format(vali_gen_samples))\n",
+    "vali_generator = generator_scene(encoded_vali, batch_size, time_steps, out_time_steps, frames)"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "## Entrenamiento"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 94,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "training_epochs = epochs_lstm"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 95,
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "Epoch 1/10\n",
+      "10800/10800 [==============================] - 223s 21ms/step - loss: 0.1394 - mean_squared_error: 0.0386 - mean_absolute_error: 0.1394 - val_loss: 0.1178 - val_mean_squared_error: 0.0305 - val_mean_absolute_error: 0.1271\n",
+      "Epoch 2/10\n",
+      "10800/10800 [==============================] - 226s 21ms/step - loss: 0.1220 - mean_squared_error: 0.0282 - mean_absolute_error: 0.1220 - val_loss: 0.1089 - val_mean_squared_error: 0.0242 - val_mean_absolute_error: 0.1128\n",
+      "Epoch 3/10\n",
+      "10800/10800 [==============================] - 224s 21ms/step - loss: 0.1141 - mean_squared_error: 0.0246 - mean_absolute_error: 0.1141 - val_loss: 0.1039 - val_mean_squared_error: 0.0212 - val_mean_absolute_error: 0.1051\n",
+      "Epoch 4/10\n",
+      "10800/10800 [==============================] - 224s 21ms/step - loss: 0.1093 - mean_squared_error: 0.0226 - mean_absolute_error: 0.1093 - val_loss: 0.1002 - val_mean_squared_error: 0.0191 - val_mean_absolute_error: 0.0995\n",
+      "Epoch 5/10\n",
+      "10800/10800 [==============================] - 225s 21ms/step - loss: 0.1059 - mean_squared_error: 0.0213 - mean_absolute_error: 0.1059 - val_loss: 0.0981 - val_mean_squared_error: 0.0180 - val_mean_absolute_error: 0.0964\n",
+      "Epoch 6/10\n",
+      "10800/10800 [==============================] - 225s 21ms/step - loss: 0.1034 - mean_squared_error: 0.0203 - mean_absolute_error: 0.1034 - val_loss: 0.0957 - val_mean_squared_error: 0.0169 - val_mean_absolute_error: 0.0934\n",
+      "Epoch 7/10\n",
+      "10800/10800 [==============================] - 225s 21ms/step - loss: 0.1014 - mean_squared_error: 0.0195 - mean_absolute_error: 0.1014 - val_loss: 0.0944 - val_mean_squared_error: 0.0162 - val_mean_absolute_error: 0.0914\n",
+      "Epoch 8/10\n",
+      "10800/10800 [==============================] - 225s 21ms/step - loss: 0.0998 - mean_squared_error: 0.0190 - mean_absolute_error: 0.0998 - val_loss: 0.0925 - val_mean_squared_error: 0.0155 - val_mean_absolute_error: 0.0892\n",
+      "Epoch 9/10\n",
+      "10800/10800 [==============================] - 225s 21ms/step - loss: 0.0984 - mean_squared_error: 0.0185 - mean_absolute_error: 0.0984 - val_loss: 0.0919 - val_mean_squared_error: 0.0151 - val_mean_absolute_error: 0.0880\n",
+      "Epoch 10/10\n",
+      "10800/10800 [==============================] - 226s 21ms/step - loss: 0.0972 - mean_squared_error: 0.0181 - mean_absolute_error: 0.0972 - val_loss: 0.0904 - val_mean_squared_error: 0.0146 - val_mean_absolute_error: 0.0864\n"
+     ]
+    }
+   ],
+   "source": [
+    "lstm_train = lstm.fit_generator(generator = train_generator,\n",
+    "                                    steps_per_epoch = train_gen_samples,\n",
+    "                                    epochs = training_epochs,\n",
+    "                                    verbose = 1,\n",
+    "                                    callbacks = None,\n",
+    "                                    validation_data = vali_generator,\n",
+    "                                    validation_steps = vali_gen_samples,\n",
+    "                                    class_weight = None,\n",
+    "                                    workers = 1)"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "### Plot Errores"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 96,
+   "metadata": {},
+   "outputs": [
+    {
+     "data": {
+      "image/png": "iVBORw0KGgoAAAANSUhEUgAAAXoAAAD4CAYAAADiry33AAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4xLjEsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy8QZhcZAAAgAElEQVR4nO3deXxV1b338c8vM2QigSRASEggzKCIITgiiFqsVVtHtNqqrdSp0623T6fnPq33ttfbem9tq62z13mo1daOWhRxZAg4IHMIgYQpCYGMZF7PH/tAAoIcSMLOOef7fr3OKzl773Pyy1G+e2Xttdcy5xwiIhK+ovwuQERE+paCXkQkzCnoRUTCnIJeRCTMKehFRMJcjN8FHGzIkCEuLy/P7zJERELK8uXLq51zGYfa1++CPi8vj+LiYr/LEBEJKWa2+XD71HUjIhLmFPQiImFOQS8iEuaCCnozm2tm68ysxMy+d4j9M81shZm1m9llh9ifYmYVZnZPbxQtIiLBO2LQm1k0cC9wPjARuMrMJh502BbgOuDpw7zNvwNvHnuZIiJyrIJp0RcBJc65UudcK/AscHH3A5xzZc65j4DOg19sZicDWcCrvVCviIgcpWCCPhso7/a8IrDtiMwsCvhv4PYjHDffzIrNrLiqqiqYtxYRkSD19cXYW4C/OecqPu0g59wDzrlC51xhRsYhx/sfUe3eNv771XWUVDYc0+tFRMJVMDdMbQVyuj0fEdgWjFOBM83sFiAJiDOzBufcJy7o9lR7RycPvlXKtj3N/PcVJ/b224uIhKxgWvTLgDFmlm9mccA84OVg3tw590XnXK5zLg+v++bxvgh5gMFJ8VxdNJI/frCV8pqmvvgRIiIh6YhB75xrB24DXgHWAM8751aZ2R1mdhGAmU03swrgcuB+M1vVl0UfzvyZo4g2475FG/348SIi/ZL1t6UECwsLXU/muvnBSyt5obiCN787m6GpCb1YmYhI/2Vmy51zhYfaF3Z3xt581mg6nOPBt0r9LkVEpF8Iu6DPSR/IxVOH89SSzexqaPG7HBER34Vd0APcMquAlvZOHnlnk9+liIj4LiyDviAzic9OHsbj726mdm+b3+WIiPgqLIMe4JbZo6lvaefxd8v8LkVExFdhG/SThqcyZ3wmD7+zicaWdr/LERHxTdgGPcCtZxewp6mNp5ds8bsUERHfhHXQT8tN4/SCwTzwVinNbR1+lyMi4ouwDnqAW2cXUFXfwu+Ly498sIhIGAr7oD911GBOHpnGfYtKaev4xHT5IiJhL+yD3sy4bXYBW/fs5aX3g510U0QkfIR90APMGpfBpOEp/O6NjXR09q+5fURE+lpEBP2+Vv2m6kb+unK73+WIiBxXERH0AJ+ZNJSCzCTufb2ETrXqRSSCREzQR0UZt84ezbqd9SxYs9PvckREjpuICXqAC08YTm76QO5ZWEJ/m4dfRKSvRFTQx0RHcfOs0XxUUctbG6r9LkdE5LiIqKAHuGRaNsNSE7jn9RK/SxEROS4iLujjY6KZP3MUS8tqWFK6y+9yRET6XMQFPcC86bkMSYrjnoVq1YtI+IvIoB8QF81XzhjFWxuq+bB8j9/liIj0qYgMeoBrTskldUCsWvUiEvYiNuiTE2K57rQ8/rl6J2t31PldjohIn4nYoAe4/vQ8EuOiuXfhRr9LERHpMxEd9IMGxnHNqSP560fbKK1q8LscEZE+EdFBD/DVM0YRGx3F795Qq15EwlPEB31GcjxXFeXy0vtbqdjd5Hc5IiK9LuKDHmD+zFGYwf2LSv0uRUSk1ynogeGDBnDptBE8V1xOZV2z3+WIiPQqBX3AzbNG097RyYNvqVUvIuFFQR8wcnAiF504nKeWbKGmsdXvckREek1QQW9mc81snZmVmNn3DrF/ppmtMLN2M7us2/aRge0fmNkqM7upN4vvbbfOLqCptYNH39nkdykiIr3miEFvZtHAvcD5wETgKjObeNBhW4DrgKcP2r4dONU5NxWYAXzPzIb3tOi+MiYrmbmThvK/75ZR19zmdzkiIr0imBZ9EVDinCt1zrUCzwIXdz/AOVfmnPsI6Dxoe6tzriXwND7In+er284uoL65nSfe2+x3KSIivSKY4M0Gyrs9rwhsC4qZ5ZjZR4H3+C/n3LZDHDPfzIrNrLiqqirYt+4Tk7NTmTUug4ff3kRTa7uvtYiI9IY+b2E758qdcycABcCXzSzrEMc84JwrdM4VZmRk9HVJR/T1swuoaWzl6SVb/C5FRKTHggn6rUBOt+cjAtuOSqAl/zFw5tG+9ng7eWQ6p4xK58G3Smlu6/C7HBGRHgkm6JcBY8ws38zigHnAy8G8uZmNMLMBge/TgDOAdcda7PH09bPHsLOuhReWV/hdiohIjxwx6J1z7cBtwCvAGuB559wqM7vDzC4CMLPpZlYBXA7cb2arAi+fACwxsw+BRcBdzrmVffGL9LbTRg9mas4g7lu0kbaOziO/QESknzLnnN81HKCwsNAVFxf7XQYAr63ZyVceK+auy0/kspNH+F2OiMhhmdly51zhofb1++GOfjp7fCYThqXw2zdK6OjsXydEEZFgKeg/hZlx2+wCSqsa+fvH2/0uR0TkmCjoj2Du5KGMykjkntdL6G/dXCIiwVDQH0F0lHHrrALW7qjntTWVfpcjInLUFPRBuGjqcEakDeCehWrVi0joUdAHITY6iptnjeaD8j28U7LL73JERI6Kgj5Il508gqyUeO5ZuMHvUkREjoqCPkjxMdHMnzmaxaU1FJfV+F2OiEjQFPRH4aqiHNIT47hnYYnfpYiIBE1BfxQGxsXwlTPyeWNdFSsrav0uR0QkKAr6o3TtqSNJTojhXrXqRSREKOiPUkpCLNeflsc/Vu1g/c56v8sRETkiBf0xuP70fAbGRfNbtepFJAQo6I9BWmIc15wykpc/3EZZdaPf5YiIfCoF/TH66hn5xERHcd+ijX6XIiLyqRT0xygzJYF503P4w4oKtu3Z63c5IiKHpaDvga+dNRrn4IE3S/0uRUTksBT0PZA9aACXTMvmmaVbqKpv8bscEZFDUtD30M2zCmjr6OSht9WqF5H+SUHfQ/lDEvncCcN58r3N7Glq9bscEZFPUND3gltnF9DY2sGj75T5XYqIyCco6HvBuKHJnDcxi0ff2UR9c5vf5YiIHEBB30tuO7uAuuZ2nly8xe9SREQOoKDvJSeMGMTMsRk89FYpe1s7/C5HRGQ/BX0vum12AbsaW3l2mVr1ItJ/KOh7UVF+OkX56dy/qJTmNrXqRaR/UND3sm/NGcOOumZuenK5unBEpF9Q0Pey0wqGcOclU1i0voovP7KUOo3CERGfKej7wLyiXH497yRWbNnN1Q8upqZRN1KJiH8U9H3kwhOH8+CXCtmws4Er7n+PHbXNfpckIhFKQd+HZo/P5LEbithR28zl97/Lll1NfpckIhEoqKA3s7lmts7MSszse4fYP9PMVphZu5ld1m37VDN7z8xWmdlHZnZlbx
YfCk4ZNZinb5xBfXM7l933rtaZFZHj7ohBb2bRwL3A+cBE4Cozm3jQYVuA64CnD9reBHzJOTcJmAvcbWaDelp0qDlhxCCe/9qpAFxx/3t8WL7H54pEJJIE06IvAkqcc6XOuVbgWeDi7gc458qccx8BnQdtX++c2xD4fhtQCWT0SuUhZmxWMi/cdBrJCTF88aElLC7d5XdJIhIhggn6bKC82/OKwLajYmZFQBzwiUVWzWy+mRWbWXFVVdXRvnXIyB08kN9/7TSGpSbw5UeW8vranX6XJCIR4LhcjDWzYcATwPXOuc6D9zvnHnDOFTrnCjMywrvBPzQ1gee+dipjs5KZ//hy/vzhNr9LEpEwF0zQbwVyuj0fEdgWFDNLAf4K/NA5t/joygtP6YlxPH3jDKaNTOMbz77PM0s1N46I9J1ggn4ZMMbM8s0sDpgHvBzMmweOfwl43Dn3wrGXGX6SE2J57PoizhqbwfdfXMmDWmBcRPrIEYPeOdcO3Aa8AqwBnnfOrTKzO8zsIgAzm25mFcDlwP1mtirw8iuAmcB1ZvZB4DG1T36TEDQgLpoHri3kginD+Onf1vA/r67DOed3WSISZqy/BUthYaErLi72u4zjqqPT8YMXV/JccTnXnZbHv31uIlFR5ndZIhJCzGy5c67wUPtijncx8knRUcadl04hKSGGh9/eRENLO3deMoWYaN24LCI9p6DvJ8yMH10wgZSEWH65YD2NLe3cPW8q8THRfpcmIiEufJqMbXvhxflQucbvSo6ZmfHNc8bwb5+byN8/3sFXHyumqbXd77JEJMSFT9A3VkHpInj881Czye9qeuSGM/L5+aUn8E5JNV96WHPai0jPhE/QD8qFL/0ROlrg8YuhLrRvRLpieg6/uWoaH1bs4aoHFlPd0OJ3SSISosIn6AEyJ8A1f4CmGq9l3xja88lccMIwHvxSIRurvDntt9fu9bskEQlB4RX0ANknw9XPwp7N8OQl0Fzrd0U9MmtcJo/fMIOquhYu+917lFU3+l2SiISY8At6gLwz4IonYOfH8PQ8aA3tBT+K8tN5Zv4p7G3r4PL732Ptjjq/SxKREBKeQQ8w9jy45AHY8h48fy20h/a6rZOzU3n+a6cQZXDl/Yv5QHPai0iQwjfoASZfChf+CkoWwIs3QmeH3xX1SEGmN6d96oBYvvjgYt7dWO13SSISAsI76AFO/jKc91NY/Uf48zeg8xOzJIeUnPSB/P6mU8lOG8B1jy5jwWrNaS8iny78gx7gtNvgrP8D7z8Jr/4Q+tn8PkcrKyWB5+afyvihyXztyeX86YOgZ40WkQgUGUEPMOv7MONmWPxbeONOv6vpsbTEOJ766gwKR6bxrec+4Kklm/0uSUT6qcgJejP4zM9g6jWw6E54716/K+qx5IRYHruhiNnjMvnhSx9z36JPrNIoIhJBQQ8QFQUX/RomXgyv/ABWPO53RT2WEBvN/deezIUnDufOv6/lF6+s1Zz2InKAyJu9MioaLnkIWhvhz9+E+GSY9AW/q+qR2Ogo7r5yKknx0dy7cCP1ze38+MJJmtNeRIBIDHqAmDjvhqonL4U/3AhxSTDmXL+r6pHoKONnX5hCckIsD7xZSkNzOz+/7ATNaS8iEdZ1013cQG+qhKyJ8Nw1UPaO3xX1mJnx/fPH851zx/Li+1u56cnlVNVrMjSRSBe5QQ+QkArXvASDRsLTV8LWFX5X1GNmxtfnjOEnF03ijXVVzL7rDe5ftJGW9tC+WUxEjl1kBz1A4mBveuOB6V5XTuVavyvqFV8+LY9Xvj2Tovx0/vPvaznvl2/y6qodulArEoEU9AApw+FLf4LoOG8u+xBfuGSf0RlJPHLddB67oYjY6CjmP7Gcax5ewrod9X6XJiLHkYJ+n/T8wMIlrWGxcEl3Z43N4O/fPJMfXziRj7fWcf6v3uRHf1xJTWNoT/QmIsFR0HcXZguXdBcbHcV1p+fzxu2zuPaUkTyztJxZv1jII29voq0jtOf/EZFPp6A/WPY0uPq5sFm45GBpiXH85OLJ/P2bZ3JiziDu+Mtq5t79JgvXVfpdmoj0EQX9oeSdHlYLlxzK2KxkHr+hiIe/XEing+sfXcZ1jy6lpLLB79JEpJcp6A9n7HlwyYNQvjgsFi45FDNjzoQsXvnWTH742QksL9vN3Lvf5Cd/XkVtU5vf5YlIL1HQf5rJl4TVwiWHExcTxY0zR7HwX2dxeWEOj71bxqy7FvLEe2W0q/9eJOQp6I9k2pe8WS/DZOGSTzMkKZ7/vGQKf/n6mYwbmsz//dMqLvj127y9QStZiYQyBX0wTr0Vzvpe2CxcciQTh6fwzI2ncN8102hqa+eah5fw1ceKKatu9Ls0ETkGkTmp2bGY9T1oqfMWLolPgdnf97uiPmVmzJ08jFnjMnnknU3c+3oJ5/5yEdefns9tZxeQkhDrd4kiEiS16IO1b+GSk8Jn4ZJgJMRGc8usAhbePovPT83mwbdKOfuuN3hm6RY6OsP7LxuRcKGgPxpmcOGvYeLnw2bhkmBlpiTwi8tP5OVbzyBvcCLff3ElF/7mbRaXhs9NZSLhKqigN7O5ZrbOzErM7HuH2D/TzFaYWbuZXXbQvn+Y2R4z+0tvFe2rqGhv2GXBufDyN+DjF/2u6LiaMiKV3990Kr+56iRq97Yx74HF3PLUcsprwu9eA5FwccSgN7No4F7gfGAicJWZTTzosC3AdcDTh3iLXwDX9qzMfiYmDq54HHJP9YZdrn/V74qOKzPjwhOH89p3zuJfzh3LwrVVzPmfRfz8H2tpaGn3uzwROUgwLfoioMQ5V+qcawWeBS7ufoBzrsw59xHwibGHzrnXgPCbLnH/wiWTvBuqyt72u6LjLiE2mm/MGcPrt5/FBVOG8ds3NnL2XW/wwvIKOtV/L9JvBBP02UB5t+cVgW29xszmm1mxmRVXVVX15lv3rQMWLpkXFguXHIthqQP45ZVTefGW0xg2aAC3//5DvvDbd1i+ucbv0kSEfnIx1jn3gHOu0DlXmJGR4Xc5R+eAhUsugco1flfkm2m5abx082n88soT2VHXzKW/e49vPPM+W/fs9bs0kYgWTNBvBXK6PR8R2Cb77F+4JN6b3jhMFi45FlFRxhdOGsHC22fxjbMLeGXVDmb+fCHzHy9m4bpKDckU8UEwQb8MGGNm+WYWB8wDXu7bskLQAQuXXATb3ve7Il8NjIvhX84bx+u3z+LGM0exYsturn90GTN/vpDfvLaBnXXNfpcoEjEsmDVEzeyzwN1ANPCIc+6nZnYHUOyce9nMpgMvAWlAM7DDOTcp8Nq3gPFAErAL+Ipz7pXD/azCwkJXXFzcw1/LR1tXeAuNN1ZB4Q0w5//CgDS/q/Jda3snC9bs5OklW3i7pJroKOPs8ZlcXZTLzLEZREeZ3yWKhDQzW+6cKzzkvv62WHTIBz14i5Us/BksfcAL+XPvgBOvhqh+cUnEd5t3NfLssnJ+X1xOdUMr2YMGcOX0HK4ozGFoaoLf5YmEJAW9X3ashL9+B8qXQM4M+OxdMOwEv6vqN1rbO3ltz
U6eXrqFtzZUE2Vw9vgsrp6Rw1ljM9XKFzkKCno/dXbCh8/AP/8N9tZA0XyY/QNvaKbst2VXE88u28LzxRVUN7QwPDWBK6fncsX0EQxLHeB3eSL9noK+P9i7G17/D1j2MCRmwHn/Didc6c2fI/u1dXit/KeWdG/lZ3JVUS6zxqmVL3I4Cvr+ZNv7XnfO1uUw8nSvOyfr4BklBKC8pquVX1XfwrDUhP19+cMHqZUv0p2Cvr/p7IT3n4AFP/Yu3J5yszfffXyy35X1S14rv5Jnlm7hzQ1VGDB73L5WfgYx0brILaKg76+aauC1n8DyxyB5KJz3HzD5UnXnfIrymiaeW1bOc8Xl+1v5VxTmcOV0tfIlsino+7uK5fDXf4HtH0D+TK87J2Oc31X1a20dnby+1mvlL1rvtfJnBVr5s9XKlwikoA8FnR2w/FF47Q5obfTWqZ35XYhP8ruyfq9id6CVv6ycyvoWslLiubIwhyuLcslWK18ihII+lDRWw4L/5y1EnpINc/8TJlyk7pwgtHdr5b+x3psFddbYDK4qyuXs8Zlq5UtYU9CHoi1LvNE5O1fC6LO97pzBo/2uKmRU7G7i+UBf/s46r5V/8dRszpmQxbTcQQp9CTsK+lDV0Q7FD3vj79ub4bRvwJnf8RY9kaC0d3SycF0VzyzdwlsbqmjrcKQNjGX2uEzOmZjFzLEZJMXH+F2mSI8p6ENd/U7vztqPnoXUXDj/Thj3WXXnHKX65jbe2lDNgtU7eX1dJXua2oiLjmLGqHTOnZjFnAlZ6tOXkKWgDxdl78DfbofK1TDmPDj/vyB9lN9VhaT2jk5WbNnDgjU7WbBmJ6VVjQBMGJbCuRMymTMhiynZqUTpTlwJEQr6cNLR5s2KufBn3vdnfBvO+BbEqiXaExurGnhtzU4WrKmkuKyGTgeZyfHMmZDJOROyOL1gCAmx0X6XKXJYCvpwVLcdXv0RfPwCpOXB+T+HsZ/xu6qwsLuxlTfWV7JgdSWL1lfR0NJOQmwUZxRkcO7ETGaPzyQzWdMpS/+ioA9npYvgb/8K1etg3AXecMy0kX5XFTZa2ztZsmkXC1Z7rf19699OzRnEORO8C7rjspIxXS8Rnynow117Kyz+LSz6ObhOmPkdb4ROTLzflYUV5xxrd9R7ob+2kg/L9wAwIm0A50zI4pwJWRTlpxMXo6Gbcvwp6CNFbQW88gNY/SdIH+1drC04R6Nz+khlXTOvra3ktTU7eWtDNS3tnSTHxzBzXAbnTMhk9rhMBg2M87tMiRAK+khT8prXnVOzEbKmeLNjTr4UYtWv3Ff2tnbwTkl1YBRPJdUNLURHGYUj07zW/sQs8ock+l2mhDEFfSRqb4EPn4Ul93nDMRMzoPArMP0rkJTpd3VhrbPT8dHW2kC//k7W7qgHYFRGIudOyOKMMUOYlptGom7Ukl6koI9kzkHpG7D4d7DhFYiOgymXw4ybtH7tcVJe08Rra3by2tpKFpfuoq3DER1lTB6eQlF+OtPzvEdaorp55Ngp6MVTXeK18D94CtqaIO9Mr1tn7FyI0hjx46GhpZ0Vm3ezdFMNSzfV8EHFHlrbOwEYm5W0P/hn5A9maKq62iR4Cno50N7dsOIJ78ar2nJvHP6Mm2DqFyEhxe/qIkpzWwcfVdSyrKyGJZtqWLF5Nw0t7QDkpA+gKG8wRflpFOUPJm/wQA3jlMNS0MuhdbTD2r943TrliyEuGaZdC0XzIT3f7+oiUntHJ2u217Nk0y6WldWwrGw3NY2tAGQkx1OUl76/1T9+aLKmaJD9FPRyZFuXw+L7YNWL3iIo4y/wunVGnq7hmT5yzrGxqoElm2pYFuju2VbbDEBKQgyF3YJ/SnaqxvBHMAW9BK9uGyx7CIof8bp4hp4Ap9wCky/RDVj9RMXupv19/EvLavZPyJYQG8VJOWkU5aczIz+dk3LTGBCnay+RQkEvR6+1CVY+73XrVK2FxEyY/lUovAGSMvyuTrqpqm+hONDHv6yshtXb63AOYqKMKSNS93f3FI5MJ3VgrN/lSh9R0Muxcw5KF8J7v4WSfwaGZ14Bp9wEQ6f4XZ0cQu3eNm9kT5nX6v+oYg9tHQ4zGJeVvL+r58QRg8hJH6ALvGFCQS+9o2q9Nzzzw2e6Dc+8xZs1U8Mz+63mtg7e37KHZYHgX7FlN02tHYDXzz9xeAqTh6cyKdv7OiojiWhd5A05CnrpXU01sOJxb3hm3VZIy/cu3E69GuKT/a5OjqCto5M12+v4eGsdH2+rZdW2OtZur6MlMJ4/ITaKCcNSmLTvBDA8lbFDk4iP0cm8P1PQS9/oaIM1f/b68SuWQnwKTPsSFN3ojc2XkNHe0cnGqkY+3uoF/8fbalmzrY76wJj+mChjTFZyIPxTmJSdyoRhKVpvtx/pcdCb2VzgV0A08JBz7s6D9s8E7gZOAOY5517otu/LwI8CT//DOffYp/0sBX2Iqij2An/1H72pksdf4HXr5J6q4ZkhqrPTsaWmiVXb6li1rZaPt9WxamstuwLj+s0gf3Aik7JTu7X+UzSVg096FPRmFg2sB84FKoBlwFXOudXdjskDUoDbgZf3Bb2ZpQPFQCHggOXAyc653Yf7eQr6EFe7FZY9CMWPQvMeGHYinHwd5J/lrW+r0A9pzjl21rV4wb/VOwGs2la3f0EWgOxBA7r6/YenMDk7layUeF307WOfFvTB/N1VBJQ450oDb/YscDGwP+idc2WBfZ0HvfYzwD+dczWB/f8E5gLPHOXvIKEiNRvO+THM/C589KzXyv/Lt719ycO8G7DyzvAegwsU/CHGzBiamsDQ1ATmTMjav313Y+uBLf9ttSxYs5N97cjBiXGfaPnnpg/Unb3HSTBBnw2Ud3teAcwI8v0P9drsgw8ys/nAfIDc3Nwg31r6tbiB3pj7k6+H6g1Q9hZsfsf7+nGgZy8pKxD8p3sjeIaMVfCHqLTEOM4YM4QzxgzZv62xpZ012+u8Pv9A3/+Db5bS3umlf1J8DBOHpTB2aBJjMpMZk5nEmKxkhiTFqfXfy/rFlRTn3APAA+B13fhcjvQmM8gY6z2mf8Ubl79rI2x+G8rehrJ3vGkXwJszv3uLP2O8gj+EJcZ7UzQU5qXv39bS3sGGnQ37g3/19jr+9ME26pvb9x8zaGDs/tAfk+mdBMZmJZGRrO6fYxVM0G8Fcro9HxHYFoytwKyDXvtGkK+VcGQGQwq8x8nXecFfUxpo7b/jhf/qP3rHDhwMI0/zWvsjT4fMiRCluVxCWXxMNJOzU5mcnbp/m3OOyvoWNuxsYENlPet3NlBSWc9fP9pO7d62/celJMQwNiuZMVlJFATCf0xmsvr/gxDMxdgYvIuxc/CCexlwtXNu1SGO/V/gLwddjF0OTAscsgLvYmzN4X6eLsZGOOdgz+au1n7Z21C7xds3IK2rxT/ydMiarOAPY845qhpaKNnZwPqd9WyobPAeO+vZ3dR1AkhOiNnf8h+T1fWXwLDUhIg6AfTG8MrP4g2fjAYecc791MzuAIqdcy+b2XTgJSANaAZ2OOcmBV57A/CDwFv9
1Dn36Kf9LAW9fMKeLV2hv/lt2F3mbU8Y5LX494X/0Cm6QzcCOOfY1djK+p31lFQ2sCFwIiipbNg/9BO8awAFmUmMyUxibFYyBVne1+FhegLQDVMSXmorAsEfuMBbU+ptj0+Fkad2XeAdeiJE94vLUHKc7GpoOaDl73UHNVDd0LL/mMS4aO8EsO8aQFYSBRnJDB+UQEx06P6FqKCX8Fa3zQv+fRd4d5V42+OSIfeUrou7w6Yq+CPU7sbWwAmg/oBrAVX1XSeA2GhjRNpARg4eSN7gxAO+jkgb2O/n+lfQS2Sp3xG4uBvo569e522PT4VRZ8GYc2H0HG/Mv0S0PU2tlFQ2UFLZwOaaJjbvaqSs2vvaGJj4DSDKIDttAHmDE8lN73YiGOI9T4j1v8tQQS+RraHSC/3ShbBhAdRv87ZnToKCOV7w55wCMbp1Xzz7rgN0D/6yXd7XzTVN7Ol2MRhgWGpCt3wyVREAAAmxSURBVL8AvJOA90g8bvMBKehF9nEOKtd4c+uXLIDN70FnG8QledM0FMyBgnMgbaTflUo/tqeplc27mijb1XjA1827GqluaD3g2CFJ8eQFQj9v8EBGDgl8TU/s1YVgFPQih9PSAJve9EK/5J/eCB/w7tItOMd7jDwdYhP8rVNCRkNLu9fy33cCqO46Eeyoaz7g2EEDY7tOAIMTmTgsmbmThx3Tz1XQiwTDOe9C7oZAa7/sbehogZgBkH9mV/APHu13pRKi9rZ2UL67ibLqxk/8RbBtz16m5abxws2nHdN7K+hFjkVrk3dRt2SBF/41G73taflev37BOd5du3ED/a1TwkJreye1e9vISI4/ptcr6EV6Q00plLzmBf+mN73lFKPjvZu2Cs7xwl8Ts4lPFPQiva2tGba8F+jbXwBVa73tqbldF3RHnaWlFeW4UdCL9LU95V2hX7oIWushKsZbYatgDhScC1mT1NqXPqOgFzme2lu9NXQ3/NPr6tm50tuePMwL/eyTvS6eIeMgcYjCX3qFgl7ET3XbYWOgb3/j69Bc27UvYZAX+hlju8J/yBhvcXVN0CZHQUEv0l90dkLdVqhe7628Vb3O+1q1Dhoru46LjvOWWhwyJhD+gZPB4AKIS/Svfum3erpmrIj0lqgoGJTjPQrmHLhv726oLgmE/3qoWg87PoY1fwbXbTnm1JxA6/+gvwTUDSSHoaAX6S8GpEHOdO/RXXuLN7RzX/hXr/dOBive84Z47qNuIDkMBb1IfxcTD5kTvEd3nZ3eBG1V6w7sBlr/Krz/ZNdxh+sGypwI0b0314r0Xwp6kVAVFQWpI7zHsXQDxSTA8JMgpwhGFHlfkzKP/+8hfU5BLxKOjtQNVLkaKpZ7w0AX/w46fuXtT8vrCv2cIm8qZy3WEvL0X1AkknTvBpp8qbetrRm2f+iFfvkSb3qHlc97+2ITIXtaIPhnwIjpMDDdv/rlmCjoRSJdbALkzvAefN2bxbO2HMqXeo+KpfD23eACKy4NLugK/ZwiyBivi739nIJeRA5kBoNyvceUy7xtrU2w7f1Aq38prH8FPnjK2xef4t3tu6+7J7sQBgzyr375BAW9iBxZ3EDIO917gNfq373pwFb/m78IXOg1r5WfMz3Q3z/D+ysgqn8vrh3OFPQicvTMIH2U9zhxnretpR62rugK/tUvw4rHvX0JgwJdPTO8E0D2yZrZ8zhS0ItI74hP9qZmHnWW97yz01uxa193T/lSb74fHFiUN6Jn+FRIyvJGCQ1I8y707vt+30Nj/XtMQS8ifSMqyrsxK2MsnHSNt625FiqKu1r96/8BTbsOnOLhYHHJgZNA9xPAQSeEA04Q6d41Ap0g9lPQi8jxk5AamJ+/2w1enZ3e/P17d0NTjff14Ef37bVbYW/g+ZFOEMGeHJKHeovGhOl1BAW9iPgrKso7ASSkejdsBWvfCeJwJ4dPnCAqur4/1AkiPgWyJsPQKV2PjPHe8NMQp6AXkdDU/QRBfvCv6+yElroDTwi15d4UETtWesNGWxsCPyPGmx+oe/gPnRJyN40p6EUkskRFeX34AwZxyBNEZ6c3dHTHyq7Hpjfho2e7jknJ/mT4D8rrt10/CnoRke6iomDwaO8x6fNd2xurDwz/HSu95SL33TEclwxDD+76mdAvun4U9CIiwUgcAqNne4992vZC5ZoDw/+Dp7u6fiwaMg7u+jnhuHf9BBX0ZjYX+BUQDTzknLvzoP3xwOPAycAu4ErnXJmZxQH3A4VAJ/BN59wbvVe+iIiPYgd4k75lT+vadsiun7fgo+e6jjnOXT9HDHoziwbuBc4FKoBlZvayc251t8O+Aux2zhWY2Tzgv4ArgRsBnHNTzCwT+LuZTXfu08ZEiYiEsKPq+nm1awRQXDKMORcuf7TXSwqmRV8ElDjnSgHM7FngYqB70F8M/Djw/QvAPWZmwETgdQDnXKWZ7cFr3S/tlepFREJFMF0/CSl98qODCfpsoLzb8wpgxuGOcc61m1ktMBj4ELjIzJ4BcvC6dnI4KOjNbD4wHyA3N/fofwsRkVB0qK6fPtDXY4EewTsxFAN3A+8CHQcf5Jx7wDlX6JwrzMjI6OOSREQiSzAt+q14rfB9RgS2HeqYCjOLAVKBXc45B3x730Fm9i6wvkcVi4jIUQmmRb8MGGNm+YFRNPOAlw865mXgy4HvLwNed845MxtoZokAZnYu0H7QRVwREeljR2zRB/rcbwNewRte+YhzbpWZ3QEUO+deBh4GnjCzEqAG72QAkAm8YmadeK3+a/vilxARkcMzr3el/ygsLHTFxcV+lyEiElLMbLlzrvBQ+/rnxAwiItJrFPQiImFOQS8iEub6XR+9mVUBm3vwFkOA6l4qJ9TpsziQPo8D6fPoEg6fxUjn3CFvROp3Qd9TZlZ8uAsSkUafxYH0eRxIn0eXcP8s1HUjIhLmFPQiImEuHIP+Ab8L6Ef0WRxIn8eB9Hl0CevPIuz66EVE5EDh2KIXEZFuFPQiImEubILezOaa2TozKzGz7/ldj5/MLMfMFprZajNbZWbf9Lsmv5lZtJm9b2Z/8bsWv5nZIDN7wczWmtkaMzvV75r8ZGbfDvw7+djMnjGzBL9r6m1hEfTd1rU9H2/5wqvMbKK/VfmqHfiOc24icApwa4R/HgDfBNb4XUQ/8SvgH8658cCJRPDnYmbZwDeAQufcZLwZeud9+qtCT1gEPd3WtXXOtQL71rWNSM657c65FYHv6/H+IWf7W5V/zGwEcAHwkN+1+M3MUoGZeFOL45xrdc7t8bcq38UAAwKLJg0EtvlcT68Ll6A/1Lq2ERts3ZlZHnASsMTfSnx1N/BdoNPvQvqBfKAKeDTQlfXQvsWBIpFzbitwF7AF2A7UOude9beq3hcuQS+HYGZJwB+Abznn6vyuxw9m9jmg0jm33O9a+okYYBrwO+fcSUAjELHXtMwsDe+v/3xgOJBoZtf4W1XvC5egD2Zd24hiZrF4If+Uc+5Fv+vx0enARWZWhteld7aZPelvSb6qACqcc/v+wnsBL/gj1TnAJudclXOuDXgROM3nmnpduAR
9MOvaRgwzM7w+2DXOuf/xux4/Oee+75wb4ZzLw/v/4nXnXNi12ILlnNsBlJvZuMCmOUAkr+O8BTglsL614X0eYXdx+ohrxoaCw61r63NZfjodb33elWb2QWDbD5xzf/OxJuk/vg48FWgUlQLX+1yPb5xzS8zsBWAF3mi19wnD6RA0BYKISJgLl64bERE5DAW9iEiYU9CLiIQ5Bb2ISJhT0IuIhDkFvYhImFPQi4iEuf8P3jlDGwteUYsAAAAASUVORK5CYII=\n",
+      "text/plain": [
+       "<Figure size 432x288 with 1 Axes>"
+      ]
+     },
+     "metadata": {
+      "needs_background": "light"
+     },
+     "output_type": "display_data"
+    }
+   ],
+   "source": [
+    "plot_epochs = range(training_epochs)\n",
+    "plot_loss = lstm_train.history[\"loss\"]\n",
+    "plot_val_loss = lstm_train.history[\"val_loss\"]\n",
+    "\n",
+    "plt.plot(plot_epochs, plot_loss, plot_val_loss)\n",
+    "plt.show()"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "## Guardar Modelo LSTM"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 97,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "if save_lstm:\n",
+    "    lstm.save(\"lstm_model.h5\")\n",
+    "    \n",
+    "else:\n",
+    "    print(\"Modelo LSTM no guardado.\")"
+   ]
+  }
+ ],
+ "metadata": {
+  "kernelspec": {
+   "display_name": "Python 3",
+   "language": "python",
+   "name": "python3"
+  },
+  "language_info": {
+   "codemirror_mode": {
+    "name": "ipython",
+    "version": 3
+   },
+   "file_extension": ".py",
+   "mimetype": "text/x-python",
+   "name": "python",
+   "nbconvert_exporter": "python",
+   "pygments_lexer": "ipython3",
+   "version": "3.6.8"
+  },
+  "toc": {
+   "base_numbering": 1,
+   "nav_menu": {},
+   "number_sections": true,
+   "sideBar": true,
+   "skip_h1_title": false,
+   "title_cell": "Table of Contents",
+   "title_sidebar": "Contents",
+   "toc_cell": false,
+   "toc_position": {},
+   "toc_section_display": true,
+   "toc_window_display": false
+  }
+ },
+ "nbformat": 4,
+ "nbformat_minor": 2
+}
diff --git a/Scripts/Notebooks/Entrenamiento Modelos.ipynb b/Scripts/Notebooks/Entrenamiento Modelos.ipynb
new file mode 100644
index 0000000000000000000000000000000000000000..ecd263862755a272c10d6e3e3712348baec7d373
--- /dev/null
+++ b/Scripts/Notebooks/Entrenamiento Modelos.ipynb	
@@ -0,0 +1,1801 @@
+{
+ "cells": [
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "# Librerías"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 1,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "import os\n",
+    "import sys\n",
+    "import tensorflow as tf\n",
+    "import numpy as np\n",
+    "import scipy.misc\n",
+    "import matplotlib.pyplot as plt"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 2,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "sys.path.append(\"../tools\")  # Herramientas propias de MantaFlow\n",
+    "import uniio  # Lectura de ficheros .uni"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "# Hiperparámetros"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 3,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "num_sims = 2000  # num_sims - 1000 escenas. \n",
+    "frames = 200  # Frames por escena.\n",
+    "\n",
+    "epochs_autoencoder = 5\n",
+    "epochs_lstm = 5\n",
+    "epochs_pretraining = 1\n",
+    "\n",
+    "batch_size_autoencoder = 128\n",
+    "batch_size_lstm = 32\n",
+    "\n",
+    "time_steps_lstm = 6\n",
+    "out_time_steps_lstm = 1\n",
+    "\n",
+    "save_autoencoder = True\n",
+    "save_lstm = True"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "# Datos iniciales"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Inicializamos las seed para funciones random. Al ser inicializadas al mismo número, el resultado no cambiará en cada ejecución.\n"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 4,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "np.random.seed(13)"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Ruta a los datos de simulación, donde también se guardan los resultados.\n"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 5,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "base_path = \"../data\""
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "## Carga de datos"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Podemos elegir el número de escenas y los frames de cada una, dependiendo de la configuración de los simuladores clásicos."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 6,
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "Cargamos 1000 escenas, con 200 frames cada una.\n",
+      "Trabajamos con un total de 400000 frames.\n"
+     ]
+    }
+   ],
+   "source": [
+    "print(\"Cargamos {} escenas, con {} frames cada una.\".format(num_sims-1000, frames))\n",
+    "print(\"Trabajamos con un total de {} frames.\".format(num_sims * frames))"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Cargamos los datos desde los ficheros .uni en arrays de numpy. Los .uni son ficheros propios de MantaFlow, en los que se guarda los resultados de los simuladores clásicos. En este caso cargamos los datos de densidad de humo simulados previamente."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 7,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "densities = []\n",
+    "\n",
+    "for sim in range(1000, num_sims):\n",
+    "    \n",
+    "    if os.path.exists(\"%s/simSimple_%04d\" % (base_path, sim)):  # Comprueba la existencia de las carpetas (cada una 100 frames de datos).\n",
+    "        \n",
+    "        for i in range(0, frames):\n",
+    "            \n",
+    "            filename = \"%s/simSimple_%04d/density_%04d.uni\"  # Nombre de cada frame (densidad).\n",
+    "            uni_path = filename % (base_path, sim, i)  # 200 frames por sim, rellena parametros de la ruta.\n",
+    "            header, content = uniio.readUni(uni_path)  # Devuelve un array Numpy [Z, Y, X, C].\n",
+    "            \n",
+    "            h = header[\"dimX\"]\n",
+    "            w = header[\"dimY\"]\n",
+    "            \n",
+    "            arr = content[:, ::-1, :, :]  # Cambia el orden de Y.\n",
+    "            arr = np.reshape(arr, [w, h, 1])  # Deshecha Z.\n",
+    "            \n",
+    "            densities.append(arr)"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Devuelve los datos de cada frame (canal de grises, 0 a 255) en una lista de Python. En este caso las imagenes son de 64x64 pixels. (64, 64, 1)"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Necesitamos al menos 2 simulaciones para trabajar de manera adecuada."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 8,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "load_num = len(densities)\n",
+    "\n",
+    "if load_num < 2 * frames:\n",
+    "    \n",
+    "    print(\"Error - Usa al menos dos simulaciones completas\")\n",
+    "    \n",
+    "    exit(True)"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Convertimos la lista \"densities\" en un array de Numpy.\n"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 9,
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "Forma del array: (200000, 64, 64, 1)\n",
+      "Dimensiones del array: 4\n",
+      "Número de pixels en total: 819200000\n"
+     ]
+    }
+   ],
+   "source": [
+    "densities = np.reshape(densities, (len(densities), 64, 64, 1))\n",
+    "\n",
+    "print(\"Forma del array: {}\".format(densities.shape))\n",
+    "print(\"Dimensiones del array: {}\".format(densities.ndim))\n",
+    "print(\"Número de pixels en total: {}\".format(densities.size))"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "## Creación del set de validación"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Con el fin de entrenar correctamente a los modelos Deep Learning, separamos los datos de densidad en un set de entrenamiento y otro de validación. Creamos el set de validación de entre los datos de simulación generados, al menos una simulación completa o el 10% de los datos (el que sea mayor de los dos)."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 10,
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "Separamos en 180000 frames de entrenamiento y 20000 frames de validación.\n"
+     ]
+    }
+   ],
+   "source": [
+    "vali_set_size = max(200, int(load_num * 0.1))  # Al menos una simu completa o el 10% de los datos.\n",
+    "\n",
+    "vali_data = densities[load_num - vali_set_size : load_num, :]  # \"load_num\" datos del final de \"densities\".\n",
+    "train_data = densities[0 : load_num - vali_set_size, :]  # El resto de datos del principio de \"densities\".\n",
+    "\n",
+    "print(\"Separamos en {} frames de entrenamiento y {} frames de validación.\".format(train_data.shape[0], vali_data.shape[0]))"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Convertimos los datos de entrenamiento y validación en arrays."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 11,
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "Forma del set de entrenamiento: (180000, 64, 64, 1)\n",
+      "Forma del set de validación: (20000, 64, 64, 1)\n"
+     ]
+    }
+   ],
+   "source": [
+    "train_data = np.reshape(train_data, (len(train_data), 64, 64, 1))\n",
+    "vali_data = np.reshape(vali_data, (len(vali_data), 64, 64, 1))\n",
+    "\n",
+    "print(\"Forma del set de entrenamiento: {}\".format(train_data.shape))\n",
+    "print(\"Forma del set de validación: {}\".format(vali_data.shape))"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "# Autoencoder 2D"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "El modelo que vamos a utilizar es un autoencoder completamente convolucional. Las típicas capas de MaxPooling y UpSampling no aparecen en nuestro modelo, y en su lugar cambiamos las dimensiones mediante un Stride de 2.  "
+   ]
+  },
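+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "As a purely illustrative aside (not part of the pipeline), this sketch compares a stride-2 convolution with the usual convolution-plus-pooling pair; both halve the spatial size, but the stride-2 version learns its own downsampling:"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# Illustrative sketch (assumed example): stride-2 Conv2D vs. Conv2D + MaxPooling2D.\n",
+    "from keras.layers import Input, Conv2D, MaxPooling2D\n",
+    "from keras.models import Model\n",
+    "\n",
+    "demo_in = Input(shape = (64, 64, 1))\n",
+    "by_stride = Conv2D(8, 4, strides = 2, padding = \"same\")(demo_in)  # learned downsampling\n",
+    "by_pool = MaxPooling2D(2)(Conv2D(8, 4, strides = 1, padding = \"same\")(demo_in))  # fixed downsampling\n",
+    "\n",
+    "print(Model(demo_in, by_stride).output_shape)  # (None, 32, 32, 8)\n",
+    "print(Model(demo_in, by_pool).output_shape)  # (None, 32, 32, 8)"
+   ]
+  },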
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "## Estructura Layer-Wise"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 12,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "def convert_shape(shape):\n",
+    "    out_shape = []\n",
+    "    for i in shape:\n",
+    "        try:\n",
+    "            out_shape.append(int(i))\n",
+    "        except:\n",
+    "            out_shape.append(None)\n",
+    "    return out_shape"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 13,
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stderr",
+     "output_type": "stream",
+     "text": [
+      "Using TensorFlow backend.\n"
+     ]
+    }
+   ],
+   "source": [
+    "from keras.models import Model\n",
+    "from keras.layers import Input\n",
+    "\n",
+    "class StagedModel:\n",
+    "    \n",
+    "    def start(self, first_input):\n",
+    "        self.stages = []\n",
+    "        self.current_input = first_input\n",
+    "        return self.current_input\n",
+    "    \n",
+    "    def stage(self, output):\n",
+    "        stage = Model(inputs = self.current_input, outputs = output)\n",
+    "        self.stages.append(stage)\n",
+    "        self.current_input = Input(shape = stage.output_shape[1:])\n",
+    "        return self.current_input\n",
+    "    \n",
+    "    def end(self, output):\n",
+    "        stage = Model(inputs = self.current_input, outputs = output)\n",
+    "        self.stages.append(stage)\n",
+    "    \n",
+    "    @property\n",
+    "    def model(self):\n",
+    "        model_input = []\n",
+    "        for layer_input in self.stages[0].inputs:\n",
+    "            shape = convert_shape(layer_input.shape[1:])\n",
+    "            model_input.append(Input(shape = shape))\n",
+    "        if len(model_input) == 1:\n",
+    "            model_input = model_input[0]\n",
+    "        x = model_input\n",
+    "        for stage in self.stages:\n",
+    "            x = stage(x)\n",
+    "        model_output = x\n",
+    "        model = Model(inputs = model_input, outputs = model_output)\n",
+    "        return model\n",
+    "    \n",
+    "    def __getitem__(self, key):\n",
+    "        if isinstance(key, slice):\n",
+    "            substages = self.stages[key]\n",
+    "            model_input = []\n",
+    "            for layer_input in substages[0].inputs:\n",
+    "                shape = convert_shape(layer_input.shape[1:]) #[int(i) for i in layer_input.shape[1:]]\n",
+    "                model_input.append(Input(shape=shape))\n",
+    "            if len(model_input) == 1:\n",
+    "                model_input = model_input[0]\n",
+    "            x = model_input\n",
+    "            for index, stage in enumerate(substages):\n",
+    "                x = stage(x)\n",
+    "            model_output = x\n",
+    "            model = Model(inputs = model_input, outputs = model_output)\n",
+    "            return model\n",
+    "        else:\n",
+    "            return self.stages[key]"
+   ]
+  },
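+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "A minimal usage sketch of StagedModel (toy layers, purely illustrative; the real stages are built below). start() opens the first stage, each stage() call closes one sub-model and opens a fresh Input, and end() closes the last one:"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# Toy sketch (assumed example) of the start / stage / end protocol.\n",
+    "from keras.layers import Dense\n",
+    "\n",
+    "toy = StagedModel()\n",
+    "x = toy.start(Input(shape = (4,)))  # open stage 0\n",
+    "x = toy.stage(Dense(8)(x))          # close stage 0, open stage 1\n",
+    "toy.end(Dense(4)(x))                # close stage 1\n",
+    "\n",
+    "print(len(toy.stages))              # 2 sub-models\n",
+    "print(toy.model.output_shape)       # (None, 4)"
+   ]
+  },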
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "## Creacion de las capas del modelo"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "### Parametros de inicialización"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Regula la cantidad de filtros convolucionales:"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 14,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "feature_multiplier = 8 "
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Tamaño del kernel de la primera capa del encoder y la última del decoder (kernels exteriores):"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 15,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "surface_kernel_size = 4  # Matriz 4x4"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Tamaño de los kernels interiores:"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 16,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "kernel_size = 2  # Matriz 2x2"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "El valor de la capa Dropout:"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 17,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "dropout = 0.0"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "La función que utilizamos para inicializar los parametros de las capas:"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 18,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "init_func = \"glorot_normal\""
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "En la primera capa debemos definir las dimensiones del input esperado:"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 19,
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "(64, 64, 1)\n"
+     ]
+    }
+   ],
+   "source": [
+    "input_shape = (train_data.shape[1], \n",
+    "               train_data.shape[2], \n",
+    "               train_data.shape[3])\n",
+    "\n",
+    "print(input_shape)"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "### Librerías"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 20,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "from keras.layers import Input, Dropout, Conv2D, Conv2DTranspose, BatchNormalization, Flatten, Activation, Reshape\n",
+    "from keras.layers.advanced_activations import LeakyReLU"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "### Capas del Encoder"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 21,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# Input\n",
+    "encoder_input = Input(shape = input_shape)\n",
+    "\n",
+    "encoder_stages = StagedModel()\n",
+    "encoder_stages.start(encoder_input)\n",
+    "\n",
+    "### Conv 1 ###\n",
+    "\n",
+    "# Layer 0 #\n",
+    "\n",
+    "l0 = Conv2D(filters = feature_multiplier * 1, \n",
+    "            kernel_size = surface_kernel_size,\n",
+    "            strides = 1,\n",
+    "            padding = \"same\",\n",
+    "            kernel_initializer = init_func)(encoder_input)\n",
+    "\n",
+    "l0_act = LeakyReLU(alpha = 0.2)(l0)\n",
+    "\n",
+    "l0_batch = BatchNormalization()(l0_act)\n",
+    "\n",
+    "# Layer 1 #\n",
+    "\n",
+    "l1 = Conv2D(filters = feature_multiplier * 1, \n",
+    "            kernel_size = surface_kernel_size,\n",
+    "            strides = 1,\n",
+    "            kernel_initializer = init_func,\n",
+    "            padding = \"same\")(l0_batch)\n",
+    "\n",
+    "l1_act = LeakyReLU(alpha = 0.2)(l1)\n",
+    "\n",
+    "l1_batch = BatchNormalization()(l1_act)\n",
+    "\n",
+    "# Layer 2 #\n",
+    "\n",
+    "l2 = Conv2D(filters = feature_multiplier * 1, \n",
+    "            kernel_size = surface_kernel_size,\n",
+    "            strides = 2,\n",
+    "            kernel_initializer = init_func, \n",
+    "            padding = \"same\")(l1_batch)\n",
+    "\n",
+    "l2_act = LeakyReLU(alpha = 0.2)(l2)\n",
+    "\n",
+    "l2_batch = BatchNormalization()(l2_act)\n",
+    "\n",
+    "l2_drop = Dropout(dropout)(l2_batch)\n",
+    "\n",
+    "conv1_stage = encoder_stages.stage(l2_drop)\n",
+    "\n",
+    "### Conv 2 ###\n",
+    "\n",
+    "# Layer 3 #\n",
+    "\n",
+    "l3 = Conv2D(filters = feature_multiplier * 2, \n",
+    "            kernel_size = kernel_size,\n",
+    "            strides = 1,\n",
+    "            kernel_initializer = init_func, \n",
+    "            padding = \"same\")(conv1_stage)\n",
+    "\n",
+    "l3_act = LeakyReLU(alpha = 0.2)(l3)\n",
+    "\n",
+    "l3_batch = BatchNormalization()(l3_act)\n",
+    "\n",
+    "# Layer 4 #\n",
+    "\n",
+    "l4 = Conv2D(filters = feature_multiplier * 2, \n",
+    "            kernel_size = kernel_size,\n",
+    "            strides = 2,\n",
+    "            kernel_initializer = init_func, \n",
+    "            padding = \"same\")(l3_batch)\n",
+    "\n",
+    "l4_act = LeakyReLU(alpha = 0.2)(l4)\n",
+    "\n",
+    "l4_batch = BatchNormalization()(l4_act)\n",
+    "\n",
+    "l4_drop = Dropout(dropout)(l4_batch)\n",
+    "\n",
+    "conv2_stage = encoder_stages.stage(l4_drop)\n",
+    "\n",
+    "### Conv 3 ###\n",
+    "\n",
+    "# Layer 5 #\n",
+    "\n",
+    "l5 = Conv2D(filters = feature_multiplier * 4, \n",
+    "            kernel_size = kernel_size,\n",
+    "            strides = 1,\n",
+    "            kernel_initializer = init_func, \n",
+    "            padding = \"same\")(conv2_stage)\n",
+    "\n",
+    "l5_act = LeakyReLU(alpha = 0.2)(l5)\n",
+    "\n",
+    "l5_batch = BatchNormalization()(l5_act)\n",
+    "\n",
+    "# Layer 6 #\n",
+    "\n",
+    "l6 = Conv2D(filters = feature_multiplier * 4, \n",
+    "            kernel_size = kernel_size,\n",
+    "            strides = 2,\n",
+    "            kernel_initializer = init_func, \n",
+    "            padding = \"same\")(l5_batch)\n",
+    "\n",
+    "l6_act = LeakyReLU(alpha = 0.2)(l6)\n",
+    "\n",
+    "l6_batch = BatchNormalization()(l6_act)\n",
+    "\n",
+    "l6_drop = Dropout(dropout)(l6_batch)\n",
+    "\n",
+    "conv3_stage = encoder_stages.stage(l6_drop)\n",
+    "\n",
+    "### Conv 4 ###\n",
+    "\n",
+    "# Layer 7 #\n",
+    "\n",
+    "l7 = Conv2D(filters = feature_multiplier * 8, \n",
+    "            kernel_size = kernel_size,\n",
+    "            strides = 1,\n",
+    "            kernel_initializer = init_func, \n",
+    "            padding = \"same\")(conv3_stage)\n",
+    "\n",
+    "l7_act = LeakyReLU(alpha = 0.2)(l7)\n",
+    "\n",
+    "l7_batch = BatchNormalization()(l7_act)\n",
+    "\n",
+    "# Layer 8 #\n",
+    "\n",
+    "l8 = Conv2D(filters = feature_multiplier * 8, \n",
+    "            kernel_size = kernel_size,\n",
+    "            strides = 2,\n",
+    "            kernel_initializer = init_func, \n",
+    "            padding = \"same\")(l7_batch)\n",
+    "\n",
+    "l8_act = LeakyReLU(alpha = 0.2)(l8)\n",
+    "\n",
+    "l8_batch = BatchNormalization()(l8_act)\n",
+    "\n",
+    "l8_drop = Dropout(dropout)(l8_batch)\n",
+    "\n",
+    "conv4_stage = encoder_stages.stage(l8_drop)\n",
+    "\n",
+    "### Conv 5 ###\n",
+    "\n",
+    "# Layer 9 #\n",
+    "\n",
+    "l9 = Conv2D(filters = feature_multiplier * 16, \n",
+    "            kernel_size = kernel_size,\n",
+    "            strides = 2,\n",
+    "            kernel_initializer = init_func, \n",
+    "            padding = \"same\")(conv4_stage)\n",
+    "\n",
+    "l9_act = LeakyReLU(alpha = 0.2)(l9)\n",
+    "\n",
+    "l9_batch = BatchNormalization()(l9_act)\n",
+    "\n",
+    "l9_drop = Dropout(dropout)(l9_batch)\n",
+    "\n",
+    "conv5_stage = encoder_stages.stage(l9_drop)\n",
+    "\n",
+    "# Layer 10 #\n",
+    "\n",
+    "l10 = Conv2D(filters = feature_multiplier * 32, \n",
+    "            kernel_size = kernel_size,\n",
+    "            strides = 2,\n",
+    "            kernel_initializer = init_func, \n",
+    "            padding = \"same\")(conv5_stage)\n",
+    "\n",
+    "l10_act = LeakyReLU(alpha = 0.2)(l10)\n",
+    "\n",
+    "l10_batch = BatchNormalization()(l10_act)\n",
+    "\n",
+    "### Output Encoder ###\n",
+    "\n",
+    "encoder_stages.end(l10_batch)\n",
+    "\n",
+    "encoder_output = l10_batch"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "### Capas del Decoder"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 22,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "decoder_input = Input(shape = (None, None, feature_multiplier * 32))\n",
+    "\n",
+    "decoder_stages = StagedModel()\n",
+    "decoder_stages.start(decoder_input)\n",
+    "\n",
+    "### Deconv 1 ###\n",
+    "\n",
+    "# Layer 11 #\n",
+    "\n",
+    "l11 = Conv2DTranspose(filters = feature_multiplier * 16, \n",
+    "                     kernel_size = kernel_size,\n",
+    "                     strides = 2,\n",
+    "                     kernel_initializer = init_func,\n",
+    "                     padding = \"same\")(decoder_input)\n",
+    "\n",
+    "l11_act = LeakyReLU(alpha = 0.2)(l11)\n",
+    "\n",
+    "l11_batch = BatchNormalization()(l11_act)\n",
+    "\n",
+    "l11_drop = Dropout(dropout)(l11_batch)\n",
+    "\n",
+    "deconv1_stage = decoder_stages.stage(l11_drop)\n",
+    "\n",
+    "### Deconv 2 ###\n",
+    "\n",
+    "# Layer 12  #\n",
+    "\n",
+    "l12 = Conv2DTranspose(filters = feature_multiplier * 8, \n",
+    "                     kernel_size = kernel_size,\n",
+    "                     strides = 2,\n",
+    "                     kernel_initializer = init_func, \n",
+    "                     padding = \"same\")(deconv1_stage)\n",
+    "\n",
+    "l12_act = LeakyReLU(alpha = 0.2)(l12)\n",
+    "\n",
+    "l12_batch = BatchNormalization()(l12_act)\n",
+    "\n",
+    "l12_drop = Dropout(dropout)(l12_batch)\n",
+    "\n",
+    "# Layer 13 #\n",
+    "\n",
+    "l13 = Conv2DTranspose(filters = feature_multiplier * 8, \n",
+    "                     kernel_size = kernel_size,\n",
+    "                     strides = 1,\n",
+    "                     kernel_initializer = init_func,\n",
+    "                     padding = \"same\")(l12_drop)\n",
+    "\n",
+    "l13_act = LeakyReLU(alpha = 0.2)(l13)\n",
+    "\n",
+    "l13_batch = BatchNormalization()(l13_act)\n",
+    "\n",
+    "deconv2_stage = decoder_stages.stage(l13_batch)\n",
+    "\n",
+    "### Deconv 3 ###\n",
+    "\n",
+    "# Layer 14 #\n",
+    "\n",
+    "l14 = Conv2DTranspose(filters = feature_multiplier * 4, \n",
+    "                     kernel_size = kernel_size,\n",
+    "                     strides = 2,\n",
+    "                     kernel_initializer = init_func,\n",
+    "                     padding = \"same\")(deconv2_stage)\n",
+    "\n",
+    "l14_act = LeakyReLU(alpha = 0.2)(l14)\n",
+    "\n",
+    "l14_batch = BatchNormalization()(l14_act)\n",
+    "\n",
+    "# Layer 15 #\n",
+    "\n",
+    "l15 = Conv2DTranspose(filters = feature_multiplier * 4, \n",
+    "                     kernel_size = kernel_size,\n",
+    "                     strides = 1,\n",
+    "                     kernel_initializer = init_func,\n",
+    "                     padding = \"same\")(l14_batch)\n",
+    "\n",
+    "l15_act = LeakyReLU(alpha = 0.2)(l15)\n",
+    "\n",
+    "l15_batch = BatchNormalization()(l15_act)\n",
+    "\n",
+    "l15_drop = Dropout(dropout)(l15_batch)\n",
+    "\n",
+    "deconv3_stage = decoder_stages.stage(l15_drop)\n",
+    "\n",
+    "### Deconv 4 ###\n",
+    "\n",
+    "# Layer 16 #\n",
+    "\n",
+    "l16 = Conv2DTranspose(filters = feature_multiplier * 2, \n",
+    "                     kernel_size = kernel_size,\n",
+    "                     strides = 2,\n",
+    "                     kernel_initializer = init_func,\n",
+    "                     padding = \"same\")(deconv3_stage)\n",
+    "\n",
+    "l16_act = LeakyReLU(alpha = 0.2)(l16)\n",
+    "\n",
+    "l16_batch = BatchNormalization()(l16_act)\n",
+    "\n",
+    "# Layer 17 #\n",
+    "\n",
+    "l17 = Conv2DTranspose(filters = feature_multiplier * 2, \n",
+    "                     kernel_size = kernel_size,\n",
+    "                     strides = 1,\n",
+    "                     kernel_initializer = init_func,\n",
+    "                     padding = \"same\")(l16_batch)\n",
+    "\n",
+    "l17_act = LeakyReLU(alpha = 0.2)(l17)\n",
+    "\n",
+    "l17_batch = BatchNormalization()(l17_act)\n",
+    "\n",
+    "l17_drop = Dropout(dropout)(l17_batch)\n",
+    "\n",
+    "deconv4_stage = decoder_stages.stage(l17_drop)\n",
+    "\n",
+    "### Deconv 5 ###\n",
+    "\n",
+    "# Layer 18 #\n",
+    "\n",
+    "l18 = Conv2DTranspose(filters = feature_multiplier * 1, \n",
+    "                     kernel_size = kernel_size,\n",
+    "                     strides = 2,\n",
+    "                     kernel_initializer = init_func,\n",
+    "                     padding = \"same\")(deconv4_stage)\n",
+    "\n",
+    "l18_act = LeakyReLU(alpha = 0.2)(l18)\n",
+    "\n",
+    "l18_batch = BatchNormalization()(l18_act) \n",
+    "\n",
+    "# Layer 19 #\n",
+    "\n",
+    "l19 = Conv2DTranspose(filters = feature_multiplier * 1, \n",
+    "                     kernel_size = kernel_size,\n",
+    "                     strides = 1,\n",
+    "                     kernel_initializer = init_func,\n",
+    "                     padding = \"same\")(l18_batch)\n",
+    "\n",
+    "l19_act = LeakyReLU(alpha = 0.2)(l19)\n",
+    "\n",
+    "l19_batch = BatchNormalization()(l19_act)\n",
+    "\n",
+    "l19_drop = Dropout(dropout)(l19_batch)\n",
+    "\n",
+    "deconv5_stage = decoder_stages.stage(l19_drop)\n",
+    "\n",
+    "### Output Decoder ###\n",
+    "\n",
+    "l20 = Conv2DTranspose(input_shape[-1],\n",
+    "                                 kernel_size = surface_kernel_size,\n",
+    "                                 strides = 2,\n",
+    "                                 padding = \"same\",\n",
+    "                                 kernel_initializer = init_func)(deconv5_stage)\n",
+    "\n",
+    "decoder_output = Activation(\"linear\")(l20)\n",
+    "\n",
+    "decoder_stages.end(decoder_output)"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "## Ensamblando el Autoencoder"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "### Optimizador"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Importamos el optimizador Adam:"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 23,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "from keras.optimizers import Adam"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Definimos los parametros del optimizador:"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 24,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "adam_learning_rate = 0.00015  # El learning rate de Adam (tamaño step)\n",
+    "adam_epsilon = 1e-8  # Previene problemas de división por 0.\n",
+    "adam_lr_decay = 1e-05  # Learning rate decay"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Definimos el optimizador:"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 25,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "optimizer = Adam(lr = adam_learning_rate, \n",
+    "                 epsilon = adam_epsilon, \n",
+    "                 decay = adam_lr_decay)"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "### Modelo por capas"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 26,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "max_enc_stages = len(encoder_stages.stages)\n",
+    "max_dec_stages = len(decoder_stages.stages)"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 27,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "stages = []\n",
+    "\n",
+    "for stage_num in range(1, max_enc_stages + 1):\n",
+    "    stage_input = Input(shape = input_shape)\n",
+    "    x = encoder_stages[0:stage_num](stage_input)\n",
+    "    stage_output = decoder_stages[max_dec_stages-stage_num:max_dec_stages](x)\n",
+    "    stage_model = Model(inputs = stage_input, outputs = stage_output)\n",
+    "    stage_model.compile(loss = \"mse\", metrics = [\"mae\"], optimizer = optimizer)\n",
+    "    stages.append(stage_model)    "
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 28,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "## Autoencoder ##\n",
+    "\n",
+    "autoencoder_input = Input(shape = input_shape)\n",
+    "h = encoder_stages[0:max_enc_stages](autoencoder_input)\n",
+    "autoencoder_output = decoder_stages[0:max_dec_stages](h)\n",
+    "\n",
+    "autoencoder_greedy = Model(inputs = autoencoder_input, outputs = autoencoder_output)\n",
+    "autoencoder_greedy.compile(loss = \"mse\", metrics=[\"mae\"], optimizer = optimizer)\n",
+    "\n",
+    "## Encoder ##\n",
+    "\n",
+    "encoder_input = Input(shape = input_shape)\n",
+    "h = encoder_stages[0:max_enc_stages](encoder_input)\n",
+    "encoder_output = h\n",
+    "\n",
+    "encoder_greedy = Model(inputs = encoder_input, outputs = encoder_output)\n",
+    "\n",
+    "## Decoder ##\n",
+    "\n",
+    "decoder_greedy = decoder_stages.model"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "### Pre-entrenamiento"
+   ]
+  },
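+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "In this greedy layer-wise pre-training, each stage model chains the first i + 1 encoder stages with the last i + 1 decoder stages, so the network learns to reconstruct its input at increasing depth before the full autoencoder is trained end to end."
+   ]
+  },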
+  {
+   "cell_type": "code",
+   "execution_count": 29,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "pre_epochs = 1\n",
+    "pre_batch_size = 128"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "Train on 180000 samples, validate on 20000 samples\n",
+      "Epoch 1/1\n",
+      "180000/180000 [==============================] - 396s 2ms/step - loss: 0.0224 - mae: 0.0631 - val_loss: 0.0021 - val_mae: 0.0301\n",
+      "Train on 180000 samples, validate on 20000 samples\n",
+      "Epoch 1/1\n",
+      "180000/180000 [==============================] - 481s 3ms/step - loss: 0.0070 - mae: 0.0379 - val_loss: 0.0013 - val_mae: 0.0204\n",
+      "Train on 180000 samples, validate on 20000 samples\n",
+      "Epoch 1/1\n",
+      "180000/180000 [==============================] - 520s 3ms/step - loss: 0.0021 - mae: 0.0212 - val_loss: 9.0002e-04 - val_mae: 0.0144\n",
+      "Train on 180000 samples, validate on 20000 samples\n",
+      "Epoch 1/1\n",
+      "180000/180000 [==============================] - 543s 3ms/step - loss: 9.1907e-04 - mae: 0.0127 - val_loss: 6.3946e-04 - val_mae: 0.0111\n",
+      "Train on 180000 samples, validate on 20000 samples\n",
+      "Epoch 1/1\n",
+      "180000/180000 [==============================] - 557s 3ms/step - loss: 9.6276e-04 - mae: 0.0124 - val_loss: 7.3397e-04 - val_mae: 0.0112\n",
+      "Train on 180000 samples, validate on 20000 samples\n",
+      "Epoch 1/1\n",
+      "158848/180000 [=========================>....] - ETA: 1:04 - loss: 9.7541e-04 - mae: 0.0126"
+     ]
+    }
+   ],
+   "source": [
+    "for index, stage in enumerate(stages):\n",
+    "    autoencoder_layer = stage.fit(train_data, train_data,\n",
+    "                                  epochs = pre_epochs,\n",
+    "                                  batch_size = pre_batch_size,\n",
+    "                                  validation_data = (vali_data, vali_data),\n",
+    "                                  shuffle = True)"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "### Entrenamiento"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Parametros del entrenamiento:"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "training_epochs = epochs_autoencoder  # Número de vueltas completas al set de entrenamiento.\n",
+    "batch_size = batch_size_autoencoder  # Número de ejemplos antes de calcular el error de la función de coste."
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Entrenamos el modelo autoencoder:"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "autoencoder_train = autoencoder_greedy.fit(train_data, train_data, \n",
+    "                                    epochs = training_epochs,\n",
+    "                                    batch_size = batch_size,\n",
+    "                                    verbose = 1,\n",
+    "                                    validation_data = (vali_data, vali_data),\n",
+    "                                    shuffle = True)"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Los datos del entrenamiento se guardan en \"autoencoder_train\"."
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "### Plot Errores"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Plot de Loss (MSE y MAE) y Validation Loss (MSE y MAE) respecto a las epochs."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "import matplotlib.pyplot as plt"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "plot_epochs = range(training_epochs)\n",
+    "plot_loss = autoencoder_train.history[\"loss\"]\n",
+    "plot_val_loss = autoencoder_train.history[\"val_loss\"]\n",
+    "plot_mae = autoencoder_train.history[\"mae\"]\n",
+    "plot_val_mae = autoencoder_train.history[\"val_mae\"]\n",
+    "\n",
+    "plt.figure(figsize = (15, 5))\n",
+    "\n",
+    "ax = plt.subplot(1, 2, 1)\n",
+    "plt.plot(plot_epochs, plot_loss, plot_val_loss)\n",
+    "\n",
+    "ax = plt.subplot(1, 2, 2)\n",
+    "plt.plot(plot_epochs, plot_mae, plot_val_mae)\n",
+    "\n",
+    "plt.show()"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "### Guardar Modelo Autoencoder"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "import h5py"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "if save_autoencoder:\n",
+    "    autoencoder_greedy.save(\"autoencoder_model.h5\")\n",
+    "    \n",
+    "else:\n",
+    "    print(\"Modelo no guardado.\")"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Guardamos el modelo del autoencoder con sus pesos / parametros."
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "# Encoder"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Utilizamos las capas iniciales entrenadas por el modelo autoencoder para el modelo Encoder."
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "### Guardar Modelo Encoder"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "if save_autoencoder:\n",
+    "    encoder_greedy.save(\"encoder_model.h5\")\n",
+    "    \n",
+    "else:\n",
+    "    print(\"Modelo no guardado.\")"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Guardamos el modelo encoder con sus pesos."
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "# Decoder"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "### Guardar modelo Decoder"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "if save_autoencoder:\n",
+    "    decoder_greedy.save(\"decoder_model.h5\")\n",
+    "    \n",
+    "else:\n",
+    "    print(\"Modelo no guardado.\")"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Guardamos el modelo decoder con sus pesos."
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "# LSTM"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "El output del modelo encoder sirve como input para la red LSTM."
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "## Optimizador"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "from keras.optimizers import RMSprop"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "lstm_optimizer = RMSprop(lr = 0.000126, \n",
+    "                         rho = 0.9, \n",
+    "                         epsilon = 1e-08,\n",
+    "                         decay = 0.000334)"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "## Parametros LSTM"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "time_steps = time_steps_lstm\n",
+    "out_time_steps = out_time_steps_lstm\n",
+    "data_dimension = 256\n",
+    "\n",
+    "encoder_lstm_neurons = 256\n",
+    "decoder_lstm_neurons = 512\n",
+    "attention_neurons = 400\n",
+    "\n",
+    "activation = \"tanh\"\n",
+    "loss = \"mae\"\n",
+    "batch_size = batch_size_lstm\n",
+    "\n",
+    "dropout = 0.0132\n",
+    "recurrent_dropout = 0.385\n",
+    "use_bias = True\n",
+    "stateful = False"
+   ]
+  },
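+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Note that data_dimension = 256 matches the encoder output: six stride-2 convolutions reduce the 64x64 input to 1x1, with feature_multiplier * 32 = 256 channels, so each frame is encoded as a 256-dimensional vector."
+   ]
+  },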
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "## Capas LSTM "
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Utilizamos 3 capas LSTM. RepeatVector repite el input para la segunda capa de LSTM out_time_steps veces."
+   ]
+  },
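+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "A quick, purely illustrative shape check of RepeatVector:"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# Illustrative (assumed example): RepeatVector turns (None, 256) into (None, n, 256).\n",
+    "from keras.layers import Input, RepeatVector\n",
+    "from keras.models import Model\n",
+    "\n",
+    "v = Input(shape = (256,))\n",
+    "print(Model(v, RepeatVector(1)(v)).output_shape)  # (None, 1, 256)"
+   ]
+  },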
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "from keras.layers import RepeatVector, LSTM"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "input_frames = Input(shape = (time_steps, data_dimension))\n",
+    "\n",
+    "l0 = LSTM(units = encoder_lstm_neurons,\n",
+    "          activation = activation,\n",
+    "          use_bias = use_bias,\n",
+    "          recurrent_activation = \"hard_sigmoid\",\n",
+    "          kernel_initializer='glorot_uniform',\n",
+    "          recurrent_initializer='orthogonal',\n",
+    "          bias_initializer='zeros',\n",
+    "          unit_forget_bias = True,\n",
+    "          dropout = dropout,\n",
+    "          recurrent_dropout = recurrent_dropout,\n",
+    "          return_sequences = False,\n",
+    "          go_backwards = True, \n",
+    "          stateful = stateful)(input_frames)\n",
+    "\n",
+    "l1 = RepeatVector(out_time_steps)(l0)\n",
+    "\n",
+    "l2 = LSTM(units = decoder_lstm_neurons,\n",
+    "          activation = activation,\n",
+    "          use_bias = use_bias,\n",
+    "          recurrent_activation = \"hard_sigmoid\",\n",
+    "          kernel_initializer='glorot_uniform',\n",
+    "          recurrent_initializer='orthogonal',\n",
+    "          bias_initializer='zeros',\n",
+    "          unit_forget_bias = True,\n",
+    "          dropout = dropout,\n",
+    "          recurrent_dropout = recurrent_dropout,\n",
+    "          return_sequences = True,\n",
+    "          go_backwards = False, \n",
+    "          stateful = stateful)(l1)\n",
+    "\n",
+    "l3 = LSTM(units = data_dimension,\n",
+    "          activation = activation,\n",
+    "          use_bias = use_bias,\n",
+    "          recurrent_activation = \"hard_sigmoid\",\n",
+    "          kernel_initializer='glorot_uniform',\n",
+    "          recurrent_initializer='orthogonal',\n",
+    "          bias_initializer='zeros',\n",
+    "          unit_forget_bias = True,\n",
+    "          dropout = dropout,\n",
+    "          recurrent_dropout = recurrent_dropout,\n",
+    "          return_sequences = out_time_steps > 1,\n",
+    "          go_backwards = False, \n",
+    "          stateful = stateful)(l2)     \n",
+    "\n",
+    "output_frames = l3"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "## Modelo"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "lstm = Model(inputs = input_frames, outputs = output_frames)"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "lstm.summary()"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "### Compilación"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "from keras.losses import mean_absolute_error, mean_squared_error"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "lstm.compile(loss = loss,\n",
+    "             optimizer = lstm_optimizer,\n",
+    "             metrics = ['mean_squared_error', 'mean_absolute_error'])"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "## Preparación de datos para LSTM"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Pasamos el set de entrenamiento y validación por el encoder para lograr el input de la red LSTM."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "encoded_train = encoder_greedy.predict(train_data)\n",
+    "encoded_vali = encoder_greedy.predict(vali_data)\n",
+    "\n",
+    "print(encoded_train.shape)\n",
+    "print(encoded_vali.shape)"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Definimos unas cuantas funciones útiles a la hora de preparar el input de la red LSTM:"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "from math import floor"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "def generator_count(encoded_data, batch_size, time_steps, out_time_steps, frames):\n",
+    "    scene_count = len(encoded_data) // frames\n",
+    "    sample_count = frames\n",
+    "    scene_iteration_count = floor((sample_count + 1 - (time_steps + out_time_steps)) / batch_size)\n",
+    "    return scene_count, sample_count, scene_iteration_count"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Cuenta cuantos batches entran en el set de entrenamiento:"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "def generator_batch_samples(encoded_data, batch_size, time_steps, out_time_steps, frames):\n",
+    "    scene_count, sample_count, scene_iteration_count = generator_count(encoded_data, batch_size, time_steps, out_time_steps, frames)\n",
+    "    batch_samples = scene_count * scene_iteration_count\n",
+    "    return batch_samples"
+   ]
+  },
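+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "With the values used here (frames = 200, time_steps = 6, out_time_steps = 1, batch_size = 32), each scene yields floor((200 + 1 - 7) / 32) = 6 batches, so the 900 training scenes give 900 * 6 = 5400 batches per epoch."
+   ]
+  },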
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Función para aplicar el mismo Shuffle a varias arrays, manteniendo el orden:"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "def shuffle_in_unison(*np_arrays):\n",
+    "    rng = np.random.get_state()\n",
+    "    for array in np_arrays:\n",
+    "        np.random.set_state(rng)\n",
+    "        np.random.shuffle(array)"
+   ]
+  },
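+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "For example (illustrative only), paired arrays keep their row correspondence after the shared shuffle:"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# Illustrative (assumed example): rows of a and their labels in b stay aligned.\n",
+    "a = np.arange(6).reshape(3, 2)\n",
+    "b = np.arange(3)\n",
+    "shuffle_in_unison(a, b)\n",
+    "print(a)  # rows permuted\n",
+    "print(b)  # b[i] is the original index of the row now at a[i]"
+   ]
+  },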
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Reestructuramos los datos codificados."
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Devuelve arrays con la forma (batch_size, time_steps, data_dimension) y (batch_size, out_time_steps, data_dimension)."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "def restructure_encoded_data(encoded_data, time_steps, out_time_steps, batch_size):\n",
+    "    \n",
+    "    content_shape = encoded_data[0].shape  # (256,)\n",
+    "    final_sample_count = encoded_data.shape[0] - time_steps - out_time_steps  # frames, frames - batch_size, frames - 2 * batch_size, ...\n",
+    "    final_sample_count = min(batch_size, final_sample_count)  # 8\n",
+    "        \n",
+    "    X_data = np.zeros((final_sample_count, time_steps) + content_shape)  # (8, 6, 256)\n",
+    "    y_data = np.zeros((final_sample_count, out_time_steps) + content_shape)  # (8, 1, 256)\n",
+    "        \n",
+    "    curTS = 0\n",
+    "            \n",
+    "    for z in range(time_steps, final_sample_count + time_steps):\n",
+    "        X_data[curTS] = np.array(encoded_data[curTS:z])\n",
+    "        y_data[curTS] = np.array(encoded_data[z:z+out_time_steps])\n",
+    "        curTS += 1\n",
+    "        \n",
+    "    return X_data, y_data"
+   ]
+  },
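+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "A quick shape sanity check (illustrative, with random data standing in for encoder codes):"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# Illustrative (assumed example): 20 fake codes of dimension 256, batch size 8.\n",
+    "fake_codes = np.random.rand(20, 256)\n",
+    "X_demo, y_demo = restructure_encoded_data(fake_codes, time_steps, out_time_steps, 8)\n",
+    "print(X_demo.shape)  # (8, 6, 256)\n",
+    "print(y_demo.shape)  # (8, 1, 256)"
+   ]
+  },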
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Generador para entrenar a la red LSTM."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "def generator_scene(encoded_data, batch_size, time_steps, out_time_steps, frames):\n",
+    "    \n",
+    "    scene_count, sample_count, scene_iteration_count = generator_count(encoded_data, batch_size, time_steps, out_time_steps, frames)\n",
+    "    \n",
+    "    while True:\n",
+    "\n",
+    "        for i in range(scene_count):\n",
+    "            \n",
+    "            scene = encoded_train[(i * frames):((i + 1) * frames)]  # Selecciona escenas individualmente.\n",
+    "     \n",
+    "            for j in range(scene_iteration_count):  # Número de batches que entran en una escena individual.\n",
+    "                start = j * batch_size\n",
+    "                end = sample_count\n",
+    "                \n",
+    "                data = scene[start:end]\n",
+    "                X, Y  = restructure_encoded_data(data, time_steps, out_time_steps, batch_size)\n",
+    "            \n",
+    "                X = X.reshape(*X.shape[0:2], -1)\n",
+    "                Y = np.squeeze(Y.reshape(Y.shape[0], out_time_steps, -1))\n",
+    "                \n",
+    "                shuffle_in_unison(X, Y)\n",
+    "        \n",
+    "                yield X, Y"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "train_gen_samples = generator_batch_samples(encoded_train, batch_size, time_steps, out_time_steps, frames)\n",
+    "print (\"Number of train batch samples per epoch: {}\".format(train_gen_samples))\n",
+    "train_generator = generator_scene(encoded_train, batch_size, time_steps, out_time_steps, frames)"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "vali_gen_samples = generator_batch_samples(encoded_vali, batch_size, time_steps, out_time_steps, frames)\n",
+    "print (\"Number of validation batch samples per epoch: {}\".format(vali_gen_samples))\n",
+    "vali_generator = generator_scene(encoded_vali, batch_size, time_steps, out_time_steps, frames)"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "## Entrenamiento"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "training_epochs = epochs_lstm"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "lstm_train = lstm.fit_generator(generator = train_generator,\n",
+    "                                    steps_per_epoch = train_gen_samples,\n",
+    "                                    epochs = training_epochs,\n",
+    "                                    verbose = 1,\n",
+    "                                    callbacks = None,\n",
+    "                                    validation_data = vali_generator,\n",
+    "                                    validation_steps = vali_gen_samples,\n",
+    "                                    class_weight = None,\n",
+    "                                    workers = 1)"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "### Plot Errores"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "plot_epochs = range(training_epochs)\n",
+    "plot_loss = lstm_train.history[\"loss\"]\n",
+    "plot_val_loss = lstm_train.history[\"val_loss\"]\n",
+    "\n",
+    "plt.plot(plot_epochs, plot_loss, plot_val_loss)\n",
+    "plt.show()"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "## Guardar Modelo LSTM"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "if save_lstm:\n",
+    "    lstm.save(\"lstm_model.h5\")\n",
+    "    \n",
+    "else:\n",
+    "    print(\"Modelo no guardado.\")"
+   ]
+  }
+ ],
+ "metadata": {
+  "kernelspec": {
+   "display_name": "Python 3",
+   "language": "python",
+   "name": "python3"
+  },
+  "language_info": {
+   "codemirror_mode": {
+    "name": "ipython",
+    "version": 3
+   },
+   "file_extension": ".py",
+   "mimetype": "text/x-python",
+   "name": "python",
+   "nbconvert_exporter": "python",
+   "pygments_lexer": "ipython3",
+   "version": "3.6.8"
+  },
+  "toc": {
+   "base_numbering": 1,
+   "nav_menu": {},
+   "number_sections": true,
+   "sideBar": true,
+   "skip_h1_title": false,
+   "title_cell": "Table of Contents",
+   "title_sidebar": "Contents",
+   "toc_cell": false,
+   "toc_position": {},
+   "toc_section_display": true,
+   "toc_window_display": false
+  }
+ },
+ "nbformat": 4,
+ "nbformat_minor": 2
+}
diff --git a/Scripts/Notebooks/Modelo Total.ipynb b/Scripts/Notebooks/Modelo Total.ipynb
new file mode 100644
index 0000000000000000000000000000000000000000..4e77533a6db20cf174f6b8a0b643e80ae830fce4
--- /dev/null
+++ b/Scripts/Notebooks/Modelo Total.ipynb	
@@ -0,0 +1,2242 @@
+{
+ "cells": [
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "# Datos iniciales"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "## Importamos las bibliotecas necesarias"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "import time\n",
+    "import os\n",
+    "import shutil\n",
+    "import sys\n",
+    "import math\n",
+    "import random\n",
+    "import tensorflow as tf\n",
+    "import numpy as np\n",
+    "import scipy.misc\n",
+    "import matplotlib.pyplot as plt\n",
+    "sys.path.append(\"../tools\")  # Herramientas propias de MantaFlow\n",
+    "import uniio  # Lectura de ficheros .uni"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Inicializamos las seed para funciones random. Al ser inicializadas al mismo número, el resultado no cambiará en cada ejecución."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 2,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "np.random.seed(13)"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Ruta a los datos de simulación, donde también se guardan los resultados."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 3,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "base_path = \"../data\""
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "## Carga de datos de simulación"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Cargamos los datos desde los ficheros .uni en arrays de numpy. Los .uni son ficheros propios de MantaFlow, en los que se guarda los resultados de los simuladores clásicos."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 6,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "densities = []  # Creamos la lista \"densities\".\n",
+    "frames = 200\n",
+    "sims = 1500"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 7,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "for sim in range(1000, sims):\n",
+    "    \n",
+    "    if os.path.exists(\"%s/simSimple_%04d\" % (base_path, sim)):  # Comprueba la existencia de las carpetas (cada una 100 frames de datos).\n",
+    "        \n",
+    "        for i in range(0, frames):\n",
+    "            \n",
+    "            filename = \"%s/simSimple_%04d/density_%04d.uni\"  # Nombre de cada frame (densidad).\n",
+    "            uni_path = filename % (base_path, sim, i)  # 200 frames por sim, rellena parametros de la ruta.\n",
+    "            header, content = uniio.readUni(uni_path)  # Devuelve un array Numpy [Z, Y, X, C].\n",
+    "            \n",
+    "            h = header[\"dimX\"]\n",
+    "            w = header[\"dimY\"]\n",
+    "            \n",
+    "            arr = content[:, ::-1, :, :]  # Cambia el orden de Y.\n",
+    "            arr = np.reshape(arr, [w, h, 1])  # Deshecha Z.\n",
+    "            \n",
+    "            densities.append(arr)"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Devuelve los datos de cada frame (canal de grises, 0 a 255) en una lista ( metodo .append() ). En este caso las imagenes son de 64x64 pixels."
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Necesitamos al menos 2 simulaciones para trabajar de manera adecuada."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 8,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "load_num = len(densities)"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 9,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "if load_num < 2 * frames:\n",
+    "    \n",
+    "    print(\"Error - usa al menos dos simulaciones completas\")\n",
+    "    \n",
+    "    exit(True)"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Convertimos la lista \"densities\" en un array de Numpy."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 10,
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "Forma del array: (100000, 64, 64, 1)\n",
+      "Dimensiones del array: 4\n",
+      "Número de pixels en total: 409600000\n"
+     ]
+    }
+   ],
+   "source": [
+    "densities = np.reshape(densities, (len(densities), 64, 64, 1))\n",
+    "\n",
+    "print(\"Forma del array: {}\".format(densities.shape))\n",
+    "print(\"Dimensiones del array: {}\".format(densities.ndim))\n",
+    "print(\"Número de pixels en total: {}\".format(densities.size))"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "## Creación del set de validación"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Creamos el set de validación de entre los datos de simulación generados, al menos una simulación completa o el 10% de los datos (el que sea mayor de los dos)."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 11,
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "Read uni files, total data (100000, 64, 64, 1)\n",
+      "Split into 100000 training and 10000 validation samples\n"
+     ]
+    }
+   ],
+   "source": [
+    "print(\"Read uni files, total data \" + format(densities.shape))\n",
+    "\n",
+    "vali_size = max(200, int(load_num * 0.1)) # Al menos una simu completa.\n",
+    "vali_data = densities[load_num - vali_size : load_num, :]\n",
+    "train_data = densities[0 : load_num - vali_size, :]\n",
+    "\n",
+    "print(\"Split into {} training and {} validation samples\".format(densities.shape[0], vali_data.shape[0]))\n",
+    "\n",
+    "load_num = densities.shape[0]"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Convertimos los datos de entrenamiento y validación en arrays."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 12,
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "Forma del set de entrenamiento: (90000, 64, 64, 1)\n",
+      "Forma del set de validación: (10000, 64, 64, 1)\n"
+     ]
+    }
+   ],
+   "source": [
+    "train_data = np.reshape(train_data, (len(train_data), 64, 64, 1))\n",
+    "vali_data = np.reshape(vali_data, (len(vali_data), 64, 64, 1))\n",
+    "\n",
+    "print(\"Forma del set de entrenamiento: {}\".format(train_data.shape))\n",
+    "print(\"Forma del set de validación: {}\".format(vali_data.shape))"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "# Modelo Autoencoder mediante Keras"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "## Creacion de las capas del modelo"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "El modelo que vamos a utilizar es un autoencoder completamente convolucional. Las típicas capas de MaxPooling y UpSampling no aparecen en nuestro modelo, y en su lugar cambiamos las dimensiones mediante un Stride de 2.  "
+   ]
+  },
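+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "A minimal sketch (not part of the model itself): a stride-2 convolution halves the spatial resolution just like pooling would, but it learns its downsampling filters. The filter count and kernel size below are illustrative only."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "from keras import backend as K\n",
+    "from keras.layers import Input, Conv2D, MaxPooling2D\n",
+    "\n",
+    "x = Input(shape=(64, 64, 1))\n",
+    "conv_down = Conv2D(8, 2, strides=2, padding=\"same\")(x)  # Learned downsampling.\n",
+    "pool_down = MaxPooling2D(pool_size=2)(x)  # Fixed max-pooling.\n",
+    "\n",
+    "print(K.int_shape(conv_down))  # (None, 32, 32, 8)\n",
+    "print(K.int_shape(pool_down))  # (None, 32, 32, 1)"
+   ]
+  },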
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "### Definición de parametros de inicialización"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Regula la cantidad de filtros convolucionales:"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 13,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "feature_multiplier = 8 "
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Tamaño del kernel de la primera capa del encoder y la última del decoder (kernels exteriores):"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 14,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "surface_kernel_size = 4  # Matriz 4x4"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Tamaño de los kernels interiores:"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 15,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "kernel_size = 2  # Matriz 2x2"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "El valor de la capa Dropout:"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 16,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "dropout = 0.0"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "La función que utilizamos para inicializar los parametros de las capas:"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 17,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "init_func = \"glorot_normal\""
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "En la primera capa debemos definir las dimensiones del input esperado."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 18,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "input_shape = (train_data.shape[1], \n",
+    "               train_data.shape[2], \n",
+    "               train_data.shape[3])"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "### Importamos librerías"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 19,
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stderr",
+     "output_type": "stream",
+     "text": [
+      "Using TensorFlow backend.\n"
+     ]
+    }
+   ],
+   "source": [
+    "from keras.models import Model\n",
+    "from keras.layers import Input, Activation, Dropout, Conv2D, Conv2DTranspose, BatchNormalization\n",
+    "from keras.layers.advanced_activations import LeakyReLU"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "### Capas del Encoder"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 20,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# Input\n",
+    "encoder_input = Input(shape = input_shape)\n",
+    "\n",
+    "### Conv 1 ###\n",
+    "\n",
+    "# Layer 0 #\n",
+    "\n",
+    "l0 = Conv2D(filters = feature_multiplier * 1, \n",
+    "            kernel_size = surface_kernel_size,\n",
+    "            strides = 1,\n",
+    "            padding = \"same\",\n",
+    "            kernel_initializer = init_func)(encoder_input)\n",
+    "\n",
+    "l0_act = LeakyReLU(alpha = 0.2)(l0)\n",
+    "\n",
+    "l0_batch = BatchNormalization()(l0_act)\n",
+    "\n",
+    "# Layer 1 #\n",
+    "\n",
+    "l1 = Conv2D(filters = feature_multiplier * 1, \n",
+    "            kernel_size = surface_kernel_size,\n",
+    "            strides = 1,\n",
+    "            kernel_initializer = init_func,\n",
+    "            padding = \"same\")(l0_batch)\n",
+    "\n",
+    "l1_act = LeakyReLU(alpha = 0.2)(l1)\n",
+    "\n",
+    "l1_batch = BatchNormalization()(l1_act)\n",
+    "\n",
+    "# Layer 2 #\n",
+    "\n",
+    "l2 = Conv2D(filters = feature_multiplier * 1, \n",
+    "            kernel_size = surface_kernel_size,\n",
+    "            strides = 2,\n",
+    "            kernel_initializer = init_func, \n",
+    "            padding = \"same\")(l1_batch)\n",
+    "\n",
+    "l2_act = LeakyReLU(alpha = 0.2)(l2)\n",
+    "\n",
+    "l2_batch = BatchNormalization()(l2_act)\n",
+    "\n",
+    "l2_drop = Dropout(dropout)(l2_batch)\n",
+    "\n",
+    "### Conv 2 ###\n",
+    "\n",
+    "# Layer 3 #\n",
+    "\n",
+    "l3 = Conv2D(filters = feature_multiplier * 2, \n",
+    "            kernel_size = kernel_size,\n",
+    "            strides = 1,\n",
+    "            kernel_initializer = init_func, \n",
+    "            padding = \"same\")(l2_drop)\n",
+    "\n",
+    "l3_act = LeakyReLU(alpha = 0.2)(l3)\n",
+    "\n",
+    "l3_batch = BatchNormalization()(l3_act)\n",
+    "\n",
+    "# Layer 4 #\n",
+    "\n",
+    "l4 = Conv2D(filters = feature_multiplier * 2, \n",
+    "            kernel_size = kernel_size,\n",
+    "            strides = 2,\n",
+    "            kernel_initializer = init_func, \n",
+    "            padding = \"same\")(l3_batch)\n",
+    "\n",
+    "l4_act = LeakyReLU(alpha = 0.2)(l4)\n",
+    "\n",
+    "l4_batch = BatchNormalization()(l4_act)\n",
+    "\n",
+    "l4_drop = Dropout(dropout)(l4_batch)\n",
+    "\n",
+    "### Conv 3 ###\n",
+    "\n",
+    "# Layer 5 #\n",
+    "\n",
+    "l5 = Conv2D(filters = feature_multiplier * 4, \n",
+    "            kernel_size = kernel_size,\n",
+    "            strides = 1,\n",
+    "            kernel_initializer = init_func, \n",
+    "            padding = \"same\")(l4_drop)\n",
+    "\n",
+    "l5_act = LeakyReLU(alpha = 0.2)(l5)\n",
+    "\n",
+    "l5_batch = BatchNormalization()(l5_act)\n",
+    "\n",
+    "# Layer 6 #\n",
+    "\n",
+    "l6 = Conv2D(filters = feature_multiplier * 4, \n",
+    "            kernel_size = kernel_size,\n",
+    "            strides = 2,\n",
+    "            kernel_initializer = init_func, \n",
+    "            padding = \"same\")(l5_batch)\n",
+    "\n",
+    "l6_act = LeakyReLU(alpha = 0.2)(l6)\n",
+    "\n",
+    "l6_batch = BatchNormalization()(l6_act)\n",
+    "\n",
+    "l6_drop = Dropout(dropout)(l6_batch)\n",
+    "\n",
+    "### Conv 4 ###\n",
+    "\n",
+    "# Layer 7 #\n",
+    "\n",
+    "l7 = Conv2D(filters = feature_multiplier * 8, \n",
+    "            kernel_size = kernel_size,\n",
+    "            strides = 1,\n",
+    "            kernel_initializer = init_func, \n",
+    "            padding = \"same\")(l6_drop)\n",
+    "\n",
+    "l7_act = LeakyReLU(alpha = 0.2)(l7)\n",
+    "\n",
+    "l7_batch = BatchNormalization()(l7_act)\n",
+    "\n",
+    "# Layer 8 #\n",
+    "\n",
+    "l8 = Conv2D(filters = feature_multiplier * 8, \n",
+    "            kernel_size = kernel_size,\n",
+    "            strides = 2,\n",
+    "            kernel_initializer = init_func, \n",
+    "            padding = \"same\")(l7_batch)\n",
+    "\n",
+    "l8_act = LeakyReLU(alpha = 0.2)(l8)\n",
+    "\n",
+    "l8_batch = BatchNormalization()(l8_act)\n",
+    "\n",
+    "l8_drop = Dropout(dropout)(l8_batch)\n",
+    "\n",
+    "### Conv 5 ###\n",
+    "\n",
+    "# Layer 9 #\n",
+    "\n",
+    "l9 = Conv2D(filters = feature_multiplier * 16, \n",
+    "            kernel_size = kernel_size,\n",
+    "            strides = 2,\n",
+    "            kernel_initializer = init_func, \n",
+    "            padding = \"same\")(l8_drop)\n",
+    "\n",
+    "l9_act = LeakyReLU(alpha = 0.2)(l9)\n",
+    "\n",
+    "l9_batch = BatchNormalization()(l9_act)\n",
+    "\n",
+    "l9_drop = Dropout(dropout)(l9_batch)\n",
+    "\n",
+    "# Layer 10 #\n",
+    "\n",
+    "l10 = Conv2D(filters = feature_multiplier * 32, \n",
+    "            kernel_size = kernel_size,\n",
+    "            strides = 2,\n",
+    "            kernel_initializer = init_func, \n",
+    "            padding = \"same\")(l9_drop)\n",
+    "\n",
+    "l10_act = LeakyReLU(alpha = 0.2)(l10)\n",
+    "\n",
+    "l10_batch = BatchNormalization()(l10_act)\n",
+    "\n",
+    "### Output Encoder ###\n",
+    "\n",
+    "encoder_output = l10_batch"
+   ]
+  },
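+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "The encoder compresses each 64x64x1 frame into a 1x1x256 latent code (visible in the model summary below). A quick illustrative check of the compression factor:"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "print((64 * 64 * 1) / (1 * 1 * 256))  # 16.0: each frame shrinks 16x in the latent space."
+   ]
+  },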
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "### Capas del Decoder"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 21,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "### Deconv 1 ###\n",
+    "\n",
+    "# Layer 11 #\n",
+    "\n",
+    "decoder_input = Conv2DTranspose(filters = feature_multiplier * 16, \n",
+    "                     kernel_size = kernel_size,\n",
+    "                     strides = 2,\n",
+    "                     kernel_initializer = init_func,\n",
+    "                     padding = \"same\")(encoder_output)\n",
+    "\n",
+    "l11_act = LeakyReLU(alpha = 0.2)(decoder_input)\n",
+    "\n",
+    "l11_batch = BatchNormalization()(l11_act)\n",
+    "\n",
+    "l11_drop = Dropout(dropout)(l11_batch)\n",
+    "\n",
+    "### Deconv 2 ###\n",
+    "\n",
+    "# Layer 12  #\n",
+    "\n",
+    "l12 = Conv2DTranspose(filters = feature_multiplier * 8, \n",
+    "                     kernel_size = kernel_size,\n",
+    "                     strides = 2,\n",
+    "                     kernel_initializer = init_func, \n",
+    "                     padding = \"same\")(l11_drop)\n",
+    "\n",
+    "l12_act = LeakyReLU(alpha = 0.2)(l12)\n",
+    "\n",
+    "l12_batch = BatchNormalization()(l12_act)\n",
+    "\n",
+    "l12_drop = Dropout(dropout)(l12_batch)\n",
+    "\n",
+    "# Layer 13 #\n",
+    "\n",
+    "l13 = Conv2DTranspose(filters = feature_multiplier * 8, \n",
+    "                     kernel_size = kernel_size,\n",
+    "                     strides = 1,\n",
+    "                     kernel_initializer = init_func,\n",
+    "                     padding = \"same\")(l12_drop)\n",
+    "\n",
+    "l13_act = LeakyReLU(alpha = 0.2)(l13)\n",
+    "\n",
+    "l13_batch = BatchNormalization()(l13_act)\n",
+    "\n",
+    "### Deconv 3 ###\n",
+    "\n",
+    "# Layer 14 #\n",
+    "\n",
+    "l14 = Conv2DTranspose(filters = feature_multiplier * 4, \n",
+    "                     kernel_size = kernel_size,\n",
+    "                     strides = 2,\n",
+    "                     kernel_initializer = init_func,\n",
+    "                     padding = \"same\")(l13_batch)\n",
+    "\n",
+    "l14_act = LeakyReLU(alpha = 0.2)(l14)\n",
+    "\n",
+    "l14_batch = BatchNormalization()(l14_act)\n",
+    "\n",
+    "# Layer 15 #\n",
+    "\n",
+    "l15 = Conv2DTranspose(filters = feature_multiplier * 4, \n",
+    "                     kernel_size = kernel_size,\n",
+    "                     strides = 1,\n",
+    "                     kernel_initializer = init_func,\n",
+    "                     padding = \"same\")(l14_batch)\n",
+    "\n",
+    "l15_act = LeakyReLU(alpha = 0.2)(l15)\n",
+    "\n",
+    "l15_batch = BatchNormalization()(l15_act)\n",
+    "\n",
+    "l15_drop = Dropout(dropout)(l15_batch)\n",
+    "\n",
+    "### Deconv 4 ###\n",
+    "\n",
+    "# Layer 16 #\n",
+    "\n",
+    "l16 = Conv2DTranspose(filters = feature_multiplier * 2, \n",
+    "                     kernel_size = kernel_size,\n",
+    "                     strides = 2,\n",
+    "                     kernel_initializer = init_func,\n",
+    "                     padding = \"same\")(l15_drop)\n",
+    "\n",
+    "l16_act = LeakyReLU(alpha = 0.2)(l16)\n",
+    "\n",
+    "l16_batch = BatchNormalization()(l16_act)\n",
+    "\n",
+    "# Layer 17 #\n",
+    "\n",
+    "l17 = Conv2DTranspose(filters = feature_multiplier * 2, \n",
+    "                     kernel_size = kernel_size,\n",
+    "                     strides = 1,\n",
+    "                     kernel_initializer = init_func,\n",
+    "                     padding = \"same\")(l16_batch)\n",
+    "\n",
+    "l17_act = LeakyReLU(alpha = 0.2)(l17)\n",
+    "\n",
+    "l17_batch = BatchNormalization()(l17_act)\n",
+    "\n",
+    "l17_drop = Dropout(dropout)(l17_batch)\n",
+    "\n",
+    "### Deconv 5 ###\n",
+    "\n",
+    "# Layer 18 #\n",
+    "\n",
+    "l18 = Conv2DTranspose(filters = feature_multiplier * 1, \n",
+    "                     kernel_size = kernel_size,\n",
+    "                     strides = 2,\n",
+    "                     kernel_initializer = init_func,\n",
+    "                     padding = \"same\")(l17_drop)\n",
+    "\n",
+    "l18_act = LeakyReLU(alpha = 0.2)(l18)\n",
+    "\n",
+    "l18_batch = BatchNormalization()(l18_act) \n",
+    "\n",
+    "# Layer 19 #\n",
+    "\n",
+    "l19 = Conv2DTranspose(filters = feature_multiplier * 1, \n",
+    "                     kernel_size = kernel_size,\n",
+    "                     strides = 1,\n",
+    "                     kernel_initializer = init_func,\n",
+    "                     padding = \"same\")(l18_batch)\n",
+    "\n",
+    "l19_act = LeakyReLU(alpha = 0.2)(l19)\n",
+    "\n",
+    "l19_batch = BatchNormalization()(l19_act)\n",
+    "\n",
+    "l19_drop = Dropout(dropout)(l19_batch)\n",
+    "\n",
+    "### Output Decoder ###\n",
+    "\n",
+    "decoder_output = Conv2DTranspose(input_shape[-1],\n",
+    "                                 kernel_size = surface_kernel_size,\n",
+    "                                 strides = 2,\n",
+    "                                 padding = \"same\",\n",
+    "                                 kernel_initializer = init_func)(l19_drop)"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "## Ensamblando el Autoencoder"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Especificamos la primera y última capa para crear el modelo."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 22,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "autoencoder = Model(encoder_input, decoder_output)"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Es recomendable visualizar el resumen del modelo."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 23,
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "Model: \"model_1\"\n",
+      "_________________________________________________________________\n",
+      "Layer (type)                 Output Shape              Param #   \n",
+      "=================================================================\n",
+      "input_1 (InputLayer)         (None, 64, 64, 1)         0         \n",
+      "_________________________________________________________________\n",
+      "conv2d_1 (Conv2D)            (None, 64, 64, 8)         136       \n",
+      "_________________________________________________________________\n",
+      "leaky_re_lu_1 (LeakyReLU)    (None, 64, 64, 8)         0         \n",
+      "_________________________________________________________________\n",
+      "batch_normalization_1 (Batch (None, 64, 64, 8)         32        \n",
+      "_________________________________________________________________\n",
+      "conv2d_2 (Conv2D)            (None, 64, 64, 8)         1032      \n",
+      "_________________________________________________________________\n",
+      "leaky_re_lu_2 (LeakyReLU)    (None, 64, 64, 8)         0         \n",
+      "_________________________________________________________________\n",
+      "batch_normalization_2 (Batch (None, 64, 64, 8)         32        \n",
+      "_________________________________________________________________\n",
+      "conv2d_3 (Conv2D)            (None, 32, 32, 8)         1032      \n",
+      "_________________________________________________________________\n",
+      "leaky_re_lu_3 (LeakyReLU)    (None, 32, 32, 8)         0         \n",
+      "_________________________________________________________________\n",
+      "batch_normalization_3 (Batch (None, 32, 32, 8)         32        \n",
+      "_________________________________________________________________\n",
+      "dropout_1 (Dropout)          (None, 32, 32, 8)         0         \n",
+      "_________________________________________________________________\n",
+      "conv2d_4 (Conv2D)            (None, 32, 32, 16)        528       \n",
+      "_________________________________________________________________\n",
+      "leaky_re_lu_4 (LeakyReLU)    (None, 32, 32, 16)        0         \n",
+      "_________________________________________________________________\n",
+      "batch_normalization_4 (Batch (None, 32, 32, 16)        64        \n",
+      "_________________________________________________________________\n",
+      "conv2d_5 (Conv2D)            (None, 16, 16, 16)        1040      \n",
+      "_________________________________________________________________\n",
+      "leaky_re_lu_5 (LeakyReLU)    (None, 16, 16, 16)        0         \n",
+      "_________________________________________________________________\n",
+      "batch_normalization_5 (Batch (None, 16, 16, 16)        64        \n",
+      "_________________________________________________________________\n",
+      "dropout_2 (Dropout)          (None, 16, 16, 16)        0         \n",
+      "_________________________________________________________________\n",
+      "conv2d_6 (Conv2D)            (None, 16, 16, 32)        2080      \n",
+      "_________________________________________________________________\n",
+      "leaky_re_lu_6 (LeakyReLU)    (None, 16, 16, 32)        0         \n",
+      "_________________________________________________________________\n",
+      "batch_normalization_6 (Batch (None, 16, 16, 32)        128       \n",
+      "_________________________________________________________________\n",
+      "conv2d_7 (Conv2D)            (None, 8, 8, 32)          4128      \n",
+      "_________________________________________________________________\n",
+      "leaky_re_lu_7 (LeakyReLU)    (None, 8, 8, 32)          0         \n",
+      "_________________________________________________________________\n",
+      "batch_normalization_7 (Batch (None, 8, 8, 32)          128       \n",
+      "_________________________________________________________________\n",
+      "dropout_3 (Dropout)          (None, 8, 8, 32)          0         \n",
+      "_________________________________________________________________\n",
+      "conv2d_8 (Conv2D)            (None, 8, 8, 64)          8256      \n",
+      "_________________________________________________________________\n",
+      "leaky_re_lu_8 (LeakyReLU)    (None, 8, 8, 64)          0         \n",
+      "_________________________________________________________________\n",
+      "batch_normalization_8 (Batch (None, 8, 8, 64)          256       \n",
+      "_________________________________________________________________\n",
+      "conv2d_9 (Conv2D)            (None, 4, 4, 64)          16448     \n",
+      "_________________________________________________________________\n",
+      "leaky_re_lu_9 (LeakyReLU)    (None, 4, 4, 64)          0         \n",
+      "_________________________________________________________________\n",
+      "batch_normalization_9 (Batch (None, 4, 4, 64)          256       \n",
+      "_________________________________________________________________\n",
+      "dropout_4 (Dropout)          (None, 4, 4, 64)          0         \n",
+      "_________________________________________________________________\n",
+      "conv2d_10 (Conv2D)           (None, 2, 2, 128)         32896     \n",
+      "_________________________________________________________________\n",
+      "leaky_re_lu_10 (LeakyReLU)   (None, 2, 2, 128)         0         \n",
+      "_________________________________________________________________\n",
+      "batch_normalization_10 (Batc (None, 2, 2, 128)         512       \n",
+      "_________________________________________________________________\n",
+      "dropout_5 (Dropout)          (None, 2, 2, 128)         0         \n",
+      "_________________________________________________________________\n",
+      "conv2d_11 (Conv2D)           (None, 1, 1, 256)         131328    \n",
+      "_________________________________________________________________\n",
+      "leaky_re_lu_11 (LeakyReLU)   (None, 1, 1, 256)         0         \n",
+      "_________________________________________________________________\n",
+      "batch_normalization_11 (Batc (None, 1, 1, 256)         1024      \n",
+      "_________________________________________________________________\n",
+      "conv2d_transpose_1 (Conv2DTr (None, 2, 2, 128)         131200    \n",
+      "_________________________________________________________________\n",
+      "leaky_re_lu_12 (LeakyReLU)   (None, 2, 2, 128)         0         \n",
+      "_________________________________________________________________\n",
+      "batch_normalization_12 (Batc (None, 2, 2, 128)         512       \n",
+      "_________________________________________________________________\n",
+      "dropout_6 (Dropout)          (None, 2, 2, 128)         0         \n",
+      "_________________________________________________________________\n",
+      "conv2d_transpose_2 (Conv2DTr (None, 4, 4, 64)          32832     \n",
+      "_________________________________________________________________\n",
+      "leaky_re_lu_13 (LeakyReLU)   (None, 4, 4, 64)          0         \n",
+      "_________________________________________________________________\n",
+      "batch_normalization_13 (Batc (None, 4, 4, 64)          256       \n",
+      "_________________________________________________________________\n",
+      "dropout_7 (Dropout)          (None, 4, 4, 64)          0         \n",
+      "_________________________________________________________________\n",
+      "conv2d_transpose_3 (Conv2DTr (None, 4, 4, 64)          16448     \n",
+      "_________________________________________________________________\n",
+      "leaky_re_lu_14 (LeakyReLU)   (None, 4, 4, 64)          0         \n",
+      "_________________________________________________________________\n",
+      "batch_normalization_14 (Batc (None, 4, 4, 64)          256       \n",
+      "_________________________________________________________________\n",
+      "conv2d_transpose_4 (Conv2DTr (None, 8, 8, 32)          8224      \n",
+      "_________________________________________________________________\n",
+      "leaky_re_lu_15 (LeakyReLU)   (None, 8, 8, 32)          0         \n",
+      "_________________________________________________________________\n",
+      "batch_normalization_15 (Batc (None, 8, 8, 32)          128       \n",
+      "_________________________________________________________________\n",
+      "conv2d_transpose_5 (Conv2DTr (None, 8, 8, 32)          4128      \n",
+      "_________________________________________________________________\n",
+      "leaky_re_lu_16 (LeakyReLU)   (None, 8, 8, 32)          0         \n",
+      "_________________________________________________________________\n",
+      "batch_normalization_16 (Batc (None, 8, 8, 32)          128       \n",
+      "_________________________________________________________________\n",
+      "dropout_8 (Dropout)          (None, 8, 8, 32)          0         \n",
+      "_________________________________________________________________\n",
+      "conv2d_transpose_6 (Conv2DTr (None, 16, 16, 16)        2064      \n",
+      "_________________________________________________________________\n",
+      "leaky_re_lu_17 (LeakyReLU)   (None, 16, 16, 16)        0         \n",
+      "_________________________________________________________________\n",
+      "batch_normalization_17 (Batc (None, 16, 16, 16)        64        \n",
+      "_________________________________________________________________\n",
+      "conv2d_transpose_7 (Conv2DTr (None, 16, 16, 16)        1040      \n",
+      "_________________________________________________________________\n",
+      "leaky_re_lu_18 (LeakyReLU)   (None, 16, 16, 16)        0         \n",
+      "_________________________________________________________________\n",
+      "batch_normalization_18 (Batc (None, 16, 16, 16)        64        \n",
+      "_________________________________________________________________\n",
+      "dropout_9 (Dropout)          (None, 16, 16, 16)        0         \n",
+      "_________________________________________________________________\n",
+      "conv2d_transpose_8 (Conv2DTr (None, 32, 32, 8)         520       \n",
+      "_________________________________________________________________\n",
+      "leaky_re_lu_19 (LeakyReLU)   (None, 32, 32, 8)         0         \n",
+      "_________________________________________________________________\n",
+      "batch_normalization_19 (Batc (None, 32, 32, 8)         32        \n",
+      "_________________________________________________________________\n",
+      "conv2d_transpose_9 (Conv2DTr (None, 32, 32, 8)         264       \n",
+      "_________________________________________________________________\n",
+      "leaky_re_lu_20 (LeakyReLU)   (None, 32, 32, 8)         0         \n",
+      "_________________________________________________________________\n",
+      "batch_normalization_20 (Batc (None, 32, 32, 8)         32        \n",
+      "_________________________________________________________________\n",
+      "dropout_10 (Dropout)         (None, 32, 32, 8)         0         \n",
+      "_________________________________________________________________\n",
+      "conv2d_transpose_10 (Conv2DT (None, 64, 64, 1)         129       \n",
+      "=================================================================\n",
+      "Total params: 399,753\n",
+      "Trainable params: 397,753\n",
+      "Non-trainable params: 2,000\n",
+      "_________________________________________________________________\n"
+     ]
+    }
+   ],
+   "source": [
+    "autoencoder.summary()"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "### Optimizador"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Importamos el optimizador Adam:"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 24,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "from keras.optimizers import Adam"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Definimos los parametros del optimizador:"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 30,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "adam_learning_rate = 0.001  # El learning rate de Adam (tamaño step)\n",
+    "adam_epsilon = 1e-8  # Previene problemas de división por 0.\n",
+    "adam_lr_decay = 0.0005  # Learning rate decay"
+   ]
+  },
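+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "As a rough sketch (the exact schedule depends on the Keras version), `decay` shrinks the learning rate on each batch update, approximately lr / (1 + decay * iterations):"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "lr, decay = 0.001, 0.0005\n",
+    "for it in range(3):\n",
+    "    print(lr / (1.0 + decay * it))  # 0.001, then slightly smaller on each update."
+   ]
+  },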
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Definimos el optimizador."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 31,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "optimizer = Adam(lr = adam_learning_rate, \n",
+    "                 epsilon = adam_epsilon, \n",
+    "                 decay = adam_lr_decay)"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "### Compilación"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Utilizamos el optimizador Adam ya definido, loss \"mse\" (Mean Squared Error) y metrics \"mae\" (Mean Absolute Error)."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 32,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "autoencoder.compile(optimizer = optimizer, \n",
+    "                    loss = \"mse\", \n",
+    "                    metrics = [\"mae\"])"
+   ]
+  },
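+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "For intuition, an illustrative toy example (not part of the pipeline) computing both measures by hand:"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "a = np.zeros((2, 2))\n",
+    "b = np.array([[0.0, 0.1], [0.2, 0.3]])\n",
+    "\n",
+    "print(np.mean((a - b) ** 2))  # Mean squared error: 0.035\n",
+    "print(np.mean(np.abs(a - b)))  # Mean absolute error: 0.15"
+   ]
+  },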
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "### Entrenamiento"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Parametros del entrenamiento:"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 33,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "training_epochs = 5  # Número de vueltas completas al set de entrenamiento.\n",
+    "batch_size = 32  # Número de ejemplos para calcular el error de la gradiente."
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Entrenamos el modelo autoencoder:"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 34,
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "Train on 90000 samples, validate on 10000 samples\n",
+      "Epoch 1/5\n",
+      "90000/90000 [==============================] - 368s 4ms/step - loss: 0.0064 - mae: 0.0324 - val_loss: 0.0019 - val_mae: 0.0187\n",
+      "Epoch 2/5\n",
+      "90000/90000 [==============================] - 361s 4ms/step - loss: 0.0017 - mae: 0.0173 - val_loss: 0.0013 - val_mae: 0.0151\n",
+      "Epoch 3/5\n",
+      "90000/90000 [==============================] - 340s 4ms/step - loss: 0.0013 - mae: 0.0149 - val_loss: 0.0011 - val_mae: 0.0130\n",
+      "Epoch 4/5\n",
+      "90000/90000 [==============================] - 339s 4ms/step - loss: 0.0011 - mae: 0.0133 - val_loss: 9.2643e-04 - val_mae: 0.0125\n",
+      "Epoch 5/5\n",
+      "90000/90000 [==============================] - 347s 4ms/step - loss: 9.3338e-04 - mae: 0.0125 - val_loss: 8.5024e-04 - val_mae: 0.0117\n"
+     ]
+    }
+   ],
+   "source": [
+    "autoencoder_train = autoencoder.fit(train_data, train_data, \n",
+    "                                    epochs = training_epochs,\n",
+    "                                    batch_size = batch_size,\n",
+    "                                    verbose = 1,\n",
+    "                                    validation_data = (vali_data, vali_data),\n",
+    "                                    shuffle = True)"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Los datos del entrenamiento se guardan en \"autoencoder_train\"."
+   ]
+  },
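+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "The History object maps each compiled metric name to a list with one value per epoch; with our loss and metric the keys are \"loss\", \"mae\", \"val_loss\" and \"val_mae\":"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "print(sorted(autoencoder_train.history.keys()))  # ['loss', 'mae', 'val_loss', 'val_mae']"
+   ]
+  },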
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "#### Plot"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Plot de Loss (MSE y MAE) y Validation Loss (MSE y MAE) respecto a las epochs."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 35,
+   "metadata": {},
+   "outputs": [
+    {
+     "data": {
+      "image/png": "iVBORw0KGgoAAAANSUhEUgAAAl8AAAEvCAYAAAB7daRBAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4xLjEsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy8QZhcZAAAgAElEQVR4nOzdd3Rc9Z3//+dnZtR7GWnkhlwkbEuAAWMwWALTJNJIIbuQbEI2hPIN/HaT7GYD33OS3c0m392UTdslCQSSEFKAkOYkYDkOBtsUN6pkucjdxuq21az++f1xr2xZyLasdmdGr8c5OjO6c+/4PdnV5TWfaqy1iIiIiMjk8HldgIiIiMhUovAlIiIiMokUvkREREQmkcKXiIiIyCRS+BIRERGZRApfIiIiIpMo4HUB5yI7O9vm5+d7XYaITJItW7Y0WmuDXtcxHnT/Epl6TncPi6jwlZ+fz+bNm70uQ0QmiTFmn9c1jBfdv0SmntPdw9TtKCIiIjKJFL5EREREJpHCl4iIiMgkUvgSERERmUQKXyIiIiKTSOFLREREZBIpfImIiIhMIoUvERERkUmk8CUiIiIyiaIyfG3Z18wfXj/kdRkiIuesvqWTR9btprOnz+tSRGSCRGX4+vkr+/n3P26lv996XYqIyDmprm3lK3+uZv3ORq9LEZEJEpXhq7Qwm+b2brYebvG6FBGRc7J0ThYp8QFWVtV6XYqITJCoDF/L5jkbiL+wo8HjSkREzk1swMf1C3JZXV1Hb1+/1+WIyASIyvAVTIljYV4q63YqfIlI5CkrCnG0o4eNe5q9LkVEJkBUhi+AksJstuw7QntXr9eliIick6sLg8TH+NT1KBKlojZ8XV0QpKfP8sruJq9LERE5Jwmxfq4uDFJRVauJQyJRKGrD16X5GcTH+FircV8iEoHKi0PUtXTx+sGjXpciIuMsasNXXMDPFXOyWKfp2iISga6dn0vAZ6ioVNejSLSJ2vAFUFoQZHdjOweaO7wuRUTknKQlxHDlvGwqqmqxVl2PItEkusNXYTaAWr9EJCKVF4XY29TB9rpWr0sRkXEU1eFrbjCZaWnxWnJCRCLSDQtzMQZWqutRJKpEdfgyxlBSEGR9TaMWKxSRiBNMiWPxeRkKXyJRJqrDF0BpYZDWzl7eOHjM61JERM5ZWVGIbbWt7G1s97oUERknUR++rpqXhc+gJSdEJCKVFYUAqNCCqyJRI+rDV3piLBfOSNe4LxGJSDMzEymenqrwJRJFoj58AZQWZPP6gaMc6+jxuhQRkXNWXhTi1f1HqWvp9LoUERkHIwpfxphyY8x2Y0yNMeb+YV6PM8Y86b6+wRiTP+i1B9zj240xZYOOpxtjnjbGbDPGVBtjlo7HBxpOaWGQfgsv7dKSEyISecqLna7HVWr9EokKZw1fxhg/8CBwE7AQuM0Ys3DIaXcAR6y184BvA19zr10I3AoUAeXA9933A/gusNJaOx+4CKge+8cZ3kUz00mJC7BW632JSASal5PCnGCSNtoWiRIjaflaAtRYa3dba7uBJ4Cbh5xzM/CY+/xp4DpjjHGPP2Gt7bLW7gFqgCXGmDSgFHgUwFrbba2dsA3MYvw+ls7NYu2OBq0ULSIRqbwoxCu7mznS3u11KSIyRiMJX9OBA4N+P+geG/Yca20vcAzIOsO1s4EG4CfGmNeMMY8YY5JG9QlGqLQwyKGjx9mj6doiEoHKi0P09VtWV9d5XYqIjJFXA+4DwCXAD6y1FwPtwDvGkgEYY+4yxmw2xmxuaBj9jMXSgiCgJSdEJDJdMD2NaWnxmvUoEgVGEr4OATMH/T7DPTbsOcaYAJAGNJ3h2oPAQWvtBvf40zhh7B2stQ9baxdbaxcHg8ERlDu8WVmJ5Gclap9HEYlIxhjKikOs3dlIe1ev1+WIyBiMJHxtAgqMMbONMbE4A+hXDDlnBXC7+/wW4DnrDK5aAdzqzoacDRQAG621tcABY8z57jXXAVvH+FnOqqQgyMu7m+ju1VZDIhJ5yotCdPf28/x2teCLRLKzhi93DNd9QAXOjMSnrLVVxpgvG2Pe5572KJBljKkBPofbhWitrQKewglWK4F7rbV97jX/H/ALY8ybwCLg/43fxxpeaWGQju4+tuw7MtH/lIjIuFucn0lWUqxmPYpEuMBITrLWPgM8M+TYlwY97wQ+fJprvwp8dZjjrwOLz6XYsbpiTiYBn2HtzgaWzs2azH9aRGTM/D7DDQtz+eMbb9PZ00d8jP/sF4lI2JkSK9wPSImP4ZLzMrTVkMgUMtpFoo0xS4wxr7s/bxhjPjDS95xIZcUh2rv7tGi0SASbUuELnK2GKg+10NTW5XUpIjLBxrJINFAJLLbWLsJZJPohY0xghO85Ya6cm0VKXICVlep6FIlUUy58lbhLTqyv0bdGkSlg1ItEW2s73DGvAPHAwArNI3nPCRMX8HPtghxWV9fT26fJQyKRaMqFr+LpaWQkxrB2h8KXyBQwlkWiMcZcboypAt4C7nFfH8l7TqjyohDN7d1s2qvJQyKRaMqFL7/PcNW8bNbt1FZDInJm1toN1toi4DLgAWNM/LlcP16LRA919flB4gI+LbgqEqGmXPgCZ8mJ+tYutte1el2KiEyssSwSfYK1thpoA4pH+J4D143LItFDJcYGKC0MsrKylv5+fYkUiTRTMnyVFGQD2mpIZAoY9SLR7jUBAGPMecB8YO8I33PClReFqG3p5M1Dxyb7nxaRMZqS4SsvLYHC3GRtNSQS5caySDSwDHjDGPM68Dvg09baxtO95+R9Ksd1C3II+IxmPYpEoBEtshqNSgqCPP7KPo5395EQq4UKRaLVaBeJttY+Djw+0vecbOmJsSydm0VFVS1fKD8fY4yX5YjIOZiSLV/gjPvq7u1n495mr0sRERmVsqIQexrb2Vnf5nUpInIOpmz4WpKfSWzAp3FfIhKxblyYizGo61EkwkzZ8JUQ62dJfqa2GhKRiJWTGs8lszIUvkQizJQNXwClhdnsqGuj9lin16WIiIxKeVGIrYdb2N/U4XUpIjJCUzp8DWw1tFatXyISocqKQgBacFUkgkzp8DU/lEIwJU5LTohIxJqVlcjCvFSFL5EIMqXDlzGGkoJs1u9soE+rRItIhCovDrFl/xHqWzSEQiQSTOnwBXB1YZAjHT1Uva1VokUkMpUXh7AWVm2t87oUERmBKR++rpqnrYZEJLIV5CQzJztJXY8iEWLKh6/s5DiKp6eyVuO+RCRCGWO4sSjEy7uaONrR7XU5InIWUz58gTPr8dV9R2jt7PG6FBGRUSkvDtHbb/lrdb3XpYjIWSh8AaUFQXr7La/s1lZDIhKZLpyeRl5avLoeRSKAwhdwyXnpJMb6tdq9iEQsn89QVhTihR0NdHT3el2OiJyBwhcQF/BzxZwsDboXkYhWVhSiq7efF7brXiYSzhS+XKUF2ext6tAWHSISsS7LzyAzKZaV6noUCWsKX66SQm01JCKRLeD3cf2CHJ6rrqert8/rckTkNB
S+XHOyk5ienqBxXyIS0cqLQ7R29fLSriavSxGR01D4chljKC3M5qWaJnr6+r0uR0RkVK6cm01yXIBV6noUCVsKX4OUFgRp7erljQNHvS5FRGRU4mP8LJ+fw6qqOu1ZKxKmFL4GuXJuNj6jrYZEJLKVF4Voau9m816tXSgSjhS+BklLjGHRzHRtNSQiEe2a84PEBnya9SgSphS+higpCPLmwaPaH01EIlZSXIDSgmwqKmuxVl2PIuFG4WuI0sJs+i28WKOZQiISucqKQrx9rJO3Dh3zuhQRGULha4iLZqSTEh/QkhMiEtGuX5CL32e016NIGFL4GiLg93HV3GzW7mhQc72IRKyMpFiumJPJykqFL5Fwo/A1jNLCIG8f62RXQ7vXpYiIjFp5UYhdDe3U1Ld6XYqIDKLwNYySgmxAS06ISGS7sSgEoNYvkTAzovBljCk3xmw3xtQYY+4f5vU4Y8yT7usbjDH5g157wD2+3RhTNuj4XmPMW8aY140xm8fjw4yXmZmJzMlO0rgvEYlouanxXDwrXUtOiISZs4YvY4wfeBC4CVgI3GaMWTjktDuAI9baecC3ga+51y4EbgWKgHLg++77DVhurV1krV085k8yzkoKsnlld7M2pxWRiFZeFKLyUAsHmju8LkVEXCNp+VoC1Fhrd1tru4EngJuHnHMz8Jj7/GngOmOMcY8/Ya3tstbuAWrc9wt7pYVBjvf0sWXvEa9LEREZtTK363HV1jqPKxGRASMJX9OBA4N+P+geG/Yca20vcAzIOsu1FlhljNlijLnrdP+4MeYuY8xmY8zmhobJ6wa8Yk4WMX7DC+p6FJEIlp+dxPxQChUa9yUSNrwccL/MWnsJTnfmvcaY0uFOstY+bK1dbK1dHAwGJ624pLgAl56Xwbod2mpIRCJbeXGITfuaaWjt8roUEWFk4esQMHPQ7zPcY8OeY4wJAGlA05mutdYOPNYDvyMMuyNLCoJsPdyiG5aIRLTy4hDWwl/U9SgSFkYSvjYBBcaY2caYWJwB9CuGnLMCuN19fgvwnHVWKF0B3OrOhpwNFAAbjTFJxpgUAGNMEnAjUDn2jzO+Sguclrb1Nep6FJHIdX5uCvlZiZr1KBImzhq+3DFc9wEVQDXwlLW2yhjzZWPM+9zTHgWyjDE1wOeA+91rq4CngK3ASuBea20fkAusN8a8AWwE/mytXTm+H23siqalkpkUq65HEYloxhjKikK8VNPIseM9XpcjMuUFRnKStfYZ4Jkhx7406Hkn8OHTXPtV4KtDju0GLjrXYiebz2dYNi+btTsb6e+3+HzG65JEREalrDjEQ2t3s2ZbPe+/eOicKRGZTFrh/ixKC4M0tnWxrVbbc4hI5Fo0I53c1Ditdi8SBhS+zuLEVkNackJEIpjP53Q9Pr+jnuPdWjxaxEsKX2eRmxrP/FCKthoSkYhXXhSis6efF7RvrYinFL5GoKQgm017jtDR3et1KSIio7ZkdibpiTFUaNajiKcUvkagtDBId18/G/Y0e12KiMioBfw+rl+Qy+rqOrp7+70uR2TKUvgagcvyM4kL+LTkhIhEvPKiEK2dvby8u8nrUkSmLIWvEYiP8bNkdqYG3YtIxFtWkE1irF9djyIeUvgaoasLg9TUt/H20eNelyIiMmrxMX6Wz89hVVUdff3W63JEpiSFrxEqcbca0qxHEYl05UUhGtu6eHX/Ea9LEZmSFL5GqDA3mdzUONbu1LgvEYlsy+fnEOv3acFVEY8ofI2QMYaSgiDrdzaqqV5EIlpyXIBlBdmsrKzFWt3PRCabwtc5KC0Mcux4D28dOuZ1KSIiY1JeFOLQ0eNUvd3idSkiU47C1zlYNi8bY2CtVocWkQh3/cJcfAbNehTxgMLXOchMiuWC6WkadC8iES8zKZbLZ2dp3JeIBxS+zlFJQTav7j9Ka2eP16WIiIxJeXGInfVt1NS3eV2KyJSi8HWOSgqC9PVbXtql1aFFIoExptwYs90YU2OMuX+Y1+OMMU+6r28wxuS7x28wxmwxxrzlPl476Jrn3fd83f3JmbxPNH5uLMoF1PUoMtkUvs7RJbMySIr1q+tRJAIYY/zAg8BNwELgNmPMwiGn3QEcsdbOA74NfM093gi811p7AXA78PiQ6z5qrV3k/tRP2IeYQHlpCVw0M13hS2SSKXydo9iAj6Vzs1irfR5FIsESoMZau9ta2w08Adw85Jybgcfc508D1xljjLX2NWvt2+7xKiDBGBM3KVVPovKiEG8ePMYh7d4hMmkUvkahtDDI/uYO9jW1e12KiJzZdODAoN8PuseGPcda2wscA7KGnPMh4FVrbdegYz9xuxy/aIwxw/3jxpi7jDGbjTGbGxrCs7W8zO16XKXWL5FJo/A1CgNbDWnJCZHoZ4wpwumKvHvQ4Y+63ZEl7s/HhrvWWvuwtXaxtXZxMBic+GJHYU4wmfNzUzTrUWQSKXyNQn5WIjMzE7TVkEj4OwTMHPT7DPfYsOcYYwJAGtDk/j4D+B3wcWvtroELrLWH3MdW4Jc43ZsRq6w4xKa9zTS2dZ39ZBEZM4WvURjYaujlXU309PV7XY6InN4moMAYM9sYEwvcCqwYcs4KnAH1ALcAz1lrrTEmHfgzcL+19sWBk40xAWNMtvs8BngPUDnBn2NClReF6Lewemud16WITAkKX6NUWhCkrauX1/Yf9boUETkNdwzXfUAFUA08Za2tMsZ82RjzPve0R4EsY0wN8DlgYDmK+4B5wJeGLCkRB1QYY94EXsdpOfvR5H2q8bcgL4VZmYms1LgvkUkR8LqASHXlvCz8PsO6nQ0smZ3pdTkichrW2meAZ4Yc+9Kg553Ah4e57ivAV07ztpeOZ41eM8ZQVpTLT1/aS0tnD6nxMV6XJBLV1PI1SqnxMVw8M12D7kUkKpQXh+jps6zZFpFLlolEFIWvMSgpCPLmoWMcae/2uhQRkTG5eGYGwZQ4LbgqMgkUvsagpDAba2F9jWY9ikhk8/mcrsc12xro7OnzuhyRqKbwNQYXzUgnNT6grYZEJCqUF+VxvKdPwylEJpjC1xj4fYZlBdms3dGItdbrckRExuTyOZmkJcRo1qPIBFP4GqPSgiC1LZ3U1Ld5XYqIyJjE+H1ctyCH1VvrtIahyARS+BqjkkJny5AX1EwvIlGgvChES2cvG3Y3e12KSNRS+Bqj6ekJzA0msU5bDYlIFCgtDJIQ42dl1WGvSxGJWgpf46CkIMiGPU2aISQiES8+xs/y+UEqquro79dYVpGJoPA1Dq4uDNLZ08/mvUe8LkVEZMzKikI0tHbx2gHd00QmgsLXOLh8TiYxfqMlJ0QkKlw7P4dYv4+VlZr1KDIRRhS+jDHlxpjtxpgaY8z9w7weZ4x50n19gzEmf9BrD7jHtxtjyoZc5zfGvGaM+dNYP4iXEmMDLD4vU4PuRSQqpMTHcOW8LFZW1WoZHZEJcNbwZYzxAw8CNwELgduMMQuHnHYHcMRaOw/4NvA199qFwK1AEVAOfN99vwH/CFSP9UOEg9LCINtqW6lv6fS6FBGRMSsvCnGg+TjVh
1u9LkUk6oyk5WsJUGOt3W2t7QaeAG4ecs7NwGPu86eB64wxxj3+hLW2y1q7B6hx3w9jzAzg3cAjY/8Y3ispyAbQrEcRiQrXL8zFZ9CCqyITYCThazpwYNDvB91jw55jre0FjgFZZ7n2O8C/AGdcyc8Yc5cxZrMxZnNDQ/h26y3MSyU7OVbjvkQkKmQnx3FZfiYVGvclMu48GXBvjHkPUG+t3XK2c621D1trF1trFweDwUmobnR8PsOyedms29mo6dkiEhXKi0Nsr2tld4N28BAZTyMJX4eAmYN+n+EeG/YcY0wASAOaznDtVcD7jDF7cboxrzXG/HwU9YeV0sIgTe3dbD3c4nUpIiJjVlYUAqCiqs7jSkSiy0jC1yagwBgz2xgTizOAfsWQc1YAt7vPbwGes84UmRXAre5syNlAAbDRWvuAtXaGtTbffb/nrLV/Nw6fx1PL3HFfa9X1KCJRYFp6AhfNSNO4L5Fxdtbw5Y7hug+owJmZ+JS1tsoY82VjzPvc0x4FsowxNcDngPvda6uAp4CtwErgXmtt1C4Dn5MSz4K8VNbt0KB7EYkONxaFeOPAUQ4fO+51KSJRY0Rjvqy1z1hrC621c621X3WPfclau8J93mmt/bC1dp61dom1dvega7/qXne+tfbZYd77eWvte8brA3mttCCbzfua6eju9boUEZExKy92uh5XqetRZNxohftxVlIQpKfP8sruJq9LEREZs7nBZApykrXavcg4UvgaZ4vzM4iP8bFWXY8iEiXKi0Ns2NNEc3u316WIRAWFr3EWH+Pn8tlZGnQvIlGjrChEv4XVW9X1KDIeFL4mQGlhkN0N7Rw80uF1KSIiY1Y0LZUZGQma9SgyThS+JkCpthoSkShijKGsKMT6nY20dWkykchYKXxNgHk5yeSlxWurIRGJGuXFIbr7+lmzrd7rUkQinsLXBDDGUFKQzfqdjfT2nXHrShGRiHDJrAyyk+PU9SgyDhS+JkhpYZCWzl7ePHTM61JERMbM7zPcWJTLmm31dPZE7VrZIpNC4WuCXDU3G2PQavciEjXKi0J0dPexXuNZRcZE4WuCZCTFcuGMdC05ISJR44o5WaTGB9T1KDJGCl8TqLQgm9cPHOXY8R6vSxERGbPYgI/rFuSyurpO41lFxkDhawKVFATp67e8vEtN9CISHcqKQhzt6GHjnmavSxGJWApfE+jiWekkxwVYq/ERIhIlri4MEh/jU9ejyBgofE2gGL+PpXOzWLujAWut1+WIiIxZQqyfawpzqKiqpb9f9zWR0VD4mmClhUEOHjnO3iZtNSQi0aG8OERdSxevHzzqdSkiEUnha4INbDW0dodmPYpIdFg+P4cYv6GiUl2PIqOh8DXBzstK4rysRG01JCJRIy0hhqVzs1lZVashFSKjoPA1CUoKsnl5VxPdvZqaLSLRobwoxL6mDrbXtXpdikjEUfiaBKUFQdq7+3h1/xGvSxERGRc3LMzFGFiprkeRc6bwNQmWzs3C7zPqehSRqBFMieOy8zIVvkRGQeFrEqTEx3DJrHTWap9HEYkiZcUhttW2srex3etSRCKKwtckKS0IUvn2MZraurwuRURkXJQV5QJQoQVXRc6JwtckKSkMYi2sr1Hrl4hEhxkZiVwwPU2r3YucI4WvSXLB9DTSE2NYp62GRCSKlBXl8tr+o9S1dHpdikjEUPiaJH6f4ap52azbqa2GRCR6lBeHAFil1i+REVP4mkRXFwSpa+liR12b16WIiIyLeTkpzA0mqetR5BwofE2ikkJtNSQi0ae8OMQru5s50t7tdSkiEUHhaxLlpSVQkJPMWq33JSJRpLwoj75+y+rqOq9LEYkICl+TrKQgyMY9zXT29HldiojIuCiensr09AQtOSEyQgpfk6y0MJuu3n427mn2uhQRkXFhjOHGolzW7mykvavX63JEwp7C1yS7fHYWsX6fthoSkahSXhSiu7ef57fr3iZyNgpfkywh1s9lszO01ZDIJDHGlBtjthtjaowx9w/zepwx5kn39Q3GmHz3+A3GmC3GmLfcx2sHXXOpe7zGGPM9Y4yZvE8UnhbnZ5KVFKtZjyIjoPDlgdKCINvrWrUoocgEM8b4gQeBm4CFwG3GmIVDTrsDOGKtnQd8G/iae7wReK+19gLgduDxQdf8ALgTKHB/yifsQ0QIv8/penyuuk5jWkXOQuHLAyUFQUBLTohMgiVAjbV2t7W2G3gCuHnIOTcDj7nPnwauM8YYa+1r1tq33eNVQILbSpYHpFprX7HOisk/A94/8R8l/JUVhWjv7uOlXWrZFzkThS8PLMhLITs5TlsNiUy86cCBQb8fdI8Ne461thc4BmQNOedDwKvW2i73/INneU8AjDF3GWM2G2M2NzRE/5etK+dmkxIXYGWluh5FzmRE4Wu0Yybc1x5wj283xpS5x+KNMRuNMW8YY6qMMf8+Xh8oEhhjKC3IZn1NI/392mpIJJwZY4pwuiLvPtdrrbUPW2sXW2sXB4PB8S8uzMQGfFy7IIfV1fX09vV7XY5I2Dpr+BrLmAn3vFuBIpwxEd93368LuNZaexGwCCg3xlwxPh8pMpQWBmlu76bq7RavSxGJZoeAmYN+n+EeG/YcY0wASAOa3N9nAL8DPm6t3TXo/Blnec8pq7woRHN7N5v2HvG6FJGwNZKWr1GPmXCPP2Gt7bLW7gFqgCXWMbDBYYz7M6WagJYVuFsNackJkYm0CSgwxsw2xsTifBlcMeScFTgD6gFuAZ6z1lpjTDrwZ+B+a+2LAydbaw8DLcaYK9z73MeBP0z0B4kUV58fJC7g04KrImcwkvA1ljETp73WGOM3xrwO1AN/sdZuGM0HiFTZyXEUTUvVoHuRCeTej+4DKoBq4ClrbZUx5svGmPe5pz0KZBljaoDPAQNDK+4D5gFfMsa87v7kuK99GngE5wvlLuDZyflE4S8xNsDVhUFWVtZqWIXIaQS8+oettX3AIvfb5e+MMcXW2sqh5xlj7gLuApg1a9YkVzmxSgqCPLp+N21dvSTHefZ/CpGoZq19BnhmyLEvDXreCXx4mOu+AnzlNO+5GSge30qjR3lxiFVb63jz0DEWzUz3uhyRsDOSlq+xjJk467XW2qPAGk6zTk40D1gtLcimp8/yyq4mr0sRERk3183PJeAzmvUochojCV+jHjPhHr/VnQ05G2cxwo3GmKDb4oUxJgG4Adg29o8TWS7NzyAhxq+thkQkqqQlxrB0bhYVVbU4/ykQkcHOGr7GMmbCWlsFPAVsBVYC97rdjXnAGmPMmzjh7i/W2j+N70cLf3EBP1fMyWSt1vsSkShTVhRiT2M7O+vbzn6yyBQzooFGox0z4b72VeCrQ469CVx8rsVGo9LCIGv+uJUDzR3MzEz0uhwRkXFx48JcvviHSlZW1lKYm+J1OSJhRSvce+zEVkPqehSRKJKTGs+lszI07ktkGApfHpsbTGJ6egLrdqjrUUSiS3lxiK2HW9jf1OF1KSJhReHLY8YYSgqyeXFXo7bjEJGoUlYUAtCCqyJDKHyFgdLCIK2dvbxx8KjX
pYiIjJuZmYkUTUtV+BIZQuErDFw5NwufgbXqehSRKFNWFGLL/iPUt3R6XYpI2FD4CgPpibFcOCNdg+5FBPp64c2noD86hiGUF4ewFlZtrfO6FJGwofAVJkoLg7xx4CjHOnq8LkVEvLT19/DbO+GXfwPtkd8aXpCTzJzsJHU9igyi8BUmSguy6bfw4q7Iv9mKyBgUfwje9U3Y8wL8cBnsXe91RWNijKGsOMTLu5o42tHtdTkiYUHhK0wsmplOSlxAWw2JTHXGwJI74VN/hdgkeOy98MLXob/P68pGrbwoRG+/5a/V9V6XIhIWFL7CRMDv48p5Wazd0ai90EQE8i6Eu56H4ltgzVfh8fdDa2R23V04I428tHh1PYq4FL7CSGlhkENHj7O7sd3rUkQkHMSlwAcfhpsfhAObnG7IXc95XdU5M8ZQVhTihR0NdHT3el2OiOcUvsJI6cBWQzvU9SgiLmPg4v2SInMAACAASURBVL9zWsESs+HxD8Jfv+zMiowgZUUhunr7eWG77m8iCl9hZGZmIrOzk1i3U4PuRWSInPlw53Nwycdg3X/DT98Nxw56XdWIXZafQWZSLCvV9Sii8BVuSgqyeXlXE129kTu4VkQmSGwivO9/4IOPQF2l0w25/VmvqxqRgN/HDQtyea66Xvc3mfIUvsJMaUGQ4z19bNl3xOtSRCRcXfhhuHstpM2EX90KK/8v9Ib/Mg7lxSFau3p5aVeT16WIeErhK8xcMTeLgM+o61FEzixrLnxqNSy5G155EH58IzTv8bqqM7pyXhbJcQFWqetRpjiFrzCTHBfgkvMyNOheRM4uEAfv+jr87c+heTc8VApVv/O6qtOKC/hZPj+HVVV19PVrSR2ZuhS+wtDVhUGq3m6hsa3L61JEJBIseC/cvQ6yC+HXn4A/fQ56wnMj6/KiEE3t3Wze2+x1KSKeUfgKQyUF2QCsV9ejiIxUxnnwyZVw5T/A5kfhkeugcafXVb3DNecHiQ34NOtRpjSFrzBUPC2NjMQY1mqrIRE5F/4YuPE/4KNPQ+theOhqeOMJr6s6RVJcgNKCIBWVtdrNQ6Ysha8w5PMZlhUEWbdTWw2JyCgU3AD3rIdpi+B3d8PvPw3d4bNzRnlxiLePdfLWoWNelyLiCYWvMFVakE1Daxfbalu9LkVEIlHqNPj4Crj6C/D6L+Hha6CuyuuqALh+QQ5+n2FlpboeZWpS+ApTJe5WQ+vU9Sgio+UPwPL/Cx//PXQegx9dC1t+Ch63qKcnxrJ0ThaPv7yPH76wS/s9ypSj8BWmQmnxnJ+bwtodGnQvImM05xqnG3LWUvjjP8Jv7oDOFk9L+o/3F3PJeRn817PbKP36Gh5Zt5vOHq18L1ODwlcYKynIZuPeZo5364YkImOUnAN/91u47l+h6vfOmmBvv+ZZObOzk3jsk0t4+p6lFOam8JU/V3P1N9bws5f3avshiXoKX2GspDBId28/G/ZoKw4RGQc+H5R8Dv7+GejrhkdugFd+6Gk35OL8TH555xX86s4rmJWZyJf+UMXybzzPLzfsp6ev37O6RCaSwlcYu3x2JrEBn7YaEpHxNesKpxty3vWw8gvwxEehw9tFT5fOzeKpu5fy+B1LyEmN5//+7i2u/e/neWrzAXoVwiTKKHyFsfgYP5fPztRWQyIy/hIz4bZfQdl/ws5VTjfkgY2elmSMoaQgyO8+fSU/+cRlpCfE8i9Pv8kN317L7187pC2JJGoofIW50oIgO+vbOHzsuNeliEi0MQaWfhruqADjgx+Xw/pvQ7+3LU3GGJbPz2HFfVfx0McuJS7g4zNPvk7Zd9bypzffpl8hTCKcwleYKyl0thpap1mPIjJRpl8K96yDBe+B1f8Gv7gF2rxvcTfGUFYU4pl/KOHBj1wCwH2/fI13fW8dK7VCvkQwha8wd35uCjkpcdpqSEQmVnwafPgxePe3YO96+OEy2LPO66oAZ9ePd1+YR8VnSvnurYvo6u3nnp9v4b3/u57nttUphEnEUfgKcwNjINbXNGq8g4hMLGPgsjvgzr9CXAr87H3w/H9Bf3gs/eD3GW5eNJ2/fLaUb374Io4d7+GTP93MB77/Emt3NCiEScRQ+IoApYXZHO3ooVL7oInIZAhdAHc9Dxf8DTz/n/Czm6HlsNdVnRDw+7jl0hk890/X8J8fvID6lk4+/uON/M1DL/PSLg3RkPCn8BUBls1zx32p61FEJktcMnzwIXj/D+DQFqcbsma111WdIsbv47Yls1jz+Wv4j5uL2N/cwUd+tIHbHn6FzXu9XTpD5EwUviJAVnIcxdNTtdWQiEy+RR9xWsGSc+DnH4K//Cv09Xhd1SniAn4+tjSfFz6/nC+9ZyE769u45Ycv87FHN/D6gaNelyfyDiMKX8aYcmPMdmNMjTHm/mFejzPGPOm+vsEYkz/otQfc49uNMWXusZnGmDXGmK3GmCpjzD+O1weKVqUFQV7df4TWzvC66YnIFBA8H+58Di79BLz4HfjJu+DoAa+reof4GD+fXDabtf9yDQ/cNJ/KQ8d4/4MvcsdPN2nYhoSVs4YvY4wfeBC4CVgI3GaMWTjktDuAI9baecC3ga+51y4EbgWKgHLg++779QL/ZK1dCFwB3DvMe8ogJQVBevstL+/SVkMi4oGYBHjvd+FDj0J9tdMNue3PXlc1rMTYAHdfPZd1X7iWz5edz+Z9R3jP/6zn7sc3s63W2w3FRWBkLV9LgBpr7W5rbTfwBHDzkHNuBh5znz8NXGeMMe7xJ6y1XdbaPUANsMRae9ha+yqAtbYVqAamj/3jRK9Lz8sgMdavrYZExFsX3AJ3vwAZ58ETH4Fn74feLq+rGlZyXIB7l89j3ReW85nrC3ippony76zj3l++Sk19q9flyRQ2kvA1HRjcvnyQdwalE+dYa3uBY0DWSK51uygvBjaMvOypJzbgY+mcLK33JSLey5oLd/wFLv8/sOEH8OiN0Lzb66pOKzU+hs9cX8i6Lyzn3uVzWbOtnhu/vZbPPvk6exrbvS5PpiBPB9wbY5KB3wCfsdYO2xZsjLnLGLPZGLO5oWFqB4/SwiD7mjrY16SbhYh4LBAHN/0X/O0v4Mge+GEpVP7G66rOKD0xls+XzWfdvyznzpI5PFt5mOu/9QKf//UbHGju8Lo8mUJGEr4OATMH/T7DPTbsOcaYAJAGNJ3pWmNMDE7w+oW19ren+8ettQ9baxdbaxcHg8ERlBu9SgqcJSfWqutRRMLFgvfAPeshZwE8/Un44z9CT3jvRZuVHMcD71rA2n9Zzu1L8/nDG2+z/JvP88Bv3+Lto+Fdu0SHkYSvTUCBMWa2MSYWZwD9iiHnrABud5/fAjxnnaWGVwC3urMhZwMFwEZ3PNijQLW19lvj8UGmgtnZSczISGDdjqndAigiYSZ9Fvz9M3DVZ2DLT+FH10HDdq+rOquclHi+9N6FrP38cm5bMountxzgmm88z7/+oZK6lk6vy5Modtbw5Y7hug+owBkY/5S1tsoY82VjzPvc0x4FsowxNcDngPvda6uAp4CtwErgXmttH3AV8DH
gWmPM6+7Pu8b5s0Wdga2GXt7VRE9fv9fliIic5I+BG/4dPvobaKuFh6+B13/pdVUjEkqL5z/eX8zzn1/Ohy6dzi827Kf062v4jz9tpbEtPCcTSGQzkbQX1uLFi+3mzZvPfuKmR50xCJfd6czIiSIrKw9zz89f5df3LOWy/EyvyxGZUMaYLdbaxV7XMR5GfP+KBi2H4Tefgn3r4aLb4F3fdFbMjxD7mzr43nM7+e2rB4kL+Ln9ynzuKp1DZlKs16VJhDndPSw6V7hv2gUvfx++exH86iOw+3mIoJB5JkvnZuMzqOtRRMJXah7cvgKuvh/eeMJpBaut9LqqEZuVlcg3P3wRqz93NWVFuTy0dhclX3uO/161nWMdWuhaxi46w1f5/4PPvAUl/wQHNjibwj54OWz8EXS1eV3dmKQlxLBoZjovaNC9iIQznx+WP+CEsK5W+NG1Tq9EBH0RnhNM5ju3Xsyqz5Ryzfk5/M9zNSz7+nN8d/VO7TYiYxKd4QsgbTpc90X4bBW8/4cQmwjP/DN8a4GzKGDTLq8rHLXSwiBvHjzK0Y5ur0sRETmz2aXObMj8ZfDnz8GvPwGdkbXVT0FuCg9+9BKe+YcSls7J4turd1Dy9TU8uKaG9q5er8uTCBS94WtATDwsug3uXAN3rIbCMtj0CPzPJfDzW2DnX6A/sgavlxQEsRbW16j1S+RsRrs3rTEmy92Dts0Y879Drnnefc+BCUM5k/NpIlRyED76NFz/b1D9R3ioFA5t8bqqc7ZwWioPf3wxf7xvGRfPTOcbFdsp/foafrR2N8e7+7wuTyJI9IevAcbAzMvgQ4/AZyvhmgeg9k34xS3wv4vhlR9EzLexi2akkRofYN0OhS+RMxnL3rRAJ/BF4J9P8/YftdYucn/qx7/6KOPzwbLPwt8/C3298GgZrP0GHD/qdWXn7IIZafzk75fw209fycJpqXz1mWpKv7GGn7y4h84ehTA5u6kTvgZLCcE198NnKp1NYhOzYOX98K2F8Od/Cvv1aQJ+H1fNy2btzgYiabaqiAdGvTettbbdWrseJ4TJeJl1OdyzzumFeO4rzlCQP33W2aw7wlwyK4PH77icp+5eypzsJP79j1u55hvP8/gr++jujaweFZlcUzN8DQjEOpvEfuovcNfzsOB98OrP4MElziD9bc9Af3h+iyktDHL4WCe7GiJ7AoHIBBvL3rRn8xO3y/GL7sLR76Dt0U4jMRNu/QXcvRaKP+isB/b9K+Cn73G6JfsiaxzVktmZPHHXFfzyU5czPSOBL/6+kuXffJ4nN+3XmowyrKkdvgabdjF84AfwuWq49ovQuBOeuA2+dzG8+D04fsTrCk8xsNXQP//6TR5/ZR/1Wo1ZZDJ91Fp7AVDi/nxsuJO0PdpZ5F0ENz/o3Hev/zc4shee/Dv43iJY/23oaPa4wJEzxnDlvGyevmcpj31yCdnJsXzhN29x/bde4OG1u9jfpL0j5aToXGR1PPT1wrY/wcaHYd+LEEiAC/8GLr8bcosmp4azeHT9Hn7+yj72NDobbV88K52yohBlRSFmZyd5XJ3I2I11kVVjzFLg36y1Ze7vDwBYa/9z0DkV7jkvu3vT1gJBd4s0jDGfABZba+87zb9xxtcHTKlFVkerrxd2PAsbHoK96yAQ7/ROLLkb8i70urpzYq3lr9X1/M+aGt444IxrW5iXyk3FIcqLQxTkpnhcoUyG093DFL5GovYtJ4S9+WvoPQ7nLYPL74Lz3w3+wOTXM4i1lp31bVRU1lKxtZbKQy0AFOYmnwhiRdNSOU2viEhYG4fwFQB2ANcBh3D2qv2Iu/XZwDn3AhdYa+8xxtwKfNBa+zeDXv8Eg8KV+57p1tpGY0wM8CtgtbX2h2eqReHrHNVtde+7T0JPB8xaCkvuggXvdbYyiiAHmjtYWVnLyqpatuxzelHmBpMoLw5xU3Ge7tFRTOFrPHQ0w2uPw8ZH4Nh+SJ0Bl30SLvkEJI1kiMjEO3ikg1VVdVRU1bJpbzP9FqanJ3BjUS5lRSEuy8/E79MfuUSG8dheyN039juAH/ixtfarxpgvA5uttSuMMfHA48DFQDNwq7V2t3vtXiAViAWOAjcC+4C1QIz7nquBz7n71p6W5/evSHX8CLz2C9j0I6dbMmUaLP4kXPoJZwmLCFPX0smqqlqeraxlw55m+votMzISKC8KcdMFIS6emYFP9+ioofA1nvr7YMdKp2l8zwvgj3Obxu+CaYu8ru6EprYu/lpdT0VVLetqGunu7SczKZbrF+RQVhTiqnnZxMf4vS5T5LS0t6Oc0N/nrMu48WHY9Vfwx0LRB5wuyRmXel3dqDS3d7N6ax3PVh5mfU0jPX2WnJQ4yoqcrsnLZ2cS8GtodiRT+Joo9ducm8EbT0BPO8y83AlhC28Oq6bxtq5eXtjeQEVVLWu21dPa1UtirJ9rzg9SVhRi+fwcUuPDp14RUPiS02jc6WwX9/ovobsVpl/qhLCi90MgzuvqRqWls4c12+pZWVnL89sbON7TR0ZiDDcszKW82PmyHBfQl+VIo/A10Y4fdW4Em34EzbshOXSyaTwl1+vqTtHd28/Lu5uoqKrlL1vraGjtIsZvWDo3m7KiXG5YmEtOSrzXZYoofMmZdbY4X3w3PgxNOyEpx7nnLv6ks7l3hDre3ccLOxpYWXmYv1Y7X5aT4wJcOz+Hm4pDXH1+kMRYb8cby8gofE2W/n6oWQ0bH3IefTFO0/jld8OM8PtvSH+/5bUDR6hwx4nta+rAGGfxwDJ3nNh5WZo5Kd5Q+JIR6e+H3WucELajwtnUe8H7nPvuzMudHU4iVFdvHy/tamLlW7Ws2lrLkY4e4mN8XF0YpLw4xLXzc0lLUK9FuFL48kJjjdMS9tovnKbxaZc4N4OiD4Rl07i1lu11rVRUOkFs62Fn5uT8UAo3FoUoK8plYZ5m5cjkUfiSc9a8GzY9Cq8+Dl3HIHShc98t/hDEJHhd3Zj09vWzcW8zFe7MyboWp9fiyrnZ3FQc4oaFuWQlh99/W6YyhS8vdbWebBpv3AFJwUFN49O8ru60DjR3UFFVy6qqOjbta8ZamJGRcGIJi0vPy9DMSZlQCl8yat3tzjIVGx6GhmpIyIRLb4fFd0D6TK+rGzOn1+IoFVW1PFt5mAPNx/EZZ7X9m4rzKCsKEUrT8BGvKXyFA2udpvENDzuzJX1+Z82aJXfDrCvCumm8sa2L1VudFrEXa5ro7usnKymW6xfkUlacq8GgMiEUvmTMrHUWbN3wEGx/xjk2/93OfTd/WVjfd0fKWsvWwy3OWmKVteysd7adu3hWurOERXEes7ISPa5yalL4CjfNe2DTI866YZ3HIHSBM0vygg+HfdN4a2cPz29vYNXWOtZsq6etq5ekWD/XzHeWsFh+fpAUzZyUcaDwJePq6H63S/IxZ/2wnIWw5E648G8hNnrGttbUt51oERtYeHvB4NX1c5I1fGSSKHyFq+52ePMpp0uyfiskZMAlH4fLPgXps7yu7qwGBoOucmdONrZ1E+v3ceW8LMqKQly/IJdgisYgyOgofM
mE6DkObz3tTIyqfQvi0+Dijzn33czZXlc3rgaGjzxbeXJ1/TnBpBMtYsXTNY53Iil8hTtrnT0kNzzk7CkJcP67nNaw2aUR0TTe1295df+RE1sdHWg+jjGw+LyME+PEZmaq6VtGTuFLJpS1cGCDc9+tXuEs5FpY5tx3514bEffdczGwuv7Kqlpe2e2srj89PcHd5ijEJbO0uv54U/iKJEcPwOZHYctjcLwZggucpvGLbo2YpnFrLdWHW6moqqWiqpZtta2A0/Q9sITF/FCKvnHJGSl8yaRpeRs2/wS2/ATaGyCrwAlhi26DuOjbBLu5vZvV1XWsrKxl/c5Guvv6CabEUVaUS3lRHpfPySRGq+uPmcJXJOo5DpW/dZrGD78BcWlw8d/Bkk9B5hyvqzsn+5s6TgSxLfuPYC3Mykw8EcQunqWZk/JOCl8y6Xq7oOr3zn330BaITYFFH3GCWPY8r6ubEK2dPTw3ZHX99MQYbljgrK6/rEATqkZL4SuSWQsHNjo3g61/cJrGC26Ey++C2deAP7JWOm5o7eIv7szJl3Y5+5llJ8dxw8IcbiwKceXcLP2hC6DwJR47uMW571b+Fvp7YO51zpph824AX3S2Cg2srl9RVcvq6jpaO53V9ZcPrK5fGCQpLrL+m+Mlha9o0XLYaRbf/GOnaTyQAKFiZyHBvIucn5wFYbmI63Ba3JmTFVW1PL+tnvbuPlLiAu7MyVyuOT+HZP2hT1kKXxIW2uphy0+d+27rYciY7QwFWfRRSEj3uroJ093bz0u7GllZWcuqrXU0t3cTFzi5uv51C7S6/tkofEWb3i5nzZoDm5wuydo3ocuZUowvxglgeRdC3iInkOUWhf14sc6ePl7a1UhFZR1/qXb+0GMDPpbOyeLCGWnMD6WyIC+F87KS1EU5RSh8SVjp63EG5m94GA68AjGJzljcJXc599wo1tvXz6a9R1hZefjE6voBn+HKeSdX18/W6vrvoPAV7fr74cgeJ4QdfuPkT0eT87rxOQNIB1rH8i5y1hYL029tff2WzXubqaiqY+3OBnY3tNHv/r9qfIyP83NTWJCXyvxQCvPzUlkQSiUtUd/Aoo3Cl4Stw284IeytX0NfF+SXOF2ShTdF3FCQc9Xfb3n94FEqKp0lLPY3dwCQlxbP/FAK54dS3ccU5gaTiQ1EZxftSCh8TUXWOjN4Boex2jeh5dDJczLyB4Ux9zE56FnJp9PZ00dNfRtbD7ew7XAr22pbqD7cwpGOnhPnTEuLZ74byBbkOa1k+VlJBDRjJ2IpfEnYa2+C137mLN567ACkzYTL7oBLbofETK+rm3ADM9tf2NHA9toWttW2squhjZ4+J1sEfIY5waSTgSzXCWUzMhKmxGx3hS85qa0BagcCmdtSdmTPyddTprmBbNA4stTpYbfmjbWW+tYuqg87f/DVbjDb1dBGr9tMFhvwUZibzIJQqttC5rSUZSbFely9jITCl0SMvl7Y8ayzZtjedRCIhwtucbYxyrvQ6+omVU9fP7sb2tlW28L22la217ayrbaVQ0ePnzgnOS7A+W7r2EAomx+FPRgKX3Jmx486Kz0PtI4dfsPZBNz2O68nZrmtY4MCWcbssJzx09Xbx676djeUnQxmjW3dJ87JTY1jfiiV+XkpLMxLZX4olTnBJK1rE2YUviQi1W11di1580no6YD4dGcz77RZ7uNMSJtx8lhSdth9uZ0IrZ097KhzgthAINt2uIWWzt4T54RS408GMvdnXk5yxM6AV/iSc9fdDnVVp3Zb1lc7U64B4lKdcWODx5FlFYTteIeG1i4njB1upbq2herDrdTUt55oHo/1+5iXk8z8vBS3pczpvtQgUu8ofElEO37E2caovtrpkjx20FlEu7v11PMCCYPC2MyToWzgWMq0sL2vjpW1ltqWzhOBbCCU7apvo7vP+fLv9xnmZCcNCmVOF+b09ISwX5Ff4UvGR283NFQPGUdWCb1uc3IgHnKLBwWyC53Na8N06YuB5vHqwy1U154cT1bX0nXinOzkOBbkpZwYSzY/lMrcnKSI/SYWSRS+JOpYC51HnRB27MCgx/1OODt2wFlGaDDjh9Rpg4LZ4Ec3pMUkePN5JkhPXz97G9tPaSXbXtfCgeaTXZdJsX4KB3VbDoSyjDAaVqLwJROnrxeaak4d1H/4jUFLXwTcpS8GDeoPFYf10hdNbV1sr22lemAsWW0LO+ra6O51vokFfIa5wWQnlLmD/BfmpRJMiZsSg0gni8KXTEk9x91Wsv1DApr72PI22L5Tr0nMHhLIhgS1+PSo6Nps6+o90UI2MMB/e10rRwdNvspNjXvHAP95OcnEx0z+F2aFL5lc/f1wdO+gFrI34fDrJ5e+wEB24akD+0MXhu3SF+Csc7O3qZ2th51xCgNjyQ4f6zxxTmZSrLP8hbsm2YK8VM/+6KOBwpfIMPp6ncVeTwSy/UMC2sGTvREDYlOG6docFNSSc8NyDO9IDEy+2jY4kNW2srP+5Bdmv8+Qn5XI/FDqKQP9Z2YkTmjXpcKXeG/w0heD1yMbuvTFiUH9i5xglpzjWckjcbSj+8TA0Wq323J7XSudPaeOVzi5DIYTykKp8WolOwuFL5FRsNb5ojtsy5kb1DqPnnqNP9aZ1Z42Y/iWs9QZEAif7ryRcL4wd5yYdTkQygbWJQNIjPVTmHvqAP/5ofGbET+m8GWMKQe+C/iBR6y1/zXk9TjgZ8ClQBPwt9bave5rDwB3AH3AP1hrK9zjPwbeA9Rba4tH8iF084pS7Y3vXIuseffJ15Ny3D/+ac7A09Rpzk0iNe/ksZh47+ofRl+/ZV9T+4kwNvB48MjJb6NpCTGnrEk2OzuZ7ORYgilxJMcFFMxQ+BKZMF2tJycAvKPl7AC01gKD84GBlNCQmZqDuzhnQVyyV5/mnLR39bKjrvWUQLa9rpXm9pMz4oMpcad0W84PpVKQe+69GKMOX8YYP7ADuAE4CGwCbrPWbh10zqeBC6219xhjbgU+YK39W2PMQuBXwBJgGrAaKLTW9hljSoE24GcKX/IOncdOLn1Rv9VpMRv4GRhLNlhCphvIprmhzH2eMuh5fOrkf44hWjp7nD/4wy1O96X7jayj+9TxG3EBH8GUOLKT4wimxJ363A1oA78nxkbnLChQ+BLxTG+30ytxupazlkPQ133qNckhyC6ArHnOz8Dz9PPCframtZYGd6zviWUwalvYWddGl9t16TNw1bxsHr/j8hG/7+nuYSP5X2MJUGOt3e2+0RPAzcDWQefcDPyb+/xp4H+N87X9ZuAJa20XsMcYU+O+38vW2rXGmPwRfwKZWuLTIH+Z8zNUV6uzwXjLISeMtb59ajg7tAU6Gt95XWyyG84Gt6AN/pnurGc2gS1OqfExXJafyWX5J1e+7u+37G/u4MCRDhpau2hs63Ifu2lo7eJAcwev7jtCc0c3w31XSoz1u6HsZCA7+XhqUNPYMxEZkUAsZM52fobT3w/t9ScD2ZF9zsSrphrY+gc43nzyXF8MZM4ZJpgVQFLW5HyeszDGkJMST05KPCUFJ3d56
eu37G1qPxHI4mPGZ1zcSMLXdODAoN8PAkNj34lzrLW9xphjQJZ7/JUh104fdbUiAHEpEEyBYOHpz+ntcgaktgwJZi2HnON7XnCa1YfOGPLHntpadkormhvSknPH9Vucz2fIz04iP/vMsz97+/ppbu+mYUg4GxzYdjW08cqeplNm/gyWEhcYpkUtdkhgc36m8n5sInIWPp/TDZkSgpmXvfP1jmZo3AlNO93HGudxR8XJtSIBEjKcEDY0mGXOCYslivzuzPa5wWTedUHeuL1veLcDAsaYu4C7AGbNmuVxNRIxAnHO4P2M/NOf098HbfWnhrKB1rSWw04LWvXbzqa5gxmfE8De0Yo2sePQAn4fOanx5KSe/X27e/tpau+isbWbhrZO99EJaA1tXTS2dlFd28LanV20DlpderC0hJiTLWonHmNP+T2YEkdWUqz2zxSRUyVmwqzLnZ/B+nqdlrLGmlODWc1f4fVfnDzP+JxxZMMFs5S8iF82YyTh6xAwc9DvM9xjw51z0BgTANJwBt6P5NozstY+DDwMzpiJc7lW5Ix8fjcs5eHMFRmGtc43uNZBLWcth08+b9wJu18Iu3FosQEfeWkJ5KUl4Pw5nl5nTx+NbSdb0k52e558fOvgURrbumnremdQMwYyEmNPhLHhWtIGHjOTYvGH+YrUIjKB/AGnVStzDnDjqa91tkDzriHBbCfse9HZpmlAbDJkzX1nMMuaFzGD/kcSvjYBBcaY2TjB8Q425AAABjxJREFU6VbgI0POWQHcDrwM3AI8Z621xpgVwC+NMd/CGXBfAGwcr+JFJpwxzpiEpCxnK6XTGY9xaElBZ8um+NRBj2lDfk9xnseljtuaPPExfmZkJDIjI/Gs5x7vdoJafes7A9rA45b97TS0dp1YamMwn4F/fW8Rt1+ZPy61i0gUiU+FaRc7P4P19zv31IFWsoEuzIMbofI3nDIrM2UaZM8bFMwKnKCWPsv5wh0mzhq+3DFc9wEVOEtN/NhaW2WM+TKw2Vq7AngUeNwdUN+ME9Bwz3sKZ3B+L3Cvtc4gG2PMr4BrgGxjzEHgX621j477JxSZDGMdh9byNhzZ63zz62qB/uG7Ak8RmzIkmA33mHbq73GDrolLPeexawmxfmZmJjIz88xBzVpLe3cfjYO6OQe6PYunn7klTkTkFD6fs7xF2gyYu/zU13qOO0sTDQ1mlU87s+YH+OPcQf/DBLPETCabFlkVCTfWOjeUrhanRa2zBbqOnQxm73g8Nszx1neOVRtOTNJZAtxpWt4Gv+aPmbD/KbTUhIiMirXOGpKDuy+bdjnPj+w59QtuYpYbyOad2pWZMXvMC8uOZakJEZlMxkBs4v/f3t2E1lFGYRx/HpNUAxUqtmJpSioohSBaUUqwu4KQquhWQVeCCAoVBKlLl27EjRtRcVFRBF2IGy0YdSN+V7FWJfiBESEWCanFftzkuJgpSWpqmtyZeSfv/H8wcD/CcM7Nzclz3zt3brFdee3699M7s0pAm1sh2M0WZ8U+f9+FX1GyksHhta28bb/p/z8IAQD9sqXN24pt9Pbl982fK0+NsSSYnZiSfnxPOnV4yT4GpKtGlweya8b++yGCdSB8AbkavHxx+KxX72yxirbqytvJ5bedPxnu6Tnp3Knl+5x4Rhp/pL/eAGC9BoaKVa6t10u7Dyy/75/ZYoXswmD284dS73RxPNrDH/RdAuELwMUNbpIGr+7vRIjzvfIt1DKM9bOaBwB1Gt4ijdxabEstLEhz08ULzQoQvgDUa2CwOKA1wUGtAFCJy8rzjlW1u8r2BAAAgFURvgAAABpE+AIAAGgQ4QsAAKBBhC8AAIAGEb4AAAAaRPgCAABoEOELAACgQYQvAACABhG+AAAAGuSISF3DJbP9p6RfL/HHt0o6UWM5bdGFPrvQo0SfKxmNiD6+Gbw9mF8ros98dKFHae19rjjDNlT4Wgvbn0fEbanrqFsX+uxCjxJ9YlFXHiP6zEcXepSq65O3HQEAABpE+AIAAGhQzuHrhdQFNKQLfXahR4k+sagrjxF95qMLPUoV9ZntMV8AAABtlPPKFwAAQOtkGb5sT9j+wfaU7UOp66mD7Zdtz9j+NnUtdbG90/ak7e9sH7N9MHVNdbB9he1PbX9d9vl06prqYnvA9le230ldS1sxv/LA/MpPlfMru/Ble0DS85IOSBqTdL/tsbRV1eIVSROpi6hZT9ITETEmaVzSo5n+Ls9I2h8RN0vaI2nC9njimupyUNLx1EW0FfMrK8yv/FQ2v7ILX5L2SpqKiJ8i4qyk1yXdm7imykXER5L+Sl1HnSLij4j4srx8UsWTfkfaqqoXhb/Lq0Pllt3BmLZHJN0l6cXUtbQY8ysTzK+8VD2/cgxfOyT9tuT6tDJ8wneN7V2SbpH0SdpK6lEuZx+VNCPpSETk2Odzkp6UtJC6kBZjfmWI+ZWFSudXjuELmbG9WdKbkh6PiLnU9dQhIuYjYo+kEUl7bd+YuqYq2b5b0kxEfJG6FqBJzK+Nr475lWP4+l3SziXXR8rbsAHZHlIxuF6NiLdS11O3iJiVNKn8jofZJ+ke27+oeCttv+3DaUtqJeZXRphf2ah8fuUYvj6TdIPt62xvknSfpLcT14R1sG1JL0k6HhHPpq6nLra32d5SXh6WdIek79NWVa2IeCoiRiJil4q/yfcj4oHEZbUR8ysTzK981DG/sgtfEdGT9Jikd1Uc4PhGRBxLW1X1bL8m6WNJu21P234odU012CfpQRWvMo6W252pi6rBdkmTtr9R8c/3SERwKoYOYn5lhfmFi+IM9wAAAA3KbuULAACgzQhfAAAADSJ8AQAANIjwBQAA0CDCFwAAQIMIXwAAAA0ifAEAADSI8AUAANCgfwEZW7hcmTeJ+QAAAABJRU5ErkJggg==\n",
+      "text/plain": [
+       "<Figure size 720x360 with 2 Axes>"
+      ]
+     },
+     "metadata": {
+      "needs_background": "light"
+     },
+     "output_type": "display_data"
+    }
+   ],
+   "source": [
+    "plot_epochs = range(training_epochs)\n",
+    "plot_loss = autoencoder_train.history[\"loss\"]\n",
+    "plot_val_loss = autoencoder_train.history[\"val_loss\"]\n",
+    "plot_mae = autoencoder_train.history[\"mae\"]\n",
+    "plot_val_mae = autoencoder_train.history[\"val_mae\"]\n",
+    "\n",
+    "plt.figure(figsize = (10, 5))\n",
+    "\n",
+    "ax = plt.subplot(1, 2, 1)\n",
+    "plt.plot(plot_epochs, plot_loss, plot_val_loss)\n",
+    "\n",
+    "ax = plt.subplot(1, 2, 2)\n",
+    "plt.plot(plot_epochs, plot_mae, plot_val_mae)\n",
+    "\n",
+    "plt.show()"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Las predicciones del autoencoder sobre los datos de validación."
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "## Predicciones"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 107,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "num_scenes_vali = vali_data.shape[0] // frames\n",
+    "scene_vali_num = random.randrange(0, num_scenes_vali)\n",
+    "prediction_vali = vali_data[scene_vali_num * frames:scene_vali_num * frames + frames, :, :, :]"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 108,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "autoencoded_imgs = autoencoder.predict(prediction_vali)"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Plot de 10 imagenes del set de validación y predicción."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 109,
+   "metadata": {},
+   "outputs": [
+    {
+     "data": {
+      "image/png": "iVBORw0KGgoAAAANSUhEUgAAAj8AAAB8CAYAAACG/9HcAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4xLjEsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy8QZhcZAAAgAElEQVR4nOx9e4wc2VX+V9XV7+7px/S8Z/zc2F57vfF6s+yD8NvsZneTIKIgpEQIBH8gIkUI/ogURQpCRIIgIQQCgkIQiQKCCAgkIe8NWTYh2ezLXtvZl8f2ej3jefb7/X7V74/Jub5dU1Vd3VM93W3XJ1me6emqul+dc88999xzzxVkWYYFCxYsWLBgwcKdAnHYDbBgwYIFCxYsWNhPWM6PBQsWLFiwYOGOguX8WLBgwYIFCxbuKFjOjwULFixYsGDhjoLl/FiwYMGCBQsW7ihYzo8FCxYsWLBg4Y6C1MuXBUEY633xsiwLen8fd34AkrIsT+l9Ydw5dpMhYHEcB1h9cfw5Wnq6g9ud47jzg0ZftCI/txduDrsBFixYAGD1RQsWRgWqfdFyfixYsGDBggULdxQs58eCBQsWLFiwcEfBcn4sWLBgwYIFC3cULOfHggULFixYsHBHwXJ+LFiwYMGCBQt3FCznx4IFCxYsWLBwR8FyfixYsGDBggULdxQs58eCBQsWLFiwcEfBcn4sWLBgwYIFC3cULOfHggULFiyMFQRBgCB0PXlibCEIAmw2223LURRF2O122Gy2obWhp7O9LPQHURQhyzJkedyPSFEHdVRZltFut29LnqIowuFwoN1uo9lsot1uD7tJpkIQBEiSBLfbjWaziVqthlarNexmmQabzQan0wm/349arYZyuYx6vT7sZpkGQRBgt9vh8XgwOTmJUqmEbDaLarU67KaZBlEU4XK5EAwGMT09jWw2i2QyiWKxOOymmQZJkuDz+TAzM4O5uTlEo1Fsb28jl8sNu2mmwOl0IhwO48CBA5iZmcHa2hrW19eRSqX2vS1Dd35oUJFlGc1m87YyuDSgeL1etFotVKtVNBqNYTfLVNCgEgqF0Gg0UCgUUKlUht0s00CDitfrxfz8PCqVCpLJJPL5/LCbZhpoUJmcnMThw4eRz+exsbGBZDI57KaZAkmS4Pf7MT8/jxMnTiCRSODtt9/G5ubmsJtmGhwOByKRCI4ePYq7774bm5ubWF5exo0bN26byYjb7cbS0hJOnTqFo0ePYnV1FW+88QaWl5cB4Lbg6ff7cfz4cZw5cwazs7O4fv063njjDbz66qsQBGHsJ12Tk5O47777cObMGbhcLoRCIUiShEwmw/jtlxyH5vzQoOL3+7G4uIharYZEIjEUD3AQEEURHo8HkUgEx48fR6lUws2bN7G+vj7sppkGSZIQDAaxuLiI++67D9lsFsvLy7hy5cqwm2YaHA4HZmdncfz4cTzwwANIJpO4cOECLly4AOD2MLgejwdHjx7F6dOncfbsWUSjUZw7dw4/+clPbouIZSAQwL333ouzZ8/ixIkT2NjYgNfrRSwWQ7vdHvsBBQCmp6fx0EMP4ezZs/D5fAiHw7Db7VhfX0er1botIrJLS0t47LHHcPr0aRa1a7fbWFlZYRPncZalIAg4fvw4nnzySczNzSGTySAcDuPw4cO4fv06ms0mGo3G2AYIRFHEvffei8ceewwOhwNra2sQRRFzc3Pw+/1oNBrs335gaM7Pvffei4985CO49957IYoiyuUylpeX8fnPfx7pdBqNRmOsFfmxxx7Db/7mbyISiUCWZZRKJVy7dg3//M//jHg8PvbLCoIg4MMf/jB+/dd/HY1GA8ViEYVCAffccw++8pWvIBqNolKpjHWkSxRFfOxjH8Ojjz6KRqOBa9euIRwO493vfjcqlQq2t7dRKpVQq9WG3dS+IUkSPvnJT+LYsWOoVCp4/vnnUalUsLS0hJMnTyKRSKBQKKBcLg+7qX3B4XDgj//4jxEMBpFOp/GVr3wFjUYDTqcTx44dQy6XQz6fR6FQGHZT+4bb7canP/1pCIKAlZUVfPnLX0ar1YLL5cKRI0dQLpeRz+eRy+XG0gESBAFerxef+MQnUKvVcOnSJXznO9+BLMtwOByYm5tDvV5HuVxGJpMZW46BQAC/8zu/g1wuh6effhrnzp1DuVyGJEkIBAIQBAHVahXpdHrsxkZRFBEOh/GhD30I29vb+P73v4/l5WWI4k7aMS25t1otpNPpfRkbh+L8iKKIX/u1X8Pp06fRbDaxtraGZrMJm82GhYUFtFot5PP5sV2vliQJv/qrv4pQKIRUKoULFy6g1WpBkiREIhE0Gg1ks1mUSqVhN7VvOBwOfOADH0C5XMbGxgaeeeYZtNtt+Hw+eDwehEIhAECz2RxLYwTsDCpnz55FKpXC1atX8Z3vfAftdht+vx82mw0TExOQZRn1en0sOQqCAI/Hg4WFBWxubuLSpUv43ve+BwDwer3weDzwer1oNpuoVqtjZ3AFQYDP54PT6cSNGzfw05/+FBcvXoQgCGg2mwgGg7Db7XC5XCiXy2M5GRFFEX6/H+VyGSsrK3jmmWewvb0NWZbRarXgcDjgdDrhcDhgs9nQbDaH3eSeIQgCJiYmUC6X8eqrr+IHP/gBixDk83m2bGuz2SBJ0ljaHJJjpVLBK6+8gueffx6iKDI+yWQSDocDdrsdkiSh0WiMFUdRFOHz+VCr1XDx4kVcv34dbreb5YlWKhWUy2XGjz4fJIbi/FDi4dNPP41isYjXXnsNABAMBgHsDDq1Wg21Wm2sBExwuVzIZrN4/vnnkclk8Morr0CWZfj9fgQCAdjtdjgcDlQqlbEbUIAdY+R2u3Hz5k288MILSCQSeOutt5jBnZ2dhSzLsNvtEARhLGUoCAJcLhdWVlbwv//7v9je3kYikWDLJOQAiaIIURTHcuDkOX7nO99BNBplyyOZTAbZbBZOpxPAeCbtC4IAp9OJ1dVVfOMb30A8HmczTVmWEY1G4XA4IEkSRFEcy6UhSh9YW1vDt771LWQyGcan2WyiUCggn8/D6XQyXR03m0MbKlZWVvDss8+iVCrB5XLB6XRClmWk02nkcjnGe1xtjiiKuHHjBl566SUAYM4BAGSzWdRqNVSrVWZ3xsnmEI8bN27g9ddfB7Cz3E4bSGRZRrlcRqlUgt1u3xd7MxTnp9lsYn19Hf/1X//F1m6bzSb72eFwDKNZpqFWqyGdTuOHP/wharUa20GTSCSwvr7OZijjClrGy+VyeOmllyDLMnNcS6USVldX0W63Ybfbh9zS/iHLMvL5PIrFIpaXlyHLMsLhMERRRL1eRzQaRavVYoPpOBrcdruNdDqNUqmE9fV1FnoHdvhEo1G2k4Z4jhPa7TZisRgqlQpSqRSL8rTbbXg8HmQyGTYBGUd+ANBqtbC5uYlKpYJSqQS32w273Y5qtQpJkuB0OlGpVFAoFIa6rXgvaLVabBm92WzC5/PBbrcjn8+j1WrBbrdDlmVUq9Wx7Y+tVgvr6+
solUosquxyuZBMJlkQgCKW5PSME0fS03w+D7fbzZa5otEoSqVSR6Rnv3ZhDmUEpsS0gwcPYmtrC16vF5VKhQ025XJ5bDsqABaSvffee3H16lXmzImiyPJgxjH8zKPZbCKfz+O9730vXnvtNTSbTTbTBsCWg8Z1UAF2OMbjcXzwgx/E66+/ziI/siyzmRc5QONihJRot9vY2NjAL//yL2Nra4s5emSI9nsHhtmQZRk3btzA448/jkqlgpdffpnNNKkP7keIfZAQBAHXrl3DQw89BEEQ8NOf/hTVapXliBC3cebYaDRw+fJl3H///RBFEc899xxKpRIEQUC9Xt/FcRz1VZZlvPbaazh58iQkScJPf/pTlovGl9cgR2jc0Gw2cenSJRw4cABOpxPPP/88CoUCBEFgtpSwH/yEXpREEATTNGpmZoZ57C6Xi4Vo0+m0WY/YBVmWdd+omfymp6fZ2iXVh6lWq0gmk4PsmBdkWX6X3hfM5BiJRCBJEtrtNqvz02w2B5qQ102GgLkcw+Ew40hhWFmWkcvlbhuOwWAQNpuNGSAyPMVicWgczeTn9/vZMhA5q6IoshnngLCvfdHr9TJHgPILbTbbQBPV91tP3W43gJ3IOhU5pCjXoOS43xxpKY+fcBDHQWE/+6Ldbke73WZRK1rSHPCmEdW+ODTnBwBL6PL5fKjX64M2Rvsq5J/fD6Iosjo/5XJ50DOSfTW43D3h8XggyzIqlcpQZfjz9gyEIxnf25EjOTyUZzDoWk3D6IuU40QTkQFj3/si5bs4nU60Wq2B70Ichp5S1WOn08mKcQ4Sw+BIuUt2u52tIozjuEg2Rdl2quxss9lQr9f3Y0ewal8c+LKX1+tlnTGfz7MXQeF0Eu6gHZ9BgXaTtFottpWUDC3vwe+D4zMw+Hw+uFwutFotZLNZxoMiW8COPMeZI62xt1otZDIZFvGgApwk13HmGAgE2HbSTCbTsVTJJxeO6y7LUCgEj8fDtstS7oDH4wEA1hfHlR+wUyTO5/OxCCtx8Xq9AG5xHOfyC9PT0/D7/YxjuVxmmwxoiaTRaIx1he65uTkEAgHGsVAooNVqsSglFcUdV45LS0sIhUKo1+vIZrPI5XKo1+uYmJiAzWZDo9FAuVweaimUgTo/Pp8PH/3oR2Gz2ZDP5/Hmm28ilUp1LP+43W6kUqmxHFC8Xi9+7/d+r4NfIpFArVZDMplEs9mE0+lEOp0eS37ADsc/+IM/gMPhQC6Xw+XLlxGNRlGv15FIJNBoNFiFznHl6PF48PGPfxxOp5Nx3NzcZIU3a7UaRFFEJpMZdlP7hsfjwSc+8Qm43W7kcjksLy9jfX0dlUoFiUSCRXqy2eyQW9ofPB4PPvWpT8HtdiOfz2N5eRmrq6soFotIJpNsK/s4V+Z2u934oz/6I3i9XuTzeVy5cgVvv/02CoUCEokEisUi2/49rvB4PPj0pz8Nn8+HfD6Pa9eu4a233kI2m0U8HmfLzePs3Hk8HvzJn/wJ/H4/isUirl27hmvXriGZTCIajSKbzY61c+fxePCZz3wGTqcTjUYDb7/9Nq5cuYJYLIbt7W2kUinmwA4TA3V+zpw5g1/6pV+CIAiIxWJIJBIolUqIx+NsF8k417p55zvfibNnz8JmsyGZTDJesViMDSLjfu7M6dOnceLECTgcDqTTaUSjUeRyOUSj0dumGvc999yD+fl5uN1u+Hw+rK2tweVyYWtrC/F4fNjNMwUnT56E3+9nEQKn08kqAG9vbw+5dXvHiRMnOhImyRFPJpPY2NgYVrNMxfHjx5FKpdhmg0qlglarhVgsdttUjn/HO96B69evIxQKIZ/PI5lMolKpYGtrC2tra8Nunik4cuQIzp8/zzim02mk02msra3dFhwPHz6M73//+5icnGTFb2Ox2MjxG2jOz+TkJH7jN34DwWAQ29vbeO655xCLxVAsFoey28nstU2qWDk5OYl4PI6XX355qPwwgDyDUCiEJ554AtPT00gmk7h06RJz8kZRhkB/HB966CHMzs4inU7jzTffZAcm3k4cT506hYWFBeRyOVy/fp1FC4ZRL8TsvhgKhXD48GEsLCwgn8/j5s2bbBv/kOqhmN4Xw+EwpqamsLi4iGKxiO3t7Y5lof3GIPQ0HA7D6/ViYWGBnaOXzWaHttw8CI6Tk5MQRRHz8/Oo1WpsWWhYZyKa3RcnJydRr9dZ5W1ygIZYDHY4Cc+HDh1COBxmZ1sNs3DhIBK75ufnEQqFUKlUsLm5OexqvwNJsoxEIggGg6y+zTDDsYNKQAwEAmwNnpbzhoVBcfR4PJiYmOjI+RkWBtEXHQ4H/H4/24035CJwA+mLNpuNRe8GuRvPCAahp3zi/Sjk2A2So8PhgCAIQy/ma3ZfpHxXStwegeW70dvttd/Y7x0mQ8C+7TDRKrA16MJb+7n7wuI4fjtMVO6zi4fWLhSTsW99Ua1qM226GKRztJ96eidwtNlsu2pq0Q6+QTry+9EXaefzMPhBoy/uewU62tev9bdxhyAImtWbx7ngHw+SoVJeZIxuB1AZBjWOt4scaTutFsdxl6coimx2zYPX33HnaLPZ7giOTqdzFwfqo1S3aZz7JVXj5kH9kDjS9vBxlKXNZoPL5epoO+kobX2XJEnV5g4K+17h2W63w2637wpn0oug7eH8jG1cdhEJggCHwwGHw4FisajKjy/uxBfNGxfQ1miHw7HrJGxy/Mi7JyWm87DGBSRHp9O5a+cMb4zkn1d6BsDOqBkXkBxdLteu077JaWi1WpBlmU1W2u32KISwDYFq+rhcro46KTSg8Pz4s7CGvQTRC+hAT7fb3bHcTrbG6XQy2dEAQ1uMx4mj2+1m5z0qOZJ8SZ8lSUKtVhv6kmAvII4ej6ejYCPZU+Jus9ngdrvhcDhQKpXY8R6jDpvNBo/HA4/H05HXRPyIt91uZ6Vj8vn8wJfm98X5IUUNBALsQMgbN250NkSSEAwGceDAAWSzWdjtdnz605/GU089BZvNhp/85CdwOBzsZN8vfvGL2NraGolOTPyCwSB8Ph9sNtuuXV52ux3hcBgHDx5EKpWC1+vFn//5n+Pxxx9HvV7HT37yE9jtdpRKJfzsZz/DP/7jPyIajY4EPwBs5hEMBlm9Dd7BI4chEokwjj6fD5/97Gdx5swZCIKAH//4x5AkCaVSCZcuXcI//MM/IBaLjRRH0kOfzwdBEFAoFHZxnJmZwYEDB5BMJhEOh/H5z38eR44cgcPhwP/93/+xc4cuXbqEv//7v0c8Hh8pjna7vYNjLpdjfyenYW5uDktLS0gkEpiZmcEXv/hFRCIRiKKIc+fOQZIkZLNZXLx4EZ/73OeQSCRGgiPNJIPBIMuN4bfvU7HKpaUlLCwsIBaLYWlpCV/60pcwOTmJVquFc+fOwWazIZvN4pVXXsHf/d3fDboye0+gaE8wGITH44EgCB2V8YWfFx09fPgwZmdnEY1GcfToUXzpS1+Cy+WCJEk4f/48RFFEOp3G+fPn8bd/+7cjVXJEkqQOjgA6dpfSKeFHjhzB1NQUEokETpw4gS984
QvsO5cvX4YgCEilUnjppZfwN3/zNyNVdoSiPcFgkBVQTSQS7O+CIMDv9+PYsWMIhUJIJBI4ffo0/uIv/gITExMAdjgCO7saX3rpJfzVX/3VyOzCtdvtjJ/L5QKAjt2zoigiEAjg7rvvht/vRzKZxP3334/PfOYzcLlckGUZq6urAIBYLIYXXngBf/mXf2kav31xfmh2Qo6PMtpBzoPf78cjjzyCXC6HTCaD7e1tZLNZHD58GB/4wAcA7JwPcvLkSbz66qv49re/PXTPl4wpJZPSDFLJT5IkBAIBPPLII8hkMkin07h+/TruvvtuLCws4H3vex+AHX733nsvXnvtNXzrW98aCX7ATgKi1+tljg9/1gyBnKOHH34YqVQKmUwGr776KhYWFrC4uIj3ve997AiMM2fOMBkOO2JCHD0eD3w+H3w+HwDs4khyDAaDeOSRR1hJgxdffBGBQACLi4t44okn2LUPPPAAXn/9dXzzm98cek0L4uj1euHz+ZhjoMUxHA7jkUcewdbWFrLZLJ599lk89dRTWFxcxHve8x527cMPP4w333wT3/jGN4YaFeL50ZZ+isYp9VSSJIRCIdx///1IJBLIZrP47ne/i/e+971YWlrCu9/9bgA7/H7xF38Rly9fxte//vWhR72Io8/nw8TEBCvo2Gg0dsnQbrcjFArhvvvuYxy/9rWv4fHHH8fS0hIefPBBADsc/9//+3+M47Dr5/AcqShns9lU5Uh6eubMGSQSCeRyOfzbv/0bHnvsMRw8eBBnz54FcEuOV69exde+9rWh7ari2w7sFFal4qp0sLeWvTl9+jSSySRyuRy+9rWv4bHHHsOhQ4dw+vRpNp4+9NBDuHLlCr761a8O9FiTbqBl1omJCQSDQTidTtTrdU1+gUAA99xzD+LxOGKxGP7jP/4D73nPe3DkyBEcP34csizj+PHjeOCBB3Dt2jX853/+pyklcga6SErreJOTk1hcXGTenJry0aza6/Vifn4e99xzDyqVCi5evIiNjQ22HuhwOLC5uYlqtTp0x4DWYIkff4aXGsgTnpubw+nTp1EoFHDu3Dlsbm6ye9ntdqysrIwEP+BWNGRychLz8/PsbBYtjsSBZJjNZvHSSy+x+g607HDt2jXU6/WhOz7Abo4kR6WekkNLslpYWMCpU6eQSqXw/PPPY2VlpeP7b7zxBqrV6tAdH6CT48LCApxOJ9rttqqR5B13kmMikcBzzz2Ht956iy1Nt9ttXLp0aSQ4Er9wOIzZ2Vmmp2r82u02Wq0WbDYb5ubmcPfddyMajeLHP/4xrly5wv7eaDRw7ty5kam0y8twenpak6Msy6z9oihidnYWJ06cwPb2Nn74wx/ijTfeYH+vVqt44YUXUK/Xh+74ALcct8nJyY6zA5UcybGt1+uw2WyYmZnBiRMnsLW1hWeffRY/+9nPGL9SqYQXX3wRtVpt6I4PcItjOBzG5OSkJkdZltkypSiKmJmZwfHjxxGNRvHMM8/glVdeYfyy2Sxefvll1Ov1oTo+wC2nJhQKIRAIsERnNRnWajV2usPMzAzuvvtuxGIx/OAHP8CLL76IcrmMQqGAeDyO8+fPs9MgzMBAIz90wB4lo1FxLmWlXBJyLpdDrVZjSU+0xPXKK69gYWEBLpcLV69exVe/+lXEYrFBNt0QiB8NFnr86vU60ul0B79SqYRXX30V58+fZwZ7eXkZ3/3udzvCn8MEOWCtVosdR0KROR5UUj+RSKBarTKOsizjZz/7Gc6dO8eWTZaXl/Hss8+OTHiWOFJV1VqtpsmxUqkgGo2iXC53cHzttddw7tw5BINByLKMy5cv44UXXhiZqtDEsVqtolwuo1wuI5/P76roTE7f5uYmSqUS49hsNvHGG2/g3LlzLOKwvLyMS5cudRx5MiwQv0qlglKphGq1qsmvXC5jfX0dxWKRJXy3Wi1cvnwZ58+fh8PhQLVaxZUrV3D16tWRqZhMOUqFQoHltuTz+Y5lS2CHY7FYxMrKCu677z6WSNtqtXD16lVcuHABAFAul3Ht2jXcvHlzV/7esEBOTTabhSDsHNRaKBRUORYKBbz11ls4deoUS6ZtNBp46623cPHiRVbtmmpajRpHOgC60WiocqTjhK5evYoTJ06w5b96vY58Po9XX30VxWIR2WwWN27cYPV0hg3iRwU56dByZT+ikhtvvvkmjh49ypb+KG3g9ddfx2c/+1mkUimsra2x+5iFgS970UsgB0CrqFqr1WKl6IPBIPsM2DHYn/vc59BoNFCpVJDJZEbmfB5eiUk4Wvzy+TwSiQRCoRBkWYbdbgeww+/zn/98x8A7KvyAnbZTSXKSk1oyIXGMRqOYnJyE/PMT7ek8pf/+7/9GrVZjBm0UOdKSZaFQ0OSYy+Wwvb2NyclJADudlRyjr3/966hWq2g2mygWiyMxmybQmVd0ppeWHJvNJnK5HDY3NxGJRACgIxpGSwfkSIwKR56fKIq6/LLZLDY2NjA1NQUAHVGUp59+miWejkrUh9But5HNZtnES6vAIXFcX1/HzMwMgB2OdMDy9773PVSrVVZnZtQ40jEWlOepx3FtbQ2zs7MAwKIopVKJcaQDNEeNIw3kTqdT87BkcoBWV1cxPz/PoirNZrODI38I6iiAHHBKStfaSEAO0MrKCubn5wGAcSmVSnj66adRr9fhcDjY8qdZ2Jecn3K5jGazyTLz1SD//PDPmzdvot1us6UFfrCp1Wqqa/jDBg0EtHarBhocV1dXmeGy2WysA8diMdRqNRZhGTXQbIMO/1QDcVxZWUG73Ybdbocoimg0GuxsnlqtNrK7v5rNJiqVClueVQPPkZdjo9FAOp1mBRJplj5qoCUrSkDU+k65XMbKygo7n45m1alUCqlUiuVgjBJHfmuw2+1GoVBQrfPDy5DnV6vVkE6nkc1mmZ0Ztd2YtGTi8/ngdrtRLBYNcSR512o1pFIpVgRyFHeciqIIj8eD6elpuN1udgCm0mZocaxWqyzSw8tw1Dj6/X4cOnQIHo8HtVrNEEePx8M+o+OiKHo0ShwpHeT48ePweDy4dOkSEonErrGDJhirq6uo1+vwer3M/tAqAvE1m9/AixxOTk7iySefxKOPPopisYivfOUreP3113Vni5SwKEkSKpUKyuWyKWu1gyjmNDMzgw9+8IN4/PHHUalU8OUvfxkvv/yy7rqrz+eDx+Nh/CqVillRkIEUVltcXMSHP/xhPPHEE6jX6/inf/on/OhHP9INQdLWRpvNxviZMfMaVNGxQ4cO4bd/+7fx5JNPotls4ktf+hK+/e1v6x70SVtwiSMZsL1iUByPHTuGj370o3jiiScgyzL+6Z/+Cf/+7/+OZDKpeQ1tFxdFkcnQjDytQfTFe+65B7//+7+Pxx9/HKIo4l/+5V/whS98QffsMofD0cGPHFcTMJC+ePbsWXz84x/Hgw8+CJvNhm9/+9v467/+a9y8eVPzGso1pGUktUG2HwxKTx966CF88pOfxKlTp2C32/Hcc8/hT//0T3H9+nXNaygflDiaNYkcFMdHH30Un/rUp3Dw
4EE4HA68+eab+MM//EO88cYbmtdQPiVxNGvyMYi++NRTT+FTn/oUFhcXIQgCtre38fGPfxyvvPKK5jV8bTWzdPTnUO2LA4/83H///fjIRz6CBx98kG1P/LM/+zPdwwYLhcJIrF0awS/8wi/gQx/6EE6fPg2n0wmPx4ObN2/u2srPo1gsjtWBpw8++CDe//73swNOnU4nrl27hitXrmheQ3klarPSUYMgCHj44Yfx6KOP4sCBA5AkCR/72Mfw6quv6jo/5LiOC8dHHnkE73rXuxAIBCBJEn73d38XL774oq7zU61WR2p5UgvE7/DhwwB2Bvzf+q3fwrPPPqvr/IzacogeSE+npqZQq9Xg9Xrx1FNP4Zvf/Kau89NoNHbtQB1VCIKAhx56CE6nE8ViEX6/Hw8//DDuuusuXeen2Wwyp3XUeYqiiAceeAC1Wg2ZTAZ+vx8nTpzAkSNHdJ0f3qEbZY6iKId3ufEAACAASURBVOLs2bPMrkxMTGBychIHDx7UdX4ogrVf9nSgzo8oiviVX/kVtFot3LhxA0ePHsUTTzyBf/3XfzV00jItsYxqxxVFEe9///vZEsGRI0fwwAMPYHFxUdf5IbjdbrbrYhT5ATuzjSeffBLJZBJXrlzB0aNHcfToUczNzek6P8CteiO0K2NUOYqiiHe/+93Y3NxEuVzG0aNHMTExgenp6a7XCoIAr9c78gXyRFHEmTNncOPGDaTTaRw+fBjNZhPhcLjrtVRvpNFosGXLUYPNZsPJkydx48YNJJNJHDp0CNVqFX6/v+u1giAgEAigXq+jWq2OJD9gh+Ndd92Fa9euYWtrCwcOHGBL0d0gCAJCoRDb8TTKHA8cOIDl5WWsra1hfn4eqVTKUNVfQRAwOTnJVgtGlSPt3Lp8+TLcbjemp6exvb1tKGosiiIikQibXI7CjmAlqH4P8QsGg7h586ahXWg2mw3T09MolUoDP3R5oM5Pu93G9evXsbKywgqPbW9vdyUkiiJCoRDe9a53IZPJIJVK4e233x5kU/tCu93G66+/DofDweoU0QCqB1EUEQ6Hcf/99yOTySCZTBpyloaBVqvFdsBIkgSXy4WNjY1dOxOUoE565swZZLPZkef48ssvs+UPu92OjY2NrjvubDYbIpEI3vnOdyKdTiOdTo80x5deeokt0wmCgI2NDWxtbeleZ7PZMDU1hdOnT7P6VKPIsdls4rnnnoPX64Ug7Jz3tLW1hWg0qnsdbZM+deoU64tUWG3U0Gw28aMf/YjVaGq1WohGox2F49QgSRLbRkx9cZQ5PvPMM6wAZ61WQywW6yjiqAZJktiW/nHg+D//8z/MMedPr9eDJEmYn5/HsWPHRpqjkl+xWEQmk+m6mkMlUt7xjnewcX+Q/Aa+7PX222+zwnirq6tYW1vTDUMDYI4EZYyPctj9xo0bCAQCbB12fX296zZ8m80Gn8/HdkeNMj9ghyPtwCOOekslwG6Oo1BfQw+rq6sIBoOsxML6+npXYySKIrxeL9uqOg4cQ6EQRFFEqVTCxsZGV4NEpekpaX3YNUT0cPPmTbYDr1gssu36eqAjA2q1GrLZ7MjLcG1tDZOTkxAEAfl8HltbW13tBx2DUa1WkclkRp7j+vo648hvBtGDKIps11Q6nR5pPQWAra0thEIhxjEej3eN/NDEjHZFm1XvZhBQ8lNLdlaCdrIVCgUkk8mBy3Dgzs/m5iZ8Ph+rA7O9vd21vgvtBtra2kImkxmZOhtq2NraYu2rVqusKrUeqIjaxsYGstnsyOc3bW9vs10FlUoFsVisq0xo/XZzc9OQ1z9sbG1toVQqodVqsZ0U3drcarVQr9exubmJbDY78nlc8Xic7ZoolUqGDAwt51G0b5Q5Kvml0+muA32z2US1WsXm5ubI8wPQsQOmVCoZKvtBuxjJ2R3lQRPYOcaCOBWLRVb/TQ/NZhPlcnlsOPJ9j87p6ub80PZ2Khkzyg4eyZD0tFgsGuZHZ7MNmt/Ad3spD/LsBW6329RZyiCy2veSnEWzMRMxkB0me4HT6TS1Dsygdl/sBXcCR4fDYWpi8CD64l5gNj+MYF+k+ilmYRT11OLYO0atL1IdIxOh2hcHerwFcCsrvR8HYdSXg4C9Zd2PSnG4QWJcdtLsBZYcxx+3Oz8AI1MAb5CwOI4/9uvIo4E7P3vBqO6cMQu3Oz/gzuBowYIFCxbGCyPt/FiwYMGCBQsWLJgNy/mxYMGCBQsWLNxR6HW3VxKAdinR0cZBA98ZZ37A7c/RCD/A4jjquN31FLj9OVp6egu3O8dx5gdocOxpt5cFCxYsWLBgwcK4w1r2smDBggULFizcUbCcHwsWLFiwYMHCHQXL+bFgwYIFCxYs3FGwnB8LFixYsGDBwh0Fy/mxYMGCBQsWLNxRsJwfCxYsWLBgwcIdBcv5sWDBggULFizcUbCcHwsWLFiwYMHCHQXL+bFgwYIFCxYs3FGwnB8LFixYsGDBwh2Fns72EkVRttlsAAA6FkMQhF0/C4LQ9TtGvs9/3us9lT+32220223BKL9xRLPZTMqyPKX3nXHm2Gq1usoQsDiOOoxwHGd+gNUXCRbH0cad3Bd7dX4QDAb7agDvwAwCavfnnaBsNtv1HjabDaFQqONaAu9IGYHy+0qHTgtqTp/R5yaTya6Hz9lsNoTD4Z649INe35cRZDIZQ98jjv1g0Hra7bnpdNrQ9202GyYnJwcux0HACMdx5gcA8XjcUF8cV4696imw26b2ikHYFD30w5GH3gRe7Weta3v9vNvP/LiYSqUM8YtEImOppwAQi8VU+2Kvp7oz9PMi9K7hBarlyHRTfq2/ybJsqL163+uVr/L79LvRdvT7XCPo9g61nEgAmnLp9Rl0r25RPa3nGkW/eqqla3pt26us9qpjyr+ZIUcjzx+UHM3kZyaUz+b5Ktth5F5Gn6P8/jA59nqvvT6/3/vsF4yMG0Z+Vl6r1ofMelY/eqrXt7U+1+oXavqldx+9z/vBnnN+eAL9/qxUciNGcy/P1YJWe8x4Zq/t6rUNZkHtnoIg6LZHS0m17snfT0uu9B2zOJohi25tU/vcLN3sFVp9aC9yVPvbfstRr139PKufdmnJfS8OgtG2Dep9dnu2kuNe72fG9wfdh3rBXvt9r2PCIO5vBGo8u9kA5Xe7jTFatkPvuVp+hR5X0xKe9Tp/t8/Vfu9mdHt5rtqz9O49CM+617b02ob9QC+OWr/vvpfPjaJXfelFT818rtnYi2NN6Kdtg5LjIGDmux9FfmajX2dx0BHsUYsKDWKsUPtZaVfMjvjoQc+50nKIzH6W8ju9PK/nZS8jyzm9Lh1pXcsT2S+hmhWi1ft8r8/r5f693o/QLRJg1rIU/2wtefdz/72Gm3u5p1nPNQtmyrEXHVPru2briZHn9KJHvfYhI0tRZtuhUeTYC/Teh96zzGzDXp5r9H5mod8+N0i7wi/B6dmUXmyN1mdafzcamDB6/75zfqgxWg9Q80iNfL8Xo9nL/QeJfmbYRjz3fp/bL7qFLbX+Zsb71nv2Xu+v1Itu73+/9NQMbmroVY7
dDFq/z+31+kE8p9u77rV9Rp5tBudR52gW+uXZy/2NvhMzORpxEIy0rZt92q9xURnJMUtG3b5vtG/148j27PwoO5ryZSgNKf+z1svvRRH1vMd+nYj9hLJt+xkRGBQGZRj3cn8jnUTLyHTTU617qEFP1oN+b0bB89qLMz7qIH7Abjt1u+BO4Ajs7DxW61t69nWQ0HJA9fqT2vf4/9XsUzfbo3b/UZI/32YtGZr9HC30nPOjfKH8/2ov2wgh/lq9a4wOGP2+RLNDl2r37MUBHBS6hR+NdCCSmVJ2ZrZJ+ax+7qdso97PZrTV6PfMkHcvIeNuclT+rHd/vbabKUez9EpLF7SwV36jzlE5sPb7rP2EkmO73WY/K0H8+H/70T69/7Xayv9NyatX+6TXX/f6HrqNGfzv3Wwvz9WIrmo9d699se/Ij5onyj+41xmkkdl0t5CbGnrpqMpZk9qzjd5Pq61a9zFyX7OWS7pF2tR+3o8wcT9/63aNUWfZiHyNOt/KNvD9oNt3ekEvIeNu74M3KMo+rvVdI8/kv2t2SLwbtAYevaUEo21S64tmRiiV7dSCkYGg3/doFke6TstR6cax3W6rfs+IbvUy7phhU/sdF7U49qqfWu1S3m+v99G6t/J/anur1YIgCGi32yzio4w06z1L7zlqvIy8rz3l/ADd1x2VPxOUQtV7GWrf03u23nONQOs6pfLqXc8LmL9WjbdZ7RskuhlX8rIFQRhoOLMf6BluPZnychRFkaqEM470M/8M5X17ad9+oFtfpNkYcex2ba/PMgPd+qCWLMkI0z0kSdpTXzSDX79Ogdrf+KgB2RpJkjr0VO96o8/pFd046nFVOuN8tEAURV2Oowg9h5z+l2WZ6anNZoMsy7Db7V2DCWp6vBfHaa/QmlCRHW02m6DK0ZKk7ooYDQr0o6d7TnhWE0g3ZVb7TBRFtFotZnCVnUJNaZTPNAvK2YCRWbOSMxki/u/k9Srv20/7BmGUeh0E1GbPSodAzega4W0WR7qX3n31ohvKGRk5PVozbi0niHcOzUa/cuTbRoOl2gSkn3abMaPWmiDx99d7/2RXAHQ4ciRD/ntaEy8lem2DEXSzN93uT44q8Wq1WmzQ5O9jtE8ZaUOv6Maxm86Ro2O329FqtXZx5CebWn1zv6Blb7pNlHg58hxJV7vdS88hUl6/F25aY263CTJxkyQJzWaTjfdKx9VMPdW7z54jP70aRX42TYLlFZhehpoyaz1TaST3G7wAeAeO+PGRAvqfV2qtew5y5qz3WbfBmnfiaNbFn/1Cnddms7EZjN6z1TAsw8U/XzmzVMqT77w81AzEoJwe5TN7kSN9brPZWBTE5XKhXq+j2WxCEAQ2O9PTVTUMwnHVe5be92w2GxwOBwDA6/WiVCqh0WgAQAc/o7NMeqZW+wYha6WhV86qJUmC2+2GJElwOp3I5XJoNBosisA77d3aZ4Sj2VCz8fyECgBcLhd8Ph8kSYIkSchms0xXieOw7YYSRgZj4mqz2TAxMQG/3w+XywVZlpHJZFCtVlGtVjs4agUDlM9QTh76hZ796PYZQRRFhMNhBINBeDweNBoNpNNplEolVCqVnmWopaf873q625Pzozc76uaB8qCBhDoqdcpCodDhIdK99LxcPXJ7jQz1O+Ntt9uw2+1wOp3weDx0eBxyuRzq9Trjp+fg6L3DQXRwNUdTz7C32224XC64XC54PB7Isoxms4l0Oo1arcacoUE6cXowoqfd0G634fP54Ha74fF4AAC1Wo1xFEURNpttl55pzeyUP3dray9QDoZG5SjLMnw+HzweD9xuNwRBQKVSQTKZRLVaZf1wWHIkqPFTcyyVg2a73UY4HGYcRVFEoVBAPB5HtVrVjHb12i4zYJQjgTjKsoxQKIRgMAiv14tWqwW73Y5EIoFqtcq+268Ts18clVB+HolEEA6H4fF4UKvVIEkSEokEGo3GnmTIt2tYoPZPTU0hEonA7XajXC5DEAQmRz4qpIVB8jBiT/VsjSiKmJubQyQSgcvlQqFQYMtfxM8MGRrV856cH97j1DKw3RpOAnQ6nQiHw3C5XGi32+xFUGelZ/XiaKl5gPzst1f0ElGi75EA3W43pqam4HK5YLPZkMvl0G63USwWO/j1ArMUW20WqRU+VBvISYYejwczMzPwer0QRRHZbBayLLMDAdVmIHohUzNnmEb0VO99Ekev19vBMZ1Oo9VqIZ1O74oW6M3yBu3M9itHm83GOHo8HgjCzuGqjUYDzWZTc1fNfkYF6Dl6zoBa+2w2W4eeejweFrmrVCpoNBodOSRmOXj93qNfjiTDSCQCn8+HarWKZrOJYrGIRqOxa7Ac5kBvxKlT6hYtlbhcLubgFYtFVCoVOJ1O1Go1llDLP8MIzH4X/U5oKQJL0S2fz8ecWLvdzqLovBOrFQFSe+5eoz/d7Cn/HbVn8BFm4lgulyFJEuun/PKX8jlaUSZlG7TarQbTDjZVvhCth9KgcvLkSRw7dgzhcBiJRAKrq6u4efMmZFlmIWn+fvxztO6vNaPey6xH7dl631Pym5ycRCwWw40bN9BoNNBoNFCv14c+01BzFPXAv0tS1lOnTuGuu+5COBzGxsYGVldXUSgU4HQ6GUflc4zO8sxCv3pqt9shCAKOHz+OQ4cOIRwOY21tjc1QlByVuqH2u9noZ/LBy5GWgu666y4sLi4iEAhgdXUV2WwW7XYbDoeDLZ2o3WfQUNMdLX48L56fIAg4fPgwpqenEQwGWT+s1+uw2+0d/MySUS/vRqkn3Tgqr3M6nQCApaUlRCIRTExM4Nq1a6hUKqjX6ywCa1Tv9wPdbKryd5fLBUEQMDc3h1AoBL/fj2g0imKxyKIj/HLJMLlpTdb1IlOCIMDlcgEAi2y53W5ks1lkMpmOFRG1SIuRPq/Vtl6hZ0/Vnkt/dzqdEAQBExMTbPyIx+PIZDLI5XJsMqYlQ72IklY7u437pp3txT+Ub5iaYjudThw8eBAzMzMIhUIs01sQhI5ZinLW2cvsrN9oT7fruwmF+B04cICFL5vNJvvHe++95lFoYdAhbbUImMPhwNzcHILBIGw2G6rVasdsmg9hDtvRU4OenlK77XY7pqenMTExwaJ25XIZ9Xq9Q47KDsv/342/1iyqXz56UMqx3W6zCCytwRcKBZTLZSZHPaO9XzD6HtX42e12lkNRrVaRz+dRLpdZVIv0dJjg+RnRFd6wU3TA4/HA4XCgXC6jUCigVCqx5XagM/9rGLJUcuTboTfDJ46Ut0XLlqVSCeVymSV4601MRwHK9660O5R7Rs5qPp9HPp9HpVJhS0NKuzLKUAYeSIaEXC7XVYZGx3mt53eDqc6PmnCUs2Ly+mjWKUkSqtVqx3coHK/m6fb6Qnr1epXRCj2DIctyx/Zn4mez2WC329FutyFJEhtM6PvkxfP332vHHWRkQcmf5ENZ+4IgdITYeceg2/s3avjN5GRET/l8nnq9DgAsUZY6K/FTbgnvpS2EQRpuLTnyz6Roa6VSYYZWjaNWO/dbjspn820AsEv3ms0mALAlIQBsok
V9sdskYtjOA98GAk2iWq0Wi0TWajX2Hd754XnqPcdsjr3YXgC7bCqh1WoxO1qv11W39hvtR2bLUG2c0hs3tPpis9lk3PjJMoHPFzXKYy8OghFoObRa/GiZkniq8dNqXzc704vu7nm3lxZkuXObKYXRA4EAbDYbotEoUqkUJEnC5cuXkc/nUSgU0Gg0mOOgVGijERM1xTAqaP7abrMw5a41u92OiYkJSJKEzc1NRKNRCIKA5eVlFItFFItF9j3l+qZWu41gUMaYlyGtOTscDgSDQcZxc3MTsizj2rVrLDLCOxFGQvr7OWNTtkGLYyAQgCRJiEaj2NzcRLvdxvXr11GpVFCpVDrarTZbpc+7tYW+N8iZqxGOW1tbaDabePvtt1Gr1VgyMH8Po0syZsJI1InnRw45RXwkSUI8HkcsFkOz2cTa2hpqtRpzaPl78P9rtWUQ6Jcj5fpIkoRUKsWSf7e2thhH3nEdR478LrZ0Oo1kMolGo4FoNMrSCNSir0baYyZ6cbr4vkgcXS4XHA4Hstks45hIJNBsNjsSupVVkY1ESrTe/aAmzEoZUpoA7dBLp9Oo1+vIZDKMH6+jerLs9p57keuenB81o89/RpEPCmkBOzMvp9OJzc1N9ju9DH4ruNL76xbNUZLmX+AgOjQpIr9Nlo8SbG9vswTubDbLFFhZ/FDZXr1lhl4/N4sjbfWmz5Qcy+Uycrmc6lIQj2E5Pr3oKf2dOEajUTSbTVQqFRSLxY6ZGC8zPW77FeHRglKOJCPiSEa2XC6zLadaDtygdE0LRp7F8+N1kAxvOp1Gs9lEqVRiW4a17r/f/NTaoAY1jqIodnBstVoolUq7EoCHEalStn2vciTnh+wNJeQThs2RRzfnstVqdfRFvoBqJpNhHJWRn24TciOTEzNsUTd7SkuxSj2VJAm5XI7ZU54fXc8HNgbdF0051V35GTWYlg34YlQAmKEVhJ1dF+VymXnBdI0ybK32v5G29RrG7Tag8VCGIPnQHvGjAYXnp2yj8tm9KvggFYSWH5V1l0iBSYbVapUZLX6be7d2DjLiofYstd+VciTHu9VqMY7VapVt4VeuXwPqxqXXkPQgQ/HEkZ91AjuOH3GkaAE59EaW83id6CbHQchaKUNlUcNWq8V2kFKicy/89tIms++lJkN697VaDUAnR633PYoOLIH4kX5SGoEsy2zJSylHYG+Tiv18HxQZIdkBt2wsgA6OFBFRk6NaBLuftuwFWvaU+FAuL6VIAGCpEd34GW2v1nXdItXAAJa9eA+OlnhowGi322xdmmZg1DgaUPjQntb9ldBaWulHuEZnS2Q8qXMSX1pvp9wCXgi8seIFbkSJjSwfmQ1ZltlyJR/9aTQaLMGZEkdJbsoQezeodeperu/lGWohYn72xZdYp9oTlMTN63U3jr20fZCy5GdixNHhcHREgajAGOUyAbfeCbB7pqgWKem1PWaCZtLEj3Y/0dJyrVZDrVYzxM/MNpl9L56jy+WCKIpsdt1oNNiMmqJaygrWe3n+fjkGJBebzQaPx8OSnKkvFotFVtSQvk9QLgP1an/M4tktMk//S5LElmVpeaharaJUKqFer7McPGVek9pz9tveaNlTfuy22+0IBAKw2+2QJAmNRgOVSoXlTSr5GeHBy7YbD7U2KrHnre78rI//jBR4aWmJddKtrS0A2DWgqCmHllepF/lQ/k3Zpl65aYH4ud1uLC0tweFwQJZlRKNRAGCdlOfHt0ErAtKtTfvtAJGhXVpaYoNKIpGALO8UpKzVah3LKNQuvs1G2mpEmfuFlp7yUR6n04nFxUV4vV4IgsCWEHK5HKrV6i6Oe21Pt1mc2SCnhzhS3alms4lsNst2XFBb+P+VbdeC1vvdD/D8fD4f7HY7CoUCms0mMpkMW67k27Wf7TMD5OwsLi5iYmICTqeTbTJIp9PI5/Mdky3AHI77GSEijgsLCwgGg3C73WwZL5VKMZ0dN468DRKEnWrcVOzP6/Uin8+jWq0ikUh0bDiga/gx0Ug0fRjg5WGz2TA/P49IJAK/39+xw5LfNLIX2Zlha3p2frQcD2X42eVywe/3Y2lpCeFwmJHN5XIs8akf71Vv6URpgPtZgjDi+BA/r9eLxcVFRCIRJvRsNsvW4PkIj/LeRgWm5bwNUunpXVKF6oWFBUQiEUiShLfeeguZTAaCICCVShmKtKnJQStsqzW76RXd9JSeQxwXFxcxNTUFh8OBGzduIJPJQJZlljSqdhaUWcZokJEfQdg5D8ntdmNubg6zs7NwuVxYW1tjOkrJzUaPeNBrs96kyGxQBI6qqc/NzWF+fh5utxsbGxssR6RUKnUc72AE3Rx35TLwoMAnj7pcLszOzmJhYQE+nw9bW1tIJpNoNpsoFAoswj6uHKnI38zMDHPytre3WSJ3Pp/fc4VjNRtsRhSw27W8rjocDszMzGBpaQnBYBCbm5vM8cnn87o2cL8nFmrP1ZrAAbeOkolEIjhw4ADC4TA2NzcRj8fh9XqRyWQ6+qIRB2YvEXa97/Ts/HRTFFneWSrx+/1sZ9DExARLgqJ8GPqu0uh2m3Gq/V3pXPSrxEaUip5D/MLhMCYmJlioj7xb/p5Ko9vLwKC1xDJIg0T3pZ1rVDpflneW7hqNBqtUTW0xWrNIGb1Se7YZnVupp2qOsSAIuzjSdykEzbepVwfWKI9BR72osBidq0PGh4wtfVctjK3XZiPPHuSgSff2+/0QRRGBQAA+nw+iKLLBkgqo7YWfnk3ar6gB8fL5fKwgXrFYRC6XY0myytn0uHH0+/0AALfbzWxpNptFKpViSetanIza1P2OnCjths/nY+kgsiyjVCohmUwikUgglUp15dit7crVDrP46Tk+/Od01BHluOZyOcRiMU1+Rp3VfscEvev6WvbSazD9rVgswuVyYWNjA9FoFPV6Hclkkm2FVmZ1q0GZDEU/K+v/aIXQugmsX8iyjGKxCI/Hw7z2Wq2GRCKBUqnUoez8cgnfDjUeRvmZ2WH13g3JcGtrC6lUCvV6HbFYDPl8vkOGfNSNftfiQj8rD8vkDa1Z/NSeq/y/VCrB5XJhe3ubHesQjUZRKBR2FYlTDhY8R7XESyVHLQfRjNm1nhxLpRKcTie2trbYIBKPx5HNZnftKOF58fdTclRyp+9qHYexV2hF2QRBYPy2t7fZ9tlEIoFcLqd59AHPj37W4qfsi4Pg141juVyG3W5HNBplcksmk8jn8x1FN/vluB8y7Ma7VCrBbrcjFoshm82i1WohlUohn8+zRFniobQ3wK16TdR+XnZafXE/OPIypSMdotEoW8ajKs7EUW2nM3Ghv/G5inocBxEl0nOAqAI+ybDdbiObzbIlWj6FgG+7MgrH538pd8Xx/PaCPS97KUHEGo0G255I1xWLRWZs+RO/tWYYagqqJKzs8Frt3WuITAkqRkVLPwDYScq0q4sKAKoNwjwXfmaqdAjUdqSY5dHz70ZNmakqdTqdZu2gXAqtU9u1nBg9p0atmKVWW41C6/vKgUGNIxlbkqOWnio56SXvGdXTXjny99OToyRJy
GQyLOmeT5Dlzw3q1hf5wZOPaGrpqhk8lXy1+GWzWcaPcmC0+PFt4e+r5wQMmp8aR/qZtrRns1mWmE99kddTNY7K6AM/wCpt0DBkSL+TrCg1gpcjcVTbCq3FUWvCM6hdfkZAHOlsMkEQ2ESL/qbmtPDjA/++lLLoxrFf2ak9R02OpIPFYpEtp1POnfLUdj07SPei39VWT/TGQSM8B7LsBdyqpEqRHooUaM2klLU36Dn84EOhUDUDoUdWOYvtxs0IqtUqy2AnfsrBiHdmlAOoHj+1BFu1e/P36wfd+JIMKekXUN8pQ4pJik+/0/q8ngyNRrR6ncFovR9lx6vVaowjz0epU7wctTjSd2j3hlJWWhz7dWaVsyUt0A4ZNY58m5QOjjLqpTYL442tkYlGvzNRvetoxqysbKwlQzWnRpblDn78Ds794Kd1LfUTsgvb29uq7dCSobKf8SUOlByVDsSwOMZisa4caTzhr1eWOaD+qSZHMyaPRqC0AaSntHGE+CjfCy9H/h58HTySlzIPSs3x2eu4wV+nJX96r41GA6lUSrO/8O+flyFfUoWXIfHkZdiNgxEdHViFZ/Lc+MQmoNOgKv+32WxsMAJubVelKAud4NtoNNjaIp1EzQuXf14/g6YRxaABkArFqfFReqp0BhYNoA6HA61Wi5X8djgcqNfrqNfr8Hg8AMD4Aeozff7+ZoI3HHw1XGXkhH/XpLy0C4x24ZBjVK1W4XA4WD0Z4sZz3G+Q4g/hngAAHNBJREFUHNUO01Xr8MSRascIwq3DJSnfizjWajXGzev17rqX8nmDAK8zPEel7JQyJV0Fds5xc7lcTI6FQkGT437LsR9+wI4cK5UK22FEeTSyLCOXy40MPyVHvrAfsNuxVjo6VGPM6XQyGZbLZWQyGWZviCOwk5PSrzO+V/TLkWr+uFwulktTLpeRTqc7OLrdbgiC0MHRLK7d7qPsXzxHtWt5jrRVnPSU6qnxHElXeY7KZSUjzrsRnt1AdoJ/tvI6cuQEQWDjPG1coLaWSiWkUinY7XbU63VUq9UOPTWyYqAH05a9tJwPrZmuUqElSWK7qMhx8nq9cLvdLMLicrlQq9VQLBY7wrz0fbVO0is3rUiBGgctUJv40DLxc7vdzKmgAxfr9TrLW6jX6x07N/T48c/rRQmMOnfK2QL/v5Ij77lTzRxZlhEIBDoOlSRjxHOkeyorRO9loDFiiNQ+U+PKz7JoWcXj8TCnZ3JyEna7HdVqtaOz5vP5jsrRoijqGnYzOfL315MjgS/yyBe2JGMaiUTYwZnxeJwlFZMc+UmNWXIcJD/6R8dDTE1NQZIklEolbG5uskGVbA0/6+SPGxiknhrhqPwevzuMisv5/X7Y7XZEIhGI4s7BoPR32rzA7xSjSMIgOBqxVUY40qBJJUdo40I4HIYgCIyTFkf6XG1g3itHtc+13qMWR+LpcDjYUS10rFAgEEC73WZ9j7jo2VS1dvbC26ieqvHmP+O5ET+n0wm32834eb1etNtttruP7GY+n+/KrxdOe1720hKqVvSFBkYKv5KXSGHY6elpzMzMIBwO4/jx46xYkt/vh8fjQSAQwKVLl3Dx4kUsLy+zs6VosFU+ux9u3T5T8lfjxyetUciO+M3NzSEcDuMd73gHU+yJiQm4XC5MTEzgwoULuHDhAq5evcqOkOArCu814qN2jZF3xoeXlZW4yaGTJAnT09Nse/zRo0dhs9ng9/vh8/ngdDrh9/vxyiuvMI6xWIxxVOs0/UBLT7X+puTIG1n6x1cspS3H09PTOHLkCERRZDrqcDjg8/lw/vx5XLx4kXGk5TEzZmDKdivbb/QaJUe73c64zs3N4eDBg5iensahQ4cgiiK8Xi+cTifjyMsxHo93lO7XauN+8uO5CcJOtJUOHl5cXGT8Dhw4wAZSu90Ou93O+F28eBFXrlxBPB5n+UVm8NsrR6Wu0t/oHCW73Y6DBw/i8OHDrE8KgsAiJFoczZSh2vVG7qfkCKCDI3FwOp04cuQIDh06hJmZGczMzAAA01FJknTlqDbhNRNq4yJ9zi/BKZd7iCNF7I4dO4ZDhw5hamoKk5OTAHYisuQceb1enD9/HhcuXMDy8jIrgcDLkY909SpTNXno3Yd3nHk95b/vdrvhcrngdrtx4sQJHDp0CJFIhO0sJvkSv5dffpmN+6lUapee9grTd3sRlC+Z37nFvwSqcNlut+F0OnHw4EH2Eu666y6EQiE4nU4cO3YMMzMzEASB7Rzb3t7G2tpax+DLt1HL4zUDgtCZfKYMwSlnmQCYMSJ+hw4dQjAYhNPpxIkTJzA9PQ1B2DkuIpFIIBqNYn19nXm/evUtzODTjSMN3ErHjzx4ALtkuLS0hEAgAKfTibvvvhvT09MAdnY9JBIJxGIxbG5uqspwr0a3Hz1VntWmdAxoCWFpaQmHDh3C5OQkZmdnWYTr5MmTmJqaArCzy4oO1dza2mL32esuhW581D5T6qraLjbiSDVllpaWcODAAUxOTmJychITExNwu927OCYSCcTjcWxtbXVEWMxyZPvlR4MJz4+cAo/Hg7m5OSwsLCAcDiMQCCAQCMDtduPuu+9m/Hg9VfIbFPrhyIOcO4/Hg9nZWczMzGBycpJNstQ4JpNJVRnuJ9Q48pEa5QSGDj2lCXMwGITX64Xf79fUU6UczeqLfNuMcuSjbMoJLoFkGQwGEQqFWKkDtb6Yz+cRi8UQj8cRjUY7ItYEMyZcyiU8pa7wE9lu/Ggi7fP52MSKDgj3eDw4deoUIpEIACCbzSIWi7F/e7U1pu/24r+ndAb4GQr/OW9wg8EgXC4XfD4fK93ucrnQaDSQzWbh8XjYIZp03pLL5dLdRmw2N/67SkeAP7OFPicHz+12w+fzsZ9pLZqWSTKZDLxeL9vmSfk/Rvj12nYjM04+6Y4UTRkpII7USV0uF1uXtdvtqFQqbLZNh9jyMqT8H8pHUDMi/XTYfvSUlyHPE7iV60McSSY2mw2lUontTimXy0ilUoyjLMsd6/FKOWpxNtp2LTkSH2B3gi+vp/wgQL9TX3Q6nczh5ouvlUolCIKg2hfVOCrbZTb0+PEOJ/0sSRKbMVMyfy6XY/eiGlZut5v1RTUZakUS94ujsi9SPyUZ8v2OtlS32220Wi0Ui0XI8k7OJHHUkmG3NIZBclTKkXYMEUeKxpZKJVZEjwZbniN9rtcXeb69wug1RjkqHXZa5nK73YxLs9lU5VitVlGv1xlHpX3px97o2Sk1/eB1kZ6p5MdPMqlQp9PpZLs0Kb+w1Woxfu12W1WG/Th5fS17KR+iFmHhBUhJr0SYT4Sm0NzExAR7Ae12G+l0mhWEqlarLBm60WhgdXUVyWQSyWQSsryzhd7tdmseqNkLN6NQ40dhVP7ANtrB4PP52Lpss9lkA6Qsy8xBoGTvlZUVXX58W/vxeLW+q/ycDI0gCIwjJaeRDKlztVotTExMoF6vs/oqqVQKTqeTyZDymxqNBlZWVpBKpdiuAOJo1qyzVz0ljgBYIjrlBFDSPS3r1Wo15PN51Go1JJNJJvdqtcqcWpJj
Op1GKpUCgA6OenLohaORz3k5kpNJZ3xRMjvJkuqQVKtVZLNZVCoVxGIxttZeq9Xg9XpZ/ZwbN24gm82ykhZU/2o/owc8P7fbDVmWmeNGB37yuipJEiqVCtLpNIrFIqLRKONHmynq9Tra7fZI8OM5Uh4WADZQ0C43sjd0yDBFrQqFAot2UF/0+XyM48rKiiZHwNzI3V45ArfO3bPb7SgWi4jFYsjlctjY2GCRBtJT4ri6uqrLkXj2mzLRD0fKbXG5XIwjv3O2Vqsx20J1gYgjbQLyeDysfg7JkarTFwqFjiT9vfBS3oOP/ij5kUPn8/kgyzKzifx5nsSPSsJsbW0hm83ukiHfF0mGmUwGwN711JTdXmpeFg2ItDbrdrsZedqaKssyM0qUzLSxscEU3+fzoVwud8yg6SXS2WC0rkvtUA5wZs5UlPcnfg6HgwmJ6sOQEtOgSAPn1tYWS5T1eDyoVqu7+JGTR/yUTg/dl4/69KvYWjMCUuxms9mRx0LRKHIMqB18gm80GoXdbmfLYdVqFYVCYRdH2g5O+qHWjn4iW0a48hxJjk6nEz6fjxlXOp9NjeP29nbHIZrEEUBXjlqO2F44GpEjLQeQI04zfbqeOBaLRUiShFgsxhxCiiDkcjmIosj6IRXYo90ag5KjEX4UKp+YmEC1WmX/+O3CxK9arSIWi8FutwPYkWG5XGZRn0ajweRI/dkov0H2Ra/XC5/PxziWy2VWwkDJUZIkxONxJkNy+pQcyZEdtAyVPLU4+nw++Hw+BIPBDo58pICSmIkj9UWeI01EqS8qOfLOjppN7QV60RReJ3iOlMRMm3gqlUpH0nmj0UC5XEa9XkcikYDT6WTLRKSrPEd6R+12m40bSo5KGfQKvXsQR+qLExMTCIfDzG7SuWXArc0f5XIZNpsNiUSCTfBplyIV01XKsJcxQ68vmna2l/IFAGCh11AoBLfbjWKxyI62oJfAz1gAMEWnGXatVkOhUGAzVDqFemJigh3ISOuLSqLKsH8v3LopMnm3dISHz+dDsVjsUEj+PlSkrFQqoVwud2xP5KtflkolVKtV+P1+VCoVVX4k6H5rVmiFLpXvkGbJdrsd4XAYfr8fpVIJ+XyeRQ1448UX8OITv8lQEUeSMc+RQrtqMlQapn44qukpfUYcI5EIJiYm2OBAsyseyiJltNxFAw4NlHQgI3GUZVlXTwcpRyXHQCDAnFFy9HiQrvJyBHYGVTqgUMmRBmCaqWqF2vciw278HA4HpqenEQwG2bEP5OTx9yJ7QDuASIZ0cjjZokHz64ej3W7H1NQUgsEgi5I3m02mYwTSU7InNBCSvdlvjmqDr5a9cTgcmJ2dRTgcZhND0j3+OuJYLpdZFEGNIx00reTI2xstJ6EXjsqftfq6kiONGbQLlr+WnBmSk5auEkcaF3l7wz+bt9e95Dtp6SndS43f/Pw8pqammJ7S8isvQxrfyDEiW0NjBq+nJEM6RUFLT/XazGPPu7209tpT6GpiYgLz8/OYnZ1lCbw04FEIkAZSqhED3EoUppcmy7eKJFKODJFXq2yqTHTshZvyZ/4F844H8VtaWsLs7CxisRjW1tYQj8c7+AHo8M7JceLD0MRPEHZyoKhCNt1H2SZ+i7la+NEoTy0QR/LiFxcXMTc3h2g0ypYnaSZFSqzGkZYTlDM3LY70bJ4j/dwPR6N6GggEsLi4iIWFBWxtbaFer7Py7Eo50oyZzz3gi0HSc6igI+kpb2zo5370VMlRD2QEm80mgsEglpaWsLCwgI2NDVbvhZcjcMvpI468rvKzU2BniZDfVaLVF9V+HhS/AwcO4ObNmyz/qhs/yh2h2il8gUqen56t6YdfLxxJx8LhMA4dOoQDBw5gdXUVxWJR9cwkXk/JwaMlaz6qqcZRmZdiFkc9rrwcw+EwDh8+jMOHD2NlZQXFYhGJRILJkU8pIFlSziG/TEbLJYIgaHIEOvuiss29cOSdAbVIBHCrZk8kEsGRI0dw1113MTnSrldejhR5JEeP8vHI3pLN0eOo1bZ+7anWdRSRqdfrbKfvsWPHsLq6yo5h4cdxChLw1a1pdxvJlnwFwLie8mO0HnpyftScAy3vikgtLi7iyJEjmJ2dZXVsUqkUOwOET6QlhebX8PicDOoc5NGSweLJKp2UfhwgvSgB/U78FhYWcPDgQczPz8PhcLCdWkp+dA9qvxa/RqPB+PGJcGbx64U3LeEtLCzgwIEDmJ+fZxGBeDzOBg9aOgDAZqJ82/lkWooEkbyVHOl7fKLeXrh101PqZPPz88wxoATYWCzGZs28nio50kyG11u+HH8verpXaMmx1Wphfn4ei4uLmJ+fR6PRQCaTQTweZxyVcqTCajxvnmM3OQ6Coxo/kiHt3pqdnWU7mLa3t7vyU05qiIuSn1Inh9EXZ2dn2T/aTbi9vc2cHOLI6ynpG3Ewg+NeocdxZmYGs7OzLGoQCoWwubnJJshkP5V9EbhVMZ9sDdlUPY7K8hN7tTfK6Bh/P+IYiUQwPT2NcDiMQqHAatzwjhx/P+JIDhAdncTX+SGOZG/4PquUo7JdvfDTAzlrkUiERdLpcHNyjJQy5PnRxBhAB18+zUJLTwlGZdiTC09C5YWr9RBBEFiBrdnZWRw9ehTT09OYnJxEIBDo6FhKZaGXQoMOKSeFAJUeH+8FaiWwdQuB8Ry1+Ci9X0rUnpmZweHDhxGJRNi2Wf67WhEHnh/NmokLfw3/mVYEwyi/bt9VypNkODU1hYMHD2JycpKdZE/g85v0ONLMWU2PlDMVrfb0wtGInvIcaWt+KBRCKBSC3+/fJUelUSMDxDsI3fRUSyd75dqLHG22nYJ+4XCYlR+g5Vr6Li9HXt/JAFF9Ll5Xu8lxL4ZWD0pjZ7PZ4PF4EAwGsbCwgImJCQQCAbbzUIsfyY04avHjn9WNXy990ShHAIxjIBDA3NwcvF4v2/LcjSMvQ4p27oWjWVDj6HK54PV6MTMzA5fLxepnUTv4qA3PkfhpyZEHz5F+3wtHpfOkdR/SVdoxGolEWHIwX2VcOQbw74fnqSZH3r7w99mLXqpdq8aR+FEubjAYBABWFJbap6anfBSWZGhET+keauOHHue+l734m6sZcr/fj3K5jHK5zJKcDx48yGoRJJNJttTBe3E8KQpzAmChLpvNxpIseQ+RyGvNUowqtRFHSRAExq9UKrHEtPn5eWQyGUSjUbb0pcaPX+ahNtMMlLx/WgsFOkN55PH36tQp29+Nu5IjraPPzMwglUohHA4jkUjs4giAOTkkE/rHFwmkECjJld9yzbeRPuvVKPWqp4VCAeVyGdlsljl4oVBIlSN1QpIFn2hIctTTU9JvXo5mcOQ/53kSx3w+j3w+j3g8jmAwyP6pnTNEbSSOZISosBjx4eVIOsvLkY8UmekUKPW0VCqx3TwbGxvMMQgGgx3nDGnx47fXqvEjHko9HQQ/NY4+n4+V+4/FYiyvgpwhPY40a6aoQ7PZ7IgQ0eaKQXHUuk4ZKSOO8Xgcm5ubzDbSZgvqW0DnbltKmwB2ooDEk+RIHHkbpGdvzLCpWvf
wer0ol8uIRqNYXV3tOO7I6/WypUmlEyPLMlvmouiKHkeyQdQWpb1Rjo+98uM/57mSDDc3N+H1elnOVrvdZgVE1fjxMqRNNcSPnCM1PeX5UWDA6Li/591eSgWmh9OSyPb2Nq5cuYJkMolIJIJMJsMarSxcxYdn+QGBjkSoVCqsJhC//ssvpdAL4MOcvSqzUjmUAycNeMRveXkZiUQCkUiEbaUkT5bnwXu4wK0ZDC0H8jVx+OsoF4r40bviFaEXjnodnFcWpQzj8TjC4TDjSO2gmSSvC9TxeI7kJJIMyWFQcqT7mCFDJa9uehqLxRAKhZBKpTpmzcSRv4cy/ErHr+jJkcK+dA3dwwyOyr8ReI4ulwvRaBTBYBDJZJJ9l9qklCMfzQJ21t3puBmlHEkXtOSoFbU0g5/NZsP29jYcDge2traY49qNH/8ZDbS8reGjzt30lP4fNEdJkrC5uckmIEY40vJXu902zJGuVePYK4xM2HiOoihifX39/7d3brtpJEEYLlCMQfKNo6yUu32Jff832HfI5iKJY+EgTBIlMMxeZL/mp1IzTI9taW3qvzFgDv1NVVdXH8eur69LR3IIo3ZCfvz4UbbE1zKO5dPnSNsNprVubm7MzOz9+/f25s2bcgJ8X7whAcCObAqC0cdUrslkcnw6/Zh4U+On7BRtmsbevXtnb9++tdvb26OyYEM+z3cwRa98fKdfV0g81fZGN5WcqosPSn4iZ/ZZF3v427a15XJpm82mHCimThFVDp6TyV9dXRVD6+gQW5V1izlrZ/Qu4mP4oseq7XZb7rS8XC7t/v6+8ClDdN1wUuXj0EOyeBgXi8XRSnmdG/b3i6rhOiXK9/HjR2uaxu7u7uz+/r7sSvCMfTbcbrfljBgYGSWCEW4zO2LUBdFjGfv8dLvd2qdPn2y/3xc/Xa1WvT0l7MdftrbDSKKgo1/YsctPH8rYpciOm83G1uv10Xd5O5LUqK9yFg6M2Cuyo/fVGjvW8v38+dNubm5st9vZarWyzWZTdrXp+yI+Aio9zshPtS4+Bl8to9mveMMtKNbrdYmnQxgZAeKoir666OPpQxhreTkHrWka+/r1a4k3UV30jNQh7MgRJLV2HFsXtR71se52O7u7uyubQRh99vVNxWv4atM0R75KYgOjxhsdPWKd5lPURWxBHdzv9/bhw4dixy4/VT4fT4g1fXxqw+l0Ws5q6y1rTeV79epVe319ffKiqOE5m0C3PXMjTx2y46/2LBaLhX3//r1kdjpUhgOw2p/Xo2HN6XRqq9XKdrtd7zjfxcXFEZ9n85XOzMqNWPndpmlKr0qndLgu2quYz+eFj6wW+caG5zqt4su0XC7/btv2r1OMr1+/Dvm6GLnTrmfkSALPqJV2qA2Vsc+G2+325FitMiqXl/dTM/uNUQ8a02FVteNisbBv374dMfL93k9h6WL8b3v2IEbu8VNjR7PDiCvXnaBPmfkb2ZHPD2HEV8cwjuGbzWa/jQ7Ap9tia/mol1wvFpN28ZmZ3d7eDqqLYxjNDjGTaS12TqoiRvxUfTmqixEj5RjrpxFnxKiLfbUx1YXNnpG6XMvIGprHZIw4/eu60J6y664un+ipLefz+REj0tEiz6h2JAbX1EVuMTHET9v2cBiu2oFkxo+saZ30fFoXlU/PC1Q+vkOXjXz+/Dmsi6N3e/kL4XvS/J9hKW0oWQcRLU5WUDMrGaquu/DzmzrkzkWZzWalkRmzniJij76DeUrPR7LnpY0nyQx8ZLTKx6IvhkIJAvDpzrAa+Z6KZ1RWbEgiQNZNmSI+Mytl0s+YHWxIZdfdCfCyLgEb1vaQI1bPFfkpiQC20MXMPnjo69iR//FZDWhD/PShjN5+kR2VEd+J7MjndXclfmp2OEtG7Qgjr3s7jmUcwqc7QjQ50J1o+n3qp/DpNn6YCLLeT7v4fBLylIxqwyg+e0bt8ZMQsHsIe+r0y2PaMFLE6Nc1wtLHyP/VT/lejaOsI9GpE+riU8cbntNG8F7t8Gsnkuf6fcSRqLOs7YP6qr4O45hp9qFtBn7Fe/VzOl2stoO7i0/rJEmw2pBkjw7QkHZx9O0ttNfsQfxF0uydihb1wng/Q46shdFhPuYLdW5TGxaGwMjodT5/LJ/n8D0VXtfgEfHpaM5kMinrYJSPhojvY9V7lNXCp9dsKKOWO+qN+cfehgwh63u1ohJMOZtBM3YSw8iGXHdlpBLUVFYfNNSW3k/1dX5Lh1i7fpsh5D476vy1JlDUgbF+anbwVa6lJmW+zN6OMGqQiq4dXNhRF0DTyGtiq73tyI5j6iJ8lHMoX/Q5nzR4P1U+TYK1cTrFxwGnT82I/4xhxG6aBPuEXuON7qCq9VP1J18P+xgpk04D+c6LMu73+zLd4e3o63NUF3UHVa2GxJsuRpIUTdhUWhfZtckoitkhpvqYpu0icci3G0Pl2wwtF4+9T2gd6ZrK47O0LREf5fYzB33xFB/vaxerkh/fM9GpFy2Uf59eLB328o683+/LHLSZHU2XTKfTct+hL1++HN0vTM8iYQhMe6E1FVUrpWbHWhEjPu/I/N9XYIbg4eN3ptNfpz9fXV0VPsqvQ926FVB/NxplGsKoQ73+fz7Io4iRx56R04PptakNdSjWM15cXBwNnWq5hjJSJh9kuwK4+qkO2aoYldNpBuxIYo4dV6vVIEYNfrWM3leH2FH9ts+OupsEO8LIwnVvR6bWlJEGaYwd+/j8KBxMyv0QPuoifJeXl7/xcYNU+PSaPpRR41fE6Ov+EMa2bUt8Xa/XR4xwRTb0jI/pp1FM9c/1mmoCo53OiDGyo2ekzSA51AZ+DKPvOHe1i/65T+4oHyNT2JGDgDX++Lo4m816400to28zIn+MElfliuoFfHrOD3zqp9y0tiuedtmwry4+yl3dfeOIfPA1s7DiTCaTkrlpY6wOtNvtbD6fW9M0dnl5WQKqAjKiolk+vz20skZ8+l1aPnVa/XzETRbLsN8pPpxUe6C6Wl57MV2O1ceoAYTXtMes8kHWD0fCGNlQE4q2bcvppJ7R21AZa5M7fo+/2oB4P+2y6Sk/1Z2K2ljBSE878lO1o06V1TQoESOPo0Sd8ntG/a7Ijj75VTv2MaodlbHGjpENPF+kKOnzdfGp+GrrYh+jf1zLaGYhI+XtYiTe9DHWSBm13J6rLxGqiak0fPv9/ohRlxj4mMoUn//tofKNPa91xZuhjH0xVeMNo17YUT+j8UYZx7YZp2yoiXkUa5Bv9/v8FD61ofLpVLTGhz7GqgXPk8nk1sz+GfyB/5f+bNv2j743PHM+s5fPeJLPLBmfgV66n5q9fMb00//00hmfOZ9ZB2NV8pNKpVKpVCr13FV/mlMqlUqlUqnUM1YmP6lUKpVKpc5KmfykUqlUKpU6K2Xyk0qlUqlU6qyUyU8qlUqlUqmzUiY/qVQqlUqlzkqZ/KRSqVQqlTorZfKTSqVSqVTqrJTJTyqVSqVSqbPSv/O87lrIc6LrAAAAAElFTkSuQmCC\n",
+      "text/plain": [
+       "<Figure size 720x144 with 20 Axes>"
+      ]
+     },
+     "metadata": {
+      "needs_background": "light"
+     },
+     "output_type": "display_data"
+    }
+   ],
+   "source": [
+    "n = 10  # Número de frames para comparar.\n",
+    "\n",
+    "plt.figure(figsize = (10, 2))\n",
+    "\n",
+    "for i in range(n):\n",
+    "    \n",
+    "    ax = plt.subplot(2, n, i + 1)\n",
+    "    plt.imshow(prediction_vali[i].reshape(64, 64))\n",
+    "    plt.gray()\n",
+    "    ax.get_xaxis().set_visible(False)\n",
+    "    ax.get_yaxis().set_visible(False)\n",
+    "    \n",
+    "    ax = plt.subplot(2, n, i + 1 + n)\n",
+    "    plt.imshow(autoencoded_imgs[i].reshape(64, 64))\n",
+    "    plt.gray()\n",
+    "    ax.get_xaxis().set_visible(False)\n",
+    "    ax.get_yaxis().set_visible(False)\n",
+    "\n",
+    "plt.show()"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Guardamos las imagenes en el directorio /test_simple."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 121,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "out_dir = \"{}/test_simple\".format(base_path)\n",
+    "if not os.path.exists(out_dir): os.makedirs(out_dir)\n",
+    "\n",
+    "for i in range(frames):\n",
+    "    scipy.misc.toimage(np.reshape(prediction_vali[i], [64, 64])).save(\"{}/in_{}.png\".format(out_dir, i))\n",
+    "    scipy.misc.toimage(np.reshape(autoencoded_imgs[i], [64, 64]),).save(\"{}/out_{}.png\".format(out_dir, i))"
+   ]
+  },
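+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Note: `scipy.misc.toimage` was deprecated in SciPy 1.0 and removed in 1.2. A minimal alternative sketch using `imageio` (assuming the frames are normalized to [0, 1]):"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "import imageio\n",
+    "\n",
+    "for i in range(frames):\n",
+    "    # Scale [0, 1] floats to 8-bit grayscale before writing the PNGs.\n",
+    "    frame_in = (np.reshape(prediction_vali[i], [64, 64]) * 255).astype(np.uint8)\n",
+    "    frame_out = (np.reshape(autoencoded_imgs[i], [64, 64]) * 255).astype(np.uint8)\n",
+    "    imageio.imwrite(\"{}/in_{}.png\".format(out_dir, i), frame_in)\n",
+    "    imageio.imwrite(\"{}/out_{}.png\".format(out_dir, i), frame_out)"
+   ]
+  },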
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "# Construir el modelo Encoder"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 40,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "from keras.layers import Flatten"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 47,
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "Model: \"model_8\"\n",
+      "_________________________________________________________________\n",
+      "Layer (type)                 Output Shape              Param #   \n",
+      "=================================================================\n",
+      "input_1 (InputLayer)         (None, 64, 64, 1)         0         \n",
+      "_________________________________________________________________\n",
+      "conv2d_1 (Conv2D)            (None, 64, 64, 8)         136       \n",
+      "_________________________________________________________________\n",
+      "leaky_re_lu_1 (LeakyReLU)    (None, 64, 64, 8)         0         \n",
+      "_________________________________________________________________\n",
+      "batch_normalization_1 (Batch (None, 64, 64, 8)         32        \n",
+      "_________________________________________________________________\n",
+      "conv2d_2 (Conv2D)            (None, 64, 64, 8)         1032      \n",
+      "_________________________________________________________________\n",
+      "leaky_re_lu_2 (LeakyReLU)    (None, 64, 64, 8)         0         \n",
+      "_________________________________________________________________\n",
+      "batch_normalization_2 (Batch (None, 64, 64, 8)         32        \n",
+      "_________________________________________________________________\n",
+      "conv2d_3 (Conv2D)            (None, 32, 32, 8)         1032      \n",
+      "_________________________________________________________________\n",
+      "leaky_re_lu_3 (LeakyReLU)    (None, 32, 32, 8)         0         \n",
+      "_________________________________________________________________\n",
+      "batch_normalization_3 (Batch (None, 32, 32, 8)         32        \n",
+      "_________________________________________________________________\n",
+      "dropout_1 (Dropout)          (None, 32, 32, 8)         0         \n",
+      "_________________________________________________________________\n",
+      "conv2d_4 (Conv2D)            (None, 32, 32, 16)        528       \n",
+      "_________________________________________________________________\n",
+      "leaky_re_lu_4 (LeakyReLU)    (None, 32, 32, 16)        0         \n",
+      "_________________________________________________________________\n",
+      "batch_normalization_4 (Batch (None, 32, 32, 16)        64        \n",
+      "_________________________________________________________________\n",
+      "conv2d_5 (Conv2D)            (None, 16, 16, 16)        1040      \n",
+      "_________________________________________________________________\n",
+      "leaky_re_lu_5 (LeakyReLU)    (None, 16, 16, 16)        0         \n",
+      "_________________________________________________________________\n",
+      "batch_normalization_5 (Batch (None, 16, 16, 16)        64        \n",
+      "_________________________________________________________________\n",
+      "dropout_2 (Dropout)          (None, 16, 16, 16)        0         \n",
+      "_________________________________________________________________\n",
+      "conv2d_6 (Conv2D)            (None, 16, 16, 32)        2080      \n",
+      "_________________________________________________________________\n",
+      "leaky_re_lu_6 (LeakyReLU)    (None, 16, 16, 32)        0         \n",
+      "_________________________________________________________________\n",
+      "batch_normalization_6 (Batch (None, 16, 16, 32)        128       \n",
+      "_________________________________________________________________\n",
+      "conv2d_7 (Conv2D)            (None, 8, 8, 32)          4128      \n",
+      "_________________________________________________________________\n",
+      "leaky_re_lu_7 (LeakyReLU)    (None, 8, 8, 32)          0         \n",
+      "_________________________________________________________________\n",
+      "batch_normalization_7 (Batch (None, 8, 8, 32)          128       \n",
+      "_________________________________________________________________\n",
+      "dropout_3 (Dropout)          (None, 8, 8, 32)          0         \n",
+      "_________________________________________________________________\n",
+      "conv2d_8 (Conv2D)            (None, 8, 8, 64)          8256      \n",
+      "_________________________________________________________________\n",
+      "leaky_re_lu_8 (LeakyReLU)    (None, 8, 8, 64)          0         \n",
+      "_________________________________________________________________\n",
+      "batch_normalization_8 (Batch (None, 8, 8, 64)          256       \n",
+      "_________________________________________________________________\n",
+      "conv2d_9 (Conv2D)            (None, 4, 4, 64)          16448     \n",
+      "_________________________________________________________________\n",
+      "leaky_re_lu_9 (LeakyReLU)    (None, 4, 4, 64)          0         \n",
+      "_________________________________________________________________\n",
+      "batch_normalization_9 (Batch (None, 4, 4, 64)          256       \n",
+      "_________________________________________________________________\n",
+      "dropout_4 (Dropout)          (None, 4, 4, 64)          0         \n",
+      "_________________________________________________________________\n",
+      "conv2d_10 (Conv2D)           (None, 2, 2, 128)         32896     \n",
+      "_________________________________________________________________\n",
+      "leaky_re_lu_10 (LeakyReLU)   (None, 2, 2, 128)         0         \n",
+      "_________________________________________________________________\n",
+      "batch_normalization_10 (Batc (None, 2, 2, 128)         512       \n",
+      "_________________________________________________________________\n",
+      "dropout_5 (Dropout)          (None, 2, 2, 128)         0         \n",
+      "_________________________________________________________________\n",
+      "conv2d_11 (Conv2D)           (None, 1, 1, 256)         131328    \n",
+      "_________________________________________________________________\n",
+      "leaky_re_lu_11 (LeakyReLU)   (None, 1, 1, 256)         0         \n",
+      "_________________________________________________________________\n",
+      "batch_normalization_11 (Batc (None, 1, 1, 256)         1024      \n",
+      "_________________________________________________________________\n",
+      "flatten_7 (Flatten)          (None, 256)               0         \n",
+      "=================================================================\n",
+      "Total params: 201,432\n",
+      "Trainable params: 200,168\n",
+      "Non-trainable params: 1,264\n",
+      "_________________________________________________________________\n"
+     ]
+    }
+   ],
+   "source": [
+    "flattener = Flatten()(autoencoder.layers[38].output)\n",
+    "encoder = Model(autoencoder.input, flattener)\n",
+    "encoder.summary()"
+   ]
+  },
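+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "As the summary shows, `autoencoder.layers[38]` is the bottleneck batch normalization with output shape (1, 1, 256); flattening it yields the 256-dimensional latent code that the LSTM will work with."
+   ]
+  },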
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "# Construir el modelo Decoder"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 55,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "from keras.layers import Reshape"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 83,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "encoded_size = autoencoder.layers[38].output.shape[-1]"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Recreación del modelo Decoder a partir de las capas ya entrenadas del Autoencoder."
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Recogemos el output vectorial del LSTM y lo reconstruimos en un array."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 57,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "decoder_input_flat = Input(shape = (encoded_size,))\n",
+    "decoder_input = Reshape((1, 1, encoded_size))(decoder_input_flat)"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 78,
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "Model: \"model_19\"\n",
+      "_________________________________________________________________\n",
+      "Layer (type)                 Output Shape              Param #   \n",
+      "=================================================================\n",
+      "input_5 (InputLayer)         (None, 256)               0         \n",
+      "_________________________________________________________________\n",
+      "reshape_1 (Reshape)          (None, 1, 1, 256)         0         \n",
+      "_________________________________________________________________\n",
+      "conv2d_transpose_1 (Conv2DTr (None, 2, 2, 128)         131200    \n",
+      "_________________________________________________________________\n",
+      "leaky_re_lu_12 (LeakyReLU)   (None, 2, 2, 128)         0         \n",
+      "_________________________________________________________________\n",
+      "batch_normalization_12 (Batc (None, 2, 2, 128)         512       \n",
+      "_________________________________________________________________\n",
+      "dropout_6 (Dropout)          (None, 2, 2, 128)         0         \n",
+      "_________________________________________________________________\n",
+      "conv2d_transpose_2 (Conv2DTr (None, 4, 4, 64)          32832     \n",
+      "_________________________________________________________________\n",
+      "leaky_re_lu_13 (LeakyReLU)   (None, 4, 4, 64)          0         \n",
+      "_________________________________________________________________\n",
+      "batch_normalization_13 (Batc (None, 4, 4, 64)          256       \n",
+      "_________________________________________________________________\n",
+      "dropout_7 (Dropout)          (None, 4, 4, 64)          0         \n",
+      "_________________________________________________________________\n",
+      "conv2d_transpose_3 (Conv2DTr (None, 4, 4, 64)          16448     \n",
+      "_________________________________________________________________\n",
+      "leaky_re_lu_14 (LeakyReLU)   (None, 4, 4, 64)          0         \n",
+      "_________________________________________________________________\n",
+      "batch_normalization_14 (Batc (None, 4, 4, 64)          256       \n",
+      "_________________________________________________________________\n",
+      "conv2d_transpose_4 (Conv2DTr (None, 8, 8, 32)          8224      \n",
+      "_________________________________________________________________\n",
+      "leaky_re_lu_15 (LeakyReLU)   (None, 8, 8, 32)          0         \n",
+      "_________________________________________________________________\n",
+      "batch_normalization_15 (Batc (None, 8, 8, 32)          128       \n",
+      "_________________________________________________________________\n",
+      "conv2d_transpose_5 (Conv2DTr (None, 8, 8, 32)          4128      \n",
+      "_________________________________________________________________\n",
+      "leaky_re_lu_16 (LeakyReLU)   (None, 8, 8, 32)          0         \n",
+      "_________________________________________________________________\n",
+      "batch_normalization_16 (Batc (None, 8, 8, 32)          128       \n",
+      "_________________________________________________________________\n",
+      "dropout_8 (Dropout)          (None, 8, 8, 32)          0         \n",
+      "_________________________________________________________________\n",
+      "conv2d_transpose_6 (Conv2DTr (None, 16, 16, 16)        2064      \n",
+      "_________________________________________________________________\n",
+      "leaky_re_lu_17 (LeakyReLU)   (None, 16, 16, 16)        0         \n",
+      "_________________________________________________________________\n",
+      "batch_normalization_17 (Batc (None, 16, 16, 16)        64        \n",
+      "_________________________________________________________________\n",
+      "conv2d_transpose_7 (Conv2DTr (None, 16, 16, 16)        1040      \n",
+      "_________________________________________________________________\n",
+      "leaky_re_lu_18 (LeakyReLU)   (None, 16, 16, 16)        0         \n",
+      "_________________________________________________________________\n",
+      "batch_normalization_18 (Batc (None, 16, 16, 16)        64        \n",
+      "_________________________________________________________________\n",
+      "dropout_9 (Dropout)          (None, 16, 16, 16)        0         \n",
+      "_________________________________________________________________\n",
+      "conv2d_transpose_8 (Conv2DTr (None, 32, 32, 8)         520       \n",
+      "_________________________________________________________________\n",
+      "leaky_re_lu_19 (LeakyReLU)   (None, 32, 32, 8)         0         \n",
+      "_________________________________________________________________\n",
+      "batch_normalization_19 (Batc (None, 32, 32, 8)         32        \n",
+      "_________________________________________________________________\n",
+      "conv2d_transpose_9 (Conv2DTr (None, 32, 32, 8)         264       \n",
+      "_________________________________________________________________\n",
+      "leaky_re_lu_20 (LeakyReLU)   (None, 32, 32, 8)         0         \n",
+      "_________________________________________________________________\n",
+      "batch_normalization_20 (Batc (None, 32, 32, 8)         32        \n",
+      "_________________________________________________________________\n",
+      "dropout_10 (Dropout)         (None, 32, 32, 8)         0         \n",
+      "_________________________________________________________________\n",
+      "conv2d_transpose_10 (Conv2DT (None, 64, 64, 1)         129       \n",
+      "=================================================================\n",
+      "Total params: 198,321\n",
+      "Trainable params: 197,585\n",
+      "Non-trainable params: 736\n",
+      "_________________________________________________________________\n"
+     ]
+    }
+   ],
+   "source": [
+    "decoder_l0 = decoder_input\n",
+    "decoder_l1 = autoencoder.layers[39](decoder_l0)\n",
+    "decoder_l2 = autoencoder.layers[40](decoder_l1)\n",
+    "decoder_l3 = autoencoder.layers[41](decoder_l2)\n",
+    "decoder_l4 = autoencoder.layers[42](decoder_l3)\n",
+    "decoder_l5 = autoencoder.layers[43](decoder_l4)\n",
+    "decoder_l6 = autoencoder.layers[44](decoder_l5)\n",
+    "decoder_l7 = autoencoder.layers[45](decoder_l6)\n",
+    "decoder_l8 = autoencoder.layers[46](decoder_l7)\n",
+    "decoder_l9 = autoencoder.layers[47](decoder_l8)\n",
+    "decoder_l10 = autoencoder.layers[48](decoder_l9)\n",
+    "decoder_l11 = autoencoder.layers[49](decoder_l10)\n",
+    "decoder_l12 = autoencoder.layers[50](decoder_l11)\n",
+    "decoder_l13 = autoencoder.layers[51](decoder_l12)\n",
+    "decoder_l14 = autoencoder.layers[52](decoder_l13)\n",
+    "decoder_l15 = autoencoder.layers[53](decoder_l14)\n",
+    "decoder_l16 = autoencoder.layers[54](decoder_l15)\n",
+    "decoder_l17 = autoencoder.layers[55](decoder_l16)\n",
+    "decoder_l18 = autoencoder.layers[56](decoder_l17)\n",
+    "decoder_l19 = autoencoder.layers[57](decoder_l18)\n",
+    "decoder_l20 = autoencoder.layers[58](decoder_l19)\n",
+    "decoder_l21 = autoencoder.layers[59](decoder_l20)\n",
+    "decoder_l22 = autoencoder.layers[60](decoder_l21)\n",
+    "decoder_l23 = autoencoder.layers[61](decoder_l22)\n",
+    "decoder_l24 = autoencoder.layers[62](decoder_l23)\n",
+    "decoder_l25 = autoencoder.layers[63](decoder_l24)\n",
+    "decoder_l26 = autoencoder.layers[64](decoder_l25)\n",
+    "decoder_l27 = autoencoder.layers[65](decoder_l26)\n",
+    "decoder_l28 = autoencoder.layers[66](decoder_l27)\n",
+    "decoder_l29 = autoencoder.layers[67](decoder_l28)\n",
+    "decoder_l30 = autoencoder.layers[68](decoder_l29)\n",
+    "decoder_l31 = autoencoder.layers[69](decoder_l30)\n",
+    "decoder_l32 = autoencoder.layers[70](decoder_l31)\n",
+    "decoder_l33 = autoencoder.layers[71](decoder_l32)\n",
+    "\n",
+    "\n",
+    "decoder = Model(decoder_input_flat, decoder_l33)\n",
+    "decoder.summary()"
+   ]
+  },
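+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "A quick sanity check (a sketch, assuming the encoder/decoder split at layers 38/39 is exact): chaining the new encoder and decoder should reproduce the full autoencoder's output, since both halves reuse the trained weights."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "roundtrip = decoder.predict(encoder.predict(prediction_vali))\n",
+    "direct = autoencoder.predict(prediction_vali)\n",
+    "np.abs(roundtrip - direct).max()  # Should be ~0, up to float error."
+   ]
+  },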
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "# Output del Encoder"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "encoded_imgs = encoder.predict(vali_data)\n",
+    "encoded_imgs.shape"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "# LSTM"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "\n",
+    "El output del modelo encoder sirve como input para la red LSTM."
+   ]
+  },
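+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Each LSTM training sample is a sliding window over the encoded frames of one scene: `time_steps` consecutive latent vectors as input and the next `out_time_steps` as target. A minimal sketch of that windowing (the real batches come from the generator defined further below; `window_scene` is a hypothetical helper, not part of the original notebook):"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "def window_scene(encoded_scene, time_steps = 6, out_time_steps = 1):\n",
+    "    # encoded_scene: latent vectors of one scene, shape (frames, data_dimension).\n",
+    "    X, y = [], []\n",
+    "    for start in range(len(encoded_scene) - time_steps - out_time_steps + 1):\n",
+    "        X.append(encoded_scene[start:start + time_steps])\n",
+    "        y.append(encoded_scene[start + time_steps:start + time_steps + out_time_steps])\n",
+    "    X, y = np.array(X), np.array(y)\n",
+    "    if out_time_steps == 1:\n",
+    "        y = y[:, 0]  # Match the LSTM output shape (batch, data_dimension).\n",
+    "    return X, y\n",
+    "\n",
+    "# Example: windows from the first encoded validation scene.\n",
+    "X_demo, y_demo = window_scene(encoded_imgs[:frames])\n",
+    "X_demo.shape, y_demo.shape  # ((194, 6, 256), (194, 256))"
+   ]
+  },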
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Definimos el optimizador a utilizar en las redes LSTM."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 80,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "from keras.optimizers import RMSprop\n",
+    "from keras.layers import RepeatVector, LSTM\n",
+    "from keras.losses import mean_absolute_error, mean_squared_error, mean_squared_logarithmic_error"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 81,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "lstm_optimizer = RMSprop(lr = 0.000126, \n",
+    "                         rho = 0.9, \n",
+    "                         epsilon = 1e-08,\n",
+    "                         decay = 0.000334)"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Opciones de ajuste de las redes LSTM."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 84,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "time_steps = 6\n",
+    "out_time_steps = 1\n",
+    "data_dimension = encoded_size\n",
+    "\n",
+    "encoder_lstm_neurons = 256\n",
+    "decoder_lstm_neurons = 512\n",
+    "attention_neurons = 400\n",
+    "\n",
+    "activation = \"tanh\"\n",
+    "loss = \"mae\"\n",
+    "batch_size = 16\n",
+    "\n",
+    "stateful = False\n",
+    "\n",
+    "use_bidirectional = False\n",
+    "\n",
+    "use_attention = False\n",
+    "\n",
+    "use_deep_encoder = False\n",
+    "\n",
+    "use_time_conv_encoder = False\n",
+    "time_conv_encoder_kernel = 2\n",
+    "time_conv_encoder_dilation = 1\n",
+    "time_conv_encoder_filters = 2048\n",
+    "time_conv_encoder_depth = 0\n",
+    "\n",
+    "use_time_conv_decoder = True\n",
+    "time_conv_decoder_filters = 512\n",
+    "time_conv_decoder_depth = 0\n",
+    "\n",
+    "use_noisy_training = False\n",
+    "noise_probability = 0.3"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 85,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "dropout = 0.0132\n",
+    "recurrent_dropout = 0.385\n",
+    "use_bias = True"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "## Modelo LSTM para predicción de frames."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 86,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "input_frames = Input(shape = (time_steps, data_dimension))"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 87,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "l0 = LSTM(units = encoder_lstm_neurons,\n",
+    "          activation = activation,\n",
+    "          use_bias = use_bias,\n",
+    "          recurrent_activation = \"hard_sigmoid\",\n",
+    "          kernel_initializer='glorot_uniform',\n",
+    "          recurrent_initializer='orthogonal',\n",
+    "          bias_initializer='zeros',\n",
+    "          unit_forget_bias = True,\n",
+    "          dropout = dropout,\n",
+    "          recurrent_dropout = recurrent_dropout,\n",
+    "          return_sequences = False,\n",
+    "          go_backwards = True, \n",
+    "          stateful = stateful)(input_frames)\n",
+    "\n",
+    "l1 = RepeatVector(out_time_steps)(l0)\n",
+    "\n",
+    "l2 = LSTM(units = decoder_lstm_neurons,\n",
+    "          activation = activation,\n",
+    "          use_bias = use_bias,\n",
+    "          recurrent_activation = \"hard_sigmoid\",\n",
+    "          kernel_initializer='glorot_uniform',\n",
+    "          recurrent_initializer='orthogonal',\n",
+    "          bias_initializer='zeros',\n",
+    "          unit_forget_bias = True,\n",
+    "          dropout = dropout,\n",
+    "          recurrent_dropout = recurrent_dropout,\n",
+    "          return_sequences = True,\n",
+    "          go_backwards = False, \n",
+    "          stateful = stateful)(l1)\n",
+    "\n",
+    "l3 = LSTM(units = data_dimension,\n",
+    "          activation = activation,\n",
+    "          use_bias = use_bias,\n",
+    "          recurrent_activation = \"hard_sigmoid\",\n",
+    "          kernel_initializer='glorot_uniform',\n",
+    "          recurrent_initializer='orthogonal',\n",
+    "          bias_initializer='zeros',\n",
+    "          unit_forget_bias = True,\n",
+    "          dropout = dropout,\n",
+    "          recurrent_dropout = recurrent_dropout,\n",
+    "          return_sequences = out_time_steps > 1,\n",
+    "          go_backwards = False, \n",
+    "          stateful = stateful)(l2)     \n",
+    "\n",
+    "output_frames = l3"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 88,
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "Model: \"model_20\"\n",
+      "_________________________________________________________________\n",
+      "Layer (type)                 Output Shape              Param #   \n",
+      "=================================================================\n",
+      "input_6 (InputLayer)         (None, 6, 256)            0         \n",
+      "_________________________________________________________________\n",
+      "lstm_1 (LSTM)                (None, 256)               525312    \n",
+      "_________________________________________________________________\n",
+      "repeat_vector_1 (RepeatVecto (None, 1, 256)            0         \n",
+      "_________________________________________________________________\n",
+      "lstm_2 (LSTM)                (None, 1, 512)            1574912   \n",
+      "_________________________________________________________________\n",
+      "lstm_3 (LSTM)                (None, 256)               787456    \n",
+      "=================================================================\n",
+      "Total params: 2,887,680\n",
+      "Trainable params: 2,887,680\n",
+      "Non-trainable params: 0\n",
+      "_________________________________________________________________\n"
+     ]
+    }
+   ],
+   "source": [
+    "lstm = Model(inputs = input_frames, outputs = output_frames)\n",
+    "lstm.summary()"
+   ]
+  },
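+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "As a sanity check on the summary above: a Keras `LSTM` layer has `4 * units * (input_dim + units + 1)` weights (four gates, each with an input kernel, a recurrent kernel and a bias). So `lstm_1` has 4 * 256 * (256 + 256 + 1) = 525,312 parameters, `lstm_2` has 4 * 512 * (256 + 512 + 1) = 1,574,912 and `lstm_3` has 4 * 256 * (512 + 256 + 1) = 787,456, which matches the printed total of 2,887,680."
+   ]
+  },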
+  {
+   "cell_type": "code",
+   "execution_count": 89,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "lstm.compile(loss = loss,\n",
+    "             optimizer = lstm_optimizer,\n",
+    "             metrics = ['mean_squared_error', 'mean_absolute_error'])"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "# Entrenamiento de la red LSTM"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 90,
+   "metadata": {},
+   "outputs": [
+    {
+     "data": {
+      "text/plain": [
+       "(90000, 256)"
+      ]
+     },
+     "execution_count": 90,
+     "metadata": {},
+     "output_type": "execute_result"
+    }
+   ],
+   "source": [
+    "encoded_train = encoder.predict(train_data)\n",
+    "encoded_train.shape"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 91,
+   "metadata": {},
+   "outputs": [
+    {
+     "data": {
+      "text/plain": [
+       "(10000, 256)"
+      ]
+     },
+     "execution_count": 91,
+     "metadata": {},
+     "output_type": "execute_result"
+    }
+   ],
+   "source": [
+    "encoded_vali = encoder.predict(vali_data)\n",
+    "encoded_vali.shape"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 92,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "from math import floor"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 93,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "shuffle = True\n",
+    "scene_count = len(encoded_train) // frames\n",
+    "sample_count = frames\n",
+    "scene_iteration_count = floor((sample_count + 1 - (time_steps + out_time_steps)) / batch_size)\n",
+    "batch_samples = scene_count * scene_iteration_count"
+   ]
+  },
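+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Worked out with the values above: `encoded_train` holds 90,000 frames, i.e. 450 scenes of 200 frames; each scene yields `floor((200 + 1 - (6 + 1)) / 16) = floor(194 / 16) = 12` batches, so one epoch covers 450 * 12 = 5400 train batches (and 50 * 12 = 600 validation batches), matching the counts printed below."
+   ]
+  },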
+  {
+   "cell_type": "code",
+   "execution_count": 94,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "def generator_batch_samples(encoded_train, batch_size = 16, time_steps = 6, out_time_steps = 1, frames = 200):\n",
+    "    scene_count = len(encoded_train) // frames\n",
+    "    sample_count = frames\n",
+    "    scene_iteration_count = floor((sample_count + 1 - (time_steps + out_time_steps)) / batch_size)\n",
+    "    batch_samples = scene_count * scene_iteration_count\n",
+    "    return batch_samples"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 95,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "def shuffle_in_unison(*np_arrays):\n",
+    "    rng = np.random.get_state()\n",
+    "    for array in np_arrays:\n",
+    "        np.random.set_state(rng)\n",
+    "        np.random.shuffle(array)"
+   ]
+  },
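+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "A minimal, illustrative check of `shuffle_in_unison` (not part of the original pipeline): because every array is shuffled under the same RNG state, corresponding rows stay paired."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# Illustrative sanity check: b must remain 10 * a after the unison shuffle.\n",
+    "a = np.arange(10)\n",
+    "b = np.arange(10) * 10\n",
+    "shuffle_in_unison(a, b)\n",
+    "assert np.all(b == a * 10)"
+   ]
+  },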
+  {
+   "cell_type": "code",
+   "execution_count": 96,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "def generator_scene(scene_count, sample_count, input_shape, scene_iteration_count, encoded_train, batch_size, time_steps, out_time_steps, frames = 200):\n",
+    "    \n",
+    "    shuffle = True\n",
+    "    scene_count = len(encoded_train) // frames\n",
+    "    sample_count = frames\n",
+    "    scene_iteration_count = floor((sample_count + 1 - (time_steps + out_time_steps)) / batch_size)\n",
+    "    print(\"Scene Count: {}  Sample Count: {} In-Scene Iteration: {}\".format(scene_count, sample_count, scene_iteration_count))\n",
+    "    \n",
+    "    while True:\n",
+    "\n",
+    "        for i in range(scene_count):\n",
+    "            \n",
+    "            scene = encoded_train[(i * frames):((i + 1) * frames)]\n",
+    "            # print(\"Scene Count: {} => Scene {}: {}\".format(scene_count, i, len(scene)))\n",
+    "     \n",
+    "            for j in range(scene_iteration_count):\n",
+    "                encoded_data = scene\n",
+    "                start = j * batch_size\n",
+    "                end = sample_count\n",
+    "                \n",
+    "                data = encoded_data[start:end]\n",
+    "                content_shape = data[0].shape\n",
+    "                final_sample_count = data.shape[0] - time_steps - out_time_steps\n",
+    "                final_sample_count = min(batch_size, final_sample_count)\n",
+    "        \n",
+    "                X_data = np.zeros((final_sample_count, time_steps) + content_shape)\n",
+    "                y_data = np.zeros((final_sample_count, out_time_steps) + content_shape)\n",
+    "        \n",
+    "                curTS = 0\n",
+    "                \n",
+    "                for z in range(time_steps, final_sample_count + time_steps):\n",
+    "                    X_data[curTS] = np.array(data[curTS:z])\n",
+    "                    y_data[curTS] = np.array(data[z:z+out_time_steps])\n",
+    "                    curTS += 1\n",
+    "            \n",
+    "                X = X_data.reshape(*X_data.shape[0:2], -1)\n",
+    "                # print(\"Batch Size: {} -- X Shape: {} -> {}\".format(batch_size, input_shape, X.shape))\n",
+    "                Y = np.squeeze(y_data.reshape(y_data.shape[0], out_time_steps, -1))\n",
+    "                # print(\"Batch Size: {} -- Y Shape: {} -> {}\".format(batch_size, input_shape, Y.shape))\n",
+    "                \n",
+    "                shuffle_in_unison(X, Y)\n",
+    "        \n",
+    "                yield X, Y"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 97,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "def restructure_encoded_data(encoded_data, time_steps, time_steps_out, batch_size):\n",
+    "    \n",
+    "    data = encoded_data\n",
+    "    content_shape = data[0].shape\n",
+    "    final_sample_count = data.shape[0] - time_steps - out_time_steps\n",
+    "    final_sample_count = min(batch_size, final_sample_count)\n",
+    "        \n",
+    "    X_data = np.zeros((final_sample_count, time_steps) + content_shape)\n",
+    "    y_data = np.zeros((final_sample_count, out_time_steps) + content_shape)\n",
+    "        \n",
+    "    curTS = 0\n",
+    "            \n",
+    "    for z in range(time_steps, final_sample_count + time_steps):\n",
+    "        X_data[curTS] = np.array(data[curTS:z])\n",
+    "        y_data[curTS] = np.array(data[z:z+out_time_steps])\n",
+    "        curTS += 1\n",
+    "        \n",
+    "    return X_data, y_data"
+   ]
+  },
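+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "`restructure_encoded_data` mirrors the windowing inside `generator_scene` for a single scene. A quick, illustrative shape check (the `X_demo`/`y_demo` names are new here), assuming the hyperparameters above:"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# Expected with batch_size = 16, time_steps = 6, out_time_steps = 1:\n",
+    "# X_demo -> (16, 6, 256), y_demo -> (16, 1, 256)\n",
+    "X_demo, y_demo = restructure_encoded_data(encoded_vali[0:frames], time_steps, out_time_steps, batch_size)\n",
+    "print(X_demo.shape, y_demo.shape)"
+   ]
+  },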
+  {
+   "cell_type": "code",
+   "execution_count": 98,
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "Number of train batch samples per epoch: 5400\n"
+     ]
+    }
+   ],
+   "source": [
+    "train_gen_samples = generator_batch_samples(encoded_train, batch_size)\n",
+    "print (\"Number of train batch samples per epoch: {}\".format(train_gen_samples))\n",
+    "train_generator = generator_scene(scene_count, sample_count, input_shape, scene_iteration_count, encoded_train, batch_size, time_steps, out_time_steps)"
+   ]
+  },
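+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Illustrative only (not executed before training, since pulling a batch consumes it and triggers the generator's startup print): a single `next()` call shows the batch shapes the LSTM will see."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# With batch_size = 16, time_steps = 6, out_time_steps = 1 the expected shapes are\n",
+    "# X_batch: (16, 6, 256) and Y_batch: (16, 256) (the out_time_steps axis is squeezed away).\n",
+    "X_batch, Y_batch = next(train_generator)\n",
+    "print(X_batch.shape, Y_batch.shape)"
+   ]
+  },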
+  {
+   "cell_type": "code",
+   "execution_count": 100,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "scene_count_vali = scene_count = len(encoded_vali) // frames\n",
+    "sample_count = frames\n",
+    "scene_iteration_count = floor((sample_count + 1 - (time_steps + out_time_steps)) / batch_size)\n",
+    "batch_samples_vali = scene_count_vali * scene_iteration_count"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 101,
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "Number of validation batch samples per epoch: 600\n"
+     ]
+    }
+   ],
+   "source": [
+    "vali_gen_samples = generator_batch_samples(encoded_vali, batch_size)\n",
+    "print (\"Number of validation batch samples per epoch: {}\".format(vali_gen_samples))\n",
+    "vali_generator = generator_scene(scene_count, sample_count, input_shape, scene_iteration_count, encoded_vali, batch_size, time_steps, out_time_steps)"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 102,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "epochs = 10"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 103,
+   "metadata": {
+    "scrolled": true
+   },
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "Epoch 1/10\n",
+      "Scene Count: 450  Sample Count: 200 In-Scene Iteration: 12Scene Count: 50  Sample Count: 200 In-Scene Iteration: 12\n",
+      "\n",
+      "5400/5400 [==============================] - 157s 29ms/step - loss: 0.6134 - mean_squared_error: 0.6407 - mean_absolute_error: 0.6134 - val_loss: 0.4413 - val_mean_squared_error: 0.4605 - val_mean_absolute_error: 0.5177\n",
+      "Epoch 2/10\n",
+      "5400/5400 [==============================] - 155s 29ms/step - loss: 0.5067 - mean_squared_error: 0.4453 - mean_absolute_error: 0.5067 - val_loss: 0.3921 - val_mean_squared_error: 0.3662 - val_mean_absolute_error: 0.4587\n",
+      "Epoch 3/10\n",
+      "5400/5400 [==============================] - 155s 29ms/step - loss: 0.4687 - mean_squared_error: 0.3857 - mean_absolute_error: 0.4687 - val_loss: 0.3690 - val_mean_squared_error: 0.3302 - val_mean_absolute_error: 0.4329\n",
+      "Epoch 4/10\n",
+      "5400/5400 [==============================] - 153s 28ms/step - loss: 0.4491 - mean_squared_error: 0.3582 - mean_absolute_error: 0.4491 - val_loss: 0.3559 - val_mean_squared_error: 0.3110 - val_mean_absolute_error: 0.4182\n",
+      "Epoch 5/10\n",
+      "5400/5400 [==============================] - 155s 29ms/step - loss: 0.4367 - mean_squared_error: 0.3418 - mean_absolute_error: 0.4367 - val_loss: 0.3474 - val_mean_squared_error: 0.2987 - val_mean_absolute_error: 0.4082\n",
+      "Epoch 6/10\n",
+      "5400/5400 [==============================] - 156s 29ms/step - loss: 0.4278 - mean_squared_error: 0.3305 - mean_absolute_error: 0.4278 - val_loss: 0.3408 - val_mean_squared_error: 0.2901 - val_mean_absolute_error: 0.4010\n",
+      "Epoch 7/10\n",
+      "5400/5400 [==============================] - 155s 29ms/step - loss: 0.4211 - mean_squared_error: 0.3223 - mean_absolute_error: 0.4211 - val_loss: 0.3362 - val_mean_squared_error: 0.2835 - val_mean_absolute_error: 0.3953\n",
+      "Epoch 8/10\n",
+      "5400/5400 [==============================] - 154s 29ms/step - loss: 0.4157 - mean_squared_error: 0.3159 - mean_absolute_error: 0.4157 - val_loss: 0.3333 - val_mean_squared_error: 0.2784 - val_mean_absolute_error: 0.3908\n",
+      "Epoch 9/10\n",
+      "5400/5400 [==============================] - 155s 29ms/step - loss: 0.4113 - mean_squared_error: 0.3108 - mean_absolute_error: 0.4113 - val_loss: 0.3309 - val_mean_squared_error: 0.2743 - val_mean_absolute_error: 0.3870\n",
+      "Epoch 10/10\n",
+      "5400/5400 [==============================] - 155s 29ms/step - loss: 0.4076 - mean_squared_error: 0.3065 - mean_absolute_error: 0.4076 - val_loss: 0.3287 - val_mean_squared_error: 0.2708 - val_mean_absolute_error: 0.3838\n"
+     ]
+    }
+   ],
+   "source": [
+    "if encoded_train is None:\n",
+    "    lstm_train = lstm.fit(X,\n",
+    "                          Y,\n",
+    "                          nb_epoch = epochs,\n",
+    "                          batch_size = batch_size,\n",
+    "                          shuffle = True)\n",
+    "else:\n",
+    "    lstm_train = lstm.fit_generator(generator = train_generator,\n",
+    "                                    steps_per_epoch = train_gen_samples,\n",
+    "                                    epochs = epochs,\n",
+    "                                    verbose = 1,\n",
+    "                                    callbacks = None,\n",
+    "                                    validation_data = vali_generator,\n",
+    "                                    validation_steps = vali_gen_samples,\n",
+    "                                    class_weight = None,\n",
+    "                                    workers = 1)"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 104,
+   "metadata": {},
+   "outputs": [
+    {
+     "data": {
+      "image/png": "iVBORw0KGgoAAAANSUhEUgAAAXoAAAD4CAYAAADiry33AAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4xLjEsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy8QZhcZAAAgAElEQVR4nO3deZhU1Z3/8fe3et+hF2hoaLqbHTfAFlyQuCQRowGNxgXjksQQE9Ekk8wkzsxvMmN+v3kyWyaZaDKicUmUoFGjGE3MpgKyNiIqIArN1tDQTTfQG72f3x+3oKubBhoouNVVn9fz1FNVt+6t+nY98Dmnzr33XHPOISIi0SvgdwEiInJ6KehFRKKcgl5EJMop6EVEopyCXkQkysX7XUBPubm5rqioyO8yRET6ldWrV+91zuX19lrEBX1RURFlZWV+lyEi0q+Y2bajvaahGxGRKKegFxGJcgp6EZEop6AXEYlyCnoRkSinoBcRiXIKehGRKBc1QX/gYBs/+uNGNlXV+12KiEhEiZqg7+h0PLKonEcXbfG7FBGRiBI1QZ+dlsjnS4fx2zU7qapr9rscEZGIETVBD3D3tBLaOjt5culWv0sREYkYURX0RblpzDgrn6eXb6Ohpd3vckREIkJUBT3AnOkl1DW38+yqHX6XIiISEaIu6CcVDmRKUTaPL9lCW0en3+WIiPiuT0FvZjPMbKOZbTKz7x1lnZvMbL2ZrTOz+SHL7zSzj4O3O8NV+LHMmV7Czv0Hee39yjPxcSIiEe24QW9mccDDwNXABOBWM5vQY53RwAPAJc65s4BvBpdnA98HpgJTgO+b2cCw/gW9uGLcIEbmpTFvUTnOudP9cSIiEa0vPfopwCbnXLlzrhVYAMzqsc5XgIedc/sAnHNVweVXAX9yztUGX/sTMCM8pR9dIGDMmV7Cul11LN1cc7o/TkQkovUl6AuA0D2bFcFlocYAY8zsbTNbbmYzTmDb0+K6SQXkZSTxyKLyM/FxIiIRK1w7Y+OB0cBlwK3Ao2Y2oK8bm9kcMyszs7Lq6uqwFJQUH8ddFxex6KNqNlTWheU9RUT6o74E/U5geMjzYcFloSqAhc65NufcFuAjvODvy7Y45+Y550qdc6V5eb1e2/akfGHqCFIT43hUvXoRiWF9CfpVwGgzKzazROAWYGGPdV7C681jZrl4QznlwOvAp81sYHAn7KeDy86IrNQEbr5gOAvX7mLX/oNn6mNFRCLKcYPeOdcOzMUL6A3Ac865dWb2oJnNDK72OlBjZuuBN4C/dc7VOOdqgR/gNRargAeDy86YL08rxgFPvK3JzkQkNlmkHX5YWlrqysrKwvqe9/96DX/9sIqlD1xBZnJCWN9bRCQSmNlq51xpb69F3ZmxvZkzvYSGlnbmr9judykiImdcTAT92QVZXDIqhyfe3kJru6ZFEJHYEhNBDzBn+kj21LWwcO0uv0sRETmjYibop4/OZVx+Bo9qWgQRiTExE/Rm3rQIG/fU8+ZH4TkpS0SkP4iZoAe49tyh5GcmM+8tnUAlIrEjpoI+MT7Al6YVsay8hvcrDvhdjojIGRFTQQ9w65RCMpLieWTRZr9LERE5I2Iu6DOSE5g9tZDX3q9kR22T3+WIiJx2MRf0AF+8pJi4gPGLJZoWQUSiX0wGfX5WMjPPK+DZVTvY19jqdzkiIqdVTAY9eNMiHGzr4Onl2/wuRUTktIrZoB+bn8FlY/N4atlWmts6/C5HROS0idmgB69Xv7ehld+uOeJaKCIiUSOmg/6ikhzOKcji0cXldHZqWgQRiU4xHfRmxleml1Be3cifN+zxuxwRkdMipoMe4DNn5zNsYArzdF1ZEYlSMR/08XEBvjytmLJt+1i9bZ/f5YiIhF3MBz3ATaXDyUpJYJ6mRRCRKKSgB9KS4rn9whH8cf0eyqsb/C5HRCSsFPRBd15cREJcgMc0LYKIRBkFfVBeRhI3TC7g+dUV7G1o8bscEZGwUdCHuPvSEto6Ovnl0q1+lyIiEjYK+hAj89L55PjB/HL5Ng62aloEEYkOCvoe5kwvYX9TG79ZvcPvUkREwkJB30PpiIFMKhzAY4u30KFpEUQkCvQp6M1shpltNLNNZva9Xl6/y8yqzezd4O3ukNc6QpYvDGfxp4OZ8dXpJWyvbeIPH+z2uxwRkVMWf7wVzCwOeBj4FFABrDKzhc659T1WfdY5N7eXtzjonJt46qWeOZ+akE9RTirzFm3mM+fkY2Z+lyQictL60qOfAmxyzpU751qBBcCs01uWv+ICxt2XlrC24gArttT6XY6IyCnpS9AXAKF7JiuCy3q6wczeM7PnzWx4yPJkMyszs+Vmdl1vH2Bmc4LrlFVXV/e9+tPoxvOHkZOWqMnORKTfC9fO2FeAIufcucCfgKdCXhvhnCsFZgM/NrORPTd2zs1zzpU650rz8vLCVNKpSU6I446Livjrh1V8vKfe73JERE5aX4J+JxDaQx8WXHaYc67GOXfodNLHgPNDXtsZvC8H3gQmnUK9Z9TtF40gOSGgXr2I9Gt9CfpVwGgzKzazROAWoNvRM2Y2JOTpTGBDcPlAM0sKPs4FLgF67sSNWNlpidxUOpyX3t3Jnrpmv8sRETkpxw1651w7MBd4HS/An3POrTOzB81sZnC1+81snZmtBe4H7gouHw+UBZe/Afywl6N1ItqXpxXT0el4UtMiiEg/Zc5F1klBpaWlrqyszO8yuvn6M6tZ/PFelj1wJelJxz0iVUTkjDOz1cH9oUfQmbF9MGf6SOqb21mwcrvfpYiInDAFfR9MHD6AKcXZPL5kC20dnX6XIyJyQhT0ffTV6SXsOtDMq+9V+l2KiMgJUdD30eVjBzFqUDqPLCon0vZriIgci4K+jwIBY86lJWyorGPJpr1+lyMi0mcK+hMwa9JQBmUk6QQqEelXFPQnICk+jrsuKWLxx3tZt+uA3+WIiPSJgv4E3TZ1BGmJcTyqXr2I9BMK+hOUlZLAzRcU8sp7lezaf9DvckREjktBfxK+NK0IgMeXbPG3EBGRPlDQn4RhA1O59twh/Hrldg4cbPO7HBGRY1LQn6Q500tobO1g/gpNiyAikU1Bf5LOGprFtFG5PPH2FlraO/wuR0TkqBT0p2DO9BKq6lt4+d1dfpciInJUCvpTcOnoXMYPyeTRReV0dmpaBBGJTAr6U2BmzJlezMdVDbz5UZXf5YiI9EpBf4quPXcoQ7OSeeQtnUAlIpFJQX+KEuICfGlaMSu21LJ2x36/yxEROYKCPgxuvmA4GUnxmuxMRCKSgj4MMpITmH1hIb//oJLtNU1+lyMi0o2CPky+dEkxcQHjF0vUqxeRyKKgD5PBmcnMmljAc2UV7Gts9bscEZHDFPRhNGd6CQfbOvjV8m1+lyIicpiCPozGDM7g8rF5PLV0K40t7X6XIyICKOjD7t7LR1Hb1MoNP1/K1r2NfpcjIqKgD7fSomye/OIUdtc189mHlvDn9Xv8LklEYlyfgt7MZpjZRjPbZGbf6+X1u8
ys2szeDd7uDnntTjP7OHi7M5zFR6pPjMnjlbnTGJGTyt2/LOM/X99Ih+bCERGfHDfozSwOeBi4GpgA3GpmE3pZ9Vnn3MTg7bHgttnA94GpwBTg+2Y2MGzVR7Dh2ak8f8/F3FQ6jIfe2MRdT6ykVkfjiIgP+tKjnwJscs6VO+dagQXArD6+/1XAn5xztc65fcCfgBknV2r/k5wQx7/feB4//Nw5rCiv5bM/XcJ7FZomQUTOrL4EfQGwI+R5RXBZTzeY2Xtm9ryZDT+Rbc1sjpmVmVlZdXV1H0vvP26ZUshv7rkIgBt/vowFK3VVKhE5c8K1M/YVoMg5dy5er/2pE9nYOTfPOVfqnCvNy8sLU0mR5bzhA3jlvmlMLcnmey++z3eff4/mNl2ZSkROv74E/U5geMjzYcFlhznnapxzLcGnjwHn93XbWJKdlsiTX5zC3MtH8WzZDj7/v8vYUau5cUTk9OpL0K8CRptZsZklArcAC0NXMLMhIU9nAhuCj18HPm1mA4M7YT8dXBaz4gLGd64ay6N3lLJ1byOffWgJb30UfcNVIhI5jhv0zrl2YC5eQG8AnnPOrTOzB81sZnC1+81snZmtBe4H7gpuWwv8AK+xWAU8GFwW8z41YTAL75tGfmYydz2xkp/+5WNdjlBETgtzLrLCpbS01JWVlfldxhnT1NrOAy++z8vv7uLKcYP40c0TyUpJ8LssEelnzGy1c660t9d0ZqzPUhPj+fHNE/mXmWfx1kfVzHxoCRsq6/wuS0SiiII+ApgZd15cxLNfvZDmtg6u/9nb/HZNhd9liUiUUNBHkPNHZPPKfdM4b9gAvvXsWv7p5Q9obe/0uywR6ecU9BFmUEYyz9w9la9cWswvl23j5nnL2H2g2e+yRKQfU9BHoPi4AP9wzQQenj2Zjbvrufani1m2ucbvskSkn1LQR7Brzh3CwrmXkJWSwBd+sYJ5izYTaUdJiUjkU9BHuFGDMnh57jSuOmsw//rah3z9mXdo0NWrROQEKOj7gfSkeB6ePZm//8w4Xl+3m1kPLWFTVb3fZYlIP6Gg7yfMjDnTR/L03VM5cLCNWQ+9zavvVfpdloj0Awr6fubikbn87r5LGZufwb3z3+H/vbqe9g4dgikiR6eg74fys5JZMOci7rhoBI8u3sJtj62gur7l+BuKSExS0PdTifEBHpx1Nj+66TzWVuzn2p8uZvU2zRcnIkdS0Pdzn5s8jN9+/RKSE+K4+ZHlPLV0qw7BFJFuFPRRYPyQTBbOncZlY/P4/sJ1fOvZd2lq1SGYIuJR0EeJrJQE5t1eync+PYaX1+7icz9byta9jX6XJSIRQEEfRQIBY+4Vo3nyi1PYXdfM1T9ZzHeff4+1O/ZrOEckhunCI1GqYl8TP/3LJhau3cXBtg7OGprJ7KmFzJpYQHpSvN/liUiYHevCIwr6KFfX3MbLa3byzIrtfLi7nrTEOGZOLOC2qYWcXZDld3kiEiYKesE5x5od+5m/Yju/e28XzW2dnDssi9lTCvnseUNJUy9fpF9T0Es3Bw628dt3Kpi/cjsf7WkgPSme6yYN5dYphZw1VL18kf5IQS+9cs6xets+5q/czqvvVdLS3sl5wwdw25RCrj1vCKmJ6uWL9BcKejmu/U2tvPjOTuav3M6mqgYykuO5flIBs6cWMi4/0+/yROQ4FPTSZ845Vm3dx/wV23jtg920tncyuXAAs6eO4Npzh5CcEOd3iSLSCwW9nJR9ja288E4F81dsp3xvI5nJ8Xxu8jBum1rI6MEZfpcnIiEU9HJKnHMsL69l/srt/OGDSto6HBcUDWT21EKuPlu9fJFIoKCXsKlpaOGFdyr49codbNnbyIDUBG6YPIxbpxQyalC63+WJxKxTDnozmwH8BIgDHnPO/fAo690APA9c4JwrM7MiYAOwMbjKcufcPcf6LAV9/9DZ6VheXsMzK7fzx3W7aetwTCnO5raphcw4O5+kePXyRc6kYwX9cY+fM7M44GHgU0AFsMrMFjrn1vdYLwP4BrCix1tsds5NPKnKJWIFAsbFo3K5eFQu1fUtPL+6gl+v3M43FrzLwNQEbjzf6+WX5KmXL+K3vhwoPQXY5JwrBzCzBcAsYH2P9X4A/Bvwt2GtUCJeXkYSX7tsJF+dXsLbm/cyf8V2nnh7K48u3sJFJTnccP4wpo3KJT8r2e9SRWJSX4K+ANgR8rwCmBq6gplNBoY75141s55BX2xma4A64B+dc4t7foCZzQHmABQWFp5A+RJJAgHj0tF5XDo6j6r6Zn5T5vXyv/ObtQAU56Zx0cgcLirJ4cKSHPIyknyuWCQ2nPKpj2YWAH4E3NXLy5VAoXOuxszOB14ys7Occ3WhKznn5gHzwBujP9WaxH+DMpK59/JRfO0TI1lfWcfy8hqWba5h4bu7mL9iOwBjBqdzUUkOF43MYWpxDgPTEn2uWiQ69SXodwLDQ54PCy47JAM4G3jTzADygYVmNtM5Vwa0ADjnVpvZZmAMoL2tMSIQMM4uyOLsgizuvrSE9o5OPthVx7LNNSwrr+G5sgqeWrYNMxifn3m4xz+lJJvM5AS/yxeJCsc96sbM4oGPgCvxAn4VMNs5t+4o678JfCd41E0eUOuc6zCzEmAxcI5z7qhXsdZRN7Gltb2T9yr2s2xzDUs317B6+z5a2zsJGJxTkMWFweC/oChbM2yKHMMpHXXjnGs3s7nA63iHVz7unFtnZg8CZc65hcfYfDrwoJm1AZ3APccK+VO2bysMGAHeLwvpBxLjA5QWZVNalM19V46mua2DNdv3s2zzXpaV1/D4ki088lY58QHjvOEDDg/1nD9ioE7UEumj6Dlhau/H8MgnYOpX4cp/UthHiabWdlZv28fSzd4Y//s7D9DR6UiMCzCpcMDhoZ6JhQN07L7EtNg4M7azE179Fqx+Eqb/LVz+Dwr7KFTf3MaqrbWHx/jX7arDOUhOCFA6ItsL/pE5nFOQRUKcLoksseOUhm76jUAArvlvcJ2w6D/A4uDyB/yuSsIsIzmBK8YN5opxgwFveuUVW7zgX15ew3+87p2EnZYYxwXF2VxUksPFI3OZMDSTuIAafolN0RP04IX9tT/xevdv/RAsAJd91++q5DQakJrIVWflc9VZ+YA3F8/y8lqWle9l6eYa3txYDUBmcjwTCwcyPj+D8UMyGT8kk5K8NPX6JSZEV9CDF/Yz/8fr2b/5r97z6TpZN1bkpCdxzblDuObcIQDsqWtmeXkNSzd54/tPbK6htaMTgMS4AKMGpQeD32sAxuVnkJOuE7kkukRf0AME4mDWQ17Y//X/esM4l/6N31WJDwZnJjNrYgGzJhYA0NbRyebqBj6srGdDZR3rK+tY9HE1L7xTcXibQRlJXugPyWBCsPdfnKvev/Rf0Rn04IX9dT8D1wF/+Rfv+SXf8Lsq8VlCXIBx+ZmMy8/kukkFh5fvbWhhQ2Xd4QZgw+56lm7eS1uHd7BCYlyA0YPTD/f6JwzJZNyQTLJ1Nq/0A9Eb9BAM+//1evZ/+idvzP7i+/yuSiJQbnrS4Xl6DmltD/b+d
9exIdgAvLmxmudXd/X+B2cGe//53vDPhGDvP169f4kg0R30AHHxcP08L+z/+I9e2F90r99VST+QGB84vOP2+kldy6vrW4Lh39UALPl4L+2d7vB2YwanMz7f6/WPH5LB+PxMzeUjvon+oAcv7D/3KHR2wOt/743ZX3jM65+IHFVeRhJ5GUf2/jdVNXRrAP76YRW/Cen952cmM35IBiV56RTlpFKYk8aI7FQKBqZo/F9Oq9gIeoC4BLjxcfjNXfCH73rDOlO+4ndVEiUS4wNMGJrJhKGZ3ZZX1Td3jftX1vHh7nqWldfQ3NZ5eJ24gFEwIIUROaneLTuNwpxUinLSKMxOJSVRZ/zKqYmdoIdg2D/hhf1r3/HOnL3gbr+rkig2KCOZQRnJTB/T1fvv7HRU1bewraaRbbVN3n1NE9trm1j47i7qmtt7vEeSF/o5qYzITmVErvdLYEROKgNSNRwkxxdbQQ8QnwiffxKeuwNe/bY3jFP6Rb+rkhgSCBj5WcnkZyUztSTniNf3N7WyrabJawT2eo3B9pomFn1UTVV9S7d1s1ISGJGTSmF2arfGoCg3jUEZSZimARFiMejBC/ubnoJnb4fffdPbQXv+nX5XJQJ4Z/sOSE3kvOEDjnjtYGsH22ub2FrTyPaaJrbVer8G3qs4wO8/2E1HZ9fcVckJAQqzUxmR0/ULYEROGiNyUikYkKIjg2JIbAY9QHwS3PRLePY2eOUb3pj9pC/4XZXIMaUkxjE2P4Ox+RlHvNbW0cmu/QfZWtPE9uBw0NYab2ho8cfVR+wXGJyRRH5WMkOyUoL33R8PykhSYxAlYjfoARKS4eZnYMGt8PJcr2c/cbbfVYmclIS4QLDHngbkdXut536B7TVNVB5oZnfdQTZU1vGXD/d0awgAAubtYzgU/KGNwaHngzOTdcRQPxDbQQ9e2N8yH359K7z0dS/sz7vF76pEwup4+wWcc9QdbKey7iCVB5qp3N/M7gMHg41BMx9XNbDoo2oaWzu6bWfmnWw29HBDkHJEgzAoM0nXCvCZgh4gISUY9jfDS1/zdtCe+3m/qxI5Y8yMrNQEslITGJef2es6zjnqW9rZfaDZawAOHGTX/mbveV0zW/Y2snRzDfU9jhoCyE1PPNwQhP46GJzpDRHlpSeTmRKvnceniYL+kMRUuPVZmH8T/HaO11U550a/qxKJGGZGZnICmckJjBl85D6CQxpa2g//GvAahGYqg8931DaxckstBw62HbFdYlyAnPRE8jKSyE1PIi89idyMRO9xcNmhx5nJahROhII+VGIqzH4Wnvk8vDjHG8Y5+3N+VyXSr6QnxTNqUAajBh29MWhq9X4Z7D7QTHVDC9X1LextaGVv8PGeumY+2HmAmsbWbkcSHZIYFyA3tFE43BAkkpeRTG56IrkZ3vKMJDUKCvqeEtNg9nNe2L9wtxf2Z13nd1UiUSU1MZ6SvHRK8tKPuV5np2P/wbZgQ9ByuCGobmhhb30r1Q0tVB5o5v1jNQrxgeCvgyTyQhqHng3EwNREslISCEThlcgU9L1JSofbnoOnb4QXvuwdejn+s35XJRJzAgEjOy2R7LRExnL0XwjgNQr7mlrZ29Daa8NQXd/Czv3NrK04QE1DC720CQTMOwltYJoX/N4tgew079yG7LSE4L23/FDjEOmHoUbPxcFPh+Y6ePoG2PWOd8z9uGv8rkhEwqDjcKPgNQA1Da3sa2plX2Mr+5raqG1qZX9TK7WNbcH7VlraO4/6flkpCV7w92ggDj0PbSAGBBuIcB+WeqyLgyvoj6f5APzqc1C5Fm7+FYy92u+KRMQHB1s7qD3cGHgNwuHHwQZiX7BR2N/URm1jKwfbOo76fhlJ8cGGoKtBGJufwT2fGHlS9R0r6DV0czzJWXD7i/DL67wpE255BsZc5XdVInKGpSTGUZCYQsGAlD5v09zWEWwI2oKNQ8ivhsbgr4bg401VDexraj3poD8WBX1fJGfB7b+FX86CZ7/gHXM/+lN+VyUiES45IS547kDfG4fToU+DRGY2w8w2mtkmM/veMda7wcycmZWGLHsguN1GM+u/XeGUAXDHSzBoPCy4DTb92e+KRET65LhBb2ZxwMPA1cAE4FYzm9DLehnAN4AVIcsmALcAZwEzgJ8F369/ShkIt78EeWO8sN/8V78rEhE5rr706KcAm5xz5c65VmABMKuX9X4A/BvQHLJsFrDAOdfinNsCbAq+X/+Vmg13LIScUd78OOVv+l2RiMgx9SXoC4AdIc8rgssOM7PJwHDn3Ksnum1w+zlmVmZmZdXV1X0q3Fep2XDHy5BdAvNvgS2L/K5IROSoTvlATjMLAD8Cvn2y7+Gcm+ecK3XOlebl5R1/g0iQluv17AcWwfybYesSvysSEelVX4J+JzA85Pmw4LJDMoCzgTfNbCtwIbAwuEP2eNv2b+l5cOdCyBoOz9wE25b6XZGIyBH6EvSrgNFmVmxmiXg7VxceetE5d8A5l+ucK3LOFQHLgZnOubLgereYWZKZFQOjgZVh/yv8lD4I7nwFsgq8KRO2L/e7IhGRbo4b9M65dmAu8DqwAXjOObfOzB40s5nH2XYd8BywHvgDcK9z7uinivVXGYO9sM8c4k2ZsCO62jIR6d80BUI41VXCk9dAQxVM/zaUfhmSe7+Ig4hIOB1rCoTInnKtv8kcAnf9DoZfAH/+Z/jvs+EvP4DGvX5XJiIxTEEfbplDvekSvvIGlHwCFv+XF/iv/R3s33H87UVEwkxBf7oUTPZmu7x3pXeVqrJfwP9M9C5AXv2R39WJSAxR0J9ueWPgup/B/e96Y/YfvAgPT/Fmwty1xu/qRCQGKOjPlAHD4TP/Dt/6AC79NpS/BfMu86Y/3rIYImynuIhEDwX9mZaWC1f+Hy/wP/nPsGcdPHUt/OJT8OFr0Hn0q9iIiJwMBb1fkjNh2rfgm+/BNf8FDXtgwa3w84vhveego93vCkUkSijo/ZaQAhfcDfetgevnecte/Ar8dDKsegzamo+9vYjIcSjoI0VcPJx3M3xtqXcFq7Q8ePXb8JNzYcmPvQuVi4icBAV9pAkEYNw1cPefvWkVBk2AP38ffqyTr0Tk5CjoI5UZFE/3Ll/4lTe8x4dOvvr9d+FAhd8Vikg/oaDvDwomw81Pw70r4KzrvbH7n5wHL90Lez/2uzoRiXAK+v4kbyxc/3O4f03w5KsX4KEL4Lk7YNe7flcnIhFKQd8fDSj0Tr765vtw6d/A5jdh3ifgV9d7V7rSyVciEkJB35+l58GV/wTfet87+Wr3+940yb/4NGz8vU6+EhFA89FHl7aDsOZpWPo/sH875I6BMVdByeUw4mLvmH0RiUrHmo9eQR+NOtq8ydPW/Ap2rICOVohLgsILYeQVMPJyGHyOdyiniEQFBX0sa22Ebctg81+h/A2oWu8tT8315ssvudwL/qxh/tYpIqfkWEEff6aLkTMsMQ1Gf9K7AdTvhvI3YfMbXvB/8IK3PHdMV+gXTYOkDN9KFpHwUo8+ljnn9fAPhf7Wt6H9IATiYdgUL/RLLoehk7wpGkQk
YmnoRvqmvQW2L/dCf/MbULkWcJCUBcWXesE/8grILvG7UhHpQUEvJ6exBra8FRzffxMOBK95O2BEV2+/eDqkZvtapogo6CUcnIOazV29/S2LoLUeLOAN7Rwa3x82BeIT/a5WJOYo6CX8Otpg5+qu8f2KMnAdkJAGRZd4Qzwll3vTNpj5Xa1I1NNRNxJ+cQnecfmFF8LlD0DzAe/at4d6/B//0VsvYwiMuAQGT/CmXB40AbKG6xh+kTNIQS/hkZwF46/1buCdmbv5DW98f8dK+OD5rnUT0yFvXEj4j/fu0wf5U7tIlOvT0I2ZzQB+AsQBjznnftjj9XuAe4EOoAGY45xbb2ZFwAZgY3DV5c65e471WRq6iVLNdVD9oXc4Z9UG76LoVeuhqaZrndTcrtA/1AjkjfOurysix3RKQzdmFgc8DHwKqABWmdlC59z6kNXmO+f+N7j+TOBHwIzga5udcxNP5SCWHRsAAAfpSURBVA+QKJCcCcOneLdQDdVQtc4L/6r1sGc9vPsMtDZ0rZM1vHvPf9B47wSvhOQz+zeI9FN9GbqZAmxyzpUDmNkCYBZwOOidc6EXNE0DImsPr0Su9DxIvwxKLuta1tnpHcpZtb7rF0DVBm8YqLPNW8fiIGdkSPgHb9nFEIjz4Q8RiVx9CfoCYEfI8wpgas+VzOxe4G+AROCKkJeKzWwNUAf8o3NucS/bzgHmABQWFva5eIlSgQAMHOHdxl7dtbyjzTvEM7QB2P0+rF/I4b5FfLJ3pE+3XwATIHOojv6RmHXcMXozuxGY4Zy7O/j8dmCqc27uUdafDVzlnLvTzJKAdOdcjZmdD7wEnNXjF0A3GqOXE9baCNUbu4Z/DjUC9ZVd6yRlQe5or8c/sLj7ffpgNQLS753q4ZU7geEhz4cFlx3NAuDnAM65FqAl+Hi1mW0GxgBKcgmfxDTvuroFk7svb6rtHv41m2D7Cm8iNxdyUZaEVBhYFNIAFHU1BAMKvUNJRfqxvgT9KmC0mRXjBfwtwOzQFcxstHPu0FWqrwE+Di7PA2qdcx1mVgKMBsrDVbzIMaVmeydvFV3SfXl7q3f4574tULul6752M2z+C7Q3d61rcd4Uzr39EhhYDEnpZ/ZvEjkJxw1651y7mc0FXsc7vPJx59w6M3sQKHPOLQTmmtkngTZgH3BncPPpwINm1gZ0Avc452pPxx8i0mfxiZA7yrv11NkJDbu7NwCH7te/BAf3dV8/bdDRG4G0XA0JSUTQFAgiJ+Lg/pBfAOXBx1u9+7pddDvgLDE9GPxF3RuAzALv5LDkLDUEEjaaAkEkXFIGQMokbyK3ntqaYf+2I38NVH0IH73uXdIxVHyyF/jpg4O3QZCe38uyQRCfdGb+PolKCnqRcEkIHtqZN/bI1zo7vB7/vi3eVb4a9gRvVd59bTlsWwoHjzKymTIwJPgH97iFLEsZqHmE5AgKepEzIRAHA4Z7t2Npb4XG6u6NQLdbFVSsgvo93tXAjvichK5fAcdsGAZ5Rxtp6CgmKOhFIkl8ImQVeLdjcQ5a6ns0Bj0ahrqdsGuN13CEHk56SCDBm5oiKTPkPsu7XnC3ZaH3Wd2fq7HoFxT0Iv2RmRe0yZm9Hz0UqrPDmzyufnf3xqD5gNdYtNR5k8611Hn7FUKfH282E4vzGobeGoHe7ntblpiu4abTTEEvEu0CcV3DNSeis9ObXK6lzmsQDoV/84HujUG3+3qoq4CqkGWu4/iflZDqBX5SuncCXGJGyON0rzE5/Djdu+/5ODEt+DxDF7PvQd+GiPQuEOj61XCynIO2piMbhdDHrY1eA9HaEHzc4D1urPZ+YbQ2dC3r63yJcUlHaRDSgo1Gj4YhMa2rIUlMg8TUkMdpXkPUjyfLU9CLyOlj1hWWDDm19zrUaBwK/cMNQKN3/eLDjxuCDUdj90aieT8cqOi+bV9+bRwSnxLSIKR1bxwSUns0Fqm9vNZLI3KG9nEo6EWkf+jWaAw+9fdzzpvu4tAviramrsahNeRxt+WNwdeCj9uaoHFv99faGk/kj+reSBRMhhsfP/W/rQcFvYjEJjNISPFuabnhe9/OTq8BaAtpEHre2npZ1tp4/MNvT5KCXkQknAIBb+w/KR2IjOsg65gmEZEop6AXEYlyCnoRkSinoBcRiXIKehGRKKegFxGJcgp6EZEop6AXEYlyEXfNWDOrBradwlvkAnvDVE5/p++iO30f3en76BIN38UI51xeby9EXNCfKjMrO9oFcmONvovu9H10p++jS7R/Fxq6ERGJcgp6EZEoF41BP8/vAiKIvovu9H10p++jS1R/F1E3Ri8iIt1FY49eRERCKOhFRKJc1AS9mc0ws41mtsnMvud3PX4ys+Fm9oaZrTezdWb2Db9r8puZxZnZGjP7nd+1+M3MBpjZ82b2oZltMLOL/K7JT2b2reD/kw/M7Ndmlux3TeEWFUFvZnHAw8DVwATgVjOb4G9VvmoHvu2cmwBcCNwb498HwDeADX4XESF+AvzBOTcOOI8Y/l7MrAC4Hyh1zp0NxAG3+FtV+EVF0ANTgE3OuXLnXCuwAJjlc02+cc5VOufeCT6ux/uPXOBvVf4xs2HANcBjftfiNzPLAqYDvwBwzrU65/b7W5Xv4oEUM4sHUoFdPtcTdtES9AXAjpDnFcRwsIUysyJgErDC30p89WPg74BOvwuJAMVANfBEcCjrMTNL87sovzjndgL/CWwHKoEDzrk/+ltV+EVL0EsvzCwdeAH4pnOuzu96/GBm1wJVzrnVftcSIeKBycDPnXOTgEYgZvdpmdlAvF//xcBQIM3MvuBvVeEXLUG/Exge8nxYcFnMMrMEvJB/xjn3ot/1+OgSYKaZbcUb0rvCzJ72tyRfVQAVzrlDv/Cexwv+WPVJYItzrto51wa8CFzsc01hFy1BvwoYbWbFZpaItzNloc81+cbMDG8MdoNz7kd+1+Mn59wDzrlhzrkivH8Xf3XORV2Pra+cc7uBHWY2NrjoSmC9jyX5bTtwoZmlBv/fXEkU7pyO97uAcHDOtZvZXOB1vL3mjzvn1vlclp8uAW4H3jezd4PL/t4595qPNUnkuA94JtgpKge+6HM9vnHOrTCz54F38I5WW0MUToegKRBERKJctAzdiIjIUSjoRUSinIJeRCTKKehFRKKcgl5EJMop6EVEopyCXkQkyv1/TneZdnBxFKMAAAAASUVORK5CYII=\n",
+      "text/plain": [
+       "<Figure size 432x288 with 1 Axes>"
+      ]
+     },
+     "metadata": {
+      "needs_background": "light"
+     },
+     "output_type": "display_data"
+    }
+   ],
+   "source": [
+    "plot_epochs = range(epochs)\n",
+    "plot_loss = lstm_train.history[\"loss\"]\n",
+    "plot_val_loss = lstm_train.history[\"val_loss\"]\n",
+    "\n",
+    "plt.plot(plot_epochs, plot_loss, plot_val_loss)\n",
+    "plt.show()"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "## Predicción LSTM"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "scene_vali = encoded_vali[0:6]\n",
+    "time_frames = scene_vali.reshape(1, 6, 1024)\n",
+    "frame_prediction = lstm.predict(time_frames, batch_size = 1)"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "# Output del Decoder"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 112,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "prediction_vali = vali_data[scene_vali_num * frames:scene_vali_num * frames + frames, :, :, :]"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 116,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "frame_prediction=[]\n",
+    "\n",
+    "for i in range(frames-5):\n",
+    "    scene_vali = encoded_vali[scene_vali_num * frames + i: scene_vali_num * frames + (i+6)]\n",
+    "    time_frames = scene_vali.reshape(1, 6, encoded_size)\n",
+    "    prediction = lstm.predict(time_frames, batch_size = 1)\n",
+    "    decoded_frame = decoder.predict(prediction)\n",
+    "    frame_prediction.append(decoded_frame)\n",
+    "\n",
+    "frame_prediction = np.reshape(frame_prediction, (len(frame_prediction), 64, 64, 1))"
+   ]
+  },
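+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Note that the loop above always predicts from ground-truth windows of `encoded_vali`, so errors never feed back into the input. A minimal sketch of the fully autoregressive alternative (an assumption, not part of the original pipeline; the `window`, `pred` and `rollout` names are new here) would roll each prediction into the window instead:"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# Illustrative sketch only: start from the first time_steps real frames of the\n",
+    "# scene and keep feeding the model its own predictions.\n",
+    "window = encoded_vali[scene_vali_num * frames : scene_vali_num * frames + time_steps].copy()\n",
+    "rollout = []\n",
+    "for _ in range(frames - time_steps):\n",
+    "    pred = lstm.predict(window.reshape(1, time_steps, encoded_size), batch_size = 1)\n",
+    "    rollout.append(decoder.predict(pred))\n",
+    "    window = np.vstack([window[1:], pred.reshape(1, encoded_size)])\n",
+    "\n",
+    "rollout = np.reshape(rollout, (len(rollout), 64, 64, 1))"
+   ]
+  },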
+  {
+   "cell_type": "code",
+   "execution_count": 119,
+   "metadata": {},
+   "outputs": [
+    {
+     "data": {
+      "image/png": "iVBORw0KGgoAAAANSUhEUgAAAj8AAAC0CAYAAAB7RE0FAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4xLjEsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy8QZhcZAAAgAElEQVR4nOx9eYwcx3X+1z0998zOscO9l6dMUqIoU5QVHXF+smRJtoMYDgLYCBIkfwQxYATJHwYMAw6CGEgcIAgSJHHgOIgNJ0iMJE5sx7ccK7Jjy7pIibQuLklR3OWec9/31b8/1q9Y09vd0zPTszND9gcQ3J2d7q6v36tXr169eiXIsgwLFixYsGDBgoXbBeKoG2DBggULFixYsLCfsJwfCxYsWLBgwcJtBcv5sWDBggULFizcVrCcHwsWLFiwYMHCbQXL+bFgwYIFCxYs3FawnB8LFixYsGDBwm0FqZcvC4Iw0fviZVkW9P4+6fwAJGVZPqD3hUnn2E2GgMVxEmD1xcnnaOnpLm51jpPODxp90Yr83Fq4MeoGWLBgAYDVFy1YGBeo9kXL+bFgwYIFCxYs3FawnB8LFixYsGDBwm0Fy/mxYMGCBQsWLNxWsJwfCxYsWLBgwcJtBcv5sWDBggULFizcVrCcHwsWLFiwYMHCbQXL+bFgwYIFCxYs3FawnB8LFixYsGDBwm0Fy/mxYMGCBQsWLNxWsJwfCxYsWLAwURAEAYLQ9eSJiYUgCLDZbLcsR1EUYbfbYbPZRtaGns72stAfRFGELMuQ5Uk/IkUd1FFlWUa73b4leYqiCIfDgXa7jWaziXa7PeommQpBECBJEtxuN5rNJmq1Glqt1qibZRpsNhucTif8fj9qtRrK5TLq9fqom2UaBEGA3W6Hx+PB9PQ0SqUSstksqtXqqJtmGkRRhMvlQjAYxMzMDLLZLJLJJIrF4qibZhokSYLP58Ps7Czm5+cRjUaxs7ODXC436qaZAqfTiXA4jIMHD2J2dhbr6+vY2NhAKpXa97aM3PmhQUWWZTSbzVvK4NKA4vV60Wq1UK1W0Wg0Rt0sU0GDSigUQqPRQKFQQKVSGXWzTAMNKl6vFwsLC6hUKkgmk8jn86NummmgQWV6ehpHjhxBPp/H5uYmksnkqJtmCiRJgt/vx8LCAk6ePIlEIoG3334bW1tbo26aaXA4HIhEIjh27BjuvPNObG1tYWVlBdevX79lJiNutxvLy8s4deoUjh07hrW1NbzxxhtYWVkBgFuCp9/vx4kTJ3DmzBnMzc3h2rVreOONN/Dqq69CEISJn3RNT0/j3nvvxZkzZ+ByuRAKhSBJEjKZDOO3X3IcmfNDg4rf78fS0hJqtRoSicRIPMBhQBRFeDweRCIRnDhxAqVSCTdu3MDGxsaom2YaJElCMBjE0tIS7r33XmSzWaysrODy5cujbpppcDgcmJubw4kTJ3D//fcjmUzilVdewSuvvALg1jC4Ho8Hx44dw+nTp3H27FlEo1GcO3cOP/nJT26JiGUgEMA999yDs2fP4uTJk9jc3ITX60UsFkO73Z74AQUAZmZm8OCDD+Ls2bPw+XwIh8Ow2+3Y2NhAq9W6JSKyy8vLePTRR3H69GkWtWu321hdXWUT50mWpSAIOHHiBJ544gnMz88jk8kgHA7jyJEjuHbtGprNJhqNxsQGCERRxD333INHH30UDocD6+vrEEUR8/Pz8Pv9aDQa7N9+YGTOzz333IOPfOQjuOeeeyCKIsrlMlZWVvD5z38e6XQajUZjohX50UcfxW/+5m8iEolAlmWUSiVcvXoV//zP/4x4PD7xywqCIODDH/4wfv3Xfx2NRgPFYhGFQgF33303vvKVryAajaJSqUx0pEsURXzsYx/DI488gkajgatXryIcDuPd7343KpUKdnZ2UCqVUKvVRt3UviFJEj75yU/i+PHjqFQqeO6551CpVLC8vIy77roLiUQChUIB5XJ51E3tCw6HA3/8x3+MYDCIdDqNr3zlK2g0GnA6nTh+/DhyuRzy+TwKhcKom9o33G43Pv3pT0MQBKyuruLLX/4yWq0WXC4Xjh49inK5jHw+j1wuN5EOkCAI8Hq9+MQnPoFarYaLFy/iO9/5DmRZhsPhwPz8POr1OsrlMjKZzMRyDAQC+J3f+R3kcjk89dRTOHfuHMrlMiRJQiAQgCAIqFarSKfTEzc2iqKIcDiMD33oQ9jZ2cH3v/99rKysQBR3045pyb3VaiGdTu/L2DgS50cURfzar/0aTp8+jWazifX1dTSbTdhsNiwuLqLVaiGfz0/serUkSfjVX/1VhEIhpFIpvPLKK2i1WpAkCZFIBI1GA9lsFqVSadRN7RsOhwMf+MAHUC6Xsbm5iaeffhrtdhs+nw8ejwehUAgA0Gw2J9IYAbuDytmzZ5FKpXDlyhV85zvfQbvdht/vh81mw9TUFGRZRr1en0iOgiDA4/FgcXERW1tbuHjxIr73ve8BALxeLzweD7xeL5rNJqrV6sQZXEEQ4PP54HQ6cf36dfz0pz/FhQsXIAgCms0mgsEg7HY7XC4XyuXyRE5GRFGE3+9HuVzG6uoqnn76aezs7ECWZbRaLTgcDjidTjgcDthsNjSbzVE3uWcIgoCpqSmUy2W8+uqr+MEPfsAiBPl8ni3b2mw2SJI0kTaH5FipVPDyyy/jueeegyiKjE8ymYTD4YDdbockSWg0GhPFURRF+Hw+1Go1XLhwAdeuXYPb7WZ5opVKBeVymfGjz4eJkTg/lHj41FNPoVgs4rXXXgMABINBALuDTq1WQ61WmygBE1wuF7LZLJ577jlkMhm8/PLLkGUZfr8fgUAAdrsdDocDlUpl4gYUYNcYud1u3LhxA88//zwSiQTeeustZnDn5uYgyzLsdjsEQZhIGQqCAJfLhdXVVfzv//4vdnZ2kEgk2DIJOUCiKEIUxYkcOHmO3/nOdxCNRtnySCaTQTabhdPpBDCZSfuCIMDpdGJtbQ3f+MY3EI/H2UxTlmVEo1E4HA5IkgRRFCdyaYjSB9bX1/Gtb30LmUyG8Wk2mygUCsjn83A6nUxXJ83m0IaK1dVVPPPMMyiVSnC5XHA6nZBlGel0GrlcjvGeVJsjiiKuX7+OF198EQCYcwAA2WwWtVoN1WqV2Z1JsjnE4/r163j99dcB7C630wYSWZZRLpdRKpVgt9v3xd6MxPlpNpvY2NjAf/3Xf7G122azyX52OByjaJZpqNVqSKfT+OEPf4harcZ20CQSCWxsbLAZyqSClvFyuRxefPFFyLLMHNdSqYS1tTW0223Y7fYRt7R/yLKMfD6PYrGIlZUVyLKMcDgMURRRr9cRjUbRarXYYDqJBrfdbiOdTqNUKmFjY4OF3oFdPtFolO2kIZ6ThHa7jVgshkqlglQqxaI87XYbHo8HmUyGTUAmkR8AtFotbG1toVKpoFQqwe12w263o1qtQpIkOJ1OVCoVFAqFkW4rHgStVostozebTfh8PtjtduTzebRaLdjtdsiyjGq1OrH9sdVqYW
NjA6VSiUWVXS4XkskkCwJQxJKcnkniSHqaz+fhdrvZMlc0GkWpVOqI9OzXLsyRjMCUmHbo0CFsb2/D6/WiUqmwwaZcLk9sRwXAQrL33HMPrly5wpw5URRZHswkhp95NJtN5PN5vPe978Vrr72GZrPJZtoA2HLQpA4qwC7HeDyOD37wg3j99ddZ5EeWZTbzIgdoUoyQEu12G5ubm/jlX/5lbG9vM0ePDNF+78AwG7Is4/r163jsscdQqVTw0ksvsZkm9cH9CLEPE4Ig4OrVq3jwwQchCAJ++tOfolqtshwR4jbJHBuNBi5duoT77rsPoiji2WefRalUgiAIqNfrezhOor7KsozXXnsNd911FyRJwk9/+lOWi8aX1yBHaNLQbDZx8eJFHDx4EE6nE8899xwKhQIEQWC2lLAf/IRelEQQBNM0anZ2lnnsLpeLhWjT6bRZj9gDWZZ136iZ/GZmZtjaJdWHqVarSCaTw+yYr8iy/C69L5jJMRKJQJIktNttVuen2WwONSGvmwwBczmGw2HGkcKwsiwjl8vdMhyDwSBsNhszQGR4isXiyDiayc/v97NlIHJWRVFkM84hYV/7otfrZY4A5RfabLahJqrvt5663W4Au5F1KnJIUa5hyXG/OdJSHj/hII7Dwn72Rbvdjna7zaJWtKQ55E0jqn1xZM4PAJbQ5fP5UK/Xh22M9lXIP78fRFFkdX7K5fKwZyT7anC5e8Lj8UCWZVQqlZHK8OftGQpHMr63IkdyeCjPYNi1mkbRFynHiSYiQ8a+90XKd3E6nWi1WkPfhTgKPaWqx06nkxXjHCZGwZFyl+x2O1tFmMRxkWyKsu1U2dlms6Fer+/HjmDVvjj0ZS+v18s6Yz6fZy+Cwukk3GE7PsMC7SZptVpsKykZWt6D3wfHZ2jw+XxwuVxotVrIZrOMB0W2gF15TjJHWmNvtVrIZDIs4kEFOEmuk8wxEAiw7aSZTKZjqZJPLpzUXZahUAgej4dtl6XcAY/HAwCsL04qP2C3SJzP52MRVuLi9XoB3OQ4yeUXZmZm4Pf7Gcdyucw2GdASSaPRmOgK3fPz8wgEAoxjoVBAq9ViUUoqijupHJeXlxEKhVCv15HNZpHL5VCv1zE1NQWbzYZGo4FyuTzSUihDdX58Ph8++tGPwmazIZ/P480330QqlepY/nG73UilUhM5oHi9Xvze7/1eB79EIoFarYZkMolmswmn04l0Oj2R/IBdjn/wB38Ah8OBXC6HS5cuIRqNol6vI5FIoNFosAqdk8rR4/Hg4x//OJxOJ+O4tbXFCm/WajWIoohMJjPqpvYNj8eDT3ziE3C73cjlclhZWcHGxgYqlQoSiQSL9GSz2RG3tD94PB586lOfgtvtRj6fx8rKCtbW1lAsFpFMJtlW9kmuzO12u/FHf/RH8Hq9yOfzuHz5Mt5++20UCgUkEgkUi0W2/XtS4fF48OlPfxo+nw/5fB5Xr17FW2+9hWw2i3g8zpabJ9m583g8+JM/+RP4/X4Ui0VcvXoVV69eRTKZRDQaRTabnWjnzuPx4DOf+QycTicajQbefvttXL58GbFYDDs7O0ilUsyBHSWG6vycOXMGv/RLvwRBEBCLxZBIJFAqlRCPx9kukkmudfPOd74TZ8+ehc1mQzKZZLxisRgbRCb93JnTp0/j5MmTcDgcSKfTiEajyOVyiEajt0w17rvvvhsLCwtwu93w+XxYX1+Hy+XC9vY24vH4qJtnCu666y74/X4WIXA6nawC8M7OzohbNzhOnjzZkTBJjngymcTm5uaommUqTpw4gVQqxTYbVCoVtFotxGKxW6Zy/Dve8Q5cu3YNoVAI+XweyWQSlUoF29vbWF9fH3XzTMHRo0dx/vx5xjGdTiOdTmN9ff2W4HjkyBF8//vfx/T0NCt+G4vFxo7fUHN+pqen8Ru/8RsIBoPY2dnBs88+i1gshmKxOJLdTmavbVLFyunpacTjcbz00ksj5Ych5BmEQiE8/vjjmJmZQTKZxMWLF5mTN44yBPrj+OCDD2Jubg7pdBpvvvkmOzDxVuJ46tQpLC4uIpfL4dq1ayxaMIp6IWb3xVAohCNHjmBxcRH5fB43btxg2/hHVA/F9L4YDodx4MABLC0toVgsYmdnp2NZaL8xDD0Nh8Pwer1YXFxk5+hls9mRLTcPg+P09DREUcTCwgJqtRpbFhrVmYhm98Xp6WnU63VWeZscoBEWgx1NwvPhw4cRDofZ2VajLFw4jMSuhYUFhEIhVCoVbG1tjbra71CSLCORCILBIKtvM8pw7LASEAOBAFuDp+W8UWFYHD0eD6ampjpyfkaFYfRFh8MBv9/PduONuAjcUPqizWZj0bth7sYzgmHoKZ94Pw45dsPk6HA4IAjCyIv5mt0XKd+VErfHYPlu/HZ77Tf2e4fJCLBvO0y0CmwNu/DWfu6+sDhO3g4Tlfvs4aG1C8Vk7FtfVKvaTJsuhukc7aee3g4cbTbbnppatINvmI78fvRF2vk8Cn7Q6Iv7XoGO9vVr/W3SIQiCZvXmSS74x4NkqJQXGaNbAVSGQY3jrSJH2k6rxXHS5SmKIptd8+D1d9I52my224Kj0+ncw4H6KNVtmuR+SdW4eVA/JI60PXwSZWmz2eByuTraTjpKW98lSVK1ucPCvld4ttvtsNvte8KZ9CJoezg/Y5uUXUSCIMDhcMDhcKBYLKry44s78UXzJgW0NdrhcOw5CZscP/LuSYnpPKxJAcnR6XTu2TnDGyP555WeAbAzaiYFJEeXy7XntG9yGlqtFmRZZpOVdrs9DiFsQ6CaPi6Xq6NOCg0oPD/+LKxRL0H0AjrQ0+12dyy3k61xOp1MdjTA0BbjSeLodrvZeY9KjiRf0mdJklCr1Ua+JNgLiKPH4+ko2Ej2lLjbbDa43W44HA6USiV2vMe4w2azwePxwOPxdOQ1ET/ibbfbWemYfD4/9KX5fXF+SFEDgQA7EPL69eudDZEkBINBHDx4ENlsFna7HZ/+9Kfx5JNPwmaz4Sc/+QkcDgc72feLX/witre3x6ITE79gMAifzwebzbZnl5fdbkc4HMahQ4eQSqXg9Xrx53/+53jsscdQr9fxk5/8BHa7HaVSCT/72c/wj//4j4hGo2PBDwCbeQSDQVZvg3fwyGGIRCKMo8/nw2c/+1mcOXMGgiDgxz/+MSRJQqlUwsWLF/EP//APiMViY8WR9NDn80EQBBQKhT0cZ2dncfDgQSSTSYTDYXz+85/H0aNH4XA48H//93/s3KGLFy/i7//+7xGPx8eKo91u7+CYy+XY38lpmJ+fx/LyMhKJBGZnZ/HFL34RkUgEoiji3LlzkCQJ2WwWFy5cwOc+9zkkEomx4EgzyWAwyHJj+O37VKxyeXkZi4uLiMViWF5expe+9CVMT0+j1Wrh3LlzsNlsyGazePnll/F3f/d3w67M3hMo2hMMBuHxeCAIQkdlfOHnRUePHDmCubk5RKNRHDt2DF/60pfgcrkgSRLOnz8PURSRTqdx/vx5/O3f/u1YlRyRJKmDI4CO3aV0SvjRo0dx4MABJBIJnDx5E
l/4whfYdy5dugRBEJBKpfDiiy/ib/7mb8aq7AhFe4LBICugmkgk2N8FQYDf78fx48cRCoWQSCRw+vRp/MVf/AWmpqYA7HIEdnc1vvjii/irv/qrsdmFa7fbGT+XywUAHbtnRVFEIBDAnXfeCb/fj2Qyifvuuw+f+cxn4HK5IMsy1tbWAACxWAzPP/88/vIv/9I0fvvi/NDshBwfZbSDnAe/34+HH34YuVwOmUwGOzs7yGazOHLkCD7wgQ8A2D0f5K677sKrr76Kb3/72yP3fMmYUjIpzSCV/CRJQiAQwMMPP4xMJoN0Oo1r167hzjvvxOLiIt73vvcB2OV3zz334LXXXsO3vvWtseAH7CYger1e5vjwZ80QyDl66KGHkEqlkMlk8Oqrr2JxcRFLS0t43/vex47AOHPmDJPhqCMmxNHj8cDn88Hn8wHAHo4kx2AwiIcffpiVNHjhhRcQCASwtLSExx9/nF17//334/XXX8c3v/nNkde0II5erxc+n485Blocw+EwHn74YWxvbyObzeKZZ57Bk08+iaWlJbznPe9h1z700EN488038Y1vfGOkUSGeH23pp2icUk8lSUIoFMJ9992HRCKBbDaL7373u3jve9+L5eVlvPvd7wawy+8Xf/EXcenSJXz9618fedSLOPp8PkxNTbGCjo1GY48M7XY7QqEQ7r33Xsbxa1/7Gh577DEsLy/jgQceALDL8f/9v//HOI66fg7PkYpyNptNVY6kp2fOnEEikUAul8O//du/4dFHH8WhQ4dw9uxZADfleOXKFXzta18b2a4qvu3AbmFVKq5KB3tr2ZvTp08jmUwil8vha1/7Gh599FEcPnwYp0+fZuPpgw8+iMuXL+OrX/3qUI816QZaZp2amkIwGITT6US9XtfkFwgEcPfddyMejyMWi+E//uM/8J73vAdHjx7FiRMnIMsyTpw4gfvvvx9Xr17Ff/7nf5pSImeoi6S0jjc9PY2lpSXmzakpH82qvV4vFhYWcPfdd6NSqeDChQvY3Nxk64EOhwNbW1uoVqsjdwxoDZb48Wd4qYE84fn5eZw+fRqFQgHnzp3D1tYWu5fdbsfq6upY8ANuRkOmp6exsLDAzmbR4kgcSIbZbBYvvvgiq+9Ayw5Xr15FvV4fueMD7OVIclTqKTm0JKvFxUWcOnUKqVQKzz33HFZXVzu+/8Ybb6BarY7c8QE6OS4uLsLpdKLdbqsaSd5xJzkmEgk8++yzeOutt9jSdLvdxsWLF8eCI/ELh8OYm5tjeqrGr91uo9VqwWazYX5+HnfeeSei0Sh+/OMf4/Lly+zvjUYD586dG5tKu7wMZ2ZmNDnKsszaL4oi5ubmcPLkSezs7OCHP/wh3njjDfb3arWK559/HvV6feSOD3DTcZuenu44O1DJkRzber0Om82G2dlZnDx5Etvb23jmmWfws5/9jPErlUp44YUXUKvVRu74ADc5hsNhTE9Pa3KUZZktU4qiiNnZWZw4cQLRaBRPP/00Xn75ZcYvm83ipZdeQr1eH6njA9x0akKhEAKBAEt0VpNhrVZjpzvMzs7izjvvRCwWww9+8AO88MILKJfLKBQKiMfjOH/+PDsNwgwMNfJDB+xRMhoV51JWyiUh53I51Go1lvRES1wvv/wyFhcX4XK5cOXKFXz1q19FLBYbZtMNgfjRYKHHr16vI51Od/ArlUp49dVXcf78eWawV1ZW8N3vfrcj/DlKkAPWarXYcSQUmeNBJfUTiQSq1SrjKMsyfvazn+HcuXNs2WRlZQXPPPPM2IRniSNVVa3VapocK5UKotEoyuVyB8fXXnsN586dQzAYhCzLuHTpEp5//vmxqQpNHKvVKsrlMsrlMvL5/J6KzuT0bW1toVQqMY7NZhNvvPEGzp07xyIOKysruHjxYseRJ6MC8atUKiiVSqhWq5r8yuUyNjY2UCwWWcJ3q9XCpUuXcP78eTgcDlSrVVy+fBlXrlwZm4rJlKNUKBRYbks+n+9YtgR2ORaLRayuruLee+9libStVgtXrlzBK6+8AgAol8u4evUqbty4sSd/b1QgpyabzUIQdg9qLRQKqhwLhQLeeustnDp1iiXTNhoNvPXWW7hw4QKrdk01rcaNIx0A3Wg0VDnScUJXrlzByZMn2fJfvV5HPp/Hq6++imKxiGw2i+vXr7N6OqMG8aOCnHRoubIfUcmNN998E8eOHWNLf5Q28Prrr+Ozn/0sUqkU1tfX2X3MwtCXveglkAOgVVSt1WqxUvTBYJB9Buwa7M997nNoNBqoVCrIZDJjcz4Pr8QkHC1++XweiUQCoVAIsizDbrcD2OX3+c9/vmPgHRd+wG7bqSQ5yUktmZA4RqNRTE9PQ/75ifZ0ntJ///d/o1arMYM2jhxpybJQKGhyzOVy2NnZwfT0NIDdzkqO0de//nVUq1U0m00Ui8WxmE0T6MwrOtNLS47NZhO5XA5bW1uIRCIA0BENo6UDciTGhSPPTxRFXX7ZbBabm5s4cOAAAHREUZ566imWeDouUR9Cu91GNptlEy+tAofEcWNjA7OzswB2OdIBy9/73vdQrVZZnZlx40jHWFCepx7H9fV1zM3NAQCLopRKJcaRDtAcN440kDudTs3DkskBWltbw8LCAouqNJvNDo78IajjAHLAKSldayMBOUCrq6tYWFgAAMalVCrhqaeeQr1eh8PhYMufZmFfcn7K5TKazSbLzFeD/PPDP2/cuIF2u82WFvjBplarqa7hjxo0ENDarRpocFxbW2OGy2azsQ4ci8VQq9VYhGXcQLMNOvxTDcRxdXUV7XYbdrsdoiii0Wiws3lqtdrY7v5qNpuoVCpseVYNPEdejo1GA+l0mhVIpFn6uIGWrCgBUes75XIZq6ur7Hw6mlWnUimkUimWgzFOHPmtwW63G4VCQbXODy9Dnl+tVkM6nUY2m2V2Ztx2Y9KSic/ng9vtRrFYNMSR5F2r1ZBKpVgRyHHccSqKIjweD2ZmZuB2u9kBmEqbocWxWq2ySA8vw3Hj6Pf7cfjwYXg8HtRqNUMcPR4P+4yOi6Lo0ThxpHSQEydOwOPx4OLFi0gkEnvGDppgrK2toV6vw+v1MvtDqwjE12x+Qy9yOD09jSeeeAKPPPIIisUivvKVr+D111/XnS1SwqIkSahUKiiXy6as1Q6jmNPs7Cw++MEP4rHHHkOlUsGXv/xlvPTSS7rrrj6fDx6Ph/GrVCpmRUGGUlhtaWkJH/7wh/H444+jXq/jn/7pn/CjH/1INwRJWxttNhvjZ8bMa1hFxw4fPozf/u3fxhNPPIFms4kvfelL+Pa3v6170CdtwSWOZMAGxbA4Hj9+HB/96Efx+OOPQ5Zl/NM//RP+/d//HclkUvMa2i4uiiKToRl5WsPoi3fffTd+//d/H4899hhEUcS//Mu/4Atf+ILu2WUOh6ODHzmuJmAoffHs2bP4+Mc/jgceeAA2mw3f/va38dd//de4ceOG5jWUa0jLSGqDbD8Ylp4++OCD+OQnP4lTp07Bbrfj2WefxZ/+6Z/i2rVrmtdQPihxNGsSOSyOjzzyCD71
qU/h0KFDcDgcePPNN/GHf/iHeOONNzSvoXxK4mjW5GMYffHJJ5/Epz71KSwtLUEQBOzs7ODjH/84Xn75Zc1r+NpqZunoz6HaF4ce+bnvvvvwkY98BA888ADbnvhnf/ZnuocNFgqFsVi7NIJf+IVfwIc+9CGcPn0aTqcTHo8HN27c2LOVn0exWJyoA08feOABvP/972cHnDqdTly9ehWXL1/WvIbyStRmpeMGQRDw0EMP4ZFHHsHBgwchSRI+9rGP4dVXX9V1fshxnRSODz/8MN71rnchEAhAkiT87u/+Ll544QVd56darY7V8qQWiN+RI0cA7A74v/Vbv4VnnnlG1/kZt+UQPZCeHjhwALVaDV6vF08++SS++c1v6jo/jUZjzw7UcYUgCHjwwQfhdDpRLBbh9/vx0EMP4Y477tB1fprNJnNax52nKIq4//77UavVkMlk4Pf7cfLkSRw9elTX+eEdutTJTlcAACAASURBVHHmKIoizp49y+zK1NQUpqencejQIV3nhyJY+2VPh+r8iKKIX/mVX0Gr1cL169dx7NgxPP744/jXf/1XQyct0xLLuHZcURTx/ve/ny0RHD16FPfffz+WlpZ0nR+C2+1muy7GkR+wO9t44oknkEwmcfnyZRw7dgzHjh3D/Py8rvMD3Kw3QrsyxpWjKIp497vfja2tLZTLZRw7dgxTU1OYmZnpeq0gCPB6vWNfIE8URZw5cwbXr19HOp3GkSNH0Gw2EQ6Hu15L9UYajQZbthw32Gw23HXXXbh+/TqSySQOHz6MarUKv9/f9VpBEBAIBFCv11GtVseSH7DL8Y477sDVq1exvb2NgwcPsqXobhAEAaFQiO14GmeOBw8exMrKCtbX17GwsIBUKmWo6q8gCJienmarBePKkXZuXbp0CW63GzMzM9jZ2TEUNRZFEZFIhE0ux2FHsBJUv4f4BYNB3Lhxw9AuNJvNhpmZGZRKpaEfujxU56fdbuPatWtYXV1lhcd2dna6EhJFEaFQCO9617uQyWSQSqXw9ttvD7OpfaHdbuP111+Hw+FgdYpoANWDKIoIh8O47777kMlkkEwmDTlLo0Cr1WI7YCRJgsvlwubm5p6dCUpQJz1z5gyy2ezYc3zppZfY8ofdbsfm5mbXHXc2mw2RSATvfOc7kU6nkU6nx5rjiy++yJbpBEHA5uYmtre3da+z2Ww4cOAATp8+zepTjSPHZrOJZ599Fl6vF4Kwe97T9vY2otGo7nW0TfrUqVOsL1JhtXFDs9nEj370I1ajqdVqIRqNdhSOU4MkSWwbMfXFceb49NNPswKctVoNsViso4ijGiRJYlv6J4Hj//zP/zDHnD+9Xg+SJGFhYQHHjx8fa45KfsViEZlMputqDpVIecc73sHG/WHyG/qy19tvv80K462trWF9fV03DA2AORKUMT7OYffr168jEAiwddiNjY2u2/BtNht8Ph/bHTXO/IBdjrQDjzjqLZUAezmOQ30NPaytrSEYDLISCxsbG12NkSiK8Hq9bKvqJHAMhUIQRRGlUgmbm5tdDRKVpqek9VHXENHDjRs32A68YrHItuvrgY4MqNVqyGazYy/D9fV1TE9PQxAE5PN5bG9vd7UfdAxGtVpFJpMZe44bGxuMI78ZRA+iKLJdU+l0eqz1FAC2t7cRCoUYx3g83jXyQxMz2hVtVr2bYUDJTy3ZWQnayVYoFJBMJocuw6E7P1tbW/D5fKwOzM7OTtf6LrQbaHt7G5lMZmzqbKhhe3ubta9arbKq1HqgImqbm5vIZrNjn9+0s7PDdhVUKhXEYrGuMqH1262tLUNe/6ixvb2NUqmEVqvFdlJ0a3Or1UK9XsfW1hay2ezY53HF43G2a6JUKhkyMLScR9G+ceao5JdOp7sO9M1mE9VqFVtbW2PPD0DHDphSqWSo7AftYiRnd5wHTWD3GAviVCwWWf03PTSbTZTL5YnhyPc9Oqerm/ND29upZMw4O3gkQ9LTYrFomB+dzTZsfkPf7aU8yLMXuN1uU2cpw8hqHyQ5i2ZjJmIoO0wGgdPpNLUOzLB2XwyC24Gjw+EwNTF4GH1xEJjND2PYF6l+ilkYRz21OPaOceuLVMfIRKj2xaEebwHczErvx0EY9+UgYLCs+3EpDjdMTMpOmkFgyXHycavzAzA2BfCGCYvj5GO/jjwauvMzCMZ154xZuNX5AbcHRwsWLFiwMFkYa+fHggULFixYsGDBbFjOjwULFixYsGDhtkKvu72SALRLiY43Dhn4ziTzA259jkb4ARbHccetrqfArc/R0tObuNU5TjI/QINjT7u9LFiwYMGCBQsWJh3WspcFCxYsWLBg4baC5fxYsGDBggULFm4rWM6PBQsWLFiwYOG2guX8WLBgwYIFCxZuK1jOjwULFixYsGDhtoLl/FiwYMGCBQsWbitYzo8FCxYsWLBg4baC5fxYsGDBggULFm4r9FThWRRF2WazQZZlCIIAABPzc6vVQrvdFozwm1Q0m82kLMsH9L4zyRyNyBCwOI47rL64i0nmaOnpTdzqHCeZH6DdF3t1fhAMBgHcPK1bEIQ9P/POh9Z3jHxfy6Exck/lz7lcris/m83G+O0X+DYOimQy2bUEuc1mQygUMuV5+41MJmPoezabDeFwGEB/emrW5/383A/HSUM6ne76nUnmBwDxeNxQX5xUjkZkCNy+HLX6t1kw655G++L09LThexoJRvQKI9dqPSsWi6n2xV7P9mLgG2LkZ/73Xq/t9/688hmF8lr+816UTfl95UCp9kx6rlKA49pxRv0Mo+3o5edBrjVLf41iXN7xsHCr8wNuH47AXpvaz33G9V31YkOMOARGPu82bmj93O+4aPTd92pne71nr8/VQt/OTz9KqHeNliCVn+ndQ+tvsiwbaq/e93rlq/w+/W60Hf0+1wi6vUO1d0/Qkkuvz6B7dYucaD3XKPrVUy1d02vboLIaVMeUfzNDjkaePyw5msnPTCifrRxMepHjpHLs9V6DPr/f++wXjIwbRn5WXqvWh8x6Vj962i2a1Yt9VNMvvfvofd4PBk54NmPmqzYD7mY0B51lq0GrPWY8c5Cog5E2mAWtyJRee7SUVOue/P30ond6z+0VZsiiW9vUPjdLN3uFVh8aRI5qf9tvOeq1q59n9dMuLbkP4iAYbduw3me3Zys5Dno/M74/7D7UCwbt972OCcO4vxGo8exmA5Tf7TbGaNkOvedq+RV6XE3b7aXX+bt9rvZ7N6Pby3PVnqV372F41r22pdc27Ad6cdT6ffe9fG4UvepLL3pq5nPNxiCONaGftg1LjsOAme9+HPmZjX6dxWFHsMctKjSMsULtZ6VdMTviowc950rLITL7Wcrv9PK8npe9jCzn9Lp0pHUtT2S/hGpWiFbv80Gf18v9e70foVskwKxlKf7ZWvLu5/6Dhpt7uadZzzULZsqxFx1T67tm64mR5/SiR732ISNLUWbboXHk2Av03ofes8xswyDPNXo/s9BvnxumXeGX4PRsSi+2Ruszrb8bDUwYvX/fOT/UGK0HqHmkRr7fi9Hs5f7DRD8
zbCOee7/P7RfdwpZafzPjfes9e9D7K/Wi2/vfLz01g5saepVjN4PW73N7vX4Yz+n2rnttn5Fnm8F53DmahX559nJ/o+/ETI5GHAQjbetmn/ZrXFRGcsySUbfvG+1b/TiyPTs/yo6mfBlKQ8r/rPXye1FEPe+xXydiP6Fs235GBIaFYRnGQe5vpJNoGZlueqp1DzXoyXrY780oeF6DOOPjDuIH7LVTtwpuB47AbtkVtb6lZ1+HCS0HVK8/qX2P/1/NPnWzPWr3Hyf5823WkqHZz9FCzzk/yhfK/6/2so0Q4q/Vu8bogNHvSzQ7dKl2z14cwGGhW/jRSAcimSllZ2ablM/q537KNur9bEZbjX7PDHn3EjLuJkflz3r312u7mXI0S6+0dEELg/Ibd47KgbXfZ+0nlBzb7Tb7WQnix//bj/bp/a/VVv5vSl692ie9/jroe+g2ZvC/d7O9PFcjuqr13EH7Yt+RHzVPlH9wrzNII7PpbiE3NfTSUZWzJrVnG72fVlu17mPkvmYtl3SLtKn9vB9h4n7+1u0ao86yEfkadb6VbeD7Qbfv9IJeQsbd3gdvUJR9XOu7Rp7Jf9fskHg3aA08eksJRtuk1hfNjFAq26kFIwNBv+/RLI50nZaj0o1ju91W/Z4R3epl3DHDpvY7Lmpx7FU/tdqlvN+g99G6t/J/anur1YIgCGi32yzio4w06z1L7zlqvIy8r4FyfoDu647KnwlKoeq9DLXv6T1b77lGoHWdUnn1rucFzF+rxtus9g0T3YwredmCIAw1nNkP9Ay3nkx5OYqiiHa7zYwU/a7nzBo1WPv5frr1RZqNEcdu1/b6LDPQrQ9qyZKMMN1DkqSB+qIZ/Pp1CtT+xkcNyNZIktShp3rXG31Or+jGUY+r0hnnowWiKOpyHEfoOeT0vyzLTE/pOCm73d41mKCmx4M4ToNCa0JFdrTZbIKOzZAkdVfEaFCgHz0dOOFZTSDdlFntM1EU0Wq1mMFVdgo1pVE+0ywoZwNGZs1KzmSI+L+T16u8bz/tG4ZR6nUQUJs9Kx0CNaNrhLdZHOleevfVi24oZ2Tk9GjNuLWcIN45NBv9ypFvGw2WahOQftptxoxaa4LE31/v/ZNdAdDhyJEM+e9pTbyU6LUNRtDN3nS7PzmqxKvVarFBk7+P0T5lpA29ohvHbjpHjo7dbker1drDkZ9savXN/YKWvek2UeLlyHMkXe12Lz2HSHn9INy0xtxuE2TiJkkSms0mG++VjquZeqp3n4EjP70aRX42TYLlFZhehpoyaz1TaST3G7wAeAeO+PGRAvqfV2qtew5z5qz3WbfBmnfiaNbFH3xHnddms7EZjN6z1TAqw8U/XzmzVMqT77w81AzEsJwe5TN7kSN9brPZWBTE5XKhXq+j2WxCEAQ2O9PTVTUMw3HVe5be92w2GxwOBwDA6/WiVCqh0WgAQAc/o7NMeqZW+4Yha6WhV86qJUmC2+2GJElwOp3I5XJoNBosisA77d3aZ4Sj2VCz8fyECgBcLhd8Ph8kSYIkSchms0xXieOo7YYSRgZj4mqz2TA1NQW/3w+XywVZlpHJZFCtVlGtVjs4agUDlM9QTh76hZ796PYZQRRFhMNhBINBeDweNBoNpNNplEolVCqVnmWopaf873q625Pzozc76uaB8qCBhDoqdcpCodDhIdK99LxcPXKDRob6nfG2223Y7XY4nU54PB46ORe5XA71ep3x03Nw9N7hMDq4mqOpZ9jb7TZcLhdcLhc8Hg9kWUaz2UQ6nUatVmPO0DCdOD0Y0dNuaLfb8Pl8cLvd8Hg8AIBarcY4iqIIm822R8+0ZnbKn7u1tRcoB0OjcpRlGT6fDx6PB263G4IgoFKpIJlMolqtsn44KjkS1PipOZbKQbPdbiMcDjOOoiiiUCggHo+jWq1qRrt6bZcZMMqRQBxlWUYoFEIwGITX60Wr1YLdbkcikUC1WmXf7deJ2S+OSig/j0QiCIfD8Hg8qNVqkCQJiUQCjUZjIBny7RoVqP0HDhxAJBKB2+1GuVyGIAhMjnxUSAvD5GHEnurZGlEUMT8/j0gkApfLhUKhwJa/iJ8ZMjSq5z05P7zHqWVguzWcBOh0OhEOh+FyudBut9mLoM5Kz+rF0VLzAPnZb6/oJaJE3yMBut1uHDhwAC6XCzabDblcDu12G8VisYNfLzBLsdVmkVrhQ7WBnGTo8XgwOzsLr9cLURSRzWYhyzI7KVhtBqIXMjVzhmlET/XeJ3H0er0dHNPpNFqtFtLp9J5ogd4sb9jObL9ytNlsjKPH44EgCEin02g0Gmg2m5q7avYzKkDP0XMG1Npns9k69NTj8bDIXaVSQaPR6MghMcvB6/ce/XIkGUYiEfh8PlSrVTSbTRSLRTQajT2D5SgHeiNOnVK3aKnE5XIxB69YLKJSqcDpdKJWq7GEWv4ZRmD2u+h3QksRWIpu+Xw+5sTa7XYWReedWK0IkNpzB43+dLOn/HfUnsFHmIljuVyGJEmsn/LLX8rnaEWZlG3QarcaTDvYVPlCtB5Kg8pdd92F48ePIxwOI5FIYG1tDTdu3IAsyywkzd+Pf47W/bVm1IPMetSerfc9Jb/p6WnEYjFcv34djUYDjUYD9Xp95DMNNUdRD/y7JGU9deoU7rjjDoTDYWxubmJtbQ2FQgFOp5NxVD7H6CzPLPSrp3a7HYIg4MSJEzh8+DDC4TDW19fZDEXJUakbar+bjX4mH7wcaSnojjvuwNLSEgKBANbW1pDNZtFut+FwONjSidp9hg013dHix/Pi+QmCgCNHjmBmZgbBYJD1w3q9Drvd3sHPLBn18m6UetKNo/I6p9MJAFheXkYkEsHU1BSuXr2KSqWCer3OIrBG9X4/0M2mKn93uVwQBAHz8/MIhULw+/2IRqMoFossOsIvl4ySm9ZkXS8yJQgCXC4XALDIltvtRjabRSaT6VgRUYu0GOnzWm3rFXr2VO259Hen0wlBEDA1NcXGj3g8jkwmg1wuxyZjWjLUiyhptbPbuG/a2V78Q/mGqSm20+nEoUOHMDs7i1AoxDK9BUHomKUoZ529zM76jfZ0u76bUIjfwYMHWfiy2Wyyf7z33msehRaGHdJWi4A5HA7Mz88jGAzCZrOhWq12zKb5EOaoHT016Okptdtut2NmZgZTU1Msalcul1Gv1zvkqOyw/P/d+GvNovrlowelHNvtNovA0hp8oVBAuVxmctQz2vsFo+9RjZ/dbmc5FNVqFfl8HuVymUW1SE9HCZ6fEV3hDTtFBzweDxwOB8rlMgqFAkqlEltuBzrzv0YhSyVHvh16M3ziSHlbtGxZKpVQLpdZgrfexHQcoHzvSrtDuWfkrObzeeTzeVQqFbY0pLQr4wxl4IFkSMjlcl1laHSc13p+N5jq/KgJRzkrJq+PZp2SJKFarXZ8h8Lxap5ury+kV69XGa3QMxiyLHdsfyZ+NpsNdrsd7XYbkiSxwYS+T148f/9BO+4wIwtK/iQfytoXBKEjxM47Bt3ev1HDbyYnI3rK5/PU63UAYImy1FmJn3JLeC9tIQzTcGvJkX
8mRVsrlQoztGoctdq533JUPptvA4A9utdsNgGALQkBYBMt6ovdJhGjdh74NhBoEtVqtVgkslarse/wzg/PU+85ZnPsxfYC2GNTCa1Wi9nRer2uurXfaD8yW4Zq45TeuKHVF5vNJuPGT5YJfL6oUR6DOAhGoOXQavGjZUriqcZPq33d7Ewvujvwbi8tyHLnNlMKowcCAdhsNkSjUaRSKUiShEuXLiGfz6NQKKDRaDDHQanQRiMmaophVND8td1mYcpda3a7HVNTU5AkCVtbW4hGoxAEASsrKygWiygWi+x7yvVNrXYbwbCMMS9DWnN2OBwIBoOM49bWFmRZxtWrV1lkhHcijIT093PGpmyDFsdAIABJkhCNRrG1tYV2u41r166hUqmgUql0tFtttkqfd2sLfW+YM1cjHLe3t9FsNvH222+jVquxZGD+HkaXZMyEkagTz48ccor4SJKEeDyOWCyGZrOJ9fV11Go15tDy9+D/12rLMNAvR8r1kSQJqVSKJf9ub28zjrzjOokc+V1s6XQayWQSjUYD0WiUpRGoRV+NtMdM9OJ08X2ROLpcLjgcDmSzWcYxkUig2Wx2JHQrqyIbiZRovfthTZiVMqQ0Adqhl06nUa/XkclkGD9eR/Vk2e099yLXgZwfNaPPf0aRDwppAbszL6fTia2tLfY7vQx+K7jS++sWzVGS5l/gMDo0KSK/TZaPEuzs7LAE7mw2yxRYWfxQ2V69ZYZePzeLI231ps+UHMvlMnK5nOpSEI9ROT696Cn9nThGo1E0m01UKhUUi8WOmRgvMz1u+xXh0YJSjiQj4khGtlwusy2nWg7csHRNC0aexfPjdZAMbzqdRrPZRKlUYluGte6/3/zU2qAGNY6iKHZwbLVaKJVKexKARxGpUrZ9UDmS80P2hhLyCaPmyKObc9lqtTr6Il9ANZPJMI7KyE+3CbmRyYkZtqibPaWlWKWeSpKEXC7H7CnPj67nAxvD7oumnOqu/IwaTMsGfDEqAMzQCsLurotyucy8YLpGGbZW+99I23oN43Yb0HgoQ5B8aI/40YDC81O2UfnsXhV8mApCy4/KukukwCTDarXKjBa/zb1bO4cZ8VB7ltrvSjmS491qtRjHarXKtvAr168BdePSa0h6mKF44sjPOoFdx484UrSAHHojy3m8TnST4zBkrZShsqhhq9ViO0gp0bkXfoO0yex7qcmQ3n2tVgPQyVHrfY+jA0sgfqSflEYgyzJb8lLKERhsUrGf74MiIyQ74KaNBdDBkSIianJUi2D305ZBoGVPiQ/l8lKKBACWGtGNn9H2al3XLVINDGHZi/fgaImHBox2u83WpWkGRo2jAYUP7WndXwmtpZV+hGt0tkTGkzon8aX1dsot4IXAGyte4EaU2MjykdmQZZktV/LRn0ajwRKcKXGU5KYMsXeDWqfu5fpenqEWIuZnX3yJdao9QUncvF5349hL24cpS34mRhwdDkdHFIgKjFEuE3DznQB7Z4pqkZJe22MmaCZN/Gj3Ey0t12o11Go1Q/zMbJPZ9+I5ulwuiKLIZteNRoPNqCmqpaxgPcjz98sxILnYbDZ4PB6W5Ex9sVgssqKG9H2CchmoV/tjFs9ukXn6X5IktixLy0PVahWlUgn1ep3l4CnzmtSes9/2Rsue8mO33W5HIBCA3W6HJEloNBqoVCosb1LJzwgPXrbdeKi1UYmBt7rzsz7+M1Lg5eVl1km3t7cBYM+AoqYcWl6lXuRD+Tdlm3rlpgXi53a7sby8DIfDAVmWEY1GAYB1Up4f3watCEi3Nu23A0SGdnl5mQ0qiUQCsrxbkLJWq3Uso1C7+DYbaasRZe4XWnrKR3mcTieWlpbg9XohCAJbQsjlcqhWq3s4DtqebrM4s0FOD3GkulPNZhPZbJbtuKC28P8r264Frfe7H+D5+Xw+2O12FAoFNJtNZDIZtlzJt2s/22cGyNlZWlrC1NQUnE4n22SQTqeRz+c7JluAORz3M0JEHBcXFxEMBuF2u9kyXiqVYjo7aRx5GyQIu9W4qdif1+tFPp9HtVpFIpHo2HBA1/BjopFo+ijAy8Nms2FhYQGRSAR+v79jhyW/aWQQ2Zlha3p2frQcD2X42eVywe/3Y3l5GeFwmJHN5XIs8akf71Vv6URpgPtZgjDi+BA/r9eLpaUlRCIRJvRsNsvW4PkIj/LeRgWm5bwNU+npXVKF6sXFRUQiEUiShLfeeguZTAaCICCVShmKtKnJQStsqzW76RXd9JSeQxyXlpZw4MABOBwOXL9+HZlMBrIss6RRtbOgzDJGw4z8CMLueUhutxvz8/OYm5uDy+XC+vo601FKbjZ6xINem/UmRWaDInBUTX1+fh4LCwtwu93Y3NxkOSKlUqnjeAcj6Oa4K5eBhwU+edTlcmFubg6Li4vw+XzY3t5GMplEs9lEoVBgEfZJ5UhF/mZnZ5mTt7OzwxK58/n8wBWO1WywGVHAbtfyuupwODA7O4vl5WUEg0FsbW0xxyefz+vawP2eWKg9V2sCB9w8SiYSieDgwYMIh8PY2tpCPB6H1+tFJpPp6ItGHJhBIux63+nZ+emmKLK8u1Ti9/vZzqCpqSmWBEX5MPRdpdHtNuNU+7vSuehXiY0oFT2H+IXDYUxNTbFQH3m3/D2VRreXgUFriWWYBonuSzvXqHS+LO8u3TUaDVapmtpitGaRMnql9mwzOrdST9UcY0EQ9nCk71IImm9Trw6sUR7DjnpRYTE6V4eMDxlb+q5aGFuvzUaePcxBk+7t9/shiiICgQB8Ph9EUWSDJRVQG4Sfnk3ar6gB8fL5fKwgXrFYRC6XY0myytn0pHH0+/0AALfbzWxpNptFKpViSetanIza1P2OnCjths/nY+kgsiyjVCohmUwikUgglUp15dit7crVDrP46Tk+/Od01BHluOZyOcRiMU1+Rp3VfscEvev6WvbSazD9rVgswuVyYXNzE9FoFPV6Hclkkm2FVmZ1q0GZDEU/K+v/aIXQugmsX8iyjGKxCI/Hw7z2Wq2GRCKBUqnUoez8cgnfDjUeRvmZ2WH13g3JcHt7G6lUCvV6HbFYDPl8vkOGfNSNftfiQj8rD8vkDa1Z/NSeq/y/VCrB5XJhZ2eHHesQjUZRKBT2FIlTDhY8R7XESyVHLQfRjNm1nhxLpRKcTie2t7fZIBKPx5HNZvfsKOF58fdTclRyp+9qHYcxKLSibIIgMH47Ozts+2wikUAul9M8+oDnRz9r8VP2xWHw68axXC7DbrcjGo0yuSWTSeTz+Y6im/1y3A8ZduNdKpVgt9sRi8WQzWbRarWQSqWQz+dZoizxUNob4Ga9Jmo/LzutvrgfHHmZ0pEO0WiULeNRFWfiqLbTmbjQ3/hcRT2Ow4gS6TlAVAGfZNhut5HNZtkSLZ9CwLddGYXj87+Uu+J4foNg4GUvJYhYo9Fg2xPpumKxyIwtf+K31gxDTUGVhJUdXqu9g4bIlKBiVLT0A4CdpEy7uqgAoNogzHPhZ6ZKh0BtR4pZHj3/btSUmapSp9Np1g7KpdA6tV3LidFzatSKWWq11
Si0vq8cGNQ4krElOWrpqZKTXvKeUT3tlSN/Pz05SpKETCbDku75BFn+3KBufZEfPPmIppaumsFTyVeLXzabZfwoB0aLH98W/r56TsCw+alxpJ9pS3s2m2WJ+dQXeT1V46iMPvADrNIGjUKG9DvJilIjeDkSR7Wt0FoctSY8w9rlZwTEkc4mEwSBTbTob2pOCz8+8O9LKYtuHPuVndpz1ORIOlgsFtlyOuXcKU9t17ODdC/6XW31RG8cNMJzKMtewM1KqhTpoUiB1kxKWXuDnsMPPhQKVTMQemSVs9hu3IygWq2yDHbipxyMeGdGOYDq8VNLsFW7N3+/ftCNL8mQkn4B9Z0ypJik+PQ7rc/rydBoRKvXGYzW+1F2vFqtxjjyfJQ6xctRiyN9h3ZvKGWlxbFfZ1Y5W9IC7ZBR48i3SengKKNearMw3tgamWj0OxPVu45mzMrKxloyVHNqZFnu4Mfv4NwPflrXUj8hu7Czs6PaDi0ZKvsZX+JAyVHpQIyKYywW68qRxhP+emWZA+qfanI0Y/JoBEobQHpKG0eIj/K98HLk78HXwSN5KfOg1ByfQccN/jot+dN7bTQaSKVSmv2Ff/+8DPmSKrwMiScvw24cjOjo0Co8k+fGJzYBnQZV+b/NZmODEXBzuypFWegE30ajwdYW6SRqXrj88/oZNI0oBg2AVChOjY/SU6UzsGgAdTgcaLVarOS3w+FAvV5HvV6Hx+MBAMYPUJ/p8/c3E7zhgbqNHAAAIABJREFU4KvhKiMn/Lsm5aVdYLQLhxyjarUKh8PB6skQN57jfoPkqHaYrlqHJ45UO0YQbh4uSflexLFWqzFuXq93z72UzxsGeJ3hOSplp5Qp6Sqwe46by+ViciwUCpoc91uO/fADduVYqVTYDiPKo5FlGblcbmz4KTnyhf2AvY610tGhGmNOp5PJsFwuI5PJMHtDHIHdnJR+nfFB0S9HqvnjcrlYLk25XEY6ne7g6Ha7IQhCB0ezuHa7j7J/8RzVruU50lZx0lOqp8ZzJF3lOSqXlYw470Z4dgPZCf7ZyuvIkRMEgY3ztHGB2loqlZBKpWC321Gv11GtVjv01MiKgR5MW/bScj60ZrpKhZYkie2iIsfJ6/XC7XazCIvL5UKtVkOxWOwI89L31TpJr9y0IgVqHLRAbeJDy8TP7XYzp4IOXKzX6yxvoV6vd+zc0OPHP68XJTDq3ClnC/z/So685041c2RZRiAQ6DhUkowRz5HuqawQPchAY8QQqX2mxpWfZdGyisfjYU7P9PQ07HY7qtVqR2fN5/MdlaNFUdQ17GZy5O+vJ0cCX+SRL2xJxjQSibCDM+PxOEsqJjnykxqz5DhMfvSPjoc4cOAAJElCqVTC1tYWG1TJ1vCzTv64gWHqqRGOyu/xu8OouJzf74fdbkckEoEo7h4MSn+nzQv8TjGKJAyDoxFbZYQjDZpUcoQ2LoTDYQiCwDhpcaTP1QbmQTmqfa71HrU4Ek+Hw8GOaqFjhQKBANrtNut7xEXPpqq1sxfeRvVUjTf/Gc+N+DmdTrjdbsbP6/Wi3W6z3X1kN/P5fFd+vXAaeNlLS6ha0RcaGCn8Sl4ihWFnZmYwOzuLcDiMEydOsGJJfr8fHo8HgUAAFy9exIULF7CyssLOlqLBVvnsfrh1+0zJX40fn7RGITviNz8/j3A4jHe84x1MsaempuByuTA1NYVXXnkFr7zyCq5cucKOkOArCg8a8VG7xsg748PLykrc5NBJkoSZmRm2Pf7YsWOw2Wzw+/3w+XxwOp3w+/14+eWXGcdYLMY4qnWafqClp1p/U3LkjSz94yuW0pbjmZkZHD16FKIoMh11OBzw+Xw4f/48Lly4wDjS8pgZMzBlu5XtN3qNkqPdbmdc5+fncejQIczMzODw4cMQRRFerxdOp5Nx5OUYj8c7SvdrtXE/+fHcBGE32koHDy8tLTF+Bw8eZAOp3W6H3W5n/C5cuIDLly8jHo+z/CIz+A3KUamr9Dc6R8lut+PQoUM4cuQI65OCILAIiRZHM2Wodr2R+yk5AujgSBycTieOHj2Kw4cPY3Z2FrOzswDAdFSSJF05qk14zYTauEif80twyuUe4kgRu+PHj+Pw4cM4cOAApqenAexGZMk58nq9OH/+PF555RWsrKywEgi8HPlIV68yVZOH3n14x5nXU/77brcbLpcLbrcbJ0+exOHDhxGJRNjOYpIv8XvppZfYuJ9Kpfboaa8wfbcXQfmS+Z1b/EugCpftdhtOpxOHDh1iL+GOO+5AKBSC0+nE8ePHMTs7C0EQ2M6xnZ0drK+vdwy+fBu1PF4zIAidyWfKEJxylgmAGSPid/jwYQSDQTidTpw8eRIzMzMQhN3jIhKJBKLRKDY2Npj3q1ffwgw+3TjSwK10/MiDB7BHhsvLywgEAnA6nbjzzjsxMzMDYHfXQyKRQCwWw9bWlqoMBzW6/eip8qw2pWNASwjLy8s4fPgwpqenMTc3xyJcd911Fw4cOABgd5cVHaq5vb3N7jPoLoVufNQ+U+qq2i424kg1ZZaXl3Hw4EFMT09jenoaU1NTcLvdezgmEgnE43Fsb293RFjMcmT75UeDCc+PnAKPx4P5+XksLi4iHA4jEAggEAjA7XbjzjvvZPx4PVXyGxb64ciDnDuPx4O5uTnMzs5ienqaTbLUOCaTSVUZ7ifUOPKRGuUEhg49pQlzMBiE1+uF3+/X1FOlHM3qi3zbjHLko2zKCS6BZBkMBhEKhVipA7W+mM/nEYvFEI/HEY1GOyLWBDMmXMolPKWu8BPZbvxoIu3z+djEig4I93g8OHXqFCKRCAAgm80iFouxf4PaGtN3e/HfUzoD/AyF/5w3uMFgEC6XCz6fj5Vud7lcaDQayGaz8Hg87BBNOm/J5XLpbiM2mxv/XaUjwJ/ZQp+Tg+d2u+Hz+djPtBZNyySZTAZer5dt86T8HyP8em27kRknn3RHiqaMFBBH6qQul4uty9rtdlQqFTbbpkNseRlS/g/lI6gZkX46bD96ysuQ5wnczPUhjiQTm82GUqnEdqeUy2WkUinGUZbljvV4pRy1OBttu5YciQ+wN8GX11N+EKDfqS86nU7mcPPF10qlEgRBUO2LahyV7TIbevx4h5N+liSJzZgpmT+Xy7F7UQ0rt9vN+qKaDLUiifvFUdkXqZ+SDPl+R1uq2+02Wq0WisUiZHk3Z5I4asmwWxrDMDkq5Ug7hogjRWNLpRIrokeDLc+RPtfrizzfXmH0GqMclQ47LXO53W7GpdlsqnKsVquo1+uMo9K+9GNv9OyUmn7wukjPVPLjJ5lUqNPpdLJdmpRf2Gq1GL92u60qw36cvL6WvZQPUYuw8AKkpFcizCdCU2huamqKvYB2u410Os0KQlWrVZYM3Wg0sLa2hmQyiWQyCVne3ULvdrs1D9TshZtRqPGjMCp/YBvtYPD5fGxdttlssgFSlmXmIFCy9+rqqi4/vq39eLxa31V+ToZGEATGkZLTSIbUuVqtFqamplCv11l9lVQqBafTyWRI+U2NRgOrq6tIpVJsVwBxNGvW2aue
EkcALBGdcgIo6Z6W9Wq1GvL5PGq1GpLJJJN7tVplTi3JMZ1OI5VKAUAHRz059MLRyOe8HMnJpDO+KJmdZEl1SKrVKrLZLCqVCmKxGFtrr9Vq8Hq9rH7O9evXkc1mWUkLqn+1n9EDnp/b7YYsy8xxowM/eV2VJAmVSgXpdBrFYhHRaJTxo80U9Xod7XZ7LPjxHCkPCwAbKGiXG9kbOmSYolaFQoFFO6gv+nw+xnF1dVWTI2Bu5G5QjsDNc/fsdjuKxSJisRhyuRw2NzdZpIH0lDiura3pciSe/aZM9MORcltcLhfjyO+crdVqzLZQXSDiSJuAPB4Pq59DcqTq9IVCoSNJfxBeynvw0R8lP3LofD4fZFlmNpE/z5P4UUmY7e1tZLPZPTLk+yLJMJPJABhcT03Z7aXmZdGASGuzbrebkaetqbIsM6NEyUybm5tM8X0+H8rlcscMml4inQ1G67rUDuUAZ+ZMRXl/4udwOJiQqD4MKTENijRwbm9vs0RZj8eDarW6hx85ecRP6fTQffmoT7+KrTUjIMVuNpsdeSwUjSLHgNrBJ/hGo1HY7Xa2HFatVlEoFPZwpO3gpB9q7egnsmWEK8+R5Oh0OuHz+ZhxpfPZ1Dju7Ox0HKJJHAF05ajliA3C0YgcaTmAHHGa6dP1xLFYLEKSJMRiMeYQUgQhl8tBFEXWD6nAHu3WGJYcjfCjUPnU1BSq1Sr7x28XJn7VahWxWAx2ux3ArgzL5TKL+jQaDSZH6s9G+Q2zL3q9Xvh8PsaxXC6zEgZKjpIkIR6PMxmS06fkSI7ssGWo5KnF0efzwefzIRgMdnDkIwWUxEwcqS/yHGkiSn1RyZF3dtRsai/Qi6bwOsFzpCRm2sRTqVQ6ks4bjQbK5TLq9ToSiQScTidbJiJd5TnSO2q322zcUHJUyqBX6N2DOFJfnJqaQjgcZnaTzi0Dbm7+KJfLsNlsSCQSbIJPuxSpmK5Shr2MGXp90bSzvZQvAAALvYZCIbjdbhSLRXa0Bb0EfsYCgCk6zbBrtRoKhQKbodIp1FNTU+xARlpfVBJVhv174dZNkcm7pSM8fD4fisVih0Ly96EiZaVSCeVyuWN7Il/9slQqoVqtwu/3o1KpqPIjQfdbs0IrdKl8hzRLttvtCIfD8Pv9KJVKyOfzLGrAGy++gBef+E2GijiSjHmOFNpVk6HSMPXDUU1P6TPiGIlEMDU1xQYHml3xUBYpo+UuGnBooKQDGYmjLMu6ejpMOSo5BgIB5oySo8eDdJWXI7A7qNIBhUqONADTTFUr1D6IDLvxczgcmJmZQTAYZMc+kJPH34vsAe0AIhnSyeFki4bNrx+OdrsdBw4cQDAYZFHyZrPJdIxAekr2hAZCsjf7zVFt8NWyNw6HA3NzcwiHw2xiSLrHX0ccy+UyiyKocaSDppUceXuj5ST0wlH5s1ZfV3KkMYN2wfLXkjNDctLSVeJI4yJvb/hn8/a6l3wnLT2le6nxW1hYwIEDB5ie0vIrL0Ma38gxIltDYwavpyRDOkVBS0/12sxj4N1eWnvtKXQ1NTWFhYUFzM3NsQReGvAoBEgDKdWIAW4mCtNLk+WbRRIpR4bIq1U2VSY69sJN+TP/gnnHg/gtLy9jbm4OsVgM6+vriMfjHfwAdHjn5DjxYWjiJwi7OVBUIZvuo2wTv8VcLfxolKcWiCN58UtLS5ifn0c0GmXLkzSTIiVW40jLCcqZmxZHejbPkX7uh6NRPQ0EAlhaWsLi4iK2t7dRr9dZeXalHGnGzOce8MUg6TlU0JH0lDc29HM/eqrkqAcygs1mE8FgEMvLy1hcXMTm5iar98LLEbjp9BFHXlf52Smwu0TI7yrR6otqPw+L38GDB3Hjxg2Wf9WNH+WOUO0UvkAlz0/P1vTDrxeOpGPhcBiHDx/GwYMHsba2hmKxqHpmEq+n5ODRkjUf1VTjqMxLMYujHldejuFwGEeOHMGRI0ewurqKYrGIRCLB5MinFJAsKeeQXyaj5RJBEDQ5Ap19UdnmXjjyzoBaJAK4WbMnEong6NGjuOOOO5gcadcrL0eKPJKjR/l4ZG/J5uhx1Gpbv/ZU6zqKyNTrdbbT9/jx41hbW2PHsPDjOAUJ+OrWtLuNZEu+AmBcT/kxWg89OT9qzoGWd0WklpaWcPToUczNzbE6NqlUip0BwifSkkLza3h8TgZ1DvJoyWDxZJVOSj8OkF6UgH4nfouLizh06BAWFhbgcDjYTi0lP7oHtV+LX6PRYPz4RDiz+PXCm5bwFhcXcfDgQSwsLLCIQDweZ4MHLR0AYDNRvu18Mi1FgkjeSo70PT5RbxBu3fSUOtnCwgJzDCgBNhaLsVkzr6dKjjST4fWWL8ffi54OCi05tlotLCwsYGlpCQsLC2g0GshkMojH44yjUo5UWI3nzXPsJsdhcFTjRzKk3Vtzc3NsB9POzk5XfspJDXFR8lPq5Cj64tzcHPtHuwl3dnaYk0MceT0lfSMOZnAcFHocZ2dnMTc3x6IGoVAIW1tbbIJM9lPZF4GbFfPJ1pBN1eOoLD8xqL1RRsf4+xHHSCSCmZkZhMNhFAoFVuOGd+T4+xFHcoDo6CS+zg9xJHvD91mlHJXt6oWfHshZi0QiLJJOh5uTY6SUIc+PJsYAOvjyaRZaekowKsOeXHgSKi9crYcIgsAKbM3NzeHYsWOYmZnB9PQ0AoFAR8dSKgu9FBp0SDkpBKj0+HgvUCuBrVsIjOeoxUfp/VKi9uzsLI4cOYJIJMK2zfLf1Yo48Pxo1kxc+Gv4z7QiGEb5dfuuUp4kwwMHDuDQoUOYnp5mJ9kT+PwmPY40c1bTI+VMRas9vXA0oqc8R9qaHwqFEAqF4Pf798hRadTIAPEOQjc91dLJXrn2IkebbbegXzgcZuUHaLmWvsvLkdd3MkBUn4vX1W5yHMTQ6kFp7Gw2GzweD4LBIBYXFzE1NYVAIMB2HmrxI7kRRy1+/LO68eulLxrlCIBxDAQCmJ+fh9frZVueu3HkZUjRzkE4mgU1ji6XC16vF7Ozs3C5XKx+FrWDj9rwHImflhx58Bzp90E4Kp0nrfuQrtKO0UgkwpKD+SrjyjGAfz88TzU58vaFv88geql2rRpH4ke5uMFgEABYUVhqn5qe8lFYkqERPaV7qI0fepz7Xvbib65myP1+P8rlMsrlMktyPnToEKtFkEwm2VIH78XxpCjMCYCFumw2G0uy5D1EIq81SzGq1EYcJUEQGL9SqcQS0xYWFpDJZBCNRtnSlxo/fpmH2kwzUPL+aS0U6Azlkcffq1OnbH837kqOtI4+OzuLVCqFcDiMRCKxhyMA5uSQTOgfXySQQqAkV37LNd9G+qxXo9SrnhYKBZTLZWSzWebghUIhVY7UCUkWfKIhyVFPT0m/eTmawZH/nOdJHPP5PPL5POLxOILBIPunds4QtZE4khGiwmLEh5cj6SwvRz5SZKZToNTTUqnEdvNsbm4yxyAYDHacM6TFj99eq8aPeCj1dBj81Dj6fD5W7j8Wi7G8CnKG9DjSrJmiDs1msyNCRJsrhsVR6zplpIw4xuNxbG1
tMdtImy2obwGdu20pbQLYjQIST5IjceRtkJ69McOmat3D6/WiXC4jGo1ibW2t47gjr9fLliaVTowsy2yZi6IrehzJBlFblPZGOT72yo//nOdKMtza2oLX62U5W+12mxUQVePHy5A21RA/co7U9JTnR4EBo+P+wLu9lApMD6clkZ2dHVy+fBnJZBKRSASZTIY1Wlm4ig/P8gMCHYlQqVRYTSB+/ZdfSqEXwIc5e1VmpXIoB04a8IjfysoKEokEIpEI20pJnizPg/dwgZszGFoO5Gvi8NdRLhTxo3fFK0IvHPU6OK8sShnG43GEw2HGkdpBM0leF6jj8RzJSSQZksOg5Ej3MUOGSl7d9DQWiyEUCiGVSnXMmokjfw9l+JWOX9GTI4V96Rq6hxkclX8j8BxdLhei0SiCwSCSyST7LrVJKUc+mgXsrrvTcTNKOZIuaMlRK2ppBj+bzYadnR04HA5sb28zx7UbP/4zGmh5W8NHnbvpKf0/bI6SJGFra4tNQIxwpOWvdrttmCNdq8axVxiZsPEcRVHExsYGQv+/vfdqjhxJsoUPUmtqluoesWO7OzbP9///g+9hZ++o6u7qrioWZSqmYiaZ4nvgnOCBM4AEkKy+09NwMxqTIBKIE+7h4SoiDg6cI5kEozohi8XCLYlPizErPv2bpPMG01pXV1cAgE+fPuH4+NjtAB+nb2gAkI9cFESMVqeyT4IgvDt9Fn2TRk65UnS1WuGnn37C69evcXNzE2oLecjv8xlM0Ss+PtPWFVKf6nyji0q2jcWdjB+fMFuri2v4N5sNer0eJpOJ21BMhcI3OPg3LflWq+UYrdEhLlXWJeasndFTxLPg831Wenh4cCct93o9jMdjh08x+PqNQqr4uOkhrXhirNfroUp5zQ3b86LS4NpGbN/FxQVWqxX6/T7G47FblWAxxvHw4eHB7RFDjIwSESNxAwhh1ILorBjj5PTh4QGXl5dYr9dOTofDYaynRP7xN5e2EyMNBY1+kY9Rcrorxijy8XEymWA0GoWeZflIo0ZllXvhECP55eOjldU0fEyL7/7+HldXV1gulxgOh5hMJm5Vm97nw0eFSo/TJ6c6Fl8CX1qMwKO+4REUo9HI6dMkGBkB4lYVcWPR6tNdMKbFy33QVqsVptOp0ze+sWgxcgyRj9yCJC0fs45FHUdxWJfLJfr9vlsMwuizHW9KvEZZXa1WIVmlYUOMqm80esQ6za8xFskLjsH1eo3z83PHxyg5VXxWn1DXxOFTHhYKBbdXW2xb0wy+Uqm0OTg42NopynjuTaDLnnmQp4bs+Fs9i3q9jru7O2fZaaiMAsBqf173hTULhQKGwyGWy2VsnK9cLofwWWx20AFwB7HyvavVynlVmtJhv6hXUavVHD5atSQ72fBvTavYNvV6vf9vs9n8n20YDw8PvfiiMPKkXYuRWxJYjDpok/JQMcbx8OHhYWusVjEqLktWTgE8w6gbjWlYVflYr9cxm81CGPl8K6fEEoXxn8uzE2HkGT9p+Ag8RVzZ71T6bDN/+/jI7yfBSFnNgjELvkql8iw6QHy6LDYtPo5L9heLSaPwAcDNzU2isZgFI/CkM5nW4spJJR9GyqnKsm8s+jCyHVnl1IfTh1GLfXUy1cJmi5FjOS1G1tC8JEYfTntdC+3Zdl3VZQ095WWtVgthJGm0yGJUPlIHpxmLPGIiiZxuNk+b4SofaMzYyJqOSYtPx6Li0/0CFR+foWUj19fX3rGYebWX7QjrSfP/DEvpRMk6CF9xsgIF4CxUrbuw+U0NubNTKpWKm2Sy1FP4sPuewTylxUdjz5JOnjRmiI8WreJj0RdDoVQCxKcrw9KQ9VQsRsVKHtIQoNXNNvnwAXBt0u8ATzzkYNfVCcTLugTyMK2H7MNqcfnklIYAeaHFzFZ56HXykf/jd1WhJZHTXTFa/vn4qBgpOz4+8vu6upJyCjztJaN8JEZet3zMijEJPl0RosaBrkTT56mcEp8u4ycmKlkrp1H4rBHyNTEqD3362WJUj58GAVcPkZ+afnlJHvrIh9HWNRJLHEb+X+WUz1U9yjoSTZ1wLH5tfcO/OUfwXnX41Ynk3/o86hGfs6zzg8qqXifGLGn2pHMG5Yr36vc0Xay8I+4ofDomaQQrD2ns0QFKMi9mPt5CvWYLxHaSWu8caD4vjPcz5MhaGA3zMV+ouU2dWBgCo0Wv+fys+CwO66nwuioPHz6N5gRB4OpgFB8nIj6PVe8+q5b4tM+SYtR2+7wx+9nykCFkvVcHKpUp92ZQi52GoY+H7HfFyEGQZrBapaG8tHKq1/kuDbFGvZsh5Dg+av5aDSiOgaxyCjzJKvtSjTLbZstHYlQl5es74iIftQCak7watupt+/iYZSwSH9uZFJ/ve9ZosHKq+NQI1slpGz5ucPq1MVJ+smAk39QItga96htdQZVWTlWe7DiMw8g2aRrIOi+Kcb1eu3SH5aMdz76xqCuo0lISfROFkUaKGmxKOha5apNRFOBJp1qdpvMi9ZCdN5KSnTO0XfxsZULHSFQqj9/l3OLDx3bbzEGcPqWMx82LqYwf65lo6kUbZe/TztKwlxXk9XrtctAAQumSQqHgzh0aDAah88J0LxKGwNQLTTNQdVCqdawD0YfPCjL/bwcwQ/DEx/cUCo+7P7daLYeP7ddQty4F1Pf6okxJMGqo1/7PKnmSDyM/W4zcPZhem/JQQ7EWY7lcDoVOtV1JMbJNVslGKXCVUw3ZKjEqp2kG8pGGOfk4HA4TYVTllxajldUkfFS5jeOjriYhH4mRheuWj0ytKUZOSFn4GIfPRuGISXHvgo9jkfiq1eozfDwglfi0T3fFqPrLh9GO/SQYN5uN06+j0SiEkbh8PLQYX1JOfTrV/q19qgaMOp0+jD4+WoycM2gc6gSfBaN1nKPmRfu3Ne7YPkamyEduBKz6x47FSqUSq2/SYrRzhk8efYar4vKNC+LTfX6IT+WUh9ZG6dMoHsaNxRc51d1OjiSrfAF4B04QBM5y08lYBWi5XKJWq2G1WqFarTqFqgAZUVErn+9OOlh9+PRZ2j4VWv2+DzetWIb9tuGjkKoHqtXy6sVECVYcRlUgvKYes5JVsjYcSYw+HqpBsdls3O6kFqPloWJMa9zxffytE4iV0yiebpNTXamokxUx0tP2yanyUVNlaSYUH0Z+9hnqbL/FqM/y8dEav8rHOIzKR8WYho8+Hlh8PvIZfXYsfi18acdiHEb7OS1GAF6MbG8URuqbOIxpSDFquy2uOEMojU7lxLder0MYtcTA6lSm+Oy7k5Kd7HktSt8kxRinU1XfMOpFPup3VN8oxqxzxjYeqmHu0zUkO+/HySnxKQ8Vn6aiVT/EYUxV8BwEwQ2Aj4m/8K9Fv91sNidxN/zC8QH//hi34gNyjL8A+neXU+DfH2Mup/+kf3eMv3B8QATGVMZPTjnllFNOOeWU0y+d0u/mlFNOOeWUU0455fQLptz4ySmnnHLKKaecflWUGz855ZRTTjnllNOvinLjJ6eccsopp5xy+lVRbvzklFNOOeWUU06/KsqNn5
xyyimnnHLK6VdFufGTU0455ZRTTjn9qig3fnLKKaeccsopp18VpTreolAobLKeWPz/mv55jELsnuW/ZHwA8PDw0N22W+cvGeM/Dy/cuu98jvFfm5Jg/CXjA/KxSMox/mvTv+pY9J29mJWixmIqRKVSCW/fvvWe6WEPa7Nkzy6JOnB021knepYN7496jl6/vLzMjG9bW5J+jmqbxe2jJM///Pnz1i3IS6US3rx5k4qHSfvYnueyrR9898S15+rqahu8RBjjBlXSs4uyyMA2eQd+foxxPHrJsZgW4y8ZHwCcnZ3tPBbTyGmStm27/2vKKfD8fKddxso2veJrc9x3fZ+TzBnE+LXnjaR8tLLqm3f1vl3mRd/7LcXh0O/vwsNt90fNi6nNOd9x9r6/o+7Rw8sUhDLLR7zuEzD9va0zt5E94FCZq79te+07o4QkSnjT9p9PyLNgjHtfFA79nw93Eh762r6tD9PQS8kpKauc+jDq814Ko08hxE3avrYpVo7RKIM1KcavMRajFGhafC/BQx++rGMxiodRsuKTU7YrqV74ueXUvttiSWIg6d9ROlWfb9/jmz/087YJ3Uc8ZDVOL8fNG0lk0qeTksiqXk97Wj3ppcaiUpxs+eaUtHLqkyelTDU/elKqT+j0cxA8npjLz/zR5+i1IAhSPd++K+r5aSjJ82x7fff4TpSNwpf1Opmd9tTzpO/iD3mo9wN4xlt+9t2vfaX3sz16n21Plgk0Tl7soImSU9vP+owojBabYtR7fNezYrTP9F23/RnFd21XFN+j2vpzjcWXwJeFh0nwpR2LUTyMGhNRYy4Oo31O2vbsOhaT6Bt9p+XLS84ZcTqVp72npST6O27eSKrvtR+2tTOqH36usRg3N/ietY2HWca6ypGl1JGfIPBbUzqhAGFLXT3IbVY377fX+Xxfp1jr2fecNPiStNHi87Uxyqvw3e/7btx13+c0GLUN+tteJ5GHvuu+/rH3b8MS9d6sHqflo70e1YYo3sVhtxgVu+/5VoZ0OGi1AAAgAElEQVSzemNZMKaVpTiMqoDjxmJWjD8XPt/kmkbXfC0e2rEVJae+tilZHnISjZLTbc/JgjEqouMb97ZtUc/RdvI6+cffcX1l32vvz4IxjRzGzRu+Nti+4sS+jY9RfZUWX1yULIk+jcJt8SXl4bbItP1sKVPaS1/Ih2uHblNa+t2sk50vtJb2GT6yguF7VlRYT/8XFwaMI3tvln7dRlGTPP/eJui+7/kwRPE27hn2vVGKYBtF8TFpf9p22sEXhzGun3z44try0hijHAWfsZ0Fow9vVoz/L/DFjeWk+F4CY5J+S9Of1pAgzqQpqV3l1DdnaDuiDO5t1+1nK6uWJ0nnjbQ6Z7VaeWXL1xZ9n08PR8lwHMY4PibV23EUZTBt06dRfPcZM8Rm8RUKBVcmE4XBN+62jcVMkR/9rQAUvPUKt123FiSv+a772qTPiZvck+LztVexWty2LXFesRLvi7JgfWHhuAk7DUbfd6J4koYPVhaivhOnxHZRtlHv1P7cJo/6DJ/8xmGM6qc4XD8XRp9cbsNo32fHWFSbXmos8rs6trLgI/nw+ZSuj+zzffiyjEWlrPrUPtcnh4rTRu/sd+MwpqFt+kbJp/PsdZ9Osm3mhGmfofh31TFKts1Wr/tkY5us+uYZfubqq/V6HclH+6woYysJRfVZ1FiMmvOi5j/7v0ql4rBFRX7ijK0kY3Gn9WtxDUnymd+L6xxfR9oJSq9vm6CTkj7PXrcU1dG+QZ8GX9y7diF9h+2vbXyLUpSK1ZdTVoVr+9bKUJI+T0NpMfrIyqkPI6/RC/R5Rf+KGFWRaTstRjsxWYzb2vQS41Kfve2z770Wn50Mi8ViSOHGRYKj8GUdo3G6a9tEvQ2jUqlU2hljGorql7RGhzVUdcJVg6dQKLhrDw8PzzDavnwJnWoxxcnktnlMMZJHxERjYbPZOAOI+oZF19pX9v27Yt3Gs6RjUfEBjzwslUooFotYrVZYr9colUpYLpfP8CUZZ0nuSW38RCnZKNCWqfr/KMvNKlnmNpfL5TMjRwUkCmwahidRQFHX4j77LF4ShZgD1Rpx/BwVPk47gH2ewjajMcnkmgajhrDjsNnPSSmuvfY+tiOJnLK9PsOWGH282yanWZRwEoxJx6Lv+z7HpFQqoVAoYLFYPMOon3140mJMw8O0RoMad1rkWi6XAQCLxQKFQgHL5fIZvqj2vBQPt43DuL+VyD9OmiSNGhB7Gh6mJeqbOMr6Hho5xWIRpVLJYeCcwbkkbs7wvXdXPkbdk+R/dswpDyuVSkhuV6tVaH5QjNvet+tYTGIIW91p8REHf8rlMur1ujPuHh4eMJvNQm22PLTXFduLGj+2WM6Sr1N8f1vLT+/Rgie1CGkFLpfL0HeiQmdJGeRrW5bv2vdaxut19mOpVHKDlpYu8Vk8Pmz6zKTk8wZVMKPabPsk6p28XwW6WCyiUCjg/v4e9/f3eHh4CN37kjwkxqjvK8ZtaSy9Zp9FTKVSCdVq1eGYz+fPMFpcPxdGnyKNwsgfjfhQPiuVCsrlsru3XC5jsVg4jBbTS2CM0zVxSjWOh/QoibNSqaBWqzmcwKOTVS6XMZ/PE/NM/84yFi1G5Z/9bd9nFfx6vUa5XHZYm80m6vW6w0fjtVQqxWKMwpLWMFB945NJHxZ7j41OrddrVCoVrFYrNBoNtFotNJtNx9vNZoPpdIpisRjCGDUW45ygJJRFR9r+sAaQYmy322i322i1Wo63q9UKo9EIxWIRd3d3W1NfpJcci1Fzkv7PzvWqtyqVCpbLJfb29rC3t4d2u+2ir/f39xgMBgDwTE7tu32fVZf5KFPNjwWp4KPI1+Ea0rMAdKAWCgXu0Bxqhz7H975tbfJR3GQXRVZoldnW61KDzYbdfSsirBVrhS0L+Xjnu+Z7j68NdskmP9M4INF49aVTXjJ6p3i2yamVEx/PN5tNaHmmjUpS4QZB4EK2dtBFeVpZeZgGY9w17XMfRvJXFSvHYlqML8HDqIki6h1Wp/B5GiUoFAqhSAEdkGKxiPv7ey8+36SVBWeUcRNHPn3DtpXL5dBn4iRG4nt4eHCRH8UYlcZ8CTlN8twog1JljQYBjTxNCfFd5KGVUx+Poj7vijGNPlO9CSBk4FQqFadLda5YLBa4v7/Hcrn0RvKtnPiup8UXNRbjMBKbGqaU0/V67ZwPnStXqxXm8zkeHh4cPiUtgNa26HvtZ0uZ0l4WuO8lvgnTZxGq0qEC5kClBaiTKZWxDlJV1FHeURp8Ud6O9Q5831FrU5WqtpXRAubegafQMI0m3xJjxWfbmWb5YtxEEmcYaDtsdE6JvKNnTWzM3fIeHw997dS27oIxjRKmLG42Gzcw2W7+Vl4SIycSyrZi5nWfnP4cGPUeEpWmKh9i5TXK6nK5dL9tYeO2d2Udi4rPd90aJHyPT2eoXCo+jjvi4m9OnHYc+Lx8viPt5JKkX6xxx3cpRuIhRraxUqm4Z2jU/P7+PmRYxOkz+3kXnbrNYPe9ixMngFAUmdhrtRqAJx24WCyw2
WxCRp1vcvSNm7TYlFQu7HV9r7ZD65WIsVKpONmkHqrX627C1+j5fD53z6JOjdKbPj2UlLbpqCh7QB18puyoTziPl0olh49G7Xw+x2q1wmKxCPXVNnx2jMbNi5kKnpMw2Nco+z3m1zudjgNGa48NpwVPa943eZCsIsyKzYfTvksxWXycCOmVHB8fO3yTyQT39/eh0B7bG4dPlzL6sKUZtHH3RimJKIwU5Hfv3rnro9EI8/ncTZRxGC355CqLUooyYOMw2ndy8qtUKqhUKvjtb3/rBudwOMTd3R2CIHDKyIfRV8v0EjzMghF4vhqD3j9TI7/5zW+ctzUYDBzG2Wzmxah1BvZ9Ly2ndrzxHp+ckpg62NvbQ6PRwLfffov5fI7lcol+v4/pdIrlcvkMH6PMVtnG4XsJOd32DJ/xsdls0Gw2cXBwgE6ng1evXuHu7g739/e4ubnB3d0d7u7usFgsQpNJWoxfcyza51p5Wq/XODw8xNHRETqdDo6OjjCdTh3GyWSC8XiM+XweMgh052WtEYnCuAvZNkfNGfxs9ftms8HJyQlOT0/R6XTQbrcxmUwwn8/R7XYxm83Q7/cdRn4nCUbbprSYs8ipvodte/fuHU5OTtDpdFCtVh2+Xq+HyWSC2Wzm8LGeSfHp86IMODs3R1HmtJcCs5+jvqcNKZfLLld7dHSEh4cHTCYT3N3dOe8linF8XtS7d1G6ii9NNMU+o1wuo91uY29vD69evcL9/T1GoxEmk4kLRWs7oyIEPkxRAyst+YzFKD76jBJOKnt7e/jmm29wf3+P4XCI0WjkLHutX9K05bbnv5RCsoZGmucGQYBqtYqjoyPs7+/j7du3uL+/R7/fd54W8FjEzecqxqgB+tJK12dMJeVjoVBAvV7H8fExDg8P8fr1a8znc4dRDUEqVruVfxyml8AYJadxfQs8Rd+azSZOTk5wfHyMk5MT3N3dodvthib/fx7w6P5O2n8vYeBZgzhOofuul0oltFotnJ6e4vj4GAcHB5hOp7i5uQnVSTLVRR7ugjENpZkzfLqNDkin08Hr169xeHiIRqOBSqWCbreL+/t750jasahyEtUO+95dDLw0smoxlstlHBwc4O3bt9jb23M69OHhwaW3+Jmya3Wqb97IavD4KIucchxWq1Xs7e3hzZs36HQ6oUgdHZKHhwcsFgs3b/jmjKj3R9UhR9FOaa+kg4f38zcNn/39fezv7+Pdu3eYTqdoNBq4v793BtBqtQrlCZO2zb43TYgvyjKPaoO9X/EdHh5if38fb968wXg8Rrlcxmw2c4V4m83G4dNUlx18/NtXF8PwbxZDLeug4EBttVpu0jw+PsZ0OsV6vcbt7S0mkwmCIFxrwHfG8TOOh1nIF5GMw2v52Ol0HL79/X1noLPglykGTeNFRe/ilOpLYkxyP3lDPhJjq9UCABei1poJTdPaqE+SMZoVo01zR/Wj8pjprGq1ina7jf39fRwcHKBarbqVXPa5viXgOva2EfFlSUHbtuj/fBO1pkvK5TKazSY6nQ729/dDqXY+NwkPo/SP0rZVW3EY48aivYfvonFXqVRcUXOn03FGHdusKcq4sRgno8qLrGMxqaySyEMumqBR12g0cHd353DSAeEzS6XSM0crqm7SJz9xxz6kwafP9/WtldNareaM2dFo5AxWOiDEYcsi4vBFtXPbfZn3+bHCZD8rqeXNqMc333yD169f482bN2i32xgOh7i5ucF0OnXpEoYvyWRfzk/bYykqUpQUX9zzeI81rqrVKgDgd7/7HV69eoU3b96gWq2iXC67qn0AoXSeFWL+jrLg2Rbt1zRKyWeZR/HRYgyCAPV6HYVCAb///e9xcnKCN2/eOG9S87eLxcLlcS2WbYZkFu8rKUbfdcvHRqOBIAjw+9//HoeHh3jz5k2ohqBarYa8FOsR+T7HteGlMPL6Nj4CQK1WQ7lcxn/8x39gf38fr1+/xng8dikSFnM/PDyEJpo4jFGUNTLpw+l7dtRYrFar+M1vfoO9vT0cHR2h3+9jNBphPB47o4AhdjsO9ce2y/Z1Vl2TFiMQXppfq9VQq9Xw7bffotVqodVq4fLyEr1eDzc3N05mGS1IgtGOP58OyEI+I8j3TPuucrmMarWKV69euRT0xcWFwzibzbBarZycWl2jRp2vP/WavS8ttm16R//WGjGupjw6OnLG0MXFBbrdLnq9njOEojBu0zf2vVpjmhRflGEfx0MS65j29/ddO87OznBzc4PBYOD0aNScmAaf1QdRlOlgU9sIe13JNoTCTGt+vV6j3+/j9vbW5d9pGaq3EudRRQlrlPJKiy9Oyfs6ulqtuojHYrFAr9fDcDjEeDx2tRKsd9psnq+m8Rk+vnfvkpaLem5cX+p3K5WKwzydTtHtdh0PacnryhMaaFYR+ZSG7Ycs/NNnJX2GxVir1Vx7b29v0ev1MBqNHMbN5mmjMR38UfVZcWNmF5y+SSzO0ONnOiOMdgwGAwwGA4zHY8xmMxd61uikYvRFfOIMhp9rLOrf9JAtPk4mLLgk2fOSfG3R3y+BLwqPfY+NLGnUgJNiv993uoapkn8FjPwende48W//ZnSV9aA0YFk/SflVjEnlNA5fVoM96rnaNh8fC4WCk8tut+t0jRquFqPW8EUZBrY/fcZgGmxRvIrDTXzUnay30xVd5KF+J+r5cVG8pHKaOe0VBVLv8SlbKiQAmE6nqNVqmE6nTpBZ7FwqlUKTDylq8Oi7rQVqVzPEka9AVTFGCZVGXzihcA+G6XTqmM5oFnOgfKZavNsY63tvXD3NNoz6bH2uDTPqckvW8wyHQ2w2G5fO0/1tGMrdbDahVTTafvvZR1nC0FYOkmDku/g+pl6HwyGWyyWm06kzCuhREyNTKfb9vgna17600bskGH0TgGIE4JY6DwYD3N/fO8NAVwZRVolZV2hGkR37WTBafFHvi8NXKDzuadPv99FoNBy+zeapaLtUKjlnRNtoI11R+PRa2rFIjD69EqVniEv7hjUT9XrdFZHqM4iR3/HxcJvxpRjTkI6xuHFs+W1XdREj5wxdGEMj0MqtPtdijRqL7Ns0xoGmFu07rbOp/aArgmn83N/fo16vO0NI28K5RWsprQGk77L9beeNpBhVTqPIFwGy+LiwoFarOV2qfUHjh/h0vrD4fLj4mTogbixmPtvLNwlrIxW8GgXVahWlUgn9fh/dbhfn5+cYjUYuWsCIUBAEaDQaruBLC9l8lrRVIkmUZhS+uEnLYlV8DMkWi0X0+33c3Nzg7OwMk8kktFMs7yPOxWIRCknbd28T0rQRoCiMFqcOKhqsTOGVSiX0ej23/JlRu9Vq5TY2ZJ8EQRDaDC9KdpR24WEURt/7FCMHaa1Wc2HaXq+Hh4cHN3DpoXCgMpdNAzZusL2knFocWTCSl/1+300kihGAW83HlYv00qImEpXVqDGTBl+cnPr6kvhYO1GpVJycFotF3N7ehsYi8XGvEaYW7KTl6+9d8SnGqNSMksXYbDZdHUW/33cTCY11lVPgkedMLejkGYfRh3eXsRj13ShHoNFo
PJPTQqGA4XDooj5BELgoMx1mdVB8GFVO7QrItPj4DN+7LEa9Tj7W63VUq1Unq8TU7/dD+/jQ8OEc6uOjbVOckZdm3kjCQ97H34qPOnU4HDoH/+bmxukT3k8bgPp023uUfLZIHO0U+fFFEKzConGgg3Czedx9c7VaYTKZuJCtLXLivUEQhIwDHzPjAKcVZjLY553zdxy+9Xrt8G02G2fY6b06YG3qJG7S9lm8cQoriqJCn75J2e5pw0HFiXKz2bi9NViPwO/rbro+zycJb7IooywYGV7n4GMdAScShp/t/lPExt8qw/qutF52EvJ5QlZm+T/dRl5TyYz0sE6LhgL/T9lVHFEGTpSs+vogCcUZBT7Dx+4qTr4RBycM9Si1bVnlNKuMWow+2dRr9KRpkHJCjMKoukz3A6JO+rkwJtWp5AGNUu5dBAB3d3cOo93cT/Wx8nfbWPRFE7LIqX5ff8c5zbp7ukZ0mN1QPmpqj8/hqQcWo45X2zb9O+28ESWnykNfSpYBgfX6af8ePs+3ylnnDsq1/j+qfy2+bbTT2V7bBo1uGc9O4GcKLD0tTadQONST1lNe4zzLJO1Kgs83KLbhY7SDgkyDjQJMxcWJ03rYtHStcPn63LYvDda4e+3/ojCqpU5FpCufqGx18tSzaOJ4GGUEpqG0GNXwoYFHpctBTT6qV6Men3pnNIKiogU+Pr4URl/fKka2U9MKNH7ohamRriuEtEbIGnpRGJO0OQ0+H+nmqMSnxoIaBkA4HK8rXyjjPnw+w0TbmsQzjsIY9V2dDFSH0ABSx0R5qBjJQ+0rpoi2YbTt20Vek3xXDdhisehqP/lDnanOpA+jbsyZRE53xbbtezYiojykoU45JEauYFN51qiIpvl+rrGocup7pjXQ1fhRfnBu1F3lgfC5c+pMKr64d9s2x0W3Mqe9fNesl0lLdX9/321qxBUy/X7feZkUZp1g7Dp/tYAVnA03RlmCSa1cOwHb75LxNOCIr91uu6X63HhLCw6JjcxVQ4c4dUfgKE9e26G/01rxPiWnn6MwtlotLJdLLBYLXF1duTw8MalAK+/4XGJUHEl5mJWSYCyXy+5smXa7jdVqhfv7e1xeXrqiPCok5aNvGSrl1OfRxHmXu3jWPoz8mwYLl7Xv7e3h4ODgGR8ZgQXgMKoCUj4qxqQy+FL47DOJj8uh9/b2cHh46CJZ5CHHI/B0xlehUAiNU/aBxRc3ifD6S41F+wx1JBTjycmJw/jlyxfM53O3whJ4XOHFCYi4mA7i5GINH/tuXwQqK22L+tBpopzu7+/j1atXLmJwfn7u+GgP9FRDQFdFJcHoa8suOshnOJI4nriNBldZko+fP392fFTdogar7tuUBqNSFoxRcmqNF+qavb29Z/jOzs6e4eNzdHECMSaV0yiKq0/LvMmhbxCox14qPR6o12g0cHBw4CYUAKFdRqNWkpDp+h5GF3gPPVPfRGNDfmkHrU9wiY91Hs1mE81mE3t7e+h0Om5SYK6Zg5J/21CkzWlSyPXdxKe4fOmVNGR5qAJMjEwdtFotNBoNt5lhEAShwm0VTttXxKgDRvfjSMPDl8CobYvC2Ol0UC6X3WZibDP5SP4rRlXE/Jt8tBi1bVET+i4Y9ZkaBWm1WqjVas4RaTQazhGhYaNttvuA+Pho91NJ0saXwgcg5PETHxVuu90ObexHfBo1UBnjdVKU0t0WbXsJjPo8jbp2Oh1UKhW3PxoN2H6/D+DJYGXNEqMGxGaXuhObrdN6aYz2+1YPanSj3W67zfC4o/NqtUKv18Nms3nmBGv0kv+3fMyCMS3WbbKqfGy3224sHh0d4ejoCOv1Gt1uF5vNxp0AQF4pRgDPMNJh/pp83KZPNTLFsXhwcIDj42McHx9jtVqh2+06B0PxWHz27EDiiwoCpLmulNr40ciEzwJjYWSj0UCj0cC7d++ckTAcDp1S4YRCA0GLngCEPGu17qnwbLW7j7JY79ag0MhEEAQOHzfcev36tZs8p9NpyCtWgY/Cx/dxwqEQsQCV7/Vh0WtpDKEkGCnEjUYDr1+/RrPZRKvVwmw2A4Bnm1ACT8apRv14nc9WjLbSX9vjw5iGkmCk4dNqtXBycuIw0shhTp0pV/JR01s+jPS2lY/6Y+lrYCwUCq6WgLi4iSEdEfbDZrN5hpF8ohenkZ8ojJZ25eM2fOVy2W0I12q13PEOnU4HxWIRs9nM4SJO4MmY1cim4vONRR8GO4mo87YLRk23cv+Xer2OdruNw8NDt3M8V8z4MJKXvKZba7APOFa1cNZGsKIwpiF1bNSBAx75yALeer3uopI0YhuNBmazmStkJlZiseUGNB5IOj6j0kIvgdHWrhAjxzxTP8TIXeMPDg7QbDYxm81Qr9ex2WzcCln2HVNHxB+Fke1IQlG6KIqSyGmx+HjOWqfTwcnJiTPuyEPu/+aTU9U/fKY6ADS84hxIX0TqxdNePiVA6x2AU0SvX7/G69evXa0Pz+woFototVouZTKfz50Al8vlUK2PRoA0EqQM533q2fEnLZN9namRgiAIcHh46I6tePXqlWs7w7M0HBii5SAlPt0wjlEuG8lRZaWRkaiIT1aM/NuHcX9/H6enpzg5OXmGkdvNc+niw8NDyNjjZKLeQhxGK9h6PYsy2oaxUCjg5OQE7XYbr169wtHRkWsPz5ehl82i/IeHB7d6TWtfonhoMSopX7W9u2DkZy1MPz4+RrvdxunpKQ4PD90hkPf395jP504ZT6dTt808FVm1Wg3xUHlp+UjsqrR2jRRE8VDrXo6Pj93xFUdHR6jVas4wXywWqNfrjodMl9Cg4Oonepo+PqpxpOSTK9vurBg1MlUoFNwEeXJygpOTE7RaLadr5/O5i1py3xRipOFLjPytBhDwfJl2XBQvC/l0sL5zs9m4lPrJyQnevn2LVqvldNHd3Z2LlPD8Ln6/Wq26VDxTeuoYa0QzCQafQ5+EbDSYn1VnMwhweHiId+/eodPpOCN8vV67iBe3Y6BRUalUnMNCHmqkUqPpUf29qyMSJ6fEV6lU3Hh7+/atw0fnyeIDnmqBedwF50vVqxoFimqTxZSEjzuf7RX1Uiqg2WzmwusUWi00VMNAhRSAywHyWaoQ1ErUgWQjU3Hhv21ko1t8v+KbTqfuqAPuNMp2cRKiwrEDRL1sXcmm+Mj8SqXiXUq9Cz4f1m0YqVzVc6MXrthpjAZB4G2zYmRInpOtKq9deaj4+DwNs5IHs9kMpVIJi8XCrfDSKB29Z/V4KMvEzr5TL5DyzT7QejYrX1kw+owq9ToVI/cN0Z23lddURPxbU0Qa9bITiY2e2OJ93psFo51M+FtTicViEQ8PD27VGr1irkBUfJou0WfS6YryHjWCYHmofbErKUYdS4VCwWHkJMFrKlucaIhXn6n6Ro1uG8W1hrn2xy64fBElHUvAozHHlbIPDw+Ot4y+cbypke/DqKkxOiuajiav7Vy2K0Z9FjGTJ5wH5vM5JpOJ4yP1h5ZH0GkiH7WgnTpG+agF4dRt2zDugtfKqTqC8/nc7aB+f3+PSqXi+GnxkTd
WJpPis22y+OIotfHj8+70hQTCwl/dKZZpLwJbLpeu0p2AqED5PF15QcENgsB5sHyOFrzZmpI0YegoD51t4+SwWCwcPiqf29tb50VyEOpqBWUqn1etVp1QM6oAIISPOVKb91UjImm4k++10RWdLDVSx2MO+PzxeOzayzZVq9XQSiJNIfB57BcfRtZJ2QmJfM8ySC1GvkflarPZ4O7uzu3ITYxUvja6ocahGrPKR4tRI36csIDnBe9ZlJAdi3wOrxEjUyOqTJii1X7SyJ31tIiRvKWM8LpiYF9yHPqiI2nwAc+NKLZzvV4747xcLmM6nbpogUZ/qaRV2apXvlw+bp5HvusYJVY1/Hx6JUst3jY55dijnM5mM8xmMwRB4JxJG6UiRv7oqiGmj7RQlo7HarV6FsWzFFdAGkWW9/ZvtmM+n7tUHiMfNNQ1IsdVYEyXqTHE1Iv2C4CQA6m61CdXWTBGzYv8rCk5OiLcGJUbxSpGjkFG8DgP1mo1zOdzNJtNN9/xHTrvcE61RqfFmNQZ2TYWSYyuMiNQLBbdnGHxAXA8pM4sFosOHx1hNa5Ye0k5j2pPEnw7p73Uwt1sNu50dioRhtMp3EDYYmM4k8Jyd3eH29tbAI+TJg86pXVMYykIHnPxrVbLdeh4PA4pJY1OJFW8caE0WrKj0cjlkHU3VdbDKD6eCk7hmc1mGI1G7n+MimnOk8p5sVig2Ww65TyZTFzbdIDaAsJtpIKhkyX5R6z0Kmu1mgtTTqfTZxgrlYorTAyCAJPJxLWVg5EeAN+vGzzW63WnmGjUabFfltRQHEaGVlerFVqtlotuceffKIynp6chjLrLs8VI5cXBTAPRF9mizKY18qyS9mFcLpdOUTKdR1lVI5XG0unpqTNsJ5MJZrNZKPJzf3/vjB1VQMQIwBnrWqiYxduM46EWajebTYeZY5CH7HKyo+44Pj5244e7kgOPeohjsVarOWeLk+1isQhFEGy/Kz5bLJ4VI42b5XLp6iV0sYHqWQDOiDk+PgYAV/fEMV2v112EmvqG3rTW7qmjpnolSWrMR/Ze1bGcLDXlsVqtMJ/PUSgUHB+19KFcLuPk5AQAXLSW/GG5wWQycWPQGssa1bRtyzJnEJNvXgTwbKUocXCc0BEhRgYFiJGGz2QyQbFYxN7eHu7u7jAajULGPEsudO7Sdil/02LcNmdYw5/XS6USJpNJCB/LBxQfAyalUglHR0eYTqcYDoeujbQFyEPqTGtkWnxxtFPaS1/IRlAxVKtVp2jpZegBiRRMFrrVajU0Gg0AcMaFDeUIseYAACAASURBVI3SYh+NRiiVSpjNZqHQNj1vjRzYpdVp8NmOJT5OAHoMgE5sio8nvFerVVfQdnt76/oFCE+UtJSLxaI7QVzxqDeTBV8S/IpRJ0kqSouxUqmg2Ww6jMvl0oU92TYKOI2729tbFItFd36NKnAN49vwZxqKMggVI1NeVDoAQnJKGbUYHx4eHEZ+j7UkxDocDhEEAUajkYsKBsHzdMJLYVS5Zd9xK/n5fO4cBkba1LMqlR53OW632w4rj7vgxASEd/gulUpupdHt7a3bWdimHNimLBjjdA37XfGxj6PwdTodp2uWyyVGo1EIH3lN2b65uQHwGLUeDAaRqbGs+OKIGOnxMwrLSVu3CmF7ueKtXq87x2k0GrkaKLaT9xcKBVxeXiIIHneGJg+1pjLOIUyKI8450/SIlkcAT8d2KMZGo+GKoVutFoIgcDqV+kpLJDabDc7OzlAsPu7wzRMFNKug7fSN0bQYVVaBpxWh6vDyf5RVRm6C4PF0g8PDQzSbTbTbbZTLZTcW9cQAzXR8+fIFhUIB4/HY8ZERa3WYs2L0kc5Ny+XSlS5wRTeNPBqAjEg1Gg0cHR2h2Wyi0+mgXq9jPB4DgDvtnfXA1Cfn5+fO8aRMWzlNE2XOlPbSl7FTNUyruXXWT+i5MlRSnPBZ0NVut10UiEpa62eYJ+z3+5hMJlgsFvj48aNblsx2qDJilCgLPsAfvqT3x8nE5h4tvmq1ik6nE8JHZct+YT8EQYBer+ciSh8+fAjh0yiPb0VKUozaJ1QWPoycKKkstmHkD71Upg3Iv2q16jDycMIffvghtFGiKhGtqUlDGobmQLcYWefBydMqfT6H7eDkyR8O7PV67QyeSqWCWq2GIAhwdXWF4XCI2WyG7777LrQrrSVdAZEVo45F5SPl8/7+HovFIoRRDRU1ELjsX5UXMfIk8SAIcHFxgcFggP39fed52rofviMtxjh8fK6+h+NE5dSHb39/323doPiomKvVqsN3dnaGXq+Hvb290ERscdADB5628kiDUf+2UV2rb+xYBBAaizQO+EPjQnUIx+Jms8HHjx9DGOlkWc8aQCilmwajkn0m203++jBafdNsNnFwcOCWU1PfMEKtONfrNT58+IBut4v9/X18//33LtLM9vAdwFPpRlqMVlYVo6ah1In0YaQT1Wq13HL/V69eAYDLAJA3fMZqtcKHDx/cKfcaGfVFzn2p7ST4ov7muFSHnFiUGJ3knH9ycoLDw0O8ffvWOWw8lJfpXo5xxffhwwcXeOB7NcpFfPaIE6XUxo/NpWlen51CK5ZhLzaQ1ion+yAIXMhvsVig3W67peP7+/v4/e9/71Ys8NnVahXfffcdvv/+e1xeXqLb7eLq6ipkLbJdtLbTWLhqPNnrFp9V8nomEI0S4qNy/eabb9xS1d/+9rduICu+v//97/jhhx/w5csXXF1d4fr6GuPx+Flxo+LbNe2lOBWjhsZ9GIvFosvP0xL/3e9+5ybQb7/91i3xZJur1Sr+8pe/4Pvvv8fZ2RkuLy9dLYNuu691OGm9FDuwfXLK/qShzIGim8BReVKp0pj7wx/+4Pj47t07FAoF1Ot155lXKhX87//+L96/f4/Pnz87WbV1Gl8bo07WpdLTsQZRGCn71WoV//mf/+k21Hvz5g2CIHiG8c9//jP+/ve/49OnT7i+vnbnaPmKEZW3WfD5JhVV7qwH8OFjGomTbL1ex3/913+h2Wy6VY1BELgaBBpr//M//4O//e1v+Omnn9DtdtHr9VzEl20hD6MMoyQYo/QpJzhGJ7kHFfUrDQJOmHd3d27ibjQa+OMf/+iMoZOTE2w2G1f/xRQSMf7444/o9/sYDAZugtV2KsY0FKVTladMPTPNzrmC2YMgeNqeggZuofC45cgf//hH1Ot1HBwcuBID6l/qpD//+c/461//ig8fPoQiXL4IpY2wp+Gj4rIY1+t1KJLOMaIY9SzEWq3mNgskHw8ODrC/vx+q2eI89H//7//FX/7yF/z0008YDAbuHC1LWTBaOfXhC4LApWTJ07u7O1d/B8Cl/1lyUK1WcXh4GMK3t7fnnHkaQYVCAX/729/w17/+FT/++KOLprM/tU2KL24spq/sMqQPVyYDTykM9dSAp1VdVFI8DXw0GrlUwXq9Rr1ed8tXW60W3r596w7zA57qT+gNWE9z1/ClDWNafJoqIS7gaXUQJ1ieQDwajdxJ70EQoNls4vj4GLVaDc1mE2/evEG9XneTkcWnho
/FlzZqEIdVB4Yt/NUIHwV8sVi45eDj8dgVoHJLA2Ks1+t4/fq1m2BYh8I+U4PupTCSrJxajKok6DXqajst3mctDFNAnU7HYWw0Gjg9PQ1NMDQqGKn72hg12kVcxKhGF4AQxsViETpz7+7uzsljp9PB6empS/udnJyEVvMx7aRe/NfAGIXPGlyKj/hZC0Lv8u7uDo1GI8RDFtIeHR25CZTjmDy0q1C+Jg/Zj+xn1XHq5bIPtOZyPB5jNpuh0Wg4J5K4yuUyDg8PQ3i0GJiLFF4Co0/PWN2qaUOfviHG9Xrt9M16/VjnOZlMXKSOS60ph/ysq8K0KNxXv8X37CqnSpwP6EhybPBHMapOXa/XGA6HuL29DS2iabVarp/29vackc+xx9S0L2quGNNSXNRP8WkkxvKQ+pT4BoMBer0eALhi/lqt5mrAuLUD09E0Eq2c+vDF8XCng019pNYXGUDi4NJVPfRGGd6aTqcYjUYol8suN1uv1xEEAQaDAUqlEj5//ozz83Pc3Ny4vS1IFHQN3aYRZBvO0+iIzZvqKjXi46Rn8U2nU4ePgjwajbBYLFwqqN/vo1Qq4dOnTzg/P0e323UF4y+FTzFZRatpSYtRU5lsh6ZXyMNKpfIMo+7j1Ov1UCwW8fHjR4eR+z/x3VEY01ASOaVC4gRA5etLmTIyxGJRi5FL5dfrNa6vr1EoFPDx40dcXFy4aAEjS18bo+WjxUgjiHKnRvvDw4Mz7CaTybPaLD7z8vISxWIRP/30k8PIIzIoS7tijMOnY5H41JAjPk03KL5arYbJZOKKKlk8yvdeXFxsxccJlDLD92YZi1EY+T9GTNUIYI2F6iEWZ0+nU9RqNdze3rq6LGKl0cjVOD/88AMuLi7Q7XbdKlbyLwpjGorSqfYzI1EqN7rwQeWUk2e1WsVgMEC9Xsfh4SEGgwGAJ0OQRbLff/89zs/P0ev1Qof3Kka2JwtGa8wBz2tQaMCy1IFOLvHq3Eh9w7mw2+2iUqng4ODArbpk8TsXzHz33Xc4OztDt9sN1aLGYUwqq3bOIEXhUx7SDtCxSHy3t7coFAq4vr5GsVjEwcGB6xcW57P+9P379/jy5Qu63W7oMPQ4fHF8zHy2lzJYJ1H1yrgSo1qthpZ0s/aAhV/dbhetVguLxcJthc3VQq1WC+PxGL1eD1++fEGv18NPP/2E29tbF21g8RgnM7tKwaZ2kuCzn0namYvFAvv7+yF8FDofvvl8jtPTUxchWS6XaDQaGI1G6Ha7z/AxYkTjRyMVVgjT5G8tLstTFfTFYuEGXKVScWkvKhZipeK8u7vDyckJFosFbm9vXfTg9vYWNzc3ODs7Q7/fx8ePH109DGuKbNTQeolJeejDZJWvetPz+TyEkR6HFs0uFgvc3NxgPp9jNpvh5OQEd3d3GAwGGI/HqNVqGAwGuLm5cQP08+fPTk6pjL8mRqUojAylM5xML401IdfX17i7u8NkMsHR0RFmsxkODw8xHA5RrVbR7/cdH6+vr/HlyxdXu2Ux2ok9LcY4fMDTpEp8zWbTRW64H5UWhj48PLgU6+3tLQ4ODjCZTHB4eIher4dKpYJer4fr62t8/vwZ3W4XZ2dnLrqgxf5sEydnbeOuY9EXFSkUClgsFmi1Wm4zOS6G0DT8/f09Li4uMJ1O0e/30W63MR6PcXR0hOvra5TLZfR6PVxeXuLz58+4ubnB+fm5ixLxGdswZpHTqO/pcx8eHtzu1fV63aWJOSapd87OznB7e4vLy0s0Gg3c3t7i6OgIV1dXKJVK6Ha7OD8/x8ePH9HtdnF5eemifZQFTV36jNCsYzGOp4yMEF+z2XSyyvpR8oBtPz8/R6VSwWAwwNHRkXM8KJ+fPn3C1dWVc5bJx5fEGHefDx+jU61Wy9VwUZ9y/v/hhx/QbDZdcKPX67mdr4vFIm5ubvDp0yc3FrvdrkujsZ806mPx0YCPotTGTxR4vpykVnq9Xken03FRDOuxcbfZcrnsiiYHg4HL1w6HQ/T7fXz+/Bn9fh/X19fO+teUBS1Kkqbcklq4NtRMsgOf1iZXc+3t7bkiZW1PEARulUahUHDL8WksFItF9Pv9EL5ut4tSqRRaLUdSZmbBpxh95FPi5CF35+ShdIqRVfkAXNU+01qFQsHhovHT7XYdv9VCV49eMZInL0HbMDIiRwOexJoRAG47BqYZgMdC/F6vF+IjN4f8OTBGyatGPjgWb29vXbRNz6Jj2JnhdhqBxEtcnz59Qq/XQ7/fdxhVVvk+fn4JjL70CfGxLoA8vL29dU4DDTIArnZruVxiMBg4fMTa6/Wc0u31es7j1mXYik/Tb1nSCVHetGJUr5oYDw4OnEHEdnEssj6IuNgHg8EA6/UavV4PV1dXWzECT+en8bpGR9NijHJE9D7Wh/JsKEaOGelg39IYnc/n6Pf7CILAi5G1WooReMpCWIyMlmRJe1ksill/s56lVqvh6OgIo9EoZMjyXtax0smiYXF7e+sizFdXV/jxxx9dPZpiVDm0fKROSDsv6t+KW/GxsL5er+P4+NiVCRAf8KjzyFtG76hbuS/Q9fU1zs/P8enTJ4ePq+X4jDh8vnonpcybHCpw20k0DHQbeRb10gigkQMgZAwNh0OX6uIkyxN9ucSNm2FReduUjIalGeZPauEqPt9g1SiSLqFtNBouhEl8xKUTHZfxNxoNt3pkNpvh8vIS0+nUbd/Opa3A0z4PUW1gCDspWeWshp79zZAlMWqokeFxi5E85JkuTDVcXFxgNps5jMDTyhgKvr7fYkxDSeQ0KUamCqgwFCPP5WFK7Pz83KVuuffIvxLGarXqlnkzRaKbL+pzBoMB7u/vXfSA6dvz83PMZjNn7AMI7YOj7doFYxQ+axQACB3JofhYC6hjmt/v9/t4eHhAu91GqVRyYXbKKfdyopHhwwc8pYCpa9KMRaubogyh9fpp1STllCti6UBqn7OfWYDO9A93UGYELAlGyj9TF2kiImy7xejTqUxVAnBntmmpBFMktu6o1+thtVphf3/fYZxMJvjy5Qum06nLEKjO0hV0FiON2iwYbfTcYmT6EoCbGzUdzvbxOXzuzc2Nw8gtSMbjsZNVZgkKhUKoPk3bsQsfk8gpjVem5QqFx5WTNKo3m41zPoiN+HioK+WcZQWU0+l06mQhiZxyTL74aq84b4Wg6Bnt7+/j22+/xcnJiYt40FNmFTiVEw8qJNFz6Xa7+PHHH50hxIgCLTy1lrmUV5VdGis+aoWJEq3L9XodwsdwKyMfGp4DwkvTFd/19TV+/PFHrNdrLz5aupvNJhRF4jPSkq9y34eRljpXbbEglEvxf/jhBzd5Mh3Awjsagcvl0kXr4jBqCkgxpuWfYtwmp3z2crlEu93GN998g5OTE1QqFbedwocPH1y9jl1ZxJ/1eu0w/vDDDwCwFSON/58LI1dXvHv3DicnJ2g0GiE+csdZXVHIFBKLCofDIS4uLkIY1+u1W/1EWX0pjHH4KJ86STQaDbx58wanp6dot9shfNzgUfnIbQl0z5/z83N8//33Tk7T4CN/Xxqj/l2v1
3F6eorT01P86U9/Qr/fx3Q6xXfffeeMURafEyPTKywn+Pz5M96/f58II1fsWoxpKE6nKk5GXBgROTk5wZ/+9CcMBgNMp1O8f//eFeLT4eDZV9wPZ71eu5Tz+/fvsdk87f2jGNWAeimM7PMoo4IYmdHgdgv//d//7VYu/eMf/3ApSDpNHIdcBa0Y//GPfzijQnWrjXwwak1e6JyZBl+UnBKfOjiNRgO1Wg1/+MMfXHTr/fv3rtyBjhN5yJWlm83Gpda/++67SHw+HsYZaJYypb181q0NidHK5VK9g4MDtNttZwCcn58DeNrgSXfAZRSFy/5Y/a5L5S1DeJ1FjyQNB6ehqBA9/6ZwlUolt+R5f3/fLcckPrabUbDV6mnvAw445rSj8Glfq+GSdoBaDHGf1RvjcnyeRlytVl0KC3gqvmQ9iXo39DJYwGb5oRj5Xg3JvgTOKDnl++lR8OTs09NT1Ot19Ho9nJ2dOQ/l4eHBGQMA3AoT3dtKN0hMIqe74kyKkd5gvV53KycVI9PHLC7k8nBi1OMxbC3BNoyqpNJiTIIPgPOYGWV+9+4dGo0G+v2+w8c0Ccci8HTECvExbJ9G11h8aXVNGoxsKw29ZrPpUuZ0tDabTQiP8lCdJjplUWMRwFeRU3uNpPWajFKenp6i2WxiOBzi06dPrl6EHj6j4jztnPoGeNpjjkTdYnWqpmd31Tc+A8FipBPAiNbx8bGr/fz48aNLdWmUhnON3YuMGG201bbJ8jErPn0mP+vzFB/HBmu4ptMpPn365E4M4NzCiBwj0TqHKz6S5SHwJKdR87aPMtf82AnLfqaVCzxFY2gIMK3Aba51+eZyucR0OkW9XndL4Dmgyfy4AeRrU9x9ltiB9vvsVPViOeAUH+uQWJNk8a1Wj/tZNBoNV2TKXLbFF2VYWgWbVaCJUzHrs2nQMOLGdB6NGGLmvhzA03EUxMg+Yb/okn2foGpb0ljxSTFaUox3d3cOIyd4KhrmmjVkPplMXNpLea9LvX0evLbNXnspjPpZxxlrsBjNYnuJkUYE+Tgej9Fut90RAj4+knwYvzY+Gis8noI1SxYfUyqUY4610WjkNvebz+cuWsv/b8Pn0xVp05dJMOo4Y+qAcqf1VLo7NQ0D1v0QHwti4zDatlncacjqlzg+cuGIHqfD1Dl1LzFqWpMr2rhEnOk8XxrWh8+nU7NijDKALEZGPlgGwHpYzi3kI+dFpoYWiwXG47FLRVsjPGputn+/xJzhw8dIMSNXt7e3zxbu0BCnrmGND7/HnbiZvlbDzff+KJxfZbUXP9sO5N/0Ns/PzzEajXB5eem2L6exUKvVMB6PsdlsXEiaOWoyfb1eu6MBNpungjCmWBSk5vyypBGIKW6QcyDRWv3y5QsGgwG+fPni8recJCuVSuh4gF6v52pF+B4qYHqmxKmTEiMwcfjSKKaoe613RsXCVWifP392NTHqoekRAYPBwAk7200FzM9a9EYeKkYaHVoknCVyF0U6iXEC4BLRs7MztNttF71hm1iExzQBC7/ZbqaF+Jn9o3IaBE9Fq3QOssppkj7hcxmNu7i4cFECbpQGwKVVuT/MYrHAYDBwkyuVrxZbJsH4EmMxCT5uqX91dYXb21ucnZ3h4ODA/V+VMKM/TINE4dMoJceiD589IiFJu7NiXK1WDuP5+TkODw/dJMAVWhYjj4vgWFaM6limxZiGrBMX5ZjSuOt2uxiNRri6usLh4aHTt0xbESP/5oIZjmWORU7GHMtayweEVwsqxizRO5va8+FTjNxI8vr6GkdHR84IYH3ScDh0PF0ul/juu+9CGMlr8pjXOBY5d/zcY5H4RqMR3r9/j16vhx9//NHpWjolnAeJz545t1wunT4iT8lH4qPuoQ5OOxYzn+3lE2R7jZMEFSq3VOdux1ypoOeAaJEpPbQgeNwDh+Hbvb09F03gMznxtNttp8wo8GkEeVsVvI1ScMk6PTMaQMViMbRMUxUKw9eFQsFZwIPBwIXg9/f3Q/hqtVoIH/tW8cVZuJbUaI3yeFRh6TLmer3uzu+iQqKSYZ+Tt/w+U5rEGASBw6hywR1JO50OgKeT6+Py6NswRilcxUrje7N5PAOo1Wo5jMDTCiFi5J5U6snUajUUi0VcXV2hXq8DeNx8jLJh5VQxcvJ5CYxRXjWjIev12jkkrHUB4FZj6KRP44DvYCrl6uoqNBYtRq642RVjGnwshFyv17i4uMB4PHYnQwPhA3nJb7tKjZh6vZ5bfkt8TJf58KkxGOX174qR/6OhygUginE8HjuHihjpbFJfpMG4Wq2wt7f3DGPcRO8jm8b2GT/KR+rHy8tLTCYTd/iwxUjdSj3CyZAYu92u21me+oYGBDECT5sEZp0zfHyMImKkc3hzc4O7uzt3DpvFCDylfnwYb25uvBiLxcdjaHq9HoIgwMHBAYCnzU5faiwSk8VH3d3v97FYLFwpSxA8ns3F+RBAaIsF1gNzLry5uXG6lfi4n1wcPjWKomin1V5xjKZA01vWPCzwtCMrBxItPLXGAbjVDRyUXOfP3SG5mRdD2pPJxHm6uulZGnyKwX62Yc3VahUy5sh0hs9JtF6JT5/NGotKpeJC07R6gyBw+H34sljyVmi3GXq0xJmqJAYaRIrRZ+wFwdPW9MTBlSa8hzwkdqaWbAQoKUXJqeUjn0tZpBfNvzko+R1VSCqnwFPhHjHqjsJB8FhvE4UxrZzGYbTRMpVV8oZGDicbLVjn5EmM5LEe3MrxNp/PnYPDuikdi7tg9OHzTTCqMzQqSzyMxvLdHIccq7qEmhitrlmtVm5zxG340kwq23ho/06CkX2x2WwwnU7dajCSDyN5mARjmnMEte3b9I5i1PE3mUy8GNnf1E1RGLnCTTHyMO2vrW/4twYEfBjVONfSELZlNpuFMNIZsRgZhSdG7gfFw7KVjzblGUc+HeqbNxQfIzRMt1N/Ep8WmGt0UvHRyCE+Rru4f5zFt1wuXb3iNqd5p7SXrwMolCo8nOzYKb6dHnk/Q3SbzcZtxkYvZLPZOG+LxdDcK2YwGLjDNS8vL3F8fBwKyafFl8Qg4Of7+8cjOrR4loNIPQpaxVTgNCboSQJ4ho9LN4lvb2/P4bPPTENRlrv+T3nI3USZgmSRL2sH7Jlc/D7vIUZ6muv1OsRDCjALchVjWh4Sg5JPIdnrjHRwstMdkTmwOIEy7EplwqNZqOgYobN8HI1GPytGGnjWWOcxFkkxMiRPjAAcRl26e3d3h9vbW3fiNjFqyPql8Ok1Vbqs/VFdw4hkFL5CoYDJZOKiAcDTWPThazabz/BpqvprYGRb02Jk6ofp28Fg4N7L1UOKcT6fYzgcuk3qmH7S1FFasg5fFE7KLFceKsY4OWV0JAiCZxipb2q1mptXuD9Qu91Gp9PBxcXFThjjIsv8rAYQMdIwiMLIsasYgccUdb/fd+OJEbp6ve4Mh9lshl6v5w7VVow0GLPStsAHo8g+fOyLOHzkIR1SHgjeaDRC+LrdLvb29hy+g4MDrNdrVwsVR5nTXgSpTNfwLP+vnjW9bVWE3ANA7+dk
QQ+jVCqh0Wi4VFiz2XThMwBoNptuwmUulTv2UlmkoSh8th+AsNepoTaNDKi3StysbaHxUK1WQ/gAhHa5ppBbfFkEOcrwsbz0TZzKQ/ZPHA8pzKzx4vcZamZNFHkaBEEkxjS0TU594VsgHDVRnuryX95Hw41yyhRlEDzm78k3nufWaDQAwKU5rZy+JEZ7n5LlY1KMlFWmSwqFgjMQ6HkRI9Ocu/AxCT69ptHmbfhUTnXHa25aCiCEbzQaucgWlfXXGIvbMJKyYqRnTB0DPBpAANw+OrVazRlSw+EQ4/HY7S/zEmPR4tKxqOQbi0wZsa8sRkY0eSo66fDwEMDTER8cc9xkdjwev7i+sXh4j17XSJ7q1LixyIUHXB5OoiFLw+8lMWYZixYf5xI+y2ccMhpJ57jdbrv7aNhwU0uuelR8h4eHzsjdZsTudLZXlNdiQ9RqKGhjKNw2ZKiFdqyI58qvQqGAt2/fhmph9vf3sVwuUa/X3coPrm7xrWjYRlFeyjbvJUooogaF1sboZmPv3r1z4T7FxxViPnxpw7Tb8MR5LTTEVIDtb97DiIdi5CD2YWQNA5fNa7otTXRrm5xafL7ol/1/FEaV08lk4gqhv/32Wy/GVqv1DKNvJWMajHrNF72MUjZR/EuKkfhYpMlCatZM7YIxLgria7tPQUfh4mfiq1QqrraLq28UH4s0iY/1UruOxbQY0/AwDiM34eT1arUaktPVauWOJiBGjeSmkVNSlL5JMhbVefR9lxg5eRIjZZapPJ53tre3h9Vq5Yrld8Xomwui7vHNi1E6VbFvw8iUs2JkVE8xbjYbNyZfYixG/a16SA0/nz5lvxMfazC5U/tmswnxcDQaodPpOHyUU6a8iC8u8LHT8RZRnqY1ApTZUbls+30WcVGpbDYbHB4eYjKZ4OrqCqvVCgcHBy7cR2XUaDTcYF6vHyvio9qZFF+UFe/DnAYf8HSYHwCH7/r6GsvlEvv7+9jb20MQBG6VFU+hVnx8ZlKywmhxKb98A1YFVzFbASfG5XLpFG8cRhoF9XodpVLJ7aczHA4z8TAKY9z/ojD6Pq/XjxvrBUEQwsgzo7h0s9PpuNOl2+12JMZdiIrRF+GKUkIqmz4+WoxcMh+FkUcuWIyMJGTByLbsgo/f12hJFD5OLK9evcJsNnNySnzc14sGHqO2ii+tYZAGI2kXHhKjHYudTgeHh4colUrodDqOh3Q0V6tVZjnddSwq+fQNo8cA3ESocnp5eYnl8nEz08PDQ7fjOY2C4+Njt1KJOjUrbdOpik8/q5GjtWOUWR/G+Xzu+HhxceGcq6OjI1efx2L+k5MTh5FlKGlxRcmpz6jVzzoWrT7VyBcLnbnz+mKxwNHRkdvN2uKjYcczRJvNpls5tm3OyFTwrKDioj/bvBLf30EQuJVS7GhWd1er1dDZYDzNlkKgHg6Zy5qLpLUG1lJU69y22/7PKmIfVsXHSNhsNgttda77I/HQVN4PwG1vDsDtppwm1B7Hwzhv2odL02J6Tc/tKpVKbhmjYtxsNpE8VOufK+JeCqPFEhWG9eFVYlRrs9mEMNbrpx/T4wAAAYJJREFU9dD+GwxR/9wY7Vi034mTVYuRPI3CGMVHficLRitbvoiW/Z8PH4BQISV/szbPh487Ilse0rPkczgWLb6rq6vEGLVdUTzk/6J4FaVvLEYuN46T02q1GnoejxvIKqfUqUnGoq1ftBEC2yfAk75huoR8LJfLzzDSgGcEhP1fqVRcmUEWneqLMMSld3w4eE1lVfuMOpUOsw8j01mKUfmYFeMuY9Hii9KzeiCxjkVGlhWfzouqa2i4aq1lFAVpvJQgCG4AfEz8hX8t+u1mszmJu+EXjg/498e4FR+QY/wF0L+7nAL//hhzOf0n/btj/IXjAyIwpjJ+csopp5xyyimnnH7plG5tbU455ZRTTjnllNMvnHLjJ6eccsopp5xy+lVRbvzklFNOOeWUU06/KsqNn5xyyimnnHLK6VdFufGTU0455ZRTTjn9qig3fnLKKaeccsopp18V5cZPTjnllFNOOeX0q6Lc+Mkpp5xyyimnnH5VlBs/OeWUU0455ZTTr4r+f6qLNc6DmHDbAAAAAElFTkSuQmCC\n",
+      "text/plain": [
+       "<Figure size 720x216 with 30 Axes>"
+      ]
+     },
+     "metadata": {
+      "needs_background": "light"
+     },
+     "output_type": "display_data"
+    }
+   ],
+   "source": [
+    "n = 10  # Número de frames para comparar.\n",
+    "\n",
+    "plt.figure(figsize = (10, 3))\n",
+    "\n",
+    "for i in range(n):\n",
+    "    \n",
+    "    ax = plt.subplot(3, n, i + 1)\n",
+    "    plt.imshow(prediction_vali[i].reshape(64, 64))\n",
+    "    plt.gray()\n",
+    "    ax.get_xaxis().set_visible(False)\n",
+    "    ax.get_yaxis().set_visible(False)\n",
+    "    \n",
+    "    ax = plt.subplot(3, n, i + 1 + n)\n",
+    "    plt.imshow(autoencoded_imgs[i].reshape(64, 64))\n",
+    "    plt.gray()\n",
+    "    ax.get_xaxis().set_visible(False)\n",
+    "    ax.get_yaxis().set_visible(False)\n",
+    "\n",
+    "    ax = plt.subplot(3, n, i + 1 + n + n)\n",
+    "    plt.imshow(frame_prediction[i].reshape(64, 64))\n",
+    "    plt.gray()\n",
+    "    ax.get_xaxis().set_visible(False)\n",
+    "    ax.get_yaxis().set_visible(False)\n",
+    "    \n",
+    "plt.show()"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 120,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "out_dir = \"{}/test_simple\".format(base_path)\n",
+    "\n",
+    "if not os.path.exists(out_dir): \n",
+    "    os.makedirs(out_dir)\n",
+    "\n",
+    "for i in range(time_steps, frames):\n",
+    "    scipy.misc.toimage(np.reshape(frame_prediction[i-time_steps], [64, 64])).save(\"{}/pred_{}.png\".format(out_dir, i))\n",
+    "   "
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": []
+  }
+ ],
+ "metadata": {
+  "kernelspec": {
+   "display_name": "Python 3",
+   "language": "python",
+   "name": "python3"
+  },
+  "language_info": {
+   "codemirror_mode": {
+    "name": "ipython",
+    "version": 3
+   },
+   "file_extension": ".py",
+   "mimetype": "text/x-python",
+   "name": "python",
+   "nbconvert_exporter": "python",
+   "pygments_lexer": "ipython3",
+   "version": "3.6.8"
+  },
+  "toc": {
+   "base_numbering": 1,
+   "nav_menu": {},
+   "number_sections": true,
+   "sideBar": true,
+   "skip_h1_title": false,
+   "title_cell": "Table of Contents",
+   "title_sidebar": "Contents",
+   "toc_cell": false,
+   "toc_position": {},
+   "toc_section_display": true,
+   "toc_window_display": false
+  }
+ },
+ "nbformat": 4,
+ "nbformat_minor": 2
+}
diff --git "a/Scripts/Notebooks/Predicci\303\263n Modelos.ipynb" "b/Scripts/Notebooks/Predicci\303\263n Modelos.ipynb"
new file mode 100644
index 0000000000000000000000000000000000000000..decaea72d2852901bda14a73118b96448bb6303f
--- /dev/null
+++ "b/Scripts/Notebooks/Predicci\303\263n Modelos.ipynb"	
@@ -0,0 +1,444 @@
+{
+ "cells": [
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "# Librerías"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 2,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "import os\n",
+    "import sys\n",
+    "import tensorflow as tf\n",
+    "import numpy as np\n",
+    "import scipy.misc\n",
+    "import matplotlib.pyplot as plt"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 3,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "sys.path.append(\"../tools\")  # Herramientas propias de MantaFlow\n",
+    "import uniio  # Lectura de ficheros .uni"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "# Hiperparámetros"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 4,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "num_sims = 2000  # num_sims - 1000 escenas. \n",
+    "frames = 200  # Frames por escena.\n",
+    "\n",
+    "epochs_autoencoder = 1\n",
+    "epochs_lstm = 1\n",
+    "\n",
+    "batch_size_autoencoder = 16\n",
+    "batch_size_lstm = 8\n",
+    "\n",
+    "time_steps_lstm = 6\n",
+    "out_time_steps_lstm = 1\n",
+    "\n",
+    "time_steps = time_steps_lstm"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "# Datos iniciales"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Ruta a los datos de simulación, donde también se guardan los resultados."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 5,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "base_path = \"../data\""
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "## Carga de datos"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Cargamos los datos desde los ficheros .uni en arrays de numpy. Los .uni son ficheros propios de MantaFlow, en los que se guarda los resultados de los simuladores clásicos. En este caso cargamos los datos de densidad de humo simulados previamente, pero solo los correspondientes al set de validación."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 6,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "densities = []\n",
+    "\n",
+    "for sim in range(int(num_sims - (num_sims-1000) * 0.1), num_sims):\n",
+    "    \n",
+    "    if os.path.exists(\"%s/simSimple_%04d\" % (base_path, sim)):  # Comprueba la existencia de las carpetas (cada una 100 frames de datos).\n",
+    "        \n",
+    "        for i in range(0, frames):\n",
+    "            \n",
+    "            filename = \"%s/simSimple_%04d/density_%04d.uni\"  # Nombre de cada frame (densidad).\n",
+    "            uni_path = filename % (base_path, sim, i)  # 200 frames por sim, rellena parametros de la ruta.\n",
+    "            header, content = uniio.readUni(uni_path)  # Devuelve un array Numpy [Z, Y, X, C].\n",
+    "            \n",
+    "            h = header[\"dimX\"]\n",
+    "            w = header[\"dimY\"]\n",
+    "            \n",
+    "            arr = content[:, ::-1, :, :]  # Cambia el orden de Y.\n",
+    "            arr = np.reshape(arr, [w, h, 1])  # Deshecha Z.\n",
+    "            \n",
+    "            densities.append(arr)"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Devuelve los datos de cada frame (canal de grises, 0 a 255) en una lista de Python. En este caso las imagenes son de 64x64 pixels. (64, 64, 1)"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Convertimos la lista \"densities\" en un array de Numpy."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 7,
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "Forma del array: (20000, 64, 64, 1)\n",
+      "Dimensiones del array: 4\n",
+      "Número de pixels en total: 81920000\n"
+     ]
+    }
+   ],
+   "source": [
+    "densities = np.reshape(densities, (len(densities), 64, 64, 1))\n",
+    "\n",
+    "print(\"Forma del array: {}\".format(densities.shape))\n",
+    "print(\"Dimensiones del array: {}\".format(densities.ndim))\n",
+    "print(\"Número de pixels en total: {}\".format(densities.size))"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 8,
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "Forma del set de validación: (20000, 64, 64, 1)\n"
+     ]
+    }
+   ],
+   "source": [
+    "vali_data = np.reshape(densities, (len(densities), 64, 64, 1))\n",
+    "\n",
+    "print(\"Forma del set de validación: {}\".format(vali_data.shape))"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "# Modelos"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Cargamos los modelos previamente entrenados:"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 9,
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stderr",
+     "output_type": "stream",
+     "text": [
+      "Using TensorFlow backend.\n"
+     ]
+    }
+   ],
+   "source": [
+    "from keras.models import load_model"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 10,
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stderr",
+     "output_type": "stream",
+     "text": [
+      "/home/jon/PycharmProjects/TensorFlow/venv/lib/python3.6/site-packages/keras/engine/saving.py:341: UserWarning: No training configuration found in save file: the model was *not* compiled. Compile it manually.\n",
+      "  warnings.warn('No training configuration found in save file: '\n"
+     ]
+    }
+   ],
+   "source": [
+    "autoencoder = load_model(\"autoencoder_model.h5\")\n",
+    "encoder = load_model(\"encoder_model.h5\")\n",
+    "decoder = load_model(\"decoder_model.h5\")\n",
+    "lstm = load_model(\"lstm_model.h5\")"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 11,
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stderr",
+     "output_type": "stream",
+     "text": [
+      "/home/jon/PycharmProjects/TensorFlow/venv/lib/python3.6/site-packages/keras/engine/saving.py:341: UserWarning: No training configuration found in save file: the model was *not* compiled. Compile it manually.\n",
+      "  warnings.warn('No training configuration found in save file: '\n"
+     ]
+    }
+   ],
+   "source": [
+    "alt_ae_model = True\n",
+    "alt_ae_model_number = 1\n",
+    "\n",
+    "if alt_ae_model:\n",
+    "    if alt_ae_model_number == 1:\n",
+    "        autoencoder = load_model(\"Modelos/model_ae_simple.h5\")\n",
+    "        encoder = load_model(\"Modelos/model_encoder_simple.h5\")\n",
+    "        decoder = load_model(\"Modelos/model_decoder_simple.h5\")"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "# Predicciones"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Elegimos una de las escenas del set de validación al azar:"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 12,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "import random"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 13,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "num_scenes_vali = vali_data.shape[0] // frames\n",
+    "scene_vali_rand = random.randrange(0, num_scenes_vali)"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 14,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "vali_scene = vali_data[scene_vali_rand * frames:scene_vali_rand * frames + frames, :, :, :]\n",
+    "autoencoder_scene = autoencoder.predict(vali_scene)"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 15,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "vali_scene_encoded = encoder.predict(vali_scene)\n",
+    "encoded_size = vali_scene_encoded.shape[1]"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 16,
+   "metadata": {},
+   "outputs": [
+    {
+     "ename": "ValueError",
+     "evalue": "Error when checking input: expected input_7 to have 3 dimensions, but got array with shape (1, 256)",
+     "output_type": "error",
+     "traceback": [
+      "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m",
+      "\u001b[0;31mValueError\u001b[0m                                Traceback (most recent call last)",
+      "\u001b[0;32m<ipython-input-16-3b21be2c1d1e>\u001b[0m in \u001b[0;36m<module>\u001b[0;34m\u001b[0m\n\u001b[1;32m      5\u001b[0m     \u001b[0mtime_frames\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mtime_frames\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mreshape\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;36m1\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;36m6\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mencoded_size\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m      6\u001b[0m     \u001b[0mlstm_prediction\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mlstm\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mpredict\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mtime_frames\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mbatch_size\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0;36m1\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m----> 7\u001b[0;31m     \u001b[0mdecoded_frame\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mdecoder\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mpredict\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mlstm_prediction\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m      8\u001b[0m     \u001b[0mlstm_scene\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mappend\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mdecoded_frame\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m      9\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n",
+      "\u001b[0;32m~/PycharmProjects/TensorFlow/venv/lib/python3.6/site-packages/keras/engine/training.py\u001b[0m in \u001b[0;36mpredict\u001b[0;34m(self, x, batch_size, verbose, steps, callbacks, max_queue_size, workers, use_multiprocessing)\u001b[0m\n\u001b[1;32m   1439\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m   1440\u001b[0m         \u001b[0;31m# Case 2: Symbolic tensors or Numpy array-like.\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m-> 1441\u001b[0;31m         \u001b[0mx\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0m_\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0m_\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_standardize_user_data\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mx\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m   1442\u001b[0m         \u001b[0;32mif\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mstateful\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m   1443\u001b[0m             \u001b[0;32mif\u001b[0m \u001b[0mx\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0;36m0\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mshape\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0;36m0\u001b[0m\u001b[0;34m]\u001b[0m \u001b[0;34m>\u001b[0m \u001b[0mbatch_size\u001b[0m \u001b[0;32mand\u001b[0m \u001b[0mx\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0;36m0\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mshape\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0;36m0\u001b[0m\u001b[0;34m]\u001b[0m \u001b[0;34m%\u001b[0m \u001b[0mbatch_size\u001b[0m \u001b[0;34m!=\u001b[0m \u001b[0;36m0\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
+      "\u001b[0;32m~/PycharmProjects/TensorFlow/venv/lib/python3.6/site-packages/keras/engine/training.py\u001b[0m in \u001b[0;36m_standardize_user_data\u001b[0;34m(self, x, y, sample_weight, class_weight, check_array_lengths, batch_size)\u001b[0m\n\u001b[1;32m    577\u001b[0m             \u001b[0mfeed_input_shapes\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m    578\u001b[0m             \u001b[0mcheck_batch_axis\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0;32mFalse\u001b[0m\u001b[0;34m,\u001b[0m  \u001b[0;31m# Don't enforce the batch size.\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 579\u001b[0;31m             exception_prefix='input')\n\u001b[0m\u001b[1;32m    580\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m    581\u001b[0m         \u001b[0;32mif\u001b[0m \u001b[0my\u001b[0m \u001b[0;32mis\u001b[0m \u001b[0;32mnot\u001b[0m \u001b[0;32mNone\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
+      "\u001b[0;32m~/PycharmProjects/TensorFlow/venv/lib/python3.6/site-packages/keras/engine/training_utils.py\u001b[0m in \u001b[0;36mstandardize_input_data\u001b[0;34m(data, names, shapes, check_batch_axis, exception_prefix)\u001b[0m\n\u001b[1;32m    133\u001b[0m                         \u001b[0;34m': expected '\u001b[0m \u001b[0;34m+\u001b[0m \u001b[0mnames\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0mi\u001b[0m\u001b[0;34m]\u001b[0m \u001b[0;34m+\u001b[0m \u001b[0;34m' to have '\u001b[0m \u001b[0;34m+\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m    134\u001b[0m                         \u001b[0mstr\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mlen\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mshape\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m)\u001b[0m \u001b[0;34m+\u001b[0m \u001b[0;34m' dimensions, but got array '\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 135\u001b[0;31m                         'with shape ' + str(data_shape))\n\u001b[0m\u001b[1;32m    136\u001b[0m                 \u001b[0;32mif\u001b[0m \u001b[0;32mnot\u001b[0m \u001b[0mcheck_batch_axis\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m    137\u001b[0m                     \u001b[0mdata_shape\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mdata_shape\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0;36m1\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
+      "\u001b[0;31mValueError\u001b[0m: Error when checking input: expected input_7 to have 3 dimensions, but got array with shape (1, 256)"
+     ]
+    }
+   ],
+   "source": [
+    "lstm_scene=[]\n",
+    "\n",
+    "for i in range(frames-5):\n",
+    "    time_frames = vali_scene_encoded [i:i+6]\n",
+    "    time_frames = time_frames.reshape(1, 6, encoded_size)\n",
+    "    lstm_prediction = lstm.predict(time_frames, batch_size = 1)\n",
+    "    decoded_frame = decoder.predict(lstm_prediction)\n",
+    "    lstm_scene.append(decoded_frame)\n",
+    "\n",
+    "lstm_scene = np.reshape(lstm_scene, (len(lstm_scene), 64, 64, 1))"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "import matplotlib.pyplot as plt"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "n = 10  # Número de frames para comparar.\n",
+    "\n",
+    "plt.figure(figsize = (10, 3))\n",
+    "\n",
+    "for i in range(n):\n",
+    "    \n",
+    "    ax = plt.subplot(3, n, i + 1)\n",
+    "    plt.imshow(vali_scene[i].reshape(64, 64))\n",
+    "    plt.gray()\n",
+    "    ax.get_xaxis().set_visible(False)\n",
+    "    ax.get_yaxis().set_visible(False)\n",
+    "    \n",
+    "    ax = plt.subplot(3, n, i + 1 + n)\n",
+    "    plt.imshow(autoencoder_scene[i].reshape(64, 64))\n",
+    "    plt.gray()\n",
+    "    ax.get_xaxis().set_visible(False)\n",
+    "    ax.get_yaxis().set_visible(False)\n",
+    "\n",
+    "    ax = plt.subplot(3, n, i + 1 + n + n)\n",
+    "    plt.imshow(lstm_scene[i].reshape(64, 64))\n",
+    "    plt.gray()\n",
+    "    ax.get_xaxis().set_visible(False)\n",
+    "    ax.get_yaxis().set_visible(False)\n",
+    "    \n",
+    "plt.show()"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "out_dir = \"{}/test_simple\".format(base_path)\n",
+    "if not os.path.exists(out_dir): os.makedirs(out_dir)\n",
+    "\n",
+    "for i in range(time_steps, frames):\n",
+    "    scipy.misc.toimage(np.reshape(vali_scene[i-time_steps], [64, 64])).save(\"{}/in_{}.png\".format(out_dir, i))\n",
+    "    scipy.misc.toimage(np.reshape(autoencoder_scene[i-time_steps], [64, 64]),).save(\"{}/out_{}.png\".format(out_dir, i))\n",
+    "    scipy.misc.toimage(np.reshape(lstm_scene[i-time_steps], [64, 64])).save(\"{}/pred_{}.png\".format(out_dir, i))"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": []
+  }
+ ],
+ "metadata": {
+  "kernelspec": {
+   "display_name": "Python 3",
+   "language": "python",
+   "name": "python3"
+  },
+  "language_info": {
+   "codemirror_mode": {
+    "name": "ipython",
+    "version": 3
+   },
+   "file_extension": ".py",
+   "mimetype": "text/x-python",
+   "name": "python",
+   "nbconvert_exporter": "python",
+   "pygments_lexer": "ipython3",
+   "version": "3.6.8"
+  },
+  "toc": {
+   "base_numbering": 1,
+   "nav_menu": {},
+   "number_sections": true,
+   "sideBar": true,
+   "skip_h1_title": false,
+   "title_cell": "Table of Contents",
+   "title_sidebar": "Contents",
+   "toc_cell": false,
+   "toc_position": {},
+   "toc_section_display": true,
+   "toc_window_display": false
+  }
+ },
+ "nbformat": 4,
+ "nbformat_minor": 2
+}