From 1d4017faa97d6d90757e591aaff4b4884563ce87 Mon Sep 17 00:00:00 2001
From: Mirko Birbaumer <mirko.birbaumer@hslu.ch>
Date: Thu, 3 Mar 2022 09:53:03 +0000
Subject: [PATCH] Adaptation to tf upgrade

---
 ... Notebook Block 2 - Neural Networks .ipynb | 4954 ++++++++---------
 1 file changed, 2455 insertions(+), 2499 deletions(-)

diff --git a/notebooks/Block_2/Jupyter Notebook Block 2 - Neural Networks .ipynb b/notebooks/Block_2/Jupyter Notebook Block 2 - Neural Networks .ipynb
index cd230a5..96e8505 100644
--- a/notebooks/Block_2/Jupyter Notebook Block 2 - Neural Networks .ipynb	
+++ b/notebooks/Block_2/Jupyter Notebook Block 2 - Neural Networks .ipynb	
@@ -34,12 +34,12 @@
   },
   {
    "cell_type": "code",
-   "execution_count": 6,
+   "execution_count": 33,
    "metadata": {},
    "outputs": [
     {
      "data": {
-      "image/png": "iVBORw0KGgoAAAANSUhEUgAAAYIAAAD4CAYAAADhNOGaAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjMuMSwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy/d3fzzAAAACXBIWXMAAAsTAAALEwEAmpwYAABzqElEQVR4nO2dd3zU5f3A38/tDBJCWIGwN8gSRBQEwYUT92hrtdW6rT9rtbbaupUOZ2tbrdq690IFGQIqguw9wworhEDIzs3v8/vjuUsuue8lF3LZz/v1yit333Wf+97d83mezxRSSjQajUbTdrE0tQAajUajaVq0ItBoNJo2jlYEGo1G08bRikCj0WjaOFoRaDQaTRvH1tQCHA8dO3aUvXv3bmoxNBqNpkWxatWqI1LKTtW3t0hF0Lt3b1auXNnUYmg0Gk2LQgiRbbY9LqYhIcRrQojDQoiNUfYLIcQLQogdQoj1QogTw/ZdJ4TICv5dFw95NBqNRhM78fIR/A+YVsP+c4EBwb+bgH8BCCE6AA8BJwPjgIeEEGlxkkmj0Wg0MRAXRSCl/A7Ir+GQ6cAbUvEj0F4IkQGcA8yTUuZLKY8B86hZoWg0Go0mzjRW1FB3YF/Y8/3BbdG2azQajaaRaDHOYiHETSizEj179mxiaTTHg8fjZ+2K/ZSVehlyQle6dk9papE0Gg2NpwgOAD3CnmcGtx0ATq+2fZHZBaSULwMvA4wdO1ZXymthbNlwiOeeXAiAYUikASdP7MUNd56KxSKaWDqNpm3TWKahmcDPg9FD44FCKWUOMAc4WwiRFnQSnx3cpmlFlJf7ePaJhbjL/bjL/Xg9AXy+AMuXZPPtvKymFk+jafPEZUUghHgXNbPvKITYj4oEsgNIKf8NzALOA3YAZcAvgvvyhRCPASuCl3pUSlmT01nTAln1417T7V5PgLlfbmXKOQMbWSKNRhNOXBSBlPKaWvZL4PYo+14DXouHHJrmSWmxl0DAMN9X4m1kaTQaTXV0rSFNgzNoWGdTP4DFIhg6vEsTSKTRaMLRikDT4PTul87QERk4HNaKbUKA02njkmtGNqFkGo0GWlD4qKZpMAIGmzccIv9oGX36pdOj9/Elft/5u8nMmbmZb2Zvx13uY8jwrlzxs9F0ydAhpBpNU6MVgSYqhw4WMeOP8ygv8yIlSEMyaFhnfv37KVVm97Fgs1k4/9ITOP/SExpIWo1Gc7xo05DGFCklTz/6DQX5ZbjL/XjcfrzeAFs3Heajt9Y0tXgajSaOaEWgMWX3jqMUFriR1VL3fN4Ai+bq2H+NpjWhFYHGlOJCT9SMX4/bj2Ho5G6NprWgFYHGlD79O+DzBUz3ZfZqr8tCaDStCK0INKaktE/gjGmDcDirOoUdDis/vWFsE0ml0WgaAh01pInKNb8cQ9fuKcz6dBNFhW569k7jimtHM2iYTgLTaFoTWhG0cAIBA4tFIET8TTVCCKZOG8jUaboWkEbTmtGKoIWyevk+3v3vKg7nFON02Zg6bSCX/3QUNnvd4vs1Go1G+whaIOtWHuBff/uewznFgIri+WbWNv71zOImlkyj0bREtCJogbz/xiq83qoRPV5vgHUrD3D4UHETSaXRaFoqWhG0QHL2F5lut9ks7N19rJGl0Wg0LR2tCFogye2cptsNKUlLT2xkaTQaTUtHK4IWyLSLh0bE91ssgg4dE+k7IL2JpNJoNC0VrQhaIOdOH8rEKf2w2y0kJNpxOK10y0zl3ofObJAwUo1G07oRsnpVsRbA2LFj5cqVK5tajCansKCcvbuPkZqWQM/j7BOg0WjaDkKIVVLKiNIA8WpePw14HrACr0gpZ1Tb/ywwJfg0EegspWwf3BcANgT37ZVSXhQPmdoCqe0TGD46oanF0Gg0LZx6KwIhhBV4ETgL2A+sEELMlFJuDh0jpbw77Pg7gdFhlyiXUo6qrxwajUajOT7i4SMYB+yQUu6SUnqB94DpNRx/DfBuHF631ZKXW8Lq5fvI3pVPSzTdaTSalkU8TEPdgX1hz/cDJ5sdKIToBfQBFoRtdgkhVgJ+YIaU8rMo594E3ATQs2fP+kvdxEgpCfgNrDZLhYPX7wvw7+d+YO3y/djsFgIBg64ZKdzzp6m076DDQjUaTcPQ2LWGrgY+klKGp8X2klIeEEL0BRYIITZIKXdWP1FK+TLwMihnceOI2zB8Oy+LT95ZR2FBOYlJDs67ZBjnXTKMD99aw9oV+/H5AhW9APbvLeDZJxbyyNPnN7HUmuNHAgVAKeACOqAD9jTNiXgoggNAj7DnmcFtZlwN3B6+QUp5IPh/lxBiEcp/EKEIWgvzZ23j/ddX4fWogb60xMvnH6ynqNDNorlZ+KqVjjAMycH9hRzcV0i3HqlNIbKmXniBNYAHpRAE6md3IkopaDRNTzymJSuAAUKIPkIIB2qwn1n9ICHEYCANWBq2LU0I4Qw+7ghMADZXP7e1YAQMPnxzTYUSCOH1BFgwezset9/0PKvVwrH8ssrjvQHyckvwesyP1zQntgLlQAAwgv89VAbKaTRNT71XBFJKvxDiDmAOKnz0NSnlJiHEo8BKKWVIKVwNvCerej+HAC8JIQyUUpoRHm3U2vjh2924y32m+2x2C4lJdgoL3BH7/L4APXqnKUXy1hrmz9qGQCClZMq0gVx13YlYrdrU0PzwA/molUB1SlBzol5ABmqloNE0DXHxEUgpZwGzqm37U7XnD5uctwQYHg8ZmjtSSj56c3XU/X5fgKuuG807r62qsmJwOK1MnNKPlFQX7/1vFd/M3lZl/8I52zEMyc9uPKlB5dccD0Yt+91AFlAG9G94cTSaKOhpZCNxLL+c0lLz1QBAr37pTDlnEDffPZEuGe1AQLsUJ9OvHMG1N43D4/FHKAFQZqVFc7OirjQ0TUUZsBrz1UA4Bsql5m1wiTSaaOgOZY2Ey2VDGuaDghBw/S3jABg7vidjx/dESlmlblBBfnnUOkJWqyD/SJl2JjcbAiglEKtyFkAR0LEOrxH6LmmTkqb+aEXQSCQmORg8vAub1x/CCIQpBAG9+nagR+8OVY6vPui3T3NhRFEkAb9BWrouNdF8yKN2s1B1Yv0pBoAdwKHgayQBA1BxGNWRwJGgPBagK5BKVeUhgUKUmapd8HqatoY2DTUiN901gU6dk3El2LDaLLgSbKR1SOSO+ybXeq7TZWfSmf1wOKqWn3Y4rJx6el8SEh0NJbamTgRQOZWB2g6sds56YC81m5IksA7IoVLRlAbPLax2rBE8djOQGzxnHUqJhCgHfgyevx1YGTymLrJrWgN6RdCIpLZPYMY/LmLD2hwO7iukS0Y7Ro7tHnPEz09+eRKGX7J40S6sVgsBf4Dxk/rw85vGNbDkmtjZANTULtSCGtCrD/gBYHdwey+T8wxgC5EDfmjfbmBU2Lbc4LFGteMOAl1Qs//1qJVAOAXALtQqQ9NW0GWom4j9ewuY8/lm9u8roHffdKZNH0KXjJSYzi0r9ZJ/pJS09CSSkvVKoPlQAqyidrOQIPrM3wpMJHKxvg
U1uEc7zwacFvZ8NeZKA1T+Z9caZLUAk9D+h9ZHg5ah1tSN9asP8Pc/f4vfZ2AYkj078/lh4S7ufeQMBgzuXOv5iUkOEpO0Amh+1LQSCKc2848PCG9H6gUO13JeXb4PMnjNaAO9QWUWtKYtoH0EjYxhSF55YQleT6DC+WsEJB6Pn1f/vrSWszXNGyf1HzwN1KognPJarmsh0pzUBfOftwXoBCQTXbEkRDk3nGKUaekHlG/hcC3Ha5ozWhE0MjkHCnFHKSWRd7iEgmPljSyRJn6kEZ9F9tFqz13UvBrIRA384WSgBvvwn7gF6IyKHHIA3YgcAizU7h8oRJmejqJWFsUo09WeWs7TNFe0aaiRsdksUfMJkGDTpSLigBc1WNmA9jSeiUOgaiaGnLBmTuFY2IMqTZGAGtCdKCVTvVyFQFWB72dyDUtQlsMo34IFNfB3oPJ+9A++xl6UOSoxeK0O1S9WjSwifQsGkB2Ux17L+ZrmhlYEjUznru1IS08kN6eqPVkE8wmSU5xRztTUjkRFvOyncrATKGXgRcXI9yB6rHwZKj4/gEruak/dlUgCqldTCWpAP1LH80NylAVfey8wAhiKmnXnU+lszqDm0hSh3IGuUfYL1Goisw6ySaL7QkKJcelRzjuGel+JKMWmfRDNBa0IGhkhBLffO4mnHpxLwG/g9QZwOK04HFZuumtCU4vXwjmMUgLVZ6uhwbgINTseTuSsdx9KiYTOPYhSBMOpuwVVoMIzu6IGP7O4/BSUsqgpwkgGz92IKsw7HKXQPChzUVPNvC1El9tsSDErxW1HleLWE5/mgFYETUCvvh34278vYfHCnRzYV0ivvmlMOL0vDqf+OOrHXmoP3TRQpaFPoXJGWk5VJRA6rgC1Quh2nPKkowbsMqqadGzAMFTbjSMxylyMUh4O6hYhFG8Eyh9xiEizlw0lY3W2oO5x+PEGsAmlDDRNjR55mojkFCfTpg/FMCRff76Z3978GSXFHtLSE7n0JyOZdIauRll3Yi3c5kMNTKH2n9Hi80MJWMerCCyogW43lQNnB5Qd3oUy9xyh0hx1rIZrNad8n/6o1UwZ6h5ZUApiBJHmHh/qfVWXP1TaIp/afRKahkYrguNg3aoDfPXJJvKPlNJ/UEcuumLEcRd8+/DN1cyfVVlV9NjRMt58eTk+r8EZ5w6Mp9itCD/KDBSqj5OOGoxSUXV1YiF8wArFzZtR15pB1bGhonDMInEEKpSzU/D5KpT5yox29ZQjntiAMagVUzHKvNORyLBXUAqupgS69SinthWlMItRZqN2wddJRimdnOC1UlCKVBdYjCdaEdSR2Z9t4pN311UM3EfzSlm9fD+/f/xs+vQ3c5JFp7zMy7yvtkW0p/R6AnzyzlqmnN0fi44iqkYRsBY1sBhUtn4cA/RGhTTWNnhbUY7cVJSZoyPKR1D9vFC4ZWMxCBWWGa6YLMBgml+kt0A5fM2K3YXjRN3vaJ+JRJmOPGHHeFCDvxmFqM//RJqXcmzZNLdvV7OmvMzLx++sq9ITwDAkHrefN19eXufr5RwowmYz/wg8bj/FxZ7jlrV1IlG1fEJtH0PbfMAyKk0xoQqb1uD/8AgigscfQoVBLqNyRls95t5B7RE1BvVfNYRIRkUcZaJmvl1RCq4xlVG8EajVUE0RQuXU7R4atOK25k2CXhHUgayteVFXuLuyjmAYEosl9pC49h0S8fuiVHoU6IqiERQRvTKmRFXaHEulA1KiZv6hqp7VPzwDNfvMQjlvD6N8AgGUuaY70X8ibmAblfbvVGAgajCvDy5aX7eyLihz3vY4XjPWch6aWIjLikAIMU0IsU0IsUMIcb/J/uuFEHlCiLXBvxvD9l0nhMgK/l0XD3kaig2rD+KLMnBbbRai9I2JSof0RAYO7RyxKrA7rCqKyGFmc21LmA3cNVGMGnBCZBFbJFEovLQLyl49FlWyIZoSCKDs+eEJXqFs28ie0xpQDvd4horqpLV4Um9FIISwAi8C56LCIK4RQgw1OfR9KeWo4N8rwXM7AA8BJ6PWxA8JIWozOjYJRsBg8cLoy9ExJ/eI2kGsJm6/dxJ9B3TE4bCSkGjHbrcyfHQ3ftpmexAHUAP4d8AiVB2bguC+FGqOngl3SnpRs/tYTA51jcjJparCCWGg8hg0kYSiimxUDjtWVORWZ+qWXGZBJQZq4kU8TEPjgB1Syl0AQoj3gOmodXptnAPMk1LmB8+dB0wD3o2DXHGlpMSL12u+GhACJp91fMv5pGQnDzx1DjkHCsnLLaFbZiodO9fXvNCSCTVZCQ3OxahmKaNRiqAvVZurhBNyTIJyNlqIrclKXbNcizBXMKGQSI05ycCpqMiuUMRXB5RSLQ1ui/Z5hUJUJcp3EktIr27nGSvxUATdUSEXIfajZvjVuUwIMQllKLxbSrkvyrndzV5ECHETcBNAz5494yC2OXm5xXzwxhrWrz6IzW5h4pS+XHL1SBIT7VHt/1aroFuP9vV63YzuqWR0b+shcUXBPzOT0C5U45UeYc+r40VVwxyBUgq1zfRDDuW6hum6iB4SqVuG1oyVyJIXduAklL+lGKUMilD+m/aoz9yDcvKnoO5/TZSghplCKkN0B9C0iXjNm8aKGvoC6C2lHAHMA16v6wWklC9LKcdKKcd26tSp9hOOg2P5ZTx0zyxWLMnGXe6jpMjD/FnbePIPc7BYBKefPSDCbm+zWxg6shtpHRKjXFVFG+3bc4zSEh0FVDM1OQDD9/VCLUTbVzvGQM0u16EUQQLRZ4M2lE9gHJWJZbGSEeW62mRx/AjU6qAXatU3CjWfHERlbaLO1K4EyqnalEeiViCxNAxqu8RjRXCAqt/+zOC2CqSU4XV1XwH+Enbu6dXOXRQHmY6Lrz/fjNvtJ7xpm99ncCinmHWrDnDVdWMoK/GybPEebHYrfr/B4GFduO2eiabXCwQM3nl1Jd/O34HVasHvD3DyhN5cf9t47Qg2xUH0gduBiurJCT7vSs2x6XmolcE6KmvchB8fSkrrQt2dmM7gtTdVu+ZAdGx7U2CghpKDKPNS9e9FKMT4CC07FLfhiIciWAEMEEL0QX0aVwM/CT9ACJEhpQz9gi9CZZAAzAGeDHMQnw38Pg4yHRcb1+YQ8EcOLh63n62bchk9rge/umsCV113IjkHiujYOZn0TtEqWcIHb6zmu2924PMG8AVtn8uXZGNIyS13myuPtk0oQ7i6nTi0cN1C5Y+8oIbrhMJCXagZfxFKIZgdV73uUKykoQrBhfwFqei0nKYglFtSQM0z/gAqyksrAjPqrQiklH4hxB2oQd0KvCal3CSEeBRYKaWcCfxaCHERahqWD1wfPDdfCPEYSpkAPBpyHDcFqWkJ7M8uiNhut1tITau0/aa0TyClfc22YJ8vwIKvt1dJPgPweQOsXJJNyQ0n6ZLTEVhQJoH1VEblSNSgW71ZS00/eiuVxc9CmcfR/AU+KpVGXRHoUgdNTSG1K4EQNdVyIniNI6jVpBVlAmwbn29cEsqklLOAWdW2/Sns8e+JMtOXUr4GvBYPOerLORcOIWtLH
l5P1dBAIQSnTupTp2sVF7qjjj02u5WjR0q1IjAlGTVDL6LSObixDucLKm3KIWoyw0n0TL4lk0/stn83Sumb/e58qFDl8DyQHNR3qRMqSul4JgstA/0LCGPkmO6cd8lQ7HYLTpcNV4Idp8vG7fdOon0UZ7DXG+CHRbsqzEAetw+AlFQXIkqUkd9n0LFzdJOSJjTT7ojyDdTUvtNFZfinFfWDHUVVU4+L6NE8odLOmpaJjdjNetHCiX3Aj5gnA5ahkhKXoZRO60SXmKjGJVePZMo5A9m0LgeHw8qIE7vhdJlnMebllvDY/bNxl/vxuP04XTbe/99qHnjqHLplpjLtoiHM/nxzFfOQw2nl1Ml9SUrWq4HaOYKqKVNTeelEYGQM1zoBFU0SqlNkDf4NqaeMmqalC6pqaSxJgVbMJwR7MU8QDBEqT7IJ5RdqffPn1veO6okRMLBZLZw8sTcnndorqhIAeOm5xRQVevAEm9F73H5KSjy8+NfvALj46pFMu2gITqcNp9OGw2Fl0hn9ufamcY3yXlo2h1E/vLJajjNNOzEhEWVyGgj0DP4fj477b+k4USGmFiqTzsywEL34XW6MryWJXia8ZaNXBEGklMz9Yguff7ABj9uP1WrhjPMGcvnPRmM1KQVdUuRhd9bRyEb0EnJzisnLLaFTl2Qu++loLrpyBAX55aS0d+HUXchioAyVmF5bOYlQL4JYCTkANa2LrqgchDzUiq9D8P8eVMZyIqpEefso59clYqw5NQiKH3pUCjL3iy189PbaCjOO328w/6ttlJf6uP628RHHe32BqD4AUA1nOnZOZsKUvnTv0Z5OXdpy2Yi6YKBMOLFkBY9Elw/QKBxErg5jMRmCMi+FKtTWhlkrzpaPNg2hzEGff7AhItTT6w2weOEuSooiM4LTOiSQkmoeReDzBli2OJvZn23moXtmMfvzWMouaRRHiK0+kKD2cECNJhZ6UruJ0IIyQbXORFCtCIDSUi8ej7mzyGa3cCgn0i4ohOCGO07B4bRGLT9tGBKfN8DHb68lL1fXT4+NWJuUGOiSz5r4YCN6RrgTZU4cg1o5tE60IkA1gLFazG+F3xeImj08bGQGf5wxjdEnZZLeKQmXy9zSFggYLP8hO27ytm5cxPa1tFD/JjAaDSifVDSHsY/4NBxq3mhFANhsFqacOxCHM7Kg3LCRGVELyvn9BnO+2ML6NQcpK/XidpuvKoyAZNHcrOjdyDQo++wOlJO4thWBQCmM9g0sk6ZtUFvby5rCl1sHWhEEueJnozllUh9sdkuwQYyFYSMyuOWe06Ke8+Fba1i+OBu/z6C8zFfj9fOPlDHniy01HtO2OUi1WoVh2KksSBcqKzwa7SjWxIeafE2SttANTUcNBbHZLPzy9lO48toTOZRTRIeOSXRIj16eOBAwWDh7e9RmNdXx+w0Wzsni/EtPiJfIrYxoLSUtqKSvdFTSTyheXKOJFzVNKNKI3UEcQIWwFlDpW2gZZSm0IqhGcoqTNE8i387LIu9QCQOHdeaU03pHJJa5y30EAnWrb+6JYjrSQM3L79A+/XXVNASdqCxvXp1YM8+9qJ4HXtSERqAmNydgnutSiMpzKEEpi16okipNg/5lVWPDmoO8MGMRhiHx+wxW/riXz95dx8N/O69KvaGERAcJiXaKTUJLzbBYBCPGxJoF29rxoZrRHUEtu7ujnHFmWZsSCDnrPagfmK4NpIknfVF1hHxUrkoFdetqtoPKvhdQtSzFRKquYvNRpbNDr+UNHtcP1ZKl8dFr7DD8vgD//Nt3eD0B/D71IXncfgoL3Lz5yooqx1osgkuuGRnhYHY4rWR0T6mSQWy1ChIS7Vxy9YiGfxPNHi+wHMhGzYaOoRzEDsy/jhJVffRHYCmwBFW1vLQxhNW0CRyovhX9ULP3DGAssZcvAWUSipaQVhD2WKLaaFa3JhhAFmpl0vid1PSKIIysrXkYJp+BYUjWLNuHlBIRljRwxrmDEELwybtrKSvxYndYOev8wVx89QiWfZ/N3K+2UFrsZfiJ3bjwshPo0FFXHFXLYR9VfzQGapbUD/VjqE71VVcJKvt4PG3BkadpDGyo2fjxzshrykqu3hmvpvyX7ah+3GNoTP+CVgRh+P1G9OSwKJ/z1GkDmXLOANxuP06HFUuwLtGEKX2ZMKVvA0nakjmC+Y9GAoeI3hS+OgZq9tQzfqJpNMdNGuZlqiVVw5xrM8IYqInSFlRkXOOgTUNhDBzaGSMQOQgJAcNGdK2yGqi6X5CQYK9QAprjQaIa1Mda1Mug5mb3Gk1jMoDI3ggWoD+R8+3aVrES5UxuvOASPXKF4XBYmXLOACxhxeRCeQXX/kqXjq4ZHyoPYA/K7m82oIdmO/HAQqUTWaNpahJRfoYeqMJ0nVANkqr7GTYS+2+g8XwFcTENCSGmAc+jAm5fkVLOqLb/N8CNKBWXB/xSSpkd3BdAudAB9kopL4qHTHVFSsm/n1nMmhX7MYJ2ICEgtb2LB5+apu37NXKUynaSBmqQTgFGUDUGu+A4rh3NVCRQ3cg0msbGjTJjelAmoY6o77wT5eeKRjnqNxDLqtdJY/q/6r0iEEJYgReBc4GhwDVCiKHVDlsDjJVSjgA+Av4Stq9cSjkq+NckSgBgy4ZDrFmxv0qsv5RQUuRly8ZYG1e0RQIoJWBQOYMxUKGge6sd66Xu2cC9UbHYoagiC6pS5Ch0GKmm8TmCalu5B5UNvxUVxRbLLL+M2L7/oUqnjZc5Hw/T0Dhgh5Ryl5TSC7wHTA8/QEq5UEoZajX1I00VLFsDP36/xzThy+Pxs2TRrhrPlVJWrCLaHkcx/8IaqB/LquAxoPoQ1+U+dUApgk7AqaiQvpOAk2mtdeE1zZkAKt7foPJ7HEDN9HfHcH4C0b//Iri/EypiKBVlPDlM/Myp0YmHaag7sC/s+X7ULzUaNwCzw567hBArUWajGVLKz8xOEkLcBNwE0LNn/CNFLBYR1QoRzUlcXublnddWsfTb3fj8Afr0S+fam06i38BOcZev+RKg5sG9CLViGIiKz+6E+oLXZP8UVLYWDN+mzXOapiQf80mPRFUvHVjL+YmoAb6AyN+MhcoM+kLUBCr0WhLoQ0NGyDWqs1gI8TPUtO6vYZt7SSnHAj8BnhNCmBrZpJQvSynHSinHduoU/4F2/KQ+OB2RetHptDFxamQYqJSSpx6cx5JFu/D5AiBh946jzPjjPPbtaUsNU9rHcIyByrw0UCn7vWs41oWaW4xD/XA0muZCTZOXWB27oZIToclOiACVtYpCCWehbQZqxdFw40o8FMEBlKs8RCYmZSSFEGcCDwAXSSkrMoSklAeC/3cBi2jM4NkwBg3tzLiJvXCG9RRwumwMGtaZcaf2ijh+8/pDHDpYhN9f9Qvg8wb47P31DS5v8yEBNdOv7askUUtogRrgoxXysqFWAi2jWJemLZFG9AE/1t7ZNmA4MAG1goh1CDZQxpaGIR6moRXAACFEH5QCuBo1u69ACDEaeAmYJqU8HLY9DSiTUnqEEB1RdyfckdxohDqOnTKpDz8s2kXA
bzD+tN6MHJtZJZw0xJ6d+ab9BaSEnduPNIbIzYgBqCXvXlTWrxkGlV+38Jos1YmtdlNLxVNQwoa/vMeudxcghKDftWcy/N6r8ZWUs+HP77Lvyx+xpyQy5PaLGXD9OYgoDZM0TYEDVZdoN1VrElmpOVrIDDsq+qguIaIN99uotyKQUvqFEHcAc1B35DUp5SYhxKPASinlTJQpKBn4MGhvD4WJDgFeEkKEYg5nSCmbrMGvEIJhIzMYNjKj1mM7pCdit1sJBCIdzDWVr26dCFQbvy7AWiKXsALVCtAZfJ5M9IiI1usHKD9SyCeDrsN7rDIRbsNf3mf3B4vw5BfjKyjF8Knv07K7/sHBb1Zx+tsPNpW4GlN6or7L+6kMH+1BbBFsHpQ7NR+lCBJQw14sykAEX6thEFK2vGiXsWPHypUrVzapDB6Pn7tv+JjSkqrlkx1OK7fcPZEx49tq6QMvqg5QqByvBfWlH02luUeinGElmDvNMlHOsdYzG5aGwYcDrqV096GIfcJmAQmyWllzi9PO+YtfoOOY2pyQleRv2MXy3/yT3O83YE1w0v/nZzPmyRuwJ9XWnF3TsLiBUExM6Dsf7gyuTvXIFRsqBqd+IdNCiFVBn2wVWs8vrZFxOm387tGzSOuQgMtlU13NHFYuumJ4G1YCoL6oJ6OcYv2AYajicOE2f4HKA+hM5MogZAs1Kz7XMvAcK2bn2/PZ8cZcynNV/ZmD81dTtvew6fHSb0QoAQDD42PR1Y9VrBJqo3D7Pr6a8GtyvlmD4fXjKyxl28tf8vUZv6UlTvhaF7uILLYYemxBGVOswcfdUX43W/B5J1SMTcPlzeiic/WgV98OPPPKZezafoTych99B3QkKVknOanBvUPwLxyJWgUUoBJzyoheiiIHZY+tKbvSQJmhDFT0UtNXIs16Yw5Lb3kOYVPOcOkPMPrR6/EWlJoO9rVRuu8wO9/5hgHXnVPrsesef4tAeVU7suHxUbA5m5yFa+k2tUniMDSAeUE6UL+VfqhBPoAy/4QmTYMaQS6FXhHUE4tF0H9wJ4aP7qaVQI2UoPoJrEaFkhZQc1cyCyrKKBrHgMWoBJ8twA+oBLamo3DbPpbe+jwBtxd/STn+knICbi9rHn4dX0kZWGNteViJ4fWz4/W5MR2bu3iDqbIJlHvI+7HJXG8aIPpQK1Dz8c40ZWvLNrciCAQMsrbk4fX6GTCkMwkJxz+LlFKy/IdsZn22maJj5fQf3IlLrh5Jtx6pcZS4NRBAVRmpSzVFSaVzuTo+YD2RTrZslCMv1lC++LL91VmmZpxAuZcjq7ZDILb+1pFUXTVJwyBn4VqKdx6k/bDedD51mKqA2zWdkj2R5VCsLgcJXTsgpcRf6kbYLPiKynB2SMFiq7ty0sSKD2USOoz6DZghacoWlSHalCLYtimXF2Yswu+XCAEBv8FV14/hzPOObwn20VtrmPflNjwe9eNfsXQv61Yd4IEnz6FX3+pmkbZMbZnE1QlFSERTBNFqPxmoqIymUQRlh/KRfpMfvJQUbIilBEEktiQX/a89u+J56f48Zk/5DeW5x5CGgRCC1IE9OGf+Xznhniv4/vo/4y+t2vhEWAQWu40Pel9D2f48kCCsFmxJLkb8/icMv+/qqNnzmuMlgHIORwuVFsG/ITSHYbjNmIaKi9w8/dgCSoq9uMt9lJf58HoDvP/6KrYeR1G5ooJyvp65pUIJAEhD4nH7eee1po1oan7UNV7aiqpfGA1PDddrujyEzLPHYks2j86pPjjHgi3JRfqYgfT72ZkV2xZc8Qglew4ps1OZB3+pm/yNu/jmkj/h7JTKkF9fgtXlwJ6SiL1dIo4O7RjxwM9YcuuzlO3LqxiTZMDAV1TGqj+8yvuZV7L5hU8IeGoy1WnqxmGU6bO6EhAof1ZfVBBF58YVKwptRhEs/Xa3aWE4ryfA7M831fl627fkYYuyrM7amlfn67VukomeSWyGgXIkRyM1yvVCP7Kmofu0cVhMypRAZGhoTdjaJZBxxomc9LdbmDb/b1js6pol2bkcW7cz4lrSFyD3u/XMOeteNjz1LkYggC3ZRfpJg5j89gPseH0ugbIoClJKynPyWfG7l/l0+I0cXbMj4hB3XgH5G3bhL6u7Mmu75GM+WZGo72lPoq94G5+mX5M0EkfySvF5ze10Rw7XrRH6kcMlfDd/Bx63eVVAh0PbXauSjvrSlxNb9dFQh6bqFUZzUQ7hkBO5eqy1BYgsB9LQHNu0h++ufZL8DbvhOCKDwrE47AiLhbwfN5O7eAM73pjL6e88SHKvLriPFmJx2Ai4zWfuMpjpLn0Byg/mU34wn7wfN0dEEplheHwU7zjAl6fcQZeJJ3DG548h/QG++/kMDsxdidVhxwgEOOG3VzL6oeu0KalWahrkm19QSZtRBP0HdeJbVxbuaqWmrVbBoKGxL88OHSji4Xtn4fH4MQvNttstnHZGXdPNWzsCOBFVTMs8lj7y+OpO/H0ox1v1gdaCUgZpqLaADRd1IaVk55vzWPvYm5Tuz6Nd3270vHgCG2a8i+mXoY4k9exCeW4+vsLKicmR5Vv56rRfc8XOt2k/tDeyjuXOo64EomB4feT+sJEf7/w7Jdm5HF6yCcPjw/CoSc+mpz/EmdaOYXddVqfrtj0yUBV3zL6vza4Kf9sxDZ04LpPUtASs1foK2x1Wzr14WMzXee/1VbjLfaa9jR1OK736duDyn+l47UjsqOSyWJzoApVEEyJUfdFstm0FJgMjacjyFNIwmH/RA3z/iz9TvPMghsdH4ZZsNjz1TlyUAIC/tLxiwK143YCBr7CMvV8sxeZycOLjv8CW2LAmBcPjY9c7C8j7cXOEPP5SN+ufeqdBX791kERlUbnwZLG+NMdeGm1GEdjsVv7452mMm9ALm82CEDD4hC78ccY0OnVJjvk6m9blmP7ubTYL06YP5cEZ03A628xC6zjoTO1fOwNVsyg0m63JXxCgMRzEi2/8G/u/Wla3vjp1JJpD2V/mpmi7qjw57NeXcdrr95M2oi8Wlx1MCiLGA2kEKnwT1XEfLsA47lDYtkQGqqHSoODfqVQt1Nx8aFMjVrsUF7f8ZiI33z0BiN5wpibsDiteT+SPwGa30qd/urad1kpnlJmnnOiRPxLV0GYdqiOZnegjsKShv8al+/PY9c439b+QRUAU044tyUXKwEzyTZy1tkQnKQMrzQm9L5tE78sm4S0s4eOB1+E+UljnVYmw2xAWETHjD+FMT61iogonISMdy3Ekx7VN7KhijM2bNrMiCEcIcdwD9mlT+2G3m902yQmjdDP12rGi/AW9UH0JaupNUA4Uoxxv0ZbTFlSERsNN1fOWb40aDRQLwm5l8nsPkj6qP7YkFyL8WkJgS3LR69LTOPm527FWM/sIqwVH+2R6XnhKxHUdqclcsOxFup89tm4rAyHIPHccFy57kS6TR0bM/K2JTsY/fzsZU0djdVX11dgSXYx++Oexv5amRaCrj9YRj9vHUw/OI2d/IW63H4fDihCCu/5wekzlq1s3oRpBB4OPO6GWwrVlby8i+kD
eF6U0QlVN3SbHWoKvE9lJLh4c+n49c8+9v06OV2G3YrXbsKcmMeHle+hx/ngA8tfvpCQ7FywWDn+/HsMXoNdlk+h8ylCEEOz55HuW3v48/pJyDH+ATicNYtLbD5Dco+aABsPnx1dSzsH5q8ldvIFjG3YhDYn3WAmF2/ZheCtn/laXgwuX/5O0E/ogpSTrtdmse/JtynPySRmQyZgnb6DH+ePxl7lZcsuz7Pnou4qktJEP/Zxhd12mV75xpRyVdV/TpCg+RKs+qhXBcWAYkk3rcsjacpiU9gmMn9ib5JTmExPcNEhgA5VF4EA5fR1UmnfMCADLUQO8GYmoaqbFKEUQzZxkAU6hIULzfGVu3k69MKZcgKSenZn68cMkZqTjL/fSrk/XOjeXkYZB8e5D2NslkNC5fjXot73yFUtve75KxrPFZWfiq/fS75ozossgJTkL1rDrvYUYPj/dzxlL78smYXU0fWG/1oMb9Zspo7IKb18aMqoomiJoUz6CeGGxCIaP7sbw0doUVEkhVZUAKOXgQ4XR9a62vRAVDlpEzWadcpSy2EzN2ckieM1QtJFEFbYLmZY6cryzrc3PfYw0alcCtqQELtv+Rr0HS2GxkNKv/t8twx9g5e/+E1H2wnD7WHbXi/S58nRTW7+Uku9+/hR7P/uhwoGd/fF37J+1jElv/F6vBo6bMtRvwY0ydR4gMtBhJ+r7Gv++7DWhFUGcOZpXyrv/W8Xa5fsxDIN2qS5GjunOeZcMo2u35hc2Fj+OYD5QG6haQ73Dnq9HDdKxrkbdRF8xhBP6OvtRUUdlVDbH2Y5qjhN7hBhAwOtj3ZNv1yqqxWFj1EPXNqsZc/HOg1F7GQTK3JTuPUy7PpHmzANzVlRRAqAimnZ/sIhuZ46JqSS2pjqHUVVyQ7+Ro0QvwZ5NYyuCNuksbihKSzw8/NtZrFySjc8XIBCQFOSX8+28HTz4f1+wdkXDNZ9uemxEbz8ZPuvcjZq5x6oE2hPb19SCKj0Bqsx1CWolIYP//SgFVDdT6Oo//rfCNyD6dYWRvfG6EvBbbRjCggQkgs4TT+CEe66s07UbGntqUlRFYAQM7CnmLVV3vDHXNJRV+gL8cOPf2P7a7LjK2foJAFuJXC1Ho6by6w1DXFYEQohpwPOoX/wrUsoZ1fY7gTeAMShVeJWUck9w3++BG1B369dSyjnxkKkpWDQ3C3e5zzSSz+c1eOm5xfz99Sux2Vqj/u2CmsmYOXK7hz3PIfYerTZUUk5xLcdagBFUZhnnmsgBShkUU5eEnm0vfYHzlP5sHDmJQ7mGqiTdC3X9sJdI9xTRdcV+kpKd9OjdnoTEpi8jkNi1A53GDebwkk1V/BvCZqXraSNwpZuXSzfMKqgGkQGDH+94gcxpJ5HYrenLJ7cMCut4fOP37a73iCSEsAIvAueiSkZeI4SoXjryBuCYlLI/8Czw5+C5Q4GrUSmn04B/Bq/XItm8/hDeKPWMAAwDdm5rrQXpElCdlixUrgwsKNt8eMRLXRKR/FQ2njFTHgI1Kk+kcnCXUY4NvybBY3zBv/2oGdtuVCiqG8hByo2c/vkVLOk3iQMHjWrtBASIyr+jzhSee3IRTz/2DXde/xGfvreuWbSHPP3dB0nu3RV7uwQsLge2dgm069eNSW/eb3q8v8xN8c6DtV53z0ffxVtUDaB+M30a/VXjsSIYB+yQUu4CEEK8B0xHefdCTAceDj7+CPiHUB6n6cB7UkoPsFsIsSN4vaVxkKvRSe+UhMUiTKuctg0yUQP/YdRA24HI2XcqyqlcG6F7WBJlfwpq3hFe9lkN4OprbWYSkajZ1lYqVw3RPyshINvogM9bULu4QQequ1y97lefbKRjpyROO6N/7ec2IIndOnLZttc5OH81hdv3035wDzKmjo4aybT0jhco2Jxd4zUNfwB/tVDa3MUbWHHfSxxdswNnWjuG3HkJw++7SieeAZUmSzPsVH5XHcAAVN2sxiUeiqA7KlU0xH5UvJ/pMVJKvxCiEFWSsjvwY7Vzu2OCEOIm4CaAnj2bZ3P4M84dxNJvd0ddFVgs0G9Q4zqBGh8XqsRuNPqjwkDrW6IgQFUlkI8KxYs2uIcqk25FKaLYlPWhg37c7rordp/X4NV/LOX7BTuZOLUf4yf2xuG0EQgY+P1Go5YhERYL3c8eqxLPasBf7mHXuwuiZhuHsDhsdJ92UsXzrNfn8MNNT1dUPy0/lM+6J96iYPMeJr/5h/q/gRaPFRhM1ZVtqAbRGCr7FduJ7mdrWFpM1JCU8mXgZVB5BE0sTgXbtxxm0dwsSku8jDm5Bz+5YQxvv7IKn69yoBNClaa4+e6JrdQ/UBeSUV/+kNM41K9VAnuJ3ZkrUElmobrvO6jZJBQqXFc3uve043KJ41IGUsK2TYfZsTWPLz7cSK++HVizfB+GIenarR3X3jSOoSOaTxKit7C01tBQW5KLXpdMJH1Uf6SULLnlWba/MiuixEWgzEP2x99T/GiOaWRS26MzajUaCh9NBbqhBv9yVNhoPkpBZKCi7BpvNRUPRXCAqpWUMoPbzI7ZL4Swoe7C0RjPbbZ89t46vvp0Ez5vAClhy/pDpHdO4q8vXczm9YfYvC6HwkI33TJTmTptYCsPH60LScAJ1bYdQy0IY1kpWFCrgXALYv36AERjzPgE3v1vAR6PPO4io4GA5PChYvJyiyuucXB/Ec8+vpD7Hz+bfgObh9PV1SkVW7LLvN+BEHQaN5ghd1xM32umApD9yfeqBlOUGyPsVvKWb9WKoIJQRdJw3KiWliHzUAD1OziGmjA1zgohHtPTFcAAIUQfIYQD5fydWe2YmcB1wceXAwuk8qTNBK4WQjiFEH1QBrLlcZCpwTl8qJgvP96E1xOo+B14PH7yDpWwcM52Jpzel1/dNYHf/ukMfvLLsVoJ1Ep7VBZx9S++qLbNgvpBhfIWQn8Ng90u+OOMzgwY7KC+eVTVx0uvN8Cn766r30XjiMVqZcyTN0bUO7IlOhnxwE+4YOk/6PfTMyv8C1v/PbPWFpwJXRrf3t2y2Evk5CfUoS+/0aSo94ogaPO/A5iDWsu8JqXcJIR4FFgppZwJvAq8GXQG56OUBcHjPkA5lv3A7VLKFlHfdu2K/UgTM4bPF2Dpt7u59JpRFduklPywaBczP9zAsaNlZHRP5fKfjWLEiabukDaKAEahTDy5qB9DMmpuYAMOoX4wHVFKoLaQ0vjRsbONB57qTHFhgJJSg7LiATz92HJKS+rf43f7llga9TQeg248H1uii9V/fI2S7FwSunRgxO+vYcjtF3P4x81se/lLPEeL6HnRqXiPRXPkKxypyXSdNKKRJG+pRPNXBVCm0/RGkSIuPgIp5SxgVrVtfwp77AauiHLuE8AT8ZCjsYk+Qay658uPNjLzow0V5auzd+Xz9xnf8qv/m8C4Uxu/tWLzxYZyqg0KPg+/j+HRNzmNJlElgnapXWmX2hdI5NZ7nLwwYxF+n1GvKDGvx4/X48fRjHpY9PvJGfT7SdU6ROueeIt1T71DoNwLwTpE1gQnVpcjauvMXpdOpN7LqF
ZPtEx0Sw374k9b91weN6PH9cBMFdjtViacXhkH7C73VVECIbzeAO+8urJZxJo3P6qbg6qTRuN9da0ox91klF9DZeMOH92Nh/92HhOn9qPvwHROPLnHcRUetNksHNhX14SjxqUkO5d1T7ytMqyD31d/qRtvUSkWpx2L03zAynplNuv//C4A5YePcWDuCrJen8POt+dTuG2f6Tltjx5E/y43Xh+D5jMNaeaUl/uw2SzY7cqT36lLMtOvHM7MjzZUOIudLhudOiczbXplPt2BfQXB9piRFq/iQjelxV5dubTOdEXZVj3ULcqoLkrXgspzTCNa9Eb3Hu254Y5TKC5y88BdX1BeWnPYpRk+n4HD2bxj7fd+YZ7WI71+HF3S6HX9OWx+4ZOI2+svc7N+xrsU7zrEjjfmqPBSKREWgbDb6H7OSUz54E/Nqj5T49MRFSOzj0qFIFGTjsbLTteKoBa2bDjE6y8tIzenGCEEo8dl8otbxpOc4uTCK4YzdGRXFs3dQWmJhxNP7sG4Cb1xOCp/2MntXAT8UZyZApwJ+iOoO1ZgLCocNOQ7qIl2qFn9LpQTriaFEFqN9EH9SGtn7pdbKSvxEoihTLUZTz04j7/8azqJzaAsRV2RAvpcNYWs/80x7WjmL3Wz8615SG9lgp80JNLjY//s5ax64FXG/fWWxhS5mSFQGfmZKH+BBeUXaNzJgTYN1cDuHUd55vEF5OwvwghIAn6DNcv388Qf5lTYhfsN7MQNd5zCr+8/nYlT+lVRAgBdMtrRvWd7LNU6SNnsFsad2qtihaGpK3ZUKN6p1Pw1DsVld0T1RRiNGuTTg9ewo+K5R6ASznoHj4s9aXHtiv34fMcfuVRc6OaBO7/A6zEvENfU9LzolKi9GLz5xSR27xg9CS1gYETxIUifn03PfEjx7qbw+TQ3nKiVbmcaWwmAVgQ18tn76yOyhAN+g/wjpWxcW3s9lhC//v3pdOqSjMtlw+my4XBa6dMvnZ/fUj0BW1N3bFTWOKqOQK0GMsKep6IG+xGoGkUTUc7pdFRTkN6E/ACxkphU/5n8sfwyFs7NqnhuBAyKCsrx+5o+iC65ZxdcXdqb7vOXuFl4+UNknjcOq6vqfbAkxHBfJHz70yfjIKWmPmi7RA1k78o3tSJ4fQH27SmIOfyzQ3oif/7ndLZtPsyR3BIye7Wnd7/GCQtrG2SiBvz9qNpEAlXqoitqJdCw852zzh/M7qyjeOoxo5cSFs/fwdkXDGbeV1v57L31eD0BhIBJZ/bnml+MwdaEq8ea8gXy1+9m5IPXYrHb2Pv5EiwOG4YvwKCbL2DvzCWU7Kp5xn90TRblh4/Vuxub5vjRiqAGOnVO4tjRsojtDruVjp3rVipWCMHgYV1gWBcOHSji6Ue/YePaHKxWwdhTevGTX44hpX1C7RfSRCGVmot7NRxjxvdg4hn9+G7+DpASi0UQMCTSkAQCsTuo92YX8J8XlrBiSXaVKLPv5u+gtNTLLXdPbAjxY6Jd324cXbXddJ/h8XFg7kpOf/ePuI8WUnbgCO36ZGBvl0jnU4fx/fV/rrHfs7BYVFiqpsnQpqEauODy4ZERHcG6QSp8tO4U5JfxyH2z2LDmIIYh8fkMlv+wh0fum11jCWtN80UIwc9vGsfjz17AldedyE9uOIlnX7mM088eUOeIoB8W7jINNV6xJJuCY43fsCTE6IevA0uUkF6LwFtQyvqn3mHXuwtJ6JKGvZ0yr/W5fDITX72XhB7RHe+ujikk9ewcdb+m4dHN62th/ldb+eCNNVisAiMgad8hgf/7wxS69ajb7FNKydLvdvPuf1dRVBC5zHa6bPz85nFMnNIvXqJrmhgpJd/N38HXM7dQXORhwOCOrF1x4LgS0BIS7fzfH6Yw+ITGiy2vzpLbn2fbv6pXjwEsFqxOG4bXj8WhjAynvX4/fS6fXHGIlJI9H3/P99fPqEhKwyKwuhxM/ehhMqeNa6y30aaJ1rxeK4IY8Hj8ZO/KJzHRTvee7Y+refdb/1nBd/N31GhHnnxWf355+yn1EVXTzHn28QWsXVn3uop2u5Wn/nERnbrUredyPAk1tc/+5HsCHl+w5pAEYUFWa4lpTXByZfa7uDpWnTDlrdjK+hnvUrA5mw4j+jLi/mtIHz2gEd9FS0RS2TApCWUCPb6M7WiKQPsIYsDptDFwyPEvXY8cLmHR3O01hhja7JY6+x00LY/rbh3Plts/x+M2nxCohmdVmxvZ7BYGDevcpEoAlFyT3/wDeb/eyt6ZS7A67ex4Yy7FO0wi6ATs+fg7Bt98YZXNnU4azBkfP9JIErcG3Kj+HX4qI1cSUXW54peIp30EjcDWjblYrDXfaotFMHFq03az0sQHj8fPd9/s4J3XVrJobhbl5ZUx9h3SE3n21cvI7NXe/GShnM92u5WERDt2u4Whw7ty+72TGkf4GOh00mDGPPZLRj14LQGvuUILuL34ipvOp9F62IDKoA9QWWm3FNgW11fRK4JGwJVor9GclJBo5/Z7J9EhvW7x65rmR25OMY/dPxuvJ4DH7cfptPHBG6t58KlpFX6lpCQHTzx/IauX7eXvf/6uqs9AQtaWPGa8eBGFBW7S0hOb9fcidWAmZXtNKqgakg4jtb+rfpQF/6ojgTyUcohPSLFeETQCI0Z3wywhwWqzcMrkPvzj9SsYPrpb4wumiTsvPbuYkiJPhenH4/FTWuLlgbu+4OXnf+DI4crSzTkHirBW61gnJZSVeVmzYj/9BnZs1koAwHMkSsE8i6BgY907wmnC8VOzL+BI3F5JK4JGwOG08X9/mIIzmFlssQicThsDBqvyFE2ZKKSJH0WFbrJ35Zs27DIMyZJFu/jj3V9xNE/V5NmyIRefSciw1xNgy4ZDDS1uXIhWghoJ3qLI2kOaupBEzU2XdlG3QorR0aahRmLI8K489+plrFiSTUmxhwGDOzNgSKdaI5D27jnGh2+uJmtzHolJdqaeO4gzzx+Ey9WWKzY2T/y+ACJarD2Vs/1/PfM93TJTKSv1IkRk5zKrVdCpc9M6hmOlxwWnULwrB6Oar8CW6KTbmWOaSKrWghVVBytalFnId1D/YVwrgkYgEDBYu2I/27ccJjUtgQlT+tE+rfYs4r17jvH4/V+rkFOpSmF/+OYaPnxzDYOGdeEXt51MRvemyabVRJKWnkhqexdHDtcwEw76ALK25GGzCdPVg9VmYco51XvbNk9O+O2V7HhjLt5jJRjBEFJropOuk0fQ+dRhTSxda6A7cBDzmb8gXkYdnUfQwJSXeXn893PIyy3B4/Zjt1sRFrjzd5NrrVX0zGMLWLf6QNTVX2KSgz//czopqa4GkFxzPGxal8NzTy6s6FERK06XFSGUYrjx16e2qM51ZYfy2TDjXbI//wFboovBt1zA4FunY7Fpk2d8WI6KFApHoBrXDKnTlXRCWRPx5n+Ws2hOFv5qPQlcLht/f/2KGlsU3vKT9ygvi97sxG63cuHlJzD9Kt0XtjmxP/sY77+xmvWrYq9Qa7UKEpMc/OK28YwZH3sJbE1boBxYC/ionBW2Q1XQr
ZtRJ5oiqNe6QgjRQQgxTwiRFfwfUT5QCDFKCLFUCLFJCLFeCHFV2L7/CSF2CyHWBv9G1Uee5siSRbsjlAAAAjauq7kqY1JyzWV8fb4AWVvz6iOepgHI7JXGtb8ah80W+88rEJAUF3n49zOL2bk9ftEgDUHB1r3sn72M4j0tw6Hd8kkAxqO6lg1A9dQ4kXha9ut7pfuBb6SUM4QQ9wef/67aMWXAz6WUWUKIbsAqIcQcKWVBcP+9UsqP6ilHs8Xvj15IzixiJJyzzh/Mx++sjShCFsJiFXTp1o7Vy/cx88MNHD1cSmav9lz6k5EMGKwyoaWUbF5/iB8W7iQQkJx8Wm9Gjc2MaJSjqRkpJds2H2brxlySkhyMm9iL1BqqxXbqkkxqWkJFhFCseH0BZn6wnrsfnFpfkeOOJ7+Iby7+I0dWZalS0x4f3aedxOR3HsTmannd1VoWAujQYFevryKYDpwefPw6sIhqikBKuT3s8UEhxGGgE1BQz9duEZwwshtrVuyLsBf7/QZDhnet8dyzLxjM7h1HWfnjXvwm5SlsNgsJiXb+9fT3Fcpi8/pD7Niax533T2b46G68+velLF+SXRHXvmbFfgYN7czdD0ypNdtZo/D7Ajz92AJ2bj8S9PNYeP+N1dzym4mMjWLGEULwq1+fyrOPL8TnC8ReaE7C/uyC+AkfRxZd/Th5y7dieP0EylVZ6QNfr2D53S9y6r/urnJsyd5cNvz1fQ4tXEtit44Mu/syMs/VjZiaK/UdCbpIKUP2jUMo70VUhBDjUB2Zd4ZtfiJoMnpWCBG1i7sQ4iYhxEohxMq8vJZjDrn6F2NISHRUSRxyOK1cfNWIWp28FquFW+85jcefu4BzLhpSkYfgSrCTmOTgprsmMPeLraZli994aTlbN+ay/IfsKnVtPG4/2zYfZvmS7Pi+0RaKETDMTXdhzPliC1lb8yruo89n4PMG+PfT3/PFRxuZ++UW05n/kOFdefTZ8zn97AH0GZCOzR7bz61r95S6v5EGpvRAHrmL10eEiQbcXna8MRd/WD5B4fZ9fDbyV2x7+SsKNmdzcP4qFl75KOuefLuxxdbESK0rAiHEfFSrp+o8EP5ESimFEFGnPUKIDOBN4DopZeiX93uUAnEAL6NWE4+anS+lfDl4DGPHjm0xHu4uGe144oULmf3ZJjavP0RaeiLnXDikTpnEGd1TmXbREISA9asO4vMFSEl1sWbF/qh5h/lHyvh+wU7Taqcet5/FC3cx/rQ+x/muWj5FBeW8/vJy1izbh2FI+g7oyHW3nEyvvpHL74VzskzNeD6fwSfvrsVqEXzw+houvmYEF1x6QpVjOnZKYtjIDDp2TuK0M/qx+JudZO8+BlIipTI5ha8WHU4rF10xPO7vt76UHTyKxWEn4DYJXpDgKyzB6lQuwhX3voSvqKxKgoS/1M3aR99k0E0XRFQk1TQ9tSoCKeWZ0fYJIXKFEBlSypzgQG9SdASEECnAV8ADUsofw64dWk14hBD/BX5bJ+lbCB3SE/npDScd9/l7d+fzxB/m4PMGKjpe5eWWsHvH0agmBykly77fE/2iLTBaLF74fQEe/d3X5B8prbifO7cf4Yk/zOGxZy+gS0a7KsfXVDrcCEiM4DU+f389w0Zk0Ke/akOal1vM47+fg7vch9cbwOGw0i7FxePPXkBKqovSUi8v/vU7Du4vxGIR2GwWrr1pHIOGNV3PgWikDuoRsRoIYU1wsPTOv7Nv5hKkIZGGYfr9Mrw+vrnkT5y78BkdWtrMqK+PYCZwHTAj+P/z6gcIIRzAp8Ab1Z3CYUpEABcDG+spT4vgaF4pi+ZlkXuwiP6DOjFxar8aG6C/9uKPuMsjf4TRlICwgETi95vvdzptnHp6X3WNgMHsz7cw94stlJR46Nk7jauuH6PaarZSVi3bR1GhO6KNpM8bYNanG/nFbVV7Qowa053FC3fVauf3+Qy+nZdVoQj+8ZfvKCxwI4Pnucv9eD2lvPXKcu575CySU5w8+sz5HDlcgtvtJ6N7CtZm6rdxpCQx5M5L2Pri5/jLKhsrWRIcIGHvZz8gawiMCHFk5TbWz3iHUQ9e25DiaupIfb91M4CzhBBZwJnB5wghxgohXgkecyUwCbjeJEz0bSHEBlSt1Y7A4/WUp9mzaV0Ov79jJrM+2cSyxdl8+NYa7rv1M/Jyi02PLy/3kb07v9brOl1WbHYLTpcNaYCMZvYWkJhsp0dvtYx/9cUf+ez9dRQcK8fvM9iVdZSnH/mGrRtzj/ctkn+0jLUr9wfr7jS/lceurCOm/QAMQ7Jja2To5sVXjyQxyV7rIC0NSWmJspUfzSvlwL7CCiUQ/hrbNh2mtKSyh2/Hzslk9mzfbJVAiLFP3cioh3+Os6PyYSRmdqTPVVMwfP6YlACo/sabX/i0IcXUHAf1WhFIKY8CZ5hsXwncGHz8FvBWlPObX4xcA2IEDP719PdVTA1eTwCfz+C///yR+x45K+KcWKI8rTYL1908ntISD3aHlff+uwp3lMYnSCg85ubR+2ZzzS/GsOz73RENc7zeAO/+dxWPPH1end5fIGCoKKUf9mCzWzECkk5dk7nnT2c0qyqaHTsn43BaTcNyzZq/pHdK4srrTuTDN9ZQXBS9CbvTaWPUSZmAyiiPGqIrlJ8mKTlqbESzRFgsDLltOj0vOhVXlzScqcl8+/On8JdGtl6tCW9BSe0HaRqV5j0FaWXs2nEUny9y8JGGZMuGXPwm+5wuOwMGd6am2nQ9erVnwpS+nH3hEEaflBlh8qiOYUi8ngDvvLYKi9X8wvv21L4Kqc7n769nxZJsfD6D8jIfHo+fg/sKefrRb2K+RmFBOSXF0QfbeHDKpD5YTG6ow2nl3IuHVtl2LL+MJx+Yy2v/+LFGJQDQoVMi4yao0hBdu6dGTSjz+w02ro0967g5YAQCLP/tv3in06XMHHsr72dcwZJbnyOpR6eKPsURRFGEacPbbpBCc0UrgkZE2Zijj+jRTNA33HEKSclO7NXKVYfyCH5114SKbe07JDL4hC4xZbUKQYWjszoJNfgsojHvq214q0XXGIYk71AJe2sxb2VtPczvbv+c3/zqE379i4949L7ZHDpYVGcZ3G4fn72/jntu+oSbrnqXh387i03VMriT2zm595EzSW3vwpVgIyHRjtNp46c3nFTFUZt/tIwH7/qSbZtqN5PZ7BauvHZ0xWdks1n46Y0nYXeYfA4S3vzPihZTahpg1R9eZeu/vyBQ7sFfUl4RNlqy6xDCxPFrTXSSee44rIlVVz3WBCcn/fWWKttU5FTzMyG2JXT10Uakb/90LGbjs4B+gzricJhHUnTJaMdf/30xixfuYsvGQ5SVeHE4bQwa2pnJZ/WnXUrVfIRb75nIc08uYs+Oo1htlqj1iqxWC1anwOerWlPebrdw1vmD6/TeDENSVmpem95iFRw7Wk7PKBPB3Jwi/vrQN1VMZruyjvDY/V/zt5cuISGh9pLbUko+/2ADMz9YX2VFtHvHUZ57YiG3/vY0ThzXo2J7/0GdeO61y9mVdQSfN0DfgR1x
Vqv79Pn766vY8mvCIgQZmVXDIiec3pec/YV8+cmmCF+B1xPgi4821ppU2Bzwu71sffEzAmVV70Wg3MPez39g4uu/Y8mvniFUB0f6A5zyz7vo+9Mz2fCX99n8/Cd4jxWRNrwvY/9yM92mjgbg2KY9/HjnCxz6bj0Wm5Vel57G+OfvwNWpfSO/Q41WBI2IzW7lV7+ewL+e+R6/z8AwJDa7Bbvdyi9uG1/juYlJDs6+YDBnX1D7AJ2U7OSBJ8/h4P5CDh8qZvZnm9m++XBE1IuUEmlErlCcCXbOvzT2EsJ+v8GCr7djtVkImCRnlZf5WPljNgOGdDKNjpr92eYIk5mU4PMEWPrtLqZOG1SrDAtmb+OrjzeamsW83gBvv7KS0SdlVun/YLEI+g/qFPWaa1bsjynK1mqz0KtfB9OS4HaHNWqobm5O3Vc8TYE79xjRbJMWh432g3tyTe5H5H6/AcMfoMtpwzmyYhsLr3yU8px8htw+ncG3XYQrvfL+lGTn8tWEO1W+AWB4/ez5+Dvylm3l0i3/xerQ/TYaE60IGpkTT+7Bw387j3lfbuXQwSL6D+7MmecNiqk/QV3plplKt8xUMnu25+F7Z+Ep9+P1BrBaBVabheGju7Fmxf6I83zeAFs35saU9Cal5JnHFpC19bCpEgixZNFusrbk8fhzF0R0ZMvelW8amunx+MnedSyGdwozP9wYYZYKJ/9oKW63P6bVRQhvlLj5EBaLuo8DBneK2lw+s2d7nE5bhPNeCEyT15ojri5pUU03hsdHUo/OWJ2OikY06558m/VPvlMRZpq/dgdb//k5F678F0ndleLd+PQH+MurriClL4A7r4DsT76n79VtKo6kydGKoAno3qM9199a8wognnTsnMyfX7yY77/ZwbbNuXTu2o6p0wby9z9/a1rDyOP2s3ldTo2KIJQLsX1TLllb82p1UPv9BvlHy1ixZC+nTK5qI+rWI5U9OyOVgc1mMY3iqY6UkoJj5TUeY7WIqKa36hw6UMT61QfwRou8AlLTXNx456l079Ge9E5JUY8bdVIm7VJdeH2lVfwxdoeV6Ve2jPLhNpeDIbdNZ8s/P69iHrImOOn7k6k421d+RmWH8ln3+JtVMpADbi9uf4DVf/ovp716HwC5329A+iLvr7+knMNLN2tF0MhoRdBGSEp2MG36UKZNHxq2zTx80WazkFhDaOPm9Tk898QiAoZhqkii4XH72bj2YIUiMAzJiiXZHNhbaDrj9PsNPntvHcVFbq66bkzUcEwhBOmdkqJW+hRCvf9P3lnLGecNjhrKKqXkrf+s4Nv5O5BSRlVuNpuFa34xJmpjoZIiDx+9vYZli7ORUnLC6G507JzE9i15CFQ4arRyFs0RT34R/a49C3+5h6zXvkZYLUh/gH7Xnsn4F+6scuyBOSsQNhuqdn4l0h9g72c/wKvqeXLvruSv20l1rAlOknu33mTG5opWBG2Ys84fzO6soxElFIRFcOpkc8+uETB48a/f1Vh2IRpWqyA1zAT28nM/sHrZvhqv5fMp/4Mrwc4lV4+Metyl14zk9ZeWmeYGSAkFx9x8PXML82dt54Enz6Znn8hBePWyfXz/zc5ay4MLIRg+ylwJeD1+HrlvFvlHyiqK2a1aupd2KU7+9u+LcThsJLVz1NqrujngKynn+1/+hX1fLMXqsCEDBkPvupT+Pz+bpO4dsbeLVKhWhz1qYJzFVjncnHDPFRyYtzLCAS0sgv4/i8yn0TQsOny0FSKlZMfWPL6ZtY21K/YTCJjP2seM78FpZ/bD7rBit1txOm3YHVZuuOOUqOaO2Z9vpqTYPDqoNqxWC5PP6g+oqKBVy/bGpFC8ngBzZm7BiPI+ACZO7cfEKf1qvI7fZ+Au9/HqP5aa7p8/a1uN8gihcg2uuv5EklPMV0zLFmdTWOCuUtHUMCTlZT6Wfreb5BRni1ACAAuvepR9XyzF8PjwFZfjL/Ow5YVP2frPz1l+70t8e+2TZH+6GCNQqTi7nzvONMvY4rTT72eVZcu6TBzOuGduw5rgxJ6SiL1dIs70FM768kkdNdQE6BVBK6O83MdfH5rP/r0FGIbEahW4XHZ+/8TZdO1WtbyxEIJrfzWOs84fzIY1B3E4rIw5uWfUQa7gWDmfvLuuxte3WMBms9KtZ3v27cnHZrMihOrAdd2tJ5OXW8LKpfvYuT0vasMdM7zeAOXl/qhd23Jzilm8MNLUYMa+7AJKS7wR1wqVhzCjQ8dEhpzQlTPOG0S/gR2jHrdp3UHT8hVeb4ANaw5yfrXqpM2V4j2HOLRwLYanqonHX+Zmyz8+U1pRSrI/+4F2vbsy7plbyZg6Gmf7ZCa+ei+Lb/gbMhDA8PqxJSeQ3KsLox76eZVrDb7pAvr95AwO/7ARa4KTzqcO08XomgitCFoZb/9nBdm78yts9z6Ubf7Zxxcy48WLTGejXbulRCgJM378fncN6XDQqXMyU6YNZMSJ3ejRO43CgnI2rDmI1WpBGpLXX1pWMUjWtUOay6USv6Kx8OttppnZpkhM8zlGj8vkwL6CCL+H02XjF7eNj+oTCKd9h0QsVhGRqCcEpHVoPmU2aqN4xwGsTjsBdxTlGPTpBErdFGzaw7yLHsDRLomzvnySvldPpdPJQ8j63xzKc47S7cwx9LpkIhZ75HBjT06g+znHX5lXEx/ajCKQhsHRtTuQAYP00QNa5czDCBgs/X53xEAmJRw7Wsq+7AJ69o5oK12FooJyFs7JYtvmXLpkpHDm+YPo3qM9oJyg1esShRAC/u/BKWT2bF+xLbV9AhOn9GPmB+v59L31VaKCojliLRYB1TKeHU4bF15+Qo3K41BOMUYMfmshoM+AdBISI1cWZ503mEVzsygu8lSEwjocVnr16cAJo2LrHzH5rP7Mn7WtirkEwG63cub5tedDNBdSBmQS8JgnIpohPX48nkJmnX43Pzn8Ce36ZHDiI9c3nICauNImfASHvl/P+5lXMfv03zDnzHt5r+tl7PvS3E7ckvH7jaglIyxWC6W11PA5dKCI390+ky8+2sCmdYdYNDeLh++ZxcqlqpvZoGFdcLoi5w7CogbAcCUQouBYOZ9/uKHGEs4i+C10umwMHdmV06Yqv4XDaSUhwc5FVwyvEu1kxoAhNddjAjWoJyY5uPHOU0z3J6c4eeyZ85k6bSAd0hPpnNGOi68ewX2PnhnzCiajeyq/vH08docVV4IdV4INu93CFT8/kX4DoyevNTeSe3Wh21ljsNaxF7Hh9rLqj681kFSahkK0xBofY8eOlStXrozp2LKDR/h40HURFRKtiU4uWvEv2g/p1RAiNhn33/45OQciM1btDit//9/lpjPhEH/+4zy2bDwUkQibkGjnH69fgcVq4ckH5rBnZ35FZI3ForKen3j+QtqbmD5+WLirikmoOk6XjTEn96BdipNRJ2UyZHhXhBB4PH5KijykpiXUWjeptMTD4UMlPPq72aaKUAhl/pp8Zn9OO7M/ye0avupneZmXDWtyCAQMho/qFtXv0pzxl7n54eZnyP74exUyKqUqOV2LCc7VJY1
rcj6q8RhN0yCEWCWlHFt9e6s3DW17ZRaGSRSD4fGx6flPmPDvu03Oarlce9M4nntiYZUsW4fTygWXnVCjEggEDLZsyjWvhiBhx/YjDB7WhfseOYuvPtnIt/N24PMGGDm2O5f9ZJSpEgBVjK2mKBlpSK675WRc1TJ+nU4bzk41fz1Lij385/kf2Lg2B6vVgs1mwRuI/Kztdit3Pzg1ovNYQ5KQ6KioRNpSsSW6mPzmH/C+WIrnSCGJ3Tvy413/YNfb39RYetqbb95bQ9N8afWKoGjbvojIBwAZMCjatq8JJGpYho3M4L5Hz+Tjt9exd3c+aemJXHj5CTH1JxaEyoZVJbxmqsNh5ZKrR9YY0x/OiBO7YUQx3lsscNlPR0UogViQUvLXh+ezP7sAv7+yAb01WFbbZq+MVvrlHac0qhJobThSknCkqHDiU/91N70unsiah/7HkRXbTI9P7B49qkrTPGn1iqDTyUPY+/kP+KslrlgcdjqdPKSJpGpYBgzuzP2P1S0px2q1MGxkBhvX5URUyrRYBP1qKM5WEwmJDm66awIvP/cDgYBR4SR2OKzccOcpMSkoM/bszCdnf2GVeH1QA3+XjHZMv2oEtmA9pZragGrqhhCCzGnj6H7OSXw84FqKd1Ut8W1LdDLygZ82kXSa46XVK4L+153N2sffxO/2Vin4b3XZGXLnxU0nWDPkultO5pH7ZuFx+/F6AlhtFqxWwa33TIypv0E0Tjq1F30HdGTxwp0UHCtn6PCujB7Xo17XPLi/MKrJKf9oGROCPZk1DYMQgvO+f54Flz9M/todWOw2DF+AE+67mgG/PLepxdPUkXopAiFEB+B9oDewB7hSShlRLlIIEUD1JQbYK6W8KLi9D/AekA6sAq6VUh5f2moUHKnJXLD0RX741d/IXbwRgPQTBzDh5d9UVELUKDp1SeYv/1TF6bK25tElox2nnz0wpsJvtZHeKSmuRdYyuqdELRGd3jF6Ebi2QHluPoe+XY8tOYFuZ4zG6myYFVFiRjoX/PB3infn4D5cQPthvbEnx7+KrqbhqVfUkBDiL0C+lHKGEOJ+IE1K+TuT40qklBGjiRDiA+ATKeV7Qoh/A+uklP+q7XXrEjUUjr/MjQwYpjVSNC0LKSUP3TOL/dkFVUpoOJxWbrzzVE6e2LvphGtCVv3pNTb97UOE3YYQauZ+xmeP0XVybD6d6kjD4MCcFex6byFCCPr+9Ay6nTmmxZTJ0FQlWtRQfRXBNuB0KWWOECIDWCSljMiaMVMEQn2T8oCuUkq/EOIU4GEp5Tm1ve7xKgJN66KkyMNLzy1m84ZDWK0WLBbB5T8dxZl17K7WWsj+bDHfXRvZTN6WnMCVe9+rUi46FqRhsPCKRzgwd2XFNW1JLnpePIFJb/xeK4MWSEOFj3aRUoa8RYeAaPVjXUKIlYAfmCGl/AxlDiqQUoYCzPcDUXP4hRA3ATcB9OzZs55ia1oDySlO7vnTGRQXuSkp9tCpc3JE05u2xManPzQP6zQku99fyOCbL6zT9fbOXFJFCQD4S93s/ewHDs5bRfezI8YTTQulVkUghJgPmDVWfSD8iZRSCiGiLS96SSkPCCH6AguEEBuAwroIKqV8GXgZ1IqgLudqWjftUlwRfZvbIuWH8k23+8vdqt1kHdn51jxTxeIvdbPz7flaEbQialUEUsozo+0TQuQKITLCTEOHo1zjQPD/LiHEImA08DHQXghhC64KMoEDx/EeNBoN0HXyCEr2HEJWK9dtS0qg0yk1l+gwQ9bQda76a2haNvWtNTQTuC74+Drg8+oHCCHShBDO4OOOwARgs1TOiYXA5TWdr9FoYmPkH36KLclVpdG8xWmn/ZCedDvjxDpfr+9PzlDXq4YtyUXfa3QrydZEfRXBDOAsIUQWcGbwOUKIsUKIV4LHDAFWCiHWoQb+GVLKzcF9vwN+I4TYgfIZvFpPeTSaNom3sIQD81bR+/LJdBjVF4vTjqN9MoNvvYhpC55GmNXdroVel06ky2nDqygDW5KLbmeNIfPccfEUX9PEtPqicxpNa+fIqu18fcZvMfwBAmVu7O0ScHZI4fyl/yCxa/36IhuBANmfLGbn2/MRFgv9rz2LntNPNVUs0jDY9OxHbPjbB7jzCkjp140xT/2K3peeVi8ZNPGjQcJHmwqtCDQahZSSD3tfQ+m+vCrbhc1K5nknc+Znj1Ueaxhkf/YDWf+dTcDto+81U+n30zPilnC27Df/ZNvLX1bpQ2xNdDLxld/S92ptSmoOtNnqow1BwOPFc7QIZ8dU1axbo2ki8tfuwHOsJGK79AfYP3sZAa8Pq8OOlJJvf/Yk+75YWhEJlPfjZra9/CXnfftsvZWBJ7+Ibf/+IqKjWaDMw4p7X6LPVVN03kEzpk00pokXRiDAyvtf5p2Ol/DRgJ/zTsdLWP3Qf5GxtMbSaBoAf5kHEa1pjgTDp9J0cr9bX0UJgAoDLdi4hx2vz623HMc27sHiNJ8Ulecew1dcVu/X0DQcekVQB1b89t9s/89XVSqZbnr6Q2RAMubxX0Y9z/D5ETarnhFp4kLukk1kvToLb1EZPS4YjxEllDPthN7Yk1Ttn13vLTDPCShzs/Ot+Qy66YJ6yZSQ0QHDa958yOKwYUvUeR7NGa0IYsRXXMa2l76MWPr6yzxsfv5jRj74M2zV2vodmLuSZXe/SOHWfVhdDgb8chon/eVmbAktr1uVpnmw6o+vsenZjwiUe0FKDny9nISOqZQbhtoGCKsFq8vBKf/8P6DSNxANEYds7NQBmaSN6MvRVduRYY2grAkOBv7y3FbZI7w1oU1DMVKy9zAWe3S9WZ5ztMrzQ9+u45tL/kThlr0gJYFyD1mvzmb+9AcbWlRNK6Vw2z42PfOhcsYGgzz8pW7Kco/R//ppZJ4/ntQhPen3szO5aOW/6TxeJZEdnL8Kd16URH6rhYFxKht9xqePknZCH2xJLuwpiVhdDrqffRJj/3JzXK6vaTj0iiBGErt3JOCN7HQGasbl6ty+yraVf3iFQHnVZjgBt5fDSzaRv24nHUb2i7hOwOujeOdBnOkpJHROi5vsmtZB9qeLCZiYXwy3l5x5q7hs+xum52X9bw5E6xJntdDn6ilxkS+xawemr36Jo2t3UJKdS9oJfUjp1y0u19Y0LFoRxIizfTJ9r5rC7g+/rTLAWxOcDPjFORW22BDHNuwyvY4QgqOrsyIUweZ/fMrqB19DGhLp89Nl0ggmv/0Aro6p8X8zmpaJlFEH9OI9h5CGYRrfX7h9f9RL2tslYrHG12yTPqo/6aP6x/WamoZFm4bqwCn/vptel07E6nJULH37XjOVcc/cFnFstBm9sFhI6lG1Ic6u9xaw8v7/4Csqw19STsDjI2fROuacdS8tMc9D0zD0vHiCeVNp1Pcqb9kW033JPTtHvWZCF73y1OgVQZ2wuRxMfvMPuJ8rpCQ7l3Z9MnCmmTdFH37/NSy/+59VIzUsAkdaMhlTR1c5ds0jr1dJwgGQPj9FOw5wZPnWVttbWVM3UgdHL79uddlN8wkABt5wHvtnLTON6i
naeZCynKMkZqTHTU5Ny0OvCI4DV3oqHU8cGFUJgPrxDb5tOhanHXtqErYkFyn9uzPtm8i6L6XZpkVbQYgal/WatoUQgvSxA033GV4/nU42b8iTee64CB9W2EWVD0HTptErggZCCMFJf76J4fddxdHVWTjT2pG3chvfXPxHfEWldD/3ZEY+8FOSe3QmuU8GhVuyIy8iJe2H6CY8rQ0pJflrd1Cy9zAdRvSlXZ+MmM89+bk7mHP2vRXho6AKwQ39v8twpZv7k4TFQmJGOmX7j0TsM9xeSrJzj++NaFoNWhE0MK70VLqfNZaFVz3Kvq9+rDABZb02mz0ffcv0NS9z4qPX8911M6qYhywOG+2H9CJ9jPkMMBzDH0BYLTphrQVQlnOUuef9nuIdBxBWC4bXT+b5JzP5rT/EVOahy6nDOO/b51jz0H85smIbCRnpDL/v6lrLQneZNIL8dTsjzEO25AS6TjyhXu9J0/LRRecagaNrd/DVxF9H+AGEzcqAX0xjwku/Ye3jb7L+z+9BwMAwDDKnjeO0/95XYX7yFpZQui+PpB6dcKSq3rN5y7bw46//zpGV27E4bPS9agrjnru9zr1pNQ1HzsI1bP3XTNx5hWReMJ5d737DsfW7I5KuBt18ISebBB3UlbxlW9j+2mx8RWX0umQivS6ZiMVuo/RAHp8NvxFvYWnFSkLYrSRlduKSTf+NSIbUtE509dEmZOPTH7DqgVdNnXWJ3TvSYWQ/chauxWKzYgQCJHXvyLRvniYpsxMBr48f7/w7O9+ch8Vhw/D66XftWQy+9SJmTbwLf1mlM9risJEyMJOL1/7nuOrPa+LLmkdeZ8Nf36+YAFhdDgIer2nkjy3JxU8LZtYrlHPVn15j0zMfqex3Q2JLcpE2vA/TFjyDzeWgcNs+lv3fixz8ZjUWm5Vel0/i5KdvxdWp/XG/pqZloauPNiGO1CQsdpupIvCXechZsIaA20tojli8K4f5Fz3A9NUvs+yuF9n59ny1P1jeYudb88lZsAa/u+oKw/D6KdmTy4G5K8mcphuHNCUle3PZ8Of3qpQkqV6eJJyAx0fA7cVSLR8lVgq2ZLPp6Q8rykyAyjrOX7eLbS99wbC7LiN1UA/Onj3juK6vad3oaWMj0OvS05BG5DTQlujEV1wWMUDIgEHh9v3krdxG1v++jjApBco9FO86CCbX9Je5OboqK75vQFNn9s9aXqVlZG0kZqTXqzBb9qeLMXyBiO2Bco+OCtLUilYEjYCzQwqnv/sg1gQntkQXFrsNW6KTbmePjVrC2mK3cWTFVowoZS2EMP/obIkuEjM7xk12TSRGIMCm5z7io/4/451Ol7Dgykco3LavyjE1Ou+rbbYmOjnprzfXy9kvDaPC9h8psC6TrqmZeikCIUQHIcQ8IURW8H9EmqIQYooQYm3Yn1sIcXFw3/+EELvD9o2qjzzNmZ4XncpV+95j3LO3ceITN3De988z9eNHSO5hnvVpeLzkr9sZNZMUqwVrQqSDz2Kz0vvySXGUvGXjyS9i67+/YO1jb3JwwZq4ZGp/+9MnWfXgaxTvysFztIjsT77ni3G3VlEGPS86xVTJW10OMqaMUnZ5IUgZ0J3Jb/6BPleeXi+Zek6fgMWkSZI1wUG/a8+q17U1rZ96OYuFEH8B8qWUM4QQ9wNpUsrf1XB8B2AHkCmlLBNC/A/4Ukr5UV1et6U5i2tiz6eL+e5nT1apX2RLdDHo1gvZ8+G3lO41TzbrMnkEPS84hdV/+i8Wuw0pJY52iZwx83E6nlh7yGlrxQgE2PPBt2T972vKc49RuHUvWC0Ybh+2JBcdRvbjnLl/Oe5S4Mc27eGLcbdFFBTEIuh92SSmvP+nik1bX/6S5Xe/iOELIP0BbMkJtOubwfmLX8CeHN0XULLvMEVZB0jp143kXl1ilm3Zb/6p+mUEs9lDSYzn//CC7gegARrOWTwdOD34+HVgERBVEQCXA7OllLpdUZDel0zE+uFDrPr9KxRu24urcxrD77uKIbdfzN4oNeSFzULvyyYx9I5LGPir88lbtgV7u0Q6jRvc5NFCRiDAhhnvsum5j/HkF5M6KJOT/nIzPS44Jeo5hdv3se+LpQirlV6XTIxp8Mtfv5Pc7zfgTE+h50WnYkt0IQ2DBZc+pBzpZk1YSso5umo76554u8ZGQjVx6Nt1mC7TDEnOwrVVNg2+6QK6TjyB7f/9GndeAZnnnkyvSyZGbW/qL/fw7U+f4MDXy7E4HRgeHxlTR3P6e3+sUXGEGPf0rfQ4fzzbX/kKb1EZfS6fTJ9rpurQUE2t1HdFUCClbB98LIBjoedRjl8APCOl/DL4/H/AKYAH+Aa4X0rpiXLuTcBNAD179hyTnW2SidvKWPfk26x74q0qkSCgzAuX73zruOrDFG7fx7aXv6R072G6ThlN/2vPimmQiZXFv/obu95dULWBeYKT0997kJ4Xnhpx/Mr7X2bzC59WmGyEEJz42C844Z4rTa9v+AMsvOpRDsxZAYZE2FXnt7O+fBJfSTkLr3wEf0mkEgjH1SWNa3LqtAitYNd7C/jh5mfwF5dH7GvXvzuXRykFHQvfXT+DPR98WyV4wOKy0+OCU5j6wUPHfV2NJkS0FUGt00chxHwhxEaTv+nhx0n1S46qVYQQGcBwIDyE4ffAYOAkoAM1rCaklC9LKcdKKcd26tQp2mGtimF3X64afQQHamGzYk1wMO7pW49LCez+YBGfj76ZzS98yp6PvmPlfS/xydDrKTuUHxd5y3KOsuONeaZRTivufSni+IPfrGbLi58TcHsxPD6MYAjl6j/9T/lHTNj07Ecc+HoFgTIPAbcXf3E5vqIy5l3wB3a9v7BWJRCS53jpedGpiOreXpTDd+idF9fpWt6iUrI/XUz2Z4spO3gkQgkAGG4f+75Yiie/6Lhl1mhqo1bTkJTyzGj7hBC5QogMKWVOcKCPUj0NgCuBT6WUFWEwUsqc4EOPEOK/wG9jlLtNYEtwcv4Pf2fvzCXsn70cZ3oKA647m/ZDetX5Wr7Schb/8q9VBkF/qZuAx8eKe//N5Df/UG95d741D+kz71tbtH1/RL38bS99YWrCCXh8bH9tNuOfvyNi35Z/fmY+kMtglzghokfPAFgE3c4aU/ubiYIt0cWZXzzB/AsfACSGXzmEe154KoNvm17zyWFkvT6Hpbc9jyXYJtLw+jGiRZA5bJQfysfZIeW45dZoaqK+PoKZwHXAjOD/z2s49hrUCqCCMCUigIuBjfWUp9VhsVnpfelp9L70tHpdJ2fBGoQtcgEo/QGyP11cr2uH2PbyV1H32dolRPgvPMeKzQ82DLxR9vmKzN1LRiBAh1EDOLx4U5Vs63CEzYotycWYJ2+MKmcsdJ00gqsOfsC+L3/Em19Ml0kjSBvWO+bz8zfsYultzxMo9xCItDBFIAMGyb27Hr/AGk0t1FcRzAA+EELcAGSjZv0IIcYCt0gpbww+7w30AL6tdv7bQohOqMjqtcAt9ZRHEwWzhLbKnfULqZRSsvPNeRTvPBj1mF7TJ0Zuu2Qih5dujjAl2ZJd9
DDxJwB0nTSSvTOXRMosJQN/OY1AuYftr86qKLNgcdqxJ7mwt0+m25knMuL+n9CuDoOqEQjgKy7HkZJYRZHZkxLoe9XxtXjc+q+Z5vkhQiCslip1iGyJLobefZmO+tE0KPVSBFLKo8AZJttXAjeGPd8DdDc5ruaSiZq4kTFllGnmqbBaaozoiYW1j77Bxr9+EHW/sFkY+utLIrb3v+4cNj//CSV7D2N41MBodTlIHdyTXhdPML3WmCdvIGfBanyl7orMamuik96XTSJ1YA/GP38Hfa+Zyq53F2D4/PS5YjJdTx9V52QtaRis//N7bPzLe/jLPdgSnZxw39WM+N019a7yWro/DxkwMQNJSergHpQfysd7rAR7ahIjfncNJ/zW3HGu0cQLXXSuDZH1v69ZevsLGF4fMmBgTXBib5fAhSv+FTWxrTY8x4p5v/uVNdbRSe7Tlct3vGU6gHoLS9j49IfsencBwmphwPXTGHrXpTXG+Rds3cuaP/2XQ9+uw5HWjqG/vpTBt1wY19DZVX96jc3PfFzFzGRLdDHst1dw4sPXRz3P8PlVVnENsmx6/mNWP/Aq/morIWuii7FP3cCQOy4hUO7BmuDUpcU1cUVXH20jFGzJZvtrsynPPUbm2WPpfcXkKnXuj67dwdZ/fk5Jdi4ZZ5zIoF+dX2Ontdo4MGcFC696NKrt3p6WzHmLnqXD8L7Hdf2inQc5snIbiV070OW04Y2SJ+Ev9/Bu50tNHdm2JBfX5H0aEZuft2wLS+94nqNrdqjKnpdN4pS/32nq4PUWlfLJkF/gziuoMAMJm5WEzu25dMv/sLdLbJg3pmnz6OqjbYBtr85i2a//geHzI/0B9n62mHVPvcMFS/5e0cMgfVR/Jrx8T9xe09E+Oar/wWK3cdW+97Efh33b8Af47ton2fv5EkQwssaZ1o5z5v2V1AGZUc8ryznKpuc/5tCCtSRmdmTY/11O10kj6vTapXsPRy8YJwSl+w5XkaFg8x6+PuO3FasHw+sn++PvyF+7g4vXvxJRWtqRksRFK//Fivteqkga7HnJRE76y81aCWiaBK0IWgnuo4Usu/PvVUw0/hI3xbtyWPfE25z0l5sb5HU7jhuMs0M7/CVVw18sTjv9rzvnuJQAwLon3mLvzKXq/QQn5v4SN3PP+R2X7zQ3MxXtPMgX427FX+ZRPodV2zk4dyVjnryRob++NObXTuiahhElDFb6AyR0qVpSa+3jb0XG/3v9lO7P48DXK+hx/viI6yRmpMclZFejiQe6+mgrYd+XPyKskR+n4fGx8635Dfa6oaxeZ8dU7O0SsTjt2JJcpI/uz7i/HX8Q2JZ/mOQLSIn7aCGHl2wyPWf5b/6Jt7C0wvGMlPjLPKy8/z94Ckpifm1HajK9L5+EtZr5x+py0OfK03GkJFXZnrdsi2mBOX9JOUdX65LgmuaPXhG0EkyjUGLYFw/STujDVfveY9+XP1J24AjpYwbS+dRh9XJ0egtLTbcLISiPkgl9YO5K0x4NFoeNQwvX0uuSyBDWaEx4+R4C5V72f/UjFpcDw+0l84LxnPKv/4s4NrlXF0p2H4rYbktykdSjbWTBa1o2WhG0EjLPHcePd0QO+Ba7jd6X1y8ZLRasTge9L4tf+eu0EX3JN5lNG14/HU8aZHqOxWatXA1U3+c0L/QWDVuCk6kfPUzZwSMU78qhXd8MEruZ93kY8btryFu+NSIfwmKz0fuKyXV6XY2mKdCmoVZCYkY6ox76ObZEZ4Wj05rgxNWlPaMeuq6Jpas74/56M9ZqIaTWRCd9rjqd5J7m1Un7XHU6FofJ3EZKMqaOPi45Ert1pMvE4VGVAED3c05izJM3qHDclCRsyQkkdu/IOfP/iv04W09qNI2JDh9tZeQu3sCWf35O+aF8Ms87mUG/Or8iYqilkbNoLSt/9zL563bi7JDC0Lsu5YTfXhm1wbsnv4gvT7mDspx8/CXlWJx2hMXC1I8eIvPckxtcXl9JeUVJ8I5jBzZ5SXCNpjo6j0DTJgh4fWR/8j2HvltPUo9O9P/52SR113Z6jQZ0HoGmjWB12Ol79VT6Xq2rl2g0saLXrhqNRtPG0YpAo9Fo2jhaEWg0Gk0bRysCjUajaeNoRaDRaDRtnBYZPiqEyEN1RGtOdASONLUQNdDc5QMtY7zQMsaH1ihjLyllRDx1i1QEzREhxEqz+NzmQnOXD7SM8ULLGB/akozaNKTRaDRtHK0INBqNpo2jFUH8eLmpBaiF5i4faBnjhZYxPrQZGbWPQKPRaNo4ekWg0Wg0bRytCDQajaaNoxVBjAghOggh5gkhsoL/00yOmSKEWBv25xZCXBzc9z8hxO6wfaOaQsbgcYEwOWaGbe8jhFgmhNghhHhfCOEwO7+hZRRCjBJCLBVCbBJCrBdCXBW2r8HuoxBimhBiW/D932+y3xm8LzuC96l32L7fB7dvE0KcEy+Z6ijfb4QQm4P37BshRK+wfaafeRPIeL0QIi9MlhvD9l0X/F5kCSEarJtSDDI+GybfdiFEQdi+xrqPrwkhDgshNkbZL4QQLwTfw3ohxIlh++p+H6WU+i+GP+AvwP3Bx/cDf67l+A5APpAYfP4/4PLmICNQEmX7B8DVwcf/Bm5tChmBgcCA4ONuQA7QviHvI2AFdgJ9AQewDhha7ZjbgH8HH18NvB98PDR4vBPoE7yOtQnkmxL2fbs1JF9Nn3kTyHg98A+TczsAu4L/04KP05pCxmrH3wm81pj3Mfg6k4ATgY1R9p8HzAYEMB5YVp/7qFcEsTMdeD34+HXg4lqOvxyYLaUsa0ihqlFXGSsQqtP8VOCj4zm/DtQqo5Ryu5QyK/j4IHAYaOjuMuOAHVLKXVJKL/BeUNZwwmX/CDgjeN+mA+9JKT1Syt3AjuD1GlU+KeXCsO/bj0BmnGWot4w1cA4wT0qZL6U8BswDpjUDGa8B3m0AOWpESvkdaiIZjenAG1LxI9BeCJHBcd5HrQhip4uUMif4+BBg3ji3kquJ/AI9EVzGPSuEcJqdVE9ildElhFgphPgxZLoC0oECKaU/+Hw/0L0JZQRACDEONXPbGba5Ie5jd2Bf2HOz919xTPA+FaLuWyznNoZ84dyAmjGGMPvM402sMl4W/Pw+EkL0qOO5jSUjQdNaH2BB2ObGuI+xEO19HNd91B3KwhBCzAe6mux6IPyJlFIKIaLG3QY183BgTtjm36MGPgcq9vd3wKNNJGMvKeUBIURfYIEQYgNqUIsLcb6PbwLXSSmN4Oa43MfWjBDiZ8BYYHLY5ojPXEq50/wKDcoXwLtSSo8Q4mbUCqu5tpO7GvhIShkI29Zc7mNc0YogDCnlmdH2CSFyhRAZUsqc4AB1uIZLXQl8KqX0hV07NAv2CCH+C/y2qWSUUh4I/t8lhFgEjAY+Ri0vbcHZbiZwoKlkFEKkAF8BDwSXvqFrx+U+mnAA6BH23Oz9h47ZL4SwAanA0RjPbQz5EEKciVK4k6WUntD2KJ95vAewWmWUUh4Ne/oKymcUOvf0aucuirN8odeJ9bO6Grg9fEMj3cdYiPY+jus+
atNQ7MwEQh7464DPazg2wq4YHPRCtviLAdNogHpSq4xCiLSQOUUI0RGYAGyWytO0EOXbiHp+I8noAD5F2UA/qravoe7jCmCAUJFTDtQgUD0qJFz2y4EFwfs2E7haqKiiPsAAYHmc5IpZPiHEaOAl4CIp5eGw7aafeZzli1XGjLCnFwFbgo/nAGcHZU0DzqbqirrRZAzKORjlbF0atq2x7mMszAR+HoweGg8UBidJx3cfG8MD3hr+ULbgb4AsYD7QIbh9LPBK2HG9UVrZUu38BcAG1MD1FpDcFDICpwblWBf8f0PY+X1RA9gO4EPA2UQy/gzwAWvD/kY19H1ERWJsR83wHghuexQ1sAK4gvdlR/A+9Q0794HgeduAcxvoO1ibfPOB3LB7NrO2z7wJZHwK2BSUZSEwOOzcXwbv7Q7gF00lY/D5w8CMauc15n18FxUt50PZ+W8AbgFuCe4XwIvB97ABGFuf+6hLTGg0Gk0bR5uGNBqNpo2jFYFGo9G0cbQi0Gg0mjaOVgQajUbTxtGKQKPRaNo4WhFoNBpNG0crAo1Go2nj/D/7qB+KLav5RwAAAABJRU5ErkJggg==\n",
+      "image/png": "iVBORw0KGgoAAAANSUhEUgAAAYMAAAD4CAYAAAAO9oqkAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjMuMSwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy/d3fzzAAAACXBIWXMAAAsTAAALEwEAmpwYAAB2KElEQVR4nO2dd3gcxd2A39lr6pZky3KXe+82YKoxBtN7JwQIISSQkEAaNfTwESCBNEgIvfdiMAZcARsbbOPebbnKTVaz6pXd+f6YO+mk25NOvc37PHp0tzu7N7u3N7+ZXxVSSjQajUbTuTFauwMajUajaX20MNBoNBqNFgYajUaj0cJAo9FoNGhhoNFoNBrA2dodaAjdunWT/fv3b+1uaDQaTbtixYoVh6WUGXb72qUw6N+/P8uXL2/tbmg0Gk27QgixK9o+rSbSaDQajRYGGo1Go9HCQKPRaDRoYaDRaDQa2qkBWdP65OWWsnbVPtxuB+Mn9yEh0d3aXdJoNI1AC4NOTO7BYvbuKiQjM4k+WWkxHSOl5J1XVzLnk40IQ2AIwYvWUm689XiOOi6rmXus0WiaCy0MOiE+n8l//voNa1buw+k0ME2L3v1S+e09p5DSJa7WY1ct28u8WZvx+61q2599ajGDhmWQ3jWhObuu0WiaCW0z6IS8+cJy1qzch99nUl7mx+c12Z2dzz//8lWdx3756Sa83kDEdktKlnyVjZSSXdn5rF+9n9ISb3N0X6PRNAN6ZdDJCAQsFs3fjt9nVttumpId2/LIPVhMRmZy1OOPFFXYn9dvcXB/MXf8ciYF+WUYhiDgtzj9vBFccvV4hBBNeh0ajaZp0SuDTkZFuR/Lsi9o5HQaFBaU13r82Im9cTojHxtPnJMVS/dwcP8RvBUBysv8+P0mX366iW/mb2+Svms0muZDC4MOjmVJNq07yLJvd5GfV0ZikpuEJHvPn0DAolefLrWe74zzRxCf4MIwqmb6LreDtK4J+P0BahbO83kDzHp/faOvQ6PRNC9aTdSB2burgCcemEd5uR8QmAGTE6cP5tKrJ/Dq/77H561SFbk9Dk45fSiJSZ5az9klNZ4H/no2H7y5ilXLcnC6DE46dTBdUuN4++UfbI+pa7Wh0WhaHy0MOiimafGXe+dG6PgXLdhO736pXP/LY3nvtZUcPlRKcoqHsy4cxRnnj4zp3F0zEvnZr4+vtm3n9ryo7fv2T613/zUaTcuihUEHZd3K/fhqGIkBfF6Tzz/ewBP/vZBjTxqAlLJJjLv9B3Vl4JBubN+cW83t1O12cMnVExp9fo1G07xom0EHpbCgDMuybPcVH6ly+WxKL5/f/ukUTjhlEC63AyGgZ+8Ufn3nyQwfldlkn6HRaJoHvTLooAwa2g3snYYYMLhrs3ymx+PkupumcM3Pj8E0LVwuR7N8jkajaXr0yqCD0icrjZFje+ByVx+Q3R4Hl/64edU2hiG0INBo2hlaGHRgbrl9KmecN4LEJDcOh2DQ0G788YFT1apBo9FowhCypmN4O2Dy5MlSl73UaDSa+iGEWCGlnGy3T68MNBqNRqOFgUaj0WiaSBgIIc4QQmwWQmwTQtxhs/9JIcSq4N8WIURh2D4zbN/MpuiPRqPRaOpHo11LhRAO4N/AacBeYJkQYqaUckOojZTytrD2twDh7izlUsrxje2HRqPRaBpOU8QZHA1sk1JmAwgh3gLOBzZEaX8lcF8TfG67pLCgnFXL9iKlZNyk3qR3S2ztLmk0Gk2TCIPewJ6w93uBY+waCiGygAHA/LDNcUKI5UAAeFRK+VET9KlNMmfWJt5+aQUimPHz9eeWcf5lYzn30jHs3lnAiqW7EcDk47Lo0y+1Vfuq0Wg6Fy0dgXwF8J6UMjxpTpaUMkcIMRCYL4RYK6WMSIAvhLgRuBGgX79+LdPbJmT3jnzeefmHiHKRM99by64d+axenkMgYAKCWR+s57RzhnPZNRNbp7MajabT0RQG5Bygb9j7PsFtdlwBvBm+QUqZE/yfDSykuj0hvN2zUsrJUsrJGRkZje1zi/PV3G3Bwb46Pq/JiqV78PlMLEvVH/D5TGZ9sJ5n/vpN1MpiGo1G05Q0hTBYBgwRQgwQQrhRA36EV5AQYjiQBiwJ25YmhPAEX3cDjie6raFdU1zkJUreuKiVx75btJN7bv1UCwSNRtPsNFoYSCkDwK+AL4CNwDtSyvVCiAeFEOeFNb0CeEtWD3keASwXQqwGFqBsBh1SGIyb1BtPXKRWrrakoVJCabGX2R91yFui0WjaEDodRQvh95vc+9tZHDpQTCBoN3A6DeITXHgrAra1B0L06JXCX54+v6W62smQQB5wGHAAPYDkVu1R/TkA7AAqADeQhfLraLr05JqOgU5H0QZwuRzc8seTGDmmB4lJbrqkxXHqWcN46Klz6NmnCw5H9B9uXILONN48WMBKlGZyP8oR7gdgZyv2qb7kAJtRggDAB2xHCQeNJna0MGghPnhzNff+dhab1x/EDFiUlfrJGpROWnoC9/zf6ZxzyRjb49weB9PPHNbCve0s5ADFQPiqzAJ2AWWt0qP6IVGDfk1jlIXy9g60eI807RctDFqAdav2Mfuj9fj9Fl6vSUVFAL/P5IV/LyX3YDFuj5OLrhzHz359HC63A5fLgWEI3B4H4yf34YRpg1r7EjooB4gcSEENsrlN+Dkmara+GPgGtRJpCqcAH9UFWTgCKG+Cz9B0FrT+oQWYO2szPm/kj9ayJN/Mz+aiK8cBcMIpgxg9vifff7sLb3mA0RN6NVtVMg1ELQWHrGVfQz5jJVBKleA5iLJTHA14GnHu2n6+EmU/0GhiQwuDFqCoyH6GZgYsjhRW35eansCMc0a0RLc6CSZK5eME4mvs645SCdVcHRhAUxUAyg9+fs3PMFGqnMGNOLcDdQ2HapxfAF2IFDRlqOs9AsQB/VDe3hqNFgYtwriJvdmzoxC/v/rqwBPnZNS4nq3Uq87AXiA7+FoCCcAY1EAIKj7yIEplExpMDZRHUVIT9aEAe1WORAmKxjIUpS4qRAkBCSQCo2q0K0atUEJ9KQseMxjleaTp7Ghh0AKcetZw5n++heJiC8tU6geXyyCzRzITj+lbx9GahnEYpacPnzGXoLyFjkUNnE5gMsqTKBc10+4F1Fc1Z6EMuftRRtsuqEE2GXBRNUjXpCnUOA5gHGpwL0WtfuwE2WYihZIFbEMJP12zurOjhUEzI6WkvNzPH+6fztzPNrNi6R4cDoMTThnIuZeMweHQNvzmYSf2xuEAakYeGvAdqBVCn0Z81hqgKOzzClFCZzzRfTSMGD5TBs91BCU4Moj+k00I/tlhoVYGdojg+bW6qLOjhUEzsnpFDi8+vZTSYi+WlGQNSOe+x88kI7O9BTW1R6J50sha9tXn3DtQKiAD8BI58w/FMBg19oWEQx9qt0uYwCrUbN8MHrcVtQroUs/+CqKvTsL7VBd+lNBzAKnooLaOhZ6WNhM7t+fxr8e+oiCvDJ/PJOC3yN6Wx0N3fF5rtLGmqYhWJ0LUsi8Wyl
DpuA6idPUV1O6VVPO7TgKmAHW5C2ej1Fqh463g6zXYr3hqI0B0G4gDterIp3YhuQP4FuUWuxblJnuklvYlKNVbe4jX0IAWBs3GJ++tw19j0JeWxFseYMWS3a3Uq87EACIfb4EyHqeGbQugZt+xBmhlE923PxaKic1WUFsMRGE9Pi8HNYiX1thuoARBPPA9sC74fxWR9+IQsJsqgWSiVgmribwXPmA5sAKVqmxZlHaatoZWEzUTe3cVYpf2qaIiQM6ewhbvT+cjDRiJUq34gtu6AsNRQsECtqBm+CEVSk+U4be2OVJBI/sVa/xCbYNnrIKrGGUgthMqfVGrmtwa+wtRs/+xYdt2U3twXo+wbetQq4Lw6yxEfQ/DY+y3pjXQwqCZyOyVzIF9kctoT5yTnr3rq/PVNIwMlF7ej5oFh3vMbCbSP38/ShDU5vvvoHFpHpKJTdeeir3gkVRf2dRGDtFVSl4iBUHo/PkoARpawfiwx6qxrwIlgOzsJwdRbrBaGdFW0d9MM2CZFnt3FdruczoNjjqu/VVqa78I1KAWLgj8qMHJLqdPDmqw99nsB+WTX9fPxkC5qNZsZwBDajlOogy0eUB/It09Qx5IsbqkRhvEofZ0GEaNY1NqaRfuDOEluqCTKOFWSP1tHpqWQK8MmoHVK3IoKfba7hs2MhO3R9/21iWH2o2+i4OvBUqd0p+qQa4vasAuIPqgFhr0u6Iifr2oAbU/0Q25JVTXrUuU+iUQ/Dw3KmK4PlX+0qP0MxRhXRLlGiRVgXmg7C95REY5J1B9lZJI7fd1HVX3cTgqelrTVtCjUjOwc3s+3gp7VYK2F7Q2FkoHHo2aeYlC+vKQ94+B0qcfQalZ9lJ9kDRQNZtCA24saS1MlBtqzWfmAGrQrBlNHCs9UP33UXVNAhUIF4p8zyay/32oPjQkoqrRbkVdd6juw0CqrwScwWNr3pMQ4ds2ooRJU0V6axqLFgbNQFq3BDweJ15vpEBI6xotMEjTMtT0qqkLCzW49ae62iYl+NcXtdI4gvLM6UP04K9o5GE/ow4Jrsx6ni9EKMI6m6osrBmoQTw0cAtUgJ4/uK1f8K8mKcCkGD5zIGoVExJCTuxtLKE02zoPV1tBC4Nm4Jjjs3jrxRW2+/bvLeJIYTkpqTWTpmmaBx9qsC5CqT4yqH9GUoFS9dgN8m6UGqUxeImucqpN7x8LbtTqws6TR6AEQu/g5xs0PpAspFrrg7rn24kej9CQ4D8zeJyLxmV81dREG5CbgfgEN7+/b7ptfeOSYi+vPde+Sna2X8qA71B6+wKUt9A66j8Hau500LV5GLVEtLpArXqaKqLYh4pZWEN0QRDKrFofdgGLUKk+lqJUa7UJS4umS0Xe8WkSYSCEOEMIsVkIsU0IcYfN/uuEELlCiFXBvxvC9l0rhNga/Lu2KfrTFgj4LVtDsWlKli/dTXusPd22MVGqkAOomTaoOIIA1QcEK7gtVoFgoNQ0zbmI7oLSndccjA0av+poDdajBHFtsRKx5GYKZz9V+abM4P8ilNG95m/pMEpYfIUqJrQV7cFUN41+woUQDuDfwGko5eoyIcRMKeWGGk3fllL+qsax6cB9KMWmBFYEj21sZE+r4/ebGIb9TMsyLaTEduWgaQj5qBl/CIlSfUR7jAQqIK0A9cjaCebQPCkD5R/fnAhUzqHtVEUeJ6M8ktpbHisvtaepACX8hlE/NY9d3YlQnqliqtxf81DCKNTWBPYF241FE52mmO4cDWyTUmYDCCHeAs5HhTHWxenAHCllfvDYOcAZwJtN0K9WZcjwjIj6BSHcbqcWBI3GQs0+JSpXTs2BYm8dxxsoD6EA1WMODJTLYy+UQbilqoU5UQPkMNQ1tdcHxE/0vgvUcNEQJ4ra4iLKqRIGNdOWE3xfgHpe7D5booSYQWeuDtcUwqA3yi0gxF7gGJt2FwshTkKt3W+TUu6JcqxtpQ0hxI3AjQD9+rXdoK31q/fz4ZurVfRxFFWQJSWbNxxi+KiGeol0dnJQP3qIrheWRPdkCemrBcqw2pMqb5vuRA+yainaqyCA2gd6u2pzsRKPfdK7UNGiENES44UEQs3+5QObUEJMotR1I23adXxayoD8CdBfSjkWmAO8XN8TSCmflVJOllJOzsioT+BNy/HtV9k89cgCtm7KpfiIl0DAXhgIYN+eopbtXIchD5VvJ5QwrTbbiwPldRJ6zEXw9cjg/wDKBXIHajBoC4KgvWOg3Evtoq8H0XBBZ3dOgRq8w1VprlrOsZvqz0spalUZ8uaSKJXTDzQu5Uj7pCmEQQ7KlyxEn+C2SqSUeVLKkFXvOaoclus8tr1gmRavP7fctvB9TQyHICNTB9s0DDs1gB0CFQE8BTWQZKAetaOD270oT6NQXYIDKO+UutRLmrrpg4ofSEQJ5CRU4FxjSrxmoGwoTqpcYLuhbC3h1FY50Ev1Ij92dghQk4xDDe5pe6Up1ETLgCFCiAGogfwK4KrwBkKInlLK/cG356HCDwG+AB4RQoTKLM0A7myCPrU4uYdKYqpTYBiC5JQ4Xfu4wcSaH98AslCPeF8iB4lsIt0SLZSwyaT2GaambrrT9OkmeqEEihf1vdoNX32pUiHWxEDZHkKrv5Io7SzqH5zY/mm0MJBSBoQQv0IN7A7gBSnleiHEg8ByKeVM4NdCiPOoqjl4XfDYfCHEQyiBAvBgyJjc3ohPcGNZ0WesnjgnliXpm5XKr/44NaqnkaYxhNRA6ajVQFwtbQ/Xco58Gh71q2leQjUpatufQHT7QviKPAn7Qd+gcQWQ2idN4jwtpfwM+KzGtnvDXt9JlBm/lPIF4IWm6EdrktIljsHDMtiy4RCWVaWXNAzB4OEZ/Oink0lM8mj1UKOJI3rkajeUQVgH1nduBqKcGWsm1nOj/FVSUWqnftin8Q55lHUudARyE3LTb0+gW2YScfFOnC6DuHgn3boncvPvT6T/oK5aEMREgNoDlmorF3kYVWErFuNfBvbGTImyKWjaLxmoSUG4m6hEqYj2obyHvkfFOYwOtgvZIdwoh8bOZ0AW7TESdvLkyXL58pZJ6eDzBli2ZDd5uaX065/G2Im9MBzRZahlSdav3s/+nCJ69u7CqHE9tUooJuwqj/UhMjMmKKPvzijnCUXt1uV+7EMJjvC6BQYqwEzbczoGW4nuECBQmVeHo561XahnKvxZ60f7jACPjhBihZRyst0+vZ6uhb27Cvi/e74k4LfwegN44pykpiVwz6Onk5xir7c0DMGYCb0YM6EXAEcKy3n31ZUsC9Y9njSlH5ddM4EuYYnqykp9SAmJSZ0l4OUwyshXjpqdZaE8eg5Tfcm+F/uUDANQRsT9RGKhPEHqEgZulGfRQZSNwIMyUHY+XXHHRKJWAbXtP4QSBmUoYWCXvjwVVUK146OFQRSklDz1yEJKiqs8TirKA+T6innpme+45fapdZ6jotzP/b+fTWFBGaapHrIlX2WzftU+/u9f55F3uIzn/vktu7OVzbzfwHRu+NWx9MnqyA/fAVTJydCgX0H03DGhNMdZRGo0axOcNSuE1
dauV/BP0zEwUZOKCup2QQ7t3x+lbSh9eUf+PVahbQZR2LOrkCNFkSHwpilZ+f1eAlFSTYSzeGE2JcXeSkEQOr601MecTzfx8B2fs2NbHqYpMU3Jjq15PHznFxQVNiS1b3tAEj1dQDRCAqGm10cm9o9vqOSkpvNRhKpStwnlOlwX6cH//lra1LavY6GFQRQqyv1Rdf0SSSBQfQArK/Xx5aebePapxXz6/lqOFJazbtU+2wI3Pq/JooXZBPxWRABtwG+y8IutTXYdbQs/9f9xSZSNYDnKQyR0wxKJXDEYKONv5/ME0ViolNmhjKZ14QQGB1+nY7+aDD1PnQOtJopC/4HpWKa9cb1nrxTi4quCkg7sO8JDt8/G5zPxeU1cLgefvLuO0ROU8Tjc1RSUXcHvt2wT2fn9Ftu3RPOBb+/Upb4JGY5rEtLl5qIChkKpj/ujPEdCiea6UZVzSNO5KCD22gUOlL0olDU1A2UzKKe6ILFQ9qQuVK/1HKIYlR5FoCYg7btglV4ZRMHtcXLFTybh9lQNYEKA2+Pgml9Uz8P33D++pbTEV5mKwu83qagIsG3TYRyOyIHJ6TQYOToThzPy9judBr361rfoR3vBgfrR2A3WKVSpfqI9lhaR2UoSUR5Hg1E/WC0IOif1cQU1qJ4+2wAmoiYZNSPPC1E1Ew6GbZMoVdQPqFXrDpSram21tds+emVQC6ecMZTuPZL49P115B4sof+grpx/2Rj6DUivbFNW6iN7W55tgtLycj/nXTqGme+tw+EwQEpMS/KjG45i9PieLPt2N2YNdZPhMJh+5rDmvrRWZChVOe9DA3ccyt/bg8o/sxs1U7OjsWUgNR2TLsS+Mki12eZETSoO2uwLuT1noATHYaqnPQ+tXHeg1Ert0yNNC4M6GD2+F6PHRzdI1lQBhSOAcZP7cOrZw1m7cl/l+UIupLfdM41n/raIinKlR4+Lc/KL357QwYPTnMAEVF6YUpQASKFqNeBE6XCjCYP2FxejaQniUPEh0TyDQjiIHjvgI7pNS6JcUJNQLqvRvI/2U2WLaF9oYdBIkpI99OqTwp6dhRH7XB4HfbNSMRwGx5zQP2L/iDE9eOr5i9m7uxCkpE9WWicKUCtBeXyEko71QdkA6rr+UOrqWN1HNZ2HUGW43US6lgrUimAw0WfuBtEnG5KqZ642lVTdXoZtFS0MmoCf/uo4FZwWsDADFoYhcLoMfvbr42uNVgZlTO7Xv6P6MftQBjZQy+dQbMB+1LI79GMNoFYCuSi7QW25CgXaLqCxR6BWB6EI8lC9ZCexPTMulLqpiEihEE+Vgbg7ajJjl9OoW7173VbQwqAJGDC4K4/841y+/HQT2VsP06tPF044ZRD9B6bXfXCHZS+qCE34j3AgagWQjX0921Jq9w8PeW1ovwdNLNTmjBCNEVQVtwmtQA2UTStET5QjQ6goTuizUqiKXWh/6NxETcyKpbt57bllHClUAWsTj+nLdTdN6USpJqCqWpTdzGksyjujvs+dAzUzm4Cew2iaFwtlJC5FpcPuRqRaMoCa8BxEPdc9UcGObXuionMTtRAb1uznP39bVK3IzQ/f7eHQgWLuf+IshOgs6o3aDGwHiB5PEA0HMAbtOqppGWJJYe1E2bj6N3dnWgwtDBqAaVosX7KbxQuyQcAJ0wYxeUpf3n99dUS1s0DAYn/OEbZuymXoiM4SGVub+6cPlSI4h9giRQm286AFgaZ9I1H2iABKpdS2tAVaGNQTy7T420Pz2bopF2+F8irYtO4gi+ZnkrOnwPYYaUlydhd2ImHQFWUEtlMTdUUtp70og3EsKwSJqk98LG19Ga7R2FNMVboMUM90X5Sba9uY5OhfVj1Z8d2eaoIAwFsRYNP6gyQkemyPMRyCbt07cuxATTKJnMmHCof0RD12o1DF6rsS24/BpHYvI42mrWICq1Cr4pBrdCgB46HW61YNtDCoJ0u+2lFNEITwVgRI75ZQLX0FKNfRpGQPo8b2aKkutgEcwCSUOsiNctnrBUymuiEuDmULGIVKE5xEdMEQqlTlQ2U+XYaaaWkBoWnr5GG/ArZoSyksmkQYCCHOEEJsFkJsE0LcYbP/t0KIDUKINUKIeUKIrLB9phBiVfBvZlP0pzkxbHINheiemcRFV43H43ESF+/C7XbQt38ad/359DrjDToeLlQQ0PHACag0FC5UhOdhVM4XiRr8M4DxwFHYpwoIP+d3qBlVCepHtpbo0coaTVsg3AXVbl/boNE2AyGEA/g3cBrK12qZEGKmlHJDWLOVwGQpZZkQ4ibgMeDy4L5yKeX4xvajLnw+k03rDmCZkuGjM4mLd2GZFls25lJW6mPwsG6kpNaddfCEaYNY88O+iNWBx+Pk2KkDGTOhF6ecMZSc3YUkJnnI7JncXJfUDtmJGrhDAtWBcjUNv0cDUEa2mtGjiagldc1VmRU8b0/amkFOo1Eko+bddtHJKS3cl+g0hQH5aGCblDIbQAjxFnA+Kvk8AFLKBWHtlwJXN8HnxswP3+/hv08uqnTtNAMWZ104igVfbMHrDSAQBAImM84dwaU/nlCrC+jYSb0ZN7k3K5buqZZkzpKS5BRlM/B4nAwc0n4jEZsOC2UkLkD9EGoajE3UPOF4qtRHXVCqoy2olMKhQLOhwLdRPkcEPyOzabuv0TQJXVCTmZpRywYqELNt0BTCoDdq3R5iL3BMlLYAPwVmh72PE0IsR035HpVSfmR3kBDiRuBGgH796qpvW0XuwWKe+es3lemlQ3z09pqItnNmbaJPVirHTY3+BRmG4Khj+/HDd3uqbff7TB67by5PPX8xbk/12yql5Jt52/n0g3UUFVTQb0Aal149gaEjO7J3kZ+qgvO15WsJ1SkIt6mko4zLJmqgD6nYajM0dzY1nKb9IFBq0O2oOBsTtVoYgrKTtQ1a9BckhLgaZUV8PGxzVjAi7irgKSHEILtjpZTPSiknSyknZ2RkxPyZC7/cVq3sZG34vCaffbi+znazPtygqpTVwDQtVtQQEgDvvrqSV/+3jIP7iqko97NlwyEev38u61fbFXTvKGxHGXzrStxlEV1vGkoFECITe4Egac9pADSdAQdqdXsScDJqGGxbdUuaQhjkoBxmQ/QhsgIJQohTgbuB86SUlb9+KWVO8H82sBCVb6DJyMstiagZUBtFBZF1j2tSmF9mu93vMynMr16/uOSIly8/2YSvRvlLn8/k9eeWxdyv9schYoshMKhuM6iNgaj0ACGVUmjVMBKdxVTTuvhQebV+QGnIj9TStm3EFdSkKYTBMmCIEGKAEMINXAFU8woSQkwA/osSBIfCtqcJITzB191QyuNww3OjGTYqE48nRm2YgEHD6tb1DxzSDTuzgtPlYMDg6jVTd2zPw+myv805e4swzdgFVfsi1nQTCSi30prko3yzl6IeiVKUVnMyMBylneyP0kjGvlLsSEjLImfOclb/+XW2vDAbf7H9JEXT3JRT5eVWhMpXtBKVlqX90GibgZQyIIT4FfAFanr2gpRyvRDiQWC5lHImSi2UBLwbNM7ullKeh0oR
+F8hhIUSTI/W8EJqNMdNHcDMd9fiD5iVNY2FIRACDCGqFbZ3ux1cdOW4Os950ZXjWLdqXzU7hNNl0LtvF4aNqm4HSEr2RC2A43Y5OnD9gq4oW0BdhAJwwmf2e1FqptB3Ux481wSU90V32kPRe8sfoHDjLtxdkkjKalrjtr+4jNmn/I6izXsIlHlxxrv5/rdPM2P2o3Q/dhSBsgoq8o6Q0CMdw6UTDTQvW7H3ctuKek7bx/3vFFlLCwvKefOF5axYugdLSsZO7MWlP57Awi+38vXc7fi8AQYN7cb0M4fy7dc72LIhl4REF6eePZwzzhuhSlbWYPuWw7z+3DKytx7G5XZwwrRBXH7tROLiq9dQlVLyx5s/JvdAcbXSmC6Xg5NOHcQ1P6/N1t6eqUAtGuuqTWugdKmhHPQmsAh7v+xk1Mqg7bP1pc/5/ransSwL6TdJHZXFtHfuI3lAz7oPjoElv/o7W56fjeWtXpnL3TWZfucdz44354MhMFxOxt9zNaN+e2knSpTY0izEfiXsQKkw245nYW1ZSzuFMIiFPTsLeOiOz/F6A5Xfq9vjYOzE3txy+9Sox0kp6/yRHdh3hEfv+ZLycn/lKmHQ0G7cds8psauw2iXbiS3CMoOqfPEFqECyaIbnqbR1z6GcOcuZd+G9mGVVhnFhGMT3TOfS7NebZKb+aso5BErKI7YLp4EwDCxflRB2JsQx6dEbGPmrCxv9uRo7vsJ+8uJARdd3tdnXOugU1jHw3msrqwkCUN5Fq1fksHd3IX36pdoeV1MQlJf72bT2AIbDYMToTNweJz16pfC3/13E+jUHyD9cSv9BXcnqFIVvYlETgXJDzUfZDmozBLePKmerH36tmiAApd/3HSllz6ylZF1wQkznKVi/k01Pf0TxjgP0OHk8w244C0+6ClIyK+wzw8qAhawxMAXKKlj90GuM+OUFenXQLHQjetLF1JbtSiPQwiDIlo25tt+lELB146GowiCcr+du49Vnv8fhMJCoVcMvbjuBicf0xXAYjJnQq8n73baJ1TheiMrq6ADGBf/XXBkI1I+u7Q9mR7bbGw6tCj9HtsVmVMx+ewGLrn8cy+dHmhYHvlrN2r+8ychbLiI+M41uRw0jd0ns5jVv/hHMci/OhLiYj9HEyhCU4diPeuZDk5bavNzKUQNOPG3lmdbCIEhikpuy0sjZlmEIklPq/gHt2JbHq//7PljPoGoge+Zv3/DIP84lI7MzpqXojjIGx6KKDN23NagI5FXB40LGZTfKttD2SRvVn/J9eRHbpWXhTKz7WQqUVbD4hicwy6tWF2a5D7Pcx6qHX8XhcSNNE8PjQgZMZAweaa6UBBzx9ll1NY3FjfJqO4RSc8ahEjPafdclwHqUTQ3UEDyctqBKatvK1xZkxjnDIzKOghIGYyf1rvP4ObM24fdF6rlNU7JwzrYm6WP7Iwv1Q6nPzCeAEgDHoWZcWSins6NpL7mHxt97je3AK02L5Xc8S8nugwCUHcjn8PLN+IpKqrU78PUaRLTEhpbELPdW2gTcaXVPMpwJcYz5w+VaRdSsOFBOECNR8TB2gsCPcjktQz3jFio+YR3Kdbp10SuDIKeeNYwd2/JYtmQ3hhAIQ2AYgt/fdwpud90BTXmHSrGzxZsBi7xDJZE7OgUu1CC+F9hRj+N8qEezfajV8tdmU7RpD12G9iF93CAyjx/N8JvOY/2T70YsiqwKP2sefZPSvbnsn7MCw+PG8vkZ9vNzOPqvNyEMI+ZB2wqY+AqjPFsCXMkJmBU+HPFufrj/ZbY89xnj7vkxg6+dUe0zfEUlbH3pC/bPX0lSVibDbzqP1BFZ9ufVNACJWvUeIHo52N2oSU/roYVBEMNh8PPbTuD8y4+wZcMhkpI9jJnYC5crtsjWkWN7sH3LYfz+6qsDT5yT4aM7cwK1UK3YNFR0Zl1I2lImx9rwFZUw59y7yfthK4bDgWWapI8ZyGmf/R/F2ftstWOWP8D21+dh+fxYXj9m0DV0y/9m4U5LZsK915B50lhklNiUapgWOKM8n4ZB33OOY8d7C/HmqWjY4uz9LP3V3ynbn8e4O68CoDQnl0+Ouhn/kVICZV6E08GW52dzwgt/YODl0xp0XzThHAK2oSY4tX2nrT9h1GqiMCxLUlhQjifOSe9+XWIWBACnnDkUT5wTERZEZjgEiUlujq0l8V3noQtK9ZNAdK8gA5V/qG0bOaWUHFy0llkn/IbcJRsxy7z4i8swy7zkrdzKgssfYPfH0TKsQqC0IiI+IFDmZf3f3kVaFpZlkTy47lWR4XZGVyeZFtlvzkX6qsd5BMq8rHnkdcr2Hea72/7Ne4N+TPmBfAJB7ycZMDHLvSz+6RP4SyNdVzX1IQ/YiMq9VZdwb32boo4zCHJwfzGP3TeHkiNeEGAGJOOP6s0vfnsiTmdsMjP3YAlvPL+cNT/kIAzBpCl9uer6yXSJoU5C56IMlbtlN1W6UgPlhpeCWkV0IVJg+KiqadAl2L5l9eCBCh9zzryD3GWbItxHKxHU+tt3JsYRKLXJgeUw6H/xSez64BtkoK4Ef2DEu0kZ1IuiTXtial/5+cnxeNKTKd+fXy0eIRxXSgJTX7+bvmdPifm8mposI7YZv4Eq7JTQvN1BxxnUiZSSJx6YS15udb3/quU5fPz2Gi7+0fiYzpORmcRv7jq5WfrYsXASGcJvoWIN8lE5XlJQhW9CgjgPZWgLtTWCbcbRkgvc1Q+/Su53G6P6+QN1TgKtKAO3EILdHy2OeWAXQnD6l4+z4JL7yVu5FcPlxF9aAZZVax/MCh/e3KKogiCcnDnLWfXQqxzZspekAT0Ycu3pDLxqOu6UxJj62LmpLVdUaHXsRnkTNb8gqAutJgKytx6mqLAiwgDs95nMm725dTrVocmh9tTWJspve2ew7XKUy2nIA4Pg/yPYl7wMBbEdIfaEebGx5X+f1S4I6iChdzeyLjrR3tsoYGL5/DZH2ZPYqxsJPdI5e9E/uGDt80x65Kck9ulW6yULpwN3alKlWigaMmBRdjCfeRfey6FF66g4VMjh7zax5Oa/82bGRXz326eRVkdNsthURPN+C0UmH4Oq22GXqLHl0SsDVNrqaAnj7GIPNI2lkLoHaYu6axtbwH5UqUyC59yJUj+Fvk8XaoXRNDNZX1HtLoCOeA9pY/pzePlWNUOvgbeghGE3nIW7SyKb//NJg/thuJyMvetHAPhLy/nqqj9TuGGXvfopdIzHRZehfeh+3Ci2PD/bfgUiwBHn4bj/3sZ3t/7bVhVm+QNsefZT3F0SmXDftQ2+ho5PFmoFXLOEq5u2GECpVwZA/8FdCfjtZ6qxRB5r6ktTBj+Ff2+HUILAoiqIrQLl292wWeyez77jo3E38LLndN7ueznSrF2FM/bOK5n+0cPEZ6aCja3JLKtg3oX3knHMCBwxBKBFY/TvL2PwtTMAWHHX8+Sv3q5yFdnZAB0Gnq4pzPj8Uc5f9T9G/voi+/xIhqDfhSdy3rKn6XbUMExv9IlQoMzL+iffw6rjfnRueqJKvRhUFWpKRFU9a1uCALQwACC
9awLHTR0YEXTmdju48vr2kSWzfdGU6afDIzd3Ed2PO7/eZ9754SIWXPoABWt3YPkDlOUcrjPat/+lJ5PQI50L170QXa8uJXtmLcWsZRYfjiMxDuF0IFwOMqeO5cINLzLpzz+tjBXY9tIXEd5JIQy3k4FXTOPCdc/Tc+p4hBCkjshiyr9uwRHnxpkUjzMpHsPj4qi/3Mj09+4ndWR/XCmJWFEmSCHMcl+tKxGNQAWgHY9anR4V/Gub3nJaTRTkupuOIbN3Cl/O3EhJsZc+Walcfu1ERo5tmpTDmnBC+Vsaq893UqUigujlMyVV4f+xIaVk2W+frpYSou7uGOSt2ELqsL540pIRhv1cywqYuJITcCbF22ceNQycSXFYAZPxf/oxY2+/staPDUTro0OQMWUko267hPjM6okRh/7kTLLOP569n32HFTDpc+bR1dok9EgnfexADi+LbjNzpSTgSlKecqbPz4EFq/CXVtDjpLHEdWtbJR1bFyf1S1iXj5rYlKNWEv1piRKZWhgEMRwGZ184irMvHNXaXekEuFCL0saoGOJRs65wt91kVG4YO+rnx22WeyndG2vWVYUz3lM5OAL0mDqWXR8ugogAMsHwm88j5/PvMcu81QyxjgQPJ718B4l9u5M6KgtXYt1uyZnHj+bAV6ttLkLFQ3x24q0c/9zvGHTl9Gq7PekpDLr6NNtz+o6UUrLjQPRrTfAw7p6rEYbBga/XMO+CP6nrkMqmMPauHzH+nqvr7LumJvuBLVStcL0oG9tomjt/kVYTaVqBVBo/DylHJfxaTJUAsJs9CVSRvfpFNRseF4bbVXfD8E8Sgl4zqtSKEx+6XmUJDUv94Ejw0OfMo8iYNIyzF/+TbkcPx/C4cCZ4iO+Rzslv3EP/i08i4+jhMQkCgKOfvBlnUpz9SiSYy2jJL56q1QZQ2dwfwJt/hK0vfk6gzH41JZwGRpyblfe/zOzpv+PLM+/AV1iC/0iZCr6r8LH20TfZ8+mSmPqvCRGqjlZTFWmhBETzxoTpoDNNK1GKykxqUpWdtKEIVF6XTVHOM5GGLLOX3PJPtj4fmyup4XFx2qz/o9cpE6ptL9y4ixV3Pc+Br1fjTklk+C/PZ9Stl2CEpZEoP5hPoLSCpP49oqqW6qJw4y5WPvgyO9/+yna/KyWBUz9+mB5Tq5d1Ldl9kF0fLsIs91Kwdge7PlqsjORCRLVDYBi2nlI1yZw6jrMW/K3e19J5KUY5O9itmAUqgr9xyRp10JmmDZKIergLqIoXaKhAkFSvmVyTlSif7vpFgh/1+M/JXbqBvBVbADCyMjCPHo4s9+L/egNlzkQsp5PUI4e5ZNOLJPeLzEGVOiKL6R8+WOvn1NTnN4TUEVlMffVudr2/yN5lVCpjcjjr//4+K+58DkDlSIp1YhhjfEH5vsOxnU8TxEnts//mVeQ0iTAQQpwB/B3lP/WclPLRGvs9wCvAJFQo6eVSyp3BfXcCP0WJw19LKb9oij5p2gMCSEcF3eSg0k00lNqODQmL0bW0icQZ5ya+Rxo9Zwwk96zpLPzajxUAywBOHqkGTynBMPj2tjm43A76ZKVy+rkjOOq4rBZPGW04HfQ54yj2zv4+wuvJ8LjodvTwyvcF63aw4q7nGxVAVxvC6SDzpHF1N9SEER/8s4tlSaO55+6NFjVCCAfwb+BMVDLvK4UQI2s0+ylQIKUcDDwJ/CV47EjgClQ43hnA08HzaToVAjVQh3yxoaqgTazL4rpmtaFiM/VbfQy8sj9d7jqfbxYHCATACp90C6FUJqi6FRXlAbZtOsy/H/+G2274gJ3bIwvcNDfHPn0rcd3TKovoOOLcOBPjmPbOvRiOqp/W1pc+jzna2ZWcgCslASPOjSM+hu9DCJzxHsbdWbsXlMaO0ahnPvRdOVCuqMOjHtFUNIWoORrYJqXMBhBCvAWcD4TX5DsfuD/4+j3gX0JNm84H3pJSeoEdQohtwfNpy1OnohwVMJaMEgwu1EwoE1ULYSeNsykQPD5UuNwJ9Av+hc/ey6hKnpcExDPwysG89sdDeL31s60V5JXxyF1f8MSzF5HSpeX8yhP7ZHDxlpfJfmM+uUs3kDy4N0N+cgYJPaqronwFJTFVSHN3TWHaW8pTKGPKSN4bdDVmefXVhBHnJn3sQIo27cas8NF10lAcHhezTvwNiX0yGHvHVWRdGFvdZ00CcCxwmCrX0q60RJBaUwiD3qjMYiH2ohS0tm2klAEhRBHqCnsDS2sca1tWTAhxI3AjQL9+/Zqg25q2QT6wFjWzl6iH3oHyrXagIjiPBNuFBuSGOj2EBr8ASsD4UWqqXSjjXfi0X9UAEEJwpKhhgsjrNbn3tk+ZetoQppzUn569W8b33pUYz7Cfnc2wn50dtU2fs6ew492FBEqix184EjyM/ePl9Jo+sXLbjM8f5csZf8Tymyr6WELv0ycz7e17MVxODnyzhi/PvEMJDCkp35/P19f8H+Puudo2XqJk90GKt+8jZUgfEvtkNO7COwwGkIGyp+Wghs4MVERz8ylO2o0BWUr5LPAsKG+iVu6OpkmQqAWkVWNbAOUZNAn1wxiDSgVciBIWW5rgsy3Uj2xPXQ0ZPtrD4gVlMdtXwynIL2fmu2uZ9eF6LrpqHGdd0DbiWPqddxxpowaQvya7KrBOqEBAV3ICli/AsBvPZvhN51U7rtvEoVy+7132zv6eikOFdD92JGmjqwL/lv76nxH5jAKlFax64BWG/+Jc3F2SAJVPaeEVD7F/3koMjwvL66fPmUdz0mt34dS1mlEFcfZR9dsoQk1ahqLcpJv+HjWFMMhBTd9C9Alus2uzVwjhRPn55cV4rKbDUkJ09U8xSiiEHtGk4B+oR6Tlasaed2kKy5eUU1HesDmIZUksn8k7r/zA5x9toKI8wMChXbn82kkMGNw6hdANp4Mz5v+VDf/8kK0vfo4MmAy4fBr9L5tKxaECtr7wOZue+YSN//yIlKF9mPKPWypXCA63i6zzj484p+nzU7DWvryp4XZxeNlmep06CYBF1z/Ovrk/qGpvQSP23tnfs+SXf+fEF/7YTFfdXiiluiAANUkK1UsWqJQuw2lKD6NGxxkEB/ctwHTUr3QZcJWUcn1Ym18CY6SUvxBCXAFcJKW8TAgxCngDZSfoBcwDhkgpaw1N1XEGHYW6/KpPwH6+UoSKUYgmSJJQ+tamS6K2b4+fN14sZP0qb6yelXXichvc/cgZrSYQ7JBS8snRN6t8TGEGZke8hzPmPUH3KTV9Q8KOtSxeSTzLNj7BmRTPmfP/SrfJw/DmH+Gt3pfZtnPEubny4Pu4kls/v3/rsQtVM7wuN9PewOB6nblZ4wyCNoBfAV+gFFovSCnXCyEeBJZLKWcCzwOvBg3E+SgPIoLt3kHpCgLAL+sSBJqORBLR01KkEP3x7IJK+LUJJRhChOwNI1F2iAqaKmqzV18Xv79X6bQL85PIO9yf7pnJHDxQzKb1B5n1/jrKSmOvRQDg91n852/f8JenL2iSPjYFB79ZS9Hm3RGeRma5lx/ufZEzvnw86rHCMBh4xTSy31
wQcbynawpdJw0FoGx/Pg63y1YYCIdBxeGiTi4MYjEWW6i596AY29dNk9gMpJSfAZ/V2HZv2OsK4NIox/4Z+HNT9EPT3hBUDdwhA7IR/BtWx7EJqMjiIyi/gwqUB1JvlGveJFRswf4m7rNBavooUtPVYJXcJY7BwzJY+8M+Nq07WO+zHdhXzL69RfTq0zYSu+Wv2hY1W2nB6u11Hn/MU7+kYO0OirbsxfT6VKqNOA+nffLnyriL5AE9oqe+NgQJvdrOSql1yKDulQHB/SZNZfrVuYmagLzcUlYvzyFnT2Frd6Udko7SEvZBFfzIQjmjxVqMJgUlUCaiMpiG/OBdKJ3qMBr2mNv9wDxEq1U7aUo/3O6GeXp8NWdrg45rDhL7dcfhth9cEoLePuWHClj18KvMu+g+frjvRUpzqhL6ubskcdrnj5JxzHCVF6nUS5dhfaol43MmxDHq1ktwJFQ3gjoT4xh7+5U4PI1LudD+iUd509WFi6b0LtK5iRpBwG/yn6cWs+r7PThdDkzTom9WGrfdM43klLaZs7xzkoeaaZWjBvJohmsnKhNqGuoHmQ8cRM3AMqnN37ui3M+fbvuU/LwyAv76GRWyBqZx58MziE9o/UHQ8gd4p98VlB8qrJaewpkQx4kv/ZGUIX34bOqtWL4AZoWa+RsuJ6d/8Re6HzsKyx/gw9HXU7LzIJa/qsayMymeC1Y9S/LAXoCyTax97C3WPvY2/uIy3F0SGXv3jxj1m4tbPHK77bIXlbjODgMYgjK1xk5tNgMtDBrBa/9bxsI5W/H7qpa8Dodg0LAM7n7k9FbsmaZ2CqhSTYVqK8QBk2nMkru0xMdnH61n6Vc7KMgvx7KsmNxRHU5BWnoCDzxxNkkpre9WWbR5D3PPu4eyfYcRTgeWL8D4e1VdhY/G/4yCNdkRxyT178El219j1wff8M1PHouo0yCcDoZcfybH/+e2atulZREo8+JMjNNCwJaDKIEQbl9xoyYt9a+1ohPVNQOmafHV3OqCQG2X7NiWR+7BYjIyVQ79wvwygEo9s6a1SUNFeR5C5YtPoSmiPBOT3Fx69QQuvXoC5WU+Zr67ls8+3FDncWZAUphfzqfvr+WKn7R+Zb0uw/py0aaXKFibja+ghK4Th+BKTqD8YD5Fm+3jMioOFXBkWw6532+yLdgjAybZr81l4OUn03NaVWZXYRjVakBoapKJciP1o1YDIux/06KFQQOpKA9gBuynfU6nQUFeOSXFPp59ajGHDhYDkNkzmZ/fegJZAxufpVLTWFxECXZvEuIT3AwelkFcvJOK8kCd7QMBi+8W72oTwgBU5HX62EHVttW+yhFgSZL6dccR77GtEBcoq2DOuXdz3H9uY3CUojoaOwSNTV0dC9qA3EASEl0kJdt/QQG/RUKSm0f/9CX79hYR8FsE/BY5u4t45O4vKSyInDlpOh7lZf56RS0bRttWkyT0SCdlkL2O2tM1hZShfRh45SkIR/RhxSzz8t1v/l3NnqBpG2hh0ECEEFz24wm4PdWt+W6Pg6kzhrD0m50EApGGRDNgsvCLpkinoGnrDBuViRVDMrgQx08b2Iy9aRpOfPkOXMnxGB5VBc5wO3EkeDjx5dsRQuBJT2HG7Edxp0UvM2oFAhRu2t1SXdbEiBYGjeCE6YP5yc1T6Jqh3CATk9ycd+kYfvTTyezKzrP1KvH7LXZm50dsLyn28r9/fMvPLnuDn1z0Go/dN4d9e4oi2mnaDxmZSZw4fXDEhCEaw0Z2b+YeNZ5uk4Zy4YYXGf3bS+l+wmjie6RjVfj4YsYfmXPuXZTsPkjm8aO5ePPLCKf9dcuAiTslVtdhjcKHyqi7FWXraqIw+DC0zaCRHDd1IMdNHYhlWhhhy+O+WWlsWH0gYnXgdBn0yUqrti0QsHjo9s/JPVSCGWy/fs0BHvzjbP78j3MrhY2m/XHNz49m0NBufPr+OvbnHInazuEQ5B5suXxLjSGxdwYjf30hm56Zia+oVBkTLJOcz5fxydE3c/GWV4jr1oWeJ49n/8JV1SuvGYLUEVkkZUVWhdNEI5TZF5QQ2I+KeZmIsn01DXpl0EQYNfSk088chsMZeXstU7Ji6W5ee24Zebnqx//Dd3soyC+rFAQASPD5Asz+uG5vFE3bRQjBCacMIjU9vlabgNPpILNndNVKW2PTMzMJVPiqWZWlaeErKuXz6b9j4Y/+zKAfn0pinwycyfEIpwNXcjzxmWlMe+feWs6sqY6FSk5nUbUaMFExM3VHhNcHvTJoJrpmJPL7+6bz378t4siRCsyAxDQtLEuyb08RB/cXs2jedu597Ey2bTqEtyLSoGaakk3rDrRC7zWNpbzcz9aNh3C7nfTJ6sKWjblYVnRrclKKh2Gj2s9s+cA3a7FsSmZaXj95K7aSt2Ire2Z+S68ZRzHg8pM4vGwLXccPpv+lU3G4m242274IoAZ0F1WuoRYqIHIfapDvgko+F5oYFEY5l0TFIDRdBTQtDBqBlJLVy3P4as42fL4Ax5zYn2NPGoDLpXSlQ0d054lnL2T3zgIe+uNswtOxmAGL8oDFf55cxNHHZ+FyOyJiFgCtImqHzP1sM2+/tAKH00BKpQKqy60oMdHd5r2JwkkZ3JuDX6+ptVpaoLSCPZ9+y95ZSzHcTqRpcXjFFo567OcYUewJHRMfsBEV7AhKxTMUFduyBpVsMXQfC4EfULm1kqg9827TBgxrYdBApJQ8989vWfbt7spZ/daNh5g/ezN3PXJGZZ4aIQSFeeU4XQ78NgblXdn57NtbiGVGfrFuj4MzzoueMljT9ti07iBvv7wCn88EG+EejZw9RZSV+khIbP2UFLEw8pYL2f763IhCNjWRfhOJWZnFdPOzn4KUHPPkL1uim20ACaxAJVIMUYFS/QyluiAIYQHZwFggleiDflqU7Q1D2wwayPbNh/l+8a5q6h2v1yRnTxHfzNtWra3LbdQqxP0+C7OGMHA4DS69egIjxvRo0n5rmpfZH6/H521IFnZJe8rGkDZ6ACe+eDuu5ARcKQkIG/uYHWaZl83PzsJfWo5lmux87yvmnn8Pc8+7mx3vLMQyTXbP/JaZk3/B610v4NPjbmHf3BXNfDXNSR7VU0mEsFDeQdEIORu4UKknwu9vKFV7/WoZ1IVeGTSQZUt22/7ofV6TxQuzmX5mVQrmoSMzEfVUATgcgsnH6lrP7YmA3+RALR5D0RAC+g/u2iYS1dWHAZdOpd95x5K7dCMHv13HmkfeIFAavaZyCOEwKMs5zLI//Jf981dWHrN/wSpWPvgyJTsPVq44cpduYO4Ff+LEF/7AgMumNev1NA+lRFf1+IieViLcrtIXpTLag1pVpAL9UPm0mg69MmgghiGizuRq6n6dToNbbj8Jj8cZ8+zPsiRfzd1Wd0NNqyOlZPZH6/nVNe9y6EBxvY51uR3EJ7i54Zbjmql3DaMir4iN//6I5Xc/x+6Z30atP+DwuOkxdRxjb7+SntMn4kyse4CSAZO8VdvZv2BlNeERKK2gaMPuCNWTWeblu1v/XS0Ndvshnuhppj3YCwOD6tWAQ
amExqLSvQ+lqQUB6JVBg5lyYn/mztqkdMNheDxOTjo1cvk2cmxPnvjvBbz4zFJWLdtbZ+nEgN/i0P76DSya1mHhF1t5/41V+H2xD1ZOp0G/AWlMmtKPqacNblMpzw98tZo5596FNCVmuRdnUjxJWZmc9c3f8aQm2R4jDIPpHzzA7pnfsv31uYAgUFLOga/XVMtT5EjwMPT6M9n90SICJXWvIkL4isoo25dHYrCmQvuhG9Gr+ZWhjMjhdgOJykZa/4ykjUWvDBpI1sB0pp81DLfHUTnb98Q5GTy8G8dNtU8rkJIaz423nkBylziMOu68EDB4eHt78DsXAb/J94t38eZLK2oVBHZeQi63g7seOZ2JR/fls4828OLTS/n2q2wWzdvG1/O2VWa6bWksf4B5F91LoKSichAPlJRzZGsOy29/NupxOz/4hk+P/RVLf/VPsCTj/3Q1p858mMHXzsAR58aVHI8jzs2Q607n6L/ehKjrB1ADaVq4kttjdlMDFRxmJ+wlKqBsODA6+H8Kaubf8gYkXc+gkWzbnMuiBdvxeU2OPi6LsRN7RQSg1ST/cCmvPbeMlcv22noRhXjivxeSkWk/EzuQc4QV3+3GsiQTj+lL776pjbkMTT3Jyy3loTs+p7zUR4VNjEgIh9PgpOmD+HreNixTVnqYOp0GPft04cC+I5gBqzIGQQhwu51YlsX5l4/l3EvGtMTlVJIzZzkLLn0A/5FIYeRMjOPHxbMitq96+DXW/uXNKpWPEDjjPZw+93G6TxmJv7iM0pzDJPbuVlnbeO/n37Pg0gcibQyGQCCqqYQMl5NeMyZz2iftuTruFlTNYju6AS3zPTdbPQMhRDrwNqpG207gMillQY0244FnUEnjTeDPUsq3g/teAqZSVdX8Oinlqsb0qaUZPCyDwcPqN4NP75bIr+84mZw9hdz/u88iVE2gDMjx8fbBOR++tZpZH6zHMi0k8PE7a5l+5lCubCPpjzsDz/59MUUF5bUGkoEa9E+eMYRd2fns3J5PaPIVCFjs2VkQ0V5K8HqVcJn57lqGDO/O8NEtF4wWqMVV1LQpYO8tKGbNI69jhgegSUmgrILvfvMvzv3uaVzJCaQOr+4M0fv0o+h3wfHs/mhxpUBwJsbRY9p4ijbsojy3EBkwEQ4HSf0zOfHFPzbNBbYatWVpbYj3WdPTWJvBHcA8KeWjQog7gu9vr9GmDLhGSrlVCNELWCGE+EJKWRjc/wcp5XuN7Ee7pFefLqR0ieNwbo2cNAL6DUi3rXq1fcthZn2wrppawjJNFny+lbETezNqXMvrGjsbpSVetm2qPaI4hGVaSCnZs6swpvbh+Hwmcz/b1KLCoMdJY7F8NgOXEPScNj5ic+7SDRgeV3VhEOTw8i1Iy7JVCQkhOOmVO9k3dwXZb8wHKRl45Sn0mjEZpGTfvB8o3raP1JFZZJ40tgNUQcsAcomMKTCC+1qfxgqD84GTg69fBhZSQxhIKbeEvd4nhDiEuvrCRn52u0cIwU2/O5HH7p+LZVr4/RYutwOXy+Bnv7H3Lpk7a5OtftrrDfDVnK1aGLQAXq8Zk6uwy2UwaUo//D4Lp9OwjTCvFQlFhbEbWZsCT1oyEx64jlUPvEKgTH22cDpwxns4+m83R7R3dUkimjeEw+OiNvc5IQS9T5tM79Mm19yhtnWo+jfdUCkmiqkSCAbK26htxBI1VhhkSin3B18fQNVoi4oQ4mhUyZ7wDEt/FkLcC8wD7pBS2q5ThRA3AjcC9OvXcfzvBw/P4LGnz2fhl1vJ2VNE/0HpTD11SNRauBvWRM9VVFZqF9yiaWrS0uNJSYkj73D0LKNCwORjs7j+l1Pw+UzbdOZ14XI7GDepfgXPm4Ixf7ic9LEDWffXdynNySXzxLGMvf0KkgdETjS6TxmBKzkBf3H1gk2Gx8Wgq0/tADP6pkIA41E5iPajjMc9UNX22kZqjjqFgRBiLvai6+7wN1JKKYSIug4WQvQEXgWulVKGfhl3ooSIG3gWtap40O54KeWzwTZMnjy5/Vm9ayE1PYELrhhXZztvhZ8jRdFnikNHtI3lZntGSsmi+dv57KMNHCksp/+grlxy9QQGDO5a2UYIwXU3H8OTDy+wVf243Q5+fOPRnHTqYHbvLOD911epsUBQLRLdMMAwDNsiSA6HIDHRzbTThzb9RcZA79OPovfpR9XZThgGp376CJ9P/z1WIIBZ7sMR56bLsL4c/debWqCn7QkD6BP8q4sylBm2CBWP0JfmVifVKQyklKdG2yeEOCiE6Cml3B8c7A9FaZcCzALullIuDTt3aFXhFUK8CPy+Xr3vZPh9llJP2AxAQsCw0Y1fbu7Kzufbr7LxVgSYeExfRo/v1a4SqDWWd15dqeJHgtHl61btZ8vGQ/zh/lMZOqKq+MzYib254+EZ/O3BedW8iVxuB32yUjnu5IHs3J7HI3d9idcXiEhH0jUjkYt/NI6kpDjmfb6Z4qIKXG4H+/cewbIkk4/ty4VXjicxyX6F2JboOn4wl+99m90fLaY05zDdJg2hx8nj9aqgwZSgktWF1IoVKPVSP2BAs31qY9VEM4FrgUeD/z+u2UAI4QY+BF6paSgOEyQCuACVvUkThcRkN+ld422LoHjinAwc0tXmqNj5KOil5PebSAnffrWDoSO6c9s903DU4S7bEThSVMGcTzZGJBT0eU3eeH459z9xVrXtw0Z25/FnL+Slp5eybtV+DEMw5cT+XHX9ZJxOg7deXFHpGRSO02Vwz/+dTno3lZF23OTezXdRLYQz3sPAK09p7W50ELYS6WEUymXUh6YsaBNOY3/hjwKnCSG2AqcG3yOEmCyEeC7Y5jLgJOA6IcSq4N/44L7XhRBrUWV8ugEPN7I/HZqtm3Jtjcduj4Orrp9cmTq7IezdXcinH6zH5zMrfeG9FQG2bDjE4oXZDT5ve2L75lycUe7hzu151IzJ8XoDPHH/PNau3Ie3IkB5mZ+v5m7j9RdUDMy2zYdtz+V0GmzdlNu0nW8FrIBJ7rJNHF6+OWq6Ck1DiFbuVtCcfjeNWhlIKfOA6TbblwM3BF+/BrwW5Xg9lYiR3IMlPPHAvIgiOIYh+Nmvj+fo47NiPpeUku2bD1NYWM6AQV3pmpHI94t3Vq+0FkR5KW3jpOkqxUZhQTmrl+9FCMG4yb3pktoeo0LtSUz2RAz4ITxxzgi1x4LPt5Czp7CacdgyJQu/2IoA3HEO/H67QVK0m1TV0dg7+zu+vuZRTK8PGbAw3C5OfPl2ss4/vrW71gGIlr4CmtPYrHMTtRPmfrYpiqHR4MC+2DNlHjpQzOP3z6WosAIhBH6/SWpaPA6HiOoHHwgOaF/M3MC7r64M2hAEr/z3Oy67dhIzzmm6akutyeBhGSQkuKkory5wXS6Hbb6pRQu2R/USWvDFVuLj7X9eLpfByLFtw52wIRRt2cP8Sx+ollDOrPAx/8J7GX//NUy499pW7F1HIJMqj6NwBCpjafPQ8RXB7QApJRvXHmDWB+tYtGA73opIF9HdOwpsZ+5+v2kbyRrtcx6/fy65B0vwVgSo
KPdjBizycks5dKDE9hi328GxJw0ge+th3nt9FX6/hddr4vUG8Pst3n3lB3Zl59fvgtsohiH43b2nkJziIS7eidvtwONxMmhoNy69ekJE+7oyuZSX29sLfnfv9HZtg9n4r49so5EBVj/8OgcXa9Nf4xgEJFK1CjCCr8fSnEO2Xhm0MhXlfv5y7xxy9hTh95m43A5e+98y/nD/qQwa2q2yXb8BaWzecChCILjcDvr2j63i0bbNuRwprKhzEAvh9jjonpnMtNOH8Nr/ltkGTQUCFgu+2MJ1N02J7aRtnD5ZaTz1/MWsXpFDYX45A4Z0ZeCQbrZtT5g2kLde+qFe5zcMQe++XZqiq61G0eY9EKXcpQyYbPjHB2QeP7qFe9WR8KE8h3yoNBZxKLfS5h2u2+/0pIPwzis/sHtnAd6KAJYlKw2RT/55PlbYD+7Us4bjtJlNOh0GJ58WW8WjwvzymJIhJiS6GTqyO5dfO5F7Hz8TT5yLI0X2QsSyJEWF5ZE72jFOl4NJU/ox/axhUQUBwPQzh+Fy1e8nZBiC3EP2q7D2QsaUEVCLu3Hp3vZvHG8dLFRN5GXAZmAHyls/jZaYt2th0MosWpBtq3f2+0w2b6gK28jITOIPD0yne48klbLC7aBXny7c+ecZpMRoxB0wuCtmoO5lQZ+sVO5+5HROPWs4Ho+TvNxSTNNC2Dwtnjgn4ybHEkTTNJimRWmJr155fooKy9mxLY/Sksj8OY1hy8ZDdiEftRIIWKR0aTu1CxrC8JvOx+GJYgA3BK7keMr257VspzoE24EClFAwg3/lKGfL5keriVqZaPlqBILy8up62SHDu/PYMxeQl1uKEIKuGYn1+qxu3ZOYfGw/VizdbZspFVRxnhNPGVT5fsOa/Tz154UETAtZQ2Y5XQapafEce1LTBsL4vAG+W7SL9av3kdY1kamnDSYjM4n3X1/F3M82EwhYxMe7uPCKsUw/a1jU4CZvhZ9n//4tq5fvxelyEPBbnHjqIK6+4ah66exzD5aw5OsdlJX4GD2hJyPH9sQwBG++uMLWjhMNp8tg7ITebaqQTUNI6JHOWYv+zqfH/gpZM6mdJTnw1RreG3Q1J7z4RwZe3h5LVbYGEpWqoubzJFHRyKUoO0LzoYVBKzN4eAZbNkQGbgcCFkNsitsIIejW3b7GQSz87DfH0aN3CnM+3URJsRchqgyhnjgnAwZ35biTVXEey7R45q+LbAOnHE6DU88cxnmXjcHjabrHqOSIlwf++BlFhRV4KwI4HIK5szYxeFg3tm05XBkZXFLs5e1XfkACp50d6c2kbDFz2ZWdh2nKykCyhV9uZce2PC6/ZiLDR2fWGSX7zbxtvPzf75GWJBCwmP/FFgYM7sof7ptOzu7CWo91uQ1A4HSqlBODh2Vw461tq7xlQ0ns1RVnvAe/TYZTK2hcXnT94/SaPpG4bu3bRtIyWEQKghACZT/QwqBD86OfTuaRu77E5zeRQZ2D2+PgzAtGNdkMUkpJ9tY8dmzLIy09nnMuGsUFl49FSsmmdQf5Zv52/D6To4/PYuIxfStnzbt2FOCzS2cMmAGL6WcP40hRBR++tYbDh0oYPiqTk04dHJMPfUF+Gd8t2smmtQfZmZ2Pt8LPoKEZuD0O8g6XVc64TVNimiYb1h6MOIfPa/LRW2uYfuawaikzlny9gxf+tcR29WOZkh1b83jy4QWMGJPJr+88OeoqoaiwnJf/+3211Zu3IkD2lsPM/WwziUluSoqjq55Gju3Jj356FAdyjpDZK5kevVLqvC/thU3PzIzqURRCCMGuD75h2I3ncGjJera/NhfT56f/xSfRe8bkelc769g4UIZiu9xjEmj4BDBWtDBoZfoP6sp9T5zJx2+vYeumXFLT4jnrwlEcdVzsQWS14fUG+NuD88nedhgpVQI0l8vBHQ+dRp+sNEaM6cGIMfY+73Xp5e/5zadYpoVpSSxTsn71fmZ9uJ77Hz+rVhXW4gXZvPjMUgJ+q1qQ19qV++p9fRUVfspKfSQlqxw++/YWRRUE4Xi9ATasPcii+duZetoQ2zbLv91tu93nM1n45VZOPXs4H7+9xtawbjgEg4Z2I7NnMpk9k+t3Ue2Ag4vWYdnUMAjHMk0CpRV899un2fLspwTKfSAlO95aQI9p45n+4YMYjraRsbNtMAjYSPUVggH0orlSUISjRXMboHffVG7+/Uk8+dzF3Pf4WU0mCADefeUHtgfVK36fSUV5gOIjXv764Pw6B/v+g9JxOqP/WL0VKtYgVLrT5zUpOVLB688ti3pMYX4ZLz69FL/PjBrtWx8chkFcWEW4BV9ssQ3Os8PnDbDgi61R9/vDVmsRx/pMzrt0TNTCMw6HwQnTBtnu6wikDOmNqMPuIgyD+F7pShCUeSv1kYHSCg4sWMXOd75qia62I7oDo4AElGrIjUpMF5u3YGPRwqCD8/W87bYpEcpKfWzfUrsLoMNhcONvjsfhjD37pGXBquV7ow70y77d3WS1vt0eB9POHIrTWfUY5x8uq5enkc/GHhJizET7jK1Op8FRx/bD4TC446EZnHXhSBwOgdvjUMFqHge/+O0J9TbwtyaBci85Xywj54tlBMqjl74MMfKWCzE80WerzsQ4Bl4xjUNLNhKwWUEESivY8uLsRvW5Y9INOAZVM+x4VLxBy2R/1WqiDoyU0tb4CyAMQWkt+u4Q4yb35srrJvPmi8sxzcbP5Csq/PXywAlHCKWH9sQ5CfhNjjmhf0Rk8MixPfjh+z2Vq5XacLmMWnM69e6byvHTBvLtwh2V99HpMkhK9nD2xVVBVZdfO4kzzx/J+tUHcLoMxkzoVW210tbZ8c5CFt3wRGX1NmlJTnjhDwy4ZGrUY1JH9uekV+5k0fWPAxJpSiwzQHxGKgl9MhjxywsYeOUpLP31PyOzKgSR/trqAmtaGi0MOjBCCAYM6sqObZE+3wG/xcCh0QOqQPn0r16Rg9frxzBETMJACBg7qXdUL53R43sx8921lV5BdsdH0x45XQ7OuXg0I8f2oEevFFt//eNPHsgbzy/HijYCBXG5HKSmx3PaOSOwLMmRogri45144qoP4tf+4hhGjevJ3M82U1bqY8JRfZhxzoiISnQpqfEcO7X5cs03F4Ubd/HNTx7DrLEa+Oa6v5A+ZiBdhvWttr04ex+lew+TOjKL/hedSN9zppD73UaEYZBxzAiMGmrF/hefxLaXvqgseh/CmRjHoKs7VF3Ldo8WBh2cq346mcfvm1vNoOr2ODnt7GG1Bj/lHizhz3d9QXmZj4Dfqpw1OpwCMyDxxDlJSHRTcqQC05RYllS5fOKdXH1D9ApZAwZ3ZcJRfVm5bE+EQDAMkLJGObBwpBrsMzKje1bExbsYOqo7G1ZHlgcVArqkxdMlNZ7Jx/bj1LOGsXblPt54fjmlJV6khInH9OUnN0+p9IgSQnDUcVlNasdpS2x8+mMsmxm65Quw6ZmZHPPULwGoyCti/kX3cXj5Fgy3E8vrp98Fx2OWe9m/YDXOpDiG//xcxtx+BQ53lUDtMXUcfc6ewt5ZSysFgjMhjrT
RAxh0ddS6WZpWQAuDDs7QEd2565HTef/1VezYdpguQW+l44OxBNH451++orCgvJoB1XAIMronMWp8L0aMzmTC0X05uP8I8z7bTO6hEkaO7sFJpw2uszrXL357AosXZDNv9mbKy3z07pfKkOEZzJ21mcO59nWFnU7BWReOrFUQhDj9nBFs33Q4QkXmdDn4/b3TK3M5rV6Rw3P/+LaaoPzhuz0cPlTCvY+d2SkqdZXsOIAMRK7SZMCkeGeVQJ13/p84vGwzlj9QuYrY8daCyv3+I6WsefRNDn6zlhlf/KXy3gkhOPmNu9n10WK2vjgbs8LPoB9NZ+CVp0SPYta0CloYdAIGDO7K7++LKDsRldyDJezbWxThSWOZksOHSrn0xxOIj3exKzufJV9lI4EzzhvJyLE9YhpADUNw4vRBnDi9urfNJ+9Fz3Y59bShXHTV+Jj6P25yb46fNoBF87MJBNSqxhCCC68cVy2p3wdvrIpwQQ0ELHJ2F5G9Na9aosCOSo+Tx7F/waoINZEjwUOPqaoud9HmPeSt3Ga7ggjHLPdyaMl6cr/bSPcpIyu3C8OgzxlHESgpp3DjboRhRPXS0rQeWhhoIigr9eFwCOxCioQAb7mf2R+tZ/ZHG4Iuoip2YMToTH5z58kYDUzPbEbJhAkw+bh+MZ9HCMG1v5jCtDOGsWrZXhwOg6OO60f3HtX9/ffnRK8DkbOnsFMIg6E/PYt1j7+jAsgsdf+Fw8CVFM/Q688AoGTXQQy3M0Jg2GH5AhxctK6aMCjaupfPTvgNgfIKAiUVOJPiWPbH/3L24n+SMqhX81yYpt5o11JNBL37dkFEcWdL7hLHkaIKZn+4AZ+3eonMDWsPsOTrnQ36TNO0IorKhHA4RGWBnfrQr38a5106hrMvGhUhCAC6dovi+imge2bHCxSzw5OWzLnLnqbfecdiuJwYLid9zzuOc5c9g7uLUsmljsqqM8AshOF2EteteqT1V1c9TMXhIgIlymYQKKmg4nARX/3oz017MZpG0amFwZHt+zjw9Roq8qLVHO2cOF0OrvjJJNye6p4hbo+Da248mu8W7bQN7PJ5TRbOiR7EVRuGIYhPsHfHdDiNyuLxTcn5l4+JuEbDEKSlJzBsVPcm/7y2SlK/TKZ/8CDXVHzONRWfM/39B0jqW3X9ib0zyLr4JBzxtduCQmRddGLl67J9hylcvyvSRcyS5K/eTtmBjlEYqSPQKDWRECIdeBvoD+wELpNSRpTdEkKYVOVh3S2lPC+4fQDwFtAVWAH8WErZtHmGbajILWTehfeSt3KbWv5W+Bh24zkc8+TNOl9KkJNnDCG9WwIfv7OW3APF9O6XyoVXjGPoyO5sXHcwalBZXTP4inI/O7fnE5/got+AtGqGxhnnDGf2xxuqeRkZhqBXny706ZfaZNcWYsqJAyjIK+PDN9cgDIFpWvTrn8Ytd0ztFMbjmtR2zSe++Efie6Sz+b+fYPn8uNNTSB2ZRe6SDQiHoX43AqZ/9BDulCrBHSjzRv1NCYcRk+pJ0zKIxqQEEEI8BuRLKR8VQtwBpEkpb7dpVyKljHADEUK8A3wgpXxLCPEfYLWU8pm6Pnfy5Mly+fLlDe73J8fcTN6qbciwgcuR4GH8n37M2NuvbPB5Owub1x/krw/Oj/DWcbsdXPrjCcw4d4TtcV/M3MB7r63C4TSwLElKShy/uevkSqOuaVq8+PRSln69A6fLgWla9OrThdvuOYXUtNhqNjQEnzfAvr1FJCV7GpURtjMQyjfkSk5ACEHxjv0c+Go1rpRE+px5NM4aqwdpWbzd93LK90euABJ6d+Oy3W91SsHbWgghVkgpJ9vua6Qw2AycLKXcL4ToCSyUUg6zaRchDIR6AnKBHlLKgBDiWOB+KeXpdX1uY4RBwbodfDLll9WKeYfwdE3hqtwPG3TezoSUkqf/+g2rl+fgrVACwe1x0L1HMvc+dqZtSutVy/by7ye+jogtSEp28+RzF+MOO6Ywv4w9uwpJ65rQLCsCTcuyZ9ZSFlz+IGYwUR2GwBHn5pR376PPmce0dvc6FbUJg8Z6E2VKKfcHXx8A7LN2QZwQYjmqoOejUsqPUKqhQillaHq5F+gd7YOEEDcCNwL06xe7Z0lNSnYdxHA5MYkUBt68I0jL0qqiOhBCcNNvT2T5kt0s/HIrfr/JlBP7c+Ipg6oN6uF8+v4626jjgN9ixXd7qhXISU1PIDU9odn6r6kf3oJiCjfsIqF3N5L722e4rY2+Z0/hrK+fYu2jb1Kwbidpo/sz9s6r6DrBPluspnWoUxgIIeYCdk/A3eFvpJRSCBFtmZElpcwRQgwE5gsh1gL1stpKKZ8FngW1MqjPseGkjR5QWXyjJkn9e1QTBFJKijbuQlqS1JFZWkiEYRiCo4/PqjW3Tzh5UYLJfH6T/MNlTdk1DeAvKWfHOwsp2XmA9LED6Xf+8Riu+s39pGXx/e//w+b/fILhcWF5/XQ7ahinvP9AvQvWdJs4lGnv3FevYzQtS51Ph5Qyasy4EOKgEKJnmJoosmSXOkdO8H+2EGIhMAF4H0gVQjiDq4M+QE4DrqFeJGVl0vecKeyZtVQtW4M4EjxMeuSnle8PLVnPwisexpt/BAS4khI48eXb6X2a7QpLUwf9B3WlIL8swqnE7XLQb0Ca/UGaBpG/Zjuzp/0WyxcgUFqBMzmeuD/8l7OX/IuEHukxn2fdX99h87OfYlb4MIOupYeWbmTuuXdzzpJ/NVf3Na1EY6e6M4Frg6+vBT6u2UAIkSaE8ARfd0PlZd0glbFiAXBJbcc3Bye9eifDfnYOjgQPhstBfM+uHPfMrQy84hQAyvbn8cXpt1O65xCBUhUoU34gn/kX3kvR1r0t0cUOxwVXjMXlru7G6XQadO2exKhxPVupVx0PKSXzLroPX0FJZS6gQHE5pTmHWfyzJ+p1rrWPvR1hW5P+APlrsylYv7OpuqxpIzRWGDwKnCaE2AqcGnyPEGKyEOK5YJsRwHIhxGrU4P+olHJDcN/twG+FENtQNoTnG9mfmHB43Bzz1C+5uvATrsz9kMv3vs3gH8+o3L/5f7NsQ+9NX4CN/9QG5oaQNTBd5QXKSsUwVF3gycf14+5HZtjWDNA0jIJ1O6g4GOHdjQyY7P3sew4sWmtzVCTSsvDm2UdoGy4nJTsjEwFq2jeNMiBLKfOAiKQ3UsrlwA3B198CY6Icnw0c3Zg+NAbD6ajmEx2iaOMuW7uCDJgUbtjVEl3rkAwblcnDfz8Xn8/E4RBRaw9rGk6gtCJ6BTIp+fL02xlx83kc9fgvaj2PMAySsjIp2RVZe9ry+kkb3f7SdWtqR/8abeg6aahttKXhdtF10tBW6FHHwu12aEHQTHSdMDhqBnBQyeQ2PjOT3O831XmuiY/8FEdC9d+BI95Nn7OPISnL3nFw10eL+GDEdbzkOo23el/G+n980CTlTTXNj/5F2jD0+jNxxLlUVrYwHB4nI2+5sJV6pdHUjcPj5pi//xJnQvTUEWaFj+2vz63zXIOunM6x//o1cZlpGG4njngPQ64/i6mv3WXbPv
ut+Xx19SMUbd6DNC3K9+fxw13Ps+z2Zxt8PZqWo1FBZ61FYyOQYyFUASp/1TYAugzvxwnP/4FuemWgaQcc+GYN8y+8T3nD2TD0xrM5/j+/jelc0rLwFZXiSoqP6p4qpeSdfldQlnM4Yp8jzs3l+97Fk6qju1ub5gw667Ckjsji3KX/xpt/BCklcV3r51et0TQl5YcKOPDVGpyJcfSaPqHWwjD7F6xk/ZPv4UyMw3ukBGokFXQmxdda37gmwjDwpNWexdVfXEbFoUjDNYDhcVGwNpseJ46N+TM1LY8WBnXgSU+pu5FG04ysfPAV1vzfG6qcpACEYPoHD9Bz2oSItuuefJeVf3qRQMgltIajljMxjp6nTKDn9IlN2kdnQhzC4QCbRIWWL0BCz65N+nmapkfbDDSaNsyeT5ew7vG3sbx+/MVl+I+U4S8qZe5590SogLwFxfxw9wtVggCUMVmAOz2ZnqdO5Pj//Y5T3r+/yZPDGU4Hg687HUdc9RWLcDpIGzuQlMFRM81o2ghaGGg0bZh1f3u3MngsHCkl2WE1iAEOLFyN4bZZ7EvlcnrGl48z8IpTMByOyDZNwNF/vYme0yfiiHPjSknAmRhH6ogspn/4YLN8nqZp0WqiZiR/bTZ7Zi5BOAyyLjqBLkP7tnaXNO2Msn15ttvNMi+7Z36LKzmBrAtPwJUUjyPOhRWlnkR98xI1BGe8h9M++TNFW/ZQsHYHSf170HXiEJ2iup2ghUEzIKXku1v/zZbnPsPyBRCGYNWDrzD2rh8x/p6rW7t7mnZEj5PHU5y9HxmIHOT3z1/JoW/Xs/RXf+fUTx6hNCfXvliMQzDwylNaoLeKLkP76olPO0S7ljYDOV8uZ/7F90Us7x0JHs76+im6TdTuqZrYKN55gI/H/wx/cXlk6cgwnMnxGA4HvsKSiH3CYXD5/neJ75bajD3VtAdqcy3VNoNmYPOzn9rqea0KP1tf/LwVeqRpryT378E5S/5FnzOPwvBEBkJWYsrKzKI1MTyuymL0Gk00tJqoGfAfsc/dLy0L/xGdu19TP1JHZHHap/8HwGtp5+Eviny+pJRYNqokAGlauFNqLxZk+vwcXr4Fw2HQdfLQZjMya9ouemXQDGRdfBLOhLiI7c6keLIuOKEVelQ3Ukq2vTaXD0Zcx6sp5/DJMTezb+6K1u6WpgY9Tx5vvzqQkvRxgyKS1Amngx4nja01Xmbnh4t4q8clzDnrDr6Y8Qfe7nUp+xesbOKea9o6Whg0A4OvmUHSgB7VfK4dCR7Sxwyk77nHtmLPorPmkddZctOTFG3eQ6CknMPLNjP3/D+x88NFrd21docVMNn4zEw+GncD7w76Ed/d9m/KDkQWhAco2XOIZX/8L7On/46lv/kXR7bVXt9p0v/dgCspvlrVPWdCHAOvms70jx4kKSsTV3I8RpwbZ3I8yYN6cdIrd0Q9X8G6HXz940fwFZaoGIbicipyi5h73j2U5uQ27AZo2iXagNxM+EvK2fDPD8l+fS7C6WDIT85g+C/OrTWNQGvhLynnzcyLbT1REvt159Idb2j3wBiRUjL/4vvY9+XyyuAvw+XEnZbEBav/R3xmVaWx3GWb+Hz677F8fuV15nLgcLmYPvNhep0SGV0comjLHlY+8DIHFq7Gk57CyN9czNDrz0AYBtKy2DdnBUVb9pI6oh89T5lQa7nWxTf+la0vfo40q6esMDwuxt5xJRPuuzbKkZr2iM5N1Aq4kuIZd+dVjLvzqlrbWaaJEKJV6ysXrM3GcDkwyyP3le/Px1dUqpOMhVGy6yAVeUWkjuyPs0bE7eHvN7FvzopqUcCWP4CvsIS1j7/N0U/cVLl90fWPEyipuunSbxLwm3x9zf9x+e63oj4TXYb25eTX77HdJwyD3qcfRe/Tj4rpWoq27I0QBKBqFtS1StF0LLSaqJUo3LSbz0/9Pa/EncHLcWcw9/x7KN3bOstyT7cuUYOVMESt6ZA7E6V7c/lkyi/5YMR1fH7K73iz+4Ws/8f71drsm/uDrVeP5Quwe+aSyvflhwqiDrb+olKKNu9p2s5Hoftxo5SXUg0cCXFkHDOiRfqgaRtoYdAKlO3PY9Zxt7B/wSqkaQVLEn7HJ8fcjL/EZnrezHQZ0ocuw/tGGB8Nj4sBl52sEqR1cEr35pK3ahuBKO6Z0rKYffJt5K3Yglnhw3+kjEBJBT/c9Tw73/+6sp0rOd4+JQTgCvPoEYYRvQiNpMVWiiN/dQGOGi6rwjBwJcYx+JoZtRyp6WhoYdAKbPz3RwTKvdWCiKSp3E5jKTrSHEz/UBkfncnxOBPjcCbG0W3SUI79169bpT8tRdn+PGad+BveH3oNs6fexpvdL2Ldk+9GtNu/YBXlhwojVCqBMi+rHnq18n3/S+1TQzsT4xhx8/mV7+O6dSExq7tt27jMVFKG9mnI5dSbhF7dOHvxP+l+3CiEw0A4HfScPoFzvvu3bUlYTcelUTYDIUQ68DbQH9gJXCalLKjRZhrwZNim4cAVUsqPhBAvAVOBouC+66SUqxrTp/bAwW/W2tZYDpRWcHDxOob//NwW71NSv0wu3vIKB75aTfGOA6SPHUi3ycOa7fMCFT5yPv+eirwjlGTvZ8c7C/EVldJz2ngmPvSTFklnIKXk8+m/48i2fciAWaneWfmnl0jo1Y2Bl0+rbFu8fR/SitStA9XqBCf07MoJz/+BRdc/DgIsv4nhdtL3nGMZct3ple32zfuB0j2HIs4lHAYnv3FPixrs00b15+xv/k6gwocwRKdYCWoiaawB+Q5gnpTyUSHEHcH3t4c3kFIuAMZDpfDYBnwZ1uQPUsr3GtmPdkXywF4cWrw+YnAxPC5SBvVq0DnL9uex64NvML1++px5NKkjsmI6TloWuz9ZwraXv0SaJgOvms7ga2ZgOB0c2b6PtY+9xaFv15M8oAej/3B5kxQoObRkPXPOuhNpSQJlFdVm2zvf/5qcL5Zx7rJn6DKkeWfHBxetpXTv4Yi8P4GyClY9+Go1YZA6qn90g+7w6oJr4BWn0HPaeHa+/w3+4jJ6nTYpIgXJstufxaqInBCEPI9ag5rGcE3norHC4Hzg5ODrl4GF1BAGNbgEmC2l7NRhuCN/cxE73l2IWVbdlVM4DIb+9Kx6n2/z85/x3S3/BCGQpsUPf3qBntMm4C0soXTXQdInDGbCvddEzPSllCy84iH2zv6+Mn3G/vkr2fLcZ0x65Kd8Pv33mBU+ZMCkcP1O9s1fyTFP3sywn53T4GsPlHv58qw7baNoAbAkgZIKVt77Eie/ae8x01QUb9sXNd9P6e6D1d53P24UXYb0oWD9DixfoHK7I97DxAd/os6XvY81j71N7rfrSRrQg9G/vyyq8Cxct8N2u+FykvfDVp3oTdPiNCrOQAhRKKVMDb4WQEHofZT284G/SSk/Db5/CTgW8ALzgDuklDZpF0EIcSNwI0C/fv0m7dq1q8H9bgtsf3Me3/78SYSh1AHCYXDyW3+i92m2LsBRKc7ex4djbrDPVhlCCBzxbkb++iJ2vrOQkt2HSOjdjf4Xncim/35iK
5Tie6ZTtjeynq0zMY4rD75vG2EdCzveWcjin/0Vf3Ht84G47qlceeD9Wts0ltzvN/H59N/Z5pFK6t+DwT8+jeRBveh/iYoo9xYUs/hnf2XPp0sRhsCTnszRT/2SAZdMJW/VNj476dZK4Qkq0PCYp37JsBvOjjj/230us01P7UyK57RZj+gSkZpmobY4gzqFgRBiLtDDZtfdwMvhg78QokBKmRblPD2BNUAvKaU/bNsBwA08C2yXUtZZCaM9BJ3FQqDCx6Fv12M4HXQ/dmSDcs6vfOBl1jzyBpY/UHdjIarNhIXTYZsauTZcKYmc8sEDtQZF2WEFPaY2PzeLnC+WIaO5sgZJGdKbize/Uq/PqC9SSj495pfkr9lebbaPUEXczXIfzqR4HB4XZ339VKXqzV9ajr+4nPjMtErd/qfH30Lukg0RnxFNeK7727usvPfF6lXJDEFSViaXbH21VeNONB2XRgWdSSlPreXEB4UQPaWU+4MDe6RFrIrLgA9DgiB47v3Bl14hxIvA7+vqT0fCGeeu96BaE19hSWyCACJUIvUVBOogC2d8/XTLFbmFzDrh15TtzydQWh7dpTKIM8HDiFsurH/f6okQghlfPsbinz3Bnk+WIhwCK+jqa5YrY3KgpJxAaQXzL7mfi9a/CIArMR5XYnzleayASe53G+0/w2FwaOnGiO951K0Xc2RbDltf/ByHx4W0JAk9uzLj80e1INC0Co21GcwErgUeDf7/uJa2VwJ3hm8IEyQCuABY18j+dDp6n34UW56fXS2StTlxJMTR7ejh9Tpm8c//RvHOg8gYhVbm1LEMv+m8hnSv3nhSkzjl3fvxl5RTmpPLx+N+hrRqSCspKdl1kKKte22N2sIQGA4HlmVzfVLaCk9hGBz39K2Mv/ca8lZsIT4zja6Thuq0H5pWo7FTkEeB04QQW4FTg+8RQkwWQjwXaiSE6A/0Bb6qcfzrQoi1wFqgG/BwI/vT6eg9YzJdJw7BEd/AKGGj9sHHkajUG44ED67kBKZ/+GC90hsHKnzs/ey7mAUBgOFytfjs2JUUT1x6CmB/PwyHEVXgCsOg3wXHI5yR96Uu4ZnQI52+Z0+h2+RhWhBoWpVGrQyklHnAdJvty4Ebwt7vBHrbtGu5WnwdFGEYnP7FX9jwzw+DZTb99Jg2gT0zv8Xy+QmU+1T2VAGm148M040bbic9T5lA4cbdlO6q7j0jnA56n34Ug398GrnLNpHcvwcDr5qOJy25Xv2zfP7ImXYd7J21lIVXPszJb7asv70zOR5HvBvLF+nyKRwO0kYPiHrslH/ewuHlm6nILSJQUo4j3oNwGEz/4AFdG0DTLtBZSzsoptfH7o+/pWTnAdLGDiRjygjmXXAvh5dvxggajtPGDOS0WY9QcbhIecKUewmU+3DGu4nv2ZWzvn6K+O62/gAR+EvK2f3xYrwFJfQ8eVy1gfOD0ddTtKF+3l/OxDhOfutP9D17Sr2OawzzLryXvbO/jxAGhsfJCc//gUFXRTWfAapAzK4PF3F42SaS+vdg0FXTa60joNG0NI3yJmqLaGHQcArW7aBo025ShvQhfdygyu2m18fumUsozt5H2piB9D59cswz2n3zVzLvgj+BUJk3EYK+50xh6ht3YzgcHPhqNXPOvpNAua/SiO1I8OCIc+MrKInq69//0qlMe/vexl90DBRt3cvH435mm2QuaUBPLt3+Wov0Q6NpTnQKa00laaMH2Ko7HB43A6Lk1akNf0k58y74U4Q+fc+spWx6+mNG3nIRPaaO46xF/2D1w6+R98NWkgf2ZOwdV5I+fjCfHn8LxVvts3dK097bqWx/HjmfL8NwOehz9pR6q67syF+1HcPttBUGdmkjNJqOhhYGmkaxe+a3tjZXs8zLxn99xMhbLgKg6/jBnPLe/RHtjnrs53x99SMRgV/OxDgGXhlhjmL1I6+z+qFXlbFWCKT5N47/3+/qVOHURWLfjKi2jbhuXRp1bo2mPaCFQSfB9PnZ/uoctr2s0kINvnYGg358WoOSkuXMWc7K+1/myJa9uJLjbXPsAPiipZwIo9+5x9Lj5HEcWLi6UiA4E+PIPGEM/S44vlrb/QtXseaRNzC9fghL9Lf4Z38j45iRDc7rBJBxzAgSeneleNu+armSHAkeRv/+sgafV6NpL2hh0AmwAiZfzPgjh5dvwSxTA+7hH7ay7dU5nDH3CQwbl8hobH9jLotv/FtlCgtv3hHbdsIw6BlDQJ0wDKZ/9BC7PljEtleUoBp09an0v+SkCJvFxn99RKAsMnWEDJhsfelzJj10fczXEdEPITj9y8eZe85dFGfvRzgdWF4/Q68/k1G3Xtzg82o07QUtDDoBuz74RhVlCRtIzbIK8lZsYdeHi2K2FVimyXe3/jsil1FNhGHgTIqrTOBWF4bDwYBLp9bZj/IoReUtfyDqvvqQ1Lc7F6x+jvy12ZQfyKfr+MHEZaQ2+rwaTXtAx713Ana8s9A2GVugtIIdby+I+TwlOw9WpmmoieF2kpjVnbjuqfS/bCrnLf8PKYMjQksaRe8zjlIxEzVwJsXT69RJTfY56WMG0vu0yVoQaDoVemXQCbAbQGPZVxN3l0SsaPmMhODcpf8mPjO9vt2LmRE3n8+mp2dScbioMq+S4XGRlJVJ1oUnNNvnajSdAb0y6AQMue50nImRKaediXEM+ckZMZ8nrlsXMo8fHZF2QTgdZEwZ2ayCAMCTnsJ5K/7D4Gtn4OmaQlxmGiN+dQFnL/6Hrs6l0TQSHXTWCZBSsuTmp9j26pxKNY8j3s3gH5/GsU/fWq+UD2UH8pk99VbKD+Rj+k0Ml4P47mmc9dWTJPTq1lyXoNFomgAdgawB4NDSDex8V+UK7H/pVLpPGdmg80jLYt+8HyjavJcuQ3rT67RJOu2yRtMO0MJAo9FoNLUKAz2d02g0Go0WBhqNRqPRwkCj0Wg0aGGg0Wg0GrQw0Gg0Gg3t1JtICJEL1K90VvPSDTjc2p2og/bQR2gf/dR9bBp0H5uOWPuZJaXMsNvRLoVBW0MIsTyau1ZboT30EdpHP3Ufmwbdx6ajKfqp1UQajUaj0cJAo9FoNFoYNBXPtnYHYqA99BHaRz91H5sG3cemo9H91DYDjUaj0eiVgUaj0Wi0MNBoNBoNWhjEjBAiXQgxRwixNfg/zabNNCHEqrC/CiHEBcF9LwkhdoTtG98afQy2M8P6MTNs+wAhxHdCiG1CiLeFELGXQWvCPgohxgshlggh1gsh1gghLg/b12z3UQhxhhBic/D677DZ7wnel23B+9Q/bN+dwe2bhRCnN1WfGtjP3wohNgTv3TwhRFbYPtvvvhX6eJ0QIjesLzeE7bs2+HxsFUJc24p9fDKsf1uEEIVh+1rqPr4ghDgkhFgXZb8QQvwjeA1rhBATw/bV7z5KKfVfDH/AY8Adwdd3AH+po306kA8kBN+/BFzSFvoIlETZ/g5wRfD1f4CbWqOPwFBgSPB1L2A/kNqc9xFwANuBgYAbWA2MrNHmZuA/wddXAG8HX48MtvcAA4LncTTTdxxLP6eFPXc3hfpZ
23ffCn28DviXzbHpQHbwf1rwdVpr9LFG+1uAF1ryPgY/5yRgIrAuyv6zgNmAAKYA3zX0PuqVQeycD7wcfP0ycEEd7S8BZkspy5qzUzWobx8rEarc2SnAew05vh7U2Ucp5RYp5dbg633AIcA2arIJORrYJqXMllL6gLeCfQ0nvO/vAdOD9+184C0ppVdKuQPYFjxfq/RTSrkg7LlbCvRppr40uI+1cDowR0qZL6UsAOYAsddmbb4+Xgm82Qz9qBUp5deoSWU0zgdekYqlQKoQoicNuI9aGMROppRyf/D1ASCzjvZXEPnw/Dm4lHtSCOFp8h7G3sc4IcRyIcTSkBoL6AoUSikDwfd7gd6t2EcAhBBHo2Zu28M2N8d97A3sCXtvd/2VbYL3qQh132I5tqmo72f9FDVzDGH33Tc1sfbx4uD3+J4Qom89j22pPhJUsw0A5odtbon7GAvRrqPe99HZ5F1rxwgh5gI9bHbdHf5GSimFEFF9coOSeQzwRdjmO1GDnxvlE3w78GAr9TFLSpkjhBgIzBdCrEUNbE1CE9/HV4FrpZRWcHOT3MfOgBDiamAyMDVsc8R3L6Xcbn+GZuUT4E0ppVcI8XPUiuuUVuhHLFwBvCelNMO2tZX72GRoYRCGlPLUaPuEEAeFED2llPuDg9ShWk51GfChlNIfdu7QbNgrhHgR+H1r9VFKmRP8ny2EWAhMAN5HLTGdwVlvHyCntfoohEgBZgF3B5e/oXM3yX20IQfoG/be7vpDbfYKIZxAFyAvxmObipg+SwhxKkr4TpVSekPbo3z3TT2I1dlHKWVe2NvnULak0LEn1zh2YRP3L/Q5sX5nVwC/DN/QQvcxFqJdR73vo1YTxc5MIGSRvxb4uJa2EfrF4MAX0s1fANh6BzSSOvsohEgLqVaEEN2A44ENUlmdFqBsHVGPb6E+uoEPUbrQ92rsa677uAwYIpRHlRs1ANT0Egnv+yXA/OB9mwlcIZS30QBgCPB9E/Wr3v0UQkwA/gucJ6U8FLbd9rtvpT72DHt7HrAx+PoLYEawr2nADKqvsFusj8F+DkcZYJeEbWup+xgLM4Frgl5FU4Ci4ISp/vexJSziHeEPpRueB2wF5gLpwe2TgefC2vVHSWWjxvHzgbWowes1IKk1+ggcF+zH6uD/n4YdPxA1iG0D3gU8rdTHqwE/sCrsb3xz30eUZ8YW1Azv7uC2B1GDKkBc8L5sC96ngWHH3h08bjNwZjM/i3X1cy5wMOzezazru2+FPv4fsD7YlwXA8LBjrw/e423AT1qrj8H39wOP1jiuJe/jmyhvOj9K7/9T4BfAL4L7BfDv4DWsBSY39D7qdBQajUaj0WoijUaj0WhhoNFoNBq0MNBoNBoNWhhoNBqNBi0MNBqNRoMWBhqNRqNBCwONRqPRAP8PMwGE2scCHIMAAAAASUVORK5CYII=\n",
       "text/plain": [
        "<Figure size 432x288 with 1 Axes>"
       ]
@@ -1027,2705 +1027,740 @@
    "cell_type": "markdown",
    "metadata": {},
    "source": [
-    "# Part II : Toy Neural Network with Keras"
+    "# Part 2 : Keras for MNIST\n",
+    "\n",
+    "Keras is a deep learning API for Python, built on top of TensorFlow, that provides a convenient\n",
+    "way to define and train any kind of deep learning model. Keras was initially\n",
+    "developed for research, with the aim of enabling fast deep learning experimentation.\n",
+    "\n",
+    "Through TensorFlow, Keras can run on top of different types of hardware - GPU, TPU, or plain CPU — and can be seamlessly scaled to thousands of machines. Keras is also a popular framework on Kaggle, the machine learning competition website, where most deep learning competitions have been won using Keras."
    ]
   },
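+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "As a rough orientation, the typical Keras workflow is: define a model as a stack of layers, `compile()` it with a loss function, an optimizer and metrics, and then call `fit()` on the training data. Below is a minimal sketch of that pattern; the layer sizes and the placeholder names `inputs` and `targets` are illustrative only, not the model we build in this notebook.\n",
+    "\n",
+    "```python\n",
+    "from tensorflow import keras\n",
+    "from tensorflow.keras import layers\n",
+    "\n",
+    "# Define a small stack of fully connected layers\n",
+    "model = keras.Sequential([\n",
+    "    layers.Dense(64, activation=\"relu\"),\n",
+    "    layers.Dense(10, activation=\"softmax\")\n",
+    "])\n",
+    "\n",
+    "# Bind optimizer, loss function and metrics to the model\n",
+    "model.compile(optimizer=\"adam\",\n",
+    "              loss=\"sparse_categorical_crossentropy\",\n",
+    "              metrics=[\"accuracy\"])\n",
+    "\n",
+    "# Training would then be a single call (placeholder data names):\n",
+    "# model.fit(inputs, targets, epochs=5, batch_size=128)\n",
+    "```"
+   ]
+  },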
   {
    "cell_type": "markdown",
    "metadata": {},
    "source": [
-    "_Keras_ is a deep-learning framework for Python that provides a convenient way to define and train almost any kind of deep-learning model. Keras was initially developed for researchers, with the aim of enabling fast experimentation. \n",
-    "\n",
-    "Keras has the following features:\n",
-    "\n",
-    "- It allows the same code to run seamlessly on CPU or GPU\n",
-    "- It has a user-friendly API that makes it easy to quickly prototype deep-learning models\n",
-    "- It has built-in support for convolutional networks (for computer vision), recurrent networks (for sequence processing), and any combination of both.\n",
-    "- It supports arbitrary network architectures: multi-input oder multi-output models, layer sharing, model sharing, and so on. This means Keras is appropriate for building essentially any deep-learning model, from a generative adversarial network to a neural Turing machine.\n",
-    "\n",
-    "Keras is also a popular framework on Kaggle, the machine-learning competition website, where almost every recent deep-learning competition has been won using Keras models.\n",
-    "\n",
-    "Keras is a model-level library, providing high-level building blocks for developing deep-learning models. It does not handle low-level operations such as tensor manipulations and \n",
-    "differentiation. Via TensorFlow (or Theano, or CNTK), Keras is able to run seamlessly on both CPUs and GPUs. When running on CPU, TensorFlow is itself wrapping a low-level library for tensor operations called Eigen. On GPU, TensorFlow wraps a library of well-optimized deep-learning operations called the NVIDIA CUDA Deep Neural Network library (cuDNN).\n",
-    "\n",
-    "The typical Keras workflow looks as follows:\n",
+    "Let’s look at a concrete example of a neural network that uses the Python library\n",
+    "Keras to learn to classify handwritten digits. \n",
     "\n",
-    "1. Define your training data : input tensors and target tensors\n",
-    "2. Define a network of layers (or _model_ ) that maps your inputs to your targets.\n",
-    "3. Configure the learning process by choosing a loss function, an optimizer, and some metrics to monitor.\n",
-    "4. Iterate on your training data by calling the `fit()` method of your model."
+    "The problem we’re trying to solve here is to classify grayscale images of handwritten\n",
+    "digits (28x28 pixels) into their 10 categories (0 through 9). We’ll use the MNIST\n",
+    "dataset, a classic in the machine learning community, which has been around almost\n",
+    "as long as the field itself and has been intensively studied. It’s a set of 60'000 training images, plus 10'000 test images, assembled by the National Institute of Standards and Technology (the NIST in MNIST) in the 1980s. You can think of \n",
+    "“solving” MNIST as the “Hello World” of deep learning—it’s what you do to verify \n",
+    "that your algorithms are working as expected. As you become a machine learning practitioner, you’ll see MNIST come up over and over again in scientific papers, blog posts, and so on."
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "## Loading the MNIST dataset in Keras\n",
+    "The MNIST dataset comes preloaded in Keras, in the form of a set of four NumPy\n",
+    "arrays."
    ]
   },
   {
    "cell_type": "code",
-   "execution_count": 24,
+   "execution_count": 1,
    "metadata": {},
    "outputs": [
     {
      "name": "stdout",
      "output_type": "stream",
      "text": [
-      "2.7.1\n"
+      "Downloading data from https://storage.googleapis.com/tensorflow/tf-keras-datasets/mnist.npz\n",
+      "11493376/11490434 [==============================] - 0s 0us/step\n",
+      "11501568/11490434 [==============================] - 0s 0us/step\n"
      ]
     }
    ],
    "source": [
-    "# Import TensorFlow \n",
-    "import tensorflow as tf\n",
-    "\n",
-    "# Helper libraries\n",
-    "import math\n",
-    "import numpy as np\n",
-    "import matplotlib.pyplot as plt\n",
-    "\n",
-    "\n",
-    "print(tf.__version__)\n",
-    "\n"
+    "from tensorflow.keras.datasets import mnist\n",
+    "(X_train, y_train), (X_test, y_test) = mnist.load_data()"
    ]
   },
   {
    "cell_type": "markdown",
    "metadata": {},
    "source": [
-    "## 1. Define Your Network\n",
-    "\n",
-    "The `tf.keras.models.Sequential` class is a wrapper for the neural network model that treats \n",
-    "the network as a sequence of layers. It implements the Keras model interface with common \n",
-    "methods like `compile()`, `fit()`, and `evaluate()` that are used to train and \n",
-    "run the model. We'll cover these functions soon, but first let's start looking at the layers of the model.\n",
-    "\n",
-    "#### Layers\n",
-    "\n",
-    "The Keras Layer class provides a common interface for a variety of standard neural network layers. You can add a layer to a model using the \n",
-    "model's `add()` method. For example, a simple model with a single hidden layer might look like this for the spiral dataset:"
+    "`X_train` and `y_train` form the training set, the data that the model will\n",
+    "learn from. The model will then be tested on the test set, `X_test` and `y_test`. The images are encoded as NumPy arrays, and the labels are an array of digits, ranging from 0 to 9. The images and labels have a one-to-one correspondence.\n",
+    "Let’s look at the training data:"
    ]
   },
   {
    "cell_type": "code",
-   "execution_count": 25,
+   "execution_count": 2,
    "metadata": {},
-   "outputs": [],
+   "outputs": [
+    {
+     "data": {
+      "text/plain": [
+       "(60000, 28, 28)"
+      ]
+     },
+     "execution_count": 2,
+     "metadata": {},
+     "output_type": "execute_result"
+    }
+   ],
    "source": [
-    "model = tf.keras.Sequential()\n",
-    "# From Input to first hidden layer\n",
-    "model.add(tf.keras.layers.Dense(100, activation= tf.nn.relu, \n",
-    "                                input_shape=(2,)))\n",
-    "# From first hidden layer to output layer\n",
-    "model.add(tf.keras.layers.Dense(3, activation=tf.nn.softmax))"
+    "X_train.shape"
    ]
   },
   {
-   "cell_type": "markdown",
+   "cell_type": "code",
+   "execution_count": 3,
    "metadata": {},
+   "outputs": [
+    {
+     "data": {
+      "text/plain": [
+       "60000"
+      ]
+     },
+     "execution_count": 3,
+     "metadata": {},
+     "output_type": "execute_result"
+    }
+   ],
    "source": [
-    "## 2. Compile Network\n",
-    "\n",
-    "\n",
-    "Once we have our model built, we need to compile it before it can be run. Compiling the Keras \n",
-    "model calls the backend (tensorflow, theano, etc.) and binds the optimizer, loss function, \n",
-    "and other parameters required before the model can be run on any input data. We'll specify the \n",
-    "loss function to be `categorical_crossentropy`, \n",
-    "and specify `adam` as the optimizer (which is a reasonable default when speed is a priority). And finally, \n",
-    "we can specify what metrics we want to evaluate the model with. Here we'll use `accuracy`.\n"
+    "len(y_train)"
    ]
   },
   {
    "cell_type": "code",
-   "execution_count": 26,
+   "execution_count": 4,
    "metadata": {},
    "outputs": [
     {
      "data": {
       "text/plain": [
-       "(300, 2)"
+       "array([5, 0, 4, ..., 5, 6, 8], dtype=uint8)"
       ]
      },
-     "execution_count": 26,
+     "execution_count": 4,
      "metadata": {},
      "output_type": "execute_result"
     }
    ],
    "source": [
-    "model.compile(optimizer='adam',\n",
-    "              loss='categorical_crossentropy',\n",
-    "              metrics=['accuracy'])\n",
-    "X.shape"
+    "y_train"
    ]
   },
   {
    "cell_type": "markdown",
    "metadata": {},
    "source": [
-    "We can see the resulting model architecture with the following command:"
+    "Let us display an the fourth digit:"
    ]
   },
   {
    "cell_type": "code",
-   "execution_count": 27,
+   "execution_count": 5,
    "metadata": {},
    "outputs": [
     {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "Model: \"sequential_2\"\n",
-      "_________________________________________________________________\n",
-      " Layer (type)                Output Shape              Param #   \n",
-      "=================================================================\n",
-      " dense_4 (Dense)             (None, 100)               300       \n",
-      "                                                                 \n",
-      " dense_5 (Dense)             (None, 3)                 303       \n",
-      "                                                                 \n",
-      "=================================================================\n",
-      "Total params: 603\n",
-      "Trainable params: 603\n",
-      "Non-trainable params: 0\n",
-      "_________________________________________________________________\n"
-     ]
+     "data": {
+      "image/png": "iVBORw0KGgoAAAANSUhEUgAAAPsAAAD4CAYAAAAq5pAIAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjMuMSwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy/d3fzzAAAACXBIWXMAAAsTAAALEwEAmpwYAAANpElEQVR4nO3db6xU9Z3H8c9HtxpDS4TlSpCSvbXyhKwpbSaySbGyaRbUaLAmEokSTIj0ASY2qXENakqMGt0sbWpcmtBVSrUrmrQKD0yRJY3YJ4TRsAqarmggFdF70ZhSo7LY7z64h+aKd35zmf/l+34lNzNzvnPmfDP64cyc35nzc0QIwJnvrH43AKA3CDuQBGEHkiDsQBKEHUji73q5sRkzZsTw8HAvNwmkcvDgQR09etQT1doKu+0rJP1U0tmS/jMiHiw9f3h4WPV6vZ1NAiio1WoNay1/jLd9tqT/kHSlpHmSltue1+rrAeiudr6zXyrpQES8FRHHJW2RtLQzbQHotHbCPlvSH8c9frta9jm2V9uu266Pjo62sTkA7ej60fiI2BgRtYioDQ0NdXtzABpoJ+yHJc0Z9/ir1TIAA6idsO+RNNf212yfI+kGSds60xaATmt56C0iTti+VdJ2jQ29PRYR+zvWGYCOamucPSKek/Rch3oB0EWcLgskQdiBJAg7kARhB5Ig7EAShB1IgrADSRB2IAnCDiRB2IEkCDuQBGEHkiDsQBKEHUiCsANJEHYgCcIOJEHYgSQIO5AEYQeSIOxAEoQdSIKwA0kQdiAJwg4kQdiBJAg7kARhB5Ig7EAShB1Ioq0pm20flHRM0meSTkRErRNNAei8tsJe+eeIONqB1wHQRXyMB5JoN+wh6XnbL9lePdETbK+2XbddHx0dbXNzAFrVbtgXRsS3JF0paY3t75z6hIjYGBG1iKgNDQ21uTkArWor7BFxuLodkfSMpEs70RSAzms57Lan2P7KyfuSFkva16nGAHRWO0fjZ0p6xvbJ1/mviPhtR7oC0HEthz0i3pL0jQ72AqCLGHoDkiDsQBKEHUiCsANJEHYgiU78EAYDbPfu3cX6448/Xqzv2rWrWN+3r/VTK9avX1+sX3jhhcX6iy++WKyvWLGiYW3BggXFdc9E7NmBJAg7kARhB5Ig7EAShB1IgrADSRB2IAnG2c8ATz31VMPabbfdVly32aXCIqJYX7RoUbF+9Gjja5HefvvtxXWbadZbadtbtmxpa9t/i9izA0kQdiAJwg4kQdiBJAg7kARhB5Ig7EASjLMPgBMnThTre/bsKdZvueWWhrWPPvqouO7ll19erN9zzz3F+sKFC4v1Tz/9tGFt2bJlxXW3b99erDdTqzGp8Hjs2YEkCDuQBGEHkiDsQBKEHUiCsANJEHYgCcbZB8ATTzxRrK9atarl1168eHGxXvotvCRNnTq15W03e/12x9HnzJlTrK9cubKt1z/TNN2z237M9ojtfeOWTbe9w/Yb1e207rYJoF2T+Rj/C0lXnLLsTkk7I2KupJ3VYwADrGnYI2KXpA9OWbxU0ubq/mZJ13a2LQCd1uoBupkRcaS6/66kmY2eaHu17brterPrnQHonraPxsfYVf8aXvkvIjZGRC0iakNDQ+1uDkCLWg37e7ZnSVJ1O9K5lgB0Q6th3ybp5LjGSklbO9MOgG5pOs5u+0lJiyTNsP22pB9JelDS07ZXSTokqfzD5OTuvvvuYv2BBx4o1m0X62vWrGlYu++++4rrtjuO3sz999/ftdd++OGHi3W+Nn5e07BHxPIGpe92uBcAXcTpskAShB1IgrADSRB2IAnCDiTBT1w74N577y3Wmw2tnXvuucX6kiVLivWHHnqoYe28884rrtvMJ598Uqw///zzxfqhQ4ca1ppNudzsMtZLly4t1vF57NmBJAg7kARhB5Ig7EAShB1IgrADSRB2IAnG2Sfpww8/bFjbsGFDcd1mP1FtNo7+7LPPFuvtOHDgQLF+4403Fuv1er3lbV9//fXF+h133NHya+OL2LMDSRB2IAnCDiRB2IEkCDuQBGEHkiDsQBKMs0/S8ePHG9bandaq2SWRR0bKc3Bs2rSpYW3r1vIl/ffv31+sHzt2rFhvdg7BWWc13p/cdNNNxXWnTJlSrOP0sGcHkiDsQBKEHUiCsANJEHYgCcIOJEHYgSQYZ5+kc845p2HtggsuKK7bbJx8eHi4WG82lt2O2bNnF+vNpnR+5513ivUZM2Y0rF1zzTXFddFZTffsth+zPWJ737hl62wftr23+ruqu20CaNdkPsb/QtIVEyz/SUTMr/6e62xbADqtadgjYpekD3rQC4AuaucA3a22X6k+5k9r9CTbq23XbdfbPYccQOtaDfvPJH1d0nxJRyStb/TEiNgYEbWIqA0NDbW4OQDtainsEfFeRHwWEX+R9HNJl3a2LQCd1lLYbc8a9/B7kvY1ei6AwdB0nN32k5IWSZph+21JP5K0yPZ8SSHpoKTvd6/FwXD++ec3rDW7rvvVV19drL///vvF+sUXX1ysl+Ypv/nmm4vrTp8+vVi/4YYbivVm4+zN1kfvNA17RCyfYPGjXegFQBdxuiyQBGEHkiDsQBKEHUiCsANJ8BPXDliwYEGxPsinCe/atatYf+GFF4r1Zj+/veiii067J3QHe3YgCcIOJEHYgSQIO5AEYQeSIOxAEoQdSIJx9uQ+/vjjYr3ZOHqzOj9xHRzs2YEkCDuQBGEHkiDsQBKEHUiCsANJEHYgCcbZk1uyZEm/W0CPsGcHkiDsQBKEHUiCsANJEHYgCcIOJEHYgSQYZ09u+/bt/W4BPdJ0z257ju3f2X7N9n7bt1XLp9veYfuN6nZa99sF0KrJfIw/IemHETFP0j9JWmN7nqQ7Je2MiLmSdlaPAQyopmGPiCMR8XJ1/5ik1yXNlrRU0ubqaZslXdulHgF0wGkdoLM9LOmbknZLmhkRR6rSu5JmNlhnte267fogz3kGnOkmHXbbX5b0a0k/iIg/ja9FREiKidaLiI0RUYuI2tDQUFvNAmjdpMJu+0saC/qvIuI31eL3bM+q6rMkjXSnRQCd0HTozWPXCn5U0usR8eNxpW2SVkp6sLrd2pUO0VVvvvlmv1tAj0xmnP3bklZIetX23mrZWo2F/GnbqyQdkrSsKx0C6IimYY+I30tqNBPAdzvbDoBu4XRZIAnCDiRB2IEkCDuQBGEHkuAnrslddtllxfrYyZE4E7BnB5Ig7EAShB1IgrADSRB2IAnCDiRB2IEkGGdP7pJLLinW586dW6w3+z18qc6Vi3qLPTuQBGEHkiDsQBKEHUiCsANJEHYgCcIOJME4O4rWrl1brK9atarl9R955JHiuvPmzSvWcXrYswNJEHYgCcIOJEHYgSQIO5AEYQeSIOxAEpOZn32OpF9KmikpJG2MiJ/aXifpFkmj1VPXRsRz3WoU/XHdddcV61u2bCnWd+zY0bC2bt264rqbNm0q1qdMmVKs4/Mmc1LNCUk/jIiXbX9F0ku2T/4X/ElE/Hv32gPQKZOZn/
2IpCPV/WO2X5c0u9uNAeis0/rObntY0jcl7a4W3Wr7FduP2Z7WYJ3Vtuu266OjoxM9BUAPTDrstr8s6deSfhARf5L0M0lflzRfY3v+9ROtFxEbI6IWETWuOQb0z6TCbvtLGgv6ryLiN5IUEe9FxGcR8RdJP5d0affaBNCupmG3bUmPSno9In48bvmscU/7nqR9nW8PQKdM5mj8tyWtkPSq7b3VsrWSltuer7HhuIOSvt+F/tBnU6dOLdaffvrpYv2uu+5qWNuwYUNx3WZDc/wE9vRM5mj87yV5ghJj6sDfEM6gA5Ig7EAShB1IgrADSRB2IAnCDiThiOjZxmq1WtTr9Z5tD8imVqupXq9PNFTOnh3IgrADSRB2IAnCDiRB2IEkCDuQBGEHkujpOLvtUUmHxi2aIelozxo4PYPa26D2JdFbqzrZ2z9ExITXf+tp2L+wcbseEbW+NVAwqL0Nal8SvbWqV73xMR5IgrADSfQ77Bv7vP2SQe1tUPuS6K1VPemtr9/ZAfROv/fsAHqEsANJ9CXstq+w/QfbB2zf2Y8eGrF90Partvfa7uuP76s59EZs7xu3bLrtHbbfqG4nnGOvT72ts324eu/22r6qT73Nsf0726/Z3m/7tmp5X9+7Ql89ed96/p3d9tmS/lfSv0h6W9IeScsj4rWeNtKA7YOSahHR9xMwbH9H0p8l/TIi/rFa9m+SPoiIB6t/KKdFxL8OSG/rJP2539N4V7MVzRo/zbikayXdrD6+d4W+lqkH71s/9uyXSjoQEW9FxHFJWyQt7UMfAy8idkn64JTFSyVtru5v1tj/LD3XoLeBEBFHIuLl6v4xSSenGe/re1foqyf6EfbZkv447vHbGqz53kPS87Zfsr26381MYGZEHKnuvytpZj+bmUDTabx76ZRpxgfmvWtl+vN2cYDuixZGxLckXSlpTfVxdSDF2HewQRo7ndQ03r0ywTTjf9XP967V6c/b1Y+wH5Y0Z9zjr1bLBkJEHK5uRyQ9o8Gbivq9kzPoVrcjfe7nrwZpGu+JphnXALx3/Zz+vB9h3yNpru2v2T5H0g2StvWhjy+wPaU6cCLbUyQt1uBNRb1N0srq/kpJW/vYy+cMyjTejaYZV5/fu75Pfx4RPf+TdJXGjsi/KemufvTQoK+LJP1P9be/371JelJjH+v+T2PHNlZJ+ntJOyW9Iem/JU0foN4el/SqpFc0FqxZfeptocY+or8iaW/1d1W/37tCXz153zhdFkiCA3RAEoQdSIKwA0kQdiAJwg4kQdiBJAg7kMT/A38cJNEbCe0NAAAAAElFTkSuQmCC\n",
+      "text/plain": [
+       "<Figure size 432x288 with 1 Axes>"
+      ]
+     },
+     "metadata": {
+      "needs_background": "light"
+     },
+     "output_type": "display_data"
     }
    ],
    "source": [
-    "model.summary()"
+    "import matplotlib.pyplot as plt\n",
+    "digit = X_train[4]\n",
+    "plt.imshow(digit, cmap=plt.cm.binary)\n",
+    "plt.show()"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 6,
+   "metadata": {},
+   "outputs": [
+    {
+     "data": {
+      "text/plain": [
+       "9"
+      ]
+     },
+     "execution_count": 6,
+     "metadata": {},
+     "output_type": "execute_result"
+    }
+   ],
+   "source": [
+    "y_train[4]"
    ]
   },
   {
    "cell_type": "markdown",
    "metadata": {},
    "source": [
-    "## 3. Fit Network\n",
-    "\n",
-    "The model is trained with the `fit()` method, through the following command that specifies the \n",
-    "number of training epochs and the message level (how much information we want displayed on the screen \n",
-    "during training).\n",
-    "\n",
-    "Before starting, we need to one-hot-encode the labels.\n"
+    "And the test data:"
    ]
   },
   {
    "cell_type": "code",
-   "execution_count": 28,
+   "execution_count": 7,
    "metadata": {},
    "outputs": [
     {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "[[0 0 1]\n",
-      " [0 1 0]\n",
-      " [1 0 0]]\n",
-      "[[1 0 0]\n",
-      " [1 0 0]\n",
-      " [1 0 0]]\n"
-     ]
+     "data": {
+      "text/plain": [
+       "(10000, 28, 28)"
+      ]
+     },
+     "execution_count": 7,
+     "metadata": {},
+     "output_type": "execute_result"
     }
    ],
    "source": [
-    "def convertToOneHot(vector, num_classes=None):\n",
-    "    result = np.zeros((len(vector), num_classes), dtype='uint8')\n",
-    "    result[np.arange(len(vector)), vector] = 1\n",
-    "    return result\n",
-    "print(convertToOneHot([2,1,0], 3))\n",
-    "\n",
-    "# One-hot-encoded labels of spiral datset\n",
-    "y_cat = convertToOneHot(y,3)\n",
-    "print(y_cat[:3])"
+    "X_test.shape"
    ]
   },
   {
-   "cell_type": "markdown",
+   "cell_type": "code",
+   "execution_count": 8,
    "metadata": {},
+   "outputs": [
+    {
+     "data": {
+      "text/plain": [
+       "10000"
+      ]
+     },
+     "execution_count": 8,
+     "metadata": {},
+     "output_type": "execute_result"
+    }
+   ],
    "source": [
-    "or alternatively"
+    "len(y_test)"
    ]
   },
   {
    "cell_type": "code",
-   "execution_count": 29,
+   "execution_count": 9,
    "metadata": {},
    "outputs": [
     {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "[[1. 0. 0.]\n",
-      " [1. 0. 0.]\n",
-      " [1. 0. 0.]]\n"
-     ]
+     "data": {
+      "text/plain": [
+       "array([7, 2, 1, ..., 4, 5, 6], dtype=uint8)"
+      ]
+     },
+     "execution_count": 9,
+     "metadata": {},
+     "output_type": "execute_result"
     }
    ],
    "source": [
-    "from tensorflow.keras import utils\n",
-    "y_cat = utils.to_categorical(y, 3)\n",
-    "print(y_cat[:3])"
+    "y_test"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Before training, we’ll preprocess the data by reshaping it into the shape the model\n",
+    "expects and scaling it so that all values are in the $[0, 1]$ interval. Previously, our training images were stored in an array of shape $(60000, 28, 28)$ of type `uint8` with values in the $[0, 255]$ interval. We’ll transform it into a `float32` array of shape $(60000, 28 * 28)$ with values between $0$ and $1$."
    ]
   },
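+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "As a minimal sketch, assuming the `X_train` and `X_test` arrays loaded above, this transformation amounts to:\n",
+    "\n",
+    "```python\n",
+    "# Flatten each 28x28 image to a 784-dimensional vector and rescale to [0, 1]\n",
+    "X_train = X_train.reshape((60000, 28 * 28)).astype(\"float32\") / 255\n",
+    "X_test = X_test.reshape((10000, 28 * 28)).astype(\"float32\") / 255\n",
+    "```"
+   ]
+  },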
   {
    "cell_type": "code",
-   "execution_count": 30,
+   "execution_count": 10,
    "metadata": {},
    "outputs": [
     {
      "name": "stdout",
      "output_type": "stream",
      "text": [
-      "Epoch 1/1000\n",
-      "3/3 [==============================] - 1s 3ms/step - loss: 1.0828 - accuracy: 0.3133\n",
-      "Epoch 2/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 1.0702 - accuracy: 0.3733\n",
-      "Epoch 3/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 1.0585 - accuracy: 0.4300\n",
-      "Epoch 4/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 1.0472 - accuracy: 0.5100\n",
-      "Epoch 5/1000\n",
-      "3/3 [==============================] - 0s 16ms/step - loss: 1.0359 - accuracy: 0.5367\n",
-      "Epoch 6/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 1.0246 - accuracy: 0.5500\n",
-      "Epoch 7/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 1.0135 - accuracy: 0.5533\n",
-      "Epoch 8/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 1.0028 - accuracy: 0.5600\n",
-      "Epoch 9/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.9918 - accuracy: 0.5500\n",
-      "Epoch 10/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.9819 - accuracy: 0.5500\n",
-      "Epoch 11/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.9713 - accuracy: 0.5533\n",
-      "Epoch 12/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.9609 - accuracy: 0.5533\n",
-      "Epoch 13/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.9512 - accuracy: 0.5533\n",
-      "Epoch 14/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.9410 - accuracy: 0.5533\n",
-      "Epoch 15/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.9311 - accuracy: 0.5533\n",
-      "Epoch 16/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.9219 - accuracy: 0.5500\n",
-      "Epoch 17/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.9123 - accuracy: 0.5533\n",
-      "Epoch 18/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.9033 - accuracy: 0.5500\n",
-      "Epoch 19/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.8944 - accuracy: 0.5533\n",
-      "Epoch 20/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.8854 - accuracy: 0.5533\n",
-      "Epoch 21/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.8767 - accuracy: 0.5533\n",
-      "Epoch 22/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.8686 - accuracy: 0.5533\n",
-      "Epoch 23/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.8600 - accuracy: 0.5533\n",
-      "Epoch 24/1000\n",
-      "3/3 [==============================] - 0s 7ms/step - loss: 0.8524 - accuracy: 0.5533\n",
-      "Epoch 25/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.8446 - accuracy: 0.5533\n",
-      "Epoch 26/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.8372 - accuracy: 0.5533\n",
-      "Epoch 27/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.8299 - accuracy: 0.5500\n",
-      "Epoch 28/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.8230 - accuracy: 0.5500\n",
-      "Epoch 29/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.8165 - accuracy: 0.5500\n",
-      "Epoch 30/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.8102 - accuracy: 0.5500\n",
-      "Epoch 31/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.8041 - accuracy: 0.5533\n",
-      "Epoch 32/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.7979 - accuracy: 0.5533\n",
-      "Epoch 33/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.7924 - accuracy: 0.5533\n",
-      "Epoch 34/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.7870 - accuracy: 0.5533\n",
-      "Epoch 35/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.7817 - accuracy: 0.5533\n",
-      "Epoch 36/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.7769 - accuracy: 0.5500\n",
-      "Epoch 37/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.7721 - accuracy: 0.5500\n",
-      "Epoch 38/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.7673 - accuracy: 0.5533\n",
-      "Epoch 39/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.7627 - accuracy: 0.5500\n",
-      "Epoch 40/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.7585 - accuracy: 0.5500\n",
-      "Epoch 41/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.7545 - accuracy: 0.5533\n",
-      "Epoch 42/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.7504 - accuracy: 0.5567\n",
-      "Epoch 43/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.7466 - accuracy: 0.5567\n",
-      "Epoch 44/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.7430 - accuracy: 0.5600\n",
-      "Epoch 45/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.7392 - accuracy: 0.5600\n",
-      "Epoch 46/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.7359 - accuracy: 0.5600\n",
-      "Epoch 47/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.7324 - accuracy: 0.5600\n",
-      "Epoch 48/1000\n",
-      "3/3 [==============================] - 0s 5ms/step - loss: 0.7291 - accuracy: 0.5600\n",
-      "Epoch 49/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.7259 - accuracy: 0.5633\n",
-      "Epoch 50/1000\n",
-      "3/3 [==============================] - 0s 5ms/step - loss: 0.7229 - accuracy: 0.5633\n",
-      "Epoch 51/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.7198 - accuracy: 0.5633\n",
-      "Epoch 52/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.7170 - accuracy: 0.5667\n",
-      "Epoch 53/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.7141 - accuracy: 0.5700\n",
-      "Epoch 54/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.7113 - accuracy: 0.5733\n",
-      "Epoch 55/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.7084 - accuracy: 0.5733\n",
-      "Epoch 56/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.7059 - accuracy: 0.5733\n",
-      "Epoch 57/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.7032 - accuracy: 0.5733\n",
-      "Epoch 58/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.7005 - accuracy: 0.5733\n",
-      "Epoch 59/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.6979 - accuracy: 0.5700\n",
-      "Epoch 60/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.6956 - accuracy: 0.5700\n",
-      "Epoch 61/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.6930 - accuracy: 0.5733\n",
-      "Epoch 62/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.6904 - accuracy: 0.5733\n",
-      "Epoch 63/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.6880 - accuracy: 0.5733\n",
-      "Epoch 64/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.6856 - accuracy: 0.5733\n",
-      "Epoch 65/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.6834 - accuracy: 0.5733\n",
-      "Epoch 66/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.6809 - accuracy: 0.5733\n",
-      "Epoch 67/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.6787 - accuracy: 0.5733\n",
-      "Epoch 68/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.6762 - accuracy: 0.5733\n",
-      "Epoch 69/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.6741 - accuracy: 0.5733\n",
-      "Epoch 70/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.6716 - accuracy: 0.5800\n",
-      "Epoch 71/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.6694 - accuracy: 0.5800\n",
-      "Epoch 72/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.6673 - accuracy: 0.5833\n",
-      "Epoch 73/1000\n",
-      "3/3 [==============================] - 0s 5ms/step - loss: 0.6650 - accuracy: 0.5833\n",
-      "Epoch 74/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.6627 - accuracy: 0.5833\n",
-      "Epoch 75/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.6605 - accuracy: 0.5867\n",
-      "Epoch 76/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.6584 - accuracy: 0.5833\n",
-      "Epoch 77/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.6561 - accuracy: 0.5900\n",
-      "Epoch 78/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.6541 - accuracy: 0.5867\n",
-      "Epoch 79/1000\n",
-      "3/3 [==============================] - 0s 7ms/step - loss: 0.6517 - accuracy: 0.5900\n",
-      "Epoch 80/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.6496 - accuracy: 0.5933\n",
-      "Epoch 81/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.6473 - accuracy: 0.5967\n",
-      "Epoch 82/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.6453 - accuracy: 0.5967\n",
-      "Epoch 83/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.6430 - accuracy: 0.5967\n",
-      "Epoch 84/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.6409 - accuracy: 0.5967\n",
-      "Epoch 85/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.6388 - accuracy: 0.5967\n",
-      "Epoch 86/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.6366 - accuracy: 0.5967\n",
-      "Epoch 87/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.6344 - accuracy: 0.6000\n",
-      "Epoch 88/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.6325 - accuracy: 0.6000\n",
-      "Epoch 89/1000\n",
-      "3/3 [==============================] - 0s 6ms/step - loss: 0.6301 - accuracy: 0.6000\n",
-      "Epoch 90/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.6279 - accuracy: 0.6033\n",
-      "Epoch 91/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.6259 - accuracy: 0.6033\n",
-      "Epoch 92/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.6236 - accuracy: 0.6067\n",
-      "Epoch 93/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.6216 - accuracy: 0.6133\n",
-      "Epoch 94/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.6194 - accuracy: 0.6133\n",
-      "Epoch 95/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.6173 - accuracy: 0.6167\n",
-      "Epoch 96/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.6150 - accuracy: 0.6233\n",
-      "Epoch 97/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.6129 - accuracy: 0.6300\n",
-      "Epoch 98/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.6107 - accuracy: 0.6300\n",
-      "Epoch 99/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.6087 - accuracy: 0.6333\n",
-      "Epoch 100/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.6064 - accuracy: 0.6367\n",
-      "Epoch 101/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.6044 - accuracy: 0.6367\n",
-      "Epoch 102/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.6022 - accuracy: 0.6400\n",
-      "Epoch 103/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.6000 - accuracy: 0.6433\n",
-      "Epoch 104/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.5979 - accuracy: 0.6433\n",
-      "Epoch 105/1000\n",
-      "3/3 [==============================] - 0s 5ms/step - loss: 0.5957 - accuracy: 0.6433\n",
-      "Epoch 106/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.5937 - accuracy: 0.6500\n",
-      "Epoch 107/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.5915 - accuracy: 0.6533\n",
-      "Epoch 108/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.5895 - accuracy: 0.6600\n",
-      "Epoch 109/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.5871 - accuracy: 0.6633\n",
-      "Epoch 110/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.5849 - accuracy: 0.6633\n",
-      "Epoch 111/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.5829 - accuracy: 0.6700\n",
-      "Epoch 112/1000\n",
-      "3/3 [==============================] - 0s 6ms/step - loss: 0.5806 - accuracy: 0.6733\n",
-      "Epoch 113/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.5786 - accuracy: 0.6767\n",
-      "Epoch 114/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.5767 - accuracy: 0.6800\n",
-      "Epoch 115/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.5744 - accuracy: 0.6800\n",
-      "Epoch 116/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.5722 - accuracy: 0.6867\n",
-      "Epoch 117/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.5701 - accuracy: 0.6867\n",
-      "Epoch 118/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.5678 - accuracy: 0.6867\n",
-      "Epoch 119/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.5657 - accuracy: 0.6867\n",
-      "Epoch 120/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.5635 - accuracy: 0.6900\n",
-      "Epoch 121/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.5615 - accuracy: 0.6900\n",
-      "Epoch 122/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.5593 - accuracy: 0.6900\n",
-      "Epoch 123/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.5572 - accuracy: 0.6867\n",
-      "Epoch 124/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.5551 - accuracy: 0.6867\n",
-      "Epoch 125/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.5528 - accuracy: 0.6900\n",
-      "Epoch 126/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.5506 - accuracy: 0.6900\n",
-      "Epoch 127/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.5485 - accuracy: 0.6900\n",
-      "Epoch 128/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.5465 - accuracy: 0.6967\n",
-      "Epoch 129/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.5442 - accuracy: 0.7000\n",
-      "Epoch 130/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.5421 - accuracy: 0.7000\n",
-      "Epoch 131/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.5399 - accuracy: 0.7033\n",
-      "Epoch 132/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.5378 - accuracy: 0.7067\n",
-      "Epoch 133/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.5357 - accuracy: 0.7067\n",
-      "Epoch 134/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.5336 - accuracy: 0.7067\n",
-      "Epoch 135/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.5314 - accuracy: 0.7067\n",
-      "Epoch 136/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.5295 - accuracy: 0.7067\n",
-      "Epoch 137/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.5271 - accuracy: 0.7100\n",
-      "Epoch 138/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.5249 - accuracy: 0.7100\n",
-      "Epoch 139/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.5231 - accuracy: 0.7100\n",
-      "Epoch 140/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.5208 - accuracy: 0.7133\n",
-      "Epoch 141/1000\n",
-      "3/3 [==============================] - ETA: 0s - loss: 0.5236 - accuracy: 0.72 - 0s 3ms/step - loss: 0.5185 - accuracy: 0.7133\n",
-      "Epoch 142/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.5164 - accuracy: 0.7133\n",
-      "Epoch 143/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.5143 - accuracy: 0.7133\n",
-      "Epoch 144/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.5120 - accuracy: 0.7133\n",
-      "Epoch 145/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.5102 - accuracy: 0.7167\n",
-      "Epoch 146/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.5079 - accuracy: 0.7167\n",
-      "Epoch 147/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.5058 - accuracy: 0.7233\n",
-      "Epoch 148/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.5037 - accuracy: 0.7233\n",
-      "Epoch 149/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.5017 - accuracy: 0.7233\n",
-      "Epoch 150/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.4995 - accuracy: 0.7200\n",
-      "Epoch 151/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.4974 - accuracy: 0.7233\n",
-      "Epoch 152/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.4953 - accuracy: 0.7233\n",
-      "Epoch 153/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.4933 - accuracy: 0.7233\n",
-      "Epoch 154/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.4913 - accuracy: 0.7300\n",
-      "Epoch 155/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.4892 - accuracy: 0.7333\n",
-      "Epoch 156/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.4871 - accuracy: 0.7333\n",
-      "Epoch 157/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.4850 - accuracy: 0.7333\n",
-      "Epoch 158/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.4831 - accuracy: 0.7367\n",
-      "Epoch 159/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.4810 - accuracy: 0.7433\n",
-      "Epoch 160/1000\n",
-      "3/3 [==============================] - 0s 5ms/step - loss: 0.4789 - accuracy: 0.7400\n",
-      "Epoch 161/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.4768 - accuracy: 0.7433\n",
-      "Epoch 162/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.4751 - accuracy: 0.7433\n",
-      "Epoch 163/1000\n",
-      "3/3 [==============================] - 0s 5ms/step - loss: 0.4729 - accuracy: 0.7433\n",
-      "Epoch 164/1000\n",
-      "3/3 [==============================] - 0s 5ms/step - loss: 0.4710 - accuracy: 0.7467\n",
-      "Epoch 165/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.4688 - accuracy: 0.7467\n",
-      "Epoch 166/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.4669 - accuracy: 0.7500\n",
-      "Epoch 167/1000\n",
-      "3/3 [==============================] - 0s 11ms/step - loss: 0.4649 - accuracy: 0.7500\n",
-      "Epoch 168/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.4629 - accuracy: 0.7500\n",
-      "Epoch 169/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.4611 - accuracy: 0.7533\n",
-      "Epoch 170/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.4591 - accuracy: 0.7533\n",
-      "Epoch 171/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.4571 - accuracy: 0.7533\n",
-      "Epoch 172/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.4552 - accuracy: 0.7567\n",
-      "Epoch 173/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.4533 - accuracy: 0.7600\n",
-      "Epoch 174/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.4513 - accuracy: 0.7600\n",
-      "Epoch 175/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.4495 - accuracy: 0.7600\n",
-      "Epoch 176/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.4475 - accuracy: 0.7667\n",
-      "Epoch 177/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.4458 - accuracy: 0.7667\n",
-      "Epoch 178/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.4437 - accuracy: 0.7667\n",
-      "Epoch 179/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.4421 - accuracy: 0.7667\n",
-      "Epoch 180/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.4400 - accuracy: 0.7667\n",
-      "Epoch 181/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.4382 - accuracy: 0.7767\n",
-      "Epoch 182/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.4363 - accuracy: 0.7733\n",
-      "Epoch 183/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.4345 - accuracy: 0.7767\n",
-      "Epoch 184/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.4327 - accuracy: 0.7767\n",
-      "Epoch 185/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.4309 - accuracy: 0.7767\n",
-      "Epoch 186/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.4291 - accuracy: 0.7767\n",
-      "Epoch 187/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.4272 - accuracy: 0.7767\n",
-      "Epoch 188/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.4254 - accuracy: 0.7767\n",
-      "Epoch 189/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.4237 - accuracy: 0.7767\n",
-      "Epoch 190/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.4219 - accuracy: 0.7767\n",
-      "Epoch 191/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.4201 - accuracy: 0.7800\n",
-      "Epoch 192/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.4185 - accuracy: 0.7833\n",
-      "Epoch 193/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.4166 - accuracy: 0.7833\n",
-      "Epoch 194/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.4150 - accuracy: 0.7867\n",
-      "Epoch 195/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.4132 - accuracy: 0.7833\n",
-      "Epoch 196/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.4114 - accuracy: 0.7867\n",
-      "Epoch 197/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.4098 - accuracy: 0.7867\n",
-      "Epoch 198/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.4081 - accuracy: 0.7900\n",
-      "Epoch 199/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.4064 - accuracy: 0.7933\n",
-      "Epoch 200/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.4047 - accuracy: 0.7933\n",
-      "Epoch 201/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.4031 - accuracy: 0.7933\n",
-      "Epoch 202/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.4015 - accuracy: 0.7967\n",
-      "Epoch 203/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.3997 - accuracy: 0.7967\n",
-      "Epoch 204/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.3982 - accuracy: 0.7967\n",
-      "Epoch 205/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.3965 - accuracy: 0.8000\n",
-      "Epoch 206/1000\n",
-      "3/3 [==============================] - 0s 9ms/step - loss: 0.3950 - accuracy: 0.8000\n",
-      "Epoch 207/1000\n",
-      "3/3 [==============================] - 0s 5ms/step - loss: 0.3933 - accuracy: 0.8000\n",
-      "Epoch 208/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.3918 - accuracy: 0.8000\n",
-      "Epoch 209/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.3901 - accuracy: 0.8000\n",
-      "Epoch 210/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.3886 - accuracy: 0.8000\n",
-      "Epoch 211/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.3870 - accuracy: 0.8033\n",
-      "Epoch 212/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.3855 - accuracy: 0.8067\n",
-      "Epoch 213/1000\n",
-      "3/3 [==============================] - 0s 5ms/step - loss: 0.3839 - accuracy: 0.8067\n",
-      "Epoch 214/1000\n",
-      "3/3 [==============================] - 0s 5ms/step - loss: 0.3823 - accuracy: 0.8067\n",
-      "Epoch 215/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.3808 - accuracy: 0.8100\n",
-      "Epoch 216/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.3794 - accuracy: 0.8100\n",
-      "Epoch 217/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.3778 - accuracy: 0.8133\n",
-      "Epoch 218/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.3766 - accuracy: 0.8133\n",
-      "Epoch 219/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.3749 - accuracy: 0.8133\n",
-      "Epoch 220/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.3733 - accuracy: 0.8133\n",
-      "Epoch 221/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.3718 - accuracy: 0.8133\n",
-      "Epoch 222/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.3704 - accuracy: 0.8133\n",
-      "Epoch 223/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.3689 - accuracy: 0.8133\n",
-      "Epoch 224/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.3676 - accuracy: 0.8167\n",
-      "Epoch 225/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.3661 - accuracy: 0.8167\n",
-      "Epoch 226/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.3647 - accuracy: 0.8200\n",
-      "Epoch 227/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.3633 - accuracy: 0.8200\n",
-      "Epoch 228/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.3618 - accuracy: 0.8267\n",
-      "Epoch 229/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.3603 - accuracy: 0.8267\n",
-      "Epoch 230/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.3590 - accuracy: 0.8267\n",
-      "Epoch 231/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.3577 - accuracy: 0.8233\n",
-      "Epoch 232/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.3563 - accuracy: 0.8233\n",
-      "Epoch 233/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.3549 - accuracy: 0.8233\n",
-      "Epoch 234/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.3535 - accuracy: 0.8233\n",
-      "Epoch 235/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.3522 - accuracy: 0.8267\n",
-      "Epoch 236/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.3509 - accuracy: 0.8267\n",
-      "Epoch 237/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.3498 - accuracy: 0.8267\n",
-      "Epoch 238/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.3484 - accuracy: 0.8300\n",
-      "Epoch 239/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.3468 - accuracy: 0.8333\n",
-      "Epoch 240/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.3458 - accuracy: 0.8300\n",
-      "Epoch 241/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.3443 - accuracy: 0.8300\n",
-      "Epoch 242/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.3429 - accuracy: 0.8333\n",
-      "Epoch 243/1000\n",
-      "3/3 [==============================] - 0s 17ms/step - loss: 0.3417 - accuracy: 0.8333\n",
-      "Epoch 244/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.3403 - accuracy: 0.8367\n",
-      "Epoch 245/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.3391 - accuracy: 0.8333\n",
-      "Epoch 246/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.3379 - accuracy: 0.8367\n",
-      "Epoch 247/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.3366 - accuracy: 0.8400\n",
-      "Epoch 248/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.3353 - accuracy: 0.8400\n",
-      "Epoch 249/1000\n",
-      "3/3 [==============================] - ETA: 0s - loss: 0.3375 - accuracy: 0.83 - 0s 4ms/step - loss: 0.3341 - accuracy: 0.8433\n",
-      "Epoch 250/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.3328 - accuracy: 0.8433\n",
-      "Epoch 251/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.3316 - accuracy: 0.8433\n",
-      "Epoch 252/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.3304 - accuracy: 0.8433\n",
-      "Epoch 253/1000\n",
-      "3/3 [==============================] - 0s 21ms/step - loss: 0.3293 - accuracy: 0.8433\n",
-      "Epoch 254/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.3282 - accuracy: 0.8433\n",
-      "Epoch 255/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.3269 - accuracy: 0.8433\n",
-      "Epoch 256/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.3258 - accuracy: 0.8433\n",
-      "Epoch 257/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.3245 - accuracy: 0.8433\n",
-      "Epoch 258/1000\n",
-      "3/3 [==============================] - 0s 20ms/step - loss: 0.3232 - accuracy: 0.8433\n",
-      "Epoch 259/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.3221 - accuracy: 0.8433\n",
-      "Epoch 260/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.3209 - accuracy: 0.8433\n",
-      "Epoch 261/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.3197 - accuracy: 0.8467\n",
-      "Epoch 262/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.3186 - accuracy: 0.8433\n",
-      "Epoch 263/1000\n",
-      "3/3 [==============================] - 0s 18ms/step - loss: 0.3175 - accuracy: 0.8433\n",
-      "Epoch 264/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.3163 - accuracy: 0.8433\n",
-      "Epoch 265/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.3151 - accuracy: 0.8433\n",
-      "Epoch 266/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.3141 - accuracy: 0.8467\n",
-      "Epoch 267/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.3131 - accuracy: 0.8500\n",
-      "Epoch 268/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.3117 - accuracy: 0.8467\n",
-      "Epoch 269/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.3108 - accuracy: 0.8567\n",
-      "Epoch 270/1000\n",
-      "3/3 [==============================] - 0s 5ms/step - loss: 0.3096 - accuracy: 0.8567\n",
-      "Epoch 271/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.3085 - accuracy: 0.8567\n",
-      "Epoch 272/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.3074 - accuracy: 0.8567\n",
-      "Epoch 273/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.3064 - accuracy: 0.8600\n",
-      "Epoch 274/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.3052 - accuracy: 0.8600\n",
-      "Epoch 275/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.3041 - accuracy: 0.8600\n",
-      "Epoch 276/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.3032 - accuracy: 0.8600\n",
-      "Epoch 277/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.3020 - accuracy: 0.8633\n",
-      "Epoch 278/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.3010 - accuracy: 0.8633\n",
-      "Epoch 279/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.2999 - accuracy: 0.8667\n",
-      "Epoch 280/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.2988 - accuracy: 0.8667\n",
-      "Epoch 281/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.2978 - accuracy: 0.8667\n",
-      "Epoch 282/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.2968 - accuracy: 0.8700\n",
-      "Epoch 283/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.2958 - accuracy: 0.8700\n",
-      "Epoch 284/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.2948 - accuracy: 0.8700\n",
-      "Epoch 285/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.2938 - accuracy: 0.8700\n",
-      "Epoch 286/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.2927 - accuracy: 0.8700\n",
-      "Epoch 287/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.2917 - accuracy: 0.8700\n",
-      "Epoch 288/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.2907 - accuracy: 0.8767\n",
-      "Epoch 289/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.2897 - accuracy: 0.8767\n",
-      "Epoch 290/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.2888 - accuracy: 0.8800\n",
-      "Epoch 291/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.2878 - accuracy: 0.8767\n",
-      "Epoch 292/1000\n",
-      "3/3 [==============================] - 0s 5ms/step - loss: 0.2867 - accuracy: 0.8767\n",
-      "Epoch 293/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.2858 - accuracy: 0.8800\n",
-      "Epoch 294/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.2849 - accuracy: 0.8800\n",
-      "Epoch 295/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.2839 - accuracy: 0.8800\n",
-      "Epoch 296/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.2831 - accuracy: 0.8800\n",
-      "Epoch 297/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.2819 - accuracy: 0.8800\n",
-      "Epoch 298/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.2810 - accuracy: 0.8800\n",
-      "Epoch 299/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.2801 - accuracy: 0.8800\n",
-      "Epoch 300/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.2792 - accuracy: 0.8833\n",
-      "Epoch 301/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.2783 - accuracy: 0.8833\n",
-      "Epoch 302/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.2772 - accuracy: 0.8867\n",
-      "Epoch 303/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.2764 - accuracy: 0.8833\n",
-      "Epoch 304/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.2754 - accuracy: 0.8867\n",
-      "Epoch 305/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.2745 - accuracy: 0.8867\n",
-      "Epoch 306/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.2736 - accuracy: 0.8867\n",
-      "Epoch 307/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.2727 - accuracy: 0.8867\n",
-      "Epoch 308/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.2719 - accuracy: 0.8867\n",
-      "Epoch 309/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.2709 - accuracy: 0.8867\n",
-      "Epoch 310/1000\n",
-      "3/3 [==============================] - 0s 5ms/step - loss: 0.2700 - accuracy: 0.8867\n",
-      "Epoch 311/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.2691 - accuracy: 0.8867\n",
-      "Epoch 312/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.2682 - accuracy: 0.8900\n",
-      "Epoch 313/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.2673 - accuracy: 0.8900\n",
-      "Epoch 314/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.2665 - accuracy: 0.8900\n",
-      "Epoch 315/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.2656 - accuracy: 0.8933\n",
-      "Epoch 316/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.2648 - accuracy: 0.8933\n",
-      "Epoch 317/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.2640 - accuracy: 0.8933\n",
-      "Epoch 318/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.2631 - accuracy: 0.8967\n",
-      "Epoch 319/1000\n",
-      "3/3 [==============================] - 0s 5ms/step - loss: 0.2621 - accuracy: 0.8967\n",
-      "Epoch 320/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.2613 - accuracy: 0.8967\n",
-      "Epoch 321/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.2605 - accuracy: 0.8967\n",
-      "Epoch 322/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.2598 - accuracy: 0.8933\n",
-      "Epoch 323/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.2588 - accuracy: 0.8967\n",
-      "Epoch 324/1000\n",
-      "3/3 [==============================] - 0s 7ms/step - loss: 0.2580 - accuracy: 0.8967\n",
-      "Epoch 325/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.2571 - accuracy: 0.8967\n",
-      "Epoch 326/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.2563 - accuracy: 0.8933\n",
-      "Epoch 327/1000\n",
-      "3/3 [==============================] - 0s 7ms/step - loss: 0.2555 - accuracy: 0.8967\n",
-      "Epoch 328/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.2547 - accuracy: 0.9000\n",
-      "Epoch 329/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.2540 - accuracy: 0.9000\n",
-      "Epoch 330/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.2531 - accuracy: 0.9000\n",
-      "Epoch 331/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.2524 - accuracy: 0.8967\n",
-      "Epoch 332/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.2515 - accuracy: 0.8967\n",
-      "Epoch 333/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.2507 - accuracy: 0.8967\n",
-      "Epoch 334/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.2498 - accuracy: 0.9000\n",
-      "Epoch 335/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.2492 - accuracy: 0.9000\n",
-      "Epoch 336/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.2485 - accuracy: 0.9000\n",
-      "Epoch 337/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.2476 - accuracy: 0.9000\n",
-      "Epoch 338/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.2468 - accuracy: 0.9000\n",
-      "Epoch 339/1000\n",
-      "3/3 [==============================] - 0s 5ms/step - loss: 0.2461 - accuracy: 0.9000\n",
-      "Epoch 340/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.2453 - accuracy: 0.9000\n",
-      "Epoch 341/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.2445 - accuracy: 0.9000\n",
-      "Epoch 342/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.2438 - accuracy: 0.9033\n",
-      "Epoch 343/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.2430 - accuracy: 0.9033\n",
-      "Epoch 344/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.2422 - accuracy: 0.9000\n",
-      "Epoch 345/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.2415 - accuracy: 0.9000\n",
-      "Epoch 346/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.2409 - accuracy: 0.9000\n",
-      "Epoch 347/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.2401 - accuracy: 0.9033\n",
-      "Epoch 348/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.2392 - accuracy: 0.9033\n",
-      "Epoch 349/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.2386 - accuracy: 0.9033\n",
-      "Epoch 350/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.2379 - accuracy: 0.9067\n",
-      "Epoch 351/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.2373 - accuracy: 0.9033\n",
-      "Epoch 352/1000\n",
-      "3/3 [==============================] - 0s 5ms/step - loss: 0.2364 - accuracy: 0.9033\n",
-      "Epoch 353/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.2357 - accuracy: 0.9033\n",
-      "Epoch 354/1000\n",
-      "3/3 [==============================] - 0s 5ms/step - loss: 0.2349 - accuracy: 0.9067\n",
-      "Epoch 355/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.2343 - accuracy: 0.9067\n",
-      "Epoch 356/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.2336 - accuracy: 0.9067\n",
-      "Epoch 357/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.2328 - accuracy: 0.9067\n",
-      "Epoch 358/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.2321 - accuracy: 0.9133\n",
-      "Epoch 359/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.2315 - accuracy: 0.9133\n",
-      "Epoch 360/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.2309 - accuracy: 0.9133\n",
-      "Epoch 361/1000\n",
-      "3/3 [==============================] - ETA: 0s - loss: 0.1736 - accuracy: 0.95 - 0s 4ms/step - loss: 0.2300 - accuracy: 0.9133\n",
-      "Epoch 362/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.2295 - accuracy: 0.9100\n",
-      "Epoch 363/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.2288 - accuracy: 0.9100\n",
-      "Epoch 364/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.2281 - accuracy: 0.9067\n",
-      "Epoch 365/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.2273 - accuracy: 0.9100\n",
-      "Epoch 366/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.2268 - accuracy: 0.9133\n",
-      "Epoch 367/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.2261 - accuracy: 0.9133\n",
-      "Epoch 368/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.2255 - accuracy: 0.9133\n",
-      "Epoch 369/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.2247 - accuracy: 0.9167\n",
-      "Epoch 370/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.2241 - accuracy: 0.9167\n",
-      "Epoch 371/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.2235 - accuracy: 0.9167\n",
-      "Epoch 372/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.2228 - accuracy: 0.9167\n",
-      "Epoch 373/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.2222 - accuracy: 0.9167\n",
-      "Epoch 374/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.2215 - accuracy: 0.9200\n",
-      "Epoch 375/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.2208 - accuracy: 0.9200\n",
-      "Epoch 376/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.2202 - accuracy: 0.9200\n",
-      "Epoch 377/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.2196 - accuracy: 0.9200\n",
-      "Epoch 378/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.2190 - accuracy: 0.9200\n",
-      "Epoch 379/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.2182 - accuracy: 0.9200\n",
-      "Epoch 380/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.2178 - accuracy: 0.9233\n",
-      "Epoch 381/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.2171 - accuracy: 0.9233\n",
-      "Epoch 382/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.2164 - accuracy: 0.9233\n",
-      "Epoch 383/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.2159 - accuracy: 0.9233\n",
-      "Epoch 384/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.2152 - accuracy: 0.9267\n",
-      "Epoch 385/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.2147 - accuracy: 0.9267\n",
-      "Epoch 386/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.2140 - accuracy: 0.9267\n",
-      "Epoch 387/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.2133 - accuracy: 0.9267\n",
-      "Epoch 388/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.2127 - accuracy: 0.9267\n",
-      "Epoch 389/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.2122 - accuracy: 0.9233\n",
-      "Epoch 390/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.2115 - accuracy: 0.9300\n",
-      "Epoch 391/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.2110 - accuracy: 0.9267\n",
-      "Epoch 392/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.2104 - accuracy: 0.9267\n",
-      "Epoch 393/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.2098 - accuracy: 0.9300\n",
-      "Epoch 394/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.2091 - accuracy: 0.9300\n",
-      "Epoch 395/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.2086 - accuracy: 0.9267\n",
-      "Epoch 396/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.2080 - accuracy: 0.9267\n",
-      "Epoch 397/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.2076 - accuracy: 0.9267\n",
-      "Epoch 398/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.2070 - accuracy: 0.9300\n",
-      "Epoch 399/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.2064 - accuracy: 0.9267\n",
-      "Epoch 400/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.2057 - accuracy: 0.9267\n",
-      "Epoch 401/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.2051 - accuracy: 0.9267\n",
-      "Epoch 402/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.2046 - accuracy: 0.9267\n",
-      "Epoch 403/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.2040 - accuracy: 0.9333\n",
-      "Epoch 404/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.2034 - accuracy: 0.9333\n",
-      "Epoch 405/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.2029 - accuracy: 0.9300\n",
-      "Epoch 406/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.2023 - accuracy: 0.9333\n",
-      "Epoch 407/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.2017 - accuracy: 0.9333\n",
-      "Epoch 408/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.2013 - accuracy: 0.9333\n",
-      "Epoch 409/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.2007 - accuracy: 0.9367\n",
-      "Epoch 410/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.2001 - accuracy: 0.9333\n",
-      "Epoch 411/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.1995 - accuracy: 0.9367\n",
-      "Epoch 412/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.1990 - accuracy: 0.9400\n",
-      "Epoch 413/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.1984 - accuracy: 0.9367\n",
-      "Epoch 414/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.1980 - accuracy: 0.9367\n",
-      "Epoch 415/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.1975 - accuracy: 0.9400\n",
-      "Epoch 416/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.1968 - accuracy: 0.9400\n",
-      "Epoch 417/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.1963 - accuracy: 0.9400\n",
-      "Epoch 418/1000\n",
-      "3/3 [==============================] - 0s 5ms/step - loss: 0.1957 - accuracy: 0.9367\n",
-      "Epoch 419/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.1953 - accuracy: 0.9367\n",
-      "Epoch 420/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.1948 - accuracy: 0.9367\n",
-      "Epoch 421/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.1942 - accuracy: 0.9400\n",
-      "Epoch 422/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.1937 - accuracy: 0.9400\n",
-      "Epoch 423/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.1932 - accuracy: 0.9400\n",
-      "Epoch 424/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.1927 - accuracy: 0.9433\n",
-      "Epoch 425/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.1922 - accuracy: 0.9433\n",
-      "Epoch 426/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.1916 - accuracy: 0.9433\n",
-      "Epoch 427/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.1911 - accuracy: 0.9433\n",
-      "Epoch 428/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.1907 - accuracy: 0.9400\n",
-      "Epoch 429/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.1900 - accuracy: 0.9400\n",
-      "Epoch 430/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.1896 - accuracy: 0.9400\n",
-      "Epoch 431/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.1891 - accuracy: 0.9433\n",
-      "Epoch 432/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.1886 - accuracy: 0.9433\n",
-      "Epoch 433/1000\n",
-      "3/3 [==============================] - 0s 5ms/step - loss: 0.1881 - accuracy: 0.9433\n",
-      "Epoch 434/1000\n",
-      "3/3 [==============================] - 0s 6ms/step - loss: 0.1875 - accuracy: 0.9433\n",
-      "Epoch 435/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.1871 - accuracy: 0.9433\n",
-      "Epoch 436/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.1866 - accuracy: 0.9433\n",
-      "Epoch 437/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.1860 - accuracy: 0.9433\n",
-      "Epoch 438/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.1856 - accuracy: 0.9433\n",
-      "Epoch 439/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.1851 - accuracy: 0.9433\n",
-      "Epoch 440/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.1846 - accuracy: 0.9433\n",
-      "Epoch 441/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.1841 - accuracy: 0.9433\n",
-      "Epoch 442/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.1838 - accuracy: 0.9433\n",
-      "Epoch 443/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.1832 - accuracy: 0.9433\n",
-      "Epoch 444/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.1828 - accuracy: 0.9433\n",
-      "Epoch 445/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.1822 - accuracy: 0.9433\n",
-      "Epoch 446/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.1818 - accuracy: 0.9433\n",
-      "Epoch 447/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.1812 - accuracy: 0.9433\n",
-      "Epoch 448/1000\n",
-      "3/3 [==============================] - 0s 5ms/step - loss: 0.1809 - accuracy: 0.9433\n",
-      "Epoch 449/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.1804 - accuracy: 0.9433\n",
-      "Epoch 450/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.1799 - accuracy: 0.9433\n",
-      "Epoch 451/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.1794 - accuracy: 0.9433\n",
-      "Epoch 452/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.1789 - accuracy: 0.9433\n",
-      "Epoch 453/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.1786 - accuracy: 0.9433\n",
-      "Epoch 454/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.1781 - accuracy: 0.9433\n",
-      "Epoch 455/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.1775 - accuracy: 0.9433\n",
-      "Epoch 456/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.1771 - accuracy: 0.9433\n",
-      "Epoch 457/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.1767 - accuracy: 0.9433\n",
-      "Epoch 458/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.1763 - accuracy: 0.9433\n",
-      "Epoch 459/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.1758 - accuracy: 0.9433\n",
-      "Epoch 460/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.1753 - accuracy: 0.9467\n",
-      "Epoch 461/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.1748 - accuracy: 0.9467\n",
-      "Epoch 462/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.1744 - accuracy: 0.9467\n",
-      "Epoch 463/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.1741 - accuracy: 0.9500\n",
-      "Epoch 464/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.1736 - accuracy: 0.9467\n",
-      "Epoch 465/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.1732 - accuracy: 0.9467\n",
-      "Epoch 466/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.1727 - accuracy: 0.9467\n",
-      "Epoch 467/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.1722 - accuracy: 0.9467\n",
-      "Epoch 468/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.1719 - accuracy: 0.9467\n",
-      "Epoch 469/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.1715 - accuracy: 0.9467\n",
-      "Epoch 470/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.1710 - accuracy: 0.9467\n",
-      "Epoch 471/1000\n",
-      "3/3 [==============================] - 0s 5ms/step - loss: 0.1706 - accuracy: 0.9500\n",
-      "Epoch 472/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.1701 - accuracy: 0.9467\n",
-      "Epoch 473/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.1697 - accuracy: 0.9500\n",
-      "Epoch 474/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.1692 - accuracy: 0.9467\n",
-      "Epoch 475/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.1688 - accuracy: 0.9500\n",
-      "Epoch 476/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.1683 - accuracy: 0.9533\n",
-      "Epoch 477/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.1680 - accuracy: 0.9533\n",
-      "Epoch 478/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.1675 - accuracy: 0.9533\n",
-      "Epoch 479/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.1672 - accuracy: 0.9533\n",
-      "Epoch 480/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.1667 - accuracy: 0.9533\n",
-      "Epoch 481/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.1663 - accuracy: 0.9533\n",
-      "Epoch 482/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.1659 - accuracy: 0.9500\n",
-      "Epoch 483/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.1656 - accuracy: 0.9533\n",
-      "Epoch 484/1000\n",
-      "3/3 [==============================] - 0s 12ms/step - loss: 0.1652 - accuracy: 0.9533\n",
-      "Epoch 485/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.1646 - accuracy: 0.9533\n",
-      "Epoch 486/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.1642 - accuracy: 0.9567\n",
-      "Epoch 487/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.1639 - accuracy: 0.9533\n",
-      "Epoch 488/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.1634 - accuracy: 0.9533\n",
-      "Epoch 489/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.1630 - accuracy: 0.9533\n",
-      "Epoch 490/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.1627 - accuracy: 0.9533\n",
-      "Epoch 491/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.1622 - accuracy: 0.9533\n",
-      "Epoch 492/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.1618 - accuracy: 0.9533\n",
-      "Epoch 493/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.1616 - accuracy: 0.9533\n",
-      "Epoch 494/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.1610 - accuracy: 0.9567\n",
-      "Epoch 495/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.1607 - accuracy: 0.9533\n",
-      "Epoch 496/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.1603 - accuracy: 0.9533\n",
-      "Epoch 497/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.1598 - accuracy: 0.9567\n",
-      "Epoch 498/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.1595 - accuracy: 0.9567\n",
-      "Epoch 499/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.1592 - accuracy: 0.9567\n",
-      "Epoch 500/1000\n",
-      "3/3 [==============================] - 0s 22ms/step - loss: 0.1587 - accuracy: 0.9567\n",
-      "Epoch 501/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.1583 - accuracy: 0.9567\n",
-      "Epoch 502/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.1580 - accuracy: 0.9533\n",
-      "Epoch 503/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.1575 - accuracy: 0.9533\n",
-      "Epoch 504/1000\n",
-      "3/3 [==============================] - 0s 21ms/step - loss: 0.1573 - accuracy: 0.9533\n",
-      "Epoch 505/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.1568 - accuracy: 0.9533\n",
-      "Epoch 506/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.1564 - accuracy: 0.9567\n",
-      "Epoch 507/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.1561 - accuracy: 0.9500\n",
-      "Epoch 508/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.1556 - accuracy: 0.9567\n",
-      "Epoch 509/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.1553 - accuracy: 0.9567\n",
-      "Epoch 510/1000\n",
-      "3/3 [==============================] - 0s 11ms/step - loss: 0.1549 - accuracy: 0.9567\n",
-      "Epoch 511/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.1545 - accuracy: 0.9567\n",
-      "Epoch 512/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.1541 - accuracy: 0.9567\n",
-      "Epoch 513/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.1539 - accuracy: 0.9567\n",
-      "Epoch 514/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.1534 - accuracy: 0.9567\n",
-      "Epoch 515/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.1530 - accuracy: 0.9567\n",
-      "Epoch 516/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.1527 - accuracy: 0.9567\n",
-      "Epoch 517/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.1523 - accuracy: 0.9567\n",
-      "Epoch 518/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.1519 - accuracy: 0.9567\n",
-      "Epoch 519/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.1516 - accuracy: 0.9567\n",
-      "Epoch 520/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.1513 - accuracy: 0.9567\n",
-      "Epoch 521/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.1509 - accuracy: 0.9567\n",
-      "Epoch 522/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.1506 - accuracy: 0.9567\n",
-      "Epoch 523/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.1503 - accuracy: 0.9567\n",
-      "Epoch 524/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.1499 - accuracy: 0.9567\n",
-      "Epoch 525/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.1496 - accuracy: 0.9533\n",
-      "Epoch 526/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.1491 - accuracy: 0.9567\n",
-      "Epoch 527/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.1488 - accuracy: 0.9567\n",
-      "Epoch 528/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.1484 - accuracy: 0.9567\n",
-      "Epoch 529/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.1480 - accuracy: 0.9633\n",
-      "Epoch 530/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.1477 - accuracy: 0.9600\n",
-      "Epoch 531/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.1474 - accuracy: 0.9600\n",
-      "Epoch 532/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.1470 - accuracy: 0.9633\n",
-      "Epoch 533/1000\n",
-      "3/3 [==============================] - 0s 6ms/step - loss: 0.1467 - accuracy: 0.9633\n",
-      "Epoch 534/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.1463 - accuracy: 0.9633\n",
-      "Epoch 535/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.1460 - accuracy: 0.9633\n",
-      "Epoch 536/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.1457 - accuracy: 0.9567\n",
-      "Epoch 537/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.1453 - accuracy: 0.9600\n",
-      "Epoch 538/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.1449 - accuracy: 0.9633\n",
-      "Epoch 539/1000\n",
-      "3/3 [==============================] - 0s 6ms/step - loss: 0.1446 - accuracy: 0.9633\n",
-      "Epoch 540/1000\n",
-      "3/3 [==============================] - 0s 6ms/step - loss: 0.1444 - accuracy: 0.9633\n",
-      "Epoch 541/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.1440 - accuracy: 0.9633\n",
-      "Epoch 542/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.1436 - accuracy: 0.9633\n",
-      "Epoch 543/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.1434 - accuracy: 0.9633\n",
-      "Epoch 544/1000\n",
-      "3/3 [==============================] - 0s 5ms/step - loss: 0.1430 - accuracy: 0.9633\n",
-      "Epoch 545/1000\n",
-      "3/3 [==============================] - 0s 7ms/step - loss: 0.1426 - accuracy: 0.9600\n",
-      "Epoch 546/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.1424 - accuracy: 0.9600\n",
-      "Epoch 547/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.1420 - accuracy: 0.9633\n",
-      "Epoch 548/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.1417 - accuracy: 0.9633\n",
-      "Epoch 549/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.1414 - accuracy: 0.9633\n",
-      "Epoch 550/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.1411 - accuracy: 0.9633\n",
-      "Epoch 551/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.1408 - accuracy: 0.9600\n",
-      "Epoch 552/1000\n",
-      "3/3 [==============================] - 0s 11ms/step - loss: 0.1404 - accuracy: 0.9633\n",
-      "Epoch 553/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.1401 - accuracy: 0.9633\n",
-      "Epoch 554/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.1398 - accuracy: 0.9600\n",
-      "Epoch 555/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.1395 - accuracy: 0.9633\n",
-      "Epoch 556/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.1392 - accuracy: 0.9633\n",
-      "Epoch 557/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.1390 - accuracy: 0.9633\n",
-      "Epoch 558/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.1385 - accuracy: 0.9633\n",
-      "Epoch 559/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.1382 - accuracy: 0.9633\n",
-      "Epoch 560/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.1379 - accuracy: 0.9633\n",
-      "Epoch 561/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.1377 - accuracy: 0.9633\n",
-      "Epoch 562/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.1374 - accuracy: 0.9667\n",
-      "Epoch 563/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.1370 - accuracy: 0.9667\n",
-      "Epoch 564/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.1367 - accuracy: 0.9667\n",
-      "Epoch 565/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.1365 - accuracy: 0.9667\n",
-      "Epoch 566/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.1361 - accuracy: 0.9700\n",
-      "Epoch 567/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.1358 - accuracy: 0.9700\n",
-      "Epoch 568/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.1356 - accuracy: 0.9633\n",
-      "Epoch 569/1000\n",
-      "3/3 [==============================] - ETA: 0s - loss: 0.1524 - accuracy: 0.95 - 0s 3ms/step - loss: 0.1353 - accuracy: 0.9667\n",
-      "Epoch 570/1000\n",
-      "3/3 [==============================] - 0s 5ms/step - loss: 0.1349 - accuracy: 0.9667\n",
-      "Epoch 571/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.1346 - accuracy: 0.9700\n",
-      "Epoch 572/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.1343 - accuracy: 0.9700\n",
-      "Epoch 573/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.1340 - accuracy: 0.9700\n",
-      "Epoch 574/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.1337 - accuracy: 0.9667\n",
-      "Epoch 575/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.1335 - accuracy: 0.9633\n",
-      "Epoch 576/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.1332 - accuracy: 0.9667\n",
-      "Epoch 577/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.1329 - accuracy: 0.9700\n",
-      "Epoch 578/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.1325 - accuracy: 0.9700\n",
-      "Epoch 579/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.1322 - accuracy: 0.9700\n",
-      "Epoch 580/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.1322 - accuracy: 0.9700\n",
-      "Epoch 581/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.1317 - accuracy: 0.9700\n",
-      "Epoch 582/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.1315 - accuracy: 0.9700\n",
-      "Epoch 583/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.1312 - accuracy: 0.9667\n",
-      "Epoch 584/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.1309 - accuracy: 0.9667\n",
-      "Epoch 585/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.1306 - accuracy: 0.9667\n",
-      "Epoch 586/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.1304 - accuracy: 0.9667\n",
-      "Epoch 587/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.1300 - accuracy: 0.9667\n",
-      "Epoch 588/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.1297 - accuracy: 0.9667\n",
-      "Epoch 589/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.1294 - accuracy: 0.9700\n",
-      "Epoch 590/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.1293 - accuracy: 0.9633\n",
-      "Epoch 591/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.1289 - accuracy: 0.9667\n",
-      "Epoch 592/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.1286 - accuracy: 0.9700\n",
-      "Epoch 593/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.1284 - accuracy: 0.9700\n",
-      "Epoch 594/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.1281 - accuracy: 0.9700\n",
-      "Epoch 595/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.1278 - accuracy: 0.9700\n",
-      "Epoch 596/1000\n",
-      "3/3 [==============================] - 0s 5ms/step - loss: 0.1275 - accuracy: 0.9700\n",
-      "Epoch 597/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.1273 - accuracy: 0.9700\n",
-      "Epoch 598/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.1270 - accuracy: 0.9700\n",
-      "Epoch 599/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.1268 - accuracy: 0.9700\n",
-      "Epoch 600/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.1266 - accuracy: 0.9700\n",
-      "Epoch 601/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.1262 - accuracy: 0.9733\n",
-      "Epoch 602/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.1260 - accuracy: 0.9733\n",
-      "Epoch 603/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.1257 - accuracy: 0.9733\n",
-      "Epoch 604/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.1254 - accuracy: 0.9733\n",
-      "Epoch 605/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.1252 - accuracy: 0.9733\n",
-      "Epoch 606/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.1249 - accuracy: 0.9733\n",
-      "Epoch 607/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.1246 - accuracy: 0.9800\n",
-      "Epoch 608/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.1244 - accuracy: 0.9767\n",
-      "Epoch 609/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.1241 - accuracy: 0.9800\n",
-      "Epoch 610/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.1238 - accuracy: 0.9800\n",
-      "Epoch 611/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.1236 - accuracy: 0.9733\n",
-      "Epoch 612/1000\n",
-      "3/3 [==============================] - 0s 5ms/step - loss: 0.1233 - accuracy: 0.9733\n",
-      "Epoch 613/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.1231 - accuracy: 0.9700\n",
-      "Epoch 614/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.1229 - accuracy: 0.9733\n",
-      "Epoch 615/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.1226 - accuracy: 0.9767\n",
-      "Epoch 616/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.1224 - accuracy: 0.9733\n",
-      "Epoch 617/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.1221 - accuracy: 0.9767\n",
-      "Epoch 618/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.1218 - accuracy: 0.9767\n",
-      "Epoch 619/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.1216 - accuracy: 0.9800\n",
-      "Epoch 620/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.1213 - accuracy: 0.9767\n",
-      "Epoch 621/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.1211 - accuracy: 0.9767\n",
-      "Epoch 622/1000\n",
-      "3/3 [==============================] - 0s 6ms/step - loss: 0.1208 - accuracy: 0.9767\n",
-      "Epoch 623/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.1205 - accuracy: 0.9767\n",
-      "Epoch 624/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.1202 - accuracy: 0.9767\n",
-      "Epoch 625/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.1201 - accuracy: 0.9767\n",
-      "Epoch 626/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.1199 - accuracy: 0.9767\n",
-      "Epoch 627/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.1195 - accuracy: 0.9767\n",
-      "Epoch 628/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.1195 - accuracy: 0.9767\n",
-      "Epoch 629/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.1191 - accuracy: 0.9800\n",
-      "Epoch 630/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.1188 - accuracy: 0.9800\n",
-      "Epoch 631/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.1186 - accuracy: 0.9800\n",
-      "Epoch 632/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.1185 - accuracy: 0.9800\n",
-      "Epoch 633/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.1181 - accuracy: 0.9833\n",
-      "Epoch 634/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.1179 - accuracy: 0.9800\n",
-      "Epoch 635/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.1176 - accuracy: 0.9800\n",
-      "Epoch 636/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.1174 - accuracy: 0.9800\n",
-      "Epoch 637/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.1171 - accuracy: 0.9800\n",
-      "Epoch 638/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.1169 - accuracy: 0.9800\n",
-      "Epoch 639/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.1168 - accuracy: 0.9800\n",
-      "Epoch 640/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.1165 - accuracy: 0.9800\n",
-      "Epoch 641/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.1162 - accuracy: 0.9800\n",
-      "Epoch 642/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.1160 - accuracy: 0.9800\n",
-      "Epoch 643/1000\n",
-      "3/3 [==============================] - 0s 7ms/step - loss: 0.1158 - accuracy: 0.9800\n",
-      "Epoch 644/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.1154 - accuracy: 0.9800\n",
-      "Epoch 645/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.1153 - accuracy: 0.9833\n",
-      "Epoch 646/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.1150 - accuracy: 0.9833\n",
-      "Epoch 647/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.1147 - accuracy: 0.9833\n",
-      "Epoch 648/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.1145 - accuracy: 0.9833\n",
-      "Epoch 649/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.1143 - accuracy: 0.9833\n",
-      "Epoch 650/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.1141 - accuracy: 0.9800\n",
-      "Epoch 651/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.1139 - accuracy: 0.9767\n",
-      "Epoch 652/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.1137 - accuracy: 0.9833\n",
-      "Epoch 653/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.1134 - accuracy: 0.9833\n",
-      "Epoch 654/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.1132 - accuracy: 0.9800\n",
-      "Epoch 655/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.1129 - accuracy: 0.9800\n",
-      "Epoch 656/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.1127 - accuracy: 0.9800\n",
-      "Epoch 657/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.1125 - accuracy: 0.9800\n",
-      "Epoch 658/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.1122 - accuracy: 0.9800\n",
-      "Epoch 659/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.1120 - accuracy: 0.9800\n",
-      "Epoch 660/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.1118 - accuracy: 0.9800\n",
-      "Epoch 661/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.1116 - accuracy: 0.9800\n",
-      "Epoch 662/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.1114 - accuracy: 0.9800\n",
-      "Epoch 663/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.1111 - accuracy: 0.9800\n",
-      "Epoch 664/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.1109 - accuracy: 0.9833\n",
-      "Epoch 665/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.1106 - accuracy: 0.9833\n",
-      "Epoch 666/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.1105 - accuracy: 0.9800\n",
-      "Epoch 667/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.1102 - accuracy: 0.9800\n",
-      "Epoch 668/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.1100 - accuracy: 0.9800\n",
-      "Epoch 669/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.1099 - accuracy: 0.9800\n",
-      "Epoch 670/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.1096 - accuracy: 0.9800\n",
-      "Epoch 671/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.1095 - accuracy: 0.9800\n",
-      "Epoch 672/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.1091 - accuracy: 0.9800\n",
-      "Epoch 673/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.1090 - accuracy: 0.9833\n",
-      "Epoch 674/1000\n",
-      "3/3 [==============================] - ETA: 0s - loss: 0.1040 - accuracy: 1.00 - 0s 3ms/step - loss: 0.1088 - accuracy: 0.9833\n",
-      "Epoch 675/1000\n",
-      "3/3 [==============================] - 0s 8ms/step - loss: 0.1085 - accuracy: 0.9833\n",
-      "Epoch 676/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.1083 - accuracy: 0.9833\n",
-      "Epoch 677/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.1080 - accuracy: 0.9833\n",
-      "Epoch 678/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.1079 - accuracy: 0.9800\n",
-      "Epoch 679/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.1077 - accuracy: 0.9800\n",
-      "Epoch 680/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.1074 - accuracy: 0.9800\n",
-      "Epoch 681/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.1073 - accuracy: 0.9833\n",
-      "Epoch 682/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.1071 - accuracy: 0.9800\n",
-      "Epoch 683/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.1069 - accuracy: 0.9800\n",
-      "Epoch 684/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.1066 - accuracy: 0.9800\n",
-      "Epoch 685/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.1064 - accuracy: 0.9800\n",
-      "Epoch 686/1000\n",
-      "3/3 [==============================] - 0s 5ms/step - loss: 0.1062 - accuracy: 0.9833\n",
-      "Epoch 687/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.1059 - accuracy: 0.9833\n",
-      "Epoch 688/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.1058 - accuracy: 0.9833\n",
-      "Epoch 689/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.1056 - accuracy: 0.9833\n",
-      "Epoch 690/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.1054 - accuracy: 0.9833\n",
-      "Epoch 691/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.1053 - accuracy: 0.9833\n",
-      "Epoch 692/1000\n",
-      "3/3 [==============================] - 0s 5ms/step - loss: 0.1050 - accuracy: 0.9800\n",
-      "Epoch 693/1000\n",
-      "3/3 [==============================] - 0s 5ms/step - loss: 0.1048 - accuracy: 0.9800\n",
-      "Epoch 694/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.1046 - accuracy: 0.9833\n",
-      "Epoch 695/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.1043 - accuracy: 0.9833\n",
-      "Epoch 696/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.1042 - accuracy: 0.9833\n",
-      "Epoch 697/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.1040 - accuracy: 0.9833\n",
-      "Epoch 698/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.1038 - accuracy: 0.9800\n",
-      "Epoch 699/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.1036 - accuracy: 0.9833\n",
-      "Epoch 700/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.1034 - accuracy: 0.9800\n",
-      "Epoch 701/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.1032 - accuracy: 0.9800\n",
-      "Epoch 702/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.1029 - accuracy: 0.9833\n",
-      "Epoch 703/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.1028 - accuracy: 0.9833\n",
-      "Epoch 704/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.1026 - accuracy: 0.9833\n",
-      "Epoch 705/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.1026 - accuracy: 0.9833\n",
-      "Epoch 706/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.1022 - accuracy: 0.9833\n",
-      "Epoch 707/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.1020 - accuracy: 0.9800\n",
-      "Epoch 708/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.1019 - accuracy: 0.9833\n",
-      "Epoch 709/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.1016 - accuracy: 0.9833\n",
-      "Epoch 710/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.1015 - accuracy: 0.9800\n",
-      "Epoch 711/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.1013 - accuracy: 0.9800\n",
-      "Epoch 712/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.1011 - accuracy: 0.9800\n",
-      "Epoch 713/1000\n",
-      "3/3 [==============================] - 0s 5ms/step - loss: 0.1010 - accuracy: 0.9800\n",
-      "Epoch 714/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.1007 - accuracy: 0.9800\n",
-      "Epoch 715/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.1005 - accuracy: 0.9800\n",
-      "Epoch 716/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.1004 - accuracy: 0.9833\n",
-      "Epoch 717/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.1002 - accuracy: 0.9833\n",
-      "Epoch 718/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.0999 - accuracy: 0.9833\n",
-      "Epoch 719/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.0998 - accuracy: 0.9800\n",
-      "Epoch 720/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.0997 - accuracy: 0.9833\n",
-      "Epoch 721/1000\n",
-      "3/3 [==============================] - 0s 9ms/step - loss: 0.0994 - accuracy: 0.9833\n",
-      "Epoch 722/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.0993 - accuracy: 0.9833\n",
-      "Epoch 723/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.0990 - accuracy: 0.9833\n",
-      "Epoch 724/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.0989 - accuracy: 0.9833\n",
-      "Epoch 725/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.0987 - accuracy: 0.9833\n",
-      "Epoch 726/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.0986 - accuracy: 0.9800\n",
-      "Epoch 727/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.0983 - accuracy: 0.9800\n",
-      "Epoch 728/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.0982 - accuracy: 0.9800\n",
-      "Epoch 729/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.0980 - accuracy: 0.9800\n",
-      "Epoch 730/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.0978 - accuracy: 0.9833\n",
-      "Epoch 731/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.0976 - accuracy: 0.9833\n",
-      "Epoch 732/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.0974 - accuracy: 0.9833\n",
-      "Epoch 733/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.0973 - accuracy: 0.9833\n",
-      "Epoch 734/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.0970 - accuracy: 0.9833\n",
-      "Epoch 735/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.0969 - accuracy: 0.9833\n",
-      "Epoch 736/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.0967 - accuracy: 0.9833\n",
-      "Epoch 737/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.0965 - accuracy: 0.9833\n",
-      "Epoch 738/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.0964 - accuracy: 0.9833\n",
-      "Epoch 739/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.0962 - accuracy: 0.9833\n",
-      "Epoch 740/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.0960 - accuracy: 0.9833\n",
-      "Epoch 741/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.0959 - accuracy: 0.9800\n",
-      "Epoch 742/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.0957 - accuracy: 0.9800\n",
-      "Epoch 743/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.0956 - accuracy: 0.9833\n",
-      "Epoch 744/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.0954 - accuracy: 0.9833\n",
-      "Epoch 745/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.0952 - accuracy: 0.9833\n",
-      "Epoch 746/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.0950 - accuracy: 0.9833\n",
-      "Epoch 747/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.0948 - accuracy: 0.9833\n",
-      "Epoch 748/1000\n",
-      "3/3 [==============================] - 0s 6ms/step - loss: 0.0948 - accuracy: 0.9833\n",
-      "Epoch 749/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.0946 - accuracy: 0.9800\n",
-      "Epoch 750/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.0943 - accuracy: 0.9833\n",
-      "Epoch 751/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.0941 - accuracy: 0.9800\n",
-      "Epoch 752/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.0940 - accuracy: 0.9833\n",
-      "Epoch 753/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.0938 - accuracy: 0.9833\n",
-      "Epoch 754/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.0937 - accuracy: 0.9833\n",
-      "Epoch 755/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.0936 - accuracy: 0.9833\n",
-      "Epoch 756/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.0933 - accuracy: 0.9833\n",
-      "Epoch 757/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.0932 - accuracy: 0.9833\n",
-      "Epoch 758/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.0930 - accuracy: 0.9833\n",
-      "Epoch 759/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.0928 - accuracy: 0.9833\n",
-      "Epoch 760/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.0927 - accuracy: 0.9833\n",
-      "Epoch 761/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.0925 - accuracy: 0.9833\n",
-      "Epoch 762/1000\n",
-      "3/3 [==============================] - 0s 5ms/step - loss: 0.0925 - accuracy: 0.9800\n",
-      "Epoch 763/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.0923 - accuracy: 0.9833\n",
-      "Epoch 764/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.0920 - accuracy: 0.9800\n",
-      "Epoch 765/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.0918 - accuracy: 0.9833\n",
-      "Epoch 766/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.0917 - accuracy: 0.9833\n",
-      "Epoch 767/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.0916 - accuracy: 0.9833\n",
-      "Epoch 768/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.0914 - accuracy: 0.9833\n",
-      "Epoch 769/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.0913 - accuracy: 0.9833\n",
-      "Epoch 770/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.0911 - accuracy: 0.9833\n",
-      "Epoch 771/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.0909 - accuracy: 0.9833\n",
-      "Epoch 772/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.0908 - accuracy: 0.9833\n",
-      "Epoch 773/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.0906 - accuracy: 0.9833\n",
-      "Epoch 774/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.0904 - accuracy: 0.9833\n",
-      "Epoch 775/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.0904 - accuracy: 0.9833\n",
-      "Epoch 776/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.0901 - accuracy: 0.9833\n",
-      "Epoch 777/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.0900 - accuracy: 0.9833\n",
-      "Epoch 778/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.0898 - accuracy: 0.9833\n",
-      "Epoch 779/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.0896 - accuracy: 0.9833\n",
-      "Epoch 780/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.0895 - accuracy: 0.9833\n",
-      "Epoch 781/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.0894 - accuracy: 0.9833\n",
-      "Epoch 782/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.0892 - accuracy: 0.9867\n",
-      "Epoch 783/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.0890 - accuracy: 0.9833\n",
-      "Epoch 784/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.0890 - accuracy: 0.9833\n",
-      "Epoch 785/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.0888 - accuracy: 0.9833\n",
-      "Epoch 786/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.0887 - accuracy: 0.9833\n",
-      "Epoch 787/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.0886 - accuracy: 0.9833\n",
-      "Epoch 788/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.0883 - accuracy: 0.9833\n",
-      "Epoch 789/1000\n",
-      "3/3 [==============================] - 0s 5ms/step - loss: 0.0882 - accuracy: 0.9833\n",
-      "Epoch 790/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.0880 - accuracy: 0.9833\n",
-      "Epoch 791/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.0879 - accuracy: 0.9833\n",
-      "Epoch 792/1000\n",
-      "3/3 [==============================] - 0s 5ms/step - loss: 0.0877 - accuracy: 0.9833\n",
-      "Epoch 793/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.0876 - accuracy: 0.9833\n",
-      "Epoch 794/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.0874 - accuracy: 0.9833\n",
-      "Epoch 795/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.0873 - accuracy: 0.9833\n",
-      "Epoch 796/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.0872 - accuracy: 0.9833\n",
-      "Epoch 797/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.0870 - accuracy: 0.9833\n",
-      "Epoch 798/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.0869 - accuracy: 0.9833\n",
-      "Epoch 799/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.0867 - accuracy: 0.9833\n",
-      "Epoch 800/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.0866 - accuracy: 0.9833\n",
-      "Epoch 801/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.0864 - accuracy: 0.9833\n",
-      "Epoch 802/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.0863 - accuracy: 0.9833\n",
-      "Epoch 803/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.0861 - accuracy: 0.9833\n",
-      "Epoch 804/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.0860 - accuracy: 0.9833\n",
-      "Epoch 805/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.0858 - accuracy: 0.9833\n",
-      "Epoch 806/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.0857 - accuracy: 0.9833\n",
-      "Epoch 807/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.0855 - accuracy: 0.9833\n",
-      "Epoch 808/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.0854 - accuracy: 0.9833\n",
-      "Epoch 809/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.0855 - accuracy: 0.9833\n",
-      "Epoch 810/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.0852 - accuracy: 0.9867\n",
-      "Epoch 811/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.0850 - accuracy: 0.9867\n",
-      "Epoch 812/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.0849 - accuracy: 0.9833\n",
-      "Epoch 813/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.0847 - accuracy: 0.9833\n",
-      "Epoch 814/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.0845 - accuracy: 0.9833\n",
-      "Epoch 815/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.0847 - accuracy: 0.9833\n",
-      "Epoch 816/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.0843 - accuracy: 0.9800\n",
-      "Epoch 817/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.0842 - accuracy: 0.9833\n",
-      "Epoch 818/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.0840 - accuracy: 0.9833\n",
-      "Epoch 819/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.0839 - accuracy: 0.9833\n",
-      "Epoch 820/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.0837 - accuracy: 0.9833\n",
-      "Epoch 821/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.0836 - accuracy: 0.9833\n",
-      "Epoch 822/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.0835 - accuracy: 0.9867\n",
-      "Epoch 823/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.0833 - accuracy: 0.9867\n",
-      "Epoch 824/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.0832 - accuracy: 0.9867\n",
-      "Epoch 825/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.0830 - accuracy: 0.9867\n",
-      "Epoch 826/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.0830 - accuracy: 0.9833\n",
-      "Epoch 827/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.0828 - accuracy: 0.9833\n",
-      "Epoch 828/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.0827 - accuracy: 0.9833\n",
-      "Epoch 829/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.0825 - accuracy: 0.9833\n",
-      "Epoch 830/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.0825 - accuracy: 0.9867\n",
-      "Epoch 831/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.0823 - accuracy: 0.9867\n",
-      "Epoch 832/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.0822 - accuracy: 0.9867\n",
-      "Epoch 833/1000\n",
-      "3/3 [==============================] - 0s 5ms/step - loss: 0.0820 - accuracy: 0.9867\n",
-      "Epoch 834/1000\n",
-      "3/3 [==============================] - 0s 6ms/step - loss: 0.0819 - accuracy: 0.9867\n",
-      "Epoch 835/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.0818 - accuracy: 0.9867\n",
-      "Epoch 836/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.0816 - accuracy: 0.9833\n",
-      "Epoch 837/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.0815 - accuracy: 0.9867\n",
-      "Epoch 838/1000\n",
-      "3/3 [==============================] - 0s 5ms/step - loss: 0.0813 - accuracy: 0.9867\n",
-      "Epoch 839/1000\n",
-      "3/3 [==============================] - 0s 5ms/step - loss: 0.0813 - accuracy: 0.9833\n",
-      "Epoch 840/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.0811 - accuracy: 0.9833\n",
-      "Epoch 841/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.0810 - accuracy: 0.9833\n",
-      "Epoch 842/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.0808 - accuracy: 0.9867\n",
-      "Epoch 843/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.0807 - accuracy: 0.9867\n",
-      "Epoch 844/1000\n",
-      "3/3 [==============================] - 0s 5ms/step - loss: 0.0806 - accuracy: 0.9867\n",
-      "Epoch 845/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.0804 - accuracy: 0.9867\n",
-      "Epoch 846/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.0803 - accuracy: 0.9867\n",
-      "Epoch 847/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.0802 - accuracy: 0.9867\n",
-      "Epoch 848/1000\n",
-      "3/3 [==============================] - 0s 21ms/step - loss: 0.0801 - accuracy: 0.9867\n",
-      "Epoch 849/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.0800 - accuracy: 0.9867\n",
-      "Epoch 850/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.0799 - accuracy: 0.9867\n",
-      "Epoch 851/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.0798 - accuracy: 0.9867\n",
-      "Epoch 852/1000\n",
-      "3/3 [==============================] - 0s 23ms/step - loss: 0.0796 - accuracy: 0.9867\n",
-      "Epoch 853/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.0794 - accuracy: 0.9867\n",
-      "Epoch 854/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.0793 - accuracy: 0.9867\n",
-      "Epoch 855/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.0792 - accuracy: 0.9867\n",
-      "Epoch 856/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.0791 - accuracy: 0.9867\n",
-      "Epoch 857/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.0790 - accuracy: 0.9867\n",
-      "Epoch 858/1000\n",
-      "3/3 [==============================] - 0s 5ms/step - loss: 0.0788 - accuracy: 0.9900\n",
-      "Epoch 859/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.0787 - accuracy: 0.9867\n",
-      "Epoch 860/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.0786 - accuracy: 0.9833\n",
-      "Epoch 861/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.0785 - accuracy: 0.9833\n",
-      "Epoch 862/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.0783 - accuracy: 0.9867\n",
-      "Epoch 863/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.0783 - accuracy: 0.9867\n",
-      "Epoch 864/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.0781 - accuracy: 0.9867\n",
-      "Epoch 865/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.0780 - accuracy: 0.9867\n",
-      "Epoch 866/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.0779 - accuracy: 0.9867\n",
-      "Epoch 867/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.0777 - accuracy: 0.9867\n",
-      "Epoch 868/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.0777 - accuracy: 0.9867\n",
-      "Epoch 869/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.0775 - accuracy: 0.9867\n",
-      "Epoch 870/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.0774 - accuracy: 0.9900\n",
-      "Epoch 871/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.0772 - accuracy: 0.9900\n",
-      "Epoch 872/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.0772 - accuracy: 0.9867\n",
-      "Epoch 873/1000\n",
-      "3/3 [==============================] - 0s 5ms/step - loss: 0.0770 - accuracy: 0.9867\n",
-      "Epoch 874/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.0770 - accuracy: 0.9867\n",
-      "Epoch 875/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.0768 - accuracy: 0.9867\n",
-      "Epoch 876/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.0766 - accuracy: 0.9867\n",
-      "Epoch 877/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.0765 - accuracy: 0.9867\n",
-      "Epoch 878/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.0765 - accuracy: 0.9867\n",
-      "Epoch 879/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.0763 - accuracy: 0.9867\n",
-      "Epoch 880/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.0763 - accuracy: 0.9867\n",
-      "Epoch 881/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.0761 - accuracy: 0.9900\n",
-      "Epoch 882/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.0760 - accuracy: 0.9900\n",
-      "Epoch 883/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.0759 - accuracy: 0.9867\n",
-      "Epoch 884/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.0757 - accuracy: 0.9867\n",
-      "Epoch 885/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.0757 - accuracy: 0.9867\n",
-      "Epoch 886/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.0755 - accuracy: 0.9867\n",
-      "Epoch 887/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.0754 - accuracy: 0.9867\n",
-      "Epoch 888/1000\n",
-      "3/3 [==============================] - 0s 5ms/step - loss: 0.0754 - accuracy: 0.9867\n",
-      "Epoch 889/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.0752 - accuracy: 0.9900\n",
-      "Epoch 890/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.0751 - accuracy: 0.9900\n",
-      "Epoch 891/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.0750 - accuracy: 0.9900\n",
-      "Epoch 892/1000\n",
-      "3/3 [==============================] - 0s 12ms/step - loss: 0.0748 - accuracy: 0.9900\n",
-      "Epoch 893/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.0748 - accuracy: 0.9867\n",
-      "Epoch 894/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.0747 - accuracy: 0.9867\n",
-      "Epoch 895/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.0745 - accuracy: 0.9867\n",
-      "Epoch 896/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.0744 - accuracy: 0.9867\n",
-      "Epoch 897/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.0743 - accuracy: 0.9833\n",
-      "Epoch 898/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.0742 - accuracy: 0.9900\n",
-      "Epoch 899/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.0740 - accuracy: 0.9900\n",
-      "Epoch 900/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.0740 - accuracy: 0.9900\n",
-      "Epoch 901/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.0739 - accuracy: 0.9900\n",
-      "Epoch 902/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.0738 - accuracy: 0.9900\n",
-      "Epoch 903/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.0737 - accuracy: 0.9900\n",
-      "Epoch 904/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.0735 - accuracy: 0.9900\n",
-      "Epoch 905/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.0737 - accuracy: 0.9867\n",
-      "Epoch 906/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.0734 - accuracy: 0.9867\n",
-      "Epoch 907/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.0733 - accuracy: 0.9900\n",
-      "Epoch 908/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.0731 - accuracy: 0.9900\n",
-      "Epoch 909/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.0730 - accuracy: 0.9900\n",
-      "Epoch 910/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.0729 - accuracy: 0.9900\n",
-      "Epoch 911/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.0728 - accuracy: 0.9900\n",
-      "Epoch 912/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.0727 - accuracy: 0.9900\n",
-      "Epoch 913/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.0726 - accuracy: 0.9900\n",
-      "Epoch 914/1000\n",
-      "3/3 [==============================] - 0s 5ms/step - loss: 0.0725 - accuracy: 0.9900\n",
-      "Epoch 915/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.0724 - accuracy: 0.9900\n",
-      "Epoch 916/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.0723 - accuracy: 0.9867\n",
-      "Epoch 917/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.0722 - accuracy: 0.9900\n",
-      "Epoch 918/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.0721 - accuracy: 0.9900\n",
-      "Epoch 919/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.0719 - accuracy: 0.9900\n",
-      "Epoch 920/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.0718 - accuracy: 0.9900\n",
-      "Epoch 921/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.0718 - accuracy: 0.9900\n",
-      "Epoch 922/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.0717 - accuracy: 0.9900\n",
-      "Epoch 923/1000\n",
-      "3/3 [==============================] - 0s 6ms/step - loss: 0.0716 - accuracy: 0.9900\n",
-      "Epoch 924/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.0715 - accuracy: 0.9833\n",
-      "Epoch 925/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.0713 - accuracy: 0.9867\n",
-      "Epoch 926/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.0713 - accuracy: 0.9867\n",
-      "Epoch 927/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.0711 - accuracy: 0.9867\n",
-      "Epoch 928/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.0710 - accuracy: 0.9900\n",
-      "Epoch 929/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.0709 - accuracy: 0.9900\n",
-      "Epoch 930/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.0708 - accuracy: 0.9900\n",
-      "Epoch 931/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.0708 - accuracy: 0.9900\n",
-      "Epoch 932/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.0706 - accuracy: 0.9900\n",
-      "Epoch 933/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.0705 - accuracy: 0.9900\n",
-      "Epoch 934/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.0704 - accuracy: 0.9900\n",
-      "Epoch 935/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.0703 - accuracy: 0.9900\n",
-      "Epoch 936/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.0702 - accuracy: 0.9900\n",
-      "Epoch 937/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.0701 - accuracy: 0.9900\n",
-      "Epoch 938/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.0701 - accuracy: 0.9900\n",
-      "Epoch 939/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.0699 - accuracy: 0.9867\n",
-      "Epoch 940/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.0699 - accuracy: 0.9900\n",
-      "Epoch 941/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.0697 - accuracy: 0.9900\n",
-      "Epoch 942/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.0696 - accuracy: 0.9900\n",
-      "Epoch 943/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.0696 - accuracy: 0.9900\n",
-      "Epoch 944/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.0694 - accuracy: 0.9900\n",
-      "Epoch 945/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.0693 - accuracy: 0.9900\n",
-      "Epoch 946/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.0694 - accuracy: 0.9867\n",
-      "Epoch 947/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.0691 - accuracy: 0.9900\n",
-      "Epoch 948/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.0690 - accuracy: 0.9900\n",
-      "Epoch 949/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.0690 - accuracy: 0.9867\n",
-      "Epoch 950/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.0689 - accuracy: 0.9900\n",
-      "Epoch 951/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.0688 - accuracy: 0.9900\n",
-      "Epoch 952/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.0687 - accuracy: 0.9900\n",
-      "Epoch 953/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.0686 - accuracy: 0.9900\n",
-      "Epoch 954/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.0685 - accuracy: 0.9900\n",
-      "Epoch 955/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.0685 - accuracy: 0.9900\n",
-      "Epoch 956/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.0684 - accuracy: 0.9900\n",
-      "Epoch 957/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.0683 - accuracy: 0.9900\n",
-      "Epoch 958/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.0681 - accuracy: 0.9900\n",
-      "Epoch 959/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.0680 - accuracy: 0.9900\n",
-      "Epoch 960/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.0679 - accuracy: 0.9867\n",
-      "Epoch 961/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.0679 - accuracy: 0.9900\n",
-      "Epoch 962/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.0677 - accuracy: 0.9900\n",
-      "Epoch 963/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.0678 - accuracy: 0.9867\n",
-      "Epoch 964/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.0675 - accuracy: 0.9900\n",
-      "Epoch 965/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.0675 - accuracy: 0.9900\n",
-      "Epoch 966/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.0674 - accuracy: 0.9900\n",
-      "Epoch 967/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.0673 - accuracy: 0.9867\n",
-      "Epoch 968/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.0672 - accuracy: 0.9900\n",
-      "Epoch 969/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.0671 - accuracy: 0.9900\n",
-      "Epoch 970/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.0670 - accuracy: 0.9900\n",
-      "Epoch 971/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.0670 - accuracy: 0.9900\n",
-      "Epoch 972/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.0670 - accuracy: 0.9900\n",
-      "Epoch 973/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.0667 - accuracy: 0.9900\n",
-      "Epoch 974/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.0667 - accuracy: 0.9900\n",
-      "Epoch 975/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.0666 - accuracy: 0.9900\n",
-      "Epoch 976/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.0664 - accuracy: 0.9900\n",
-      "Epoch 977/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.0665 - accuracy: 0.9900\n",
-      "Epoch 978/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.0664 - accuracy: 0.9900\n",
-      "Epoch 979/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.0662 - accuracy: 0.9900\n",
-      "Epoch 980/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.0661 - accuracy: 0.9900\n",
-      "Epoch 981/1000\n",
-      "3/3 [==============================] - 0s 6ms/step - loss: 0.0660 - accuracy: 0.9900\n",
-      "Epoch 982/1000\n",
-      "3/3 [==============================] - 0s 5ms/step - loss: 0.0659 - accuracy: 0.9900\n",
-      "Epoch 983/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.0659 - accuracy: 0.9900\n",
-      "Epoch 984/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.0658 - accuracy: 0.9900\n",
-      "Epoch 985/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.0657 - accuracy: 0.9900\n",
-      "Epoch 986/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.0657 - accuracy: 0.9900\n",
-      "Epoch 987/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.0655 - accuracy: 0.9867\n",
-      "Epoch 988/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.0654 - accuracy: 0.9900\n",
-      "Epoch 989/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.0653 - accuracy: 0.9900\n",
-      "Epoch 990/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.0653 - accuracy: 0.9867\n",
-      "Epoch 991/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.0652 - accuracy: 0.9867\n",
-      "Epoch 992/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.0651 - accuracy: 0.9900\n",
-      "Epoch 993/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.0650 - accuracy: 0.9900\n",
-      "Epoch 994/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.0649 - accuracy: 0.9900\n",
-      "Epoch 995/1000\n",
-      "3/3 [==============================] - 0s 5ms/step - loss: 0.0649 - accuracy: 0.9900\n",
-      "Epoch 996/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.0647 - accuracy: 0.9900\n",
-      "Epoch 997/1000\n",
-      "3/3 [==============================] - 0s 4ms/step - loss: 0.0646 - accuracy: 0.9900\n",
-      "Epoch 998/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.0645 - accuracy: 0.9900\n",
-      "Epoch 999/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.0646 - accuracy: 0.9867\n",
-      "Epoch 1000/1000\n",
-      "3/3 [==============================] - 0s 3ms/step - loss: 0.0644 - accuracy: 0.9900\n"
+      "(60000, 784)\n",
+      "(10000, 784)\n"
      ]
-    },
-    {
-     "data": {
-      "text/plain": [
-       "<keras.callbacks.History at 0x7f472477ad50>"
-      ]
-     },
-     "execution_count": 30,
-     "metadata": {},
-     "output_type": "execute_result"
     }
    ],
    "source": [
-    "BATCH_SIZE=128\n",
-    "num_train_examples = X.shape[0]\n",
-    "num_train_examples\n",
-    "model.fit(X, y_cat, epochs=1000, steps_per_epoch=math.ceil(num_train_examples/BATCH_SIZE))"
+    "X_train = X_train.reshape((60000, 28 * 28))\n",
+    "print(X_train.shape)\n",
+    "X_train = X_train.astype(\"float32\") / 255\n",
+    "X_test = X_test.reshape((10000, 28 * 28))\n",
+    "X_test = X_test.astype(\"float32\") / 255\n",
+    "print(X_test.shape)"
    ]
   },
   {
    "cell_type": "markdown",
    "metadata": {},
    "source": [
-    "## 4. Evaluate Network\n",
-    "\n",
-    "Finally, we can use the following command to evaluate the model:"
+    "In addition, we need to _one-hot-encode_ the labels: "
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 11,
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "5\n",
+      "[0. 0. 0. 0. 0. 1. 0. 0. 0. 0.]\n"
+     ]
+    }
+   ],
+   "source": [
+    "from tensorflow.keras.utils import to_categorical\n",
+    "y_train_cat = to_categorical(y_train)\n",
+    "print(y_train[0])\n",
+    "print(y_train_cat[0])"
    ]
   },
   {
    "cell_type": "code",
-   "execution_count": 31,
+   "execution_count": 12,
    "metadata": {},
    "outputs": [
     {
      "name": "stdout",
      "output_type": "stream",
      "text": [
-      "10/10 [==============================] - 0s 2ms/step - loss: 0.0643 - accuracy: 0.9900\n",
-      "Accuracy on test dataset: 0.9900000095367432\n"
+      "7\n",
+      "[0. 0. 0. 0. 0. 0. 0. 1. 0. 0.]\n"
      ]
     }
    ],
    "source": [
-    "test_loss, test_accuracy = model.evaluate(X, y_cat, steps=math.ceil(num_train_examples/32))\n",
-    "print('Accuracy on test dataset:', test_accuracy)"
+    "y_test_cat = to_categorical(y_test)\n",
+    "print(y_test[0])\n",
+    "print(y_test_cat[0])"
    ]
   },
   {
    "cell_type": "markdown",
    "metadata": {},
    "source": [
-    "# Part 3 : Keras for MNIST"
+    "The workflow will be as follows: First, we’ll feed the neural network the training data, `X_train` and `y_train`. The network will then learn to associate images and labels. Finally, we’ll ask the network to produce predictions for test_images, and we’ll verify whether these predictions match the labels from `test_labels`."
    ]
   },
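+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "As a minimal sketch of that workflow in Keras (the model itself is defined and compiled in the following sections; the epoch count and batch size here are purely illustrative):\n",
+    "\n",
+    "```python\n",
+    "model.fit(X_train, y_train_cat, epochs=5, batch_size=128)    # learn to associate images with labels\n",
+    "test_loss, test_acc = model.evaluate(X_test, y_test_cat)     # measure performance on unseen images\n",
+    "predictions = model.predict(X_test)                          # one probability per class for each image\n",
+    "```"
+   ]
+  },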
   {
    "cell_type": "markdown",
    "metadata": {},
    "source": [
-    "Let’s look at a concrete example of a neural network that uses the Python library\n",
-    "Keras to learn to classify handwritten digits. \n",
+    "## 1. Define Networks\n",
     "\n",
-    "The problem we’re trying to solve here is to classify grayscale images of handwritten\n",
-    "digits (28x28 pixels) into their 10 categories (0 through 9). We’ll use the MNIST\n",
-    "dataset, a classic in the machine learning community, which has been around almost\n",
-    "as long as the field itself and has been intensively studied. It’s a set of 60'000 training images, plus 10'000 test images, assembled by the National Institute of Standards and Technology (the NIST in MNIST) in the 1980s. You can think of \n",
-    "“solving” MNIST as the “Hello World” of deep learning—it’s what you do to verify \n",
-    "that your algorithms are working as expected. As you become a machine learning practitioner, you’ll see MNIST come up over and over again in scientific papers, blog posts, and so on."
+    "Building the neural network requires configuring the layers of the model, then compiling the model.\n",
+    "\n",
+    "\n",
+    "#### Setup the layers\n",
+    "\n",
+    "The basic building block of a neural network is the *layer*. A layer extracts a representation from the data fed into it. Hopefully, a series of connected layers results in a representation that is meaningful for the problem at hand.\n",
+    "\n",
+    "Much of deep learning consists of chaining together simple layers. Most layers, like `tf.keras.layers.Dense`, have internal parameters which are adjusted (\"learned\") during training."
    ]
   },
   {
-   "cell_type": "markdown",
+   "cell_type": "code",
+   "execution_count": 13,
    "metadata": {},
+   "outputs": [],
    "source": [
-    "## Loading the MNIST dataset in Keras\n",
-    "The MNIST dataset comes preloaded in Keras, in the form of a set of four NumPy\n",
-    "arrays."
+    "from tensorflow import keras\n",
+    "from tensorflow.keras import layers\n",
+    "\n",
+    "model = keras.Sequential([\n",
+    "layers.Dense(500, activation=\"relu\", input_shape=(784,)),\n",
+    "layers.Dense(50, activation=\"relu\"),    \n",
+    "layers.Dense(10, activation=\"softmax\")\n",
+    "])"
    ]
   },
   {
-   "cell_type": "code",
-   "execution_count": 72,
+   "cell_type": "markdown",
    "metadata": {},
-   "outputs": [],
    "source": [
-    "from tensorflow.keras.datasets import mnist\n",
-    "(X_train, y_train), (X_test, y_test) = mnist.load_data()"
+    "This network has three layers:\n",
+    "\n",
+    "\n",
+    "\n",
+    "* **\"hidden\"** `layers.Dense`— A densely connected layer of 500 neurons. Each neuron (or node) takes input from all 784 nodes in the previous layer - by specifying an `input_shape` to the first layer in the Sequential model - , weighting that input according to hidden parameters which will be learned during training, and outputs a single value to the next layer.  \n",
+    "\n",
+    "* **\"hidden\"** `layers.Dense`— A densely connected layer of 50 neurons. Each neuron (or node) takes input from all 500 nodes in the previous layer, weighting that input according to hidden parameters which will be learned during training, and outputs a single value to the next layer.\n",
+    "\n",
+    "* **output** `layers.Dense` — A 10-node *softmax* layer, with each node representing a class of clothing. As in the previous layer, each node takes input from the 50 nodes in the layer before it. Each node weights the input according to learned parameters, and then outputs a value in the range `[0, 1]`, representing the probability that the image belongs to that class. The sum of all 10 node values is 1.\n"
    ]
   },
   {
    "cell_type": "markdown",
    "metadata": {},
    "source": [
-    "`X_train` and `y_train` form the training set, the data that the model will\n",
-    "learn from. The model will then be tested on the test set, `X_test` and `y_test`. The images are encoded as NumPy arrays, and the labels are an array of digits, ranging from 0 to 9. The images and labels have a one-to-one correspondence.\n",
-    "Let’s look at the training data:"
+    "## 2. Compile Networks\n",
+    "\n",
+    "\n",
+    "Before the model is ready for training, it needs a few more settings. These are added during the model's *compile* step:\n",
+    "\n",
+    "\n",
+    "* *Loss function* — An algorithm for measuring how far the model's outputs are from the desired output. The goal of training is this measures loss.\n",
+    "* *Optimizer* —An algorithm for adjusting the inner parameters of the model in order to minimize loss.\n",
+    "* *Metrics* —Used to monitor the training and testing steps. The following example uses *accuracy*, the fraction of the images that are correctly classified."
    ]
   },
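+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "As a side note, for a one-hot target vector $y$ and predicted class probabilities $\\hat{y}$ (the softmax output), the `categorical_crossentropy` loss of a single example is\n",
+    "\n",
+    "$$L(y, \\hat{y}) = -\\sum_{i=1}^{10} y_i \\log \\hat{y}_i = -\\log \\hat{y}_c ,$$\n",
+    "\n",
+    "where $c$ is the index of the true class. Training minimizes the average of this loss over the training batches, and *accuracy* is simply the fraction of images whose most probable class equals the true label.\n"
+   ]
+  },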
   {
    "cell_type": "code",
-   "execution_count": 73,
+   "execution_count": 14,
    "metadata": {},
-   "outputs": [
-    {
-     "data": {
-      "text/plain": [
-       "(60000, 28, 28)"
-      ]
-     },
-     "execution_count": 73,
-     "metadata": {},
-     "output_type": "execute_result"
-    }
-   ],
+   "outputs": [],
    "source": [
-    "X_train.shape"
+    "model.compile(optimizer='sgd',\n",
+    "              loss='categorical_crossentropy',\n",
+    "              metrics=['accuracy'])"
    ]
   },
   {
    "cell_type": "code",
-   "execution_count": 74,
+   "execution_count": 15,
    "metadata": {},
    "outputs": [
     {
-     "data": {
-      "text/plain": [
-       "60000"
-      ]
-     },
-     "execution_count": 74,
-     "metadata": {},
-     "output_type": "execute_result"
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "Model: \"sequential\"\n",
+      "_________________________________________________________________\n",
+      " Layer (type)                Output Shape              Param #   \n",
+      "=================================================================\n",
+      " dense (Dense)               (None, 500)               392500    \n",
+      "                                                                 \n",
+      " dense_1 (Dense)             (None, 50)                25050     \n",
+      "                                                                 \n",
+      " dense_2 (Dense)             (None, 10)                510       \n",
+      "                                                                 \n",
+      "=================================================================\n",
+      "Total params: 418,060\n",
+      "Trainable params: 418,060\n",
+      "Non-trainable params: 0\n",
+      "_________________________________________________________________\n"
+     ]
     }
    ],
    "source": [
-    "len(y_train)"
+    "model.summary()"
    ]
   },
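+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "The `Param #` column can be checked by hand: a `Dense` layer with $n$ inputs and $m$ units has $n \\cdot m$ weights plus $m$ biases. Here this gives $784 \\cdot 500 + 500 = 392500$, $500 \\cdot 50 + 50 = 25050$ and $50 \\cdot 10 + 10 = 510$, i.e. $418060$ trainable parameters in total.\n"
+   ]
+  },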
   {
-   "cell_type": "code",
-   "execution_count": 75,
+   "cell_type": "markdown",
    "metadata": {},
-   "outputs": [
-    {
-     "data": {
-      "text/plain": [
-       "array([5, 0, 4, ..., 5, 6, 8], dtype=uint8)"
-      ]
-     },
-     "execution_count": 75,
-     "metadata": {},
-     "output_type": "execute_result"
-    }
-   ],
    "source": [
-    "y_train"
+    "# 3. Fit Network"
    ]
   },
   {
    "cell_type": "markdown",
    "metadata": {},
    "source": [
-    "Let us display an the fourth digit:"
+    "We’re now ready to train the model, which in Keras is done via a call to the model’s\n",
+    "`model.fit` method — we fit the model to its training data.\n"
    ]
   },
   {
    "cell_type": "code",
-   "execution_count": 76,
+   "execution_count": 16,
    "metadata": {},
    "outputs": [
     {
-     "data": {
-      "image/png": "iVBORw0KGgoAAAANSUhEUgAAAPsAAAD4CAYAAAAq5pAIAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjMuMSwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy/d3fzzAAAACXBIWXMAAAsTAAALEwEAmpwYAAANpElEQVR4nO3db6xU9Z3H8c9HtxpDS4TlSpCSvbXyhKwpbSaySbGyaRbUaLAmEokSTIj0ASY2qXENakqMGt0sbWpcmtBVSrUrmrQKD0yRJY3YJ4TRsAqarmggFdF70ZhSo7LY7z64h+aKd35zmf/l+34lNzNzvnPmfDP64cyc35nzc0QIwJnvrH43AKA3CDuQBGEHkiDsQBKEHUji73q5sRkzZsTw8HAvNwmkcvDgQR09etQT1doKu+0rJP1U0tmS/jMiHiw9f3h4WPV6vZ1NAiio1WoNay1/jLd9tqT/kHSlpHmSltue1+rrAeiudr6zXyrpQES8FRHHJW2RtLQzbQHotHbCPlvSH8c9frta9jm2V9uu266Pjo62sTkA7ej60fiI2BgRtYioDQ0NdXtzABpoJ+yHJc0Z9/ir1TIAA6idsO+RNNf212yfI+kGSds60xaATmt56C0iTti+VdJ2jQ29PRYR+zvWGYCOamucPSKek/Rch3oB0EWcLgskQdiBJAg7kARhB5Ig7EAShB1IgrADSRB2IAnCDiRB2IEkCDuQBGEHkiDsQBKEHUiCsANJEHYgCcIOJEHYgSQIO5AEYQeSIOxAEoQdSIKwA0kQdiAJwg4kQdiBJAg7kARhB5Ig7EAShB1Ioq0pm20flHRM0meSTkRErRNNAei8tsJe+eeIONqB1wHQRXyMB5JoN+wh6XnbL9lePdETbK+2XbddHx0dbXNzAFrVbtgXRsS3JF0paY3t75z6hIjYGBG1iKgNDQ21uTkArWor7BFxuLodkfSMpEs70RSAzms57Lan2P7KyfuSFkva16nGAHRWO0fjZ0p6xvbJ1/mviPhtR7oC0HEthz0i3pL0jQ72AqCLGHoDkiDsQBKEHUiCsANJEHYgiU78EAYDbPfu3cX6448/Xqzv2rWrWN+3r/VTK9avX1+sX3jhhcX6iy++WKyvWLGiYW3BggXFdc9E7NmBJAg7kARhB5Ig7EAShB1IgrADSRB2IAnG2c8ATz31VMPabbfdVly32aXCIqJYX7RoUbF+9Gjja5HefvvtxXWbadZbadtbtmxpa9t/i9izA0kQdiAJwg4kQdiBJAg7kARhB5Ig7EASjLMPgBMnThTre/bsKdZvueWWhrWPPvqouO7ll19erN9zzz3F+sKFC4v1Tz/9tGFt2bJlxXW3b99erDdTqzGp8Hjs2YEkCDuQBGEHkiDsQBKEHUiCsANJEHYgCcbZB8ATTzxRrK9atarl1168eHGxXvotvCRNnTq15W03e/12x9HnzJlTrK9cubKt1z/TNN2z237M9ojtfeOWTbe9w/Yb1e207rYJoF2T+Rj/C0lXnLLsTkk7I2KupJ3VYwADrGnYI2KXpA9OWbxU0ubq/mZJ13a2LQCd1uoBupkRcaS6/66kmY2eaHu17brterPrnQHonraPxsfYVf8aXvkvIjZGRC0iakNDQ+1uDkCLWg37e7ZnSVJ1O9K5lgB0Q6th3ybp5LjGSklbO9MOgG5pOs5u+0lJiyTNsP22pB9JelDS07ZXSTokqfzD5OTuvvvuYv2BBx4o1m0X62vWrGlYu++++4rrtjuO3sz999/ftdd++OGHi3W+Nn5e07BHxPIGpe92uBcAXcTpskAShB1IgrADSRB2IAnCDiTBT1w74N577y3Wmw2tnXvuucX6kiVLivWHHnqoYe28884rrtvMJ598Uqw///zzxfqhQ4ca1ppNudzsMtZLly4t1vF57NmBJAg7kARhB5Ig7EAShB1IgrADSRB2IAnG2Sfpww8/bFjbsGFDcd1mP1FtNo7+7LPPFuvtOHDgQLF+4403Fuv1er3lbV9//fXF+h133NHya+OL2LMDSRB2IAnCDiRB2IEkCDuQBGEHkiDsQBKMs0/S8ePHG9bandaq2SWRR0bKc3Bs2rSpYW3r1vIl/ffv31+sHzt2rFhvdg7BWWc13p/cdNNNxXWnTJlSrOP0sGcHkiDsQBKEHUiCsANJEHYgCcIOJEHYgSQYZ5+kc845p2HtggsuKK7bbJx8eHi4WG82lt2O2bNnF+vNpnR+5513ivUZM2Y0rF1zzTXFddFZTffsth+zPWJ737hl62wftr23+ruqu20CaNdkPsb/QtIVEyz/SUTMr/6e62xbADqtadgjYpekD3rQC4AuaucA3a22X6k+5k9r9CTbq23XbdfbPYccQOtaDfvPJH1d0nxJRyStb/TEiNgYEbWIqA0NDbW4OQDtainsEfFeRHwWEX+R9HNJl3a2LQCd1lLYbc8a9/B7kvY1ei6AwdB0nN32k5IWSZph+21JP5K0yPZ8SSHpoKTvd6/FwXD++ec3rDW7rvvVV19drL///vvF+sUXX1ysl+Ypv/nmm4vrTp8+vVi/4YYbivVm4+zN1kfvNA17RCyfYPGjXegFQBdxuiyQBGEHkiDsQBKEHUiCsANJ8BPXDliwYEGxPsinCe/atatYf+GFF4r1Zj+/veiii067J3QHe3YgCcIOJEHYgSQIO5AEYQeSIOxAEoQdSIJx9uQ+/vjjYr3ZOHqzOj9xHRzs2YEkCDuQBGEHkiDsQBKEHUiCsANJEHYgCcbZk1uyZEm/W0CPsGcHkiDsQBKEHUiCsANJEHYgCcIOJEHYgSQYZ09u+/bt/W4BPdJ0z257ju3f2X7N9n7bt1XLp9veYfuN6nZa99sF0KrJfIw/IemHETFP0j9JWmN7nqQ7Je2MiLmSdlaPAQyopmGPiCMR8XJ1/5ik1yXNlrRU0ubqaZslXdulHgF0wGkdoLM9LOmbknZLmhkRR6rSu5JmNlhnte267fogz3kGnOkmHXbbX5b0a0k/iIg/ja9FREiKidaLiI0RUYuI2tDQUFvNAmjdpMJu+0saC/qvIuI31eL3bM+q6rMkjXSnRQCd0HTozWPXCn5U0usR8eNxpW2SVkp6sLrd2pUO0VVvvvlmv1tAj0xmnP3bklZIetX23mrZWo2F/GnbqyQdkrSsKx0C6IimYY+I30tqNBPAdzvbDoBu4XRZIAnCDiRB2IEkCDuQBGEHkuAnrslddtllxfrYyZE4E7BnB5Ig7EAShB1IgrADSRB2IAnCDiRB2IEkGGdP7pJLLinW586dW6w3+z18qc6Vi3qLPTuQBGEHkiDsQBKEHUiCsANJEHYgCcIOJME4O4rWrl1brK9atarl9R955JHiuvPmzSvWcXrYswNJEHYgCcIOJEHYgSQIO5AEYQeSIOxAEpOZn32OpF9KmikpJG2MiJ/aXifpFkmj1VPXRsRz3WoU/XHdddcV61u2bCnWd+zY0bC2bt264rqbNm0q1qdMmVKs4/Mmc1LNCUk/jIiXbX9F0ku2T/4X/ElE/Hv32gPQKZOZn/
2IpCPV/WO2X5c0u9uNAeis0/rObntY0jcl7a4W3Wr7FduP2Z7WYJ3Vtuu266OjoxM9BUAPTDrstr8s6deSfhARf5L0M0lflzRfY3v+9ROtFxEbI6IWETWuOQb0z6TCbvtLGgv6ryLiN5IUEe9FxGcR8RdJP5d0affaBNCupmG3bUmPSno9In48bvmscU/7nqR9nW8PQKdM5mj8tyWtkPSq7b3VsrWSltuer7HhuIOSvt+F/tBnU6dOLdaffvrpYv2uu+5qWNuwYUNx3WZDc/wE9vRM5mj87yV5ghJj6sDfEM6gA5Ig7EAShB1IgrADSRB2IAnCDiThiOjZxmq1WtTr9Z5tD8imVqupXq9PNFTOnh3IgrADSRB2IAnCDiRB2IEkCDuQBGEHkujpOLvtUUmHxi2aIelozxo4PYPa26D2JdFbqzrZ2z9ExITXf+tp2L+wcbseEbW+NVAwqL0Nal8SvbWqV73xMR5IgrADSfQ77Bv7vP2SQe1tUPuS6K1VPemtr9/ZAfROv/fsAHqEsANJ9CXstq+w/QfbB2zf2Y8eGrF90Partvfa7uuP76s59EZs7xu3bLrtHbbfqG4nnGOvT72ts324eu/22r6qT73Nsf0726/Z3m/7tmp5X9+7Ql89ed96/p3d9tmS/lfSv0h6W9IeScsj4rWeNtKA7YOSahHR9xMwbH9H0p8l/TIi/rFa9m+SPoiIB6t/KKdFxL8OSG/rJP2539N4V7MVzRo/zbikayXdrD6+d4W+lqkH71s/9uyXSjoQEW9FxHFJWyQt7UMfAy8idkn64JTFSyVtru5v1tj/LD3XoLeBEBFHIuLl6v4xSSenGe/re1foqyf6EfbZkv447vHbGqz53kPS87Zfsr26381MYGZEHKnuvytpZj+bmUDTabx76ZRpxgfmvWtl+vN2cYDuixZGxLckXSlpTfVxdSDF2HewQRo7ndQ03r0ywTTjf9XP967V6c/b1Y+wH5Y0Z9zjr1bLBkJEHK5uRyQ9o8Gbivq9kzPoVrcjfe7nrwZpGu+JphnXALx3/Zz+vB9h3yNpru2v2T5H0g2StvWhjy+wPaU6cCLbUyQt1uBNRb1N0srq/kpJW/vYy+cMyjTejaYZV5/fu75Pfx4RPf+TdJXGjsi/KemufvTQoK+LJP1P9be/371JelJjH+v+T2PHNlZJ+ntJOyW9Iem/JU0foN4el/SqpFc0FqxZfeptocY+or8iaW/1d1W/37tCXz153zhdFkiCA3RAEoQdSIKwA0kQdiAJwg4kQdiBJAg7kMT/A38cJNEbCe0NAAAAAElFTkSuQmCC\n",
-      "text/plain": [
-       "<Figure size 432x288 with 1 Axes>"
-      ]
-     },
-     "metadata": {
-      "needs_background": "light"
-     },
-     "output_type": "display_data"
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "Epoch 1/5\n",
+      "469/469 [==============================] - 8s 15ms/step - loss: 1.1797 - accuracy: 0.7119\n",
+      "Epoch 2/5\n",
+      "469/469 [==============================] - 7s 15ms/step - loss: 0.4722 - accuracy: 0.8792\n",
+      "Epoch 3/5\n",
+      "469/469 [==============================] - 7s 15ms/step - loss: 0.3663 - accuracy: 0.9000\n",
+      "Epoch 4/5\n",
+      "469/469 [==============================] - 8s 16ms/step - loss: 0.3208 - accuracy: 0.9111 0s - loss: 0.321\n",
+      "Epoch 5/5\n",
+      "469/469 [==============================] - 7s 15ms/step - loss: 0.2929 - accuracy: 0.9183\n"
+     ]
     }
    ],
    "source": [
-    "import matplotlib.pyplot as plt\n",
-    "digit = X_train[4]\n",
-    "plt.imshow(digit, cmap=plt.cm.binary)\n",
-    "plt.show()"
+    "history = model.fit(X_train, y_train_cat, epochs=5, batch_size=128)"
    ]
   },
   {
-   "cell_type": "code",
-   "execution_count": 77,
+   "cell_type": "markdown",
    "metadata": {},
-   "outputs": [
-    {
-     "data": {
-      "text/plain": [
-       "9"
-      ]
-     },
-     "execution_count": 77,
-     "metadata": {},
-     "output_type": "execute_result"
-    }
-   ],
    "source": [
-    "y_train[4]"
+    "Two quantities are displayed during training: the __loss__ of the model over the training\n",
+    "data, and the __accuracy__ of the model over the training data. We quickly reach an accuracy of $0.92$ on the training data."
    ]
   },
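+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Both quantities are also stored, epoch by epoch, in the `history` object returned by `fit()`. As an optional sketch (not needed for the rest of the notebook), we can plot them to see how training progressed:\n"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "import matplotlib.pyplot as plt\n",
+    "\n",
+    "# history.history is a dict holding one list per tracked quantity (one entry per epoch)\n",
+    "plt.plot(history.history['loss'], label='training loss')\n",
+    "plt.plot(history.history['accuracy'], label='training accuracy')\n",
+    "plt.xlabel('epoch')\n",
+    "plt.legend()\n",
+    "plt.show()"
+   ]
+  },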
   {
    "cell_type": "markdown",
    "metadata": {},
    "source": [
-    "And the test data:"
+    "##  4. Evaluate Network\n",
+    "\n",
+    "On average, how good is our model at classifying never-before-seen digits? Let’s\n",
+    "check by computing average accuracy over the entire test set."
    ]
   },
   {
    "cell_type": "code",
-   "execution_count": 78,
+   "execution_count": 24,
    "metadata": {},
    "outputs": [
     {
-     "data": {
-      "text/plain": [
-       "(10000, 28, 28)"
-      ]
-     },
-     "execution_count": 78,
-     "metadata": {},
-     "output_type": "execute_result"
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "313/313 [==============================] - 2s 5ms/step - loss: 0.2683 - accuracy: 0.9264\n",
+      "test_acc: 0.9264000058174133\n"
+     ]
     }
    ],
    "source": [
-    "X_test.shape"
+    "test_loss, test_acc = model.evaluate(X_test, y_test_cat)\n",
+    "print(f\"test_acc: {test_acc}\")"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "## 5. Make Predictions\n",
+    "\n",
+    "Now that we have a trained model, we can use it to predict class probabilities for\n",
+    "new digits—images that weren’t part of the training data, like those from the test set."
    ]
   },
   {
    "cell_type": "code",
-   "execution_count": 79,
+   "execution_count": 19,
    "metadata": {},
    "outputs": [
     {
      "data": {
       "text/plain": [
-       "10000"
+       "array([7, 2, 1, 0, 4, 1, 4, 9, 6, 9])"
       ]
      },
-     "execution_count": 79,
+     "execution_count": 19,
      "metadata": {},
      "output_type": "execute_result"
     }
    ],
    "source": [
-    "len(y_test)"
+    "import numpy as np\n",
+    "predictions=model.predict(X_test[0:10])\n",
+    "np.argmax(predictions, axis=1)"
    ]
   },
   {
    "cell_type": "code",
-   "execution_count": 80,
+   "execution_count": 20,
    "metadata": {},
    "outputs": [
     {
      "data": {
       "text/plain": [
-       "array([7, 2, 1, ..., 4, 5, 6], dtype=uint8)"
+       "array([7, 2, 1, 0, 4, 1, 4, 9, 5, 9], dtype=uint8)"
       ]
      },
-     "execution_count": 80,
+     "execution_count": 20,
      "metadata": {},
      "output_type": "execute_result"
     }
    ],
    "source": [
-    "y_test"
+    "y_test[0:10]"
    ]
   },
   {
    "cell_type": "markdown",
    "metadata": {},
    "source": [
-    "Before training, we’ll preprocess the data by reshaping it into the shape the model\n",
-    "expects and scaling it so that all values are in the $[0, 1]$ interval. Previously, our training images were stored in an array of shape $(60000, 28, 28)$ of type `uint8` with values in the $[0, 255]$ interval. We’ll transform it into a `float32` array of shape $(60000, 28 * 28)$ with values between $0$ and $1$."
+    "Each number of index $i$ in that array corresponds to the probability that digit image\n",
+    "`X_test[0]` belongs to class $i$. This first test digit has the highest probability score (0.9956499, almost 1) at\n",
+    "index 7, so according to our model, it must be a 7:"
    ]
   },
   {
    "cell_type": "code",
-   "execution_count": 81,
+   "execution_count": null,
    "metadata": {},
-   "outputs": [
-    {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "(60000, 784)\n",
-      "(10000, 784)\n"
-     ]
-    }
-   ],
+   "outputs": [],
    "source": [
-    "X_train = X_train.reshape((60000, 28 * 28))\n",
-    "print(X_train.shape)\n",
-    "X_train = X_train.astype(\"float32\") / 255\n",
-    "X_test = X_test.reshape((10000, 28 * 28))\n",
-    "X_test = X_test.astype(\"float32\") / 255\n",
-    "print(X_test.shape)"
+    "predictions[0]"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "predictions[0].argmax()"
    ]
   },
   {
    "cell_type": "markdown",
    "metadata": {},
    "source": [
-    "In addition, we need to _one-hot-encode_ the labels: "
+    "We can check that the test label agrees:"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "y_test[0]"
    ]
   },
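+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "As a final quick check (an optional aside), the ten softmax outputs for an image should form a probability distribution, i.e. sum to (approximately) 1:\n"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# The 10 class probabilities of the first test image should sum to ~1\n",
+    "predictions[0].sum()"
+   ]
+  },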
   {
-   "cell_type": "code",
-   "execution_count": 82,
+   "cell_type": "markdown",
    "metadata": {},
-   "outputs": [
-    {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "5\n",
-      "[0. 0. 0. 0. 0. 1. 0. 0. 0. 0.]\n"
-     ]
-    }
-   ],
    "source": [
-    "from tensorflow.keras.utils import to_categorical\n",
-    "y_train_cat = to_categorical(y_train)\n",
-    "print(y_train[0])\n",
-    "print(y_train_cat[0])"
+    "# Part 3 : Toy Neural Network with Keras"
    ]
   },
   {
    "cell_type": "code",
-   "execution_count": 83,
+   "execution_count": 24,
    "metadata": {},
    "outputs": [
     {
      "name": "stdout",
      "output_type": "stream",
      "text": [
-      "7\n",
-      "[0. 0. 0. 0. 0. 0. 0. 1. 0. 0.]\n"
+      "2.7.1\n"
      ]
     }
    ],
    "source": [
-    "y_test_cat = to_categorical(y_test)\n",
-    "print(y_test[0])\n",
-    "print(y_test_cat[0])"
-   ]
-  },
-  {
-   "cell_type": "markdown",
-   "metadata": {},
-   "source": [
-    "The workflow will be as follows: First, we’ll feed the neural network the training data, `X_train` and `y_train`. The network will then learn to associate images and labels. Finally, we’ll ask the network to produce predictions for test_images, and we’ll verify whether these predictions match the labels from `test_labels`."
+    "# Import TensorFlow \n",
+    "import tensorflow as tf\n",
+    "\n",
+    "# Helper libraries\n",
+    "import math\n",
+    "import numpy as np\n",
+    "import matplotlib.pyplot as plt\n",
+    "\n",
+    "\n",
+    "print(tf.__version__)\n",
+    "\n"
    ]
   },
   {
    "cell_type": "markdown",
    "metadata": {},
    "source": [
-    "## 1. Define Networks\n",
-    "\n",
-    "Building the neural network requires configuring the layers of the model, then compiling the model.\n",
-    "\n",
+    "## 1. Define Your Network\n",
     "\n",
-    "#### Setup the layers\n",
+    "The `tf.keras.models.Sequential` class is a wrapper for the neural network model that treats \n",
+    "the network as a sequence of layers. It implements the Keras model interface with common \n",
+    "methods like `compile()`, `fit()`, and `evaluate()` that are used to train and \n",
+    "run the model. We'll cover these functions soon, but first let's start looking at the layers of the model.\n",
     "\n",
-    "The basic building block of a neural network is the *layer*. A layer extracts a representation from the data fed into it. Hopefully, a series of connected layers results in a representation that is meaningful for the problem at hand.\n",
+    "#### Layers\n",
     "\n",
-    "Much of deep learning consists of chaining together simple layers. Most layers, like `tf.keras.layers.Dense`, have internal parameters which are adjusted (\"learned\") during training."
+    "The Keras Layer class provides a common interface for a variety of standard neural network layers. You can add a layer to a model using the \n",
+    "model's `add()` method. For example, a simple model with a single hidden layer might look like this for the spiral dataset:"
    ]
   },
   {
    "cell_type": "code",
-   "execution_count": 84,
+   "execution_count": 25,
    "metadata": {},
    "outputs": [],
    "source": [
-    "from tensorflow import keras\n",
-    "from tensorflow.keras import layers\n",
-    "\n",
-    "model = keras.Sequential([\n",
-    "layers.Dense(500, activation=\"relu\", input_shape=(784,)),\n",
-    "layers.Dense(50, activation=\"relu\"),    \n",
-    "layers.Dense(10, activation=\"softmax\")\n",
-    "])"
+    "model = tf.keras.Sequential()\n",
+    "# From Input to first hidden layer\n",
+    "model.add(tf.keras.layers.Dense(100, activation= tf.nn.relu, \n",
+    "                                input_shape=(2,)))\n",
+    "# From first hidden layer to output layer\n",
+    "model.add(tf.keras.layers.Dense(3, activation=tf.nn.softmax))"
    ]
   },
   {
    "cell_type": "markdown",
    "metadata": {},
    "source": [
-    "This network has three layers:\n",
-    "\n",
-    "\n",
-    "\n",
-    "* **\"hidden\"** `layers.Dense`— A densely connected layer of 500 neurons. Each neuron (or node) takes input from all 784 nodes in the previous layer - by specifying an `input_shape` to the first layer in the Sequential model - , weighting that input according to hidden parameters which will be learned during training, and outputs a single value to the next layer.  \n",
+    "## 2. Compile Network\n",
     "\n",
-    "* **\"hidden\"** `layers.Dense`— A densely connected layer of 50 neurons. Each neuron (or node) takes input from all 500 nodes in the previous layer, weighting that input according to hidden parameters which will be learned during training, and outputs a single value to the next layer.\n",
     "\n",
-    "* **output** `layers.Dense` — A 10-node *softmax* layer, with each node representing a class of clothing. As in the previous layer, each node takes input from the 50 nodes in the layer before it. Each node weights the input according to learned parameters, and then outputs a value in the range `[0, 1]`, representing the probability that the image belongs to that class. The sum of all 10 node values is 1.\n"
+    "Once we have our model built, we need to compile it before it can be run. Compiling the Keras \n",
+    "model calls the backend (tensorflow, theano, etc.) and binds the optimizer, loss function, \n",
+    "and other parameters required before the model can be run on any input data. We'll specify the \n",
+    "loss function to be `categorical_crossentropy`, \n",
+    "and specify `adam` as the optimizer (which is a reasonable default when speed is a priority). And finally, \n",
+    "we can specify what metrics we want to evaluate the model with. Here we'll use `accuracy`.\n"
    ]
   },
   {
-   "cell_type": "markdown",
+   "cell_type": "code",
+   "execution_count": 26,
    "metadata": {},
+   "outputs": [
+    {
+     "data": {
+      "text/plain": [
+       "(300, 2)"
+      ]
+     },
+     "execution_count": 26,
+     "metadata": {},
+     "output_type": "execute_result"
+    }
+   ],
    "source": [
-    "## 2. Compile Networks\n",
-    "\n",
-    "\n",
-    "Before the model is ready for training, it needs a few more settings. These are added during the model's *compile* step:\n",
-    "\n",
-    "\n",
-    "* *Loss function* — An algorithm for measuring how far the model's outputs are from the desired output. The goal of training is this measures loss.\n",
-    "* *Optimizer* —An algorithm for adjusting the inner parameters of the model in order to minimize loss.\n",
-    "* *Metrics* —Used to monitor the training and testing steps. The following example uses *accuracy*, the fraction of the images that are correctly classified."
+    "model.compile(optimizer='adam',\n",
+    "              loss='categorical_crossentropy',\n",
+    "              metrics=['accuracy'])\n",
+    "X.shape"
    ]
   },
   {
-   "cell_type": "code",
-   "execution_count": 85,
+   "cell_type": "markdown",
    "metadata": {},
-   "outputs": [],
    "source": [
-    "model.compile(optimizer='sgd',\n",
-    "              loss='categorical_crossentropy',\n",
-    "              metrics=['accuracy'])"
+    "We can see the resulting model architecture with the following command:"
    ]
   },
   {
    "cell_type": "code",
-   "execution_count": 86,
+   "execution_count": 27,
    "metadata": {},
    "outputs": [
     {
      "name": "stdout",
      "output_type": "stream",
      "text": [
-      "Model: \"sequential_4\"\n",
+      "Model: \"sequential_2\"\n",
       "_________________________________________________________________\n",
       " Layer (type)                Output Shape              Param #   \n",
       "=================================================================\n",
-      " dense_12 (Dense)            (None, 500)               392500    \n",
-      "                                                                 \n",
-      " dense_13 (Dense)            (None, 50)                25050     \n",
+      " dense_4 (Dense)             (None, 100)               300       \n",
       "                                                                 \n",
-      " dense_14 (Dense)            (None, 10)                510       \n",
+      " dense_5 (Dense)             (None, 3)                 303       \n",
       "                                                                 \n",
       "=================================================================\n",
-      "Total params: 418,060\n",
-      "Trainable params: 418,060\n",
+      "Total params: 603\n",
+      "Trainable params: 603\n",
       "Non-trainable params: 0\n",
       "_________________________________________________________________\n"
      ]
@@ -3739,207 +1774,2128 @@
    "cell_type": "markdown",
    "metadata": {},
    "source": [
-    "# 3. Fit Network"
-   ]
-  },
-  {
-   "cell_type": "markdown",
-   "metadata": {},
-   "source": [
-    "We’re now ready to train the model, which in Keras is done via a call to the model’s\n",
-    "`model.fit` method — we fit the model to its training data.\n"
+    "## 3. Fit Network\n",
+    "\n",
+    "The model is trained with the `fit()` method, through the following command that specifies the \n",
+    "number of training epochs and the message level (how much information we want displayed on the screen \n",
+    "during training).\n",
+    "\n",
+    "Before starting, we need to one-hot-encode the labels.\n"
    ]
   },
   {
    "cell_type": "code",
-   "execution_count": 94,
+   "execution_count": 28,
    "metadata": {},
    "outputs": [
     {
      "name": "stdout",
      "output_type": "stream",
      "text": [
-      "Epoch 1/5\n",
-      "469/469 [==============================] - 7s 15ms/step - loss: 0.2764 - accuracy: 0.9218\n",
-      "Epoch 2/5\n",
-      "469/469 [==============================] - 7s 15ms/step - loss: 0.2598 - accuracy: 0.9262 0s - los\n",
-      "Epoch 3/5\n",
-      "469/469 [==============================] - 6s 14ms/step - loss: 0.2455 - accuracy: 0.9304\n",
-      "Epoch 4/5\n",
-      "469/469 [==============================] - 7s 15ms/step - loss: 0.2331 - accuracy: 0.9341\n",
-      "Epoch 5/5\n",
-      "469/469 [==============================] - 7s 15ms/step - loss: 0.2216 - accuracy: 0.9379\n"
+      "[[0 0 1]\n",
+      " [0 1 0]\n",
+      " [1 0 0]]\n",
+      "[[1 0 0]\n",
+      " [1 0 0]\n",
+      " [1 0 0]]\n"
      ]
     }
    ],
    "source": [
-    "history = model.fit(X_train, y_train_cat, epochs=5, batch_size=128)"
+    "def convertToOneHot(vector, num_classes=None):\n",
+    "    result = np.zeros((len(vector), num_classes), dtype='uint8')\n",
+    "    result[np.arange(len(vector)), vector] = 1\n",
+    "    return result\n",
+    "print(convertToOneHot([2,1,0], 3))\n",
+    "\n",
+    "# One-hot-encoded labels of spiral datset\n",
+    "y_cat = convertToOneHot(y,3)\n",
+    "print(y_cat[:3])"
    ]
   },
   {
    "cell_type": "markdown",
    "metadata": {},
    "source": [
-    "Two quantities are displayed during training: the __loss__ of the model over the training\n",
-    "data, and the __accuracy__ of the model over the training data. We quickly reach an accuracy of $0.92$ on the training data."
+    "or alternatively"
    ]
   },
   {
-   "cell_type": "markdown",
+   "cell_type": "code",
+   "execution_count": 29,
    "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "[[1. 0. 0.]\n",
+      " [1. 0. 0.]\n",
+      " [1. 0. 0.]]\n"
+     ]
+    }
+   ],
    "source": [
-    "##  4. Evaluate Network\n",
-    "\n",
-    "On average, how good is our model at classifying never-before-seen digits? Let’s\n",
-    "check by computing average accuracy over the entire test set."
+    "from tensorflow.keras import utils\n",
+    "y_cat = utils.to_categorical(y, 3)\n",
+    "print(y_cat[:3])"
    ]
   },
   {
    "cell_type": "code",
-   "execution_count": 101,
+   "execution_count": 30,
    "metadata": {},
    "outputs": [
     {
      "name": "stdout",
      "output_type": "stream",
      "text": [
-      "313/313 [==============================] - 1s 4ms/step - loss: 0.2117 - accuracy: 0.9394\n",
-      "Accuracy : 0.9394000172615051\n"
+      "Epoch 1/1000\n",
+      "3/3 [==============================] - 1s 3ms/step - loss: 1.0828 - accuracy: 0.3133\n",
+      "Epoch 2/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 1.0702 - accuracy: 0.3733\n",
+      "Epoch 3/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 1.0585 - accuracy: 0.4300\n",
+      "Epoch 4/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 1.0472 - accuracy: 0.5100\n",
+      "Epoch 5/1000\n",
+      "3/3 [==============================] - 0s 16ms/step - loss: 1.0359 - accuracy: 0.5367\n",
+      "Epoch 6/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 1.0246 - accuracy: 0.5500\n",
+      "Epoch 7/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 1.0135 - accuracy: 0.5533\n",
+      "Epoch 8/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 1.0028 - accuracy: 0.5600\n",
+      "Epoch 9/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.9918 - accuracy: 0.5500\n",
+      "Epoch 10/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.9819 - accuracy: 0.5500\n",
+      "Epoch 11/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.9713 - accuracy: 0.5533\n",
+      "Epoch 12/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.9609 - accuracy: 0.5533\n",
+      "Epoch 13/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.9512 - accuracy: 0.5533\n",
+      "Epoch 14/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.9410 - accuracy: 0.5533\n",
+      "Epoch 15/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.9311 - accuracy: 0.5533\n",
+      "Epoch 16/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.9219 - accuracy: 0.5500\n",
+      "Epoch 17/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.9123 - accuracy: 0.5533\n",
+      "Epoch 18/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.9033 - accuracy: 0.5500\n",
+      "Epoch 19/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.8944 - accuracy: 0.5533\n",
+      "Epoch 20/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.8854 - accuracy: 0.5533\n",
+      "Epoch 21/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.8767 - accuracy: 0.5533\n",
+      "Epoch 22/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.8686 - accuracy: 0.5533\n",
+      "Epoch 23/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.8600 - accuracy: 0.5533\n",
+      "Epoch 24/1000\n",
+      "3/3 [==============================] - 0s 7ms/step - loss: 0.8524 - accuracy: 0.5533\n",
+      "Epoch 25/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.8446 - accuracy: 0.5533\n",
+      "Epoch 26/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.8372 - accuracy: 0.5533\n",
+      "Epoch 27/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.8299 - accuracy: 0.5500\n",
+      "Epoch 28/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.8230 - accuracy: 0.5500\n",
+      "Epoch 29/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.8165 - accuracy: 0.5500\n",
+      "Epoch 30/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.8102 - accuracy: 0.5500\n",
+      "Epoch 31/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.8041 - accuracy: 0.5533\n",
+      "Epoch 32/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.7979 - accuracy: 0.5533\n",
+      "Epoch 33/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.7924 - accuracy: 0.5533\n",
+      "Epoch 34/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.7870 - accuracy: 0.5533\n",
+      "Epoch 35/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.7817 - accuracy: 0.5533\n",
+      "Epoch 36/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.7769 - accuracy: 0.5500\n",
+      "Epoch 37/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.7721 - accuracy: 0.5500\n",
+      "Epoch 38/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.7673 - accuracy: 0.5533\n",
+      "Epoch 39/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.7627 - accuracy: 0.5500\n",
+      "Epoch 40/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.7585 - accuracy: 0.5500\n",
+      "Epoch 41/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.7545 - accuracy: 0.5533\n",
+      "Epoch 42/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.7504 - accuracy: 0.5567\n",
+      "Epoch 43/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.7466 - accuracy: 0.5567\n",
+      "Epoch 44/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.7430 - accuracy: 0.5600\n",
+      "Epoch 45/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.7392 - accuracy: 0.5600\n",
+      "Epoch 46/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.7359 - accuracy: 0.5600\n",
+      "Epoch 47/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.7324 - accuracy: 0.5600\n",
+      "Epoch 48/1000\n",
+      "3/3 [==============================] - 0s 5ms/step - loss: 0.7291 - accuracy: 0.5600\n",
+      "Epoch 49/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.7259 - accuracy: 0.5633\n",
+      "Epoch 50/1000\n",
+      "3/3 [==============================] - 0s 5ms/step - loss: 0.7229 - accuracy: 0.5633\n",
+      "Epoch 51/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.7198 - accuracy: 0.5633\n",
+      "Epoch 52/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.7170 - accuracy: 0.5667\n",
+      "Epoch 53/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.7141 - accuracy: 0.5700\n",
+      "Epoch 54/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.7113 - accuracy: 0.5733\n",
+      "Epoch 55/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.7084 - accuracy: 0.5733\n",
+      "Epoch 56/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.7059 - accuracy: 0.5733\n",
+      "Epoch 57/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.7032 - accuracy: 0.5733\n",
+      "Epoch 58/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.7005 - accuracy: 0.5733\n",
+      "Epoch 59/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.6979 - accuracy: 0.5700\n",
+      "Epoch 60/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.6956 - accuracy: 0.5700\n",
+      "Epoch 61/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.6930 - accuracy: 0.5733\n",
+      "Epoch 62/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.6904 - accuracy: 0.5733\n",
+      "Epoch 63/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.6880 - accuracy: 0.5733\n",
+      "Epoch 64/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.6856 - accuracy: 0.5733\n",
+      "Epoch 65/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.6834 - accuracy: 0.5733\n",
+      "Epoch 66/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.6809 - accuracy: 0.5733\n",
+      "Epoch 67/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.6787 - accuracy: 0.5733\n",
+      "Epoch 68/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.6762 - accuracy: 0.5733\n",
+      "Epoch 69/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.6741 - accuracy: 0.5733\n",
+      "Epoch 70/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.6716 - accuracy: 0.5800\n",
+      "Epoch 71/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.6694 - accuracy: 0.5800\n",
+      "Epoch 72/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.6673 - accuracy: 0.5833\n",
+      "Epoch 73/1000\n",
+      "3/3 [==============================] - 0s 5ms/step - loss: 0.6650 - accuracy: 0.5833\n",
+      "Epoch 74/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.6627 - accuracy: 0.5833\n",
+      "Epoch 75/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.6605 - accuracy: 0.5867\n",
+      "Epoch 76/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.6584 - accuracy: 0.5833\n",
+      "Epoch 77/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.6561 - accuracy: 0.5900\n",
+      "Epoch 78/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.6541 - accuracy: 0.5867\n",
+      "Epoch 79/1000\n",
+      "3/3 [==============================] - 0s 7ms/step - loss: 0.6517 - accuracy: 0.5900\n",
+      "Epoch 80/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.6496 - accuracy: 0.5933\n",
+      "Epoch 81/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.6473 - accuracy: 0.5967\n",
+      "Epoch 82/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.6453 - accuracy: 0.5967\n",
+      "Epoch 83/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.6430 - accuracy: 0.5967\n",
+      "Epoch 84/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.6409 - accuracy: 0.5967\n",
+      "Epoch 85/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.6388 - accuracy: 0.5967\n",
+      "Epoch 86/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.6366 - accuracy: 0.5967\n",
+      "Epoch 87/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.6344 - accuracy: 0.6000\n",
+      "Epoch 88/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.6325 - accuracy: 0.6000\n",
+      "Epoch 89/1000\n",
+      "3/3 [==============================] - 0s 6ms/step - loss: 0.6301 - accuracy: 0.6000\n",
+      "Epoch 90/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.6279 - accuracy: 0.6033\n",
+      "Epoch 91/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.6259 - accuracy: 0.6033\n",
+      "Epoch 92/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.6236 - accuracy: 0.6067\n",
+      "Epoch 93/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.6216 - accuracy: 0.6133\n",
+      "Epoch 94/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.6194 - accuracy: 0.6133\n",
+      "Epoch 95/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.6173 - accuracy: 0.6167\n",
+      "Epoch 96/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.6150 - accuracy: 0.6233\n",
+      "Epoch 97/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.6129 - accuracy: 0.6300\n",
+      "Epoch 98/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.6107 - accuracy: 0.6300\n",
+      "Epoch 99/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.6087 - accuracy: 0.6333\n",
+      "Epoch 100/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.6064 - accuracy: 0.6367\n",
+      "Epoch 101/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.6044 - accuracy: 0.6367\n",
+      "Epoch 102/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.6022 - accuracy: 0.6400\n",
+      "Epoch 103/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.6000 - accuracy: 0.6433\n",
+      "Epoch 104/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.5979 - accuracy: 0.6433\n",
+      "Epoch 105/1000\n",
+      "3/3 [==============================] - 0s 5ms/step - loss: 0.5957 - accuracy: 0.6433\n",
+      "Epoch 106/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.5937 - accuracy: 0.6500\n",
+      "Epoch 107/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.5915 - accuracy: 0.6533\n",
+      "Epoch 108/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.5895 - accuracy: 0.6600\n",
+      "Epoch 109/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.5871 - accuracy: 0.6633\n",
+      "Epoch 110/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.5849 - accuracy: 0.6633\n",
+      "Epoch 111/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.5829 - accuracy: 0.6700\n",
+      "Epoch 112/1000\n",
+      "3/3 [==============================] - 0s 6ms/step - loss: 0.5806 - accuracy: 0.6733\n",
+      "Epoch 113/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.5786 - accuracy: 0.6767\n",
+      "Epoch 114/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.5767 - accuracy: 0.6800\n",
+      "Epoch 115/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.5744 - accuracy: 0.6800\n",
+      "Epoch 116/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.5722 - accuracy: 0.6867\n",
+      "Epoch 117/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.5701 - accuracy: 0.6867\n",
+      "Epoch 118/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.5678 - accuracy: 0.6867\n",
+      "Epoch 119/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.5657 - accuracy: 0.6867\n",
+      "Epoch 120/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.5635 - accuracy: 0.6900\n",
+      "Epoch 121/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.5615 - accuracy: 0.6900\n",
+      "Epoch 122/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.5593 - accuracy: 0.6900\n",
+      "Epoch 123/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.5572 - accuracy: 0.6867\n",
+      "Epoch 124/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.5551 - accuracy: 0.6867\n",
+      "Epoch 125/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.5528 - accuracy: 0.6900\n",
+      "Epoch 126/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.5506 - accuracy: 0.6900\n",
+      "Epoch 127/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.5485 - accuracy: 0.6900\n",
+      "Epoch 128/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.5465 - accuracy: 0.6967\n",
+      "Epoch 129/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.5442 - accuracy: 0.7000\n",
+      "Epoch 130/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.5421 - accuracy: 0.7000\n",
+      "Epoch 131/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.5399 - accuracy: 0.7033\n",
+      "Epoch 132/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.5378 - accuracy: 0.7067\n",
+      "Epoch 133/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.5357 - accuracy: 0.7067\n",
+      "Epoch 134/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.5336 - accuracy: 0.7067\n",
+      "Epoch 135/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.5314 - accuracy: 0.7067\n",
+      "Epoch 136/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.5295 - accuracy: 0.7067\n",
+      "Epoch 137/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.5271 - accuracy: 0.7100\n",
+      "Epoch 138/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.5249 - accuracy: 0.7100\n",
+      "Epoch 139/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.5231 - accuracy: 0.7100\n",
+      "Epoch 140/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.5208 - accuracy: 0.7133\n",
+      "Epoch 141/1000\n",
+      "3/3 [==============================] - ETA: 0s - loss: 0.5236 - accuracy: 0.72 - 0s 3ms/step - loss: 0.5185 - accuracy: 0.7133\n",
+      "Epoch 142/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.5164 - accuracy: 0.7133\n",
+      "Epoch 143/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.5143 - accuracy: 0.7133\n",
+      "Epoch 144/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.5120 - accuracy: 0.7133\n",
+      "Epoch 145/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.5102 - accuracy: 0.7167\n",
+      "Epoch 146/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.5079 - accuracy: 0.7167\n",
+      "Epoch 147/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.5058 - accuracy: 0.7233\n",
+      "Epoch 148/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.5037 - accuracy: 0.7233\n",
+      "Epoch 149/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.5017 - accuracy: 0.7233\n",
+      "Epoch 150/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.4995 - accuracy: 0.7200\n",
+      "Epoch 151/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.4974 - accuracy: 0.7233\n",
+      "Epoch 152/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.4953 - accuracy: 0.7233\n",
+      "Epoch 153/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.4933 - accuracy: 0.7233\n",
+      "Epoch 154/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.4913 - accuracy: 0.7300\n",
+      "Epoch 155/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.4892 - accuracy: 0.7333\n",
+      "Epoch 156/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.4871 - accuracy: 0.7333\n",
+      "Epoch 157/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.4850 - accuracy: 0.7333\n",
+      "Epoch 158/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.4831 - accuracy: 0.7367\n",
+      "Epoch 159/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.4810 - accuracy: 0.7433\n",
+      "Epoch 160/1000\n",
+      "3/3 [==============================] - 0s 5ms/step - loss: 0.4789 - accuracy: 0.7400\n",
+      "Epoch 161/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.4768 - accuracy: 0.7433\n",
+      "Epoch 162/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.4751 - accuracy: 0.7433\n",
+      "Epoch 163/1000\n",
+      "3/3 [==============================] - 0s 5ms/step - loss: 0.4729 - accuracy: 0.7433\n",
+      "Epoch 164/1000\n",
+      "3/3 [==============================] - 0s 5ms/step - loss: 0.4710 - accuracy: 0.7467\n",
+      "Epoch 165/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.4688 - accuracy: 0.7467\n",
+      "Epoch 166/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.4669 - accuracy: 0.7500\n",
+      "Epoch 167/1000\n",
+      "3/3 [==============================] - 0s 11ms/step - loss: 0.4649 - accuracy: 0.7500\n",
+      "Epoch 168/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.4629 - accuracy: 0.7500\n",
+      "Epoch 169/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.4611 - accuracy: 0.7533\n",
+      "Epoch 170/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.4591 - accuracy: 0.7533\n",
+      "Epoch 171/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.4571 - accuracy: 0.7533\n",
+      "Epoch 172/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.4552 - accuracy: 0.7567\n",
+      "Epoch 173/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.4533 - accuracy: 0.7600\n",
+      "Epoch 174/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.4513 - accuracy: 0.7600\n",
+      "Epoch 175/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.4495 - accuracy: 0.7600\n",
+      "Epoch 176/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.4475 - accuracy: 0.7667\n",
+      "Epoch 177/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.4458 - accuracy: 0.7667\n",
+      "Epoch 178/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.4437 - accuracy: 0.7667\n",
+      "Epoch 179/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.4421 - accuracy: 0.7667\n",
+      "Epoch 180/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.4400 - accuracy: 0.7667\n",
+      "Epoch 181/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.4382 - accuracy: 0.7767\n",
+      "Epoch 182/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.4363 - accuracy: 0.7733\n",
+      "Epoch 183/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.4345 - accuracy: 0.7767\n",
+      "Epoch 184/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.4327 - accuracy: 0.7767\n",
+      "Epoch 185/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.4309 - accuracy: 0.7767\n",
+      "Epoch 186/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.4291 - accuracy: 0.7767\n",
+      "Epoch 187/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.4272 - accuracy: 0.7767\n",
+      "Epoch 188/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.4254 - accuracy: 0.7767\n",
+      "Epoch 189/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.4237 - accuracy: 0.7767\n",
+      "Epoch 190/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.4219 - accuracy: 0.7767\n",
+      "Epoch 191/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.4201 - accuracy: 0.7800\n",
+      "Epoch 192/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.4185 - accuracy: 0.7833\n",
+      "Epoch 193/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.4166 - accuracy: 0.7833\n",
+      "Epoch 194/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.4150 - accuracy: 0.7867\n",
+      "Epoch 195/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.4132 - accuracy: 0.7833\n",
+      "Epoch 196/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.4114 - accuracy: 0.7867\n",
+      "Epoch 197/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.4098 - accuracy: 0.7867\n",
+      "Epoch 198/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.4081 - accuracy: 0.7900\n",
+      "Epoch 199/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.4064 - accuracy: 0.7933\n",
+      "Epoch 200/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.4047 - accuracy: 0.7933\n",
+      "Epoch 201/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.4031 - accuracy: 0.7933\n",
+      "Epoch 202/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.4015 - accuracy: 0.7967\n",
+      "Epoch 203/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.3997 - accuracy: 0.7967\n",
+      "Epoch 204/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.3982 - accuracy: 0.7967\n",
+      "Epoch 205/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.3965 - accuracy: 0.8000\n",
+      "Epoch 206/1000\n",
+      "3/3 [==============================] - 0s 9ms/step - loss: 0.3950 - accuracy: 0.8000\n",
+      "Epoch 207/1000\n",
+      "3/3 [==============================] - 0s 5ms/step - loss: 0.3933 - accuracy: 0.8000\n",
+      "Epoch 208/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.3918 - accuracy: 0.8000\n",
+      "Epoch 209/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.3901 - accuracy: 0.8000\n",
+      "Epoch 210/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.3886 - accuracy: 0.8000\n",
+      "Epoch 211/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.3870 - accuracy: 0.8033\n",
+      "Epoch 212/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.3855 - accuracy: 0.8067\n",
+      "Epoch 213/1000\n",
+      "3/3 [==============================] - 0s 5ms/step - loss: 0.3839 - accuracy: 0.8067\n",
+      "Epoch 214/1000\n",
+      "3/3 [==============================] - 0s 5ms/step - loss: 0.3823 - accuracy: 0.8067\n",
+      "Epoch 215/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.3808 - accuracy: 0.8100\n",
+      "Epoch 216/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.3794 - accuracy: 0.8100\n",
+      "Epoch 217/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.3778 - accuracy: 0.8133\n",
+      "Epoch 218/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.3766 - accuracy: 0.8133\n",
+      "Epoch 219/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.3749 - accuracy: 0.8133\n",
+      "Epoch 220/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.3733 - accuracy: 0.8133\n",
+      "Epoch 221/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.3718 - accuracy: 0.8133\n",
+      "Epoch 222/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.3704 - accuracy: 0.8133\n",
+      "Epoch 223/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.3689 - accuracy: 0.8133\n",
+      "Epoch 224/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.3676 - accuracy: 0.8167\n",
+      "Epoch 225/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.3661 - accuracy: 0.8167\n",
+      "Epoch 226/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.3647 - accuracy: 0.8200\n",
+      "Epoch 227/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.3633 - accuracy: 0.8200\n",
+      "Epoch 228/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.3618 - accuracy: 0.8267\n",
+      "Epoch 229/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.3603 - accuracy: 0.8267\n",
+      "Epoch 230/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.3590 - accuracy: 0.8267\n",
+      "Epoch 231/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.3577 - accuracy: 0.8233\n",
+      "Epoch 232/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.3563 - accuracy: 0.8233\n",
+      "Epoch 233/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.3549 - accuracy: 0.8233\n",
+      "Epoch 234/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.3535 - accuracy: 0.8233\n",
+      "Epoch 235/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.3522 - accuracy: 0.8267\n",
+      "Epoch 236/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.3509 - accuracy: 0.8267\n",
+      "Epoch 237/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.3498 - accuracy: 0.8267\n",
+      "Epoch 238/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.3484 - accuracy: 0.8300\n",
+      "Epoch 239/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.3468 - accuracy: 0.8333\n",
+      "Epoch 240/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.3458 - accuracy: 0.8300\n",
+      "Epoch 241/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.3443 - accuracy: 0.8300\n",
+      "Epoch 242/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.3429 - accuracy: 0.8333\n",
+      "Epoch 243/1000\n",
+      "3/3 [==============================] - 0s 17ms/step - loss: 0.3417 - accuracy: 0.8333\n",
+      "Epoch 244/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.3403 - accuracy: 0.8367\n",
+      "Epoch 245/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.3391 - accuracy: 0.8333\n",
+      "Epoch 246/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.3379 - accuracy: 0.8367\n",
+      "Epoch 247/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.3366 - accuracy: 0.8400\n",
+      "Epoch 248/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.3353 - accuracy: 0.8400\n",
+      "Epoch 249/1000\n",
+      "3/3 [==============================] - ETA: 0s - loss: 0.3375 - accuracy: 0.83 - 0s 4ms/step - loss: 0.3341 - accuracy: 0.8433\n",
+      "Epoch 250/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.3328 - accuracy: 0.8433\n",
+      "Epoch 251/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.3316 - accuracy: 0.8433\n",
+      "Epoch 252/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.3304 - accuracy: 0.8433\n",
+      "Epoch 253/1000\n",
+      "3/3 [==============================] - 0s 21ms/step - loss: 0.3293 - accuracy: 0.8433\n",
+      "Epoch 254/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.3282 - accuracy: 0.8433\n",
+      "Epoch 255/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.3269 - accuracy: 0.8433\n",
+      "Epoch 256/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.3258 - accuracy: 0.8433\n",
+      "Epoch 257/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.3245 - accuracy: 0.8433\n",
+      "Epoch 258/1000\n",
+      "3/3 [==============================] - 0s 20ms/step - loss: 0.3232 - accuracy: 0.8433\n",
+      "Epoch 259/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.3221 - accuracy: 0.8433\n",
+      "Epoch 260/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.3209 - accuracy: 0.8433\n",
+      "Epoch 261/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.3197 - accuracy: 0.8467\n",
+      "Epoch 262/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.3186 - accuracy: 0.8433\n",
+      "Epoch 263/1000\n",
+      "3/3 [==============================] - 0s 18ms/step - loss: 0.3175 - accuracy: 0.8433\n",
+      "Epoch 264/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.3163 - accuracy: 0.8433\n",
+      "Epoch 265/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.3151 - accuracy: 0.8433\n",
+      "Epoch 266/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.3141 - accuracy: 0.8467\n",
+      "Epoch 267/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.3131 - accuracy: 0.8500\n",
+      "Epoch 268/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.3117 - accuracy: 0.8467\n",
+      "Epoch 269/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.3108 - accuracy: 0.8567\n",
+      "Epoch 270/1000\n",
+      "3/3 [==============================] - 0s 5ms/step - loss: 0.3096 - accuracy: 0.8567\n",
+      "Epoch 271/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.3085 - accuracy: 0.8567\n",
+      "Epoch 272/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.3074 - accuracy: 0.8567\n",
+      "Epoch 273/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.3064 - accuracy: 0.8600\n",
+      "Epoch 274/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.3052 - accuracy: 0.8600\n",
+      "Epoch 275/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.3041 - accuracy: 0.8600\n",
+      "Epoch 276/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.3032 - accuracy: 0.8600\n",
+      "Epoch 277/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.3020 - accuracy: 0.8633\n",
+      "Epoch 278/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.3010 - accuracy: 0.8633\n",
+      "Epoch 279/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.2999 - accuracy: 0.8667\n",
+      "Epoch 280/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.2988 - accuracy: 0.8667\n",
+      "Epoch 281/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.2978 - accuracy: 0.8667\n",
+      "Epoch 282/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.2968 - accuracy: 0.8700\n",
+      "Epoch 283/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.2958 - accuracy: 0.8700\n",
+      "Epoch 284/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.2948 - accuracy: 0.8700\n",
+      "Epoch 285/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.2938 - accuracy: 0.8700\n",
+      "Epoch 286/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.2927 - accuracy: 0.8700\n",
+      "Epoch 287/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.2917 - accuracy: 0.8700\n",
+      "Epoch 288/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.2907 - accuracy: 0.8767\n",
+      "Epoch 289/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.2897 - accuracy: 0.8767\n",
+      "Epoch 290/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.2888 - accuracy: 0.8800\n",
+      "Epoch 291/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.2878 - accuracy: 0.8767\n",
+      "Epoch 292/1000\n",
+      "3/3 [==============================] - 0s 5ms/step - loss: 0.2867 - accuracy: 0.8767\n",
+      "Epoch 293/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.2858 - accuracy: 0.8800\n",
+      "Epoch 294/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.2849 - accuracy: 0.8800\n",
+      "Epoch 295/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.2839 - accuracy: 0.8800\n",
+      "Epoch 296/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.2831 - accuracy: 0.8800\n",
+      "Epoch 297/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.2819 - accuracy: 0.8800\n",
+      "Epoch 298/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.2810 - accuracy: 0.8800\n",
+      "Epoch 299/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.2801 - accuracy: 0.8800\n",
+      "Epoch 300/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.2792 - accuracy: 0.8833\n",
+      "Epoch 301/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.2783 - accuracy: 0.8833\n",
+      "Epoch 302/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.2772 - accuracy: 0.8867\n",
+      "Epoch 303/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.2764 - accuracy: 0.8833\n",
+      "Epoch 304/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.2754 - accuracy: 0.8867\n",
+      "Epoch 305/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.2745 - accuracy: 0.8867\n",
+      "Epoch 306/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.2736 - accuracy: 0.8867\n",
+      "Epoch 307/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.2727 - accuracy: 0.8867\n",
+      "Epoch 308/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.2719 - accuracy: 0.8867\n",
+      "Epoch 309/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.2709 - accuracy: 0.8867\n",
+      "Epoch 310/1000\n",
+      "3/3 [==============================] - 0s 5ms/step - loss: 0.2700 - accuracy: 0.8867\n",
+      "Epoch 311/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.2691 - accuracy: 0.8867\n",
+      "Epoch 312/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.2682 - accuracy: 0.8900\n",
+      "Epoch 313/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.2673 - accuracy: 0.8900\n",
+      "Epoch 314/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.2665 - accuracy: 0.8900\n",
+      "Epoch 315/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.2656 - accuracy: 0.8933\n",
+      "Epoch 316/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.2648 - accuracy: 0.8933\n",
+      "Epoch 317/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.2640 - accuracy: 0.8933\n",
+      "Epoch 318/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.2631 - accuracy: 0.8967\n",
+      "Epoch 319/1000\n",
+      "3/3 [==============================] - 0s 5ms/step - loss: 0.2621 - accuracy: 0.8967\n",
+      "Epoch 320/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.2613 - accuracy: 0.8967\n",
+      "Epoch 321/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.2605 - accuracy: 0.8967\n",
+      "Epoch 322/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.2598 - accuracy: 0.8933\n",
+      "Epoch 323/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.2588 - accuracy: 0.8967\n",
+      "Epoch 324/1000\n",
+      "3/3 [==============================] - 0s 7ms/step - loss: 0.2580 - accuracy: 0.8967\n",
+      "Epoch 325/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.2571 - accuracy: 0.8967\n",
+      "Epoch 326/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.2563 - accuracy: 0.8933\n",
+      "Epoch 327/1000\n",
+      "3/3 [==============================] - 0s 7ms/step - loss: 0.2555 - accuracy: 0.8967\n",
+      "Epoch 328/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.2547 - accuracy: 0.9000\n",
+      "Epoch 329/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.2540 - accuracy: 0.9000\n",
+      "Epoch 330/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.2531 - accuracy: 0.9000\n",
+      "Epoch 331/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.2524 - accuracy: 0.8967\n",
+      "Epoch 332/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.2515 - accuracy: 0.8967\n",
+      "Epoch 333/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.2507 - accuracy: 0.8967\n",
+      "Epoch 334/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.2498 - accuracy: 0.9000\n",
+      "Epoch 335/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.2492 - accuracy: 0.9000\n",
+      "Epoch 336/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.2485 - accuracy: 0.9000\n",
+      "Epoch 337/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.2476 - accuracy: 0.9000\n",
+      "Epoch 338/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.2468 - accuracy: 0.9000\n",
+      "Epoch 339/1000\n",
+      "3/3 [==============================] - 0s 5ms/step - loss: 0.2461 - accuracy: 0.9000\n",
+      "Epoch 340/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.2453 - accuracy: 0.9000\n",
+      "Epoch 341/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.2445 - accuracy: 0.9000\n",
+      "Epoch 342/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.2438 - accuracy: 0.9033\n",
+      "Epoch 343/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.2430 - accuracy: 0.9033\n",
+      "Epoch 344/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.2422 - accuracy: 0.9000\n",
+      "Epoch 345/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.2415 - accuracy: 0.9000\n",
+      "Epoch 346/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.2409 - accuracy: 0.9000\n",
+      "Epoch 347/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.2401 - accuracy: 0.9033\n",
+      "Epoch 348/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.2392 - accuracy: 0.9033\n",
+      "Epoch 349/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.2386 - accuracy: 0.9033\n",
+      "Epoch 350/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.2379 - accuracy: 0.9067\n",
+      "Epoch 351/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.2373 - accuracy: 0.9033\n",
+      "Epoch 352/1000\n",
+      "3/3 [==============================] - 0s 5ms/step - loss: 0.2364 - accuracy: 0.9033\n",
+      "Epoch 353/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.2357 - accuracy: 0.9033\n",
+      "Epoch 354/1000\n",
+      "3/3 [==============================] - 0s 5ms/step - loss: 0.2349 - accuracy: 0.9067\n",
+      "Epoch 355/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.2343 - accuracy: 0.9067\n",
+      "Epoch 356/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.2336 - accuracy: 0.9067\n",
+      "Epoch 357/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.2328 - accuracy: 0.9067\n",
+      "Epoch 358/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.2321 - accuracy: 0.9133\n",
+      "Epoch 359/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.2315 - accuracy: 0.9133\n",
+      "Epoch 360/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.2309 - accuracy: 0.9133\n",
+      "Epoch 361/1000\n",
+      "3/3 [==============================] - ETA: 0s - loss: 0.1736 - accuracy: 0.95 - 0s 4ms/step - loss: 0.2300 - accuracy: 0.9133\n",
+      "Epoch 362/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.2295 - accuracy: 0.9100\n",
+      "Epoch 363/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.2288 - accuracy: 0.9100\n",
+      "Epoch 364/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.2281 - accuracy: 0.9067\n",
+      "Epoch 365/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.2273 - accuracy: 0.9100\n",
+      "Epoch 366/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.2268 - accuracy: 0.9133\n",
+      "Epoch 367/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.2261 - accuracy: 0.9133\n",
+      "Epoch 368/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.2255 - accuracy: 0.9133\n",
+      "Epoch 369/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.2247 - accuracy: 0.9167\n",
+      "Epoch 370/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.2241 - accuracy: 0.9167\n",
+      "Epoch 371/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.2235 - accuracy: 0.9167\n",
+      "Epoch 372/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.2228 - accuracy: 0.9167\n",
+      "Epoch 373/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.2222 - accuracy: 0.9167\n",
+      "Epoch 374/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.2215 - accuracy: 0.9200\n",
+      "Epoch 375/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.2208 - accuracy: 0.9200\n",
+      "Epoch 376/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.2202 - accuracy: 0.9200\n",
+      "Epoch 377/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.2196 - accuracy: 0.9200\n",
+      "Epoch 378/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.2190 - accuracy: 0.9200\n",
+      "Epoch 379/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.2182 - accuracy: 0.9200\n",
+      "Epoch 380/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.2178 - accuracy: 0.9233\n",
+      "Epoch 381/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.2171 - accuracy: 0.9233\n",
+      "Epoch 382/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.2164 - accuracy: 0.9233\n",
+      "Epoch 383/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.2159 - accuracy: 0.9233\n",
+      "Epoch 384/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.2152 - accuracy: 0.9267\n",
+      "Epoch 385/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.2147 - accuracy: 0.9267\n",
+      "Epoch 386/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.2140 - accuracy: 0.9267\n",
+      "Epoch 387/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.2133 - accuracy: 0.9267\n",
+      "Epoch 388/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.2127 - accuracy: 0.9267\n",
+      "Epoch 389/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.2122 - accuracy: 0.9233\n",
+      "Epoch 390/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.2115 - accuracy: 0.9300\n",
+      "Epoch 391/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.2110 - accuracy: 0.9267\n",
+      "Epoch 392/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.2104 - accuracy: 0.9267\n",
+      "Epoch 393/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.2098 - accuracy: 0.9300\n",
+      "Epoch 394/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.2091 - accuracy: 0.9300\n",
+      "Epoch 395/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.2086 - accuracy: 0.9267\n",
+      "Epoch 396/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.2080 - accuracy: 0.9267\n",
+      "Epoch 397/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.2076 - accuracy: 0.9267\n",
+      "Epoch 398/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.2070 - accuracy: 0.9300\n",
+      "Epoch 399/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.2064 - accuracy: 0.9267\n",
+      "Epoch 400/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.2057 - accuracy: 0.9267\n",
+      "Epoch 401/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.2051 - accuracy: 0.9267\n",
+      "Epoch 402/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.2046 - accuracy: 0.9267\n",
+      "Epoch 403/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.2040 - accuracy: 0.9333\n",
+      "Epoch 404/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.2034 - accuracy: 0.9333\n",
+      "Epoch 405/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.2029 - accuracy: 0.9300\n",
+      "Epoch 406/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.2023 - accuracy: 0.9333\n",
+      "Epoch 407/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.2017 - accuracy: 0.9333\n",
+      "Epoch 408/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.2013 - accuracy: 0.9333\n",
+      "Epoch 409/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.2007 - accuracy: 0.9367\n",
+      "Epoch 410/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.2001 - accuracy: 0.9333\n",
+      "Epoch 411/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.1995 - accuracy: 0.9367\n",
+      "Epoch 412/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.1990 - accuracy: 0.9400\n",
+      "Epoch 413/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.1984 - accuracy: 0.9367\n",
+      "Epoch 414/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.1980 - accuracy: 0.9367\n",
+      "Epoch 415/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.1975 - accuracy: 0.9400\n",
+      "Epoch 416/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.1968 - accuracy: 0.9400\n",
+      "Epoch 417/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.1963 - accuracy: 0.9400\n",
+      "Epoch 418/1000\n",
+      "3/3 [==============================] - 0s 5ms/step - loss: 0.1957 - accuracy: 0.9367\n",
+      "Epoch 419/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.1953 - accuracy: 0.9367\n",
+      "Epoch 420/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.1948 - accuracy: 0.9367\n",
+      "Epoch 421/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.1942 - accuracy: 0.9400\n",
+      "Epoch 422/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.1937 - accuracy: 0.9400\n",
+      "Epoch 423/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.1932 - accuracy: 0.9400\n",
+      "Epoch 424/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.1927 - accuracy: 0.9433\n",
+      "Epoch 425/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.1922 - accuracy: 0.9433\n",
+      "Epoch 426/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.1916 - accuracy: 0.9433\n",
+      "Epoch 427/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.1911 - accuracy: 0.9433\n",
+      "Epoch 428/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.1907 - accuracy: 0.9400\n",
+      "Epoch 429/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.1900 - accuracy: 0.9400\n",
+      "Epoch 430/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.1896 - accuracy: 0.9400\n",
+      "Epoch 431/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.1891 - accuracy: 0.9433\n",
+      "Epoch 432/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.1886 - accuracy: 0.9433\n",
+      "Epoch 433/1000\n",
+      "3/3 [==============================] - 0s 5ms/step - loss: 0.1881 - accuracy: 0.9433\n",
+      "Epoch 434/1000\n",
+      "3/3 [==============================] - 0s 6ms/step - loss: 0.1875 - accuracy: 0.9433\n",
+      "Epoch 435/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.1871 - accuracy: 0.9433\n",
+      "Epoch 436/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.1866 - accuracy: 0.9433\n",
+      "Epoch 437/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.1860 - accuracy: 0.9433\n",
+      "Epoch 438/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.1856 - accuracy: 0.9433\n",
+      "Epoch 439/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.1851 - accuracy: 0.9433\n",
+      "Epoch 440/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.1846 - accuracy: 0.9433\n",
+      "Epoch 441/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.1841 - accuracy: 0.9433\n",
+      "Epoch 442/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.1838 - accuracy: 0.9433\n",
+      "Epoch 443/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.1832 - accuracy: 0.9433\n",
+      "Epoch 444/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.1828 - accuracy: 0.9433\n",
+      "Epoch 445/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.1822 - accuracy: 0.9433\n",
+      "Epoch 446/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.1818 - accuracy: 0.9433\n",
+      "Epoch 447/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.1812 - accuracy: 0.9433\n",
+      "Epoch 448/1000\n",
+      "3/3 [==============================] - 0s 5ms/step - loss: 0.1809 - accuracy: 0.9433\n",
+      "Epoch 449/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.1804 - accuracy: 0.9433\n",
+      "Epoch 450/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.1799 - accuracy: 0.9433\n",
+      "Epoch 451/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.1794 - accuracy: 0.9433\n",
+      "Epoch 452/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.1789 - accuracy: 0.9433\n",
+      "Epoch 453/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.1786 - accuracy: 0.9433\n",
+      "Epoch 454/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.1781 - accuracy: 0.9433\n",
+      "Epoch 455/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.1775 - accuracy: 0.9433\n",
+      "Epoch 456/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.1771 - accuracy: 0.9433\n",
+      "Epoch 457/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.1767 - accuracy: 0.9433\n",
+      "Epoch 458/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.1763 - accuracy: 0.9433\n",
+      "Epoch 459/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.1758 - accuracy: 0.9433\n",
+      "Epoch 460/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.1753 - accuracy: 0.9467\n",
+      "Epoch 461/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.1748 - accuracy: 0.9467\n",
+      "Epoch 462/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.1744 - accuracy: 0.9467\n",
+      "Epoch 463/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.1741 - accuracy: 0.9500\n",
+      "Epoch 464/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.1736 - accuracy: 0.9467\n",
+      "Epoch 465/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.1732 - accuracy: 0.9467\n",
+      "Epoch 466/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.1727 - accuracy: 0.9467\n",
+      "Epoch 467/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.1722 - accuracy: 0.9467\n",
+      "Epoch 468/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.1719 - accuracy: 0.9467\n",
+      "Epoch 469/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.1715 - accuracy: 0.9467\n",
+      "Epoch 470/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.1710 - accuracy: 0.9467\n",
+      "Epoch 471/1000\n",
+      "3/3 [==============================] - 0s 5ms/step - loss: 0.1706 - accuracy: 0.9500\n",
+      "Epoch 472/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.1701 - accuracy: 0.9467\n",
+      "Epoch 473/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.1697 - accuracy: 0.9500\n",
+      "Epoch 474/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.1692 - accuracy: 0.9467\n",
+      "Epoch 475/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.1688 - accuracy: 0.9500\n",
+      "Epoch 476/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.1683 - accuracy: 0.9533\n",
+      "Epoch 477/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.1680 - accuracy: 0.9533\n",
+      "Epoch 478/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.1675 - accuracy: 0.9533\n",
+      "Epoch 479/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.1672 - accuracy: 0.9533\n",
+      "Epoch 480/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.1667 - accuracy: 0.9533\n",
+      "Epoch 481/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.1663 - accuracy: 0.9533\n",
+      "Epoch 482/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.1659 - accuracy: 0.9500\n",
+      "Epoch 483/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.1656 - accuracy: 0.9533\n",
+      "Epoch 484/1000\n",
+      "3/3 [==============================] - 0s 12ms/step - loss: 0.1652 - accuracy: 0.9533\n",
+      "Epoch 485/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.1646 - accuracy: 0.9533\n",
+      "Epoch 486/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.1642 - accuracy: 0.9567\n",
+      "Epoch 487/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.1639 - accuracy: 0.9533\n",
+      "Epoch 488/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.1634 - accuracy: 0.9533\n",
+      "Epoch 489/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.1630 - accuracy: 0.9533\n",
+      "Epoch 490/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.1627 - accuracy: 0.9533\n",
+      "Epoch 491/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.1622 - accuracy: 0.9533\n",
+      "Epoch 492/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.1618 - accuracy: 0.9533\n",
+      "Epoch 493/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.1616 - accuracy: 0.9533\n",
+      "Epoch 494/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.1610 - accuracy: 0.9567\n",
+      "Epoch 495/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.1607 - accuracy: 0.9533\n",
+      "Epoch 496/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.1603 - accuracy: 0.9533\n",
+      "Epoch 497/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.1598 - accuracy: 0.9567\n",
+      "Epoch 498/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.1595 - accuracy: 0.9567\n",
+      "Epoch 499/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.1592 - accuracy: 0.9567\n",
+      "Epoch 500/1000\n",
+      "3/3 [==============================] - 0s 22ms/step - loss: 0.1587 - accuracy: 0.9567\n",
+      "Epoch 501/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.1583 - accuracy: 0.9567\n",
+      "Epoch 502/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.1580 - accuracy: 0.9533\n",
+      "Epoch 503/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.1575 - accuracy: 0.9533\n",
+      "Epoch 504/1000\n",
+      "3/3 [==============================] - 0s 21ms/step - loss: 0.1573 - accuracy: 0.9533\n",
+      "Epoch 505/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.1568 - accuracy: 0.9533\n",
+      "Epoch 506/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.1564 - accuracy: 0.9567\n",
+      "Epoch 507/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.1561 - accuracy: 0.9500\n",
+      "Epoch 508/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.1556 - accuracy: 0.9567\n",
+      "Epoch 509/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.1553 - accuracy: 0.9567\n",
+      "Epoch 510/1000\n",
+      "3/3 [==============================] - 0s 11ms/step - loss: 0.1549 - accuracy: 0.9567\n",
+      "Epoch 511/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.1545 - accuracy: 0.9567\n",
+      "Epoch 512/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.1541 - accuracy: 0.9567\n",
+      "Epoch 513/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.1539 - accuracy: 0.9567\n",
+      "Epoch 514/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.1534 - accuracy: 0.9567\n",
+      "Epoch 515/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.1530 - accuracy: 0.9567\n",
+      "Epoch 516/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.1527 - accuracy: 0.9567\n",
+      "Epoch 517/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.1523 - accuracy: 0.9567\n",
+      "Epoch 518/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.1519 - accuracy: 0.9567\n",
+      "Epoch 519/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.1516 - accuracy: 0.9567\n",
+      "Epoch 520/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.1513 - accuracy: 0.9567\n",
+      "Epoch 521/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.1509 - accuracy: 0.9567\n",
+      "Epoch 522/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.1506 - accuracy: 0.9567\n",
+      "Epoch 523/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.1503 - accuracy: 0.9567\n",
+      "Epoch 524/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.1499 - accuracy: 0.9567\n",
+      "Epoch 525/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.1496 - accuracy: 0.9533\n",
+      "Epoch 526/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.1491 - accuracy: 0.9567\n",
+      "Epoch 527/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.1488 - accuracy: 0.9567\n",
+      "Epoch 528/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.1484 - accuracy: 0.9567\n",
+      "Epoch 529/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.1480 - accuracy: 0.9633\n",
+      "Epoch 530/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.1477 - accuracy: 0.9600\n",
+      "Epoch 531/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.1474 - accuracy: 0.9600\n",
+      "Epoch 532/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.1470 - accuracy: 0.9633\n",
+      "Epoch 533/1000\n",
+      "3/3 [==============================] - 0s 6ms/step - loss: 0.1467 - accuracy: 0.9633\n",
+      "Epoch 534/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.1463 - accuracy: 0.9633\n",
+      "Epoch 535/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.1460 - accuracy: 0.9633\n",
+      "Epoch 536/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.1457 - accuracy: 0.9567\n",
+      "Epoch 537/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.1453 - accuracy: 0.9600\n",
+      "Epoch 538/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.1449 - accuracy: 0.9633\n",
+      "Epoch 539/1000\n",
+      "3/3 [==============================] - 0s 6ms/step - loss: 0.1446 - accuracy: 0.9633\n",
+      "Epoch 540/1000\n",
+      "3/3 [==============================] - 0s 6ms/step - loss: 0.1444 - accuracy: 0.9633\n",
+      "Epoch 541/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.1440 - accuracy: 0.9633\n",
+      "Epoch 542/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.1436 - accuracy: 0.9633\n",
+      "Epoch 543/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.1434 - accuracy: 0.9633\n",
+      "Epoch 544/1000\n",
+      "3/3 [==============================] - 0s 5ms/step - loss: 0.1430 - accuracy: 0.9633\n",
+      "Epoch 545/1000\n",
+      "3/3 [==============================] - 0s 7ms/step - loss: 0.1426 - accuracy: 0.9600\n",
+      "Epoch 546/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.1424 - accuracy: 0.9600\n",
+      "Epoch 547/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.1420 - accuracy: 0.9633\n",
+      "Epoch 548/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.1417 - accuracy: 0.9633\n",
+      "Epoch 549/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.1414 - accuracy: 0.9633\n",
+      "Epoch 550/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.1411 - accuracy: 0.9633\n",
+      "Epoch 551/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.1408 - accuracy: 0.9600\n",
+      "Epoch 552/1000\n",
+      "3/3 [==============================] - 0s 11ms/step - loss: 0.1404 - accuracy: 0.9633\n",
+      "Epoch 553/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.1401 - accuracy: 0.9633\n",
+      "Epoch 554/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.1398 - accuracy: 0.9600\n",
+      "Epoch 555/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.1395 - accuracy: 0.9633\n",
+      "Epoch 556/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.1392 - accuracy: 0.9633\n",
+      "Epoch 557/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.1390 - accuracy: 0.9633\n",
+      "Epoch 558/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.1385 - accuracy: 0.9633\n",
+      "Epoch 559/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.1382 - accuracy: 0.9633\n",
+      "Epoch 560/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.1379 - accuracy: 0.9633\n",
+      "Epoch 561/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.1377 - accuracy: 0.9633\n",
+      "Epoch 562/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.1374 - accuracy: 0.9667\n",
+      "Epoch 563/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.1370 - accuracy: 0.9667\n",
+      "Epoch 564/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.1367 - accuracy: 0.9667\n",
+      "Epoch 565/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.1365 - accuracy: 0.9667\n",
+      "Epoch 566/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.1361 - accuracy: 0.9700\n",
+      "Epoch 567/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.1358 - accuracy: 0.9700\n",
+      "Epoch 568/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.1356 - accuracy: 0.9633\n",
+      "Epoch 569/1000\n",
+      "3/3 [==============================] - ETA: 0s - loss: 0.1524 - accuracy: 0.95 - 0s 3ms/step - loss: 0.1353 - accuracy: 0.9667\n",
+      "Epoch 570/1000\n",
+      "3/3 [==============================] - 0s 5ms/step - loss: 0.1349 - accuracy: 0.9667\n",
+      "Epoch 571/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.1346 - accuracy: 0.9700\n",
+      "Epoch 572/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.1343 - accuracy: 0.9700\n",
+      "Epoch 573/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.1340 - accuracy: 0.9700\n",
+      "Epoch 574/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.1337 - accuracy: 0.9667\n",
+      "Epoch 575/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.1335 - accuracy: 0.9633\n",
+      "Epoch 576/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.1332 - accuracy: 0.9667\n",
+      "Epoch 577/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.1329 - accuracy: 0.9700\n",
+      "Epoch 578/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.1325 - accuracy: 0.9700\n",
+      "Epoch 579/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.1322 - accuracy: 0.9700\n",
+      "Epoch 580/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.1322 - accuracy: 0.9700\n",
+      "Epoch 581/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.1317 - accuracy: 0.9700\n",
+      "Epoch 582/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.1315 - accuracy: 0.9700\n",
+      "Epoch 583/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.1312 - accuracy: 0.9667\n",
+      "Epoch 584/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.1309 - accuracy: 0.9667\n",
+      "Epoch 585/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.1306 - accuracy: 0.9667\n",
+      "Epoch 586/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.1304 - accuracy: 0.9667\n",
+      "Epoch 587/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.1300 - accuracy: 0.9667\n",
+      "Epoch 588/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.1297 - accuracy: 0.9667\n",
+      "Epoch 589/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.1294 - accuracy: 0.9700\n",
+      "Epoch 590/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.1293 - accuracy: 0.9633\n",
+      "Epoch 591/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.1289 - accuracy: 0.9667\n",
+      "Epoch 592/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.1286 - accuracy: 0.9700\n",
+      "Epoch 593/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.1284 - accuracy: 0.9700\n",
+      "Epoch 594/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.1281 - accuracy: 0.9700\n",
+      "Epoch 595/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.1278 - accuracy: 0.9700\n",
+      "Epoch 596/1000\n",
+      "3/3 [==============================] - 0s 5ms/step - loss: 0.1275 - accuracy: 0.9700\n",
+      "Epoch 597/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.1273 - accuracy: 0.9700\n",
+      "Epoch 598/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.1270 - accuracy: 0.9700\n",
+      "Epoch 599/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.1268 - accuracy: 0.9700\n",
+      "Epoch 600/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.1266 - accuracy: 0.9700\n",
+      "Epoch 601/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.1262 - accuracy: 0.9733\n",
+      "Epoch 602/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.1260 - accuracy: 0.9733\n",
+      "Epoch 603/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.1257 - accuracy: 0.9733\n",
+      "Epoch 604/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.1254 - accuracy: 0.9733\n",
+      "Epoch 605/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.1252 - accuracy: 0.9733\n",
+      "Epoch 606/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.1249 - accuracy: 0.9733\n",
+      "Epoch 607/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.1246 - accuracy: 0.9800\n",
+      "Epoch 608/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.1244 - accuracy: 0.9767\n",
+      "Epoch 609/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.1241 - accuracy: 0.9800\n",
+      "Epoch 610/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.1238 - accuracy: 0.9800\n",
+      "Epoch 611/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.1236 - accuracy: 0.9733\n",
+      "Epoch 612/1000\n",
+      "3/3 [==============================] - 0s 5ms/step - loss: 0.1233 - accuracy: 0.9733\n",
+      "Epoch 613/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.1231 - accuracy: 0.9700\n",
+      "Epoch 614/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.1229 - accuracy: 0.9733\n",
+      "Epoch 615/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.1226 - accuracy: 0.9767\n",
+      "Epoch 616/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.1224 - accuracy: 0.9733\n",
+      "Epoch 617/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.1221 - accuracy: 0.9767\n",
+      "Epoch 618/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.1218 - accuracy: 0.9767\n",
+      "Epoch 619/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.1216 - accuracy: 0.9800\n",
+      "Epoch 620/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.1213 - accuracy: 0.9767\n",
+      "Epoch 621/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.1211 - accuracy: 0.9767\n",
+      "Epoch 622/1000\n",
+      "3/3 [==============================] - 0s 6ms/step - loss: 0.1208 - accuracy: 0.9767\n",
+      "Epoch 623/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.1205 - accuracy: 0.9767\n",
+      "Epoch 624/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.1202 - accuracy: 0.9767\n",
+      "Epoch 625/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.1201 - accuracy: 0.9767\n",
+      "Epoch 626/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.1199 - accuracy: 0.9767\n",
+      "Epoch 627/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.1195 - accuracy: 0.9767\n",
+      "Epoch 628/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.1195 - accuracy: 0.9767\n",
+      "Epoch 629/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.1191 - accuracy: 0.9800\n",
+      "Epoch 630/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.1188 - accuracy: 0.9800\n",
+      "Epoch 631/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.1186 - accuracy: 0.9800\n",
+      "Epoch 632/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.1185 - accuracy: 0.9800\n",
+      "Epoch 633/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.1181 - accuracy: 0.9833\n",
+      "Epoch 634/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.1179 - accuracy: 0.9800\n",
+      "Epoch 635/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.1176 - accuracy: 0.9800\n",
+      "Epoch 636/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.1174 - accuracy: 0.9800\n",
+      "Epoch 637/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.1171 - accuracy: 0.9800\n",
+      "Epoch 638/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.1169 - accuracy: 0.9800\n",
+      "Epoch 639/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.1168 - accuracy: 0.9800\n",
+      "Epoch 640/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.1165 - accuracy: 0.9800\n",
+      "Epoch 641/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.1162 - accuracy: 0.9800\n",
+      "Epoch 642/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.1160 - accuracy: 0.9800\n",
+      "Epoch 643/1000\n",
+      "3/3 [==============================] - 0s 7ms/step - loss: 0.1158 - accuracy: 0.9800\n",
+      "Epoch 644/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.1154 - accuracy: 0.9800\n",
+      "Epoch 645/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.1153 - accuracy: 0.9833\n",
+      "Epoch 646/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.1150 - accuracy: 0.9833\n",
+      "Epoch 647/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.1147 - accuracy: 0.9833\n",
+      "Epoch 648/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.1145 - accuracy: 0.9833\n",
+      "Epoch 649/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.1143 - accuracy: 0.9833\n",
+      "Epoch 650/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.1141 - accuracy: 0.9800\n",
+      "Epoch 651/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.1139 - accuracy: 0.9767\n",
+      "Epoch 652/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.1137 - accuracy: 0.9833\n",
+      "Epoch 653/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.1134 - accuracy: 0.9833\n",
+      "Epoch 654/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.1132 - accuracy: 0.9800\n",
+      "Epoch 655/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.1129 - accuracy: 0.9800\n",
+      "Epoch 656/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.1127 - accuracy: 0.9800\n",
+      "Epoch 657/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.1125 - accuracy: 0.9800\n",
+      "Epoch 658/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.1122 - accuracy: 0.9800\n",
+      "Epoch 659/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.1120 - accuracy: 0.9800\n",
+      "Epoch 660/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.1118 - accuracy: 0.9800\n",
+      "Epoch 661/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.1116 - accuracy: 0.9800\n",
+      "Epoch 662/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.1114 - accuracy: 0.9800\n",
+      "Epoch 663/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.1111 - accuracy: 0.9800\n",
+      "Epoch 664/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.1109 - accuracy: 0.9833\n",
+      "Epoch 665/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.1106 - accuracy: 0.9833\n",
+      "Epoch 666/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.1105 - accuracy: 0.9800\n",
+      "Epoch 667/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.1102 - accuracy: 0.9800\n",
+      "Epoch 668/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.1100 - accuracy: 0.9800\n",
+      "Epoch 669/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.1099 - accuracy: 0.9800\n",
+      "Epoch 670/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.1096 - accuracy: 0.9800\n",
+      "Epoch 671/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.1095 - accuracy: 0.9800\n",
+      "Epoch 672/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.1091 - accuracy: 0.9800\n",
+      "Epoch 673/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.1090 - accuracy: 0.9833\n",
+      "Epoch 674/1000\n",
+      "3/3 [==============================] - ETA: 0s - loss: 0.1040 - accuracy: 1.00 - 0s 3ms/step - loss: 0.1088 - accuracy: 0.9833\n",
+      "Epoch 675/1000\n",
+      "3/3 [==============================] - 0s 8ms/step - loss: 0.1085 - accuracy: 0.9833\n",
+      "Epoch 676/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.1083 - accuracy: 0.9833\n",
+      "Epoch 677/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.1080 - accuracy: 0.9833\n",
+      "Epoch 678/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.1079 - accuracy: 0.9800\n",
+      "Epoch 679/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.1077 - accuracy: 0.9800\n",
+      "Epoch 680/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.1074 - accuracy: 0.9800\n",
+      "Epoch 681/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.1073 - accuracy: 0.9833\n",
+      "Epoch 682/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.1071 - accuracy: 0.9800\n",
+      "Epoch 683/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.1069 - accuracy: 0.9800\n",
+      "Epoch 684/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.1066 - accuracy: 0.9800\n",
+      "Epoch 685/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.1064 - accuracy: 0.9800\n",
+      "Epoch 686/1000\n",
+      "3/3 [==============================] - 0s 5ms/step - loss: 0.1062 - accuracy: 0.9833\n",
+      "Epoch 687/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.1059 - accuracy: 0.9833\n",
+      "Epoch 688/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.1058 - accuracy: 0.9833\n",
+      "Epoch 689/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.1056 - accuracy: 0.9833\n",
+      "Epoch 690/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.1054 - accuracy: 0.9833\n",
+      "Epoch 691/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.1053 - accuracy: 0.9833\n",
+      "Epoch 692/1000\n",
+      "3/3 [==============================] - 0s 5ms/step - loss: 0.1050 - accuracy: 0.9800\n",
+      "Epoch 693/1000\n",
+      "3/3 [==============================] - 0s 5ms/step - loss: 0.1048 - accuracy: 0.9800\n",
+      "Epoch 694/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.1046 - accuracy: 0.9833\n",
+      "Epoch 695/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.1043 - accuracy: 0.9833\n",
+      "Epoch 696/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.1042 - accuracy: 0.9833\n",
+      "Epoch 697/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.1040 - accuracy: 0.9833\n",
+      "Epoch 698/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.1038 - accuracy: 0.9800\n",
+      "Epoch 699/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.1036 - accuracy: 0.9833\n",
+      "Epoch 700/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.1034 - accuracy: 0.9800\n",
+      "Epoch 701/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.1032 - accuracy: 0.9800\n",
+      "Epoch 702/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.1029 - accuracy: 0.9833\n",
+      "Epoch 703/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.1028 - accuracy: 0.9833\n",
+      "Epoch 704/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.1026 - accuracy: 0.9833\n",
+      "Epoch 705/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.1026 - accuracy: 0.9833\n",
+      "Epoch 706/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.1022 - accuracy: 0.9833\n",
+      "Epoch 707/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.1020 - accuracy: 0.9800\n",
+      "Epoch 708/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.1019 - accuracy: 0.9833\n",
+      "Epoch 709/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.1016 - accuracy: 0.9833\n",
+      "Epoch 710/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.1015 - accuracy: 0.9800\n",
+      "Epoch 711/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.1013 - accuracy: 0.9800\n",
+      "Epoch 712/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.1011 - accuracy: 0.9800\n",
+      "Epoch 713/1000\n",
+      "3/3 [==============================] - 0s 5ms/step - loss: 0.1010 - accuracy: 0.9800\n",
+      "Epoch 714/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.1007 - accuracy: 0.9800\n",
+      "Epoch 715/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.1005 - accuracy: 0.9800\n",
+      "Epoch 716/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.1004 - accuracy: 0.9833\n",
+      "Epoch 717/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.1002 - accuracy: 0.9833\n",
+      "Epoch 718/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.0999 - accuracy: 0.9833\n",
+      "Epoch 719/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.0998 - accuracy: 0.9800\n",
+      "Epoch 720/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.0997 - accuracy: 0.9833\n",
+      "Epoch 721/1000\n",
+      "3/3 [==============================] - 0s 9ms/step - loss: 0.0994 - accuracy: 0.9833\n",
+      "Epoch 722/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.0993 - accuracy: 0.9833\n",
+      "Epoch 723/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.0990 - accuracy: 0.9833\n",
+      "Epoch 724/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.0989 - accuracy: 0.9833\n",
+      "Epoch 725/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.0987 - accuracy: 0.9833\n",
+      "Epoch 726/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.0986 - accuracy: 0.9800\n",
+      "Epoch 727/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.0983 - accuracy: 0.9800\n",
+      "Epoch 728/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.0982 - accuracy: 0.9800\n",
+      "Epoch 729/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.0980 - accuracy: 0.9800\n",
+      "Epoch 730/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.0978 - accuracy: 0.9833\n",
+      "Epoch 731/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.0976 - accuracy: 0.9833\n",
+      "Epoch 732/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.0974 - accuracy: 0.9833\n",
+      "Epoch 733/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.0973 - accuracy: 0.9833\n",
+      "Epoch 734/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.0970 - accuracy: 0.9833\n",
+      "Epoch 735/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.0969 - accuracy: 0.9833\n",
+      "Epoch 736/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.0967 - accuracy: 0.9833\n",
+      "Epoch 737/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.0965 - accuracy: 0.9833\n",
+      "Epoch 738/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.0964 - accuracy: 0.9833\n",
+      "Epoch 739/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.0962 - accuracy: 0.9833\n",
+      "Epoch 740/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.0960 - accuracy: 0.9833\n",
+      "Epoch 741/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.0959 - accuracy: 0.9800\n",
+      "Epoch 742/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.0957 - accuracy: 0.9800\n",
+      "Epoch 743/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.0956 - accuracy: 0.9833\n",
+      "Epoch 744/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.0954 - accuracy: 0.9833\n",
+      "Epoch 745/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.0952 - accuracy: 0.9833\n",
+      "Epoch 746/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.0950 - accuracy: 0.9833\n",
+      "Epoch 747/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.0948 - accuracy: 0.9833\n",
+      "Epoch 748/1000\n",
+      "3/3 [==============================] - 0s 6ms/step - loss: 0.0948 - accuracy: 0.9833\n",
+      "Epoch 749/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.0946 - accuracy: 0.9800\n",
+      "Epoch 750/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.0943 - accuracy: 0.9833\n",
+      "Epoch 751/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.0941 - accuracy: 0.9800\n",
+      "Epoch 752/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.0940 - accuracy: 0.9833\n",
+      "Epoch 753/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.0938 - accuracy: 0.9833\n",
+      "Epoch 754/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.0937 - accuracy: 0.9833\n",
+      "Epoch 755/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.0936 - accuracy: 0.9833\n",
+      "Epoch 756/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.0933 - accuracy: 0.9833\n",
+      "Epoch 757/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.0932 - accuracy: 0.9833\n",
+      "Epoch 758/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.0930 - accuracy: 0.9833\n",
+      "Epoch 759/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.0928 - accuracy: 0.9833\n",
+      "Epoch 760/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.0927 - accuracy: 0.9833\n",
+      "Epoch 761/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.0925 - accuracy: 0.9833\n",
+      "Epoch 762/1000\n",
+      "3/3 [==============================] - 0s 5ms/step - loss: 0.0925 - accuracy: 0.9800\n",
+      "Epoch 763/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.0923 - accuracy: 0.9833\n",
+      "Epoch 764/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.0920 - accuracy: 0.9800\n",
+      "Epoch 765/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.0918 - accuracy: 0.9833\n",
+      "Epoch 766/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.0917 - accuracy: 0.9833\n",
+      "Epoch 767/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.0916 - accuracy: 0.9833\n",
+      "Epoch 768/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.0914 - accuracy: 0.9833\n",
+      "Epoch 769/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.0913 - accuracy: 0.9833\n",
+      "Epoch 770/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.0911 - accuracy: 0.9833\n",
+      "Epoch 771/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.0909 - accuracy: 0.9833\n",
+      "Epoch 772/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.0908 - accuracy: 0.9833\n",
+      "Epoch 773/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.0906 - accuracy: 0.9833\n",
+      "Epoch 774/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.0904 - accuracy: 0.9833\n",
+      "Epoch 775/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.0904 - accuracy: 0.9833\n",
+      "Epoch 776/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.0901 - accuracy: 0.9833\n",
+      "Epoch 777/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.0900 - accuracy: 0.9833\n",
+      "Epoch 778/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.0898 - accuracy: 0.9833\n",
+      "Epoch 779/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.0896 - accuracy: 0.9833\n",
+      "Epoch 780/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.0895 - accuracy: 0.9833\n",
+      "Epoch 781/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.0894 - accuracy: 0.9833\n",
+      "Epoch 782/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.0892 - accuracy: 0.9867\n",
+      "Epoch 783/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.0890 - accuracy: 0.9833\n",
+      "Epoch 784/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.0890 - accuracy: 0.9833\n",
+      "Epoch 785/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.0888 - accuracy: 0.9833\n",
+      "Epoch 786/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.0887 - accuracy: 0.9833\n",
+      "Epoch 787/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.0886 - accuracy: 0.9833\n",
+      "Epoch 788/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.0883 - accuracy: 0.9833\n",
+      "Epoch 789/1000\n",
+      "3/3 [==============================] - 0s 5ms/step - loss: 0.0882 - accuracy: 0.9833\n",
+      "Epoch 790/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.0880 - accuracy: 0.9833\n",
+      "Epoch 791/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.0879 - accuracy: 0.9833\n",
+      "Epoch 792/1000\n",
+      "3/3 [==============================] - 0s 5ms/step - loss: 0.0877 - accuracy: 0.9833\n",
+      "Epoch 793/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.0876 - accuracy: 0.9833\n",
+      "Epoch 794/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.0874 - accuracy: 0.9833\n",
+      "Epoch 795/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.0873 - accuracy: 0.9833\n",
+      "Epoch 796/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.0872 - accuracy: 0.9833\n",
+      "Epoch 797/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.0870 - accuracy: 0.9833\n",
+      "Epoch 798/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.0869 - accuracy: 0.9833\n",
+      "Epoch 799/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.0867 - accuracy: 0.9833\n",
+      "Epoch 800/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.0866 - accuracy: 0.9833\n",
+      "Epoch 801/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.0864 - accuracy: 0.9833\n",
+      "Epoch 802/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.0863 - accuracy: 0.9833\n",
+      "Epoch 803/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.0861 - accuracy: 0.9833\n",
+      "Epoch 804/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.0860 - accuracy: 0.9833\n",
+      "Epoch 805/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.0858 - accuracy: 0.9833\n",
+      "Epoch 806/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.0857 - accuracy: 0.9833\n",
+      "Epoch 807/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.0855 - accuracy: 0.9833\n",
+      "Epoch 808/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.0854 - accuracy: 0.9833\n",
+      "Epoch 809/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.0855 - accuracy: 0.9833\n",
+      "Epoch 810/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.0852 - accuracy: 0.9867\n",
+      "Epoch 811/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.0850 - accuracy: 0.9867\n",
+      "Epoch 812/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.0849 - accuracy: 0.9833\n",
+      "Epoch 813/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.0847 - accuracy: 0.9833\n",
+      "Epoch 814/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.0845 - accuracy: 0.9833\n",
+      "Epoch 815/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.0847 - accuracy: 0.9833\n",
+      "Epoch 816/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.0843 - accuracy: 0.9800\n",
+      "Epoch 817/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.0842 - accuracy: 0.9833\n",
+      "Epoch 818/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.0840 - accuracy: 0.9833\n",
+      "Epoch 819/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.0839 - accuracy: 0.9833\n",
+      "Epoch 820/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.0837 - accuracy: 0.9833\n",
+      "Epoch 821/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.0836 - accuracy: 0.9833\n",
+      "Epoch 822/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.0835 - accuracy: 0.9867\n",
+      "Epoch 823/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.0833 - accuracy: 0.9867\n",
+      "Epoch 824/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.0832 - accuracy: 0.9867\n",
+      "Epoch 825/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.0830 - accuracy: 0.9867\n",
+      "Epoch 826/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.0830 - accuracy: 0.9833\n",
+      "Epoch 827/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.0828 - accuracy: 0.9833\n",
+      "Epoch 828/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.0827 - accuracy: 0.9833\n",
+      "Epoch 829/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.0825 - accuracy: 0.9833\n",
+      "Epoch 830/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.0825 - accuracy: 0.9867\n",
+      "Epoch 831/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.0823 - accuracy: 0.9867\n",
+      "Epoch 832/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.0822 - accuracy: 0.9867\n",
+      "Epoch 833/1000\n",
+      "3/3 [==============================] - 0s 5ms/step - loss: 0.0820 - accuracy: 0.9867\n",
+      "Epoch 834/1000\n",
+      "3/3 [==============================] - 0s 6ms/step - loss: 0.0819 - accuracy: 0.9867\n",
+      "Epoch 835/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.0818 - accuracy: 0.9867\n",
+      "Epoch 836/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.0816 - accuracy: 0.9833\n",
+      "Epoch 837/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.0815 - accuracy: 0.9867\n",
+      "Epoch 838/1000\n",
+      "3/3 [==============================] - 0s 5ms/step - loss: 0.0813 - accuracy: 0.9867\n",
+      "Epoch 839/1000\n",
+      "3/3 [==============================] - 0s 5ms/step - loss: 0.0813 - accuracy: 0.9833\n",
+      "Epoch 840/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.0811 - accuracy: 0.9833\n",
+      "Epoch 841/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.0810 - accuracy: 0.9833\n",
+      "Epoch 842/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.0808 - accuracy: 0.9867\n",
+      "Epoch 843/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.0807 - accuracy: 0.9867\n",
+      "Epoch 844/1000\n",
+      "3/3 [==============================] - 0s 5ms/step - loss: 0.0806 - accuracy: 0.9867\n",
+      "Epoch 845/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.0804 - accuracy: 0.9867\n",
+      "Epoch 846/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.0803 - accuracy: 0.9867\n",
+      "Epoch 847/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.0802 - accuracy: 0.9867\n",
+      "Epoch 848/1000\n",
+      "3/3 [==============================] - 0s 21ms/step - loss: 0.0801 - accuracy: 0.9867\n",
+      "Epoch 849/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.0800 - accuracy: 0.9867\n",
+      "Epoch 850/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.0799 - accuracy: 0.9867\n",
+      "Epoch 851/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.0798 - accuracy: 0.9867\n",
+      "Epoch 852/1000\n",
+      "3/3 [==============================] - 0s 23ms/step - loss: 0.0796 - accuracy: 0.9867\n",
+      "Epoch 853/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.0794 - accuracy: 0.9867\n",
+      "Epoch 854/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.0793 - accuracy: 0.9867\n",
+      "Epoch 855/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.0792 - accuracy: 0.9867\n",
+      "Epoch 856/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.0791 - accuracy: 0.9867\n",
+      "Epoch 857/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.0790 - accuracy: 0.9867\n",
+      "Epoch 858/1000\n",
+      "3/3 [==============================] - 0s 5ms/step - loss: 0.0788 - accuracy: 0.9900\n",
+      "Epoch 859/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.0787 - accuracy: 0.9867\n",
+      "Epoch 860/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.0786 - accuracy: 0.9833\n",
+      "Epoch 861/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.0785 - accuracy: 0.9833\n",
+      "Epoch 862/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.0783 - accuracy: 0.9867\n",
+      "Epoch 863/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.0783 - accuracy: 0.9867\n",
+      "Epoch 864/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.0781 - accuracy: 0.9867\n",
+      "Epoch 865/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.0780 - accuracy: 0.9867\n",
+      "Epoch 866/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.0779 - accuracy: 0.9867\n",
+      "Epoch 867/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.0777 - accuracy: 0.9867\n",
+      "Epoch 868/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.0777 - accuracy: 0.9867\n",
+      "Epoch 869/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.0775 - accuracy: 0.9867\n",
+      "Epoch 870/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.0774 - accuracy: 0.9900\n",
+      "Epoch 871/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.0772 - accuracy: 0.9900\n",
+      "Epoch 872/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.0772 - accuracy: 0.9867\n",
+      "Epoch 873/1000\n",
+      "3/3 [==============================] - 0s 5ms/step - loss: 0.0770 - accuracy: 0.9867\n",
+      "Epoch 874/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.0770 - accuracy: 0.9867\n",
+      "Epoch 875/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.0768 - accuracy: 0.9867\n",
+      "Epoch 876/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.0766 - accuracy: 0.9867\n",
+      "Epoch 877/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.0765 - accuracy: 0.9867\n",
+      "Epoch 878/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.0765 - accuracy: 0.9867\n",
+      "Epoch 879/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.0763 - accuracy: 0.9867\n",
+      "Epoch 880/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.0763 - accuracy: 0.9867\n",
+      "Epoch 881/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.0761 - accuracy: 0.9900\n",
+      "Epoch 882/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.0760 - accuracy: 0.9900\n",
+      "Epoch 883/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.0759 - accuracy: 0.9867\n",
+      "Epoch 884/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.0757 - accuracy: 0.9867\n",
+      "Epoch 885/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.0757 - accuracy: 0.9867\n",
+      "Epoch 886/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.0755 - accuracy: 0.9867\n",
+      "Epoch 887/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.0754 - accuracy: 0.9867\n",
+      "Epoch 888/1000\n",
+      "3/3 [==============================] - 0s 5ms/step - loss: 0.0754 - accuracy: 0.9867\n",
+      "Epoch 889/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.0752 - accuracy: 0.9900\n",
+      "Epoch 890/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.0751 - accuracy: 0.9900\n",
+      "Epoch 891/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.0750 - accuracy: 0.9900\n",
+      "Epoch 892/1000\n",
+      "3/3 [==============================] - 0s 12ms/step - loss: 0.0748 - accuracy: 0.9900\n",
+      "Epoch 893/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.0748 - accuracy: 0.9867\n",
+      "Epoch 894/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.0747 - accuracy: 0.9867\n",
+      "Epoch 895/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.0745 - accuracy: 0.9867\n",
+      "Epoch 896/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.0744 - accuracy: 0.9867\n",
+      "Epoch 897/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.0743 - accuracy: 0.9833\n",
+      "Epoch 898/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.0742 - accuracy: 0.9900\n",
+      "Epoch 899/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.0740 - accuracy: 0.9900\n",
+      "Epoch 900/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.0740 - accuracy: 0.9900\n",
+      "Epoch 901/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.0739 - accuracy: 0.9900\n",
+      "Epoch 902/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.0738 - accuracy: 0.9900\n",
+      "Epoch 903/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.0737 - accuracy: 0.9900\n",
+      "Epoch 904/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.0735 - accuracy: 0.9900\n",
+      "Epoch 905/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.0737 - accuracy: 0.9867\n",
+      "Epoch 906/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.0734 - accuracy: 0.9867\n",
+      "Epoch 907/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.0733 - accuracy: 0.9900\n",
+      "Epoch 908/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.0731 - accuracy: 0.9900\n",
+      "Epoch 909/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.0730 - accuracy: 0.9900\n",
+      "Epoch 910/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.0729 - accuracy: 0.9900\n",
+      "Epoch 911/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.0728 - accuracy: 0.9900\n",
+      "Epoch 912/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.0727 - accuracy: 0.9900\n",
+      "Epoch 913/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.0726 - accuracy: 0.9900\n",
+      "Epoch 914/1000\n",
+      "3/3 [==============================] - 0s 5ms/step - loss: 0.0725 - accuracy: 0.9900\n",
+      "Epoch 915/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.0724 - accuracy: 0.9900\n",
+      "Epoch 916/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.0723 - accuracy: 0.9867\n",
+      "Epoch 917/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.0722 - accuracy: 0.9900\n",
+      "Epoch 918/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.0721 - accuracy: 0.9900\n",
+      "Epoch 919/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.0719 - accuracy: 0.9900\n",
+      "Epoch 920/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.0718 - accuracy: 0.9900\n",
+      "Epoch 921/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.0718 - accuracy: 0.9900\n",
+      "Epoch 922/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.0717 - accuracy: 0.9900\n",
+      "Epoch 923/1000\n",
+      "3/3 [==============================] - 0s 6ms/step - loss: 0.0716 - accuracy: 0.9900\n",
+      "Epoch 924/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.0715 - accuracy: 0.9833\n",
+      "Epoch 925/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.0713 - accuracy: 0.9867\n",
+      "Epoch 926/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.0713 - accuracy: 0.9867\n",
+      "Epoch 927/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.0711 - accuracy: 0.9867\n",
+      "Epoch 928/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.0710 - accuracy: 0.9900\n",
+      "Epoch 929/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.0709 - accuracy: 0.9900\n",
+      "Epoch 930/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.0708 - accuracy: 0.9900\n",
+      "Epoch 931/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.0708 - accuracy: 0.9900\n",
+      "Epoch 932/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.0706 - accuracy: 0.9900\n",
+      "Epoch 933/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.0705 - accuracy: 0.9900\n",
+      "Epoch 934/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.0704 - accuracy: 0.9900\n",
+      "Epoch 935/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.0703 - accuracy: 0.9900\n",
+      "Epoch 936/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.0702 - accuracy: 0.9900\n",
+      "Epoch 937/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.0701 - accuracy: 0.9900\n",
+      "Epoch 938/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.0701 - accuracy: 0.9900\n",
+      "Epoch 939/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.0699 - accuracy: 0.9867\n",
+      "Epoch 940/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.0699 - accuracy: 0.9900\n",
+      "Epoch 941/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.0697 - accuracy: 0.9900\n",
+      "Epoch 942/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.0696 - accuracy: 0.9900\n",
+      "Epoch 943/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.0696 - accuracy: 0.9900\n",
+      "Epoch 944/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.0694 - accuracy: 0.9900\n",
+      "Epoch 945/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.0693 - accuracy: 0.9900\n",
+      "Epoch 946/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.0694 - accuracy: 0.9867\n",
+      "Epoch 947/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.0691 - accuracy: 0.9900\n",
+      "Epoch 948/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.0690 - accuracy: 0.9900\n",
+      "Epoch 949/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.0690 - accuracy: 0.9867\n",
+      "Epoch 950/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.0689 - accuracy: 0.9900\n",
+      "Epoch 951/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.0688 - accuracy: 0.9900\n",
+      "Epoch 952/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.0687 - accuracy: 0.9900\n",
+      "Epoch 953/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.0686 - accuracy: 0.9900\n",
+      "Epoch 954/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.0685 - accuracy: 0.9900\n",
+      "Epoch 955/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.0685 - accuracy: 0.9900\n",
+      "Epoch 956/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.0684 - accuracy: 0.9900\n",
+      "Epoch 957/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.0683 - accuracy: 0.9900\n",
+      "Epoch 958/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.0681 - accuracy: 0.9900\n",
+      "Epoch 959/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.0680 - accuracy: 0.9900\n",
+      "Epoch 960/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.0679 - accuracy: 0.9867\n",
+      "Epoch 961/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.0679 - accuracy: 0.9900\n",
+      "Epoch 962/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.0677 - accuracy: 0.9900\n",
+      "Epoch 963/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.0678 - accuracy: 0.9867\n",
+      "Epoch 964/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.0675 - accuracy: 0.9900\n",
+      "Epoch 965/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.0675 - accuracy: 0.9900\n",
+      "Epoch 966/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.0674 - accuracy: 0.9900\n",
+      "Epoch 967/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.0673 - accuracy: 0.9867\n",
+      "Epoch 968/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.0672 - accuracy: 0.9900\n",
+      "Epoch 969/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.0671 - accuracy: 0.9900\n",
+      "Epoch 970/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.0670 - accuracy: 0.9900\n",
+      "Epoch 971/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.0670 - accuracy: 0.9900\n",
+      "Epoch 972/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.0670 - accuracy: 0.9900\n",
+      "Epoch 973/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.0667 - accuracy: 0.9900\n",
+      "Epoch 974/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.0667 - accuracy: 0.9900\n",
+      "Epoch 975/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.0666 - accuracy: 0.9900\n",
+      "Epoch 976/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.0664 - accuracy: 0.9900\n",
+      "Epoch 977/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.0665 - accuracy: 0.9900\n",
+      "Epoch 978/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.0664 - accuracy: 0.9900\n",
+      "Epoch 979/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.0662 - accuracy: 0.9900\n",
+      "Epoch 980/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.0661 - accuracy: 0.9900\n",
+      "Epoch 981/1000\n",
+      "3/3 [==============================] - 0s 6ms/step - loss: 0.0660 - accuracy: 0.9900\n",
+      "Epoch 982/1000\n",
+      "3/3 [==============================] - 0s 5ms/step - loss: 0.0659 - accuracy: 0.9900\n",
+      "Epoch 983/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.0659 - accuracy: 0.9900\n",
+      "Epoch 984/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.0658 - accuracy: 0.9900\n",
+      "Epoch 985/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.0657 - accuracy: 0.9900\n",
+      "Epoch 986/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.0657 - accuracy: 0.9900\n",
+      "Epoch 987/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.0655 - accuracy: 0.9867\n",
+      "Epoch 988/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.0654 - accuracy: 0.9900\n",
+      "Epoch 989/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.0653 - accuracy: 0.9900\n",
+      "Epoch 990/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.0653 - accuracy: 0.9867\n",
+      "Epoch 991/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.0652 - accuracy: 0.9867\n",
+      "Epoch 992/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.0651 - accuracy: 0.9900\n",
+      "Epoch 993/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.0650 - accuracy: 0.9900\n",
+      "Epoch 994/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.0649 - accuracy: 0.9900\n",
+      "Epoch 995/1000\n",
+      "3/3 [==============================] - 0s 5ms/step - loss: 0.0649 - accuracy: 0.9900\n",
+      "Epoch 996/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.0647 - accuracy: 0.9900\n",
+      "Epoch 997/1000\n",
+      "3/3 [==============================] - 0s 4ms/step - loss: 0.0646 - accuracy: 0.9900\n",
+      "Epoch 998/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.0645 - accuracy: 0.9900\n",
+      "Epoch 999/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.0646 - accuracy: 0.9867\n",
+      "Epoch 1000/1000\n",
+      "3/3 [==============================] - 0s 3ms/step - loss: 0.0644 - accuracy: 0.9900\n"
      ]
-    }
-   ],
-   "source": [
-    "loss, accuracy = model.evaluate(X_test, y_test_cat)\n",
-    "print('Accuracy :', accuracy)"
-   ]
-  },
-  {
-   "cell_type": "markdown",
-   "metadata": {},
-   "source": [
-    "## 5. Make Predictions\n",
-    "\n",
-    "Now that we have a trained model, we can use it to predict class probabilities for\n",
-    "new digits—images that weren’t part of the training data, like those from the test set."
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 91,
-   "metadata": {},
-   "outputs": [
-    {
-     "data": {
-      "text/plain": [
-       "array([7, 2, 1, 0, 4, 1, 4, 9, 6, 9])"
-      ]
-     },
-     "execution_count": 91,
-     "metadata": {},
-     "output_type": "execute_result"
-    }
-   ],
-   "source": [
-    "predictions=model.predict(X_test[0:10])\n",
-    "np.argmax(predictions, axis=1)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 92,
-   "metadata": {},
-   "outputs": [
-    {
-     "data": {
-      "text/plain": [
-       "array([7, 2, 1, 0, 4, 1, 4, 9, 5, 9], dtype=uint8)"
-      ]
-     },
-     "execution_count": 92,
-     "metadata": {},
-     "output_type": "execute_result"
-    }
-   ],
-   "source": [
-    "y_test[0:10]"
-   ]
-  },
-  {
-   "cell_type": "markdown",
-   "metadata": {},
-   "source": [
-    "Each number of index $i$ in that array corresponds to the probability that digit image\n",
-    "`X_test[0]` belongs to class $i$. This first test digit has the highest probability score (0.9956499, almost 1) at\n",
-    "index 7, so according to our model, it must be a 7:"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 95,
-   "metadata": {},
-   "outputs": [
-    {
-     "data": {
-      "text/plain": [
-       "array([1.69774721e-04, 1.92234711e-06, 3.67960223e-04, 1.45162235e-03,\n",
-       "       1.42459385e-05, 1.13506248e-04, 2.51079655e-07, 9.95649993e-01,\n",
-       "       1.35322000e-04, 2.09532795e-03], dtype=float32)"
-      ]
-     },
-     "execution_count": 95,
-     "metadata": {},
-     "output_type": "execute_result"
-    }
-   ],
-   "source": [
-    "predictions[0]"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 96,
-   "metadata": {},
-   "outputs": [
+    },
     {
      "data": {
       "text/plain": [
-       "7"
+       "<keras.callbacks.History at 0x7f472477ad50>"
       ]
      },
-     "execution_count": 96,
+     "execution_count": 30,
      "metadata": {},
      "output_type": "execute_result"
     }
    ],
    "source": [
-    "predictions[0].argmax()"
+    "BATCH_SIZE=128\n",
+    "num_train_examples = X.shape[0]\n",
+    "num_train_examples\n",
+    "model.fit(X, y_cat, epochs=1000, steps_per_epoch=math.ceil(num_train_examples/BATCH_SIZE))"
    ]
   },
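+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "The training log above shows that the loss decreases only very slowly after a few hundred epochs, while the accuracy plateaus around 0.98-0.99. As an optional sketch (not part of the original training run), an `EarlyStopping` callback can end training automatically once the monitored training loss stops improving, so that the 1000 epochs act only as an upper bound:"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "from tensorflow.keras.callbacks import EarlyStopping\n",
+    "\n",
+    "# Stop when the training loss has not improved by at least min_delta\n",
+    "# for `patience` consecutive epochs, and keep the best weights seen so far.\n",
+    "early_stop = EarlyStopping(monitor='loss', min_delta=1e-4, patience=25,\n",
+    "                           restore_best_weights=True)\n",
+    "\n",
+    "# Sketch of re-running the training with the callback attached:\n",
+    "# model.fit(X, y_cat, epochs=1000,\n",
+    "#           steps_per_epoch=math.ceil(num_train_examples / BATCH_SIZE),\n",
+    "#           callbacks=[early_stop])"
+   ]
+  },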
   {
    "cell_type": "markdown",
    "metadata": {},
    "source": [
-    "We can check that the test label agrees:"
+    "## 4. Evaluate Network\n",
+    "\n",
+    "Finally, we can use the following command to evaluate the model:"
    ]
   },
   {
    "cell_type": "code",
-   "execution_count": 100,
+   "execution_count": 31,
    "metadata": {},
    "outputs": [
     {
-     "data": {
-      "text/plain": [
-       "7"
-      ]
-     },
-     "execution_count": 100,
-     "metadata": {},
-     "output_type": "execute_result"
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "10/10 [==============================] - 0s 2ms/step - loss: 0.0643 - accuracy: 0.9900\n",
+      "Accuracy on test dataset: 0.9900000095367432\n"
+     ]
     }
    ],
    "source": [
-    "y_test[0]"
+    "test_loss, test_accuracy = model.evaluate(X, y_cat, steps=math.ceil(num_train_examples/32))\n",
+    "print('Accuracy on test dataset:', test_accuracy)"
    ]
   }
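+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "As a final, optional sketch (not part of the original notebook, and assuming NumPy is available as `np`), the trained network can also be used for predictions: `model.predict` returns one probability per class for each sample, and the index of the largest probability is the predicted class. Here we simply predict on the first ten training samples:"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "import numpy as np\n",
+    "\n",
+    "# One row of class probabilities per sample.\n",
+    "probabilities = model.predict(X[0:10])\n",
+    "\n",
+    "# Predicted class = index of the largest probability per row;\n",
+    "# the true classes are recovered from the one-hot encoded y_cat.\n",
+    "predicted_classes = np.argmax(probabilities, axis=1)\n",
+    "true_classes = np.argmax(y_cat[0:10], axis=1)\n",
+    "predicted_classes, true_classes"
+   ]
+  }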
  ],
-- 
GitLab