    "callbacks = [\n",
    "    keras.callbacks.ModelCheckpoint(filepath=\"unet_segmentation.keras\", save_best_only=True, monitor=\"val_loss\"),\n",
    "    tf.keras.callbacks.TensorBoard(logdir, histogram_freq=1)\n",
    "] \n",
    "\n",
    "history = model.fit(train_input_imgs, train_targets,\n",
    "    epochs=50,\n",
    "    callbacks=callbacks,\n",
    "    batch_size=64,\n",
    "    validation_data=(val_input_imgs, val_targets))"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "Let’s display our training and validation loss:"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 7,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "image/png": "iVBORw0KGgoAAAANSUhEUgAAAYgAAAEWCAYAAAB8LwAVAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjUuMSwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy/YYfK9AAAACXBIWXMAAAsTAAALEwEAmpwYAAA9/UlEQVR4nO3dd3xV9d3A8c83exBISMIKI4BMFRkRcNZd3LZuW6u21dZRR2tb+zx9rI9d9umw2lpHLa22VrS4aIu1qLiqCEERAZERwCTM7L3u/T5//E7gEgPcwD25ucn3/Xrd17ln/064nO/5zSOqijHGGNNRXLQTYIwxpmeyAGGMMaZTFiCMMcZ0ygKEMcaYTlmAMMYY0ykLEMYYYzplAcIYQET+JCI/CnPbzSJymt9pMibaLEAYY4zplAUIY3oREUmIdhpM72EBwsQMr2jn2yKyUkTqReQPIjJYRF4UkVoReVlEskK2P09EVotIlYi8JiKTQtZNE5H3vP2eAlI6nOscEVnh7fu2iEwJM41ni8j7IlIjIsUicleH9cd7x6vy1l/tLU8VkV+KyBYRqRaRt7xlJ4lISSd/h9O873eJyHwR+YuI1ABXi8hMEXnHO8c2EfmtiCSF7H+4iCwSkQoR2SEi/yUiQ0SkQUSyQ7abLiK7RCQxnGs3vY8FCBNrLgROB8YD5wIvAv8F5OJ+zzcDiMh44EngVm/dQuDvIpLk3SyfB/4MDAT+5h0Xb99pwFzga0A28DCwQESSw0hfPfAlIBM4G7heRC7wjjvKS+9vvDRNBVZ4+/0CmAEc66XpO0AwzL/J+cB875xPAAHgNiAHOAY4FbjBS0MG8DLwL2AYcBjwiqpuB14DLgk57pXAPFVtDTMdppexAGFizW9UdYeqlgJvAu+q6vuq2gQ8B0zztrsU+KeqLvJucL8AUnE34NlAIvBrVW1V1fnAspBzXAc8rKrvqmpAVR8Dmr399ktVX1PVD1U1qKorcUHqM97qK4CXVfVJ77zlqrpCROKALwO3qGqpd863VbU5zL/JO6r6vHfORlVdrqpLVLVNVTfjAlx7Gs4BtqvqL1W1SVVrVfVdb91jwBcBRCQeuBwXRE0fZQHCxJodId8bO5nv530fBmxpX6GqQaAYyPPWlereI1VuCfk+CviWV0RTJSJVwAhvv/0SkVkistgrmqkGvo57ksc7xsZOdsvBFXF1ti4cxR3SMF5E/iEi271ip5+EkQaAF4DJIjIal0urVtWlB5km0wtYgDC91VbcjR4AERHczbEU2AbkecvajQz5Xgz8WFUzQz5pqvpkGOf9K7AAGKGqA4CHgPbzFANjO9mnDGjax7p6IC3kOuJxxVOhOg7J/CCwFhinqv1xRXChaRjTWcK9XNjTuFzElVjuoc+zAGF6q6eBs0XkVK+S9Vu4YqK3gXeANuBmEUkUkc8DM0P2/T3wdS83ICKS7lU+Z4Rx3gygQlWbRGQmrlip3RPAaSJyiYgkiEi2iEz1cjdzgV+JyDARiReRY7w6j3VAinf+ROD7wIHqQjKAGqBORCYC14es+wcwVERuFZFkEckQkVkh6x8HrgbOwwJEn2cBwvRKqvox7kn4N7gn9HOBc1W1RVVbgM/jboQVuPqKZ0P2LQSuBX4LVAIbvG3DcQNwt4jUAnfiAlX7cT8BzsIFqwpcBfVR3urbgQ9xdSEVwM+AOFWt9o75KC73Uw/s1aqpE7fjAlMtLtg9FZKGWlzx0bnAdmA9cHLI+v/gKsffU9XQYjfTB4m9MMgYE0pEXgX+qqqPRjstJrosQBhjdhORo4FFuDqU2minx0SXFTEZYwAQkcdwfSRuteBgwHIQxhhj9sFyEMYYYzrVawb2ysnJ0fz8/GgnwxhjYsry5cvLVLVj3xqgFwWI/Px8CgsLo50MY4yJKSKyz+bMVsRkjDGmUxYgjDHGdMoChDHGmE5ZgDDGGNMpCxDGGGM6ZQHCGGNMpyxAGGOM6VSv6QdhjDGxJBhUWgJB92nb82luC1Lf0kZdUxt1zW5a603jBFKT4klOjCc1MZ6UxDhSE+PJ7pfM1BGZEU+jBQhjjDlIwaDySUUDH22roaqxlTgBESFOhDiBOBGa2wJsr25me00TO0I+ZXUtEUvH1BGZPH/jcRE7XjsLEMaYPq81EGR7dRPFlQ2UVDayo7qJ+HghLTGetKQEUpPiSUuKJyUxnuKKBtZsq2HN1ho+2lZDfUsgrHNkpycxuH8Kg/snM2X4AHL6JZOSGE9yQhxJCXEkxXvThDjSkxPISE6gX0oC/ZITyEhOJD05HgUaWwM0tQZoagnS1BagsSVAYrw/tQUWIIwxMam5LcDWqiZKKxsprWqgtKqJ+uY2VEFRN1VFgUBQaQ0EaQ24Yp3WtiCtgSD1zQFKqxrZVt1IsAsDW/dLTmDS0AwumjGcycP6M3noAHIzkgmqElR3bvcdEuOF3IxkkhPiI3LdifFx9E9JjMixDsTXACEic4D7gHjgUVW9p8P6Ubh38ebiXrP4RVUt8dYFcK9gBPhEVc/zM63GmOgKBJXSykY27qpj4646tpQ30NASoKktQHNrkGZv2tgaYEdNEztrm/faP04gLSkBARAQXHGPCMSLkJQQR2J8HAnxQlK8+56aFM+s0QMZnpXK8Ky03dPBA5IJBqGhpY2GlgCNrQE3bQkwLDOFEVlpxMVJNP5M3cq3ACEi8cADuPfflgDLRGSBqq4J2ewXwOOq+piInAL8FLjSW9eoqlP9Sp8xpvs0tQbYVdtMeX0L5XXNlNe1UFbvpttrmti4s45NZfU0twV37zMgNZF+yQmkJMaRnBBPcmIcKQnxZPdLYtLQDPIy08jLSiUvM5XhWakMGZAS8aKW1KR4siN6xNjiZw5iJrBBVYsARGQecD4QGiAmA9/0vi8GnvcxPcaYQ9QWCFLT1EZNY6trYeO1rqlvaaPWa3VTUd/invBrmtlZ6570a5vaOj1eWlI8gzKSGZvbjxPH5zI2N52xuf0Ym9uPrPSkbr4605GfASIPKA6ZLwFmddjmA+DzuGKozwEZIpKtquVAiogUAm3APar6fMcTiMh1wHUAI0eOjPgFGNPbNbUGKPEqZivqW3Z/KhtaKK9z05rGNmqaWqlpbA2rQjY5IY5B/ZMZlJHChCEZnDAul9yMZHL7JZOTkUR2ejLZ/dw0NSky5fLGH9GupL4d+K2IXA28AZQC7b/AUapaKiJjgFdF5ENV3Ri6s6o+AjwCUFBQYO9ONcYTDCq1zW1UN7RS3dhKVWML1Y2tbK9uYlNZPZvL69lc1sDW6kY6vnU4IU7ISk9iYFoSWemJ5Oek0T8lkf6piQxITaR/SgIZKYlkpOxpZdP+SU9OIC0pHpHeXz7fF/gZIEqBESHzw71lu6nqVlwOAhHpB1yoqlXeulJvWiQirwHTgL0ChDF9SVsgyJaKBtZtr+XjHbVs3FVPTWPrnorUFleR2tDiinr21SonMy2RUdnpHJ2fRX7OcEbnpDM8K5Xs9GSy0pPon5JgN3gD+BsglgHjRGQ0LjBcBlwRuoGI5AA
VqhoEvodr0YSIZAENqtrsbXMc8H8+ptWYqAsElfK69g5V3rS6idKqRj7eXsuGXXW0eJW4IjAiK42stETSkhIY0j9xd1v9tKQE+qck0D81kcy0JAZ4T/6ZaYkMykgmM83K9k14fAsQqtomIjcBL+Gauc5V1dUicjdQqKoLgJOAn4qI4oqYbvR2nwQ8LCJB3HhR93Ro/WRMTKtvbmP11ho+LK1mVWk1H5ZWs7msnrYOj/1xAkP6pzBucAbHj8th/OAMJgzO4LBB/az83vhOtGMBZIwqKChQeye16SlUlerGVkqrGimtbGRrVaP77uUGisrqd5f9D8pI5si8AUwYksHQASleb9sUhgxIIadfMvF9oL29iR4RWa6qBZ2ti3YltTExZ3t1E+9/Usn7xVWs3lpNXVMbTa1u2IOm1gBNXmeulpA2/eBa9+RlpjImN51zjxrGkXkDODJvAIP6p0TpSozZPwsQxnQiGFTK61vYXt3E9pomNpfVs6K4ivc+qWRbdRMASfFxTByaQWZaEoMTXK/clAQ3wmZKYjy5GcnkZaYyLDOVvKxUstOTrPLXxBQLEKbPK69rZklRBW9vLGPt9lq2Vzexs7aJ1sDexa/Ds1IpyB/ItBGZTBuZyeRh/SM2vo4xPZEFCNPnVDW0sHRTBW9vLGdJUTlrt9cCbgC2I/L6M2v0QAYPSNldHzCkfwp5Wank9EuOcsqN6V4WIEyvtrOmiVVbq1ldWsOqrdWsKq2htKoRgJTEOI7OH8i5Rw3j2LHZHJk3gASfhk02JhZZgDC9QktbkKKyOj7aVsNH22p3T8vq9oz4OTonnWkjM/ni7FFMG+mKiayIyJh9swBhYlJTa4B3N1XwxrpdvLOxnPU7a3fXGSQlxDF+cD9OmpDL4cP6c/iwAUwamkFGN42hb0xvYQHCxIRgUNm4q47X1+3ijfVlvFtUTnNbkKSEOI7Oz+Irx49h0tAMJg3tz5icdCsqMiYCLECYHqWpNcCGne6FMUW76ndPN5XV09jqxnEcm5vOFbNG8pnxucwanW09io3xiQUIE3U7app4de1OXl27k7fWl+0OBCKuaemYnH7MHpPNhCH9OH5cLnmZqVFOsTF9gwUI0+0CQWVlSRWLP97Fq2t3sKq0BoC8zFQumjGc2WOyOWxQP0Zlp5GSaLkDY6LFAoTpFjtqmlz9wbpdvLWhjKqGVuIEpo/M4jtzJnDqxMGMH9zPehob04NYgDC+2FnbxHtbqli+pYI315ft7oyWm5HMqRMHc+L4HE4cl2uvlTSmB7MAYQ6ZqrJ6aw3vfVLJ8i2VvPdJJcUVrjNaUnwcM0ZlcceZEzlxXC6ThmZYLsGYGGEBwhy0nbVNPLO8lKcLi9lUVg/A4P7JTB+ZxZdm5zN9VCaHDxtg9QjGxCgLEKZLAkHl9XU7mbe0mFfW7iQQVGaOHsj1J43l2LHZ5GWmWg7BmF7CAoQJy8fba3lhRSnPvlfK9pomstOT+Orxo7nk6BGMze0X7eQZY3zga4AQkTnAfbhXjj6qqvd0WD8K9x7qXKAC+KKqlnjrrgK+7236I1V9zM+0mk/bVt3IghVbee79UtZuryU+TjhhXA53nTeZUyYOJinBeisb05v5FiBEJB54ADgdKAGWiciCDu+W/gXwuKo+JiKnAD8FrhSRgcAPgAJAgeXevpV+pdc4bYEgf1+5laeWFfPupgpU4agRmdx17mTOnjKM3Awb8tqYvsLPHMRMYIOqFgGIyDzgfCA0QEwGvul9Xww8733/LLBIVSu8fRcBc4AnfUxvn9bcFuCZ5aU8+PoGiisayc9O45ZTx3H+1DxG56RHO3nGmCjwM0DkAcUh8yXArA7bfAB8HlcM9TkgQ0Sy97FvXscTiMh1wHUAI0eOjFjC+5LGlgBPLv2ER94oYntNE0cNH8Cd5xzOqRMHERdnlc3G9GXRrqS+HfitiFwNvAGUAoFwd1bVR4BHAAoKCvQAm5sQjS0B/vT2Zh59s4jy+hZmjh7Izy+ewvGH5VgrJGMM4G+AKAVGhMwP95btpqpbcTkIRKQfcKGqVolIKXBSh31f8zGtfUZbIMj85SXc+/I6dtQ0c+L4XL5xymEcnT8w2kkzxvQwfgaIZcA4ERmNCwyXAVeEbiAiOUCFqgaB7+FaNAG8BPxERLK8+TO89eYgqSqL1uzg/176mA0765g+MpPfXD6dmaMtMBhjOudbgFDVNhG5CXezjwfmqupqEbkbKFTVBbhcwk9FRHFFTDd6+1aIyA9xQQbg7vYKa9N1y7dU8NOFayncUsmY3HQe+uIMPnv4YCtKMsbsl6j2jqL7goICLSwsjHYyepRdtc38ZOFHPPd+KbkZydx22nguKRhub1szxuwmIstVtaCzddGupDY+CAaVJ5d9ws9eXEtja4CbTj6MG04eS1qS/XMbY8Jnd4xeZvXWav77uVWsKK5i9piB/OiCIzlskA2FYYzpOgsQvUR9cxu//Pc6/vT2JrLSkrj30qO4YGqe1TMYYw6aBYheYFVpNTc/+T6byuu5fOZIvvvZiQxIS4x2sowxMc4CRAxTVeb+ZzM/e3EtWemJPPHVWRw7NifayTLG9BIWIGJUeV0z356/klfX7uS0SYP4v4uOYqC9vtMYE0EWIGLQ2xvKuPWpFVQ1tHLXuZO56th8q2swxkScBYgYEggqv355Hb9dvIHROen86ZqZTB7WP9rJMib6gkGo3AT9BkFyRnj7NNfBxlcgrwAGfGosUIMFiJhRVtfMLfPe5z8byrloxnDuPv9w69dg+qZAG5Stg20f7PlsXwktdTDyGLjmRQgnR/2vO+D9P7vv2YfB6M/A6BPdJ62HDUGjCq//DJpqYOgUGHoUZI+DeH/vAXaHiQHLNldw01/fo6qhlf+7cAqXHD3iwDv1da1N8OYvYMBwGH8mZAyOdorMoQi0wabX4MP5sPaf0FzjliemwZAjYao3zNvSR2Dl03DUpfs/3tb34f2/wLQvwqDJUPQ6rHwKCv8ACAyaBAkpEGyFQMhHg3D0V+D428ILQpHy3uPw2k8hLtGlCVz6Bh/hAsaI2Qe+5oNgAaIHU1UefXMT9/xrLSOyUvnjDVakFJa2FvjbVbDuX96CW2HETJh4Nkw8B7LHRuY8FUXw7/9xT3MTzoLBh3f9plG7Aza8DOv/DXU7IW86DC+A4UdD/zz/b0LBIGxbAQ3l7sk5oQe9MTAYhJKl8OHfYPXz0FAGyQNg0nkurcOmuif/uPiQ7Qth0Z0w8ax9FzWpwovfhfQc+OxPIGUAHHOjCwCl78GmN9x5NQjxSRCX4KbxiVCzFV75X6jbAZ/9KcR1w7A15RvhX99zOZwvPgPlG7yc00o3/XA+7FrnS4CwsZh6qOrGVr79tw/495odnHnEEH520RT6p1jfhgMKtML8a+Cjv8PZv4KRs90T59p/uP9MALkT3VNjYqq7ISZ408RUGDoVxp9x4PNUFcMfz4T6XdDWDChkjvKC0Nnuia5j9l8VAi2wdQVsWOSCQnua+g2BzBGw/UNoa3LLMoZC3gwYdzpMvyr8YPHWr93NNHei++SMh5
SQB4u6nbDxVdjwiiuDbyh3y1My4ciL3NP4sOmRD06qULsNdn3sPmUfuxtbTQkg3vkEJM59b6p2N+KEFJhwJhxxkftb7C+IlRTCo6fCsTfDGT/sfJuVT8Oz18J5v4HpX+raNQSD8O/vw5IH4MhL4ILfucDRmYYKePVHLrcy6lgYc5KbJnXhDY2BNvjjHFekdv07ndeVBIPQXA2pWZ9eF4b9jcVkAaIHKq5o4Kq5S/mkooHvnTWJLx/Xi1opNVXDpjfhk3dgxCyYfF7kjh0MuP/4q56BOT+D2V/fe31VMXy80H2qS92NuK3JFUe1Ne3Juh93C5z6gz1Pph3VbHPBoaECrlrgbuTrXoS1C6HoNQg0u5tt2kAXPFob95xLg+4YEu+uf9xpMO4MV1Qg4nI/O1a5G13JMiheAlWfwBV/Cy9wbVsJD58ACO517p6MYZA7ARor9gSltBw47FQ47DT3FL3yKRdM25ogZ4ILFFMuhf5Dw/832JfFP4ElD+4pGgJ3ztyJMGCECwoadGlW3fP0fthp+88NdOaFG+GDee6Gmjt+73XNdfDbAug3GK5dfHA5AFV461fwyt3u3+7ixyApbc/6YACW/wle/aGrM8ib7v5dAs2uiGjELBcsxp0Gw6bt/1yv/xwW/wgu/IML3j6wABFDPtpWw1Vzl9LUGuD3Xypg1pjsaCfp0ARa3c2uaDFsXAyly0EDe24IU78AZ/6sazeAzgSD8MIN8MGTcPrd7ibfVW3NLitf+AdXb3Hh7z+drvoy+ONZUFMKVz4PI47ee317y5j1i1xgSEiBxBQ3bf8+cCyMPTm8J762FrhviruRfun5A2///I2w+lm4dRU0VcGutXs/sSemwdhT3I13yJRP3yAbq2DN87Dir1D8rstdff0tyDnswOfel9XPwd+uhvFz3HlzJ7gA1G+QP0VodbvgNzNg+Az44rN7n+OVH7q6qS//G0Z2fANyFxX+Ef5xm7vhXzHP/XsWL4WFt7sgPOp4OOv/XNFjS4ML9kWvuc+2lYC6HMycezrPVZS+B384HSZfABf94dDSuh8WIGLEOxvLue7xQvqlJPDYl2cyfvAh3jSjbccaePw8Vwwjce5paczJ7uY4bDq8da/7z5qV756Q8qYf3HmCQfjHLa4i7+Tvw2e+fWjpXvp7V0adOxEufxKyRrnljZXwp3OhfL0rC84//tDOE643fuGeRm9Y4ipP96W+DH412VW8nvOrQz/vjjXw0PFw/K1w6p0Hd4zyjfDwZ2DQRNe6aF/FMZG25CH413fh0idg0jluWcUmeGAWTD7fBf9IWP28y7Vmj3N1IiuecDnKM34ER1y47wBYXw5v3w//uc/ViV346N65iZYGePhEaG2A6/9z0MVH4bAAEQNe/HAbt8xbwcjsNB7/8kyGZaZGO0mHpm4X/P4UV+Z+5s9gzGc6/5Fv/o/7D1a3A075H1d2HPpUG2h15fLFS6Fys3vSSunvnuyT+7vPx/90WfoTvwOn/Hdk0r/xVXj6andDu+wJV2fx5wtcWi5/0j0Jd5f6crh3Mhx1GZx73763e+Pnrsz7xqXuKT0S/nKhqye45YOuF8e0Nrkn4KpP4OtvQubIyKQpHIE2V9TWUuf+HompMO8LLhf7jULoPyxy59q42B070Ayzb4DPfCf8HPGmN+DZr7mHqFO+v+f3/89vwbJH4UsL3P8dH1mA6OH+vGQLd76wimkjMpl79dFkpkVwyIy2FvjgrzD2VFcJeiAtDbD0YWiph6MuP7gWP61NLuewbSVcs/DAOYPGSlhwM3y0wLXUmHkdbH3PBYXS5e4pCiAxHdoa95TjhzruFjjtfyNbZLFrHTx5KVSXuNYyZevgkj+7MvHutuBmV0fwzY86b6MfaIVfH+lyGFc+F7nztlfoXvMvGHVM1/b95+2w7Pdw+TxXydzdNr0Jj50DJ33PNVZ4/Hz3EHLi7ZE/V0URIDBwdNf3baiAv9/i/f5PdJXfC26CY26Cz/444kntKGoBQkTmAPfhXjn6qKre02H9SOAxINPb5g5VXSgi+cBHwMfepktUtUON495iMUCoKve+vJ77X1nPqRMH8dsrppOatI+K0YMRDMIzX3Fl0gkp7unm+Nv2btGyJzGw5gXXQqO6eE8dQf4Jrpx00rnuKezAFwXPfc3dzC5+DA6/ILy0qroion/d4QKCxHvtu2e5JqojZrk+DaoueDXX7vnExbnWR36UZzdUuCazm99yxQBHXBj5c4Rjxxp48BhX1HPCtz69/sP57t/6iqdh/Gcjd97mOvjFOFdZfe6vw99v9fPu79ZNN7l9+ts1rlFC/2Hu93zDu64eqKdRdZ32Xvyu+/0POhyufbVb0hqVACEi8cA64HSgBPd+6ctVdU3INo8A76vqgyIyGVioqvlegPiHqh4R7vliMUDc9/J67n15HRfPGM5PP39kZF8Fqgovfsd1HDrhW+4peOVTruXKyd+D6VfvaYa5Y7X7YW5+EwYf6SrWska7nMd7j7uinZQB7slmxlWuY9K+tJeXH2xdQFWxC1BDj+pac0A/BQOuCCySxRIH4/HzXa7m1pWfLst/9HTXtPWm5ZFvm//Mta5J7u3rISGM3G1Fkat3yJ3QvfUOnakuda2WWhv2ro/oqco2uBZSx90SuWLCA9hfgPCzl8dMYIOqFqlqCzAPOL/DNgq0P84OALb6mJ4eZe5bm7j35XVcOH04P7twSuTfE/3Gz11wOPYb7qnz84+4Zn25E1z55oPHuKe8hd+Bh05wTSvP/hV87XXXVrv/UBdYvvE+XPV315zvvcddpeUfz3K5jUDb3udc84ILDkdecvDZ+MwRXW8r7re4+OgHB3A5wNqt7u8cqnS569g182v+dNyacolrEbVh0YG3bWt2LZYkDi6aG93gAK7fwLn3u5zMxLOjm5Zw5Bzm+lZ0U3A4ED9zEBcBc1T1q978lcAsVb0pZJuhwL+BLCAdOE1Vl3s5iNW4HEgN8H1VfbOTc1wHXAcwcuTIGVu2bPHlWiLt6WXFfOeZlcw5fAi/vWJa5IPDsj/AP78JR10B5z+w901D1WW5F93pemRKHBR8GU7+7wOPP9NQ4VppLH3EVTz2Hw4zv+o6cVVtgblnutzFVX/vmdn4WBcMuqfh1Cy49pU9y5+9zvXB+OaazosPD1WgDX45AfKPg0se3/+2C7/j6rCiVe9gumx/OYhoD7VxOfAnVf2liBwD/FlEjgC2ASNVtVxEZgDPi8jhqloTurOqPgI8Aq6IqbsTfzD+uXIbdzy7khPG5XDf5VMjHxxWP+9yCOPnwHn3f/qJUsQ9SY07wwWK7MNcO+1wpA10OZLZN7hhLN59CF6+C167x9VPpOe4Fj8WHPwRFwezr3ft7IuXuT4YtTtg1bNufCA/ggO4osgjLnQtxZqqXXFjZ7a87YLD7BssOPQSfhYxlQKhzWaGe8tCfQV4GkBV3wFSgBxVbVbVcm/5cmAj0KFLZOxZ/PFObn3qfaaPzOLhK2eQnBDBCmlwHXCevdZV6F70x/1n7+MTXXvwcINDqLh4F2Su+rvrrXrUZZA+CK54y
nV+Mv456nI3HtGS37n5wrkQbHMtv/w05VLXjHPNgs7XtzW7ljiZI11zTdMr+BkglgHjRGS0iCQBlwEdf12fAKcCiMgkXIDYJSK5XiU3IjIGGAcU+ZhW371bVM7X/7ycCUMymHvN0ZEfqnvHGtcWO/sw16sztOu/nwZPdm3zb1p6cMHGdE1yP5h+pauHqChyAWLcGZEbgHBf8qa7HuArn+p8/Vu/ds2Az763Z9UfmUPiW4BQ1TbgJuAlXJPVp1V1tYjcLSLtA/B8C7hWRD4AngSuVlcpciKwUkRWAPOBr6tqhV9p9dv6HbV85bFChmel8tg1MyM/6F57i6WEZDe0gI+9Lk0PMPM6QOHJK6B+J8z6mv/nFHGV1Zvfci2DQpWtdz3ij7jQjS9keg1f6yBUdSGwsMOyO0O+rwGO62S/Z4Bn/Exbd2lpC3LLvBUkJcTxl6/OIrtfh5Eo1y6EXR+59uYtdd601k2nXLJnnPv9Wf9v10T1zJ9HZmA107NljXLDln+0wI1pNPaU7jnvkRe7dxKsmr9nrCtVNx5RYqobU8j0KtGupO71frVoHWu21fDIlTMYOqBDR7OWBph3BaCuY1hyhvsk9XPtthd8wxXbDD1q3ycItLkWSQPHQsE1vl6L6UGOudEFiNlf774X12SPda/nXPn0ngCx4gn3cHLufVb/1AtZgPDRu0XlPPzGRi47egRnHD7k0xs0VgIKZ/8SCr6y93/0hgr43Wx4/gbXf2FfHZRW/MWN2HnJn6Pf5tx0n5Gz3RhDOd3cdmPKpfDit13nyn6DXc/7kcfAtC6+V8HEhG54HVLfVNPUyjef/oCRA9P4n3Mmd75RY6WbpuV8+ikwbaB7KtuxypXvdqa5zo2zP2K2GwrD9C25E7r3tZcAh3/O5XZXPg0v/Zf7DZ57X/e8Wc10O8tB+OSuF1azvaaJv339GNKT9/Fnbg8Q+6pUnnCme2J785euWWnHoqZ3fuuGgLj0L91/ozB9U79c95KhZX9wdWUnfqfH9Po1kWdh3wf/WLmVZ98v5aaTD2P6yP20KDpQgABX8ZeW7Yqa2lr2LK/d7saSn3y+G8zOmO4y5VIXHLIP63zgQNNrWICIsG3Vjfz3c6s4akQmN51ygLdwNVW56f4CRNpAOOfXXlHTL/csX/wTN8TzqT841CQb0zUTznKtqC540HrN93JWxBRBwaBy+98+oKUtyK8vnUrigYbR2J2DyNz/dhPP8oqafuGKmuIT3dDAM7/mfwcpYzpKSnNDqphezwJEBM1bVsx/NpTzk88dyeicMHqTNlZCXIJr1nogc+5xb656/nrIGAJJGe7NVcYY4xMrYoqQptYA97+ynhmjsrh8ZhhvbgMXIFKzwqtgThvoXtiyYxVseBlO+OaBR181xphDYAEiQp5aVsz2mia+efp4JNwWRY1VXRsWY+LZMONqyJ0Es/b7gj1jjDlkVsQUAU2tAX732gZm5g/k2LHZ4e/YWAkpmV072bn3uTecxUV4JFhjjOnAchAR8OTST9hR08ytp48LP/cAe4qYusqCgzGmG1iAOEQu97CRWaMHcuzYnC7uXGUjrxpjeiwLEIfoL0u2sKu2mdtOP4gxcRqrDtzE1RhjosQCxCFobAnw0OtFHDMmm9ljulD3AK6TW3ON5SCMMT1WWAFCRJ4VkbNFxAJKiL8s2UJZ3UHmHpqq3dQChDGmhwr3hv874ApgvYjcIyJ9fnSuhpY2Hnp9I8cflsPM0QfRH6Gxyk0tQBhjeqiwAoSqvqyqXwCmA5uBl0XkbRG5RkT2+RICEZkjIh+LyAYRuaOT9SNFZLGIvC8iK0XkrJB13/P2+1hEPtv1S/PXn9/ZQnl9C7edPu7gDtA+zEZXm7kaY0w3CbvISESygauBrwLvA/fhAsaifWwfDzwAnAlMBi4XkY4vRvg+7l3V04DLcDkVvO0uAw4H5gC/847XI9Q3t/HwG0WcMC6HGaMOsjdzOCO5GmNMFIVbB/Ec8CaQBpyrquep6lOq+g1gXwMJzQQ2qGqRqrYA84DzO2yjQH/v+wBgq/f9fGCeqjar6iZgg3e8HuEvS7ZQUd9ycHUP7SxAGGN6uHB7Ut+vqos7W6GqBfvYJw8oDpkvAWZ12OYu4N8i8g0gHTgtZN8lHfbN63gCEbkOuA5g5MiR+7+CCFr44Tamj8zc/7seDiScob6NMSaKwi1imiwime0zIpIlIjdE4PyXA39S1eHAWcCfu9JSSlUfUdUCVS3Izc2NQHIOrK65jVVba7reKa6j3XUQAw49UcYY44Nwb8bXqmpV+4yqVgLXHmCfUiB0WNPh3rJQXwGe9o75DpAC5IS5b1S8t6WSQFCZNeYQR1JtrITk/hBvw2EZY3qmcANEvIQMMuRVGCcdYJ9lwDgRGS0iSbhK5wUdtvkEONU75iRcgNjlbXeZiCSLyGhgHLA0zLT6aummCuLj5NCKl8AbhykzImkyxhg/hPv4+i/gKRF52Jv/mrdsn1S1TURuAl4C4oG5qrpaRO4GClV1AfAt4PcichuuwvpqVVVgtYg8DawB2oAbVTXQ1Yvzw9JNFRyRN4D05EN88u/qUN/GGNPNwr3LfRcXFK735hcBjx5oJ1VdCCzssOzOkO9rgOP2se+PgR+Hmb5u0dQaYEVxFVcfl3/oBzuYob6NMaYbhRUgVDUIPOh9+qwVxVW0BILMzI/Am9waK6H/sEM/jjHG+CSsACEi44Cf4jq8pbQvV9UxPqWrR1q6qQIRODoSAcKG+jbG9HDhVlL/EZd7aANOBh4H/uJXonqqpZsqmDikPwPS9jm6SHhUD/5lQcYY003CDRCpqvoKIKq6RVXvAs72L1k9T2sgyPItlcw6mIH5Omqpg2CbtWIyxvRo4VZSN3sd2NZ7LZNK2fcQG73Sh6XVNLYGDm7k1o5smA1jTAwINwdxC24cppuBGcAXgav8SlRPtHRTBUCEAkSVm1qAMMb0YAfMQXid4i5V1duBOuAa31PVAy3dVMHY3HRy+iUf+sEsB2GMiQEHzEF4HdSO74a09FiBoLJscwUzR3fxtaL7Yu+CMMbEgHDrIN4XkQXA34D69oWq+qwvqephPtpWQ21TW2QqqMFyEMaYmBBugEgByoFTQpYp0CcCRETrH8CG+jbGxIRwe1L3yXqHdks3VTBiYCrDMlMjc8DGSohPhsQIHc8YY3wQbk/qP+JyDHtR1S9HPEU9jKqydHMFJ08YFLmDto/kumeAXGOM6XHCLWL6R8j3FOBz7Hk9aK+2cVcdFfUtkat/AOtFbYyJCeEWMT0TOi8iTwJv+ZKiHmZJUYTrH8CG+jbGxISwX+/ZwTgggmUuPdfSTRUM7p/MqOy0yB3UAoQxJgaEWwdRy951ENtx74jo1VSVpZtc/weJZH1BYyUMOTJyxzPGGB+EW8SU4XdCeqLiika21zRFtngJrA7CGBMTwipiEpHPiciAkPlMEbnAt1T1EEs2lQNEtoK6rQVa6y1AGGN6vHDrIH6gqtXtM6paBfzgQDuJyBwR+VhENojIHZ2sv1dEVnifdSJSFbIuELJuQZjpjKilmyoY
mJ7EuEERHLh2dye5zMgd0xhjfBBuM9fOAsl+9/UG+XsAOB0oAZaJyALvPdQAqOptIdt/A5gWcohGVZ0aZvp8sXxLJQWjsiJf/wCWgzDG9Hjh5iAKReRXIjLW+/wKWH6AfWYCG1S1SFVbgHnA+fvZ/nLgyTDT47uWtiBbyuuZOCTC1S+7h/rOjOxxjTEmwsINEN8AWoCncDf6JuDGA+yTBxSHzJd4yz5FREYBo4FXQxaniEihiCzZV32HiFznbVO4a9eusC4kXJ9UNBBUyM9Jj+hxLQdhjIkV4bZiqgc+VYcQQZcB872hxduNUtVSERkDvCoiH6rqxg7pegR4BKCgoOBTQ4Ecik1lbtDa0RYgjDF9VLitmBaJSGbIfJaIvHSA3UqBESHzw71lnbmMDsVLqlrqTYuA19i7fsJ3m/0OEPYuCGNMDxduEVOO13IJAFWt5MA9qZcB40RktIgk4YLAp1ojichEIAt4J2RZlogke99zgOOANR339VNRWT1ZaYlkpiVF9sBNVYBAyoADbWmMMVEVboAIisjI9hkRyaeT0V1DqWobcBPwEvAR8LSqrhaRu0XkvJBNLwPmqWro8SbhKsY/ABYD94S2fuoOm8vqI1//AC4HkTIA4uIjf2xjjImgcJu5/jfwloi8DghwAnDdgXZS1YXAwg7L7uwwf1cn+70NRHUsik1l9Rx7WIReMRqqfahvY4zp4cKtpP6XiBTggsL7wPNAo4/piqqGlja21zQxOtunHIRVUBtjYkC4g/V9FbgFV9G8ApiNqzM4ZT+7xazNZQ0AjM71I0BUWYAwxsSEcOsgbgGOBrao6sm4FkVVfiUq2jaXuxZM+ZaDMMb0YeEGiCZVbQIQkWRVXQtM8C9Z0eVbHwjwKqkzI39cY4yJsHArqUu8fhDPA4tEpBLY4leiom1TWT2DMpJJTw73zxOmYNA1c7UchDEmBoRbSf057+tdIrIYGAD8y7dURdmmsnp/cg8ttaBBCxDGmJjQ5UdkVX3dj4T0JJvL6jl98uDIH9iG2TDGxJCDfSd1r1Xd2Ep5fYt/9Q9g/SCMMTHBAkQH7WMw+dOLuspNLQdhjIkBFiA6aG/BNMbXHIQFCGNMz2cBooNNZfWIwMjstMgf3AKEMSaGWIDoYFNZPXmZqSQn+DCYng31bYyJIRYgOthc7lMTV3B9IBJSITHFn+MbY0wEWYAIoaps2uVjgLBhNowxMcQCRIjy+hZqm9t8DBBVFiCMMTHDAkSITX42cQV7F4QxJqZYgAjhaxNXsByEMSam+BogRGSOiHwsIhtE5I5O1t8rIiu8zzoRqQpZd5WIrPc+V/mZznabyupJiBPyMlP9OYHlIIwxMSTCw5XuISLxwAPA6UAJsExEFoS+W1pVbwvZ/hu490wgIgOBHwAFuHdfL/f2rfQrveB6UY/MTiMh3qe4aZXUxpgY4mcOYiawQVWLVLUFmAecv5/tLwee9L5/FlikqhVeUFgEzPExrYA3iqsfLwkCaG2CtkbrA2GMiRl+Bog8oDhkvsRb9ikiMgoYDbzalX1F5DoRKRSRwl27dh1SYoNB9b8PBFgOwhgTM3pKJfVlwHxVDXRlJ1V9RFULVLUgNzf3kBKwvaaJptagvy2YwAKEMSZm+BkgSoERIfPDvWWduYw9xUtd3TciNvvegskChDEmtvgZIJYB40RktIgk4YLAgo4bichEIAt4J2TxS8AZIpIlIlnAGd4y3xT53geiyk2tFZMxJkb41opJVdtE5CbcjT0emKuqq0XkbqBQVduDxWXAPFXVkH0rROSHuCADcLeqVviVVnA5iJTEOIb092mcJMtBGGNijG8BAkBVFwILOyy7s8P8XfvYdy4w17fEdbCprJ787HTi4sSfE1iAMMbEmJ5SSR11m/xswQQuQEg8JPf37xzGGBNBFiCAtkCQT8ob/Kt/ANfMNWUAiE85FGOMiTALEEBpVSNtQfU/B2HFS8aYGGIBgj0tmCxAGGPMHhYg2NMHwgKEMcbsYQEC14IpIzmB7PQk/07SWGV9IIwxMcUCBN4gfbnpiJ8VyJaDMMbEGAsQ7OkD4ZtgAJqqLUAYY2JKnw8QTa0BSqsa/a1/aKoG1AKEMSam9PkAUdPUysz8gUwe5mMHtvahvu1dEMaYGOLrUBuxYFBGCk997Rh/T2LDbBhjYlCfz0F0i2pvpPKMIdFNhzHGdIEFiO5QsdFNB46JbjqMMaYLLEB0h4oiSM+FFBuozxgTOyxAdIeKTZZ7MMbEHAsQ3aF8IwwcG+1UGGNMl1iA8FtLA9RutRyEMSbm+BogRGSOiHwsIhtE5I59bHOJiKwRkdUi8teQ5QERWeF9PvUu65hRudlNB46OajKMMaarfOsHISLxwAPA6UAJsExEFqjqmpBtxgHfA45T1UoRGRRyiEZVnepX+rqNtWAyxsQoP3MQM4ENqlqkqi3APOD8DttcCzygqpUAqrrTx/RER0WRm1qAMMbEGD8DRB5QHDJf4i0LNR4YLyL/EZElIjInZF2KiBR6yy/o7AQicp23TeGuXbsimviIqSiCtGwb6tsYE3OiPdRGAjAOOAkYDrwhIkeqahUwSlVLRWQM8KqIfKiqG0N3VtVHgEcACgoKtFtTHq6KIss9GGNikp85iFJgRMj8cG9ZqBJggaq2quomYB0uYKCqpd60CHgNmOZjWv1TbgHCGBOb/AwQy4BxIjJaRJKAy4COrZGex+UeEJEcXJFTkYhkiUhyyPLjgDXEmtZGqCmxPhDGmJjkWxGTqraJyE3AS0A8MFdVV4vI3UChqi7w1p0hImuAAPBtVS0XkWOBh0UkiAti94S2fooZlVvc1HIQxpgY5GsdhKouBBZ2WHZnyHcFvul9Qrd5GzjSz7R1C2viaoyJYdaT2k/tTVyzLUAYY2JPtFsx9W4VRe4lQfaiIGN6pNbWVkpKSmhqaop2UnyXkpLC8OHDSUxMDHsfCxB+siauxvRoJSUlZGRkkJ+fj4hEOzm+UVXKy8spKSlh9Ojwh/2xIiY/WRNXY3q0pqYmsrOze3VwABARsrOzu5xTsgDhl7ZmqC62Jq7G9HC9PTi0O5jrtADhl8otgFoOwhgTsyxA+MWauBpjDqCqqorf/e53Xd7vrLPOoqqqKvIJ6sAChF9sFFdjzAHsK0C0tbXtd7+FCxeSmZnpU6r2sFZMfqkogpQBkDYw2ikxxoThf/++mjVbayJ6zMnD+vODcw/f5/o77riDjRs3MnXqVBITE0lJSSErK4u1a9eybt06LrjgAoqLi2lqauKWW27huuuuAyA/P5/CwkLq6uo488wzOf7443n77bfJy8vjhRdeIDU1NSLptxyEX8o3utxDH6kAM8Z03T333MPYsWNZsWIFP//5z3nvvfe47777WLduHQBz585l+fLlFBYWcv/991NeXv6pY6xfv54bb7yR1atXk5mZyTPPPBOx9FkOwi8VRTC8INqpMMaEaX9P+t1l5syZe/VTuP/++3nuuecAKC4uZv369WRnZ++1z+jRo5k6dSoAM2bMYPPmzRFLj+Ug/NDWYk1cjTFdlp6evvv7a6+
9xssvv8w777zDBx98wLRp0zrtx5CcnLz7e3x8/AHrL7rCAoQfqj4BDVoFtTFmvzIyMqitre10XXV1NVlZWaSlpbF27VqWLFnSzamzIiZ/WBNXY0wYsrOzOe644zjiiCNITU1l8ODBu9fNmTOHhx56iEmTJjFhwgRmz57d7emzAOEHa+JqjAnTX//6106XJycn8+KLL3a6rr2eIScnh1WrVu1efvvtt0c0bVbE5IeKIkjuD+k50U6JMcYcNAsQfijfCANHWxNXY0xM8zVAiMgcEflYRDaIyB372OYSEVkjIqtF5K8hy68SkfXe5yo/0xlxNsy3MaYX8K0OQkTigQeA04ESYJmILAh9t7SIjAO+BxynqpUiMshbPhD4AVAAKLDc27fSr/RGTKDVtWI64vPRTokxxhwSP3MQM4ENqlqkqi3APOD8DttcCzzQfuNX1Z3e8s8Ci1S1wlu3CJjjY1ojp+oT0ID1gTDGxDw/A0QeUBwyX+ItCzUeGC8i/xGRJSIypwv79kzWgskY00tEu5I6ARgHnARcDvxeRDLD3VlErhORQhEp3LVrlz8p7CoLEMYYn/Tr1w+ArVu3ctFFF3W6zUknnURhYWFEzudngCgFRoTMD/eWhSoBFqhqq6puAtbhAkY4+6Kqj6hqgaoW5ObmRjTxB62iCJL6Qb9B0U6JMaaXGjZsGPPnz/f9PH52lFsGjBOR0bib+2XAFR22eR6Xc/ijiOTgipyKgI3AT0Qky9vuDFxlds9nTVyNiU0v3gHbP4zsMYccCWfes8/Vd9xxByNGjODGG28E4K677iIhIYHFixdTWVlJa2srP/rRjzj//L2rbzdv3sw555zDqlWraGxs5JprruGDDz5g4sSJNDY2Riz5vgUIVW0TkZuAl4B4YK6qrhaRu4FCVV3grTtDRNYAAeDbqloOICI/xAUZgLtVtcKvtEZURREMOSLaqTDGxIBLL72UW2+9dXeAePrpp3nppZe4+eab6d+/P2VlZcyePZvzzjtvn++UfvDBB0lLS+Ojjz5i5cqVTJ8+PWLp83WoDVVdCCzssOzOkO8KfNP7dNx3LjDXz/RFXKANqrbA5POinRJjTFft50nfL9OmTWPnzp1s3bqVXbt2kZWVxZAhQ7jtttt44403iIuLo7S0lB07djBkyJBOj/HGG29w8803AzBlyhSmTJkSsfTZWEyRVP0JBNusiasxJmwXX3wx8+fPZ/v27Vx66aU88cQT7Nq1i+XLl5OYmEh+fn6nw3x3h2i3Yoq+YBAaKqC1EVQP7VjWgskY00WXXnop8+bNY/78+Vx88cVUV1czaNAgEhMTWbx4MVu2bNnv/ieeeOLuAf9WrVrFypUrI5Y2y0E0VsLPQ27oCamQ6H0SUiA+sfP9ggEItLgcQ6DF9aBu9SqHLEAYY8J0+OGHU1tbS15eHkOHDuULX/gC5557LkceeSQFBQVMnDhxv/tff/31XHPNNUyaNIlJkyYxY8aMiKXNAkRiCsy5x93c25qgtQFam9x8a4PrFd0ZiYP4JIhLdEEkPtHNZ+VD/6HdegnGmNj24Yd7Wk/l5OTwzjvvdLpdXV0dAPn5+buH+U5NTWXevHm+pMsCRFI6zL4+2qkwxpgex+ogjDHGdMoChDGmT9NDbZwSIw7mOi1AGGP6rJSUFMrLy3t9kFBVysvLSUlJ6dJ+VgdhjOmzhg8fTklJCT1msE8fpaSkMHz48C7tYwHCGNNnJSYmMnr06Ggno8eyIiZjjDGdsgBhjDGmUxYgjDHGdEp6S+29iOwC9j9oyf7lAGURSk4ssevuW+y6+5ZwrnuUqnb6xrVeEyAOlYgUqmpBtNPR3ey6+xa77r7lUK/bipiMMcZ0ygKEMcaYTlmA2OORaCcgSuy6+xa77r7lkK7b6iCMMcZ0ynIQxhhjOmUBwhhjTKf6fIAQkTki8rGIbBCRO6KdHj+JyFwR2Skiq0KWDRSRRSKy3ptmRTONkSYiI0RksYisEZHVInKLt7y3X3eKiCwVkQ+86/5fb/loEXnX+70/JSJJ0U6rH0QkXkTeF5F/ePN95bo3i8iHIrJCRAq9ZQf9W+/TAUJE4oEHgDOBycDlIjI5uqny1Z+AOR2W3QG8oqrjgFe8+d6kDfiWqk4GZgM3ev/Gvf26m4FTVPUoYCowR0RmAz8D7lXVw4BK4CvRS6KvbgE+CpnvK9cNcLKqTg3p/3DQv/U+HSCAmcAGVS1S1RZgHnB+lNPkG1V9A6josPh84DHv+2PABd2ZJr+p6jZVfc/7Xou7aeTR+69bVbXOm030PgqcAsz3lve66wYQkeHA2cCj3rzQB657Pw76t97XA0QeUBwyX+It60sGq+o27/t2YHA0E+MnEckHpgHv0geu2ytmWQHsBBYBG4EqVW3zNumtv/dfA98Bgt58Nn3jusE9BPxbRJaLyHXesoP+rdv7IMxuqqoi0ivbPYtIP+AZ4FZVrXEPlU5vvW5VDQBTRSQTeA6YGN0U+U9EzgF2qupyETkpysmJhuNVtVREBgGLRGRt6Mqu/tb7eg6iFBgRMj/cW9aX7BCRoQDedGeU0xNxIpKICw5PqOqz3uJef93tVLUKWAwcA2SKSPuDYW/8vR8HnCcim3FFxqcA99H7rxsAVS31pjtxDwUzOYTfel8PEMuAcV4LhyTgMmBBlNPU3RYAV3nfrwJeiGJaIs4rf/4D8JGq/ipkVW+/7lwv54CIpAKn4+pfFgMXeZv1uutW1e+p6nBVzcf9f35VVb9AL79uABFJF5GM9u/AGcAqDuG33ud7UovIWbgyy3hgrqr+OLop8o+IPAmchBsCeAfwA+B54GlgJG649EtUtWNFdswSkeOBN4EP2VMm/V+4eojefN1TcBWS8bgHwadV9W4RGYN7sh4IvA98UVWbo5dS/3hFTLer6jl94bq9a3zOm00A/qqqPxaRbA7yt97nA4QxxpjO9fUiJmOMMftgAcIYY0ynLEAYY4zplAUIY4wxnbIAYYwxplMWIIzpAUTkpPaRR43pKSxAGGOM6ZQFCGO6QES+6L1nYYWIPOwNiFcnIvd67114RURyvW2nisgSEVkpIs+1j8MvIoeJyMveuxreE5Gx3uH7ich8EVkrIk9I6IBRxkSBBQhjwiQik4BLgeNUdSoQAL4ApAOFqno48DquhzrA48B3VXUKrid3+/IngAe8dzUcC7SPtDkNuBX3bpIxuHGFjIkaG83VmPCdCswAlnkP96m4gc+CwFPeNn8BnhWRAUCmqr7uLX8M+Js3Vk6eqj4HoKpNAN7xlqpqiTe/AsgH3vL9qozZBwsQxoRPgMdU9Xt7LRT5nw7bHez4NaFjAwWw/58myqyIyZjwvQJc5I213/6u31G4/0ftI4VeAbylqtVApYic4C2/Enjde6tdiYhc4B0jWUTSuvMijAmXPaEYEyZVXSMi38e9sSsOaAVuBOqBmd66nbh6CnBDKz/kBYAi4Bpv+ZXAwyJyt3eMi7vxMowJm43maswhEpE6Ve0X7XQYE2lWxGSMMaZTloMwxhjTKctBGGOM6ZQFCGOMMZ2yAGGMMaZTFi
CMMcZ0ygKEMcaYTv0/zGo3CEtcRkoAAAAASUVORK5CYII=\n",
      "text/plain": [
       "<Figure size 432x288 with 1 Axes>"
      ]
     },
     "metadata": {
      "needs_background": "light"
     },
     "output_type": "display_data"
    },
    {
     "data": {
      "image/png": "iVBORw0KGgoAAAANSUhEUgAAAYIAAAEWCAYAAABrDZDcAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjUuMSwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy/YYfK9AAAACXBIWXMAAAsTAAALEwEAmpwYAAAxy0lEQVR4nO3de3xcdZ3/8ddnLsnknjZJ26QX0lroFWhpgSKiVRSRSwHl5oKKuqIu+0NccQV219vP689ddRFQcWFBF0EsiqCgC8pVubVQ2kKBFig0vSVNc7/PzOf3x/dMGtKknSRzks6cz/PxmMdczpkz35mm532+l/M9oqoYY4wJrtBEF8AYY8zEsiAwxpiAsyAwxpiAsyAwxpiAsyAwxpiAsyAwxpiAsyAwJk0icouIfCPNdbeKyHvHuh1jxoMFgTHGBJwFgTHGBJwFgckpXpPMF0VkvYh0iMhNIjJVRO4XkTYReVBEJg1Yf5WIvCAizSLysIgsGLBsqYg8673vV0Bs0GedISLrvPf+TUSOGmWZPyUiW0Rkr4jcIyI13usiIj8QkXoRaRWRDSKy2Ft2moi86JVtu4hcOaofzBgsCExu+hDwPuAI4EzgfuAaoAr3N385gIgcAdwOXOEtuw+4V0TyRCQPuBv4BTAZ+LW3Xbz3LgVuBj4NVAA/Be4RkfyRFFRE3gN8GzgfqAbeAO7wFp8CvNP7HmXeOo3espuAT6tqCbAY+MtIPteYgSwITC76karuVtXtwGPAU6r6nKp2A78FlnrrXQD8QVUfUNU+4N+BAuDtwAogCvxQVftUdTXwzIDPuBT4qao+paoJVb0V6PHeNxIXATer6rOq2gNcDZwgIrVAH1ACzAdEVTep6k7vfX3AQhEpVdUmVX12hJ9rTD8LApOLdg943DXE82LvcQ3uCBwAVU0C24Dp3rLt+tZZGd8Y8Pgw4Ates1CziDQDM733jcTgMrTjjvqnq+pfgOuA64F6EblRREq9VT8EnAa8ISKPiMgJI/xcY/pZEJgg24HboQOuTR63M98O7ASme6+lzBrweBvwTVUtH3ArVNXbx1iGIlxT03YAVb1WVZcBC3FNRF/0Xn9GVc8CpuCasO4c4eca08+CwATZncDpInKyiESBL+Cad/4GPAHEgctFJCoiHwSOG/DenwGfEZHjvU7dIhE5XURKRliG24GPi8gSr3/hW7imrK0icqy3/SjQAXQDSa8P4yIRKfOatFqB5Bh+BxNwFgQmsFT1ZeBi4EfAHlzH8pmq2quqvcAHgUuAvbj+hN8MeO8a4FO4ppsmYIu37kjL8CDwb8BduFrI24ALvcWluMBpwjUfNQLf85Z9BNgqIq3AZ3B9DcaMitiFaYwxJtisRmCMMQFnQWCMMQFnQWCMMQFnQWCMMQEXmegCjFRlZaXW1tZOdDGMMSarrF27do+qVg21LOuCoLa2ljVr1kx0MYwxJquIyBvDLbOmIWOMCTgLAmOMCTgLAmOMCbis6yMwxpiR6uvro66uju7u7okuiu9isRgzZswgGo2m/R4LAmNMzqurq6OkpITa2lreOqFsblFVGhsbqaurY/bs2Wm/z5qGjDE5r7u7m4qKipwOAQARoaKiYsQ1HwsCY0wg5HoIpIzme1oQqMLaW6Bt90FXNcaYXGRBsPc1uPdz8N8fgNYdE10aY0wOam5u5oYbbhjx+0477TSam5szX6BBLAjavGuB730VbjkdWrZPbHmMMTlnuCCIx+MHfN99991HeXm5T6Xax7cgEJGYiDwtIs+LyAsi8rUh1skXkV+JyBYReUpEav0qz7Dadrn7VT+Cjj1eGNSNezGMMbnrqquu4tVXX2XJkiUce+yxnHTSSaxatYqFCxcCcPbZZ7Ns2TIWLVrEjTfe2P++2tpa9uzZw9atW1mwYAGf+tSnWLRoEaeccgpdXV0ZK5+fw0d7gPeoart3zdXHReR+VX1ywDqfBJpUda6IXAh8F3dJwPHTXu/u558BUxbCL86B/z4NLvk9lM868HuNMVnna/e+wIs7WjO6zYU1pXzlzEXDLv/Od77Dxo0bWbduHQ8//DCnn346Gzdu7B/iefPNNzN58mS6uro49thj+dCHPkRFRcVbtrF582Zuv/12fvazn3H++edz1113cfHFF2ek/L7VCNRp955Gvdvg62KeBdzqPV4NnCzj3bXfvgvCeVAwCWYsh4/eDV3NrmbQNOwcTcYYM2rHHXfcW8b5X3vttRx99NGsWLGCbdu2sXnz5v3eM3v2bJYsWQLAsmXL2Lp1a8bK4+sJZSISBtYCc4HrVfWpQatMB7YBqGpcRFqACtyFxAdu51LgUoBZszJ8lN5eD8VTIZU/05e5MPjF2XDLGa5mMOmwzH6mMWbCHOjIfbwUFRX1P3744Yd58MEHeeKJJygsLGTlypVDngeQn5/f/zgcDme0acjXzmJVTajqEmAGcJyILB7ldm5U1eWquryqasjptEevbRcUT3nra9OPgY/eAx0N8LdrM/t5xpjAKSkpoa2tbchlLS0tTJo0icLCQl566SWefPLJIdfz07hMMaGqzSLyEHAqsHHAou3ATKBORCJAGdA4HmXq174bJg1xKnbNEpg8G1p3jmtxjDG5p6KighNPPJHFixdTUFDA1KlT+5edeuqp/OQnP2HBggXMmzePFStWjHv5fAsCEakC+rwQKADeh+sMHuge4GPAE8C5wF9UdXA/gr/ad8PM44deVlgBnXuGXmaMMSPwy1/+csjX8/Pzuf/++4dcluoHqKysZOPGfcfQV155ZUbL5meNoBq41esnCAF3qurvReTrwBpVvQe4CfiFiGwB9gIX+lie/cV7obMRSqYNvbyoCnY+P65FMsaY8eZbEKjqemDpEK9/ecDjbuA8v8pwUB0N7n5wH0FKUaXVCIwxOS/YZxa3eyeTFQ9TIyishO4WSPSNX5mMMWacBTwIvJPJSqYOvbzIO6Gjc3z7r40xZjwFOwhS00sUDxMEhZXuvsOah4wxuSvYQZCqERQdoI8A9vUlGGNMDgp4EOyCgskQyRt6eapGYE1DxphxVFxcDMCOHTs499xzh1xn5cqVrFmzJiOfF/AgqB9+6Ci44aNgTUPGmAlRU1PD6tWrff+cYAfBUNNLDFQwCSRkQ0iNMWNy1VVXcf311/c//+pXv8o3vvENTj75ZI455hiOPPJIfve73+33vq1bt7J4sZuZp6uriwsvvJAFCxZwzjnnZM001Ie+9nqomDv88lDINR1ZjcCY3HH/VbBrQ2a3Oe1I+MB3hl18wQUXcMUVV3DZZZcBcOedd/KnP/2Jyy+/nNLSUvbs2cOKFStYtWrVsNcc/vGPf0xhYSGbNm1i/fr1HHPMMRkrfnCDQNX1ERyoRgB2UpkxZsyWLl1KfX09O3bsoKGhgUmTJjFt2jQ+//nP8+ijjxIKhdi+fTu7d+9m2rShm6sfffRRLr/8cgCOOuoojjrqqIyVL7hB0N0Mid4D9xGA6zDusM5iY3LGAY7c/XTeeeexevVqdu3axQUXXMBtt91GQ0MDa9euJRq
NUltbO+T00+MhuH0Ebbvd/XDnEKQUVdjwUWPMmF1wwQXccccdrF69mvPOO4+WlhamTJlCNBrloYce4o03DnwhrHe+8539E9dt3LiR9evXZ6xswa0RtKcZBIXWNGSMGbtFixbR1tbG9OnTqa6u5qKLLuLMM8/kyCOPZPny5cyfP/+A7//sZz/Lxz/+cRYsWMCCBQtYtmxZxspmQXDQGkEVdDVBIg7h4P5cxpix27BhXyd1ZWUlTzzxxJDrtbe7q/zW1tb2Tz9dUFDAHXfc4Uu5gts0lAqC4eYZSkmdXdy119/yGGPMBAluELTtgkgB5JceeL1Cb+I5G0JqjMlRwQ2C9t1u6OgwY3b7pWoE1k9gTFYb74sfTpTRfM9gB8HBho7CgBlIbeSQMdkqFovR2NiY82GgqjQ2NhKLxUb0vuD2frbthqojDr5e/wykdi6BMdlqxowZ1NXV0dCQ+wd0sViMGTNmjOg9wQ2C9t0w+50HX69gsru3piFjslY0GmX27NkTXYxDVjCbhvq63ZnFBxs6Cm7IaMEk6yw2xuSsYAZBx0EuUTlYUZXVCIwxOSuYQZDu9BIpNt+QMSaHBTMI0j2rOKWowmoExpicFdAgOMhF6wcrrLTho8aYnOVbEIjITBF5SEReFJEXRORzQ6yzUkRaRGSdd/uyX+V5i/Z6QPZdivJgiiqhcy8kE74WyxhjJoKfw0fjwBdU9VkRKQHWisgDqvrioPUeU9UzfCzH/tp2uZ17upPIFVYC6iafS51XYIwxOcK3GoGq7lTVZ73HbcAmYLpfnzci7fVQnMZZxSn9J5VZP4ExJveMSx+BiNQCS4Gnhlh8gog8LyL3i8iiYd5/qYisEZE1GTkzsH1X+kNHweYbMsbkNN+DQESKgbuAK1S1ddDiZ4HDVPVo4EfA3UNtQ1VvVNXlqrq8qirNdv0Daa9Pv6MYBsw3ZEFgjMk9vgaBiERxIXCbqv5m8HJVbVXVdu/xfUBURPxthE8mvZlHrUZgjDHg76ghAW4CNqnq94dZZ5q3HiJynFcef8/c6mqCZHyENQK7JoExJnf5OWroROAjwAYRWee9dg0wC0BVfwKcC3xWROJAF3Ch+j1PbOocgpH0EYSjECuzIDDG5CTfgkBVHwcOeNUXVb0OuM6vMgypbYQnk6XYReyNMTkqeGcWt3sTzo00CIoqrUZgjMlJAQyCUdYIiqqg0yaeM8bkngAGQT3kFUN+8cjeV1hhNQJjTE4KXhC07XIXrR+pokpXI0gmM18mY4yZQMELgpFOL5FSWAmacFc2M8aYHBLAIBhDjQCsecgYk3MCGAT1UDKaGoF3UpkNITXG5JhgBUFvJ/S0jnzEEFiNwBiTs4IVBCO9ROVAqYvYWI3AGJNjghkEI5leIqV/viE7l8AYk1uCGQSjqRFE8iG/1K5dbIzJOcEKgrZUEIyisxhcrcCahowxOSZYQdC+GyS8r5lnpGy+IWNMDgpYEHjnEIRG+bULK22+IWNMzglYENSP7mSylCKbb8gYk3uCFQRtu0bfPwD7ZiD1+do5xhgznoIVBGOtERRWQrIPulsyVyZjjJlgwQmCZAI6Rjm9RIqdXWyMyUHBCYKOPaDJ0Z1DkFLoBYENITXG5JDgBMFYTiZLKUqdXWxBYIzJHRYEI2E1AmNMDgpOEIjA1MVQWj36bVgfgTEmB0UmugDjZu573W0sogXuesd2UpkxJocEp0aQKYUVNvGcMSan+BYEIjJTRB4SkRdF5AUR+dwQ64iIXCsiW0RkvYgc41d5MsbmGzLG5Bg/awRx4AuquhBYAVwmIgsHrfMB4HDvdinwYx/LkxmFldZZbIzJKb4FgaruVNVnvcdtwCZg+qDVzgJ+rs6TQLmIjKE3dxwUVdrFaYwxOWVc+ghEpBZYCjw1aNF0YNuA53XsHxaIyKUiskZE1jQ0THD7fOqaBDbfkDEmR/geBCJSDNwFXKGqraPZhqreqKrLVXV5VVVVZgs4UkWVkOiFnraJLYcxxmSIr0EgIlFcCNymqr8ZYpXtwMwBz2d4rx267CL2xpgc4+eoIQFuAjap6veHWe0e4KPe6KEVQIuq7vSrTBlRaCeVGWNyi58nlJ0IfATYICLrvNeuAWYBqOpPgPuA04AtQCfwcR/Lkxk235AxJsf4FgSq+jggB1lHgcv8KoMvbL4hY0yOsTOLR8rmGzLG5BgLgpHKK4JIgc03ZIzJGRYEo2HTTBhjcogFwWgU2TQTxpjcYUEwGoWV0F4/0aUwxpiMsCAYjZJp0LZrokthjDEZYUEwGqU17poEib6JLokxxoyZBcFolNYAarUCY0xOsCAYjZIad9+6Y2LLYYwxGWBBMBql3iUT2iwIjDHZz4JgNEq9Sya0Htrz4xljTDosCEajYBKE86H10J4x2xhj0mFBMBoirnmozWoExpjsZ0EwWqXTrWnIGJMTLAhGq6TamoaMMTnBgmC0SqvdeQR2EXtjTJZLKwhE5HMiUupdUvImEXlWRE7xu3CHtNLpkOiBzr0TXRJjjBmTdGsEn1DVVuAUYBLuEpTf8a1U2aDEziUwxuSGdIMgdcnJ04BfqOoLHOQylDmv1M4uNsbkhnSDYK2I/C8uCP4kIiVA0r9iZQELAmNMjkj34vWfBJYAr6lqp4hMBj7uW6myQfFUQOxcAmNM1ku3RnAC8LKqNovIxcC/Ai3+FSsLhKNQPMWGkBpjsl66QfBjoFNEjga+ALwK/Ny3UmWLkmo7qcwYk/XSDYK4qipwFnCdql4PlPhXrCxROt2ahowxWS/dIGgTkatxw0b/ICIhIHqgN4jIzSJSLyIbh1m+UkRaRGSdd/vyyIp+CCi1s4uNMdkv3SC4AOjBnU+wC5gBfO8g77kFOPUg6zymqku829fTLMuho6Qaulugt3OiS2KMMaOWVhB4O//bgDIROQPoVtUD9hGo6qNAbp92m7ougTUPGWOyWLpTTJwPPA2cB5wPPCUi52bg808QkedF5H4RWZSB7Y2v1JXKrHnIGJPF0j2P4F+AY1W1HkBEqoAHgdVj+OxngcNUtV1ETgPuBg4fakURuRS4FGDWrFlj+MgM6792sdUIjDHZK90+glAqBDyNI3jvkFS1VVXbvcf3AVERqRxm3RtVdbmqLq+qqhrLx2aWXbvYGJMD0t2Z/1FE/iQil4jIJcAfgPvG8sEiMk1ExHt8nFeWxrFs80Ae29zA6dc+xo7mrsxtNL8E8kutRmCMyWppNQ2p6hdF5EPAid5LN6rqbw/0HhG5HVgJVIpIHfAVvCGnqvoT4FzgsyISB7qAC71zFXyRVHhhRyvbm7uoKS/I3IbtAjXGmCyXbh8BqnoXcNcI1v/wQZZfB1yX7vbGqqYsBpDZGgG4yeds1JAxJosdMAhEpA0Y6ihdAFXVUl9K5YNqrxaws6U7sxsurYFXX87sNo0xZhwdMAhUNWemkSjOj1ASi7Az0zWCkmpo3wWJOITTrmAZY8whI1DXLK4pK2CHHzUCTUJH/cHXNcaYQ1CggqC6PMbOFh/6CMBGDhljslawgqCsgJ3NGa4RlNjZxcaY7BaoIKgpi9
HY0Ut3XyJzG03VCGzkkDEmSwUqCFIjh3Zlsp+gsBJCUbt2sTEmawUqCGrKfTiXIBTyTiqzIDDGZKdgBUGZqxFkfuRQtTUNGWOyVqCCYJp3dnHGzyUorbEagTEmawUqCGLRMBVFeZmvEZR400z4N1WSMcb4JlBBAH6dS1ANfZ3Q3ZzZ7RpjzDgIXhD4cS6BnVRmjMligQuCmrIYOzJdI0hdqcwuUGOMyUKBC4Lq8gLauuO098Qzt9H+axdbEBhjsk/wgsCPkUP900xY05AxJvsELghSVyfL6MihSL47w9iahowxWShwQVDt25XK7OxiY0x2ClwQTC2NIeLHSWXTrWnIGJOVAhcE0XCIKSX5PpxUVm1NQ8aYrBS4IADXT+DLBWo6G6EvwwFjjDE+C2YQ+HlSmU0+Z4zJMoEMgmrvpDLN5NxAqSGkFgTGmCwTzCAoL6C7L0lzZ1/mNto/zYT1Exhjsksgg6AmNYQ0k/0EJXZ2sTEmO/kWBCJys4jUi8jGYZaLiFwrIltEZL2IHONXWQZLXbIyo/0EsTKIFlnTkDEm6/hZI7gFOPUAyz8AHO7dLgV+7GNZ3iJVI8joyCERO6nMGJOVfAsCVX0U2HuAVc4Cfq7Ok0C5iFT7VZ6BKovziYbFn3MJLAiMMVlmIvsIpgPbBjyv817bj4hcKiJrRGRNQ0PDmD84FBKmlsZ8mGZiujUNGWOyTlZ0Fqvqjaq6XFWXV1VVZWSb/pxL4F3EPpnI7HaNMcZHExkE24GZA57P8F4bF9XlPlygZvIcSMahaWtmt2uMMT6ayCC4B/ioN3poBdCiquPWrlJdVsDu1m6SyQyeVFa1wN03vJy5bRpjjM/8HD56O/AEME9E6kTkkyLyGRH5jLfKfcBrwBbgZ8A/+FWWoUwvj9GXUPa092Ruo1Xz3H3Dpsxt0xhjfBbxa8Oq+uGDLFfgMr8+/2Cqy/ZdoGZKaSwzG42Vug7j+pcysz1jjBkHWdFZ7Ifqch8uWQlQNR8aLAiMMdkjsEFQU+bDJSsBpiyAPa/YyCFjTNYIbBCUF0aJRUM+1AjmQbzbRg4ZY7JGYINARNy5BJmuEdjIIWNMlglsEIDrJ9juR40AbOSQMSZrBDsIyny4ZGVq5JDVCIwxWSLQQVBTFqO+rYe+RDKzG66aD/VWIzDGZIdAB0F1eQGqsLs10/0E823kkDEmawQ7CPqvS5DpIaTz3cih5jcyu11jjPFBoIOgxrtSWcano06NHLIzjI0xWSDQQeBbjaDqCHdvI4eMMVkg0EFQEotSEotk/qSyWJmNHDLGZI1ABwG4qSYyPs0E2MghY0zWCHwQVJfHMn8uAdjIIZP9VKFjz0SXwowDCwI/LlkJNnLIZL91v4R/PwLefGqiS2J8FvggqCmL0djRS3dfho/cq+a7exs5ZLJRMgGP/TtoAu7/Z6vZ5rjAB0G1N4Q08yOHUnMOWRCYLLTpHtj7Giz6IOxcB8/9z0SXyPgo8EFQU+bTBWr6Rw5ZEJgsowqP/wAmvw0++DOYdQL8+evQ1TzRJTM+CXwQzJxcCMDfXm3M/Mar5tnIIZN9XnsYdj4PJ34OwhH4wHehsxEe+e5El8z4xIJgciGrjq7hhoe38ESmw6DKrlZmstDjP4DiaXD0he559dGw7BJ46qfW55WjAh8EAN/64JHUVhRx+R3P0dDWk7kN28ghk222r4XXH4ETLoNI/r7X3/NvkF8Mf/ySazoyOcWCACjOj3DDxcfQ2tXHFb96jkQyQ3/oqZFDdoaxyRaP/9D1by275K2vF1XAu//FNRu99IcJKJjxkwWBZ/60Uv7vWYv565ZGrv3z5sxsNDVyyPoJTDbYsxk23QvHfspdYGmw5Z90zZ1/uhr6fDgJ00yYyEQX4FBy3vIZPPl6I9f+ZTPH1k7mHYdXjm2DsTIoqbGRQ2b0uppg619h3mkQ8vm47a//6ZqDjv/M0MtTHcc/XwV/uw7e9UV/yzMS8R7YsBoqD4eZx010afbp63bDcFu2ebc6aPbuE71w3KfgyPPdb3swqpCMQzia8WL6GgQicirwn0AY+C9V/c6g5ZcA3wO2ey9dp6r/5WeZDkRE+MbZi9lQ18IVv3qOP1x+ElNLY2Pb6JT5FgRmdNrr4ednQf2LMP8MOOenrp3eD6074Pk7YNnHoLhq+PXmvAsWrILHvw8LV+2r9Y6GKrz0e6h7Bt7xeSiYNPJtxHvguV/AY9+H1u0gYTj5y27Ek8jBPz/eDdGC9D7rpfvgwa9CKAJTFrjb1EXuvmwWJPtg9wuw4zl37sWO51xrQDK+bxuhiBtWXj4Letrg7s/CY/8B77oKFn8QQuH9P7fxVVh/J2y4E5Z9HE68PN1fJ22iPnX8iEgYeAV4H1AHPAN8WFVfHLDOJcByVf3HdLe7fPlyXbNmTYZL+1abd7ex6rq/cuSMMn7598cTCY/hSOyP18Cam+GaHf4f0Znc0brTHXk3b4NjPgrP/AymLIQP3+52Ipn2v/8KT9wAlz8Lk2oPvG7TG3Dju9zR7vu+5pqSRvq33VIHf7gSXrnfPS+phlU/gsPfl977BwfAzONdmDx/O7z4O5h3Opx9AxSUD/3+1x6BB74Muza40VHv/CJMnj30um274L4vupPsqha4379+E7S8uW+daJE7wk/2uecFk6BmqbtNWQjlh0HZDCiesm9nnwrCh74N9S+4PsWVV7ug7WyEF34D63/lOvARqH0HrPgHmH9aer/RICKyVlWXD7nMxyA4Afiqqr7fe341gKp+e8A6l3AIBgHAb56t45/ufJ7Tj6zma2ctorI4/+BvGsraW+Hey+Hy52DynMwW0uSm5m1w65nQ0QB/dyfUnghbHoRff8I1C1zwP3DYCZn7vK4m+MFimPcB+FCaFfLWHXDP5bDlAag9ye100wmoZMINQ/3LNwB1O77D3g6/+0d3/Y6lH4H3f2voPgpwtaQXfuuasVIBsPJqmLPS1QBU4amfuGArmwHn/9wNf03Z/QI88BVX7rKZroazYTUk+mDJh10gpIIwmYRnb4EHvupqDiu/BG+/fF/TTHerq+3Xv+iCIZIPNcdAzRK34z9YjaT/N0nCi3fDw992w83LZrnvpgmYdqRrOlr8ISibnt72hjFRQXAucKqq/r33/CPA8QN3+l4QfBtowNUePq+q24bY1qXApQCzZs1a9sYb4zMc8/qHtvDDB1+hMC/Cl06dz4XHziQUSvMfN2Xb03DT++DDd7j/aMYcSNNWFwJdLXDxXTDz2H3L9myG2y90R+RnfN/VFEaju9XtEHdtgN0b3N9ow0vwmb/CtMXpb0fVHZX/8Rr3/P3fdGUabge483m493OuyWTu++D0/4BJh7ll8R63I/zrf7qmk7Ouczt3cG3sL/0BNv0etj0F6P4BMNi2p+HXl7jZU0/7Hsx9Lzz0LVh3mwuZk66E4y6FaMwd8T/+A1jz327nu+Tv3I73oW/Dtidh9jvhjB9CxdvS/21GI5mAjXe5MtYcA0ed75qdMuRQDoIKoF1Ve0Tk08AFqvqeA213vGoEKVvq2/iX327kqdf3csyscr5x9
pEsrBnmaGUo3S3wnVlw8lfgpH/yr6Am+zW+6kKgtwM+erdrVhisqwlWfwJe/Qsc+/cw41j3N9bVDN3N7nF3i2um0OS+WzLhdtytdS5sUgomu6POhavc9kaj+U24+x9g62NuB79w1f5l6mx0zTGFk12H86IPDrMDf8a1mzduhgVnut+k3mtNnnak6yuZfzpMXXzwI+6OPXDX38NrD7m2eQnB8Z+Gd/yTK8dgrTtcIKy9xf1+BZPglG+6YEj36P4Qdsg2DQ1aPwzsVdWyA213vIMAQFX5zbPb+eZ9m2jp6uMTJ9ZyxXuPoCg/zb72/1jgjio++NODr5tMwvY17giotMYdYaXbmeWnRB+8+YQbNjj3vUN3avlFFdp2uqPG4dpxxyKZdDuszka3o03tTAfuWHvaIdED8V63k0jdVF2HafXRUL3EjVo52G+TTHg7yiZvZ9nkdloPfMW1MX/0d26nN5xEHB74N3jyhre+nlcMsXJ3xBvJdzu+wbeiKrftaUe5o/+S6szs5JJJeOa/XLt73BtaKiE3ci5W7trqZxwL777m4J3CfV2u6eiZm2D6Mlhwhhs1lao9jKhcCfjrD10t6p1Xptd81bLdNR3NO/3AHedZZqKCIIJr7jkZNyroGeDvVPWFAetUq+pO7/E5wJdUdcWBtjsRQZDS3NnLd//4Erc/vY3JRXlcfPwsLj7hMKaUHGRk0S/OcTuZTz869PJkEuqehhfudh1SqdEPmoCiKW4ExPJPQF5hxr/TAXXsgc0PwCt/dEegPa3u9cp58K5/hkXnDL/TU3VNAZsfgKJK1xE2Zf6BdwKJPmjf7ZoC6jftuzVscjtOcFXmpRfB4nOH7wgcSm+HawrZ8RzsWOd+44490LkHOve633o44Ty3k43EIJLnnofz3eNkwp0wmNr5RQu9He2RgHg7+73uvnOv2/H3tAz9OcVTXQik2xzQ+Kq7T+38fRhWOGJdTS40Y2WQXzK2kFHNiSPxQ8WEBIH3wacBP8QNH71ZVb8pIl8H1qjqPSLybWAVEAf2Ap9V1QOOtZzIIEh59s0mbnhoC39+qZ5oKMSZR9fwiXfUsqhmmMrMH6927Y/X7HDPW7e7qu+eLa7a+8of3RFvOB/mngwLz4Z5p7od1yPfhdcfdUdyb/8/rvqeV+TPF1N1bcev3A+v/Anq1gDq5p054hQ44lTXafbI99zOufIIeNeX3hoIe193nW8b7nQdX4MVT3WhUDXP7UTbdroqedtO1xHIgL/HWLkbcTFlvruPd8O6290Ii0jMNRMsvQhmr3Q7jJ42t42OehcorTth90a38294yTWRpMoweQ4UVriQKqzc97hgsncUO+AWPUjQJ+Luu+583g0b3Pk87NrofpOCSa4ZomCS23bBJO9W7u5TR8uxcne0Ot5hbwJjwoLAD4dCEKS8vqeDW/76Or9eW0dnb4IVcyZzydtn8+75VeRHBhwpp0YOVc137al9nfuW5ZW4kQuLzoHDTxl6tMQbT8Aj33Gn9xdWuOaiGce50Qljrdr3dcPWx/ft/Fu8vvqapXDEB+CI97tmj4GfkUy6mssj33VBVnmEO0Lf8qCr2QAcdqLr7FqwCnrb3WRlDZvc0XP9JrfjDOe55q+Sae57lNa4+0mHuR1/8dT9v5uqNz/+bbDh1675Jlbumo1SR+UDFVbC9GP2DeWrXgKl1aP/vYzJUhYEPmvp7OOOZ97k1r9tZUdLN6WxCKcunsaZR9dwwpwKIq3b4NcfczulysOhYq53f7jbCaa7I3/zKXj0/7lmmoFHt6kd3OTZbshh6w5X62jd4W7t9V7zRqFrusgrdo/Dea6ZpK/DvT7n3a4mcvj7oWTqwcszOBCmLHQ7/8XnQvnM0f6c6evrhpfvc52BsTLXjFY81bXrFk91t8IKa14wBguCcRNPJHlsyx7ufX4H//vCbtp74lQU5XHakdWccVQ1y2snEx7p8NOh9Ha4pocdz+07i7HhZfqbVaJF7ui6tMYNxSuucs0wve3Q2+lqJL0drlNu6kJ35D/7pNF3SieTrjmmZNrYv5sxxhcWBBOguy/Bwy83cO/6Hfx50266+5KUFUQ56fBKVs6bwruOqKKqZJQnqQ2lp90d/ZdMhfxSOwo2xryFBcEE6+iJ8/DLDTz8cj0Pv9LQf82DxdNLWXnEFE6cW8nSWeXEouM4JNMYEygWBIeQZFJ5cWcrj7zigmHtG00kFfIjIY6ZNYkT3lbBCW+r4OgZ5eRFbG4iY0xmWBAcwlq6+njm9b088VojT7zayKZdrahCQTTM8tpJrJhTwfGzJ3OUBYMxZgwsCLJIc2cvT762lydfa+TJ1xp5aVcbALFoiGWHTWLF7AqOnT2ZxdPLKE73zGZjTOBZEGSxvR29PP36/sEgAnOrijlyRhlHzyjnqBllLKgutX4GY8yQLAhySFNHL+u2NfN8XTPr61pYX9fMnvZeAMIhYU5lEfOrS5k/rYR5U0uYX13C9PICxEYRGRNoBwoCa1vIMpOK8nj3/Cm8e/4UwE2It7Olm/V1zWzY3sLLu9p47s0m7n1+R/97SvIjzJ1azOFTijliaglzvfvqspgFhDHGagS5qq27j1d2t7FpZxsv72pjc30bW+rb+2sPAMX5EWZOLqSmLEZNeYF3c4+nlxcwtTSWmRPgjDETzmoEAVQSi7LssMksO+yt867v7ehl8+42Nte3s6W+nbqmTrY3d7PmjSZauvresm40LNSUFzBzUiEzJxcwY1IhNeUxKovzqSjKp7I4j0lFeUTHcilPY8yEsyAImMlFeRw/p4Lj51Tst6yjJ87Oli62N3ezvamLbU2dbNvbSV1TFw+8uPsttYmBygujVBTlMaUkxtTSfKaUxphSks9U735KaYyqknwb5WTMIcr+Z5p+RfkR5k4pYe6UkiGXd/bG2dXSTWNHL43tPexp76WxvZc97T3sae+hvq2HNW80Ud/WQ288ud/7C/PCVJXkU1Wcz5TSfEpjUQrzIhTnhynMj1CUF6YwL0J5YbQ/RCqK8615yhifWRCYtBXmRZhTVcycg1y0SVVp7Yqzu62b3a3dNLT10NDmgqK+rYeGtm5e2tVGW3eczp44Hb3DXxQmJFDpBUdlsatVFOdHKPLui/MjFMcilMailBZEKCuIUhqLUlYQpSQWIWLNVsYclAWByTgRoawwSllhlCOmDl27GCiZVLrjCTp6EnT0xGnq7O0PjfrWbupbe9jd1k1jey9vNnbS3hOnvSdO5wECJKW8MEpVcT5VJS5Iqkry+5up8iIh8r2bexymMC/MpELX91Eai9ioKhMIFgRmwoVCQmFehMK8CFUl+dSS3hXYEkmlozdOe3ec1u4+WrvitHb10dLVR2u3u29s76WhzTVdPV/XTENbT1oBAu68jEmFUcoLXSiEQ0JI3C0cEkIhISwQi4YH3ELEomEKomFKYq6GMvhWmB8hGhbywiELGnNIsCAwWSscEtckFItSQ/rXUujoidPRG6c3nqQnnqTXu/XEk7T39NHU0UdTZ69366Opo5e27jiJpJJQJZ5M0hNXkurCqCeeoKsvQXdf
ku7eBN3xBH2J9IZlpwIhGglREA1TGnNNWqUFUUq9+6L8CFEveCIhIRwKEQ5BOBQiMvB1L6AiYSE/EqY4P0JhfpiivAhF3n0sGiYadutZCJkUCwITOEVeH4Of4okkbd1xWrwaysBbV2+C3oQXQIkkfd59Z2+Ctu4+2rrj1Ld1s6Xe1XQ6euJpB0u6RCAaDrkQCguRsBcq4oIkPCBY+mtA4mpvIXHL8qNe01o0TCwSJj8a6r932xbyIt5jL+jKCqJeyEW9xxEKomELpQlmQWCMDyLhEJOKXF9DpiS9GkkiqcSTSiKxr4aSTNJ/35dM0t2XoLPX9bl09CTo6I3T0ROnJ+6Cpy+RpDeh9CWS/bf+7Q66JXVf7cc9VuIJpb0nTmN7ku54gp4+95ndfa421JvYf9TYgYTE9S2FBARBxNX4Us1shXnuVuCNLBPc9fhU1bsf8FzpL6eqWxYOCUX5EUpirnZUnB+lON815/UmkvT0JfeFczxJPKmUFkQoL8jrbx5M3btRbO43SX2WKkTCsl8g5keyo/nPgsCYLBEKCSGEbJhXUFX7A2FgjafV679p7Yr39+V09sTdTnXAzlVxQdfVl6Cr1zW9dfa6x/Vt3ai6Wk0qNARABMGFSqovRwRCIehLJKlr6uzvU+roSewXVqlmurxIiHBIaO2ODzkMeqQiIem/YKArIeCVOTqgRpYXDhEJu2a+RNL9fqmQjnu/5adOmsOV75835jLtV8aMb9EYE3giQl7ENQ2RwSuyZlJPPEFPPOl2/uEQoUHnq6i6IGrq7KO5s5fmzj6aO/uIJ5P7QsYLHxGIJ9XVjFI1JO++zwsc7d/uvu2ndvbxZHLf44QSDonXpCZEQqH+wFhWO8mX38KCwBgTSPmRMPmR4atXIvtGs00vT38wQjays22MMSbgfA0CETlVRF4WkS0ictUQy/NF5Ffe8qdEpNbP8hhjjNmfb0EgImHgeuADwELgwyKycNBqnwSaVHUu8APgu36VxxhjzND8rBEcB2xR1ddUtRe4Azhr0DpnAbd6j1cDJ0s2jLUyxpgc4mcQTAe2DXhe57025DqqGgdagP3mRxaRS0VkjYisaWho8Km4xhgTTFnRWayqN6rqclVdXlV1kKkvjTHGjIifQbAdmDng+QzvtSHXEZEIUAY0+lgmY4wxg/gZBM8Ah4vIbBHJAy4E7hm0zj3Ax7zH5wJ/0Wy7iLIxxmQ5Xy9eLyKnAT8EwsDNqvpNEfk6sEZV7xGRGPALYCmwF7hQVV87yDYbgDdGWaRKYM8o35vtgvrd7XsHi33v4R2mqkO2rfsaBIcaEVmjqssnuhwTIajf3b53sNj3Hp2s6Cw2xhjjHwsCY4wJuKAFwY0TXYAJFNTvbt87WOx7j0Kg+giMMcbsL2g1AmOMMYNYEBhjTMAFJggONiV2rhCRm0WkXkQ2Dnhtsog8ICKbvXt/LnM0gURkpog8JCIvisgLIvI57/Wc/u4iEhORp0Xkee97f817fbY3tfsWb6r3zF08+RAiImEReU5Efu89z/nvLSJbRWSDiKwTkTXea2P6Ow9EEKQ5JXauuAU4ddBrVwF/VtXDgT97z3NNHPiCqi4EVgCXef/Guf7de4D3qOrRwBLgVBFZgZvS/QfeFO9NuCnfc9HngE0Dngfle79bVZcMOHdgTH/ngQgC0psSOyeo6qO4s7QHGjjd963A2eNZpvGgqjtV9VnvcRtu5zCdHP/u6rR7T6PeTYH34KZ2hxz83gAiMgM4Hfgv77kQgO89jDH9nQclCNKZEjuXTVXVnd7jXcDUiSyM37wr3S0FniIA391rHlkH1AMPAK8Czd7U7pC7f+8/BP4ZSHrPKwjG91bgf0VkrYhc6r02pr9zu3h9wKiqikjOjhkWkWLgLuAKVW0deJ2jXP3uqpoAlohIOfBbYP7Elsh/InIGUK+qa0Vk5QQXZ7y9Q1W3i8gU4AEReWngwtH8nQelRpDOlNi5bLeIVAN49/UTXB5fiEgUFwK3qepvvJcD8d0BVLUZeAg4ASj3pnaH3Px7PxFYJSJbcU297wH+k9z/3qjqdu++Hhf8xzHGv/OgBEE6U2LnsoHTfX8M+N0ElsUXXvvwTcAmVf3+gEU5/d1FpMqrCSAiBcD7cP0jD+Gmdocc/N6qerWqzlDVWtz/57+o6kXk+PcWkSIRKUk9Bk4BNjLGv/PAnFk81JTYE1sif4jI7cBK3LS0u4GvAHcDdwKzcFN4n6+qgzuUs5qIvAN4DNjAvjbja3D9BDn73UXkKFznYBh3YHenqn5dRObgjpQnA88BF6tqz8SV1D9e09CVqnpGrn9v7/v91nsaAX7pTe9fwRj+zgMTBMYYY4YWlKYhY4wxw7AgMMaYgLMgMMaYgLMgMMaYgLMgMMaYgLMgMGYcicjK1EyZxhwqLAiMMSbgLAiMGYKIXOzN879ORH7qTezWLiI/8Ob9/7OIVHnrLhGRJ0VkvYj8NjUXvIjMFZEHvWsFPCsib/M2Xywiq0XkJRG5TQZOiGTMBLAgMGYQEVkAXACcqKpLgARwEVAErFHVRcAjuLO2AX4OfElVj8Kd2Zx6/Tbgeu9aAW8HUrNDLgWuwF0bYw5u3hxjJozNPmrM/k4GlgHPeAfrBbhJvJLAr7x1/gf4jYiUAeWq+oj3+q3Ar735YKar6m8BVLUbwNve06pa5z1fB9QCj/v+rYwZhgWBMfsT4FZVvfotL4r826D1Rjs/y8C5bxLY/0MzwaxpyJj9/Rk415vvPXU92MNw/19SM1v+HfC4qrYATSJykvf6R4BHvKuk1YnI2d428kWkcDy/hDHpsiMRYwZR1RdF5F9xV4EKAX3AZUAHcJy3rB7XjwBu2t+feDv614CPe69/BPipiHzd28Z54/g1jEmbzT5qTJpEpF1Viye6HMZkmjUNGWNMwFmNwBhjAs5qBMYYE3AWBMYYE3AWBMYYE3AWBMYYE3AWBMYYE3D/H3xXa754RoaOAAAAAElFTkSuQmCC\n",
      "text/plain": [
       "<Figure size 432x288 with 1 Axes>"
      ]
     },
     "metadata": {
      "needs_background": "light"
     },
     "output_type": "display_data"
    }
   ],
   "source": [
    "plt.plot(history.history['accuracy'])\n",
    "plt.plot(history.history['val_accuracy'])\n",
    "plt.title('model accuracy')\n",
    "plt.ylabel('accuracy')\n",
    "plt.xlabel('epoch')\n",
    "plt.legend(['train', 'valid'], loc='lower right')\n",
    "plt.show()\n",
    "plt.plot(history.history['loss'])\n",
    "plt.plot(history.history['val_loss'])\n",
    "plt.title('model loss')\n",
    "plt.ylabel('loss')\n",
    "plt.xlabel('epoch')\n",
    "plt.legend(['train', 'valid'], loc='upper right')\n",
    "plt.show()"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "You can see that we start overfitting midway, around epoch 25. Let’s reload our best\n",
    "performing model according to the validation loss, and demonstrate how to use it to\n",
    "predict a segmentation mask"
   ]
  {
   "cell_type": "code",
   "execution_count": 9,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "image/png": "iVBORw0KGgoAAAANSUhEUgAAAOcAAADnCAYAAADl9EEgAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjUuMSwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy/YYfK9AAAACXBIWXMAAAsTAAALEwEAmpwYAAAX/ElEQVR4nO3deXQUVb4H8O+t6i1JZ4MkJGxJIAQCsiggyACC7LKoOE/fjDOIg+OCC67jezPzxqezPc8MCjKAqAyO2yhuCMg6CIPIIqBhE4LsS0ISMCQknfRW9f4IBpJ00t3pTld19/dzTs6hu6vTP9L97Xvr1q26QlVVEJH+SFoXQESeMZxEOsVwEukUw0mkUwwnkU4Zmnswe+5sDuUStbLjs54Unu5ny0mkUwwnkU4xnEQ6xXAS6RTDSaRTDCeRTjGcRDrFcBLpFMNJpFMMJ5FOMZxEOsVwEukUw0mkUwwnkU4xnEQ6xXAS6RTDSaRTDCeRTjGcRDrFcBLpFMNJpFMMJ5FOMZxEOsVwEukUw0mkUwwnkU4xnEQ6xXAS6RTDSaRTDCeRTjGcRDrFcBLpVLOL51JwqA2+AoWiTR0UXhjOViZn2LDlRwvrbufbk/DgihkaVkThguFsBaoETBm6C7GSA53NF5Amx9U9liZXalgZhROGM8gUiwJTmxr8od0WWCVL671OrBtSnAsoNbfaa5C2OCAUZKmdy1Aw7M1WDSYAXJt3AhuHzwNEq74MaYgtZ4j1Mhmw/LaX6m6fdSXgwZUzALWJJwhg4aTF+MvJ8Th6Og0rRvwNAJAqK2grxWLlrS9CaZDQn+25BxXHklrpf0ChwnAGUYeexZja8ZtmtzEKGb1MMXW3Mw2XMHzwAbjV2oA5FAO+2pWLlJwL6NGmGAAwyFKB6R23Ij+5c73nAkCeKbbRa/wiZxt2pmVCUSVs/bo7hJPNazhiOINBAEq8C091WYspcTa/nmqVLFjS+Yu62zbFgd7HMnFvly9xX2Lh5XtjcFf8BdwVf8Gn3/lI8kkg+STcqoKcfd0gnLJfNZE+MJxBoFgUHB6/CEYReAhiJROOjloShKoo3HFAKILJQsI/xryGtB6lWpdCLcBwRrjhFiA1tkrrMqgF2K0NgGpSoUoqYHZD0vExjViDA4qlds6gZJeaHhkmXWE4A/Da+Ncx1FIDCRLkIOxvtpZ3sv4FV9ZaAEDe+gcgyowaV0S+YDgDYBRumEXoP+i95s2E9cyV5u8n/7UajyWfaHJ7WUiQL+/B/GXIB/jbyZE4tT+jtcukADGcGlpamYjFp4fVu88ou/FRt+UwCyP2Omrw9NEfN3pe5sclcBccqbs9d8hYrM4thNngwgc5K5r9wrjdWoENScU4BYZT7xjOFlJlFTIUtGRMrVypBgA8s+mXyL1vZ73HlIQEHNnrQkeDC384MwkYdabR890Nbuc++BUAwJWUiGP5TuQYpWYP6xiEG6rEU9f0Tqhq06MD2XNnc+jAAyXJiZ2jX0ayFANZ+BfONyrSsPTGfgAA1VYN5dKlRtvIqamAJACHE+6yMr9+v5yaCts7cdh0zbImt7GrTux3qLhj2aN+/W5qHcdnPelxNJEtZwsIAaRcdRqYr4bunYrqj9shpXhbs9u5S1t+XNJdWgrXq12QPe6XOD7xNY/bmIURbSSeuqZ3DGcrO+WqxPwLQwEAl1anI/3Vra3+mnEf7kAHdRAwsdVfiloRw9kSwvfe/h/PjcGJ62v3MdPR+sH0iwCPeeoYZwj56doBR7BhxMs+bZv36kycuTOtlSvyLH7NfkyY8BPk2+0eH+9siMXq22ZDTq8OcWXkK4bTT23NVcg2WpvdxqY40OWj+9FpvQ2u4ydDVFl9SlUV1APfoUb13DmShYRcYxwkmUO2esVubZCdcVXik0u90ON/DsF9sVzrcvBJeX+kylvQ1csXCukPW84gu/PbaVjZK1kXwVRdLuRfC4z+fJbWpVALMJxR7r2Br2PEDfu1LoM8YDiDaNzBSSjfmK51GX7pZzYjN65Y6zLIA4bTD4pFQYzsbPLxykUd0eEFnR0uASBsBhx1ctJBuGE4/bBqwhzMTv9K6zL8ljtrN+6fzql64Ybh9INJKH7PpdUD1eWC5Gj6kMkdCd/giXGfQZU5I0FPwu+TpkPlSjXGHJyM2CLPB/z1Lttoxc8SDvMC1TrD45w+Ug1NtypHnDLk8cVQnadDWBFFOobTB3J6NXYMXYhkmQfyKXTYrfWBkFQky42vrB4pPrNZMHH/XRBu9mv1hC0nYX91JxQd1GaCPjWNLSeRTjGcRDrFbm2Aniy6Dp+tHIxM9w6tS6EIw5YzQJ8c6IfMZ7cCSsNr4hEFhuEk0il2a6PcyAO34MSpVH5L6xDfkyhXVJYAqYLf0XrEcEYBOSkRNSkmrcsgP/ErMwoc/Gs3HJowHwBXFwsnDKcX1w44ggfbb9S6jMDIqiaroVFg2K31ol/iGYyK4WESCj2Gk0inGE4inWI4iXSKA0JRyq468a/qeDgd/AjoFd8ZL9xqmHcuhIDwcA51ocuOR1c8Evp6yGdh/slrfW9sHYrrdt2pdRktIgwG3LjHhs03zdW6FGoBhtMLqUZCVbVZ6zJaRHW78fbSUfhT8WitS6EWYDgjmaoic+VFfHGmi9aVUAtwnzOCCYMBz378FgZbZK1LoRZgy0mkUwwnkU4xnEQ6xXAS6RQHhKLQnLIsvHJgmNZlkBdsOaNQrORArMWhdRnkBcMZhe5LLMRHfRdrXQZ5wXAS6RTDSaRTDKcPXA4Zy6tiYVedjR7r2K4M9gkD4fHUD5066qzEqqo8rcsgLxhOX5w34/GV03DG1XhZ+c29P8HsBfMhDOFzAa3ZJaPx4tqJWpdBXjCcRDrF45w+UBJcmNx3D1JkTiCn0GE4fWCyOvBy+50AYrQuhaIIu7VEOsVwEulUQOHM6l0IU8eqYNVCITIq8Vvk9DmjdRnkRUDhfCJrHfp3OB2sWiiIJIsFUnZnGEXjpSRut1bglZz3NKiK/BHQgNCjK6cDapAqoaCqvLkvPp+3AEbBpf/CVWD7nAymbqkCMAoe+glnHBCKULGFNei/+w6UuW1al0ItxHBGKGnnAbS75wK+czWeVuhU3bik8hC33vEdilC2Sdfh/ZdfRIbB2uixp88NwvKt/RE+U/WjE8MZgU797xAMHr/PYzABwKEYIFyMpt6xWxuB4geWYknnL7QugwLEcBLpFMNJpFMMpw8c5Wbc+t04nHeH/1TF+8/cgI0numldBvmAA0I+EHYJ+063hy07fGdd2FUnvrFLWLevF6QKvu3hgO+SD4xp1SgY9iYAz6Of4eAbu4S7Pn2IXaUwwvcqgkjx8Ri2twbvX7NE61IoCNhyRhAhS5iRtKvJ45sUXthyRhJFxXpbFkoiYOCKGM6I4q6owDt5nTBl391al0JBwHBGECkuDslbkjE/712tS6Eg4D5nBBFGA17qtJz7nBGCLSeRTjGcRDrFcAZoynfjMfN3s6C6Gi9yFGpKZRWmPPs0Hj47SOtSKAgYTm9S7OieXtL
kw/tPtkfSW9sAVfupfarLhTZLtuGrks5al0JBwAGh5ghgRt+t+HVKgdaVUBRiOJugyipWTJmDHKMBQPgs70eRg93aZrSXVZgFg0naYDgD8Gp5exgKzVqXQRGK3doAfPDAOGT/e5vWZVCEYsvpQWxWBZZNmYsEyaJ1KS2S+rADfV+YqXUZFCCG0wOL0YU+Jgtk4fnPc8BRjS6f3A/T6bIQV+Yb1/GTiD/TeAEjCi8MZwNKjIKkmOpmt9lZk4luD+2A69iJ0BTVAoZqBWtsZjjV2pAahRtKvAsq3/GwwbeqgedGfIz1eSu0LiNg5lU7MSevL3bba2/3N5tw/ObXgSSHtoWRzxjOBmSh/UyfoFGVRne9Mezv6N7vlAbFkL8YTj89W9oLz31+q9ZltNhwC5Bl/V7rMsgHDOdVFLMCo3A1u827q4cjd+ZXIaqIohmPc16mxCjInzQXVmEGv7NID/gpvEqsMDV5+IQo1PhJjEJ5sUWwdL6kdRnkBcPpB5viQLgN5lYolrpjnT94JPkkVg5YpFFF5CuG00duVcGtd96Hrn/ar3UpPlNdLswdehN6/nuG1qVQCzCcfjCcr4RyKby6g65zxXDZZa3LoBZgOFE7UhubGl1XSbcIQE6vhmoMs356FGE4AXTPPYsDN7wDo4jQFkYVje7KMFhx+MZ/wJRm06Ag8gXDCeDwwY7otml6o4GTSNHzfwpxw1MPaF0G+YnhBCAn2zGm2yFIaNzCAMAamxm5S2cCF/R5ipg3rrOFiD9Zo3UZ5CfOEALQNb0UCzpsR1PfVWvKeyPn8e2IzHaV9IotJ5FOMZxR7p2Bi/Gjwd9qXQZ5wHBGuf5mE3pZi7QugzxgOKOEcCs46LBF7Ih0JGI4o8WOfXiix01451KG1pWQj8ImnN37ncKtw3mSc4upKpSaGrh5ha+wwXeKkGMuhjW7HE0c5iWNhE04C/I7Y9nm60P+ujbFgSpXZC+5cLu1ApsH/B2qzHm2ehI24dTKgIWP4ex4k9ZlUBRiOL0w2gB3WXhO2/NkwZzbMHTvVK3LIB8wnFEmZdE2lHzdTusyyAcMJ5FOMZxEOsWzUjx45WIH/HX1ZABA9q7mFzWKFBZhwKjr92NDQXeICxwA0wOGE0Clw4zDzirkGuMAAO+dGYiuT27XuKrQMgsjXuv0JQaWdsD3F9poXQ6B3VoAQNHBNIxb/TjnnZKusOVsoOeCmchc/j0ar89FFFpsORtIOqJA2XtI6zJaVcoeFb13/FTrMsiL8A6nqL2sZaBzQoVLQLJJ2OtwQ3JG/hS2+Pe3o9OvauD2sH5nnMkB1RT5f4NwENbhVOJdODR5PqS0wC5e1XYvkPPUTvymxzDEfRzdZ75s6PUxfjfqE63LIIT5Pqewybh+5zS4LpoC+pYRqgoobqh2DgjJQoJRRNffwdihCr/usxoA8MKBcag5Fa9xRbXCO5wugcrjieHd/JPmEuJqMC3hPADg66wCfGHqAgAoO5EM4dLuPLqwDmdTfjifWHgYclVlFUIRAHeryIM5GbuAjF0AgOzSe4HKK6sACHdogxqR4Xx09BoAwLx14xs99u6kBXixcCx27+wW6rIozOSPnQdFvfIt3n/jwyGdPRUR4fzR4G9xujIZJwvS8fNhWzDZWrtMX+UoS6NtrzE58XD7DXhvaBXWbumH9ptVxB84H30XjC4+j+v++jCevH9pXZcuWpWeS8TIA7fU3c5JOI/XOn2JRCmm3na/GrAWb50ahHMH00JSV0SEs2vseTgUA07I7fDrlHyYhRUA8NsUT8crLRhuAdqmbsRa9EPCnmK4jp0Iab164K6oQPpLW/H8wEnAdSvrBTRVroCcYYOrONbjrkGkkSoMOLX/yoXPTqe1wZq2tV3b7sYLyDbWfp56W06jbUxvnAtVXSF6nVb15ufDsfOrXK3LCEtdf5qPFxfcUe++sbFOHBi2BKo5CpLpgVpixkMr7sFDK+7B7JJRdfdP2zoDB77OClkdEdFyEgWTnF6NlUMWAADayRKA2u7txuHzUHPVcopTdjwA59m4VqsjIlrOHwiXwN0nxiHfbve6bak7Dh02KVDLykNQWfiRIHDLwK8hZ0Tf+p3OKiOePzsRqZKot9/Z2WBFrjGu7mdGz61I61HaanVEVjjdAjsOZ+OoM9XrtlWqCXEnK6H6EORIZ7yk4r1LyfWm88lCwpyMXchpF32DRdIlA7bt6IGLSvPd+qfbHMX92V9ATXbW/gR52mNEhVMxKzg09hXcbq3wuu3E2BqsXvUunIN6hKAyfWuzZBveHHItypToOLE8mKYnlODY2MU4NnYxMroGtxWNqHBKdgm9Nt2HnE3TMenwBK3LoTA3evMj+MN537+8l+S9hWk3bQ7a60dUOKHWjrSpxRYUViRoXQ2Fu/NmFNqTfN481xiH2xK/Rvd+p6AaA+/iRlY4qeUUFbvsbVDeoGtrNdqhWKLzkAoAVLpMOO+u8nn7PKMRr3ZdCtUY+N+M4SQAtRfOnpPXF7d8+5N69/8zez2WjH1do6q09+WOnhi06WGft//MlojhnzwFySZ739gLhpPqqE4HFLX+5G5ZSJCiYZpQU1RAVXyf8D7UUozfj/8gKL2NiA3npcoYvFre3utFu06PMkO58doQVRWeUiUb2nT7Pij7UZEuRY7Df1pLgSAsChWx4VSKLfi/9ZNRrjR/lYSCXyzE8ftCVFSYyjPFYud1SwGrS+tSNGNTHChz23DeXYUytw8TMwxqwJfP4fQ9Ii9EmQnXfPoIui8qhzhbArVjOyxb9SbMwuhxe1lIyB87D3cdmYqD32S2+HUjtuWkFlqYiuzl7ErUowLCISCVV8F94XtI5d5HbxOlGJikwHoakR1OVWBVVSbOuCqb3axjahkc4wYAgks7xyz7ChkbPX8s2rW7CCUheru2jk5tIbe7ci7n9hq3T/O4WyqiwykU4LnVP8bisuZXxN50zTK89Mp8CBPXCGnOtr4fYfQ1B7UuQzPHplpwcWSXutv35k/Dg4eavv6vJAIbFOI+J5EXSpITn980FzNmzIJ560EgLQUAsH7AIhiFAOD5tLHXspbjYufaQyqjNs6CKPO8j9qUiG45/dHR4ELBy30g+vfSuhTSG7uMZ07fAktRJRTblZHaDIMVKXLT53Mmy7HINlqRbbRCyP4f92Q4L0uR43B88muo6KaPa5aSfgiHwK6jmRDVl/cvnS68ejEHRV7GMgDArjqRb7dDcfkfNYaTyJtEJ46N+Tuc6YkAANfZQqzslYwnT0/2+tQ9DuD2ZbMgXfSvSwtwn5M8SFpXgNE//QXmvjEfvUz1r0D3fMZajLj5EH636j80qk4D5UZ0/fweGMfFQL5xSN3dh/e50PV4VrNPVd2ixXMRoiKc64p6INFgw2PJJ7xuWzLZDsUwGIlvR9fiuVdzl5XB8GUVatTGk7czDFYMtJzSoKrAKBYFvXv6VndJlRWlBSnI6l2IOKOj7v59lZ0hVV/pbErVElBtbvZ3BXJwLirCee5gGhZcHI7Hhp/wuu2RkUswIvVW4O1WL4tCoHYVOhWmJDuWd1vj03
M+s1nwaMF0PN9lGTINNpiEQJoch66n7/EaxmCKinBS9Fo1fg5yjT9cXNy/IZZpy2fW/qOtHUdHLQluYT5gOCliqBLwm7HLEC/XnjAuQ0UXoxGy8C+UQ8zf488T/1l3O0nS5gqEUTNa66w2Yk5ZFiq9nKUCAINST+DCvTdAmEPXhQknSRKQ1qO0tsuoM0XOJPQ2FeEOazlut1Y0OTm9OTWqgkJnct3Pt/YOmFOWBaUm8BOo/RE1Lad00Yh568bj5qkHkOvlK+mFdvkoe3Yr7vr0FrhLW++6pOEqTY7D+t7v4Lry++CujtW6nDpCAZZsGAGMqr8Uh1N1w6m6ESvVn57Z1Bf1FzUdPC6CFeqWLGrCScFz1FmJMSufhHCGx4kCM06NxNZjXXFk5JX9xhJ3FQateLx2OciGAj8VMygYziZYJTO+/0ciTIuyEfNpdC9F35AbAsKhh4+vZ28dvB5rEnvW3S4qTYRaYcLQvVPr7nO4ZUh2fe/VRV0411b2BKzfItfY/BoXRiFje78P0bfLTMQ0uyXpjaswFkWF9bvbAkBRiJbuCxZ9f3W0gjnrJuDPRePrLT3QHFUCz/MkTURdOAHg37t7oueW6T5t++GjfwH+1aF1CyLyICrDKRwCjgsWTD0yBlOPjMH8i52a3DbXGIf/zlqFw4sGQk71vkBSJBD9e6FgYR90Mji1LiWqRWU4gdp1Vfbs7oo9u7tixbk+zW47IkbBkUmLUDG8Cwwd2oeoQu1U5MTj+M2vI62ZcxWp9UVtOP0lCwlb5i3C4UdafjU1In8wnH56646/wbgpQ+syKAownACOl7bFM8X9fBrBHWyR8Vzmpzjx+xtg6NQxBNVRtGI4UXtcbOmO66HAt6ul9TebUDBjIcoHdYAhO7Pez9WXTiQKRNRNQgimjXMXNLpv2J47kXhziQbVUKRhy3mZsEvou+1urLP5fhaDUciNfubnvYu4zamQ4q+6UJgQqFzTBSUzh0DunoP22+MhXaPP5e6/mzsYt/x2Q5OPv3ChGyZvfzCEFUUvtpyXCQWwn7bi9fTh2J90vO7+Cdb9yDP5fuZFf7MJb3ZZgSG/fALyDyc9CODtHrPxc8d0HM1IxarOH2Jswt26mFzdUMcexXim7XceH1tcno5lZ/rAVaifM1EiGcPZwO6d3bAb3epu20aZ6p1+5AurZMHepxp2eWPw9YD3gQFBKFIjf9wx0e8LI1PLCVXlmotEesR9TiKdYjiJdIrhJNIphpNIpxhOIp1iOIl06v8BD/yT5Jq/noEAAAAASUVORK5CYII=\n",
      "text/plain": [
       "<Figure size 432x288 with 1 Axes>"
      ]
     },
     "metadata": {
      "needs_background": "light"
     },
     "output_type": "display_data"
    }
   ],
   "source": [
    "from tensorflow.keras.utils import array_to_img\n",
    "model = keras.models.load_model(\"unet_segmentation.keras\")\n",
    "i = 4\n",
    "test_image = val_input_imgs[i]\n",
    "plt.axis(\"off\")\n",
    "plt.imshow(array_to_img(test_image))\n",
    "mask = model.predict(np.expand_dims(test_image, 0))[0]\n",
    "\n",
    "# Utility to display a model’s prediction\n",
    "def display_mask(pred):\n",
    "    mask = np.argmax(pred, axis=-1)\n",
    "    mask *= 127\n",
    "    plt.axis(\"off\")\n",
    "    plt.imshow(mask)\n",
    "    \n",
    "display_mask(mask)"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "There are a couple of small artifacts in our predicted mask, caused by geometric shapes\n",
    "in the foreground and background. Nevertheless, our model appears to work nicely."
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "# Part IV : Object Detection with Yolo"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "The section follows the [blog](https://machinelearningmastery.com/how-to-perform-object-detection-with-yolov3-in-keras/).  "
Mirko Birbaumer's avatar
Mirko Birbaumer committed
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "Object detection is a computer vision task that involves both localizing one or more objects within an image and classifying each object in the image.\n",
    "\n",
    "It is a challenging computer vision task that requires both successful object localization in order to locate and draw a bounding box around each object in an image, and object classification to predict the correct class of object that was localized.\n",
    "\n",
    "The “You Only Look Once,” or YOLO, family of models are a series of end-to-end deep learning models designed for fast object detection, developed by Joseph Redmon, et al. and first described in the 2015 paper titled [You Only Look Once: Unified, Real-Time Object Detection](https://arxiv.org/abs/1506.02640).\n",
    "\n",
    "The approach involves a single deep convolutional neural network (originally a version of GoogLeNet, later updated and called DarkNet based on VGG) that splits the input into a grid of cells and each cell directly predicts a bounding box and object classification. The result is a large number of candidate bounding boxes that are consolidated into a final prediction by a post-processing step.\n",
    "\n",
    "There are three main variations of the approach, at the time of writing; they are YOLOv1, YOLOv2, and YOLOv3. The first version proposed the general architecture, whereas the second version refined the design and made use of predefined anchor boxes to improve bounding box proposal, and version three further refined the model architecture and training process.\n",
    "\n",
    "Although the accuracy of the models is close but not as good as Region-Based Convolutional Neural Networks (R-CNNs), they are popular for object detection because of their detection speed, often demonstrated in real-time on video or with camera feed input.\n",
    "\n",
    "A single neural network predicts bounding boxes and class probabilities directly from full images in one evaluation. Since the whole detection pipeline is a single network, it can be optimized end-to-end directly on detection performance."
   ]
  },
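  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "To make that post-processing step concrete, here is a minimal sketch of greedy non-max suppression, the standard way the many candidate boxes are consolidated. It assumes axis-aligned boxes given as `[x1, y1, x2, y2]` rows of a NumPy array with per-box scores, and is only an illustration of the idea; the `yolo3_one_file_to_detect_them_all.py` script provides its own `bbox_iou` and `do_nms` helpers for the real pipeline."
   ]
  },
  {
   "cell_type": "raw",
   "metadata": {},
   "source": [
    "import numpy as np\n",
    "\n",
    "def iou(box, boxes):\n",
    "    # intersection-over-union of one box against an array of boxes\n",
    "    x1 = np.maximum(box[0], boxes[:, 0])\n",
    "    y1 = np.maximum(box[1], boxes[:, 1])\n",
    "    x2 = np.minimum(box[2], boxes[:, 2])\n",
    "    y2 = np.minimum(box[3], boxes[:, 3])\n",
    "    inter = np.clip(x2 - x1, 0, None) * np.clip(y2 - y1, 0, None)\n",
    "    area = (box[2] - box[0]) * (box[3] - box[1])\n",
    "    areas = (boxes[:, 2] - boxes[:, 0]) * (boxes[:, 3] - boxes[:, 1])\n",
    "    return inter / (area + areas - inter)\n",
    "\n",
    "def non_max_suppression(boxes, scores, iou_threshold=0.5):\n",
    "    # repeatedly keep the highest-scoring box and drop the boxes that overlap it too much\n",
    "    order = np.argsort(scores)[::-1]\n",
    "    keep = []\n",
    "    while order.size > 0:\n",
    "        best = order[0]\n",
    "        keep.append(best)\n",
    "        rest = order[1:]\n",
    "        order = rest[iou(boxes[best], boxes[rest]) < iou_threshold]\n",
    "    return keep"
   ]
  },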
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "### Object Detection With YOLOv3\n",
    "\n",
    "The keras-yolo3 project provides a lot of capability for using [YOLOv3 models](https://github.com/experiencor/keras-yolo3), including object detection, transfer learning, and training new models from scratch.\n",
    "\n",
    "In this section, we will use a pre-trained model to perform object detection on an unseen photograph. This capability is available in a single Python file in the repository called [yolo3_one_file_to_detect_them_all.py](https://raw.githubusercontent.com/experiencor/keras-yolo3/master/yolo3_one_file_to_detect_them_all.py) that has about 435 lines. This script is, in fact, a program that will use pre-trained weights to prepare a model and use that model to perform object detection and output a model. It also depends upon OpenCV.\n",
    "\n",
    "Instead of using this program directly, we will reuse elements from this program and develop our own scripts to first prepare and save a Keras YOLOv3 model, and then load the model to make a prediction for a new photograph."
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "### Create and Save Model\n",
    "\n",
    "The first step is to download the pre-trained model weights.\n",
    "\n",
    "These were trained using the DarkNet code base on the MSCOCO dataset. Download the model weights and place them into your current working directory with the filename `yolov3.weights`. It is a large file and may take a moment to download depending on the speed of your internet connection.\n",
    "\n",
    "[YOLOv3 Pre-trained Model Weights (yolov3.weights) (237 MB)](https://pjreddie.com/media/files/yolov3.weights)\n",
    "\n",
    "\n",
    "Next, we need to define a Keras model that has the right number and type of layers to match the downloaded model weights. The model architecture is called a _DarkNet_ and was originally loosely based on the VGG-16 model.\n",
    "\n",
    "The `yolo3_one_file_to_detect_them_all.py` script provides the `make_yolov3_model()` function to create the model for us, and the helper function `_conv_block()` that is used to create blocks of layers. These two functions can be copied directly from the script.\n",
    "\n",
    "We can now define the Keras model for YOLOv3."
   ]
  },
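  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "Before building the model, make sure `yolov3.weights` is actually present in the working directory. A minimal sketch for fetching it from inside the notebook (assuming the URL above is still reachable; downloading it manually in a browser works just as well):"
   ]
  },
  {
   "cell_type": "raw",
   "metadata": {},
   "source": [
    "# download the pre-trained DarkNet weights (~237 MB), skipping the download if the file is already present\n",
    "import os\n",
    "from urllib.request import urlretrieve\n",
    "\n",
    "if not os.path.exists('yolov3.weights'):\n",
    "    urlretrieve('https://pjreddie.com/media/files/yolov3.weights', 'yolov3.weights')"
   ]
  },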
  {
   "cell_type": "raw",
   "metadata": {},
   "source": [
    "# define the model\n",
    "model = make_yolov3_model()"
   ]
  },
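  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "For orientation, the sketch below shows the kind of building block that `_conv_block()` assembles: convolutions followed by batch normalization and LeakyReLU activations, with an optional residual skip connection, as used throughout DarkNet. This is a simplified, hypothetical illustration (the real helper is driven by a list of per-layer configuration dicts and also handles stride and padding details), so copy the original functions from the script rather than this sketch when building the model."
   ]
  },
  {
   "cell_type": "raw",
   "metadata": {},
   "source": [
    "from tensorflow.keras.layers import Conv2D, BatchNormalization, LeakyReLU, add\n",
    "\n",
    "def simple_darknet_block(x, filters, kernel_size=3, use_skip=True):\n",
    "    # a 1x1 bottleneck convolution followed by a full convolution, each with BN + LeakyReLU;\n",
    "    # the residual add assumes the input already has `filters` channels\n",
    "    shortcut = x\n",
    "    x = Conv2D(filters // 2, 1, padding='same', use_bias=False)(x)\n",
    "    x = BatchNormalization(epsilon=0.001)(x)\n",
    "    x = LeakyReLU(alpha=0.1)(x)\n",
    "    x = Conv2D(filters, kernel_size, padding='same', use_bias=False)(x)\n",
    "    x = BatchNormalization(epsilon=0.001)(x)\n",
    "    x = LeakyReLU(alpha=0.1)(x)\n",
    "    return add([shortcut, x]) if use_skip else x"
   ]
  },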
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "Next, we need to load the model weights. The model weights are stored in whatever format that was used by \n",
    "_DarkNet_. Rather than trying to decode the file manually, we can use the `WeightReader` class provided in the script.\n",
    "\n",
    "To use the `WeightReader`, it is instantiated with the path to our weights file (e.g. `yolov3.weights`). This will parse the file \n",
    "and load the model weights into memory in a format that we can set into our Keras model."
   ]
  },
  {
   "cell_type": "raw",
   "metadata": {},
   "source": [
    "# load the model weights\n",
    "weight_reader = WeightReader('yolov3.weights')"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "We can then call the `load_weights()` function of the `WeightReader` instance, passing in our defined Keras model to set the weights into the layers."
   ]
  },
  {
   "cell_type": "raw",
   "metadata": {},
   "source": [
    "# set the model weights into the model\n",
    "weight_reader.load_weights(model)"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "That’s it; we now have a _YOLOv3_ model for use.\n",
    "\n",
    "We can save this model to a Keras compatible `.h5` model file ready for later use."
   ]
  },
  {
   "cell_type": "raw",
   "metadata": {},
   "source": [
    "# save the model to file\n",
    "model.save('model.h5')"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "We can tie all of this together; the complete code example including functions copied directly from the `yolo3_one_file_to_detect_them_all.py` script is listed below."
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 4,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "loading weights of convolution #0\n",
      "loading weights of convolution #1\n",
      "loading weights of convolution #2\n",
      "loading weights of convolution #3\n",
      "no convolution #4\n",
      "loading weights of convolution #5\n",
      "loading weights of convolution #6\n",
      "loading weights of convolution #7\n",
      "no convolution #8\n",
      "loading weights of convolution #9\n",
      "loading weights of convolution #10\n",
      "no convolution #11\n",
      "loading weights of convolution #12\n",
      "loading weights of convolution #13\n",
      "loading weights of convolution #14\n",
      "no convolution #15\n",
      "loading weights of convolution #16\n",
      "loading weights of convolution #17\n",
      "no convolution #18\n",
      "loading weights of convolution #19\n",
      "loading weights of convolution #20\n",
      "no convolution #21\n",
      "loading weights of convolution #22\n",
      "loading weights of convolution #23\n",
      "no convolution #24\n",
      "loading weights of convolution #25\n",
      "loading weights of convolution #26\n",
      "no convolution #27\n",
      "loading weights of convolution #28\n",
      "loading weights of convolution #29\n",
      "no convolution #30\n",
      "loading weights of convolution #31\n",
      "loading weights of convolution #32\n",
      "no convolution #33\n",
      "loading weights of convolution #34\n",
      "loading weights of convolution #35\n",
      "no convolution #36\n",
      "loading weights of convolution #37\n",
      "loading weights of convolution #38\n",
      "loading weights of convolution #39\n",
      "no convolution #40\n",
      "loading weights of convolution #41\n",
      "loading weights of convolution #42\n",
      "no convolution #43\n",
      "loading weights of convolution #44\n",
      "loading weights of convolution #45\n",
      "no convolution #46\n",
      "loading weights of convolution #47\n",
      "loading weights of convolution #48\n",
      "no convolution #49\n",
      "loading weights of convolution #50\n",
      "loading weights of convolution #51\n",
      "no convolution #52\n",
      "loading weights of convolution #53\n",
      "loading weights of convolution #54\n",
      "no convolution #55\n",
      "loading weights of convolution #56\n",
      "loading weights of convolution #57\n",
      "no convolution #58\n",
      "loading weights of convolution #59\n",
      "loading weights of convolution #60\n",
      "no convolution #61\n",
      "loading weights of convolution #62\n",
      "loading weights of convolution #63\n",
      "loading weights of convolution #64\n",
      "no convolution #65\n",
      "loading weights of convolution #66\n",
      "loading weights of convolution #67\n",
      "no convolution #68\n",
      "loading weights of convolution #69\n",
      "loading weights of convolution #70\n",
      "no convolution #71\n",
      "loading weights of convolution #72\n",
      "loading weights of convolution #73\n",
      "no convolution #74\n",
      "loading weights of convolution #75\n",
      "loading weights of convolution #76\n",
      "loading weights of convolution #77\n",
      "loading weights of convolution #78\n",
      "loading weights of convolution #79\n",
      "loading weights of convolution #80\n",
      "loading weights of convolution #81\n",
      "no convolution #82\n",
      "no convolution #83\n",
      "loading weights of convolution #84\n",
      "no convolution #85\n",
      "no convolution #86\n",
      "loading weights of convolution #87\n",
      "loading weights of convolution #88\n",
      "loading weights of convolution #89\n",
      "loading weights of convolution #90\n",
      "loading weights of convolution #91\n",
      "loading weights of convolution #92\n",
      "loading weights of convolution #93\n",
      "no convolution #94\n",
      "no convolution #95\n",
      "loading weights of convolution #96\n",
      "no convolution #97\n",
      "no convolution #98\n",
      "loading weights of convolution #99\n",
      "loading weights of convolution #100\n",
      "loading weights of convolution #101\n",
      "loading weights of convolution #102\n",
      "loading weights of convolution #103\n",
      "loading weights of convolution #104\n",
      "loading weights of convolution #105\n",
      "WARNING:tensorflow:Compiled the loaded model, but the compiled metrics have yet to be built. `model.compile_metrics` will be empty until you train or evaluate the model.\n"
     ]
    }
   ],
   "source": [
    "# create a YOLOv3 Keras model and save it to file\n",
    "# based on https://github.com/experiencor/keras-yolo3\n",
    "import struct\n",
    "import tensorflow as tf\n",
    "import numpy as np\n",
    "from keras.layers import Conv2D\n",
    "from keras.layers import Input\n",
    "from keras.layers import BatchNormalization\n",
    "from keras.layers import LeakyReLU\n",
    "from keras.layers import ZeroPadding2D\n",
    "from keras.layers import UpSampling2D\n",
    "from keras.layers.merge import add, concatenate\n",
    "from keras.models import Model\n",
    "\n",
    "def _conv_block(inp, convs, skip=True):\n",
    "    x = inp\n",
    "    count = 0\n",
    "    for conv in convs:\n",
    "        if count == (len(convs) - 2) and skip:\n",
    "            skip_connection = x\n",
    "        count += 1\n",
    "        if conv['stride'] > 1: x = ZeroPadding2D(((1,0),(1,0)))(x) # peculiar padding as darknet prefer left and top\n",
    "        x = Conv2D(conv['filter'],\n",
    "                   conv['kernel'],\n",
    "                   strides=conv['stride'],\n",
    "                   padding='valid' if conv['stride'] > 1 else 'same', # peculiar padding as darknet prefer left and top\n",
    "                   name='conv_' + str(conv['layer_idx']),\n",
    "                   use_bias=False if conv['bnorm'] else True)(x)\n",
    "        if conv['bnorm']: x = BatchNormalization(epsilon=0.001, name='bnorm_' + str(conv['layer_idx']))(x)\n",
    "        if conv['leaky']: x = LeakyReLU(alpha=0.1, name='leaky_' + str(conv['layer_idx']))(x)\n",
    "    return add([skip_connection, x]) if skip else x\n",
    "\n",
    "def make_yolov3_model():\n",
    "    input_image = Input(shape=(None, None, 3))\n",
    "    # Layer  0 => 4\n",
    "    x = _conv_block(input_image, [{'filter': 32, 'kernel': 3, 'stride': 1, 'bnorm': True, 'leaky': True, 'layer_idx': 0},\n",
    "                                  {'filter': 64, 'kernel': 3, 'stride': 2, 'bnorm': True, 'leaky': True, 'layer_idx': 1},\n",
    "                                  {'filter': 32, 'kernel': 1, 'stride': 1, 'bnorm': True, 'leaky': True, 'layer_idx': 2},\n",
    "                                  {'filter': 64, 'kernel': 3, 'stride': 1, 'bnorm': True, 'leaky': True, 'layer_idx': 3}])\n",
    "    # Layer  5 => 8\n",
    "    x = _conv_block(x, [{'filter': 128, 'kernel': 3, 'stride': 2, 'bnorm': True, 'leaky': True, 'layer_idx': 5},\n",
    "                        {'filter':  64, 'kernel': 1, 'stride': 1, 'bnorm': True, 'leaky': True, 'layer_idx': 6},\n",
    "                        {'filter': 128, 'kernel': 3, 'stride': 1, 'bnorm': True, 'leaky': True, 'layer_idx': 7}])\n",
    "    # Layer  9 => 11\n",
    "    x = _conv_block(x, [{'filter':  64, 'kernel': 1, 'stride': 1, 'bnorm': True, 'leaky': True, 'layer_idx': 9},\n",
    "                        {'filter': 128, 'kernel': 3, 'stride': 1, 'bnorm': True, 'leaky': True, 'layer_idx': 10}])\n",
    "    # Layer 12 => 15\n",
    "    x = _conv_block(x, [{'filter': 256, 'kernel': 3, 'stride': 2, 'bnorm': True, 'leaky': True, 'layer_idx': 12},\n",
    "                        {'filter': 128, 'kernel': 1, 'stride': 1, 'bnorm': True, 'leaky': True, 'layer_idx': 13},\n",
    "                        {'filter': 256, 'kernel': 3, 'stride': 1, 'bnorm': True, 'leaky': True, 'layer_idx': 14}])\n",
    "    # Layer 16 => 36\n",
    "    for i in range(7):\n",
    "        x = _conv_block(x, [{'filter': 128, 'kernel': 1, 'stride': 1, 'bnorm': True, 'leaky': True, 'layer_idx': 16+i*3},\n",
    "                            {'filter': 256, 'kernel': 3, 'stride': 1, 'bnorm': True, 'leaky': True, 'layer_idx': 17+i*3}])\n",
    "    skip_36 = x\n",
    "    # Layer 37 => 40\n",
    "    x = _conv_block(x, [{'filter': 512, 'kernel': 3, 'stride': 2, 'bnorm': True, 'leaky': True, 'layer_idx': 37},\n",
    "                        {'filter': 256, 'kernel': 1, 'stride': 1, 'bnorm': True, 'leaky': True, 'layer_idx': 38},\n",
    "                        {'filter': 512, 'kernel': 3, 'stride': 1, 'bnorm': True, 'leaky': True, 'layer_idx': 39}])\n",
    "    # Layer 41 => 61\n",
    "    for i in range(7):\n",
    "        x = _conv_block(x, [{'filter': 256, 'kernel': 1, 'stride': 1, 'bnorm': True, 'leaky': True, 'layer_idx': 41+i*3},\n",
    "                            {'filter': 512, 'kernel': 3, 'stride': 1, 'bnorm': True, 'leaky': True, 'layer_idx': 42+i*3}])\n",
    "    skip_61 = x\n",
    "    # Layer 62 => 65\n",
    "    x = _conv_block(x, [{'filter': 1024, 'kernel': 3, 'stride': 2, 'bnorm': True, 'leaky': True, 'layer_idx': 62},\n",
    "                        {'filter':  512, 'kernel': 1, 'stride': 1, 'bnorm': True, 'leaky': True, 'layer_idx': 63},\n",
    "                        {'filter': 1024, 'kernel': 3, 'stride': 1, 'bnorm': True, 'leaky': True, 'layer_idx': 64}])\n",
    "    # Layer 66 => 74\n",
    "    for i in range(3):\n",
    "        x = _conv_block(x, [{'filter':  512, 'kernel': 1, 'stride': 1, 'bnorm': True, 'leaky': True, 'layer_idx': 66+i*3},\n",
    "                            {'filter': 1024, 'kernel': 3, 'stride': 1, 'bnorm': True, 'leaky': True, 'layer_idx': 67+i*3}])\n",
    "    # Layer 75 => 79\n",
    "    x = _conv_block(x, [{'filter':  512, 'kernel': 1, 'stride': 1, 'bnorm': True, 'leaky': True, 'layer_idx': 75},\n",
    "                        {'filter': 1024, 'kernel': 3, 'stride': 1, 'bnorm': True, 'leaky': True, 'layer_idx': 76},\n",
    "                        {'filter':  512, 'kernel': 1, 'stride': 1, 'bnorm': True, 'leaky': True, 'layer_idx': 77},\n",
    "                        {'filter': 1024, 'kernel': 3, 'stride': 1, 'bnorm': True, 'leaky': True, 'layer_idx': 78},\n",
    "                        {'filter':  512, 'kernel': 1, 'stride': 1, 'bnorm': True, 'leaky': True, 'layer_idx': 79}], skip=False)\n",
    "    # Layer 80 => 82\n",
    "    yolo_82 = _conv_block(x, [{'filter': 1024, 'kernel': 3, 'stride': 1, 'bnorm': True,  'leaky': True,  'layer_idx': 80},\n",
    "                                {'filter':  255, 'kernel': 1, 'stride': 1, 'bnorm': False, 'leaky': False, 'layer_idx': 81}], skip=False)\n",
    "    # Layer 83 => 86\n",
    "    x = _conv_block(x, [{'filter': 256, 'kernel': 1, 'stride': 1, 'bnorm': True, 'leaky': True, 'layer_idx': 84}], skip=False)\n",
    "    x = UpSampling2D(2)(x)\n",
    "    x = concatenate([x, skip_61])\n",
    "    # Layer 87 => 91\n",
    "    x = _conv_block(x, [{'filter': 256, 'kernel': 1, 'stride': 1, 'bnorm': True, 'leaky': True, 'layer_idx': 87},\n",
    "                        {'filter': 512, 'kernel': 3, 'stride': 1, 'bnorm': True, 'leaky': True, 'layer_idx': 88},\n",
    "                        {'filter': 256, 'kernel': 1, 'stride': 1, 'bnorm': True, 'leaky': True, 'layer_idx': 89},\n",
    "                        {'filter': 512, 'kernel': 3, 'stride': 1, 'bnorm': True, 'leaky': True, 'layer_idx': 90},\n",
    "                        {'filter': 256, 'kernel': 1, 'stride': 1, 'bnorm': True, 'leaky': True, 'layer_idx': 91}], skip=False)\n",
    "    # Layer 92 => 94\n",
    "    yolo_94 = _conv_block(x, [{'filter': 512, 'kernel': 3, 'stride': 1, 'bnorm': True,  'leaky': True,  'layer_idx': 92},\n",
    "                              {'filter': 255, 'kernel': 1, 'stride': 1, 'bnorm': False, 'leaky': False, 'layer_idx': 93}], skip=False)\n",
    "    # Layer 95 => 98\n",
    "    x = _conv_block(x, [{'filter': 128, 'kernel': 1, 'stride': 1, 'bnorm': True, 'leaky': True,   'layer_idx': 96}], skip=False)\n",
    "    x = UpSampling2D(2)(x)\n",
    "    x = concatenate([x, skip_36])\n",
    "    # Layer 99 => 106\n",
    "    yolo_106 = _conv_block(x, [{'filter': 128, 'kernel': 1, 'stride': 1, 'bnorm': True,  'leaky': True,  'layer_idx': 99},\n",
    "                               {'filter': 256, 'kernel': 3, 'stride': 1, 'bnorm': True,  'leaky': True,  'layer_idx': 100},\n",
    "                               {'filter': 128, 'kernel': 1, 'stride': 1, 'bnorm': True,  'leaky': True,  'layer_idx': 101},\n",
    "                               {'filter': 256, 'kernel': 3, 'stride': 1, 'bnorm': True,  'leaky': True,  'layer_idx': 102},\n",
    "                               {'filter': 128, 'kernel': 1, 'stride': 1, 'bnorm': True,  'leaky': True,  'layer_idx': 103},\n",
    "                               {'filter': 256, 'kernel': 3, 'stride': 1, 'bnorm': True,  'leaky': True,  'layer_idx': 104},\n",
    "                               {'filter': 255, 'kernel': 1, 'stride': 1, 'bnorm': False, 'leaky': False, 'layer_idx': 105}], skip=False)\n",
    "    model = Model(input_image, [yolo_82, yolo_94, yolo_106])\n",
    "    return model\n",
    "\n",
    "class WeightReader:\n",
    "    def __init__(self, weight_file):\n",
    "        with open(weight_file, 'rb') as w_f:\n",
    "            major,\t= struct.unpack('i', w_f.read(4))\n",
    "            minor,\t= struct.unpack('i', w_f.read(4))\n",
    "            revision, = struct.unpack('i', w_f.read(4))\n",
    "            if (major*10 + minor) >= 2 and major < 1000 and minor < 1000:\n",
    "                w_f.read(8)\n",
    "            else:\n",
    "                w_f.read(4)\n",
    "            transpose = (major > 1000) or (minor > 1000)\n",
    "            binary = w_f.read()\n",
    "        self.offset = 0\n",
    "        self.all_weights = np.frombuffer(binary, dtype='float32')\n",
    "\n",
    "    def read_bytes(self, size):\n",
    "        self.offset = self.offset + size\n",
    "        return self.all_weights[self.offset-size:self.offset]\n",
    "\n",
    "    def load_weights(self, model):\n",
    "        for i in range(106):\n",
    "            try:\n",
    "                conv_layer = model.get_layer('conv_' + str(i))\n",
    "                print(\"loading weights of convolution #\" + str(i))\n",
    "                if i not in [81, 93, 105]:\n",
    "                    norm_layer = model.get_layer('bnorm_' + str(i))\n",
    "                    size = np.prod(norm_layer.get_weights()[0].shape)\n",
    "                    beta  = self.read_bytes(size) # bias\n",
    "                    gamma = self.read_bytes(size) # scale\n",
    "                    mean  = self.read_bytes(size) # mean\n",
    "                    var   = self.read_bytes(size) # variance\n",
    "                    weights = norm_layer.set_weights([gamma, beta, mean, var])\n",
    "                if len(conv_layer.get_weights()) > 1:\n",
    "                    bias   = self.read_bytes(np.prod(conv_layer.get_weights()[1].shape))\n",
    "                    kernel = self.read_bytes(np.prod(conv_layer.get_weights()[0].shape))\n",
    "                    kernel = kernel.reshape(list(reversed(conv_layer.get_weights()[0].shape)))\n",
    "                    kernel = kernel.transpose([2,3,1,0])\n",
    "                    conv_layer.set_weights([kernel, bias])\n",
    "                else:\n",
    "                    kernel = self.read_bytes(np.prod(conv_layer.get_weights()[0].shape))\n",
    "                    kernel = kernel.reshape(list(reversed(conv_layer.get_weights()[0].shape)))\n",
    "                    kernel = kernel.transpose([2,3,1,0])\n",
    "                    conv_layer.set_weights([kernel])\n",
    "            except ValueError:\n",
    "                print(\"no convolution #\" + str(i))\n",
    "\n",
    "    def reset(self):\n",
    "        self.offset = 0\n",
    "\n",
    "# define the model\n",
    "model = make_yolov3_model()\n",
    "# load the model weights\n",
    "weight_reader = WeightReader('yolov3.weights')\n",
    "# set the model weights into the model\n",
    "weight_reader.load_weights(model)\n",
    "# save the model to file\n",
    "model.save('model.h5')"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "Running the example may take a little less than one minute to execute on modern hardware.\n",
    "\n",
    "As the weight file is loaded, you will see debug information reported about what was loaded, output by the `WeightReader` class."
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "At the end of the run, the `model.h5` file is saved in your current working directory with approximately the same size as the original weight file (237MB), but ready to be loaded and used directly as a Keras model."
   ]
  },
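  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "As a quick sanity check (a small sketch added here, not part of the original tutorial), we can confirm that the saved file exists and report its size on disk:"
   ]
  },
  {
   "cell_type": "raw",
   "metadata": {},
   "source": [
    "# sanity check (sketch): report the size of the saved Keras model file in megabytes\n",
    "import os\n",
    "\n",
    "print('model.h5 size: %.1f MB' % (os.path.getsize('model.h5') / 1e6))"
   ]
  },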
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "### Make a Prediction\n",
    "\n",
    "We need a new photo for object detection, ideally with objects that we know that the model knows about from the MSCOCO dataset.\n",
    "\n",
    "We will use a photograph of two elephants.\n"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "<img src='./Bilder/african-elephant.jpg'>"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "The first step is to load the Keras model. This might be the slowest part of making a prediction."
   ]
  },
  {
   "cell_type": "raw",
   "metadata": {},
   "source": [
    "# load yolov3 model\n",
    "model = load_model('model.h5')"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "Next, we need to load our new photograph and prepare it as suitable input to the model. The model expects inputs to be color images with the square shape of $416\\times 416$ pixels.\n",
    "\n",
    "We can use the `load_img()` Keras function to load the image and the `target_size` argument to resize the image after loading. We can also use the `img_to_array()` function to convert the loaded `PIL` image object into a NumPy array, and then rescale the pixel values from $0-255$ to $0-1$ 32-bit floating point values.\n",
    "\n"
   ]
  },
  {
   "cell_type": "raw",
   "metadata": {},
   "source": [
    "# load the image with the required size\n",
    "image = load_img('./Bilder/african-elephant.jpg', target_size=(416, 416))\n",
    "# convert to numpy array\n",
    "image = img_to_array(image)\n",
    "# scale pixel values to [0, 1]\n",
    "image = image.astype('float32')\n",
    "image /= 255.0"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "We will want to show the original photo again later, which means we will need to scale the bounding boxes of all detected objects from the square shape back to the original shape. As such, we can load the image and retrieve the original shape."
   ]
  },
  {
   "cell_type": "raw",
   "metadata": {},
   "source": [
    "# load the image to get its shape\n",
    "image = load_img('./Bilder/african-elephant.jpg')\n",
    "width, height = image.size"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "We can tie all of this together into a convenience function named `load_image_pixels()` that takes the filename and target size and returns the scaled pixel data ready to provide as input to the Keras model, as well as the original width and height of the image."
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 17,
   "metadata": {},
   "outputs": [],
   "source": [
    "# load and prepare an image\n",
    "def load_image_pixels(filename, shape):\n",
    "    # load the image to get its shape\n",
    "    image = load_img(filename)\n",
    "    width, height = image.size\n",
    "    # load the image with the required size\n",
    "    image = load_img(filename, target_size=shape)\n",
    "    # convert to numpy array\n",
    "    image = img_to_array(image)\n",
    "    # scale pixel values to [0, 1]\n",
    "    image = image.astype('float32')\n",
    "    image /= 255.0\n",
    "    # add a dimension so that we have one sample\n",
    "    image = expand_dims(image, 0)\n",
    "    return image, width, height"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "We can then call this function to load our photo of elephants."
   ]
  },
  {
   "cell_type": "raw",
   "metadata": {},
   "source": [
    "# define the expected input shape for the model\n",
    "input_w, input_h = 416, 416\n",
    "# define our new photo\n",
    "photo_filename = 'african-elephant.jpg'\n",
    "# load and prepare image\n",
    "image, image_w, image_h = load_image_pixels(photo_filename, (input_w, input_h))"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "We can now feed the photo into the Keras model and make a prediction."
   ]
  },
  {
   "cell_type": "raw",
   "metadata": {},
   "source": [
    "# make prediction\n",
    "yhat = model.predict(image)\n",
    "# summarize the shape of the list of arrays\n",
    "print([a.shape for a in yhat])"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "That’s it, at least for making a prediction. The complete example is listed below."
   ]
  },
  {
   "cell_type": "raw",
   "metadata": {},
   "source": [
    "# load yolov3 model and perform object detection\n",
    "# based on https://github.com/experiencor/keras-yolo3\n",
    "from numpy import expand_dims\n",
    "from keras.models import load_model\n",
    "from keras.preprocessing.image import load_img\n",
    "from keras.preprocessing.image import img_to_array\n",
    " \n",
    "# load and prepare an image\n",
    "def load_image_pixels(filename, shape):\n",
    "    # load the image to get its shape\n",
    "    image = load_img(filename)\n",
    "    width, height = image.size\n",
    "    # load the image with the required size\n",
    "    image = load_img(filename, target_size=shape)\n",
    "    # convert to numpy array\n",
    "    image = img_to_array(image)\n",
    "    # scale pixel values to [0, 1]\n",
    "    image = image.astype('float32')\n",
    "    image /= 255.0\n",
    "    # add a dimension so that we have one sample\n",
    "    image = expand_dims(image, 0)\n",
    "    return image, width, height\n",
    " \n",
    "# load yolov3 model\n",
    "model = load_model('model.h5')\n",
    "# define the expected input shape for the model\n",
    "input_w, input_h = 416, 416\n",
    "# define our new photo\n",
    "photo_filename = 'zebra.jpg'\n",
    "# load and prepare image\n",
    "image, image_w, image_h = load_image_pixels(photo_filename, (input_w, input_h))\n",
    "# make prediction\n",
    "yhat = model.predict(image)\n",
    "# summarize the shape of the list of arrays\n",
    "print([a.shape for a in yhat])"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "Running the example returns a list of three NumPy arrays, the shape of which is displayed as output.\n",
    "\n",
    "These arrays predict both the bounding boxes and class labels but are encoded. They must be interpreted."
   ]
  },
  {
   "cell_type": "raw",
   "metadata": {},
   "source": [
    "[(1, 13, 13, 255), (1, 26, 26, 255), (1, 52, 52, 255)]"
   ]
  },
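  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "The 255 channels in each output array have a fixed structure: for YOLOv3 trained on MSCOCO, each grid cell predicts 3 anchor boxes, and each box is described by 4 coordinates, 1 objectness score and 80 class probabilities, i.e. 3 * (4 + 1 + 80) = 255 values. The next cell is a small illustrative sketch of this arithmetic, added here for clarity."
   ]
  },
  {
   "cell_type": "raw",
   "metadata": {},
   "source": [
    "# illustrative sketch: structure of the 255 output channels per grid cell\n",
    "num_anchors_per_scale = 3   # anchor boxes predicted per grid cell\n",
    "num_box_coords = 4          # x, y, width, height\n",
    "num_objectness = 1          # objectness (confidence) score\n",
    "num_classes = 80            # MSCOCO class labels\n",
    "print(num_anchors_per_scale * (num_box_coords + num_objectness + num_classes))  # 255"
   ]
  },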
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "### Make a Prediction and Interpret Result\n",
    "\n",
    "The output of the model is, in fact, encoded candidate bounding boxes from three different grid sizes, and the boxes are defined the context of anchor boxes, carefully chosen based on an analysis of the size of objects in the MSCOCO dataset.\n",
    "\n",
    "The script provided by experiencor provides a function called `decode_netout()` that will take each one of the NumPy arrays, one at a time, and decode the candidate bounding boxes and class predictions. Further, any bounding boxes that don’t confidently describe an object (e.g. all class probabilities below a threshold) are ignored. We will use a probability of 60% or 0.6. The function returns a list of `BoundBox` instances that define the corners of each bounding box in the context of the input image shape and class probabilities."
   ]
  },
  {
   "cell_type": "raw",
   "metadata": {},
   "source": [
    "# define the anchors\n",
    "anchors = [[116,90, 156,198, 373,326], [30,61, 62,45, 59,119], [10,13, 16,30, 33,23]]\n",
    "# define the probability threshold for detected objects\n",
    "class_threshold = 0.6\n",
    "boxes = list()\n",
    "for i in range(len(yhat)):\n",
    "    # decode the output of the network\n",
    "    boxes += decode_netout(yhat[i][0], anchors[i], class_threshold, input_h, input_w)"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "Next, the bounding boxes can be stretched back into the shape of the original image. This is helpful as it means that later we can plot the original image and draw the bounding boxes, hopefully detecting real objects.\n",
    "\n",
    "The experiencor script provides the `correct_yolo_boxes()` function to perform this translation of bounding box coordinates, taking the list of bounding boxes, the original shape of our loaded photograph, and the shape of the input to the network as arguments. The coordinates of the bounding boxes are updated directly."
   ]
  },
  {
   "cell_type": "raw",
   "metadata": {},
   "source": [
    "# correct the sizes of the bounding boxes for the shape of the image\n",
    "correct_yolo_boxes(boxes, image_h, image_w, input_h, input_w)"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "The model has predicted a lot of candidate bounding boxes, and most of the boxes will be referring to the same objects. The list of bounding boxes can be filtered and those boxes that overlap and refer to the same object can be merged. We can define the amount of overlap as a configuration parameter, in this case, 50% or 0.5. This filtering of bounding box regions is generally referred to as non-maximal suppression and is a required post-processing step.\n",
    "\n",
    "The experiencor script provides this via the `do_nms()` function that takes the list of bounding boxes and a threshold parameter. Rather than purging the overlapping boxes, their predicted probability for their overlapping class is cleared. This allows the boxes to remain and be used if they also detect another object type."
   ]
  },
  {
   "cell_type": "raw",
   "metadata": {},
   "source": [
    "# suppress non-maximal boxes\n",
    "do_nms(boxes, 0.5)"
   ]
  },
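  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "For reference, the overlap that non-maximal suppression compares against the 0.5 threshold is the intersection over union (IoU) of two boxes. The cell below is a simplified, self-contained sketch of that measure; it is not the experiencor implementation and assumes boxes given as (xmin, ymin, xmax, ymax) tuples."
   ]
  },
  {
   "cell_type": "raw",
   "metadata": {},
   "source": [
    "# simplified sketch of intersection over union (IoU), the overlap measure used\n",
    "# by non-maximal suppression; boxes are assumed to be (xmin, ymin, xmax, ymax)\n",
    "def iou(box_a, box_b):\n",
    "    # intersection rectangle\n",
    "    ix1, iy1 = max(box_a[0], box_b[0]), max(box_a[1], box_b[1])\n",
    "    ix2, iy2 = min(box_a[2], box_b[2]), min(box_a[3], box_b[3])\n",
    "    inter = max(0, ix2 - ix1) * max(0, iy2 - iy1)\n",
    "    # union is the sum of the two areas minus the intersection\n",
    "    area_a = (box_a[2] - box_a[0]) * (box_a[3] - box_a[1])\n",
    "    area_b = (box_b[2] - box_b[0]) * (box_b[3] - box_b[1])\n",
    "    return inter / float(area_a + area_b - inter)\n",
    "\n",
    "print(iou((10, 10, 50, 50), (30, 30, 70, 70)))  # two partially overlapping boxes"
   ]
  },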
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "This will leave us with the same number of boxes, but only very few of interest. We can retrieve just those boxes that strongly predict the presence of an object: that is are more than 60% confident. This can be achieved by enumerating over all boxes and checking the class prediction values. We can then look up the corresponding class label for the box and add it to the list. Each box must be considered for each class label, just in case the same box strongly predicts more than one object.\n",
    "\n",
    "We can develop a `get_boxes()` function that does this and takes the list of boxes, known labels, and our classification threshold as arguments and returns parallel lists of boxes, labels, and scores."
   ]
  },
  {
   "cell_type": "raw",
   "metadata": {},
   "source": [
    "# get all of the results above a threshold\n",
    "def get_boxes(boxes, labels, thresh):\n",
    "    v_boxes, v_labels, v_scores = list(), list(), list()\n",
    "    # enumerate all boxes\n",
    "    for box in boxes:\n",
    "        # enumerate all possible labels\n",
    "        for i in range(len(labels)):\n",
    "            # check if the threshold for this label is high enough\n",
    "            if box.classes[i] > thresh:\n",
    "                v_boxes.append(box)\n",
    "                v_labels.append(labels[i])\n",
    "                v_scores.append(box.classes[i]*100)\n",
    "                # don't break, many labels may trigger for one box\n",
    "    return v_boxes, v_labels, v_scores"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "We can call this function with our list of boxes.\n",
    "\n",
    "We also need a list of strings containing the class labels known to the model in the correct order used during training, specifically those class labels from the MSCOCO dataset. Thankfully, this is provided in the experiencor script."
   ]
  },
  {
   "cell_type": "raw",
   "metadata": {},
   "source": [
    "# define the labels\n",
    "labels = [\"person\", \"bicycle\", \"car\", \"motorbike\", \"aeroplane\", \"bus\", \"train\", \"truck\",\n",
    "    \"boat\", \"traffic light\", \"fire hydrant\", \"stop sign\", \"parking meter\", \"bench\",\n",
    "    \"bird\", \"cat\", \"dog\", \"horse\", \"sheep\", \"cow\", \"elephant\", \"bear\", \"zebra\", \"giraffe\",\n",
    "    \"backpack\", \"umbrella\", \"handbag\", \"tie\", \"suitcase\", \"frisbee\", \"skis\", \"snowboard\",\n",
    "    \"sports ball\", \"kite\", \"baseball bat\", \"baseball glove\", \"skateboard\", \"surfboard\",\n",
    "    \"tennis racket\", \"bottle\", \"wine glass\", \"cup\", \"fork\", \"knife\", \"spoon\", \"bowl\", \"banana\",\n",
    "    \"apple\", \"sandwich\", \"orange\", \"broccoli\", \"carrot\", \"hot dog\", \"pizza\", \"donut\", \"cake\",\n",
    "    \"chair\", \"sofa\", \"pottedplant\", \"bed\", \"diningtable\", \"toilet\", \"tvmonitor\", \"laptop\", \"mouse\",\n",
    "    \"remote\", \"keyboard\", \"cell phone\", \"microwave\", \"oven\", \"toaster\", \"sink\", \"refrigerator\",\n",
    "    \"book\", \"clock\", \"vase\", \"scissors\", \"teddy bear\", \"hair drier\", \"toothbrush\"]\n",
    "# get the details of the detected objects\n",
    "v_boxes, v_labels, v_scores = get_boxes(boxes, labels, class_threshold)"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "Now that we have those few boxes of strongly predicted objects, we can summarize them."
   ]
  },
  {
   "cell_type": "raw",
   "metadata": {},
   "source": [
    "# summarize what we found\n",
    "for i in range(len(v_boxes)):\n",
    "    print(v_labels[i], v_scores[i])"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "We can also plot our original photograph and draw the bounding box around each detected object. This can be achieved by retrieving the coordinates from each bounding box and creating a Rectangle object."
   ]
  },
  {
   "cell_type": "raw",
   "metadata": {},
   "source": [
    "box = v_boxes[i]\n",
    "# get coordinates\n",
    "y1, x1, y2, x2 = box.ymin, box.xmin, box.ymax, box.xmax\n",
    "# calculate width and height of the box\n",
    "width, height = x2 - x1, y2 - y1\n",
    "# create the shape\n",
    "rect = Rectangle((x1, y1), width, height, fill=False, color='white')\n",
    "# draw the box\n",
    "ax.add_patch(rect)"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "We can also draw a string with the class label and confidence."
   ]
  },
  {
   "cell_type": "raw",
   "metadata": {},
   "source": [
    "# draw text and score in top left corner\n",
    "label = \"%s (%.3f)\" % (v_labels[i], v_scores[i])\n",
    "pyplot.text(x1, y1, label, color='white')"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "The `draw_boxes()` function below implements this, taking the filename of the original photograph and the parallel lists of bounding boxes, labels and scores, and creates a plot showing all detected objects."
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 20,
   "metadata": {},
   "outputs": [],
   "source": [
    "# draw all results\n",
    "def draw_boxes(filename, v_boxes, v_labels, v_scores):\n",
    "    # load the image\n",
    "    data = pyplot.imread(filename)\n",
    "    # plot the image\n",
    "    pyplot.imshow(data)\n",
    "    # get the context for drawing boxes\n",
    "    ax = pyplot.gca()\n",
    "    # plot each box\n",
    "    for i in range(len(v_boxes)):\n",
    "        box = v_boxes[i]\n",
    "        # get coordinates\n",
    "        y1, x1, y2, x2 = box.ymin, box.xmin, box.ymax, box.xmax\n",
    "        # calculate width and height of the box\n",
    "        width, height = x2 - x1, y2 - y1\n",
    "        # create the shape\n",
    "        rect = Rectangle((x1, y1), width, height, fill=False, color='white')\n",
    "        # draw the box\n",
    "        ax.add_patch(rect)\n",
    "        # draw text and score in top left corner\n",
    "        label = \"%s (%.3f)\" % (v_labels[i], v_scores[i])\n",
    "        pyplot.text(x1, y1, label, color='white')\n",
    "    # show the plot\n",
    "    pyplot.show()"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "We can then call this function to plot our final result."
   ]
  },
  {
   "cell_type": "raw",
   "metadata": {},
   "source": [
    "# draw what we found\n",
    "draw_boxes(photo_filename, v_boxes, v_labels, v_scores)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 21,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "WARNING:tensorflow:No training configuration found in the save file, so the model was *not* compiled. Compile it manually.\n",