diff --git a/notebooks/Block_5/Jupyter Notebook Block 5 - Object Detection and Segmentation.ipynb b/notebooks/Block_5/Jupyter Notebook Block 5 - Object Detection and Segmentation.ipynb
index 31fa8670b296d6285b139776b1fb72970fd7e5bc..6c81d539393ff5fdc46b5e2aea06db865b5ebd76 100644
--- a/notebooks/Block_5/Jupyter Notebook Block 5 - Object Detection and Segmentation.ipynb
+++ b/notebooks/Block_5/Jupyter Notebook Block 5 - Object Detection and Segmentation.ipynb
@@ -839,12 +839,14 @@
     "train_dataset = image_dataset_from_directory(\n",
     "    './train',\n",
     "    image_size=(180, 180),\n",
-    "    batch_size=32)\n",
+    "    batch_size=32,\n",
+    "    label_mode=\"categorical\")\n",
     "\n",
     "validation_dataset = image_dataset_from_directory(\n",
     "    './validation',\n",
     "    image_size=(180, 180),\n",
-    "    batch_size=32)"
+    "    batch_size=32,\n",
+    "    label_mode=\"categorical\")"
    ]
   },
   {
@@ -868,18 +870,6 @@
     "val_features, val_labels = get_features_and_labels(validation_dataset)"
    ]
   },
-  {
-   "cell_type": "code",
-   "execution_count": 53,
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "from tensorflow.keras.utils import to_categorical\n",
-    "\n",
-    "train_labels = to_categorical(train_labels)\n",
-    "val_labels = to_categorical(val_labels)"
-   ]
-  },
   {
    "cell_type": "markdown",
    "metadata": {},
@@ -1115,18 +1105,6 @@
     "overfitting: we can just reload our saved file."
    ]
   },
-  {
-   "cell_type": "code",
-   "execution_count": 67,
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "# Save the features as a Numpy array and the trained model as an h5-file\n",
-    "np.save('./models/bottleneck_features_train.npy', train_features)\n",
-    "np.save('./models/bottleneck_features_validation.npy', val_features)\n",
-    "model.save_weights('./models/bottleneck_fc_model.h5')"
-   ]
-  },
   {
    "cell_type": "markdown",
    "metadata": {},
@@ -1207,9 +1185,13 @@
    "metadata": {},
    "outputs": [],
    "source": [
+    "import datetime, os\n",
+    "\n",
     "# Load the TensorBoard notebook extension\n",
     "%load_ext tensorboard\n",
     "\n",
+    "logdir = os.path.join(\"logs\", datetime.datetime.now().strftime(\"%Y%m%d-%H%M%S\"))\n",
+    "\n",
     "os.makedirs(logdir, exist_ok=True)\n",
     "%tensorboard --logdir logs"
    ]
@@ -1553,6 +1535,20 @@
     "model_freeze_conv.summary()"
    ]
   },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "import datetime, os\n",
+    "# Load the TensorBoard notebook extension\n",
+    "%load_ext tensorboard\n",
+    "logdir = os.path.join(\"logs\", datetime.datetime.now().strftime(\"%Y%m%d-%H%M%S\"))\n",
+    "os.makedirs(logdir, exist_ok=True)\n",
+    "%tensorboard --logdir logs"
+   ]
+  },
   {
    "cell_type": "markdown",
    "metadata": {