Symulowanie-wizualne/sw_lab9-10_2.ipynb

2147 lines
342 KiB
Plaintext
Raw Normal View History

{
"cells": [
{
"attachments": {},
"cell_type": "markdown",
"metadata": {},
"source": [
"### Aleksandra Jonas, Aleksandra Gronowska, Iwona Christop\n",
"# Zestaw 9-10/zadanie2 - AlexNet, VGG16, ResNet on PlantVillage"
]
},
{
"attachments": {},
"cell_type": "markdown",
"metadata": {},
"source": [
"### Przygotowanie danych"
]
},
{
"cell_type": "code",
"execution_count": 1,
"metadata": {},
"outputs": [],
"source": [
"from IPython.display import Image, display"
]
},
{
"cell_type": "code",
"execution_count": 2,
"metadata": {},
"outputs": [],
"source": [
"import sys\n",
"import subprocess\n",
"import pkg_resources\n",
"import numpy as np\n",
"\n",
"required = { 'scikit-image'}\n",
"installed = {pkg.key for pkg in pkg_resources.working_set}\n",
"missing = required - installed\n",
"\n",
"if missing: \n",
" python = sys.executable\n",
" subprocess.check_call([python, '-m', 'pip', 'install', *missing], stdout=subprocess.DEVNULL)\n",
"\n",
"def load_data(input_dir, img_size):\n",
" import numpy as np\n",
" import pandas as pd\n",
" import os\n",
" from skimage.io import imread\n",
" import cv2 as cv\n",
" from pathlib import Path\n",
" import random\n",
" from shutil import copyfile, rmtree\n",
" import json\n",
"\n",
" import seaborn as sns\n",
" import matplotlib.pyplot as plt\n",
"\n",
" import matplotlib\n",
" \n",
" image_dir = Path(input_dir)\n",
" categories_name = []\n",
" for file in os.listdir(image_dir):\n",
" d = os.path.join(image_dir, file)\n",
" if os.path.isdir(d):\n",
" categories_name.append(file)\n",
"\n",
" folders = [directory for directory in image_dir.iterdir() if directory.is_dir()]\n",
" \n",
" ds_img = []\n",
" categories_count=[]\n",
" labels=[]\n",
" for i, direc in enumerate(folders):\n",
" count = 0\n",
" for obj in direc.iterdir():\n",
" if os.path.isfile(obj) and os.path.basename(os.path.normpath(obj)) != 'desktop.ini':\n",
" labels.append(os.path.basename(os.path.normpath(direc)))\n",
" count += 1\n",
" img = imread(obj)#zwraca ndarry postaci xSize x ySize x colorDepth\n",
" img = img[:, :, :3]\n",
" img = cv.resize(img, img_size, interpolation=cv.INTER_AREA)# zwraca ndarray\n",
" img = img / 255 #normalizacja\n",
" ds_img.append(img)\n",
" categories_count.append(count)\n",
" X={}\n",
" X[\"values\"] = np.array(ds_img)\n",
" X[\"categories_name\"] = categories_name\n",
" X[\"categories_count\"] = categories_count\n",
" X[\"labels\"]=labels\n",
" return X"
]
},
{
"cell_type": "code",
"execution_count": 3,
"metadata": {},
"outputs": [],
"source": [
"def get_run_logdir(root_logdir):\n",
" \"\"\"Return a unique, timestamped run subdirectory path under root_logdir.\"\"\"\n",
" import os\n",
" import time\n",
"\n",
" stamp = time.strftime(\"run_%Y_%m_%d-%H_%M_%S\")\n",
" return os.path.join(root_logdir, stamp)"
]
},
{
"cell_type": "code",
"execution_count": 4,
"metadata": {},
"outputs": [],
"source": [
"def diagram_setup(model_name):\n",
" \"\"\"Create a TensorBoard callback logging under logs/fit/<model_name>/run_<timestamp>.\n",
"\n",
" Returns the configured keras.callbacks.TensorBoard instance so callers can\n",
" pass it to model.fit(); the previous version built it and discarded it,\n",
" making the function a no-op.\n",
" \"\"\"\n",
" from tensorflow import keras\n",
" import os\n",
" \n",
" # os.path.join keeps the path portable; the old version hard-coded\n",
" # Windows backslashes, which produce odd directory names on macOS/Linux.\n",
" root_logdir = os.path.join(os.curdir, \"logs\", \"fit\", model_name)\n",
" \n",
" run_logdir = get_run_logdir(root_logdir)\n",
" tensorboard_cb = keras.callbacks.TensorBoard(run_logdir)\n",
" return tensorboard_cb"
]
},
{
"cell_type": "code",
"execution_count": 5,
"metadata": {},
"outputs": [],
"source": [
"def prepare_data(path, img_size, test_size, val_size):\n",
" \"\"\"Load images and split them into batched train/test/validation tf.data Datasets.\n",
"\n",
" Parameters\n",
" ----------\n",
" path : str\n",
" Root directory with one subfolder per class (passed to load_data).\n",
" img_size : tuple\n",
" Target image size for resizing.\n",
" test_size, val_size : float\n",
" Fractions used by the two successive train_test_split calls.\n",
"\n",
" Returns\n",
" -------\n",
" (train_ds, test_ds, validation_ds) — shuffled datasets batched at 32,\n",
" with incomplete final batches dropped.\n",
" \"\"\"\n",
" from sklearn.model_selection import train_test_split\n",
" from sklearn.preprocessing import LabelEncoder\n",
" import tensorflow as tf\n",
"\n",
" data = load_data(path, img_size)\n",
" values = data['values']\n",
" labels = data['labels']\n",
"\n",
" X_train, X_test, y_train, y_test = train_test_split(values, labels, test_size=test_size, random_state=42)\n",
" X_train, X_validate, y_train, y_validate = train_test_split(X_train, y_train, test_size=val_size, random_state=42)\n",
"\n",
" # Fit the encoder on the training labels only and reuse it for the other\n",
" # splits. The original called fit_transform on every split separately,\n",
" # which can assign different integer codes per split whenever a split is\n",
" # missing a class, silently corrupting validation/test labels.\n",
" class_le = LabelEncoder()\n",
" y_train_enc = class_le.fit_transform(y_train)\n",
" y_validate_enc = class_le.transform(y_validate)\n",
" y_test_enc = class_le.transform(y_test)\n",
"\n",
" train_ds = tf.data.Dataset.from_tensor_slices((X_train, y_train_enc))\n",
" validation_ds = tf.data.Dataset.from_tensor_slices((X_validate, y_validate_enc))\n",
" test_ds = tf.data.Dataset.from_tensor_slices((X_test, y_test_enc))\n",
"\n",
" train_ds_size = tf.data.experimental.cardinality(train_ds).numpy()\n",
" test_ds_size = tf.data.experimental.cardinality(test_ds).numpy()\n",
" validation_ds_size = tf.data.experimental.cardinality(validation_ds).numpy()\n",
"\n",
" # Split sizes\n",
" print(\"Training:\", train_ds_size)\n",
" print(\"Test:\", test_ds_size)\n",
" print(\"Validation:\", validation_ds_size)\n",
"\n",
" # Shuffle each split with a buffer covering its own size; the original\n",
" # reused the training-set size as the buffer for every split.\n",
" train_ds = (train_ds.shuffle(buffer_size=train_ds_size).batch(batch_size=32, drop_remainder=True))\n",
" test_ds = (test_ds.shuffle(buffer_size=test_ds_size).batch(batch_size=32, drop_remainder=True))\n",
" validation_ds = (validation_ds.shuffle(buffer_size=validation_ds_size).batch(batch_size=32, drop_remainder=True))\n",
"\n",
" return train_ds, test_ds, validation_ds\n",
"\n"
]
},
{
"attachments": {},
"cell_type": "markdown",
"metadata": {},
"source": [
"# AlexNet"
]
},
{
"cell_type": "code",
"execution_count": 6,
"metadata": {},
"outputs": [
{
"name": "stderr",
"output_type": "stream",
"text": [
"WARNING:absl:`lr` is deprecated, please use `learning_rate` instead, or use the legacy optimizer, e.g.,tf.keras.optimizers.legacy.SGD.\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"Model: \"sequential\"\n",
"_________________________________________________________________\n",
" Layer (type) Output Shape Param # \n",
"=================================================================\n",
" conv2d (Conv2D) (None, 55, 55, 96) 34944 \n",
" \n",
" max_pooling2d (MaxPooling2D (None, 27, 27, 96) 0 \n",
" ) \n",
" \n",
" conv2d_1 (Conv2D) (None, 27, 27, 256) 614656 \n",
" \n",
" max_pooling2d_1 (MaxPooling (None, 13, 13, 256) 0 \n",
" 2D) \n",
" \n",
" conv2d_2 (Conv2D) (None, 13, 13, 384) 885120 \n",
" \n",
" conv2d_3 (Conv2D) (None, 13, 13, 384) 1327488 \n",
" \n",
" conv2d_4 (Conv2D) (None, 13, 13, 256) 884992 \n",
" \n",
" max_pooling2d_2 (MaxPooling (None, 6, 6, 256) 0 \n",
" 2D) \n",
" \n",
" flatten (Flatten) (None, 9216) 0 \n",
" \n",
" dense (Dense) (None, 4096) 37752832 \n",
" \n",
" dense_1 (Dense) (None, 4096) 16781312 \n",
" \n",
" dense_2 (Dense) (None, 12) 49164 \n",
" \n",
"=================================================================\n",
"Total params: 58,330,508\n",
"Trainable params: 58,330,508\n",
"Non-trainable params: 0\n",
"_________________________________________________________________\n"
]
}
],
"source": [
"from tensorflow import keras\n",
"import tensorflow as tf\n",
"import os\n",
"import time\n",
"\n",
"# AlexNet-style CNN: 227x227 RGB input, 12 output classes.\n",
"model = keras.models.Sequential([\n",
" keras.layers.Conv2D(filters=96, kernel_size=(11,11), strides=(4,4), activation='relu', input_shape=(227,227,3)),\n",
" keras.layers.MaxPool2D(pool_size=(3,3), strides=(2,2)),\n",
" keras.layers.Conv2D(filters=256, kernel_size=(5,5), strides=(1,1), activation='relu', padding=\"same\"),\n",
" keras.layers.MaxPool2D(pool_size=(3,3), strides=(2,2)),\n",
" keras.layers.Conv2D(filters=384, kernel_size=(3,3), strides=(1,1), activation='relu', padding=\"same\"),\n",
" keras.layers.Conv2D(filters=384, kernel_size=(3,3), strides=(1,1), activation='relu', padding=\"same\"),\n",
" keras.layers.Conv2D(filters=256, kernel_size=(3,3), strides=(1,1), activation='relu', padding=\"same\"),\n",
" keras.layers.MaxPool2D(pool_size=(3,3), strides=(2,2)),\n",
" keras.layers.Flatten(),\n",
" keras.layers.Dense(4096, activation='relu'),\n",
" keras.layers.Dense(4096, activation='relu'),\n",
" keras.layers.Dense(12, activation='softmax')\n",
"])\n",
"\n",
"# Use learning_rate: the `lr` alias is deprecated and triggered the\n",
"# absl deprecation warning seen in this cell's output.\n",
"model.compile(loss='sparse_categorical_crossentropy', optimizer=tf.optimizers.SGD(learning_rate=.001), metrics=['accuracy'])\n",
"model.summary()"
]
},
{
"cell_type": "code",
"execution_count": 7,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"Training: 2990\n",
"Test: 935\n",
"Validation: 748\n"
]
}
],
"source": [
"train_ds_a, test_ds_a, val_ds_a = prepare_data(\"./plantvillage/color\", (227, 227), 0.2, 0.2)"
]
},
{
"cell_type": "code",
"execution_count": 8,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"WARNING:tensorflow:`period` argument is deprecated. Please use `save_freq` to specify the frequency in number of batches seen.\n"
]
},
{
"name": "stderr",
"output_type": "stream",
"text": [
"WARNING:tensorflow:`period` argument is deprecated. Please use `save_freq` to specify the frequency in number of batches seen.\n",
"/var/folders/3r/c8tg1h051m18qhsdccdysrt40000gn/T/ipykernel_14470/2397086753.py:6: UserWarning: `Model.fit_generator` is deprecated and will be removed in a future version. Please use `Model.fit`, which supports generators.\n",
" alex = model.fit_generator(\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"Epoch 1/25\n"
]
},
{
"name": "stderr",
"output_type": "stream",
"text": [
"2023-01-09 18:33:27.636772: W tensorflow/tsl/platform/profile_utils/cpu_utils.cc:128] Failed to get CPU frequency: 0 Hz\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"93/93 [==============================] - ETA: 0s - loss: 1.5758 - accuracy: 0.3474\n",
"Epoch 1: val_accuracy improved from -inf to 0.38179, saving model to alex_2.h5\n",
"93/93 [==============================] - 95s 1s/step - loss: 1.5758 - accuracy: 0.3474 - val_loss: 1.4164 - val_accuracy: 0.3818\n",
"Epoch 2/25\n",
"93/93 [==============================] - ETA: 0s - loss: 1.4061 - accuracy: 0.3609\n",
"Epoch 2: val_accuracy did not improve from 0.38179\n",
"93/93 [==============================] - 100s 1s/step - loss: 1.4061 - accuracy: 0.3609 - val_loss: 1.4139 - val_accuracy: 0.3098\n",
"Epoch 3/25\n",
"93/93 [==============================] - ETA: 0s - loss: 1.3158 - accuracy: 0.3999\n",
"Epoch 3: val_accuracy improved from 0.38179 to 0.38995, saving model to alex_2.h5\n",
"93/93 [==============================] - 102s 1s/step - loss: 1.3158 - accuracy: 0.3999 - val_loss: 1.2847 - val_accuracy: 0.3899\n",
"Epoch 4/25\n",
"93/93 [==============================] - ETA: 0s - loss: 1.2229 - accuracy: 0.4792\n",
"Epoch 4: val_accuracy improved from 0.38995 to 0.57201, saving model to alex_2.h5\n",
"93/93 [==============================] - 102s 1s/step - loss: 1.2229 - accuracy: 0.4792 - val_loss: 1.1064 - val_accuracy: 0.5720\n",
"Epoch 5/25\n",
"93/93 [==============================] - ETA: 0s - loss: 1.0983 - accuracy: 0.5625\n",
"Epoch 5: val_accuracy improved from 0.57201 to 0.64946, saving model to alex_2.h5\n",
"93/93 [==============================] - 104s 1s/step - loss: 1.0983 - accuracy: 0.5625 - val_loss: 0.9796 - val_accuracy: 0.6495\n",
"Epoch 6/25\n",
"93/93 [==============================] - ETA: 0s - loss: 0.9776 - accuracy: 0.6253\n",
"Epoch 6: val_accuracy did not improve from 0.64946\n",
"93/93 [==============================] - 105s 1s/step - loss: 0.9776 - accuracy: 0.6253 - val_loss: 1.1308 - val_accuracy: 0.5476\n",
"Epoch 7/25\n",
"93/93 [==============================] - ETA: 0s - loss: 0.8467 - accuracy: 0.6969\n",
"Epoch 7: val_accuracy improved from 0.64946 to 0.67663, saving model to alex_2.h5\n",
"93/93 [==============================] - 105s 1s/step - loss: 0.8467 - accuracy: 0.6969 - val_loss: 0.9045 - val_accuracy: 0.6766\n",
"Epoch 8/25\n",
"93/93 [==============================] - ETA: 0s - loss: 0.7437 - accuracy: 0.7312\n",
"Epoch 8: val_accuracy improved from 0.67663 to 0.77853, saving model to alex_2.h5\n",
"93/93 [==============================] - 105s 1s/step - loss: 0.7437 - accuracy: 0.7312 - val_loss: 0.5997 - val_accuracy: 0.7785\n",
"Epoch 9/25\n",
"93/93 [==============================] - ETA: 0s - loss: 0.6769 - accuracy: 0.7638\n",
"Epoch 9: val_accuracy improved from 0.77853 to 0.80978, saving model to alex_2.h5\n",
"93/93 [==============================] - 105s 1s/step - loss: 0.6769 - accuracy: 0.7638 - val_loss: 0.5234 - val_accuracy: 0.8098\n",
"Epoch 10/25\n",
"93/93 [==============================] - ETA: 0s - loss: 0.5742 - accuracy: 0.7950\n",
"Epoch 10: val_accuracy did not improve from 0.80978\n",
"93/93 [==============================] - 106s 1s/step - loss: 0.5742 - accuracy: 0.7950 - val_loss: 1.3374 - val_accuracy: 0.5068\n",
"Epoch 11/25\n",
"93/93 [==============================] - ETA: 0s - loss: 0.5694 - accuracy: 0.8041\n",
"Epoch 11: val_accuracy improved from 0.80978 to 0.84375, saving model to alex_2.h5\n",
"93/93 [==============================] - 107s 1s/step - loss: 0.5694 - accuracy: 0.8041 - val_loss: 0.5118 - val_accuracy: 0.8438\n",
"Epoch 12/25\n",
"93/93 [==============================] - ETA: 0s - loss: 0.4730 - accuracy: 0.8347\n",
"Epoch 12: val_accuracy did not improve from 0.84375\n",
"93/93 [==============================] - 106s 1s/step - loss: 0.4730 - accuracy: 0.8347 - val_loss: 0.6001 - val_accuracy: 0.7826\n",
"Epoch 13/25\n",
"93/93 [==============================] - ETA: 0s - loss: 0.4713 - accuracy: 0.8364\n",
"Epoch 13: val_accuracy did not improve from 0.84375\n",
"93/93 [==============================] - 106s 1s/step - loss: 0.4713 - accuracy: 0.8364 - val_loss: 0.5150 - val_accuracy: 0.8125\n",
"Epoch 14/25\n",
"93/93 [==============================] - ETA: 0s - loss: 0.3892 - accuracy: 0.8646\n",
"Epoch 14: val_accuracy improved from 0.84375 to 0.86821, saving model to alex_2.h5\n",
"93/93 [==============================] - 110s 1s/step - loss: 0.3892 - accuracy: 0.8646 - val_loss: 0.3537 - val_accuracy: 0.8682\n",
"Epoch 15/25\n",
"93/93 [==============================] - ETA: 0s - loss: 0.3787 - accuracy: 0.8632\n",
"Epoch 15: val_accuracy did not improve from 0.86821\n",
"93/93 [==============================] - 109s 1s/step - loss: 0.3787 - accuracy: 0.8632 - val_loss: 0.5223 - val_accuracy: 0.7880\n",
"Epoch 16/25\n",
"93/93 [==============================] - ETA: 0s - loss: 0.3409 - accuracy: 0.8770\n",
"Epoch 16: val_accuracy did not improve from 0.86821\n",
"93/93 [==============================] - 110s 1s/step - loss: 0.3409 - accuracy: 0.8770 - val_loss: 0.3797 - val_accuracy: 0.8451\n",
"Epoch 17/25\n",
"93/93 [==============================] - ETA: 0s - loss: 0.4428 - accuracy: 0.8508\n",
"Epoch 17: val_accuracy did not improve from 0.86821\n",
"93/93 [==============================] - 108s 1s/step - loss: 0.4428 - accuracy: 0.8508 - val_loss: 0.9765 - val_accuracy: 0.6304\n",
"Epoch 18/25\n",
"93/93 [==============================] - ETA: 0s - loss: 0.3638 - accuracy: 0.8740\n",
"Epoch 18: val_accuracy improved from 0.86821 to 0.88451, saving model to alex_2.h5\n",
"93/93 [==============================] - 108s 1s/step - loss: 0.3638 - accuracy: 0.8740 - val_loss: 0.2889 - val_accuracy: 0.8845\n",
"Epoch 19/25\n",
"93/93 [==============================] - ETA: 0s - loss: 0.2869 - accuracy: 0.8942\n",
"Epoch 19: val_accuracy improved from 0.88451 to 0.89674, saving model to alex_2.h5\n",
"93/93 [==============================] - 109s 1s/step - loss: 0.2869 - accuracy: 0.8942 - val_loss: 0.2879 - val_accuracy: 0.8967\n",
"Epoch 20/25\n",
"93/93 [==============================] - ETA: 0s - loss: 0.2724 - accuracy: 0.9015\n",
"Epoch 20: val_accuracy improved from 0.89674 to 0.91168, saving model to alex_2.h5\n",
"93/93 [==============================] - 108s 1s/step - loss: 0.2724 - accuracy: 0.9015 - val_loss: 0.2781 - val_accuracy: 0.9117\n",
"Epoch 21/25\n",
"93/93 [==============================] - ETA: 0s - loss: 0.5926 - accuracy: 0.8021\n",
"Epoch 21: val_accuracy did not improve from 0.91168\n",
"93/93 [==============================] - 107s 1s/step - loss: 0.5926 - accuracy: 0.8021 - val_loss: 0.3587 - val_accuracy: 0.8709\n",
"Epoch 22/25\n",
"93/93 [==============================] - ETA: 0s - loss: 0.2875 - accuracy: 0.8978\n",
"Epoch 22: val_accuracy did not improve from 0.91168\n",
"93/93 [==============================] - 108s 1s/step - loss: 0.2875 - accuracy: 0.8978 - val_loss: 0.2895 - val_accuracy: 0.9035\n",
"Epoch 23/25\n",
"93/93 [==============================] - ETA: 0s - loss: 0.2233 - accuracy: 0.9267\n",
"Epoch 23: val_accuracy did not improve from 0.91168\n",
"93/93 [==============================] - 108s 1s/step - loss: 0.2233 - accuracy: 0.9267 - val_loss: 0.3617 - val_accuracy: 0.8723\n",
"Epoch 24/25\n",
"93/93 [==============================] - ETA: 0s - loss: 0.2837 - accuracy: 0.9005\n",
"Epoch 24: val_accuracy did not improve from 0.91168\n",
"93/93 [==============================] - 107s 1s/step - loss: 0.2837 - accuracy: 0.9005 - val_loss: 0.3122 - val_accuracy: 0.8981\n",
"Epoch 25/25\n",
"93/93 [==============================] - ETA: 0s - loss: 0.2049 - accuracy: 0.9368\n",
"Epoch 25: val_accuracy did not improve from 0.91168\n",
"93/93 [==============================] - 109s 1s/step - loss: 0.2049 - accuracy: 0.9368 - val_loss: 0.3776 - val_accuracy: 0.8750\n"
]
}
],
"source": [
"from keras.callbacks import ModelCheckpoint, EarlyStopping\n",
"\n",
"# save_freq='epoch' replaces the deprecated period=1 argument\n",
"# (the deprecation warning appeared in this cell's output).\n",
"checkpoint = ModelCheckpoint(\"alex_2.h5\", monitor='val_accuracy', verbose=1, save_best_only=True, save_weights_only=False, mode='auto', save_freq='epoch')\n",
"early = EarlyStopping(monitor='val_accuracy', min_delta=0, patience=20, verbose=1, mode='auto')\n",
"\n",
"# Model.fit accepts tf.data datasets directly; fit_generator is deprecated.\n",
"alex = model.fit(\n",
" train_ds_a, \n",
" steps_per_epoch=len(train_ds_a), \n",
" validation_data= val_ds_a, \n",
" validation_steps=len(val_ds_a), \n",
" epochs=25, \n",
" callbacks=[checkpoint,early])"
]
},
{
"cell_type": "code",
"execution_count": 9,
"metadata": {},
"outputs": [
{
"data": {
"image/png": "iVBORw0KGgoAAAANSUhEUgAAAjwAAAHHCAYAAAC7soLdAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjYuMiwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8o6BhiAAAACXBIWXMAAA9hAAAPYQGoP6dpAADdgElEQVR4nOzdZ1RUVxeA4fdOo3eQotgbKvbeW2Ildo1GsZt80STGVKNGTdE0U9T0WGKMNYmmqLHF3hv2LoogIEXpbWbu9+MyAwgqIDgC51lrFjBzyx4sbM7ZZx9JlmUZQRAEQRCEUkxl6QAEQRAEQRCKm0h4BEEQBEEo9UTCIwiCIAhCqScSHkEQBEEQSj2R8AiCIAiCUOqJhEcQBEEQhFJPJDyCIAiCIJR6IuERBEEQBKHUEwmPIAiCIAilnkh4BEEoMpIkMWvWrAKfd/36dSRJYunSpUUekyAIAoiERxBKnaVLlyJJEpIksXfv3lyvy7KMr68vkiTRu3dvC0QoCILw+ImERxBKKWtra1asWJHr+V27dhEaGoqVlZUFohIEQbAMkfAIQinVs2dP1q5di16vz/H8ihUraNKkCV5eXhaKrOxISkqydAiCIGQSCY8glFJDhw4lJiaGrVu3mp9LT0/nt99+Y9iwYXmek5SUxGuvvYavry9WVlbUqlWLzz77DFmWcxyXlpbGq6++ioeHBw4ODjzzzDOEhobmec2wsDDGjBmDp6cnVlZW1K1bl8WLFxfqPcXGxvL666/j7++Pvb09jo6O9OjRg5MnT+Y6NjU1lVmzZlGzZk2sra3x9vamf//+XL161XyM0Wjkq6++wt/fH2trazw8POjevTtHjx4FHlxbdG+90qxZs5AkiXPnzjFs2DBcXFxo27YtAKdOnWLUqFFUrVoVa2trvLy8GDNmDDExMXl+v8aOHYuPjw9WVlZUqVKF//3vf6Snp3Pt2jUkSeKLL77Idd7+/fuRJImVK1cW9NsqCGWCxtIBCIJQPCpXrkyrVq1YuXIlPXr0AGDTpk3ExcXx7LPPMn/+/BzHy7LMM888w44dOxg7diwNGzZk8+bNvPHGG4SFheX4ITtu3DiWL1/OsGHDaN26Nf/99x+9evXKFUNkZCQtW7ZEkiQmTZqEh4cHmzZtYuzYscTHxzN58uQCvadr166xfv16Bg0aRJUqVYiMjOT777+nQ4cOnDt3Dh8fHwAMBgO9e/dm+/btPPvss7zyyiskJCSwdetWzpw5Q7Vq1QAYO3YsS5cupUePHowbNw69Xs+ePXs4ePAgTZs2LVBsJoMGDaJGjRrMmTPHnChu3bqVa9euMXr0aLy8vDh79iw//PADZ8+e5eDBg0iSBMCtW7do3rw5d+/eZcKECdSuXZuwsDB+++03kpOTqVq1Km3atOHXX3/l1VdfzXHfX3/9FQcHB/r06VOouAWh1JMFQShVlixZIgPykSNH5IULF8oODg5ycnKyLMuyPGjQILlTp06yLMtypUqV5F69epnPW79+vQzIH3zwQY7rDRw4UJYkSb5y5Yosy7IcFBQkA/KLL76Y47hhw4bJgDxz5kzzc2PHjpW9vb3l6OjoHMc+++yzspOTkzmu4OBgGZCXLFnywPeWmpoqGwyGHM8FBwfLVlZW8nvvvWd+bvHixTIgf/7557muYTQaZVmW5f/++08G5Jdffvm+xzwornvf68yZM2VAHjp0aK5jTe8zu5UrV8qAvHv3bvNzgYGBskqlko8cOXLfmL7//nsZkM+fP29+LT09XXZ3d5dHjhyZ6zxBEBRiSksQSrHBgweTkpLCP//8Q0JCAv/88899p7M2btyIWq3m5ZdfzvH8a6+9hizLbNq0yXwckOu4e0drZFnm999/JyAgAFmWiY6ONj+6detGXFwcx48fL9D7sbKyQqVS/tsyGAzExMRgb29PrVq1clzr999/x93dnZdeeinXNUyjKb///juSJDFz5sz7HlMYL7zwQq7nbGxszJ
+npqYSHR1Ny5YtAcxxG41G1q9fT0BAQJ6jS6aYBg8ejLW1Nb/++qv5tc2bNxMdHc3w4cMLHbcglHYi4RGEUszDw4OuXbuyYsUK/vjjDwwGAwMHDszz2Bs3buDj44ODg0OO5/38/Myvmz6qVCrztJBJrVq1cnwdFRXF3bt3+eGHH/Dw8MjxGD16NAC3b98u0PsxGo188cUX1KhRAysrK9zd3fHw8ODUqVPExcWZj7t69Sq1atVCo7n/rP3Vq1fx8fHB1dW1QDE8TJUqVXI9FxsbyyuvvIKnpyc2NjZ4eHiYjzPFHRUVRXx8PPXq1Xvg9Z2dnQkICMixAu/XX3+lfPnydO7cuQjfiSCULqKGRxBKuWHDhjF+/HgiIiLo0aMHzs7Oj+W+RqMRgOHDhzNy5Mg8j6lfv36BrjlnzhxmzJjBmDFjeP/993F1dUWlUjF58mTz/YrS/UZ6DAbDfc/JPppjMnjwYPbv388bb7xBw4YNsbe3x2g00r1790LFHRgYyNq1a9m/fz/+/v789ddfvPjii+bRL0EQchMJjyCUcv369eP555/n4MGDrF69+r7HVapUiW3btpGQkJBjlOfChQvm100fjUajeRTF5OLFizmuZ1rBZTAY6Nq1a5G8l99++41OnTqxaNGiHM/fvXsXd3d389fVqlXj0KFDZGRkoNVq87xWtWrV2Lx5M7Gxsfcd5XFxcTFfPzvTaFd+3Llzh+3btzN79mzeffdd8/OXL1/OcZyHhweOjo6cOXPmodfs3r07Hh4e/Prrr7Ro0YLk5GRGjBiR75gEoSwSvw4IQilnb2/Pt99+y6xZswgICLjvcT179sRgMLBw4cIcz3/xxRdIkmRe6WX6eO8qry+//DLH12q1mgEDBvD777/n+UM8KiqqwO9FrVbnWiK/du1awsLCcjw3YMAAoqOjc70XwHz+gAEDkGWZ2bNn3/cYR0dH3N3d2b17d47Xv/nmmwLFnP2aJvd+v1QqFX379uXvv/82L4vPKyYAjUbD0KFDWbNmDUuXLsXf37/Ao2WCUNaIER5BKAPuN6WUXUBAAJ06dWLatGlcv36dBg0asGXLFv78808mT55srtlp2LAhQ4cO5ZtvviEuLo7WrVuzfft2rly5kuuaH330ETt27KBFixaMHz+eOnXqEBsby/Hjx9m2bRuxsbEFeh+9e/fmvffeY/To0bRu3ZrTp0/z66+/UrVq1RzHBQYGsmzZMqZMmcLhw4dp164dSUlJbNu2jRdffJE+ffrQqVMnRowYwfz587l8+bJ5emnPnj106tSJSZMmAcoS/I8++ohx48bRtGlTdu/ezaVLl/Ids6OjI+3bt+eTTz4hIyOD8uXLs2XLFoKDg3MdO2fOHLZs2UKHDh2YMGECfn5+hIeHs3btWvbu3ZtjOjIwMJD58+ezY8cOPv744wJ9HwWhTLLY+jBBEIpF9mXpD3LvsnRZluWEhAT51VdflX18fGStVivXqFFD/vTTT81Lok1SUlLkl19+WXZzc5Pt7OzkgIAA+ebNm7mWasuyLEdGRsoTJ06UfX19Za1WK3t5ecldunSRf/jhB/MxBVmW/tprr8ne3t6yjY2N3KZNG/nAgQNyhw4d5A4dOuQ4Njk5WZ42bZpcpUoV830HDhwoX7161XyMXq+XP/30U7l27dqyTqeTPTw85B49esjHjh3LcZ2xY8fKTk5OsoODgzx48GD59u3b912WHhUVlSvu0NBQuV+/frKzs7Ps5OQkDxo0SL5161ae368bN27IgYGBsoeHh2xlZSVXrVpVnjhxopyWlpbrunXr1pVVKpUcGhr6wO+bIAiyLMnyPeOsgiAIQonQqFEjXF1d2b59u6VDEYQnnqjhEQRBKIGOHj1KUFAQgYGBlg5FEEoEMcIjCIJQgpw5c4Zjx44xb948oqOjuXbtGtbW1pYOSxCeeGKERxAEoQT57bffGD16NBkZGaxcuVIkO4KQT2KERxAEQRCEUk+M8AiCIAiCUOqJhE
cQBEEQhFKvzDUeNBqN3Lp1CwcHh0faEVkQBEEQhMdHlmUSEhLw8fEp1L5xZS7huXXrFr6+vpYOQxAEQRCEQrh58yY
"text/plain": [
"<Figure size 640x480 with 1 Axes>"
]
},
"metadata": {},
"output_type": "display_data"
}
],
"source": [
"import matplotlib.pyplot as plt\n",
"\n",
"# Plot accuracy and loss curves from the History object. The previous title\n",
"# ('Model accuracy') was misleading since loss curves are plotted as well;\n",
"# use the explicit fig/ax interface and label the legend per line.\n",
"fig, ax = plt.subplots()\n",
"ax.plot(alex.history[\"accuracy\"], label=\"Accuracy\")\n",
"ax.plot(alex.history[\"val_accuracy\"], label=\"Validation Accuracy\")\n",
"ax.plot(alex.history[\"loss\"], label=\"Loss\")\n",
"ax.plot(alex.history[\"val_loss\"], label=\"Validation Loss\")\n",
"ax.set(title=\"AlexNet training history\", xlabel=\"Epoch\", ylabel=\"Value\")\n",
"ax.legend()\n",
"plt.show()"
]
},
{
"cell_type": "code",
"execution_count": 10,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"29/29 [==============================] - 10s 306ms/step - loss: 0.3675 - accuracy: 0.8631\n"
]
},
{
"data": {
"text/plain": [
"[0.367510586977005, 0.8631465435028076]"
]
},
"execution_count": 10,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"model.evaluate(test_ds_a)"
]
},
{
"attachments": {},
"cell_type": "markdown",
"metadata": {},
"source": [
"# VGG16"
]
},
{
"cell_type": "code",
"execution_count": 12,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"Training: 2990\n",
"Test: 935\n",
"Validation: 748\n"
]
}
],
"source": [
"train_ds_v, test_ds_v, val_ds_v = prepare_data('./plantvillage/color', (224, 224), 0.2, 0.2)"
]
},
{
"cell_type": "code",
"execution_count": 13,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"Model: \"model_1\"\n",
"_________________________________________________________________\n",
" Layer (type) Output Shape Param # \n",
"=================================================================\n",
" input_2 (InputLayer) [(None, 224, 224, 3)] 0 \n",
" \n",
" block1_conv1 (Conv2D) (None, 224, 224, 64) 1792 \n",
" \n",
" block1_conv2 (Conv2D) (None, 224, 224, 64) 36928 \n",
" \n",
" block1_pool (MaxPooling2D) (None, 112, 112, 64) 0 \n",
" \n",
" block2_conv1 (Conv2D) (None, 112, 112, 128) 73856 \n",
" \n",
" block2_conv2 (Conv2D) (None, 112, 112, 128) 147584 \n",
" \n",
" block2_pool (MaxPooling2D) (None, 56, 56, 128) 0 \n",
" \n",
" block3_conv1 (Conv2D) (None, 56, 56, 256) 295168 \n",
" \n",
" block3_conv2 (Conv2D) (None, 56, 56, 256) 590080 \n",
" \n",
" block3_conv3 (Conv2D) (None, 56, 56, 256) 590080 \n",
" \n",
" block3_pool (MaxPooling2D) (None, 28, 28, 256) 0 \n",
" \n",
" block4_conv1 (Conv2D) (None, 28, 28, 512) 1180160 \n",
" \n",
" block4_conv2 (Conv2D) (None, 28, 28, 512) 2359808 \n",
" \n",
" block4_conv3 (Conv2D) (None, 28, 28, 512) 2359808 \n",
" \n",
" block4_pool (MaxPooling2D) (None, 14, 14, 512) 0 \n",
" \n",
" block5_conv1 (Conv2D) (None, 14, 14, 512) 2359808 \n",
" \n",
" block5_conv2 (Conv2D) (None, 14, 14, 512) 2359808 \n",
" \n",
" block5_conv3 (Conv2D) (None, 14, 14, 512) 2359808 \n",
" \n",
" block5_pool (MaxPooling2D) (None, 7, 7, 512) 0 \n",
" \n",
" flatten_2 (Flatten) (None, 25088) 0 \n",
" \n",
" dense_4 (Dense) (None, 5) 125445 \n",
" \n",
"=================================================================\n",
"Total params: 14,840,133\n",
"Trainable params: 125,445\n",
"Non-trainable params: 14,714,688\n",
"_________________________________________________________________\n"
]
},
{
"name": "stderr",
"output_type": "stream",
"text": [
"/var/folders/3r/c8tg1h051m18qhsdccdysrt40000gn/T/ipykernel_14470/2199093522.py:50: UserWarning: `Model.fit_generator` is deprecated and will be removed in a future version. Please use `Model.fit`, which supports generators.\n",
" vggr = model.fit_generator(\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"Epoch 1/25\n",
"93/93 [==============================] - 389s 4s/step - loss: 0.3938 - accuracy: 0.8753 - val_loss: 0.1230 - val_accuracy: 0.9647\n",
"Epoch 2/25\n",
"93/93 [==============================] - 417s 4s/step - loss: 0.0512 - accuracy: 0.9909 - val_loss: 0.0867 - val_accuracy: 0.9715\n",
"Epoch 3/25\n",
"93/93 [==============================] - 424s 5s/step - loss: 0.0243 - accuracy: 0.9990 - val_loss: 0.0692 - val_accuracy: 0.9769\n",
"Epoch 4/25\n",
"93/93 [==============================] - 431s 5s/step - loss: 0.0148 - accuracy: 1.0000 - val_loss: 0.0614 - val_accuracy: 0.9769\n",
"Epoch 5/25\n",
"93/93 [==============================] - 439s 5s/step - loss: 0.0107 - accuracy: 1.0000 - val_loss: 0.0607 - val_accuracy: 0.9810\n",
"Epoch 6/25\n",
"93/93 [==============================] - 445s 5s/step - loss: 0.0073 - accuracy: 1.0000 - val_loss: 0.0670 - val_accuracy: 0.9755\n",
"Epoch 7/25\n",
"93/93 [==============================] - 448s 5s/step - loss: 0.0058 - accuracy: 1.0000 - val_loss: 0.0559 - val_accuracy: 0.9783\n",
"Epoch 8/25\n",
"93/93 [==============================] - 451s 5s/step - loss: 0.0046 - accuracy: 1.0000 - val_loss: 0.0530 - val_accuracy: 0.9796\n",
"Epoch 9/25\n",
"93/93 [==============================] - 482s 5s/step - loss: 0.0038 - accuracy: 1.0000 - val_loss: 0.0538 - val_accuracy: 0.9783\n",
"Epoch 10/25\n",
"93/93 [==============================] - 488s 5s/step - loss: 0.0032 - accuracy: 1.0000 - val_loss: 0.0494 - val_accuracy: 0.9810\n",
"Epoch 11/25\n",
"93/93 [==============================] - 494s 5s/step - loss: 0.0028 - accuracy: 1.0000 - val_loss: 0.0502 - val_accuracy: 0.9796\n",
"Epoch 12/25\n",
"93/93 [==============================] - 491s 5s/step - loss: 0.0024 - accuracy: 1.0000 - val_loss: 0.0503 - val_accuracy: 0.9837\n",
"Epoch 13/25\n",
"93/93 [==============================] - 494s 5s/step - loss: 0.0021 - accuracy: 1.0000 - val_loss: 0.0485 - val_accuracy: 0.9810\n",
"Epoch 14/25\n",
"93/93 [==============================] - 486s 5s/step - loss: 0.0019 - accuracy: 1.0000 - val_loss: 0.0448 - val_accuracy: 0.9851\n",
"Epoch 15/25\n",
"93/93 [==============================] - 485s 5s/step - loss: 0.0017 - accuracy: 1.0000 - val_loss: 0.0474 - val_accuracy: 0.9810\n",
"Epoch 16/25\n",
"93/93 [==============================] - 503s 5s/step - loss: 0.0015 - accuracy: 1.0000 - val_loss: 0.0430 - val_accuracy: 0.9823\n",
"Epoch 17/25\n",
"93/93 [==============================] - 472s 5s/step - loss: 0.0013 - accuracy: 1.0000 - val_loss: 0.0481 - val_accuracy: 0.9796\n",
"Epoch 18/25\n",
"93/93 [==============================] - 474s 5s/step - loss: 0.0012 - accuracy: 1.0000 - val_loss: 0.0503 - val_accuracy: 0.9783\n",
"Epoch 19/25\n",
"93/93 [==============================] - 9356s 102s/step - loss: 0.0011 - accuracy: 1.0000 - val_loss: 0.0496 - val_accuracy: 0.9783\n",
"Epoch 20/25\n",
"93/93 [==============================] - 10544s 115s/step - loss: 0.0010 - accuracy: 1.0000 - val_loss: 0.0466 - val_accuracy: 0.9837\n",
"Epoch 21/25\n",
"93/93 [==============================] - 10648s 116s/step - loss: 9.2169e-04 - accuracy: 1.0000 - val_loss: 0.0457 - val_accuracy: 0.9837\n",
"Epoch 22/25\n",
"93/93 [==============================] - 11629s 116s/step - loss: 8.5353e-04 - accuracy: 1.0000 - val_loss: 0.0462 - val_accuracy: 0.9837\n",
"Epoch 23/25\n",
"93/93 [==============================] - 4931s 54s/step - loss: 7.7390e-04 - accuracy: 1.0000 - val_loss: 0.0466 - val_accuracy: 0.9837\n",
"Epoch 24/25\n",
"93/93 [==============================] - 419s 5s/step - loss: 7.1216e-04 - accuracy: 1.0000 - val_loss: 0.0456 - val_accuracy: 0.9823\n",
"Epoch 25/25\n",
"93/93 [==============================] - 444s 5s/step - loss: 6.6600e-04 - accuracy: 1.0000 - val_loss: 0.0463 - val_accuracy: 0.9837\n"
]
}
],
"source": [
"import keras, os\n",
"import numpy as np\n",
"from glob import glob\n",
"import matplotlib.pyplot as plt\n",
"import ssl\n",
"\n",
"# Deduplicated imports (Sequential, ImageDataGenerator, Dense, Flatten and\n",
"# numpy were each imported twice in the original cell).\n",
"from keras.models import Sequential, Model\n",
"from keras.layers import Input, Lambda, Dense, Conv2D, MaxPool2D, Flatten\n",
"from keras.preprocessing import image\n",
"from keras.preprocessing.image import ImageDataGenerator\n",
"from keras.applications import VGG16\n",
"\n",
"# Allow the pretrained-weights download despite missing local SSL certificates.\n",
"ssl._create_default_https_context = ssl._create_unverified_context\n",
"\n",
"IMAGE_SIZE = [224, 224]\n",
"\n",
"# VGG16 convolutional base pretrained on ImageNet, without its classifier head\n",
"# (the original comment wrongly said 'resnet').\n",
"vgg2 = VGG16(input_shape=IMAGE_SIZE + [3], weights='imagenet', include_top=False)\n",
"\n",
"# Freeze the pretrained weights; only the new head is trained.\n",
"for layer in vgg2.layers:\n",
" layer.trainable = False\n",
"\n",
"# Number of target classes for the new head (was defined but unused before;\n",
"# the literal 5 was hard-coded in the Dense layer instead).\n",
"classes = 5\n",
"\n",
"# New classification head on top of the frozen base.\n",
"x = Flatten()(vgg2.output)\n",
"prediction = Dense(classes, activation='softmax')(x)\n",
"\n",
"model = Model(inputs=vgg2.input, outputs=prediction)\n",
"\n",
"model.summary()\n",
"\n",
"model.compile(\n",
" loss='sparse_categorical_crossentropy',\n",
" optimizer='adam',\n",
" metrics=['accuracy']\n",
")\n",
"\n",
"# Model.fit accepts tf.data datasets directly; fit_generator is deprecated.\n",
"vggr = model.fit(\n",
" train_ds_v,\n",
" validation_data=val_ds_v,\n",
" epochs=25,\n",
" steps_per_epoch=len(train_ds_v),\n",
" validation_steps=len(val_ds_v))\n"
]
},
{
"cell_type": "code",
"execution_count": 14,
"metadata": {},
"outputs": [
{
"data": {
"image/png": "iVBORw0KGgoAAAANSUhEUgAAAjwAAAHHCAYAAAC7soLdAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjYuMiwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8o6BhiAAAACXBIWXMAAA9hAAAPYQGoP6dpAABlsElEQVR4nO3dd3xT9f4/8NdJ2qQ7nXRA6WDP4mUJKEOqZfWyFESgjAIXBQTqQC5br6DiQAXl4mVc7o8lIogLhMoSEBC+ZcgQSqGMtlCge6RJPr8/0oSmA1poetr09fSRR5JzPuecdw6x55XPWZIQQoCIiIjIhinkLoCIiIjI2hh4iIiIyOYx8BAREZHNY+AhIiIim8fAQ0RERDaPgYeIiIhsHgMPERER2TwGHiIiIrJ5DDxERERk8xh4iKjSSJKE+fPnV3i6K1euQJIkrFmzptJrIiICGHiIbM6aNWsgSRIkScJvv/1WYrwQAoGBgZAkCf369ZOhQiKiqsfAQ2SjHBwcsH79+hLD9+3bh+vXr0OtVstQFRGRPBh4iGxUnz59sHnzZuh0Oovh69evR9u2beHn5ydTZbVHdna23CUQUSEGHiIbNWzYMNy5cwe7du0yD9Nqtfjmm2/w0ksvlTpNdnY2XnvtNQQGBkKtVqNJkyb48MMPIYSwaJefn4/p06fDx8cHrq6u+Pvf/47r16+XOs8bN25g7Nix8PX1hVqtRosWLbBq1apH+kx3797F66+/jlatWsHFxQVubm7o3bs3Tp48WaJtXl4e5s+fj8aNG8PBwQH+/v4YNGgQ4uPjzW0MBgM+/fRTtGrVCg4ODvDx8UGvXr3wxx9/AHjwsUXFj1eaP38+JEnC2bNn8dJLL8HDwwNPPfUUAODUqVMYPXo0QkND4eDgAD8/P4wdOxZ37twpdX1FR0cjICAAarUaISEhePnll6HVanH58mVIkoRPPvmkxHSHDh2CJEnYsGFDRVcrUa1gJ3cBRGQdwcHB6NSpEzZs2IDevXsDAH7++Wekp6fjxRdfxGeffWbRXgiBv//979izZw+io6PRpk0b7Ny5E2+88QZu3LhhsZEdN24c/t//+3946aWX0LlzZ/z666/o27dviRpSUlLw5JNPQpIkTJ48GT4+Pvj5558RHR2NjIwMTJs2rUKf6fLly9i2bRteeOEFhISEICUlBf/+97/RrVs3nD17FgEBAQAAvV6Pfv36ITY2Fi+++CKmTp2KzMxM7Nq1C2fOnEGDBg0AANHR0VizZg169+6NcePGQafT4cCBA/j999/Rrl27CtVm8sILL6BRo0ZYuHChOSju2rULly9fxpgxY+Dn54c///wTK1aswJ9//onff/8dkiQBAG7evIkOHTogLS0NEyZMQNOmTXHjxg188803yMnJQWhoKLp06YJ169Zh+vTpFstdt24dXF1d0b9//0eqm8jmCSKyKatXrxYAxLFjx8TSpUuFq6uryMnJEUII8cILL4gePXoIIYQICgoSffv2NU+3bds2AUD861//spjf888/LyRJEpcuXRJCCBEXFycAiFdeecWi3UsvvSQAiHnz5pmHRUdHC39/f5GammrR9sUXXxQajcZcV0JCggAgVq9e/cDPlpeXJ/R6vcWwhIQEoVarxdtvv20etmrVKgFAfPzxxyXmYTAYhBBC/PrrrwKAePXVV8ts86C6in/WefPmCQBi2LBhJdqaPmdRGzZsEADE/v37zcOioqKEQqEQx44dK7Omf//73wKAOHfunHmcVqsV3t7eYtSoUSWmIyIj7tIismFDhgxBbm4ufvjhB2RmZuKHH34oc3fWTz/9BKVSiVdffdVi+GuvvQYhBH7++WdzOwAl2hXvrRFCYMuWLYiMjIQQAqmpqeZHREQE0tPTceLEiQp9HrVaDYXC+GdLr9fjzp07cHFxQZMmTSzmtWXLFnh7e2PKlCkl5mHqTdmyZQskScK8efPKbPMoJk6cWGKYo6Oj+XVeXh
5SU1Px5JNPAoC5boPBgG3btiEyMrLU3iVTTUOGDIGDgwPWrVtnHrdz506kpqZixIgRj1w3ka1j4CGyYT4+PggPD8f69evx7bffQq/X4/nnny+17dWrVxEQEABXV1eL4c2aNTOPNz0rFArzbiGTJk2aWLy/ffs20tLSsGLFCvj4+Fg8xowZAwC4detWhT6PwWDAJ598gkaNGkGtVsPb2xs+Pj44deoU0tPTze3i4+PRpEkT2NmVvdc+Pj4eAQEB8PT0rFANDxMSElJi2N27dzF16lT4+vrC0dERPj4+5namum/fvo2MjAy0bNnygfN3d3dHZGSkxRl469atQ926dfHMM89U4ichsi08hofIxr300ksYP348kpOT0bt3b7i7u1fJcg0GAwBgxIgRGDVqVKltWrduXaF5Lly4EHPmzMHYsWPxzjvvwNPTEwqFAtOmTTMvrzKV1dOj1+vLnKZob47JkCFDcOjQIbzxxhto06YNXFxcYDAY0KtXr0eqOyoqCps3b8ahQ4fQqlUrbN++Ha+88oq594uISmLgIbJxAwcOxD/+8Q/8/vvv2LRpU5ntgoKCsHv3bmRmZlr08pw/f9483vRsMBjMvSgmFy5csJif6QwuvV6P8PDwSvks33zzDXr06IGVK1daDE9LS4O3t7f5fYMGDXDkyBEUFBTA3t6+1Hk1aNAAO3fuxN27d8vs5fHw8DDPvyhTb1d53Lt3D7GxsViwYAHmzp1rHn7x4kWLdj4+PnBzc8OZM2ceOs9evXrBx8cH69atQ8eOHZGTk4ORI0eWuyai2og/B4hsnIuLC7788kvMnz8fkZGRZbbr06cP9Ho9li5dajH8k08+gSRJ5jO9TM/Fz/JasmSJxXulUonBgwdjy5YtpW7Eb9++XeHPolQqS5wiv3nzZty4ccNi2ODBg5GamlriswAwTz948GAIIbBgwYIy27i5ucHb2xv79++3GP/FF19UqOai8zQpvr4UCgUGDBiA77//3nxafGk1AYCdnR2GDRuGr7/+GmvWrEGrVq0q3FtGVNuwh4eoFihrl1JRkZGR6NGjB2bNmoUrV64gLCwMv/zyC7777jtMmzbNfMxOmzZtMGzYMHzxxRdIT09H586dERsbi0uXLpWY53vvvYc9e/agY8eOGD9+PJo3b467d+/ixIkT2L17N+7evVuhz9GvXz+8/fbbGDNmDDp37ozTp09j3bp1CA0NtWgXFRWFtWvXIiYmBkePHsXTTz+N7Oxs7N69G6+88gr69++PHj16YOTIkfjss89w8eJF8+6lAwcOoEePHpg8eTIA4yn47733HsaNG4d27dph//79+Ouvv8pds5ubG7p27YoPPvgABQUFqFu3Ln755RckJCSUaLtw4UL88ssv6NatGyZMmIBmzZohKSkJmzdvxm+//WaxOzIqKgqfffYZ9uzZg/fff79C65GoVpLt/DAisoqip6U/SPHT0oUQIjMzU0yfPl0EBAQIe3t70ahRI7F48WLzKdEmubm54tVXXxVeXl7C2dlZREZGimvXrpU4VVsIIVJSUsSkSZNEYGCgsLe3F35+fqJnz55ixYoV5jYVOS39tddeE/7+/sLR0VF06dJFHD58WHTr1k1069bNom1OTo6YNWuWCAkJMS/3+eefF/Hx8eY2Op1OLF68WDRt2lSoVCrh4+MjevfuLY4fP24xn+joaKHRaISrq6sYMmSIuHXrVpmnpd++fbtE3devXxcDBw4U7u7uQqPRiBdeeEHcvHmz1PV19epVERUVJXx8fIRarRahoaFi0qRJIj8/v8R8W7RoIRQKhbh+/foD1xsRCSEJUayflYiIaoQnnngCnp6eiI2NlbsUomqPx/AQEdVAf/zxB+Li4hAVFSV3KUQ1Ant4iIhqkDNnzuD48eP46KOPkJqaisuXL8PBwUHusoiqPfbwEBHVIN988w3GjBmDgoICbNiwgWGHqJzYw0NEREQ2jz08REREZPMYeIiIiMjm1boLDxoMBt
y8eROurq6PdUdkIiIiqjpCCGRmZiIgIOCR7htX6wLPzZs3ERgYKHcZRERE9AiuXbuGevXqVXi6Whd4TDdFvHbtGtz
"text/plain": [
"<Figure size 640x480 with 1 Axes>"
]
},
"metadata": {},
"output_type": "display_data"
}
],
"source": [
 "import matplotlib.pyplot as plt\n",
 "\n",
 "# Plot training curves from the fit() History object `vggr` (defined in an earlier cell).\n",
 "plt.plot(vggr.history[\"accuracy\"])\n",
 "plt.plot(vggr.history[\"val_accuracy\"])\n",
 "plt.plot(vggr.history[\"loss\"])\n",
 "plt.plot(vggr.history[\"val_loss\"])\n",
 "# Title corrected: the figure shows both accuracy and loss curves, not accuracy alone.\n",
 "plt.title(\"Model accuracy and loss\")\n",
 "plt.ylabel(\"Value\")\n",
 "plt.xlabel(\"Epoch\")\n",
 "plt.legend([\"Accuracy\", \"Validation Accuracy\", \"Loss\", \"Validation Loss\"])\n",
 "plt.show()"
]
},
{
"cell_type": "code",
"execution_count": 15,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"29/29 [==============================] - 112s 4s/step - loss: 0.0430 - accuracy: 0.9860\n"
]
},
{
"data": {
"text/plain": [
"[0.043045952916145325, 0.985991358757019]"
]
},
"execution_count": 15,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
 "# Evaluate the trained model on the held-out test dataset `test_ds_v`.\n",
 "# Per the captured output above, this reports [loss, accuracy]\n",
 "# (the metrics configured when the model was compiled).\n",
 "model.evaluate(test_ds_v)"
]
},
{
"attachments": {},
"cell_type": "markdown",
"metadata": {},
"source": [
"# ResNet101V2"
]
},
{
"cell_type": "code",
"execution_count": 16,
"metadata": {},
"outputs": [],
"source": [
 "from keras.layers import Input, Lambda, Dense, Flatten\n",
 "from keras.models import Model\n",
 "from keras.preprocessing import image\n",
 "from keras.preprocessing.image import ImageDataGenerator\n",
 "from keras.models import Sequential\n",
 "import numpy as np\n",
 "from glob import glob\n",
 "import matplotlib.pyplot as plt\n",
 "import ssl\n",
 "# NOTE(review): disabling TLS certificate verification is insecure; it is only\n",
 "# here so the pretrained weights can be downloaded on machines with a broken\n",
 "# certificate store. Prefer fixing the local certificates instead.\n",
 "ssl._create_default_https_context = ssl._create_unverified_context\n",
 "from keras.applications import ResNet101V2\n",
 "\n",
 "# Input resolution expected by the ImageNet-pretrained backbone.\n",
 "IMAGE_SIZE = [224, 224]\n",
 "\n",
 "# ResNet101V2 backbone without its classification head (include_top=False),\n",
 "# used as a fixed feature extractor for transfer learning.\n",
 "resnet = ResNet101V2(input_shape=IMAGE_SIZE + [3], weights='imagenet', include_top=False)\n",
 "\n",
 "# Freeze all pretrained layers so only the new head is trained.\n",
 "for layer in resnet.layers:\n",
 "    layer.trainable = False\n",
 "\n",
 "# Number of target classes in the village dataset.\n",
 "classes = 5\n",
 "\n",
 "# New classification head: flatten backbone features, then softmax over the\n",
 "# classes. Fixed: use the `classes` constant instead of a duplicated literal 5.\n",
 "x = Flatten()(resnet.output)\n",
 "prediction = Dense(classes, activation='softmax')(x)"
]
},
{
"cell_type": "code",
"execution_count": 17,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"Model: \"model_2\"\n",
"__________________________________________________________________________________________________\n",
" Layer (type) Output Shape Param # Connected to \n",
"==================================================================================================\n",
" input_3 (InputLayer) [(None, 224, 224, 3 0 [] \n",
" )] \n",
" \n",
" conv1_pad (ZeroPadding2D) (None, 230, 230, 3) 0 ['input_3[0][0]'] \n",
" \n",
" conv1_conv (Conv2D) (None, 112, 112, 64 9472 ['conv1_pad[0][0]'] \n",
" ) \n",
" \n",
" pool1_pad (ZeroPadding2D) (None, 114, 114, 64 0 ['conv1_conv[0][0]'] \n",
" ) \n",
" \n",
" pool1_pool (MaxPooling2D) (None, 56, 56, 64) 0 ['pool1_pad[0][0]'] \n",
" \n",
" conv2_block1_preact_bn (BatchN (None, 56, 56, 64) 256 ['pool1_pool[0][0]'] \n",
" ormalization) \n",
" \n",
" conv2_block1_preact_relu (Acti (None, 56, 56, 64) 0 ['conv2_block1_preact_bn[0][0]'] \n",
" vation) \n",
" \n",
" conv2_block1_1_conv (Conv2D) (None, 56, 56, 64) 4096 ['conv2_block1_preact_relu[0][0]'\n",
" ] \n",
" \n",
" conv2_block1_1_bn (BatchNormal (None, 56, 56, 64) 256 ['conv2_block1_1_conv[0][0]'] \n",
" ization) \n",
" \n",
" conv2_block1_1_relu (Activatio (None, 56, 56, 64) 0 ['conv2_block1_1_bn[0][0]'] \n",
" n) \n",
" \n",
" conv2_block1_2_pad (ZeroPaddin (None, 58, 58, 64) 0 ['conv2_block1_1_relu[0][0]'] \n",
" g2D) \n",
" \n",
" conv2_block1_2_conv (Conv2D) (None, 56, 56, 64) 36864 ['conv2_block1_2_pad[0][0]'] \n",
" \n",
" conv2_block1_2_bn (BatchNormal (None, 56, 56, 64) 256 ['conv2_block1_2_conv[0][0]'] \n",
" ization) \n",
" \n",
" conv2_block1_2_relu (Activatio (None, 56, 56, 64) 0 ['conv2_block1_2_bn[0][0]'] \n",
" n) \n",
" \n",
" conv2_block1_0_conv (Conv2D) (None, 56, 56, 256) 16640 ['conv2_block1_preact_relu[0][0]'\n",
" ] \n",
" \n",
" conv2_block1_3_conv (Conv2D) (None, 56, 56, 256) 16640 ['conv2_block1_2_relu[0][0]'] \n",
" \n",
" conv2_block1_out (Add) (None, 56, 56, 256) 0 ['conv2_block1_0_conv[0][0]', \n",
" 'conv2_block1_3_conv[0][0]'] \n",
" \n",
" conv2_block2_preact_bn (BatchN (None, 56, 56, 256) 1024 ['conv2_block1_out[0][0]'] \n",
" ormalization) \n",
" \n",
" conv2_block2_preact_relu (Acti (None, 56, 56, 256) 0 ['conv2_block2_preact_bn[0][0]'] \n",
" vation) \n",
" \n",
" conv2_block2_1_conv (Conv2D) (None, 56, 56, 64) 16384 ['conv2_block2_preact_relu[0][0]'\n",
" ] \n",
" \n",
" conv2_block2_1_bn (BatchNormal (None, 56, 56, 64) 256 ['conv2_block2_1_conv[0][0]'] \n",
" ization) \n",
" \n",
" conv2_block2_1_relu (Activatio (None, 56, 56, 64) 0 ['conv2_block2_1_bn[0][0]'] \n",
" n) \n",
" \n",
" conv2_block2_2_pad (ZeroPaddin (None, 58, 58, 64) 0 ['conv2_block2_1_relu[0][0]'] \n",
" g2D) \n",
" \n",
" conv2_block2_2_conv (Conv2D) (None, 56, 56, 64) 36864 ['conv2_block2_2_pad[0][0]'] \n",
" \n",
" conv2_block2_2_bn (BatchNormal (None, 56, 56, 64) 256 ['conv2_block2_2_conv[0][0]'] \n",
" ization) \n",
" \n",
" conv2_block2_2_relu (Activatio (None, 56, 56, 64) 0 ['conv2_block2_2_bn[0][0]'] \n",
" n) \n",
" \n",
" conv2_block2_3_conv (Conv2D) (None, 56, 56, 256) 16640 ['conv2_block2_2_relu[0][0]'] \n",
" \n",
" conv2_block2_out (Add) (None, 56, 56, 256) 0 ['conv2_block1_out[0][0]', \n",
" 'conv2_block2_3_conv[0][0]'] \n",
" \n",
" conv2_block3_preact_bn (BatchN (None, 56, 56, 256) 1024 ['conv2_block2_out[0][0]'] \n",
" ormalization) \n",
" \n",
" conv2_block3_preact_relu (Acti (None, 56, 56, 256) 0 ['conv2_block3_preact_bn[0][0]'] \n",
" vation) \n",
" \n",
" conv2_block3_1_conv (Conv2D) (None, 56, 56, 64) 16384 ['conv2_block3_preact_relu[0][0]'\n",
" ] \n",
" \n",
" conv2_block3_1_bn (BatchNormal (None, 56, 56, 64) 256 ['conv2_block3_1_conv[0][0]'] \n",
" ization) \n",
" \n",
" conv2_block3_1_relu (Activatio (None, 56, 56, 64) 0 ['conv2_block3_1_bn[0][0]'] \n",
" n) \n",
" \n",
" conv2_block3_2_pad (ZeroPaddin (None, 58, 58, 64) 0 ['conv2_block3_1_relu[0][0]'] \n",
" g2D) \n",
" \n",
" conv2_block3_2_conv (Conv2D) (None, 28, 28, 64) 36864 ['conv2_block3_2_pad[0][0]'] \n",
" \n",
" conv2_block3_2_bn (BatchNormal (None, 28, 28, 64) 256 ['conv2_block3_2_conv[0][0]'] \n",
" ization) \n",
" \n",
" conv2_block3_2_relu (Activatio (None, 28, 28, 64) 0 ['conv2_block3_2_bn[0][0]'] \n",
" n) \n",
" \n",
" max_pooling2d_3 (MaxPooling2D) (None, 28, 28, 256) 0 ['conv2_block2_out[0][0]'] \n",
" \n",
" conv2_block3_3_conv (Conv2D) (None, 28, 28, 256) 16640 ['conv2_block3_2_relu[0][0]'] \n",
" \n",
" conv2_block3_out (Add) (None, 28, 28, 256) 0 ['max_pooling2d_3[0][0]', \n",
" 'conv2_block3_3_conv[0][0]'] \n",
" \n",
" conv3_block1_preact_bn (BatchN (None, 28, 28, 256) 1024 ['conv2_block3_out[0][0]'] \n",
" ormalization) \n",
" \n",
" conv3_block1_preact_relu (Acti (None, 28, 28, 256) 0 ['conv3_block1_preact_bn[0][0]'] \n",
" vation) \n",
" \n",
" conv3_block1_1_conv (Conv2D) (None, 28, 28, 128) 32768 ['conv3_block1_preact_relu[0][0]'\n",
" ] \n",
" \n",
" conv3_block1_1_bn (BatchNormal (None, 28, 28, 128) 512 ['conv3_block1_1_conv[0][0]'] \n",
" ization) \n",
" \n",
" conv3_block1_1_relu (Activatio (None, 28, 28, 128) 0 ['conv3_block1_1_bn[0][0]'] \n",
" n) \n",
" \n",
" conv3_block1_2_pad (ZeroPaddin (None, 30, 30, 128) 0 ['conv3_block1_1_relu[0][0]'] \n",
" g2D) \n",
" \n",
" conv3_block1_2_conv (Conv2D) (None, 28, 28, 128) 147456 ['conv3_block1_2_pad[0][0]'] \n",
" \n",
" conv3_block1_2_bn (BatchNormal (None, 28, 28, 128) 512 ['conv3_block1_2_conv[0][0]'] \n",
" ization) \n",
" \n",
" conv3_block1_2_relu (Activatio (None, 28, 28, 128) 0 ['conv3_block1_2_bn[0][0]'] \n",
" n) \n",
" \n",
" conv3_block1_0_conv (Conv2D) (None, 28, 28, 512) 131584 ['conv3_block1_preact_relu[0][0]'\n",
" ] \n",
" \n",
" conv3_block1_3_conv (Conv2D) (None, 28, 28, 512) 66048 ['conv3_block1_2_relu[0][0]'] \n",
" \n",
" conv3_block1_out (Add) (None, 28, 28, 512) 0 ['conv3_block1_0_conv[0][0]', \n",
" 'conv3_block1_3_conv[0][0]'] \n",
" \n",
" conv3_block2_preact_bn (BatchN (None, 28, 28, 512) 2048 ['conv3_block1_out[0][0]'] \n",
" ormalization) \n",
" \n",
" conv3_block2_preact_relu (Acti (None, 28, 28, 512) 0 ['conv3_block2_preact_bn[0][0]'] \n",
" vation) \n",
" \n",
" conv3_block2_1_conv (Conv2D) (None, 28, 28, 128) 65536 ['conv3_block2_preact_relu[0][0]'\n",
" ] \n",
" \n",
" conv3_block2_1_bn (BatchNormal (None, 28, 28, 128) 512 ['conv3_block2_1_conv[0][0]'] \n",
" ization) \n",
" \n",
" conv3_block2_1_relu (Activatio (None, 28, 28, 128) 0 ['conv3_block2_1_bn[0][0]'] \n",
" n) \n",
" \n",
" conv3_block2_2_pad (ZeroPaddin (None, 30, 30, 128) 0 ['conv3_block2_1_relu[0][0]'] \n",
" g2D) \n",
" \n",
" conv3_block2_2_conv (Conv2D) (None, 28, 28, 128) 147456 ['conv3_block2_2_pad[0][0]'] \n",
" \n",
" conv3_block2_2_bn (BatchNormal (None, 28, 28, 128) 512 ['conv3_block2_2_conv[0][0]'] \n",
" ization) \n",
" \n",
" conv3_block2_2_relu (Activatio (None, 28, 28, 128) 0 ['conv3_block2_2_bn[0][0]'] \n",
" n) \n",
" \n",
" conv3_block2_3_conv (Conv2D) (None, 28, 28, 512) 66048 ['conv3_block2_2_relu[0][0]'] \n",
" \n",
" conv3_block2_out (Add) (None, 28, 28, 512) 0 ['conv3_block1_out[0][0]', \n",
" 'conv3_block2_3_conv[0][0]'] \n",
" \n",
" conv3_block3_preact_bn (BatchN (None, 28, 28, 512) 2048 ['conv3_block2_out[0][0]'] \n",
" ormalization) \n",
" \n",
" conv3_block3_preact_relu (Acti (None, 28, 28, 512) 0 ['conv3_block3_preact_bn[0][0]'] \n",
" vation) \n",
" \n",
" conv3_block3_1_conv (Conv2D) (None, 28, 28, 128) 65536 ['conv3_block3_preact_relu[0][0]'\n",
" ] \n",
" \n",
" conv3_block3_1_bn (BatchNormal (None, 28, 28, 128) 512 ['conv3_block3_1_conv[0][0]'] \n",
" ization) \n",
" \n",
" conv3_block3_1_relu (Activatio (None, 28, 28, 128) 0 ['conv3_block3_1_bn[0][0]'] \n",
" n) \n",
" \n",
" conv3_block3_2_pad (ZeroPaddin (None, 30, 30, 128) 0 ['conv3_block3_1_relu[0][0]'] \n",
" g2D) \n",
" \n",
" conv3_block3_2_conv (Conv2D) (None, 28, 28, 128) 147456 ['conv3_block3_2_pad[0][0]'] \n",
" \n",
" conv3_block3_2_bn (BatchNormal (None, 28, 28, 128) 512 ['conv3_block3_2_conv[0][0]'] \n",
" ization) \n",
" \n",
" conv3_block3_2_relu (Activatio (None, 28, 28, 128) 0 ['conv3_block3_2_bn[0][0]'] \n",
" n) \n",
" \n",
" conv3_block3_3_conv (Conv2D) (None, 28, 28, 512) 66048 ['conv3_block3_2_relu[0][0]'] \n",
" \n",
" conv3_block3_out (Add) (None, 28, 28, 512) 0 ['conv3_block2_out[0][0]', \n",
" 'conv3_block3_3_conv[0][0]'] \n",
" \n",
" conv3_block4_preact_bn (BatchN (None, 28, 28, 512) 2048 ['conv3_block3_out[0][0]'] \n",
" ormalization) \n",
" \n",
" conv3_block4_preact_relu (Acti (None, 28, 28, 512) 0 ['conv3_block4_preact_bn[0][0]'] \n",
" vation) \n",
" \n",
" conv3_block4_1_conv (Conv2D) (None, 28, 28, 128) 65536 ['conv3_block4_preact_relu[0][0]'\n",
" ] \n",
" \n",
" conv3_block4_1_bn (BatchNormal (None, 28, 28, 128) 512 ['conv3_block4_1_conv[0][0]'] \n",
" ization) \n",
" \n",
" conv3_block4_1_relu (Activatio (None, 28, 28, 128) 0 ['conv3_block4_1_bn[0][0]'] \n",
" n) \n",
" \n",
" conv3_block4_2_pad (ZeroPaddin (None, 30, 30, 128) 0 ['conv3_block4_1_relu[0][0]'] \n",
" g2D) \n",
" \n",
" conv3_block4_2_conv (Conv2D) (None, 14, 14, 128) 147456 ['conv3_block4_2_pad[0][0]'] \n",
" \n",
" conv3_block4_2_bn (BatchNormal (None, 14, 14, 128) 512 ['conv3_block4_2_conv[0][0]'] \n",
" ization) \n",
" \n",
" conv3_block4_2_relu (Activatio (None, 14, 14, 128) 0 ['conv3_block4_2_bn[0][0]'] \n",
" n) \n",
" \n",
" max_pooling2d_4 (MaxPooling2D) (None, 14, 14, 512) 0 ['conv3_block3_out[0][0]'] \n",
" \n",
" conv3_block4_3_conv (Conv2D) (None, 14, 14, 512) 66048 ['conv3_block4_2_relu[0][0]'] \n",
" \n",
" conv3_block4_out (Add) (None, 14, 14, 512) 0 ['max_pooling2d_4[0][0]', \n",
" 'conv3_block4_3_conv[0][0]'] \n",
" \n",
" conv4_block1_preact_bn (BatchN (None, 14, 14, 512) 2048 ['conv3_block4_out[0][0]'] \n",
" ormalization) \n",
" \n",
" conv4_block1_preact_relu (Acti (None, 14, 14, 512) 0 ['conv4_block1_preact_bn[0][0]'] \n",
" vation) \n",
" \n",
" conv4_block1_1_conv (Conv2D) (None, 14, 14, 256) 131072 ['conv4_block1_preact_relu[0][0]'\n",
" ] \n",
" \n",
" conv4_block1_1_bn (BatchNormal (None, 14, 14, 256) 1024 ['conv4_block1_1_conv[0][0]'] \n",
" ization) \n",
" \n",
" conv4_block1_1_relu (Activatio (None, 14, 14, 256) 0 ['conv4_block1_1_bn[0][0]'] \n",
" n) \n",
" \n",
" conv4_block1_2_pad (ZeroPaddin (None, 16, 16, 256) 0 ['conv4_block1_1_relu[0][0]'] \n",
" g2D) \n",
" \n",
" conv4_block1_2_conv (Conv2D) (None, 14, 14, 256) 589824 ['conv4_block1_2_pad[0][0]'] \n",
" \n",
" conv4_block1_2_bn (BatchNormal (None, 14, 14, 256) 1024 ['conv4_block1_2_conv[0][0]'] \n",
" ization) \n",
" \n",
" conv4_block1_2_relu (Activatio (None, 14, 14, 256) 0 ['conv4_block1_2_bn[0][0]'] \n",
" n) \n",
" \n",
" conv4_block1_0_conv (Conv2D) (None, 14, 14, 1024 525312 ['conv4_block1_preact_relu[0][0]'\n",
" ) ] \n",
" \n",
" conv4_block1_3_conv (Conv2D) (None, 14, 14, 1024 263168 ['conv4_block1_2_relu[0][0]'] \n",
" ) \n",
" \n",
" conv4_block1_out (Add) (None, 14, 14, 1024 0 ['conv4_block1_0_conv[0][0]', \n",
" ) 'conv4_block1_3_conv[0][0]'] \n",
" \n",
" conv4_block2_preact_bn (BatchN (None, 14, 14, 1024 4096 ['conv4_block1_out[0][0]'] \n",
" ormalization) ) \n",
" \n",
" conv4_block2_preact_relu (Acti (None, 14, 14, 1024 0 ['conv4_block2_preact_bn[0][0]'] \n",
" vation) ) \n",
" \n",
" conv4_block2_1_conv (Conv2D) (None, 14, 14, 256) 262144 ['conv4_block2_preact_relu[0][0]'\n",
" ] \n",
" \n",
" conv4_block2_1_bn (BatchNormal (None, 14, 14, 256) 1024 ['conv4_block2_1_conv[0][0]'] \n",
" ization) \n",
" \n",
" conv4_block2_1_relu (Activatio (None, 14, 14, 256) 0 ['conv4_block2_1_bn[0][0]'] \n",
" n) \n",
" \n",
" conv4_block2_2_pad (ZeroPaddin (None, 16, 16, 256) 0 ['conv4_block2_1_relu[0][0]'] \n",
" g2D) \n",
" \n",
" conv4_block2_2_conv (Conv2D) (None, 14, 14, 256) 589824 ['conv4_block2_2_pad[0][0]'] \n",
" \n",
" conv4_block2_2_bn (BatchNormal (None, 14, 14, 256) 1024 ['conv4_block2_2_conv[0][0]'] \n",
" ization) \n",
" \n",
" conv4_block2_2_relu (Activatio (None, 14, 14, 256) 0 ['conv4_block2_2_bn[0][0]'] \n",
" n) \n",
" \n",
" conv4_block2_3_conv (Conv2D) (None, 14, 14, 1024 263168 ['conv4_block2_2_relu[0][0]'] \n",
" ) \n",
" \n",
" conv4_block2_out (Add) (None, 14, 14, 1024 0 ['conv4_block1_out[0][0]', \n",
" ) 'conv4_block2_3_conv[0][0]'] \n",
" \n",
" conv4_block3_preact_bn (BatchN (None, 14, 14, 1024 4096 ['conv4_block2_out[0][0]'] \n",
" ormalization) ) \n",
" \n",
" conv4_block3_preact_relu (Acti (None, 14, 14, 1024 0 ['conv4_block3_preact_bn[0][0]'] \n",
" vation) ) \n",
" \n",
" conv4_block3_1_conv (Conv2D) (None, 14, 14, 256) 262144 ['conv4_block3_preact_relu[0][0]'\n",
" ] \n",
" \n",
" conv4_block3_1_bn (BatchNormal (None, 14, 14, 256) 1024 ['conv4_block3_1_conv[0][0]'] \n",
" ization) \n",
" \n",
" conv4_block3_1_relu (Activatio (None, 14, 14, 256) 0 ['conv4_block3_1_bn[0][0]'] \n",
" n) \n",
" \n",
" conv4_block3_2_pad (ZeroPaddin (None, 16, 16, 256) 0 ['conv4_block3_1_relu[0][0]'] \n",
" g2D) \n",
" \n",
" conv4_block3_2_conv (Conv2D) (None, 14, 14, 256) 589824 ['conv4_block3_2_pad[0][0]'] \n",
" \n",
" conv4_block3_2_bn (BatchNormal (None, 14, 14, 256) 1024 ['conv4_block3_2_conv[0][0]'] \n",
" ization) \n",
" \n",
" conv4_block3_2_relu (Activatio (None, 14, 14, 256) 0 ['conv4_block3_2_bn[0][0]'] \n",
" n) \n",
" \n",
" conv4_block3_3_conv (Conv2D) (None, 14, 14, 1024 263168 ['conv4_block3_2_relu[0][0]'] \n",
" ) \n",
" \n",
" conv4_block3_out (Add) (None, 14, 14, 1024 0 ['conv4_block2_out[0][0]', \n",
" ) 'conv4_block3_3_conv[0][0]'] \n",
" \n",
" conv4_block4_preact_bn (BatchN (None, 14, 14, 1024 4096 ['conv4_block3_out[0][0]'] \n",
" ormalization) ) \n",
" \n",
" conv4_block4_preact_relu (Acti (None, 14, 14, 1024 0 ['conv4_block4_preact_bn[0][0]'] \n",
" vation) ) \n",
" \n",
" conv4_block4_1_conv (Conv2D) (None, 14, 14, 256) 262144 ['conv4_block4_preact_relu[0][0]'\n",
" ] \n",
" \n",
" conv4_block4_1_bn (BatchNormal (None, 14, 14, 256) 1024 ['conv4_block4_1_conv[0][0]'] \n",
" ization) \n",
" \n",
" conv4_block4_1_relu (Activatio (None, 14, 14, 256) 0 ['conv4_block4_1_bn[0][0]'] \n",
" n) \n",
" \n",
" conv4_block4_2_pad (ZeroPaddin (None, 16, 16, 256) 0 ['conv4_block4_1_relu[0][0]'] \n",
" g2D) \n",
" \n",
" conv4_block4_2_conv (Conv2D) (None, 14, 14, 256) 589824 ['conv4_block4_2_pad[0][0]'] \n",
" \n",
" conv4_block4_2_bn (BatchNormal (None, 14, 14, 256) 1024 ['conv4_block4_2_conv[0][0]'] \n",
" ization) \n",
" \n",
" conv4_block4_2_relu (Activatio (None, 14, 14, 256) 0 ['conv4_block4_2_bn[0][0]'] \n",
" n) \n",
" \n",
" conv4_block4_3_conv (Conv2D) (None, 14, 14, 1024 263168 ['conv4_block4_2_relu[0][0]'] \n",
" ) \n",
" \n",
" conv4_block4_out (Add) (None, 14, 14, 1024 0 ['conv4_block3_out[0][0]', \n",
" ) 'conv4_block4_3_conv[0][0]'] \n",
" \n",
" conv4_block5_preact_bn (BatchN (None, 14, 14, 1024 4096 ['conv4_block4_out[0][0]'] \n",
" ormalization) ) \n",
" \n",
" conv4_block5_preact_relu (Acti (None, 14, 14, 1024 0 ['conv4_block5_preact_bn[0][0]'] \n",
" vation) ) \n",
" \n",
" conv4_block5_1_conv (Conv2D) (None, 14, 14, 256) 262144 ['conv4_block5_preact_relu[0][0]'\n",
" ] \n",
" \n",
" conv4_block5_1_bn (BatchNormal (None, 14, 14, 256) 1024 ['conv4_block5_1_conv[0][0]'] \n",
" ization) \n",
" \n",
" conv4_block5_1_relu (Activatio (None, 14, 14, 256) 0 ['conv4_block5_1_bn[0][0]'] \n",
" n) \n",
" \n",
" conv4_block5_2_pad (ZeroPaddin (None, 16, 16, 256) 0 ['conv4_block5_1_relu[0][0]'] \n",
" g2D) \n",
" \n",
" conv4_block5_2_conv (Conv2D) (None, 14, 14, 256) 589824 ['conv4_block5_2_pad[0][0]'] \n",
" \n",
" conv4_block5_2_bn (BatchNormal (None, 14, 14, 256) 1024 ['conv4_block5_2_conv[0][0]'] \n",
" ization) \n",
" \n",
" conv4_block5_2_relu (Activatio (None, 14, 14, 256) 0 ['conv4_block5_2_bn[0][0]'] \n",
" n) \n",
" \n",
" conv4_block5_3_conv (Conv2D) (None, 14, 14, 1024 263168 ['conv4_block5_2_relu[0][0]'] \n",
" ) \n",
" \n",
" conv4_block5_out (Add) (None, 14, 14, 1024 0 ['conv4_block4_out[0][0]', \n",
" ) 'conv4_block5_3_conv[0][0]'] \n",
" \n",
" conv4_block6_preact_bn (BatchN (None, 14, 14, 1024 4096 ['conv4_block5_out[0][0]'] \n",
" ormalization) ) \n",
" \n",
" conv4_block6_preact_relu (Acti (None, 14, 14, 1024 0 ['conv4_block6_preact_bn[0][0]'] \n",
" vation) ) \n",
" \n",
" conv4_block6_1_conv (Conv2D) (None, 14, 14, 256) 262144 ['conv4_block6_preact_relu[0][0]'\n",
" ] \n",
" \n",
" conv4_block6_1_bn (BatchNormal (None, 14, 14, 256) 1024 ['conv4_block6_1_conv[0][0]'] \n",
" ization) \n",
" \n",
" conv4_block6_1_relu (Activatio (None, 14, 14, 256) 0 ['conv4_block6_1_bn[0][0]'] \n",
" n) \n",
" \n",
" conv4_block6_2_pad (ZeroPaddin (None, 16, 16, 256) 0 ['conv4_block6_1_relu[0][0]'] \n",
" g2D) \n",
" \n",
" conv4_block6_2_conv (Conv2D) (None, 14, 14, 256) 589824 ['conv4_block6_2_pad[0][0]'] \n",
" \n",
" conv4_block6_2_bn (BatchNormal (None, 14, 14, 256) 1024 ['conv4_block6_2_conv[0][0]'] \n",
" ization) \n",
" \n",
" conv4_block6_2_relu (Activatio (None, 14, 14, 256) 0 ['conv4_block6_2_bn[0][0]'] \n",
" n) \n",
" \n",
" conv4_block6_3_conv (Conv2D) (None, 14, 14, 1024 263168 ['conv4_block6_2_relu[0][0]'] \n",
" ) \n",
" \n",
" conv4_block6_out (Add) (None, 14, 14, 1024 0 ['conv4_block5_out[0][0]', \n",
" ) 'conv4_block6_3_conv[0][0]'] \n",
" \n",
" conv4_block7_preact_bn (BatchN (None, 14, 14, 1024 4096 ['conv4_block6_out[0][0]'] \n",
" ormalization) ) \n",
" \n",
" conv4_block7_preact_relu (Acti (None, 14, 14, 1024 0 ['conv4_block7_preact_bn[0][0]'] \n",
" vation) ) \n",
" \n",
" conv4_block7_1_conv (Conv2D) (None, 14, 14, 256) 262144 ['conv4_block7_preact_relu[0][0]'\n",
" ] \n",
" \n",
" conv4_block7_1_bn (BatchNormal (None, 14, 14, 256) 1024 ['conv4_block7_1_conv[0][0]'] \n",
" ization) \n",
" \n",
" conv4_block7_1_relu (Activatio (None, 14, 14, 256) 0 ['conv4_block7_1_bn[0][0]'] \n",
" n) \n",
" \n",
" conv4_block7_2_pad (ZeroPaddin (None, 16, 16, 256) 0 ['conv4_block7_1_relu[0][0]'] \n",
" g2D) \n",
" \n",
" conv4_block7_2_conv (Conv2D) (None, 14, 14, 256) 589824 ['conv4_block7_2_pad[0][0]'] \n",
" \n",
" conv4_block7_2_bn (BatchNormal (None, 14, 14, 256) 1024 ['conv4_block7_2_conv[0][0]'] \n",
" ization) \n",
" \n",
" conv4_block7_2_relu (Activatio (None, 14, 14, 256) 0 ['conv4_block7_2_bn[0][0]'] \n",
" n) \n",
" \n",
" conv4_block7_3_conv (Conv2D) (None, 14, 14, 1024 263168 ['conv4_block7_2_relu[0][0]'] \n",
" ) \n",
" \n",
" conv4_block7_out (Add) (None, 14, 14, 1024 0 ['conv4_block6_out[0][0]', \n",
" ) 'conv4_block7_3_conv[0][0]'] \n",
" \n",
" conv4_block8_preact_bn (BatchN (None, 14, 14, 1024 4096 ['conv4_block7_out[0][0]'] \n",
" ormalization) ) \n",
" \n",
" conv4_block8_preact_relu (Acti (None, 14, 14, 1024 0 ['conv4_block8_preact_bn[0][0]'] \n",
" vation) ) \n",
" \n",
" conv4_block8_1_conv (Conv2D) (None, 14, 14, 256) 262144 ['conv4_block8_preact_relu[0][0]'\n",
" ] \n",
" \n",
" conv4_block8_1_bn (BatchNormal (None, 14, 14, 256) 1024 ['conv4_block8_1_conv[0][0]'] \n",
" ization) \n",
" \n",
" conv4_block8_1_relu (Activatio (None, 14, 14, 256) 0 ['conv4_block8_1_bn[0][0]'] \n",
" n) \n",
" \n",
" conv4_block8_2_pad (ZeroPaddin (None, 16, 16, 256) 0 ['conv4_block8_1_relu[0][0]'] \n",
" g2D) \n",
" \n",
" conv4_block8_2_conv (Conv2D) (None, 14, 14, 256) 589824 ['conv4_block8_2_pad[0][0]'] \n",
" \n",
" conv4_block8_2_bn (BatchNormal (None, 14, 14, 256) 1024 ['conv4_block8_2_conv[0][0]'] \n",
" ization) \n",
" \n",
" conv4_block8_2_relu (Activatio (None, 14, 14, 256) 0 ['conv4_block8_2_bn[0][0]'] \n",
" n) \n",
" \n",
" conv4_block8_3_conv (Conv2D) (None, 14, 14, 1024 263168 ['conv4_block8_2_relu[0][0]'] \n",
" ) \n",
" \n",
" conv4_block8_out (Add) (None, 14, 14, 1024 0 ['conv4_block7_out[0][0]', \n",
" ) 'conv4_block8_3_conv[0][0]'] \n",
" \n",
" conv4_block9_preact_bn (BatchN (None, 14, 14, 1024 4096 ['conv4_block8_out[0][0]'] \n",
" ormalization) ) \n",
" \n",
" conv4_block9_preact_relu (Acti (None, 14, 14, 1024 0 ['conv4_block9_preact_bn[0][0]'] \n",
" vation) ) \n",
" \n",
" conv4_block9_1_conv (Conv2D) (None, 14, 14, 256) 262144 ['conv4_block9_preact_relu[0][0]'\n",
" ] \n",
" \n",
" conv4_block9_1_bn (BatchNormal (None, 14, 14, 256) 1024 ['conv4_block9_1_conv[0][0]'] \n",
" ization) \n",
" \n",
" conv4_block9_1_relu (Activatio (None, 14, 14, 256) 0 ['conv4_block9_1_bn[0][0]'] \n",
" n) \n",
" \n",
" conv4_block9_2_pad (ZeroPaddin (None, 16, 16, 256) 0 ['conv4_block9_1_relu[0][0]'] \n",
" g2D) \n",
" \n",
" conv4_block9_2_conv (Conv2D) (None, 14, 14, 256) 589824 ['conv4_block9_2_pad[0][0]'] \n",
" \n",
" conv4_block9_2_bn (BatchNormal (None, 14, 14, 256) 1024 ['conv4_block9_2_conv[0][0]'] \n",
" ization) \n",
" \n",
" conv4_block9_2_relu (Activatio (None, 14, 14, 256) 0 ['conv4_block9_2_bn[0][0]'] \n",
" n) \n",
" \n",
" conv4_block9_3_conv (Conv2D) (None, 14, 14, 1024 263168 ['conv4_block9_2_relu[0][0]'] \n",
" ) \n",
" \n",
" conv4_block9_out (Add) (None, 14, 14, 1024 0 ['conv4_block8_out[0][0]', \n",
" ) 'conv4_block9_3_conv[0][0]'] \n",
" \n",
" conv4_block10_preact_bn (Batch (None, 14, 14, 1024 4096 ['conv4_block9_out[0][0]'] \n",
" Normalization) ) \n",
" \n",
" conv4_block10_preact_relu (Act (None, 14, 14, 1024 0 ['conv4_block10_preact_bn[0][0]']\n",
" ivation) ) \n",
" \n",
" conv4_block10_1_conv (Conv2D) (None, 14, 14, 256) 262144 ['conv4_block10_preact_relu[0][0]\n",
" '] \n",
" \n",
" conv4_block10_1_bn (BatchNorma (None, 14, 14, 256) 1024 ['conv4_block10_1_conv[0][0]'] \n",
" lization) \n",
" \n",
" conv4_block10_1_relu (Activati (None, 14, 14, 256) 0 ['conv4_block10_1_bn[0][0]'] \n",
" on) \n",
" \n",
" conv4_block10_2_pad (ZeroPaddi (None, 16, 16, 256) 0 ['conv4_block10_1_relu[0][0]'] \n",
" ng2D) \n",
" \n",
" conv4_block10_2_conv (Conv2D) (None, 14, 14, 256) 589824 ['conv4_block10_2_pad[0][0]'] \n",
" \n",
" conv4_block10_2_bn (BatchNorma (None, 14, 14, 256) 1024 ['conv4_block10_2_conv[0][0]'] \n",
" lization) \n",
" \n",
" conv4_block10_2_relu (Activati (None, 14, 14, 256) 0 ['conv4_block10_2_bn[0][0]'] \n",
" on) \n",
" \n",
" conv4_block10_3_conv (Conv2D) (None, 14, 14, 1024 263168 ['conv4_block10_2_relu[0][0]'] \n",
" ) \n",
" \n",
" conv4_block10_out (Add) (None, 14, 14, 1024 0 ['conv4_block9_out[0][0]', \n",
" ) 'conv4_block10_3_conv[0][0]'] \n",
" \n",
" conv4_block11_preact_bn (Batch (None, 14, 14, 1024 4096 ['conv4_block10_out[0][0]'] \n",
" Normalization) ) \n",
" \n",
" conv4_block11_preact_relu (Act (None, 14, 14, 1024 0 ['conv4_block11_preact_bn[0][0]']\n",
" ivation) ) \n",
" \n",
" conv4_block11_1_conv (Conv2D) (None, 14, 14, 256) 262144 ['conv4_block11_preact_relu[0][0]\n",
" '] \n",
" \n",
" conv4_block11_1_bn (BatchNorma (None, 14, 14, 256) 1024 ['conv4_block11_1_conv[0][0]'] \n",
" lization) \n",
" \n",
" conv4_block11_1_relu (Activati (None, 14, 14, 256) 0 ['conv4_block11_1_bn[0][0]'] \n",
" on) \n",
" \n",
" conv4_block11_2_pad (ZeroPaddi (None, 16, 16, 256) 0 ['conv4_block11_1_relu[0][0]'] \n",
" ng2D) \n",
" \n",
" conv4_block11_2_conv (Conv2D) (None, 14, 14, 256) 589824 ['conv4_block11_2_pad[0][0]'] \n",
" \n",
" conv4_block11_2_bn (BatchNorma (None, 14, 14, 256) 1024 ['conv4_block11_2_conv[0][0]'] \n",
" lization) \n",
" \n",
" conv4_block11_2_relu (Activati (None, 14, 14, 256) 0 ['conv4_block11_2_bn[0][0]'] \n",
" on) \n",
" \n",
" conv4_block11_3_conv (Conv2D) (None, 14, 14, 1024 263168 ['conv4_block11_2_relu[0][0]'] \n",
" ) \n",
" \n",
" conv4_block11_out (Add) (None, 14, 14, 1024 0 ['conv4_block10_out[0][0]', \n",
" ) 'conv4_block11_3_conv[0][0]'] \n",
" \n",
" conv4_block12_preact_bn (Batch (None, 14, 14, 1024 4096 ['conv4_block11_out[0][0]'] \n",
" Normalization) ) \n",
" \n",
" conv4_block12_preact_relu (Act (None, 14, 14, 1024 0 ['conv4_block12_preact_bn[0][0]']\n",
" ivation) ) \n",
" \n",
" conv4_block12_1_conv (Conv2D) (None, 14, 14, 256) 262144 ['conv4_block12_preact_relu[0][0]\n",
" '] \n",
" \n",
" conv4_block12_1_bn (BatchNorma (None, 14, 14, 256) 1024 ['conv4_block12_1_conv[0][0]'] \n",
" lization) \n",
" \n",
" conv4_block12_1_relu (Activati (None, 14, 14, 256) 0 ['conv4_block12_1_bn[0][0]'] \n",
" on) \n",
" \n",
" conv4_block12_2_pad (ZeroPaddi (None, 16, 16, 256) 0 ['conv4_block12_1_relu[0][0]'] \n",
" ng2D) \n",
" \n",
" conv4_block12_2_conv (Conv2D) (None, 14, 14, 256) 589824 ['conv4_block12_2_pad[0][0]'] \n",
" \n",
" conv4_block12_2_bn (BatchNorma (None, 14, 14, 256) 1024 ['conv4_block12_2_conv[0][0]'] \n",
" lization) \n",
" \n",
" conv4_block12_2_relu (Activati (None, 14, 14, 256) 0 ['conv4_block12_2_bn[0][0]'] \n",
" on) \n",
" \n",
" conv4_block12_3_conv (Conv2D) (None, 14, 14, 1024 263168 ['conv4_block12_2_relu[0][0]'] \n",
" ) \n",
" \n",
" conv4_block12_out (Add) (None, 14, 14, 1024 0 ['conv4_block11_out[0][0]', \n",
" ) 'conv4_block12_3_conv[0][0]'] \n",
" \n",
" conv4_block13_preact_bn (Batch (None, 14, 14, 1024 4096 ['conv4_block12_out[0][0]'] \n",
" Normalization) ) \n",
" \n",
" conv4_block13_preact_relu (Act (None, 14, 14, 1024 0 ['conv4_block13_preact_bn[0][0]']\n",
" ivation) ) \n",
" \n",
" conv4_block13_1_conv (Conv2D) (None, 14, 14, 256) 262144 ['conv4_block13_preact_relu[0][0]\n",
" '] \n",
" \n",
" conv4_block13_1_bn (BatchNorma (None, 14, 14, 256) 1024 ['conv4_block13_1_conv[0][0]'] \n",
" lization) \n",
" \n",
" conv4_block13_1_relu (Activati (None, 14, 14, 256) 0 ['conv4_block13_1_bn[0][0]'] \n",
" on) \n",
" \n",
" conv4_block13_2_pad (ZeroPaddi (None, 16, 16, 256) 0 ['conv4_block13_1_relu[0][0]'] \n",
" ng2D) \n",
" \n",
" conv4_block13_2_conv (Conv2D) (None, 14, 14, 256) 589824 ['conv4_block13_2_pad[0][0]'] \n",
" \n",
" conv4_block13_2_bn (BatchNorma (None, 14, 14, 256) 1024 ['conv4_block13_2_conv[0][0]'] \n",
" lization) \n",
" \n",
" conv4_block13_2_relu (Activati (None, 14, 14, 256) 0 ['conv4_block13_2_bn[0][0]'] \n",
" on) \n",
" \n",
" conv4_block13_3_conv (Conv2D) (None, 14, 14, 1024 263168 ['conv4_block13_2_relu[0][0]'] \n",
" ) \n",
" \n",
" conv4_block13_out (Add) (None, 14, 14, 1024 0 ['conv4_block12_out[0][0]', \n",
" ) 'conv4_block13_3_conv[0][0]'] \n",
" \n",
" conv4_block14_preact_bn (Batch (None, 14, 14, 1024 4096 ['conv4_block13_out[0][0]'] \n",
" Normalization) ) \n",
" \n",
" conv4_block14_preact_relu (Act (None, 14, 14, 1024 0 ['conv4_block14_preact_bn[0][0]']\n",
" ivation) ) \n",
" \n",
" conv4_block14_1_conv (Conv2D) (None, 14, 14, 256) 262144 ['conv4_block14_preact_relu[0][0]\n",
" '] \n",
" \n",
" conv4_block14_1_bn (BatchNorma (None, 14, 14, 256) 1024 ['conv4_block14_1_conv[0][0]'] \n",
" lization) \n",
" \n",
" conv4_block14_1_relu (Activati (None, 14, 14, 256) 0 ['conv4_block14_1_bn[0][0]'] \n",
" on) \n",
" \n",
" conv4_block14_2_pad (ZeroPaddi (None, 16, 16, 256) 0 ['conv4_block14_1_relu[0][0]'] \n",
" ng2D) \n",
" \n",
" conv4_block14_2_conv (Conv2D) (None, 14, 14, 256) 589824 ['conv4_block14_2_pad[0][0]'] \n",
" \n",
" conv4_block14_2_bn (BatchNorma (None, 14, 14, 256) 1024 ['conv4_block14_2_conv[0][0]'] \n",
" lization) \n",
" \n",
" conv4_block14_2_relu (Activati (None, 14, 14, 256) 0 ['conv4_block14_2_bn[0][0]'] \n",
" on) \n",
" \n",
" conv4_block14_3_conv (Conv2D) (None, 14, 14, 1024 263168 ['conv4_block14_2_relu[0][0]'] \n",
" ) \n",
" \n",
" conv4_block14_out (Add) (None, 14, 14, 1024 0 ['conv4_block13_out[0][0]', \n",
" ) 'conv4_block14_3_conv[0][0]'] \n",
" \n",
" conv4_block15_preact_bn (Batch (None, 14, 14, 1024 4096 ['conv4_block14_out[0][0]'] \n",
" Normalization) ) \n",
" \n",
" conv4_block15_preact_relu (Act (None, 14, 14, 1024 0 ['conv4_block15_preact_bn[0][0]']\n",
" ivation) ) \n",
" \n",
" conv4_block15_1_conv (Conv2D) (None, 14, 14, 256) 262144 ['conv4_block15_preact_relu[0][0]\n",
" '] \n",
" \n",
" conv4_block15_1_bn (BatchNorma (None, 14, 14, 256) 1024 ['conv4_block15_1_conv[0][0]'] \n",
" lization) \n",
" \n",
" conv4_block15_1_relu (Activati (None, 14, 14, 256) 0 ['conv4_block15_1_bn[0][0]'] \n",
" on) \n",
" \n",
" conv4_block15_2_pad (ZeroPaddi (None, 16, 16, 256) 0 ['conv4_block15_1_relu[0][0]'] \n",
" ng2D) \n",
" \n",
" conv4_block15_2_conv (Conv2D) (None, 14, 14, 256) 589824 ['conv4_block15_2_pad[0][0]'] \n",
" \n",
" conv4_block15_2_bn (BatchNorma (None, 14, 14, 256) 1024 ['conv4_block15_2_conv[0][0]'] \n",
" lization) \n",
" \n",
" conv4_block15_2_relu (Activati (None, 14, 14, 256) 0 ['conv4_block15_2_bn[0][0]'] \n",
" on) \n",
" \n",
" conv4_block15_3_conv (Conv2D) (None, 14, 14, 1024 263168 ['conv4_block15_2_relu[0][0]'] \n",
" ) \n",
" \n",
" conv4_block15_out (Add) (None, 14, 14, 1024 0 ['conv4_block14_out[0][0]', \n",
" ) 'conv4_block15_3_conv[0][0]'] \n",
" \n",
" conv4_block16_preact_bn (Batch (None, 14, 14, 1024 4096 ['conv4_block15_out[0][0]'] \n",
" Normalization) ) \n",
" \n",
" conv4_block16_preact_relu (Act (None, 14, 14, 1024 0 ['conv4_block16_preact_bn[0][0]']\n",
" ivation) ) \n",
" \n",
" conv4_block16_1_conv (Conv2D) (None, 14, 14, 256) 262144 ['conv4_block16_preact_relu[0][0]\n",
" '] \n",
" \n",
" conv4_block16_1_bn (BatchNorma (None, 14, 14, 256) 1024 ['conv4_block16_1_conv[0][0]'] \n",
" lization) \n",
" \n",
" conv4_block16_1_relu (Activati (None, 14, 14, 256) 0 ['conv4_block16_1_bn[0][0]'] \n",
" on) \n",
" \n",
" conv4_block16_2_pad (ZeroPaddi (None, 16, 16, 256) 0 ['conv4_block16_1_relu[0][0]'] \n",
" ng2D) \n",
" \n",
" conv4_block16_2_conv (Conv2D) (None, 14, 14, 256) 589824 ['conv4_block16_2_pad[0][0]'] \n",
" \n",
" conv4_block16_2_bn (BatchNorma (None, 14, 14, 256) 1024 ['conv4_block16_2_conv[0][0]'] \n",
" lization) \n",
" \n",
" conv4_block16_2_relu (Activati (None, 14, 14, 256) 0 ['conv4_block16_2_bn[0][0]'] \n",
" on) \n",
" \n",
" conv4_block16_3_conv (Conv2D) (None, 14, 14, 1024 263168 ['conv4_block16_2_relu[0][0]'] \n",
" ) \n",
" \n",
" conv4_block16_out (Add) (None, 14, 14, 1024 0 ['conv4_block15_out[0][0]', \n",
" ) 'conv4_block16_3_conv[0][0]'] \n",
" \n",
" conv4_block17_preact_bn (Batch (None, 14, 14, 1024 4096 ['conv4_block16_out[0][0]'] \n",
" Normalization) ) \n",
" \n",
" conv4_block17_preact_relu (Act (None, 14, 14, 1024 0 ['conv4_block17_preact_bn[0][0]']\n",
" ivation) ) \n",
" \n",
" conv4_block17_1_conv (Conv2D) (None, 14, 14, 256) 262144 ['conv4_block17_preact_relu[0][0]\n",
" '] \n",
" \n",
" conv4_block17_1_bn (BatchNorma (None, 14, 14, 256) 1024 ['conv4_block17_1_conv[0][0]'] \n",
" lization) \n",
" \n",
" conv4_block17_1_relu (Activati (None, 14, 14, 256) 0 ['conv4_block17_1_bn[0][0]'] \n",
" on) \n",
" \n",
" conv4_block17_2_pad (ZeroPaddi (None, 16, 16, 256) 0 ['conv4_block17_1_relu[0][0]'] \n",
" ng2D) \n",
" \n",
" conv4_block17_2_conv (Conv2D) (None, 14, 14, 256) 589824 ['conv4_block17_2_pad[0][0]'] \n",
" \n",
" conv4_block17_2_bn (BatchNorma (None, 14, 14, 256) 1024 ['conv4_block17_2_conv[0][0]'] \n",
" lization) \n",
" \n",
" conv4_block17_2_relu (Activati (None, 14, 14, 256) 0 ['conv4_block17_2_bn[0][0]'] \n",
" on) \n",
" \n",
" conv4_block17_3_conv (Conv2D) (None, 14, 14, 1024 263168 ['conv4_block17_2_relu[0][0]'] \n",
" ) \n",
" \n",
" conv4_block17_out (Add) (None, 14, 14, 1024 0 ['conv4_block16_out[0][0]', \n",
" ) 'conv4_block17_3_conv[0][0]'] \n",
" \n",
" conv4_block18_preact_bn (Batch (None, 14, 14, 1024 4096 ['conv4_block17_out[0][0]'] \n",
" Normalization) ) \n",
" \n",
" conv4_block18_preact_relu (Act (None, 14, 14, 1024 0 ['conv4_block18_preact_bn[0][0]']\n",
" ivation) ) \n",
" \n",
" conv4_block18_1_conv (Conv2D) (None, 14, 14, 256) 262144 ['conv4_block18_preact_relu[0][0]\n",
" '] \n",
" \n",
" conv4_block18_1_bn (BatchNorma (None, 14, 14, 256) 1024 ['conv4_block18_1_conv[0][0]'] \n",
" lization) \n",
" \n",
" conv4_block18_1_relu (Activati (None, 14, 14, 256) 0 ['conv4_block18_1_bn[0][0]'] \n",
" on) \n",
" \n",
" conv4_block18_2_pad (ZeroPaddi (None, 16, 16, 256) 0 ['conv4_block18_1_relu[0][0]'] \n",
" ng2D) \n",
" \n",
" conv4_block18_2_conv (Conv2D) (None, 14, 14, 256) 589824 ['conv4_block18_2_pad[0][0]'] \n",
" \n",
" conv4_block18_2_bn (BatchNorma (None, 14, 14, 256) 1024 ['conv4_block18_2_conv[0][0]'] \n",
" lization) \n",
" \n",
" conv4_block18_2_relu (Activati (None, 14, 14, 256) 0 ['conv4_block18_2_bn[0][0]'] \n",
" on) \n",
" \n",
" conv4_block18_3_conv (Conv2D) (None, 14, 14, 1024 263168 ['conv4_block18_2_relu[0][0]'] \n",
" ) \n",
" \n",
" conv4_block18_out (Add) (None, 14, 14, 1024 0 ['conv4_block17_out[0][0]', \n",
" ) 'conv4_block18_3_conv[0][0]'] \n",
" \n",
" conv4_block19_preact_bn (Batch (None, 14, 14, 1024 4096 ['conv4_block18_out[0][0]'] \n",
" Normalization) ) \n",
" \n",
" conv4_block19_preact_relu (Act (None, 14, 14, 1024 0 ['conv4_block19_preact_bn[0][0]']\n",
" ivation) ) \n",
" \n",
" conv4_block19_1_conv (Conv2D) (None, 14, 14, 256) 262144 ['conv4_block19_preact_relu[0][0]\n",
" '] \n",
" \n",
" conv4_block19_1_bn (BatchNorma (None, 14, 14, 256) 1024 ['conv4_block19_1_conv[0][0]'] \n",
" lization) \n",
" \n",
" conv4_block19_1_relu (Activati (None, 14, 14, 256) 0 ['conv4_block19_1_bn[0][0]'] \n",
" on) \n",
" \n",
" conv4_block19_2_pad (ZeroPaddi (None, 16, 16, 256) 0 ['conv4_block19_1_relu[0][0]'] \n",
" ng2D) \n",
" \n",
" conv4_block19_2_conv (Conv2D) (None, 14, 14, 256) 589824 ['conv4_block19_2_pad[0][0]'] \n",
" \n",
" conv4_block19_2_bn (BatchNorma (None, 14, 14, 256) 1024 ['conv4_block19_2_conv[0][0]'] \n",
" lization) \n",
" \n",
" conv4_block19_2_relu (Activati (None, 14, 14, 256) 0 ['conv4_block19_2_bn[0][0]'] \n",
" on) \n",
" \n",
" conv4_block19_3_conv (Conv2D) (None, 14, 14, 1024 263168 ['conv4_block19_2_relu[0][0]'] \n",
" ) \n",
" \n",
" conv4_block19_out (Add) (None, 14, 14, 1024 0 ['conv4_block18_out[0][0]', \n",
" ) 'conv4_block19_3_conv[0][0]'] \n",
" \n",
" conv4_block20_preact_bn (Batch (None, 14, 14, 1024 4096 ['conv4_block19_out[0][0]'] \n",
" Normalization) ) \n",
" \n",
" conv4_block20_preact_relu (Act (None, 14, 14, 1024 0 ['conv4_block20_preact_bn[0][0]']\n",
" ivation) ) \n",
" \n",
" conv4_block20_1_conv (Conv2D) (None, 14, 14, 256) 262144 ['conv4_block20_preact_relu[0][0]\n",
" '] \n",
" \n",
" conv4_block20_1_bn (BatchNorma (None, 14, 14, 256) 1024 ['conv4_block20_1_conv[0][0]'] \n",
" lization) \n",
" \n",
" conv4_block20_1_relu (Activati (None, 14, 14, 256) 0 ['conv4_block20_1_bn[0][0]'] \n",
" on) \n",
" \n",
" conv4_block20_2_pad (ZeroPaddi (None, 16, 16, 256) 0 ['conv4_block20_1_relu[0][0]'] \n",
" ng2D) \n",
" \n",
" conv4_block20_2_conv (Conv2D) (None, 14, 14, 256) 589824 ['conv4_block20_2_pad[0][0]'] \n",
" \n",
" conv4_block20_2_bn (BatchNorma (None, 14, 14, 256) 1024 ['conv4_block20_2_conv[0][0]'] \n",
" lization) \n",
" \n",
" conv4_block20_2_relu (Activati (None, 14, 14, 256) 0 ['conv4_block20_2_bn[0][0]'] \n",
" on) \n",
" \n",
" conv4_block20_3_conv (Conv2D) (None, 14, 14, 1024 263168 ['conv4_block20_2_relu[0][0]'] \n",
" ) \n",
" \n",
" conv4_block20_out (Add) (None, 14, 14, 1024 0 ['conv4_block19_out[0][0]', \n",
" ) 'conv4_block20_3_conv[0][0]'] \n",
" \n",
" conv4_block21_preact_bn (Batch (None, 14, 14, 1024 4096 ['conv4_block20_out[0][0]'] \n",
" Normalization) ) \n",
" \n",
" conv4_block21_preact_relu (Act (None, 14, 14, 1024 0 ['conv4_block21_preact_bn[0][0]']\n",
" ivation) ) \n",
" \n",
" conv4_block21_1_conv (Conv2D) (None, 14, 14, 256) 262144 ['conv4_block21_preact_relu[0][0]\n",
" '] \n",
" \n",
" conv4_block21_1_bn (BatchNorma (None, 14, 14, 256) 1024 ['conv4_block21_1_conv[0][0]'] \n",
" lization) \n",
" \n",
" conv4_block21_1_relu (Activati (None, 14, 14, 256) 0 ['conv4_block21_1_bn[0][0]'] \n",
" on) \n",
" \n",
" conv4_block21_2_pad (ZeroPaddi (None, 16, 16, 256) 0 ['conv4_block21_1_relu[0][0]'] \n",
" ng2D) \n",
" \n",
" conv4_block21_2_conv (Conv2D) (None, 14, 14, 256) 589824 ['conv4_block21_2_pad[0][0]'] \n",
" \n",
" conv4_block21_2_bn (BatchNorma (None, 14, 14, 256) 1024 ['conv4_block21_2_conv[0][0]'] \n",
" lization) \n",
" \n",
" conv4_block21_2_relu (Activati (None, 14, 14, 256) 0 ['conv4_block21_2_bn[0][0]'] \n",
" on) \n",
" \n",
" conv4_block21_3_conv (Conv2D) (None, 14, 14, 1024 263168 ['conv4_block21_2_relu[0][0]'] \n",
" ) \n",
" \n",
" conv4_block21_out (Add) (None, 14, 14, 1024 0 ['conv4_block20_out[0][0]', \n",
" ) 'conv4_block21_3_conv[0][0]'] \n",
" \n",
" conv4_block22_preact_bn (Batch (None, 14, 14, 1024 4096 ['conv4_block21_out[0][0]'] \n",
" Normalization) ) \n",
" \n",
" conv4_block22_preact_relu (Act (None, 14, 14, 1024 0 ['conv4_block22_preact_bn[0][0]']\n",
" ivation) ) \n",
" \n",
" conv4_block22_1_conv (Conv2D) (None, 14, 14, 256) 262144 ['conv4_block22_preact_relu[0][0]\n",
" '] \n",
" \n",
" conv4_block22_1_bn (BatchNorma (None, 14, 14, 256) 1024 ['conv4_block22_1_conv[0][0]'] \n",
" lization) \n",
" \n",
" conv4_block22_1_relu (Activati (None, 14, 14, 256) 0 ['conv4_block22_1_bn[0][0]'] \n",
" on) \n",
" \n",
" conv4_block22_2_pad (ZeroPaddi (None, 16, 16, 256) 0 ['conv4_block22_1_relu[0][0]'] \n",
" ng2D) \n",
" \n",
" conv4_block22_2_conv (Conv2D) (None, 14, 14, 256) 589824 ['conv4_block22_2_pad[0][0]'] \n",
" \n",
" conv4_block22_2_bn (BatchNorma (None, 14, 14, 256) 1024 ['conv4_block22_2_conv[0][0]'] \n",
" lization) \n",
" \n",
" conv4_block22_2_relu (Activati (None, 14, 14, 256) 0 ['conv4_block22_2_bn[0][0]'] \n",
" on) \n",
" \n",
" conv4_block22_3_conv (Conv2D) (None, 14, 14, 1024 263168 ['conv4_block22_2_relu[0][0]'] \n",
" ) \n",
" \n",
" conv4_block22_out (Add) (None, 14, 14, 1024 0 ['conv4_block21_out[0][0]', \n",
" ) 'conv4_block22_3_conv[0][0]'] \n",
" \n",
" conv4_block23_preact_bn (Batch (None, 14, 14, 1024 4096 ['conv4_block22_out[0][0]'] \n",
" Normalization) ) \n",
" \n",
" conv4_block23_preact_relu (Act (None, 14, 14, 1024 0 ['conv4_block23_preact_bn[0][0]']\n",
" ivation) ) \n",
" \n",
" conv4_block23_1_conv (Conv2D) (None, 14, 14, 256) 262144 ['conv4_block23_preact_relu[0][0]\n",
" '] \n",
" \n",
" conv4_block23_1_bn (BatchNorma (None, 14, 14, 256) 1024 ['conv4_block23_1_conv[0][0]'] \n",
" lization) \n",
" \n",
" conv4_block23_1_relu (Activati (None, 14, 14, 256) 0 ['conv4_block23_1_bn[0][0]'] \n",
" on) \n",
" \n",
" conv4_block23_2_pad (ZeroPaddi (None, 16, 16, 256) 0 ['conv4_block23_1_relu[0][0]'] \n",
" ng2D) \n",
" \n",
" conv4_block23_2_conv (Conv2D) (None, 7, 7, 256) 589824 ['conv4_block23_2_pad[0][0]'] \n",
" \n",
" conv4_block23_2_bn (BatchNorma (None, 7, 7, 256) 1024 ['conv4_block23_2_conv[0][0]'] \n",
" lization) \n",
" \n",
" conv4_block23_2_relu (Activati (None, 7, 7, 256) 0 ['conv4_block23_2_bn[0][0]'] \n",
" on) \n",
" \n",
" max_pooling2d_5 (MaxPooling2D) (None, 7, 7, 1024) 0 ['conv4_block22_out[0][0]'] \n",
" \n",
" conv4_block23_3_conv (Conv2D) (None, 7, 7, 1024) 263168 ['conv4_block23_2_relu[0][0]'] \n",
" \n",
" conv4_block23_out (Add) (None, 7, 7, 1024) 0 ['max_pooling2d_5[0][0]', \n",
" 'conv4_block23_3_conv[0][0]'] \n",
" \n",
" conv5_block1_preact_bn (BatchN (None, 7, 7, 1024) 4096 ['conv4_block23_out[0][0]'] \n",
" ormalization) \n",
" \n",
" conv5_block1_preact_relu (Acti (None, 7, 7, 1024) 0 ['conv5_block1_preact_bn[0][0]'] \n",
" vation) \n",
" \n",
" conv5_block1_1_conv (Conv2D) (None, 7, 7, 512) 524288 ['conv5_block1_preact_relu[0][0]'\n",
" ] \n",
" \n",
" conv5_block1_1_bn (BatchNormal (None, 7, 7, 512) 2048 ['conv5_block1_1_conv[0][0]'] \n",
" ization) \n",
" \n",
" conv5_block1_1_relu (Activatio (None, 7, 7, 512) 0 ['conv5_block1_1_bn[0][0]'] \n",
" n) \n",
" \n",
" conv5_block1_2_pad (ZeroPaddin (None, 9, 9, 512) 0 ['conv5_block1_1_relu[0][0]'] \n",
" g2D) \n",
" \n",
" conv5_block1_2_conv (Conv2D) (None, 7, 7, 512) 2359296 ['conv5_block1_2_pad[0][0]'] \n",
" \n",
" conv5_block1_2_bn (BatchNormal (None, 7, 7, 512) 2048 ['conv5_block1_2_conv[0][0]'] \n",
" ization) \n",
" \n",
" conv5_block1_2_relu (Activatio (None, 7, 7, 512) 0 ['conv5_block1_2_bn[0][0]'] \n",
" n) \n",
" \n",
" conv5_block1_0_conv (Conv2D) (None, 7, 7, 2048) 2099200 ['conv5_block1_preact_relu[0][0]'\n",
" ] \n",
" \n",
" conv5_block1_3_conv (Conv2D) (None, 7, 7, 2048) 1050624 ['conv5_block1_2_relu[0][0]'] \n",
" \n",
" conv5_block1_out (Add) (None, 7, 7, 2048) 0 ['conv5_block1_0_conv[0][0]', \n",
" 'conv5_block1_3_conv[0][0]'] \n",
" \n",
" conv5_block2_preact_bn (BatchN (None, 7, 7, 2048) 8192 ['conv5_block1_out[0][0]'] \n",
" ormalization) \n",
" \n",
" conv5_block2_preact_relu (Acti (None, 7, 7, 2048) 0 ['conv5_block2_preact_bn[0][0]'] \n",
" vation) \n",
" \n",
" conv5_block2_1_conv (Conv2D) (None, 7, 7, 512) 1048576 ['conv5_block2_preact_relu[0][0]'\n",
" ] \n",
" \n",
" conv5_block2_1_bn (BatchNormal (None, 7, 7, 512) 2048 ['conv5_block2_1_conv[0][0]'] \n",
" ization) \n",
" \n",
" conv5_block2_1_relu (Activatio (None, 7, 7, 512) 0 ['conv5_block2_1_bn[0][0]'] \n",
" n) \n",
" \n",
" conv5_block2_2_pad (ZeroPaddin (None, 9, 9, 512) 0 ['conv5_block2_1_relu[0][0]'] \n",
" g2D) \n",
" \n",
" conv5_block2_2_conv (Conv2D) (None, 7, 7, 512) 2359296 ['conv5_block2_2_pad[0][0]'] \n",
" \n",
" conv5_block2_2_bn (BatchNormal (None, 7, 7, 512) 2048 ['conv5_block2_2_conv[0][0]'] \n",
" ization) \n",
" \n",
" conv5_block2_2_relu (Activatio (None, 7, 7, 512) 0 ['conv5_block2_2_bn[0][0]'] \n",
" n) \n",
" \n",
" conv5_block2_3_conv (Conv2D) (None, 7, 7, 2048) 1050624 ['conv5_block2_2_relu[0][0]'] \n",
" \n",
" conv5_block2_out (Add) (None, 7, 7, 2048) 0 ['conv5_block1_out[0][0]', \n",
" 'conv5_block2_3_conv[0][0]'] \n",
" \n",
" conv5_block3_preact_bn (BatchN (None, 7, 7, 2048) 8192 ['conv5_block2_out[0][0]'] \n",
" ormalization) \n",
" \n",
" conv5_block3_preact_relu (Acti (None, 7, 7, 2048) 0 ['conv5_block3_preact_bn[0][0]'] \n",
" vation) \n",
" \n",
" conv5_block3_1_conv (Conv2D) (None, 7, 7, 512) 1048576 ['conv5_block3_preact_relu[0][0]'\n",
" ] \n",
" \n",
" conv5_block3_1_bn (BatchNormal (None, 7, 7, 512) 2048 ['conv5_block3_1_conv[0][0]'] \n",
" ization) \n",
" \n",
" conv5_block3_1_relu (Activatio (None, 7, 7, 512) 0 ['conv5_block3_1_bn[0][0]'] \n",
" n) \n",
" \n",
" conv5_block3_2_pad (ZeroPaddin (None, 9, 9, 512) 0 ['conv5_block3_1_relu[0][0]'] \n",
" g2D) \n",
" \n",
" conv5_block3_2_conv (Conv2D) (None, 7, 7, 512) 2359296 ['conv5_block3_2_pad[0][0]'] \n",
" \n",
" conv5_block3_2_bn (BatchNormal (None, 7, 7, 512) 2048 ['conv5_block3_2_conv[0][0]'] \n",
" ization) \n",
" \n",
" conv5_block3_2_relu (Activatio (None, 7, 7, 512) 0 ['conv5_block3_2_bn[0][0]'] \n",
" n) \n",
" \n",
" conv5_block3_3_conv (Conv2D) (None, 7, 7, 2048) 1050624 ['conv5_block3_2_relu[0][0]'] \n",
" \n",
" conv5_block3_out (Add) (None, 7, 7, 2048) 0 ['conv5_block2_out[0][0]', \n",
" 'conv5_block3_3_conv[0][0]'] \n",
" \n",
" post_bn (BatchNormalization) (None, 7, 7, 2048) 8192 ['conv5_block3_out[0][0]'] \n",
" \n",
" post_relu (Activation) (None, 7, 7, 2048) 0 ['post_bn[0][0]'] \n",
" \n",
" flatten_3 (Flatten) (None, 100352) 0 ['post_relu[0][0]'] \n",
" \n",
" dense_5 (Dense) (None, 5) 501765 ['flatten_3[0][0]'] \n",
" \n",
"==================================================================================================\n",
"Total params: 43,128,325\n",
"Trainable params: 501,765\n",
"Non-trainable params: 42,626,560\n",
"__________________________________________________________________________________________________\n"
]
}
],
"source": [
    "# Build the transfer-learning model: the pretrained ResNet base\n",
    "# (frozen, per the summary: 42.6M non-trainable params) with the custom\n",
    "# Dense(5) classification head `prediction` attached as the output.\n",
    "model = Model(inputs=resnet.input, outputs=prediction)\n",
    "\n",
    "# Print a layer-by-layer summary (output shapes, parameter counts).\n",
    "model.summary()"
]
},
{
"cell_type": "code",
"execution_count": 19,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"Epoch 1/25\n"
]
},
{
"name": "stderr",
"output_type": "stream",
"text": [
"/var/folders/3r/c8tg1h051m18qhsdccdysrt40000gn/T/ipykernel_14470/2541214992.py:10: UserWarning: `Model.fit_generator` is deprecated and will be removed in a future version. Please use `Model.fit`, which supports generators.\n",
" r = model.fit_generator(\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"93/93 [==============================] - 197s 2s/step - loss: 1.1542 - accuracy: 0.8938 - val_loss: 0.2841 - val_accuracy: 0.9742\n",
"Epoch 2/25\n",
"93/93 [==============================] - 202s 2s/step - loss: 0.1366 - accuracy: 0.9819 - val_loss: 0.5596 - val_accuracy: 0.9524\n",
"Epoch 3/25\n",
"93/93 [==============================] - 208s 2s/step - loss: 0.0817 - accuracy: 0.9913 - val_loss: 0.7281 - val_accuracy: 0.9416\n",
"Epoch 4/25\n",
"93/93 [==============================] - 213s 2s/step - loss: 0.0254 - accuracy: 0.9953 - val_loss: 0.2856 - val_accuracy: 0.9769\n",
"Epoch 5/25\n",
"93/93 [==============================] - 216s 2s/step - loss: 0.0513 - accuracy: 0.9916 - val_loss: 0.7943 - val_accuracy: 0.9511\n",
"Epoch 6/25\n",
"93/93 [==============================] - 219s 2s/step - loss: 0.0716 - accuracy: 0.9919 - val_loss: 0.4567 - val_accuracy: 0.9715\n",
"Epoch 7/25\n",
"93/93 [==============================] - 221s 2s/step - loss: 0.0669 - accuracy: 0.9916 - val_loss: 0.5951 - val_accuracy: 0.9688\n",
"Epoch 8/25\n",
"93/93 [==============================] - 222s 2s/step - loss: 0.0294 - accuracy: 0.9966 - val_loss: 0.3915 - val_accuracy: 0.9769\n",
"Epoch 9/25\n",
"93/93 [==============================] - 223s 2s/step - loss: 0.0047 - accuracy: 0.9990 - val_loss: 0.5019 - val_accuracy: 0.9688\n",
"Epoch 10/25\n",
"93/93 [==============================] - 224s 2s/step - loss: 0.0159 - accuracy: 0.9976 - val_loss: 0.5905 - val_accuracy: 0.9715\n",
"Epoch 11/25\n",
"93/93 [==============================] - 225s 2s/step - loss: 0.0134 - accuracy: 0.9976 - val_loss: 0.3234 - val_accuracy: 0.9810\n",
"Epoch 12/25\n",
"93/93 [==============================] - 227s 2s/step - loss: 0.1011 - accuracy: 0.9899 - val_loss: 0.5499 - val_accuracy: 0.9728\n",
"Epoch 13/25\n",
"93/93 [==============================] - 225s 2s/step - loss: 0.0076 - accuracy: 0.9983 - val_loss: 0.4216 - val_accuracy: 0.9728\n",
"Epoch 14/25\n",
"93/93 [==============================] - 226s 2s/step - loss: 0.0643 - accuracy: 0.9926 - val_loss: 0.8745 - val_accuracy: 0.9511\n",
"Epoch 15/25\n",
"93/93 [==============================] - 226s 2s/step - loss: 0.0199 - accuracy: 0.9966 - val_loss: 0.4947 - val_accuracy: 0.9715\n",
"Epoch 16/25\n",
"93/93 [==============================] - 226s 2s/step - loss: 5.7203e-04 - accuracy: 0.9997 - val_loss: 0.4923 - val_accuracy: 0.9810\n",
"Epoch 17/25\n",
"93/93 [==============================] - 227s 2s/step - loss: 0.0131 - accuracy: 0.9970 - val_loss: 0.6881 - val_accuracy: 0.9647\n",
"Epoch 18/25\n",
"93/93 [==============================] - 225s 2s/step - loss: 0.0345 - accuracy: 0.9960 - val_loss: 0.4938 - val_accuracy: 0.9823\n",
"Epoch 19/25\n",
"93/93 [==============================] - 228s 2s/step - loss: 0.0126 - accuracy: 0.9987 - val_loss: 0.5642 - val_accuracy: 0.9688\n",
"Epoch 20/25\n",
"93/93 [==============================] - 226s 2s/step - loss: 0.0056 - accuracy: 0.9997 - val_loss: 0.4294 - val_accuracy: 0.9783\n",
"Epoch 21/25\n",
"93/93 [==============================] - 225s 2s/step - loss: 2.7678e-05 - accuracy: 1.0000 - val_loss: 0.4342 - val_accuracy: 0.9783\n",
"Epoch 22/25\n",
"93/93 [==============================] - 226s 2s/step - loss: 5.2474e-07 - accuracy: 1.0000 - val_loss: 0.4337 - val_accuracy: 0.9783\n",
"Epoch 23/25\n",
"93/93 [==============================] - 227s 2s/step - loss: 4.2286e-07 - accuracy: 1.0000 - val_loss: 0.4334 - val_accuracy: 0.9783\n",
"Epoch 24/25\n",
"93/93 [==============================] - 227s 2s/step - loss: 3.5546e-07 - accuracy: 1.0000 - val_loss: 0.4332 - val_accuracy: 0.9783\n",
"Epoch 25/25\n",
"93/93 [==============================] - 227s 2s/step - loss: 3.0831e-07 - accuracy: 1.0000 - val_loss: 0.4024 - val_accuracy: 0.9796\n"
]
}
],
"source": [
"# tell the model what cost and optimization method to use\n",
"model.compile(\n",
" loss='sparse_categorical_crossentropy',\n",
" optimizer='adam',\n",
" metrics=['accuracy']\n",
")\n",
"\n",
"#train_ds_vgg_sw, test_ds_vgg_sw, validation_ds_vgg_sw\n",
"# fit the model\n",
"r = model.fit_generator(\n",
" train_ds_v,\n",
" validation_data=val_ds_v,\n",
" epochs=25,\n",
" steps_per_epoch=len(train_ds_v),\n",
" validation_steps=len(val_ds_v)\n",
")"
]
},
{
"cell_type": "code",
"execution_count": 20,
"metadata": {},
"outputs": [
{
"data": {
"image/png": "iVBORw0KGgoAAAANSUhEUgAAAigAAAGfCAYAAAB1KinVAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjYuMiwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8o6BhiAAAACXBIWXMAAA9hAAAPYQGoP6dpAABwTklEQVR4nO3deXxTVfo/8E+SNum+0R1aWva9IAVEwGWoIiruisvI4vL96YCjMm44CjrOyIwr6qDOuDsjgjiCGy6IooBsgmUtWym0QFeg+5I2ub8/Tm+Sli5ZbnJv2s/79eort8lNckhL8+Q5z3mOTpIkCUREREQaold7AEREREStMUAhIiIizWGAQkRERJrDAIWIiIg0hwEKERERaQ4DFCIiItIcBihERESkOQxQiIiISHMYoBAREZHmMEAhIiIizQlw9Q4///wznnvuOWzfvh2FhYVYuXIlrr766nbP//TTT/H6668jOzsbDQ0NGDp0KJ588klMmTLF6ee0Wq04efIkwsPDodPpXB0yERERqUCSJFRVVSE5ORl6vWs5EZcDlJqaGmRkZOD222/Htdde2+n5P//8My6++GI888wziIqKwrvvvotp06Zhy5YtGDVqlFPPefLkSaSkpLg6VCIiItKAgoIC9OrVy6X76DzZLFCn03WaQWnL0KFDMX36dCxYsMCp8ysqKhAVFYWCggJERES4MVIiIiLytcrKSqSkpKC8vByRkZEu3dflDIqnrFYrqqqqEBMT0+45DQ0NaGhosH1fVVUFAIiIiGCAQkRE5GfcKc/weZHs888/j+rqatx4443tnrNo0SJERkbavji9Q0RE1L34NEBZunQpnnrqKXz88ceIj49v97z58+ejoqLC9lVQUODDURIREZHafDbFs2zZMtx5551YsWIFsrKyOjzXZDLBZDL5aGRERESkNT4JUD766CPcfvvtWLZsGS6//HJfPCUREfkJSZLQ1NQEi8Wi9lDIRQaDAQEBAV5pAeJygFJdXY3Dhw/bvs/Ly0N2djZiYmKQmpqK+fPn48SJE/jggw8AiGmdmTNn4uWXX8a4ceNQVFQEAAgODna5opeIiLoWs9mMwsJC1NbWqj0UclNISAiSkpJgNBoVfVyXlxmvW7cOF1100VnXz5w5E++99x5mzZqFo0ePYt26dQCACy+8ED/99FO75zujsrISkZGRqKio4CoeIqIuwmq14tChQzAYDIiLi4PRaGQzTj8iSRLMZjNKS0thsVjQv3//s5qxefL+7VEfFF9hgEJE1PXU19cjLy8PvXv3RkhIiNrDITfV1tbi2LFjSE9PR1BQUIvbPHn/5l48RESkKldboJO2eOvnx98KIiIi0hwGKERERKQ5DFCIiIhUlpaWhsWLF6v+GFri8714iIiI/N2FF16IkSNHKhYQbNu2DaGhoYo8VlfRrQOUdzbk4UhZNWaOT0P/hHC1h0NERF2IJEmwWCwICOj8rTYuLs4HI/Iv3XqK58tdJ/Hfzfk4Ulaj9lCIiAjiTb3W3KTKl7NdN2bNmoWffvoJL7/8MnQ6HXQ6na3/l06nw9dff43Ro0fDZDJhw4YNyM3NxVVXXYWEhASEhYVhzJgx+P7771s8ZuvpGZ1Oh7feegvXXHMNQkJC0L9/f3z++ecuvZb5+fm46qqrEBYWhoiICNx4440oLi623b5z505cdNFFCA8PR0REBEaPHo1ff/0VAHDs2DFMmzYN0dHRCA0NxdChQ7F69WqXnt9T3TqDEhEcCACorGtUeSRERAQAdY0WDFnwrSrPve8vUxBi7Pxt8eWXX8bBgwcxbNgw/OUvfwEgMiBHjx4FADz66KN4/vnn0adPH0RHR6OgoACXXXYZ/va3v8FkMuGDDz7AtGnTcODAAaSmprb7PE899RSeffZZPPfcc3j11Vdx66234tixY4iJiel0jFar1Rac/PTTT2hqasKcOXMwffp0WyPVW2+9FaNGjcLrr78Og8GA7OxsBA
aK98U5c+bAbDbj559/RmhoKPbt24ewsLBOn1dJ3TtACWoOUOqbVB4JERH5i8jISBiNRoSEhCAxMfGs2//yl7/g4osvtn0fExODjIwM2/dPP/00Vq5cic8//xxz585t93lmzZqFm2++GQDwzDPP4JVXXsHWrVtx6aWXdjrGtWvXYvfu3cjLy0NKSgoA4IMPPsDQoUOxbds2jBkzBvn5+XjooYcwaNAgAED//v1t98/Pz8d1112H4cOHAwD69OnT6XMqrXsHKMHin1/BDAoRkSYEBxqw7y9TVHtuJWRmZrb4vrq6Gk8++SS++uorFBYWoqmpCXV1dcjPz+/wcUaMGGE7Dg0NRUREBEpKSpwaQ05ODlJSUmzBCQAMGTIEUVFRyMnJwZgxYzBv3jzceeed+M9//oOsrCzccMMN6Nu3LwDgj3/8I+655x589913yMrKwnXXXddiPL7QrWtQbBkUBihERJqg0+kQYgxQ5UupfYBar8Z58MEHsXLlSjzzzDNYv349srOzMXz4cJjN5g4fR55ucXxtrFarImMEgCeffBJ79+7F5Zdfjh9++AFDhgzBypUrAQB33nknjhw5gttuuw27d+9GZmYmXn31VcWe2xndOkCJlGtQ6hmgEBGR84xGIywWi1Pnbty4EbNmzcI111yD4cOHIzEx0Vav4i2DBw9GQUEBCgoKbNft27cP5eXlGDJkiO26AQMG4IEHHsB3332Ha6+9Fu+++67ttpSUFNx999349NNP8ac//QlvvvmmV8fcWrcOUOxFsqxBISIi56WlpWHLli04evQoysrKOsxs9O/fH59++imys7Oxc+dO3HLLLYpmQtqSlZWF4cOH49Zbb8WOHTuwdetWzJgxAxdccAEyMzNRV1eHuXPnYt26dTh27Bg2btyIbdu2YfDgwQCA+++/H99++y3y8vKwY8cO/Pjjj7bbfKV7Byic4iEiIjc8+OCDMBgMGDJkCOLi4jqsJ3nxxRcRHR2N8847D9OmTcOUKVNwzjnneHV8Op0On332GaKjo3H++ecjKysLffr0wfLlywEABoMBp06dwowZMzBgwADceOONmDp1Kp566ikAgMViwZw5czB48GBceumlGDBgAF577TWvjvmsf4Pk7MJvFXmyXXNH1h8qxW1vb8WgxHB8c//5ij0uERF1rr6+Hnl5eUhPT0dQUJDawyE3dfRz9OT9u1tnUCLZB4WIiEiTunWAwj4oRERE2tS9A5TmDEp1QxOaLN4tWCIiIiLndesAJTzI3qeuilkUIiIizejWAUqgQY9Qo+gcyF4oRERE2tGtAxSAvVCIiIi0iAFKELvJEhERaQ0DFG4YSEREpDndPkBhLxQiIlJDWloaFi9e3O7ts2bNwtVXX+2z8WhNtw9QOMVDRESkPQxQWCRLRESkOQxQgliDQkREzvv3v/+N5OTks3Ykvuqqq3D77bcDAHJzc3HVVVchISEBYWFhGDNmDL7//nuPnrehoQF//OMfER8fj6CgIEycOBHbtm2z3X7mzBnceuutiIuLQ3BwMPr37493330XAGA2mzF37lwkJSUhKCgIvXv3xqJFizwaj7cFdH5K12bLoHCKh4hIfZIENNaq89yBIYBO1+lpN9xwA+699178+OOPmDx5MgDg9OnT+Oabb7B69WoAQHV1NS677DL87W9/g8lkwgcffIBp06bhwIEDSE1NdWt4Dz/8MP73v//h/fffR+/evfHss89iypQpOHz4MGJiYvDEE09g3759+PrrrxEbG4vDhw+jrq4OAPDKK6/g888/x8cff4zU1FQUFBSgoKDArXH4CgMUFskSEWlHYy3wTLI6z/3YScAY2ulp0dHRmDp1KpYuXWoLUD755BPExsbioosuAgBkZGQgIyPDdp+nn34aK1euxOeff465c+e6PLSamhq8/vrreO+99zB16lQAwJtvvok1a9bg7bffxkMPPYT8/HyMGjUKmZmZAEQRriw/Px/9+/fHxIkTodPp0Lt3b5
fH4Guc4uGGgURE5KJbb70V//vf/9DQ0AAA+PDDD3HTTTdBrxdvq9XV1XjwwQcxePBgREVFISwsDDk5OcjPz3fr+XJ
"text/plain": [
"<Figure size 640x480 with 1 Axes>"
]
},
"metadata": {},
"output_type": "display_data"
},
{
"data": {
"text/plain": [
"<Figure size 640x480 with 0 Axes>"
]
},
"metadata": {},
"output_type": "display_data"
}
],
"source": [
"# loss\n",
"plt.plot(r.history['loss'], label='train loss')\n",
"plt.plot(r.history['val_loss'], label='val loss')\n",
"plt.legend()\n",
"plt.show()\n",
"plt.savefig('LossVal_loss')\n",
"\n"
]
},
{
"cell_type": "code",
"execution_count": 21,
"metadata": {},
"outputs": [
{
"data": {
"image/png": "iVBORw0KGgoAAAANSUhEUgAAAjEAAAGdCAYAAADjWSL8AAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjYuMiwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8o6BhiAAAACXBIWXMAAA9hAAAPYQGoP6dpAABpgUlEQVR4nO3deViU5foH8O8MMAzIqgiIgoDirpAbqamZFkp6zDYtO6l1rEwr8/TrZMe07JxsNZds87RalpVLtlmKuea+76CoILKq7Nsw8/7+eJgBlGWGWd534Pu5Li6G4Z15H8Zx5p7nuZ/7VkmSJIGIiIjIyajlHgARERFRYzCIISIiIqfEIIaIiIicEoMYIiIickoMYoiIiMgpMYghIiIip8QghoiIiJwSgxgiIiJySq5yD8BWDAYDLl++DG9vb6hUKrmHQ0RERGaQJAkFBQUICQmBWm3Z3EqTCWIuX76M0NBQuYdBREREjZCamop27dpZdJsmE8R4e3sDEA+Cj4+PzKMhIiIic+Tn5yM0NNT0Pm6JJhPEGJeQfHx8GMQQERE5mcakgjCxl4iIiJwSgxgiIiJySgxiiIiIyCkxiCEiIiKnxCCGiIiInBKDGCIiInJKDGKIiIjIKTGIISIiIqfEIIaIiIicksVBzLZt2zBmzBiEhIRApVJh3bp1Dd5my5Yt6N27N9zd3dGxY0d8/vnnNxyzbNkyhIeHQ6vVIjY2Fnv37rV0aERERNSMWBzEFBUVITo6GsuWLTPr+PPnz+POO+/EsGHDcPjwYcycORP/+Mc/8Pvvv5uOWbVqFWbNmoV58+bh4MGDiI6ORlxcHLKysiwdHhERETUTKkmSpEbfWKXC2rVrcdddd9V5zL/+9S/88ssvOH78uOm6CRMmIDc3Fxs2bAAAxMbGol+/fnjvvfcAAAaDAaGhoXjqqafwwgsvmDWW/Px8+Pr6Ii8vj72TiIiInIQ17992bwC5a9cujBgxosZ1cXFxmDlzJgCgvLwcBw4cwOzZs02/V6vVGDFiBHbt2lXn/ZaVlaGsrMz0c35+vm0HTkRETkmSJJRVGJBfqkNhaQUKSitQWFaBglJdtcviu05vkHu4TmXW7Z3grXWTexgmdg9iMjIyEBQUVOO6oKAg5Ofno6SkBNeuXYNer6/1mNOnT9d5vwsWLMArr7xilzETETmKTm/AHycyoVYBI7oFwc2F+y1qc62oHImZBUjMLMDlvFIUVAtQCsoqxOWyqusqDI1eZKB6TLu1Q/MKYuxl9uzZmDVrlunn/Px8hIaGyjgiIiLzVegNWHMoDUs3JyH1agkAoI2vFlMGhWNC/zD4KOiNwpHyS3VIyixAYmYhzmQUICmrAGcyCpFTWNbwja+jUgFe7q7wdneFt9YNXlpXeGtdxXVaN3i5u0DjyqDREp4aZYUNdh9NcHAwMjMza1yXmZkJHx8feHh4wMXFBS4uLrUeExwcXOf9uru7w93d3S5jJrK3rPxSHEy5hg6tvdChtRfUapXcQyIH0RskrD+ShsWbknDhSjEAIMDLHSoVkJ5Xitd+PY0lCWfxQP9QTBkUgRA/D5lHbB/F5RVIyizEmcwCU9CSmFmA9LzSOm/Tzt8DnYK8EdbSEz4ebpXBiWtlcOIGL3dX+FT72dPNhf+3mji7BzEDBgzAr7/+WuO6jRs3YsCAAQAAjUaDPn36ICEhwZQgbDAYkJCQgBkzZth7eEQOozdI2JaYjZV7U7D5dBb0ldPdPlpX9G7vjz5h/ujT3h/RoX5o4a6sTztkPYNBwi/H0rFoUyLOZRcBAFq20OCJoZH4+83hUKuBHw9fxvJtyUjKKsTy7efx2c4LGN2rDf4xOBI92vrK/Bc03tWicmxLzMaZzAIkZhQgMavANPtUm2AfLToFe6NToJf4HuSNqEAv/r+gG1i8O6mwsBBnz54FANx0001YuHAhhg0bhpYtWyIsLAyzZ89GWloavvzySwBii3WPHj0wffp0PP
LII9i8eTOefvpp/PLLL4iLiwMgtlhPmjQJH330Efr3749Fixbhu+++w+nTp2/IlakLdyeRUl3OLcF3+1Px3b5UXK72KbNjoBfSrpWgRKevcbxaBXRt44M+7UVQ0zvMH+38PaBSNd9PlClXitHO38MpP1UbDBL+OJmBdzcm4UxmAQDAz9MNjw2JxKQB4Te8MUuShC2J2Vi+LRl/nbtiun5gh1aYOiQSt3Zq7TTPheTsQnyy4zx+OHAJZRU3JtAGeLmjU5AXOgWJQKVzsBc6BnrD16N5LqU1V9a8f1scxGzZsgXDhg274fpJkybh888/x+TJk3HhwgVs2bKlxm2effZZnDx5Eu3atcNLL72EyZMn17j9e++9h7feegsZGRmIiYnBkiVLEBsba/a4GMSQklToDdh8Ogvf7E3B1sRsGHMM/TzdcE/vdpjQLxRRQd7Q6Q04nV6AAxev4kBKLg5evIa03Bs/oQZ6u1cFNe390T3EB+6uLg7+q+Tx0dZzWPDbacRGtMQnk/vBy0k+jUuShIRTWVi4MREn08XuSW+tK6YOjsSUQeFmJUceT8vD8u3J+PloumnmrlOQF/4xOBJjY0IU+RyQJAkHLl7Dx9uSsfFUJozvMF2CvdE33N8UsHQK8kbLFhp5B0uK4NAgRqkYxJASpF4txqp9qfhufyqyCqoSEW+ObIkH+ochrnswtG71v/Gk55Xg4MVcHLh4DQdSruFEWt4NOy00rmr0bOtrmqnp2sYbLjaapQjy0Spmh8zRS7m4+/2/TH9/7zA/fDalv6I/qRtnUt7dmIijl/IAiOTSRwaF49HBkY0ae1puCT7feR7f7E1FYVkFAKC1tzsmDwzHQ7Ht4esp/+OhN0j440QGPt6ejEMpuabrR3QNxNTBkegf0dJpZpDIsRjEgEEMyae8woBNpzLxzd4U7DibY/rk2aqFBvf2aYfx/UIR2dqr0fdfqtPj6KU8EdRcvIZDKddwpajcRqO/UftWnlj12AAE+2rtdg5zFJdXYPSSHUjOKcLNkS1xKr0AeSU69GjrgxWPxMJfYZ/iJUnCzrNXsHDjGRysfBP31Lhg0sBwPDY40ibjzS/V4du9Kfh0xwVk5JeaznF/31A8eksEQlt6Wn0OSxWXV+D7/ZfwyY7zSLkqEpU1rmrc07stHr0lAh0DvR0+JnIuDGLAIIYc73xOEb7dl4If9l+qEVQMjgrAhH5huL1bkF22b0qShItXik0zNQcvXsOFK0U2ue8KvYQKg4S+7f3xzWM3yzojM3vNMXyzNwVBPu7Y8MwQpOeV4u+f7MGVonJ0DvLGV/+IRWtvZexQ3J18BQs3JmLv+asAAK2bGg8PCMdjQyIR4GX7MZZXGPDLscv4eNt5nKpcqlKrgFE92mDqkEjEhPrZ/JzXyyooxZd/XcSK3ReRV6IDIJZLH765Pf4+IFwx/zakfAxiwCCGHKNUp8fvJzLwzd4U7E6+aro+0Nsd9/Vth/F9wxDWyvGfhm3lQk4RxizdgYKyCkwdHIF/39lNlnH8cSIDj604AAD46tFY3BIVAAA4m1WAB5fvQVZBGSIDWuDrqbFo4yvfFuQDF6/inT8STQm4Glc1JsaGYdqtHRDobf+ZLOPsz8fbk7EtMdt0fYivFlFB3ugcLHb1dAryRlSQl01qfCRlFuB/289j7aE0lFdWu23fyhP/uCUC9/Rpp7g6IqR8DGLAIIbs78/TWfjn90dwtXLWRaUCbu3UGg/0D8NtXQLhqpA8EmttOJ6BJ74SAcSHD/XGyB5tHHr+rIJSjFy0HVeLyvGPWyIwZ3TNQOpCThEm/m8P0nJLENrSAyv/cbPDl1GyCkoxZ+1x/HFS1Ldyc1FhQr8wTB/WUbZluFPp+fjf9vNYfyQNOv2NL+sqlaiz0jnIWwQ4lYFNh9ZeDeZpSZKE3clXsXx7MjafrmrMe1OYHx4fEonbuwXbLCeLmh8GMWAQQ/b1+c7zmP/zSRgkUVX1/r6huL9fKNo20UJkr/
16Ch9vS4a3uyvWP3ULIgJaOOS8kiRh8mf7sDUxG12CvfHjjEG17sC5dK0YE/+3BxevFKONrxYrp97ssDH+fPQy5qw
"text/plain": [
"<Figure size 640x480 with 1 Axes>"
]
},
"metadata": {},
"output_type": "display_data"
},
{
"data": {
"text/plain": [
"<Figure size 640x480 with 0 Axes>"
]
},
"metadata": {},
"output_type": "display_data"
}
],
"source": [
"# accuracies\n",
"plt.plot(r.history['accuracy'], label='train acc')\n",
"plt.plot(r.history['val_accuracy'], label='val acc')\n",
"plt.legend()\n",
"plt.show()\n",
"plt.savefig('AccVal_acc')\n",
"\n"
]
},
{
"cell_type": "code",
"execution_count": 22,
"metadata": {},
"outputs": [],
"source": [
"model.save('resnet_1.h5')"
]
},
{
"cell_type": "code",
"execution_count": 24,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"29/29 [==============================] - 55s 2s/step - loss: 0.3070 - accuracy: 0.9828\n"
]
},
{
"data": {
"text/plain": [
"[0.30702900886535645, 0.982758641242981]"
]
},
"execution_count": 24,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"model.evaluate(test_ds_v)"
]
}
],
"metadata": {
"kernelspec": {
"display_name": "Python 3",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.10.1"
},
"orig_nbformat": 4,
"vscode": {
"interpreter": {
"hash": "aee8b7b246df8f9039afb4144a1f6fd8d2ca17a180786b69acc140d282b71a49"
}
}
},
"nbformat": 4,
"nbformat_minor": 2
}