Symulowanie-wizualne/sw_lab9-10_1.ipynb

1330 lines
164 KiB
Plaintext
Raw Normal View History

2023-01-06 03:02:47 +01:00
{
"cells": [
{
"attachments": {},
"cell_type": "markdown",
"metadata": {},
"source": [
"### Aleksandra Jonas, Aleksandra Gronowska, Iwona Christop\n",
2023-01-07 22:15:23 +01:00
"# Zadanie 9-10, zadanie 1 - VGG16 + ResNet on train_test_sw "
2023-01-06 03:02:47 +01:00
]
},
{
"attachments": {},
"cell_type": "markdown",
"metadata": {},
"source": [
"### Przygotowanie danych"
]
},
{
"cell_type": "code",
2023-01-07 22:15:23 +01:00
"execution_count": 60,
2023-01-06 03:02:47 +01:00
"metadata": {},
"outputs": [],
"source": [
"from IPython.display import Image, display"
]
},
{
"cell_type": "code",
2023-01-07 22:15:23 +01:00
"execution_count": 61,
2023-01-06 03:02:47 +01:00
"id": "2fe63b50",
"metadata": {},
"outputs": [],
"source": [
"import sys\n",
"import subprocess\n",
"import pkg_resources\n",
"import numpy as np\n",
"\n",
"required = {'scikit-image'}\n",
"installed = {pkg.key for pkg in pkg_resources.working_set}\n",
"missing = required - installed\n",
"# VGG16 / ResNet50 expect inputs of shape (224, 224, 3)\n",
"newSize = (224, 224)\n",
"\n",
"if missing:\n",
"    python = sys.executable\n",
"    subprocess.check_call([python, '-m', 'pip', 'install', *missing], stdout=subprocess.DEVNULL)\n",
"\n",
"def _read_image(path):\n",
"    \"\"\"Read one image file: force 3 RGB channels, resize to newSize, scale to [0, 1].\"\"\"\n",
"    from skimage.io import imread\n",
"    import cv2 as cv\n",
"\n",
"    img = imread(path)  # ndarray, (H, W) for grayscale or (H, W, C) for colour\n",
"    if img.ndim == 2:\n",
"        # grayscale input: replicate to 3 channels so the slice below is valid\n",
"        # (the previous version raised IndexError on such files)\n",
"        img = cv.cvtColor(img, cv.COLOR_GRAY2RGB)\n",
"    img = img[:, :, :3]  # drop a possible alpha channel\n",
"    img = cv.resize(img, newSize, interpolation=cv.INTER_AREA)\n",
"    return img / 255  # normalise pixel values to [0, 1]\n",
"\n",
"def load_train_data(input_dir):\n",
"    \"\"\"Load training images from the per-class sub-directories of input_dir.\n",
"\n",
"    Returns a dict with keys 'values' (ndarray of images), 'categories_name',\n",
"    'categories_count' (images per folder) and 'labels' (one class name per image).\n",
"    \"\"\"\n",
"    import os\n",
"    from pathlib import Path\n",
"\n",
"    image_dir = Path(input_dir)\n",
"    categories_name = []\n",
"    for file in os.listdir(image_dir):\n",
"        d = os.path.join(image_dir, file)\n",
"        if os.path.isdir(d):\n",
"            categories_name.append(file)\n",
"\n",
"    folders = [directory for directory in image_dir.iterdir() if directory.is_dir()]\n",
"\n",
"    train_img = []\n",
"    categories_count = []\n",
"    labels = []\n",
"    for direc in folders:\n",
"        count = 0\n",
"        for obj in direc.iterdir():\n",
"            # skip stray Windows metadata files\n",
"            if os.path.isfile(obj) and os.path.basename(os.path.normpath(obj)) != 'desktop.ini':\n",
"                labels.append(os.path.basename(os.path.normpath(direc)))\n",
"                count += 1\n",
"                train_img.append(_read_image(obj))\n",
"        categories_count.append(count)\n",
"\n",
"    return {\n",
"        \"values\": np.array(train_img),\n",
"        \"categories_name\": categories_name,\n",
"        \"categories_count\": categories_count,\n",
"        \"labels\": labels,\n",
"    }\n",
"\n",
"def load_test_data(input_dir):\n",
"    \"\"\"Load test images listed in the test_labels.json next to input_dir.\n",
"\n",
"    Returns the same dict layout as load_train_data.\n",
"    \"\"\"\n",
"    import json\n",
"    from pathlib import Path\n",
"\n",
"    image_path = Path(input_dir)\n",
"    labels_path = image_path.parents[0] / 'test_labels.json'\n",
"    objects = json.loads(labels_path.read_text())\n",
"\n",
"    # count consecutive runs of identical labels; assumes the JSON entries\n",
"    # are grouped by class — TODO confirm against test_labels.json\n",
"    categories_name = []\n",
"    categories_count = []\n",
"    count = 0\n",
"    c = objects[0]['value']\n",
"    for e in objects:\n",
"        if e['value'] != c:\n",
"            categories_count.append(count)\n",
"            c = e['value']\n",
"            count = 1\n",
"        else:\n",
"            count += 1\n",
"        if e['value'] not in categories_name:\n",
"            categories_name.append(e['value'])\n",
"    categories_count.append(count)\n",
"\n",
"    test_img = []\n",
"    labels = []\n",
"    for e in objects:\n",
"        test_img.append(_read_image(image_path / e['filename']))\n",
"        labels.append(e['value'])\n",
"\n",
"    return {\n",
"        \"values\": np.array(test_img),\n",
"        \"categories_name\": categories_name,\n",
"        \"categories_count\": categories_count,\n",
"        \"labels\": labels,\n",
"    }"
]
},
{
"cell_type": "code",
2023-01-07 22:15:23 +01:00
"execution_count": 62,
2023-01-06 03:02:47 +01:00
"metadata": {},
"outputs": [],
"source": [
"def create_tf_ds(X_train, y_train_enc, X_validate, y_validate_enc, X_test, y_test_enc):\n",
"    \"\"\"Wrap the three numpy splits in batched tf.data pipelines.\n",
"\n",
"    Only the training set is shuffled: evaluation metrics do not depend on\n",
"    sample order, and the previous version shuffled validation/test with the\n",
"    *training* buffer size (a copy-paste bug).\n",
"    \"\"\"\n",
"    import tensorflow as tf\n",
"\n",
"    train_ds = tf.data.Dataset.from_tensor_slices((X_train, y_train_enc))\n",
"    validation_ds = tf.data.Dataset.from_tensor_slices((X_validate, y_validate_enc))\n",
"    test_ds = tf.data.Dataset.from_tensor_slices((X_test, y_test_enc))\n",
"\n",
"    train_ds_size = tf.data.experimental.cardinality(train_ds).numpy()\n",
"    test_ds_size = tf.data.experimental.cardinality(test_ds).numpy()\n",
"    validation_ds_size = tf.data.experimental.cardinality(validation_ds).numpy()\n",
"\n",
"    print(\"Training data size:\", train_ds_size)\n",
"    print(\"Test data size:\", test_ds_size)\n",
"    print(\"Validation data size:\", validation_ds_size)\n",
"\n",
"    train_ds = (train_ds\n",
"                .shuffle(buffer_size=train_ds_size)\n",
"                .batch(batch_size=32, drop_remainder=True))\n",
"    # evaluation sets are only batched; NOTE: drop_remainder=True silently\n",
"    # discards up to 31 trailing samples per split\n",
"    test_ds = test_ds.batch(batch_size=32, drop_remainder=True)\n",
"    validation_ds = validation_ds.batch(batch_size=32, drop_remainder=True)\n",
"\n",
"    return train_ds, test_ds, validation_ds"
]
},
{
"cell_type": "code",
2023-01-07 22:15:23 +01:00
"execution_count": 63,
2023-01-06 03:02:47 +01:00
"metadata": {},
"outputs": [],
"source": [
"def get_run_logdir(root_logdir):\n",
"    \"\"\"Return a fresh, timestamped run directory path under root_logdir.\"\"\"\n",
"    import os\n",
"    import time\n",
"\n",
"    stamp = time.strftime(\"run_%Y_%m_%d-%H_%M_%S\")\n",
"    return os.path.join(root_logdir, stamp)"
]
},
{
"cell_type": "code",
2023-01-07 22:15:23 +01:00
"execution_count": 64,
2023-01-06 03:02:47 +01:00
"metadata": {},
"outputs": [],
"source": [
"def diagram_setup(model_name):\n",
"    \"\"\"Create a timestamped TensorBoard callback for this model's log directory.\n",
"\n",
"    The previous version built the callback and silently discarded it; it is\n",
"    now returned so callers can pass it to model.fit (existing callers that\n",
"    ignore the return value are unaffected).\n",
"\n",
"    NOTE(review): the escaped-backslash path separators are Windows-specific;\n",
"    consider building the path with os.path.join for portability.\n",
"    \"\"\"\n",
"    from tensorflow import keras\n",
"    import os\n",
"\n",
"    root_logdir = os.path.join(os.curdir, f\"logs\\\\fit\\\\{model_name}\\\\\")\n",
"    run_logdir = get_run_logdir(root_logdir)\n",
"    return keras.callbacks.TensorBoard(run_logdir)"
]
},
{
"cell_type": "code",
2023-01-07 22:15:23 +01:00
"execution_count": 65,
2023-01-06 03:02:47 +01:00
"id": "cc941c5a",
"metadata": {},
"outputs": [],
"source": [
"# Load the raw train/test splits from disk with the loader helpers\n",
"train_raw = load_train_data(\"./train_test_sw/train_sw\")\n",
"values_train = train_raw['values']\n",
"labels_train = train_raw['labels']\n",
"\n",
"test_raw = load_test_data(\"./train_test_sw/test_sw\")\n",
"X_test = test_raw['values']\n",
"y_test = test_raw['labels']"
]
},
{
"cell_type": "code",
2023-01-07 22:15:23 +01:00
"execution_count": 66,
2023-01-06 03:02:47 +01:00
"id": "25040ac9",
"metadata": {},
"outputs": [],
"source": [
"from sklearn.model_selection import train_test_split\n",
"\n",
"# Hold out 20% of the training pool for validation; the fixed seed keeps\n",
"# the split reproducible across kernel restarts\n",
"X_train, X_validate, y_train, y_validate = train_test_split(\n",
"    values_train, labels_train, test_size=0.2, random_state=42)"
]
},
{
"cell_type": "code",
2023-01-07 22:15:23 +01:00
"execution_count": 67,
2023-01-06 03:02:47 +01:00
"id": "a1fe47e6",
"metadata": {},
"outputs": [],
"source": [
"from sklearn.preprocessing import LabelEncoder\n",
"\n",
"# Fit the encoder on the training labels only and reuse the same mapping for\n",
"# validation/test.  The previous version called fit_transform on every split,\n",
"# which can assign different integer ids if a split is missing a class.\n",
"class_le = LabelEncoder()\n",
"y_train_enc = class_le.fit_transform(y_train)\n",
"y_validate_enc = class_le.transform(y_validate)\n",
"y_test_enc = class_le.transform(y_test)"
]
},
{
"cell_type": "code",
2023-01-07 22:15:23 +01:00
"execution_count": 68,
2023-01-06 03:02:47 +01:00
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
2023-01-07 22:15:23 +01:00
"Training data size: 821\n",
2023-01-06 03:02:47 +01:00
"Test data size: 259\n",
"Validation data size: 206\n"
]
}
],
"source": [
"train_ds, test_ds, validation_ds = create_tf_ds(X_train, y_train_enc, X_validate, y_validate_enc, X_test, y_test_enc)"
]
},
{
"attachments": {},
"cell_type": "markdown",
"metadata": {},
"source": [
"## VGG16"
]
},
{
"cell_type": "code",
2023-01-07 22:15:23 +01:00
"execution_count": 69,
2023-01-06 03:02:47 +01:00
"metadata": {},
"outputs": [],
"source": [
"diagram_setup('vgg_sw')"
]
},
{
"cell_type": "code",
2023-01-07 22:15:23 +01:00
"execution_count": 70,
2023-01-06 03:02:47 +01:00
"metadata": {},
"outputs": [],
"source": [
"import keras,os\n",
"from keras.models import Sequential\n",
"from keras.layers import Dense, Conv2D, MaxPool2D , Flatten\n",
"from keras.preprocessing.image import ImageDataGenerator\n",
"import numpy as np\n",
"\n",
"# VGG16 topology (Simonyan & Zisserman, 2014).  Two fixes vs. the previous\n",
"# version: the redundant input_shape on the second conv layer is dropped, and\n",
"# the missing fifth max-pool (after the last 512-filter block) is restored,\n",
"# which also shrinks the first Dense layer from ~411M to ~102M parameters.\n",
"model = keras.models.Sequential([\n",
"    keras.layers.Conv2D(filters=64, kernel_size=(3,3), activation='relu', input_shape=(224,224,3), padding=\"same\"),\n",
"    keras.layers.Conv2D(filters=64, kernel_size=(3,3), activation='relu', padding=\"same\"),\n",
"    keras.layers.MaxPool2D(pool_size=(2,2), strides=(2,2)),\n",
"    keras.layers.Conv2D(filters=128, kernel_size=(3,3), padding=\"same\", activation=\"relu\"),\n",
"    keras.layers.Conv2D(filters=128, kernel_size=(3,3), padding=\"same\", activation=\"relu\"),\n",
"    keras.layers.MaxPool2D(pool_size=(2,2), strides=(2,2)),\n",
"    keras.layers.Conv2D(filters=256, kernel_size=(3,3), padding=\"same\", activation=\"relu\"),\n",
"    keras.layers.Conv2D(filters=256, kernel_size=(3,3), padding=\"same\", activation=\"relu\"),\n",
"    keras.layers.Conv2D(filters=256, kernel_size=(3,3), padding=\"same\", activation=\"relu\"),\n",
"    keras.layers.MaxPool2D(pool_size=(2,2), strides=(2,2)),\n",
"    keras.layers.Conv2D(filters=512, kernel_size=(3,3), padding=\"same\", activation=\"relu\"),\n",
"    keras.layers.Conv2D(filters=512, kernel_size=(3,3), padding=\"same\", activation=\"relu\"),\n",
"    keras.layers.Conv2D(filters=512, kernel_size=(3,3), padding=\"same\", activation=\"relu\"),\n",
"    keras.layers.MaxPool2D(pool_size=(2,2), strides=(2,2)),\n",
"    keras.layers.Conv2D(filters=512, kernel_size=(3,3), padding=\"same\", activation=\"relu\"),\n",
"    keras.layers.Conv2D(filters=512, kernel_size=(3,3), padding=\"same\", activation=\"relu\"),\n",
"    keras.layers.Conv2D(filters=512, kernel_size=(3,3), padding=\"same\", activation=\"relu\"),\n",
"    keras.layers.MaxPool2D(pool_size=(2,2), strides=(2,2)),  # restored pool5\n",
"    keras.layers.Flatten(),\n",
"    keras.layers.Dense(units = 4096, activation='relu'),\n",
"    keras.layers.Dense(units = 4096, activation='relu'),\n",
"    keras.layers.Dense(units = 5, activation='softmax')  # 5 target classes\n",
"])"
]
},
{
"cell_type": "code",
2023-01-07 22:15:23 +01:00
"execution_count": 71,
2023-01-06 03:02:47 +01:00
"metadata": {},
2023-01-07 22:15:23 +01:00
"outputs": [],
2023-01-06 03:02:47 +01:00
"source": [
"from keras.optimizers import Adam\n",
"\n",
"# `lr` is deprecated in modern Keras — `learning_rate` is the supported name\n",
"opt = Adam(learning_rate=0.001)\n",
"model.compile(optimizer=opt, loss=keras.losses.sparse_categorical_crossentropy, metrics=['accuracy'])"
]
},
{
"cell_type": "code",
2023-01-07 22:15:23 +01:00
"execution_count": 72,
2023-01-06 03:02:47 +01:00
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
2023-01-06 13:34:05 +01:00
"Model: \"sequential_1\"\n",
2023-01-06 03:02:47 +01:00
"_________________________________________________________________\n",
" Layer (type) Output Shape Param # \n",
"=================================================================\n",
2023-01-06 13:34:05 +01:00
" conv2d_13 (Conv2D) (None, 224, 224, 64) 1792 \n",
2023-01-06 03:02:47 +01:00
" \n",
2023-01-06 13:34:05 +01:00
" conv2d_14 (Conv2D) (None, 224, 224, 64) 36928 \n",
2023-01-06 03:02:47 +01:00
" \n",
2023-01-06 13:34:05 +01:00
" max_pooling2d_4 (MaxPooling (None, 112, 112, 64) 0 \n",
" 2D) \n",
2023-01-06 03:02:47 +01:00
" \n",
2023-01-06 13:34:05 +01:00
" conv2d_15 (Conv2D) (None, 112, 112, 128) 73856 \n",
2023-01-06 03:02:47 +01:00
" \n",
2023-01-06 13:34:05 +01:00
" conv2d_16 (Conv2D) (None, 112, 112, 128) 147584 \n",
2023-01-06 03:02:47 +01:00
" \n",
2023-01-06 13:34:05 +01:00
" max_pooling2d_5 (MaxPooling (None, 56, 56, 128) 0 \n",
2023-01-06 03:02:47 +01:00
" 2D) \n",
" \n",
2023-01-06 13:34:05 +01:00
" conv2d_17 (Conv2D) (None, 56, 56, 256) 295168 \n",
2023-01-06 03:02:47 +01:00
" \n",
2023-01-06 13:34:05 +01:00
" conv2d_18 (Conv2D) (None, 56, 56, 256) 590080 \n",
2023-01-06 03:02:47 +01:00
" \n",
2023-01-06 13:34:05 +01:00
" conv2d_19 (Conv2D) (None, 56, 56, 256) 590080 \n",
2023-01-06 03:02:47 +01:00
" \n",
2023-01-06 13:34:05 +01:00
" max_pooling2d_6 (MaxPooling (None, 28, 28, 256) 0 \n",
2023-01-06 03:02:47 +01:00
" 2D) \n",
" \n",
2023-01-06 13:34:05 +01:00
" conv2d_20 (Conv2D) (None, 28, 28, 512) 1180160 \n",
2023-01-06 03:02:47 +01:00
" \n",
2023-01-06 13:34:05 +01:00
" conv2d_21 (Conv2D) (None, 28, 28, 512) 2359808 \n",
2023-01-06 03:02:47 +01:00
" \n",
2023-01-06 13:34:05 +01:00
" conv2d_22 (Conv2D) (None, 28, 28, 512) 2359808 \n",
2023-01-06 03:02:47 +01:00
" \n",
2023-01-06 13:34:05 +01:00
" max_pooling2d_7 (MaxPooling (None, 14, 14, 512) 0 \n",
2023-01-06 03:02:47 +01:00
" 2D) \n",
" \n",
2023-01-06 13:34:05 +01:00
" conv2d_23 (Conv2D) (None, 14, 14, 512) 2359808 \n",
2023-01-06 03:02:47 +01:00
" \n",
2023-01-06 13:34:05 +01:00
" conv2d_24 (Conv2D) (None, 14, 14, 512) 2359808 \n",
2023-01-06 03:02:47 +01:00
" \n",
2023-01-06 13:34:05 +01:00
" conv2d_25 (Conv2D) (None, 14, 14, 512) 2359808 \n",
2023-01-06 03:02:47 +01:00
" \n",
2023-01-07 22:15:23 +01:00
" flatten_2 (Flatten) (None, 100352) 0 \n",
2023-01-06 03:02:47 +01:00
" \n",
2023-01-07 22:15:23 +01:00
" dense_4 (Dense) (None, 4096) 411045888 \n",
2023-01-06 03:02:47 +01:00
" \n",
2023-01-07 22:15:23 +01:00
" dense_5 (Dense) (None, 4096) 16781312 \n",
2023-01-06 03:02:47 +01:00
" \n",
2023-01-07 22:15:23 +01:00
" dense_6 (Dense) (None, 5) 20485 \n",
2023-01-06 03:02:47 +01:00
" \n",
"=================================================================\n",
"Total params: 442,562,373\n",
"Trainable params: 442,562,373\n",
"Non-trainable params: 0\n",
"_________________________________________________________________\n"
]
}
],
"source": [
"model.summary()"
]
},
{
"cell_type": "code",
2023-01-07 22:15:23 +01:00
"execution_count": 73,
2023-01-06 03:02:47 +01:00
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
2023-01-07 22:15:23 +01:00
"WARNING:tensorflow:`period` argument is deprecated. Please use `save_freq` to specify the frequency in number of batches seen.\n",
"Epoch 1/25\n"
2023-01-06 03:02:47 +01:00
]
},
{
"name": "stderr",
"output_type": "stream",
"text": [
2023-01-07 22:15:23 +01:00
"/var/folders/3r/c8tg1h051m18qhsdccdysrt40000gn/T/ipykernel_2029/3158629982.py:4: UserWarning: `Model.fit_generator` is deprecated and will be removed in a future version. Please use `Model.fit`, which supports generators.\n",
" hist_vgg = model.fit_generator(steps_per_epoch=len(train_ds), generator=train_ds, validation_data= validation_ds, validation_steps=len(validation_ds), epochs=25, callbacks=[checkpoint,early])\n"
2023-01-06 03:02:47 +01:00
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
2023-01-07 22:15:23 +01:00
"25/25 [==============================] - ETA: 0s - loss: 1.6264 - accuracy: 0.1900 \n",
"Epoch 1: val_accuracy improved from -inf to 0.18229, saving model to vgg16_1.h5\n",
"25/25 [==============================] - 854s 34s/step - loss: 1.6264 - accuracy: 0.1900 - val_loss: 1.6109 - val_accuracy: 0.1823\n",
"Epoch 2/25\n",
"25/25 [==============================] - ETA: 0s - loss: 1.6108 - accuracy: 0.1950 \n",
"Epoch 2: val_accuracy improved from 0.18229 to 0.18750, saving model to vgg16_1.h5\n",
"25/25 [==============================] - 897s 36s/step - loss: 1.6108 - accuracy: 0.1950 - val_loss: 1.6098 - val_accuracy: 0.1875\n",
"Epoch 3/25\n",
"25/25 [==============================] - ETA: 0s - loss: 1.6097 - accuracy: 0.2062 \n",
"Epoch 3: val_accuracy improved from 0.18750 to 0.19792, saving model to vgg16_1.h5\n",
"25/25 [==============================] - 870s 35s/step - loss: 1.6097 - accuracy: 0.2062 - val_loss: 1.6102 - val_accuracy: 0.1979\n",
"Epoch 4/25\n",
"25/25 [==============================] - ETA: 0s - loss: 1.6097 - accuracy: 0.2037 \n",
"Epoch 4: val_accuracy did not improve from 0.19792\n",
"25/25 [==============================] - 692s 28s/step - loss: 1.6097 - accuracy: 0.2037 - val_loss: 1.6106 - val_accuracy: 0.1979\n",
"Epoch 5/25\n",
"25/25 [==============================] - ETA: 0s - loss: 1.6095 - accuracy: 0.1963 \n",
"Epoch 5: val_accuracy did not improve from 0.19792\n",
"25/25 [==============================] - 634s 26s/step - loss: 1.6095 - accuracy: 0.1963 - val_loss: 1.6114 - val_accuracy: 0.1823\n",
"Epoch 6/25\n",
"25/25 [==============================] - ETA: 0s - loss: 1.6094 - accuracy: 0.1925 \n",
"Epoch 6: val_accuracy did not improve from 0.19792\n",
"25/25 [==============================] - 643s 26s/step - loss: 1.6094 - accuracy: 0.1925 - val_loss: 1.6112 - val_accuracy: 0.1719\n",
"Epoch 7/25\n",
"25/25 [==============================] - ETA: 0s - loss: 1.6095 - accuracy: 0.2025 \n",
"Epoch 7: val_accuracy did not improve from 0.19792\n",
"25/25 [==============================] - 697s 28s/step - loss: 1.6095 - accuracy: 0.2025 - val_loss: 1.6115 - val_accuracy: 0.1823\n",
"Epoch 8/25\n",
"25/25 [==============================] - ETA: 0s - loss: 1.6097 - accuracy: 0.1762 \n",
"Epoch 8: val_accuracy did not improve from 0.19792\n",
"25/25 [==============================] - 667s 27s/step - loss: 1.6097 - accuracy: 0.1762 - val_loss: 1.6106 - val_accuracy: 0.1979\n",
"Epoch 9/25\n",
"25/25 [==============================] - ETA: 0s - loss: 1.6096 - accuracy: 0.2025 \n",
"Epoch 9: val_accuracy did not improve from 0.19792\n",
"25/25 [==============================] - 656s 26s/step - loss: 1.6096 - accuracy: 0.2025 - val_loss: 1.6103 - val_accuracy: 0.1927\n",
"Epoch 10/25\n",
"25/25 [==============================] - ETA: 0s - loss: 1.6095 - accuracy: 0.1950 \n",
"Epoch 10: val_accuracy did not improve from 0.19792\n",
"25/25 [==============================] - 651s 26s/step - loss: 1.6095 - accuracy: 0.1950 - val_loss: 1.6104 - val_accuracy: 0.1927\n",
"Epoch 11/25\n",
"25/25 [==============================] - ETA: 0s - loss: 1.6094 - accuracy: 0.2062 \n",
"Epoch 11: val_accuracy did not improve from 0.19792\n",
"25/25 [==============================] - 646s 26s/step - loss: 1.6094 - accuracy: 0.2062 - val_loss: 1.6105 - val_accuracy: 0.1927\n",
"Epoch 12/25\n",
"25/25 [==============================] - ETA: 0s - loss: 1.6095 - accuracy: 0.2062 \n",
"Epoch 12: val_accuracy did not improve from 0.19792\n",
"25/25 [==============================] - 648s 26s/step - loss: 1.6095 - accuracy: 0.2062 - val_loss: 1.6103 - val_accuracy: 0.1927\n",
"Epoch 13/25\n",
"25/25 [==============================] - ETA: 0s - loss: 1.6096 - accuracy: 0.2025 \n",
"Epoch 13: val_accuracy did not improve from 0.19792\n",
"25/25 [==============================] - 636s 26s/step - loss: 1.6096 - accuracy: 0.2025 - val_loss: 1.6108 - val_accuracy: 0.1927\n",
"Epoch 14/25\n",
"25/25 [==============================] - ETA: 0s - loss: 1.6097 - accuracy: 0.2050 \n",
"Epoch 14: val_accuracy did not improve from 0.19792\n",
"25/25 [==============================] - 664s 27s/step - loss: 1.6097 - accuracy: 0.2050 - val_loss: 1.6110 - val_accuracy: 0.1875\n",
"Epoch 15/25\n",
"25/25 [==============================] - ETA: 0s - loss: 1.6096 - accuracy: 0.1775 \n",
"Epoch 15: val_accuracy did not improve from 0.19792\n",
"25/25 [==============================] - 657s 27s/step - loss: 1.6096 - accuracy: 0.1775 - val_loss: 1.6105 - val_accuracy: 0.1875\n",
"Epoch 16/25\n",
"25/25 [==============================] - ETA: 0s - loss: 1.6095 - accuracy: 0.2000 \n",
"Epoch 16: val_accuracy did not improve from 0.19792\n",
"25/25 [==============================] - 664s 27s/step - loss: 1.6095 - accuracy: 0.2000 - val_loss: 1.6102 - val_accuracy: 0.1927\n",
"Epoch 17/25\n",
"25/25 [==============================] - ETA: 0s - loss: 1.6094 - accuracy: 0.1937 \n",
"Epoch 17: val_accuracy did not improve from 0.19792\n",
"25/25 [==============================] - 676s 27s/step - loss: 1.6094 - accuracy: 0.1937 - val_loss: 1.6104 - val_accuracy: 0.1927\n",
"Epoch 18/25\n",
"25/25 [==============================] - ETA: 0s - loss: 1.6093 - accuracy: 0.1975 \n",
"Epoch 18: val_accuracy did not improve from 0.19792\n",
"25/25 [==============================] - 673s 27s/step - loss: 1.6093 - accuracy: 0.1975 - val_loss: 1.6103 - val_accuracy: 0.1823\n",
"Epoch 19/25\n",
"25/25 [==============================] - ETA: 0s - loss: 1.6094 - accuracy: 0.2050 \n",
"Epoch 19: val_accuracy did not improve from 0.19792\n",
"25/25 [==============================] - 681s 27s/step - loss: 1.6094 - accuracy: 0.2050 - val_loss: 1.6111 - val_accuracy: 0.1771\n",
"Epoch 20/25\n",
"25/25 [==============================] - ETA: 0s - loss: 1.6093 - accuracy: 0.2050 \n",
"Epoch 20: val_accuracy did not improve from 0.19792\n",
"25/25 [==============================] - 672s 27s/step - loss: 1.6093 - accuracy: 0.2050 - val_loss: 1.6108 - val_accuracy: 0.1927\n",
"Epoch 21/25\n",
"25/25 [==============================] - ETA: 0s - loss: 1.6094 - accuracy: 0.2050 \n",
"Epoch 21: val_accuracy did not improve from 0.19792\n",
"25/25 [==============================] - 663s 27s/step - loss: 1.6094 - accuracy: 0.2050 - val_loss: 1.6110 - val_accuracy: 0.1927\n",
"Epoch 22/25\n",
"25/25 [==============================] - ETA: 0s - loss: 1.6096 - accuracy: 0.1850 \n",
"Epoch 22: val_accuracy did not improve from 0.19792\n",
"25/25 [==============================] - 675s 27s/step - loss: 1.6096 - accuracy: 0.1850 - val_loss: 1.6111 - val_accuracy: 0.1927\n",
"Epoch 23/25\n",
"25/25 [==============================] - ETA: 0s - loss: 1.6092 - accuracy: 0.1963 \n",
"Epoch 23: val_accuracy did not improve from 0.19792\n",
"25/25 [==============================] - 664s 27s/step - loss: 1.6092 - accuracy: 0.1963 - val_loss: 1.6110 - val_accuracy: 0.1823\n",
"Epoch 23: early stopping\n"
2023-01-06 03:02:47 +01:00
]
}
],
"source": [
"from keras.callbacks import ModelCheckpoint, EarlyStopping\n",
"\n",
"# save_freq='epoch' replaces the deprecated `period=1` argument (the previous\n",
"# run logged a deprecation warning for it)\n",
"checkpoint = ModelCheckpoint(\"vgg16_1.h5\", monitor='val_accuracy', verbose=1, save_best_only=True, save_weights_only=False, mode='auto', save_freq='epoch')\n",
"early = EarlyStopping(monitor='val_accuracy', min_delta=0, patience=20, verbose=1, mode='auto')\n",
2023-01-07 22:15:23 +01:00
"# Model.fit accepts tf.data datasets directly; fit_generator is deprecated\n",
"# (the previous run logged a UserWarning for it), and steps are inferred from\n",
"# the finite datasets automatically\n",
"hist_vgg = model.fit(train_ds, validation_data=validation_ds, epochs=25, callbacks=[checkpoint, early])"
2023-01-06 03:02:47 +01:00
]
},
{
"cell_type": "code",
2023-01-07 22:15:23 +01:00
"execution_count": 74,
2023-01-06 03:02:47 +01:00
"metadata": {},
"outputs": [
{
"data": {
2023-01-07 22:15:23 +01:00
"image/png": "iVBORw0KGgoAAAANSUhEUgAAAjcAAAHHCAYAAABDUnkqAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjYuMiwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8o6BhiAAAACXBIWXMAAA9hAAAPYQGoP6dpAABdr0lEQVR4nO3dd3xT9f4/8NdJ2qQz6Z4Uyt7rMiqgsqqlYJUhICAtUPCHAjIcyGU7QEURERT1Qrl8r2wFcbEqS0CmRZANhQJdlNJ0r+Tz+6PtoaEFWmib9vB68ghJzvmcc97n5KR55XNOEkkIIUBERESkECpLF0BERERUkRhuiIiISFEYboiIiEhRGG6IiIhIURhuiIiISFEYboiIiEhRGG6IiIhIURhuiIiISFEYboiIiEhRGG6IqMJIkoQ5c+aUe7orV65AkiSsXLmywmsioscPww2RwqxcuRKSJEGSJPzxxx8lxgsh4OfnB0mS8Nxzz1mgQiKiysVwQ6RQNjY2WL16dYnhe/bswfXr16HVai1QFRFR5WO4IVKo3r17Y8OGDcjPzzcbvnr1arRr1w5eXl4WquzxkZGRYekSiB5LDDdECjVkyBDcunULO3bskIfl5uZi48aNGDp0aKnTZGRk4I033oCfnx+0Wi0aN26MTz75BEIIs3Y5OTmYPHky3N3d4ejoiOeffx7Xr18vdZ43btzAqFGj4OnpCa1Wi+bNm2PFihUPtU7Jycl488030bJlSzg4OECn0yE4OBgnTpwo0TY7Oxtz5sxBo0aNYGNjA29vb/Tv3x+XLl2S25hMJnz++edo2bIlbGxs4O7ujl69euHo0aMA7n8u0N3nF82ZMweSJOH06dMYOnQonJ2d8eSTTwIA/v77b4wYMQL16tWDjY0NvLy8MGrUKNy6davU7RUeHg4fHx9otVrUrVsXr776KnJzc3H58mVIkoTPPvusxHQHDhyAJElYs2ZNeTcrkeJYWboAIqoc/v7+6NSpE9asWYPg4GAAwG+//QaDwYCXXnoJixcvNmsvhMDzzz+PXbt2ITw8HG3atMG2bdvw1ltv4caNG2YvqKNHj8b//vc/DB06FJ07d8bvv/+OPn36lKghISEBTzzxBCRJwvjx4+Hu7o7ffvsN4eHhSE1NxaRJk8q1TpcvX8bmzZsxcOBA1K1bFwkJCfj666/RtWtXnD59Gj4+PgAAo9GI5557DpGRkXjppZcwceJEpKWlYceOHTh16hTq168PAAgPD8fKlSsRHByM0aNHIz8/H/v27cOff/6J9u3bl6u2IgMHDkTDhg0xb948ORTu2LEDly9fxsiRI+Hl5YV//vkH33zzDf755x/8+eefkCQJABAbG4uOHTsiJSUFr7zyCpo0aYIbN25g48aNyMzMRL169dClSxd89913mDx5stlyv/vuOzg6OuKFF154qLqJFEUQkaJEREQIAOLIkSNiyZIlwtHRUWRmZgohhBg4cKDo3r27EEKIOnXqiD59+sjTbd68WQAQ77//vtn8XnzxRSFJkrh48aIQQoioqCgBQLz22mtm7YYOHSoAiNmzZ8vDwsPDhbe3t0hKSjJr+9JLLwm9Xi/XFR0dLQCIiIiI+65bdna2MBqNZsOio6OFVqsV7777rjxsxYoVAoBYuHBhiXmYTCYhhBC///67ACBef/31e7a5X113r+vs2bMFADFkyJASbYvWs7g1a9YIAGLv3r3ysNDQUKFSqcSRI0fuWdPXX38tAIgzZ87I43Jzc4Wbm5sICwsrMR3R44iHpYgUbNCgQcjKysLPP/+MtLQ0/Pzzz/c8JPXrr79CrVbj9ddfNxv+xhtvQAiB3377TW4HoES7u3thhBD4/vvvERISAiEEkpKS5EtQUBAMBgOOHz9ervXRarVQqQr+bBmNRty6dQsODg5o3Lix2by+//57uLm5YcKECSXmUdRL8v3330OSJMyePfuebR7G2LFjSwyztbWVb2dnZyMpKQlPPP
EEAMh1m0wmbN68GSEhIaX2GhXVNGjQINjY2OC7776Tx23btg1JSUl4+eWXH7puIiVhuCFSMHd3dwQGBmL16tX44YcfYDQa8eKLL5ba9urVq/Dx8YGjo6PZ8KZNm8rji65VKpV8aKdI48aNze7fvHkTKSkp+Oabb+Du7m52GTlyJAAgMTGxXOtjMpnw2WefoWHDhtBqtXBzc4O7uzv+/vtvGAwGud2lS5fQuHFjWFnd+8j7pUuX4OPjAxcXl3LV8CB169YtMSw5ORkTJ06Ep6cnbG1t4e7uLrcrqvvmzZtITU1FixYt7jt/JycnhISEmH0S7rvvvoOvry969OhRgWtCVHPxnBsihRs6dCjGjBmD+Ph4BAcHw8nJqUqWazKZAAAvv/wywsLCSm3TqlWrcs1z3rx5mDlzJkaNGoX33nsPLi4uUKlUmDRpkry8inSvHhyj0XjPaYr30hQZNGgQDhw4gLfeegtt2rSBg4MDTCYTevXq9VB1h4aGYsOGDThw4ABatmyJLVu24LXXXpN7tYgedww3RArXr18//L//9//w559/Yt26dfdsV6dOHezcuRNpaWlmvTdnz56Vxxddm0wmuXekyLlz58zmV/RJKqPRiMDAwApZl40bN6J79+5Yvny52fCUlBS4ubnJ9+vXr49Dhw4hLy8P1tbWpc6rfv362LZtG5KTk+/Ze+Ps7CzPv7iiXqyyuH37NiIjIzF37lzMmjVLHn7hwgWzdu7u7tDpdDh16tQD59mrVy+4u7vju+++Q0BAADIzMzF8+PAy10SkdIz5RArn4OCAr776CnPmzEFISMg92/Xu3RtGoxFLliwxG/7ZZ59BkiT5E1dF13d/2mrRokVm99VqNQYMGIDvv/++1Bfsmzdvlntd1Gp1iY+lb9iwATdu3DAbNmDAACQlJZVYFwDy9AMGDIAQAnPnzr1nG51OBzc3N+zdu9ds/JdfflmumovPs8jd20ulUqFv37746aef5I+il1YTAFhZWWHIkCFYv349Vq5ciZYtW5a7F4xIydhzQ/QYuNdhoeJCQkLQvXt3TJ8+HVeuXEHr1q2xfft2/Pjjj5g0aZJ8jk2bNm0wZMgQfPnllzAYDOjcuTMiIyNx8eLFEvP88MMPsWvXLgQEBGDMmDFo1qwZkpOTcfz4cezcuRPJycnlWo/nnnsO7777LkaOHInOnTvj5MmT+O6771CvXj2zdqGhoVi1ahWmTJmCw4cP46mnnkJGRgZ27tyJ1157DS+88AK6d++O4cOHY/Hixbhw4YJ8iGjfvn3o3r07xo8fD6DgY+8ffvghRo8ejfbt22Pv3r04f/58mWvW6XR4+umn8fHHHyMvLw++vr7Yvn07oqOjS7SdN28etm/fjq5du+KVV15B06ZNERcXhw0bNuCPP/4wO6QYGhqKxYsXY9euXfjoo4/KtR2JFM9in9MiokpR/KPg93P3R8GFECItLU1MnjxZ+Pj4CGtra9GwYUOxYMEC+WPIRbKyssTrr78uXF1dhb29vQgJCRHXrl0r8fFoIYRISEgQ48aNE35+fsLa2lp4eXmJnj17im+++UZuU56Pgr/xxhvC29tb2Nraii5duoiDBw+Krl27iq5du5q1zczMFNOnTxd169aVl/viiy+KS5cuyW3y8/PFggULRJMmTYRGoxHu7u4iODhYHDt2zGw+4eHhQq/XC0dHRzFo0CCRmJh4z4+C37x5s0Td169fF/369RNOTk5Cr9eLgQMHitjY2FK319WrV0VoaKhwd3cXWq1W1KtXT4wbN07k5OSUmG/z5s2FSqUS169fv+92I3rcSELc1VdKREQ1Qtu2beHi4oLIyEhLl0JUrfCcGyKiGujo0aOIiopCaGiopUshqnbYc0NEVIOcOnUKx44dw6effoqkpCRcvnwZNjY2li6LqFphzw0RUQ2yceNGjBw5Enl5eVizZg2DDVEp2HNDREREisKeGyIiIlIUhhsiIiJSlMfuS/xMJhNiY2Ph6Oj4SL
/8S0RERFVHCIG0tDT4+Pg88HfUHrtwExsbCz8/P0uXQURERA/h2rVrqFWr1n3bPHbhpugHAa9duwadTmfhaoiIiKg
2023-01-06 03:02:47 +01:00
"text/plain": [
"<Figure size 640x480 with 1 Axes>"
]
},
"metadata": {},
"output_type": "display_data"
}
],
"source": [
"import matplotlib.pyplot as plt\n",
"\n",
"# Draw the four training curves; iteration order matches the legend below\n",
"for metric in (\"accuracy\", \"val_accuracy\", \"loss\", \"val_loss\"):\n",
"    plt.plot(hist_vgg.history[metric])\n",
"plt.title(\"Model accuracy\")\n",
"plt.ylabel(\"Value\")\n",
"plt.xlabel(\"Epoch\")\n",
"plt.legend([\"Accuracy\",\"Validation Accuracy\",\"Loss\",\"Validation Loss\"])\n",
"plt.show()"
]
},
2023-01-06 13:34:05 +01:00
{
"cell_type": "code",
2023-01-07 22:15:23 +01:00
"execution_count": 75,
2023-01-06 13:34:05 +01:00
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
2023-01-07 22:15:23 +01:00
"8/8 [==============================] - 35s 4s/step - loss: 1.6097 - accuracy: 0.1953\n"
2023-01-06 13:34:05 +01:00
]
},
{
"data": {
"text/plain": [
2023-01-07 22:15:23 +01:00
"[1.6096564531326294, 0.1953125]"
2023-01-06 13:34:05 +01:00
]
},
2023-01-07 22:15:23 +01:00
"execution_count": 75,
2023-01-06 13:34:05 +01:00
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"model.evaluate(test_ds)"
]
},
2023-01-06 03:02:47 +01:00
{
"attachments": {},
"cell_type": "markdown",
"metadata": {},
"source": [
"## ResNet50"
]
},
{
"cell_type": "code",
2023-01-07 22:15:23 +01:00
"execution_count": 76,
2023-01-06 13:34:05 +01:00
"metadata": {},
2023-01-07 22:15:23 +01:00
"outputs": [],
2023-01-06 13:34:05 +01:00
"source": [
"from keras.layers import Input, Lambda, Dense, Flatten\n",
"from keras.models import Model\n",
"from keras.preprocessing import image\n",
"from keras.preprocessing.image import ImageDataGenerator\n",
"from keras.models import Sequential\n",
"import numpy as np\n",
"from glob import glob\n",
"import matplotlib.pyplot as plt\n",
2023-01-07 22:15:23 +01:00
"import ssl\n",
"ssl._create_default_https_context = ssl._create_unverified_context\n",
"from keras.applications import ResNet50\n",
2023-01-06 13:34:05 +01:00
"\n",
"# Resize all input images to this spatial size\n",
"IMAGE_SIZE = [224, 224]\n",
"\n",
"# Backbone: ImageNet-pretrained ResNet50 without its classification head\n",
"resnet = ResNet50(input_shape=IMAGE_SIZE + [3], weights='imagenet', include_top=False)\n",
"\n",
"# Freeze the pretrained weights; only the new head below is trained\n",
"for layer in resnet.layers:\n",
"    layer.trainable = False\n",
"\n",
"# Number of target classes.  Previously this was assigned but never used and\n",
"# the Dense layer hard-coded 5; the constant is now the single source of truth.\n",
"classes = 5\n",
"\n",
"# New classification head on top of the frozen backbone\n",
"x = Flatten()(resnet.output)\n",
"prediction = Dense(classes, activation='softmax')(x)"
]
},
{
"cell_type": "code",
2023-01-07 22:15:23 +01:00
"execution_count": 77,
2023-01-06 03:02:47 +01:00
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
2023-01-07 22:15:23 +01:00
"Model: \"model_1\"\n",
2023-01-06 03:02:47 +01:00
"__________________________________________________________________________________________________\n",
" Layer (type) Output Shape Param # Connected to \n",
"==================================================================================================\n",
2023-01-07 22:15:23 +01:00
" input_3 (InputLayer) [(None, 224, 224, 3 0 [] \n",
2023-01-06 03:02:47 +01:00
" )] \n",
" \n",
2023-01-07 22:15:23 +01:00
" conv1_pad (ZeroPadding2D) (None, 230, 230, 3) 0 ['input_3[0][0]'] \n",
2023-01-06 03:02:47 +01:00
" \n",
" conv1_conv (Conv2D) (None, 112, 112, 64 9472 ['conv1_pad[0][0]'] \n",
" ) \n",
" \n",
" conv1_bn (BatchNormalization) (None, 112, 112, 64 256 ['conv1_conv[0][0]'] \n",
" ) \n",
" \n",
" conv1_relu (Activation) (None, 112, 112, 64 0 ['conv1_bn[0][0]'] \n",
" ) \n",
" \n",
" pool1_pad (ZeroPadding2D) (None, 114, 114, 64 0 ['conv1_relu[0][0]'] \n",
" ) \n",
" \n",
" pool1_pool (MaxPooling2D) (None, 56, 56, 64) 0 ['pool1_pad[0][0]'] \n",
" \n",
" conv2_block1_1_conv (Conv2D) (None, 56, 56, 64) 4160 ['pool1_pool[0][0]'] \n",
" \n",
" conv2_block1_1_bn (BatchNormal (None, 56, 56, 64) 256 ['conv2_block1_1_conv[0][0]'] \n",
" ization) \n",
" \n",
" conv2_block1_1_relu (Activatio (None, 56, 56, 64) 0 ['conv2_block1_1_bn[0][0]'] \n",
" n) \n",
" \n",
" conv2_block1_2_conv (Conv2D) (None, 56, 56, 64) 36928 ['conv2_block1_1_relu[0][0]'] \n",
" \n",
" conv2_block1_2_bn (BatchNormal (None, 56, 56, 64) 256 ['conv2_block1_2_conv[0][0]'] \n",
" ization) \n",
" \n",
" conv2_block1_2_relu (Activatio (None, 56, 56, 64) 0 ['conv2_block1_2_bn[0][0]'] \n",
" n) \n",
" \n",
" conv2_block1_0_conv (Conv2D) (None, 56, 56, 256) 16640 ['pool1_pool[0][0]'] \n",
" \n",
" conv2_block1_3_conv (Conv2D) (None, 56, 56, 256) 16640 ['conv2_block1_2_relu[0][0]'] \n",
" \n",
" conv2_block1_0_bn (BatchNormal (None, 56, 56, 256) 1024 ['conv2_block1_0_conv[0][0]'] \n",
" ization) \n",
" \n",
" conv2_block1_3_bn (BatchNormal (None, 56, 56, 256) 1024 ['conv2_block1_3_conv[0][0]'] \n",
" ization) \n",
" \n",
" conv2_block1_add (Add) (None, 56, 56, 256) 0 ['conv2_block1_0_bn[0][0]', \n",
" 'conv2_block1_3_bn[0][0]'] \n",
" \n",
" conv2_block1_out (Activation) (None, 56, 56, 256) 0 ['conv2_block1_add[0][0]'] \n",
" \n",
" conv2_block2_1_conv (Conv2D) (None, 56, 56, 64) 16448 ['conv2_block1_out[0][0]'] \n",
" \n",
" conv2_block2_1_bn (BatchNormal (None, 56, 56, 64) 256 ['conv2_block2_1_conv[0][0]'] \n",
" ization) \n",
" \n",
" conv2_block2_1_relu (Activatio (None, 56, 56, 64) 0 ['conv2_block2_1_bn[0][0]'] \n",
" n) \n",
" \n",
" conv2_block2_2_conv (Conv2D) (None, 56, 56, 64) 36928 ['conv2_block2_1_relu[0][0]'] \n",
" \n",
" conv2_block2_2_bn (BatchNormal (None, 56, 56, 64) 256 ['conv2_block2_2_conv[0][0]'] \n",
" ization) \n",
" \n",
" conv2_block2_2_relu (Activatio (None, 56, 56, 64) 0 ['conv2_block2_2_bn[0][0]'] \n",
" n) \n",
" \n",
" conv2_block2_3_conv (Conv2D) (None, 56, 56, 256) 16640 ['conv2_block2_2_relu[0][0]'] \n",
" \n",
" conv2_block2_3_bn (BatchNormal (None, 56, 56, 256) 1024 ['conv2_block2_3_conv[0][0]'] \n",
" ization) \n",
" \n",
" conv2_block2_add (Add) (None, 56, 56, 256) 0 ['conv2_block1_out[0][0]', \n",
" 'conv2_block2_3_bn[0][0]'] \n",
" \n",
" conv2_block2_out (Activation) (None, 56, 56, 256) 0 ['conv2_block2_add[0][0]'] \n",
" \n",
" conv2_block3_1_conv (Conv2D) (None, 56, 56, 64) 16448 ['conv2_block2_out[0][0]'] \n",
" \n",
" conv2_block3_1_bn (BatchNormal (None, 56, 56, 64) 256 ['conv2_block3_1_conv[0][0]'] \n",
" ization) \n",
" \n",
" conv2_block3_1_relu (Activatio (None, 56, 56, 64) 0 ['conv2_block3_1_bn[0][0]'] \n",
" n) \n",
" \n",
" conv2_block3_2_conv (Conv2D) (None, 56, 56, 64) 36928 ['conv2_block3_1_relu[0][0]'] \n",
" \n",
" conv2_block3_2_bn (BatchNormal (None, 56, 56, 64) 256 ['conv2_block3_2_conv[0][0]'] \n",
" ization) \n",
" \n",
" conv2_block3_2_relu (Activatio (None, 56, 56, 64) 0 ['conv2_block3_2_bn[0][0]'] \n",
" n) \n",
" \n",
" conv2_block3_3_conv (Conv2D) (None, 56, 56, 256) 16640 ['conv2_block3_2_relu[0][0]'] \n",
" \n",
" conv2_block3_3_bn (BatchNormal (None, 56, 56, 256) 1024 ['conv2_block3_3_conv[0][0]'] \n",
" ization) \n",
" \n",
" conv2_block3_add (Add) (None, 56, 56, 256) 0 ['conv2_block2_out[0][0]', \n",
" 'conv2_block3_3_bn[0][0]'] \n",
" \n",
" conv2_block3_out (Activation) (None, 56, 56, 256) 0 ['conv2_block3_add[0][0]'] \n",
" \n",
" conv3_block1_1_conv (Conv2D) (None, 28, 28, 128) 32896 ['conv2_block3_out[0][0]'] \n",
" \n",
" conv3_block1_1_bn (BatchNormal (None, 28, 28, 128) 512 ['conv3_block1_1_conv[0][0]'] \n",
" ization) \n",
" \n",
" conv3_block1_1_relu (Activatio (None, 28, 28, 128) 0 ['conv3_block1_1_bn[0][0]'] \n",
" n) \n",
" \n",
" conv3_block1_2_conv (Conv2D) (None, 28, 28, 128) 147584 ['conv3_block1_1_relu[0][0]'] \n",
" \n",
" conv3_block1_2_bn (BatchNormal (None, 28, 28, 128) 512 ['conv3_block1_2_conv[0][0]'] \n",
" ization) \n",
" \n",
" conv3_block1_2_relu (Activatio (None, 28, 28, 128) 0 ['conv3_block1_2_bn[0][0]'] \n",
" n) \n",
" \n",
" conv3_block1_0_conv (Conv2D) (None, 28, 28, 512) 131584 ['conv2_block3_out[0][0]'] \n",
" \n",
" conv3_block1_3_conv (Conv2D) (None, 28, 28, 512) 66048 ['conv3_block1_2_relu[0][0]'] \n",
" \n",
" conv3_block1_0_bn (BatchNormal (None, 28, 28, 512) 2048 ['conv3_block1_0_conv[0][0]'] \n",
" ization) \n",
" \n",
" conv3_block1_3_bn (BatchNormal (None, 28, 28, 512) 2048 ['conv3_block1_3_conv[0][0]'] \n",
" ization) \n",
" \n",
" conv3_block1_add (Add) (None, 28, 28, 512) 0 ['conv3_block1_0_bn[0][0]', \n",
" 'conv3_block1_3_bn[0][0]'] \n",
" \n",
" conv3_block1_out (Activation) (None, 28, 28, 512) 0 ['conv3_block1_add[0][0]'] \n",
" \n",
" conv3_block2_1_conv (Conv2D) (None, 28, 28, 128) 65664 ['conv3_block1_out[0][0]'] \n",
" \n",
" conv3_block2_1_bn (BatchNormal (None, 28, 28, 128) 512 ['conv3_block2_1_conv[0][0]'] \n",
" ization) \n",
" \n",
" conv3_block2_1_relu (Activatio (None, 28, 28, 128) 0 ['conv3_block2_1_bn[0][0]'] \n",
" n) \n",
" \n",
" conv3_block2_2_conv (Conv2D) (None, 28, 28, 128) 147584 ['conv3_block2_1_relu[0][0]'] \n",
" \n",
" conv3_block2_2_bn (BatchNormal (None, 28, 28, 128) 512 ['conv3_block2_2_conv[0][0]'] \n",
" ization) \n",
" \n",
" conv3_block2_2_relu (Activatio (None, 28, 28, 128) 0 ['conv3_block2_2_bn[0][0]'] \n",
" n) \n",
" \n",
" conv3_block2_3_conv (Conv2D) (None, 28, 28, 512) 66048 ['conv3_block2_2_relu[0][0]'] \n",
" \n",
" conv3_block2_3_bn (BatchNormal (None, 28, 28, 512) 2048 ['conv3_block2_3_conv[0][0]'] \n",
" ization) \n",
" \n",
" conv3_block2_add (Add) (None, 28, 28, 512) 0 ['conv3_block1_out[0][0]', \n",
" 'conv3_block2_3_bn[0][0]'] \n",
" \n",
" conv3_block2_out (Activation) (None, 28, 28, 512) 0 ['conv3_block2_add[0][0]'] \n",
" \n",
" conv3_block3_1_conv (Conv2D) (None, 28, 28, 128) 65664 ['conv3_block2_out[0][0]'] \n",
" \n",
" conv3_block3_1_bn (BatchNormal (None, 28, 28, 128) 512 ['conv3_block3_1_conv[0][0]'] \n",
" ization) \n",
" \n",
" conv3_block3_1_relu (Activatio (None, 28, 28, 128) 0 ['conv3_block3_1_bn[0][0]'] \n",
" n) \n",
" \n",
" conv3_block3_2_conv (Conv2D) (None, 28, 28, 128) 147584 ['conv3_block3_1_relu[0][0]'] \n",
" \n",
" conv3_block3_2_bn (BatchNormal (None, 28, 28, 128) 512 ['conv3_block3_2_conv[0][0]'] \n",
" ization) \n",
" \n",
" conv3_block3_2_relu (Activatio (None, 28, 28, 128) 0 ['conv3_block3_2_bn[0][0]'] \n",
" n) \n",
" \n",
" conv3_block3_3_conv (Conv2D) (None, 28, 28, 512) 66048 ['conv3_block3_2_relu[0][0]'] \n",
" \n",
" conv3_block3_3_bn (BatchNormal (None, 28, 28, 512) 2048 ['conv3_block3_3_conv[0][0]'] \n",
" ization) \n",
" \n",
" conv3_block3_add (Add) (None, 28, 28, 512) 0 ['conv3_block2_out[0][0]', \n",
" 'conv3_block3_3_bn[0][0]'] \n",
" \n",
" conv3_block3_out (Activation) (None, 28, 28, 512) 0 ['conv3_block3_add[0][0]'] \n",
" \n",
" conv3_block4_1_conv (Conv2D) (None, 28, 28, 128) 65664 ['conv3_block3_out[0][0]'] \n",
" \n",
" conv3_block4_1_bn (BatchNormal (None, 28, 28, 128) 512 ['conv3_block4_1_conv[0][0]'] \n",
" ization) \n",
" \n",
" conv3_block4_1_relu (Activatio (None, 28, 28, 128) 0 ['conv3_block4_1_bn[0][0]'] \n",
" n) \n",
" \n",
" conv3_block4_2_conv (Conv2D) (None, 28, 28, 128) 147584 ['conv3_block4_1_relu[0][0]'] \n",
" \n",
" conv3_block4_2_bn (BatchNormal (None, 28, 28, 128) 512 ['conv3_block4_2_conv[0][0]'] \n",
" ization) \n",
" \n",
" conv3_block4_2_relu (Activatio (None, 28, 28, 128) 0 ['conv3_block4_2_bn[0][0]'] \n",
" n) \n",
" \n",
" conv3_block4_3_conv (Conv2D) (None, 28, 28, 512) 66048 ['conv3_block4_2_relu[0][0]'] \n",
" \n",
" conv3_block4_3_bn (BatchNormal (None, 28, 28, 512) 2048 ['conv3_block4_3_conv[0][0]'] \n",
" ization) \n",
" \n",
" conv3_block4_add (Add) (None, 28, 28, 512) 0 ['conv3_block3_out[0][0]', \n",
" 'conv3_block4_3_bn[0][0]'] \n",
" \n",
" conv3_block4_out (Activation) (None, 28, 28, 512) 0 ['conv3_block4_add[0][0]'] \n",
" \n",
" conv4_block1_1_conv (Conv2D) (None, 14, 14, 256) 131328 ['conv3_block4_out[0][0]'] \n",
" \n",
" conv4_block1_1_bn (BatchNormal (None, 14, 14, 256) 1024 ['conv4_block1_1_conv[0][0]'] \n",
" ization) \n",
" \n",
" conv4_block1_1_relu (Activatio (None, 14, 14, 256) 0 ['conv4_block1_1_bn[0][0]'] \n",
" n) \n",
" \n",
" conv4_block1_2_conv (Conv2D) (None, 14, 14, 256) 590080 ['conv4_block1_1_relu[0][0]'] \n",
" \n",
" conv4_block1_2_bn (BatchNormal (None, 14, 14, 256) 1024 ['conv4_block1_2_conv[0][0]'] \n",
" ization) \n",
" \n",
" conv4_block1_2_relu (Activatio (None, 14, 14, 256) 0 ['conv4_block1_2_bn[0][0]'] \n",
" n) \n",
" \n",
" conv4_block1_0_conv (Conv2D) (None, 14, 14, 1024 525312 ['conv3_block4_out[0][0]'] \n",
" ) \n",
" \n",
" conv4_block1_3_conv (Conv2D) (None, 14, 14, 1024 263168 ['conv4_block1_2_relu[0][0]'] \n",
" ) \n",
" \n",
" conv4_block1_0_bn (BatchNormal (None, 14, 14, 1024 4096 ['conv4_block1_0_conv[0][0]'] \n",
" ization) ) \n",
" \n",
" conv4_block1_3_bn (BatchNormal (None, 14, 14, 1024 4096 ['conv4_block1_3_conv[0][0]'] \n",
" ization) ) \n",
" \n",
" conv4_block1_add (Add) (None, 14, 14, 1024 0 ['conv4_block1_0_bn[0][0]', \n",
" ) 'conv4_block1_3_bn[0][0]'] \n",
" \n",
" conv4_block1_out (Activation) (None, 14, 14, 1024 0 ['conv4_block1_add[0][0]'] \n",
" ) \n",
" \n",
" conv4_block2_1_conv (Conv2D) (None, 14, 14, 256) 262400 ['conv4_block1_out[0][0]'] \n",
" \n",
" conv4_block2_1_bn (BatchNormal (None, 14, 14, 256) 1024 ['conv4_block2_1_conv[0][0]'] \n",
" ization) \n",
" \n",
" conv4_block2_1_relu (Activatio (None, 14, 14, 256) 0 ['conv4_block2_1_bn[0][0]'] \n",
" n) \n",
" \n",
" conv4_block2_2_conv (Conv2D) (None, 14, 14, 256) 590080 ['conv4_block2_1_relu[0][0]'] \n",
" \n",
" conv4_block2_2_bn (BatchNormal (None, 14, 14, 256) 1024 ['conv4_block2_2_conv[0][0]'] \n",
" ization) \n",
" \n",
" conv4_block2_2_relu (Activatio (None, 14, 14, 256) 0 ['conv4_block2_2_bn[0][0]'] \n",
" n) \n",
" \n",
" conv4_block2_3_conv (Conv2D) (None, 14, 14, 1024 263168 ['conv4_block2_2_relu[0][0]'] \n",
" ) \n",
" \n",
" conv4_block2_3_bn (BatchNormal (None, 14, 14, 1024 4096 ['conv4_block2_3_conv[0][0]'] \n",
" ization) ) \n",
" \n",
" conv4_block2_add (Add) (None, 14, 14, 1024 0 ['conv4_block1_out[0][0]', \n",
" ) 'conv4_block2_3_bn[0][0]'] \n",
" \n",
" conv4_block2_out (Activation) (None, 14, 14, 1024 0 ['conv4_block2_add[0][0]'] \n",
" ) \n",
" \n",
" conv4_block3_1_conv (Conv2D) (None, 14, 14, 256) 262400 ['conv4_block2_out[0][0]'] \n",
" \n",
" conv4_block3_1_bn (BatchNormal (None, 14, 14, 256) 1024 ['conv4_block3_1_conv[0][0]'] \n",
" ization) \n",
" \n",
" conv4_block3_1_relu (Activatio (None, 14, 14, 256) 0 ['conv4_block3_1_bn[0][0]'] \n",
" n) \n",
" \n",
" conv4_block3_2_conv (Conv2D) (None, 14, 14, 256) 590080 ['conv4_block3_1_relu[0][0]'] \n",
" \n",
" conv4_block3_2_bn (BatchNormal (None, 14, 14, 256) 1024 ['conv4_block3_2_conv[0][0]'] \n",
" ization) \n",
" \n",
" conv4_block3_2_relu (Activatio (None, 14, 14, 256) 0 ['conv4_block3_2_bn[0][0]'] \n",
" n) \n",
" \n",
" conv4_block3_3_conv (Conv2D) (None, 14, 14, 1024 263168 ['conv4_block3_2_relu[0][0]'] \n",
" ) \n",
" \n",
" conv4_block3_3_bn (BatchNormal (None, 14, 14, 1024 4096 ['conv4_block3_3_conv[0][0]'] \n",
" ization) ) \n",
" \n",
" conv4_block3_add (Add) (None, 14, 14, 1024 0 ['conv4_block2_out[0][0]', \n",
" ) 'conv4_block3_3_bn[0][0]'] \n",
" \n",
" conv4_block3_out (Activation) (None, 14, 14, 1024 0 ['conv4_block3_add[0][0]'] \n",
" ) \n",
" \n",
" conv4_block4_1_conv (Conv2D) (None, 14, 14, 256) 262400 ['conv4_block3_out[0][0]'] \n",
" \n",
" conv4_block4_1_bn (BatchNormal (None, 14, 14, 256) 1024 ['conv4_block4_1_conv[0][0]'] \n",
" ization) \n",
" \n",
" conv4_block4_1_relu (Activatio (None, 14, 14, 256) 0 ['conv4_block4_1_bn[0][0]'] \n",
" n) \n",
" \n",
" conv4_block4_2_conv (Conv2D) (None, 14, 14, 256) 590080 ['conv4_block4_1_relu[0][0]'] \n",
" \n",
" conv4_block4_2_bn (BatchNormal (None, 14, 14, 256) 1024 ['conv4_block4_2_conv[0][0]'] \n",
" ization) \n",
" \n",
" conv4_block4_2_relu (Activatio (None, 14, 14, 256) 0 ['conv4_block4_2_bn[0][0]'] \n",
" n) \n",
" \n",
" conv4_block4_3_conv (Conv2D) (None, 14, 14, 1024 263168 ['conv4_block4_2_relu[0][0]'] \n",
" ) \n",
" \n",
" conv4_block4_3_bn (BatchNormal (None, 14, 14, 1024 4096 ['conv4_block4_3_conv[0][0]'] \n",
" ization) ) \n",
" \n",
" conv4_block4_add (Add) (None, 14, 14, 1024 0 ['conv4_block3_out[0][0]', \n",
" ) 'conv4_block4_3_bn[0][0]'] \n",
" \n",
" conv4_block4_out (Activation) (None, 14, 14, 1024 0 ['conv4_block4_add[0][0]'] \n",
" ) \n",
" \n",
" conv4_block5_1_conv (Conv2D) (None, 14, 14, 256) 262400 ['conv4_block4_out[0][0]'] \n",
" \n",
" conv4_block5_1_bn (BatchNormal (None, 14, 14, 256) 1024 ['conv4_block5_1_conv[0][0]'] \n",
" ization) \n",
" \n",
" conv4_block5_1_relu (Activatio (None, 14, 14, 256) 0 ['conv4_block5_1_bn[0][0]'] \n",
" n) \n",
" \n",
" conv4_block5_2_conv (Conv2D) (None, 14, 14, 256) 590080 ['conv4_block5_1_relu[0][0]'] \n",
" \n",
" conv4_block5_2_bn (BatchNormal (None, 14, 14, 256) 1024 ['conv4_block5_2_conv[0][0]'] \n",
" ization) \n",
" \n",
" conv4_block5_2_relu (Activatio (None, 14, 14, 256) 0 ['conv4_block5_2_bn[0][0]'] \n",
" n) \n",
" \n",
" conv4_block5_3_conv (Conv2D) (None, 14, 14, 1024 263168 ['conv4_block5_2_relu[0][0]'] \n",
" ) \n",
" \n",
" conv4_block5_3_bn (BatchNormal (None, 14, 14, 1024 4096 ['conv4_block5_3_conv[0][0]'] \n",
" ization) ) \n",
" \n",
" conv4_block5_add (Add) (None, 14, 14, 1024 0 ['conv4_block4_out[0][0]', \n",
" ) 'conv4_block5_3_bn[0][0]'] \n",
" \n",
" conv4_block5_out (Activation) (None, 14, 14, 1024 0 ['conv4_block5_add[0][0]'] \n",
" ) \n",
" \n",
" conv4_block6_1_conv (Conv2D) (None, 14, 14, 256) 262400 ['conv4_block5_out[0][0]'] \n",
" \n",
" conv4_block6_1_bn (BatchNormal (None, 14, 14, 256) 1024 ['conv4_block6_1_conv[0][0]'] \n",
" ization) \n",
" \n",
" conv4_block6_1_relu (Activatio (None, 14, 14, 256) 0 ['conv4_block6_1_bn[0][0]'] \n",
" n) \n",
" \n",
" conv4_block6_2_conv (Conv2D) (None, 14, 14, 256) 590080 ['conv4_block6_1_relu[0][0]'] \n",
" \n",
" conv4_block6_2_bn (BatchNormal (None, 14, 14, 256) 1024 ['conv4_block6_2_conv[0][0]'] \n",
" ization) \n",
" \n",
" conv4_block6_2_relu (Activatio (None, 14, 14, 256) 0 ['conv4_block6_2_bn[0][0]'] \n",
" n) \n",
" \n",
" conv4_block6_3_conv (Conv2D) (None, 14, 14, 1024 263168 ['conv4_block6_2_relu[0][0]'] \n",
" ) \n",
" \n",
" conv4_block6_3_bn (BatchNormal (None, 14, 14, 1024 4096 ['conv4_block6_3_conv[0][0]'] \n",
" ization) ) \n",
" \n",
" conv4_block6_add (Add) (None, 14, 14, 1024 0 ['conv4_block5_out[0][0]', \n",
" ) 'conv4_block6_3_bn[0][0]'] \n",
" \n",
" conv4_block6_out (Activation) (None, 14, 14, 1024 0 ['conv4_block6_add[0][0]'] \n",
" ) \n",
" \n",
" conv5_block1_1_conv (Conv2D) (None, 7, 7, 512) 524800 ['conv4_block6_out[0][0]'] \n",
" \n",
" conv5_block1_1_bn (BatchNormal (None, 7, 7, 512) 2048 ['conv5_block1_1_conv[0][0]'] \n",
" ization) \n",
" \n",
" conv5_block1_1_relu (Activatio (None, 7, 7, 512) 0 ['conv5_block1_1_bn[0][0]'] \n",
" n) \n",
" \n",
" conv5_block1_2_conv (Conv2D) (None, 7, 7, 512) 2359808 ['conv5_block1_1_relu[0][0]'] \n",
" \n",
" conv5_block1_2_bn (BatchNormal (None, 7, 7, 512) 2048 ['conv5_block1_2_conv[0][0]'] \n",
" ization) \n",
" \n",
" conv5_block1_2_relu (Activatio (None, 7, 7, 512) 0 ['conv5_block1_2_bn[0][0]'] \n",
" n) \n",
" \n",
" conv5_block1_0_conv (Conv2D) (None, 7, 7, 2048) 2099200 ['conv4_block6_out[0][0]'] \n",
" \n",
" conv5_block1_3_conv (Conv2D) (None, 7, 7, 2048) 1050624 ['conv5_block1_2_relu[0][0]'] \n",
" \n",
" conv5_block1_0_bn (BatchNormal (None, 7, 7, 2048) 8192 ['conv5_block1_0_conv[0][0]'] \n",
" ization) \n",
" \n",
" conv5_block1_3_bn (BatchNormal (None, 7, 7, 2048) 8192 ['conv5_block1_3_conv[0][0]'] \n",
" ization) \n",
" \n",
" conv5_block1_add (Add) (None, 7, 7, 2048) 0 ['conv5_block1_0_bn[0][0]', \n",
" 'conv5_block1_3_bn[0][0]'] \n",
" \n",
" conv5_block1_out (Activation) (None, 7, 7, 2048) 0 ['conv5_block1_add[0][0]'] \n",
" \n",
" conv5_block2_1_conv (Conv2D) (None, 7, 7, 512) 1049088 ['conv5_block1_out[0][0]'] \n",
" \n",
" conv5_block2_1_bn (BatchNormal (None, 7, 7, 512) 2048 ['conv5_block2_1_conv[0][0]'] \n",
" ization) \n",
" \n",
" conv5_block2_1_relu (Activatio (None, 7, 7, 512) 0 ['conv5_block2_1_bn[0][0]'] \n",
" n) \n",
" \n",
" conv5_block2_2_conv (Conv2D) (None, 7, 7, 512) 2359808 ['conv5_block2_1_relu[0][0]'] \n",
" \n",
" conv5_block2_2_bn (BatchNormal (None, 7, 7, 512) 2048 ['conv5_block2_2_conv[0][0]'] \n",
" ization) \n",
" \n",
" conv5_block2_2_relu (Activatio (None, 7, 7, 512) 0 ['conv5_block2_2_bn[0][0]'] \n",
" n) \n",
" \n",
" conv5_block2_3_conv (Conv2D) (None, 7, 7, 2048) 1050624 ['conv5_block2_2_relu[0][0]'] \n",
" \n",
" conv5_block2_3_bn (BatchNormal (None, 7, 7, 2048) 8192 ['conv5_block2_3_conv[0][0]'] \n",
" ization) \n",
" \n",
" conv5_block2_add (Add) (None, 7, 7, 2048) 0 ['conv5_block1_out[0][0]', \n",
" 'conv5_block2_3_bn[0][0]'] \n",
" \n",
" conv5_block2_out (Activation) (None, 7, 7, 2048) 0 ['conv5_block2_add[0][0]'] \n",
" \n",
" conv5_block3_1_conv (Conv2D) (None, 7, 7, 512) 1049088 ['conv5_block2_out[0][0]'] \n",
" \n",
" conv5_block3_1_bn (BatchNormal (None, 7, 7, 512) 2048 ['conv5_block3_1_conv[0][0]'] \n",
" ization) \n",
" \n",
" conv5_block3_1_relu (Activatio (None, 7, 7, 512) 0 ['conv5_block3_1_bn[0][0]'] \n",
" n) \n",
" \n",
" conv5_block3_2_conv (Conv2D) (None, 7, 7, 512) 2359808 ['conv5_block3_1_relu[0][0]'] \n",
" \n",
" conv5_block3_2_bn (BatchNormal (None, 7, 7, 512) 2048 ['conv5_block3_2_conv[0][0]'] \n",
" ization) \n",
" \n",
" conv5_block3_2_relu (Activatio (None, 7, 7, 512) 0 ['conv5_block3_2_bn[0][0]'] \n",
" n) \n",
" \n",
" conv5_block3_3_conv (Conv2D) (None, 7, 7, 2048) 1050624 ['conv5_block3_2_relu[0][0]'] \n",
" \n",
" conv5_block3_3_bn (BatchNormal (None, 7, 7, 2048) 8192 ['conv5_block3_3_conv[0][0]'] \n",
" ization) \n",
" \n",
" conv5_block3_add (Add) (None, 7, 7, 2048) 0 ['conv5_block2_out[0][0]', \n",
" 'conv5_block3_3_bn[0][0]'] \n",
" \n",
" conv5_block3_out (Activation) (None, 7, 7, 2048) 0 ['conv5_block3_add[0][0]'] \n",
" \n",
" flatten_3 (Flatten) (None, 100352) 0 ['conv5_block3_out[0][0]'] \n",
" \n",
" dense_7 (Dense) (None, 5) 501765 ['flatten_3[0][0]'] \n",
" \n",
"==================================================================================================\n",
"Total params: 24,089,477\n",
"Trainable params: 501,765\n",
"Non-trainable params: 23,587,712\n",
"__________________________________________________________________________________________________\n"
]
}
],
"source": [
"# create a model object\n",
"model = Model(inputs=resnet.input, outputs=prediction)\n",
"\n",
"# view the structure of the model\n",
"model.summary()"
]
},
{
"cell_type": "code",
"execution_count": 78,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"Epoch 1/25\n"
]
},
{
"name": "stderr",
"output_type": "stream",
"text": [
"/var/folders/3r/c8tg1h051m18qhsdccdysrt40000gn/T/ipykernel_2029/3602206220.py:10: UserWarning: `Model.fit_generator` is deprecated and will be removed in a future version. Please use `Model.fit`, which supports generators.\n",
" r = model.fit_generator(\n"
]
},
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"25/25 [==============================] - 38s 1s/step - loss: 6.4809 - accuracy: 0.1950 - val_loss: 2.9878 - val_accuracy: 0.2344\n",
"Epoch 2/25\n",
"25/25 [==============================] - 35s 1s/step - loss: 2.2159 - accuracy: 0.2338 - val_loss: 2.3206 - val_accuracy: 0.2396\n",
"Epoch 3/25\n",
"25/25 [==============================] - 35s 1s/step - loss: 1.9435 - accuracy: 0.2237 - val_loss: 1.8788 - val_accuracy: 0.2292\n",
"Epoch 4/25\n",
"25/25 [==============================] - 35s 1s/step - loss: 2.1113 - accuracy: 0.2350 - val_loss: 1.5820 - val_accuracy: 0.2604\n",
"Epoch 5/25\n",
"25/25 [==============================] - 36s 1s/step - loss: 1.7911 - accuracy: 0.2975 - val_loss: 1.6257 - val_accuracy: 0.3229\n",
"Epoch 6/25\n",
"25/25 [==============================] - 36s 1s/step - loss: 1.8471 - accuracy: 0.2975 - val_loss: 1.6844 - val_accuracy: 0.3542\n",
"Epoch 7/25\n",
"25/25 [==============================] - 36s 1s/step - loss: 1.7567 - accuracy: 0.3075 - val_loss: 1.4758 - val_accuracy: 0.3281\n",
"Epoch 8/25\n",
"25/25 [==============================] - 36s 1s/step - loss: 1.6541 - accuracy: 0.3550 - val_loss: 1.6412 - val_accuracy: 0.2708\n",
"Epoch 9/25\n",
"25/25 [==============================] - 36s 1s/step - loss: 1.4498 - accuracy: 0.3762 - val_loss: 1.3539 - val_accuracy: 0.3958\n",
"Epoch 10/25\n",
"25/25 [==============================] - 36s 1s/step - loss: 1.5093 - accuracy: 0.3525 - val_loss: 1.4342 - val_accuracy: 0.3385\n",
"Epoch 11/25\n",
"25/25 [==============================] - 36s 1s/step - loss: 1.4125 - accuracy: 0.4062 - val_loss: 1.6245 - val_accuracy: 0.3438\n",
"Epoch 12/25\n",
"25/25 [==============================] - 36s 1s/step - loss: 1.5308 - accuracy: 0.3650 - val_loss: 1.6150 - val_accuracy: 0.2292\n",
"Epoch 13/25\n",
"25/25 [==============================] - 36s 1s/step - loss: 1.4149 - accuracy: 0.4263 - val_loss: 1.5404 - val_accuracy: 0.3906\n",
"Epoch 14/25\n",
"25/25 [==============================] - 36s 1s/step - loss: 1.4894 - accuracy: 0.3925 - val_loss: 1.8275 - val_accuracy: 0.2292\n",
"Epoch 15/25\n",
"25/25 [==============================] - 36s 1s/step - loss: 1.5978 - accuracy: 0.3775 - val_loss: 1.3376 - val_accuracy: 0.4375\n",
"Epoch 16/25\n",
"25/25 [==============================] - 36s 1s/step - loss: 1.4227 - accuracy: 0.4175 - val_loss: 1.5674 - val_accuracy: 0.3958\n",
"Epoch 17/25\n",
"25/25 [==============================] - 36s 1s/step - loss: 1.4758 - accuracy: 0.3837 - val_loss: 1.5279 - val_accuracy: 0.3698\n",
"Epoch 18/25\n",
"25/25 [==============================] - 36s 1s/step - loss: 1.6931 - accuracy: 0.4137 - val_loss: 1.8716 - val_accuracy: 0.2865\n",
"Epoch 19/25\n",
"25/25 [==============================] - 36s 1s/step - loss: 1.3378 - accuracy: 0.4500 - val_loss: 1.4395 - val_accuracy: 0.4271\n",
"Epoch 20/25\n",
"25/25 [==============================] - 36s 1s/step - loss: 1.3173 - accuracy: 0.4825 - val_loss: 1.2535 - val_accuracy: 0.4583\n",
"Epoch 21/25\n",
"25/25 [==============================] - 36s 1s/step - loss: 1.2702 - accuracy: 0.4900 - val_loss: 1.4282 - val_accuracy: 0.4896\n",
"Epoch 22/25\n",
"25/25 [==============================] - 36s 1s/step - loss: 1.2848 - accuracy: 0.4600 - val_loss: 1.3511 - val_accuracy: 0.4115\n",
"Epoch 23/25\n",
"25/25 [==============================] - 36s 1s/step - loss: 1.4002 - accuracy: 0.4487 - val_loss: 1.5821 - val_accuracy: 0.3281\n",
"Epoch 24/25\n",
"25/25 [==============================] - 36s 1s/step - loss: 1.2507 - accuracy: 0.4800 - val_loss: 1.2901 - val_accuracy: 0.4635\n",
"Epoch 25/25\n",
"25/25 [==============================] - 36s 1s/step - loss: 1.3626 - accuracy: 0.4462 - val_loss: 1.5347 - val_accuracy: 0.3906\n"
]
}
],
"source": [
"# tell the model what cost and optimization method to use\n",
"model.compile(\n",
" loss='sparse_categorical_crossentropy',\n",
" optimizer='adam',\n",
" metrics=['accuracy']\n",
")\n",
"\n",
"#train_ds_vgg_sw, test_ds_vgg_sw, validation_ds_vgg_sw\n",
"# fit the model\n",
"r = model.fit_generator(\n",
" train_ds,\n",
" validation_data=validation_ds,\n",
2023-01-07 22:15:23 +01:00
" epochs=25,\n",
2023-01-06 03:02:47 +01:00
" steps_per_epoch=len(train_ds),\n",
" validation_steps=len(validation_ds)\n",
")"
]
},
{
"cell_type": "code",
"execution_count": 79,
"metadata": {},
"outputs": [
{
"data": {
2023-01-07 22:15:23 +01:00
"image/png": "iVBORw0KGgoAAAANSUhEUgAAAi8AAAHHCAYAAAB3K7g2AAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjYuMiwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8o6BhiAAAACXBIWXMAAA9hAAAPYQGoP6dpAACKt0lEQVR4nOzdd3hT5fvH8fdJ0qbp3gtKy55lyBJRAUEBFUERFERAlgNQwIEICrhwi4rgYuhXAUHF8XOwl+whe8gos3uvNPP8/ggN1BZooW0o3K/rytXm5OScO6E0nz7jPIqqqipCCCGEEFWExtUFCCGEEEKUhYQXIYQQQlQpEl6EEEIIUaVIeBFCCCFElSLhRQghhBBVioQXIYQQQlQpEl6EEEIIUaVIeBFCCCFElSLhRQghhBBVioQXIUSJFEVhypQpZX7eiRMnUBSFefPmlXtNQggBEl6EuKbNmzcPRVFQFIW///672OOqqhIVFYWiKNx7770uqFAIISqfhBchqgAPDw/mz59fbPvatWs5c+YMer3eBVUJIYRrSHgRogq4++67Wbx4MVartcj2+fPn07JlS8LDw11U2Y0jLy/P1SUIIc6R8CJEFdCvXz/S0tJYvny5c5vZbOaHH36gf//+JT4nLy+PZ599lqioKPR6PfXr1+e9997jvwvJm0wmxo4dS0hICD4+Ptx3332cOXOmxGOePXuWIUOGEBYWhl6vp3HjxsyZM+eKXlN6ejrPPfccsbGxeHt74+vrS/fu3dm9e3exfQsKCpgyZQr16tXDw8ODiIgIHnjgAY4dO+bcx26389FHHxEbG4uHhwchISF069aN7du3A5cei/Pf8T1TpkxBURQOHDhA//79CQgI4NZbbwVgz549DB48mFq1auHh4UF4eDhDhgwhLS2txPdr6NChREZGotfrqVmzJk8++SRms5njx4+jKAoffvhhsedt3LgRRVFYsGBBWd9WIW4IOlcXIIS4vJiYGNq1a8eCBQvo3r07AH/++SdZWVk8/PDDfPzxx0X2V1WV++67j9WrVzN06FCaN2/O0qVLef755zl79myRD8xhw4bx7bff0r9/f2655RZWrVrFPffcU6yGpKQkbr75ZhRFYdSoUYSEhPDnn38ydOhQsrOzGTNmTJle0/Hjx/n555/p06cPNWvWJCkpic8//5wOHTpw4MABIiMjAbDZbNx7772sXLmShx9+mGeeeYacnByWL1/Ovn37qF27NgBDhw5l3rx5dO/enWHDhmG1Wlm/fj2bN2+mVatWZaqtUJ8+fahbty5vvvmmM/QtX76c48eP89hjjxEeHs7+/fv54osv2L9/P5s3b0ZRFADi4+Np06YNmZmZjBgxggYNGnD27Fl++OEH8vPzqVWrFu3bt+e7775j7NixRc773Xff4ePjQ8+ePa+obiGue6oQ4po1d+5cFVC3bdumzpgxQ/Xx8VHz8/NVVVXVPn36qJ06dVJVVVWjo6PVe+65x/m8n3/+WQXU119/vcjxHnzwQVVRFPXo0aOqqqrqrl27VEB96qmniuzXv39/FVAnT57s3DZ06FA1IiJCTU1NLbLvww8/rPr5+TnriouLUwF17ty5l3xtBQUFqs1mK7ItLi5O1ev16quvvurcNmfOHBVQP/jgg2LHsNvtqqqq6qpVq1RAffrppy+6z6Xq+u9rnTx5sgqo/fr1K7Zv4eu80IIFC1RAXbdunXPbwIEDVY1Go27btu2iNX3++ecqoB48eND5mNlsVoODg9VBgwYVe54QwkG6jYSoIvr27YvRaOT//u//yMnJ4f/+7/8u2mX0xx9/oNVqefrpp4tsf/bZZ1FVlT///NO5H1Bsv/+2oqiqyo8//kiPHj1QVZXU1FTnrWvXrmRlZbFz584yvR69Xo9G4/gVZLPZSEtLw9vbm/r16xc51o8//khwcDCjR48udozCVo4ff/wRRVGYPHnyRfe5Ek888USxbQaDwfl9QUEBqamp3H
zzzQDOuu12Oz///DM9evQosdWnsKa+ffvi4eHBd99953xs6dKlpKamMmDAgCuuW4jrnYQXIaqIkJAQunTpwvz58/npp5+w2Ww8+OCDJe578uRJIiMj8fHxKbK9YcOGzscLv2o0GmfXS6H69esXuZ+SkkJmZiZffPEFISEhRW6PPfYYAMnJyWV6PXa7nQ8//JC6deui1+sJDg4mJCSEPXv2kJWV5dzv2LFj1K9fH53u4r3cx44dIzIyksDAwDLVcDk1a9Ysti09PZ1nnnmGsLAwDAYDISEhzv0K605JSSE7O5smTZpc8vj+/v706NGjyEyy7777jmrVqnHHHXeU4ysR4voiY16EqEL69+/P8OHDSUxMpHv37vj7+1fKee12OwADBgxg0KBBJe7TtGnTMh3zzTff5OWXX2bIkCG89tprBAYGotFoGDNmjPN85eliLTA2m+2iz7mwlaVQ37592bhxI88//zzNmzfH29sbu91Ot27drqjugQMHsnjxYjZu3EhsbCy//vorTz31lLNVSghRnIQXIaqQ+++/n8cff5zNmzfz/fffX3S/6OhoVqxYQU5OTpHWl0OHDjkfL/xqt9udrRuFDh8+XOR4hTORbDYbXbp0KZfX8sMPP9CpUydmz55dZHtmZibBwcHO+7Vr12bLli1YLBbc3NxKPFbt2rVZunQp6enpF219CQgIcB7/QoWtUKWRkZHBypUrmTp1Kq+88opz+5EjR4rsFxISgq+vL/v27bvsMbt160ZISAjfffcdbdu2JT8/n0cffbTUNQlxI5JoL0QV4u3tzaxZs5gyZQo9evS46H533303NpuNGTNmFNn+4YcfoiiKc8ZS4df/zlaaPn16kftarZbevXvz448/lviBnJKSUubXotVqi03bXrx4MWfPni2yrXfv3qSmphZ7LYDz+b1790ZVVaZOnXrRfXx9fQkODmbdunVFHp85c2aZar7wmIX++35pNBp69erFb7/95pyqXVJNADqdjn79+rFo0SLmzZtHbGxsmVuxhLjRSMuLEFXMxbptLtSjRw86derExIkTOXHiBM2aNWPZsmX88ssvjBkzxjnGpXnz5vTr14+ZM2eSlZXFLbfcwsqVKzl69GixY7711lusXr2atm3bMnz4cBo1akR6ejo7d+5kxYoVpKenl+l13Hvvvbz66qs89thj3HLLLezdu5fvvvuOWrVqFdlv4MCBfPPNN4wbN46tW7dy2223kZeXx4oVK3jqqafo2bMnnTp14tFHH+Xjjz/myJEjzi6c9evX06lTJ0aNGgU4poW/9dZbDBs2jFatWrFu3Tr+/fffUtfs6+vL7bffzjvvvIPFYqFatWosW7aMuLi4Yvu++eabLFu2jA4dOjBixAgaNmxIQkICixcv5u+//y7S5Tdw4EA+/vhjVq9ezdtvv12m91GIG5LL5jkJIS7rwqnSl/LfqdKqqqo5OTnq2LFj1cjISNXNzU2tW7eu+u677zqn6RYyGo3q008/rQYFBaleXl5qjx491NOnTxebPqyqqpqUlKSOHDlSjYqKUt3c3NTw8HC1c+fO6hdffOHcpyxTpZ999lk1IiJCNRgMavv27dVNmzapHTp0UDt06FBk3/z8fHXixIlqzZo1ned98MEH1WPHjjn3sVqt6rvvvqs2aNBAdXd3V0NCQtTu3burO3bsKHKcoUOHqn5+fqqPj4/at29fNTk5+aJTpVNSUorVfebMGfX+++9X/f39VT8/P7VPnz5qfHx8ie/XyZMn1YEDB6ohISGqXq9Xa9WqpY4cOVI1mUzFjtu4cWNVo9GoZ86cueT7JoRQVUVV/9P+KYQQotK1aNGCwMBAVq5c6epShLjmyZgXIYRwse3bt7Nr1y4GDhzo6lKEqBKk5UUIIVxk37597Nixg/fff5/U1FSOHz+Oh4eHq8sS4ponLS9CCOEiP/zwA4899hgWi4UFCxZIcBGilKTlRQghhBBVirS8CCGEEKJKkfAihBBCiCqlSl
+kzm63Ex8fj4+Pz1WtHCuEEEKIyqOqKjk5OURGRl7ROl5VOrzEx8cTFRXl6jKEEEIIcQVOnz5N9erVy/y8Kh1eChe
2023-01-06 13:34:05 +01:00
"text/plain": [
"<Figure size 640x480 with 1 Axes>"
]
},
"metadata": {},
"output_type": "display_data"
}
],
"source": [
"# loss\n",
"plt.plot(r.history[\"accuracy\"])\n",
"plt.plot(r.history['val_accuracy'])\n",
"plt.plot(r.history['loss'])\n",
"plt.plot(r.history['val_loss'])\n",
"plt.title(\"Model accuracy\")\n",
"plt.ylabel(\"Value\")\n",
"plt.xlabel(\"Epoch\")\n",
"plt.legend([\"Accuracy\",\"Validation Accuracy\",\"Loss\",\"Validation Loss\"])\n",
"plt.show()\n",
"\n",
"model.save('resnet_1.h5')"
]
},
{
"cell_type": "code",
"execution_count": 80,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"8/8 [==============================] - 9s 1s/step - loss: 1.4028 - accuracy: 0.4141\n"
]
},
{
"data": {
"text/plain": [
"[1.4027552604675293, 0.4140625]"
]
},
"execution_count": 80,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"model.evaluate(test_ds)"
]
}
],
"metadata": {
"kernelspec": {
"display_name": "Python 3",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.10.1 (v3.10.1:2cd268a3a9, Dec 6 2021, 14:28:59) [Clang 13.0.0 (clang-1300.0.29.3)]"
},
"orig_nbformat": 4,
"vscode": {
"interpreter": {
"hash": "aee8b7b246df8f9039afb4144a1f6fd8d2ca17a180786b69acc140d282b71a49"
}
}
},
"nbformat": 4,
"nbformat_minor": 2
}