{
|
||
|
"cells": [
|
||
|
{
|
||
|
"attachments": {},
|
||
|
"cell_type": "markdown",
|
||
|
"metadata": {},
|
||
|
"source": [
|
||
|
"### Aleksandra Jonas, Aleksandra Gronowska, Iwona Christop\n",
|
||
|
"# Zestaw 9-10/zadanie2 - AlexNet, VGG16, ResNet on village"
|
||
|
]
|
||
|
},
|
||
|
{
|
||
|
"attachments": {},
|
||
|
"cell_type": "markdown",
|
||
|
"metadata": {},
|
||
|
"source": [
|
||
|
"### Przygotowanie danych"
|
||
|
]
|
||
|
},
|
||
|
{
|
||
|
"cell_type": "code",
|
||
|
"execution_count": 1,
|
||
|
"metadata": {},
|
||
|
"outputs": [],
|
||
|
"source": [
|
||
|
"from IPython.display import Image, display"
|
||
|
]
|
||
|
},
|
||
|
{
|
||
|
"cell_type": "code",
|
||
|
"execution_count": 3,
|
||
|
"metadata": {},
|
||
|
"outputs": [],
|
||
|
"source": [
|
||
|
"import sys\n",
|
||
|
"import subprocess\n",
|
||
|
"import pkg_resources\n",
|
||
|
"import numpy as np\n",
|
||
|
"\n",
|
||
|
"required = { 'scikit-image'}\n",
|
||
|
"installed = {pkg.key for pkg in pkg_resources.working_set}\n",
|
||
|
"missing = required - installed\n",
|
||
|
"\n",
|
||
|
"if missing: \n",
|
||
|
" python = sys.executable\n",
|
||
|
" subprocess.check_call([python, '-m', 'pip', 'install', *missing], stdout=subprocess.DEVNULL)\n",
|
||
|
"\n",
|
||
|
"def load_data(input_dir, img_size):\n",
|
||
|
" import numpy as np\n",
|
||
|
" import pandas as pd\n",
|
||
|
" import os\n",
|
||
|
" from skimage.io import imread\n",
|
||
|
" import cv2 as cv\n",
|
||
|
" from pathlib import Path\n",
|
||
|
" import random\n",
|
||
|
" from shutil import copyfile, rmtree\n",
|
||
|
" import json\n",
|
||
|
"\n",
|
||
|
" import seaborn as sns\n",
|
||
|
" import matplotlib.pyplot as plt\n",
|
||
|
"\n",
|
||
|
" import matplotlib\n",
|
||
|
" \n",
|
||
|
" image_dir = Path(input_dir)\n",
|
||
|
" categories_name = []\n",
|
||
|
" for file in os.listdir(image_dir):\n",
|
||
|
" d = os.path.join(image_dir, file)\n",
|
||
|
" if os.path.isdir(d):\n",
|
||
|
" categories_name.append(file)\n",
|
||
|
"\n",
|
||
|
" folders = [directory for directory in image_dir.iterdir() if directory.is_dir()]\n",
|
||
|
" \n",
|
||
|
" ds_img = []\n",
|
||
|
" categories_count=[]\n",
|
||
|
" labels=[]\n",
|
||
|
" for i, direc in enumerate(folders):\n",
|
||
|
" count = 0\n",
|
||
|
" for obj in direc.iterdir():\n",
|
||
|
" if os.path.isfile(obj) and os.path.basename(os.path.normpath(obj)) != 'desktop.ini':\n",
|
||
|
" labels.append(os.path.basename(os.path.normpath(direc)))\n",
|
||
|
" count += 1\n",
|
||
|
" img = imread(obj)#zwraca ndarry postaci xSize x ySize x colorDepth\n",
|
||
|
" img = img[:, :, :3]\n",
|
||
|
" img = cv.resize(img, img_size, interpolation=cv.INTER_AREA)# zwraca ndarray\n",
|
||
|
" img = img / 255 #normalizacja\n",
|
||
|
" ds_img.append(img)\n",
|
||
|
" categories_count.append(count)\n",
|
||
|
" X={}\n",
|
||
|
" X[\"values\"] = np.array(ds_img)\n",
|
||
|
" X[\"categories_name\"] = categories_name\n",
|
||
|
" X[\"categories_count\"] = categories_count\n",
|
||
|
" X[\"labels\"]=labels\n",
|
||
|
" return X"
|
||
|
]
|
||
|
},
|
||
|
{
|
||
|
"cell_type": "code",
|
||
|
"execution_count": 3,
|
||
|
"metadata": {},
|
||
|
"outputs": [],
|
||
|
"source": [
|
||
|
"def get_run_logdir(root_logdir):\n",
|
||
|
" import os\n",
|
||
|
" import time\n",
|
||
|
"\n",
|
||
|
" run_id = time.strftime(\"run_%Y_%m_%d-%H_%M_%S\")\n",
|
||
|
" return os.path.join(root_logdir, run_id)"
|
||
|
]
|
||
|
},
|
||
|
{
|
||
|
"cell_type": "code",
|
||
|
"execution_count": 4,
|
||
|
"metadata": {},
|
||
|
"outputs": [],
|
||
|
"source": [
|
||
|
"def diagram_setup(model_name):\n",
|
||
|
" from tensorflow import keras\n",
|
||
|
" import os\n",
|
||
|
" \n",
|
||
|
" root_logdir = os.path.join(os.curdir, f\"logs\\\\fit\\\\{model_name}\\\\\")\n",
|
||
|
" \n",
|
||
|
" run_logdir = get_run_logdir(root_logdir)\n",
|
||
|
" tensorboard_cb = keras.callbacks.TensorBoard(run_logdir)"
|
||
|
]
|
||
|
},
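{
"attachments": {},
"cell_type": "markdown",
"metadata": {},
"source": [
"`diagram_setup` returns the TensorBoard callback, so it can be added to the callbacks of `fit`. Below is a minimal usage sketch; the run name `alexnet` is only an example and is not used elsewhere in this notebook."
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Minimal usage sketch: build a TensorBoard callback for an example run name.\n",
"# Add it to the callbacks list of model.fit(...) to log training curves.\n",
"tb_cb = diagram_setup(\"alexnet\")\n",
"print(tb_cb)"
]
},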
|
||
|
{
|
||
|
"cell_type": "code",
|
||
|
"execution_count": 2,
|
||
|
"metadata": {},
|
||
|
"outputs": [],
|
||
|
"source": [
|
||
|
"def prepare_data(path, img_size, test_size, val_size):\n",
|
||
|
" from sklearn.model_selection import train_test_split\n",
|
||
|
" from sklearn.preprocessing import LabelEncoder\n",
|
||
|
" import tensorflow as tf\n",
|
||
|
"\n",
|
||
|
" data = load_data(path, img_size)\n",
|
||
|
" values = data['values']\n",
|
||
|
" labels = data['labels']\n",
|
||
|
"\n",
|
||
|
" X_train, X_test, y_train, y_test = train_test_split(values, labels, test_size=test_size, random_state=42)\n",
|
||
|
" X_train, X_validate, y_train, y_validate = train_test_split(X_train, y_train, test_size=val_size, random_state=42)\n",
|
||
|
"\n",
|
||
|
" class_le = LabelEncoder()\n",
|
||
|
" y_train_enc = class_le.fit_transform(y_train)\n",
|
||
|
" y_validate_enc = class_le.fit_transform(y_validate)\n",
|
||
|
" y_test_enc = class_le.fit_transform(y_test)\n",
|
||
|
"\n",
|
||
|
" train_ds = tf.data.Dataset.from_tensor_slices((X_train, y_train_enc))\n",
|
||
|
" validation_ds = tf.data.Dataset.from_tensor_slices((X_validate, y_validate_enc))\n",
|
||
|
" test_ds = tf.data.Dataset.from_tensor_slices((X_test, y_test_enc))\n",
|
||
|
"\n",
|
||
|
" train_ds_size = tf.data.experimental.cardinality(train_ds).numpy()\n",
|
||
|
" test_ds_size = tf.data.experimental.cardinality(test_ds).numpy()\n",
|
||
|
" validation_ds_size = tf.data.experimental.cardinality(validation_ds).numpy()\n",
|
||
|
"\n",
|
||
|
" #Rozmiary zbiorów\n",
|
||
|
" print(\"Training:\", train_ds_size)\n",
|
||
|
" print(\"Test:\", test_ds_size)\n",
|
||
|
" print(\"Validation:\", validation_ds_size)\n",
|
||
|
"\n",
|
||
|
" # Mieszanie zriorów\n",
|
||
|
" train_ds = (train_ds.shuffle(buffer_size=train_ds_size).batch(batch_size=32, drop_remainder=True))\n",
|
||
|
" test_ds = (test_ds.shuffle(buffer_size=train_ds_size).batch(batch_size=32, drop_remainder=True))\n",
|
||
|
" validation_ds = (validation_ds.shuffle(buffer_size=train_ds_size).batch(batch_size=32, drop_remainder=True))\n",
|
||
|
"\n",
|
||
|
" return train_ds, test_ds, validation_ds\n",
|
||
|
"\n"
|
||
|
]
|
||
|
},
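{
"attachments": {},
"cell_type": "markdown",
"metadata": {},
"source": [
"An optional refinement (a sketch, not used in the runs below): `tf.data` pipelines usually benefit from `cache()` and `prefetch()` after batching, which overlaps preprocessing with training."
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Optional sketch: cache and prefetch the batched datasets returned by prepare_data.\n",
"# tf.data.AUTOTUNE lets TensorFlow choose the prefetch buffer size.\n",
"import tensorflow as tf\n",
"\n",
"def optimize_pipeline(ds):\n",
"    return ds.cache().prefetch(buffer_size=tf.data.AUTOTUNE)"
]
},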
|
||
|
{
|
||
|
"attachments": {},
|
||
|
"cell_type": "markdown",
|
||
|
"metadata": {},
|
||
|
"source": [
|
||
|
"# AlexNet"
|
||
|
]
|
||
|
},
|
||
|
{
|
||
|
"cell_type": "code",
|
||
|
"execution_count": 6,
|
||
|
"metadata": {},
|
||
|
"outputs": [
|
||
|
{
|
||
|
"name": "stderr",
|
||
|
"output_type": "stream",
|
||
|
"text": [
|
||
|
"WARNING:absl:`lr` is deprecated, please use `learning_rate` instead, or use the legacy optimizer, e.g.,tf.keras.optimizers.legacy.SGD.\n"
|
||
|
]
|
||
|
},
|
||
|
{
|
||
|
"name": "stdout",
|
||
|
"output_type": "stream",
|
||
|
"text": [
|
||
|
"Model: \"sequential\"\n",
|
||
|
"_________________________________________________________________\n",
|
||
|
" Layer (type) Output Shape Param # \n",
|
||
|
"=================================================================\n",
|
||
|
" conv2d (Conv2D) (None, 55, 55, 96) 34944 \n",
|
||
|
" \n",
|
||
|
" max_pooling2d (MaxPooling2D (None, 27, 27, 96) 0 \n",
|
||
|
" ) \n",
|
||
|
" \n",
|
||
|
" conv2d_1 (Conv2D) (None, 27, 27, 256) 614656 \n",
|
||
|
" \n",
|
||
|
" max_pooling2d_1 (MaxPooling (None, 13, 13, 256) 0 \n",
|
||
|
" 2D) \n",
|
||
|
" \n",
|
||
|
" conv2d_2 (Conv2D) (None, 13, 13, 384) 885120 \n",
|
||
|
" \n",
|
||
|
" conv2d_3 (Conv2D) (None, 13, 13, 384) 1327488 \n",
|
||
|
" \n",
|
||
|
" conv2d_4 (Conv2D) (None, 13, 13, 256) 884992 \n",
|
||
|
" \n",
|
||
|
" max_pooling2d_2 (MaxPooling (None, 6, 6, 256) 0 \n",
|
||
|
" 2D) \n",
|
||
|
" \n",
|
||
|
" flatten (Flatten) (None, 9216) 0 \n",
|
||
|
" \n",
|
||
|
" dense (Dense) (None, 4096) 37752832 \n",
|
||
|
" \n",
|
||
|
" dense_1 (Dense) (None, 4096) 16781312 \n",
|
||
|
" \n",
|
||
|
" dense_2 (Dense) (None, 12) 49164 \n",
|
||
|
" \n",
|
||
|
"=================================================================\n",
|
||
|
"Total params: 58,330,508\n",
|
||
|
"Trainable params: 58,330,508\n",
|
||
|
"Non-trainable params: 0\n",
|
||
|
"_________________________________________________________________\n"
|
||
|
]
|
||
|
}
|
||
|
],
|
||
|
"source": [
|
||
|
"from tensorflow import keras\n",
|
||
|
"import tensorflow as tf\n",
|
||
|
"import os\n",
|
||
|
"import time\n",
|
||
|
"\n",
|
||
|
"model = keras.models.Sequential([\n",
|
||
|
" keras.layers.Conv2D(filters=96, kernel_size=(11,11), strides=(4,4), activation='relu', input_shape=(227,227,3)),\n",
|
||
|
" keras.layers.MaxPool2D(pool_size=(3,3), strides=(2,2)),\n",
|
||
|
" keras.layers.Conv2D(filters=256, kernel_size=(5,5), strides=(1,1), activation='relu', padding=\"same\"),\n",
|
||
|
" keras.layers.MaxPool2D(pool_size=(3,3), strides=(2,2)),\n",
|
||
|
" keras.layers.Conv2D(filters=384, kernel_size=(3,3), strides=(1,1), activation='relu', padding=\"same\"),\n",
|
||
|
" keras.layers.Conv2D(filters=384, kernel_size=(3,3), strides=(1,1), activation='relu', padding=\"same\"),\n",
|
||
|
" keras.layers.Conv2D(filters=256, kernel_size=(3,3), strides=(1,1), activation='relu', padding=\"same\"),\n",
|
||
|
" keras.layers.MaxPool2D(pool_size=(3,3), strides=(2,2)),\n",
|
||
|
" keras.layers.Flatten(),\n",
|
||
|
" keras.layers.Dense(4096, activation='relu'),\n",
|
||
|
" keras.layers.Dense(4096, activation='relu'),\n",
|
||
|
" keras.layers.Dense(12, activation='softmax')\n",
|
||
|
"])\n",
|
||
|
"\n",
|
||
|
"model.compile(loss='sparse_categorical_crossentropy', optimizer=tf.optimizers.SGD(lr=.001), metrics=['accuracy'])\n",
|
||
|
"model.summary()"
|
||
|
]
|
||
|
},
|
||
|
{
|
||
|
"cell_type": "code",
|
||
|
"execution_count": 7,
|
||
|
"metadata": {},
|
||
|
"outputs": [
|
||
|
{
|
||
|
"name": "stdout",
|
||
|
"output_type": "stream",
|
||
|
"text": [
|
||
|
"Training: 7430\n",
|
||
|
"Test: 2323\n",
|
||
|
"Validation: 1858\n"
|
||
|
]
|
||
|
}
|
||
|
],
|
||
|
"source": [
|
||
|
"train_ds_a, test_ds_a, val_ds_a = prepare_data(\"./plantvillage/color\", (227, 227), 0.2, 0.2)"
|
||
|
]
|
||
|
},
|
||
|
{
|
||
|
"cell_type": "code",
|
||
|
"execution_count": 8,
|
||
|
"metadata": {},
|
||
|
"outputs": [
|
||
|
{
|
||
|
"name": "stdout",
|
||
|
"output_type": "stream",
|
||
|
"text": [
|
||
|
"WARNING:tensorflow:`period` argument is deprecated. Please use `save_freq` to specify the frequency in number of batches seen.\n"
|
||
|
]
|
||
|
},
|
||
|
{
|
||
|
"name": "stderr",
|
||
|
"output_type": "stream",
|
||
|
"text": [
|
||
|
"WARNING:tensorflow:`period` argument is deprecated. Please use `save_freq` to specify the frequency in number of batches seen.\n",
|
||
|
"/var/folders/_h/ljwht4gd7lb99rm1hm78h7_00000gn/T/ipykernel_23432/2397086753.py:6: UserWarning: `Model.fit_generator` is deprecated and will be removed in a future version. Please use `Model.fit`, which supports generators.\n",
|
||
|
" alex = model.fit_generator(\n"
|
||
|
]
|
||
|
},
|
||
|
{
|
||
|
"name": "stdout",
|
||
|
"output_type": "stream",
|
||
|
"text": [
|
||
|
"Epoch 1/25\n"
|
||
|
]
|
||
|
},
|
||
|
{
|
||
|
"name": "stderr",
|
||
|
"output_type": "stream",
|
||
|
"text": [
|
||
|
"2023-01-06 20:01:38.622228: W tensorflow/tsl/platform/profile_utils/cpu_utils.cc:128] Failed to get CPU frequency: 0 Hz\n"
|
||
|
]
|
||
|
},
|
||
|
{
|
||
|
"name": "stdout",
|
||
|
"output_type": "stream",
|
||
|
"text": [
|
||
|
"232/232 [==============================] - ETA: 0s - loss: 2.1314 - accuracy: 0.2501\n",
|
||
|
"Epoch 1: val_accuracy improved from -inf to 0.44235, saving model to alex_2.h5\n",
|
||
|
"232/232 [==============================] - 223s 956ms/step - loss: 2.1314 - accuracy: 0.2501 - val_loss: 1.6157 - val_accuracy: 0.4423\n",
|
||
|
"Epoch 2/25\n",
|
||
|
"232/232 [==============================] - ETA: 0s - loss: 1.3779 - accuracy: 0.5031\n",
|
||
|
"Epoch 2: val_accuracy improved from 0.44235 to 0.60614, saving model to alex_2.h5\n",
|
||
|
"232/232 [==============================] - 264s 1s/step - loss: 1.3779 - accuracy: 0.5031 - val_loss: 1.1473 - val_accuracy: 0.6061\n",
|
||
|
"Epoch 3/25\n",
|
||
|
"232/232 [==============================] - ETA: 0s - loss: 1.0262 - accuracy: 0.6358\n",
|
||
|
"Epoch 3: val_accuracy improved from 0.60614 to 0.67726, saving model to alex_2.h5\n",
|
||
|
"232/232 [==============================] - 266s 1s/step - loss: 1.0262 - accuracy: 0.6358 - val_loss: 0.9024 - val_accuracy: 0.6773\n",
|
||
|
"Epoch 4/25\n",
|
||
|
"232/232 [==============================] - ETA: 0s - loss: 0.7844 - accuracy: 0.7259\n",
|
||
|
"Epoch 4: val_accuracy improved from 0.67726 to 0.72252, saving model to alex_2.h5\n",
|
||
|
"232/232 [==============================] - 267s 1s/step - loss: 0.7844 - accuracy: 0.7259 - val_loss: 0.7740 - val_accuracy: 0.7225\n",
|
||
|
"Epoch 5/25\n",
|
||
|
"232/232 [==============================] - ETA: 0s - loss: 0.5837 - accuracy: 0.7967\n",
|
||
|
"Epoch 5: val_accuracy improved from 0.72252 to 0.79472, saving model to alex_2.h5\n",
|
||
|
"232/232 [==============================] - 269s 1s/step - loss: 0.5837 - accuracy: 0.7967 - val_loss: 0.5986 - val_accuracy: 0.7947\n",
|
||
|
"Epoch 6/25\n",
|
||
|
"232/232 [==============================] - ETA: 0s - loss: 0.4601 - accuracy: 0.8393\n",
|
||
|
"Epoch 6: val_accuracy did not improve from 0.79472\n",
|
||
|
"232/232 [==============================] - 273s 1s/step - loss: 0.4601 - accuracy: 0.8393 - val_loss: 0.6495 - val_accuracy: 0.7769\n",
|
||
|
"Epoch 7/25\n",
|
||
|
"232/232 [==============================] - ETA: 0s - loss: 0.3825 - accuracy: 0.8679\n",
|
||
|
"Epoch 7: val_accuracy improved from 0.79472 to 0.85938, saving model to alex_2.h5\n",
|
||
|
"232/232 [==============================] - 274s 1s/step - loss: 0.3825 - accuracy: 0.8679 - val_loss: 0.4127 - val_accuracy: 0.8594\n",
|
||
|
"Epoch 8/25\n",
|
||
|
"232/232 [==============================] - ETA: 0s - loss: 0.2899 - accuracy: 0.8978\n",
|
||
|
"Epoch 8: val_accuracy did not improve from 0.85938\n",
|
||
|
"232/232 [==============================] - 273s 1s/step - loss: 0.2899 - accuracy: 0.8978 - val_loss: 0.4238 - val_accuracy: 0.8540\n",
|
||
|
"Epoch 9/25\n",
|
||
|
"232/232 [==============================] - ETA: 0s - loss: 0.2615 - accuracy: 0.9133\n",
|
||
|
"Epoch 9: val_accuracy improved from 0.85938 to 0.87338, saving model to alex_2.h5\n",
|
||
|
"232/232 [==============================] - 270s 1s/step - loss: 0.2615 - accuracy: 0.9133 - val_loss: 0.3714 - val_accuracy: 0.8734\n",
|
||
|
"Epoch 10/25\n",
|
||
|
"232/232 [==============================] - ETA: 0s - loss: 0.2115 - accuracy: 0.9247\n",
|
||
|
"Epoch 10: val_accuracy improved from 0.87338 to 0.87500, saving model to alex_2.h5\n",
|
||
|
"232/232 [==============================] - 269s 1s/step - loss: 0.2115 - accuracy: 0.9247 - val_loss: 0.3794 - val_accuracy: 0.8750\n",
|
||
|
"Epoch 11/25\n",
|
||
|
"232/232 [==============================] - ETA: 0s - loss: 0.1971 - accuracy: 0.9349\n",
|
||
|
"Epoch 11: val_accuracy did not improve from 0.87500\n",
|
||
|
"232/232 [==============================] - 270s 1s/step - loss: 0.1971 - accuracy: 0.9349 - val_loss: 0.4570 - val_accuracy: 0.8567\n",
|
||
|
"Epoch 12/25\n",
|
||
|
"232/232 [==============================] - ETA: 0s - loss: 0.1495 - accuracy: 0.9500\n",
|
||
|
"Epoch 12: val_accuracy improved from 0.87500 to 0.87662, saving model to alex_2.h5\n",
|
||
|
"232/232 [==============================] - 270s 1s/step - loss: 0.1495 - accuracy: 0.9500 - val_loss: 0.4067 - val_accuracy: 0.8766\n",
|
||
|
"Epoch 13/25\n",
|
||
|
"232/232 [==============================] - ETA: 0s - loss: 0.1206 - accuracy: 0.9634\n",
|
||
|
"Epoch 13: val_accuracy improved from 0.87662 to 0.88147, saving model to alex_2.h5\n",
|
||
|
"232/232 [==============================] - 269s 1s/step - loss: 0.1206 - accuracy: 0.9634 - val_loss: 0.4036 - val_accuracy: 0.8815\n",
|
||
|
"Epoch 14/25\n",
|
||
|
"232/232 [==============================] - ETA: 0s - loss: 0.1667 - accuracy: 0.9593\n",
|
||
|
"Epoch 14: val_accuracy did not improve from 0.88147\n",
|
||
|
"232/232 [==============================] - 272s 1s/step - loss: 0.1667 - accuracy: 0.9593 - val_loss: 0.5347 - val_accuracy: 0.8292\n",
|
||
|
"Epoch 15/25\n",
|
||
|
"232/232 [==============================] - ETA: 0s - loss: 0.1315 - accuracy: 0.9588\n",
|
||
|
"Epoch 15: val_accuracy did not improve from 0.88147\n",
|
||
|
"232/232 [==============================] - 277s 1s/step - loss: 0.1315 - accuracy: 0.9588 - val_loss: 0.7335 - val_accuracy: 0.8163\n",
|
||
|
"Epoch 16/25\n",
|
||
|
"232/232 [==============================] - ETA: 0s - loss: 0.0950 - accuracy: 0.9731\n",
|
||
|
"Epoch 16: val_accuracy improved from 0.88147 to 0.88308, saving model to alex_2.h5\n",
|
||
|
"232/232 [==============================] - 272s 1s/step - loss: 0.0950 - accuracy: 0.9731 - val_loss: 0.4444 - val_accuracy: 0.8831\n",
|
||
|
"Epoch 17/25\n",
|
||
|
"232/232 [==============================] - ETA: 0s - loss: 0.0566 - accuracy: 0.9846\n",
|
||
|
"Epoch 17: val_accuracy did not improve from 0.88308\n",
|
||
|
"232/232 [==============================] - 273s 1s/step - loss: 0.0566 - accuracy: 0.9846 - val_loss: 0.6635 - val_accuracy: 0.8287\n",
|
||
|
"Epoch 18/25\n",
|
||
|
"232/232 [==============================] - ETA: 0s - loss: 0.0443 - accuracy: 0.9880\n",
|
||
|
"Epoch 18: val_accuracy improved from 0.88308 to 0.88631, saving model to alex_2.h5\n",
|
||
|
"232/232 [==============================] - 273s 1s/step - loss: 0.0443 - accuracy: 0.9880 - val_loss: 0.4852 - val_accuracy: 0.8863\n",
|
||
|
"Epoch 19/25\n",
|
||
|
"232/232 [==============================] - ETA: 0s - loss: 0.0101 - accuracy: 0.9981\n",
|
||
|
"Epoch 19: val_accuracy improved from 0.88631 to 0.90248, saving model to alex_2.h5\n",
|
||
|
"232/232 [==============================] - 274s 1s/step - loss: 0.0101 - accuracy: 0.9981 - val_loss: 0.4459 - val_accuracy: 0.9025\n",
|
||
|
"Epoch 20/25\n",
|
||
|
"232/232 [==============================] - ETA: 0s - loss: 0.0031 - accuracy: 0.9995\n",
|
||
|
"Epoch 20: val_accuracy improved from 0.90248 to 0.90787, saving model to alex_2.h5\n",
|
||
|
"232/232 [==============================] - 274s 1s/step - loss: 0.0031 - accuracy: 0.9995 - val_loss: 0.4574 - val_accuracy: 0.9079\n",
|
||
|
"Epoch 21/25\n",
|
||
|
"232/232 [==============================] - ETA: 0s - loss: 0.0010 - accuracy: 1.0000\n",
|
||
|
"Epoch 21: val_accuracy did not improve from 0.90787\n",
|
||
|
"232/232 [==============================] - 278s 1s/step - loss: 0.0010 - accuracy: 1.0000 - val_loss: 0.4781 - val_accuracy: 0.9073\n",
|
||
|
"Epoch 22/25\n",
|
||
|
"232/232 [==============================] - ETA: 0s - loss: 7.0759e-04 - accuracy: 1.0000\n",
|
||
|
"Epoch 22: val_accuracy did not improve from 0.90787\n",
|
||
|
"232/232 [==============================] - 270s 1s/step - loss: 7.0759e-04 - accuracy: 1.0000 - val_loss: 0.4991 - val_accuracy: 0.9062\n",
|
||
|
"Epoch 23/25\n",
|
||
|
"232/232 [==============================] - ETA: 0s - loss: 5.5237e-04 - accuracy: 1.0000\n",
|
||
|
"Epoch 23: val_accuracy did not improve from 0.90787\n",
|
||
|
"232/232 [==============================] - 270s 1s/step - loss: 5.5237e-04 - accuracy: 1.0000 - val_loss: 0.5114 - val_accuracy: 0.9073\n",
|
||
|
"Epoch 24/25\n",
|
||
|
"232/232 [==============================] - ETA: 0s - loss: 4.5192e-04 - accuracy: 1.0000\n",
|
||
|
"Epoch 24: val_accuracy did not improve from 0.90787\n",
|
||
|
"232/232 [==============================] - 268s 1s/step - loss: 4.5192e-04 - accuracy: 1.0000 - val_loss: 0.5210 - val_accuracy: 0.9052\n",
|
||
|
"Epoch 25/25\n",
|
||
|
"232/232 [==============================] - ETA: 0s - loss: 3.7889e-04 - accuracy: 1.0000\n",
|
||
|
"Epoch 25: val_accuracy did not improve from 0.90787\n",
|
||
|
"232/232 [==============================] - 268s 1s/step - loss: 3.7889e-04 - accuracy: 1.0000 - val_loss: 0.5333 - val_accuracy: 0.9057\n"
|
||
|
]
|
||
|
}
|
||
|
],
|
||
|
"source": [
|
||
|
"from keras.callbacks import ModelCheckpoint, EarlyStopping\n",
|
||
|
"\n",
|
||
|
"checkpoint = ModelCheckpoint(\"alex_2.h5\", monitor='val_accuracy', verbose=1, save_best_only=True, save_weights_only=False, mode='auto', period=1)\n",
|
||
|
"early = EarlyStopping(monitor='val_accuracy', min_delta=0, patience=20, verbose=1, mode='auto')\n",
|
||
|
"\n",
|
||
|
"alex = model.fit_generator(\n",
|
||
|
" steps_per_epoch=len(train_ds_a), \n",
|
||
|
" generator=train_ds_a, \n",
|
||
|
" validation_data= val_ds_a, \n",
|
||
|
" validation_steps=len(val_ds_a), \n",
|
||
|
" epochs=25, \n",
|
||
|
" callbacks=[checkpoint,early])"
|
||
|
]
|
||
|
},
|
||
|
{
|
||
|
"cell_type": "code",
|
||
|
"execution_count": 9,
|
||
|
"metadata": {},
|
||
|
"outputs": [
|
||
|
{
|
||
|
"data": {
|
||
|
"image/png": "iVBORw0KGgoAAAANSUhEUgAAAjwAAAHHCAYAAAC7soLdAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjYuMiwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8o6BhiAAAACXBIWXMAAA9hAAAPYQGoP6dpAACaUUlEQVR4nOzdd3hT1RvA8W+StuneuxTKnmVvlCHIEBCQrQjIcjBERRFxgFtxoKK4GD9UpgiKKHvvjaBsCgVauvdKmtzfH2kDhRZaaJOO9/M8eZrcnHvvm1rp23Pec45KURQFIYQQQohyTG3tAIQQQgghSpokPEIIIYQo9yThEUIIIUS5JwmPEEIIIco9SXiEEEIIUe5JwiOEEEKIck8SHiGEEEKUe5LwCCGEEKLck4RHCCGEEOWeJDxCiGKjUqmYMWNGkc+7dOkSKpWKhQsXFntMQggBkvAIUe4sXLgQlUqFSqVi165dt72vKArBwcGoVCp69eplhQiFEMLyJOERopyyt7dn8eLFtx3fvn07V69eRavVWiEqIYSwDkl4hCinHnnkEVasWEF2dnae44sXL6ZZs2b4+/tbKbKKIy0tzdohCCFySMIjRDk1dOhQ4uLi2Lhxo/mYTqfj119/5fHHH8/3nLS0NF566SWCg4PRarXUrl2bTz75BEVR8rTLysrihRdewMfHBxcXFx599FGuXr2a7zWvXbvGqFGj8PPzQ6vVUr9+febPn39Pnyk+Pp4pU6YQGhqKs7Mzrq6u9OjRg+PHj9/WNjMzkxkzZlCrVi3s7e0JCAjgscce48KFC+Y2RqORL774gtDQUOzt7fHx8aF79+4cOnQIuHNt0a31SjNmzEClUvHff//x+OOP4+HhwQMPPADAP//8w8iRI6lWrRr29vb4+/szatQo4uLi8v1+jR49msDAQLRaLVWrVuXZZ59Fp9Nx8eJFVCoVn3/++W3n7dmzB5VKxZIlS4r6bRWiQrCxdgBCiJIREhJCmzZtWLJkCT169ADg77//JikpiSFDhvDll1/maa8oCo8++ihbt25l9OjRNG7cmPXr1/Pyyy9z7dq1PL9kx4wZw88//8zjjz9O27Zt2bJlCz179rwthqioKFq3bo1KpWLChAn4+Pjw999/M3r0aJKTk5k8eXKRPtPFixdZvXo1AwcOpGrVqkRFRfHdd9/RoUMH/vvvPwIDAwEwGAz06tWLzZs3M2TIEJ5//nlSUlLYuHEjJ0+epHr16gCMHj2ahQsX0qNHD8aMGUN2djY7d+5k3759NG/evEix5Ro4cCA1a9bk/fffNyeKGzdu5OLFizz11FP4+/vz77//8v333/Pvv/+yb98+VCoVABEREbRs2ZLExETGjRtHnTp1uHbtGr/++ivp6elUq1aNdu3a8csvv/DCCy/kue8vv/yCi4sLffr0uae4hSj3FCFEubJgwQIFUA4ePKjMmTNHcXFxUdLT0xVFUZSBAwcqnTp1UhRFUapUqaL07NnTfN7q1asVQHn33XfzXG/AgAGKSqVSzp8/ryiKohw7dkwBlOeeey5Pu8cff1wBlLfeest8bPTo0UpAQIASGxubp+2QIUMUNzc3c1xhYWEKoCxYsOCOny0zM1MxGAx5joWFhSlarVZ5++23zcfmz5+vAMpnn3122zWMRqOiKIqyZcsWBVAmTZpUYJs7xXXrZ33rrbcUQBk6dOhtbXM/582WLFmiAMqOHTvMx4YPH66o1Wrl4MGDBcb03XffKYBy6tQp83s6nU7x9vZWRowYcdt5QggTGdISohwbNGgQGRkZ/Pnnn6SkpPDnn38WOJz1119/odFomDRpUp7jL730Eoqi8Pfff5vbAbe1u7W3RlEUVq5cSe/evVEUhdjYWPOjW7duJCUlceTIkSJ9Hq1Wi1pt+mfLYDAQFxeHs7MztWvXznOtlStX4u3tzcSJE2+7Rm5vysqVK1GpVLz11lsFtrkXzzzzzG3HHBwczM8zMzOJjY2ldevWAOa4jUYjq1evpnfv3vn2LuXGNGjQIOzt7fnll1/M761fv57Y2FiGDRt2z3ELUd5JwiNEOebj40OXLl1YvHgxv/32GwaDgQEDBuTb9vLlywQGBuLi4pLneN26dc3v535Vq9XmYaFctWvXzvM6JiaGxMREvv/+e3x8fPI8nnrqKQCio6OL9HmMRiOff/45NWvWRKvV4u3tjY+PD//88w9JSUnmdhcuXKB27drY2BQ8an/hwgUCAwPx9PQsUgx3U7Vq1duOxcfH8/zzz+Pn54eDgwM+Pj7mdrlxx8TEkJycTIMGDe54fXd3d3r37p1nBt4vv/xCUFAQDz30UDF+EiHKF6nhEaKce/zxxxk7dizXr1+nR48euLu7W+S+RqMRgGHDhjFixIh82zRs2LBI13z//fd54403GDVqFO+88w6enp6o1WomT55svl9xKqinx2AwFHjOzb05uQYNGsSePXt4+eWXady4Mc7OzhiNRrp3735PcQ8fPpwVK1awZ88eQkND+eOPP3juuefMvV9CiNtJwiNEOdevXz+efvpp9u3bx7JlywpsV6VKFTZt2kRKSkqeXp7Tp0+b38/9ajQazb0ouc6cOZPnerkzuAwGA126dCmWz/Lrr7/SqVMn5s2bl+d4YmIi3t7e5tfVq1dn//796PV6bG1t871W9erVWb9+PfHx8QX28nh4eJivf7Pc3q7CSEhIYPPmzcycOZM333zTfPzcuXN52vn4+ODq6srJkyfves3u3bvj4+PDL7/8QqtWrUhPT+fJJ58sdExCVETy54AQ5ZyzszNz585lxowZ9O7du8B2jzzyCAaDgTlz5uQ5/vnnn6NSqcwzvXK/3jrLa/bs2XleazQa+vfvz8qVK/P9JR4TE1Pkz6LRaG6bIr9ixQquXbuW51j//v2JjY297bMA5vP79++PoijMnDmzwDaurq54e3uzY8eOPO9/8803RYr55mvmuvX7pVar6du3L2vWrDFPi88vJgAbGxuGDh3K8uXLWbhwIaGhoUXuLROiopEeHiEqgIKGlG7Wu3dvOnXqxPTp07l06RKNGjViw4YN/P7770yePNlcs9O4cWOGDh3KN998Q1JSEm3btmXz5s2cP3/+tmt++OGHbN26lVatWjF27Fjq1atHfHw8R44cYdOmTcTHxxfpc/Tq1Yu3336bp556irZt23LixAl++eUXqlWrlqfd8OHDWbRoES+++CIHDhzgwQcfJC0tjU2bNvHcc8/Rp08fOnXqxJNPPsmXX37JuXPnzMNLO3fupFOnTkyYMAEwTcH/8MMPGTNmDM2bN2fHjh2cPXu20DG7urrSvn17Pv74Y/R6PUFBQWzYsIGwsLDb2r7//vts2LCBDh06MG7cOOrWrUtkZCQrVqxg165deYYjhw8fzpdffsnWrVv56KOPivR9FKJCstr8MCFEibh5Wvqd3DotXVEUJSUlRXnhhReUwMBAxdbWVqlZs6Yya9Ys85ToXBkZGcqkSZMULy8vxcnJSendu7dy5cqV26ZqK4qiREVFKePHj1eCg4MVW1tbxd/fX
+ncubPy/fffm9sUZVr6Sy+9pAQEBCgODg5Ku3btlL179yodOnRQOnTokKdtenq6Mn36dKVq1arm+w4YMEC5cOGCuU12drYya9YspU6dOoqdnZ3i4+Oj9OjRQzl8+HCe64wePVpxc3NTXFxclEGDBinR0dEFTkuPiYm5Le6rV68q/fr1U9zd3RU3Nzdl4MCBSkRERL7fr8uXLyvDhw9XfHx8FK1Wq1SrVk0ZP368kpWVddt169evr6jVauXq1at3/L4JIRRFpSi39LMKIYQoE5o0aYKnpyebN2+2dihClHpSwyOEEGXQoUOHOHbsGMOHD7d2KEKUCdLDI4QQZcjJkyc5fPgwn376KbGxsVy8eBF7e3trhyVEqSc9PEIIUYb8+uuvPPXUU+j1epYsWSLJjhCFJD08QgghhCj3pIdHCCGEEOWeJDxCCCGEKPcq3MKDRqORiIgIXFxc7mtHZCGEEEJYjqIopKSkEBgYeE/7xlW4hCciIoLg4GBrhyGEEEKIe3DlyhUqVapU5PMqXMKTuyn
|
||
|
"text/plain": [
|
||
|
"<Figure size 640x480 with 1 Axes>"
|
||
|
]
|
||
|
},
|
||
|
"metadata": {},
|
||
|
"output_type": "display_data"
|
||
|
}
|
||
|
],
|
||
|
"source": [
|
||
|
"import matplotlib.pyplot as plt\n",
|
||
|
"plt.plot(alex.history[\"accuracy\"])\n",
|
||
|
"plt.plot(alex.history['val_accuracy'])\n",
|
||
|
"plt.plot(alex.history['loss'])\n",
|
||
|
"plt.plot(alex.history['val_loss'])\n",
|
||
|
"plt.title(\"Model accuracy\")\n",
|
||
|
"plt.ylabel(\"Value\")\n",
|
||
|
"plt.xlabel(\"Epoch\")\n",
|
||
|
"plt.legend([\"Accuracy\",\"Validation Accuracy\",\"Loss\",\"Validation Loss\"])\n",
|
||
|
"plt.show()"
|
||
|
]
|
||
|
},
|
||
|
{
|
||
|
"cell_type": "code",
|
||
|
"execution_count": 10,
|
||
|
"metadata": {},
|
||
|
"outputs": [
|
||
|
{
|
||
|
"name": "stdout",
|
||
|
"output_type": "stream",
|
||
|
"text": [
|
||
|
"72/72 [==============================] - 23s 318ms/step - loss: 0.4541 - accuracy: 0.9084\n"
|
||
|
]
|
||
|
},
|
||
|
{
|
||
|
"data": {
|
||
|
"text/plain": [
|
||
|
"[0.45413827896118164, 0.9084201455116272]"
|
||
|
]
|
||
|
},
|
||
|
"execution_count": 10,
|
||
|
"metadata": {},
|
||
|
"output_type": "execute_result"
|
||
|
}
|
||
|
],
|
||
|
"source": [
|
||
|
"model.evaluate(test_ds_a)"
|
||
|
]
|
||
|
},
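{
"attachments": {},
"cell_type": "markdown",
"metadata": {},
"source": [
"Note that `model.evaluate` above uses the weights from the last epoch. Since `ModelCheckpoint` stored the best validation model in `alex_2.h5`, the saved checkpoint can be evaluated instead; this is a sketch based on the file name used above."
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Sketch: evaluate the best checkpoint saved during training instead of the last-epoch weights.\n",
"from tensorflow import keras\n",
"\n",
"best_alex = keras.models.load_model(\"alex_2.h5\")\n",
"best_alex.evaluate(test_ds_a)"
]
},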
|
||
|
{
|
||
|
"attachments": {},
|
||
|
"cell_type": "markdown",
|
||
|
"metadata": {},
|
||
|
"source": [
|
||
|
"# VGG16"
|
||
|
]
|
||
|
},
|
||
|
{
|
||
|
"cell_type": "code",
|
||
|
"execution_count": 10,
|
||
|
"metadata": {},
|
||
|
"outputs": [
|
||
|
{
|
||
|
"name": "stdout",
|
||
|
"output_type": "stream",
|
||
|
"text": [
|
||
|
"Model: \"sequential\"\n",
|
||
|
"_________________________________________________________________\n",
|
||
|
" Layer (type) Output Shape Param # \n",
|
||
|
"=================================================================\n",
|
||
|
" conv2d (Conv2D) (None, 224, 224, 64) 1792 \n",
|
||
|
" \n",
|
||
|
" conv2d_1 (Conv2D) (None, 224, 224, 64) 36928 \n",
|
||
|
" \n",
|
||
|
" max_pooling2d (MaxPooling2D (None, 112, 112, 64) 0 \n",
|
||
|
" ) \n",
|
||
|
" \n",
|
||
|
" conv2d_2 (Conv2D) (None, 112, 112, 128) 73856 \n",
|
||
|
" \n",
|
||
|
" conv2d_3 (Conv2D) (None, 112, 112, 128) 147584 \n",
|
||
|
" \n",
|
||
|
" max_pooling2d_1 (MaxPooling (None, 56, 56, 128) 0 \n",
|
||
|
" 2D) \n",
|
||
|
" \n",
|
||
|
" conv2d_4 (Conv2D) (None, 56, 56, 256) 295168 \n",
|
||
|
" \n",
|
||
|
" conv2d_5 (Conv2D) (None, 56, 56, 256) 590080 \n",
|
||
|
" \n",
|
||
|
" conv2d_6 (Conv2D) (None, 56, 56, 256) 590080 \n",
|
||
|
" \n",
|
||
|
" max_pooling2d_2 (MaxPooling (None, 28, 28, 256) 0 \n",
|
||
|
" 2D) \n",
|
||
|
" \n",
|
||
|
" conv2d_7 (Conv2D) (None, 28, 28, 512) 1180160 \n",
|
||
|
" \n",
|
||
|
" conv2d_8 (Conv2D) (None, 28, 28, 512) 2359808 \n",
|
||
|
" \n",
|
||
|
" conv2d_9 (Conv2D) (None, 28, 28, 512) 2359808 \n",
|
||
|
" \n",
|
||
|
" max_pooling2d_3 (MaxPooling (None, 14, 14, 512) 0 \n",
|
||
|
" 2D) \n",
|
||
|
" \n",
|
||
|
" conv2d_10 (Conv2D) (None, 14, 14, 512) 2359808 \n",
|
||
|
" \n",
|
||
|
" conv2d_11 (Conv2D) (None, 14, 14, 512) 2359808 \n",
|
||
|
" \n",
|
||
|
" conv2d_12 (Conv2D) (None, 14, 14, 512) 2359808 \n",
|
||
|
" \n",
|
||
|
" flatten (Flatten) (None, 100352) 0 \n",
|
||
|
" \n",
|
||
|
" dense (Dense) (None, 4096) 411045888 \n",
|
||
|
" \n",
|
||
|
" dense_1 (Dense) (None, 4096) 16781312 \n",
|
||
|
" \n",
|
||
|
" dense_2 (Dense) (None, 12) 49164 \n",
|
||
|
" \n",
|
||
|
"=================================================================\n",
|
||
|
"Total params: 442,591,052\n",
|
||
|
"Trainable params: 442,591,052\n",
|
||
|
"Non-trainable params: 0\n",
|
||
|
"_________________________________________________________________\n"
|
||
|
]
|
||
|
},
|
||
|
{
|
||
|
"name": "stderr",
|
||
|
"output_type": "stream",
|
||
|
"text": [
|
||
|
"/opt/homebrew/lib/python3.10/site-packages/keras/optimizers/optimizer_v2/adam.py:117: UserWarning: The `lr` argument is deprecated, use `learning_rate` instead.\n",
|
||
|
" super().__init__(name, **kwargs)\n"
|
||
|
]
|
||
|
}
|
||
|
],
|
||
|
"source": [
|
||
|
"import keras,os\n",
|
||
|
"from keras.models import Sequential\n",
|
||
|
"from keras.layers import Dense, Conv2D, MaxPool2D , Flatten\n",
|
||
|
"from keras.preprocessing.image import ImageDataGenerator\n",
|
||
|
"from keras.optimizers import Adam\n",
|
||
|
"import numpy as np\n",
|
||
|
"\n",
|
||
|
"model = keras.models.Sequential([\n",
|
||
|
" keras.layers.Conv2D(filters=64, kernel_size=(3,3), activation='relu', input_shape=(224,224,3), padding=\"same\"),\n",
|
||
|
" keras.layers.Conv2D(filters=64, kernel_size=(3,3), activation='relu', input_shape=(224,224,3), padding=\"same\"),\n",
|
||
|
" keras.layers.MaxPool2D(pool_size=(2,2), strides=(2,2)),\n",
|
||
|
" keras.layers.Conv2D(filters=128, kernel_size=(3,3), padding=\"same\", activation=\"relu\"),\n",
|
||
|
" keras.layers.Conv2D(filters=128, kernel_size=(3,3), padding=\"same\", activation=\"relu\"),\n",
|
||
|
" keras.layers.MaxPool2D(pool_size=(2,2), strides=(2,2)),\n",
|
||
|
" keras.layers.Conv2D(filters=256, kernel_size=(3,3), padding=\"same\", activation=\"relu\"),\n",
|
||
|
" keras.layers.Conv2D(filters=256, kernel_size=(3,3), padding=\"same\", activation=\"relu\"),\n",
|
||
|
" keras.layers.Conv2D(filters=256, kernel_size=(3,3), padding=\"same\", activation=\"relu\"),\n",
|
||
|
" keras.layers.MaxPool2D(pool_size=(2,2), strides=(2,2)),\n",
|
||
|
" keras.layers.Conv2D(filters=512, kernel_size=(3,3), padding=\"same\", activation=\"relu\"),\n",
|
||
|
" keras.layers.Conv2D(filters=512, kernel_size=(3,3), padding=\"same\", activation=\"relu\"),\n",
|
||
|
" keras.layers.Conv2D(filters=512, kernel_size=(3,3), padding=\"same\", activation=\"relu\"),\n",
|
||
|
" keras.layers.MaxPool2D(pool_size=(2,2), strides=(2,2)),\n",
|
||
|
" keras.layers.Conv2D(filters=512, kernel_size=(3,3), padding=\"same\", activation=\"relu\"),\n",
|
||
|
" keras.layers.Conv2D(filters=512, kernel_size=(3,3), padding=\"same\", activation=\"relu\"),\n",
|
||
|
" keras.layers.Conv2D(filters=512, kernel_size=(3,3), padding=\"same\", activation=\"relu\"),\n",
|
||
|
" keras.layers.Flatten(),\n",
|
||
|
" keras.layers.Dense(units = 4096, activation='relu'),\n",
|
||
|
" keras.layers.Dense(units = 4096, activation='relu'),\n",
|
||
|
" keras.layers.Dense(units = 12, activation='softmax')\n",
|
||
|
"])\n",
|
||
|
"\n",
|
||
|
"opt = Adam(lr=0.001)\n",
|
||
|
"model.compile(optimizer=opt, loss=keras.losses.sparse_categorical_crossentropy, metrics=['accuracy'])\n",
|
||
|
"\n",
|
||
|
"model.summary()"
|
||
|
]
|
||
|
},
|
||
|
{
|
||
|
"cell_type": "code",
|
||
|
"execution_count": 8,
|
||
|
"metadata": {},
|
||
|
"outputs": [
|
||
|
{
|
||
|
"name": "stdout",
|
||
|
"output_type": "stream",
|
||
|
"text": [
|
||
|
"Training: 7430\n",
|
||
|
"Test: 2323\n",
|
||
|
"Validation: 1858\n"
|
||
|
]
|
||
|
}
|
||
|
],
|
||
|
"source": [
|
||
|
"train_ds_v, test_ds_v, val_ds_v = prepare_data('./plantvillage/color', (224, 224), 0.2, 0.2)"
|
||
|
]
|
||
|
},
|
||
|
{
|
||
|
"cell_type": "code",
|
||
|
"execution_count": 11,
|
||
|
"metadata": {},
|
||
|
"outputs": [
|
||
|
{
|
||
|
"name": "stdout",
|
||
|
"output_type": "stream",
|
||
|
"text": [
|
||
|
"WARNING:tensorflow:`period` argument is deprecated. Please use `save_freq` to specify the frequency in number of batches seen.\n"
|
||
|
]
|
||
|
},
|
||
|
{
|
||
|
"name": "stderr",
|
||
|
"output_type": "stream",
|
||
|
"text": [
|
||
|
"/var/folders/_h/ljwht4gd7lb99rm1hm78h7_00000gn/T/ipykernel_24066/3966396738.py:5: UserWarning: `Model.fit_generator` is deprecated and will be removed in a future version. Please use `Model.fit`, which supports generators.\n",
|
||
|
" vgg = model.fit_generator(steps_per_epoch=len(train_ds_v), generator=train_ds_v, validation_data= val_ds_v, validation_steps=len(val_ds_v), epochs=25, callbacks=[checkpoint,early])\n"
|
||
|
]
|
||
|
},
|
||
|
{
|
||
|
"name": "stdout",
|
||
|
"output_type": "stream",
|
||
|
"text": [
|
||
|
"Epoch 1/25\n"
|
||
|
]
|
||
|
},
|
||
|
{
|
||
|
"name": "stderr",
|
||
|
"output_type": "stream",
|
||
|
"text": [
|
||
|
"2023-01-06 22:32:18.362109: W tensorflow/tsl/platform/profile_utils/cpu_utils.cc:128] Failed to get CPU frequency: 0 Hz\n"
|
||
|
]
|
||
|
},
|
||
|
{
|
||
|
"name": "stdout",
|
||
|
"output_type": "stream",
|
||
|
"text": [
|
||
|
"232/232 [==============================] - ETA: 0s - loss: 2.4227 - accuracy: 0.1339 \n",
|
||
|
"Epoch 1: val_accuracy improved from -inf to 0.15086, saving model to vgg16_2.h5\n",
|
||
|
"232/232 [==============================] - 3659s 16s/step - loss: 2.4227 - accuracy: 0.1339 - val_loss: 2.4052 - val_accuracy: 0.1509\n",
|
||
|
"Epoch 2/25\n",
|
||
|
"232/232 [==============================] - ETA: 0s - loss: 2.4051 - accuracy: 0.1356 \n",
|
||
|
"Epoch 2: val_accuracy did not improve from 0.15086\n",
|
||
|
"232/232 [==============================] - 3761s 16s/step - loss: 2.4051 - accuracy: 0.1356 - val_loss: 2.4036 - val_accuracy: 0.1509\n",
|
||
|
"Epoch 3/25\n",
|
||
|
"232/232 [==============================] - ETA: 0s - loss: 2.4026 - accuracy: 0.1381 \n",
|
||
|
"Epoch 3: val_accuracy did not improve from 0.15086\n",
|
||
|
"232/232 [==============================] - 3712s 16s/step - loss: 2.4026 - accuracy: 0.1381 - val_loss: 2.4002 - val_accuracy: 0.1503\n",
|
||
|
"Epoch 4/25\n",
|
||
|
"232/232 [==============================] - ETA: 0s - loss: 2.4015 - accuracy: 0.1379 \n",
|
||
|
"Epoch 4: val_accuracy did not improve from 0.15086\n",
|
||
|
"232/232 [==============================] - 3690s 16s/step - loss: 2.4015 - accuracy: 0.1379 - val_loss: 2.4012 - val_accuracy: 0.1509\n",
|
||
|
"Epoch 5/25\n",
|
||
|
"232/232 [==============================] - ETA: 0s - loss: 2.4015 - accuracy: 0.1382 \n",
|
||
|
"Epoch 5: val_accuracy did not improve from 0.15086\n",
|
||
|
"232/232 [==============================] - 3695s 16s/step - loss: 2.4015 - accuracy: 0.1382 - val_loss: 2.3971 - val_accuracy: 0.1509\n",
|
||
|
"Epoch 6/25\n",
|
||
|
"232/232 [==============================] - ETA: 0s - loss: 2.4004 - accuracy: 0.1393 \n",
|
||
|
"Epoch 6: val_accuracy did not improve from 0.15086\n",
|
||
|
"232/232 [==============================] - 3703s 16s/step - loss: 2.4004 - accuracy: 0.1393 - val_loss: 2.3999 - val_accuracy: 0.1509\n",
|
||
|
"Epoch 7/25\n",
|
||
|
"232/232 [==============================] - ETA: 0s - loss: 2.4006 - accuracy: 0.1379 \n",
|
||
|
"Epoch 7: val_accuracy did not improve from 0.15086\n",
|
||
|
"232/232 [==============================] - 3678s 16s/step - loss: 2.4006 - accuracy: 0.1379 - val_loss: 2.3984 - val_accuracy: 0.1509\n",
|
||
|
"Epoch 8/25\n",
|
||
|
"232/232 [==============================] - ETA: 0s - loss: 2.4007 - accuracy: 0.1394 \n",
|
||
|
"Epoch 8: val_accuracy did not improve from 0.15086\n",
|
||
|
"232/232 [==============================] - 3677s 16s/step - loss: 2.4007 - accuracy: 0.1394 - val_loss: 2.3993 - val_accuracy: 0.1509\n",
|
||
|
"Epoch 9/25\n",
|
||
|
"232/232 [==============================] - ETA: 0s - loss: 2.4006 - accuracy: 0.1354 \n",
|
||
|
"Epoch 9: val_accuracy did not improve from 0.15086\n",
|
||
|
"232/232 [==============================] - 3660s 16s/step - loss: 2.4006 - accuracy: 0.1354 - val_loss: 2.3993 - val_accuracy: 0.1509\n",
|
||
|
"Epoch 10/25\n",
|
||
|
"232/232 [==============================] - ETA: 0s - loss: 2.4004 - accuracy: 0.1395 \n",
|
||
|
"Epoch 10: val_accuracy did not improve from 0.15086\n",
|
||
|
"232/232 [==============================] - 3696s 16s/step - loss: 2.4004 - accuracy: 0.1395 - val_loss: 2.3970 - val_accuracy: 0.1509\n",
|
||
|
"Epoch 11/25\n",
|
||
|
"232/232 [==============================] - ETA: 0s - loss: 2.4005 - accuracy: 0.1394 \n",
|
||
|
"Epoch 11: val_accuracy did not improve from 0.15086\n",
|
||
|
"232/232 [==============================] - 3672s 16s/step - loss: 2.4005 - accuracy: 0.1394 - val_loss: 2.4014 - val_accuracy: 0.1498\n",
|
||
|
"Epoch 12/25\n",
|
||
|
"232/232 [==============================] - ETA: 0s - loss: 2.4003 - accuracy: 0.1374 \n",
|
||
|
"Epoch 12: val_accuracy did not improve from 0.15086\n",
|
||
|
"232/232 [==============================] - 3548s 15s/step - loss: 2.4003 - accuracy: 0.1374 - val_loss: 2.3988 - val_accuracy: 0.1503\n",
|
||
|
"Epoch 13/25\n",
|
||
|
"232/232 [==============================] - ETA: 0s - loss: 2.4005 - accuracy: 0.1393 \n",
|
||
|
"Epoch 13: val_accuracy did not improve from 0.15086\n",
|
||
|
"232/232 [==============================] - 3600s 16s/step - loss: 2.4005 - accuracy: 0.1393 - val_loss: 2.3987 - val_accuracy: 0.1503\n",
|
||
|
"Epoch 14/25\n",
|
||
|
"232/232 [==============================] - ETA: 0s - loss: 2.4005 - accuracy: 0.1394 \n",
|
||
|
"Epoch 14: val_accuracy did not improve from 0.15086\n",
|
||
|
"232/232 [==============================] - 3600s 16s/step - loss: 2.4005 - accuracy: 0.1394 - val_loss: 2.3989 - val_accuracy: 0.1509\n",
|
||
|
"Epoch 15/25\n",
|
||
|
"232/232 [==============================] - ETA: 0s - loss: 2.4004 - accuracy: 0.1393 \n",
|
||
|
"Epoch 15: val_accuracy did not improve from 0.15086\n",
|
||
|
"232/232 [==============================] - 3261s 14s/step - loss: 2.4004 - accuracy: 0.1393 - val_loss: 2.3988 - val_accuracy: 0.1503\n",
|
||
|
"Epoch 16/25\n",
|
||
|
"232/232 [==============================] - ETA: 0s - loss: 2.3998 - accuracy: 0.1367 \n",
|
||
|
"Epoch 16: val_accuracy did not improve from 0.15086\n",
|
||
|
"232/232 [==============================] - 3359s 14s/step - loss: 2.3998 - accuracy: 0.1367 - val_loss: 2.3984 - val_accuracy: 0.1509\n",
|
||
|
"Epoch 17/25\n",
|
||
|
"232/232 [==============================] - ETA: 0s - loss: 2.4001 - accuracy: 0.1395 \n",
|
||
|
"Epoch 17: val_accuracy did not improve from 0.15086\n",
|
||
|
"232/232 [==============================] - 3397s 15s/step - loss: 2.4001 - accuracy: 0.1395 - val_loss: 2.4013 - val_accuracy: 0.1509\n",
|
||
|
"Epoch 18/25\n",
|
||
|
"232/232 [==============================] - ETA: 0s - loss: 2.3998 - accuracy: 0.1394 \n",
|
||
|
"Epoch 18: val_accuracy did not improve from 0.15086\n",
|
||
|
"232/232 [==============================] - 3391s 15s/step - loss: 2.3998 - accuracy: 0.1394 - val_loss: 2.3987 - val_accuracy: 0.1509\n",
|
||
|
"Epoch 19/25\n",
|
||
|
"232/232 [==============================] - ETA: 0s - loss: 2.3991 - accuracy: 0.1395 \n",
|
||
|
"Epoch 19: val_accuracy did not improve from 0.15086\n",
|
||
|
"232/232 [==============================] - 3483s 15s/step - loss: 2.3991 - accuracy: 0.1395 - val_loss: 2.4005 - val_accuracy: 0.1509\n",
|
||
|
"Epoch 20/25\n",
|
||
|
"232/232 [==============================] - ETA: 0s - loss: 2.4009 - accuracy: 0.1373 \n",
|
||
|
"Epoch 20: val_accuracy did not improve from 0.15086\n",
|
||
|
"232/232 [==============================] - 3464s 15s/step - loss: 2.4009 - accuracy: 0.1373 - val_loss: 2.3981 - val_accuracy: 0.1503\n",
|
||
|
"Epoch 21/25\n",
|
||
|
"232/232 [==============================] - ETA: 0s - loss: 2.3996 - accuracy: 0.1394 \n",
|
||
|
"Epoch 21: val_accuracy did not improve from 0.15086\n",
|
||
|
"232/232 [==============================] - 3464s 15s/step - loss: 2.3996 - accuracy: 0.1394 - val_loss: 2.3978 - val_accuracy: 0.1509\n",
|
||
|
"Epoch 21: early stopping\n"
|
||
|
]
|
||
|
}
|
||
|
],
|
||
|
"source": [
|
||
|
"from keras.callbacks import ModelCheckpoint, EarlyStopping\n",
|
||
|
"\n",
|
||
|
"checkpoint = ModelCheckpoint(\"vgg16_2.h5\", monitor='val_accuracy', verbose=1, save_best_only=True, save_weights_only=False, mode='auto', period=1)\n",
|
||
|
"early = EarlyStopping(monitor='val_accuracy', min_delta=0, patience=20, verbose=1, mode='auto')\n",
|
||
|
"vgg = model.fit_generator(steps_per_epoch=len(train_ds_v), generator=train_ds_v, validation_data= val_ds_v, validation_steps=len(val_ds_v), epochs=25, callbacks=[checkpoint,early])"
|
||
|
]
|
||
|
},
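{
"attachments": {},
"cell_type": "markdown",
"metadata": {},
"source": [
"The VGG16 run above stalls at roughly 13-15% accuracy. A common first step when training such a deep network from scratch is to lower the Adam learning rate; the sketch below shows one possible recompilation (it was not run in this notebook)."
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Sketch (not executed here): recompile VGG16 with a smaller Adam learning rate\n",
"# before retraining; very deep networks trained from scratch often stall at learning_rate=1e-3.\n",
"from keras.optimizers import Adam\n",
"\n",
"model.compile(optimizer=Adam(learning_rate=1e-4),\n",
"              loss='sparse_categorical_crossentropy',\n",
"              metrics=['accuracy'])"
]
},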
|
||
|
{
|
||
|
"cell_type": "code",
|
||
|
"execution_count": 11,
|
||
|
"metadata": {},
|
||
|
"outputs": [
|
||
|
{
|
||
|
"ename": "NameError",
|
||
|
"evalue": "name 'vgg' is not defined",
|
||
|
"output_type": "error",
|
||
|
"traceback": [
|
||
|
"\u001b[0;31m---------------------------------------------------------------------------\u001b[0m",
|
||
|
"\u001b[0;31mNameError\u001b[0m Traceback (most recent call last)",
|
||
|
"Cell \u001b[0;32mIn [11], line 2\u001b[0m\n\u001b[1;32m 1\u001b[0m \u001b[39mimport\u001b[39;00m \u001b[39mmatplotlib\u001b[39;00m\u001b[39m.\u001b[39;00m\u001b[39mpyplot\u001b[39;00m \u001b[39mas\u001b[39;00m \u001b[39mplt\u001b[39;00m\n\u001b[0;32m----> 2\u001b[0m plt\u001b[39m.\u001b[39mplot(vgg\u001b[39m.\u001b[39mhistory[\u001b[39m\"\u001b[39m\u001b[39maccuracy\u001b[39m\u001b[39m\"\u001b[39m])\n\u001b[1;32m 3\u001b[0m plt\u001b[39m.\u001b[39mplot(vgg\u001b[39m.\u001b[39mhistory[\u001b[39m'\u001b[39m\u001b[39mval_accuracy\u001b[39m\u001b[39m'\u001b[39m])\n\u001b[1;32m 4\u001b[0m plt\u001b[39m.\u001b[39mplot(vgg\u001b[39m.\u001b[39mhistory[\u001b[39m'\u001b[39m\u001b[39mloss\u001b[39m\u001b[39m'\u001b[39m])\n",
|
||
|
"\u001b[0;31mNameError\u001b[0m: name 'vgg' is not defined"
|
||
|
]
|
||
|
}
|
||
|
],
|
||
|
"source": [
|
||
|
"import matplotlib.pyplot as plt\n",
|
||
|
"plt.plot(vgg.history[\"accuracy\"])\n",
|
||
|
"plt.plot(vgg.history['val_accuracy'])\n",
|
||
|
"plt.plot(vgg.history['loss'])\n",
|
||
|
"plt.plot(vgg.history['val_loss'])\n",
|
||
|
"plt.title(\"Model accuracy\")\n",
|
||
|
"plt.ylabel(\"Value\")\n",
|
||
|
"plt.xlabel(\"Epoch\")\n",
|
||
|
"plt.legend([\"Accuracy\",\"Validation Accuracy\",\"Loss\",\"Validation Loss\"])\n",
|
||
|
"plt.show()"
|
||
|
]
|
||
|
},
|
||
|
{
|
||
|
"cell_type": "code",
|
||
|
"execution_count": 5,
|
||
|
"metadata": {},
|
||
|
"outputs": [
|
||
|
{
|
||
|
"ename": "NameError",
|
||
|
"evalue": "name 'model' is not defined",
|
||
|
"output_type": "error",
|
||
|
"traceback": [
|
||
|
"\u001b[0;31m---------------------------------------------------------------------------\u001b[0m",
|
||
|
"\u001b[0;31mNameError\u001b[0m Traceback (most recent call last)",
|
||
|
"Cell \u001b[0;32mIn [5], line 1\u001b[0m\n\u001b[0;32m----> 1\u001b[0m model\u001b[39m.\u001b[39mevaluate(test_ds_v)\n",
|
||
|
"\u001b[0;31mNameError\u001b[0m: name 'model' is not defined"
|
||
|
]
|
||
|
}
|
||
|
],
|
||
|
"source": [
|
||
|
"model.evaluate(test_ds_v)"
|
||
|
]
|
||
|
},
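{
"attachments": {},
"cell_type": "markdown",
"metadata": {},
"source": [
"Both errors above come from re-running these cells in a fresh session where `vgg` and `model` no longer exist. If the checkpoint file from the training run is still on disk, it can be reloaded and evaluated independently of the earlier session; this is a sketch using the `vgg16_2.h5` path from above and assumes `test_ds_v` has been rebuilt with `prepare_data`."
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Sketch: reload the checkpoint written by ModelCheckpoint and evaluate it,\n",
"# so the evaluation does not depend on objects from an earlier session.\n",
"from tensorflow import keras\n",
"\n",
"vgg_model = keras.models.load_model(\"vgg16_2.h5\")\n",
"vgg_model.evaluate(test_ds_v)"
]
},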
|
||
|
{
|
||
|
"attachments": {},
|
||
|
"cell_type": "markdown",
|
||
|
"metadata": {},
|
||
|
"source": [
|
||
|
"# ResNet50"
|
||
|
]
|
||
|
},
|
||
|
{
|
||
|
"cell_type": "code",
|
||
|
"execution_count": 9,
|
||
|
"metadata": {},
|
||
|
"outputs": [
|
||
|
{
|
||
|
"name": "stdout",
|
||
|
"output_type": "stream",
|
||
|
"text": [
|
||
|
"Model: \"model_1\"\n",
|
||
|
"__________________________________________________________________________________________________\n",
|
||
|
" Layer (type) Output Shape Param # Connected to \n",
|
||
|
"==================================================================================================\n",
|
||
|
" input_2 (InputLayer) [(None, 224, 224, 3 0 [] \n",
|
||
|
" )] \n",
|
||
|
" \n",
|
||
|
" conv1_pad (ZeroPadding2D) (None, 230, 230, 3) 0 ['input_2[0][0]'] \n",
|
||
|
" \n",
|
||
|
" conv1_conv (Conv2D) (None, 112, 112, 64 9472 ['conv1_pad[0][0]'] \n",
|
||
|
" ) \n",
|
||
|
" \n",
|
||
|
" conv1_bn (BatchNormalization) (None, 112, 112, 64 256 ['conv1_conv[0][0]'] \n",
|
||
|
" ) \n",
|
||
|
" \n",
|
||
|
" conv1_relu (Activation) (None, 112, 112, 64 0 ['conv1_bn[0][0]'] \n",
|
||
|
" ) \n",
|
||
|
" \n",
|
||
|
" pool1_pad (ZeroPadding2D) (None, 114, 114, 64 0 ['conv1_relu[0][0]'] \n",
|
||
|
" ) \n",
|
||
|
" \n",
|
||
|
" pool1_pool (MaxPooling2D) (None, 56, 56, 64) 0 ['pool1_pad[0][0]'] \n",
|
||
|
" \n",
|
||
|
" conv2_block1_1_conv (Conv2D) (None, 56, 56, 64) 4160 ['pool1_pool[0][0]'] \n",
|
||
|
" \n",
|
||
|
" conv2_block1_1_bn (BatchNormal (None, 56, 56, 64) 256 ['conv2_block1_1_conv[0][0]'] \n",
|
||
|
" ization) \n",
|
||
|
" \n",
|
||
|
" conv2_block1_1_relu (Activatio (None, 56, 56, 64) 0 ['conv2_block1_1_bn[0][0]'] \n",
|
||
|
" n) \n",
|
||
|
" \n",
|
||
|
" conv2_block1_2_conv (Conv2D) (None, 56, 56, 64) 36928 ['conv2_block1_1_relu[0][0]'] \n",
|
||
|
" \n",
|
||
|
" conv2_block1_2_bn (BatchNormal (None, 56, 56, 64) 256 ['conv2_block1_2_conv[0][0]'] \n",
|
||
|
" ization) \n",
|
||
|
" \n",
|
||
|
" conv2_block1_2_relu (Activatio (None, 56, 56, 64) 0 ['conv2_block1_2_bn[0][0]'] \n",
|
||
|
" n) \n",
|
||
|
" \n",
|
||
|
" conv2_block1_0_conv (Conv2D) (None, 56, 56, 256) 16640 ['pool1_pool[0][0]'] \n",
|
||
|
" \n",
|
||
|
" conv2_block1_3_conv (Conv2D) (None, 56, 56, 256) 16640 ['conv2_block1_2_relu[0][0]'] \n",
|
||
|
" \n",
|
||
|
" conv2_block1_0_bn (BatchNormal (None, 56, 56, 256) 1024 ['conv2_block1_0_conv[0][0]'] \n",
|
||
|
" ization) \n",
|
||
|
" \n",
|
||
|
" conv2_block1_3_bn (BatchNormal (None, 56, 56, 256) 1024 ['conv2_block1_3_conv[0][0]'] \n",
|
||
|
" ization) \n",
|
||
|
" \n",
|
||
|
" conv2_block1_add (Add) (None, 56, 56, 256) 0 ['conv2_block1_0_bn[0][0]', \n",
|
||
|
" 'conv2_block1_3_bn[0][0]'] \n",
|
||
|
" \n",
|
||
|
" conv2_block1_out (Activation) (None, 56, 56, 256) 0 ['conv2_block1_add[0][0]'] \n",
|
||
|
" \n",
|
||
|
" conv2_block2_1_conv (Conv2D) (None, 56, 56, 64) 16448 ['conv2_block1_out[0][0]'] \n",
|
||
|
" \n",
|
||
|
" conv2_block2_1_bn (BatchNormal (None, 56, 56, 64) 256 ['conv2_block2_1_conv[0][0]'] \n",
|
||
|
" ization) \n",
|
||
|
" \n",
|
||
|
" conv2_block2_1_relu (Activatio (None, 56, 56, 64) 0 ['conv2_block2_1_bn[0][0]'] \n",
|
||
|
" n) \n",
|
||
|
" \n",
|
||
|
" conv2_block2_2_conv (Conv2D) (None, 56, 56, 64) 36928 ['conv2_block2_1_relu[0][0]'] \n",
|
||
|
" \n",
|
||
|
" conv2_block2_2_bn (BatchNormal (None, 56, 56, 64) 256 ['conv2_block2_2_conv[0][0]'] \n",
|
||
|
" ization) \n",
|
||
|
" \n",
|
||
|
" conv2_block2_2_relu (Activatio (None, 56, 56, 64) 0 ['conv2_block2_2_bn[0][0]'] \n",
|
||
|
" n) \n",
|
||
|
" \n",
|
||
|
" conv2_block2_3_conv (Conv2D) (None, 56, 56, 256) 16640 ['conv2_block2_2_relu[0][0]'] \n",
|
||
|
" \n",
|
||
|
" conv2_block2_3_bn (BatchNormal (None, 56, 56, 256) 1024 ['conv2_block2_3_conv[0][0]'] \n",
|
||
|
" ization) \n",
|
||
|
" \n",
|
||
|
" conv2_block2_add (Add) (None, 56, 56, 256) 0 ['conv2_block1_out[0][0]', \n",
|
||
|
" 'conv2_block2_3_bn[0][0]'] \n",
|
||
|
" \n",
|
||
|
" conv2_block2_out (Activation) (None, 56, 56, 256) 0 ['conv2_block2_add[0][0]'] \n",
|
||
|
" \n",
|
||
|
" conv2_block3_1_conv (Conv2D) (None, 56, 56, 64) 16448 ['conv2_block2_out[0][0]'] \n",
|
||
|
" \n",
|
||
|
" conv2_block3_1_bn (BatchNormal (None, 56, 56, 64) 256 ['conv2_block3_1_conv[0][0]'] \n",
|
||
|
" ization) \n",
|
||
|
" \n",
|
||
|
" conv2_block3_1_relu (Activatio (None, 56, 56, 64) 0 ['conv2_block3_1_bn[0][0]'] \n",
|
||
|
" n) \n",
|
||
|
" \n",
|
||
|
" conv2_block3_2_conv (Conv2D) (None, 56, 56, 64) 36928 ['conv2_block3_1_relu[0][0]'] \n",
|
||
|
" \n",
|
||
|
" conv2_block3_2_bn (BatchNormal (None, 56, 56, 64) 256 ['conv2_block3_2_conv[0][0]'] \n",
|
||
|
" ization) \n",
|
||
|
" \n",
|
||
|
" conv2_block3_2_relu (Activatio (None, 56, 56, 64) 0 ['conv2_block3_2_bn[0][0]'] \n",
|
||
|
" n) \n",
|
||
|
" \n",
|
||
|
" conv2_block3_3_conv (Conv2D) (None, 56, 56, 256) 16640 ['conv2_block3_2_relu[0][0]'] \n",
|
||
|
" \n",
|
||
|
" conv2_block3_3_bn (BatchNormal (None, 56, 56, 256) 1024 ['conv2_block3_3_conv[0][0]'] \n",
|
||
|
" ization) \n",
|
||
|
" \n",
|
||
|
" conv2_block3_add (Add) (None, 56, 56, 256) 0 ['conv2_block2_out[0][0]', \n",
|
||
|
" 'conv2_block3_3_bn[0][0]'] \n",
|
||
|
" \n",
|
||
|
" conv2_block3_out (Activation) (None, 56, 56, 256) 0 ['conv2_block3_add[0][0]'] \n",
|
||
|
" \n",
|
||
|
" conv3_block1_1_conv (Conv2D) (None, 28, 28, 128) 32896 ['conv2_block3_out[0][0]'] \n",
|
||
|
" \n",
|
||
|
" conv3_block1_1_bn (BatchNormal (None, 28, 28, 128) 512 ['conv3_block1_1_conv[0][0]'] \n",
|
||
|
" ization) \n",
|
||
|
" \n",
|
||
|
" conv3_block1_1_relu (Activatio (None, 28, 28, 128) 0 ['conv3_block1_1_bn[0][0]'] \n",
|
||
|
" n) \n",
|
||
|
" \n",
|
||
|
" conv3_block1_2_conv (Conv2D) (None, 28, 28, 128) 147584 ['conv3_block1_1_relu[0][0]'] \n",
|
||
|
" \n",
|
||
|
" conv3_block1_2_bn (BatchNormal (None, 28, 28, 128) 512 ['conv3_block1_2_conv[0][0]'] \n",
|
||
|
" ization) \n",
|
||
|
" \n",
|
||
|
" conv3_block1_2_relu (Activatio (None, 28, 28, 128) 0 ['conv3_block1_2_bn[0][0]'] \n",
|
||
|
" n) \n",
|
||
|
" \n",
|
||
|
" conv3_block1_0_conv (Conv2D) (None, 28, 28, 512) 131584 ['conv2_block3_out[0][0]'] \n",
|
||
|
" \n",
|
||
|
" conv3_block1_3_conv (Conv2D) (None, 28, 28, 512) 66048 ['conv3_block1_2_relu[0][0]'] \n",
|
||
|
" \n",
|
||
|
" conv3_block1_0_bn (BatchNormal (None, 28, 28, 512) 2048 ['conv3_block1_0_conv[0][0]'] \n",
|
||
|
" ization) \n",
|
||
|
" \n",
|
||
|
" conv3_block1_3_bn (BatchNormal (None, 28, 28, 512) 2048 ['conv3_block1_3_conv[0][0]'] \n",
|
||
|
" ization) \n",
|
||
|
" \n",
|
||
|
" conv3_block1_add (Add) (None, 28, 28, 512) 0 ['conv3_block1_0_bn[0][0]', \n",
|
||
|
" 'conv3_block1_3_bn[0][0]'] \n",
|
||
|
" \n",
|
||
|
" conv3_block1_out (Activation) (None, 28, 28, 512) 0 ['conv3_block1_add[0][0]'] \n",
|
||
|
" \n",
|
||
|
" conv3_block2_1_conv (Conv2D) (None, 28, 28, 128) 65664 ['conv3_block1_out[0][0]'] \n",
|
||
|
" \n",
|
||
|
" conv3_block2_1_bn (BatchNormal (None, 28, 28, 128) 512 ['conv3_block2_1_conv[0][0]'] \n",
|
||
|
" ization) \n",
|
||
|
" \n",
|
||
|
" conv3_block2_1_relu (Activatio (None, 28, 28, 128) 0 ['conv3_block2_1_bn[0][0]'] \n",
|
||
|
" n) \n",
|
||
|
" \n",
|
||
|
" conv3_block2_2_conv (Conv2D) (None, 28, 28, 128) 147584 ['conv3_block2_1_relu[0][0]'] \n",
|
||
|
" \n",
|
||
|
" conv3_block2_2_bn (BatchNormal (None, 28, 28, 128) 512 ['conv3_block2_2_conv[0][0]'] \n",
|
||
|
" ization) \n",
|
||
|
" \n",
|
||
|
" conv3_block2_2_relu (Activatio (None, 28, 28, 128) 0 ['conv3_block2_2_bn[0][0]'] \n",
|
||
|
" n) \n",
|
||
|
" \n",
|
||
|
" conv3_block2_3_conv (Conv2D) (None, 28, 28, 512) 66048 ['conv3_block2_2_relu[0][0]'] \n",
|
||
|
" \n",
|
||
|
" conv3_block2_3_bn (BatchNormal (None, 28, 28, 512) 2048 ['conv3_block2_3_conv[0][0]'] \n",
|
||
|
" ization) \n",
|
||
|
" \n",
|
||
|
" conv3_block2_add (Add) (None, 28, 28, 512) 0 ['conv3_block1_out[0][0]', \n",
|
||
|
" 'conv3_block2_3_bn[0][0]'] \n",
|
||
|
" \n",
|
||
|
" conv3_block2_out (Activation) (None, 28, 28, 512) 0 ['conv3_block2_add[0][0]'] \n",
|
||
|
" \n",
|
||
|
" conv3_block3_1_conv (Conv2D) (None, 28, 28, 128) 65664 ['conv3_block2_out[0][0]'] \n",
|
||
|
" \n",
|
||
|
" conv3_block3_1_bn (BatchNormal (None, 28, 28, 128) 512 ['conv3_block3_1_conv[0][0]'] \n",
|
||
|
" ization) \n",
|
||
|
" \n",
|
||
|
" conv3_block3_1_relu (Activatio (None, 28, 28, 128) 0 ['conv3_block3_1_bn[0][0]'] \n",
|
||
|
" n) \n",
|
||
|
" \n",
|
||
|
" conv3_block3_2_conv (Conv2D) (None, 28, 28, 128) 147584 ['conv3_block3_1_relu[0][0]'] \n",
|
||
|
" \n",
|
||
|
" conv3_block3_2_bn (BatchNormal (None, 28, 28, 128) 512 ['conv3_block3_2_conv[0][0]'] \n",
|
||
|
" ization) \n",
|
||
|
" \n",
|
||
|
" conv3_block3_2_relu (Activatio (None, 28, 28, 128) 0 ['conv3_block3_2_bn[0][0]'] \n",
|
||
|
" n) \n",
|
||
|
" \n",
|
||
|
" conv3_block3_3_conv (Conv2D) (None, 28, 28, 512) 66048 ['conv3_block3_2_relu[0][0]'] \n",
|
||
|
" \n",
|
||
|
" conv3_block3_3_bn (BatchNormal (None, 28, 28, 512) 2048 ['conv3_block3_3_conv[0][0]'] \n",
|
||
|
" ization) \n",
|
||
|
" \n",
|
||
|
" conv3_block3_add (Add) (None, 28, 28, 512) 0 ['conv3_block2_out[0][0]', \n",
|
||
|
" 'conv3_block3_3_bn[0][0]'] \n",
|
||
|
" \n",
|
||
|
" conv3_block3_out (Activation) (None, 28, 28, 512) 0 ['conv3_block3_add[0][0]'] \n",
|
||
|
" \n",
|
||
|
" conv3_block4_1_conv (Conv2D) (None, 28, 28, 128) 65664 ['conv3_block3_out[0][0]'] \n",
|
||
|
" \n",
|
||
|
" conv3_block4_1_bn (BatchNormal (None, 28, 28, 128) 512 ['conv3_block4_1_conv[0][0]'] \n",
|
||
|
" ization) \n",
|
||
|
" \n",
|
||
|
" conv3_block4_1_relu (Activatio (None, 28, 28, 128) 0 ['conv3_block4_1_bn[0][0]'] \n",
|
||
|
" n) \n",
|
||
|
" \n",
|
||
|
" conv3_block4_2_conv (Conv2D) (None, 28, 28, 128) 147584 ['conv3_block4_1_relu[0][0]'] \n",
|
||
|
" \n",
|
||
|
" conv3_block4_2_bn (BatchNormal (None, 28, 28, 128) 512 ['conv3_block4_2_conv[0][0]'] \n",
|
||
|
" ization) \n",
|
||
|
" \n",
|
||
|
" conv3_block4_2_relu (Activatio (None, 28, 28, 128) 0 ['conv3_block4_2_bn[0][0]'] \n",
|
||
|
" n) \n",
|
||
|
" \n",
|
||
|
" conv3_block4_3_conv (Conv2D) (None, 28, 28, 512) 66048 ['conv3_block4_2_relu[0][0]'] \n",
|
||
|
" \n",
|
||
|
" conv3_block4_3_bn (BatchNormal (None, 28, 28, 512) 2048 ['conv3_block4_3_conv[0][0]'] \n",
|
||
|
" ization) \n",
|
||
|
" \n",
|
||
|
" conv3_block4_add (Add) (None, 28, 28, 512) 0 ['conv3_block3_out[0][0]', \n",
|
||
|
" 'conv3_block4_3_bn[0][0]'] \n",
|
||
|
" \n",
|
||
|
" conv3_block4_out (Activation) (None, 28, 28, 512) 0 ['conv3_block4_add[0][0]'] \n",
|
||
|
" \n",
|
||
|
" conv4_block1_1_conv (Conv2D) (None, 14, 14, 256) 131328 ['conv3_block4_out[0][0]'] \n",
|
||
|
" \n",
|
||
|
" conv4_block1_1_bn (BatchNormal (None, 14, 14, 256) 1024 ['conv4_block1_1_conv[0][0]'] \n",
|
||
|
" ization) \n",
|
||
|
" \n",
|
||
|
" conv4_block1_1_relu (Activatio (None, 14, 14, 256) 0 ['conv4_block1_1_bn[0][0]'] \n",
|
||
|
" n) \n",
|
||
|
" \n",
|
||
|
" conv4_block1_2_conv (Conv2D) (None, 14, 14, 256) 590080 ['conv4_block1_1_relu[0][0]'] \n",
|
||
|
" \n",
|
||
|
" conv4_block1_2_bn (BatchNormal (None, 14, 14, 256) 1024 ['conv4_block1_2_conv[0][0]'] \n",
|
||
|
" ization) \n",
|
||
|
" \n",
|
||
|
" conv4_block1_2_relu (Activatio (None, 14, 14, 256) 0 ['conv4_block1_2_bn[0][0]'] \n",
|
||
|
" n) \n",
|
||
|
" \n",
|
||
|
" conv4_block1_0_conv (Conv2D) (None, 14, 14, 1024 525312 ['conv3_block4_out[0][0]'] \n",
|
||
|
" ) \n",
|
||
|
" \n",
|
||
|
" conv4_block1_3_conv (Conv2D) (None, 14, 14, 1024 263168 ['conv4_block1_2_relu[0][0]'] \n",
|
||
|
" ) \n",
|
||
|
" \n",
|
||
|
" conv4_block1_0_bn (BatchNormal (None, 14, 14, 1024 4096 ['conv4_block1_0_conv[0][0]'] \n",
|
||
|
" ization) ) \n",
|
||
|
" \n",
|
||
|
" conv4_block1_3_bn (BatchNormal (None, 14, 14, 1024 4096 ['conv4_block1_3_conv[0][0]'] \n",
|
||
|
" ization) ) \n",
|
||
|
" \n",
|
||
|
" conv4_block1_add (Add) (None, 14, 14, 1024 0 ['conv4_block1_0_bn[0][0]', \n",
|
||
|
" ) 'conv4_block1_3_bn[0][0]'] \n",
|
||
|
" \n",
|
||
|
" conv4_block1_out (Activation) (None, 14, 14, 1024 0 ['conv4_block1_add[0][0]'] \n",
|
||
|
" ) \n",
|
||
|
" \n",
|
||
|
" conv4_block2_1_conv (Conv2D) (None, 14, 14, 256) 262400 ['conv4_block1_out[0][0]'] \n",
|
||
|
" \n",
|
||
|
" conv4_block2_1_bn (BatchNormal (None, 14, 14, 256) 1024 ['conv4_block2_1_conv[0][0]'] \n",
|
||
|
" ization) \n",
|
||
|
" \n",
|
||
|
" conv4_block2_1_relu (Activatio (None, 14, 14, 256) 0 ['conv4_block2_1_bn[0][0]'] \n",
|
||
|
" n) \n",
|
||
|
" \n",
|
||
|
" conv4_block2_2_conv (Conv2D) (None, 14, 14, 256) 590080 ['conv4_block2_1_relu[0][0]'] \n",
|
||
|
" \n",
|
||
|
" conv4_block2_2_bn (BatchNormal (None, 14, 14, 256) 1024 ['conv4_block2_2_conv[0][0]'] \n",
|
||
|
" ization) \n",
|
||
|
" \n",
|
||
|
" conv4_block2_2_relu (Activatio (None, 14, 14, 256) 0 ['conv4_block2_2_bn[0][0]'] \n",
|
||
|
" n) \n",
|
||
|
" \n",
|
||
|
" conv4_block2_3_conv (Conv2D) (None, 14, 14, 1024 263168 ['conv4_block2_2_relu[0][0]'] \n",
|
||
|
" ) \n",
|
||
|
" \n",
|
||
|
" conv4_block2_3_bn (BatchNormal (None, 14, 14, 1024 4096 ['conv4_block2_3_conv[0][0]'] \n",
|
||
|
" ization) ) \n",
|
||
|
" \n",
|
||
|
" conv4_block2_add (Add) (None, 14, 14, 1024 0 ['conv4_block1_out[0][0]', \n",
|
||
|
" ) 'conv4_block2_3_bn[0][0]'] \n",
|
||
|
" \n",
|
||
|
" conv4_block2_out (Activation) (None, 14, 14, 1024 0 ['conv4_block2_add[0][0]'] \n",
|
||
|
" ) \n",
|
||
|
" \n",
|
||
|
" conv4_block3_1_conv (Conv2D) (None, 14, 14, 256) 262400 ['conv4_block2_out[0][0]'] \n",
|
||
|
" \n",
|
||
|
" conv4_block3_1_bn (BatchNormal (None, 14, 14, 256) 1024 ['conv4_block3_1_conv[0][0]'] \n",
|
||
|
" ization) \n",
|
||
|
" \n",
|
||
|
" conv4_block3_1_relu (Activatio (None, 14, 14, 256) 0 ['conv4_block3_1_bn[0][0]'] \n",
|
||
|
" n) \n",
|
||
|
" \n",
|
||
|
" conv4_block3_2_conv (Conv2D) (None, 14, 14, 256) 590080 ['conv4_block3_1_relu[0][0]'] \n",
|
||
|
" \n",
|
||
|
" conv4_block3_2_bn (BatchNormal (None, 14, 14, 256) 1024 ['conv4_block3_2_conv[0][0]'] \n",
|
||
|
" ization) \n",
|
||
|
" \n",
|
||
|
" conv4_block3_2_relu (Activatio (None, 14, 14, 256) 0 ['conv4_block3_2_bn[0][0]'] \n",
|
||
|
" n) \n",
|
||
|
" \n",
|
||
|
" conv4_block3_3_conv (Conv2D) (None, 14, 14, 1024 263168 ['conv4_block3_2_relu[0][0]'] \n",
|
||
|
" ) \n",
|
||
|
" \n",
|
||
|
" conv4_block3_3_bn (BatchNormal (None, 14, 14, 1024 4096 ['conv4_block3_3_conv[0][0]'] \n",
|
||
|
" ization) ) \n",
|
||
|
" \n",
|
||
|
" conv4_block3_add (Add) (None, 14, 14, 1024 0 ['conv4_block2_out[0][0]', \n",
|
||
|
" ) 'conv4_block3_3_bn[0][0]'] \n",
|
||
|
" \n",
|
||
|
" conv4_block3_out (Activation) (None, 14, 14, 1024 0 ['conv4_block3_add[0][0]'] \n",
|
||
|
" ) \n",
|
||
|
" \n",
|
||
|
" conv4_block4_1_conv (Conv2D) (None, 14, 14, 256) 262400 ['conv4_block3_out[0][0]'] \n",
|
||
|
" \n",
|
||
|
" conv4_block4_1_bn (BatchNormal (None, 14, 14, 256) 1024 ['conv4_block4_1_conv[0][0]'] \n",
|
||
|
" ization) \n",
|
||
|
" \n",
|
||
|
" conv4_block4_1_relu (Activatio (None, 14, 14, 256) 0 ['conv4_block4_1_bn[0][0]'] \n",
|
||
|
" n) \n",
|
||
|
" \n",
|
||
|
" conv4_block4_2_conv (Conv2D) (None, 14, 14, 256) 590080 ['conv4_block4_1_relu[0][0]'] \n",
|
||
|
" \n",
|
||
|
" conv4_block4_2_bn (BatchNormal (None, 14, 14, 256) 1024 ['conv4_block4_2_conv[0][0]'] \n",
|
||
|
" ization) \n",
|
||
|
" \n",
|
||
|
" conv4_block4_2_relu (Activatio (None, 14, 14, 256) 0 ['conv4_block4_2_bn[0][0]'] \n",
|
||
|
" n) \n",
|
||
|
" \n",
|
||
|
" conv4_block4_3_conv (Conv2D) (None, 14, 14, 1024 263168 ['conv4_block4_2_relu[0][0]'] \n",
|
||
|
" ) \n",
|
||
|
" \n",
|
||
|
" conv4_block4_3_bn (BatchNormal (None, 14, 14, 1024 4096 ['conv4_block4_3_conv[0][0]'] \n",
|
||
|
" ization) ) \n",
|
||
|
" \n",
|
||
|
" conv4_block4_add (Add) (None, 14, 14, 1024 0 ['conv4_block3_out[0][0]', \n",
|
||
|
" ) 'conv4_block4_3_bn[0][0]'] \n",
|
||
|
" \n",
|
||
|
" conv4_block4_out (Activation) (None, 14, 14, 1024 0 ['conv4_block4_add[0][0]'] \n",
|
||
|
" ) \n",
|
||
|
" \n",
|
||
|
" conv4_block5_1_conv (Conv2D) (None, 14, 14, 256) 262400 ['conv4_block4_out[0][0]'] \n",
|
||
|
" \n",
|
||
|
" conv4_block5_1_bn (BatchNormal (None, 14, 14, 256) 1024 ['conv4_block5_1_conv[0][0]'] \n",
|
||
|
" ization) \n",
|
||
|
" \n",
|
||
|
" conv4_block5_1_relu (Activatio (None, 14, 14, 256) 0 ['conv4_block5_1_bn[0][0]'] \n",
|
||
|
" n) \n",
|
||
|
" \n",
|
||
|
" conv4_block5_2_conv (Conv2D) (None, 14, 14, 256) 590080 ['conv4_block5_1_relu[0][0]'] \n",
|
||
|
" \n",
|
||
|
" conv4_block5_2_bn (BatchNormal (None, 14, 14, 256) 1024 ['conv4_block5_2_conv[0][0]'] \n",
|
||
|
" ization) \n",
|
||
|
" \n",
|
||
|
" conv4_block5_2_relu (Activatio (None, 14, 14, 256) 0 ['conv4_block5_2_bn[0][0]'] \n",
|
||
|
" n) \n",
|
||
|
" \n",
|
||
|
" conv4_block5_3_conv (Conv2D) (None, 14, 14, 1024 263168 ['conv4_block5_2_relu[0][0]'] \n",
|
||
|
" ) \n",
|
||
|
" \n",
|
||
|
" conv4_block5_3_bn (BatchNormal (None, 14, 14, 1024 4096 ['conv4_block5_3_conv[0][0]'] \n",
|
||
|
" ization) ) \n",
|
||
|
" \n",
|
||
|
" conv4_block5_add (Add) (None, 14, 14, 1024 0 ['conv4_block4_out[0][0]', \n",
|
||
|
" ) 'conv4_block5_3_bn[0][0]'] \n",
|
||
|
" \n",
|
||
|
" conv4_block5_out (Activation) (None, 14, 14, 1024 0 ['conv4_block5_add[0][0]'] \n",
|
||
|
" ) \n",
|
||
|
" \n",
|
||
|
" conv4_block6_1_conv (Conv2D) (None, 14, 14, 256) 262400 ['conv4_block5_out[0][0]'] \n",
|
||
|
" \n",
|
||
|
" conv4_block6_1_bn (BatchNormal (None, 14, 14, 256) 1024 ['conv4_block6_1_conv[0][0]'] \n",
|
||
|
" ization) \n",
|
||
|
" \n",
|
||
|
" conv4_block6_1_relu (Activatio (None, 14, 14, 256) 0 ['conv4_block6_1_bn[0][0]'] \n",
|
||
|
" n) \n",
|
||
|
" \n",
|
||
|
" conv4_block6_2_conv (Conv2D) (None, 14, 14, 256) 590080 ['conv4_block6_1_relu[0][0]'] \n",
|
||
|
" \n",
|
||
|
" conv4_block6_2_bn (BatchNormal (None, 14, 14, 256) 1024 ['conv4_block6_2_conv[0][0]'] \n",
|
||
|
" ization) \n",
|
||
|
" \n",
|
||
|
" conv4_block6_2_relu (Activatio (None, 14, 14, 256) 0 ['conv4_block6_2_bn[0][0]'] \n",
|
||
|
" n) \n",
|
||
|
" \n",
|
||
|
" conv4_block6_3_conv (Conv2D) (None, 14, 14, 1024 263168 ['conv4_block6_2_relu[0][0]'] \n",
|
||
|
" ) \n",
|
||
|
" \n",
|
||
|
" conv4_block6_3_bn (BatchNormal (None, 14, 14, 1024 4096 ['conv4_block6_3_conv[0][0]'] \n",
|
||
|
" ization) ) \n",
|
||
|
" \n",
|
||
|
" conv4_block6_add (Add) (None, 14, 14, 1024 0 ['conv4_block5_out[0][0]', \n",
|
||
|
" ) 'conv4_block6_3_bn[0][0]'] \n",
|
||
|
" \n",
|
||
|
" conv4_block6_out (Activation) (None, 14, 14, 1024 0 ['conv4_block6_add[0][0]'] \n",
|
||
|
" ) \n",
|
||
|
" \n",
|
||
|
" conv5_block1_1_conv (Conv2D) (None, 7, 7, 512) 524800 ['conv4_block6_out[0][0]'] \n",
|
||
|
" \n",
|
||
|
" conv5_block1_1_bn (BatchNormal (None, 7, 7, 512) 2048 ['conv5_block1_1_conv[0][0]'] \n",
|
||
|
" ization) \n",
|
||
|
" \n",
|
||
|
" conv5_block1_1_relu (Activatio (None, 7, 7, 512) 0 ['conv5_block1_1_bn[0][0]'] \n",
|
||
|
" n) \n",
|
||
|
" \n",
|
||
|
" conv5_block1_2_conv (Conv2D) (None, 7, 7, 512) 2359808 ['conv5_block1_1_relu[0][0]'] \n",
|
||
|
" \n",
|
||
|
" conv5_block1_2_bn (BatchNormal (None, 7, 7, 512) 2048 ['conv5_block1_2_conv[0][0]'] \n",
|
||
|
" ization) \n",
|
||
|
" \n",
|
||
|
" conv5_block1_2_relu (Activatio (None, 7, 7, 512) 0 ['conv5_block1_2_bn[0][0]'] \n",
|
||
|
" n) \n",
|
||
|
" \n",
|
||
|
" conv5_block1_0_conv (Conv2D) (None, 7, 7, 2048) 2099200 ['conv4_block6_out[0][0]'] \n",
|
||
|
" \n",
|
||
|
" conv5_block1_3_conv (Conv2D) (None, 7, 7, 2048) 1050624 ['conv5_block1_2_relu[0][0]'] \n",
|
||
|
" \n",
|
||
|
" conv5_block1_0_bn (BatchNormal (None, 7, 7, 2048) 8192 ['conv5_block1_0_conv[0][0]'] \n",
|
||
|
" ization) \n",
|
||
|
" \n",
|
||
|
" conv5_block1_3_bn (BatchNormal (None, 7, 7, 2048) 8192 ['conv5_block1_3_conv[0][0]'] \n",
|
||
|
" ization) \n",
|
||
|
" \n",
|
||
|
" conv5_block1_add (Add) (None, 7, 7, 2048) 0 ['conv5_block1_0_bn[0][0]', \n",
|
||
|
" 'conv5_block1_3_bn[0][0]'] \n",
|
||
|
" \n",
|
||
|
" conv5_block1_out (Activation) (None, 7, 7, 2048) 0 ['conv5_block1_add[0][0]'] \n",
|
||
|
" \n",
|
||
|
" conv5_block2_1_conv (Conv2D) (None, 7, 7, 512) 1049088 ['conv5_block1_out[0][0]'] \n",
|
||
|
" \n",
|
||
|
" conv5_block2_1_bn (BatchNormal (None, 7, 7, 512) 2048 ['conv5_block2_1_conv[0][0]'] \n",
|
||
|
" ization) \n",
|
||
|
" \n",
|
||
|
" conv5_block2_1_relu (Activatio (None, 7, 7, 512) 0 ['conv5_block2_1_bn[0][0]'] \n",
|
||
|
" n) \n",
|
||
|
" \n",
|
||
|
" conv5_block2_2_conv (Conv2D) (None, 7, 7, 512) 2359808 ['conv5_block2_1_relu[0][0]'] \n",
|
||
|
" \n",
|
||
|
" conv5_block2_2_bn (BatchNormal (None, 7, 7, 512) 2048 ['conv5_block2_2_conv[0][0]'] \n",
|
||
|
" ization) \n",
|
||
|
" \n",
|
||
|
" conv5_block2_2_relu (Activatio (None, 7, 7, 512) 0 ['conv5_block2_2_bn[0][0]'] \n",
|
||
|
" n) \n",
|
||
|
" \n",
|
||
|
" conv5_block2_3_conv (Conv2D) (None, 7, 7, 2048) 1050624 ['conv5_block2_2_relu[0][0]'] \n",
|
||
|
" \n",
|
||
|
" conv5_block2_3_bn (BatchNormal (None, 7, 7, 2048) 8192 ['conv5_block2_3_conv[0][0]'] \n",
|
||
|
" ization) \n",
|
||
|
" \n",
|
||
|
" conv5_block2_add (Add) (None, 7, 7, 2048) 0 ['conv5_block1_out[0][0]', \n",
|
||
|
" 'conv5_block2_3_bn[0][0]'] \n",
|
||
|
" \n",
|
||
|
" conv5_block2_out (Activation) (None, 7, 7, 2048) 0 ['conv5_block2_add[0][0]'] \n",
|
||
|
" \n",
|
||
|
" conv5_block3_1_conv (Conv2D) (None, 7, 7, 512) 1049088 ['conv5_block2_out[0][0]'] \n",
|
||
|
" \n",
|
||
|
" conv5_block3_1_bn (BatchNormal (None, 7, 7, 512) 2048 ['conv5_block3_1_conv[0][0]'] \n",
|
||
|
" ization) \n",
|
||
|
" \n",
|
||
|
" conv5_block3_1_relu (Activatio (None, 7, 7, 512) 0 ['conv5_block3_1_bn[0][0]'] \n",
|
||
|
" n) \n",
|
||
|
" \n",
|
||
|
" conv5_block3_2_conv (Conv2D) (None, 7, 7, 512) 2359808 ['conv5_block3_1_relu[0][0]'] \n",
|
||
|
" \n",
|
||
|
" conv5_block3_2_bn (BatchNormal (None, 7, 7, 512) 2048 ['conv5_block3_2_conv[0][0]'] \n",
|
||
|
" ization) \n",
|
||
|
" \n",
|
||
|
" conv5_block3_2_relu (Activatio (None, 7, 7, 512) 0 ['conv5_block3_2_bn[0][0]'] \n",
|
||
|
" n) \n",
|
||
|
" \n",
|
||
|
" conv5_block3_3_conv (Conv2D) (None, 7, 7, 2048) 1050624 ['conv5_block3_2_relu[0][0]'] \n",
|
||
|
" \n",
|
||
|
" conv5_block3_3_bn (BatchNormal (None, 7, 7, 2048) 8192 ['conv5_block3_3_conv[0][0]'] \n",
|
||
|
" ization) \n",
|
||
|
" \n",
|
||
|
" conv5_block3_add (Add) (None, 7, 7, 2048) 0 ['conv5_block2_out[0][0]', \n",
|
||
|
" 'conv5_block3_3_bn[0][0]'] \n",
|
||
|
" \n",
|
||
|
" conv5_block3_out (Activation) (None, 7, 7, 2048) 0 ['conv5_block3_add[0][0]'] \n",
|
||
|
" \n",
|
||
|
" flatten_1 (Flatten) (None, 100352) 0 ['conv5_block3_out[0][0]'] \n",
|
||
|
" \n",
|
||
|
" dense_1 (Dense) (None, 12) 1204236 ['flatten_1[0][0]'] \n",
|
||
|
" \n",
|
||
|
"==================================================================================================\n",
|
||
|
"Total params: 24,791,948\n",
|
||
|
"Trainable params: 1,204,236\n",
|
||
|
"Non-trainable params: 23,587,712\n",
|
||
|
"__________________________________________________________________________________________________\n"
|
||
|
]
|
||
|
}
|
||
|
],
|
||
|
"source": [
|
||
|
"from keras.layers import Input, Lambda, Dense, Flatten\n",
|
||
|
"from keras.models import Model\n",
|
||
|
"from keras.applications import ResNet50\n",
|
||
|
"from keras.preprocessing import image\n",
|
||
|
"from keras.preprocessing.image import ImageDataGenerator\n",
|
||
|
"from keras.models import Sequential\n",
|
||
|
"import numpy as np\n",
|
||
|
"from glob import glob\n",
|
||
|
"import matplotlib.pyplot as plt\n",
|
||
|
"\n",
|
||
|
"# re-size all the images to this\n",
|
||
|
"IMAGE_SIZE = [224, 224]\n",
|
||
|
"\n",
|
||
|
"# add preprocessing layer to the front of resnet\n",
|
||
|
"resnet = ResNet50(input_shape=IMAGE_SIZE + [3], weights='imagenet', include_top=False)\n",
|
||
|
"\n",
|
||
|
"# don't train existing weights\n",
|
||
|
"for layer in resnet.layers:\n",
|
||
|
" layer.trainable = False\n",
|
||
|
" \n",
|
||
|
" # useful for getting number of classes\n",
|
||
|
"classes = 12\n",
|
||
|
" \n",
|
||
|
"\n",
|
||
|
"# our layers - you can add more if you want\n",
|
||
|
"x = Flatten()(resnet.output)\n",
|
||
|
"# x = Dense(1000, activation='relu')(x)\n",
|
||
|
"prediction = Dense(12, activation='softmax')(x)\n",
|
||
|
"\n",
|
||
|
"# create a model object\n",
|
||
|
"model_resnet = Model(inputs=resnet.input, outputs=prediction)\n",
|
||
|
"\n",
|
||
|
"# view the structure of the model\n",
|
||
|
"model_resnet.summary()\n",
|
||
|
"\n",
|
||
|
"# tell the model what cost and optimization method to use\n",
|
||
|
"model_resnet.compile(\n",
|
||
|
" loss='sparse_categorical_crossentropy',\n",
|
||
|
" optimizer='adam',\n",
|
||
|
" metrics=['accuracy']\n",
|
||
|
")"
|
||
|
]
|
||
|
},
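{
"cell_type": "markdown",
"metadata": {},
"source": [
"Only the final `Dense` layer is trained: flattening the `7x7x2048` feature map gives `100352` features, so the head has `100352 * 12 + 12 = 1 204 236` trainable parameters, which matches the summary above; the remaining `23 587 712` parameters are the frozen ResNet50 backbone. `sparse_categorical_crossentropy` expects integer class labels; with one-hot labels, `categorical_crossentropy` would be the matching loss. A much smaller head is possible with global average pooling instead of `Flatten` - a minimal sketch of that alternative (not the configuration trained below):\n",
"\n",
"```python\n",
"from keras.layers import GlobalAveragePooling2D, Dense\n",
"from keras.models import Model\n",
"\n",
"# 7x7x2048 -> 2048 features, so the head shrinks to 2048 * 12 + 12 = 24 588 parameters\n",
"x = GlobalAveragePooling2D()(resnet.output)\n",
"prediction = Dense(classes, activation='softmax')(x)\n",
"model_gap = Model(inputs=resnet.input, outputs=prediction)\n",
"model_gap.compile(loss='sparse_categorical_crossentropy', optimizer='adam', metrics=['accuracy'])\n",
"```"
]
},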
|
||
|
{
"cell_type": "code",
"execution_count": 6,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"Training: 7430\n",
"Test: 2323\n",
"Validation: 1858\n"
]
}
],
"source": [
"train_ds_r, test_ds_r, val_ds_r = prepare_data('./plantvillage/color', img_size=IMAGE_SIZE, test_size=0.2, val_size=0.2)"
]
|
||
|
},
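{
"cell_type": "markdown",
"metadata": {},
"source": [
"Sanity check on the split sizes printed above (11 611 images in total): `test_size=0.2` takes about `0.2 * 11611 = 2322.2`, rounded to `2323` test images; `val_size=0.2` of the remaining `9288` gives about `1858` validation images, leaving `7430` for training. This is consistent with the validation fraction being taken from the remainder after the test split, i.e. roughly a 64/16/20 train/val/test split."
]
},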
|
||
|
{
|
||
|
"cell_type": "code",
|
||
|
"execution_count": 16,
|
||
|
"metadata": {},
|
||
|
"outputs": [
|
||
|
{
|
||
|
"name": "stdout",
|
||
|
"output_type": "stream",
|
||
|
"text": [
|
||
|
"Epoch 1/25\n"
|
||
|
]
|
||
|
},
|
||
|
{
|
||
|
"name": "stderr",
|
||
|
"output_type": "stream",
|
||
|
"text": [
|
||
|
"/var/folders/_h/ljwht4gd7lb99rm1hm78h7_00000gn/T/ipykernel_39241/1735889553.py:1: UserWarning: `Model.fit_generator` is deprecated and will be removed in a future version. Please use `Model.fit`, which supports generators.\n",
|
||
|
" r = model_resnet.fit_generator(\n"
|
||
|
]
|
||
|
},
|
||
|
{
|
||
|
"name": "stdout",
|
||
|
"output_type": "stream",
|
||
|
"text": [
|
||
|
"232/232 [==============================] - 297s 1s/step - loss: 0.6232 - accuracy: 0.8380 - val_loss: 1.2547 - val_accuracy: 0.7328\n",
|
||
|
"Epoch 2/25\n",
|
||
|
"232/232 [==============================] - 277s 1s/step - loss: 0.4919 - accuracy: 0.8611 - val_loss: 0.8189 - val_accuracy: 0.8308\n",
|
||
|
"Epoch 3/25\n",
|
||
|
"232/232 [==============================] - 299s 1s/step - loss: 0.6947 - accuracy: 0.8382 - val_loss: 0.5326 - val_accuracy: 0.8518\n",
|
||
|
"Epoch 4/25\n",
|
||
|
"232/232 [==============================] - 306s 1s/step - loss: 0.6153 - accuracy: 0.8599 - val_loss: 1.1360 - val_accuracy: 0.7710\n",
|
||
|
"Epoch 5/25\n",
|
||
|
"232/232 [==============================] - 311s 1s/step - loss: 0.5149 - accuracy: 0.8689 - val_loss: 1.3260 - val_accuracy: 0.7780\n",
|
||
|
"Epoch 6/25\n",
|
||
|
"232/232 [==============================] - 313s 1s/step - loss: 0.6220 - accuracy: 0.8462 - val_loss: 0.8199 - val_accuracy: 0.8233\n",
|
||
|
"Epoch 7/25\n",
|
||
|
"232/232 [==============================] - 318s 1s/step - loss: 0.6513 - accuracy: 0.8412 - val_loss: 1.1632 - val_accuracy: 0.7457\n",
|
||
|
"Epoch 8/25\n",
|
||
|
"232/232 [==============================] - 320s 1s/step - loss: 0.5098 - accuracy: 0.8623 - val_loss: 0.8247 - val_accuracy: 0.8006\n",
|
||
|
"Epoch 9/25\n",
|
||
|
"232/232 [==============================] - 323s 1s/step - loss: 0.5930 - accuracy: 0.8493 - val_loss: 0.4964 - val_accuracy: 0.8761\n",
|
||
|
"Epoch 10/25\n",
|
||
|
"232/232 [==============================] - 324s 1s/step - loss: 0.5482 - accuracy: 0.8661 - val_loss: 0.8474 - val_accuracy: 0.8109\n",
|
||
|
"Epoch 11/25\n",
|
||
|
"232/232 [==============================] - 322s 1s/step - loss: 0.5106 - accuracy: 0.8668 - val_loss: 1.2926 - val_accuracy: 0.7629\n",
|
||
|
"Epoch 12/25\n",
|
||
|
"232/232 [==============================] - 322s 1s/step - loss: 0.5876 - accuracy: 0.8579 - val_loss: 1.0667 - val_accuracy: 0.7812\n",
|
||
|
"Epoch 13/25\n",
|
||
|
"232/232 [==============================] - 323s 1s/step - loss: 0.6110 - accuracy: 0.8560 - val_loss: 0.5787 - val_accuracy: 0.8545\n",
|
||
|
"Epoch 14/25\n",
|
||
|
"232/232 [==============================] - 323s 1s/step - loss: 0.5797 - accuracy: 0.8524 - val_loss: 0.6400 - val_accuracy: 0.8658\n",
|
||
|
"Epoch 15/25\n",
|
||
|
"232/232 [==============================] - 326s 1s/step - loss: 0.4589 - accuracy: 0.8759 - val_loss: 0.6950 - val_accuracy: 0.8400\n",
|
||
|
"Epoch 16/25\n",
|
||
|
"232/232 [==============================] - 324s 1s/step - loss: 0.5822 - accuracy: 0.8700 - val_loss: 1.4940 - val_accuracy: 0.7678\n",
|
||
|
"Epoch 17/25\n",
|
||
|
"232/232 [==============================] - 322s 1s/step - loss: 0.4404 - accuracy: 0.8827 - val_loss: 1.5049 - val_accuracy: 0.7559\n",
|
||
|
"Epoch 18/25\n",
|
||
|
"232/232 [==============================] - 321s 1s/step - loss: 0.6142 - accuracy: 0.8598 - val_loss: 0.8974 - val_accuracy: 0.8060\n",
|
||
|
"Epoch 19/25\n",
|
||
|
"232/232 [==============================] - 322s 1s/step - loss: 0.5486 - accuracy: 0.8677 - val_loss: 1.5655 - val_accuracy: 0.7753\n",
|
||
|
"Epoch 20/25\n",
|
||
|
"232/232 [==============================] - 326s 1s/step - loss: 0.3964 - accuracy: 0.8947 - val_loss: 0.7896 - val_accuracy: 0.8292\n",
|
||
|
"Epoch 21/25\n",
|
||
|
"232/232 [==============================] - 324s 1s/step - loss: 0.4499 - accuracy: 0.8848 - val_loss: 1.7746 - val_accuracy: 0.7150\n",
|
||
|
"Epoch 22/25\n",
|
||
|
"232/232 [==============================] - 323s 1s/step - loss: 0.4320 - accuracy: 0.8817 - val_loss: 1.2487 - val_accuracy: 0.7974\n",
|
||
|
"Epoch 23/25\n",
|
||
|
"232/232 [==============================] - 322s 1s/step - loss: 0.4307 - accuracy: 0.8844 - val_loss: 0.6485 - val_accuracy: 0.8470\n",
|
||
|
"Epoch 24/25\n",
|
||
|
"232/232 [==============================] - 322s 1s/step - loss: 0.4287 - accuracy: 0.8900 - val_loss: 1.5260 - val_accuracy: 0.7586\n",
|
||
|
"Epoch 25/25\n",
|
||
|
"232/232 [==============================] - 323s 1s/step - loss: 0.6704 - accuracy: 0.8482 - val_loss: 0.7494 - val_accuracy: 0.8287\n"
|
||
|
]
|
||
|
}
|
||
|
],
|
||
|
"source": [
|
||
|
"r = model_resnet.fit_generator(\n",
|
||
|
" train_ds_r,\n",
|
||
|
" validation_data=val_ds_r,\n",
|
||
|
" epochs=25,\n",
|
||
|
" steps_per_epoch=len(train_ds_r),\n",
|
||
|
" validation_steps=len(val_ds_r)\n",
|
||
|
")"
|
||
|
]
|
||
|
},
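{
"cell_type": "markdown",
"metadata": {},
"source": [
"Keras warns above that `Model.fit_generator` is deprecated; `Model.fit` accepts generators directly, so an equivalent call with the same data, epochs and step counts would be:\n",
"\n",
"```python\n",
"r = model_resnet.fit(\n",
"    train_ds_r,\n",
"    validation_data=val_ds_r,\n",
"    epochs=25,\n",
"    steps_per_epoch=len(train_ds_r),\n",
"    validation_steps=len(val_ds_r)\n",
")\n",
"```"
]
},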
|
||
|
{
|
||
|
"cell_type": "code",
|
||
|
"execution_count": 17,
|
||
|
"metadata": {},
|
||
|
"outputs": [
|
||
|
{
|
||
|
"data": {
|
||
|
"image/png": "iVBORw0KGgoAAAANSUhEUgAAAigAAAGdCAYAAAA44ojeAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjYuMiwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8o6BhiAAAACXBIWXMAAA9hAAAPYQGoP6dpAACVr0lEQVR4nO2dd3hUZfbHv1Myk95IJ4HQO6EjYkFFAV1WLCsKK4JlV4W1sDZcBduKa1vLov6s6IpdQNaOIFKkQxCkQ0JCeiG9TDJzf3+8896ZhJQpt07O53nyzM20+2YyM/fcc77newyCIAggCIIgCILQEEa1F0AQBEEQBNEaClAIgiAIgtAcFKAQBEEQBKE5KEAhCIIgCEJzUIBCEARBEITmoACFIAiCIAjNQQEKQRAEQRCagwIUgiAIgiA0h1ntBXiCw+FAfn4+IiIiYDAY1F4OQRAEQRAeIAgCqqurkZKSAqPRu5yILgKU/Px8pKWlqb0MgiAIgiB8IDc3F6mpqV49RhcBSkREBAD2B0ZGRqq8GoIgCIIgPKGqqgppaWnicdwbdBGg8LJOZGQkBSgEQRAEoTN8kWeQSJYgCIIgCM1BAQpBEARBEJqDAhSCIAiCIDSH1xqUjRs34rnnnsPu3btRUFCAVatWYcaMGR0+ZsWKFXj22Wdx7NgxREVFYdq0aXjuuefQrVs3X9d9FoIgoLm5GXa7XbLnJJQhKCgIJpNJ7WUQBEEQGsLrAKW2thYZGRm4+eabcfXVV3d6/y1btmDOnDn497//jenTpyMvLw+33347brvtNqxcudKnRbfGZrOhoKAAdXV1kjwfoSwGgwGpqakIDw9XeykEQRCERvA6QJk2bRqmTZvm8f23bt2K9PR03HXXXQCAXr164a9//Sv+9a9/ebvrNnE4HMjKyoLJZEJKSgosFguZuekIQRBQUlKC06dPo1+/fpRJIQiCIAAo0GY8YcIEPPzww/j2228xbdo0FBcX44svvsDll1/e7mMaGxvR2Ngo/l5VVdXufW02GxwOB9LS0hAaGirp2glliI+PR3Z2NpqamihAIQiCIAAoIJKdOHEiVqxYgZkzZ8JisSApKQlRUVFYtmxZu49ZunQpoqKixB9PXGS9tdAltANlvAiCIIjWyH5UP3jwIO6++24sXrwYu3fvxvfff4/s7Gzcfvvt7T5m0aJFqKysFH9yc3PlXiZBEARBEBpC9hLP0qVLMXHiRNx///0AgOHDhyMsLAznn38+nnrqKSQnJ5/1GKvVCqvVKvfSCIIgCILQKLJnUOrq6s4qv3CdgSAIcu++y5Ceno6XXnpJ9ecgCIIgCCnwOoNSU1OD48ePi79nZWUhMzMTsbGx6NGjBxYtWoS8vDx88MEHAIDp06fjtttuw+uvv44pU6agoKAA99xzD8aNG4eUlBTp/hKdMWnSJIwYMUKygGDnzp0ICwuT5LkIgiAIQm28DlB27dqFiy66SPx94cKFAICbbroJy5cvR0FBAXJycsTb586di+rqavznP//B3//+d0RHR+Piiy+WrM04kBEEAXa7HWZz5/+m+Ph4BVZEEAShE7I3A6VHgTE3q70Swke8LvFMmjQJgiCc9bN8+XIAwPLly7Fhw4YWj/nb3/6G33//HXV1dcjPz8eHH36I7t27S7H+sxAEAXW2ZlV+PC1ZzZ07F7/88gtefvllGAwGGAwGZGdnY8OGDTAYDPjuu+8wevRoWK1WbN68GSdOnMCVV16JxMREhIeHY+zYsfjpp59aPGfr8ozBYMDbb7+Nq666CqGhoejXrx/WrFnj1WuZk5ODK6+8EuHh4YiMjMR1112HoqIi8fZ9+/bhoosuQkREBCIjIzF69Gjs2rULAHDq1ClMnz4dMTExCAsLw5AhQ/Dtt996tX+CIAifWX0n8PW9QOF+tVdC+IjsIlmlqW+yY/DiH1TZ98EnpiDU0vlL+vLLL+Po0aMYOnQonnjiCQAuLxAAeOihh/D888+jd+/eiImJQW5uLi6//HL885//hNVqxQcffIDp06fjyJEj6NGjR7v7efzxx/Hss8/iueeew6uvvorZs2fj1KlTiI2N7XSNDodDDE5++eUXNDc3Y/78+Zg5c6YYgM6ePRsjR47E66+/DpPJhMzMTAQFBQEA5s+fD5vNho0bNyIsLAwHDx4kp1iCIJRBEICqfLZdfBhIGqbuegifCLgARQ9ERUXBYrEgNDQUSUlJZ93+xBNP4NJLLxV/j42NRUZGhvj7k08+iVWrVmHNmjVYsGBBu/uZO3cubrjhBgDA008/jVdeeQU7duzA1KlTO13junXrsH//fmRlZYk+NB988AGGDBmCnTt3YuzYscjJycH999+PgQMHAgD69esnPj4nJwfXXHMNhg1jXwy9e/fudJ8EQRCS0FgNOJrYdvkJdddC+EzABSghQSYcfGKKavuWgjFjxrT4vaamBo899hi++eYbFBQUoLm5GfX19S20Pm0xfPhwcTssLAyRkZEoLi72aA2HDh1CWlpaC5O8wYMHIzo6GocOHcLYsWOxcOFC3Hrrrfjvf/+LyZMn409/+hP69OkDALjrrrtwxx134Mcff8TkyZNxzTXXtFgPQRCEbNSVubbLjrd/P0LTBJz9qsFgQKjFrMqPVI6orbtx7rvvPqxatQpPP/00Nm3ahMzMTAwbNgw2m63D5+HlFvfXxuFwSLJGAHjsscfw+++/44orrsD69esxePBgrFq1CgBw66234uTJk7jxxhuxf/9+jBkzBq+++qpk+yYIgmiXunLXdhllUPRKwAUoesFiscBut3t03y1btmDu3Lm46qqrMGzYMCQlJYl6FbkYNGgQcnNzW7j4Hjx4EBUVFRg8eLB4Xf/+/XHvvffixx9/xNVXX4333ntPvC0tLQ233347Vq5cib///e946623ZF0zQRAEgFYZlBNMk0LoDgpQVCI9PR3bt29HdnY2SktLO8xs9OvXDytXrkRmZib27duHWbNmSZoJaYvJkydj2LBhmD17Nvbs2YMdO3Zgzpw5uPDCCzFmzBjU19djwYIF2LBhA06dOoUtW7Zg586dGDRoEADgnnvuwQ8//ICsrCzs2bMHP//8s3gbQRCErLgHKI2VLX8ndAMFKCpx3333wWQyYfDgwYiPj+9QT/Liiy8iJiYG5557LqZPn44pU6Zg1KhRsq7PYDDgq6++QkxMDC644AJMnjwZvXv3xqeffgqAuQGXlZVhzpw56N+/P6677jpMmzYNjz/+OADAbrdj/vz5GDRoEKZOnYr+/fvjtddek3XNBEEQAM4OSEiHoksMgg785quqqhAVFYXKykpERka2uK2hoQFZWVno1asXgoODVVoh4Q/0PyQIQlJ+ehzY/KLr9ytfA0bOVm89XZiOjt+dQRkUgiAIIrCgDEpAQAEKQRAEEVjwAKVbX3ZJXii6hAIUgiAIIrDgbcZp49kltRrrEgpQCIIgiMCCZ1BSx7LL8pOAzJ2PhPRQgEIQBEEEFjxASRkJGM1AUx1QXaDumgivoQCFIAiCCBwcDqDeWeKJSAKie7Jt0
qHoDgpQCIIgiMChoQIQnOWckFiXUJY6eXQHBSgEQRBE4MAFstZIwGwBurEBpiSU1R8UoOiY9PR0vPTSS+3ePnfuXMyYMUOx9RAEQagO15+EdmOXFKDoFgpQCIIgiMChdYAS6wxQSIOiOyhAIQiCIAKHszIo3KwtC7A3q7MmwicoQFGBN998EykpKWdNJL7yyitx8803AwBOnDiBK6+8EomJiQgPD8fYsWPx008/+bXfxsZG3HXXXUhISEBwcDDOO+887Ny5U7z9zJkzmD17NuLj4xESEoJ+/frhvffeAwDYbDYsWLAAycnJCA4ORs+ePbF06VK/1kMQBCE5rQOUyO6AORhwNAGVueqti/CawAtQBAGw1arz4+HcxT/96U8oKyvDzz//LF5XXl6O77//HrNns4FWNTU1uPzyy7Fu3Trs3bsXU6dOxfTp0zucetwZDzzwAL788ku8//772LNnD/r27YspU6agvJyJyh599FEcPHgQ3333HQ4dOoTXX38dcXFxAIBXXnkFa9aswWeffYY
|
||
|
"text/plain": [
|
||
|
"<Figure size 640x480 with 1 Axes>"
|
||
|
]
|
||
|
},
|
||
|
"metadata": {},
|
||
|
"output_type": "display_data"
|
||
|
},
|
||
|
{
|
||
|
"data": {
|
||
|
"text/plain": [
|
||
|
"<Figure size 640x480 with 0 Axes>"
|
||
|
]
|
||
|
},
|
||
|
"metadata": {},
|
||
|
"output_type": "display_data"
|
||
|
}
|
||
|
],
|
||
|
"source": [
|
||
|
"# loss\n",
|
||
|
"plt.plot(r.history['loss'], label='train loss')\n",
|
||
|
"plt.plot(r.history['val_loss'], label='val loss')\n",
|
||
|
"plt.legend()\n",
|
||
|
"plt.show()\n",
|
||
|
"plt.savefig('LossVal_loss')\n",
|
||
|
"\n"
|
||
|
]
|
||
|
},
|
||
|
{
|
||
|
"cell_type": "code",
|
||
|
"execution_count": 21,
|
||
|
"metadata": {},
|
||
|
"outputs": [
|
||
|
{
|
||
|
"data": {
|
||
|
"image/png": "iVBORw0KGgoAAAANSUhEUgAAAjoAAAGdCAYAAAAbudkLAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjYuMiwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8o6BhiAAAACXBIWXMAAA9hAAAPYQGoP6dpAACjcklEQVR4nOydeXhTdfaH3yTd95ZCS6FQdmTfkUVFQcEFN9xAZVFxGdBRxpkRR8Xlpzg6wzA6uIwD7gy44L6MiIIiq2VRZF8LLW1pS/c9ye+P25uk0C1tkntvet7n6ZPb5Obeb9I0+eSczznHZLfb7QiCIAiCIPghZq0XIAiCIAiC4C1E6AiCIAiC4LeI0BEEQRAEwW8RoSMIgiAIgt8iQkcQBEEQBL9FhI4gCIIgCH6LCB1BEARBEPwWETqCIAiCIPgtAVovwFfYbDYyMjKIjIzEZDJpvRxBEARBEJqA3W6nqKiIpKQkzGb34zOtRuhkZGSQnJys9TIEQRAEQWgGx48fp2PHjm7fr9UIncjISEB5oqKiojRejSAIgiAITaGwsJDk5GTH57i7NEvoLFmyhOeff57MzEwGDhzIiy++yIgRI+rct6qqioULF/Lmm2+Snp5Or169+Otf/8qkSZPcOmZ5eTl/+MMfWLFiBRUVFUycOJGXXnqJhISEJq1ZTVdFRUWJ0BEEQRAEg9Fc24nbya6VK1cyb948FixYwLZt2xg4cCATJ04kOzu7zv0feeQRXn31VV588UV2797N3XffzTXXXMP27dvdOuYDDzzAZ599xvvvv8+6devIyMjg2muvbcZDFgRBEAShtWByd3r5yJEjGT58OP/6178AxeSbnJzMvffey0MPPXTW/klJSfzlL39hzpw5juumTJlCaGgo77zzTpOOWVBQQNu2bVm+fDnXXXcdAHv37uWcc85h48aNnHvuuY2uu7CwkOjoaAoKCiSiIwiCIAgGoaWf325FdCorK0lNTWXChAnOA5jNTJgwgY0bN9Z5n4qKCkJCQmpdFxoayvr165t8zNTUVKqqqmrt07t3bzp16lTveQVBEARBENzy6OTk5GC1Ws/yxSQkJLB379467zNx4kQWLVrE+eefT7du3VizZg2rVq3CarU2+ZiZmZkEBQURExNz1j6ZmZl1nreiooKKigrH74WFhY0+PrvdTnV1tWNtgr6xWCwEBARIuwBBEAShXrxedfXPf/6T2bNn07t3b0wmE926dWPWrFksW7bMq+dduHAhTzzxRJP3r6ys5OTJk5SWlnpxVYKnCQsLo3379gQFBWm9FEEQBEGHuCV04uPjsVgsZGVl1bo+KyuLxMTEOu/Ttm1bPv74Y8rLy8nNzSUpKYmHHnqIrl27NvmYiYmJVFZWkp+fXyuq09B558+fz7x58xy/q+VpdWGz2Thy5AgWi4WkpCSCgoIkSqBz7HY7lZWVnDp1iiNHjtCjR49mNZISBEEQ/Bu3hE5QUBBDhw5lzZo1XH311YAiEtasWcPcuXMbvG9ISAgdOnSgqqqKDz/8kBtuuKHJxxw6dCiBgYGsWbOGKVOmALBv3z7S0tIYNWpUnecLDg4mODi4SY+rsrLSYYAOCwtr0n0E7QkNDSUwMJBjx45RWVl5lhdMEARBENxOXc2bN48ZM2YwbNgwRowYweLFiykpKWHWrFkATJ8+nQ4dOrBw4UIANm/eTHp6OoMGDSI9PZ3HH38cm83Gn/70pyYfMzo6mttvv5158+YRFxdHVFQU9957L6NGjWpSxVVTkYiA8ZC/mSAIgtAQbgudG2+8kVOnTvHYY4+RmZnJoEGD+Prrrx1m4rS0tFofPuXl5TzyyCMcPnyYiIgILrvsMt5+++1aKajGjgnwj3/8A7PZzJQpU2o1DBQEQRAEQagPt/voGJWG6vDLy8s5cuQIXbp0kfSHwZC/nSAIgn/j0z46gv+TkpLC4sWLtV6GIAiCIHiEVjPU018ZN24cgwYN8pg42bp1K+Hh4R45liAIgiBojUR0WgFqI8Sm0LZtW6k8EwTBsFRbbby67hDb005rvRRBJ4jQqQe73U5pZbUmP021Tc2cOZN169bxz3/+E5PJhMlk4ujRo6xduxaTycRXX33F0KFDCQ4OZv369Rw6dIirrrqKhIQEIiIiGD58ON9++22tY56ZujKZTPznP//hmmuuISwsjB49evDpp582uK63336bYcOGERkZSWJiItOmTTtr6Otvv/3GFVdcQVRUFJGRkZx33nkcOnTIcfuyZcvo27cvwcHBtG/fvtH2BYIgCACrtqWz8Ku93PjvTazdV/ewaaF1IamreiirstLnsf9pcu7dT04kLKjxP80///lP9u/fT79+/XjyyScBJSJz9OhRAB566CH+9re/0bVrV2JjYzl+/DiXXXYZTz/9NMHBwbz11ltMnjyZffv20alTp3rP88QTT/Dcc8/x/PPP8+KLL3LzzTdz7Ngx4uLi6ty/qqqKp556il69epGdnc28efOYOXMmX375JQDp6emcf/75jBs3ju+++46oqCh++uknR9Tp5ZdfZt68eTz77LNceumlFBQU8NNPP7nzFAqC0Er5IPUEAJXVNu58K5WXbh7ChD4JjdxL8GdE6BiY6OhogoKCCAsLq7ND9JNPPsnFF1/s+D0uLo6BAwc6fn/qqaf46KOP+PTTTxuMmMycOZOpU6cC8Mwzz/DCCy+wZcsWJk2aVOf+t912m2O7a9euvPDCCwwfPpzi4mIiIiJYsmQJ0dHRrFixgsDAQAB69uzpuM///d//8Yc//IHf//73juuGDx/e2NMhCEIr51huCVuO5mE2wXk92rJu/ynueTeVF6cOYVK/urvoC/6PCJ16CA20sPvJiZqd2xMMGzas1u/FxcU8/vjjfPHFF5w8eZLq6mrKyspIS0tr8DgDBgxwbIeHhxMVFXVWKsqV1NRUHn/8cXbu3Mnp06ex2WyA0mOpT58+7Nixg/POO88hclzJzs4mIyOD8ePHu/NQBUEQ+HBbOgBje7Rl6YxhPPDeTj7bmcGc5dt44abBXD6gvcYrFLRAhE49mEymJqWP9MyZ1VMPPvggq1ev5m9/+xvdu3cnNDSU6667jsrKygaPc6YgMZlMDvFyJiUlJUycOJGJEyfy7rvv0rZtW9LS0pg4caLjPKGhofWeq6HbBEEQ6sNms/NhTdpqypAOBFjM/OOGgQSYTXy0PZ17/7uNatsgrhrUQeOVCr5GzMgGJygoCKvV2qR9f/rpJ2bOnMk111xD//79SUxMdPh5PMXevXvJzc3l2Wef5bzzzqN3795nRX8GDBjAjz/+SFVV1Vn3j4yMJCUlhTVr1nh0XYIg+Debj+SRnl9GZHAAE/sqaaoAi5m/XT+Q64d2xGaHB1bucHh4hNaDCB2Dk5KSwubNmzl69Cg5OTn1RloAevTowapVq9ixYwc7d+5k2rRpDe7fHDp16kRQUBAvvvgihw8f5tNPP+Wpp56qtc/cuXMpLCzkpptu4ueff+bAgQO8/fbb7Nu3D4DHH3+cv//977zwwgscOHCAbdu28eKLL
3p0nYIg+BeqgLliYHtCXNL/FrOJv04ZwNQRnbDZ4Y8f7GTl1obT9YJ/IULH4Dz44INYLBb69OnjSBPVx6JFi4iNjWX06NFMnjyZiRMnMmTIEI+up23btrzxxhu8//779OnTh2effZa//e1vtfZp06YN3333HcXFxVxwwQUMHTqU1157zZEimzFjBosXL+all16ib9++XHHFFRw4cMCj6xQEwX8oqajmq10nAbhuaMezbjebTTxzTT+mj+qM3Q5//vBX3tl0zNfLFDRCZl0h85KMjPztBEH4IPUED76/ky7x4Xz3hwswmUx17me323nq8z0s++kIAI9P7sPMMV18uVRdcTC7mNd/OsKAjtHcOLz+FiNa09JZV8Z22wqCIAitHtWEfO3gDvWKHFAKKR694hwCA0y8uu4wj3+2m2qbnTvO6+qrpeqCU0UVLP52Pyu2Hsdqs/PezyauGtShVsrPnxChIwiCIBiW43mlbDyci8kE19aRtjoTk8nEQ5N6E2Qx8+J3B/m/L/ZQabXxu3HdfbB
|
||
|
"text/plain": [
|
||
|
"<Figure size 640x480 with 1 Axes>"
|
||
|
]
|
||
|
},
|
||
|
"metadata": {},
|
||
|
"output_type": "display_data"
|
||
|
},
|
||
|
{
|
||
|
"data": {
|
||
|
"text/plain": [
|
||
|
"<Figure size 640x480 with 0 Axes>"
|
||
|
]
|
||
|
},
|
||
|
"metadata": {},
|
||
|
"output_type": "display_data"
|
||
|
}
|
||
|
],
|
||
|
"source": [
|
||
|
"# accuracies\n",
|
||
|
"plt.plot(r.history['accuracy'], label='train acc')\n",
|
||
|
"plt.plot(r.history['val_accuracy'], label='val acc')\n",
|
||
|
"plt.legend()\n",
|
||
|
"plt.show()\n",
|
||
|
"plt.savefig('AccVal_acc')\n",
|
||
|
"\n"
|
||
|
]
|
||
|
},
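{
"cell_type": "markdown",
"metadata": {},
"source": [
"The validation curves above fluctuate a lot from epoch to epoch (validation accuracy moves between roughly 0.71 and 0.88), so the weights left in memory after the last epoch are not necessarily the best ones. A checkpoint callback can keep the best epoch instead; a sketch of how that could look (not what was run above, and the file name is just an example):\n",
"\n",
"```python\n",
"from keras.callbacks import ModelCheckpoint, EarlyStopping\n",
"\n",
"callbacks = [\n",
"    # keep only the weights with the best validation accuracy seen so far\n",
"    ModelCheckpoint('resnet_best.h5', monitor='val_accuracy', save_best_only=True),\n",
"    # stop when validation loss has not improved for 5 epochs\n",
"    EarlyStopping(monitor='val_loss', patience=5, restore_best_weights=True)\n",
"]\n",
"\n",
"r = model_resnet.fit(\n",
"    train_ds_r,\n",
"    validation_data=val_ds_r,\n",
"    epochs=25,\n",
"    callbacks=callbacks\n",
")\n",
"```"
]
},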
|
||
|
{
"cell_type": "code",
"execution_count": 20,
"metadata": {},
"outputs": [],
"source": [
"model_resnet.save('resnet_new_model_2.h5')"
]
|
||
|
},
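{
"cell_type": "markdown",
"metadata": {},
"source": [
"The model is saved in the HDF5 format, so it can be restored later without rebuilding the architecture:\n",
"\n",
"```python\n",
"from keras.models import load_model\n",
"\n",
"model_resnet = load_model('resnet_new_model_2.h5')\n",
"```"
]
},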
|
||
|
{
|
||
|
"cell_type": "code",
|
||
|
"execution_count": 23,
|
||
|
"metadata": {},
|
||
|
"outputs": [
|
||
|
{
|
||
|
"name": "stdout",
|
||
|
"output_type": "stream",
|
||
|
"text": [
|
||
|
"72/72 [==============================] - 61s 843ms/step - loss: 0.7182 - accuracy: 0.8411\n"
|
||
|
]
|
||
|
},
|
||
|
{
|
||
|
"data": {
|
||
|
"text/plain": [
|
||
|
"[0.7181549072265625, 0.8411458134651184]"
|
||
|
]
|
||
|
},
|
||
|
"execution_count": 23,
|
||
|
"metadata": {},
|
||
|
"output_type": "execute_result"
|
||
|
}
|
||
|
],
|
||
|
"source": [
|
||
|
"model_resnet.evaluate(test_ds_r)"
|
||
|
]
|
||
|
}
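,
{
"cell_type": "markdown",
"metadata": {},
"source": [
"Overall test accuracy is about 84%; for a 12-class problem a per-class breakdown is often more informative. A sketch of how that could be computed, assuming `test_ds_r` is an indexable Keras sequence of finite length that yields `(images, integer_labels)` batches (integer labels are implied by the sparse loss used above):\n",
"\n",
"```python\n",
"import numpy as np\n",
"from sklearn.metrics import classification_report\n",
"\n",
"y_true, y_pred = [], []\n",
"for i in range(len(test_ds_r)):\n",
"    x_batch, y_batch = test_ds_r[i]\n",
"    probs = model_resnet.predict(x_batch, verbose=0)\n",
"    y_true.extend(np.asarray(y_batch).astype(int).tolist())\n",
"    y_pred.extend(np.argmax(probs, axis=1).tolist())\n",
"\n",
"print(classification_report(y_true, y_pred))\n",
"```"
]
}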
|
||
|
],
|
||
|
"metadata": {
|
||
|
"kernelspec": {
|
||
|
"display_name": "Python 3",
|
||
|
"language": "python",
|
||
|
"name": "python3"
|
||
|
},
|
||
|
"language_info": {
|
||
|
"codemirror_mode": {
|
||
|
"name": "ipython",
|
||
|
"version": 3
|
||
|
},
|
||
|
"file_extension": ".py",
|
||
|
"mimetype": "text/x-python",
|
||
|
"name": "python",
|
||
|
"nbconvert_exporter": "python",
|
||
|
"pygments_lexer": "ipython3",
|
||
|
"version": "3.10.9"
|
||
|
},
|
||
|
"orig_nbformat": 4,
|
||
|
"vscode": {
|
||
|
"interpreter": {
|
||
|
"hash": "b0fa6594d8f4cbf19f97940f81e996739fb7646882a419484c72d19e05852a7e"
|
||
|
}
|
||
|
}
|
||
|
},
|
||
|
"nbformat": 4,
|
||
|
"nbformat_minor": 2
|
||
|
}
|