Symulowanie-wizualne/sw_lab9-10_2.ipynb

{
"cells": [
{
"attachments": {},
"cell_type": "markdown",
"metadata": {},
"source": [
"### Aleksandra Jonas, Aleksandra Gronowska, Iwona Christop\n",
"# Zadanie 9-10 - AlexNet, VGG16, ResNet on village"
]
},
{
"attachments": {},
"cell_type": "markdown",
"metadata": {},
"source": [
"### Przygotowanie danych"
]
},
{
"cell_type": "code",
"execution_count": 1,
"metadata": {},
"outputs": [],
"source": [
"from IPython.display import Image, display"
]
},
{
"cell_type": "code",
"execution_count": 2,
"metadata": {},
"outputs": [],
"source": [
"import sys\n",
"import subprocess\n",
"import pkg_resources\n",
"import numpy as np\n",
"\n",
"required = { 'scikit-image'}\n",
"installed = {pkg.key for pkg in pkg_resources.working_set}\n",
"missing = required - installed\n",
"\n",
"if missing: \n",
" python = sys.executable\n",
" subprocess.check_call([python, '-m', 'pip', 'install', *missing], stdout=subprocess.DEVNULL)\n",
"\n",
"def load_data(input_dir, img_size):\n",
" import numpy as np\n",
" import pandas as pd\n",
" import os\n",
" from skimage.io import imread\n",
" import cv2 as cv\n",
" from pathlib import Path\n",
" import random\n",
" from shutil import copyfile, rmtree\n",
" import json\n",
"\n",
" import seaborn as sns\n",
" import matplotlib.pyplot as plt\n",
"\n",
" import matplotlib\n",
" \n",
" image_dir = Path(input_dir)\n",
" categories_name = []\n",
" for file in os.listdir(image_dir):\n",
" d = os.path.join(image_dir, file)\n",
" if os.path.isdir(d):\n",
" categories_name.append(file)\n",
"\n",
" folders = [directory for directory in image_dir.iterdir() if directory.is_dir()]\n",
" \n",
" ds_img = []\n",
" categories_count=[]\n",
" labels=[]\n",
" for i, direc in enumerate(folders):\n",
" count = 0\n",
" for obj in direc.iterdir():\n",
" if os.path.isfile(obj) and os.path.basename(os.path.normpath(obj)) != 'desktop.ini':\n",
" labels.append(os.path.basename(os.path.normpath(direc)))\n",
" count += 1\n",
" img = imread(obj)#zwraca ndarry postaci xSize x ySize x colorDepth\n",
" img = img[:, :, :3]\n",
" img = cv.resize(img, img_size, interpolation=cv.INTER_AREA)# zwraca ndarray\n",
" img = img / 255 #normalizacja\n",
" ds_img.append(img)\n",
" categories_count.append(count)\n",
" X={}\n",
" X[\"values\"] = np.array(ds_img)\n",
" X[\"categories_name\"] = categories_name\n",
" X[\"categories_count\"] = categories_count\n",
" X[\"labels\"]=labels\n",
" return X"
]
},
{
"cell_type": "code",
"execution_count": 3,
"metadata": {},
"outputs": [],
"source": [
"def get_run_logdir(root_logdir):\n",
" import os\n",
" import time\n",
"\n",
" run_id = time.strftime(\"run_%Y_%m_%d-%H_%M_%S\")\n",
" return os.path.join(root_logdir, run_id)"
]
},
{
"cell_type": "code",
"execution_count": 4,
"metadata": {},
"outputs": [],
"source": [
"def diagram_setup(model_name):\n",
" from tensorflow import keras\n",
" import os\n",
" \n",
" root_logdir = os.path.join(os.curdir, f\"logs\\\\fit\\\\{model_name}\\\\\")\n",
" \n",
" run_logdir = get_run_logdir(root_logdir)\n",
" tensorboard_cb = keras.callbacks.TensorBoard(run_logdir)"
]
},
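{
"attachments": {},
"cell_type": "markdown",
"metadata": {},
"source": [
"A minimal usage sketch for `diagram_setup`: as defined above it returns a per-run TensorBoard callback, which could be passed to `fit` alongside the checkpoint and early-stopping callbacks used later. It is not used in the runs recorded in this notebook."
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Sketch: create a TensorBoard callback for a named run.\n",
"# Logs end up under logs/fit/<model_name>/run_<timestamp>/ and can be viewed with:\n",
"#   tensorboard --logdir logs/fit\n",
"tb_cb = diagram_setup('alexnet')\n",
"# ...then pass it to fit(), e.g. callbacks=[checkpoint, early, tb_cb]"
]
},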
{
"cell_type": "code",
"execution_count": 5,
"metadata": {},
"outputs": [],
"source": [
"def prepare_data(path, img_size, test_size, val_size):\n",
" from sklearn.model_selection import train_test_split\n",
" from sklearn.preprocessing import LabelEncoder\n",
" import tensorflow as tf\n",
"\n",
" data = load_data(path, img_size)\n",
" values = data['values']\n",
" labels = data['labels']\n",
"\n",
" X_train, X_test, y_train, y_test = train_test_split(values, labels, test_size=test_size, random_state=42)\n",
" X_train, X_validate, y_train, y_validate = train_test_split(X_train, y_train, test_size=val_size, random_state=42)\n",
"\n",
" class_le = LabelEncoder()\n",
" y_train_enc = class_le.fit_transform(y_train)\n",
" y_validate_enc = class_le.fit_transform(y_validate)\n",
" y_test_enc = class_le.fit_transform(y_test)\n",
"\n",
" train_ds = tf.data.Dataset.from_tensor_slices((X_train, y_train_enc))\n",
" validation_ds = tf.data.Dataset.from_tensor_slices((X_validate, y_validate_enc))\n",
" test_ds = tf.data.Dataset.from_tensor_slices((X_test, y_test_enc))\n",
"\n",
" train_ds_size = tf.data.experimental.cardinality(train_ds).numpy()\n",
" test_ds_size = tf.data.experimental.cardinality(test_ds).numpy()\n",
" validation_ds_size = tf.data.experimental.cardinality(validation_ds).numpy()\n",
"\n",
" #Rozmiary zbiorów\n",
" print(\"Training:\", train_ds_size)\n",
" print(\"Test:\", test_ds_size)\n",
" print(\"Validation:\", validation_ds_size)\n",
"\n",
" # Mieszanie zriorów\n",
" train_ds = (train_ds.shuffle(buffer_size=train_ds_size).batch(batch_size=32, drop_remainder=True))\n",
" test_ds = (test_ds.shuffle(buffer_size=train_ds_size).batch(batch_size=32, drop_remainder=True))\n",
" validation_ds = (validation_ds.shuffle(buffer_size=train_ds_size).batch(batch_size=32, drop_remainder=True))\n",
"\n",
" return train_ds, test_ds, validation_ds\n",
"\n"
]
},
{
"attachments": {},
"cell_type": "markdown",
"metadata": {},
"source": [
"# AlexNet"
]
},
{
"cell_type": "code",
"execution_count": 6,
"metadata": {},
"outputs": [
{
"name": "stderr",
"output_type": "stream",
"text": [
"WARNING:absl:`lr` is deprecated, please use `learning_rate` instead, or use the legacy optimizer, e.g.,tf.keras.optimizers.legacy.SGD.\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"Model: \"sequential\"\n",
"_________________________________________________________________\n",
" Layer (type) Output Shape Param # \n",
"=================================================================\n",
" conv2d (Conv2D) (None, 55, 55, 96) 34944 \n",
" \n",
" max_pooling2d (MaxPooling2D (None, 27, 27, 96) 0 \n",
" ) \n",
" \n",
" conv2d_1 (Conv2D) (None, 27, 27, 256) 614656 \n",
" \n",
" max_pooling2d_1 (MaxPooling (None, 13, 13, 256) 0 \n",
" 2D) \n",
" \n",
" conv2d_2 (Conv2D) (None, 13, 13, 384) 885120 \n",
" \n",
" conv2d_3 (Conv2D) (None, 13, 13, 384) 1327488 \n",
" \n",
" conv2d_4 (Conv2D) (None, 13, 13, 256) 884992 \n",
" \n",
" max_pooling2d_2 (MaxPooling (None, 6, 6, 256) 0 \n",
" 2D) \n",
" \n",
" flatten (Flatten) (None, 9216) 0 \n",
" \n",
" dense (Dense) (None, 4096) 37752832 \n",
" \n",
" dense_1 (Dense) (None, 4096) 16781312 \n",
" \n",
" dense_2 (Dense) (None, 3) 12291 \n",
" \n",
"=================================================================\n",
"Total params: 58,293,635\n",
"Trainable params: 58,293,635\n",
"Non-trainable params: 0\n",
"_________________________________________________________________\n"
]
}
],
"source": [
"from tensorflow import keras\n",
"import tensorflow as tf\n",
"import os\n",
"import time\n",
"\n",
"model = keras.models.Sequential([\n",
" keras.layers.Conv2D(filters=96, kernel_size=(11,11), strides=(4,4), activation='relu', input_shape=(227,227,3)),\n",
" keras.layers.MaxPool2D(pool_size=(3,3), strides=(2,2)),\n",
" keras.layers.Conv2D(filters=256, kernel_size=(5,5), strides=(1,1), activation='relu', padding=\"same\"),\n",
" keras.layers.MaxPool2D(pool_size=(3,3), strides=(2,2)),\n",
" keras.layers.Conv2D(filters=384, kernel_size=(3,3), strides=(1,1), activation='relu', padding=\"same\"),\n",
" keras.layers.Conv2D(filters=384, kernel_size=(3,3), strides=(1,1), activation='relu', padding=\"same\"),\n",
" keras.layers.Conv2D(filters=256, kernel_size=(3,3), strides=(1,1), activation='relu', padding=\"same\"),\n",
" keras.layers.MaxPool2D(pool_size=(3,3), strides=(2,2)),\n",
" keras.layers.Flatten(),\n",
" keras.layers.Dense(4096, activation='relu'),\n",
" keras.layers.Dense(4096, activation='relu'),\n",
" keras.layers.Dense(3, activation='softmax')\n",
"])\n",
"\n",
"model.compile(loss='sparse_categorical_crossentropy', optimizer=tf.optimizers.SGD(lr=.001), metrics=['accuracy'])\n",
"model.summary()"
]
},
{
"cell_type": "code",
"execution_count": 7,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"Training: 4772\n",
"Test: 1492\n",
"Validation: 1194\n"
]
}
],
"source": [
"train_ds_a, test_ds_a, val_ds_a = prepare_data(\"./plantvillage/color\", (227, 227), 0.2, 0.2)"
]
},
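{
"attachments": {},
"cell_type": "markdown",
"metadata": {},
"source": [
"A quick sanity check on the batched pipeline: every element of `train_ds_a` should be a batch of 32 images of shape 227x227x3 together with integer class labels."
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Optional sanity check: shapes and label values of a single training batch.\n",
"import numpy as np\n",
"\n",
"for images, labels in train_ds_a.take(1):\n",
"    print('images:', images.shape)               # expected: (32, 227, 227, 3)\n",
"    print('labels:', np.unique(labels.numpy()))  # expected: a subset of {0, 1, 2}"
]
},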
{
"cell_type": "code",
"execution_count": 8,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"WARNING:tensorflow:`period` argument is deprecated. Please use `save_freq` to specify the frequency in number of batches seen.\n"
]
},
{
"name": "stderr",
"output_type": "stream",
"text": [
"WARNING:tensorflow:`period` argument is deprecated. Please use `save_freq` to specify the frequency in number of batches seen.\n",
"/var/folders/6b/j4d60ym516x2s6wymzj707rh0000gn/T/ipykernel_9542/953612165.py:6: UserWarning: `Model.fit_generator` is deprecated and will be removed in a future version. Please use `Model.fit`, which supports generators.\n",
" alex = model.fit_generator(\n",
"2023-01-06 04:01:52.794677: W tensorflow/tsl/platform/profile_utils/cpu_utils.cc:128] Failed to get CPU frequency: 0 Hz\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"149/149 [==============================] - ETA: 0s - loss: 0.5993 - accuracy: 0.7576\n",
"Epoch 1: val_accuracy improved from -inf to 0.41385, saving model to alex_2.h5\n",
"149/149 [==============================] - 167s 1s/step - loss: 0.5993 - accuracy: 0.7576 - val_loss: 0.9947 - val_accuracy: 0.4139\n"
]
}
],
"source": [
"from keras.callbacks import ModelCheckpoint, EarlyStopping\n",
"\n",
"checkpoint = ModelCheckpoint(\"alex_2.h5\", monitor='val_accuracy', verbose=1, save_best_only=True, save_weights_only=False, mode='auto', period=1)\n",
"early = EarlyStopping(monitor='val_accuracy', min_delta=0, patience=20, verbose=1, mode='auto')\n",
"\n",
"alex = model.fit_generator(\n",
" steps_per_epoch=len(train_ds_a), \n",
" generator=train_ds_a, \n",
" validation_data= val_ds_a, \n",
" validation_steps=len(val_ds_a), \n",
" epochs=1, \n",
" callbacks=[checkpoint,early])"
]
},
{
"cell_type": "code",
"execution_count": 9,
"metadata": {},
"outputs": [
{
"data": {
"image/png": "iVBORw0KGgoAAAANSUhEUgAAAjcAAAHHCAYAAABDUnkqAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjYuMiwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8o6BhiAAAACXBIWXMAAA9hAAAPYQGoP6dpAABNIUlEQVR4nO3deVwV9f4/8Nc5LId9E2QLRRQVDcFADK3U5IaouOSCuICImCWmkmVeF9RK67qEpml5Wa633Le8V5MQdyXXi2miqaG4AIoKCCrIOZ/fH/48344sgnI4ML6ej8c89HzmMzPvmU6dVzOfmZEJIQSIiIiIJEKu6wKIiIiIahPDDREREUkKww0RERFJCsMNERERSQrDDREREUkKww0RERFJCsMNERERSQrDDREREUkKww0RERFJCsMNEdUamUyGWbNm1Xi5y5cvQyaTISkpqdZrIqKXD8MNkcQkJSVBJpNBJpPh4MGD5eYLIeDi4gKZTIbevXvroEIiIu1iuCGSKCMjI6xevbpc+759+3Dt2jUoFAodVEVEpH0MN0QS1bNnT2zYsAFlZWUa7atXr4aPjw8cHBx0VNnLo7i4WNclEL2UGG6IJCo0NBS3b99GSkqKuq20tBQbN27E0KFDK1ymuLgYH330EVxcXKBQKNCqVSssWLAAQgiNfiUlJZg0aRLs7Oxgbm6OPn364Nq1axWu8/r16xg1ahTs7e2hUCjQtm1bJCQkPNc+3blzB5MnT4anpyfMzMxgYWGBoKAgnDp1qlzfhw8fYtasWWjZsiWMjIzg6OiId999F5cuXVL3UalUWLx4MTw9PWFkZAQ7Ozv06NEDx48fB1D1WKCnxxfNmjULMpkMZ8+exdChQ2FtbY033ngDAPDbb79h5MiRcHNzg5GRERwcHDBq1Cjcvn27wuMVGRkJJycnKBQKNGvWDO+//z5KS0vx559/QiaT4euvvy633OHDhyGTybBmzZqaHlYiydHXdQFEpB2urq7w9/fHmjVrEBQUBAD4+eefUVBQgCFDhmDJkiUa/YUQ6NOnD/bs2YPIyEh4e3sjOTkZH3/8Ma5fv67xgzp69Gj88MMPGDp0KDp16oTdu3ejV69e5WrIzc3F66+/DplMhujoaNjZ2eHnn39GZGQkCgsLMXHixBrt059//omtW7di0KBBaNasGXJzc/Hdd9+hS5cuOHv2LJycnAAASqUSvXv3RmpqKoYMGYIJEybg3r17SElJwZkzZ9C8eXMAQGRkJJKSkhAUFITRo0ejrKwMBw4cwK+//gpfX98a1fbEoEGD4O7ujrlz56pDYUpKCv78809ERETAwcEBv//+O77//nv8/vvv+PXXXyGTyQAAN27cgJ+fH/Lz8zFmzBi0bt0a169fx8aNG3H//n24ubmhc+fO+PHHHzFp0iSN7f74448wNzdH3759n6tuIkkRRCQpiYmJAoA4duyYWLp0qTA3Nxf3798XQggxaNAg0a1bNyGEEE2bNhW9evVSL7d161YBQHz++eca6xs4cKCQyWTi4sWLQggh0tPTBQDxwQcfaPQbOnSoACBiY2PVbZGRkcLR0VHk5eVp9B0yZIiwtLRU15WZmSkAiMTExCr37eHDh0KpVGq0ZWZmCoVCIebMmaNuS0hIEADEokWLyq1DpVIJIYTYvXu3ACA+/PDDSvtUVdfT+xobGysAiNDQ0HJ9n+znX61Zs0YAEPv371e3hYWFCblcLo4dO1ZpTd99950AIDIyMtTzSktLha2trQgPDy+3HNHLiJeliCRs8ODBePDgAf773//i3r17+O9//1vpJakdO3ZAT08PH374oUb7Rx99BCEEfv75Z3U/AOX6PX0WRgiBTZs2ITg4GEII5OXlqafAwEAUFBTg5MmTNdofhUIBufzxf7aUSiVu374NMzMztGrVSmNdmzZtgq2tLcaPH19uHU/OkmzatAkymQyxsbGV9nkeY8eOLddmbGys/vvDhw+Rl5eH119/HQDUdatUKmzduhXBwcEVnjV6UtPgwYNhZGSEH3/8UT0vOTkZeXl5GD58+HPXTSQlDDdEEmZnZ4eAgACsXr0amzdvhlKpxMCBAyvse+XKFTg5OcHc3Fyj3cPDQz3/yZ9yuVx9aeeJVq1aaXy+desW8vPz8f3338POzk5jioiIAADcvHmzRvujUqnw9ddfw93dHQqFAra2trCzs8Nvv/2GgoICdb9Lly6hVatW0Nev/Mr7pUuX4OTkBBsbmxrV8CzNmjUr13bnzh1MmDAB9vb2MDY2hp2dnbrfk7pv3bqFwsJCvPrqq1Wu38rKCsHBwRp3wv34449wdnbG22+/XYt7QtRwccwNkcQNHToUUVFRyMnJQVBQEKysrOpkuyqVCgAwfPhwhIeHV9inXbt2NVrn3LlzMWPGDIwaNQqfffYZbGxsIJfLMXHiRPX2alNlZ3CUSmWly/z1LM0TgwcPxuHDh/Hxxx/D29sbZmZmUKlU6NGjx3PVHRYWhg0bNuDw4cPw9PTEtm3b8MEHH6jPahG97BhuiCSuf//+eO+99/Drr79i3bp1lfZr2rQpdu3ahXv37mmcvTl37px6/pM/VSqV+uzIE+fPn9dY35M7qZRKJQICAmplXzZu3Ihu3bohPj5eoz0/Px+2trbqz82bN8eRI0fw6NEjGBgYVLiu5s2bIzk5GXfu3Kn07I21tbV6/X/15CxWddy9exepqamYPXs2Zs6cqW6/cOGCRj87OztYWFjgzJkzz1xnjx49YGdnhx9//BEdO3bE/fv3MWLEiGrXRCR1jPlEEmdmZobly5dj1qxZCA4OrrRfz549oVQqsXTpUo32r7/+GjKZTH3H1ZM/n77bKi4uTuOznp4eBgwYgE2bNlX4g33r1q0a74uenl6529I3bNiA69eva7QNGDAAeXl55fYFgHr5AQMGQAiB2bNnV9rHwsICtra22L9/v8b8b7/9tkY1/3WdTzx9vORyOfr164f//Oc/6lvRK6oJAPT19REaGor169cjKSkJnp6eNT4LRiRlPHND9BKo7LLQXwUHB6Nbt26YNm0aLl++DC8vL/zyyy/46aefMHHiRPUYG29vb4SGhuLbb79FQUEBOnXqhNTUVFy8eLHcOr/88kvs2bMHHTt2RFRUFNq0aYM7d+7g5MmT2LVrF+7cuVOj/ejduzfmzJmDiIgIdOrUCadPn8aPP/4INzc3jX5hYWFYtWoVYmJicPToUbz55psoLi7Grl278MEHH6Bv377o1q0bRowYgSVLluDChQvqS0QHDhxAt27dEB0dDeDxbe9ffvklRo8eDV9fX+zfvx9//PFHtWu2sLDAW2+9hX/84x949OgRnJ2d8csvvyAzM7Nc37lz5+KXX35Bly5dMGbMGHh4eCA7OxsbNmzAwYMHNS4phoWFYcmSJdizZw+++uqrGh1HIsnT2X1aRKQVf70VvCpP3wouhBD37t0TkyZNEk5OTsLAwEC4u7uL+fPnq29DfuLBgwfiww8/FI0aNRKmpqYiODhYXL16tdzt0UIIkZubK8aNGydcXFyEgYGBc
HBwEN27dxfff/+9uk9NbgX/6KOPhKOjozA2NhadO3cWaWlpokuXLqJLly4afe/fvy+mTZsmmjVrpt7uwIEDxaVLl9R9ysrKxPz580Xr1q2FoaGhsLOzE0FBQeLEiRMa64mMjBSWlpbC3NxcDB48WNy8ebPSW8Fv3bpVru5r166J/v37CysrK2FpaSkGDRokbty4UeHxunLliggLCxN2dnZCoVAINzc3MW7cOFFSUlJuvW3bthVyuVxcu3atyuNG9LKRCfHUuVIiImoQ2rdvDxsbG6Smpuq6FKJ6hWNuiIgaoOPHjyM9PR1hYWG6LoWo3uGZGyKiBuTMmTM4ceIEFi5ciLy8PPz5558wMjLSdVlE9QrP3BARNSAbN25EREQEHj16hDVr1jDYEFWAZ26IiIhIUnjmhoiIiCSF4YaIiIgk5aV7iJ9KpcKNGzdgbm7+Qm/+JSIiorojhMC9e/fg5OT0zPeovXTh5saNG3BxcdF1GURERPQcrl69ildeeaXKPi9duHnyQsCrV6/CwsJCx9U
"text/plain": [
"<Figure size 640x480 with 1 Axes>"
]
},
"metadata": {},
"output_type": "display_data"
}
],
"source": [
"import matplotlib.pyplot as plt\n",
"plt.plot(alex.history[\"accuracy\"])\n",
"plt.plot(alex.history['val_accuracy'])\n",
"plt.plot(alex.history['loss'])\n",
"plt.plot(alex.history['val_loss'])\n",
"plt.title(\"Model accuracy\")\n",
"plt.ylabel(\"Value\")\n",
"plt.xlabel(\"Epoch\")\n",
"plt.legend([\"Accuracy\",\"Validation Accuracy\",\"Loss\",\"Validation Loss\"])\n",
"plt.show()"
]
},
{
"cell_type": "code",
"execution_count": 10,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"46/46 [==============================] - 24s 518ms/step - loss: 1.0025 - accuracy: 0.4137\n"
]
},
{
"data": {
"text/plain": [
"[1.0024936199188232, 0.41372281312942505]"
]
},
"execution_count": 10,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"model.evaluate(test_ds_a)"
]
},
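{
"attachments": {},
"cell_type": "markdown",
"metadata": {},
"source": [
"Accuracy alone hides how the errors are spread over the three classes. The sketch below (assuming `model` still refers to the AlexNet network and `test_ds_a` to its test split) collects predictions and labels in a single pass - the test set was built with `shuffle`, so iterating it twice would misalign them - and prints a per-class report."
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Sketch: per-class precision/recall of the AlexNet model on its test split.\n",
"import numpy as np\n",
"from sklearn.metrics import classification_report\n",
"\n",
"y_true, y_pred = [], []\n",
"for images, labels in test_ds_a:\n",
"    probs = model.predict(images, verbose=0)  # class probabilities for one batch\n",
"    y_true.extend(labels.numpy())\n",
"    y_pred.extend(np.argmax(probs, axis=1))\n",
"\n",
"print(classification_report(y_true, y_pred))"
]
},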
{
"attachments": {},
"cell_type": "markdown",
"metadata": {},
"source": [
"# VGG16"
]
},
{
"cell_type": "code",
"execution_count": 11,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"Model: \"sequential_1\"\n",
"_________________________________________________________________\n",
" Layer (type) Output Shape Param # \n",
"=================================================================\n",
" conv2d_5 (Conv2D) (None, 224, 224, 64) 1792 \n",
" \n",
" conv2d_6 (Conv2D) (None, 224, 224, 64) 36928 \n",
" \n",
" max_pooling2d_3 (MaxPooling (None, 112, 112, 64) 0 \n",
" 2D) \n",
" \n",
" conv2d_7 (Conv2D) (None, 112, 112, 128) 73856 \n",
" \n",
" conv2d_8 (Conv2D) (None, 112, 112, 128) 147584 \n",
" \n",
" max_pooling2d_4 (MaxPooling (None, 56, 56, 128) 0 \n",
" 2D) \n",
" \n",
" conv2d_9 (Conv2D) (None, 56, 56, 256) 295168 \n",
" \n",
" conv2d_10 (Conv2D) (None, 56, 56, 256) 590080 \n",
" \n",
" conv2d_11 (Conv2D) (None, 56, 56, 256) 590080 \n",
" \n",
" max_pooling2d_5 (MaxPooling (None, 28, 28, 256) 0 \n",
" 2D) \n",
" \n",
" conv2d_12 (Conv2D) (None, 28, 28, 512) 1180160 \n",
" \n",
" conv2d_13 (Conv2D) (None, 28, 28, 512) 2359808 \n",
" \n",
" conv2d_14 (Conv2D) (None, 28, 28, 512) 2359808 \n",
" \n",
" max_pooling2d_6 (MaxPooling (None, 14, 14, 512) 0 \n",
" 2D) \n",
" \n",
" conv2d_15 (Conv2D) (None, 14, 14, 512) 2359808 \n",
" \n",
" conv2d_16 (Conv2D) (None, 14, 14, 512) 2359808 \n",
" \n",
" conv2d_17 (Conv2D) (None, 14, 14, 512) 2359808 \n",
" \n",
" flatten_1 (Flatten) (None, 100352) 0 \n",
" \n",
" dense_3 (Dense) (None, 4096) 411045888 \n",
" \n",
" dense_4 (Dense) (None, 4096) 16781312 \n",
" \n",
" dense_5 (Dense) (None, 3) 12291 \n",
" \n",
"=================================================================\n",
"Total params: 442,554,179\n",
"Trainable params: 442,554,179\n",
"Non-trainable params: 0\n",
"_________________________________________________________________\n"
]
},
{
"name": "stderr",
"output_type": "stream",
"text": [
"/Users/jonas/Library/Python/3.9/lib/python/site-packages/keras/optimizers/optimizer_v2/adam.py:117: UserWarning: The `lr` argument is deprecated, use `learning_rate` instead.\n",
" super().__init__(name, **kwargs)\n"
]
}
],
"source": [
"import keras,os\n",
"from keras.models import Sequential\n",
"from keras.layers import Dense, Conv2D, MaxPool2D , Flatten\n",
"from keras.preprocessing.image import ImageDataGenerator\n",
"from keras.optimizers import Adam\n",
"import numpy as np\n",
"\n",
"model = keras.models.Sequential([\n",
" keras.layers.Conv2D(filters=64, kernel_size=(3,3), activation='relu', input_shape=(224,224,3), padding=\"same\"),\n",
" keras.layers.Conv2D(filters=64, kernel_size=(3,3), activation='relu', input_shape=(224,224,3), padding=\"same\"),\n",
" keras.layers.MaxPool2D(pool_size=(2,2), strides=(2,2)),\n",
" keras.layers.Conv2D(filters=128, kernel_size=(3,3), padding=\"same\", activation=\"relu\"),\n",
" keras.layers.Conv2D(filters=128, kernel_size=(3,3), padding=\"same\", activation=\"relu\"),\n",
" keras.layers.MaxPool2D(pool_size=(2,2), strides=(2,2)),\n",
" keras.layers.Conv2D(filters=256, kernel_size=(3,3), padding=\"same\", activation=\"relu\"),\n",
" keras.layers.Conv2D(filters=256, kernel_size=(3,3), padding=\"same\", activation=\"relu\"),\n",
" keras.layers.Conv2D(filters=256, kernel_size=(3,3), padding=\"same\", activation=\"relu\"),\n",
" keras.layers.MaxPool2D(pool_size=(2,2), strides=(2,2)),\n",
" keras.layers.Conv2D(filters=512, kernel_size=(3,3), padding=\"same\", activation=\"relu\"),\n",
" keras.layers.Conv2D(filters=512, kernel_size=(3,3), padding=\"same\", activation=\"relu\"),\n",
" keras.layers.Conv2D(filters=512, kernel_size=(3,3), padding=\"same\", activation=\"relu\"),\n",
" keras.layers.MaxPool2D(pool_size=(2,2), strides=(2,2)),\n",
" keras.layers.Conv2D(filters=512, kernel_size=(3,3), padding=\"same\", activation=\"relu\"),\n",
" keras.layers.Conv2D(filters=512, kernel_size=(3,3), padding=\"same\", activation=\"relu\"),\n",
" keras.layers.Conv2D(filters=512, kernel_size=(3,3), padding=\"same\", activation=\"relu\"),\n",
" keras.layers.Flatten(),\n",
" keras.layers.Dense(units = 4096, activation='relu'),\n",
" keras.layers.Dense(units = 4096, activation='relu'),\n",
" keras.layers.Dense(units = 3, activation='softmax')\n",
"])\n",
"\n",
"opt = Adam(lr=0.001)\n",
"model.compile(optimizer=opt, loss=keras.losses.sparse_categorical_crossentropy, metrics=['accuracy'])\n",
"\n",
"model.summary()"
]
},
{
"cell_type": "code",
"execution_count": 12,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"Training: 4772\n",
"Test: 1492\n",
"Validation: 1194\n"
]
}
],
"source": [
"train_ds_v, test_ds_v, val_ds_v = prepare_data('./plantvillage/color', (224, 224), 0.2, 0.2)"
]
},
{
"cell_type": "code",
"execution_count": 13,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"WARNING:tensorflow:`period` argument is deprecated. Please use `save_freq` to specify the frequency in number of batches seen.\n"
]
},
{
"name": "stderr",
"output_type": "stream",
"text": [
"WARNING:tensorflow:`period` argument is deprecated. Please use `save_freq` to specify the frequency in number of batches seen.\n",
"/var/folders/6b/j4d60ym516x2s6wymzj707rh0000gn/T/ipykernel_9542/385174540.py:5: UserWarning: `Model.fit_generator` is deprecated and will be removed in a future version. Please use `Model.fit`, which supports generators.\n",
" vgg = model.fit_generator(steps_per_epoch=len(train_ds_v), generator=train_ds_v, validation_data= val_ds_v, validation_steps=len(val_ds_v), epochs=1, callbacks=[checkpoint,early])\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"149/149 [==============================] - ETA: 0s - loss: 0.8037 - accuracy: 0.7024 \n",
"Epoch 1: val_accuracy improved from -inf to 0.72804, saving model to vgg16_2.h5\n",
"149/149 [==============================] - 3159s 21s/step - loss: 0.8037 - accuracy: 0.7024 - val_loss: 0.7223 - val_accuracy: 0.7280\n"
]
}
],
"source": [
"from keras.callbacks import ModelCheckpoint, EarlyStopping\n",
"\n",
"checkpoint = ModelCheckpoint(\"vgg16_2.h5\", monitor='val_accuracy', verbose=1, save_best_only=True, save_weights_only=False, mode='auto', period=1)\n",
"early = EarlyStopping(monitor='val_accuracy', min_delta=0, patience=20, verbose=1, mode='auto')\n",
"vgg = model.fit_generator(steps_per_epoch=len(train_ds_v), generator=train_ds_v, validation_data= val_ds_v, validation_steps=len(val_ds_v), epochs=1, callbacks=[checkpoint,early])"
]
},
{
"cell_type": "code",
"execution_count": 14,
"metadata": {},
"outputs": [
{
"data": {
"image/png": "iVBORw0KGgoAAAANSUhEUgAAAkAAAAHHCAYAAABXx+fLAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjYuMiwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8o6BhiAAAACXBIWXMAAA9hAAAPYQGoP6dpAABOPUlEQVR4nO3de1xP2eI//td+l+73iy4mhXGflCM1mHEZnZNbZFwSKRTjDBnC4LjE3JhhTAxjzswn9XEOIrdjjhmkcRehT2ikwTRy6SJGKVR6r98fvvZv3kqK6l3t1/Px2A/ea6+99lp73jPv1+y99t6SEEKAiIiISEFU2u4AERERUV1jACIiIiLFYQAiIiIixWEAIiIiIsVhACIiIiLFYQAiIiIixWEAIiIiIsVhACIiIiLFYQAiIiIixWEAIqI6JUkSFi9eXO3tfv/9d0iShJiYmBrvExEpDwMQkQLFxMRAkiRIkoRjx46VWy+EgJOTEyRJwqBBg7TQQyKi2sUARKRgBgYG2LRpU7nyw4cP48aNG9DX19dCr4iIah8DEJGCDRgwAHFxcXj8+LFG+aZNm9ClSxfY29trqWfKUVRUpO0uECkSAxCRggUEBODOnTuIj4+Xy0pKSrBt2zaMHj26wm2Kioowc+ZMODk5QV9fH23btsWKFSsghNCoV1xcjBkzZsDW1hampqYYPHgwbty4UWGbN2/exIQJE2BnZwd9fX107NgR69evf6kx3b17F7NmzYKrqytMTExgZmaG/v3749y5c+XqPnr0CIsXL0abNm1gYGAABwcHvPvuu7h69apcR61WY9WqVXB1dYWBgQFsbW3Rr18/nDlzBkDlc5Oene+0ePFiSJKEixcvYvTo0bC0tMRbb70FADh//jzGjRuHli1bwsDAAPb29pgwYQLu3LlT4fEKCQmBo6Mj9PX10aJFC/z9739HSUkJfvvtN0iShK+++qrcdidOnIAkSdi8eXN1DytRo6Or7Q4Qkfa4uLigW7du2Lx5M/r37w8A+Omnn5Cfn49Ro0Zh9erVGvWFEBg8eDAOHjyIkJAQuLu7Y9++fZg9ezZu3ryp8aMbGhqKf//73xg9ejS6d++On3/+GQMHDizXh5ycHLz55puQJAlTp06Fra0tfvrpJ4SEhKCgoADTp0+v1ph+++037Nq1CyNGjECLFi2Qk5ODf/7zn+jVqxcuXrwIR0dHAEBZWRkGDRqEhIQEjBo1Ch988AHu37+P+Ph4pKamolWrVgCAkJAQxMTEoH///ggNDcXjx49x9OhRnDx5Eh4eHtXq21MjRoxA69at8dlnn8nBMT4+Hr/99hvGjx8Pe3t7/PLLL/juu+/wyy+/4OTJk5AkCQBw69YteHp64t69e5g0aRLatWuHmzdvYtu2bXjw4AFatmyJHj16YOPGjZgxY4bGfjdu3AhTU1MMGTLkpfpN1KgIIlKc6OhoAUCcPn1arFmzRpiamooHDx4IIYQYMWKE6NOnjxBCCGdnZzFw4EB5u127dgkA4pNPPtFob/jw4UKSJHHlyhUhhBApKSkCgHj//fc16o0ePVoAEBEREXJZSEiIcHBwEHl5eRp1R40aJczNzeV+ZWRkCAAiOjq60rE9evRIlJWVaZRlZGQIfX198dFHH8ll69evFwDEypUry7WhVquFEEL8/PPPAoCYNm3ac+tU1q9nxxoRESEAiICAgHJ1n47zzzZv3iwAiCNHjshlQUFBQqVSidOnTz+3T//85z8FAJGWliavKykpETY2NiI4OLjcdkRKxEtgRAo3cuRIPHz4EP/9739x//59/Pe//33u5a8ff/wROjo6mDZtmkb5zJkzIYTATz/9JNcDUK7es2dzhBDYvn07fH19IYRAXl6evPj4+CA/Px/JycnVGo++vj5Uqif/aSsrK8OdO3dgYmKCtm3barS1fft22NjYICwsrFwbT8+2bN++HZIkISIi4rl1XsbkyZPLlRkaGsp/f/ToEfLy8vDmm28CgNxvtVqNXbt2wdfXt8KzT0/7NHLkSBgYGGDjxo3yun379iEvLw+BgYEv3W+ixoQBiEjhbG1t4e3tjU2bNmHHjh0oKyvD8OHDK6x77do1ODo6wtTUVKO8ffv28vqnf6pUKvky0lNt27bV+Hz79m3cu3cP3333HWxtbTWW8ePHAwByc3OrNR61Wo2vvvoKrVu3hr6+PmxsbGBra4vz588jPz9frnf16lW0bdsWurrPnwlw9epVODo6wsrKqlp9eJEWLVqUK7t79y4++OAD2NnZwdDQELa2tnK9p/2+ffs2CgoK8MYbb1TavoWFBXx9fTXu8Nu4cSOaNWuGd955pwZHQtRwcQ4QEWH06NGYOHEisrOz0b9/f1hYWNTJftVqNQAgMDAQwcHBFdbp1KlTtdr87LPPsHDhQkyYMAEff/wxrKysoFKpMH36dHl/Nel5Z4LKysqeu82fz/Y8NXLkSJw4cQKzZ8+Gu7s7TExMoFar0a9fv5fqd1BQEOLi4nDixAm4urpi9+7deP/99+WzY0RKxwBERBg6dCjee+89nDx5Elu2bHluPWdnZxw4cAD379/XOAt06dIlef3TP9VqtXyW5an09HSN9p7eIVZWVgZvb+8aGcu2bdvQp08fREVFaZTfu3cPNjY28udWrVrh1KlTKC0tRZMmTSpsq1WrVti3bx/u3r373LNAlpaWcvt/9vRsWFX88ccfSEhIwJIlS7Bo0SK5/PLlyxr1bG1tYWZmhtTU1Be22a9fP9ja2mLjxo3w8vLCgwcPMHbs2Cr3iaix4/8KEBFMTEywbt06LF68GL6+vs+tN2DAAJSVlWHNmjUa5V999RUkSZLvJHv657N3kUVGRmp81tHRwbBhw7B9+/YKf9Rv375d7bHo6OiUuyU/Li4ON2/e1CgbNmwY8vLyyo0FgLz9sGHDIITAkiVLnlvHzMwMNjY2OHLkiMb6b775plp9/nObTz17vFQqFfz8/PDDDz/It+FX1CcA0NXVRUBAALZu3YqYmBi4urpW+2waUWPGM0BEBADPvQT1Z76+vujTpw/mz5+P33//HW5ubti/fz/+85//YPr06fKcH3d3dwQEBOCbb75Bfn4+unfvjoSEBFy5cqVcm8uWLcPBgwfh5eWFiRMnokOHDrh79y6Sk5Nx4MAB3L17t1rjGDRoED766COMHz8e3bt3x4ULF7Bx40a0bNlSo15QUBA2bNiA8PBwJCUl4e2330ZRUREOHDiA999/H0OGDEGfPn0wduxYrF69GpcvX5YvRx09ehR9+vTB1KlTATy55X/ZsmUIDQ2Fh4cHjhw5gl9//bXKfTYzM0PPnj3xxRdfoLS0FM2aNcP+/fuRkZFRru5nn32G/fv3o1evXpg0aRLat2+PrKwsxMXF4dixYxqXL4OCgrB69WocPHgQn3/+ebWOI1Gjp7X7z4hIa/58G3xlnr0NXggh7t+/L2bMmCEcHR1FkyZNROvWrcXy5cvlW7CfevjwoZg2bZqwtrYWxsbGwtfXV1y/fr3creFCCJGTkyOmTJkinJycRJMmTYS9v
b3o27ev+O677+Q61bkNfubMmcLBwUEYGhqKHj16iMTERNGrVy/Rq1cvjboPHjwQ8+fPFy1atJD3O3z4cHH16lW5zuPHj8Xy5ctFu3bthJ6enrC1tRX9+/cXZ8+e1WgnJCREmJubC1NTUzFy5EiRm5v73Nvgb9++Xa7fN27cEEOHDhUWFhbC3NxcjBgxQty6davC43Xt2jURFBQkbG1thb6+vmjZsqWYMmWKKC4uLtdux44dhUqlEjdu3Kj0uBEpjSTEM+dciYio0ejcuTOsrKyQkJCg7a4Q1SucA0RE1EidOXMGKSkpCAoK0nZXiOodngEiImpkUlNTcfbsWXz55ZfIy8vDb7/9BgMDA213i6he4RkgIqJGZtu2bRg/fjxKS0uxefNmhh+iCvAMEBERESkOzwARERGR4jAAERERkeLwQYgVUKvVuHXrFkxNTV/pjc9ERERUd4QQuH//PhwdHV/43jsGoArcunULTk5O2u4GERERvYTr16/jtddeq7QOA1AFnr7k8fr16zAzM9Nyb4i
"text/plain": [
"<Figure size 640x480 with 1 Axes>"
]
},
"metadata": {},
"output_type": "display_data"
}
],
"source": [
"import matplotlib.pyplot as plt\n",
"plt.plot(vgg.history[\"accuracy\"])\n",
"plt.plot(vgg.history['val_accuracy'])\n",
"plt.plot(vgg.history['loss'])\n",
"plt.plot(vgg.history['val_loss'])\n",
"plt.title(\"Model accuracy\")\n",
"plt.ylabel(\"Value\")\n",
"plt.xlabel(\"Epoch\")\n",
"plt.legend([\"Accuracy\",\"Validation Accuracy\",\"Loss\",\"Validation Loss\"])\n",
"plt.show()"
]
},
{
"cell_type": "code",
"execution_count": 15,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"46/46 [==============================] - 238s 5s/step - loss: 0.7124 - accuracy: 0.7364\n"
]
},
{
"data": {
"text/plain": [
"[0.7124184966087341, 0.7364130616188049]"
]
},
"execution_count": 15,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"model.evaluate(test_ds_v)"
]
},
{
"attachments": {},
"cell_type": "markdown",
"metadata": {},
"source": [
"# ResNet50"
]
},
{
"cell_type": "code",
"execution_count": 16,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"Model: \"model\"\n",
"__________________________________________________________________________________________________\n",
" Layer (type) Output Shape Param # Connected to \n",
"==================================================================================================\n",
" input_1 (InputLayer) [(None, 224, 224, 3 0 [] \n",
" )] \n",
" \n",
" conv1_pad (ZeroPadding2D) (None, 230, 230, 3) 0 ['input_1[0][0]'] \n",
" \n",
" conv1_conv (Conv2D) (None, 112, 112, 64 9472 ['conv1_pad[0][0]'] \n",
" ) \n",
" \n",
" conv1_bn (BatchNormalization) (None, 112, 112, 64 256 ['conv1_conv[0][0]'] \n",
" ) \n",
" \n",
" conv1_relu (Activation) (None, 112, 112, 64 0 ['conv1_bn[0][0]'] \n",
" ) \n",
" \n",
" pool1_pad (ZeroPadding2D) (None, 114, 114, 64 0 ['conv1_relu[0][0]'] \n",
" ) \n",
" \n",
" pool1_pool (MaxPooling2D) (None, 56, 56, 64) 0 ['pool1_pad[0][0]'] \n",
" \n",
" conv2_block1_1_conv (Conv2D) (None, 56, 56, 64) 4160 ['pool1_pool[0][0]'] \n",
" \n",
" conv2_block1_1_bn (BatchNormal (None, 56, 56, 64) 256 ['conv2_block1_1_conv[0][0]'] \n",
" ization) \n",
" \n",
" conv2_block1_1_relu (Activatio (None, 56, 56, 64) 0 ['conv2_block1_1_bn[0][0]'] \n",
" n) \n",
" \n",
" conv2_block1_2_conv (Conv2D) (None, 56, 56, 64) 36928 ['conv2_block1_1_relu[0][0]'] \n",
" \n",
" conv2_block1_2_bn (BatchNormal (None, 56, 56, 64) 256 ['conv2_block1_2_conv[0][0]'] \n",
" ization) \n",
" \n",
" conv2_block1_2_relu (Activatio (None, 56, 56, 64) 0 ['conv2_block1_2_bn[0][0]'] \n",
" n) \n",
" \n",
" conv2_block1_0_conv (Conv2D) (None, 56, 56, 256) 16640 ['pool1_pool[0][0]'] \n",
" \n",
" conv2_block1_3_conv (Conv2D) (None, 56, 56, 256) 16640 ['conv2_block1_2_relu[0][0]'] \n",
" \n",
" conv2_block1_0_bn (BatchNormal (None, 56, 56, 256) 1024 ['conv2_block1_0_conv[0][0]'] \n",
" ization) \n",
" \n",
" conv2_block1_3_bn (BatchNormal (None, 56, 56, 256) 1024 ['conv2_block1_3_conv[0][0]'] \n",
" ization) \n",
" \n",
" conv2_block1_add (Add) (None, 56, 56, 256) 0 ['conv2_block1_0_bn[0][0]', \n",
" 'conv2_block1_3_bn[0][0]'] \n",
" \n",
" conv2_block1_out (Activation) (None, 56, 56, 256) 0 ['conv2_block1_add[0][0]'] \n",
" \n",
" conv2_block2_1_conv (Conv2D) (None, 56, 56, 64) 16448 ['conv2_block1_out[0][0]'] \n",
" \n",
" conv2_block2_1_bn (BatchNormal (None, 56, 56, 64) 256 ['conv2_block2_1_conv[0][0]'] \n",
" ization) \n",
" \n",
" conv2_block2_1_relu (Activatio (None, 56, 56, 64) 0 ['conv2_block2_1_bn[0][0]'] \n",
" n) \n",
" \n",
" conv2_block2_2_conv (Conv2D) (None, 56, 56, 64) 36928 ['conv2_block2_1_relu[0][0]'] \n",
" \n",
" conv2_block2_2_bn (BatchNormal (None, 56, 56, 64) 256 ['conv2_block2_2_conv[0][0]'] \n",
" ization) \n",
" \n",
" conv2_block2_2_relu (Activatio (None, 56, 56, 64) 0 ['conv2_block2_2_bn[0][0]'] \n",
" n) \n",
" \n",
" conv2_block2_3_conv (Conv2D) (None, 56, 56, 256) 16640 ['conv2_block2_2_relu[0][0]'] \n",
" \n",
" conv2_block2_3_bn (BatchNormal (None, 56, 56, 256) 1024 ['conv2_block2_3_conv[0][0]'] \n",
" ization) \n",
" \n",
" conv2_block2_add (Add) (None, 56, 56, 256) 0 ['conv2_block1_out[0][0]', \n",
" 'conv2_block2_3_bn[0][0]'] \n",
" \n",
" conv2_block2_out (Activation) (None, 56, 56, 256) 0 ['conv2_block2_add[0][0]'] \n",
" \n",
" conv2_block3_1_conv (Conv2D) (None, 56, 56, 64) 16448 ['conv2_block2_out[0][0]'] \n",
" \n",
" conv2_block3_1_bn (BatchNormal (None, 56, 56, 64) 256 ['conv2_block3_1_conv[0][0]'] \n",
" ization) \n",
" \n",
" conv2_block3_1_relu (Activatio (None, 56, 56, 64) 0 ['conv2_block3_1_bn[0][0]'] \n",
" n) \n",
" \n",
" conv2_block3_2_conv (Conv2D) (None, 56, 56, 64) 36928 ['conv2_block3_1_relu[0][0]'] \n",
" \n",
" conv2_block3_2_bn (BatchNormal (None, 56, 56, 64) 256 ['conv2_block3_2_conv[0][0]'] \n",
" ization) \n",
" \n",
" conv2_block3_2_relu (Activatio (None, 56, 56, 64) 0 ['conv2_block3_2_bn[0][0]'] \n",
" n) \n",
" \n",
" conv2_block3_3_conv (Conv2D) (None, 56, 56, 256) 16640 ['conv2_block3_2_relu[0][0]'] \n",
" \n",
" conv2_block3_3_bn (BatchNormal (None, 56, 56, 256) 1024 ['conv2_block3_3_conv[0][0]'] \n",
" ization) \n",
" \n",
" conv2_block3_add (Add) (None, 56, 56, 256) 0 ['conv2_block2_out[0][0]', \n",
" 'conv2_block3_3_bn[0][0]'] \n",
" \n",
" conv2_block3_out (Activation) (None, 56, 56, 256) 0 ['conv2_block3_add[0][0]'] \n",
" \n",
" conv3_block1_1_conv (Conv2D) (None, 28, 28, 128) 32896 ['conv2_block3_out[0][0]'] \n",
" \n",
" conv3_block1_1_bn (BatchNormal (None, 28, 28, 128) 512 ['conv3_block1_1_conv[0][0]'] \n",
" ization) \n",
" \n",
" conv3_block1_1_relu (Activatio (None, 28, 28, 128) 0 ['conv3_block1_1_bn[0][0]'] \n",
" n) \n",
" \n",
" conv3_block1_2_conv (Conv2D) (None, 28, 28, 128) 147584 ['conv3_block1_1_relu[0][0]'] \n",
" \n",
" conv3_block1_2_bn (BatchNormal (None, 28, 28, 128) 512 ['conv3_block1_2_conv[0][0]'] \n",
" ization) \n",
" \n",
" conv3_block1_2_relu (Activatio (None, 28, 28, 128) 0 ['conv3_block1_2_bn[0][0]'] \n",
" n) \n",
" \n",
" conv3_block1_0_conv (Conv2D) (None, 28, 28, 512) 131584 ['conv2_block3_out[0][0]'] \n",
" \n",
" conv3_block1_3_conv (Conv2D) (None, 28, 28, 512) 66048 ['conv3_block1_2_relu[0][0]'] \n",
" \n",
" conv3_block1_0_bn (BatchNormal (None, 28, 28, 512) 2048 ['conv3_block1_0_conv[0][0]'] \n",
" ization) \n",
" \n",
" conv3_block1_3_bn (BatchNormal (None, 28, 28, 512) 2048 ['conv3_block1_3_conv[0][0]'] \n",
" ization) \n",
" \n",
" conv3_block1_add (Add) (None, 28, 28, 512) 0 ['conv3_block1_0_bn[0][0]', \n",
" 'conv3_block1_3_bn[0][0]'] \n",
" \n",
" conv3_block1_out (Activation) (None, 28, 28, 512) 0 ['conv3_block1_add[0][0]'] \n",
" \n",
" conv3_block2_1_conv (Conv2D) (None, 28, 28, 128) 65664 ['conv3_block1_out[0][0]'] \n",
" \n",
" conv3_block2_1_bn (BatchNormal (None, 28, 28, 128) 512 ['conv3_block2_1_conv[0][0]'] \n",
" ization) \n",
" \n",
" conv3_block2_1_relu (Activatio (None, 28, 28, 128) 0 ['conv3_block2_1_bn[0][0]'] \n",
" n) \n",
" \n",
" conv3_block2_2_conv (Conv2D) (None, 28, 28, 128) 147584 ['conv3_block2_1_relu[0][0]'] \n",
" \n",
" conv3_block2_2_bn (BatchNormal (None, 28, 28, 128) 512 ['conv3_block2_2_conv[0][0]'] \n",
" ization) \n",
" \n",
" conv3_block2_2_relu (Activatio (None, 28, 28, 128) 0 ['conv3_block2_2_bn[0][0]'] \n",
" n) \n",
" \n",
" conv3_block2_3_conv (Conv2D) (None, 28, 28, 512) 66048 ['conv3_block2_2_relu[0][0]'] \n",
" \n",
" conv3_block2_3_bn (BatchNormal (None, 28, 28, 512) 2048 ['conv3_block2_3_conv[0][0]'] \n",
" ization) \n",
" \n",
" conv3_block2_add (Add) (None, 28, 28, 512) 0 ['conv3_block1_out[0][0]', \n",
" 'conv3_block2_3_bn[0][0]'] \n",
" \n",
" conv3_block2_out (Activation) (None, 28, 28, 512) 0 ['conv3_block2_add[0][0]'] \n",
" \n",
" conv3_block3_1_conv (Conv2D) (None, 28, 28, 128) 65664 ['conv3_block2_out[0][0]'] \n",
" \n",
" conv3_block3_1_bn (BatchNormal (None, 28, 28, 128) 512 ['conv3_block3_1_conv[0][0]'] \n",
" ization) \n",
" \n",
" conv3_block3_1_relu (Activatio (None, 28, 28, 128) 0 ['conv3_block3_1_bn[0][0]'] \n",
" n) \n",
" \n",
" conv3_block3_2_conv (Conv2D) (None, 28, 28, 128) 147584 ['conv3_block3_1_relu[0][0]'] \n",
" \n",
" conv3_block3_2_bn (BatchNormal (None, 28, 28, 128) 512 ['conv3_block3_2_conv[0][0]'] \n",
" ization) \n",
" \n",
" conv3_block3_2_relu (Activatio (None, 28, 28, 128) 0 ['conv3_block3_2_bn[0][0]'] \n",
" n) \n",
" \n",
" conv3_block3_3_conv (Conv2D) (None, 28, 28, 512) 66048 ['conv3_block3_2_relu[0][0]'] \n",
" \n",
" conv3_block3_3_bn (BatchNormal (None, 28, 28, 512) 2048 ['conv3_block3_3_conv[0][0]'] \n",
" ization) \n",
" \n",
" conv3_block3_add (Add) (None, 28, 28, 512) 0 ['conv3_block2_out[0][0]', \n",
" 'conv3_block3_3_bn[0][0]'] \n",
" \n",
" conv3_block3_out (Activation) (None, 28, 28, 512) 0 ['conv3_block3_add[0][0]'] \n",
" \n",
" conv3_block4_1_conv (Conv2D) (None, 28, 28, 128) 65664 ['conv3_block3_out[0][0]'] \n",
" \n",
" conv3_block4_1_bn (BatchNormal (None, 28, 28, 128) 512 ['conv3_block4_1_conv[0][0]'] \n",
" ization) \n",
" \n",
" conv3_block4_1_relu (Activatio (None, 28, 28, 128) 0 ['conv3_block4_1_bn[0][0]'] \n",
" n) \n",
" \n",
" conv3_block4_2_conv (Conv2D) (None, 28, 28, 128) 147584 ['conv3_block4_1_relu[0][0]'] \n",
" \n",
" conv3_block4_2_bn (BatchNormal (None, 28, 28, 128) 512 ['conv3_block4_2_conv[0][0]'] \n",
" ization) \n",
" \n",
" conv3_block4_2_relu (Activatio (None, 28, 28, 128) 0 ['conv3_block4_2_bn[0][0]'] \n",
" n) \n",
" \n",
" conv3_block4_3_conv (Conv2D) (None, 28, 28, 512) 66048 ['conv3_block4_2_relu[0][0]'] \n",
" \n",
" conv3_block4_3_bn (BatchNormal (None, 28, 28, 512) 2048 ['conv3_block4_3_conv[0][0]'] \n",
" ization) \n",
" \n",
" conv3_block4_add (Add) (None, 28, 28, 512) 0 ['conv3_block3_out[0][0]', \n",
" 'conv3_block4_3_bn[0][0]'] \n",
" \n",
" conv3_block4_out (Activation) (None, 28, 28, 512) 0 ['conv3_block4_add[0][0]'] \n",
" \n",
" conv4_block1_1_conv (Conv2D) (None, 14, 14, 256) 131328 ['conv3_block4_out[0][0]'] \n",
" \n",
" conv4_block1_1_bn (BatchNormal (None, 14, 14, 256) 1024 ['conv4_block1_1_conv[0][0]'] \n",
" ization) \n",
" \n",
" conv4_block1_1_relu (Activatio (None, 14, 14, 256) 0 ['conv4_block1_1_bn[0][0]'] \n",
" n) \n",
" \n",
" conv4_block1_2_conv (Conv2D) (None, 14, 14, 256) 590080 ['conv4_block1_1_relu[0][0]'] \n",
" \n",
" conv4_block1_2_bn (BatchNormal (None, 14, 14, 256) 1024 ['conv4_block1_2_conv[0][0]'] \n",
" ization) \n",
" \n",
" conv4_block1_2_relu (Activatio (None, 14, 14, 256) 0 ['conv4_block1_2_bn[0][0]'] \n",
" n) \n",
" \n",
" conv4_block1_0_conv (Conv2D) (None, 14, 14, 1024 525312 ['conv3_block4_out[0][0]'] \n",
" ) \n",
" \n",
" conv4_block1_3_conv (Conv2D) (None, 14, 14, 1024 263168 ['conv4_block1_2_relu[0][0]'] \n",
" ) \n",
" \n",
" conv4_block1_0_bn (BatchNormal (None, 14, 14, 1024 4096 ['conv4_block1_0_conv[0][0]'] \n",
" ization) ) \n",
" \n",
" conv4_block1_3_bn (BatchNormal (None, 14, 14, 1024 4096 ['conv4_block1_3_conv[0][0]'] \n",
" ization) ) \n",
" \n",
" conv4_block1_add (Add) (None, 14, 14, 1024 0 ['conv4_block1_0_bn[0][0]', \n",
" ) 'conv4_block1_3_bn[0][0]'] \n",
" \n",
" conv4_block1_out (Activation) (None, 14, 14, 1024 0 ['conv4_block1_add[0][0]'] \n",
" ) \n",
" \n",
" conv4_block2_1_conv (Conv2D) (None, 14, 14, 256) 262400 ['conv4_block1_out[0][0]'] \n",
" \n",
" conv4_block2_1_bn (BatchNormal (None, 14, 14, 256) 1024 ['conv4_block2_1_conv[0][0]'] \n",
" ization) \n",
" \n",
" conv4_block2_1_relu (Activatio (None, 14, 14, 256) 0 ['conv4_block2_1_bn[0][0]'] \n",
" n) \n",
" \n",
" conv4_block2_2_conv (Conv2D) (None, 14, 14, 256) 590080 ['conv4_block2_1_relu[0][0]'] \n",
" \n",
" conv4_block2_2_bn (BatchNormal (None, 14, 14, 256) 1024 ['conv4_block2_2_conv[0][0]'] \n",
" ization) \n",
" \n",
" conv4_block2_2_relu (Activatio (None, 14, 14, 256) 0 ['conv4_block2_2_bn[0][0]'] \n",
" n) \n",
" \n",
" conv4_block2_3_conv (Conv2D) (None, 14, 14, 1024 263168 ['conv4_block2_2_relu[0][0]'] \n",
" ) \n",
" \n",
" conv4_block2_3_bn (BatchNormal (None, 14, 14, 1024 4096 ['conv4_block2_3_conv[0][0]'] \n",
" ization) ) \n",
" \n",
" conv4_block2_add (Add) (None, 14, 14, 1024 0 ['conv4_block1_out[0][0]', \n",
" ) 'conv4_block2_3_bn[0][0]'] \n",
" \n",
" conv4_block2_out (Activation) (None, 14, 14, 1024 0 ['conv4_block2_add[0][0]'] \n",
" ) \n",
" \n",
" conv4_block3_1_conv (Conv2D) (None, 14, 14, 256) 262400 ['conv4_block2_out[0][0]'] \n",
" \n",
" conv4_block3_1_bn (BatchNormal (None, 14, 14, 256) 1024 ['conv4_block3_1_conv[0][0]'] \n",
" ization) \n",
" \n",
" conv4_block3_1_relu (Activatio (None, 14, 14, 256) 0 ['conv4_block3_1_bn[0][0]'] \n",
" n) \n",
" \n",
" conv4_block3_2_conv (Conv2D) (None, 14, 14, 256) 590080 ['conv4_block3_1_relu[0][0]'] \n",
" \n",
" conv4_block3_2_bn (BatchNormal (None, 14, 14, 256) 1024 ['conv4_block3_2_conv[0][0]'] \n",
" ization) \n",
" \n",
" conv4_block3_2_relu (Activatio (None, 14, 14, 256) 0 ['conv4_block3_2_bn[0][0]'] \n",
" n) \n",
" \n",
" conv4_block3_3_conv (Conv2D) (None, 14, 14, 1024 263168 ['conv4_block3_2_relu[0][0]'] \n",
" ) \n",
" \n",
" conv4_block3_3_bn (BatchNormal (None, 14, 14, 1024 4096 ['conv4_block3_3_conv[0][0]'] \n",
" ization) ) \n",
" \n",
" conv4_block3_add (Add) (None, 14, 14, 1024 0 ['conv4_block2_out[0][0]', \n",
" ) 'conv4_block3_3_bn[0][0]'] \n",
" \n",
" conv4_block3_out (Activation) (None, 14, 14, 1024 0 ['conv4_block3_add[0][0]'] \n",
" ) \n",
" \n",
" conv4_block4_1_conv (Conv2D) (None, 14, 14, 256) 262400 ['conv4_block3_out[0][0]'] \n",
" \n",
" conv4_block4_1_bn (BatchNormal (None, 14, 14, 256) 1024 ['conv4_block4_1_conv[0][0]'] \n",
" ization) \n",
" \n",
" conv4_block4_1_relu (Activatio (None, 14, 14, 256) 0 ['conv4_block4_1_bn[0][0]'] \n",
" n) \n",
" \n",
" conv4_block4_2_conv (Conv2D) (None, 14, 14, 256) 590080 ['conv4_block4_1_relu[0][0]'] \n",
" \n",
" conv4_block4_2_bn (BatchNormal (None, 14, 14, 256) 1024 ['conv4_block4_2_conv[0][0]'] \n",
" ization) \n",
" \n",
" conv4_block4_2_relu (Activatio (None, 14, 14, 256) 0 ['conv4_block4_2_bn[0][0]'] \n",
" n) \n",
" \n",
" conv4_block4_3_conv (Conv2D) (None, 14, 14, 1024 263168 ['conv4_block4_2_relu[0][0]'] \n",
" ) \n",
" \n",
" conv4_block4_3_bn (BatchNormal (None, 14, 14, 1024 4096 ['conv4_block4_3_conv[0][0]'] \n",
" ization) ) \n",
" \n",
" conv4_block4_add (Add) (None, 14, 14, 1024 0 ['conv4_block3_out[0][0]', \n",
" ) 'conv4_block4_3_bn[0][0]'] \n",
" \n",
" conv4_block4_out (Activation) (None, 14, 14, 1024 0 ['conv4_block4_add[0][0]'] \n",
" ) \n",
" \n",
" conv4_block5_1_conv (Conv2D) (None, 14, 14, 256) 262400 ['conv4_block4_out[0][0]'] \n",
" \n",
" conv4_block5_1_bn (BatchNormal (None, 14, 14, 256) 1024 ['conv4_block5_1_conv[0][0]'] \n",
" ization) \n",
" \n",
" conv4_block5_1_relu (Activatio (None, 14, 14, 256) 0 ['conv4_block5_1_bn[0][0]'] \n",
" n) \n",
" \n",
" conv4_block5_2_conv (Conv2D) (None, 14, 14, 256) 590080 ['conv4_block5_1_relu[0][0]'] \n",
" \n",
" conv4_block5_2_bn (BatchNormal (None, 14, 14, 256) 1024 ['conv4_block5_2_conv[0][0]'] \n",
" ization) \n",
" \n",
" conv4_block5_2_relu (Activatio (None, 14, 14, 256) 0 ['conv4_block5_2_bn[0][0]'] \n",
" n) \n",
" \n",
" conv4_block5_3_conv (Conv2D) (None, 14, 14, 1024 263168 ['conv4_block5_2_relu[0][0]'] \n",
" ) \n",
" \n",
" conv4_block5_3_bn (BatchNormal (None, 14, 14, 1024 4096 ['conv4_block5_3_conv[0][0]'] \n",
" ization) ) \n",
" \n",
" conv4_block5_add (Add) (None, 14, 14, 1024 0 ['conv4_block4_out[0][0]', \n",
" ) 'conv4_block5_3_bn[0][0]'] \n",
" \n",
" conv4_block5_out (Activation) (None, 14, 14, 1024 0 ['conv4_block5_add[0][0]'] \n",
" ) \n",
" \n",
" conv4_block6_1_conv (Conv2D) (None, 14, 14, 256) 262400 ['conv4_block5_out[0][0]'] \n",
" \n",
" conv4_block6_1_bn (BatchNormal (None, 14, 14, 256) 1024 ['conv4_block6_1_conv[0][0]'] \n",
" ization) \n",
" \n",
" conv4_block6_1_relu (Activatio (None, 14, 14, 256) 0 ['conv4_block6_1_bn[0][0]'] \n",
" n) \n",
" \n",
" conv4_block6_2_conv (Conv2D) (None, 14, 14, 256) 590080 ['conv4_block6_1_relu[0][0]'] \n",
" \n",
" conv4_block6_2_bn (BatchNormal (None, 14, 14, 256) 1024 ['conv4_block6_2_conv[0][0]'] \n",
" ization) \n",
" \n",
" conv4_block6_2_relu (Activatio (None, 14, 14, 256) 0 ['conv4_block6_2_bn[0][0]'] \n",
" n) \n",
" \n",
" conv4_block6_3_conv (Conv2D) (None, 14, 14, 1024 263168 ['conv4_block6_2_relu[0][0]'] \n",
" ) \n",
" \n",
" conv4_block6_3_bn (BatchNormal (None, 14, 14, 1024 4096 ['conv4_block6_3_conv[0][0]'] \n",
" ization) ) \n",
" \n",
" conv4_block6_add (Add) (None, 14, 14, 1024 0 ['conv4_block5_out[0][0]', \n",
" ) 'conv4_block6_3_bn[0][0]'] \n",
" \n",
" conv4_block6_out (Activation) (None, 14, 14, 1024 0 ['conv4_block6_add[0][0]'] \n",
" ) \n",
" \n",
" conv5_block1_1_conv (Conv2D) (None, 7, 7, 512) 524800 ['conv4_block6_out[0][0]'] \n",
" \n",
" conv5_block1_1_bn (BatchNormal (None, 7, 7, 512) 2048 ['conv5_block1_1_conv[0][0]'] \n",
" ization) \n",
" \n",
" conv5_block1_1_relu (Activatio (None, 7, 7, 512) 0 ['conv5_block1_1_bn[0][0]'] \n",
" n) \n",
" \n",
" conv5_block1_2_conv (Conv2D) (None, 7, 7, 512) 2359808 ['conv5_block1_1_relu[0][0]'] \n",
" \n",
" conv5_block1_2_bn (BatchNormal (None, 7, 7, 512) 2048 ['conv5_block1_2_conv[0][0]'] \n",
" ization) \n",
" \n",
" conv5_block1_2_relu (Activatio (None, 7, 7, 512) 0 ['conv5_block1_2_bn[0][0]'] \n",
" n) \n",
" \n",
" conv5_block1_0_conv (Conv2D) (None, 7, 7, 2048) 2099200 ['conv4_block6_out[0][0]'] \n",
" \n",
" conv5_block1_3_conv (Conv2D) (None, 7, 7, 2048) 1050624 ['conv5_block1_2_relu[0][0]'] \n",
" \n",
" conv5_block1_0_bn (BatchNormal (None, 7, 7, 2048) 8192 ['conv5_block1_0_conv[0][0]'] \n",
" ization) \n",
" \n",
" conv5_block1_3_bn (BatchNormal (None, 7, 7, 2048) 8192 ['conv5_block1_3_conv[0][0]'] \n",
" ization) \n",
" \n",
" conv5_block1_add (Add) (None, 7, 7, 2048) 0 ['conv5_block1_0_bn[0][0]', \n",
" 'conv5_block1_3_bn[0][0]'] \n",
" \n",
" conv5_block1_out (Activation) (None, 7, 7, 2048) 0 ['conv5_block1_add[0][0]'] \n",
" \n",
" conv5_block2_1_conv (Conv2D) (None, 7, 7, 512) 1049088 ['conv5_block1_out[0][0]'] \n",
" \n",
" conv5_block2_1_bn (BatchNormal (None, 7, 7, 512) 2048 ['conv5_block2_1_conv[0][0]'] \n",
" ization) \n",
" \n",
" conv5_block2_1_relu (Activatio (None, 7, 7, 512) 0 ['conv5_block2_1_bn[0][0]'] \n",
" n) \n",
" \n",
" conv5_block2_2_conv (Conv2D) (None, 7, 7, 512) 2359808 ['conv5_block2_1_relu[0][0]'] \n",
" \n",
" conv5_block2_2_bn (BatchNormal (None, 7, 7, 512) 2048 ['conv5_block2_2_conv[0][0]'] \n",
" ization) \n",
" \n",
" conv5_block2_2_relu (Activatio (None, 7, 7, 512) 0 ['conv5_block2_2_bn[0][0]'] \n",
" n) \n",
" \n",
" conv5_block2_3_conv (Conv2D) (None, 7, 7, 2048) 1050624 ['conv5_block2_2_relu[0][0]'] \n",
" \n",
" conv5_block2_3_bn (BatchNormal (None, 7, 7, 2048) 8192 ['conv5_block2_3_conv[0][0]'] \n",
" ization) \n",
" \n",
" conv5_block2_add (Add) (None, 7, 7, 2048) 0 ['conv5_block1_out[0][0]', \n",
" 'conv5_block2_3_bn[0][0]'] \n",
" \n",
" conv5_block2_out (Activation) (None, 7, 7, 2048) 0 ['conv5_block2_add[0][0]'] \n",
" \n",
" conv5_block3_1_conv (Conv2D) (None, 7, 7, 512) 1049088 ['conv5_block2_out[0][0]'] \n",
" \n",
" conv5_block3_1_bn (BatchNormal (None, 7, 7, 512) 2048 ['conv5_block3_1_conv[0][0]'] \n",
" ization) \n",
" \n",
" conv5_block3_1_relu (Activatio (None, 7, 7, 512) 0 ['conv5_block3_1_bn[0][0]'] \n",
" n) \n",
" \n",
" conv5_block3_2_conv (Conv2D) (None, 7, 7, 512) 2359808 ['conv5_block3_1_relu[0][0]'] \n",
" \n",
" conv5_block3_2_bn (BatchNormal (None, 7, 7, 512) 2048 ['conv5_block3_2_conv[0][0]'] \n",
" ization) \n",
" \n",
" conv5_block3_2_relu (Activatio (None, 7, 7, 512) 0 ['conv5_block3_2_bn[0][0]'] \n",
" n) \n",
" \n",
" conv5_block3_3_conv (Conv2D) (None, 7, 7, 2048) 1050624 ['conv5_block3_2_relu[0][0]'] \n",
" \n",
" conv5_block3_3_bn (BatchNormal (None, 7, 7, 2048) 8192 ['conv5_block3_3_conv[0][0]'] \n",
" ization) \n",
" \n",
" conv5_block3_add (Add) (None, 7, 7, 2048) 0 ['conv5_block2_out[0][0]', \n",
" 'conv5_block3_3_bn[0][0]'] \n",
" \n",
" conv5_block3_out (Activation) (None, 7, 7, 2048) 0 ['conv5_block3_add[0][0]'] \n",
" \n",
" flatten_2 (Flatten) (None, 100352) 0 ['conv5_block3_out[0][0]'] \n",
" \n",
" dense_6 (Dense) (None, 5) 501765 ['flatten_2[0][0]'] \n",
" \n",
"==================================================================================================\n",
"Total params: 24,089,477\n",
"Trainable params: 501,765\n",
"Non-trainable params: 23,587,712\n",
"__________________________________________________________________________________________________\n"
]
}
],
"source": [
"from keras.layers import Input, Lambda, Dense, Flatten\n",
"from keras.models import Model\n",
"from keras.applications import ResNet50\n",
"from keras.preprocessing import image\n",
"from keras.preprocessing.image import ImageDataGenerator\n",
"from keras.models import Sequential\n",
"import numpy as np\n",
"from glob import glob\n",
"import matplotlib.pyplot as plt\n",
"\n",
"# re-size all the images to this\n",
"IMAGE_SIZE = [224, 224]\n",
"\n",
"# add preprocessing layer to the front of resnet\n",
"resnet = ResNet50(input_shape=IMAGE_SIZE + [3], weights='imagenet', include_top=False)\n",
"\n",
"# don't train existing weights\n",
"for layer in resnet.layers:\n",
" layer.trainable = False\n",
" \n",
" # useful for getting number of classes\n",
"classes = 5\n",
" \n",
"\n",
"# our layers - you can add more if you want\n",
"x = Flatten()(resnet.output)\n",
"# x = Dense(1000, activation='relu')(x)\n",
"prediction = Dense(5, activation='softmax')(x)\n",
"\n",
"# create a model object\n",
"model = Model(inputs=resnet.input, outputs=prediction)\n",
"\n",
"# view the structure of the model\n",
"model.summary()\n",
"\n",
"# tell the model what cost and optimization method to use\n",
"model.compile(\n",
" loss='sparse_categorical_crossentropy',\n",
" optimizer='adam',\n",
" metrics=['accuracy']\n",
")"
]
},
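{
"attachments": {},
"cell_type": "markdown",
"metadata": {},
"source": [
"One caveat when reusing the ImageNet weights: `keras.applications.resnet50.preprocess_input` expects raw pixel values in the 0-255 range and subtracts the ImageNet channel means, while `load_data` above rescales images to [0, 1]. The sketch below shows how that preprocessing could be mapped over an already batched `tf.data` pipeline; it is not applied in the runs recorded in this notebook."
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Sketch: apply the ResNet50 ImageNet preprocessing to a dataset whose images\n",
"# were rescaled to [0, 1] by load_data (hence the cast and the * 255.0).\n",
"import tensorflow as tf\n",
"from keras.applications.resnet50 import preprocess_input\n",
"\n",
"def with_resnet_preprocessing(ds):\n",
"    return ds.map(lambda images, labels: (preprocess_input(tf.cast(images, tf.float32) * 255.0), labels))\n",
"\n",
"# e.g. train_ds_r = with_resnet_preprocessing(train_ds_r)"
]
},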
{
"cell_type": "code",
"execution_count": 1,
"metadata": {},
"outputs": [
{
"ename": "NameError",
"evalue": "name 'prepare_data' is not defined",
"output_type": "error",
"traceback": [
"\u001b[0;31m---------------------------------------------------------------------------\u001b[0m",
"\u001b[0;31mNameError\u001b[0m Traceback (most recent call last)",
"Cell \u001b[0;32mIn[1], line 1\u001b[0m\n\u001b[0;32m----> 1\u001b[0m train_ds_r, test_ds_r, val_ds_r \u001b[39m=\u001b[39m prepare_data(\u001b[39m'\u001b[39m\u001b[39m./plantvillage/color\u001b[39m\u001b[39m'\u001b[39m, img_size\u001b[39m=\u001b[39mIMAGE_SIZE, test_size\u001b[39m=\u001b[39m\u001b[39m0.2\u001b[39m, val_size\u001b[39m=\u001b[39m\u001b[39m0.2\u001b[39m)\n",
"\u001b[0;31mNameError\u001b[0m: name 'prepare_data' is not defined"
]
}
],
"source": [
"train_ds_r, test_ds_r, val_ds_r = prepare_data('./plantvillage/color', img_size=IMAGE_SIZE, test_size=0.2, val_size=0.2)"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"r = model.fit_generator(\n",
" train_ds_r,\n",
" validation_data=val_ds_r,\n",
" epochs=1,\n",
" steps_per_epoch=len(train_ds_r),\n",
" validation_steps=len(val_ds_r)\n",
")"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"plt.plot(r.history[\"accuracy\"])\n",
"plt.plot(r.history['val_accuracy'])\n",
"plt.plot(r.history['loss'])\n",
"plt.plot(r.history['val_loss'])\n",
"plt.title(\"Model accuracy\")\n",
"plt.ylabel(\"Value\")\n",
"plt.xlabel(\"Epoch\")\n",
"plt.legend([\"Accuracy\",\"Validation Accuracy\",\"Loss\",\"Validation Loss\"])\n",
"plt.show()"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"model.save('resnet_2.h5')\n",
"model.evaluate(test_ds_r)"
]
}
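,
{
"attachments": {},
"cell_type": "markdown",
"metadata": {},
"source": [
"To compare the three architectures side by side, the checkpoints saved above (`alex_2.h5`, `vgg16_2.h5`, `resnet_2.h5`) can be reloaded and evaluated on their respective test splits. A sketch, assuming the files are in the working directory and the `test_ds_*` datasets are still in memory:"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Sketch: reload the saved models and report test loss/accuracy for each.\n",
"from keras.models import load_model\n",
"\n",
"for path, ds in [('alex_2.h5', test_ds_a), ('vgg16_2.h5', test_ds_v), ('resnet_2.h5', test_ds_r)]:\n",
"    m = load_model(path)\n",
"    loss, acc = m.evaluate(ds, verbose=0)\n",
"    print(f'{path}: loss={loss:.4f}, accuracy={acc:.4f}')"
]
}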
],
"metadata": {
"kernelspec": {
"display_name": "Python 3",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.9.6"
},
"orig_nbformat": 4,
"vscode": {
"interpreter": {
"hash": "31f2aee4e71d21fbe5cf8b01ff0e069b9275f58929596ceb00d14d90e3e16cd6"
}
}
},
"nbformat": 4,
"nbformat_minor": 2
}