{
"cells": [
{
"attachments": {},
"cell_type": "markdown",
"metadata": {},
"source": [
"### Aleksandra Jonas, Aleksandra Gronowska, Iwona Christop\n",
2023-01-07 22:15:23 +01:00
"# Zadanie 9-10, zadanie 1 - VGG16 + ResNet on train_test_sw "
2023-01-06 03:02:47 +01:00
]
},
{
"attachments": {},
"cell_type": "markdown",
"metadata": {},
"source": [
"### Przygotowanie danych"
]
},
{
"cell_type": "code",
2023-01-10 17:15:16 +01:00
"execution_count": 40,
2023-01-06 03:02:47 +01:00
"metadata": {},
"outputs": [],
"source": [
"from IPython.display import Image, display"
]
},
{
"cell_type": "code",
2023-01-10 17:15:16 +01:00
"execution_count": 41,
2023-01-06 03:02:47 +01:00
"id": "2fe63b50",
"metadata": {},
"outputs": [],
"source": [
"import sys\n",
"import subprocess\n",
"import pkg_resources\n",
"import numpy as np\n",
"\n",
"required = { 'scikit-image'}\n",
"installed = {pkg.key for pkg in pkg_resources.working_set}\n",
"missing = required - installed\n",
"# VGG16 requires images to be of dim = (224, 224, 3)\n",
"newSize = (224,224)\n",
"\n",
"if missing: \n",
" python = sys.executable\n",
" subprocess.check_call([python, '-m', 'pip', 'install', *missing], stdout=subprocess.DEVNULL)\n",
"\n",
"def load_train_data(input_dir):\n",
"    \"\"\"Load training images from per-category subfolders of input_dir.\n",
"\n",
"    Each subdirectory name is treated as a category label; every image is\n",
"    resized to the module-level newSize (224x224, required by VGG16) and\n",
"    scaled to [0, 1]. Files named 'desktop.ini' are skipped.\n",
"\n",
"    Returns a dict with keys:\n",
"      'values'           - ndarray of the preprocessed images\n",
"      'categories_name'  - list of category (subfolder) names\n",
"      'categories_count' - number of images loaded per category\n",
"      'labels'           - category label per image, aligned with 'values'\n",
"    \"\"\"\n",
"    import numpy as np\n",
"    import pandas as pd\n",
"    import os\n",
"    from skimage.io import imread\n",
"    import cv2 as cv\n",
"    from pathlib import Path\n",
"    import random\n",
"    from shutil import copyfile, rmtree\n",
"    import json\n",
"\n",
"    import seaborn as sns\n",
"    import matplotlib.pyplot as plt\n",
"\n",
"    import matplotlib\n",
"    \n",
"    image_dir = Path(input_dir)\n",
"    categories_name = []\n",
"    for file in os.listdir(image_dir):\n",
"        d = os.path.join(image_dir, file)\n",
"        if os.path.isdir(d):\n",
"            categories_name.append(file)\n",
"\n",
"    folders = [directory for directory in image_dir.iterdir() if directory.is_dir()]\n",
"\n",
"    train_img = []\n",
"    categories_count=[]\n",
"    labels=[]\n",
"    for i, direc in enumerate(folders):\n",
"        count = 0\n",
"        for obj in direc.iterdir():\n",
"            if os.path.isfile(obj) and os.path.basename(os.path.normpath(obj)) != 'desktop.ini':\n",
"                labels.append(os.path.basename(os.path.normpath(direc)))\n",
"                count += 1\n",
"                img = imread(obj)# returns ndarray of shape xSize x ySize x colorDepth\n",
"                img = img[:, :, :3]# keep only the first 3 channels (drops alpha if present)\n",
"                img = cv.resize(img, newSize, interpolation=cv.INTER_AREA)# returns ndarray\n",
"                img = img / 255 # normalization to [0, 1]\n",
"                train_img.append(img)\n",
"        categories_count.append(count)\n",
"    X={}\n",
"    X[\"values\"] = np.array(train_img)\n",
"    X[\"categories_name\"] = categories_name\n",
"    X[\"categories_count\"] = categories_count\n",
"    X[\"labels\"]=labels\n",
"    return X\n",
"\n",
"def load_test_data(input_dir):\n",
"    \"\"\"Load test images listed in test_labels.json (in input_dir's parent).\n",
"\n",
"    Each entry in the JSON file provides 'filename' and 'value' (the label).\n",
"    Images are resized to the module-level newSize and scaled to [0, 1].\n",
"\n",
"    Returns a dict with keys:\n",
"      'values'           - ndarray of the preprocessed images\n",
"      'categories_name'  - distinct label values, in order of first appearance\n",
"      'categories_count' - run lengths of consecutive identical labels\n",
"      'labels'           - label per image, aligned with 'values'\n",
"    \"\"\"\n",
"    import numpy as np\n",
"    import pandas as pd\n",
"    import os\n",
"    from skimage.io import imread\n",
"    import cv2 as cv\n",
"    from pathlib import Path\n",
"    import random\n",
"    from shutil import copyfile, rmtree\n",
"    import json\n",
"\n",
"    import seaborn as sns\n",
"    import matplotlib.pyplot as plt\n",
"\n",
"    import matplotlib\n",
"\n",
"    image_path = Path(input_dir)\n",
"\n",
"    labels_path = image_path.parents[0] / 'test_labels.json'\n",
"\n",
"    jsonString = labels_path.read_text()\n",
"    objects = json.loads(jsonString)\n",
"\n",
"    # NOTE(review): the counting below assumes entries in test_labels.json are\n",
"    # grouped by 'value'; interleaved labels would split a category into\n",
"    # multiple runs and distort categories_count - confirm the file ordering.\n",
"    categories_name = []\n",
"    categories_count=[]\n",
"    count = 0\n",
"    c = objects[0]['value']\n",
"    for e in objects:\n",
"        if e['value'] != c:\n",
"            categories_count.append(count)\n",
"            c = e['value']\n",
"            count = 1\n",
"        else:\n",
"            count += 1\n",
"        if not e['value'] in categories_name:\n",
"            categories_name.append(e['value'])\n",
"\n",
"    categories_count.append(count)\n",
"    \n",
"    test_img = []\n",
"\n",
"    labels=[]\n",
"    for e in objects:\n",
"        p = image_path / e['filename']\n",
"        img = imread(p)# returns ndarray of shape xSize x ySize x colorDepth\n",
"        img = img[:, :, :3]# keep only the first 3 channels (drops alpha if present)\n",
"        img = cv.resize(img, newSize, interpolation=cv.INTER_AREA)# returns ndarray\n",
"        img = img / 255# normalization to [0, 1]\n",
"        test_img.append(img)\n",
"        labels.append(e['value'])\n",
"\n",
"    X={}\n",
"    X[\"values\"] = np.array(test_img)\n",
"    X[\"categories_name\"] = categories_name\n",
"    X[\"categories_count\"] = categories_count\n",
"    X[\"labels\"]=labels\n",
"    return X"
]
},
{
"cell_type": "code",
2023-01-10 17:15:16 +01:00
"execution_count": 42,
2023-01-06 03:02:47 +01:00
"metadata": {},
"outputs": [],
"source": [
"def create_tf_ds(X_train, y_train_enc, X_validate, y_validate_enc, X_test, y_test_enc):\n",
"    \"\"\"Wrap the numpy splits into shuffled, batched tf.data pipelines.\n",
"\n",
"    Returns (train_ds, test_ds, validation_ds), each batched at 32 with\n",
"    drop_remainder=True (so up to 31 trailing samples per split are dropped,\n",
"    which matches the printed sizes vs. the evaluated batch counts).\n",
"    \"\"\"\n",
"    import tensorflow as tf\n",
"    \n",
"    train_ds = tf.data.Dataset.from_tensor_slices((X_train, y_train_enc))\n",
"    validation_ds = tf.data.Dataset.from_tensor_slices((X_validate, y_validate_enc))\n",
"    test_ds = tf.data.Dataset.from_tensor_slices((X_test, y_test_enc))\n",
"\n",
"    train_ds_size = tf.data.experimental.cardinality(train_ds).numpy()\n",
"    test_ds_size = tf.data.experimental.cardinality(test_ds).numpy()\n",
"    validation_ds_size = tf.data.experimental.cardinality(validation_ds).numpy()\n",
"\n",
"    print(\"Training data size:\", train_ds_size)\n",
"    print(\"Test data size:\", test_ds_size)\n",
"    print(\"Validation data size:\", validation_ds_size)\n",
"\n",
"    train_ds = (train_ds\n",
"                  .shuffle(buffer_size=train_ds_size)\n",
"                  .batch(batch_size=32, drop_remainder=True))\n",
"    # Fix: test/validation previously shuffled with buffer_size=train_ds_size\n",
"    # (copy-paste); use each dataset's own cardinality for a full uniform shuffle.\n",
"    test_ds = (test_ds\n",
"                  .shuffle(buffer_size=test_ds_size)\n",
"                  .batch(batch_size=32, drop_remainder=True))\n",
"    validation_ds = (validation_ds\n",
"                  .shuffle(buffer_size=validation_ds_size)\n",
"                  .batch(batch_size=32, drop_remainder=True))\n",
"    \n",
"    return train_ds, test_ds, validation_ds"
]
},
{
"cell_type": "code",
2023-01-10 17:15:16 +01:00
"execution_count": 43,
2023-01-06 03:02:47 +01:00
"metadata": {},
"outputs": [],
"source": [
"def get_run_logdir(root_logdir):\n",
"    \"\"\"Return a unique, timestamped run directory path under root_logdir.\"\"\"\n",
"    import os\n",
"    import time\n",
"\n",
"    return os.path.join(root_logdir, time.strftime(\"run_%Y_%m_%d-%H_%M_%S\"))"
]
},
{
"cell_type": "code",
2023-01-10 17:15:16 +01:00
"execution_count": 44,
2023-01-06 03:02:47 +01:00
"metadata": {},
"outputs": [],
"source": [
"def diagram_setup(model_name):\n",
"    \"\"\"Create a TensorBoard callback logging under ./logs/fit/<model_name>/run_<timestamp>.\n",
"\n",
"    Returns the callback so callers can pass it to model.fit via callbacks=[...];\n",
"    the original version discarded it, making this function a no-op. The log\n",
"    path now uses os.path.join instead of hard-coded Windows backslashes\n",
"    (the kernel runs on a POSIX system per the tracebacks in this notebook).\n",
"    \"\"\"\n",
"    from tensorflow import keras\n",
"    import os\n",
"    \n",
"    root_logdir = os.path.join(os.curdir, 'logs', 'fit', model_name)\n",
"    \n",
"    run_logdir = get_run_logdir(root_logdir)\n",
"    tensorboard_cb = keras.callbacks.TensorBoard(run_logdir)\n",
"    return tensorboard_cb"
]
},
{
"cell_type": "code",
2023-01-10 17:15:16 +01:00
"execution_count": 45,
2023-01-06 03:02:47 +01:00
"id": "cc941c5a",
"metadata": {},
"outputs": [],
"source": [
"# Data load\n",
"data_train = load_train_data(\"./train_test_sw/train_sw\")\n",
"values_train = data_train['values']\n",
"labels_train = data_train['labels']\n",
"\n",
"data_test = load_test_data(\"./train_test_sw/test_sw\")\n",
"X_test = data_test['values']\n",
"y_test = data_test['labels']"
]
},
{
"cell_type": "code",
2023-01-10 17:15:16 +01:00
"execution_count": 46,
2023-01-06 03:02:47 +01:00
"id": "25040ac9",
"metadata": {},
"outputs": [],
"source": [
"from sklearn.model_selection import train_test_split\n",
"# Hold out 20% of the training images for validation; fixed seed for reproducibility.\n",
"X_train, X_validate, y_train, y_validate = train_test_split(values_train, labels_train, test_size=0.2, random_state=42)"
]
},
{
"cell_type": "code",
2023-01-10 17:15:16 +01:00
"execution_count": 47,
2023-01-06 03:02:47 +01:00
"id": "a1fe47e6",
"metadata": {},
"outputs": [],
"source": [
"from sklearn.preprocessing import LabelEncoder\n",
"class_le = LabelEncoder()\n",
"# Fit the label mapping on the training labels only; validation/test must use\n",
"# transform (not fit_transform), otherwise a split missing a class - or seeing\n",
"# classes in a different sorted order - would silently remap the integer labels.\n",
"y_train_enc = class_le.fit_transform(y_train)\n",
"y_validate_enc = class_le.transform(y_validate)\n",
"y_test_enc = class_le.transform(y_test)"
]
},
{
"cell_type": "code",
2023-01-10 17:15:16 +01:00
"execution_count": 48,
2023-01-06 03:02:47 +01:00
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
2023-01-07 22:15:23 +01:00
"Training data size: 821\n",
2023-01-06 03:02:47 +01:00
"Test data size: 259\n",
"Validation data size: 206\n"
]
}
],
"source": [
"train_ds, test_ds, validation_ds = create_tf_ds(X_train, y_train_enc, X_validate, y_validate_enc, X_test, y_test_enc)"
]
},
{
"attachments": {},
"cell_type": "markdown",
"metadata": {},
"source": [
"## VGG16"
]
},
{
"cell_type": "code",
2023-01-10 17:15:16 +01:00
"execution_count": 49,
2023-01-06 03:02:47 +01:00
"metadata": {},
"outputs": [],
"source": [
"diagram_setup('vgg_sw')"
]
},
{
"cell_type": "code",
2023-01-10 17:15:16 +01:00
"execution_count": 50,
2023-01-06 03:02:47 +01:00
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
2023-01-10 17:15:16 +01:00
"Model: \"model_3\"\n",
2023-01-06 03:02:47 +01:00
"_________________________________________________________________\n",
" Layer (type) Output Shape Param # \n",
"=================================================================\n",
2023-01-10 17:15:16 +01:00
" input_4 (InputLayer) [(None, 224, 224, 3)] 0 \n",
2023-01-06 03:02:47 +01:00
" \n",
2023-01-10 17:15:16 +01:00
" block1_conv1 (Conv2D) (None, 224, 224, 64) 1792 \n",
2023-01-06 03:02:47 +01:00
" \n",
2023-01-10 17:15:16 +01:00
" block1_conv2 (Conv2D) (None, 224, 224, 64) 36928 \n",
2023-01-06 03:02:47 +01:00
" \n",
2023-01-10 17:15:16 +01:00
" block1_pool (MaxPooling2D) (None, 112, 112, 64) 0 \n",
2023-01-06 03:02:47 +01:00
" \n",
2023-01-10 17:15:16 +01:00
" block2_conv1 (Conv2D) (None, 112, 112, 128) 73856 \n",
2023-01-06 03:02:47 +01:00
" \n",
2023-01-10 17:15:16 +01:00
" block2_conv2 (Conv2D) (None, 112, 112, 128) 147584 \n",
2023-01-06 03:02:47 +01:00
" \n",
2023-01-10 17:15:16 +01:00
" block2_pool (MaxPooling2D) (None, 56, 56, 128) 0 \n",
2023-01-06 03:02:47 +01:00
" \n",
2023-01-10 17:15:16 +01:00
" block3_conv1 (Conv2D) (None, 56, 56, 256) 295168 \n",
2023-01-06 03:02:47 +01:00
" \n",
2023-01-10 17:15:16 +01:00
" block3_conv2 (Conv2D) (None, 56, 56, 256) 590080 \n",
2023-01-06 03:02:47 +01:00
" \n",
2023-01-10 17:15:16 +01:00
" block3_conv3 (Conv2D) (None, 56, 56, 256) 590080 \n",
2023-01-06 03:02:47 +01:00
" \n",
2023-01-10 17:15:16 +01:00
" block3_pool (MaxPooling2D) (None, 28, 28, 256) 0 \n",
2023-01-06 03:02:47 +01:00
" \n",
2023-01-10 17:15:16 +01:00
" block4_conv1 (Conv2D) (None, 28, 28, 512) 1180160 \n",
2023-01-06 03:02:47 +01:00
" \n",
2023-01-10 17:15:16 +01:00
" block4_conv2 (Conv2D) (None, 28, 28, 512) 2359808 \n",
2023-01-06 03:02:47 +01:00
" \n",
2023-01-10 17:15:16 +01:00
" block4_conv3 (Conv2D) (None, 28, 28, 512) 2359808 \n",
2023-01-06 03:02:47 +01:00
" \n",
2023-01-10 17:15:16 +01:00
" block4_pool (MaxPooling2D) (None, 14, 14, 512) 0 \n",
2023-01-06 03:02:47 +01:00
" \n",
2023-01-10 17:15:16 +01:00
" block5_conv1 (Conv2D) (None, 14, 14, 512) 2359808 \n",
2023-01-06 03:02:47 +01:00
" \n",
2023-01-10 17:15:16 +01:00
" block5_conv2 (Conv2D) (None, 14, 14, 512) 2359808 \n",
2023-01-06 03:02:47 +01:00
" \n",
2023-01-10 17:15:16 +01:00
" block5_conv3 (Conv2D) (None, 14, 14, 512) 2359808 \n",
2023-01-06 03:02:47 +01:00
" \n",
2023-01-10 17:15:16 +01:00
" block5_pool (MaxPooling2D) (None, 7, 7, 512) 0 \n",
2023-01-06 03:02:47 +01:00
" \n",
2023-01-10 17:15:16 +01:00
" flatten_3 (Flatten) (None, 25088) 0 \n",
2023-01-06 03:02:47 +01:00
" \n",
2023-01-10 17:15:16 +01:00
" dense_3 (Dense) (None, 5) 125445 \n",
2023-01-06 03:02:47 +01:00
" \n",
"=================================================================\n",
2023-01-10 17:15:16 +01:00
"Total params: 14,840,133\n",
"Trainable params: 125,445\n",
"Non-trainable params: 14,714,688\n",
"_________________________________________________________________\n",
2023-01-07 22:15:23 +01:00
"Epoch 1/25\n"
2023-01-06 03:02:47 +01:00
]
},
{
"name": "stderr",
"output_type": "stream",
"text": [
2023-01-10 17:15:16 +01:00
"/var/folders/3r/c8tg1h051m18qhsdccdysrt40000gn/T/ipykernel_11345/3456911324.py:75: UserWarning: `Model.fit_generator` is deprecated and will be removed in a future version. Please use `Model.fit`, which supports generators.\n",
" vggr = model.fit_generator(\n"
2023-01-06 03:02:47 +01:00
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
2023-01-10 17:15:16 +01:00
"25/25 [==============================] - 117s 5s/step - loss: 1.4384 - accuracy: 0.4363 - val_loss: 0.8596 - val_accuracy: 0.6719\n",
2023-01-07 22:15:23 +01:00
"Epoch 2/25\n",
2023-01-10 17:15:16 +01:00
"25/25 [==============================] - 121s 5s/step - loss: 0.6040 - accuracy: 0.7975 - val_loss: 0.6615 - val_accuracy: 0.7552\n",
2023-01-07 22:15:23 +01:00
"Epoch 3/25\n",
2023-01-10 17:15:16 +01:00
"25/25 [==============================] - 126s 5s/step - loss: 0.3955 - accuracy: 0.9000 - val_loss: 0.5536 - val_accuracy: 0.7969\n",
2023-01-07 22:15:23 +01:00
"Epoch 4/25\n",
2023-01-10 17:15:16 +01:00
"25/25 [==============================] - 124s 5s/step - loss: 0.3278 - accuracy: 0.9237 - val_loss: 0.5154 - val_accuracy: 0.8438\n",
2023-01-07 22:15:23 +01:00
"Epoch 5/25\n",
2023-01-10 17:15:16 +01:00
"25/25 [==============================] - 124s 5s/step - loss: 0.2700 - accuracy: 0.9350 - val_loss: 0.5352 - val_accuracy: 0.7969\n",
2023-01-07 22:15:23 +01:00
"Epoch 6/25\n",
2023-01-10 17:15:16 +01:00
"25/25 [==============================] - 119s 5s/step - loss: 0.2109 - accuracy: 0.9538 - val_loss: 0.3983 - val_accuracy: 0.8854\n",
2023-01-07 22:15:23 +01:00
"Epoch 7/25\n",
2023-01-10 17:15:16 +01:00
"25/25 [==============================] - 117s 5s/step - loss: 0.1713 - accuracy: 0.9812 - val_loss: 0.3841 - val_accuracy: 0.8802\n",
2023-01-07 22:15:23 +01:00
"Epoch 8/25\n",
2023-01-10 17:15:16 +01:00
"25/25 [==============================] - 115s 5s/step - loss: 0.1519 - accuracy: 0.9850 - val_loss: 0.3871 - val_accuracy: 0.8854\n",
2023-01-07 22:15:23 +01:00
"Epoch 9/25\n",
2023-01-10 17:15:16 +01:00
"25/25 [==============================] - 117s 5s/step - loss: 0.1412 - accuracy: 0.9800 - val_loss: 0.4005 - val_accuracy: 0.8958\n",
2023-01-07 22:15:23 +01:00
"Epoch 10/25\n",
2023-01-10 17:15:16 +01:00
"25/25 [==============================] - 116s 5s/step - loss: 0.1176 - accuracy: 0.9900 - val_loss: 0.3657 - val_accuracy: 0.9062\n",
2023-01-07 22:15:23 +01:00
"Epoch 11/25\n",
2023-01-10 17:15:16 +01:00
"25/25 [==============================] - 116s 5s/step - loss: 0.1200 - accuracy: 0.9825 - val_loss: 0.3862 - val_accuracy: 0.8646\n",
2023-01-07 22:15:23 +01:00
"Epoch 12/25\n",
2023-01-10 17:15:16 +01:00
"25/25 [==============================] - 116s 5s/step - loss: 0.0958 - accuracy: 0.9912 - val_loss: 0.3412 - val_accuracy: 0.9010\n",
2023-01-07 22:15:23 +01:00
"Epoch 13/25\n",
2023-01-10 17:15:16 +01:00
"25/25 [==============================] - 113s 5s/step - loss: 0.0914 - accuracy: 0.9925 - val_loss: 0.3484 - val_accuracy: 0.8854\n",
2023-01-07 22:15:23 +01:00
"Epoch 14/25\n",
2023-01-10 17:15:16 +01:00
"25/25 [==============================] - 115s 5s/step - loss: 0.0799 - accuracy: 0.9950 - val_loss: 0.3406 - val_accuracy: 0.8906\n",
2023-01-07 22:15:23 +01:00
"Epoch 15/25\n",
2023-01-10 17:15:16 +01:00
"25/25 [==============================] - 118s 5s/step - loss: 0.0714 - accuracy: 0.9975 - val_loss: 0.3355 - val_accuracy: 0.8958\n",
2023-01-07 22:15:23 +01:00
"Epoch 16/25\n",
2023-01-10 17:15:16 +01:00
"25/25 [==============================] - 121s 5s/step - loss: 0.0728 - accuracy: 0.9950 - val_loss: 0.3384 - val_accuracy: 0.9062\n",
2023-01-07 22:15:23 +01:00
"Epoch 17/25\n",
2023-01-10 17:15:16 +01:00
"25/25 [==============================] - 120s 5s/step - loss: 0.0674 - accuracy: 0.9962 - val_loss: 0.3627 - val_accuracy: 0.8958\n",
2023-01-07 22:15:23 +01:00
"Epoch 18/25\n",
2023-01-10 17:15:16 +01:00
"25/25 [==============================] - 118s 5s/step - loss: 0.0580 - accuracy: 0.9962 - val_loss: 0.3231 - val_accuracy: 0.9115\n",
2023-01-07 22:15:23 +01:00
"Epoch 19/25\n",
2023-01-10 17:15:16 +01:00
"25/25 [==============================] - 118s 5s/step - loss: 0.0509 - accuracy: 0.9987 - val_loss: 0.3387 - val_accuracy: 0.8958\n",
2023-01-07 22:15:23 +01:00
"Epoch 20/25\n",
2023-01-10 17:15:16 +01:00
"25/25 [==============================] - 119s 5s/step - loss: 0.0492 - accuracy: 0.9987 - val_loss: 0.3076 - val_accuracy: 0.8906\n",
2023-01-07 22:15:23 +01:00
"Epoch 21/25\n",
2023-01-10 17:15:16 +01:00
"25/25 [==============================] - 1405s 58s/step - loss: 0.0458 - accuracy: 0.9987 - val_loss: 0.3350 - val_accuracy: 0.8854\n",
2023-01-07 22:15:23 +01:00
"Epoch 22/25\n",
2023-01-10 17:15:16 +01:00
"25/25 [==============================] - 1635s 68s/step - loss: 0.0458 - accuracy: 0.9975 - val_loss: 0.3148 - val_accuracy: 0.9062\n",
2023-01-07 22:15:23 +01:00
"Epoch 23/25\n",
2023-01-10 17:15:16 +01:00
"25/25 [==============================] - 103s 4s/step - loss: 0.0384 - accuracy: 1.0000 - val_loss: 0.3446 - val_accuracy: 0.8750\n",
"Epoch 24/25\n",
"25/25 [==============================] - 106s 4s/step - loss: 0.0387 - accuracy: 0.9987 - val_loss: 0.2885 - val_accuracy: 0.9062\n",
"Epoch 25/25\n",
"25/25 [==============================] - 109s 4s/step - loss: 0.0335 - accuracy: 1.0000 - val_loss: 0.2845 - val_accuracy: 0.8958\n"
2023-01-06 03:02:47 +01:00
]
}
],
"source": [
2023-01-10 17:15:16 +01:00
"import keras,os\n",
"from keras.models import Sequential\n",
"from keras.layers import Dense, Conv2D, MaxPool2D , Flatten\n",
"from keras.preprocessing.image import ImageDataGenerator\n",
"import numpy as np\n",
"from keras.applications import VGG16\n",
"from keras.layers import Input, Lambda, Dense, Flatten\n",
"from keras.models import Model\n",
"from keras.preprocessing import image\n",
"from keras.preprocessing.image import ImageDataGenerator\n",
"from keras.models import Sequential\n",
"import numpy as np\n",
"from glob import glob\n",
"import matplotlib.pyplot as plt\n",
"import ssl\n",
"ssl._create_default_https_context = ssl._create_unverified_context\n",
"\n",
"# model = keras.models.Sequential([\n",
"# keras.layers.Conv2D(filters=64, kernel_size=(3,3), activation='relu', input_shape=(224,224,3), padding=\"same\"),\n",
"# keras.layers.Conv2D(filters=64, kernel_size=(3,3), activation='relu', input_shape=(224,224,3), padding=\"same\"),\n",
"# keras.layers.MaxPool2D(pool_size=(2,2), strides=(2,2)),\n",
"# keras.layers.Conv2D(filters=128, kernel_size=(3,3), padding=\"same\", activation=\"relu\"),\n",
"# keras.layers.Conv2D(filters=128, kernel_size=(3,3), padding=\"same\", activation=\"relu\"),\n",
"# keras.layers.MaxPool2D(pool_size=(2,2), strides=(2,2)),\n",
"# keras.layers.Conv2D(filters=256, kernel_size=(3,3), padding=\"same\", activation=\"relu\"),\n",
"# keras.layers.Conv2D(filters=256, kernel_size=(3,3), padding=\"same\", activation=\"relu\"),\n",
"# keras.layers.Conv2D(filters=256, kernel_size=(3,3), padding=\"same\", activation=\"relu\"),\n",
"# keras.layers.MaxPool2D(pool_size=(2,2), strides=(2,2)),\n",
"# keras.layers.Conv2D(filters=512, kernel_size=(3,3), padding=\"same\", activation=\"relu\"),\n",
"# keras.layers.Conv2D(filters=512, kernel_size=(3,3), padding=\"same\", activation=\"relu\"),\n",
"# keras.layers.Conv2D(filters=512, kernel_size=(3,3), padding=\"same\", activation=\"relu\"),\n",
"# keras.layers.MaxPool2D(pool_size=(2,2), strides=(2,2)),\n",
"# keras.layers.Conv2D(filters=512, kernel_size=(3,3), padding=\"same\", activation=\"relu\"),\n",
"# keras.layers.Conv2D(filters=512, kernel_size=(3,3), padding=\"same\", activation=\"relu\"),\n",
"# keras.layers.Conv2D(filters=512, kernel_size=(3,3), padding=\"same\", activation=\"relu\"),\n",
"# keras.layers.Flatten(),\n",
"# keras.layers.Dense(units = 4096, activation='relu'),\n",
"# keras.layers.Dense(units = 4096, activation='relu'),\n",
"# keras.layers.Dense(units = 5, activation='softmax')\n",
"# ])\n",
"\n",
"# re-size all the images to this\n",
"IMAGE_SIZE = [224, 224]\n",
"\n",
"# add preprocessing layer to the front of resnet\n",
"vgg2 = VGG16(input_shape=IMAGE_SIZE + [3], weights='imagenet', include_top=False)\n",
"\n",
"# don't train existing weights\n",
"for layer in vgg2.layers:\n",
" layer.trainable = False\n",
" \n",
" # useful for getting number of classes\n",
"classes = 5\n",
" \n",
"\n",
"# our layers - you can add more if you want\n",
"x = Flatten()(vgg2.output)\n",
"# x = Dense(1000, activation='relu')(x)\n",
"prediction = Dense(5, activation='softmax')(x)\n",
"\n",
"# create a model object\n",
"model = Model(inputs=vgg2.input, outputs=prediction)\n",
"\n",
"# view the structure of the model\n",
"model.summary()\n",
"# tell the model what cost and optimization method to use\n",
"model.compile(\n",
" loss='sparse_categorical_crossentropy',\n",
" optimizer='adam',\n",
" metrics=['accuracy']\n",
")\n",
"\n",
"#train_ds_vgg_sw, test_ds_vgg_sw, validation_ds_vgg_sw\n",
"# Fit the model. Model.fit accepts tf.data datasets directly; fit_generator is\n",
"# deprecated (see the UserWarning in this cell's stderr output) and removed in\n",
"# newer Keras versions.\n",
"vggr = model.fit(\n",
"  train_ds,\n",
"  validation_data=validation_ds,\n",
"  epochs=25,\n",
"  steps_per_epoch=len(train_ds),\n",
"  validation_steps=len(validation_ds)\n",
")"
2023-01-06 03:02:47 +01:00
]
},
{
"cell_type": "code",
2023-01-10 17:15:16 +01:00
"execution_count": 51,
2023-01-06 03:02:47 +01:00
"metadata": {},
"outputs": [
{
"data": {
2023-01-10 17:15:16 +01:00
"image/png": "iVBORw0KGgoAAAANSUhEUgAAAjwAAAHHCAYAAAC7soLdAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjYuMiwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8o6BhiAAAACXBIWXMAAA9hAAAPYQGoP6dpAACT0klEQVR4nOzdd3xT9f7H8VdGm3Slmw4olL2XjDJUQFBEQBAEBBGU4QIcOJAfKHIduFD04kSGXGWjiIIiIEv2XrIps6V7t2mb5Pz+CA0NLdBC0tDyed6bR5KTMz4ptXnne77n+1UpiqIghBBCCFGBqV1dgBBCCCGEs0ngEUIIIUSFJ4FHCCGEEBWeBB4hhBBCVHgSeIQQQghR4UngEUIIIUSFJ4FHCCGEEBWeBB4hhBBCVHgSeIQQQghR4UngEUI4jEql4u233y71dmfOnEGlUjFnzhyH1ySEECCBR4gKZ86cOahUKlQqFf/880+R1xVFISIiApVKRY8ePVxQoRBClD0JPEJUUHq9nnnz5hVZvmHDBi5cuIBOp3NBVUII4RoSeISooB566CEWL16MyWSyWz5v3jxatGhBaGioiyq7c2RlZbm6BCHEZRJ4hKigBg4cSFJSEqtXr7Yty8vLY8mSJQwaNKjYbbKysnjllVeIiIhAp9NRt25dPvnkExRFsVsvNzeXl19+meDgYHx8fHj44Ye5cOFCsfu8ePEiw4YNIyQkBJ1OR8OGDZk1a9ZNvafk5GReffVVGjdujLe3NwaDgW7durF///4i6xqNRt5++23q1KmDXq8nLCyMPn36cOrUKds6FouFzz//nMaNG6PX6wkODubBBx9k165dwPX7Fl3dX+ntt99GpVLx77//MmjQIPz9/bn77rsBOHDgAE8++SQ1atRAr9cTGhrKsGHDSEpKKvbnNXz4cMLDw9HpdFSvXp3nnnuOvLw8Tp8+jUql4rPPPiuy3ZYtW1CpVMyfP7+0P1Yh7ghaVxcghHCOyMhI2rZty/z58+nWrRsAf/zxB2lpaTz22GN88cUXdusrisLDDz/MunXrGD58OM2aNWPVqlW89tprXLx40e5DdsSIEfz4448MGjSIdu3a8ffff9O9e/ciNcTFxdGmTRtUKhWjR48mODiYP/74g+HDh5Oens5LL71Uqvd0+vRpli1bRr9+/ahevTpxcXF8++23dOjQgX///Zfw8HAAzGYzPXr0YO3atTz22GO8+OKLZGRksHr1ag4dOkTNmjUBGD58OHPmzKFbt26MGDECk8nEpk2b2LZtGy1btixVbQX69etH7dq1ef/9921BcfXq1Zw+fZqnnnqK0NBQDh8+zHfffcfhw4fZtm0bKpUKgJiYGFq3bk1qaipPP/009erV4+LFiyxZsoTs7Gxq1KhB+/bt+emnn3j55ZftjvvTTz/h4+NDr169bqpuISo8RQhRocyePVsBlJ07dyrTp09XfHx8lOzsbEVRFKVfv35Kp06dFEVRlGrVqindu3e3bbds2TIFUN599127/T366KOKSqVSTp48qSiKouzbt08BlOeff95uvUGDBimAMmnSJNuy4cOHK2FhYUpiYqLduo899pji6+trqys6OloBlNmzZ1/3vRmNRsVsNtsti46OVnQ6nfKf//zHtmzWrFkKoHz66adF9mGxWBRFUZS///5bAZQXXnjhmutcr66r3+ukSZMUQBk4cGCRdQveZ2Hz589XAGXjxo22ZUOGDFHUarWyc+fOa9b07bffKoBy5MgR22t5eXlKUFCQMnTo0CLbCSGs5JSWEBVY//79ycnJ4ffffycjI4Pff//9mqezVq5ciUaj4YUXXrBb/sorr6AoCn/88YdtPaDIele31iiKwtKlS+nZsyeKopCYmGi7de3albS0NPbs2VOq96PT6VCrrX+2zGYzSUlJeHt7U7duXbt9LV26lKCgIMaMGVNkHwWtKUuXLkWlUjFp0qRrrnMznn322SLLPDw8bI+NRi
OJiYm0adMGwFa3xWJh2bJl9OzZs9jWpYKa+vfvj16v56effrK9tmrVKhITExk8ePBN1y1ERSeBR4gKLDg4mC5dujBv3jx+/vlnzGYzjz76aLHrnj17lvDwcHx8fOyW169f3/Z6wb1arbadFipQt25du+cJCQmkpqby3XffERwcbHd76qmnAIiPjy/V+7FYLHz22WfUrl0bnU5HUFAQwcHBHDhwgLS0NNt6p06dom7dumi11z5rf+rUKcLDwwkICChVDTdSvXr1IsuSk5N58cUXCQkJwcPDg+DgYNt6BXUnJCSQnp5Oo0aNrrt/Pz8/evbsaXcF3k8//UTlypW57777HPhOhKhYpA+PEBXcoEGDGDlyJJcuXaJbt274+fmVyXEtFgsAgwcPZujQocWu06RJk1Lt8/333+fNN99k2LBhvPPOOwQEBKBWq3nppZdsx3Oka7X0mM3ma25TuDWnQP/+/dmyZQuvvfYazZo1w9vbG4vFwoMPPnhTdQ8ZMoTFixezZcsWGjduzPLly3n++edtrV9CiKIk8AhRwT3yyCM888wzbNu2jYULF15zvWrVqrFmzRoyMjLsWnmOHj1qe73g3mKx2FpRChw7dsxufwVXcJnNZrp06eKQ97JkyRI6derEzJkz7ZanpqYSFBRke16zZk22b99Ofn4+bm5uxe6rZs2arFq1iuTk5Gu28vj7+9v2X1hBa1dJpKSksHbtWiZPnsxbb71lW37ixAm79YKDgzEYDBw6dOiG+3zwwQcJDg7mp59+IioqiuzsbJ544okS1yTEnUi+DghRwXl7e/P111/z9ttv07Nnz2uu99BDD2E2m5k+fbrd8s8++wyVSmW70qvg/uqrvKZNm2b3XKPR0LdvX5YuXVrsh3hCQkKp34tGoylyifzixYu5ePGi3bK+ffuSmJhY5L0Atu379u2LoihMnjz5musYDAaCgoLYuHGj3etfffVVqWouvM8CV/+81Go1vXv35rfffrNdFl9cTQBarZaBAweyaNEi5syZQ+PGjUvdWibEnUZaeIS4A1zrlFJhPXv2pFOnTkyYMIEzZ87QtGlT/vrrL3799VdeeuklW5+dZs2aMXDgQL766ivS0tJo164da9eu5eTJk0X2+cEHH7Bu3TqioqIYOXIkDRo0IDk5mT179rBmzRqSk5NL9T569OjBf/7zH5566inatWvHwYMH+emnn6hRo4bdekOGDGHu3LmMHTuWHTt2cM8995CVlcWaNWt4/vnn6dWrF506deKJJ57giy++4MSJE7bTS5s2baJTp06MHj0asF6C/8EHHzBixAhatmzJxo0bOX78eIlrNhgM3HvvvXz00Ufk5+dTuXJl/vrrL6Kjo4us+/777/PXX3/RoUMHnn76aerXr09sbCyLFy/mn3/+sTsdOWTIEL744gvWrVvHhx9+WKqfoxB3JJddHyaEcIrCl6Vfz9WXpSuKomRkZCgvv/yyEh4erri5uSm1a9dWPv74Y9sl0QVycnKUF154QQkMDFS8vLyUnj17KufPny9yqbaiKEpcXJwyatQoJSIiQnFzc1NCQ0OVzp07K999951tndJclv7KK68oYWFhioeHh9K+fXtl69atSocOHZQOHTrYrZudna1MmDBBqV69uu24jz76qHLq1CnbOiaTSfn444+VevXqKe7u7kpwcLDSrVs3Zffu3Xb7GT58uOLr66v4+Pgo/fv3V+Lj4695WXpCQkKRui9cuKA88sgjip+fn+Lr66v069dPiYmJKfbndfbsWWXIkCFKcHCwotPplBo1aiijRo1ScnNzi+y3YcOGilqtVi5cuHDdn5sQQlFUinJVO6sQQohyoXnz5gQEBLB27VpXlyLEbU/68AghRDm0a9cu9u3bx5AhQ1xdihDlgrTwCCFEOXLo0CF2797N1KlTSUxM5PTp0+j1eleXJcRtT1p4hBCiHFmyZAlPPfUU+fn5zJ8/X8KOECUkLTxCCCGEqPCkhUcIIYQQFZ4EHi
GEEEJUeHfcwIMWi4WYmBh8fHxuaUZkIYQQQpQdRVHIyMggPDz8puaNu+MCT0xMDBEREa4uQwghhBA34fz581SpUqX
2023-01-06 03:02:47 +01:00
"text/plain": [
"<Figure size 640x480 with 1 Axes>"
]
},
"metadata": {},
"output_type": "display_data"
}
],
"source": [
"import matplotlib.pyplot as plt\n",
2023-01-10 17:15:16 +01:00
"plt.plot(vggr.history[\"accuracy\"])\n",
"plt.plot(vggr.history['val_accuracy'])\n",
"plt.plot(vggr.history['loss'])\n",
"plt.plot(vggr.history['val_loss'])\n",
2023-01-06 03:02:47 +01:00
"plt.title(\"Model accuracy\")\n",
"plt.ylabel(\"Value\")\n",
"plt.xlabel(\"Epoch\")\n",
"plt.legend([\"Accuracy\",\"Validation Accuracy\",\"Loss\",\"Validation Loss\"])\n",
"plt.show()"
]
},
2023-01-06 13:34:05 +01:00
{
"cell_type": "code",
2023-01-10 17:15:16 +01:00
"execution_count": 52,
2023-01-06 13:34:05 +01:00
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
2023-01-10 17:15:16 +01:00
"8/8 [==============================] - 29s 4s/step - loss: 0.3817 - accuracy: 0.8633\n"
2023-01-06 13:34:05 +01:00
]
},
{
"data": {
"text/plain": [
2023-01-10 17:15:16 +01:00
"[0.38167834281921387, 0.86328125]"
2023-01-06 13:34:05 +01:00
]
},
2023-01-10 17:15:16 +01:00
"execution_count": 52,
2023-01-06 13:34:05 +01:00
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"model.evaluate(test_ds)"
]
},
2023-01-06 03:02:47 +01:00
{
"attachments": {},
"cell_type": "markdown",
"metadata": {},
"source": [
2023-01-10 17:15:16 +01:00
"## ResNet101V2"
2023-01-06 03:02:47 +01:00
]
},
{
"cell_type": "code",
2023-01-10 17:15:16 +01:00
"execution_count": 53,
2023-01-06 13:34:05 +01:00
"metadata": {},
2023-01-07 22:15:23 +01:00
"outputs": [],
2023-01-06 13:34:05 +01:00
"source": [
"from keras.layers import Input, Lambda, Dense, Flatten\n",
"from keras.models import Model\n",
"from keras.preprocessing import image\n",
"from keras.preprocessing.image import ImageDataGenerator\n",
"from keras.models import Sequential\n",
"import numpy as np\n",
"from glob import glob\n",
"import matplotlib.pyplot as plt\n",
2023-01-07 22:15:23 +01:00
"import ssl\n",
"ssl._create_default_https_context = ssl._create_unverified_context\n",
2023-01-10 17:15:16 +01:00
"from keras.applications import ResNet101V2\n",
2023-01-06 13:34:05 +01:00
"\n",
"# re-size all the images to this\n",
"IMAGE_SIZE = [224, 224]\n",
"\n",
"# add preprocessing layer to the front of resnet\n",
2023-01-10 17:15:16 +01:00
"resnet = ResNet101V2(input_shape=IMAGE_SIZE + [3], weights='imagenet', include_top=False)\n",
2023-01-06 13:34:05 +01:00
"\n",
"# don't train existing weights\n",
"for layer in resnet.layers:\n",
" layer.trainable = False\n",
" \n",
" # useful for getting number of classes\n",
"classes = 5\n",
" \n",
"\n",
"# our layers - you can add more if you want\n",
"x = Flatten()(resnet.output)\n",
"# x = Dense(1000, activation='relu')(x)\n",
"prediction = Dense(5, activation='softmax')(x)"
]
},
{
"cell_type": "code",
2023-01-10 17:15:16 +01:00
"execution_count": 54,
2023-01-06 03:02:47 +01:00
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
2023-01-10 17:15:16 +01:00
"Model: \"model_4\"\n",
2023-01-06 03:02:47 +01:00
"__________________________________________________________________________________________________\n",
" Layer (type) Output Shape Param # Connected to \n",
"==================================================================================================\n",
2023-01-10 17:15:16 +01:00
" input_5 (InputLayer) [(None, 224, 224, 3 0 [] \n",
2023-01-06 03:02:47 +01:00
" )] \n",
" \n",
2023-01-10 17:15:16 +01:00
" conv1_pad (ZeroPadding2D) (None, 230, 230, 3) 0 ['input_5[0][0]'] \n",
2023-01-06 03:02:47 +01:00
" \n",
" conv1_conv (Conv2D) (None, 112, 112, 64 9472 ['conv1_pad[0][0]'] \n",
" ) \n",
" \n",
2023-01-10 17:15:16 +01:00
" pool1_pad (ZeroPadding2D) (None, 114, 114, 64 0 ['conv1_conv[0][0]'] \n",
2023-01-06 03:02:47 +01:00
" ) \n",
" \n",
2023-01-10 17:15:16 +01:00
" pool1_pool (MaxPooling2D) (None, 56, 56, 64) 0 ['pool1_pad[0][0]'] \n",
2023-01-06 03:02:47 +01:00
" \n",
2023-01-10 17:15:16 +01:00
" conv2_block1_preact_bn (BatchN (None, 56, 56, 64) 256 ['pool1_pool[0][0]'] \n",
" ormalization) \n",
2023-01-06 03:02:47 +01:00
" \n",
2023-01-10 17:15:16 +01:00
" conv2_block1_preact_relu (Acti (None, 56, 56, 64) 0 ['conv2_block1_preact_bn[0][0]'] \n",
" vation) \n",
2023-01-06 03:02:47 +01:00
" \n",
2023-01-10 17:15:16 +01:00
" conv2_block1_1_conv (Conv2D) (None, 56, 56, 64) 4096 ['conv2_block1_preact_relu[0][0]'\n",
" ] \n",
2023-01-06 03:02:47 +01:00
" \n",
" conv2_block1_1_bn (BatchNormal (None, 56, 56, 64) 256 ['conv2_block1_1_conv[0][0]'] \n",
" ization) \n",
" \n",
" conv2_block1_1_relu (Activatio (None, 56, 56, 64) 0 ['conv2_block1_1_bn[0][0]'] \n",
" n) \n",
" \n",
2023-01-10 17:15:16 +01:00
" conv2_block1_2_pad (ZeroPaddin (None, 58, 58, 64) 0 ['conv2_block1_1_relu[0][0]'] \n",
" g2D) \n",
" \n",
" conv2_block1_2_conv (Conv2D) (None, 56, 56, 64) 36864 ['conv2_block1_2_pad[0][0]'] \n",
2023-01-06 03:02:47 +01:00
" \n",
" conv2_block1_2_bn (BatchNormal (None, 56, 56, 64) 256 ['conv2_block1_2_conv[0][0]'] \n",
" ization) \n",
" \n",
" conv2_block1_2_relu (Activatio (None, 56, 56, 64) 0 ['conv2_block1_2_bn[0][0]'] \n",
" n) \n",
" \n",
2023-01-10 17:15:16 +01:00
" conv2_block1_0_conv (Conv2D) (None, 56, 56, 256) 16640 ['conv2_block1_preact_relu[0][0]'\n",
" ] \n",
2023-01-06 03:02:47 +01:00
" \n",
" conv2_block1_3_conv (Conv2D) (None, 56, 56, 256) 16640 ['conv2_block1_2_relu[0][0]'] \n",
" \n",
2023-01-10 17:15:16 +01:00
" conv2_block1_out (Add) (None, 56, 56, 256) 0 ['conv2_block1_0_conv[0][0]', \n",
" 'conv2_block1_3_conv[0][0]'] \n",
2023-01-06 03:02:47 +01:00
" \n",
2023-01-10 17:15:16 +01:00
" conv2_block2_preact_bn (BatchN (None, 56, 56, 256) 1024 ['conv2_block1_out[0][0]'] \n",
" ormalization) \n",
2023-01-06 03:02:47 +01:00
" \n",
2023-01-10 17:15:16 +01:00
" conv2_block2_preact_relu (Acti (None, 56, 56, 256) 0 ['conv2_block2_preact_bn[0][0]'] \n",
" vation) \n",
2023-01-06 03:02:47 +01:00
" \n",
2023-01-10 17:15:16 +01:00
" conv2_block2_1_conv (Conv2D) (None, 56, 56, 64) 16384 ['conv2_block2_preact_relu[0][0]'\n",
" ] \n",
2023-01-06 03:02:47 +01:00
" \n",
" conv2_block2_1_bn (BatchNormal (None, 56, 56, 64) 256 ['conv2_block2_1_conv[0][0]'] \n",
" ization) \n",
" \n",
" conv2_block2_1_relu (Activatio (None, 56, 56, 64) 0 ['conv2_block2_1_bn[0][0]'] \n",
" n) \n",
" \n",
2023-01-10 17:15:16 +01:00
" conv2_block2_2_pad (ZeroPaddin (None, 58, 58, 64) 0 ['conv2_block2_1_relu[0][0]'] \n",
" g2D) \n",
" \n",
" conv2_block2_2_conv (Conv2D) (None, 56, 56, 64) 36864 ['conv2_block2_2_pad[0][0]'] \n",
2023-01-06 03:02:47 +01:00
" \n",
" conv2_block2_2_bn (BatchNormal (None, 56, 56, 64) 256 ['conv2_block2_2_conv[0][0]'] \n",
" ization) \n",
" \n",
" conv2_block2_2_relu (Activatio (None, 56, 56, 64) 0 ['conv2_block2_2_bn[0][0]'] \n",
" n) \n",
" \n",
" conv2_block2_3_conv (Conv2D) (None, 56, 56, 256) 16640 ['conv2_block2_2_relu[0][0]'] \n",
" \n",
2023-01-10 17:15:16 +01:00
" conv2_block2_out (Add) (None, 56, 56, 256) 0 ['conv2_block1_out[0][0]', \n",
" 'conv2_block2_3_conv[0][0]'] \n",
2023-01-06 03:02:47 +01:00
" \n",
2023-01-10 17:15:16 +01:00
" conv2_block3_preact_bn (BatchN (None, 56, 56, 256) 1024 ['conv2_block2_out[0][0]'] \n",
" ormalization) \n",
2023-01-06 03:02:47 +01:00
" \n",
2023-01-10 17:15:16 +01:00
" conv2_block3_preact_relu (Acti (None, 56, 56, 256) 0 ['conv2_block3_preact_bn[0][0]'] \n",
" vation) \n",
2023-01-06 03:02:47 +01:00
" \n",
2023-01-10 17:15:16 +01:00
" conv2_block3_1_conv (Conv2D) (None, 56, 56, 64) 16384 ['conv2_block3_preact_relu[0][0]'\n",
" ] \n",
2023-01-06 03:02:47 +01:00
" \n",
" conv2_block3_1_bn (BatchNormal (None, 56, 56, 64) 256 ['conv2_block3_1_conv[0][0]'] \n",
" ization) \n",
" \n",
" conv2_block3_1_relu (Activatio (None, 56, 56, 64) 0 ['conv2_block3_1_bn[0][0]'] \n",
" n) \n",
" \n",
2023-01-10 17:15:16 +01:00
" conv2_block3_2_pad (ZeroPaddin (None, 58, 58, 64) 0 ['conv2_block3_1_relu[0][0]'] \n",
" g2D) \n",
2023-01-06 03:02:47 +01:00
" \n",
2023-01-10 17:15:16 +01:00
" conv2_block3_2_conv (Conv2D) (None, 28, 28, 64) 36864 ['conv2_block3_2_pad[0][0]'] \n",
" \n",
" conv2_block3_2_bn (BatchNormal (None, 28, 28, 64) 256 ['conv2_block3_2_conv[0][0]'] \n",
2023-01-06 03:02:47 +01:00
" ization) \n",
" \n",
2023-01-10 17:15:16 +01:00
" conv2_block3_2_relu (Activatio (None, 28, 28, 64) 0 ['conv2_block3_2_bn[0][0]'] \n",
2023-01-06 03:02:47 +01:00
" n) \n",
" \n",
2023-01-10 17:15:16 +01:00
" max_pooling2d_3 (MaxPooling2D) (None, 28, 28, 256) 0 ['conv2_block2_out[0][0]'] \n",
2023-01-06 03:02:47 +01:00
" \n",
2023-01-10 17:15:16 +01:00
" conv2_block3_3_conv (Conv2D) (None, 28, 28, 256) 16640 ['conv2_block3_2_relu[0][0]'] \n",
" \n",
" conv2_block3_out (Add) (None, 28, 28, 256) 0 ['max_pooling2d_3[0][0]', \n",
" 'conv2_block3_3_conv[0][0]'] \n",
2023-01-06 03:02:47 +01:00
" \n",
2023-01-10 17:15:16 +01:00
" conv3_block1_preact_bn (BatchN (None, 28, 28, 256) 1024 ['conv2_block3_out[0][0]'] \n",
" ormalization) \n",
2023-01-06 03:02:47 +01:00
" \n",
2023-01-10 17:15:16 +01:00
" conv3_block1_preact_relu (Acti (None, 28, 28, 256) 0 ['conv3_block1_preact_bn[0][0]'] \n",
" vation) \n",
2023-01-06 03:02:47 +01:00
" \n",
2023-01-10 17:15:16 +01:00
" conv3_block1_1_conv (Conv2D) (None, 28, 28, 128) 32768 ['conv3_block1_preact_relu[0][0]'\n",
" ] \n",
2023-01-06 03:02:47 +01:00
" \n",
" conv3_block1_1_bn (BatchNormal (None, 28, 28, 128) 512 ['conv3_block1_1_conv[0][0]'] \n",
" ization) \n",
" \n",
" conv3_block1_1_relu (Activatio (None, 28, 28, 128) 0 ['conv3_block1_1_bn[0][0]'] \n",
" n) \n",
" \n",
2023-01-10 17:15:16 +01:00
" conv3_block1_2_pad (ZeroPaddin (None, 30, 30, 128) 0 ['conv3_block1_1_relu[0][0]'] \n",
" g2D) \n",
" \n",
" conv3_block1_2_conv (Conv2D) (None, 28, 28, 128) 147456 ['conv3_block1_2_pad[0][0]'] \n",
2023-01-06 03:02:47 +01:00
" \n",
" conv3_block1_2_bn (BatchNormal (None, 28, 28, 128) 512 ['conv3_block1_2_conv[0][0]'] \n",
" ization) \n",
" \n",
" conv3_block1_2_relu (Activatio (None, 28, 28, 128) 0 ['conv3_block1_2_bn[0][0]'] \n",
" n) \n",
" \n",
2023-01-10 17:15:16 +01:00
" conv3_block1_0_conv (Conv2D) (None, 28, 28, 512) 131584 ['conv3_block1_preact_relu[0][0]'\n",
" ] \n",
2023-01-06 03:02:47 +01:00
" \n",
" conv3_block1_3_conv (Conv2D) (None, 28, 28, 512) 66048 ['conv3_block1_2_relu[0][0]'] \n",
" \n",
2023-01-10 17:15:16 +01:00
" conv3_block1_out (Add) (None, 28, 28, 512) 0 ['conv3_block1_0_conv[0][0]', \n",
" 'conv3_block1_3_conv[0][0]'] \n",
2023-01-06 03:02:47 +01:00
" \n",
2023-01-10 17:15:16 +01:00
" conv3_block2_preact_bn (BatchN (None, 28, 28, 512) 2048 ['conv3_block1_out[0][0]'] \n",
" ormalization) \n",
2023-01-06 03:02:47 +01:00
" \n",
2023-01-10 17:15:16 +01:00
" conv3_block2_preact_relu (Acti (None, 28, 28, 512) 0 ['conv3_block2_preact_bn[0][0]'] \n",
" vation) \n",
2023-01-06 03:02:47 +01:00
" \n",
2023-01-10 17:15:16 +01:00
" conv3_block2_1_conv (Conv2D) (None, 28, 28, 128) 65536 ['conv3_block2_preact_relu[0][0]'\n",
" ] \n",
2023-01-06 03:02:47 +01:00
" \n",
" conv3_block2_1_bn (BatchNormal (None, 28, 28, 128) 512 ['conv3_block2_1_conv[0][0]'] \n",
" ization) \n",
" \n",
" conv3_block2_1_relu (Activatio (None, 28, 28, 128) 0 ['conv3_block2_1_bn[0][0]'] \n",
" n) \n",
" \n",
2023-01-10 17:15:16 +01:00
" conv3_block2_2_pad (ZeroPaddin (None, 30, 30, 128) 0 ['conv3_block2_1_relu[0][0]'] \n",
" g2D) \n",
" \n",
" conv3_block2_2_conv (Conv2D) (None, 28, 28, 128) 147456 ['conv3_block2_2_pad[0][0]'] \n",
2023-01-06 03:02:47 +01:00
" \n",
" conv3_block2_2_bn (BatchNormal (None, 28, 28, 128) 512 ['conv3_block2_2_conv[0][0]'] \n",
" ization) \n",
" \n",
" conv3_block2_2_relu (Activatio (None, 28, 28, 128) 0 ['conv3_block2_2_bn[0][0]'] \n",
" n) \n",
" \n",
" conv3_block2_3_conv (Conv2D) (None, 28, 28, 512) 66048 ['conv3_block2_2_relu[0][0]'] \n",
" \n",
2023-01-10 17:15:16 +01:00
" conv3_block2_out (Add) (None, 28, 28, 512) 0 ['conv3_block1_out[0][0]', \n",
" 'conv3_block2_3_conv[0][0]'] \n",
2023-01-06 03:02:47 +01:00
" \n",
2023-01-10 17:15:16 +01:00
" conv3_block3_preact_bn (BatchN (None, 28, 28, 512) 2048 ['conv3_block2_out[0][0]'] \n",
" ormalization) \n",
2023-01-06 03:02:47 +01:00
" \n",
2023-01-10 17:15:16 +01:00
" conv3_block3_preact_relu (Acti (None, 28, 28, 512) 0 ['conv3_block3_preact_bn[0][0]'] \n",
" vation) \n",
2023-01-06 03:02:47 +01:00
" \n",
2023-01-10 17:15:16 +01:00
" conv3_block3_1_conv (Conv2D) (None, 28, 28, 128) 65536 ['conv3_block3_preact_relu[0][0]'\n",
" ] \n",
2023-01-06 03:02:47 +01:00
" \n",
" conv3_block3_1_bn (BatchNormal (None, 28, 28, 128) 512 ['conv3_block3_1_conv[0][0]'] \n",
" ization) \n",
" \n",
" conv3_block3_1_relu (Activatio (None, 28, 28, 128) 0 ['conv3_block3_1_bn[0][0]'] \n",
" n) \n",
" \n",
2023-01-10 17:15:16 +01:00
" conv3_block3_2_pad (ZeroPaddin (None, 30, 30, 128) 0 ['conv3_block3_1_relu[0][0]'] \n",
" g2D) \n",
" \n",
" conv3_block3_2_conv (Conv2D) (None, 28, 28, 128) 147456 ['conv3_block3_2_pad[0][0]'] \n",
2023-01-06 03:02:47 +01:00
" \n",
" conv3_block3_2_bn (BatchNormal (None, 28, 28, 128) 512 ['conv3_block3_2_conv[0][0]'] \n",
" ization) \n",
" \n",
" conv3_block3_2_relu (Activatio (None, 28, 28, 128) 0 ['conv3_block3_2_bn[0][0]'] \n",
" n) \n",
" \n",
" conv3_block3_3_conv (Conv2D) (None, 28, 28, 512) 66048 ['conv3_block3_2_relu[0][0]'] \n",
" \n",
2023-01-10 17:15:16 +01:00
" conv3_block3_out (Add) (None, 28, 28, 512) 0 ['conv3_block2_out[0][0]', \n",
" 'conv3_block3_3_conv[0][0]'] \n",
2023-01-06 03:02:47 +01:00
" \n",
2023-01-10 17:15:16 +01:00
" conv3_block4_preact_bn (BatchN (None, 28, 28, 512) 2048 ['conv3_block3_out[0][0]'] \n",
" ormalization) \n",
2023-01-06 03:02:47 +01:00
" \n",
2023-01-10 17:15:16 +01:00
" conv3_block4_preact_relu (Acti (None, 28, 28, 512) 0 ['conv3_block4_preact_bn[0][0]'] \n",
" vation) \n",
2023-01-06 03:02:47 +01:00
" \n",
2023-01-10 17:15:16 +01:00
" conv3_block4_1_conv (Conv2D) (None, 28, 28, 128) 65536 ['conv3_block4_preact_relu[0][0]'\n",
" ] \n",
2023-01-06 03:02:47 +01:00
" \n",
" conv3_block4_1_bn (BatchNormal (None, 28, 28, 128) 512 ['conv3_block4_1_conv[0][0]'] \n",
" ization) \n",
" \n",
" conv3_block4_1_relu (Activatio (None, 28, 28, 128) 0 ['conv3_block4_1_bn[0][0]'] \n",
" n) \n",
" \n",
2023-01-10 17:15:16 +01:00
" conv3_block4_2_pad (ZeroPaddin (None, 30, 30, 128) 0 ['conv3_block4_1_relu[0][0]'] \n",
" g2D) \n",
2023-01-06 03:02:47 +01:00
" \n",
2023-01-10 17:15:16 +01:00
" conv3_block4_2_conv (Conv2D) (None, 14, 14, 128) 147456 ['conv3_block4_2_pad[0][0]'] \n",
" \n",
" conv3_block4_2_bn (BatchNormal (None, 14, 14, 128) 512 ['conv3_block4_2_conv[0][0]'] \n",
2023-01-06 03:02:47 +01:00
" ization) \n",
" \n",
2023-01-10 17:15:16 +01:00
" conv3_block4_2_relu (Activatio (None, 14, 14, 128) 0 ['conv3_block4_2_bn[0][0]'] \n",
2023-01-06 03:02:47 +01:00
" n) \n",
" \n",
2023-01-10 17:15:16 +01:00
" max_pooling2d_4 (MaxPooling2D) (None, 14, 14, 512) 0 ['conv3_block3_out[0][0]'] \n",
2023-01-06 03:02:47 +01:00
" \n",
2023-01-10 17:15:16 +01:00
" conv3_block4_3_conv (Conv2D) (None, 14, 14, 512) 66048 ['conv3_block4_2_relu[0][0]'] \n",
" \n",
" conv3_block4_out (Add) (None, 14, 14, 512) 0 ['max_pooling2d_4[0][0]', \n",
" 'conv3_block4_3_conv[0][0]'] \n",
2023-01-06 03:02:47 +01:00
" \n",
2023-01-10 17:15:16 +01:00
" conv4_block1_preact_bn (BatchN (None, 14, 14, 512) 2048 ['conv3_block4_out[0][0]'] \n",
" ormalization) \n",
2023-01-06 03:02:47 +01:00
" \n",
2023-01-10 17:15:16 +01:00
" conv4_block1_preact_relu (Acti (None, 14, 14, 512) 0 ['conv4_block1_preact_bn[0][0]'] \n",
" vation) \n",
2023-01-06 03:02:47 +01:00
" \n",
2023-01-10 17:15:16 +01:00
" conv4_block1_1_conv (Conv2D) (None, 14, 14, 256) 131072 ['conv4_block1_preact_relu[0][0]'\n",
" ] \n",
2023-01-06 03:02:47 +01:00
" \n",
" conv4_block1_1_bn (BatchNormal (None, 14, 14, 256) 1024 ['conv4_block1_1_conv[0][0]'] \n",
" ization) \n",
" \n",
" conv4_block1_1_relu (Activatio (None, 14, 14, 256) 0 ['conv4_block1_1_bn[0][0]'] \n",
" n) \n",
" \n",
2023-01-10 17:15:16 +01:00
" conv4_block1_2_pad (ZeroPaddin (None, 16, 16, 256) 0 ['conv4_block1_1_relu[0][0]'] \n",
" g2D) \n",
" \n",
" conv4_block1_2_conv (Conv2D) (None, 14, 14, 256) 589824 ['conv4_block1_2_pad[0][0]'] \n",
2023-01-06 03:02:47 +01:00
" \n",
" conv4_block1_2_bn (BatchNormal (None, 14, 14, 256) 1024 ['conv4_block1_2_conv[0][0]'] \n",
" ization) \n",
" \n",
" conv4_block1_2_relu (Activatio (None, 14, 14, 256) 0 ['conv4_block1_2_bn[0][0]'] \n",
" n) \n",
" \n",
2023-01-10 17:15:16 +01:00
" conv4_block1_0_conv (Conv2D) (None, 14, 14, 1024 525312 ['conv4_block1_preact_relu[0][0]'\n",
" ) ] \n",
2023-01-06 03:02:47 +01:00
" \n",
" conv4_block1_3_conv (Conv2D) (None, 14, 14, 1024 263168 ['conv4_block1_2_relu[0][0]'] \n",
" ) \n",
" \n",
2023-01-10 17:15:16 +01:00
" conv4_block1_out (Add) (None, 14, 14, 1024 0 ['conv4_block1_0_conv[0][0]', \n",
" ) 'conv4_block1_3_conv[0][0]'] \n",
2023-01-06 03:02:47 +01:00
" \n",
2023-01-10 17:15:16 +01:00
" conv4_block2_preact_bn (BatchN (None, 14, 14, 1024 4096 ['conv4_block1_out[0][0]'] \n",
" ormalization) ) \n",
2023-01-06 03:02:47 +01:00
" \n",
2023-01-10 17:15:16 +01:00
" conv4_block2_preact_relu (Acti (None, 14, 14, 1024 0 ['conv4_block2_preact_bn[0][0]'] \n",
" vation) ) \n",
2023-01-06 03:02:47 +01:00
" \n",
2023-01-10 17:15:16 +01:00
" conv4_block2_1_conv (Conv2D) (None, 14, 14, 256) 262144 ['conv4_block2_preact_relu[0][0]'\n",
" ] \n",
2023-01-06 03:02:47 +01:00
" \n",
" conv4_block2_1_bn (BatchNormal (None, 14, 14, 256) 1024 ['conv4_block2_1_conv[0][0]'] \n",
" ization) \n",
" \n",
" conv4_block2_1_relu (Activatio (None, 14, 14, 256) 0 ['conv4_block2_1_bn[0][0]'] \n",
" n) \n",
" \n",
2023-01-10 17:15:16 +01:00
" conv4_block2_2_pad (ZeroPaddin (None, 16, 16, 256) 0 ['conv4_block2_1_relu[0][0]'] \n",
" g2D) \n",
" \n",
" conv4_block2_2_conv (Conv2D) (None, 14, 14, 256) 589824 ['conv4_block2_2_pad[0][0]'] \n",
2023-01-06 03:02:47 +01:00
" \n",
" conv4_block2_2_bn (BatchNormal (None, 14, 14, 256) 1024 ['conv4_block2_2_conv[0][0]'] \n",
" ization) \n",
" \n",
" conv4_block2_2_relu (Activatio (None, 14, 14, 256) 0 ['conv4_block2_2_bn[0][0]'] \n",
" n) \n",
" \n",
" conv4_block2_3_conv (Conv2D) (None, 14, 14, 1024 263168 ['conv4_block2_2_relu[0][0]'] \n",
" ) \n",
" \n",
2023-01-10 17:15:16 +01:00
" conv4_block2_out (Add) (None, 14, 14, 1024 0 ['conv4_block1_out[0][0]', \n",
" ) 'conv4_block2_3_conv[0][0]'] \n",
2023-01-06 03:02:47 +01:00
" \n",
2023-01-10 17:15:16 +01:00
" conv4_block3_preact_bn (BatchN (None, 14, 14, 1024 4096 ['conv4_block2_out[0][0]'] \n",
" ormalization) ) \n",
2023-01-06 03:02:47 +01:00
" \n",
2023-01-10 17:15:16 +01:00
" conv4_block3_preact_relu (Acti (None, 14, 14, 1024 0 ['conv4_block3_preact_bn[0][0]'] \n",
" vation) ) \n",
2023-01-06 03:02:47 +01:00
" \n",
2023-01-10 17:15:16 +01:00
" conv4_block3_1_conv (Conv2D) (None, 14, 14, 256) 262144 ['conv4_block3_preact_relu[0][0]'\n",
" ] \n",
2023-01-06 03:02:47 +01:00
" \n",
" conv4_block3_1_bn (BatchNormal (None, 14, 14, 256) 1024 ['conv4_block3_1_conv[0][0]'] \n",
" ization) \n",
" \n",
" conv4_block3_1_relu (Activatio (None, 14, 14, 256) 0 ['conv4_block3_1_bn[0][0]'] \n",
" n) \n",
" \n",
2023-01-10 17:15:16 +01:00
" conv4_block3_2_pad (ZeroPaddin (None, 16, 16, 256) 0 ['conv4_block3_1_relu[0][0]'] \n",
" g2D) \n",
" \n",
" conv4_block3_2_conv (Conv2D) (None, 14, 14, 256) 589824 ['conv4_block3_2_pad[0][0]'] \n",
2023-01-06 03:02:47 +01:00
" \n",
" conv4_block3_2_bn (BatchNormal (None, 14, 14, 256) 1024 ['conv4_block3_2_conv[0][0]'] \n",
" ization) \n",
" \n",
" conv4_block3_2_relu (Activatio (None, 14, 14, 256) 0 ['conv4_block3_2_bn[0][0]'] \n",
" n) \n",
" \n",
" conv4_block3_3_conv (Conv2D) (None, 14, 14, 1024 263168 ['conv4_block3_2_relu[0][0]'] \n",
" ) \n",
" \n",
2023-01-10 17:15:16 +01:00
" conv4_block3_out (Add) (None, 14, 14, 1024 0 ['conv4_block2_out[0][0]', \n",
" ) 'conv4_block3_3_conv[0][0]'] \n",
2023-01-06 03:02:47 +01:00
" \n",
2023-01-10 17:15:16 +01:00
" conv4_block4_preact_bn (BatchN (None, 14, 14, 1024 4096 ['conv4_block3_out[0][0]'] \n",
" ormalization) ) \n",
2023-01-06 03:02:47 +01:00
" \n",
2023-01-10 17:15:16 +01:00
" conv4_block4_preact_relu (Acti (None, 14, 14, 1024 0 ['conv4_block4_preact_bn[0][0]'] \n",
" vation) ) \n",
2023-01-06 03:02:47 +01:00
" \n",
2023-01-10 17:15:16 +01:00
" conv4_block4_1_conv (Conv2D) (None, 14, 14, 256) 262144 ['conv4_block4_preact_relu[0][0]'\n",
" ] \n",
2023-01-06 03:02:47 +01:00
" \n",
" conv4_block4_1_bn (BatchNormal (None, 14, 14, 256) 1024 ['conv4_block4_1_conv[0][0]'] \n",
" ization) \n",
" \n",
" conv4_block4_1_relu (Activatio (None, 14, 14, 256) 0 ['conv4_block4_1_bn[0][0]'] \n",
" n) \n",
" \n",
2023-01-10 17:15:16 +01:00
" conv4_block4_2_pad (ZeroPaddin (None, 16, 16, 256) 0 ['conv4_block4_1_relu[0][0]'] \n",
" g2D) \n",
" \n",
" conv4_block4_2_conv (Conv2D) (None, 14, 14, 256) 589824 ['conv4_block4_2_pad[0][0]'] \n",
2023-01-06 03:02:47 +01:00
" \n",
" conv4_block4_2_bn (BatchNormal (None, 14, 14, 256) 1024 ['conv4_block4_2_conv[0][0]'] \n",
" ization) \n",
" \n",
" conv4_block4_2_relu (Activatio (None, 14, 14, 256) 0 ['conv4_block4_2_bn[0][0]'] \n",
" n) \n",
" \n",
" conv4_block4_3_conv (Conv2D) (None, 14, 14, 1024 263168 ['conv4_block4_2_relu[0][0]'] \n",
" ) \n",
" \n",
2023-01-10 17:15:16 +01:00
" conv4_block4_out (Add) (None, 14, 14, 1024 0 ['conv4_block3_out[0][0]', \n",
" ) 'conv4_block4_3_conv[0][0]'] \n",
2023-01-06 03:02:47 +01:00
" \n",
2023-01-10 17:15:16 +01:00
" conv4_block5_preact_bn (BatchN (None, 14, 14, 1024 4096 ['conv4_block4_out[0][0]'] \n",
" ormalization) ) \n",
2023-01-06 03:02:47 +01:00
" \n",
2023-01-10 17:15:16 +01:00
" conv4_block5_preact_relu (Acti (None, 14, 14, 1024 0 ['conv4_block5_preact_bn[0][0]'] \n",
" vation) ) \n",
2023-01-06 03:02:47 +01:00
" \n",
2023-01-10 17:15:16 +01:00
" conv4_block5_1_conv (Conv2D) (None, 14, 14, 256) 262144 ['conv4_block5_preact_relu[0][0]'\n",
" ] \n",
2023-01-06 03:02:47 +01:00
" \n",
" conv4_block5_1_bn (BatchNormal (None, 14, 14, 256) 1024 ['conv4_block5_1_conv[0][0]'] \n",
" ization) \n",
" \n",
" conv4_block5_1_relu (Activatio (None, 14, 14, 256) 0 ['conv4_block5_1_bn[0][0]'] \n",
" n) \n",
" \n",
2023-01-10 17:15:16 +01:00
" conv4_block5_2_pad (ZeroPaddin (None, 16, 16, 256) 0 ['conv4_block5_1_relu[0][0]'] \n",
" g2D) \n",
" \n",
" conv4_block5_2_conv (Conv2D) (None, 14, 14, 256) 589824 ['conv4_block5_2_pad[0][0]'] \n",
2023-01-06 03:02:47 +01:00
" \n",
" conv4_block5_2_bn (BatchNormal (None, 14, 14, 256) 1024 ['conv4_block5_2_conv[0][0]'] \n",
" ization) \n",
" \n",
" conv4_block5_2_relu (Activatio (None, 14, 14, 256) 0 ['conv4_block5_2_bn[0][0]'] \n",
" n) \n",
" \n",
" conv4_block5_3_conv (Conv2D) (None, 14, 14, 1024 263168 ['conv4_block5_2_relu[0][0]'] \n",
" ) \n",
" \n",
2023-01-10 17:15:16 +01:00
" conv4_block5_out (Add) (None, 14, 14, 1024 0 ['conv4_block4_out[0][0]', \n",
" ) 'conv4_block5_3_conv[0][0]'] \n",
2023-01-06 03:02:47 +01:00
" \n",
2023-01-10 17:15:16 +01:00
" conv4_block6_preact_bn (BatchN (None, 14, 14, 1024 4096 ['conv4_block5_out[0][0]'] \n",
" ormalization) ) \n",
2023-01-06 03:02:47 +01:00
" \n",
2023-01-10 17:15:16 +01:00
" conv4_block6_preact_relu (Acti (None, 14, 14, 1024 0 ['conv4_block6_preact_bn[0][0]'] \n",
" vation) ) \n",
2023-01-06 03:02:47 +01:00
" \n",
2023-01-10 17:15:16 +01:00
" conv4_block6_1_conv (Conv2D) (None, 14, 14, 256) 262144 ['conv4_block6_preact_relu[0][0]'\n",
" ] \n",
2023-01-06 03:02:47 +01:00
" \n",
" conv4_block6_1_bn (BatchNormal (None, 14, 14, 256) 1024 ['conv4_block6_1_conv[0][0]'] \n",
" ization) \n",
" \n",
" conv4_block6_1_relu (Activatio (None, 14, 14, 256) 0 ['conv4_block6_1_bn[0][0]'] \n",
" n) \n",
" \n",
2023-01-10 17:15:16 +01:00
" conv4_block6_2_pad (ZeroPaddin (None, 16, 16, 256) 0 ['conv4_block6_1_relu[0][0]'] \n",
" g2D) \n",
" \n",
" conv4_block6_2_conv (Conv2D) (None, 14, 14, 256) 589824 ['conv4_block6_2_pad[0][0]'] \n",
2023-01-06 03:02:47 +01:00
" \n",
" conv4_block6_2_bn (BatchNormal (None, 14, 14, 256) 1024 ['conv4_block6_2_conv[0][0]'] \n",
" ization) \n",
" \n",
" conv4_block6_2_relu (Activatio (None, 14, 14, 256) 0 ['conv4_block6_2_bn[0][0]'] \n",
" n) \n",
" \n",
" conv4_block6_3_conv (Conv2D) (None, 14, 14, 1024 263168 ['conv4_block6_2_relu[0][0]'] \n",
" ) \n",
" \n",
2023-01-10 17:15:16 +01:00
" conv4_block6_out (Add) (None, 14, 14, 1024 0 ['conv4_block5_out[0][0]', \n",
" ) 'conv4_block6_3_conv[0][0]'] \n",
" \n",
" conv4_block7_preact_bn (BatchN (None, 14, 14, 1024 4096 ['conv4_block6_out[0][0]'] \n",
" ormalization) ) \n",
" \n",
" conv4_block7_preact_relu (Acti (None, 14, 14, 1024 0 ['conv4_block7_preact_bn[0][0]'] \n",
" vation) ) \n",
" \n",
" conv4_block7_1_conv (Conv2D) (None, 14, 14, 256) 262144 ['conv4_block7_preact_relu[0][0]'\n",
" ] \n",
" \n",
" conv4_block7_1_bn (BatchNormal (None, 14, 14, 256) 1024 ['conv4_block7_1_conv[0][0]'] \n",
" ization) \n",
" \n",
" conv4_block7_1_relu (Activatio (None, 14, 14, 256) 0 ['conv4_block7_1_bn[0][0]'] \n",
" n) \n",
" \n",
" conv4_block7_2_pad (ZeroPaddin (None, 16, 16, 256) 0 ['conv4_block7_1_relu[0][0]'] \n",
" g2D) \n",
" \n",
" conv4_block7_2_conv (Conv2D) (None, 14, 14, 256) 589824 ['conv4_block7_2_pad[0][0]'] \n",
" \n",
" conv4_block7_2_bn (BatchNormal (None, 14, 14, 256) 1024 ['conv4_block7_2_conv[0][0]'] \n",
" ization) \n",
" \n",
" conv4_block7_2_relu (Activatio (None, 14, 14, 256) 0 ['conv4_block7_2_bn[0][0]'] \n",
" n) \n",
" \n",
" conv4_block7_3_conv (Conv2D) (None, 14, 14, 1024 263168 ['conv4_block7_2_relu[0][0]'] \n",
" ) \n",
" \n",
" conv4_block7_out (Add) (None, 14, 14, 1024 0 ['conv4_block6_out[0][0]', \n",
" ) 'conv4_block7_3_conv[0][0]'] \n",
" \n",
" conv4_block8_preact_bn (BatchN (None, 14, 14, 1024 4096 ['conv4_block7_out[0][0]'] \n",
" ormalization) ) \n",
" \n",
" conv4_block8_preact_relu (Acti (None, 14, 14, 1024 0 ['conv4_block8_preact_bn[0][0]'] \n",
" vation) ) \n",
" \n",
" conv4_block8_1_conv (Conv2D) (None, 14, 14, 256) 262144 ['conv4_block8_preact_relu[0][0]'\n",
" ] \n",
" \n",
" conv4_block8_1_bn (BatchNormal (None, 14, 14, 256) 1024 ['conv4_block8_1_conv[0][0]'] \n",
" ization) \n",
" \n",
" conv4_block8_1_relu (Activatio (None, 14, 14, 256) 0 ['conv4_block8_1_bn[0][0]'] \n",
" n) \n",
" \n",
" conv4_block8_2_pad (ZeroPaddin (None, 16, 16, 256) 0 ['conv4_block8_1_relu[0][0]'] \n",
" g2D) \n",
" \n",
" conv4_block8_2_conv (Conv2D) (None, 14, 14, 256) 589824 ['conv4_block8_2_pad[0][0]'] \n",
" \n",
" conv4_block8_2_bn (BatchNormal (None, 14, 14, 256) 1024 ['conv4_block8_2_conv[0][0]'] \n",
" ization) \n",
" \n",
" conv4_block8_2_relu (Activatio (None, 14, 14, 256) 0 ['conv4_block8_2_bn[0][0]'] \n",
" n) \n",
" \n",
" conv4_block8_3_conv (Conv2D) (None, 14, 14, 1024 263168 ['conv4_block8_2_relu[0][0]'] \n",
" ) \n",
" \n",
" conv4_block8_out (Add) (None, 14, 14, 1024 0 ['conv4_block7_out[0][0]', \n",
" ) 'conv4_block8_3_conv[0][0]'] \n",
" \n",
" conv4_block9_preact_bn (BatchN (None, 14, 14, 1024 4096 ['conv4_block8_out[0][0]'] \n",
" ormalization) ) \n",
" \n",
" conv4_block9_preact_relu (Acti (None, 14, 14, 1024 0 ['conv4_block9_preact_bn[0][0]'] \n",
" vation) ) \n",
" \n",
" conv4_block9_1_conv (Conv2D) (None, 14, 14, 256) 262144 ['conv4_block9_preact_relu[0][0]'\n",
" ] \n",
" \n",
" conv4_block9_1_bn (BatchNormal (None, 14, 14, 256) 1024 ['conv4_block9_1_conv[0][0]'] \n",
" ization) \n",
" \n",
" conv4_block9_1_relu (Activatio (None, 14, 14, 256) 0 ['conv4_block9_1_bn[0][0]'] \n",
" n) \n",
" \n",
" conv4_block9_2_pad (ZeroPaddin (None, 16, 16, 256) 0 ['conv4_block9_1_relu[0][0]'] \n",
" g2D) \n",
" \n",
" conv4_block9_2_conv (Conv2D) (None, 14, 14, 256) 589824 ['conv4_block9_2_pad[0][0]'] \n",
" \n",
" conv4_block9_2_bn (BatchNormal (None, 14, 14, 256) 1024 ['conv4_block9_2_conv[0][0]'] \n",
" ization) \n",
" \n",
" conv4_block9_2_relu (Activatio (None, 14, 14, 256) 0 ['conv4_block9_2_bn[0][0]'] \n",
" n) \n",
" \n",
" conv4_block9_3_conv (Conv2D) (None, 14, 14, 1024 263168 ['conv4_block9_2_relu[0][0]'] \n",
" ) \n",
" \n",
" conv4_block9_out (Add) (None, 14, 14, 1024 0 ['conv4_block8_out[0][0]', \n",
" ) 'conv4_block9_3_conv[0][0]'] \n",
" \n",
" conv4_block10_preact_bn (Batch (None, 14, 14, 1024 4096 ['conv4_block9_out[0][0]'] \n",
" Normalization) ) \n",
" \n",
" conv4_block10_preact_relu (Act (None, 14, 14, 1024 0 ['conv4_block10_preact_bn[0][0]']\n",
" ivation) ) \n",
" \n",
" conv4_block10_1_conv (Conv2D) (None, 14, 14, 256) 262144 ['conv4_block10_preact_relu[0][0]\n",
" '] \n",
" \n",
" conv4_block10_1_bn (BatchNorma (None, 14, 14, 256) 1024 ['conv4_block10_1_conv[0][0]'] \n",
" lization) \n",
" \n",
" conv4_block10_1_relu (Activati (None, 14, 14, 256) 0 ['conv4_block10_1_bn[0][0]'] \n",
" on) \n",
" \n",
" conv4_block10_2_pad (ZeroPaddi (None, 16, 16, 256) 0 ['conv4_block10_1_relu[0][0]'] \n",
" ng2D) \n",
" \n",
" conv4_block10_2_conv (Conv2D) (None, 14, 14, 256) 589824 ['conv4_block10_2_pad[0][0]'] \n",
" \n",
" conv4_block10_2_bn (BatchNorma (None, 14, 14, 256) 1024 ['conv4_block10_2_conv[0][0]'] \n",
" lization) \n",
" \n",
" conv4_block10_2_relu (Activati (None, 14, 14, 256) 0 ['conv4_block10_2_bn[0][0]'] \n",
" on) \n",
" \n",
" conv4_block10_3_conv (Conv2D) (None, 14, 14, 1024 263168 ['conv4_block10_2_relu[0][0]'] \n",
" ) \n",
" \n",
" conv4_block10_out (Add) (None, 14, 14, 1024 0 ['conv4_block9_out[0][0]', \n",
" ) 'conv4_block10_3_conv[0][0]'] \n",
" \n",
" conv4_block11_preact_bn (Batch (None, 14, 14, 1024 4096 ['conv4_block10_out[0][0]'] \n",
" Normalization) ) \n",
" \n",
" conv4_block11_preact_relu (Act (None, 14, 14, 1024 0 ['conv4_block11_preact_bn[0][0]']\n",
" ivation) ) \n",
" \n",
" conv4_block11_1_conv (Conv2D) (None, 14, 14, 256) 262144 ['conv4_block11_preact_relu[0][0]\n",
" '] \n",
" \n",
" conv4_block11_1_bn (BatchNorma (None, 14, 14, 256) 1024 ['conv4_block11_1_conv[0][0]'] \n",
" lization) \n",
" \n",
" conv4_block11_1_relu (Activati (None, 14, 14, 256) 0 ['conv4_block11_1_bn[0][0]'] \n",
" on) \n",
" \n",
" conv4_block11_2_pad (ZeroPaddi (None, 16, 16, 256) 0 ['conv4_block11_1_relu[0][0]'] \n",
" ng2D) \n",
" \n",
" conv4_block11_2_conv (Conv2D) (None, 14, 14, 256) 589824 ['conv4_block11_2_pad[0][0]'] \n",
" \n",
" conv4_block11_2_bn (BatchNorma (None, 14, 14, 256) 1024 ['conv4_block11_2_conv[0][0]'] \n",
" lization) \n",
" \n",
" conv4_block11_2_relu (Activati (None, 14, 14, 256) 0 ['conv4_block11_2_bn[0][0]'] \n",
" on) \n",
" \n",
" conv4_block11_3_conv (Conv2D) (None, 14, 14, 1024 263168 ['conv4_block11_2_relu[0][0]'] \n",
" ) \n",
" \n",
" conv4_block11_out (Add) (None, 14, 14, 1024 0 ['conv4_block10_out[0][0]', \n",
" ) 'conv4_block11_3_conv[0][0]'] \n",
" \n",
" conv4_block12_preact_bn (Batch (None, 14, 14, 1024 4096 ['conv4_block11_out[0][0]'] \n",
" Normalization) ) \n",
" \n",
" conv4_block12_preact_relu (Act (None, 14, 14, 1024 0 ['conv4_block12_preact_bn[0][0]']\n",
" ivation) ) \n",
" \n",
" conv4_block12_1_conv (Conv2D) (None, 14, 14, 256) 262144 ['conv4_block12_preact_relu[0][0]\n",
" '] \n",
" \n",
" conv4_block12_1_bn (BatchNorma (None, 14, 14, 256) 1024 ['conv4_block12_1_conv[0][0]'] \n",
" lization) \n",
" \n",
" conv4_block12_1_relu (Activati (None, 14, 14, 256) 0 ['conv4_block12_1_bn[0][0]'] \n",
" on) \n",
" \n",
" conv4_block12_2_pad (ZeroPaddi (None, 16, 16, 256) 0 ['conv4_block12_1_relu[0][0]'] \n",
" ng2D) \n",
" \n",
" conv4_block12_2_conv (Conv2D) (None, 14, 14, 256) 589824 ['conv4_block12_2_pad[0][0]'] \n",
" \n",
" conv4_block12_2_bn (BatchNorma (None, 14, 14, 256) 1024 ['conv4_block12_2_conv[0][0]'] \n",
" lization) \n",
" \n",
" conv4_block12_2_relu (Activati (None, 14, 14, 256) 0 ['conv4_block12_2_bn[0][0]'] \n",
" on) \n",
" \n",
" conv4_block12_3_conv (Conv2D) (None, 14, 14, 1024 263168 ['conv4_block12_2_relu[0][0]'] \n",
" ) \n",
" \n",
" conv4_block12_out (Add) (None, 14, 14, 1024 0 ['conv4_block11_out[0][0]', \n",
" ) 'conv4_block12_3_conv[0][0]'] \n",
" \n",
" conv4_block13_preact_bn (Batch (None, 14, 14, 1024 4096 ['conv4_block12_out[0][0]'] \n",
" Normalization) ) \n",
" \n",
" conv4_block13_preact_relu (Act (None, 14, 14, 1024 0 ['conv4_block13_preact_bn[0][0]']\n",
" ivation) ) \n",
" \n",
" conv4_block13_1_conv (Conv2D) (None, 14, 14, 256) 262144 ['conv4_block13_preact_relu[0][0]\n",
" '] \n",
" \n",
" conv4_block13_1_bn (BatchNorma (None, 14, 14, 256) 1024 ['conv4_block13_1_conv[0][0]'] \n",
" lization) \n",
" \n",
" conv4_block13_1_relu (Activati (None, 14, 14, 256) 0 ['conv4_block13_1_bn[0][0]'] \n",
" on) \n",
" \n",
" conv4_block13_2_pad (ZeroPaddi (None, 16, 16, 256) 0 ['conv4_block13_1_relu[0][0]'] \n",
" ng2D) \n",
" \n",
" conv4_block13_2_conv (Conv2D) (None, 14, 14, 256) 589824 ['conv4_block13_2_pad[0][0]'] \n",
" \n",
" conv4_block13_2_bn (BatchNorma (None, 14, 14, 256) 1024 ['conv4_block13_2_conv[0][0]'] \n",
" lization) \n",
" \n",
" conv4_block13_2_relu (Activati (None, 14, 14, 256) 0 ['conv4_block13_2_bn[0][0]'] \n",
" on) \n",
" \n",
" conv4_block13_3_conv (Conv2D) (None, 14, 14, 1024 263168 ['conv4_block13_2_relu[0][0]'] \n",
" ) \n",
" \n",
" conv4_block13_out (Add) (None, 14, 14, 1024 0 ['conv4_block12_out[0][0]', \n",
" ) 'conv4_block13_3_conv[0][0]'] \n",
" \n",
" conv4_block14_preact_bn (Batch (None, 14, 14, 1024 4096 ['conv4_block13_out[0][0]'] \n",
" Normalization) ) \n",
" \n",
" conv4_block14_preact_relu (Act (None, 14, 14, 1024 0 ['conv4_block14_preact_bn[0][0]']\n",
" ivation) ) \n",
" \n",
" conv4_block14_1_conv (Conv2D) (None, 14, 14, 256) 262144 ['conv4_block14_preact_relu[0][0]\n",
" '] \n",
" \n",
" conv4_block14_1_bn (BatchNorma (None, 14, 14, 256) 1024 ['conv4_block14_1_conv[0][0]'] \n",
" lization) \n",
" \n",
" conv4_block14_1_relu (Activati (None, 14, 14, 256) 0 ['conv4_block14_1_bn[0][0]'] \n",
" on) \n",
" \n",
" conv4_block14_2_pad (ZeroPaddi (None, 16, 16, 256) 0 ['conv4_block14_1_relu[0][0]'] \n",
" ng2D) \n",
" \n",
" conv4_block14_2_conv (Conv2D) (None, 14, 14, 256) 589824 ['conv4_block14_2_pad[0][0]'] \n",
" \n",
" conv4_block14_2_bn (BatchNorma (None, 14, 14, 256) 1024 ['conv4_block14_2_conv[0][0]'] \n",
" lization) \n",
" \n",
" conv4_block14_2_relu (Activati (None, 14, 14, 256) 0 ['conv4_block14_2_bn[0][0]'] \n",
" on) \n",
" \n",
" conv4_block14_3_conv (Conv2D) (None, 14, 14, 1024 263168 ['conv4_block14_2_relu[0][0]'] \n",
" ) \n",
" \n",
" conv4_block14_out (Add) (None, 14, 14, 1024 0 ['conv4_block13_out[0][0]', \n",
" ) 'conv4_block14_3_conv[0][0]'] \n",
" \n",
" conv4_block15_preact_bn (Batch (None, 14, 14, 1024 4096 ['conv4_block14_out[0][0]'] \n",
" Normalization) ) \n",
" \n",
" conv4_block15_preact_relu (Act (None, 14, 14, 1024 0 ['conv4_block15_preact_bn[0][0]']\n",
" ivation) ) \n",
" \n",
" conv4_block15_1_conv (Conv2D) (None, 14, 14, 256) 262144 ['conv4_block15_preact_relu[0][0]\n",
" '] \n",
" \n",
" conv4_block15_1_bn (BatchNorma (None, 14, 14, 256) 1024 ['conv4_block15_1_conv[0][0]'] \n",
" lization) \n",
" \n",
" conv4_block15_1_relu (Activati (None, 14, 14, 256) 0 ['conv4_block15_1_bn[0][0]'] \n",
" on) \n",
" \n",
" conv4_block15_2_pad (ZeroPaddi (None, 16, 16, 256) 0 ['conv4_block15_1_relu[0][0]'] \n",
" ng2D) \n",
" \n",
" conv4_block15_2_conv (Conv2D) (None, 14, 14, 256) 589824 ['conv4_block15_2_pad[0][0]'] \n",
" \n",
" conv4_block15_2_bn (BatchNorma (None, 14, 14, 256) 1024 ['conv4_block15_2_conv[0][0]'] \n",
" lization) \n",
" \n",
" conv4_block15_2_relu (Activati (None, 14, 14, 256) 0 ['conv4_block15_2_bn[0][0]'] \n",
" on) \n",
" \n",
" conv4_block15_3_conv (Conv2D) (None, 14, 14, 1024 263168 ['conv4_block15_2_relu[0][0]'] \n",
" ) \n",
" \n",
" conv4_block15_out (Add) (None, 14, 14, 1024 0 ['conv4_block14_out[0][0]', \n",
" ) 'conv4_block15_3_conv[0][0]'] \n",
" \n",
" conv4_block16_preact_bn (Batch (None, 14, 14, 1024 4096 ['conv4_block15_out[0][0]'] \n",
" Normalization) ) \n",
" \n",
" conv4_block16_preact_relu (Act (None, 14, 14, 1024 0 ['conv4_block16_preact_bn[0][0]']\n",
" ivation) ) \n",
" \n",
" conv4_block16_1_conv (Conv2D) (None, 14, 14, 256) 262144 ['conv4_block16_preact_relu[0][0]\n",
" '] \n",
" \n",
" conv4_block16_1_bn (BatchNorma (None, 14, 14, 256) 1024 ['conv4_block16_1_conv[0][0]'] \n",
" lization) \n",
" \n",
" conv4_block16_1_relu (Activati (None, 14, 14, 256) 0 ['conv4_block16_1_bn[0][0]'] \n",
" on) \n",
" \n",
" conv4_block16_2_pad (ZeroPaddi (None, 16, 16, 256) 0 ['conv4_block16_1_relu[0][0]'] \n",
" ng2D) \n",
" \n",
" conv4_block16_2_conv (Conv2D) (None, 14, 14, 256) 589824 ['conv4_block16_2_pad[0][0]'] \n",
" \n",
" conv4_block16_2_bn (BatchNorma (None, 14, 14, 256) 1024 ['conv4_block16_2_conv[0][0]'] \n",
" lization) \n",
" \n",
" conv4_block16_2_relu (Activati (None, 14, 14, 256) 0 ['conv4_block16_2_bn[0][0]'] \n",
" on) \n",
" \n",
" conv4_block16_3_conv (Conv2D) (None, 14, 14, 1024 263168 ['conv4_block16_2_relu[0][0]'] \n",
" ) \n",
" \n",
" conv4_block16_out (Add) (None, 14, 14, 1024 0 ['conv4_block15_out[0][0]', \n",
" ) 'conv4_block16_3_conv[0][0]'] \n",
" \n",
" conv4_block17_preact_bn (Batch (None, 14, 14, 1024 4096 ['conv4_block16_out[0][0]'] \n",
" Normalization) ) \n",
" \n",
" conv4_block17_preact_relu (Act (None, 14, 14, 1024 0 ['conv4_block17_preact_bn[0][0]']\n",
" ivation) ) \n",
" \n",
" conv4_block17_1_conv (Conv2D) (None, 14, 14, 256) 262144 ['conv4_block17_preact_relu[0][0]\n",
" '] \n",
" \n",
" conv4_block17_1_bn (BatchNorma (None, 14, 14, 256) 1024 ['conv4_block17_1_conv[0][0]'] \n",
" lization) \n",
" \n",
" conv4_block17_1_relu (Activati (None, 14, 14, 256) 0 ['conv4_block17_1_bn[0][0]'] \n",
" on) \n",
" \n",
" conv4_block17_2_pad (ZeroPaddi (None, 16, 16, 256) 0 ['conv4_block17_1_relu[0][0]'] \n",
" ng2D) \n",
" \n",
" conv4_block17_2_conv (Conv2D) (None, 14, 14, 256) 589824 ['conv4_block17_2_pad[0][0]'] \n",
" \n",
" conv4_block17_2_bn (BatchNorma (None, 14, 14, 256) 1024 ['conv4_block17_2_conv[0][0]'] \n",
" lization) \n",
" \n",
" conv4_block17_2_relu (Activati (None, 14, 14, 256) 0 ['conv4_block17_2_bn[0][0]'] \n",
" on) \n",
" \n",
" conv4_block17_3_conv (Conv2D) (None, 14, 14, 1024 263168 ['conv4_block17_2_relu[0][0]'] \n",
" ) \n",
" \n",
" conv4_block17_out (Add) (None, 14, 14, 1024 0 ['conv4_block16_out[0][0]', \n",
" ) 'conv4_block17_3_conv[0][0]'] \n",
" \n",
" conv4_block18_preact_bn (Batch (None, 14, 14, 1024 4096 ['conv4_block17_out[0][0]'] \n",
" Normalization) ) \n",
" \n",
" conv4_block18_preact_relu (Act (None, 14, 14, 1024 0 ['conv4_block18_preact_bn[0][0]']\n",
" ivation) ) \n",
" \n",
" conv4_block18_1_conv (Conv2D) (None, 14, 14, 256) 262144 ['conv4_block18_preact_relu[0][0]\n",
" '] \n",
" \n",
" conv4_block18_1_bn (BatchNorma (None, 14, 14, 256) 1024 ['conv4_block18_1_conv[0][0]'] \n",
" lization) \n",
" \n",
" conv4_block18_1_relu (Activati (None, 14, 14, 256) 0 ['conv4_block18_1_bn[0][0]'] \n",
" on) \n",
" \n",
" conv4_block18_2_pad (ZeroPaddi (None, 16, 16, 256) 0 ['conv4_block18_1_relu[0][0]'] \n",
" ng2D) \n",
" \n",
" conv4_block18_2_conv (Conv2D) (None, 14, 14, 256) 589824 ['conv4_block18_2_pad[0][0]'] \n",
" \n",
" conv4_block18_2_bn (BatchNorma (None, 14, 14, 256) 1024 ['conv4_block18_2_conv[0][0]'] \n",
" lization) \n",
" \n",
" conv4_block18_2_relu (Activati (None, 14, 14, 256) 0 ['conv4_block18_2_bn[0][0]'] \n",
" on) \n",
" \n",
" conv4_block18_3_conv (Conv2D) (None, 14, 14, 1024 263168 ['conv4_block18_2_relu[0][0]'] \n",
" ) \n",
" \n",
" conv4_block18_out (Add) (None, 14, 14, 1024 0 ['conv4_block17_out[0][0]', \n",
" ) 'conv4_block18_3_conv[0][0]'] \n",
" \n",
" conv4_block19_preact_bn (Batch (None, 14, 14, 1024 4096 ['conv4_block18_out[0][0]'] \n",
" Normalization) ) \n",
" \n",
" conv4_block19_preact_relu (Act (None, 14, 14, 1024 0 ['conv4_block19_preact_bn[0][0]']\n",
" ivation) ) \n",
" \n",
" conv4_block19_1_conv (Conv2D) (None, 14, 14, 256) 262144 ['conv4_block19_preact_relu[0][0]\n",
" '] \n",
" \n",
" conv4_block19_1_bn (BatchNorma (None, 14, 14, 256) 1024 ['conv4_block19_1_conv[0][0]'] \n",
" lization) \n",
" \n",
" conv4_block19_1_relu (Activati (None, 14, 14, 256) 0 ['conv4_block19_1_bn[0][0]'] \n",
" on) \n",
" \n",
" conv4_block19_2_pad (ZeroPaddi (None, 16, 16, 256) 0 ['conv4_block19_1_relu[0][0]'] \n",
" ng2D) \n",
" \n",
" conv4_block19_2_conv (Conv2D) (None, 14, 14, 256) 589824 ['conv4_block19_2_pad[0][0]'] \n",
" \n",
" conv4_block19_2_bn (BatchNorma (None, 14, 14, 256) 1024 ['conv4_block19_2_conv[0][0]'] \n",
" lization) \n",
" \n",
" conv4_block19_2_relu (Activati (None, 14, 14, 256) 0 ['conv4_block19_2_bn[0][0]'] \n",
" on) \n",
" \n",
" conv4_block19_3_conv (Conv2D) (None, 14, 14, 1024 263168 ['conv4_block19_2_relu[0][0]'] \n",
" ) \n",
" \n",
" conv4_block19_out (Add) (None, 14, 14, 1024 0 ['conv4_block18_out[0][0]', \n",
" ) 'conv4_block19_3_conv[0][0]'] \n",
" \n",
" conv4_block20_preact_bn (Batch (None, 14, 14, 1024 4096 ['conv4_block19_out[0][0]'] \n",
" Normalization) ) \n",
" \n",
" conv4_block20_preact_relu (Act (None, 14, 14, 1024 0 ['conv4_block20_preact_bn[0][0]']\n",
" ivation) ) \n",
" \n",
" conv4_block20_1_conv (Conv2D) (None, 14, 14, 256) 262144 ['conv4_block20_preact_relu[0][0]\n",
" '] \n",
" \n",
" conv4_block20_1_bn (BatchNorma (None, 14, 14, 256) 1024 ['conv4_block20_1_conv[0][0]'] \n",
" lization) \n",
" \n",
" conv4_block20_1_relu (Activati (None, 14, 14, 256) 0 ['conv4_block20_1_bn[0][0]'] \n",
" on) \n",
" \n",
" conv4_block20_2_pad (ZeroPaddi (None, 16, 16, 256) 0 ['conv4_block20_1_relu[0][0]'] \n",
" ng2D) \n",
" \n",
" conv4_block20_2_conv (Conv2D) (None, 14, 14, 256) 589824 ['conv4_block20_2_pad[0][0]'] \n",
" \n",
" conv4_block20_2_bn (BatchNorma (None, 14, 14, 256) 1024 ['conv4_block20_2_conv[0][0]'] \n",
" lization) \n",
" \n",
" conv4_block20_2_relu (Activati (None, 14, 14, 256) 0 ['conv4_block20_2_bn[0][0]'] \n",
" on) \n",
" \n",
" conv4_block20_3_conv (Conv2D) (None, 14, 14, 1024 263168 ['conv4_block20_2_relu[0][0]'] \n",
" ) \n",
" \n",
" conv4_block20_out (Add) (None, 14, 14, 1024 0 ['conv4_block19_out[0][0]', \n",
" ) 'conv4_block20_3_conv[0][0]'] \n",
" \n",
" conv4_block21_preact_bn (Batch (None, 14, 14, 1024 4096 ['conv4_block20_out[0][0]'] \n",
" Normalization) ) \n",
" \n",
" conv4_block21_preact_relu (Act (None, 14, 14, 1024 0 ['conv4_block21_preact_bn[0][0]']\n",
" ivation) ) \n",
" \n",
" conv4_block21_1_conv (Conv2D) (None, 14, 14, 256) 262144 ['conv4_block21_preact_relu[0][0]\n",
" '] \n",
" \n",
" conv4_block21_1_bn (BatchNorma (None, 14, 14, 256) 1024 ['conv4_block21_1_conv[0][0]'] \n",
" lization) \n",
" \n",
" conv4_block21_1_relu (Activati (None, 14, 14, 256) 0 ['conv4_block21_1_bn[0][0]'] \n",
" on) \n",
" \n",
" conv4_block21_2_pad (ZeroPaddi (None, 16, 16, 256) 0 ['conv4_block21_1_relu[0][0]'] \n",
" ng2D) \n",
" \n",
" conv4_block21_2_conv (Conv2D) (None, 14, 14, 256) 589824 ['conv4_block21_2_pad[0][0]'] \n",
" \n",
" conv4_block21_2_bn (BatchNorma (None, 14, 14, 256) 1024 ['conv4_block21_2_conv[0][0]'] \n",
" lization) \n",
" \n",
" conv4_block21_2_relu (Activati (None, 14, 14, 256) 0 ['conv4_block21_2_bn[0][0]'] \n",
" on) \n",
" \n",
" conv4_block21_3_conv (Conv2D) (None, 14, 14, 1024 263168 ['conv4_block21_2_relu[0][0]'] \n",
" ) \n",
" \n",
" conv4_block21_out (Add) (None, 14, 14, 1024 0 ['conv4_block20_out[0][0]', \n",
" ) 'conv4_block21_3_conv[0][0]'] \n",
" \n",
" conv4_block22_preact_bn (Batch (None, 14, 14, 1024 4096 ['conv4_block21_out[0][0]'] \n",
" Normalization) ) \n",
" \n",
" conv4_block22_preact_relu (Act (None, 14, 14, 1024 0 ['conv4_block22_preact_bn[0][0]']\n",
" ivation) ) \n",
" \n",
" conv4_block22_1_conv (Conv2D) (None, 14, 14, 256) 262144 ['conv4_block22_preact_relu[0][0]\n",
" '] \n",
" \n",
" conv4_block22_1_bn (BatchNorma (None, 14, 14, 256) 1024 ['conv4_block22_1_conv[0][0]'] \n",
" lization) \n",
" \n",
" conv4_block22_1_relu (Activati (None, 14, 14, 256) 0 ['conv4_block22_1_bn[0][0]'] \n",
" on) \n",
" \n",
" conv4_block22_2_pad (ZeroPaddi (None, 16, 16, 256) 0 ['conv4_block22_1_relu[0][0]'] \n",
" ng2D) \n",
" \n",
" conv4_block22_2_conv (Conv2D) (None, 14, 14, 256) 589824 ['conv4_block22_2_pad[0][0]'] \n",
" \n",
" conv4_block22_2_bn (BatchNorma (None, 14, 14, 256) 1024 ['conv4_block22_2_conv[0][0]'] \n",
" lization) \n",
" \n",
" conv4_block22_2_relu (Activati (None, 14, 14, 256) 0 ['conv4_block22_2_bn[0][0]'] \n",
" on) \n",
" \n",
" conv4_block22_3_conv (Conv2D) (None, 14, 14, 1024 263168 ['conv4_block22_2_relu[0][0]'] \n",
" ) \n",
" \n",
" conv4_block22_out (Add) (None, 14, 14, 1024 0 ['conv4_block21_out[0][0]', \n",
" ) 'conv4_block22_3_conv[0][0]'] \n",
" \n",
" conv4_block23_preact_bn (Batch (None, 14, 14, 1024 4096 ['conv4_block22_out[0][0]'] \n",
" Normalization) ) \n",
" \n",
" conv4_block23_preact_relu (Act (None, 14, 14, 1024 0 ['conv4_block23_preact_bn[0][0]']\n",
" ivation) ) \n",
" \n",
" conv4_block23_1_conv (Conv2D) (None, 14, 14, 256) 262144 ['conv4_block23_preact_relu[0][0]\n",
" '] \n",
" \n",
" conv4_block23_1_bn (BatchNorma (None, 14, 14, 256) 1024 ['conv4_block23_1_conv[0][0]'] \n",
" lization) \n",
" \n",
" conv4_block23_1_relu (Activati (None, 14, 14, 256) 0 ['conv4_block23_1_bn[0][0]'] \n",
" on) \n",
" \n",
" conv4_block23_2_pad (ZeroPaddi (None, 16, 16, 256) 0 ['conv4_block23_1_relu[0][0]'] \n",
" ng2D) \n",
" \n",
" conv4_block23_2_conv (Conv2D) (None, 7, 7, 256) 589824 ['conv4_block23_2_pad[0][0]'] \n",
" \n",
" conv4_block23_2_bn (BatchNorma (None, 7, 7, 256) 1024 ['conv4_block23_2_conv[0][0]'] \n",
" lization) \n",
" \n",
" conv4_block23_2_relu (Activati (None, 7, 7, 256) 0 ['conv4_block23_2_bn[0][0]'] \n",
" on) \n",
" \n",
" max_pooling2d_5 (MaxPooling2D) (None, 7, 7, 1024) 0 ['conv4_block22_out[0][0]'] \n",
" \n",
" conv4_block23_3_conv (Conv2D) (None, 7, 7, 1024) 263168 ['conv4_block23_2_relu[0][0]'] \n",
" \n",
" conv4_block23_out (Add) (None, 7, 7, 1024) 0 ['max_pooling2d_5[0][0]', \n",
" 'conv4_block23_3_conv[0][0]'] \n",
" \n",
" conv5_block1_preact_bn (BatchN (None, 7, 7, 1024) 4096 ['conv4_block23_out[0][0]'] \n",
" ormalization) \n",
" \n",
" conv5_block1_preact_relu (Acti (None, 7, 7, 1024) 0 ['conv5_block1_preact_bn[0][0]'] \n",
" vation) \n",
" \n",
" conv5_block1_1_conv (Conv2D) (None, 7, 7, 512) 524288 ['conv5_block1_preact_relu[0][0]'\n",
" ] \n",
" \n",
" conv5_block1_1_bn (BatchNormal (None, 7, 7, 512) 2048 ['conv5_block1_1_conv[0][0]'] \n",
" ization) \n",
" \n",
" conv5_block1_1_relu (Activatio (None, 7, 7, 512) 0 ['conv5_block1_1_bn[0][0]'] \n",
" n) \n",
" \n",
" conv5_block1_2_pad (ZeroPaddin (None, 9, 9, 512) 0 ['conv5_block1_1_relu[0][0]'] \n",
" g2D) \n",
" \n",
" conv5_block1_2_conv (Conv2D) (None, 7, 7, 512) 2359296 ['conv5_block1_2_pad[0][0]'] \n",
" \n",
" conv5_block1_2_bn (BatchNormal (None, 7, 7, 512) 2048 ['conv5_block1_2_conv[0][0]'] \n",
" ization) \n",
" \n",
" conv5_block1_2_relu (Activatio (None, 7, 7, 512) 0 ['conv5_block1_2_bn[0][0]'] \n",
" n) \n",
" \n",
" conv5_block1_0_conv (Conv2D) (None, 7, 7, 2048) 2099200 ['conv5_block1_preact_relu[0][0]'\n",
" ] \n",
" \n",
" conv5_block1_3_conv (Conv2D) (None, 7, 7, 2048) 1050624 ['conv5_block1_2_relu[0][0]'] \n",
" \n",
" conv5_block1_out (Add) (None, 7, 7, 2048) 0 ['conv5_block1_0_conv[0][0]', \n",
" 'conv5_block1_3_conv[0][0]'] \n",
" \n",
" conv5_block2_preact_bn (BatchN (None, 7, 7, 2048) 8192 ['conv5_block1_out[0][0]'] \n",
" ormalization) \n",
" \n",
" conv5_block2_preact_relu (Acti (None, 7, 7, 2048) 0 ['conv5_block2_preact_bn[0][0]'] \n",
" vation) \n",
" \n",
" conv5_block2_1_conv (Conv2D) (None, 7, 7, 512) 1048576 ['conv5_block2_preact_relu[0][0]'\n",
" ] \n",
" \n",
" conv5_block2_1_bn (BatchNormal (None, 7, 7, 512) 2048 ['conv5_block2_1_conv[0][0]'] \n",
" ization) \n",
" \n",
" conv5_block2_1_relu (Activatio (None, 7, 7, 512) 0 ['conv5_block2_1_bn[0][0]'] \n",
" n) \n",
" \n",
" conv5_block2_2_pad (ZeroPaddin (None, 9, 9, 512) 0 ['conv5_block2_1_relu[0][0]'] \n",
" g2D) \n",
" \n",
" conv5_block2_2_conv (Conv2D) (None, 7, 7, 512) 2359296 ['conv5_block2_2_pad[0][0]'] \n",
" \n",
" conv5_block2_2_bn (BatchNormal (None, 7, 7, 512) 2048 ['conv5_block2_2_conv[0][0]'] \n",
" ization) \n",
" \n",
" conv5_block2_2_relu (Activatio (None, 7, 7, 512) 0 ['conv5_block2_2_bn[0][0]'] \n",
" n) \n",
" \n",
" conv5_block2_3_conv (Conv2D) (None, 7, 7, 2048) 1050624 ['conv5_block2_2_relu[0][0]'] \n",
" \n",
" conv5_block2_out (Add) (None, 7, 7, 2048) 0 ['conv5_block1_out[0][0]', \n",
" 'conv5_block2_3_conv[0][0]'] \n",
" \n",
" conv5_block3_preact_bn (BatchN (None, 7, 7, 2048) 8192 ['conv5_block2_out[0][0]'] \n",
" ormalization) \n",
" \n",
" conv5_block3_preact_relu (Acti (None, 7, 7, 2048) 0 ['conv5_block3_preact_bn[0][0]'] \n",
" vation) \n",
" \n",
" conv5_block3_1_conv (Conv2D) (None, 7, 7, 512) 1048576 ['conv5_block3_preact_relu[0][0]'\n",
" ] \n",
" \n",
" conv5_block3_1_bn (BatchNormal (None, 7, 7, 512) 2048 ['conv5_block3_1_conv[0][0]'] \n",
" ization) \n",
" \n",
" conv5_block3_1_relu (Activatio (None, 7, 7, 512) 0 ['conv5_block3_1_bn[0][0]'] \n",
" n) \n",
" \n",
" conv5_block3_2_pad (ZeroPaddin (None, 9, 9, 512) 0 ['conv5_block3_1_relu[0][0]'] \n",
" g2D) \n",
" \n",
" conv5_block3_2_conv (Conv2D) (None, 7, 7, 512) 2359296 ['conv5_block3_2_pad[0][0]'] \n",
" \n",
" conv5_block3_2_bn (BatchNormal (None, 7, 7, 512) 2048 ['conv5_block3_2_conv[0][0]'] \n",
" ization) \n",
" \n",
" conv5_block3_2_relu (Activatio (None, 7, 7, 512) 0 ['conv5_block3_2_bn[0][0]'] \n",
" n) \n",
" \n",
" conv5_block3_3_conv (Conv2D) (None, 7, 7, 2048) 1050624 ['conv5_block3_2_relu[0][0]'] \n",
" \n",
" conv5_block3_out (Add) (None, 7, 7, 2048) 0 ['conv5_block2_out[0][0]', \n",
" 'conv5_block3_3_conv[0][0]'] \n",
" \n",
" post_bn (BatchNormalization) (None, 7, 7, 2048) 8192 ['conv5_block3_out[0][0]'] \n",
" \n",
" post_relu (Activation) (None, 7, 7, 2048) 0 ['post_bn[0][0]'] \n",
" \n",
" flatten_4 (Flatten) (None, 100352) 0 ['post_relu[0][0]'] \n",
" \n",
" dense_4 (Dense) (None, 5) 501765 ['flatten_4[0][0]'] \n",
" \n",
"==================================================================================================\n",
"Total params: 43,128,325\n",
"Trainable params: 501,765\n",
"Non-trainable params: 42,626,560\n",
"__________________________________________________________________________________________________\n"
]
}
],
"source": [
"# create a model object\n",
"model = Model(inputs=resnet.input, outputs=prediction)\n",
"\n",
"# view the structure of the model\n",
"model.summary()"
]
},
{
"cell_type": "code",
"execution_count": 55,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"Epoch 1/25\n"
]
},
{
"name": "stderr",
"output_type": "stream",
"text": [
"/var/folders/3r/c8tg1h051m18qhsdccdysrt40000gn/T/ipykernel_11345/3602206220.py:10: UserWarning: `Model.fit_generator` is deprecated and will be removed in a future version. Please use `Model.fit`, which supports generators.\n",
" r = model.fit_generator(\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"25/25 [==============================] - 59s 2s/step - loss: 3.6952 - accuracy: 0.7675 - val_loss: 1.0397 - val_accuracy: 0.9427\n",
"Epoch 2/25\n",
"25/25 [==============================] - 55s 2s/step - loss: 0.2606 - accuracy: 0.9688 - val_loss: 0.6033 - val_accuracy: 0.9479\n",
"Epoch 3/25\n",
"25/25 [==============================] - 55s 2s/step - loss: 0.0624 - accuracy: 0.9887 - val_loss: 0.7021 - val_accuracy: 0.9323\n",
"Epoch 4/25\n",
"25/25 [==============================] - 55s 2s/step - loss: 0.0150 - accuracy: 0.9987 - val_loss: 0.4405 - val_accuracy: 0.9688\n",
"Epoch 5/25\n",
"25/25 [==============================] - 56s 2s/step - loss: 0.0123 - accuracy: 0.9975 - val_loss: 0.3344 - val_accuracy: 0.9740\n",
"Epoch 6/25\n",
"25/25 [==============================] - 56s 2s/step - loss: 1.9117e-07 - accuracy: 1.0000 - val_loss: 0.1343 - val_accuracy: 0.9844\n",
"Epoch 7/25\n",
"25/25 [==============================] - 56s 2s/step - loss: 4.4405e-08 - accuracy: 1.0000 - val_loss: 0.2787 - val_accuracy: 0.9844\n",
"Epoch 8/25\n",
"25/25 [==============================] - 56s 2s/step - loss: 3.5911e-08 - accuracy: 1.0000 - val_loss: 0.2785 - val_accuracy: 0.9844\n",
"Epoch 9/25\n",
"25/25 [==============================] - 57s 2s/step - loss: 2.7716e-08 - accuracy: 1.0000 - val_loss: 0.2785 - val_accuracy: 0.9844\n",
"Epoch 10/25\n",
"25/25 [==============================] - 57s 2s/step - loss: 2.2948e-08 - accuracy: 1.0000 - val_loss: 0.1292 - val_accuracy: 0.9896\n",
"Epoch 11/25\n",
"25/25 [==============================] - 57s 2s/step - loss: 2.0563e-08 - accuracy: 1.0000 - val_loss: 0.2785 - val_accuracy: 0.9844\n",
"Epoch 12/25\n",
"25/25 [==============================] - 57s 2s/step - loss: 1.7583e-08 - accuracy: 1.0000 - val_loss: 0.2785 - val_accuracy: 0.9844\n",
"Epoch 13/25\n",
"25/25 [==============================] - 60s 2s/step - loss: 1.5646e-08 - accuracy: 1.0000 - val_loss: 0.2775 - val_accuracy: 0.9844\n",
"Epoch 14/25\n",
"25/25 [==============================] - 57s 2s/step - loss: 1.4305e-08 - accuracy: 1.0000 - val_loss: 0.1950 - val_accuracy: 0.9896\n",
"Epoch 15/25\n",
"25/25 [==============================] - 57s 2s/step - loss: 1.3560e-08 - accuracy: 1.0000 - val_loss: 0.2785 - val_accuracy: 0.9844\n",
"Epoch 16/25\n",
"25/25 [==============================] - 57s 2s/step - loss: 1.1921e-08 - accuracy: 1.0000 - val_loss: 0.2785 - val_accuracy: 0.9844\n",
"Epoch 17/25\n",
"25/25 [==============================] - 57s 2s/step - loss: 1.1176e-08 - accuracy: 1.0000 - val_loss: 0.1318 - val_accuracy: 0.9896\n",
"Epoch 18/25\n",
"25/25 [==============================] - 59s 2s/step - loss: 1.0431e-08 - accuracy: 1.0000 - val_loss: 0.2776 - val_accuracy: 0.9844\n",
"Epoch 19/25\n",
"25/25 [==============================] - 58s 2s/step - loss: 9.8347e-09 - accuracy: 1.0000 - val_loss: 0.2785 - val_accuracy: 0.9844\n",
"Epoch 20/25\n",
"25/25 [==============================] - 58s 2s/step - loss: 9.2387e-09 - accuracy: 1.0000 - val_loss: 0.2775 - val_accuracy: 0.9844\n",
"Epoch 21/25\n",
"25/25 [==============================] - 60s 2s/step - loss: 8.7917e-09 - accuracy: 1.0000 - val_loss: 0.2785 - val_accuracy: 0.9844\n",
"Epoch 22/25\n",
"25/25 [==============================] - 58s 2s/step - loss: 8.3446e-09 - accuracy: 1.0000 - val_loss: 0.2785 - val_accuracy: 0.9844\n",
"Epoch 23/25\n",
"25/25 [==============================] - 57s 2s/step - loss: 5.5134e-09 - accuracy: 1.0000 - val_loss: 0.2785 - val_accuracy: 0.9844\n",
"Epoch 24/25\n",
"25/25 [==============================] - 56s 2s/step - loss: 7.5996e-09 - accuracy: 1.0000 - val_loss: 0.2785 - val_accuracy: 0.9844\n",
"Epoch 25/25\n",
"25/25 [==============================] - 57s 2s/step - loss: 7.3016e-09 - accuracy: 1.0000 - val_loss: 0.2785 - val_accuracy: 0.9844\n"
]
}
],
"source": [
"# tell the model what cost and optimization method to use\n",
"model.compile(\n",
" loss='sparse_categorical_crossentropy',\n",
" optimizer='adam',\n",
" metrics=['accuracy']\n",
")\n",
"\n",
"#train_ds_vgg_sw, test_ds_vgg_sw, validation_ds_vgg_sw\n",
"# fit the model\n",
"r = model.fit_generator(\n",
" train_ds,\n",
" validation_data=validation_ds,\n",
2023-01-07 22:15:23 +01:00
" epochs=25,\n",
2023-01-06 03:02:47 +01:00
" steps_per_epoch=len(train_ds),\n",
" validation_steps=len(validation_ds)\n",
")"
]
},
{
"cell_type": "code",
"execution_count": 56,
"metadata": {},
"outputs": [
{
"data": {
"image/png": "iVBORw0KGgoAAAANSUhEUgAAAjwAAAHHCAYAAAC7soLdAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjYuMiwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8o6BhiAAAACXBIWXMAAA9hAAAPYQGoP6dpAABwFUlEQVR4nO3dd3wT9f8H8NclTdJdWrqhlFV2GTILypBqGSJLNpZR4KsCMkSwsh0URQQRRFFo5adlCjhQECpLlgIWAQEBgQLd0L2y7vdHaSC0hRaSXJu+no9HHm0+97m7952RvPtZJ4iiKIKIiIjIismkDoCIiIjI3JjwEBERkdVjwkNERERWjwkPERERWT0mPERERGT1mPAQERGR1WPCQ0RERFaPCQ8RERFZPSY8REREZPWY8BCRyQiCgAULFpR7v2vXrkEQBERFRZk8JiIigAkPkdWJioqCIAgQBAG///57se2iKMLPzw+CIOCFF16QIEIiIstjwkNkpWxtbREdHV2s/MCBA7h58yZUKpUEURERSYMJD5GV6tWrF7Zs2QKtVmtUHh0djdatW8Pb21uiyKqOnJwcqUMgoruY8BBZqWHDhuH27dvYs2ePoUytVmPr1q0YPnx4ifvk5OTgjTfegJ+fH1QqFRo2bIiPPvoIoiga1SsoKMC0adPg4eEBJycnvPjii7h582aJx7x16xbGjh0LLy8vqFQqNG3aFOvWrXusa7pz5w5mzJiBwMBAODo6wtnZGT179sTp06eL1c3Pz8eCBQvQoEED2NrawsfHBwMGDMCVK1cMdfR6PT755BMEBgbC1tYWHh4e6NGjB06cOAHg4WOLHhyvtGDBAgiCgH/++QfDhw+Hq6srnn76aQDA33//jdGjR6Nu3bqwtbWFt7c3xo4di9u3b5d4v8LCwuDr6wuVSoU6derg1VdfhVqtxn///QdBELBs2bJi+x05cgSCIGDDhg3lva1EVYKN1AEQkXnUrl0bQUFB2LBhA3r27AkA+OWXX5CRkYGhQ4dixYoVRvVFUcSLL76Iffv2ISwsDC1btsTu3bvx5ptv4tatW0ZfsuPGjcM333yD4cOHo2PHjvjtt9/Qu3fvYjEkJSWhQ4cOEAQBkyZNgoeHB3755ReEhYUhMzMTU6dOLdc1/ffff9ixYwcGDRqEOnXqICkpCV988QW6dOmCf/75B76+vgAAnU6HF154ATExMRg6dCimTJmCrKws7NmzB2fPnkW9evUAAGFhYYiKikLPnj0xbtw4aLVaHDp0CMeOHUObNm3KFVuRQYMGISAgAIsWLTIkinv27MF///2HMWPGwNvbG+fOncOaNWtw7tw5HDt2DIIgAADi4+PRrl07pKenY8KECWjUqBFu3bqFrVu3Ijc3F3Xr1kWnTp3w7bffYtq0aUbn/fbbb+Hk5IS+ffs+VtxEVk8kIqsSGRkpAhD//PNPceXKlaKTk5OYm5sriqIoDho0SOzWrZsoiqLo7+8v9u7d27Dfjh07RADie++9Z3S8l156SRQEQbx8+bIoiqIYGxsrAhBfe+01o3rDhw8XAYjz5883lIWFhYk+Pj5iamqqUd2hQ4eKLi4uhriuXr0qAhAjIyMfem35+fmiTqczKrt69aqoUqnEd955x1C2bt06EYD48ccfFzuGXq8XRVEUf/vtNxGA+Prrr5da52FxPXit8+fPFwGIw4YNK1a36Drvt2HDBhGAePDgQUNZaGioKJPJxD///LPUmL744gsRgHj+/HnDNrVaLbq7u4ujRo0qth8RFWKXFpEVGzx4MPLy8vDTTz8hKysLP/30U6ndWT///DPkcjlef/11o/I33ngDoijil19+MdQDUKzeg601oijiu+++Q58+fSCKIlJTUw2vkJAQZGRk4NSpU+W6HpVKBZms8J8tnU6H27dvw9HREQ0bNjQ61nfffQd3d3dMnjy52DGKWlO+++47CIKA+fPnl1rncbzyyivFyuzs7Ay/5+fnIz
U1FR06dAAAQ9x6vR47duxAnz59SmxdKopp8ODBsLW1xbfffmvYtnv3bqSmpmLkyJGPHTeRtWPCQ2TFPDw8EBwcjOjoaGzbtg06nQ4vvfRSiXWvX78OX19fODk5GZU3btzYsL3op0wmM3QLFWnYsKHR+5SUFKSnp2PNmjXw8PAweo0ZMwYAkJycXK7r0ev1WLZsGQICAqBSqeDu7g4PDw/8/fffyMjIMNS7cuUKGjZsCBub0nvtr1y5Al9fX7i5uZUrhkepU6dOsbI7d+5gypQp8PLygp2dHTw8PAz1iuJOSUlBZmYmmjVr9tDjV6tWDX369DGagfftt9+iRo0aePbZZ014JUTWhWN4iKzc8OHDMX78eCQmJqJnz56oVq2aRc6r1+sBACNHjsSoUaNKrNO8efNyHXPRokWYO3cuxo4di3fffRdubm6QyWSYOnWq4XymVFpLj06nK3Wf+1tzigwePBhHjhzBm2++iZYtW8LR0RF6vR49evR4rLhDQ0OxZcsWHDlyBIGBgfjhhx/w2muvGVq/iKg4JjxEVq5///743//+h2PHjmHTpk2l1vP398fevXuRlZVl1Mpz4cIFw/ain3q93tCKUuTixYtGxyuawaXT6RAcHGySa9m6dSu6deuGtWvXGpWnp6fD3d3d8L5evXo4fvw4NBoNFApFiceqV68edu/ejTt37pTayuPq6mo4/v2KWrvKIi0tDTExMVi4cCHmzZtnKL906ZJRPQ8PDzg7O+Ps2bOPPGaPHj3g4eGBb7/9Fu3bt0dubi5efvnlMsdEVBXxzwEiK+fo6IjVq1djwYIF6NOnT6n1evXqBZ1Oh5UrVxqVL1u2DIIgGGZ6Ff18cJbX8uXLjd7L5XIMHDgQ3333XYlf4ikpKeW+FrlcXmyK/JYtW3Dr1i2jsoEDByI1NbXYtQAw7D9w4ECIooiFCxeWWsfZ2Rnu7u44ePCg0fbPPvusXDHff8wiD94vmUyGfv364ccffzRMiy8pJgCwsbHBsGHDsHnzZkRFRSEwMLDcrWVEVQ1beIiqgNK6lO7Xp08fdOvWDbNnz8a1a9fQokUL/Prrr/j+++8xdepUw5idli1bYtiwYfjss8+QkZGBjh07IiYmBpcvXy52zMWLF2Pfvn1o3749xo8fjyZNmuDOnTs4deoU9u7dizt37pTrOl544QW88847GDNmDDp27IgzZ87g22+/Rd26dY3qhYaGYv369Zg+fTr++OMPPPPMM8jJycHevXvx2muvoW/fvujWrRtefvllrFixApcuXTJ0Lx06dAjdunXDpEmTABROwV+8eDHGjRuHNm3a4ODBg/j333/LHLOzszM6d+6MDz/8EBqNBjVq1MCvv/6Kq1evFqu7aNEi/Prrr+jSpQsmTJiAxo0bIyEhAVu2bMHvv/9u1B0ZGhqKFStWYN++ffjggw/KdR+JqiTJ5ocRkVncPy39YR6cli6KopiVlSVOmzZN9PX1FRUKhRgQECAuWbLEMCW6SF5envj666+L1atXFx0cHMQ+ffqIN27cKDZVWxRFMSkpSZw4caLo5+cnKhQK0dvbW+zevbu4Zs0aQ53yTEt/4403RB8fH9HOzk7s1KmTePToUbFLly5ily5djOrm5uaKs2fPFuvUqWM470svvSReuXLFUEer1YpLliwRGzVqJCqVStHDw0Ps2bOnePLkSaPjhIWFiS4uLqKTk5M4ePBgMTk5udRp6SkpKcXivnnzpti/f3+xWrVqoouLizho0CAxPj6+xPt1/fp1MTQ0VPTw8BBVKpVYt25dceLEiWJBQUGx4zZt2lSUyWTizZs3H3rfiEgUBVF8oJ2ViIgqhVatWsHNzQ0xMTFSh0JU4XEMDxFRJXTixAnExsYiNDRU6lCIKgW28BARVSJnz57FyZMnsXTpUqSmpuK///6Dra2t1GERVXhs4SEiqkS2bt2KMWPGQKPRYMOGDUx2iMqILTxERERk9djCQ0RERFaPCQ8RERFZvSq38K
Ber0d8fDycnJye6InIREREZDmiKCIrKwu+vr6P9dy4KpfwxMfHw8/PT+owiIiI6DHcuHEDNWvWLPd+VS7hKXoo4o0
"text/plain": [
"<Figure size 640x480 with 1 Axes>"
]
},
"metadata": {},
"output_type": "display_data"
}
],
"source": [
"# loss\n",
"plt.plot(r.history[\"accuracy\"])\n",
"plt.plot(r.history['val_accuracy'])\n",
"plt.plot(r.history['loss'])\n",
"plt.plot(r.history['val_loss'])\n",
"plt.title(\"Model accuracy\")\n",
"plt.ylabel(\"Value\")\n",
"plt.xlabel(\"Epoch\")\n",
"plt.legend([\"Accuracy\",\"Validation Accuracy\",\"Loss\",\"Validation Loss\"])\n",
"plt.show()\n",
"\n",
"model.save('resnet_1.h5')"
]
},
{
"cell_type": "code",
"execution_count": 57,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"8/8 [==============================] - 15s 2s/step - loss: 0.7370 - accuracy: 0.9414\n"
]
},
{
"data": {
"text/plain": [
"[0.7369823455810547, 0.94140625]"
]
},
"execution_count": 57,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"model.evaluate(test_ds)"
]
}
],
"metadata": {
"kernelspec": {
"display_name": "Python 3",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.10.1 (v3.10.1:2cd268a3a9, Dec 6 2021, 14:28:59) [Clang 13.0.0 (clang-1300.0.29.3)]"
},
"orig_nbformat": 4,
"vscode": {
"interpreter": {
"hash": "aee8b7b246df8f9039afb4144a1f6fd8d2ca17a180786b69acc140d282b71a49"
}
}
},
"nbformat": 4,
"nbformat_minor": 2
}