Symulowanie-wizualne/sw_lab9-10_1.ipynb
{
"cells": [
{
"attachments": {},
"cell_type": "markdown",
"metadata": {},
"source": [
"### Aleksandra Jonas, Aleksandra Gronowska, Iwona Christop\n",
"# Zadanie 9-10 - VGG16 + ResNet on train_test_sw "
]
},
{
"attachments": {},
"cell_type": "markdown",
"metadata": {},
"source": [
"### Przygotowanie danych"
]
},
{
"cell_type": "code",
"execution_count": 15,
"metadata": {},
"outputs": [],
"source": [
"from IPython.display import Image, display"
]
},
{
"cell_type": "code",
"execution_count": 16,
"id": "2fe63b50",
"metadata": {},
"outputs": [],
"source": [
"import sys\n",
"import subprocess\n",
"import pkg_resources\n",
"import numpy as np\n",
"\n",
"required = { 'scikit-image'}\n",
"installed = {pkg.key for pkg in pkg_resources.working_set}\n",
"missing = required - installed\n",
"# VGG16 requires images to be of dim = (224, 224, 3)\n",
"newSize = (224,224)\n",
"\n",
"if missing: \n",
" python = sys.executable\n",
" subprocess.check_call([python, '-m', 'pip', 'install', *missing], stdout=subprocess.DEVNULL)\n",
"\n",
"def load_train_data(input_dir):\n",
" import numpy as np\n",
" import pandas as pd\n",
" import os\n",
" from skimage.io import imread\n",
" import cv2 as cv\n",
" from pathlib import Path\n",
" import random\n",
" from shutil import copyfile, rmtree\n",
" import json\n",
"\n",
" import seaborn as sns\n",
" import matplotlib.pyplot as plt\n",
"\n",
" import matplotlib\n",
" \n",
" image_dir = Path(input_dir)\n",
" categories_name = []\n",
" for file in os.listdir(image_dir):\n",
" d = os.path.join(image_dir, file)\n",
" if os.path.isdir(d):\n",
" categories_name.append(file)\n",
"\n",
" folders = [directory for directory in image_dir.iterdir() if directory.is_dir()]\n",
"\n",
" train_img = []\n",
" categories_count=[]\n",
" labels=[]\n",
" for i, direc in enumerate(folders):\n",
" count = 0\n",
" for obj in direc.iterdir():\n",
" if os.path.isfile(obj) and os.path.basename(os.path.normpath(obj)) != 'desktop.ini':\n",
" labels.append(os.path.basename(os.path.normpath(direc)))\n",
" count += 1\n",
" img = imread(obj)#zwraca ndarry postaci xSize x ySize x colorDepth\n",
" img = img[:, :, :3]\n",
" img = cv.resize(img, newSize, interpolation=cv.INTER_AREA)# zwraca ndarray\n",
" img = img / 255 #normalizacja\n",
" train_img.append(img)\n",
" categories_count.append(count)\n",
" X={}\n",
" X[\"values\"] = np.array(train_img)\n",
" X[\"categories_name\"] = categories_name\n",
" X[\"categories_count\"] = categories_count\n",
" X[\"labels\"]=labels\n",
" return X\n",
"\n",
"def load_test_data(input_dir):\n",
" import numpy as np\n",
" import pandas as pd\n",
" import os\n",
" from skimage.io import imread\n",
" import cv2 as cv\n",
" from pathlib import Path\n",
" import random\n",
" from shutil import copyfile, rmtree\n",
" import json\n",
"\n",
" import seaborn as sns\n",
" import matplotlib.pyplot as plt\n",
"\n",
" import matplotlib\n",
"\n",
" image_path = Path(input_dir)\n",
"\n",
" labels_path = image_path.parents[0] / 'test_labels.json'\n",
"\n",
" jsonString = labels_path.read_text()\n",
" objects = json.loads(jsonString)\n",
"\n",
" categories_name = []\n",
" categories_count=[]\n",
" count = 0\n",
" c = objects[0]['value']\n",
" for e in objects:\n",
" if e['value'] != c:\n",
" categories_count.append(count)\n",
" c = e['value']\n",
" count = 1\n",
" else:\n",
" count += 1\n",
" if not e['value'] in categories_name:\n",
" categories_name.append(e['value'])\n",
"\n",
" categories_count.append(count)\n",
" \n",
" test_img = []\n",
"\n",
" labels=[]\n",
" for e in objects:\n",
" p = image_path / e['filename']\n",
" img = imread(p)#zwraca ndarry postaci xSize x ySize x colorDepth\n",
" img = img[:, :, :3]\n",
" img = cv.resize(img, newSize, interpolation=cv.INTER_AREA)# zwraca ndarray\n",
" img = img / 255#normalizacja\n",
" test_img.append(img)\n",
" labels.append(e['value'])\n",
"\n",
" X={}\n",
" X[\"values\"] = np.array(test_img)\n",
" X[\"categories_name\"] = categories_name\n",
" X[\"categories_count\"] = categories_count\n",
" X[\"labels\"]=labels\n",
" return X"
]
},
{
"cell_type": "code",
"execution_count": 17,
"metadata": {},
"outputs": [],
"source": [
"def create_tf_ds(X_train, y_train_enc, X_validate, y_validate_enc, X_test, y_test_enc):\n",
" import tensorflow as tf\n",
" \n",
" train_ds = tf.data.Dataset.from_tensor_slices((X_train, y_train_enc))\n",
" validation_ds = tf.data.Dataset.from_tensor_slices((X_validate, y_validate_enc))\n",
" test_ds = tf.data.Dataset.from_tensor_slices((X_test, y_test_enc))\n",
"\n",
" train_ds_size = tf.data.experimental.cardinality(train_ds).numpy()\n",
" test_ds_size = tf.data.experimental.cardinality(test_ds).numpy()\n",
" validation_ds_size = tf.data.experimental.cardinality(validation_ds).numpy()\n",
"\n",
" print(\"Training data size:\", train_ds_size)\n",
" print(\"Test data size:\", test_ds_size)\n",
" print(\"Validation data size:\", validation_ds_size)\n",
"\n",
" train_ds = (train_ds\n",
" .shuffle(buffer_size=train_ds_size)\n",
" .batch(batch_size=32, drop_remainder=True))\n",
" test_ds = (test_ds\n",
" .shuffle(buffer_size=train_ds_size)\n",
" .batch(batch_size=32, drop_remainder=True))\n",
" validation_ds = (validation_ds\n",
" .shuffle(buffer_size=train_ds_size)\n",
" .batch(batch_size=32, drop_remainder=True))\n",
" \n",
" return train_ds, test_ds, validation_ds"
]
},
{
"cell_type": "code",
"execution_count": 18,
"metadata": {},
"outputs": [],
"source": [
"def get_run_logdir(root_logdir):\n",
" import os\n",
" import time\n",
"\n",
" run_id = time.strftime(\"run_%Y_%m_%d-%H_%M_%S\")\n",
" return os.path.join(root_logdir, run_id)"
]
},
{
"cell_type": "code",
"execution_count": 19,
"metadata": {},
"outputs": [],
"source": [
"def diagram_setup(model_name):\n",
" from tensorflow import keras\n",
" import os\n",
" \n",
" root_logdir = os.path.join(os.curdir, f\"logs\\\\fit\\\\{model_name}\\\\\")\n",
" \n",
" run_logdir = get_run_logdir(root_logdir)\n",
" tensorboard_cb = keras.callbacks.TensorBoard(run_logdir)"
]
},
{
"cell_type": "code",
"execution_count": 20,
"id": "cc941c5a",
"metadata": {},
"outputs": [],
"source": [
"# Data load\n",
"data_train = load_train_data(\"./train_test_sw/train_sw\")\n",
"values_train = data_train['values']\n",
"labels_train = data_train['labels']\n",
"\n",
"data_test = load_test_data(\"./train_test_sw/test_sw\")\n",
"X_test = data_test['values']\n",
"y_test = data_test['labels']"
]
},
{
"cell_type": "code",
"execution_count": 21,
"id": "25040ac9",
"metadata": {},
"outputs": [],
"source": [
"from sklearn.model_selection import train_test_split\n",
"X_train, X_validate, y_train, y_validate = train_test_split(values_train, labels_train, test_size=0.2, random_state=42)"
]
},
{
"cell_type": "code",
"execution_count": 22,
"id": "a1fe47e6",
"metadata": {},
"outputs": [],
"source": [
"from sklearn.preprocessing import LabelEncoder\n",
"class_le = LabelEncoder()\n",
"y_train_enc = class_le.fit_transform(y_train)\n",
"y_validate_enc = class_le.fit_transform(y_validate)\n",
"y_test_enc = class_le.fit_transform(y_test)"
]
},
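{
"attachments": {},
"cell_type": "markdown",
"metadata": {},
"source": [
"A small sanity check (a sketch added here, not part of the original run): `LabelEncoder` assigns consecutive integers to the sorted class names, and `class_le.classes_` exposes that mapping, which helps when reading the encoded targets and the model's softmax outputs."
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Sketch: show which integer each class name was mapped to by the LabelEncoder above\n",
"dict(zip(class_le.classes_, class_le.transform(class_le.classes_)))"
]
},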
{
"cell_type": "code",
"execution_count": 23,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"Training data size: 820\n",
"Test data size: 259\n",
"Validation data size: 206\n"
]
}
],
"source": [
"train_ds, test_ds, validation_ds = create_tf_ds(X_train, y_train_enc, X_validate, y_validate_enc, X_test, y_test_enc)"
]
},
{
"attachments": {},
"cell_type": "markdown",
"metadata": {},
"source": [
"## VGG16"
]
},
{
"cell_type": "code",
"execution_count": 24,
"metadata": {},
"outputs": [],
"source": [
"diagram_setup('vgg_sw')"
]
},
{
"cell_type": "code",
"execution_count": 25,
"metadata": {},
"outputs": [],
"source": [
"import keras,os\n",
"from keras.models import Sequential\n",
"from keras.layers import Dense, Conv2D, MaxPool2D , Flatten\n",
"from keras.preprocessing.image import ImageDataGenerator\n",
"import numpy as np\n",
"\n",
"model = keras.models.Sequential([\n",
" keras.layers.Conv2D(filters=64, kernel_size=(3,3), activation='relu', input_shape=(224,224,3), padding=\"same\"),\n",
" keras.layers.Conv2D(filters=64, kernel_size=(3,3), activation='relu', input_shape=(224,224,3), padding=\"same\"),\n",
" keras.layers.MaxPool2D(pool_size=(2,2), strides=(2,2)),\n",
" keras.layers.Conv2D(filters=128, kernel_size=(3,3), padding=\"same\", activation=\"relu\"),\n",
" keras.layers.Conv2D(filters=128, kernel_size=(3,3), padding=\"same\", activation=\"relu\"),\n",
" keras.layers.MaxPool2D(pool_size=(2,2), strides=(2,2)),\n",
" keras.layers.Conv2D(filters=256, kernel_size=(3,3), padding=\"same\", activation=\"relu\"),\n",
" keras.layers.Conv2D(filters=256, kernel_size=(3,3), padding=\"same\", activation=\"relu\"),\n",
" keras.layers.Conv2D(filters=256, kernel_size=(3,3), padding=\"same\", activation=\"relu\"),\n",
" keras.layers.MaxPool2D(pool_size=(2,2), strides=(2,2)),\n",
" keras.layers.Conv2D(filters=512, kernel_size=(3,3), padding=\"same\", activation=\"relu\"),\n",
" keras.layers.Conv2D(filters=512, kernel_size=(3,3), padding=\"same\", activation=\"relu\"),\n",
" keras.layers.Conv2D(filters=512, kernel_size=(3,3), padding=\"same\", activation=\"relu\"),\n",
" keras.layers.MaxPool2D(pool_size=(2,2), strides=(2,2)),\n",
" keras.layers.Conv2D(filters=512, kernel_size=(3,3), padding=\"same\", activation=\"relu\"),\n",
" keras.layers.Conv2D(filters=512, kernel_size=(3,3), padding=\"same\", activation=\"relu\"),\n",
" keras.layers.Conv2D(filters=512, kernel_size=(3,3), padding=\"same\", activation=\"relu\"),\n",
" keras.layers.Flatten(),\n",
" keras.layers.Dense(units = 4096, activation='relu'),\n",
" keras.layers.Dense(units = 4096, activation='relu'),\n",
" keras.layers.Dense(units = 5, activation='softmax')\n",
"])"
]
},
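{
"attachments": {},
"cell_type": "markdown",
"metadata": {},
"source": [
"For reference (an optional sketch, not executed in the original notebook): `keras.applications` ships a canonical VGG16, which includes a fifth pooling block before the classifier. Instantiating it with random weights and 5 output classes makes it easy to compare its layer shapes and parameter count with the hand-built stack above."
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Sketch: canonical VGG16 for comparison (random weights, 5 classes); assumes keras.applications is available\n",
"from keras.applications import VGG16\n",
"\n",
"vgg_reference = VGG16(weights=None, input_shape=(224, 224, 3), classes=5)\n",
"vgg_reference.summary()"
]
},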
{
"cell_type": "code",
"execution_count": 26,
"metadata": {},
"outputs": [
{
"name": "stderr",
"output_type": "stream",
"text": [
"/Users/jonas/Library/Python/3.9/lib/python/site-packages/keras/optimizers/optimizer_v2/adam.py:117: UserWarning: The `lr` argument is deprecated, use `learning_rate` instead.\n",
" super().__init__(name, **kwargs)\n"
]
}
],
"source": [
"from keras.optimizers import Adam\n",
"opt = Adam(lr=0.001)\n",
"model.compile(optimizer=opt, loss=keras.losses.sparse_categorical_crossentropy, metrics=['accuracy'])"
]
},
{
"cell_type": "code",
"execution_count": 27,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"Model: \"sequential_1\"\n",
"_________________________________________________________________\n",
" Layer (type) Output Shape Param # \n",
"=================================================================\n",
" conv2d_13 (Conv2D) (None, 224, 224, 64) 1792 \n",
" \n",
" conv2d_14 (Conv2D) (None, 224, 224, 64) 36928 \n",
" \n",
" max_pooling2d_4 (MaxPooling (None, 112, 112, 64) 0 \n",
" 2D) \n",
" \n",
" conv2d_15 (Conv2D) (None, 112, 112, 128) 73856 \n",
" \n",
" conv2d_16 (Conv2D) (None, 112, 112, 128) 147584 \n",
" \n",
" max_pooling2d_5 (MaxPooling (None, 56, 56, 128) 0 \n",
" 2D) \n",
" \n",
" conv2d_17 (Conv2D) (None, 56, 56, 256) 295168 \n",
" \n",
" conv2d_18 (Conv2D) (None, 56, 56, 256) 590080 \n",
" \n",
" conv2d_19 (Conv2D) (None, 56, 56, 256) 590080 \n",
" \n",
" max_pooling2d_6 (MaxPooling (None, 28, 28, 256) 0 \n",
" 2D) \n",
" \n",
" conv2d_20 (Conv2D) (None, 28, 28, 512) 1180160 \n",
" \n",
" conv2d_21 (Conv2D) (None, 28, 28, 512) 2359808 \n",
" \n",
" conv2d_22 (Conv2D) (None, 28, 28, 512) 2359808 \n",
" \n",
" max_pooling2d_7 (MaxPooling (None, 14, 14, 512) 0 \n",
" 2D) \n",
" \n",
" conv2d_23 (Conv2D) (None, 14, 14, 512) 2359808 \n",
" \n",
" conv2d_24 (Conv2D) (None, 14, 14, 512) 2359808 \n",
" \n",
" conv2d_25 (Conv2D) (None, 14, 14, 512) 2359808 \n",
" \n",
" flatten_1 (Flatten) (None, 100352) 0 \n",
" \n",
" dense_3 (Dense) (None, 4096) 411045888 \n",
" \n",
" dense_4 (Dense) (None, 4096) 16781312 \n",
" \n",
" dense_5 (Dense) (None, 5) 20485 \n",
" \n",
"=================================================================\n",
"Total params: 442,562,373\n",
"Trainable params: 442,562,373\n",
"Non-trainable params: 0\n",
"_________________________________________________________________\n"
]
}
],
"source": [
"model.summary()"
]
},
{
"cell_type": "code",
"execution_count": 29,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"WARNING:tensorflow:`period` argument is deprecated. Please use `save_freq` to specify the frequency in number of batches seen.\n"
]
},
{
"name": "stderr",
"output_type": "stream",
"text": [
"/var/folders/6b/j4d60ym516x2s6wymzj707rh0000gn/T/ipykernel_9339/4100383455.py:4: UserWarning: `Model.fit_generator` is deprecated and will be removed in a future version. Please use `Model.fit`, which supports generators.\n",
" hist_vgg = model.fit_generator(steps_per_epoch=len(train_ds), generator=train_ds, validation_data= validation_ds, validation_steps=len(validation_ds), epochs=1, callbacks=[checkpoint,early])\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"25/25 [==============================] - ETA: 0s - loss: 1.6126 - accuracy: 0.1663 \n",
"Epoch 1: val_accuracy improved from -inf to 0.18750, saving model to vgg16_1.h5\n",
"25/25 [==============================] - 515s 21s/step - loss: 1.6126 - accuracy: 0.1663 - val_loss: 1.6102 - val_accuracy: 0.1875\n"
]
}
],
"source": [
"from keras.callbacks import ModelCheckpoint, EarlyStopping\n",
"checkpoint = ModelCheckpoint(\"vgg16_1.h5\", monitor='val_accuracy', verbose=1, save_best_only=True, save_weights_only=False, mode='auto', period=1)\n",
"early = EarlyStopping(monitor='val_accuracy', min_delta=0, patience=20, verbose=1, mode='auto')\n",
"hist_vgg = model.fit_generator(steps_per_epoch=len(train_ds), generator=train_ds, validation_data= validation_ds, validation_steps=len(validation_ds), epochs=1, callbacks=[checkpoint,early])"
]
},
{
"cell_type": "code",
"execution_count": 30,
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"<Figure size 640x480 with 1 Axes>"
]
},
"metadata": {},
"output_type": "display_data"
}
],
"source": [
"import matplotlib.pyplot as plt\n",
"plt.plot(hist_vgg.history[\"accuracy\"])\n",
"plt.plot(hist_vgg.history['val_accuracy'])\n",
"plt.plot(hist_vgg.history['loss'])\n",
"plt.plot(hist_vgg.history['val_loss'])\n",
"plt.title(\"Model accuracy\")\n",
"plt.ylabel(\"Value\")\n",
"plt.xlabel(\"Epoch\")\n",
"plt.legend([\"Accuracy\",\"Validation Accuracy\",\"Loss\",\"Validation Loss\"])\n",
"plt.show()"
]
},
{
"cell_type": "code",
"execution_count": 31,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"8/8 [==============================] - 32s 4s/step - loss: 1.6094 - accuracy: 0.1992\n"
]
},
{
"data": {
"text/plain": [
"[1.609419822692871, 0.19921875]"
]
},
"execution_count": 31,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"model.evaluate(test_ds)"
]
},
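{
"attachments": {},
"cell_type": "markdown",
"metadata": {},
"source": [
"Because `ModelCheckpoint` above writes the best-performing weights to `vgg16_1.h5`, the saved model can be restored and evaluated independently of the in-memory one. A minimal sketch (not part of the original run):"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Sketch: reload the checkpoint saved by ModelCheckpoint and evaluate it on the test set\n",
"from keras.models import load_model\n",
"\n",
"best_vgg = load_model(\"vgg16_1.h5\")\n",
"best_vgg.evaluate(test_ds)"
]
},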
{
"attachments": {},
"cell_type": "markdown",
"metadata": {},
"source": [
"## ResNet50"
]
},
{
"cell_type": "code",
"execution_count": 32,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"Downloading data from https://storage.googleapis.com/tensorflow/keras-applications/resnet/resnet50_weights_tf_dim_ordering_tf_kernels_notop.h5\n",
"94765736/94765736 [==============================] - 5s 0us/step\n"
]
}
],
"source": [
"from keras.layers import Input, Lambda, Dense, Flatten\n",
"from keras.models import Model\n",
"from keras.applications import ResNet50\n",
"from keras.preprocessing import image\n",
"from keras.preprocessing.image import ImageDataGenerator\n",
"from keras.models import Sequential\n",
"import numpy as np\n",
"from glob import glob\n",
"import matplotlib.pyplot as plt\n",
"\n",
"# re-size all the images to this\n",
"IMAGE_SIZE = [224, 224]\n",
"\n",
"# add preprocessing layer to the front of resnet\n",
"resnet = ResNet50(input_shape=IMAGE_SIZE + [3], weights='imagenet', include_top=False)\n",
"\n",
"# don't train existing weights\n",
"for layer in resnet.layers:\n",
" layer.trainable = False\n",
" \n",
" # useful for getting number of classes\n",
"classes = 5\n",
" \n",
"\n",
"# our layers - you can add more if you want\n",
"x = Flatten()(resnet.output)\n",
"# x = Dense(1000, activation='relu')(x)\n",
"prediction = Dense(5, activation='softmax')(x)"
]
},
{
"cell_type": "code",
"execution_count": 33,
2023-01-06 03:02:47 +01:00
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"Model: \"model\"\n",
"__________________________________________________________________________________________________\n",
" Layer (type) Output Shape Param # Connected to \n",
"==================================================================================================\n",
" input_1 (InputLayer) [(None, 224, 224, 3 0 [] \n",
" )] \n",
" \n",
" conv1_pad (ZeroPadding2D) (None, 230, 230, 3) 0 ['input_1[0][0]'] \n",
" \n",
" conv1_conv (Conv2D) (None, 112, 112, 64 9472 ['conv1_pad[0][0]'] \n",
" ) \n",
" \n",
" conv1_bn (BatchNormalization) (None, 112, 112, 64 256 ['conv1_conv[0][0]'] \n",
" ) \n",
" \n",
" conv1_relu (Activation) (None, 112, 112, 64 0 ['conv1_bn[0][0]'] \n",
" ) \n",
" \n",
" pool1_pad (ZeroPadding2D) (None, 114, 114, 64 0 ['conv1_relu[0][0]'] \n",
" ) \n",
" \n",
" pool1_pool (MaxPooling2D) (None, 56, 56, 64) 0 ['pool1_pad[0][0]'] \n",
" \n",
" conv2_block1_1_conv (Conv2D) (None, 56, 56, 64) 4160 ['pool1_pool[0][0]'] \n",
" \n",
" conv2_block1_1_bn (BatchNormal (None, 56, 56, 64) 256 ['conv2_block1_1_conv[0][0]'] \n",
" ization) \n",
" \n",
" conv2_block1_1_relu (Activatio (None, 56, 56, 64) 0 ['conv2_block1_1_bn[0][0]'] \n",
" n) \n",
" \n",
" conv2_block1_2_conv (Conv2D) (None, 56, 56, 64) 36928 ['conv2_block1_1_relu[0][0]'] \n",
" \n",
" conv2_block1_2_bn (BatchNormal (None, 56, 56, 64) 256 ['conv2_block1_2_conv[0][0]'] \n",
" ization) \n",
" \n",
" conv2_block1_2_relu (Activatio (None, 56, 56, 64) 0 ['conv2_block1_2_bn[0][0]'] \n",
" n) \n",
" \n",
" conv2_block1_0_conv (Conv2D) (None, 56, 56, 256) 16640 ['pool1_pool[0][0]'] \n",
" \n",
" conv2_block1_3_conv (Conv2D) (None, 56, 56, 256) 16640 ['conv2_block1_2_relu[0][0]'] \n",
" \n",
" conv2_block1_0_bn (BatchNormal (None, 56, 56, 256) 1024 ['conv2_block1_0_conv[0][0]'] \n",
" ization) \n",
" \n",
" conv2_block1_3_bn (BatchNormal (None, 56, 56, 256) 1024 ['conv2_block1_3_conv[0][0]'] \n",
" ization) \n",
" \n",
" conv2_block1_add (Add) (None, 56, 56, 256) 0 ['conv2_block1_0_bn[0][0]', \n",
" 'conv2_block1_3_bn[0][0]'] \n",
" \n",
" conv2_block1_out (Activation) (None, 56, 56, 256) 0 ['conv2_block1_add[0][0]'] \n",
" \n",
" conv2_block2_1_conv (Conv2D) (None, 56, 56, 64) 16448 ['conv2_block1_out[0][0]'] \n",
" \n",
" conv2_block2_1_bn (BatchNormal (None, 56, 56, 64) 256 ['conv2_block2_1_conv[0][0]'] \n",
" ization) \n",
" \n",
" conv2_block2_1_relu (Activatio (None, 56, 56, 64) 0 ['conv2_block2_1_bn[0][0]'] \n",
" n) \n",
" \n",
" conv2_block2_2_conv (Conv2D) (None, 56, 56, 64) 36928 ['conv2_block2_1_relu[0][0]'] \n",
" \n",
" conv2_block2_2_bn (BatchNormal (None, 56, 56, 64) 256 ['conv2_block2_2_conv[0][0]'] \n",
" ization) \n",
" \n",
" conv2_block2_2_relu (Activatio (None, 56, 56, 64) 0 ['conv2_block2_2_bn[0][0]'] \n",
" n) \n",
" \n",
" conv2_block2_3_conv (Conv2D) (None, 56, 56, 256) 16640 ['conv2_block2_2_relu[0][0]'] \n",
" \n",
" conv2_block2_3_bn (BatchNormal (None, 56, 56, 256) 1024 ['conv2_block2_3_conv[0][0]'] \n",
" ization) \n",
" \n",
" conv2_block2_add (Add) (None, 56, 56, 256) 0 ['conv2_block1_out[0][0]', \n",
" 'conv2_block2_3_bn[0][0]'] \n",
" \n",
" conv2_block2_out (Activation) (None, 56, 56, 256) 0 ['conv2_block2_add[0][0]'] \n",
" \n",
" conv2_block3_1_conv (Conv2D) (None, 56, 56, 64) 16448 ['conv2_block2_out[0][0]'] \n",
" \n",
" conv2_block3_1_bn (BatchNormal (None, 56, 56, 64) 256 ['conv2_block3_1_conv[0][0]'] \n",
" ization) \n",
" \n",
" conv2_block3_1_relu (Activatio (None, 56, 56, 64) 0 ['conv2_block3_1_bn[0][0]'] \n",
" n) \n",
" \n",
" conv2_block3_2_conv (Conv2D) (None, 56, 56, 64) 36928 ['conv2_block3_1_relu[0][0]'] \n",
" \n",
" conv2_block3_2_bn (BatchNormal (None, 56, 56, 64) 256 ['conv2_block3_2_conv[0][0]'] \n",
" ization) \n",
" \n",
" conv2_block3_2_relu (Activatio (None, 56, 56, 64) 0 ['conv2_block3_2_bn[0][0]'] \n",
" n) \n",
" \n",
" conv2_block3_3_conv (Conv2D) (None, 56, 56, 256) 16640 ['conv2_block3_2_relu[0][0]'] \n",
" \n",
" conv2_block3_3_bn (BatchNormal (None, 56, 56, 256) 1024 ['conv2_block3_3_conv[0][0]'] \n",
" ization) \n",
" \n",
" conv2_block3_add (Add) (None, 56, 56, 256) 0 ['conv2_block2_out[0][0]', \n",
" 'conv2_block3_3_bn[0][0]'] \n",
" \n",
" conv2_block3_out (Activation) (None, 56, 56, 256) 0 ['conv2_block3_add[0][0]'] \n",
" \n",
" conv3_block1_1_conv (Conv2D) (None, 28, 28, 128) 32896 ['conv2_block3_out[0][0]'] \n",
" \n",
" conv3_block1_1_bn (BatchNormal (None, 28, 28, 128) 512 ['conv3_block1_1_conv[0][0]'] \n",
" ization) \n",
" \n",
" conv3_block1_1_relu (Activatio (None, 28, 28, 128) 0 ['conv3_block1_1_bn[0][0]'] \n",
" n) \n",
" \n",
" conv3_block1_2_conv (Conv2D) (None, 28, 28, 128) 147584 ['conv3_block1_1_relu[0][0]'] \n",
" \n",
" conv3_block1_2_bn (BatchNormal (None, 28, 28, 128) 512 ['conv3_block1_2_conv[0][0]'] \n",
" ization) \n",
" \n",
" conv3_block1_2_relu (Activatio (None, 28, 28, 128) 0 ['conv3_block1_2_bn[0][0]'] \n",
" n) \n",
" \n",
" conv3_block1_0_conv (Conv2D) (None, 28, 28, 512) 131584 ['conv2_block3_out[0][0]'] \n",
" \n",
" conv3_block1_3_conv (Conv2D) (None, 28, 28, 512) 66048 ['conv3_block1_2_relu[0][0]'] \n",
" \n",
" conv3_block1_0_bn (BatchNormal (None, 28, 28, 512) 2048 ['conv3_block1_0_conv[0][0]'] \n",
" ization) \n",
" \n",
" conv3_block1_3_bn (BatchNormal (None, 28, 28, 512) 2048 ['conv3_block1_3_conv[0][0]'] \n",
" ization) \n",
" \n",
" conv3_block1_add (Add) (None, 28, 28, 512) 0 ['conv3_block1_0_bn[0][0]', \n",
" 'conv3_block1_3_bn[0][0]'] \n",
" \n",
" conv3_block1_out (Activation) (None, 28, 28, 512) 0 ['conv3_block1_add[0][0]'] \n",
" \n",
" conv3_block2_1_conv (Conv2D) (None, 28, 28, 128) 65664 ['conv3_block1_out[0][0]'] \n",
" \n",
" conv3_block2_1_bn (BatchNormal (None, 28, 28, 128) 512 ['conv3_block2_1_conv[0][0]'] \n",
" ization) \n",
" \n",
" conv3_block2_1_relu (Activatio (None, 28, 28, 128) 0 ['conv3_block2_1_bn[0][0]'] \n",
" n) \n",
" \n",
" conv3_block2_2_conv (Conv2D) (None, 28, 28, 128) 147584 ['conv3_block2_1_relu[0][0]'] \n",
" \n",
" conv3_block2_2_bn (BatchNormal (None, 28, 28, 128) 512 ['conv3_block2_2_conv[0][0]'] \n",
" ization) \n",
" \n",
" conv3_block2_2_relu (Activatio (None, 28, 28, 128) 0 ['conv3_block2_2_bn[0][0]'] \n",
" n) \n",
" \n",
" conv3_block2_3_conv (Conv2D) (None, 28, 28, 512) 66048 ['conv3_block2_2_relu[0][0]'] \n",
" \n",
" conv3_block2_3_bn (BatchNormal (None, 28, 28, 512) 2048 ['conv3_block2_3_conv[0][0]'] \n",
" ization) \n",
" \n",
" conv3_block2_add (Add) (None, 28, 28, 512) 0 ['conv3_block1_out[0][0]', \n",
" 'conv3_block2_3_bn[0][0]'] \n",
" \n",
" conv3_block2_out (Activation) (None, 28, 28, 512) 0 ['conv3_block2_add[0][0]'] \n",
" \n",
" conv3_block3_1_conv (Conv2D) (None, 28, 28, 128) 65664 ['conv3_block2_out[0][0]'] \n",
" \n",
" conv3_block3_1_bn (BatchNormal (None, 28, 28, 128) 512 ['conv3_block3_1_conv[0][0]'] \n",
" ization) \n",
" \n",
" conv3_block3_1_relu (Activatio (None, 28, 28, 128) 0 ['conv3_block3_1_bn[0][0]'] \n",
" n) \n",
" \n",
" conv3_block3_2_conv (Conv2D) (None, 28, 28, 128) 147584 ['conv3_block3_1_relu[0][0]'] \n",
" \n",
" conv3_block3_2_bn (BatchNormal (None, 28, 28, 128) 512 ['conv3_block3_2_conv[0][0]'] \n",
" ization) \n",
" \n",
" conv3_block3_2_relu (Activatio (None, 28, 28, 128) 0 ['conv3_block3_2_bn[0][0]'] \n",
" n) \n",
" \n",
" conv3_block3_3_conv (Conv2D) (None, 28, 28, 512) 66048 ['conv3_block3_2_relu[0][0]'] \n",
" \n",
" conv3_block3_3_bn (BatchNormal (None, 28, 28, 512) 2048 ['conv3_block3_3_conv[0][0]'] \n",
" ization) \n",
" \n",
" conv3_block3_add (Add) (None, 28, 28, 512) 0 ['conv3_block2_out[0][0]', \n",
" 'conv3_block3_3_bn[0][0]'] \n",
" \n",
" conv3_block3_out (Activation) (None, 28, 28, 512) 0 ['conv3_block3_add[0][0]'] \n",
" \n",
" conv3_block4_1_conv (Conv2D) (None, 28, 28, 128) 65664 ['conv3_block3_out[0][0]'] \n",
" \n",
" conv3_block4_1_bn (BatchNormal (None, 28, 28, 128) 512 ['conv3_block4_1_conv[0][0]'] \n",
" ization) \n",
" \n",
" conv3_block4_1_relu (Activatio (None, 28, 28, 128) 0 ['conv3_block4_1_bn[0][0]'] \n",
" n) \n",
" \n",
" conv3_block4_2_conv (Conv2D) (None, 28, 28, 128) 147584 ['conv3_block4_1_relu[0][0]'] \n",
" \n",
" conv3_block4_2_bn (BatchNormal (None, 28, 28, 128) 512 ['conv3_block4_2_conv[0][0]'] \n",
" ization) \n",
" \n",
" conv3_block4_2_relu (Activatio (None, 28, 28, 128) 0 ['conv3_block4_2_bn[0][0]'] \n",
" n) \n",
" \n",
" conv3_block4_3_conv (Conv2D) (None, 28, 28, 512) 66048 ['conv3_block4_2_relu[0][0]'] \n",
" \n",
" conv3_block4_3_bn (BatchNormal (None, 28, 28, 512) 2048 ['conv3_block4_3_conv[0][0]'] \n",
" ization) \n",
" \n",
" conv3_block4_add (Add) (None, 28, 28, 512) 0 ['conv3_block3_out[0][0]', \n",
" 'conv3_block4_3_bn[0][0]'] \n",
" \n",
" conv3_block4_out (Activation) (None, 28, 28, 512) 0 ['conv3_block4_add[0][0]'] \n",
" \n",
" conv4_block1_1_conv (Conv2D) (None, 14, 14, 256) 131328 ['conv3_block4_out[0][0]'] \n",
" \n",
" conv4_block1_1_bn (BatchNormal (None, 14, 14, 256) 1024 ['conv4_block1_1_conv[0][0]'] \n",
" ization) \n",
" \n",
" conv4_block1_1_relu (Activatio (None, 14, 14, 256) 0 ['conv4_block1_1_bn[0][0]'] \n",
" n) \n",
" \n",
" conv4_block1_2_conv (Conv2D) (None, 14, 14, 256) 590080 ['conv4_block1_1_relu[0][0]'] \n",
" \n",
" conv4_block1_2_bn (BatchNormal (None, 14, 14, 256) 1024 ['conv4_block1_2_conv[0][0]'] \n",
" ization) \n",
" \n",
" conv4_block1_2_relu (Activatio (None, 14, 14, 256) 0 ['conv4_block1_2_bn[0][0]'] \n",
" n) \n",
" \n",
" conv4_block1_0_conv (Conv2D) (None, 14, 14, 1024 525312 ['conv3_block4_out[0][0]'] \n",
" ) \n",
" \n",
" conv4_block1_3_conv (Conv2D) (None, 14, 14, 1024 263168 ['conv4_block1_2_relu[0][0]'] \n",
" ) \n",
" \n",
" conv4_block1_0_bn (BatchNormal (None, 14, 14, 1024 4096 ['conv4_block1_0_conv[0][0]'] \n",
" ization) ) \n",
" \n",
" conv4_block1_3_bn (BatchNormal (None, 14, 14, 1024 4096 ['conv4_block1_3_conv[0][0]'] \n",
" ization) ) \n",
" \n",
" conv4_block1_add (Add) (None, 14, 14, 1024 0 ['conv4_block1_0_bn[0][0]', \n",
" ) 'conv4_block1_3_bn[0][0]'] \n",
" \n",
" conv4_block1_out (Activation) (None, 14, 14, 1024 0 ['conv4_block1_add[0][0]'] \n",
" ) \n",
" \n",
" conv4_block2_1_conv (Conv2D) (None, 14, 14, 256) 262400 ['conv4_block1_out[0][0]'] \n",
" \n",
" conv4_block2_1_bn (BatchNormal (None, 14, 14, 256) 1024 ['conv4_block2_1_conv[0][0]'] \n",
" ization) \n",
" \n",
" conv4_block2_1_relu (Activatio (None, 14, 14, 256) 0 ['conv4_block2_1_bn[0][0]'] \n",
" n) \n",
" \n",
" conv4_block2_2_conv (Conv2D) (None, 14, 14, 256) 590080 ['conv4_block2_1_relu[0][0]'] \n",
" \n",
" conv4_block2_2_bn (BatchNormal (None, 14, 14, 256) 1024 ['conv4_block2_2_conv[0][0]'] \n",
" ization) \n",
" \n",
" conv4_block2_2_relu (Activatio (None, 14, 14, 256) 0 ['conv4_block2_2_bn[0][0]'] \n",
" n) \n",
" \n",
" conv4_block2_3_conv (Conv2D) (None, 14, 14, 1024 263168 ['conv4_block2_2_relu[0][0]'] \n",
" ) \n",
" \n",
" conv4_block2_3_bn (BatchNormal (None, 14, 14, 1024 4096 ['conv4_block2_3_conv[0][0]'] \n",
" ization) ) \n",
" \n",
" conv4_block2_add (Add) (None, 14, 14, 1024 0 ['conv4_block1_out[0][0]', \n",
" ) 'conv4_block2_3_bn[0][0]'] \n",
" \n",
" conv4_block2_out (Activation) (None, 14, 14, 1024 0 ['conv4_block2_add[0][0]'] \n",
" ) \n",
" \n",
" conv4_block3_1_conv (Conv2D) (None, 14, 14, 256) 262400 ['conv4_block2_out[0][0]'] \n",
" \n",
" conv4_block3_1_bn (BatchNormal (None, 14, 14, 256) 1024 ['conv4_block3_1_conv[0][0]'] \n",
" ization) \n",
" \n",
" conv4_block3_1_relu (Activatio (None, 14, 14, 256) 0 ['conv4_block3_1_bn[0][0]'] \n",
" n) \n",
" \n",
" conv4_block3_2_conv (Conv2D) (None, 14, 14, 256) 590080 ['conv4_block3_1_relu[0][0]'] \n",
" \n",
" conv4_block3_2_bn (BatchNormal (None, 14, 14, 256) 1024 ['conv4_block3_2_conv[0][0]'] \n",
" ization) \n",
" \n",
" conv4_block3_2_relu (Activatio (None, 14, 14, 256) 0 ['conv4_block3_2_bn[0][0]'] \n",
" n) \n",
" \n",
" conv4_block3_3_conv (Conv2D) (None, 14, 14, 1024 263168 ['conv4_block3_2_relu[0][0]'] \n",
" ) \n",
" \n",
" conv4_block3_3_bn (BatchNormal (None, 14, 14, 1024 4096 ['conv4_block3_3_conv[0][0]'] \n",
" ization) ) \n",
" \n",
" conv4_block3_add (Add) (None, 14, 14, 1024 0 ['conv4_block2_out[0][0]', \n",
" ) 'conv4_block3_3_bn[0][0]'] \n",
" \n",
" conv4_block3_out (Activation) (None, 14, 14, 1024 0 ['conv4_block3_add[0][0]'] \n",
" ) \n",
" \n",
" conv4_block4_1_conv (Conv2D) (None, 14, 14, 256) 262400 ['conv4_block3_out[0][0]'] \n",
" \n",
" conv4_block4_1_bn (BatchNormal (None, 14, 14, 256) 1024 ['conv4_block4_1_conv[0][0]'] \n",
" ization) \n",
" \n",
" conv4_block4_1_relu (Activatio (None, 14, 14, 256) 0 ['conv4_block4_1_bn[0][0]'] \n",
" n) \n",
" \n",
" conv4_block4_2_conv (Conv2D) (None, 14, 14, 256) 590080 ['conv4_block4_1_relu[0][0]'] \n",
" \n",
" conv4_block4_2_bn (BatchNormal (None, 14, 14, 256) 1024 ['conv4_block4_2_conv[0][0]'] \n",
" ization) \n",
" \n",
" conv4_block4_2_relu (Activatio (None, 14, 14, 256) 0 ['conv4_block4_2_bn[0][0]'] \n",
" n) \n",
" \n",
" conv4_block4_3_conv (Conv2D) (None, 14, 14, 1024 263168 ['conv4_block4_2_relu[0][0]'] \n",
" ) \n",
" \n",
" conv4_block4_3_bn (BatchNormal (None, 14, 14, 1024 4096 ['conv4_block4_3_conv[0][0]'] \n",
" ization) ) \n",
" \n",
" conv4_block4_add (Add) (None, 14, 14, 1024 0 ['conv4_block3_out[0][0]', \n",
" ) 'conv4_block4_3_bn[0][0]'] \n",
" \n",
" conv4_block4_out (Activation) (None, 14, 14, 1024 0 ['conv4_block4_add[0][0]'] \n",
" ) \n",
" \n",
" conv4_block5_1_conv (Conv2D) (None, 14, 14, 256) 262400 ['conv4_block4_out[0][0]'] \n",
" \n",
" conv4_block5_1_bn (BatchNormal (None, 14, 14, 256) 1024 ['conv4_block5_1_conv[0][0]'] \n",
" ization) \n",
" \n",
" conv4_block5_1_relu (Activatio (None, 14, 14, 256) 0 ['conv4_block5_1_bn[0][0]'] \n",
" n) \n",
" \n",
" conv4_block5_2_conv (Conv2D) (None, 14, 14, 256) 590080 ['conv4_block5_1_relu[0][0]'] \n",
" \n",
" conv4_block5_2_bn (BatchNormal (None, 14, 14, 256) 1024 ['conv4_block5_2_conv[0][0]'] \n",
" ization) \n",
" \n",
" conv4_block5_2_relu (Activatio (None, 14, 14, 256) 0 ['conv4_block5_2_bn[0][0]'] \n",
" n) \n",
" \n",
" conv4_block5_3_conv (Conv2D) (None, 14, 14, 1024 263168 ['conv4_block5_2_relu[0][0]'] \n",
" ) \n",
" \n",
" conv4_block5_3_bn (BatchNormal (None, 14, 14, 1024 4096 ['conv4_block5_3_conv[0][0]'] \n",
" ization) ) \n",
" \n",
" conv4_block5_add (Add) (None, 14, 14, 1024 0 ['conv4_block4_out[0][0]', \n",
" ) 'conv4_block5_3_bn[0][0]'] \n",
" \n",
" conv4_block5_out (Activation) (None, 14, 14, 1024 0 ['conv4_block5_add[0][0]'] \n",
" ) \n",
" \n",
" conv4_block6_1_conv (Conv2D) (None, 14, 14, 256) 262400 ['conv4_block5_out[0][0]'] \n",
" \n",
" conv4_block6_1_bn (BatchNormal (None, 14, 14, 256) 1024 ['conv4_block6_1_conv[0][0]'] \n",
" ization) \n",
" \n",
" conv4_block6_1_relu (Activatio (None, 14, 14, 256) 0 ['conv4_block6_1_bn[0][0]'] \n",
" n) \n",
" \n",
" conv4_block6_2_conv (Conv2D) (None, 14, 14, 256) 590080 ['conv4_block6_1_relu[0][0]'] \n",
" \n",
" conv4_block6_2_bn (BatchNormal (None, 14, 14, 256) 1024 ['conv4_block6_2_conv[0][0]'] \n",
" ization) \n",
" \n",
" conv4_block6_2_relu (Activatio (None, 14, 14, 256) 0 ['conv4_block6_2_bn[0][0]'] \n",
" n) \n",
" \n",
" conv4_block6_3_conv (Conv2D) (None, 14, 14, 1024 263168 ['conv4_block6_2_relu[0][0]'] \n",
" ) \n",
" \n",
" conv4_block6_3_bn (BatchNormal (None, 14, 14, 1024 4096 ['conv4_block6_3_conv[0][0]'] \n",
" ization) ) \n",
" \n",
" conv4_block6_add (Add) (None, 14, 14, 1024 0 ['conv4_block5_out[0][0]', \n",
" ) 'conv4_block6_3_bn[0][0]'] \n",
" \n",
" conv4_block6_out (Activation) (None, 14, 14, 1024 0 ['conv4_block6_add[0][0]'] \n",
" ) \n",
" \n",
" conv5_block1_1_conv (Conv2D) (None, 7, 7, 512) 524800 ['conv4_block6_out[0][0]'] \n",
" \n",
" conv5_block1_1_bn (BatchNormal (None, 7, 7, 512) 2048 ['conv5_block1_1_conv[0][0]'] \n",
" ization) \n",
" \n",
" conv5_block1_1_relu (Activatio (None, 7, 7, 512) 0 ['conv5_block1_1_bn[0][0]'] \n",
" n) \n",
" \n",
" conv5_block1_2_conv (Conv2D) (None, 7, 7, 512) 2359808 ['conv5_block1_1_relu[0][0]'] \n",
" \n",
" conv5_block1_2_bn (BatchNormal (None, 7, 7, 512) 2048 ['conv5_block1_2_conv[0][0]'] \n",
" ization) \n",
" \n",
" conv5_block1_2_relu (Activatio (None, 7, 7, 512) 0 ['conv5_block1_2_bn[0][0]'] \n",
" n) \n",
" \n",
" conv5_block1_0_conv (Conv2D) (None, 7, 7, 2048) 2099200 ['conv4_block6_out[0][0]'] \n",
" \n",
" conv5_block1_3_conv (Conv2D) (None, 7, 7, 2048) 1050624 ['conv5_block1_2_relu[0][0]'] \n",
" \n",
" conv5_block1_0_bn (BatchNormal (None, 7, 7, 2048) 8192 ['conv5_block1_0_conv[0][0]'] \n",
" ization) \n",
" \n",
" conv5_block1_3_bn (BatchNormal (None, 7, 7, 2048) 8192 ['conv5_block1_3_conv[0][0]'] \n",
" ization) \n",
" \n",
" conv5_block1_add (Add) (None, 7, 7, 2048) 0 ['conv5_block1_0_bn[0][0]', \n",
" 'conv5_block1_3_bn[0][0]'] \n",
" \n",
" conv5_block1_out (Activation) (None, 7, 7, 2048) 0 ['conv5_block1_add[0][0]'] \n",
" \n",
" conv5_block2_1_conv (Conv2D) (None, 7, 7, 512) 1049088 ['conv5_block1_out[0][0]'] \n",
" \n",
" conv5_block2_1_bn (BatchNormal (None, 7, 7, 512) 2048 ['conv5_block2_1_conv[0][0]'] \n",
" ization) \n",
" \n",
" conv5_block2_1_relu (Activatio (None, 7, 7, 512) 0 ['conv5_block2_1_bn[0][0]'] \n",
" n) \n",
" \n",
" conv5_block2_2_conv (Conv2D) (None, 7, 7, 512) 2359808 ['conv5_block2_1_relu[0][0]'] \n",
" \n",
" conv5_block2_2_bn (BatchNormal (None, 7, 7, 512) 2048 ['conv5_block2_2_conv[0][0]'] \n",
" ization) \n",
" \n",
" conv5_block2_2_relu (Activatio (None, 7, 7, 512) 0 ['conv5_block2_2_bn[0][0]'] \n",
" n) \n",
" \n",
" conv5_block2_3_conv (Conv2D) (None, 7, 7, 2048) 1050624 ['conv5_block2_2_relu[0][0]'] \n",
" \n",
" conv5_block2_3_bn (BatchNormal (None, 7, 7, 2048) 8192 ['conv5_block2_3_conv[0][0]'] \n",
" ization) \n",
" \n",
" conv5_block2_add (Add) (None, 7, 7, 2048) 0 ['conv5_block1_out[0][0]', \n",
" 'conv5_block2_3_bn[0][0]'] \n",
" \n",
" conv5_block2_out (Activation) (None, 7, 7, 2048) 0 ['conv5_block2_add[0][0]'] \n",
" \n",
" conv5_block3_1_conv (Conv2D) (None, 7, 7, 512) 1049088 ['conv5_block2_out[0][0]'] \n",
" \n",
" conv5_block3_1_bn (BatchNormal (None, 7, 7, 512) 2048 ['conv5_block3_1_conv[0][0]'] \n",
" ization) \n",
" \n",
" conv5_block3_1_relu (Activatio (None, 7, 7, 512) 0 ['conv5_block3_1_bn[0][0]'] \n",
" n) \n",
" \n",
" conv5_block3_2_conv (Conv2D) (None, 7, 7, 512) 2359808 ['conv5_block3_1_relu[0][0]'] \n",
" \n",
" conv5_block3_2_bn (BatchNormal (None, 7, 7, 512) 2048 ['conv5_block3_2_conv[0][0]'] \n",
" ization) \n",
" \n",
" conv5_block3_2_relu (Activatio (None, 7, 7, 512) 0 ['conv5_block3_2_bn[0][0]'] \n",
" n) \n",
" \n",
" conv5_block3_3_conv (Conv2D) (None, 7, 7, 2048) 1050624 ['conv5_block3_2_relu[0][0]'] \n",
" \n",
" conv5_block3_3_bn (BatchNormal (None, 7, 7, 2048) 8192 ['conv5_block3_3_conv[0][0]'] \n",
" ization) \n",
" \n",
" conv5_block3_add (Add) (None, 7, 7, 2048) 0 ['conv5_block2_out[0][0]', \n",
" 'conv5_block3_3_bn[0][0]'] \n",
" \n",
" conv5_block3_out (Activation) (None, 7, 7, 2048) 0 ['conv5_block3_add[0][0]'] \n",
" \n",
" flatten_2 (Flatten) (None, 100352) 0 ['conv5_block3_out[0][0]'] \n",
" \n",
" dense_6 (Dense) (None, 5) 501765 ['flatten_2[0][0]'] \n",
" \n",
"==================================================================================================\n",
"Total params: 24,089,477\n",
"Trainable params: 501,765\n",
"Non-trainable params: 23,587,712\n",
"__________________________________________________________________________________________________\n"
]
}
],
"source": [
"# create a model object\n",
"model = Model(inputs=resnet.input, outputs=prediction)\n",
"\n",
"# view the structure of the model\n",
"model.summary()"
]
},
{
"cell_type": "code",
"execution_count": 34,
"metadata": {},
"outputs": [
{
"name": "stderr",
"output_type": "stream",
"text": [
"/var/folders/6b/j4d60ym516x2s6wymzj707rh0000gn/T/ipykernel_9339/2291254579.py:10: UserWarning: `Model.fit_generator` is deprecated and will be removed in a future version. Please use `Model.fit`, which supports generators.\n",
" r = model.fit_generator(\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"25/25 [==============================] - 54s 2s/step - loss: 7.2784 - accuracy: 0.2350 - val_loss: 2.7387 - val_accuracy: 0.2240\n"
]
}
],
"source": [
"# tell the model what cost and optimization method to use\n",
"model.compile(\n",
" loss='sparse_categorical_crossentropy',\n",
" optimizer='adam',\n",
" metrics=['accuracy']\n",
")\n",
"\n",
"#train_ds_vgg_sw, test_ds_vgg_sw, validation_ds_vgg_sw\n",
"# fit the model\n",
"r = model.fit_generator(\n",
" train_ds,\n",
" validation_data=validation_ds,\n",
" epochs=1,\n",
" steps_per_epoch=len(train_ds),\n",
" validation_steps=len(validation_ds)\n",
")"
]
},
{
"cell_type": "code",
"execution_count": 35,
"metadata": {},
"outputs": [
{
"data": {
"image/png": "iVBORw0KGgoAAAANSUhEUgAAAioAAAHHCAYAAACRAnNyAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjYuMiwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8o6BhiAAAACXBIWXMAAA9hAAAPYQGoP6dpAABGwklEQVR4nO3deXxN597///dOsDMnQkjSpqaaqjHUVHVaVE5JUdTUFDEEHdCietRNi/YUPTpoq3W3vUmOc4qipe1pNUVNNQ8nSg1FU/OsEjEkJNfvDz/7a0uQkGSt8Ho+HuvBvta11vqsJey3a11rb4cxxggAAMCGPKwuAAAA4FoIKgAAwLYIKgAAwLYIKgAAwLYIKgAAwLYIKgAAwLYIKgAAwLYIKgAAwLYIKgAAwLYIKgBy5HA4NHr06Dxv98cff8jhcCghISHfawJw5yGoADaWkJAgh8Mhh8Ohn3/+Odt6Y4wiIiLkcDjUunVrCyoEgIJFUAGKAC8vL02fPj1b+9KlS7V//345nU4LqgKAgkdQAYqAxx9/XLNnz9bFixfd2qdPn666desqNDTUosruHGfOnLG6BOCORFABioCYmBidOHFCCxYscLVlZGRozpw5evrpp3Pc5syZM3rppZcUEREhp9OpqlWr6u2339bVX5ienp6uwYMHKyQkRP7+/nriiSe0f//+HPd54MAB9e7dW2XLlpXT6VSNGjU0derUmzqnkydPaujQoYqMjJSfn58CAgIUHR2tTZs2Zet7/vx5jR49WlWqVJGXl5fCwsL05JNPavfu3a4+WVlZev/99xUZGSkvLy+FhISoZcuWWr9+vaTrz525ej7O6NGj5XA4tHXrVj399NMqWbKk/vKXv0iSfvnlF/Xs2VMVK1aUl5eXQkND1bt3b504cSLH6xUXF6fw8HA5nU5VqFBBzz33nDIyMvT777/L4XDovffey7bdypUr5XA4NGPGjLxeVuC2U8zqAgDcWPny5dWoUSPNmDFD0dHRkqT58+crJSVFTz31lD744AO3/sYYPfHEE1q8eLHi4uJUu3ZtJSYm6uWXX9aBAwfc3hz79Omjf//733r66af10EMP6aefflKrVq2y1XDkyBE9+OCDcjgcGjBggEJCQjR//nzFxcUpNTVVgwYNytM5/f7775o3b546deqkChUq6MiRI/rkk0/UpEkTbd26VeHh4ZKkzMxMtW7dWosWLdJTTz2lF198UadPn9aCBQu0ZcsWVapUSZIUFxenhIQERUdHq0+fPrp48aKWL1+u1atXq169enmq7bJOnTqpcuXKGjt2rCvgLViwQL///rt69eql0NBQ/frrr/r000/166+/avXq1XI4HJKkgwcPqkGDBjp16pT69eunatWq6cCBA5ozZ47Onj2rihUrqnHjxvr88881ePBgt+N+/vnn8vf3V9u2bW+qbuC2YgDYVnx8vJFk1q1bZyZNmmT8/f3N2bNnjTHGdOrUyTRr1swYY0y5cuVMq1atXNvNmzfPSDJ///vf3fbXsWNH43A4zK5du4wxxiQlJRlJ5vnnn3fr9/TTTxtJZtSoUa62uLg4ExYWZo4fP+7W96mnnjKBgYGuupKTk40kEx8ff91zO3/+vMnMzHRrS05ONk6n07z++uuutqlTpxpJ5t133822j6ysLGOMMT/99JORZF544YVr9rleXVef66hRo4wkExMTk63v5fO80owZM4wks2zZMldbbGys8fDwMOvWrbtmTZ988omRZLZt2+Zal5GRYUqXLm169OiRbTvgTsStH6CI6Ny5s86dO6f//Oc/On36tP7zn/9c87bP999/L09PT73wwgtu7S+99JKMMZo/f76rn6Rs/a4eHTHG6Msvv1SbNm1kjNHx48ddS4sWLZSSkqKNGzfm6XycTqc8PC79E5SZmakTJ07Iz89PVatWddvXl19+qdKlS2vgwIHZ9nF59OLLL7+Uw+HQqFGjrtnnZjz77LPZ2ry9vV2/P3/+vI4fP64HH3xQklx1Z2Vlad68eWrTpk2OozmXa+rcubO8vLz0+eefu9YlJibq+PHj6tat203XDdxOCCpAERESEqKoqChNnz5dX331lTIzM9WxY8cc++7Zs0fh4eHy9/d3a69evbpr/eVfPTw8XLdPLqtatarb62PHjunUqVP69NNPFRIS4rb06tVLknT06NE8nU9WVpbee+89Va5cWU6nU6VLl1ZISIh++eUXpaSkuPrt3r1bVatWVbFi175TvXv3boWHhys4ODhPNdxIhQoVsrWdPHlSL774osqWLStvb2+FhIS4+l2u+9ixY0pNTdX9999/3f0HBQWpTZs2bk90ff7557rrrrv06KOP5uOZAEUXc1SAIuTpp59W3759dfjwYUVHRysoKKhQjpuVlSVJ6tatm3r06JFjn5o1a+Zpn2PHjtWrr76q3r1764033lBwcLA8PDw0aNAg1/Hy07VGVjIzM6+5zZWjJ5d17txZK1eu1Msvv6zatWvLz89PWVlZatmy5U3VHRsbq9mzZ2vlypWKjIzUN998o+eff9412gTc6QgqQBHSvn17PfPMM1q9erW++OKLa/YrV66cFi5cqNOnT7uNqmzfvt21/vKvWVlZrlGLy3bs2OG2v8tPBGVmZioqKipfzmXOnDlq1qyZpkyZ4tZ+6tQplS5d2vW6UqVKWrNmjS5cuKDixYvnuK9KlSopMTFRJ0+evOaoSsmSJV37v9Ll0aXc+PPPP7Vo0SKNGTNGr732mqt9586dbv1CQkIUEBCgLVu23HCfLVu2VEhIiD7//HM1bNhQZ8+eVffu3XNdE3C7I7IDRYifn58mT56s0aNHq02bNtfs9/jjjyszM1OTJk1ya3/vvffkcDhcTw5d/vXqp4YmTpzo9trT01MdOnTQl19+meOb77Fjx/J8Lp6entkelZ49e7YOHDjg1tahQwcdP34827lIcm3foUMHGWM0ZsyYa/YJCAhQ6dKltWzZMrf1H3/8cZ5qvnKfl119vTw8PNSuXTt9++23rsejc6pJkooVK6aYmBjNmjVLCQkJioyMzPPoFHA7Y0QFKGKudevlSm3atFGzZs00YsQI/fHHH6pVq5Z+/PFHff311xo0aJBrTkrt2rUVExOjjz/+WCkpKXrooYe0aNEi7dq1K9s+x48fr8WLF6thw4bq27ev7rvvPp08eVIbN27UwoULdfLkyTydR+vWrfX666+rV69eeuihh7R582Z9/vnnqlixolu/2NhYTZs2TUOGDNHatWv18MMP68yZM1q4cKGef/55tW3bVs2aNVP37t31wQcfaOfOna7bMMuXL1ezZs00YMAASZcexR4/frz69OmjevXqadmyZfrtt99yXXNAQIAeeeQR/eMf/9CFCxd011136ccff1RycnK2vmPHjtWPP/6oJk2aqF+/fqpevboOHTqk2bNn6+eff3a7bRcbG6sPPvhAixcv1ltvvZWn6wjc9ix73gjADV35ePL1XP14sjHGnD592gwePNiEh4eb4sWLm8qVK5sJEya4Ho297Ny5c+aFF14wp
UqVMr6+vqZNmzZm37592R7ZNcaYI0eOmP79+5uIiAhTvHhxExoaapo3b24+/fRTV5+8PJ780ksvmbCwMOPt7W0aN25sVq1aZZo0aWKaNGni1vfs2bNmxIgRpkKFCq7jduzY0ezevdvV5+LFi2bChAmmWrVqpkSJEiYkJMRER0ebDRs2uO0nLi7OBAYGGn9/f9O5c2dz9OjRaz6efOzYsWx179+/37Rv394EBQWZwMBA06lTJ3Pw4MEcr9eePXtMbGysCQkJMU6n01SsWNH079/fpKenZ9tvjRo1jIeHh9m/f/91rxtwp3EYc9UYJgCg0NWpU0fBwcFatGiR1aUAtsIcFQCw2Pr165WUlKTY2FirSwFshxEVALDIli1btGHDBr3zzjs6fvy4fv/9d3l5eVldFmArjKgAgEXmzJmjXr166cKFC5oxYwYhBcgBIyoAAMC2GFEBAAC2RVABAAC2VaQ/8C0rK0sHDx6Uv7//LX1DKgAAKDzGGJ0+fVrh4eE3/F6rIh1UDh48qIiICKvLAAA
"text/plain": [
"<Figure size 640x480 with 1 Axes>"
]
},
"metadata": {},
"output_type": "display_data"
}
],
"source": [
"# loss\n",
"plt.plot(r.history[\"accuracy\"])\n",
"plt.plot(r.history['val_accuracy'])\n",
"plt.plot(r.history['loss'])\n",
"plt.plot(r.history['val_loss'])\n",
"plt.title(\"Model accuracy\")\n",
"plt.ylabel(\"Value\")\n",
"plt.xlabel(\"Epoch\")\n",
"plt.legend([\"Accuracy\",\"Validation Accuracy\",\"Loss\",\"Validation Loss\"])\n",
"plt.show()\n",
"\n",
"model.save('resnet_1.h5')"
]
},
{
"cell_type": "code",
"execution_count": 36,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"8/8 [==============================] - 12s 1s/step - loss: 2.9013 - accuracy: 0.2031\n"
]
},
{
"data": {
"text/plain": [
"[2.901285171508789, 0.203125]"
]
},
"execution_count": 36,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"model.evaluate(test_ds)"
]
}
],
"metadata": {
"kernelspec": {
"display_name": "Python 3",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.9.6"
},
"orig_nbformat": 4,
"vscode": {
"interpreter": {
"hash": "31f2aee4e71d21fbe5cf8b01ff0e069b9275f58929596ceb00d14d90e3e16cd6"
}
}
},
"nbformat": 4,
"nbformat_minor": 2
}