import pandas as pd
import numpy as np
import glob
from os import walk
TRAIN_DATA_DIR = "./data/cropped_images"
TRAIN_LABELS_PATH = "./data/labels.txt"
labels = pd.read_csv(TRAIN_LABELS_PATH, sep=" ", header=None)[0].to_numpy()
d = {'label':labels}
labels = pd.DataFrame(d)
labels
|      | label                 |
|------|-----------------------|
| 0    | without_mask          |
| 1    | with_mask             |
| 2    | without_mask          |
| 3    | with_mask             |
| 4    | with_mask             |
| ...  | ...                   |
| 4067 | with_mask             |
| 4068 | with_mask             |
| 4069 | mask_weared_incorrect |
| 4070 | with_mask             |
| 4071 | with_mask             |

4072 rows × 1 columns
labels.value_counts()
label
with_mask                3232
without_mask              717
mask_weared_incorrect     123
dtype: int64
classes = np.unique(labels)
classes
array(['mask_weared_incorrect', 'with_mask', 'without_mask'], dtype=object)
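The counts above are heavily skewed toward with_mask. As an optional, hedged sketch (not applied in the training runs shown below), per-class weights could be derived with scikit-learn and passed to model.fit via its class_weight argument:

# Optional sketch: class weights for the imbalanced labels (not used below).
# 'balanced' gives n_samples / (n_classes * count_per_class); the dict keys
# follow the alphabetical class order, which also matches the generators'
# class_indices further down.
from sklearn.utils.class_weight import compute_class_weight

weights = compute_class_weight(class_weight='balanced', classes=classes, y=labels['label'])
class_weight = dict(enumerate(weights))
print(class_weight)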
filenames = []
for i in range(len(labels)):
filenames.append(str(i) + '.png')
d = {'file_name':filenames}
filenames = pd.DataFrame(d)
filenames

|      | file_name |
|------|-----------|
| 0    | 0.png     |
| 1    | 1.png     |
| 2    | 2.png     |
| 3    | 3.png     |
| 4    | 4.png     |
| ...  | ...       |
| 4067 | 4067.png  |
| 4068 | 4068.png  |
| 4069 | 4069.png  |
| 4070 | 4070.png  |
| 4071 | 4071.png  |

4072 rows × 1 columns
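As a quick optional sanity check (a sketch assuming TRAIN_DATA_DIR is a flat directory of N.png crops), the glob import from above can confirm that every generated filename actually exists on disk:

# Sketch: verify that each expected file is present under TRAIN_DATA_DIR.
import os

existing = {os.path.basename(p) for p in glob.glob(os.path.join(TRAIN_DATA_DIR, '*.png'))}
missing = [f for f in filenames['file_name'] if f not in existing]
print(f'{len(missing)} missing files')  # expected: 0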
from sklearn.model_selection import train_test_split
X_train, X_test, Y_train, Y_test = train_test_split(
    filenames, labels, test_size=0.20, stratify=labels, random_state=42)
print(X_train.shape,X_test.shape)
(3257, 1) (815, 1)
Y_train.value_counts()
label
with_mask                2585
without_mask              574
mask_weared_incorrect      98
dtype: int64
# Work on explicit copies so assigning the label column does not raise SettingWithCopyWarning.
X_train = X_train.copy()
X_test = X_test.copy()
X_train['label'] = Y_train
X_test['label'] = Y_test
X_test
|      | file_name | label        |
|------|-----------|--------------|
| 1660 | 1660.png  | with_mask    |
| 2976 | 2976.png  | with_mask    |
| 920  | 920.png   | with_mask    |
| 3984 | 3984.png  | with_mask    |
| 3867 | 3867.png  | with_mask    |
| ...  | ...       | ...          |
| 530  | 530.png   | with_mask    |
| 1344 | 1344.png  | with_mask    |
| 1756 | 1756.png  | with_mask    |
| 1196 | 1196.png  | without_mask |
| 364  | 364.png   | with_mask    |

815 rows × 2 columns
from tensorflow.keras.preprocessing.image import ImageDataGenerator
image_target_size = (128, 128)
train_image_generator = ImageDataGenerator(rescale = 1. / 255.)
train_generator = train_image_generator.flow_from_dataframe(
    dataframe = X_train,
    directory = TRAIN_DATA_DIR,
    x_col = 'file_name',
    y_col = 'label',
    batch_size = 32,
    seed = 42,
    shuffle = True,
    class_mode = 'categorical',
    target_size = image_target_size
)
Found 3257 validated image filenames belonging to 3 classes.
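The generator above only rescales pixel values. If augmentation were wanted, a hedged variant could add light random transforms; the specific values below are illustrative assumptions and were not used for the results reported here:

# Sketch: an augmenting training generator (parameter values are illustrative).
augmenting_image_generator = ImageDataGenerator(
    rescale = 1. / 255.,
    rotation_range = 15,       # small random rotations
    width_shift_range = 0.1,   # horizontal shifts
    height_shift_range = 0.1,  # vertical shifts
    zoom_range = 0.1,
    horizontal_flip = True
)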
class_ind=train_generator.class_indices
class_ind
{'mask_weared_incorrect': 0, 'with_mask': 1, 'without_mask': 2}
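class_indices maps each label name to the index of the corresponding softmax output. A small sketch inverting it, handy later for decoding predictions back into label names:

# Sketch: invert the name -> index mapping so predicted indices can be decoded.
ind_to_class = {v: k for k, v in class_ind.items()}
# -> {0: 'mask_weared_incorrect', 1: 'with_mask', 2: 'without_mask'}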
test_image_generator = ImageDataGenerator(rescale = 1. / 255.)
test_generator = test_image_generator.flow_from_dataframe(
    dataframe = X_test,
    directory = TRAIN_DATA_DIR,
    x_col = 'file_name',
    y_col = 'label',
    batch_size = 32,
    seed = 42,
    shuffle = True,
    class_mode = 'categorical',
    target_size = image_target_size
)
Found 815 validated image filenames belonging to 3 classes.
test_generator.class_indices
{'mask_weared_incorrect': 0, 'with_mask': 1, 'without_mask': 2}
from tensorflow.keras.applications import MobileNetV2
from tensorflow.keras.layers import Input
input_tensor = Input(shape=(128, 128, 3))
mobileNet = MobileNetV2(weights="imagenet", include_top=False, input_tensor=input_tensor)
WARNING:tensorflow:`input_shape` is undefined or non-square, or `rows` is not in [96, 128, 160, 192, 224]. Weights for input shape (224, 224) will be loaded as the default.
from tensorflow.keras.layers import AveragePooling2D
import tensorflow as tf
from tensorflow import keras
from tensorflow.keras import layers
x = mobileNet.output
x = AveragePooling2D(pool_size=(4, 4))(x)
x = layers.Flatten()(x)
x = layers.Dense(256, activation="relu")(x)
x = layers.Dropout(0.5)(x)
x = layers.Dense(64, activation="relu")(x)
outputs = layers.Dense(3, activation='softmax')(x)
model = keras.Model(inputs=mobileNet.input, outputs=outputs)
for layer in mobileNet.layers:
layer.trainable = False
model.compile(loss = 'categorical_crossentropy',
              optimizer = keras.optimizers.Adam(learning_rate=0.001),
metrics = ['accuracy'])
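The run below trains for a fixed 15 epochs. As an optional, hedged addition (not used for the results shown), early stopping and checkpointing callbacks could guard against overfitting; the checkpoint file name is illustrative:

# Sketch: optional callbacks; they would be passed via model.fit(..., callbacks=callbacks).
from tensorflow.keras.callbacks import EarlyStopping, ModelCheckpoint

callbacks = [
    EarlyStopping(monitor='val_loss', patience=3, restore_best_weights=True),
    ModelCheckpoint('face_mask_detection_best.h5', monitor='val_loss', save_best_only=True)
]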
history_1 = model.fit(train_generator, epochs = 15, steps_per_epoch = len(train_generator),
validation_data = test_generator, validation_steps = len(test_generator))
Epoch 1/15
102/102 [==============================] - 31s 277ms/step - loss: 0.5791 - accuracy: 0.8001 - val_loss: 0.4123 - val_accuracy: 0.8466
Epoch 2/15
102/102 [==============================] - 27s 270ms/step - loss: 0.4334 - accuracy: 0.8342 - val_loss: 0.3737 - val_accuracy: 0.8466
Epoch 3/15
102/102 [==============================] - 28s 271ms/step - loss: 0.3859 - accuracy: 0.8505 - val_loss: 0.3362 - val_accuracy: 0.8847
Epoch 4/15
102/102 [==============================] - 28s 270ms/step - loss: 0.3585 - accuracy: 0.8591 - val_loss: 0.3520 - val_accuracy: 0.8540
Epoch 5/15
102/102 [==============================] - 28s 273ms/step - loss: 0.3224 - accuracy: 0.8806 - val_loss: 0.3561 - val_accuracy: 0.8503
Epoch 6/15
102/102 [==============================] - 27s 270ms/step - loss: 0.3080 - accuracy: 0.8766 - val_loss: 0.3010 - val_accuracy: 0.8798
Epoch 7/15
102/102 [==============================] - 30s 291ms/step - loss: 0.2779 - accuracy: 0.8870 - val_loss: 0.2870 - val_accuracy: 0.8982
Epoch 8/15
102/102 [==============================] - 32s 319ms/step - loss: 0.2697 - accuracy: 0.8971 - val_loss: 0.3190 - val_accuracy: 0.8847
Epoch 9/15
102/102 [==============================] - 32s 312ms/step - loss: 0.2526 - accuracy: 0.9011 - val_loss: 0.2778 - val_accuracy: 0.8945
Epoch 10/15
102/102 [==============================] - 31s 304ms/step - loss: 0.2416 - accuracy: 0.9073 - val_loss: 0.2891 - val_accuracy: 0.8883
Epoch 11/15
102/102 [==============================] - 31s 303ms/step - loss: 0.2325 - accuracy: 0.9100 - val_loss: 0.2945 - val_accuracy: 0.8933
Epoch 12/15
102/102 [==============================] - 31s 302ms/step - loss: 0.2156 - accuracy: 0.9128 - val_loss: 0.2748 - val_accuracy: 0.9067
Epoch 13/15
102/102 [==============================] - 33s 328ms/step - loss: 0.2105 - accuracy: 0.9211 - val_loss: 0.3212 - val_accuracy: 0.8994
Epoch 14/15
102/102 [==============================] - 32s 311ms/step - loss: 0.1986 - accuracy: 0.9171 - val_loss: 0.2914 - val_accuracy: 0.9043
Epoch 15/15
102/102 [==============================] - 32s 311ms/step - loss: 0.2002 - accuracy: 0.9254 - val_loss: 0.2541 - val_accuracy: 0.9117
model.save('face_mask_detection2.h5')
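A hedged sketch of reloading the saved model and classifying a single cropped face; the example file is simply the first image of the dataset, and the rescaling mirrors the generators above:

# Sketch: reload the saved model and predict one image.
from tensorflow.keras.models import load_model
from tensorflow.keras.preprocessing import image

reloaded = load_model('face_mask_detection2.h5')
img = image.load_img('./data/cropped_images/0.png', target_size=image_target_size)
arr = image.img_to_array(img) / 255.0             # same rescaling as the generators
probs = reloaded.predict(np.expand_dims(arr, 0))[0]
print(classes[np.argmax(probs)], probs)            # classes has the same order as class_indices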
model.summary()
Model: "model_8" __________________________________________________________________________________________________ Layer (type) Output Shape Param # Connected to ================================================================================================== input_17 (InputLayer) [(None, 128, 128, 3 0 [] )] Conv1 (Conv2D) (None, 64, 64, 32) 864 ['input_17[0][0]'] bn_Conv1 (BatchNormalization) (None, 64, 64, 32) 128 ['Conv1[0][0]'] Conv1_relu (ReLU) (None, 64, 64, 32) 0 ['bn_Conv1[0][0]'] expanded_conv_depthwise (Depth (None, 64, 64, 32) 288 ['Conv1_relu[0][0]'] wiseConv2D) expanded_conv_depthwise_BN (Ba (None, 64, 64, 32) 128 ['expanded_conv_depthwise[0][0]'] tchNormalization) expanded_conv_depthwise_relu ( (None, 64, 64, 32) 0 ['expanded_conv_depthwise_BN[0][0 ReLU) ]'] expanded_conv_project (Conv2D) (None, 64, 64, 16) 512 ['expanded_conv_depthwise_relu[0] [0]'] expanded_conv_project_BN (Batc (None, 64, 64, 16) 64 ['expanded_conv_project[0][0]'] hNormalization) block_1_expand (Conv2D) (None, 64, 64, 96) 1536 ['expanded_conv_project_BN[0][0]' ] block_1_expand_BN (BatchNormal (None, 64, 64, 96) 384 ['block_1_expand[0][0]'] ization) block_1_expand_relu (ReLU) (None, 64, 64, 96) 0 ['block_1_expand_BN[0][0]'] block_1_pad (ZeroPadding2D) (None, 65, 65, 96) 0 ['block_1_expand_relu[0][0]'] block_1_depthwise (DepthwiseCo (None, 32, 32, 96) 864 ['block_1_pad[0][0]'] nv2D) block_1_depthwise_BN (BatchNor (None, 32, 32, 96) 384 ['block_1_depthwise[0][0]'] malization) block_1_depthwise_relu (ReLU) (None, 32, 32, 96) 0 ['block_1_depthwise_BN[0][0]'] block_1_project (Conv2D) (None, 32, 32, 24) 2304 ['block_1_depthwise_relu[0][0]'] block_1_project_BN (BatchNorma (None, 32, 32, 24) 96 ['block_1_project[0][0]'] lization) block_2_expand (Conv2D) (None, 32, 32, 144) 3456 ['block_1_project_BN[0][0]'] block_2_expand_BN (BatchNormal (None, 32, 32, 144) 576 ['block_2_expand[0][0]'] ization) block_2_expand_relu (ReLU) (None, 32, 32, 144) 0 ['block_2_expand_BN[0][0]'] block_2_depthwise (DepthwiseCo (None, 32, 32, 144) 1296 ['block_2_expand_relu[0][0]'] nv2D) block_2_depthwise_BN (BatchNor (None, 32, 32, 144) 576 ['block_2_depthwise[0][0]'] malization) block_2_depthwise_relu (ReLU) (None, 32, 32, 144) 0 ['block_2_depthwise_BN[0][0]'] block_2_project (Conv2D) (None, 32, 32, 24) 3456 ['block_2_depthwise_relu[0][0]'] block_2_project_BN (BatchNorma (None, 32, 32, 24) 96 ['block_2_project[0][0]'] lization) block_2_add (Add) (None, 32, 32, 24) 0 ['block_1_project_BN[0][0]', 'block_2_project_BN[0][0]'] block_3_expand (Conv2D) (None, 32, 32, 144) 3456 ['block_2_add[0][0]'] block_3_expand_BN (BatchNormal (None, 32, 32, 144) 576 ['block_3_expand[0][0]'] ization) block_3_expand_relu (ReLU) (None, 32, 32, 144) 0 ['block_3_expand_BN[0][0]'] block_3_pad (ZeroPadding2D) (None, 33, 33, 144) 0 ['block_3_expand_relu[0][0]'] block_3_depthwise (DepthwiseCo (None, 16, 16, 144) 1296 ['block_3_pad[0][0]'] nv2D) block_3_depthwise_BN (BatchNor (None, 16, 16, 144) 576 ['block_3_depthwise[0][0]'] malization) block_3_depthwise_relu (ReLU) (None, 16, 16, 144) 0 ['block_3_depthwise_BN[0][0]'] block_3_project (Conv2D) (None, 16, 16, 32) 4608 ['block_3_depthwise_relu[0][0]'] block_3_project_BN (BatchNorma (None, 16, 16, 32) 128 ['block_3_project[0][0]'] lization) block_4_expand (Conv2D) (None, 16, 16, 192) 6144 ['block_3_project_BN[0][0]'] block_4_expand_BN (BatchNormal (None, 16, 16, 192) 768 ['block_4_expand[0][0]'] ization) block_4_expand_relu (ReLU) (None, 16, 16, 192) 0 ['block_4_expand_BN[0][0]'] block_4_depthwise (DepthwiseCo (None, 16, 
16, 192) 1728 ['block_4_expand_relu[0][0]'] nv2D) block_4_depthwise_BN (BatchNor (None, 16, 16, 192) 768 ['block_4_depthwise[0][0]'] malization) block_4_depthwise_relu (ReLU) (None, 16, 16, 192) 0 ['block_4_depthwise_BN[0][0]'] block_4_project (Conv2D) (None, 16, 16, 32) 6144 ['block_4_depthwise_relu[0][0]'] block_4_project_BN (BatchNorma (None, 16, 16, 32) 128 ['block_4_project[0][0]'] lization) block_4_add (Add) (None, 16, 16, 32) 0 ['block_3_project_BN[0][0]', 'block_4_project_BN[0][0]'] block_5_expand (Conv2D) (None, 16, 16, 192) 6144 ['block_4_add[0][0]'] block_5_expand_BN (BatchNormal (None, 16, 16, 192) 768 ['block_5_expand[0][0]'] ization) block_5_expand_relu (ReLU) (None, 16, 16, 192) 0 ['block_5_expand_BN[0][0]'] block_5_depthwise (DepthwiseCo (None, 16, 16, 192) 1728 ['block_5_expand_relu[0][0]'] nv2D) block_5_depthwise_BN (BatchNor (None, 16, 16, 192) 768 ['block_5_depthwise[0][0]'] malization) block_5_depthwise_relu (ReLU) (None, 16, 16, 192) 0 ['block_5_depthwise_BN[0][0]'] block_5_project (Conv2D) (None, 16, 16, 32) 6144 ['block_5_depthwise_relu[0][0]'] block_5_project_BN (BatchNorma (None, 16, 16, 32) 128 ['block_5_project[0][0]'] lization) block_5_add (Add) (None, 16, 16, 32) 0 ['block_4_add[0][0]', 'block_5_project_BN[0][0]'] block_6_expand (Conv2D) (None, 16, 16, 192) 6144 ['block_5_add[0][0]'] block_6_expand_BN (BatchNormal (None, 16, 16, 192) 768 ['block_6_expand[0][0]'] ization) block_6_expand_relu (ReLU) (None, 16, 16, 192) 0 ['block_6_expand_BN[0][0]'] block_6_pad (ZeroPadding2D) (None, 17, 17, 192) 0 ['block_6_expand_relu[0][0]'] block_6_depthwise (DepthwiseCo (None, 8, 8, 192) 1728 ['block_6_pad[0][0]'] nv2D) block_6_depthwise_BN (BatchNor (None, 8, 8, 192) 768 ['block_6_depthwise[0][0]'] malization) block_6_depthwise_relu (ReLU) (None, 8, 8, 192) 0 ['block_6_depthwise_BN[0][0]'] block_6_project (Conv2D) (None, 8, 8, 64) 12288 ['block_6_depthwise_relu[0][0]'] block_6_project_BN (BatchNorma (None, 8, 8, 64) 256 ['block_6_project[0][0]'] lization) block_7_expand (Conv2D) (None, 8, 8, 384) 24576 ['block_6_project_BN[0][0]'] block_7_expand_BN (BatchNormal (None, 8, 8, 384) 1536 ['block_7_expand[0][0]'] ization) block_7_expand_relu (ReLU) (None, 8, 8, 384) 0 ['block_7_expand_BN[0][0]'] block_7_depthwise (DepthwiseCo (None, 8, 8, 384) 3456 ['block_7_expand_relu[0][0]'] nv2D) block_7_depthwise_BN (BatchNor (None, 8, 8, 384) 1536 ['block_7_depthwise[0][0]'] malization) block_7_depthwise_relu (ReLU) (None, 8, 8, 384) 0 ['block_7_depthwise_BN[0][0]'] block_7_project (Conv2D) (None, 8, 8, 64) 24576 ['block_7_depthwise_relu[0][0]'] block_7_project_BN (BatchNorma (None, 8, 8, 64) 256 ['block_7_project[0][0]'] lization) block_7_add (Add) (None, 8, 8, 64) 0 ['block_6_project_BN[0][0]', 'block_7_project_BN[0][0]'] block_8_expand (Conv2D) (None, 8, 8, 384) 24576 ['block_7_add[0][0]'] block_8_expand_BN (BatchNormal (None, 8, 8, 384) 1536 ['block_8_expand[0][0]'] ization) block_8_expand_relu (ReLU) (None, 8, 8, 384) 0 ['block_8_expand_BN[0][0]'] block_8_depthwise (DepthwiseCo (None, 8, 8, 384) 3456 ['block_8_expand_relu[0][0]'] nv2D) block_8_depthwise_BN (BatchNor (None, 8, 8, 384) 1536 ['block_8_depthwise[0][0]'] malization) block_8_depthwise_relu (ReLU) (None, 8, 8, 384) 0 ['block_8_depthwise_BN[0][0]'] block_8_project (Conv2D) (None, 8, 8, 64) 24576 ['block_8_depthwise_relu[0][0]'] block_8_project_BN (BatchNorma (None, 8, 8, 64) 256 ['block_8_project[0][0]'] lization) block_8_add (Add) (None, 8, 8, 64) 0 ['block_7_add[0][0]', 'block_8_project_BN[0][0]'] block_9_expand (Conv2D) 
(None, 8, 8, 384) 24576 ['block_8_add[0][0]'] block_9_expand_BN (BatchNormal (None, 8, 8, 384) 1536 ['block_9_expand[0][0]'] ization) block_9_expand_relu (ReLU) (None, 8, 8, 384) 0 ['block_9_expand_BN[0][0]'] block_9_depthwise (DepthwiseCo (None, 8, 8, 384) 3456 ['block_9_expand_relu[0][0]'] nv2D) block_9_depthwise_BN (BatchNor (None, 8, 8, 384) 1536 ['block_9_depthwise[0][0]'] malization) block_9_depthwise_relu (ReLU) (None, 8, 8, 384) 0 ['block_9_depthwise_BN[0][0]'] block_9_project (Conv2D) (None, 8, 8, 64) 24576 ['block_9_depthwise_relu[0][0]'] block_9_project_BN (BatchNorma (None, 8, 8, 64) 256 ['block_9_project[0][0]'] lization) block_9_add (Add) (None, 8, 8, 64) 0 ['block_8_add[0][0]', 'block_9_project_BN[0][0]'] block_10_expand (Conv2D) (None, 8, 8, 384) 24576 ['block_9_add[0][0]'] block_10_expand_BN (BatchNorma (None, 8, 8, 384) 1536 ['block_10_expand[0][0]'] lization) block_10_expand_relu (ReLU) (None, 8, 8, 384) 0 ['block_10_expand_BN[0][0]'] block_10_depthwise (DepthwiseC (None, 8, 8, 384) 3456 ['block_10_expand_relu[0][0]'] onv2D) block_10_depthwise_BN (BatchNo (None, 8, 8, 384) 1536 ['block_10_depthwise[0][0]'] rmalization) block_10_depthwise_relu (ReLU) (None, 8, 8, 384) 0 ['block_10_depthwise_BN[0][0]'] block_10_project (Conv2D) (None, 8, 8, 96) 36864 ['block_10_depthwise_relu[0][0]'] block_10_project_BN (BatchNorm (None, 8, 8, 96) 384 ['block_10_project[0][0]'] alization) block_11_expand (Conv2D) (None, 8, 8, 576) 55296 ['block_10_project_BN[0][0]'] block_11_expand_BN (BatchNorma (None, 8, 8, 576) 2304 ['block_11_expand[0][0]'] lization) block_11_expand_relu (ReLU) (None, 8, 8, 576) 0 ['block_11_expand_BN[0][0]'] block_11_depthwise (DepthwiseC (None, 8, 8, 576) 5184 ['block_11_expand_relu[0][0]'] onv2D) block_11_depthwise_BN (BatchNo (None, 8, 8, 576) 2304 ['block_11_depthwise[0][0]'] rmalization) block_11_depthwise_relu (ReLU) (None, 8, 8, 576) 0 ['block_11_depthwise_BN[0][0]'] block_11_project (Conv2D) (None, 8, 8, 96) 55296 ['block_11_depthwise_relu[0][0]'] block_11_project_BN (BatchNorm (None, 8, 8, 96) 384 ['block_11_project[0][0]'] alization) block_11_add (Add) (None, 8, 8, 96) 0 ['block_10_project_BN[0][0]', 'block_11_project_BN[0][0]'] block_12_expand (Conv2D) (None, 8, 8, 576) 55296 ['block_11_add[0][0]'] block_12_expand_BN (BatchNorma (None, 8, 8, 576) 2304 ['block_12_expand[0][0]'] lization) block_12_expand_relu (ReLU) (None, 8, 8, 576) 0 ['block_12_expand_BN[0][0]'] block_12_depthwise (DepthwiseC (None, 8, 8, 576) 5184 ['block_12_expand_relu[0][0]'] onv2D) block_12_depthwise_BN (BatchNo (None, 8, 8, 576) 2304 ['block_12_depthwise[0][0]'] rmalization) block_12_depthwise_relu (ReLU) (None, 8, 8, 576) 0 ['block_12_depthwise_BN[0][0]'] block_12_project (Conv2D) (None, 8, 8, 96) 55296 ['block_12_depthwise_relu[0][0]'] block_12_project_BN (BatchNorm (None, 8, 8, 96) 384 ['block_12_project[0][0]'] alization) block_12_add (Add) (None, 8, 8, 96) 0 ['block_11_add[0][0]', 'block_12_project_BN[0][0]'] block_13_expand (Conv2D) (None, 8, 8, 576) 55296 ['block_12_add[0][0]'] block_13_expand_BN (BatchNorma (None, 8, 8, 576) 2304 ['block_13_expand[0][0]'] lization) block_13_expand_relu (ReLU) (None, 8, 8, 576) 0 ['block_13_expand_BN[0][0]'] block_13_pad (ZeroPadding2D) (None, 9, 9, 576) 0 ['block_13_expand_relu[0][0]'] block_13_depthwise (DepthwiseC (None, 4, 4, 576) 5184 ['block_13_pad[0][0]'] onv2D) block_13_depthwise_BN (BatchNo (None, 4, 4, 576) 2304 ['block_13_depthwise[0][0]'] rmalization) block_13_depthwise_relu (ReLU) (None, 4, 4, 576) 0 ['block_13_depthwise_BN[0][0]'] 
block_13_project (Conv2D) (None, 4, 4, 160) 92160 ['block_13_depthwise_relu[0][0]'] block_13_project_BN (BatchNorm (None, 4, 4, 160) 640 ['block_13_project[0][0]'] alization) block_14_expand (Conv2D) (None, 4, 4, 960) 153600 ['block_13_project_BN[0][0]'] block_14_expand_BN (BatchNorma (None, 4, 4, 960) 3840 ['block_14_expand[0][0]'] lization) block_14_expand_relu (ReLU) (None, 4, 4, 960) 0 ['block_14_expand_BN[0][0]'] block_14_depthwise (DepthwiseC (None, 4, 4, 960) 8640 ['block_14_expand_relu[0][0]'] onv2D) block_14_depthwise_BN (BatchNo (None, 4, 4, 960) 3840 ['block_14_depthwise[0][0]'] rmalization) block_14_depthwise_relu (ReLU) (None, 4, 4, 960) 0 ['block_14_depthwise_BN[0][0]'] block_14_project (Conv2D) (None, 4, 4, 160) 153600 ['block_14_depthwise_relu[0][0]'] block_14_project_BN (BatchNorm (None, 4, 4, 160) 640 ['block_14_project[0][0]'] alization) block_14_add (Add) (None, 4, 4, 160) 0 ['block_13_project_BN[0][0]', 'block_14_project_BN[0][0]'] block_15_expand (Conv2D) (None, 4, 4, 960) 153600 ['block_14_add[0][0]'] block_15_expand_BN (BatchNorma (None, 4, 4, 960) 3840 ['block_15_expand[0][0]'] lization) block_15_expand_relu (ReLU) (None, 4, 4, 960) 0 ['block_15_expand_BN[0][0]'] block_15_depthwise (DepthwiseC (None, 4, 4, 960) 8640 ['block_15_expand_relu[0][0]'] onv2D) block_15_depthwise_BN (BatchNo (None, 4, 4, 960) 3840 ['block_15_depthwise[0][0]'] rmalization) block_15_depthwise_relu (ReLU) (None, 4, 4, 960) 0 ['block_15_depthwise_BN[0][0]'] block_15_project (Conv2D) (None, 4, 4, 160) 153600 ['block_15_depthwise_relu[0][0]'] block_15_project_BN (BatchNorm (None, 4, 4, 160) 640 ['block_15_project[0][0]'] alization) block_15_add (Add) (None, 4, 4, 160) 0 ['block_14_add[0][0]', 'block_15_project_BN[0][0]'] block_16_expand (Conv2D) (None, 4, 4, 960) 153600 ['block_15_add[0][0]'] block_16_expand_BN (BatchNorma (None, 4, 4, 960) 3840 ['block_16_expand[0][0]'] lization) block_16_expand_relu (ReLU) (None, 4, 4, 960) 0 ['block_16_expand_BN[0][0]'] block_16_depthwise (DepthwiseC (None, 4, 4, 960) 8640 ['block_16_expand_relu[0][0]'] onv2D) block_16_depthwise_BN (BatchNo (None, 4, 4, 960) 3840 ['block_16_depthwise[0][0]'] rmalization) block_16_depthwise_relu (ReLU) (None, 4, 4, 960) 0 ['block_16_depthwise_BN[0][0]'] block_16_project (Conv2D) (None, 4, 4, 320) 307200 ['block_16_depthwise_relu[0][0]'] block_16_project_BN (BatchNorm (None, 4, 4, 320) 1280 ['block_16_project[0][0]'] alization) Conv_1 (Conv2D) (None, 4, 4, 1280) 409600 ['block_16_project_BN[0][0]'] Conv_1_bn (BatchNormalization) (None, 4, 4, 1280) 5120 ['Conv_1[0][0]'] out_relu (ReLU) (None, 4, 4, 1280) 0 ['Conv_1_bn[0][0]'] average_pooling2d_22 (AverageP (None, 1, 1, 1280) 0 ['out_relu[0][0]'] ooling2D) flatten_9 (Flatten) (None, 1280) 0 ['average_pooling2d_22[0][0]'] dense_27 (Dense) (None, 256) 327936 ['flatten_9[0][0]'] dropout_9 (Dropout) (None, 256) 0 ['dense_27[0][0]'] dense_28 (Dense) (None, 64) 16448 ['dropout_9[0][0]'] dense_29 (Dense) (None, 3) 195 ['dense_28[0][0]'] ================================================================================================== Total params: 2,602,563 Trainable params: 344,579 Non-trainable params: 2,257,984 __________________________________________________________________________________________________
score = model.evaluate(test_generator)
print(f'Test loss: {score[0]} / Test accuracy: {score[1]}')
26/26 [==============================] - 6s 213ms/step - loss: 0.2541 - accuracy: 0.9117
Test loss: 0.2540619671344757 / Test accuracy: 0.9116564393043518
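Overall accuracy says little about the rare mask_weared_incorrect class. A hedged sketch of per-class metrics, using a fresh non-shuffled generator so prediction order lines up with the stored labels:

# Sketch: confusion matrix and per-class report on the test split.
# shuffle=False keeps predictions aligned with eval_generator.classes.
from sklearn.metrics import classification_report, confusion_matrix

eval_generator = test_image_generator.flow_from_dataframe(
    dataframe = X_test,
    directory = TRAIN_DATA_DIR,
    x_col = 'file_name',
    y_col = 'label',
    batch_size = 32,
    shuffle = False,
    class_mode = 'categorical',
    target_size = image_target_size
)
pred = model.predict(eval_generator, steps=len(eval_generator))
y_pred = np.argmax(pred, axis=1)
print(confusion_matrix(eval_generator.classes, y_pred))
print(classification_report(eval_generator.classes, y_pred, target_names=list(class_ind.keys())))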
from matplotlib import pyplot as plt
plt.plot(history_1.history['accuracy'])
plt.plot(history_1.history['val_accuracy'])
plt.title('model accuracy')
plt.ylabel('accuracy')
plt.xlabel('epoch')
plt.legend(['train', 'test'], loc='lower right')
plt.show()
plt.plot(history_1.history['loss'])
plt.plot(history_1.history['val_loss'])
plt.title('model loss')
plt.ylabel('loss')
plt.xlabel('epoch')
plt.legend(['train', 'test'], loc='upper left')
plt.show()
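With the backbone frozen, validation accuracy plateaus around 91%. A natural follow-up is fine-tuning: unfreeze the top of MobileNetV2 and continue training at a much lower learning rate. The sketch below is hedged; the number of unfrozen layers, learning rate, and epoch count are illustrative assumptions:

# Sketch: fine-tune the upper part of the backbone (values are illustrative).
for layer in mobileNet.layers[-30:]:
    layer.trainable = True

model.compile(loss = 'categorical_crossentropy',
              optimizer = keras.optimizers.Adam(learning_rate=1e-5),   # much lower LR
              metrics = ['accuracy'])

history_2 = model.fit(train_generator, epochs = 5,
                      validation_data = test_generator)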