executable script for training

Maciej Sobkowiak 2022-02-16 22:29:01 +01:00
parent 59b2b9e687
commit af76c778d7
4 changed files with 127 additions and 34 deletions

main.py (new file)

@@ -0,0 +1,41 @@
from src.Unet import Unet
from src.loss import jaccard_loss
from src.metrics import IOU
from src.consts import EPOCHS, STEPS, SEED
from src.generators import create_generators
from tensorflow.keras.callbacks import ModelCheckpoint
import tensorflow as tf

if __name__ == "__main__":
    model = Unet(num_classes=1).build_model()
    compile_params = {
        'loss': jaccard_loss(smooth=90),
        'optimizer': 'rmsprop',
        'metrics': [IOU]
    }
    model.compile(**compile_params)
    # tf.keras.utils.plot_model(model, show_shapes=True)

    # Keep only the best weights (by validation loss) on disk.
    model_name = "models/unet.h5"
    modelcheckpoint = ModelCheckpoint(model_name,
                                      monitor='val_loss',
                                      mode='auto',
                                      verbose=1,
                                      save_best_only=True)

    train_gen = create_generators('training', SEED)
    val_gen = create_generators('validation', SEED)

    history = model.fit_generator(train_gen,
                                  validation_data=val_gen,
                                  epochs=EPOCHS,
                                  steps_per_epoch=STEPS,
                                  validation_steps=STEPS,
                                  shuffle=True,
                                  callbacks=[modelcheckpoint],
                                  )
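
Note that src/loss.py and src/metrics.py are imported above but not touched by this commit. The following is a hedged sketch only, showing what a jaccard_loss(smooth=...) factory and an IOU metric commonly look like; the bodies are assumptions, not the repository's actual code.

from tensorflow.keras import backend as K

def jaccard_loss(smooth=100):
    # Returns a closure usable as a Keras loss: 1 - soft Jaccard index,
    # smoothed so that empty masks do not divide by zero.
    def loss(y_true, y_pred):
        intersection = K.sum(y_true * y_pred, axis=-1)
        union = K.sum(y_true + y_pred, axis=-1) - intersection
        jac = (intersection + smooth) / (union + smooth)
        return 1.0 - jac
    return loss

def IOU(y_true, y_pred):
    # Metric: thresholded intersection over union for a binary mask.
    y_pred_bin = K.cast(y_pred > 0.5, K.floatx())
    intersection = K.sum(y_true * y_pred_bin)
    union = K.sum(y_true) + K.sum(y_pred_bin) - intersection
    return (intersection + K.epsilon()) / (union + K.epsilon())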

src/Unet.py (new file)

@@ -0,0 +1,54 @@
import shutil

import tensorflow as tf
from tensorflow.keras import backend as K
from tensorflow.keras.layers import concatenate
from tensorflow.keras.layers import UpSampling2D, Conv2D, Dropout, MaxPooling2D
from tensorflow.keras.layers import Input
from tensorflow.keras.models import Model

from src.consts import IMG_SIZE


class Unet():
    def __init__(self, num_classes=1):
        self.num_classes = num_classes

    def build_model(self):
        in1 = Input(shape=(IMG_SIZE[0], IMG_SIZE[1], 3))

        # Encoder: three conv blocks, each followed by 2x2 max pooling.
        conv1 = Conv2D(32, (3, 3), activation='relu', kernel_initializer='he_normal', padding='same')(in1)
        conv1 = Dropout(0.2)(conv1)
        conv1 = Conv2D(32, (3, 3), activation='relu', kernel_initializer='he_normal', padding='same')(conv1)
        pool1 = MaxPooling2D((2, 2))(conv1)

        conv2 = Conv2D(64, (3, 3), activation='relu', kernel_initializer='he_normal', padding='same')(pool1)
        conv2 = Dropout(0.2)(conv2)
        conv2 = Conv2D(64, (3, 3), activation='relu', kernel_initializer='he_normal', padding='same')(conv2)
        pool2 = MaxPooling2D((2, 2))(conv2)

        conv3 = Conv2D(128, (3, 3), activation='relu', kernel_initializer='he_normal', padding='same')(pool2)
        conv3 = Dropout(0.2)(conv3)
        conv3 = Conv2D(128, (3, 3), activation='relu', kernel_initializer='he_normal', padding='same')(conv3)
        pool3 = MaxPooling2D((2, 2))(conv3)

        # Bottleneck.
        conv4 = Conv2D(128, (3, 3), activation='relu', kernel_initializer='he_normal', padding='same')(pool3)
        conv4 = Dropout(0.2)(conv4)
        conv4 = Conv2D(128, (3, 3), activation='relu', kernel_initializer='he_normal', padding='same')(conv4)

        # Decoder: upsample and concatenate with the matching encoder block.
        up1 = concatenate([UpSampling2D((2, 2))(conv4), conv3], axis=-1)
        conv5 = Conv2D(64, (3, 3), activation='relu', kernel_initializer='he_normal', padding='same')(up1)
        conv5 = Dropout(0.2)(conv5)
        conv5 = Conv2D(64, (3, 3), activation='relu', kernel_initializer='he_normal', padding='same')(conv5)

        up2 = concatenate([UpSampling2D((2, 2))(conv5), conv2], axis=-1)
        conv6 = Conv2D(64, (3, 3), activation='relu', kernel_initializer='he_normal', padding='same')(up2)
        conv6 = Dropout(0.2)(conv6)
        conv6 = Conv2D(64, (3, 3), activation='relu', kernel_initializer='he_normal', padding='same')(conv6)

        up2 = concatenate([UpSampling2D((2, 2))(conv6), conv1], axis=-1)
        conv7 = Conv2D(32, (3, 3), activation='relu', kernel_initializer='he_normal', padding='same')(up2)
        conv7 = Dropout(0.2)(conv7)
        conv7 = Conv2D(32, (3, 3), activation='relu', kernel_initializer='he_normal', padding='same')(conv7)

        # Per-pixel sigmoid over num_classes output channels.
        segmentation = Conv2D(self.num_classes, (1, 1), activation='sigmoid', name='seg')(conv7)
        # segmentation = Conv2D(3, (1, 1), activation='sigmoid', name='seg')(conv7)

        model = Model(inputs=[in1], outputs=[segmentation])
        return model
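
The builder returns a plain Keras Model, so the architecture can be checked before training with nothing beyond what the diff already imports; a minimal sketch:

from src.Unet import Unet

model = Unet(num_classes=1).build_model()
model.summary()  # three 2x2 poolings and three 2x2 upsamplings with 'same'
                 # padding map the 512x512x3 input to a 512x512x1 sigmoid mask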

src/consts.py

@@ -1,7 +1,7 @@
 import pandas as pd
 FEATURES = '../data/train_features'
 LABELS = '../data/train_labels'
-JPG_IMAGES = '../images'
+JPG_IMAGES = 'images'
 RGB_DIR = "rgb/img"
 FC_DIR = "fc/img"
 MASK_DIR = "mask/img"
@@ -9,4 +9,7 @@ MASK_DIR = "mask/img"
 BATCH = 8
 IMG_SIZE = (512, 512)
 SEED = 7
+EPOCHS = 10
+STEPS = 10

src/generators.py

@@ -1,41 +1,36 @@
-from consts import JPG_IMAGES, RGB_DIR, MASK_DIR, FC_DIR, BATCH, IMG_SIZE
+from src.consts import JPG_IMAGES, RGB_DIR, MASK_DIR, BATCH, IMG_SIZE
 import os
 from tensorflow.keras.preprocessing.image import ImageDataGenerator


-def create_generators(mode='train'):
+def create_generators(mode='training', seed=1):
     '''
-    mode can be train or validation.
+    Params
+    mode: training or validation
+    seed: same value as in fit function.
     '''
-    if(mode == 'train'):
-        subset = 'training'
-    else:
-        subset = 'validation'
+    # we create two instances with the same arguments
     train_datagen = ImageDataGenerator(rescale=1 / 255.0,
                                        horizontal_flip=True,
                                        vertical_flip=True,
                                        validation_split=0.2)

-    rgb_gen = train_datagen.flow_from_directory(directory=os.path.join(JPG_IMAGES, RGB_DIR),
-                                                target_size=IMG_SIZE,
-                                                batch_size=BATCH,
-                                                class_mode=None,
-                                                classes=None,
-                                                shuffle=False,
-                                                subset=subset)
-    mask_gen = train_datagen.flow_from_directory(
-        directory=os.path.join(JPG_IMAGES, MASK_DIR),
-        target_size=IMG_SIZE,
-        batch_size=BATCH,
-        class_mode=None,
-        classes=None,
-        shuffle=False,
-        subset=subset)
-    # train_genenerator = zip(rgb_gen, mask_gen)
-    # for (imgs, mask) in train_genenerator:
-    #     yield (imgs, mask)
-    return rgb_gen, mask_gen
+    # Provide the same seed and keyword arguments to the fit and flow methods
+    image_generator = train_datagen.flow_from_directory(
+        os.path.dirname(os.path.join(JPG_IMAGES, RGB_DIR)),
+        class_mode=None,
+        target_size=IMG_SIZE,
+        # class_mode='binary',
+        seed=seed,
+        subset=mode
+    )
+    mask_generator = train_datagen.flow_from_directory(
+        os.path.dirname(os.path.join(JPG_IMAGES, MASK_DIR)),
+        target_size=IMG_SIZE,
+        class_mode=None,
+        seed=seed,
+        subset=mode
+    )
+    return zip(image_generator, mask_generator)
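
Because both iterators share the same seed and subset, each draw from the zipped generator yields an aligned (images, masks) pair. A minimal consumption sketch, assuming the directory layout implied by src/consts.py (images/rgb/img/... and images/mask/img/...); note that batch_size is no longer passed, so the flow_from_directory default of 32 applies rather than BATCH:

from src.generators import create_generators
from src.consts import SEED

train_gen = create_generators('training', SEED)
imgs, masks = next(train_gen)
print(imgs.shape, masks.shape)  # e.g. (32, 512, 512, 3) for both with defaults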