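# Train a small convolutional network on the images under data/train and
# validate it on the images under data/examine.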
from keras.preprocessing.image import ImageDataGenerator, array_to_img, img_to_array, load_img
from keras.models import Sequential
from keras.layers import Conv2D, MaxPooling2D
from keras.layers import Activation, Dropout, Flatten, Dense
from keras import backend as K

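# All images are resized to these dimensions before entering the network.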
img_width, img_height = 256, 256

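# Data locations, sample counts, and training hyperparameters.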
train_data_dir = 'data/train'
examine_data_dir = 'data/examine'
nb_train_samples = 290
nb_examine_samples = 80
epochs = 50
batch_size = 16

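# Arrange the input shape to match the backend's channel ordering.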
if K.image_data_format() == 'channels_first':
    input_shape = (3, img_width, img_height)
else:
    input_shape = (img_width, img_height, 3)

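# Feature extractor: three convolution / ReLU / max-pooling blocks.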
model = Sequential()
model.add(Conv2D(32, (2, 2), input_shape=input_shape))
model.add(Activation('relu'))
model.add(MaxPooling2D(pool_size=(2, 2)))

model.add(Conv2D(32, (2, 2)))
model.add(Activation('relu'))
model.add(MaxPooling2D(pool_size=(2, 2)))

model.add(Conv2D(64, (2, 2)))
model.add(Activation('relu'))
model.add(MaxPooling2D(pool_size=(2, 2)))

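# Classifier head: flatten the feature maps, apply a dense layer with dropout,
# then map to the 7 output classes.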
model.add(Flatten())
model.add(Dense(64))
model.add(Activation('relu'))
model.add(Dropout(0.5))
model.add(Dense(7))
# softmax yields a probability distribution over the 7 classes, as expected
# by the categorical_crossentropy loss below
model.add(Activation('softmax'))

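# Multi-class setup: categorical cross-entropy loss with the RMSprop
# optimizer, tracking accuracy.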
model.compile(loss='categorical_crossentropy',
              optimizer='rmsprop',
              metrics=['accuracy'])

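# Augment the training images with random rotations, shifts, shears, zooms
# and horizontal flips; pixel values are rescaled to [0, 1].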
train_datagen = ImageDataGenerator(
    rotation_range=45,
    width_shift_range=0.3,
    height_shift_range=0.3,
    rescale=1. / 255,
    shear_range=0.25,
    zoom_range=0.1,
    horizontal_flip=True)

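# Examination images are only rescaled, never augmented.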
examine_datagen = ImageDataGenerator(rescale=1. / 255)

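# Build batch generators from the class-labelled subdirectories of each data
# directory; class_mode='categorical' yields one-hot encoded labels.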
train_generator = train_datagen.flow_from_directory(
    train_data_dir,
    target_size=(img_width, img_height),
    batch_size=batch_size,
    class_mode='categorical')

examine_generator = examine_datagen.flow_from_directory(
    examine_data_dir,
    target_size=(img_width, img_height),
    batch_size=batch_size,
    class_mode='categorical')

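# Train the model, drawing augmented batches from the training generator and
# validating on the examination generator after each epoch.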
model.fit_generator(
    train_generator,
    steps_per_epoch=nb_train_samples // batch_size,
    epochs=epochs,
    validation_data=examine_generator,
    validation_steps=nb_examine_samples // batch_size)

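# Save the learned weights, then report loss and accuracy on the (augmented)
# training generator.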
model.save_weights('model_payment.h5')
results = model.evaluate(train_generator)
print(results)