import numpy as np
from tensorflow.keras.layers import Input, Dense
from tensorflow.keras.models import Model

SAMPLES_PATH = 'data/samples.npz'
MODEL_PATH = 'data/autoencoder_model.h5'
EPOCHS = 100

print('Reading samples from: {}'.format(SAMPLES_PATH))

# Flatten each 1x96x128 sample into a single 12288-dimensional vector
# so it can feed the Dense layers below.
train_samples = np.load(SAMPLES_PATH)['arr_0']
train_samples = train_samples.reshape(train_samples.shape[0], 96 * 128)
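
# NOTE (assumption): binary cross-entropy expects values in [0, 1]. If
# samples.npz holds raw uint8 images, rescale them first, e.g.:
# train_samples = train_samples.astype('float32') / 255.0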

# Earlier convolutional variant, kept for reference. Uncommenting it also
# needs `from tensorflow.keras.layers import Conv2D` and unflattened,
# channels-last input of shape (96, 128, 1):
# inp = Input(shape=(96, 128, 1))
# encoded = Conv2D(filters=32, kernel_size=1, activation='relu')(inp)
# decoded = Conv2D(filters=1, kernel_size=1, activation='sigmoid')(encoded)
# autoencoder = Model(inp, decoded)
#
# autoencoder.compile(optimizer='adadelta',
#                     loss='binary_crossentropy',
#                     metrics=['accuracy'])

# Dense autoencoder: compress the 12288-dimensional input to a 128-unit
# bottleneck, then reconstruct it. The sigmoid output matches the
# binary cross-entropy loss used below.
inp = Input(shape=(96 * 128,))
encoded = Dense(128, activation='relu')(inp)
decoded = Dense(96 * 128, activation='sigmoid')(encoded)
autoencoder = Model(inp, decoded)

autoencoder.compile(optimizer='adadelta',
                    loss='binary_crossentropy',
                    metrics=['accuracy'])
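
# Optional: inspect layer shapes and parameter counts before training.
# autoencoder.summary()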

# Train the network to reconstruct its own input.
autoencoder.fit(train_samples, train_samples, epochs=EPOCHS, batch_size=150)
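
# NOTE (assumption): all samples are used for training here; to watch for
# overfitting, fit() also accepts validation_split (e.g. 0.1) to hold out
# part of the data.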

autoencoder.save_weights(MODEL_PATH)
print('Saved model weights to: {}'.format(MODEL_PATH))
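
# Minimal sketch of reloading the trained weights later. save_weights()
# stores weights only, so the same architecture must be rebuilt first:
# inp = Input(shape=(96 * 128,))
# encoded = Dense(128, activation='relu')(inp)
# decoded = Dense(96 * 128, activation='sigmoid')(encoded)
# autoencoder = Model(inp, decoded)
# autoencoder.load_weights(MODEL_PATH)
# reconstructions = autoencoder.predict(train_samples)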