Neural Network - #4 Cleanup #2

Mirrowel 2023-06-02 00:14:50 +02:00
parent b1880d61cb
commit 56ca4bc891
4 changed files with 25 additions and 37 deletions

.gitignore
View File

@@ -16,6 +16,7 @@ lib64/
 parts/
 sdist/
 var/
+Network/Results
 *.egg-info/
 .installed.cfg
 *.egg

View File

@@ -4,27 +4,20 @@ from keras.models import Sequential
 from keras.optimizers import Adam
 from keras.utils import to_categorical
 from keras.preprocessing.image import ImageDataGenerator
-import os
-import PIL
-import PIL.Image
-import numpy
-# Normalizes the pixel values of an image to the range [0, 1].
 def normalize(image, label):
     return image / 255, label
-# Set the paths to the folders containing the training data
-train_data_dir = "Training/"
+# Set the paths to the folder containing the training data
+train_data_dir = "Network/Training/"
 # Set the number of classes and batch size
 num_classes = 3
 batch_size = 32
 # Set the image size and input shape
 img_width, img_height = 100, 100
 input_shape = (img_width, img_height, 1)
-# Load the training and validation data
 train_ds = tf.keras.utils.image_dataset_from_directory(
     train_data_dir,
     validation_split=0.2,
@@ -42,13 +35,12 @@ val_ds = tf.keras.utils.image_dataset_from_directory(
     seed=123,
     image_size=(img_height, img_width),
     batch_size=batch_size)
-# Get the class names
 class_names = train_ds.class_names
 print(class_names)
-# Normalize the training and validation data
 train_ds = train_ds.map(normalize)
 val_ds = val_ds.map(normalize)
 # Define the model architecture
 model = tf.keras.Sequential([
     layers.Conv2D(16, 3, padding='same', activation='relu', input_shape=(img_height, img_width, 1)),
@@ -61,20 +53,16 @@ model = tf.keras.Sequential([
     layers.Dense(128, activation='relu'),
     layers.Dense(num_classes, activation='softmax')
 ])
 # Compile the model
 model.compile(optimizer='adam',
               loss=tf.keras.losses.SparseCategoricalCrossentropy(from_logits=True),
               metrics=['accuracy'])
-# Print the model summary
 model.summary()
-epochs=10
 # Train the model
+epochs=10
 model.fit(train_ds,
           validation_data=val_ds,
           epochs=epochs)
 # Save the trained model
-model.save('trained_model.h5')
+model.save('Network/trained_model.h5')
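Note on the path changes in this file: every relative path now carries the Network/ prefix, so the scripts are evidently meant to be launched from the repository root. A minimal sketch of the layout this implies; BASE_DIR and the makedirs() call are illustrative additions, not part of the commit:

import os

# Layout implied by the new paths and the .gitignore entry in this commit:
#   Network/Training/  - training images, one subfolder per class
#   Network/Testing/   - test images
#   Network/Results/   - generated output, now git-ignored
BASE_DIR = "Network"  # hypothetical constant, not present in the scripts

train_data_dir = os.path.join(BASE_DIR, "Training")
model_path = os.path.join(BASE_DIR, "trained_model.h5")
results_dir = os.path.join(BASE_DIR, "Results")

# save_img() in the other scripts would fail if Results/ does not exist yet
os.makedirs(results_dir, exist_ok=True)
print(train_data_dir, model_path, results_dir)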

View File

@@ -4,13 +4,13 @@ import tensorflow as tf
 from tensorflow import keras
 # Load the trained model
-model = keras.models.load_model('trained_model.h5')
+model = keras.models.load_model('Network/trained_model.h5')
 # Load the class names
 class_names = ['Table', 'Done','Order']
 # Path to the folder containing test images
-test_images_folder = 'Testing/'
+test_images_folder = 'Network/Testing/'
 # Iterate over the test images
 i = 0
@@ -27,7 +27,6 @@ for folder_name in os.listdir(test_images_folder):
         true_class = 'Done'
     elif folder_name == 'People':
         true_class = 'Order'
-    true_class = folder_name
     # Iterate over the files in the subfolder
     for filename in os.listdir(folder_path):
@@ -35,8 +34,8 @@ for folder_name in os.listdir(test_images_folder):
         i+=1
         # Load and preprocess the test image
         image_path = os.path.join(folder_path, filename)
-        test_image = keras.preprocessingimage.load_img(image_path, target_size=(100, 100))
-        test_image = keras.preprocessingimage.img_to_array(test_image)
+        test_image = keras.preprocessing.image.load_img(image_path, target_size=(100, 100))
+        test_image = keras.preprocessing.image.img_to_array(test_image)
         test_image = np.expand_dims(test_image, axis=0)
         test_image = test_image / 255.0 # Normalize the image
@@ -48,7 +47,7 @@ for folder_name in os.listdir(test_images_folder):
         predicted_class_index = np.argmax(predictions[0])
         predicted_class = class_names[predicted_class_index]
-        direct = 'Results/'
+        direct = 'Network/Results/'
        filename = str(i) + predicted_class + '.jpeg'
         test_image = np.reshape(test_image, (100, 100, 3))
         tf.keras.preprocessing.image.save_img(direct+filename, test_image)
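The two-line change above fixes a typo (keras.preprocessingimage instead of keras.preprocessing.image). For reference, a small self-contained sketch of the same preprocessing chain the test script runs per image; the helper name and the example path are hypothetical, not taken from the repository:

import numpy as np
from tensorflow import keras

def load_test_image(image_path, target_size=(100, 100)):
    # Load the image, convert it to an array, add a batch axis, scale to [0, 1]
    img = keras.preprocessing.image.load_img(image_path, target_size=target_size)
    arr = keras.preprocessing.image.img_to_array(img)
    arr = np.expand_dims(arr, axis=0)  # shape (1, 100, 100, 3)
    return arr / 255.0

# Usage (the path is hypothetical):
# batch = load_test_image('Network/Testing/People/example.jpeg')
# predictions = model.predict(batch)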

View File

@@ -4,13 +4,13 @@ import tensorflow as tf
 from tensorflow import keras
 # Load the trained model
-model = keras.models.load_model('trained_model.h5')
+model = keras.models.load_model('Network/trained_model.h5')
 # Load the class names
 class_names = ['Table', 'Done','Order']
 # Load and preprocess the validation dataset
-data_dir = "Training/"
+data_dir = "Network/Training/"
 image_size = (100, 100)
 batch_size = 32
@@ -45,9 +45,9 @@ for i in range(60):
     true_class = class_names[test_label]
-    direct = 'Results/'
+    direct = 'Network/Results/'
     filename = predicted_class + str(i) + '.jpeg'
-    tf.keras.preprocessing.image.save_img(direct+filename, test_image)
+    tf.keras.preprocessing.image.save_img(direct+filename, val_images[i])
     if predicted_class != true_class:
         errorcount += 1
     print('Image', i+1)
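The final change passes val_images[i] instead of test_image to save_img, so the file written to Network/Results/ corresponds to the i-th validation image; save_img expects a single (height, width, channels) array. A rough sketch of that step, assuming val_images is a NumPy batch of shape (N, 100, 100, 3) (the surrounding script is not shown in the diff):

import os
import numpy as np
import tensorflow as tf

# Hypothetical stand-ins; in the real script val_images and predicted_class
# come from the dataset loaded from Network/Training/ and from model.predict().
val_images = np.random.rand(60, 100, 100, 3).astype("float32")
predicted_class = "Table"
i = 0

direct = 'Network/Results/'
os.makedirs(direct, exist_ok=True)  # not in the original script
filename = predicted_class + str(i) + '.jpeg'
# save_img takes one (height, width, channels) array, hence val_images[i]
tf.keras.preprocessing.image.save_img(direct + filename, val_images[i])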