Neural Network - #3

Mirrowel 2023-06-01 22:52:51 +02:00
parent de4d54d21d
commit f416812251
6 changed files with 173 additions and 1 deletions

Network3.py Normal file

@@ -0,0 +1,63 @@
import tensorflow as tf
from tensorflow import keras
from tensorflow.keras import layers

# Load and preprocess the dataset
# Assuming you have three folders named 'class1', 'class2', and 'class3'
# each containing images of their respective classes
data_dir = 'Training/'
image_size = (100, 100)
batch_size = 32

train_ds = tf.keras.preprocessing.image_dataset_from_directory(
    data_dir,
    validation_split=0.2,
    subset="training",
    seed=123,
    image_size=image_size,
    batch_size=batch_size,
)
val_ds = tf.keras.preprocessing.image_dataset_from_directory(
    data_dir,
    validation_split=0.2,
    subset="validation",
    seed=123,
    image_size=image_size,
    batch_size=batch_size,
)

class_names = train_ds.class_names
num_classes = len(class_names)

# Create the model
model = keras.Sequential([
    layers.Rescaling(1./255, input_shape=(100, 100, 3)),
    layers.Conv2D(16, 3, padding='same', activation='relu'),
    layers.MaxPooling2D(),
    layers.Conv2D(32, 3, padding='same', activation='relu'),
    layers.MaxPooling2D(),
    layers.Conv2D(64, 3, padding='same', activation='relu'),
    layers.MaxPooling2D(),
    layers.Flatten(),
    layers.Dense(128, activation='relu'),
    layers.Dense(num_classes)
])

# Compile the model
model.compile(optimizer='adam',
              loss=tf.keras.losses.SparseCategoricalCrossentropy(from_logits=True),
              metrics=['accuracy'])

# Train the model
epochs = 10
model.fit(
    train_ds,
    validation_data=val_ds,
    epochs=epochs
)

# Save the trained model
model.save('trained_model')
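
Note: model.save('trained_model') writes a TensorFlow SavedModel directory, while the test script below loads 'trained_model.h5'. A minimal sketch, not part of this commit, of producing the HDF5 file from that directory (assuming both paths as named above):

from tensorflow import keras

# Hypothetical one-off conversion: re-load the SavedModel directory written above
# and re-save it in HDF5 format under the name the test script expects.
keras.models.load_model('trained_model').save('trained_model.h5')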


@@ -10,7 +10,7 @@ train_ds = tf.keras.utils.image_dataset_from_directory(train_data_dir, validatio
 val_ds = tf.keras.utils.image_dataset_from_directory(train_data_dir, validation_split=0.2,
     subset="validation", seed=123, batch_size=32, image_size=(100, 100))
-model = keras.models.load_model("trained_model")
+model = keras.models.load_model("trained_model.h5")
 predictions = model.predict(val_ds.take(32))

BIN
mode2.h5 Normal file

Binary file not shown.

network2.py Normal file

@@ -0,0 +1,52 @@
import numpy as np
import tensorflow as tf
from tensorflow import keras
from tensorflow.keras import layers
import matplotlib.pyplot as plt

# Scale pixel values from [0, 255] to [0, 1]
def normalize(image, label):
    return image / 255, label

train_data_dir = "Training/"
train_ds = tf.keras.utils.image_dataset_from_directory(train_data_dir,
    validation_split=0.2, subset="training", seed=123, batch_size=32,
    image_size=(100, 100), color_mode='grayscale')
val_ds = tf.keras.utils.image_dataset_from_directory(train_data_dir,
    validation_split=0.2, subset="validation", seed=123, batch_size=32,
    image_size=(100, 100), color_mode='grayscale')

class_names = train_ds.class_names
print(class_names)

train_ds = train_ds.map(normalize)
val_ds = val_ds.map(normalize)

# Convolutional model for 100x100 grayscale images with a single sigmoid output
model = keras.Sequential([
    layers.Conv2D(64, (3, 3), activation='relu', input_shape=(100, 100, 1)),
    layers.MaxPool2D((2, 2)),
    layers.Conv2D(128, (3, 3), activation='relu'),
    layers.MaxPool2D((2, 2)),
    layers.Conv2D(256, (3, 3), activation='relu'),
    layers.MaxPool2D((2, 2)),
    layers.Flatten(),
    layers.Dense(1024, activation='relu'),
    layers.Dense(1, activation='sigmoid')
])
print(model.summary())

model.compile(optimizer='adam', loss='binary_crossentropy', metrics=['accuracy'])
trainHistory = model.fit(train_ds, epochs=10, validation_data=val_ds)

(loss, accuracy) = model.evaluate(val_ds)
print(loss)
print(accuracy)

model.save("mode2.h5")
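
For reference, a minimal sketch, not part of this commit, of loading mode2.h5 and running it on the same validation split, assuming the Training/ layout and the grayscale 100x100 preprocessing used above:

import tensorflow as tf
from tensorflow import keras

# Rebuild the validation split exactly as network2.py does
val_ds = tf.keras.utils.image_dataset_from_directory("Training/",
    validation_split=0.2, subset="validation", seed=123, batch_size=32,
    image_size=(100, 100), color_mode='grayscale')
val_ds = val_ds.map(lambda image, label: (image / 255, label))

# Load the saved model and predict; the sigmoid head yields one value in [0, 1] per image
model = keras.models.load_model("mode2.h5")
probs = model.predict(val_ds)
print(probs.shape)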

testerZBS.py Normal file

@@ -0,0 +1,57 @@
import os
import numpy as np
import tensorflow as tf
from tensorflow import keras

# Load the trained model
model = keras.models.load_model('trained_model.h5')

# Define the class names
class_names = ['Empty', 'Food', 'People']

# Load and preprocess the validation dataset
data_dir = "Training/"
image_size = (100, 100)
batch_size = 32
val_ds = tf.keras.preprocessing.image_dataset_from_directory(
    data_dir,
    validation_split=0.2,
    subset="validation",
    seed=123,
    image_size=image_size,
    batch_size=batch_size,
)

# Select 60 random images from the validation set
val_images = []
val_labels = []
for images, labels in val_ds.unbatch().shuffle(1000).take(60):
    val_images.append(images)
    val_labels.append(labels)

# Make predictions on the random images
errorcount = 0
for i in range(60):
    test_image = val_images[i]
    test_label = val_labels[i]
    test_image = np.expand_dims(test_image, axis=0)
    test_image = test_image / 255.0  # Normalize the image

    # Make predictions
    predictions = model.predict(test_image)
    predicted_class_index = np.argmax(predictions[0])
    predicted_class = class_names[predicted_class_index]
    true_class = class_names[test_label]

    # Save the image to Results/, named after the predicted class
    direct = 'Results/'
    filename = predicted_class + str(i) + '.jpeg'
    tf.keras.preprocessing.image.save_img(direct + filename, val_images[i])

    if predicted_class != true_class:
        errorcount += 1

    print('Image', i + 1)
    print('True class:', true_class)
    print('Predicted class:', predicted_class)
    print()

print('Error count: ', errorcount)

BIN
trained_model.h5 Normal file

Binary file not shown.