diff --git a/checkingTrash.py b/checkingTrash.py
new file mode 100644
index 0000000..2ef5a4e
--- /dev/null
+++ b/checkingTrash.py
@@ -0,0 +1,119 @@
+
+import os
+import numpy as np
+import random
+import shutil
+from keras.models import Sequential
+from keras.layers import Conv2D, Flatten, MaxPooling2D, Dense
+from keras.preprocessing import image
+
+# dataset from https://www.kaggle.com/asdasdasasdas/garbage-classification
+
+'''# Separating the dataset into training and test data. The destination folders were created
+# by hand, and 75 images were removed from the paper class for a more even distribution.
+def separate(categories):
+    for i in categories:
+        folder = "Garbage classification\\Garbage classification\\" + i
+        destination = "Garbage classification\\testset\\" + i
+        howmany = len(os.listdir(folder))
+        # move 20% of each class into the test set
+        for j in range(int(howmany * 0.2)):
+            move1 = random.choice(os.listdir(folder))
+            source = "Garbage classification\\Garbage classification\\" + i + "\\" + move1
+            d = shutil.move(source, destination, copy_function=shutil.copytree)
+
+
+types = ["cardboard", "glass", "metal", "paper", "plastic"]
+separate(types)
+os.rename("Garbage classification\\Garbage classification", "Garbage classification\\trainset")
+'''
+
+
+# convolutional network: three Conv2D/MaxPooling2D blocks followed by two Dense layers
+classifier = Sequential()
+classifier.add(Conv2D(32, (3, 3), input_shape=(110, 110, 3), activation="relu"))
+classifier.add(MaxPooling2D(pool_size=(2, 2)))
+classifier.add(Conv2D(64, (3, 3), activation="relu"))
+classifier.add(MaxPooling2D(pool_size=(2, 2)))
+# this layer was added in ver 4
+classifier.add(Conv2D(32, (3, 3), activation="relu"))
+classifier.add(MaxPooling2D(pool_size=(2, 2)))
+# -----------------
+classifier.add(Flatten())
+classifier.add(Dense(units=64, activation="relu"))
+classifier.add(Dense(units=5, activation="softmax"))
+# categorical_crossentropy matches the 5-way softmax output and class_mode='categorical'
+classifier.compile(optimizer="adam", loss="categorical_crossentropy", metrics=["accuracy"])
+
+
+from keras.preprocessing.image import ImageDataGenerator
+
+train_datagen = ImageDataGenerator(
+    rescale=1./255,
+    shear_range=0.1,
+    zoom_range=0.1,
+    width_shift_range=0.1,
+    height_shift_range=0.1,
+    horizontal_flip=True,
+    vertical_flip=True,
+)
+
+test_datagen = ImageDataGenerator(
+    rescale=1./255,
+    validation_split=0.1
+)
+
+train_generator = train_datagen.flow_from_directory(
+    "Garbage classification\\trainset",
+    target_size=(110, 110),
+    batch_size=16,
+    class_mode='categorical',
+    #seed=0
+)
+
+test_generator = test_datagen.flow_from_directory(
+    "Garbage classification\\testset",
+    target_size=(110, 110),
+    batch_size=16,
+    class_mode='categorical',
+)
+
+
+# Teaching the classifier
+'''classifier.fit_generator(train_generator, steps_per_epoch=150, epochs=25, validation_data=test_generator)
+classifier.save_weights('model_ver_4.h5')'''
+
+import matplotlib.pyplot as plt
+# map class index -> class name for labelling the predictions
+labels = dict((value, key) for key, value in train_generator.class_indices.items())
+
+
+classifier.load_weights("model_ver_4.h5")
+
+
+def getTrashPhoto(x, categories):
+    # pick x random test images, predict their class and plot them side by side
+    for i in range(x):
+        kind = random.choice(categories)
+        path = "Garbage classification\\testset\\" + kind
+        file = random.choice(os.listdir(path))
+        path = "Garbage classification\\testset\\" + kind + "\\" + file
+        gz = image.load_img(path, target_size=(110, 110))
+        # scale to [0, 1] and add a batch dimension before predicting
+        ti = image.img_to_array(gz)
+        ti = np.array(ti) / 255.0
+        ti = np.expand_dims(ti, axis=0)
+        prediction = classifier.predict(ti)
+        plt.subplot(1, x, i + 1)
+        plt.imshow(gz)
+        plt.title("AI thinks: %s\nReality:\n%s" % (labels[np.argmax(prediction)], file))
+    plt.show()
+
+
+types = ["cardboard", "glass", "metal", "paper", "plastic"]
+#types = ["metal"]
+
+getTrashPhoto(3, types)
+
+
+plt.show()
diff --git a/model_ver_4.h5 b/model_ver_4.h5
new file mode 100644
index 0000000..d218205
Binary files /dev/null and b/model_ver_4.h5 differ