recognizing garbage

Pawel Felcyn 2023-06-18 21:01:45 +02:00
parent 2d7ed16185
commit 58b53f5817
11 changed files with 57 additions and 9 deletions

9 binary image files changed (not shown; sizes between 5.3 KiB and 29 KiB).


@@ -2,7 +2,7 @@ import joblib
 from sklearn.calibration import LabelEncoder
 from agentActionType import AgentActionType
 import time
-from garbage import GarbageType, RecognizedGarbage
+from garbage import Garbage, GarbageType, RecognizedGarbage
 from garbageCan import GarbageCan
 from turnCar import turn_left_orientation, turn_right_orientation
 from garbageTruck import GarbageTruck
@@ -14,6 +14,12 @@ import pygame
 from bfs import find_path_to_nearest_can
 from agentState import AgentState
+import tensorflow as tf
+from keras.models import load_model
+import keras.utils as image
+from keras.optimizers import Adam
+import numpy as np
 
 def collect_garbage(game_context: GameContext) -> None:
     while True:
@@ -30,11 +36,12 @@ def collect_garbage(game_context: GameContext) -> None:
         pass
 
 def _recognize_garbage(dust_car: GarbageTruck, can: GarbageCan) -> None:
-    loaded_model = joblib.load('machine_learning/model.pkl')
+    tree_model = joblib.load('machine_learning/model.pkl')
+    optimizer = Adam(learning_rate=0.001)
+    neural_model = load_model('machine_learning/neuralModel.h5', compile=False)
+    neural_model.compile(optimizer=optimizer)
     for garbage in can.garbage:
-        attributes = [garbage.shape, garbage.flexibility, garbage.does_smell, garbage.weight, garbage.size, garbage.color, garbage.softness, garbage.does_din]
-        encoded = attributes_to_floats(attributes)
-        predicted_class = loaded_model.predict([encoded])[0]
+        predicted_class = predict_class(garbage, tree_model, neural_model)
         garbage_type: GarbageType = None
         if predicted_class == 'PAPER':
             garbage_type = GarbageType.PAPER
@@ -50,6 +57,35 @@ def _recognize_garbage(dust_car: GarbageTruck, can: GarbageCan) -> None:
         recognized_garbage = RecognizedGarbage(garbage, garbage_type)
         dust_car.sort_garbage(recognized_garbage)
 
+def predict_class(garbage: Garbage, tree_model, neural_model) -> str:
+    if garbage.img is None:
+        return predict_class_from_tree(garbage, tree_model)
+    return predict_class_from_neural_model(garbage, neural_model)
+
+def predict_class_from_tree(garbage: Garbage, tree_model) -> str:
+    attributes = [garbage.shape, garbage.flexibility, garbage.does_smell, garbage.weight, garbage.size, garbage.color, garbage.softness, garbage.does_din]
+    encoded = attributes_to_floats(attributes)
+    return tree_model.predict([encoded])[0]
+
+def predict_class_from_neural_model(garbage: Garbage, neural_model) -> str:
+    img = image.load_img(garbage.img, target_size=(150, 150))
+    img_array = image.img_to_array(img)
+    img_array = np.expand_dims(img_array, axis=0)
+    img_array /= 255.
+    predictions = neural_model.predict(img_array)
+    prediction = np.argmax(predictions[0])
+    if prediction == 0:
+        return "BIO"
+    if prediction == 1:
+        return "GLASS"
+    if prediction == 2:
+        return "MIXED"
+    if prediction == 3:
+        return "PAPER"
+    if prediction == 4:
+        return "PLASTIC_AND_METAL"
+
 def attributes_to_floats(attributes: list[str]) -> list[float]:
     output: list[float] = []
     if attributes[0] == 'Longitiudonal':
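
For reference, a minimal self-contained sketch of the image-based prediction path introduced above: load the saved Keras model, preprocess a photo to the 150x150 input used in the commit, and map the argmax index to the same class order that is hard-coded in predict_class_from_neural_model. The function name classify_photo and the CLASS_NAMES list are illustrative additions, not part of the commit.

# Sketch: classify one garbage photo with the saved neural model.
import numpy as np
from keras.models import load_model
from keras.optimizers import Adam
import keras.utils as image

# Class order mirrors the index-to-label mapping used in this commit.
CLASS_NAMES = ["BIO", "GLASS", "MIXED", "PAPER", "PLASTIC_AND_METAL"]

def classify_photo(img_path: str) -> str:
    model = load_model('machine_learning/neuralModel.h5', compile=False)
    model.compile(optimizer=Adam(learning_rate=0.001))
    img = image.load_img(img_path, target_size=(150, 150))       # same input size as in the commit
    arr = np.expand_dims(image.img_to_array(img), axis=0) / 255.  # rescale pixels to [0, 1]
    probs = model.predict(arr)[0]
    return CLASS_NAMES[int(np.argmax(probs))]

# Example usage (path taken from the commit's test-photo folder):
# print(classify_photo('machine_learning/photos_not_from_train_set/1.jpg'))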


@@ -36,12 +36,19 @@ def create_city() -> City:
     streets = create_streets()
     trashcans = create_trashcans()
     bumps = create_speed_bumps()
-    garbage_pieces = create_garbage_pieces()
+    garbage_pieces = create_garbage_pieces_witout_imgs()
     garbage_pieces_counter = 0
     for s in streets:
         city.add_street(s)
     for t in trashcans:
-        for i in range(4):
+        for _ in range(4):
+            t.add_garbage(garbage_pieces[garbage_pieces_counter])
+            garbage_pieces_counter = garbage_pieces_counter + 1
+        city.add_can(t)
+    garbage_pieces = create_garbage_pieces_with_images()
+    garbage_pieces_counter = 0
+    for t in trashcans:
+        for _ in range(3):
             t.add_garbage(garbage_pieces[garbage_pieces_counter])
             garbage_pieces_counter = garbage_pieces_counter + 1
         city.add_can(t)
@@ -50,16 +57,21 @@ def create_city() -> City:
     return city
 
-def create_garbage_pieces() -> List[Garbage]:
+def create_garbage_pieces_witout_imgs() -> List[Garbage]:
     garbage_pieces = []
     with open('machine_learning/garbage_infill.csv', 'r') as file:
         lines = file.readlines()
         for line in lines[1:]:
             param = line.strip().split(',')
             garbage_pieces.append(
-                Garbage('img', param[0], param[1], param[2], param[3], param[4], param[5], param[6], param[7].strip()))
+                Garbage(None, param[0], param[1], param[2], param[3], param[4], param[5], param[6], param[7].strip()))
     return garbage_pieces
 
+def create_garbage_pieces_with_images() -> list[Garbage]:
+    garbage_pieces = []
+    for i in range(1, 22):
+        garbage_pieces.append(Garbage('machine_learning/photos_not_from_train_set/' + str(i) + '.jpg', None, None, None, None, None, None, None, None))
+    return garbage_pieces
+
 def create_streets() -> List[Street]:
     streets = []
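
For orientation, a hypothetical sketch of the Garbage constructor implied by the two call sites above: CSV rows build attribute-only pieces with img=None, while photo pieces carry only an image path. The actual class lives in the garbage module (imported as from garbage import Garbage) and may be defined differently; field names are inferred from the recognition code in this commit.

# Hypothetical shape of Garbage, inferred from both call sites; not part of this commit.
class Garbage:
    def __init__(self, img, shape, flexibility, does_smell, weight,
                 size, color, softness, does_din):
        self.img = img                  # path to a photo, or None for attribute-only pieces
        self.shape = shape
        self.flexibility = flexibility
        self.does_smell = does_smell
        self.weight = weight
        self.size = size
        self.color = color
        self.softness = softness
        self.does_din = does_din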