diff --git a/Readme.md b/Readme.md
index 3896aec..e97d34d 100644
--- a/Readme.md
+++ b/Readme.md
@@ -35,14 +35,14 @@ VERTICAL_NUM_OF_FIELDS = 3
 HORIZONTAL_NUM_OF_FIELDS = 3
 ```
 \
-#####4.1 Save generated map:
+#### 4.1 Save generated map:
 ```bash
 python main.py --save-map
 ```
 Map will be saved in maps directory.
 Generated filename: map-uuid
 
-#####4.2 Load map
+#### 4.2 Load map
 ```bash
 python main.py --load-map=name_of_map
 ```
diff --git a/app/__init__.py b/app/__init__.py
index b5ae6fc..1c25260 100644
--- a/app/__init__.py
+++ b/app/__init__.py
@@ -42,21 +42,21 @@ class App:
     def keys_pressed_handler(self):
         keys = pygame.key.get_pressed()
 
-        if keys[pygame.K_m]:
-            self.__tractor.move()
-            print(self.__tractor)
-
         if keys[pygame.K_w]:
             self.__tractor.move()
             print(self.__tractor)
 
+        if keys[pygame.K_n]:
+            self.__bot_is_running.set()
+            self.__tractor.harvest_checked_fields_handler(self.__bot_is_running)
+
         if keys[pygame.K_h]:
             self.__tractor.harvest()
 
         if keys[pygame.K_v]:
             self.__tractor.sow()
 
-        if keys[pygame.K_n]:
+        if keys[pygame.K_j]:
             self.__tractor.hydrate()
 
         if keys[pygame.K_f]:
diff --git a/app/base_field.py b/app/base_field.py
index 19c1198..55c27fe 100644
--- a/app/base_field.py
+++ b/app/base_field.py
@@ -7,6 +7,9 @@ class BaseField:
     def __init__(self, img_path: str):
         self._img_path = img_path
 
+    def get_img_path(self) -> str:
+        return self._img_path
+
     def draw_field(self, screen: pygame.Surface, pos_x: int, pos_y: int,
                    is_centered: bool = False, size: tuple = None, angle: float = 0.0) -> None:
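The new `K_n` binding hands the shared `threading.Event` to the tractor so the bot can run off the main pygame loop and signal back when it finishes. A minimal sketch of that hand-off pattern (the `worker` body is a placeholder for the real bot loop, not code from this repo):

```python
import threading
import time


def worker(is_running: threading.Event) -> None:
    # Placeholder for Tractor.harvest_checked_fields: run bot steps, then signal done
    while is_running.is_set():
        time.sleep(0.1)  # one "bot step"
        break            # stand-in for the "crop found, harvest and stop" exit
    is_running.clear()   # lets the key handler start a fresh run later


is_running = threading.Event()
is_running.set()  # what the K_n handler does before spawning the thread
t = threading.Thread(target=worker, args=(is_running,), daemon=True)
t.start()
t.join()  # only for this demo; the game loop would keep polling instead
```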
diff --git a/app/neural_network.py b/app/neural_network.py
new file mode 100644
index 0000000..b9710c1
--- /dev/null
+++ b/app/neural_network.py
@@ -0,0 +1,110 @@
+#!/usr/bin/python3
+import os
+
+from tensorflow.keras.models import Sequential, save_model, load_model
+from tensorflow.keras.layers import Dense, Flatten, Conv2D
+from tensorflow.keras.losses import sparse_categorical_crossentropy
+from tensorflow.keras.optimizers import Adam
+from tensorflow.keras.preprocessing.image import ImageDataGenerator
+from tensorflow import keras as k
+import numpy as np
+
+from app.base_field import BaseField
+from config import *
+
+
+class NeuralNetwork:
+    def __init__(self):
+        # Model config
+        self.batch_size = 25
+        self.img_width, self.img_height, self.img_num_channels = 25, 25, 3
+        self.loss_function = sparse_categorical_crossentropy
+        self.no_classes = 7
+        self.no_epochs = 40
+        self.optimizer = Adam()
+        self.verbosity = 1
+
+        # Determine the shape of the data
+        self.input_shape = (self.img_width, self.img_height, self.img_num_channels)
+
+        # Labels, in the alphabetical order flow_from_directory assigns class indices
+        self.labels = ["cabbage", "carrot", "corn", "lettuce", "paprika", "potato", "tomato"]
+
+    def init_model(self):
+        if not self.model_dir_is_empty():
+            # Load the previously saved model
+            self.model = load_model(
+                MODEL_DIR,
+                custom_objects=None,
+                compile=True
+            )
+        else:
+            # Create the model
+            self.model = Sequential()
+            self.model.add(Conv2D(16, kernel_size=(5, 5), activation='relu', input_shape=self.input_shape))
+            self.model.add(Conv2D(32, kernel_size=(5, 5), activation='relu'))
+            self.model.add(Conv2D(64, kernel_size=(5, 5), activation='relu'))
+            self.model.add(Conv2D(128, kernel_size=(5, 5), activation='relu'))
+            self.model.add(Flatten())
+            self.model.add(Dense(16, activation='relu'))
+            self.model.add(Dense(self.no_classes, activation='softmax'))
+
+        # Display a model summary
+        self.model.summary()
+
+    def load_images(self):
+        # Create a generator that rescales pixel values to [0, 1]
+        datagen = ImageDataGenerator(
+            rescale=1. / 255
+        )
+        self.train_datagen = datagen.flow_from_directory(
+            TRAINING_SET_DIR,
+            save_to_dir=ADAPTED_IMG_DIR,
+            save_format='jpeg',
+            batch_size=self.batch_size,
+            target_size=(25, 25),
+            class_mode='sparse')
+
+    def train(self):
+        self.model.compile(loss=self.loss_function,
+                           optimizer=self.optimizer,
+                           metrics=['accuracy'])
+
+        # Start training
+        self.model.fit(
+            self.train_datagen,
+            epochs=self.no_epochs,
+            shuffle=False)
+
+    def predict(self, field: BaseField) -> str:
+        print(field.get_img_path())
+        loaded_image = k.preprocessing.image.load_img(field.get_img_path(),
+                                                      target_size=(self.img_height, self.img_width))
+
+        # Convert to an array and rescale by 255, exactly as during training
+        img_array = k.preprocessing.image.img_to_array(loaded_image) / 255.
+
+        # Add the sample dimension: the predictor expects (1, IMG_HEIGHT, IMG_WIDTH, CHANNELS)
+        img_np_array = np.expand_dims(img_array, axis=0)
+        predictions = self.model.predict(img_np_array)
+        prediction = np.argmax(predictions[0])
+
+        label = self.labels[prediction]
+        print(f'Ground truth: {type(field).__name__} - Prediction: {label}')
+        return label
+
+    def model_dir_is_empty(self) -> bool:
+        return not os.path.isdir(MODEL_DIR) or len(os.listdir(MODEL_DIR)) == 0
+
+    def check(self, field: BaseField) -> str:
+        needs_training = self.model_dir_is_empty()
+        self.init_model()
+
+        # Train and persist the model only when no saved model existed yet;
+        # a freshly built, untrained network would otherwise predict noise
+        if needs_training:
+            self.load_images()
+            self.train()
+            save_model(self.model, MODEL_DIR)
+
+        return self.predict(field)
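A quick sanity check on the architecture above: each unpadded 5×5 convolution trims 4 pixels from each spatial dimension, so the 25×25 input shrinks to 9×9 before `Flatten`, and 9 × 9 × 128 = 10368 is exactly the `Dense` input size recorded in `keras_metadata.pb` further down. A standalone verification:

```python
# Feature-map size after four Conv2D(kernel_size=(5, 5), padding='valid') layers
side = 25
for _ in range(4):
    side = side - 5 + 1   # 25 -> 21 -> 17 -> 13 -> 9

channels = 128            # filters in the last Conv2D
flatten_units = side * side * channels
assert flatten_units == 10368  # matches "axes": {"-1": 10368} in keras_metadata.pb
print(side, flatten_units)     # 9 10368
```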
diff --git a/app/tractor.py b/app/tractor.py
index a104b7c..a5f6442 100644
--- a/app/tractor.py
+++ b/app/tractor.py
@@ -11,6 +11,7 @@ from typing import Union
 
 from app.base_field import BaseField
 from app.board import Board
+from app.neural_network import NeuralNetwork
 from app.utils import get_class
 from app.fields import CROPS, PLANTS, Crops, Sand, Clay, Field
 from config import *
@@ -28,6 +29,7 @@ class Tractor(BaseField):
         self.__board = board
         self.__harvested_corps = []
         self.__fuel = 10
+        self.__neural_network = None
 
     def draw(self, screen: pygame.Surface) -> None:
         self.draw_field(screen, self.__pos_x + FIELD_SIZE / 2, self.__pos_y + FIELD_SIZE / 2,
@@ -185,7 +187,7 @@ class Tractor(BaseField):
 
     def run_bot(self, moves: list[tuple[str, str]], is_running: threading.Event) -> None:
         print(moves)
-        print(f"Length of Moves {len(moves)}") #- {3 ** len(moves)}")
+        print(f"Length of Moves {len(moves)}")
         while len(moves) > 0:
             movement, action = moves.pop(0)
             # do action
@@ -203,17 +205,20 @@ class Tractor(BaseField):
             time.sleep(1)
 
             # move
-            print(f"Move {movement}")
-            if movement == M_GO_FORWARD:
-                self.move()
-            elif movement == M_ROTATE_LEFT:
-                self.rotate_left()
-            elif movement == M_ROTATE_RIGHT:
-                self.rotate_right()
+            self.move_or_rotate(movement)
 
             time.sleep(TIME_OF_MOVING)
 
         is_running.clear()
 
+    def move_or_rotate(self, movement: str):
+        print(f"Move {movement}")
+        if movement == M_GO_FORWARD:
+            self.move()
+        elif movement == M_ROTATE_LEFT:
+            self.rotate_left()
+        elif movement == M_ROTATE_RIGHT:
+            self.rotate_right()
+
     @staticmethod
     def move_is_correct(x: int, y: int, direction: float) -> Union[(int, int), None]:
         pos_x = x * FIELD_SIZE
@@ -261,3 +266,27 @@ class Tractor(BaseField):
             obj = get_class("app.fields", choosen_type)
             board.get_fields()[x][y] = obj()
         return obj()
+
+    def harvest_checked_fields_handler(self, is_running: threading.Event):
+        thread = threading.Thread(target=self.harvest_checked_fields, args=(is_running,),
+                                  daemon=True)
+        thread.start()
+
+    def harvest_checked_fields(self, is_running: threading.Event):
+        moves = [M_GO_FORWARD, M_ROTATE_LEFT, M_ROTATE_RIGHT]
+        distribution = [0.6, 0.2, 0.2]
+        # Build the network once instead of once per loop iteration
+        self.__neural_network = NeuralNetwork()
+        while True:
+            field = self.get_field_from_board()
+            prediction = self.__neural_network.check(field)
+
+            if prediction.capitalize() in CROPS:
+                self.harvest()
+                break
+
+            chosen_move = random.choices(moves, weights=distribution)[0]
+            self.move_or_rotate(chosen_move)
+            time.sleep(1)
+
+        is_running.clear()
diff --git a/config.py b/config.py
index eb06a0b..2f99c49 100644
--- a/config.py
+++ b/config.py
@@ -12,7 +12,8 @@ __all__ = (
     'A_SOW', 'A_HARVEST', 'A_HYDRATE', 'A_FERTILIZE', 'A_DO_NOTHING',
     'D_NORTH', 'D_EAST', 'D_SOUTH', 'D_WEST',
     'VALUE_OF_CROPS', 'VALUE_OF_PLANT', 'VALUE_OF_SAND', 'VALUE_OF_CLAY',
-    'MAP_FILE_NAME', 'JSON','SAVE_MAP', 'LOAD_MAP'
+    'MAP_FILE_NAME', 'JSON', 'SAVE_MAP', 'LOAD_MAP',
+    'TRAINING_SET_DIR', 'TEST_SET_DIR', 'ADAPTED_IMG_DIR', 'MODEL_DIR'
 )
 
 # Board settings:
@@ -26,11 +27,15 @@ HEIGHT = VERTICAL_NUM_OF_FIELDS * FIELD_SIZE
 FPS = 10
 CAPTION = 'Tractor'
 
-# Path
+# Paths
 BASE_DIR = os.path.dirname(__file__)
 RESOURCE_DIR = os.path.join(BASE_DIR, 'resources')
 MAP_DIR = os.path.join(BASE_DIR, 'maps')
 MAP_FILE_NAME = 'map'
+TRAINING_SET_DIR = os.path.join(RESOURCE_DIR, 'smaller_train')
+TEST_SET_DIR = os.path.join(RESOURCE_DIR, 'smaller_test')
+ADAPTED_IMG_DIR = os.path.join(RESOURCE_DIR, 'adapted-images')
+MODEL_DIR = os.path.join(RESOURCE_DIR, 'saved_model')
 
 # Picture format
 PNG = "png"
diff --git a/requirements.txt b/requirements.txt
index 4fd71e3..5c4ab1f 100644
Binary files a/requirements.txt and b/requirements.txt differ
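The bot's wander policy in `harvest_checked_fields` leans on `random.choices`, which samples with replacement using the given weights and returns a list, hence the `[0]`. A standalone check of the 60/20/20 split (string stand-ins replace the `M_*` constants from `config.py`):

```python
import random
from collections import Counter

moves = ["go_forward", "rotate_left", "rotate_right"]  # stand-ins for M_* constants
distribution = [0.6, 0.2, 0.2]

samples = [random.choices(moves, weights=distribution)[0] for _ in range(10_000)]
print(Counter(samples))  # roughly 6000 / 2000 / 2000
```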
"batch_input_shape": null, "stateful": false, "must_restore_from_config": false, "class_name": "Dense", "config": {"name": "dense_1", "trainable": true, "dtype": "float32", "units": 7, "activation": "softmax", "use_bias": true, "kernel_initializer": {"class_name": "GlorotUniform", "config": {"seed": null}, "shared_object_id": 17}, "bias_initializer": {"class_name": "Zeros", "config": {}, "shared_object_id": 18}, "kernel_regularizer": null, "bias_regularizer": null, "activity_regularizer": null, "kernel_constraint": null, "bias_constraint": null}, "shared_object_id": 19, "input_spec": {"class_name": "InputSpec", "config": {"dtype": null, "shape": null, "ndim": null, "max_ndim": null, "min_ndim": 2, "axes": {"-1": 16}}, "shared_object_id": 28}, "build_input_shape": {"class_name": "TensorShape", "items": [null, 16]}}2 -¹croot.keras_api.metrics.0"_tf_keras_metric*‚{"class_name": "Mean", "name": "loss", "dtype": "float32", "config": {"name": "loss", "dtype": "float32"}, "shared_object_id": 29}2 -ódroot.keras_api.metrics.1"_tf_keras_metric*¼{"class_name": "MeanMetricWrapper", "name": "accuracy", "dtype": "float32", "config": {"name": "accuracy", "dtype": "float32", "fn": "sparse_categorical_accuracy"}, "shared_object_id": 22}2 \ No newline at end of file +¹froot.keras_api.metrics.0"_tf_keras_metric*‚{"class_name": "Mean", "name": "loss", "dtype": "float32", "config": {"name": "loss", "dtype": "float32"}, "shared_object_id": 29}2 +ógroot.keras_api.metrics.1"_tf_keras_metric*¼{"class_name": "MeanMetricWrapper", "name": "accuracy", "dtype": "float32", "config": {"name": "accuracy", "dtype": "float32", "fn": "sparse_categorical_accuracy"}, "shared_object_id": 22}2 \ No newline at end of file diff --git a/resources/saved_model/saved_model.pb b/resources/saved_model/saved_model.pb index 6dcf4ba..3f2294b 100644 Binary files a/resources/saved_model/saved_model.pb and b/resources/saved_model/saved_model.pb differ diff --git a/resources/saved_model/variables/variables.data-00000-of-00001 b/resources/saved_model/variables/variables.data-00000-of-00001 index e215389..9c0a23f 100644 Binary files a/resources/saved_model/variables/variables.data-00000-of-00001 and b/resources/saved_model/variables/variables.data-00000-of-00001 differ diff --git a/resources/saved_model/variables/variables.index b/resources/saved_model/variables/variables.index index 87f9b07..6f29c26 100644 Binary files a/resources/saved_model/variables/variables.index and b/resources/saved_model/variables/variables.index differ