dataset, neural network
parent 8c5c5c48f0
commit 03d74b5aee

main.py: 14 additions
@@ -1,12 +1,16 @@
 # from collections import deque
 from queue import PriorityQueue

+import matplotlib.pyplot as plt
+
+from neural import *
 from path_algorithms.a_star import a_star
 # from path_algorithms.bfs import bfs
 from rubbish import *
 from tree import evaluate_values, trash_selection
 from truck import Truck
 from surface import *
+from PIL import Image
 from genetic import genetic

 RESOLUTION = 900
@@ -53,6 +57,9 @@ for i in range(15):
         rubbish_list.append(Rubbish(screen, j * 60, i * 60))

 path = []
+X,y = create_training_data()
+model = learn_neural_network(X,y)
+
 gen = [(truck.y / 60, truck.x / 60)]
 fl = 0
 length = []
@@ -118,6 +125,13 @@ while True:

     # the decision that takes what to do with the garbage
     if not path and order:
+
+        number = np.random.randint(2077)
+        path_img = "images/bbb"
+        img = Image.open(path_img+'/'+str(number)+'.jpg')
+        img.show()
+        prediction = predict(model,path_img+'/'+str(number)+'.jpg')
+        result(prediction)
         data = rubbish_list[order[0]].data_for_decision_tree()
         print(f'----------\n'
               f'Characteristics of the garbage we met:\n'
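Taken together, the main.py changes appear to train the classifier once at startup and then, each time the garbage-decision step runs, show and classify a randomly picked photo before the decision tree is consulted. A minimal standalone sketch of that flow, assuming the images/bbb directory from the diff exists and that the photos are named 0.jpg through 2076.jpg (inferred from np.random.randint(2077)); it uses the helpers from neural.py introduced below:

    import numpy as np
    from PIL import Image

    from neural import create_training_data, learn_neural_network, predict, result

    # Train once, before the simulation loop starts.
    X, y = create_training_data()
    model = learn_neural_network(X, y)

    # Inside the decision step: pick a random photo, display it, classify it.
    number = np.random.randint(2077)
    img_path = f"images/bbb/{number}.jpg"   # hypothetical variable name; same path as in the diff
    Image.open(img_path).show()
    result(predict(model, img_path))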
neural.py: 85 additions (new file)

@@ -0,0 +1,85 @@
+import math
+import os
+import cv2
+import matplotlib.pyplot as plt
+import numpy as np
+from keras.layers import Conv2D, MaxPooling2D
+from keras.layers import Dense, Activation, Flatten
+from keras.models import Sequential
+from tqdm import tqdm
+
+def create_training_data():
+    DATADIR = "images"
+    CATEGORIES = ["plastic", "other"]
+    IMG_SIZE = 100
+    training_data = []
+    for category in CATEGORIES:
+        path = os.path.join(DATADIR, category)
+        class_num = CATEGORIES.index(category)  # 0 - plastic, 1 - other
+        for img in tqdm(os.listdir(path)):
+            try:
+                img_array = cv2.imread(os.path.join(path, img), cv2.IMREAD_GRAYSCALE)
+                new_array = cv2.resize(img_array, (IMG_SIZE, IMG_SIZE))
+                training_data.append([new_array, class_num])
+            except Exception as e:
+                pass
+
+    X = []
+    y = []
+
+    for features, label in training_data:
+        X.append(features)
+        y.append(label)
+
+    X = np.array(X).reshape(-1, IMG_SIZE, IMG_SIZE, 1)
+    y = np.array(y)
+
+    print("Training data created!")
+    return X,y
+
+def learn_neural_network(X,y):
+    X = X/255.0
+
+    model = Sequential()
+
+    model.add(Conv2D(64, (3, 3), input_shape=X.shape[1:]))
+    model.add(Activation('relu'))
+    model.add(MaxPooling2D(pool_size=(2, 2)))
+
+    model.add(Conv2D(64, (3, 3)))
+    model.add(Activation('relu'))
+    model.add(MaxPooling2D(pool_size=(2, 2)))
+
+    model.add(Flatten())
+
+    model.add(Dense(64))
+
+    model.add(Dense(1))
+    model.add(Activation('sigmoid'))
+
+    model.compile(loss='binary_crossentropy',
+                  optimizer='adam',
+                  metrics=['accuracy'])
+
+    model.fit(X, y, batch_size=1, epochs=1, validation_batch_size=0.1)
+
+    return model
+
+def prepare_img(filepath):
+    IMG_SIZE = 100
+    img_array = cv2.imread(filepath, cv2.IMREAD_GRAYSCALE)
+    new_array = cv2.resize(img_array, (IMG_SIZE, IMG_SIZE))
+    return new_array.reshape(-1, IMG_SIZE, IMG_SIZE, 1) / 255
+
+def predict(model, filepath):
+    return model.predict([prepare_img(filepath)])
+
+def result(prediction):
+    if prediction[0][0] >= 0.65:
+        print(prediction)
+        print(math.ceil(prediction[0][0]))
+        print('No plastic')
+    elif prediction[0][0] < 0.65:
+        print(prediction)
+        print(math.floor(prediction[0][0]))
+        print("Plastic")
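One detail in learn_neural_network worth flagging: Keras's validation_batch_size expects an integer batch size, so passing 0.1 to it has no useful effect; the fraction was most likely intended for validation_split, which holds out that share of the training data for validation. A hedged sketch of the presumably intended call, keeping the commit's batch size and epoch count:

    # Assumption: 0.1 was meant as a validation fraction, not a validation batch size.
    model.fit(X, y, batch_size=1, epochs=1, validation_split=0.1)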
trash.xlsx (BIN)

Binary file not shown.