Merge remote-tracking branch 'origin/master'

# Conflicts:
#	createNeuralNetworkDatabase.py
secret_dude 2021-06-22 23:12:42 +02:00
commit bed9656adf
4 changed files with 77 additions and 14 deletions

createNeuralNetworkDatabase.py

@@ -3,37 +3,46 @@
 import cv2
 import matplotlib
 import numpy as np
 import matplotlib.pyplot as plt
-import pandas as pd
-from matplotlib.pyplot import imshow
 path_potatoes = 'neural_network\\images\\potatoes'
 path_beetroot = 'neural_network\\images\\beetroot'
-size = 250
+size = 100
 #POTATOES
-training_data_potatoes = []
+image_data = []
+label_data = []
 for img in os.listdir(path_potatoes):
     pic = cv2.imread(os.path.join(path_potatoes,img))
     pic = cv2.cvtColor(pic,cv2.COLOR_BGR2RGB)
     pic = cv2.resize(pic,(size,size))
-    training_data_potatoes.append([pic])
-np.save(os.path.join('neural_network','potatoes-dataset'),np.array(training_data_potatoes))
-saved_potatoes = np.load(os.path.join('neural_network','potatoes-dataset.npy'))
+    image_data.append([pic])
+    label_data.append(1)
+#np.save(os.path.join('neural_network','potatoes-dataset'),np.array(training_data_potatoes))
+#saved_potatoes = np.load(os.path.join('neural_network','potatoes-dataset.npy'))
 #BEETROOT
-training_data_beetroot = []
 for img in os.listdir(path_beetroot):
     pic = cv2.imread(os.path.join(path_beetroot,img))
     pic = cv2.cvtColor(pic,cv2.COLOR_BGR2RGB)
     pic = cv2.resize(pic,(size,size))
-    training_data_beetroot.append([pic])
-np.save(os.path.join('neural_network','beetroot-dataset'),np.array(training_data_beetroot))
-saved_potatoes = np.load(os.path.join('neural_network','beetroot-dataset.npy'))
-dict = {
-    'beetroots': training_data_beetroot,
-    'potatoes': training_data_potatoes
-}
+    image_data.append([pic])
+    label_data.append(0)
+#np.save(os.path.join('neural_network','beetroot-dataset'),np.array(training_data_beetroot))
+#saved_potatoes = np.load(os.path.join('neural_network','beetroot-dataset.npy'))
+np.save(os.path.join('neural_network','image-dataset'),np.array(image_data))
+np.save(os.path.join('neural_network','label-dataset'),np.array(label_data))
+saved_images = np.load(os.path.join('neural_network','image-dataset.npy'))
+print(saved_images.shape)
+plt.imshow(saved_images[0].reshape(size,size,3))
+plt.imshow(np.array(image_data[0]).reshape(size,size,3))
+plt.show()
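For context: the change replaces the two per-class dataset files with one combined image array and a parallel label array. A minimal sketch of how the saved pair could be loaded and sanity-checked (not part of the commit; the shapes and the potato=1/beetroot=0 encoding follow from the loops in the diff above):

import os
import numpy as np

# Load the combined arrays written by createNeuralNetworkDatabase.py.
images = np.load(os.path.join('neural_network', 'image-dataset.npy'))
labels = np.load(os.path.join('neural_network', 'label-dataset.npy'))

# Each entry was appended as [pic], so images is (n_images, 1, 100, 100, 3)
# and labels is (n_images,); the two must stay aligned.
print(images.shape, labels.shape)
assert images.shape[0] == labels.shape[0]

# Labels encode the class: 1 = potato, 0 = beetroot (per the two loops above).
print('potatoes:', (labels == 1).sum(), 'beetroots:', (labels == 0).sum())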

neuralNetwork.py Normal file (54 lines added)

@@ -0,0 +1,54 @@
import numpy as np
import torch
import torch.nn as nn
import torch.optim as optim
import matplotlib.pyplot as ppl
from matplotlib.pyplot import imshow

# Helper to display a flattened image as 250-pixel-wide grayscale rows
# (leftover utility; not called below).
def plotdigit(image):
    img = np.reshape(image, (-1, 250))
    imshow(img, cmap='Greys', vmin=0, vmax=255)
    ppl.show()

# Load the arrays written by createNeuralNetworkDatabase.py.
train_images = np.load('neural_network\\image-dataset.npy')
print(train_images.shape)
train_labels = np.load('neural_network\\label-dataset.npy')

# Scale pixel values to [0, 1]. The labels are already 0/1 class ids,
# so they must not be divided by 255 (that would zero them all out).
train_images = train_images / 255

# Flatten each image into a 1-D float tensor so it fits the linear layer.
train_images = [torch.tensor(image, dtype=torch.float32).reshape(-1) for image in train_images]
print(train_images[0].shape)
train_labels = [torch.tensor(label, dtype=torch.long) for label in train_labels]

input_dim = 100*100*3   # 100x100 RGB images, flattened
output_dim = 2          # two classes: beetroot (0), potato (1)

# Single linear layer followed by log-softmax, i.e. logistic regression.
# dim=0 because each input is an unbatched 1-D vector.
model = nn.Sequential(
    nn.Linear(input_dim, output_dim),
    nn.LogSoftmax(dim=0)
)

def train(model, n_iter):
    criterion = nn.NLLLoss()
    optimizer = optim.SGD(model.parameters(), lr=0.001)
    for epoch in range(n_iter):
        for image, label in zip(train_images, train_labels):
            optimizer.zero_grad()
            output = model(image)
            # NLLLoss expects a batch dimension, hence unsqueeze(0).
            loss = criterion(output.unsqueeze(0), label.unsqueeze(0))
            loss.backward()
            optimizer.step()
        print(f'epoch: {epoch:03}')

train(model, 100)
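The script trains but never measures how well the model fits. A minimal sketch of a check that could follow the training call (hypothetical helper, not in the commit; it reuses train_images/train_labels from the script, so it reports training accuracy only, not performance on held-out data):

# Fraction of images the model labels correctly (hypothetical check,
# evaluated on the training set itself for lack of a test split).
def training_accuracy(model, images, labels):
    correct = 0
    with torch.no_grad():
        for image, label in zip(images, labels):
            pred = model(image).argmax()
            correct += int(pred == label)
    return correct / len(images)

print('training accuracy:', training_accuracy(model, train_images, train_labels))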

The remaining two changed files are binary and not shown.