Neural Network
This commit is contained in:
parent e8db800831
commit 076472cfd1
@@ -3,40 +3,46 @@ import cv2
import matplotlib
import numpy as np
import matplotlib.pyplot as plt
import pandas as pd
from matplotlib.pyplot import imshow

path_potatoes = 'neural_network\\images\\potatoes'
path_beetroot = 'neural_network\\images\\beetroot'
size = 250
size = 100

#POTATOES
training_data_potatoes = []
image_data = []
label_data = []
for img in os.listdir(path_potatoes):
    # read each potato image, convert OpenCV's BGR to RGB, and resize to size x size
    pic = cv2.imread(os.path.join(path_potatoes, img))
    pic = cv2.cvtColor(pic, cv2.COLOR_BGR2RGB)
    pic = cv2.resize(pic, (size, size))
    training_data_potatoes.append([pic])
    image_data.append([pic])
    label_data.append(1)    # label 1 = potato

#np.save(os.path.join('neural_network','potatoes-dataset'),np.array(training_data_potatoes))

np.save(os.path.join('neural_network', 'potatoes-dataset'), np.array(training_data_potatoes))

saved_potatoes = np.load(os.path.join('neural_network', 'potatoes-dataset.npy'))
#saved_potatoes = np.load(os.path.join('neural_network','potatoes-dataset.npy'))

#BEETROOT
training_data_beetroot = []
for img in os.listdir(path_beetroot):
    # same preprocessing for the beetroot images
    pic = cv2.imread(os.path.join(path_beetroot, img))
    pic = cv2.cvtColor(pic, cv2.COLOR_BGR2RGB)
    pic = cv2.resize(pic, (size, size))
    training_data_beetroot.append([pic])
    image_data.append([pic])
    label_data.append(0)    # label 0 = beetroot

np.save(os.path.join('neural_network', 'beetroot-dataset'), np.array(training_data_beetroot))
#np.save(os.path.join('neural_network','beetroot-dataset'),np.array(training_data_beetroot))

saved_beetroot = np.load(os.path.join('neural_network', 'beetroot-dataset.npy'))
#saved_beetroot = np.load(os.path.join('neural_network','beetroot-dataset.npy'))

dataset = {
    'beetroots': training_data_beetroot,
    'potatoes': training_data_potatoes
}
np.save(os.path.join('neural_network', 'image-dataset'), np.array(image_data))
np.save(os.path.join('neural_network', 'label-dataset'), np.array(label_data))

print(dataset)
np.save(os.path.join('neural_network', 'dataset'), np.array(dataset))    # whole dict stored as a 0-d object array
saved_images = np.load(os.path.join('neural_network', 'image-dataset.npy'))

print(saved_images.shape)

plt.imshow(saved_images[0].reshape(size, size, 3))
plt.imshow(np.array(image_data[0]).reshape(size, size, 3))
plt.show()
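The script above ends up writing two parallel arrays, image-dataset.npy and label-dataset.npy, where index i in one matches index i in the other. Below is a minimal sketch of how those files could be loaded and split for training and testing while keeping that pairing intact; the 80/20 ratio and the fixed seed are arbitrary assumptions, not part of this commit.

# Sketch: load the saved arrays and shuffle them with one shared permutation,
# so image i keeps label i. Ratio and seed are arbitrary choices.
import os
import numpy as np

images = np.load(os.path.join('neural_network', 'image-dataset.npy'))   # shape (N, 1, size, size, 3)
labels = np.load(os.path.join('neural_network', 'label-dataset.npy'))   # shape (N,); 1 = potato, 0 = beetroot

rng = np.random.default_rng(0)
order = rng.permutation(len(images))    # one shared shuffle keeps images and labels aligned
split = int(0.8 * len(images))

train_x, test_x = images[order[:split]], images[order[split:]]
train_y, test_y = labels[order[:split]], labels[order[split:]]
print(train_x.shape, train_y.shape, test_x.shape, test_y.shape)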
54
neuralNetwork.py
Normal file
@@ -0,0 +1,54 @@
import numpy as np
import torch
import torch.nn as nn
import torch.optim as optim
from matplotlib.pyplot import imshow
import matplotlib.pyplot as ppl


def plotdigit(image):
    # helper to display a flattened image as a 250-pixel-wide greyscale picture (not used below)
    img = np.reshape(image, (-1, 250))
    imshow(img, cmap='Greys', vmin=0, vmax=255)
    ppl.show()


train_images = np.load('neural_network\\image-dataset.npy')
print(train_images.shape)

train_labels = np.load('neural_network\\label-dataset.npy')

# scale pixel values from 0..255 to 0..1; the labels are already 0/1 class indices
train_images = train_images / 255

# flatten each (1, 100, 100, 3) image to a 30000-element vector so it matches the Linear layer's input
train_images = [torch.tensor(image, dtype=torch.float32).reshape(-1) for image in train_images]
print(train_images[0].shape)

train_labels = [torch.tensor(label, dtype=torch.long) for label in train_labels]


input_dim = 100*100*3
output_dim = 2

# single linear layer + log-softmax: a logistic-regression-style classifier over the two classes
model = nn.Sequential(
    nn.Linear(input_dim, output_dim),
    nn.LogSoftmax(dim=-1)
)


def train(model, n_iter):

    criterion = nn.NLLLoss()
    optimizer = optim.SGD(model.parameters(), lr=0.001)

    for epoch in range(n_iter):
        for image, label in zip(train_images, train_labels):
            optimizer.zero_grad()

            # add a batch dimension of 1 so NLLLoss sees shapes (1, 2) and (1,)
            output = model(image)
            loss = criterion(output.unsqueeze(0), label.unsqueeze(0))
            loss.backward()
            optimizer.step()

        print(f'epoch: {epoch:03}')


train(model, 100)
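A minimal follow-up sketch, assuming model, train_images and train_labels as defined above: once train() has run, the model's log-probabilities can be turned into hard predictions and compared against the labels. This only measures accuracy on the images the model was trained on; a held-out test set (not part of this commit) would be needed for a fair evaluation.

with torch.no_grad():                      # no gradients needed while evaluating
    correct = 0
    for image, label in zip(train_images, train_labels):
        pred = model(image).argmax()       # index of the largest log-probability
        correct += int(pred == label)
print(f'training accuracy: {correct / len(train_images):.2%}')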