Merge pull request 'neural network model generated + fixed dataset structure' (#26) from neural_network_python into master
Reviewed-on: s474139/Inteligentny_Wozek#26
BIN  NeuralNetwork/best_model.pth  Normal file
10  NeuralNetwork/learning_results.txt  Normal file
@@ -0,0 +1,10 @@
Epoch: 1 Train Loss: 65 Train Accuracy: 0.5754245754245755
Epoch: 2 Train Loss: 25 Train Accuracy: 0.7457542457542458
Epoch: 3 Train Loss: 8 Train Accuracy: 0.8431568431568431
Epoch: 4 Train Loss: 2 Train Accuracy: 0.9010989010989011
Epoch: 5 Train Loss: 1 Train Accuracy: 0.9335664335664335
Epoch: 6 Train Loss: 0 Train Accuracy: 0.9545454545454546
Epoch: 7 Train Loss: 0 Train Accuracy: 0.972027972027972
Epoch: 8 Train Loss: 0 Train Accuracy: 0.9820179820179821
Epoch: 9 Train Loss: 0 Train Accuracy: 0.994005994005994
Epoch: 10 Train Loss: 0 Train Accuracy: 0.9945054945054945
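The log above is plain text with one line per epoch. If these numbers need to be plotted or post-processed later, a minimal parsing sketch could look like the following; it is written against exactly the "Epoch: N Train Loss: L Train Accuracy: A" layout shown above, and the helper name and path are only an illustration, not part of this PR.

# parse_learning_results.py -- illustrative helper, not part of this PR.
# Reads learning_results.txt and returns one (epoch, loss, accuracy) tuple per line.
def parse_results(path="NeuralNetwork/learning_results.txt"):
    results = []
    with open(path, encoding="utf-8") as log:
        for line in log:
            parts = line.split()
            if not parts or parts[0] != "Epoch:":
                continue  # skip blank or unexpected lines
            results.append((int(parts[1]), float(parts[4]), float(parts[7])))
    return results


if __name__ == "__main__":
    for epoch, loss, accuracy in parse_results():
        print(f"epoch {epoch:2d}  loss {loss:4.0f}  accuracy {accuracy:.3f}")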
60  NeuralNetwork/neural_network_learning.py  Normal file
@@ -0,0 +1,60 @@
import glob

from src.torchvision_resize_dataset import combined_dataset, images_path, classes
import src.data_model
from torch.optim import Adam
import torch
import torch.nn as nn
from torch.utils.data import DataLoader

device = torch.device("cuda" if torch.cuda.is_available() else "cpu")

train_loader = DataLoader(
    combined_dataset,  # dataset of images
    batch_size=256,    # images per batch
    shuffle=True       # random order each epoch
)

model = src.data_model.DataModel(num_objects=2).to(device)

# optimizer
optimizer = Adam(model.parameters(), lr=0.001, weight_decay=0.0001)
# loss function
criterion = nn.CrossEntropyLoss()

num_epochs = 10
# train_size = len(glob.glob(images_path+'*.jpg'))
train_size = 2002

go_to_accuracy = 0.0
for epoch in range(num_epochs):
    # training on the dataset
    model.train()
    train_accuracy = 0.0
    train_loss = 0.0
    for i, (images, labels) in enumerate(train_loader):
        # move the batch to the selected device (the deprecated Variable wrapper is not needed)
        images = images.to(device)
        labels = labels.to(device)

        # clear the optimizer gradients
        optimizer.zero_grad()

        outputs = model(images)            # prediction
        loss = criterion(outputs, labels)  # loss calculation
        loss.backward()
        optimizer.step()

        train_loss += loss.cpu().data * images.size(0)
        _, prediction = torch.max(outputs.data, 1)

        train_accuracy += int(torch.sum(prediction == labels.data))

    train_accuracy = train_accuracy / train_size
    train_loss = train_loss / train_size

    model.eval()

    print('Epoch: ' + str(epoch + 1) + ' Train Loss: ' + str(int(train_loss)) + ' Train Accuracy: ' + str(train_accuracy))

    if train_accuracy > go_to_accuracy:
        go_to_accuracy = train_accuracy
        torch.save(model.state_dict(), "best_model.pth")
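For later use of the saved weights, a hedged usage sketch for loading best_model.pth and classifying a single image follows; it is not part of this PR, and "example.jpg" is a placeholder path. The transform and class list come from the dataset module added below.

# Usage sketch: load the trained weights and classify one image.
# Assumes the modules added in this PR; "example.jpg" is a placeholder path.
import torch
from PIL import Image

import src.data_model
from src.torchvision_resize_dataset import transform, classes

device = torch.device("cuda" if torch.cuda.is_available() else "cpu")

model = src.data_model.DataModel(num_objects=2).to(device)
model.load_state_dict(torch.load("best_model.pth", map_location=device))
model.eval()

image = Image.open("example.jpg").convert("RGB")
batch = transform(image).unsqueeze(0).to(device)  # shape (1, 3, 224, 224)

with torch.no_grad():
    logits = model(batch)
    predicted = classes[int(logits.argmax(dim=1))]

print(predicted)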
61  NeuralNetwork/src/data_model.py  Normal file
@@ -0,0 +1,61 @@
import torch.nn as nn
import torch


class DataModel(nn.Module):
    def __init__(self, num_objects):
        super(DataModel, self).__init__()
        # input (batch=256, number of RGB channels=3, size=224x224)

        # convolution
        self.conv1 = nn.Conv2d(in_channels=3, out_channels=12, kernel_size=3, stride=1, padding=1)
        # shape (256, 12, 224, 224)

        # batch normalization
        self.bn1 = nn.BatchNorm2d(num_features=12)
        # shape (256, 12, 224, 224)
        self.relu1 = nn.ReLU()

        self.pool = nn.MaxPool2d(kernel_size=2, stride=2)
        # reduces the image size by a factor of 2:
        # the pooling window moves by 2 pixels at a time instead of 1
        # shape (256, 12, 112, 112)

        self.conv2 = nn.Conv2d(in_channels=12, out_channels=24, kernel_size=3, stride=1, padding=1)
        self.bn2 = nn.BatchNorm2d(num_features=24)
        self.relu2 = nn.ReLU()
        # shape (256, 24, 112, 112)

        self.conv3 = nn.Conv2d(in_channels=24, out_channels=48, kernel_size=3, stride=1, padding=1)
        # shape (256, 48, 112, 112)
        self.bn3 = nn.BatchNorm2d(num_features=48)
        # shape (256, 48, 112, 112)
        self.relu3 = nn.ReLU()

        # fully connected layer
        self.fc = nn.Linear(in_features=48*112*112, out_features=num_objects)

    def forward(self, input):
        output = self.conv1(input)
        output = self.bn1(output)
        output = self.relu1(output)

        output = self.pool(output)
        output = self.conv2(output)
        output = self.bn2(output)
        output = self.relu2(output)

        output = self.conv3(output)
        output = self.bn3(output)
        output = self.relu3(output)

        # output shape (256, 48, 112, 112)
        # print(output.shape)
        # print(self.fc.weight.shape)

        output = output.view(-1, 48*112*112)
        output = self.fc(output)

        return output
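A quick sanity check of the shapes noted in the comments above: pushing a dummy 224x224 batch through DataModel should yield one logit per class, confirming that 48*112*112 is the correct in_features for the fully connected layer. This is a standalone sketch, not part of the PR.

# Sanity-check sketch: a 224x224 RGB input should leave the conv/pool stack
# as 48 x 112 x 112 features, matching in_features of self.fc.
import torch
import src.data_model

model = src.data_model.DataModel(num_objects=2)
dummy = torch.randn(4, 3, 224, 224)  # small batch of random "images"

with torch.no_grad():
    logits = model(dummy)

print(logits.shape)  # expected: torch.Size([4, 2])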
31  NeuralNetwork/src/torchvision_resize_dataset.py  Normal file
@@ -0,0 +1,31 @@
import glob
import pathlib
import torchvision.transforms as transforms
from torchvision.datasets import ImageFolder
from torch.utils.data import ConcatDataset

# images have to be the same size for the algorithm to work
transform = transforms.Compose([
    transforms.Resize((224, 224)),  # resize images to (224, 224)
    transforms.ToTensor(),          # convert images to tensors, 0-255 to 0-1
    # transforms.RandomHorizontalFlip(),  # 0.5 chance to flip the image
    transforms.Normalize([0.5, 0.5, 0.5], [0.5, 0.5, 0.5])
])

letters_path = 'C:/Users/wojmed/Documents/VS repositories/Inteligentny_Wozek/NeuralNetwork/src/train_images/letters'
package_path = 'C:/Users/wojmed/Documents/VS repositories/Inteligentny_Wozek/NeuralNetwork/src/train_images/package'
images_path = 'C:/Users/wojmed/Documents/VS repositories/Inteligentny_Wozek/NeuralNetwork/src/train_images'

# # Load images from the class folders
# letter_folder = ImageFolder(letters_path, transform=transform)
# package_folder = ImageFolder(package_path, transform=transform)

# Combine both datasets into a single dataset
# combined_dataset = ConcatDataset([letter_folder, package_folder])
combined_dataset = ImageFolder(images_path, transform=transform)

# image classes (one sub-folder of train_images per class)
path = pathlib.Path(images_path)
classes = sorted([entry.name for entry in path.iterdir() if entry.is_dir()])

# print(classes)
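For reference, a short sketch of how this module would typically be consumed, assuming the letters and package sub-folders shown in the paths above:

# Usage sketch for the dataset module: inspect the detected classes and one sample.
from src.torchvision_resize_dataset import combined_dataset, classes

print(classes)                # expected: ['letters', 'package']
print(len(combined_dataset))  # total number of training images

image_tensor, label = combined_dataset[0]
print(image_tensor.shape)     # torch.Size([3, 224, 224]) after the Resize/ToTensor transform
print(classes[label])         # class name of the first sample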
[Image changes: the remaining 95 files in this diff are binary training images; for each, the Before and After width, height, and size are identical, which indicates they were moved or renamed without content changes.]