Projekt_AI-Automatyczny_saper/Engine/Neurons.py

import os

import pandas as pd
import torch
from PIL import Image
from torch import nn, optim
from torch.utils.data import DataLoader, Dataset
from torchvision import transforms
from Constants import *

# Locations of the training images and of the generated labels file.
IMG_DIR = "D:\\fotyProjekt\\all"
LABELS_CSV = "C:\\Users\\kratu\\PycharmProjects\\Projekt_AI-Automatyczny_saper\\Engine\\labels.csv"


class Neurons:
    def __init__(self):
        # Resize every photo to 100x100 and convert it to a float tensor in [0, 1].
        transform = transforms.Compose([
            transforms.Resize((100, 100)),
            transforms.ToTensor(),
        ])
        # NLLLoss works on integer class indices, so the labels from the CSV are used
        # as-is (no one-hot target transform is needed).
        dataset = CustomImageDataset(LABELS_CSV, IMG_DIR, transform)
        # Mapping from bomb-type constants to class indices.
        self.d = {ATOMIC_BOMB: 0, CLAYMORE: 1, LAND_MINE: 2, CHEMICAL_BOMB: 3, DECOY: 4}
        # self.loadImages()
        self.train_dataloader = DataLoader(dataset, batch_size=64, shuffle=True)
        input_dim = 3 * 100 * 100  # a flattened 100x100 RGB image
        output_dim = 10  # only indices 0-4 are produced by the label mapping
        hidden_dim = 10
        print("create model")
        self.model = nn.Sequential(
            nn.Flatten(),  # [N, 3, 100, 100] -> [N, 30000]
            nn.Linear(input_dim, hidden_dim),
            nn.ReLU(),
            nn.Linear(hidden_dim, output_dim),
            nn.LogSoftmax(dim=1)
        )
        print("model created")
        print("start learning...")
        torch.save(self.train(self.model, 100), 'model.pth')
        print("model learned and saved")
    def train(self, model, n_iter):
        criterion = nn.NLLLoss()
        optimizer = optim.SGD(model.parameters(), lr=0.001)
        for epoch in range(n_iter):
            for image, label in self.train_dataloader:
                optimizer.zero_grad()
                output = model(image)
                # output: [batch, output_dim] log-probabilities, label: [batch] class indices
                loss = criterion(output, label)
                loss.backward()
                optimizer.step()
            print(f'epoch: {epoch:03}')
        return model
    def accuracy(self, expected, predicted):
        return len([_ for e, p in zip(expected, predicted) if e == p]) / len(expected)
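
    # Illustrative sketch (not in the original file): one way the accuracy() helper
    # above could be used on a held-out set. The method name and the eval_dataloader
    # argument are hypothetical.
    def evaluate(self, model, eval_dataloader):
        expected, predicted = [], []
        model.eval()
        with torch.no_grad():
            for image, label in eval_dataloader:
                output = model(image)  # [batch, output_dim] log-probabilities
                predicted.extend(output.argmax(dim=1).tolist())
                expected.extend(label.tolist())
        return self.accuracy(expected, predicted)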
    def loadImages(self):
        # Every folder holds photos of a single bomb type; collect (file name, class index)
        # pairs and write them to labels.csv for CustomImageDataset.
        folders = [
            ('D:\\fotyProjekt\\clowns', DECOY),
            ('D:\\fotyProjekt\\chemical', CHEMICAL_BOMB),
            ('D:\\fotyProjekt\\atomic', ATOMIC_BOMB),
            ('D:\\fotyProjekt\\landmine', LAND_MINE),
            ('D:\\fotyProjekt\\claymore', CLAYMORE),
        ]
        images = []
        labels = []
        for col_dir, image_type in folders:
            imgs = os.listdir(col_dir)
            print("Load " + col_dir)
            for i in imgs:
                images.append(i)
                labels.append(self.getLabelsForImages(image_type))
        with open('labels.csv', 'w') as f:
            for img, lb in zip(images, labels):
                f.write(os.path.basename(img) + "," + str(lb) + "\n")
    def getLabelsForImages(self, imageType):
        return self.d.get(imageType)


class CustomImageDataset(Dataset):
    def __init__(self, annotations_file, img_dir, transform=None, target_transform=None):
        # labels.csv has no header row: column 0 is the file name, column 1 the class index.
        self.img_labels = pd.read_csv(annotations_file, header=None)
        self.img_dir = img_dir
        self.transform = transform
        self.target_transform = target_transform

    def __len__(self):
        return len(self.img_labels)

    def __getitem__(self, idx):
        img_path = os.path.join(self.img_dir, self.img_labels.iloc[idx, 0])
        image = Image.open(img_path).convert("RGB")
        label = self.img_labels.iloc[idx, 1]
        if self.transform:
            # ToTensor inside the transform already converts the image to float32
            # and normalizes the pixel values to the range 0-1.
            image = self.transform(image)
        if self.target_transform:
            label = self.target_transform(label)
        return image, label
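

# Illustrative sketch (not in the original file): how the model saved as 'model.pth'
# by Neurons could be loaded and used to classify a single photo. The function name
# and the img_path argument are hypothetical.
def predict_image(img_path, model_path='model.pth'):
    model = torch.load(model_path)
    model.eval()
    transform = transforms.Compose([
        transforms.Resize((100, 100)),
        transforms.ToTensor(),
    ])
    image = transform(Image.open(img_path).convert("RGB")).unsqueeze(0)  # add a batch dimension
    with torch.no_grad():
        log_probs = model(image)  # [1, output_dim] log-probabilities
    return log_probs.argmax(dim=1).item()  # predicted class index (0-4)
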
if __name__ == '__main__':
    Neurons()