tau-2020-pytorch-tutorial/pytorch9.py

#!/usr/bin/python3
import torch
import pandas as pd
from sklearn.model_selection import train_test_split
# Iris data: columns 0-3 are numeric features, column 4 is the species name.
data = pd.read_csv('iris.data.multilabel', sep=',', header=None)
NAMES_DICT = {
    'Iris-setosa': 0,
    'Iris-versicolor': 1,
    'Iris-virginica': 2}
# Map the species name (column 4) to an integer class label in a new column 5.
data[5] = data[4].apply(lambda x: NAMES_DICT[x])
x = torch.tensor(data[[0, 1, 2, 3]].values, dtype=torch.float)
y = torch.tensor(data[5].values, dtype=torch.long)
X_train, X_test, y_train, y_test = train_test_split(x, y, random_state=42)
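# Optional sanity check: every sample should have 4 features and a single
# integer label (torch.Size compares equal to a plain tuple).
assert x.shape == (len(data), 4) and y.shape == (len(data),)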
class Network(torch.nn.Module):
    def __init__(self):
        super(Network, self).__init__()
        # Single linear layer: 4 iris features in, 3 class scores out.
        self.fc = torch.nn.Linear(4, 3)

    def forward(self, x):
        x = self.fc(x)
        # Softmax over the class dimension; note that CrossEntropyLoss already
        # applies log-softmax internally, so it also accepts raw scores.
        x = torch.nn.functional.softmax(x, dim=1)
        return x
network = Network()
optimizer = torch.optim.SGD(network.parameters(), lr=0.002)
# Summed (not averaged) cross-entropy over each mini-batch.
criterion = torch.nn.CrossEntropyLoss(reduction='sum')
samples_in_batch = 5
for epoch in range(3000):
    network.train()
    # Iterate over the training set in mini-batches of samples_in_batch rows.
    for i in range(0, len(X_train), samples_in_batch):
        batch_x = X_train[i:i + samples_in_batch]
        batch_y = y_train[i:i + samples_in_batch]
        optimizer.zero_grad()
        ypredicted = network(batch_x)
        loss = criterion(ypredicted, batch_y)
        loss.backward()
        optimizer.step()
# Evaluate on the held-out test set after training.
network.eval()
predicted_correct = 0
loss_sum = 0
with torch.no_grad():
    for i in range(0, len(X_test), samples_in_batch):
        batch_x = X_test[i:i + samples_in_batch]
        batch_y = y_test[i:i + samples_in_batch]
        ypredicted = network(batch_x)
        y_most_probable_class = torch.max(ypredicted, 1)[1]
        loss_sum += criterion(ypredicted, batch_y).item()
        predicted_correct += (y_most_probable_class == batch_y).sum().item()
accuracy = 100 * predicted_correct / len(y_test)
print('{:.3}'.format(loss_sum), "\t => ", accuracy, '% accuracy')
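
# A minimal usage sketch: classify one hypothetical flower (the sepal/petal
# measurements below are made up for illustration) with the trained network.
with torch.no_grad():
    sample = torch.tensor([[5.1, 3.5, 1.4, 0.2]], dtype=torch.float)
    probabilities = network(sample)
    print('predicted class:', torch.max(probabilities, 1)[1].item())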