38 KiB
38 KiB
AITech — Uczenie maszynowe — laboratoria
10. Sieci neuronowe (PyTorch)
Przykład implementacji sieci neuronowej do rozpoznawania cyfr ze zbioru MNIST, według https://github.com/pytorch/examples/tree/master/mnist
import torch
import torch.nn as nn
import torch.nn.functional as F
import torch.optim as optim
from torchvision import datasets, transforms
from torch.optim.lr_scheduler import StepLR
class Net(nn.Module):
    """Convolutional network for MNIST digit classification.

    In PyTorch a neural network is defined as a class deriving from
    nn.Module; layers are declared in __init__ and the computation
    is expressed in forward().
    """
    def __init__(self):
        super().__init__()
        # Convolutional layers: 1 input channel -> 32 -> 64 feature maps.
        self.conv1 = nn.Conv2d(1, 32, 3, 1)
        self.conv2 = nn.Conv2d(32, 64, 3, 1)
        # Dropout layers for regularization.
        self.dropout1 = nn.Dropout(0.25)
        self.dropout2 = nn.Dropout(0.5)
        # Fully-connected layers: 9216 = 64 channels * 12 * 12 spatial.
        self.fc1 = nn.Linear(9216, 128)
        self.fc2 = nn.Linear(128, 10)

    def forward(self, x):
        """Forward pass: successive transformations of the input x.

        Returns per-class log-probabilities of shape (batch, 10).
        """
        features = F.relu(self.conv1(x))
        features = F.relu(self.conv2(features))
        features = self.dropout1(F.max_pool2d(features, 2))
        flat = torch.flatten(features, 1)
        hidden = self.dropout2(F.relu(self.fc1(flat)))
        return F.log_softmax(self.fc2(hidden), dim=1)
def train(model, device, train_loader, optimizer, epoch, log_interval, dry_run):
    """Run one training epoch over `train_loader`.

    Arguments:
        model - the network being trained
        device - torch.device to run on (CPU or CUDA)
        train_loader - DataLoader yielding (data, target) batches
        optimizer - optimizer updating the model's parameters
        epoch - current epoch number (for logging only)
        log_interval - log progress every this many batches
        dry_run - if True, stop after the first logged batch
    """
    model.train()  # switch to training mode (enables dropout)
    for batch_idx, (data, target) in enumerate(train_loader):
        # Move the batch onto the target device (GPU if applicable).
        data = data.to(device)
        target = target.to(device)
        optimizer.zero_grad()                       # reset accumulated gradients
        loss = F.nll_loss(model(data), target)      # forward pass + loss
        loss.backward()                             # backpropagation
        optimizer.step()                            # optimizer step
        if batch_idx % log_interval == 0:
            seen = batch_idx * len(data)
            total = len(train_loader.dataset)
            percent = 100. * batch_idx / len(train_loader)
            print('Train Epoch: {} [{}/{} ({:.0f}%)]\tLoss: {:.6f}'.format(
                epoch, seen, total, percent, loss.item()))
            if dry_run:
                break
def test(model, device, test_loader):
"""Testowanie modelu"""
model.eval()
test_loss = 0
correct = 0
with torch.no_grad():
for data, target in test_loader:
data, target = data.to(device), target.to(device) # wrzucenie danych na kartę graficzną (jeśli dotyczy)
output = model(data) # przejście "do przodu"
test_loss += F.nll_loss(output, target, reduction='sum').item() # suma kosztów z każdego batcha
pred = output.argmax(dim=1, keepdim=True) # predykcja na podstawie maks. logarytmu prawdopodobieństwa
correct += pred.eq(target.view_as(pred)).sum().item()
test_loss /= len(test_loader.dataset) # obliczenie kosztu na zbiorze testowym
print('\nTest set: Average loss: {:.4f}, Accuracy: {}/{} ({:.0f}%)\n'.format(
test_loss, correct, len(test_loader.dataset),
100. * correct / len(test_loader.dataset)))
def run(
    batch_size=64,
    test_batch_size=1000,
    epochs=14,
    lr=1.0,
    gamma=0.7,
    no_cuda=False,
    dry_run=False,
    seed=1,
    log_interval=10,
    save_model=False,
):
    """Main training function.

    Arguments:
        batch_size - batch size during training (default: 64)
        test_batch_size - batch size during testing (default: 1000)
        epochs - number of training epochs (default: 14)
        lr - learning rate (default: 1.0)
        gamma - learning-rate decay factor for the scheduler (default: 0.7)
        no_cuda - disables training on the GPU (default: False)
        dry_run - quickly check a single pass (default: False)
        seed - seed for the pseudo-random number generator (default: 1)
        log_interval - interval for logging training progress (default: 10)
        save_model - save the final model to disk (default: False)
    """
    # BUG FIX: the original read `no_cuda and torch.cuda.is_available()`,
    # which enabled CUDA only when the user asked to *disable* it (and
    # never used the GPU in the default case). The correct condition is:
    use_cuda = not no_cuda and torch.cuda.is_available()
    torch.manual_seed(seed)  # reproducibility
    device = torch.device("cuda" if use_cuda else "cpu")
    train_kwargs = {'batch_size': batch_size}
    test_kwargs = {'batch_size': test_batch_size}
    if use_cuda:
        # DataLoader settings recommended for GPU training.
        cuda_kwargs = {'num_workers': 1,
                       'pin_memory': True,
                       'shuffle': True}
        train_kwargs.update(cuda_kwargs)
        test_kwargs.update(cuda_kwargs)
    # Normalization constants are the MNIST training-set mean and std.
    transform = transforms.Compose([
        transforms.ToTensor(),
        transforms.Normalize((0.1307,), (0.3081,))
    ])
    dataset1 = datasets.MNIST('../data', train=True, download=True,
                              transform=transform)
    dataset2 = datasets.MNIST('../data', train=False,
                              transform=transform)
    train_loader = torch.utils.data.DataLoader(dataset1, **train_kwargs)
    test_loader = torch.utils.data.DataLoader(dataset2, **test_kwargs)
    model = Net().to(device)
    optimizer = optim.Adadelta(model.parameters(), lr=lr)
    # Decay the learning rate by `gamma` after every epoch.
    scheduler = StepLR(optimizer, step_size=1, gamma=gamma)
    for epoch in range(1, epochs + 1):
        train(model, device, train_loader, optimizer, epoch, log_interval, dry_run)
        test(model, device, test_loader)
        scheduler.step()
    if save_model:
        torch.save(model.state_dict(), "mnist_cnn.pt")
Uwaga: uruchomienie tego przykładu długo trwa. Żeby trwało krócej, można zmniejszyć liczbę epok.
# Train for 5 epochs (reduced from the default 14 to shorten the demo run).
run(epochs=5)
C:\Users\pawel\anaconda3\lib\site-packages\torch\autograd\__init__.py:130: UserWarning: CUDA initialization: The NVIDIA driver on your system is too old (found version 9020). Please update your GPU driver by downloading and installing a new version from the URL: http://www.nvidia.com/Download/index.aspx Alternatively, go to: https://pytorch.org to install a PyTorch version that has been compiled with your version of the CUDA driver. (Triggered internally at ..\c10\cuda\CUDAFunctions.cpp:100.) Variable._execution_engine.run_backward(
Train Epoch: 1 [0/60000 (0%)] Loss: 2.305400 Train Epoch: 1 [640/60000 (1%)] Loss: 1.359776 Train Epoch: 1 [1280/60000 (2%)] Loss: 0.842885 Train Epoch: 1 [1920/60000 (3%)] Loss: 0.587047 Train Epoch: 1 [2560/60000 (4%)] Loss: 0.368678 Train Epoch: 1 [3200/60000 (5%)] Loss: 0.468111 Train Epoch: 1 [3840/60000 (6%)] Loss: 0.264335 Train Epoch: 1 [4480/60000 (7%)] Loss: 0.288264 Train Epoch: 1 [5120/60000 (9%)] Loss: 0.579878 Train Epoch: 1 [5760/60000 (10%)] Loss: 0.225971 Train Epoch: 1 [6400/60000 (11%)] Loss: 0.235435 Train Epoch: 1 [7040/60000 (12%)] Loss: 0.334189 Train Epoch: 1 [7680/60000 (13%)] Loss: 0.205391 Train Epoch: 1 [8320/60000 (14%)] Loss: 0.224400 Train Epoch: 1 [8960/60000 (15%)] Loss: 0.265982 Train Epoch: 1 [9600/60000 (16%)] Loss: 0.110670 Train Epoch: 1 [10240/60000 (17%)] Loss: 0.266168 Train Epoch: 1 [10880/60000 (18%)] Loss: 0.086807 Train Epoch: 1 [11520/60000 (19%)] Loss: 0.417719 Train Epoch: 1 [12160/60000 (20%)] Loss: 0.276456 Train Epoch: 1 [12800/60000 (21%)] Loss: 0.242908 Train Epoch: 1 [13440/60000 (22%)] Loss: 0.221252 Train Epoch: 1 [14080/60000 (23%)] Loss: 0.130435 Train Epoch: 1 [14720/60000 (25%)] Loss: 0.371944 Train Epoch: 1 [15360/60000 (26%)] Loss: 0.143184 Train Epoch: 1 [16000/60000 (27%)] Loss: 0.132785 Train Epoch: 1 [16640/60000 (28%)] Loss: 0.167957 Train Epoch: 1 [17280/60000 (29%)] Loss: 0.075128 Train Epoch: 1 [17920/60000 (30%)] Loss: 0.200841 Train Epoch: 1 [18560/60000 (31%)] Loss: 0.176965 Train Epoch: 1 [19200/60000 (32%)] Loss: 0.277037 Train Epoch: 1 [19840/60000 (33%)] Loss: 0.068315 Train Epoch: 1 [20480/60000 (34%)] Loss: 0.035655 Train Epoch: 1 [21120/60000 (35%)] Loss: 0.225525 Train Epoch: 1 [21760/60000 (36%)] Loss: 0.012368 Train Epoch: 1 [22400/60000 (37%)] Loss: 0.077660 Train Epoch: 1 [23040/60000 (38%)] Loss: 0.235851 Train Epoch: 1 [23680/60000 (39%)] Loss: 0.140474 Train Epoch: 1 [24320/60000 (41%)] Loss: 0.014417 Train Epoch: 1 [24960/60000 (42%)] Loss: 0.090741 Train Epoch: 1 [25600/60000 
(43%)] Loss: 0.058374 Train Epoch: 1 [26240/60000 (44%)] Loss: 0.073511 Train Epoch: 1 [26880/60000 (45%)] Loss: 0.284830 Train Epoch: 1 [27520/60000 (46%)] Loss: 0.242107 Train Epoch: 1 [28160/60000 (47%)] Loss: 0.106403 Train Epoch: 1 [28800/60000 (48%)] Loss: 0.126598 Train Epoch: 1 [29440/60000 (49%)] Loss: 0.048677 Train Epoch: 1 [30080/60000 (50%)] Loss: 0.170355 Train Epoch: 1 [30720/60000 (51%)] Loss: 0.048502 Train Epoch: 1 [31360/60000 (52%)] Loss: 0.110658 Train Epoch: 1 [32000/60000 (53%)] Loss: 0.209499 Train Epoch: 1 [32640/60000 (54%)] Loss: 0.129011 Train Epoch: 1 [33280/60000 (55%)] Loss: 0.054514 Train Epoch: 1 [33920/60000 (57%)] Loss: 0.022598 Train Epoch: 1 [34560/60000 (58%)] Loss: 0.013603 Train Epoch: 1 [35200/60000 (59%)] Loss: 0.234786 Train Epoch: 1 [35840/60000 (60%)] Loss: 0.159701 Train Epoch: 1 [36480/60000 (61%)] Loss: 0.046117 Train Epoch: 1 [37120/60000 (62%)] Loss: 0.116941 Train Epoch: 1 [37760/60000 (63%)] Loss: 0.135829 Train Epoch: 1 [38400/60000 (64%)] Loss: 0.148995 Train Epoch: 1 [39040/60000 (65%)] Loss: 0.065900 Train Epoch: 1 [39680/60000 (66%)] Loss: 0.025586 Train Epoch: 1 [40320/60000 (67%)] Loss: 0.063601 Train Epoch: 1 [40960/60000 (68%)] Loss: 0.102640 Train Epoch: 1 [41600/60000 (69%)] Loss: 0.105056 Train Epoch: 1 [42240/60000 (70%)] Loss: 0.086704 Train Epoch: 1 [42880/60000 (71%)] Loss: 0.107370 Train Epoch: 1 [43520/60000 (72%)] Loss: 0.253792 Train Epoch: 1 [44160/60000 (74%)] Loss: 0.062311 Train Epoch: 1 [44800/60000 (75%)] Loss: 0.162836 Train Epoch: 1 [45440/60000 (76%)] Loss: 0.199484 Train Epoch: 1 [46080/60000 (77%)] Loss: 0.153846 Train Epoch: 1 [46720/60000 (78%)] Loss: 0.180161 Train Epoch: 1 [47360/60000 (79%)] Loss: 0.136180 Train Epoch: 1 [48000/60000 (80%)] Loss: 0.115283 Train Epoch: 1 [48640/60000 (81%)] Loss: 0.027290 Train Epoch: 1 [49280/60000 (82%)] Loss: 0.042729 Train Epoch: 1 [49920/60000 (83%)] Loss: 0.075887 Train Epoch: 1 [50560/60000 (84%)] Loss: 0.063403 Train Epoch: 1 [51200/60000 
(85%)] Loss: 0.313571 Train Epoch: 1 [51840/60000 (86%)] Loss: 0.013781 Train Epoch: 1 [52480/60000 (87%)] Loss: 0.033717 Train Epoch: 1 [53120/60000 (88%)] Loss: 0.182661 Train Epoch: 1 [53760/60000 (90%)] Loss: 0.039041 Train Epoch: 1 [54400/60000 (91%)] Loss: 0.099427 Train Epoch: 1 [55040/60000 (92%)] Loss: 0.016252 Train Epoch: 1 [55680/60000 (93%)] Loss: 0.077332 Train Epoch: 1 [56320/60000 (94%)] Loss: 0.057406 Train Epoch: 1 [56960/60000 (95%)] Loss: 0.107130 Train Epoch: 1 [57600/60000 (96%)] Loss: 0.126342 Train Epoch: 1 [58240/60000 (97%)] Loss: 0.031756 Train Epoch: 1 [58880/60000 (98%)] Loss: 0.009388 Train Epoch: 1 [59520/60000 (99%)] Loss: 0.001617 Test set: Average loss: 0.0452, Accuracy: 9848/10000 (98%) Train Epoch: 2 [0/60000 (0%)] Loss: 0.128514 Train Epoch: 2 [640/60000 (1%)] Loss: 0.056695 Train Epoch: 2 [1280/60000 (2%)] Loss: 0.034919 Train Epoch: 2 [1920/60000 (3%)] Loss: 0.125458 Train Epoch: 2 [2560/60000 (4%)] Loss: 0.052010 Train Epoch: 2 [3200/60000 (5%)] Loss: 0.043915 Train Epoch: 2 [3840/60000 (6%)] Loss: 0.015439 Train Epoch: 2 [4480/60000 (7%)] Loss: 0.063102 Train Epoch: 2 [5120/60000 (9%)] Loss: 0.121400 Train Epoch: 2 [5760/60000 (10%)] Loss: 0.114424 Train Epoch: 2 [6400/60000 (11%)] Loss: 0.212067 Train Epoch: 2 [7040/60000 (12%)] Loss: 0.195634 Train Epoch: 2 [7680/60000 (13%)] Loss: 0.075988 Train Epoch: 2 [8320/60000 (14%)] Loss: 0.032679 Train Epoch: 2 [8960/60000 (15%)] Loss: 0.111834 Train Epoch: 2 [9600/60000 (16%)] Loss: 0.027801 Train Epoch: 2 [10240/60000 (17%)] Loss: 0.073348 Train Epoch: 2 [10880/60000 (18%)] Loss: 0.033118 Train Epoch: 2 [11520/60000 (19%)] Loss: 0.172008 Train Epoch: 2 [12160/60000 (20%)] Loss: 0.057611 Train Epoch: 2 [12800/60000 (21%)] Loss: 0.064679 Train Epoch: 2 [13440/60000 (22%)] Loss: 0.006825 Train Epoch: 2 [14080/60000 (23%)] Loss: 0.019145 Train Epoch: 2 [14720/60000 (25%)] Loss: 0.094843 Train Epoch: 2 [15360/60000 (26%)] Loss: 0.047758 Train Epoch: 2 [16000/60000 (27%)] Loss: 
0.179497 Train Epoch: 2 [16640/60000 (28%)] Loss: 0.076738 Train Epoch: 2 [17280/60000 (29%)] Loss: 0.006352 Train Epoch: 2 [17920/60000 (30%)] Loss: 0.051825 Train Epoch: 2 [18560/60000 (31%)] Loss: 0.110851 Train Epoch: 2 [19200/60000 (32%)] Loss: 0.065105 Train Epoch: 2 [19840/60000 (33%)] Loss: 0.135653 Train Epoch: 2 [20480/60000 (34%)] Loss: 0.021735 Train Epoch: 2 [21120/60000 (35%)] Loss: 0.071245 Train Epoch: 2 [21760/60000 (36%)] Loss: 0.003421 Train Epoch: 2 [22400/60000 (37%)] Loss: 0.014809 Train Epoch: 2 [23040/60000 (38%)] Loss: 0.053631 Train Epoch: 2 [23680/60000 (39%)] Loss: 0.082716 Train Epoch: 2 [24320/60000 (41%)] Loss: 0.001589 Train Epoch: 2 [24960/60000 (42%)] Loss: 0.006215 Train Epoch: 2 [25600/60000 (43%)] Loss: 0.042557 Train Epoch: 2 [26240/60000 (44%)] Loss: 0.014680 Train Epoch: 2 [26880/60000 (45%)] Loss: 0.124249 Train Epoch: 2 [27520/60000 (46%)] Loss: 0.029917 Train Epoch: 2 [28160/60000 (47%)] Loss: 0.100452 Train Epoch: 2 [28800/60000 (48%)] Loss: 0.009274 Train Epoch: 2 [29440/60000 (49%)] Loss: 0.076723 Train Epoch: 2 [30080/60000 (50%)] Loss: 0.036926 Train Epoch: 2 [30720/60000 (51%)] Loss: 0.097355 Train Epoch: 2 [31360/60000 (52%)] Loss: 0.113212 Train Epoch: 2 [32000/60000 (53%)] Loss: 0.126080 Train Epoch: 2 [32640/60000 (54%)] Loss: 0.116121 Train Epoch: 2 [33280/60000 (55%)] Loss: 0.053296 Train Epoch: 2 [33920/60000 (57%)] Loss: 0.004935 Train Epoch: 2 [34560/60000 (58%)] Loss: 0.018139 Train Epoch: 2 [35200/60000 (59%)] Loss: 0.083827 Train Epoch: 2 [35840/60000 (60%)] Loss: 0.064212 Train Epoch: 2 [36480/60000 (61%)] Loss: 0.042852 Train Epoch: 2 [37120/60000 (62%)] Loss: 0.053815 Train Epoch: 2 [37760/60000 (63%)] Loss: 0.064109 Train Epoch: 2 [38400/60000 (64%)] Loss: 0.082490 Train Epoch: 2 [39040/60000 (65%)] Loss: 0.001922 Train Epoch: 2 [39680/60000 (66%)] Loss: 0.021783 Train Epoch: 2 [40320/60000 (67%)] Loss: 0.041218 Train Epoch: 2 [40960/60000 (68%)] Loss: 0.037719 Train Epoch: 2 [41600/60000 (69%)] Loss: 
0.040485 Train Epoch: 2 [42240/60000 (70%)] Loss: 0.025866 Train Epoch: 2 [42880/60000 (71%)] Loss: 0.079971 Train Epoch: 2 [43520/60000 (72%)] Loss: 0.051924 Train Epoch: 2 [44160/60000 (74%)] Loss: 0.003454 Train Epoch: 2 [44800/60000 (75%)] Loss: 0.059499 Train Epoch: 2 [45440/60000 (76%)] Loss: 0.109399 Train Epoch: 2 [46080/60000 (77%)] Loss: 0.078003 Train Epoch: 2 [46720/60000 (78%)] Loss: 0.111255 Train Epoch: 2 [47360/60000 (79%)] Loss: 0.061806 Train Epoch: 2 [48000/60000 (80%)] Loss: 0.039426 Train Epoch: 2 [48640/60000 (81%)] Loss: 0.035167 Train Epoch: 2 [49280/60000 (82%)] Loss: 0.027696 Train Epoch: 2 [49920/60000 (83%)] Loss: 0.021057 Train Epoch: 2 [50560/60000 (84%)] Loss: 0.040626 Train Epoch: 2 [51200/60000 (85%)] Loss: 0.150808 Train Epoch: 2 [51840/60000 (86%)] Loss: 0.026038 Train Epoch: 2 [52480/60000 (87%)] Loss: 0.014357 Train Epoch: 2 [53120/60000 (88%)] Loss: 0.030147 Train Epoch: 2 [53760/60000 (90%)] Loss: 0.085780 Train Epoch: 2 [54400/60000 (91%)] Loss: 0.028594 Train Epoch: 2 [55040/60000 (92%)] Loss: 0.037993 Train Epoch: 2 [55680/60000 (93%)] Loss: 0.070294 Train Epoch: 2 [56320/60000 (94%)] Loss: 0.035509 Train Epoch: 2 [56960/60000 (95%)] Loss: 0.022443 Train Epoch: 2 [57600/60000 (96%)] Loss: 0.034794 Train Epoch: 2 [58240/60000 (97%)] Loss: 0.017368 Train Epoch: 2 [58880/60000 (98%)] Loss: 0.016261 Train Epoch: 2 [59520/60000 (99%)] Loss: 0.009625 Test set: Average loss: 0.0345, Accuracy: 9876/10000 (99%) Train Epoch: 3 [0/60000 (0%)] Loss: 0.017473 Train Epoch: 3 [640/60000 (1%)] Loss: 0.018726 Train Epoch: 3 [1280/60000 (2%)] Loss: 0.012606 Train Epoch: 3 [1920/60000 (3%)] Loss: 0.078804 Train Epoch: 3 [2560/60000 (4%)] Loss: 0.044306 Train Epoch: 3 [3200/60000 (5%)] Loss: 0.054774 Train Epoch: 3 [3840/60000 (6%)] Loss: 0.028103 Train Epoch: 3 [4480/60000 (7%)] Loss: 0.017842 Train Epoch: 3 [5120/60000 (9%)] Loss: 0.051417 Train Epoch: 3 [5760/60000 (10%)] Loss: 0.021005 Train Epoch: 3 [6400/60000 (11%)] Loss: 0.079213 Train 
Epoch: 3 [7040/60000 (12%)] Loss: 0.249057 Train Epoch: 3 [7680/60000 (13%)] Loss: 0.021483 Train Epoch: 3 [8320/60000 (14%)] Loss: 0.049537 Train Epoch: 3 [8960/60000 (15%)] Loss: 0.064109 Train Epoch: 3 [9600/60000 (16%)] Loss: 0.121206 Train Epoch: 3 [10240/60000 (17%)] Loss: 0.272828 Train Epoch: 3 [10880/60000 (18%)] Loss: 0.011667 Train Epoch: 3 [11520/60000 (19%)] Loss: 0.074186 Train Epoch: 3 [12160/60000 (20%)] Loss: 0.020923 Train Epoch: 3 [12800/60000 (21%)] Loss: 0.071615 Train Epoch: 3 [13440/60000 (22%)] Loss: 0.032925 Train Epoch: 3 [14080/60000 (23%)] Loss: 0.020151 Train Epoch: 3 [14720/60000 (25%)] Loss: 0.137694 Train Epoch: 3 [15360/60000 (26%)] Loss: 0.014524 Train Epoch: 3 [16000/60000 (27%)] Loss: 0.018904 Train Epoch: 3 [16640/60000 (28%)] Loss: 0.115159 Train Epoch: 3 [17280/60000 (29%)] Loss: 0.001221 Train Epoch: 3 [17920/60000 (30%)] Loss: 0.039947 Train Epoch: 3 [18560/60000 (31%)] Loss: 0.027275 Train Epoch: 3 [19200/60000 (32%)] Loss: 0.115719 Train Epoch: 3 [19840/60000 (33%)] Loss: 0.056799 Train Epoch: 3 [20480/60000 (34%)] Loss: 0.003543 Train Epoch: 3 [21120/60000 (35%)] Loss: 0.093628 Train Epoch: 3 [21760/60000 (36%)] Loss: 0.041564 Train Epoch: 3 [22400/60000 (37%)] Loss: 0.001555 Train Epoch: 3 [23040/60000 (38%)] Loss: 0.047547 Train Epoch: 3 [23680/60000 (39%)] Loss: 0.028232 Train Epoch: 3 [24320/60000 (41%)] Loss: 0.002724 Train Epoch: 3 [24960/60000 (42%)] Loss: 0.014905 Train Epoch: 3 [25600/60000 (43%)] Loss: 0.077347 Train Epoch: 3 [26240/60000 (44%)] Loss: 0.055335 Train Epoch: 3 [26880/60000 (45%)] Loss: 0.034777 Train Epoch: 3 [27520/60000 (46%)] Loss: 0.137610 Train Epoch: 3 [28160/60000 (47%)] Loss: 0.087771 Train Epoch: 3 [28800/60000 (48%)] Loss: 0.031867 Train Epoch: 3 [29440/60000 (49%)] Loss: 0.024958 Train Epoch: 3 [30080/60000 (50%)] Loss: 0.047101 Train Epoch: 3 [30720/60000 (51%)] Loss: 0.094225 Train Epoch: 3 [31360/60000 (52%)] Loss: 0.076411 Train Epoch: 3 [32000/60000 (53%)] Loss: 0.029375 Train 
Epoch: 3 [32640/60000 (54%)] Loss: 0.003572 Train Epoch: 3 [33280/60000 (55%)] Loss: 0.081241 Train Epoch: 3 [33920/60000 (57%)] Loss: 0.001588 Train Epoch: 3 [34560/60000 (58%)] Loss: 0.002668 Train Epoch: 3 [35200/60000 (59%)] Loss: 0.061726 Train Epoch: 3 [35840/60000 (60%)] Loss: 0.061300 Train Epoch: 3 [36480/60000 (61%)] Loss: 0.012152 Train Epoch: 3 [37120/60000 (62%)] Loss: 0.042971 Train Epoch: 3 [37760/60000 (63%)] Loss: 0.053396 Train Epoch: 3 [38400/60000 (64%)] Loss: 0.072361 Train Epoch: 3 [39040/60000 (65%)] Loss: 0.001462 Train Epoch: 3 [39680/60000 (66%)] Loss: 0.027137 Train Epoch: 3 [40320/60000 (67%)] Loss: 0.054929 Train Epoch: 3 [40960/60000 (68%)] Loss: 0.052149 Train Epoch: 3 [41600/60000 (69%)] Loss: 0.042770 Train Epoch: 3 [42240/60000 (70%)] Loss: 0.022091 Train Epoch: 3 [42880/60000 (71%)] Loss: 0.040765 Train Epoch: 3 [43520/60000 (72%)] Loss: 0.065408 Train Epoch: 3 [44160/60000 (74%)] Loss: 0.002670 Train Epoch: 3 [44800/60000 (75%)] Loss: 0.020735 Train Epoch: 3 [45440/60000 (76%)] Loss: 0.035558 Train Epoch: 3 [46080/60000 (77%)] Loss: 0.086752 Train Epoch: 3 [46720/60000 (78%)] Loss: 0.063626 Train Epoch: 3 [47360/60000 (79%)] Loss: 0.066880 Train Epoch: 3 [48000/60000 (80%)] Loss: 0.028604 Train Epoch: 3 [48640/60000 (81%)] Loss: 0.012193 Train Epoch: 3 [49280/60000 (82%)] Loss: 0.002023 Train Epoch: 3 [49920/60000 (83%)] Loss: 0.005326 Train Epoch: 3 [50560/60000 (84%)] Loss: 0.028037 Train Epoch: 3 [51200/60000 (85%)] Loss: 0.041471 Train Epoch: 3 [51840/60000 (86%)] Loss: 0.034811 Train Epoch: 3 [52480/60000 (87%)] Loss: 0.005038 Train Epoch: 3 [53120/60000 (88%)] Loss: 0.037799 Train Epoch: 3 [53760/60000 (90%)] Loss: 0.159812 Train Epoch: 3 [54400/60000 (91%)] Loss: 0.021355 Train Epoch: 3 [55040/60000 (92%)] Loss: 0.006514 Train Epoch: 3 [55680/60000 (93%)] Loss: 0.058171 Train Epoch: 3 [56320/60000 (94%)] Loss: 0.011602 Train Epoch: 3 [56960/60000 (95%)] Loss: 0.008109 Train Epoch: 3 [57600/60000 (96%)] Loss: 0.068050 Train 
Epoch: 3 [58240/60000 (97%)] Loss: 0.010048 Train Epoch: 3 [58880/60000 (98%)] Loss: 0.004794 Train Epoch: 3 [59520/60000 (99%)] Loss: 0.000842 Test set: Average loss: 0.0319, Accuracy: 9889/10000 (99%) Train Epoch: 4 [0/60000 (0%)] Loss: 0.011954 Train Epoch: 4 [640/60000 (1%)] Loss: 0.011093 Train Epoch: 4 [1280/60000 (2%)] Loss: 0.022356 Train Epoch: 4 [1920/60000 (3%)] Loss: 0.066412 Train Epoch: 4 [2560/60000 (4%)] Loss: 0.008823 Train Epoch: 4 [3200/60000 (5%)] Loss: 0.006209 Train Epoch: 4 [3840/60000 (6%)] Loss: 0.016633 Train Epoch: 4 [4480/60000 (7%)] Loss: 0.046557 Train Epoch: 4 [5120/60000 (9%)] Loss: 0.236557 Train Epoch: 4 [5760/60000 (10%)] Loss: 0.016318 Train Epoch: 4 [6400/60000 (11%)] Loss: 0.095599 Train Epoch: 4 [7040/60000 (12%)] Loss: 0.109512 Train Epoch: 4 [7680/60000 (13%)] Loss: 0.025031 Train Epoch: 4 [8320/60000 (14%)] Loss: 0.022703 Train Epoch: 4 [8960/60000 (15%)] Loss: 0.072901 Train Epoch: 4 [9600/60000 (16%)] Loss: 0.027679 Train Epoch: 4 [10240/60000 (17%)] Loss: 0.100027 Train Epoch: 4 [10880/60000 (18%)] Loss: 0.022117 Train Epoch: 4 [11520/60000 (19%)] Loss: 0.058990 Train Epoch: 4 [12160/60000 (20%)] Loss: 0.022886 Train Epoch: 4 [12800/60000 (21%)] Loss: 0.014279 Train Epoch: 4 [13440/60000 (22%)] Loss: 0.009374 Train Epoch: 4 [14080/60000 (23%)] Loss: 0.004224 Train Epoch: 4 [14720/60000 (25%)] Loss: 0.128787 Train Epoch: 4 [15360/60000 (26%)] Loss: 0.006627 Train Epoch: 4 [16000/60000 (27%)] Loss: 0.045232 Train Epoch: 4 [16640/60000 (28%)] Loss: 0.126329 Train Epoch: 4 [17280/60000 (29%)] Loss: 0.002526 Train Epoch: 4 [17920/60000 (30%)] Loss: 0.062796 Train Epoch: 4 [18560/60000 (31%)] Loss: 0.006109 Train Epoch: 4 [19200/60000 (32%)] Loss: 0.032889 Train Epoch: 4 [19840/60000 (33%)] Loss: 0.053419 Train Epoch: 4 [20480/60000 (34%)] Loss: 0.003135 Train Epoch: 4 [21120/60000 (35%)] Loss: 0.087492 Train Epoch: 4 [21760/60000 (36%)] Loss: 0.005437 Train Epoch: 4 [22400/60000 (37%)] Loss: 0.001357 Train Epoch: 4 
[23040/60000 (38%)] Loss: 0.199949 Train Epoch: 4 [23680/60000 (39%)] Loss: 0.018877 Train Epoch: 4 [24320/60000 (41%)] Loss: 0.016835 Train Epoch: 4 [24960/60000 (42%)] Loss: 0.007058 Train Epoch: 4 [25600/60000 (43%)] Loss: 0.036731 Train Epoch: 4 [26240/60000 (44%)] Loss: 0.013287 Train Epoch: 4 [26880/60000 (45%)] Loss: 0.090547 Train Epoch: 4 [27520/60000 (46%)] Loss: 0.068249 Train Epoch: 4 [28160/60000 (47%)] Loss: 0.065214 Train Epoch: 4 [28800/60000 (48%)] Loss: 0.005579 Train Epoch: 4 [29440/60000 (49%)] Loss: 0.010757 Train Epoch: 4 [30080/60000 (50%)] Loss: 0.013080 Train Epoch: 4 [30720/60000 (51%)] Loss: 0.004695 Train Epoch: 4 [31360/60000 (52%)] Loss: 0.009816 Train Epoch: 4 [32000/60000 (53%)] Loss: 0.097901 Train Epoch: 4 [32640/60000 (54%)] Loss: 0.008036 Train Epoch: 4 [33280/60000 (55%)] Loss: 0.025720 Train Epoch: 4 [33920/60000 (57%)] Loss: 0.007743 Train Epoch: 4 [34560/60000 (58%)] Loss: 0.010240 Train Epoch: 4 [35200/60000 (59%)] Loss: 0.040739 Train Epoch: 4 [35840/60000 (60%)] Loss: 0.046888 Train Epoch: 4 [36480/60000 (61%)] Loss: 0.002148 Train Epoch: 4 [37120/60000 (62%)] Loss: 0.018123 Train Epoch: 4 [37760/60000 (63%)] Loss: 0.138039 Train Epoch: 4 [38400/60000 (64%)] Loss: 0.092445 Train Epoch: 4 [39040/60000 (65%)] Loss: 0.004439 Train Epoch: 4 [39680/60000 (66%)] Loss: 0.059561 Train Epoch: 4 [40320/60000 (67%)] Loss: 0.016702 Train Epoch: 4 [40960/60000 (68%)] Loss: 0.048608 Train Epoch: 4 [41600/60000 (69%)] Loss: 0.043941 Train Epoch: 4 [42240/60000 (70%)] Loss: 0.028248 Train Epoch: 4 [42880/60000 (71%)] Loss: 0.004207 Train Epoch: 4 [43520/60000 (72%)] Loss: 0.050349 Train Epoch: 4 [44160/60000 (74%)] Loss: 0.004836 Train Epoch: 4 [44800/60000 (75%)] Loss: 0.039172 Train Epoch: 4 [45440/60000 (76%)] Loss: 0.060112 Train Epoch: 4 [46080/60000 (77%)] Loss: 0.038748 Train Epoch: 4 [46720/60000 (78%)] Loss: 0.027801 Train Epoch: 4 [47360/60000 (79%)] Loss: 0.043409 Train Epoch: 4 [48000/60000 (80%)] Loss: 0.023842 Train Epoch: 4 
[48640/60000 (81%)] Loss: 0.043613 Train Epoch: 4 [49280/60000 (82%)] Loss: 0.005819 Train Epoch: 4 [49920/60000 (83%)] Loss: 0.013224 Train Epoch: 4 [50560/60000 (84%)] Loss: 0.008549 Train Epoch: 4 [51200/60000 (85%)] Loss: 0.115843 Train Epoch: 4 [51840/60000 (86%)] Loss: 0.012308 Train Epoch: 4 [52480/60000 (87%)] Loss: 0.024157 Train Epoch: 4 [53120/60000 (88%)] Loss: 0.003395 Train Epoch: 4 [53760/60000 (90%)] Loss: 0.084941 Train Epoch: 4 [54400/60000 (91%)] Loss: 0.057644 Train Epoch: 4 [55040/60000 (92%)] Loss: 0.002062 Train Epoch: 4 [55680/60000 (93%)] Loss: 0.038266 Train Epoch: 4 [56320/60000 (94%)] Loss: 0.006398 Train Epoch: 4 [56960/60000 (95%)] Loss: 0.007706 Train Epoch: 4 [57600/60000 (96%)] Loss: 0.027255 Train Epoch: 4 [58240/60000 (97%)] Loss: 0.044076 Train Epoch: 4 [58880/60000 (98%)] Loss: 0.000889 Train Epoch: 4 [59520/60000 (99%)] Loss: 0.001196 Test set: Average loss: 0.0311, Accuracy: 9886/10000 (99%) Train Epoch: 5 [0/60000 (0%)] Loss: 0.015992 Train Epoch: 5 [640/60000 (1%)] Loss: 0.012034 Train Epoch: 5 [1280/60000 (2%)] Loss: 0.012463 Train Epoch: 5 [1920/60000 (3%)] Loss: 0.053295 Train Epoch: 5 [2560/60000 (4%)] Loss: 0.013971 Train Epoch: 5 [3200/60000 (5%)] Loss: 0.008351 Train Epoch: 5 [3840/60000 (6%)] Loss: 0.000522 Train Epoch: 5 [4480/60000 (7%)] Loss: 0.056046 Train Epoch: 5 [5120/60000 (9%)] Loss: 0.226117 Train Epoch: 5 [5760/60000 (10%)] Loss: 0.024622 Train Epoch: 5 [6400/60000 (11%)] Loss: 0.114540 Train Epoch: 5 [7040/60000 (12%)] Loss: 0.164275 Train Epoch: 5 [7680/60000 (13%)] Loss: 0.015020 Train Epoch: 5 [8320/60000 (14%)] Loss: 0.009615 Train Epoch: 5 [8960/60000 (15%)] Loss: 0.060808 Train Epoch: 5 [9600/60000 (16%)] Loss: 0.021185 Train Epoch: 5 [10240/60000 (17%)] Loss: 0.071090 Train Epoch: 5 [10880/60000 (18%)] Loss: 0.004819 Train Epoch: 5 [11520/60000 (19%)] Loss: 0.044744 Train Epoch: 5 [12160/60000 (20%)] Loss: 0.036432 Train Epoch: 5 [12800/60000 (21%)] Loss: 0.007292 Train Epoch: 5 [13440/60000 (22%)] 
Loss: 0.005680 Train Epoch: 5 [14080/60000 (23%)] Loss: 0.003425 Train Epoch: 5 [14720/60000 (25%)] Loss: 0.055383 Train Epoch: 5 [15360/60000 (26%)] Loss: 0.007300 Train Epoch: 5 [16000/60000 (27%)] Loss: 0.034897 Train Epoch: 5 [16640/60000 (28%)] Loss: 0.126585 Train Epoch: 5 [17280/60000 (29%)] Loss: 0.001609 Train Epoch: 5 [17920/60000 (30%)] Loss: 0.011380 Train Epoch: 5 [18560/60000 (31%)] Loss: 0.031130 Train Epoch: 5 [19200/60000 (32%)] Loss: 0.030126 Train Epoch: 5 [19840/60000 (33%)] Loss: 0.111376 Train Epoch: 5 [20480/60000 (34%)] Loss: 0.005547 Train Epoch: 5 [21120/60000 (35%)] Loss: 0.123237 Train Epoch: 5 [21760/60000 (36%)] Loss: 0.023191 Train Epoch: 5 [22400/60000 (37%)] Loss: 0.001363 Train Epoch: 5 [23040/60000 (38%)] Loss: 0.057234 Train Epoch: 5 [23680/60000 (39%)] Loss: 0.015569 Train Epoch: 5 [24320/60000 (41%)] Loss: 0.000795 Train Epoch: 5 [24960/60000 (42%)] Loss: 0.000723 Train Epoch: 5 [25600/60000 (43%)] Loss: 0.014871 Train Epoch: 5 [26240/60000 (44%)] Loss: 0.007171 Train Epoch: 5 [26880/60000 (45%)] Loss: 0.117038 Train Epoch: 5 [27520/60000 (46%)] Loss: 0.111855 Train Epoch: 5 [28160/60000 (47%)] Loss: 0.018824 Train Epoch: 5 [28800/60000 (48%)] Loss: 0.012503 Train Epoch: 5 [29440/60000 (49%)] Loss: 0.056160 Train Epoch: 5 [30080/60000 (50%)] Loss: 0.043957 Train Epoch: 5 [30720/60000 (51%)] Loss: 0.001754 Train Epoch: 5 [31360/60000 (52%)] Loss: 0.091498 Train Epoch: 5 [32000/60000 (53%)] Loss: 0.018654 Train Epoch: 5 [32640/60000 (54%)] Loss: 0.023146 Train Epoch: 5 [33280/60000 (55%)] Loss: 0.036612 Train Epoch: 5 [33920/60000 (57%)] Loss: 0.002565 Train Epoch: 5 [34560/60000 (58%)] Loss: 0.003447 Train Epoch: 5 [35200/60000 (59%)] Loss: 0.110711 Train Epoch: 5 [35840/60000 (60%)] Loss: 0.031876 Train Epoch: 5 [36480/60000 (61%)] Loss: 0.009661 Train Epoch: 5 [37120/60000 (62%)] Loss: 0.053748 Train Epoch: 5 [37760/60000 (63%)] Loss: 0.079816 Train Epoch: 5 [38400/60000 (64%)] Loss: 0.052890 Train Epoch: 5 [39040/60000 (65%)] 
Loss: 0.001838 Train Epoch: 5 [39680/60000 (66%)] Loss: 0.032443 Train Epoch: 5 [40320/60000 (67%)] Loss: 0.016371 Train Epoch: 5 [40960/60000 (68%)] Loss: 0.032993 Train Epoch: 5 [41600/60000 (69%)] Loss: 0.009191 Train Epoch: 5 [42240/60000 (70%)] Loss: 0.012432 Train Epoch: 5 [42880/60000 (71%)] Loss: 0.021050 Train Epoch: 5 [43520/60000 (72%)] Loss: 0.014490 Train Epoch: 5 [44160/60000 (74%)] Loss: 0.003937 Train Epoch: 5 [44800/60000 (75%)] Loss: 0.023810 Train Epoch: 5 [45440/60000 (76%)] Loss: 0.024212 Train Epoch: 5 [46080/60000 (77%)] Loss: 0.032333 Train Epoch: 5 [46720/60000 (78%)] Loss: 0.081611 Train Epoch: 5 [47360/60000 (79%)] Loss: 0.055151 Train Epoch: 5 [48000/60000 (80%)] Loss: 0.046237 Train Epoch: 5 [48640/60000 (81%)] Loss: 0.007069 Train Epoch: 5 [49280/60000 (82%)] Loss: 0.004486 Train Epoch: 5 [49920/60000 (83%)] Loss: 0.021935 Train Epoch: 5 [50560/60000 (84%)] Loss: 0.009369 Train Epoch: 5 [51200/60000 (85%)] Loss: 0.133733 Train Epoch: 5 [51840/60000 (86%)] Loss: 0.004490 Train Epoch: 5 [52480/60000 (87%)] Loss: 0.004431 Train Epoch: 5 [53120/60000 (88%)] Loss: 0.022499 Train Epoch: 5 [53760/60000 (90%)] Loss: 0.111768 Train Epoch: 5 [54400/60000 (91%)] Loss: 0.021636 Train Epoch: 5 [55040/60000 (92%)] Loss: 0.002808 Train Epoch: 5 [55680/60000 (93%)] Loss: 0.007162 Train Epoch: 5 [56320/60000 (94%)] Loss: 0.012326 Train Epoch: 5 [56960/60000 (95%)] Loss: 0.002056 Train Epoch: 5 [57600/60000 (96%)] Loss: 0.003829 Train Epoch: 5 [58240/60000 (97%)] Loss: 0.013328 Train Epoch: 5 [58880/60000 (98%)] Loss: 0.000146 Train Epoch: 5 [59520/60000 (99%)] Loss: 0.000575 Test set: Average loss: 0.0299, Accuracy: 9903/10000 (99%)
Polecam również bibliotekę PyTorch-Lightning, dzięki której kod PyTorcha staje się trochę bardziej "uporządkowany".
Tutaj artykuł o tym, jak stworzyć dataloader dla danych z własnego pliku CSV: https://androidkt.com/load-pandas-dataframe-using-dataset-and-dataloader-in-pytorch