projekt_uma/UMA_projekt.ipynb
Mikołaj Pokrywka 2ca9f74c9b Projekt uma
2022-06-19 16:46:10 +02:00

58 KiB
Raw Blame History

import time, gc

# Timing utilities
start_time = None

def start_timer():
    """Reset CUDA memory statistics and record a wall-clock start time.

    Call before a workload; pair with end_timer_and_print() afterwards.
    """
    global start_time
    gc.collect()
    torch.cuda.empty_cache()
    # reset_max_memory_allocated() is deprecated and merely forwards to
    # reset_peak_memory_stats() while emitting a FutureWarning (visible in
    # the captured cell output) — call the replacement directly.
    torch.cuda.reset_peak_memory_stats()
    # Make sure no prior GPU work is still in flight before timing starts.
    torch.cuda.synchronize()
    start_time = time.time()

def end_timer_and_print(local_msg):
    """Print *local_msg*, elapsed wall-clock time, and peak CUDA tensor memory.

    Assumes start_timer() was called earlier to set the global start_time.
    """
    # Wait for outstanding GPU work so the elapsed time is meaningful.
    torch.cuda.synchronize()
    elapsed = time.time() - start_time
    peak_bytes = torch.cuda.max_memory_allocated()
    print("\n" + local_msg)
    print(f"Total execution time = {elapsed:.3f} sec")
    print(f"Max memory used by tensors = {peak_bytes} bytes")
# Enumerate the compute devices TensorFlow can see (CPU plus any GPUs).
from tensorflow.python.client import device_lib

local_devices = device_lib.list_local_devices()
local_devices
[name: "/device:CPU:0"
 device_type: "CPU"
 memory_limit: 268435456
 locality {
 }
 incarnation: 7116988186229065702
 xla_global_id: -1, name: "/device:GPU:0"
 device_type: "GPU"
 memory_limit: 14465892352
 locality {
   bus_id: 1
   links {
   }
 }
 incarnation: 10048785647988876421
 physical_device_desc: "device: 0, name: Tesla T4, pci bus id: 0000:00:04.0, compute capability: 7.5"
 xla_global_id: 416903419]
import pandas as pd
import numpy as np
from sklearn.datasets import fetch_20newsgroups
from sklearn.feature_extraction.text import TfidfVectorizer
from sklearn.model_selection import train_test_split
import torch
import scipy
# !unzip real-or-fake-fake-jobposting-prediction.zip

# Load the postings, keep only the company-profile text and the fraud
# label, shuffle, and drop rows with a missing profile.
data = pd.read_csv('fake_job_postings.csv', engine='python')
data = data[["company_profile", "fraudulent"]]
# Seed the shuffle so the train/dev/test splits below are reproducible
# across runs (the train_test_split calls already pin random_state=1;
# the unseeded sample() made every run use different rows anyway).
data = data.sample(frac=1, random_state=1)
data = data.dropna()
data
company_profile fraudulent
16503 At Hayes-Corp, we create the fun stuff.  With ... 0
16706 Tribal Worldwide Athens is a digitally centric... 0
3364 About ECHOING GREEN:  Echoing Green unleashes ... 0
16856 Daily Secret is the fastest growing digital me... 0
1566 ding* is the worlds largest top-up provider. ... 0
... ... ...
7607 Established on the principles that full time e... 0
682 AGOGO creates a personalized audio channel by ... 0
2759 We are a family run business that has been in ... 0
5751 We have aggressive growth plans in place for t... 1
3629 Want to build a 21st century financial service... 0

14572 rows × 2 columns

# Hold out 2000 rows, then split the hold-out evenly into dev and test
# (1000 each); the remaining rows form the training set.
data_train, data_test = train_test_split(data, test_size=2000, random_state=1)
data_dev, data_test = train_test_split(data_test, test_size=1000, random_state=1)
len(data_train), len(data_dev), len(data_test)
(12572, 1000, 1000)
# Extract text features (company_profile) and labels (fraudulent) from
# each split directly as NumPy arrays — the original went through
# intermediate Series assignments and then re-wrapped arrays that were
# already ndarrays.
x_train = np.array(data_train["company_profile"])
x_dev = np.array(data_dev["company_profile"])
x_test = np.array(data_test["company_profile"])

y_train = np.array(data_train["fraudulent"])
y_dev = np.array(data_dev["fraudulent"])
y_test = np.array(data_test["fraudulent"])

# Keep host-side NumPy copies of the labels: y_* are converted to CUDA
# tensors later, while the *_np versions stay available for sklearn.
y_train_np = np.array(y_train)
y_dev_np = np.array(y_dev)
y_test_np = np.array(y_test)
# TF-IDF encode the profiles: fit the vocabulary on the training split
# only, then apply the same transform to dev and test.
vectorizer = TfidfVectorizer()
import copy  # NOTE(review): `copy` appears unused in this file
x_train = vectorizer.fit_transform(x_train)
x_dev = vectorizer.transform(x_dev)
x_test = vectorizer.transform(x_test)

# Keep sparse host-side copies for the sklearn baseline below.
x_train_np, x_dev_np, x_test_np = x_train.copy(), x_dev.copy(), x_test.copy()
device = 'cuda'

# Densify the sparse TF-IDF matrices and move them to the GPU as float32.
x_train = torch.tensor(x_train.todense(), device=device).float()
x_dev = torch.tensor(x_dev.todense(), device=device).float()
x_test = torch.tensor(x_test.todense(), device=device).float()

# Labels go to the GPU as well (integer dtype kept, as NLLLoss expects).
y_train = torch.tensor(y_train, device=device)
y_dev = torch.tensor(y_dev, device=device)
y_test = torch.tensor(y_test, device=device)
from sklearn.linear_model import LogisticRegression

# Baseline: time an sklearn logistic-regression fit on the host-side data.
start_timer()
reg = LogisticRegression()
reg = reg.fit(x_train_np, y_train_np)
end_timer_and_print("Logistic regression: ")
/usr/local/lib/python3.7/dist-packages/torch/cuda/memory.py:274: FutureWarning: torch.cuda.reset_max_memory_allocated now calls torch.cuda.reset_peak_memory_stats, which resets /all/ peak memory stats.
  FutureWarning)
Logistic regression: 
Total execution time = 0.365 sec
Max memory used by tensors = 2335263744 bytes
from sklearn.metrics import f1_score
from sklearn.metrics import accuracy_score

# Score the logistic-regression baseline on the held-out test split.
baseline_preds = reg.predict(x_test_np)
print('F-score: ', f1_score(y_test_np, baseline_preds, average='macro'))

print('Accuracy: ', accuracy_score(y_test_np, baseline_preds))
F-score:  0.8685964220682922
Accuracy:  0.993
device = "cuda"

def prepare_batches(X, Y, batch_size):
    """Slice X and Y into parallel lists of equally sized minibatches.

    Each Y slice is reshaped to a column vector (batch_size, 1). Only
    full batches are kept: a trailing partial batch is dropped, which
    preserves the original intent. Unlike the original
    `range(0, len(X)-1, ...)` followed by an unconditional `[0:-1]`,
    this no longer throws away a *complete* final batch when len(X) is
    an exact multiple of batch_size.
    """
    data_X = []
    data_Y = []
    for i in range(0, len(X), batch_size):
        chunk = X[i:i + batch_size]
        if len(chunk) < batch_size:
            break  # drop the trailing partial batch only
        data_X.append(chunk)
        data_Y.append(Y[i:i + batch_size].reshape(-1, 1))
    return data_X, data_Y
# Hidden width of every linear layer, and number of training epochs.
size = 512
epochs = 150

from torch import nn
from torch import optim
# Feed-forward classifier: input width = TF-IDF vocabulary size, then a
# stack of ReLU-activated hidden layers of `size` units, ending with one
# output unit per class (nunique() of the label column) and LogSoftmax,
# which pairs with NLLLoss below.
model = nn.Sequential(
        nn.Linear(x_train.shape[1], size),
        nn.ReLU(),

        nn.Linear(size, size),
        nn.ReLU(),
        nn.Linear(size, size),
        nn.ReLU(),

        nn.Linear(size, size),
        nn.ReLU(),
        nn.Linear(size, size),
        nn.ReLU(),

        nn.Linear(size, size),
        nn.ReLU(),
        nn.Linear(size, size),
        nn.ReLU(),

        nn.Linear(size, size),
        nn.ReLU(),
        nn.Linear(size, data_train["fraudulent"].nunique()),
        
        nn.LogSoftmax(dim=1))
model.cuda()
# Negative log-likelihood loss matches the LogSoftmax output layer.
criterion = nn.NLLLoss()
# NOTE(review): this one-off forward/backward runs before the optimizer
# exists; the gradients it leaves in .grad are wiped by the first
# optimizer.zero_grad() in the training loop, so it appears to serve only
# as a CUDA warm-up — confirm it is intentional.
logps = model(x_train)
loss = criterion(logps, y_train)
loss.backward()
optimizer = optim.Adam(model.parameters(), lr=0.002)

# Per-epoch history (plain Python floats, see .item() notes below).
train_losses = []
test_losses = []
test_accuracies = []
start_timer()
for e in range(epochs):
    optimizer.zero_grad()

    # Full-batch forward/backward over the entire training set.
    # Call the module itself rather than model.forward() so hooks run.
    output = model(x_train)
    loss = criterion(output, y_train)
    loss.backward()
    train_loss = loss.item()
    train_losses.append(train_loss)

    optimizer.step()

    # Validation pass: gradients off, model in eval mode.
    with torch.no_grad():
        model.eval()
        log_ps = model(x_dev)
        # .item() converts to a host float so the history lists don't
        # keep CUDA tensors (and their device memory) alive all run.
        test_loss = criterion(log_ps, y_dev).item()
        test_losses.append(test_loss)

        ps = torch.exp(log_ps)
        top_p, top_class = ps.topk(1, dim=1)
        equals = top_class == y_dev.view(*top_class.shape)
        test_accuracy = torch.mean(equals.float()).item()
        test_accuracies.append(test_accuracy)

    model.train()

    print(f"Epoch: {e + 1}/{epochs}.. ",
          f"Training Loss: {train_loss:.3f}.. ",
          f"Test Loss: {test_loss:.3f}.. ",
          f"Test Accuracy: {test_accuracy:.3f}")
# This is the full-precision run; the original label "Mixed precision:"
# was copy-pasted from the AMP cell and mislabelled the timing output.
end_timer_and_print("Default precision:")
/usr/local/lib/python3.7/dist-packages/torch/cuda/memory.py:274: FutureWarning: torch.cuda.reset_max_memory_allocated now calls torch.cuda.reset_peak_memory_stats, which resets /all/ peak memory stats.
  FutureWarning)
Epoch: 1/150..  Training Loss: 0.666..  Test Loss: 0.580..  Test Accuracy: 0.983
Epoch: 2/150..  Training Loss: 0.581..  Test Loss: 0.454..  Test Accuracy: 0.983
Epoch: 3/150..  Training Loss: 0.455..  Test Loss: 0.191..  Test Accuracy: 0.983
Epoch: 4/150..  Training Loss: 0.195..  Test Loss: 0.103..  Test Accuracy: 0.983
Epoch: 5/150..  Training Loss: 0.115..  Test Loss: 0.177..  Test Accuracy: 0.983
Epoch: 6/150..  Training Loss: 0.193..  Test Loss: 0.166..  Test Accuracy: 0.983
Epoch: 7/150..  Training Loss: 0.178..  Test Loss: 0.122..  Test Accuracy: 0.983
Epoch: 8/150..  Training Loss: 0.131..  Test Loss: 0.085..  Test Accuracy: 0.983
Epoch: 9/150..  Training Loss: 0.093..  Test Loss: 0.072..  Test Accuracy: 0.983
Epoch: 10/150..  Training Loss: 0.079..  Test Loss: 0.091..  Test Accuracy: 0.983
Epoch: 11/150..  Training Loss: 0.096..  Test Loss: 0.098..  Test Accuracy: 0.983
Epoch: 12/150..  Training Loss: 0.103..  Test Loss: 0.081..  Test Accuracy: 0.983
Epoch: 13/150..  Training Loss: 0.086..  Test Loss: 0.063..  Test Accuracy: 0.983
Epoch: 14/150..  Training Loss: 0.067..  Test Loss: 0.059..  Test Accuracy: 0.983
Epoch: 15/150..  Training Loss: 0.062..  Test Loss: 0.063..  Test Accuracy: 0.983
Epoch: 16/150..  Training Loss: 0.062..  Test Loss: 0.067..  Test Accuracy: 0.983
Epoch: 17/150..  Training Loss: 0.061..  Test Loss: 0.068..  Test Accuracy: 0.983
Epoch: 18/150..  Training Loss: 0.058..  Test Loss: 0.067..  Test Accuracy: 0.983
Epoch: 19/150..  Training Loss: 0.053..  Test Loss: 0.064..  Test Accuracy: 0.983
Epoch: 20/150..  Training Loss: 0.047..  Test Loss: 0.061..  Test Accuracy: 0.983
Epoch: 21/150..  Training Loss: 0.041..  Test Loss: 0.057..  Test Accuracy: 0.983
Epoch: 22/150..  Training Loss: 0.037..  Test Loss: 0.054..  Test Accuracy: 0.983
Epoch: 23/150..  Training Loss: 0.033..  Test Loss: 0.051..  Test Accuracy: 0.983
Epoch: 24/150..  Training Loss: 0.030..  Test Loss: 0.048..  Test Accuracy: 0.983
Epoch: 25/150..  Training Loss: 0.027..  Test Loss: 0.045..  Test Accuracy: 0.983
Epoch: 26/150..  Training Loss: 0.025..  Test Loss: 0.044..  Test Accuracy: 0.983
Epoch: 27/150..  Training Loss: 0.023..  Test Loss: 0.042..  Test Accuracy: 0.983
Epoch: 28/150..  Training Loss: 0.021..  Test Loss: 0.041..  Test Accuracy: 0.983
Epoch: 29/150..  Training Loss: 0.020..  Test Loss: 0.042..  Test Accuracy: 0.983
Epoch: 30/150..  Training Loss: 0.019..  Test Loss: 0.043..  Test Accuracy: 0.983
Epoch: 31/150..  Training Loss: 0.017..  Test Loss: 0.044..  Test Accuracy: 0.983
Epoch: 32/150..  Training Loss: 0.016..  Test Loss: 0.047..  Test Accuracy: 0.983
Epoch: 33/150..  Training Loss: 0.015..  Test Loss: 0.050..  Test Accuracy: 0.993
Epoch: 34/150..  Training Loss: 0.013..  Test Loss: 0.053..  Test Accuracy: 0.997
Epoch: 35/150..  Training Loss: 0.012..  Test Loss: 0.056..  Test Accuracy: 0.997
Epoch: 36/150..  Training Loss: 0.008..  Test Loss: 0.058..  Test Accuracy: 0.997
Epoch: 37/150..  Training Loss: 0.003..  Test Loss: 0.062..  Test Accuracy: 0.996
Epoch: 38/150..  Training Loss: 0.000..  Test Loss: 0.069..  Test Accuracy: 0.996
Epoch: 39/150..  Training Loss: 0.000..  Test Loss: 0.086..  Test Accuracy: 0.995
Epoch: 40/150..  Training Loss: 0.001..  Test Loss: 0.104..  Test Accuracy: 0.995
Epoch: 41/150..  Training Loss: 0.001..  Test Loss: 0.122..  Test Accuracy: 0.995
Epoch: 42/150..  Training Loss: 0.001..  Test Loss: 0.138..  Test Accuracy: 0.996
Epoch: 43/150..  Training Loss: 0.002..  Test Loss: 0.155..  Test Accuracy: 0.996
Epoch: 44/150..  Training Loss: 0.002..  Test Loss: 0.169..  Test Accuracy: 0.996
Epoch: 45/150..  Training Loss: 0.001..  Test Loss: 0.181..  Test Accuracy: 0.996
Epoch: 46/150..  Training Loss: 0.000..  Test Loss: 0.192..  Test Accuracy: 0.997
Epoch: 47/150..  Training Loss: 0.000..  Test Loss: 0.214..  Test Accuracy: 0.997
Epoch: 48/150..  Training Loss: 0.000..  Test Loss: 0.236..  Test Accuracy: 0.997
Epoch: 49/150..  Training Loss: 0.002..  Test Loss: 0.182..  Test Accuracy: 0.997
Epoch: 50/150..  Training Loss: 0.000..  Test Loss: 0.129..  Test Accuracy: 0.997
Epoch: 51/150..  Training Loss: 0.000..  Test Loss: 0.101..  Test Accuracy: 0.996
Epoch: 52/150..  Training Loss: 0.000..  Test Loss: 0.083..  Test Accuracy: 0.996
Epoch: 53/150..  Training Loss: 0.000..  Test Loss: 0.077..  Test Accuracy: 0.995
Epoch: 54/150..  Training Loss: 0.000..  Test Loss: 0.072..  Test Accuracy: 0.995
Epoch: 55/150..  Training Loss: 0.000..  Test Loss: 0.070..  Test Accuracy: 0.995
Epoch: 56/150..  Training Loss: 0.001..  Test Loss: 0.077..  Test Accuracy: 0.995
Epoch: 57/150..  Training Loss: 0.001..  Test Loss: 0.080..  Test Accuracy: 0.995
Epoch: 58/150..  Training Loss: 0.000..  Test Loss: 0.080..  Test Accuracy: 0.995
Epoch: 59/150..  Training Loss: 0.000..  Test Loss: 0.079..  Test Accuracy: 0.995
Epoch: 60/150..  Training Loss: 0.000..  Test Loss: 0.078..  Test Accuracy: 0.995
Epoch: 61/150..  Training Loss: 0.000..  Test Loss: 0.078..  Test Accuracy: 0.995
Epoch: 62/150..  Training Loss: 0.000..  Test Loss: 0.079..  Test Accuracy: 0.995
Epoch: 63/150..  Training Loss: 0.000..  Test Loss: 0.081..  Test Accuracy: 0.995
Epoch: 64/150..  Training Loss: 0.000..  Test Loss: 0.084..  Test Accuracy: 0.995
Epoch: 65/150..  Training Loss: 0.000..  Test Loss: 0.089..  Test Accuracy: 0.995
Epoch: 66/150..  Training Loss: 0.000..  Test Loss: 0.095..  Test Accuracy: 0.995
Epoch: 67/150..  Training Loss: 0.000..  Test Loss: 0.101..  Test Accuracy: 0.995
Epoch: 68/150..  Training Loss: 0.000..  Test Loss: 0.107..  Test Accuracy: 0.995
Epoch: 69/150..  Training Loss: 0.000..  Test Loss: 0.112..  Test Accuracy: 0.995
Epoch: 70/150..  Training Loss: 0.000..  Test Loss: 0.116..  Test Accuracy: 0.995
Epoch: 71/150..  Training Loss: 0.000..  Test Loss: 0.120..  Test Accuracy: 0.995
Epoch: 72/150..  Training Loss: 0.000..  Test Loss: 0.124..  Test Accuracy: 0.995
Epoch: 73/150..  Training Loss: 0.000..  Test Loss: 0.127..  Test Accuracy: 0.995
Epoch: 74/150..  Training Loss: 0.000..  Test Loss: 0.129..  Test Accuracy: 0.995
Epoch: 75/150..  Training Loss: 0.000..  Test Loss: 0.132..  Test Accuracy: 0.995
Epoch: 76/150..  Training Loss: 0.000..  Test Loss: 0.134..  Test Accuracy: 0.996
Epoch: 77/150..  Training Loss: 0.000..  Test Loss: 0.136..  Test Accuracy: 0.996
Epoch: 78/150..  Training Loss: 0.000..  Test Loss: 0.138..  Test Accuracy: 0.996
Epoch: 79/150..  Training Loss: 0.000..  Test Loss: 0.139..  Test Accuracy: 0.996
Epoch: 80/150..  Training Loss: 0.000..  Test Loss: 0.141..  Test Accuracy: 0.996
Epoch: 81/150..  Training Loss: 0.000..  Test Loss: 0.142..  Test Accuracy: 0.996
Epoch: 82/150..  Training Loss: 0.000..  Test Loss: 0.144..  Test Accuracy: 0.996
Epoch: 83/150..  Training Loss: 0.000..  Test Loss: 0.145..  Test Accuracy: 0.996
Epoch: 84/150..  Training Loss: 0.000..  Test Loss: 0.146..  Test Accuracy: 0.996
Epoch: 85/150..  Training Loss: 0.000..  Test Loss: 0.147..  Test Accuracy: 0.996
Epoch: 86/150..  Training Loss: 0.000..  Test Loss: 0.148..  Test Accuracy: 0.996
Epoch: 87/150..  Training Loss: 0.000..  Test Loss: 0.148..  Test Accuracy: 0.996
Epoch: 88/150..  Training Loss: 0.000..  Test Loss: 0.149..  Test Accuracy: 0.996
Epoch: 89/150..  Training Loss: 0.000..  Test Loss: 0.150..  Test Accuracy: 0.996
Epoch: 90/150..  Training Loss: 0.000..  Test Loss: 0.150..  Test Accuracy: 0.996
Epoch: 91/150..  Training Loss: 0.000..  Test Loss: 0.151..  Test Accuracy: 0.996
Epoch: 92/150..  Training Loss: 0.000..  Test Loss: 0.151..  Test Accuracy: 0.996
Epoch: 93/150..  Training Loss: 0.000..  Test Loss: 0.152..  Test Accuracy: 0.996
Epoch: 94/150..  Training Loss: 0.000..  Test Loss: 0.152..  Test Accuracy: 0.996
Epoch: 95/150..  Training Loss: 0.000..  Test Loss: 0.152..  Test Accuracy: 0.996
Epoch: 96/150..  Training Loss: 0.000..  Test Loss: 0.153..  Test Accuracy: 0.996
Epoch: 97/150..  Training Loss: 0.000..  Test Loss: 0.153..  Test Accuracy: 0.996
Epoch: 98/150..  Training Loss: 0.000..  Test Loss: 0.153..  Test Accuracy: 0.996
Epoch: 99/150..  Training Loss: 0.000..  Test Loss: 0.153..  Test Accuracy: 0.996
Epoch: 100/150..  Training Loss: 0.000..  Test Loss: 0.153..  Test Accuracy: 0.996
Epoch: 101/150..  Training Loss: 0.000..  Test Loss: 0.154..  Test Accuracy: 0.996
Epoch: 102/150..  Training Loss: 0.000..  Test Loss: 0.154..  Test Accuracy: 0.996
Epoch: 103/150..  Training Loss: 0.000..  Test Loss: 0.154..  Test Accuracy: 0.996
Epoch: 104/150..  Training Loss: 0.000..  Test Loss: 0.154..  Test Accuracy: 0.996
Epoch: 105/150..  Training Loss: 0.000..  Test Loss: 0.154..  Test Accuracy: 0.996
Epoch: 106/150..  Training Loss: 0.000..  Test Loss: 0.154..  Test Accuracy: 0.996
Epoch: 107/150..  Training Loss: 0.000..  Test Loss: 0.154..  Test Accuracy: 0.996
Epoch: 108/150..  Training Loss: 0.000..  Test Loss: 0.154..  Test Accuracy: 0.996
Epoch: 109/150..  Training Loss: 0.000..  Test Loss: 0.155..  Test Accuracy: 0.996
Epoch: 110/150..  Training Loss: 0.000..  Test Loss: 0.155..  Test Accuracy: 0.996
Epoch: 111/150..  Training Loss: 0.000..  Test Loss: 0.155..  Test Accuracy: 0.996
Epoch: 112/150..  Training Loss: 0.000..  Test Loss: 0.155..  Test Accuracy: 0.996
Epoch: 113/150..  Training Loss: 0.000..  Test Loss: 0.155..  Test Accuracy: 0.996
Epoch: 114/150..  Training Loss: 0.000..  Test Loss: 0.155..  Test Accuracy: 0.996
Epoch: 115/150..  Training Loss: 0.000..  Test Loss: 0.155..  Test Accuracy: 0.996
Epoch: 116/150..  Training Loss: 0.000..  Test Loss: 0.155..  Test Accuracy: 0.996
Epoch: 117/150..  Training Loss: 0.000..  Test Loss: 0.155..  Test Accuracy: 0.996
Epoch: 118/150..  Training Loss: 0.000..  Test Loss: 0.155..  Test Accuracy: 0.996
Epoch: 119/150..  Training Loss: 0.000..  Test Loss: 0.155..  Test Accuracy: 0.996
Epoch: 120/150..  Training Loss: 0.000..  Test Loss: 0.155..  Test Accuracy: 0.996
Epoch: 121/150..  Training Loss: 0.000..  Test Loss: 0.155..  Test Accuracy: 0.996
Epoch: 122/150..  Training Loss: 0.000..  Test Loss: 0.155..  Test Accuracy: 0.996
Epoch: 123/150..  Training Loss: 0.000..  Test Loss: 0.155..  Test Accuracy: 0.996
Epoch: 124/150..  Training Loss: 0.000..  Test Loss: 0.155..  Test Accuracy: 0.996
Epoch: 125/150..  Training Loss: 0.000..  Test Loss: 0.155..  Test Accuracy: 0.996
Epoch: 126/150..  Training Loss: 0.000..  Test Loss: 0.155..  Test Accuracy: 0.996
Epoch: 127/150..  Training Loss: 0.000..  Test Loss: 0.155..  Test Accuracy: 0.996
Epoch: 128/150..  Training Loss: 0.000..  Test Loss: 0.155..  Test Accuracy: 0.996
Epoch: 129/150..  Training Loss: 0.000..  Test Loss: 0.155..  Test Accuracy: 0.996
Epoch: 130/150..  Training Loss: 0.000..  Test Loss: 0.155..  Test Accuracy: 0.996
Epoch: 131/150..  Training Loss: 0.000..  Test Loss: 0.155..  Test Accuracy: 0.996
Epoch: 132/150..  Training Loss: 0.000..  Test Loss: 0.155..  Test Accuracy: 0.996
Epoch: 133/150..  Training Loss: 0.000..  Test Loss: 0.155..  Test Accuracy: 0.996
Epoch: 134/150..  Training Loss: 0.000..  Test Loss: 0.155..  Test Accuracy: 0.996
Epoch: 135/150..  Training Loss: 0.000..  Test Loss: 0.155..  Test Accuracy: 0.996
Epoch: 136/150..  Training Loss: 0.000..  Test Loss: 0.155..  Test Accuracy: 0.996
Epoch: 137/150..  Training Loss: 0.000..  Test Loss: 0.155..  Test Accuracy: 0.996
Epoch: 138/150..  Training Loss: 0.000..  Test Loss: 0.155..  Test Accuracy: 0.996
Epoch: 139/150..  Training Loss: 0.000..  Test Loss: 0.155..  Test Accuracy: 0.996
Epoch: 140/150..  Training Loss: 0.000..  Test Loss: 0.155..  Test Accuracy: 0.996
Epoch: 141/150..  Training Loss: 0.000..  Test Loss: 0.155..  Test Accuracy: 0.996
Epoch: 142/150..  Training Loss: 0.000..  Test Loss: 0.155..  Test Accuracy: 0.996
Epoch: 143/150..  Training Loss: 0.000..  Test Loss: 0.155..  Test Accuracy: 0.996
Epoch: 144/150..  Training Loss: 0.000..  Test Loss: 0.155..  Test Accuracy: 0.996
Epoch: 145/150..  Training Loss: 0.000..  Test Loss: 0.155..  Test Accuracy: 0.996
Epoch: 146/150..  Training Loss: 0.000..  Test Loss: 0.155..  Test Accuracy: 0.996
Epoch: 147/150..  Training Loss: 0.000..  Test Loss: 0.155..  Test Accuracy: 0.996
Epoch: 148/150..  Training Loss: 0.000..  Test Loss: 0.155..  Test Accuracy: 0.996
Epoch: 149/150..  Training Loss: 0.000..  Test Loss: 0.155..  Test Accuracy: 0.996
Epoch: 150/150..  Training Loss: 0.000..  Test Loss: 0.155..  Test Accuracy: 0.996

Mixed precision:
Total execution time = 21.202 sec
Max memory used by tensors = 2485789184 bytes
# Default (full-precision) model: evaluate on the test split.
model.eval()
# no_grad: inference only — the original built an autograd graph here
# for no reason.
with torch.no_grad():
    output = model(x_test)
ps = torch.exp(output)
top_p, top_class = ps.topk(1, dim=1)
# Flatten the (n, 1) predicted class indices to plain ints for sklearn;
# replaces the original element-by-element Python loops.
y_pred = top_class.cpu().numpy().ravel().tolist()
y_true = y_test.cpu().tolist()
print('F-score: ', f1_score(y_true, y_pred, average='macro'))

print('Accuracy: ', accuracy_score(y_true, y_pred))
F-score:  0.9845942906441127
Accuracy:  0.999
# Mixed precision model
use_amp = True  # toggles torch.cuda.amp autocast/GradScaler below


# Same architecture as the full-precision run so timings are comparable.
model = nn.Sequential(
        nn.Linear(x_train.shape[1], size),
        nn.ReLU(),

        nn.Linear(size, size),
        nn.ReLU(),
        nn.Linear(size, size),
        nn.ReLU(),

        nn.Linear(size, size),
        nn.ReLU(),
        nn.Linear(size, size),
        nn.ReLU(),

        nn.Linear(size, size),
        nn.ReLU(),
        nn.Linear(size, size),
        nn.ReLU(),

        nn.Linear(size, size),
        nn.ReLU(),
        nn.Linear(size, data_train["fraudulent"].nunique()),
        
        nn.LogSoftmax(dim=1))
model.cuda()
# Negative log-likelihood loss matches the LogSoftmax head.
criterion = nn.NLLLoss()
# NOTE(review): warm-up forward/backward, as in the full-precision setup;
# its gradients are discarded by the loop's first optimizer.zero_grad().
logps = model(x_train)
loss = criterion(logps, y_train)
loss.backward()
optimizer = optim.Adam(model.parameters(), lr=0.002)

train_losses = []
test_losses = []
test_accuracies = []
# GradScaler rescales the loss so small fp16 gradients don't underflow.
scaler = torch.cuda.amp.GradScaler(enabled=use_amp)
start_timer()
for e in range(epochs):
    optimizer.zero_grad()
    # Forward pass under autocast: eligible ops run in float16.
    with torch.cuda.amp.autocast(enabled=use_amp):
        output = model(x_train)  # call the module, not .forward(), so hooks run
        loss = criterion(output, y_train)
    # Scale the loss before backward so fp16 gradients don't underflow.
    scaler.scale(loss).backward()
    train_loss = loss.item()
    train_losses.append(train_loss)
    scaler.step(optimizer)  # unscales grads; skips the step on inf/nan
    scaler.update()

    # Validation pass: gradients off, model in eval mode (runs in fp32 —
    # autocast is deliberately not applied here, matching the original).
    with torch.no_grad():
        model.eval()
        log_ps = model(x_dev)
        # .item() keeps the history lists as host floats instead of live
        # CUDA tensors, so device memory isn't pinned for the whole run.
        test_loss = criterion(log_ps, y_dev).item()
        test_losses.append(test_loss)

        ps = torch.exp(log_ps)
        top_p, top_class = ps.topk(1, dim=1)
        equals = top_class == y_dev.view(*top_class.shape)
        test_accuracy = torch.mean(equals.float()).item()
        test_accuracies.append(test_accuracy)

    model.train()

    print(f"Epoch: {e + 1}/{epochs}.. ",
          f"Training Loss: {train_loss:.3f}.. ",
          f"Test Loss: {test_loss:.3f}.. ",
          f"Test Accuracy: {test_accuracy:.3f}")
end_timer_and_print("Mixed precision:")
/usr/local/lib/python3.7/dist-packages/torch/cuda/memory.py:274: FutureWarning: torch.cuda.reset_max_memory_allocated now calls torch.cuda.reset_peak_memory_stats, which resets /all/ peak memory stats.
  FutureWarning)
Epoch: 1/150..  Training Loss: 0.729..  Test Loss: 0.643..  Test Accuracy: 0.983
Epoch: 2/150..  Training Loss: 0.644..  Test Loss: 0.518..  Test Accuracy: 0.983
Epoch: 3/150..  Training Loss: 0.519..  Test Loss: 0.245..  Test Accuracy: 0.983
Epoch: 4/150..  Training Loss: 0.249..  Test Loss: 0.087..  Test Accuracy: 0.983
Epoch: 5/150..  Training Loss: 0.098..  Test Loss: 0.171..  Test Accuracy: 0.983
Epoch: 6/150..  Training Loss: 0.187..  Test Loss: 0.178..  Test Accuracy: 0.983
Epoch: 7/150..  Training Loss: 0.191..  Test Loss: 0.135..  Test Accuracy: 0.983
Epoch: 8/150..  Training Loss: 0.145..  Test Loss: 0.093..  Test Accuracy: 0.983
Epoch: 9/150..  Training Loss: 0.101..  Test Loss: 0.070..  Test Accuracy: 0.983
Epoch: 10/150..  Training Loss: 0.077..  Test Loss: 0.088..  Test Accuracy: 0.983
Epoch: 11/150..  Training Loss: 0.093..  Test Loss: 0.100..  Test Accuracy: 0.983
Epoch: 12/150..  Training Loss: 0.104..  Test Loss: 0.080..  Test Accuracy: 0.983
Epoch: 13/150..  Training Loss: 0.085..  Test Loss: 0.061..  Test Accuracy: 0.983
Epoch: 14/150..  Training Loss: 0.065..  Test Loss: 0.059..  Test Accuracy: 0.983
Epoch: 15/150..  Training Loss: 0.061..  Test Loss: 0.063..  Test Accuracy: 0.983
Epoch: 16/150..  Training Loss: 0.062..  Test Loss: 0.066..  Test Accuracy: 0.983
Epoch: 17/150..  Training Loss: 0.060..  Test Loss: 0.066..  Test Accuracy: 0.983
Epoch: 18/150..  Training Loss: 0.056..  Test Loss: 0.064..  Test Accuracy: 0.983
Epoch: 19/150..  Training Loss: 0.051..  Test Loss: 0.060..  Test Accuracy: 0.983
Epoch: 20/150..  Training Loss: 0.044..  Test Loss: 0.057..  Test Accuracy: 0.983
Epoch: 21/150..  Training Loss: 0.039..  Test Loss: 0.053..  Test Accuracy: 0.983
Epoch: 22/150..  Training Loss: 0.034..  Test Loss: 0.050..  Test Accuracy: 0.983
Epoch: 23/150..  Training Loss: 0.031..  Test Loss: 0.047..  Test Accuracy: 0.983
Epoch: 24/150..  Training Loss: 0.027..  Test Loss: 0.045..  Test Accuracy: 0.983
Epoch: 25/150..  Training Loss: 0.025..  Test Loss: 0.043..  Test Accuracy: 0.983
Epoch: 26/150..  Training Loss: 0.022..  Test Loss: 0.041..  Test Accuracy: 0.983
Epoch: 27/150..  Training Loss: 0.020..  Test Loss: 0.040..  Test Accuracy: 0.983
Epoch: 28/150..  Training Loss: 0.019..  Test Loss: 0.040..  Test Accuracy: 0.983
Epoch: 29/150..  Training Loss: 0.017..  Test Loss: 0.040..  Test Accuracy: 0.983
Epoch: 30/150..  Training Loss: 0.016..  Test Loss: 0.041..  Test Accuracy: 0.983
Epoch: 31/150..  Training Loss: 0.015..  Test Loss: 0.043..  Test Accuracy: 0.994
Epoch: 32/150..  Training Loss: 0.013..  Test Loss: 0.045..  Test Accuracy: 0.996
Epoch: 33/150..  Training Loss: 0.012..  Test Loss: 0.047..  Test Accuracy: 0.996
Epoch: 34/150..  Training Loss: 0.009..  Test Loss: 0.049..  Test Accuracy: 0.996
Epoch: 35/150..  Training Loss: 0.005..  Test Loss: 0.054..  Test Accuracy: 0.996
Epoch: 36/150..  Training Loss: 0.001..  Test Loss: 0.064..  Test Accuracy: 0.996
Epoch: 37/150..  Training Loss: 0.000..  Test Loss: 0.077..  Test Accuracy: 0.996
Epoch: 38/150..  Training Loss: 0.001..  Test Loss: 0.094..  Test Accuracy: 0.995
Epoch: 39/150..  Training Loss: 0.001..  Test Loss: 0.113..  Test Accuracy: 0.995
Epoch: 40/150..  Training Loss: 0.001..  Test Loss: 0.131..  Test Accuracy: 0.995
Epoch: 41/150..  Training Loss: 0.002..  Test Loss: 0.144..  Test Accuracy: 0.996
Epoch: 42/150..  Training Loss: 0.002..  Test Loss: 0.158..  Test Accuracy: 0.996
Epoch: 43/150..  Training Loss: 0.001..  Test Loss: 0.170..  Test Accuracy: 0.996
Epoch: 44/150..  Training Loss: 0.001..  Test Loss: 0.177..  Test Accuracy: 0.996
Epoch: 45/150..  Training Loss: 0.000..  Test Loss: 0.195..  Test Accuracy: 0.997
Epoch: 46/150..  Training Loss: 0.000..  Test Loss: 0.216..  Test Accuracy: 0.997
Epoch: 47/150..  Training Loss: 0.000..  Test Loss: 0.237..  Test Accuracy: 0.997
Epoch: 48/150..  Training Loss: 0.002..  Test Loss: 0.174..  Test Accuracy: 0.997
Epoch: 49/150..  Training Loss: 0.000..  Test Loss: 0.126..  Test Accuracy: 0.997
Epoch: 50/150..  Training Loss: 0.000..  Test Loss: 0.090..  Test Accuracy: 0.997
Epoch: 51/150..  Training Loss: 0.000..  Test Loss: 0.062..  Test Accuracy: 0.997
Epoch: 52/150..  Training Loss: 0.000..  Test Loss: 0.045..  Test Accuracy: 0.996
Epoch: 53/150..  Training Loss: 0.000..  Test Loss: 0.035..  Test Accuracy: 0.996
Epoch: 54/150..  Training Loss: 0.000..  Test Loss: 0.031..  Test Accuracy: 0.996
Epoch: 55/150..  Training Loss: 0.000..  Test Loss: 0.042..  Test Accuracy: 0.996
Epoch: 56/150..  Training Loss: 0.000..  Test Loss: 0.053..  Test Accuracy: 0.996
Epoch: 57/150..  Training Loss: 0.000..  Test Loss: 0.063..  Test Accuracy: 0.996
Epoch: 58/150..  Training Loss: 0.000..  Test Loss: 0.072..  Test Accuracy: 0.996
Epoch: 59/150..  Training Loss: 0.000..  Test Loss: 0.081..  Test Accuracy: 0.996
Epoch: 60/150..  Training Loss: 0.000..  Test Loss: 0.089..  Test Accuracy: 0.996
Epoch: 61/150..  Training Loss: 0.000..  Test Loss: 0.097..  Test Accuracy: 0.996
Epoch: 62/150..  Training Loss: 0.000..  Test Loss: 0.104..  Test Accuracy: 0.996
Epoch: 63/150..  Training Loss: 0.000..  Test Loss: 0.110..  Test Accuracy: 0.996
Epoch: 64/150..  Training Loss: 0.000..  Test Loss: 0.117..  Test Accuracy: 0.996
Epoch: 65/150..  Training Loss: 0.000..  Test Loss: 0.122..  Test Accuracy: 0.996
Epoch: 66/150..  Training Loss: 0.000..  Test Loss: 0.127..  Test Accuracy: 0.996
Epoch: 67/150..  Training Loss: 0.000..  Test Loss: 0.132..  Test Accuracy: 0.996
Epoch: 68/150..  Training Loss: 0.000..  Test Loss: 0.136..  Test Accuracy: 0.996
Epoch: 69/150..  Training Loss: 0.000..  Test Loss: 0.140..  Test Accuracy: 0.996
Epoch: 70/150..  Training Loss: 0.000..  Test Loss: 0.143..  Test Accuracy: 0.996
Epoch: 71/150..  Training Loss: 0.000..  Test Loss: 0.147..  Test Accuracy: 0.996
Epoch: 72/150..  Training Loss: 0.000..  Test Loss: 0.149..  Test Accuracy: 0.996
Epoch: 73/150..  Training Loss: 0.000..  Test Loss: 0.152..  Test Accuracy: 0.996
Epoch: 74/150..  Training Loss: 0.000..  Test Loss: 0.154..  Test Accuracy: 0.996
Epoch: 75/150..  Training Loss: 0.000..  Test Loss: 0.156..  Test Accuracy: 0.996
Epoch: 76/150..  Training Loss: 0.000..  Test Loss: 0.158..  Test Accuracy: 0.996
Epoch: 77/150..  Training Loss: 0.000..  Test Loss: 0.160..  Test Accuracy: 0.996
Epoch: 78/150..  Training Loss: 0.000..  Test Loss: 0.162..  Test Accuracy: 0.996
Epoch: 79/150..  Training Loss: 0.000..  Test Loss: 0.163..  Test Accuracy: 0.996
Epoch: 80/150..  Training Loss: 0.000..  Test Loss: 0.164..  Test Accuracy: 0.996
Epoch: 81/150..  Training Loss: 0.000..  Test Loss: 0.166..  Test Accuracy: 0.996
Epoch: 82/150..  Training Loss: 0.000..  Test Loss: 0.167..  Test Accuracy: 0.996
Epoch: 83/150..  Training Loss: 0.000..  Test Loss: 0.168..  Test Accuracy: 0.996
Epoch: 84/150..  Training Loss: 0.000..  Test Loss: 0.169..  Test Accuracy: 0.996
Epoch: 85/150..  Training Loss: 0.000..  Test Loss: 0.169..  Test Accuracy: 0.996
Epoch: 86/150..  Training Loss: 0.000..  Test Loss: 0.170..  Test Accuracy: 0.996
Epoch: 87/150..  Training Loss: 0.000..  Test Loss: 0.171..  Test Accuracy: 0.996
Epoch: 88/150..  Training Loss: 0.000..  Test Loss: 0.171..  Test Accuracy: 0.996
Epoch: 89/150..  Training Loss: 0.000..  Test Loss: 0.172..  Test Accuracy: 0.996
Epoch: 90/150..  Training Loss: 0.000..  Test Loss: 0.172..  Test Accuracy: 0.996
Epoch: 91/150..  Training Loss: 0.000..  Test Loss: 0.173..  Test Accuracy: 0.996
Epoch: 92/150..  Training Loss: 0.000..  Test Loss: 0.173..  Test Accuracy: 0.996
Epoch: 93/150..  Training Loss: 0.000..  Test Loss: 0.174..  Test Accuracy: 0.996
Epoch: 94/150..  Training Loss: 0.000..  Test Loss: 0.174..  Test Accuracy: 0.996
Epoch: 95/150..  Training Loss: 0.000..  Test Loss: 0.174..  Test Accuracy: 0.996
Epoch: 96/150..  Training Loss: 0.000..  Test Loss: 0.175..  Test Accuracy: 0.996
Epoch: 97/150..  Training Loss: 0.000..  Test Loss: 0.175..  Test Accuracy: 0.996
Epoch: 98/150..  Training Loss: 0.000..  Test Loss: 0.175..  Test Accuracy: 0.996
Epoch: 99/150..  Training Loss: 0.000..  Test Loss: 0.175..  Test Accuracy: 0.996
Epoch: 100/150..  Training Loss: 0.000..  Test Loss: 0.176..  Test Accuracy: 0.996
Epoch: 101/150..  Training Loss: 0.000..  Test Loss: 0.176..  Test Accuracy: 0.996
Epoch: 102/150..  Training Loss: 0.000..  Test Loss: 0.176..  Test Accuracy: 0.996
Epoch: 103/150..  Training Loss: 0.000..  Test Loss: 0.176..  Test Accuracy: 0.996
Epoch: 104/150..  Training Loss: 0.000..  Test Loss: 0.176..  Test Accuracy: 0.996
Epoch: 105/150..  Training Loss: 0.000..  Test Loss: 0.176..  Test Accuracy: 0.996
Epoch: 106/150..  Training Loss: 0.000..  Test Loss: 0.176..  Test Accuracy: 0.996
Epoch: 107/150..  Training Loss: 0.000..  Test Loss: 0.176..  Test Accuracy: 0.996
Epoch: 108/150..  Training Loss: 0.000..  Test Loss: 0.177..  Test Accuracy: 0.996
Epoch: 109/150..  Training Loss: 0.000..  Test Loss: 0.177..  Test Accuracy: 0.996
Epoch: 110/150..  Training Loss: 0.000..  Test Loss: 0.177..  Test Accuracy: 0.996
Epoch: 111/150..  Training Loss: 0.000..  Test Loss: 0.177..  Test Accuracy: 0.996
Epoch: 112/150..  Training Loss: 0.000..  Test Loss: 0.177..  Test Accuracy: 0.996
Epoch: 113/150..  Training Loss: 0.000..  Test Loss: 0.177..  Test Accuracy: 0.996
Epoch: 114/150..  Training Loss: 0.000..  Test Loss: 0.177..  Test Accuracy: 0.996
Epoch: 115/150..  Training Loss: 0.000..  Test Loss: 0.177..  Test Accuracy: 0.996
Epoch: 116/150..  Training Loss: 0.000..  Test Loss: 0.177..  Test Accuracy: 0.996
Epoch: 117/150..  Training Loss: 0.000..  Test Loss: 0.177..  Test Accuracy: 0.996
Epoch: 118/150..  Training Loss: 0.000..  Test Loss: 0.177..  Test Accuracy: 0.996
Epoch: 119/150..  Training Loss: 0.000..  Test Loss: 0.177..  Test Accuracy: 0.996
Epoch: 120/150..  Training Loss: 0.000..  Test Loss: 0.177..  Test Accuracy: 0.996
Epoch: 121/150..  Training Loss: 0.000..  Test Loss: 0.177..  Test Accuracy: 0.996
Epoch: 122/150..  Training Loss: 0.000..  Test Loss: 0.177..  Test Accuracy: 0.996
Epoch: 123/150..  Training Loss: 0.000..  Test Loss: 0.177..  Test Accuracy: 0.996
Epoch: 124/150..  Training Loss: 0.000..  Test Loss: 0.177..  Test Accuracy: 0.996
Epoch: 125/150..  Training Loss: 0.000..  Test Loss: 0.177..  Test Accuracy: 0.996
Epoch: 126/150..  Training Loss: 0.000..  Test Loss: 0.177..  Test Accuracy: 0.996
Epoch: 127/150..  Training Loss: 0.000..  Test Loss: 0.177..  Test Accuracy: 0.996
Epoch: 128/150..  Training Loss: 0.000..  Test Loss: 0.177..  Test Accuracy: 0.996
Epoch: 129/150..  Training Loss: 0.000..  Test Loss: 0.177..  Test Accuracy: 0.996
Epoch: 130/150..  Training Loss: 0.000..  Test Loss: 0.177..  Test Accuracy: 0.996
Epoch: 131/150..  Training Loss: 0.000..  Test Loss: 0.177..  Test Accuracy: 0.996
Epoch: 132/150..  Training Loss: 0.000..  Test Loss: 0.177..  Test Accuracy: 0.996
Epoch: 133/150..  Training Loss: 0.000..  Test Loss: 0.177..  Test Accuracy: 0.996
Epoch: 134/150..  Training Loss: 0.000..  Test Loss: 0.177..  Test Accuracy: 0.996
Epoch: 135/150..  Training Loss: 0.000..  Test Loss: 0.177..  Test Accuracy: 0.996
Epoch: 136/150..  Training Loss: 0.000..  Test Loss: 0.177..  Test Accuracy: 0.996
Epoch: 137/150..  Training Loss: 0.000..  Test Loss: 0.177..  Test Accuracy: 0.996
Epoch: 138/150..  Training Loss: 0.000..  Test Loss: 0.177..  Test Accuracy: 0.996
Epoch: 139/150..  Training Loss: 0.000..  Test Loss: 0.177..  Test Accuracy: 0.996
Epoch: 140/150..  Training Loss: 0.000..  Test Loss: 0.177..  Test Accuracy: 0.996
Epoch: 141/150..  Training Loss: 0.000..  Test Loss: 0.177..  Test Accuracy: 0.996
Epoch: 142/150..  Training Loss: 0.000..  Test Loss: 0.177..  Test Accuracy: 0.996
Epoch: 143/150..  Training Loss: 0.000..  Test Loss: 0.177..  Test Accuracy: 0.996
Epoch: 144/150..  Training Loss: 0.000..  Test Loss: 0.177..  Test Accuracy: 0.996
Epoch: 145/150..  Training Loss: 0.000..  Test Loss: 0.177..  Test Accuracy: 0.996
Epoch: 146/150..  Training Loss: 0.000..  Test Loss: 0.177..  Test Accuracy: 0.996
Epoch: 147/150..  Training Loss: 0.000..  Test Loss: 0.177..  Test Accuracy: 0.996
Epoch: 148/150..  Training Loss: 0.000..  Test Loss: 0.177..  Test Accuracy: 0.996
Epoch: 149/150..  Training Loss: 0.000..  Test Loss: 0.177..  Test Accuracy: 0.996
Epoch: 150/150..  Training Loss: 0.000..  Test Loss: 0.177..  Test Accuracy: 0.996

Mixed precision:
Total execution time = 7.507 sec
Max memory used by tensors = 2737311232 bytes
# Mixed precision model: evaluate on the test split.
model.eval()
# no_grad: inference only — the original built an autograd graph here
# for no reason.
with torch.no_grad():
    output = model(x_test)
ps = torch.exp(output)
top_p, top_class = ps.topk(1, dim=1)
# Flatten the (n, 1) predicted class indices to plain ints for sklearn;
# replaces the original element-by-element Python loops.
y_pred = top_class.cpu().numpy().ravel().tolist()
y_true = y_test.cpu().tolist()
print('F-score: ', f1_score(y_true, y_pred, average='macro'))

print('Accuracy: ', accuracy_score(y_true, y_pred))
F-score:  0.9845942906441127
Accuracy:  0.999