parametrized training
All checks were successful
444507-training/pipeline/head: This commit looks good

This commit is contained in:
Adam Wojdyla 2022-05-01 18:54:27 +02:00
parent f6f1cabf10
commit c823e0e4fb


@@ -1,3 +1,5 @@
#!/usr/bin/python
import numpy as np
import torch
from torch import nn
@@ -8,7 +10,7 @@ from sklearn.metrics import accuracy_score
import torch.nn.functional as F
import pandas as pd
from sklearn import preprocessing
# import matplotlib.pyplot as plt
import sys
class Model(nn.Module):
@@ -83,7 +85,16 @@ features_train, features_test, labels_train, labels_test = train_test_split(feat
model = Model(features_train.shape[1])
optimizer = torch.optim.Adam(model.parameters(), lr=0.01)
loss_fn = nn.CrossEntropyLoss()
epochs = 1000
# number of epochs is parametrized
try:
    epochs = int(sys.argv[1])
except Exception as e:
    print(e)
    print("Setting default epochs value to 1000.")
    epochs = 1000
print(f"Number of epochs: {epochs}")
print("Starting model training...")
x_train, y_train = Variable(torch.from_numpy(features_train)).float(), Variable(torch.from_numpy(labels_train)).long()
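For reference, a minimal sketch of how the parametrized epoch count could drive the training loop in the unchanged part of the script. The helper name, the toy model, data, and the loop body below are assumptions for illustration, not part of this commit; only the sys.argv fallback pattern mirrors the diff. The script would be invoked with the epoch count as the first argument, e.g. `python train.py 500`.

#!/usr/bin/python
# Sketch (assumption): consuming the parametrized `epochs` value in a PyTorch loop.
import sys

import torch
from torch import nn


def parse_epochs(argv, default=1000):
    """Read the epoch count from the first CLI argument, falling back to a default."""
    try:
        return int(argv[1])
    except (IndexError, ValueError) as e:
        print(e)
        print(f"Setting default epochs value to {default}.")
        return default


if __name__ == "__main__":
    epochs = parse_epochs(sys.argv)
    print(f"Number of epochs: {epochs}")

    # Toy stand-ins for the model, optimizer, loss, and data set up earlier in the script.
    model = nn.Linear(4, 3)
    optimizer = torch.optim.Adam(model.parameters(), lr=0.01)
    loss_fn = nn.CrossEntropyLoss()
    x_train = torch.randn(8, 4)
    y_train = torch.randint(0, 3, (8,))

    print("Starting model training...")
    for epoch in range(epochs):
        optimizer.zero_grad()
        loss = loss_fn(model(x_train), y_train)
        loss.backward()
        optimizer.step()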