From c823e0e4fb06e8c4111ae12a6a785c1c126c0b8e Mon Sep 17 00:00:00 2001
From: Adam Wojdyla
Date: Sun, 1 May 2022 18:54:27 +0200
Subject: [PATCH] parametrized training

---
 lab05_deepLearning.py | 15 +++++++++++++--
 1 file changed, 13 insertions(+), 2 deletions(-)

diff --git a/lab05_deepLearning.py b/lab05_deepLearning.py
index 3c0c37d..4f1a8d8 100644
--- a/lab05_deepLearning.py
+++ b/lab05_deepLearning.py
@@ -1,3 +1,5 @@
+#!/usr/bin/python
+
 import numpy as np
 import torch
 from torch import nn
@@ -8,7 +10,7 @@ from sklearn.metrics import accuracy_score
 import torch.nn.functional as F
 import pandas as pd
 from sklearn import preprocessing
-# import matplotlib.pyplot as plt
+import sys
 
 
 class Model(nn.Module):
@@ -83,7 +85,16 @@ features_train, features_test, labels_train, labels_test = train_test_split(feat
 model = Model(features_train.shape[1])
 optimizer = torch.optim.Adam(model.parameters(), lr=0.01)
 loss_fn = nn.CrossEntropyLoss()
-epochs = 1000
+
+# number of epochs is parametrized
+try:
+    epochs = int(sys.argv[1])
+except Exception as e:
+    print(e)
+    print("Setting default epochs value to 1000.")
+    epochs = 1000
+
+print(f"Number of epochs: {epochs}")
 print("Starting model training...")
 
 x_train, y_train = Variable(torch.from_numpy(features_train)).float(), Variable(torch.from_numpy(labels_train)).long()
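
Note: the hunk above reads the epoch count positionally from sys.argv[1] and falls back to 1000 on any exception, which also swallows unrelated errors. A minimal alternative sketch using argparse from the standard library is shown below; the --epochs flag name is an assumption for illustration, not part of the patch.

    import argparse

    # Hypothetical alternative to the sys.argv parsing in the patch:
    # argparse validates the integer and prints a usage message on bad
    # input instead of catching a broad Exception.
    parser = argparse.ArgumentParser(description="Model training")
    parser.add_argument("--epochs", type=int, default=1000,
                        help="number of training epochs (default: 1000)")
    args = parser.parse_args()

    epochs = args.epochs
    print(f"Number of epochs: {epochs}")

Invoked as `python lab05_deepLearning.py --epochs 500`; omitting the flag keeps the default of 1000, matching the fallback behavior in the patch.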