From 46d7831b98274dbc3e387869d9a11e3a0b63a05a Mon Sep 17 00:00:00 2001
From: Marcin Kostrzewski
Date: Thu, 5 May 2022 22:11:32 +0200
Subject: [PATCH] Added customizable model params

---
 Jenkinsfile-train | 17 ++++++++++++++++-
 train_model.py    | 21 +++++++++++++++++++--
 2 files changed, 35 insertions(+), 3 deletions(-)

diff --git a/Jenkinsfile-train b/Jenkinsfile-train
index 4e67552..604cd31 100644
--- a/Jenkinsfile-train
+++ b/Jenkinsfile-train
@@ -1,4 +1,19 @@
 pipeline {
+    parameters {
+        string(
+            defaultValue: '64',
+            description: 'Batch size used in ADAM',
+            name: 'BATCHSIZE',
+            trim: true
+        )
+        string(
+            defaultValue: '5',
+            description: 'Number of iterations',
+            name: 'EPOCHS',
+            trim: true
+        )
+    }
+
     agent {
         docker {
             image 's444409-create-dataset'
@@ -8,7 +23,7 @@
     stages {
         stage('Train model') {
             steps {
-                sh "python train_model.py"
+                sh "python train_model.py -e ${params.EPOCHS} -b ${params.BATCHSIZE}"
            }
        }
    }
diff --git a/train_model.py b/train_model.py
index 21bf880..5fe4978 100644
--- a/train_model.py
+++ b/train_model.py
@@ -1,10 +1,17 @@
+from ast import arg
+from sqlite3 import paramstyle
 import numpy as np
 import pandas as pd
 import torch
+import argparse
 from torch import nn
 from torch.utils.data import DataLoader, Dataset
 
 
+default_batch_size = 64
+default_epochs = 5
+
+
 def hour_to_int(text: str):
     return float(text.replace(':', ''))
 
@@ -82,10 +89,20 @@
     print(f"Avg loss: {test_loss:>8f} \n")
 
 
+def setup_args():
+    args_parser = argparse.ArgumentParser(prefix_chars='-')
+    args_parser.add_argument('-b', '--batchSize', type=int, default=default_batch_size)
+    args_parser.add_argument('-e', '--epochs', type=int, default=default_epochs)
+
+    return args_parser.parse_args()
+
+
 device = "cuda" if torch.cuda.is_available() else "cpu"
 print(f"Using {device} device")
 
-batch_size = 64
+args = setup_args()
+
+batch_size = args.batchSize
 
 plant_test = PlantsDataset('data/Plant_1_Generation_Data.csv.test')
 plant_train = PlantsDataset('data/Plant_1_Generation_Data.csv.train')
@@ -103,7 +120,7 @@
 print(model)
 loss_fn = nn.MSELoss()
 optimizer = torch.optim.Adam(model.parameters(), lr=1e-3)
-epochs = 5
+epochs = args.epochs
 for t in range(epochs):
     print(f"Epoch {t + 1}\n-------------------------------")
     train(train_dataloader, model, loss_fn, optimizer)
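
For reference, below is a minimal, self-contained sketch of how the flags introduced by this patch are meant to be consumed. It assumes only the two options visible in the diff (-b/--batchSize and -e/--epochs, defaults 64 and 5) and illustrates the argparse wiring plus the Jenkins invocation; it is not the full train_model.py.

import argparse

default_batch_size = 64
default_epochs = 5


def setup_args():
    # Same flag names and defaults as in the patched train_model.py.
    parser = argparse.ArgumentParser()
    parser.add_argument('-b', '--batchSize', type=int, default=default_batch_size)
    parser.add_argument('-e', '--epochs', type=int, default=default_epochs)
    return parser.parse_args()


if __name__ == '__main__':
    args = setup_args()
    # The parameterized Jenkins stage calls the real script as:
    #   python train_model.py -e ${params.EPOCHS} -b ${params.BATCHSIZE}
    print(f"batch_size={args.batchSize}, epochs={args.epochs}")

Both Jenkins parameters are declared as strings, so type=int on the argparse side performs the conversion; a non-numeric value makes the build fail fast with a usage error instead of reaching training.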