ium_06 jenkinsfileTrain + 2nd pipeline

wikbom 2023-05-10 16:45:00 +02:00
parent a99e2c5ede
commit 8000d21bf4
4 changed files with 47 additions and 7 deletions

Jenkinsfile

@@ -29,16 +29,13 @@ node {
"KAGGLE_KEY=${params.KAGGLE_KEY}",
"CUTOFF=${params.CUTOFF}"]) {
sh "./script.sh ${CUTOFF}"
sh "./learning.py"
}
}
stage('artifacts') {
echo 'saving artifacts'
archiveArtifacts 'output.txt'
archiveArtifacts 'model.pt'
}
}
}

JenkinsfileTrain (new file)

@@ -0,0 +1,35 @@
node {
    checkout scm
    def dockerimage = docker.build("titanic-image")
    dockerimage.inside {
        stage('Preparation') {
            properties([
                parameters([
                    string(
                        defaultValue: 'default',
                        description: 'Hyperparameters passed to learning.py (or "default")',
                        name: 'LEARNING_PARAMETERS',
                        trim: false)
                ])
            ])
            copyArtifacts projectName: 's470618-create-dataset', filter: '*.csv', fingerprintArtifacts: true, selector: lastSuccessful(), target: '.'
        }
        stage('Build') {
            withEnv(["LEARNING_PARAMETERS=${params.LEARNING_PARAMETERS}"]) {
                sh "./learning.py ${LEARNING_PARAMETERS}"
            }
        }
        stage('artifacts') {
            echo 'saving artifacts'
            archiveArtifacts 'model.pt'
        }
        stage('Trigger Learning pipeline') {
            build 's470618-training'
        }
    }
}
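
The build step used in the 'Trigger Learning pipeline' stage can also forward parameters to the job it starts. A minimal sketch, assuming the triggered job exposes the same LEARNING_PARAMETERS parameter (the example value and its format are assumptions, not defined in this commit):

// sketch only: the parameter value and its format are assumptions
build job: 's470618-training',
    parameters: [string(name: 'LEARNING_PARAMETERS', value: 'alpha=0.003 epochs=1000')]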

learning.py

@@ -4,6 +4,7 @@ import torch
from torch import nn
import pandas as pd
import subprocess
import sys
from sklearn.model_selection import train_test_split
import torch.nn.functional as F
@@ -27,7 +28,15 @@ def print_(loss):
print ("The loss calculated: ", loss)
if __name__ == "__main__":
df = pd.read_csv("train.csv")
if len(sys.argv) < 2 or sys.argv[1] == 'default':
    alpha = 0.003  # learning rate
    epochs = 1000
else:
    # TODO: split the args string so custom hyperparameters work;
    # fall back to the defaults until that parsing is implemented
    alpha = 0.003
    epochs = 1000
df = pd.read_csv("output.csv")
df = df.dropna() #drop NA values
columns_to_normalize=['Age','Fare'] #NORMALIZATION
@ -52,9 +61,8 @@ if __name__ == "__main__":
Yt = torch.tensor(Y_train, dtype=torch.long)
model = Model(Xt.shape[1])
optimizer = torch.optim.Adam(model.parameters(), lr=0.01)
optimizer = torch.optim.Adam(model.parameters(), lr=alpha)
loss_fn = nn.CrossEntropyLoss()
epochs = 1000
#TRAINING LOOP
for epoch in range(1, epochs+1):
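
The TODO in the hunk above leaves the argument string unparsed. A minimal sketch of one way to split it, assuming a space-separated key=value format such as 'alpha=0.01 epochs=500' (the format is an assumption, not defined in this commit):

# sketch: parse "alpha=0.01 epochs=500" style arguments; the format is assumed
def parse_hyperparameters(arg_string, alpha=0.003, epochs=1000):
    for pair in arg_string.split():
        key, _, value = pair.partition('=')
        if key == 'alpha':
            alpha = float(value)
        elif key == 'epochs':
            epochs = int(value)
    return alpha, epochs

# usage in the else: branch above:
# alpha, epochs = parse_hyperparameters(sys.argv[1])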

script.sh

@@ -3,5 +3,5 @@ kaggle competitions download -c titanic
echo 'kaggle download completed'
unzip titanic.zip
wc -l train.csv
head -$1 train.csv | shuf > output.txt
head -$1 train.csv | shuf > output.csv
echo 'script done'