node {
    stage('Preparation') {
        // Configure job properties: trigger on successful builds of the upstream
        // dataset job, allow other jobs to copy artifacts, and expose the
        // training hyperparameters as build parameters.
        properties([
            pipelineTriggers([
                upstream(
                    threshold: hudson.model.Result.SUCCESS,
                    upstreamProjects: 's424714-create-dataset'
                )
            ]),
            copyArtifactPermission('*'),
            parameters([
                buildSelector(
                    defaultSelector: lastSuccessful(),
                    description: 'Which build to use for copying artifacts',
                    name: 'BUILD_SELECTOR'
                ),
                string(
                    defaultValue: '2',
                    description: 'Batch size for training process',
                    name: 'BATCH_SIZE',
                    trim: false
                ),
                string(
                    defaultValue: '3',
                    description: 'Number of training epochs',
                    name: 'NUM_EPOCHS',
                    trim: false
                ),
                string(
                    defaultValue: '1e-6',
                    description: 'Learning rate',
                    name: 'LR',
                    trim: false
                )
            ])
        ])
    }

    stage('Git clone') {
        // Clone the project repository.
        checkout([
            $class: 'GitSCM',
            branches: [[name: '*/master']],
            extensions: [],
            userRemoteConfigs: [[
                credentialsId: 's424714',
                url: 'https://git.wmi.amu.edu.pl/s424714/ium_424714'
            ]]
        ])
    }

    stage('Dockerfile build') {
        // Make the workspace contents executable, copy the dataset artifacts
        // produced by the upstream job, and build the training image.
        sh "chmod -R +x ${env.WORKSPACE}"
        copyArtifacts fingerprintArtifacts: true,
                      projectName: 's424714-create-dataset',
                      selector: buildParameter('BUILD_SELECTOR')

        def dockerImage = docker.build('s424714-model')
        dockerImage.inside {
            withEnv(['TRANSFORMERS_CACHE=./.cache']) {
                stage('Docker: cloning artifacts') {
                    // Move the copied dataset files into the layout expected
                    // by the training script.
                    sh 'mkdir -p ./data/dataset'
                    sh 'mv -t ./data/dataset train.csv test.csv val.csv'
                    sh 'mv -t ./data True.csv Fake.csv'
                }
                stage('Docker: Running training model') {
                    // Train the model; LR, BATCH_SIZE, and NUM_EPOCHS are the
                    // build parameters defined above, expanded by the shell.
                    sh 'mkdir -p ./.cache'
                    sh 'mkdir -p ./sacred'
                    sh 'python ./src/main.py --train --lr=$LR --batch=$BATCH_SIZE --epochs=$NUM_EPOCHS'
                    // Copy the trained model and Sacred run archive back to the workspace.
                    sh "cp ./results/model.pt ${env.WORKSPACE}"
                    sh "cp ./results/sacred-artifacts.zip ${env.WORKSPACE}"
                }
            }
        }
    }

    stage('Saving artifacts') {
        echo 'Goodbye!'
        sh 'ls'
        archiveArtifacts artifacts: '*.pt, *.zip'
    }

    stage('Triggering eval job') {
        // Kick off the evaluation pipeline without waiting for it to finish.
        build job: 's424714-evaluation/master', wait: false
    }
}