diff --git a/Jenkingfile-multi b/Jenkingfile-multi
new file mode 100644
index 0000000..517239c
--- /dev/null
+++ b/Jenkingfile-multi
@@ -0,0 +1,85 @@
+pipeline {
+    agent {
+        dockerfile {
+            filename 'Dockerfile'
+            label 'docker'
+        }
+    }
+
+    triggers {
+        upstream(upstreamProjects: 's123456-create-dataset', threshold: hudson.model.Result.SUCCESS)
+    }
+
+    parameters {
+        string(
+            defaultValue: '--epochs 100 --batch_size 32 --learning_rate 0.01',
+            description: 'Training parameters',
+            name: 'TRAINING_PARAMS'
+        )
+    }
+
+    environment {
+        DATASET_PROJECT = 's123456-create-dataset'
+        DATA_DIR = 'data'
+    }
+
+    stages {
+        stage('Clone repository') {
+            steps {
+                checkout scm
+            }
+        }
+
+        stage('Copy Dataset') {
+            steps {
+                script {
+                    copyArtifacts(
+                        projectName: "${env.DATASET_PROJECT}",
+                        selector: lastSuccessful(),
+                        target: "${env.DATA_DIR}"
+                    )
+                }
+            }
+        }
+
+        stage('Set execute permission') {
+            steps {
+                script {
+                    sh 'chmod +x data_processing.sh'
+                }
+            }
+        }
+
+        stage('Run shell script') {
+            steps {
+                script {
+                    sh './data_processing.sh'
+                }
+            }
+            post {
+                success {
+                    archiveArtifacts artifacts: 'results.txt', allowEmptyArchive: true
+                }
+            }
+        }
+
+        stage('Train Model') {
+            steps {
+                sh "python3 model.py ${params.TRAINING_PARAMS}"
+            }
+        }
+
+        stage('Archive Model') {
+            steps {
+                archiveArtifacts artifacts: 'orange_quality_model_tf.h5', allowEmptyArchive: true
+                archiveArtifacts artifacts: 'predictions_tf.json', allowEmptyArchive: true
+            }
+        }
+    }
+
+    post {
+        always {
+            cleanWs()
+        }
+    }
+}
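
Note: the Train Model stage hands params.TRAINING_PARAMS straight to python3 model.py, but model.py itself is not part of this diff. As a minimal sketch of the command-line interface that stage assumes (flag names taken from the TRAINING_PARAMS default value; the actual script in the repository may differ), model.py would parse the arguments roughly like this:

    # Hypothetical sketch only -- the real model.py is not shown in this diff.
    # Flag names are assumed from the TRAINING_PARAMS default
    # ('--epochs 100 --batch_size 32 --learning_rate 0.01').
    import argparse

    parser = argparse.ArgumentParser(description="Train the orange quality model")
    parser.add_argument("--epochs", type=int, default=100)
    parser.add_argument("--batch_size", type=int, default=32)
    parser.add_argument("--learning_rate", type=float, default=0.01)
    args = parser.parse_args()

    # Training code would then use args.epochs, args.batch_size and
    # args.learning_rate, and write orange_quality_model_tf.h5 /
    # predictions_tf.json, which the Archive Model stage collects.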