pipeline {
    // Run the build inside an image built from the repository's Dockerfile, on a 'docker'-labelled node
    agent {
        dockerfile {
            filename 'Dockerfile'
            label 'docker'
        }
    }

    // Re-run this pipeline whenever the dataset-creation job finishes successfully
    triggers {
        upstream(upstreamProjects: 's123456-create-dataset', threshold: hudson.model.Result.SUCCESS)
    }

    parameters {
        string(
            defaultValue: '--epochs 100 --batch_size 32 --learning_rate 0.01',
            description: 'Training parameters',
            name: 'TRAINING_PARAMS'
        )
    }

    environment {
        DATASET_PROJECT = 's123456-create-dataset'
        DATA_DIR = 'data'
    }

    stages {
        stage('Clone repository') {
            steps {
                checkout scm
            }
        }

        stage('Copy Dataset') {
            steps {
                // Pull the dataset artifacts from the last successful upstream build
                copyArtifacts(
                    projectName: "${env.DATASET_PROJECT}",
                    selector: lastSuccessful(),
                    target: "${env.DATA_DIR}"
                )
            }
        }

        stage('Set execute permission') {
            steps {
                sh 'chmod +x data_processing.sh'
            }
        }

        stage('Run shell script') {
            steps {
                sh './data_processing.sh'
            }
            post {
                success {
                    archiveArtifacts artifacts: 'results.txt', allowEmptyArchive: true
                }
            }
        }

        stage('Train Model') {
            steps {
                // Forward the user-supplied hyperparameters to the training script
                sh "python3 model.py ${params.TRAINING_PARAMS}"
            }
        }

        stage('Archive Model') {
            steps {
                archiveArtifacts artifacts: 'orange_quality_model_tf.h5', allowEmptyArchive: true
                archiveArtifacts artifacts: 'predictions_tf.json', allowEmptyArchive: true
            }
        }
    }

    post {
        always {
            cleanWs()
        }
    }
}
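
// A minimal sketch (an assumption, not part of this pipeline) of the argument
// parsing that model.py would need in order to consume TRAINING_PARAMS exactly
// as the 'Train Model' stage passes it:
//
//     import argparse
//
//     parser = argparse.ArgumentParser(description='Train the orange-quality model')
//     parser.add_argument('--epochs', type=int, default=100)
//     parser.add_argument('--batch_size', type=int, default=32)
//     parser.add_argument('--learning_rate', type=float, default=0.01)
//     args = parser.parse_args()  # e.g. args.epochs == 100 with the default TRAINING_PARAMS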