pipeline {
    agent { dockerfile true }

    parameters {
        string(
            name: 'CUTOFF',
            defaultValue: '10000',
            description: 'Number of rows to truncate the dataset to'
        )
        string(
            name: 'TRAINING_PARAMS',
            defaultValue: '--epochs 100 --batch_size 32 --learning_rate 0.01',
            description: 'Training parameters'
        )
    }

    environment {
        DATASET_PROJECT = 's123456-create-dataset'
        DATA_DIR = 'data'
    }

    stages {
        stage('Clone repository') {
            steps {
                checkout([$class: 'GitSCM',
                          branches: [[name: '*/master']],
                          userRemoteConfigs: [[url: 'https://git.wmi.amu.edu.pl/s464906/ium_464906']]])
            }
        }

        stage('Copy Dataset') {
            steps {
                // Pull the dataset artifacts from the last successful build
                // of the upstream dataset-creation job.
                copyArtifacts(
                    projectName: "${DATASET_PROJECT}",
                    selector: lastSuccessful(),
                    target: "${env.DATA_DIR}"
                )
            }
        }

        stage('Set execute permission') {
            steps {
                sh 'chmod +x data_processing.sh'
            }
        }

        stage('Run shell script') {
            steps {
                // CUTOFF is exposed to sh steps as an environment variable,
                // so data_processing.sh can read it directly.
                sh './data_processing.sh'
            }
            post {
                success {
                    archiveArtifacts artifacts: 'results.txt', allowEmptyArchive: true
                }
            }
        }

        stage('Install Dependencies') {
            steps {
                sh 'pip install -r requirements.txt'
            }
        }

        stage('Train Model') {
            steps {
                sh "python model.py ${params.TRAINING_PARAMS}"
            }
        }

        stage('Archive Model') {
            steps {
                archiveArtifacts artifacts: 'orange_quality_model_tf.h5', allowEmptyArchive: true
                archiveArtifacts artifacts: 'predictions_tf.json', allowEmptyArchive: true
            }
        }
    }

    post {
        always {
            cleanWs()
        }
    }
}
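
The contents of data_processing.sh are not shown here, so the following is only a hypothetical sketch of the truncation step the CUTOFF parameter implies. It assumes the copied dataset is a single CSV under data/ (the file name is invented for illustration) and relies on Jenkins exposing pipeline parameters to shell steps as environment variables.

    # Hypothetical sketch of the CUTOFF truncation; the real
    # data_processing.sh is not shown. Assumes data/dataset.csv exists.
    import os

    import pandas as pd

    cutoff = int(os.environ.get("CUTOFF", "10000"))
    df = pd.read_csv("data/dataset.csv")  # assumed path and file name
    df.head(cutoff).to_csv("data/dataset_truncated.csv", index=False)
    print(f"Kept {min(cutoff, len(df))} of {len(df)} rows")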
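
Likewise, model.py is not shown; for the 'Train Model' stage to work, it must accept the flags carried in TRAINING_PARAMS. A minimal argparse sketch of such a CLI, with the training body only indicated, might look like this. The output file names match what the 'Archive Model' stage expects.

    # Minimal sketch of the CLI model.py would need in order to accept
    # TRAINING_PARAMS as passed by the 'Train Model' stage; the actual
    # training code is the author's and is not reproduced here.
    import argparse

    def main() -> None:
        parser = argparse.ArgumentParser(description="Train the orange-quality model")
        parser.add_argument("--epochs", type=int, default=100)
        parser.add_argument("--batch_size", type=int, default=32)
        parser.add_argument("--learning_rate", type=float, default=0.01)
        args = parser.parse_args()

        # ... build and train the model with args.epochs, args.batch_size,
        # args.learning_rate, then write the artifacts the pipeline archives:
        #   orange_quality_model_tf.h5
        #   predictions_tf.json

    if __name__ == "__main__":
        main()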