// Jenkins declarative pipeline: downloads the Kaggle powerlifting dataset,
// runs create-dataset.py inside the repo's Docker image, and archives the
// resulting train/dev/test splits.
pipeline {
    agent any

    parameters {
        string(
            defaultValue: 'kalkam',
            description: 'Kaggle username',
            name: 'KAGGLE_USERNAME',
            trim: false
        )
        password(
            defaultValue: '',
            description: 'Kaggle token taken from kaggle.json file, as described in https://github.com/Kaggle/kaggle-api#api-credentials',
            name: 'KAGGLE_KEY'
        )
        string(
            defaultValue: '500',
            description: 'CUTOFF',
            name: 'CUTOFF',
            trim: false
        )
    }

    stages {
        stage('Clear directory before executing') {
            steps {
                // deleteDir() wipes the whole workspace, including dotfiles
                // that the previous 'rm -rf *' approach left behind.
                deleteDir()
            }
        }
        stage('Clone Git') {
            steps {
                sh 'git clone https://git.wmi.amu.edu.pl/s486867/ium_z486867'
            }
        }
        stage('Download dataset') {
            steps {
                // NOTE(review): Groovy-interpolating a password parameter into a
                // GString can expose the secret in logs/process metadata; Jenkins
                // recommends a credentials() binding instead — kept as-is to
                // preserve the job's parameter interface, but worth migrating.
                withEnv(["KAGGLE_USERNAME=${params.KAGGLE_USERNAME}",
                         "KAGGLE_KEY=${params.KAGGLE_KEY}"]) {
                    sh 'kaggle datasets download -d dansbecker/powerlifting-database'
                    sh 'unzip powerlifting-database.zip -d ./ium_z486867'
                }
            }
        }
        stage('Docker') {
            agent {
                dockerfile {
                    filename 'Dockerfile'
                    dir 'ium_z486867'
                    reuseNode true
                }
            }
            steps {
                sh 'python ./ium_z486867/create-dataset.py'
                // Splits are written to the workspace root by create-dataset.py.
                archiveArtifacts 'X_test.csv'
                archiveArtifacts 'X_dev.csv'
                archiveArtifacts 'X_train.csv'
            }
        }
        stage('clear_after') {
            steps {
                // Full workspace wipe (see note on the pre-clean stage).
                deleteDir()
            }
        }
    }
}