// ium_464863/Jenkinsfile
// Declarative pipeline: builds the agent image from the repo Dockerfile,
// then downloads a Kaggle dataset (truncated to CUTOFF rows) and archives
// the resulting CSV splits.
pipeline {
    agent {
        dockerfile {
            filename 'Dockerfile'
            args '-u root'
        }
    }
    parameters {
        password(
            name: 'KAGGLE_USERNAME',
            defaultValue: '',
            description: 'Kaggle username'
        )
        password(
            name: 'KAGGLE_KEY',
            defaultValue: '',
            description: 'Kaggle API key'
        )
        string(
            name: 'CUTOFF',
            defaultValue: '500',
            description: 'Get only the first CUTOFF rows of the dataset'
        )
    }
    stages {
        stage('Clone repository') {
            steps {
                checkout scm
            }
        }
        stage('Download dataset') {
            steps {
                // Expose the credentials/parameters to the shell steps as
                // environment variables (read by the Kaggle CLI / script).
                withEnv(["KAGGLE_USERNAME=${params.KAGGLE_USERNAME}",
                         "KAGGLE_KEY=${params.KAGGLE_KEY}",
                         "CUTOFF=${params.CUTOFF}"]) {
                    // Refresh the package index first: 'apt-get install' can
                    // fail against a stale cache in a fresh container.
                    sh 'apt-get update && apt-get install -y sudo'
                    // -p: idempotent on re-runs when /.kaggle already exists.
                    sh 'sudo mkdir -p /.kaggle'
                    // NOTE(review): the redirection is performed by the calling
                    // shell, not by sudo — this only works because the container
                    // runs as root ('-u root' above).
                    sh 'sudo echo > /.kaggle/kaggle.json'
                    sh 'sudo chmod 777 /.kaggle/kaggle.json'
                    sh 'sudo chown `whoami` /.kaggle/kaggle.json'
                    // Single-quoted Groovy string: the SHELL expands
                    // $KAGGLE_USERNAME at runtime. Double quotes would make
                    // Groovy interpolate the secret into the command line,
                    // leaking it into the build log (Jenkins security warning).
                    sh 'echo $KAGGLE_USERNAME'
                    sh 'sudo chmod +x ./download_dataset.py'
                    sh 'python3 ./download_dataset.py $CUTOFF'
                    // Ant-style include patterns are workspace-relative and
                    // must not carry a './' prefix, or they may fail to match.
                    archiveArtifacts artifacts: 'datasets/data.csv,datasets/train.csv,datasets/dev.csv,datasets/test.csv', onlyIfSuccessful: true
                }
            }
        }
    }
}