ium_z487179/createDataset/Jenkinsfile
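// Pipeline overview (summarized from the stages below):
//   1. Download the rishikeshkonapure/home-loan-approval dataset from Kaggle,
//      using the KAGGLE_USERNAME / KAGGLE_KEY build parameters as credentials.
//   2. Build a Docker image from ./docker, run createDataset.py inside it and
//      archive the resulting home_loan_{train,test,val}.csv splits.
//   3. Trigger the downstream "z-s487179-training" job.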

pipeline {
    agent any
    parameters {
        string(
            defaultValue: 'wojciechbatruszewicz',
            description: 'Kaggle username',
            name: 'KAGGLE_USERNAME',
            trim: false
        )
        password(
            defaultValue: '',
            description: 'Kaggle token taken from kaggle.json file, as described in https://github.com/Kaggle/kaggle-api#api-credentials',
            name: 'KAGGLE_KEY'
        )
        string(
            defaultValue: '50',
            description: 'dataset cutoff',
            name: 'CUTOFF',
            trim: false
        )
    }
    stages {
        stage('Download dataset') {
            steps {
                checkout scm
                dir('./createDataset') {
                    sh 'ls -l'
                    // The Kaggle CLI reads its credentials from the
                    // KAGGLE_USERNAME and KAGGLE_KEY environment variables.
                    withEnv(["KAGGLE_USERNAME=${params.KAGGLE_USERNAME}",
                             "KAGGLE_KEY=${params.KAGGLE_KEY}"]) {
                        // sh 'chmod +x ./datasetScript.sh'
                        // sh './datasetScript.sh'
                        sh 'kaggle datasets download -d rishikeshkonapure/home-loan-approval'
                        sh 'unzip -o home-loan-approval.zip'
                    }
                }
            }
        }
        stage('Docker') {
            steps {
                script {
                    // Build the image from ./docker and run the dataset preparation inside it.
                    def dockerImage = docker.build("docker-image", "./docker")
                    dockerImage.inside {
                        sh 'ls -l'
                        dir('./createDataset') {
                            sh 'ls -l'
                            sh 'python3 ./createDataset.py'
                            // Archive the generated train/test/validation splits.
                            archiveArtifacts 'home_loan_train.csv'
                            archiveArtifacts 'home_loan_test.csv'
                            archiveArtifacts 'home_loan_val.csv'
                        }
                    }
                }
            }
        }
        stage('triggerTrain') {
            steps {
                build job: "z-s487179-training", wait: true
            }
        }
        // stage('Archive file') {
        //     steps {
        //         dir ('./createDataset') {
        //             archiveArtifacts artifacts: 'loan_sanction_shuffled.csv', fingerprint: true\
        //         }
        //     }
        // }
    }
}