ium_464903/Jenkinsfile3

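// Jenkins declarative pipeline: builds a Docker image from the repository's Dockerfile,
// then runs the Kaggle credential check and the data-processing script inside that image and archives the result.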
pipeline {
    agent any
    parameters {
        string(
            defaultValue: 'jakubbg',
            description: 'Kaggle username',
            name: 'KAGGLE_USERNAME',
            trim: false
        )
        password(
            defaultValue: 'e42b293c818e4ecd7b9365ee037af428',
            description: 'Kaggle token taken from the kaggle.json file, as described in https://github.com/Kaggle/kaggle-api#api-credentials',
            name: 'KAGGLE_KEY'
        )
        string(
            defaultValue: '100',
            description: 'CUTOFF parameter',
            name: 'CUTOFF',
            trim: false
        )
    }
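    // Assumption (not shown here): the Dockerfile at the repository root installs the Kaggle CLI,
    // and data_processing_script.sh is committed to the repository; both are required by the stages below.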
    stages {
        stage('Build image') {
            steps {
                script {
                    checkout scm
                    // First argument is the tag assigned to the built image
                    // If you want to use a Dockerfile from a different path than ./Dockerfile, you can specify it as the second argument
                    def testImage = docker.build("test-image", ".")
                }
            }
        }
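        // The image tagged 'test-image' above is looked up by that name and reused in the next stage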
        stage('Run in container') {
            steps {
                script {
                    docker.image('test-image').inside {
                        // Inside a script {} block we are in scripted-pipeline syntax, so the
                        // nested stages below take a plain closure body rather than a steps {} directive
                        stage('Checkout') {
                            // Step: clone the git repository into the container workspace
                            checkout scm
                        }
                        stage('Build') {
                            // Expose the Kaggle credentials to the shell and verify them with the Kaggle CLI
                            withEnv(["KAGGLE_USERNAME=${params.KAGGLE_USERNAME}",
                                     "KAGGLE_KEY=${params.KAGGLE_KEY}"]) {
                                sh 'echo KAGGLE_USERNAME: $KAGGLE_USERNAME'
                                sh 'kaggle datasets list'
                            }
                        }
                        stage('Execute Shell Script') {
                            // Step: invoke the data-processing shell script with the CUTOFF parameter
                            withEnv(["CUTOFF=${params.CUTOFF}"]) {
                                sh 'chmod +x data_processing_script.sh' // Grant permission to execute the script
                                sh './data_processing_script.sh $CUTOFF' // Execute the script
                            }
                        }
                        stage('Archive Artifacts') {
                            // Step: archive the processed dataset produced by the script
                            archiveArtifacts artifacts: 'processed_data.csv', fingerprint: true
                        }
                    }
                }
            }
        }
    }
}