Use Dockerfiles inside Jenkinsfiles
parent 70dde6e633 · commit 7d8d0a4dec
CreateDataset.dockerfile (new file, 9 lines)
@@ -0,0 +1,9 @@
+FROM ubuntu:latest
+
+ADD get-data.sh /get-data.sh
+ADD prepare_dataset.py /prepare_dataset.py
+
+RUN apt-get update
+RUN apt-get install -y python3 python3-pip unzip
+RUN pip install pandas
+RUN pip install scikit-learn
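This image is what the dataset pipeline's 'Archive artifacts' stage runs in (see the Jenkinsfile hunk further down). Below is a minimal sketch of a stage that consumes it, assuming an earlier stage has already cloned the repository into ium_z487183; the stage name and the shell command are illustrative, only filename, dir and reuseNode mirror this commit.

// Minimal sketch: run a stage inside the image built from CreateDataset.dockerfile.
// Only filename, dir and reuseNode mirror this commit; the stage name and the
// python3 check are illustrative.
pipeline {
    agent any
    stages {
        stage('Smoke test image') {
            agent {
                dockerfile {
                    filename 'CreateDataset.dockerfile'  // Dockerfile added above
                    dir 'ium_z487183'                    // it lives inside the cloned repo
                    reuseNode true                       // keep the outer node's workspace
                }
            }
            steps {
                sh 'python3 -c "import pandas, sklearn"' // confirm the deps baked into the image
            }
        }
    }
}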
DatasetStats.dockerfile (new file, 1 line)
@@ -0,0 +1 @@
+FROM ubuntu:latest
@@ -21,22 +21,37 @@ pipeline {
         )
     }
     stages {
+        stage('Checkout') {
+            steps {
+                sh 'rm -rf ium_z487183'
+                sh 'git clone https://git.wmi.amu.edu.pl/s487183/ium_z487183.git'
+            }
+        }
         stage('Prepare data') {
             steps {
                 withEnv(["KAGGLE_USERNAME=${params.KAGGLE_USERNAME}", "KAGGLE_KEY=${params.KAGGLE_KEY}"]) {
-                    sh './get-data.sh'
-                    sh 'python3 prepare_dataset.py'
+                    sh 'ium_z487183/get-data.sh'
+                    sh 'python3 ium_z487183/prepare_dataset.py'
                 }
             }
         }
         stage('Archive artifacts') {
+            agent {
+                dockerfile {
+                    filename 'CreateDataset.dockerfile'
+                    dir 'ium_z487183'
+                    reuseNode true
+                }
+            }
             steps {
-                archiveArtifacts 'X_test.csv'
-                archiveArtifacts 'X_val.csv'
-                archiveArtifacts 'X_train.csv'
-                archiveArtifacts 'Y_test.csv'
-                archiveArtifacts 'Y_val.csv'
-                archiveArtifacts 'Y_train.csv'
+                withEnv(["CUTOFF=${params.CUTOFF}"]) {
+                    archiveArtifacts 'X_test.csv'
+                    archiveArtifacts 'X_val.csv'
+                    archiveArtifacts 'X_train.csv'
+                    archiveArtifacts 'Y_test.csv'
+                    archiveArtifacts 'Y_val.csv'
+                    archiveArtifacts 'Y_train.csv'
+                }
             }
         }
     }
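The params.* references in this hunk (KAGGLE_USERNAME, KAGGLE_KEY, CUTOFF) point at a parameters block earlier in the same Jenkinsfile; its closing lines are the `)` and `}` context at the top of the hunk. A hedged sketch of what that block could look like follows; the parameter names match the diff, but the types, defaults and descriptions are assumptions.

// Assumed shape of the parameters block whose closing braces appear as context
// above. Only the names KAGGLE_USERNAME, KAGGLE_KEY and CUTOFF come from the
// diff; types, defaults and descriptions are guesses.
pipeline {
    agent any
    parameters {
        string(name: 'KAGGLE_USERNAME', defaultValue: '', description: 'Kaggle account used by get-data.sh')
        password(name: 'KAGGLE_KEY', defaultValue: '', description: 'Kaggle API token')
        string(name: 'CUTOFF', defaultValue: '', description: 'Row cutoff exposed to the dataset steps')
    }
    stages {
        stage('Show parameters') {
            steps {
                echo "CUTOFF=${params.CUTOFF}"  // illustrative: read a parameter the same way the hunk does
            }
        }
    }
}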
@@ -8,15 +8,24 @@ pipeline {
         )
     }
     stages {
-        stage('Copy artifacts') {
+        stage('Checkout') {
             steps {
-                copyArtifacts filter: 'X_test.csv,X_val.csv,X_train.csv,Y_test.csv,Y_val.csv,Y_train.csv', fingerprintArtifacts: true, projectName: 'z487183-create-dataset', selector: workspace()
+                sh 'rm -rf ium_z487183'
+                sh 'git clone https://git.wmi.amu.edu.pl/s487183/ium_z487183.git'
             }
         }
         stage('Prepare stats') {
+            agent {
+                dockerfile {
+                    filename 'DatasetStats.dockerfile'
+                    dir 'ium_z487183'
+                    reuseNode true
+                }
+            }
             steps {
-                sh './prepare-stats.sh'
-                archiveArtifacts 'stats.txt'
+                copyArtifacts filter: 'X_test.csv,X_val.csv,X_train.csv,Y_test.csv,Y_val.csv,Y_train.csv', fingerprintArtifacts: true, projectName: 'z487183-create-dataset', selector: workspace()
+                sh './prepare-stats.sh'
+                archiveArtifacts 'stats.txt'
             }
         }
     }
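Read as a whole rather than as a diff, the 'Prepare stats' stage after this commit looks as follows; the surrounding pipeline skeleton (agent any) is assumed, everything inside the stage is taken from the hunk above.

// 'Prepare stats' stage assembled from the added and context lines above.
// The enclosing pipeline/agent skeleton is assumed; the stage body is from the diff.
pipeline {
    agent any
    stages {
        stage('Prepare stats') {
            agent {
                dockerfile {
                    filename 'DatasetStats.dockerfile'
                    dir 'ium_z487183'
                    reuseNode true
                }
            }
            steps {
                // pull the split CSVs archived by the dataset job into this workspace
                copyArtifacts filter: 'X_test.csv,X_val.csv,X_train.csv,Y_test.csv,Y_val.csv,Y_train.csv',
                              fingerprintArtifacts: true,
                              projectName: 'z487183-create-dataset',
                              selector: workspace()
                sh './prepare-stats.sh'        // writes the line counts into stats.txt
                archiveArtifacts 'stats.txt'   // keep the stats with the build
            }
        }
    }
}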
@@ -1,4 +1,5 @@
 #!/bin/bash
+rm -f stats.txt
 touch stats.txt
 wc -l X_test.csv >> stats.txt
 wc -l X_val.csv >> stats.txt