correction for create-dataset Jenkinsfile
parent 5cec765150
commit 39f02f99ff

.gitignore (vendored), 1 line changed
@@ -3,3 +3,4 @@ df_wta.csv
 atp-and-wta-tennis-data.zip
 data
 model.zip
+secret.txt
Dockerfile, 11 lines changed
@@ -13,18 +13,19 @@ RUN pip3 install pandas
 RUN pip3 install pillow --global-option="build_ext" --global-option="--disable-zlib" --global-option="--disable-jpeg"
 RUN pip3 install scikit-learn
 RUN pip3 install matplotlib
-RUN mkdir ~/.kaggle/
-RUN echo '{"username":"wirus006","key":"c3323e37d3f91a0914d0172ef3c7c30c"}' > ~/.kaggle/kaggle.json
 
 # Create app directory in image
 WORKDIR /app
 
-# Copy init dataset script to /app directory in image
-COPY ./init.py ./
+COPY . .
+ARG KAGGLE_USERNAME
+ARG KAGGLE_KEY
 
 # Download kaggle dataset
 RUN kaggle datasets download -d hakeem/atp-and-wta-tennis-data
 RUN unzip -o atp-and-wta-tennis-data.zip
 
 # Script executed after docker run
-CMD python3 ./init.py
+RUN python3 ./init.py
+RUN chmod a+rwx -R *
+RUN ls -la
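The removed ~/.kaggle/kaggle.json lines work because the Kaggle CLI also accepts credentials from the KAGGLE_USERNAME and KAGGLE_KEY environment variables, and the new ARG instructions make the values passed with --build-arg visible to the later RUN steps. A minimal Python sketch of the same environment-variable-driven download, not part of the commit; the token value is a placeholder:

# Sketch only (not part of the commit): drive the Kaggle CLI download from
# Python using the same environment-variable credentials the Dockerfile now
# receives via --build-arg. The KAGGLE_KEY value below is a placeholder.
import os
import subprocess

env = dict(os.environ,
           KAGGLE_USERNAME="wirus006",
           KAGGLE_KEY="<token from kaggle.json>")
subprocess.run(["kaggle", "datasets", "download", "-d",
                "hakeem/atp-and-wta-tennis-data"], check=True, env=env)
subprocess.run(["unzip", "-o", "atp-and-wta-tennis-data.zip"], check=True, env=env)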
Jenkinsfile (vendored), 32 lines changed
@@ -1,7 +1,4 @@
 pipeline {
-    agent {
-        dockerfile true
-    }
     parameters {
         string (
             defaultValue: 'wirus006',
@@ -10,35 +7,28 @@ pipeline {
             trim: false
         )
         password (
-            defaultValue: 'c3323e37d3f91a0914d0172ef3c7c30c',
+            defaultValue: '',
             description: 'Kaggle token taken from kaggle.json file, as described in https://github.com/Kaggle/kaggle-api#api-credentials',
             name: 'KAGGLE_KEY'
         )
     }
+    agent {
+        dockerfile {
+            additionalBuildArgs "--build-arg KAGGLE_USERNAME=${params.KAGGLE_USERNAME} --build-arg KAGGLE_KEY=${params.KAGGLE_KEY} -t s444498-create-dataset"
+        }
+    }
     stages {
-        stage('Hello') {
-            steps {
-                echo 'Hello world!'
-            }
-        }
-        stage('Checkout') {
-            steps {
-                git 'https://git.wmi.amu.edu.pl/s444498/ium_444498.git'
-            }
-        }
-        stage('Shell Script') {
+        stage('Archive dataset') {
             steps {
                 withEnv(["KAGGLE_USERNAME=${params.KAGGLE_USERNAME}",
                          "KAGGLE_KEY=${params.KAGGLE_KEY}"]) {
-                    sh "chmod +x -R ${env.WORKSPACE}"
                     sh 'echo hello world | figlet'
+                    sh 'chmod a+rwx -R *'
+                    sh 'pwd && ls'
+                    sh 'ls /app/data/'
+                    archiveArtifacts artifacts: '/app/data/*', onlyIfSuccessful: true
                 }
             }
         }
     }
-    post {
-        always {
-            archiveArtifacts artifacts: '*atp*.csv', onlyIfSuccessful: true
-        }
-    }
 }
init.py, 6 lines changed
@@ -8,12 +8,6 @@ from pathlib import Path
 
 # Data initialization
 
-file_exists = exists('./df_atp.csv')
-
-if not file_exists:
-    subprocess.run(["kaggle", "datasets", "download", "-d", "hakeem/atp-and-wta-tennis-data"])
-    subprocess.run(["unzip", "-o", "atp-and-wta-tennis-data.zip"])
-
 atp_data = pd.read_csv('df_atp.csv')
 print(atp_data)
 
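With the download and unzip now handled at image build time by the Dockerfile, the data-initialization part of init.py reduces to loading the already-unpacked CSV. A minimal standalone sketch of that remaining step, with the pandas import assumed from the untouched top of init.py:

# Sketch of the trimmed initialization step; df_atp.csv is assumed to have
# been downloaded and unpacked during the Docker image build.
import pandas as pd

atp_data = pd.read_csv('df_atp.csv')
print(atp_data)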