jenkins 2
This commit is contained in:
parent 33bd65fe00
commit 42ebc25f24
Jenkinsfile_predict (Normal file, 51 lines)
@@ -0,0 +1,51 @@
pipeline {
    agent any
    stages {
        stage('Clear_Before') {
            steps {
                sh 'rm -rf *'
            }
        }
        stage('Clone') {
            steps {
                sh 'git clone https://git.wmi.amu.edu.pl/s486867/ium_z486867.git'
            }
        }
        stage('copy_artifacts_from_training') {
            steps {
                copyArtifacts(projectName: 'z-s486867-training', fingerprintArtifacts: true)
            }
        }
        stage('copy_artifacts_from_evaluation') {
            steps {
                copyArtifacts(projectName: 'z-s486867-evaluation', fingerprintArtifacts: true, optional: true)
            }
        }
        stage('copy_artifacts_test') {
            steps {
                copyArtifacts(projectName: 'z-s486867-create-dataset', filter: 'X_test.csv,Y_test.csv', fingerprintArtifacts: true)
            }
        }
        stage('Docker') {
            agent {
                dockerfile {
                    filename 'Dockerfile'
                    dir 'ium_z486867'
                    reuseNode true
                }
            }
            steps {
                sh 'ls -a'
                sh 'python ./ium_z486867/predict.py'
                archiveArtifacts 'prediction.csv'
                archiveArtifacts 'metrics.csv'
            }
        }
        stage('clear_after') {
            steps {
                sh 'rm -rf *'
            }
        }
    }
}
Jenkinsfile_train (Normal file, 47 lines)
@@ -0,0 +1,47 @@
pipeline {
    agent any
    parameters {
        string(
            defaultValue: '1000',
            description: 'EPOCHS',
            name: 'EPOCHS',
            trim: false
        )
    }
    stages {
        stage('Clear_Before') {
            steps {
                sh 'rm -rf *'
            }
        }
        stage('Clone') {
            steps {
                sh 'git clone https://git.wmi.amu.edu.pl/s486867/ium_z486867.git'
            }
        }
        stage('copy_artifacts') {
            steps {
                copyArtifacts(projectName: 'z-s486867-create-dataset', fingerprintArtifacts: true)
            }
        }
        stage('Docker') {
            agent {
                dockerfile {
                    filename 'Dockerfile'
                    dir 'ium_z486867'
                    reuseNode true
                }
            }
            steps {
                sh 'python ./ium_z486867/train.py'
                archiveArtifacts 'model/'
            }
        }
        stage('Clear_After') {
            steps {
                sh 'rm -rf *'
            }
        }
    }
}
predict.py (Normal file, 21 lines)
@@ -0,0 +1,21 @@
import os

import pandas as pd
import tensorflow as tf
from keras import utils

# Jenkins exposes the build number as an environment variable.
build_number = int(os.environ['BUILD_NUMBER'])

# Load the SavedModel directory produced by train.py and copied in from the training job.
model = tf.keras.models.load_model('model')

x_to_test = pd.read_csv('./X_test.csv')
y_to_test = pd.read_csv('./Y_test.csv')

# One-hot encode the labels to match the model's 4-unit softmax output.
y_to_test = utils.to_categorical(y_to_test)

# evaluate() returns [loss, accuracy] because the model was compiled with metrics=['accuracy'].
loss, accuracy = model.evaluate(x_to_test, y_to_test)

# Append one row per build so metrics accumulate across runs.
with open('metrics.csv', 'a') as file:
    file.write(f'{build_number},{loss},{accuracy}\n')

predictions = model.predict(x_to_test)

# Write the raw prediction probabilities as a flat comma-separated file.
predictions.tofile('prediction.csv', sep=',', format='%s')
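Because predict.py opens metrics.csv in append mode and writes one build_number,loss,accuracy row per run, the file accumulates evaluation results across Jenkins builds. A minimal sketch of reading it back; the script name and column labels are illustrative assumptions, not part of this commit:

# read_metrics.py -- hypothetical helper, not part of this commit
import pandas as pd

# metrics.csv has no header row; predict.py appends "<build>,<loss>,<accuracy>" lines.
history = pd.read_csv('metrics.csv', header=None,
                      names=['build_number', 'loss', 'accuracy'])

# Print accuracy per recorded build, oldest first.
print(history.sort_values('build_number')[['build_number', 'accuracy']])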
train.py (Normal file, 56 lines)
@@ -0,0 +1,56 @@
import os

import pandas as pd
import tensorflow as tf
from keras import utils
from keras.layers import Dense
from keras.models import Sequential

# The epoch count comes from the Jenkins build parameter; fall back to 1000
# if the supplied value is not a positive number.
EPOCHS = int(os.environ['EPOCHS'])
if EPOCHS <= 0:
    EPOCHS = 1000

X_train = pd.read_csv('./X_train.csv', engine='python', encoding='ISO-8859-1', sep=',')
X_val = pd.read_csv('./X_val.csv', engine='python', encoding='ISO-8859-1', sep=',')
Y_train = pd.read_csv('./Y_train.csv', engine='python', encoding='ISO-8859-1', sep=',')
Y_val = pd.read_csv('./Y_val.csv', engine='python', encoding='ISO-8859-1', sep=',')

# One-hot encode the labels for the categorical cross-entropy loss.
Y_train = utils.to_categorical(Y_train)
Y_val = utils.to_categorical(Y_val)

model = Sequential(
    [
        Dense(100, input_dim=X_train.shape[1], activation='relu'),
        Dense(70, activation='relu'),
        Dense(50, activation='relu'),
        Dense(4, activation='softmax')
    ],
    name='Powerlifters_model'
)

model.compile(
    loss=tf.keras.losses.CategoricalCrossentropy(),
    optimizer=tf.keras.optimizers.Adam(),
    metrics=['accuracy']
)

model.fit(
    X_train, Y_train,
    epochs=EPOCHS,
    validation_data=(X_val, Y_val)
)

# Save as a SavedModel directory; Jenkinsfile_train archives it as 'model/'.
model.save('model')
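A quick way to sanity-check the artifact that model.save('model') writes, and that Jenkinsfile_train archives as model/, is to load it back and confirm it produces the four-class softmax output defined above. A minimal sketch, assuming train.py has already run in the current directory; the file name and dummy input are illustrative only:

# check_model.py -- hypothetical smoke test, not part of this commit
import numpy as np
import tensorflow as tf

# Load the SavedModel directory written by train.py.
model = tf.keras.models.load_model('model')

# The final Dense layer has 4 softmax units, so each prediction should be a
# length-4 probability vector that sums to roughly 1.
dummy = np.zeros((1, model.input_shape[1]), dtype='float32')
probs = model.predict(dummy)
assert probs.shape == (1, 4)
assert abs(probs.sum() - 1.0) < 1e-5
print('model loads and returns a 4-class distribution:', probs[0])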