Jakub Zaręba 2023-05-11 00:32:25 +02:00
parent bc646e2982
commit 7dc5e630f6
4 changed files with 130 additions and 4 deletions


@@ -2,7 +2,7 @@ pipeline {
     agent {
         docker {
             image 'python:3.11'
-            args '-v /root/.cache:/root/.cache -u root'
+            args '-v /root/.cache:/root/.cache -u root -v /tmp/mlruns:/tmp/mlruns -v /mlruns:/mlruns'
         }
     }
     parameters {
@@ -11,7 +11,7 @@ pipeline {
     stages {
         stage('Preparation') {
             steps {
-                sh 'pip install pandas tensorflow scikit-learn imbalanced-learn sacred pymongo'
+                sh 'pip install pandas tensorflow scikit-learn imbalanced-learn sacred pymongo mlflow'
            }
        }
        stage('Pobierz dane') {
@@ -24,7 +24,6 @@ pipeline {
        stage('Trenuj model') {
            steps {
                script {
-                    // sh "python3 train.py --epochs $EPOCHS"
                    sh "python3 train.py"
                }
            }
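
The extra volume mounts and the added mlflow install above exist so the training step can report to an MLflow tracking server. Below is a minimal sketch of that reporting, assuming a server is already listening on http://172.17.0.1:5000 (the same address the updated train.py further down points at); the parameter and metric names are illustrative only, not part of this commit.

import mlflow

mlflow.set_tracking_uri("http://172.17.0.1:5000")   # tracking server assumed by this pipeline
mlflow.set_experiment("s487187")

with mlflow.start_run():
    mlflow.log_param("epochs", 10)        # illustrative parameter
    mlflow.log_metric("accuracy", 0.0)    # illustrative metric, just to verify the connection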

old_JenkinsfileDL Normal file

@@ -0,0 +1,38 @@
pipeline {
    agent {
        docker {
            image 'python:3.11'
            args '-v /root/.cache:/root/.cache -u root'
        }
    }
    parameters {
        string(name: 'EPOCHS', defaultValue: '10', description: 'Liczba Epok')
    }
    stages {
        stage('Preparation') {
            steps {
                sh 'pip install pandas tensorflow scikit-learn imbalanced-learn sacred pymongo'
            }
        }
        stage('Pobierz dane') {
            steps {
                script {
                    copyArtifacts(projectName: 's487187-create-dataset', fingerprintArtifacts: true)
                }
            }
        }
        stage('Trenuj model') {
            steps {
                script {
                    // sh "python3 train.py --epochs $EPOCHS"
                    sh "python3 train.py"
                }
            }
        }
        stage('Zarchiwizuj model') {
            steps {
                archiveArtifacts artifacts: 'model.h5', fingerprint: true
            }
        }
    }
}
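
The 'Zarchiwizuj model' stage archives the model.h5 file that train.py writes with model.save(...). A downstream job that copies this artifact could load it as sketched below; this assumes model.h5 sits in the current workspace and is not part of the commit itself.

import tensorflow as tf

# load the archived Keras model produced by train.py
model = tf.keras.models.load_model('model.h5')
model.summary()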

old_train.py Normal file

@@ -0,0 +1,74 @@
from sacred import Experiment
from sacred.observers import MongoObserver, FileStorageObserver
import os

os.environ["SACRED_NO_GIT"] = "1"

ex = Experiment('s487187-training', interactive=True, save_git_info=False)
ex.observers.append(MongoObserver(url='mongodb://admin:IUM_2021@172.17.0.1:27017', db_name='sacred'))

@ex.config
def my_config():
    data_file = 'data.csv'
    model_file = 'model.h5'
    epochs = 10
    batch_size = 32
    test_size = 0.2
    random_state = 42

@ex.capture
def train_model(data_file, model_file, epochs, batch_size, test_size, random_state):
    import pandas as pd
    from sklearn.model_selection import train_test_split
    from sklearn.preprocessing import MinMaxScaler
    import tensorflow as tf
    from imblearn.over_sampling import SMOTE

    smote = SMOTE(random_state=random_state)
    data = pd.read_csv(data_file, sep=';')
    print('Total rows:', len(data))
    print('Rows with medal:', len(data.dropna(subset=['Medal'])))
    data = pd.get_dummies(data, columns=['Sex', 'Medal'])
    data = data.drop(columns=['Name', 'Team', 'NOC', 'Games', 'Year', 'Season', 'City', 'Sport', 'Event'])

    scaler = MinMaxScaler()
    data = pd.DataFrame(scaler.fit_transform(data), columns=data.columns)

    X = data.filter(regex='Sex|Age')
    y = data.filter(regex='Medal')
    y = pd.get_dummies(y)
    X = X.fillna(0)
    y = y.fillna(0)
    y = y.values

    X_resampled, y_resampled = smote.fit_resample(X, y)
    X_train, X_test, y_train, y_test = train_test_split(X_resampled, y_resampled, test_size=test_size, random_state=random_state)

    model = tf.keras.models.Sequential()
    model.add(tf.keras.layers.Dense(64, input_dim=X_train.shape[1], activation='relu'))
    model.add(tf.keras.layers.Dense(32, activation='relu'))
    model.add(tf.keras.layers.Dense(y.shape[1], activation='softmax'))
    model.compile(loss='categorical_crossentropy', optimizer='adam', metrics=['accuracy'])

    model.fit(X_train, y_train, epochs=epochs, batch_size=batch_size)
    loss, accuracy = model.evaluate(X_test, y_test)
    print('Test accuracy:', accuracy)
    print('Test loss:', loss)

    model.save(model_file)
    return accuracy

@ex.main
def run_experiment():
    accuracy = train_model()
    ex.log_scalar('accuracy', accuracy)
    ex.add_artifact('model.h5')

ex.run()
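
The hyperparameters live in my_config() and are injected into train_model() by @ex.capture, so a caller can override them per run without editing the script. Below is a hypothetical sketch of wiring the pipeline's EPOCHS parameter through, if the commented-out "python3 train.py --epochs $EPOCHS" call were restored; the argparse handling is an assumption, not part of this commit, and would replace the bare ex.run() call above.

import argparse

if __name__ == '__main__':
    parser = argparse.ArgumentParser()
    parser.add_argument('--epochs', type=int, default=10)   # matches the Jenkins EPOCHS parameter
    args = parser.parse_args()
    # Sacred merges config_updates over my_config() and passes the result into train_model()
    ex.run(config_updates={'epochs': args.epochs})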


@@ -1,3 +1,8 @@
+import mlflow
+import mlflow.keras
+from mlflow.models.signature import infer_signature
+from mlflow.models import Model
+import pandas as pd
 from sacred import Experiment
 from sacred.observers import MongoObserver, FileStorageObserver
 import os
@@ -7,6 +12,9 @@ os.environ["SACRED_NO_GIT"] = "1"
 ex = Experiment('s487187-training', interactive=True, save_git_info=False)
 ex.observers.append(MongoObserver(url='mongodb://admin:IUM_2021@172.17.0.1:27017', db_name='sacred'))
 
+mlflow.set_tracking_uri("http://172.17.0.1:5000")
+mlflow.set_experiment("s487187")
+
 @ex.config
 def my_config():
@@ -63,6 +71,13 @@ def train_model(data_file, model_file, epochs, batch_size, test_size, random_sta
     model.save(model_file)
 
+    mlflow.keras.log_model(model, "model")
+    mlflow.log_artifact("model.h5")
+
+    signature = infer_signature(X_train, model.predict(X_train))
+    input_example = pd.DataFrame(X_train[:1])
+    mlflow.keras.save_model(model, "model", signature=signature, input_example=input_example)
+
     return accuracy
 
 @ex.main
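
Once a run has gone through, the model logged with mlflow.keras.log_model above can be pulled back from the tracking server. A sketch follows; the run id is a placeholder you would take from the MLflow UI at http://172.17.0.1:5000, not anything recorded in this commit.

import mlflow
import mlflow.keras

mlflow.set_tracking_uri("http://172.17.0.1:5000")

run_id = "<run-id-from-the-mlflow-ui>"                      # hypothetical placeholder
model = mlflow.keras.load_model(f"runs:/{run_id}/model")    # "model" is the artifact path used in log_model

# the logged signature/input_example describe the expected input (the Sex|Age feature columns)
model.summary()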