mlflow
commit 96e4c03d11 (parent 508f315ef4)
@@ -17,6 +17,7 @@ RUN pip3 install matplotlib
 RUN pip3 install sacred
 RUN pip3 install pymongo
 RUN pip3 install dvc
+RUN pip3 install mlflow
 
 
 WORKDIR /app
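This hunk only adds the MLflow client to the training image. A hypothetical smoke test, not part of the commit, that a pipeline step could run inside the container (with mlflow 2.x) to confirm the client is installed and can reach the tracking server that ium_train.py points at:

import mlflow
from mlflow.tracking import MlflowClient

# Hypothetical check, not in the commit: the URI mirrors the one hard-coded in
# ium_train.py and assumes an MLflow server is actually listening there.
mlflow.set_tracking_uri("http://localhost:5000")
client = MlflowClient()
print("mlflow", mlflow.__version__, "-", len(client.search_experiments()), "experiments visible")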
@@ -25,7 +25,7 @@ node {
 checkout([$class: 'GitSCM', branches: [[name: '*/master']], extensions: [], userRemoteConfigs: [[credentialsId: 's487197', url: 'https://git.wmi.amu.edu.pl/s487197/ium_487197']]])
 }
 stage('Dockerfile'){
-def testImage = docker.image('s487197/ium:52')
+def testImage = docker.image('s487197/ium:55')
 testImage.inside{
 copyArtifacts filter: 'baltimore_train.csv', projectName: 's487197-create-dataset'
 sh "python3 ium_sacred.py -epochs $EPOCHS -lr $LR -validation_split $VALIDATION_SPLIT"
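The sh step forwards the Jenkins build parameters as single-dash flags. A minimal sketch of the argparse setup the called script would need to accept them, assuming the flag names match the command line above exactly (the defaults are placeholders, not values from the repository):

import argparse

# Sketch: flag names taken from the Jenkins "sh" step above; defaults are illustrative.
parser = argparse.ArgumentParser()
parser.add_argument("-epochs", type=int, default=10)
parser.add_argument("-lr", type=float, default=0.001)
parser.add_argument("-validation_split", type=float, default=0.2)
args = parser.parse_args()
print(args.epochs, args.lr, args.validation_split)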
ium_train.py (21 changed lines)
@@ -11,6 +11,13 @@ import numpy as np
 from sklearn.preprocessing import LabelEncoder
 import argparse
 import shutil
+import mlflow
+import logging
+logging.basicConfig(level=logging.WARN)
+logger = logging.getLogger(__name__)
+
+mlflow.set_tracking_uri("http://localhost:5000")
+mlflow.set_experiment("s487197")
 
 def get_x_y(data):
 
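The tracking URI is hard-coded to http://localhost:5000, so logging only succeeds where an MLflow server is listening on that address. A hypothetical variant, not part of the commit, that honours the standard MLFLOW_TRACKING_URI environment variable and falls back to a local file store when no server is configured:

import os
import mlflow

# Hypothetical alternative to the hard-coded URI above (not in the commit):
# use MLFLOW_TRACKING_URI if set, otherwise log to a local ./mlruns file store.
mlflow.set_tracking_uri(os.environ.get("MLFLOW_TRACKING_URI", "file:./mlruns"))
mlflow.set_experiment("s487197")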
@@ -40,6 +47,12 @@ def train_model():
 data_train, x_train, y_train = get_x_y(train)
 normalizer = tf.keras.layers.Normalization(axis=1)
 normalizer.adapt(np.array(x_train))
+with mlflow.start_run() as run:
+print("MLflow run experiment_id: {0}".format(run.info.experiment_id))
+print("MLflow run artifact_uri: {0}".format(run.info.artifact_uri))
+mlflow.log_param("epochs", args.epochs)
+mlflow.log_param("lr", args.lr)
+mlflow.log_param("validation_split", args.validation_split)
 model = Sequential(normalizer)
 model.add(Dense(64, activation="relu"))
 model.add(Dense(10, activation='relu'))
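The three log_param calls could also be collapsed into a single batched call; a small equivalent sketch, assuming args is the argparse namespace parsed earlier in ium_train.py:

import mlflow

# Equivalent batched form of the three log_param calls above (sketch; `args`
# is assumed to be the parsed argparse.Namespace).
mlflow.log_params({
    "epochs": args.epochs,
    "lr": args.lr,
    "validation_split": args.validation_split,
})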
@@ -54,6 +67,14 @@ def train_model():
 y_train,
 epochs=args.epochs,
 validation_split=args.validation_split)
+mlflow.log_metric("loss", float(hist['loss']))
+mlflow.log_metric('accuracy', float(hist['accuracy']))
+signature = mlflow.models.signature.infer_signature(train_x, model.predict(x_test))
+if tracking_url_type_store != "file":
+
+mlflow.sklearn.log_model(model, "wines-model", registered_model_name="ElasticnetWineModel", signature=signature)
+else:
+mlflow.sklearn.log_model(model, "model", signature=signature)
 hist = pd.DataFrame(history.history)
 hist['epoch'] = history.epoch
 model.save('baltimore_model')
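As committed, this block uses hist, train_x, x_test and tracking_url_type_store before (or without) defining them, and logs a Keras model through the sklearn flavor with names taken from the MLflow wine tutorial. A minimal sketch of how the step could be made runnable, assuming history is the return value of model.fit and x_train is the training matrix from the earlier hunk; the artifact path and registered model name ("baltimore-model", "BaltimoreModel") are placeholders, and depending on the MLflow version the Keras flavor may be exposed as mlflow.tensorflow.log_model instead of mlflow.keras.log_model:

from urllib.parse import urlparse

import mlflow
import pandas as pd

# Sketch only: `history`, `model` and `x_train` are assumed to exist as in train_model().
hist = pd.DataFrame(history.history)
hist['epoch'] = history.epoch

# Log the final-epoch values rather than whole pandas Series objects.
mlflow.log_metric("loss", float(hist['loss'].iloc[-1]))
mlflow.log_metric("accuracy", float(hist['accuracy'].iloc[-1]))

# Infer the model signature from the actual training inputs.
signature = mlflow.models.signature.infer_signature(x_train, model.predict(x_train))

# Register the model only when a real tracking server (not a local file store) is used.
tracking_url_type_store = urlparse(mlflow.get_tracking_uri()).scheme
if tracking_url_type_store != "file":
    # "baltimore-model" / "BaltimoreModel" are placeholder names, not from the repository.
    mlflow.keras.log_model(model, "baltimore-model", registered_model_name="BaltimoreModel", signature=signature)
else:
    mlflow.keras.log_model(model, "model", signature=signature)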