Add MLflow

parent c9c4ac204c
commit 197efd7bcb
@@ -18,6 +18,7 @@ RUN pip3 install matplotlib
 RUN pip3 install --no-cache-dir tensorflow
 RUN pip3 install sacred
 RUN pip3 install pymongo
+RUN pip3 install mlflow

 CMD ./run.sh

Jenkinsfile (vendored): 6 changed lines
@@ -18,4 +18,10 @@ pipeline {
             }
         }
     }
+    post {
+        success {
+
+            build job: 's434684-training/master'
+        }
+    }
 }
MLproject (new file): 10 lines
@@ -0,0 +1,10 @@
+name: 434684-mlflow
+
+docker_env:
+  image: zollinka/ium:latest
+
+entry_points:
+  main:
+    parameters:
+      learning_rate: {type: float, default: 0.0001}
+    command: "python3 ium_zadanie6_training.py {learning_rate}"
@@ -10,6 +10,7 @@ from sacred.observers import FileStorageObserver, MongoObserver
 from sacred import Experiment
 from sacred.observers import MongoObserver
 from datetime import datetime
+import mlflow


 ex = Experiment("434684", interactive=False, save_git_info=False)
@@ -23,34 +24,38 @@ def my_config():

 @ex.capture
 def prepare_train_model(learning_rate, _run):
-    _run.info["prepare_model"] = str(datetime.now())
-
-    movies_train = pd.read_csv('movies_train.csv')
-
-    x_train = movies_train.copy()
-    y_train = x_train.pop('rottentomatoes_audience_score')
-    x_train.pop('Unnamed: 0')
-
-    learning_rate = float(sys.argv[1])
-    model = Sequential()
-    model.add(layers.Input(shape=(22,)))
-    model.add(layers.Dense(64))
-    model.add(layers.Dense(64))
-    model.add(layers.Dense(32))
-    model.add(layers.Dense(1))
-
-    model.compile(loss='mean_absolute_error', optimizer=Adam(learning_rate))
-
-    _run.info["train model"] = str(datetime.now())
-
-    history = model.fit(
-        x = convert_to_tensor(x_train, np.float32),
-        y = y_train,
-        verbose=0, epochs=99)
-
-    loss = history.history['loss'][-1]
-    _run.info["Loss"] = str(loss)
-    model.save('model_movies.h5')
+    with mlflow.start_run():
+        _run.info["prepare_model"] = str(datetime.now())
+
+        movies_train = pd.read_csv('movies_train.csv')
+
+        x_train = movies_train.copy()
+        y_train = x_train.pop('rottentomatoes_audience_score')
+        x_train.pop('Unnamed: 0')
+
+        learning_rate = float(sys.argv[1])
+        model = Sequential()
+        model.add(layers.Input(shape=(22,)))
+        model.add(layers.Dense(64))
+        model.add(layers.Dense(64))
+        model.add(layers.Dense(32))
+        model.add(layers.Dense(1))
+
+        mlflow.log_param("learning_rate", learning_rate)
+
+        model.compile(loss='mean_absolute_error', optimizer=Adam(learning_rate))
+
+        _run.info["train model"] = str(datetime.now())
+
+        history = model.fit(
+            x = convert_to_tensor(x_train, np.float32),
+            y = y_train,
+            verbose=0, epochs=99)
+
+        loss = history.history['loss'][-1]
+        _run.info["Loss"] = str(loss)
+        mlflow.log_metric("Loss", loss)
+        model.save('model_movies.h5')


 @ex.main
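
Stripped of the Sacred and Keras details, the pattern this commit introduces in the training function is MLflow's run-scoped tracking: open a run with mlflow.start_run() as a context manager, log hyperparameters with log_param, and log results with log_metric. The sketch below is a minimal, self-contained illustration against the default local ./mlruns store; the values are placeholders, not results from this repository.

# Minimal sketch of the tracking pattern added in this commit (placeholder values).
import mlflow

with mlflow.start_run():                          # the run is ended automatically on exit
    mlflow.log_param("learning_rate", 0.0001)     # hyperparameters are logged once per run
    final_loss = 12.3                             # stand-in for history.history['loss'][-1]
    mlflow.log_metric("Loss", final_loss)         # metrics may be logged repeatedly over time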
ium_zadanie8.py (new file): 46 lines
@@ -0,0 +1,46 @@
+import sys
+from tensorflow.keras import layers, Sequential
+# from keras.layers import Flatten,Dense,Dropout, GlobalAveragePooling2D
+from tensorflow.keras.optimizers import Adam
+from tensorflow import convert_to_tensor
+import numpy as np
+import pandas as pd
+from sklearn.metrics import mean_squared_error
+from sacred.observers import FileStorageObserver, MongoObserver
+from sacred import Experiment
+from sacred.observers import MongoObserver
+from datetime import datetime
+import mlflow
+
+with mlflow.start_run():
+    learning_rate = float(sys.argv[1])
+
+    movies_train = pd.read_csv('movies_train.csv')
+
+    x_train = movies_train.copy()
+    y_train = x_train.pop('rottentomatoes_audience_score')
+    x_train.pop('Unnamed: 0')
+
+    learning_rate = float(sys.argv[1])
+    model = Sequential()
+    model.add(layers.Input(shape=(22,)))
+    model.add(layers.Dense(64))
+    model.add(layers.Dense(64))
+    model.add(layers.Dense(32))
+    model.add(layers.Dense(1))
+
+    mlflow.log_param("learning_rate", learning_rate)
+
+    model.compile(loss='mean_absolute_error', optimizer=Adam(learning_rate))
+
+
+    history = model.fit(
+        x = convert_to_tensor(x_train, np.float32),
+        y = y_train,
+        verbose=0, epochs=99)
+
+    loss = history.history['loss'][-1]
+
+    mlflow.log_metric("Loss", loss)
+    model.save('model_movies.h5')
+
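
As a follow-up illustration (not part of the commit), once ium_zadanie8.py has executed, the parameter and metric it logs can be read back from the local tracking store (./mlruns by default) with MLflow's search API. The column names follow MLflow's params./metrics. prefixing; the assumption here is that at least one run has completed in the default experiment.

# Illustrative read-back of what a run of ium_zadanie8.py would log locally.
# Assumes at least one completed run in the default experiment; not committed code.
import mlflow

runs = mlflow.search_runs()                      # pandas DataFrame, one row per run
if not runs.empty:
    run = runs.iloc[0]                           # one of the recorded runs
    print(run.get("params.learning_rate"))       # value from mlflow.log_param
    print(run.get("metrics.Loss"))               # last value from mlflow.log_metric("Loss", ...)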