Adding mlflow
This commit is contained in:
parent
340888294d
commit
b82917a5d9
10
MLproject
Normal file
10
MLproject
Normal file
@ -0,0 +1,10 @@
|
||||
name: 434704
|
||||
|
||||
docker_env:
|
||||
image: jarmosz/ium
|
||||
entry_points:
|
||||
main:
|
||||
parameters:
|
||||
verbose: {type: int, default: 100}
|
||||
epochs: {type: int, default: 0}
|
||||
command: "python3 mlflow_exp.py {epochs} {verbose}"
|
65
mlflow_exp.py
Normal file
65
mlflow_exp.py
Normal file
@ -0,0 +1,65 @@
|
||||
"""Train and evaluate a movie-rating regression model, tracked with MLflow.

Reads the pre-split CSVs ``MoviesOnStreamingPlatforms_updated.train`` /
``.test`` from the working directory, fits a small normalized MLP on three
numeric features to predict the ``IMDb`` score, saves the model to
``linear_regression.h5``, and logs params and the test RMSE to MLflow.

NOTE(review): the MLproject command is ``python3 mlflow_exp.py {epochs}
{verbose}``, yet this script reads argv[0] as *verbose* and argv[1] as
*epochs*. With the MLproject defaults (verbose: 100, epochs: 0) the two
swaps cancel out, so runs effectively train 100 epochs silently — confirm
the intended order before changing either file; fixing only one side would
break training.
"""
import sys

import pandas as pd
import numpy as np
import tensorflow as tf
import os.path

import mlflow

from tensorflow import keras
from tensorflow.keras import layers
from tensorflow.keras.layers.experimental import preprocessing


with mlflow.start_run():

    # Positional CLI arguments; see module docstring for the order caveat.
    arguments = sys.argv[1:]

    verbose = int(arguments[0])
    epochs = int(arguments[1])

    # Record hyperparameters on the active MLflow run.
    mlflow.log_param("verbose", verbose)
    mlflow.log_param("epochs", epochs)
    pd.set_option("display.max_columns", None)

    # Load the training split.
    train_data = pd.read_csv("./MoviesOnStreamingPlatforms_updated.train")

    # Build the model: normalize the numeric features, then a small MLP
    # regressor with a single linear output (predicted IMDb score).
    columns_to_use = ['Year', 'Runtime', 'Netflix']
    train_X = tf.convert_to_tensor(train_data[columns_to_use])
    train_Y = tf.convert_to_tensor(train_data[["IMDb"]])

    # Derive the input width from the feature list instead of hard-coding 3.
    normalizer = preprocessing.Normalization(input_shape=[len(columns_to_use), ])
    normalizer.adapt(train_X)

    model = keras.Sequential([
        keras.Input(shape=(len(columns_to_use),)),
        normalizer,
        layers.Dense(30, activation='relu'),
        layers.Dense(10, activation='relu'),
        layers.Dense(25, activation='relu'),
        layers.Dense(1)
    ])

    model.compile(loss='mean_absolute_error',
                  optimizer=tf.keras.optimizers.Adam(0.001),
                  metrics=[tf.keras.metrics.RootMeanSquaredError()])

    model.fit(train_X, train_Y, verbose=verbose, epochs=epochs)

    model.save('linear_regression.h5')

    # Evaluation on the held-out test split, reusing the same feature columns.
    test_data = pd.read_csv("./MoviesOnStreamingPlatforms_updated.test")

    test_X = tf.convert_to_tensor(test_data[columns_to_use])
    test_Y = tf.convert_to_tensor(test_data[["IMDb"]])

    scores = model.evaluate(x=test_X,
                            y=test_Y)

    # evaluate() returns [loss, metric]; index 1 is RootMeanSquaredError.
    mlflow.log_metric("RMSE", scores[1])
|
15
mlruns/0/023db6388f714cdbb05c20e41895edab/meta.yaml
Normal file
15
mlruns/0/023db6388f714cdbb05c20e41895edab/meta.yaml
Normal file
@ -0,0 +1,15 @@
|
||||
artifact_uri: file:///Volumes/seagate/ium_434704/mlruns/0/023db6388f714cdbb05c20e41895edab/artifacts
|
||||
end_time: null
|
||||
entry_point_name: ''
|
||||
experiment_id: '0'
|
||||
lifecycle_stage: active
|
||||
name: ''
|
||||
run_id: 023db6388f714cdbb05c20e41895edab
|
||||
run_uuid: 023db6388f714cdbb05c20e41895edab
|
||||
source_name: ''
|
||||
source_type: 4
|
||||
source_version: ''
|
||||
start_time: 1621096782764
|
||||
status: 1
|
||||
tags: []
|
||||
user_id: wj
|
1
mlruns/0/023db6388f714cdbb05c20e41895edab/params/epochs
Normal file
1
mlruns/0/023db6388f714cdbb05c20e41895edab/params/epochs
Normal file
@ -0,0 +1 @@
|
||||
0
|
1
mlruns/0/023db6388f714cdbb05c20e41895edab/params/verbose
Normal file
1
mlruns/0/023db6388f714cdbb05c20e41895edab/params/verbose
Normal file
@ -0,0 +1 @@
|
||||
100
|
@ -0,0 +1 @@
|
||||
https://git.wmi.amu.edu.pl/s434704/ium_434704.git
|
@ -0,0 +1 @@
|
||||
main
|
@ -0,0 +1 @@
|
||||
docker
|
@ -0,0 +1 @@
|
||||
340888294d4cc3b500347513e4793df41b1db84d
|
@ -0,0 +1 @@
|
||||
https://git.wmi.amu.edu.pl/s434704/ium_434704.git
|
@ -0,0 +1 @@
|
||||
file:///Volumes/seagate/ium_434704
|
@ -0,0 +1 @@
|
||||
PROJECT
|
@ -0,0 +1 @@
|
||||
wj
|
4
mlruns/0/meta.yaml
Normal file
4
mlruns/0/meta.yaml
Normal file
@ -0,0 +1,4 @@
|
||||
artifact_location: file:///Volumes/seagate/ium_434704/mlruns/0
|
||||
experiment_id: '0'
|
||||
lifecycle_stage: active
|
||||
name: Default
|
Loading…
Reference in New Issue
Block a user