Save RMSE as metrics

parent 276a9ea711
commit 340888294d
@@ -22,7 +22,7 @@ def my_config():
     epochs = 100

 @exp.capture
-def training(verbose, epochs, _log):
+def training(verbose, epochs, _log, _run):

     pd.set_option("display.max_columns", None)
@@ -68,11 +68,10 @@ def training(verbose, epochs, _log):
     scores = model.evaluate(x=test_X,
                             y=test_Y)

     evaluation_info = f"RMSE: {scores[1]}"
     _log.info(evaluation_info)
+    _run.log_scalar("training.RMSE", scores[1])

 @exp.automain
-def run(verbose, epochs, _run):
+def run(verbose, epochs):
     training()

 runner = exp.run()
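With this change the captured training function receives Sacred's special `_run` object and records the evaluation RMSE through `_run.log_scalar`, so the value is stored by the observers as a metric rather than only appearing in the log text. A minimal sketch of how the same metric could additionally be logged once per epoch, assuming the `exp` experiment object from sacred_exp.py and a compiled Keras `model` passed in explicitly (the function name and callback below are illustrative, not part of this commit):

import tensorflow as tf

@exp.capture
def training_with_history(model, train_X, train_Y, verbose, epochs, _run):
    # Sketch: push the Keras RMSE to Sacred after every epoch so the metric
    # gets a full history instead of a single post-training value.
    log_cb = tf.keras.callbacks.LambdaCallback(
        on_epoch_end=lambda epoch, logs: _run.log_scalar(
            "training.RMSE", logs["root_mean_squared_error"], epoch))
    model.fit(train_X, train_Y, verbose=verbose, epochs=epochs, callbacks=[log_cb])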
@@ -1,5 +0,0 @@
{
  "epochs": 100,
  "seed": 80188794,
  "verbose": 0
}
@@ -1,11 +0,0 @@
INFO - s434704 - Running command 'run'
INFO - s434704 - Started run with ID "1"
2021-05-15 16:30:17.771747: I tensorflow/core/platform/cpu_feature_guard.cc:142] This TensorFlow binary is optimized with oneAPI Deep Neural Network Library (oneDNN) to use the following CPU instructions in performance-critical operations: AVX2 FMA
To enable them in other operations, rebuild TensorFlow with the appropriate compiler flags.
2021-05-15 16:30:18.525767: I tensorflow/compiler/mlir/mlir_graph_optimization_pass.cc:176] None of the MLIR Optimization Passes are enabled (registered 2)
WARNING:tensorflow:Please add `keras.layers.InputLayer` instead of `keras.Input` to Sequential model. `keras.Input` is intended to be used by Functional model.
WARNING - tensorflow - Please add `keras.layers.InputLayer` instead of `keras.Input` to Sequential model. `keras.Input` is intended to be used by Functional model.
INFO - training - Verbose: 0, Epochs: 100
 1/11 [=>............................] - ETA: 1s - loss: 0.0957 - root_mean_squared_error: 0.1177
11/11 [==============================] - 0s 674us/step - loss: 0.1033 - root_mean_squared_error: 0.1313
INFO - training - RMSE: 0.1313309669494629
INFO - s434704 - Completed after 0:00:08
@@ -1 +0,0 @@
{}
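The metrics.json files deleted here are empty because these runs were made before `_run.log_scalar` was called. After this commit, a new run's metrics.json should contain the logged RMSE; a minimal sketch of reading it back, assuming Sacred's FileStorageObserver layout under sacred_file/<run_id>/ (the run id and the exact stored structure are assumptions, not taken from this diff):

import json

# Sketch: inspect the metric written by _run.log_scalar("training.RMSE", ...)
# for a hypothetical new run recorded by FileStorageObserver("sacred_file").
with open("sacred_file/3/metrics.json") as f:
    metrics = json.load(f)

# Assumed shape: {"training.RMSE": {"steps": [...], "timestamps": [...], "values": [...]}}
print(metrics["training.RMSE"]["values"])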
@@ -1,66 +0,0 @@
{
  "artifacts": [],
  "command": "run",
  "experiment": {
    "base_dir": "/Volumes/seagate/ium_434704",
    "dependencies": [
      "numpy==1.19.5",
      "pandas==1.2.4",
      "sacred==0.8.2",
      "tensorflow==2.5.0rc1"
    ],
    "mainfile": "sacred_exp.py",
    "name": "s434704",
    "repositories": [],
    "sources": [
      [
        "sacred_exp.py",
        "_sources/sacred_exp_8150ed54d93299dfccf6867ea7220971.py"
      ]
    ]
  },
  "heartbeat": "2021-05-15T14:30:25.850078",
  "host": {
    "ENV": {},
    "cpu": "Intel(R) Core(TM) i9-9880H CPU @ 2.30GHz",
    "hostname": "Wojciechs-MacBook-Pro.local",
    "os": [
      "Darwin",
      "macOS-11.2.1-x86_64-i386-64bit"
    ],
    "python_version": "3.9.1"
  },
  "meta": {
    "command": "run",
    "options": {
      "--beat-interval": null,
      "--capture": null,
      "--comment": null,
      "--debug": false,
      "--enforce_clean": false,
      "--file_storage": null,
      "--force": false,
      "--help": false,
      "--loglevel": null,
      "--mongo_db": null,
      "--name": null,
      "--pdb": false,
      "--print-config": false,
      "--priority": null,
      "--queue": false,
      "--s3": null,
      "--sql": null,
      "--tiny_db": null,
      "--unobserved": false,
      "COMMAND": null,
      "UPDATE": [],
      "help": false,
      "with": false
    }
  },
  "resources": [],
  "result": null,
  "start_time": "2021-05-15T14:30:17.351901",
  "status": "COMPLETED",
  "stop_time": "2021-05-15T14:30:25.848159"
}
@@ -1,5 +0,0 @@
{
  "epochs": 100,
  "seed": 426629893,
  "verbose": 0
}
@@ -1,8 +0,0 @@
INFO - s434704 - Running command 'run'
INFO - s434704 - Started run with ID "2"
WARNING:tensorflow:Please add `keras.layers.InputLayer` instead of `keras.Input` to Sequential model. `keras.Input` is intended to be used by Functional model.
WARNING - tensorflow - Please add `keras.layers.InputLayer` instead of `keras.Input` to Sequential model. `keras.Input` is intended to be used by Functional model.
INFO - training - Verbose: 0, Epochs: 100
 1/11 [=>............................] - ETA: 0s - loss: 0.0914 - root_mean_squared_error: 0.1140
11/11 [==============================] - 0s 638us/step - loss: 0.1024 - root_mean_squared_error: 0.1294
INFO - training - RMSE: 0.12944550812244415
INFO - s434704 - Completed after 0:00:05
Binary file not shown.
@@ -1 +0,0 @@
{}
@@ -1,64 +0,0 @@
{
  "artifacts": [
    "linear_regression.h5"
  ],
  "command": "run",
  "experiment": {
    "base_dir": "/Volumes/seagate/ium_434704",
    "dependencies": [
      "numpy==1.19.5",
      "pandas==1.2.4",
      "sacred==0.8.2",
      "tensorflow==2.5.0rc1"
    ],
    "mainfile": "sacred_exp.py",
    "name": "s434704",
    "repositories": [],
    "sources": [
      [
        "sacred_exp.py",
        "_sources/sacred_exp_8150ed54d93299dfccf6867ea7220971.py"
      ]
    ]
  },
  "heartbeat": "2021-05-15T14:30:31.335228",
  "host": {
    "ENV": {},
    "cpu": "Intel(R) Core(TM) i9-9880H CPU @ 2.30GHz",
    "hostname": "Wojciechs-MacBook-Pro.local",
    "os": [
      "Darwin",
      "macOS-11.2.1-x86_64-i386-64bit"
    ],
    "python_version": "3.9.1"
  },
  "meta": {
    "command": "run",
    "options": {
      "--beat-interval": null,
      "--capture": null,
      "--comment": null,
      "--debug": false,
      "--enforce_clean": false,
      "--file_storage": null,
      "--force": false,
      "--help": false,
      "--loglevel": null,
      "--mongo_db": null,
      "--name": null,
      "--pdb": false,
      "--print-config": false,
      "--priority": null,
      "--queue": false,
      "--s3": null,
      "--sql": null,
      "--tiny_db": null,
      "--unobserved": false
    }
  },
  "resources": [],
  "result": null,
  "start_time": "2021-05-15T14:30:25.893032",
  "status": "COMPLETED",
  "stop_time": "2021-05-15T14:30:31.333523"
}
@@ -1,80 +0,0 @@
import sys

import pandas as pd
import numpy as np
import tensorflow as tf
import os.path

from sacred import Experiment
from sacred.observers import FileStorageObserver, MongoObserver

from tensorflow import keras
from tensorflow.keras import layers
from tensorflow.keras.layers.experimental import preprocessing

exp = Experiment("s434704", interactive=False, save_git_info=False)
exp.observers.append(FileStorageObserver("sacred_file"))
# exp.observers.append(MongoObserver(url='mongodb://mongo_user:mongo_password_IUM_2021@172.17.0.1:27017', db_name="sacred"))

@exp.config
def my_config():
    verbose = 0
    epochs = 100

@exp.capture
def training(verbose, epochs, _log):

    pd.set_option("display.max_columns", None)

    # Load the data
    train_data = pd.read_csv("./MoviesOnStreamingPlatforms_updated.train")

    # Build the model
    columns_to_use = ['Year', 'Runtime', 'Netflix']
    train_X = tf.convert_to_tensor(train_data[columns_to_use])
    train_Y = tf.convert_to_tensor(train_data[["IMDb"]])

    normalizer = preprocessing.Normalization(input_shape=[3,])
    normalizer.adapt(train_X)

    model = keras.Sequential([
        keras.Input(shape=(len(columns_to_use),)),
        normalizer,
        layers.Dense(30, activation='relu'),
        layers.Dense(10, activation='relu'),
        layers.Dense(25, activation='relu'),
        layers.Dense(1)
    ])

    model.compile(loss='mean_absolute_error',
                  optimizer=tf.keras.optimizers.Adam(0.001),
                  metrics=[tf.keras.metrics.RootMeanSquaredError()])

    params = f"Verbose: {verbose}, Epochs: {epochs}"
    _log.info(params)

    model.fit(train_X, train_Y, verbose=verbose, epochs=epochs)

    model.save('linear_regression.h5')

    # Evaluation

    test_data = pd.read_csv("./MoviesOnStreamingPlatforms_updated.test")

    columns_to_use = ['Year', 'Runtime', 'Netflix']
    test_X = tf.convert_to_tensor(test_data[columns_to_use])
    test_Y = tf.convert_to_tensor(test_data[["IMDb"]])

    scores = model.evaluate(x=test_X,
                            y=test_Y)

    evaluation_info = f"RMSE: {scores[1]}"
    _log.info(evaluation_info)

@exp.automain
def run(verbose, epochs, _run):
    training()

runner = exp.run()
exp.add_source_file("./training.py")
exp.add_artifact("linear_regression.h5")
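The deleted source snapshot above shows the run attaching the trained network to the experiment with exp.add_artifact("linear_regression.h5"), and run.json lists it under "artifacts". A minimal sketch of loading that saved artifact back for later evaluation, assuming a local copy of linear_regression.h5 written by one of these runs (the path is an assumption, not something this commit adds):

from tensorflow import keras

# Sketch: reload the model saved by model.save('linear_regression.h5')
# and attached to the Sacred run as an artifact.
model = keras.models.load_model("linear_regression.h5")
model.summary()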