fixed saving sacred runs

This commit is contained in:
patrycjalazna 2021-05-15 12:59:38 +02:00
parent bad2a78013
commit 9dab0233db
7 changed files with 182 additions and 2 deletions

View File

@ -57,7 +57,7 @@ pipeline {
steps{
archiveArtifacts 'eval_results.txt'
archiveArtifacts 'eval_plot.png'
archiveArtifacts 'my_runs/*'
archiveArtifacts 'sacred_runs/**'
}
}

5
my_runs/4/config.json Normal file
View File

@ -0,0 +1,5 @@
{
"batch_size": 16,
"epochs": 10,
"seed": 347156927
}

18
my_runs/4/cout.txt Normal file
View File

@ -0,0 +1,18 @@
INFO - 434742-file - Running command 'my_main'
INFO - 434742-file - Started run with ID "4"
9
2021-05-15 12:56:10.913293: I tensorflow/core/platform/cpu_feature_guard.cc:142] This TensorFlow binary is optimized with oneAPI Deep Neural Network Library (oneDNN) to use the following CPU instructions in performance-critical operations: AVX2 FMA
To enable them in other operations, rebuild TensorFlow with the appropriate compiler flags.
2021-05-15 12:56:11.240918: I tensorflow/compiler/mlir/mlir_graph_optimization_pass.cc:176] None of the MLIR Optimization Passes are enabled (registered 2)
Epoch 1/10
1/375 [..............................] - ETA: 2:39:04 - loss: 0.6933 - accuracy: 0.5000 47/375 [==>...........................] - ETA: 0s - loss: 0.6929 - accuracy: 0.4739  93/375 [======>.......................] - ETA: 0s - loss: 0.6925 - accuracy: 0.4976 139/375 [==========>...................] - ETA: 0s - loss: 0.6919 - accuracy: 0.5453 189/375 [==============>...............] - ETA: 0s - loss: 0.6910 - accuracy: 0.5839 239/375 [==================>...........] - ETA: 0s - loss: 0.6899 - accuracy: 0.6141 284/375 [=====================>........] - ETA: 0s - loss: 0.6886 - accuracy: 0.6368 335/375 [=========================>....] - ETA: 0s - loss: 0.6870 - accuracy: 0.6577 375/375 [==============================] - 27s 3ms/step - loss: 0.6855 - accuracy: 0.6718 - val_loss: 0.6227 - val_accuracy: 0.8720
Epoch 2/10
1/375 [..............................] - ETA: 0s - loss: 0.6240 - accuracy: 1.0000 23/375 [>.............................] - ETA: 0s - loss: 0.6206 - accuracy: 0.9091 62/375 [===>..........................] - ETA: 0s - loss: 0.6215 - accuracy: 0.8915 94/375 [======>.......................] - ETA: 0s - loss: 0.6186 - accuracy: 0.8912 129/375 [=========>....................] - ETA: 0s - loss: 0.6156 - accuracy: 0.8901 165/375 [============>.................] - ETA: 0s - loss: 0.6127 - accuracy: 0.8887 212/375 [===============>..............] - ETA: 0s - loss: 0.6089 - accuracy: 0.8872 257/375 [===================>..........] - ETA: 0s - loss: 0.6052 - accuracy: 0.8867 304/375 [=======================>......] - ETA: 0s - loss: 0.6012 - accuracy: 0.8866 352/375 [===========================>..] - ETA: 0s - loss: 0.5970 - accuracy: 0.8866 375/375 [==============================] - 1s 2ms/step - loss: 0.5949 - accuracy: 0.8866 - val_loss: 0.4903 - val_accuracy: 0.9040
Epoch 3/10
1/375 [..............................] - ETA: 0s - loss: 0.4609 - accuracy: 0.9375 31/375 [=>............................] - ETA: 0s - loss: 0.4950 - accuracy: 0.8832 79/375 [=====>........................] - ETA: 0s - loss: 0.4923 - accuracy: 0.8815 126/375 [=========>....................] - ETA: 0s - loss: 0.4901 - accuracy: 0.8802 171/375 [============>.................] - ETA: 0s - loss: 0.4869 - accuracy: 0.8797 217/375 [================>.............] - ETA: 0s - loss: 0.4840 - accuracy: 0.8798 260/375 [===================>..........] - ETA: 0s - loss: 0.4811 - accuracy: 0.8807 308/375 [=======================>......] - ETA: 0s - loss: 0.4779 - accuracy: 0.8818 355/375 [===========================>..] - ETA: 0s - loss: 0.4746 - accuracy: 0.8830 375/375 [==============================] - 1s 1ms/step - loss: 0.4732 - accuracy: 0.8834 - val_loss: 0.3904 - val_accuracy: 0.9075
Epoch 4/10
1/375 [..............................] - ETA: 0s - loss: 0.4200 - accuracy: 0.9375 40/375 [==>...........................] - ETA: 0s - loss: 0.4032 - accuracy: 0.9083 92/375 [======>.......................] - ETA: 0s - loss: 0.3957 - accuracy: 0.9041 145/375 [==========>...................] - ETA: 0s - loss: 0.3931 - accuracy: 0.9020 195/375 [==============>...............] - ETA: 0s - loss: 0.3905 - accuracy: 0.9013 248/375 [==================>...........] - ETA: 0s - loss: 0.3886 - accuracy: 0.8999 295/375 [======================>.......] - ETA: 0s - loss: 0.3872 - accuracy: 0.8988 345/375 [==========================>...] - ETA: 0s - loss: 0.3856 - accuracy: 0.8981 375/375 [==============================] - 0s 1ms/step - loss: 0.3844 - accuracy: 0.8977 - val_loss: 0.3302 - val_accuracy: 0.9100
Epoch 5/10
1/375 [..............................] - ETA: 0s - loss: 0.3368 - accuracy: 0.8750 39/375 [==>...........................] - ETA: 0s - loss: 0.3672 - accuracy: 0.8816 88/375 [======>.......................] - ETA: 0s - loss: 0.3620 - accuracy: 0.8832 140/375 [==========>...................] - ETA: 0s - loss: 0.3537 - accuracy: 0.8875 191/375 [==============>...............] - ETA: 0s - loss: 0.3490 - accuracy: 0.8900 240/375 [==================>...........] - ETA: 0s - loss: 0.3459 - accuracy: 0.8911 289/375 [======================>.......] - ETA: 0s - loss: 0.3436 - accuracy: 0.8918 340/375 [==========================>...] - ETA: 0s - loss: 0.3417 - accuracy: 0.8920 375/375 [==============================] - 0s 1ms/step - loss: 0.3404 - accuracy: 0.8921 - val_loss: 0.2945 - val_accuracy: 0.9100
Epoch 6/10
1/375 [..............................] - ETA: 1s - loss: 0.2755 - accuracy: 0.9375 24/375 [>.............................] - ETA: 0s - loss: 0.3345 - accuracy: 0.8822 55/375 [===>..........................] - ETA: 0s - loss: 0.3241 - accuracy: 0.8859 84/375 [=====>........................] - ETA: 0s - loss: 0.3210 - accuracy: 0.8850 94/375 [======>.......................] - ETA: 0s - loss: 0.3203 - accuracy: 0.8852 105/375 [=======>......................] - ETA: 0s - loss: 0.3193 - accuracy: 0.8857 124/375 [========>.....................] - ETA: 0s - loss: 0.3183 - accuracy: 0.8859 150/375 [===========>..................] - ETA: 0s - loss: 0.3178 - accuracy: 0.8855 179/375 [=============>................] - ETA: 0s - loss: 0.3175 - accuracy: 0.8853 211/375 [===============>..............] - ETA: 0s - loss: 0.3166 - accuracy: 0.8857 239/375 [==================>...........] - ETA: 0s - loss: 0.3155 - accuracy: 0.8861 253/375 [===================>..........] - ETA: 0s - loss: 0.3150 - accuracy: 0.8863 275/375 [=====================>........] - ETA: 0s - loss: 0.3141 - accuracy: 0.8867 301/375 [=======================>......] - ETA: 0s - loss: 0.3131 - accuracy: 0.8871 329/375 [=========================>....] - ETA: 0s - loss: 0.3121 - accuracy: 0.8876 349/375 [==========================>...] - ETA: 0s - loss: 0.3114 - accuracy: 0.8879 372/375 [============================>.] - ETA: 0s - loss: 0.3107 - accuracy: 0.8882

1
my_runs/4/metrics.json Normal file
View File

@ -0,0 +1 @@
{}

85
my_runs/4/run.json Normal file
View File

@ -0,0 +1,85 @@
{
"artifacts": [],
"command": "my_main",
"experiment": {
"base_dir": "/Users/patrycjalazna/Desktop/inzynieria-uczenia-maszynowego/zadanka",
"dependencies": [
"keras-nightly==2.5.0.dev2021032900",
"numpy==1.19.5",
"pandas==1.2.3",
"sacred==0.8.2",
"scikit-learn==0.24.1",
"tensorflow==2.5.0rc1"
],
"mainfile": "sacred-fileobserver.py",
"name": "434742-file",
"repositories": [],
"sources": [
[
"sacred-fileobserver.py",
"_sources/sacred-fileobserver_3e3cbf02c631699c29e47a557c9a608d.py"
]
]
},
"fail_trace": [
"Traceback (most recent call last):\n",
" File \"/usr/local/lib/python3.9/site-packages/sacred/config/captured_function.py\", line 42, in captured_function\n result = wrapped(*args, **kwargs)\n",
" File \"/Users/patrycjalazna/Desktop/inzynieria-uczenia-maszynowego/zadanka/sacred-fileobserver.py\", line 68, in my_main\n print(prepare_model())\n",
" File \"/usr/local/lib/python3.9/site-packages/sacred/config/captured_function.py\", line 42, in captured_function\n result = wrapped(*args, **kwargs)\n",
" File \"/Users/patrycjalazna/Desktop/inzynieria-uczenia-maszynowego/zadanka/sacred-fileobserver.py\", line 60, in prepare_model\n model.save('avocado_model.h5')\n",
" File \"/usr/local/lib/python3.9/site-packages/keras/engine/training.py\", line 2086, in save\n save.save_model(self, filepath, overwrite, include_optimizer, save_format,\n",
" File \"/usr/local/lib/python3.9/site-packages/keras/saving/save.py\", line 146, in save_model\n hdf5_format.save_model_to_hdf5(\n",
" File \"/usr/local/lib/python3.9/site-packages/keras/saving/hdf5_format.py\", line 110, in save_model_to_hdf5\n model_metadata = saving_utils.model_metadata(model, include_optimizer)\n",
" File \"/usr/local/lib/python3.9/site-packages/keras/saving/saving_utils.py\", line 152, in model_metadata\n raise e\n",
" File \"/usr/local/lib/python3.9/site-packages/keras/saving/saving_utils.py\", line 149, in model_metadata\n model_config['config'] = model.get_config()\n",
" File \"/usr/local/lib/python3.9/site-packages/keras/engine/sequential.py\", line 471, in get_config\n layer_configs.append(generic_utils.serialize_keras_object(layer))\n",
" File \"/usr/local/lib/python3.9/site-packages/keras/utils/generic_utils.py\", line 508, in serialize_keras_object\n raise e\n",
" File \"/usr/local/lib/python3.9/site-packages/keras/utils/generic_utils.py\", line 503, in serialize_keras_object\n config = instance.get_config()\n",
" File \"/usr/local/lib/python3.9/site-packages/keras/engine/base_layer.py\", line 695, in get_config\n raise NotImplementedError('Layer %s has arguments in `__init__` and '\n",
"NotImplementedError: Layer ModuleWrapper has arguments in `__init__` and therefore must override `get_config`.\n"
],
"heartbeat": "2021-05-15T10:56:44.017631",
"host": {
"ENV": {},
"cpu": "Intel(R) Core(TM) i5-5287U CPU @ 2.90GHz",
"hostname": "patrycjalazna.local",
"os": [
"Darwin",
"macOS-10.15.7-x86_64-i386-64bit"
],
"python_version": "3.9.4"
},
"meta": {
"command": "my_main",
"options": {
"--beat-interval": null,
"--capture": null,
"--comment": null,
"--debug": false,
"--enforce_clean": false,
"--file_storage": null,
"--force": false,
"--help": false,
"--loglevel": null,
"--mongo_db": null,
"--name": null,
"--pdb": false,
"--print-config": false,
"--priority": null,
"--queue": false,
"--s3": null,
"--sql": null,
"--tiny_db": null,
"--unobserved": false,
"COMMAND": null,
"UPDATE": [],
"help": false,
"with": false
}
},
"resources": [],
"result": null,
"start_time": "2021-05-15T10:56:10.769284",
"status": "FAILED",
"stop_time": "2021-05-15T10:56:44.020146"
}

View File

@ -0,0 +1,71 @@
# Imports for the avocado binary-classification sacred experiment.
import sys
from keras.backend import mean  # NOTE(review): unused in this file — candidate for removal
import pandas as pd
import numpy as np
from sklearn import preprocessing
from sklearn.linear_model import LinearRegression
from sklearn.metrics import mean_squared_error
import tensorflow as tf
from tensorflow import keras
from tensorflow.keras.layers import Input, Dense, Activation, Dropout
from tensorflow.keras.models import Model
from tensorflow.keras.callbacks import EarlyStopping
# Bug fix: import Sequential from tensorflow.keras, not standalone keras.
# Mixing the two packages wraps tf.keras layers in ModuleWrapper, and
# model.save() then fails with "NotImplementedError: Layer ModuleWrapper has
# arguments in `__init__` ..." — exactly the fail_trace recorded in
# my_runs/4/run.json of this commit.
from tensorflow.keras.models import Sequential
from sacred import Experiment
from sacred.observers import FileStorageObserver

# Sacred experiment; every run is persisted to ./my_runs by the FileStorageObserver.
ex = Experiment("434742-file", interactive=False, save_git_info=False)
ex.observers.append(FileStorageObserver('my_runs'))
@ex.config
def my_config():
    # Sacred config scope: these local assignments become the experiment's
    # configuration entries and are injected by name into captured functions
    # (prepare_model, my_main). Do not rename them — the names ARE the keys
    # that end up in my_runs/<id>/config.json.
    epochs = 10
    batch_size = 16
@ex.capture
def prepare_model(epochs, batch_size):
    """Train a binary classifier on the avocado dataset and return its test RMSE.

    Parameters (injected by sacred from the experiment config):
        epochs: number of training epochs passed to model.fit.
        batch_size: mini-batch size passed to model.fit.

    Returns:
        RMSE of the model's sigmoid outputs against the test labels.

    Side effects:
        Reads avocado_{train,test,validate}.csv from the working directory and
        saves the trained model to avocado_model.h5.
    """
    # Read the pre-split datasets from CSV files.
    avocado_train = pd.read_csv('avocado_train.csv')
    avocado_test = pd.read_csv('avocado_test.csv')
    avocado_validate = pd.read_csv('avocado_validate.csv')

    # Feature columns shared by the train and test splits.
    feature_cols = ['average_price', 'total_volume', '4046', '4225', '4770',
                    'total_bags', 'small_bags', 'large_bags', 'xlarge_bags']

    # Split into features X and target y ('type' is the binary label).
    X_train = avocado_train[feature_cols]
    y_train = avocado_train[['type']]
    X_test = avocado_test[feature_cols]
    y_test = avocado_test[['type']]
    print(X_train.shape[1])

    # Bug fix: build the model with tf.keras throughout. The original used
    # standalone keras' Sequential around tensorflow.keras layers, which wraps
    # them in ModuleWrapper and makes model.save() raise NotImplementedError
    # (see the fail_trace in my_runs/4/run.json).
    model = tf.keras.models.Sequential()
    model.add(Dense(9, input_dim=X_train.shape[1],
                    kernel_initializer='normal', activation='relu'))
    model.add(Dense(1, kernel_initializer='normal', activation='sigmoid'))
    early_stop = EarlyStopping(monitor="val_loss", mode="min", verbose=1, patience=10)

    # Compile for binary classification.
    model.compile(loss='binary_crossentropy', optimizer='adam', metrics=['accuracy'])

    # Bug fix: early_stop was constructed but never passed to fit(); wire it in.
    # (With patience=10 and epochs=10 it cannot fire early, so training is
    # unchanged in practice, but the intent is now honored.)
    model.fit(X_train, y_train, epochs=epochs, batch_size=batch_size,
              validation_data=(X_test, y_test), callbacks=[early_stop])

    # Evaluate on the test split.
    prediction = model.predict(X_test)

    # Bug fix: mean_squared_error returns MSE, but the variable (and the
    # experiment result) claims RMSE — take the square root.
    rmse = np.sqrt(mean_squared_error(y_test, prediction))

    # Persist the trained model so my_main can attach it as a sacred artifact.
    model.save('avocado_model.h5')
    return rmse
@ex.automain
def my_main(epochs, batch_size):
    """Sacred entry point: train the model and attach the saved model artifact.

    @ex.automain executes this function when the script is run, so it must be
    the last statement group in the file.
    """
    print(prepare_model())
    # Bug fix (this commit's subject): attach the artifact INSIDE the run.
    # The previous module-level `ex.run()` after @ex.automain started a
    # duplicate run, and `ex.add_artifact(...)` at module level could not be
    # associated with the run that actually produced the model.
    ex.add_artifact('avocado_model.h5')

View File

@ -15,7 +15,7 @@ from sacred import Experiment
from sacred.observers import FileStorageObserver
ex = Experiment("434742-file", interactive=False, save_git_info=False)
ex.observers.append(FileStorageObserver('my_runs'))
ex.observers.append(FileStorageObserver('sacred_runs/my_runs'))
@ex.config
def my_config():