Add params to prepare_model function
Some checks failed
s434742-training/pipeline/head There was a failure building this commit
s434742-evaluation/pipeline/head There was a failure building this commit

This commit is contained in:
patrycjalazna 2021-05-14 21:01:05 +02:00
parent 4fc45ed8ef
commit 89ee49bca3
19 changed files with 419 additions and 1 deletion

BIN
avocado_model.h5 Normal file

Binary file not shown.


5
my_runs/1/config.json Normal file

@@ -0,0 +1,5 @@
{
"batch_size": 16,
"epochs": 10,
"seed": 752365448
}
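The values in config.json come from the @ex.config function in sacred-training.py (epochs, batch_size); the seed entry is generated by Sacred itself and stored so the run can be reproduced. A minimal standalone sketch (hypothetical demo, not part of this repo) of how a captured function can consume that seed through Sacred's special _seed value:

# Hypothetical demo, not part of this repo: Sacred adds a `seed` entry to the
# config automatically; captured functions can request the derived special
# value `_seed` to make their own randomness reproducible per run.
import numpy as np
from sacred import Experiment

ex = Experiment("seed_demo")

@ex.config
def cfg():
    epochs = 10       # stored in config.json
    batch_size = 16   # stored in config.json

@ex.capture
def shuffled_indices(_seed):
    rng = np.random.default_rng(_seed)   # seeded from the run's seed
    return rng.permutation(10)

@ex.automain
def main():
    print(shuffled_indices())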

0
my_runs/1/cout.txt Normal file

1
my_runs/1/metrics.json Normal file

@@ -0,0 +1 @@
{}
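metrics.json is empty ({}) for every run here because the script never logs any metrics; Sacred only fills this file when the code calls the metrics API on the current run. A hedged sketch (toy experiment, not the repo's script) of logging a per-step scalar so my_runs/<id>/metrics.json gets populated:

# Toy experiment (assumption: the goal is only to see metrics.json filled in).
from sacred import Experiment
from sacred.observers import FileStorageObserver

ex = Experiment("metrics_demo")
ex.observers.append(FileStorageObserver("my_runs"))

@ex.config
def cfg():
    epochs = 3

@ex.capture
def train(epochs, _run):
    for step in range(epochs):
        fake_loss = 1.0 / (step + 1)              # stand-in for a real training loss
        _run.log_scalar("training.loss", fake_loss, step)

@ex.automain
def main():
    train()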

85
my_runs/1/run.json Normal file

@@ -0,0 +1,85 @@
{
"artifacts": [],
"command": "my_main",
"experiment": {
"base_dir": "/Users/patrycjalazna/Desktop/inzynieria-uczenia-maszynowego/zadanka",
"dependencies": [
"keras-nightly==2.5.0.dev2021032900",
"numpy==1.19.5",
"pandas==1.2.3",
"sacred==0.8.2",
"scikit-learn==0.24.1",
"tensorflow==2.5.0rc1"
],
"mainfile": "sacred-training.py",
"name": "file_observer",
"repositories": [
{
"commit": "4fc45ed8ef6201f91d818362878d3155dfe4295e",
"dirty": false,
"url": "https://git.wmi.amu.edu.pl/s434742/ium_434742.git"
}
],
"sources": [
[
"sacred-training.py",
"_sources/sacred-training_58bada47539aa2ee4591c9929ac8c60f.py"
]
]
},
"fail_trace": [
"Traceback (most recent call last):\n",
" File \"/usr/local/lib/python3.9/site-packages/sacred/config/captured_function.py\", line 42, in captured_function\n result = wrapped(*args, **kwargs)\n",
" File \"/Users/patrycjalazna/Desktop/inzynieria-uczenia-maszynowego/zadanka/sacred-training.py\", line 68, in my_main\n print(prepare_model())\n",
" File \"/usr/local/lib/python3.9/site-packages/sacred/config/captured_function.py\", line 42, in captured_function\n result = wrapped(*args, **kwargs)\n",
" File \"/Users/patrycjalazna/Desktop/inzynieria-uczenia-maszynowego/zadanka/sacred-training.py\", line 28, in prepare_model\n avocado_train = pd.read_csv('avocado_train.csv')\n",
" File \"/usr/local/lib/python3.9/site-packages/pandas/io/parsers.py\", line 610, in read_csv\n return _read(filepath_or_buffer, kwds)\n",
" File \"/usr/local/lib/python3.9/site-packages/pandas/io/parsers.py\", line 462, in _read\n parser = TextFileReader(filepath_or_buffer, **kwds)\n",
" File \"/usr/local/lib/python3.9/site-packages/pandas/io/parsers.py\", line 819, in __init__\n self._engine = self._make_engine(self.engine)\n",
" File \"/usr/local/lib/python3.9/site-packages/pandas/io/parsers.py\", line 1050, in _make_engine\n return mapping[engine](self.f, **self.options) # type: ignore[call-arg]\n",
" File \"/usr/local/lib/python3.9/site-packages/pandas/io/parsers.py\", line 1867, in __init__\n self._open_handles(src, kwds)\n",
" File \"/usr/local/lib/python3.9/site-packages/pandas/io/parsers.py\", line 1362, in _open_handles\n self.handles = get_handle(\n",
" File \"/usr/local/lib/python3.9/site-packages/pandas/io/common.py\", line 642, in get_handle\n handle = open(\n",
"FileNotFoundError: [Errno 2] No such file or directory: 'avocado_train.csv'\n"
],
"heartbeat": "2021-05-14T18:58:43.968999",
"host": {
"ENV": {},
"cpu": "Intel(R) Core(TM) i5-5287U CPU @ 2.90GHz",
"hostname": "patrycjalazna.local",
"os": [
"Darwin",
"macOS-10.15.7-x86_64-i386-64bit"
],
"python_version": "3.9.4"
},
"meta": {
"command": "my_main",
"options": {
"--beat-interval": null,
"--capture": null,
"--comment": null,
"--debug": false,
"--enforce_clean": false,
"--file_storage": null,
"--force": false,
"--help": false,
"--loglevel": null,
"--mongo_db": null,
"--name": null,
"--pdb": false,
"--print-config": false,
"--priority": null,
"--queue": false,
"--s3": null,
"--sql": null,
"--tiny_db": null,
"--unobserved": false
}
},
"resources": [],
"result": null,
"start_time": "2021-05-14T18:58:43.955201",
"status": "FAILED",
"stop_time": "2021-05-14T18:58:43.972665"
}
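Run 1 failed before any training started: pd.read_csv('avocado_train.csv') resolves the path against the current working directory, so the experiment only works when launched from the directory that contains the CSV splits. A hedged sketch (the helper name and the assumption that the CSVs sit next to the script are mine) of anchoring the paths to the script location instead:

# Hedged sketch: resolve the data splits relative to the script file rather
# than the current working directory. Layout (CSVs next to the script) is an
# assumption, not something the repo guarantees.
from pathlib import Path
import pandas as pd

DATA_DIR = Path(__file__).resolve().parent

def read_split(name: str) -> pd.DataFrame:
    path = DATA_DIR / f"avocado_{name}.csv"
    if not path.exists():
        raise FileNotFoundError(f"expected data split at {path}")
    return pd.read_csv(path)

# usage: avocado_train = read_split("train")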

5
my_runs/2/config.json Normal file

@@ -0,0 +1,5 @@
{
"batch_size": 16,
"epochs": 10,
"seed": 808581577
}

0
my_runs/2/cout.txt Normal file

1
my_runs/2/metrics.json Normal file

@@ -0,0 +1 @@
{}

78
my_runs/2/run.json Normal file

@@ -0,0 +1,78 @@
{
"artifacts": [],
"command": "my_main",
"experiment": {
"base_dir": "/Users/patrycjalazna/Desktop/inzynieria-uczenia-maszynowego/zadanka",
"dependencies": [
"keras-nightly==2.5.0.dev2021032900",
"numpy==1.19.5",
"pandas==1.2.3",
"sacred==0.8.2",
"scikit-learn==0.24.1",
"tensorflow==2.5.0rc1"
],
"mainfile": "sacred-training.py",
"name": "file_observer",
"repositories": [
{
"commit": "4fc45ed8ef6201f91d818362878d3155dfe4295e",
"dirty": true,
"url": "https://git.wmi.amu.edu.pl/s434742/ium_434742.git"
}
],
"sources": [
[
"sacred-training.py",
"_sources/sacred-training_58bada47539aa2ee4591c9929ac8c60f.py"
]
]
},
"fail_trace": [
"Traceback (most recent call last):\n",
" File \"/usr/local/lib/python3.9/site-packages/sacred/config/captured_function.py\", line 42, in captured_function\n result = wrapped(*args, **kwargs)\n",
" File \"/Users/patrycjalazna/Desktop/inzynieria-uczenia-maszynowego/zadanka/sacred-training.py\", line 68, in my_main\n print(prepare_model())\n",
" File \"/usr/local/lib/python3.9/site-packages/sacred/config/captured_function.py\", line 42, in captured_function\n result = wrapped(*args, **kwargs)\n",
" File \"/Users/patrycjalazna/Desktop/inzynieria-uczenia-maszynowego/zadanka/sacred-training.py\", line 51, in prepare_model\n model.fit(X_train, y_train, epochs=epochs, batch_size=batch_size, validation_data=(X_test, y_test))\n",
"NameError: name 'epochs' is not defined\n"
],
"heartbeat": "2021-05-14T18:59:21.424846",
"host": {
"ENV": {},
"cpu": "Intel(R) Core(TM) i5-5287U CPU @ 2.90GHz",
"hostname": "patrycjalazna.local",
"os": [
"Darwin",
"macOS-10.15.7-x86_64-i386-64bit"
],
"python_version": "3.9.4"
},
"meta": {
"command": "my_main",
"options": {
"--beat-interval": null,
"--capture": null,
"--comment": null,
"--debug": false,
"--enforce_clean": false,
"--file_storage": null,
"--force": false,
"--help": false,
"--loglevel": null,
"--mongo_db": null,
"--name": null,
"--pdb": false,
"--print-config": false,
"--priority": null,
"--queue": false,
"--s3": null,
"--sql": null,
"--tiny_db": null,
"--unobserved": false
}
},
"resources": [],
"result": null,
"start_time": "2021-05-14T18:59:21.272740",
"status": "FAILED",
"stop_time": "2021-05-14T18:59:21.427473"
}
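Run 2 got past loading the data but died with NameError: name 'epochs' is not defined. @ex.capture injects config values only through the parameters a function declares; it does not make config entries visible as local names inside the body. That is exactly what this commit fixes by adding epochs and batch_size to the signature of prepare_model. A minimal standalone sketch of the mechanism (toy experiment, not the repo's script):

# Toy experiment illustrating Sacred's captured-function injection.
from sacred import Experiment

ex = Experiment("capture_demo")

@ex.config
def cfg():
    epochs = 10
    batch_size = 16

@ex.capture
def broken():
    return epochs                     # NameError if called: `epochs` is only a config key

@ex.capture
def fixed(epochs, batch_size):
    return epochs, batch_size         # filled from the config when called with no args

@ex.automain
def main():
    print(fixed())                    # -> (10, 16)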

5
my_runs/3/config.json Normal file

@@ -0,0 +1,5 @@
{
"batch_size": 16,
"epochs": 10,
"seed": 981585385
}

8
my_runs/3/cout.txt Normal file

@@ -0,0 +1,8 @@
INFO - file_observer - Running command 'my_main'
INFO - file_observer - Started run with ID "3"
9
2021-05-14 20:59:43.136009: I tensorflow/core/platform/cpu_feature_guard.cc:142] This TensorFlow binary is optimized with oneAPI Deep Neural Network Library (oneDNN) to use the following CPU instructions in performance-critical operations: AVX2 FMA
To enable them in other operations, rebuild TensorFlow with the appropriate compiler flags.
2021-05-14 20:59:43.542832: I tensorflow/compiler/mlir/mlir_graph_optimization_pass.cc:176] None of the MLIR Optimization Passes are enabled (registered 2)
Epoch 1/10
1/375 [..............................] - ETA: 3:02:52 - loss: 0.6931 - accuracy: 0.4375 49/375 [==>...........................] - ETA: 0s - loss: 0.6929 - accuracy: 0.6856  96/375 [======>.......................] - ETA: 0s - loss: 0.6925 - accuracy: 0.7135 129/375 [=========>....................] - ETA: 0s - loss: 0.6922 - accuracy: 0.7033
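The captured stdout for run 3 is dominated by the Keras progress bar, whose carriage-return updates get flattened into one long line in cout.txt. If the intent is a readable log file, passing verbose=2 to the existing model.fit call prints one summary line per epoch instead (a small, hedged adjustment; the script currently uses the default verbosity):

# One summary line per epoch instead of the interactive progress bar.
model.fit(X_train, y_train, epochs=epochs, batch_size=batch_size,
          validation_data=(X_test, y_test), verbose=2)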

1
my_runs/3/metrics.json Normal file

@@ -0,0 +1 @@
{}

87
my_runs/3/run.json Normal file

@@ -0,0 +1,87 @@
{
"artifacts": [],
"command": "my_main",
"experiment": {
"base_dir": "/Users/patrycjalazna/Desktop/inzynieria-uczenia-maszynowego/zadanka",
"dependencies": [
"keras-nightly==2.5.0.dev2021032900",
"numpy==1.19.5",
"pandas==1.2.3",
"sacred==0.8.2",
"scikit-learn==0.24.1",
"tensorflow==2.5.0rc1"
],
"mainfile": "sacred-training.py",
"name": "file_observer",
"repositories": [
{
"commit": "4fc45ed8ef6201f91d818362878d3155dfe4295e",
"dirty": true,
"url": "https://git.wmi.amu.edu.pl/s434742/ium_434742.git"
}
],
"sources": [
[
"sacred-training.py",
"_sources/sacred-training_36981fcdfc28636f97f6ecc881108ea8.py"
]
]
},
"fail_trace": [
"Traceback (most recent call last):\n",
" File \"/usr/local/lib/python3.9/site-packages/sacred/config/captured_function.py\", line 42, in captured_function\n result = wrapped(*args, **kwargs)\n",
" File \"/Users/patrycjalazna/Desktop/inzynieria-uczenia-maszynowego/zadanka/sacred-training.py\", line 68, in my_main\n print(prepare_model())\n",
" File \"/usr/local/lib/python3.9/site-packages/sacred/config/captured_function.py\", line 42, in captured_function\n result = wrapped(*args, **kwargs)\n",
" File \"/Users/patrycjalazna/Desktop/inzynieria-uczenia-maszynowego/zadanka/sacred-training.py\", line 60, in prepare_model\n model.save('avocado_model.h5')\n",
" File \"/usr/local/lib/python3.9/site-packages/keras/engine/training.py\", line 2086, in save\n save.save_model(self, filepath, overwrite, include_optimizer, save_format,\n",
" File \"/usr/local/lib/python3.9/site-packages/keras/saving/save.py\", line 146, in save_model\n hdf5_format.save_model_to_hdf5(\n",
" File \"/usr/local/lib/python3.9/site-packages/keras/saving/hdf5_format.py\", line 110, in save_model_to_hdf5\n model_metadata = saving_utils.model_metadata(model, include_optimizer)\n",
" File \"/usr/local/lib/python3.9/site-packages/keras/saving/saving_utils.py\", line 152, in model_metadata\n raise e\n",
" File \"/usr/local/lib/python3.9/site-packages/keras/saving/saving_utils.py\", line 149, in model_metadata\n model_config['config'] = model.get_config()\n",
" File \"/usr/local/lib/python3.9/site-packages/keras/engine/sequential.py\", line 471, in get_config\n layer_configs.append(generic_utils.serialize_keras_object(layer))\n",
" File \"/usr/local/lib/python3.9/site-packages/keras/utils/generic_utils.py\", line 508, in serialize_keras_object\n raise e\n",
" File \"/usr/local/lib/python3.9/site-packages/keras/utils/generic_utils.py\", line 503, in serialize_keras_object\n config = instance.get_config()\n",
" File \"/usr/local/lib/python3.9/site-packages/keras/engine/base_layer.py\", line 695, in get_config\n raise NotImplementedError('Layer %s has arguments in `__init__` and '\n",
"NotImplementedError: Layer ModuleWrapper has arguments in `__init__` and therefore must override `get_config`.\n"
],
"heartbeat": "2021-05-14T19:00:19.975884",
"host": {
"ENV": {},
"cpu": "Intel(R) Core(TM) i5-5287U CPU @ 2.90GHz",
"hostname": "patrycjalazna.local",
"os": [
"Darwin",
"macOS-10.15.7-x86_64-i386-64bit"
],
"python_version": "3.9.4"
},
"meta": {
"command": "my_main",
"options": {
"--beat-interval": null,
"--capture": null,
"--comment": null,
"--debug": false,
"--enforce_clean": false,
"--file_storage": null,
"--force": false,
"--help": false,
"--loglevel": null,
"--mongo_db": null,
"--name": null,
"--pdb": false,
"--print-config": false,
"--priority": null,
"--queue": false,
"--s3": null,
"--sql": null,
"--tiny_db": null,
"--unobserved": false
}
},
"resources": [],
"result": null,
"start_time": "2021-05-14T18:59:43.051621",
"status": "FAILED",
"stop_time": "2021-05-14T19:00:19.978586"
}
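Run 3 trained through the epochs but crashed in model.save('avocado_model.h5'): the traceback runs through the standalone keras package (keras-nightly), while the layers were imported from tensorflow.keras, and when the two are mixed the foreign layers end up wrapped in ModuleWrapper, which cannot serialize its config. A hedged sketch (standalone toy with random data; tensorflow 2.5 as pinned in the run's dependencies is assumed) of building and saving the same architecture from tensorflow.keras only, which is the usual way to avoid the wrapper:

# Hedged sketch: import Sequential and Dense from tensorflow.keras only, so
# every layer carries a serializable config and model.save() can write HDF5.
import numpy as np
from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import Dense

model = Sequential([
    Dense(9, input_dim=9, kernel_initializer='normal', activation='relu'),
    Dense(1, kernel_initializer='normal', activation='sigmoid'),
])
model.compile(loss='binary_crossentropy', optimizer='adam', metrics=['accuracy'])

# tiny random batch just to exercise fit and save end to end
X = np.random.rand(32, 9).astype("float32")
y = np.random.randint(0, 2, size=(32, 1))
model.fit(X, y, epochs=1, batch_size=16, verbose=0)

model.save('avocado_model.h5')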

71
_sources/sacred-training_36981fcdfc28636f97f6ecc881108ea8.py Normal file
@@ -0,0 +1,71 @@
import sys
from keras.backend import mean
import pandas as pd
import numpy as np
from sklearn import preprocessing
from sklearn.linear_model import LinearRegression
from sklearn.metrics import mean_squared_error
import tensorflow as tf
from tensorflow import keras
from tensorflow.keras.layers import Input, Dense, Activation, Dropout
from tensorflow.keras.models import Model
from tensorflow.keras.callbacks import EarlyStopping
from keras.models import Sequential
from sacred import Experiment
from sacred.observers import FileStorageObserver

ex = Experiment("file_observer")
ex.observers.append(FileStorageObserver('my_runs'))


@ex.config
def my_config():
    epochs = 10
    batch_size = 16


@ex.capture
def prepare_model(epochs, batch_size):
    # read the data from files
    avocado_train = pd.read_csv('avocado_train.csv')
    avocado_test = pd.read_csv('avocado_test.csv')
    avocado_validate = pd.read_csv('avocado_validate.csv')

    # split into X and y
    X_train = avocado_train[['average_price', 'total_volume', '4046', '4225', '4770', 'total_bags', 'small_bags', 'large_bags', 'xlarge_bags']]
    y_train = avocado_train[['type']]
    X_test = avocado_test[['average_price', 'total_volume', '4046', '4225', '4770', 'total_bags', 'small_bags', 'large_bags', 'xlarge_bags']]
    y_test = avocado_test[['type']]
    print(X_train.shape[1])

    # Keras model
    model = Sequential()
    model.add(Dense(9, input_dim=X_train.shape[1], kernel_initializer='normal', activation='relu'))
    model.add(Dense(1, kernel_initializer='normal', activation='sigmoid'))
    early_stop = EarlyStopping(monitor="val_loss", mode="min", verbose=1, patience=10)

    # compile
    model.compile(loss='binary_crossentropy', optimizer='adam', metrics=['accuracy'])

    # train the model
    model.fit(X_train, y_train, epochs=epochs, batch_size=batch_size, validation_data=(X_test, y_test))

    # predict
    prediction = model.predict(X_test)

    # evaluate
    rmse = mean_squared_error(y_test, prediction)

    # save the model
    model.save('avocado_model.h5')
    return rmse


@ex.main
def my_main():
    print(prepare_model())


ex.run()
ex.add_artifact('avocado_model.h5')
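With epochs and batch_size in the signature, Sacred fills them from the config whenever prepare_model() is called without arguments, and a run can use different values without editing the file. Because the script calls ex.run() directly rather than ex.run_commandline(), overrides are passed programmatically; a short hedged usage sketch:

# Hedged usage sketch: override the captured parameters for a single run.
# Command-line overrides ("with epochs=20") would require ex.run_commandline().
ex.run(config_updates={'epochs': 20, 'batch_size': 32})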

71
_sources/sacred-training_58bada47539aa2ee4591c9929ac8c60f.py Normal file
@@ -0,0 +1,71 @@
import sys
from keras.backend import mean
import pandas as pd
import numpy as np
from sklearn import preprocessing
from sklearn.linear_model import LinearRegression
from sklearn.metrics import mean_squared_error
import tensorflow as tf
from tensorflow import keras
from tensorflow.keras.layers import Input, Dense, Activation, Dropout
from tensorflow.keras.models import Model
from tensorflow.keras.callbacks import EarlyStopping
from keras.models import Sequential
from sacred import Experiment
from sacred.observers import FileStorageObserver

ex = Experiment("file_observer")
ex.observers.append(FileStorageObserver('my_runs'))


@ex.config
def my_config():
    epochs = 10
    batch_size = 16


@ex.capture
def prepare_model():
    # read the data from files
    avocado_train = pd.read_csv('avocado_train.csv')
    avocado_test = pd.read_csv('avocado_test.csv')
    avocado_validate = pd.read_csv('avocado_validate.csv')

    # split into X and y
    X_train = avocado_train[['average_price', 'total_volume', '4046', '4225', '4770', 'total_bags', 'small_bags', 'large_bags', 'xlarge_bags']]
    y_train = avocado_train[['type']]
    X_test = avocado_test[['average_price', 'total_volume', '4046', '4225', '4770', 'total_bags', 'small_bags', 'large_bags', 'xlarge_bags']]
    y_test = avocado_test[['type']]
    print(X_train.shape[1])

    # Keras model
    model = Sequential()
    model.add(Dense(9, input_dim=X_train.shape[1], kernel_initializer='normal', activation='relu'))
    model.add(Dense(1, kernel_initializer='normal', activation='sigmoid'))
    early_stop = EarlyStopping(monitor="val_loss", mode="min", verbose=1, patience=10)

    # compile
    model.compile(loss='binary_crossentropy', optimizer='adam', metrics=['accuracy'])

    # train the model
    model.fit(X_train, y_train, epochs=epochs, batch_size=batch_size, validation_data=(X_test, y_test))

    # predict
    prediction = model.predict(X_test)

    # evaluate
    rmse = mean_squared_error(y_test, prediction)

    # save the model
    model.save('avocado_model.h5')
    return rmse


@ex.main
def my_main():
    print(prepare_model())


ex.run()
ex.add_artifact('avocado_model.h5')

sacred-training.py
@@ -23,7 +23,7 @@ def my_config():
     batch_size = 16
 @ex.capture
-def prepare_model():
+def prepare_model(epochs, batch_size):
     # read the data from files
     avocado_train = pd.read_csv('avocado_train.csv')
     avocado_test = pd.read_csv('avocado_test.csv')