eval script
Some checks failed
s444417-training/pipeline/head There was a failure building this commit

s444417 2022-05-03 20:47:24 +02:00
parent 47962416bf
commit 7579071b7f
11 changed files with 89 additions and 16 deletions

Jenkinsfile.eval (new file, +14)

@@ -0,0 +1,14 @@
pipeline {
    stages {
        stage('Copy') {
            steps {
                sh 'python3 evalScript.py'
            }
        }
    }
    post {
        always {
            emailext body: "${currentBuild.currentResult}", subject: 's444417-evaluation build status', to: 'e19191c5.uam.onmicrosoft.com@emea.teams.ms'
        }
    }
}
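Jenkins declarative pipelines require an agent section before stages, and the evaluation script added in this commit lives at src/evalScript.py. A minimal sketch of a variant that satisfies both points is shown below; the agent choice, the checkout step and the stage name are assumptions, not part of this commit.

```groovy
pipeline {
    // declarative syntax requires an agent; "any" is only a placeholder choice here
    agent any
    stages {
        stage('Evaluate') {
            steps {
                // make sure the repository (and evalScript.py) is in the workspace
                checkout scm
                // path of the script as added in this commit
                sh 'python3 src/evalScript.py'
            }
        }
    }
    post {
        always {
            emailext body: "${currentBuild.currentResult}",
                     subject: 's444417-evaluation build status',
                     to: 'e19191c5.uam.onmicrosoft.com@emea.teams.ms'
        }
    }
}
```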

(changed file)

@@ -4,8 +4,8 @@ IUM_6
Task 1
1. created the [s444417-training](https://tzietkiewicz.vm.wmi.amu.edu.pl:8080/job/s444417-training/) job
2. s444417-training starts automatically after the s444417-create-dataset job finishes ("Jenkinsfile", via the build job step). It copies the dataset with copyArtifact, in "Jenkinsfile3"
3. "Jenkinsfile3", archiveArtifacts, line 12
4. Notifications, "Jenkinsfile3", line 11
3. "Jenkinsfile3", via archiveArtifacts
4. Notifications, in "Jenkinsfile3" via emailext (a sketch follows this list)
5. The parameter is set in "startscript1.sh" at line 11, when the training script "startscript2.sh" is called; it specifies the number of epochs
Task 2
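"Jenkinsfile3" referenced in points 2-4 is not part of this commit; the sketch below only illustrates how the pieces named above (copyArtifacts, archiveArtifacts and emailext) typically fit together in a declarative pipeline. The job names and file patterns are assumptions.

```groovy
pipeline {
    agent any
    stages {
        stage('Copy dataset') {
            steps {
                // copy the dataset archived by the upstream job (point 2)
                copyArtifacts projectName: 's444417-create-dataset', selector: lastSuccessful()
            }
        }
        stage('Train') {
            steps {
                // startscript1.sh forwards the epoch count to startscript2.sh (point 5)
                sh './startscript1.sh'
            }
        }
    }
    post {
        success {
            // keep the trained model as a build artifact (point 3)
            archiveArtifacts artifacts: 'saved_model/**'
        }
        always {
            // build-status notification (point 4)
            emailext body: "${currentBuild.currentResult}",
                     subject: 's444417-training build status',
                     to: 'e19191c5.uam.onmicrosoft.com@emea.teams.ms'
        }
    }
}
```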

(changed file)

@@ -1,2 +1,2 @@
predictions: [84.40604, 472.22028, 106.96647, 141.08197, 105.62965, 55.602768, 107.484055, 185.62663, 48.709442, 86.00946]
expected: [190. 330. 78. 54.4 39. 69. 48. 200. 100. 85. ]
predictions: [166.16302, 211.04045, 123.3409, 42.00785, 87.86473, 109.27005, 169.75987, 173.21875, 232.33553, 142.31973]
expected: [ 80. 42. 64. 72. 14. 97.4 75. 200. 140. 61. ]

(changed file)

@@ -2,4 +2,4 @@
[Serialized Keras model metadata (protobuf with embedded JSON); the binary dump is not reproduced here. It describes the saved Sequential model: a Normalization layer followed by Dense(1) with linear activation, input shape (None, 8), compiled with MeanSquaredError loss and the Adam optimizer (learning_rate=1), Keras 2.8.0 on the TensorFlow backend.]

Binary file not shown.

src/evalScript.py (new file, +42)

@@ -0,0 +1,42 @@
import os
import sys

import pandas as pd
import tensorflow as tf

cwd = os.path.abspath(os.path.dirname(sys.argv[0]))

# path to the trained model, resolved relative to the current working directory
modelPath = 'saved_model/MyModel_tf'

pathTrain = cwd + "/../Participants_Data_HPP/Train.csv"
pathTest = cwd + "/../Participants_Data_HPP/Test.csv"

features = ["UNDER_CONSTRUCTION", "RERA", "BHK_NO.", "SQUARE_FT", "READY_TO_MOVE", "RESALE", "LONGITUDE", "LATITUDE", "TARGET(PRICE_IN_LACS)"]

# get train dataset (loaded for reference, not used below)
house_price_train = pd.read_csv(pathTrain)[features]

# get test dataset
house_price_test = pd.read_csv(pathTest)[features]
house_price_test_features = house_price_test.copy()

# pop the target column
house_price_test_expected = house_price_test_features.pop('TARGET(PRICE_IN_LACS)')

# load model
new_model = tf.keras.models.load_model(modelPath)

# Check its architecture
# new_model.summary()

# Evaluate the restored model
loss = new_model.evaluate(house_price_test_features, house_price_test_expected, verbose=2)
print(loss)
# print('Restored model, accuracy: {:5.2f}%'.format(100 * acc))

# number the new row by how many results are already in the file
count = 0
try:
    with open('trainResults.csv', 'r') as trainResults:
        count = sum(1 for _ in trainResults)
except FileNotFoundError:
    pass

with open('trainResults.csv', 'a+') as trainResults:
    trainResults.write(f"{count},{loss}" + "\n")
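evalScript.py numbers each appended row by counting the lines already present in trainResults.csv, so the file has to survive from one evaluation build to the next for the counter to keep growing. One way to arrange that in Jenkins is sketched below: archive the CSV after the run and copy the last successful build's copy back before the script executes. The job name and artifact filter are assumptions, not something this commit configures.

```groovy
pipeline {
    agent any
    stages {
        stage('Restore previous results') {
            steps {
                // copy trainResults.csv produced by the last successful run of this job;
                // optional, because no artifact exists yet on the very first build
                copyArtifacts projectName: 's444417-evaluation',
                              filter: 'trainResults.csv',
                              selector: lastSuccessful(),
                              optional: true
            }
        }
        stage('Evaluate') {
            steps {
                sh 'python3 src/evalScript.py'
            }
        }
    }
    post {
        success {
            // keep the accumulated results for the next build
            archiveArtifacts artifacts: 'trainResults.csv'
        }
    }
}
```

The optional copy mirrors the try/except in the script: on the very first build the file is missing and the row counter simply starts at 0.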

(changed file)

@@ -45,26 +45,39 @@ house_price_test_expected = house_price_test_features.pop('TARGET(PRICE_IN_LACS)
house_price_features = np.array(house_price_features)
# checkpoints
checkpoint_path = "training_1/cp.ckpt"
checkpoint_dir = os.path.dirname(checkpoint_path)
# checkpoint_path = "training_1/cp.ckpt"
# checkpoint_dir = os.path.dirname(checkpoint_path)
# Create a callback that saves the model's weights
# cp_callback = tf.keras.callbacks.ModelCheckpoint(filepath=checkpoint_path, save_weights_only=True, verbose=1)
# model keras.Sequential
# one output tensor
linear_model = tf.keras.Sequential([
    normalize,
    layers.Dense(1)
])
linear_model.compile(loss = tf.losses.MeanSquaredError(),
                     optimizer = tf.optimizers.Adam(1))
modelPath = 'saved_model/MyModel_tf'
try:
    linear_model = tf.keras.models.load_model(modelPath)
    print("open existing model")
except Exception as ex:
    print(ex)
    linear_model = tf.keras.Sequential([
        normalize,
        layers.Dense(1)
    ])
    linear_model.compile(loss = tf.losses.MeanSquaredError(),
                         optimizer = tf.optimizers.Adam(1))
    print("creating new model")
# train model
history = linear_model.fit(house_price_features, house_price_labels, epochs=int(numberOfEpoch), )
history = linear_model.fit(
    house_price_features,
    house_price_labels,
    epochs=int(numberOfEpoch),
    validation_data = (house_price_test_features, house_price_test_expected),
    verbose=1)
#callbacks=[cp_callback])
# print(history)
# save model
linear_model.save('saved_model/my_model')
linear_model.save(modelPath, save_format='tf')
test_results = {}
test_results['linear_model'] = linear_model.evaluate(
@@ -82,4 +95,3 @@ flatten_pred = flatten(pred)
with open(cwd + "/../result.txt", "w+") as resultFile:
resultFile.write("predictions: " + str(flatten_pred) + '\n')
resultFile.write("expected: " + str(labels_test_sample.to_numpy()))

trainResults.csv (new file, +5)

@@ -0,0 +1,5 @@
0,306900.46875
1,304823.75
2,298283.34375
3,303093.53125
4,304189.1875