# 2022-05-07 15:37:17 +02:00
import os
import sys
import pandas as pd
import numpy as np
import tensorflow as tf
from tensorflow.keras import layers
from sacred import Experiment
from sacred.observers import FileStorageObserver
from sacred.observers import MongoObserver
2022-05-07 15:41:58 +02:00
|
|
|
# Sacred experiment. interactive=True allows running outside a normal
# script/module context; save_git_info=False skips git-repo introspection.
ex = Experiment("sacred_scopes", interactive=True, save_git_info=False)

# Record each run (config, metrics, artifacts) on the local filesystem.
ex.observers.append(FileStorageObserver('my_runs'))

# Mongo observer
# SECURITY NOTE(review): credentials are hard-coded in the connection URL;
# they should come from an environment variable or a secrets store.
ex.observers.append(MongoObserver(url='mongodb://admin:IUM_2021@172.17.0.1:27017', db_name='sacred'))
# 2022-05-07 15:37:17 +02:00
# train params
# Number of training epochs, taken from the first CLI argument when present.
numberOfEpochParam = 0
try:
    numberOfEpochParam = int(sys.argv[1])
except (IndexError, ValueError):
    # default val: no argument given, or the argument is not an integer.
    # (A bare `except:` here would also swallow KeyboardInterrupt/SystemExit.)
    numberOfEpochParam = 3
@ex.config
def my_config():
    """Sacred config scope: exposes the CLI-derived epoch count to captured
    functions as the `numberOfEpoch` config entry."""
    numberOfEpoch = numberOfEpochParam
@ex.capture
def train(numberOfEpoch, _run):
    """Train (or continue training) a linear regression model on the
    house-price dataset and log results to Sacred.

    Args:
        numberOfEpoch: number of training epochs (injected from the Sacred
            config scope).
        _run: the active Sacred run object, used for metric logging.

    Side effects: reads Train/Test CSVs relative to the script location,
    saves the model under saved_model/MyModel_tf, attaches the saved graph
    as a Sacred artifact, and writes predictions to ../result.txt.
    """
    cwd = os.path.abspath(os.path.dirname(sys.argv[0]))

    pathTrain = cwd + "/../Participants_Data_HPP/Train.csv"
    pathTest = cwd + "/../Participants_Data_HPP/Test.csv"

    # Columns used for training; the last one is the regression target.
    features = ["UNDER_CONSTRUCTION", "RERA", "BHK_NO.", "SQUARE_FT", "READY_TO_MOVE", "RESALE", "LONGITUDE", "LATITUDE", "TARGET(PRICE_IN_LACS)"]

    # get dataset
    house_price_train = pd.read_csv(pathTrain)[features]

    # get test dataset
    house_price_test = pd.read_csv(pathTest)[features]

    house_price_features = house_price_train.copy()
    # pop column: separates the target from the training features.
    house_price_labels = house_price_features.pop('TARGET(PRICE_IN_LACS)')

    # process data: normalization layer adapted to the training features.
    normalize = layers.Normalization()
    normalize.adapt(house_price_features)

    # NOTE(review): .sample(10) is unseeded, so the rows written to
    # result.txt differ between runs — pass random_state= for reproducibility.
    feature_test_sample = house_price_test.sample(10)
    labels_test_sample = feature_test_sample.pop('TARGET(PRICE_IN_LACS)')

    house_price_test_features = house_price_test.copy()
    # pop column
    house_price_test_expected = house_price_test_features.pop('TARGET(PRICE_IN_LACS)')

    house_price_features = np.array(house_price_features)

    # load model if exists or create new
    modelPath = 'saved_model/MyModel_tf'
    try:
        linear_model = tf.keras.models.load_model(modelPath)
        print("open existing model")
    except Exception as exception:
        # Deliberate best-effort fallback: any load failure (missing dir,
        # version mismatch) results in a freshly built model.
        print(exception)
        linear_model = tf.keras.Sequential([
            normalize,
            layers.Dense(1)
        ])
        linear_model.compile(loss = tf.losses.MeanSquaredError(),
                             optimizer = tf.optimizers.Adam(1))
        print("creating new model")

    # train model
    history = linear_model.fit(
        house_price_features,
        house_price_labels,
        epochs=int(numberOfEpoch),
        validation_split=0.33,
        verbose=1)

    # save model
    linear_model.save(modelPath, save_format='tf')
    # save model as artifact
    ex.add_artifact(modelPath + "/saved_model.pb")

    # final loss: log the last epoch's training loss to Sacred.
    hist = pd.DataFrame(history.history)
    hist['epoch'] = history.epoch
    _run.log_scalar('final.training.loss', hist['loss'].iloc[-1])

    test_results = {}
    test_results['linear_model'] = linear_model.evaluate(
        house_price_test_features, house_price_test_expected, verbose=0)

    def flatten(t):
        # Flatten the (n, 1) prediction rows into a plain list of scalars.
        return [item for sublist in t for item in sublist]

    pred = np.array(linear_model.predict(feature_test_sample))
    flatten_pred = flatten(pred)

    # Write the sampled predictions and their expected values for inspection.
    with open(cwd + "/../result.txt", "w+") as resultFile:
        resultFile.write("predictions: " + str(flatten_pred) + '\n')
        resultFile.write("expected: " + str(labels_test_sample.to_numpy()))
@ex.main
def main():
    """Sacred entry point: delegates to the captured training function."""
    train()
# Launch the Sacred experiment (invokes the @ex.main function above).
ex.run()