# s487197-train: sacred experiment that trains a Keras classifier on
# Baltimore crime data. (File stats from the original listing: 116 lines,
# 3.9 KiB, Python.)
from keras.models import Sequential, load_model
|
|
from keras.layers import Dense
|
|
from keras.optimizers import Adam
|
|
import pandas as pd
|
|
import tensorflow as tf
|
|
import numpy as np
|
|
from sklearn.preprocessing import LabelEncoder
|
|
import argparse
|
|
import shutil
|
|
from sacred.observers import FileStorageObserver, MongoObserver
|
|
from sacred import Experiment
|
|
from sklearn import metrics
|
|
import math
|
|
|
|
# Sacred experiment for the weapon-classifier training run.
# interactive=True permits running from a REPL/notebook; git metadata is skipped.
ex = Experiment('s487197-train', save_git_info=False,interactive=True)

# Persist run metadata, metrics, and artifacts to the local filesystem.
ex.observers.append(FileStorageObserver('sacred_results'))
# NOTE(review): credentials are hard-coded in the connection URL — move them
# to an environment variable or secret store before sharing this code.
ex.observers.append(MongoObserver(url='mongodb://admin:IUM_2021@172.17.0.1:27017', db_name='sacred'))
#ex.observers.append(MongoObserver(url='mongodb://admin:IUM_2021@172.17.0.1:27017', db_name='sacred'))
|
|
|
|
|
|
def write_list(names, path='listfile.txt'):
    """Write *names* to a text file, one stringified item per line.

    Args:
        names: iterable of arbitrary items; each is converted with ``str``.
        path: destination file path. Defaults to ``'listfile.txt'`` so
            existing callers keep their behavior.
    """
    # Explicit encoding so output does not depend on the platform default.
    with open(path, 'w', encoding='utf-8') as fp:
        fp.write("\n".join(str(item) for item in names))
|
|
|
|
def get_x_y(data):
    """Prepare features and target from the raw Baltimore crime frame.

    Drops location/time columns unused by the model, integer-encodes every
    remaining column, and splits off the ``'Weapon'`` target.

    Args:
        data: raw pandas DataFrame; must contain all columns dropped below
            plus ``'Weapon'``.

    Returns:
        Tuple ``(data, x, y)``: the fully encoded frame, the feature frame
        (``data`` without ``'Weapon'``), and the encoded ``'Weapon'`` series.
    """
    # One combined drop instead of two passes over the frame (the original
    # also passed a redundant axis=1 alongside columns=).
    data = data.drop(columns=["Location 1", "Longitude", "Latitude",
                              "Location", "Total Incidents", "CrimeTime",
                              "Neighborhood", "Post", "CrimeDate",
                              "Inside/Outside"])
    # pd.factorize(sort=True) assigns integer codes by sorted unique value,
    # matching sklearn's LabelEncoder, without refitting an estimator per
    # column or requiring sklearn for this step.
    for column_name in data.columns:
        data[column_name] = pd.factorize(data[column_name], sort=True)[0]
    x = data.drop('Weapon', axis=1)
    y = data['Weapon']
    return data, x, y
|
|
|
|
|
|
@ex.config
def my_config():
    """Sacred config scope: hyperparameters, overridable via CLI flags."""
    parser = argparse.ArgumentParser(description='Train')
    parser.add_argument('-epochs', type=int, default=20)
    parser.add_argument('-lr', type=float, default=0.01)
    parser.add_argument('-validation_split', type=float, default=0.2)
    # parse_known_args (not parse_args) so sacred's own command-line
    # arguments ('with', observer flags, ...) do not make argparse abort.
    # The unknown-args list is bound to '_' — sacred ignores
    # underscore-prefixed locals.
    args, _ = parser.parse_known_args()
    epochs = args.epochs
    lr = args.lr
    validation_split = args.validation_split
    # Remove non-serializable locals so sacred does not try to store the
    # ArgumentParser/Namespace objects as config entries.
    del parser, args
|
|
|
|
|
|
@ex.capture
def prepare_message(epochs, lr, validation_split):
    """Render the captured hyperparameters as a single display string."""
    return f"{epochs} {lr} {validation_split}!"
|
|
|
|
|
|
@ex.main
def my_main(epochs, lr, validation_split, _run):
    """Train the weapon classifier and log metrics/artifacts via sacred.

    Args:
        epochs: number of training epochs (injected from the sacred config).
        lr: Adam learning rate (injected from the sacred config).
        validation_split: fraction of training data held out for validation.
        _run: sacred Run object (injected automatically by sacred).
    """
    # assumes baltimore_train.csv exists in the working directory — TODO confirm
    train = pd.read_csv('baltimore_train.csv')

    data_train, x_train, y_train = get_x_y(train)
    # Per-feature normalization layer adapted on the training features;
    # axis=1 targets the feature dimension of the (samples, features) input.
    normalizer = tf.keras.layers.Normalization(axis=1)
    normalizer.adapt(np.array(x_train))
    # Small fully-connected classifier. The 5-way softmax implies the encoded
    # 'Weapon' target takes 5 distinct values — TODO confirm against the data.
    model = Sequential(normalizer)
    model.add(Dense(64, activation="relu"))
    model.add(Dense(10, activation='relu'))
    model.add(Dense(10, activation='relu'))
    model.add(Dense(10, activation='relu'))
    model.add(Dense(5, activation="softmax"))
    # sparse_categorical_crossentropy: integer-encoded labels, no one-hot.
    model.compile(Adam(learning_rate=lr), loss='sparse_categorical_crossentropy', metrics=['accuracy'])
    model.summary()

    history = model.fit(
        x_train,
        y_train,
        epochs=epochs,
        validation_split=validation_split)
    hist = pd.DataFrame(history.history)
    hist['epoch'] = history.epoch
    # Log per-epoch loss/accuracy so the observers record learning curves.
    for his in hist.iterrows():
        ex.log_scalar('training.loss', his[1]['loss'])
        ex.log_scalar('accuracy', his[1]['accuracy'])
    # NOTE(review): with no file extension Keras saves a SavedModel
    # *directory*; ex.add_artifact expects a file path — verify this works
    # with the installed Keras/sacred versions.
    model.save('baltimore_model')
    ex.add_artifact('baltimore_model')

    # Dead code: held-out evaluation disabled via a triple-quoted string
    # (kept verbatim). Re-enable by removing the quotes.
    """
    baltimore_data_test =pd.read_csv('baltimore_test.csv')
    baltimore_data_test.columns = train.columns
    baltimore_data_test, x_test, y_test = get_x_y(baltimore_data_test)
    scores = model.evaluate(x_test, y_test)
    print("\n%s: %.2f%%" % (model.metrics_names[1], scores[1] * 100))

    y_predicted = model.predict(x_test)
    y_predicted = np.argmax(y_predicted, axis=1)
    test_results = {}
    test_results['Weapon'] = model.evaluate(
        x_test,
        y_test, verbose=0)
    write_list(y_predicted)
    print('Accuracy : ', scores[1] * 100)
    print('Mean Absolute Error : ', metrics.mean_absolute_error(y_test, y_predicted))
    print('Root Mean Squared Error : ', math.sqrt(metrics.mean_squared_error(y_test, y_predicted)))

    data = {
        'mse': metrics.mean_squared_error(y_test, y_predicted),
        'rmse': math.sqrt(metrics.mean_squared_error(y_test, y_predicted)),
        'accuracy': scores[1] * 100
    }
    _run.log_scalar('accuracy', data['accuracy'])
    _run.log_scalar('rmse', data['rmse'])
    _run.log_scalar('accuracy', data['accuracy'])
    """
|
|
# Execute the experiment's main function with the captured configuration.
ex.run()
|
|
|