Karolina Oparczyk
7a94a4d989
All checks were successful
s434765-training/pipeline/head This commit looks good
70 lines
2.0 KiB
Python
70 lines
2.0 KiB
Python
import pandas as pd
|
|
import numpy as np
|
|
from sklearn.metrics import mean_squared_error
|
|
|
|
from tensorflow import keras
|
|
|
|
|
|
def normalize_data(data):
    """Min-max scale *data* into the [0, 1] range.

    Parameters:
        data: numeric array-like (NumPy array or pandas object).

    Returns:
        The input rescaled so its minimum maps to 0 and its maximum to 1.
        A fully constant input (max == min) is returned as all zeros
        instead of dividing by zero (the original formula produced NaN).

    NOTE(review): this helper is never called below — the script repeats
    the same formula inline because it also needs the raw extrema for
    denormalization; consider returning (scaled, lo, hi) in a follow-up.
    """
    lo = np.min(data)
    hi = np.max(data)
    if np.all(hi == lo):
        # Degenerate constant input: avoid 0/0 -> NaN while keeping the
        # input's container type (ndarray in -> ndarray out, etc.).
        return data * 0.0
    return (data - lo) / (hi - lo)
|
|
|
|
|
|
# Training data: pandas treats the first data row as the header, so the
# column labels below ("2805317", "198909") look like literal values from
# row 1 — presumably subscriber count (feature) and like count (target);
# TODO confirm the CSV really has no header row.
# NOTE(review): error_bad_lines is deprecated and removed in pandas >= 2.0;
# newer pandas needs on_bad_lines="skip" instead.
data = pd.read_csv("data_train", sep=',', error_bad_lines=False).dropna()
X = data.loc[:,data.columns == "2805317"].astype(int)
y = data.loc[:,data.columns == "198909"].astype(int)

# Min-max scale the feature column to [0, 1]; the raw extrema are printed
# so the scaling could be inverted by hand later.
min_val_sub = np.min(X)
max_val_sub = np.max(X)
X = (X - min_val_sub) / (max_val_sub - min_val_sub)
print(min_val_sub)
print(max_val_sub)
|
|
|
|
# Min-max scale the target column to [0, 1] so it matches the range the
# network is trained to output; keep the raw extrema for denormalization.
min_val_like = np.min(y)
max_val_like = np.max(y)
y = (y - min_val_like) / (max_val_like - min_val_like)

# Log the training-target extrema (unlabeled — these are the values needed
# to map normalized predictions back to raw like counts).
print(min_val_like)
print(max_val_like)
|
|
|
|
|
|
# Feed-forward regression network: four ReLU hidden layers tapering
# 512 -> 256 -> 256 -> 128, then a single linear unit that predicts the
# normalized like count.
model = keras.Sequential()
model.add(keras.layers.Dense(512, input_dim=X.shape[1], activation='relu'))
model.add(keras.layers.Dense(256, activation='relu'))
model.add(keras.layers.Dense(256, activation='relu'))
model.add(keras.layers.Dense(128, activation='relu'))
model.add(keras.layers.Dense(1, activation='linear'))

# MAE as both loss and metric with the Adam optimizer; train for 30 epochs
# with the last 30% of rows held out as the validation split.
model.compile(loss='mean_absolute_error', optimizer="Adam", metrics=['mean_absolute_error'])
model.fit(X, y, epochs=30, validation_split=0.3)
|
|
|
|
# Dev/evaluation data: as with the training file, the first data row serves
# as the header, so "440265" / "21629" look like literal values from row 1 —
# presumably the same subscriber/like columns; TODO confirm no header row.
# NOTE(review): error_bad_lines is deprecated and removed in pandas >= 2.0;
# newer pandas needs on_bad_lines="skip".
data = pd.read_csv("data_dev", sep=',', error_bad_lines=False, skip_blank_lines=True).dropna()
X_test = data.loc[:,data.columns == "440265"].astype(int)
y_test = data.loc[:,data.columns == "21629"].astype(int)

# Scale the dev feature with its OWN extrema rather than the training
# extrema. NOTE(review): this is inconsistent with standard practice (the
# training min/max should normally be reused) — confirm it was intended.
min_val_sub = np.min(X_test)
max_val_sub = np.max(X_test)
X_test = (X_test - min_val_sub) / (max_val_sub - min_val_sub)
print(min_val_sub)
print(max_val_sub)

# Dev-target extrema are computed (and printed) but y_test itself is
# deliberately left on the raw scale; the extrema are only used below to
# denormalize the model's predictions before comparing against y_test.
min_val_like = np.min(y_test)
max_val_like = np.max(y_test)
print(min_val_like)
print(max_val_like)
|
|
|
|
# Predict on the dev set and map the network's [0, 1] outputs back to the
# raw like-count scale using the dev-set extrema computed above.
prediction = model.predict(X_test)

# min_val_like / max_val_like are one-element Series (a single target
# column), so [0] extracts the scalar extremum; pred is a length-1 row of
# the prediction matrix.
prediction_denormalized = [
    pred[0] * (max_val_like[0] - min_val_like[0]) + min_val_like[0]
    for pred in prediction
]

# BUG FIX: the original opened predictions.txt without ever closing it, so
# the final buffered lines could be lost; `with` guarantees flush + close.
with open("predictions.txt", "w") as f:
    for pred, test in zip(prediction_denormalized, y_test.values):
        f.write("predicted: %s expected: %s\n" % (str(pred), str(test[0])))

# y_test is still on the raw (un-normalized) scale, so this MSE is in
# original like-count units.
error = mean_squared_error(y_test, prediction_denormalized)
print(error)
|