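"""Evaluate stored predictions: read true/predicted label pairs from
predictions.txt, compute accuracy, micro-averaged precision/recall/F1,
and RMSE, and append the results to metrics.txt."""
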
from sklearn.metrics import accuracy_score, precision_score, recall_score, f1_score, mean_squared_error
import numpy as np

true_labels = []
predicted_labels = []

# Parse predictions.txt: the second space-separated token on each line is the
# predicted label, the fourth is the true label. A with-block ensures the file
# is closed (the original opened it without ever closing).
with open("predictions.txt", "r") as f:
    for line in f:
        parts = line.strip().split(' ')
        true_labels.append(int(parts[3]))
        predicted_labels.append(int(parts[1]))

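# Illustrative record (the exact format is an assumption inferred from the
# indices above): a line like "pred: 4 true: 7" would append 4 to
# predicted_labels and 7 to true_labels; tokens 0 and 2 are ignored.
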
# Micro-averaged classification metrics, plus RMSE over the integer labels.
accuracy = accuracy_score(true_labels, predicted_labels)
precision_micro = precision_score(true_labels, predicted_labels, average='micro')
recall_micro = recall_score(true_labels, predicted_labels, average='micro')
f1_micro = f1_score(true_labels, predicted_labels, average='micro')
rmse = np.sqrt(mean_squared_error(true_labels, predicted_labels))

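# Note: for single-label multiclass data such as this, every misclassified
# sample counts as exactly one false positive and one false negative, so
# micro-averaged precision, recall, and F1 all coincide with accuracy.
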
# Open in append mode so metrics from successive evaluation runs accumulate.
with open('metrics.txt', 'a') as fp:
    fp.write(f"Accuracy: {accuracy}\n")
    fp.write(f"Precision: {precision_micro}\n")
    fp.write(f"Recall: {recall_micro}\n")
    fp.write(f"F1-score: {f1_micro}\n")
    fp.write(f"RMSE: {rmse}\n")