ium_464914/metrics.py

from sklearn.metrics import accuracy_score, precision_score, recall_score, f1_score, mean_squared_error
import numpy as np
true_labels = []
predicted_labels = []
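# Parse predictions.txt. Assumed (not confirmed by anything else in this file):
# each line holds whitespace-separated fields where the 2nd field is the
# predicted label and the 4th is the true label, both integers.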
with open("predictions.txt", "r") as f:
    for line in f:
        parts = line.strip().split(' ')
        true_labels.append(int(parts[3]))       # true label (4th field)
        predicted_labels.append(int(parts[1]))  # predicted label (2nd field)
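# Classification metrics; micro-averaging pools the per-sample decisions
# across all classes before computing precision, recall, and F1.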
accuracy = accuracy_score(true_labels, predicted_labels)
precision_micro = precision_score(true_labels, predicted_labels, average='micro')
recall_micro = recall_score(true_labels, predicted_labels, average='micro')
f1_micro = f1_score(true_labels, predicted_labels, average='micro')
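# RMSE treats the integer labels as numeric values, so it is only
# informative if the distance between label values is meaningful.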
rmse = np.sqrt(mean_squared_error(true_labels, predicted_labels))
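# Open in append mode so successive runs accumulate blocks in metrics.txt.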
with open('metrics.txt', 'a') as fp:
    fp.write(f"Accuracy: {accuracy}\n")
    fp.write(f"Micro-average Precision: {precision_micro}\n")
    fp.write(f"Micro-average Recall: {recall_micro}\n")
    fp.write(f"Micro-average F1-score: {f1_micro}\n")
    fp.write(f"RMSE: {rmse}\n")
    fp.write("--------------------\n")