IUM_06

commit 0920a59d1f (parent b1a03b41b0)

Jenkinsfile (vendored), 11 lines changed
@@ -35,7 +35,7 @@ pipeline {
             }
         }

-        stage('Copy Artifacts from training job') {
+        stage('Copy Artifacts from evaluation job') {
             steps {
                 copyArtifacts filter: 'evaluation/*', projectName: 's464913-evaluation/evaluation', selector: buildParameter('BUILD_SELECTOR'), optional: true
             }
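The rename only fixes a stale label: the unchanged copyArtifacts line already pulls from the s464913-evaluation/evaluation job, so "Copy Artifacts from training job" no longer described what the stage did.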
@@ -51,7 +51,14 @@ pipeline {
         stage('Run metrics script') {
             steps {
                 sh 'chmod +x metrics.py'
-                sh 'python3 ./metrics.py'
+                sh "python3 ./metrics.py ${currentBuild.number}"
+            }
+        }
+
+        stage('Run plot script') {
+            steps {
+                sh 'chmod +x plot.py'
+                sh 'python3 ./plot.py'
             }
         }
     }
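Two things change in the pipeline: the metrics stage now forwards the Jenkins build number to the script, and a new stage runs the freshly added plot.py. The switch from single to double quotes on the sh line is what makes Groovy interpolate ${currentBuild.number} before the shell runs. A minimal sketch of what metrics.py sees on the other end, using 42 as a hypothetical build number:

import sys

# For build 42 the stage effectively runs: python3 ./metrics.py 42
# so inside the script the build number arrives as a string:
print(sys.argv)  # ["./metrics.py", "42"]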
metrics.py

@@ -1,10 +1,12 @@
 from sklearn.metrics import confusion_matrix
 import pandas as pd
+import sys


 def main():
     y_test = pd.read_csv("data/y_test.csv")
     y_pred = pd.read_csv("evaluation/y_pred.csv", header=None)
+    build_number = sys.argv[1]

     cm = confusion_matrix(y_test, y_pred)
     print(
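One caveat: a bare sys.argv[1] raises IndexError whenever the script is launched without an argument, e.g. in a local run outside Jenkins. A hypothetical defensive variant (not part of the commit), assuming "0" is an acceptable placeholder:

import sys

# Fall back to a placeholder when no build number is supplied on the command line.
build_number = sys.argv[1] if len(sys.argv) > 1 else "0"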
@@ -14,8 +16,7 @@ def main():
     accuracy = cm[1, 1] / (cm[1, 0] + cm[1, 1])

     with open(r"evaluation/metrics.txt", "a") as f:
-        f.write(f"Accuracy: {accuracy}\n")
-        f.write(f"\n")
+        f.write(f"{accuracy},{build_number}\n")


 if __name__ == "__main__":
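Each run now appends one comma-separated record such as 0.9731,42 (accuracy first, build number second) instead of a labelled "Accuracy: ..." line plus a blank line. Note in passing that cm[1, 1] / (cm[1, 0] + cm[1, 1]) is TP / (TP + FN), i.e. recall on the positive class, even though the script calls it accuracy. A hypothetical reader for the new format, assuming one record per line:

# Hypothetical reader for the new "accuracy,build_number" records.
def read_metrics(path="evaluation/metrics.txt"):
    records = []
    with open(path) as f:
        for line in f:
            line = line.strip()
            if "," in line:  # skip blank lines and any old-format entries
                acc, build = line.split(",")
                records.append((float(acc), int(build)))
    return records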
plot.py (new file), 16 lines added

@@ -0,0 +1,16 @@
+import numpy as np
+
+
+def main():
+    accuracy = []
+
+    with open("evaluation/metrics.txt") as f:
+        for line in f:
+            if "Accuracy" in line:
+                accuracy.append(float(line.split(":")[1].strip()))
+
+    build_numbers = np.arange(1, len(accuracy) + 1)
+
+
+if __name__ == "__main__":
+    main()
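A wrinkle this commit leaves behind: plot.py only collects lines containing "Accuracy", but the same commit switches metrics.py to writing {accuracy},{build_number} records with no such prefix, so newly appended entries will never match. It also synthesizes build numbers with np.arange instead of reading the real ones now stored in the file, and despite its name it draws nothing yet. A sketch of a version aligned with the new format, assuming matplotlib is available on the build agent:

import matplotlib.pyplot as plt  # assumption: matplotlib is installed on the agent


def main():
    accuracy, builds = [], []
    with open("evaluation/metrics.txt") as f:
        for line in f:
            line = line.strip()
            if "," in line:  # new "accuracy,build_number" records
                acc, build = line.split(",")
                accuracy.append(float(acc))
                builds.append(int(build))

    # One point per build, saved where the pipeline archives evaluation artifacts.
    plt.plot(builds, accuracy, marker="o")
    plt.xlabel("Build number")
    plt.ylabel("Accuracy")
    plt.savefig("evaluation/metrics.png")


if __name__ == "__main__":
    main()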