🚵
This commit is contained in:
parent
43c6a961f3
commit
7fc345bf14
@ -1,6 +1,8 @@
|
||||
node {
|
||||
checkout scm
|
||||
|
||||
stage('Configuration')
|
||||
|
||||
def local_image = docker.build("s452639-image")
|
||||
|
||||
local_image.inside {
|
||||
@ -11,10 +13,10 @@ node {
|
||||
archiveArtifacts artifacts: 'src/stop_times.normalized.tsv,src/stop_times.train.tsv,src/stop_times.test.tsv,src/stop_times.valid.tsv,src/stop_times.categories.tsv',
|
||||
followSymlinks: false
|
||||
}
|
||||
}
|
||||
|
||||
stage('Trigger') {
|
||||
build wait: false, job: 's452639-training'
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
39
eval.jenkinsfile
Normal file
39
eval.jenkinsfile
Normal file
@ -0,0 +1,39 @@
|
||||
// eval.jenkinsfile — evaluation pipeline for the s452639 ML project.
// Runs inside the project's Docker image, pulls the dataset and trained model
// from sibling jobs, runs tf_test.py, and archives predictions + accuracy.
node {
    checkout scm

    def local_image = docker.build("s452639-image")

    local_image.inside {
        stage('Prepare artifacts') {
            try {
                // Seed src/ with this job's own previous artifacts so the
                // accuracy log (appended to by tf_test.py) accumulates across builds.
                // FIX: all four named arguments belong inside one copyArtifacts(...)
                // call — the original closed the parenthesis after selector:,
                // leaving `flatten:`/`target:` dangling (Groovy syntax error).
                copyArtifacts(projectName: currentBuild.projectName,
                              selector: specific("${currentBuild.previousBuild.number}"),
                              flatten: true,
                              target: 'src/')
            } catch (err) {
                // First build (or previous build has no artifacts): start fresh.
                echo("with new accuracy log")
            }
        }

        stage('Evaluate') {
            // Check out the evaluation branch with the test scripts.
            checkout([$class: 'GitSCM', branches: [[name: 'ztm']], extensions: [], userRemoteConfigs: [[url: 'https://git.wmi.amu.edu.pl/s452639/ium_452639']]])

            // Dataset splits from the dataset-creation job.
            copyArtifacts fingerprintArtifacts: true,
                          projectName: 's452639-create-dataset',
                          selector: lastSuccessful(),
                          flatten: true,
                          target: 'src/'

            // Trained model from the training job.
            copyArtifacts fingerprintArtifacts: true,
                          projectName: 's452639-training',
                          selector: lastSuccessful(),
                          flatten: true,
                          target: 'src/'

            sh 'cd src; python tf_test.py'
            archiveArtifacts artifacts: 'src/stop_times.predictions.tsv,src/stop_times.accuracy.tsv'
        }
    }
}
|
||||
|
@ -1,5 +1,6 @@
|
||||
from tf_train import *
|
||||
import numpy as np
|
||||
from sklearn.metrics import accuracy_score
|
||||
|
||||
def test():
|
||||
global model, le
|
||||
@ -8,7 +9,15 @@ def test():
|
||||
test_y = tf.convert_to_tensor(test_y)
|
||||
|
||||
model = tf.keras.models.load_model('model.keras')
|
||||
pd.DataFrame(model.predict(test_x), columns=le.classes_).to_csv('stop_times.predictions.tsv', sep='\t')
|
||||
predictions = np.argmax(model.predict(test_x), 1)
|
||||
|
||||
with open('stop_times.predictions.tsv', 'w') as f:
|
||||
f.write('stop_headsign\n')
|
||||
for x in le.inverse_transform(predictions):
|
||||
print(x, file=f)
|
||||
|
||||
with open('stop_times.accuracy.tsv', 'a') as f:
|
||||
print(accuracy_score(test_y, predictions), file=f, sep='\t')
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
|
@ -23,7 +23,7 @@ def load_data(path: str, le: LabelEncoder):
|
||||
num_classes = len(le.classes_)
|
||||
|
||||
|
||||
def train():
|
||||
def train(epochs: int):
|
||||
global le
|
||||
|
||||
model = tf.keras.Sequential([
|
||||
@ -46,13 +46,10 @@ def train():
|
||||
valid_x = tf.convert_to_tensor(valid_x, dtype=tf.float32)
|
||||
valid_y = tf.convert_to_tensor(valid_y)
|
||||
|
||||
model_checkpoint_callback = tf.keras.callbacks.ModelCheckpoint(filepath='checkpoint.ckpt', save_weights_only=True)
|
||||
history = model.fit(train_x, train_y, validation_data=(valid_x, valid_y), epochs=2, batch_size=1024, callbacks=[model_checkpoint_callback])
|
||||
|
||||
with open('history', 'w') as f:
|
||||
print(repr(history), file=f)
|
||||
|
||||
model.fit(train_x, train_y, validation_data=(valid_x, valid_y), epochs=epochs, batch_size=1024)
|
||||
model.save('model.keras')
|
||||
|
||||
if __name__ == "__main__":
|
||||
train()
|
||||
import sys
|
||||
epochs = int('2' if len(sys.argv) != 2 else sys.argv[1])
|
||||
train(epochs)
|
||||
|
@ -1,6 +1,25 @@
|
||||
node {
|
||||
checkout scm
|
||||
|
||||
stage('Init') {
|
||||
properties([
|
||||
pipelineTriggers([ upstream(threshold: hudson.model.Result.SUCCESS, upstreamProjects: 's452639-create-dataset' ) ]),
|
||||
parameters([
|
||||
buildSelector(
|
||||
defaultSelector: lastSuccessful(),
|
||||
description: "Source of dataset",
|
||||
name: 'BUILD_SELECTOR'
|
||||
),
|
||||
string(
|
||||
defaultValue: "2",
|
||||
description: "Epochs count",
|
||||
name: "EPOCHS",
|
||||
trim: true
|
||||
),
|
||||
])
|
||||
])
|
||||
}
|
||||
|
||||
def local_image = docker.build("s452639-image")
|
||||
|
||||
local_image.inside {
|
||||
@ -13,9 +32,13 @@ node {
|
||||
flatten: true,
|
||||
target: 'src/'
|
||||
|
||||
sh 'cd src; python tf_train.py'
|
||||
sh 'cd src; python tf_train.py $EPOCHS'
|
||||
archiveArtifacts artifacts: 'src/model.keras', followSymlinks: false
|
||||
}
|
||||
}
|
||||
|
||||
stage('Trigger') {
|
||||
build wait: false, job: 's452639-evaluation.eg'
|
||||
}
|
||||
}
|
||||
|
||||
|
Loading…
Reference in New Issue
Block a user