From 7df11da81d9ef6380176188494980031f5396863 Mon Sep 17 00:00:00 2001
From: s424714
Date: Sun, 7 May 2023 20:02:09 +0200
Subject: [PATCH] feat: automatic trigger v2

---
 Jenkinsfile-create   |  1 +
 Jenkinsfile-training | 28 +++++++++++++++-------------
 2 files changed, 16 insertions(+), 13 deletions(-)

diff --git a/Jenkinsfile-create b/Jenkinsfile-create
index 23add15..d9582f2 100644
--- a/Jenkinsfile-create
+++ b/Jenkinsfile-create
@@ -42,6 +42,7 @@ node {
             sh "kaggle datasets download -p data --unzip clmentbisaillon/fake-and-real-news-dataset && python ./dataset.py --dataset"
             sh "cp ./data/dataset/* ${WORKSPACE}"
+            sh "cp ./data/*.csv ${WORKSPACE}"
         }
     }

diff --git a/Jenkinsfile-training b/Jenkinsfile-training
index bf74005..70e2e34 100644
--- a/Jenkinsfile-training
+++ b/Jenkinsfile-training
@@ -50,20 +50,22 @@ node {
     def dockerImage = docker.build("s424714-model")
     dockerImage.inside {
-        stage("Docker: cloning artifacts"){
-            sh 'mkdir -p ./data/dataset'
-            sh 'mv -t ./data/dataset train.csv test.csv val.csv'
-        }
-        stage("Docker: Running training model")
-
-        {
-            sh 'mkdir -p ./.cache'
-            sh "export TRANSFORMERS_CACHE=./.cache"
-            sh 'python ./src/main.py --train --lr=$LR --batch=$BATCH_SIZE --epochs=$NUM_EPOCHS'
-            sh "cp ./results/model.pt ${WORKSPACE}"
-        }
+        withEnv(["TRANSFORMERS_CACHE=./.cache"]) {
+            stage("Docker: cloning artifacts"){
+                sh 'mkdir -p ./data/dataset'
+                sh 'mv -t ./data/dataset train.csv test.csv val.csv'
+                sh 'mv -t ./data True.csv Fake.csv'
+            }
+            stage("Docker: Running training model")
+            {
+                sh 'mkdir -p ./.cache'
+                // sh ""
+                sh 'python ./src/main.py --train --lr=$LR --batch=$BATCH_SIZE --epochs=$NUM_EPOCHS'
+                sh "cp ./results/model.pt ${WORKSPACE}"
+            }

-    }
+        }
+    }
 }
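
Note: for readability, the dockerImage.inside block of Jenkinsfile-training would look roughly like this after the patch is applied. This is a reconstruction from the second hunk above, not the full file: the empty commented-out sh line is omitted, and the indentation and brace placement are tidied assumptions.

    dockerImage.inside {
        withEnv(["TRANSFORMERS_CACHE=./.cache"]) {
            stage("Docker: cloning artifacts") {
                // stage the split dataset and the raw Kaggle CSVs copied in by Jenkinsfile-create
                sh 'mkdir -p ./data/dataset'
                sh 'mv -t ./data/dataset train.csv test.csv val.csv'
                sh 'mv -t ./data True.csv Fake.csv'
            }
            stage("Docker: Running training model") {
                // withEnv makes TRANSFORMERS_CACHE visible to this python step
                sh 'mkdir -p ./.cache'
                sh 'python ./src/main.py --train --lr=$LR --batch=$BATCH_SIZE --epochs=$NUM_EPOCHS'
                sh "cp ./results/model.pt ${WORKSPACE}"
            }
        }
    }

Wrapping both stages in withEnv is the relevant change here: each sh step runs in its own shell, so the previous sh "export TRANSFORMERS_CACHE=./.cache" only affected that single step and never reached the training command, whereas withEnv exports the variable for every step inside the block.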