// Jenkins declarative pipeline: trains a model with configurable hyperparameters,
// archives the trained model plus the Sacred experiment artifacts, and then
// triggers the downstream evaluation job.
pipeline {
    agent any

    parameters {
        string(name: 'epochs', defaultValue: '1000', description: 'Number of epochs for training')
        string(name: 'learning_rate', defaultValue: '0.001', description: 'Learning rate for training')
        string(name: 'weight_decay', defaultValue: '0.001', description: 'Regularization parameter for training')
    }

    stages {
        stage('Clone repository') {
            steps {
                checkout scm
            }
        }

        stage('Copy artifacts') {
            agent {
                dockerfile {
                    filename 'Dockerfile'
                    reuseNode true
                }
            }
            steps {
                // Pull the datasets produced by the dataset-creation job's last successful build.
                copyArtifacts(projectName: 'z-s464863-create-dataset', filter: 'datasets/*', selector: lastSuccessful())
            }
        }

        stage('Create model') {
            agent {
                dockerfile {
                    filename 'Dockerfile'
                    reuseNode true
                }
            }
            environment {
                // Expose the build parameters as environment variables so the sh
                // steps below can reference them with shell expansion instead of
                // Groovy string interpolation, which is vulnerable to command
                // injection through crafted parameter values.
                EPOCHS = "${params.epochs}"
                LEARNING_RATE = "${params.learning_rate}"
                WEIGHT_DECAY = "${params.weight_decay}"
            }
            steps {
                script {
                    sh 'chmod +x ./create_model.py'
                    // Single-quoted Groovy string: $VAR is expanded by the shell,
                    // not by Groovy, so parameter values cannot break out of the
                    // command line.
                    sh 'python3 ./create_model.py with "num_epochs=$EPOCHS" "learning_rate=$LEARNING_RATE" "weight_decay=$WEIGHT_DECAY"'

                    archiveArtifacts artifacts: 'models/model.pth', onlyIfSuccessful: true

                    // Archive the full Sacred run directory for this experiment;
                    // create_model.py writes the run id to experiment_id.txt.
                    def experimentId = readFile('experiment_id.txt').trim()
                    archiveArtifacts artifacts: "my_runs/${experimentId}/*", onlyIfSuccessful: true

                    // Archive each recorded source file. Blank lines are skipped:
                    // a trailing newline in sources.txt would otherwise yield an
                    // empty pattern and fail the archive step.
                    def sources = readFile('sources.txt').split('\n').findAll { it.trim() }
                    for (def source in sources) {
                        archiveArtifacts artifacts: "my_runs/${source}", onlyIfSuccessful: true
                    }

                    // Same treatment for recorded resource files (paths are
                    // already workspace-relative in resources.txt).
                    def resources = readFile('resources.txt').split('\n').findAll { it.trim() }
                    for (def resource in resources) {
                        archiveArtifacts artifacts: "${resource}", onlyIfSuccessful: true
                    }

                    // Kick off evaluation asynchronously; this build does not
                    // wait for (or depend on) its result.
                    build job: 's464863-evaluation/main', wait: false
                }
            }
        }
    }
}