import java.nio.file.Paths

import hudson.model.Result
import jenkins.model.Jenkins

// Returns 'OK', 'FAILED' or 'IN_PROGRESS' for the given build of the given upstream job
@NonCPS
def isUpstreamOK(jobName, buildId) {
    def job = Jenkins.instance.getItem(jobName)
    if (!job) {
        error("cannot find upstream job ${jobName}")
    }
    def build = job.getBuild(buildId.toString())
    if (!build) {
        error("cannot find build #${buildId} of upstream job ${jobName}")
    }
    def result = build.getResult()
    if (result) {
        if (result == Result.SUCCESS || result == Result.UNSTABLE) {
            return 'OK'
        } else {
            return 'FAILED'
        }
    } else {
        return 'IN_PROGRESS'
    }
}

// Returns [jobName, buildNumber] of the upstream build that triggered this build,
// or null if it was not triggered by another job
@NonCPS
def getTriggerBuild(currentBuild) {
    def triggerBuild = currentBuild.rawBuild.getCause(hudson.model.Cause$UpstreamCause)
    if (triggerBuild) {
        return [triggerBuild.getUpstreamProject(), triggerBuild.getUpstreamBuild()]
    }
    return null
}

// Returns the number of the build of 'job' that was triggered by triggerJob#triggerBuild,
// or null if there is none
@NonCPS
def findBuildTriggeredBy(job, triggerJob, triggerBuild) {
    def jobBuilds = job.getBuilds()
    for (def buildIndex = 0; buildIndex < jobBuilds.size(); ++buildIndex) {
        def build = jobBuilds[buildIndex]
        def buildCause = build.getCause(hudson.model.Cause$UpstreamCause)
        if (buildCause) {
            def causeJob   = buildCause.getUpstreamProject()
            def causeBuild = buildCause.getUpstreamBuild()
            if (causeJob == triggerJob && causeBuild == triggerBuild) {
                return build.getNumber()
            }
        }
    }
    return null
}

// Resolves, for each upstream job, the build whose artifacts this build should use
def getUpstreamBuilds(upstreamJobNames, triggerJob, triggerBuild) {
    def upstreamBuilds = []

    // Iterate with a plain counter -- NOTE: the Jenkins CPS transform does not reliably
    // support Groovy-style (.each) or even modern Java-style (for-in) iteration here
    for (def jobIndex = 0; jobIndex < upstreamJobNames.size(); ++jobIndex) {
        def jobName = upstreamJobNames[jobIndex]
        if (jobName == triggerJob) {
            echo "upstream build: ${jobName}#${triggerBuild}"
            upstreamBuilds << [jobName, triggerBuild]
        } else {
            def job = Jenkins.instance.getItem(jobName)
            def matchingBuild = findBuildTriggeredBy(job, triggerJob, triggerBuild)
            if (!matchingBuild) {
                if (triggerJob) {
                    echo "no build from ${jobName} has been triggered by ${triggerJob}#${triggerBuild}, using last successful build"
                } else {
                    echo "manual build trigger, using last successful build for ${jobName}"
                }
                matchingBuild = job.getLastSuccessfulBuild().getNumber()
            }
            echo "upstream build: ${jobName}#${matchingBuild}"
            upstreamBuilds << [jobName, matchingBuild]
        }
    }
    return upstreamBuilds
}

// Blocks until all upstream builds have finished; returns false (and marks this
// build NOT_BUILT) if any of them failed
def waitForUpstreamBuilds(upstreamBuilds) {
    def waitContinue = true
    while (waitContinue) {
        waitContinue = false
        // Iterate with a plain counter -- NOTE: the Jenkins CPS transform does not reliably
        // support Groovy-style (.each) or even modern Java-style (for-in) iteration here
        for (def upstreamBuildIndex = 0; upstreamBuildIndex < upstreamBuilds.size(); ++upstreamBuildIndex) {
            def entry = upstreamBuilds[upstreamBuildIndex]
            def upstreamJobName = entry[0]
            def upstreamBuildId = entry[1]

            def status = isUpstreamOK(upstreamJobName, upstreamBuildId)
            if (status == 'IN_PROGRESS') {
                echo "waiting for job ${upstreamJobName}#${upstreamBuildId} to finish"
                waitContinue = true
                sleep time: 1, unit: 'SECONDS'
            } else if (status == 'FAILED') {
                echo "${upstreamJobName}#${upstreamBuildId} did not finish successfully, aborting this build"
                currentBuild.result = 'NOT_BUILT'
                return false
            }
        }
    }
    return true
}

// Builds a map of closures (one per upstream job) suitable for 'parallel'. Each closure
// downloads that job's prefix artifact and rsyncs it into the corresponding prefix
// directory of this workspace
def makeUpstreamArtifactImporters(autoproj, fullWorkspaceDir, upstreamDir,
        upstreamJobNames, upstreamPrefixes, upstreamBuilds) {

    def fullUpstreamDir = "${fullWorkspaceDir}/${upstreamDir}"
    dir(upstreamDir) { deleteDir() }

    def upstreamArtifactImporters = [:]
    for (def jobIndex = 0; jobIndex < upstreamJobNames.size(); ++jobIndex) {
        def jobName    = upstreamJobNames[jobIndex]
        def fullPrefix = upstreamPrefixes[jobIndex]
        def buildId    = upstreamBuilds[jobIndex][1]
        def relativePrefix = Paths.get(fullWorkspaceDir).relativize(Paths.get(fullPrefix)).toString()

        upstreamArtifactImporters[jobName] = {
            dir(upstreamDir) {
                step([$class: 'CopyArtifact',
                      projectName: jobName,
                      filter: "${jobName}-prefix.zip",
                      selector: [$class: 'SpecificBuildSelector', buildNumber: buildId.toString()]])
                dir(jobName) {
                    unzip zipFile: "${fullUpstreamDir}/${jobName}-prefix.zip"
                    sh "${autoproj} jenkins relativize ./ '@WORKSPACE_ROOT@' '${fullWorkspaceDir}'"
                }
            }
            dir(relativePrefix) {
                sh "rsync '${fullUpstreamDir}/${jobName}/' './' --delete --recursive --safe-links --perms --checksum"
            }
        }
    }
    return upstreamArtifactImporters
}

// Imports the prefixes of all upstream jobs in parallel, then restores this job's own
// last prefix if one was saved
def installUpstreamArtifacts(autoproj, fullWorkspaceDir, jobPackageName, jobPackagePrefix,
        upstreamJobNames, upstreamPackagePrefixes, upstreamBuilds) {
    def upstreamDir = "artifacts/upstream"
    parallel(makeUpstreamArtifactImporters(
        autoproj, fullWorkspaceDir, upstreamDir,
        upstreamJobNames, upstreamPackagePrefixes, upstreamBuilds))

    // We don't need the upstream artifacts anymore, clear some disk space
    dir(upstreamDir) { deleteDir() }
    if (fileExists("lastPrefix")) {
        sh "mv lastPrefix '${jobPackagePrefix}'"
    }
    return null
}

// Packages this job's prefix into <jobName>-prefix.zip and archives it so that
// downstream jobs can import it
def handleDownstream(autoproj, fullWorkspaceDir, jobName, jobPackagePrefix, artifactGlob) {
    def downstreamDir = "artifacts/downstream"
    def targetArtifactPath = "${fullWorkspaceDir}/${downstreamDir}/${jobName}-prefix.zip"
    dir(downstreamDir) { deleteDir() }
    dir("${downstreamDir}/${jobName}") {
        sh "rsync '${jobPackagePrefix}/' './' -a --delete"
        sh "${autoproj} jenkins relativize ./ '${fullWorkspaceDir}' '@WORKSPACE_ROOT@'"
        zip zipFile: targetArtifactPath, glob: artifactGlob
    }
    dir(downstreamDir) {
        archiveArtifacts artifacts: "*.zip"
        deleteDir()
    }
    return null
}

// Triggers the downstream jobs without waiting for them to finish
def triggerDownstreamJobs(jobNames) {
    for (def jobIndex = 0; jobIndex < jobNames.size(); ++jobIndex) {
        build job: jobNames[jobIndex], wait: false
    }
    return null
}