Commit | Line | Data |
---|---|---|
962ee225 FD |
1 | /** |
2 | * Copyright (C) 2017 - Francis Deslauriers <francis.deslauriers@efficios.com> | |
3 | * | |
4 | * This program is free software: you can redistribute it and/or modify | |
5 | * it under the terms of the GNU General Public License as published by | |
6 | * the Free Software Foundation, either version 3 of the License, or | |
7 | * (at your option) any later version. | |
8 | * | |
9 | * This program is distributed in the hope that it will be useful, | |
10 | * but WITHOUT ANY WARRANTY; without even the implied warranty of | |
11 | * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
12 | * GNU General Public License for more details. | |
13 | * | |
14 | * You should have received a copy of the GNU General Public License | |
15 | * along with this program. If not, see <http://www.gnu.org/licenses/>. | |
16 | */ | |
17 | ||
18 | import hudson.console.HyperlinkNote | |
19 | import hudson.model.* | |
20 | import java.io.File | |
21 | import org.eclipse.jgit.api.Git | |
22 | import org.eclipse.jgit.lib.Ref | |
23 | ||
// Thrown when a kernel version string is non-empty but does not match the
// expected "vX.Y[.Z[.W]][-rcN]" format.
class InvalidKVersionException extends Exception {
    InvalidKVersionException(String message) {
        super(message)
    }
}
29 | ||
// Thrown when an empty or null kernel version string is parsed.
class EmptyKVersionException extends Exception {
    EmptyKVersionException(String message) {
        super(message)
    }
}
35 | ||
// Models a vanilla (kernel.org) kernel version of the form
// vX.Y[.Z[.W]][-rcN] and makes instances comparable/sortable.
//
// For kernels <= v2 (e.g. v2.6.32.5) the second number is part of the major
// identity: it is stored in majorB and the remaining fields shift by one
// regex group (the 'offset' logic in parse()).
class VanillaKVersion implements Comparable<VanillaKVersion> {

    Integer major = 0
    // Second major component for v2.x kernels (the '6' in v2.6.32);
    // always 0 for modern kernels.
    Integer majorB = 0
    Integer minor = 0
    Integer patch = 0
    // MAX_VALUE means "not a release candidate" so that a final release
    // sorts after every -rcN of the same version.
    Integer rc = Integer.MAX_VALUE
    // True when the version carries a stable patch level (e.g. v4.9.1).
    Boolean inStable = false;

    VanillaKVersion() {}

    VanillaKVersion(version) {
        this.parse(version)
    }

    // Smallest representable version; handy as a comparison seed.
    static VanillaKVersion minKVersion() {
        return new VanillaKVersion("v0.0.0")
    }

    // Largest representable version.
    static VanillaKVersion maxKVersion() {
        return new VanillaKVersion("v" + Integer.MAX_VALUE + ".0.0")
    }

    static VanillaKVersion factory(version) {
        return new VanillaKVersion(version)
    }

    // Parse a version string (e.g. "v5.15", "v4.9.1", "v5.0-rc3") into this
    // object, resetting any state left over from a previous parse.
    // Throws EmptyKVersionException on null/empty input and
    // InvalidKVersionException when the string does not match the format.
    def parse(version) {
        this.major = 0
        this.majorB = 0
        this.minor = 0
        this.patch = 0
        this.rc = Integer.MAX_VALUE
        // Bug fix: inStable must be reset too, otherwise re-parsing a
        // non-stable version on an instance that previously parsed a stable
        // one keeps a stale 'true'.
        this.inStable = false

        if (!version) {
            throw new EmptyKVersionException("Empty kernel version")
        }

        def match = version =~ /^v(\d+)\.(\d+)(\.(\d+))?(\.(\d+))?(-rc(\d+))?$/
        if (!match) {
            throw new InvalidKVersionException("Invalid kernel version: ${version}")
        }

        Integer offset = 0;

        // Major
        this.major = Integer.parseInt(match.group(1))
        if (this.major <= 2) {
            // v2.x kernels: the second field belongs to the major version
            // and the remaining groups shift by two.
            offset = 2
            this.majorB = Integer.parseInt(match.group(2))
        }

        // Minor
        if (match.group(2 + offset) != null) {
            this.minor = Integer.parseInt(match.group(2 + offset))
        }

        // Patch level; its presence marks a stable-branch release.
        if (match.group(4 + offset) != null) {
            this.patch = Integer.parseInt(match.group(4 + offset))
            this.inStable = true
        }

        // Release candidate number, if any.
        if (match.group(8) != null) {
            this.rc = Integer.parseInt(match.group(8))
        }
    }

    Boolean isInStableBranch() {
        return this.inStable
    }

    // Return true if both versions are of the same stable branch (same
    // major[.majorB].minor prefix); patch level and rc are ignored.
    Boolean isSameStable(VanillaKVersion o) {
        if (this.major != o.major) {
            return false
        }
        if (this.majorB != o.majorB) {
            return false
        }
        if (this.minor != o.minor) {
            return false
        }

        return true
    }

    // Lexicographic comparison over (major, majorB, minor, patch, rc).
    // Because rc defaults to MAX_VALUE, "v5.0-rc3" < "v5.0".
    @Override int compareTo(VanillaKVersion o) {
        if (this.major != o.major) {
            return Integer.compare(this.major, o.major)
        }
        if (this.majorB != o.majorB) {
            return Integer.compare(this.majorB, o.majorB)
        }
        if (this.minor != o.minor) {
            return Integer.compare(this.minor, o.minor)
        }
        if (this.patch != o.patch) {
            return Integer.compare(this.patch, o.patch)
        }
        if (this.rc != o.rc) {
            return Integer.compare(this.rc, o.rc)
        }

        // Same version
        return 0;
    }

    // Reconstruct the canonical string form; omits majorB and patch when
    // zero and the rc suffix when not a release candidate.
    String toString() {
        String vString = "v${this.major}"

        if (this.majorB > 0) {
            vString = vString.concat(".${this.majorB}")
        }

        vString = vString.concat(".${this.minor}")

        if (this.patch > 0) {
            vString = vString.concat(".${this.patch}")
        }

        if (this.rc > 0 && this.rc < Integer.MAX_VALUE) {
            vString = vString.concat("-rc${this.rc}")
        }
        return vString
    }
}
164 | ||
802e75a7 FD |
// Save the hashmap containing all the jobs and their status to disk. We can
// do that because this job is configured to always run on the master node on
// Jenkins. Failures are logged but non-fatal: losing the cache only means
// unchanged jobs may be re-triggered on the next run.
def SaveCurrentJobsToWorkspace = { currentJobs, ondiskpath ->
    try {
        // withObjectOutputStream creates the file if needed and guarantees
        // the stream is closed even when writeObject() throws (the previous
        // code leaked the stream on failure).
        new File(ondiskpath).withObjectOutputStream { out ->
            out.writeObject(currentJobs)
        }
    } catch (e) {
        println("Failed to save previous Git object IDs to disk." + e);
    }
}
179 | ||
802e75a7 FD |
// Load the hashmap containing all the jobs and their last status from disk.
// It's possible because this job is configured to always run on the master
// node on Jenkins. Returns an empty map when the file is missing or
// unreadable (e.g. the very first run).
def LoadPreviousJobsFromWorkspace = { ondiskpath ->
    def previousJobs = [:]
    try {
        // withObjectInputStream guarantees the stream is closed even when
        // readObject() throws (the previous code leaked the stream on
        // failure).
        new File(ondiskpath).withObjectInputStream { input ->
            previousJobs = input.readObject()
        }
    } catch (e) {
        println("Failed to load previous runs from disk." + e);
    }
    return previousJobs
}
195 | ||
802e75a7 | 196 | |
962ee225 FD |
// Query a remote Git repository and return a map of branch name -> HEAD
// commit ID (SHA-1 hex string) for the branches of interest only.
def GetHeadCommits = { remoteRepo, branchesOfInterest ->
    def remoteHeads = [:]
    def headRefs = Git.lsRemoteRepository()
                      .setTags(false)
                      .setHeads(true)
                      .setRemote(remoteRepo)
                      .call()

    headRefs.each { ref ->
        // Ref names come back as 'refs/heads/<branch>'.
        def branchName = ref.getName().replaceAll('refs/heads/', '')
        if (branchesOfInterest.contains(branchName)) {
            remoteHeads[branchName] = ref.getObjectId().name()
        }
    }

    return remoteHeads
}
212 | ||
// Query a remote Git repository and return a map of tag name -> commit ID,
// skipping release-candidate ('-rc') tags.
def GetTagIds = { remoteRepo ->
    def remoteTags = [:]
    def tagRefs = Git.lsRemoteRepository()
                     .setTags(true)
                     .setHeads(false)
                     .setRemote(remoteRepo)
                     .call()

    tagRefs.each { ref ->
        def tagName = ref.getName().replaceAll('refs/tags/', '')
        // Exclude release candidate tags
        if (!ref.getName().contains('-rc')) {
            remoteTags[tagName] = ref.getObjectId().name()
        }
    }

    return remoteTags
}
229 | ||
// Return (as a string) the most recent non-RC tag belonging to the given
// branch: for a stable branch ('linux-X.Y.y') the highest vX.Y.Z tag, for
// 'master' the highest mainline (non-stable) tag.
def GetLastTagOfBranch = { tagRefs, branch ->
    def tagVersions = tagRefs.collect {new VanillaKVersion(it.key)}
    def currMax = new VanillaKVersion('v0.0.0');
    if (!branch.contains('master')){
        // 'linux-5.15.y' -> 'v5.15'. Bug fix: escape the dot and anchor the
        // '.y' suffix — the previous unescaped '.y' regex matched ANY
        // character followed by 'y', anywhere in the name.
        def targetVersion = new VanillaKVersion(branch.replaceAll('linux-', 'v').replaceAll('\\.y$', ''))
        tagVersions.each {
            if (it.isSameStable(targetVersion)) {
                if (currMax < it) {
                    currMax = it;
                }
            }
        }
    } else {
        tagVersions.each {
            if (!it.isInStableBranch() && currMax < it) {
                currMax = it;
            }
        }
    }
    return currMax.toString()
}
251 | ||
// Returns the latest tags of each of the branches passed in the argument
def GetLastTagIds = { remoteRepo, branchesOfInterest ->
    // Bug fix: the previous version also called GetHeadCommits() here and
    // never used the result — a pointless extra ls-remote round-trip.
    def remoteTagRefs = GetTagIds(remoteRepo)
    def remoteLastTagCommit = [:]

    // v2.x tags are of no interest and would confuse version comparison.
    remoteTagRefs = remoteTagRefs.findAll { !it.key.contains("v2.") }
    branchesOfInterest.each {
        remoteLastTagCommit[it] = remoteTagRefs[GetLastTagOfBranch(remoteTagRefs, it)]
    }

    return remoteLastTagCommit
}
265 | ||
802e75a7 FD |
// Build the canonical downstream job name for a (jobType, linuxBranch,
// lttngBranch) triplet, e.g. 'vm_tests_kmaster_lstable-2.13'.
// NOTE: intentionally returns the interpolated GString unchanged, since the
// result is used as a map key elsewhere in the script.
def CraftJobName = { type, kernelBranch, lttngBranch ->
    return "${type}_k${kernelBranch}_l${lttngBranch}"
}
269 | ||
802e75a7 | 270 | def LaunchJob = { jobName, jobInfo -> |
962ee225 | 271 | def job = Hudson.instance.getJob(jobName) |
9ee19c2b KS |
272 | if (job == null) { |
273 | println(String.format("Failed to find job by name '%s'", jobName)) | |
274 | return null; | |
275 | } | |
962ee225 FD |
276 | def params = [] |
277 | for (paramdef in job.getProperty(ParametersDefinitionProperty.class).getParameterDefinitions()) { | |
0d4a7f6b FD |
278 | // If there is a default value for this parameter, use it. Don't use empty |
279 | // default value parameters. | |
c500f461 | 280 | if (paramdef.getDefaultParameterValue() != null) { |
0d4a7f6b FD |
281 | params += paramdef.getDefaultParameterValue(); |
282 | } | |
962ee225 FD |
283 | } |
284 | ||
802e75a7 FD |
285 | params.add(new StringParameterValue('LTTNG_TOOLS_COMMIT_ID', jobInfo['config']['toolsCommit'])) |
286 | params.add(new StringParameterValue('LTTNG_MODULES_COMMIT_ID', jobInfo['config']['modulesCommit'])) | |
287 | params.add(new StringParameterValue('LTTNG_UST_COMMIT_ID', jobInfo['config']['ustCommit'])) | |
288 | params.add(new StringParameterValue('KERNEL_TAG_ID', jobInfo['config']['linuxTagID'])) | |
5a754cf7 FD |
289 | def currBuild = job.scheduleBuild2(0, new Cause.UpstreamCause(build), new ParametersAction(params)) |
290 | ||
291 | if (currBuild != null ) { | |
292 | println("Launching job: ${HyperlinkNote.encodeTo('/' + job.url, job.fullDisplayName)}"); | |
293 | } else { | |
294 | println("Job ${jobName} not found or deactivated."); | |
295 | } | |
296 | ||
297 | return currBuild | |
962ee225 FD |
298 | } |
299 | ||
962ee225 FD |
// Upstream repositories monitored by this trigger script.
final String toolsRepo = "https://github.com/lttng/lttng-tools.git"
final String modulesRepo = "https://github.com/lttng/lttng-modules.git"
final String ustRepo = "https://github.com/lttng/lttng-ust.git"
final String linuxRepo = "git://git.kernel.org/pub/scm/linux/kernel/git/stable/linux-stable.git"

// On-disk cache of the previous run's job statuses, kept in this job's
// workspace (the job always runs on the Jenkins master node).
final String pastJobsPath = build.getEnvironment(listener).get('WORKSPACE') + "/pastjobs";

// Actively maintained LTTng and stable-kernel branches.
def recentLttngBranchesOfInterest = [
    'master',
    'stable-2.13',
    'stable-2.12',
]
def recentLinuxBranchesOfInterest = [
    'master',
    'linux-6.1.y',
    'linux-5.15.y',
    'linux-5.10.y',
    'linux-5.4.y',
    'linux-4.19.y',
    'linux-4.14.y',
]

// Legacy branches are only tested against each other.
def legacyLttngBranchesOfInterest = []
def legacyLinuxBranchesOfInterest = [
    'linux-5.14.y',
    'linux-4.18.y',
    'linux-4.12.y',
    'linux-4.9.y',
]

// Kernel branches tested only by the vm_tests job type (added to the
// configurations later, once the trigger type is known).
def vmLinuxBranchesOfInterest = []
a28d0f54 | 328 | |
ca4d4c72 | 329 | // Generate configurations of interest. |
962ee225 FD |
330 | def configurationOfInterest = [] as Set |
331 | ||
332 | recentLttngBranchesOfInterest.each { lttngBranch -> | |
333 | recentLinuxBranchesOfInterest.each { linuxBranch -> | |
334 | configurationOfInterest.add([lttngBranch, linuxBranch]) | |
335 | } | |
336 | } | |
337 | ||
338 | legacyLttngBranchesOfInterest.each { lttngBranch -> | |
339 | legacyLinuxBranchesOfInterest.each { linuxBranch -> | |
340 | configurationOfInterest.add([lttngBranch, linuxBranch]) | |
341 | } | |
342 | } | |
343 | ||
344 | def lttngBranchesOfInterest = recentLttngBranchesOfInterest + legacyLttngBranchesOfInterest | |
edddabaa | 345 | def linuxBranchesOfInterest = recentLinuxBranchesOfInterest + legacyLinuxBranchesOfInterest + vmLinuxBranchesOfInterest |
962ee225 | 346 | |
ca4d4c72 | 347 | // For LTTng branches, we look for new commits. |
962ee225 FD |
348 | def toolsHeadCommits = GetHeadCommits(toolsRepo, lttngBranchesOfInterest) |
349 | def modulesHeadCommits = GetHeadCommits(modulesRepo, lttngBranchesOfInterest) | |
350 | def ustHeadCommits = GetHeadCommits(ustRepo, lttngBranchesOfInterest) | |
351 | ||
ca4d4c72 | 352 | // For Linux branches, we look for new non-RC tags. |
962ee225 FD |
353 | def linuxLastTagIds = GetLastTagIds(linuxRepo, linuxBranchesOfInterest) |
354 | ||
802e75a7 FD |
// Assemble the configuration map for one (linux, lttng) branch pair: the
// branch names, the kernel tag to build, and the current HEAD commits of the
// three LTTng repositories. Status starts as NOT_SET with no build attached.
def CraftConfig = { linuxBr, lttngBr ->
    return [
        'config': [
            'linuxBranch'  : linuxBr,
            'lttngBranch'  : lttngBr,
            'linuxTagID'   : linuxLastTagIds[linuxBr],
            'toolsCommit'  : toolsHeadCommits[lttngBr],
            'modulesCommit': modulesHeadCommits[lttngBr],
            'ustCommit'    : ustHeadCommits[lttngBr],
        ],
        'status': 'NOT_SET',
        'build' : null,
    ];
}
368 | ||
5a754cf7 FD |
// Check what type of jobs should be triggered, based on the name of the
// upstream job that launched this script.
triggerJobName = build.project.getFullDisplayName();
if (triggerJobName.contains("vm_tests")) {
    jobType = 'vm_tests';
    // VM tests additionally cover the VM-specific kernel branches.
    recentLttngBranchesOfInterest.each { lttngBranch ->
        vmLinuxBranchesOfInterest.each { linuxBranch ->
            configurationOfInterest.add([lttngBranch, linuxBranch])
        }
    }
} else if (triggerJobName.contains("baremetal_tests")) {
    jobType = 'baremetal_tests';
} else {
    // Fail fast with a clear message: previously jobType was left unbound
    // here and the script died later with an obscure
    // MissingPropertyException the first time jobType was read.
    throw new IllegalStateException("Unrecognized trigger job name: ${triggerJobName}");
}
962ee225 | 381 | |
802e75a7 FD |
// Hashmap containing all the jobs, their configuration (commit id, etc.) and
// their status (SUCCEEDED, FAILED, etc.). This Hashmap is made of basic
// strings rather than objects and enums because strings are easily
// serializable.
def currentJobs = [:];

// Get an up to date view of all the branches of interest.
configurationOfInterest.each { pair ->
    def (lttngBr, linuxBr) = pair;
    currentJobs[CraftJobName(jobType, linuxBr, lttngBr)] = CraftConfig(linuxBr, lttngBr);
}
5a754cf7 | 392 | |
802e75a7 | 393 | //Add canary job |
c17a93f3 | 394 | def jobNameCanary = jobType + "_kcanary_lcanary"; |
802e75a7 FD |
395 | currentJobs[jobNameCanary] = [:]; |
396 | currentJobs[jobNameCanary]['config'] = [:]; | |
26990b2f KS |
397 | currentJobs[jobNameCanary]['config']['linuxBranch'] = 'v5.15.112'; |
398 | currentJobs[jobNameCanary]['config']['lttngBranch'] = 'v2.13.9'; | |
399 | currentJobs[jobNameCanary]['config']['linuxTagID'] ='9d6bde853685609a631871d7c12be94fdf8d912e'; // v5.15.112 | |
400 | currentJobs[jobNameCanary]['config']['toolsCommit'] = '2ff0385718ff894b3d0e06f3961334c20c5436f8' // v2.13.9 | |
401 | currentJobs[jobNameCanary]['config']['modulesCommit'] = 'da1f5a264fff33fc5a9518e519fb0084bf1074af' // v2.13.9 | |
402 | currentJobs[jobNameCanary]['config']['ustCommit'] = 'de624c20694f69702b42c5d47b5bcf692293a238' // v2.13.5 | |
802e75a7 FD |
403 | currentJobs[jobNameCanary]['status'] = 'NOT_SET'; |
404 | currentJobs[jobNameCanary]['build'] = null; | |
405 | ||
def pastJobs = LoadPreviousJobsFromWorkspace(pastJobsPath);

// Aggregated results across all child jobs.
def failedRuns = []
def abortedRuns = []
def isFailed = false
def isAborted = false
def ongoingJobs = 0;

currentJobs.each { jobName, jobInfo ->
    // If the job ran in the past, we check if the IDs changed since.
    // Fetch past results only if the job is not of type canary.
    if (!jobName.contains('_kcanary_lcanary') && pastJobs.containsKey(jobName) &&
            build.getBuildVariables().get('FORCE_JOB_RUN') == 'false') {
        pastJob = pastJobs[jobName];

        // If the code has not changed report previous status.
        if (pastJob['config'] == jobInfo['config']) {
            // if the config has not changed, we keep it.
            // if it's failed, we don't launch a new job and keep it failed.
            jobInfo['status'] = pastJob['status'];
            if (pastJob['status'] == 'FAILED' &&
                    build.getBuildVariables().get('FORCE_FAILED_JOB_RUN') == 'false') {
                // Typo fix: these three messages previously read "as not
                // changed" instead of "has not changed".
                println("${jobName} has not changed since the last failed run. Don't run it again.");
                // Mark the umbrella job for failure but still run the jobs
                // that changed since the last run.
                isFailed = true;
                return;
            } else if (pastJob['status'] == 'ABORTED') {
                println("${jobName} has not changed since last aborted run. Run it again.");
            } else if (pastJob['status'] == 'SUCCEEDED') {
                println("${jobName} has not changed since the last successful run. Don't run it again.");
                return;
            }
        }
    }

    jobInfo['status'] = 'PENDING';
    jobInfo['build'] = LaunchJob(jobName, jobInfo);
    if (jobInfo['build'] != null) {
        ongoingJobs += 1;
    }
}
448 | ||
// Some jobs may have a null build immediately if LaunchJob failed for some
// reason (or were skipped as unchanged); those jobs can be removed right
// away. Collect the names first so we never mutate the map while iterating.
def nullBuildJobNames = currentJobs.findAll { name, info ->
    info['build'] == null
}.keySet().toList()

nullBuildJobNames.each { name ->
    println(String.format("Removing job '%s' since build is null", name));
    currentJobs.remove(name);
}
460 | ||
802e75a7 FD |
// Poll the scheduled downstream builds every 30 seconds until they have all
// completed or been cancelled, recording each job's final status in
// currentJobs and collecting failed/aborted names for the end-of-run
// summary. The 'build' futures are nulled as soon as a job finishes because
// currentJobs is later serialized to disk and the futures are not
// serializable.
while (ongoingJobs > 0) {
    currentJobs.each { jobName, jobInfo ->

        // Only PENDING jobs still have a live build future to poll.
        if (jobInfo['status'] != 'PENDING') {
            return;
        }

        jobBuild = jobInfo['build']

        // The isCancelled() method checks if the run was cancelled before
        // execution. We consider such run as being aborted.
        if (jobBuild.isCancelled()) {
            println("${jobName} was cancelled before launch.")
            isAborted = true;
            abortedRuns.add(jobName);
            ongoingJobs -= 1;
            jobInfo['status'] = 'ABORTED'
            // Invalidate the build field, as it's not serializable and we don't need
            // it anymore.
            jobInfo['build'] = null;
        } else if (jobBuild.isDone()) {

            // The build is done, so get() returns immediately.
            jobExitStatus = jobBuild.get();

            // Invalidate the build field, as it's not serializable and we don't need
            // it anymore.
            jobInfo['build'] = null;
            println("${jobExitStatus.fullDisplayName} completed with status ${jobExitStatus.result}.");

            // If the job didn't succeed, add its name to the right list so it can
            // be printed at the end of the execution.
            ongoingJobs -= 1;
            switch (jobExitStatus.result) {
            case Result.ABORTED:
                isAborted = true;
                abortedRuns.add(jobName);
                jobInfo['status'] = 'ABORTED'
                break;
            case Result.FAILURE:
                isFailed = true;
                failedRuns.add(jobName);
                jobInfo['status'] = 'FAILED'
                break;
            case Result.SUCCESS:
                jobInfo['status'] = 'SUCCEEDED'
                break;
            default:
                // Other results (e.g. UNSTABLE) leave the status as PENDING;
                // NOTE(review): such a job keeps a null 'build' and would be
                // re-polled — presumably never produced in practice; confirm.
                break;
            }
        }
    }

    // Sleep before the next iteration.
    try {
        Thread.sleep(30000)
    } catch(e) {
        // An interrupt means this umbrella build itself was aborted: mark it
        // so and propagate. NOTE(review): the re-thrown InterruptedException
        // drops the original as its cause — presumably intentional.
        if (e in InterruptedException) {
            build.setResult(hudson.model.Result.ABORTED)
            throw new InterruptedException()
        } else {
            throw(e)
        }
    }
}
525 | ||
802e75a7 FD |
// All jobs are done running. Persist their exit statuses so the next run can
// skip configurations that have not changed.
SaveCurrentJobsToWorkspace(currentJobs, pastJobsPath);

// Summarize failed runs.
if (failedRuns.size() > 0) {
    println("Failed job(s):");
    failedRuns.each { failedRun ->
        println("\t" + failedRun)
    }
}

// Summarize cancelled runs.
if (abortedRuns.size() > 0) {
    println("Cancelled job(s):");
    abortedRuns.each { cancelledRun ->
        println("\t" + cancelledRun)
    }
}

// Mark this build as failed if at least one child build failed; mark it
// aborted if there was no failure but at least one child was aborted.
if (isFailed) {
    build.setResult(hudson.model.Result.FAILURE)
} else if (isAborted) {
    build.setResult(hudson.model.Result.ABORTED)
}