Commit | Line | Data |
---|---|---|
962ee225 FD |
1 | /** |
2 | * Copyright (C) 2017 - Francis Deslauriers <francis.deslauriers@efficios.com> | |
3 | * | |
4 | * This program is free software: you can redistribute it and/or modify | |
5 | * it under the terms of the GNU General Public License as published by | |
6 | * the Free Software Foundation, either version 3 of the License, or | |
7 | * (at your option) any later version. | |
8 | * | |
9 | * This program is distributed in the hope that it will be useful, | |
10 | * but WITHOUT ANY WARRANTY; without even the implied warranty of | |
11 | * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
12 | * GNU General Public License for more details. | |
13 | * | |
14 | * You should have received a copy of the GNU General Public License | |
15 | * along with this program. If not, see <http://www.gnu.org/licenses/>. | |
16 | */ | |
17 | ||
18 | import hudson.console.HyperlinkNote | |
19 | import hudson.model.* | |
20 | import java.io.File | |
21 | import org.eclipse.jgit.api.Git | |
22 | import org.eclipse.jgit.lib.Ref | |
5a754cf7 | 23 | import groovy.transform.EqualsAndHashCode |
962ee225 FD |
24 | |
// Thrown when a kernel version string does not match the expected
// "vX.Y[.Z][.W][-rcN]" format.
class InvalidKVersionException extends Exception {
    InvalidKVersionException(String message) {
        super(message)
    }
}
30 | ||
// Thrown when an empty or null kernel version string is given to the parser.
class EmptyKVersionException extends Exception {
    EmptyKVersionException(String message) {
        super(message)
    }
}
36 | ||
// Models a vanilla (upstream) Linux kernel version string such as "v4.4.9",
// "v2.6.32.42" or "v4.10-rc3", and makes versions comparable.
//
// Ordering is major, then majorB (only meaningful for pre-3.0 kernels), then
// minor, patch and finally -rcN. A final release sorts after all of its
// release candidates because `rc` defaults to Integer.MAX_VALUE.
class VanillaKVersion implements Comparable<VanillaKVersion> {

    Integer major = 0
    // Second component of pre-3.0 kernels (e.g. the "6" in v2.6.32).
    Integer majorB = 0
    Integer minor = 0
    Integer patch = 0
    // Integer.MAX_VALUE means "not a release candidate".
    Integer rc = Integer.MAX_VALUE
    // True when a patch level is present (e.g. v4.4.9), i.e. the version
    // belongs to a stable branch.
    Boolean inStable = false;

    VanillaKVersion() {}

    VanillaKVersion(version) {
        this.parse(version)
    }

    // Smallest possible version; useful as a comparison seed.
    static VanillaKVersion minKVersion() {
        return new VanillaKVersion("v0.0.0")
    }

    // Largest possible version; useful as a comparison seed.
    static VanillaKVersion maxKVersion() {
        return new VanillaKVersion("v" + Integer.MAX_VALUE + ".0.0")
    }

    static VanillaKVersion factory(version) {
        return new VanillaKVersion(version)
    }

    // Parse a "vX.Y[.Z][.W][-rcN]" string into this instance, resetting all
    // fields first so an instance can safely be reused for several versions.
    //
    // Throws EmptyKVersionException on a null/empty string and
    // InvalidKVersionException when the string does not match the format.
    def parse(version) {
        this.major = 0
        this.majorB = 0
        this.minor = 0
        this.patch = 0
        this.rc = Integer.MAX_VALUE
        // Bug fix: inStable was the only field not reset here, so reusing an
        // instance to parse a non-stable version after a stable one kept it
        // stuck at true.
        this.inStable = false

        if (!version) {
            throw new EmptyKVersionException("Empty kernel version")
        }

        def match = version =~ /^v(\d+)\.(\d+)(\.(\d+))?(\.(\d+))?(-rc(\d+))?$/
        if (!match) {
            throw new InvalidKVersionException("Invalid kernel version: ${version}")
        }

        Integer offset = 0;

        // Major
        this.major = Integer.parseInt(match.group(1))
        if (this.major <= 2) {
            // Pre-3.0 kernels (v2.6.x) carry an extra leading component;
            // shift the capture-group indices of minor/patch accordingly.
            offset = 2
            this.majorB = Integer.parseInt(match.group(2))
        }

        // Minor
        if (match.group(2 + offset) != null) {
            this.minor = Integer.parseInt(match.group(2 + offset))
        }

        // Patch level; its presence marks a stable-branch release.
        if (match.group(4 + offset) != null) {
            this.patch = Integer.parseInt(match.group(4 + offset))
            this.inStable = true
        }

        // Release candidate number, when present.
        if (match.group(8) != null) {
            this.rc = Integer.parseInt(match.group(8))
        }
    }

    Boolean isInStableBranch() {
        return this.inStable
    }

    // Return true if both versions are of the same stable branch (same
    // major/majorB/minor); patch level and -rc number are ignored.
    Boolean isSameStable(VanillaKVersion o) {
        if (this.major != o.major) {
            return false
        }
        if (this.majorB != o.majorB) {
            return false
        }
        if (this.minor != o.minor) {
            return false
        }

        return true
    }

    // Lexicographic comparison over (major, majorB, minor, patch, rc).
    @Override int compareTo(VanillaKVersion o) {
        if (this.major != o.major) {
            return Integer.compare(this.major, o.major)
        }
        if (this.majorB != o.majorB) {
            return Integer.compare(this.majorB, o.majorB)
        }
        if (this.minor != o.minor) {
            return Integer.compare(this.minor, o.minor)
        }
        if (this.patch != o.patch) {
            return Integer.compare(this.patch, o.patch)
        }
        if (this.rc != o.rc) {
            return Integer.compare(this.rc, o.rc)
        }

        // Same version
        return 0;
    }

    // Render back to the canonical "vX[.B].Y[.Z][-rcN]" form; components left
    // at their defaults (majorB/patch == 0, rc == MAX_VALUE) are omitted.
    String toString() {
        String vString = "v${this.major}"

        if (this.majorB > 0) {
            vString = vString.concat(".${this.majorB}")
        }

        vString = vString.concat(".${this.minor}")

        if (this.patch > 0) {
            vString = vString.concat(".${this.patch}")
        }

        if (this.rc > 0 && this.rc < Integer.MAX_VALUE) {
            vString = vString.concat("-rc${this.rc}")
        }
        return vString
    }
}
165 | ||
// Value object describing one test run: a Linux branch and tag paired with an
// LTTng branch and the commit IDs of its three components. Field-based
// equality (@EqualsAndHashCode) lets configurations be deduplicated in Sets.
@EqualsAndHashCode(includeFields=true)
class RunConfiguration {
    def linuxBranch
    def linuxTagId
    def lttngBranch
    def lttngModulesCommitId
    def lttngToolsCommitId
    def lttngUstCommitId

    RunConfiguration(linuxBranch, linuxTagId, lttngBranch, lttngToolsCommitId,
                     lttngModulesCommitId, lttngUstCommitId) {
        this.linuxBranch = linuxBranch
        this.linuxTagId = linuxTagId
        this.lttngBranch = lttngBranch
        this.lttngModulesCommitId = lttngModulesCommitId
        this.lttngToolsCommitId = lttngToolsCommitId
        this.lttngUstCommitId = lttngUstCommitId
    }

    // Format: "<linuxBranch>:{<linuxTagId>}, <lttngBranch>:{<modules>, <tools>,<ust>}"
    String toString() {
        return "${linuxBranch}:{${linuxTagId}}, ${lttngBranch}:{${lttngModulesCommitId}, ${lttngToolsCommitId},${lttngUstCommitId}}"
    }
}
190 | ||
// Load the collection of previously-built Git object IDs that an earlier run
// serialized to `ondiskpath`. Returns an empty list when the file is missing
// or unreadable (e.g. on the very first run), so callers always get a
// collection back.
def LoadPreviousIdsFromWorkspace = { ondiskpath ->
    def previousIds = []
    try {
        // withCloseable guarantees the stream is closed even when
        // readObject() throws; the original code leaked the descriptor on
        // failure and also created an unused File local.
        new ObjectInputStream(new FileInputStream(ondiskpath)).withCloseable { input ->
            previousIds = input.readObject()
        }
    } catch (e) {
        println("Failed to load previous Git object IDs from disk." + e);
    }
    return previousIds
}
203 | ||
// Serialize the Git object IDs seen during this run to `ondiskpath` so the
// next run can skip already-built commits/tags. Failures are logged but not
// fatal: worst case, some jobs are re-triggered on the next run.
def saveCurrentIdsToWorkspace = { currentIds, ondiskpath ->
    try {
        // FileOutputStream creates the file itself, so the former
        // File.createNewFile() call was redundant. withCloseable ensures the
        // stream is flushed and closed even when writeObject() throws.
        new ObjectOutputStream(new FileOutputStream(ondiskpath)).withCloseable { out ->
            out.writeObject(currentIds)
        }
    } catch (e) {
        // Bug fix: the message used to say "from disk" for a save operation.
        println("Failed to save previous Git object IDs to disk." + e);
    }
}
215 | ||
// Query `remoteRepo` (git ls-remote, heads only) and return a map of
// branch name -> HEAD commit SHA, restricted to `branchesOfInterest`.
def GetHeadCommits = { remoteRepo, branchesOfInterest ->
    def headRefs = Git.lsRemoteRepository()
                      .setTags(false)
                      .setHeads(true)
                      .setRemote(remoteRepo).call()

    def remoteHeads = [:]
    headRefs.each { ref ->
        def branchName = ref.getName().replaceAll('refs/heads/', '')
        if (branchesOfInterest.contains(branchName)) {
            remoteHeads[branchName] = ref.getObjectId().name()
        }
    }

    return remoteHeads
}
231 | ||
// Query `remoteRepo` (git ls-remote, tags only) and return a map of
// tag name -> commit SHA, skipping release-candidate ("-rc") tags.
def GetTagIds = { remoteRepo ->
    def tagRefs = Git.lsRemoteRepository()
                     .setTags(true)
                     .setHeads(false)
                     .setRemote(remoteRepo).call()

    def remoteTags = [:]
    // Exclude release candidate tags up front, then index the rest by name.
    tagRefs.findAll { !it.getName().contains('-rc') }.each { ref ->
        remoteTags[ref.getName().replaceAll('refs/tags/', '')] = ref.getObjectId().name()
    }

    return remoteTags
}
248 | ||
// Given a map of tag name -> SHA and a branch name, return (as a string) the
// newest tag belonging to that branch: the highest "vX.Y.Z" for a
// "linux-X.Y.y" stable branch, or the newest non-stable tag for "master".
def GetLastTagOfBranch = { tagRefs, branch ->
    def tagVersions = tagRefs.collect { new VanillaKVersion(it.key) }
    def currMax = VanillaKVersion.minKVersion();
    if (!branch.contains('master')) {
        // "linux-4.4.y" -> "v4.4". Bug fix: the ".y" suffix is now stripped
        // with an escaped, anchored regex; the original used '.y' where '.'
        // is a regex metacharacter matching any character.
        def targetVersion = new VanillaKVersion(branch.replaceAll('linux-', 'v').replaceAll(/\.y$/, ''))
        tagVersions.each {
            if (it.isSameStable(targetVersion) && currMax < it) {
                currMax = it;
            }
        }
    } else {
        tagVersions.each {
            if (!it.isInStableBranch() && currMax < it) {
                currMax = it;
            }
        }
    }
    return currMax.toString()
}
270 | ||
// Returns the latest tags of each of the branches passed in the argument:
// branch name -> commit SHA of that branch's newest non-RC tag.
def GetLastTagIds = { remoteRepo, branchesOfInterest ->
    // NOTE: the original also fetched branch heads here (GetHeadCommits) but
    // never used the result; that pointless remote round-trip was removed.
    def remoteTagRefs = GetTagIds(remoteRepo)
    def remoteLastTagCommit = [:]

    // Drop v2.x-era tags; only v3+ kernels are of interest here.
    remoteTagRefs = remoteTagRefs.findAll { !it.key.contains("v2.") }
    branchesOfInterest.each {
        remoteLastTagCommit[it] = remoteTagRefs[GetLastTagOfBranch(remoteTagRefs, it)]
    }

    return remoteLastTagCommit
}
284 | ||
// Build the canonical job name for a job type and run configuration,
// e.g. "vm_tests_kmaster_lstable-2.10".
def CraftJobName = { jobType, runConfig ->
    "${jobType}_k${runConfig.linuxBranch}_l${runConfig.lttngBranch}"
}
288 | ||
// Schedule a downstream build of `jobName`, seeding its parameters with the
// job's declared defaults and then appending the commit/tag IDs taken from
// `runConfig`. Returns the scheduled build's future, or null when the job
// does not exist or is disabled.
def LaunchJob = { jobName, runConfig ->
    def job = Hudson.instance.getJob(jobName)

    // Start from the job's default parameter values.
    def params = []
    job.getProperty(ParametersDefinitionProperty.class).getParameterDefinitions().each {
        params += it.getDefaultParameterValue();
    }

    // Override the source revisions for this particular run.
    params.add(new StringParameterValue('LTTNG_TOOLS_COMMIT_ID', runConfig.lttngToolsCommitId))
    params.add(new StringParameterValue('LTTNG_MODULES_COMMIT_ID', runConfig.lttngModulesCommitId))
    params.add(new StringParameterValue('LTTNG_UST_COMMIT_ID', runConfig.lttngUstCommitId))
    params.add(new StringParameterValue('KERNEL_TAG_ID', runConfig.linuxTagId))

    def currBuild = job.scheduleBuild2(0, new Cause.UpstreamCause(build), new ParametersAction(params))
    if (currBuild != null) {
        println("Launching job: ${HyperlinkNote.encodeTo('/' + job.url, job.fullDisplayName)}");
    } else {
        println("Job ${jobName} not found or deactivated.");
    }

    return currBuild
}
310 | ||
962ee225 FD |
// Upstream repositories polled by this trigger script.
final String toolsRepo = "https://github.com/lttng/lttng-tools.git"
final String modulesRepo = "https://github.com/lttng/lttng-modules.git"
final String ustRepo = "https://github.com/lttng/lttng-ust.git"
final String linuxRepo = "git://git.kernel.org/pub/scm/linux/kernel/git/stable/linux-stable.git"

// Workspace files where the Git object IDs of previously built revisions are
// persisted between runs of this script.
final String toolsOnDiskPath = build.getEnvironment(listener).get('WORKSPACE') + "/on-disk-tools-ref"
final String modulesOnDiskPath = build.getEnvironment(listener).get('WORKSPACE') + "/on-disk-modules-ref"
final String ustOnDiskPath = build.getEnvironment(listener).get('WORKSPACE') + "/on-disk-ust-ref"
final String linuxOnDiskPath = build.getEnvironment(listener).get('WORKSPACE') + "/on-disk-linux-ref"

// Current LTTng branches, tested against current kernel branches.
def recentLttngBranchesOfInterest = ['master', 'stable-2.10', 'stable-2.9']
def recentLinuxBranchesOfInterest = ['master', 'linux-4.9.y', 'linux-4.4.y']

// Older LTTng releases are only validated against the kernels of their era.
def legacyLttngBranchesOfInterest = ['stable-2.7']
def legacyLinuxBranchesOfInterest = ['linux-3.18.y', 'linux-4.4.y']
326 | ||
// Generate configurations of interest: the set of [lttngBranch, linuxBranch]
// pairs we actually test — the cartesian product of the recent branch lists
// plus the cartesian product of the legacy ones.
def configurationOfInterest = [] as Set

configurationOfInterest.addAll([recentLttngBranchesOfInterest, recentLinuxBranchesOfInterest].combinations())
configurationOfInterest.addAll([legacyLttngBranchesOfInterest, legacyLinuxBranchesOfInterest].combinations())
341 | ||
def lttngBranchesOfInterest = recentLttngBranchesOfInterest + legacyLttngBranchesOfInterest
def linuxBranchesOfInterest = recentLinuxBranchesOfInterest + legacyLinuxBranchesOfInterest

// For LTTng branches, we look for new commits (branch HEAD SHAs).
def toolsHeadCommits = GetHeadCommits(toolsRepo, lttngBranchesOfInterest)
def modulesHeadCommits = GetHeadCommits(modulesRepo, lttngBranchesOfInterest)
def ustHeadCommits = GetHeadCommits(ustRepo, lttngBranchesOfInterest)

// For Linux branches, we look for new non-RC tags.
def linuxLastTagIds = GetLastTagIds(linuxRepo, linuxBranchesOfInterest)

// Load previously built Linux tag ids.
println("Loading Git object IDs of previously built projects from the workspace.");
def oldLinuxTags = LoadPreviousIdsFromWorkspace(linuxOnDiskPath) as Set

// Load previously built LTTng commit ids.
def oldToolsHeadCommits = LoadPreviousIdsFromWorkspace(toolsOnDiskPath) as Set
def oldModulesHeadCommits = LoadPreviousIdsFromWorkspace(modulesOnDiskPath) as Set
def oldUstHeadCommits = LoadPreviousIdsFromWorkspace(ustOnDiskPath) as Set

// The "newOld" sets accumulate the IDs scheduled during this run and are
// persisted at the end of the script.
// NOTE(review): these are aliases of the sets above, not copies — both names
// refer to the same mutable Set instance.
def newOldLinuxTags = oldLinuxTags
def newOldToolsHeadCommits = oldToolsHeadCommits
def newOldModulesHeadCommits = oldModulesHeadCommits
def newOldUstHeadCommits = oldUstHeadCommits
366 | ||
// Canary jobs are run daily to make sure the lava pipeline is working properly.
// The configuration is pinned to fixed, known revisions so that a canary
// failure points at the pipeline itself rather than at code changes.
def canaryRunConfigs = [] as Set
canaryRunConfigs.add(
    // Coerced to RunConfiguration(linuxBranch, linuxTagId, lttngBranch,
    //                             lttngToolsCommitId, lttngModulesCommitId, lttngUstCommitId).
    ['v4.4.9', '1a1a512b983108015ced1e7a7c7775cfeec42d8c', 'v2.8.1','d11e0db', '7fd9215', '514a87f'] as RunConfiguration)

// All run configurations to schedule in this invocation; a Set so duplicate
// configurations (same branch/commit combination) collapse to one job.
def runConfigs = [] as Set
373 | ||
// For each top of branch kernel tags that were not seen before, schedule one
// job for each lttng/linux tracked configurations.
linuxLastTagIds.each { linuxTag ->
    if (!oldLinuxTags.contains(linuxTag.value)) {
        lttngBranchesOfInterest.each { lttngBranch ->
            if (configurationOfInterest.contains([lttngBranch, linuxTag.key])) {
                // Pair the new kernel tag with the current HEADs of each
                // LTTng component on this branch.
                runConfigs.add([linuxTag.key, linuxTag.value,
                                lttngBranch, toolsHeadCommits[lttngBranch],
                                modulesHeadCommits[lttngBranch], ustHeadCommits[lttngBranch]]
                                as RunConfiguration)

                // Remember this tag so it is not re-triggered by future runs.
                newOldLinuxTags.add(linuxTag.value)
            }
        }
    }
}
390 | ||
// For each top-of-branch commit of an LTTng component (tools, modules, UST)
// that was not seen before, schedule one job for each tracked lttng/linux
// configuration. The three components previously had three copy-pasted
// loops with identical logic; that logic is now factored into one helper.
//
// headCommits:   branch name -> current HEAD SHA for one component
// oldCommits:    SHAs already built by previous runs (skipped)
// newOldCommits: accumulator of scheduled SHAs, persisted at end of script
def scheduleNewLttngCommits = { headCommits, oldCommits, newOldCommits ->
    headCommits.each { head ->
        if (!oldCommits.contains(head.value)) {
            linuxLastTagIds.each { linuxTag ->
                def lttngBranch = head.key
                if (configurationOfInterest.contains([lttngBranch, linuxTag.key])) {
                    // A new commit in one component schedules a run using the
                    // current HEADs of all three components on that branch.
                    runConfigs.add([linuxTag.key, linuxTag.value,
                                    lttngBranch, toolsHeadCommits[lttngBranch],
                                    modulesHeadCommits[lttngBranch], ustHeadCommits[lttngBranch]]
                                    as RunConfiguration)

                    newOldCommits.add(head.value)
                }
            }
        }
    }
}

// Same processing order as the original three loops: tools, modules, UST.
scheduleNewLttngCommits(toolsHeadCommits, oldToolsHeadCommits, newOldToolsHeadCommits)
scheduleNewLttngCommits(modulesHeadCommits, oldModulesHeadCommits, newOldModulesHeadCommits)
scheduleNewLttngCommits(ustHeadCommits, oldUstHeadCommits, newOldUstHeadCommits)
444 | ||
def ongoingBuild = [:]   // job name -> future of a scheduled child build.
def failedRuns = []      // names of child jobs that finished FAILURE.
def abortedRuns = []     // names of child jobs cancelled or ABORTED.
def isFailed = false
def isAborted = false

// Check what type of jobs should be triggered. The name of the trigger job
// this script runs inside encodes the job type to launch.
triggerJobName = build.project.getFullDisplayName();
if (triggerJobName.contains("vm_tests")) {
    jobType = 'vm_tests';
} else if (triggerJobName.contains("baremetal_tests")) {
    jobType = 'baremetal_tests';
} else if (triggerJobName.contains("baremetal_benchmarks")) {
    jobType = 'baremetal_benchmarks';
}
// NOTE(review): if none of the patterns match, jobType stays undefined and
// its first use below raises MissingPropertyException — confirm trigger jobs
// are always named after one of the three types.
962ee225 | 460 | |
// Launch regular jobs: one child build per new run configuration computed
// above, plus a fuzzing job for master/master when running vm_tests.
if (runConfigs.size() > 0) {
    println("\nSchedule jobs triggered by code changes:");
    runConfigs.each { config ->
        def jobName = CraftJobName(jobType, config);
        def currBuild = LaunchJob(jobName, config);

        // LaunchJob will return null if the job doesn't exist or is disabled.
        if (currBuild != null) {
            ongoingBuild[jobName] = currBuild;
        }

        // Jobs to run only on master branchs of both Linux and LTTng.
        if (config.linuxBranch.contains('master') &&
            config.lttngBranch.contains('master')) {
            // vm_tests specific.
            if (jobType.contains("vm_tests")) {
                jobName = CraftJobName('vm_tests_fuzzing', config);
                currBuild = LaunchJob(jobName, config);

                // LaunchJob will return null if the job doesn't exist or is disabled.
                if (currBuild != null) {
                    ongoingBuild[jobName] = currBuild;
                }
            }
        }
    }
} else {
    println("No new commit or tags, nothing more to do.")
}
5a754cf7 | 491 | |
// Launch canary jobs. These run on every invocation (the trigger itself runs
// daily) to validate the pipeline with pinned, known-good revisions.
println("\nSchedule canary jobs once a day:")
canaryRunConfigs.each { canaryConfig ->
    def canaryJobName = jobType + '_canary';
    def scheduled = LaunchJob(canaryJobName, canaryConfig);

    // A null return means the job doesn't exist or is disabled; only track
    // builds that were actually scheduled.
    if (scheduled != null) {
        ongoingBuild[canaryJobName] = scheduled;
    }
}
503 | ||
// Save the tag and commit IDs scheduled in the past and during this run to the
// workspace. We save it at the end to be sure all jobs were launched. We save
// the object IDs even in case of failure: there is no point re-running the
// same job if there are no code changes, even in case of failure.
println("Saving Git object IDs of previously built projects to the workspace.");
saveCurrentIdsToWorkspace(newOldLinuxTags, linuxOnDiskPath);
saveCurrentIdsToWorkspace(newOldToolsHeadCommits, toolsOnDiskPath);
saveCurrentIdsToWorkspace(newOldModulesHeadCommits, modulesOnDiskPath);
saveCurrentIdsToWorkspace(newOldUstHeadCommits, ustOnDiskPath);
513 | ||
// Iterate over all the running jobs. Record the status of completed jobs.
// Polling loop: every 10 seconds each still-tracked child build is checked;
// completed or cancelled ones are removed from `ongoingBuild` and their
// outcome recorded. The loop ends once every launched build has finished.
while (ongoingBuild.size() > 0) {
    def ongoingIterator = ongoingBuild.iterator();
    while (ongoingIterator.hasNext()) {
        currentBuild = ongoingIterator.next();

        jobName = currentBuild.getKey();
        job_run = currentBuild.getValue();

        // The isCancelled() method checks if the run was cancelled before
        // execution. We consider such run as being aborted.
        if (job_run.isCancelled()) {
            println("${jobName} was cancelled before launch.")
            abortedRuns.add(jobName);
            isAborted = true;
            ongoingIterator.remove();
        } else if (job_run.isDone()) {

            job_status = job_run.get();
            println("${job_status.fullDisplayName} completed with status ${job_status.result}.");

            // If the job didn't succeed, add its name to the right list so it can
            // be printed at the end of the execution.
            switch (job_status.result) {
            case Result.ABORTED:
                isAborted = true;
                abortedRuns.add(jobName);
                break;
            case Result.FAILURE:
                isFailed = true;
                failedRuns.add(jobName);
                break;
            case Result.SUCCESS:
            default:
                break;
            }

            ongoingIterator.remove();
        }
    }

    // Sleep before the next iteration.
    try {
        Thread.sleep(10000)
    } catch(e) {
        if (e in InterruptedException) {
            // This trigger build was aborted while sleeping: propagate the
            // abort after marking our own result.
            // NOTE(review): re-throwing a fresh InterruptedException drops the
            // original stack trace and does not re-set the thread's interrupt
            // flag — confirm this is the intended Jenkins abort behavior.
            build.setResult(hudson.model.Result.ABORTED)
            throw new InterruptedException()
        } else {
            throw(e)
        }
    }
}
567 | ||
// Report the names of failed child runs, if any.
if (!failedRuns.isEmpty()) {
    println("Failed job(s):");
    failedRuns.each { println("\t" + it) }
}

// Report the names of cancelled child runs, if any.
if (!abortedRuns.isEmpty()) {
    println("Cancelled job(s):");
    abortedRuns.each { println("\t" + it) }
}

// Mark this build Failed when at least one child build failed; mark it
// Aborted when nothing failed but at least one child was aborted.
if (isFailed) {
    build.setResult(hudson.model.Result.FAILURE)
} else if (isAborted) {
    build.setResult(hudson.model.Result.ABORTED)
}