-// For each top of branch commits of LTTng-Tools that were not seen before,
-// schedule one job for each lttng/linux tracked configurations
-toolsHeadCommits.each { toolsHead ->
- if (!oldToolsHeadCommits.contains(toolsHead.value)) {
- linuxLastTagIds.each { linuxTag ->
- def lttngBranch = toolsHead.key
- if (configurationOfInterest.contains([lttngBranch, linuxTag.key])) {
- runConfigs.add([linuxTag.key, linuxTag.value,
- lttngBranch, toolsHeadCommits[lttngBranch],
- modulesHeadCommits[lttngBranch], ustHeadCommits[lttngBranch]]
- as RunConfiguration)
-
- newOldToolsHeadCommits.add(toolsHead.value)
+// Add a canary job with pinned kernel/LTTng versions; canary jobs bypass the past-results check below and always run.
+def jobNameCanary = jobType + "_kcanary_lcanary";
+currentJobs[jobNameCanary] = [:];
+currentJobs[jobNameCanary]['config'] = [:];
+currentJobs[jobNameCanary]['config']['linuxBranch'] = 'v5.15.112';
+currentJobs[jobNameCanary]['config']['lttngBranch'] = 'v2.13.9';
+currentJobs[jobNameCanary]['config']['linuxTagID'] ='9d6bde853685609a631871d7c12be94fdf8d912e'; // v5.15.112
+currentJobs[jobNameCanary]['config']['toolsCommit'] = '2ff0385718ff894b3d0e06f3961334c20c5436f8' // v2.13.9
+currentJobs[jobNameCanary]['config']['modulesCommit'] = 'da1f5a264fff33fc5a9518e519fb0084bf1074af' // v2.13.9
+currentJobs[jobNameCanary]['config']['ustCommit'] = 'de624c20694f69702b42c5d47b5bcf692293a238' // v2.13.5
+currentJobs[jobNameCanary]['status'] = 'NOT_SET';
+currentJobs[jobNameCanary]['build'] = null;
+
+def pastJobs = LoadPreviousJobsFromWorkspace(pastJobsPath);
+
+def failedRuns = []
+def abortedRuns = []
+def isFailed = false
+def isAborted = false
+def ongoingJobs = 0;
+
+currentJobs.each { jobName, jobInfo ->
+ // If the job ran in the past, check whether the commit IDs have changed since then.
+ // Fetch past results only if the job is not of type canary.
+ if (!jobName.contains('_kcanary_lcanary') && pastJobs.containsKey(jobName) &&
+ build.getBuildVariables().get('FORCE_JOB_RUN') == 'false') {
+ pastJob = pastJobs[jobName];
+
+ // If the code has not changed, report the previous status.
+ if (pastJob['config'] == jobInfo['config']) {
+ // The config is unchanged, so reuse the previous status.
+ // If it previously failed, don't launch a new job; keep it marked as failed.
+ jobInfo['status'] = pastJob['status'];
+ if (pastJob['status'] == 'FAILED' &&
+ build.getBuildVariables().get('FORCE_FAILED_JOB_RUN') == 'false') {
+ println("${jobName} as not changed since the last failed run. Don't run it again.");
+ // Mark the umbrella job as failed, but still run the jobs that changed since
+ // the last run.
+ isFailed = true;
+ return;
+ } else if (pastJob['status'] == 'ABORTED') {
+ println("${jobName} as not changed since last aborted run. Run it again.");
+ } else if (pastJob['status'] == 'SUCCEEDED') {
+ println("${jobName} as not changed since the last successful run. Don't run it again.");
+ return;