-// For each top of branch commits that were not seen before, schedule one job
-// for each lttng/linux tracked configurations
-toolsHeadCommits.each { toolsHead ->
- if (!oldToolsHeadCommits.contains(toolsHead.value)) {
- linuxLastTagIds.each { linuxTag ->
- def lttngBranch = toolsHead.key
- if (configurationOfInterest.contains([lttngBranch, linuxTag.key])) {
- runConfigs.add([linuxTag.key, linuxTag.value,
- lttngBranch, toolsHeadCommits[lttngBranch],
- modulesHeadCommits[lttngBranch], ustHeadCommits[lttngBranch]]
- as RunConfiguration)
-
- newOldToolsHeadCommits.add(toolsHead.value)
+// Add a canary job: a configuration pinned to fixed branches and commits.
+// Since its inputs never change, a failure here points at an infrastructure
+// problem rather than a code regression. Canary jobs are excluded from the
+// past-result reuse logic below (see the '_canary' name check).
+def jobNameCanary = jobType + "_canary";
+currentJobs[jobNameCanary] = [
+    'config': [
+        'linuxBranch': 'v4.4.194',
+        'lttngBranch': 'v2.10.7',
+        'linuxTagID': 'a227f8436f2b21146fc024d84e6875907475ace2',
+        'toolsCommit': '93fa2c9ff6b52c30173bee80445501ce8677fecc',
+        'modulesCommit': 'fe3ca7a9045221ffbedeac40ba7e09b1fc500e21',
+        'ustCommit': '0172ce8ece2102d46c7785e6bd96163225c59e49'
+    ],
+    'status': 'NOT_SET',
+    'build': null
+];
+
+// Results of the previous run, used below to skip configurations whose
+// inputs have not changed since.
+def pastJobs = LoadPreviousJobsFromWorkspace(pastJobsPath);
+
+// Aggregate state for this run.
+def failedRuns = [];
+def abortedRuns = [];
+def isFailed = false;
+def isAborted = false;
+def ongoingJobs = 0;
+
+currentJobs.each { jobName, jobInfo ->
+ // If the job ran in the past, we check if the IDs changed since.
+ // Fetch past results only if the job is not of type canary or fuzzing.
+ if (!jobName.contains('_canary') && !jobName.contains('_fuzzing') &&
+ pastJobs.containsKey(jobName) &&
+ build.getBuildVariables().get('FORCE_JOB_RUN') == 'false') {
+ pastJob = pastJobs[jobName];
+
+ // If the code has not changed report previous status.
+ if (pastJob['config'] == jobInfo['config']) {
+ // if the config has not changed, we keep it.
+ // if it's failed, we don't launch a new job and keep it failed.
+ jobInfo['status'] = pastJob['status'];
+ if (pastJob['status'] == 'FAILED' &&
+ build.getBuildVariables().get('FORCE_FAILED_JOB_RUN') == 'false') {
+ println("${jobName} as not changed since the last failed run. Don't run it again.");
+      // Mark the umbrella job for failure but still run the jobs that changed
+      // since the last run.
+ isFailed = true;
+ return;
+ } else if (pastJob['status'] == 'ABORTED') {
+ println("${jobName} as not changed since last aborted run. Run it again.");
+ } else if (pastJob['status'] == 'SUCCEEDED') {
+ println("${jobName} as not changed since the last successful run. Don't run it again.");
+ return;