#!/usr/bin/python
# Copyright (C) 2016 - Francis Deslauriers <francis.deslauriers@efficios.com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.

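# scripts/lttng-baremetal-tests/lava-submit.py (lttng-ci.git)
#
# This script builds a LAVA job definition that deploys a Jenkins-built
# kernel plus lttng-modules on a target (bare-metal x86 or KVM), runs the
# selected test or benchmark suite, submits the results bundle to the LAVA
# server and then polls the job until completion.
#
# Illustrative invocation (paths and commit ids are placeholders, not real
# values):
#
#   LAVA_JENKINS_TOKEN=<api token> ./lava-submit.py \
#       -t kvm-tests \
#       -j <jenkins job name> \
#       -k <kernel image path on the storage server> \
#       -km <linux-modules archive path> \
#       -lm <lttng-modules archive path> \
#       -tc <lttng-tools commit id> \
#       -uc <lttng-ust commit id>
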
import argparse
import base64
import json
import os
import sys
import time
import xmlrpc.client
from collections import OrderedDict
from enum import Enum

USERNAME = 'frdeso'
HOSTNAME = 'lava-master.internal.efficios.com'
SCP_PATH = 'scp://jenkins-lava@storage.internal.efficios.com'

class TestType(Enum):
    baremetal_benchmarks = 1
    baremetal_tests = 2
    kvm_tests = 3

def get_job_bundle_content(server, job):
    try:
        bundle_sha = server.scheduler.job_status(str(job))['bundle_sha1']
        bundle = server.dashboard.get(bundle_sha)
    except xmlrpc.client.Fault as f:
        print('Error while fetching results bundle', f.faultString)
        raise

    return json.loads(bundle['content'])

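# For reference, the decoded bundle content is consumed below roughly as the
# following structure (field names inferred from the accesses in this script;
# the values shown are illustrative only):
#
#   {
#       'test_runs': [
#           {
#               'test_id': 'lttng-kernel-test',
#               'test_results': [
#                   {
#                       'test_case_id': 'run-tests',
#                       'result': 'pass',
#                       'attachments': [
#                           {'pathname': 'processed_results_close.csv',
#                            'content': '<base64 data>'}
#                       ]
#                   }
#               ],
#               'attachments': [
#                   {'pathname': 'stdout.log', 'content': '<base64 data>'}
#               ]
#           }
#       ]
#   }
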
# Parse the results bundle to check whether the test cases of the
# lttng-kernel-tests run passed successfully
def check_job_all_test_cases_state_count(server, job):
    content = get_job_bundle_content(server, job)

    passed_tests = 0
    failed_tests = 0
    for run in content['test_runs']:
        for result in run['test_results']:
            if 'test_case_id' in result:
                if result['result'] in 'pass':
                    passed_tests += 1
                elif result['test_case_id'] in 'wait_for_test_image_prompt':
                    # FIXME: This test is part of the boot action and fails
                    # randomly but doesn't affect the behaviour of the tests.
                    # No reply on the Lava IRC channel yet. We should update
                    # our Lava installation and try to reproduce it. This error
                    # was encountered on the KVM trusty image only, not seen
                    # on Xenial at this point.
                    pass
                else:
                    failed_tests += 1
    return (passed_tests, failed_tests)

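# Hypothetical example: a bundle containing 42 passing test cases and none
# failing makes check_job_all_test_cases_state_count() return (42, 0); main()
# uses that pair to pick the script's exit status.
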
# Get the benchmark results from the lava bundle and
# save them as CSV files locally
def fetch_benchmark_results(server, job):
    content = get_job_bundle_content(server, job)
    testcases = ['processed_results_close.csv',
                 'processed_results_open_efault.csv',
                 'processed_results_dup_close.csv',
                 'processed_results_lttng_test_filter.csv']

    # The result bundle is a large JSON containing the results of every
    # testcase of the LAVA job as well as the files that were attached during
    # the run. We need to iterate over this JSON to get the base64
    # representation of the benchmark results produced during the run.
    for run in content['test_runs']:
        # We only care about the benchmark testcases
        if 'benchmark-' in run['test_id']:
            if 'test_results' in run:
                for res in run['test_results']:
                    if 'attachments' in res:
                        for a in res['attachments']:
                            # We only save the results files
                            if a['pathname'] in testcases:
                                with open(a['pathname'], 'wb') as f:
                                    # Decode the b64 representation of the
                                    # result file and write it to a file
                                    # in the current working directory
                                    f.write(base64.b64decode(a['content']))

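# The CSV files above end up in the current working directory of the machine
# running this script, presumably so that the calling Jenkins job can archive
# or plot them (an assumption based on the file names, not something enforced
# here).
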
# Parse the attachments of the test case to fetch the stdout of the test suite
def print_test_output(server, job):
    content = get_job_bundle_content(server, job)
    found = False

    for run in content['test_runs']:
        if run['test_id'] in 'lttng-kernel-test':
            for attachment in run['attachments']:
                if attachment['pathname'] in 'stdout.log':

                    # Decode the base64 file and split on newlines to iterate
                    # over the lines
                    testoutput = base64.b64decode(attachment['content']).decode('utf-8').split('\n')

                    # Create an iterator over the lines so that the iteration
                    # state is kept across the two loops.
                    testoutput_iter = iter(testoutput)
                    for line in testoutput_iter:

                        # Find the header of the test case and start printing
                        # from there
                        if 'LAVA_SIGNAL_STARTTC run-tests' in line:
                            found = True
                            print('---- TEST SUITE OUTPUT BEGIN ----')
                            for line in testoutput_iter:
                                if 'LAVA_SIGNAL_ENDTC run-tests' not in line:
                                    print(line)
                                else:
                                    # Print until we reach the end of the
                                    # section
                                    break

                    if found is True:
                        print('----- TEST SUITE OUTPUT END -----')
                        break

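# When the run-tests test case is found in stdout.log, the console output of
# this script looks roughly like (the test suite lines are illustrative):
#
#   ---- TEST SUITE OUTPUT BEGIN ----
#   <raw test suite output captured between the LAVA_SIGNAL_STARTTC and
#    LAVA_SIGNAL_ENDTC run-tests markers>
#   ----- TEST SUITE OUTPUT END -----
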
def create_new_job(name, build_device):
    job = OrderedDict({
        'health_check': False,
        'job_name': name,
        'device_type': build_device,
        'tags': [],
        'timeout': 18000,
        'actions': []
    })
    if build_device in 'x86':
        job['tags'].append('dev-sda1')

    return job

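# For example, create_new_job('my-job', build_device='x86') yields a skeleton
# along these lines ('my-job' is a placeholder name):
#
#   {
#       'health_check': False,
#       'job_name': 'my-job',
#       'device_type': 'x86',
#       'tags': ['dev-sda1'],
#       'timeout': 18000,
#       'actions': []
#   }
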
def get_boot_cmd():
    command = OrderedDict({
        'command': 'boot_image'
    })
    return command

def get_config_cmd(build_device):
    packages = ['bsdtar', 'psmisc', 'wget', 'python3', 'python3-pip',
                'libglib2.0-dev', 'libffi-dev', 'elfutils', 'libdw-dev',
                'libelf-dev', 'libmount-dev', 'libxml2', 'libpfm4-dev',
                'libnuma-dev']
    command = OrderedDict({
        'command': 'lava_command_run',
        'parameters': {
            'commands': [
                'cat /etc/resolv.conf',
                'echo nameserver 172.18.0.12 > /etc/resolv.conf',
                'groupadd tracing'
            ],
            'timeout': 300
        }
    })
    if build_device in 'x86':
        command['parameters']['commands'].extend([
            'mount /dev/sda1 /tmp',
            'rm -rf /tmp/*'])

    command['parameters']['commands'].extend([
        'depmod -a',
        'locale-gen en_US.UTF-8',
        'apt-get update',
        'apt-get upgrade',
        'apt-get install -y {}'.format(' '.join(packages))
    ])
    return command

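# Note: the 'dev-sda1' tag added in create_new_job() and the 'mount /dev/sda1
# /tmp' command above are assumed to work together: the tag restricts
# scheduling to an x86 device that has that scratch partition, which is then
# mounted and wiped to serve as temporary storage for the run.
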
def get_baremetal_benchmarks_cmd():
    command = OrderedDict({
        'command': 'lava_test_shell',
        'parameters': {
            'testdef_repos': [
                {
                    'git-repo': 'https://github.com/lttng/lttng-ci.git',
                    'revision': 'master',
                    'testdef': 'lava/baremetal-tests/failing-close.yml'
                },
                {
                    'git-repo': 'https://github.com/lttng/lttng-ci.git',
                    'revision': 'master',
                    'testdef': 'lava/baremetal-tests/failing-open-efault.yml'
                },
                {
                    'git-repo': 'https://github.com/lttng/lttng-ci.git',
                    'revision': 'master',
                    'testdef': 'lava/baremetal-tests/success-dup-close.yml'
                },
                {
                    'git-repo': 'https://github.com/lttng/lttng-ci.git',
                    'revision': 'master',
                    'testdef': 'lava/baremetal-tests/lttng-test-filter.yml'
                }
            ],
            'timeout': 18000
        }
    })
    return command

def get_baremetal_tests_cmd():
    command = OrderedDict({
        'command': 'lava_test_shell',
        'parameters': {
            'testdef_repos': [
                {
                    'git-repo': 'https://github.com/lttng/lttng-ci.git',
                    'revision': 'master',
                    'testdef': 'lava/baremetal-tests/perf-tests.yml'
                }
            ],
            'timeout': 18000
        }
    })
    return command

def get_kvm_tests_cmd():
    command = OrderedDict({
        'command': 'lava_test_shell',
        'parameters': {
            'testdef_repos': [
                {
                    'git-repo': 'https://github.com/lttng/lttng-ci.git',
                    'revision': 'master',
                    'testdef': 'lava/baremetal-tests/kernel-tests.yml'
                },
                {
                    'git-repo': 'https://github.com/lttng/lttng-ci.git',
                    'revision': 'master',
                    'testdef': 'lava/baremetal-tests/destructive-tests.yml'
                }
            ],
            'timeout': 18000
        }
    })
    return command

def get_results_cmd(stream_name):
    command = OrderedDict({
        'command': 'submit_results',
        'parameters': {
            'server': 'http://lava-master.internal.efficios.com/RPC2/'
        }
    })
    command['parameters']['stream'] = '/anonymous/' + stream_name + '/'
    return command

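# For instance, get_results_cmd(stream_name='tests-kernel') produces a
# submit_results action that pushes the bundle to the /anonymous/tests-kernel/
# stream on the LAVA server ('tests-kernel' is one of the stream names used in
# main() below).
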
def get_deploy_cmd_kvm(jenkins_job, kernel_path, linux_modules_path, lttng_modules_path):
    command = OrderedDict({
        'command': 'deploy_kernel',
        'metadata': {},
        'parameters': {
            'customize': {},
            'kernel': None,
            'target_type': 'ubuntu',
            'rootfs': 'file:///var/lib/lava-server/default/media/images/xenial.img.gz',
            'login_prompt': 'kvm02 login:',
            'username': 'root'
        }
    })

    command['parameters']['customize'][SCP_PATH + linux_modules_path] = ['rootfs:/', 'archive']
    command['parameters']['customize'][SCP_PATH + lttng_modules_path] = ['rootfs:/', 'archive']
    command['parameters']['kernel'] = str(SCP_PATH + kernel_path)
    command['metadata']['jenkins_jobname'] = jenkins_job

    return command

def get_deploy_cmd_x86(jenkins_job, kernel_path, linux_modules_path, lttng_modules_path, nb_iter=None):
    command = OrderedDict({
        'command': 'deploy_kernel',
        'metadata': {},
        'parameters': {
            'overlays': [],
            'kernel': None,
            'nfsrootfs': str(SCP_PATH + '/storage/jenkins-lava/rootfs/rootfs_amd64_trusty_2016-02-23-1134.tar.gz'),
            'target_type': 'ubuntu'
        }
    })

    command['parameters']['overlays'].append(str(SCP_PATH + linux_modules_path))
    command['parameters']['overlays'].append(str(SCP_PATH + lttng_modules_path))
    command['parameters']['kernel'] = str(SCP_PATH + kernel_path)
    command['metadata']['jenkins_jobname'] = jenkins_job
    if nb_iter is not None:
        command['metadata']['nb_iterations'] = nb_iter

    return command

def get_env_setup_cmd(build_device, lttng_tools_commit, lttng_ust_commit=None):
    command = OrderedDict({
        'command': 'lava_command_run',
        'parameters': {
            'commands': [
                'pip3 install --upgrade pip',
                'hash -r',
                'git clone https://github.com/frdeso/syscall-bench-it.git bm',
                'pip3 install vlttng',
            ],
            'timeout': 18000
        }
    })

    vlttng_cmd = 'vlttng --jobs=$(nproc) --profile urcu-master' \
                 ' --profile babeltrace-stable-1.4 ' \
                 ' --profile lttng-tools-master' \
                 ' --override projects.lttng-tools.checkout=' + lttng_tools_commit + \
                 ' --profile lttng-tools-no-man-pages'

    if lttng_ust_commit is not None:
        vlttng_cmd += ' --profile lttng-ust-master ' \
                      ' --override projects.lttng-ust.checkout=' + lttng_ust_commit + \
                      ' --profile lttng-ust-no-man-pages'

    virtenv_path = None
    if build_device in 'kvm':
        virtenv_path = '/root/virtenv'
    else:
        virtenv_path = '/tmp/virtenv'

    vlttng_cmd += ' ' + virtenv_path

    command['parameters']['commands'].append(vlttng_cmd)
    command['parameters']['commands'].append('ln -s ' + virtenv_path + ' /root/lttngvenv')
    command['parameters']['commands'].append('sync')

    return command

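# For illustration, with hypothetical commit ids the assembled vlttng command
# appended above looks roughly like (the virtualenv path is /root/virtenv on
# KVM, /tmp/virtenv otherwise):
#
#   vlttng --jobs=$(nproc) --profile urcu-master --profile babeltrace-stable-1.4 \
#       --profile lttng-tools-master \
#       --override projects.lttng-tools.checkout=<tools commit> \
#       --profile lttng-tools-no-man-pages --profile lttng-ust-master \
#       --override projects.lttng-ust.checkout=<ust commit> \
#       --profile lttng-ust-no-man-pages /root/virtenv
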
def main():
    test_type = None
    parser = argparse.ArgumentParser(description='Launch baremetal test using Lava')
    parser.add_argument('-t', '--type', required=True)
    parser.add_argument('-j', '--jobname', required=True)
    parser.add_argument('-k', '--kernel', required=True)
    parser.add_argument('-km', '--kmodule', required=True)
    parser.add_argument('-lm', '--lmodule', required=True)
    parser.add_argument('-tc', '--tools-commit', required=True)
    parser.add_argument('-uc', '--ust-commit', required=False)
    args = parser.parse_args()

    if args.type in 'baremetal-benchmarks':
        test_type = TestType.baremetal_benchmarks
    elif args.type in 'baremetal-tests':
        test_type = TestType.baremetal_tests
    elif args.type in 'kvm-tests':
        test_type = TestType.kvm_tests
    else:
        print('argument -t/--type {} unrecognized. Exiting...'.format(args.type))
        return -1

    lava_api_key = None
    try:
        lava_api_key = os.environ['LAVA_JENKINS_TOKEN']
    except Exception as e:
        print('LAVA_JENKINS_TOKEN not found in the environment. Exiting...', e)
        return -1

    if test_type is TestType.baremetal_benchmarks:
        j = create_new_job(args.jobname, build_device='x86')
        j['actions'].append(get_deploy_cmd_x86(args.jobname, args.kernel, args.kmodule, args.lmodule))
    elif test_type is TestType.baremetal_tests:
        j = create_new_job(args.jobname, build_device='x86')
        j['actions'].append(get_deploy_cmd_x86(args.jobname, args.kernel, args.kmodule, args.lmodule))
    elif test_type is TestType.kvm_tests:
        j = create_new_job(args.jobname, build_device='kvm')
        j['actions'].append(get_deploy_cmd_kvm(args.jobname, args.kernel, args.kmodule, args.lmodule))

    j['actions'].append(get_boot_cmd())

    if test_type is TestType.baremetal_benchmarks:
        j['actions'].append(get_config_cmd('x86'))
        j['actions'].append(get_env_setup_cmd('x86', args.tools_commit))
        j['actions'].append(get_baremetal_benchmarks_cmd())
        j['actions'].append(get_results_cmd(stream_name='benchmark-kernel'))
    elif test_type is TestType.baremetal_tests:
        if args.ust_commit is None:
            print('Test runs need the -uc/--ust-commit option. Exiting...')
            return -1
        j['actions'].append(get_config_cmd('x86'))
        j['actions'].append(get_env_setup_cmd('x86', args.tools_commit, args.ust_commit))
        j['actions'].append(get_baremetal_tests_cmd())
        j['actions'].append(get_results_cmd(stream_name='tests-kernel'))
    elif test_type is TestType.kvm_tests:
        if args.ust_commit is None:
            print('Test runs need the -uc/--ust-commit option. Exiting...')
            return -1
        j['actions'].append(get_config_cmd('kvm'))
        j['actions'].append(get_env_setup_cmd('kvm', args.tools_commit, args.ust_commit))
        j['actions'].append(get_kvm_tests_cmd())
        j['actions'].append(get_results_cmd(stream_name='tests-kernel'))
    else:
        assert False, 'Unknown test type'

    server = xmlrpc.client.ServerProxy('http://%s:%s@%s/RPC2' % (USERNAME, lava_api_key, HOSTNAME))

    jobid = server.scheduler.submit_job(json.dumps(j))

    print('Lava jobid: {}'.format(jobid))
    print('Lava job URL: http://lava-master.internal.efficios.com/scheduler/job/{}/log_file'.format(jobid))

    # Check the status of the job every 30 seconds
    jobstatus = server.scheduler.job_status(jobid)['job_status']
    not_running = False
    while jobstatus in 'Submitted' or jobstatus in 'Running':
        if not_running is False and jobstatus in 'Running':
            print('Job started running')
            not_running = True
        time.sleep(30)
        jobstatus = server.scheduler.job_status(jobid)['job_status']

    if test_type is TestType.kvm_tests or test_type is TestType.baremetal_tests:
        print_test_output(server, jobid)
    elif test_type is TestType.baremetal_benchmarks:
        fetch_benchmark_results(server, jobid)

    print('Job ended with {} status.'.format(jobstatus))
    if jobstatus not in 'Complete':
        return -1
    else:
        passed, failed = check_job_all_test_cases_state_count(server, jobid)
        print('With {} passed and {} failed Lava test cases.'.format(passed, failed))

        if failed == 0:
            return 0
        else:
            return -1

if __name__ == "__main__":
    sys.exit(main())