Lava: Handle results bundle download error
[lttng-ci.git] / scripts / lttng-baremetal-tests / lava-submit.py
1 #!/usr/bin/python
2 # Copyright (C) 2016 - Francis Deslauriers <francis.deslauriers@efficios.com>
3 #
4 # This program is free software: you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation, either version 3 of the License, or
7 # (at your option) any later version.
8 #
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
13 #
14 # You should have received a copy of the GNU General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16
17 import argparse
18 import base64
19 import json
20 import os
21 import sys
22 import time
23 import xmlrpclib
24 from collections import OrderedDict
25 from enum import Enum
26
27 USERNAME = 'frdeso'
28 HOSTNAME = 'lava-master.internal.efficios.com'
29 SCP_PATH = 'scp://jenkins-lava@storage.internal.efficios.com'
30
class TestType(Enum):
    """Enumeration of the LAVA test job flavours this script can submit."""
    baremetal_benchmarks = 1
    baremetal_tests = 2
    kvm_tests = 3
35
def get_job_bundle_content(server, job):
    """Download and decode the results bundle of a finished LAVA job.

    Args:
        server: XML-RPC proxy to the LAVA master.
        job: job id; converted to str for the scheduler call.

    Returns:
        The decoded JSON content of the bundle (a dict).

    Raises:
        xmlrpclib.Fault: re-raised after logging when the scheduler or
            dashboard call fails, so callers abort instead of proceeding
            with an undefined bundle.
    """
    try:
        # The scheduler reports the sha1 of the results bundle; the
        # bundle itself is then fetched from the dashboard.
        bundle_sha = server.scheduler.job_status(str(job))['bundle_sha1']
        bundle = server.dashboard.get(bundle_sha)
    except xmlrpclib.Fault as f:
        # Catch the properly-qualified Fault (bare `Fault` is not a
        # defined name here) and re-raise: falling through would hit a
        # NameError on `bundle` below.
        print('Error while fetching results bundle: {}'.format(f))
        raise

    return json.loads(bundle['content'])
44
# Parse the results bundle to see if the run-tests test cases
# of the lttng-kernel-tests passed successfully
def check_job_all_test_cases_state_count(server, job):
    """Count passed and failed test cases of a LAVA job.

    Args:
        server: XML-RPC proxy to the LAVA master.
        job: job id whose results bundle is inspected.

    Returns:
        Tuple (passed_tests, failed_tests).
    """
    content = get_job_bundle_content(server, job)

    passed_tests = 0
    failed_tests = 0
    for run in content['test_runs']:
        for result in run['test_results']:
            if 'test_case_id' in result:
                # Exact comparisons: the previous substring tests
                # (`x in 'pass'`) also matched e.g. '' or 'as', silently
                # counting malformed results as passed.
                if result['result'] == 'pass':
                    passed_tests += 1
                elif result['test_case_id'] == 'wait_for_test_image_prompt':
                    # FIXME: This test is part of the boot action and fails
                    # randomly but doesn't affect the behaviour of the tests.
                    # No reply on the Lava IRC channel yet. We should update
                    # our Lava installation and try to reproduce it. This error
                    # was encountered on the KVM trusty image only. Not seen
                    # on Xenial at this point.
                    pass
                else:
                    failed_tests += 1
    return (passed_tests, failed_tests)
68
# Parse the attachment of the testcase to fetch the stdout of the test suite
def print_test_output(server, job):
    """Print the run-tests test-suite output captured in the job's bundle.

    Scans the 'lttng-kernel-test' run's 'stdout.log' attachment and
    prints everything between the LAVA start/end signals of the
    'run-tests' test case.

    Args:
        server: XML-RPC proxy to the LAVA master.
        job: job id whose results bundle is inspected.
    """
    content = get_job_bundle_content(server, job)
    found = False

    for run in content['test_runs']:
        # Exact match: `run['test_id'] in 'lttng-kernel-test'` was a
        # substring test that would also accept partial ids.
        if run['test_id'] != 'lttng-kernel-test':
            continue
        for attachment in run['attachments']:
            if attachment['pathname'] != 'stdout.log':
                continue

            # Decode the base64 file and split on newlines to iterate
            # on list
            testoutput = base64.b64decode(attachment['content']).split('\n')

            # A single iterator is shared by the outer scan and the
            # inner printing loop so the position is kept across both.
            testoutput_iter = iter(testoutput)
            for line in testoutput_iter:

                # Find the header of the test case and start printing
                # from there
                if 'LAVA_SIGNAL_STARTTC run-tests' in line:
                    found = True
                    print('---- TEST SUITE OUTPUT BEGIN ----')
                    for line in testoutput_iter:
                        if 'LAVA_SIGNAL_ENDTC run-tests' not in line:
                            print(line)
                        else:
                            # Print until we reach the end of the
                            # section
                            break

                if found is True:
                    print('----- TEST SUITE OUTPUT END -----')
                    break
104
def create_new_job(name, build_device):
    """Create the base skeleton of a LAVA v1 job definition.

    Args:
        name: job name shown in the LAVA scheduler.
        build_device: target device type ('x86' or 'kvm').

    Returns:
        OrderedDict with an empty 'actions' list for the caller to fill.
    """
    # Build from a list of pairs so the key order actually survives:
    # feeding OrderedDict a dict literal loses the order on Python 2.
    job = OrderedDict([
        ('health_check', False),
        ('job_name', name),
        ('device_type', build_device),
        ('tags', []),
        ('timeout', 18000),
        ('actions', []),
    ])
    # Exact comparison: `build_device in 'x86'` was a substring test
    # that would also be true for 'x' or the empty string.
    if build_device == 'x86':
        job['tags'].append('dev-sda1')

    return job
118
def get_boot_cmd():
    """Return the LAVA action that boots the deployed image."""
    boot_action = OrderedDict()
    boot_action['command'] = 'boot_image'
    return boot_action
124
def get_config_cmd(build_device):
    """Return the lava_command_run action that configures the target.

    Sets up DNS, creates the 'tracing' group, mounts the scratch
    partition on x86 boards, then updates the system and installs the
    build/test dependencies.

    Args:
        build_device: target device type ('x86' or 'kvm').

    Returns:
        OrderedDict describing the lava_command_run action.
    """
    packages = ['bsdtar', 'psmisc', 'wget', 'python3', 'python3-pip',
                'libglib2.0-dev', 'libffi-dev', 'elfutils', 'libdw-dev',
                'libelf-dev', 'libmount-dev', 'libxml2', 'libpfm4-dev']
    command = OrderedDict({
        'command': 'lava_command_run',
        'parameters': {
            'commands': [
                'cat /etc/resolv.conf',
                'echo nameserver 172.18.0.12 > /etc/resolv.conf',
                'groupadd tracing'
            ]
        }
    })
    # Exact comparison: `build_device in 'x86'` was a substring test
    # that would also be true for 'x' or the empty string.
    if build_device == 'x86':
        # The x86 boards expose a scratch disk used as /tmp.
        command['parameters']['commands'].extend([
            'mount /dev/sda1 /tmp',
            'rm -rf /tmp/*'])

    command['parameters']['commands'].extend([
        'depmod -a',
        'locale-gen en_US.UTF-8',
        'apt-get update',
        'apt-get upgrade',
        'apt-get install -y {}'.format(' '.join(packages))
    ])
    return command
152
def get_baremetal_benchmarks_cmd():
    """Return the lava_test_shell action running the benchmark testdefs."""
    testdef_names = [
        'lava/baremetal-tests/failing-close.yml',
        'lava/baremetal-tests/failing-open-efault.yml',
        'lava/baremetal-tests/failing-open-enoent.yml',
    ]
    # Every testdef comes from the master branch of the same CI repo.
    testdef_repos = [{'git-repo': 'https://github.com/lttng/lttng-ci.git',
                      'revision': 'master',
                      'testdef': name} for name in testdef_names]
    return OrderedDict({
        'command': 'lava_test_shell',
        'parameters': {
            'testdef_repos': testdef_repos,
            'timeout': 18000
        }
    })
178
def get_baremetal_tests_cmd():
    """Return the lava_test_shell action running the perf test suite."""
    perf_testdef = {
        'git-repo': 'https://github.com/lttng/lttng-ci.git',
        'revision': 'master',
        'testdef': 'lava/baremetal-tests/perf-tests.yml',
    }
    return OrderedDict({
        'command': 'lava_test_shell',
        'parameters': {
            'testdef_repos': [perf_testdef],
            'timeout': 18000
        }
    })
194
def get_kvm_tests_cmd():
    """Return the lava_test_shell action running the KVM kernel tests."""
    testdef_names = [
        'lava/baremetal-tests/kernel-tests.yml',
        'lava/baremetal-tests/destructive-tests.yml',
    ]
    # Both testdefs come from the master branch of the same CI repo.
    testdef_repos = [{'git-repo': 'https://github.com/lttng/lttng-ci.git',
                      'revision': 'master',
                      'testdef': name} for name in testdef_names]
    return OrderedDict({
        'command': 'lava_test_shell',
        'parameters': {
            'testdef_repos': testdef_repos,
            'timeout': 18000
        }
    })
215
def get_results_cmd(stream_name):
    """Return the action submitting the job results to an anonymous stream.

    Args:
        stream_name: name of the anonymous bundle stream on the master.
    """
    command = OrderedDict({
        'command': 'submit_results',
        'parameters': {
            'server': 'http://lava-master.internal.efficios.com/RPC2/'
        }
    })
    command['parameters']['stream'] = '/anonymous/{}/'.format(stream_name)
    return command
225
def get_deploy_cmd_kvm(jenkins_job, kernel_path, linux_modules_path, lttng_modules_path):
    """Return the deploy_kernel action for the KVM device.

    The kernel and both module archives are fetched over SCP; the module
    archives are unpacked into the root filesystem image.

    Args:
        jenkins_job: Jenkins job name recorded in the action metadata.
        kernel_path: path to the kernel image on the storage host.
        linux_modules_path: path to the Linux modules archive.
        lttng_modules_path: path to the LTTng modules archive.
    """
    deploy_params = {
        'customize': {
            SCP_PATH + linux_modules_path: ['rootfs:/', 'archive'],
            SCP_PATH + lttng_modules_path: ['rootfs:/', 'archive'],
        },
        'kernel': str(SCP_PATH + kernel_path),
        'target_type': 'ubuntu',
        'rootfs': 'file:///var/lib/lava-server/default/media/images/xenial.img.gz',
        'login_prompt': 'kvm02 login:',
        'username': 'root',
    }
    command = OrderedDict({
        'command': 'deploy_kernel',
        'metadata': {'jenkins_jobname': jenkins_job},
        'parameters': deploy_params,
    })
    return command
246
def get_deploy_cmd_x86(jenkins_job, kernel_path, linux_modules_path, lttng_modules_path, nb_iter=None):
    """Return the deploy_kernel action for the x86 baremetal device.

    The kernel is fetched over SCP, the module archives are applied as
    overlays on top of an NFS root filesystem.

    Args:
        jenkins_job: Jenkins job name recorded in the action metadata.
        kernel_path: path to the kernel image on the storage host.
        linux_modules_path: path to the Linux modules archive.
        lttng_modules_path: path to the LTTng modules archive.
        nb_iter: optional benchmark iteration count, stored in metadata.
    """
    overlays = [
        str(SCP_PATH + linux_modules_path),
        str(SCP_PATH + lttng_modules_path),
    ]
    metadata = {'jenkins_jobname': jenkins_job}
    if nb_iter is not None:
        metadata['nb_iterations'] = nb_iter

    command = OrderedDict({
        'command': 'deploy_kernel',
        'metadata': metadata,
        'parameters': {
            'overlays': overlays,
            'kernel': str(SCP_PATH + kernel_path),
            'nfsrootfs': str(SCP_PATH + '/storage/jenkins-lava/rootfs/rootfs_amd64_trusty_2016-02-23-1134.tar.gz'),
            'target_type': 'ubuntu'
        }
    })
    return command
267
268
def get_env_setup_cmd(build_device, lttng_tools_commit, lttng_ust_commit=None):
    """Return the action that installs the LTTng test environment.

    Clones the syscall benchmark suite and runs vlttng to build a
    virtual environment pinned at the given commits.

    Args:
        build_device: 'kvm' or 'x86'; selects where the virtualenv lives.
        lttng_tools_commit: commit checked out for lttng-tools.
        lttng_ust_commit: optional commit for lttng-ust; when None the
            UST profiles are omitted (kernel-only jobs).

    Returns:
        OrderedDict describing the lava_command_run action.
    """
    command = OrderedDict({
        'command': 'lava_command_run',
        'parameters': {
            'commands': [
                'pip3 install --upgrade pip',
                'hash -r',
                'git clone https://github.com/frdeso/syscall-bench-it.git bm',
                'pip3 install vlttng',
            ],
            'timeout': 18000
        }
    })

    vlttng_cmd = 'vlttng --jobs=$(nproc) --profile urcu-master' \
                    ' --profile babeltrace-stable-1.4 ' \
                    ' --profile lttng-tools-master' \
                    ' --override projects.lttng-tools.checkout='+lttng_tools_commit + \
                    ' --profile lttng-tools-no-man-pages'

    if lttng_ust_commit is not None:
        vlttng_cmd += ' --profile lttng-ust-master ' \
                        ' --override projects.lttng-ust.checkout='+lttng_ust_commit+ \
                        ' --profile lttng-ust-no-man-pages'

    # Exact comparison: `build_device in 'kvm'` was a substring test
    # that would also be true for 'k' or the empty string.
    if build_device == 'kvm':
        virtenv_path = '/root/virtenv'
    else:
        # x86 boards keep the virtualenv on the scratch partition.
        virtenv_path = '/tmp/virtenv'

    # vlttng's last positional argument is the virtualenv destination.
    vlttng_cmd += ' '+virtenv_path

    command['parameters']['commands'].append(vlttng_cmd)
    command['parameters']['commands'].append('ln -s '+virtenv_path+' /root/lttngvenv')
    command['parameters']['commands'].append('sync')

    return command
307
def main():
    """Build a LAVA job from CLI arguments, submit it and wait for the result.

    Returns:
        0 when the job completes with no failed test case; -1 on an
        unknown test type, missing --ust-commit, incomplete job status
        or any failed test case.
    """
    test_type = None
    parser = argparse.ArgumentParser(description='Launch baremetal test using Lava')
    parser.add_argument('-t', '--type', required=True)
    parser.add_argument('-j', '--jobname', required=True)
    parser.add_argument('-k', '--kernel', required=True)
    parser.add_argument('-km', '--kmodule', required=True)
    parser.add_argument('-lm', '--lmodule', required=True)
    parser.add_argument('-l', '--lava-key', required=True)
    parser.add_argument('-tc', '--tools-commit', required=True)
    parser.add_argument('-uc', '--ust-commit', required=False)
    args = parser.parse_args()

    # Exact comparisons: the previous substring tests (`args.type in
    # 'literal'`) accepted partial matches such as 'tests' or ''.
    if args.type == 'baremetal-benchmarks':
        test_type = TestType.baremetal_benchmarks
    elif args.type == 'baremetal-tests':
        test_type = TestType.baremetal_tests
    elif args.type == 'kvm-tests':
        test_type = TestType.kvm_tests
    else:
        print('argument -t/--type {} unrecognized. Exiting...'.format(args.type))
        return -1

    # Deploy step: baremetal flavours run on the x86 board, the KVM
    # flavour on the virtual device.
    if test_type is TestType.baremetal_benchmarks:
        j = create_new_job(args.jobname, build_device='x86')
        j['actions'].append(get_deploy_cmd_x86(args.jobname, args.kernel, args.kmodule, args.lmodule))
    elif test_type is TestType.baremetal_tests:
        j = create_new_job(args.jobname, build_device='x86')
        j['actions'].append(get_deploy_cmd_x86(args.jobname, args.kernel, args.kmodule, args.lmodule))
    elif test_type is TestType.kvm_tests:
        j = create_new_job(args.jobname, build_device='kvm')
        j['actions'].append(get_deploy_cmd_kvm(args.jobname, args.kernel, args.kmodule, args.lmodule))

    j['actions'].append(get_boot_cmd())

    # Configure + environment setup + test-shell + results submission.
    if test_type is TestType.baremetal_benchmarks:
        j['actions'].append(get_config_cmd('x86'))
        j['actions'].append(get_env_setup_cmd('x86', args.tools_commit))
        j['actions'].append(get_baremetal_benchmarks_cmd())
        j['actions'].append(get_results_cmd(stream_name='benchmark-kernel'))
    elif test_type is TestType.baremetal_tests:
        if args.ust_commit is None:
            print('Tests runs need -uc/--ust-commit options. Exiting...')
            return -1
        j['actions'].append(get_config_cmd('x86'))
        j['actions'].append(get_env_setup_cmd('x86', args.tools_commit, args.ust_commit))
        j['actions'].append(get_baremetal_tests_cmd())
        j['actions'].append(get_results_cmd(stream_name='tests-kernel'))
    elif test_type is TestType.kvm_tests:
        if args.ust_commit is None:
            print('Tests runs need -uc/--ust-commit options. Exiting...')
            return -1
        j['actions'].append(get_config_cmd('kvm'))
        j['actions'].append(get_env_setup_cmd('kvm', args.tools_commit, args.ust_commit))
        j['actions'].append(get_kvm_tests_cmd())
        j['actions'].append(get_results_cmd(stream_name='tests-kernel'))
    else:
        assert False, 'Unknown test type'

    server = xmlrpclib.ServerProxy('http://%s:%s@%s/RPC2' % (USERNAME, args.lava_key, HOSTNAME))

    jobid = server.scheduler.submit_job(json.dumps(j))

    print('Lava jobid:{}'.format(jobid))

    # Check the status of the job every 30 seconds. Exact membership
    # test: `jobstatus in 'Submitted'` was a substring check and an
    # empty status string would have looped forever.
    jobstatus = server.scheduler.job_status(jobid)['job_status']
    while jobstatus in ('Submitted', 'Running'):
        time.sleep(30)
        jobstatus = server.scheduler.job_status(jobid)['job_status']

    passed, failed = check_job_all_test_cases_state_count(server, jobid)

    # Only the functional test flavours capture a test-suite stdout log.
    if test_type is TestType.kvm_tests or test_type is TestType.baremetal_tests:
        print_test_output(server, jobid)

    print('Job ended with {} status.'.format(jobstatus))
    if jobstatus != 'Complete':
        return -1
    else:
        print('With {} passed and {} failed Lava test cases.'.format(passed, failed))

    if failed == 0:
        return 0
    else:
        return -1

if __name__ == "__main__":
    sys.exit(main())
This page took 0.038724 seconds and 4 git commands to generate.