Lava: Don't count flaky test included in boot action
[lttng-ci.git] / scripts / lttng-baremetal-tests / lava-submit.py
1 #!/usr/bin/python
2 # Copyright (C) 2016 - Francis Deslauriers <francis.deslauriers@efficios.com>
3 #
4 # This program is free software: you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation, either version 3 of the License, or
7 # (at your option) any later version.
8 #
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
13 #
14 # You should have received a copy of the GNU General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16
17 import argparse
18 import base64
19 import json
20 import os
21 import sys
22 import time
23 import xmlrpclib
24 from collections import OrderedDict
25 from enum import Enum
26
# Credentials and endpoints used to reach the Lava master (XML-RPC) and the
# storage server that hosts the kernel/module build artifacts.
USERNAME = 'frdeso'  # Lava account used to submit and poll jobs
HOSTNAME = 'lava-master.internal.efficios.com'  # Lava master host
SCP_PATH = 'scp://jenkins-lava@storage.internal.efficios.com'  # artifact storage URL prefix
30
class TestType(Enum):
    """Kind of Lava job this script can submit."""
    benchmarks = 1
    tests = 2
34
def get_job_bundle_content(server, job):
    """Fetch and decode the JSON result bundle attached to a Lava job."""
    status = server.scheduler.job_status(str(job))
    bundle = server.dashboard.get(status['bundle_sha1'])
    return json.loads(bundle['content'])
40
# Parse the results bundle and count the passed/failed test cases of the
# lttng-kernel-tests run.
def check_job_all_test_cases_state_count(server, job):
    """Return a (passed, failed) tuple of test-case counts for `job`.

    The flaky 'wait_for_test_image_prompt' test case, which belongs to the
    boot action, is ignored entirely: it fails randomly without affecting
    the behaviour of the test suite itself.
    """
    content = get_job_bundle_content(server, job)

    passed_tests = 0
    failed_tests = 0
    for run in content['test_runs']:
        for result in run['test_results']:
            # Entries without a 'test_case_id' are run metadata, not
            # actual test cases.
            if 'test_case_id' not in result:
                continue
            # The original code used `x in 'literal'` (a substring test);
            # exact equality is what is meant here.
            if result['result'] == 'pass':
                passed_tests += 1
            elif result['test_case_id'] == 'wait_for_test_image_prompt':
                # FIXME: This test is part of the boot action and fails
                # randomly but doesn't affect the behaviour of the tests.
                # No reply on the Lava IRC channel yet. We should update
                # our Lava installation and try to reproduce it. This error
                # was encountered on the KVM trusty image only. Not seen
                # on Xenial at this point.
                pass
            else:
                failed_tests += 1
    return (passed_tests, failed_tests)
64
# Parse the attachments of the test case to fetch the stdout of the test suite
def print_test_output(server, job):
    """Print the run-tests output of the job's 'lttng-kernel-test' run.

    The output is recovered from the base64-encoded 'stdout.log'
    attachment; only the section between the run-tests start and end
    signals is printed.
    """
    content = get_job_bundle_content(server, job)
    found = False

    for run in content['test_runs']:
        # Exact match intended (was a substring test in the original code).
        if run['test_id'] != 'lttng-kernel-test':
            continue
        for attachment in run['attachments']:
            if attachment['pathname'] != 'stdout.log':
                continue

            # Decode the base64 file and split on newlines to iterate
            # on list.
            testoutput = base64.b64decode(attachment['content']).split('\n')

            # Use a single iterator so the position is shared between the
            # two nested loops below.
            testoutput_iter = iter(testoutput)
            for line in testoutput_iter:

                # Find the header of the test case and start printing
                # from there.
                if 'LAVA_SIGNAL_STARTTC run-tests' in line:
                    found = True
                    print('---- TEST SUITE OUTPUT BEGIN ----')
                    for line in testoutput_iter:
                        if 'LAVA_SIGNAL_ENDTC run-tests' not in line:
                            print(line)
                        else:
                            # Print until we reach the end of the
                            # section.
                            break

                if found is True:
                    print('----- TEST SUITE OUTPUT END -----')
                    break
100
def create_new_job(name, build_device):
    """Return the base skeleton of a Lava job definition.

    name -- job name shown in the Lava scheduler
    build_device -- target device type ('x86' or 'kvm')
    """
    job = OrderedDict({
        'health_check': False,
        'job_name': name,
        'device_type': build_device,
        'tags': [],
        'timeout': 18000,
        'actions': []
    })
    # Baremetal x86 jobs need a board that exposes a scratch partition.
    # Exact match intended (was a substring test in the original code).
    if build_device == 'x86':
        job['tags'].append('dev-sda1')

    return job
114
def get_boot_cmd():
    """Return the Lava action that boots the previously deployed image."""
    return OrderedDict(command='boot_image')
120
def get_config_cmd(build_device):
    """Return the Lava action configuring the freshly booted system.

    Brings up networking/DNS, creates the tracing group and installs the
    packages required by the test suites. On x86, the scratch disk is
    also mounted on /tmp and wiped first.
    """
    packages = ['bsdtar', 'psmisc', 'wget', 'python3', 'python3-pip',
                'libglib2.0-dev', 'libffi-dev', 'elfutils', 'libdw-dev',
                'libelf-dev', 'libmount-dev', 'libxml2']
    command = OrderedDict({
        'command': 'lava_command_run',
        'parameters': {
            'commands': [
                'ifup eth0',
                'route -n',
                'cat /etc/resolv.conf',
                'echo nameserver 172.18.0.12 > /etc/resolv.conf',
                'groupadd tracing'
            ]
        }
    })
    # Exact match intended (was a substring test in the original code).
    if build_device == 'x86':
        command['parameters']['commands'].extend([
            'mount /dev/sda1 /tmp',
            'rm -rf /tmp/*'])

    command['parameters']['commands'].extend([
        'depmod -a',
        'locale-gen en_US.UTF-8',
        'apt-get update',
        'apt-get install -y {}'.format(' '.join(packages))
    ])
    return command
149
def get_benchmarks_cmd():
    """Return the Lava test-shell action running the benchmark testdefs."""
    testdef_paths = [
        'lava/baremetal-tests/failing-close.yml',
        'lava/baremetal-tests/failing-open-efault.yml',
        'lava/baremetal-tests/failing-open-enoent.yml',
        'lava/baremetal-tests/perf-tests.yml',
    ]
    # Every testdef comes from the master branch of the lttng-ci repo.
    repos = [{'git-repo': 'https://github.com/lttng/lttng-ci.git',
              'revision': 'master',
              'testdef': path} for path in testdef_paths]
    return OrderedDict({
        'command': 'lava_test_shell',
        'parameters': {
            'testdef_repos': repos,
            'timeout': 18000
        }
    })
180
def get_tests_cmd():
    """Return the Lava test-shell action running the kernel test suite."""
    kernel_testdef = {
        'git-repo': 'https://github.com/lttng/lttng-ci.git',
        'revision': 'master',
        'testdef': 'lava/baremetal-tests/kernel-tests.yml'
    }
    command = OrderedDict()
    command['command'] = 'lava_test_shell'
    command['parameters'] = {
        'testdef_repos': [kernel_testdef],
        'timeout': 18000
    }
    return command
196
def get_results_cmd(stream_name):
    """Return the Lava action submitting results to /anonymous/<stream>/."""
    command = OrderedDict({
        'command': 'submit_results',
        'parameters': {
            'server': 'http://lava-master.internal.efficios.com/RPC2/',
            'stream': '/anonymous/{}/'.format(stream_name)
        }
    })
    return command
206
def get_deploy_cmd_kvm(jenkins_job, kernel_path, linux_modules_path, lttng_modules_path):
    """Return the KVM deploy action: kernel plus modules over the trusty image."""
    # Both module archives are unpacked into the root filesystem.
    customize = {
        SCP_PATH + linux_modules_path: ['rootfs:/', 'archive'],
        SCP_PATH + lttng_modules_path: ['rootfs:/', 'archive'],
    }
    return OrderedDict({
        'command': 'deploy_kernel',
        'metadata': {'jenkins_jobname': jenkins_job},
        'parameters': {
            'customize': customize,
            'kernel': str(SCP_PATH + kernel_path),
            'rootfs': 'file:///var/lib/lava-server/default/media/images/trusty-grub.img.gz',
            'target_type': 'ubuntu'
        }
    })
225
def get_deploy_cmd_x86(jenkins_job, kernel_path, linux_modules_path, lttng_modules_path, nb_iter=None):
    """Return the x86 deploy action: kernel, module overlays and NFS rootfs.

    nb_iter -- optional benchmark iteration count stored in the metadata
    """
    metadata = {'jenkins_jobname': jenkins_job}
    if nb_iter is not None:
        metadata['nb_iterations'] = nb_iter

    overlays = [
        str(SCP_PATH + linux_modules_path),
        str(SCP_PATH + lttng_modules_path),
    ]
    return OrderedDict({
        'command': 'deploy_kernel',
        'metadata': metadata,
        'parameters': {
            'overlays': overlays,
            'kernel': str(SCP_PATH + kernel_path),
            'nfsrootfs': str(SCP_PATH + '/storage/jenkins-lava/rootfs/rootfs_amd64_trusty_2016-02-23-1134.tar.gz'),
            'target_type': 'ubuntu'
        }
    })
246
247
def get_env_setup_cmd(build_device, lttng_tools_commit, lttng_ust_commit=None):
    """Return the Lava action building the LTTng environment with vlttng.

    build_device -- 'kvm' or 'x86'; selects where the virtualenv lives
    lttng_tools_commit -- lttng-tools commit to check out
    lttng_ust_commit -- optional lttng-ust commit; when given, the
                        lttng-ust profiles are added to the vlttng command
    """
    command = OrderedDict({
        'command': 'lava_command_run',
        'parameters': {
            'commands': [
                'git clone https://github.com/frdeso/syscall-bench-it.git bm',
                'pip3 install vlttng',
            ],
            'timeout': 18000
        }
    })

    vlttng_cmd = 'vlttng --jobs=16 --profile urcu-master' \
        ' --profile babeltrace-stable-1.4 ' \
        ' --profile lttng-tools-master' \
        ' --override projects.lttng-tools.checkout=' + lttng_tools_commit + \
        ' --profile lttng-tools-no-man-pages'

    if lttng_ust_commit is not None:
        vlttng_cmd += ' --profile lttng-ust-master ' \
            ' --override projects.lttng-ust.checkout=' + lttng_ust_commit + \
            ' --profile lttng-ust-no-man-pages'

    # The x86 boards mount their scratch disk on /tmp; the KVM image keeps
    # the virtualenv on its root filesystem.
    # Exact match intended (was a substring test in the original code).
    if build_device == 'kvm':
        virtenv_path = '/root/virtenv'
    else:
        virtenv_path = '/tmp/virtenv'

    vlttng_cmd += ' ' + virtenv_path

    command['parameters']['commands'].append(vlttng_cmd)
    # Fixed symlink so later test definitions find the env at a known path.
    command['parameters']['commands'].append('ln -s ' + virtenv_path + ' /root/lttngvenv')
    command['parameters']['commands'].append('sync')

    return command
284
def main():
    """Build a Lava job from the CLI arguments, submit it and wait for it.

    Returns 0 when the job completes with no failed test case, -1 on a
    bad argument, an incomplete job or any failed test case.
    """
    test_type = None
    parser = argparse.ArgumentParser(description='Launch baremetal test using Lava')
    parser.add_argument('-t', '--type', required=True)
    parser.add_argument('-j', '--jobname', required=True)
    parser.add_argument('-k', '--kernel', required=True)
    parser.add_argument('-km', '--kmodule', required=True)
    parser.add_argument('-lm', '--lmodule', required=True)
    parser.add_argument('-l', '--lava-key', required=True)
    parser.add_argument('-tc', '--tools-commit', required=True)
    parser.add_argument('-uc', '--ust-commit', required=False)
    args = parser.parse_args()

    # Bug fix: the original `args.type in 'benchmarks'` was a substring
    # test, so e.g. '-t b' (or even an empty string) was accepted.
    if args.type == 'benchmarks':
        test_type = TestType.benchmarks
    elif args.type == 'tests':
        test_type = TestType.tests
    else:
        print('argument -t/--type {} unrecognized. Exiting...'.format(args.type))
        return -1

    # Benchmarks run on the baremetal x86 boards, functional tests in KVM.
    if test_type is TestType.benchmarks:
        j = create_new_job(args.jobname, build_device='x86')
        j['actions'].append(get_deploy_cmd_x86(args.jobname, args.kernel, args.kmodule, args.lmodule))
    elif test_type is TestType.tests:
        j = create_new_job(args.jobname, build_device='kvm')
        j['actions'].append(get_deploy_cmd_kvm(args.jobname, args.kernel, args.kmodule, args.lmodule))

    j['actions'].append(get_boot_cmd())

    if test_type is TestType.benchmarks:
        j['actions'].append(get_config_cmd('x86'))
        j['actions'].append(get_env_setup_cmd('x86', args.tools_commit))
        j['actions'].append(get_benchmarks_cmd())
        j['actions'].append(get_results_cmd(stream_name='benchmark-kernel'))
    elif test_type is TestType.tests:
        if args.ust_commit is None:
            print('Tests runs need -uc/--ust-commit options. Exiting...')
            return -1
        j['actions'].append(get_config_cmd('kvm'))
        j['actions'].append(get_env_setup_cmd('kvm', args.tools_commit, args.ust_commit))
        j['actions'].append(get_tests_cmd())
        j['actions'].append(get_results_cmd(stream_name='tests-kernel'))
    else:
        assert False, 'Unknown test type'

    server = xmlrpclib.ServerProxy('http://%s:%s@%s/RPC2' % (USERNAME, args.lava_key, HOSTNAME))

    jobid = server.scheduler.submit_job(json.dumps(j))

    print('Lava jobid:{}'.format(jobid))

    # Poll the status of the job every 30 seconds until it leaves the
    # scheduler queue and stops running.
    jobstatus = server.scheduler.job_status(jobid)['job_status']
    while jobstatus in ('Submitted', 'Running'):
        time.sleep(30)
        jobstatus = server.scheduler.job_status(jobid)['job_status']

    passed, failed = check_job_all_test_cases_state_count(server, jobid)

    # The test-suite output is only relevant for functional test runs.
    if test_type is TestType.tests:
        print_test_output(server, jobid)

    print('Job ended with {} status.'.format(jobstatus))
    if jobstatus != 'Complete':
        return -1
    else:
        print('With {} passed and {} failed Lava test cases.'.format(passed, failed))

    if failed == 0:
        return 0
    else:
        return -1

if __name__ == "__main__":
    sys.exit(main())
This page took 0.056903 seconds and 4 git commands to generate.