Lava: Add root destructive tests to kvm test runs
[lttng-ci.git] / scripts / lttng-baremetal-tests / lava-submit.py
1 #!/usr/bin/python
2 # Copyright (C) 2016 - Francis Deslauriers <francis.deslauriers@efficios.com>
3 #
4 # This program is free software: you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation, either version 3 of the License, or
7 # (at your option) any later version.
8 #
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
13 #
14 # You should have received a copy of the GNU General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16
17 import argparse
18 import base64
19 import json
20 import os
21 import sys
22 import time
23 import xmlrpclib
24 from collections import OrderedDict
25 from enum import Enum
26
# LAVA account used to submit jobs over XML-RPC.
USERNAME = 'frdeso'
# Host running the LAVA master (scheduler + dashboard RPC endpoints).
HOSTNAME = 'lava-master.internal.efficios.com'
# Base scp:// URL where Jenkins publishes kernel/module artifacts;
# artifact paths are appended to this prefix in the deploy commands.
SCP_PATH = 'scp://jenkins-lava@storage.internal.efficios.com'
30
class TestType(Enum):
    """Kinds of test runs this script can submit to LAVA."""

    baremetal_benchmarks = 1
    baremetal_tests = 2
    kvm_tests = 3
35
def get_job_bundle_content(server, job):
    """Fetch and decode the JSON result bundle of a finished LAVA job.

    server -- xmlrpclib ServerProxy exposing scheduler/dashboard APIs
    job    -- LAVA job id (converted to str for the RPC call)
    Returns the bundle content as a parsed JSON object.
    """
    status = server.scheduler.job_status(str(job))
    bundle = server.dashboard.get(status['bundle_sha1'])
    return json.loads(bundle['content'])
41
# Parse the results bundle to see if the run-tests testcases
# of the lttng-kernel-tests passed successfully
def check_job_all_test_cases_state_count(server, job):
    """Count passed and failed test cases in a LAVA job's result bundle.

    server -- xmlrpclib ServerProxy for the LAVA master
    job    -- LAVA job id
    Returns a (passed_tests, failed_tests) tuple.
    """
    content = get_job_bundle_content(server, job)

    passed_tests = 0
    failed_tests = 0
    for run in content['test_runs']:
        for result in run['test_results']:
            if 'test_case_id' in result:
                # Bug fix: the original used `result['result'] in 'pass'`,
                # which is a substring test — any substring of 'pass'
                # (including the empty string) would count as a pass.
                # Compare for equality instead.
                if result['result'] == 'pass':
                    passed_tests += 1
                elif result['test_case_id'] == 'wait_for_test_image_prompt':
                    # FIXME: This test is part of the boot action and fails
                    # randomly but doesn't affect the behaviour of the tests.
                    # No reply on the Lava IRC channel yet. We should update
                    # our Lava installation and try to reproduce it. This error
                    # was encountered on the KVM trusty image only. Not seen
                    # on Xenial at this point.
                    pass
                else:
                    failed_tests += 1
    return (passed_tests, failed_tests)
65
# Parse the attachment of the testcase to fetch the stdout of the test suite
def print_test_output(server, job):
    """Print the run-tests section of the job's captured stdout.

    Looks up the 'stdout.log' attachment of the lttng-kernel-test run in
    the job's result bundle and prints everything between the
    LAVA_SIGNAL_STARTTC and LAVA_SIGNAL_ENDTC markers of 'run-tests'.
    """
    content = get_job_bundle_content(server, job)
    found = False

    for run in content['test_runs']:
        # NOTE(review): `x in 'literal'` is a substring test, not equality —
        # any substring of 'lttng-kernel-test' (even '') matches. Presumably
        # equality was intended; confirm before changing.
        if run['test_id'] in 'lttng-kernel-test':
            for attachment in run['attachments']:
                # NOTE(review): same substring-vs-equality concern as above.
                if attachment['pathname'] in 'stdout.log':

                    # Decode the base64 file and split on newlines to iterate
                    # on list
                    # NOTE(review): b64decode returns str on Python 2 (this
                    # script uses xmlrpclib, so Python 2); on Python 3 this
                    # would need .decode() before split('\n').
                    testoutput = base64.b64decode(attachment['content']).split('\n')

                    # Create a generator to iterate on the lines and keeping
                    # the state of the iterator across the two loops.
                    testoutput_iter = iter(testoutput)
                    for line in testoutput_iter:

                        # Find the header of the test case and start printing
                        # from there
                        if 'LAVA_SIGNAL_STARTTC run-tests' in line:
                            found = True
                            print('---- TEST SUITE OUTPUT BEGIN ----')
                            # The inner loop shares testoutput_iter, so it
                            # consumes lines the outer loop will never see.
                            for line in testoutput_iter:
                                if 'LAVA_SIGNAL_ENDTC run-tests' not in line:
                                    print(line)
                                else:
                                    # Print until we reach the end of the
                                    # section
                                    break

                        # Once the section was printed, close it and stop
                        # scanning the remaining lines.
                        if found is True:
                            print('----- TEST SUITE OUTPUT END -----')
                            break
101
def create_new_job(name, build_device):
    """Return the skeleton of a LAVA job definition.

    name         -- human-readable job name shown in the LAVA UI
    build_device -- target device type ('x86' or 'kvm')
    Returns an OrderedDict with an empty 'actions' list to be filled in
    by the caller.
    """
    job = OrderedDict({
        'health_check': False,
        'job_name': name,
        'device_type': build_device,
        'tags': [],
        'timeout': 18000,
        'actions': []
    })
    # Bug fix: the original `build_device in 'x86'` is a substring test
    # (e.g. 'x' or '86' would also match); compare for equality instead.
    # x86 boards need the dev-sda1 tag to be scheduled on machines with
    # the scratch disk available.
    if build_device == 'x86':
        job['tags'].append('dev-sda1')

    return job
115
def get_boot_cmd():
    """Return the LAVA action that boots the deployed kernel image."""
    return OrderedDict(command='boot_image')
121
def get_config_cmd(build_device):
    """Return the LAVA action that configures the freshly booted system.

    Sets up DNS, creates the tracing group, mounts the scratch disk on
    x86 targets, and installs the build/test dependency packages.

    build_device -- target device type ('x86' or 'kvm')
    """
    packages = ['bsdtar', 'psmisc', 'wget', 'python3', 'python3-pip',
                'libglib2.0-dev', 'libffi-dev', 'elfutils', 'libdw-dev',
                'libelf-dev', 'libmount-dev', 'libxml2', 'libpfm4-dev']
    command = OrderedDict({
        'command': 'lava_command_run',
        'parameters': {
            'commands': [
                'cat /etc/resolv.conf',
                'echo nameserver 172.18.0.12 > /etc/resolv.conf',
                'groupadd tracing'
            ]
        }
    })
    # Bug fix: use equality instead of the original substring test
    # `build_device in 'x86'` ('x' or '86' would also have matched).
    if build_device == 'x86':
        # Use the scratch disk as /tmp and wipe any leftovers from
        # previous runs.
        command['parameters']['commands'].extend([
            'mount /dev/sda1 /tmp',
            'rm -rf /tmp/*'])

    command['parameters']['commands'].extend([
        'depmod -a',
        'locale-gen en_US.UTF-8',
        'apt-get update',
        'apt-get upgrade',
        'apt-get install -y {}'.format(' '.join(packages))
    ])
    return command
149
def get_baremetal_benchmarks_cmd():
    """Return the LAVA test-shell action running the benchmark testdefs."""
    testdefs = [
        'lava/baremetal-tests/failing-close.yml',
        'lava/baremetal-tests/failing-open-efault.yml',
        'lava/baremetal-tests/failing-open-enoent.yml',
    ]
    repos = [
        {
            'git-repo': 'https://github.com/lttng/lttng-ci.git',
            'revision': 'master',
            'testdef': path
        }
        for path in testdefs
    ]
    return OrderedDict({
        'command': 'lava_test_shell',
        'parameters': {
            'testdef_repos': repos,
            'timeout': 18000
        }
    })
175
def get_baremetal_tests_cmd():
    """Return the LAVA test-shell action running the perf test suite."""
    repo = {
        'git-repo': 'https://github.com/lttng/lttng-ci.git',
        'revision': 'master',
        'testdef': 'lava/baremetal-tests/perf-tests.yml'
    }
    return OrderedDict({
        'command': 'lava_test_shell',
        'parameters': {
            'testdef_repos': [repo],
            'timeout': 18000
        }
    })
191
def get_kvm_tests_cmd():
    """Return the LAVA test-shell action running the kernel and root
    destructive test suites on the KVM target."""
    testdefs = [
        'lava/baremetal-tests/kernel-tests.yml',
        'lava/baremetal-tests/destructive-tests.yml',
    ]
    repos = [
        {
            'git-repo': 'https://github.com/lttng/lttng-ci.git',
            'revision': 'master',
            'testdef': path
        }
        for path in testdefs
    ]
    return OrderedDict({
        'command': 'lava_test_shell',
        'parameters': {
            'testdef_repos': repos,
            'timeout': 18000
        }
    })
212
def get_results_cmd(stream_name):
    """Return the LAVA action submitting results to an anonymous stream.

    stream_name -- short stream name, expanded to /anonymous/<name>/
    """
    return OrderedDict({
        'command': 'submit_results',
        'parameters': {
            'server': 'http://lava-master.internal.efficios.com/RPC2/',
            'stream': '/anonymous/' + stream_name + '/'
        }
    })
222
def get_deploy_cmd_kvm(jenkins_job, kernel_path, linux_modules_path, lttng_modules_path):
    """Return the deploy_kernel action for the KVM target.

    jenkins_job        -- Jenkins job name, recorded in the metadata
    kernel_path        -- storage path of the kernel image
    linux_modules_path -- storage path of the Linux modules archive
    lttng_modules_path -- storage path of the LTTng modules archive
    """
    # Both module archives are unpacked into the rootfs at deploy time.
    customize = {
        SCP_PATH + linux_modules_path: ['rootfs:/', 'archive'],
        SCP_PATH + lttng_modules_path: ['rootfs:/', 'archive'],
    }
    return OrderedDict({
        'command': 'deploy_kernel',
        'metadata': {'jenkins_jobname': jenkins_job},
        'parameters': {
            'customize': customize,
            'kernel': str(SCP_PATH + kernel_path),
            'target_type': 'ubuntu',
            'rootfs': 'file:///var/lib/lava-server/default/media/images/xenial.img.gz',
            'login_prompt': 'kvm02 login:',
            'username': 'root'
        }
    })
243
def get_deploy_cmd_x86(jenkins_job, kernel_path, linux_modules_path, lttng_modules_path, nb_iter=None):
    """Return the deploy_kernel action for the x86 baremetal target.

    jenkins_job        -- Jenkins job name, recorded in the metadata
    kernel_path        -- storage path of the kernel image
    linux_modules_path -- storage path of the Linux modules archive
    lttng_modules_path -- storage path of the LTTng modules archive
    nb_iter            -- optional benchmark iteration count, recorded in
                          the metadata when provided
    """
    command = OrderedDict({
        'command': 'deploy_kernel',
        'metadata': {'jenkins_jobname': jenkins_job},
        'parameters': {
            'overlays': [
                str(SCP_PATH + linux_modules_path),
                str(SCP_PATH + lttng_modules_path),
            ],
            'kernel': str(SCP_PATH + kernel_path),
            'nfsrootfs': str(SCP_PATH + '/storage/jenkins-lava/rootfs/rootfs_amd64_trusty_2016-02-23-1134.tar.gz'),
            'target_type': 'ubuntu'
        }
    })
    if nb_iter is not None:
        command['metadata']['nb_iterations'] = nb_iter

    return command
264
265
def get_env_setup_cmd(build_device, lttng_tools_commit, lttng_ust_commit=None):
    """Return the LAVA action that sets up the test environment.

    Clones the benchmark repo, installs vlttng and builds an LTTng
    virtual environment pinned to the requested commits.

    build_device       -- target device type ('x86' or 'kvm'); selects
                          where the virtualenv lives
    lttng_tools_commit -- lttng-tools commit to check out
    lttng_ust_commit   -- optional lttng-ust commit; when given, UST
                          profiles are added to the vlttng command
    """
    command = OrderedDict({
        'command': 'lava_command_run',
        'parameters': {
            'commands': [
                'pip3 install --upgrade pip',
                'hash -r',
                'git clone https://github.com/frdeso/syscall-bench-it.git bm',
                'pip3 install vlttng',
            ],
            'timeout': 18000
        }
    })

    vlttng_cmd = 'vlttng --jobs=$(nproc) --profile urcu-master' \
        ' --profile babeltrace-stable-1.4 ' \
        ' --profile lttng-tools-master' \
        ' --override projects.lttng-tools.checkout=' + lttng_tools_commit + \
        ' --profile lttng-tools-no-man-pages'

    if lttng_ust_commit is not None:
        vlttng_cmd += ' --profile lttng-ust-master ' \
            ' --override projects.lttng-ust.checkout=' + lttng_ust_commit + \
            ' --profile lttng-ust-no-man-pages'

    # Bug fix: the original `build_device in 'kvm'` is a substring test
    # ('k' or 'vm' would also have matched); compare for equality.
    # The KVM rootfs keeps the virtualenv in /root, x86 uses the /tmp
    # scratch mount.
    if build_device == 'kvm':
        virtenv_path = '/root/virtenv'
    else:
        virtenv_path = '/tmp/virtenv'

    vlttng_cmd += ' ' + virtenv_path

    command['parameters']['commands'].append(vlttng_cmd)
    # Stable symlink so later test steps find the venv regardless of device.
    command['parameters']['commands'].append('ln -s ' + virtenv_path + ' /root/lttngvenv')
    command['parameters']['commands'].append('sync')

    return command
304
def main():
    """Build a LAVA job from CLI arguments, submit it, poll until it
    finishes and report the result.

    Returns 0 when the job completes with no failed test case, -1 on
    unrecognized arguments, missing --ust-commit, an incomplete job or
    any failed test case.
    """
    test_type = None
    parser = argparse.ArgumentParser(description='Launch baremetal test using Lava')
    parser.add_argument('-t', '--type', required=True)
    parser.add_argument('-j', '--jobname', required=True)
    parser.add_argument('-k', '--kernel', required=True)
    parser.add_argument('-km', '--kmodule', required=True)
    parser.add_argument('-lm', '--lmodule', required=True)
    parser.add_argument('-l', '--lava-key', required=True)
    parser.add_argument('-tc', '--tools-commit', required=True)
    parser.add_argument('-uc', '--ust-commit', required=False)
    args = parser.parse_args()

    # Bug fix: the original used `args.type in '...'` which is a substring
    # test ('-t kvm' would have matched 'kvm-tests'); compare for equality.
    if args.type == 'baremetal-benchmarks':
        test_type = TestType.baremetal_benchmarks
    elif args.type == 'baremetal-tests':
        test_type = TestType.baremetal_tests
    elif args.type == 'kvm-tests':
        test_type = TestType.kvm_tests
    else:
        print('argument -t/--type {} unrecognized. Exiting...'.format(args.type))
        return -1

    # Deployment differs per target: baremetal runs go to the x86 board,
    # kvm-tests to the KVM device.
    if test_type is TestType.baremetal_benchmarks:
        j = create_new_job(args.jobname, build_device='x86')
        j['actions'].append(get_deploy_cmd_x86(args.jobname, args.kernel, args.kmodule, args.lmodule))
    elif test_type is TestType.baremetal_tests:
        j = create_new_job(args.jobname, build_device='x86')
        j['actions'].append(get_deploy_cmd_x86(args.jobname, args.kernel, args.kmodule, args.lmodule))
    elif test_type is TestType.kvm_tests:
        j = create_new_job(args.jobname, build_device='kvm')
        j['actions'].append(get_deploy_cmd_kvm(args.jobname, args.kernel, args.kmodule, args.lmodule))

    j['actions'].append(get_boot_cmd())

    # Post-boot actions: configuration, environment setup, the test-shell
    # runs and finally result submission.
    if test_type is TestType.baremetal_benchmarks:
        j['actions'].append(get_config_cmd('x86'))
        j['actions'].append(get_env_setup_cmd('x86', args.tools_commit))
        j['actions'].append(get_baremetal_benchmarks_cmd())
        j['actions'].append(get_results_cmd(stream_name='benchmark-kernel'))
    elif test_type is TestType.baremetal_tests:
        if args.ust_commit is None:
            print('Tests runs need -uc/--ust-commit options. Exiting...')
            return -1
        j['actions'].append(get_config_cmd('x86'))
        j['actions'].append(get_env_setup_cmd('x86', args.tools_commit, args.ust_commit))
        j['actions'].append(get_baremetal_tests_cmd())
        j['actions'].append(get_results_cmd(stream_name='tests-kernel'))
    elif test_type is TestType.kvm_tests:
        if args.ust_commit is None:
            print('Tests runs need -uc/--ust-commit options. Exiting...')
            return -1
        j['actions'].append(get_config_cmd('kvm'))
        j['actions'].append(get_env_setup_cmd('kvm', args.tools_commit, args.ust_commit))
        j['actions'].append(get_kvm_tests_cmd())
        j['actions'].append(get_results_cmd(stream_name='tests-kernel'))
    else:
        assert False, 'Unknown test type'

    server = xmlrpclib.ServerProxy('http://%s:%s@%s/RPC2' % (USERNAME, args.lava_key, HOSTNAME))

    jobid = server.scheduler.submit_job(json.dumps(j))

    print('Lava jobid:{}'.format(jobid))

    # Check the status of the job every 30 seconds until it leaves the
    # Submitted/Running states. Bug fix: use equality, not the fragile
    # `jobstatus in 'Submitted'` substring test.
    jobstatus = server.scheduler.job_status(jobid)['job_status']
    while jobstatus == 'Submitted' or jobstatus == 'Running':
        time.sleep(30)
        jobstatus = server.scheduler.job_status(jobid)['job_status']

    passed, failed = check_job_all_test_cases_state_count(server, jobid)

    # The test suite's stdout is only printed for KVM runs.
    if test_type is TestType.kvm_tests:
        print_test_output(server, jobid)

    print('Job ended with {} status.'.format(jobstatus))
    if jobstatus != 'Complete':
        return -1
    else:
        print('With {} passed and {} failed Lava test cases.'.format(passed, failed))

    if failed == 0:
        return 0
    else:
        return -1

if __name__ == "__main__":
    sys.exit(main())
This page took 0.065561 seconds and 4 git commands to generate.