Lava: More clearly split baremetal and kvm tests
[lttng-ci.git] / scripts / lttng-baremetal-tests / lava-submit.py
1 #!/usr/bin/python
2 # Copyright (C) 2016 - Francis Deslauriers <francis.deslauriers@efficios.com>
3 #
4 # This program is free software: you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation, either version 3 of the License, or
7 # (at your option) any later version.
8 #
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
13 #
14 # You should have received a copy of the GNU General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16
17 import argparse
18 import base64
19 import json
20 import os
21 import sys
22 import time
23 import xmlrpclib
24 from collections import OrderedDict
25 from enum import Enum
26
# Credentials and hosts used to talk to the LAVA master and to the
# storage server holding the kernel/module artifacts.
USERNAME = 'frdeso'
HOSTNAME = 'lava-master.internal.efficios.com'
SCP_PATH = 'scp://jenkins-lava@storage.internal.efficios.com'
30
class TestType(Enum):
    """Kinds of test jobs this script can submit to LAVA."""

    baremetal_benchmarks = 1
    baremetal_tests = 2
    kvm_tests = 3
35
def get_job_bundle_content(server, job):
    """Fetch and deserialize the result bundle of a LAVA job.

    server -- authenticated XML-RPC proxy to the LAVA master
    job    -- LAVA job id (converted to str for the RPC call)
    """
    sha1 = server.scheduler.job_status(str(job))['bundle_sha1']
    raw_bundle = server.dashboard.get(sha1)
    return json.loads(raw_bundle['content'])
41
# Parse the results bundle to see the run-tests testcase
# of the lttng-kernel-tests passed successfully
def check_job_all_test_cases_state_count(server, job):
    """Return a (passed, failed) count of the job's test cases.

    server -- authenticated XML-RPC proxy to the LAVA master
    job    -- LAVA job id
    """
    content = get_job_bundle_content(server, job)

    passed_tests = 0
    failed_tests = 0
    for run in content['test_runs']:
        for result in run['test_results']:
            # Entries without a test_case_id are not test cases; skip them.
            if 'test_case_id' in result:
                # Compare with == : the original `in` performed a substring
                # test, so e.g. a result of 'pa' would count as a pass.
                if result['result'] == 'pass':
                    passed_tests += 1
                elif result['test_case_id'] == 'wait_for_test_image_prompt':
                    # FIXME:This test is part of the boot action and fails
                    # randomly but doesn't affect the behaviour of the tests.
                    # No reply on the Lava IRC channel yet. We should update
                    # our Lava installation and try to reproduce it. This error
                    # was encountered on the KVM trusty image only. Not seen
                    # on Xenial at this point.
                    pass
                else:
                    failed_tests += 1
    return (passed_tests, failed_tests)
65
# Parse the attachment of the testcase to fetch the stdout of the test suite
def print_test_output(server, job):
    """Print the captured stdout of the 'run-tests' test case of a LAVA job.

    Looks up the 'stdout.log' attachment of the 'lttng-kernel-test' run in
    the job's result bundle and prints the lines found between the
    LAVA_SIGNAL_STARTTC/ENDTC markers of the 'run-tests' test case.
    """
    content = get_job_bundle_content(server, job)
    found = False

    for run in content['test_runs']:
        # Compare with == : the original `in` performed a substring test,
        # so any prefix of the id (e.g. 'lttng') would have matched.
        if run['test_id'] == 'lttng-kernel-test':
            for attachment in run['attachments']:
                if attachment['pathname'] == 'stdout.log':

                    # Decode the base64 file and split on newlines to iterate
                    # on list
                    testoutput = base64.b64decode(attachment['content']).split('\n')

                    # Create a generator to iterate on the lines and keeping
                    # the state of the iterator across the two loops.
                    testoutput_iter = iter(testoutput)
                    for line in testoutput_iter:

                        # Find the header of the test case and start printing
                        # from there
                        if 'LAVA_SIGNAL_STARTTC run-tests' in line:
                            found = True
                            print('---- TEST SUITE OUTPUT BEGIN ----')
                            for line in testoutput_iter:
                                if 'LAVA_SIGNAL_ENDTC run-tests' not in line:
                                    print(line)
                                else:
                                    # Print until we reach the end of the
                                    # section
                                    break

                    if found is True:
                        print('----- TEST SUITE OUTPUT END -----')
                        break
101
def create_new_job(name, build_device):
    """Return the skeleton (OrderedDict) of a LAVA job definition.

    name         -- job name shown in the LAVA scheduler
    build_device -- target device type: 'x86' or 'kvm'
    """
    job = OrderedDict({
        'health_check': False,
        'job_name': name,
        'device_type': build_device,
        'tags': [],
        'timeout': 18000,
        'actions': []
    })
    # Compare with == : the original `in` performed a substring test, so
    # build_device values such as 'x' or 'x8' would also have matched.
    if build_device == 'x86':
        job['tags'].append('dev-sda1')

    return job
115
def get_boot_cmd():
    """Return the LAVA action that boots the deployed image."""
    return OrderedDict(command='boot_image')
121
def get_config_cmd(build_device):
    """Return the lava_command_run action configuring the target node.

    Sets up DNS, creates the 'tracing' group, mounts /dev/sda1 on /tmp
    for x86 targets, and installs the build dependencies with apt-get.

    build_device -- target device type: 'x86' or 'kvm'
    """
    packages = ['bsdtar', 'psmisc', 'wget', 'python3', 'python3-pip',
                'libglib2.0-dev', 'libffi-dev', 'elfutils', 'libdw-dev',
                'libelf-dev', 'libmount-dev', 'libxml2', 'libpfm4-dev']
    command = OrderedDict({
        'command': 'lava_command_run',
        'parameters': {
            'commands': [
                'cat /etc/resolv.conf',
                'echo nameserver 172.18.0.12 > /etc/resolv.conf',
                'groupadd tracing'
            ]
        }
    })
    # Compare with == : the original `in` performed a substring test, so
    # build_device values such as 'x' or 'x8' would also have matched.
    if build_device == 'x86':
        command['parameters']['commands'].extend([
            'mount /dev/sda1 /tmp',
            'rm -rf /tmp/*'])

    command['parameters']['commands'].extend([
        'depmod -a',
        'locale-gen en_US.UTF-8',
        'apt-get update',
        'apt-get upgrade',
        'apt-get install -y {}'.format(' '.join(packages))
    ])
    return command
149
def get_baremetal_benchmarks_cmd():
    """Return the lava_test_shell action running the benchmark testdefs."""
    testdef_paths = [
        'lava/baremetal-tests/failing-close.yml',
        'lava/baremetal-tests/failing-open-efault.yml',
        'lava/baremetal-tests/failing-open-enoent.yml',
    ]
    repos = [{'git-repo': 'https://github.com/lttng/lttng-ci.git',
              'revision': 'master',
              'testdef': path} for path in testdef_paths]
    return OrderedDict({
        'command': 'lava_test_shell',
        'parameters': {
            'testdef_repos': repos,
            'timeout': 18000
        }
    })
175
def get_baremetal_tests_cmd():
    """Return the lava_test_shell action running the perf test suite."""
    repo = {
        'git-repo': 'https://github.com/lttng/lttng-ci.git',
        'revision': 'master',
        'testdef': 'lava/baremetal-tests/perf-tests.yml',
    }
    return OrderedDict({
        'command': 'lava_test_shell',
        'parameters': {
            'testdef_repos': [repo],
            'timeout': 18000
        }
    })
191
def get_kvm_tests_cmd():
    """Return the lava_test_shell action running the kernel test suite."""
    parameters = {
        'testdef_repos': [{
            'git-repo': 'https://github.com/lttng/lttng-ci.git',
            'revision': 'master',
            'testdef': 'lava/baremetal-tests/kernel-tests.yml',
        }],
        'timeout': 18000,
    }
    return OrderedDict([('command', 'lava_test_shell'),
                        ('parameters', parameters)])
207
def get_results_cmd(stream_name):
    """Return the submit_results action pushing the bundle to the given
    anonymous stream on the LAVA master."""
    return OrderedDict({
        'command': 'submit_results',
        'parameters': {
            'server': 'http://lava-master.internal.efficios.com/RPC2/',
            'stream': '/anonymous/' + stream_name + '/'
        }
    })
217
def get_deploy_cmd_kvm(jenkins_job, kernel_path, linux_modules_path, lttng_modules_path):
    """Return the deploy_kernel action for a KVM job.

    Deploys the kernel image and copies the linux and lttng module
    archives into the xenial rootfs, all fetched over scp from the
    storage server.

    jenkins_job        -- Jenkins job name, recorded in the metadata
    kernel_path        -- path of the kernel image on the storage server
    linux_modules_path -- path of the linux modules archive
    lttng_modules_path -- path of the lttng modules archive
    """
    customize = {
        SCP_PATH + linux_modules_path: ['rootfs:/', 'archive'],
        SCP_PATH + lttng_modules_path: ['rootfs:/', 'archive'],
    }
    return OrderedDict({
        'command': 'deploy_kernel',
        'metadata': {'jenkins_jobname': jenkins_job},
        'parameters': {
            'customize': customize,
            'kernel': str(SCP_PATH + kernel_path),
            'target_type': 'ubuntu',
            'rootfs': 'file:///var/lib/lava-server/default/media/images/xenial.img.gz',
            'login_prompt': 'kvm02 login:',
            'username': 'root'
        }
    })
238
def get_deploy_cmd_x86(jenkins_job, kernel_path, linux_modules_path, lttng_modules_path, nb_iter=None):
    """Return the deploy_kernel action for a baremetal x86 job.

    Deploys the kernel with the trusty NFS rootfs and overlays the linux
    and lttng module archives fetched over scp from the storage server.

    jenkins_job        -- Jenkins job name, recorded in the metadata
    kernel_path        -- path of the kernel image on the storage server
    linux_modules_path -- path of the linux modules archive
    lttng_modules_path -- path of the lttng modules archive
    nb_iter            -- optional iteration count stored in the metadata
    """
    rootfs_path = '/storage/jenkins-lava/rootfs/rootfs_amd64_trusty_2016-02-23-1134.tar.gz'
    command = OrderedDict({
        'command': 'deploy_kernel',
        'metadata': {'jenkins_jobname': jenkins_job},
        'parameters': {
            'overlays': [str(SCP_PATH + linux_modules_path),
                         str(SCP_PATH + lttng_modules_path)],
            'kernel': str(SCP_PATH + kernel_path),
            'nfsrootfs': str(SCP_PATH + rootfs_path),
            'target_type': 'ubuntu'
        }
    })
    if nb_iter is not None:
        command['metadata']['nb_iterations'] = nb_iter

    return command
259
260
def get_env_setup_cmd(build_device, lttng_tools_commit, lttng_ust_commit=None):
    """Return the lava_command_run action building the test environment.

    Clones the benchmark repo and runs vlttng to build lttng-tools (and,
    when lttng_ust_commit is given, lttng-ust) at the requested commits
    inside a virtualenv, which is then symlinked to /root/lttngvenv.

    build_device       -- 'kvm' or 'x86'; selects the virtualenv location
    lttng_tools_commit -- commit checked out for lttng-tools
    lttng_ust_commit   -- optional commit checked out for lttng-ust
    """
    command = OrderedDict({
        'command': 'lava_command_run',
        'parameters': {
            'commands': [
                'pip3 install --upgrade pip',
                'hash -r',
                'git clone https://github.com/frdeso/syscall-bench-it.git bm',
                'pip3 install vlttng',
            ],
            'timeout': 18000
        }
    })

    vlttng_cmd = 'vlttng --jobs=$(nproc) --profile urcu-master' \
                 ' --profile babeltrace-stable-1.4 ' \
                 ' --profile lttng-tools-master' \
                 ' --override projects.lttng-tools.checkout='+lttng_tools_commit + \
                 ' --profile lttng-tools-no-man-pages'

    if lttng_ust_commit is not None:
        vlttng_cmd += ' --profile lttng-ust-master ' \
                      ' --override projects.lttng-ust.checkout='+lttng_ust_commit+ \
                      ' --profile lttng-ust-no-man-pages'

    # Compare with == : the original `in` performed a substring test, so
    # build_device values such as 'k' or 'kv' would also have matched.
    if build_device == 'kvm':
        virtenv_path = '/root/virtenv'
    else:
        # On x86 the virtualenv goes to /tmp (where get_config_cmd mounts
        # /dev/sda1).
        virtenv_path = '/tmp/virtenv'

    vlttng_cmd += ' '+virtenv_path

    command['parameters']['commands'].append(vlttng_cmd)
    command['parameters']['commands'].append('ln -s '+virtenv_path+' /root/lttngvenv')
    command['parameters']['commands'].append('sync')

    return command
299
def main():
    """Parse arguments, build a LAVA job, submit it and poll until done.

    Returns 0 when the job completes with no failed test case, and -1 on
    unrecognized arguments, missing --ust-commit, an incomplete job, or
    failed test cases.
    """
    test_type = None
    parser = argparse.ArgumentParser(description='Launch baremetal test using Lava')
    parser.add_argument('-t', '--type', required=True)
    parser.add_argument('-j', '--jobname', required=True)
    parser.add_argument('-k', '--kernel', required=True)
    parser.add_argument('-km', '--kmodule', required=True)
    parser.add_argument('-lm', '--lmodule', required=True)
    parser.add_argument('-l', '--lava-key', required=True)
    parser.add_argument('-tc', '--tools-commit', required=True)
    parser.add_argument('-uc', '--ust-commit', required=False)
    args = parser.parse_args()

    # Compare with == : the original `in` performed substring tests, so
    # e.g. '-t tests' would have been accepted as 'baremetal-tests'.
    if args.type == 'baremetal-benchmarks':
        test_type = TestType.baremetal_benchmarks
    elif args.type == 'baremetal-tests':
        test_type = TestType.baremetal_tests
    elif args.type == 'kvm-tests':
        test_type = TestType.kvm_tests
    else:
        print('argument -t/--type {} unrecognized. Exiting...'.format(args.type))
        return -1

    # Deployment step: benchmarks and baremetal tests run on x86, the
    # kernel test suite runs inside KVM.
    if test_type is TestType.baremetal_benchmarks:
        j = create_new_job(args.jobname, build_device='x86')
        j['actions'].append(get_deploy_cmd_x86(args.jobname, args.kernel, args.kmodule, args.lmodule))
    elif test_type is TestType.baremetal_tests:
        j = create_new_job(args.jobname, build_device='x86')
        j['actions'].append(get_deploy_cmd_x86(args.jobname, args.kernel, args.kmodule, args.lmodule))
    elif test_type is TestType.kvm_tests:
        j = create_new_job(args.jobname, build_device='kvm')
        j['actions'].append(get_deploy_cmd_kvm(args.jobname, args.kernel, args.kmodule, args.lmodule))

    j['actions'].append(get_boot_cmd())

    # Configuration, environment setup, test run and result submission.
    if test_type is TestType.baremetal_benchmarks:
        j['actions'].append(get_config_cmd('x86'))
        j['actions'].append(get_env_setup_cmd('x86', args.tools_commit))
        j['actions'].append(get_baremetal_benchmarks_cmd())
        j['actions'].append(get_results_cmd(stream_name='benchmark-kernel'))
    elif test_type is TestType.baremetal_tests:
        if args.ust_commit is None:
            print('Tests runs need -uc/--ust-commit options. Exiting...')
            return -1
        j['actions'].append(get_config_cmd('x86'))
        j['actions'].append(get_env_setup_cmd('x86', args.tools_commit, args.ust_commit))
        j['actions'].append(get_baremetal_tests_cmd())
        j['actions'].append(get_results_cmd(stream_name='tests-kernel'))
    elif test_type is TestType.kvm_tests:
        if args.ust_commit is None:
            print('Tests runs need -uc/--ust-commit options. Exiting...')
            return -1
        j['actions'].append(get_config_cmd('kvm'))
        j['actions'].append(get_env_setup_cmd('kvm', args.tools_commit, args.ust_commit))
        j['actions'].append(get_kvm_tests_cmd())
        j['actions'].append(get_results_cmd(stream_name='tests-kernel'))
    else:
        assert False, 'Unknown test type'

    server = xmlrpclib.ServerProxy('http://%s:%s@%s/RPC2' % (USERNAME, args.lava_key, HOSTNAME))

    jobid = server.scheduler.submit_job(json.dumps(j))

    print('Lava jobid:{}'.format(jobid))

    # Check the status of the job every 30 seconds
    jobstatus = server.scheduler.job_status(jobid)['job_status']
    while jobstatus in ('Submitted', 'Running'):
        time.sleep(30)
        jobstatus = server.scheduler.job_status(jobid)['job_status']

    passed, failed = check_job_all_test_cases_state_count(server, jobid)

    # The kernel test suite output is only printed for the KVM jobs.
    if test_type is TestType.kvm_tests:
        print_test_output(server, jobid)

    print('Job ended with {} status.'.format(jobstatus))
    if jobstatus != 'Complete':
        return -1
    else:
        print('With {} passed and {} failed Lava test cases.'.format(passed, failed))

    if failed == 0:
        return 0
    else:
        return -1
386
if __name__ == "__main__":
    # Propagate main()'s return value as the process exit status.
    sys.exit(main())
This page took 0.039576 seconds and 5 git commands to generate.