Lava: Add perf regression test suite to baremetal runs
[lttng-ci.git] scripts/lttng-baremetal-tests/lava-submit.py
#!/usr/bin/python
# Copyright (C) 2016 - Francis Deslauriers <francis.deslauriers@efficios.com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.

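# This script builds a LAVA (v1, JSON) job definition for either the kernel
# benchmark run or the kernel test run, submits it to the LAVA master over
# XML-RPC, polls the scheduler until the job finishes, then parses the result
# bundle to report the number of passed and failed test cases.
#
# Example invocation (all paths, commit IDs and the API key are placeholders):
#   python lava-submit.py -t tests -j my-job \
#       -k <kernel-image-path> -km <linux-modules-archive> -lm <lttng-modules-archive> \
#       -l <lava-api-key> -tc <lttng-tools-commit> -uc <lttng-ust-commit>
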
import argparse
import base64
import json
import os
import sys
import time
import xmlrpclib
from collections import OrderedDict
from enum import Enum

USERNAME = 'frdeso'
HOSTNAME = 'lava-master.internal.efficios.com'
SCP_PATH = 'scp://jenkins-lava@storage.internal.efficios.com'

class TestType(Enum):
    benchmarks = 1
    tests = 2

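# Fetch the result bundle of a job from the LAVA dashboard API and return its
# decoded JSON content.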
def get_job_bundle_content(server, job):
    bundle_sha = server.scheduler.job_status(str(job))['bundle_sha1']
    bundle = server.dashboard.get(bundle_sha)

    return json.loads(bundle['content'])

# Parse the results bundle and count how many of the job's test cases passed
# and how many failed.
def check_job_all_test_cases_state_count(server, job):
    content = get_job_bundle_content(server, job)

    passed_tests = 0
    failed_tests = 0
    for run in content['test_runs']:
        for result in run['test_results']:
            if 'test_case_id' in result:
                if result['result'] == 'pass':
                    passed_tests += 1
                else:
                    failed_tests += 1
    return (passed_tests, failed_tests)

# Parse the attachments of the test case to fetch the stdout log of the test
# suite and print the section produced by the run-tests test case.
def print_test_output(server, job):
    content = get_job_bundle_content(server, job)
    found = False

    for run in content['test_runs']:
        if run['test_id'] == 'lttng-kernel-test':
            for attachment in run['attachments']:
                if attachment['pathname'] == 'stdout.log':

                    # Decode the base64 file and split on newlines to iterate
                    # on the lines as a list.
                    testoutput = base64.b64decode(attachment['content']).split('\n')

                    # Use an explicit iterator so the iteration state is kept
                    # across the two loops below.
                    testoutput_iter = iter(testoutput)
                    for line in testoutput_iter:

                        # Find the header of the test case and start printing
                        # from there.
                        if 'LAVA_SIGNAL_STARTTC run-tests' in line:
                            found = True
                            print('---- TEST SUITE OUTPUT BEGIN ----')
                            for line in testoutput_iter:
                                if 'LAVA_SIGNAL_ENDTC run-tests' not in line:
                                    print(line)
                                else:
                                    # Stop printing once we reach the end of
                                    # the section.
                                    break

                        if found is True:
                            print('----- TEST SUITE OUTPUT END -----')
                            break

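# Build the skeleton of a LAVA job definition (v1 JSON) for the given device
# type. Jobs targeting the x86 device carry the 'dev-sda1' tag; that
# partition is mounted on /tmp by get_config_cmd() below.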
def create_new_job(name, build_device):
    job = OrderedDict({
        'health_check': False,
        'job_name': name,
        'device_type': build_device,
        'tags': [],
        'timeout': 18000,
        'actions': []
    })
    if build_device == 'x86':
        job['tags'].append('dev-sda1')

    return job

def get_boot_cmd():
    command = OrderedDict({
        'command': 'boot_image'
    })
    return command

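# Build the 'lava_command_run' action that configures the freshly booted
# system: bring up networking, set a nameserver, create the 'tracing' group
# and install the packages needed to build and run the tests. On the x86
# device, /dev/sda1 is also mounted on /tmp and wiped.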
def get_config_cmd(build_device):
    packages = ['bsdtar', 'psmisc', 'wget', 'python3', 'python3-pip',
                'libglib2.0-dev', 'libffi-dev', 'elfutils', 'libdw-dev',
                'libelf-dev', 'libmount-dev', 'libxml2', 'python3-pandas',
                'python3-numpy']
    command = OrderedDict({
        'command': 'lava_command_run',
        'parameters': {
            'commands': [
                'ifup eth0',
                'route -n',
                'cat /etc/resolv.conf',
                'echo nameserver 172.18.0.12 > /etc/resolv.conf',
                'groupadd tracing'
            ]
        }
    })
    if build_device == 'x86':
        command['parameters']['commands'].extend([
            'mount /dev/sda1 /tmp',
            'rm -rf /tmp/*'])

    command['parameters']['commands'].extend([
        'depmod -a',
        'locale-gen en_US.UTF-8',
        'apt-get update',
        'apt-get install -y {}'.format(' '.join(packages))
        ])
    return command

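# Build the 'lava_test_shell' action that runs the benchmark test definitions
# (failing-close, failing-open-efault, failing-open-enoent and perf-tests)
# fetched from the lttng-ci repository.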
def get_benchmarks_cmd():
    command = OrderedDict({
        'command': 'lava_test_shell',
        'parameters': {
            'testdef_repos': [
                {
                    'git-repo': 'https://github.com/lttng/lttng-ci.git',
                    'revision': 'master',
                    'testdef': 'lava/baremetal-tests/failing-close.yml'
                },
                {
                    'git-repo': 'https://github.com/lttng/lttng-ci.git',
                    'revision': 'master',
                    'testdef': 'lava/baremetal-tests/failing-open-efault.yml'
                },
                {
                    'git-repo': 'https://github.com/lttng/lttng-ci.git',
                    'revision': 'master',
                    'testdef': 'lava/baremetal-tests/failing-open-enoent.yml'
                },
                {
                    'git-repo': 'https://github.com/lttng/lttng-ci.git',
                    'revision': 'master',
                    'testdef': 'lava/baremetal-tests/perf-tests.yml'
                }
            ],
            'timeout': 18000
        }
    })
    return command

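# Build the 'lava_test_shell' action that runs the kernel test suite
# (kernel-tests.yml) fetched from the lttng-ci repository.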
def get_tests_cmd():
    command = OrderedDict({
        'command': 'lava_test_shell',
        'parameters': {
            'testdef_repos': [
                {
                    'git-repo': 'https://github.com/lttng/lttng-ci.git',
                    'revision': 'master',
                    'testdef': 'lava/baremetal-tests/kernel-tests.yml'
                }
            ],
            'timeout': 18000
        }
    })
    return command

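# Build the 'submit_results' action that uploads the result bundle to the
# given anonymous stream on the LAVA master.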
def get_results_cmd(stream_name):
    command = OrderedDict({
        'command': 'submit_results',
        'parameters': {
            'server': 'http://lava-master.internal.efficios.com/RPC2/'
        }
    })
    command['parameters']['stream'] = '/anonymous/' + stream_name + '/'
    return command

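# Build the 'deploy_kernel' action for the KVM device: deploy the kernel
# image and a Trusty rootfs image, with the Linux and LTTng modules archives
# added to the rootfs through 'customize' entries.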
def get_deploy_cmd_kvm(jenkins_job, kernel_path, linux_modules_path, lttng_modules_path):
    command = OrderedDict({
        'command': 'deploy_kernel',
        'metadata': {},
        'parameters': {
            'customize': {},
            'kernel': None,
            'rootfs': 'file:///var/lib/lava-server/default/media/images/trusty-grub.img.gz',
            'target_type': 'ubuntu'
        }
    })

    command['parameters']['customize'][SCP_PATH + linux_modules_path] = ['rootfs:/', 'archive']
    command['parameters']['customize'][SCP_PATH + lttng_modules_path] = ['rootfs:/', 'archive']
    command['parameters']['kernel'] = str(SCP_PATH + kernel_path)
    command['metadata']['jenkins_jobname'] = jenkins_job

    return command

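# Build the 'deploy_kernel' action for the x86 baremetal device: deploy the
# kernel image and an NFS rootfs, with the Linux and LTTng modules archives
# applied as overlays.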
def get_deploy_cmd_x86(jenkins_job, kernel_path, linux_modules_path, lttng_modules_path, nb_iter=None):
    command = OrderedDict({
        'command': 'deploy_kernel',
        'metadata': {},
        'parameters': {
            'overlays': [],
            'kernel': None,
            'nfsrootfs': str(SCP_PATH + '/storage/jenkins-lava/rootfs/rootfs_amd64_trusty_2016-02-23-1134.tar.gz'),
            'target_type': 'ubuntu'
        }
    })

    command['parameters']['overlays'].append(str(SCP_PATH + linux_modules_path))
    command['parameters']['overlays'].append(str(SCP_PATH + lttng_modules_path))
    command['parameters']['kernel'] = str(SCP_PATH + kernel_path)
    command['metadata']['jenkins_jobname'] = jenkins_job
    if nb_iter is not None:
        command['metadata']['nb_iterations'] = nb_iter

    return command


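# Build the 'lava_command_run' action that prepares the test environment:
# clone the syscall benchmark suite, install vlttng, then build lttng-tools
# (and optionally lttng-ust) at the requested commits into a virtualenv under
# /root/virtenv (KVM) or /tmp/virtenv (x86).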
def get_env_setup_cmd(build_device, lttng_tools_commit, lttng_ust_commit=None):
    command = OrderedDict({
        'command': 'lava_command_run',
        'parameters': {
            'commands': [
                'git clone https://github.com/frdeso/syscall-bench-it.git bm',
                'pip3 install vlttng',
            ],
            'timeout': 18000
        }
    })

    vlttng_cmd = 'vlttng --jobs=16 --profile urcu-master' \
            ' --profile babeltrace-stable-1.4 ' \
            ' --profile lttng-tools-master' \
            ' --override projects.lttng-tools.checkout=' + lttng_tools_commit + \
            ' --profile lttng-tools-no-man-pages'

    if lttng_ust_commit is not None:
        vlttng_cmd += ' --profile lttng-ust-master ' \
                ' --override projects.lttng-ust.checkout=' + lttng_ust_commit + \
                ' --profile lttng-ust-no-man-pages'

    virtenv_path = None
    if build_device == 'kvm':
        virtenv_path = '/root/virtenv'
    else:
        virtenv_path = '/tmp/virtenv'

    vlttng_cmd += ' ' + virtenv_path

    command['parameters']['commands'].append(vlttng_cmd)
    command['parameters']['commands'].append('ln -s ' + virtenv_path + ' /root/lttngvenv')
    command['parameters']['commands'].append('sync')

    return command

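# Parse the command-line arguments, build the LAVA job for the requested test
# type (benchmarks on the x86 baremetal device, tests on the KVM device),
# submit it, wait for completion and report the results.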
def main():
    test_type = None
    parser = argparse.ArgumentParser(description='Launch baremetal test using Lava')
    parser.add_argument('-t', '--type', required=True)
    parser.add_argument('-j', '--jobname', required=True)
    parser.add_argument('-k', '--kernel', required=True)
    parser.add_argument('-km', '--kmodule', required=True)
    parser.add_argument('-lm', '--lmodule', required=True)
    parser.add_argument('-l', '--lava-key', required=True)
    parser.add_argument('-tc', '--tools-commit', required=True)
    parser.add_argument('-uc', '--ust-commit', required=False)
    args = parser.parse_args()

    if args.type == 'benchmarks':
        test_type = TestType.benchmarks
    elif args.type == 'tests':
        test_type = TestType.tests
    else:
        print('Argument -t/--type {} unrecognized. Exiting...'.format(args.type))
        return -1

    if test_type is TestType.benchmarks:
        j = create_new_job(args.jobname, build_device='x86')
        j['actions'].append(get_deploy_cmd_x86(args.jobname, args.kernel, args.kmodule, args.lmodule))
    elif test_type is TestType.tests:
        j = create_new_job(args.jobname, build_device='kvm')
        j['actions'].append(get_deploy_cmd_kvm(args.jobname, args.kernel, args.kmodule, args.lmodule))

    j['actions'].append(get_boot_cmd())

    if test_type is TestType.benchmarks:
        j['actions'].append(get_config_cmd('x86'))
        j['actions'].append(get_env_setup_cmd('x86', args.tools_commit))
        j['actions'].append(get_benchmarks_cmd())
        j['actions'].append(get_results_cmd(stream_name='benchmark-kernel'))
    elif test_type is TestType.tests:
        if args.ust_commit is None:
            print('Test runs need the -uc/--ust-commit option. Exiting...')
            return -1
        j['actions'].append(get_config_cmd('kvm'))
        j['actions'].append(get_env_setup_cmd('kvm', args.tools_commit, args.ust_commit))
        j['actions'].append(get_tests_cmd())
        j['actions'].append(get_results_cmd(stream_name='tests-kernel'))
    else:
        assert False, 'Unknown test type'

    server = xmlrpclib.ServerProxy('http://%s:%s@%s/RPC2' % (USERNAME, args.lava_key, HOSTNAME))

    jobid = server.scheduler.submit_job(json.dumps(j))

    print('Lava jobid: {}'.format(jobid))

    # Check the status of the job every 30 seconds until it is no longer
    # submitted or running.
    jobstatus = server.scheduler.job_status(jobid)['job_status']
    while jobstatus in ('Submitted', 'Running'):
        time.sleep(30)
        jobstatus = server.scheduler.job_status(jobid)['job_status']

    print('Job ended with {} status.'.format(jobstatus))
    if jobstatus != 'Complete':
        return -1

    passed, failed = check_job_all_test_cases_state_count(server, jobid)

    print('With {} passed and {} failed Lava test cases.'.format(passed, failed))

    if test_type is TestType.tests:
        print_test_output(server, jobid)

    if failed == 0:
        return 0
    else:
        return -1

if __name__ == "__main__":
    sys.exit(main())