X-Git-Url: http://git.lttng.org./?a=blobdiff_plain;f=scripts%2Fsystem-tests%2Flava-submit.py;h=cb1f7794dc533862a0934a0d1692201274169afa;hb=a76a58f3a90663a5a9494f399eea63720b8d257c;hp=94ecba2d0953c031c95ed69a223d4e2344cb41e4;hpb=3868791dff578a02f2a8d2e0b155aa7d4ef9361f;p=lttng-ci.git

diff --git a/scripts/system-tests/lava-submit.py b/scripts/system-tests/lava-submit.py
index 94ecba2..cb1f779 100644
--- a/scripts/system-tests/lava-submit.py
+++ b/scripts/system-tests/lava-submit.py
@@ -18,6 +18,7 @@ import argparse
 import base64
 import json
 import os
+import random
 import sys
 import time
 import xmlrpc.client
@@ -267,7 +268,9 @@ def get_kvm_tests_cmd():
         }
     })
     return command
-def get_kprobes_test_cmd():
+
+def get_kprobes_generate_data_cmd():
+    random_seed = random.randint(0, 1000000)
     command = OrderedDict({
         'command': 'lava_test_shell',
         'parameters': {
@@ -275,10 +278,28 @@
                 {
                     'git-repo': 'https://github.com/lttng/lttng-ci.git',
                     'revision': 'master',
-                    'testdef': 'lava/system-tests/kprobe-fuzzing-tests.yml'
+                    'testdef': 'lava/system-tests/kprobe-fuzzing-generate-data.yml',
+                    'parameters': { 'RANDOM_SEED': str(random_seed) }
                 }
             ],
-            'timeout': 7200
+            'timeout': 60
+        }
+    })
+    return command
+
+def get_kprobes_test_cmd(round_nb):
+    command = OrderedDict({
+        'command': 'lava_test_shell',
+        'parameters': {
+            'testdef_repos': [
+                {
+                    'git-repo': 'https://github.com/lttng/lttng-ci.git',
+                    'revision': 'master',
+                    'testdef': 'lava/system-tests/kprobe-fuzzing-tests.yml',
+                    'parameters': { 'ROUND_NB': str(round_nb) }
+                }
+            ],
+            'timeout': 1000
         }
     })
     return command
@@ -293,7 +314,7 @@ def get_results_cmd(stream_name):
     command['parameters']['stream']='/anonymous/'+stream_name+'/'
     return command
 
-def get_deploy_cmd_kvm(jenkins_job, kernel_path, linux_modules_path, lttng_modules_path):
+def get_deploy_cmd_kvm(jenkins_job, kernel_path, lttng_modules_path):
     command = OrderedDict({
         'command': 'deploy_kernel',
         'metadata': {},
@@ -307,14 +328,13 @@ def get_deploy_cmd_kvm(jenkins_job, kernel_path, linux_modules_path, lttng_modul
         }
     })
 
-    command['parameters']['customize'][SCP_PATH+linux_modules_path]=['rootfs:/','archive']
     command['parameters']['customize'][SCP_PATH+lttng_modules_path]=['rootfs:/','archive']
     command['parameters']['kernel'] = str(SCP_PATH+kernel_path)
     command['metadata']['jenkins_jobname'] = jenkins_job
 
     return command
 
-def get_deploy_cmd_x86(jenkins_job, kernel_path, linux_modules_path, lttng_modules_path, nb_iter=None):
+def get_deploy_cmd_x86(jenkins_job, kernel_path, lttng_modules_path, nb_iter=None):
     command = OrderedDict({
         'command': 'deploy_kernel',
         'metadata': {},
@@ -326,7 +346,6 @@ def get_deploy_cmd_x86(jenkins_job, kernel_path, linux_modules_path, lttng_modul
         }
     })
 
-    command['parameters']['overlays'].append( str(SCP_PATH+linux_modules_path))
     command['parameters']['overlays'].append( str(SCP_PATH+lttng_modules_path))
     command['parameters']['kernel'] = str(SCP_PATH+kernel_path)
     command['metadata']['jenkins_jobname'] = jenkins_job
@@ -383,7 +402,6 @@ def main():
     parser.add_argument('-t', '--type', required=True)
     parser.add_argument('-j', '--jobname', required=True)
     parser.add_argument('-k', '--kernel', required=True)
-    parser.add_argument('-km', '--kmodule', required=True)
     parser.add_argument('-lm', '--lmodule', required=True)
     parser.add_argument('-tc', '--tools-commit', required=True)
     parser.add_argument('-uc', '--ust-commit', required=False)
@@ -410,13 +428,13 @@ def main():
 
     if test_type is TestType.baremetal_benchmarks:
         j = create_new_job(args.jobname, build_device='x86')
-        j['actions'].append(get_deploy_cmd_x86(args.jobname, args.kernel, args.kmodule, args.lmodule))
+        j['actions'].append(get_deploy_cmd_x86(args.jobname, args.kernel, args.lmodule))
     elif test_type is TestType.baremetal_tests:
         j = create_new_job(args.jobname, build_device='x86')
-        j['actions'].append(get_deploy_cmd_x86(args.jobname, args.kernel, args.kmodule, args.lmodule))
+        j['actions'].append(get_deploy_cmd_x86(args.jobname, args.kernel, args.lmodule))
     elif test_type is TestType.kvm_tests or test_type is TestType.kvm_fuzzing_tests:
         j = create_new_job(args.jobname, build_device='kvm')
-        j['actions'].append(get_deploy_cmd_kvm(args.jobname, args.kernel, args.kmodule, args.lmodule))
+        j['actions'].append(get_deploy_cmd_kvm(args.jobname, args.kernel, args.lmodule))
 
     j['actions'].append(get_boot_cmd())
 
@@ -446,8 +464,9 @@ def main():
             print('Tests runs need -uc/--ust-commit options. Exiting...')
             return -1
         j['actions'].append(get_config_cmd('kvm'))
-        j['actions'].append(get_env_setup_cmd('kvm', args.tools_commit, args.ust_commit))
-        j['actions'].append(get_kprobes_test_cmd())
+        j['actions'].append(get_kprobes_generate_data_cmd())
+        for i in range(10):
+            j['actions'].append(get_kprobes_test_cmd(round_nb=i))
         j['actions'].append(get_results_cmd(stream_name='tests-kernel'))
     else:
         assert False, 'Unknown test type'
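
Below is a small, self-contained sketch (not part of the patch) of what the new kprobe fuzzing flow appends to a job's action list: one randomly seeded data-generation step, followed by ten numbered test rounds. The two helper functions are copied from the patch; the actions list and the JSON dump at the end are illustrative stand-ins for the real job assembled in main() (deploy, boot, config and results steps are omitted here).

# Illustration only: shape of the LAVA v1 'lava_test_shell' actions added by
# this patch for kprobe fuzzing. Function bodies mirror the patch; the rest
# of the job (deploy/boot/config/results) is left out.
import json
import random
from collections import OrderedDict

def get_kprobes_generate_data_cmd():
    # A new random seed is drawn for each submitted job and handed to the
    # data-generation test definition as RANDOM_SEED.
    random_seed = random.randint(0, 1000000)
    return OrderedDict({
        'command': 'lava_test_shell',
        'parameters': {
            'testdef_repos': [
                {
                    'git-repo': 'https://github.com/lttng/lttng-ci.git',
                    'revision': 'master',
                    'testdef': 'lava/system-tests/kprobe-fuzzing-generate-data.yml',
                    'parameters': { 'RANDOM_SEED': str(random_seed) }
                }
            ],
            'timeout': 60
        }
    })

def get_kprobes_test_cmd(round_nb):
    # One fuzzing round; ROUND_NB is passed through to the test definition
    # as a shell parameter.
    return OrderedDict({
        'command': 'lava_test_shell',
        'parameters': {
            'testdef_repos': [
                {
                    'git-repo': 'https://github.com/lttng/lttng-ci.git',
                    'revision': 'master',
                    'testdef': 'lava/system-tests/kprobe-fuzzing-tests.yml',
                    'parameters': { 'ROUND_NB': str(round_nb) }
                }
            ],
            'timeout': 1000
        }
    })

# Same ordering as main(): generate the fuzzing data once, then run ten rounds.
actions = [get_kprobes_generate_data_cmd()]
for i in range(10):
    actions.append(get_kprobes_test_cmd(round_nb=i))
print(json.dumps(actions, indent=2))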