class KernelVersion(Version):
- """ Kernel version class """
-
- re26 = re.compile(r'^(2\.\d+) \. (\d+) (\. (\d+))? (\-rc(\d+))?$',
- re.VERBOSE)
- re30 = re.compile(r'^(\d+) \. (\d+) (\. (\d+))? (\-rc(\d+))?$', re.VERBOSE)
+ """Kernel version class"""
+ re26 = re.compile(r"^(2\.\d+) \. (\d+) (\. (\d+))? (\-rc(\d+))?$", re.VERBOSE)
+ re30 = re.compile(r"^(\d+) \. (\d+) (\. (\d+))? (\-rc(\d+))?$", re.VERBOSE)
def __init__(self, vstring=None):
self._rc = None
if vstring:
self.parse(vstring)
-
def parse(self, vstring):
- """ Parse version string """
+ """Parse version string"""
self._vstring = vstring
else:
self._rc = None
-
def isrc(self):
- """ Is this version an RC """
+ """Is this version an RC"""
return self._rc is not None
-
def __str__(self):
return self._vstring
-
def __repr__(self):
return "KernelVersion ('%s')" % str(self)
-
def _cmp(self, other):
if isinstance(other, str):
other = KernelVersion(other)
# case 3: self doesn't have rc, other does: self is greater
# case 4: both have rc: must compare them!
- if (not self._rc and not other._rc):
+ if not self._rc and not other._rc:
return 0
- elif (self._rc and not other._rc):
+ elif self._rc and not other._rc:
return -1
- elif (not self._rc and other._rc):
+ elif not self._rc and other._rc:
return 1
- elif (self._rc and other._rc):
+ elif self._rc and other._rc:
if self._rc == other._rc:
return 0
elif self._rc < other._rc:
else:
assert False, "never get here"
+
def main():
- """ Main """
+ """Main"""
versions = []
kernel_cutoff = KernelVersion("2.6.36")
kernel_path = os.getcwd() + "/kernel"
parser = argparse.ArgumentParser()
-    parser.add_argument("--kernel-path", help="The location of the kernel. Defaults to currentdir/linux")
-    parser.add_argument("--kernel-git-remote", help="The Git URL of the kernel. Defaults to git://git.kernel.org/pub/scm/linux/kernel/git/stable/linux-stable.git")
-    parser.add_argument("--kernel-cutoff", help="The lower version cutoff in X.X.X format. Defaults to 2.6.36")
+ parser.add_argument(
+ "--kernel-path",
+        help="The location of the kernel. Defaults to currentdir/linux",
+ )
+ parser.add_argument(
+ "--kernel-git-remote",
+        help="The Git URL of the kernel. Defaults to git://git.kernel.org/pub/scm/linux/kernel/git/stable/linux-stable.git",
+ )
+ parser.add_argument(
+ "--kernel-cutoff",
+        help="The lower version cutoff in X.X.X format. Defaults to 2.6.36",
+ )
args = parser.parse_args()
if args.kernel_path:
# First get all valid versions
for tag in linux_repo.tags:
try:
- version = KernelVersion(tag.name.lstrip('v'))
+ version = KernelVersion(tag.name.lstrip("v"))
# Add only those who are superior to the cutoff version
if version >= kernel_cutoff:
versions.append(version)
except ValueError:
- #print(tag.name)
+ # print(tag.name)
continue
# Sort the list by version order
versions.remove(version)
last = False
- #for version in versions:
+ # for version in versions:
# print(version)
# Build yaml object
print(yaml.dump(yversions, default_flow_style=False))
-
-
if __name__ == "__main__":
main()
""" This script is used to upgrade the base snapshot of standalone ci slaves """
-USERNAME = ''
-APIKEY = ''
-JENKINS_URL = 'https://ci.lttng.org'
+USERNAME = ""
+APIKEY = ""
+JENKINS_URL = "https://ci.lttng.org"
-DISTRO_LIST = ['el', 'sles', 'ubuntu']
-DEFAULT_DISTRO = 'ubuntu'
+DISTRO_LIST = ["el", "sles", "ubuntu"]
+DEFAULT_DISTRO = "ubuntu"
DISTRO_COMMAND = {
- 'el': 'yum update -y && package-cleanup -y --oldkernels --count=2 && yum clean all',
- 'sles': 'zypper --non-interactive refresh && zypper --non-interactive patch --auto-agree-with-licenses --with-interactive',
- 'ubuntu': 'apt-get update && apt-get dist-upgrade -V -y && apt-get clean && apt-get --purge autoremove -y',
+ "el": "yum update -y && package-cleanup -y --oldkernels --count=2 && yum clean all",
+ "sles": "zypper --non-interactive refresh && zypper --non-interactive patch --auto-agree-with-licenses --with-interactive",
+ "ubuntu": "apt-get update && apt-get dist-upgrade -V -y && apt-get clean && apt-get --purge autoremove -y",
}
-BASESNAP = 'base-configuration'
+BASESNAP = "base-configuration"
-SNAPSHOTXML = """
+SNAPSHOTXML = (
+ """
<domainsnapshot>
<name>%s</name>
<description>Snapshot of OS install and updates</description>
<memory snapshot='no'/>
</domainsnapshot>
-""" % BASESNAP
+"""
+ % BASESNAP
+)
import argparse
import sys
def main():
- """ Main """
-
- parser = argparse.ArgumentParser(description='Update base snapshot.')
- parser.add_argument('instance_name', metavar='INSTANCE', type=str,
- help='the shortname of the instance to update')
- parser.add_argument('vmhost_name', metavar='VMHOST', type=str,
- help='the hostname of the VM host')
- parser.add_argument('--distro', choices=DISTRO_LIST,
- default=DEFAULT_DISTRO, type=str,
- help='the distro of the target instance')
+ """Main"""
+
+ parser = argparse.ArgumentParser(description="Update base snapshot.")
+ parser.add_argument(
+ "instance_name",
+ metavar="INSTANCE",
+ type=str,
+ help="the shortname of the instance to update",
+ )
+ parser.add_argument(
+ "vmhost_name", metavar="VMHOST", type=str, help="the hostname of the VM host"
+ )
+ parser.add_argument(
+ "--distro",
+ choices=DISTRO_LIST,
+ default=DEFAULT_DISTRO,
+ type=str,
+ help="the distro of the target instance",
+ )
args = parser.parse_args()
vmhost_name = args.vmhost_name
distro = args.distro
-
    # Get Jenkins connection
jenkins = Jenkins(JENKINS_URL, username=USERNAME, password=APIKEY)
print("Node %s is not idle" % instance_name)
sys.exit(1)
-
# Set node temporarily offline
if not node.is_temporarily_offline():
- node.toggle_temporarily_offline('Down for upgrade to base snapshot')
+ node.toggle_temporarily_offline("Down for upgrade to base snapshot")
    # Get libvirt connection
print("Opening libvirt connexion to %s..." % vmhost_name)
print("Failed to shutdown %s", instance_name)
sys.exit(1)
-
# Revert to base snapshot
print("Getting base snapshot...")
basesnap = vminstance.snapshotLookupByName(BASESNAP)
print("Could not find base snapshot %s" % BASESNAP)
sys.exit(1)
- #if not basesnap.isCurrent():
+ # if not basesnap.isCurrent():
# print("Not current snapshot")
print("Reverting to base snapshot...")
print("Failed to start instance %s" % instance_name)
sys.exit(1)
-
# Wait for instance to boot
print("Waiting for instance to boot...")
sleep(10)
"""
"""
import jinja2
+
+
@jinja2.pass_environment
def do_groovy(env, data, skip_list_wrap=False):
- list_format="[{}]"
+ list_format = "[{}]"
if skip_list_wrap:
- list_format="{}"
+ list_format = "{}"
if isinstance(data, str):
- return '"{}"'.format(data.replace('"', '\"'))
+        return '"{}"'.format(data.replace('"', '\\"'))
elif isinstance(data, list) or isinstance(data, tuple):
_data = [do_groovy(env, d) for d in data]
return list_format.format(", ".join(_data))
elif isinstance(data, dict):
- _data = ["{}: {}".format(key, do_groovy(env, value)) for key, value in data.items()]
+ _data = [
+ "{}: {}".format(key, do_groovy(env, value)) for key, value in data.items()
+ ]
return "[{}]".format(", ".join(_data))
elif isinstance(data, bool):
- return 'true' if data else 'false'
+ return "true" if data else "false"
else:
raise Exception("Unknown data type: '{}'".format(type(data)))
+
+
FILTERS = {
"to_groovy": do_groovy,
}
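# A minimal usage sketch (hypothetical environment and template, not part of
# this repo's code): register FILTERS on a jinja2.Environment and render
# Python data as Groovy literals.
#
#   env = jinja2.Environment()
#   env.filters.update(FILTERS)
#   tmpl = env.from_string("def params = {{ p | to_groovy }}")
#   tmpl.render(p={"retries": ["a", "b"], "debug": True})
#   # -> def params = [retries: ["a", "b"], debug: true]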
def compress(filename):
- command = [
- 'tar', '-c', '-z',
- '-f', filename + ".tar.gz",
- '-C', filename,
- './'
- ]
+ command = ["tar", "-c", "-z", "-f", filename + ".tar.gz", "-C", filename, "./"]
subprocess.run(command, check=True)
shutil.rmtree(filename)
packages = [
- 'autoconf',
- 'automake',
- 'bash-completion',
- 'bison',
- 'build-essential',
- 'chrpath',
- 'clang',
- 'cloc',
- 'curl',
- 'elfutils',
- 'flex',
- 'gettext',
- 'git',
- 'htop',
- 'jq',
- 'libarchive-tools',
- 'libdw-dev',
- 'libelf-dev',
- 'libffi-dev',
- 'libglib2.0-dev',
- 'libmount-dev',
- 'libnuma-dev',
- 'libpfm4-dev',
- 'libpopt-dev',
- 'libtap-harness-archive-perl',
- 'libtool',
- 'libxml2',
- 'libxml2-dev',
- 'locales',
- 'netcat-traditional',
- 'openssh-server',
- 'psmisc',
- 'python3-virtualenv',
- 'python3',
- 'python3-dev',
- 'python3-numpy',
- 'python3-pandas',
- 'python3-pip',
- 'python3-setuptools',
- 'python3-sphinx',
- 'python3-venv',
- 'rsync',
- 'stress',
- 'swig',
- 'systemd-timesyncd',
- 'systemtap-sdt-dev',
- 'texinfo',
- 'tree',
- 'uuid-dev',
- 'vim',
- 'wget',
+ "autoconf",
+ "automake",
+ "bash-completion",
+ "bison",
+ "build-essential",
+ "chrpath",
+ "clang",
+ "cloc",
+ "curl",
+ "elfutils",
+ "flex",
+ "gettext",
+ "git",
+ "htop",
+ "jq",
+ "libarchive-tools",
+ "libdw-dev",
+ "libelf-dev",
+ "libffi-dev",
+ "libglib2.0-dev",
+ "libmount-dev",
+ "libnuma-dev",
+ "libpfm4-dev",
+ "libpopt-dev",
+ "libtap-harness-archive-perl",
+ "libtool",
+ "libxml2",
+ "libxml2-dev",
+ "locales",
+ "netcat-traditional",
+ "openssh-server",
+ "psmisc",
+ "python3-virtualenv",
+ "python3",
+ "python3-dev",
+ "python3-numpy",
+ "python3-pandas",
+ "python3-pip",
+ "python3-setuptools",
+ "python3-sphinx",
+ "python3-venv",
+ "rsync",
+ "stress",
+ "swig",
+ "systemd-timesyncd",
+ "systemtap-sdt-dev",
+ "texinfo",
+ "tree",
+ "uuid-dev",
+ "vim",
+ "wget",
]
def main():
- parser = argparse.ArgumentParser(description='Generate lava lttng rootfs')
- parser.add_argument("--arch", default='amd64')
- parser.add_argument("--distribution", default='bookworm')
- parser.add_argument("--mirror", default='https://deb.debian.org/debian')
- parser.add_argument(
- "--component", default='main')
+ parser = argparse.ArgumentParser(description="Generate lava lttng rootfs")
+ parser.add_argument("--arch", default="amd64")
+ parser.add_argument("--distribution", default="bookworm")
+ parser.add_argument("--mirror", default="https://deb.debian.org/debian")
+ parser.add_argument("--component", default="main")
args = parser.parse_args()
- name = "rootfs_{}_{}_{}".format(args.arch, args.distribution,
- datetime.now().strftime("%Y-%m-%d"))
+ name = "rootfs_{}_{}_{}".format(
+ args.arch, args.distribution, datetime.now().strftime("%Y-%m-%d")
+ )
hostname = "linaro-server"
user = "linaro/linaro"
# packages
command = [
- 'chroot', name,
- 'apt-get', 'install', '-y', ] + packages
+ "chroot",
+ name,
+ "apt-get",
+ "install",
+ "-y",
+ ] + packages
completed_command = subprocess.run(command, check=True)
# hostname
- with open(os.path.join(name, 'etc', 'hostname'), 'w', encoding='utf-8') as f:
+ with open(os.path.join(name, "etc", "hostname"), "w", encoding="utf-8") as f:
f.write(hostname + "\n")
# user
command = [
- 'chroot', name,
- 'adduser', '--gecos', '', '--disabled-password', 'linaro',
+ "chroot",
+ name,
+ "adduser",
+ "--gecos",
+ "",
+ "--disabled-password",
+ "linaro",
]
completed_process = subprocess.run(command, check=True)
command = [
- 'chroot', name, 'chpasswd',
- ]
+ "chroot",
+ name,
+ "chpasswd",
+ ]
process = subprocess.Popen(command, stdin=subprocess.PIPE, text=True)
- process.communicate(input='linaro:linaro')
+ process.communicate(input="linaro:linaro")
# root password
process = subprocess.Popen(command, stdin=subprocess.PIPE, text=True)
if __name__ == "__main__":
if os.getuid() != 0:
-        print("This script should be run as root: this is required by debootstrap", file=sys.stderr)
+        print(
+            "This script should be run as root: this is required by debootstrap",
+            file=sys.stderr,
+        )
sys.exit(1)
main()
# This file is automatically @generated by Poetry 1.8.4 and should not be changed by hand.
+[[package]]
+name = "black"
+version = "24.10.0"
+description = "The uncompromising code formatter."
+optional = false
+python-versions = ">=3.9"
+files = [
+ {file = "black-24.10.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e6668650ea4b685440857138e5fe40cde4d652633b1bdffc62933d0db4ed9812"},
+ {file = "black-24.10.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:1c536fcf674217e87b8cc3657b81809d3c085d7bf3ef262ead700da345bfa6ea"},
+ {file = "black-24.10.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:649fff99a20bd06c6f727d2a27f401331dc0cc861fb69cde910fe95b01b5928f"},
+ {file = "black-24.10.0-cp310-cp310-win_amd64.whl", hash = "sha256:fe4d6476887de70546212c99ac9bd803d90b42fc4767f058a0baa895013fbb3e"},
+ {file = "black-24.10.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5a2221696a8224e335c28816a9d331a6c2ae15a2ee34ec857dcf3e45dbfa99ad"},
+ {file = "black-24.10.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f9da3333530dbcecc1be13e69c250ed8dfa67f43c4005fb537bb426e19200d50"},
+ {file = "black-24.10.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4007b1393d902b48b36958a216c20c4482f601569d19ed1df294a496eb366392"},
+ {file = "black-24.10.0-cp311-cp311-win_amd64.whl", hash = "sha256:394d4ddc64782e51153eadcaaca95144ac4c35e27ef9b0a42e121ae7e57a9175"},
+ {file = "black-24.10.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:b5e39e0fae001df40f95bd8cc36b9165c5e2ea88900167bddf258bacef9bbdc3"},
+ {file = "black-24.10.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:d37d422772111794b26757c5b55a3eade028aa3fde43121ab7b673d050949d65"},
+ {file = "black-24.10.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:14b3502784f09ce2443830e3133dacf2c0110d45191ed470ecb04d0f5f6fcb0f"},
+ {file = "black-24.10.0-cp312-cp312-win_amd64.whl", hash = "sha256:30d2c30dc5139211dda799758559d1b049f7f14c580c409d6ad925b74a4208a8"},
+ {file = "black-24.10.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:1cbacacb19e922a1d75ef2b6ccaefcd6e93a2c05ede32f06a21386a04cedb981"},
+ {file = "black-24.10.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:1f93102e0c5bb3907451063e08b9876dbeac810e7da5a8bfb7aeb5a9ef89066b"},
+ {file = "black-24.10.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ddacb691cdcdf77b96f549cf9591701d8db36b2f19519373d60d31746068dbf2"},
+ {file = "black-24.10.0-cp313-cp313-win_amd64.whl", hash = "sha256:680359d932801c76d2e9c9068d05c6b107f2584b2a5b88831c83962eb9984c1b"},
+ {file = "black-24.10.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:17374989640fbca88b6a448129cd1745c5eb8d9547b464f281b251dd00155ccd"},
+ {file = "black-24.10.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:63f626344343083322233f175aaf372d326de8436f5928c042639a4afbbf1d3f"},
+ {file = "black-24.10.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ccfa1d0cb6200857f1923b602f978386a3a2758a65b52e0950299ea014be6800"},
+ {file = "black-24.10.0-cp39-cp39-win_amd64.whl", hash = "sha256:2cd9c95431d94adc56600710f8813ee27eea544dd118d45896bb734e9d7a0dc7"},
+ {file = "black-24.10.0-py3-none-any.whl", hash = "sha256:3bb2b7a1f7b685f85b11fed1ef10f8a9148bceb49853e47a294a3dd963c1dd7d"},
+ {file = "black-24.10.0.tar.gz", hash = "sha256:846ea64c97afe3bc677b761787993be4991810ecc7a4a937816dd6bddedc4875"},
+]
+
+[package.dependencies]
+click = ">=8.0.0"
+mypy-extensions = ">=0.4.3"
+packaging = ">=22.0"
+pathspec = ">=0.9.0"
+platformdirs = ">=2"
+
+[package.extras]
+colorama = ["colorama (>=0.4.3)"]
+d = ["aiohttp (>=3.10)"]
+jupyter = ["ipython (>=7.8.0)", "tokenize-rt (>=3.2.0)"]
+uvloop = ["uvloop (>=0.15.2)"]
+
[[package]]
name = "certifi"
version = "2024.8.30"
{file = "charset_normalizer-3.4.0.tar.gz", hash = "sha256:223217c3d4f82c3ac5e29032b3f1c2eb0fb591b72161f86d93f5719079dae93e"},
]
+[[package]]
+name = "click"
+version = "8.1.7"
+description = "Composable command line interface toolkit"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "click-8.1.7-py3-none-any.whl", hash = "sha256:ae74fb96c20a0277a1d615f1e4d73c8414f5a98db8b799a7931d1582f3390c28"},
+ {file = "click-8.1.7.tar.gz", hash = "sha256:ca9853ad459e787e2192211578cc907e7594e294c7ccc834310722b41b9ca6de"},
+]
+
+[package.dependencies]
+colorama = {version = "*", markers = "platform_system == \"Windows\""}
+
+[[package]]
+name = "colorama"
+version = "0.4.6"
+description = "Cross-platform colored terminal text."
+optional = false
+python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7"
+files = [
+ {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"},
+ {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"},
+]
+
[[package]]
name = "fasteners"
version = "0.19"
{file = "multi_key_dict-2.0.3.zip", hash = "sha256:3a1e1fc705a30a7de1a153ec2992b3ca3655ccd9225d2e427fe6525c8f160d6d"},
]
+[[package]]
+name = "mypy-extensions"
+version = "1.0.0"
+description = "Type system extensions for programs checked with the mypy type checker."
+optional = false
+python-versions = ">=3.5"
+files = [
+ {file = "mypy_extensions-1.0.0-py3-none-any.whl", hash = "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d"},
+ {file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"},
+]
+
[[package]]
name = "packaging"
version = "24.2"
{file = "packaging-24.2.tar.gz", hash = "sha256:c228a6dc5e932d346bc5739379109d49e8853dd8223571c7c5b55260edc0b97f"},
]
+[[package]]
+name = "pathspec"
+version = "0.12.1"
+description = "Utility library for gitignore style pattern matching of file paths."
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "pathspec-0.12.1-py3-none-any.whl", hash = "sha256:a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08"},
+ {file = "pathspec-0.12.1.tar.gz", hash = "sha256:a482d51503a1ab33b1c67a6c3813a26953dbdc71c31dacaef9a838c4e29f5712"},
+]
+
[[package]]
name = "pbr"
version = "6.1.0"
{file = "pbr-6.1.0.tar.gz", hash = "sha256:788183e382e3d1d7707db08978239965e8b9e4e5ed42669bf4758186734d5f24"},
]
+[[package]]
+name = "platformdirs"
+version = "4.3.6"
+description = "A small Python package for determining appropriate platform-specific dirs, e.g. a `user data dir`."
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "platformdirs-4.3.6-py3-none-any.whl", hash = "sha256:73e575e1408ab8103900836b97580d5307456908a03e92031bab39e4554cc3fb"},
+ {file = "platformdirs-4.3.6.tar.gz", hash = "sha256:357fb2acbc885b0419afd3ce3ed34564c13c9b95c89360cd9563f73aa5e2b907"},
+]
+
+[package.extras]
+docs = ["furo (>=2024.8.6)", "proselint (>=0.14)", "sphinx (>=8.0.2)", "sphinx-autodoc-typehints (>=2.4)"]
+test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=8.3.2)", "pytest-cov (>=5)", "pytest-mock (>=3.14)"]
+type = ["mypy (>=1.11.2)"]
+
[[package]]
name = "python-jenkins"
version = "1.8.2"
[metadata]
lock-version = "2.0"
python-versions = "^3.12"
-content-hash = "525caade07e8a85bdb9261c2e39d1027c9757d52f652898542ff9080446f69bb"
+content-hash = "4498a7b62020dadd3af79e5f3d729e79299e10c0077f53b3c1baf39c05316571"
python = "^3.12"
jenkins-job-builder = "^6.4.2"
+[tool.poetry.group.dev.dependencies]
+black = "^24.10.0"
+
[build-system]
requires = ["poetry-core"]
build-backend = "poetry.core.masonry.api"
DEFAULT_BUCKET = "lava"
invalid_commits = {
- "ec9a9794af488a9accce7708a8b0d8188b498789", # Does not build
- "8c99128c640cbce71fb8a6caa15e4c672252b662", # Block on configure
- "f3847c753f1b4f12353c38d97b0577d9993d19fb", # Does not build
- "e0111295f17ddfcc33ec771a8deac505473a06ad", # Does not build
- "d0d4e0ed487ea23aaf0d023513c0a4d86901b79b", # Does not build
- "c24f7ab4dd9edeb5e50b0070fd9d9e8691057dde", # Does not build
- "ce67f5614a4db3b2de4d887eca52135b439b4937", # Does not build
- "80aff5efc66679fd934cef433c0e698694748385", # Does not build
- "f4f11e84942d36fcc8a597d226928bce2ccac4b3", # Does not build
- "ae466a6e1b856d96cf5112a371b4df2b732503ec", # Does not build
- "ade5c95e2a4f90f839f222fc1a66175b3b199922", # Configuration fails
- "30341532906d62808e9d66fb115f5edb4e6f5706", # Configuration fails
- "006c5ffb42f32e802136e3c27a63accb59b4d6c4", # Does not build
- "88488ff5bdcd7679ff1f04fe6cff0d24b4f8fc0c", # Does not build
+ "ec9a9794af488a9accce7708a8b0d8188b498789", # Does not build
+ "8c99128c640cbce71fb8a6caa15e4c672252b662", # Block on configure
+ "f3847c753f1b4f12353c38d97b0577d9993d19fb", # Does not build
+ "e0111295f17ddfcc33ec771a8deac505473a06ad", # Does not build
+ "d0d4e0ed487ea23aaf0d023513c0a4d86901b79b", # Does not build
+ "c24f7ab4dd9edeb5e50b0070fd9d9e8691057dde", # Does not build
+ "ce67f5614a4db3b2de4d887eca52135b439b4937", # Does not build
+ "80aff5efc66679fd934cef433c0e698694748385", # Does not build
+ "f4f11e84942d36fcc8a597d226928bce2ccac4b3", # Does not build
+ "ae466a6e1b856d96cf5112a371b4df2b732503ec", # Does not build
+ "ade5c95e2a4f90f839f222fc1a66175b3b199922", # Configuration fails
+ "30341532906d62808e9d66fb115f5edb4e6f5706", # Configuration fails
+ "006c5ffb42f32e802136e3c27a63accb59b4d6c4", # Does not build
+ "88488ff5bdcd7679ff1f04fe6cff0d24b4f8fc0c", # Does not build
# Other errors
"7c7301d5827bd10ec7c34da7ffc5fe74e5047d38",
"a0df3abf88616cb0799f87f4eb57c54268e63448",
"f4f8f79893b18199b38edc3330093a9403c4c737",
}
+
def json_type(string):
"""
    Argparse type for JSON args.
raise argparse.ArgumentTypeError(msg)
return passed_json
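# Hypothetical usage (the flag name is illustrative): json_type lets argparse
# accept a JSON string directly, e.g.
#   parser.add_argument("--branches", type=json_type)
# so --branches '{"master": "<commit>"}' arrives as a parsed object.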
+
def graph_get_color(branch):
"""
Get the color matching the branch.
client = get_client()
commits_to_test = set()
for branch, cutoff in branches.items():
- commits = [x for x in get_git_log(branch, cutoff, git_path) if x not in invalid_commits]
+ commits = [
+ x for x in get_git_log(branch, cutoff, git_path) if x not in invalid_commits
+ ]
with tempfile.TemporaryDirectory() as workdir:
for commit in commits:
b_results = get_benchmark_results(client, commit, workdir)[0]
continue
commits_to_test.add(commit)
for index, commit in enumerate(commits_to_test):
- print("Job {}/{}".format(index+1, len(commits_to_test)))
- lava_submit.submit(
- commit, wait_for_completion=wait_for_completion, debug=debug
- )
+ print("Job {}/{}".format(index + 1, len(commits_to_test)))
+ lava_submit.submit(commit, wait_for_completion=wait_for_completion, debug=debug)
def main():
help="A dictionary of the form {"
"'branch_name': 'commit_hash_cutoff',...}. Allow custom graphing and"
"jobs generation.",
- required=False, type=json_type
+ required=False,
+ type=json_type,
)
args = parser.parse_args()
commit, debug=False, kernel_commit=DEFAULT_KERNEL_COMMIT, wait_for_completion=True
):
nfsrootfs = "https://obj.internal.efficios.com/lava/rootfs/rootfs_amd64_xenial_2018-12-05.tar.gz"
- nfsrootfs_sha256 = "0df15933ed18eb73ed5f0e7b1eca8d032ee88d92e5dbfc0f56dcc68c821048a8"
- kernel_url = "https://obj.internal.efficios.com/lava/kernel/{}.baremetal.bzImage".format(
- kernel_commit
+ nfsrootfs_sha256 = (
+ "0df15933ed18eb73ed5f0e7b1eca8d032ee88d92e5dbfc0f56dcc68c821048a8"
+ )
+ kernel_url = (
+ "https://obj.internal.efficios.com/lava/kernel/{}.baremetal.bzImage".format(
+ kernel_commit
+ )
)
modules_url = "https://obj.internal.efficios.com/lava/modules/linux/{}.baremetal.linux.modules.tar.gz".format(
kernel_commit
print("Lava jobid:{}".format(jobid), flush=True)
print(
- "Lava job URL: https://{}/scheduler/job/{}".format(
- HOSTNAME, jobid
- ),
+ "Lava job URL: https://{}/scheduler/job/{}".format(HOSTNAME, jobid),
flush=True,
)
import json
from collections import defaultdict
+
def wall_clock_parser(value):
"""
Parse /usr/bin/time wall clock value.
}
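# Note: GNU time's verbose output reports wall clock as "h:mm:ss or m:ss";
# wall_clock_parser above presumably normalizes that value to seconds.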
-
def parse(path, results):
"""
Parser and accumulator for /usr/bin/time results.
import yaml
from jinja2 import Environment, FileSystemLoader
-USERNAME = 'lava-jenkins'
-HOSTNAME = os.environ.get('LAVA_HOST', 'lava-master-03.internal.efficios.com')
-PROTO = os.environ.get('LAVA_PROTO', 'https')
+USERNAME = "lava-jenkins"
+HOSTNAME = os.environ.get("LAVA_HOST", "lava-master-03.internal.efficios.com")
+PROTO = os.environ.get("LAVA_PROTO", "https")
OBJSTORE_URL = "https://obj.internal.efficios.com/lava/results/"
+
def parse_stable_version(stable_version_string):
# Get the major and minor version numbers from the lttng version string.
-    version_match = re.search(r'stable-(\d)\.(\d\d)', stable_version_string)
+    version_match = re.search(r"stable-(\d)\.(\d\d)", stable_version_string)
if version_match is not None:
major_version = int(version_match.group(1))
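# e.g. parse_stable_version("stable-2.13") matches major 2 and minor 13
# (the return statement is in an elided hunk).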
class TestType:
-    """ Enum-like for test type """
+    """Enum-like for test type"""
baremetal_tests = 1
kvm_tests = 2
values = {
- 'baremetal-tests': baremetal_tests,
- 'kvm-tests': kvm_tests,
+ "baremetal-tests": baremetal_tests,
+ "kvm-tests": kvm_tests,
}
class DeviceType:
-    """ Enum-like for device type """
+    """Enum-like for device type"""
- x86 = 'x86'
- kvm = 'qemu'
- values = {'kvm': kvm, 'x86': x86}
+ x86 = "x86"
+ kvm = "qemu"
+ values = {"kvm": kvm, "x86": x86}
def get_job_bundle_content(server, job):
try:
- bundle_sha = server.scheduler.job_status(str(job))['bundle_sha1']
+ bundle_sha = server.scheduler.job_status(str(job))["bundle_sha1"]
bundle = server.dashboard.get(bundle_sha)
except xmlrpc.client.Fault as error:
- print('Error while fetching results bundle', error.faultString)
+ print("Error while fetching results bundle", error.faultString)
raise error
- return json.loads(bundle['content'])
+ return json.loads(bundle["content"])
def check_job_all_test_cases_state_count(server, job):
passed_tests = 0
failed_tests = 0
for testcase in testcases:
- if testcase['result'] != 'pass':
+ if testcase["result"] != "pass":
print(
"\tFAILED {}\n\t\t See {}://{}{}".format(
- testcase['name'], PROTO, HOSTNAME, testcase['url']
+ testcase["name"], PROTO, HOSTNAME, testcase["url"]
)
)
failed_tests += 1
Parse the attachment of the testcase to fetch the stdout of the test suite
"""
job_finished, log = server.scheduler.jobs.logs(str(job))
- logs = yaml.load(log.data.decode('ascii'), Loader=yaml.Loader)
+ logs = yaml.load(log.data.decode("ascii"), Loader=yaml.Loader)
print_line = False
for line in logs:
- if line['lvl'] != 'target':
+ if line["lvl"] != "target":
continue
- if line['msg'] == '<LAVA_SIGNAL_STARTTC run-tests>':
- print('---- TEST SUITE OUTPUT BEGIN ----')
+ if line["msg"] == "<LAVA_SIGNAL_STARTTC run-tests>":
+ print("---- TEST SUITE OUTPUT BEGIN ----")
print_line = True
continue
- if line['msg'] == '<LAVA_SIGNAL_ENDTC run-tests>':
- print('----- TEST SUITE OUTPUT END -----')
+ if line["msg"] == "<LAVA_SIGNAL_ENDTC run-tests>":
+ print("----- TEST SUITE OUTPUT END -----")
print_line = False
continue
if print_line:
- print("{} {}".format(line['dt'], line['msg']))
+ print("{} {}".format(line["dt"], line["msg"]))
def get_vlttng_cmd(
- lttng_version, lttng_tools_url, lttng_tools_commit, lttng_ust_url=None, lttng_ust_commit=None
+ lttng_version,
+ lttng_tools_url,
+ lttng_tools_commit,
+ lttng_ust_url=None,
+ lttng_ust_commit=None,
):
"""
Return vlttng cmd to be used in the job template for setup.
major_version, minor_version = parse_stable_version(lttng_version)
urcu_profile = ""
- if lttng_version == 'master' or (major_version >= 2 and minor_version >= 11):
+ if lttng_version == "master" or (major_version >= 2 and minor_version >= 11):
urcu_profile = "urcu-master"
else:
urcu_profile = "urcu-stable-0.12"
# Starting with 2.14, babeltrace2 is the reader for testing.
- if lttng_version == 'master' or (major_version >= 2 and minor_version >= 14):
- babeltrace_profile = " --profile babeltrace2-stable-2.0 --profile babeltrace2-python"
+ if lttng_version == "master" or (major_version >= 2 and minor_version >= 14):
+ babeltrace_profile = (
+ " --profile babeltrace2-stable-2.0 --profile babeltrace2-python"
+ )
babeltrace_overrides = " --override projects.babeltrace2.build-env.PYTHON=python3 --override projects.babeltrace2.build-env.PYTHON_CONFIG=python3-config -o projects.babeltrace2.configure+=--disable-man-pages"
else:
- babeltrace_profile = " --profile babeltrace-stable-1.5 --profile babeltrace-python"
+ babeltrace_profile = (
+ " --profile babeltrace-stable-1.5 --profile babeltrace-python"
+ )
babeltrace_overrides = " --override projects.babeltrace.build-env.PYTHON=python3 --override projects.babeltrace.build-env.PYTHON_CONFIG=python3-config"
-
vlttng_cmd = (
- 'vlttng --jobs=$(nproc) --profile ' + urcu_profile
+ "vlttng --jobs=$(nproc) --profile "
+ + urcu_profile
+ babeltrace_profile
+ babeltrace_overrides
- + ' --profile lttng-tools-master'
- ' --override projects.lttng-tools.source='
+ + " --profile lttng-tools-master"
+ " --override projects.lttng-tools.source="
+ lttng_tools_url
- + ' --override projects.lttng-tools.checkout='
+ + " --override projects.lttng-tools.checkout="
+ lttng_tools_commit
- + ' --profile lttng-tools-no-man-pages'
+ + " --profile lttng-tools-no-man-pages"
)
if lttng_ust_commit is not None:
vlttng_cmd += (
- ' --profile lttng-ust-master '
- ' --override projects.lttng-ust.source='
+ " --profile lttng-ust-master "
+ " --override projects.lttng-ust.source="
+ lttng_ust_url
- + ' --override projects.lttng-ust.checkout='
+ + " --override projects.lttng-ust.checkout="
+ lttng_ust_commit
- + ' --profile lttng-ust-no-man-pages'
+ + " --profile lttng-ust-no-man-pages"
)
-
- if lttng_version == 'master' or (major_version >= 2 and minor_version >= 11):
+ if lttng_version == "master" or (major_version >= 2 and minor_version >= 11):
vlttng_cmd += (
- ' --override projects.lttng-tools.configure+=--enable-test-sdt-uprobe'
+ " --override projects.lttng-tools.configure+=--enable-test-sdt-uprobe"
)
- vlttng_path = '/tmp/virtenv'
+ vlttng_path = "/tmp/virtenv"
- vlttng_cmd += ' ' + vlttng_path
+ vlttng_cmd += " " + vlttng_path
return vlttng_cmd
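# Illustrative assembled command for lttng_version="master" (URLs and commits
# hypothetical, overrides elided with "..."):
#   vlttng --jobs=$(nproc) --profile urcu-master \
#       --profile babeltrace2-stable-2.0 --profile babeltrace2-python ... \
#       --profile lttng-tools-master \
#       --override projects.lttng-tools.source=<tools_url> \
#       --override projects.lttng-tools.checkout=<tools_commit> \
#       --profile lttng-tools-no-man-pages /tmp/virtenv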
def main():
send_retry_limit = 10
test_type = None
- parser = argparse.ArgumentParser(description='Launch baremetal test using Lava')
- parser.add_argument('-t', '--type', required=True)
- parser.add_argument('-lv', '--lttng-version', required=True)
- parser.add_argument('-j', '--jobname', required=True)
- parser.add_argument('-k', '--kernel', required=True)
- parser.add_argument('-lm', '--lmodule', required=True)
- parser.add_argument('-tu', '--tools-url', required=True)
- parser.add_argument('-tc', '--tools-commit', required=True)
- parser.add_argument('-id', '--build-id', required=True)
- parser.add_argument('-uu', '--ust-url', required=False)
- parser.add_argument('-uc', '--ust-commit', required=False)
- parser.add_argument('-d', '--debug', required=False, action='store_true')
+ parser = argparse.ArgumentParser(description="Launch baremetal test using Lava")
+ parser.add_argument("-t", "--type", required=True)
+ parser.add_argument("-lv", "--lttng-version", required=True)
+ parser.add_argument("-j", "--jobname", required=True)
+ parser.add_argument("-k", "--kernel", required=True)
+ parser.add_argument("-lm", "--lmodule", required=True)
+ parser.add_argument("-tu", "--tools-url", required=True)
+ parser.add_argument("-tc", "--tools-commit", required=True)
+ parser.add_argument("-id", "--build-id", required=True)
+ parser.add_argument("-uu", "--ust-url", required=False)
+ parser.add_argument("-uc", "--ust-commit", required=False)
+ parser.add_argument("-d", "--debug", required=False, action="store_true")
+ parser.add_argument(
+ "-r",
+ "--rootfs-url",
+ required=False,
+ default="https://obj.internal.efficios.com/lava/rootfs_amd64_bookworm_2024-01-15.tar.gz",
+ )
parser.add_argument(
- '-r', '--rootfs-url', required=False,
- default="https://obj.internal.efficios.com/lava/rootfs_amd64_bookworm_2024-01-15.tar.gz"
+ "--ci-repo", required=False, default="https://github.com/lttng/lttng-ci.git"
)
- parser.add_argument('--ci-repo', required=False, default='https://github.com/lttng/lttng-ci.git')
- parser.add_argument('--ci-branch', required=False, default='master')
+ parser.add_argument("--ci-branch", required=False, default="master")
args = parser.parse_args()
if args.type not in TestType.values:
- print('argument -t/--type {} unrecognized.'.format(args.type))
- print('Possible values are:')
+ print("argument -t/--type {} unrecognized.".format(args.type))
+ print("Possible values are:")
for k in TestType.values:
- print('\t {}'.format(k))
+ print("\t {}".format(k))
return -1
lava_api_key = None
if not args.debug:
try:
- lava_api_key = os.environ['LAVA2_JENKINS_TOKEN']
+ lava_api_key = os.environ["LAVA2_JENKINS_TOKEN"]
except Exception as error:
print(
-                'LAVA2_JENKINS_TOKEN not found in the environment. Exiting...',
+                "LAVA2_JENKINS_TOKEN not found in the environment. Exiting...",
error,
)
return -1
jinja_loader = FileSystemLoader(os.path.dirname(os.path.realpath(__file__)))
jinja_env = Environment(loader=jinja_loader, trim_blocks=True, lstrip_blocks=True)
- jinja_template = jinja_env.get_template('template_lava_job.jinja2')
+ jinja_template = jinja_env.get_template("template_lava_job.jinja2")
test_type = TestType.values[args.type]
else:
device_type = DeviceType.kvm
- vlttng_path = '/tmp/virtenv'
+ vlttng_path = "/tmp/virtenv"
vlttng_cmd = get_vlttng_cmd(
- args.lttng_version, args.tools_url, args.tools_commit, args.ust_url, args.ust_commit
+ args.lttng_version,
+ args.tools_url,
+ args.tools_commit,
+ args.ust_url,
+ args.ust_commit,
)
if args.lttng_version == "master":
major, minor = parse_stable_version(args.lttng_version)
lttng_version_string = str(major) + "." + str(minor)
-
context = dict()
- context['DeviceType'] = DeviceType
- context['TestType'] = TestType
+ context["DeviceType"] = DeviceType
+ context["TestType"] = TestType
- context['job_name'] = args.jobname
- context['test_type'] = test_type
- context['random_seed'] = random.randint(0, 1000000)
- context['device_type'] = device_type
+ context["job_name"] = args.jobname
+ context["test_type"] = test_type
+ context["random_seed"] = random.randint(0, 1000000)
+ context["device_type"] = device_type
- context['vlttng_cmd'] = vlttng_cmd
- context['vlttng_path'] = vlttng_path
- context['lttng_version_string'] = lttng_version_string
+ context["vlttng_cmd"] = vlttng_cmd
+ context["vlttng_path"] = vlttng_path
+ context["lttng_version_string"] = lttng_version_string
- context['kernel_url'] = args.kernel
- context['nfsrootfs_url'] = args.rootfs_url
- context['lttng_modules_url'] = args.lmodule
- context['jenkins_build_id'] = args.build_id
+ context["kernel_url"] = args.kernel
+ context["nfsrootfs_url"] = args.rootfs_url
+ context["lttng_modules_url"] = args.lmodule
+ context["jenkins_build_id"] = args.build_id
- context['kprobe_round_nb'] = 10
+ context["kprobe_round_nb"] = 10
- context['ci_repo'] = args.ci_repo
- context['ci_branch'] = args.ci_branch
+ context["ci_repo"] = args.ci_repo
+ context["ci_branch"] = args.ci_branch
render = jinja_template.render(context)
- print('Job to be submitted:')
+ print("Job to be submitted:")
print(render)
return 0
server = xmlrpc.client.ServerProxy(
- '%s://%s:%s@%s/RPC2' % (PROTO, USERNAME, lava_api_key, HOSTNAME)
+ "%s://%s:%s@%s/RPC2" % (PROTO, USERNAME, lava_api_key, HOSTNAME)
)
for attempt in range(1, send_retry_limit + 1):
jobid = server.scheduler.submit_job(render)
except xmlrpc.client.ProtocolError as error:
print(
- 'Protocol error on submit, sleeping and retrying. Attempt #{}'.format(
+ "Protocol error on submit, sleeping and retrying. Attempt #{}".format(
attempt
)
)
break
        # Early exit when the maximum number of retries is reached.
if attempt == send_retry_limit:
-            print(
-                'Protocol error on submit, maximum number of retries reached ({})'.format(
-                    attempt
-                )
-            )
+            print(
+                "Protocol error on submit, maximum number of retries reached ({})".format(
+                    attempt
+                )
+            )
- return -1
-
- print('Lava jobid:{}'.format(jobid))
- print(
- 'Lava job URL: {}://{}/scheduler/job/{}'.format(
- PROTO, HOSTNAME, jobid
-        )
- )
+            return -1
+
+ print("Lava jobid:{}".format(jobid))
+ print("Lava job URL: {}://{}/scheduler/job/{}".format(PROTO, HOSTNAME, jobid))
# Check the status of the job every 30 seconds
- jobstatus = server.scheduler.job_state(jobid)['job_state']
+ jobstatus = server.scheduler.job_state(jobid)["job_state"]
running = False
- while jobstatus in ['Submitted', 'Scheduling', 'Scheduled', 'Running']:
- if not running and jobstatus == 'Running':
- print('Job started running')
+ while jobstatus in ["Submitted", "Scheduling", "Scheduled", "Running"]:
+ if not running and jobstatus == "Running":
+ print("Job started running")
running = True
time.sleep(30)
try:
- jobstatus = server.scheduler.job_state(jobid)['job_state']
+ jobstatus = server.scheduler.job_state(jobid)["job_state"]
except xmlrpc.client.ProtocolError as error:
- print('Protocol error, retrying')
+ print("Protocol error, retrying")
continue
- print('Job ended with {} status.'.format(jobstatus))
+ print("Job ended with {} status.".format(jobstatus))
- if jobstatus != 'Finished':
+ if jobstatus != "Finished":
return -1
if test_type is TestType.kvm_tests or test_type is TestType.baremetal_tests:
print_test_output(server, jobid)
passed, failed = check_job_all_test_cases_state_count(server, jobid)
- print('With {} passed and {} failed Lava test cases.'.format(passed, failed))
+ print("With {} passed and {} failed Lava test cases.".format(passed, failed))
if failed != 0:
return -1
import pandas as pd
import sys
+
def test_case(df):
# Duration is in usec
    # usecPerIter = Duration / (average number of iterations per thread)
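    # e.g. 4 threads running for 1,000,000 usec with 2,000,000 total iterations
    # gives (4 * 1000000) / 2000000 = 2 usec/iter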
- df['usecperiter'] = (df['nbthreads'] * df['duration']) / df['nbiter']
-
- periter_mean = pd.DataFrame({'periter_mean' :
- df.groupby(['nbthreads', 'tracer', 'testcase','sleeptime'])['usecperiter'].mean()}).reset_index()
-
- periter_stdev = pd.DataFrame({'periter_stdev' :
- df.groupby(['nbthreads', 'tracer', 'testcase','sleeptime'])['usecperiter'].std()}).reset_index()
-
- nbiter_mean = pd.DataFrame({'nbiter_mean' :
- df.groupby(['nbthreads', 'tracer', 'testcase','sleeptime'])['nbiter'].mean()}).reset_index()
-
- nbiter_stdev = pd.DataFrame({'nbiter_stdev' :
- df.groupby(['nbthreads', 'tracer', 'testcase','sleeptime'])['nbiter'].std()}).reset_index()
-
- duration_mean = pd.DataFrame({'duration_mean' :
- df.groupby(['nbthreads', 'tracer', 'testcase','sleeptime'])['duration'].mean()}).reset_index()
-
- duration_stdev = pd.DataFrame({'duration_stdev' :
- df.groupby(['nbthreads', 'tracer', 'testcase','sleeptime'])['duration'].std()}).reset_index()
+ df["usecperiter"] = (df["nbthreads"] * df["duration"]) / df["nbiter"]
+
+ periter_mean = pd.DataFrame(
+ {
+ "periter_mean": df.groupby(
+ ["nbthreads", "tracer", "testcase", "sleeptime"]
+ )["usecperiter"].mean()
+ }
+ ).reset_index()
+
+ periter_stdev = pd.DataFrame(
+ {
+ "periter_stdev": df.groupby(
+ ["nbthreads", "tracer", "testcase", "sleeptime"]
+ )["usecperiter"].std()
+ }
+ ).reset_index()
+
+ nbiter_mean = pd.DataFrame(
+ {
+ "nbiter_mean": df.groupby(["nbthreads", "tracer", "testcase", "sleeptime"])[
+ "nbiter"
+ ].mean()
+ }
+ ).reset_index()
+
+ nbiter_stdev = pd.DataFrame(
+ {
+ "nbiter_stdev": df.groupby(
+ ["nbthreads", "tracer", "testcase", "sleeptime"]
+ )["nbiter"].std()
+ }
+ ).reset_index()
+
+ duration_mean = pd.DataFrame(
+ {
+ "duration_mean": df.groupby(
+ ["nbthreads", "tracer", "testcase", "sleeptime"]
+ )["duration"].mean()
+ }
+ ).reset_index()
+
+ duration_stdev = pd.DataFrame(
+ {
+ "duration_stdev": df.groupby(
+ ["nbthreads", "tracer", "testcase", "sleeptime"]
+ )["duration"].std()
+ }
+ ).reset_index()
tmp = periter_mean.merge(periter_stdev)
    # if there is any NaN or None value in the DF we raise an exception
if tmp.isnull().values.any():
- raise Exception('NaN value found in dataframe')
+ raise Exception("NaN value found in dataframe")
for i, row in tmp.iterrows():
- testcase_name='_'.join([row['tracer'],str(row['nbthreads'])+'thr', 'peritermean'])
- yield( {"name": testcase_name, "result": "pass", "units": "usec/iter",
- "measurement": str(row['periter_mean'])})
-
- testcase_name='_'.join([row['tracer'],str(row['nbthreads'])+'thr', 'periterstdev'])
- yield( {"name": testcase_name, "result": "pass", "units": "usec/iter",
- "measurement": str(row['periter_stdev'])})
-
- testcase_name='_'.join([row['tracer'],str(row['nbthreads'])+'thr', 'nbitermean'])
- yield( {"name": testcase_name, "result": "pass", "units": "iterations",
-            "measurement": str(row['nbiter_mean'])})
-
-        testcase_name='_'.join([row['tracer'],str(row['nbthreads'])+'thr', 'nbiterstdev'])
-        yield( {"name": testcase_name, "result": "pass", "units": "iterations",
-            "measurement": str(row['nbiter_stdev'])})
-
-        testcase_name='_'.join([row['tracer'],str(row['nbthreads'])+'thr', 'durationmean'])
-        yield( {"name": testcase_name, "result": "pass", "units": "usec",
-            "measurement": str(row['duration_mean'])})
-
-        testcase_name='_'.join([row['tracer'],str(row['nbthreads'])+'thr', 'durationstdev'])
-        yield( {"name": testcase_name, "result": "pass", "units": "usec",
-            "measurement": str(row['duration_stdev'])})
+ testcase_name = "_".join(
+ [row["tracer"], str(row["nbthreads"]) + "thr", "peritermean"]
+ )
+ yield (
+ {
+ "name": testcase_name,
+ "result": "pass",
+ "units": "usec/iter",
+ "measurement": str(row["periter_mean"]),
+ }
+ )
+
+ testcase_name = "_".join(
+ [row["tracer"], str(row["nbthreads"]) + "thr", "periterstdev"]
+ )
+ yield (
+ {
+ "name": testcase_name,
+ "result": "pass",
+ "units": "usec/iter",
+ "measurement": str(row["periter_stdev"]),
+ }
+ )
+
+ testcase_name = "_".join(
+ [row["tracer"], str(row["nbthreads"]) + "thr", "nbitermean"]
+ )
+ yield (
+ {
+ "name": testcase_name,
+ "result": "pass",
+ "units": "iterations",
+ "measurement": str(row["nbiter_mean"]),
+ }
+ )
+
+ testcase_name = "_".join(
+ [row["tracer"], str(row["nbthreads"]) + "thr", "nbiterstdev"]
+ )
+ yield (
+ {
+ "name": testcase_name,
+ "result": "pass",
+ "units": "iterations",
+ "measurement": str(row["nbiter_stdev"]),
+ }
+ )
+
+ testcase_name = "_".join(
+ [row["tracer"], str(row["nbthreads"]) + "thr", "durationmean"]
+ )
+ yield (
+ {
+ "name": testcase_name,
+ "result": "pass",
+ "units": "usec",
+ "measurement": str(row["duration_mean"]),
+ }
+ )
+
+ testcase_name = "_".join(
+ [row["tracer"], str(row["nbthreads"]) + "thr", "durationstdev"]
+ )
+ yield (
+ {
+ "name": testcase_name,
+ "result": "pass",
+ "units": "usec",
+ "measurement": str(row["duration_stdev"]),
+ }
+ )
def main():
- results_file=sys.argv[1]
+ results_file = sys.argv[1]
df = pd.read_csv(results_file)
- results=defaultdict()
+ results = defaultdict()
data = test_case(df)
for res in data:
call(
- ['lava-test-case',
- res['name'],
- '--result', res['result'],
- '--measurement', res['measurement'],
- '--units', res['units']])
+ [
+ "lava-test-case",
+ res["name"],
+ "--result",
+ res["result"],
+ "--measurement",
+ res["measurement"],
+ "--units",
+ res["units"],
+ ]
+ )
# Save the results to write to the CSV file
- results[res['name']]=res['measurement']
+ results[res["name"]] = res["measurement"]
    # Write the dictionary to a CSV file where each key is a column
- with open('processed_results.csv', 'w') as output_csv:
- dict_csv_write=csv.DictWriter(output_csv, results.keys())
+ with open("processed_results.csv", "w") as output_csv:
+ dict_csv_write = csv.DictWriter(output_csv, results.keys())
dict_csv_write.writeheader()
dict_csv_write.writerow(results)
-if __name__ == '__main__':
+
+if __name__ == "__main__":
main()