diff --git a/changelog/disks.feature b/changelog/disks.feature new file mode 100644 index 0000000..43fe204 --- /dev/null +++ b/changelog/disks.feature @@ -0,0 +1 @@ +Add support for pulling disk information and disk usage, both atomic and process-wise. All measures are implemented for Linux; for QNX, only disk info is implemented. diff --git a/docs/source/conf.py b/docs/source/conf.py index da38ee9..4c728c5 100644 --- a/docs/source/conf.py +++ b/docs/source/conf.py @@ -19,7 +19,7 @@ sys.path.insert(0, os.path.abspath(".")) sys.path.insert(0, os.path.abspath(os.path.join("..", ".."))) -from src import __version__ # noqa: E402 +from remoteperf import __version__ # noqa: E402 # -- Project information ----------------------------------------------------- diff --git a/docs/source/tutorial.rst b/docs/source/tutorial.rst index 47c97d0..58ffb0b 100644 --- a/docs/source/tutorial.rst +++ b/docs/source/tutorial.rst @@ -22,8 +22,8 @@ To communicate with a remote host, a handler is needed to specify which operatin .. code-block:: python - from src.clients import SSHClient - from src.handlers import LinuxHandler + from remoteperf.clients import SSHClient + from remoteperf.handlers import LinuxHandler with SSHClient("127.0.0.1", port=22, username="root", password="root") as instance: handler = LinuxHandler(instance) @@ -110,8 +110,8 @@ The SSH client specifically also supports jump-hosting: .. code-block:: python - from src.clients import SSHClient - from src.handlers import LinuxHandler + from remoteperf.clients import SSHClient + from remoteperf.handlers import LinuxHandler with SSHClient("127.0.0.1", port=22, username="root", password="root") as jump_client1: with SSHClient("host2", port=22, username="root", password="root", jump_client=jump_client1) as jump_client2: @@ -125,8 +125,8 @@ Example 2: Basic Android Usage: .. code-block:: python - from src.clients import ADBClient - from src.handlers import AndroidHandler + from remoteperf.clients import ADBClient + from remoteperf.handlers import AndroidHandler with ADBClient(device_id=...) as instance: handler = AndroidHandler(instance) @@ -147,8 +147,8 @@ Example 3: Basic QNX Usage: .. code-block:: python - from src.clients import SSHClient - from src.handlers import QNXHandler + from remoteperf.clients import SSHClient + from remoteperf.handlers import QNXHandler with SSHClient("127.0.0.1", port=22, username="root", password="root") as instance: handler = QNXHandler(instance) @@ -174,8 +174,8 @@ Example 4: Continuous background measurement: .. code-block:: python - from src.clients import SSHClient - from src.handlers import LinuxHandler + from remoteperf.clients import SSHClient + from remoteperf.handlers import LinuxHandler with SSHClient("127.0.0.1", port=22, username="root", password="root") as instance: handler = LinuxHandler(instance) @@ -211,8 +211,8 @@ Example 5: Processwise Resource Measurement: .. code-block:: python - from src.clients import SSHClient - from src.handlers import LinuxHandler + from remoteperf.clients import SSHClient + from remoteperf.handlers import LinuxHandler with SSHClient("127.0.0.1", port=22, username="root", password="root") as client: handler = LinuxHandler(client) @@ -240,8 +240,8 @@ Example 6: Continuous Processwise Resource Measurement:
.. code-block:: python - from src.clients import SSHClient - from src.handlers import LinuxHandler + from remoteperf.clients import SSHClient + from remoteperf.handlers import LinuxHandler with SSHClient("127.0.0.1", port=22, username="root", password="root") as client: handler = LinuxHandler(client) @@ -261,3 +261,42 @@ Example result (for a docker container running only sshd): ProcessInfo(pid=2769 name='sshd' command='sshd: root@notty' start_time='2507319' samples=[LinuxResourceSample(timestamp=datetime.datetime(2024, 8, 29, 15, 55, 57, 465394), mem_usage=8116.0, cpu_load=0.0), LinuxResourceSample(timestamp=datetime.datetime(2024, 8, 29, 15, 55, 58, 760324), mem_usage=8116.0, cpu_load=0.08), LinuxResourceSample(timestamp=datetime.datetime(2024, 8, 29, 15, 55, 59, 459172), mem_usage=8116.0, cpu_load=0.0)]) ] + + +Example 7: Disk Info and IO: +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +.. code-block:: python + + from remoteperf.clients import SSHClient + from remoteperf.handlers import LinuxHandler + + with SSHClient("127.0.0.1", port=22, username="root", password="root") as client: + handler = LinuxHandler(client) + handler.get_diskinfo() + handler.get_diskio() + handler.get_diskio_proc_wise() + +The result is a list of handler-specific disk models: get_diskinfo pulls the currently available storage, get_diskio pulls the current IO counters of the disks, and get_diskio_proc_wise pulls read and write access for each process. +All of these measures can also be pulled continuously. + +.. code-block:: python + + handler.start_diskinfo_measurement(0.1) + time.sleep(1) + result = handler.stop_diskinfo_measurement() + + handler.start_diskio_measurement(0.1) + time.sleep(1) + result = handler.stop_diskio_measurement() + + handler.start_diskio_measurement_proc_wise(0.1) + time.sleep(1) + result = handler.stop_diskio_measurement_proc_wise()
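+
+The process-wise result additionally supports a couple of sorting helpers (from the list wrappers in ``remoteperf/models/super.py``), for example:
+
+.. code-block:: python
+
+    heaviest_writers = result.highest_average_write_bytes(3)
+    heaviest_readers = result.highest_average_read_bytes(3)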
diff --git a/docs/source/tutorial_math.rst b/docs/source/tutorial_math.rst index 1390364..d15b21e 100644 --- a/docs/source/tutorial_math.rst +++ b/docs/source/tutorial_math.rst @@ -8,15 +8,15 @@ Remoteperf is not a mathematical library nor does it intend to be, so for more c large scale computations we advise you to export data first and do them in a more suitable environment. However, it does support a few basic operations on all objects returned by all continuous measurement functions, and if you feel some might be missing you're more than -welcome to contribute by extending the existing list wrappers in ``src/models/super.py``. +welcome to contribute by extending the existing list wrappers in ``remoteperf/models/super.py``. Example 1: Highest average load/usage ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ .. code-block:: python - from src.clients import ADBClient - from src.handlers import AndroidHandler + from remoteperf.clients import ADBClient + from remoteperf.handlers import AndroidHandler with ADBClient(device_id="emulator-5554") as instance: handler = AndroidHandler(instance) @@ -75,8 +75,8 @@ Example 2: Highest average process-wise load/usage .. code-block:: python - from src.clients import ADBClient - from src.handlers import AndroidHandler + from remoteperf.clients import ADBClient + from remoteperf.handlers import AndroidHandler with ADBClient(device_id="emulator-5554") as instance: handler = AndroidHandler(instance) @@ -240,8 +240,8 @@ Sorting, filtering, and querying can be combined in an advanced manner as such: .. code-block:: python - from src.clients import ADBClient - from src.handlers import AndroidHandler + from remoteperf.clients import ADBClient + from remoteperf.handlers import AndroidHandler with ADBClient(device_id="emulator-5554") as instance: handler = AndroidHandler(instance) diff --git a/docs/source/tutorial_remotefs.rst b/docs/source/tutorial_remotefs.rst index a8868e9..57ee964 100644 --- a/docs/source/tutorial_remotefs.rst +++ b/docs/source/tutorial_remotefs.rst @@ -14,8 +14,8 @@ To check whether a file exists, the following functions can be used: .. code-block:: python - from src.clients import SSHClient - from src.handlers import QNXHandler + from remoteperf.clients import SSHClient + from remoteperf.handlers import QNXHandler with SSHClient("127.0.0.1", port=22, username="root", password="root") as instance: handler = QNXHandler(instance, log_path="/tmp/core") @@ -30,8 +30,8 @@ In addition, it also allows for removal of files and directories: .. code-block:: python - from src.clients import SSHClient - from src.utils.fs_utils import RemoteFS + from remoteperf.clients import SSHClient + from remoteperf.utils.fs_utils import RemoteFs with SSHClient("127.0.0.1", port=22, username="root", password="root") as instance: fs = RemoteFs(instance) @@ -47,8 +47,8 @@ There is also support for remote temporary directories: .. code-block:: python - from src.clients import SSHClient - from src.handlers import LinuxHandler + from remoteperf.clients import SSHClient + from remoteperf.handlers import LinuxHandler with SSHClient("127.0.0.1", port=22, username="root", password="root") as instance: handler = LinuxHandler(instance) diff --git a/integration_tests/image/mocks/nicinfo b/integration_tests/image/mocks/nicinfo new file mode 100644 index 0000000..f03b233 --- /dev/null +++ b/integration_tests/image/mocks/nicinfo @@ -0,0 +1,29 @@ +#!/bin/bash +nicinfo_output='eq0:\n + Ethernet Controller\n +\n + Physical Node ID ........................... 000000 000000\n + Current Physical Node ID ................... 000001 000001\n + Current Operation Rate ..................... 2.50 Mb/s full-duplex\n + Active Interface Type ...................... SMI\n + Active PHY address ....................... 0\n + Maximum Transmittable data Unit ............ 0\n + Maximum Receivable data Unit ............... 0\n + Promiscuous Mode ........................... Off\n + Multicast Support .......................... Enabled\n +\n + Packets Transmitted OK ..................... 9225590\n + Bytes Transmitted OK ....................... 1366702382\n + Broadcast Packets Transmitted OK ........... 19\n + Multicast Packets Transmitted OK ........... 8671\n +\n + Packets Received OK ........................ 10786584\n + Bytes Received OK .......................... 1198846300\n + Broadcast Packets Received OK .............. 7634\n + Multicast Packets Received OK .............. 80894\n +\n + Received packets with CRC errors ........... 0\n + Short packets .............................. 0\n
+\n +vlan0:' +echo -e "$nicinfo_output" diff --git a/integration_tests/tests/conftest.py b/integration_tests/tests/conftest.py index d18d0e3..608afe1 100644 --- a/integration_tests/tests/conftest.py +++ b/integration_tests/tests/conftest.py @@ -7,8 +7,8 @@ import pytest -from src.clients.adb_client import ADBClient -from src.clients.ssh_client import SSHClient +from remoteperf.clients.adb_client import ADBClient +from remoteperf.clients.ssh_client import SSHClient def singleton(cls): diff --git a/integration_tests/tests/test_adb_client.py b/integration_tests/tests/test_adb_client.py index a1cbe1f..dbe0ca1 100644 --- a/integration_tests/tests/test_adb_client.py +++ b/integration_tests/tests/test_adb_client.py @@ -5,13 +5,13 @@ import pytest -from src.clients.adb_client import ADBClientException -from src.clients.base_client import BaseClientException -from src.handlers.android_handler import AndroidHandler -from src.handlers.qnx_handler import MissingQnxCapabilityException, QNXHandler -from src.models.base import BaseCpuSample, BaseMemorySample, SystemMemory -from src.models.linux import LinuxCpuUsageInfo, LinuxResourceSample -from src.models.super import ProcessInfo +from remoteperf.clients.adb_client import ADBClientException +from remoteperf.clients.base_client import BaseClientException +from remoteperf.handlers.android_handler import AndroidHandler +from remoteperf.handlers.qnx_handler import MissingQnxCapabilityException, QNXHandler +from remoteperf.models.base import BaseCpuSample, BaseMemorySample, SystemMemory +from remoteperf.models.linux import LinuxCpuUsageInfo, LinuxResourceSample +from remoteperf.models.super import ProcessInfo def kill_adb_server(): diff --git a/integration_tests/tests/test_fs_utils.py b/integration_tests/tests/test_fs_utils.py index fd7d91c..405f1f3 100644 --- a/integration_tests/tests/test_fs_utils.py +++ b/integration_tests/tests/test_fs_utils.py @@ -1,4 +1,4 @@ -from src.handlers.linux_handler import LinuxHandler +from remoteperf.handlers.linux_handler import LinuxHandler def test_remove_file(ssh_client, random_name): diff --git a/integration_tests/tests/test_ssh_client.py b/integration_tests/tests/test_ssh_client.py index fc5dc65..8e9f761 100644 --- a/integration_tests/tests/test_ssh_client.py +++ b/integration_tests/tests/test_ssh_client.py @@ -5,20 +5,19 @@ import pytest from conftest import override_client_config -from src.clients.base_client import BaseClientException -from src.clients.ssh_client import SSHClient, SSHClientException -from src.handlers.linux_handler import LinuxHandler -from src.models.base import ( +from remoteperf.clients.base_client import BaseClientException +from remoteperf.clients.ssh_client import SSHClient, SSHClientException +from remoteperf.handlers.linux_handler import LinuxHandler +from remoteperf.models.base import ( BaseCpuSample, BaseCpuUsageInfo, BaseMemorySample, BootTimeInfo, - Command, SystemMemory, SystemUptimeInfo, ) -from src.models.linux import LinuxCpuUsageInfo, LinuxResourceSample -from src.models.super import ProcessInfo +from remoteperf.models.linux import LinuxCpuUsageInfo, LinuxResourceSample +from remoteperf.models.super import DiskInfoList, DiskIOList, ProcessDiskIOList, ProcessInfo def close_transport(client: SSHClient): @@ -217,13 +216,13 @@ def test_ssh_pull_directory(ssh_client): def test_ssh_pull_dir(ssh_client): - path = f"/tmp" + path = "/tmp" with pytest.raises(BaseClientException): ssh_client.pull_file("/usr/bin", path) def test_ssh_pull_imaginary_dir(ssh_client): - path = f"/sdfgsdf/sfgdh" + 
path = "/sdfgsdf/sfgdh" with pytest.raises(BaseClientException): ssh_client.pull_file("/usr/bin/xz", path) @@ -334,3 +333,25 @@ def test_ssh_client_retry(ssh_client): with pytest.raises(SSHClientException): client.run_command("sleep 1.5") assert time.time() - t0 > 3 and time.time() - t0 < 7 + + +def test_diskinfo(ssh_client): + handler = LinuxHandler(ssh_client) + output = handler.get_diskinfo() + assert output + assert isinstance(output, DiskInfoList) + + +def test_diskio(ssh_client): + handler = LinuxHandler(ssh_client) + output = handler.get_diskio() + assert output + assert isinstance(output, DiskIOList) + + +def test_diskio_proc_wise(ssh_client): + handler = LinuxHandler(ssh_client) + output = handler.get_diskio_proc_wise() + assert output + assert isinstance(output, ProcessDiskIOList) + assert all((isinstance(model, ProcessInfo) for model in output)) diff --git a/noxfile.py b/noxfile.py index a956958..54d0bed 100644 --- a/noxfile.py +++ b/noxfile.py @@ -9,7 +9,7 @@ PROJECT_NAME = "remoteperf" -@nox.session(python="3.8") +@nox.session(python=["3.8", "3.10", "3.12"]) def lint(session): shutil.rmtree("tmp", ignore_errors=True) session.install("-U", "-r", "requirements/requirements.txt") @@ -17,7 +17,7 @@ def lint(session): run_lint(session) -@nox.session(python="3.8") +@nox.session(python=["3.8", "3.10", "3.12"]) def test(session): session.install("-U", "-r", "requirements/requirements.txt") session.install("-U", "-r", "requirements/requirements_test.txt") @@ -70,7 +70,7 @@ def integration_test(session): "pytest", *args, "--color=yes", - f"--cov=./src", + "--cov=./remoteperf", "--cov-report", "xml:tmp/report/coverage/xml/integration_report.xml", "--cov-report", @@ -126,9 +126,7 @@ def integration_test(session): def remove_docker_container(session, container): existing_container_command = f"docker ps -a | grep {container} || true" - existing_container = subprocess.check_output( - ["bash", "-c", existing_container_command] - ).decode() + existing_container = subprocess.check_output(["bash", "-c", existing_container_command]).decode() if existing_container and len(existing_container.split()[0]) == 12: session.run("docker", "container", "rm", "-f", container) @@ -173,7 +171,7 @@ def build_wheel_package(session, clean=False, install=False): def run_lint(session): shutil.rmtree("tmp", ignore_errors=True) session.log("Running black") - session.run("black", "--check", "src", "tests", silent=True) + session.run("black", "--check", "remoteperf", "tests", silent=True) session.log("Running pylint") session.run( @@ -181,14 +179,11 @@ def run_lint(session): "--output-format=colorized", "--reports=y", "--disable=W0511", # Don't fail on FIXMEs - f"./src", + "./remoteperf", ) - session.log("Running flake8") - session.run("pflake8", "src", "tests") - session.log("Running mypy") - session.run("mypy", f"./src", success_codes=[1, 0]) + session.run("mypy", "./remoteperf", success_codes=[1, 0]) def run_test(session): @@ -200,7 +195,7 @@ def run_test(session): "pytest", *session.posargs, "--color=yes", - f"--cov=./src", + "--cov=./remoteperf", "--cov-report", "xml:tmp/report/coverage/xml/report.xml", "--cov-report", @@ -224,7 +219,7 @@ def run_doc(session): "sphinx-apidoc", "-o", "docs/source/code", - "src", + "remoteperf", "-e", "--force", "-d", diff --git a/pyproject.toml b/pyproject.toml index c162799..5aa41d3 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -20,16 +20,16 @@ include-package-data = true remoteperf = ["**"] [tool.setuptools.packages.find] -include = ["src*"] +include = ["remoteperf*"] 
exclude = ["docs*", "tests*"] namespaces = false [tool.setuptools.dynamic] -version = { attr = "src.__version__" } +version = { attr = "remoteperf.__version__" } dependencies = { file = ["requirements/requirements.txt"] } [tool.flake8] -per-file-ignores = "tests/*.py:F401,F811,E302,W504,W503,N818 src/*.py:E203" +per-file-ignores = "tests/*.py:F401,F811,E302,W504,W503,N818 remoteperf/*.py:E203" max-line-length = 120 max-complexity = 12 diff --git a/remoteperf/__init__.py b/remoteperf/__init__.py new file mode 100644 index 0000000..63fecc5 --- /dev/null +++ b/remoteperf/__init__.py @@ -0,0 +1,3 @@ +# Copyright 2024 Volvo Cars +# SPDX-License-Identifier: Apache-2.0 +from remoteperf.version import __date__, __version__ # noqa: F401 diff --git a/src/_parsers/__init__.py b/remoteperf/_parsers/__init__.py similarity index 100% rename from src/_parsers/__init__.py rename to remoteperf/_parsers/__init__.py diff --git a/remoteperf/_parsers/generic.py b/remoteperf/_parsers/generic.py new file mode 100644 index 0000000..a544aa1 --- /dev/null +++ b/remoteperf/_parsers/generic.py @@ -0,0 +1,136 @@ +# Copyright 2024 Volvo Cars +# SPDX-License-Identifier: Apache-2.0 +import re +from dataclasses import dataclass +from typing import Callable, Dict, List, Optional + + +class ParsingError(Exception): + pass + + +@dataclass +class ParsingInfo: + regex: str + parse: Callable + key: Optional[int] = ( + None # The index of the parsed value that should be used as the key in the resulting dictionary. + ) + # Multiple keys could also be defined, which would then be returned as one concatenated string, + # but at the moment that breaks too many uses further down the line; TODO for later. + rename: Optional[str] = None + + +def parse_table(raw_table_data: str, categories: Dict[str, ParsingInfo], required=tuple(), header: bool = True) -> dict: + if header: + _, lines, regex_parsers = get_table_header(raw_table_data, categories) + else: + lines = raw_table_data.split("\n") + regex_parsers = {} + for category in categories: + regex_parsers[categories[category].rename if categories[category].rename else category] = categories[ + category + ] + + result = parse_table_lines(lines, regex_parsers) + + if not result: + raise ParsingError(f"Failed to parse table: {raw_table_data}") + sample = list(result.values())[0] + if not all(req in sample for req in required): + raise ParsingError(f"Failed to parse all required categories ({required}) from table: {raw_table_data}") + return result
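+
+
+# Illustrative usage (hypothetical data): given a table with a header row, a
+# renamed column, and a key-designating column,
+#
+#   categories = {
+#       "Filesystem": ParsingInfo(r"(\S+)", str, key=1),
+#       "1K-blocks": ParsingInfo(r"(\d+)", int, rename="size"),
+#   }
+#   parse_table("Filesystem 1K-blocks\n/dev/sda1 1000", categories)
+#
+# returns {"/dev/sda1": {"Filesystem": "/dev/sda1", "size": 1000}}.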
+def get_table_header(raw_table_data: str, categories: Dict[str, ParsingInfo]) -> tuple: + replacements = {c: categories[c].rename for c in categories if categories[c].rename} + try: + header, *lines = raw_table_data.split("\n") + for orig, repl in replacements.items(): + header = header.replace(orig, repl) + categories[repl] = categories.pop(orig) + + while any(h not in categories for h in header.split()) and lines: + header, *lines = lines + for orig, repl in replacements.items(): + header = header.replace(orig, repl) + + regex_parsers = {} + for category in header.split(): + if category not in categories: + raise ParsingError(f"Error, unknown parse header: {category} in input: {raw_table_data}") + regex_parsers[categories[category].rename if categories[category].rename else category] = categories[ + category + ] + except Exception as e: + raise ParsingError(f"Failed to parse categories ({categories})") from e + + return header, lines, regex_parsers + + +def parse_table_lines(lines: List[str], regex_parsers: Dict[str, ParsingInfo]) -> dict: + regex = re.compile(r"\s+".join([parsing_info.regex for parsing_info in regex_parsers.values()])) + result = {} + for line in lines: + if match := re.search(regex, line): + parsed_key = { + parsing_info.key: parsing_info.parse(value) + for (_, parsing_info), value in zip(regex_parsers.items(), match.groups()) + if parsing_info.key + } + if not parsed_key: + raise ValueError(f"Failed to specify a key for the parsed data: {regex_parsers}") + + # Second half of the multi-column key support; see the TODO above: + # result[".".join(str(i) for _, i in sorted(parsed_key.items()))] = { + result[parsed_key[1]] = { + category: parsing_info.parse(value) + for (category, parsing_info), value in zip(regex_parsers.items(), match.groups()) + } + + return result + + +def convert_compact_format_to_seconds(time_str: str) -> float: + """ + Converts a compact time string into seconds. + + Args: + time_str (str): A time string such as "1d2h3m4.5s"; units are d, h, m and s, + and unitless values count as seconds. + + Returns: + float: The total number of seconds represented by the input time string. + + """ + regex_pattern = r"(\d*\.?\d+)([dhms])?" + + total_seconds: float = 0 + for match in re.finditer(regex_pattern, time_str): + value, unit = match.groups() + if unit: + multiplier = {"d": 86400, "h": 3600, "m": 60, "s": 1}[unit] + total_seconds += float(value) * multiplier + else: + total_seconds += float(value) + + return total_seconds
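+
+
+# e.g. convert_compact_format_to_seconds("1d2h3.5s") == 86400 + 7200 + 3.5 == 93603.5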
+def convert_to_int(value): + try: + if isinstance(value, str): + value = "".join(d for d in value if d.isnumeric()) + return int(value) + except ValueError: + return 0 diff --git a/src/_parsers/linux.py b/remoteperf/_parsers/linux.py similarity index 70% rename from src/_parsers/linux.py rename to remoteperf/_parsers/linux.py index ca48e54..ee4086d 100644 --- a/src/_parsers/linux.py +++ b/remoteperf/_parsers/linux.py @@ -6,10 +6,10 @@ from attrs import fields -from src._parsers.generic import ParsingError -from src.models.base import Process -from src.models.linux import LinuxCpuModeUsageInfo -from src.utils.math import Vector +from remoteperf._parsers.generic import ParsingError, ParsingInfo, parse_table +from remoteperf.models.base import Process +from remoteperf.models.linux import LinuxCpuModeUsageInfo +from remoteperf.utils.math import Vector def parse_proc_stat(raw_cpu_usage_1: str, raw_cpu_usage_2: str, timestamp: Optional[datetime] = None) -> dict: @@ -159,6 +159,16 @@ def parse_mem_usage_from_proc_files( return result +def parse_disk_usage_from_proc_files( + raw_process_disk_usage: str, separator_pattern: str, timestamp: Optional[datetime] = None +) -> Dict[Process, dict]: + timestamp = timestamp or datetime.now() + raw_stat_list = raw_process_disk_usage.split(f"/bin/cat: {separator_pattern}")[1:] + if not raw_stat_list: + raise ParsingError(f"Could not separate processes (delimiter:{separator_pattern}): {raw_process_disk_usage}") + return parse_io_from_proc_files(raw_stat_list) + + def parse_times_from_proc_files(proc_metrics, page_size) -> Dict[Process, int]: proc_times = {} regex_components = [r"(?P<pid>\d+)", r"\((?P<name>\S+)\)", r"\S", *([r"(-?\d+)"] * 49)] @@ -175,6 +185,26 @@ def parse_times_from_proc_files(proc_metrics, page_size) -> Dict[Process, int]: return proc_times +def parse_io_from_proc_files(proc_metrics) -> Dict[Process, dict]: + proc_times = {} + regex_components = [r"(?P<pid>\d+)", r"\((?P<name>\S+)\)", r"\S", *([r"(-?\d+)"] * 49)] + io_regex = r"".join([r"\n(\w+:\s+\d+)"] * 7) + proc_regex = r"\s+".join(regex_components) + io_regex + r"\n(?P<cmdline>.*)" + + for proc in proc_metrics: + if match := re.search(proc_regex, proc): + proc = Process( + pid=int(match.group("pid")), + name=match.group("name"), + start_time=match.group(21), + command=match.group("cmdline") or match.group("name"), + ) + proc_times[proc] = { + str(match.group(i).split(": ")[0]): int(match.group(i).split(": ")[1]) for i in range(52, 59) + } + return proc_times + + def parse_systemd_analyze(analyze_string: str) -> dict: """Parse the systemd-analyze output into a dictionary with all times in seconds.""" @@ -207,3 +237,43 @@ results["extra"]["graphical.target"] = float(target_time.group(1)) return results + + +def parse_proc_diskio(raw_data: str) -> dict: + categories = { + "device_major_number": ParsingInfo(r"(\d+)", int), + "device_minor_number": ParsingInfo(r"(\d+)", int), + "device_name": ParsingInfo(r"(\w+)", str, key=1), + "reads_completed": ParsingInfo(r"(\d+)", int), + "reads_merged": ParsingInfo(r"(\d+)", int), + "sectors_reads": ParsingInfo(r"(\d+)", int), + "time_spent_reading": ParsingInfo(r"(\d+)", int), + "writes_completed": ParsingInfo(r"(\d+)", int), + "writes_merged": ParsingInfo(r"(\d+)", int), + "sectors_written": ParsingInfo(r"(\d+)", int), + "time_spent_writing": ParsingInfo(r"(\d+)", int), + "IOs_currently_in_progress": ParsingInfo(r"(\d+)", int), + "time_spent_doing_io": ParsingInfo(r"(\d+)", int), + "weighted_time_spent_doing_io": ParsingInfo(r"(\d+)", int), + "discards_completed": ParsingInfo(r"(\d+)", int), + "discards_merged": ParsingInfo(r"(\d+)", int), + "sectors_discarded": ParsingInfo(r"(\d+)", int), + "time_spent_discarding": ParsingInfo(r"(\d+)", int), + "flushes_completed": ParsingInfo(r"(\d+)", int), + "time_spent_flushing": ParsingInfo(r"(\d+)", int), + } + return parse_table(raw_data, categories, header=False) + + +def parse_df(raw_data: str) -> dict: + categories = { + "Filesystem": ParsingInfo(r"(\S+)", str, key=1, rename="filesystem"), + "1K-blocks": ParsingInfo(r"(\d+)", int, rename="size"), + "Used": ParsingInfo(r"(\d+)", int, rename="used"), + "Available": ParsingInfo(r"(\d+)", int, rename="available"), + "Use%": ParsingInfo(r"(\d+)%", int, rename="used_percent"), + "Mounted on": ParsingInfo(r"(\S+)", str, rename="mounted_on"), + } + return parse_table( + raw_data, categories, required=("filesystem", "size", "used", "available", "used_percent", "mounted_on") + ) diff --git a/src/_parsers/qnx.py b/remoteperf/_parsers/qnx.py similarity index 87% rename from src/_parsers/qnx.py rename to remoteperf/_parsers/qnx.py index 18ae606..2799ecf 100644 --- a/src/_parsers/qnx.py +++ b/remoteperf/_parsers/qnx.py @@ -6,14 +6,12 @@ import regex as re -from src._parsers import generic -from src._parsers.generic import ParsingError, ParsingInfo -from src.models.base import Process +from remoteperf._parsers import generic +from remoteperf._parsers.generic import ParsingError, ParsingInfo +from remoteperf.models.base import Process -def parse_hogs_cpu_usage( - hogs_output: str, timestamp: Optional[datetime] = None -) -> dict: +def parse_hogs_cpu_usage(hogs_output: str, timestamp: Optional[datetime] = None) -> dict: """ Parses CPU usage data from the output of the 'hogs' command and returns a summary of the CPU load and per-core load. 
@@ -61,9 +59,7 @@ def parse_hogs_cpu_usage( return result -def parse_hogs_pidin_proc_wise( - raw_cpu_data: str, timestamp: Optional[datetime] = None -) -> Dict[Process, dict]: +def parse_hogs_pidin_proc_wise(raw_cpu_data: str, timestamp: Optional[datetime] = None) -> Dict[Process, dict]: """ Parses CPU usage data and process information from a concatenated string of 'hogs' and `pidin` outputs. @@ -104,18 +100,13 @@ def parse_hogs_pidin_proc_wise( """ timestamp = timestamp or datetime.now() parsed_hogs_data = parse_hogs(raw_cpu_data, ("SYS", "PID", "NAME")) - parsed_pidin_data = parse_pidin( - raw_cpu_data, ("pid", "name", "Arguments", "start_time") - ) + parsed_pidin_data = parse_pidin(raw_cpu_data, ("pid", "name", "Arguments", "start_time")) result = {} for pid, process in parsed_pidin_data.items(): hogs_process = parsed_hogs_data.get(pid) - if hogs_process and ( - hogs_process["NAME"] in process["name"] - or hogs_process["NAME"] in process["Arguments"] - ): + if hogs_process and (hogs_process["NAME"] in process["name"] or hogs_process["NAME"] in process["Arguments"]): p = Process( pid=process["pid"], name=process["name"], @@ -176,9 +167,7 @@ def parse_mem_usage_from_proc_files( """ timestamp = timestamp or datetime.now() - parsed_pidin = parse_pidin( - raw_process_memory_usage, required=("pid", "name", "Arguments", "start_time") - ) + parsed_pidin = parse_pidin(raw_process_memory_usage, required=("pid", "name", "Arguments", "start_time")) parsed_procfiles = parse_memory_per_pid(raw_process_memory_usage) result = {} for pid, process in parsed_pidin.items(): @@ -251,25 +240,14 @@ def parse_memory_per_pid(raw_data: str) -> dict: def parse_pidin(raw_pidin_data: str, required: Tuple[str]) -> dict: categories = { - "pid": ParsingInfo(r"(\d+)", int), + "pid": ParsingInfo(r"(\d+)", int, key=1), "name": ParsingInfo(r"([\w/.-]+)", lambda s: PurePosixPath(s).name), "sid": ParsingInfo(r"(\d+)", int), - "start_time": ParsingInfo( - r"([a-zA-Z]{3}\s+\d+\s+\d{2}:\d{2})", - str, - ), - "utime": ParsingInfo( - r"([\d.smhd]+)", generic.convert_compact_format_to_seconds - ), - "stime": ParsingInfo( - r"([\d.smhd]+)", generic.convert_compact_format_to_seconds - ), - "cutime": ParsingInfo( - r"([\d.smhd]+)", generic.convert_compact_format_to_seconds - ), - "cstime": ParsingInfo( - r"([\d.smhd]+)", generic.convert_compact_format_to_seconds - ), + "start time": ParsingInfo(r"([a-zA-Z]{3}\s+\d+\s+\d{2}:\d{2})", str, rename="start_time"), + "utime": ParsingInfo(r"([\d.smhd]+)", generic.convert_compact_format_to_seconds), + "stime": ParsingInfo(r"([\d.smhd]+)", generic.convert_compact_format_to_seconds), + "cutime": ParsingInfo(r"([\d.smhd]+)", generic.convert_compact_format_to_seconds), + "cstime": ParsingInfo(r"([\d.smhd]+)", generic.convert_compact_format_to_seconds), "Arguments": ParsingInfo(r"(.*)", lambda x: x), } return generic.parse_table(raw_pidin_data, categories, required=required) @@ -277,7 +255,7 @@ def parse_pidin(raw_pidin_data: str, required: Tuple[str]) -> dict: def parse_hogs(raw_pidin_data: str, required: Tuple[str]) -> dict: categories = { - "PID": ParsingInfo(r"(\d+)", int), + "PID": ParsingInfo(r"(\d+)", int, key=1), "NAME": ParsingInfo(r"([\w/.-]+)", lambda s: PurePosixPath(s).name), "MSEC": ParsingInfo(r"(\d+)", int), "PIDS": ParsingInfo(r"(\d*\.?\d+)%", float), @@ -287,9 +265,7 @@ def parse_hogs(raw_pidin_data: str, required: Tuple[str]) -> dict: return generic.parse_table(raw_pidin_data, categories, required=required) -def parse_proc_vm_stat( - raw_memory_usage: str, timestamp: 
Optional[datetime] = None -) -> dict: +def parse_proc_vm_stat(raw_memory_usage: str, timestamp: Optional[datetime] = None) -> dict: """ Parses memory usage statistics from a string and calculates total, used, and free memory. @@ -361,15 +337,11 @@ def parse_bmetrics_boot_time(raw_output: str) -> dict: ns = int(match.group(2).replace("ns", "")) boot_time = seconds + (ns / (10**9)) else: - raise ParsingError( - f"Unable to extract boot time: no match found for pattern: {pattern}" - ) + raise ParsingError(f"Unable to extract boot time: no match found for pattern: {pattern}") return {"total": boot_time} # type: ignore[call-arg] -def parse_uptime( - boot_data: str, date_data: str, timestamp: Optional[datetime] = None -) -> dict: +def parse_uptime(boot_data: str, date_data: str, timestamp: Optional[datetime] = None) -> dict: """ Parses boot and date information to calculate system uptime. @@ -397,9 +369,7 @@ try: boot_time_str = boot_time_match.group(1) + " " + boot_time_match.group(2) boot_time = datetime.strptime(boot_time_str, "%b %d %H:%M:%S %Y") - current_time = datetime.strptime( - date_data.strip(), "%a %b %d %H:%M:%S %Z %Y" - ) + current_time = datetime.strptime(date_data.strip(), "%a %b %d %H:%M:%S %Z %Y") uptime = current_time - boot_time except ValueError as e: raise ParsingError("Failed to parse datetime data from data") from e @@ -407,3 +377,23 @@ raise ParsingError("Error during extracting data") return {"total": uptime.total_seconds(), "timestamp": timestamp} + + +def parse_df_qnx(raw_data: str) -> dict: + categories = { + "filesystem": ParsingInfo(r"(\S+)", str, key=1), + "size": ParsingInfo(r"(\d+)", int), + "used": ParsingInfo(r"(\d+)", int), + "available": ParsingInfo(r"(\d+)", int), + "used_percent": ParsingInfo(r"(\d+)%", int), + "mounted_on": ParsingInfo(r"(\S+)", str), + } + + df_stats = generic.parse_table( + raw_data, + categories, + required=("filesystem", "size", "used", "available", "used_percent", "mounted_on"), + header=False, + ) + + return df_stats diff --git a/src/cli/__init__.py b/remoteperf/cli/__init__.py similarity index 100% rename from src/cli/__init__.py rename to remoteperf/cli/__init__.py diff --git a/src/cli/remotestat.py b/remoteperf/cli/remotestat.py similarity index 100% rename from src/cli/remotestat.py rename to remoteperf/cli/remotestat.py diff --git a/remoteperf/clients/__init__.py b/remoteperf/clients/__init__.py new file mode 100644 index 0000000..295f8f5 --- /dev/null +++ b/remoteperf/clients/__init__.py @@ -0,0 +1,14 @@ +# Copyright 2024 Volvo Cars +# SPDX-License-Identifier: Apache-2.0 +from remoteperf.clients.adb_client import ADBClient, ADBClientException +from remoteperf.clients.base_client import BaseClient, BaseClientException +from remoteperf.clients.ssh_client import SSHClient, SSHClientException + +__all__ = [ + "ADBClient", + "SSHClient", + "BaseClient", + "ADBClientException", + "SSHClientException", + "BaseClientException", +] diff --git a/src/clients/adb_client.py b/remoteperf/clients/adb_client.py similarity index 98% rename from src/clients/adb_client.py rename to remoteperf/clients/adb_client.py index 5c05b47..38d8121 100644 --- a/src/clients/adb_client.py +++ b/remoteperf/clients/adb_client.py @@ -7,7 +7,7 @@ from adbutils import AdbClient, AdbDevice, AdbError, AdbTimeout -from src.clients.lock_client import LockClient, LockClientException +from remoteperf.clients.lock_client import LockClient, LockClientException class ADBClientException(LockClientException):
@@ -15,20 +15,17 @@ class ADBClientException(LockClientException): class ADBClient(LockClient): - _session: Union[subprocess.Popen, None] @overload def __init__( self, device_id: str, *, host: str = "127.0.0.1", port: int = 5037, timeout: int = 10, retries: int = 2 - ) -> None: - ... + ) -> None: ... @overload def __init__( self, transport_id: int, *, host: str = "127.0.0.1", port: int = 5037, timeout: int = 10, retries: int = 2 - ) -> None: - ... + ) -> None: ... def __init__( self, *args, host: str = "127.0.0.1", port: int = 5037, timeout: int = 10, retries: int = 2, **kwargs ) diff --git a/src/clients/base_client.py b/remoteperf/clients/base_client.py similarity index 100% rename from src/clients/base_client.py rename to remoteperf/clients/base_client.py diff --git a/src/clients/lock_client.py b/remoteperf/clients/lock_client.py similarity index 97% rename from src/clients/lock_client.py rename to remoteperf/clients/lock_client.py index ac8203f..74403ea 100644 --- a/src/clients/lock_client.py +++ b/remoteperf/clients/lock_client.py @@ -9,8 +9,8 @@ from threading import Lock from typing import Optional -from src.clients.base_client import BaseClient, BaseClientException -from src.utils.fs_utils import RemoteFs +from remoteperf.clients.base_client import BaseClient, BaseClientException +from remoteperf.utils.fs_utils import RemoteFs class LockClientException(BaseClientException): diff --git a/src/clients/ssh_client.py b/remoteperf/clients/ssh_client.py similarity index 84% rename from src/clients/ssh_client.py rename to remoteperf/clients/ssh_client.py index 2ffa8e4..e6d8c1b 100644 --- a/src/clients/ssh_client.py +++ b/remoteperf/clients/ssh_client.py @@ -7,7 +7,7 @@ import paramiko -from src.clients.lock_client import LockClient, LockClientException +from remoteperf.clients.lock_client import LockClient, LockClientException class SSHClientException(LockClientException): @@ -87,9 +87,7 @@ def socket(self) -> paramiko.Channel: def _disconnect(self) -> None: if not self._client: - raise SSHClientException( - "You cannot close a connection that does not exist" - ) + raise SSHClientException("You cannot close a connection that does not exist") try: self._client.close() finally: @@ -106,9 +104,7 @@ def _run_command(self, command, *, retries=None, timeout=None, **_): if isinstance(error, TimeoutError): self._logger.error(f"Timeout received, retrying {attempt}.") else: - self._logger.error( - f"Error occured, retrying {attempt}, (cause: {error})." - ) + self._logger.error(f"Error occurred, retrying {attempt} (cause: {error}).") try: with self._create_session() as session: session.exec_command(command) @@ -117,9 +113,7 @@ except Exception as e: # Cannot guarantee paramiko errors exclusively: pylint: disable=W0718 error = e self._recover_connection() - raise SSHClientException( - f"Timeout exceeded when reading output after {_retries+1} attempts" - ) from error + raise SSHClientException(f"Timeout exceeded when reading output after {_retries+1} attempts") from error @contextmanager def _create_session(self) -> Generator[paramiko.Channel, Any, Any]: @@ -143,13 +137,9 @@ def _read_and_wait_for_exit_status(self, session: paramiko.Channel, timeout): or not self._transport_alive(session.get_transport()) or not session.active ): - raise SSHClientException( - f"Error occured after final read. Partial output: {output}" - ) + raise SSHClientException(f"Error occurred after final read. Partial output: {output}") return output - raise TimeoutError( - f"Timeout occured reading exit status. Partial output:{output}" - ) + raise TimeoutError(f"Timeout occurred reading exit status. Partial output: {output}") def _recv_all(self, session: paramiko.Channel) -> str: out = "" @@ -166,15 +156,11 @@ def _pull_file(self, path: str, dest: str) -> None: with self._client.open_sftp() as sftp: return sftp.get(path, dest) except IOError as e: - raise SSHClientException( - f"Pulled file did not match remote: {e}" - ) from e + raise SSHClientException(f"Pulled file did not match remote: {e}") from e except Exception as e: # Cannot guarantee paramiko errors exclusively: pylint: disable=W0718 error = e self._recover_connection() - raise SSHClientException( - f"Failed to pull file: ({error}) after {attempt} attempt(s)" - ) from error + raise SSHClientException(f"Failed to pull file: ({error}) after {attempt} attempt(s)") from error def _push_file(self, path: str, dest: str) -> None: error = None @@ -183,15 +169,11 @@ with self._client.open_sftp() as sftp: return sftp.put(path, dest, confirm=True) except IOError as e: - raise SSHClientException( - f"Pulled file did not match remote: {e}" - ) from e + raise SSHClientException(f"Pushed file did not match remote: {e}") from e except Exception as e: # Cannot guarantee paramiko errors exclusively: pylint: disable=W0718 error = e self._recover_connection() - raise SSHClientException( - f"Failed to pull file: ({error}) after {attempt} attempt(s)" - ) from error + raise SSHClientException(f"Failed to push file: ({error}) after {attempt} attempt(s)") from error def _recover_connection(self) -> bool: if not self.connected: diff --git a/remoteperf/handlers/__init__.py b/remoteperf/handlers/__init__.py new file mode 100644 index 0000000..3943b15 --- /dev/null +++ b/remoteperf/handlers/__init__.py @@ -0,0 +1,17 @@ +# Copyright 2024 Volvo Cars +# SPDX-License-Identifier: Apache-2.0 +from remoteperf.handlers.android_handler import AndroidHandler, AndroidHandlerException +from remoteperf.handlers.base_handler import BaseHandler, BaseHandlerException +from remoteperf.handlers.linux_handler import LinuxHandler, LinuxHandlerException +from remoteperf.handlers.qnx_handler import QNXHandler, QNXHandlerException + +__all__ = [ + "AndroidHandler", + "AndroidHandlerException", + "BaseHandler", + "BaseHandlerException", + "LinuxHandler", + "LinuxHandlerException", + "QNXHandler", + "QNXHandlerException", +] diff --git a/src/handlers/android_handler.py b/remoteperf/handlers/android_handler.py similarity index 84% rename from src/handlers/android_handler.py rename to remoteperf/handlers/android_handler.py index 8c21784..389d36c 100644 --- a/src/handlers/android_handler.py +++ b/remoteperf/handlers/android_handler.py @@ -3,10 +3,10 @@ import re from contextlib import contextmanager -from src._parsers.generic import ParsingError -from src.handlers.base_linux_handler import BaseLinuxHandler, BaseLinuxHandlerException -from src.handlers.posix_implementation_handler import MissingPosixCapabilityException -from src.models.base import BootTimeInfo +from remoteperf._parsers.generic import ParsingError +from remoteperf.handlers.base_linux_handler import BaseLinuxHandler, BaseLinuxHandlerException +from remoteperf.handlers.posix_implementation_handler import MissingPosixCapabilityException +from remoteperf.models.base import BootTimeInfo class AndroidHandlerException(BaseLinuxHandlerException):
diff --git a/src/handlers/base_handler.py b/remoteperf/handlers/base_handler.py similarity index 81% rename from src/handlers/base_handler.py rename to remoteperf/handlers/base_handler.py index 99e9a3b..9aeb5af 100644 --- a/src/handlers/base_handler.py +++ b/remoteperf/handlers/base_handler.py @@ -2,8 +2,8 @@ # SPDX-License-Identifier: Apache-2.0 from abc import ABC, abstractmethod -from src.models.base import BaseCpuUsageInfo, BootTimeInfo, SystemMemory, SystemUptimeInfo -from src.models.super import CpuList, MemoryList, ProcessCpuList, ProcessMemoryList +from remoteperf.models.base import BaseCpuUsageInfo, BootTimeInfo, SystemMemory, SystemUptimeInfo +from remoteperf.models.super import CpuList, DiskInfoList, MemoryList, ProcessCpuList, ProcessMemoryList class BaseHandlerException(Exception): @@ -236,3 +236,56 @@ def stop_mem_measurement_proc_wise(self) -> ProcessMemoryList: .. seealso:: :meth:`start_mem_measurement_proc_wise` - Method to start the memory measurement thread. """ + + @abstractmethod + def get_diskinfo(self) -> DiskInfoList: + """ + Retrieves information about the disks: storage used and available space. + + :return: An object containing information about the disks. + :rtype: DiskInfoList + """ + + @abstractmethod + def start_diskinfo_measurement(self, interval: float) -> None: + """ + Starts a new thread which collects disk information at the specified interval. + + :param interval: The time interval, in seconds, between successive disk information samples. + The interval must be a positive number. + :type interval: float + + :raises PosixHandlerException: If a disk information thread is already running. + + :example: + To start collecting disk information every 2 seconds:: + + handler.start_diskinfo_measurement(2.0) + + .. note:: + The collected information is stored internally and can be retrieved + by calling :meth:`stop_diskinfo_measurement`, which stops the measurement thread + and returns the collected data. + + .. seealso:: + :meth:`stop_diskinfo_measurement` - Method to stop the disk information thread. + """ + + @abstractmethod + def stop_diskinfo_measurement(self) -> DiskInfoList: + """ + Stops an existing disk information thread and returns the results. + + :return: A list of disk information results. + :rtype: DiskInfoList + + :raises PosixHandlerException: If a disk information thread is not running. + + .. seealso:: + :meth:`start_diskinfo_measurement` - Method to start the disk information thread.
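+
+        :example:
+            To stop an ongoing measurement and retrieve the collected data::
+
+                disk_info = handler.stop_diskinfo_measurement()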
+ """ diff --git a/src/handlers/base_linux_handler.py b/remoteperf/handlers/base_linux_handler.py similarity index 60% rename from src/handlers/base_linux_handler.py rename to remoteperf/handlers/base_linux_handler.py index 06cc8e5..70a1f4b 100644 --- a/src/handlers/base_linux_handler.py +++ b/remoteperf/handlers/base_linux_handler.py @@ -4,15 +4,28 @@ from contextlib import contextmanager from typing import Dict, List, Tuple -from src._parsers import linux as linux_parsers -from src._parsers.generic import ParsingError -from src.handlers.posix_implementation_handler import PosixHandlerException, PosixImplementationHandler -from src.models.base import BaseMemorySample, Process, Sample, SystemMemory, SystemUptimeInfo -from src.models.linux import LinuxCpuUsageInfo, LinuxResourceSample -from src.models.super import ( +from remoteperf._parsers import linux as linux_parsers +from remoteperf._parsers.generic import ParsingError +from remoteperf.handlers.posix_implementation_handler import PosixHandlerException, PosixImplementationHandler +from remoteperf.models.base import ( + BaseMemorySample, + DiskInfo, + DiskIOInfo, + Process, + Sample, + SystemMemory, + SystemUptimeInfo, +) +from remoteperf.models.linux import LinuxCpuUsageInfo, LinuxResourceSample +from remoteperf.models.super import ( CpuList, + DiskInfoList, + DiskIOList, + DiskIOProcessSample, + DiskIOSampleProcessInfo, MemoryList, MemorySampleProcessInfo, + ProcessDiskIOList, ProcessMemoryList, ProcessResourceList, ResourceSampleProcessInfo, @@ -34,7 +47,7 @@ def _handle_parsing_error(sample1, sample2=None): raise BaseLinuxHandlerException(f"Failed to parse data {result}") from e -class BaseLinuxHandler(PosixImplementationHandler): +class BaseLinuxHandler(PosixImplementationHandler): # pylint: disable=R0904 (too-many-public-methods) def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) self._nonexistant_separator_file = "e39f7761903b" @@ -81,6 +94,24 @@ def get_system_uptime(self) -> SystemUptimeInfo: output = self._client.run_command(command) return SystemUptimeInfo(total=float(output)) + def get_diskinfo(self) -> DiskInfoList: + output = linux_parsers.parse_df(self._get_df()) + return DiskInfoList([DiskInfo(**kpis) for _, kpis in output.items()]) + + def get_diskio(self) -> DiskIOList: + output = linux_parsers.parse_proc_diskio(self._get_diskstats()) + return DiskIOList([DiskIOInfo(**kpis) for _, kpis in output.items()]) + + def get_diskio_proc_wise(self) -> ProcessDiskIOList: + output = self._diskio_measurement_proc_wise() + with _handle_parsing_error(output): + return ProcessDiskIOList( + DiskIOSampleProcessInfo(**p.model_dump(), samples=[sample]) + for p, sample in linux_parsers.parse_disk_usage_from_proc_files( + output, self._nonexistant_separator_file + ).items() + ) + def start_cpu_measurement(self, interval: float) -> None: self._start_measurement(self._cpu_measurement, interval) @@ -106,12 +137,47 @@ def stop_mem_measurement(self) -> MemoryList: ) return MemoryList(parsed_results) + def start_diskinfo_measurement(self, interval: float) -> None: + self._start_measurement(self._get_df, interval) + + def stop_diskinfo_measurement(self) -> DiskInfoList: + results, _ = self._stop_measurement(self._get_df) + processed_results = [] + for sample in results: + with _handle_parsing_error(sample.data): + output = linux_parsers.parse_df(sample.data) + for _, kpis in output.items(): + kpis["timestamp"] = sample.timestamp + processed_results.append(DiskInfo(**kpis)) + + return DiskInfoList(processed_results) + + def 
start_diskio_measurement(self, interval: float) -> None: + """Starts disk IO measurement.""" + self._start_measurement(self._get_diskstats, interval) + + def stop_diskio_measurement(self) -> DiskIOList: + """Stops disk IO measurement and returns the collected samples.""" + results, _ = self._stop_measurement(self._get_diskstats) + processed_results = [] + for sample in results: + with _handle_parsing_error(sample.data): + output = linux_parsers.parse_proc_diskio(sample.data) + for _, kpis in output.items(): + kpis["timestamp"] = sample.timestamp + processed_results.append(DiskIOInfo(**kpis)) + + return DiskIOList(processed_results) + def start_cpu_measurement_proc_wise(self, interval: float) -> None: self._start_measurement(self._cpu_measurement_proc_wise, interval, self._process_cpu_measurements) def start_mem_measurement_proc_wise(self, interval: float) -> None: self._start_measurement(self._mem_measurement_proc_wise, interval, self._process_mem_measurements) + def start_diskio_measurement_proc_wise(self, interval: float) -> None: + self._start_measurement(self._diskio_measurement_proc_wise, interval, self._process_diskio_measurements) + def stop_cpu_measurement_proc_wise(self) -> ProcessResourceList: _, results = self._stop_measurement(self._cpu_measurement_proc_wise) lst = [ResourceSampleProcessInfo(**p.model_dump(), samples=samples) for p, samples in results.items()] @@ -125,6 +191,13 @@ def stop_mem_measurement_proc_wise(self) -> ProcessMemoryList: ) return output + def stop_diskio_measurement_proc_wise(self) -> ProcessDiskIOList: + _, results = self._stop_measurement(self._diskio_measurement_proc_wise) + output = ProcessDiskIOList( + DiskIOSampleProcessInfo(**p.model_dump(), samples=samples) for p, samples in results.items() + ) + return output + def _process_cpu_measurements( self, results: List[Sample], processed_results: Dict[Process, List[tuple]] ) -> Tuple[List[Sample], Dict[Process, List[tuple]]]: @@ -150,6 +223,18 @@ def _process_mem_measurements( processed_results.setdefault(p, []).append(BaseMemorySample(**sample)) return [], processed_results + def _process_diskio_measurements( + self, results: List[Sample], processed_results: Dict[Process, List[tuple]] + ) -> Tuple[List[Sample], Dict[Process, List[tuple]]]: + processed_results = processed_results or {} + for result in results: + with _handle_parsing_error(result.data): + for p, sample in linux_parsers.parse_disk_usage_from_proc_files( + result.data, self._nonexistant_separator_file, timestamp=result.timestamp + ).items(): + processed_results.setdefault(p, []).append(DiskIOProcessSample(**sample)) + return [], processed_results + def _cpu_measurement(self, **_): command = "cat /proc/stat | grep cpu" return self._client.run_command(command.strip()) @@ -162,7 +247,15 @@ def _resource_measurement_proc_wise(self, **_) -> Tuple[str, str]: r'sed "s:\([0-9]*\):' + self._nonexistant_separator_file + r" /proc/\1/stat " r'/proc/\1/cmdline:") ' + self._nonexistant_separator_file + r" /proc/stat 2>&1" ) + return self._client.run_command(command) + def _io_measurement_proc_wise(self, **_) -> str: + # A cat error line for the nonexistent separator file delimits each process's stat/io/cmdline output + command = ( + r'/bin/cat $(ls /proc | grep "[0-9]" | ' + r'sed "s:\([0-9]*\):' + self._nonexistant_separator_file + r" /proc/\1/stat " + r'/proc/\1/io /proc/\1/cmdline:") ' + self._nonexistant_separator_file + r" 2>&1" + ) return self._client.run_command(command) def _cpu_measurement_proc_wise(self, *args, **kwargs) -> str: @@ -171,6 +263,17 @@ def _mem_measurement_proc_wise(self, *args, **kwargs) -> 
str: return self._resource_measurement_proc_wise(*args, **kwargs) + def _diskio_measurement_proc_wise(self, *args, **kwargs) -> str: + return self._io_measurement_proc_wise(*args, **kwargs) + def _mem_measurement(self, **_): command = "cat /proc/meminfo" return self._client.run_command(command) + + def _get_diskstats(self, **_): + command = "cat /proc/diskstats" + return self._client.run_command(command) + + def _get_df(self, **_): + command = "df" + return self._client.run_command(command) diff --git a/src/handlers/linux_handler.py b/remoteperf/handlers/linux_handler.py similarity index 76% rename from src/handlers/linux_handler.py rename to remoteperf/handlers/linux_handler.py index 55c5a9b..8ff7157 100644 --- a/src/handlers/linux_handler.py +++ b/remoteperf/handlers/linux_handler.py @@ -2,11 +2,11 @@ # SPDX-License-Identifier: Apache-2.0 from contextlib import contextmanager -from src._parsers import linux as linux_parsers -from src._parsers.generic import ParsingError -from src.handlers.base_linux_handler import BaseLinuxHandler, BaseLinuxHandlerException -from src.handlers.posix_implementation_handler import MissingPosixCapabilityException -from src.models.linux import LinuxBootTimeInfo +from remoteperf._parsers import linux as linux_parsers +from remoteperf._parsers.generic import ParsingError +from remoteperf.handlers.base_linux_handler import BaseLinuxHandler, BaseLinuxHandlerException +from remoteperf.handlers.posix_implementation_handler import MissingPosixCapabilityException +from remoteperf.models.linux import LinuxBootTimeInfo class LinuxHandlerException(BaseLinuxHandlerException): diff --git a/src/handlers/posix_implementation_handler.py b/remoteperf/handlers/posix_implementation_handler.py similarity index 93% rename from src/handlers/posix_implementation_handler.py rename to remoteperf/handlers/posix_implementation_handler.py index aacf544..9b05c75 100644 --- a/src/handlers/posix_implementation_handler.py +++ b/remoteperf/handlers/posix_implementation_handler.py @@ -8,11 +8,11 @@ from pathlib import Path from typing import List, Optional -from src.clients.base_client import BaseClient -from src.handlers.base_handler import BaseHandler, BaseHandlerException -from src.models.base import Sample -from src.utils.fs_utils import RemoteFs -from src.utils.threading import ExceptionThread +from remoteperf.clients.base_client import BaseClient +from remoteperf.handlers.base_handler import BaseHandler, BaseHandlerException +from remoteperf.models.base import Sample +from remoteperf.utils.fs_utils import RemoteFs +from remoteperf.utils.threading import ExceptionThread class PosixHandlerException(BaseHandlerException): diff --git a/src/handlers/qnx_handler.py b/remoteperf/handlers/qnx_handler.py similarity index 87% rename from src/handlers/qnx_handler.py rename to remoteperf/handlers/qnx_handler.py index 38e611e..978cd90 100644 --- a/src/handlers/qnx_handler.py +++ b/remoteperf/handlers/qnx_handler.py @@ -5,32 +5,34 @@ from more_itertools import partition -from src._parsers import qnx as qnx_parsers -from src._parsers.generic import ParsingError -from src.handlers.posix_implementation_handler import ( +from remoteperf._parsers import qnx as qnx_parsers +from remoteperf._parsers.generic import ParsingError +from remoteperf.handlers.posix_implementation_handler import ( MissingPosixCapabilityException, PosixHandlerException, PosixImplementationHandler, ) -from src.models.base import ( +from remoteperf.models.base import ( BaseCpuSample, BaseMemorySample, BootTimeInfo, + DiskInfo, 
Process, Sample, SystemMemory, SystemUptimeInfo, ) -from src.models.qnx import QnxCpuUsageInfo -from src.models.super import ( +from remoteperf.models.qnx import QnxCpuUsageInfo +from remoteperf.models.super import ( CpuList, CpuSampleProcessInfo, + DiskInfoList, MemoryList, MemorySampleProcessInfo, ProcessCpuList, ProcessMemoryList, ) -from src.utils.threading import DelegatedExecutionThread +from remoteperf.utils.threading import DelegatedExecutionThread class QNXHandlerException(PosixHandlerException): @@ -73,6 +75,10 @@ def get_mem_usage(self) -> SystemMemory: with _handle_parsing_error(result): return SystemMemory(**qnx_parsers.parse_proc_vm_stat(result)) + def get_diskinfo(self) -> DiskInfoList: + output = qnx_parsers.parse_df_qnx(self._get_df()) + return DiskInfoList([DiskInfo(**kpis) for _, kpis in output.items()]) + def get_mem_usage_proc_wise(self, **_) -> ProcessMemoryList: """ Get process-wise memory usage @@ -87,12 +93,26 @@ ) def get_system_uptime(self) -> SystemUptimeInfo: pidin = self._client.run_command("pidin info") date_data = self._client.run_command("date") with _handle_parsing_error(f"pidin: ({pidin}) date: ({date_data})"): return SystemUptimeInfo(**qnx_parsers.parse_uptime(pidin, date_data)) + def start_diskinfo_measurement(self, interval: float) -> None: + self._start_measurement(self._get_df, interval) + + def stop_diskinfo_measurement(self) -> DiskInfoList: + results, _ = self._stop_measurement(self._get_df) + processed_results = [] + for sample in results: + with _handle_parsing_error(sample.data): + output = qnx_parsers.parse_df_qnx(sample.data) + for _, kpis in output.items(): + kpis["timestamp"] = sample.timestamp + processed_results.append(DiskInfo(**kpis)) + + return DiskInfoList(processed_results) + def get_boot_time(self) -> BootTimeInfo: """ Get the time it took for the system to boot. 
@@ -222,3 +242,7 @@ def _process_mem_measurements( ).items(): processed_results.setdefault(p, []).append(BaseMemorySample(**sample)) return [], processed_results + + def _get_df(self, **_): + command = "df" + return self._client.run_command(command) diff --git a/src/models/__init__.py b/remoteperf/models/__init__.py similarity index 100% rename from src/models/__init__.py rename to remoteperf/models/__init__.py diff --git a/src/models/base.py b/remoteperf/models/base.py similarity index 84% rename from src/models/base.py rename to remoteperf/models/base.py index 0fdb7f5..19ac730 100644 --- a/src/models/base.py +++ b/remoteperf/models/base.py @@ -20,7 +20,7 @@ import yaml from typeguard import TypeCheckError, check_type -from src.utils.attrs_util import attrs_init_replacement, converter +from remoteperf.utils.attrs_util import attrs_init_replacement, converter class BaseRemoteperfModelException(Exception): @@ -96,9 +96,7 @@ def __add__(self, other): lambda a, b: a + b, ) ) - raise TypeError( - f"Unsupported operand type(s) for +: '{type(self).__name__}' and '{type(other).__name__}'" - ) + raise TypeError(f"Unsupported operand type(s) for +: '{type(self).__name__}' and '{type(other).__name__}'") def __div__(self, denominator): if isinstance(denominator, (float, int)): return self.__class__( **self._recursive_op( self.model_dump(), denominator, lambda a, b: a / b, ) ) - raise TypeError( - f"Unsupported operand type(s) for /: 'MemoryInfo' and '{type(denominator).__name__}'" - ) + raise TypeError(f"Unsupported operand type(s) for /: 'MemoryInfo' and '{type(denominator).__name__}'") @classmethod def _recursive_op(cls, dict1, other, operation): @@ -132,9 +128,7 @@ elif value is None: result[key] = None else: - raise TypeError( - f"Unsupported value types for key '{key}': {type(value)} and {type(operand)}" - ) + raise TypeError(f"Unsupported value types for key '{key}': {type(value)} and {type(operand)}") return result __floordiv__ = __div__ @@ -175,6 +169,42 @@ class ExtendedMemoryInfo(MemoryInfo): available: int +@attrs_init_replacement +@attr.s(auto_attribs=True, kw_only=True) +class DiskInfo(BaseInfoModel): + filesystem: str + size: int + used: int + available: int + used_percent: int + mounted_on: str + + +@attrs_init_replacement +@attr.s(auto_attribs=True, kw_only=True) +class DiskIOInfo(BaseInfoModel): + device_major_number: int + device_minor_number: int + device_name: str + reads_completed: int + reads_merged: int + sectors_reads: int + time_spent_reading: int + writes_completed: int + writes_merged: int + sectors_written: int + time_spent_writing: int + IOs_currently_in_progress: int + time_spent_doing_io: int + weighted_time_spent_doing_io: int + discards_completed: int + discards_merged: int + sectors_discarded: int + time_spent_discarding: int + flushes_completed: int + time_spent_flushing: int + + @attrs_init_replacement @attr.s(auto_attribs=True, kw_only=True) class BaseProcess(BaseRemoteperfModel): @@ -223,6 +252,24 @@ class SystemMemory(ArithmeticBaseInfoModel): swap: Optional[MemoryInfo] = None +@attrs_init_replacement +@attr.s(auto_attribs=True, kw_only=True) +class DiskIOProcess(BaseRemoteperfModel): + rchar: int + wchar: int + syscr: int + syscw: int + read_bytes: int + write_bytes: int + cancelled_write_bytes: int + + +@attrs_init_replacement +@attr.s(auto_attribs=True, kw_only=True) +class DiskIOProcessSample(ArithmeticBaseInfoModel, DiskIOProcess): + pass + + @attr.s(auto_attribs=True, kw_only=True) class Sample(BaseInfoModel): def __new__(cls, *_, **__):
diff --git a/src/models/linux.py b/remoteperf/models/linux.py
similarity index 91%
rename from src/models/linux.py
rename to remoteperf/models/linux.py
index 708c036..49be1da 100644
--- a/src/models/linux.py
+++ b/remoteperf/models/linux.py
@@ -5,13 +5,13 @@

 import attr

-from src.models.base import (
+from remoteperf.models.base import (
     BaseCpuSample,
     BaseCpuUsageInfo,
     BaseRemoteperfModel,
     BootTimeInfo,
 )
-from src.utils.attrs_util import attrs_init_replacement
+from remoteperf.utils.attrs_util import attrs_init_replacement


 @attrs_init_replacement
diff --git a/remoteperf/models/qnx.py b/remoteperf/models/qnx.py
new file mode 100644
index 0000000..0f49c3e
--- /dev/null
+++ b/remoteperf/models/qnx.py
@@ -0,0 +1,18 @@
+# Copyright 2024 Volvo Cars
+# SPDX-License-Identifier: Apache-2.0
+import attr
+
+from remoteperf.models.base import BaseCpuUsageInfo
+from remoteperf.utils.attrs_util import attrs_init_replacement
+
+
+@attrs_init_replacement
+@attr.s(auto_attribs=True, kw_only=True)
+class QnxCpuUsageInfo(BaseCpuUsageInfo):
+    pass
+
+
+@attrs_init_replacement
+@attr.s(auto_attribs=True, kw_only=True, slots=True)
+class QnxCpuSample(BaseCpuUsageInfo):
+    pass
diff --git a/src/models/super.py b/remoteperf/models/super.py
similarity index 69%
rename from src/models/super.py
rename to remoteperf/models/super.py
index 31f3903..20ce1fd 100644
--- a/src/models/super.py
+++ b/remoteperf/models/super.py
@@ -5,17 +5,20 @@

 import attr

-from src.models.base import (
+from remoteperf.models.base import (
     ArithmeticBaseInfoModel,
     BaseCpuSample,
     BaseCpuUsageInfo,
     BaseMemorySample,
     BaseProcess,
+    DiskIOInfo,
+    DiskIOProcessSample,
+    DiskInfo,
     ModelList,
     SystemMemory,
 )
-from src.models.linux import LinuxResourceSample
-from src.utils.attrs_util import attrs_init_replacement
+from remoteperf.models.linux import LinuxResourceSample
+from remoteperf.utils.attrs_util import attrs_init_replacement

 # Superstructure models that can't reside in base due to circular imports
@@ -112,6 +115,28 @@ def max_mem_usage(self) -> BaseMemorySample:
         return top.model


+@attrs_init_replacement
+@attr.s(auto_attribs=True, kw_only=True)
+class DiskIOSampleProcessInfo(ProcessInfo):
+    samples: List[DiskIOProcessSample]
+
+    @property
+    def avg_read_bytes(self):
+        return self._sum_read_bytes / len(self.samples)
+
+    @property
+    def avg_write_bytes(self):
+        return self._sum_write_bytes / len(self.samples)
+
+    @property
+    def _sum_read_bytes(self):
+        return sum(model.read_bytes for model in self.samples)
+
+    @property
+    def _sum_write_bytes(self):
+        return sum(model.write_bytes for model in self.samples)
+
+
 @attrs_init_replacement
 @attr.s(auto_attribs=True, kw_only=True)
 class ResourceSampleProcessInfo(CpuSampleProcessInfo, MemorySampleProcessInfo):
@@ -171,6 +196,49 @@ def highest_peak_mem_usage(self, n: int = 5):
         return self.__class__(sorted(self, key=lambda m: m.max_mem_usage.mem_usage, reverse=True)[:n])


+class _ProcessDiskIOList:
+    def highest_average_read_bytes(self, n: int = 5):
+        """
+        Returns the top `n` processes with the highest average read bytes.
+
+        Args:
+            n (int, optional): The number of processes to return. Defaults to 5.
+
+        Returns:
+            ProcessDiskIOList: A new instance of ProcessDiskIOList containing the top `n` processes.
+        """
+        return self.__class__(sorted(self, key=lambda m: m.avg_read_bytes, reverse=True)[:n])
+
+    def highest_average_write_bytes(self, n: int = 5):
+        """
+        Returns the top `n` processes with the highest average write bytes.
+
+        Args:
+            n (int, optional): The number of processes to return. Defaults to 5.
+
+        Returns:
+            ProcessDiskIOList: A new instance of ProcessDiskIOList containing the top `n` processes.
+        """
+        return self.__class__(sorted(self, key=lambda m: m.avg_write_bytes, reverse=True)[:n])
+
+
+class _DiskList:
+    def get_disk(self, disk: str):
+        """
+        Returns a new instance containing only entries for the specified disk.
+
+        Args:
+            disk (str): The disk to filter by (``filesystem`` for disk info entries,
+                ``device_name`` for disk IO entries).
+
+        Returns:
+            DiskList: A new instance of DiskList containing only the specified disk.
+        """
+        return self.__class__(
+            filter(lambda m: disk in (getattr(m, "filesystem", None), getattr(m, "device_name", None)), self)
+        )
+
+
 class ProcessCpuList(ModelList[CpuSampleProcessInfo], _ProcessCpuList):
     pass
@@ -181,3 +249,15 @@ class ProcessMemoryList(ModelList[MemorySampleProcessInfo], _ProcessMemoryList):
     pass


 class ProcessResourceList(ModelList[ResourceSampleProcessInfo], _ProcessCpuList, _ProcessMemoryList):
     pass
+
+
+class ProcessDiskIOList(ModelList[DiskIOSampleProcessInfo], _ProcessDiskIOList):
+    pass
+
+
+class DiskIOList(ModelList[DiskIOInfo], _DiskList):
+    pass
+
+
+class DiskInfoList(ModelList[DiskInfo], _DiskList):
+    pass
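The new list wrappers compose exactly like the existing CPU and memory ones. A small illustration of the process-wise helpers (invented numbers; assumes the renamed `remoteperf` package is importable):

from remoteperf.models.base import DiskIOProcessSample
from remoteperf.models.super import DiskIOSampleProcessInfo, ProcessDiskIOList

io = dict(rchar=1, wchar=2, syscr=3, syscw=4, read_bytes=5, write_bytes=6, cancelled_write_bytes=7)
processes = ProcessDiskIOList(
    [
        DiskIOSampleProcessInfo(pid=1, name="idle", command="/bin/idle", start_time="0",
                                samples=[DiskIOProcessSample(**io)]),
        DiskIOSampleProcessInfo(pid=2, name="busy", command="/bin/busy", start_time="0",
                                samples=[DiskIOProcessSample(**dict(io, write_bytes=60))]),
    ]
)
# "busy" averages 60 written bytes per sample, so it sorts first:
assert processes.highest_average_write_bytes(n=1)[0].pid == 2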
diff --git a/src/utils/__init__.py b/remoteperf/utils/__init__.py
similarity index 100%
rename from src/utils/__init__.py
rename to remoteperf/utils/__init__.py
diff --git a/src/utils/attrs_util.py b/remoteperf/utils/attrs_util.py
similarity index 100%
rename from src/utils/attrs_util.py
rename to remoteperf/utils/attrs_util.py
diff --git a/src/utils/fs_utils.py b/remoteperf/utils/fs_utils.py
similarity index 98%
rename from src/utils/fs_utils.py
rename to remoteperf/utils/fs_utils.py
index b6820de..6032aef 100644
--- a/src/utils/fs_utils.py
+++ b/remoteperf/utils/fs_utils.py
@@ -2,7 +2,7 @@
 # SPDX-License-Identifier: Apache-2.0
 import uuid

-from src.clients.base_client import BaseClient
+from remoteperf.clients.base_client import BaseClient


 class RemoteFsException(Exception):
diff --git a/src/utils/math.py b/remoteperf/utils/math.py
similarity index 100%
rename from src/utils/math.py
rename to remoteperf/utils/math.py
diff --git a/src/utils/threading.py b/remoteperf/utils/threading.py
similarity index 86%
rename from src/utils/threading.py
rename to remoteperf/utils/threading.py
index 4e2ae7e..7b54459 100644
--- a/src/utils/threading.py
+++ b/remoteperf/utils/threading.py
@@ -4,7 +4,7 @@
 import time
 from datetime import datetime

-from src.clients.base_client import BaseClient
+from remoteperf.clients.base_client import BaseClient


 class ThreadException(Exception):
@@ -59,13 +59,9 @@ def __init__(
         join=False,
         **kwargs,
     ):
-        filename = (
-            f"/tmp/remoteperf_delayed_{uid}-{round(datetime.now().timestamp()*100)}"
-        )
+        filename = f"/tmp/remoteperf_delayed_{uid}-{round(datetime.now().timestamp()*100)}"
         # Output from command is streamed into file, so we move the file to avoid race conditions
-        command = (
-            f"({command}) > {filename}_tmp && mv {filename}_tmp {filename} & echo $!"
-        )
+        command = f"({command}) > {filename}_tmp && mv {filename}_tmp {filename} & echo $!"
         client.add_cleanup("/tmp/remoteperf_delayed_*")
         client.run_command(command)
         super().__init__(
@@ -81,9 +77,7 @@ def __init__(
             self.join()

     @staticmethod
-    def _delayed_file_read_and_remove(
-        delay: float, filename: str, client: BaseClient, retries: int = 3
-    ) -> str:
+    def _delayed_file_read_and_remove(delay: float, filename: str, client: BaseClient, retries: int = 3) -> str:
         _retries = retries
         time.sleep(delay + 0.2)
         command = f"cat {filename}"
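The comment about race conditions is the heart of this class: output streams into `<file>_tmp` and is renamed to `<file>` only once the command has finished, so a poller looking for `<file>` sees either nothing or complete output — a rename within one filesystem is atomic on POSIX. The same pattern in miniature, as a local-Python illustration rather than anything the library ships:

import os
import subprocess

final, tmp = "/tmp/out", "/tmp/out_tmp"
with open(tmp, "w") as stream:
    # Output accumulates under the temporary name while the command runs...
    subprocess.run(["uname", "-a"], stdout=stream, check=True)
os.replace(tmp, final)  # ...and is published atomically on completion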
{raw_table_data}") - return result - - -def convert_compact_format_to_seconds(time_str: str) -> float: - """ - Converts a time string into seconds. - - Args: - time_str (str): A string representing time in the format 'D-HH:MM:SS.SSS'. - - Returns: - float: The total number of seconds represented by the input time string. - - """ - regex_pattern = r"(\d*\.?\d+)([d|h|m|s])?" - - total_seconds: float = 0 - for match in re.finditer(regex_pattern, time_str): - value, unit = match.groups() - if unit: - multiplier = {"d": 86400, "h": 3600, "m": 60, "s": 1}[unit] - total_seconds += float(value) * multiplier - else: - total_seconds += float(value) - - return total_seconds - - -def convert_to_int(value): - try: - if isinstance(value, str): - value = "".join([d for d in "value" if d.isnumeric()]) - return int(value) - except ValueError: - return 0 diff --git a/src/clients/__init__.py b/src/clients/__init__.py deleted file mode 100644 index a5c9e1b..0000000 --- a/src/clients/__init__.py +++ /dev/null @@ -1,14 +0,0 @@ -# Copyright 2024 Volvo Cars -# SPDX-License-Identifier: Apache-2.0 -from src.clients.adb_client import ADBClient, ADBClientException -from src.clients.base_client import BaseClient, BaseClientException -from src.clients.ssh_client import SSHClient, SSHClientException - -__all__ = [ - "ADBClient", - "SSHClient", - "BaseClient", - "ADBClientException", - "SSHClientException", - "BaseClientException", -] diff --git a/src/handlers/__init__.py b/src/handlers/__init__.py deleted file mode 100644 index 03929d2..0000000 --- a/src/handlers/__init__.py +++ /dev/null @@ -1,17 +0,0 @@ -# Copyright 2024 Volvo Cars -# SPDX-License-Identifier: Apache-2.0 -from src.handlers.android_handler import AndroidHandler, AndroidHandlerException -from src.handlers.base_handler import BaseHandler, BaseHandlerException -from src.handlers.linux_handler import LinuxHandler, LinuxHandlerException -from src.handlers.qnx_handler import QNXHandler, QNXHandlerException - -__all__ = [ - "AndroidHandler", - "AndroidHandlerException", - "BaseHandler", - "BaseHandlerException", - "LinuxHandler", - "LinuxHandlerException", - "QNXHandler", - "QNXHandlerException", -] diff --git a/src/models/qnx.py b/src/models/qnx.py deleted file mode 100644 index d7ab57a..0000000 --- a/src/models/qnx.py +++ /dev/null @@ -1,18 +0,0 @@ -# Copyright 2024 Volvo Cars -# SPDX-License-Identifier: Apache-2.0 -import attr - -from src.models.base import BaseCpuUsageInfo -from src.utils.attrs_util import attrs_init_replacement - - -@attrs_init_replacement -@attr.s(auto_attribs=True, kw_only=True) -class QnxCpuUsageInfo(BaseCpuUsageInfo): - pass - - -@attrs_init_replacement -@attr.s(auto_attribs=True, kw_only=True, slots=True) -class QnxCpuSample(BaseCpuUsageInfo): - pass diff --git a/tests/conftest.py b/tests/conftest.py index 1d33124..9a95b37 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -8,10 +8,9 @@ import pytest import yaml -from src.clients.base_client import BaseClient -from src.clients.ssh_client import SSHClient -from src.handlers.linux_handler import LinuxHandler -from src.handlers.qnx_handler import QNXHandler +from remoteperf.clients.base_client import BaseClient +from remoteperf.handlers.linux_handler import LinuxHandler +from remoteperf.handlers.qnx_handler import QNXHandler class MockClient(BaseClient): @@ -129,6 +128,13 @@ def mock_client(): return MockClient(queries) +@pytest.fixture +def mock_qnx_client(): + with (pathlib.Path(__file__).parent / "data" / "valid_handler_data_qnx.yaml").open() as file: + queries = 
diff --git a/tests/conftest.py b/tests/conftest.py
index 1d33124..9a95b37 100644
--- a/tests/conftest.py
+++ b/tests/conftest.py
@@ -8,10 +8,9 @@
 import pytest
 import yaml

-from src.clients.base_client import BaseClient
-from src.clients.ssh_client import SSHClient
-from src.handlers.linux_handler import LinuxHandler
-from src.handlers.qnx_handler import QNXHandler
+from remoteperf.clients.base_client import BaseClient
+from remoteperf.handlers.linux_handler import LinuxHandler
+from remoteperf.handlers.qnx_handler import QNXHandler


 class MockClient(BaseClient):
@@ -129,6 +128,13 @@ def mock_client():
     return MockClient(queries)


+@pytest.fixture
+def mock_qnx_client():
+    with (pathlib.Path(__file__).parent / "data" / "valid_handler_data_qnx.yaml").open() as file:
+        queries = yaml.safe_load(file)
+    return MockClient(queries)
+
+
 @pytest.fixture
 def subprocess_client():
     return SubprocessLocalClient()
@@ -144,6 +150,11 @@ def qnx_handler(mock_client):
     return QNXHandler(client=mock_client)


+@pytest.fixture
+def unique_qnx_handler(mock_qnx_client):
+    return QNXHandler(client=mock_qnx_client)
+
+
 @pytest.fixture
 def broken_mock_client():
     with (pathlib.Path(__file__).parent / "data" / "invalid_handler_data.yaml").open() as file:
diff --git a/tests/data/invalid_handler_data.yaml b/tests/data/invalid_handler_data.yaml
index c6db091..477281e 100644
--- a/tests/data/invalid_handler_data.yaml
+++ b/tests/data/invalid_handler_data.yaml
@@ -12,3 +12,26 @@ systemd-analyze: |

 '[ -e "/dev/bmetrics" ] && echo "True" || echo "False"': "False"
+
+"df": |
+  Filesystem 1K-blocks Used Available Use% Mounted on
+  /dev/sda 7654321 0 1234567 11% /
+
+"cat /proc/diskstats": |
+  1 0 sda 123 12 1 4 45 456 7 78 789 0 321 21 654 54 987 87 98
+
+'/bin/cat $(ls /proc | grep "[0-9]" | sed "s:\([0-9]*\):e39f7761903b /proc/\1/stat /proc/\1/io /proc/\1/cmdline:") e39f7761903b 2>&1': &proc_disk_list
+  - |
+    /bin/cat: e39f7761903b: No such file or directory
+    846 (bash) S 844 846 846 34816 1055492 4194560 4316 23491 5 608 9 0 56 25 20 0 1 0 1715 6504448 1355 18446744073709551615 94242165944320 94242166857581 140721942657360 0 0 0 65536 3686404 1266761467 1 0 0 17 6 0 0 0 0 0 94242167102096 94242167150160 94243001663488 140721942665787 140721942665793 140721942665793 140721942667246 0
+    rchar: 1
+    wchar: 2
+    syscr: 3
+    syscw: 4
+    read_bytes: 5
+    write_bytes: 6
+    cancelled_write_bytes: 7
+    /bin/bash/bin/cat: e39f7761903b: No such file or directory
+    911 ((sd-pam)) S 910 910 910 0 -1 1077936448 54 0 0 0 0 0 0 0 20 0 1 0 1815 106061824 926 18446744073709551615 1 1 0 0 0 0 0 4096 0 0 0 0 17 2 0 0 0 0 0 0 0 0 0 0 0 0 0
+    /bin/cat: /proc/911/io: Permission denied
+    (sd-pam)/bin/cat: e39f7761903b: No such file or directory
diff --git a/tests/data/valid_handler_data.yaml b/tests/data/valid_handler_data.yaml
index 2bd53bf..7d62ae5 100644
--- a/tests/data/valid_handler_data.yaml
+++ b/tests/data/valid_handler_data.yaml
@@ -297,3 +297,26 @@ systemd-analyze: |
   789 Oct 04 05:27 usr/lib/messageservice/messageservice /usr/lib/messageservice/messageservice --type=utility --utility-sub-type=audio.mojom.AudioService --lang=en-US
   12345 Oct 04 05:27 bin/usr/bin/python3 /bin/usr/bin/python3 /usr/bin/landscape-monitor--ignore-sigint
   54321 Oct 04 05:27 bin/bash bash
+
+"df": |
+  Filesystem 1K-blocks Used Available Use% Mounted on
+  /dev/sda 7654321 0 1234567 11% /
+
+"cat /proc/diskstats": |
+  1 0 sda 123 12 1 4 45 456 7 78 789 0 321 21 654 54 987 87 98
+
+'/bin/cat $(ls /proc | grep "[0-9]" | sed "s:\([0-9]*\):e39f7761903b /proc/\1/stat /proc/\1/io /proc/\1/cmdline:") e39f7761903b 2>&1': &proc_disk_list
+  - |
+    /bin/cat: e39f7761903b: No such file or directory
+    846 (bash) S 844 846 846 34816 1055492 4194560 4316 23491 5 608 9 0 56 25 20 0 1 0 1715 6504448 1355 18446744073709551615 94242165944320 94242166857581 140721942657360 0 0 0 65536 3686404 1266761467 1 0 0 17 6 0 0 0 0 0 94242167102096 94242167150160 94243001663488 140721942665787 140721942665793 140721942665793 140721942667246 0
+    rchar: 1
+    wchar: 2
+    syscr: 3
+    syscw: 4
+    read_bytes: 5
+    write_bytes: 6
+    cancelled_write_bytes: 7
+    /bin/bash/bin/cat: e39f7761903b: No such file or directory
+    911 ((sd-pam)) S 910 910 910 0 -1 1077936448 54 0 0 0 0 0 0 0 20 0 1 0 1815 106061824 926 18446744073709551615 1 1 0 0 0 0 0 4096 0 0 0 0 17 2 0 0 0 0 0 0 0 0 0 0 0 0 0
+    /bin/cat: /proc/911/io: Permission denied
+    (sd-pam)/bin/cat: e39f7761903b: No such file or directory
diff --git a/tests/data/valid_handler_data_qnx.yaml b/tests/data/valid_handler_data_qnx.yaml
new file mode 100644
index 0000000..d554dbd
--- /dev/null
+++ b/tests/data/valid_handler_data_qnx.yaml
@@ -0,0 +1,3 @@
+"df": |
+  /dev/sda 7654321 0 1234567 11% /
+  /dev/sdb 654321 0 123456 10% /
diff --git a/tests/test_android_handler.py b/tests/test_android_handler.py
index af8072a..d07466e 100644
--- a/tests/test_android_handler.py
+++ b/tests/test_android_handler.py
@@ -1,6 +1,10 @@
 import pytest

-from src.handlers.android_handler import AndroidHandler, AndroidHandlerException, MissingAndroidCapabilityException
+from remoteperf.handlers.android_handler import (
+    AndroidHandler,
+    AndroidHandlerException,
+    MissingAndroidCapabilityException,
+)


 @pytest.fixture
diff --git a/tests/test_arithmetic.py b/tests/test_arithmetic.py
index f97484d..680707d 100644
--- a/tests/test_arithmetic.py
+++ b/tests/test_arithmetic.py
@@ -1,9 +1,9 @@
 import textwrap
 import time

-from src.handlers.linux_handler import LinuxHandler
-from src.models.base import ExtendedMemoryInfo, MemoryInfo, SystemMemory
-from src.models.linux import LinuxCpuUsageInfo
+from remoteperf.handlers.linux_handler import LinuxHandler
+from remoteperf.models.base import ExtendedMemoryInfo, MemoryInfo, SystemMemory
+from remoteperf.models.linux import LinuxCpuUsageInfo


 def test_avg_cpu(linux_handler: LinuxHandler):
diff --git a/tests/test_common_handler.py b/tests/test_common_handler.py
index 2f6db42..81931a1 100644
--- a/tests/test_common_handler.py
+++ b/tests/test_common_handler.py
@@ -2,7 +2,7 @@

 import pytest

-from src.handlers.linux_handler import LinuxHandler
+from remoteperf.handlers.linux_handler import LinuxHandler


 @pytest.fixture
mounted_on="/", + ) + ] +) + + +def test_disc_info(linux_handler): + output = linux_handler.get_diskinfo() + assert output.model_dump(exclude="timestamp") == desired_output_info.model_dump(exclude="timestamp") + + +def test_disc_info_cont(linux_handler): + linux_handler.start_diskinfo_measurement(0.1) + time.sleep(0.05) + output = linux_handler.stop_diskinfo_measurement() + assert output.model_dump(exclude="timestamp") == desired_output_info.model_dump(exclude="timestamp") + + +desired_output_usage = DiskIOList( + [ + DiskIOInfo( + device_major_number=1, + device_minor_number=0, + device_name="sda", + reads_completed=123, + reads_merged=12, + sectors_reads=1, + time_spent_reading=4, + writes_completed=45, + writes_merged=456, + sectors_written=7, + time_spent_writing=78, + IOs_currently_in_progress=789, + time_spent_doing_io=0, + weighted_time_spent_doing_io=321, + discards_completed=21, + discards_merged=654, + sectors_discarded=54, + time_spent_discarding=987, + time_spent_flushing=98, + ) + ] +) + + +def test_discio(linux_handler): + output = linux_handler.get_diskio() + assert output.model_dump(exclude="timestamp") == desired_output_usage.model_dump(exclude="timestamp") + + +def test_discio_cont(linux_handler): + linux_handler.start_diskio_measurement(0.1) + time.sleep(0.05) + output = linux_handler.stop_diskio_measurement() + assert output.model_dump(exclude="timestamp") == desired_output_usage.model_dump(exclude="timestamp") + + +sample = DiskIOProcessSample( + rchar=1, + wchar=2, + syscr=3, + syscw=4, + read_bytes=5, + write_bytes=6, + cancelled_write_bytes=7, +) +desired_output_usage_proc_atomic = ProcessDiskIOList( + [ + DiskIOSampleProcessInfo( + pid=846, + name="bash", + command="/bin/bash", + samples=[sample], + start_time="1715", + ) + ] +) + + +def test_discio_proc(linux_handler): + output = linux_handler.get_diskio_proc_wise() + assert output.model_dump(exclude="timestamp") == desired_output_usage_proc_atomic.model_dump(exclude="timestamp") + + +desired_output_usage_proc_cont = ProcessDiskIOList( + [ + DiskIOSampleProcessInfo( + pid=846, + name="bash", + command="/bin/bash", + samples=[sample, sample], + start_time="1715", + ) + ] +) + + +def test_discio_proc_cont(linux_handler): + linux_handler.start_diskio_measurement_proc_wise(0.1) + time.sleep(0.15) + output = linux_handler.stop_diskio_measurement_proc_wise() + assert output.model_dump(exclude="timestamp") == desired_output_usage_proc_cont.model_dump(exclude="timestamp") + assert output[0].avg_read_bytes == 5.0 + assert output[0].avg_write_bytes == 6.0 + assert output[0]._sum_read_bytes == 10 + assert output[0]._sum_write_bytes == 12 diff --git a/tests/test_model_serialization.py b/tests/test_model_serialization.py index 5b7eaf5..9a83eab 100644 --- a/tests/test_model_serialization.py +++ b/tests/test_model_serialization.py @@ -1,10 +1,8 @@ import json -from datetime import datetime -import pytest import yaml -from src.models.base import ( +from remoteperf.models.base import ( BaseCpuSample, BaseMemorySample, BootTimeInfo, @@ -13,9 +11,9 @@ SystemMemory, SystemUptimeInfo, ) -from src.models.linux import LinuxBootTimeInfo, LinuxCpuModeUsageInfo, LinuxCpuUsageInfo, LinuxResourceSample -from src.models.qnx import QnxCpuUsageInfo -from src.models.super import CpuSampleProcessInfo, MemorySampleProcessInfo, ResourceSampleProcessInfo +from remoteperf.models.linux import LinuxBootTimeInfo, LinuxCpuModeUsageInfo, LinuxCpuUsageInfo, LinuxResourceSample +from remoteperf.models.qnx import QnxCpuUsageInfo +from remoteperf.models.super 
diff --git a/tests/test_model_serialization.py b/tests/test_model_serialization.py
index 5b7eaf5..9a83eab 100644
--- a/tests/test_model_serialization.py
+++ b/tests/test_model_serialization.py
@@ -1,10 +1,8 @@
 import json
-from datetime import datetime

-import pytest
 import yaml

-from src.models.base import (
+from remoteperf.models.base import (
     BaseCpuSample,
     BaseMemorySample,
     BootTimeInfo,
@@ -13,9 +11,9 @@
     SystemMemory,
     SystemUptimeInfo,
 )
-from src.models.linux import LinuxBootTimeInfo, LinuxCpuModeUsageInfo, LinuxCpuUsageInfo, LinuxResourceSample
-from src.models.qnx import QnxCpuUsageInfo
-from src.models.super import CpuSampleProcessInfo, MemorySampleProcessInfo, ResourceSampleProcessInfo
+from remoteperf.models.linux import LinuxBootTimeInfo, LinuxCpuModeUsageInfo, LinuxCpuUsageInfo, LinuxResourceSample
+from remoteperf.models.qnx import QnxCpuUsageInfo
+from remoteperf.models.super import CpuSampleProcessInfo, MemorySampleProcessInfo, ResourceSampleProcessInfo


 def reserialize_yaml(object: dict):
diff --git a/tests/test_qnx_handler.py b/tests/test_qnx_handler.py
index d6b6d89..010ac0d 100644
--- a/tests/test_qnx_handler.py
+++ b/tests/test_qnx_handler.py
@@ -2,8 +2,9 @@

 import pytest

-from src.handlers.qnx_handler import MissingQnxCapabilityException, QNXHandler, QNXHandlerException
-from src.models.base import (
+from remoteperf.handlers.qnx_handler import MissingQnxCapabilityException, QNXHandler, QNXHandlerException
+from remoteperf.models.base import (
     BaseCpuSample,
     BaseCpuUsageInfo,
     BaseMemorySample,
+    DiskInfo,
@@ -13,8 +14,8 @@
     SystemMemory,
     SystemUptimeInfo,
 )
-from src.models.qnx import QnxCpuUsageInfo
-from src.models.super import CpuSampleProcessInfo, MemorySampleProcessInfo, ProcessInfo, ProcessMemoryList
+from remoteperf.models.qnx import QnxCpuUsageInfo
+from remoteperf.models.super import CpuSampleProcessInfo, DiskInfoList, MemorySampleProcessInfo, ProcessInfo, ProcessMemoryList


 @pytest.fixture
@@ -232,0 +233,49 @@
+
+
+def test_disc_info(unique_qnx_handler):
+    output = unique_qnx_handler.get_diskinfo()
+    desired_output = DiskInfoList(
+        [
+            DiskInfo(
+                filesystem="/dev/sda",
+                size=7654321,
+                used=0,
+                available=1234567,
+                used_percent=11,
+                mounted_on="/",
+            ),
+            DiskInfo(
+                filesystem="/dev/sdb",
+                size=654321,
+                used=0,
+                available=123456,
+                used_percent=10,
+                mounted_on="/",
+            ),
+        ],
+    )
+    assert output.model_dump(exclude="timestamp") == desired_output.model_dump(exclude="timestamp")
+
+
+def test_disc_info_cont(unique_qnx_handler):
+    unique_qnx_handler.start_diskinfo_measurement(0.1)
+    time.sleep(0.1)
+    output = unique_qnx_handler.stop_diskinfo_measurement()
+    disc_info_sda = DiskInfo(
+        filesystem="/dev/sda",
+        size=7654321,
+        used=0,
+        available=1234567,
+        used_percent=11,
+        mounted_on="/",
+    )
+    disc_info_sdb = DiskInfo(
+        filesystem="/dev/sdb",
+        size=654321,
+        used=0,
+        available=123456,
+        used_percent=10,
+        mounted_on="/",
+    )
+    desired_output = DiskInfoList([disc_info_sda, disc_info_sda, disc_info_sdb, disc_info_sdb])
+    assert output.model_dump(exclude="timestamp") == desired_output.model_dump(exclude="timestamp")
diff --git a/tests/test_sshclient.py b/tests/test_sshclient.py
index 5121cd0..15453f4 100644
--- a/tests/test_sshclient.py
+++ b/tests/test_sshclient.py
@@ -3,7 +3,7 @@
 import pytest
 from paramiko import AuthenticationException, SSHException

-from src.clients.ssh_client import SSHClient, SSHClientException
+from remoteperf.clients.ssh_client import SSHClient, SSHClientException


 @pytest.fixture
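With two filesystems in the QNX fixture, `get_disk` becomes the natural way to narrow a result list to one device. Hypothetical usage built from the fixture's values in tests/data/valid_handler_data_qnx.yaml (assumes the patched package is importable):

from remoteperf.models.base import DiskInfo
from remoteperf.models.super import DiskInfoList

disks = DiskInfoList(
    [
        DiskInfo(filesystem="/dev/sda", size=7654321, used=0, available=1234567, used_percent=11, mounted_on="/"),
        DiskInfo(filesystem="/dev/sdb", size=654321, used=0, available=123456, used_percent=10, mounted_on="/"),
    ]
)
assert [d.filesystem for d in disks.get_disk("/dev/sda")] == ["/dev/sda"]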