diff options
Diffstat (limited to 'src')
33 files changed, 1226 insertions, 1151 deletions
diff --git a/src/mlia/backend/__init__.py b/src/mlia/backend/__init__.py index 3d60372..745aa1b 100644 --- a/src/mlia/backend/__init__.py +++ b/src/mlia/backend/__init__.py @@ -1,3 +1,3 @@ # SPDX-FileCopyrightText: Copyright 2022, Arm Limited and/or its affiliates. # SPDX-License-Identifier: Apache-2.0 -"""Backend module.""" +"""Backends module.""" diff --git a/src/mlia/tools/metadata/__init__.py b/src/mlia/backend/corstone/__init__.py index f877e4f..a1eac14 100644 --- a/src/mlia/tools/metadata/__init__.py +++ b/src/mlia/backend/corstone/__init__.py @@ -1,3 +1,3 @@ # SPDX-FileCopyrightText: Copyright 2022, Arm Limited and/or its affiliates. # SPDX-License-Identifier: Apache-2.0 -"""Module for the tools metadata.""" +"""Corstone backend module.""" diff --git a/src/mlia/backend/corstone/install.py b/src/mlia/backend/corstone/install.py new file mode 100644 index 0000000..2a0e5c9 --- /dev/null +++ b/src/mlia/backend/corstone/install.py @@ -0,0 +1,155 @@ +# SPDX-FileCopyrightText: Copyright 2022, Arm Limited and/or its affiliates. +# SPDX-License-Identifier: Apache-2.0 +"""Module for Corstone based FVPs. + +The import of subprocess module raises a B404 bandit error. MLIA usage of +subprocess is needed and can be considered safe hence disabling the security +check. 
+""" +from __future__ import annotations + +import logging +import subprocess # nosec +from pathlib import Path + +from mlia.backend.executor.runner import BackendRunner +from mlia.backend.install import BackendInstallation +from mlia.backend.install import BackendMetadata +from mlia.backend.install import CompoundPathChecker +from mlia.backend.install import Installation +from mlia.backend.install import PackagePathChecker +from mlia.backend.install import StaticPathChecker +from mlia.utils.download import DownloadArtifact +from mlia.utils.filesystem import working_directory + + +logger = logging.getLogger(__name__) + + +class Corstone300Installer: + """Helper class that wraps Corstone 300 installation logic.""" + + def __call__(self, eula_agreement: bool, dist_dir: Path) -> Path: + """Install Corstone-300 and return path to the models.""" + with working_directory(dist_dir): + install_dir = "corstone-300" + try: + fvp_install_cmd = [ + "./FVP_Corstone_SSE-300.sh", + "-q", + "-d", + install_dir, + ] + if not eula_agreement: + fvp_install_cmd += [ + "--nointeractive", + "--i-agree-to-the-contained-eula", + ] + + # The following line raises a B603 error for bandit. 
In this + # specific case, the input is pretty much static and cannot be + # changed byt the user hence disabling the security check for + # this instance + subprocess.check_call(fvp_install_cmd) # nosec + except subprocess.CalledProcessError as err: + raise Exception( + "Error occurred during Corstone-300 installation" + ) from err + + return dist_dir / install_dir + + +def get_corstone_300_installation() -> Installation: + """Get Corstone-300 installation.""" + corstone_300 = BackendInstallation( + backend_runner=BackendRunner(), + # pylint: disable=line-too-long + metadata=BackendMetadata( + name="Corstone-300", + description="Corstone-300 FVP", + system_config="backend_configs/systems/corstone-300/backend-config.json", + apps_resources=[], + fvp_dir_name="corstone_300", + download_artifact=DownloadArtifact( + name="Corstone-300 FVP", + url="https://developer.arm.com/-/media/Arm%20Developer%20Community/Downloads/OSS/FVP/Corstone-300/FVP_Corstone_SSE-300_11.16_26.tgz", + filename="FVP_Corstone_SSE-300_11.16_26.tgz", + version="11.16_26", + sha256_hash="e26139be756b5003a30d978c629de638aed1934d597dc24a17043d4708e934d7", + ), + supported_platforms=["Linux"], + ), + # pylint: enable=line-too-long + path_checker=CompoundPathChecker( + PackagePathChecker( + expected_files=[ + "models/Linux64_GCC-6.4/FVP_Corstone_SSE-300_Ethos-U55", + "models/Linux64_GCC-6.4/FVP_Corstone_SSE-300_Ethos-U65", + ], + backend_subfolder="models/Linux64_GCC-6.4", + ), + StaticPathChecker( + static_backend_path=Path("/opt/VHT"), + expected_files=[ + "VHT_Corstone_SSE-300_Ethos-U55", + "VHT_Corstone_SSE-300_Ethos-U65", + ], + copy_source=False, + system_config=( + "backend_configs/systems/corstone-300-vht/backend-config.json" + ), + ), + ), + backend_installer=Corstone300Installer(), + ) + + return corstone_300 + + +def get_corstone_310_installation() -> Installation: + """Get Corstone-310 installation.""" + corstone_310 = BackendInstallation( + backend_runner=BackendRunner(), + # pylint: 
disable=line-too-long + metadata=BackendMetadata( + name="Corstone-310", + description="Corstone-310 FVP", + system_config="backend_configs/systems/corstone-310/backend-config.json", + apps_resources=[], + fvp_dir_name="corstone_310", + download_artifact=None, + supported_platforms=["Linux"], + ), + # pylint: enable=line-too-long + path_checker=CompoundPathChecker( + PackagePathChecker( + expected_files=[ + "models/Linux64_GCC-9.3/FVP_Corstone_SSE-310", + "models/Linux64_GCC-9.3/FVP_Corstone_SSE-310_Ethos-U65", + ], + backend_subfolder="models/Linux64_GCC-9.3", + ), + StaticPathChecker( + static_backend_path=Path("/opt/VHT"), + expected_files=[ + "VHT_Corstone_SSE-310", + "VHT_Corstone_SSE-310_Ethos-U65", + ], + copy_source=False, + system_config=( + "backend_configs/systems/corstone-310-vht/backend-config.json" + ), + ), + ), + backend_installer=None, + ) + + return corstone_310 + + +def get_corstone_installations() -> list[Installation]: + """Get Corstone installations.""" + return [ + get_corstone_300_installation(), + get_corstone_310_installation(), + ] diff --git a/src/mlia/backend/corstone/performance.py b/src/mlia/backend/corstone/performance.py new file mode 100644 index 0000000..5aabfa5 --- /dev/null +++ b/src/mlia/backend/corstone/performance.py @@ -0,0 +1,233 @@ +# SPDX-FileCopyrightText: Copyright 2022, Arm Limited and/or its affiliates. 
+# SPDX-License-Identifier: Apache-2.0 +"""Module for backend integration.""" +from __future__ import annotations + +import logging +from abc import ABC +from abc import abstractmethod +from dataclasses import dataclass +from pathlib import Path +from typing import Literal + +from mlia.backend.executor.output_consumer import Base64OutputConsumer +from mlia.backend.executor.output_consumer import OutputConsumer +from mlia.backend.executor.runner import BackendRunner +from mlia.backend.executor.runner import ExecutionParams +from mlia.backend.install import get_application_name +from mlia.backend.install import get_system_name + + +logger = logging.getLogger(__name__) + + +@dataclass +class DeviceInfo: + """Device information.""" + + device_type: Literal["ethos-u55", "ethos-u65"] + mac: int + + +@dataclass +class ModelInfo: + """Model info.""" + + model_path: Path + + +@dataclass +class PerformanceMetrics: + """Performance metrics parsed from generic inference output.""" + + npu_active_cycles: int + npu_idle_cycles: int + npu_total_cycles: int + npu_axi0_rd_data_beat_received: int + npu_axi0_wr_data_beat_written: int + npu_axi1_rd_data_beat_received: int + + +class LogWriter(OutputConsumer): + """Redirect output to the logger.""" + + def feed(self, line: str) -> bool: + """Process line from the output.""" + logger.debug(line.strip()) + return False + + +class GenericInferenceOutputParser(Base64OutputConsumer): + """Generic inference app output parser.""" + + def __init__(self) -> None: + """Init generic inference output parser instance.""" + super().__init__() + self._map = { + "NPU ACTIVE": "npu_active_cycles", + "NPU IDLE": "npu_idle_cycles", + "NPU TOTAL": "npu_total_cycles", + "NPU AXI0_RD_DATA_BEAT_RECEIVED": "npu_axi0_rd_data_beat_received", + "NPU AXI0_WR_DATA_BEAT_WRITTEN": "npu_axi0_wr_data_beat_written", + "NPU AXI1_RD_DATA_BEAT_RECEIVED": "npu_axi1_rd_data_beat_received", + } + + @property + def result(self) -> dict: + """Merge the raw results and map the 
names to the right output names.""" + merged_result = {} + for raw_result in self.parsed_output: + for profiling_result in raw_result: + for sample in profiling_result["samples"]: + name, values = (sample["name"], sample["value"]) + if name in merged_result: + raise KeyError( + f"Duplicate key '{name}' in base64 output.", + ) + new_name = self._map[name] + merged_result[new_name] = values[0] + return merged_result + + def is_ready(self) -> bool: + """Return true if all expected data has been parsed.""" + return set(self.result.keys()) == set(self._map.values()) + + def missed_keys(self) -> set[str]: + """Return a set of the keys that have not been found in the output.""" + return set(self._map.values()) - set(self.result.keys()) + + +class GenericInferenceRunner(ABC): + """Abstract class for generic inference runner.""" + + def __init__(self, backend_runner: BackendRunner): + """Init generic inference runner instance.""" + self.backend_runner = backend_runner + + def run( + self, model_info: ModelInfo, output_consumers: list[OutputConsumer] + ) -> None: + """Run generic inference for the provided device/model.""" + execution_params = self.get_execution_params(model_info) + + ctx = self.backend_runner.run_application(execution_params) + if ctx.stdout is not None: + ctx.stdout = self.consume_output(ctx.stdout, output_consumers) + + @abstractmethod + def get_execution_params(self, model_info: ModelInfo) -> ExecutionParams: + """Get execution params for the provided model.""" + + def check_system_and_application(self, system_name: str, app_name: str) -> None: + """Check if requested system and application installed.""" + if not self.backend_runner.is_system_installed(system_name): + raise Exception(f"System {system_name} is not installed") + + if not self.backend_runner.is_application_installed(app_name, system_name): + raise Exception( + f"Application {app_name} for the system {system_name} " + "is not installed" + ) + + @staticmethod + def consume_output(output: 
bytearray, consumers: list[OutputConsumer]) -> bytearray: + """ + Pass program's output to the consumers and filter it. + + Returns the filtered output. + """ + filtered_output = bytearray() + for line_bytes in output.splitlines(): + line = line_bytes.decode("utf-8") + remove_line = False + for consumer in consumers: + if consumer.feed(line): + remove_line = True + if not remove_line: + filtered_output.extend(line_bytes) + + return filtered_output + + +class GenericInferenceRunnerEthosU(GenericInferenceRunner): + """Generic inference runner on U55/65.""" + + def __init__( + self, backend_runner: BackendRunner, device_info: DeviceInfo, backend: str + ) -> None: + """Init generic inference runner instance.""" + super().__init__(backend_runner) + + system_name, app_name = self.resolve_system_and_app(device_info, backend) + self.system_name = system_name + self.app_name = app_name + self.device_info = device_info + + @staticmethod + def resolve_system_and_app( + device_info: DeviceInfo, backend: str + ) -> tuple[str, str]: + """Find appropriate system and application for the provided device/backend.""" + try: + system_name = get_system_name(backend, device_info.device_type) + except KeyError as ex: + raise RuntimeError( + f"Unsupported device {device_info.device_type} " + f"for backend {backend}" + ) from ex + + try: + app_name = get_application_name(system_name) + except KeyError as err: + raise RuntimeError(f"System {system_name} is not installed") from err + + return system_name, app_name + + def get_execution_params(self, model_info: ModelInfo) -> ExecutionParams: + """Get execution params for Ethos-U55/65.""" + self.check_system_and_application(self.system_name, self.app_name) + + system_params = [ + f"mac={self.device_info.mac}", + f"input_file={model_info.model_path.absolute()}", + ] + + return ExecutionParams( + self.app_name, + self.system_name, + [], + system_params, + ) + + +def get_generic_runner(device_info: DeviceInfo, backend: str) -> 
GenericInferenceRunner: + """Get generic runner for provided device and backend.""" + backend_runner = get_backend_runner() + return GenericInferenceRunnerEthosU(backend_runner, device_info, backend) + + +def estimate_performance( + model_info: ModelInfo, device_info: DeviceInfo, backend: str +) -> PerformanceMetrics: + """Get performance estimations.""" + output_parser = GenericInferenceOutputParser() + output_consumers = [output_parser, LogWriter()] + + generic_runner = get_generic_runner(device_info, backend) + generic_runner.run(model_info, output_consumers) + + if not output_parser.is_ready(): + missed_data = ",".join(output_parser.missed_keys()) + logger.debug("Unable to get performance metrics, missed data %s", missed_data) + raise Exception("Unable to get performance metrics, insufficient data") + + return PerformanceMetrics(**output_parser.result) + + +def get_backend_runner() -> BackendRunner: + """ + Return BackendRunner instance. + + Note: This is needed for the unit tests. + """ + return BackendRunner() diff --git a/src/mlia/tools/__init__.py b/src/mlia/backend/executor/__init__.py index 184e966..3d60372 100644 --- a/src/mlia/tools/__init__.py +++ b/src/mlia/backend/executor/__init__.py @@ -1,3 +1,3 @@ # SPDX-FileCopyrightText: Copyright 2022, Arm Limited and/or its affiliates. 
# SPDX-License-Identifier: Apache-2.0 -"""Tools module.""" +"""Backend module.""" diff --git a/src/mlia/backend/application.py b/src/mlia/backend/executor/application.py index a5d99f7..738ac4e 100644 --- a/src/mlia/backend/application.py +++ b/src/mlia/backend/executor/application.py @@ -9,18 +9,18 @@ from typing import Any from typing import cast from typing import List -from mlia.backend.common import Backend -from mlia.backend.common import ConfigurationException -from mlia.backend.common import get_backend_configs -from mlia.backend.common import get_backend_directories -from mlia.backend.common import load_application_configs -from mlia.backend.common import load_config -from mlia.backend.common import remove_backend -from mlia.backend.config import ApplicationConfig -from mlia.backend.config import ExtendedApplicationConfig -from mlia.backend.fs import get_backends_path -from mlia.backend.source import create_destination_and_install -from mlia.backend.source import get_source +from mlia.backend.executor.common import Backend +from mlia.backend.executor.common import ConfigurationException +from mlia.backend.executor.common import get_backend_configs +from mlia.backend.executor.common import get_backend_directories +from mlia.backend.executor.common import load_application_configs +from mlia.backend.executor.common import load_config +from mlia.backend.executor.common import remove_backend +from mlia.backend.executor.config import ApplicationConfig +from mlia.backend.executor.config import ExtendedApplicationConfig +from mlia.backend.executor.fs import get_backends_path +from mlia.backend.executor.source import create_destination_and_install +from mlia.backend.executor.source import get_source def get_available_application_directory_names() -> list[str]: diff --git a/src/mlia/backend/common.py b/src/mlia/backend/executor/common.py index 0f04553..48dbd4a 100644 --- a/src/mlia/backend/common.py +++ b/src/mlia/backend/executor/common.py @@ -19,14 +19,14 @@ from 
typing import Match from typing import NamedTuple from typing import Pattern -from mlia.backend.config import BackendConfig -from mlia.backend.config import BaseBackendConfig -from mlia.backend.config import NamedExecutionConfig -from mlia.backend.config import UserParamConfig -from mlia.backend.config import UserParamsConfig -from mlia.backend.fs import get_backends_path -from mlia.backend.fs import remove_resource -from mlia.backend.fs import ResourceType +from mlia.backend.executor.config import BackendConfig +from mlia.backend.executor.config import BaseBackendConfig +from mlia.backend.executor.config import NamedExecutionConfig +from mlia.backend.executor.config import UserParamConfig +from mlia.backend.executor.config import UserParamsConfig +from mlia.backend.executor.fs import get_backends_path +from mlia.backend.executor.fs import remove_resource +from mlia.backend.executor.fs import ResourceType BACKEND_CONFIG_FILE: Final[str] = "backend-config.json" diff --git a/src/mlia/backend/config.py b/src/mlia/backend/executor/config.py index dca53da..dca53da 100644 --- a/src/mlia/backend/config.py +++ b/src/mlia/backend/executor/config.py diff --git a/src/mlia/backend/execution.py b/src/mlia/backend/executor/execution.py index 5c8e53f..e253b16 100644 --- a/src/mlia/backend/execution.py +++ b/src/mlia/backend/executor/execution.py @@ -7,13 +7,13 @@ import logging import re from typing import cast -from mlia.backend.application import Application -from mlia.backend.application import get_application -from mlia.backend.common import Backend -from mlia.backend.common import ConfigurationException -from mlia.backend.common import Param -from mlia.backend.system import get_system -from mlia.backend.system import System +from mlia.backend.executor.application import Application +from mlia.backend.executor.application import get_application +from mlia.backend.executor.common import Backend +from mlia.backend.executor.common import ConfigurationException +from 
mlia.backend.executor.common import Param +from mlia.backend.executor.system import get_system +from mlia.backend.executor.system import System logger = logging.getLogger(__name__) diff --git a/src/mlia/backend/fs.py b/src/mlia/backend/executor/fs.py index 3fce19c..3fce19c 100644 --- a/src/mlia/backend/fs.py +++ b/src/mlia/backend/executor/fs.py diff --git a/src/mlia/backend/output_consumer.py b/src/mlia/backend/executor/output_consumer.py index 3c3b132..3c3b132 100644 --- a/src/mlia/backend/output_consumer.py +++ b/src/mlia/backend/executor/output_consumer.py diff --git a/src/mlia/backend/proc.py b/src/mlia/backend/executor/proc.py index 4838e47..39a0689 100644 --- a/src/mlia/backend/proc.py +++ b/src/mlia/backend/executor/proc.py @@ -21,7 +21,7 @@ from sh import CommandNotFound from sh import ErrorReturnCode from sh import RunningCommand -from mlia.backend.fs import valid_for_filename +from mlia.backend.executor.fs import valid_for_filename logger = logging.getLogger(__name__) diff --git a/src/mlia/backend/executor/runner.py b/src/mlia/backend/executor/runner.py new file mode 100644 index 0000000..2330fd9 --- /dev/null +++ b/src/mlia/backend/executor/runner.py @@ -0,0 +1,98 @@ +# SPDX-FileCopyrightText: Copyright 2022, Arm Limited and/or its affiliates. 
+# SPDX-License-Identifier: Apache-2.0 +"""Module for backend runner.""" +from __future__ import annotations + +from dataclasses import dataclass +from pathlib import Path + +from mlia.backend.executor.application import get_available_applications +from mlia.backend.executor.application import install_application +from mlia.backend.executor.execution import ExecutionContext +from mlia.backend.executor.execution import run_application +from mlia.backend.executor.system import get_available_systems +from mlia.backend.executor.system import install_system + + +@dataclass +class ExecutionParams: + """Application execution params.""" + + application: str + system: str + application_params: list[str] + system_params: list[str] + + +class BackendRunner: + """Backend runner.""" + + def __init__(self) -> None: + """Init BackendRunner instance.""" + + @staticmethod + def get_installed_systems() -> list[str]: + """Get list of the installed systems.""" + return [system.name for system in get_available_systems()] + + @staticmethod + def get_installed_applications(system: str | None = None) -> list[str]: + """Get list of the installed application.""" + return [ + app.name + for app in get_available_applications() + if system is None or app.can_run_on(system) + ] + + def is_application_installed(self, application: str, system: str) -> bool: + """Return true if requested application installed.""" + return application in self.get_installed_applications(system) + + def is_system_installed(self, system: str) -> bool: + """Return true if requested system installed.""" + return system in self.get_installed_systems() + + def systems_installed(self, systems: list[str]) -> bool: + """Check if all provided systems are installed.""" + if not systems: + return False + + installed_systems = self.get_installed_systems() + return all(system in installed_systems for system in systems) + + def applications_installed(self, applications: list[str]) -> bool: + """Check if all provided applications 
are installed.""" + if not applications: + return False + + installed_apps = self.get_installed_applications() + return all(app in installed_apps for app in applications) + + def all_installed(self, systems: list[str], apps: list[str]) -> bool: + """Check if all provided artifacts are installed.""" + return self.systems_installed(systems) and self.applications_installed(apps) + + @staticmethod + def install_system(system_path: Path) -> None: + """Install system.""" + install_system(system_path) + + @staticmethod + def install_application(app_path: Path) -> None: + """Install application.""" + install_application(app_path) + + @staticmethod + def run_application(execution_params: ExecutionParams) -> ExecutionContext: + """Run requested application.""" + ctx = run_application( + execution_params.application, + execution_params.application_params, + execution_params.system, + execution_params.system_params, + ) + return ctx + + @staticmethod + def _params(name: str, params: list[str]) -> list[str]: + return [p for item in [(name, param) for param in params] for p in item] diff --git a/src/mlia/backend/source.py b/src/mlia/backend/executor/source.py index c951eae..6abc49f 100644 --- a/src/mlia/backend/source.py +++ b/src/mlia/backend/executor/source.py @@ -11,13 +11,13 @@ from abc import abstractmethod from pathlib import Path from tarfile import TarFile -from mlia.backend.common import BACKEND_CONFIG_FILE -from mlia.backend.common import ConfigurationException -from mlia.backend.common import get_backend_config -from mlia.backend.common import is_backend_directory -from mlia.backend.common import load_config -from mlia.backend.config import BackendConfig -from mlia.backend.fs import copy_directory_content +from mlia.backend.executor.common import BACKEND_CONFIG_FILE +from mlia.backend.executor.common import ConfigurationException +from mlia.backend.executor.common import get_backend_config +from mlia.backend.executor.common import is_backend_directory +from 
mlia.backend.executor.common import load_config +from mlia.backend.executor.config import BackendConfig +from mlia.backend.executor.fs import copy_directory_content class Source(ABC): diff --git a/src/mlia/backend/system.py b/src/mlia/backend/executor/system.py index 0e51ab2..a5ecf19 100644 --- a/src/mlia/backend/system.py +++ b/src/mlia/backend/executor/system.py @@ -8,17 +8,17 @@ from typing import Any from typing import cast from typing import List -from mlia.backend.common import Backend -from mlia.backend.common import ConfigurationException -from mlia.backend.common import get_backend_configs -from mlia.backend.common import get_backend_directories -from mlia.backend.common import load_config -from mlia.backend.common import remove_backend -from mlia.backend.config import SystemConfig -from mlia.backend.fs import get_backends_path -from mlia.backend.proc import run_and_wait -from mlia.backend.source import create_destination_and_install -from mlia.backend.source import get_source +from mlia.backend.executor.common import Backend +from mlia.backend.executor.common import ConfigurationException +from mlia.backend.executor.common import get_backend_configs +from mlia.backend.executor.common import get_backend_directories +from mlia.backend.executor.common import load_config +from mlia.backend.executor.common import remove_backend +from mlia.backend.executor.config import SystemConfig +from mlia.backend.executor.fs import get_backends_path +from mlia.backend.executor.proc import run_and_wait +from mlia.backend.executor.source import create_destination_and_install +from mlia.backend.executor.source import get_source class System(Backend): diff --git a/src/mlia/tools/metadata/corstone.py b/src/mlia/backend/install.py index df2dcdb..eea3403 100644 --- a/src/mlia/tools/metadata/corstone.py +++ b/src/mlia/backend/install.py @@ -1,29 +1,22 @@ # SPDX-FileCopyrightText: Copyright 2022, Arm Limited and/or its affiliates. 
# SPDX-License-Identifier: Apache-2.0 -"""Module for Corstone based FVPs. - -The import of subprocess module raises a B404 bandit error. MLIA usage of -subprocess is needed and can be considered safe hence disabling the security -check. -""" +"""Module for installation process.""" from __future__ import annotations import logging import platform -import subprocess # nosec import tarfile +from abc import ABC +from abc import abstractmethod from dataclasses import dataclass from pathlib import Path from typing import Callable from typing import Iterable from typing import Optional +from typing import Union -import mlia.backend.manager as backend_manager -from mlia.backend.system import remove_system -from mlia.tools.metadata.common import DownloadAndInstall -from mlia.tools.metadata.common import Installation -from mlia.tools.metadata.common import InstallationType -from mlia.tools.metadata.common import InstallFromPath +from mlia.backend.executor.runner import BackendRunner +from mlia.backend.executor.system import remove_system from mlia.utils.download import DownloadArtifact from mlia.utils.filesystem import all_files_exist from mlia.utils.filesystem import all_paths_valid @@ -31,11 +24,124 @@ from mlia.utils.filesystem import copy_all from mlia.utils.filesystem import get_mlia_resources from mlia.utils.filesystem import temp_directory from mlia.utils.filesystem import working_directory +from mlia.utils.py_manager import get_package_manager logger = logging.getLogger(__name__) +# Mapping backend -> device_type -> system_name +_SUPPORTED_SYSTEMS = { + "Corstone-300": { + "ethos-u55": "Corstone-300: Cortex-M55+Ethos-U55", + "ethos-u65": "Corstone-300: Cortex-M55+Ethos-U65", + }, + "Corstone-310": { + "ethos-u55": "Corstone-310: Cortex-M85+Ethos-U55", + "ethos-u65": "Corstone-310: Cortex-M85+Ethos-U65", + }, +} + +# Mapping system_name -> application +_SYSTEM_TO_APP_MAP = { + "Corstone-300: Cortex-M55+Ethos-U55": "Generic Inference Runner: Ethos-U55", + 
"Corstone-300: Cortex-M55+Ethos-U65": "Generic Inference Runner: Ethos-U65", + "Corstone-310: Cortex-M85+Ethos-U55": "Generic Inference Runner: Ethos-U55", + "Corstone-310: Cortex-M85+Ethos-U65": "Generic Inference Runner: Ethos-U65", +} + + +def get_system_name(backend: str, device_type: str) -> str: + """Get the system name for the given backend and device type.""" + return _SUPPORTED_SYSTEMS[backend][device_type] + + +def get_application_name(system_name: str) -> str: + """Get application name for the provided system name.""" + return _SYSTEM_TO_APP_MAP[system_name] + + +def is_supported(backend: str, device_type: str | None = None) -> bool: + """Check if the backend (and optionally device type) is supported.""" + if device_type is None: + return backend in _SUPPORTED_SYSTEMS + + try: + get_system_name(backend, device_type) + return True + except KeyError: + return False + + +def supported_backends() -> list[str]: + """Get a list of all backends supported by the backend manager.""" + return list(_SUPPORTED_SYSTEMS.keys()) + + +def get_all_system_names(backend: str) -> list[str]: + """Get all systems supported by the backend.""" + return list(_SUPPORTED_SYSTEMS.get(backend, {}).values()) + + +def get_all_application_names(backend: str) -> list[str]: + """Get all applications supported by the backend.""" + app_set = {_SYSTEM_TO_APP_MAP[sys] for sys in get_all_system_names(backend)} + return list(app_set) + + +@dataclass +class InstallFromPath: + """Installation from the local path.""" + + backend_path: Path + + +@dataclass +class DownloadAndInstall: + """Download and install.""" + + eula_agreement: bool = True + + +InstallationType = Union[InstallFromPath, DownloadAndInstall] + + +class Installation(ABC): + """Base class for the installation process of the backends.""" + + @property + @abstractmethod + def name(self) -> str: + """Return name of the backend.""" + + @property + @abstractmethod + def description(self) -> str: + """Return description of the 
backend.""" + + @property + @abstractmethod + def could_be_installed(self) -> bool: + """Return true if backend could be installed in current environment.""" + + @property + @abstractmethod + def already_installed(self) -> bool: + """Return true if backend is already installed.""" + + @abstractmethod + def supports(self, install_type: InstallationType) -> bool: + """Return true if installation supports requested installation type.""" + + @abstractmethod + def install(self, install_type: InstallationType) -> None: + """Install the backend.""" + + @abstractmethod + def uninstall(self) -> None: + """Uninstall the backend.""" + + @dataclass class BackendInfo: """Backend information.""" @@ -75,8 +181,8 @@ class BackendMetadata: self.download_artifact = download_artifact self.supported_platforms = supported_platforms - self.expected_systems = backend_manager.get_all_system_names(name) - self.expected_apps = backend_manager.get_all_application_names(name) + self.expected_systems = get_all_system_names(name) + self.expected_apps = get_all_application_names(name) @property def expected_resources(self) -> Iterable[Path]: @@ -99,7 +205,7 @@ class BackendInstallation(Installation): def __init__( self, - backend_runner: backend_manager.BackendRunner, + backend_runner: BackendRunner, metadata: BackendMetadata, path_checker: PathChecker, backend_installer: BackendInstaller | None, @@ -288,130 +394,57 @@ class CompoundPathChecker: return next(first_resolved_backend_info, None) -class Corstone300Installer: - """Helper class that wraps Corstone 300 installation logic.""" +class PyPackageBackendInstallation(Installation): + """Backend based on the python package.""" - def __call__(self, eula_agreement: bool, dist_dir: Path) -> Path: - """Install Corstone-300 and return path to the models.""" - with working_directory(dist_dir): - install_dir = "corstone-300" - try: - fvp_install_cmd = [ - "./FVP_Corstone_SSE-300.sh", - "-q", - "-d", - install_dir, - ] - if not eula_agreement: - 
fvp_install_cmd += [ - "--nointeractive", - "--i-agree-to-the-contained-eula", - ] - - # The following line raises a B603 error for bandit. In this - # specific case, the input is pretty much static and cannot be - # changed byt the user hence disabling the security check for - # this instance - subprocess.check_call(fvp_install_cmd) # nosec - except subprocess.CalledProcessError as err: - raise Exception( - "Error occurred during Corstone-300 installation" - ) from err - - return dist_dir / install_dir - - -def get_corstone_300_installation() -> Installation: - """Get Corstone-300 installation.""" - corstone_300 = BackendInstallation( - backend_runner=backend_manager.BackendRunner(), - # pylint: disable=line-too-long - metadata=BackendMetadata( - name="Corstone-300", - description="Corstone-300 FVP", - system_config="backend_configs/systems/corstone-300/backend-config.json", - apps_resources=[], - fvp_dir_name="corstone_300", - download_artifact=DownloadArtifact( - name="Corstone-300 FVP", - url="https://developer.arm.com/-/media/Arm%20Developer%20Community/Downloads/OSS/FVP/Corstone-300/FVP_Corstone_SSE-300_11.16_26.tgz", - filename="FVP_Corstone_SSE-300_11.16_26.tgz", - version="11.16_26", - sha256_hash="e26139be756b5003a30d978c629de638aed1934d597dc24a17043d4708e934d7", - ), - supported_platforms=["Linux"], - ), - # pylint: enable=line-too-long - path_checker=CompoundPathChecker( - PackagePathChecker( - expected_files=[ - "models/Linux64_GCC-6.4/FVP_Corstone_SSE-300_Ethos-U55", - "models/Linux64_GCC-6.4/FVP_Corstone_SSE-300_Ethos-U65", - ], - backend_subfolder="models/Linux64_GCC-6.4", - ), - StaticPathChecker( - static_backend_path=Path("/opt/VHT"), - expected_files=[ - "VHT_Corstone_SSE-300_Ethos-U55", - "VHT_Corstone_SSE-300_Ethos-U65", - ], - copy_source=False, - system_config=( - "backend_configs/systems/corstone-300-vht/backend-config.json" - ), - ), - ), - backend_installer=Corstone300Installer(), - ) - - return corstone_300 - - -def 
get_corstone_310_installation() -> Installation: - """Get Corstone-310 installation.""" - corstone_310 = BackendInstallation( - backend_runner=backend_manager.BackendRunner(), - # pylint: disable=line-too-long - metadata=BackendMetadata( - name="Corstone-310", - description="Corstone-310 FVP", - system_config="backend_configs/systems/corstone-310/backend-config.json", - apps_resources=[], - fvp_dir_name="corstone_310", - download_artifact=None, - supported_platforms=["Linux"], - ), - # pylint: enable=line-too-long - path_checker=CompoundPathChecker( - PackagePathChecker( - expected_files=[ - "models/Linux64_GCC-9.3/FVP_Corstone_SSE-310", - "models/Linux64_GCC-9.3/FVP_Corstone_SSE-310_Ethos-U65", - ], - backend_subfolder="models/Linux64_GCC-9.3", - ), - StaticPathChecker( - static_backend_path=Path("/opt/VHT"), - expected_files=[ - "VHT_Corstone_SSE-310", - "VHT_Corstone_SSE-310_Ethos-U65", - ], - copy_source=False, - system_config=( - "backend_configs/systems/corstone-310-vht/backend-config.json" - ), - ), - ), - backend_installer=None, - ) - - return corstone_310 - - -def get_corstone_installations() -> list[Installation]: - """Get Corstone installations.""" - return [ - get_corstone_300_installation(), - get_corstone_310_installation(), - ] + def __init__( + self, + name: str, + description: str, + packages_to_install: list[str], + packages_to_uninstall: list[str], + expected_packages: list[str], + ) -> None: + """Init the backend installation.""" + self._name = name + self._description = description + self._packages_to_install = packages_to_install + self._packages_to_uninstall = packages_to_uninstall + self._expected_packages = expected_packages + + self.package_manager = get_package_manager() + + @property + def name(self) -> str: + """Return name of the backend.""" + return self._name + + @property + def description(self) -> str: + """Return description of the backend.""" + return self._description + + @property + def could_be_installed(self) -> bool: + 
"""Check if backend could be installed.""" + return True + + @property + def already_installed(self) -> bool: + """Check if backend already installed.""" + return self.package_manager.packages_installed(self._expected_packages) + + def supports(self, install_type: InstallationType) -> bool: + """Return true if installation supports requested installation type.""" + return isinstance(install_type, DownloadAndInstall) + + def install(self, install_type: InstallationType) -> None: + """Install the backend.""" + if not self.supports(install_type): + raise Exception(f"Unsupported installation type {install_type}") + + self.package_manager.install(self._packages_to_install) + + def uninstall(self) -> None: + """Uninstall the backend.""" + self.package_manager.uninstall(self._packages_to_uninstall) diff --git a/src/mlia/backend/manager.py b/src/mlia/backend/manager.py index 6a61ab0..c02dc6e 100644 --- a/src/mlia/backend/manager.py +++ b/src/mlia/backend/manager.py @@ -1,372 +1,271 @@ # SPDX-FileCopyrightText: Copyright 2022, Arm Limited and/or its affiliates. 
# SPDX-License-Identifier: Apache-2.0 -"""Module for backend integration.""" +"""Module for installation process.""" from __future__ import annotations import logging from abc import ABC from abc import abstractmethod -from dataclasses import dataclass from pathlib import Path -from typing import Literal +from typing import Callable -from mlia.backend.application import get_available_applications -from mlia.backend.application import install_application -from mlia.backend.execution import ExecutionContext -from mlia.backend.execution import run_application -from mlia.backend.output_consumer import Base64OutputConsumer -from mlia.backend.output_consumer import OutputConsumer -from mlia.backend.system import get_available_systems -from mlia.backend.system import install_system +from mlia.backend.install import DownloadAndInstall +from mlia.backend.install import Installation +from mlia.backend.install import InstallationType +from mlia.backend.install import InstallFromPath +from mlia.core.errors import ConfigurationError +from mlia.core.errors import InternalError +from mlia.utils.misc import yes logger = logging.getLogger(__name__) -# Mapping backend -> device_type -> system_name -_SUPPORTED_SYSTEMS = { - "Corstone-300": { - "ethos-u55": "Corstone-300: Cortex-M55+Ethos-U55", - "ethos-u65": "Corstone-300: Cortex-M55+Ethos-U65", - }, - "Corstone-310": { - "ethos-u55": "Corstone-310: Cortex-M85+Ethos-U55", - "ethos-u65": "Corstone-310: Cortex-M85+Ethos-U65", - }, -} +InstallationFilter = Callable[[Installation], bool] -# Mapping system_name -> application -_SYSTEM_TO_APP_MAP = { - "Corstone-300: Cortex-M55+Ethos-U55": "Generic Inference Runner: Ethos-U55", - "Corstone-300: Cortex-M55+Ethos-U65": "Generic Inference Runner: Ethos-U65", - "Corstone-310: Cortex-M85+Ethos-U55": "Generic Inference Runner: Ethos-U55", - "Corstone-310: Cortex-M85+Ethos-U65": "Generic Inference Runner: Ethos-U65", -} +class AlreadyInstalledFilter: + """Filter for already installed backends.""" 
-def get_system_name(backend: str, device_type: str) -> str: - """Get the system name for the given backend and device type.""" - return _SUPPORTED_SYSTEMS[backend][device_type] + def __call__(self, installation: Installation) -> bool: + """Installation filter.""" + return installation.already_installed -def is_supported(backend: str, device_type: str | None = None) -> bool: - """Check if the backend (and optionally device type) is supported.""" - if device_type is None: - return backend in _SUPPORTED_SYSTEMS +class ReadyForInstallationFilter: + """Filter for ready to be installed backends.""" - try: - get_system_name(backend, device_type) - return True - except KeyError: - return False + def __call__(self, installation: Installation) -> bool: + """Installation filter.""" + return installation.could_be_installed and not installation.already_installed -def supported_backends() -> list[str]: - """Get a list of all backends supported by the backend manager.""" - return list(_SUPPORTED_SYSTEMS.keys()) +class SupportsInstallTypeFilter: + """Filter backends that support certain type of the installation.""" + def __init__(self, installation_type: InstallationType) -> None: + """Init filter.""" + self.installation_type = installation_type -def get_all_system_names(backend: str) -> list[str]: - """Get all systems supported by the backend.""" - return list(_SUPPORTED_SYSTEMS.get(backend, {}).values()) + def __call__(self, installation: Installation) -> bool: + """Installation filter.""" + return installation.supports(self.installation_type) -def get_all_application_names(backend: str) -> list[str]: - """Get all applications supported by the backend.""" - app_set = {_SYSTEM_TO_APP_MAP[sys] for sys in get_all_system_names(backend)} - return list(app_set) +class SearchByNameFilter: + """Filter installation by name.""" + def __init__(self, backend_name: str | None) -> None: + """Init filter.""" + self.backend_name = backend_name -@dataclass -class DeviceInfo: - """Device 
information.""" - - device_type: Literal["ethos-u55", "ethos-u65"] - mac: int - - -@dataclass -class ModelInfo: - """Model info.""" - - model_path: Path - - -@dataclass -class PerformanceMetrics: - """Performance metrics parsed from generic inference output.""" - - npu_active_cycles: int - npu_idle_cycles: int - npu_total_cycles: int - npu_axi0_rd_data_beat_received: int - npu_axi0_wr_data_beat_written: int - npu_axi1_rd_data_beat_received: int - - -@dataclass -class ExecutionParams: - """Application execution params.""" - - application: str - system: str - application_params: list[str] - system_params: list[str] - - -class LogWriter(OutputConsumer): - """Redirect output to the logger.""" - - def feed(self, line: str) -> bool: - """Process line from the output.""" - logger.debug(line.strip()) - return False + def __call__(self, installation: Installation) -> bool: + """Installation filter.""" + return ( + not self.backend_name + or installation.name.casefold() == self.backend_name.casefold() + ) -class GenericInferenceOutputParser(Base64OutputConsumer): - """Generic inference app output parser.""" +class InstallationManager(ABC): + """Helper class for managing installations.""" - def __init__(self) -> None: - """Init generic inference output parser instance.""" - super().__init__() - self._map = { - "NPU ACTIVE": "npu_active_cycles", - "NPU IDLE": "npu_idle_cycles", - "NPU TOTAL": "npu_total_cycles", - "NPU AXI0_RD_DATA_BEAT_RECEIVED": "npu_axi0_rd_data_beat_received", - "NPU AXI0_WR_DATA_BEAT_WRITTEN": "npu_axi0_wr_data_beat_written", - "NPU AXI1_RD_DATA_BEAT_RECEIVED": "npu_axi1_rd_data_beat_received", - } + @abstractmethod + def install_from(self, backend_path: Path, backend_name: str, force: bool) -> None: + """Install backend from the local directory.""" - @property - def result(self) -> dict: - """Merge the raw results and map the names to the right output names.""" - merged_result = {} - for raw_result in self.parsed_output: - for profiling_result in 
raw_result: - for sample in profiling_result["samples"]: - name, values = (sample["name"], sample["value"]) - if name in merged_result: - raise KeyError( - f"Duplicate key '{name}' in base64 output.", - ) - new_name = self._map[name] - merged_result[new_name] = values[0] - return merged_result + @abstractmethod + def download_and_install( + self, backend_name: str, eula_agreement: bool, force: bool + ) -> None: + """Download and install backends.""" - def is_ready(self) -> bool: - """Return true if all expected data has been parsed.""" - return set(self.result.keys()) == set(self._map.values()) + @abstractmethod + def show_env_details(self) -> None: + """Show environment details.""" - def missed_keys(self) -> set[str]: - """Return a set of the keys that have not been found in the output.""" - return set(self._map.values()) - set(self.result.keys()) + @abstractmethod + def backend_installed(self, backend_name: str) -> bool: + """Return true if requested backend installed.""" + @abstractmethod + def uninstall(self, backend_name: str) -> None: + """Delete the existing installation.""" -class BackendRunner: - """Backend runner.""" - def __init__(self) -> None: - """Init BackendRunner instance.""" +class InstallationFiltersMixin: + """Mixin for filtering installation based on different conditions.""" - @staticmethod - def get_installed_systems() -> list[str]: - """Get list of the installed systems.""" - return [system.name for system in get_available_systems()] + installations: list[Installation] - @staticmethod - def get_installed_applications(system: str | None = None) -> list[str]: - """Get list of the installed application.""" + def filter_by(self, *filters: InstallationFilter) -> list[Installation]: + """Filter installations.""" return [ - app.name - for app in get_available_applications() - if system is None or app.can_run_on(system) + installation + for installation in self.installations + if all(filter_(installation) for filter_ in filters) ] - def 
is_application_installed(self, application: str, system: str) -> bool: - """Return true if requested application installed.""" - return application in self.get_installed_applications(system) - - def is_system_installed(self, system: str) -> bool: - """Return true if requested system installed.""" - return system in self.get_installed_systems() - - def systems_installed(self, systems: list[str]) -> bool: - """Check if all provided systems are installed.""" - if not systems: - return False - - installed_systems = self.get_installed_systems() - return all(system in installed_systems for system in systems) - - def applications_installed(self, applications: list[str]) -> bool: - """Check if all provided applications are installed.""" - if not applications: - return False - - installed_apps = self.get_installed_applications() - return all(app in installed_apps for app in applications) + def find_by_name(self, backend_name: str) -> list[Installation]: + """Return list of the backends filtered by name.""" + return self.filter_by(SearchByNameFilter(backend_name)) - def all_installed(self, systems: list[str], apps: list[str]) -> bool: - """Check if all provided artifacts are installed.""" - return self.systems_installed(systems) and self.applications_installed(apps) - - @staticmethod - def install_system(system_path: Path) -> None: - """Install system.""" - install_system(system_path) - - @staticmethod - def install_application(app_path: Path) -> None: - """Install application.""" - install_application(app_path) - - @staticmethod - def run_application(execution_params: ExecutionParams) -> ExecutionContext: - """Run requested application.""" - ctx = run_application( - execution_params.application, - execution_params.application_params, - execution_params.system, - execution_params.system_params, + def already_installed(self, backend_name: str = None) -> list[Installation]: + """Return list of backends that are already installed.""" + return self.filter_by( + 
AlreadyInstalledFilter(), + SearchByNameFilter(backend_name), ) - return ctx - @staticmethod - def _params(name: str, params: list[str]) -> list[str]: - return [p for item in [(name, param) for param in params] for p in item] + def ready_for_installation(self) -> list[Installation]: + """Return list of the backends that could be installed.""" + return self.filter_by(ReadyForInstallationFilter()) -class GenericInferenceRunner(ABC): - """Abstract class for generic inference runner.""" +class DefaultInstallationManager(InstallationManager, InstallationFiltersMixin): + """Interactive installation manager.""" - def __init__(self, backend_runner: BackendRunner): - """Init generic inference runner instance.""" - self.backend_runner = backend_runner - - def run( - self, model_info: ModelInfo, output_consumers: list[OutputConsumer] + def __init__( + self, installations: list[Installation], noninteractive: bool = False ) -> None: - """Run generic inference for the provided device/model.""" - execution_params = self.get_execution_params(model_info) - - ctx = self.backend_runner.run_application(execution_params) - if ctx.stdout is not None: - ctx.stdout = self.consume_output(ctx.stdout, output_consumers) - - @abstractmethod - def get_execution_params(self, model_info: ModelInfo) -> ExecutionParams: - """Get execution params for the provided model.""" - - def check_system_and_application(self, system_name: str, app_name: str) -> None: - """Check if requested system and application installed.""" - if not self.backend_runner.is_system_installed(system_name): - raise Exception(f"System {system_name} is not installed") - - if not self.backend_runner.is_application_installed(app_name, system_name): - raise Exception( - f"Application {app_name} for the system {system_name} " - "is not installed" + """Init the manager.""" + self.installations = installations + self.noninteractive = noninteractive + + def _install( + self, + backend_name: str, + install_type: InstallationType, + 
prompt: Callable[[Installation], str], + force: bool, + ) -> None: + """Check metadata and install backend.""" + installs = self.find_by_name(backend_name) + + if not installs: + logger.info("Unknown backend '%s'.", backend_name) + logger.info( + "Please run command 'mlia-backend list' to get list of " + "supported backend names." ) - @staticmethod - def consume_output(output: bytearray, consumers: list[OutputConsumer]) -> bytearray: - """ - Pass program's output to the consumers and filter it. + return + + if len(installs) > 1: + raise InternalError(f"More than one backend with name {backend_name} found") + + installation = installs[0] + if not installation.supports(install_type): + if isinstance(install_type, InstallFromPath): + logger.info( + "Backend '%s' could not be installed using path '%s'.", + installation.name, + install_type.backend_path, + ) + logger.info( + "Please check that '%s' is a valid path to the installed backend.", + install_type.backend_path, + ) + else: + logger.info( + "Backend '%s' could not be downloaded and installed", + installation.name, + ) + logger.info( + "Please refer to the project's documentation for more details." + ) + + return + + if installation.already_installed and not force: + logger.info("Backend '%s' is already installed.", installation.name) + logger.info("Please, consider using --force option.") + return + + proceed = self.noninteractive or yes(prompt(installation)) + if not proceed: + logger.info("%s installation canceled.", installation.name) + return + + if installation.already_installed and force: + logger.info( + "Force installing %s, so delete the existing " + "installed backend first.", + installation.name, + ) + installation.uninstall() - Returns the filtered output. 
- """ - filtered_output = bytearray() - for line_bytes in output.splitlines(): - line = line_bytes.decode("utf-8") - remove_line = False - for consumer in consumers: - if consumer.feed(line): - remove_line = True - if not remove_line: - filtered_output.extend(line_bytes) + installation.install(install_type) + logger.info("%s successfully installed.", installation.name) - return filtered_output + def install_from( + self, backend_path: Path, backend_name: str, force: bool = False + ) -> None: + """Install from the provided directory.""" + def prompt(install: Installation) -> str: + return ( + f"{install.name} was found in {backend_path}. " + "Would you like to install it?" + ) -class GenericInferenceRunnerEthosU(GenericInferenceRunner): - """Generic inference runner on U55/65.""" + install_type = InstallFromPath(backend_path) + self._install(backend_name, install_type, prompt, force) - def __init__( - self, backend_runner: BackendRunner, device_info: DeviceInfo, backend: str + def download_and_install( + self, backend_name: str, eula_agreement: bool = True, force: bool = False ) -> None: - """Init generic inference runner instance.""" - super().__init__(backend_runner) + """Download and install available backends.""" - system_name, app_name = self.resolve_system_and_app(device_info, backend) - self.system_name = system_name - self.app_name = app_name - self.device_info = device_info + def prompt(install: Installation) -> str: + return f"Would you like to download and install {install.name}?" 
- @staticmethod - def resolve_system_and_app( - device_info: DeviceInfo, backend: str - ) -> tuple[str, str]: - """Find appropriate system and application for the provided device/backend.""" - try: - system_name = get_system_name(backend, device_info.device_type) - except KeyError as ex: - raise RuntimeError( - f"Unsupported device {device_info.device_type} " - f"for backend {backend}" - ) from ex - - try: - app_name = _SYSTEM_TO_APP_MAP[system_name] - except KeyError as err: - raise RuntimeError(f"System {system_name} is not installed") from err - - return system_name, app_name - - def get_execution_params(self, model_info: ModelInfo) -> ExecutionParams: - """Get execution params for Ethos-U55/65.""" - self.check_system_and_application(self.system_name, self.app_name) - - system_params = [ - f"mac={self.device_info.mac}", - f"input_file={model_info.model_path.absolute()}", - ] + install_type = DownloadAndInstall(eula_agreement=eula_agreement) + self._install(backend_name, install_type, prompt, force) - return ExecutionParams( - self.app_name, - self.system_name, - [], - system_params, - ) + def show_env_details(self) -> None: + """Print current state of the execution environment.""" + if installed := self.already_installed(): + self._print_installation_list("Installed backends:", installed) + + if could_be_installed := self.ready_for_installation(): + self._print_installation_list( + "Following backends could be installed:", + could_be_installed, + new_section=bool(installed), + ) + if not installed and not could_be_installed: + logger.info("No backends installed") -def get_generic_runner(device_info: DeviceInfo, backend: str) -> GenericInferenceRunner: - """Get generic runner for provided device and backend.""" - backend_runner = get_backend_runner() - return GenericInferenceRunnerEthosU(backend_runner, device_info, backend) + @staticmethod + def _print_installation_list( + header: str, installations: list[Installation], new_section: bool = False + ) -> None: + 
"""Print list of the installations.""" + logger.info("%s%s\n", "\n" if new_section else "", header) + for installation in installations: + logger.info(" - %s", installation.name) -def estimate_performance( - model_info: ModelInfo, device_info: DeviceInfo, backend: str -) -> PerformanceMetrics: - """Get performance estimations.""" - output_parser = GenericInferenceOutputParser() - output_consumers = [output_parser, LogWriter()] + def uninstall(self, backend_name: str) -> None: + """Uninstall the backend with name backend_name.""" + installations = self.already_installed(backend_name) - generic_runner = get_generic_runner(device_info, backend) - generic_runner.run(model_info, output_consumers) + if not installations: + raise ConfigurationError(f"Backend '{backend_name}' is not installed") - if not output_parser.is_ready(): - missed_data = ",".join(output_parser.missed_keys()) - logger.debug("Unable to get performance metrics, missed data %s", missed_data) - raise Exception("Unable to get performance metrics, insufficient data") + if len(installations) != 1: + raise InternalError( + f"More than one installed backend with name {backend_name} found" + ) - return PerformanceMetrics(**output_parser.result) + installation = installations[0] + installation.uninstall() + logger.info("%s successfully uninstalled.", installation.name) -def get_backend_runner() -> BackendRunner: - """ - Return BackendRunner instance. + def backend_installed(self, backend_name: str) -> bool: + """Return true if requested backend installed.""" + installations = self.already_installed(backend_name) - Note: This is needed for the unit tests. - """ - return BackendRunner() + return len(installations) == 1 diff --git a/src/mlia/backend/tosa_checker/__init__.py b/src/mlia/backend/tosa_checker/__init__.py new file mode 100644 index 0000000..cec210d --- /dev/null +++ b/src/mlia/backend/tosa_checker/__init__.py @@ -0,0 +1,3 @@ +# SPDX-FileCopyrightText: Copyright 2022, Arm Limited and/or its affiliates. 
+# SPDX-License-Identifier: Apache-2.0 +"""TOSA checker backend module.""" diff --git a/src/mlia/backend/tosa_checker/install.py b/src/mlia/backend/tosa_checker/install.py new file mode 100644 index 0000000..72454bc --- /dev/null +++ b/src/mlia/backend/tosa_checker/install.py @@ -0,0 +1,19 @@ +# SPDX-FileCopyrightText: Copyright 2022, Arm Limited and/or its affiliates. +# SPDX-License-Identifier: Apache-2.0 +"""Module for python package based installations.""" +from __future__ import annotations + +from mlia.backend.install import Installation +from mlia.backend.install import PyPackageBackendInstallation + + +def get_tosa_backend_installation() -> Installation: + """Get TOSA backend installation.""" + return PyPackageBackendInstallation( + name="tosa-checker", + description="Tool to check if a ML model is compatible " + "with the TOSA specification", + packages_to_install=["mlia[tosa]"], + packages_to_uninstall=["tosa-checker"], + expected_packages=["tosa-checker"], + ) diff --git a/src/mlia/backend/vela/__init__.py b/src/mlia/backend/vela/__init__.py new file mode 100644 index 0000000..6ea0c21 --- /dev/null +++ b/src/mlia/backend/vela/__init__.py @@ -0,0 +1,3 @@ +# SPDX-FileCopyrightText: Copyright 2022, Arm Limited and/or its affiliates. +# SPDX-License-Identifier: Apache-2.0 +"""Vela backend module.""" diff --git a/src/mlia/backend/vela/compat.py b/src/mlia/backend/vela/compat.py new file mode 100644 index 0000000..3ec42d1 --- /dev/null +++ b/src/mlia/backend/vela/compat.py @@ -0,0 +1,158 @@ +# SPDX-FileCopyrightText: Copyright 2022, Arm Limited and/or its affiliates. 
+# SPDX-License-Identifier: Apache-2.0 +"""Vela operator compatibility module.""" +from __future__ import annotations + +import itertools +import logging +from dataclasses import dataclass +from pathlib import Path + +from ethosu.vela.operation import Op +from ethosu.vela.tflite_mapping import optype_to_builtintype +from ethosu.vela.tflite_model_semantic import TFLiteSemantic +from ethosu.vela.tflite_supported_operators import TFLiteSupportedOperators +from ethosu.vela.vela import generate_supported_ops + +from mlia.backend.vela.compiler import VelaCompiler +from mlia.backend.vela.compiler import VelaCompilerOptions +from mlia.utils.logging import redirect_output + + +logger = logging.getLogger(__name__) + +VELA_INTERNAL_OPS = (Op.Placeholder, Op.SubgraphInput, Op.Const) + + +@dataclass +class NpuSupported: + """Operator's npu supported attribute.""" + + supported: bool + reasons: list[tuple[str, str]] + + +@dataclass +class Operator: + """Model operator.""" + + name: str + op_type: str + run_on_npu: NpuSupported + + @property + def cpu_only(self) -> bool: + """Return true if operator is CPU only.""" + cpu_only_reasons = [("CPU only operator", "")] + return ( + not self.run_on_npu.supported + and self.run_on_npu.reasons == cpu_only_reasons + ) + + +@dataclass +class Operators: + """Model's operators.""" + + ops: list[Operator] + + @property + def npu_supported_ratio(self) -> float: + """Return NPU supported ratio.""" + total = self.total_number + npu_supported = self.npu_supported_number + + if total == 0 or npu_supported == 0: + return 0 + + return npu_supported / total + + @property + def npu_unsupported_ratio(self) -> float: + """Return NPU unsupported ratio.""" + return 1 - self.npu_supported_ratio + + @property + def total_number(self) -> int: + """Return total number of operators.""" + return len(self.ops) + + @property + def npu_supported_number(self) -> int: + """Return number of npu supported operators.""" + return sum(op.run_on_npu.supported for op in 
self.ops) + + +def supported_operators( + model_path: Path, compiler_options: VelaCompilerOptions +) -> Operators: + """Return list of model's operators.""" + logger.debug("Check supported operators for the model %s", model_path) + + vela_compiler = VelaCompiler(compiler_options) + initial_model = vela_compiler.read_model(model_path) + + return Operators( + [ + Operator(op.name, optype_to_builtintype(op.type), run_on_npu(op)) + for sg in initial_model.nng.subgraphs + for op in sg.get_all_ops() + if op.type not in VELA_INTERNAL_OPS + ] + ) + + +def run_on_npu(operator: Op) -> NpuSupported: + """Return information if operator can run on NPU. + + Vela does a number of checks that can help establish whether + a particular operator is supported to run on NPU. + + There are two groups of checks: + - general TensorFlow Lite constraints + - operator specific constraints + + If an operator is not supported on NPU then this function + will return the reason of that. + + The reason is split in two parts: + - general description of why the operator cannot be placed on NPU + - details on the particular operator + """ + semantic_checker = TFLiteSemantic() + semantic_constraints = itertools.chain( + semantic_checker.generic_constraints, + semantic_checker.specific_constraints[operator.type], + ) + + for constraint in semantic_constraints: + op_valid, op_reason = constraint(operator) + if not op_valid: + return NpuSupported(False, [(constraint.__doc__, op_reason)]) + + if operator.type not in TFLiteSupportedOperators.supported_operators: + reasons = ( + [("CPU only operator", "")] + if operator.type not in VELA_INTERNAL_OPS + else [] + ) + + return NpuSupported(False, reasons) + + tflite_supported_operators = TFLiteSupportedOperators() + operation_constraints = itertools.chain( + tflite_supported_operators.generic_constraints, + tflite_supported_operators.specific_constraints[operator.type], + ) + for constraint in operation_constraints: + op_valid, op_reason = 
constraint(operator) + if not op_valid: + return NpuSupported(False, [(constraint.__doc__, op_reason)]) + + return NpuSupported(True, []) + + +def generate_supported_operators_report() -> None: + """Generate supported operators report in current working directory.""" + with redirect_output(logger): + generate_supported_ops() diff --git a/src/mlia/tools/vela_wrapper.py b/src/mlia/backend/vela/compiler.py index 00d2f2c..3d3847a 100644 --- a/src/mlia/tools/vela_wrapper.py +++ b/src/mlia/backend/vela/compiler.py @@ -1,9 +1,8 @@ # SPDX-FileCopyrightText: Copyright 2022, Arm Limited and/or its affiliates. # SPDX-License-Identifier: Apache-2.0 -"""Vela wrapper module.""" +"""Vela compiler wrapper module.""" from __future__ import annotations -import itertools import logging import sys from dataclasses import dataclass @@ -11,7 +10,6 @@ from pathlib import Path from typing import Any from typing import Literal -import numpy as np from ethosu.vela.architecture_features import ArchitectureFeatures from ethosu.vela.compiler_driver import compiler_driver from ethosu.vela.compiler_driver import CompilerOptions @@ -20,106 +18,18 @@ from ethosu.vela.model_reader import ModelReaderOptions from ethosu.vela.model_reader import read_model from ethosu.vela.nn_graph import Graph from ethosu.vela.nn_graph import NetworkType -from ethosu.vela.npu_performance import PassCycles from ethosu.vela.operation import CustomType -from ethosu.vela.operation import Op from ethosu.vela.scheduler import OptimizationStrategy from ethosu.vela.scheduler import SchedulerOptions from ethosu.vela.tensor import BandwidthDirection from ethosu.vela.tensor import MemArea from ethosu.vela.tensor import Tensor -from ethosu.vela.tflite_mapping import optype_to_builtintype -from ethosu.vela.tflite_model_semantic import TFLiteSemantic -from ethosu.vela.tflite_supported_operators import TFLiteSupportedOperators from ethosu.vela.tflite_writer import write_tflite -from ethosu.vela.vela import generate_supported_ops 
from mlia.utils.logging import redirect_output - logger = logging.getLogger(__name__) -VELA_INTERNAL_OPS = (Op.Placeholder, Op.SubgraphInput, Op.Const) - - -@dataclass -class PerformanceMetrics: # pylint: disable=too-many-instance-attributes - """Contains all the performance metrics Vela generates in a run.""" - - npu_cycles: int - sram_access_cycles: int - dram_access_cycles: int - on_chip_flash_access_cycles: int - off_chip_flash_access_cycles: int - total_cycles: int - batch_inference_time: float - inferences_per_second: float - batch_size: int - unknown_memory_area_size: int - sram_memory_area_size: int - dram_memory_area_size: int - on_chip_flash_memory_area_size: int - off_chip_flash_memory_area_size: int - - -@dataclass -class NpuSupported: - """Operator's npu supported attribute.""" - - supported: bool - reasons: list[tuple[str, str]] - - -@dataclass -class Operator: - """Model operator.""" - - name: str - op_type: str - run_on_npu: NpuSupported - - @property - def cpu_only(self) -> bool: - """Return true if operator is CPU only.""" - cpu_only_reasons = [("CPU only operator", "")] - return ( - not self.run_on_npu.supported - and self.run_on_npu.reasons == cpu_only_reasons - ) - - -@dataclass -class Operators: - """Model's operators.""" - - ops: list[Operator] - - @property - def npu_supported_ratio(self) -> float: - """Return NPU supported ratio.""" - total = self.total_number - npu_supported = self.npu_supported_number - - if total == 0 or npu_supported == 0: - return 0 - - return npu_supported / total - - @property - def npu_unsupported_ratio(self) -> float: - """Return NPU unsupported ratio.""" - return 1 - self.npu_supported_ratio - - @property - def total_number(self) -> int: - """Return total number of operators.""" - return len(self.ops) - - @property - def npu_supported_number(self) -> int: - """Return number of npu supported operators.""" - return sum(op.run_on_npu.supported for op in self.ops) - @dataclass class Model: @@ -347,30 +257,6 @@ def 
resolve_compiler_config( return vela_compiler.get_config() -def estimate_performance( - model_path: Path, compiler_options: VelaCompilerOptions -) -> PerformanceMetrics: - """Return performance estimations for the model/device. - - Logic for this function comes from Vela module stats_writer.py - """ - logger.debug( - "Estimate performance for the model %s on %s", - model_path, - compiler_options.accelerator_config, - ) - - vela_compiler = VelaCompiler(compiler_options) - - initial_model = vela_compiler.read_model(model_path) - if initial_model.optimized: - raise Exception("Unable to estimate performance for the given optimized model") - - optimized_model = vela_compiler.compile_model(initial_model) - - return _performance_metrics(optimized_model) - - def optimize_model( model_path: Path, compiler_options: VelaCompilerOptions, output_model_path: Path ) -> None: @@ -386,112 +272,3 @@ def optimize_model( logger.debug("Save optimized model into %s", output_model_path) optimized_model.save(output_model_path) - - -def _performance_metrics(optimized_model: OptimizedModel) -> PerformanceMetrics: - """Return performance metrics for optimized model.""" - cycles = optimized_model.nng.cycles - - def memory_usage(mem_area: MemArea) -> int: - """Get memory usage for the proviced memory area type.""" - memory_used: dict[MemArea, int] = optimized_model.nng.memory_used - bandwidths = optimized_model.nng.bandwidths - - return memory_used.get(mem_area, 0) if np.sum(bandwidths[mem_area]) > 0 else 0 - - midpoint_fps = np.nan - midpoint_inference_time = cycles[PassCycles.Total] / optimized_model.arch.core_clock - if midpoint_inference_time > 0: - midpoint_fps = 1 / midpoint_inference_time - - return PerformanceMetrics( - npu_cycles=int(cycles[PassCycles.Npu]), - sram_access_cycles=int(cycles[PassCycles.SramAccess]), - dram_access_cycles=int(cycles[PassCycles.DramAccess]), - on_chip_flash_access_cycles=int(cycles[PassCycles.OnChipFlashAccess]), - 
off_chip_flash_access_cycles=int(cycles[PassCycles.OffChipFlashAccess]), - total_cycles=int(cycles[PassCycles.Total]), - batch_inference_time=midpoint_inference_time * 1000, - inferences_per_second=midpoint_fps, - batch_size=optimized_model.nng.batch_size, - unknown_memory_area_size=memory_usage(MemArea.Unknown), - sram_memory_area_size=memory_usage(MemArea.Sram), - dram_memory_area_size=memory_usage(MemArea.Dram), - on_chip_flash_memory_area_size=memory_usage(MemArea.OnChipFlash), - off_chip_flash_memory_area_size=memory_usage(MemArea.OffChipFlash), - ) - - -def supported_operators( - model_path: Path, compiler_options: VelaCompilerOptions -) -> Operators: - """Return list of model's operators.""" - logger.debug("Check supported operators for the model %s", model_path) - - vela_compiler = VelaCompiler(compiler_options) - initial_model = vela_compiler.read_model(model_path) - - return Operators( - [ - Operator(op.name, optype_to_builtintype(op.type), run_on_npu(op)) - for sg in initial_model.nng.subgraphs - for op in sg.get_all_ops() - if op.type not in VELA_INTERNAL_OPS - ] - ) - - -def run_on_npu(operator: Op) -> NpuSupported: - """Return information if operator can run on NPU. - - Vela does a number of checks that can help establish whether - a particular operator is supported to run on NPU. - - There are two groups of checks: - - general TensorFlow Lite constraints - - operator specific constraints - - If an operator is not supported on NPU then this function - will return the reason of that. 
- - The reason is split in two parts: - - general description of why the operator cannot be placed on NPU - - details on the particular operator - """ - semantic_checker = TFLiteSemantic() - semantic_constraints = itertools.chain( - semantic_checker.generic_constraints, - semantic_checker.specific_constraints[operator.type], - ) - - for constraint in semantic_constraints: - op_valid, op_reason = constraint(operator) - if not op_valid: - return NpuSupported(False, [(constraint.__doc__, op_reason)]) - - if operator.type not in TFLiteSupportedOperators.supported_operators: - reasons = ( - [("CPU only operator", "")] - if operator.type not in VELA_INTERNAL_OPS - else [] - ) - - return NpuSupported(False, reasons) - - tflite_supported_operators = TFLiteSupportedOperators() - operation_constraints = itertools.chain( - tflite_supported_operators.generic_constraints, - tflite_supported_operators.specific_constraints[operator.type], - ) - for constraint in operation_constraints: - op_valid, op_reason = constraint(operator) - if not op_valid: - return NpuSupported(False, [(constraint.__doc__, op_reason)]) - - return NpuSupported(True, []) - - -def generate_supported_operators_report() -> None: - """Generate supported operators report in current working directory.""" - with redirect_output(logger): - generate_supported_ops() diff --git a/src/mlia/backend/vela/performance.py b/src/mlia/backend/vela/performance.py new file mode 100644 index 0000000..ccd2f6f --- /dev/null +++ b/src/mlia/backend/vela/performance.py @@ -0,0 +1,97 @@ +# SPDX-FileCopyrightText: Copyright 2022, Arm Limited and/or its affiliates. 
+# SPDX-License-Identifier: Apache-2.0 +"""Vela performance module.""" +from __future__ import annotations + +import logging +from dataclasses import dataclass +from pathlib import Path + +import numpy as np +from ethosu.vela.npu_performance import PassCycles +from ethosu.vela.tensor import MemArea + +from mlia.backend.vela.compiler import OptimizedModel +from mlia.backend.vela.compiler import VelaCompiler +from mlia.backend.vela.compiler import VelaCompilerOptions + + +logger = logging.getLogger(__name__) + + +@dataclass +class PerformanceMetrics: # pylint: disable=too-many-instance-attributes + """Contains all the performance metrics Vela generates in a run.""" + + npu_cycles: int + sram_access_cycles: int + dram_access_cycles: int + on_chip_flash_access_cycles: int + off_chip_flash_access_cycles: int + total_cycles: int + batch_inference_time: float + inferences_per_second: float + batch_size: int + unknown_memory_area_size: int + sram_memory_area_size: int + dram_memory_area_size: int + on_chip_flash_memory_area_size: int + off_chip_flash_memory_area_size: int + + +def estimate_performance( + model_path: Path, compiler_options: VelaCompilerOptions +) -> PerformanceMetrics: + """Return performance estimations for the model/device. 
+
+    Logic for this function comes from Vela module stats_writer.py
+    """
+    logger.debug(
+        "Estimate performance for the model %s on %s",
+        model_path,
+        compiler_options.accelerator_config,
+    )
+
+    vela_compiler = VelaCompiler(compiler_options)
+
+    initial_model = vela_compiler.read_model(model_path)
+    if initial_model.optimized:
+        raise Exception("Unable to estimate performance for the given optimized model")
+
+    optimized_model = vela_compiler.compile_model(initial_model)
+
+    return _performance_metrics(optimized_model)
+
+
+def _performance_metrics(optimized_model: OptimizedModel) -> PerformanceMetrics:
+    """Return performance metrics for optimized model."""
+    cycles = optimized_model.nng.cycles
+
+    def memory_usage(mem_area: MemArea) -> int:
+        """Get memory usage for the provided memory area type."""
+        memory_used: dict[MemArea, int] = optimized_model.nng.memory_used
+        bandwidths = optimized_model.nng.bandwidths
+
+        return memory_used.get(mem_area, 0) if np.sum(bandwidths[mem_area]) > 0 else 0
+
+    midpoint_fps = np.nan
+    midpoint_inference_time = cycles[PassCycles.Total] / optimized_model.arch.core_clock
+    if midpoint_inference_time > 0:
+        midpoint_fps = 1 / midpoint_inference_time
+
+    return PerformanceMetrics(
+        npu_cycles=int(cycles[PassCycles.Npu]),
+        sram_access_cycles=int(cycles[PassCycles.SramAccess]),
+        dram_access_cycles=int(cycles[PassCycles.DramAccess]),
+        on_chip_flash_access_cycles=int(cycles[PassCycles.OnChipFlashAccess]),
+        off_chip_flash_access_cycles=int(cycles[PassCycles.OffChipFlashAccess]),
+        total_cycles=int(cycles[PassCycles.Total]),
+        batch_inference_time=midpoint_inference_time * 1000,
+        inferences_per_second=midpoint_fps,
+        batch_size=optimized_model.nng.batch_size,
+        unknown_memory_area_size=memory_usage(MemArea.Unknown),
+        sram_memory_area_size=memory_usage(MemArea.Sram),
+        dram_memory_area_size=memory_usage(MemArea.Dram),
+        on_chip_flash_memory_area_size=memory_usage(MemArea.OnChipFlash),
+        
off_chip_flash_memory_area_size=memory_usage(MemArea.OffChipFlash), + ) diff --git a/src/mlia/cli/config.py b/src/mlia/cli/config.py index 6ea9bb4..2d694dc 100644 --- a/src/mlia/cli/config.py +++ b/src/mlia/cli/config.py @@ -6,18 +6,19 @@ from __future__ import annotations import logging from functools import lru_cache -import mlia.backend.manager as backend_manager -from mlia.tools.metadata.common import DefaultInstallationManager -from mlia.tools.metadata.common import InstallationManager -from mlia.tools.metadata.corstone import get_corstone_installations -from mlia.tools.metadata.py_package import get_pypackage_backend_installations +from mlia.backend.corstone.install import get_corstone_installations +from mlia.backend.install import supported_backends +from mlia.backend.manager import DefaultInstallationManager +from mlia.backend.manager import InstallationManager +from mlia.backend.tosa_checker.install import get_tosa_backend_installation logger = logging.getLogger(__name__) def get_installation_manager(noninteractive: bool = False) -> InstallationManager: """Return installation manager.""" - backends = get_corstone_installations() + get_pypackage_backend_installations() + backends = get_corstone_installations() + backends.append(get_tosa_backend_installation()) return DefaultInstallationManager(backends, noninteractive=noninteractive) @@ -31,7 +32,7 @@ def get_available_backends() -> list[str]: manager = get_installation_manager() available_backends.extend( backend - for backend in backend_manager.supported_backends() + for backend in supported_backends() if manager.backend_installed(backend) ) diff --git a/src/mlia/devices/ethosu/config.py b/src/mlia/devices/ethosu/config.py index e44dcdc..f2e867e 100644 --- a/src/mlia/devices/ethosu/config.py +++ b/src/mlia/devices/ethosu/config.py @@ -6,9 +6,9 @@ from __future__ import annotations import logging from typing import Any +from mlia.backend.vela.compiler import resolve_compiler_config +from 
mlia.backend.vela.compiler import VelaCompilerOptions from mlia.devices.config import IPConfiguration -from mlia.tools.vela_wrapper import resolve_compiler_config -from mlia.tools.vela_wrapper import VelaCompilerOptions from mlia.utils.filesystem import get_profile from mlia.utils.filesystem import get_vela_config diff --git a/src/mlia/devices/ethosu/data_analysis.py b/src/mlia/devices/ethosu/data_analysis.py index 70b6f65..db89a5f 100644 --- a/src/mlia/devices/ethosu/data_analysis.py +++ b/src/mlia/devices/ethosu/data_analysis.py @@ -6,12 +6,12 @@ from __future__ import annotations from dataclasses import dataclass from functools import singledispatchmethod +from mlia.backend.vela.compat import Operators from mlia.core.common import DataItem from mlia.core.data_analysis import Fact from mlia.core.data_analysis import FactExtractor from mlia.devices.ethosu.performance import OptimizationPerformanceMetrics from mlia.nn.tensorflow.optimizations.select import OptimizationSettings -from mlia.tools.vela_wrapper import Operators @dataclass diff --git a/src/mlia/devices/ethosu/data_collection.py b/src/mlia/devices/ethosu/data_collection.py index c8d5293..d68eadb 100644 --- a/src/mlia/devices/ethosu/data_collection.py +++ b/src/mlia/devices/ethosu/data_collection.py @@ -6,6 +6,8 @@ from __future__ import annotations import logging from pathlib import Path +from mlia.backend.vela.compat import Operators +from mlia.backend.vela.compat import supported_operators from mlia.core.context import Context from mlia.core.data_collection import ContextAwareDataCollector from mlia.core.errors import FunctionalityNotSupportedError @@ -20,8 +22,6 @@ from mlia.nn.tensorflow.config import KerasModel from mlia.nn.tensorflow.optimizations.select import get_optimizer from mlia.nn.tensorflow.optimizations.select import OptimizationSettings from mlia.nn.tensorflow.utils import save_keras_model -from mlia.tools.vela_wrapper import Operators -from mlia.tools.vela_wrapper import 
supported_operators from mlia.utils.logging import log_action from mlia.utils.types import is_list_of diff --git a/src/mlia/devices/ethosu/handlers.py b/src/mlia/devices/ethosu/handlers.py index 48f9a2e..f010bdb 100644 --- a/src/mlia/devices/ethosu/handlers.py +++ b/src/mlia/devices/ethosu/handlers.py @@ -5,6 +5,7 @@ from __future__ import annotations import logging +from mlia.backend.vela.compat import Operators from mlia.core.events import CollectedDataEvent from mlia.core.handlers import WorkflowEventsHandler from mlia.core.typing import PathOrFileLike @@ -13,7 +14,6 @@ from mlia.devices.ethosu.events import EthosUAdvisorStartedEvent from mlia.devices.ethosu.performance import OptimizationPerformanceMetrics from mlia.devices.ethosu.performance import PerformanceMetrics from mlia.devices.ethosu.reporters import ethos_u_formatters -from mlia.tools.vela_wrapper import Operators logger = logging.getLogger(__name__) diff --git a/src/mlia/devices/ethosu/operators.py b/src/mlia/devices/ethosu/operators.py index 1a4ce8d..97c2b17 100644 --- a/src/mlia/devices/ethosu/operators.py +++ b/src/mlia/devices/ethosu/operators.py @@ -3,7 +3,7 @@ """Operators module.""" import logging -from mlia.tools import vela_wrapper +from mlia.backend.vela.compat import generate_supported_operators_report logger = logging.getLogger(__name__) @@ -11,4 +11,4 @@ logger = logging.getLogger(__name__) def report() -> None: """Generate supported operators report.""" - vela_wrapper.generate_supported_operators_report() + generate_supported_operators_report() diff --git a/src/mlia/devices/ethosu/performance.py b/src/mlia/devices/ethosu/performance.py index 431dd89..8051d6e 100644 --- a/src/mlia/devices/ethosu/performance.py +++ b/src/mlia/devices/ethosu/performance.py @@ -9,8 +9,13 @@ from enum import Enum from pathlib import Path from typing import Union -import mlia.backend.manager as backend_manager -import mlia.tools.vela_wrapper as vela +import mlia.backend.vela.compiler as vela_comp +import 
mlia.backend.vela.performance as vela_perf +from mlia.backend.corstone.performance import DeviceInfo +from mlia.backend.corstone.performance import estimate_performance +from mlia.backend.corstone.performance import ModelInfo +from mlia.backend.install import is_supported +from mlia.backend.install import supported_backends from mlia.core.context import Context from mlia.core.performance import PerformanceEstimator from mlia.devices.ethosu.config import EthosUConfiguration @@ -133,7 +138,7 @@ class VelaPerformanceEstimator( else model ) - vela_perf_metrics = vela.estimate_performance( + vela_perf_metrics = vela_perf.estimate_performance( model_path, self.device.compiler_options ) @@ -177,17 +182,17 @@ class CorstonePerformanceEstimator( f"{model_path.stem}_vela.tflite" ) - vela.optimize_model( + vela_comp.optimize_model( model_path, self.device.compiler_options, optimized_model_path ) - model_info = backend_manager.ModelInfo(model_path=optimized_model_path) - device_info = backend_manager.DeviceInfo( + model_info = ModelInfo(model_path=optimized_model_path) + device_info = DeviceInfo( device_type=self.device.target, # type: ignore mac=self.device.mac, ) - corstone_perf_metrics = backend_manager.estimate_performance( + corstone_perf_metrics = estimate_performance( model_info, device_info, self.backend ) @@ -218,10 +223,10 @@ class EthosUPerformanceEstimator( if backends is None: backends = ["Vela"] # Only Vela is always available as default for backend in backends: - if backend != "Vela" and not backend_manager.is_supported(backend): + if backend != "Vela" and not is_supported(backend): raise ValueError( f"Unsupported backend '{backend}'. " - f"Only 'Vela' and {backend_manager.supported_backends()} " + f"Only 'Vela' and {supported_backends()} " "are supported." 
) self.backends = set(backends) @@ -241,7 +246,7 @@ class EthosUPerformanceEstimator( if backend == "Vela": vela_estimator = VelaPerformanceEstimator(self.context, self.device) memory_usage = vela_estimator.estimate(tflite_model) - elif backend in backend_manager.supported_backends(): + elif backend in supported_backends(): corstone_estimator = CorstonePerformanceEstimator( self.context, self.device, backend ) diff --git a/src/mlia/devices/ethosu/reporters.py b/src/mlia/devices/ethosu/reporters.py index f0fcb39..7ecaab1 100644 --- a/src/mlia/devices/ethosu/reporters.py +++ b/src/mlia/devices/ethosu/reporters.py @@ -7,6 +7,8 @@ from collections import defaultdict from typing import Any from typing import Callable +from mlia.backend.vela.compat import Operator +from mlia.backend.vela.compat import Operators from mlia.core.advice_generation import Advice from mlia.core.reporters import report_advice from mlia.core.reporting import BytesCell @@ -23,8 +25,6 @@ from mlia.core.reporting import SingleRow from mlia.core.reporting import Table from mlia.devices.ethosu.config import EthosUConfiguration from mlia.devices.ethosu.performance import PerformanceMetrics -from mlia.tools.vela_wrapper import Operator -from mlia.tools.vela_wrapper import Operators from mlia.utils.console import style_improvement from mlia.utils.types import is_list_of diff --git a/src/mlia/tools/metadata/common.py b/src/mlia/tools/metadata/common.py deleted file mode 100644 index 5019da9..0000000 --- a/src/mlia/tools/metadata/common.py +++ /dev/null @@ -1,322 +0,0 @@ -# SPDX-FileCopyrightText: Copyright 2022, Arm Limited and/or its affiliates. 
-# SPDX-License-Identifier: Apache-2.0 -"""Module for installation process.""" -from __future__ import annotations - -import logging -from abc import ABC -from abc import abstractmethod -from dataclasses import dataclass -from pathlib import Path -from typing import Callable -from typing import Union - -from mlia.core.errors import ConfigurationError -from mlia.core.errors import InternalError -from mlia.utils.misc import yes - -logger = logging.getLogger(__name__) - - -@dataclass -class InstallFromPath: - """Installation from the local path.""" - - backend_path: Path - - -@dataclass -class DownloadAndInstall: - """Download and install.""" - - eula_agreement: bool = True - - -InstallationType = Union[InstallFromPath, DownloadAndInstall] - - -class Installation(ABC): - """Base class for the installation process of the backends.""" - - @property - @abstractmethod - def name(self) -> str: - """Return name of the backend.""" - - @property - @abstractmethod - def description(self) -> str: - """Return description of the backend.""" - - @property - @abstractmethod - def could_be_installed(self) -> bool: - """Return true if backend could be installed in current environment.""" - - @property - @abstractmethod - def already_installed(self) -> bool: - """Return true if backend is already installed.""" - - @abstractmethod - def supports(self, install_type: InstallationType) -> bool: - """Return true if installation supports requested installation type.""" - - @abstractmethod - def install(self, install_type: InstallationType) -> None: - """Install the backend.""" - - @abstractmethod - def uninstall(self) -> None: - """Uninstall the backend.""" - - -InstallationFilter = Callable[[Installation], bool] - - -class AlreadyInstalledFilter: - """Filter for already installed backends.""" - - def __call__(self, installation: Installation) -> bool: - """Installation filter.""" - return installation.already_installed - - -class ReadyForInstallationFilter: - """Filter for ready to be 
installed backends.""" - - def __call__(self, installation: Installation) -> bool: - """Installation filter.""" - return installation.could_be_installed and not installation.already_installed - - -class SupportsInstallTypeFilter: - """Filter backends that support certain type of the installation.""" - - def __init__(self, installation_type: InstallationType) -> None: - """Init filter.""" - self.installation_type = installation_type - - def __call__(self, installation: Installation) -> bool: - """Installation filter.""" - return installation.supports(self.installation_type) - - -class SearchByNameFilter: - """Filter installation by name.""" - - def __init__(self, backend_name: str | None) -> None: - """Init filter.""" - self.backend_name = backend_name - - def __call__(self, installation: Installation) -> bool: - """Installation filter.""" - return ( - not self.backend_name - or installation.name.casefold() == self.backend_name.casefold() - ) - - -class InstallationManager(ABC): - """Helper class for managing installations.""" - - @abstractmethod - def install_from(self, backend_path: Path, backend_name: str, force: bool) -> None: - """Install backend from the local directory.""" - - @abstractmethod - def download_and_install( - self, backend_name: str, eula_agreement: bool, force: bool - ) -> None: - """Download and install backends.""" - - @abstractmethod - def show_env_details(self) -> None: - """Show environment details.""" - - @abstractmethod - def backend_installed(self, backend_name: str) -> bool: - """Return true if requested backend installed.""" - - @abstractmethod - def uninstall(self, backend_name: str) -> None: - """Delete the existing installation.""" - - -class InstallationFiltersMixin: - """Mixin for filtering installation based on different conditions.""" - - installations: list[Installation] - - def filter_by(self, *filters: InstallationFilter) -> list[Installation]: - """Filter installations.""" - return [ - installation - for installation in 
self.installations - if all(filter_(installation) for filter_ in filters) - ] - - def find_by_name(self, backend_name: str) -> list[Installation]: - """Return list of the backends filtered by name.""" - return self.filter_by(SearchByNameFilter(backend_name)) - - def already_installed(self, backend_name: str = None) -> list[Installation]: - """Return list of backends that are already installed.""" - return self.filter_by( - AlreadyInstalledFilter(), - SearchByNameFilter(backend_name), - ) - - def ready_for_installation(self) -> list[Installation]: - """Return list of the backends that could be installed.""" - return self.filter_by(ReadyForInstallationFilter()) - - -class DefaultInstallationManager(InstallationManager, InstallationFiltersMixin): - """Interactive installation manager.""" - - def __init__( - self, installations: list[Installation], noninteractive: bool = False - ) -> None: - """Init the manager.""" - self.installations = installations - self.noninteractive = noninteractive - - def _install( - self, - backend_name: str, - install_type: InstallationType, - prompt: Callable[[Installation], str], - force: bool, - ) -> None: - """Check metadata and install backend.""" - installs = self.find_by_name(backend_name) - - if not installs: - logger.info("Unknown backend '%s'.", backend_name) - logger.info( - "Please run command 'mlia-backend list' to get list of " - "supported backend names." 
- ) - - return - - if len(installs) > 1: - raise InternalError(f"More than one backend with name {backend_name} found") - - installation = installs[0] - if not installation.supports(install_type): - if isinstance(install_type, InstallFromPath): - logger.info( - "Backend '%s' could not be installed using path '%s'.", - installation.name, - install_type.backend_path, - ) - logger.info( - "Please check that '%s' is a valid path to the installed backend.", - install_type.backend_path, - ) - else: - logger.info( - "Backend '%s' could not be downloaded and installed", - installation.name, - ) - logger.info( - "Please refer to the project's documentation for more details." - ) - - return - - if installation.already_installed and not force: - logger.info("Backend '%s' is already installed.", installation.name) - logger.info("Please, consider using --force option.") - return - - proceed = self.noninteractive or yes(prompt(installation)) - if not proceed: - logger.info("%s installation canceled.", installation.name) - return - - if installation.already_installed and force: - logger.info( - "Force installing %s, so delete the existing " - "installed backend first.", - installation.name, - ) - installation.uninstall() - - installation.install(install_type) - logger.info("%s successfully installed.", installation.name) - - def install_from( - self, backend_path: Path, backend_name: str, force: bool = False - ) -> None: - """Install from the provided directory.""" - - def prompt(install: Installation) -> str: - return ( - f"{install.name} was found in {backend_path}. " - "Would you like to install it?" 
- ) - - install_type = InstallFromPath(backend_path) - self._install(backend_name, install_type, prompt, force) - - def download_and_install( - self, backend_name: str, eula_agreement: bool = True, force: bool = False - ) -> None: - """Download and install available backends.""" - - def prompt(install: Installation) -> str: - return f"Would you like to download and install {install.name}?" - - install_type = DownloadAndInstall(eula_agreement=eula_agreement) - self._install(backend_name, install_type, prompt, force) - - def show_env_details(self) -> None: - """Print current state of the execution environment.""" - if installed := self.already_installed(): - self._print_installation_list("Installed backends:", installed) - - if could_be_installed := self.ready_for_installation(): - self._print_installation_list( - "Following backends could be installed:", - could_be_installed, - new_section=bool(installed), - ) - - if not installed and not could_be_installed: - logger.info("No backends installed") - - @staticmethod - def _print_installation_list( - header: str, installations: list[Installation], new_section: bool = False - ) -> None: - """Print list of the installations.""" - logger.info("%s%s\n", "\n" if new_section else "", header) - - for installation in installations: - logger.info(" - %s", installation.name) - - def uninstall(self, backend_name: str) -> None: - """Uninstall the backend with name backend_name.""" - installations = self.already_installed(backend_name) - - if not installations: - raise ConfigurationError(f"Backend '{backend_name}' is not installed") - - if len(installations) != 1: - raise InternalError( - f"More than one installed backend with name {backend_name} found" - ) - - installation = installations[0] - installation.uninstall() - - logger.info("%s successfully uninstalled.", installation.name) - - def backend_installed(self, backend_name: str) -> bool: - """Return true if requested backend installed.""" - installations = 
self.already_installed(backend_name) - - return len(installations) == 1 diff --git a/src/mlia/tools/metadata/py_package.py b/src/mlia/tools/metadata/py_package.py deleted file mode 100644 index 716b62a..0000000 --- a/src/mlia/tools/metadata/py_package.py +++ /dev/null @@ -1,84 +0,0 @@ -# SPDX-FileCopyrightText: Copyright 2022, Arm Limited and/or its affiliates. -# SPDX-License-Identifier: Apache-2.0 -"""Module for python package based installations.""" -from __future__ import annotations - -from mlia.tools.metadata.common import DownloadAndInstall -from mlia.tools.metadata.common import Installation -from mlia.tools.metadata.common import InstallationType -from mlia.utils.py_manager import get_package_manager - - -class PyPackageBackendInstallation(Installation): - """Backend based on the python package.""" - - def __init__( - self, - name: str, - description: str, - packages_to_install: list[str], - packages_to_uninstall: list[str], - expected_packages: list[str], - ) -> None: - """Init the backend installation.""" - self._name = name - self._description = description - self._packages_to_install = packages_to_install - self._packages_to_uninstall = packages_to_uninstall - self._expected_packages = expected_packages - - self.package_manager = get_package_manager() - - @property - def name(self) -> str: - """Return name of the backend.""" - return self._name - - @property - def description(self) -> str: - """Return description of the backend.""" - return self._description - - @property - def could_be_installed(self) -> bool: - """Check if backend could be installed.""" - return True - - @property - def already_installed(self) -> bool: - """Check if backend already installed.""" - return self.package_manager.packages_installed(self._expected_packages) - - def supports(self, install_type: InstallationType) -> bool: - """Return true if installation supports requested installation type.""" - return isinstance(install_type, DownloadAndInstall) - - def install(self, 
install_type: InstallationType) -> None: - """Install the backend.""" - if not self.supports(install_type): - raise Exception(f"Unsupported installation type {install_type}") - - self.package_manager.install(self._packages_to_install) - - def uninstall(self) -> None: - """Uninstall the backend.""" - self.package_manager.uninstall(self._packages_to_uninstall) - - -def get_tosa_backend_installation() -> Installation: - """Get TOSA backend installation.""" - return PyPackageBackendInstallation( - name="tosa-checker", - description="Tool to check if a ML model is compatible " - "with the TOSA specification", - packages_to_install=["mlia[tosa]"], - packages_to_uninstall=["tosa-checker"], - expected_packages=["tosa-checker"], - ) - - -def get_pypackage_backend_installations() -> list[Installation]: - """Return list of the backend installations based on python packages.""" - return [ - get_tosa_backend_installation(), - ] |