author      Benjamin Klimczak <benjamin.klimczak@arm.com>   2022-06-28 10:29:35 +0100
committer   Benjamin Klimczak <benjamin.klimczak@arm.com>   2022-07-08 10:57:19 +0100
commit      c9b4089b3037b5943565d76242d3016b8776f8d2 (patch)
tree        3de24f79dedf0f26f492a7fa1562bf684e13a055 /src/mlia/tools
parent      ba2c7fcccf37e8c81946f0776714c64f73191787 (diff)
download    mlia-c9b4089b3037b5943565d76242d3016b8776f8d2.tar.gz
MLIA-546 Merge AIET into MLIA
Merge the deprecated AIET interface for backend execution into MLIA:

- Execute backends directly (without subprocess and the aiet CLI)
- Fix issues with the unit tests
- Remove src/aiet and tests/aiet
- Re-factor code to replace 'aiet' with 'backend'
- Adapt and improve unit tests after re-factoring
- Remove dependencies that are not needed anymore (click and cloup)

Change-Id: I450734c6a3f705ba9afde41862b29e797e511f7c
Diffstat (limited to 'src/mlia/tools')
-rw-r--r--  src/mlia/tools/aiet_wrapper.py       435
-rw-r--r--  src/mlia/tools/metadata/corstone.py   61
2 files changed, 31 insertions, 465 deletions
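
For reference, the deleted aiet_wrapper module exposed a small public API for running generic inference on a Corstone FVP and collecting the NPU counters parsed from its output. Below is a minimal usage sketch against that old interface; the model path and MAC value are illustrative, and the expectation that equivalent functionality moves into mlia.backend.manager after this merge is an assumption not shown in this diff.

    from pathlib import Path

    from mlia.tools.aiet_wrapper import DeviceInfo, ModelInfo, estimate_performance

    # Describe the target device and the model to profile (values are illustrative).
    device = DeviceInfo(device_type="ethos-u55", mac=128, memory_mode="Shared_Sram")
    model = ModelInfo(model_path=Path("model.tflite"))

    # Runs the generic inference application on the Corstone-300 FVP and returns
    # the NPU cycle/beat counters parsed from the application output.
    metrics = estimate_performance(model, device, backend="Corstone-300")
    print(metrics.npu_total_cycles)
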
diff --git a/src/mlia/tools/aiet_wrapper.py b/src/mlia/tools/aiet_wrapper.py
deleted file mode 100644
index 73e82ee..0000000
--- a/src/mlia/tools/aiet_wrapper.py
+++ /dev/null
@@ -1,435 +0,0 @@
-# SPDX-FileCopyrightText: Copyright 2022, Arm Limited and/or its affiliates.
-# SPDX-License-Identifier: Apache-2.0
-"""Module for AIET integration."""
-import logging
-import re
-from abc import ABC
-from abc import abstractmethod
-from dataclasses import dataclass
-from pathlib import Path
-from typing import Any
-from typing import Dict
-from typing import List
-from typing import Literal
-from typing import Optional
-from typing import Tuple
-
-from aiet.backend.application import get_available_applications
-from aiet.backend.application import install_application
-from aiet.backend.system import get_available_systems
-from aiet.backend.system import install_system
-from mlia.utils.proc import CommandExecutor
-from mlia.utils.proc import OutputConsumer
-from mlia.utils.proc import RunningCommand
-
-
-logger = logging.getLogger(__name__)
-
-# Mapping backend -> device_type -> system_name
-_SUPPORTED_SYSTEMS = {
- "Corstone-300": {
- "ethos-u55": "Corstone-300: Cortex-M55+Ethos-U55",
- "ethos-u65": "Corstone-300: Cortex-M55+Ethos-U65",
- },
- "Corstone-310": {
- "ethos-u55": "Corstone-310: Cortex-M85+Ethos-U55",
- },
-}
-
-# Mapping system_name -> memory_mode -> application
-_SYSTEM_TO_APP_MAP = {
- "Corstone-300: Cortex-M55+Ethos-U55": {
- "Sram": "Generic Inference Runner: Ethos-U55 SRAM",
- "Shared_Sram": "Generic Inference Runner: Ethos-U55/65 Shared SRAM",
- },
- "Corstone-300: Cortex-M55+Ethos-U65": {
- "Shared_Sram": "Generic Inference Runner: Ethos-U55/65 Shared SRAM",
- "Dedicated_Sram": "Generic Inference Runner: Ethos-U65 Dedicated SRAM",
- },
- "Corstone-310: Cortex-M85+Ethos-U55": {
- "Sram": "Generic Inference Runner: Ethos-U55 SRAM",
- "Shared_Sram": "Generic Inference Runner: Ethos-U55/65 Shared SRAM",
- },
-}
-
-
-def get_system_name(backend: str, device_type: str) -> str:
- """Get the AIET system name for the given backend and device type."""
- return _SUPPORTED_SYSTEMS[backend][device_type]
-
-
-def is_supported(backend: str, device_type: Optional[str] = None) -> bool:
- """Check if the backend (and optionally device type) is supported."""
- if device_type is None:
- return backend in _SUPPORTED_SYSTEMS
-
- try:
- get_system_name(backend, device_type)
- return True
- except KeyError:
- return False
-
-
-def supported_backends() -> List[str]:
- """Get a list of all backends supported by the AIET wrapper."""
- return list(_SUPPORTED_SYSTEMS.keys())
-
-
-def get_all_system_names(backend: str) -> List[str]:
- """Get all systems supported by the backend."""
- return list(_SUPPORTED_SYSTEMS.get(backend, {}).values())
-
-
-def get_all_application_names(backend: str) -> List[str]:
- """Get all applications supported by the backend."""
- app_set = {
- app
- for sys in get_all_system_names(backend)
- for app in _SYSTEM_TO_APP_MAP[sys].values()
- }
- return list(app_set)
-
-
-@dataclass
-class DeviceInfo:
- """Device information."""
-
- device_type: Literal["ethos-u55", "ethos-u65"]
- mac: int
- memory_mode: Literal["Sram", "Shared_Sram", "Dedicated_Sram"]
-
-
-@dataclass
-class ModelInfo:
- """Model info."""
-
- model_path: Path
-
-
-@dataclass
-class PerformanceMetrics:
- """Performance metrics parsed from generic inference output."""
-
- npu_active_cycles: int
- npu_idle_cycles: int
- npu_total_cycles: int
- npu_axi0_rd_data_beat_received: int
- npu_axi0_wr_data_beat_written: int
- npu_axi1_rd_data_beat_received: int
-
-
-@dataclass
-class ExecutionParams:
- """Application execution params."""
-
- application: str
- system: str
- application_params: List[str]
- system_params: List[str]
- deploy_params: List[str]
-
-
-class AIETLogWriter(OutputConsumer):
- """Redirect AIET command output to the logger."""
-
- def feed(self, line: str) -> None:
- """Process line from the output."""
- logger.debug(line.strip())
-
-
-class GenericInferenceOutputParser(OutputConsumer):
- """Generic inference app output parser."""
-
- PATTERNS = {
- name: tuple(re.compile(pattern, re.IGNORECASE) for pattern in patterns)
- for name, patterns in (
- (
- "npu_active_cycles",
- (
- r"NPU ACTIVE cycles: (?P<value>\d+)",
- r"NPU ACTIVE: (?P<value>\d+) cycles",
- ),
- ),
- (
- "npu_idle_cycles",
- (
- r"NPU IDLE cycles: (?P<value>\d+)",
- r"NPU IDLE: (?P<value>\d+) cycles",
- ),
- ),
- (
- "npu_total_cycles",
- (
- r"NPU TOTAL cycles: (?P<value>\d+)",
- r"NPU TOTAL: (?P<value>\d+) cycles",
- ),
- ),
- (
- "npu_axi0_rd_data_beat_received",
- (
- r"NPU AXI0_RD_DATA_BEAT_RECEIVED beats: (?P<value>\d+)",
- r"NPU AXI0_RD_DATA_BEAT_RECEIVED: (?P<value>\d+) beats",
- ),
- ),
- (
- "npu_axi0_wr_data_beat_written",
- (
- r"NPU AXI0_WR_DATA_BEAT_WRITTEN beats: (?P<value>\d+)",
- r"NPU AXI0_WR_DATA_BEAT_WRITTEN: (?P<value>\d+) beats",
- ),
- ),
- (
- "npu_axi1_rd_data_beat_received",
- (
- r"NPU AXI1_RD_DATA_BEAT_RECEIVED beats: (?P<value>\d+)",
- r"NPU AXI1_RD_DATA_BEAT_RECEIVED: (?P<value>\d+) beats",
- ),
- ),
- )
- }
-
- def __init__(self) -> None:
- """Init generic inference output parser instance."""
- self.result: Dict = {}
-
- def feed(self, line: str) -> None:
- """Feed new line to the parser."""
- for name, patterns in self.PATTERNS.items():
- for pattern in patterns:
- match = pattern.search(line)
-
- if match:
- self.result[name] = int(match["value"])
- return
-
- def is_ready(self) -> bool:
- """Return true if all expected data has been parsed."""
- return self.result.keys() == self.PATTERNS.keys()
-
- def missed_keys(self) -> List[str]:
- """Return list of the keys that have not been found in the output."""
- return sorted(self.PATTERNS.keys() - self.result.keys())
-
-
-class AIETRunner:
- """AIET runner."""
-
- def __init__(self, executor: CommandExecutor) -> None:
- """Init AIET runner instance."""
- self.executor = executor
-
- @staticmethod
- def get_installed_systems() -> List[str]:
- """Get list of the installed systems."""
- return [system.name for system in get_available_systems()]
-
- @staticmethod
- def get_installed_applications(system: Optional[str] = None) -> List[str]:
- """Get list of the installed application."""
- return [
- app.name
- for app in get_available_applications()
- if system is None or app.can_run_on(system)
- ]
-
- def is_application_installed(self, application: str, system: str) -> bool:
- """Return true if requested application installed."""
- return application in self.get_installed_applications(system)
-
- def is_system_installed(self, system: str) -> bool:
- """Return true if requested system installed."""
- return system in self.get_installed_systems()
-
- def systems_installed(self, systems: List[str]) -> bool:
- """Check if all provided systems are installed."""
- if not systems:
- return False
-
- installed_systems = self.get_installed_systems()
- return all(system in installed_systems for system in systems)
-
- def applications_installed(self, applications: List[str]) -> bool:
- """Check if all provided applications are installed."""
- if not applications:
- return False
-
- installed_apps = self.get_installed_applications()
- return all(app in installed_apps for app in applications)
-
- def all_installed(self, systems: List[str], apps: List[str]) -> bool:
- """Check if all provided artifacts are installed."""
- return self.systems_installed(systems) and self.applications_installed(apps)
-
- @staticmethod
- def install_system(system_path: Path) -> None:
- """Install system."""
- install_system(system_path)
-
- @staticmethod
- def install_application(app_path: Path) -> None:
- """Install application."""
- install_application(app_path)
-
- def run_application(self, execution_params: ExecutionParams) -> RunningCommand:
- """Run requested application."""
- command = [
- "aiet",
- "application",
- "run",
- "-n",
- execution_params.application,
- "-s",
- execution_params.system,
- *self._params("-p", execution_params.application_params),
- *self._params("--system-param", execution_params.system_params),
- *self._params("--deploy", execution_params.deploy_params),
- ]
-
- return self._submit(command)
-
- @staticmethod
- def _params(name: str, params: List[str]) -> List[str]:
- return [p for item in [(name, param) for param in params] for p in item]
-
- def _submit(self, command: List[str]) -> RunningCommand:
- """Submit command for the execution."""
- logger.debug("Submit command %s", " ".join(command))
- return self.executor.submit(command)
-
-
-class GenericInferenceRunner(ABC):
- """Abstract class for generic inference runner."""
-
- def __init__(self, aiet_runner: AIETRunner):
- """Init generic inference runner instance."""
- self.aiet_runner = aiet_runner
- self.running_inference: Optional[RunningCommand] = None
-
- def run(
- self, model_info: ModelInfo, output_consumers: List[OutputConsumer]
- ) -> None:
- """Run generic inference for the provided device/model."""
- execution_params = self.get_execution_params(model_info)
-
- self.running_inference = self.aiet_runner.run_application(execution_params)
- self.running_inference.output_consumers = output_consumers
- self.running_inference.consume_output()
-
- def stop(self) -> None:
- """Stop running inference."""
- if self.running_inference is None:
- return
-
- self.running_inference.stop()
-
- @abstractmethod
- def get_execution_params(self, model_info: ModelInfo) -> ExecutionParams:
- """Get execution params for the provided model."""
-
- def __enter__(self) -> "GenericInferenceRunner":
- """Enter context."""
- return self
-
- def __exit__(self, *_args: Any) -> None:
- """Exit context."""
- self.stop()
-
- def check_system_and_application(self, system_name: str, app_name: str) -> None:
- """Check if requested system and application installed."""
- if not self.aiet_runner.is_system_installed(system_name):
- raise Exception(f"System {system_name} is not installed")
-
- if not self.aiet_runner.is_application_installed(app_name, system_name):
- raise Exception(
- f"Application {app_name} for the system {system_name} "
- "is not installed"
- )
-
-
-class GenericInferenceRunnerEthosU(GenericInferenceRunner):
- """Generic inference runner on U55/65."""
-
- def __init__(
- self, aiet_runner: AIETRunner, device_info: DeviceInfo, backend: str
- ) -> None:
- """Init generic inference runner instance."""
- super().__init__(aiet_runner)
-
- system_name, app_name = self.resolve_system_and_app(device_info, backend)
- self.system_name = system_name
- self.app_name = app_name
- self.device_info = device_info
-
- @staticmethod
- def resolve_system_and_app(
- device_info: DeviceInfo, backend: str
- ) -> Tuple[str, str]:
- """Find appropriate system and application for the provided device/backend."""
- try:
- system_name = get_system_name(backend, device_info.device_type)
- except KeyError as ex:
- raise RuntimeError(
- f"Unsupported device {device_info.device_type} "
- f"for backend {backend}"
- ) from ex
-
- if system_name not in _SYSTEM_TO_APP_MAP:
- raise RuntimeError(f"System {system_name} is not installed")
-
- try:
- app_name = _SYSTEM_TO_APP_MAP[system_name][device_info.memory_mode]
- except KeyError as err:
- raise RuntimeError(
- f"Unsupported memory mode {device_info.memory_mode}"
- ) from err
-
- return system_name, app_name
-
- def get_execution_params(self, model_info: ModelInfo) -> ExecutionParams:
- """Get execution params for Ethos-U55/65."""
- self.check_system_and_application(self.system_name, self.app_name)
-
- system_params = [
- f"mac={self.device_info.mac}",
- f"input_file={model_info.model_path.absolute()}",
- ]
-
- return ExecutionParams(
- self.app_name,
- self.system_name,
- [],
- system_params,
- [],
- )
-
-
-def get_generic_runner(device_info: DeviceInfo, backend: str) -> GenericInferenceRunner:
- """Get generic runner for provided device and backend."""
- aiet_runner = get_aiet_runner()
- return GenericInferenceRunnerEthosU(aiet_runner, device_info, backend)
-
-
-def estimate_performance(
- model_info: ModelInfo, device_info: DeviceInfo, backend: str
-) -> PerformanceMetrics:
- """Get performance estimations."""
- with get_generic_runner(device_info, backend) as generic_runner:
- output_parser = GenericInferenceOutputParser()
- output_consumers = [output_parser, AIETLogWriter()]
-
- generic_runner.run(model_info, output_consumers)
-
- if not output_parser.is_ready():
- missed_data = ",".join(output_parser.missed_keys())
- logger.debug(
- "Unable to get performance metrics, missed data %s", missed_data
- )
- raise Exception("Unable to get performance metrics, insufficient data")
-
- return PerformanceMetrics(**output_parser.result)
-
-
-def get_aiet_runner() -> AIETRunner:
- """Return AIET runner."""
- executor = CommandExecutor()
- return AIETRunner(executor)
diff --git a/src/mlia/tools/metadata/corstone.py b/src/mlia/tools/metadata/corstone.py
index 7a9d113..a92f81c 100644
--- a/src/mlia/tools/metadata/corstone.py
+++ b/src/mlia/tools/metadata/corstone.py
@@ -12,7 +12,8 @@ from typing import Iterable
from typing import List
from typing import Optional
-import mlia.tools.aiet_wrapper as aiet
+import mlia.backend.manager as backend_manager
+from mlia.backend.fs import get_backend_resources
from mlia.tools.metadata.common import DownloadAndInstall
from mlia.tools.metadata.common import Installation
from mlia.tools.metadata.common import InstallationType
@@ -41,8 +42,8 @@ PathChecker = Callable[[Path], Optional[BackendInfo]]
BackendInstaller = Callable[[bool, Path], Path]
-class AIETMetadata:
- """AIET installation metadata."""
+class BackendMetadata:
+ """Backend installation metadata."""
def __init__(
self,
@@ -55,7 +56,7 @@ class AIETMetadata:
supported_platforms: Optional[List[str]] = None,
) -> None:
"""
- Initialize AIETMetaData.
+ Initialize BackendMetadata.
Members expected_systems and expected_apps are filled automatically.
"""
@@ -67,15 +68,15 @@ class AIETMetadata:
self.download_artifact = download_artifact
self.supported_platforms = supported_platforms
- self.expected_systems = aiet.get_all_system_names(name)
- self.expected_apps = aiet.get_all_application_names(name)
+ self.expected_systems = backend_manager.get_all_system_names(name)
+ self.expected_apps = backend_manager.get_all_application_names(name)
@property
def expected_resources(self) -> Iterable[Path]:
"""Return list of expected resources."""
resources = [self.system_config, *self.apps_resources]
- return (get_mlia_resources() / resource for resource in resources)
+ return (get_backend_resources() / resource for resource in resources)
@property
def supported_platform(self) -> bool:
@@ -86,49 +87,49 @@ class AIETMetadata:
return platform.system() in self.supported_platforms
-class AIETBasedInstallation(Installation):
- """Backend installation based on AIET functionality."""
+class BackendInstallation(Installation):
+ """Backend installation."""
def __init__(
self,
- aiet_runner: aiet.AIETRunner,
- metadata: AIETMetadata,
+ backend_runner: backend_manager.BackendRunner,
+ metadata: BackendMetadata,
path_checker: PathChecker,
backend_installer: Optional[BackendInstaller],
) -> None:
- """Init the tool installation."""
- self.aiet_runner = aiet_runner
+ """Init the backend installation."""
+ self.backend_runner = backend_runner
self.metadata = metadata
self.path_checker = path_checker
self.backend_installer = backend_installer
@property
def name(self) -> str:
- """Return name of the tool."""
+ """Return name of the backend."""
return self.metadata.name
@property
def description(self) -> str:
- """Return description of the tool."""
+ """Return description of the backend."""
return self.metadata.description
@property
def already_installed(self) -> bool:
- """Return true if tool already installed."""
- return self.aiet_runner.all_installed(
+ """Return true if backend already installed."""
+ return self.backend_runner.all_installed(
self.metadata.expected_systems, self.metadata.expected_apps
)
@property
def could_be_installed(self) -> bool:
- """Return true if tool could be installed."""
+ """Return true if backend could be installed."""
if not self.metadata.supported_platform:
return False
return all_paths_valid(self.metadata.expected_resources)
def supports(self, install_type: InstallationType) -> bool:
- """Return true if tools supported type of the installation."""
+ """Return true if backends supported type of the installation."""
if isinstance(install_type, DownloadAndInstall):
return self.metadata.download_artifact is not None
@@ -138,7 +139,7 @@ class AIETBasedInstallation(Installation):
return False # type: ignore
def install(self, install_type: InstallationType) -> None:
- """Install the tool."""
+ """Install the backend."""
if isinstance(install_type, DownloadAndInstall):
download_artifact = self.metadata.download_artifact
assert download_artifact is not None, "No artifact provided"
@@ -153,7 +154,7 @@ class AIETBasedInstallation(Installation):
raise Exception(f"Unable to install {install_type}")
def install_from(self, backend_info: BackendInfo) -> None:
- """Install tool from the directory."""
+ """Install backend from the directory."""
mlia_resources = get_mlia_resources()
with temp_directory() as tmpdir:
@@ -169,15 +170,15 @@ class AIETBasedInstallation(Installation):
copy_all(*resources_to_copy, dest=fvp_dist_dir)
- self.aiet_runner.install_system(fvp_dist_dir)
+ self.backend_runner.install_system(fvp_dist_dir)
for app in self.metadata.apps_resources:
- self.aiet_runner.install_application(mlia_resources / app)
+ self.backend_runner.install_application(mlia_resources / app)
def download_and_install(
self, download_artifact: DownloadArtifact, eula_agrement: bool
) -> None:
- """Download and install the tool."""
+ """Download and install the backend."""
with temp_directory() as tmpdir:
try:
downloaded_to = download_artifact.download_to(tmpdir)
@@ -307,10 +308,10 @@ class Corstone300Installer:
def get_corstone_300_installation() -> Installation:
"""Get Corstone-300 installation."""
- corstone_300 = AIETBasedInstallation(
- aiet_runner=aiet.get_aiet_runner(),
+ corstone_300 = BackendInstallation(
+ backend_runner=backend_manager.BackendRunner(),
# pylint: disable=line-too-long
- metadata=AIETMetadata(
+ metadata=BackendMetadata(
name="Corstone-300",
description="Corstone-300 FVP",
system_config="aiet/systems/corstone-300/aiet-config.json",
@@ -356,10 +357,10 @@ def get_corstone_300_installation() -> Installation:
def get_corstone_310_installation() -> Installation:
"""Get Corstone-310 installation."""
- corstone_310 = AIETBasedInstallation(
- aiet_runner=aiet.get_aiet_runner(),
+ corstone_310 = BackendInstallation(
+ backend_runner=backend_manager.BackendRunner(),
# pylint: disable=line-too-long
- metadata=AIETMetadata(
+ metadata=BackendMetadata(
name="Corstone-310",
description="Corstone-310 FVP",
system_config="aiet/systems/corstone-310/aiet-config.json",