author     Dmitrii Agibov <dmitrii.agibov@arm.com>        2023-01-27 09:12:50 +0000
committer  Benjamin Klimczak <benjamin.klimczak@arm.com>  2023-02-08 15:25:11 +0000
commit     3e3dcb9bd5abb88adcd85b4f89e8a81e7f6fa293 (patch)
tree       020eee6abef093113de5b49c135c915c37173843 /src/mlia
parent     836efd40317a397761ec8b66e3f4398faac43ad0 (diff)
download   mlia-3e3dcb9bd5abb88adcd85b4f89e8a81e7f6fa293.tar.gz
MLIA-595 Remove old backend configuration mechanism
- Remove old backend configuration code
- Install backends into directory ~/.mlia
- Rename targets/backends in registry to make it consistent across the codebase

Change-Id: I9c8b012fe863280f1c692940c0dcad3ef638aaae
Diffstat (limited to 'src/mlia')
-rw-r--r--src/mlia/backend/corstone/__init__.py7
-rw-r--r--src/mlia/backend/corstone/install.py57
-rw-r--r--src/mlia/backend/corstone/performance.py365
-rw-r--r--src/mlia/backend/errors.py6
-rw-r--r--src/mlia/backend/executor/__init__.py3
-rw-r--r--src/mlia/backend/executor/application.py170
-rw-r--r--src/mlia/backend/executor/common.py517
-rw-r--r--src/mlia/backend/executor/config.py68
-rw-r--r--src/mlia/backend/executor/execution.py342
-rw-r--r--src/mlia/backend/executor/fs.py88
-rw-r--r--src/mlia/backend/executor/output_consumer.py67
-rw-r--r--src/mlia/backend/executor/proc.py191
-rw-r--r--src/mlia/backend/executor/runner.py98
-rw-r--r--src/mlia/backend/executor/source.py207
-rw-r--r--src/mlia/backend/executor/system.py178
-rw-r--r--src/mlia/backend/install.py250
-rw-r--r--src/mlia/backend/manager.py12
-rw-r--r--src/mlia/backend/repo.py190
-rw-r--r--src/mlia/backend/tosa_checker/__init__.py2
-rw-r--r--src/mlia/cli/command_validators.py8
-rw-r--r--src/mlia/cli/config.py53
-rw-r--r--src/mlia/cli/main.py2
-rw-r--r--src/mlia/cli/options.py8
-rw-r--r--src/mlia/resources/backend_configs/systems/SYSTEMS.txt9
-rw-r--r--src/mlia/resources/backend_configs/systems/corstone-300-vht/backend-config.json72
-rw-r--r--src/mlia/resources/backend_configs/systems/corstone-300-vht/backend-config.json.license3
-rw-r--r--src/mlia/resources/backend_configs/systems/corstone-300/backend-config.json72
-rw-r--r--src/mlia/resources/backend_configs/systems/corstone-300/backend-config.json.license3
-rw-r--r--src/mlia/resources/backend_configs/systems/corstone-310-vht/backend-config.json72
-rw-r--r--src/mlia/resources/backend_configs/systems/corstone-310-vht/backend-config.json.license3
-rw-r--r--src/mlia/resources/backend_configs/systems/corstone-310/backend-config.json72
-rw-r--r--src/mlia/resources/backend_configs/systems/corstone-310/backend-config.json.license3
-rw-r--r--src/mlia/resources/backends/applications/APPLICATIONS.txt4
-rw-r--r--src/mlia/resources/backends/applications/inference_runner-sse-300-22.08.02-ethos-U55-Default-noTA/backend-config.json14
-rw-r--r--src/mlia/resources/backends/applications/inference_runner-sse-300-22.08.02-ethos-U55-Default-noTA/backend-config.json.license3
-rw-r--r--src/mlia/resources/backends/applications/inference_runner-sse-300-22.08.02-ethos-U65-Default-noTA/backend-config.json14
-rw-r--r--src/mlia/resources/backends/applications/inference_runner-sse-300-22.08.02-ethos-U65-Default-noTA/backend-config.json.license3
-rw-r--r--src/mlia/resources/backends/applications/inference_runner-sse-310-22.08.02-ethos-U55-Default-noTA/backend-config.json14
-rw-r--r--src/mlia/resources/backends/applications/inference_runner-sse-310-22.08.02-ethos-U55-Default-noTA/backend-config.json.license3
-rw-r--r--src/mlia/resources/backends/applications/inference_runner-sse-310-22.08.02-ethos-U65-Default-noTA/backend-config.json14
-rw-r--r--src/mlia/resources/backends/applications/inference_runner-sse-310-22.08.02-ethos-U65-Default-noTA/backend-config.json.license3
-rw-r--r--src/mlia/resources/backends/systems/.gitignore6
-rw-r--r--src/mlia/target/cortex_a/__init__.py4
-rw-r--r--src/mlia/target/ethos_u/__init__.py6
-rw-r--r--src/mlia/target/ethos_u/performance.py26
-rw-r--r--src/mlia/target/registry.py18
-rw-r--r--src/mlia/target/tosa/__init__.py4
-rw-r--r--src/mlia/utils/proc.py55
48 files changed, 580 insertions, 2809 deletions
diff --git a/src/mlia/backend/corstone/__init__.py b/src/mlia/backend/corstone/__init__.py
index f89da63..36f74ee 100644
--- a/src/mlia/backend/corstone/__init__.py
+++ b/src/mlia/backend/corstone/__init__.py
@@ -1,4 +1,4 @@
-# SPDX-FileCopyrightText: Copyright 2022, Arm Limited and/or its affiliates.
+# SPDX-FileCopyrightText: Copyright 2022-2023, Arm Limited and/or its affiliates.
# SPDX-License-Identifier: Apache-2.0
"""Corstone backend module."""
from mlia.backend.config import BackendConfiguration
@@ -23,3 +23,8 @@ registry.register(
backend_type=BackendType.CUSTOM,
),
)
+
+
+def is_corstone_backend(backend_name: str) -> bool:
+ """Check if backend belongs to Corstone."""
+ return backend_name in ["Corstone-300", "Corstone-310"]
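For illustration, a minimal sketch of calling the helper added above (hypothetical caller code, assuming this version of MLIA is installed; not part of this commit):

from mlia.backend.corstone import is_corstone_backend

# Only the two Corstone FVP backends are recognised; any other name is rejected.
assert is_corstone_backend("Corstone-300")
assert is_corstone_backend("Corstone-310")
assert not is_corstone_backend("tosa-checker")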
diff --git a/src/mlia/backend/corstone/install.py b/src/mlia/backend/corstone/install.py
index 2a0e5c9..c57a47b 100644
--- a/src/mlia/backend/corstone/install.py
+++ b/src/mlia/backend/corstone/install.py
@@ -1,4 +1,4 @@
-# SPDX-FileCopyrightText: Copyright 2022, Arm Limited and/or its affiliates.
+# SPDX-FileCopyrightText: Copyright 2022-2023, Arm Limited and/or its affiliates.
# SPDX-License-Identifier: Apache-2.0
"""Module for Corstone based FVPs.
@@ -12,9 +12,7 @@ import logging
import subprocess # nosec
from pathlib import Path
-from mlia.backend.executor.runner import BackendRunner
from mlia.backend.install import BackendInstallation
-from mlia.backend.install import BackendMetadata
from mlia.backend.install import CompoundPathChecker
from mlia.backend.install import Installation
from mlia.backend.install import PackagePathChecker
@@ -33,6 +31,7 @@ class Corstone300Installer:
"""Install Corstone-300 and return path to the models."""
with working_directory(dist_dir):
install_dir = "corstone-300"
+
try:
fvp_install_cmd = [
"./FVP_Corstone_SSE-300.sh",
@@ -62,23 +61,18 @@ class Corstone300Installer:
def get_corstone_300_installation() -> Installation:
"""Get Corstone-300 installation."""
corstone_300 = BackendInstallation(
- backend_runner=BackendRunner(),
# pylint: disable=line-too-long
- metadata=BackendMetadata(
- name="Corstone-300",
- description="Corstone-300 FVP",
- system_config="backend_configs/systems/corstone-300/backend-config.json",
- apps_resources=[],
- fvp_dir_name="corstone_300",
- download_artifact=DownloadArtifact(
- name="Corstone-300 FVP",
- url="https://developer.arm.com/-/media/Arm%20Developer%20Community/Downloads/OSS/FVP/Corstone-300/FVP_Corstone_SSE-300_11.16_26.tgz",
- filename="FVP_Corstone_SSE-300_11.16_26.tgz",
- version="11.16_26",
- sha256_hash="e26139be756b5003a30d978c629de638aed1934d597dc24a17043d4708e934d7",
- ),
- supported_platforms=["Linux"],
+ name="Corstone-300",
+ description="Corstone-300 FVP",
+ fvp_dir_name="corstone_300",
+ download_artifact=DownloadArtifact(
+ name="Corstone-300 FVP",
+ url="https://developer.arm.com/-/media/Arm%20Developer%20Community/Downloads/OSS/FVP/Corstone-300/FVP_Corstone_SSE-300_11.16_26.tgz",
+ filename="FVP_Corstone_SSE-300_11.16_26.tgz",
+ version="11.16_26",
+ sha256_hash="e26139be756b5003a30d978c629de638aed1934d597dc24a17043d4708e934d7",
),
+ supported_platforms=["Linux"],
# pylint: enable=line-too-long
path_checker=CompoundPathChecker(
PackagePathChecker(
@@ -87,6 +81,7 @@ def get_corstone_300_installation() -> Installation:
"models/Linux64_GCC-6.4/FVP_Corstone_SSE-300_Ethos-U65",
],
backend_subfolder="models/Linux64_GCC-6.4",
+ settings={"profile": "default"},
),
StaticPathChecker(
static_backend_path=Path("/opt/VHT"),
@@ -95,9 +90,7 @@ def get_corstone_300_installation() -> Installation:
"VHT_Corstone_SSE-300_Ethos-U65",
],
copy_source=False,
- system_config=(
- "backend_configs/systems/corstone-300-vht/backend-config.json"
- ),
+ settings={"profile": "AVH"},
),
),
backend_installer=Corstone300Installer(),
@@ -109,18 +102,11 @@ def get_corstone_300_installation() -> Installation:
def get_corstone_310_installation() -> Installation:
"""Get Corstone-310 installation."""
corstone_310 = BackendInstallation(
- backend_runner=BackendRunner(),
- # pylint: disable=line-too-long
- metadata=BackendMetadata(
- name="Corstone-310",
- description="Corstone-310 FVP",
- system_config="backend_configs/systems/corstone-310/backend-config.json",
- apps_resources=[],
- fvp_dir_name="corstone_310",
- download_artifact=None,
- supported_platforms=["Linux"],
- ),
- # pylint: enable=line-too-long
+ name="Corstone-310",
+ description="Corstone-310 FVP",
+ fvp_dir_name="corstone_310",
+ download_artifact=None,
+ supported_platforms=["Linux"],
path_checker=CompoundPathChecker(
PackagePathChecker(
expected_files=[
@@ -128,6 +114,7 @@ def get_corstone_310_installation() -> Installation:
"models/Linux64_GCC-9.3/FVP_Corstone_SSE-310_Ethos-U65",
],
backend_subfolder="models/Linux64_GCC-9.3",
+ settings={"profile": "default"},
),
StaticPathChecker(
static_backend_path=Path("/opt/VHT"),
@@ -136,9 +123,7 @@ def get_corstone_310_installation() -> Installation:
"VHT_Corstone_SSE-310_Ethos-U65",
],
copy_source=False,
- system_config=(
- "backend_configs/systems/corstone-310-vht/backend-config.json"
- ),
+ settings={"profile": "AVH"},
),
),
backend_installer=None,
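For illustration, a minimal sketch of inspecting the simplified installation objects above (hypothetical usage; it assumes BackendInstallation exposes the name and description it is constructed with):

from mlia.backend.corstone.install import (
    get_corstone_300_installation,
    get_corstone_310_installation,
)

for installation in (get_corstone_300_installation(), get_corstone_310_installation()):
    # Each installation now carries its metadata directly instead of a separate
    # BackendMetadata object; the path checkers attach a settings={"profile": ...}
    # dict that performance.py reads later to pick the right executable.
    print(installation.name, "-", installation.description)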
diff --git a/src/mlia/backend/corstone/performance.py b/src/mlia/backend/corstone/performance.py
index 531f0cd..8fd3e40 100644
--- a/src/mlia/backend/corstone/performance.py
+++ b/src/mlia/backend/corstone/performance.py
@@ -3,40 +3,25 @@
"""Module for backend integration."""
from __future__ import annotations
+import base64
+import json
import logging
-from abc import ABC
-from abc import abstractmethod
+import re
+import subprocess # nosec
from dataclasses import dataclass
from pathlib import Path
-from typing import Literal
-from mlia.backend.executor.output_consumer import Base64OutputConsumer
-from mlia.backend.executor.output_consumer import OutputConsumer
-from mlia.backend.executor.runner import BackendRunner
-from mlia.backend.executor.runner import ExecutionParams
-from mlia.backend.install import get_application_name
-from mlia.backend.install import get_system_name
+from mlia.backend.errors import BackendExecutionFailed
+from mlia.backend.repo import get_backend_repository
+from mlia.utils.filesystem import get_mlia_resources
+from mlia.utils.proc import Command
+from mlia.utils.proc import process_command_output
logger = logging.getLogger(__name__)
@dataclass
-class DeviceInfo:
- """Device information."""
-
- device_type: Literal["Ethos-U55", "Ethos-U65", "ethos-u55", "ethos-u65"]
- mac: int
-
-
-@dataclass
-class ModelInfo:
- """Model info."""
-
- model_path: Path
-
-
-@dataclass
class PerformanceMetrics:
"""Performance metrics parsed from generic inference output."""
@@ -48,186 +33,188 @@ class PerformanceMetrics:
npu_axi1_rd_data_beat_received: int
-class LogWriter(OutputConsumer):
- """Redirect output to the logger."""
+class GenericInferenceOutputParser:
+ """Generic inference runner output parser."""
- def feed(self, line: str) -> bool:
- """Process line from the output."""
- logger.debug(line.strip())
- return False
+ pattern = re.compile(r"<metrics>(.*)</metrics>")
+ def __init__(self) -> None:
+ """Init parser."""
+ self.base64_data: list[str] = []
-class GenericInferenceOutputParser(Base64OutputConsumer):
- """Generic inference app output parser."""
+ def __call__(self, line: str) -> None:
+ """Extract base64 strings from the app output."""
+ if res_b64 := self.pattern.search(line):
+ self.base64_data.append(res_b64.group(1))
- def __init__(self) -> None:
- """Init generic inference output parser instance."""
- super().__init__()
- self._map = {
- "NPU ACTIVE": "npu_active_cycles",
- "NPU IDLE": "npu_idle_cycles",
- "NPU TOTAL": "npu_total_cycles",
- "NPU AXI0_RD_DATA_BEAT_RECEIVED": "npu_axi0_rd_data_beat_received",
- "NPU AXI0_WR_DATA_BEAT_WRITTEN": "npu_axi0_wr_data_beat_written",
- "NPU AXI1_RD_DATA_BEAT_RECEIVED": "npu_axi1_rd_data_beat_received",
- }
-
- @property
- def result(self) -> dict:
- """Merge the raw results and map the names to the right output names."""
- merged_result = {}
- for raw_result in self.parsed_output:
- for profiling_result in raw_result:
- for sample in profiling_result["samples"]:
- name, values = (sample["name"], sample["value"])
- if name in merged_result:
- raise KeyError(
- f"Duplicate key '{name}' in base64 output.",
- )
- new_name = self._map[name]
- merged_result[new_name] = values[0]
- return merged_result
-
- def is_ready(self) -> bool:
- """Return true if all expected data has been parsed."""
- return set(self.result.keys()) == set(self._map.values())
-
- def missed_keys(self) -> set[str]:
- """Return a set of the keys that have not been found in the output."""
- return set(self._map.values()) - set(self.result.keys())
-
-
-class GenericInferenceRunner(ABC):
- """Abstract class for generic inference runner."""
-
- def __init__(self, backend_runner: BackendRunner):
- """Init generic inference runner instance."""
- self.backend_runner = backend_runner
-
- def run(
- self, model_info: ModelInfo, output_consumers: list[OutputConsumer]
- ) -> None:
- """Run generic inference for the provided device/model."""
- execution_params = self.get_execution_params(model_info)
-
- ctx = self.backend_runner.run_application(execution_params)
- if ctx.stdout is not None:
- ctx.stdout = self.consume_output(ctx.stdout, output_consumers)
-
- @abstractmethod
- def get_execution_params(self, model_info: ModelInfo) -> ExecutionParams:
- """Get execution params for the provided model."""
-
- def check_system_and_application(self, system_name: str, app_name: str) -> None:
- """Check if requested system and application installed."""
- if not self.backend_runner.is_system_installed(system_name):
- raise Exception(f"System {system_name} is not installed")
-
- if not self.backend_runner.is_application_installed(app_name, system_name):
- raise Exception(
- f"Application {app_name} for the system {system_name} "
- "is not installed"
+ def get_metrics(self) -> PerformanceMetrics:
+ """Parse the collected data and return perf metrics."""
+ try:
+ parsed_metrics = self._parse_data()
+
+ return PerformanceMetrics(
+ parsed_metrics["NPU ACTIVE"],
+ parsed_metrics["NPU IDLE"],
+ parsed_metrics["NPU TOTAL"],
+ parsed_metrics["NPU AXI0_RD_DATA_BEAT_RECEIVED"],
+ parsed_metrics["NPU AXI0_WR_DATA_BEAT_WRITTEN"],
+ parsed_metrics["NPU AXI1_RD_DATA_BEAT_RECEIVED"],
)
+ except Exception as err:
+ raise ValueError("Unable to parse output and get metrics.") from err
- @staticmethod
- def consume_output(output: bytearray, consumers: list[OutputConsumer]) -> bytearray:
- """
- Pass program's output to the consumers and filter it.
-
- Returns the filtered output.
- """
- filtered_output = bytearray()
- for line_bytes in output.splitlines():
- line = line_bytes.decode("utf-8")
- remove_line = False
- for consumer in consumers:
- if consumer.feed(line):
- remove_line = True
- if not remove_line:
- filtered_output.extend(line_bytes)
-
- return filtered_output
-
-
-class GenericInferenceRunnerEthosU(GenericInferenceRunner):
- """Generic inference runner on U55/65."""
-
- def __init__(
- self, backend_runner: BackendRunner, device_info: DeviceInfo, backend: str
- ) -> None:
- """Init generic inference runner instance."""
- super().__init__(backend_runner)
-
- system_name, app_name = self.resolve_system_and_app(device_info, backend)
- self.system_name = system_name
- self.app_name = app_name
- self.device_info = device_info
-
- @staticmethod
- def resolve_system_and_app(
- device_info: DeviceInfo, backend: str
- ) -> tuple[str, str]:
- """Find appropriate system and application for the provided device/backend."""
- try:
- system_name = get_system_name(backend, device_info.device_type)
- except KeyError as ex:
- raise RuntimeError(
- f"Unsupported device {device_info.device_type} "
- f"for backend {backend}"
- ) from ex
+ def _parse_data(self) -> dict[str, int]:
+ """Parse the data."""
+ parsed_metrics: dict[str, int] = {}
- try:
- app_name = get_application_name(system_name)
- except KeyError as err:
- raise RuntimeError(f"System {system_name} is not installed") from err
-
- return system_name, app_name
-
- def get_execution_params(self, model_info: ModelInfo) -> ExecutionParams:
- """Get execution params for Ethos-U55/65."""
- self.check_system_and_application(self.system_name, self.app_name)
-
- system_params = [
- f"mac={self.device_info.mac}",
- f"input_file={model_info.model_path.absolute()}",
- ]
-
- return ExecutionParams(
- self.app_name,
- self.system_name,
- [],
- system_params,
- )
+ for base64_item in self.base64_data:
+ res_json = base64.b64decode(base64_item, validate=True)
+ for profiling_group in json.loads(res_json):
+ for metric in profiling_group["samples"]:
+ metric_name = metric["name"]
+ metric_value = int(metric["value"][0])
-def get_generic_runner(device_info: DeviceInfo, backend: str) -> GenericInferenceRunner:
- """Get generic runner for provided device and backend."""
- backend_runner = get_backend_runner()
- return GenericInferenceRunnerEthosU(backend_runner, device_info, backend)
+ if metric_name in parsed_metrics:
+ raise KeyError(f"Duplicate key {metric_name}")
+ parsed_metrics[metric_name] = metric_value
-def estimate_performance(
- model_info: ModelInfo, device_info: DeviceInfo, backend: str
+ return parsed_metrics
+
+
+@dataclass
+class FVPMetadata:
+ """Metadata for FVP."""
+
+ executable: str
+ generic_inf_app: Path
+
+
+def get_generic_inference_app_path(fvp: str, target: str) -> Path:
+ """Return path to the generic inference runner binary."""
+ apps_path = get_mlia_resources() / "backends/applications"
+
+ fvp_mapping = {"Corstone-300": "300", "Corstone-310": "310"}
+ target_mapping = {"ethos-u55": "U55", "ethos-u65": "U65"}
+
+ fvp_version = f"sse-{fvp_mapping[fvp]}"
+ app_version = f"22.08.02-ethos-{target_mapping[target]}-Default-noTA"
+
+ app_dir = f"inference_runner-{fvp_version}-{app_version}"
+ return apps_path.joinpath(app_dir, "ethos-u-inference_runner.axf")
+
+
+def get_executable_name(fvp: str, profile: str, target: str) -> str:
+ """Return name of the executable for selected FVP and profile."""
+ executable_name_mapping = {
+ ("Corstone-300", "AVH", "ethos-u55"): "VHT_Corstone_SSE-300_Ethos-U55",
+ ("Corstone-300", "AVH", "ethos-u65"): "VHT_Corstone_SSE-300_Ethos-U65",
+ ("Corstone-300", "default", "ethos-u55"): "FVP_Corstone_SSE-300_Ethos-U55",
+ ("Corstone-300", "default", "ethos-u65"): "FVP_Corstone_SSE-300_Ethos-U65",
+ ("Corstone-310", "AVH", "ethos-u55"): "VHT_Corstone_SSE-310",
+ ("Corstone-310", "AVH", "ethos-u65"): "VHT_Corstone_SSE-310_Ethos-U65",
+ }
+
+ return executable_name_mapping[(fvp, profile, target)]
+
+
+def get_fvp_metadata(fvp: str, profile: str, target: str) -> FVPMetadata:
+ """Return metadata for selected Corstone backend."""
+ executable_name = get_executable_name(fvp, profile, target)
+ app = get_generic_inference_app_path(fvp, target)
+
+ return FVPMetadata(executable_name, app)
+
+
+def build_corstone_command(
+ backend_path: Path,
+ fvp: str,
+ target: str,
+ mac: int,
+ model: Path,
+ profile: str,
+) -> Command:
+ """Build command to run Corstone FVP."""
+ fvp_metadata = get_fvp_metadata(fvp, profile, target)
+
+ cmd = [
+ backend_path.joinpath(fvp_metadata.executable).as_posix(),
+ "-a",
+ fvp_metadata.generic_inf_app.as_posix(),
+ "--data",
+ f"{model}@0x90000000",
+ "-C",
+ f"ethosu.num_macs={mac}",
+ "-C",
+ "mps3_board.telnetterminal0.start_telnet=0",
+ "-C",
+ "mps3_board.uart0.out_file='-'",
+ "-C",
+ "mps3_board.uart0.shutdown_on_eot=1",
+ "-C",
+ "mps3_board.visualisation.disable-visualisation=1",
+ "--stat",
+ ]
+
+ return Command(cmd)
+
+
+def get_metrics(
+ backend_path: Path,
+ fvp: str,
+ target: str,
+ mac: int,
+ model: Path,
+ profile: str = "default",
) -> PerformanceMetrics:
- """Get performance estimations."""
- output_parser = GenericInferenceOutputParser()
- output_consumers = [output_parser, LogWriter()]
+ """Run generic inference and return perf metrics."""
+ try:
+ command = build_corstone_command(
+ backend_path,
+ fvp,
+ target,
+ mac,
+ model,
+ profile,
+ )
+ except Exception as err:
+ raise BackendExecutionFailed(
+ f"Unable to construct a command line for {fvp}"
+ ) from err
- generic_runner = get_generic_runner(device_info, backend)
- generic_runner.run(model_info, output_consumers)
+ output_parser = GenericInferenceOutputParser()
- if not output_parser.is_ready():
- missed_data = ",".join(output_parser.missed_keys())
- logger.debug("Unable to get performance metrics, missed data %s", missed_data)
- raise Exception("Unable to get performance metrics, insufficient data")
+ def redirect_to_log(line: str) -> None:
+ """Redirect FVP output to the logger."""
+ logger.debug(line.strip())
- return PerformanceMetrics(**output_parser.result)
+ try:
+ process_command_output(
+ command,
+ [output_parser, redirect_to_log],
+ )
+ except subprocess.CalledProcessError as err:
+ raise BackendExecutionFailed("Backend execution failed.") from err
+ return output_parser.get_metrics()
-def get_backend_runner() -> BackendRunner:
- """
- Return BackendRunner instance.
- Note: This is needed for the unit tests.
- """
- return BackendRunner()
+def estimate_performance(
+ target: str, mac: int, model: Path, backend: str
+) -> PerformanceMetrics:
+ """Get performance estimations."""
+ backend_repo = get_backend_repository()
+ backend_path, settings = backend_repo.get_backend_settings(backend)
+
+ if not settings or "profile" not in settings:
+ raise BackendExecutionFailed(f"Unable to configure backend {backend}.")
+
+ return get_metrics(
+ backend_path,
+ backend,
+ target,
+ mac,
+ model,
+ settings["profile"],
+ )
diff --git a/src/mlia/backend/errors.py b/src/mlia/backend/errors.py
index bd5da95..cf0ffad 100644
--- a/src/mlia/backend/errors.py
+++ b/src/mlia/backend/errors.py
@@ -1,4 +1,4 @@
-# SPDX-FileCopyrightText: Copyright 2022, Arm Limited and/or its affiliates.
+# SPDX-FileCopyrightText: Copyright 2022-2023, Arm Limited and/or its affiliates.
# SPDX-License-Identifier: Apache-2.0
"""Backend errors."""
@@ -10,3 +10,7 @@ class BackendUnavailableError(Exception):
"""Init error."""
super().__init__(msg)
self.backend = backend
+
+
+class BackendExecutionFailed(Exception):
+ """Backend execution failed."""
diff --git a/src/mlia/backend/executor/__init__.py b/src/mlia/backend/executor/__init__.py
deleted file mode 100644
index 3d60372..0000000
--- a/src/mlia/backend/executor/__init__.py
+++ /dev/null
@@ -1,3 +0,0 @@
-# SPDX-FileCopyrightText: Copyright 2022, Arm Limited and/or its affiliates.
-# SPDX-License-Identifier: Apache-2.0
-"""Backend module."""
diff --git a/src/mlia/backend/executor/application.py b/src/mlia/backend/executor/application.py
deleted file mode 100644
index 738ac4e..0000000
--- a/src/mlia/backend/executor/application.py
+++ /dev/null
@@ -1,170 +0,0 @@
-# SPDX-FileCopyrightText: Copyright 2022, Arm Limited and/or its affiliates.
-# SPDX-License-Identifier: Apache-2.0
-"""Application backend module."""
-from __future__ import annotations
-
-import re
-from pathlib import Path
-from typing import Any
-from typing import cast
-from typing import List
-
-from mlia.backend.executor.common import Backend
-from mlia.backend.executor.common import ConfigurationException
-from mlia.backend.executor.common import get_backend_configs
-from mlia.backend.executor.common import get_backend_directories
-from mlia.backend.executor.common import load_application_configs
-from mlia.backend.executor.common import load_config
-from mlia.backend.executor.common import remove_backend
-from mlia.backend.executor.config import ApplicationConfig
-from mlia.backend.executor.config import ExtendedApplicationConfig
-from mlia.backend.executor.fs import get_backends_path
-from mlia.backend.executor.source import create_destination_and_install
-from mlia.backend.executor.source import get_source
-
-
-def get_available_application_directory_names() -> list[str]:
- """Return a list of directory names for all available applications."""
- return [entry.name for entry in get_backend_directories("applications")]
-
-
-def get_available_applications() -> list[Application]:
- """Return a list with all available applications."""
- available_applications = []
- for config_json in get_backend_configs("applications"):
- config_entries = cast(List[ExtendedApplicationConfig], load_config(config_json))
- for config_entry in config_entries:
- config_entry["config_location"] = config_json.parent.absolute()
- applications = load_applications(config_entry)
- available_applications += applications
-
- return sorted(available_applications, key=lambda application: application.name)
-
-
-def get_application(
- application_name: str, system_name: str | None = None
-) -> list[Application]:
- """Return a list of application instances with provided name."""
- return [
- application
- for application in get_available_applications()
- if application.name == application_name
- and (not system_name or application.can_run_on(system_name))
- ]
-
-
-def install_application(source_path: Path) -> None:
- """Install application."""
- try:
- source = get_source(source_path)
- config = cast(List[ExtendedApplicationConfig], source.config())
- applications_to_install = [
- s for entry in config for s in load_applications(entry)
- ]
- except Exception as error:
- raise ConfigurationException("Unable to read application definition") from error
-
- if not applications_to_install:
- raise ConfigurationException("No application definition found")
-
- available_applications = get_available_applications()
- already_installed = [
- s for s in applications_to_install if s in available_applications
- ]
- if already_installed:
- names = {application.name for application in already_installed}
- raise ConfigurationException(
- f"Applications [{','.join(names)}] are already installed."
- )
-
- create_destination_and_install(source, get_backends_path("applications"))
-
-
-def remove_application(directory_name: str) -> None:
- """Remove application directory."""
- remove_backend(directory_name, "applications")
-
-
-def get_unique_application_names(system_name: str | None = None) -> list[str]:
- """Extract a list of unique application names of all application available."""
- return list(
- {
- application.name
- for application in get_available_applications()
- if not system_name or application.can_run_on(system_name)
- }
- )
-
-
-class Application(Backend):
- """Class for representing a single application component."""
-
- def __init__(self, config: ApplicationConfig) -> None:
- """Construct a Application instance from a dict."""
- super().__init__(config)
-
- self.supported_systems = config.get("supported_systems", [])
-
- def __eq__(self, other: object) -> bool:
- """Overload operator ==."""
- if not isinstance(other, Application):
- return False
-
- return (
- super().__eq__(other)
- and self.name == other.name
- and set(self.supported_systems) == set(other.supported_systems)
- )
-
- def can_run_on(self, system_name: str) -> bool:
- """Check if the application can run on the system passed as argument."""
- return system_name in self.supported_systems
-
- def get_details(self) -> dict[str, Any]:
- """Return dictionary with information about the Application instance."""
- output = {
- "type": "application",
- "name": self.name,
- "description": self.description,
- "supported_systems": self.supported_systems,
- "commands": self._get_command_details(),
- }
-
- return output
-
- def remove_unused_params(self) -> None:
- """Remove unused params in commands.
-
- After merging default and system related configuration application
- could have parameters that are not being used in commands. They
- should be removed.
- """
- for command in self.commands.values():
- indexes_or_aliases = [
- m
- for cmd_str in command.command_strings
- for m in re.findall(r"{user_params:(?P<index_or_alias>\w+)}", cmd_str)
- ]
-
- only_aliases = all(not item.isnumeric() for item in indexes_or_aliases)
- if only_aliases:
- used_params = [
- param
- for param in command.params
- if param.alias in indexes_or_aliases
- ]
- command.params = used_params
-
-
-def load_applications(config: ExtendedApplicationConfig) -> list[Application]:
- """Load application.
-
- Application configuration could contain different parameters/commands for different
- supported systems. For each supported system this function will return separate
- Application instance with appropriate configuration.
- """
- configs = load_application_configs(config, ApplicationConfig)
- applications = [Application(cfg) for cfg in configs]
- for application in applications:
- application.remove_unused_params()
- return applications
diff --git a/src/mlia/backend/executor/common.py b/src/mlia/backend/executor/common.py
deleted file mode 100644
index 48dbd4a..0000000
--- a/src/mlia/backend/executor/common.py
+++ /dev/null
@@ -1,517 +0,0 @@
-# SPDX-FileCopyrightText: Copyright 2022, Arm Limited and/or its affiliates.
-# SPDX-License-Identifier: Apache-2.0
-"""Contain all common functions for the backends."""
-from __future__ import annotations
-
-import json
-import logging
-import re
-from abc import ABC
-from collections import Counter
-from pathlib import Path
-from typing import Any
-from typing import Callable
-from typing import cast
-from typing import Final
-from typing import IO
-from typing import Iterable
-from typing import Match
-from typing import NamedTuple
-from typing import Pattern
-
-from mlia.backend.executor.config import BackendConfig
-from mlia.backend.executor.config import BaseBackendConfig
-from mlia.backend.executor.config import NamedExecutionConfig
-from mlia.backend.executor.config import UserParamConfig
-from mlia.backend.executor.config import UserParamsConfig
-from mlia.backend.executor.fs import get_backends_path
-from mlia.backend.executor.fs import remove_resource
-from mlia.backend.executor.fs import ResourceType
-
-
-BACKEND_CONFIG_FILE: Final[str] = "backend-config.json"
-
-
-class ConfigurationException(Exception):
- """Configuration exception."""
-
-
-def get_backend_config(dir_path: Path) -> Path:
- """Get path to backendir configuration file."""
- return dir_path / BACKEND_CONFIG_FILE
-
-
-def get_backend_configs(resource_type: ResourceType) -> Iterable[Path]:
- """Get path to the backend configs for provided resource_type."""
- return (
- get_backend_config(entry) for entry in get_backend_directories(resource_type)
- )
-
-
-def get_backend_directories(resource_type: ResourceType) -> Iterable[Path]:
- """Get path to the backend directories for provided resource_type."""
- return (
- entry
- for entry in get_backends_path(resource_type).iterdir()
- if is_backend_directory(entry)
- )
-
-
-def is_backend_directory(dir_path: Path) -> bool:
- """Check if path is backend's configuration directory."""
- return dir_path.is_dir() and get_backend_config(dir_path).is_file()
-
-
-def remove_backend(directory_name: str, resource_type: ResourceType) -> None:
- """Remove backend with provided type and directory_name."""
- if not directory_name:
- raise Exception("No directory name provided")
-
- remove_resource(directory_name, resource_type)
-
-
-def load_config(config: Path | IO[bytes] | None) -> BackendConfig:
- """Return a loaded json file."""
- if config is None:
- raise Exception("Unable to read config")
-
- if isinstance(config, Path):
- with config.open() as json_file:
- return cast(BackendConfig, json.load(json_file))
-
- return cast(BackendConfig, json.load(config))
-
-
-def parse_raw_parameter(parameter: str) -> tuple[str, str | None]:
- """Split the parameter string in name and optional value.
-
- It manages the following cases:
- --param=1 -> --param, 1
- --param 1 -> --param, 1
- --flag -> --flag, None
- """
- data = re.split(" |=", parameter)
- if len(data) == 1:
- param_name = data[0]
- param_value = None
- else:
- param_name = " ".join(data[0:-1])
- param_value = data[-1]
- return param_name, param_value
-
-
-class DataPaths(NamedTuple):
- """DataPaths class."""
-
- src: Path
- dst: str
-
-
-class Backend(ABC):
- """Backend class."""
-
- # pylint: disable=too-many-instance-attributes
-
- def __init__(self, config: BaseBackendConfig):
- """Initialize backend."""
- name = config.get("name")
- if not name:
- raise ConfigurationException("Name is empty")
-
- self.name = name
- self.description = config.get("description", "")
- self.config_location = config.get("config_location")
- self.variables = config.get("variables", {})
- self.annotations = config.get("annotations", {})
-
- self._parse_commands_and_params(config)
-
- def validate_parameter(self, command_name: str, parameter: str) -> bool:
- """Validate the parameter string against the application configuration.
-
- We take the parameter string, extract the parameter name/value and
- check them against the current configuration.
- """
- param_name, param_value = parse_raw_parameter(parameter)
- valid_param_name = valid_param_value = False
-
- command = self.commands.get(command_name)
- if not command:
- raise AttributeError(f"Unknown command: '{command_name}'")
-
- # Iterate over all available parameters until we have a match.
- for param in command.params:
- if self._same_parameter(param_name, param):
- valid_param_name = True
- # This is a non-empty list
- if param.values:
- # We check if the value is allowed in the configuration
- valid_param_value = param_value in param.values
- else:
- # In this case we don't validate the value and accept
- # whatever we have set.
- valid_param_value = True
- break
-
- return valid_param_name and valid_param_value
-
- def __eq__(self, other: object) -> bool:
- """Overload operator ==."""
- if not isinstance(other, Backend):
- return False
-
- return (
- self.name == other.name
- and self.description == other.description
- and self.commands == other.commands
- )
-
- def __repr__(self) -> str:
- """Represent the Backend instance by its name."""
- return self.name
-
- def _parse_commands_and_params(self, config: BaseBackendConfig) -> None:
- """Parse commands and user parameters."""
- self.commands: dict[str, Command] = {}
-
- commands = config.get("commands")
- if commands:
- params = config.get("user_params")
-
- for command_name in commands.keys():
- command_params = self._parse_params(params, command_name)
- command_strings = [
- self._substitute_variables(cmd)
- for cmd in commands.get(command_name, [])
- ]
- self.commands[command_name] = Command(command_strings, command_params)
-
- def _substitute_variables(self, str_val: str) -> str:
- """Substitute variables in string.
-
- Variables is being substituted at backend's creation stage because
- they could contain references to other params which will be
- resolved later.
- """
- if not str_val:
- return str_val
-
- var_pattern: Final[Pattern] = re.compile(r"{variables:(?P<var_name>\w+)}")
-
- def var_value(match: Match) -> str:
- var_name = match["var_name"]
- if var_name not in self.variables:
- raise ConfigurationException(f"Unknown variable {var_name}")
-
- return self.variables[var_name]
-
- return var_pattern.sub(var_value, str_val)
-
- @classmethod
- def _parse_params(
- cls, params: UserParamsConfig | None, command: str
- ) -> list[Param]:
- if not params:
- return []
-
- return [cls._parse_param(p) for p in params.get(command, [])]
-
- @classmethod
- def _parse_param(cls, param: UserParamConfig) -> Param:
- """Parse a single parameter."""
- name = param.get("name")
- if name is not None and not name:
- raise ConfigurationException("Parameter has an empty 'name' attribute.")
- values = param.get("values", None)
- default_value = param.get("default_value", None)
- description = param.get("description", "")
- alias = param.get("alias")
-
- return Param(
- name=name,
- description=description,
- values=values,
- default_value=default_value,
- alias=alias,
- )
-
- def _get_command_details(self) -> dict:
- command_details = {
- command_name: command.get_details()
- for command_name, command in self.commands.items()
- }
- return command_details
-
- def _get_user_param_value(self, user_params: list[str], param: Param) -> str | None:
- """Get the user-specified value of a parameter."""
- for user_param in user_params:
- user_param_name, user_param_value = parse_raw_parameter(user_param)
- if user_param_name == param.name:
- warn_message = (
- "The direct use of parameter name is deprecated"
- " and might be removed in the future.\n"
- f"Please use alias '{param.alias}' instead of "
- "'{user_param_name}' to provide the parameter."
- )
- logging.warning(warn_message)
-
- if self._same_parameter(user_param_name, param):
- return user_param_value
-
- return None
-
- @staticmethod
- def _same_parameter(user_param_name_or_alias: str, param: Param) -> bool:
- """Compare user parameter name with param name or alias."""
- # Strip the "=" sign in the param_name. This is needed just for
- # comparison with the parameters passed by the user.
- # The equal sign needs to be honoured when re-building the
- # parameter back.
- param_name = None if not param.name else param.name.rstrip("=")
- return user_param_name_or_alias in [param_name, param.alias]
-
- def resolved_parameters(
- self, command_name: str, user_params: list[str]
- ) -> list[tuple[str | None, Param]]:
- """Return list of parameters with values."""
- result: list[tuple[str | None, Param]] = []
- command = self.commands.get(command_name)
- if not command:
- return result
-
- for param in command.params:
- value = self._get_user_param_value(user_params, param)
- if not value:
- value = param.default_value
- result.append((value, param))
-
- return result
-
- def build_command(
- self,
- command_name: str,
- user_params: list[str],
- param_resolver: Callable[[str, str, list[tuple[str | None, Param]]], str],
- ) -> list[str]:
- """
- Return a list of executable command strings.
-
- Given a command and associated parameters, returns a list of executable command
- strings.
- """
- command = self.commands.get(command_name)
- if not command:
- raise ConfigurationException(
- f"Command '{command_name}' could not be found."
- )
-
- commands_to_run = []
-
- params_values = self.resolved_parameters(command_name, user_params)
- for cmd_str in command.command_strings:
- cmd_str = resolve_all_parameters(
- cmd_str, param_resolver, command_name, params_values
- )
- commands_to_run.append(cmd_str)
-
- return commands_to_run
-
-
-class Param:
- """Class for representing a generic application parameter."""
-
- def __init__( # pylint: disable=too-many-arguments
- self,
- name: str | None,
- description: str,
- values: list[str] | None = None,
- default_value: str | None = None,
- alias: str | None = None,
- ) -> None:
- """Construct a Param instance."""
- if not name and not alias:
- raise ConfigurationException(
- "Either name, alias or both must be set to identify a parameter."
- )
- self.name = name
- self.values = values
- self.description = description
- self.default_value = default_value
- self.alias = alias
-
- def get_details(self) -> dict:
- """Return a dictionary with all relevant information of a Param."""
- return {key: value for key, value in self.__dict__.items() if value}
-
- def __eq__(self, other: object) -> bool:
- """Overload operator ==."""
- if not isinstance(other, Param):
- return False
-
- return (
- self.name == other.name
- and self.values == other.values
- and self.default_value == other.default_value
- and self.description == other.description
- )
-
-
-class Command:
- """Class for representing a command."""
-
- def __init__(
- self, command_strings: list[str], params: list[Param] | None = None
- ) -> None:
- """Construct a Command instance."""
- self.command_strings = command_strings
-
- if params:
- self.params = params
- else:
- self.params = []
-
- self._validate()
-
- def _validate(self) -> None:
- """Validate command."""
- if not self.params:
- return
-
- aliases = [param.alias for param in self.params if param.alias is not None]
- repeated_aliases = [
- alias for alias, count in Counter(aliases).items() if count > 1
- ]
-
- if repeated_aliases:
- raise ConfigurationException(
- f"Non-unique aliases {', '.join(repeated_aliases)}"
- )
-
- both_name_and_alias = [
- param.name
- for param in self.params
- if param.name in aliases and param.name != param.alias
- ]
- if both_name_and_alias:
- raise ConfigurationException(
- f"Aliases {', '.join(both_name_and_alias)} could not be used "
- "as parameter name."
- )
-
- def get_details(self) -> dict:
- """Return a dictionary with all relevant information of a Command."""
- output = {
- "command_strings": self.command_strings,
- "user_params": [param.get_details() for param in self.params],
- }
- return output
-
- def __eq__(self, other: object) -> bool:
- """Overload operator ==."""
- if not isinstance(other, Command):
- return False
-
- return (
- self.command_strings == other.command_strings
- and self.params == other.params
- )
-
-
-def resolve_all_parameters(
- str_val: str,
- param_resolver: Callable[[str, str, list[tuple[str | None, Param]]], str],
- command_name: str | None = None,
- params_values: list[tuple[str | None, Param]] | None = None,
-) -> str:
- """Resolve all parameters in the string."""
- if not str_val:
- return str_val
-
- param_pattern: Final[Pattern] = re.compile(r"{(?P<param_name>[\w.:]+)}")
- while param_pattern.findall(str_val):
- str_val = param_pattern.sub(
- lambda m: param_resolver(
- m["param_name"], command_name or "", params_values or []
- ),
- str_val,
- )
- return str_val
-
-
-def load_application_configs(
- config: Any,
- config_type: type[Any],
- is_system_required: bool = True,
-) -> Any:
- """Get one config for each system supported by the application.
-
- The configuration could contain different parameters/commands for different
- supported systems. For each supported system this function will return separate
- config with appropriate configuration.
- """
- merged_configs = []
- supported_systems: list[NamedExecutionConfig] | None = config.get(
- "supported_systems"
- )
- if not supported_systems:
- if is_system_required:
- raise ConfigurationException("No supported systems definition provided")
- # Create an empty system to be used in the parsing below
- supported_systems = [cast(NamedExecutionConfig, {})]
-
- default_user_params = config.get("user_params", {})
-
- def merge_config(system: NamedExecutionConfig) -> Any:
- system_name = system.get("name")
- if not system_name and is_system_required:
- raise ConfigurationException(
- "Unable to read supported system definition, name is missed"
- )
-
- merged_config = config_type(**config)
- merged_config["supported_systems"] = [system_name] if system_name else []
- # merge default configuration and specific to the system
- merged_config["commands"] = {
- **config.get("commands", {}),
- **system.get("commands", {}),
- }
-
- params = {}
- tool_user_params = system.get("user_params", {})
- command_names = tool_user_params.keys() | default_user_params.keys()
- for command_name in command_names:
- if command_name not in merged_config["commands"]:
- continue
-
- params_default = default_user_params.get(command_name, [])
- params_tool = tool_user_params.get(command_name, [])
- if not params_default or not params_tool:
- params[command_name] = params_tool or params_default
- if params_default and params_tool:
- if any(not p.get("alias") for p in params_default):
- raise ConfigurationException(
- f"Default parameters for command {command_name} "
- "should have aliases"
- )
- if any(not p.get("alias") for p in params_tool):
- raise ConfigurationException(
- f"{system_name} parameters for command {command_name} "
- "should have aliases."
- )
-
- merged_by_alias = {
- **{p.get("alias"): p for p in params_default},
- **{p.get("alias"): p for p in params_tool},
- }
- params[command_name] = list(merged_by_alias.values())
-
- merged_config["user_params"] = params
- merged_config["variables"] = {
- **config.get("variables", {}),
- **system.get("variables", {}),
- }
- return merged_config
-
- merged_configs = [merge_config(system) for system in supported_systems]
-
- return merged_configs
diff --git a/src/mlia/backend/executor/config.py b/src/mlia/backend/executor/config.py
deleted file mode 100644
index dca53da..0000000
--- a/src/mlia/backend/executor/config.py
+++ /dev/null
@@ -1,68 +0,0 @@
-# SPDX-FileCopyrightText: Copyright 2022, Arm Limited and/or its affiliates.
-# SPDX-License-Identifier: Apache-2.0
-"""Contain definition of backend configuration."""
-from __future__ import annotations
-
-from pathlib import Path
-from typing import Dict
-from typing import List
-from typing import TypedDict
-from typing import Union
-
-
-class UserParamConfig(TypedDict, total=False):
- """User parameter configuration."""
-
- name: str | None
- default_value: str
- values: list[str]
- description: str
- alias: str
-
-
-UserParamsConfig = Dict[str, List[UserParamConfig]]
-
-
-class ExecutionConfig(TypedDict, total=False):
- """Execution configuration."""
-
- commands: dict[str, list[str]]
- user_params: UserParamsConfig
- variables: dict[str, str]
-
-
-class NamedExecutionConfig(ExecutionConfig):
- """Execution configuration with name."""
-
- name: str
-
-
-class BaseBackendConfig(ExecutionConfig, total=False):
- """Base backend configuration."""
-
- name: str
- description: str
- config_location: Path
- annotations: dict[str, str | list[str]]
-
-
-class ApplicationConfig(BaseBackendConfig, total=False):
- """Application configuration."""
-
- supported_systems: list[str]
-
-
-class ExtendedApplicationConfig(BaseBackendConfig, total=False):
- """Extended application configuration."""
-
- supported_systems: list[NamedExecutionConfig]
-
-
-class SystemConfig(BaseBackendConfig, total=False):
- """System configuration."""
-
- reporting: dict[str, dict]
-
-
-BackendItemConfig = Union[ApplicationConfig, SystemConfig]
-BackendConfig = Union[List[ExtendedApplicationConfig], List[SystemConfig]]
diff --git a/src/mlia/backend/executor/execution.py b/src/mlia/backend/executor/execution.py
deleted file mode 100644
index e253b16..0000000
--- a/src/mlia/backend/executor/execution.py
+++ /dev/null
@@ -1,342 +0,0 @@
-# SPDX-FileCopyrightText: Copyright 2022, Arm Limited and/or its affiliates.
-# SPDX-License-Identifier: Apache-2.0
-"""Application execution module."""
-from __future__ import annotations
-
-import logging
-import re
-from typing import cast
-
-from mlia.backend.executor.application import Application
-from mlia.backend.executor.application import get_application
-from mlia.backend.executor.common import Backend
-from mlia.backend.executor.common import ConfigurationException
-from mlia.backend.executor.common import Param
-from mlia.backend.executor.system import get_system
-from mlia.backend.executor.system import System
-
-logger = logging.getLogger(__name__)
-
-
-class AnotherInstanceIsRunningException(Exception):
- """Concurrent execution error."""
-
-
-class ExecutionContext: # pylint: disable=too-few-public-methods
- """Command execution context."""
-
- def __init__(
- self,
- app: Application,
- app_params: list[str],
- system: System,
- system_params: list[str],
- ):
- """Init execution context."""
- self.app = app
- self.app_params = app_params
- self.system = system
- self.system_params = system_params
-
- self.param_resolver = ParamResolver(self)
-
- self.stdout: bytearray | None = None
- self.stderr: bytearray | None = None
-
-
-class ParamResolver:
- """Parameter resolver."""
-
- def __init__(self, context: ExecutionContext):
- """Init parameter resolver."""
- self.ctx = context
-
- @staticmethod
- def resolve_user_params(
- cmd_name: str | None,
- index_or_alias: str,
- resolved_params: list[tuple[str | None, Param]] | None,
- ) -> str:
- """Resolve user params."""
- if not cmd_name or resolved_params is None:
- raise ConfigurationException("Unable to resolve user params")
-
- param_value: str | None = None
- param: Param | None = None
-
- if index_or_alias.isnumeric():
- i = int(index_or_alias)
- if i not in range(len(resolved_params)):
- raise ConfigurationException(
- f"Invalid index {i} for user params of command {cmd_name}"
- )
- param_value, param = resolved_params[i]
- else:
- for val, par in resolved_params:
- if par.alias == index_or_alias:
- param_value, param = val, par
- break
-
- if param is None:
- raise ConfigurationException(
- f"No user parameter for command '{cmd_name}' with "
- f"alias '{index_or_alias}'."
- )
-
- if param_value:
- # We need to handle to cases of parameters here:
- # 1) Optional parameters (non-positional with a name and value)
- # 2) Positional parameters (value only, no name needed)
- # Default to empty strings for positional arguments
- param_name = ""
- separator = ""
- if param.name is not None:
- # A valid param name means we have an optional/non-positional argument:
- # The separator is an empty string in case the param_name
- # has an equal sign as we have to honour it.
- # If the parameter doesn't end with an equal sign then a
- # space character is injected to split the parameter name
- # and its value
- param_name = param.name
- separator = "" if param.name.endswith("=") else " "
-
- return f"{param_name}{separator}{param_value}"
-
- if param.name is None:
- raise ConfigurationException(
- f"Missing user parameter with alias '{index_or_alias}' for "
- f"command '{cmd_name}'."
- )
-
- return param.name # flag: just return the parameter name
-
- def resolve_commands_and_params(
- self, backend_type: str, cmd_name: str, return_params: bool, index_or_alias: str
- ) -> str:
- """Resolve command or command's param value."""
- if backend_type == "system":
- backend = cast(Backend, self.ctx.system)
- backend_params = self.ctx.system_params
- else: # Application backend
- backend = cast(Backend, self.ctx.app)
- backend_params = self.ctx.app_params
-
- if cmd_name not in backend.commands:
- raise ConfigurationException(f"Command {cmd_name} not found")
-
- if return_params:
- params = backend.resolved_parameters(cmd_name, backend_params)
- if index_or_alias.isnumeric():
- i = int(index_or_alias)
- if i not in range(len(params)):
- raise ConfigurationException(
- f"Invalid parameter index {i} for command {cmd_name}"
- )
-
- param_value = params[i][0]
- else:
- param_value = None
- for value, param in params:
- if param.alias == index_or_alias:
- param_value = value
- break
-
- if not param_value:
- raise ConfigurationException(
- "No value for parameter with index or "
- f"alias {index_or_alias} of command {cmd_name}."
- )
- return param_value
-
- if not index_or_alias.isnumeric():
- raise ConfigurationException(f"Bad command index {index_or_alias}")
-
- i = int(index_or_alias)
- commands = backend.build_command(cmd_name, backend_params, self.param_resolver)
- if i not in range(len(commands)):
- raise ConfigurationException(f"Invalid index {i} for command {cmd_name}")
-
- return commands[i]
-
- def resolve_variables(self, backend_type: str, var_name: str) -> str:
- """Resolve variable value."""
- if backend_type == "system":
- backend = cast(Backend, self.ctx.system)
- else: # Application backend
- backend = cast(Backend, self.ctx.app)
-
- if var_name not in backend.variables:
- raise ConfigurationException(f"Unknown variable {var_name}")
-
- return backend.variables[var_name]
-
- def param_matcher(
- self,
- param_name: str,
- cmd_name: str | None,
- resolved_params: list[tuple[str | None, Param]] | None,
- ) -> str:
- """Regexp to resolve a param from the param_name."""
- # this pattern supports parameter names like "application.commands.run:0" and
- # "system.commands.run.params:0"
- # Note: 'software' is included for backward compatibility.
- commands_and_params_match = re.match(
- r"(?P<type>application|software|system)[.]commands[.]"
- r"(?P<name>\w+)"
- r"(?P<params>[.]params|)[:]"
- r"(?P<index_or_alias>\w+)",
- param_name,
- )
-
- if commands_and_params_match:
- backend_type, cmd_name, return_params, index_or_alias = (
- commands_and_params_match["type"],
- commands_and_params_match["name"],
- commands_and_params_match["params"],
- commands_and_params_match["index_or_alias"],
- )
- return self.resolve_commands_and_params(
- backend_type, cmd_name, bool(return_params), index_or_alias
- )
-
- # Note: 'software' is included for backward compatibility.
- variables_match = re.match(
- r"(?P<type>application|software|system)[.]variables:(?P<var_name>\w+)",
- param_name,
- )
- if variables_match:
- backend_type, var_name = (
- variables_match["type"],
- variables_match["var_name"],
- )
- return self.resolve_variables(backend_type, var_name)
-
- user_params_match = re.match(r"user_params:(?P<index_or_alias>\w+)", param_name)
- if user_params_match:
- index_or_alias = user_params_match["index_or_alias"]
- return self.resolve_user_params(cmd_name, index_or_alias, resolved_params)
-
- raise ConfigurationException(f"Unable to resolve parameter {param_name}")
-
- def param_resolver(
- self,
- param_name: str,
- cmd_name: str | None = None,
- resolved_params: list[tuple[str | None, Param]] | None = None,
- ) -> str:
- """Resolve parameter value based on current execution context."""
- # Note: 'software.*' is included for backward compatibility.
- resolved_param = None
- if param_name in ["application.name", "software.name"]:
- resolved_param = self.ctx.app.name
- elif param_name in ["application.description", "software.description"]:
- resolved_param = self.ctx.app.description
- elif self.ctx.app.config_location and (
- param_name in ["application.config_dir", "software.config_dir"]
- ):
- resolved_param = str(self.ctx.app.config_location.absolute())
- elif self.ctx.system is not None:
- if param_name == "system.name":
- resolved_param = self.ctx.system.name
- elif param_name == "system.description":
- resolved_param = self.ctx.system.description
- elif param_name == "system.config_dir" and self.ctx.system.config_location:
- resolved_param = str(self.ctx.system.config_location.absolute())
-
- if not resolved_param:
- resolved_param = self.param_matcher(param_name, cmd_name, resolved_params)
- return resolved_param
-
- def __call__(
- self,
- param_name: str,
- cmd_name: str | None = None,
- resolved_params: list[tuple[str | None, Param]] | None = None,
- ) -> str:
- """Resolve provided parameter."""
- return self.param_resolver(param_name, cmd_name, resolved_params)
-
-
-def validate_parameters(
- backend: Backend, command_names: list[str], params: list[str]
-) -> None:
- """Check parameters passed to backend."""
- for param in params:
- acceptable = any(
- backend.validate_parameter(command_name, param)
- for command_name in command_names
- if command_name in backend.commands
- )
-
- if not acceptable:
- backend_type = "System" if isinstance(backend, System) else "Application"
- raise ValueError(
- f"{backend_type} parameter '{param}' not valid for "
- f"command '{' or '.join(command_names)}'."
- )
-
-
-def get_application_by_name_and_system(
- application_name: str, system_name: str
-) -> Application:
- """Get application."""
- applications = get_application(application_name, system_name)
- if not applications:
- raise ValueError(
- f"Application '{application_name}' doesn't support the "
- f"system '{system_name}'."
- )
-
- if len(applications) != 1:
- raise ValueError(
- f"Error during getting application {application_name} for the "
- f"system {system_name}."
- )
-
- return applications[0]
-
-
-def get_application_and_system(
- application_name: str, system_name: str
-) -> tuple[Application, System]:
- """Return application and system by provided names."""
- system = get_system(system_name)
- if not system:
- raise ValueError(f"System {system_name} is not found.")
-
- application = get_application_by_name_and_system(application_name, system_name)
-
- return application, system
-
-
-def run_application(
- application_name: str,
- application_params: list[str],
- system_name: str,
- system_params: list[str],
-) -> ExecutionContext:
- """Run application on the provided system."""
- application, system = get_application_and_system(application_name, system_name)
- validate_parameters(application, ["run"], application_params)
- validate_parameters(system, ["run"], system_params)
-
- ctx = ExecutionContext(
- app=application,
- app_params=application_params,
- system=system,
- system_params=system_params,
- )
-
- logger.debug("Generating commands to execute")
- commands_to_run = ctx.system.build_command(
- "run", ctx.system_params, ctx.param_resolver
- )
-
- for command in commands_to_run:
- logger.debug("Running: %s", command)
- exit_code, ctx.stdout, ctx.stderr = ctx.system.run(command)
-
- if exit_code != 0:
- logger.warning("Application exited with exit code %i", exit_code)
-
- return ctx
diff --git a/src/mlia/backend/executor/fs.py b/src/mlia/backend/executor/fs.py
deleted file mode 100644
index 3fce19c..0000000
--- a/src/mlia/backend/executor/fs.py
+++ /dev/null
@@ -1,88 +0,0 @@
-# SPDX-FileCopyrightText: Copyright 2022, Arm Limited and/or its affiliates.
-# SPDX-License-Identifier: Apache-2.0
-"""Module to host all file system related functions."""
-from __future__ import annotations
-
-import re
-import shutil
-from pathlib import Path
-from typing import Literal
-
-from mlia.utils.filesystem import get_mlia_resources
-
-ResourceType = Literal["applications", "systems"]
-
-
-def get_backend_resources() -> Path:
- """Get backend resources folder path."""
- return get_mlia_resources() / "backends"
-
-
-def get_backends_path(name: ResourceType) -> Path:
- """Return the absolute path of the specified resource.
-
- It uses importlib to return resources packaged with MANIFEST.in.
- """
- if not name:
- raise ResourceWarning("Resource name is not provided")
-
- resource_path = get_backend_resources() / name
- if resource_path.is_dir():
- return resource_path
-
- raise ResourceWarning(f"Resource '{name}' not found.")
-
-
-def copy_directory_content(source: Path, destination: Path) -> None:
- """Copy content of the source directory into destination directory."""
- for item in source.iterdir():
- src = source / item.name
- dest = destination / item.name
-
- if src.is_dir():
- shutil.copytree(src, dest)
- else:
- shutil.copy2(src, dest)
-
-
-def remove_resource(resource_directory: str, resource_type: ResourceType) -> None:
- """Remove resource data."""
- resources = get_backends_path(resource_type)
-
- resource_location = resources / resource_directory
- if not resource_location.exists():
- raise Exception(f"Resource {resource_directory} does not exist")
-
- if not resource_location.is_dir():
- raise Exception(f"Wrong resource {resource_directory}")
-
- shutil.rmtree(resource_location)
-
-
-def remove_directory(directory_path: Path | None) -> None:
- """Remove directory."""
- if not directory_path or not directory_path.is_dir():
- raise Exception("No directory path provided")
-
- shutil.rmtree(directory_path)
-
-
-def recreate_directory(directory_path: Path | None) -> None:
- """Recreate directory."""
- if not directory_path:
- raise Exception("No directory path provided")
-
- if directory_path.exists() and not directory_path.is_dir():
- raise Exception(
- f"Path {str(directory_path)} does exist and it is not a directory."
- )
-
- if directory_path.is_dir():
- remove_directory(directory_path)
-
- directory_path.mkdir()
-
-
-def valid_for_filename(value: str, replacement: str = "") -> str:
- """Replace non alpha numeric characters."""
- return re.sub(r"[^\w.]", replacement, value, flags=re.ASCII)
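
The removed valid_for_filename helper keeps only ASCII word characters and dots, which is how proc.py derived safe temporary file names from command strings. A quick, self-contained illustration of that behaviour:

    import re

    def valid_for_filename(value: str, replacement: str = "") -> str:
        # Same regular expression as the removed helper above.
        return re.sub(r"[^\w.]", replacement, value, flags=re.ASCII)

    print(valid_for_filename("Corstone-300: Cortex-M55+Ethos-U55", "_"))
    # Corstone_300__Cortex_M55_Ethos_U55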
diff --git a/src/mlia/backend/executor/output_consumer.py b/src/mlia/backend/executor/output_consumer.py
deleted file mode 100644
index 3c3b132..0000000
--- a/src/mlia/backend/executor/output_consumer.py
+++ /dev/null
@@ -1,67 +0,0 @@
-# SPDX-FileCopyrightText: Copyright 2022, Arm Limited and/or its affiliates.
-# SPDX-License-Identifier: Apache-2.0
-"""Output consumers module."""
-from __future__ import annotations
-
-import base64
-import json
-import re
-from typing import Protocol
-from typing import runtime_checkable
-
-
-@runtime_checkable
-class OutputConsumer(Protocol):
- """Protocol to consume output."""
-
- def feed(self, line: str) -> bool:
- """
- Feed a new line to be parsed.
-
- Return True if the line should be removed from the output.
- """
-
-
-class Base64OutputConsumer(OutputConsumer):
- """
- Parser to extract base64-encoded JSON from tagged standard output.
-
- Example of the tagged output:
- ```
- # Encoded JSON: {"test": 1234}
- <metrics>eyJ0ZXN0IjogMTIzNH0</metrics>
- ```
- """
-
- TAG_NAME = "metrics"
-
- def __init__(self) -> None:
- """Set up the regular expression to extract tagged strings."""
- self._regex = re.compile(rf"<{self.TAG_NAME}>(.*)</{self.TAG_NAME}>")
- self.parsed_output: list = []
-
- def feed(self, line: str) -> bool:
- """
- Parse the output line and save the decoded output.
-
- Returns True if the line contains tagged output.
-
- Example:
- Using the tagged output from the class docs the parser should collect
- the following:
- ```
- [
- {"test": 1234}
- ]
- ```
- """
- res_b64 = self._regex.search(line)
- if res_b64:
- res_json = base64.b64decode(res_b64.group(1), validate=True)
- res = json.loads(res_json)
- self.parsed_output.append(res)
- # Remove this line from the output, i.e. consume it, as it
- # does not contain any human readable content.
- return True
-
- return False
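
The tagged-output protocol documented in the class docstring above is easy to reproduce. This standalone sketch (not part of the patch) shows the decoding the removed parser performed on a single line:

    import base64
    import json
    import re

    line = "<metrics>eyJ0ZXN0IjogMTIzNH0=</metrics>"

    match = re.search(r"<metrics>(.*)</metrics>", line)
    if match:
        # Decode the base64 payload and parse it as JSON.
        decoded = json.loads(base64.b64decode(match.group(1), validate=True))
        print(decoded)  # {'test': 1234}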
diff --git a/src/mlia/backend/executor/proc.py b/src/mlia/backend/executor/proc.py
deleted file mode 100644
index 39a0689..0000000
--- a/src/mlia/backend/executor/proc.py
+++ /dev/null
@@ -1,191 +0,0 @@
-# SPDX-FileCopyrightText: Copyright 2022, Arm Limited and/or its affiliates.
-# SPDX-License-Identifier: Apache-2.0
-"""Processes module.
-
-This module contains all classes and functions for dealing with Linux
-processes.
-"""
-from __future__ import annotations
-
-import datetime
-import logging
-import shlex
-import signal
-import tempfile
-import time
-from pathlib import Path
-from typing import Any
-
-from sh import Command
-from sh import CommandNotFound
-from sh import ErrorReturnCode
-from sh import RunningCommand
-
-from mlia.backend.executor.fs import valid_for_filename
-
-logger = logging.getLogger(__name__)
-
-
-class CommandFailedException(Exception):
- """Exception for failed command execution."""
-
-
-class ShellCommand:
- """Wrapper class for shell commands."""
-
- def run(
- self,
- cmd: str,
- *args: str,
- _cwd: Path | None = None,
- _tee: bool = True,
- _bg: bool = True,
- _out: Any = None,
- _err: Any = None,
- _search_paths: list[Path] | None = None,
- ) -> RunningCommand:
- """Run the shell command with the given arguments.
-
- There are special arguments that modify the behaviour of the process.
- _cwd: current working directory
- _tee: it redirects the stdout both to console and file
- _bg: if True, it runs the process in background and the command is not
- blocking.
- _out: use this object for stdout redirect,
- _err: use this object for stderr redirect,
-        _search_paths: if present, used to locate the executable
- """
- try:
- kwargs = {}
- if _cwd:
- kwargs["_cwd"] = str(_cwd)
- command = Command(cmd, _search_paths).bake(args, **kwargs)
- except CommandNotFound as error:
- logging.error("Command '%s' not found", error.args[0])
- raise error
-
- out, err = _out, _err
- if not _out and not _err:
- out, err = (str(item) for item in self.get_stdout_stderr_paths(cmd))
-
- return command(_out=out, _err=err, _tee=_tee, _bg=_bg, _bg_exc=False)
-
- @classmethod
- def get_stdout_stderr_paths(cls, cmd: str) -> tuple[Path, Path]:
- """Construct and returns the paths of stdout/stderr files."""
- timestamp = datetime.datetime.now().timestamp()
- base_path = Path(tempfile.mkdtemp(prefix="mlia-", suffix=f"{timestamp}"))
- base = base_path / f"{valid_for_filename(cmd, '_')}_{timestamp}"
- stdout = base.with_suffix(".out")
- stderr = base.with_suffix(".err")
- try:
- stdout.touch()
- stderr.touch()
- except FileNotFoundError as error:
- logging.error("File not found: %s", error.filename)
- raise error
- return stdout, stderr
-
-
-def parse_command(command: str, shell: str = "bash") -> list[str]:
- """Parse command."""
- cmd, *args = shlex.split(command, posix=True)
-
- if is_shell_script(cmd):
- args = [cmd] + args
- cmd = shell
-
- return [cmd] + args
-
-
-def execute_command( # pylint: disable=invalid-name
- command: str,
- cwd: Path,
- bg: bool = False,
- shell: str = "bash",
- out: Any = None,
- err: Any = None,
-) -> RunningCommand:
- """Execute shell command."""
- cmd, *args = parse_command(command, shell)
-
- search_paths = None
- if cmd != shell and (cwd / cmd).is_file():
- search_paths = [cwd]
-
- return ShellCommand().run(
- cmd, *args, _cwd=cwd, _bg=bg, _search_paths=search_paths, _out=out, _err=err
- )
-
-
-def is_shell_script(cmd: str) -> bool:
- """Check if command is shell script."""
- return cmd.endswith(".sh")
-
-
-def run_and_wait(
- command: str,
- cwd: Path,
- terminate_on_error: bool = False,
- out: Any = None,
- err: Any = None,
-) -> tuple[int, bytearray, bytearray]:
- """
- Run command and wait while it is executing.
-
- Returns a tuple: (exit_code, stdout, stderr)
- """
- running_cmd: RunningCommand | None = None
- try:
- running_cmd = execute_command(command, cwd, bg=True, out=out, err=err)
- return running_cmd.exit_code, running_cmd.stdout, running_cmd.stderr
- except ErrorReturnCode as cmd_failed:
- raise CommandFailedException() from cmd_failed
- except Exception as error:
- is_running = running_cmd is not None and running_cmd.is_alive()
- if terminate_on_error and is_running:
- logger.debug("Terminating ...")
- terminate_command(running_cmd)
-
- raise error
-
-
-def terminate_command(
- running_cmd: RunningCommand,
- wait: bool = True,
- wait_period: float = 0.5,
- number_of_attempts: int = 20,
-) -> None:
- """Terminate running command."""
- try:
- running_cmd.process.signal_group(signal.SIGINT)
- if wait:
- for _ in range(number_of_attempts):
- time.sleep(wait_period)
- if not running_cmd.is_alive():
- return
- logger.error(
- "Unable to terminate process %i. Sending SIGTERM...",
- running_cmd.process.pid,
- )
- running_cmd.process.signal_group(signal.SIGTERM)
- except ProcessLookupError:
- pass
-
-
-def print_command_stdout(command: RunningCommand) -> None:
- """Print the stdout of a command.
-
- The command has 2 states: running and done.
- If the command is running, the output is taken by the running process.
- If the command has ended its execution, the stdout is taken from stdout
- property
- """
- if command.is_alive():
- while True:
- try:
- print(command.next(), end="")
- except StopIteration:
- break
- else:
- print(command.stdout)
diff --git a/src/mlia/backend/executor/runner.py b/src/mlia/backend/executor/runner.py
deleted file mode 100644
index 2330fd9..0000000
--- a/src/mlia/backend/executor/runner.py
+++ /dev/null
@@ -1,98 +0,0 @@
-# SPDX-FileCopyrightText: Copyright 2022, Arm Limited and/or its affiliates.
-# SPDX-License-Identifier: Apache-2.0
-"""Module for backend runner."""
-from __future__ import annotations
-
-from dataclasses import dataclass
-from pathlib import Path
-
-from mlia.backend.executor.application import get_available_applications
-from mlia.backend.executor.application import install_application
-from mlia.backend.executor.execution import ExecutionContext
-from mlia.backend.executor.execution import run_application
-from mlia.backend.executor.system import get_available_systems
-from mlia.backend.executor.system import install_system
-
-
-@dataclass
-class ExecutionParams:
- """Application execution params."""
-
- application: str
- system: str
- application_params: list[str]
- system_params: list[str]
-
-
-class BackendRunner:
- """Backend runner."""
-
- def __init__(self) -> None:
- """Init BackendRunner instance."""
-
- @staticmethod
- def get_installed_systems() -> list[str]:
- """Get list of the installed systems."""
- return [system.name for system in get_available_systems()]
-
- @staticmethod
- def get_installed_applications(system: str | None = None) -> list[str]:
- """Get list of the installed application."""
- return [
- app.name
- for app in get_available_applications()
- if system is None or app.can_run_on(system)
- ]
-
- def is_application_installed(self, application: str, system: str) -> bool:
- """Return true if requested application installed."""
- return application in self.get_installed_applications(system)
-
- def is_system_installed(self, system: str) -> bool:
- """Return true if requested system installed."""
- return system in self.get_installed_systems()
-
- def systems_installed(self, systems: list[str]) -> bool:
- """Check if all provided systems are installed."""
- if not systems:
- return False
-
- installed_systems = self.get_installed_systems()
- return all(system in installed_systems for system in systems)
-
- def applications_installed(self, applications: list[str]) -> bool:
- """Check if all provided applications are installed."""
- if not applications:
- return False
-
- installed_apps = self.get_installed_applications()
- return all(app in installed_apps for app in applications)
-
- def all_installed(self, systems: list[str], apps: list[str]) -> bool:
- """Check if all provided artifacts are installed."""
- return self.systems_installed(systems) and self.applications_installed(apps)
-
- @staticmethod
- def install_system(system_path: Path) -> None:
- """Install system."""
- install_system(system_path)
-
- @staticmethod
- def install_application(app_path: Path) -> None:
- """Install application."""
- install_application(app_path)
-
- @staticmethod
- def run_application(execution_params: ExecutionParams) -> ExecutionContext:
- """Run requested application."""
- ctx = run_application(
- execution_params.application,
- execution_params.application_params,
- execution_params.system,
- execution_params.system_params,
- )
- return ctx
-
- @staticmethod
- def _params(name: str, params: list[str]) -> list[str]:
- return [p for item in [(name, param) for param in params] for p in item]
diff --git a/src/mlia/backend/executor/source.py b/src/mlia/backend/executor/source.py
deleted file mode 100644
index 6abc49f..0000000
--- a/src/mlia/backend/executor/source.py
+++ /dev/null
@@ -1,207 +0,0 @@
-# SPDX-FileCopyrightText: Copyright 2022, Arm Limited and/or its affiliates.
-# SPDX-License-Identifier: Apache-2.0
-"""Contain source related classes and functions."""
-from __future__ import annotations
-
-import os
-import shutil
-import tarfile
-from abc import ABC
-from abc import abstractmethod
-from pathlib import Path
-from tarfile import TarFile
-
-from mlia.backend.executor.common import BACKEND_CONFIG_FILE
-from mlia.backend.executor.common import ConfigurationException
-from mlia.backend.executor.common import get_backend_config
-from mlia.backend.executor.common import is_backend_directory
-from mlia.backend.executor.common import load_config
-from mlia.backend.executor.config import BackendConfig
-from mlia.backend.executor.fs import copy_directory_content
-
-
-class Source(ABC):
- """Source class."""
-
- @abstractmethod
- def name(self) -> str | None:
- """Get source name."""
-
- @abstractmethod
- def config(self) -> BackendConfig | None:
- """Get configuration file content."""
-
- @abstractmethod
- def install_into(self, destination: Path) -> None:
- """Install source into destination directory."""
-
- @abstractmethod
- def create_destination(self) -> bool:
- """Return True if destination folder should be created before installation."""
-
-
-class DirectorySource(Source):
- """DirectorySource class."""
-
- def __init__(self, directory_path: Path) -> None:
- """Create the DirectorySource instance."""
- assert isinstance(directory_path, Path)
- self.directory_path = directory_path
-
- def name(self) -> str:
- """Return name of source."""
- return self.directory_path.name
-
- def config(self) -> BackendConfig | None:
- """Return configuration file content."""
- if not is_backend_directory(self.directory_path):
- raise ConfigurationException("No configuration file found")
-
- config_file = get_backend_config(self.directory_path)
- return load_config(config_file)
-
- def install_into(self, destination: Path) -> None:
- """Install source into destination directory."""
- if not destination.is_dir():
- raise ConfigurationException(f"Wrong destination {destination}.")
-
- if not self.directory_path.is_dir():
- raise ConfigurationException(
- f"Directory {self.directory_path} does not exist."
- )
-
- copy_directory_content(self.directory_path, destination)
-
- def create_destination(self) -> bool:
- """Return True if destination folder should be created before installation."""
- return True
-
-
-class TarArchiveSource(Source):
- """TarArchiveSource class."""
-
- def __init__(self, archive_path: Path) -> None:
- """Create the TarArchiveSource class."""
- assert isinstance(archive_path, Path)
- self.archive_path = archive_path
- self._config: BackendConfig | None = None
- self._has_top_level_folder: bool | None = None
- self._name: str | None = None
-
- def _read_archive_content(self) -> None:
- """Read various information about archive."""
- # get source name from archive name (everything without extensions)
- extensions = "".join(self.archive_path.suffixes)
- self._name = self.archive_path.name.rstrip(extensions)
-
- if not self.archive_path.exists():
- return
-
- with self._open(self.archive_path) as archive:
- try:
- config_entry = archive.getmember(BACKEND_CONFIG_FILE)
- self._has_top_level_folder = False
- except KeyError as error_no_config:
- try:
- archive_entries = archive.getnames()
- entries_common_prefix = os.path.commonprefix(archive_entries)
- top_level_dir = entries_common_prefix.rstrip("/")
-
- if not top_level_dir:
- raise RuntimeError(
- "Archive has no top level directory"
- ) from error_no_config
-
- config_path = f"{top_level_dir}/{BACKEND_CONFIG_FILE}"
-
- config_entry = archive.getmember(config_path)
- self._has_top_level_folder = True
- self._name = top_level_dir
- except (KeyError, RuntimeError) as error_no_root_dir_or_config:
- raise ConfigurationException(
- "No configuration file found"
- ) from error_no_root_dir_or_config
-
- content = archive.extractfile(config_entry)
- self._config = load_config(content)
-
- def config(self) -> BackendConfig | None:
- """Return configuration file content."""
- if self._config is None:
- self._read_archive_content()
-
- return self._config
-
- def name(self) -> str | None:
- """Return name of the source."""
- if self._name is None:
- self._read_archive_content()
-
- return self._name
-
- def create_destination(self) -> bool:
- """Return True if destination folder must be created before installation."""
- if self._has_top_level_folder is None:
- self._read_archive_content()
-
- return not self._has_top_level_folder
-
- def install_into(self, destination: Path) -> None:
- """Install source into destination directory."""
- if not destination.is_dir():
- raise ConfigurationException(f"Wrong destination {destination}.")
-
- with self._open(self.archive_path) as archive:
- archive.extractall(destination)
-
- def _open(self, archive_path: Path) -> TarFile:
- """Open archive file."""
- if not archive_path.is_file():
- raise ConfigurationException(f"File {archive_path} does not exist.")
-
- if archive_path.name.endswith("tar.gz") or archive_path.name.endswith("tgz"):
- mode = "r:gz"
- else:
- raise ConfigurationException(f"Unsupported archive type {archive_path}.")
-
- # The returned TarFile object can be used as a context manager (using
- # 'with') by the calling instance.
- return tarfile.open( # pylint: disable=consider-using-with
- self.archive_path, mode=mode
- )
-
-
-def get_source(source_path: Path) -> TarArchiveSource | DirectorySource:
- """Return appropriate source instance based on provided source path."""
- if source_path.is_file():
- return TarArchiveSource(source_path)
-
- if source_path.is_dir():
- return DirectorySource(source_path)
-
- raise ConfigurationException(f"Unable to read {source_path}.")
-
-
-def create_destination_and_install(source: Source, resource_path: Path) -> None:
- """Create destination directory and install source.
-
- This function is used for actual installation of system/backend New
- directory will be created inside :resource_path: if needed If for example
- archive contains top level folder then no need to create new directory
- """
- destination = resource_path
- create_destination = source.create_destination()
-
- if create_destination:
- name = source.name()
- if not name:
- raise ConfigurationException("Unable to get source name.")
-
- destination = resource_path / name
- destination.mkdir()
- try:
- source.install_into(destination)
- except Exception as error:
- if create_destination:
- shutil.rmtree(destination)
- raise error
diff --git a/src/mlia/backend/executor/system.py b/src/mlia/backend/executor/system.py
deleted file mode 100644
index a5ecf19..0000000
--- a/src/mlia/backend/executor/system.py
+++ /dev/null
@@ -1,178 +0,0 @@
-# SPDX-FileCopyrightText: Copyright 2022, Arm Limited and/or its affiliates.
-# SPDX-License-Identifier: Apache-2.0
-"""System backend module."""
-from __future__ import annotations
-
-from pathlib import Path
-from typing import Any
-from typing import cast
-from typing import List
-
-from mlia.backend.executor.common import Backend
-from mlia.backend.executor.common import ConfigurationException
-from mlia.backend.executor.common import get_backend_configs
-from mlia.backend.executor.common import get_backend_directories
-from mlia.backend.executor.common import load_config
-from mlia.backend.executor.common import remove_backend
-from mlia.backend.executor.config import SystemConfig
-from mlia.backend.executor.fs import get_backends_path
-from mlia.backend.executor.proc import run_and_wait
-from mlia.backend.executor.source import create_destination_and_install
-from mlia.backend.executor.source import get_source
-
-
-class System(Backend):
- """System class."""
-
- def __init__(self, config: SystemConfig) -> None:
- """Construct the System class using the dictionary passed."""
- super().__init__(config)
-
- self._setup_reporting(config)
-
- def _setup_reporting(self, config: SystemConfig) -> None:
- self.reporting = config.get("reporting")
-
- def run(self, command: str) -> tuple[int, bytearray, bytearray]:
- """
- Run command on the system.
-
- Returns a tuple: (exit_code, stdout, stderr)
- """
- cwd = self.config_location
- if not isinstance(cwd, Path) or not cwd.is_dir():
- raise ConfigurationException(
- f"System has invalid config location: {cwd}",
- )
-
- stdout = bytearray()
- stderr = bytearray()
-
- return run_and_wait(
- command,
- cwd=cwd,
- terminate_on_error=True,
- out=stdout,
- err=stderr,
- )
-
- def __eq__(self, other: object) -> bool:
- """Overload operator ==."""
- if not isinstance(other, System):
- return False
-
- return super().__eq__(other) and self.name == other.name
-
- def get_details(self) -> dict[str, Any]:
- """Return a dictionary with all relevant information of a System."""
- output = {
- "type": "system",
- "name": self.name,
- "description": self.description,
- "commands": self._get_command_details(),
- "annotations": self.annotations,
- }
-
- return output
-
-
-def get_available_systems_directory_names() -> list[str]:
- """Return a list of directory names for all avialable systems."""
- return [entry.name for entry in get_backend_directories("systems")]
-
-
-def get_available_systems() -> list[System]:
- """Return a list with all available systems."""
- available_systems = []
- for config_json in get_backend_configs("systems"):
- config_entries = cast(List[SystemConfig], (load_config(config_json)))
- for config_entry in config_entries:
- config_entry["config_location"] = config_json.parent.absolute()
- system = load_system(config_entry)
- available_systems.append(system)
-
- return sorted(available_systems, key=lambda system: system.name)
-
-
-def get_system(system_name: str) -> System:
- """Return a system instance with the same name passed as argument."""
- available_systems = get_available_systems()
- for system in available_systems:
- if system_name == system.name:
- return system
- raise ConfigurationException(f"System '{system_name}' not found.")
-
-
-def install_system(source_path: Path) -> None:
- """Install new system."""
- try:
- source = get_source(source_path)
- config = cast(List[SystemConfig], source.config())
- systems_to_install = [load_system(entry) for entry in config]
- except Exception as error:
- raise ConfigurationException("Unable to read system definition") from error
-
- if not systems_to_install:
- raise ConfigurationException("No system definition found")
-
- available_systems = get_available_systems()
- already_installed = [s for s in systems_to_install if s in available_systems]
- if already_installed:
- names = [system.name for system in already_installed]
- raise ConfigurationException(
- f"Systems [{','.join(names)}] are already installed."
- )
-
- create_destination_and_install(source, get_backends_path("systems"))
-
-
-def remove_system(directory_name: str) -> None:
- """Remove system."""
- remove_backend(directory_name, "systems")
-
-
-def load_system(config: SystemConfig) -> System:
- """Load system based on it's execution type."""
- populate_shared_params(config)
-
- return System(config)
-
-
-def populate_shared_params(config: SystemConfig) -> None:
- """Populate command parameters with shared parameters."""
- user_params = config.get("user_params")
- if not user_params or "shared" not in user_params:
- return
-
- shared_user_params = user_params["shared"]
- if not shared_user_params:
- return
-
- only_aliases = all(p.get("alias") for p in shared_user_params)
- if not only_aliases:
- raise ConfigurationException("All shared parameters should have aliases")
-
- commands = config.get("commands", {})
- for cmd_name in ["run"]:
- command = commands.get(cmd_name)
- if command is None:
- commands[cmd_name] = []
- cmd_user_params = user_params.get(cmd_name)
- if not cmd_user_params:
- cmd_user_params = shared_user_params
- else:
- only_aliases = all(p.get("alias") for p in cmd_user_params)
- if not only_aliases:
- raise ConfigurationException(
- f"All parameters for command {cmd_name} should have aliases."
- )
- merged_by_alias = {
- **{p.get("alias"): p for p in shared_user_params},
- **{p.get("alias"): p for p in cmd_user_params},
- }
- cmd_user_params = list(merged_by_alias.values())
-
- user_params[cmd_name] = cmd_user_params
-
- config["commands"] = commands
- del user_params["shared"]
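
populate_shared_params above folds the "shared" user parameters into each command's own list, with command-specific entries taking precedence on alias clashes. A hypothetical config fragment illustrating the merge (all values are made up):

    config = {
        "name": "Example system",
        "user_params": {
            "shared": [{"name": "--data", "alias": "input_file"}],
            "run": [{"name": "--data custom.tflite", "alias": "input_file"}],
        },
    }

    # After populate_shared_params(config):
    #   - the "shared" section is removed,
    #   - user_params["run"] keeps only the command-specific "input_file" entry,
    #   - config["commands"]["run"] is created as an empty list.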
diff --git a/src/mlia/backend/install.py b/src/mlia/backend/install.py
index 37a277b..c76e3e2 100644
--- a/src/mlia/backend/install.py
+++ b/src/mlia/backend/install.py
@@ -11,17 +11,12 @@ from abc import abstractmethod
from dataclasses import dataclass
from pathlib import Path
from typing import Callable
-from typing import Iterable
from typing import Optional
from typing import Union
-from mlia.backend.executor.runner import BackendRunner
-from mlia.backend.executor.system import remove_system
+from mlia.backend.repo import get_backend_repository
from mlia.utils.download import DownloadArtifact
from mlia.utils.filesystem import all_files_exist
-from mlia.utils.filesystem import all_paths_valid
-from mlia.utils.filesystem import copy_all
-from mlia.utils.filesystem import get_mlia_resources
from mlia.utils.filesystem import temp_directory
from mlia.utils.filesystem import working_directory
from mlia.utils.py_manager import get_package_manager
@@ -29,52 +24,6 @@ from mlia.utils.py_manager import get_package_manager
logger = logging.getLogger(__name__)
-# Mapping backend -> device_type -> system_name
-_SUPPORTED_SYSTEMS = {
- "Corstone-300": {
- "Ethos-U55": "Corstone-300: Cortex-M55+Ethos-U55",
- "Ethos-U65": "Corstone-300: Cortex-M55+Ethos-U65",
- "ethos-u55": "Corstone-300: Cortex-M55+Ethos-U55",
- "ethos-u65": "Corstone-300: Cortex-M55+Ethos-U65",
- },
- "Corstone-310": {
- "Ethos-U55": "Corstone-310: Cortex-M85+Ethos-U55",
- "Ethos-U65": "Corstone-310: Cortex-M85+Ethos-U65",
- "ethos-u55": "Corstone-310: Cortex-M85+Ethos-U55",
- "ethos-u65": "Corstone-310: Cortex-M85+Ethos-U65",
- },
-}
-
-# Mapping system_name -> application
-_SYSTEM_TO_APP_MAP = {
- "Corstone-300: Cortex-M55+Ethos-U55": "Generic Inference Runner: Ethos-U55",
- "Corstone-300: Cortex-M55+Ethos-U65": "Generic Inference Runner: Ethos-U65",
- "Corstone-310: Cortex-M85+Ethos-U55": "Generic Inference Runner: Ethos-U55",
- "Corstone-310: Cortex-M85+Ethos-U65": "Generic Inference Runner: Ethos-U65",
-}
-
-
-def get_system_name(backend: str, device_type: str) -> str:
- """Get the system name for the given backend and device type."""
- return _SUPPORTED_SYSTEMS[backend][device_type]
-
-
-def get_application_name(system_name: str) -> str:
- """Get application name for the provided system name."""
- return _SYSTEM_TO_APP_MAP[system_name]
-
-
-def get_all_system_names(backend: str) -> list[str]:
- """Get all systems supported by the backend."""
- return list(_SUPPORTED_SYSTEMS.get(backend, {}).values())
-
-
-def get_all_application_names(backend: str) -> list[str]:
- """Get all applications supported by the backend."""
- app_set = {_SYSTEM_TO_APP_MAP[sys] for sys in get_all_system_names(backend)}
- return list(app_set)
-
-
@dataclass
class InstallFromPath:
"""Installation from the local path."""
@@ -95,29 +44,24 @@ InstallationType = Union[InstallFromPath, DownloadAndInstall]
class Installation(ABC):
"""Base class for the installation process of the backends."""
- @property
- @abstractmethod
- def name(self) -> str:
- """Return name of the backend."""
-
- @property
- @abstractmethod
- def description(self) -> str:
- """Return description of the backend."""
+ def __init__(self, name: str, description: str) -> None:
+ """Init the installation."""
+ self.name = name
+ self.description = description
@property
@abstractmethod
def could_be_installed(self) -> bool:
- """Return true if backend could be installed in current environment."""
+ """Check if backend could be installed in current environment."""
@property
@abstractmethod
def already_installed(self) -> bool:
- """Return true if backend is already installed."""
+ """Check if backend is already installed."""
@abstractmethod
def supports(self, install_type: InstallationType) -> bool:
- """Return true if installation supports requested installation type."""
+ """Check if installation supports requested installation type."""
@abstractmethod
def install(self, install_type: InstallationType) -> None:
@@ -134,103 +78,53 @@ class BackendInfo:
backend_path: Path
copy_source: bool = True
- system_config: str | None = None
+ settings: dict | None = None
PathChecker = Callable[[Path], Optional[BackendInfo]]
BackendInstaller = Callable[[bool, Path], Path]
-class BackendMetadata:
- """Backend installation metadata."""
+class BackendInstallation(Installation):
+ """Backend installation."""
def __init__(
self,
name: str,
description: str,
- system_config: str,
- apps_resources: list[str],
fvp_dir_name: str,
download_artifact: DownloadArtifact | None,
- supported_platforms: list[str] | None = None,
+ supported_platforms: list[str] | None,
+ path_checker: PathChecker,
+ backend_installer: BackendInstaller | None,
) -> None:
- """
- Initialize BackendMetadata.
+ """Init the backend installation."""
+ super().__init__(name, description)
- Members expected_systems and expected_apps are filled automatically.
- """
- self.name = name
- self.description = description
- self.system_config = system_config
- self.apps_resources = apps_resources
self.fvp_dir_name = fvp_dir_name
self.download_artifact = download_artifact
self.supported_platforms = supported_platforms
-
- self.expected_systems = get_all_system_names(name)
- self.expected_apps = get_all_application_names(name)
-
- @property
- def expected_resources(self) -> Iterable[Path]:
- """Return list of expected resources."""
- resources = [self.system_config, *self.apps_resources]
-
- return (get_mlia_resources() / resource for resource in resources)
-
- @property
- def supported_platform(self) -> bool:
- """Return true if current platform supported."""
- if not self.supported_platforms:
- return True
-
- return platform.system() in self.supported_platforms
-
-
-class BackendInstallation(Installation):
- """Backend installation."""
-
- def __init__(
- self,
- backend_runner: BackendRunner,
- metadata: BackendMetadata,
- path_checker: PathChecker,
- backend_installer: BackendInstaller | None,
- ) -> None:
- """Init the backend installation."""
- self.backend_runner = backend_runner
- self.metadata = metadata
self.path_checker = path_checker
self.backend_installer = backend_installer
@property
- def name(self) -> str:
- """Return name of the backend."""
- return self.metadata.name
-
- @property
- def description(self) -> str:
- """Return description of the backend."""
- return self.metadata.description
-
- @property
def already_installed(self) -> bool:
"""Return true if backend already installed."""
- return self.backend_runner.all_installed(
- self.metadata.expected_systems, self.metadata.expected_apps
- )
+ backend_repo = get_backend_repository()
+ return backend_repo.is_backend_installed(self.name)
@property
def could_be_installed(self) -> bool:
"""Return true if backend could be installed."""
- if not self.metadata.supported_platform:
- return False
-
- return all_paths_valid(self.metadata.expected_resources)
+ return (
+ not self.supported_platforms
+ or platform.system() in self.supported_platforms
+ )
def supports(self, install_type: InstallationType) -> bool:
"""Return true if backends supported type of the installation."""
if isinstance(install_type, DownloadAndInstall):
- return self.metadata.download_artifact is not None
+ return self.download_artifact is not None
if isinstance(install_type, InstallFromPath):
return self.path_checker(install_type.backend_path) is not None
@@ -240,41 +134,38 @@ class BackendInstallation(Installation):
def install(self, install_type: InstallationType) -> None:
"""Install the backend."""
if isinstance(install_type, DownloadAndInstall):
- download_artifact = self.metadata.download_artifact
- assert download_artifact is not None, "No artifact provided"
+ assert self.download_artifact is not None, "No artifact provided"
- self.download_and_install(download_artifact, install_type.eula_agreement)
+ self._download_and_install(
+ self.download_artifact, install_type.eula_agreement
+ )
elif isinstance(install_type, InstallFromPath):
- backend_path = self.path_checker(install_type.backend_path)
- assert backend_path is not None, "Unable to resolve backend path"
+ backend_info = self.path_checker(install_type.backend_path)
- self.install_from(backend_path)
+ assert backend_info is not None, "Unable to resolve backend path"
+ self._install_from(backend_info)
else:
raise Exception(f"Unable to install {install_type}")
- def install_from(self, backend_info: BackendInfo) -> None:
+ def _install_from(self, backend_info: BackendInfo) -> None:
"""Install backend from the directory."""
- mlia_resources = get_mlia_resources()
-
- with temp_directory() as tmpdir:
- fvp_dist_dir = tmpdir / self.metadata.fvp_dir_name
-
- system_config = self.metadata.system_config
- if backend_info.system_config:
- system_config = backend_info.system_config
-
- resources_to_copy = [mlia_resources / system_config]
- if backend_info.copy_source:
- resources_to_copy.append(backend_info.backend_path)
-
- copy_all(*resources_to_copy, dest=fvp_dist_dir)
-
- self.backend_runner.install_system(fvp_dist_dir)
-
- for app in self.metadata.apps_resources:
- self.backend_runner.install_application(mlia_resources / app)
+ backend_repo = get_backend_repository()
+
+ if backend_info.copy_source:
+ backend_repo.copy_backend(
+ self.name,
+ backend_info.backend_path,
+ self.fvp_dir_name,
+ backend_info.settings,
+ )
+ else:
+ backend_repo.add_backend(
+ self.name,
+ backend_info.backend_path,
+ backend_info.settings,
+ )
- def download_and_install(
+ def _download_and_install(
self, download_artifact: DownloadArtifact, eula_agrement: bool
) -> None:
"""Download and install the backend."""
@@ -288,11 +179,10 @@ class BackendInstallation(Installation):
with tarfile.open(downloaded_to) as archive:
archive.extractall(dist_dir)
- assert self.backend_installer, (
- f"Backend '{self.metadata.name}' does not support "
- "download and installation."
- )
- backend_path = self.backend_installer(eula_agrement, dist_dir)
+ backend_path = dist_dir
+ if self.backend_installer:
+ backend_path = self.backend_installer(eula_agrement, dist_dir)
+
if self.path_checker(backend_path) is None:
raise Exception("Downloaded artifact has invalid structure")
@@ -300,18 +190,23 @@ class BackendInstallation(Installation):
def uninstall(self) -> None:
"""Uninstall the backend."""
- remove_system(self.metadata.fvp_dir_name)
+ backend_repo = get_backend_repository()
+ backend_repo.remove_backend(self.name)
class PackagePathChecker:
"""Package path checker."""
def __init__(
- self, expected_files: list[str], backend_subfolder: str | None = None
+ self,
+ expected_files: list[str],
+ backend_subfolder: str | None = None,
+        settings: dict | None = None,
) -> None:
"""Init the path checker."""
self.expected_files = expected_files
self.backend_subfolder = backend_subfolder
+ self.settings = settings
def __call__(self, backend_path: Path) -> BackendInfo | None:
"""Check if directory contains all expected files."""
@@ -319,15 +214,14 @@ class PackagePathChecker:
if not all_files_exist(resolved_paths):
return None
+ actual_backend_path = backend_path
if self.backend_subfolder:
subfolder = backend_path / self.backend_subfolder
- if not subfolder.is_dir():
- return None
+ if subfolder.is_dir():
+ actual_backend_path = subfolder
- return BackendInfo(subfolder)
-
- return BackendInfo(backend_path)
+ return BackendInfo(actual_backend_path, settings=self.settings)
class StaticPathChecker:
@@ -338,13 +232,13 @@ class StaticPathChecker:
static_backend_path: Path,
expected_files: list[str],
copy_source: bool = False,
- system_config: str | None = None,
+ settings: dict | None = None,
) -> None:
"""Init static path checker."""
self.static_backend_path = static_backend_path
self.expected_files = expected_files
self.copy_source = copy_source
- self.system_config = system_config
+ self.settings = settings
def __call__(self, backend_path: Path) -> BackendInfo | None:
"""Check if directory equals static backend path with all expected files."""
@@ -358,7 +252,7 @@ class StaticPathChecker:
return BackendInfo(
backend_path,
copy_source=self.copy_source,
- system_config=self.system_config,
+ settings=self.settings,
)
@@ -392,8 +286,8 @@ class PyPackageBackendInstallation(Installation):
expected_packages: list[str],
) -> None:
"""Init the backend installation."""
- self._name = name
- self._description = description
+ super().__init__(name, description)
+
self._packages_to_install = packages_to_install
self._packages_to_uninstall = packages_to_uninstall
self._expected_packages = expected_packages
@@ -401,16 +295,6 @@ class PyPackageBackendInstallation(Installation):
self.package_manager = get_package_manager()
@property
- def name(self) -> str:
- """Return name of the backend."""
- return self._name
-
- @property
- def description(self) -> str:
- """Return description of the backend."""
- return self._description
-
- @property
def could_be_installed(self) -> bool:
"""Check if backend could be installed."""
return True
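
With the metadata class gone, a backend installation is now assembled directly from its name, description, FVP directory name, path checker and optional installer, and installed files are tracked by the backend repository. A hedged sketch of the new wiring (the paths, expected files and settings below are illustrative, not the real Corstone definitions, which live in mlia.backend.corstone.install):

    from pathlib import Path

    from mlia.backend.install import BackendInstallation
    from mlia.backend.install import InstallFromPath
    from mlia.backend.install import PackagePathChecker

    installation = BackendInstallation(
        name="Corstone-300",
        description="Corstone-300 FVP",
        fvp_dir_name="corstone_300",
        download_artifact=None,
        supported_platforms=["Linux"],
        path_checker=PackagePathChecker(
            expected_files=["models/example/FVP_Corstone_SSE-300_Ethos-U55"],
            backend_subfolder="models/example",
            settings={"profile": "default"},
        ),
        backend_installer=None,
    )

    install_type = InstallFromPath(Path("/opt/FVP_Corstone_SSE-300"))
    if installation.supports(install_type):
        # Copies the backend into the repository and records it in its config.
        installation.install(install_type)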
diff --git a/src/mlia/backend/manager.py b/src/mlia/backend/manager.py
index c02dc6e..b0fa919 100644
--- a/src/mlia/backend/manager.py
+++ b/src/mlia/backend/manager.py
@@ -1,4 +1,4 @@
-# SPDX-FileCopyrightText: Copyright 2022, Arm Limited and/or its affiliates.
+# SPDX-FileCopyrightText: Copyright 2022-2023, Arm Limited and/or its affiliates.
# SPDX-License-Identifier: Apache-2.0
"""Module for installation process."""
from __future__ import annotations
@@ -9,10 +9,12 @@ from abc import abstractmethod
from pathlib import Path
from typing import Callable
+from mlia.backend.corstone.install import get_corstone_installations
from mlia.backend.install import DownloadAndInstall
from mlia.backend.install import Installation
from mlia.backend.install import InstallationType
from mlia.backend.install import InstallFromPath
+from mlia.backend.tosa_checker.install import get_tosa_backend_installation
from mlia.core.errors import ConfigurationError
from mlia.core.errors import InternalError
from mlia.utils.misc import yes
@@ -269,3 +271,11 @@ class DefaultInstallationManager(InstallationManager, InstallationFiltersMixin):
installations = self.already_installed(backend_name)
return len(installations) == 1
+
+
+def get_installation_manager(noninteractive: bool = False) -> InstallationManager:
+ """Return installation manager."""
+ backends = get_corstone_installations()
+ backends.append(get_tosa_backend_installation())
+
+ return DefaultInstallationManager(backends, noninteractive=noninteractive)
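
get_installation_manager now lives alongside the manager itself, so callers only need a backend name to query installation state. A small usage sketch, assuming the backend name "tosa-checker" registered elsewhere in this patch:

    from mlia.backend.manager import get_installation_manager

    manager = get_installation_manager(noninteractive=True)

    # backend_installed() is the same query used by get_available_backends()
    # in src/mlia/cli/config.py further down in this patch.
    if manager.backend_installed("tosa-checker"):
        print("tosa-checker backend is ready to use")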
diff --git a/src/mlia/backend/repo.py b/src/mlia/backend/repo.py
new file mode 100644
index 0000000..3dd2e57
--- /dev/null
+++ b/src/mlia/backend/repo.py
@@ -0,0 +1,190 @@
+# SPDX-FileCopyrightText: Copyright 2023, Arm Limited and/or its affiliates.
+# SPDX-License-Identifier: Apache-2.0
+"""Module for backend repository.
+
+Backend repository is responsible for managing backends
+(apart from python package based) that have been installed
+via command "mlia-backend".
+
+Repository has associated directory (by default ~/.mlia) and
+configuration file (by default ~/.mlia/mlia_config.json). In
+configuration file repository keeps track of installed backends
+and their settings. Backend settings could be used by MLIA for
+correct instantiation of the backend.
+
+If backend is removed then repository removes corresponding record
+from configuration file along with backend files if needed.
+"""
+from __future__ import annotations
+
+import json
+import shutil
+from pathlib import Path
+
+from mlia.utils.filesystem import copy_all
+
+
+class _ConfigFile:
+ """Configuration file for backend repository."""
+
+ def __init__(self, config_file: Path) -> None:
+ """Init configuration file."""
+ self.config_file = config_file
+ self.config: dict = {"backends": []}
+
+ if self.exists():
+ content = self.config_file.read_text()
+ self.config = json.loads(content)
+
+ def exists(self) -> bool:
+ """Check if configuration file exists."""
+ return self.config_file.is_file()
+
+ def save(self) -> None:
+ """Save configuration."""
+ content = json.dumps(self.config, indent=4)
+ self.config_file.write_text(content)
+
+ def add_backend(
+ self,
+ backend_name: str,
+ settings: dict,
+ ) -> None:
+ """Add backend settings to configuration file."""
+ item = {"name": backend_name, "settings": settings}
+ self.config["backends"].append(item)
+
+ self.save()
+
+ def remove_backend(self, backend_name: str) -> None:
+ """Remove backend settings."""
+ backend = self._get_backend(backend_name)
+
+ if backend:
+ self.config["backends"].remove(backend)
+
+ self.save()
+
+ def backend_exists(self, backend_name: str) -> bool:
+ """Check if backend exists in configuration file."""
+ return self._get_backend(backend_name) is not None
+
+ def _get_backend(self, backend_name: str) -> dict | None:
+ """Find backend settings by backend name."""
+ find_backend = (
+ item for item in self.config["backends"] if item["name"] == backend_name
+ )
+
+ return next(find_backend, None)
+
+ def get_backend_settings(self, backend_name: str) -> dict | None:
+ """Get backend settings."""
+ backend = self._get_backend(backend_name)
+
+ return backend["settings"] if backend else None
+
+
+class BackendRepository:
+ """Repository for keeping track of the installed backends."""
+
+ def __init__(
+ self,
+ repository: Path,
+ config_filename: str = "mlia_config.json",
+ ) -> None:
+ """Init repository instance."""
+ self.repository = repository
+ self.config_file = _ConfigFile(repository / config_filename)
+
+ self._init_repo()
+
+ def copy_backend(
+ self,
+ backend_name: str,
+ backend_path: Path,
+ backend_dir_name: str,
+ settings: dict | None = None,
+ ) -> None:
+ """Copy backend files into repository."""
+ repo_backend_path = self._get_backend_path(backend_dir_name)
+
+ if repo_backend_path.exists():
+ raise Exception(f"Unable to copy backend files for {backend_name}.")
+
+ copy_all(backend_path, dest=repo_backend_path)
+
+ settings = settings or {}
+ settings["backend_dir"] = backend_dir_name
+
+ self.config_file.add_backend(backend_name, settings)
+
+ def add_backend(
+ self,
+ backend_name: str,
+ backend_path: Path,
+ settings: dict | None = None,
+ ) -> None:
+ """Add backend to repository."""
+ if self.is_backend_installed(backend_name):
+ raise Exception(f"Backend {backend_name} already installed.")
+
+ settings = settings or {}
+ settings["backend_path"] = backend_path.absolute().as_posix()
+
+ self.config_file.add_backend(backend_name, settings)
+
+ def remove_backend(self, backend_name: str) -> None:
+ """Remove backend from repository."""
+ settings = self.config_file.get_backend_settings(backend_name)
+
+ if not settings:
+ raise Exception(f"Backend {backend_name} is not installed.")
+
+ if "backend_dir" in settings:
+ repo_backend_path = self._get_backend_path(settings["backend_dir"])
+ shutil.rmtree(repo_backend_path)
+
+ self.config_file.remove_backend(backend_name)
+
+ def is_backend_installed(self, backend_name: str) -> bool:
+ """Check if backend is installed."""
+ return self.config_file.backend_exists(backend_name)
+
+ def get_backend_settings(self, backend_name: str) -> tuple[Path, dict]:
+ """Return backend settings."""
+ settings = self.config_file.get_backend_settings(backend_name)
+
+ if not settings:
+ raise Exception(f"Backend {backend_name} is not installed.")
+
+ if backend_dir := settings.get("backend_dir", None):
+ return self._get_backend_path(backend_dir), settings
+
+ if backend_path := settings.get("backend_path", None):
+ return Path(backend_path), settings
+
+ raise Exception(f"Unable to resolve path of the backend {backend_name}.")
+
+ def _get_backend_path(self, backend_dir_name: str) -> Path:
+ """Return path to backend."""
+ return self.repository.joinpath("backends", backend_dir_name)
+
+ def _init_repo(self) -> None:
+ """Init repository."""
+ if self.repository.exists():
+ if not self.config_file.exists():
+ raise Exception(
+ f"Directory {self.repository} could not be used as MLIA repository."
+ )
+ else:
+ self.repository.mkdir()
+ self.repository.joinpath("backends").mkdir()
+
+ self.config_file.save()
+
+
+def get_backend_repository(
+ repository: Path = Path.home() / ".mlia",
+) -> BackendRepository:
+ """Return backend repository."""
+ return BackendRepository(repository)
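
A short usage sketch for the new repository API (the backend name, path and settings are illustrative):

    from pathlib import Path

    from mlia.backend.repo import get_backend_repository

    repo = get_backend_repository()  # defaults to ~/.mlia

    # Register a backend in place without copying its files.
    if not repo.is_backend_installed("example-backend"):
        repo.add_backend(
            "example-backend",
            Path("/opt/example-backend"),
            settings={"profile": "default"},
        )

    backend_path, settings = repo.get_backend_settings("example-backend")
    print(backend_path, settings)

    # Removing a backend only deletes files that the repository itself copied.
    repo.remove_backend("example-backend")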
diff --git a/src/mlia/backend/tosa_checker/__init__.py b/src/mlia/backend/tosa_checker/__init__.py
index c06a122..e11034f 100644
--- a/src/mlia/backend/tosa_checker/__init__.py
+++ b/src/mlia/backend/tosa_checker/__init__.py
@@ -8,7 +8,7 @@ from mlia.backend.registry import registry
from mlia.core.common import AdviceCategory
registry.register(
- "TOSA-Checker",
+ "tosa-checker",
BackendConfiguration(
supported_advice=[AdviceCategory.COMPATIBILITY],
supported_systems=[System.LINUX_AMD64],
diff --git a/src/mlia/cli/command_validators.py b/src/mlia/cli/command_validators.py
index 8eb966b..23101e0 100644
--- a/src/mlia/cli/command_validators.py
+++ b/src/mlia/cli/command_validators.py
@@ -23,18 +23,12 @@ def validate_backend(
compatible with each other.
It assumes that prior checks where made on the validity of the target-profile.
"""
- target_map = {
- "ethos-u55": "Ethos-U55",
- "ethos-u65": "Ethos-U65",
- "cortex-a": "Cortex-A",
- "tosa": "TOSA",
- }
target = get_target(target_profile)
if not backend:
return get_default_backends_dict()[target]
- compatible_backends = supported_backends(target_map[target])
+ compatible_backends = supported_backends(target)
nor_backend = list(map(normalize_string, backend))
nor_compat_backend = list(map(normalize_string, compatible_backends))
diff --git a/src/mlia/cli/config.py b/src/mlia/cli/config.py
index 0dac3e8..433300c 100644
--- a/src/mlia/cli/config.py
+++ b/src/mlia/cli/config.py
@@ -4,16 +4,9 @@
from __future__ import annotations
import logging
-from functools import lru_cache
-from typing import List
-from typing import Optional
-from typing import TypedDict
-from mlia.backend.corstone.install import get_corstone_installations
-from mlia.backend.manager import DefaultInstallationManager
-from mlia.backend.manager import InstallationManager
-from mlia.backend.registry import get_supported_backends
-from mlia.backend.tosa_checker.install import get_tosa_backend_installation
+from mlia.backend.manager import get_installation_manager
+from mlia.target.registry import all_supported_backends
logger = logging.getLogger(__name__)
@@ -21,31 +14,24 @@ DEFAULT_PRUNING_TARGET = 0.5
DEFAULT_CLUSTERING_TARGET = 32
-def get_installation_manager(noninteractive: bool = False) -> InstallationManager:
- """Return installation manager."""
- backends = get_corstone_installations()
- backends.append(get_tosa_backend_installation())
-
- return DefaultInstallationManager(backends, noninteractive=noninteractive)
-
-
-@lru_cache
def get_available_backends() -> list[str]:
"""Return list of the available backends."""
+ available_backends = ["Vela", "ArmNNTFLiteDelegate"]
+
# Add backends using backend manager
manager = get_installation_manager()
- available_backends = [
+ available_backends.extend(
backend
- for backend in get_supported_backends()
+ for backend in all_supported_backends()
if manager.backend_installed(backend)
- ]
+ )
return available_backends
# List of mutually exclusive Corstone backends ordered by priority
_CORSTONE_EXCLUSIVE_PRIORITY = ("Corstone-310", "Corstone-300")
-_NON_ETHOS_U_BACKENDS = ("TOSA-Checker", "ArmNNTFLiteDelegate")
+_NON_ETHOS_U_BACKENDS = ("tosa-checker", "ArmNNTFLiteDelegate")
def get_ethos_u_default_backends(backends: list[str]) -> list[str]:
@@ -70,29 +56,14 @@ def get_default_backends() -> list[str]:
return backends
-def is_corstone_backend(backend: str) -> bool:
- """Check if the given backend is a Corstone backend."""
- return backend in _CORSTONE_EXCLUSIVE_PRIORITY
-
-
-BackendCompatibility = TypedDict(
- "BackendCompatibility",
- {
- "partial-match": bool,
- "backends": List[str],
- "default-return": Optional[List[str]],
- "use-custom-return": bool,
- "custom-return": Optional[List[str]],
- },
-)
-
-
def get_default_backends_dict() -> dict[str, list[str]]:
"""Return default backends for all targets."""
- ethos_u_defaults = get_ethos_u_default_backends(get_default_backends())
+ default_backends = get_default_backends()
+ ethos_u_defaults = get_ethos_u_default_backends(default_backends)
+
return {
"ethos-u55": ethos_u_defaults,
"ethos-u65": ethos_u_defaults,
"tosa": ["tosa-checker"],
- "cortex-a": ["armnn-tflitedelegate"],
+ "cortex-a": ["ArmNNTFLiteDelegate"],
}
diff --git a/src/mlia/cli/main.py b/src/mlia/cli/main.py
index 4a91b08..76f199e 100644
--- a/src/mlia/cli/main.py
+++ b/src/mlia/cli/main.py
@@ -74,7 +74,7 @@ def get_commands() -> list[CommandInfo]:
partial(add_target_options, profiles_to_skip=["tosa", "cortex-a"]),
partial(
add_backend_options,
- backends_to_skip=["tosa-checker", "armnn-tflitedelegate"],
+ backends_to_skip=["tosa-checker", "ArmNNTFLiteDelegate"],
),
add_multi_optimization_options,
add_output_options,
diff --git a/src/mlia/cli/options.py b/src/mlia/cli/options.py
index dac8c82..421533a 100644
--- a/src/mlia/cli/options.py
+++ b/src/mlia/cli/options.py
@@ -8,10 +8,10 @@ from pathlib import Path
from typing import Any
from typing import Callable
+from mlia.backend.corstone import is_corstone_backend
from mlia.cli.config import DEFAULT_CLUSTERING_TARGET
from mlia.cli.config import DEFAULT_PRUNING_TARGET
from mlia.cli.config import get_available_backends
-from mlia.cli.config import is_corstone_backend
from mlia.core.typing import OutputFormat
from mlia.target.config import get_builtin_supported_profile_names
@@ -47,12 +47,12 @@ def add_target_options(
"--target-profile",
required=required,
default=default_target_profile,
- help="Builtin target profile: {target_profiles}"
- "or path to custom target profile"
+ help="Built-in target profile or path to the custom target profile. "
+ f"Built-in target profiles are {', '.join(target_profiles)}. "
"Target profile that will set the target options "
"such as target, mac value, memory mode, etc. "
"For the values associated with each target profile "
- "please refer to the documentation.",
+ "please refer to the documentation. ",
)
diff --git a/src/mlia/resources/backend_configs/systems/SYSTEMS.txt b/src/mlia/resources/backend_configs/systems/SYSTEMS.txt
deleted file mode 100644
index 3861769..0000000
--- a/src/mlia/resources/backend_configs/systems/SYSTEMS.txt
+++ /dev/null
@@ -1,9 +0,0 @@
-SPDX-FileCopyrightText: Copyright 2022, Arm Limited and/or its affiliates.
-SPDX-License-Identifier: Apache-2.0
-
-This directory contains the configuration files of the system backends.
-
-Supported systems:
-
- * FVP Corstone-300 Ecosystem
- * FVP Corstone-310 Ecosystem
diff --git a/src/mlia/resources/backend_configs/systems/corstone-300-vht/backend-config.json b/src/mlia/resources/backend_configs/systems/corstone-300-vht/backend-config.json
deleted file mode 100644
index 7bc12c7..0000000
--- a/src/mlia/resources/backend_configs/systems/corstone-300-vht/backend-config.json
+++ /dev/null
@@ -1,72 +0,0 @@
-[
- {
- "name": "Corstone-300: Cortex-M55+Ethos-U55",
- "description": "Cortex-M55 and Ethos-U55 functional model implementations based on Corstone-300 design for MPS3 board.",
- "annotations": {
- "ip_class": "Ethos-U55",
- "sim_type": "FM",
- "variant": "Cortex-M55+Ethos-U55"
- },
- "commands": {
- "run": [
- "/opt/VHT/VHT_Corstone_SSE-300_Ethos-U55 -a {software.variables:eval_app} {user_params:input_file}@0x90000000 -C {user_params:mac} -C mps3_board.telnetterminal0.start_telnet=0 -C mps3_board.uart0.out_file='-' -C mps3_board.uart0.shutdown_on_eot=1 -C mps3_board.visualisation.disable-visualisation=1 --stat"
- ]
- },
- "user_params": {
- "run": [
- {
- "name": "--data",
- "description": "Full file name for a custom model. Model must be in TensorFlow Lite format compiled with Vela.",
- "values": [],
- "alias": "input_file"
- },
- {
- "name": "ethosu.num_macs=",
- "description": "Arm Ethos-U55 configuration - the number represents MACs per cycle.",
- "values": [
- "32",
- "64",
- "128",
- "256"
- ],
- "default_value": "256",
- "alias": "mac"
- }
- ]
- }
- },
- {
- "name": "Corstone-300: Cortex-M55+Ethos-U65",
- "description": "Cortex-M55 and Ethos-U65 functional model implementations based on Corstone-300 design for MPS3 board.",
- "annotations": {
- "ip_class": "Ethos-U65",
- "sim_type": "FM",
- "variant": "Cortex-M55+Ethos-U65"
- },
- "commands": {
- "run": [
- "/opt/VHT/VHT_Corstone_SSE-300_Ethos-U65 -a {software.variables:eval_app} {user_params:input_file}@0x90000000 -C {user_params:mac} -C mps3_board.telnetterminal0.start_telnet=0 -C mps3_board.uart0.out_file='-' -C mps3_board.uart0.shutdown_on_eot=1 -C mps3_board.visualisation.disable-visualisation=1 --stat"
- ]
- },
- "user_params": {
- "run": [
- {
- "name": "--data",
- "description": "Full file name for a custom model. Model must be in TensorFlow Lite format compiled with Vela.",
- "values": [],
- "alias": "input_file"
- },
- {
- "name": "ethosu.num_macs=",
- "description": "Arm Ethos-U65 configuration - the number represents MACs per cycle.",
- "values": [
- "256",
- "512"
- ],
- "default_value": "512",
- "alias": "mac"
- }
- ]
- }
- }
-]
diff --git a/src/mlia/resources/backend_configs/systems/corstone-300-vht/backend-config.json.license b/src/mlia/resources/backend_configs/systems/corstone-300-vht/backend-config.json.license
deleted file mode 100644
index 9b83bfc..0000000
--- a/src/mlia/resources/backend_configs/systems/corstone-300-vht/backend-config.json.license
+++ /dev/null
@@ -1,3 +0,0 @@
-SPDX-FileCopyrightText: Copyright 2022, Arm Limited and/or its affiliates.
-
-SPDX-License-Identifier: Apache-2.0
diff --git a/src/mlia/resources/backend_configs/systems/corstone-300/backend-config.json b/src/mlia/resources/backend_configs/systems/corstone-300/backend-config.json
deleted file mode 100644
index c27c6f5..0000000
--- a/src/mlia/resources/backend_configs/systems/corstone-300/backend-config.json
+++ /dev/null
@@ -1,72 +0,0 @@
-[
- {
- "name": "Corstone-300: Cortex-M55+Ethos-U55",
- "description": "Cortex-M55 and Ethos-U55 functional model implementations based on Corstone-300 design for MPS3 board.",
- "annotations": {
- "ip_class": "Ethos-U55",
- "sim_type": "FM",
- "variant": "Cortex-M55+Ethos-U55"
- },
- "commands": {
- "run": [
- "FVP_Corstone_SSE-300_Ethos-U55 -a {software.variables:eval_app} {user_params:input_file}@0x90000000 -C {user_params:mac} -C mps3_board.telnetterminal0.start_telnet=0 -C mps3_board.uart0.out_file='-' -C mps3_board.uart0.shutdown_on_eot=1 -C mps3_board.visualisation.disable-visualisation=1 --stat"
- ]
- },
- "user_params": {
- "run": [
- {
- "name": "--data",
- "description": "Full file name for a custom model. Model must be in TensorFlow Lite format compiled with Vela.",
- "values": [],
- "alias": "input_file"
- },
- {
- "name": "ethosu.num_macs=",
- "description": "Arm Ethos-U55 configuration - the number represents MACs per cycle.",
- "values": [
- "32",
- "64",
- "128",
- "256"
- ],
- "default_value": "256",
- "alias": "mac"
- }
- ]
- }
- },
- {
- "name": "Corstone-300: Cortex-M55+Ethos-U65",
- "description": "Cortex-M55 and Ethos-U65 functional model implementations based on Corstone-300 design for MPS3 board.",
- "annotations": {
- "ip_class": "Ethos-U65",
- "sim_type": "FM",
- "variant": "Cortex-M55+Ethos-U65"
- },
- "commands": {
- "run": [
- "FVP_Corstone_SSE-300_Ethos-U65 -a {software.variables:eval_app} {user_params:input_file}@0x90000000 -C {user_params:mac} -C mps3_board.telnetterminal0.start_telnet=0 -C mps3_board.uart0.out_file='-' -C mps3_board.uart0.shutdown_on_eot=1 -C mps3_board.visualisation.disable-visualisation=1 --stat"
- ]
- },
- "user_params": {
- "run": [
- {
- "name": "--data",
- "description": "Full file name for a custom model. Model must be in TensorFlow Lite format compiled with Vela.",
- "values": [],
- "alias": "input_file"
- },
- {
- "name": "ethosu.num_macs=",
- "description": "Arm Ethos-U65 configuration - the number represents MACs per cycle.",
- "values": [
- "256",
- "512"
- ],
- "default_value": "512",
- "alias": "mac"
- }
- ]
- }
- }
-]
diff --git a/src/mlia/resources/backend_configs/systems/corstone-300/backend-config.json.license b/src/mlia/resources/backend_configs/systems/corstone-300/backend-config.json.license
deleted file mode 100644
index 9b83bfc..0000000
--- a/src/mlia/resources/backend_configs/systems/corstone-300/backend-config.json.license
+++ /dev/null
@@ -1,3 +0,0 @@
-SPDX-FileCopyrightText: Copyright 2022, Arm Limited and/or its affiliates.
-
-SPDX-License-Identifier: Apache-2.0
diff --git a/src/mlia/resources/backend_configs/systems/corstone-310-vht/backend-config.json b/src/mlia/resources/backend_configs/systems/corstone-310-vht/backend-config.json
deleted file mode 100644
index dcb105a..0000000
--- a/src/mlia/resources/backend_configs/systems/corstone-310-vht/backend-config.json
+++ /dev/null
@@ -1,72 +0,0 @@
-[
- {
- "name": "Corstone-310: Cortex-M85+Ethos-U55",
- "description": "Cortex-M85 and Ethos-U55 functional model implementations based on Corstone-310 design for MPS3 board.",
- "annotations": {
- "ip_class": "Ethos-U55",
- "sim_type": "FM",
- "variant": "Cortex-M85+Ethos-U55"
- },
- "commands": {
- "run": [
- "/opt/VHT/VHT_Corstone_SSE-310 -a {software.variables:eval_app} {user_params:input_file}@0x90000000 -C {user_params:mac} -C mps3_board.telnetterminal0.start_telnet=0 -C mps3_board.uart0.out_file='-' -C mps3_board.uart0.shutdown_on_eot=1 -C mps3_board.visualisation.disable-visualisation=1 --stat"
- ]
- },
- "user_params": {
- "run": [
- {
- "name": "--data",
- "description": "Full file name for a custom model. Model must be in TensorFlow Lite format compiled with Vela.",
- "values": [],
- "alias": "input_file"
- },
- {
- "name": "ethosu.num_macs=",
- "description": "Arm Ethos-U55 configuration - the number represents MACs per cycle.",
- "values": [
- "32",
- "64",
- "128",
- "256"
- ],
- "default_value": "256",
- "alias": "mac"
- }
- ]
- }
- },
- {
- "name": "Corstone-310: Cortex-M85+Ethos-U65",
- "description": "Cortex-M85 and Ethos-U65 functional model implementations based on Corstone-310 design for MPS3 board.",
- "annotations": {
- "ip_class": "Ethos-U65",
- "sim_type": "FM",
- "variant": "Cortex-M85+Ethos-U65"
- },
- "commands": {
- "run": [
- "/opt/VHT/VHT_Corstone_SSE-310_Ethos-U65 -a {software.variables:eval_app} {user_params:input_file}@0x90000000 -C {user_params:mac} -C mps3_board.telnetterminal0.start_telnet=0 -C mps3_board.uart0.out_file='-' -C mps3_board.uart0.shutdown_on_eot=1 -C mps3_board.visualisation.disable-visualisation=1 --stat"
- ]
- },
- "user_params": {
- "run": [
- {
- "name": "--data",
- "description": "Full file name for a custom model. Model must be in TensorFlow Lite format compiled with Vela.",
- "values": [],
- "alias": "input_file"
- },
- {
- "name": "ethosu.num_macs=",
- "description": "Arm Ethos-U65 configuration - the number represents MACs per cycle.",
- "values": [
- "256",
- "512"
- ],
- "default_value": "512",
- "alias": "mac"
- }
- ]
- }
- }
-]
diff --git a/src/mlia/resources/backend_configs/systems/corstone-310-vht/backend-config.json.license b/src/mlia/resources/backend_configs/systems/corstone-310-vht/backend-config.json.license
deleted file mode 100644
index 9b83bfc..0000000
--- a/src/mlia/resources/backend_configs/systems/corstone-310-vht/backend-config.json.license
+++ /dev/null
@@ -1,3 +0,0 @@
-SPDX-FileCopyrightText: Copyright 2022, Arm Limited and/or its affiliates.
-
-SPDX-License-Identifier: Apache-2.0
diff --git a/src/mlia/resources/backend_configs/systems/corstone-310/backend-config.json b/src/mlia/resources/backend_configs/systems/corstone-310/backend-config.json
deleted file mode 100644
index 6f4f89b..0000000
--- a/src/mlia/resources/backend_configs/systems/corstone-310/backend-config.json
+++ /dev/null
@@ -1,72 +0,0 @@
-[
- {
- "name": "Corstone-310: Cortex-M85+Ethos-U55",
- "description": "Cortex-M85 and Ethos-U55 functional model implementations based on Corstone-310 design for MPS3 board.",
- "annotations": {
- "ip_class": "Ethos-U55",
- "sim_type": "FM",
- "variant": "Cortex-M85+Ethos-U55"
- },
- "commands": {
- "run": [
- "FVP_Corstone_SSE-310 -a {software.variables:eval_app} {user_params:input_file}@0x90000000 -C {user_params:mac} -C mps3_board.telnetterminal0.start_telnet=0 -C mps3_board.uart0.out_file='-' -C mps3_board.uart0.shutdown_on_eot=1 -C mps3_board.visualisation.disable-visualisation=1 --stat"
- ]
- },
- "user_params": {
- "run": [
- {
- "name": "--data",
- "description": "Full file name for a custom model. Model must be in TensorFlow Lite format compiled with Vela.",
- "values": [],
- "alias": "input_file"
- },
- {
- "name": "ethosu.num_macs=",
- "description": "Arm Ethos-U55 configuration - the number represents MACs per cycle.",
- "values": [
- "32",
- "64",
- "128",
- "256"
- ],
- "default_value": "256",
- "alias": "mac"
- }
- ]
- }
- },
- {
- "name": "Corstone-310: Cortex-M85+Ethos-U65",
- "description": "Cortex-M85 and Ethos-U65 functional model implementations based on Corstone-310 design for MPS3 board.",
- "annotations": {
- "ip_class": "Ethos-U65",
- "sim_type": "FM",
- "variant": "Cortex-M85+Ethos-U65"
- },
- "commands": {
- "run": [
- "FVP_Corstone_SSE-310_Ethos-U65 -a {software.variables:eval_app} {user_params:input_file}@0x90000000 -C {user_params:mac} -C mps3_board.telnetterminal0.start_telnet=0 -C mps3_board.uart0.out_file='-' -C mps3_board.uart0.shutdown_on_eot=1 -C mps3_board.visualisation.disable-visualisation=1 --stat"
- ]
- },
- "user_params": {
- "run": [
- {
- "name": "--data",
- "description": "Full file name for a custom model. Model must be in TensorFlow Lite format compiled with Vela.",
- "values": [],
- "alias": "input_file"
- },
- {
- "name": "ethosu.num_macs=",
- "description": "Arm Ethos-U65 configuration - the number represents MACs per cycle.",
- "values": [
- "256",
- "512"
- ],
- "default_value": "512",
- "alias": "mac"
- }
- ]
- }
- }
-]
diff --git a/src/mlia/resources/backend_configs/systems/corstone-310/backend-config.json.license b/src/mlia/resources/backend_configs/systems/corstone-310/backend-config.json.license
deleted file mode 100644
index 9b83bfc..0000000
--- a/src/mlia/resources/backend_configs/systems/corstone-310/backend-config.json.license
+++ /dev/null
@@ -1,3 +0,0 @@
-SPDX-FileCopyrightText: Copyright 2022, Arm Limited and/or its affiliates.
-
-SPDX-License-Identifier: Apache-2.0
diff --git a/src/mlia/resources/backends/applications/APPLICATIONS.txt b/src/mlia/resources/backends/applications/APPLICATIONS.txt
index ca1003b..dd7ffdd 100644
--- a/src/mlia/resources/backends/applications/APPLICATIONS.txt
+++ b/src/mlia/resources/backends/applications/APPLICATIONS.txt
@@ -1,7 +1,5 @@
-SPDX-FileCopyrightText: Copyright 2022, Arm Limited and/or its affiliates.
+SPDX-FileCopyrightText: Copyright 2022-2023, Arm Limited and/or its affiliates.
SPDX-License-Identifier: Apache-2.0
This directory contains the application packages for the Generic Inference
Runner.
-
-Each package should contain its own backend-config.json file.
diff --git a/src/mlia/resources/backends/applications/inference_runner-sse-300-22.08.02-ethos-U55-Default-noTA/backend-config.json b/src/mlia/resources/backends/applications/inference_runner-sse-300-22.08.02-ethos-U55-Default-noTA/backend-config.json
deleted file mode 100644
index 4d8c928..0000000
--- a/src/mlia/resources/backends/applications/inference_runner-sse-300-22.08.02-ethos-U55-Default-noTA/backend-config.json
+++ /dev/null
@@ -1,14 +0,0 @@
-[
- {
- "name": "Generic Inference Runner: Ethos-U55",
- "description": "This application allows running inferences using custom NN TensorFlow Lite models on Ethos-U. No data pre-/post-processing is executed.",
- "supported_systems": [
- {
- "name": "Corstone-300: Cortex-M55+Ethos-U55"
- }
- ],
- "variables": {
- "eval_app": "{software.config_dir}/ethos-u-inference_runner.axf"
- }
- }
-]
diff --git a/src/mlia/resources/backends/applications/inference_runner-sse-300-22.08.02-ethos-U55-Default-noTA/backend-config.json.license b/src/mlia/resources/backends/applications/inference_runner-sse-300-22.08.02-ethos-U55-Default-noTA/backend-config.json.license
deleted file mode 100644
index 9b83bfc..0000000
--- a/src/mlia/resources/backends/applications/inference_runner-sse-300-22.08.02-ethos-U55-Default-noTA/backend-config.json.license
+++ /dev/null
@@ -1,3 +0,0 @@
-SPDX-FileCopyrightText: Copyright 2022, Arm Limited and/or its affiliates.
-
-SPDX-License-Identifier: Apache-2.0
diff --git a/src/mlia/resources/backends/applications/inference_runner-sse-300-22.08.02-ethos-U65-Default-noTA/backend-config.json b/src/mlia/resources/backends/applications/inference_runner-sse-300-22.08.02-ethos-U65-Default-noTA/backend-config.json
deleted file mode 100644
index 22ba2d9..0000000
--- a/src/mlia/resources/backends/applications/inference_runner-sse-300-22.08.02-ethos-U65-Default-noTA/backend-config.json
+++ /dev/null
@@ -1,14 +0,0 @@
-[
- {
- "name": "Generic Inference Runner: Ethos-U65",
- "description": "This application allows running inferences using custom NN TensorFlow Lite models on Ethos-U. No data pre-/post-processing is executed.",
- "supported_systems": [
- {
- "name": "Corstone-300: Cortex-M55+Ethos-U65"
- }
- ],
- "variables": {
- "eval_app": "{software.config_dir}/ethos-u-inference_runner.axf"
- }
- }
-]
diff --git a/src/mlia/resources/backends/applications/inference_runner-sse-300-22.08.02-ethos-U65-Default-noTA/backend-config.json.license b/src/mlia/resources/backends/applications/inference_runner-sse-300-22.08.02-ethos-U65-Default-noTA/backend-config.json.license
deleted file mode 100644
index 9b83bfc..0000000
--- a/src/mlia/resources/backends/applications/inference_runner-sse-300-22.08.02-ethos-U65-Default-noTA/backend-config.json.license
+++ /dev/null
@@ -1,3 +0,0 @@
-SPDX-FileCopyrightText: Copyright 2022, Arm Limited and/or its affiliates.
-
-SPDX-License-Identifier: Apache-2.0
diff --git a/src/mlia/resources/backends/applications/inference_runner-sse-310-22.08.02-ethos-U55-Default-noTA/backend-config.json b/src/mlia/resources/backends/applications/inference_runner-sse-310-22.08.02-ethos-U55-Default-noTA/backend-config.json
deleted file mode 100644
index f7ee996..0000000
--- a/src/mlia/resources/backends/applications/inference_runner-sse-310-22.08.02-ethos-U55-Default-noTA/backend-config.json
+++ /dev/null
@@ -1,14 +0,0 @@
-[
- {
- "name": "Generic Inference Runner: Ethos-U55",
- "description": "This application allows running inferences using custom NN TensorFlow Lite models on Ethos-U. No data pre-/post-processing is executed.",
- "supported_systems": [
- {
- "name": "Corstone-310: Cortex-M85+Ethos-U55"
- }
- ],
- "variables": {
- "eval_app": "{software.config_dir}/ethos-u-inference_runner.axf"
- }
- }
-]
diff --git a/src/mlia/resources/backends/applications/inference_runner-sse-310-22.08.02-ethos-U55-Default-noTA/backend-config.json.license b/src/mlia/resources/backends/applications/inference_runner-sse-310-22.08.02-ethos-U55-Default-noTA/backend-config.json.license
deleted file mode 100644
index 9b83bfc..0000000
--- a/src/mlia/resources/backends/applications/inference_runner-sse-310-22.08.02-ethos-U55-Default-noTA/backend-config.json.license
+++ /dev/null
@@ -1,3 +0,0 @@
-SPDX-FileCopyrightText: Copyright 2022, Arm Limited and/or its affiliates.
-
-SPDX-License-Identifier: Apache-2.0
diff --git a/src/mlia/resources/backends/applications/inference_runner-sse-310-22.08.02-ethos-U65-Default-noTA/backend-config.json b/src/mlia/resources/backends/applications/inference_runner-sse-310-22.08.02-ethos-U65-Default-noTA/backend-config.json
deleted file mode 100644
index 21d8239..0000000
--- a/src/mlia/resources/backends/applications/inference_runner-sse-310-22.08.02-ethos-U65-Default-noTA/backend-config.json
+++ /dev/null
@@ -1,14 +0,0 @@
-[
- {
- "name": "Generic Inference Runner: Ethos-U65",
- "description": "This application allows running inferences using custom NN TensorFlow Lite models on Ethos-U. No data pre-/post-processing is executed.",
- "supported_systems": [
- {
- "name": "Corstone-310: Cortex-M85+Ethos-U65"
- }
- ],
- "variables": {
- "eval_app": "{software.config_dir}/ethos-u-inference_runner.axf"
- }
- }
-]
diff --git a/src/mlia/resources/backends/applications/inference_runner-sse-310-22.08.02-ethos-U65-Default-noTA/backend-config.json.license b/src/mlia/resources/backends/applications/inference_runner-sse-310-22.08.02-ethos-U65-Default-noTA/backend-config.json.license
deleted file mode 100644
index 9b83bfc..0000000
--- a/src/mlia/resources/backends/applications/inference_runner-sse-310-22.08.02-ethos-U65-Default-noTA/backend-config.json.license
+++ /dev/null
@@ -1,3 +0,0 @@
-SPDX-FileCopyrightText: Copyright 2022, Arm Limited and/or its affiliates.
-
-SPDX-License-Identifier: Apache-2.0
diff --git a/src/mlia/resources/backends/systems/.gitignore b/src/mlia/resources/backends/systems/.gitignore
deleted file mode 100644
index 0226166..0000000
--- a/src/mlia/resources/backends/systems/.gitignore
+++ /dev/null
@@ -1,6 +0,0 @@
-# SPDX-FileCopyrightText: Copyright 2022, Arm Limited and/or its affiliates.
-# SPDX-License-Identifier: Apache-2.0
-# Ignore everything in this directory
-*
-# Except this file
-!.gitignore
diff --git a/src/mlia/target/cortex_a/__init__.py b/src/mlia/target/cortex_a/__init__.py
index 9b0e611..f686bfc 100644
--- a/src/mlia/target/cortex_a/__init__.py
+++ b/src/mlia/target/cortex_a/__init__.py
@@ -1,7 +1,7 @@
-# SPDX-FileCopyrightText: Copyright 2022, Arm Limited and/or its affiliates.
+# SPDX-FileCopyrightText: Copyright 2022-2023, Arm Limited and/or its affiliates.
# SPDX-License-Identifier: Apache-2.0
"""Cortex-A target module."""
from mlia.target.registry import registry
from mlia.target.registry import TargetInfo
-registry.register("Cortex-A", TargetInfo(["ArmNNTFLiteDelegate"]))
+registry.register("cortex-a", TargetInfo(["ArmNNTFLiteDelegate"]))
diff --git a/src/mlia/target/ethos_u/__init__.py b/src/mlia/target/ethos_u/__init__.py
index 3c92ae5..d53be53 100644
--- a/src/mlia/target/ethos_u/__init__.py
+++ b/src/mlia/target/ethos_u/__init__.py
@@ -1,8 +1,8 @@
-# SPDX-FileCopyrightText: Copyright 2022, Arm Limited and/or its affiliates.
+# SPDX-FileCopyrightText: Copyright 2022-2023, Arm Limited and/or its affiliates.
# SPDX-License-Identifier: Apache-2.0
"""Ethos-U target module."""
from mlia.target.registry import registry
from mlia.target.registry import TargetInfo
-registry.register("Ethos-U55", TargetInfo(["Vela", "Corstone-300", "Corstone-310"]))
-registry.register("Ethos-U65", TargetInfo(["Vela", "Corstone-300", "Corstone-310"]))
+registry.register("ethos-u55", TargetInfo(["Vela", "Corstone-300", "Corstone-310"]))
+registry.register("ethos-u65", TargetInfo(["Vela", "Corstone-300", "Corstone-310"]))
diff --git a/src/mlia/target/ethos_u/performance.py b/src/mlia/target/ethos_u/performance.py
index 0d791a1..be1a287 100644
--- a/src/mlia/target/ethos_u/performance.py
+++ b/src/mlia/target/ethos_u/performance.py
@@ -11,20 +11,17 @@ from typing import Union
import mlia.backend.vela.compiler as vela_comp
import mlia.backend.vela.performance as vela_perf
-from mlia.backend.corstone.performance import DeviceInfo
+from mlia.backend.corstone import is_corstone_backend
from mlia.backend.corstone.performance import estimate_performance
-from mlia.backend.corstone.performance import ModelInfo
-from mlia.backend.registry import get_supported_backends
from mlia.core.context import Context
from mlia.core.performance import PerformanceEstimator
from mlia.nn.tensorflow.config import get_tflite_model
from mlia.nn.tensorflow.config import ModelConfiguration
from mlia.nn.tensorflow.optimizations.select import OptimizationSettings
from mlia.target.ethos_u.config import EthosUConfiguration
-from mlia.target.registry import is_supported
+from mlia.target.registry import supported_backends
from mlia.utils.logging import log_action
-
logger = logging.getLogger(__name__)
@@ -186,14 +183,11 @@ class CorstonePerformanceEstimator(
model_path, self.device.compiler_options, optimized_model_path
)
- model_info = ModelInfo(model_path=optimized_model_path)
- device_info = DeviceInfo(
- device_type=self.device.target, # type: ignore
- mac=self.device.mac,
- )
-
corstone_perf_metrics = estimate_performance(
- model_info, device_info, self.backend
+ self.device.target,
+ self.device.mac,
+ optimized_model_path,
+ self.backend,
)
return NPUCycles(
@@ -222,11 +216,12 @@ class EthosUPerformanceEstimator(
self.device = device
if backends is None:
backends = ["Vela"] # Only Vela is always available as default
+ ethos_u_backends = supported_backends(device.target)
for backend in backends:
- if backend != "Vela" and not is_supported(backend):
+ if backend != "Vela" and backend not in ethos_u_backends:
raise ValueError(
f"Unsupported backend '{backend}'. "
- f"Only 'Vela' and {get_supported_backends()} "
+ f"Only 'Vela' and {ethos_u_backends} "
"are supported."
)
self.backends = set(backends)
@@ -241,12 +236,11 @@ class EthosUPerformanceEstimator(
memory_usage = None
npu_cycles = None
-
for backend in self.backends:
if backend == "Vela":
vela_estimator = VelaPerformanceEstimator(self.context, self.device)
memory_usage = vela_estimator.estimate(tflite_model)
- elif backend in get_supported_backends():
+ elif is_corstone_backend(backend):
corstone_estimator = CorstonePerformanceEstimator(
self.context, self.device, backend
)
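The Corstone estimator now passes the target, MAC configuration, model path and backend name directly instead of wrapping them in ModelInfo/DeviceInfo objects. A minimal sketch of the new call shape, with illustrative argument values (the exact parameter types are assumptions here):

from pathlib import Path

from mlia.backend.corstone.performance import estimate_performance

# Illustrative values only; in MLIA they come from EthosUConfiguration and the CLI.
perf_metrics = estimate_performance(
    "ethos-u55",                 # device target
    256,                         # MACs per cycle
    Path("model_vela.tflite"),   # Vela-compiled TensorFlow Lite model (Path assumed)
    "Corstone-300",              # backend to run on
)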
diff --git a/src/mlia/target/registry.py b/src/mlia/target/registry.py
index 325dd04..4870fc8 100644
--- a/src/mlia/target/registry.py
+++ b/src/mlia/target/registry.py
@@ -3,12 +3,9 @@
"""Target module."""
from __future__ import annotations
-from typing import cast
-
from mlia.backend.config import BackendType
-from mlia.backend.manager import DefaultInstallationManager
+from mlia.backend.manager import get_installation_manager
from mlia.backend.registry import registry as backend_registry
-from mlia.cli.config import get_installation_manager
from mlia.core.common import AdviceCategory
from mlia.core.reporting import Column
from mlia.core.reporting import Table
@@ -65,14 +62,23 @@ def supported_targets(advice: AdviceCategory) -> list[str]:
]
+def all_supported_backends() -> set[str]:
+ """Return set of all supported backends by all targets."""
+ return {
+ backend
+ for item in registry.items.values()
+ for backend in item.supported_backends
+ }
+
+
def table() -> Table:
"""Get a table representation of registered targets with backends."""
def get_status(backend: str) -> str:
if backend_registry.items[backend].type == BackendType.BUILTIN:
return BackendType.BUILTIN.name
- mgr = cast(DefaultInstallationManager, get_installation_manager())
- return "INSTALLED" if mgr.already_installed(backend) else "NOT INSTALLED"
+ mgr = get_installation_manager()
+ return "INSTALLED" if mgr.backend_installed(backend) else "NOT INSTALLED"
def get_advice(target: str) -> tuple[str, str, str]:
supported = supported_advice(target)
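The table now resolves installation status through the installation manager from mlia.backend.manager rather than the CLI config module, and all_supported_backends() aggregates the backends declared by every registered target. A hedged sketch of how the two could be combined (the real table() above additionally short-circuits BUILTIN backends via the BackendType check):

from mlia.backend.manager import get_installation_manager
from mlia.target.registry import all_supported_backends

# Illustrative: report the install status of every backend any target declares.
manager = get_installation_manager()
for backend in sorted(all_supported_backends()):
    status = "INSTALLED" if manager.backend_installed(backend) else "NOT INSTALLED"
    print(f"{backend}: {status}")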
diff --git a/src/mlia/target/tosa/__init__.py b/src/mlia/target/tosa/__init__.py
index 33c9cf2..06bf1a9 100644
--- a/src/mlia/target/tosa/__init__.py
+++ b/src/mlia/target/tosa/__init__.py
@@ -1,7 +1,7 @@
-# SPDX-FileCopyrightText: Copyright 2022, Arm Limited and/or its affiliates.
+# SPDX-FileCopyrightText: Copyright 2022-2023, Arm Limited and/or its affiliates.
# SPDX-License-Identifier: Apache-2.0
"""TOSA target module."""
from mlia.target.registry import registry
from mlia.target.registry import TargetInfo
-registry.register("TOSA", TargetInfo(["TOSA-Checker"]))
+registry.register("tosa", TargetInfo(["tosa-checker"]))
diff --git a/src/mlia/utils/proc.py b/src/mlia/utils/proc.py
new file mode 100644
index 0000000..d11bfc5
--- /dev/null
+++ b/src/mlia/utils/proc.py
@@ -0,0 +1,55 @@
+# SPDX-FileCopyrightText: Copyright 2023, Arm Limited and/or its affiliates.
+# SPDX-License-Identifier: Apache-2.0
+"""Module for process management."""
+from __future__ import annotations
+
+import logging
+import subprocess # nosec
+from dataclasses import dataclass
+from pathlib import Path
+from typing import Callable
+from typing import Generator
+
+
+logger = logging.getLogger(__name__)
+
+
+@dataclass(frozen=True)
+class Command:
+ """Command information."""
+
+ cmd: list[str]
+ cwd: Path = Path.cwd()
+ env: dict[str, str] | None = None
+
+
+def command_output(command: Command) -> Generator[str, None, None]:
+ """Get command output."""
+ logger.debug("Running command: %s", command)
+
+ with subprocess.Popen( # nosec
+ command.cmd,
+ stdout=subprocess.PIPE,
+ stderr=subprocess.STDOUT,
+ universal_newlines=True,
+ bufsize=1,
+ cwd=command.cwd,
+ env=command.env,
+ ) as process:
+ yield from process.stdout or []
+
+ if process.returncode:
+ raise subprocess.CalledProcessError(process.returncode, command.cmd)
+
+
+OutputConsumer = Callable[[str], None]
+
+
+def process_command_output(
+ command: Command,
+ consumers: list[OutputConsumer],
+) -> None:
+ """Execute command and process output."""
+ for line in command_output(command):
+ for consumer in consumers:
+ consumer(line)
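A minimal usage sketch for the new process helpers; the echo command and the consumers below are illustrative only:

from pathlib import Path

from mlia.utils.proc import Command, process_command_output

collected: list[str] = []
command = Command(cmd=["echo", "hello"], cwd=Path.cwd())

# Every line of the combined stdout/stderr stream is passed to each consumer in turn.
process_command_output(command, [collected.append, print])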