about summary refs log tree commit diff
path: root/src/mlia/target/ethos_u
diff options
context:
space:
mode:
Diffstat (limited to 'src/mlia/target/ethos_u')
-rw-r--r--src/mlia/target/ethos_u/advisor.py3
-rw-r--r--src/mlia/target/ethos_u/performance.py46
-rw-r--r--src/mlia/target/ethos_u/reporters.py109
3 files changed, 128 insertions, 30 deletions
diff --git a/src/mlia/target/ethos_u/advisor.py b/src/mlia/target/ethos_u/advisor.py
index 9f5b3a6..b5932d0 100644
--- a/src/mlia/target/ethos_u/advisor.py
+++ b/src/mlia/target/ethos_u/advisor.py
@@ -1,4 +1,4 @@
-# SPDX-FileCopyrightText: Copyright 2022-2023, Arm Limited and/or its affiliates.
+# SPDX-FileCopyrightText: Copyright 2022-2024, Arm Limited and/or its affiliates.
# SPDX-License-Identifier: Apache-2.0
"""Ethos-U MLIA module."""
from __future__ import annotations
@@ -44,6 +44,7 @@ class EthosUInferenceAdvisor(DefaultInferenceAdvisor):
"""Return list of the data collectors."""
model = self.get_model(context)
target_config = self._get_target_config(context)
+ target_config.compiler_options.output_dir = context.output_dir # type: ignore
backends = self._get_backends(context)
collectors: list[DataCollector] = []
diff --git a/src/mlia/target/ethos_u/performance.py b/src/mlia/target/ethos_u/performance.py
index a0526e4..8decb75 100644
--- a/src/mlia/target/ethos_u/performance.py
+++ b/src/mlia/target/ethos_u/performance.py
@@ -1,4 +1,4 @@
-# SPDX-FileCopyrightText: Copyright 2022-2023, Arm Limited and/or its affiliates.
+# SPDX-FileCopyrightText: Copyright 2022-2024, Arm Limited and/or its affiliates.
# SPDX-License-Identifier: Apache-2.0
"""Performance estimation."""
from __future__ import annotations
@@ -13,6 +13,7 @@ import mlia.backend.vela.compiler as vela_comp
import mlia.backend.vela.performance as vela_perf
from mlia.backend.corstone import is_corstone_backend
from mlia.backend.corstone.performance import estimate_performance
+from mlia.backend.vela.performance import LayerwisePerfInfo
from mlia.core.context import Context
from mlia.core.performance import PerformanceEstimator
from mlia.nn.select import OptimizationSettings
@@ -95,16 +96,23 @@ class PerformanceMetrics:
target_config: EthosUConfiguration
npu_cycles: NPUCycles | None
memory_usage: MemoryUsage | None
+ layerwise_perf_info: LayerwisePerfInfo | None
def in_kilobytes(self) -> PerformanceMetrics:
"""Return metrics with memory usage in KiB."""
if self.memory_usage is None:
return PerformanceMetrics(
- self.target_config, self.npu_cycles, self.memory_usage
+ self.target_config,
+ self.npu_cycles,
+ self.memory_usage,
+ self.layerwise_perf_info,
)
return PerformanceMetrics(
- self.target_config, self.npu_cycles, self.memory_usage.in_kilobytes()
+ self.target_config,
+ self.npu_cycles,
+ self.memory_usage.in_kilobytes(),
+ self.layerwise_perf_info,
)
@@ -119,7 +127,9 @@ class OptimizationPerformanceMetrics:
class VelaPerformanceEstimator(
- PerformanceEstimator[Union[Path, ModelConfiguration], MemoryUsage]
+ PerformanceEstimator[
+ Union[Path, ModelConfiguration], tuple[MemoryUsage, LayerwisePerfInfo]
+ ]
):
"""Vela based performance estimator."""
@@ -128,7 +138,9 @@ class VelaPerformanceEstimator(
self.context = context
self.target = target_config
- def estimate(self, model: Path | ModelConfiguration) -> MemoryUsage:
+ def estimate(
+ self, model: Path | ModelConfiguration
+ ) -> tuple[MemoryUsage, LayerwisePerfInfo]:
"""Estimate performance."""
with log_action("Getting the memory usage metrics ..."):
model_path = (
@@ -141,12 +153,15 @@ class VelaPerformanceEstimator(
model_path, self.target.compiler_options
)
- return MemoryUsage(
- vela_perf_metrics.sram_memory_area_size,
- vela_perf_metrics.dram_memory_area_size,
- vela_perf_metrics.unknown_memory_area_size,
- vela_perf_metrics.on_chip_flash_memory_area_size,
- vela_perf_metrics.off_chip_flash_memory_area_size,
+ return (
+ MemoryUsage(
+ vela_perf_metrics.sram_memory_area_size,
+ vela_perf_metrics.dram_memory_area_size,
+ vela_perf_metrics.unknown_memory_area_size,
+ vela_perf_metrics.on_chip_flash_memory_area_size,
+ vela_perf_metrics.off_chip_flash_memory_area_size,
+ ),
+ vela_perf_metrics.layerwise_performance_info,
)
@@ -238,12 +253,15 @@ class EthosUPerformanceEstimator(
memory_usage = None
npu_cycles = None
+ layerwise_perf_info = None
for backend in self.backends:
if backend == "vela":
vela_estimator = VelaPerformanceEstimator(
self.context, self.target_config
)
- memory_usage = vela_estimator.estimate(tflite_model)
+ memory_usage, layerwise_perf_info = vela_estimator.estimate(
+ tflite_model
+ )
elif is_corstone_backend(backend):
corstone_estimator = CorstonePerformanceEstimator(
self.context, self.target_config, backend
@@ -256,4 +274,6 @@ class EthosUPerformanceEstimator(
backend,
)
- return PerformanceMetrics(self.target_config, npu_cycles, memory_usage)
+ return PerformanceMetrics(
+ self.target_config, npu_cycles, memory_usage, layerwise_perf_info
+ )
diff --git a/src/mlia/target/ethos_u/reporters.py b/src/mlia/target/ethos_u/reporters.py
index 711f036..b747ce5 100644
--- a/src/mlia/target/ethos_u/reporters.py
+++ b/src/mlia/target/ethos_u/reporters.py
@@ -1,14 +1,16 @@
-# SPDX-FileCopyrightText: Copyright 2022-2023, Arm Limited and/or its affiliates.
+# SPDX-FileCopyrightText: Copyright 2022-2024, Arm Limited and/or its affiliates.
# SPDX-License-Identifier: Apache-2.0
"""Reports module."""
from __future__ import annotations
from collections import defaultdict
+from dataclasses import fields
from typing import Any
from typing import Callable
from mlia.backend.vela.compat import Operator
from mlia.backend.vela.compat import Operators
+from mlia.backend.vela.performance import layer_metrics
from mlia.core.advice_generation import Advice
from mlia.core.reporters import report_advice
from mlia.core.reporting import BytesCell
@@ -16,6 +18,7 @@ from mlia.core.reporting import Cell
from mlia.core.reporting import ClockCell
from mlia.core.reporting import Column
from mlia.core.reporting import CompoundFormatter
+from mlia.core.reporting import CompoundReport
from mlia.core.reporting import CyclesCell
from mlia.core.reporting import Format
from mlia.core.reporting import NestedReport
@@ -237,10 +240,59 @@ def report_target_details(target_config: EthosUConfiguration) -> Report:
)
-def metrics_as_records(perf_metrics: list[PerformanceMetrics]) -> list[tuple]:
+def metrics_as_records(
+ perf_metrics: list[PerformanceMetrics],
+) -> tuple[list[tuple], list[tuple]]:
"""Convert perf metrics object into list of records."""
perf_metrics = [item.in_kilobytes() for item in perf_metrics]
+ def _layerwise_as_metrics(
+ perf_metrics: list[PerformanceMetrics],
+ ) -> list[tuple]:
+ metric_map = defaultdict(list) # type: dict[str, list]
+ format_types = {int: "12,d", str: "", float: "12.2f"}
+ rows = []
+ for perf_metric in perf_metrics:
+ if perf_metric.layerwise_perf_info:
+ for layerwise_metric in perf_metric.layerwise_perf_info.layerwise_info:
+ field_names = [
+ field.name
+ for field in fields(layerwise_metric)
+ if field.name != "name"
+ ]
+ duplicate_idx = 1
+ dict_key = getattr(layerwise_metric, "name")
+ while dict_key in metric_map:
+ dict_key = (
+ getattr(layerwise_metric, "name")
+ + " ("
+ + str(duplicate_idx)
+ + ")"
+ )
+ duplicate_idx += 1
+ for field_name in field_names:
+ metric_map[dict_key].append(
+ getattr(layerwise_metric, field_name)
+ )
+ rows = [
+ (
+ name,
+ *(
+ Cell(
+ value,
+ Format(
+ str_fmt=format_types[type(value)]
+ if type(value) in format_types
+ else ""
+ ),
+ )
+ for value in values
+ ),
+ )
+ for name, values in metric_map.items()
+ ]
+ return rows
+
def _cycles_as_records(perf_metrics: list[PerformanceMetrics]) -> list[tuple]:
metric_map = defaultdict(list)
for metrics in perf_metrics:
@@ -306,7 +358,7 @@ def metrics_as_records(perf_metrics: list[PerformanceMetrics]) -> list[tuple]:
_data_beats_as_records,
)
for metrics in metrics_func(perf_metrics)
- ]
+ ], _layerwise_as_metrics(perf_metrics)
def report_perf_metrics(
@@ -315,9 +367,9 @@ def report_perf_metrics(
"""Return comparison table for the performance metrics."""
if isinstance(perf_metrics, PerformanceMetrics):
perf_metrics = [perf_metrics]
+ rows, layerwise_rows = metrics_as_records(perf_metrics)
- rows = metrics_as_records(perf_metrics)
-
+    # Create a separate table for layerwise data
if len(perf_metrics) == 2:
return Table(
columns=[
@@ -349,17 +401,42 @@ def report_perf_metrics(
alias="performance_metrics",
notes="IMPORTANT: The performance figures above refer to NPU only",
)
-
- return Table(
- columns=[
- Column("Metric", alias="metric", fmt=Format(wrap_width=30)),
- Column("Value", alias="value", fmt=Format(wrap_width=15)),
- Column("Unit", alias="unit", fmt=Format(wrap_width=15)),
- ],
- rows=rows,
- name="Performance metrics",
- alias="performance_metrics",
- notes="IMPORTANT: The performance figures above refer to NPU only",
+ if layerwise_rows == []:
+ return Table(
+ columns=[
+ Column("Metric", alias="metric", fmt=Format(wrap_width=30)),
+ Column("Value", alias="value", fmt=Format(wrap_width=15)),
+ Column("Unit", alias="unit", fmt=Format(wrap_width=15)),
+ ],
+ rows=rows,
+ name="Performance metrics",
+ alias="performance_metrics",
+ notes="IMPORTANT: The performance figures above refer to NPU only",
+ )
+ return CompoundReport(
+ [
+ Table(
+ columns=[
+ Column("Metric", alias="metric", fmt=Format(wrap_width=30)),
+ Column("Value", alias="value", fmt=Format(wrap_width=15)),
+ Column("Unit", alias="unit", fmt=Format(wrap_width=15)),
+ ],
+ rows=rows,
+ name="Performance metrics",
+ alias="performance_metrics",
+ notes="IMPORTANT: The performance figures above refer to NPU only",
+ ),
+ Table(
+ columns=[
+ Column(name, alias=alias, fmt=Format(wrap_width=30))
+ for alias, _, name in layer_metrics
+ ],
+ rows=layerwise_rows,
+ name="Layer-Wise Metrics",
+ alias="layerwise_metrics",
+ notes="",
+ ),
+ ]
)