From 446c379c92e15ad8f24ed0db853dd0fc9c271151 Mon Sep 17 00:00:00 2001 From: Ruomei Yan Date: Mon, 20 Feb 2023 15:32:54 +0000 Subject: Add a CLI component to enable rewrites * Add flags for rewrite (--rewrite, --rewrite-start, --rewrite-end, --rewrite-target) * Refactor CLI interfaces to accept tflite models with optimize for rewrite, keras models with optimize for clustering and pruning * Refactor and move common.py and select.py out of the folder nn/tensorflow/optimizations * Add file nn/rewrite/core/rewrite.py as placeholder * Update/add unit tests * Refactor OptimizeModel in ethos_u/data_collection.py for accepting tflite model case * Extend the logic so that if "--rewrite" is specified, we don't add pruning to also accept TFLite models. * Update README.md Resolves: MLIA-750, MLIA-854, MLIA-865 Signed-off-by: Benjamin Klimczak Change-Id: I67d85f71fa253d2bad4efe304ad8225970b9622c --- src/mlia/cli/commands.py | 12 +- src/mlia/cli/helpers.py | 8 +- src/mlia/cli/main.py | 2 + src/mlia/cli/options.py | 64 ++++++- src/mlia/nn/common.py | 29 +++ src/mlia/nn/rewrite/core/rewrite.py | 45 +++++ src/mlia/nn/select.py | 210 +++++++++++++++++++++ src/mlia/nn/tensorflow/optimizations/clustering.py | 6 +- src/mlia/nn/tensorflow/optimizations/common.py | 29 --- src/mlia/nn/tensorflow/optimizations/pruning.py | 4 +- src/mlia/nn/tensorflow/optimizations/select.py | 175 ----------------- src/mlia/target/ethos_u/advice_generation.py | 2 +- src/mlia/target/ethos_u/advisor.py | 16 +- src/mlia/target/ethos_u/data_analysis.py | 2 +- src/mlia/target/ethos_u/data_collection.py | 42 +++-- src/mlia/target/ethos_u/performance.py | 2 +- 16 files changed, 416 insertions(+), 232 deletions(-) create mode 100644 src/mlia/nn/common.py create mode 100644 src/mlia/nn/rewrite/core/rewrite.py create mode 100644 src/mlia/nn/select.py delete mode 100644 src/mlia/nn/tensorflow/optimizations/common.py delete mode 100644 src/mlia/nn/tensorflow/optimizations/select.py (limited to 'src') diff --git 
a/src/mlia/cli/commands.py b/src/mlia/cli/commands.py index 1f339ee..7af41d9 100644 --- a/src/mlia/cli/commands.py +++ b/src/mlia/cli/commands.py @@ -96,7 +96,7 @@ def check( ) -def optimize( # pylint: disable=too-many-arguments +def optimize( # pylint: disable=too-many-locals,too-many-arguments ctx: ExecutionContext, target_profile: str, model: str, @@ -104,8 +104,13 @@ def optimize( # pylint: disable=too-many-arguments clustering: bool, pruning_target: float | None, clustering_target: int | None, + rewrite: bool | None = None, + rewrite_target: str | None = None, + rewrite_start: str | None = None, + rewrite_end: str | None = None, layers_to_optimize: list[str] | None = None, backend: list[str] | None = None, + dataset: Path | None = None, ) -> None: """Show the performance improvements (if any) after applying the optimizations. @@ -145,7 +150,12 @@ def optimize( # pylint: disable=too-many-arguments clustering, pruning_target, clustering_target, + rewrite, + rewrite_target, + rewrite_start, + rewrite_end, layers_to_optimize, + dataset, ) ) diff --git a/src/mlia/cli/helpers.py b/src/mlia/cli/helpers.py index abc6df0..824db1b 100644 --- a/src/mlia/cli/helpers.py +++ b/src/mlia/cli/helpers.py @@ -10,7 +10,7 @@ from typing import cast from mlia.cli.options import get_target_profile_opts from mlia.core.helpers import ActionResolver -from mlia.nn.tensorflow.optimizations.select import OptimizationSettings +from mlia.nn.select import OptimizationSettings from mlia.nn.tensorflow.utils import is_keras_model from mlia.target.config import get_builtin_profile_path from mlia.target.config import is_builtin_profile @@ -47,7 +47,11 @@ class CLIActionResolver(ActionResolver): ) -> list[str]: """Return specific optimization command description.""" opt_types = " ".join("--" + opt.optimization_type for opt in opt_settings) - opt_targs_strings = ["--pruning-target", "--clustering-target"] + opt_targs_strings = [ + "--pruning-target", + "--clustering-target", + "--rewrite-target", + 
] opt_targs = ",".join( f"{opt_targs_strings[i]} {opt.optimization_target}" for i, opt in enumerate(opt_settings) diff --git a/src/mlia/cli/main.py b/src/mlia/cli/main.py index 88258d5..9e1b7cd 100644 --- a/src/mlia/cli/main.py +++ b/src/mlia/cli/main.py @@ -23,6 +23,7 @@ from mlia.cli.options import add_backend_install_options from mlia.cli.options import add_backend_options from mlia.cli.options import add_backend_uninstall_options from mlia.cli.options import add_check_category_options +from mlia.cli.options import add_dataset_options from mlia.cli.options import add_debug_options from mlia.cli.options import add_keras_model_options from mlia.cli.options import add_model_options @@ -89,6 +90,7 @@ def get_commands() -> list[CommandInfo]: add_multi_optimization_options, add_output_options, add_debug_options, + add_dataset_options, ], ), ] diff --git a/src/mlia/cli/options.py b/src/mlia/cli/options.py index fe177eb..7b3b373 100644 --- a/src/mlia/cli/options.py +++ b/src/mlia/cli/options.py @@ -12,6 +12,7 @@ from typing import Sequence from mlia.backend.corstone import is_corstone_backend from mlia.backend.manager import get_available_backends from mlia.core.common import AdviceCategory +from mlia.core.errors import ConfigurationError from mlia.core.typing import OutputFormat from mlia.target.registry import builtin_profile_names from mlia.target.registry import registry as target_registry @@ -89,6 +90,10 @@ def add_multi_optimization_options(parser: argparse.ArgumentParser) -> None: "--clustering", action="store_true", help="Apply clustering optimization." ) + multi_optimization_group.add_argument( + "--rewrite", action="store_true", help="Apply rewrite optimization." 
+ ) + + multi_optimization_group.add_argument( + "--pruning-target", type=float, @@ -103,6 +108,24 @@ def add_multi_optimization_options(parser: argparse.ArgumentParser) -> None: f"(default: {DEFAULT_CLUSTERING_TARGET})", ) + multi_optimization_group.add_argument( + "--rewrite-target", + type=str, + help="Type of rewrite to apply to the subgraph/layer.", + ) + + multi_optimization_group.add_argument( + "--rewrite-start", + type=str, + help="Starting node in the graph of the subgraph to be rewritten.", + ) + + multi_optimization_group.add_argument( + "--rewrite-end", + type=str, + help="Ending node in the graph of the subgraph to be rewritten.", + ) + + def add_model_options(parser: argparse.ArgumentParser) -> None: """Add model specific options.""" @@ -131,6 +154,16 @@ def add_debug_options(parser: argparse.ArgumentParser) -> None: ) + +def add_dataset_options(parser: argparse.ArgumentParser) -> None: + """Add dataset options.""" + dataset_group = parser.add_argument_group("dataset options") + dataset_group.add_argument( + "--dataset", + type=Path, + help="The path of input tfrec file", + ) + + def add_keras_model_options(parser: argparse.ArgumentParser) -> None: """Add model specific options.""" model_group = parser.add_argument_group("Keras model options") @@ -239,12 +272,17 @@ def add_output_directory(parser: argparse.ArgumentParser) -> None: ) -def parse_optimization_parameters( +def parse_optimization_parameters( # pylint: disable=too-many-arguments pruning: bool = False, clustering: bool = False, pruning_target: float | None = None, clustering_target: int | None = None, + rewrite: bool | None = False, + rewrite_target: str | None = None, + rewrite_start: str | None = None, + rewrite_end: str | None = None, layers_to_optimize: list[str] | None = None, + dataset: Path | None = None, ) -> list[dict[str, Any]]: """Parse provided optimization parameters.""" opt_types = [] @@ -263,7 +301,14 @@ def parse_optimization_parameters( if not clustering_target: clustering_target 
= DEFAULT_CLUSTERING_TARGET - if (pruning is False and clustering is False) or pruning: + if rewrite: + if not rewrite_target or not rewrite_start or not rewrite_end: + raise ConfigurationError( + "To perform rewrite, rewrite-target, rewrite-start and " + "rewrite-end must be set." + ) + + if not any((pruning, clustering, rewrite)) or pruning: opt_types.append("pruning") opt_targets.append(pruning_target) @@ -276,10 +321,25 @@ def parse_optimization_parameters( "optimization_type": opt_type.strip(), "optimization_target": float(opt_target), "layers_to_optimize": layers_to_optimize, + "dataset": dataset, } for opt_type, opt_target in zip(opt_types, opt_targets) ] + if rewrite: + if rewrite_target not in ["remove", "fully_connected"]: + raise ConfigurationError( + "Currently only remove and fully_connected are supported." + ) + optimizer_params.append( + { + "optimization_type": "rewrite", + "optimization_target": rewrite_target, + "layers_to_optimize": [rewrite_start, rewrite_end], + "dataset": dataset, + } + ) + return optimizer_params diff --git a/src/mlia/nn/common.py b/src/mlia/nn/common.py new file mode 100644 index 0000000..5a8d685 --- /dev/null +++ b/src/mlia/nn/common.py @@ -0,0 +1,29 @@ +# SPDX-FileCopyrightText: Copyright 2022-2023, Arm Limited and/or its affiliates. 
+# SPDX-License-Identifier: Apache-2.0 +"""Common items for the optimizations module.""" +from abc import ABC +from abc import abstractmethod +from dataclasses import dataclass + +from mlia.nn.tensorflow.config import ModelConfiguration + + +@dataclass +class OptimizerConfiguration: + """Abstract optimizer configuration.""" + + +class Optimizer(ABC): + """Abstract class for the optimizer.""" + + @abstractmethod + def get_model(self) -> ModelConfiguration: + """Abstract method to return the model instance from the optimizer.""" + + @abstractmethod + def apply_optimization(self) -> None: + """Abstract method to apply optimization to the model.""" + + @abstractmethod + def optimization_config(self) -> str: + """Return string representation of the optimization config.""" diff --git a/src/mlia/nn/rewrite/core/rewrite.py b/src/mlia/nn/rewrite/core/rewrite.py new file mode 100644 index 0000000..d4f61c5 --- /dev/null +++ b/src/mlia/nn/rewrite/core/rewrite.py @@ -0,0 +1,45 @@ +# SPDX-FileCopyrightText: Copyright 2023, Arm Limited and/or its affiliates. 
+# SPDX-License-Identifier: Apache-2.0 +"""Contains class Rewriter to replace a subgraph/layer of a model.""" +from __future__ import annotations + +from dataclasses import dataclass +from pathlib import Path + +from mlia.nn.common import Optimizer +from mlia.nn.common import OptimizerConfiguration +from mlia.nn.tensorflow.config import TFLiteModel + + +@dataclass +class RewriteConfiguration(OptimizerConfiguration): + """Rewrite configuration.""" + + optimization_target: str + layers_to_optimize: list[str] | None = None + dataset: Path | None = None + + def __str__(self) -> str: + """Return string representation of the configuration.""" + return f"rewrite: {self.optimization_target}" + + +class Rewriter(Optimizer): + """Rewriter class for basic rewrite flow.""" + + def __init__( + self, tflite_model_path: Path, optimizer_configuration: RewriteConfiguration + ): + """Init Rewriter instance.""" + self.model = TFLiteModel(tflite_model_path) + self.optimizer_configuration = optimizer_configuration + + def apply_optimization(self) -> None: + """Apply the rewrite flow.""" + + def get_model(self) -> TFLiteModel: + """Return optimized model.""" + return self.model + + def optimization_config(self) -> str: + """Optimization configurations.""" diff --git a/src/mlia/nn/select.py b/src/mlia/nn/select.py new file mode 100644 index 0000000..7a25e47 --- /dev/null +++ b/src/mlia/nn/select.py @@ -0,0 +1,210 @@ +# SPDX-FileCopyrightText: Copyright 2022-2023, Arm Limited and/or its affiliates. 
+# SPDX-License-Identifier: Apache-2.0 +"""Module for optimization selection.""" +from __future__ import annotations + +import math +from pathlib import Path +from typing import Any +from typing import NamedTuple + +import tensorflow as tf + +from mlia.core.errors import ConfigurationError +from mlia.nn.common import Optimizer +from mlia.nn.common import OptimizerConfiguration +from mlia.nn.rewrite.core.rewrite import RewriteConfiguration +from mlia.nn.rewrite.core.rewrite import Rewriter +from mlia.nn.tensorflow.config import KerasModel +from mlia.nn.tensorflow.config import TFLiteModel +from mlia.nn.tensorflow.optimizations.clustering import Clusterer +from mlia.nn.tensorflow.optimizations.clustering import ClusteringConfiguration +from mlia.nn.tensorflow.optimizations.pruning import Pruner +from mlia.nn.tensorflow.optimizations.pruning import PruningConfiguration +from mlia.utils.types import is_list_of + + +class OptimizationSettings(NamedTuple): + """Optimization settings.""" + + optimization_type: str + optimization_target: int | float + layers_to_optimize: list[str] | None + dataset: Path | None = None + + @staticmethod + def create_from( + optimizer_params: list[tuple[str, float]], + layers_to_optimize: list[str] | None = None, + dataset: Path | None = None, + ) -> list[OptimizationSettings]: + """Create optimization settings from the provided parameters.""" + return [ + OptimizationSettings( + optimization_type=opt_type, + optimization_target=opt_target, + layers_to_optimize=layers_to_optimize, + dataset=dataset, + ) + for opt_type, opt_target in optimizer_params + ] + + def __str__(self) -> str: + """Return string representation.""" + return f"{self.optimization_type}: {self.optimization_target}" + + def next_target(self) -> OptimizationSettings: + """Return next optimization target.""" + if self.optimization_type == "pruning": + next_target = round(min(self.optimization_target + 0.1, 0.9), 2) + return OptimizationSettings( + self.optimization_type, 
next_target, self.layers_to_optimize + ) + + if self.optimization_type == "clustering": + # return next lowest power of two for clustering + next_target = math.log(self.optimization_target, 2) + if next_target.is_integer(): + next_target -= 1 + + next_target = max(int(2 ** int(next_target)), 4) + return OptimizationSettings( + self.optimization_type, next_target, self.layers_to_optimize + ) + + if self.optimization_type == "rewrite": + return OptimizationSettings( + self.optimization_type, + self.optimization_target, + self.layers_to_optimize, + self.dataset, + ) + + raise ValueError(f"Optimization type {self.optimization_type} is unknown.") + + +class MultiStageOptimizer(Optimizer): + """Optimizer with multiple stages.""" + + def __init__( + self, + model: tf.keras.Model, + optimizations: list[OptimizerConfiguration], + ) -> None: + """Init MultiStageOptimizer instance.""" + self.model = model + self.optimizations = optimizations + + def optimization_config(self) -> str: + """Return string representation of the optimization config.""" + return " - ".join(str(opt) for opt in self.optimizations) + + def get_model(self) -> Any: + """Return optimized model.""" + return self.model + + def apply_optimization(self) -> None: + """Apply optimization to the model.""" + for config in self.optimizations: + optimizer = get_optimizer(self.model, config) + optimizer.apply_optimization() + self.model = optimizer.get_model() + + +def get_optimizer( + model: tf.keras.Model | KerasModel | TFLiteModel, + config: OptimizerConfiguration | OptimizationSettings | list[OptimizationSettings], +) -> Optimizer: + """Get optimizer for provided configuration.""" + if isinstance(model, KerasModel): + model = model.get_keras_model() + + if isinstance(model, TFLiteModel): + model = model.model_path + + if isinstance(config, PruningConfiguration): + return Pruner(model, config) + + if isinstance(config, ClusteringConfiguration): + return Clusterer(model, config) + + if isinstance(config, 
RewriteConfiguration): + return Rewriter(model, config) # type: ignore + + if isinstance(config, OptimizationSettings) or is_list_of( + config, OptimizationSettings + ): + return _get_optimizer(model, config) # type: ignore + + raise ConfigurationError(f"Unknown optimization configuration {config}") + + +def _get_optimizer( + model: tf.keras.Model | Path, + optimization_settings: OptimizationSettings | list[OptimizationSettings], +) -> Optimizer: + if isinstance(optimization_settings, OptimizationSettings): + optimization_settings = [optimization_settings] + + optimizer_configs = [] + for opt_type, opt_target, layers_to_optimize, dataset in optimization_settings: + _check_optimizer_params(opt_type, opt_target) + + opt_config = _get_optimizer_configuration( + opt_type, opt_target, layers_to_optimize, dataset + ) + optimizer_configs.append(opt_config) + + if len(optimizer_configs) == 1: + return get_optimizer(model, optimizer_configs[0]) + + return MultiStageOptimizer(model, optimizer_configs) + + +def _get_optimizer_configuration( + optimization_type: str, + optimization_target: int | float | str, + layers_to_optimize: list[str] | None = None, + dataset: Path | None = None, +) -> OptimizerConfiguration: + """Get optimizer configuration for provided parameters.""" + _check_optimizer_params(optimization_type, optimization_target) + + opt_type = optimization_type.lower() + if opt_type == "pruning": + return PruningConfiguration(float(optimization_target), layers_to_optimize) + + if opt_type == "clustering": + # make sure an integer is given as clustering target + if optimization_target == int(optimization_target): + return ClusteringConfiguration(int(optimization_target), layers_to_optimize) + + raise ConfigurationError( + "Optimization target should be a positive integer. 
" + f"Optimization target provided: {optimization_target}" + ) + + if opt_type == "rewrite": + if isinstance(optimization_target, str): + return RewriteConfiguration( # type: ignore + str(optimization_target), layers_to_optimize, dataset + ) + + raise ConfigurationError( + "Optimization target should be a string indicating a" + "choice from rewrite library. " + f"Optimization target provided: {optimization_target}" + ) + + raise ConfigurationError(f"Unsupported optimization type: {optimization_type}") + + +def _check_optimizer_params( + optimization_type: str, optimization_target: int | float | str +) -> None: + """Check optimizer params.""" + if not optimization_target: + raise ConfigurationError("Optimization target is not provided") + + if not optimization_type: + raise ConfigurationError("Optimization type is not provided") diff --git a/src/mlia/nn/tensorflow/optimizations/clustering.py b/src/mlia/nn/tensorflow/optimizations/clustering.py index 4aaa33e..f9018b3 100644 --- a/src/mlia/nn/tensorflow/optimizations/clustering.py +++ b/src/mlia/nn/tensorflow/optimizations/clustering.py @@ -1,4 +1,4 @@ -# SPDX-FileCopyrightText: Copyright 2022, Arm Limited and/or its affiliates. +# SPDX-FileCopyrightText: Copyright 2022-2023, Arm Limited and/or its affiliates. # SPDX-License-Identifier: Apache-2.0 """ Contains class Clusterer that clusters unique weights per layer to a specified number. 
@@ -18,8 +18,8 @@ from tensorflow_model_optimization.python.core.clustering.keras.experimental imp cluster as experimental_cluster, ) -from mlia.nn.tensorflow.optimizations.common import Optimizer -from mlia.nn.tensorflow.optimizations.common import OptimizerConfiguration +from mlia.nn.common import Optimizer +from mlia.nn.common import OptimizerConfiguration @dataclass diff --git a/src/mlia/nn/tensorflow/optimizations/common.py b/src/mlia/nn/tensorflow/optimizations/common.py deleted file mode 100644 index 1dce0b2..0000000 --- a/src/mlia/nn/tensorflow/optimizations/common.py +++ /dev/null @@ -1,29 +0,0 @@ -# SPDX-FileCopyrightText: Copyright 2022, Arm Limited and/or its affiliates. -# SPDX-License-Identifier: Apache-2.0 -"""Common items for the optimizations module.""" -from abc import ABC -from abc import abstractmethod -from dataclasses import dataclass - -import tensorflow as tf - - -@dataclass -class OptimizerConfiguration: - """Abstract optimizer configuration.""" - - -class Optimizer(ABC): - """Abstract class for the optimizer.""" - - @abstractmethod - def get_model(self) -> tf.keras.Model: - """Abstract method to return the model instance from the optimizer.""" - - @abstractmethod - def apply_optimization(self) -> None: - """Abstract method to apply optimization to the model.""" - - @abstractmethod - def optimization_config(self) -> str: - """Return string representation of the optimization config.""" diff --git a/src/mlia/nn/tensorflow/optimizations/pruning.py b/src/mlia/nn/tensorflow/optimizations/pruning.py index 2d5ef0e..a30b301 100644 --- a/src/mlia/nn/tensorflow/optimizations/pruning.py +++ b/src/mlia/nn/tensorflow/optimizations/pruning.py @@ -24,8 +24,8 @@ from tensorflow_model_optimization.python.core.sparsity.keras import ( # pylint pruning_wrapper, ) -from mlia.nn.tensorflow.optimizations.common import Optimizer -from mlia.nn.tensorflow.optimizations.common import OptimizerConfiguration +from mlia.nn.common import Optimizer +from mlia.nn.common 
import OptimizerConfiguration logger = logging.getLogger(__name__) diff --git a/src/mlia/nn/tensorflow/optimizations/select.py b/src/mlia/nn/tensorflow/optimizations/select.py deleted file mode 100644 index a78df12..0000000 --- a/src/mlia/nn/tensorflow/optimizations/select.py +++ /dev/null @@ -1,175 +0,0 @@ -# SPDX-FileCopyrightText: Copyright 2022-2023, Arm Limited and/or its affiliates. -# SPDX-License-Identifier: Apache-2.0 -"""Module for optimization selection.""" -from __future__ import annotations - -import math -from typing import NamedTuple - -import tensorflow as tf - -from mlia.core.errors import ConfigurationError -from mlia.nn.tensorflow.config import KerasModel -from mlia.nn.tensorflow.optimizations.clustering import Clusterer -from mlia.nn.tensorflow.optimizations.clustering import ClusteringConfiguration -from mlia.nn.tensorflow.optimizations.common import Optimizer -from mlia.nn.tensorflow.optimizations.common import OptimizerConfiguration -from mlia.nn.tensorflow.optimizations.pruning import Pruner -from mlia.nn.tensorflow.optimizations.pruning import PruningConfiguration -from mlia.utils.types import is_list_of - - -class OptimizationSettings(NamedTuple): - """Optimization settings.""" - - optimization_type: str - optimization_target: int | float - layers_to_optimize: list[str] | None - - @staticmethod - def create_from( - optimizer_params: list[tuple[str, float]], - layers_to_optimize: list[str] | None = None, - ) -> list[OptimizationSettings]: - """Create optimization settings from the provided parameters.""" - return [ - OptimizationSettings( - optimization_type=opt_type, - optimization_target=opt_target, - layers_to_optimize=layers_to_optimize, - ) - for opt_type, opt_target in optimizer_params - ] - - def __str__(self) -> str: - """Return string representation.""" - return f"{self.optimization_type}: {self.optimization_target}" - - def next_target(self) -> OptimizationSettings: - """Return next optimization target.""" - if 
self.optimization_type == "pruning": - next_target = round(min(self.optimization_target + 0.1, 0.9), 2) - return OptimizationSettings( - self.optimization_type, next_target, self.layers_to_optimize - ) - - if self.optimization_type == "clustering": - # return next lowest power of two for clustering - next_target = math.log(self.optimization_target, 2) - if next_target.is_integer(): - next_target -= 1 - - next_target = max(int(2 ** int(next_target)), 4) - return OptimizationSettings( - self.optimization_type, next_target, self.layers_to_optimize - ) - - raise ValueError(f"Optimization type {self.optimization_type} is unknown.") - - -class MultiStageOptimizer(Optimizer): - """Optimizer with multiply stages.""" - - def __init__( - self, - model: tf.keras.Model, - optimizations: list[OptimizerConfiguration], - ) -> None: - """Init MultiStageOptimizer instance.""" - self.model = model - self.optimizations = optimizations - - def optimization_config(self) -> str: - """Return string representation of the optimization config.""" - return " - ".join(str(opt) for opt in self.optimizations) - - def get_model(self) -> tf.keras.Model: - """Return optimized model.""" - return self.model - - def apply_optimization(self) -> None: - """Apply optimization to the model.""" - for config in self.optimizations: - optimizer = get_optimizer(self.model, config) - optimizer.apply_optimization() - self.model = optimizer.get_model() - - -def get_optimizer( - model: tf.keras.Model | KerasModel, - config: OptimizerConfiguration | OptimizationSettings | list[OptimizationSettings], -) -> Optimizer: - """Get optimizer for provided configuration.""" - if isinstance(model, KerasModel): - model = model.get_keras_model() - - if isinstance(config, PruningConfiguration): - return Pruner(model, config) - - if isinstance(config, ClusteringConfiguration): - return Clusterer(model, config) - - if isinstance(config, OptimizationSettings) or is_list_of( - config, OptimizationSettings - ): - return 
_get_optimizer(model, config) # type: ignore - - raise ConfigurationError(f"Unknown optimization configuration {config}") - - -def _get_optimizer( - model: tf.keras.Model, - optimization_settings: OptimizationSettings | list[OptimizationSettings], -) -> Optimizer: - if isinstance(optimization_settings, OptimizationSettings): - optimization_settings = [optimization_settings] - - optimizer_configs = [] - for opt_type, opt_target, layers_to_optimize in optimization_settings: - _check_optimizer_params(opt_type, opt_target) - - opt_config = _get_optimizer_configuration( - opt_type, opt_target, layers_to_optimize - ) - optimizer_configs.append(opt_config) - - if len(optimizer_configs) == 1: - return get_optimizer(model, optimizer_configs[0]) - - return MultiStageOptimizer(model, optimizer_configs) - - -def _get_optimizer_configuration( - optimization_type: str, - optimization_target: int | float, - layers_to_optimize: list[str] | None = None, -) -> OptimizerConfiguration: - """Get optimizer configuration for provided parameters.""" - _check_optimizer_params(optimization_type, optimization_target) - - opt_type = optimization_type.lower() - if opt_type == "pruning": - return PruningConfiguration(optimization_target, layers_to_optimize) - - if opt_type == "clustering": - # make sure an integer is given as clustering target - if optimization_target == int(optimization_target): - return ClusteringConfiguration(int(optimization_target), layers_to_optimize) - - raise ConfigurationError( - "Optimization target should be a positive integer. 
" - f"Optimization target provided: {optimization_target}" - ) - - raise ConfigurationError(f"Unsupported optimization type: {optimization_type}") - - -def _check_optimizer_params( - optimization_type: str, optimization_target: int | float -) -> None: - """Check optimizer params.""" - if not optimization_target: - raise ConfigurationError("Optimization target is not provided") - - if not optimization_type: - raise ConfigurationError("Optimization type is not provided") diff --git a/src/mlia/target/ethos_u/advice_generation.py b/src/mlia/target/ethos_u/advice_generation.py index a9f9eac..351082a 100644 --- a/src/mlia/target/ethos_u/advice_generation.py +++ b/src/mlia/target/ethos_u/advice_generation.py @@ -11,7 +11,7 @@ from mlia.core.advice_generation import ContextAwareAdviceProducer from mlia.core.advice_generation import FactBasedAdviceProducer from mlia.core.common import AdviceCategory from mlia.core.common import DataItem -from mlia.nn.tensorflow.optimizations.select import OptimizationSettings +from mlia.nn.select import OptimizationSettings from mlia.target.common.reporters import handle_model_is_not_tflite_compatible_common from mlia.target.common.reporters import handle_tflite_check_failed_common from mlia.target.common.reporters import ModelIsNotTFLiteCompatible diff --git a/src/mlia/target/ethos_u/advisor.py b/src/mlia/target/ethos_u/advisor.py index d2c308a..321734c 100644 --- a/src/mlia/target/ethos_u/advisor.py +++ b/src/mlia/target/ethos_u/advisor.py @@ -54,8 +54,20 @@ class EthosUInferenceAdvisor(DefaultInferenceAdvisor): if is_tflite_model(model): # TensorFlow Lite models do not support optimization (only performance)! if context.category_enabled(AdviceCategory.OPTIMIZATION): - raise RuntimeError( - "Optimizations are not supported for TensorFlow Lite files." 
+ optimization_settings = self._get_optimization_settings(context) + + optimization_types = { + opt["optimization_type"] for opt in optimization_settings[0] + } + if optimization_types != {"rewrite"}: + raise RuntimeError( + "Only 'rewrite' is supported for TensorFlow Lite files." + ) + + collectors.append( + EthosUOptimizationPerformance( + model, target_config, optimization_settings, backends + ) ) if context.category_enabled(AdviceCategory.PERFORMANCE): collectors.append(EthosUPerformance(model, target_config, backends)) diff --git a/src/mlia/target/ethos_u/data_analysis.py b/src/mlia/target/ethos_u/data_analysis.py index 3df4bff..5c6080f 100644 --- a/src/mlia/target/ethos_u/data_analysis.py +++ b/src/mlia/target/ethos_u/data_analysis.py @@ -10,7 +10,7 @@ from mlia.backend.vela.compat import Operators from mlia.core.common import DataItem from mlia.core.data_analysis import Fact from mlia.core.data_analysis import FactExtractor -from mlia.nn.tensorflow.optimizations.select import OptimizationSettings +from mlia.nn.select import OptimizationSettings from mlia.nn.tensorflow.tflite_compat import TFLiteCompatibilityInfo from mlia.target.common.reporters import analyze_tflite_compatibility_common from mlia.target.ethos_u.performance import OptimizationPerformanceMetrics diff --git a/src/mlia/target/ethos_u/data_collection.py b/src/mlia/target/ethos_u/data_collection.py index 0654143..0f3a8d2 100644 --- a/src/mlia/target/ethos_u/data_collection.py +++ b/src/mlia/target/ethos_u/data_collection.py @@ -5,6 +5,7 @@ from __future__ import annotations import logging from pathlib import Path +from typing import Any from mlia.backend.vela.compat import Operators from mlia.backend.vela.compat import supported_operators @@ -12,15 +13,17 @@ from mlia.core.context import Context from mlia.core.data_collection import ContextAwareDataCollector from mlia.core.errors import FunctionalityNotSupportedError from mlia.core.performance import estimate_performance +from mlia.nn.select 
import get_optimizer +from mlia.nn.select import OptimizationSettings from mlia.nn.tensorflow.config import get_keras_model from mlia.nn.tensorflow.config import get_tflite_model from mlia.nn.tensorflow.config import KerasModel -from mlia.nn.tensorflow.optimizations.select import get_optimizer -from mlia.nn.tensorflow.optimizations.select import OptimizationSettings +from mlia.nn.tensorflow.config import TFLiteModel from mlia.nn.tensorflow.tflite_compat import TFLiteChecker from mlia.nn.tensorflow.tflite_compat import TFLiteCompatibilityInfo from mlia.nn.tensorflow.utils import is_tflite_model from mlia.nn.tensorflow.utils import save_keras_model +from mlia.nn.tensorflow.utils import save_tflite_model from mlia.target.ethos_u.config import EthosUConfiguration from mlia.target.ethos_u.performance import EthosUPerformanceEstimator from mlia.target.ethos_u.performance import OptimizationPerformanceMetrics @@ -103,7 +106,7 @@ class OptimizeModel: self.context = context self.opt_settings = opt_settings - def __call__(self, keras_model: KerasModel) -> KerasModel: + def __call__(self, keras_model: KerasModel) -> Any: """Run optimization.""" optimizer = get_optimizer(keras_model, self.opt_settings) @@ -112,9 +115,19 @@ class OptimizeModel: optimizer.apply_optimization() model = optimizer.get_model() + + if isinstance(model, Path): + return model + + if isinstance(model, TFLiteModel): + model_path = self.context.get_model_path("optimized_model.tflite") + with open(model.model_path, "rb") as file_handle: + model_data = bytearray(file_handle.read()) + save_tflite_model(model_data, model_path) + return TFLiteModel(model_path) + model_path = self.context.get_model_path("optimized_model.h5") save_keras_model(model, model_path) - return KerasModel(model_path) @@ -146,14 +159,17 @@ class EthosUOptimizationPerformance(ContextAwareDataCollector): opt_settings = self._parse_optimization_params(self.optimizations) - try: - keras_model = get_keras_model(self.model, self.context) - 
except NotImplementedError as err: - raise FunctionalityNotSupportedError( - reason="Unable to run model optimizations", - description=f"{self.model} is not a Keras model and " - "could not be converted to a Keras model", - ) from err + if opt_settings[0][0].optimization_type != "rewrite": + try: + model = get_keras_model(self.model, self.context) + except NotImplementedError as err: + raise FunctionalityNotSupportedError( + reason="Unable to run model optimizations", + description=f"{self.model} is not a Keras model and " + "could not be converted to a Keras model", + ) from err + else: + model = self.model # type: ignore optimizers = [OptimizeModel(self.context, opts) for opts in opt_settings] @@ -163,7 +179,7 @@ class EthosUOptimizationPerformance(ContextAwareDataCollector): self.backends, ) original_metrics, *optimized_metrics = estimate_performance( - keras_model, estimator, optimizers # type: ignore + model, estimator, optimizers # type: ignore ) result = OptimizationPerformanceMetrics( diff --git a/src/mlia/target/ethos_u/performance.py b/src/mlia/target/ethos_u/performance.py index f7f9a8c..a0526e4 100644 --- a/src/mlia/target/ethos_u/performance.py +++ b/src/mlia/target/ethos_u/performance.py @@ -15,9 +15,9 @@ from mlia.backend.corstone import is_corstone_backend from mlia.backend.corstone.performance import estimate_performance from mlia.core.context import Context from mlia.core.performance import PerformanceEstimator +from mlia.nn.select import OptimizationSettings from mlia.nn.tensorflow.config import get_tflite_model from mlia.nn.tensorflow.config import ModelConfiguration -from mlia.nn.tensorflow.optimizations.select import OptimizationSettings from mlia.target.ethos_u.config import EthosUConfiguration from mlia.target.registry import supported_backends from mlia.utils.logging import log_action -- cgit v1.2.1