diff options
Diffstat (limited to 'src')
-rw-r--r-- | src/mlia/nn/rewrite/core/graph_edit/diff.py | 23 | ||||
-rw-r--r-- | src/mlia/nn/rewrite/core/graph_edit/join.py | 14 | ||||
-rw-r--r-- | src/mlia/nn/rewrite/core/rewrite.py | 69 | ||||
-rw-r--r-- | src/mlia/nn/rewrite/core/train.py | 1 | ||||
-rw-r--r-- | src/mlia/nn/rewrite/library/__init__.py | 3 | ||||
-rw-r--r-- | src/mlia/nn/rewrite/library/fc_layer.py | 18 | ||||
-rw-r--r-- | src/mlia/nn/select.py | 1 | ||||
-rw-r--r-- | src/mlia/target/ethos_u/data_collection.py | 3 |
8 files changed, 117 insertions, 15 deletions
diff --git a/src/mlia/nn/rewrite/core/graph_edit/diff.py b/src/mlia/nn/rewrite/core/graph_edit/diff.py index 198e47e..7fa2a72 100644 --- a/src/mlia/nn/rewrite/core/graph_edit/diff.py +++ b/src/mlia/nn/rewrite/core/graph_edit/diff.py @@ -31,6 +31,21 @@ def add_total(name: str, key: str, values: list, totals: dict) -> None: totals[name][key] += values +def _handle_zeros_in_denominator(denominator: np.ndarray) -> np.ndarray: + """Handle zeros in the denominator in nrmse to avoid dividing by zero(s).""" + denominator[denominator == 0.0] = 1.0 + return denominator + + +def calc_nrmse(rmse: dict, dataset1_var: dict) -> dict: + """Divide rmse by target standard deviation.""" + nrmse = { + k: v / _handle_zeros_in_denominator(np.sqrt(dataset1_var[k])) + for k, v in rmse.items() + } + return nrmse + + def diff_stats( file1: str | Path, file2: str | Path, per_tensor_and_channel: bool = False ) -> tuple: @@ -80,14 +95,8 @@ def diff_stats( mse = per_tensor_mean("se") rmse = {k: np.sqrt(v) for k, v in mse.items()} dataset1_var = per_tensor_mean("dataset1_variance") - is_nonzero = {k: dataset1_var[k] > 0 for k in dataset1_var} - # Divide by target standard deviation to get the per-channel nrmse for each - # tensor where possible - nrmse = { - k: v[is_nonzero[k]] / np.sqrt(dataset1_var[k][is_nonzero[k]]) - for k, v in rmse.items() - } + nrmse = calc_nrmse(rmse, dataset1_var) if per_tensor_and_channel: return mae, nrmse diff --git a/src/mlia/nn/rewrite/core/graph_edit/join.py b/src/mlia/nn/rewrite/core/graph_edit/join.py index 14a7347..2530ec8 100644 --- a/src/mlia/nn/rewrite/core/graph_edit/join.py +++ b/src/mlia/nn/rewrite/core/graph_edit/join.py @@ -22,8 +22,8 @@ def join_models( input_src: str | Path, input_dst: str | Path, output_file: str | Path, - subgraph_src: SubGraphT = 0, - subgraph_dst: SubGraphT = 0, + subgraph_src: int = 0, + subgraph_dst: int = 0, ) -> None: """Join two models and save the result into a given model file path.""" src_model = load(input_src) @@ -150,12 
+150,12 @@ def join_subgraphs( dst_subgraph.outputs = list(set(src_subgraph.outputs).union(dst_subgraph.outputs)) -def append_relabel(src: list, dst: list, operator_map: dict | None = None) -> dict: - """Return a map over relabeled tensors in a subgraph.""" - if not operator_map: - operator_map = {} +def append_relabel(src: list, dst: list, operator_map: dict) -> None: + """Update the operator map over relabeled tensors in a subgraph.""" + if operator_map is None: + raise ValueError("The input operator map cannot be None!") + for i, x in enumerate(src): # pylint: disable=invalid-name if i not in operator_map: operator_map[i] = len(dst) dst.append(x) - return operator_map diff --git a/src/mlia/nn/rewrite/core/rewrite.py b/src/mlia/nn/rewrite/core/rewrite.py index ab34b47..0d182df 100644 --- a/src/mlia/nn/rewrite/core/rewrite.py +++ b/src/mlia/nn/rewrite/core/rewrite.py @@ -3,11 +3,19 @@ """Contains class Rewriter to replace a subgraph/layer of a model.""" from __future__ import annotations +import importlib +import tempfile from dataclasses import dataclass from pathlib import Path +from typing import Any +from mlia.core.errors import ConfigurationError from mlia.nn.common import Optimizer from mlia.nn.common import OptimizerConfiguration +from mlia.nn.rewrite.core.train import eval_in_dir +from mlia.nn.rewrite.core.train import join_in_dir +from mlia.nn.rewrite.core.train import train +from mlia.nn.rewrite.core.train import train_in_dir from mlia.nn.tensorflow.config import TFLiteModel @@ -33,10 +41,71 @@ class Rewriter(Optimizer): """Init Rewriter instance.""" self.model = TFLiteModel(tflite_model_path) self.optimizer_configuration = optimizer_configuration + self.train_dir = "" def apply_optimization(self) -> None: """Apply the rewrite flow.""" + def get_function(arg: str) -> Any: + module_name = ".".join(arg.split(".")[:-1]) + fn_name = arg.split(".")[-1] + module = importlib.import_module(module_name) + return getattr(module, fn_name) + + if 
self.optimizer_configuration.optimization_target == "fully_connected": + replace_function = "mlia.nn.rewrite.library.fc_layer.get_keras_model" + else: + raise ConfigurationError( + "Only fully_connected replacement is supported in rewrite module." + ) + + replace_fn = get_function(replace_function) + + augmentation_preset = (None, None) + use_unmodified_model = True + tflite_model = self.model.model_path + tfrecord = str(self.optimizer_configuration.dataset) + + with tempfile.TemporaryDirectory() as tmp_dir: + tmp_output = Path(tmp_dir, "output.tflite") + + if self.train_dir: + tmp_new = Path(tmp_dir, "new.tflite") + new_part = train_in_dir( + train_dir=self.train_dir, + baseline_dir=None, + output_filename=tmp_new, + replace_fn=replace_fn, + augmentations=augmentation_preset, + steps=32, + learning_rate=1e-3, + batch_size=1, + verbose=True, + show_progress=True, + ) + eval_in_dir(self.train_dir, new_part[0]) + join_in_dir(self.train_dir, new_part[0], str(tmp_output)) + else: + if not self.optimizer_configuration.layers_to_optimize: + raise ConfigurationError( + "Input and output tensor names need to be set for rewrite." 
+ ) + train( + source_model=tflite_model, + unmodified_model=tflite_model if use_unmodified_model else None, + output_model=str(tmp_output), + input_tfrec=str(tfrecord), + replace_fn=replace_fn, + input_tensors=[self.optimizer_configuration.layers_to_optimize[0]], + output_tensors=[self.optimizer_configuration.layers_to_optimize[1]], + augment=augmentation_preset, + steps=32, + learning_rate=1e-3, + batch_size=1, + verbose=True, + show_progress=True, + ) + def get_model(self) -> TFLiteModel: """Return optimized model.""" return self.model diff --git a/src/mlia/nn/rewrite/core/train.py b/src/mlia/nn/rewrite/core/train.py index f837964..c8497a4 100644 --- a/src/mlia/nn/rewrite/core/train.py +++ b/src/mlia/nn/rewrite/core/train.py @@ -33,6 +33,7 @@ from mlia.nn.rewrite.core.utils.utils import load from mlia.nn.rewrite.core.utils.utils import save from mlia.utils.logging import log_action + os.environ["TF_CPP_MIN_LOG_LEVEL"] = "3" tf.compat.v1.logging.set_verbosity(tf.compat.v1.logging.ERROR) logger = logging.getLogger(__name__) diff --git a/src/mlia/nn/rewrite/library/__init__.py b/src/mlia/nn/rewrite/library/__init__.py new file mode 100644 index 0000000..2988554 --- /dev/null +++ b/src/mlia/nn/rewrite/library/__init__.py @@ -0,0 +1,3 @@ +# SPDX-FileCopyrightText: Copyright 2023, Arm Limited and/or its affiliates. +# SPDX-License-Identifier: Apache-2.0 +"""Rewrite functions as library.""" diff --git a/src/mlia/nn/rewrite/library/fc_layer.py b/src/mlia/nn/rewrite/library/fc_layer.py new file mode 100644 index 0000000..8704154 --- /dev/null +++ b/src/mlia/nn/rewrite/library/fc_layer.py @@ -0,0 +1,18 @@ +# SPDX-FileCopyrightText: Copyright 2023, Arm Limited and/or its affiliates. 
+# SPDX-License-Identifier: Apache-2.0 +"""Example rewrite with one fully connected layer.""" +from typing import Any + +import tensorflow as tf + + +def get_keras_model(input_shape: Any, output_shape: Any) -> tf.keras.Model: + """Generate tflite model for rewrite.""" + input_tensor = tf.keras.layers.Input( + shape=input_shape, name="MobileNet/avg_pool/AvgPool" + ) + output_tensor = tf.keras.layers.Dense(output_shape, name="MobileNet/fc1/BiasAdd")( + input_tensor + ) + model = tf.keras.Model(input_tensor, output_tensor) + return model diff --git a/src/mlia/nn/select.py b/src/mlia/nn/select.py index 5e223fa..5a7f289 100644 --- a/src/mlia/nn/select.py +++ b/src/mlia/nn/select.py @@ -135,6 +135,7 @@ def get_optimizer( if isinstance(config, OptimizationSettings): return _get_optimizer(model, cast(OptimizationSettings, config)) + if is_list_of(config, OptimizationSettings): return _get_optimizer(model, cast(List[OptimizationSettings], config)) diff --git a/src/mlia/target/ethos_u/data_collection.py b/src/mlia/target/ethos_u/data_collection.py index 0f3a8d2..ba8b0fe 100644 --- a/src/mlia/target/ethos_u/data_collection.py +++ b/src/mlia/target/ethos_u/data_collection.py @@ -201,7 +201,8 @@ class EthosUOptimizationPerformance(ContextAwareDataCollector): OptimizationSettings( item.get("optimization_type"), # type: ignore item.get("optimization_target"), # type: ignore - item.get("layers_to_optimized"), + item.get("layers_to_optimize"), + item.get("dataset"), ) for item in opt_configuration ] |