Diffstat (limited to 'src/mlia/nn/tensorflow/optimizations')
-rw-r--r--  src/mlia/nn/tensorflow/optimizations/clustering.py |  9
-rw-r--r--  src/mlia/nn/tensorflow/optimizations/pruning.py    | 13
-rw-r--r--  src/mlia/nn/tensorflow/optimizations/select.py     | 34
3 files changed, 25 insertions(+), 31 deletions(-)
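
All three files receive the same mechanical modernization: adding "from __future__ import annotations" (PEP 563) postpones annotation evaluation, which lets the PEP 585/604 spellings (list[str], int | float, X | None) replace the typing.List / typing.Optional / typing.Union aliases while the code still runs on Python 3.7 and 3.8. A minimal before/after sketch of the pattern, standalone and not taken from the MLIA sources; each snippet must be its own module, because the future import has to be the first statement:

    # before.py: typing aliases are imported so the annotations can be
    # evaluated at runtime
    from typing import List, Optional

    def pick_layers(names: Optional[List[str]] = None) -> List[str]:
        return names or []

    # after.py: annotations become lazily evaluated strings, so the
    # 3.9+/3.10+ spellings parse fine on older interpreters
    from __future__ import annotations

    def pick_layers(names: list[str] | None = None) -> list[str]:
        return names or []
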
diff --git a/src/mlia/nn/tensorflow/optimizations/clustering.py b/src/mlia/nn/tensorflow/optimizations/clustering.py
index 16d9e4b..4aaa33e 100644
--- a/src/mlia/nn/tensorflow/optimizations/clustering.py
+++ b/src/mlia/nn/tensorflow/optimizations/clustering.py
@@ -7,11 +7,10 @@ In order to do this, we need to have a base model and corresponding training dat
 We also have to specify a subset of layers we want to cluster. For more details,
 please refer to the documentation for TensorFlow Model Optimization Toolkit.
 """
+from __future__ import annotations
+
 from dataclasses import dataclass
 from typing import Any
-from typing import Dict
-from typing import List
-from typing import Optional
 
 import tensorflow as tf
 import tensorflow_model_optimization as tfmot
@@ -28,7 +27,7 @@ class ClusteringConfiguration(OptimizerConfiguration):
     """Clustering configuration."""
 
     optimization_target: int
-    layers_to_optimize: Optional[List[str]] = None
+    layers_to_optimize: list[str] | None = None
 
     def __str__(self) -> str:
         """Return string representation of the configuration."""
@@ -61,7 +60,7 @@ class Clusterer(Optimizer):
         """Return string representation of the optimization config."""
         return str(self.optimizer_configuration)
 
-    def _setup_clustering_params(self) -> Dict[str, Any]:
+    def _setup_clustering_params(self) -> dict[str, Any]:
         CentroidInitialization = tfmot.clustering.keras.CentroidInitialization
         return {
             "number_of_clusters": self.optimizer_configuration.optimization_target,
diff --git a/src/mlia/nn/tensorflow/optimizations/pruning.py b/src/mlia/nn/tensorflow/optimizations/pruning.py
index 0a3fda5..41954b9 100644
--- a/src/mlia/nn/tensorflow/optimizations/pruning.py
+++ b/src/mlia/nn/tensorflow/optimizations/pruning.py
@@ -7,11 +7,10 @@ In order to do this, we need to have a base model and corresponding training dat
 We also have to specify a subset of layers we want to prune. For more details,
 please refer to the documentation for TensorFlow Model Optimization Toolkit.
 """
+from __future__ import annotations
+
 import typing
 from dataclasses import dataclass
-from typing import List
-from typing import Optional
-from typing import Tuple
 
 import numpy as np
 import tensorflow as tf
@@ -29,9 +28,9 @@ class PruningConfiguration(OptimizerConfiguration):
     """Pruning configuration."""
 
     optimization_target: float
-    layers_to_optimize: Optional[List[str]] = None
-    x_train: Optional[np.ndarray] = None
-    y_train: Optional[np.ndarray] = None
+    layers_to_optimize: list[str] | None = None
+    x_train: np.ndarray | None = None
+    y_train: np.ndarray | None = None
     batch_size: int = 1
     num_epochs: int = 1
@@ -74,7 +73,7 @@ class Pruner(Optimizer):
         """Return string representation of the optimization config."""
         return str(self.optimizer_configuration)
 
-    def _mock_train_data(self) -> Tuple[np.ndarray, np.ndarray]:
+    def _mock_train_data(self) -> tuple[np.ndarray, np.ndarray]:
         # get rid of the batch_size dimension in input and output shape
         input_shape = tuple(x for x in self.model.input_shape if x is not None)
         output_shape = tuple(x for x in self.model.output_shape if x is not None)
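
PruningConfiguration follows the same pattern. Its optimization_target is a float, read here as a sparsity fraction, consistent with next_target() in select.py below stepping pruning targets by 0.1 and capping them at 0.9. The training arrays are optional; the _mock_train_data() helper above suggests stand-in data is synthesized from the model's input/output shapes when none is given. A hedged sketch, with made-up array shapes:

    import numpy as np

    from mlia.nn.tensorflow.optimizations.pruning import PruningConfiguration

    config = PruningConfiguration(
        optimization_target=0.5,                 # target sparsity as a fraction
        layers_to_optimize=None,                 # no explicit layer subset
        x_train=np.random.rand(10, 28, 28, 1),   # illustrative shapes only
        y_train=np.random.rand(10, 10),
        batch_size=1,
        num_epochs=1,
    )
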
diff --git a/src/mlia/nn/tensorflow/optimizations/select.py b/src/mlia/nn/tensorflow/optimizations/select.py
index 1b0c755..d4a8ea4 100644
--- a/src/mlia/nn/tensorflow/optimizations/select.py
+++ b/src/mlia/nn/tensorflow/optimizations/select.py
@@ -1,12 +1,10 @@
 # SPDX-FileCopyrightText: Copyright 2022, Arm Limited and/or its affiliates.
 # SPDX-License-Identifier: Apache-2.0
 """Module for optimization selection."""
+from __future__ import annotations
+
 import math
-from typing import List
 from typing import NamedTuple
-from typing import Optional
-from typing import Tuple
-from typing import Union
 
 import tensorflow as tf
@@ -25,14 +23,14 @@ class OptimizationSettings(NamedTuple):
     """Optimization settings."""
 
     optimization_type: str
-    optimization_target: Union[int, float]
-    layers_to_optimize: Optional[List[str]]
+    optimization_target: int | float
+    layers_to_optimize: list[str] | None
 
     @staticmethod
     def create_from(
-        optimizer_params: List[Tuple[str, float]],
-        layers_to_optimize: Optional[List[str]] = None,
-    ) -> List["OptimizationSettings"]:
+        optimizer_params: list[tuple[str, float]],
+        layers_to_optimize: list[str] | None = None,
+    ) -> list[OptimizationSettings]:
         """Create optimization settings from the provided parameters."""
         return [
             OptimizationSettings(
@@ -47,7 +45,7 @@ class OptimizationSettings(NamedTuple):
         """Return string representation."""
         return f"{self.optimization_type}: {self.optimization_target}"
 
-    def next_target(self) -> "OptimizationSettings":
+    def next_target(self) -> OptimizationSettings:
         """Return next optimization target."""
         if self.optimization_type == "pruning":
             next_target = round(min(self.optimization_target + 0.1, 0.9), 2)
@@ -75,7 +73,7 @@ class MultiStageOptimizer(Optimizer):
     def __init__(
         self,
         model: tf.keras.Model,
-        optimizations: List[OptimizerConfiguration],
+        optimizations: list[OptimizerConfiguration],
     ) -> None:
         """Init MultiStageOptimizer instance."""
         self.model = model
@@ -98,10 +96,8 @@ class MultiStageOptimizer(Optimizer):
 
 def get_optimizer(
-    model: Union[tf.keras.Model, KerasModel],
-    config: Union[
-        OptimizerConfiguration, OptimizationSettings, List[OptimizationSettings]
-    ],
+    model: tf.keras.Model | KerasModel,
+    config: OptimizerConfiguration | OptimizationSettings | list[OptimizationSettings],
 ) -> Optimizer:
     """Get optimizer for provided configuration."""
     if isinstance(model, KerasModel):
@@ -123,7 +119,7 @@ def get_optimizer(
 
 def _get_optimizer(
     model: tf.keras.Model,
-    optimization_settings: Union[OptimizationSettings, List[OptimizationSettings]],
+    optimization_settings: OptimizationSettings | list[OptimizationSettings],
 ) -> Optimizer:
     if isinstance(optimization_settings, OptimizationSettings):
         optimization_settings = [optimization_settings]
@@ -145,8 +141,8 @@ def _get_optimizer(
 
 def _get_optimizer_configuration(
     optimization_type: str,
-    optimization_target: Union[int, float],
-    layers_to_optimize: Optional[List[str]] = None,
+    optimization_target: int | float,
+    layers_to_optimize: list[str] | None = None,
 ) -> OptimizerConfiguration:
     """Get optimizer configuration for provided parameters."""
     _check_optimizer_params(optimization_type, optimization_target)
@@ -169,7 +165,7 @@ def _get_optimizer_configuration(
 
 def _check_optimizer_params(
-    optimization_type: str, optimization_target: Union[int, float]
+    optimization_type: str, optimization_target: int | float
 ) -> None:
     """Check optimizer params."""
     if not optimization_target:
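
End to end, select.py turns (type, target) pairs into OptimizationSettings and those into a concrete Optimizer. A usage sketch against the signatures shown in this diff; model stands for a tf.keras.Model built elsewhere, and which Optimizer subclass get_optimizer returns for a list of settings is not visible in the hunks above:

    from mlia.nn.tensorflow.optimizations.select import (
        OptimizationSettings,
        get_optimizer,
    )

    settings = OptimizationSettings.create_from([("pruning", 0.5), ("clustering", 32)])
    print(settings[0])                # "pruning: 0.5", via __str__
    print(settings[0].next_target())  # "pruning: 0.6", stepped by 0.1 and capped at 0.9

    optimizer = get_optimizer(model, settings)
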