author    Dmitrii Agibov <dmitrii.agibov@arm.com>  2022-09-08 14:24:39 +0100
committer Dmitrii Agibov <dmitrii.agibov@arm.com>  2022-09-09 17:21:48 +0100
commit    f5b293d0927506c2a979a091bf0d07ecc78fa181 (patch)
tree      4de585b7cb6ed34da8237063752270189a730a41 /src/mlia/nn/tensorflow/optimizations/pruning.py
parent    cde0c6ee140bd108849bff40467d8f18ffc332ef (diff)
download  mlia-f5b293d0927506c2a979a091bf0d07ecc78fa181.tar.gz
MLIA-386 Simplify typing in the source code

- Enable deferred annotations evaluation
- Use builtin types for type hints whenever possible
- Use | syntax for union types
- Rename mlia.core._typing into mlia.core.typing

Change-Id: I3f6ffc02fa069c589bdd9e8bddbccd504285427a
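As a hedged illustration of the style this commit adopts (example code, not taken from the patch; the function and names below are hypothetical): with deferred annotation evaluation, annotations are stored as strings rather than evaluated at definition time, so builtin generics and PEP 604 `|` unions can be written even on Python versions that predate their runtime support.

# Hypothetical example; names are not from the MLIA codebase.
from __future__ import annotations  # defer evaluation of all annotations


# Before: aliases imported from typing.
#   from typing import List, Optional
#   def load_labels(path: Optional[str] = None) -> List[str]: ...

# After: builtin generics and the | union syntax.
def load_labels(path: str | None = None) -> list[str]:
    """Return one label per line of ``path``, or an empty list."""
    if path is None:
        return []
    with open(path, encoding="utf-8") as file:
        return [line.strip() for line in file]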
Diffstat (limited to 'src/mlia/nn/tensorflow/optimizations/pruning.py')
-rw-r--r-- src/mlia/nn/tensorflow/optimizations/pruning.py | 13
1 file changed, 6 insertions(+), 7 deletions(-)
diff --git a/src/mlia/nn/tensorflow/optimizations/pruning.py b/src/mlia/nn/tensorflow/optimizations/pruning.py
index 0a3fda5..41954b9 100644
--- a/src/mlia/nn/tensorflow/optimizations/pruning.py
+++ b/src/mlia/nn/tensorflow/optimizations/pruning.py
@@ -7,11 +7,10 @@ In order to do this, we need to have a base model and corresponding training dat
 We also have to specify a subset of layers we want to prune. For more details,
 please refer to the documentation for TensorFlow Model Optimization Toolkit.
 """
+from __future__ import annotations
+
 import typing
 from dataclasses import dataclass
-from typing import List
-from typing import Optional
-from typing import Tuple
 
 import numpy as np
 import tensorflow as tf
@@ -29,9 +28,9 @@ class PruningConfiguration(OptimizerConfiguration):
     """Pruning configuration."""
 
     optimization_target: float
-    layers_to_optimize: Optional[List[str]] = None
-    x_train: Optional[np.ndarray] = None
-    y_train: Optional[np.ndarray] = None
+    layers_to_optimize: list[str] | None = None
+    x_train: np.ndarray | None = None
+    y_train: np.ndarray | None = None
     batch_size: int = 1
     num_epochs: int = 1
 
@@ -74,7 +73,7 @@ class Pruner(Optimizer):
         """Return string representation of the optimization config."""
         return str(self.optimizer_configuration)
 
-    def _mock_train_data(self) -> Tuple[np.ndarray, np.ndarray]:
+    def _mock_train_data(self) -> tuple[np.ndarray, np.ndarray]:
         # get rid of the batch_size dimension in input and output shape
         input_shape = tuple(x for x in self.model.input_shape if x is not None)
         output_shape = tuple(x for x in self.model.output_shape if x is not None)
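The first hunk adds `from __future__ import annotations`, which is what makes the later hunks legal on interpreters older than Python 3.10. A small sketch of the mechanism (my illustration, not project code): annotations become plain strings at definition time and are only resolved on demand.

from __future__ import annotations

import typing


def predict(inputs: list[float] | None) -> tuple[float, float]:
    # Placeholder body; only the annotations matter here.
    return (0.0, 0.0)


# Stored unevaluated, so this runs even where | is not a runtime operator.
assert predict.__annotations__["inputs"] == "list[float] | None"

# On Python 3.10+ the strings can be resolved back into real types.
print(typing.get_type_hints(predict))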
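The second hunk changes only the spelling of the annotations, not behavior, so construction of the config is unchanged. A usage sketch, assuming the class is importable from the module path shown in the diff; the field names come from the hunk, but all values are made up for illustration.

import numpy as np

from mlia.nn.tensorflow.optimizations.pruning import PruningConfiguration

# Illustrative values only; shapes assume a small image classifier.
config = PruningConfiguration(
    optimization_target=0.5,                 # e.g. a 50% pruning target
    layers_to_optimize=["conv2d", "dense"],  # subset of layers to prune
    x_train=np.zeros((16, 28, 28, 1), dtype=np.float32),
    y_train=np.zeros((16, 10), dtype=np.float32),
    batch_size=8,
    num_epochs=3,
)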
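The last hunk shows only the opening lines of `_mock_train_data`. A minimal sketch of what such a helper can do with those shapes, written as a free function; the shape handling mirrors the visible context lines, while the random-data part is my assumption rather than the method's actual body.

from __future__ import annotations

import numpy as np
import tensorflow as tf


def mock_train_data(
    model: tf.keras.Model, batch_size: int
) -> tuple[np.ndarray, np.ndarray]:
    """Build random training data matching the model's input/output shapes."""
    # Get rid of the batch_size dimension (None) in input and output shape.
    input_shape = tuple(x for x in model.input_shape if x is not None)
    output_shape = tuple(x for x in model.output_shape if x is not None)

    # Prepend a concrete batch dimension and fill with random values.
    x_train = np.random.rand(batch_size, *input_shape).astype(np.float32)
    y_train = np.random.rand(batch_size, *output_shape).astype(np.float32)
    return x_train, y_train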