aboutsummaryrefslogtreecommitdiff
path: root/tests/test_nn_tensorflow_optimizations_pruning.py
diff options
context:
space:
mode:
authorBenjamin Klimczak <benjamin.klimczak@arm.com>2022-07-11 12:33:42 +0100
committerBenjamin Klimczak <benjamin.klimczak@arm.com>2022-07-26 14:08:21 +0100
commit5d81f37de09efe10f90512e50252be9c36925fcf (patch)
treeb4d7cdfd051da0a6e882bdfcf280fd7ca7b39e57 /tests/test_nn_tensorflow_optimizations_pruning.py
parent7899b908c1fe6d86b92a80f3827ddd0ac05b674b (diff)
downloadmlia-5d81f37de09efe10f90512e50252be9c36925fcf.tar.gz
MLIA-551 Rework remains of AIET architecture
Re-factoring the code base to further merge the old AIET code into MLIA. - Remove last traces of the backend type 'tool' - Controlled systems removed, including SSH protocol, controller, RunningCommand, locks etc. - Build command / build dir and deploy functionality removed from Applications and Systems - Moving working_dir() - Replace module 'output_parser' with new module 'output_consumer' and merge Base64 parsing into it - Change the output consumption to optionally remove (i.e. actually consume) lines - Use Base64 parsing in GenericInferenceOutputParser, replacing the regex-based parsing and remove the now unused regex parsing - Remove AIET reporting - Pre-install applications by moving them to src/mlia/resources/backends - Rename aiet-config.json to backend-config.json - Move tests from tests/mlia/ to tests/ - Adapt unit tests to code changes - Dependencies removed: paramiko, filelock, psutil - Fix bug in corstone.py: The wrong resource directory was used which broke the functionality to download backends. - Use f-string formatting. - Use logging instead of print. Change-Id: I768bc3bb6b2eda57d219ad01be4a8e0a74167d76
Diffstat (limited to 'tests/test_nn_tensorflow_optimizations_pruning.py')
-rw-r--r--tests/test_nn_tensorflow_optimizations_pruning.py117
1 files changed, 117 insertions, 0 deletions
diff --git a/tests/test_nn_tensorflow_optimizations_pruning.py b/tests/test_nn_tensorflow_optimizations_pruning.py
new file mode 100644
index 0000000..5d92f5e
--- /dev/null
+++ b/tests/test_nn_tensorflow_optimizations_pruning.py
@@ -0,0 +1,117 @@
+# SPDX-FileCopyrightText: Copyright 2022, Arm Limited and/or its affiliates.
+# SPDX-License-Identifier: Apache-2.0
+"""Test for module optimizations/pruning."""
+from pathlib import Path
+from typing import List
+from typing import Optional
+
+import pytest
+import tensorflow as tf
+from numpy.core.numeric import isclose
+
+from mlia.nn.tensorflow.optimizations.pruning import Pruner
+from mlia.nn.tensorflow.optimizations.pruning import PruningConfiguration
+from mlia.nn.tensorflow.tflite_metrics import TFLiteMetrics
+from mlia.nn.tensorflow.utils import convert_to_tflite
+from mlia.nn.tensorflow.utils import save_tflite_model
+from tests.utils.common import get_dataset
+from tests.utils.common import train_model
+
+
+def _test_sparsity(
+    metrics: TFLiteMetrics,
+    target_sparsity: float,
+    layers_to_prune: Optional[List[str]],
+) -> None:
+    """Assert that exactly the expected number of layers reached the target sparsity.
+
+    A layer counts as "sparse" when its measured sparsity is within
+    ``error_margin`` of ``target_sparsity``. If ``layers_to_prune`` is given,
+    exactly that many layers must be sparse; otherwise every optimizable
+    layer reported by the metrics must be sparse.
+    """
+    pruned_sparsity_dict = metrics.sparsity_per_layer()
+    num_sparse_layers = 0
+    num_optimizable_layers = len(pruned_sparsity_dict)
+    # Tolerance for the achieved sparsity: pruning does not hit the target
+    # exactly, so allow a deviation of up to 3 percentage points.
+    error_margin = 0.03
+    if layers_to_prune:
+        expected_num_sparse_layers = len(layers_to_prune)
+    else:
+        # No explicit layer selection: all optimizable layers are pruned.
+        expected_num_sparse_layers = num_optimizable_layers
+    for layer_name in pruned_sparsity_dict:
+        if abs(pruned_sparsity_dict[layer_name] - target_sparsity) < error_margin:
+            num_sparse_layers = num_sparse_layers + 1
+    # make sure we are having exactly as many sparse layers as we wanted
+    assert num_sparse_layers == expected_num_sparse_layers
+
+
+def _test_check_sparsity(base_tflite_metrics: TFLiteMetrics) -> None:
+    """Assert the sparsity of a model is zero.
+
+    Used as a sanity check on the unpruned base model before pruning is
+    applied, so that any sparsity measured afterwards is attributable to
+    the pruning step.
+    """
+    base_sparsity_dict = base_tflite_metrics.sparsity_per_layer()
+    for layer_name, sparsity in base_sparsity_dict.items():
+        # atol=1e-2: a freshly trained model may have a few incidental zero
+        # weights, so "zero sparsity" is checked only up to this tolerance.
+        assert isclose(
+            sparsity, 0, atol=1e-2
+        ), f"Sparsity for layer '{layer_name}' is {sparsity}, but should be zero."
+
+
+def _get_tflite_metrics(
+    path: Path, tflite_fn: str, model: tf.keras.Model
+) -> TFLiteMetrics:
+    """Convert a Keras model to TFLite, save it under path/tflite_fn and return metrics.
+
+    The TFLite file is written to disk because TFLiteMetrics is constructed
+    from a file path, not from an in-memory model.
+    """
+    temp_file = path / tflite_fn
+    save_tflite_model(convert_to_tflite(model), temp_file)
+    return TFLiteMetrics(str(temp_file))
+
+
+@pytest.mark.parametrize("target_sparsity", (0.5, 0.9))
+@pytest.mark.parametrize("mock_data", (False, True))
+@pytest.mark.parametrize("layers_to_prune", (["conv1"], ["conv1", "conv2"], None))
+def test_prune_simple_model_fully(
+    target_sparsity: float,
+    mock_data: bool,
+    layers_to_prune: Optional[List[str]],
+    tmp_path: Path,
+    test_keras_model: Path,
+) -> None:
+    """Simple MNIST test to see if pruning works correctly.
+
+    Loads the test Keras model (provided by the ``test_keras_model``
+    fixture), verifies it starts with zero sparsity, prunes it to
+    ``target_sparsity`` (optionally restricted to ``layers_to_prune``),
+    and checks the resulting TFLite model's per-layer sparsity.
+    """
+    x_train, y_train = get_dataset()
+    # Minimal training effort: one epoch with batch size 1 keeps the test fast.
+    batch_size = 1
+    num_epochs = 1
+
+    base_model = tf.keras.models.load_model(str(test_keras_model))
+    train_model(base_model)
+
+    base_tflite_metrics = _get_tflite_metrics(
+        path=tmp_path,
+        tflite_fn="test_prune_simple_model_fully_before.tflite",
+        model=base_model,
+    )
+
+    # Make sure sparsity is zero before pruning
+    _test_check_sparsity(base_tflite_metrics)
+
+    if mock_data:
+        # No training data supplied: exercises the Pruner code path that
+        # works without a (re)training dataset.
+        pruner = Pruner(
+            base_model,
+            PruningConfiguration(
+                target_sparsity,
+                layers_to_prune,
+            ),
+        )
+
+    else:
+        # Full configuration including dataset and training parameters.
+        pruner = Pruner(
+            base_model,
+            PruningConfiguration(
+                target_sparsity,
+                layers_to_prune,
+                x_train,
+                y_train,
+                batch_size,
+                num_epochs,
+            ),
+        )
+
+    pruner.apply_optimization()
+    pruned_model = pruner.get_model()
+
+    pruned_tflite_metrics = _get_tflite_metrics(
+        path=tmp_path,
+        tflite_fn="test_prune_simple_model_fully_after.tflite",
+        model=pruned_model,
+    )
+
+    # Verify that exactly the requested layers reached the target sparsity.
+    _test_sparsity(pruned_tflite_metrics, target_sparsity, layers_to_prune)