From e40a7adadd254e29d71af38f69a0a20ff4871eef Mon Sep 17 00:00:00 2001 From: Benjamin Klimczak Date: Tue, 25 Oct 2022 18:12:34 +0100 Subject: MLIA-411 Report Cortex-A operator compatibility Check input model for Arm NN TensorFlow Lite Delegate 22.08 support. Change-Id: I1253c4c0b294c5283e08f0a39561b922ef0f62e6 --- tests/test_cli_main.py | 15 +++++ tests/test_devices_cortexa_advice_generation.py | 76 +++++++++++++++++++++-- tests/test_devices_cortexa_data_analysis.py | 81 ++++++++++++++++++++++++- tests/test_devices_cortexa_data_collection.py | 30 ++++++++- tests/test_devices_cortexa_operators.py | 73 ++++++++++++++++++++++ tests/test_devices_cortexa_reporters.py | 52 ++++++++++++++++ tests/test_nn_tensorflow_tflite_graph.py | 81 +++++++++++++++++++++++++ 7 files changed, 396 insertions(+), 12 deletions(-) create mode 100644 tests/test_devices_cortexa_operators.py create mode 100644 tests/test_devices_cortexa_reporters.py create mode 100644 tests/test_nn_tensorflow_tflite_graph.py (limited to 'tests') diff --git a/tests/test_cli_main.py b/tests/test_cli_main.py index 78adc53..4b16ac5 100644 --- a/tests/test_cli_main.py +++ b/tests/test_cli_main.py @@ -250,6 +250,21 @@ def test_default_command(monkeypatch: pytest.MonkeyPatch, tmp_path: Path) -> Non evaluate_on=["some_backend"], ), ], + [ + [ + "operators", + "sample_model.h5", + "--target-profile", + "cortex-a", + ], + call( + ctx=ANY, + target_profile="cortex-a", + model="sample_model.h5", + output=None, + supported_ops_report=False, + ), + ], ], ) def test_commands_execution( diff --git a/tests/test_devices_cortexa_advice_generation.py b/tests/test_devices_cortexa_advice_generation.py index ead8ae6..0446f38 100644 --- a/tests/test_devices_cortexa_advice_generation.py +++ b/tests/test_devices_cortexa_advice_generation.py @@ -13,27 +13,91 @@ from mlia.devices.cortexa.advice_generation import CortexAAdviceProducer from mlia.devices.cortexa.data_analysis import ModelIsCortexACompatible from mlia.devices.cortexa.data_analysis import ModelIsNotCortexACompatible from mlia.devices.cortexa.data_analysis import ModelIsNotTFLiteCompatible +from mlia.devices.cortexa.operator_compatibility import ARMNN_TFLITE_DELEGATE +from mlia.nn.tensorflow.tflite_graph import TFL_ACTIVATION_FUNCTION + +BACKEND_INFO = ( + f"{ARMNN_TFLITE_DELEGATE['metadata']['backend']} " + f"{ARMNN_TFLITE_DELEGATE['metadata']['version']}" +) @pytest.mark.parametrize( "input_data, advice_category, expected_advice", [ [ - ModelIsNotCortexACompatible(), + ModelIsNotCortexACompatible(BACKEND_INFO, {"UNSUPPORTED_OP"}, {}), AdviceCategory.OPERATORS, [ Advice( [ - "Some operators in the model are not compatible with Cortex-A. " - "Please, refer to the operators table for more information." 
+ "The following operators are not supported by " + f"{BACKEND_INFO} and will fall back to the TensorFlow " + "Lite runtime:", + " - UNSUPPORTED_OP", ] - ) + ), + Advice( + [ + "Please, refer to the full table of operators above " + "for more information.", + CortexAAdviceProducer.cortex_a_disclaimer, + ] + ), + ], + ], + [ + ModelIsNotCortexACompatible( + BACKEND_INFO, + {"UNSUPPORTED_OP"}, + { + "CONV_2D": ModelIsNotCortexACompatible.ActivationFunctionSupport( + used_unsupported={TFL_ACTIVATION_FUNCTION.SIGN_BIT.name}, + supported={"RELU"}, + ) + }, + ), + AdviceCategory.OPERATORS, + [ + Advice( + [ + "The following operators are not supported by " + f"{BACKEND_INFO} and will fall back to the TensorFlow " + "Lite runtime:", + " - UNSUPPORTED_OP", + ] + ), + Advice( + [ + "The fused activation functions of the following " + f"operators are not supported by {BACKEND_INFO}. " + "Please consider using one of the supported activation " + "functions instead:", + " - CONV_2D\n" + " - Used unsupported: {'SIGN_BIT'}\n" + " - Supported: {'RELU'}", + ] + ), + Advice( + [ + "Please, refer to the full table of operators above " + "for more information.", + CortexAAdviceProducer.cortex_a_disclaimer, + ] + ), ], ], [ - ModelIsCortexACompatible(), + ModelIsCortexACompatible(BACKEND_INFO), AdviceCategory.OPERATORS, - [Advice(["Model is fully compatible with Cortex-A."])], + [ + Advice( + [ + f"Model is fully compatible with {BACKEND_INFO} for Cortex-A.", + CortexAAdviceProducer.cortex_a_disclaimer, + ] + ) + ], ], [ ModelIsNotTFLiteCompatible( diff --git a/tests/test_devices_cortexa_data_analysis.py b/tests/test_devices_cortexa_data_analysis.py index b491e52..4d98212 100644 --- a/tests/test_devices_cortexa_data_analysis.py +++ b/tests/test_devices_cortexa_data_analysis.py @@ -11,10 +11,18 @@ from mlia.devices.cortexa.data_analysis import CortexADataAnalyzer from mlia.devices.cortexa.data_analysis import ModelIsCortexACompatible from mlia.devices.cortexa.data_analysis import ModelIsNotCortexACompatible from mlia.devices.cortexa.data_analysis import ModelIsNotTFLiteCompatible +from mlia.devices.cortexa.operator_compatibility import ARMNN_TFLITE_DELEGATE from mlia.devices.cortexa.operators import CortexACompatibilityInfo +from mlia.devices.cortexa.operators import Operator from mlia.nn.tensorflow.tflite_compat import TFLiteCompatibilityInfo from mlia.nn.tensorflow.tflite_compat import TFLiteConversionError from mlia.nn.tensorflow.tflite_compat import TFLiteConversionErrorCode +from mlia.nn.tensorflow.tflite_graph import TFL_ACTIVATION_FUNCTION + +BACKEND_INFO = ( + f"{ARMNN_TFLITE_DELEGATE['metadata']['backend']} " + f"{ARMNN_TFLITE_DELEGATE['metadata']['version']}" +) @pytest.mark.parametrize( @@ -22,11 +30,78 @@ from mlia.nn.tensorflow.tflite_compat import TFLiteConversionErrorCode [ [ CortexACompatibilityInfo(True, []), - [ModelIsCortexACompatible()], + [ModelIsCortexACompatible(BACKEND_INFO)], + ], + [ + CortexACompatibilityInfo( + True, + [ + Operator( + "CONV_2D", + "somewhere", + support_type=Operator.SupportType.COMPATIBLE, + activation_func=TFL_ACTIVATION_FUNCTION.NONE, + ), + Operator( + "CUSTOM", + "somewhere else", + support_type=Operator.SupportType.COMPATIBLE, + activation_func=TFL_ACTIVATION_FUNCTION.SIGN_BIT, + custom_name="MaxPool3D", + ), + ], + ), + [ModelIsCortexACompatible(BACKEND_INFO)], ], [ - CortexACompatibilityInfo(False, []), - [ModelIsNotCortexACompatible()], + # pylint: disable=line-too-long + CortexACompatibilityInfo( + False, + [ + Operator( + "UNSUPPORTED_OP", + "somewhere", 
+ support_type=Operator.SupportType.OP_NOT_SUPPORTED, + activation_func=TFL_ACTIVATION_FUNCTION.NONE, + ), + Operator( + "CUSTOM", + "somewhere", + support_type=Operator.SupportType.OP_NOT_SUPPORTED, + activation_func=TFL_ACTIVATION_FUNCTION.NONE, + custom_name="UNSUPPORTED_OP", + ), + Operator( + "CONV_2D", + "somewhere else", + support_type=Operator.SupportType.ACTIVATION_NOT_SUPPORTED, + activation_func=TFL_ACTIVATION_FUNCTION.SIGN_BIT, + ), + ], + ), + [ + ModelIsNotCortexACompatible( + BACKEND_INFO, + { + "UNSUPPORTED_OP", + "CUSTOM - 'UNSUPPORTED_OP'", + }, + { + "CONV_2D": ModelIsNotCortexACompatible.ActivationFunctionSupport( + used_unsupported={TFL_ACTIVATION_FUNCTION.SIGN_BIT.name}, + supported={ + "RELU", + "RELU6", + "RELU_N1_TO_1", + "SIGMOID", + "TANH", + "NONE", + }, + ) + }, + ) + ], + # pylint: enable=line-too-long ], [ TFLiteCompatibilityInfo(compatible=True), diff --git a/tests/test_devices_cortexa_data_collection.py b/tests/test_devices_cortexa_data_collection.py index 7ea3e52..6d3b2ac 100644 --- a/tests/test_devices_cortexa_data_collection.py +++ b/tests/test_devices_cortexa_data_collection.py @@ -11,18 +11,42 @@ from mlia.devices.cortexa.data_collection import CortexAOperatorCompatibility from mlia.devices.cortexa.operators import CortexACompatibilityInfo -def test_cortex_a_data_collection( - monkeypatch: pytest.MonkeyPatch, test_tflite_model: Path, tmpdir: str +def check_cortex_a_data_collection( + monkeypatch: pytest.MonkeyPatch, model: Path, tmpdir: str ) -> None: """Test Cortex-A data collection.""" + assert CortexAOperatorCompatibility.name() + monkeypatch.setattr( "mlia.devices.cortexa.data_collection.get_cortex_a_compatibility_info", MagicMock(return_value=CortexACompatibilityInfo(True, [])), ) + context = ExecutionContext(working_dir=tmpdir) - collector = CortexAOperatorCompatibility(test_tflite_model) + collector = CortexAOperatorCompatibility(model) collector.set_context(context) data_item = collector.collect_data() assert isinstance(data_item, CortexACompatibilityInfo) + + +def test_cortex_a_data_collection_tflite( + monkeypatch: pytest.MonkeyPatch, test_tflite_model: Path, tmpdir: str +) -> None: + """Test Cortex-A data collection with a TensorFlow Lite model.""" + check_cortex_a_data_collection(monkeypatch, test_tflite_model, tmpdir) + + +def test_cortex_a_data_collection_keras( + monkeypatch: pytest.MonkeyPatch, test_keras_model: Path, tmpdir: str +) -> None: + """Test Cortex-A data collection with a Keras model.""" + check_cortex_a_data_collection(monkeypatch, test_keras_model, tmpdir) + + +def test_cortex_a_data_collection_tf( + monkeypatch: pytest.MonkeyPatch, test_tf_model: Path, tmpdir: str +) -> None: + """Test Cortex-A data collection with a SavedModel.""" + check_cortex_a_data_collection(monkeypatch, test_tf_model, tmpdir) diff --git a/tests/test_devices_cortexa_operators.py b/tests/test_devices_cortexa_operators.py new file mode 100644 index 0000000..23c4b0a --- /dev/null +++ b/tests/test_devices_cortexa_operators.py @@ -0,0 +1,73 @@ +# SPDX-FileCopyrightText: Copyright 2022, Arm Limited and/or its affiliates. 
+# SPDX-License-Identifier: Apache-2.0 +"""Tests for Cortex-A operator compatibility.""" +from pathlib import Path + +import pytest +import tensorflow as tf + +from mlia.devices.cortexa import operator_compatibility as op_compat +from mlia.devices.cortexa.operators import CortexACompatibilityInfo +from mlia.devices.cortexa.operators import get_cortex_a_compatibility_info +from mlia.devices.cortexa.operators import Operator +from mlia.nn.tensorflow.tflite_graph import TFL_OP +from mlia.nn.tensorflow.utils import convert_to_tflite + + +def test_op_compat_data() -> None: + """Make sure all data contains the necessary items.""" + builtin_tfl_ops = {op.name for op in TFL_OP} + for data in [op_compat.ARMNN_TFLITE_DELEGATE]: + assert "metadata" in data + assert "backend" in data["metadata"] + assert "version" in data["metadata"] + assert "builtin_ops" in data + for comp in data["builtin_ops"]: + assert comp in builtin_tfl_ops + assert "custom_ops" in data + + +def check_get_cortex_a_compatibility_info( + model_path: Path, + expected_success: bool, +) -> None: + """Check the function 'get_cortex_a_compatibility_info'.""" + compat_info = get_cortex_a_compatibility_info(model_path) + assert isinstance(compat_info, CortexACompatibilityInfo) + assert expected_success == compat_info.cortex_a_compatible + assert compat_info.operators + for oper in compat_info.operators: + assert oper.name + assert oper.location + assert oper.support_type in Operator.SupportType + + +def test_get_cortex_a_compatibility_info_compatible( + test_tflite_model: Path, +) -> None: + """Test a fully compatible TensorFlow Lite model.""" + check_get_cortex_a_compatibility_info(test_tflite_model, expected_success=True) + + +def test_get_cortex_a_compatibility_info_not_compatible( + monkeypatch: pytest.MonkeyPatch, +) -> None: + """Construct and test a NOT fully compatible TensorFlow Lite model.""" + keras_model = tf.keras.Sequential( + [ + tf.keras.Input(shape=(28, 28, 1), batch_size=1, name="input"), + tf.keras.layers.Conv2D( + filters=12, kernel_size=(3, 3), activation="softmax", name="conv1" + ), + tf.keras.layers.LeakyReLU(), + ] + ) + keras_model.compile(optimizer="sgd", loss="mean_squared_error") + tflite_model = convert_to_tflite(keras_model, quantized=False) + + monkeypatch.setattr( + "mlia.nn.tensorflow.tflite_graph.load_tflite", lambda _p: tflite_model + ) + check_get_cortex_a_compatibility_info( + Path("NOT_USED_BECAUSE_OF_MOCKING"), expected_success=False + ) diff --git a/tests/test_devices_cortexa_reporters.py b/tests/test_devices_cortexa_reporters.py new file mode 100644 index 0000000..4177b55 --- /dev/null +++ b/tests/test_devices_cortexa_reporters.py @@ -0,0 +1,52 @@ +# SPDX-FileCopyrightText: Copyright 2022, Arm Limited and/or its affiliates. 
+# SPDX-License-Identifier: Apache-2.0 +"""Tests for Cortex-A reporters.""" +from typing import Any + +import pytest + +from mlia.core.advice_generation import Advice +from mlia.core.reporting import Report +from mlia.devices.cortexa.config import CortexAConfiguration +from mlia.devices.cortexa.operators import Operator +from mlia.devices.cortexa.reporters import cortex_a_formatters +from mlia.devices.cortexa.reporters import report_device +from mlia.nn.tensorflow.tflite_compat import TFLiteCompatibilityInfo +from mlia.nn.tensorflow.tflite_graph import TFL_ACTIVATION_FUNCTION + + +def test_report_device() -> None: + """Test function report_device().""" + report = report_device(CortexAConfiguration("cortex-a")) + assert report.to_plain_text() + + +@pytest.mark.parametrize( + "data", + ( + [Advice(["Sample", "Advice"])], + TFLiteCompatibilityInfo(compatible=True), + [ + Operator( + name="Test", + location="loc", + support_type=Operator.SupportType.OP_NOT_SUPPORTED, + activation_func=TFL_ACTIVATION_FUNCTION.NONE, + ) + ], + ), +) +def test_cortex_a_formatters(data: Any) -> None: + """Test function cortex_a_formatters() with valid input.""" + formatter = cortex_a_formatters(data) + report = formatter(data) + assert isinstance(report, Report) + + +def test_cortex_a_formatters_invalid_data() -> None: + """Test cortex_a_formatters() with invalid input.""" + with pytest.raises( + Exception, + match=r"^Unable to find appropriate formatter for .*", + ): + cortex_a_formatters(12) diff --git a/tests/test_nn_tensorflow_tflite_graph.py b/tests/test_nn_tensorflow_tflite_graph.py new file mode 100644 index 0000000..cd1fad6 --- /dev/null +++ b/tests/test_nn_tensorflow_tflite_graph.py @@ -0,0 +1,81 @@ +# SPDX-FileCopyrightText: Copyright 2022, Arm Limited and/or its affiliates. 
+# SPDX-License-Identifier: Apache-2.0 +"""Tests for the tflite_graph module.""" +import json +from pathlib import Path + +from mlia.nn.tensorflow.tflite_graph import Op +from mlia.nn.tensorflow.tflite_graph import parse_subgraphs +from mlia.nn.tensorflow.tflite_graph import TensorInfo +from mlia.nn.tensorflow.tflite_graph import TFL_ACTIVATION_FUNCTION +from mlia.nn.tensorflow.tflite_graph import TFL_OP +from mlia.nn.tensorflow.tflite_graph import TFL_TYPE + + +def test_tensor_info() -> None: + """Test class 'TensorInfo'.""" + expected = { + "name": "Test", + "type": TFL_TYPE.INT8.name, + "shape": (1,), + "is_variable": False, + } + info = TensorInfo(**expected) + assert vars(info) == expected + + expected = { + "name": "Test2", + "type": TFL_TYPE.FLOAT32.name, + "shape": [2, 3], + "is_variable": True, + } + tensor_dict = { + "name": [ord(c) for c in expected["name"]], + "type": TFL_TYPE[expected["type"]], + "shape": expected["shape"], + "is_variable": expected["is_variable"], + } + info = TensorInfo.from_dict(tensor_dict) + assert vars(info) == expected + + json_repr = json.loads(repr(info)) + assert vars(info) == json_repr + + assert str(info) + + +def test_op() -> None: + """Test class 'Op'.""" + expected = { + "type": TFL_OP.CONV_2D.name, + "builtin_options": {}, + "inputs": [], + "outputs": [], + "custom_type": None, + } + oper = Op(**expected) + assert vars(oper) == expected + + expected["builtin_options"] = {"some_random_option": 3.14} + oper = Op(**expected) + assert vars(oper) == expected + + activation_func = TFL_ACTIVATION_FUNCTION.RELU + expected["builtin_options"] = {"fused_activation_function": activation_func.value} + oper = Op(**expected) + assert oper.builtin_options + assert oper.builtin_options["fused_activation_function"] == activation_func.name + + assert str(oper) + assert repr(oper) + + +def test_parse_subgraphs(test_tflite_model: Path) -> None: + """Test function 'parse_subgraphs'.""" + model = parse_subgraphs(test_tflite_model) + assert len(model) == 1 + assert len(model[0]) == 5 + for oper in model[0]: + assert TFL_OP[oper.type] in TFL_OP + assert len(oper.inputs) > 0 + assert len(oper.outputs) > 0 -- cgit v1.2.1
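
For context, a minimal sketch (not part of the patch) of how the compatibility check that these new tests exercise might be driven from Python. It uses only names imported and asserted on in the tests above (get_cortex_a_compatibility_info, CortexACompatibilityInfo and its cortex_a_compatible/operators attributes); the model path is a placeholder, not a file shipped with the change.

    from pathlib import Path

    from mlia.devices.cortexa.operators import get_cortex_a_compatibility_info

    # "model.tflite" is a placeholder path; any TensorFlow Lite model would do.
    compat_info = get_cortex_a_compatibility_info(Path("model.tflite"))

    print(f"Cortex-A compatible: {compat_info.cortex_a_compatible}")
    for oper in compat_info.operators:
        # Each operator carries a name, a graph location and a support type,
        # mirroring the assertions in test_devices_cortexa_operators.py above.
        print(f"{oper.location}: {oper.name} -> {oper.support_type.name}")

The same check is surfaced on the command line through the "operators" command parametrised in test_cli_main.py, presumably along the lines of: mlia operators sample_model.h5 --target-profile cortex-a.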