aboutsummaryrefslogtreecommitdiff
diff options
context:
space:
mode:
authorDmitrii Agibov <dmitrii.agibov@arm.com>2022-11-09 11:23:50 +0000
committerDmitrii Agibov <dmitrii.agibov@arm.com>2022-11-15 12:55:49 +0000
commitef73bb773df214f3f33f8e4ca7d276041106cad2 (patch)
tree313d5bbcea9574dd4fa026639443548766cf2b91
parentbb20d22509a304c76f849486fe15e3acd7667fb8 (diff)
downloadmlia-ef73bb773df214f3f33f8e4ca7d276041106cad2.tar.gz
MLIA-685 Warn about custom operators in SavedModel/Keras models
- Add new error types for the TensorFlow Lite compatibility check - Try to detect custom operators in SavedModel/Keras models - Add warning to the advice about models with custom operators Change-Id: I2f65474eecf2788110acc43585fa300eda80e21b
-rw-r--r--src/mlia/devices/cortexa/advice_generation.py38
-rw-r--r--src/mlia/devices/cortexa/data_analysis.py32
-rw-r--r--src/mlia/nn/tensorflow/tflite_compat.py92
-rw-r--r--tests/test_devices_cortexa_advice_generation.py38
-rw-r--r--tests/test_devices_cortexa_data_analysis.py21
-rw-r--r--tests/test_devices_cortexa_reporters.py3
-rw-r--r--tests/test_nn_tensorflow_tflite_compat.py32
7 files changed, 225 insertions, 31 deletions
diff --git a/src/mlia/devices/cortexa/advice_generation.py b/src/mlia/devices/cortexa/advice_generation.py
index 186f489..3d2f106 100644
--- a/src/mlia/devices/cortexa/advice_generation.py
+++ b/src/mlia/devices/cortexa/advice_generation.py
@@ -7,9 +7,11 @@ from mlia.core.advice_generation import advice_category
from mlia.core.advice_generation import FactBasedAdviceProducer
from mlia.core.common import AdviceCategory
from mlia.core.common import DataItem
+from mlia.devices.cortexa.data_analysis import ModelHasCustomOperators
from mlia.devices.cortexa.data_analysis import ModelIsCortexACompatible
from mlia.devices.cortexa.data_analysis import ModelIsNotCortexACompatible
from mlia.devices.cortexa.data_analysis import ModelIsNotTFLiteCompatible
+from mlia.devices.cortexa.data_analysis import TFLiteCompatibilityCheckFailed
class CortexAAdviceProducer(FactBasedAdviceProducer):
@@ -92,17 +94,25 @@ class CortexAAdviceProducer(FactBasedAdviceProducer):
"The following operators are not natively "
"supported by TensorFlow Lite: "
f"{', '.join(data_item.flex_ops)}.",
+ "Using select TensorFlow operators in TensorFlow Lite model "
+ "requires special initialization of TFLiteConverter and "
+ "TensorFlow Lite run-time.",
"Please refer to the TensorFlow documentation for more details.",
+ "Note, such models are not supported by the ML Inference Advisor.",
]
)
if data_item.custom_ops:
self.add_advice(
[
- "The following operators are custom and not natively "
+                    "The following operators appear to be custom and not natively "
"supported by TensorFlow Lite: "
f"{', '.join(data_item.custom_ops)}.",
+ "Using custom operators in TensorFlow Lite model "
+ "requires special initialization of TFLiteConverter and "
+ "TensorFlow Lite run-time.",
"Please refer to the TensorFlow documentation for more details.",
+ "Note, such models are not supported by the ML Inference Advisor.",
]
)
@@ -113,3 +123,29 @@ class CortexAAdviceProducer(FactBasedAdviceProducer):
"Please refer to the table for more details.",
]
)
+
+ @produce_advice.register
+ @advice_category(AdviceCategory.ALL, AdviceCategory.OPERATORS)
+ def handle_tflite_check_failed(
+ self, _data_item: TFLiteCompatibilityCheckFailed
+ ) -> None:
+ """Advice for the failed TensorFlow Lite compatibility checks."""
+ self.add_advice(
+ [
+ "Model could not be converted into TensorFlow Lite format.",
+ "Please refer to the table for more details.",
+ ]
+ )
+
+ @produce_advice.register
+ @advice_category(AdviceCategory.ALL, AdviceCategory.OPERATORS)
+ def handle_model_has_custom_operators(
+ self, _data_item: ModelHasCustomOperators
+ ) -> None:
+ """Advice for the models with custom operators."""
+ self.add_advice(
+ [
+ "Models with custom operators require special initialization "
+ "and currently are not supported by the ML Inference Advisor.",
+ ]
+ )
diff --git a/src/mlia/devices/cortexa/data_analysis.py b/src/mlia/devices/cortexa/data_analysis.py
index 6a82dd0..04bc819 100644
--- a/src/mlia/devices/cortexa/data_analysis.py
+++ b/src/mlia/devices/cortexa/data_analysis.py
@@ -14,7 +14,6 @@ from mlia.core.data_analysis import FactExtractor
from mlia.devices.cortexa.operators import CortexACompatibilityInfo
from mlia.devices.cortexa.operators import Operator
from mlia.nn.tensorflow.tflite_compat import TFLiteCompatibilityInfo
-from mlia.nn.tensorflow.tflite_compat import TFLiteConversionErrorCode
class CortexADataAnalyzer(FactExtractor):
@@ -69,18 +68,19 @@ class CortexADataAnalyzer(FactExtractor):
if data_item.compatible:
return
- custom_ops, flex_ops = [], []
- if data_item.conversion_errors:
- custom_ops = data_item.unsupported_ops_by_code(
- TFLiteConversionErrorCode.NEEDS_CUSTOM_OPS
- )
- flex_ops = data_item.unsupported_ops_by_code(
- TFLiteConversionErrorCode.NEEDS_FLEX_OPS
+ if data_item.conversion_failed_with_errors:
+ self.add_fact(
+ ModelIsNotTFLiteCompatible(
+ custom_ops=data_item.required_custom_ops,
+ flex_ops=data_item.required_flex_ops,
+ )
)
- self.add_fact(
- ModelIsNotTFLiteCompatible(custom_ops=custom_ops, flex_ops=flex_ops)
- )
+ if data_item.check_failed_with_unknown_error:
+ self.add_fact(TFLiteCompatibilityCheckFailed())
+
+ if data_item.conversion_failed_for_model_with_custom_ops:
+ self.add_fact(ModelHasCustomOperators())
@dataclass
@@ -116,3 +116,13 @@ class ModelIsNotTFLiteCompatible(Fact):
custom_ops: list[str] | None = None
flex_ops: list[str] | None = None
+
+
+@dataclass
+class TFLiteCompatibilityCheckFailed(Fact):
+    """TensorFlow Lite compatibility check failed for an unknown reason."""
+
+
+@dataclass
+class ModelHasCustomOperators(Fact):
+ """Model could not be loaded because it contains custom ops."""
diff --git a/src/mlia/nn/tensorflow/tflite_compat.py b/src/mlia/nn/tensorflow/tflite_compat.py
index 6f183ca..2b29879 100644
--- a/src/mlia/nn/tensorflow/tflite_compat.py
+++ b/src/mlia/nn/tensorflow/tflite_compat.py
@@ -49,11 +49,20 @@ class TFLiteConversionError:
location: list[str]
+class TFLiteCompatibilityStatus(Enum):
+    """TensorFlow Lite compatibility status."""
+
+ COMPATIBLE = auto()
+ TFLITE_CONVERSION_ERROR = auto()
+ MODEL_WITH_CUSTOM_OP_ERROR = auto()
+ UNKNOWN_ERROR = auto()
+
+
@dataclass
class TFLiteCompatibilityInfo:
"""TensorFlow Lite compatibility information."""
- compatible: bool
+ status: TFLiteCompatibilityStatus
conversion_exception: Exception | None = None
conversion_errors: list[TFLiteConversionError] | None = None
@@ -64,6 +73,36 @@ class TFLiteCompatibilityInfo:
return [err.operator for err in self.conversion_errors if err.code == code]
+ @property
+ def compatible(self) -> bool:
+ """Return true if model compatible with the TensorFlow Lite format."""
+ return self.status == TFLiteCompatibilityStatus.COMPATIBLE
+
+ @property
+ def conversion_failed_with_errors(self) -> bool:
+ """Return true if conversion to TensorFlow Lite format failed."""
+ return self.status == TFLiteCompatibilityStatus.TFLITE_CONVERSION_ERROR
+
+ @property
+ def conversion_failed_for_model_with_custom_ops(self) -> bool:
+ """Return true if conversion failed due to custom ops in the model."""
+ return self.status == TFLiteCompatibilityStatus.MODEL_WITH_CUSTOM_OP_ERROR
+
+ @property
+ def check_failed_with_unknown_error(self) -> bool:
+ """Return true if check failed with unknown error."""
+ return self.status == TFLiteCompatibilityStatus.UNKNOWN_ERROR
+
+ @property
+ def required_custom_ops(self) -> list[str]:
+ """Return list of the custom ops reported during conversion."""
+ return self.unsupported_ops_by_code(TFLiteConversionErrorCode.NEEDS_CUSTOM_OPS)
+
+ @property
+ def required_flex_ops(self) -> list[str]:
+ """Return list of the flex ops reported during conversion."""
+ return self.unsupported_ops_by_code(TFLiteConversionErrorCode.NEEDS_FLEX_OPS)
+
class TFLiteChecker:
"""Class for checking TensorFlow Lite compatibility."""
@@ -86,13 +125,15 @@ class TFLiteChecker:
):
converter.convert()
except convert.ConverterError as err:
- return self._process_exception(err)
+ return self._process_convert_error(err)
except Exception as err: # pylint: disable=broad-except
- return TFLiteCompatibilityInfo(compatible=False, conversion_exception=err)
- else:
- return TFLiteCompatibilityInfo(compatible=True)
+ return self._process_exception(err)
- def _process_exception(
+ return TFLiteCompatibilityInfo(
+ status=TFLiteCompatibilityStatus.COMPATIBLE,
+ )
+
+ def _process_convert_error(
self, err: convert.ConverterError
) -> TFLiteCompatibilityInfo:
"""Parse error details if possible."""
@@ -114,11 +155,48 @@ class TFLiteChecker:
]
return TFLiteCompatibilityInfo(
- compatible=False,
+ status=TFLiteCompatibilityStatus.TFLITE_CONVERSION_ERROR,
conversion_exception=err,
conversion_errors=conversion_errors,
)
+ def _process_exception(self, err: Exception) -> TFLiteCompatibilityInfo:
+ """Process exception during conversion."""
+ status = TFLiteCompatibilityStatus.UNKNOWN_ERROR
+
+ if self._model_with_custom_op(err):
+ status = TFLiteCompatibilityStatus.MODEL_WITH_CUSTOM_OP_ERROR
+
+ return TFLiteCompatibilityInfo(
+ status=status,
+ conversion_exception=err,
+ )
+
+ @staticmethod
+ def _model_with_custom_op(err: Exception) -> bool:
+ """Check if model could not be loaded because of custom ops."""
+ exc_attrs = [
+ (
+ ValueError,
+ [
+ "Unable to restore custom object",
+ "passed to the `custom_objects`",
+ ],
+ ),
+ (
+ FileNotFoundError,
+ [
+ "Op type not registered",
+ ],
+ ),
+ ]
+
+ return any(
+ any(msg in str(err) for msg in messages)
+ for exc_type, messages in exc_attrs
+ if isinstance(err, exc_type)
+ )
+
@staticmethod
def _convert_error_code(code: int) -> TFLiteConversionErrorCode:
"""Convert internal error codes."""
diff --git a/tests/test_devices_cortexa_advice_generation.py b/tests/test_devices_cortexa_advice_generation.py
index 0446f38..4e6d892 100644
--- a/tests/test_devices_cortexa_advice_generation.py
+++ b/tests/test_devices_cortexa_advice_generation.py
@@ -10,9 +10,11 @@ from mlia.core.common import AdviceCategory
from mlia.core.common import DataItem
from mlia.core.context import ExecutionContext
from mlia.devices.cortexa.advice_generation import CortexAAdviceProducer
+from mlia.devices.cortexa.data_analysis import ModelHasCustomOperators
from mlia.devices.cortexa.data_analysis import ModelIsCortexACompatible
from mlia.devices.cortexa.data_analysis import ModelIsNotCortexACompatible
from mlia.devices.cortexa.data_analysis import ModelIsNotTFLiteCompatible
+from mlia.devices.cortexa.data_analysis import TFLiteCompatibilityCheckFailed
from mlia.devices.cortexa.operator_compatibility import ARMNN_TFLITE_DELEGATE
from mlia.nn.tensorflow.tflite_graph import TFL_ACTIVATION_FUNCTION
@@ -110,16 +112,26 @@ BACKEND_INFO = (
[
"The following operators are not natively "
"supported by TensorFlow Lite: flex_op1, flex_op2.",
+ "Using select TensorFlow operators in TensorFlow Lite model "
+ "requires special initialization of TFLiteConverter and "
+ "TensorFlow Lite run-time.",
"Please refer to the TensorFlow documentation for "
"more details.",
+ "Note, such models are not supported by "
+ "the ML Inference Advisor.",
]
),
Advice(
[
- "The following operators are custom and not natively "
+                        "The following operators appear to be custom and not natively "
"supported by TensorFlow Lite: custom_op1, custom_op2.",
+ "Using custom operators in TensorFlow Lite model "
+ "requires special initialization of TFLiteConverter and "
+ "TensorFlow Lite run-time.",
"Please refer to the TensorFlow documentation for "
"more details.",
+ "Note, such models are not supported by "
+ "the ML Inference Advisor.",
]
),
],
@@ -136,6 +148,30 @@ BACKEND_INFO = (
),
],
],
+ [
+ ModelHasCustomOperators(),
+ AdviceCategory.OPERATORS,
+ [
+ Advice(
+ [
+ "Models with custom operators require special initialization "
+ "and currently are not supported by the ML Inference Advisor.",
+ ]
+ ),
+ ],
+ ],
+ [
+ TFLiteCompatibilityCheckFailed(),
+ AdviceCategory.OPERATORS,
+ [
+ Advice(
+ [
+ "Model could not be converted into TensorFlow Lite format.",
+ "Please refer to the table for more details.",
+ ]
+ ),
+ ],
+ ],
],
)
def test_cortex_a_advice_producer(
diff --git a/tests/test_devices_cortexa_data_analysis.py b/tests/test_devices_cortexa_data_analysis.py
index 4d98212..ed30b9a 100644
--- a/tests/test_devices_cortexa_data_analysis.py
+++ b/tests/test_devices_cortexa_data_analysis.py
@@ -8,13 +8,16 @@ import pytest
from mlia.core.common import DataItem
from mlia.core.data_analysis import Fact
from mlia.devices.cortexa.data_analysis import CortexADataAnalyzer
+from mlia.devices.cortexa.data_analysis import ModelHasCustomOperators
from mlia.devices.cortexa.data_analysis import ModelIsCortexACompatible
from mlia.devices.cortexa.data_analysis import ModelIsNotCortexACompatible
from mlia.devices.cortexa.data_analysis import ModelIsNotTFLiteCompatible
+from mlia.devices.cortexa.data_analysis import TFLiteCompatibilityCheckFailed
from mlia.devices.cortexa.operator_compatibility import ARMNN_TFLITE_DELEGATE
from mlia.devices.cortexa.operators import CortexACompatibilityInfo
from mlia.devices.cortexa.operators import Operator
from mlia.nn.tensorflow.tflite_compat import TFLiteCompatibilityInfo
+from mlia.nn.tensorflow.tflite_compat import TFLiteCompatibilityStatus
from mlia.nn.tensorflow.tflite_compat import TFLiteConversionError
from mlia.nn.tensorflow.tflite_compat import TFLiteConversionErrorCode
from mlia.nn.tensorflow.tflite_graph import TFL_ACTIVATION_FUNCTION
@@ -104,16 +107,28 @@ BACKEND_INFO = (
# pylint: enable=line-too-long
],
[
- TFLiteCompatibilityInfo(compatible=True),
+ TFLiteCompatibilityInfo(status=TFLiteCompatibilityStatus.COMPATIBLE),
[],
],
[
- TFLiteCompatibilityInfo(compatible=False),
+ TFLiteCompatibilityInfo(
+ status=TFLiteCompatibilityStatus.MODEL_WITH_CUSTOM_OP_ERROR
+ ),
+ [ModelHasCustomOperators()],
+ ],
+ [
+ TFLiteCompatibilityInfo(status=TFLiteCompatibilityStatus.UNKNOWN_ERROR),
+ [TFLiteCompatibilityCheckFailed()],
+ ],
+ [
+ TFLiteCompatibilityInfo(
+ status=TFLiteCompatibilityStatus.TFLITE_CONVERSION_ERROR
+ ),
[ModelIsNotTFLiteCompatible(custom_ops=[], flex_ops=[])],
],
[
TFLiteCompatibilityInfo(
- compatible=False,
+ status=TFLiteCompatibilityStatus.TFLITE_CONVERSION_ERROR,
conversion_errors=[
TFLiteConversionError(
"error",
diff --git a/tests/test_devices_cortexa_reporters.py b/tests/test_devices_cortexa_reporters.py
index 4177b55..1110653 100644
--- a/tests/test_devices_cortexa_reporters.py
+++ b/tests/test_devices_cortexa_reporters.py
@@ -12,6 +12,7 @@ from mlia.devices.cortexa.operators import Operator
from mlia.devices.cortexa.reporters import cortex_a_formatters
from mlia.devices.cortexa.reporters import report_device
from mlia.nn.tensorflow.tflite_compat import TFLiteCompatibilityInfo
+from mlia.nn.tensorflow.tflite_compat import TFLiteCompatibilityStatus
from mlia.nn.tensorflow.tflite_graph import TFL_ACTIVATION_FUNCTION
@@ -25,7 +26,7 @@ def test_report_device() -> None:
"data",
(
[Advice(["Sample", "Advice"])],
- TFLiteCompatibilityInfo(compatible=True),
+ TFLiteCompatibilityInfo(status=TFLiteCompatibilityStatus.COMPATIBLE),
[
Operator(
name="Test",
diff --git a/tests/test_nn_tensorflow_tflite_compat.py b/tests/test_nn_tensorflow_tflite_compat.py
index 1bd4c34..f203125 100644
--- a/tests/test_nn_tensorflow_tflite_compat.py
+++ b/tests/test_nn_tensorflow_tflite_compat.py
@@ -12,6 +12,7 @@ from tensorflow.lite.python import convert
from mlia.nn.tensorflow.tflite_compat import converter_error_data_pb2
from mlia.nn.tensorflow.tflite_compat import TFLiteChecker
from mlia.nn.tensorflow.tflite_compat import TFLiteCompatibilityInfo
+from mlia.nn.tensorflow.tflite_compat import TFLiteCompatibilityStatus
from mlia.nn.tensorflow.tflite_compat import TFLiteConversionError
from mlia.nn.tensorflow.tflite_compat import TFLiteConversionErrorCode
@@ -87,11 +88,14 @@ def _get_tflite_conversion_error(
@pytest.mark.parametrize(
"conversion_error, expected_result",
[
- (None, TFLiteCompatibilityInfo(compatible=True)),
+ (
+ None,
+ TFLiteCompatibilityInfo(status=TFLiteCompatibilityStatus.COMPATIBLE),
+ ),
(
err := _get_tflite_conversion_error(custom_op=True),
TFLiteCompatibilityInfo(
- compatible=False,
+ status=TFLiteCompatibilityStatus.TFLITE_CONVERSION_ERROR,
conversion_exception=err,
conversion_errors=[
TFLiteConversionError(
@@ -106,7 +110,7 @@ def _get_tflite_conversion_error(
(
err := _get_tflite_conversion_error(flex_op=True),
TFLiteCompatibilityInfo(
- compatible=False,
+ status=TFLiteCompatibilityStatus.TFLITE_CONVERSION_ERROR,
conversion_exception=err,
conversion_errors=[
TFLiteConversionError(
@@ -121,7 +125,7 @@ def _get_tflite_conversion_error(
(
err := _get_tflite_conversion_error(unknown_reason=True),
TFLiteCompatibilityInfo(
- compatible=False,
+ status=TFLiteCompatibilityStatus.TFLITE_CONVERSION_ERROR,
conversion_exception=err,
conversion_errors=[
TFLiteConversionError(
@@ -141,7 +145,7 @@ def _get_tflite_conversion_error(
unsupported_flow_v1=True,
),
TFLiteCompatibilityInfo(
- compatible=False,
+ status=TFLiteCompatibilityStatus.TFLITE_CONVERSION_ERROR,
conversion_exception=err,
conversion_errors=[
TFLiteConversionError(
@@ -174,7 +178,7 @@ def _get_tflite_conversion_error(
(
err := _get_tflite_conversion_error(),
TFLiteCompatibilityInfo(
- compatible=False,
+ status=TFLiteCompatibilityStatus.TFLITE_CONVERSION_ERROR,
conversion_exception=err,
conversion_errors=[],
),
@@ -182,7 +186,21 @@ def _get_tflite_conversion_error(
(
err := ValueError("Some unknown issue"),
TFLiteCompatibilityInfo(
- compatible=False,
+ status=TFLiteCompatibilityStatus.UNKNOWN_ERROR,
+ conversion_exception=err,
+ ),
+ ),
+ (
+ err := ValueError("Unable to restore custom object"),
+ TFLiteCompatibilityInfo(
+ status=TFLiteCompatibilityStatus.MODEL_WITH_CUSTOM_OP_ERROR,
+ conversion_exception=err,
+ ),
+ ),
+ (
+ err := FileNotFoundError("Op type not registered"),
+ TFLiteCompatibilityInfo(
+ status=TFLiteCompatibilityStatus.MODEL_WITH_CUSTOM_OP_ERROR,
conversion_exception=err,
),
),