Diffstat (limited to 'src/mlia/target/cortex_a')
-rw-r--r--  src/mlia/target/cortex_a/advice_generation.py | 53
-rw-r--r--  src/mlia/target/cortex_a/data_analysis.py     | 36
-rw-r--r--  src/mlia/target/cortex_a/reporters.py          | 54
3 files changed, 11 insertions(+), 132 deletions(-)
diff --git a/src/mlia/target/cortex_a/advice_generation.py b/src/mlia/target/cortex_a/advice_generation.py
index 98e8c06..1011d6c 100644
--- a/src/mlia/target/cortex_a/advice_generation.py
+++ b/src/mlia/target/cortex_a/advice_generation.py
@@ -7,11 +7,13 @@ from mlia.core.advice_generation import advice_category
from mlia.core.advice_generation import FactBasedAdviceProducer
from mlia.core.common import AdviceCategory
from mlia.core.common import DataItem
-from mlia.target.cortex_a.data_analysis import ModelHasCustomOperators
+from mlia.target.common.reporters import handle_model_is_not_tflite_compatible_common
+from mlia.target.common.reporters import handle_tflite_check_failed_common
+from mlia.target.common.reporters import ModelHasCustomOperators
+from mlia.target.common.reporters import ModelIsNotTFLiteCompatible
+from mlia.target.common.reporters import TFLiteCompatibilityCheckFailed
from mlia.target.cortex_a.data_analysis import ModelIsCortexACompatible
from mlia.target.cortex_a.data_analysis import ModelIsNotCortexACompatible
-from mlia.target.cortex_a.data_analysis import ModelIsNotTFLiteCompatible
-from mlia.target.cortex_a.data_analysis import TFLiteCompatibilityCheckFailed
class CortexAAdviceProducer(FactBasedAdviceProducer):
@@ -88,43 +90,7 @@ class CortexAAdviceProducer(FactBasedAdviceProducer):
self, data_item: ModelIsNotTFLiteCompatible
) -> None:
"""Advice for TensorFlow Lite compatibility."""
- if data_item.flex_ops:
- self.add_advice(
- [
- "The following operators are not natively "
- "supported by TensorFlow Lite: "
- f"{', '.join(data_item.flex_ops)}.",
- "Using select TensorFlow operators in TensorFlow Lite model "
- "requires special initialization of TFLiteConverter and "
- "TensorFlow Lite run-time.",
- "Please refer to the TensorFlow documentation for more "
- "details: https://www.tensorflow.org/lite/guide/ops_select",
- "Note, such models are not supported by the ML Inference Advisor.",
- ]
- )
-
- if data_item.custom_ops:
- self.add_advice(
- [
- "The following operators appear to be custom and not natively "
- "supported by TensorFlow Lite: "
- f"{', '.join(data_item.custom_ops)}.",
- "Using custom operators in TensorFlow Lite model "
- "requires special initialization of TFLiteConverter and "
- "TensorFlow Lite run-time.",
- "Please refer to the TensorFlow documentation for more "
- "details: https://www.tensorflow.org/lite/guide/ops_custom",
- "Note, such models are not supported by the ML Inference Advisor.",
- ]
- )
-
- if not data_item.flex_ops and not data_item.custom_ops:
- self.add_advice(
- [
- "Model could not be converted into TensorFlow Lite format.",
- "Please refer to the table for more details.",
- ]
- )
+ handle_model_is_not_tflite_compatible_common(self, data_item)
@produce_advice.register
@advice_category(AdviceCategory.COMPATIBILITY)
@@ -132,12 +98,7 @@ class CortexAAdviceProducer(FactBasedAdviceProducer):
self, _data_item: TFLiteCompatibilityCheckFailed
) -> None:
"""Advice for the failed TensorFlow Lite compatibility checks."""
- self.add_advice(
- [
- "Model could not be converted into TensorFlow Lite format.",
- "Please refer to the table for more details.",
- ]
- )
+ handle_tflite_check_failed_common(self, _data_item)
@produce_advice.register
@advice_category(AdviceCategory.COMPATIBILITY)
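
The bodies of the new common helpers are not part of this diff. Below is a minimal sketch of what handle_model_is_not_tflite_compatible_common and handle_tflite_check_failed_common in mlia/target/common/reporters.py might contain, reconstructed from the cortex_a advice text removed above; the exact signatures are an assumption. The Fact classes referenced in the annotations are covered by the sketch after the data_analysis.py diff.

# Hypothetical sketch of the shared advice helpers assumed to live in
# mlia/target/common/reporters.py, mirroring the cortex_a code removed above.
from __future__ import annotations

from mlia.core.advice_generation import FactBasedAdviceProducer


def handle_model_is_not_tflite_compatible_common(
    producer: FactBasedAdviceProducer, data_item: "ModelIsNotTFLiteCompatible"
) -> None:
    """Advise on models that fail TensorFlow Lite conversion."""
    if data_item.flex_ops:
        producer.add_advice(
            [
                "The following operators are not natively supported by "
                f"TensorFlow Lite: {', '.join(data_item.flex_ops)}.",
                "Using select TensorFlow operators requires special "
                "initialization of TFLiteConverter and the TensorFlow Lite "
                "run-time: https://www.tensorflow.org/lite/guide/ops_select",
                "Such models are not supported by the ML Inference Advisor.",
            ]
        )

    if data_item.custom_ops:
        producer.add_advice(
            [
                "The following operators appear to be custom and not natively "
                f"supported by TensorFlow Lite: {', '.join(data_item.custom_ops)}.",
                "Using custom operators requires special initialization of "
                "TFLiteConverter and the TensorFlow Lite run-time: "
                "https://www.tensorflow.org/lite/guide/ops_custom",
                "Such models are not supported by the ML Inference Advisor.",
            ]
        )

    if not data_item.flex_ops and not data_item.custom_ops:
        producer.add_advice(
            [
                "Model could not be converted into TensorFlow Lite format.",
                "Please refer to the table for more details.",
            ]
        )


def handle_tflite_check_failed_common(
    producer: FactBasedAdviceProducer, _data_item: "TFLiteCompatibilityCheckFailed"
) -> None:
    """Advise when the TensorFlow Lite compatibility check itself fails."""
    producer.add_advice(
        [
            "Model could not be converted into TensorFlow Lite format.",
            "Please refer to the table for more details.",
        ]
    )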
diff --git a/src/mlia/target/cortex_a/data_analysis.py b/src/mlia/target/cortex_a/data_analysis.py
index 089c1a2..3161618 100644
--- a/src/mlia/target/cortex_a/data_analysis.py
+++ b/src/mlia/target/cortex_a/data_analysis.py
@@ -12,6 +12,7 @@ from mlia.core.common import DataItem
from mlia.core.data_analysis import Fact
from mlia.core.data_analysis import FactExtractor
from mlia.nn.tensorflow.tflite_compat import TFLiteCompatibilityInfo
+from mlia.target.common.reporters import analyze_tflite_compatibility_common
from mlia.target.cortex_a.operators import CortexACompatibilityInfo
@@ -64,22 +65,7 @@ class CortexADataAnalyzer(FactExtractor):
@analyze_data.register
def analyze_tflite_compatibility(self, data_item: TFLiteCompatibilityInfo) -> None:
"""Analyze TensorFlow Lite compatibility information."""
- if data_item.compatible:
- return
-
- if data_item.conversion_failed_with_errors:
- self.add_fact(
- ModelIsNotTFLiteCompatible(
- custom_ops=data_item.required_custom_ops,
- flex_ops=data_item.required_flex_ops,
- )
- )
-
- if data_item.check_failed_with_unknown_error:
- self.add_fact(TFLiteCompatibilityCheckFailed())
-
- if data_item.conversion_failed_for_model_with_custom_ops:
- self.add_fact(ModelHasCustomOperators())
+ analyze_tflite_compatibility_common(self, data_item)
@dataclass
@@ -107,21 +93,3 @@ class ModelIsNotCortexACompatible(CortexACompatibility):
unsupported_ops: set[str]
activation_func_support: dict[str, ActivationFunctionSupport]
-
-
-@dataclass
-class ModelIsNotTFLiteCompatible(Fact):
- """Model could not be converted into TensorFlow Lite format."""
-
- custom_ops: list[str] | None = None
- flex_ops: list[str] | None = None
-
-
-@dataclass
-class TFLiteCompatibilityCheckFailed(Fact):
- """TensorFlow Lite compatibility check failed by unknown reason."""
-
-
-@dataclass
-class ModelHasCustomOperators(Fact):
- """Model could not be loaded because it contains custom ops."""
diff --git a/src/mlia/target/cortex_a/reporters.py b/src/mlia/target/cortex_a/reporters.py
index 65d7906..fc80c9f 100644
--- a/src/mlia/target/cortex_a/reporters.py
+++ b/src/mlia/target/cortex_a/reporters.py
@@ -17,6 +17,7 @@ from mlia.core.reporting import Report
from mlia.core.reporting import ReportItem
from mlia.core.reporting import Table
from mlia.nn.tensorflow.tflite_compat import TFLiteCompatibilityInfo
+from mlia.target.common.reporters import report_tflite_compatiblity
from mlia.target.cortex_a.config import CortexAConfiguration
from mlia.target.cortex_a.operators import CortexACompatibilityInfo
from mlia.utils.console import style_improvement
@@ -34,57 +35,6 @@ def report_target(target_config: CortexAConfiguration) -> Report:
)
-def report_tflite_compatiblity(compat_info: TFLiteCompatibilityInfo) -> Report:
- """Generate report for the TensorFlow Lite compatibility information."""
- if compat_info.conversion_errors:
- return Table(
- [
- Column("#", only_for=["plain_text"]),
- Column("Operator", alias="operator"),
- Column(
- "Operator location",
- alias="operator_location",
- fmt=Format(wrap_width=25),
- ),
- Column("Error code", alias="error_code"),
- Column(
- "Error message", alias="error_message", fmt=Format(wrap_width=25)
- ),
- ],
- [
- (
- index + 1,
- err.operator,
- ", ".join(err.location),
- err.code.name,
- err.message,
- )
- for index, err in enumerate(compat_info.conversion_errors)
- ],
- name="TensorFlow Lite conversion errors",
- alias="tensorflow_lite_conversion_errors",
- )
-
- return Table(
- columns=[
- Column("Reason", alias="reason"),
- Column(
- "Exception details",
- alias="exception_details",
- fmt=Format(wrap_width=40),
- ),
- ],
- rows=[
- (
- "TensorFlow Lite compatibility check failed with exception",
- str(compat_info.conversion_exception),
- ),
- ],
- name="TensorFlow Lite compatibility errors",
- alias="tflite_compatibility",
- )
-
-
def report_cortex_a_operators(op_compat: CortexACompatibilityInfo) -> Report:
"""Generate report for the operators."""
return Table(
@@ -132,7 +82,7 @@ def cortex_a_formatters(data: Any) -> Callable[[Any], Report]:
return report_target
if isinstance(data, TFLiteCompatibilityInfo):
- return report_tflite_compatiblity
+ return report_tflite_compatiblity # type: ignore
if isinstance(data, CortexACompatibilityInfo):
return report_cortex_a_operators
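
A short usage sketch of how the dispatcher now resolves the shared formatter for TFLiteCompatibilityInfo. The "# type: ignore" on the return statement above presumably silences a mismatch between the shared helper's signature and the Callable[[Any], Report] return type, but that is an assumption; the helper name below (build_compat_report) is hypothetical.

# Hypothetical caller: resolve the formatter for a data item and build a report.
from typing import Any, Callable

from mlia.core.reporting import Report
from mlia.nn.tensorflow.tflite_compat import TFLiteCompatibilityInfo
from mlia.target.cortex_a.reporters import cortex_a_formatters


def build_compat_report(compat_info: TFLiteCompatibilityInfo) -> Report:
    """Build the TensorFlow Lite compatibility report via the dispatcher."""
    formatter: Callable[[Any], Report] = cortex_a_formatters(compat_info)
    # After this change, the returned callable is the shared
    # report_tflite_compatiblity from mlia.target.common.reporters.
    return formatter(compat_info)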