# SPDX-FileCopyrightText: Copyright 2022, Arm Limited and/or its affiliates.
# SPDX-License-Identifier: Apache-2.0
"""Cortex-A advice generation."""
from functools import singledispatchmethod
from mlia.core.advice_generation import advice_category
from mlia.core.advice_generation import FactBasedAdviceProducer
from mlia.core.common import AdviceCategory
from mlia.core.common import DataItem
from mlia.devices.cortexa.data_analysis import ModelIsCortexACompatible
from mlia.devices.cortexa.data_analysis import ModelIsNotCortexACompatible
from mlia.devices.cortexa.data_analysis import ModelIsNotTFLiteCompatible
class CortexAAdviceProducer(FactBasedAdviceProducer):
    """Produce user-facing advice from Cortex-A compatibility facts.

    Handlers are registered on :meth:`produce_advice` via
    ``singledispatchmethod`` and keyed on the concrete fact type; the
    ``advice_category`` decorator restricts each handler to the relevant
    advice categories.
    """

    # Generic caveat appended to every compatibility verdict.
    cortex_a_disclaimer = (
        "Note that the provided compatibility information is general. "
        "At runtime individual operators in the given model might fall back to "
        "the TensorFlow Lite reference or might produce errors based on the "
        "specific parameters."
    )

    @singledispatchmethod
    def produce_advice(self, _data_item: DataItem) -> None:  # type: ignore
        """Produce advice."""

    @produce_advice.register
    @advice_category(AdviceCategory.ALL, AdviceCategory.OPERATORS)
    def handle_model_is_cortex_a_compatible(
        self, data_item: ModelIsCortexACompatible
    ) -> None:
        """Advice for a model that is fully Cortex-A compatible."""
        messages = [
            f"Model is fully compatible with {data_item.backend_info} for "
            "Cortex-A.",
            self.cortex_a_disclaimer,
        ]
        self.add_advice(messages)

    @produce_advice.register
    @advice_category(AdviceCategory.ALL, AdviceCategory.OPERATORS)
    def handle_model_is_not_cortex_a_compatible(
        self, data_item: ModelIsNotCortexACompatible
    ) -> None:
        """Advice for a model with Cortex-A incompatibilities."""
        unsupported = data_item.unsupported_ops
        if unsupported:
            # One bullet per operator that falls back to the TFLite runtime.
            op_listing = "\n".join(f"  - {op}" for op in unsupported)
            self.add_advice(
                [
                    "The following operators are not supported by "
                    f"{data_item.backend_info} and will fall back to the "
                    "TensorFlow Lite runtime:",
                    op_listing,
                ]
            )

        act_support = data_item.activation_func_support
        if act_support:
            # Nested bullets: operator, the unsupported activation it uses,
            # and the activations that would be supported instead.
            act_listing = "\n".join(
                f"  - {op}\n"
                f"    - Used unsupported: {act.used_unsupported}\n"
                f"    - Supported: {act.supported}"
                for op, act in act_support.items()
            )
            self.add_advice(
                [
                    "The fused activation functions of the following operators "
                    f"are not supported by {data_item.backend_info}. Please "
                    "consider using one of the supported activation functions "
                    "instead:",
                    act_listing,
                ]
            )

        # Always point the user at the detailed operator table.
        closing = [
            "Please, refer to the full table of operators above for more "
            "information.",
            self.cortex_a_disclaimer,
        ]
        self.add_advice(closing)

    @produce_advice.register
    @advice_category(AdviceCategory.ALL, AdviceCategory.OPERATORS)
    def handle_model_is_not_tflite_compatible(
        self, data_item: ModelIsNotTFLiteCompatible
    ) -> None:
        """Advice for a model that failed TensorFlow Lite conversion."""
        flex_ops = data_item.flex_ops
        custom_ops = data_item.custom_ops

        if flex_ops:
            self.add_advice(
                [
                    "The following operators are not natively "
                    "supported by TensorFlow Lite: "
                    f"{', '.join(flex_ops)}.",
                    "Please refer to the TensorFlow documentation for more details.",
                ]
            )

        if custom_ops:
            self.add_advice(
                [
                    "The following operators are custom and not natively "
                    "supported by TensorFlow Lite: "
                    f"{', '.join(custom_ops)}.",
                    "Please refer to the TensorFlow documentation for more details.",
                ]
            )

        if not flex_ops and not custom_ops:
            # Conversion failed without a specific operator to blame.
            self.add_advice(
                [
                    "Model could not be converted into TensorFlow Lite format.",
                    "Please refer to the table for more details.",
                ]
            )