Diffstat (limited to 'ethosu/vela/supported_operators.py')
-rw-r--r--   ethosu/vela/supported_operators.py   14
1 file changed, 14 insertions(+), 0 deletions(-)
diff --git a/ethosu/vela/supported_operators.py b/ethosu/vela/supported_operators.py
index deae75a2..f7dfec27 100644
--- a/ethosu/vela/supported_operators.py
+++ b/ethosu/vela/supported_operators.py
@@ -247,6 +247,9 @@ class SupportedOperators:
         # LeakyRelu specific checks:
         self.specific_constraints[Op.LeakyRelu].append(SupportedOperators.constraint_alpha_valid)
 
+        # FullyConnected specific checks:
+        self.specific_constraints[Op.FullyConnected].append(SupportedOperators.constraint_fc_output_2d)
+
     def is_operator_supported(self, op):
         ext_type = optype_to_builtintype(op.type)
         if op.type not in SupportedOperators.supported_operators:
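
Note: each callable appended to `specific_constraints` returns a `(valid, extra)` tuple, and `is_operator_supported` runs every check registered for the op's type. The following is a heavily simplified, self-contained sketch of that dispatch pattern with hypothetical stand-ins for Vela's `Op` enum and `Operation` objects; it is not the actual Vela implementation.

```python
# Simplified sketch of the constraint-dispatch pattern (hypothetical names).
from collections import defaultdict

specific_constraints = defaultdict(list)  # op type -> list of constraint checks


def constraint_fc_output_2d(op):
    "The output tensor(s) must have 2D shape"
    extra = [f"Tensor '{t.name}' is {len(t.shape)}D" for t in op.outputs if len(t.shape) != 2]
    return not extra, ", ".join(extra)


specific_constraints["FullyConnected"].append(constraint_fc_output_2d)


def is_operator_supported(op):
    # Run every check registered for this op type; report the docstring of any
    # failing constraint plus its per-tensor details.
    for constraint in specific_constraints[op.type]:
        valid, extra = constraint(op)
        if not valid:
            print(f"Warning: {op.type} '{op.name}' not supported: {constraint.__doc__}")
            if extra:
                print(f" - {extra}")
            return False
    return True
```
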
@@ -409,6 +412,17 @@ class SupportedOperators:
                 extra.append(tens.name)
         return valid, "The following tensor(s) have per-axis quantization parameters: " + ", ".join(extra)
 
+    @staticmethod
+    def constraint_fc_output_2d(op):
+        "The output tensor(s) must have 2D shape"
+        valid = True
+        extra = []
+        for tens in op.outputs:
+            if len(tens.shape) != 2:
+                valid = False
+                extra.append(f"Tensor '{tens.name}' is {len(tens.shape)}D")
+        return valid, ", ".join(extra)
+
     @classmethod
     @docstring_format_args([_optype_formatter(supported_fused_activations)])
     def constraint_faf(cls, op):
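
To see what the new constraint reports, here is a minimal, illustrative exercise of it using `types.SimpleNamespace` stand-ins rather than Vela's real `Operation`/`Tensor` classes; only the `outputs`, `shape`, and `name` attributes the check reads are mocked, and it assumes `SupportedOperators` is importable from `ethosu.vela.supported_operators` as in the diff path above.

```python
from types import SimpleNamespace

from ethosu.vela.supported_operators import SupportedOperators

# Mock just the attributes constraint_fc_output_2d reads: op.outputs,
# tens.shape and tens.name (hypothetical example shapes).
fc_2d = SimpleNamespace(outputs=[SimpleNamespace(name="ofm", shape=[1, 1001])])
fc_4d = SimpleNamespace(outputs=[SimpleNamespace(name="ofm", shape=[1, 1, 1, 1001])])

print(SupportedOperators.constraint_fc_output_2d(fc_2d))  # (True, '')
print(SupportedOperators.constraint_fc_output_2d(fc_4d))  # (False, "Tensor 'ofm' is 4D")
```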