about summary refs log tree commit diff
path: root/ethosu/vela/supported_operators.py
diff options
context:
space:
mode:
authorDwight Lidman <dwight.lidman@arm.com>2020-04-27 11:15:12 +0200
committerTim Hall <tim.hall@arm.com>2020-06-18 17:53:52 +0100
commitf995db7b503eb2e5690972d95f40b96199c5555c (patch)
treef6cb2451a8a0a2c2be8a8b13c2c33d0613c0e49d /ethosu/vela/supported_operators.py
parentd67c0aaccd91f4be3ea76f69fa063301ffc73aa3 (diff)
downloadethos-u-vela-f995db7b503eb2e5690972d95f40b96199c5555c.tar.gz
MLBEDSW-2001: Fix unary elementwise operator regression
Change-Id: I8f109cd148aaa17c18a97068fad52419c8d9d12e
Signed-off-by: Dwight Lidman <dwight.lidman@arm.com>
Diffstat (limited to 'ethosu/vela/supported_operators.py')
-rw-r--r--ethosu/vela/supported_operators.py22
1 file changed, 13 insertions, 9 deletions
diff --git a/ethosu/vela/supported_operators.py b/ethosu/vela/supported_operators.py
index 23135f8a..fb3061f7 100644
--- a/ethosu/vela/supported_operators.py
+++ b/ethosu/vela/supported_operators.py
@@ -45,9 +45,10 @@ class SupportedOperators:
# RNN/LSTM/GRU
| set(("BlockLSTM"))
)
- self.elem_wise_main_ops = set(
+ self.unary_elem_wise_main_ops = set(("LeakyRelu", "Abs"))
+ self.binary_elem_wise_main_ops = set(
(
- # element-wise
+ # binary element-wise
"AddAct",
"MulAct",
"SubAct",
@@ -61,6 +62,7 @@ class SupportedOperators:
"Maximum",
)
)
+ self.elem_wise_main_ops = self.binary_elem_wise_main_ops | self.unary_elem_wise_main_ops
self.activation_ops = set(
("QuantizedRelu", "QuantizedRelu1", "QuantizedRelu6", "Relu", "Relu6", "ReluN1To1", "Sigmoid", "Tanh")
)
@@ -223,16 +225,18 @@ class SupportedOperators:
return False
# check batch size
- if (len(ifm_tensor.shape) > 2 and ifm_tensor.shape[0] != 1) or (
- len(ifm2_tensor.shape) > 2 and ifm2_tensor.shape[0] != 1
- ):
- return False
+ if len(ifm_tensor.shape) > 2 and ifm_tensor.shape[0] != 1:
+ return False
+ if op.type in self.binary_elem_wise_main_ops: # if op type is unary, ifm2_tensor is None
+ if len(ifm2_tensor.shape) > 2 and ifm2_tensor.shape[0] != 1:
+ return False
# check scalar size
- if (hasattr(ifm_tensor.values, "__len__") and len(ifm_tensor.values) > 1) or (
- hasattr(ifm2_tensor.values, "__len__") and len(ifm2_tensor.values) > 1
- ):
+ if hasattr(ifm_tensor.values, "__len__") and len(ifm_tensor.values) > 1:
return False
+ if op.type in self.binary_elem_wise_main_ops: # same as above
+ if hasattr(ifm2_tensor.values, "__len__") and len(ifm2_tensor.values) > 1:
+ return False
return True
def check_memory_only_restrictions(self, op):