author     Patrik Gustavsson <patrik.gustavsson@arm.com>  2021-09-09 09:48:14 +0200
committer  Patrik Gustavsson <patrik.gustavsson@arm.com>  2021-09-09 14:51:59 +0200
commit     76b794e59b4ced500b0a0acb660e99caa78dbf90 (patch)
tree       0916d1129a4d7ebf3c0002963fe0a2454d88bafd
parent     e2bfa7ec7495591139e19f41397054a44a0348bc (diff)
download   ethos-u-vela-76b794e59b4ced500b0a0acb660e99caa78dbf90.tar.gz
TOSA: Support for standalone CLAMP/RELU
Added support for standalone CLAMP/RELU.

Limited to:
  - Rank <= 4
  - N = 1 if Rank = 4

Signed-off-by: Patrik Gustavsson <patrik.gustavsson@arm.com>
Change-Id: If1a32fb330ce6c67c09ec4b554b4a0688444d5f0
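(For context, the "Rank <= 4, N = 1 if Rank = 4" restriction amounts to a shape check along the following lines. This is a minimal illustrative sketch, not Vela's actual supported-operator check; the helper name and the NHWC shape convention are assumptions.)

def clamp_relu_supported(ifm_shape):
    # Illustrative only: rank must be <= 4, and a rank-4 (NHWC) tensor
    # must have batch N == 1, per the restriction stated above.
    rank = len(ifm_shape)
    return rank <= 4 and (rank < 4 or ifm_shape[0] == 1)

assert clamp_relu_supported([1, 8, 8, 16])          # rank 4, N == 1: ok
assert not clamp_relu_supported([2, 8, 8, 16])      # rank 4, N != 1: rejected
assert not clamp_relu_supported([1, 1, 8, 8, 16])   # rank 5: rejected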
-rw-r--r--  ethosu/vela/tosa_graph_optimiser.py  |  18
1 file changed, 0 insertions, 18 deletions
diff --git a/ethosu/vela/tosa_graph_optimiser.py b/ethosu/vela/tosa_graph_optimiser.py
index 49fc997..bade4a9 100644
--- a/ethosu/vela/tosa_graph_optimiser.py
+++ b/ethosu/vela/tosa_graph_optimiser.py
@@ -29,7 +29,6 @@ from .graph_optimiser_util import needed_total_padding
 from .graph_optimiser_util import set_ifm_ofm_op_shapes
 from .graph_optimiser_util import set_tensor_equivalence
 from .operation import ExplicitScaling
-from .operation import NpuBlockType
 from .operation import Op
 from .operation_util import create_add_nop
 from .operation_util import create_avgpool_nop
@@ -303,20 +302,6 @@ def rewrite_activation(op, arch, nng):
         return op
 
     ifm = op.ifm
-    prev_op = ifm.ops[0]
-
-    # Note: the below checks on prev_op require that a first optimize pass on the full graph has been performed
-    fuseable = (
-        prev_op.run_on_npu
-        and prev_op.type.npu_block_type != NpuBlockType.Default
-        and len(ifm.ops) == 1
-        and len(prev_op.outputs[0].consumers()) == 1
-        and prev_op.activation is None
-    )
-    if not fuseable:
-        print("Warning: relu like op will not be possible to fuse, currently not supported")
-        assert False
-
     zp = ifm.quantization.zero_point if ifm.quantization.zero_point else 0
     if op.ofm.quantization.zero_point is None:
         op.ofm.quantization.zero_point = zp
@@ -326,9 +311,6 @@ def rewrite_activation(op, arch, nng):
         op.attrs["max"] = op.attrs["max_int"] - zp
     elif op.type == Op.ReluN:
         op.attrs["max"] = op.attrs["max_int"] - zp
-    else:
-        print("Warning: Unknown TOSA activation Op")
-        assert False
 
     return op
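(For readers following the retained logic: the hunks above keep the zero-point adjustment of the activation bounds. Below is a minimal standalone sketch of that arithmetic; the function adjust_clamp_bounds is illustrative only, while the names min_int, max_int, and zp follow the diff.)

def adjust_clamp_bounds(min_int, max_int, zp):
    # The CLAMP attributes arrive in the quantized integer domain;
    # subtracting the IFM zero point re-expresses them relative to
    # quantized zero, matching the "op.attrs['max_int'] - zp" lines
    # kept in the diff.
    return min_int - zp, max_int - zp

# Example: an IFM quantized with zero_point = -128; a CLAMP of
# [-128, 127] in the integer domain becomes [0, 255] after the shift.
print(adjust_clamp_bounds(-128, 127, -128))  # (0, 255)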