From 76b794e59b4ced500b0a0acb660e99caa78dbf90 Mon Sep 17 00:00:00 2001
From: Patrik Gustavsson
Date: Thu, 9 Sep 2021 09:48:14 +0200
Subject: TOSA: Support for standalone CLAMP/RELU

Added support for standalone CLAMP/RELU.
Limited to:
-Rank <= 4
-N = 1 if Rank = 4

Signed-off-by: Patrik Gustavsson
Change-Id: If1a32fb330ce6c67c09ec4b554b4a0688444d5f0
---
 ethosu/vela/tosa_graph_optimiser.py | 18 ------------------
 1 file changed, 18 deletions(-)

diff --git a/ethosu/vela/tosa_graph_optimiser.py b/ethosu/vela/tosa_graph_optimiser.py
index 49fc997d..bade4a97 100644
--- a/ethosu/vela/tosa_graph_optimiser.py
+++ b/ethosu/vela/tosa_graph_optimiser.py
@@ -29,7 +29,6 @@ from .graph_optimiser_util import needed_total_padding
 from .graph_optimiser_util import set_ifm_ofm_op_shapes
 from .graph_optimiser_util import set_tensor_equivalence
 from .operation import ExplicitScaling
-from .operation import NpuBlockType
 from .operation import Op
 from .operation_util import create_add_nop
 from .operation_util import create_avgpool_nop
@@ -303,20 +302,6 @@ def rewrite_activation(op, arch, nng):
         return op
 
     ifm = op.ifm
-    prev_op = ifm.ops[0]
-
-    # Note: the below checks on prev_op require that a first optimize pass on the full graph has been performed
-    fuseable = (
-        prev_op.run_on_npu
-        and prev_op.type.npu_block_type != NpuBlockType.Default
-        and len(ifm.ops) == 1
-        and len(prev_op.outputs[0].consumers()) == 1
-        and prev_op.activation is None
-    )
-    if not fuseable:
-        print("Warning: relu like op will not be possible to fuse, currently not supported")
-        assert False
-
     zp = ifm.quantization.zero_point if ifm.quantization.zero_point else 0
     if op.ofm.quantization.zero_point is None:
         op.ofm.quantization.zero_point = zp
@@ -326,9 +311,6 @@ def rewrite_activation(op, arch, nng):
         op.attrs["max"] = op.attrs["max_int"] - zp
     elif op.type == Op.ReluN:
         op.attrs["max"] = op.attrs["max_int"] - zp
-    else:
-        print("Warning: Unknown TOSA activation Op")
-        assert False
 
     return op
-- 
cgit v1.2.1
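
After this change, rewrite_activation no longer requires the CLAMP/RELU to be fuseable with a preceding NPU op; the surviving code only rebases the integer clamp bounds by the IFM zero point. Below is a minimal, standalone sketch of that arithmetic for context; the helper name and the example values are illustrative only and are not taken from the Vela codebase.

# Sketch of the zero-point rebasing kept by this patch, mirroring
#   op.attrs["min"] = op.attrs["min_int"] - zp
#   op.attrs["max"] = op.attrs["max_int"] - zp
# where zp falls back to 0 when the IFM zero point is None/0.
def rebase_clamp_bounds(min_int, max_int, ifm_zero_point):
    zp = ifm_zero_point if ifm_zero_point else 0
    return min_int - zp, max_int - zp

# Example (illustrative values): a CLAMP to [0, 255] on a tensor quantized
# with zero point 10 ends up with attribute bounds (-10, 245).
print(rebase_clamp_bounds(0, 255, 10))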