From 1a7527cd4ad56b49f120b10dc5e87a1e8f5a8122 Mon Sep 17 00:00:00 2001
From: Fredrik Svedberg
Date: Mon, 13 Sep 2021 15:52:16 +0200
Subject: MLBEDSW-5052 Fix RELU scaling

Fixed scaling for RELUs with different IFM/OFM scaling.

Signed-off-by: Fredrik Svedberg
Change-Id: I0ac96326b3960c0fb025b885e06a259d24b2e684
---
 ethosu/vela/tflite_graph_optimiser.py | 5 +++++
 1 file changed, 5 insertions(+)

diff --git a/ethosu/vela/tflite_graph_optimiser.py b/ethosu/vela/tflite_graph_optimiser.py
index 7526f46a..15b82c7e 100644
--- a/ethosu/vela/tflite_graph_optimiser.py
+++ b/ethosu/vela/tflite_graph_optimiser.py
@@ -42,6 +42,7 @@ from .numeric_util import clamp_sigmoid
 from .numeric_util import full_shape
 from .numeric_util import round_away_zero
 from .operation import create_activation_function
+from .operation import ExplicitScaling
 from .operation import NpuBlockType
 from .operation import Op
 from .operation import Operation
@@ -628,6 +629,10 @@ def fixup_relus_with_differing_ifm_ofm_scaling(op, arch, nng):
         relu_fused_op = create_avgpool_nop(op.name + "_avgpool")
         # And fuse the original activation function to it
         relu_fused_op.activation = create_activation_function(op.type)
+        # Add explicit rescaling
+        rescale = ifm.quantization.scale_f32 / ofm.quantization.scale_f32
+        multiplier, shift = scaling.quantise_scale(rescale)
+        relu_fused_op.rescale = ExplicitScaling(False, [shift], [multiplier])
         # Tidy up and assign the ifm and ofm to the new op
         ifm.consumer_list.remove(op)
-- 
cgit v1.2.1