From f3d737ea14eabffede935cb418611b1f624e180a Mon Sep 17 00:00:00 2001
From: Andreas Nevalainen
Date: Fri, 25 Sep 2020 14:12:43 +0200
Subject: MLBEDSW-2811: Add rescaling to ReLus with different scaling

Rescaling ops are added to ReLus with differing IFM/OFM scaling; if the
IFM/OFM is not 4D, it is reshaped to 4D first.

Change-Id: I631d44fc8a51fb476b9f62ef90eda26eef3d35f3
Signed-off-by: Andreas Nevalainen
---
 ethosu/vela/graph_optimiser.py | 9 +++++++++
 1 file changed, 9 insertions(+)

diff --git a/ethosu/vela/graph_optimiser.py b/ethosu/vela/graph_optimiser.py
index e5fbc25f..68473307 100644
--- a/ethosu/vela/graph_optimiser.py
+++ b/ethosu/vela/graph_optimiser.py
@@ -657,6 +657,15 @@ def fixup_relus_with_differing_ifm_ofm_scaling(op, arch):
             relu_fused_op.attrs["fused_activation_function"] = op.type
             # Tidy up and assign the ifm and ofm to the new op
             ifm.consumer_list.remove(op)
+
+            # If the IFM/OFM is not 4D, reshape it to a 4D full shape
+            if len(ifm.shape) < 4:
+                ifm_shaped = create_reshape_tensor(ifm, full_shape(4, ifm.shape, 1))
+                ifm = ifm_shaped
+            if len(ofm.shape) < 4:
+                ofm_shaped = create_reshape_tensor(ofm, full_shape(4, ofm.shape, 1), False)
+                ofm = ofm_shaped
+
             relu_fused_op.add_input_tensor(ifm)
             relu_fused_op.set_output_tensor(ofm)
             op = relu_fused_op
--
cgit v1.2.1
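The hunk above relies on full_shape(4, shape, 1) to pad a tensor shape up
to rank 4 by prepending ones, so that, for example, a 2D [10, 16] IFM
becomes [1, 1, 10, 16] before the rescaling op consumes it. A minimal
standalone sketch of that padding behaviour follows; pad_shape_to_rank is
an illustrative name for this note, not Vela's API.

    # Minimal sketch of the padding done by full_shape(4, shape, 1),
    # assuming it left-pads with the fill value up to the target rank.
    def pad_shape_to_rank(rank, shape, fill=1):
        # Left-pad `shape` with `fill` until it has `rank` dimensions.
        return [fill] * max(0, rank - len(shape)) + list(shape)

    # Only shapes below rank 4 need padding, mirroring the
    # `if len(ifm.shape) < 4` guard in the patch above.
    assert pad_shape_to_rank(4, [10, 16]) == [1, 1, 10, 16]
    assert pad_shape_to_rank(4, [8, 8, 3]) == [1, 8, 8, 3]
    assert pad_shape_to_rank(4, [1, 2, 3, 4]) == [1, 2, 3, 4]

Prepending rather than appending the ones keeps the innermost dimension in
place, which matches the channels-last (NHWC-style) layout the surrounding
graph optimiser code works with.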