diff options
author | Jonas Ohlsson <jonas.ohlsson@arm.com> | 2021-08-20 09:33:28 +0200 |
---|---|---|
committer | patrik.gustavsson <patrik.gustavsson@arm.com> | 2021-08-23 07:04:51 +0000 |
commit | 81942e9d59c1dcb1a9a54cb461f85bf582c7a3fd (patch) | |
tree | 8ecaa13f3419ffe86acad86cc0482bec92872416 /ethosu/vela/tflite_graph_optimiser.py | |
parent | cc34d5d33938b6cbd9f8b11883f12e8c3097b613 (diff) | |
download | ethos-u-vela-81942e9d59c1dcb1a9a54cb461f85bf582c7a3fd.tar.gz |
MLBEDSW-4913 Fix inception_v1/v3 output diff
Fix inception_v1/v3 output diffs.
The Squeeze operator is now removed in the graph optimisation step.
The Squeeze operator removes dimensions of size 1 from the tensor shape.
The memory layout is preserved.
Signed-off-by: Jonas Ohlsson <jonas.ohlsson@arm.com>
Change-Id: I4ceffcbb141af5ed50b0d1a9d1d67622e638c2a1
Diffstat (limited to 'ethosu/vela/tflite_graph_optimiser.py')
-rw-r--r-- | ethosu/vela/tflite_graph_optimiser.py | 14 |
1 file changed, 7 insertions, 7 deletions
diff --git a/ethosu/vela/tflite_graph_optimiser.py b/ethosu/vela/tflite_graph_optimiser.py index 29598032..6c85bb43 100644 --- a/ethosu/vela/tflite_graph_optimiser.py +++ b/ethosu/vela/tflite_graph_optimiser.py @@ -1061,8 +1061,8 @@ def convert_tanh_sigmoid_to_lut(op, arch, nng): return op -def remove_reshapes(op, arch): - if op.run_on_npu and op.type == Op.Reshape: +def remove_reshape_and_squeeze_ops(op, arch): + if op.run_on_npu and (op.type == Op.Reshape or op.type == Op.Squeeze): ofm = op.ofm ifm = op.ifm @@ -1073,11 +1073,11 @@ def remove_reshapes(op, arch): # or the reshape need to be replace with a NOP. return - # Check if Reshape ifm/ofm are network ifm/ofm + # Check if ifm/ofm are network ifm/ofm ifm_is_sg_ifm = ifm.ops[0].type in (Op.Placeholder, Op.SubgraphInput, Op.Const) ifm_is_sg_ofm = any(ifm_cons is None for ifm_cons in ifm.consumer_list) ofm_is_sg_ofm = any(ofm_cons is None for ofm_cons in ofm.consumer_list) - # Check if ifm/ofm is produced repectivly consumed by CPU + # Check if ifm/ofm is produced respectively consumed by CPU ifm_is_cpu_produced = any(ifm_prod is not None and not ifm_prod.run_on_npu for ifm_prod in op.ifm.ops) ofm_is_cpu_consumed = any(ofm_cons is not None and not ofm_cons.run_on_npu for ofm_cons in op.ofm.consumer_list) @@ -1097,7 +1097,7 @@ def remove_reshapes(op, arch): if cons_ifm == ifm: ifm_cons.set_input_tensor(ofm, ifm_idx) else: - # Bypassed Reshape by replacing ofm with ifm + # Bypassed by replacing ofm with ifm for cons in ofm.consumer_list: for ifm_idx, cons_ifm in enumerate(cons.inputs): if cons_ifm == ofm: @@ -1567,9 +1567,9 @@ def tflite_optimise_graph(nng, arch): nng, sg, arch, [], [fix_sg_input_output], rewrite_unsupported=False, ) - # Removal of reshapes + # Removal of reshapes and squeeze for sg in nng.subgraphs: - rewrite_graph.visit_graph_post_order(sg.output_tensors, arch, [], [remove_reshapes]) + rewrite_graph.visit_graph_post_order(sg.output_tensors, arch, [], [remove_reshape_and_squeeze_ops]) 
sg.refresh_after_modification() # Rewrite of operators |