path: root/ethosu/vela/high_level_command_stream_generator.py
author    Patrik Gustavsson <patrik.gustavsson@arm.com>  2021-02-08 10:13:48 +0100
committer Patrik Gustavsson <patrik.gustavsson@arm.com>  2021-02-09 15:31:23 +0100
commit    138d47f5a3e87d294b3714ae799ccad8ac9111bd (patch)
tree      98bf917b14a1dd3ee5bbadc8116e543dcc37dfd3 /ethosu/vela/high_level_command_stream_generator.py
parent    da2b0030220e87788573a724979626aa92afd13e (diff)
MLBEDSW-3774 Removed ConcatSliceWrite
- Removed ConcatSliceWrite from the optimised graph. It is always executed as an AvgPool, which is equivalent to the behaviour before this patch.
- Added a copy op to enable removal of more reshapes. Subgraph (sg) inputs/outputs need to remain, so when the input and output of a Reshape are sg inputs/outputs, a copy op is inserted so that the reshape can be removed.

Signed-off-by: Patrik Gustavsson <patrik.gustavsson@arm.com>
Change-Id: Id7be9966673ae34499e8518a5544104493fe326b
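To illustrate the copy-op idea from the second point, here is a minimal, self-contained sketch. The helper and attribute names (make_memcpy_op, input_tensors, output_tensors) are hypothetical stand-ins, not the actual Vela data structures, and the commit itself implements this inside the graph optimiser rather than as a standalone function.

# Minimal sketch only: `sg`, `make_memcpy_op` and the tensor lists are
# hypothetical stand-ins, not the real Vela API.
def replace_boundary_reshape_with_copy(sg, reshape_op, make_memcpy_op):
    ifm = reshape_op.inputs[0]
    ofm = reshape_op.outputs[0]
    # Subgraph inputs/outputs must remain, so the reshape cannot simply be
    # bypassed; a memory-only copy (executed as an AvgPool) bridges them instead.
    if ifm in sg.input_tensors and ofm in sg.output_tensors:
        copy_op = make_memcpy_op(ifm, ofm)
        sg.ops.remove(reshape_op)
        sg.ops.append(copy_op)
        return copy_op
    # Otherwise the reshape can be removed by rewiring producers and consumers.
    return None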
Diffstat (limited to 'ethosu/vela/high_level_command_stream_generator.py')
-rw-r--r--  ethosu/vela/high_level_command_stream_generator.py | 4
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/ethosu/vela/high_level_command_stream_generator.py b/ethosu/vela/high_level_command_stream_generator.py
index 66613ba8..97b42aeb 100644
--- a/ethosu/vela/high_level_command_stream_generator.py
+++ b/ethosu/vela/high_level_command_stream_generator.py
@@ -109,7 +109,7 @@ def generate_high_level_command_stream_for_pass(strat, passes, block_configs, id
     concat_offset = 0
     for op in ps.ops:
-        if op.type == Op.ConcatSliceWrite:
+        if op.attrs.get("concat_axis", None) is not None:
             concat_axis = op.attrs["concat_axis"]
             concat_start = op.attrs["concat_start"]
             concat_end = op.attrs["concat_end"]
@@ -117,7 +117,7 @@ def generate_high_level_command_stream_for_pass(strat, passes, block_configs, id
             ofm_start[concat_axis] = concat_start
             ofm_end[concat_axis] = concat_end
             concat_offset = concat_start
-            ps.primary_op.memory_function = op.type
+            ps.primary_op.memory_function = Op.ConcatSliceWrite
         elif op.type.is_relu_op() or op.type in (Op.Tanh, Op.Sigmoid):
             ps.primary_op.activation = create_activation_function(op.type)
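Since the dedicated ConcatSliceWrite op type no longer appears in the graph, the pass recognises a concat write by its attributes and records the memory function explicitly instead of copying op.type. A self-contained toy example of the equivalent attribute-based check (plain dicts standing in for ops, not Vela's Operation class):

# Toy example: plain dicts stand in for op.attrs; not the Vela API.
def find_concat_slice(op_attrs_list):
    """Return (axis, start, end) for the first op carrying concat attributes."""
    for attrs in op_attrs_list:
        if attrs.get("concat_axis", None) is not None:
            return attrs["concat_axis"], attrs["concat_start"], attrs["concat_end"]
    return None

ops = [{"padding": "SAME"}, {"concat_axis": 3, "concat_start": 0, "concat_end": 16}]
assert find_concat_slice(ops) == (3, 0, 16)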