diff options
author | Louis Verhaard <louis.verhaard@arm.com> | 2020-09-30 09:01:52 +0200 |
---|---|---|
committer | Louis Verhaard <louis.verhaard@arm.com> | 2020-10-08 16:29:29 +0200 |
commit | aee5d7537ff81ffda5ba222721b72f914ce50fb8 (patch) | |
tree | 495b9dfff2a188c6916f8ca2e390ee88f7da8ccc /ethosu/vela/extract_npu_subgraphs.py | |
parent | 36ad73a0fb46d3f844845c97c56d92de2a7a9b3d (diff) | |
download | ethos-u-vela-aee5d7537ff81ffda5ba222721b72f914ce50fb8.tar.gz |
MLBEDSW-3148: Refactor Operation
- op.type is now an enum instead of a string
- Removed unused operator codes
- Refactored some attributes like npu_block_type, fused_activation_function
- Refactored operator index calculation
- Refactored a number of operator sets
Change-Id: I641f65ee375794b7aec42abc0664251ae37d78e8
Signed-off-by: Louis Verhaard <louis.verhaard@arm.com>
Diffstat (limited to 'ethosu/vela/extract_npu_subgraphs.py')
-rw-r--r-- | ethosu/vela/extract_npu_subgraphs.py | 13 |
1 file changed, 7 insertions, 6 deletions
diff --git a/ethosu/vela/extract_npu_subgraphs.py b/ethosu/vela/extract_npu_subgraphs.py index c0430b5d..e08392dc 100644 --- a/ethosu/vela/extract_npu_subgraphs.py +++ b/ethosu/vela/extract_npu_subgraphs.py @@ -25,17 +25,19 @@ import numpy as np from .nn_graph import Pass from .nn_graph import PassPlacement from .nn_graph import Subgraph +from .operation import CustomType from .operation import NpuBlockType +from .operation import Op from .operation import Operation def make_npu_call_op_pass(npu_subgraph): - op = Operation("NpuOp", "call_" + npu_subgraph.name) + op = Operation(Op.CustomNpuOp, "call_" + npu_subgraph.name) op.attrs["subgraph"] = npu_subgraph + op.attrs["custom_type"] = CustomType.NpuOp ps = Pass(op.name, PassPlacement.MemoryOnly, False, NpuBlockType.Default) ps.ops = [op] ps.primary_op = op - op.attrs["npu_block_type"] = ps.npu_block_type op.scheduled_pass = ps # Inputs and outputs filled in later as we cut the graphs @@ -69,14 +71,13 @@ def switch_tensor_for_op(op, orig_tens, new_tens): def rewrite_tensor_cpu_producer_npu_consumers( orig_tens, call_ps, startup_init_ps, npu_subgraph, cpu_subgraph, subgraph_for_pass ): - is_const = orig_tens.ops[0].type == "Const" + is_const = orig_tens.ops[0].type == Op.Const new_tens = orig_tens.clone("_npu") - op_type = "SubgraphInput" + op_type = Op.SubgraphInput if is_const: - op_type = "Const" + op_type = Op.Const op = Operation(op_type, orig_tens.name + "_input") - op.attrs["npu_block_type"] = NpuBlockType.Default op.scheduled_pass = startup_init_ps op.set_output_tensor(new_tens) startup_init_ps.ops.append(op) |