diff options
author | Tim Hall <tim.hall@arm.com> | 2020-06-17 14:53:11 +0100 |
---|---|---|
committer | Tim Hall <tim.hall@arm.com> | 2020-06-18 17:53:52 +0100 |
commit | c8310b1432f7a77df3c95e8ecf8248c8a953b411 (patch) | |
tree | eaddfe6ae80db3c85ddca824e0fc70739d05a9d5 /ethosu/vela/mark_tensors.py | |
parent | 10a6618784aae35de389e0291fd2d78cbfa03bb7 (diff) | |
download | ethos-u-vela-c8310b1432f7a77df3c95e8ecf8248c8a953b411.tar.gz |
MLBEDSW-2528: MLCE-219: Custom operator pass through
- Fixed custom operator pass through
- Added error printing functions for operators and tensors
- Minor cleanup of custom exception handling
Signed-off-by: Tim Hall <tim.hall@arm.com>
Change-Id: Idf295df1e4c544381dc480244d880c32fb285e38
Diffstat (limited to 'ethosu/vela/mark_tensors.py')
-rw-r--r-- | ethosu/vela/mark_tensors.py | 21 |
1 file changed, 20 insertions, 1 deletion
diff --git a/ethosu/vela/mark_tensors.py b/ethosu/vela/mark_tensors.py index 72ab8cfa..c4f2bae2 100644 --- a/ethosu/vela/mark_tensors.py +++ b/ethosu/vela/mark_tensors.py @@ -17,8 +17,10 @@ # Mark purpose and select formats for Tensors. Also compresses the weights. from . import rewrite_graph from . import weight_compressor +from .errors import OperatorError from .tensor import TensorFormat from .tensor import TensorPurpose +from .tflite_mapping import custom_prefix def purpose_from_list(lst): @@ -268,18 +270,33 @@ def mark_tensor_purpose(nng, arch, verbose_tensor_purpose=False): if ops is None or op.type in ops: if ops is None: print( - "warning: don't know how to mark up purpose for", + "Warning: Don't know how to mark up purpose for", op.type, op.inputs, "triggering all feature map fallback", ) + for idx, tens in enumerate(op.inputs): purpose = input_purpose(op, idx) mark_tensor_helper(tens, purpose) + if op.type == "Reshape": # Reshape's input and output point to same data op.outputs[0].mem_area = op.inputs[0].mem_area + + if op.type.startswith(custom_prefix) and op.attrs.get("custom_type", "") == "ExistingNpuOp": + scratch_tensor = None + + if len(op.inputs) >= 3: + scratch_tensor = op.inputs[2] # should be existing scratch tensor + if scratch_tensor.name.endswith("_scratch"): + scratch_tensor.purpose = TensorPurpose.Scratch + + if scratch_tensor is None: + raise OperatorError(op, "Scratch tensor not found.") + break + return op for sg in nng.subgraphs: @@ -316,6 +333,8 @@ def mark_tensor_format(nng, arch, verbose_tensor_format=False): fmt = arch.default_feature_map_format elif tens.purpose == TensorPurpose.Weights: fmt = arch.default_weight_format + elif tens.purpose == TensorPurpose.Scratch: + fmt = arch.default_feature_map_format elif tens.purpose == TensorPurpose.Unknown: fmt = TensorFormat.Unknown else: |