From 04986c0016e59993563490fe67052371fc0e1ad2 Mon Sep 17 00:00:00 2001
From: Louis Verhaard
Date: Wed, 14 Oct 2020 09:18:31 +0200
Subject: MLBEDSW-3219: Suppress CPU info for Const/Placeholder

Suppress info print that Const/Placeholder/SubgraphInput are not
supported on the NPU.

Change-Id: I689d25481df0cd10487484c9f639e4253df081ee
Signed-off-by: Louis Verhaard
---
 ethosu/vela/operation.py           | 3 +++
 ethosu/vela/pass_packing.py        | 2 +-
 ethosu/vela/supported_operators.py | 3 ++-
 ethosu/vela/tflite_writer.py       | 2 +-
 4 files changed, 7 insertions(+), 3 deletions(-)

diff --git a/ethosu/vela/operation.py b/ethosu/vela/operation.py
index 710511c6..9c438556 100644
--- a/ethosu/vela/operation.py
+++ b/ethosu/vela/operation.py
@@ -271,6 +271,9 @@ class Op(Enum):
     def needs_bias(self):
         return bool(self.info.indices.biases)
 
+    def is_startup_init_op(self):
+        return self in (Op.Const, Op.Placeholder, Op.SubgraphInput)
+
     @classmethod
     def op_set(cls, predicate):
         # Returns the set of all operator codes that fulfill the given predicate
diff --git a/ethosu/vela/pass_packing.py b/ethosu/vela/pass_packing.py
index 5673c2df..db30cd66 100644
--- a/ethosu/vela/pass_packing.py
+++ b/ethosu/vela/pass_packing.py
@@ -90,7 +90,7 @@ quantization_ops = set((Op.Dequantize, Op.Max, Op.Min))
 cpu_ops = set((Op.Softmax, Op.LRN, Op.Shape, Op.Pad, Op.AddN)) | quantization_ops
 
 npu_dma_ops = set((Op.DMA,))
-startup_init_ops = set((Op.Const, Op.Placeholder, Op.SubgraphInput))
+startup_init_ops = Op.op_set(Op.is_startup_init_op)
 memory_only_ops = set((Op.Squeeze, Op.Reshape, Op.QuantizedReshape, Op.ExpandDims,))
 
 
diff --git a/ethosu/vela/supported_operators.py b/ethosu/vela/supported_operators.py
index 4e989124..18496c54 100644
--- a/ethosu/vela/supported_operators.py
+++ b/ethosu/vela/supported_operators.py
@@ -137,7 +137,8 @@ class SupportedOperators:
 
     def is_operator_supported(self, op):
         if op.type not in SupportedOperators.supported_operators:
-            print('Info: "{}" is not supported on the NPU. Placing on CPU instead'.format(op.type))
+            if not op.type.is_startup_init_op():
+                print('Info: "{}" is not supported on the NPU. Placing on CPU instead'.format(op.type))
             return False
         for constraint in self.generic_constraints:
             valid, extra = constraint(op)
diff --git a/ethosu/vela/tflite_writer.py b/ethosu/vela/tflite_writer.py
index e24aa282..4cb5df17 100644
--- a/ethosu/vela/tflite_writer.py
+++ b/ethosu/vela/tflite_writer.py
@@ -77,7 +77,7 @@ class TFLiteSerialiser:
         self.scratch_fast_buf_id = 1  # Always assign scratch_fast to buffer 1
         self.buffers_to_write = []  # have an empty array there
 
-        self.ops_to_ignore = set((Op.Const, Op.Placeholder, Op.SubgraphInput))
+        self.ops_to_ignore = Op.op_set(Op.is_startup_init_op)
 
         self.tensors_to_reshape = {}
-- 
cgit v1.2.1
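
For reference, a minimal sketch (not part of the patch) of how the new predicate is expected to behave: Op.op_set() returns every operator code that satisfies the given predicate, so building startup_init_ops and ops_to_ignore from Op.is_startup_init_op should reproduce the three-element set that the removed literals spelled out, and the same predicate now gates the "not supported on the NPU" info print. The import assumes the ethosu.vela package is installed.

    # Sketch only, assuming ethosu.vela is importable; this mirrors the patched
    # definitions rather than adding new behaviour.
    from ethosu.vela.operation import Op

    # Op.op_set(predicate) collects every Op member for which the predicate holds,
    # so this should equal the literal set the patch removes.
    startup_init_ops = Op.op_set(Op.is_startup_init_op)
    assert startup_init_ops == {Op.Const, Op.Placeholder, Op.SubgraphInput}

    # Startup/init ops are the ones whose CPU-placement info print is suppressed;
    # ordinary CPU fallback ops such as Softmax still report it.
    assert Op.Const.is_startup_init_op()
    assert not Op.Softmax.is_startup_init_op()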