diff options
author | Michael McGeagh <michael.mcgeagh@arm.com> | 2020-09-08 11:09:48 +0100 |
---|---|---|
committer | Michael McGeagh <michael.mcgeagh@arm.com> | 2020-09-11 13:29:06 +0100 |
commit | 8dbf8cfefa1feea6598f5f4864657ba6b6ad60ed (patch) | |
tree | e55debe4a80b01a79381a6aca378a9c6a7da5447 /ethosu/vela/operation.py | |
parent | fa4cb29996ffe1e64e39655c2195af6ff02e887a (diff) | |
download | ethos-u-vela-8dbf8cfefa1feea6598f5f4864657ba6b6ad60ed.tar.gz |
MLBEDSW-2745 Support relus with differing scales
In the event we have a relu op with different input and output scales,
we need to fuse it with a nop avgpool.
Also refactor the existing avgpool nop code to a common function.
Signed-off-by: Michael McGeagh <michael.mcgeagh@arm.com>
Change-Id: Iedf4513e7595ee4ee1777ba0b1eb38a8df8aed5e
Diffstat (limited to 'ethosu/vela/operation.py')
-rw-r--r-- | ethosu/vela/operation.py | 15 |
1 file changed, 15 insertions, 0 deletions
def create_avgpool_nop(name):
    """Build a 1x1, stride-1, VALID-padded AvgPool that passes data through unchanged.

    Such a no-op pooling operation gives the scheduler a real NPU op to
    attach work to — e.g. fusing a relu whose input and output scales
    differ (see MLBEDSW-2745).

    :param name: name to give the new Operation
    :return: the configured Operation instance
    """
    nop = Operation("AvgPool", name)
    # A 1x1 window with unit strides and no padding/skirt leaves every
    # element untouched, which is what makes this pooling a no-op.
    nop.attrs.update(
        {
            "padding": b"VALID",
            "npu_block_type": NpuBlockType.Pooling,
            "stride_w": 1,
            "stride_h": 1,
            "filter_width": 1,
            "filter_height": 1,
            "strides": [1, 1, 1, 1],
            "ksize": [1, 1, 1, 1],
            "skirt": [0, 0, 0, 0],
            "explicit_padding": [0, 0, 0, 0],
        }
    )
    return nop