about summary refs log tree commit diff
path: root/ethosu/vela/graph_optimiser.py
diff options
context:
space:
mode:
author: Jacob Bohlin <jacob.bohlin@arm.com> 2020-05-20 09:03:40 +0200
committer: Tim Hall <tim.hall@arm.com> 2020-06-18 17:53:52 +0100
commit: cf7da10987cac3fc68cf180a9af665fe06d608fa (patch)
tree: 5467fe8b062e4e37a4e0dadc178a9716d233facc /ethosu/vela/graph_optimiser.py
parent: 5b3dcd7323a5852bd188bb3929e7f38db3b52862 (diff)
download: ethos-u-vela-cf7da10987cac3fc68cf180a9af665fe06d608fa.tar.gz
MLBEDSW-1716: Transpose Convolution support
Change-Id: Ie6d8d6de9f3447f19ba06aafa9fa480fc96a973b
Signed-off-by: Jacob Bohlin <jacob.bohlin@arm.com>
Diffstat (limited to 'ethosu/vela/graph_optimiser.py')
-rw-r--r--  ethosu/vela/graph_optimiser.py  53
1 file changed, 48 insertions, 5 deletions
diff --git a/ethosu/vela/graph_optimiser.py b/ethosu/vela/graph_optimiser.py
index b004f4cc..ca8b89fc 100644
--- a/ethosu/vela/graph_optimiser.py
+++ b/ethosu/vela/graph_optimiser.py
@@ -131,12 +131,50 @@ def calc_padding_and_skirt(padding_type, kernel_size, stride, input_dims):
skirt = (top_pad, left_pad, ypad - top_pad, xpad - left_pad)
return padding, skirt
def calc_upscaled_padding_and_skirt(padding_type, kernel_size, stride, input_dims):
    """Compute explicit padding and skirt for an op whose IFM is upscaled by its stride.

    Used for transpose convolution, where the input is conceptually upscaled by the
    stride before the convolution runs with unit stride.

    Args:
        padding_type: b"SAME" or b"VALID" (TFLite padding attribute, a bytes value).
        kernel_size: (kernel_height, kernel_width).
        stride: NHWC stride tuple; stride[1] is vertical, stride[2] is horizontal.
        input_dims: NHWC input shape before upscaling.

    Returns:
        (padding, skirt), each as (top, left, bottom, right)-style tuples matching
        the layout produced by calc_padding_and_skirt.

    Raises:
        UnsupportedFeatureError: for any other padding_type.
    """
    # Model the transpose-conv upscaling: scale the spatial dims (NHWC) by the stride.
    upscaled_shape = [input_dims[0], input_dims[1] * stride[1], input_dims[2] * stride[2], input_dims[3]]
    # kernel_size is (height, width): vertical padding from height, horizontal from width.
    ypad = needed_total_padding(int(upscaled_shape[1]), int(stride[1]), int(kernel_size[0]))
    xpad = needed_total_padding(int(upscaled_shape[2]), int(stride[2]), int(kernel_size[1]))

    if padding_type == b"SAME":
        right_pad = ((xpad + 1) // 2) - 1
        bottom_pad = ((ypad + 1) // 2) - 1
        # BUGFIX: horizontal padding must derive from the kernel WIDTH (kernel_size[1])
        # and vertical padding from the kernel HEIGHT (kernel_size[0]); the original
        # code had the two indices swapped (only harmless for square kernels).
        left_pad = max(kernel_size[1] - 1 - right_pad, 0)
        top_pad = max(kernel_size[0] - 1 - bottom_pad, 0)
    elif padding_type == b"VALID":
        right_pad = (xpad + 1) // 2
        bottom_pad = (ypad + 1) // 2
        left_pad = max(kernel_size[1] - right_pad, 0)
        top_pad = max(kernel_size[0] - bottom_pad, 0)
    else:
        # Raise instead of `assert 0`: asserts are stripped under -O, and the file
        # already reports unsupported attributes via UnsupportedFeatureError.
        raise UnsupportedFeatureError("Unknown padding {}".format(padding_type))

    padding = (top_pad, left_pad, bottom_pad, right_pad)
    skirt = (top_pad, left_pad, ypad - top_pad, xpad - left_pad)
    return padding, skirt
+
def fixup_conv2d_backprop(op, arch):
    """Rewrite Conv2DBackpropInput into the bias-carrying switched variant.

    Reorders the inputs, appends an all-zero bias tensor when none is present,
    and resets the strides to 1 (the upscaling is handled separately).
    Returns the (possibly modified) op.
    """
    if op.type == "Conv2DBackpropInput":
        # Swap input 0 and input 2 so the feature map tensor comes first.
        op.inputs[0], op.inputs[2] = op.inputs[2], op.inputs[0]
        op.type = "Conv2DBackpropInputSwitchedBias"

        # One bias value per entry along the last weight axis.
        # NOTE(review): presumably this is the output-channel axis after the
        # input switch above — confirm against the weight layout used here.
        num_biases = op.inputs[1].shape[3]

        if len(op.inputs) < 4:
            # No bias input present: synthesize a zero-filled int32 bias/scale
            # tensor produced by a new Const op and wire it in as input 3.
            bias_op = Operation("Const", op.name + "_bias")
            bias_tens = Tensor([num_biases], DataType.int32, op.name + "_bias_tens")
            bias_tens.values = [0] * num_biases
            bias_tens.quant_values = [0] * num_biases
            bias_tens.ops = [bias_op]
            bias_op.outputs = [bias_tens]
            bias_tens.consumer_list = [op]
            op.inputs.append(bias_tens)

        # The op itself now runs with unit stride in every dimension.
        op.attrs.update({"stride_w": 1, "stride_h": 1, "strides": (1, 1, 1, 1)})

    return op
@@ -292,15 +330,20 @@ def add_padding_fields(op, arch):
else:
raise UnsupportedFeatureError("Unknown operation that uses padding: {}".format(op.type))
- dilation_h, dilation_w = op.get_dilation_h_w()
- dilated_kernel_size = [dilation_h * (kernel_size[0] - 1) + 1, dilation_w * (kernel_size[1] - 1) + 1]
- padding, skirt = calc_padding_and_skirt(op.attrs["padding"], dilated_kernel_size, op.attrs["strides"], input_shape)
+ if op.type == "Conv2DBackpropInputSwitchedBias":
+ padding, skirt = calc_upscaled_padding_and_skirt(op.attrs["padding"], kernel_size, op.attrs["strides"], input_shape)
+ else:
+ dilation_h, dilation_w = op.get_dilation_h_w()
+ dilated_kernel_size = [dilation_h * (kernel_size[0] - 1) + 1, dilation_w * (kernel_size[1] - 1) + 1]
+ padding, skirt = calc_padding_and_skirt(op.attrs["padding"], dilated_kernel_size, op.attrs["strides"], input_shape)
+
op.attrs["explicit_padding"] = padding
op.attrs["skirt"] = skirt
+
return op
# Op types treated as convolutions; a set literal is the idiomatic (and C405-clean)
# form of set((...)) around a fixed tuple.
conv_op = {"Conv2D", "QuantizedConv2D", "Conv2DBackpropInputSwitchedBias", "Conv2DBiasAct"}
fc_op = set(
(
"MatMul",