author      Mike Kelly <mike.kelly@arm.com>    2019-07-11 11:44:52 +0100
committer   mike.kelly <mike.kelly@arm.com>    2019-07-11 12:24:22 +0000
commit      e1d60bbe806312a9c17ef0e9426a71a38619f210 (patch)
tree        2cfa27a446133d3cd5b0edd75e3cb19684478a75
parent      c16c9c16a898c229c819eeb27ffab4a46f814863 (diff)
download    android-nn-driver-e1d60bbe806312a9c17ef0e9426a71a38619f210.tar.gz
IVGCVSW-3454 Fix VTS conv2d dilation test failures
* Ensure that the conv2d filter layout matches the input layout.
* Ensure that the correct dimensions are used when calculating padding.

Signed-off-by: Mike Kelly <mike.kelly@arm.com>
Change-Id: I334b8d7625a77ae73b147d9693ff3f2dfa56e445
-rw-r--r--    1.2/HalPolicy.cpp    49
1 file changed, 33 insertions(+), 16 deletions(-)
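The diff below moves the descriptor/data-layout setup ahead of the weight conversion, permutes the NNAPI OHWI filter to OIHW when the layout is NCHW, and looks up the height/width indices from the data layout instead of hardcoding NHWC positions. The following is a minimal standalone C++ sketch (not driver code) illustrating those two ideas; the DataLayout enum, LayoutIndices struct, GetLayoutIndices helper, and the example filter shape are simplified, hypothetical stand-ins for armnn::DataLayout and armnnUtils::DataLayoutIndexed.

    #include <array>
    #include <cstdint>
    #include <iostream>

    enum class DataLayout { NHWC, NCHW };

    // Hypothetical stand-in for armnnUtils::DataLayoutIndexed: positions of the
    // height and width dimensions in a 4D tensor shape for the given layout.
    struct LayoutIndices { unsigned heightIndex; unsigned widthIndex; };

    LayoutIndices GetLayoutIndices(DataLayout layout)
    {
        // NHWC: [batch, height, width, channels] -> H = 1, W = 2
        // NCHW: [batch, channels, height, width] -> H = 2, W = 3
        return layout == DataLayout::NHWC ? LayoutIndices{1u, 2u} : LayoutIndices{2u, 3u};
    }

    int main()
    {
        // NNAPI conv2d filters are always OHWI:
        // [depth_out, filter_height, filter_width, depth_in]. Example shape only.
        std::array<uint32_t, 4> filterOHWI = {32u, 3u, 5u, 16u};

        DataLayout layout = DataLayout::NCHW;

        // The patch applies the permutation OHWIToOIHW = {0, 2, 3, 1} when the layout
        // is NCHW, so the filter's H/W land in the same positions as the input's H/W.
        std::array<uint32_t, 4> filterShape = filterOHWI;
        if (layout == DataLayout::NCHW)
        {
            filterShape = {filterOHWI[0], filterOHWI[3], filterOHWI[1], filterOHWI[2]}; // OIHW
        }

        LayoutIndices idx = GetLayoutIndices(layout);
        std::cout << "kernelY (height) = " << filterShape[idx.heightIndex]
                  << ", kernelX (width) = " << filterShape[idx.widthIndex] << std::endl;
        // Prints kernelY = 3, kernelX = 5 for either layout; before the fix, the NCHW
        // path read hardcoded NHWC indices and fed the wrong dimensions to CalcPadding.
    }
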
diff --git a/1.2/HalPolicy.cpp b/1.2/HalPolicy.cpp
index cdf8c0f4..5a940bea 100644
--- a/1.2/HalPolicy.cpp
+++ b/1.2/HalPolicy.cpp
@@ -178,11 +178,34 @@ bool HalPolicy::ConvertConv2d(const Operation& operation, const Model& model, Co
return Fail("%s: Dynamic output not supported", __func__);
}
+ armnn::Convolution2dDescriptor desc;
+ desc.m_DataLayout = armnn::DataLayout::NHWC;
+
+ // Determine whether padding is implicit or explicit
+ bool implicitPadding = operation.inputs.size() == 7 ||
+ (operation.inputs.size() >= 8 &&
+ GetInputOperand<hal_1_2::HalPolicy>(operation, 7, model)->type == OperandType::BOOL);
+
+ if (implicitPadding)
+ {
+ desc.m_DataLayout = OptionalDataLayout<hal_1_2::HalPolicy>(operation, 7, model, data);
+ }
+ else if (operation.inputs.size() >= 10)
+ {
+ desc.m_DataLayout = OptionalDataLayout<hal_1_2::HalPolicy>(operation, 10, model, data);
+ }
+
+ const armnn::PermutationVector OHWIToOIHW = {0, 2, 3, 1};
+
// ArmNN does not currently support non-fixed weights or bias
- const ConstTensorPin weightsPin =
- ConvertOperationInputToConstTensorPin<hal_1_2::HalPolicy>(operation, 1, model, data);
+ // The NNAPI filter is always OHWI [depth_out, filter_height, filter_width, depth_in] but ArmNN expects the
+ // filter's height and width indices to match the input's height and width indices so we permute it to OIHW if
+ // the DataLayout is NCHW
+ const ConstTensorPin weightsPin = (desc.m_DataLayout == armnn::DataLayout::NCHW) ?
+ ConvertOperationInputToConstTensorPin<hal_1_2::HalPolicy>(operation, 1, model, data, OHWIToOIHW) :
+ ConvertOperationInputToConstTensorPin<hal_1_2::HalPolicy>(operation, 1, model, data);
const ConstTensorPin biasPin =
- ConvertOperationInputToConstTensorPin<hal_1_2::HalPolicy>(operation, 2, model, data);
+ ConvertOperationInputToConstTensorPin<hal_1_2::HalPolicy>(operation, 2, model, data);
if (!weightsPin.IsValid())
{
@@ -198,15 +221,8 @@ bool HalPolicy::ConvertConv2d(const Operation& operation, const Model& model, Co
armnn::ConstTensor bias = biasPin.GetConstTensor();
SanitizeBiasQuantizationScale(bias.GetInfo(), weights.GetInfo(), inputInfo);
- armnn::Convolution2dDescriptor desc;
- desc.m_DataLayout = armnn::DataLayout::NHWC;
ActivationFn activation;
- // Determine whether padding is implicit or explicit
- bool implicitPadding = operation.inputs.size() == 7 ||
- (operation.inputs.size() >= 8 &&
- GetInputOperand<hal_1_2::HalPolicy>(operation, 7, model)->type == OperandType::BOOL);
-
if (implicitPadding)
{
android::nn::PaddingScheme paddingScheme;
@@ -219,15 +235,17 @@ bool HalPolicy::ConvertConv2d(const Operation& operation, const Model& model, Co
return Fail("%s: Operation has invalid inputs (implicit padding)", __func__);
}
- const uint32_t kernelX = weights.GetShape()[2];
- const uint32_t kernelY = weights.GetShape()[1];
- const uint32_t inputX = inputInfo.GetShape()[2];
- const uint32_t inputY = inputInfo.GetShape()[1];
+ armnnUtils::DataLayoutIndexed dataLayoutIndexed(desc.m_DataLayout);
+ unsigned int widthIndex = dataLayoutIndexed.GetWidthIndex();
+ unsigned int heightIndex = dataLayoutIndexed.GetHeightIndex();
+ const uint32_t kernelX = weights.GetShape()[widthIndex];
+ const uint32_t kernelY = weights.GetShape()[heightIndex];
+ const uint32_t inputX = inputInfo.GetShape()[widthIndex];
+ const uint32_t inputY = inputInfo.GetShape()[heightIndex];
CalcPadding(inputX, kernelX, desc.m_StrideX, desc.m_PadLeft, desc.m_PadRight, paddingScheme);
CalcPadding(inputY, kernelY, desc.m_StrideY, desc.m_PadTop, desc.m_PadBottom, paddingScheme);
- desc.m_DataLayout = OptionalDataLayout<hal_1_2::HalPolicy>(operation, 7, model, data);
}
else if (operation.inputs.size() >= 10)
{
@@ -243,7 +261,6 @@ bool HalPolicy::ConvertConv2d(const Operation& operation, const Model& model, Co
{
return Fail("%s: Operation has invalid inputs (explicit padding)", __func__);
}
- desc.m_DataLayout = OptionalDataLayout<hal_1_2::HalPolicy>(operation, 10, model, data);
}
else
{