From 5786eb7b4021ebed55e3fca31cb2594b81472cb5 Mon Sep 17 00:00:00 2001
From: Teresa Charlin
Date: Fri, 21 May 2021 16:29:45 +0100
Subject: IVGCVSW-6069 Fold PAD into Depthwise Convolution

Signed-off-by: Teresa Charlin
Change-Id: Ib01629256309cfe17f341909d5b9bbbb09361422
---
 src/armnn/test/OptimizerTests.cpp | 81 +++++++++++++++++++++++++++++++++++++++
 1 file changed, 81 insertions(+)

(limited to 'src/armnn/test')

diff --git a/src/armnn/test/OptimizerTests.cpp b/src/armnn/test/OptimizerTests.cpp
index d0734d83be..110b2834d0 100644
--- a/src/armnn/test/OptimizerTests.cpp
+++ b/src/armnn/test/OptimizerTests.cpp
@@ -615,6 +615,87 @@ BOOST_AUTO_TEST_CASE(FoldPadLayerIntoConvolution2dLayer)
                              &IsLayerOfType<armnn::OutputLayer>));
 }
 
+BOOST_AUTO_TEST_CASE(FoldPadLayerIntoDepthwiseConvolution2dLayer)
+{
+    Graph graph;
+    const unsigned int inputShape[]   = {1, 2, 2, 3};
+    const unsigned int paddedShape[]  = {1, 6, 6, 3};
+    const unsigned int weightsShape[] = {1, 2, 3, 3};
+    const unsigned int outputShape[]  = {1, 2, 1, 3};
+
+    armnn::TensorInfo inputInfo(4, inputShape, DataType::Float32);
+    armnn::TensorInfo paddedInfo(4, paddedShape, DataType::Float32);
+    armnn::TensorInfo outputInfo(4, outputShape, DataType::Float32);
+
+    Layer* input = graph.AddLayer<InputLayer>(0, "input");
+    input->GetOutputSlot().SetTensorInfo(inputInfo);
+
+    PadDescriptor padDescriptor({{0, 0},
+                                 {2, 2},
+                                 {2, 2},
+                                 {0, 0}});
+
+    PadLayer* padLayer = graph.AddLayer<PadLayer>(padDescriptor, "pad");
+    padLayer->GetOutputSlot().SetTensorInfo(paddedInfo);
+
+    DepthwiseConvolution2dDescriptor depthwiseConvolution2dDescriptor;
+    depthwiseConvolution2dDescriptor.m_BiasEnabled = false;
+    depthwiseConvolution2dDescriptor.m_StrideX     = 1;
+    depthwiseConvolution2dDescriptor.m_StrideY     = 1;
+    depthwiseConvolution2dDescriptor.m_DataLayout  = DataLayout::NHWC;
+
+    std::vector<float> weightsVector(18);
+    armnn::ConstTensor weights(armnn::TensorInfo(4, weightsShape, armnn::DataType::Float32), weightsVector);
+
+    auto* depthwiseConv2dLayer = graph
+        .AddLayer<DepthwiseConvolution2dLayer>(depthwiseConvolution2dDescriptor, "depthwiseConv2d");
+    depthwiseConv2dLayer->m_Weight = std::make_unique<ScopedCpuTensorHandle>(weights);
+    depthwiseConv2dLayer->GetOutputSlot().SetTensorInfo(outputInfo);
+
+    Layer* output = graph.AddLayer<OutputLayer>(0, "output");
+
+    // Connect up layers - input -> pad -> depthwiseConv2d -> output
+    input->GetOutputSlot().Connect(padLayer->GetInputSlot(0));
+    padLayer->GetOutputSlot().Connect(depthwiseConv2dLayer->GetInputSlot(0));
+    depthwiseConv2dLayer->GetOutputSlot().Connect(output->GetInputSlot(0));
+
+    auto checkSimpleDepthwiseConv2d = [](const armnn::Layer* const layer)->bool {
+        const auto depthwiseConv2dLayer       = static_cast<const armnn::DepthwiseConvolution2dLayer*>(layer);
+        const auto depthwiseConv2dLayerParams = depthwiseConv2dLayer->GetParameters();
+        return IsLayerOfType<armnn::DepthwiseConvolution2dLayer>(layer) && (layer->GetNameStr() == "depthwiseConv2d") &&
+               (depthwiseConv2dLayerParams.m_PadLeft == 0) && (depthwiseConv2dLayerParams.m_PadRight == 0) &&
+               (depthwiseConv2dLayerParams.m_PadTop == 0) && (depthwiseConv2dLayerParams.m_PadBottom == 0) &&
+               (depthwiseConv2dLayerParams.m_BiasEnabled == false) && (depthwiseConv2dLayerParams.m_StrideX == 1) &&
+               (depthwiseConv2dLayerParams.m_StrideY == 1)
+               && (depthwiseConv2dLayerParams.m_DataLayout == DataLayout::NHWC);
+    };
+
+    BOOST_TEST(CheckSequence(graph.cbegin(), graph.cend(),
+                             &IsLayerOfType<armnn::InputLayer>,
+                             &IsLayerOfType<armnn::PadLayer>,
+                             checkSimpleDepthwiseConv2d,
+                             &IsLayerOfType<armnn::OutputLayer>));
+
+    armnn::Optimizer::Pass(graph, armnn::MakeOptimizations(FoldPadIntoDepthwiseConvolution2d()));
+
+    auto checkPadFoldedIntoDepthwiseConv2d = [](const armnn::Layer* const layer)->bool {
+        const auto depthwiseConv2dLayer       = static_cast<const armnn::DepthwiseConvolution2dLayer*>(layer);
+        const auto depthwiseConv2dLayerParams = depthwiseConv2dLayer->GetParameters();
+        return IsLayerOfType<armnn::DepthwiseConvolution2dLayer>(layer)
+               && (layer->GetNameStr() == "folded-pad-into-depthwiseConv2d") &&
+               (depthwiseConv2dLayerParams.m_PadLeft == 2) && (depthwiseConv2dLayerParams.m_PadRight == 2) &&
+               (depthwiseConv2dLayerParams.m_PadTop == 2) && (depthwiseConv2dLayerParams.m_PadBottom == 2) &&
+               (depthwiseConv2dLayerParams.m_BiasEnabled == false) && (depthwiseConv2dLayerParams.m_StrideX == 1) &&
+               (depthwiseConv2dLayerParams.m_StrideY == 1)
+               && (depthwiseConv2dLayerParams.m_DataLayout == DataLayout::NHWC);
+    };
+
+    BOOST_TEST(CheckSequence(graph.cbegin(), graph.cend(),
+                             &IsLayerOfType<armnn::InputLayer>,
+                             checkPadFoldedIntoDepthwiseConv2d,
+                             &IsLayerOfType<armnn::OutputLayer>));
+}
+
 BOOST_AUTO_TEST_CASE(FoldPadLayerIntoPooling2dLayer)
 {
     Graph graph;
-- 
cgit v1.2.1
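
For readers unfamiliar with the optimization exercised by this test, the sketch below summarises the folding rule it asserts: the PAD layer's height/width offsets are absorbed into the depthwise convolution descriptor's padding fields, after which the PAD layer is removed from the graph. This is a minimal illustrative sketch only, not the ArmNN implementation of FoldPadIntoDepthwiseConvolution2d; the struct names and the helper FoldPadOffsetsIntoDescriptor are hypothetical, and it assumes the NHWC pad-list ordering {batch, height, width, channels} used in the test above.

// Illustrative sketch (hypothetical types/names), not ArmNN source:
// fold an NHWC PAD descriptor's spatial offsets into a depthwise
// convolution descriptor, as the test expects ({2,2} on H and W
// becomes PadTop/PadBottom/PadLeft/PadRight == 2).
#include <cstdint>
#include <utility>
#include <vector>

struct PadDescriptorSketch
{
    // One {before, after} pair per dimension; NHWC order: {N, H, W, C}.
    std::vector<std::pair<uint32_t, uint32_t>> m_PadList;
};

struct DepthwiseConv2dDescriptorSketch
{
    uint32_t m_PadLeft = 0, m_PadRight = 0, m_PadTop = 0, m_PadBottom = 0;
};

inline bool FoldPadOffsetsIntoDescriptor(const PadDescriptorSketch& pad,
                                         DepthwiseConv2dDescriptorSketch& conv)
{
    if (pad.m_PadList.size() != 4)
    {
        return false;
    }
    // Folding is only valid when the batch (index 0) and channel (index 3)
    // dimensions are unpadded; only spatial padding can be expressed by the
    // convolution descriptor.
    if (pad.m_PadList[0].first != 0 || pad.m_PadList[0].second != 0 ||
        pad.m_PadList[3].first != 0 || pad.m_PadList[3].second != 0)
    {
        return false;
    }
    conv.m_PadTop    += pad.m_PadList[1].first;   // padding before H
    conv.m_PadBottom += pad.m_PadList[1].second;  // padding after H
    conv.m_PadLeft   += pad.m_PadList[2].first;   // padding before W
    conv.m_PadRight  += pad.m_PadList[2].second;  // padding after W
    return true;
}

With the pad list used in the test, {{0, 0}, {2, 2}, {2, 2}, {0, 0}}, this sketch yields PadTop == PadBottom == PadLeft == PadRight == 2, which is exactly what checkPadFoldedIntoDepthwiseConv2d verifies after Optimizer::Pass has run.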