From 51ce7d487c761358de105f82ff90553570aedac0 Mon Sep 17 00:00:00 2001
From: Diego Lopez Recas
Date: Fri, 19 Mar 2021 12:40:16 +0000
Subject: Fold PAD into Pooling2d

Some models would add a PAD layer before a pooling when they can't
express their padding configuration as SAME or VALID.

Arm NN can merge the two because pooling layers are described with
explicit padding.

Signed-off-by: Diego Lopez Recas
Change-Id: Id048186db6a005e0257bfbc1406c3b0dab2cdd58
---
 src/armnn/optimizations/All.hpp                 |   2 +-
 .../optimizations/FoldPadIntoConvolution2d.hpp  |  93 ----------------
 src/armnn/optimizations/FoldPadIntoLayer2d.hpp  | 117 +++++++++++++++++++++
 3 files changed, 118 insertions(+), 94 deletions(-)
 delete mode 100644 src/armnn/optimizations/FoldPadIntoConvolution2d.hpp
 create mode 100644 src/armnn/optimizations/FoldPadIntoLayer2d.hpp

diff --git a/src/armnn/optimizations/All.hpp b/src/armnn/optimizations/All.hpp
index d042616ba4..5decc7c969 100644
--- a/src/armnn/optimizations/All.hpp
+++ b/src/armnn/optimizations/All.hpp
@@ -9,7 +9,7 @@
 #include "ConvertConstants.hpp"
 #include "ConvertFp32NetworkToBf16.hpp"
 #include "ConvertFp32NetworkToFp16.hpp"
-#include "FoldPadIntoConvolution2d.hpp"
+#include "FoldPadIntoLayer2d.hpp"
 #include "FuseBatchNorm.hpp"
 #include "MovePermuteUp.hpp"
 #include "MoveTransposeUp.hpp"
diff --git a/src/armnn/optimizations/FoldPadIntoConvolution2d.hpp b/src/armnn/optimizations/FoldPadIntoConvolution2d.hpp
deleted file mode 100644
index 5def6dfdd2..0000000000
--- a/src/armnn/optimizations/FoldPadIntoConvolution2d.hpp
+++ /dev/null
@@ -1,93 +0,0 @@
-//
-// Copyright © 2017 Arm Ltd. All rights reserved.
-// SPDX-License-Identifier: MIT
-//
-
-#pragma once
-
-#include "Optimization.hpp"
-
-#include <armnn/utility/PolymorphicDowncast.hpp>
-
-namespace armnn
-{
-namespace optimizations
-{
-
-class FoldPadIntoConvolution2dImpl
-{
-public:
-
-    void Run(Graph& graph, InputSlot& connection) const
-    {
-        Layer& base = connection.GetConnectedOutputSlot()->GetOwningLayer();
-        Layer& child = connection.GetOwningLayer();
-
-        ARMNN_ASSERT(base.GetType() == LayerType::Pad);
-        ARMNN_ASSERT(child.GetType() == LayerType::Convolution2d);
-
-        PadLayer* padLayer = PolymorphicDowncast<PadLayer*>(&base);
-        Convolution2dLayer* convolution2dLayer = PolymorphicDowncast<Convolution2dLayer*>(&child);
-
-        OutputSlot* parentOut = base.GetInputSlot(0).GetConnectedOutputSlot();
-
-        const std::string name = std::string("folded-") + base.GetName() + std::string("-into-") + child.GetName();
-        Convolution2dDescriptor descriptor = convolution2dLayer->GetParameters();
-
-        auto padList = padLayer->GetParameters().m_PadList;
-
-        armnn::DataLayout dataLayout = descriptor.m_DataLayout;
-
-        // In Convolution2dDescriptor, padLeft and padRight are defined as paddings on width dimension
-        // whereas padTop and padBottom - paddings on height dimension, so setting these according to data layout
-        if(dataLayout == armnn::DataLayout::NHWC)
-        {
-            descriptor.m_PadLeft = padList[2].first;
-            descriptor.m_PadRight = padList[2].second;
-            descriptor.m_PadTop = padList[1].first;
-            descriptor.m_PadBottom = padList[1].second;
-        }
-        else
-        {
-            descriptor.m_PadLeft = padList[3].first;
-            descriptor.m_PadRight = padList[3].second;
-            descriptor.m_PadTop = padList[2].first;
-            descriptor.m_PadBottom = padList[2].second;
-        }
-
-        auto& newConv2dLayer = *graph.InsertNewLayer<Convolution2dLayer>(base.GetInputSlot(0),
-                                                                         descriptor,
-                                                                         name.c_str());
-
-        // Copy weights and bias to the new convolution layer
-        ARMNN_ASSERT_MSG(convolution2dLayer->m_Weight != nullptr,
-                         "FoldPadIntoConvolution2d: Weights data should not be null.");
data should not be null."); - newConv2dLayer.m_Weight = std::move(convolution2dLayer->m_Weight); - if (descriptor.m_BiasEnabled) - { - ARMNN_ASSERT_MSG(convolution2dLayer->m_Bias != nullptr, - "FoldPadIntoConvolution2d: Bias data should not be null if bias is enabled."); - newConv2dLayer.m_Bias = std::move(convolution2dLayer->m_Bias); - } - - // Reconnects with original parent. - newConv2dLayer.GetOutputSlot().MoveAllConnections(*parentOut); - // Parent is now the new convolution2d layer. - parentOut = &newConv2dLayer.GetOutputSlot(); - - // Moves connections in child output to parent layer. - // Child layer will be removed as it's left unconnected. - // Base layer will be removed if left unconnected. - child.GetOutputSlot().MoveAllConnections(*parentOut); - } -protected: - FoldPadIntoConvolution2dImpl() = default; - ~FoldPadIntoConvolution2dImpl() = default; -}; - -using FoldPadIntoConvolution2d = OptimizeForConnection; - -} // namespace optimizations -} // namespace armnn - - diff --git a/src/armnn/optimizations/FoldPadIntoLayer2d.hpp b/src/armnn/optimizations/FoldPadIntoLayer2d.hpp new file mode 100644 index 0000000000..cadc2f3017 --- /dev/null +++ b/src/armnn/optimizations/FoldPadIntoLayer2d.hpp @@ -0,0 +1,117 @@ +// +// Copyright © 2017 Arm Ltd. All rights reserved. +// SPDX-License-Identifier: MIT +// + +#pragma once + +#include "Optimization.hpp" + +#include + +namespace armnn +{ +namespace optimizations +{ +namespace +{ +template +Layer2dT* FoldPadIntoLayer2dImpl(Graph& graph, InputSlot& connection) +{ + Layer& base = connection.GetConnectedOutputSlot()->GetOwningLayer(); + Layer& child = connection.GetOwningLayer(); + + ARMNN_ASSERT(base.GetType() == LayerType::Pad); + ARMNN_ASSERT(child.GetType() == LayerEnumOf()); + + PadLayer* padLayer = PolymorphicDowncast(&base); + Layer2dT* layer2d = PolymorphicDowncast(&child); + + OutputSlot* parentOut = base.GetInputSlot(0).GetConnectedOutputSlot(); + + const std::string name = std::string("folded-") + base.GetName() + std::string("-into-") + child.GetName(); + auto descriptor = layer2d->GetParameters(); + + auto padList = padLayer->GetParameters().m_PadList; + + armnn::DataLayout dataLayout = descriptor.m_DataLayout; + + // In Convolution2dDescriptor/Pooling2dDescriptor, padLeft and padRight are defined as paddings + // on width dimension whereas padTop and padBottom - paddings on height dimension, so setting these + // according to data layout + if(dataLayout == armnn::DataLayout::NHWC) + { + descriptor.m_PadLeft = padList[2].first; + descriptor.m_PadRight = padList[2].second; + descriptor.m_PadTop = padList[1].first; + descriptor.m_PadBottom = padList[1].second; + } + else + { + descriptor.m_PadLeft = padList[3].first; + descriptor.m_PadRight = padList[3].second; + descriptor.m_PadTop = padList[2].first; + descriptor.m_PadBottom = padList[2].second; + } + + const auto newLayer2d = graph.InsertNewLayer(base.GetInputSlot(0), descriptor, name.c_str()); + + // Reconnects with original parent. + newLayer2d->GetOutputSlot().MoveAllConnections(*parentOut); + // Parent is now the new layer. + parentOut = &newLayer2d->GetOutputSlot(); + + // Moves connections in child output to parent layer. + // Child layer will be removed as it's left unconnected. + // Base layer will be removed if left unconnected. 
+    child.GetOutputSlot().MoveAllConnections(*parentOut);
+
+    return newLayer2d;
+}
+} // namespace
+
+class FoldPadIntoConvolution2dImpl
+{
+public:
+    void Run(Graph& graph, InputSlot& connection) const
+    {
+        const auto conv2dLayer = PolymorphicDowncast<Convolution2dLayer*>(&connection.GetOwningLayer());
+        const auto newConv2dLayer = FoldPadIntoLayer2dImpl<Convolution2dLayer>(graph, connection);
+
+        // Copy weights and bias to the new convolution layer
+        ARMNN_ASSERT_MSG(conv2dLayer->m_Weight != nullptr,
+                         "FoldPadIntoConvolution2d: Weights data should not be null.");
+        newConv2dLayer->m_Weight = std::move(conv2dLayer->m_Weight);
+        if (conv2dLayer->GetParameters().m_BiasEnabled)
+        {
+            ARMNN_ASSERT_MSG(conv2dLayer->m_Bias != nullptr,
+                             "FoldPadIntoConvolution2d: Bias data should not be null if bias is enabled.");
+            newConv2dLayer->m_Bias = std::move(conv2dLayer->m_Bias);
+        }
+    }
+
+protected:
+    FoldPadIntoConvolution2dImpl() = default;
+    ~FoldPadIntoConvolution2dImpl() = default;
+};
+
+class FoldPadIntoPooling2dImpl
+{
+public:
+    void Run(Graph& graph, InputSlot& connection) const
+    {
+        FoldPadIntoLayer2dImpl<Pooling2dLayer>(graph, connection);
+    }
+
+protected:
+    FoldPadIntoPooling2dImpl() = default;
+    ~FoldPadIntoPooling2dImpl() = default;
+};
+
+using FoldPadIntoConvolution2d = OptimizeForConnection<PadLayer, Convolution2dLayer, FoldPadIntoConvolution2dImpl>;
+using FoldPadIntoPooling2d = OptimizeForConnection<PadLayer, Pooling2dLayer, FoldPadIntoPooling2dImpl>;
+
+} // namespace optimizations
+} // namespace armnn
+
+
-- 
cgit v1.2.1
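
A minimal standalone sketch (not part of the patch) of the pad-list mapping that the fold performs for NHWC tensors. It assumes only Arm NN's public armnn/Descriptors.hpp header; the pad amounts are made-up example values.

// Illustration only: mirrors the NHWC branch of FoldPadIntoLayer2dImpl by copying
// a PAD layer's (before, after) pairs into the explicit padding fields of a
// Pooling2dDescriptor.
#include <armnn/Descriptors.hpp>
#include <cassert>
#include <utility>
#include <vector>

int main()
{
    // Hypothetical pad list for an NHWC tensor: one (before, after) pair per
    // dimension, ordered N, H, W, C.
    std::vector<std::pair<unsigned int, unsigned int>> padList = {{0, 0}, {1, 2}, {3, 4}, {0, 0}};

    armnn::Pooling2dDescriptor descriptor;
    descriptor.m_DataLayout = armnn::DataLayout::NHWC;

    // Same index mapping as the optimization: dimension 1 is height, dimension 2 is width.
    descriptor.m_PadTop    = padList[1].first;
    descriptor.m_PadBottom = padList[1].second;
    descriptor.m_PadLeft   = padList[2].first;
    descriptor.m_PadRight  = padList[2].second;

    assert(descriptor.m_PadTop == 1 && descriptor.m_PadBottom == 2);
    assert(descriptor.m_PadLeft == 3 && descriptor.m_PadRight == 4);
    return 0;
}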