From 381f5029c643a3c7c2bfa3454a6673c9dedf2fd0 Mon Sep 17 00:00:00 2001
From: Jim Flynn
Date: Tue, 23 Mar 2021 14:20:19 +0000
Subject: Revert "Fold PAD into Pooling2d"

This reverts commit 51ce7d487c761358de105f82ff90553570aedac0.

Reason for revert: https://jira.arm.com/browse/IVGCVSW-5798 LargeGraph_TENSOR_FLOAT32 CTS test failures

Change-Id: Ib031a47f605340b2202ecf074ce96a8b54c51075
---
 src/armnn/optimizations/All.hpp                    |   2 +-
 .../optimizations/FoldPadIntoConvolution2d.hpp     |  93 ++++++++++++++++
 src/armnn/optimizations/FoldPadIntoLayer2d.hpp     | 117 ---------------------
 3 files changed, 94 insertions(+), 118 deletions(-)
 create mode 100644 src/armnn/optimizations/FoldPadIntoConvolution2d.hpp
 delete mode 100644 src/armnn/optimizations/FoldPadIntoLayer2d.hpp

(limited to 'src/armnn/optimizations')

diff --git a/src/armnn/optimizations/All.hpp b/src/armnn/optimizations/All.hpp
index 5decc7c969..d042616ba4 100644
--- a/src/armnn/optimizations/All.hpp
+++ b/src/armnn/optimizations/All.hpp
@@ -9,7 +9,7 @@
 #include "ConvertConstants.hpp"
 #include "ConvertFp32NetworkToBf16.hpp"
 #include "ConvertFp32NetworkToFp16.hpp"
-#include "FoldPadIntoLayer2d.hpp"
+#include "FoldPadIntoConvolution2d.hpp"
 #include "FuseBatchNorm.hpp"
 #include "MovePermuteUp.hpp"
 #include "MoveTransposeUp.hpp"
diff --git a/src/armnn/optimizations/FoldPadIntoConvolution2d.hpp b/src/armnn/optimizations/FoldPadIntoConvolution2d.hpp
new file mode 100644
index 0000000000..5def6dfdd2
--- /dev/null
+++ b/src/armnn/optimizations/FoldPadIntoConvolution2d.hpp
@@ -0,0 +1,93 @@
+//
+// Copyright © 2017 Arm Ltd. All rights reserved.
+// SPDX-License-Identifier: MIT
+//
+
+#pragma once
+
+#include "Optimization.hpp"
+
+#include
+
+namespace armnn
+{
+namespace optimizations
+{
+
+class FoldPadIntoConvolution2dImpl
+{
+public:
+
+    void Run(Graph& graph, InputSlot& connection) const
+    {
+        Layer& base = connection.GetConnectedOutputSlot()->GetOwningLayer();
+        Layer& child = connection.GetOwningLayer();
+
+        ARMNN_ASSERT(base.GetType() == LayerType::Pad);
+        ARMNN_ASSERT(child.GetType() == LayerType::Convolution2d);
+
+        PadLayer* padLayer = PolymorphicDowncast<PadLayer*>(&base);
+        Convolution2dLayer* convolution2dLayer = PolymorphicDowncast<Convolution2dLayer*>(&child);
+
+        OutputSlot* parentOut = base.GetInputSlot(0).GetConnectedOutputSlot();
+
+        const std::string name = std::string("folded-") + base.GetName() + std::string("-into-") + child.GetName();
+        Convolution2dDescriptor descriptor = convolution2dLayer->GetParameters();
+
+        auto padList = padLayer->GetParameters().m_PadList;
+
+        armnn::DataLayout dataLayout = descriptor.m_DataLayout;
+
+        // In Convolution2dDescriptor, padLeft and padRight are paddings on the width dimension,
+        // whereas padTop and padBottom are paddings on the height dimension, so set them according to the data layout.
+        if(dataLayout == armnn::DataLayout::NHWC)
+        {
+            descriptor.m_PadLeft = padList[2].first;
+            descriptor.m_PadRight = padList[2].second;
+            descriptor.m_PadTop = padList[1].first;
+            descriptor.m_PadBottom = padList[1].second;
+        }
+        else
+        {
+            descriptor.m_PadLeft = padList[3].first;
+            descriptor.m_PadRight = padList[3].second;
+            descriptor.m_PadTop = padList[2].first;
+            descriptor.m_PadBottom = padList[2].second;
+        }
+
+        auto& newConv2dLayer = *graph.InsertNewLayer<Convolution2dLayer>(base.GetInputSlot(0),
+                                                                         descriptor,
+                                                                         name.c_str());
+
+        // Copy weights and bias to the new convolution layer
+        ARMNN_ASSERT_MSG(convolution2dLayer->m_Weight != nullptr,
+                         "FoldPadIntoConvolution2d: Weights data should not be null.");
+        newConv2dLayer.m_Weight =
+            std::move(convolution2dLayer->m_Weight);
+        if (descriptor.m_BiasEnabled)
+        {
+            ARMNN_ASSERT_MSG(convolution2dLayer->m_Bias != nullptr,
+                             "FoldPadIntoConvolution2d: Bias data should not be null if bias is enabled.");
+            newConv2dLayer.m_Bias = std::move(convolution2dLayer->m_Bias);
+        }
+
+        // Reconnects with original parent.
+        newConv2dLayer.GetOutputSlot().MoveAllConnections(*parentOut);
+        // Parent is now the new convolution2d layer.
+        parentOut = &newConv2dLayer.GetOutputSlot();
+
+        // Moves connections in child output to parent layer.
+        // Child layer will be removed as it's left unconnected.
+        // Base layer will be removed if left unconnected.
+        child.GetOutputSlot().MoveAllConnections(*parentOut);
+    }
+protected:
+    FoldPadIntoConvolution2dImpl() = default;
+    ~FoldPadIntoConvolution2dImpl() = default;
+};
+
+using FoldPadIntoConvolution2d = OptimizeForConnection<PadLayer, Convolution2dLayer, FoldPadIntoConvolution2dImpl>;
+
+} // namespace optimizations
+} // namespace armnn
+
+
diff --git a/src/armnn/optimizations/FoldPadIntoLayer2d.hpp b/src/armnn/optimizations/FoldPadIntoLayer2d.hpp
deleted file mode 100644
index cadc2f3017..0000000000
--- a/src/armnn/optimizations/FoldPadIntoLayer2d.hpp
+++ /dev/null
@@ -1,117 +0,0 @@
-//
-// Copyright © 2017 Arm Ltd. All rights reserved.
-// SPDX-License-Identifier: MIT
-//
-
-#pragma once
-
-#include "Optimization.hpp"
-
-#include
-
-namespace armnn
-{
-namespace optimizations
-{
-namespace
-{
-template <typename Layer2dT>
-Layer2dT* FoldPadIntoLayer2dImpl(Graph& graph, InputSlot& connection)
-{
-    Layer& base = connection.GetConnectedOutputSlot()->GetOwningLayer();
-    Layer& child = connection.GetOwningLayer();
-
-    ARMNN_ASSERT(base.GetType() == LayerType::Pad);
-    ARMNN_ASSERT(child.GetType() == LayerEnumOf<Layer2dT>());
-
-    PadLayer* padLayer = PolymorphicDowncast<PadLayer*>(&base);
-    Layer2dT* layer2d = PolymorphicDowncast<Layer2dT*>(&child);
-
-    OutputSlot* parentOut = base.GetInputSlot(0).GetConnectedOutputSlot();
-
-    const std::string name = std::string("folded-") + base.GetName() + std::string("-into-") + child.GetName();
-    auto descriptor = layer2d->GetParameters();
-
-    auto padList = padLayer->GetParameters().m_PadList;
-
-    armnn::DataLayout dataLayout = descriptor.m_DataLayout;
-
-    // In Convolution2dDescriptor/Pooling2dDescriptor, padLeft and padRight are paddings
-    // on the width dimension, whereas padTop and padBottom are paddings on the height dimension,
-    // so set them according to the data layout.
-    if(dataLayout == armnn::DataLayout::NHWC)
-    {
-        descriptor.m_PadLeft = padList[2].first;
-        descriptor.m_PadRight = padList[2].second;
-        descriptor.m_PadTop = padList[1].first;
-        descriptor.m_PadBottom = padList[1].second;
-    }
-    else
-    {
-        descriptor.m_PadLeft = padList[3].first;
-        descriptor.m_PadRight = padList[3].second;
-        descriptor.m_PadTop = padList[2].first;
-        descriptor.m_PadBottom = padList[2].second;
-    }
-
-    const auto newLayer2d = graph.InsertNewLayer<Layer2dT>(base.GetInputSlot(0), descriptor, name.c_str());
-
-    // Reconnects with original parent.
-    newLayer2d->GetOutputSlot().MoveAllConnections(*parentOut);
-    // Parent is now the new layer.
-    parentOut = &newLayer2d->GetOutputSlot();
-
-    // Moves connections in child output to parent layer.
-    // Child layer will be removed as it's left unconnected.
-    // Base layer will be removed if left unconnected.
-    child.GetOutputSlot().MoveAllConnections(*parentOut);
-
-    return newLayer2d;
-}
-} // namespace
-
-class FoldPadIntoConvolution2dImpl
-{
-public:
-    void Run(Graph& graph, InputSlot& connection) const
-    {
-        const auto conv2dLayer = PolymorphicDowncast<Convolution2dLayer*>(&connection.GetOwningLayer());
-        const auto newConv2dLayer = FoldPadIntoLayer2dImpl<Convolution2dLayer>(graph, connection);
-
-        // Copy weights and bias to the new convolution layer
-        ARMNN_ASSERT_MSG(conv2dLayer->m_Weight != nullptr,
-                         "FoldPadIntoConvolution2d: Weights data should not be null.");
-        newConv2dLayer->m_Weight = std::move(conv2dLayer->m_Weight);
-        if (conv2dLayer->GetParameters().m_BiasEnabled)
-        {
-            ARMNN_ASSERT_MSG(conv2dLayer->m_Bias != nullptr,
-                             "FoldPadIntoConvolution2d: Bias data should not be null if bias is enabled.");
-            newConv2dLayer->m_Bias = std::move(conv2dLayer->m_Bias);
-        }
-    }
-
-protected:
-    FoldPadIntoConvolution2dImpl() = default;
-    ~FoldPadIntoConvolution2dImpl() = default;
-};
-
-class FoldPadIntoPooling2dImpl
-{
-public:
-    void Run(Graph& graph, InputSlot& connection) const
-    {
-        FoldPadIntoLayer2dImpl<Pooling2dLayer>(graph, connection);
-    }
-
-protected:
-    FoldPadIntoPooling2dImpl() = default;
-    ~FoldPadIntoPooling2dImpl() = default;
-};
-
-using FoldPadIntoConvolution2d = OptimizeForConnection<PadLayer, Convolution2dLayer, FoldPadIntoConvolution2dImpl>;
-using FoldPadIntoPooling2d = OptimizeForConnection<PadLayer, Pooling2dLayer, FoldPadIntoPooling2dImpl>;
-
-} // namespace optimizations
-} // namespace armnn
-
-
--
cgit v1.2.1
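
A standalone sketch of the pad-folding mapping that both files above perform, kept separate from the patch itself: it assumes the Pad layer's pad list holds one {padBefore, padAfter} pair per input dimension, ordered N,H,W,C for NHWC and N,C,H,W for NCHW (the same indexing the patch relies on). The type and function names below are invented for illustration and are not ArmNN API.

    // Illustration only: how per-dimension pad pairs become explicit 2D padding fields.
    #include <cstddef>
    #include <cstdint>
    #include <utility>
    #include <vector>

    enum class Layout { NHWC, NCHW };

    struct Padding2d
    {
        uint32_t left = 0, right = 0, top = 0, bottom = 0;
    };

    // padList holds one {padBefore, padAfter} pair per input dimension,
    // in the order of the input tensor's dimensions (N,H,W,C or N,C,H,W).
    Padding2d MapPadListToPadding2d(const std::vector<std::pair<uint32_t, uint32_t>>& padList,
                                    Layout layout)
    {
        // Width padding becomes left/right, height padding becomes top/bottom,
        // so the dimension indices depend on the data layout.
        const std::size_t h = (layout == Layout::NHWC) ? 1 : 2;
        const std::size_t w = (layout == Layout::NHWC) ? 2 : 3;

        Padding2d p;
        p.top    = padList[h].first;
        p.bottom = padList[h].second;
        p.left   = padList[w].first;
        p.right  = padList[w].second;
        return p;
    }

    // Example: an NHWC input padded by 1 row top/bottom and 2 columns left/right,
    // padList = {{0,0}, {1,1}, {2,2}, {0,0}}, maps to top=1, bottom=1, left=2, right=2,
    // which is what the folded descriptor receives in place of the Pad layer.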