From 9cd4ce1e6f76c070ac20ebcf4c67fc7ba8ba358a Mon Sep 17 00:00:00 2001
From: Finn Williams <Finn.Williams@arm.com>
Date: Fri, 12 Mar 2021 15:05:49 +0000
Subject: IVGCVSW-5754 Change the behaviour of the AddBroadcastReshapeLayer
 Optimisation when the input is a const tensor

Signed-off-by: Finn Williams <Finn.Williams@arm.com>
Change-Id: I8b1357bdefc45880d064d7e448af364ac8644c0d
---
 .../optimizations/AddBroadcastReshapeLayer.hpp     | 15 +++++++
 .../optimizations/AddBroadcastReshapeLayerTests.cpp | 49 ++++++++++++++++++++++
 2 files changed, 64 insertions(+)

diff --git a/src/armnn/optimizations/AddBroadcastReshapeLayer.hpp b/src/armnn/optimizations/AddBroadcastReshapeLayer.hpp
index 6bb53d0f12..26661cfcde 100644
--- a/src/armnn/optimizations/AddBroadcastReshapeLayer.hpp
+++ b/src/armnn/optimizations/AddBroadcastReshapeLayer.hpp
@@ -8,6 +8,7 @@
 
 #include <armnn/utility/IgnoreUnused.hpp>
 #include <armnn/utility/PolymorphicDowncast.hpp>
+#include <backendsCommon/CpuTensorHandle.hpp>
 
 namespace armnn
 {
@@ -65,6 +66,20 @@ public:
         std::copy_backward (reshapedDim.begin(), reshapedDim.end(), reshapedDimensions.end());
         reshapeInfo.SetShape(armnn::TensorShape{ numDimensions, reshapedDimensions.data() });
+
+        // If the parent layer is a Constant layer we just change the tensor info rather than adding a reshape layer
+        Layer& parentLayer = layer.GetInputSlot(reshapeSlot).GetConnectedOutputSlot()->GetOwningLayer();
+        if (parentLayer.GetType() == armnn::LayerType::Constant)
+        {
+            ConstantLayer& constantLayer = static_cast<ConstantLayer&>(parentLayer);
+
+            constantLayer.m_LayerOutput = std::make_unique<ScopedCpuTensorHandle>(
+                    ConstTensor(reshapeInfo,constantLayer.m_LayerOutput.get()->GetTensor()));
+            constantLayer.GetOutputSlot().SetTensorInfo(reshapeInfo);
+
+            return;
+        }
+
         const std::string layerName = "Reshape_for:" + layer.GetNameStr() + "-" + std::to_string(reshapeSlot);
         const ReshapeDescriptor descriptor{reshapeInfo.GetShape()};
         ReshapeLayer *reshapeLayer = graph.InsertNewLayer<ReshapeLayer>(layer.GetInputSlot(reshapeSlot),
diff --git a/src/armnn/test/optimizations/AddBroadcastReshapeLayerTests.cpp b/src/armnn/test/optimizations/AddBroadcastReshapeLayerTests.cpp
index fe3cc31838..594b17261d 100644
--- a/src/armnn/test/optimizations/AddBroadcastReshapeLayerTests.cpp
+++ b/src/armnn/test/optimizations/AddBroadcastReshapeLayerTests.cpp
@@ -285,4 +285,53 @@ BOOST_AUTO_TEST_CASE(AddNoBroadcastReshapeLayerTest)
     BOOST_TEST(!reshapeLayer);
 }
 
+BOOST_AUTO_TEST_CASE(ReshapeParentConstLayerTest)
+{
+    Graph graph;
+    const TensorInfo info0({ 1, 2, 3, 5 }, DataType::QAsymmU8);
+    const TensorInfo info1({ 5 }, DataType::QAsymmU8);
+    const TensorInfo outputInfo({ 1, 2, 3, 5 }, DataType::QAsymmU8);
+
+    auto input = graph.AddLayer<InputLayer>(0, "input");
+    auto constant = graph.AddLayer<ConstantLayer>("constant");
+    auto mul = graph.AddLayer<MultiplicationLayer>("mul");
+    auto output = graph.AddLayer<OutputLayer>(0, "output");
+
+    uint8_t tensor[] = { 1, 1, 1, 1, 1 };
+
+    constant->m_LayerOutput = std::make_unique<ScopedCpuTensorHandle>(ConstTensor(info1, &tensor));
+
+    input->GetOutputSlot().SetTensorInfo(info0);
+    constant->GetOutputSlot().SetTensorInfo(info1);
+    mul->GetOutputSlot().SetTensorInfo(outputInfo);
+
+    input->GetOutputSlot().Connect(mul->GetInputSlot(0));
+    constant->GetOutputSlot().Connect(mul->GetInputSlot(1));
+    mul->GetOutputSlot().Connect(output->GetInputSlot(0));
+
+    BOOST_TEST(CheckSequence(graph.cbegin(), graph.cend(),
+                             &IsLayerOfType<InputLayer>,
+                             &IsLayerOfType<ConstantLayer>,
+                             &IsLayerOfType<MultiplicationLayer>,
+                             &IsLayerOfType<OutputLayer>));
+
+    // Run optimizer
+    armnn::Optimizer::Pass(graph, MakeOptimizations(AddBroadcastReshapeLayer()));
+
+    // Broadcast reshape layer has not been added to the graph
+    BOOST_TEST(CheckSequence(graph.cbegin(), graph.cend(),
+                             &IsLayerOfType<InputLayer>,
+                             &IsLayerOfType<ConstantLayer>,
+                             &IsLayerOfType<MultiplicationLayer>,
+                             &IsLayerOfType<OutputLayer>));
+
+    TensorShape expectedShape = TensorShape{ 1, 1, 1, 5 };
+    BOOST_TEST(constant->m_LayerOutput.get()->GetTensorInfo().GetShape() == expectedShape);
+
+    BOOST_TEST(constant->m_LayerOutput.get()->GetTensorInfo().GetNumDimensions() == info0.GetNumDimensions());
+
+    Layer* const reshapeLayer = GetFirstLayerWithName(graph, "Reshape_for:mul-0");
+    BOOST_TEST(!reshapeLayer);
+}
+
 BOOST_AUTO_TEST_SUITE_END()
\ No newline at end of file
-- 
cgit v1.2.1