about | summary | refs | log | tree | commit | diff
diff options
context:
space:
mode:
authorFinn Williams <Finn.Williams@arm.com>2021-03-12 15:05:49 +0000
committerJim Flynn <jim.flynn@arm.com>2021-03-16 09:50:58 +0000
commit9cd4ce1e6f76c070ac20ebcf4c67fc7ba8ba358a (patch)
treef5be908ea8c47ce36dd16758d08245e63c5e7d50
parentc2d9559287bd9df0bb361d4d977c170e80dd4475 (diff)
downloadarmnn-9cd4ce1e6f76c070ac20ebcf4c67fc7ba8ba358a.tar.gz
IVGCVSW-5754 Change the behaviour of the AddBroadcastReshapeLayer Optimisation when the input is a const tensor
Signed-off-by: Finn Williams <Finn.Williams@arm.com>
Change-Id: I8b1357bdefc45880d064d7e448af364ac8644c0d
-rw-r--r-- src/armnn/optimizations/AddBroadcastReshapeLayer.hpp | 15
-rw-r--r-- src/armnn/test/optimizations/AddBroadcastReshapeLayerTests.cpp | 49
2 files changed, 64 insertions, 0 deletions
diff --git a/src/armnn/optimizations/AddBroadcastReshapeLayer.hpp b/src/armnn/optimizations/AddBroadcastReshapeLayer.hpp
index 6bb53d0f12..26661cfcde 100644
--- a/src/armnn/optimizations/AddBroadcastReshapeLayer.hpp
+++ b/src/armnn/optimizations/AddBroadcastReshapeLayer.hpp
@@ -8,6 +8,7 @@
#include <armnn/utility/IgnoreUnused.hpp>
#include <armnn/utility/PolymorphicDowncast.hpp>
+#include <backendsCommon/CpuTensorHandle.hpp>
namespace armnn
{
@@ -65,6 +66,20 @@ public:
std::copy_backward (reshapedDim.begin(), reshapedDim.end(), reshapedDimensions.end());
reshapeInfo.SetShape(armnn::TensorShape{ numDimensions, reshapedDimensions.data() });
+
+ // If the parent layer is a Constant layer we just change the tensor info rather than adding a reshape layer
+ Layer& parentLayer = layer.GetInputSlot(reshapeSlot).GetConnectedOutputSlot()->GetOwningLayer();
+ if (parentLayer.GetType() == armnn::LayerType::Constant)
+ {
+ ConstantLayer& constantLayer = static_cast<ConstantLayer&>(parentLayer);
+
+ constantLayer.m_LayerOutput = std::make_unique<ScopedCpuTensorHandle>(
+ ConstTensor(reshapeInfo,constantLayer.m_LayerOutput.get()->GetTensor<void>()));
+ constantLayer.GetOutputSlot().SetTensorInfo(reshapeInfo);
+
+ return;
+ }
+
const std::string layerName = "Reshape_for:" + layer.GetNameStr() + "-" + std::to_string(reshapeSlot);
const ReshapeDescriptor descriptor{reshapeInfo.GetShape()};
ReshapeLayer *reshapeLayer = graph.InsertNewLayer<ReshapeLayer>(layer.GetInputSlot(reshapeSlot),
diff --git a/src/armnn/test/optimizations/AddBroadcastReshapeLayerTests.cpp b/src/armnn/test/optimizations/AddBroadcastReshapeLayerTests.cpp
index fe3cc31838..594b17261d 100644
--- a/src/armnn/test/optimizations/AddBroadcastReshapeLayerTests.cpp
+++ b/src/armnn/test/optimizations/AddBroadcastReshapeLayerTests.cpp
@@ -285,4 +285,53 @@ BOOST_AUTO_TEST_CASE(AddNoBroadcastReshapeLayerTest)
BOOST_TEST(!reshapeLayer);
}
+BOOST_AUTO_TEST_CASE(ReshapeParentConstLayerTest)
+{
+ Graph graph;
+ const TensorInfo info0({ 1, 2, 3, 5 }, DataType::QAsymmU8);
+ const TensorInfo info1({ 5 }, DataType::QAsymmU8);
+ const TensorInfo outputInfo({ 1, 2, 3, 5 }, DataType::QAsymmU8);
+
+ auto input = graph.AddLayer<InputLayer>(0, "input");
+ auto constant = graph.AddLayer<ConstantLayer>("constant");
+ auto mul = graph.AddLayer<MultiplicationLayer>("mul");
+ auto output = graph.AddLayer<OutputLayer>(0, "output");
+
+ uint8_t tensor[] = { 1, 1, 1, 1, 1 };
+
+ constant->m_LayerOutput = std::make_unique<ScopedCpuTensorHandle>(ConstTensor(info1, &tensor));
+
+ input->GetOutputSlot().SetTensorInfo(info0);
+ constant->GetOutputSlot().SetTensorInfo(info1);
+ mul->GetOutputSlot().SetTensorInfo(outputInfo);
+
+ input->GetOutputSlot().Connect(mul->GetInputSlot(0));
+ constant->GetOutputSlot().Connect(mul->GetInputSlot(1));
+ mul->GetOutputSlot().Connect(output->GetInputSlot(0));
+
+ BOOST_TEST(CheckSequence(graph.cbegin(), graph.cend(),
+ &IsLayerOfType<InputLayer>,
+ &IsLayerOfType<ConstantLayer>,
+ &IsLayerOfType<MultiplicationLayer>,
+ &IsLayerOfType<OutputLayer>));
+
+ // Run optimizer
+ armnn::Optimizer::Pass(graph, MakeOptimizations(AddBroadcastReshapeLayer()));
+
+ // Broadcast reshape layer has not been added to the graph
+ BOOST_TEST(CheckSequence(graph.cbegin(), graph.cend(),
+ &IsLayerOfType<InputLayer>,
+ &IsLayerOfType<ConstantLayer>,
+ &IsLayerOfType<MultiplicationLayer>,
+ &IsLayerOfType<OutputLayer>));
+
+ TensorShape expectedShape = TensorShape{ 1, 1, 1, 5 };
+ BOOST_TEST(constant->m_LayerOutput.get()->GetTensorInfo().GetShape() == expectedShape);
+
+ BOOST_TEST(constant->m_LayerOutput.get()->GetTensorInfo().GetNumDimensions() == info0.GetNumDimensions());
+
+ Layer* const reshapeLayer = GetFirstLayerWithName(graph, "Reshape_for:mul-0");
+ BOOST_TEST(!reshapeLayer);
+}
+
BOOST_AUTO_TEST_SUITE_END() \ No newline at end of file