From 4df97eb257d3fc29b7431d9cb8a054b21d5a7448 Mon Sep 17 00:00:00 2001 From: Colm Donelan <Colm.Donelan@arm.com> Date: Thu, 29 Apr 2021 08:00:06 +0100 Subject: IVGCVSW-5890 Prevent modification to const layers with multiple connections * In AddBroadcastReshapeLayerImpl check if a constant layer has other connections before modifying its output tensor shape. * In ElementWiseBaseLayer replace an ARMNN_ASSERT with a proper error message. Signed-off-by: Colm Donelan <Colm.Donelan@arm.com> Change-Id: Id3f3796c260eede61f076660505257a8b65d93fc --- .../AddBroadcastReshapeLayerTests.cpp | 64 ++++++++++++++++++++++ 1 file changed, 64 insertions(+) (limited to 'src/armnn/test/optimizations') diff --git a/src/armnn/test/optimizations/AddBroadcastReshapeLayerTests.cpp b/src/armnn/test/optimizations/AddBroadcastReshapeLayerTests.cpp index 594b17261d..4523e70437 100644 --- a/src/armnn/test/optimizations/AddBroadcastReshapeLayerTests.cpp +++ b/src/armnn/test/optimizations/AddBroadcastReshapeLayerTests.cpp @@ -334,4 +334,68 @@ BOOST_AUTO_TEST_CASE(ReshapeParentConstLayerTest) BOOST_TEST(!reshapeLayer); } +BOOST_AUTO_TEST_CASE(ReshapeParentConstAddLayerMultipleConnectionsTest) +{ + // In this test case we recreate the situation where an Addition layer has + // a constant second term, e.g. [1,512] + [1]. The AddBroadcastReshapeLayer + // should modify the constant tensor info to match the number of dimensions. + // However, if this constant term is being reused elsewhere then we shouldn't + // modify it. Instead we insert a reshape layer. + + // What we'll do is have two sequential add layers both using the same const tensor. 
+ Graph graph; + const TensorInfo inputInfo({ 1, 512 }, DataType::Float32); + const TensorInfo constantTermInfo({ 1 }, DataType::Float32); + const TensorInfo outputInfo({ 1, 512 }, DataType::Float32); + + auto input = graph.AddLayer<InputLayer>(0, "input"); + auto constant = graph.AddLayer<ConstantLayer>("constant"); + auto add1 = graph.AddLayer<AdditionLayer>("add1"); + auto add2 = graph.AddLayer<AdditionLayer>("add2"); + auto output = graph.AddLayer<OutputLayer>(0, "output"); + + input->GetOutputSlot().SetTensorInfo(inputInfo); + constant->GetOutputSlot().SetTensorInfo(constantTermInfo); + float tensor[] = { 2.0f }; + constant->m_LayerOutput = std::make_unique<ScopedCpuTensorHandle>(ConstTensor(constantTermInfo, &tensor)); + add1->GetOutputSlot().SetTensorInfo(outputInfo); + + input->GetOutputSlot().Connect(add1->GetInputSlot(0)); + constant->GetOutputSlot().Connect(add1->GetInputSlot(1)); + add1->GetOutputSlot().Connect(add2->GetInputSlot(0)); + add2->GetOutputSlot().Connect(output->GetInputSlot(0)); + // This second connection should prevent the modification of the const output tensor. + constant->GetOutputSlot().Connect(add2->GetInputSlot(1)); + + BOOST_TEST(CheckSequence(graph.cbegin(), graph.cend(), + &IsLayerOfType<InputLayer>, + &IsLayerOfType<ConstantLayer>, + &IsLayerOfType<AdditionLayer>, + &IsLayerOfType<AdditionLayer>, + &IsLayerOfType<OutputLayer>)); + + // Run optimizer + armnn::Optimizer::Pass(graph, MakeOptimizations(AddBroadcastReshapeLayer())); + + // Broadcast reshape should have been added before each addition layer. + BOOST_TEST(CheckSequence(graph.cbegin(), graph.cend(), + &IsLayerOfType<InputLayer>, + &IsLayerOfType<ConstantLayer>, + &IsLayerOfType<ReshapeLayer>, + &IsLayerOfType<ReshapeLayer>, + &IsLayerOfType<AdditionLayer>, + &IsLayerOfType<AdditionLayer>, + &IsLayerOfType<OutputLayer>)); + + // Ensure the output shape of the constant hasn't changed. + BOOST_TEST(constant->m_LayerOutput.get()->GetTensorInfo().GetShape() == constantTermInfo.GetShape()); + // There should be two extra reshape layers with appropriate names. 
+ Layer* const reshapeLayer1 = GetFirstLayerWithName(graph, "Reshape_for:add1-1"); + Layer* const reshapeLayer2 = GetFirstLayerWithName(graph, "Reshape_for:add2-1"); + BOOST_TEST(reshapeLayer1); + BOOST_TEST(reshapeLayer2); +} + + + BOOST_AUTO_TEST_SUITE_END() \ No newline at end of file -- cgit v1.2.1