diff options
author | Finn Williams <Finn.Williams@arm.com> | 2021-03-12 15:05:49 +0000 |
---|---|---|
committer | Jim Flynn <jim.flynn@arm.com> | 2021-03-16 09:50:58 +0000 |
commit | 9cd4ce1e6f76c070ac20ebcf4c67fc7ba8ba358a (patch) | |
tree | f5be908ea8c47ce36dd16758d08245e63c5e7d50 /src/armnn/test/optimizations | |
parent | c2d9559287bd9df0bb361d4d977c170e80dd4475 (diff) | |
download | armnn-9cd4ce1e6f76c070ac20ebcf4c67fc7ba8ba358a.tar.gz |
IVGCVSW-5754 Change the behaviour of the AddBroadcastReshapeLayer Optimisation when the input is a const tensor
Signed-off-by: Finn Williams <Finn.Williams@arm.com>
Change-Id: I8b1357bdefc45880d064d7e448af364ac8644c0d
Diffstat (limited to 'src/armnn/test/optimizations')
-rw-r--r-- | src/armnn/test/optimizations/AddBroadcastReshapeLayerTests.cpp | 49 |
1 file changed, 49 insertions, 0 deletions
diff --git a/src/armnn/test/optimizations/AddBroadcastReshapeLayerTests.cpp b/src/armnn/test/optimizations/AddBroadcastReshapeLayerTests.cpp index fe3cc31838..594b17261d 100644 --- a/src/armnn/test/optimizations/AddBroadcastReshapeLayerTests.cpp +++ b/src/armnn/test/optimizations/AddBroadcastReshapeLayerTests.cpp @@ -285,4 +285,53 @@ BOOST_AUTO_TEST_CASE(AddNoBroadcastReshapeLayerTest) BOOST_TEST(!reshapeLayer); } +BOOST_AUTO_TEST_CASE(ReshapeParentConstLayerTest) +{ + Graph graph; + const TensorInfo info0({ 1, 2, 3, 5 }, DataType::QAsymmU8); + const TensorInfo info1({ 5 }, DataType::QAsymmU8); + const TensorInfo outputInfo({ 1, 2, 3, 5 }, DataType::QAsymmU8); + + auto input = graph.AddLayer<InputLayer>(0, "input"); + auto constant = graph.AddLayer<ConstantLayer>("constant"); + auto mul = graph.AddLayer<MultiplicationLayer>("mul"); + auto output = graph.AddLayer<OutputLayer>(0, "output"); + + uint8_t tensor[] = { 1, 1, 1, 1, 1 }; + + constant->m_LayerOutput = std::make_unique<ScopedCpuTensorHandle>(ConstTensor(info1, &tensor)); + + input->GetOutputSlot().SetTensorInfo(info0); + constant->GetOutputSlot().SetTensorInfo(info1); + mul->GetOutputSlot().SetTensorInfo(outputInfo); + + input->GetOutputSlot().Connect(mul->GetInputSlot(0)); + constant->GetOutputSlot().Connect(mul->GetInputSlot(1)); + mul->GetOutputSlot().Connect(output->GetInputSlot(0)); + + BOOST_TEST(CheckSequence(graph.cbegin(), graph.cend(), + &IsLayerOfType<InputLayer>, + &IsLayerOfType<ConstantLayer>, + &IsLayerOfType<MultiplicationLayer>, + &IsLayerOfType<OutputLayer>)); + + // Run optimizer + armnn::Optimizer::Pass(graph, MakeOptimizations(AddBroadcastReshapeLayer())); + + // Broadcast reshape layer has not been added to the graph + BOOST_TEST(CheckSequence(graph.cbegin(), graph.cend(), + &IsLayerOfType<InputLayer>, + &IsLayerOfType<ConstantLayer>, + &IsLayerOfType<MultiplicationLayer>, + &IsLayerOfType<OutputLayer>)); + + TensorShape expectedShape = TensorShape{ 1, 1, 1, 5 }; + 
BOOST_TEST(constant->m_LayerOutput.get()->GetTensorInfo().GetShape() == expectedShape); + + BOOST_TEST(constant->m_LayerOutput.get()->GetTensorInfo().GetNumDimensions() == info0.GetNumDimensions()); + + Layer* const reshapeLayer = GetFirstLayerWithName(graph, "Reshape_for:mul-0"); + BOOST_TEST(!reshapeLayer); +} + BOOST_AUTO_TEST_SUITE_END()
\ No newline at end of file |