From 07810fc2fcdd34db74222d90cc73ef12a88e7b78 Mon Sep 17 00:00:00 2001
From: Mike Kelly
Date: Thu, 12 Nov 2020 10:58:48 +0000
Subject: IVGCVSW-5328-5329 Fuse Activation

 * Added Fused Activation Optimization to both CL and Neon backends.
 * Added Fused Activation support to all the CL and Neon workloads that support it.
 * Changed ProfilingTest network to be a Convolution layer followed by an Abs layer rather than an Activation layer.
 * Added IBackendInternal::OptimizeSubgraphView function that can accept a ModelOptions.
 * Network will now call OptimizeSubgraphView passing in the ModelOptions.

Signed-off-by: Keith Davis
Signed-off-by: Mike Kelly
Signed-off-by: Teresa Charlin
Change-Id: Ib536ac3cbafc7d9b35c139ad9a65b7735262cd9d
---
 src/armnn/test/OptimizerTests.cpp | 24 ++++++++++++------------
 1 file changed, 12 insertions(+), 12 deletions(-)

(limited to 'src/armnn/test/OptimizerTests.cpp')

diff --git a/src/armnn/test/OptimizerTests.cpp b/src/armnn/test/OptimizerTests.cpp
index 0179589bf4..e7eab9d00d 100644
--- a/src/armnn/test/OptimizerTests.cpp
+++ b/src/armnn/test/OptimizerTests.cpp
@@ -810,10 +810,10 @@ BOOST_AUTO_TEST_CASE(OptimizeForExclusiveConnectionsFuseTest)
     std::vector<float> weightsVector = {1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12};
     ConstTensor weights(TensorInfo(4, weightsDimensionSizes, DataType::Float32), weightsVector);
 
-    std::vector<float> betaVector = {0.1f};
-    std::vector<float> gammaVector = {0.5f};
-    std::vector<float> meanVector = {0};
-    std::vector<float> varianceVector = {1};
+    std::vector<float> betaVector = { 0.1f };
+    std::vector<float> gammaVector = { 0.5f };
+    std::vector<float> meanVector = { 0 };
+    std::vector<float> varianceVector = { 1 };
     ConstTensor beta(TensorInfo(1, outputChannelSize, DataType::Float32), betaVector);
     ConstTensor gamma(TensorInfo(1, outputChannelSize, DataType::Float32), gammaVector);
     ConstTensor mean(TensorInfo(1, outputChannelSize, DataType::Float32), meanVector);
@@ -830,7 +830,7 @@ BOOST_AUTO_TEST_CASE(OptimizeForExclusiveConnectionsFuseTest)
     input->GetOutputSlot().SetTensorInfo(inputInfo);
     conv->GetOutputSlot().SetTensorInfo(outputInfo);
     batchNorm->GetOutputSlot().SetTensorInfo(outputInfo);
-    conv ->m_Weight = std::make_unique<ScopedCpuTensorHandle>(weights);
+    conv->m_Weight = std::make_unique<ScopedCpuTensorHandle>(weights);
     batchNorm->m_Beta = std::make_unique<ScopedCpuTensorHandle>(beta);
     batchNorm->m_Gamma = std::make_unique<ScopedCpuTensorHandle>(gamma);
     batchNorm->m_Mean = std::make_unique<ScopedCpuTensorHandle>(mean);
@@ -843,9 +843,9 @@ BOOST_AUTO_TEST_CASE(OptimizeForExclusiveConnectionsFuseTest)
     }
 
     // Connect layers
-    input ->GetOutputSlot(0).Connect(conv ->GetInputSlot(0));
-    conv ->GetOutputSlot(0).Connect(batchNorm->GetInputSlot(0));
-    batchNorm ->GetOutputSlot(0).Connect(output ->GetInputSlot(0));
+    input->GetOutputSlot(0).Connect(conv->GetInputSlot(0));
+    conv->GetOutputSlot(0).Connect(batchNorm->GetInputSlot(0));
+    batchNorm->GetOutputSlot(0).Connect(output->GetInputSlot(0));
 
     BOOST_CHECK(4 == graph.GetNumLayers());
     BOOST_TEST(CheckSequence(graph.cbegin(),
@@ -887,10 +887,10 @@ BOOST_AUTO_TEST_CASE(OptimizeForExclusiveConnectionsWithoutFuseTest)
     auto output2 = graph.AddLayer<OutputLayer>(1, "output2");
 
     // Connect layers
-    input ->GetOutputSlot(0).Connect(conv ->GetInputSlot(0));
-    conv ->GetOutputSlot(0).Connect(batchNorm->GetInputSlot(0));
-    batchNorm ->GetOutputSlot(0).Connect(output ->GetInputSlot(0));
-    conv ->GetOutputSlot(0).Connect(output2 ->GetInputSlot(0));
+    input->GetOutputSlot(0).Connect(conv->GetInputSlot(0));
+    conv->GetOutputSlot(0).Connect(batchNorm->GetInputSlot(0));
+    batchNorm->GetOutputSlot(0).Connect(output->GetInputSlot(0));
+    conv->GetOutputSlot(0).Connect(output2->GetInputSlot(0));
 
     BOOST_CHECK(5 == graph.GetNumLayers());
     BOOST_TEST(CheckSequence(graph.cbegin(),
--
cgit v1.2.1
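
For readers skimming the hunks above: OptimizeForExclusiveConnectionsFuseTest exercises the optimization that folds a BatchNormalization layer into the Convolution2d feeding it, so the fused graph computes gamma * (conv(x, W, b) - mean) / sqrt(variance + eps) + beta as a single convolution with rescaled parameters. The sketch below shows that folding arithmetic in isolation; it is not armnn's implementation, and FoldBatchNormIntoConv plus its flat per-channel weight layout are illustrative assumptions only.

#include <cmath>
#include <cstddef>
#include <iostream>
#include <vector>

// Standalone sketch of the batch-norm folding that the fuse test validates:
// rescale each output channel's weights by gamma / sqrt(variance + eps) and
// shift the bias so the single convolution matches conv followed by batch norm.
// FoldBatchNormIntoConv and the weight layout are hypothetical, not armnn API.
void FoldBatchNormIntoConv(std::vector<float>& weights,   // outCh contiguous blocks
                           std::vector<float>& bias,      // one entry per output channel
                           const std::vector<float>& gamma,
                           const std::vector<float>& beta,
                           const std::vector<float>& mean,
                           const std::vector<float>& variance,
                           float epsilon = 1e-5f)
{
    const std::size_t outCh = bias.size();
    const std::size_t perCh = weights.size() / outCh;
    for (std::size_t c = 0; c < outCh; ++c)
    {
        // scale applies uniformly to every weight in output channel c
        const float scale = gamma[c] / std::sqrt(variance[c] + epsilon);
        for (std::size_t i = 0; i < perCh; ++i)
        {
            weights[c * perCh + i] *= scale;               // W' = W * scale
        }
        bias[c] = (bias[c] - mean[c]) * scale + beta[c];   // b' = (b - mean) * scale + beta
    }
}

int main()
{
    // Mirrors the single-channel constants in the test above:
    // beta 0.1, gamma 0.5, mean 0, variance 1.
    std::vector<float> weights = {1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12};
    std::vector<float> bias    = {0.0f};
    FoldBatchNormIntoConv(weights, bias, {0.5f}, {0.1f}, {0.0f}, {1.0f});
    std::cout << "w[0] = " << weights[0] << ", b = " << bias[0] << '\n';  // ~0.5, 0.1
}

With the test's constants the fold simply halves the weights (gamma / sqrt(variance + eps) is roughly 0.5) and the new bias collapses to beta, which is the numerical equivalence the fused single-convolution graph must preserve.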