diff options
author | Mike Kelly <mike.kelly@arm.com> | 2020-11-12 10:58:48 +0000 |
---|---|---|
committer | Jim Flynn <jim.flynn@arm.com> | 2020-11-13 14:25:30 +0000 |
commit | 07810fc2fcdd34db74222d90cc73ef12a88e7b78 (patch) | |
tree | 8becef8453674822d079815b06ae37310b97d2cf /src/armnn/test/OptimizerTests.cpp | |
parent | 8502adeafbbb1db0acefa62560d93453e38dcadb (diff) | |
download | armnn-07810fc2fcdd34db74222d90cc73ef12a88e7b78.tar.gz |
IVGCVSW-5328-5329 Fuse Activation
* Added Fused Activation Optimization to both CL and Neon backends.
* Added Fused Activation support to all the CL and Neon workloads
that support it.
* Changed ProfilingTest network to be a Convolution layer
followed by an Abs layer rather than an Activation layer.
* Added IBackendInternal::OptimizeSubgraphView function that can accept a
ModelOptions.
* Network will now call OptimizeSubgraphView passing in the ModelOptions.
Signed-off-by: Keith Davis <keith.davis@arm.com>
Signed-off-by: Mike Kelly <mike.kelly@arm.com>
Signed-off-by: Teresa Charlin <teresa.charlinreyes@arm.com>
Change-Id: Ib536ac3cbafc7d9b35c139ad9a65b7735262cd9d
Diffstat (limited to 'src/armnn/test/OptimizerTests.cpp')
-rw-r--r-- | src/armnn/test/OptimizerTests.cpp | 24 |
1 file changed, 12 insertions(+), 12 deletions(-)
diff --git a/src/armnn/test/OptimizerTests.cpp b/src/armnn/test/OptimizerTests.cpp index 0179589bf4..e7eab9d00d 100644 --- a/src/armnn/test/OptimizerTests.cpp +++ b/src/armnn/test/OptimizerTests.cpp @@ -810,10 +810,10 @@ BOOST_AUTO_TEST_CASE(OptimizeForExclusiveConnectionsFuseTest) std::vector<float> weightsVector = {1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12}; ConstTensor weights(TensorInfo(4, weightsDimensionSizes, DataType::Float32), weightsVector); - std::vector<float> betaVector = {0.1f}; - std::vector<float> gammaVector = {0.5f}; - std::vector<float> meanVector = {0}; - std::vector<float> varianceVector = {1}; + std::vector<float> betaVector = { 0.1f }; + std::vector<float> gammaVector = { 0.5f }; + std::vector<float> meanVector = { 0 }; + std::vector<float> varianceVector = { 1 }; ConstTensor beta(TensorInfo(1, outputChannelSize, DataType::Float32), betaVector); ConstTensor gamma(TensorInfo(1, outputChannelSize, DataType::Float32), gammaVector); ConstTensor mean(TensorInfo(1, outputChannelSize, DataType::Float32), meanVector); @@ -830,7 +830,7 @@ BOOST_AUTO_TEST_CASE(OptimizeForExclusiveConnectionsFuseTest) input->GetOutputSlot().SetTensorInfo(inputInfo); conv->GetOutputSlot().SetTensorInfo(outputInfo); batchNorm->GetOutputSlot().SetTensorInfo(outputInfo); - conv ->m_Weight = std::make_unique<ScopedCpuTensorHandle>(weights); + conv->m_Weight = std::make_unique<ScopedCpuTensorHandle>(weights); batchNorm->m_Beta = std::make_unique<ScopedCpuTensorHandle>(beta); batchNorm->m_Gamma = std::make_unique<ScopedCpuTensorHandle>(gamma); batchNorm->m_Mean = std::make_unique<ScopedCpuTensorHandle>(mean); @@ -843,9 +843,9 @@ BOOST_AUTO_TEST_CASE(OptimizeForExclusiveConnectionsFuseTest) } // Connect layers - input ->GetOutputSlot(0).Connect(conv ->GetInputSlot(0)); - conv ->GetOutputSlot(0).Connect(batchNorm->GetInputSlot(0)); - batchNorm ->GetOutputSlot(0).Connect(output ->GetInputSlot(0)); + input->GetOutputSlot(0).Connect(conv->GetInputSlot(0)); + conv->GetOutputSlot(0).Connect(batchNorm->GetInputSlot(0)); + batchNorm->GetOutputSlot(0).Connect(output->GetInputSlot(0)); BOOST_CHECK(4 == graph.GetNumLayers()); BOOST_TEST(CheckSequence(graph.cbegin(), @@ -887,10 +887,10 @@ BOOST_AUTO_TEST_CASE(OptimizeForExclusiveConnectionsWithoutFuseTest) auto output2 = graph.AddLayer<OutputLayer>(1, "output2"); // Connect layers - input ->GetOutputSlot(0).Connect(conv ->GetInputSlot(0)); - conv ->GetOutputSlot(0).Connect(batchNorm->GetInputSlot(0)); - batchNorm ->GetOutputSlot(0).Connect(output ->GetInputSlot(0)); - conv ->GetOutputSlot(0).Connect(output2 ->GetInputSlot(0)); + input->GetOutputSlot(0).Connect(conv->GetInputSlot(0)); + conv->GetOutputSlot(0).Connect(batchNorm->GetInputSlot(0)); + batchNorm->GetOutputSlot(0).Connect(output->GetInputSlot(0)); + conv->GetOutputSlot(0).Connect(output2->GetInputSlot(0)); BOOST_CHECK(5 == graph.GetNumLayers()); BOOST_TEST(CheckSequence(graph.cbegin(), |