diff options
author | Mike Kelly <mike.kelly@arm.com> | 2023-03-15 15:06:23 +0000 |
---|---|---|
committer | TeresaARM <teresa.charlinreyes@arm.com> | 2023-03-30 13:40:17 +0000 |
commit | 52e90bf59ecbe90d33368d8fc1fd120f07658aaf (patch) | |
tree | 7ea7d3bb8148ce3973e0fd6abcd951437211255d /src/armnn/test/SubgraphViewTests.cpp | |
parent | 41f9d2a5bc060f6c63e80621ff2264a66fb298bd (diff) | |
download | armnn-52e90bf59ecbe90d33368d8fc1fd120f07658aaf.tar.gz |
IVGCVSW-3808 Deprecation notices for old ElementwiseBinary layers
* Added Deprecation notices for old ElementwiseBinary layers.
Signed-off-by: Mike Kelly <mike.kelly@arm.com>
Change-Id: Iebbbaff38cc9c347b25eb2f9054c914a4f931c68
Diffstat (limited to 'src/armnn/test/SubgraphViewTests.cpp')
-rw-r--r-- | src/armnn/test/SubgraphViewTests.cpp | 45 |
1 file changed, 27 insertions(+), 18 deletions(-)
diff --git a/src/armnn/test/SubgraphViewTests.cpp b/src/armnn/test/SubgraphViewTests.cpp index e0fd5fe7c1..4fcb476fcf 100644 --- a/src/armnn/test/SubgraphViewTests.cpp +++ b/src/armnn/test/SubgraphViewTests.cpp @@ -1054,7 +1054,7 @@ TEST_CASE("MultiInputSingleOutput") auto layerX2 = graph.AddLayer<InputLayer>(1, "layerX2"); auto layerM1 = graph.AddLayer<ActivationLayer>(activationDefaults, "layerM1"); auto layerM2 = graph.AddLayer<ActivationLayer>(activationDefaults, "layerM2"); - auto layerM3 = graph.AddLayer<AdditionLayer>("layerM3"); + auto layerM3 = graph.AddLayer<ElementwiseBinaryLayer>(BinaryOperation::Add, "layerM3"); auto layerX3 = graph.AddLayer<OutputLayer>(0, "layerX3"); // X1 X2 @@ -1081,7 +1081,7 @@ TEST_CASE("MultiInputSingleOutput") [](const Layer & l) { bool toSelect = (l.GetType() == LayerType::Activation - || l.GetType() == LayerType::Addition); + || l.GetType() == LayerType::ElementwiseBinary); return toSelect; }); @@ -1772,7 +1772,7 @@ TEST_CASE("SubgraphCycles") auto m0 = graph.AddLayer<ActivationLayer>(ActivationDescriptor{}, "m0"); auto x1 = graph.AddLayer<ActivationLayer>(ActivationDescriptor{}, "x1"); auto m1 = graph.AddLayer<ActivationLayer>(ActivationDescriptor{}, "m1"); - auto m2 = graph.AddLayer<AdditionLayer>("m2"); + auto m2 = graph.AddLayer<ElementwiseBinaryLayer>(BinaryOperation::Add, "m2"); auto x2 = graph.AddLayer<ActivationLayer>(ActivationDescriptor{}, "x2"); x0->GetOutputSlot(0).Connect(m0->GetInputSlot(0)); @@ -1872,7 +1872,7 @@ TEST_CASE("SubgraphViewWorkingCopy") bool ReplaceConstantMultiplicationWithDepthwise(SubgraphView& subgraph, IConnectableLayer* layer) { - if (layer->GetType() == LayerType::Multiplication) + if (layer->GetType() == LayerType::ElementwiseBinary) { IInputSlot* patternSubgraphInput = &layer->GetInputSlot(0); IInputSlot* patternSubgraphConstant = &layer->GetInputSlot(1); @@ -1937,12 +1937,12 @@ bool ReplaceConstantMultiplicationWithDepthwise(SubgraphView& subgraph, bool 
ReplaceTestMultiplication(SubgraphView& subgraph, IConnectableLayer* layer) { - if (layer->GetType() == LayerType::Multiplication) + if (layer->GetType() == LayerType::ElementwiseBinary) { switch (layer->GetType()) { - case LayerType::Multiplication: + case LayerType::ElementwiseBinary: return ReplaceConstantMultiplicationWithDepthwise(subgraph, layer); break; default: @@ -1993,7 +1993,7 @@ TEST_CASE("SubgraphViewWorkingCopyReplacementFunc") auto constant = graph.AddLayer<ConstantLayer>("const"); constant->m_LayerOutput = std::make_shared<ScopedTensorHandle>(constTensor); - IConnectableLayer* mul = graph.AddLayer<MultiplicationLayer>("mul"); + IConnectableLayer* mul = graph.AddLayer<ElementwiseBinaryLayer>(BinaryOperation::Mul, "mul"); IConnectableLayer* output = graph.AddLayer<OutputLayer>(0, "output"); // Create connections between layers @@ -2015,7 +2015,10 @@ TEST_CASE("SubgraphViewWorkingCopyReplacementFunc") // Check the WorkingCopy is as expected before replacement CHECK(workingCopy.GetIConnectableLayers().size() == 4); int idx=0; - LayerType expectedSorted[] = {LayerType::Input, LayerType::Constant, LayerType::Multiplication, LayerType::Output}; + LayerType expectedSorted[] = {LayerType::Input, + LayerType::Constant, + LayerType::ElementwiseBinary, + LayerType::Output}; workingCopy.ForEachIConnectableLayer([&idx, &expectedSorted](const IConnectableLayer* l) { CHECK((expectedSorted[idx] == l->GetType())); @@ -2209,7 +2212,7 @@ TEST_CASE("SubgraphViewWorkingCopyOptimizationViews") auto constant = graph.AddLayer<ConstantLayer>("const"); constant->m_LayerOutput = std::make_shared<ScopedTensorHandle>(constTensor); - IConnectableLayer* mul = graph.AddLayer<MultiplicationLayer>("mul"); + IConnectableLayer* mul = graph.AddLayer<ElementwiseBinaryLayer>(BinaryOperation::Mul, "mul"); IConnectableLayer* output = graph.AddLayer<OutputLayer>(0, "output"); // Create connections between layers @@ -2230,7 +2233,10 @@ TEST_CASE("SubgraphViewWorkingCopyOptimizationViews") // 
Check the WorkingCopy is as expected before replacement int idx=0; - LayerType expectedSorted[] = {LayerType::Input, LayerType::Constant, LayerType::Multiplication, LayerType::Output}; + LayerType expectedSorted[] = {LayerType::Input, + LayerType::Constant, + LayerType::ElementwiseBinary, + LayerType::Output}; workingCopy.ForEachIConnectableLayer([&idx, &expectedSorted](const IConnectableLayer* l) { CHECK((expectedSorted[idx] == l->GetType())); @@ -2285,7 +2291,7 @@ TEST_CASE("SubgraphViewWorkingCopyReplaceSlots") auto constant = graph.AddLayer<ConstantLayer>("const"); constant->m_LayerOutput = std::make_shared<ScopedTensorHandle>(constTensor); - IConnectableLayer* mul = graph.AddLayer<MultiplicationLayer>("mul"); + IConnectableLayer* mul = graph.AddLayer<ElementwiseBinaryLayer>(BinaryOperation::Mul, "mul"); IConnectableLayer* output = graph.AddLayer<OutputLayer>(0, "output"); // Create connections between layers @@ -2306,7 +2312,10 @@ TEST_CASE("SubgraphViewWorkingCopyReplaceSlots") // Check the WorkingCopy is as expected before replacement CHECK(workingCopy.GetIConnectableLayers().size() == 4); int idx=0; - LayerType expectedSorted[] = {LayerType::Input, LayerType::Constant, LayerType::Multiplication, LayerType::Output}; + LayerType expectedSorted[] = {LayerType::Input, + LayerType::Constant, + LayerType::ElementwiseBinary, + LayerType::Output}; workingCopy.ForEachIConnectableLayer([&idx, &expectedSorted](const IConnectableLayer* l) { CHECK((expectedSorted[idx] == l->GetType())); @@ -2346,7 +2355,7 @@ TEST_CASE("SubgraphViewWorkingCopyCloneInputAndOutputSlots") auto constant = graph.AddLayer<ConstantLayer>("const"); constant->m_LayerOutput = std::make_shared<ScopedTensorHandle>(constTensor); - IConnectableLayer* mul = graph.AddLayer<MultiplicationLayer>("mul"); + IConnectableLayer* mul = graph.AddLayer<ElementwiseBinaryLayer>(BinaryOperation::Mul, "mul"); armnn::ViewsDescriptor splitterDesc(2,4); IConnectableLayer* split = 
graph.AddLayer<SplitterLayer>(splitterDesc, "split"); IConnectableLayer* abs = graph.AddLayer<ActivationLayer>(ActivationFunction::Abs, "abs"); @@ -2411,7 +2420,7 @@ TEST_CASE("SubgraphViewWorkingCopyCloneInputAndOutputSlots") CHECK(workingCopy.GetIConnectableLayers().size() == 4); int idx=0; LayerType expectedSorted[] = {LayerType::Constant, - LayerType::Multiplication, + LayerType::ElementwiseBinary, LayerType::Splitter, LayerType::Activation}; workingCopy.ForEachIConnectableLayer([&idx, &expectedSorted](const IConnectableLayer* l) @@ -2532,7 +2541,7 @@ TEST_CASE("MultipleInputMultipleOutputSlots_SubstituteGraph") Layer* convLayer = graph.AddLayer<Convolution2dLayer>(Convolution2dDescriptor(), "conv"); Layer* reluLayer = graph.AddLayer<ActivationLayer>(ActivationDescriptor(), "activation"); Layer* constLayer = graph.AddLayer<ConstantLayer>("const"); - Layer* addLayer = graph.AddLayer<AdditionLayer>("add"); + Layer* addLayer = graph.AddLayer<ElementwiseBinaryLayer>(BinaryOperation::Add, "add"); Layer* outputLayer1 = graph.AddLayer<OutputLayer>(0, "output1"); Layer* outputLayer2 = graph.AddLayer<OutputLayer>(1, "output2"); @@ -2583,7 +2592,7 @@ TEST_CASE("MultipleInputMultipleOutputSlots_SubstituteGraph") // GetWorkingCopy() has caused address pointer of convolution layer to change. // Finding new address pointer... 
- if (layer->GetType() == LayerType::Addition) + if (layer->GetType() == LayerType::ElementwiseBinary) { addCopyLayer = layer; } @@ -2634,7 +2643,7 @@ TEST_CASE("MultipleInputMultipleOutputSlots_SubstituteGraphNewSlots") Layer* convLayer = graph.AddLayer<Convolution2dLayer>(Convolution2dDescriptor(), "conv"); Layer* reluLayer = graph.AddLayer<ActivationLayer>(ActivationDescriptor(), "activation"); Layer* constLayer = graph.AddLayer<ConstantLayer>("const"); - Layer* addLayer = graph.AddLayer<AdditionLayer>("add"); + Layer* addLayer = graph.AddLayer<ElementwiseBinaryLayer>(BinaryOperation::Add, "add"); Layer* outputLayer1 = graph.AddLayer<OutputLayer>(0, "output1"); Layer* outputLayer2 = graph.AddLayer<OutputLayer>(1, "output2"); @@ -2660,7 +2669,7 @@ TEST_CASE("MultipleInputMultipleOutputSlots_SubstituteGraphNewSlots") { // GetWorkingCopy() has caused address pointer of convolution layer to change. // Finding new address pointer... - if (layer->GetType() == LayerType::Addition) + if (layer->GetType() == LayerType::ElementwiseBinary) { addCopyLayer = layer; } |