From 1a05aad6d5adf3b25848ffd873a0e0e82756aa06 Mon Sep 17 00:00:00 2001
From: Mike Kelly
Date: Fri, 31 Mar 2023 18:00:00 +0100
Subject: Revert "IVGCVSW-3808 Deprecation notices for old ElementwiseBinary layers"

This reverts commit 52e90bf59ecbe90d33368d8fc1fd120f07658aaf.

Change-Id: I5a0d244593d8e760ee7ba0c9d38c02377e1bdc24
Signed-off-by: Mike Kelly
---
 .../backendsCommon/test/AdditionEndToEndTestImpl.hpp        |  5 +++--
 src/backends/backendsCommon/test/EndToEndTestImpl.hpp       |  5 ++---
 .../backendsCommon/test/IsLayerSupportedTestImpl.hpp        | 12 ------------
 src/backends/backendsCommon/test/OptimizationViewsTests.cpp |  2 --
 .../backendsCommon/test/OptimizeSubgraphViewTests.cpp       |  4 ----
 5 files changed, 5 insertions(+), 23 deletions(-)

(limited to 'src/backends/backendsCommon/test')

diff --git a/src/backends/backendsCommon/test/AdditionEndToEndTestImpl.hpp b/src/backends/backendsCommon/test/AdditionEndToEndTestImpl.hpp
index a0d1af6ab7..f33521888f 100644
--- a/src/backends/backendsCommon/test/AdditionEndToEndTestImpl.hpp
+++ b/src/backends/backendsCommon/test/AdditionEndToEndTestImpl.hpp
@@ -1,5 +1,5 @@
 //
-// Copyright © 2022-2023 Arm Ltd and Contributors. All rights reserved.
+// Copyright © 2022 Arm Ltd and Contributors. All rights reserved.
 // SPDX-License-Identifier: MIT
 //
 #pragma once
@@ -30,7 +30,8 @@ armnn::INetworkPtr CreateAdditionNetwork(const armnn::TensorShape& inputXShape,
 
     TensorInfo outputTensorInfo(outputShape, DataType, qScale, qOffset);
 
-    IConnectableLayer* addition = network->AddElementwiseBinaryLayer(BinaryOperation::Add, "addition");
+
+    IConnectableLayer* addition = network->AddAdditionLayer("addition");
     IConnectableLayer* inputX = network->AddInputLayer(0, "inputX");
     IConnectableLayer* inputY = network->AddInputLayer(1, "inputY");
     IConnectableLayer* output = network->AddOutputLayer(0, "output");
diff --git a/src/backends/backendsCommon/test/EndToEndTestImpl.hpp b/src/backends/backendsCommon/test/EndToEndTestImpl.hpp
index 9213f0eac9..795fc13c32 100644
--- a/src/backends/backendsCommon/test/EndToEndTestImpl.hpp
+++ b/src/backends/backendsCommon/test/EndToEndTestImpl.hpp
@@ -39,7 +39,7 @@ bool ConstantUsageTest(const std::vector<BackendId>& computeDevice,
 
     IConnectableLayer* input = net->AddInputLayer(0);
     IConnectableLayer* constant = net->AddConstantLayer(ConstTensor(commonTensorInfo, constantData));
-    IConnectableLayer* add = net->AddElementwiseBinaryLayer(BinaryOperation::Add);
+    IConnectableLayer* add = net->AddAdditionLayer();
     IConnectableLayer* output = net->AddOutputLayer(0);
 
     input->GetOutputSlot(0).Connect(add->GetInputSlot(0));
@@ -176,8 +176,7 @@ void EndToEndLayerTestImpl(INetworkPtr network,
 
         for (unsigned int i = 0; i < out.size(); ++i)
         {
             CHECK_MESSAGE(Compare(it.second[i], out[i], tolerance) == true,
-                          "Position: " << i <<" Actual output: " << static_cast(out[i]) <<
-                          ". Expected output:" << static_cast(it.second[i]));
+                          "Actual output: " << out[i] << ". Expected output:" << it.second[i]);
         }
     }
diff --git a/src/backends/backendsCommon/test/IsLayerSupportedTestImpl.hpp b/src/backends/backendsCommon/test/IsLayerSupportedTestImpl.hpp
index 5475762a53..5b95d3cd92 100644
--- a/src/backends/backendsCommon/test/IsLayerSupportedTestImpl.hpp
+++ b/src/backends/backendsCommon/test/IsLayerSupportedTestImpl.hpp
@@ -622,9 +622,7 @@ struct LayerTypePolicy;
 // Every entry in the armnn::LayerType enum must be accounted for below.
 DECLARE_LAYER_POLICY_2_PARAM(Activation)
 
-ARMNN_NO_DEPRECATE_WARN_BEGIN
 DECLARE_LAYER_POLICY_1_PARAM(Addition)
-ARMNN_NO_DEPRECATE_WARN_END
 
 DECLARE_LAYER_POLICY_2_PARAM(ArgMinMax)
 
@@ -696,21 +694,15 @@ DECLARE_LAYER_POLICY_2_PARAM(Lstm)
 
 DECLARE_LAYER_POLICY_MAP_PARAM(Map, void)
 
-ARMNN_NO_DEPRECATE_WARN_BEGIN
 DECLARE_LAYER_POLICY_1_PARAM(Maximum)
-ARMNN_NO_DEPRECATE_WARN_END
 
 DECLARE_LAYER_POLICY_2_PARAM(Mean)
 
 DECLARE_LAYER_POLICY_1_PARAM(Merge)
 
-ARMNN_NO_DEPRECATE_WARN_BEGIN
 DECLARE_LAYER_POLICY_1_PARAM(Minimum)
-ARMNN_NO_DEPRECATE_WARN_END
 
-ARMNN_NO_DEPRECATE_WARN_BEGIN
 DECLARE_LAYER_POLICY_1_PARAM(Multiplication)
-ARMNN_NO_DEPRECATE_WARN_END
 
 DECLARE_LAYER_POLICY_2_PARAM(Normalization)
 
@@ -734,9 +726,7 @@ DECLARE_LAYER_POLICY_2_PARAM(QLstm)
 
 DECLARE_LAYER_POLICY_1_PARAM(QuantizedLstm)
 
-ARMNN_NO_DEPRECATE_WARN_BEGIN
 DECLARE_LAYER_POLICY_1_PARAM(Division)
-ARMNN_NO_DEPRECATE_WARN_END
 
 DECLARE_LAYER_POLICY_1_PARAM(Rank)
 
@@ -762,9 +752,7 @@ DECLARE_LAYER_POLICY_EXCEPTION_2_PARAM(StandIn)
 
 DECLARE_LAYER_POLICY_2_PARAM(StridedSlice)
 
-ARMNN_NO_DEPRECATE_WARN_BEGIN
 DECLARE_LAYER_POLICY_1_PARAM(Subtraction)
-ARMNN_NO_DEPRECATE_WARN_END
 
 DECLARE_LAYER_POLICY_2_PARAM(Reduce)
 
diff --git a/src/backends/backendsCommon/test/OptimizationViewsTests.cpp b/src/backends/backendsCommon/test/OptimizationViewsTests.cpp
index 665358b9c6..ff3217911a 100644
--- a/src/backends/backendsCommon/test/OptimizationViewsTests.cpp
+++ b/src/backends/backendsCommon/test/OptimizationViewsTests.cpp
@@ -263,9 +263,7 @@ TEST_CASE("OptimizeViewsValidateDeviceMockBackend")
 
     armnn::IConnectableLayer* input = net->AddInputLayer(0, "inLayer0");
     armnn::IConnectableLayer* input1 = net->AddInputLayer(1, "inLayer1");
-    ARMNN_NO_DEPRECATE_WARN_BEGIN
     armnn::IConnectableLayer* addition = net->AddAdditionLayer("addLayer");
-    ARMNN_NO_DEPRECATE_WARN_END
     armnn::IConnectableLayer* output = net->AddOutputLayer(0, "outLayer");
 
 
diff --git a/src/backends/backendsCommon/test/OptimizeSubgraphViewTests.cpp b/src/backends/backendsCommon/test/OptimizeSubgraphViewTests.cpp
index 7303733e17..f5a6c4217b 100644
--- a/src/backends/backendsCommon/test/OptimizeSubgraphViewTests.cpp
+++ b/src/backends/backendsCommon/test/OptimizeSubgraphViewTests.cpp
@@ -133,7 +133,6 @@ Pooling2dLayer* AddPoolingLayer(Graph& graph,
 }
 
 // Convenience function to add an addition layer to a graph
-ARMNN_NO_DEPRECATE_WARN_BEGIN
 AdditionLayer* AddAdditionaLayer(Graph& graph,
                                  LayerNameToLayerMap& layersInGraph,
                                  const std::string& layerName,
@@ -145,7 +144,6 @@ AdditionLayer* AddAdditionaLayer(Graph& graph,
     layersInGraph.insert(std::make_pair(additionLayer->GetName(), additionLayer));
     return additionLayer;
 }
-ARMNN_NO_DEPRECATE_WARN_END
 
 // Convenience function to check that the given substitution matches the specified expected values
 void CheckSubstitution(const OptimizationViews::SubstitutionPair& substitution,
@@ -752,9 +750,7 @@ SubgraphView::SubgraphViewPtr BuildPartiallyOptimizableSubgraph2(Graph& graph, L
                                                                 "conv2 layer unoptimizable", outputInfo);
     Convolution2dLayer* const conv3Layer = AddConvolutionLayer(graph, layersInGraph, convolutionDescriptor,
                                                                "conv3 layer", outputInfo);
-    ARMNN_NO_DEPRECATE_WARN_BEGIN
     AdditionLayer* const addLayer = AddAdditionaLayer(graph, layersInGraph, "add layer", outputInfo);
-    ARMNN_NO_DEPRECATE_WARN_END
     Layer* const outputLayer = AddOutputLayer(graph, "output layer");
 
     // Connect the network

--
cgit v1.2.1
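For reference, the two INetwork calls this revert toggles between are sketched below. This is an illustrative, untested snippet and not part of the patch: it uses only the call forms that appear verbatim in the hunks above (the older INetwork::AddAdditionLayer and the newer INetwork::AddElementwiseBinaryLayer taking a BinaryOperation); the helper name BuildAddNetwork is hypothetical, and the tensor-info setup done in the tests is omitted for brevity. On Arm NN versions where the deprecation notices are in place, the AddAdditionLayer branch may emit a deprecation warning.

#include <armnn/Descriptors.hpp>   // BinaryOperation / ElementwiseBinaryDescriptor
#include <armnn/INetwork.hpp>

// Builds a minimal two-input addition network using either API style.
armnn::INetworkPtr BuildAddNetwork(bool useOldApi)
{
    using namespace armnn;

    INetworkPtr network = INetwork::Create();

    IConnectableLayer* inputX = network->AddInputLayer(0, "inputX");
    IConnectableLayer* inputY = network->AddInputLayer(1, "inputY");

    // Older layer-specific call (restored by this revert) vs. the unified
    // elementwise-binary call that the reverted commit steered callers towards.
    IConnectableLayer* addition = useOldApi
        ? network->AddAdditionLayer("addition")
        : network->AddElementwiseBinaryLayer(BinaryOperation::Add, "addition");

    IConnectableLayer* output = network->AddOutputLayer(0, "output");

    inputX->GetOutputSlot(0).Connect(addition->GetInputSlot(0));
    inputY->GetOutputSlot(0).Connect(addition->GetInputSlot(1));
    addition->GetOutputSlot(0).Connect(output->GetInputSlot(0));

    return network;
}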