From 1a05aad6d5adf3b25848ffd873a0e0e82756aa06 Mon Sep 17 00:00:00 2001
From: Mike Kelly
Date: Fri, 31 Mar 2023 18:00:00 +0100
Subject: Revert "IVGCVSW-3808 Deprecation notices for old ElementwiseBinary layers"

This reverts commit 52e90bf59ecbe90d33368d8fc1fd120f07658aaf.

Change-Id: I5a0d244593d8e760ee7ba0c9d38c02377e1bdc24
Signed-off-by: Mike Kelly
---
 src/backends/aclCommon/ArmComputeSubgraphUtils.hpp |  8 -----
 .../backendsCommon/WorkloadFactoryBase.hpp         |  8 +----
 .../test/AdditionEndToEndTestImpl.hpp              |  5 +--
 .../backendsCommon/test/EndToEndTestImpl.hpp       |  5 ++-
 .../test/IsLayerSupportedTestImpl.hpp              | 12 -------
 .../backendsCommon/test/OptimizationViewsTests.cpp |  2 --
 .../test/OptimizeSubgraphViewTests.cpp             |  4 ---
 src/backends/cl/ClBackend.cpp                      |  8 -----
 src/backends/cl/ClLayerSupport.cpp                 | 10 ------
 src/backends/cl/ClLayerSupport.hpp                 | 10 ++----
 src/backends/neon/NeonBackend.cpp                  |  8 -----
 src/backends/neon/test/NeonCreateWorkloadTests.cpp | 18 +---------
 .../reference/test/RefCreateWorkloadTests.cpp      | 38 +---------------------
 13 files changed, 10 insertions(+), 126 deletions(-)

(limited to 'src/backends')

diff --git a/src/backends/aclCommon/ArmComputeSubgraphUtils.hpp b/src/backends/aclCommon/ArmComputeSubgraphUtils.hpp
index fb7a4e1387..599d3538eb 100644
--- a/src/backends/aclCommon/ArmComputeSubgraphUtils.hpp
+++ b/src/backends/aclCommon/ArmComputeSubgraphUtils.hpp
@@ -77,10 +77,8 @@ LayerType* FuseAdditionLayer(OptimizationViews& optimizationViews,
                              ActivationDescriptor& activationDesc,
                              std::string name)
 {
-    ARMNN_NO_DEPRECATE_WARN_BEGIN
     IConnectableLayer* replacement = optimizationViews.GetINetwork()->AddAdditionLayer(name.c_str());
     LayerType* replacementLayer = PolymorphicDowncast<LayerType*>(replacement);
-    ARMNN_NO_DEPRECATE_WARN_END

     FuseLayer(optimizationViews,
               baseLayer,
@@ -98,10 +96,8 @@ LayerType* FuseSubtractionLayer(OptimizationViews& optimizationViews,
                                 ActivationDescriptor& activationDesc,
                                 std::string name)
 {
-    ARMNN_NO_DEPRECATE_WARN_BEGIN
     IConnectableLayer* replacement = optimizationViews.GetINetwork()->AddSubtractionLayer(name.c_str());
     LayerType* replacementLayer = PolymorphicDowncast<LayerType*>(replacement);
-    ARMNN_NO_DEPRECATE_WARN_END

     FuseLayer(optimizationViews,
               baseLayer,
@@ -119,10 +115,8 @@ LayerType* FuseDivisionLayer(OptimizationViews& optimizationViews,
                              ActivationDescriptor& activationDesc,
                              std::string name)
 {
-    ARMNN_NO_DEPRECATE_WARN_BEGIN
     IConnectableLayer* replacement = optimizationViews.GetINetwork()->AddDivisionLayer(name.c_str());
     LayerType* replacementLayer = PolymorphicDowncast<LayerType*>(replacement);
-    ARMNN_NO_DEPRECATE_WARN_END

     FuseLayer(optimizationViews,
               baseLayer,
@@ -140,10 +134,8 @@ LayerType* FuseMultiplicationLayer(OptimizationViews& optimizationViews,
                                    ActivationDescriptor& activationDesc,
                                    std::string name)
 {
-    ARMNN_NO_DEPRECATE_WARN_BEGIN
     IConnectableLayer* replacement = optimizationViews.GetINetwork()->AddMultiplicationLayer(name.c_str());
     LayerType* replacementLayer = PolymorphicDowncast<LayerType*>(replacement);
-    ARMNN_NO_DEPRECATE_WARN_END

     FuseLayer(optimizationViews,
               baseLayer,
diff --git a/src/backends/backendsCommon/WorkloadFactoryBase.hpp b/src/backends/backendsCommon/WorkloadFactoryBase.hpp
index e793b44cf4..00e549c933 100644
--- a/src/backends/backendsCommon/WorkloadFactoryBase.hpp
+++ b/src/backends/backendsCommon/WorkloadFactoryBase.hpp
@@ -1,5 +1,5 @@
 //
-// Copyright © 2019-2023 Arm Ltd. All rights reserved.
+// Copyright © 2019 Arm Ltd. All rights reserved.
 // SPDX-License-Identifier: MIT
 //
@@ -43,7 +43,6 @@ public:
                                               const WorkloadInfo& /*info*/) const override
     { return nullptr; }

-    ARMNN_DEPRECATED_MSG_REMOVAL_DATE("Use CreateElementwiseBinary instead", "24.02")
     std::unique_ptr<IWorkload> CreateAddition(const AdditionQueueDescriptor& /*descriptor*/,
                                               const WorkloadInfo& /*info*/) const override
     { return nullptr; }
@@ -104,7 +103,6 @@ public:
                                               const WorkloadInfo& /*info*/) const override
     { return nullptr; }

-    ARMNN_DEPRECATED_MSG_REMOVAL_DATE("Use CreateElementwiseBinary instead", "24.02")
     std::unique_ptr<IWorkload> CreateDivision(const DivisionQueueDescriptor& /*descriptor*/,
                                               const WorkloadInfo& /*info*/) const override
     { return nullptr; }
@@ -154,7 +152,6 @@ public:
                                              const WorkloadInfo& /*info*/) const override
     { return nullptr; }

-    ARMNN_DEPRECATED_MSG_REMOVAL_DATE("Use CreateElementwiseBinary instead", "24.02")
     std::unique_ptr<IWorkload> CreateMaximum(const MaximumQueueDescriptor& /*descriptor*/,
                                              const WorkloadInfo& /*info*/) const override
     { return nullptr; }
@@ -175,12 +172,10 @@ public:
                                              const WorkloadInfo& /*info*/) const override
     { return nullptr; }

-    ARMNN_DEPRECATED_MSG_REMOVAL_DATE("Use CreateElementwiseBinary instead", "24.02")
     std::unique_ptr<IWorkload> CreateMinimum(const MinimumQueueDescriptor& /*descriptor*/,
                                              const WorkloadInfo& /*info*/) const override
     { return nullptr; }

-    ARMNN_DEPRECATED_MSG_REMOVAL_DATE("Use CreateElementwiseBinary instead", "24.02")
     std::unique_ptr<IWorkload> CreateMultiplication(const MultiplicationQueueDescriptor& /*descriptor*/,
                                                     const WorkloadInfo& /*info*/) const override
     { return nullptr; }
@@ -253,7 +248,6 @@ public:
                                                  const WorkloadInfo& /*info*/) const override
     { return nullptr; }

-    ARMNN_DEPRECATED_MSG_REMOVAL_DATE("Use CreateElementwiseBinary instead", "24.02")
     std::unique_ptr<IWorkload> CreateSubtraction(const SubtractionQueueDescriptor& /*descriptor*/,
                                                  const WorkloadInfo& /*info*/) const override
     { return nullptr; }
diff --git a/src/backends/backendsCommon/test/AdditionEndToEndTestImpl.hpp b/src/backends/backendsCommon/test/AdditionEndToEndTestImpl.hpp
index a0d1af6ab7..f33521888f 100644
--- a/src/backends/backendsCommon/test/AdditionEndToEndTestImpl.hpp
+++ b/src/backends/backendsCommon/test/AdditionEndToEndTestImpl.hpp
@@ -1,5 +1,5 @@
 //
-// Copyright © 2022-2023 Arm Ltd and Contributors. All rights reserved.
+// Copyright © 2022 Arm Ltd and Contributors. All rights reserved.
 // SPDX-License-Identifier: MIT
 //
 #pragma once
@@ -30,7 +30,8 @@ armnn::INetworkPtr CreateAdditionNetwork(const armnn::TensorShape& inputXShape,

     TensorInfo outputTensorInfo(outputShape, DataType, qScale, qOffset);

-    IConnectableLayer* addition = network->AddElementwiseBinaryLayer(BinaryOperation::Add, "addition");
+
+    IConnectableLayer* addition = network->AddAdditionLayer("addition");
     IConnectableLayer* inputX = network->AddInputLayer(0, "inputX");
     IConnectableLayer* inputY = network->AddInputLayer(1, "inputY");
     IConnectableLayer* output = network->AddOutputLayer(0, "output");
diff --git a/src/backends/backendsCommon/test/EndToEndTestImpl.hpp b/src/backends/backendsCommon/test/EndToEndTestImpl.hpp
index 9213f0eac9..795fc13c32 100644
--- a/src/backends/backendsCommon/test/EndToEndTestImpl.hpp
+++ b/src/backends/backendsCommon/test/EndToEndTestImpl.hpp
@@ -39,7 +39,7 @@ bool ConstantUsageTest(const std::vector& computeDevice,

     IConnectableLayer* input = net->AddInputLayer(0);
     IConnectableLayer* constant = net->AddConstantLayer(ConstTensor(commonTensorInfo, constantData));
-    IConnectableLayer* add = net->AddElementwiseBinaryLayer(BinaryOperation::Add);
+    IConnectableLayer* add = net->AddAdditionLayer();
     IConnectableLayer* output = net->AddOutputLayer(0);

     input->GetOutputSlot(0).Connect(add->GetInputSlot(0));
@@ -176,8 +176,7 @@ void EndToEndLayerTestImpl(INetworkPtr network,
         for (unsigned int i = 0; i < out.size(); ++i)
         {
             CHECK_MESSAGE(Compare(it.second[i], out[i], tolerance) == true,
-                          "Position: " << i <<" Actual output: " << static_cast(out[i]) <<
-                          ". Expected output:" << static_cast(it.second[i]));
+                          "Actual output: " << out[i] << ". Expected output:" << it.second[i]);
         }
     }

diff --git a/src/backends/backendsCommon/test/IsLayerSupportedTestImpl.hpp b/src/backends/backendsCommon/test/IsLayerSupportedTestImpl.hpp
index 5475762a53..5b95d3cd92 100644
--- a/src/backends/backendsCommon/test/IsLayerSupportedTestImpl.hpp
+++ b/src/backends/backendsCommon/test/IsLayerSupportedTestImpl.hpp
@@ -622,9 +622,7 @@ struct LayerTypePolicy;
 // Every entry in the armnn::LayerType enum must be accounted for below.

 DECLARE_LAYER_POLICY_2_PARAM(Activation)

-ARMNN_NO_DEPRECATE_WARN_BEGIN
 DECLARE_LAYER_POLICY_1_PARAM(Addition)
-ARMNN_NO_DEPRECATE_WARN_END

 DECLARE_LAYER_POLICY_2_PARAM(ArgMinMax)
@@ -696,21 +694,15 @@ DECLARE_LAYER_POLICY_2_PARAM(Lstm)

 DECLARE_LAYER_POLICY_MAP_PARAM(Map, void)

-ARMNN_NO_DEPRECATE_WARN_BEGIN
 DECLARE_LAYER_POLICY_1_PARAM(Maximum)
-ARMNN_NO_DEPRECATE_WARN_END

 DECLARE_LAYER_POLICY_2_PARAM(Mean)

 DECLARE_LAYER_POLICY_1_PARAM(Merge)

-ARMNN_NO_DEPRECATE_WARN_BEGIN
 DECLARE_LAYER_POLICY_1_PARAM(Minimum)
-ARMNN_NO_DEPRECATE_WARN_END

-ARMNN_NO_DEPRECATE_WARN_BEGIN
 DECLARE_LAYER_POLICY_1_PARAM(Multiplication)
-ARMNN_NO_DEPRECATE_WARN_END

 DECLARE_LAYER_POLICY_2_PARAM(Normalization)
@@ -734,9 +726,7 @@ DECLARE_LAYER_POLICY_2_PARAM(QLstm)

 DECLARE_LAYER_POLICY_1_PARAM(QuantizedLstm)

-ARMNN_NO_DEPRECATE_WARN_BEGIN
 DECLARE_LAYER_POLICY_1_PARAM(Division)
-ARMNN_NO_DEPRECATE_WARN_END

 DECLARE_LAYER_POLICY_1_PARAM(Rank)
@@ -762,9 +752,7 @@ DECLARE_LAYER_POLICY_EXCEPTION_2_PARAM(StandIn)

 DECLARE_LAYER_POLICY_2_PARAM(StridedSlice)

-ARMNN_NO_DEPRECATE_WARN_BEGIN
 DECLARE_LAYER_POLICY_1_PARAM(Subtraction)
-ARMNN_NO_DEPRECATE_WARN_END

 DECLARE_LAYER_POLICY_2_PARAM(Reduce)
diff --git a/src/backends/backendsCommon/test/OptimizationViewsTests.cpp b/src/backends/backendsCommon/test/OptimizationViewsTests.cpp
index 665358b9c6..ff3217911a 100644
--- a/src/backends/backendsCommon/test/OptimizationViewsTests.cpp
+++ b/src/backends/backendsCommon/test/OptimizationViewsTests.cpp
@@ -263,9 +263,7 @@ TEST_CASE("OptimizeViewsValidateDeviceMockBackend")

     armnn::IConnectableLayer* input = net->AddInputLayer(0, "inLayer0");
     armnn::IConnectableLayer* input1 = net->AddInputLayer(1, "inLayer1");

-    ARMNN_NO_DEPRECATE_WARN_BEGIN
     armnn::IConnectableLayer* addition = net->AddAdditionLayer("addLayer");
-    ARMNN_NO_DEPRECATE_WARN_END

     armnn::IConnectableLayer* output = net->AddOutputLayer(0, "outLayer");
diff --git a/src/backends/backendsCommon/test/OptimizeSubgraphViewTests.cpp b/src/backends/backendsCommon/test/OptimizeSubgraphViewTests.cpp
index 7303733e17..f5a6c4217b 100644
--- a/src/backends/backendsCommon/test/OptimizeSubgraphViewTests.cpp
+++ b/src/backends/backendsCommon/test/OptimizeSubgraphViewTests.cpp
@@ -133,7 +133,6 @@ Pooling2dLayer* AddPoolingLayer(Graph& graph,
 }

 // Convenience function to add an addition layer to a graph
-ARMNN_NO_DEPRECATE_WARN_BEGIN
 AdditionLayer* AddAdditionaLayer(Graph& graph,
                                  LayerNameToLayerMap& layersInGraph,
                                  const std::string& layerName,
@@ -145,7 +144,6 @@ AdditionLayer* AddAdditionaLayer(Graph& graph,
     layersInGraph.insert(std::make_pair(additionLayer->GetName(), additionLayer));
     return additionLayer;
 }
-ARMNN_NO_DEPRECATE_WARN_END

 // Convenience function to check that the given substitution matches the specified expected values
 void CheckSubstitution(const OptimizationViews::SubstitutionPair& substitution,
@@ -752,9 +750,7 @@ SubgraphView::SubgraphViewPtr BuildPartiallyOptimizableSubgraph2(Graph& graph, L
                                                                    "conv2 layer unoptimizable", outputInfo);
     Convolution2dLayer* const conv3Layer = AddConvolutionLayer(graph, layersInGraph, convolutionDescriptor,
                                                                "conv3 layer", outputInfo);
-    ARMNN_NO_DEPRECATE_WARN_BEGIN
     AdditionLayer* const addLayer = AddAdditionaLayer(graph, layersInGraph, "add layer", outputInfo);
-    ARMNN_NO_DEPRECATE_WARN_END
     Layer* const outputLayer = AddOutputLayer(graph, "output layer");

     // Connect the network
diff --git a/src/backends/cl/ClBackend.cpp b/src/backends/cl/ClBackend.cpp
index 46ba9cb717..a10b6fbb43 100644
--- a/src/backends/cl/ClBackend.cpp
+++ b/src/backends/cl/ClBackend.cpp
@@ -461,7 +461,6 @@ OptimizationViews ClBackend::OptimizeSubgraphView(const SubgraphView& subgraph,
         }
         else if (base.GetType() == LayerType::Addition)
         {
-            ARMNN_NO_DEPRECATE_WARN_BEGIN
             AdditionLayer* baseLayer = PolymorphicDowncast<AdditionLayer*>(&base);

             arm_compute::Status status = ClAdditionValidate(
@@ -480,11 +479,9 @@ OptimizationViews ClBackend::OptimizeSubgraphView(const SubgraphView& subgraph,
                 untouched.erase(baseLayer->GetGuid());
                 untouched.erase(activationLayer->GetGuid());
             }
-            ARMNN_NO_DEPRECATE_WARN_END
         }
         else if (base.GetType() == LayerType::Division)
         {
-            ARMNN_NO_DEPRECATE_WARN_BEGIN
             DivisionLayer* baseLayer = PolymorphicDowncast<DivisionLayer*>(&base);

             arm_compute::Status status = ClDivisionWorkloadValidate(
@@ -503,11 +500,9 @@ OptimizationViews ClBackend::OptimizeSubgraphView(const SubgraphView& subgraph,
                 untouched.erase(baseLayer->GetGuid());
                 untouched.erase(activationLayer->GetGuid());
             }
-            ARMNN_NO_DEPRECATE_WARN_END
         }
         else if (base.GetType() == LayerType::Multiplication)
         {
-            ARMNN_NO_DEPRECATE_WARN_BEGIN
             MultiplicationLayer* baseLayer = PolymorphicDowncast<MultiplicationLayer*>(&base);

             arm_compute::Status status = ClMultiplicationWorkloadValidate(
@@ -526,11 +521,9 @@ OptimizationViews ClBackend::OptimizeSubgraphView(const SubgraphView& subgraph,
                 untouched.erase(baseLayer->GetGuid());
                 untouched.erase(activationLayer->GetGuid());
             }
-            ARMNN_NO_DEPRECATE_WARN_END
         }
         else if (base.GetType() == LayerType::Subtraction)
         {
-            ARMNN_NO_DEPRECATE_WARN_BEGIN
             SubtractionLayer* baseLayer = PolymorphicDowncast<SubtractionLayer*>(&base);

             arm_compute::Status status = ClSubtractionValidate(
@@ -549,7 +542,6 @@ OptimizationViews ClBackend::OptimizeSubgraphView(const SubgraphView& subgraph,
                 untouched.erase(baseLayer->GetGuid());
                 untouched.erase(activationLayer->GetGuid());
             }
-            ARMNN_NO_DEPRECATE_WARN_END
         }
         else if (base.GetType() == LayerType::ElementwiseBinary)
         {
diff --git a/src/backends/cl/ClLayerSupport.cpp b/src/backends/cl/ClLayerSupport.cpp
index e1266c8299..89bcf9bc01 100644
--- a/src/backends/cl/ClLayerSupport.cpp
+++ b/src/backends/cl/ClLayerSupport.cpp
@@ -346,9 +346,7 @@ bool ClLayerSupport::IsLayerSupported(const LayerType& type,
         case LayerType::Dequantize:
             return IsDequantizeSupported(infos[0], infos[1], reasonIfUnsupported);
         case LayerType::Division:
-            ARMNN_NO_DEPRECATE_WARN_BEGIN
             return IsDivisionSupported(infos[0], infos[1], infos[2], reasonIfUnsupported);
-            ARMNN_NO_DEPRECATE_WARN_END
         case LayerType::ElementwiseBinary:
         {
             auto desc = *(PolymorphicDowncast(&descriptor));
@@ -476,22 +474,16 @@ bool ClLayerSupport::IsLayerSupported(const LayerType& type,
                                            infos[2],
                                            reasonIfUnsupported);
         case LayerType::Maximum:
-            ARMNN_NO_DEPRECATE_WARN_BEGIN
             return IsMaximumSupported(infos[0], infos[1], infos[2], reasonIfUnsupported);
-            ARMNN_NO_DEPRECATE_WARN_END
         case LayerType::Mean:
             return IsMeanSupported(infos[0],
                                    infos[1],
                                    *(PolymorphicDowncast(&descriptor)),
                                    reasonIfUnsupported);
         case LayerType::Minimum:
-            ARMNN_NO_DEPRECATE_WARN_BEGIN
             return IsMinimumSupported(infos[0], infos[1], infos[2], reasonIfUnsupported);
-            ARMNN_NO_DEPRECATE_WARN_END
         case LayerType::Multiplication:
-            ARMNN_NO_DEPRECATE_WARN_BEGIN
             return IsMultiplicationSupported(infos[0], infos[1], infos[2], reasonIfUnsupported);
-            ARMNN_NO_DEPRECATE_WARN_END
         case LayerType::Normalization:
             return IsNormalizationSupported(infos[0],
                                             infos[1],
@@ -612,9 +604,7 @@ bool ClLayerSupport::IsLayerSupported(const LayerType& type,
                                             *(PolymorphicDowncast(&descriptor)),
                                             reasonIfUnsupported);
         case LayerType::Subtraction:
-            ARMNN_NO_DEPRECATE_WARN_BEGIN
             return IsSubtractionSupported(infos[0], infos[1], infos[2], reasonIfUnsupported);
-            ARMNN_NO_DEPRECATE_WARN_END
         case LayerType::Transpose:
             return IsTransposeSupported(infos[0],
                                         infos[1],
diff --git a/src/backends/cl/ClLayerSupport.hpp b/src/backends/cl/ClLayerSupport.hpp
index fa28141dcb..2d784e3df8 100644
--- a/src/backends/cl/ClLayerSupport.hpp
+++ b/src/backends/cl/ClLayerSupport.hpp
@@ -1,5 +1,5 @@
 //
-// Copyright © 2017-2023 Arm Ltd and Contributors. All rights reserved.
+// Copyright © 2017 Arm Ltd and Contributors. All rights reserved.
 // SPDX-License-Identifier: MIT
 //
 #pragma once
@@ -30,7 +30,6 @@ public:
                                    const ActivationDescriptor& descriptor,
                                    Optional reasonIfUnsupported = EmptyOptional()) const override;

-    ARMNN_DEPRECATED_MSG_REMOVAL_DATE("Use CreateElementwiseBinary instead", "24.02")
     bool IsAdditionSupported(const TensorInfo& input0,
                              const TensorInfo& input1,
                              const TensorInfo& output,
@@ -129,14 +128,13 @@ public:
                                       const Optional& biases,
                                       Optional reason = EmptyOptional()) const override;

-    ARMNN_DEPRECATED_MSG_REMOVAL_DATE("Use CreateElementwiseBinary instead", "24.02")
     bool IsDivisionSupported(const TensorInfo& input0,
                              const TensorInfo& input1,
                              const TensorInfo& output,
                              Optional reasonIfUnsupported = EmptyOptional()) const override;

     bool IsElementwiseUnarySupported(const TensorInfo& input,
-                                     const TensorInfo& output,
+                                     const TensorInfo& ouput,
                                      const ElementwiseUnaryDescriptor& descriptor,
                                      Optional reasonIfUnsupported = EmptyOptional()) const override;

@@ -202,7 +200,6 @@ public:
                           const LstmInputParamsInfo& paramsInfo,
                           Optional reasonIfUnsupported = EmptyOptional()) const override;

-    ARMNN_DEPRECATED_MSG_REMOVAL_DATE("Use CreateElementwiseBinary instead", "24.02")
     bool IsMaximumSupported(const TensorInfo& input0,
                             const TensorInfo& input1,
                             const TensorInfo& output,
@@ -213,13 +210,11 @@ public:
                          const MeanDescriptor& descriptor,
                          Optional reasonIfUnsupported = EmptyOptional()) const override;

-    ARMNN_DEPRECATED_MSG_REMOVAL_DATE("Use CreateElementwiseBinary instead", "24.02")
     bool IsMinimumSupported(const TensorInfo& input0,
                             const TensorInfo& input1,
                             const TensorInfo& output,
                             Optional reasonIfUnsupported = EmptyOptional()) const override;

-    ARMNN_DEPRECATED_MSG_REMOVAL_DATE("Use CreateElementwiseBinary instead", "24.02")
     bool IsMultiplicationSupported(const TensorInfo& input0,
                                    const TensorInfo& input1,
                                    const TensorInfo& output,
@@ -330,7 +325,6 @@ public:
                                   const StridedSliceDescriptor& descriptor,
                                   Optional reasonIfUnsupported = EmptyOptional()) const override;

-    ARMNN_DEPRECATED_MSG_REMOVAL_DATE("Use CreateElementwiseBinary instead", "24.02")
     bool IsSubtractionSupported(const TensorInfo& input0,
                                 const TensorInfo& input1,
                                 const TensorInfo& output,
diff --git a/src/backends/neon/NeonBackend.cpp b/src/backends/neon/NeonBackend.cpp
index c68f4ce95b..cea2aa3eba 100644
--- a/src/backends/neon/NeonBackend.cpp
+++ b/src/backends/neon/NeonBackend.cpp
@@ -313,7 +313,6 @@ OptimizationViews NeonBackend::OptimizeSubgraphView(const SubgraphView& subgraph
         }
         else if (base.GetType() == LayerType::Addition)
         {
-            ARMNN_NO_DEPRECATE_WARN_BEGIN
             AdditionLayer* baseLayer = PolymorphicDowncast<AdditionLayer*>(&base);

             arm_compute::Status status = NeonAdditionWorkloadValidate(
@@ -332,11 +331,9 @@ OptimizationViews NeonBackend::OptimizeSubgraphView(const SubgraphView& subgraph
                 untouched.erase(baseLayer->GetGuid());
                 untouched.erase(activationLayer->GetGuid());
             }
-            ARMNN_NO_DEPRECATE_WARN_END
         }
         else if (base.GetType() == LayerType::Division)
         {
-            ARMNN_NO_DEPRECATE_WARN_BEGIN
             DivisionLayer* baseLayer = PolymorphicDowncast<DivisionLayer*>(&base);

             arm_compute::Status status = NeonDivisionWorkloadValidate(
@@ -355,11 +352,9 @@ OptimizationViews NeonBackend::OptimizeSubgraphView(const SubgraphView& subgraph
                 untouched.erase(baseLayer->GetGuid());
                 untouched.erase(activationLayer->GetGuid());
             }
-            ARMNN_NO_DEPRECATE_WARN_END
         }
         else if (base.GetType() == LayerType::Multiplication)
         {
-            ARMNN_NO_DEPRECATE_WARN_BEGIN
             MultiplicationLayer* baseLayer = PolymorphicDowncast<MultiplicationLayer*>(&base);

             arm_compute::Status status = NeonMultiplicationWorkloadValidate(
@@ -378,11 +373,9 @@ OptimizationViews NeonBackend::OptimizeSubgraphView(const SubgraphView& subgraph
                 untouched.erase(baseLayer->GetGuid());
                 untouched.erase(activationLayer->GetGuid());
             }
-            ARMNN_NO_DEPRECATE_WARN_END
         }
         else if (base.GetType() == LayerType::Subtraction)
         {
-            ARMNN_NO_DEPRECATE_WARN_BEGIN
             SubtractionLayer* baseLayer = PolymorphicDowncast<SubtractionLayer*>(&base);

             arm_compute::Status status = NeonSubtractionWorkloadValidate(
@@ -401,7 +394,6 @@ OptimizationViews NeonBackend::OptimizeSubgraphView(const SubgraphView& subgraph
                 untouched.erase(baseLayer->GetGuid());
                 untouched.erase(activationLayer->GetGuid());
             }
-            ARMNN_NO_DEPRECATE_WARN_END
         }
         else if (base.GetType() == LayerType::ElementwiseBinary)
         {
diff --git a/src/backends/neon/test/NeonCreateWorkloadTests.cpp b/src/backends/neon/test/NeonCreateWorkloadTests.cpp
index 19881c26a0..66718cc481 100644
--- a/src/backends/neon/test/NeonCreateWorkloadTests.cpp
+++ b/src/backends/neon/test/NeonCreateWorkloadTests.cpp
@@ -1,5 +1,5 @@
 //
-// Copyright © 2017-2023 Arm Ltd and Contributors. All rights reserved.
+// Copyright © 2017 Arm Ltd and Contributors. All rights reserved.
 // SPDX-License-Identifier: MIT
 //
@@ -128,86 +128,70 @@ TEST_CASE("CreateAdditionFloat16Workload")

 TEST_CASE("CreateAdditionFloatWorkload")
 {
-    ARMNN_NO_DEPRECATE_WARN_BEGIN
     NeonCreateElementwiseWorkloadTest();
-    ARMNN_NO_DEPRECATE_WARN_END
 }

 #ifdef __ARM_FEATURE_FP16_VECTOR_ARITHMETIC
 TEST_CASE("CreateSubtractionFloat16Workload")
 {
-    ARMNN_NO_DEPRECATE_WARN_BEGIN
     NeonCreateElementwiseWorkloadTest();
-    ARMNN_NO_DEPRECATE_WARN_END
 }
 #endif

 TEST_CASE("CreateSubtractionFloatWorkload")
 {
-    ARMNN_NO_DEPRECATE_WARN_BEGIN
     NeonCreateElementwiseWorkloadTest();
-    ARMNN_NO_DEPRECATE_WARN_END
 }

 TEST_CASE("CreateSubtractionUint8Workload")
 {
-    ARMNN_NO_DEPRECATE_WARN_BEGIN
     NeonCreateElementwiseWorkloadTest();
-    ARMNN_NO_DEPRECATE_WARN_END
 }

 #ifdef __ARM_FEATURE_FP16_VECTOR_ARITHMETIC
 TEST_CASE("CreateMultiplicationFloat16Workload")
 {
-    ARMNN_NO_DEPRECATE_WARN_BEGIN
     NeonCreateElementwiseWorkloadTest();
-    ARMNN_NO_DEPRECATE_WARN_END
 }
 #endif

 TEST_CASE("CreateMultiplicationFloatWorkload")
 {
-    ARMNN_NO_DEPRECATE_WARN_BEGIN
     NeonCreateElementwiseWorkloadTest();
-    ARMNN_NO_DEPRECATE_WARN_END
 }

 TEST_CASE("CreateMultiplicationUint8Workload")
 {
-    ARMNN_NO_DEPRECATE_WARN_BEGIN
     NeonCreateElementwiseWorkloadTest();
-    ARMNN_NO_DEPRECATE_WARN_END
 }

 TEST_CASE("CreateDivisionFloatWorkloadTest")
 {
-    ARMNN_NO_DEPRECATE_WARN_BEGIN
     NeonCreateElementwiseWorkloadTest();
-    ARMNN_NO_DEPRECATE_WARN_END
 }

 template
diff --git a/src/backends/reference/test/RefCreateWorkloadTests.cpp b/src/backends/reference/test/RefCreateWorkloadTests.cpp
index 3bba0b7393..c46a9e5bac 100644
--- a/src/backends/reference/test/RefCreateWorkloadTests.cpp
+++ b/src/backends/reference/test/RefCreateWorkloadTests.cpp
@@ -1,5 +1,5 @@
 //
-// Copyright © 2017-2023 Arm Ltd and Contributors. All rights reserved.
+// Copyright © 2017 Arm Ltd and Contributors. All rights reserved.
 // SPDX-License-Identifier: MIT
 //
@@ -142,182 +142,146 @@ TEST_CASE("CreateMultiplicationWorkloadWithBlobTest")

 TEST_CASE("CreateAdditionFloatWorkload")
 {
-    ARMNN_NO_DEPRECATE_WARN_BEGIN
     RefCreateElementwiseWorkloadTest,
         AdditionQueueDescriptor,
         AdditionLayer,
         armnn::DataType::Float32>();
-    ARMNN_NO_DEPRECATE_WARN_END
 }

 TEST_CASE("CreateAdditionUint8Workload")
 {
-    ARMNN_NO_DEPRECATE_WARN_BEGIN
     RefCreateElementwiseWorkloadTest,
         AdditionQueueDescriptor,
         AdditionLayer,
         armnn::DataType::QAsymmU8>();
-    ARMNN_NO_DEPRECATE_WARN_END
 }

 TEST_CASE("CreateAdditionInt16Workload")
 {
-    ARMNN_NO_DEPRECATE_WARN_BEGIN
     RefCreateElementwiseWorkloadTest,
         AdditionQueueDescriptor,
         AdditionLayer,
         armnn::DataType::QSymmS16>();
-    ARMNN_NO_DEPRECATE_WARN_END
 }

 TEST_CASE("CreateAdditionInt32Workload")
 {
-    ARMNN_NO_DEPRECATE_WARN_BEGIN
     RefCreateElementwiseWorkloadTest,
         AdditionQueueDescriptor,
         AdditionLayer,
         armnn::DataType::Signed32>();
-    ARMNN_NO_DEPRECATE_WARN_END
 }

 TEST_CASE("CreateSubtractionFloat32Workload")
 {
-    ARMNN_NO_DEPRECATE_WARN_BEGIN
     RefCreateElementwiseWorkloadTest,
         SubtractionQueueDescriptor,
         SubtractionLayer,
         armnn::DataType::Float32>();
-    ARMNN_NO_DEPRECATE_WARN_END
 }

 TEST_CASE("CreateSubtractionFloat16Workload")
 {
-    ARMNN_NO_DEPRECATE_WARN_BEGIN
     RefCreateElementwiseWorkloadTest,
         SubtractionQueueDescriptor,
         SubtractionLayer,
         armnn::DataType::Float16>();
-    ARMNN_NO_DEPRECATE_WARN_END
 }

 TEST_CASE("CreateSubtractionUint8Workload")
 {
-    ARMNN_NO_DEPRECATE_WARN_BEGIN
     RefCreateElementwiseWorkloadTest,
         SubtractionQueueDescriptor,
         SubtractionLayer,
         armnn::DataType::QAsymmU8>();
-    ARMNN_NO_DEPRECATE_WARN_END
 }

 TEST_CASE("CreateSubtractionInt16Workload")
 {
-    ARMNN_NO_DEPRECATE_WARN_BEGIN
     RefCreateElementwiseWorkloadTest,
         SubtractionQueueDescriptor,
         SubtractionLayer,
         armnn::DataType::QSymmS16>();
-    ARMNN_NO_DEPRECATE_WARN_END
 }

 TEST_CASE("CreateSubtractionInt32Workload")
 {
-    ARMNN_NO_DEPRECATE_WARN_BEGIN
     RefCreateElementwiseWorkloadTest,
         SubtractionQueueDescriptor,
         SubtractionLayer,
         armnn::DataType::Signed32>();
-    ARMNN_NO_DEPRECATE_WARN_END
 }

 TEST_CASE("CreateMultiplicationFloatWorkload")
 {
-    ARMNN_NO_DEPRECATE_WARN_BEGIN
     RefCreateElementwiseWorkloadTest,
         MultiplicationQueueDescriptor,
         MultiplicationLayer,
         armnn::DataType::Float32>();
-    ARMNN_NO_DEPRECATE_WARN_END
 }

 TEST_CASE("CreateMultiplicationUint8Workload")
 {
-    ARMNN_NO_DEPRECATE_WARN_BEGIN
     RefCreateElementwiseWorkloadTest,
         MultiplicationQueueDescriptor,
         MultiplicationLayer,
         armnn::DataType::QAsymmU8>();
-    ARMNN_NO_DEPRECATE_WARN_END
 }

 TEST_CASE("CreateMultiplicationInt16Workload")
 {
-    ARMNN_NO_DEPRECATE_WARN_BEGIN
     RefCreateElementwiseWorkloadTest,
         MultiplicationQueueDescriptor,
         MultiplicationLayer,
         armnn::DataType::QSymmS16>();
-    ARMNN_NO_DEPRECATE_WARN_END
 }

 TEST_CASE("CreateMultiplicationInt32Workload")
 {
-    ARMNN_NO_DEPRECATE_WARN_BEGIN
     RefCreateElementwiseWorkloadTest,
         MultiplicationQueueDescriptor,
         MultiplicationLayer,
         armnn::DataType::Signed32>();
-    ARMNN_NO_DEPRECATE_WARN_END
 }

 TEST_CASE("CreateDivisionFloat32Workload")
 {
-    ARMNN_NO_DEPRECATE_WARN_BEGIN
     RefCreateElementwiseWorkloadTest,
         DivisionQueueDescriptor,
         DivisionLayer,
         armnn::DataType::Float32>();
-    ARMNN_NO_DEPRECATE_WARN_END
 }

 TEST_CASE("CreateDivisionFloat16Workload")
 {
-    ARMNN_NO_DEPRECATE_WARN_BEGIN
     RefCreateElementwiseWorkloadTest,
         DivisionQueueDescriptor,
         DivisionLayer,
         armnn::DataType::Float16>();
-    ARMNN_NO_DEPRECATE_WARN_END
 }

 TEST_CASE("CreateDivisionUint8Workload")
 {
-    ARMNN_NO_DEPRECATE_WARN_BEGIN
     RefCreateElementwiseWorkloadTest,
         DivisionQueueDescriptor,
         DivisionLayer,
         armnn::DataType::QAsymmU8>();
-    ARMNN_NO_DEPRECATE_WARN_END
 }

 TEST_CASE("CreateDivisionInt16Workload")
 {
-    ARMNN_NO_DEPRECATE_WARN_BEGIN
     RefCreateElementwiseWorkloadTest,
         DivisionQueueDescriptor,
         DivisionLayer,
         armnn::DataType::QSymmS16>();
-    ARMNN_NO_DEPRECATE_WARN_END
 }

 TEST_CASE("CreateDivisionInt32Workload")
 {
-    ARMNN_NO_DEPRECATE_WARN_BEGIN
     RefCreateElementwiseWorkloadTest,
         DivisionQueueDescriptor,
         DivisionLayer,
         armnn::DataType::Signed32>();
-    ARMNN_NO_DEPRECATE_WARN_END
 }

 template
--
cgit v1.2.1
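
For context on the API pair this revert touches: the test hunks above swap network->AddElementwiseBinaryLayer(BinaryOperation::Add, ...) back to the older network->AddAdditionLayer(...). The sketch below shows the two calls side by side on a tiny network. It is illustrative only and not part of the patch; the helper name, tensor shape, and layer names are assumptions.

#include <armnn/INetwork.hpp>
#include <armnn/Descriptors.hpp>
#include <armnn/Tensor.hpp>

// Builds inputX + inputY -> output using either the ElementwiseBinary API or the
// older per-operation Addition API (the one this revert keeps warning-free).
armnn::INetworkPtr BuildAdditionSketch(bool useElementwiseBinaryApi)
{
    using namespace armnn;

    INetworkPtr net = INetwork::Create();
    TensorInfo info({ 1, 2, 2, 2 }, DataType::Float32);   // assumed shape

    IConnectableLayer* inputX = net->AddInputLayer(0, "inputX");
    IConnectableLayer* inputY = net->AddInputLayer(1, "inputY");

    IConnectableLayer* add = nullptr;
    if (useElementwiseBinaryApi)
    {
        // Newer API: a single layer type parameterised by the binary operation.
        add = net->AddElementwiseBinaryLayer(BinaryOperation::Add, "addition");
    }
    else
    {
        // Older API whose deprecation notices are removed by this revert.
        add = net->AddAdditionLayer("addition");
    }

    IConnectableLayer* output = net->AddOutputLayer(0, "output");

    inputX->GetOutputSlot(0).Connect(add->GetInputSlot(0));
    inputY->GetOutputSlot(0).Connect(add->GetInputSlot(1));
    add->GetOutputSlot(0).Connect(output->GetInputSlot(0));

    inputX->GetOutputSlot(0).SetTensorInfo(info);
    inputY->GetOutputSlot(0).SetTensorInfo(info);
    add->GetOutputSlot(0).SetTensorInfo(info);

    return net;
}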