| Field | Value | Date |
|---|---|---|
| author | Teresa Charlin <teresa.charlinreyes@arm.com> | 2021-12-03 16:07:42 +0000 |
| committer | Mike Kelly <mike.kelly@arm.com> | 2023-03-14 18:30:34 +0000 |
| commit | ee5872d95455351458ad4373176360200594daa1 (patch) | |
| tree | 63207fa1e397e1c3cf87072d93c490a234935a8c /ConversionUtils_1_2.hpp | |
| parent | de547168f108ec1494f18b3ab1ea50bd09f370c1 (diff) | |
| download | android-nn-driver-ee5872d95455351458ad4373176360200594daa1.tar.gz | |
IVGCVSW-3809 Refactor Elementwise Binary ops to use ElementwiseBinaryLayer
!armnn:9319
* Refactored all functions that convert Add, Div, Maximum, Minimum, Mul
  and Sub to use ElementwiseBinary layers instead.
Signed-off-by: Teresa Charlin <teresa.charlinreyes@arm.com>
Signed-off-by: Mike Kelly <mike.kelly@arm.com>
Change-Id: Ic05885cd8692e7f1b7032862fb4a395af70e0bcd
Signed-off-by: Mike Kelly <mike.kelly@arm.com>
Change-Id: If9717d6ab236d97c76f6cd39b96bde86c81e4382
Diffstat (limited to 'ConversionUtils_1_2.hpp')
| Mode | File | Lines changed |
|---|---|---|
| -rw-r--r-- | ConversionUtils_1_2.hpp | 138 |

1 file changed, 1 insertion, 137 deletions
```diff
diff --git a/ConversionUtils_1_2.hpp b/ConversionUtils_1_2.hpp
index ce6be440..2ad14c2f 100644
--- a/ConversionUtils_1_2.hpp
+++ b/ConversionUtils_1_2.hpp
@@ -1,5 +1,5 @@
 //
-// Copyright © 2020,2022 Arm Ltd and Contributors. All rights reserved.
+// Copyright © 2020-2023 Arm Ltd and Contributors. All rights reserved.
 // SPDX-License-Identifier: MIT
 //
@@ -1555,142 +1555,6 @@ bool ConvertLogSoftmax(const HalOperation& operation, const HalModel& model, Con
 template<typename HalPolicy,
          typename HalOperation = typename HalPolicy::Operation,
          typename HalModel = typename HalPolicy::Model>
-bool ConvertMaximum(const HalOperation& operation, const HalModel& model, ConversionData& data)
-{
-    using HalOperand = typename HalPolicy::Operand;
-
-    ALOGV("HalPolicy::ConvertMaximum()");
-
-    LayerInputHandle input0 = ConvertToLayerInputHandle<HalPolicy>(operation, 0, model, data);
-    LayerInputHandle input1 = ConvertToLayerInputHandle<HalPolicy>(operation, 1, model, data);
-
-    if (!input0.IsValid() || !input1.IsValid())
-    {
-        return Fail("%s: Operation has invalid inputs", __func__);
-    }
-
-    const HalOperand* outputOperand = GetOutputOperand<HalPolicy>(operation, 0, model);
-    if (!outputOperand)
-    {
-        return Fail("%s: Could not read output", __func__);
-    }
-
-    const TensorInfo& outInfo = GetTensorInfoForOperand(*outputOperand);
-
-    bool isSupported = false;
-    armnn::BackendId setBackend;
-    auto validateFunc = [&](const armnn::TensorInfo& outInfo, bool& isSupported)
-    {
-        FORWARD_LAYER_SUPPORT_FUNC(__func__,
-                                   IsMaximumSupported,
-                                   data.m_Backends,
-                                   isSupported,
-                                   setBackend,
-                                   input0.GetTensorInfo(),
-                                   input1.GetTensorInfo(),
-                                   outInfo);
-    };
-
-    if(IsDynamicTensor(outInfo))
-    {
-        isSupported = AreDynamicTensorsSupported();
-    }
-    else
-    {
-        validateFunc(outInfo, isSupported);
-    }
-
-    if (!isSupported)
-    {
-        return false;
-    }
-
-    IConnectableLayer* layer = data.m_Network->AddMaximumLayer();
-    layer->SetBackendId(setBackend);
-    if (!layer)
-    {
-        return Fail("%s: Could not add the MaximumLayer", __func__);
-    }
-    bool isReshapeSupported = BroadcastTensor(input0, input1, layer, data);
-    if (!isReshapeSupported)
-    {
-        return false;
-    }
-
-    return SetupAndTrackLayerOutputSlot<HalPolicy>(operation, 0, *layer, model, data, nullptr, validateFunc);
-}
-
-template<typename HalPolicy,
-         typename HalOperation = typename HalPolicy::Operation,
-         typename HalModel = typename HalPolicy::Model>
-bool ConvertMinimum(const HalOperation& operation, const HalModel& model, ConversionData& data)
-{
-    using HalOperand = typename HalPolicy::Operand;
-
-    ALOGV("HalPolicy::ConvertMinimum()");
-
-    LayerInputHandle input0 = ConvertToLayerInputHandle<HalPolicy>(operation, 0, model, data);
-    LayerInputHandle input1 = ConvertToLayerInputHandle<HalPolicy>(operation, 1, model, data);
-
-    if (!input0.IsValid() || !input1.IsValid())
-    {
-        return Fail("%s: Operation has invalid inputs", __func__);
-    }
-
-    const HalOperand* output = GetOutputOperand<HalPolicy>(operation, 0, model);
-    if (!output)
-    {
-        return Fail("%s: Could not read output 0", __func__);
-    }
-
-    const TensorInfo& outputInfo = GetTensorInfoForOperand(*output);
-
-    bool isSupported = false;
-    armnn::BackendId setBackend;
-    auto validateFunc = [&](const armnn::TensorInfo& outputInfo, bool& isSupported)
-    {
-        FORWARD_LAYER_SUPPORT_FUNC(__func__,
-                                   IsMinimumSupported,
-                                   data.m_Backends,
-                                   isSupported,
-                                   setBackend,
-                                   input0.GetTensorInfo(),
-                                   input1.GetTensorInfo(),
-                                   outputInfo);
-    };
-
-    if(IsDynamicTensor(outputInfo))
-    {
-        isSupported = AreDynamicTensorsSupported();
-    }
-    else
-    {
-        validateFunc(outputInfo, isSupported);
-    }
-
-    if (!isSupported)
-    {
-        return false;
-    }
-
-    IConnectableLayer* const layer = data.m_Network->AddMinimumLayer();
-    layer->SetBackendId(setBackend);
-    if (!layer)
-    {
-        return Fail("%s: Could not add the MinimumLayer", __func__);
-    }
-    bool isReshapeSupported = BroadcastTensor(input0, input1, layer, data);
-    if (!isReshapeSupported)
-    {
-        return false;
-    }
-
-    return SetupAndTrackLayerOutputSlot<HalPolicy>(operation, 0, *layer, model, data, nullptr, validateFunc);
-}
-
-template<typename HalPolicy,
-         typename HalOperation = typename HalPolicy::Operation,
-         typename HalModel = typename HalPolicy::Model>
 bool ConvertPadV2(const HalOperation& operation, const HalModel& model, ConversionData& data)
 {
     using HalOperand = typename HalPolicy::Operand;
```
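For context, the per-operation converters removed above are superseded by a single shared conversion path that parameterises one ElementwiseBinary layer with a `BinaryOperation`. The sketch below is illustrative only: the helper name `ConvertElementwiseBinarySketch` is hypothetical, the real replacement lives in ConversionUtils.hpp and is not part of this diff, and the `IsElementwiseBinarySupported` check plus the `ElementwiseBinaryDescriptor` / `AddElementwiseBinaryLayer` calls are assumed from the ArmNN 23.x API.

```cpp
// Illustrative sketch only (not the code added by this change). Assumes the
// ArmNN 23.x ElementwiseBinaryDescriptor / BinaryOperation API; the helper
// name is hypothetical.
template<typename HalPolicy,
         typename HalOperation = typename HalPolicy::Operation,
         typename HalModel = typename HalPolicy::Model>
bool ConvertElementwiseBinarySketch(const HalOperation& operation,
                                    const HalModel& model,
                                    ConversionData& data,
                                    armnn::BinaryOperation binaryOperation)
{
    using HalOperand = typename HalPolicy::Operand;

    LayerInputHandle input0 = ConvertToLayerInputHandle<HalPolicy>(operation, 0, model, data);
    LayerInputHandle input1 = ConvertToLayerInputHandle<HalPolicy>(operation, 1, model, data);
    if (!input0.IsValid() || !input1.IsValid())
    {
        return Fail("%s: Operation has invalid inputs", __func__);
    }

    const HalOperand* output = GetOutputOperand<HalPolicy>(operation, 0, model);
    if (!output)
    {
        return Fail("%s: Could not read output 0", __func__);
    }
    const armnn::TensorInfo& outputInfo = GetTensorInfoForOperand(*output);

    // One descriptor parameterises Add/Div/Maximum/Minimum/Mul/Sub instead of
    // six dedicated layer types.
    armnn::ElementwiseBinaryDescriptor descriptor(binaryOperation);

    bool isSupported = false;
    armnn::BackendId setBackend;
    auto validateFunc = [&](const armnn::TensorInfo& outInfo, bool& supported)
    {
        // Assumed support-check name and argument list; the exact signature
        // used by the driver may differ.
        FORWARD_LAYER_SUPPORT_FUNC(__func__,
                                   IsElementwiseBinarySupported,
                                   data.m_Backends,
                                   supported,
                                   setBackend,
                                   input0.GetTensorInfo(),
                                   input1.GetTensorInfo(),
                                   outInfo,
                                   binaryOperation);
    };

    if (IsDynamicTensor(outputInfo))
    {
        isSupported = AreDynamicTensorsSupported();
    }
    else
    {
        validateFunc(outputInfo, isSupported);
    }
    if (!isSupported)
    {
        return false;
    }

    armnn::IConnectableLayer* layer = data.m_Network->AddElementwiseBinaryLayer(descriptor);
    if (!layer)
    {
        return Fail("%s: Could not add the ElementwiseBinaryLayer", __func__);
    }
    layer->SetBackendId(setBackend);

    if (!BroadcastTensor(input0, input1, layer, data))
    {
        return false;
    }
    return SetupAndTrackLayerOutputSlot<HalPolicy>(operation, 0, *layer, model, data, nullptr, validateFunc);
}
```

With a helper along these lines, the six HAL operations reduce to thin dispatchers: for example, a MAXIMUM conversion would call it with `armnn::BinaryOperation::Maximum` and a MINIMUM conversion with `armnn::BinaryOperation::Minimum`, which is why the dedicated ConvertMaximum and ConvertMinimum bodies could be deleted from this file.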