From 0629eb8198e705436a6c514751ba51a2568c7f8b Mon Sep 17 00:00:00 2001
From: Narumol Prangnawarat
Date: Thu, 12 Nov 2020 18:27:37 +0000
Subject: IVGCVSW-5090 Add android-nn-driver Support for Logical operators

* Add ConvertLogicalBinary
* Add support for LOGICAL_AND, LOGICAL_NOT, LOGICAL_OR

Signed-off-by: Narumol Prangnawarat
Change-Id: I4dcecbebe031915b8c237fd5b9b92c8844847fb7
---
 ConversionUtils_1_3.hpp | 73 +++++++++++++++++++++++++++++++++++++++++++++++++
 1 file changed, 73 insertions(+)

diff --git a/ConversionUtils_1_3.hpp b/ConversionUtils_1_3.hpp
index a7f00fc3..150735e9 100644
--- a/ConversionUtils_1_3.hpp
+++ b/ConversionUtils_1_3.hpp
@@ -150,6 +150,79 @@ bool ConvertFill(const HalOperation& operation, const HalModel& model, Conversio
     return SetupAndTrackLayerOutputSlot<HalPolicy>(operation, 0, *layer, model, data);
 }
 
+template<typename HalPolicy,
+         typename HalOperation = typename HalPolicy::Operation,
+         typename HalModel     = typename HalPolicy::Model>
+bool ConvertLogicalBinary(const HalOperation& operation,
+                          const HalModel& model,
+                          ConversionData& data,
+                          LogicalBinaryOperation logicalOperation)
+{
+    using HalOperand = typename HalPolicy::Operand;
+
+    ALOGV("HalPolicy::ConvertLogicalBinary()");
+    ALOGV("logicalOperation = %s", GetLogicalBinaryOperationAsCString(logicalOperation));
+
+    LayerInputHandle input0 = ConvertToLayerInputHandle<HalPolicy>(operation, 0, model, data);
+    LayerInputHandle input1 = ConvertToLayerInputHandle<HalPolicy>(operation, 1, model, data);
+
+    if (!(input0.IsValid() && input1.IsValid()))
+    {
+        return Fail("%s: Operation has invalid inputs", __func__);
+    }
+
+    const HalOperand* output = GetOutputOperand<HalPolicy>(operation, 0, model);
+    if (!output)
+    {
+        return Fail("%s: Could not read output 0", __func__);
+    }
+
+    const TensorInfo& inputInfo0 = input0.GetTensorInfo();
+    const TensorInfo& inputInfo1 = input1.GetTensorInfo();
+    const TensorInfo& outputInfo = GetTensorInfoForOperand(*output);
+
+    LogicalBinaryDescriptor descriptor(logicalOperation);
+
+    bool isSupported = false;
+
+    auto validateFunc = [&](const armnn::TensorInfo& outputInfo, bool& isSupported)
+    {
+        FORWARD_LAYER_SUPPORT_FUNC(__func__,
+                                   IsLogicalBinarySupported,
+                                   data.m_Backends,
+                                   isSupported,
+                                   inputInfo0,
+                                   inputInfo1,
+                                   outputInfo,
+                                   descriptor);
+    };
+
+    if(!IsDynamicTensor(outputInfo))
+    {
+        validateFunc(outputInfo, isSupported);
+    }
+    else
+    {
+        isSupported = AreDynamicTensorsSupported();
+    }
+
+    if (!isSupported)
+    {
+        return false;
+    }
+
+    IConnectableLayer* layer = data.m_Network->AddLogicalBinaryLayer(descriptor);
+    assert(layer != nullptr);
+
+    bool isReshapeSupported = BroadcastTensor(input0, input1, layer, data);
+    if (!isReshapeSupported)
+    {
+        return false;
+    }
+
+    return SetupAndTrackLayerOutputSlot<HalPolicy>(operation, 0, *layer, model, data, nullptr, validateFunc);
+}
+
 template<typename HalPolicy,
          typename HalOperation = typename HalPolicy::Operation,
          typename HalModel     = typename HalPolicy::Model>
--
cgit v1.2.1
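
Editor's note (not part of the patch): the sketch below is a minimal, illustrative use of
ArmNN's public INetwork C++ API, assuming the 20.11-or-later release headers, showing the
kind of LogicalBinary layer that ConvertLogicalBinary creates through AddLogicalBinaryLayer.
The tensor shapes, layer names, and main() scaffolding are assumptions for illustration and
are not taken from the driver code.

    // Illustrative only: builds a tiny graph containing the LogicalBinary (AND) layer
    // that the driver's ConvertLogicalBinary maps LOGICAL_AND onto.
    #include <armnn/Descriptors.hpp>
    #include <armnn/INetwork.hpp>
    #include <armnn/Tensor.hpp>
    #include <armnn/Types.hpp>

    int main()
    {
        using namespace armnn;

        INetworkPtr network = INetwork::Create();

        // Two boolean inputs and one boolean output; the 1x2x2x1 shape is illustrative.
        TensorInfo boolInfo(TensorShape({1, 2, 2, 1}), DataType::Boolean);

        IConnectableLayer* input0 = network->AddInputLayer(0, "input0");
        IConnectableLayer* input1 = network->AddInputLayer(1, "input1");

        // The descriptor carries the operation; LogicalOr would correspond to LOGICAL_OR.
        LogicalBinaryDescriptor descriptor(LogicalBinaryOperation::LogicalAnd);
        IConnectableLayer* logicalAnd = network->AddLogicalBinaryLayer(descriptor, "logical_and");

        IConnectableLayer* output = network->AddOutputLayer(0, "output");

        // Annotate the tensors and wire up the graph.
        input0->GetOutputSlot(0).SetTensorInfo(boolInfo);
        input1->GetOutputSlot(0).SetTensorInfo(boolInfo);
        logicalAnd->GetOutputSlot(0).SetTensorInfo(boolInfo);

        input0->GetOutputSlot(0).Connect(logicalAnd->GetInputSlot(0));
        input1->GetOutputSlot(0).Connect(logicalAnd->GetInputSlot(1));
        logicalAnd->GetOutputSlot(0).Connect(output->GetInputSlot(0));

        // Executing the graph would additionally require an IRuntime and a backend
        // (e.g. CpuRef) that reports IsLogicalBinarySupported for these tensors.
        return 0;
    }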