about summary refs log tree commit diff
diff options
context:
space:
mode:
author: Narumol Prangnawarat <narpra01@e125232.sheffield.arm.com> 2020-11-12 18:27:37 +0000
committer: Narumol Prangnawarat <narumol.prangnawarat@arm.com> 2020-11-12 19:39:59 +0000
commit0629eb8198e705436a6c514751ba51a2568c7f8b (patch)
tree8c1b66e005df149fdb068466babc8ecd552041ef
parentd7fdbe2306b9ebaba0aa625d8be14114f8631054 (diff)
downloadandroid-nn-driver-0629eb8198e705436a6c514751ba51a2568c7f8b.tar.gz
IVGCVSW-5090 Add android-nn-driver Support for Logical operators
* Add ConvertLogicalBinary
* Add support for LOGICAL_AND, LOGICAL_NOT, LOGICAL_OR

Signed-off-by: Narumol Prangnawarat <narumol.prangnawarat@arm.com>
Change-Id: I4dcecbebe031915b8c237fd5b9b92c8844847fb7
-rw-r--r--  1.3/HalPolicy.cpp        15
-rw-r--r--  1.3/HalPolicy.hpp         5
-rw-r--r--  ConversionUtils_1_3.hpp  73
-rw-r--r--  NnapiSupport.txt          3
4 files changed, 96 insertions, 0 deletions
diff --git a/1.3/HalPolicy.cpp b/1.3/HalPolicy.cpp
index 37deec3d..5e456b8a 100644
--- a/1.3/HalPolicy.cpp
+++ b/1.3/HalPolicy.cpp
@@ -81,6 +81,12 @@ bool HalPolicy::ConvertOperation(const Operation& operation, const Model& model,
return ConvertComparison(operation, model, data, ComparisonOperation::LessOrEqual);
case V1_3::OperationType::LOCAL_RESPONSE_NORMALIZATION:
return ConvertLocalResponseNormalization(operation, model, data);
+ case V1_3::OperationType::LOGICAL_AND:
+ return ConvertLogicalBinary(operation, model, data, LogicalBinaryOperation::LogicalAnd);
+ case V1_3::OperationType::LOGICAL_NOT:
+ return ConvertElementwiseUnary(operation, model, data, UnaryOperation::LogicalNot);
+ case V1_3::OperationType::LOGICAL_OR:
+ return ConvertLogicalBinary(operation, model, data, LogicalBinaryOperation::LogicalOr);
case V1_3::OperationType::LOGISTIC:
return ConvertLogistic(operation, model, data);
case V1_3::OperationType::LOG_SOFTMAX:
@@ -314,6 +320,15 @@ bool HalPolicy::ConvertLocalResponseNormalization(const Operation& operation,
return ::ConvertLocalResponseNormalization<hal_1_3::HalPolicy>(operation, model, data);
}
+bool HalPolicy::ConvertLogicalBinary(const Operation& operation,
+ const Model& model,
+ ConversionData& data,
+ armnn::LogicalBinaryOperation logicalOperation)
+{
+ ALOGV("hal_1_3::HalPolicy::ConvertLogicalBinary()");
+ return ::ConvertLogicalBinary<hal_1_3::HalPolicy>(operation, model, data, logicalOperation);
+}
+
bool HalPolicy::ConvertLogistic(const Operation& operation, const Model& model, ConversionData& data)
{
ALOGV("hal_1_3::HalPolicy::ConvertLogistic()");
diff --git a/1.3/HalPolicy.hpp b/1.3/HalPolicy.hpp
index 0eb5f4d7..f82a5ef7 100644
--- a/1.3/HalPolicy.hpp
+++ b/1.3/HalPolicy.hpp
@@ -95,6 +95,11 @@ private:
const Model& model,
ConversionData& data);
+ static bool ConvertLogicalBinary(const Operation& operation,
+ const Model& model,
+ ConversionData& data,
+ armnn::LogicalBinaryOperation logicalOperation);
+
static bool ConvertLogistic(const Operation& operation, const Model& model, ConversionData& data);
static bool ConvertLogSoftmax(const Operation& operation, const Model& model, ConversionData& data);
diff --git a/ConversionUtils_1_3.hpp b/ConversionUtils_1_3.hpp
index a7f00fc3..150735e9 100644
--- a/ConversionUtils_1_3.hpp
+++ b/ConversionUtils_1_3.hpp
@@ -153,6 +153,79 @@ bool ConvertFill(const HalOperation& operation, const HalModel& model, Conversio
template<typename HalPolicy,
typename HalOperation = typename HalPolicy::Operation,
typename HalModel = typename HalPolicy::Model>
+bool ConvertLogicalBinary(const HalOperation& operation,
+ const HalModel& model,
+ ConversionData& data,
+ LogicalBinaryOperation logicalOperation)
+{
+ using HalOperand = typename HalPolicy::Operand;
+
+ ALOGV("HalPolicy::ConvertLogicalBinary()");
+ ALOGV("logicalOperation = %s", GetLogicalBinaryOperationAsCString(logicalOperation));
+
+ LayerInputHandle input0 = ConvertToLayerInputHandle<HalPolicy>(operation, 0, model, data);
+ LayerInputHandle input1 = ConvertToLayerInputHandle<HalPolicy>(operation, 1, model, data);
+
+ if (!(input0.IsValid() && input1.IsValid()))
+ {
+ return Fail("%s: Operation has invalid inputs", __func__);
+ }
+
+ const HalOperand* output = GetOutputOperand<HalPolicy>(operation, 0, model);
+ if (!output)
+ {
+ return Fail("%s: Could not read output 0", __func__);
+ }
+
+ const TensorInfo& inputInfo0 = input0.GetTensorInfo();
+ const TensorInfo& inputInfo1 = input1.GetTensorInfo();
+ const TensorInfo& outputInfo = GetTensorInfoForOperand(*output);
+
+ LogicalBinaryDescriptor descriptor(logicalOperation);
+
+ bool isSupported = false;
+
+ auto validateFunc = [&](const armnn::TensorInfo& outputInfo, bool& isSupported)
+ {
+ FORWARD_LAYER_SUPPORT_FUNC(__func__,
+ IsLogicalBinarySupported,
+ data.m_Backends,
+ isSupported,
+ inputInfo0,
+ inputInfo1,
+ outputInfo,
+ descriptor);
+ };
+
+ if(!IsDynamicTensor(outputInfo))
+ {
+ validateFunc(outputInfo, isSupported);
+ }
+ else
+ {
+ isSupported = AreDynamicTensorsSupported();
+ }
+
+ if (!isSupported)
+ {
+ return false;
+ }
+
+ IConnectableLayer* layer = data.m_Network->AddLogicalBinaryLayer(descriptor);
+ assert(layer != nullptr);
+
+ bool isReshapeSupported = BroadcastTensor(input0, input1, layer, data);
+ if (!isReshapeSupported)
+ {
+ return false;
+ }
+
+ return SetupAndTrackLayerOutputSlot<HalPolicy>(operation, 0, *layer, model, data, nullptr, validateFunc);
+}
+
+template<typename HalPolicy,
+ typename HalOperation = typename HalPolicy::Operation,
+ typename HalModel = typename HalPolicy::Model>
bool ConvertQuantizedLstm(const HalOperation& operation, const HalModel& model, ConversionData& data)
{
using HalOperand = typename HalPolicy::Operand;
diff --git a/NnapiSupport.txt b/NnapiSupport.txt
index 2b6eaca0..e0400e1f 100644
--- a/NnapiSupport.txt
+++ b/NnapiSupport.txt
@@ -43,6 +43,9 @@ L2_POOL_2D (FLOAT32, QUANT8_ASYMM)
LESS (FLOAT32, INT32, QUANT8_ASYMM, QUANT8_ASYMM_SIGNED)
LESS_EQUAL (FLOAT32, INT32, QUANT8_ASYMM, QUANT8_ASYMM_SIGNED)
LOCAL_RESPONSE_NORMALIZATION (FLOAT32)
+LOGICAL_AND (BOOL8)
+LOGICAL_NOT (BOOL8)
+LOGICAL_OR (BOOL8)
LOGISTIC (FLOAT32, QUANT8_ASYMM, QUANT8_ASYMM_SIGNED)
LOG_SOFTMAX (FLOAT32)
LSTM (FLOAT32)