From f69ae5602f370b8f108618a8f01e39a9538d3651 Mon Sep 17 00:00:00 2001
From: Teresa Charlin
Date: Thu, 27 Apr 2023 14:42:23 +0100
Subject: IVGCVSW-7589 IVGCVSW-7595 IVGCVSW-7593 ElementwiseUnary, Normalization
 and LogicalBinary operators for opaque delegate

* Report the operator as part of the layer name for:
  - LogicalBinary,
  - ElementwiseUnary
  - Comparison
  - Activation
* Fixing indentation in Gather.hpp
* Removing not needed includes in Gather, GatherNd and Comparison
* Correct end of namespace comment in Comparison
* Correct log from TfLiteArmnnDelegate to TfLiteArmnnOpaqueDelegate

Signed-off-by: Teresa Charlin
Change-Id: Ia0d497709309e912d31eb4b6db0fef9e79b7a3af
---
 delegate/opaque/src/Activation.hpp | 53 +++++++++++++++++++++++++++++++++++---
 1 file changed, 50 insertions(+), 3 deletions(-)

(limited to 'delegate/opaque/src/Activation.hpp')

diff --git a/delegate/opaque/src/Activation.hpp b/delegate/opaque/src/Activation.hpp
index a45bba95a9..9fce7a12e0 100644
--- a/delegate/opaque/src/Activation.hpp
+++ b/delegate/opaque/src/Activation.hpp
@@ -10,6 +10,53 @@
 namespace armnnOpaqueDelegate
 {
 
+std::string GetLayerName(armnn::ActivationFunction activationFunction)
+{
+    std::string layerName = "ACTIVATION";
+    switch (activationFunction)
+    {
+        case armnn::ActivationFunction::Abs:
+            layerName += " ABS";
+            break;
+        case armnn::ActivationFunction::BoundedReLu:
+            layerName += " BOUNDED_RELU";
+            break;
+        case armnn::ActivationFunction::Elu:
+            layerName += " ELU";
+            break;
+        case armnn::ActivationFunction::HardSwish:
+            layerName += " HARD_SWISH";
+            break;
+        case armnn::ActivationFunction::LeakyReLu:
+            layerName += " LEAKY_RELU";
+            break;
+        case armnn::ActivationFunction::Linear:
+            layerName += " LINEAR";
+            break;
+        case armnn::ActivationFunction::ReLu:
+            layerName += " RELU";
+            break;
+        case armnn::ActivationFunction::Sigmoid:
+            layerName += " SIGMOID";
+            break;
+        case armnn::ActivationFunction::SoftReLu:
+            layerName += " SOFT_RELU";
+            break;
+        case armnn::ActivationFunction::Square:
+            layerName += " SQUARE";
+            break;
+        case armnn::ActivationFunction::Sqrt:
+            layerName += " SQRT";
+            break;
+        case armnn::ActivationFunction::TanH:
+            layerName += " TANH";
+            break;
+        default:
+            layerName += " UNKNOWN";
+    }
+    return layerName;
+}
+
 TfLiteStatus ValidateActivationOperator(DelegateData& delegateData,
                                         TfLiteOpaqueContext* tfLiteContext,
                                         const armnn::TensorInfo& inputInfo,
@@ -17,9 +64,9 @@ TfLiteStatus ValidateActivationOperator(DelegateData& delegateData,
                                         armnn::ActivationDescriptor& activationDesc)
 {
     bool isSupported = false;
-    auto validateFunc = [&](const armnn::TensorInfo& outputInfo, bool& isSupported)
+    auto validateFunc = [&](const armnn::TensorInfo& outputInfo, bool& isSupported, std::string layerName)
     {
-        FORWARD_LAYER_OPAQUE_SUPPORT_FUNC("ACTIVATION",
+        FORWARD_LAYER_OPAQUE_SUPPORT_FUNC(layerName.c_str(),
                                           tfLiteContext,
                                           IsActivationSupported,
                                           delegateData.m_Backends,
@@ -30,7 +77,7 @@ TfLiteStatus ValidateActivationOperator(DelegateData& delegateData,
                                           activationDesc);
     };
 
-    validateFunc(outputInfo, isSupported);
+    validateFunc(outputInfo, isSupported, GetLayerName(activationDesc.m_Function));
 
     return isSupported ? kTfLiteOk : kTfLiteError;
 }
-- 
cgit v1.2.1