| author | Teresa Charlin <teresa.charlinreyes@arm.com> | 2023-04-27 14:42:23 +0100 |
|---|---|---|
| committer | Teresa Charlin <teresa.charlinreyes@arm.com> | 2023-04-27 15:24:11 +0100 |
| commit | f69ae5602f370b8f108618a8f01e39a9538d3651 (patch) | |
| tree | 803e756259b8ad277fdcded79f381806994492ac /delegate/opaque/src/Activation.hpp | |
| parent | 0cc93ab9e7cee6a0fc43f73c3520d3579464ce72 (diff) | |
| download | armnn-f69ae5602f370b8f108618a8f01e39a9538d3651.tar.gz | |
IVGCVSW-7589 IVGCVSW-7595 IVGCVSW-7593 ElementwiseUnary, Normalization and LogicalBinary operators for opaque delegate
* Report the operator as part of the layer name for:
  - LogicalBinary
  - ElementwiseUnary
  - Comparison
  - Activation
* Fix indentation in Gather.hpp
* Remove unneeded includes from Gather, GatherNd and Comparison
* Correct the end-of-namespace comment in Comparison
* Correct the log prefix from TfLiteArmnnDelegate to TfLiteArmnnOpaqueDelegate
Signed-off-by: Teresa Charlin <teresa.charlinreyes@arm.com>
Change-Id: Ia0d497709309e912d31eb4b6db0fef9e79b7a3af
Diffstat (limited to 'delegate/opaque/src/Activation.hpp')
| -rw-r--r-- | delegate/opaque/src/Activation.hpp | 53 |
|---|---|---|

1 file changed, 50 insertions, 3 deletions
```diff
diff --git a/delegate/opaque/src/Activation.hpp b/delegate/opaque/src/Activation.hpp
index a45bba95a9..9fce7a12e0 100644
--- a/delegate/opaque/src/Activation.hpp
+++ b/delegate/opaque/src/Activation.hpp
@@ -10,6 +10,53 @@
 namespace armnnOpaqueDelegate
 {
 
+std::string GetLayerName(armnn::ActivationFunction activationFunction)
+{
+    std::string layerName = "ACTIVATION";
+    switch (activationFunction)
+    {
+        case armnn::ActivationFunction::Abs:
+            layerName += " ABS";
+            break;
+        case armnn::ActivationFunction::BoundedReLu:
+            layerName += " BOUNDED_RELU";
+            break;
+        case armnn::ActivationFunction::Elu:
+            layerName += " ELU";
+            break;
+        case armnn::ActivationFunction::HardSwish:
+            layerName += " HARD_SWISH";
+            break;
+        case armnn::ActivationFunction::LeakyReLu:
+            layerName += " LEAKY_RELU";
+            break;
+        case armnn::ActivationFunction::Linear:
+            layerName += " LINEAR";
+            break;
+        case armnn::ActivationFunction::ReLu:
+            layerName += " RELU";
+            break;
+        case armnn::ActivationFunction::Sigmoid:
+            layerName += " SIGMOID";
+            break;
+        case armnn::ActivationFunction::SoftReLu:
+            layerName += " SOFT_RELU";
+            break;
+        case armnn::ActivationFunction::Square:
+            layerName += " SQUARE";
+            break;
+        case armnn::ActivationFunction::Sqrt:
+            layerName += " SQRT";
+            break;
+        case armnn::ActivationFunction::TanH:
+            layerName += " TANH";
+            break;
+        default:
+            layerName += " UNKNOWN";
+    }
+    return layerName;
+}
+
 TfLiteStatus ValidateActivationOperator(DelegateData& delegateData,
                                         TfLiteOpaqueContext* tfLiteContext,
                                         const armnn::TensorInfo& inputInfo,
@@ -17,9 +64,9 @@ TfLiteStatus ValidateActivationOperator(DelegateData& delegateData,
                                         armnn::ActivationDescriptor& activationDesc)
 {
     bool isSupported = false;
-    auto validateFunc = [&](const armnn::TensorInfo& outputInfo, bool& isSupported)
+    auto validateFunc = [&](const armnn::TensorInfo& outputInfo, bool& isSupported, std::string layerName)
     {
-        FORWARD_LAYER_OPAQUE_SUPPORT_FUNC("ACTIVATION",
+        FORWARD_LAYER_OPAQUE_SUPPORT_FUNC(layerName.c_str(),
                                           tfLiteContext,
                                           IsActivationSupported,
                                           delegateData.m_Backends,
@@ -30,7 +77,7 @@ TfLiteStatus ValidateActivationOperator(DelegateData& delegateData,
                                           activationDesc);
     };
 
-    validateFunc(outputInfo, isSupported);
+    validateFunc(outputInfo, isSupported, GetLayerName(activationDesc.m_Function));
 
     return isSupported ? kTfLiteOk : kTfLiteError;
 }
```
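For illustration, here is a minimal sketch of how the new `GetLayerName` helper behaves. The standalone harness, include paths, and assertions below are assumptions made for demonstration and are not part of the patch:

```cpp
// Minimal sketch exercising the GetLayerName helper added by this patch.
// NOTE: the include paths and this assert-based harness are assumptions,
// not part of the patch itself.
#include <cassert>
#include <string>

#include <armnn/Types.hpp> // declares armnn::ActivationFunction
#include "Activation.hpp"  // the opaque-delegate header modified above

int main()
{
    using armnnOpaqueDelegate::GetLayerName;

    // Every name starts with "ACTIVATION" and the specific operator is
    // appended, so an unsupported-layer report now names the exact
    // activation function instead of the generic "ACTIVATION".
    assert(GetLayerName(armnn::ActivationFunction::ReLu)      == "ACTIVATION RELU");
    assert(GetLayerName(armnn::ActivationFunction::HardSwish) == "ACTIVATION HARD_SWISH");
    assert(GetLayerName(armnn::ActivationFunction::TanH)      == "ACTIVATION TANH");

    return 0;
}
```

Per the commit message, the same operator-in-layer-name reporting pattern is applied to the LogicalBinary, ElementwiseUnary and Comparison operators in the other files touched by this change.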