author     Teresa Charlin <teresa.charlinreyes@arm.com>  2023-09-15 15:19:21 +0100
committer  TeresaARM <teresa.charlinreyes@arm.com>       2023-09-29 11:05:29 +0000
commit     077cddbe9e956c6740557a9add499385f235c384 (patch)
tree       ae1816443bf4f85c7968aa3e542ef2b5e5400e7e /delegate
parent     9a45e8fab86f7078d22360794058f5550413df78 (diff)
download   armnn-077cddbe9e956c6740557a9add499385f235c384.tar.gz
IVGCVSW-8055 Add support for GELU activation function.
* Add support to CpuRef, CpuAcc and GpuAcc
* Add support to tflite parser, classic and opaque tflite delegates
* Add support to serializer and deserializer
* Add Unit tests

Signed-off-by: Teresa Charlin <teresa.charlinreyes@arm.com>
Change-Id: Ibc60ef2ef2a051e6d9af6e15d24c46316ec19de4
Diffstat (limited to 'delegate')
-rw-r--r--  delegate/classic/src/Activation.hpp        5
-rw-r--r--  delegate/classic/src/armnn_delegate.cpp    6
-rw-r--r--  delegate/opaque/src/Activation.hpp         8
-rw-r--r--  delegate/opaque/src/armnn_delegate.cpp     6
-rw-r--r--  delegate/test/ActivationTest.cpp          45
-rw-r--r--  delegate/test/ActivationTestHelper.hpp     6
6 files changed, 75 insertions, 1 deletions
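For reference, GELU is defined as gelu(x) = x * Φ(x) = x * 0.5 * (1 + erf(x / sqrt(2))), and the new unit tests below compute their expected outputs directly from this expression. The following standalone snippet is not part of the commit; it is only a minimal sketch of that reference calculation:

    // Standalone sketch of the GELU reference used by the new tests (illustrative only).
    #include <cmath>
    #include <cstdio>

    float GeluRef(float x)
    {
        // gelu(x) = x * 1/2 * (1 + erf(x / sqrt(2))), where erf is the Gaussian error function.
        return x * 0.5f * (1.0f + std::erff(x / std::sqrt(2.0f)));
    }

    int main()
    {
        // Prints approximately 0.841345 -0.158655 0.000000
        std::printf("%f %f %f\n", GeluRef(1.0f), GeluRef(-1.0f), GeluRef(0.0f));
        return 0;
    }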
diff --git a/delegate/classic/src/Activation.hpp b/delegate/classic/src/Activation.hpp
index a93cee43a3..1c55c2e9b5 100644
--- a/delegate/classic/src/Activation.hpp
+++ b/delegate/classic/src/Activation.hpp
@@ -109,6 +109,11 @@ TfLiteStatus VisitActivationOperator(DelegateData& delegateData,
activationDesc.m_A = leakyReluParameters->alpha;
break;
}
+ case kTfLiteBuiltinGelu:
+ {
+ activationDesc.m_Function = armnn::ActivationFunction::Gelu;
+ break;
+ }
default:
{
return kTfLiteError;
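The new case above only selects armnn::ActivationFunction::Gelu in the ActivationDescriptor that VisitActivationOperator fills in; the delegate then adds an ordinary Arm NN activation layer for the node. A rough sketch of the equivalent direct network construction with the public Arm NN API (illustrative only, omitting tensor info and backend setup; not the delegate's actual code path):

    #include <armnn/Descriptors.hpp>
    #include <armnn/INetwork.hpp>

    armnn::INetworkPtr BuildGeluNetwork()
    {
        // Describe the activation: the only field GELU needs is the function itself.
        armnn::ActivationDescriptor activationDesc;
        activationDesc.m_Function = armnn::ActivationFunction::Gelu;

        armnn::INetworkPtr network = armnn::INetwork::Create();
        armnn::IConnectableLayer* input  = network->AddInputLayer(0);
        armnn::IConnectableLayer* gelu   = network->AddActivationLayer(activationDesc, "GELU");
        armnn::IConnectableLayer* output = network->AddOutputLayer(0);

        // Wire input -> GELU -> output.
        input->GetOutputSlot(0).Connect(gelu->GetInputSlot(0));
        gelu->GetOutputSlot(0).Connect(output->GetInputSlot(0));
        return network;
    }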
diff --git a/delegate/classic/src/armnn_delegate.cpp b/delegate/classic/src/armnn_delegate.cpp
index c8f57d6cc3..6054de5c5e 100644
--- a/delegate/classic/src/armnn_delegate.cpp
+++ b/delegate/classic/src/armnn_delegate.cpp
@@ -729,6 +729,12 @@ TfLiteStatus ArmnnSubgraph::VisitNode(DelegateData& delegateData,
tfLiteNode,
nodeIndex,
kTfLiteBuiltinGatherNd);
+ case kTfLiteBuiltinGelu:
+ return VisitActivationOperator(delegateData,
+ tfLiteContext,
+ tfLiteNode,
+ nodeIndex,
+ kTfLiteBuiltinGelu);
case kTfLiteBuiltinGreater:
return VisitComparisonOperator(delegateData,
tfLiteContext,
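With kTfLiteBuiltinGelu now dispatched to VisitActivationOperator, a TfLite model containing GELU can be offloaded through the classic delegate in the usual way. A hedged sketch of that registration follows; "model.tflite" and the CpuAcc backend are placeholders, and error handling is omitted:

    #include <armnn_delegate.hpp>
    #include <tensorflow/lite/interpreter.h>
    #include <tensorflow/lite/kernels/register.h>
    #include <tensorflow/lite/model.h>
    #include <memory>
    #include <vector>

    void RunWithArmnnDelegate()
    {
        // Build a standard TfLite interpreter for the model.
        auto model = tflite::FlatBufferModel::BuildFromFile("model.tflite");
        std::unique_ptr<tflite::Interpreter> interpreter;
        tflite::ops::builtin::BuiltinOpResolver resolver;
        tflite::InterpreterBuilder(*model, resolver)(&interpreter);

        // Create the Arm NN delegate and let it claim the nodes it supports, now including GELU.
        std::vector<armnn::BackendId> backends = { armnn::Compute::CpuAcc };
        armnnDelegate::DelegateOptions delegateOptions(backends);
        std::unique_ptr<TfLiteDelegate, decltype(&armnnDelegate::TfLiteArmnnDelegateDelete)>
            armnnDelegatePtr(armnnDelegate::TfLiteArmnnDelegateCreate(delegateOptions),
                             armnnDelegate::TfLiteArmnnDelegateDelete);
        interpreter->ModifyGraphWithDelegate(armnnDelegatePtr.get());

        interpreter->AllocateTensors();
        interpreter->Invoke();
    }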
diff --git a/delegate/opaque/src/Activation.hpp b/delegate/opaque/src/Activation.hpp
index dd9c2f68bc..ad242e5799 100644
--- a/delegate/opaque/src/Activation.hpp
+++ b/delegate/opaque/src/Activation.hpp
@@ -24,6 +24,9 @@ std::string GetLayerName(armnn::ActivationFunction activationFunction)
case armnn::ActivationFunction::Elu:
layerName += " ELU";
break;
+ case armnn::ActivationFunction::Gelu:
+ layerName += " GELU";
+ break;
case armnn::ActivationFunction::HardSwish:
layerName += " HARD_SWISH";
break;
@@ -175,6 +178,11 @@ TfLiteStatus VisitActivationOperator(DelegateData& delegateData,
activationDesc.m_A = leakyReluParameters->alpha;
break;
}
+ case kTfLiteBuiltinGelu:
+ {
+ activationDesc.m_Function = armnn::ActivationFunction::Gelu;
+ break;
+ }
default:
{
return kTfLiteError;
diff --git a/delegate/opaque/src/armnn_delegate.cpp b/delegate/opaque/src/armnn_delegate.cpp
index 08b1504efb..6abf7398cc 100644
--- a/delegate/opaque/src/armnn_delegate.cpp
+++ b/delegate/opaque/src/armnn_delegate.cpp
@@ -808,6 +808,12 @@ TfLiteStatus ArmnnSubgraph::VisitNode(DelegateData& delegateData,
tfLiteNode,
nodeIndex,
kTfLiteBuiltinGatherNd);
+ case kTfLiteBuiltinGelu:
+ return VisitActivationOperator(delegateData,
+ tfLiteContext,
+ tfLiteNode,
+ nodeIndex,
+ kTfLiteBuiltinGelu);
case kTfLiteBuiltinGreater:
return VisitComparisonOperator(delegateData,
tfLiteContext,
diff --git a/delegate/test/ActivationTest.cpp b/delegate/test/ActivationTest.cpp
index 620c299803..70321cd7e5 100644
--- a/delegate/test/ActivationTest.cpp
+++ b/delegate/test/ActivationTest.cpp
@@ -196,6 +196,33 @@ void ActivationLeakyReLuTest(std::vector<armnn::BackendId>& backends)
alpha);
}
+void ActivationGeluTest(std::vector<armnn::BackendId>& backends)
+{
+ std::vector<float> inputData =
+ {
+ -0.1f, -0.2f, -0.3f, -0.4f,
+ 0.1f, 0.2f, 0.3f, 0.4f,
+ -1.0f, -2.0f, -3.0f, -4.0f,
+ 1.0f, 2.0f, 3.0f, 4.0f
+ };
+
+ // Calculate output values for input.
+ auto f = [](float x)
+ {
+ // gelu(x) = x * 1/2 * (1 + erf(x / sqrt(2))),
+ // where erf is Gaussian error function
+ auto result = x * (0.5f * (1.0f + erff(static_cast<float>(x / std::sqrt(2)))));
+ return result;
+ };
+ std::vector<float> outputExpectedData(inputData.size());
+ std::transform(inputData.begin(), inputData.end(), outputExpectedData.begin(), f);
+
+ ActivationTest(tflite::BuiltinOperator_GELU,
+ backends,
+ inputData,
+ outputExpectedData);
+}
+
TEST_SUITE("Activation_CpuRefTests")
{
@@ -241,6 +268,12 @@ TEST_CASE ("Activation_LeakyRelu_CpuRef_Test")
ActivationLeakyReLuTest(backends);
}
+TEST_CASE ("Activation_Gelu_CpuRef_Test")
+{
+ std::vector<armnn::BackendId> backends = { armnn::Compute::CpuRef };
+ ActivationGeluTest(backends);
+}
+
}
TEST_SUITE("Activation_CpuAccTests")
@@ -288,6 +321,12 @@ TEST_CASE ("Activation_LeakyRelu_CpuAcc_Test")
ActivationLeakyReLuTest(backends);
}
+TEST_CASE ("Activation_Gelu_CpuAcc_Test")
+{
+ std::vector<armnn::BackendId> backends = { armnn::Compute::CpuAcc };
+ ActivationGeluTest(backends);
+}
+
}
TEST_SUITE("Activation_GpuAccTests")
@@ -335,6 +374,12 @@ TEST_CASE ("Activation_LeakyRelu_GpuAcc_Test")
ActivationLeakyReLuTest(backends);
}
+TEST_CASE ("Activation_Gelu_GpuAcc_Test")
+{
+ std::vector<armnn::BackendId> backends = { armnn::Compute::GpuAcc };
+ ActivationGeluTest(backends);
+}
+
}
} // namespace armnnDelegate
\ No newline at end of file
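As a quick sanity check of the expected data produced by the lambda in ActivationGeluTest: for x = 2.0, erf(2 / sqrt(2)) ≈ 0.9545, so gelu(2.0) ≈ 2.0 · 0.5 · 1.9545 ≈ 1.9545; for x = -2.0, gelu(-2.0) ≈ -2.0 · 0.5 · 0.0455 ≈ -0.0455; and for x = 0.1, gelu(0.1) ≈ 0.1 · 0.5398 ≈ 0.0540. The CpuRef, CpuAcc and GpuAcc test cases above should all reproduce these values up to floating-point tolerance.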
diff --git a/delegate/test/ActivationTestHelper.hpp b/delegate/test/ActivationTestHelper.hpp
index b0a4d6785d..7beb53ba3d 100644
--- a/delegate/test/ActivationTestHelper.hpp
+++ b/delegate/test/ActivationTestHelper.hpp
@@ -73,7 +73,11 @@ std::vector<char> CreateActivationTfLiteModel(tflite::BuiltinOperator activation
flatbuffers::Offset <flatbuffers::String> modelDescription =
flatBufferBuilder.CreateString("ArmnnDelegate: Activation Operator Model");
- flatbuffers::Offset <OperatorCode> operatorCode = CreateOperatorCode(flatBufferBuilder, activationOperatorCode);
+ flatbuffers::Offset <OperatorCode> operatorCode = CreateOperatorCode(flatBufferBuilder,
+ activationOperatorCode,
+ 0,
+ 1,
+ activationOperatorCode);
flatbuffers::Offset <Model> flatbufferModel =
CreateModel(flatBufferBuilder,
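The helper change above moves from the two-argument CreateOperatorCode call to the full flatc-generated form, whose parameters are (builder, deprecated_builtin_code, custom_code, version, builtin_code). Populating the final builtin_code field is the relevant part: GELU is one of the newer builtin operators whose enum value does not fit the legacy 8-bit deprecated_builtin_code field (an assumption based on the TFLite schema; the commit message does not state the rationale). An annotated restatement of the call, with comments added for explanation only:

    flatbuffers::Offset<tflite::OperatorCode> operatorCode =
        CreateOperatorCode(flatBufferBuilder,
                           activationOperatorCode,  // deprecated_builtin_code: legacy 8-bit field
                           0,                       // custom_code: none, this is a builtin operator
                           1,                       // version
                           activationOperatorCode); // builtin_code: 32-bit field used by newer ops such as GELU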