From 077cddbe9e956c6740557a9add499385f235c384 Mon Sep 17 00:00:00 2001
From: Teresa Charlin
Date: Fri, 15 Sep 2023 15:19:21 +0100
Subject: IVGCVSW-8055 Add support for GELU activation function.

* Add support to CpuRef, CpuAcc and GpuAcc
* Add support to tflite parser, classic and opaque tflite delegates
* Add support to serializer and deserializer
* Add Unit tests

Signed-off-by: Teresa Charlin
Change-Id: Ibc60ef2ef2a051e6d9af6e15d24c46316ec19de4
---
 src/armnnDeserializer/Deserializer.cpp          |  2 +
 src/armnnSerializer/ArmnnSchema.fbs             |  3 +-
 src/armnnSerializer/Serializer.cpp              |  2 +
 src/armnnTfLiteParser/TfLiteParser.cpp          | 11 ++++
 src/armnnTfLiteParser/TfLiteParser.hpp          |  1 +
 src/armnnTfLiteParser/test/Activations.cpp      | 12 ++++
 src/backends/aclCommon/ArmComputeUtils.hpp      |  1 +
 .../test/layerTests/ActivationTestImpl.cpp      | 65 ++++++++++++++++++++++
 .../test/layerTests/ActivationTestImpl.hpp      | 21 ++++++-
 src/backends/cl/test/ClLayerTests.cpp           |  3 +
 src/backends/neon/test/NeonLayerTests.cpp       |  3 +
 src/backends/reference/RefLayerSupport.cpp      |  1 +
 src/backends/reference/test/RefLayerTests.cpp   |  6 ++
 src/backends/reference/workloads/Activation.cpp |  7 +++
 14 files changed, 136 insertions(+), 2 deletions(-)

(limited to 'src')

diff --git a/src/armnnDeserializer/Deserializer.cpp b/src/armnnDeserializer/Deserializer.cpp
index 505f4d88a4..6e2c07bf37 100644
--- a/src/armnnDeserializer/Deserializer.cpp
+++ b/src/armnnDeserializer/Deserializer.cpp
@@ -502,6 +502,8 @@ armnn::ActivationFunction ToActivationFunction(armnnSerializer::ActivationFuncti
             return armnn::ActivationFunction::Elu;
         case armnnSerializer::ActivationFunction_HardSwish:
             return armnn::ActivationFunction::HardSwish;
+        case armnnSerializer::ActivationFunction_Gelu:
+            return armnn::ActivationFunction::Gelu;
         default:
             return armnn::ActivationFunction::Sigmoid;
     }
diff --git a/src/armnnSerializer/ArmnnSchema.fbs b/src/armnnSerializer/ArmnnSchema.fbs
index ec4b48639d..131970e449 100644
--- a/src/armnnSerializer/ArmnnSchema.fbs
+++ b/src/armnnSerializer/ArmnnSchema.fbs
@@ -21,7 +21,8 @@ enum ActivationFunction : byte {
     Sqrt = 8,
     Square = 9,
     Elu = 10,
-    HardSwish = 11
+    HardSwish = 11,
+    Gelu = 12
 }
 
 enum ArgMinMaxFunction : byte {
diff --git a/src/armnnSerializer/Serializer.cpp b/src/armnnSerializer/Serializer.cpp
index 6cadb598a2..0df675d1db 100644
--- a/src/armnnSerializer/Serializer.cpp
+++ b/src/armnnSerializer/Serializer.cpp
@@ -78,6 +78,8 @@ serializer::ActivationFunction GetFlatBufferActivationFunction(armnn::Activation
             return serializer::ActivationFunction::ActivationFunction_Elu;
         case armnn::ActivationFunction::HardSwish:
             return serializer::ActivationFunction::ActivationFunction_HardSwish;
+        case armnn::ActivationFunction::Gelu:
+            return serializer::ActivationFunction::ActivationFunction_Gelu;
         default:
             return serializer::ActivationFunction::ActivationFunction_Sigmoid;
     }
diff --git a/src/armnnTfLiteParser/TfLiteParser.cpp b/src/armnnTfLiteParser/TfLiteParser.cpp
index 3f4f0d811f..c2be54f5f5 100644
--- a/src/armnnTfLiteParser/TfLiteParser.cpp
+++ b/src/armnnTfLiteParser/TfLiteParser.cpp
@@ -771,6 +771,7 @@ TfLiteParserImpl::TfLiteParserImpl(const Optional<ITfLiteParser::TfLiteParserOpt
+    m_ParserFunctions[tflite::BuiltinOperator_GELU] = &TfLiteParserImpl::ParseGelu;
+void TfLiteParserImpl::ParseGelu(size_t subgraphIndex, size_t operatorIndex)
+{
+    ParseActivation(subgraphIndex, operatorIndex, ActivationFunction::Gelu);
+}
diff --git a/src/armnnTfLiteParser/TfLiteParser.hpp b/src/armnnTfLiteParser/TfLiteParser.hpp
--- a/src/armnnTfLiteParser/TfLiteParser.hpp
+++ b/src/armnnTfLiteParser/TfLiteParser.hpp
+    void ParseGelu(size_t subgraphIndex, size_t operatorIndex);
diff --git a/src/armnnTfLiteParser/test/Activations.cpp b/src/armnnTfLiteParser/test/Activations.cpp
--- a/src/armnnTfLiteParser/test/Activations.cpp
+++ b/src/armnnTfLiteParser/test/Activations.cpp
+struct GeluFixture : ActivationFixture
+{
+    GeluFixture() : ActivationFixture("GELU", "FLOAT32") {}
+};
+
+TEST_CASE_FIXTURE(GeluFixture, "ParseGelu")
+{
+    RunTest<4, armnn::DataType::Float32>(
+        0,
+        { -4.0f, -3.0f, -2.9f, 1.2f, 2.2f, 3.0f, 4.0f },
+        { -0.000126361847f, -0.00404950976f, -0.00541083235f, 1.06191647f, 2.16941237f, 2.9959507f, 3.99987364f });
+}
+
 }
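The expected outputs in the ParseGelu test above come straight from the erf-based GELU definition, gelu(x) = x * 0.5 * (1 + erf(x / sqrt(2))), rather than the tanh approximation some frameworks use. A minimal standalone check of those constants (not part of this change, assuming nothing beyond the C++ standard library) could look like this:

    #include <cmath>
    #include <cstdio>

    int main()
    {
        const float inputs[] = { -4.0f, -3.0f, -2.9f, 1.2f, 2.2f, 3.0f, 4.0f };
        for (float x : inputs)
        {
            // gelu(x) = x * 0.5 * (1 + erf(x / sqrt(2)))
            const float y = x * 0.5f * (1.0f + std::erf(x / std::sqrt(2.0f)));
            std::printf("gelu(%g) = %.9g\n", x, static_cast<double>(y));
        }
        return 0;
    }

Compiled with any C++11 compiler, this prints values that match the literals in the test to float precision (e.g. gelu(-4.0) = -0.000126361847).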
diff --git a/src/backends/aclCommon/ArmComputeUtils.hpp b/src/backends/aclCommon/ArmComputeUtils.hpp
index fc59b281b5..f466ab1777 100644
--- a/src/backends/aclCommon/ArmComputeUtils.hpp
+++ b/src/backends/aclCommon/ArmComputeUtils.hpp
@@ -77,6 +77,7 @@ ConvertActivationFunctionToAclActivationFunction(ActivationFunction armnnFunctio
         case ActivationFunction::TanH:          return AclActivationFunction::TANH;
         case ActivationFunction::Elu:           return AclActivationFunction::ELU;
         case ActivationFunction::HardSwish:     return AclActivationFunction::HARD_SWISH;
+        case ActivationFunction::Gelu:          return AclActivationFunction::GELU;
         default:                                throw InvalidArgumentException("Unsupported activation function");
     }
 }
diff --git a/src/backends/backendsCommon/test/layerTests/ActivationTestImpl.cpp b/src/backends/backendsCommon/test/layerTests/ActivationTestImpl.cpp
index 1dcbdfac9e..b562a8af32 100644
--- a/src/backends/backendsCommon/test/layerTests/ActivationTestImpl.cpp
+++ b/src/backends/backendsCommon/test/layerTests/ActivationTestImpl.cpp
@@ -1217,6 +1217,71 @@ LayerTestResult<int16_t, 4> HardSwishInt16Test(
 }
 
+
+template<armnn::DataType ArmnnType, typename T = armnn::ResolveType<ArmnnType>>
+LayerTestResult<T, 4> GeluTestCommon(
+    armnn::IWorkloadFactory& workloadFactory,
+    const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager,
+    const armnn::ITensorHandleFactory& tensorHandleFactory,
+    float qScale,
+    int32_t qOffset)
+{
+    std::vector<float> inputData =
+    {
+        -0.1f, -0.2f, -0.3f, -0.4f,
+         0.1f,  0.2f,  0.3f,  0.4f,
+        -1.0f, -2.0f, -3.0f, -4.0f,
+         1.0f,  2.0f,  3.0f,  4.0f
+    };
+    // Calculate output values for input.
+    auto f = [](float x)
+    {
+        // gelu(x) = x * 1/2 * (1 + erf(x / sqrt(2))),
+        // where erf is Gaussian error function
+        auto result = x * (0.5f * (1.0f + erff(static_cast<float>(x / std::sqrt(2)))));
+        return result;
+    };
+    std::vector<float> expectedOutput(inputData.size());
+    std::transform(inputData.begin(), inputData.end(), expectedOutput.begin(), f);
+
+    return SimpleActivationTest<ArmnnType>(workloadFactory,
+                                           memoryManager,
+                                           tensorHandleFactory,
+                                           armnn::ActivationFunction::Gelu,
+                                           0.f,
+                                           0.f,
+                                           qScale,
+                                           qOffset,
+                                           inputData,
+                                           qScale,
+                                           qOffset,
+                                           expectedOutput);
+}
+
+LayerTestResult<float, 4> GeluTest(
+    armnn::IWorkloadFactory& workloadFactory,
+    const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager,
+    const armnn::ITensorHandleFactory& tensorHandleFactory)
+{
+    return GeluTestCommon<armnn::DataType::Float32>(workloadFactory, memoryManager, tensorHandleFactory, 0.1f, 0);
+}
+
+LayerTestResult<uint8_t, 4> GeluUint8Test(
+    armnn::IWorkloadFactory& workloadFactory,
+    const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager,
+    const armnn::ITensorHandleFactory& tensorHandleFactory)
+{
+    return GeluTestCommon<armnn::DataType::QAsymmU8>(workloadFactory, memoryManager, tensorHandleFactory, 0.1f, 64);
+}
+
+LayerTestResult<int16_t, 4> GeluInt16Test(
+    armnn::IWorkloadFactory& workloadFactory,
+    const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager,
+    const armnn::ITensorHandleFactory& tensorHandleFactory)
+{
+    return GeluTestCommon<armnn::DataType::QSymmS16>(workloadFactory, memoryManager, tensorHandleFactory, 0.1f, 0);
+}
+
+
 template<armnn::DataType ArmnnType, typename T = armnn::ResolveType<ArmnnType>>
 LayerTestResult<T, 4> CompareActivationTestImpl(
     armnn::IWorkloadFactory& workloadFactory,
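The quantized variants above reuse the float reference through SimpleActivationTest with qScale 0.1 and, for the QAsymmU8 case, qOffset 64. A minimal sketch of the affine mapping this implies (real = scale * (q - offset)); the Quantize helper below is hypothetical, for illustration only:

    #include <algorithm>
    #include <cmath>
    #include <cstdint>
    #include <cstdio>

    // Affine quantization: real = scale * (q - offset),
    // so q = round(real / scale) + offset, clamped to [0, 255].
    static uint8_t Quantize(float x, float scale, int32_t offset)
    {
        const int32_t q = static_cast<int32_t>(std::round(x / scale)) + offset;
        return static_cast<uint8_t>(std::min(255, std::max(0, q)));
    }

    int main()
    {
        const float scale = 0.1f;   // qScale passed by GeluUint8Test
        const int32_t offset = 64;  // qOffset passed by GeluUint8Test
        const float x = -1.0f;
        const float gelu = x * 0.5f * (1.0f + std::erf(x / std::sqrt(2.0f))); // ~ -0.1587
        std::printf("input %g -> q=%d, gelu %g -> q=%d\n",
                    x, int(Quantize(x, scale, offset)),
                    gelu, int(Quantize(gelu, scale, offset)));
        return 0;
    }

With these parameters the input -1.0f quantizes to 54 and gelu(-1.0) ≈ -0.1587 quantizes to 62, which is the kind of round-trip the Uint8 test exercises.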
diff --git a/src/backends/backendsCommon/test/layerTests/ActivationTestImpl.hpp b/src/backends/backendsCommon/test/layerTests/ActivationTestImpl.hpp
index e23cd32583..5df6813466 100644
--- a/src/backends/backendsCommon/test/layerTests/ActivationTestImpl.hpp
+++ b/src/backends/backendsCommon/test/layerTests/ActivationTestImpl.hpp
@@ -1,5 +1,5 @@
 //
-// Copyright © 2017 Arm Ltd. All rights reserved.
+// Copyright © 2017-2021, 2023 Arm Ltd and Contributors. All rights reserved.
 // SPDX-License-Identifier: MIT
 //
@@ -273,6 +273,25 @@ LayerTestResult<int16_t, 4> HardSwishInt16Test(
     const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager,
     const armnn::ITensorHandleFactory& tensorHandleFactory);
 
+//
+// Gelu
+//
+
+LayerTestResult<float, 4> GeluTest(
+    armnn::IWorkloadFactory& workloadFactory,
+    const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager,
+    const armnn::ITensorHandleFactory& tensorHandleFactory);
+
+LayerTestResult<uint8_t, 4> GeluUint8Test(
+    armnn::IWorkloadFactory& workloadFactory,
+    const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager,
+    const armnn::ITensorHandleFactory& tensorHandleFactory);
+
+LayerTestResult<int16_t, 4> GeluInt16Test(
+    armnn::IWorkloadFactory& workloadFactory,
+    const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager,
+    const armnn::ITensorHandleFactory& tensorHandleFactory);
+
 //
 // Other
 //
diff --git a/src/backends/cl/test/ClLayerTests.cpp b/src/backends/cl/test/ClLayerTests.cpp
index 33e1b69ade..a596a01be8 100644
--- a/src/backends/cl/test/ClLayerTests.cpp
+++ b/src/backends/cl/test/ClLayerTests.cpp
@@ -73,6 +73,9 @@ ARMNN_AUTO_TEST_FIXTURE_WITH_THF(Tanh, ClContextControlFixture, TanhTest)
 // Elu Activation
 ARMNN_AUTO_TEST_FIXTURE_WITH_THF(Elu, ClContextControlFixture, EluTest)
 
+// Gelu Activation
+ARMNN_AUTO_TEST_FIXTURE_WITH_THF(Gelu, ClContextControlFixture, GeluTest)
+
 // Batch Mat Mul
 ARMNN_AUTO_TEST_FIXTURE_WITH_THF(BatchMatMul2DSimpleFloat32,
                                  ClContextControlFixture,
diff --git a/src/backends/neon/test/NeonLayerTests.cpp b/src/backends/neon/test/NeonLayerTests.cpp
index 658d718b19..a938ceb9c3 100644
--- a/src/backends/neon/test/NeonLayerTests.cpp
+++ b/src/backends/neon/test/NeonLayerTests.cpp
@@ -722,6 +722,9 @@ ARMNN_AUTO_TEST_CASE_WITH_THF(Tanh, TanhTest)
 // Elu Activation
 ARMNN_AUTO_TEST_CASE_WITH_THF(Elu, EluTest)
 
+// Gelu Activation
+ARMNN_AUTO_TEST_CASE_WITH_THF(Gelu, GeluTest)
+
 // Softmax
 // Moved to NeonLayerTests_NDK_Bug.cpp
 //ARMNN_AUTO_TEST_CASE_WITH_THF(SimpleSoftmaxBeta1, SimpleSoftmaxTest, 1.0f)
diff --git a/src/backends/reference/RefLayerSupport.cpp b/src/backends/reference/RefLayerSupport.cpp
index defdf0d807..167639a733 100644
--- a/src/backends/reference/RefLayerSupport.cpp
+++ b/src/backends/reference/RefLayerSupport.cpp
@@ -588,6 +588,7 @@ bool RefLayerSupport::IsActivationSupported(const TensorInfo& input,
         case ActivationFunction::Abs:
         case ActivationFunction::BoundedReLu:
         case ActivationFunction::Elu:
+        case ActivationFunction::Gelu:
         case ActivationFunction::HardSwish:
         case ActivationFunction::LeakyReLu:
         case ActivationFunction::Linear:
diff --git a/src/backends/reference/test/RefLayerTests.cpp b/src/backends/reference/test/RefLayerTests.cpp
index af4ed966b2..cfe85594b3 100644
--- a/src/backends/reference/test/RefLayerTests.cpp
+++ b/src/backends/reference/test/RefLayerTests.cpp
@@ -770,11 +770,17 @@ ARMNN_AUTO_TEST_CASE_WITH_THF(TanhInt16, TanhInt16Test)
 ARMNN_AUTO_TEST_CASE_WITH_THF(Elu, EluTest)
 ARMNN_AUTO_TEST_CASE_WITH_THF(EluUint8, EluUint8Test)
 ARMNN_AUTO_TEST_CASE_WITH_THF(EluInt16, EluInt16Test)
+
 // HardSwish Activation
 ARMNN_AUTO_TEST_CASE_WITH_THF(HardSwish, HardSwishTest)
 ARMNN_AUTO_TEST_CASE_WITH_THF(HardSwishUint8, HardSwishUint8Test)
 ARMNN_AUTO_TEST_CASE_WITH_THF(HardSwishInt16, HardSwishInt16Test)
 
+// Gelu Activation
+ARMNN_AUTO_TEST_CASE_WITH_THF(Gelu, GeluTest)
+ARMNN_AUTO_TEST_CASE_WITH_THF(GeluUint8, GeluUint8Test)
+ARMNN_AUTO_TEST_CASE_WITH_THF(GeluInt16, GeluInt16Test)
+
 // Fully Connected
 ARMNN_AUTO_TEST_CASE_WITH_THF(SimpleFullyConnected, FullyConnectedFloat32Test, false, false)
 ARMNN_AUTO_TEST_CASE_WITH_THF(FullyConnectedUint8, FullyConnectedTest, false, true)
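With the backend registrations above in place, an application can request the new activation through the public INetwork API. A minimal sketch, assuming the armnn/ArmNN.hpp umbrella header and a CpuRef build; the layer name and tensor shape here are illustrative, not taken from this patch:

    #include <armnn/ArmNN.hpp>

    int main()
    {
        using namespace armnn;

        INetworkPtr network = INetwork::Create();

        ActivationDescriptor descriptor;
        descriptor.m_Function = ActivationFunction::Gelu; // added by this change

        IConnectableLayer* input  = network->AddInputLayer(0);
        IConnectableLayer* gelu   = network->AddActivationLayer(descriptor, "gelu");
        IConnectableLayer* output = network->AddOutputLayer(0);

        input->GetOutputSlot(0).Connect(gelu->GetInputSlot(0));
        gelu->GetOutputSlot(0).Connect(output->GetInputSlot(0));

        const TensorInfo info({ 1, 1, 2, 2 }, DataType::Float32);
        input->GetOutputSlot(0).SetTensorInfo(info);
        gelu->GetOutputSlot(0).SetTensorInfo(info);

        // Optimize for the reference backend; per the commit message,
        // CpuAcc and GpuAcc also accept Gelu after this change.
        IRuntimePtr runtime = IRuntime::Create(IRuntime::CreationOptions());
        IOptimizedNetworkPtr optimized =
            Optimize(*network, { Compute::CpuRef }, runtime->GetDeviceSpec());
        return optimized ? 0 : 1;
    }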
diff --git a/src/backends/reference/workloads/Activation.cpp b/src/backends/reference/workloads/Activation.cpp
index 8de0e8b3b2..1577543fe4 100644
--- a/src/backends/reference/workloads/Activation.cpp
+++ b/src/backends/reference/workloads/Activation.cpp
@@ -82,6 +82,13 @@ float Activation(float in,
             output = in * (std::min(std::max((in + 3),0.0f),6.0f)) / 6;
             break;
         }
+        case ActivationFunction::Gelu:
+        {
+            // gelu(x) = x * 1/2 * (1 + erf(x / sqrt(2))),
+            // where erf is Gaussian error function
+            output = in * (0.5f * (1.0f + erff(static_cast<float>(in / std::sqrt(2)))));
+            break;
+        }
         default:
         {
             throw InvalidArgumentException("Unsupported activation function");
--
cgit v1.2.1
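As a quick numerical check of the reference formula above: gelu(1.0) = 1.0 * 0.5 * (1 + erf(1/sqrt(2))) = 0.5 * (1 + 0.682689) ≈ 0.841345, and gelu(2.0) = 2.0 * 0.5 * (1 + 0.954500) ≈ 1.954500, consistent with the expected values used in the unit tests (e.g. input 2.2 -> 2.16941237).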