diff options
author    Teresa Charlin <teresa.charlinreyes@arm.com>   2023-09-15 15:19:21 +0100
committer TeresaARM <teresa.charlinreyes@arm.com>        2023-09-29 11:05:29 +0000
commit    077cddbe9e956c6740557a9add499385f235c384 (patch)
tree      ae1816443bf4f85c7968aa3e542ef2b5e5400e7e /src/backends/reference
parent    9a45e8fab86f7078d22360794058f5550413df78 (diff)
download  armnn-077cddbe9e956c6740557a9add499385f235c384.tar.gz
IVGCVSW-8055 Add support for GELU activation function.
* Add support to CpuRef, CpuAcc and GpuAcc
* Add support to tflite parser, classic and opaque tflite delegates
* Add support to serializer and deserializer
* Add Unit tests
Signed-off-by: Teresa Charlin <teresa.charlinreyes@arm.com>
Change-Id: Ibc60ef2ef2a051e6d9af6e15d24c46316ec19de4
Diffstat (limited to 'src/backends/reference')
 src/backends/reference/RefLayerSupport.cpp      | 1 +
 src/backends/reference/test/RefLayerTests.cpp   | 6 ++++++
 src/backends/reference/workloads/Activation.cpp | 7 +++++++
 3 files changed, 14 insertions(+), 0 deletions(-)
diff --git a/src/backends/reference/RefLayerSupport.cpp b/src/backends/reference/RefLayerSupport.cpp
index defdf0d807..167639a733 100644
--- a/src/backends/reference/RefLayerSupport.cpp
+++ b/src/backends/reference/RefLayerSupport.cpp
@@ -588,6 +588,7 @@ bool RefLayerSupport::IsActivationSupported(const TensorInfo& input,
         case ActivationFunction::Abs:
         case ActivationFunction::BoundedReLu:
         case ActivationFunction::Elu:
+        case ActivationFunction::Gelu:
         case ActivationFunction::HardSwish:
         case ActivationFunction::LeakyReLu:
         case ActivationFunction::Linear:
diff --git a/src/backends/reference/test/RefLayerTests.cpp b/src/backends/reference/test/RefLayerTests.cpp
index af4ed966b2..cfe85594b3 100644
--- a/src/backends/reference/test/RefLayerTests.cpp
+++ b/src/backends/reference/test/RefLayerTests.cpp
@@ -770,11 +770,17 @@ ARMNN_AUTO_TEST_CASE_WITH_THF(TanhInt16, TanhInt16Test)
 ARMNN_AUTO_TEST_CASE_WITH_THF(Elu, EluTest)
 ARMNN_AUTO_TEST_CASE_WITH_THF(EluUint8, EluUint8Test)
 ARMNN_AUTO_TEST_CASE_WITH_THF(EluInt16, EluInt16Test)
+
 // HardSwish Activation
 ARMNN_AUTO_TEST_CASE_WITH_THF(HardSwish, HardSwishTest)
 ARMNN_AUTO_TEST_CASE_WITH_THF(HardSwishUint8, HardSwishUint8Test)
 ARMNN_AUTO_TEST_CASE_WITH_THF(HardSwishInt16, HardSwishInt16Test)
+// Gelu Activation
+ARMNN_AUTO_TEST_CASE_WITH_THF(Gelu, GeluTest)
+ARMNN_AUTO_TEST_CASE_WITH_THF(GeluUint8, GeluUint8Test)
+ARMNN_AUTO_TEST_CASE_WITH_THF(GeluInt16, GeluInt16Test)
+
 // Fully Connected
 ARMNN_AUTO_TEST_CASE_WITH_THF(SimpleFullyConnected, FullyConnectedFloat32Test, false, false)
 ARMNN_AUTO_TEST_CASE_WITH_THF(FullyConnectedUint8, FullyConnectedTest<DataType::QAsymmU8>, false, true)
diff --git a/src/backends/reference/workloads/Activation.cpp b/src/backends/reference/workloads/Activation.cpp
index 8de0e8b3b2..1577543fe4 100644
--- a/src/backends/reference/workloads/Activation.cpp
+++ b/src/backends/reference/workloads/Activation.cpp
@@ -82,6 +82,13 @@ float Activation(float in,
             output = in * (std::min(std::max((in + 3),0.0f),6.0f)) / 6;
             break;
         }
+        case ActivationFunction::Gelu:
+        {
+            // gelu(x) = x * 1/2 * (1 + erf(x / sqrt(2))),
+            // where erf is Gaussian error function
+            output = in * (0.5f * (1.0f + erff(static_cast<float>(in / std::sqrt(2)))));
+            break;
+        }
         default:
         {
             throw InvalidArgumentException("Unsupported activation function");