Diffstat (limited to 'src/backends')
-rw-r--r--  src/backends/aclCommon/ArmComputeUtils.hpp                          |  1
-rw-r--r--  src/backends/backendsCommon/test/layerTests/ActivationTestImpl.cpp  | 65
-rw-r--r--  src/backends/backendsCommon/test/layerTests/ActivationTestImpl.hpp  | 21
-rw-r--r--  src/backends/cl/test/ClLayerTests.cpp                               |  3
-rw-r--r--  src/backends/neon/test/NeonLayerTests.cpp                           |  3
-rw-r--r--  src/backends/reference/RefLayerSupport.cpp                          |  1
-rw-r--r--  src/backends/reference/test/RefLayerTests.cpp                       |  6
-rw-r--r--  src/backends/reference/workloads/Activation.cpp                     |  7
8 files changed, 106 insertions(+), 1 deletion(-)
diff --git a/src/backends/aclCommon/ArmComputeUtils.hpp b/src/backends/aclCommon/ArmComputeUtils.hpp
index fc59b281b5..f466ab1777 100644
--- a/src/backends/aclCommon/ArmComputeUtils.hpp
+++ b/src/backends/aclCommon/ArmComputeUtils.hpp
@@ -77,6 +77,7 @@ ConvertActivationFunctionToAclActivationFunction(ActivationFunction armnnFunctio
        case ActivationFunction::TanH:      return AclActivationFunction::TANH;
        case ActivationFunction::Elu:       return AclActivationFunction::ELU;
        case ActivationFunction::HardSwish: return AclActivationFunction::HARD_SWISH;
+        case ActivationFunction::Gelu:      return AclActivationFunction::GELU;
        default:                            throw InvalidArgumentException("Unsupported activation function");
    }
}
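
Note: in ArmNN this enum converter is consumed when an activation descriptor is translated into the arm_compute::ActivationLayerInfo that ACL kernels take. A minimal sketch of that surrounding helper (illustrative only, not part of this patch):

    // Sketch of the assumed call site: the converted function enum is wrapped
    // into the ACL descriptor together with the optional a/b parameters
    // (both unused by Gelu).
    inline arm_compute::ActivationLayerInfo
    ConvertActivationDescriptorToAclActivationLayerInfo(const ActivationDescriptor& actDesc)
    {
        return arm_compute::ActivationLayerInfo(
            ConvertActivationFunctionToAclActivationFunction(actDesc.m_Function),
            actDesc.m_A,
            actDesc.m_B);
    }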
diff --git a/src/backends/backendsCommon/test/layerTests/ActivationTestImpl.cpp b/src/backends/backendsCommon/test/layerTests/ActivationTestImpl.cpp
index 1dcbdfac9e..b562a8af32 100644
--- a/src/backends/backendsCommon/test/layerTests/ActivationTestImpl.cpp
+++ b/src/backends/backendsCommon/test/layerTests/ActivationTestImpl.cpp
@@ -1218,6 +1218,71 @@ LayerTestResult<int16_t, 4> HardSwishInt16Test(
template<armnn::DataType ArmnnType, typename T = armnn::ResolveType<ArmnnType>>
+LayerTestResult<T, 4> GeluTestCommon(
+    armnn::IWorkloadFactory& workloadFactory,
+    const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager,
+    const armnn::ITensorHandleFactory& tensorHandleFactory,
+    float qScale,
+    int32_t qOffset)
+{
+    std::vector<float> inputData =
+    {
+        -0.1f, -0.2f, -0.3f, -0.4f,
+         0.1f,  0.2f,  0.3f,  0.4f,
+        -1.0f, -2.0f, -3.0f, -4.0f,
+         1.0f,  2.0f,  3.0f,  4.0f
+    };
+    // Calculate output values for input.
+    auto f = [](float x)
+    {
+        // gelu(x) = x * 1/2 * (1 + erf(x / sqrt(2))),
+        // where erf is the Gaussian error function
+        auto result = x * (0.5f * (1.0f + erff(static_cast<float>(x / std::sqrt(2)))));
+        return result;
+    };
+    std::vector<float> expectedOutput(inputData.size());
+    std::transform(inputData.begin(), inputData.end(), expectedOutput.begin(), f);
+
+    return SimpleActivationTest<ArmnnType>(workloadFactory,
+                                           memoryManager,
+                                           tensorHandleFactory,
+                                           armnn::ActivationFunction::Gelu,
+                                           0.f,
+                                           0.f,
+                                           qScale,
+                                           qOffset,
+                                           inputData,
+                                           qScale,
+                                           qOffset,
+                                           expectedOutput);
+}
+
+LayerTestResult<float, 4> GeluTest(
+    armnn::IWorkloadFactory& workloadFactory,
+    const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager,
+    const armnn::ITensorHandleFactory& tensorHandleFactory)
+{
+    return GeluTestCommon<armnn::DataType::Float32>(workloadFactory, memoryManager, tensorHandleFactory, 0.1f, 0);
+}
+
+LayerTestResult<uint8_t, 4> GeluUint8Test(
+    armnn::IWorkloadFactory& workloadFactory,
+    const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager,
+    const armnn::ITensorHandleFactory& tensorHandleFactory)
+{
+    return GeluTestCommon<armnn::DataType::QAsymmU8>(workloadFactory, memoryManager, tensorHandleFactory, 0.1f, 64);
+}
+
+LayerTestResult<int16_t, 4> GeluInt16Test(
+    armnn::IWorkloadFactory& workloadFactory,
+    const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager,
+    const armnn::ITensorHandleFactory& tensorHandleFactory)
+{
+    return GeluTestCommon<armnn::DataType::QSymmS16>(workloadFactory, memoryManager, tensorHandleFactory, 0.1f, 0);
+}
+
+
+template<armnn::DataType ArmnnType, typename T = armnn::ResolveType<ArmnnType>>
LayerTestResult<T, 4> CompareActivationTestImpl(
    armnn::IWorkloadFactory& workloadFactory,
    const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager,
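
As a sanity check on the expected-output lambda in GeluTestCommon above, a self-contained sketch (illustrative, not part of the patch) evaluating the same erf-based formula for one test input; for example gelu(1.0) = 0.5 * (1 + erf(1/sqrt(2))) ≈ 0.8413:

    #include <cmath>
    #include <cstdio>

    // Same formula as the test lambda: gelu(x) = x * 1/2 * (1 + erf(x / sqrt(2)))
    float Gelu(float x)
    {
        return x * 0.5f * (1.0f + std::erf(x / std::sqrt(2.0f)));
    }

    int main()
    {
        // erf(1/sqrt(2)) ~= 0.682689, so this prints ~0.841345
        std::printf("gelu(1.0) = %f\n", Gelu(1.0f));
        return 0;
    }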
diff --git a/src/backends/backendsCommon/test/layerTests/ActivationTestImpl.hpp b/src/backends/backendsCommon/test/layerTests/ActivationTestImpl.hpp
index e23cd32583..5df6813466 100644
--- a/src/backends/backendsCommon/test/layerTests/ActivationTestImpl.hpp
+++ b/src/backends/backendsCommon/test/layerTests/ActivationTestImpl.hpp
@@ -1,5 +1,5 @@
//
-// Copyright © 2017 Arm Ltd. All rights reserved.
+// Copyright © 2017-2021, 2023 Arm Ltd and Contributors. All rights reserved.
// SPDX-License-Identifier: MIT
//
@@ -274,6 +274,25 @@ LayerTestResult<int16_t, 4> HardSwishInt16Test(
    const armnn::ITensorHandleFactory& tensorHandleFactory);
//
+// Gelu
+//
+
+LayerTestResult<float, 4> GeluTest(
+    armnn::IWorkloadFactory& workloadFactory,
+    const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager,
+    const armnn::ITensorHandleFactory& tensorHandleFactory);
+
+LayerTestResult<uint8_t, 4> GeluUint8Test(
+    armnn::IWorkloadFactory& workloadFactory,
+    const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager,
+    const armnn::ITensorHandleFactory& tensorHandleFactory);
+
+LayerTestResult<int16_t, 4> GeluInt16Test(
+    armnn::IWorkloadFactory& workloadFactory,
+    const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager,
+    const armnn::ITensorHandleFactory& tensorHandleFactory);
+
+//
// Other
//
diff --git a/src/backends/cl/test/ClLayerTests.cpp b/src/backends/cl/test/ClLayerTests.cpp
index 33e1b69ade..a596a01be8 100644
--- a/src/backends/cl/test/ClLayerTests.cpp
+++ b/src/backends/cl/test/ClLayerTests.cpp
@@ -73,6 +73,9 @@ ARMNN_AUTO_TEST_FIXTURE_WITH_THF(Tanh, ClContextControlFixture, TanhTest)
// Elu Activation
ARMNN_AUTO_TEST_FIXTURE_WITH_THF(Elu, ClContextControlFixture, EluTest)
+// Gelu Activation
+ARMNN_AUTO_TEST_FIXTURE_WITH_THF(Gelu, ClContextControlFixture, GeluTest)
+
// Batch Mat Mul
ARMNN_AUTO_TEST_FIXTURE_WITH_THF(BatchMatMul2DSimpleFloat32,
                                 ClContextControlFixture,
diff --git a/src/backends/neon/test/NeonLayerTests.cpp b/src/backends/neon/test/NeonLayerTests.cpp
index 658d718b19..a938ceb9c3 100644
--- a/src/backends/neon/test/NeonLayerTests.cpp
+++ b/src/backends/neon/test/NeonLayerTests.cpp
@@ -722,6 +722,9 @@ ARMNN_AUTO_TEST_CASE_WITH_THF(Tanh, TanhTest)
// Elu Activation
ARMNN_AUTO_TEST_CASE_WITH_THF(Elu, EluTest)
+// Gelu Activation
+ARMNN_AUTO_TEST_CASE_WITH_THF(Gelu, GeluTest)
+
// Softmax
// Moved to NeonLayerTests_NDK_Bug.cpp
//ARMNN_AUTO_TEST_CASE_WITH_THF(SimpleSoftmaxBeta1, SimpleSoftmaxTest, 1.0f)
diff --git a/src/backends/reference/RefLayerSupport.cpp b/src/backends/reference/RefLayerSupport.cpp
index defdf0d807..167639a733 100644
--- a/src/backends/reference/RefLayerSupport.cpp
+++ b/src/backends/reference/RefLayerSupport.cpp
@@ -588,6 +588,7 @@ bool RefLayerSupport::IsActivationSupported(const TensorInfo& input,
        case ActivationFunction::Abs:
        case ActivationFunction::BoundedReLu:
        case ActivationFunction::Elu:
+        case ActivationFunction::Gelu:
        case ActivationFunction::HardSwish:
        case ActivationFunction::LeakyReLu:
        case ActivationFunction::Linear:
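
With this case added, the reference backend now reports Gelu as supported. For illustration, requesting it through the existing public ArmNN API looks like this (the surrounding network construction is assumed, not shown in this patch):

    // 'network' is an armnn::INetworkPtr created elsewhere (assumed).
    armnn::ActivationDescriptor desc;
    desc.m_Function = armnn::ActivationFunction::Gelu;  // m_A/m_B are unused by Gelu
    armnn::IConnectableLayer* geluLayer = network->AddActivationLayer(desc, "gelu");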
diff --git a/src/backends/reference/test/RefLayerTests.cpp b/src/backends/reference/test/RefLayerTests.cpp
index af4ed966b2..cfe85594b3 100644
--- a/src/backends/reference/test/RefLayerTests.cpp
+++ b/src/backends/reference/test/RefLayerTests.cpp
@@ -770,11 +770,17 @@ ARMNN_AUTO_TEST_CASE_WITH_THF(TanhInt16, TanhInt16Test)
ARMNN_AUTO_TEST_CASE_WITH_THF(Elu, EluTest)
ARMNN_AUTO_TEST_CASE_WITH_THF(EluUint8, EluUint8Test)
ARMNN_AUTO_TEST_CASE_WITH_THF(EluInt16, EluInt16Test)
+
// HardSwish Activation
ARMNN_AUTO_TEST_CASE_WITH_THF(HardSwish, HardSwishTest)
ARMNN_AUTO_TEST_CASE_WITH_THF(HardSwishUint8, HardSwishUint8Test)
ARMNN_AUTO_TEST_CASE_WITH_THF(HardSwishInt16, HardSwishInt16Test)
+// Gelu Activation
+ARMNN_AUTO_TEST_CASE_WITH_THF(Gelu, GeluTest)
+ARMNN_AUTO_TEST_CASE_WITH_THF(GeluUint8, GeluUint8Test)
+ARMNN_AUTO_TEST_CASE_WITH_THF(GeluInt16, GeluInt16Test)
+
// Fully Connected
ARMNN_AUTO_TEST_CASE_WITH_THF(SimpleFullyConnected, FullyConnectedFloat32Test, false, false)
ARMNN_AUTO_TEST_CASE_WITH_THF(FullyConnectedUint8, FullyConnectedTest<DataType::QAsymmU8>, false, true)
diff --git a/src/backends/reference/workloads/Activation.cpp b/src/backends/reference/workloads/Activation.cpp
index 8de0e8b3b2..1577543fe4 100644
--- a/src/backends/reference/workloads/Activation.cpp
+++ b/src/backends/reference/workloads/Activation.cpp
@@ -82,6 +82,13 @@ float Activation(float in,
            output = in * (std::min(std::max((in + 3),0.0f),6.0f)) / 6;
            break;
        }
+        case ActivationFunction::Gelu:
+        {
+            // gelu(x) = x * 1/2 * (1 + erf(x / sqrt(2))),
+            // where erf is the Gaussian error function
+            output = in * (0.5f * (1.0f + erff(static_cast<float>(in / std::sqrt(2)))));
+            break;
+        }
        default:
        {
            throw InvalidArgumentException("Unsupported activation function");
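
This single float implementation also backs the QAsymmU8 and QSymmS16 test cases above: the reference workload dequantizes each element, applies the float formula, and requantizes the result. A rough per-element sketch (hypothetical helper; the real workload goes through ArmNN's Decoder/Encoder abstractions):

    #include <algorithm>
    #include <cmath>
    #include <cstdint>

    // Hypothetical sketch of the quantized path: dequantize, apply the same
    // float Gelu formula as Activation(), then requantize and clamp.
    uint8_t GeluQAsymmU8(uint8_t in, float qScale, int32_t qOffset)
    {
        float x = (static_cast<int32_t>(in) - qOffset) * qScale;      // dequantize
        float y = x * 0.5f * (1.0f + std::erf(x / std::sqrt(2.0f)));  // Gelu
        int32_t q = static_cast<int32_t>(std::round(y / qScale)) + qOffset;
        return static_cast<uint8_t>(std::min(255, std::max(0, q)));  // requantize
    }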