diff options
Diffstat (limited to 'src/backends/reference')
-rw-r--r-- | src/backends/reference/RefLayerSupport.cpp     | 1 +
-rw-r--r-- | src/backends/reference/test/RefLayerTests.cpp  | 6 ++++++
-rw-r--r-- | src/backends/reference/workloads/Activation.cpp | 7 +++++++
3 files changed, 14 insertions, 0 deletions
diff --git a/src/backends/reference/RefLayerSupport.cpp b/src/backends/reference/RefLayerSupport.cpp index defdf0d807..167639a733 100644 --- a/src/backends/reference/RefLayerSupport.cpp +++ b/src/backends/reference/RefLayerSupport.cpp @@ -588,6 +588,7 @@ bool RefLayerSupport::IsActivationSupported(const TensorInfo& input, case ActivationFunction::Abs: case ActivationFunction::BoundedReLu: case ActivationFunction::Elu: + case ActivationFunction::Gelu: case ActivationFunction::HardSwish: case ActivationFunction::LeakyReLu: case ActivationFunction::Linear: diff --git a/src/backends/reference/test/RefLayerTests.cpp b/src/backends/reference/test/RefLayerTests.cpp index af4ed966b2..cfe85594b3 100644 --- a/src/backends/reference/test/RefLayerTests.cpp +++ b/src/backends/reference/test/RefLayerTests.cpp @@ -770,11 +770,17 @@ ARMNN_AUTO_TEST_CASE_WITH_THF(TanhInt16, TanhInt16Test) ARMNN_AUTO_TEST_CASE_WITH_THF(Elu, EluTest) ARMNN_AUTO_TEST_CASE_WITH_THF(EluUint8, EluUint8Test) ARMNN_AUTO_TEST_CASE_WITH_THF(EluInt16, EluInt16Test) + // HardSwish Activation ARMNN_AUTO_TEST_CASE_WITH_THF(HardSwish, HardSwishTest) ARMNN_AUTO_TEST_CASE_WITH_THF(HardSwishUint8, HardSwishUint8Test) ARMNN_AUTO_TEST_CASE_WITH_THF(HardSwishInt16, HardSwishInt16Test) +// Gelu Activation +ARMNN_AUTO_TEST_CASE_WITH_THF(Gelu, GeluTest) +ARMNN_AUTO_TEST_CASE_WITH_THF(GeluUint8, GeluUint8Test) +ARMNN_AUTO_TEST_CASE_WITH_THF(GeluInt16, GeluInt16Test) + // Fully Connected ARMNN_AUTO_TEST_CASE_WITH_THF(SimpleFullyConnected, FullyConnectedFloat32Test, false, false) ARMNN_AUTO_TEST_CASE_WITH_THF(FullyConnectedUint8, FullyConnectedTest<DataType::QAsymmU8>, false, true) diff --git a/src/backends/reference/workloads/Activation.cpp b/src/backends/reference/workloads/Activation.cpp index 8de0e8b3b2..1577543fe4 100644 --- a/src/backends/reference/workloads/Activation.cpp +++ b/src/backends/reference/workloads/Activation.cpp @@ -82,6 +82,13 @@ float Activation(float in, output = in * (std::min(std::max((in + 3),0.0f),6.0f)) / 6; break; } + case ActivationFunction::Gelu: + { + // gelu(x) = x * 1/2 * (1 + erf(x / sqrt(2))), + // where erf is Gaussian error function + output = in * (0.5f * (1.0f + erff(static_cast<float>(in / std::sqrt(2))))); + break; + } default: { throw InvalidArgumentException("Unsupported activation function");