author    Murray Kornelsen <murray.kornelsen@mail.mcgill.ca>  2022-07-13 21:22:39 -0400
committer Pablo Marquez Tello <pablo.tello@arm.com>           2022-09-14 09:15:03 +0000
commit    926f502ca731fa49bcdf949408ce25728616e5f2 (patch)
tree      7e221103a9c0c5c0e4c054abc07cbdf11c7c7b4e /tests/validation
parent    6e09e1404c635d948cf20eb6b4b5747dfb6656f2 (diff)
Adding GELU activation
The OpenCL implementation uses the built-in erf. The NEON implementation requires a new vectorized erf, which uses the following approximation:

    erf(x) = 1 - 1 / (1 + a1*x + a2*x^2 + a3*x^3 + a4*x^4)^4
    a1 = 0.278393, a2 = 0.230389, a3 = 0.000972, a4 = 0.078108

From https://en.wikipedia.org/wiki/Error_function#Numerical_approximations

Signed-off-by: Murray Kornelsen <murray.kornelsen@mail.mcgill.ca>
Change-Id: I2d3964b2c26a4334166b17135f9104bc6324fad2
Reviewed-on: https://review.mlplatform.org/c/ml/ComputeLibrary/+/7921
Reviewed-by: Viet-Hoa Do <viet-hoa.do@arm.com>
Reviewed-by: Pablo Marquez Tello <pablo.tello@arm.com>
Comments-Addressed: Arm Jenkins <bsgcomp@arm.com>
Comments-Addressed: Pablo Marquez Tello <pablo.tello@arm.com>
Tested-by: Arm Jenkins <bsgcomp@arm.com>
Benchmark: Arm Jenkins <bsgcomp@arm.com>
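For illustration, a minimal scalar C++ sketch of the quoted approximation (the patch itself adds a vectorized NEON variant; this helper and the name erf_approx are illustrative only, with the odd symmetry erf(-x) = -erf(x) used to extend the x >= 0 formula to negative inputs):

    #include <cmath>

    // Scalar sketch of the polynomial approximation quoted above
    // (valid for x >= 0; maximum error about 5e-4 per the cited
    // Wikipedia section).
    inline float erf_approx(float x)
    {
        const float a1 = 0.278393f, a2 = 0.230389f, a3 = 0.000972f, a4 = 0.078108f;
        const float ax = std::fabs(x);
        const float d  = 1.0f + ax * (a1 + ax * (a2 + ax * (a3 + ax * a4)));
        const float d2 = d * d;
        // erf(-x) = -erf(x) restores the sign for negative inputs.
        return std::copysign(1.0f - 1.0f / (d2 * d2), x);
    }

The polynomial is written in Horner form, which maps naturally onto fused multiply-add instructions once vectorized.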
Diffstat (limited to 'tests/validation')
-rw-r--r--  tests/validation/CL/ActivationLayer.cpp      | 3
-rw-r--r--  tests/validation/NEON/ActivationLayer.cpp    | 1
-rw-r--r--  tests/validation/reference/ActivationLayer.h | 5
3 files changed, 7 insertions, 2 deletions
diff --git a/tests/validation/CL/ActivationLayer.cpp b/tests/validation/CL/ActivationLayer.cpp
index fa95594157..133b39d154 100644
--- a/tests/validation/CL/ActivationLayer.cpp
+++ b/tests/validation/CL/ActivationLayer.cpp
@@ -1,5 +1,5 @@
/*
- * Copyright (c) 2017-2020 Arm Limited.
+ * Copyright (c) 2017-2020, 2022 Arm Limited.
*
* SPDX-License-Identifier: MIT
*
@@ -70,6 +70,7 @@ AbsoluteTolerance<float> tolerance(ActivationLayerInfo::ActivationFunction activ
case ActivationLayerInfo::ActivationFunction::SOFT_RELU:
case ActivationLayerInfo::ActivationFunction::ELU:
case ActivationLayerInfo::ActivationFunction::SQRT:
+ case ActivationLayerInfo::ActivationFunction::GELU:
return AbsoluteTolerance<float>(data_type == DataType::F16 ? 0.01f : 0.00001f);
case ActivationLayerInfo::ActivationFunction::TANH:
return AbsoluteTolerance<float>(data_type == DataType::F16 ? 0.001f : 0.00001f);
diff --git a/tests/validation/NEON/ActivationLayer.cpp b/tests/validation/NEON/ActivationLayer.cpp
index e45b7fa5ad..a2971f28ba 100644
--- a/tests/validation/NEON/ActivationLayer.cpp
+++ b/tests/validation/NEON/ActivationLayer.cpp
@@ -68,6 +68,7 @@ RelativeTolerance<float> relative_tolerance(DataType data_type, ActivationLayerI
case ActivationLayerInfo::ActivationFunction::SQRT:
case ActivationLayerInfo::ActivationFunction::TANH:
case ActivationLayerInfo::ActivationFunction::HARD_SWISH:
+ case ActivationLayerInfo::ActivationFunction::GELU:
switch(data_type)
{
case DataType::F16:
diff --git a/tests/validation/reference/ActivationLayer.h b/tests/validation/reference/ActivationLayer.h
index 8aad1af63e..97733238ef 100644
--- a/tests/validation/reference/ActivationLayer.h
+++ b/tests/validation/reference/ActivationLayer.h
@@ -1,5 +1,5 @@
/*
- * Copyright (c) 2017-2020 Arm Limited.
+ * Copyright (c) 2017-2020, 2022 Arm Limited.
*
* SPDX-License-Identifier: MIT
*
@@ -84,6 +84,9 @@ inline T activate_float(T x, T a, T b, ActivationLayerInfo::ActivationFunction a
case ActivationLayerInfo::ActivationFunction::HARD_SWISH:
ret = x * ((std::min(std::max(static_cast<T>(x + 3), static_cast<T>(0.0f)), static_cast<T>(6.0f))) * 0.166666667f);
break;
+ case ActivationLayerInfo::ActivationFunction::GELU:
+ ret = x * 0.5f * (1 + erf(x / std::sqrt(2.0f)));
+ break;
default:
ARM_COMPUTE_ERROR("Unsupported activation function");
break;
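For context, a self-contained sketch (not part of the patch; the helper names are hypothetical) comparing the exact GELU used by the reference implementation above, x * 0.5 * (1 + erf(x / sqrt(2))), with the same formula evaluated through the commit's polynomial erf approximation:

    #include <cmath>
    #include <cstdio>

    // Polynomial erf approximation from the commit message (for x >= 0),
    // with the sign restored via erf(-x) = -erf(x).
    static float erf_poly(float x)
    {
        const float a1 = 0.278393f, a2 = 0.230389f, a3 = 0.000972f, a4 = 0.078108f;
        const float ax = std::fabs(x);
        const float d  = 1.0f + ax * (a1 + ax * (a2 + ax * (a3 + ax * a4)));
        return std::copysign(1.0f - 1.0f / (d * d * d * d), x);
    }

    int main()
    {
        const float xs[] = { -3.0f, -1.0f, 0.0f, 0.5f, 2.0f };
        for (float x : xs)
        {
            const float z      = x / std::sqrt(2.0f);
            const float exact  = x * 0.5f * (1.0f + std::erf(z));  // reference GELU above
            const float approx = x * 0.5f * (1.0f + erf_poly(z));  // approximated erf
            std::printf("x=% .1f  exact=% .6f  approx=% .6f  diff=% .1e\n",
                        x, exact, approx, exact - approx);
        }
        return 0;
    }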