author     Georgios Pinitas <georgios.pinitas@arm.com>    2019-08-22 17:10:04 +0100
committer  Georgios Pinitas <georgios.pinitas@arm.com>    2019-08-23 11:03:19 +0000
commit     fb0fdcdaec57e6f8e1b96f924411921cc0ba6d94 (patch)
tree       99dc659420ea76dc4fbf6724e1140467249ad105
parent     c8530210c17b391f27ace95523e9590e8166fcd8 (diff)
download   ComputeLibrary-fb0fdcdaec57e6f8e1b96f924411921cc0ba6d94.tar.gz
COMPMID-2460: Add ELU activation on NEON,CL,GLES
Change-Id: I641b12fcfc5dc4a18d2d489c38673b937125dfb9
Signed-off-by: Georgios Pinitas <georgios.pinitas@arm.com>
Reviewed-on: https://review.mlplatform.org/c/1796
Tested-by: Arm Jenkins <bsgcomp@arm.com>
Reviewed-by: Michele Di Giorgio <michele.digiorgio@arm.com>
Comments-Addressed: Arm Jenkins <bsgcomp@arm.com>
 arm_compute/core/Types.h                                       | 1 +
 src/core/CL/cl_kernels/activation_float_helpers.h              | 3 +++
 src/core/GLES_COMPUTE/cs_shaders/activation_layer.cs           | 7 ++++++-
 src/core/GLES_COMPUTE/cs_shaders/activation_layer_helpers_cs.h | 5 +++++
 src/core/NEON/kernels/NEActivationLayerKernel.cpp              | 8 ++++++++
 src/core/Utils.cpp                                             | 1 +
 tests/datasets/ActivationFunctionsDataset.h                    | 1 +
 tests/validation/CL/ActivationLayer.cpp                        | 1 +
 tests/validation/GLES_COMPUTE/ActivationLayer.cpp              | 3 ++-
 tests/validation/NEON/ActivationLayer.cpp                      | 1 +
 tests/validation/reference/ActivationLayer.h                   | 3 +++
 utils/TypePrinter.h                                            | 3 +++
 12 files changed, 35 insertions(+), 2 deletions(-)
diff --git a/arm_compute/core/Types.h b/arm_compute/core/Types.h
index aa07067855..59e39958b6 100644
--- a/arm_compute/core/Types.h
+++ b/arm_compute/core/Types.h
@@ -1528,6 +1528,7 @@ public:
LU_BOUNDED_RELU, /**< Lower and Upper Bounded Rectifier ( \f$ f(x) = min(a, max(b,x)) \f$ ) */
LEAKY_RELU, /**< Leaky Rectifier ( \f$ f(x) = \begin{cases} \alpha x & \quad \text{if } x \text{ < 0}\\ x & \quad \text{if } x \geq \text{ 0 } \end{cases} \f$ ) */
SOFT_RELU, /**< Soft Rectifier ( \f$ f(x)= log(1+e^x) \f$ ) */
+ ELU, /**< Exponential Linear Unit ( \f$ f(x) = \begin{cases} \alpha (exp(x) - 1) & \quad \text{if } x \text{ < 0}\\ x & \quad \text{if } x \geq \text{ 0 } \end{cases} \f$ ) */
ABS, /**< Absolute ( \f$ f(x)= |x| \f$ ) */
SQUARE, /**< Square ( \f$ f(x)= x^2 \f$ )*/
SQRT, /**< Square root ( \f$ f(x) = \sqrt{x} \f$ )*/
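
For reference, the new enum value documents ELU as f(x) = alpha * (exp(x) - 1) for x < 0 and f(x) = x otherwise. The two branches meet at zero, since alpha * (exp(0) - 1) = 0, and with alpha = 1 the slopes match there as well. A minimal scalar sketch of the formula (illustrative only, not library code):

    #include <cmath>

    // Scalar ELU, mirroring the formula in the Doxygen comment above.
    // alpha controls the saturation level for negative inputs:
    // f(x) tends to -alpha as x tends to -infinity.
    float elu(float x, float alpha)
    {
        return (x >= 0.f) ? x : alpha * (std::exp(x) - 1.f);
    }
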
diff --git a/src/core/CL/cl_kernels/activation_float_helpers.h b/src/core/CL/cl_kernels/activation_float_helpers.h
index 2efd2699d7..0c82f83dbc 100644
--- a/src/core/CL/cl_kernels/activation_float_helpers.h
+++ b/src/core/CL/cl_kernels/activation_float_helpers.h
@@ -50,6 +50,9 @@
// Soft RELU Activation
#define srelu_op(DATA_TYPE, x, A_VAL, B_VAL) (log((DATA_TYPE)1.0 + exp(x)))
+// ELU Activation
+#define elu_op(DATA_TYPE, x, A_VAL, B_VAL) (select(((DATA_TYPE)A_VAL * (exp(x) - (DATA_TYPE)1.0)), x, isgreaterequal(x, (DATA_TYPE)0.0)))
+
// Absolute Activation
#define abs_op(DATA_TYPE, x, A_VAL, B_VAL) (fabs(x))
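
The CL helper follows the branchless style of the surrounding macros: select(a, b, cond) returns b wherever the condition holds (for vector types, wherever the MSB of the lane is set), and isgreaterequal produces exactly that lane mask, so both branches are computed and the result is chosen per lane with no divergence. A plain C++ sketch of the same bit-select idea (illustrative only):

    #include <cmath>
    #include <cstdint>
    #include <cstring>

    // Branchless ELU via bit-select, mimicking OpenCL select(a, b, cond):
    // both branches are evaluated, then a mask picks one of them bitwise.
    float elu_select(float x, float alpha)
    {
        const float    neg  = alpha * (std::exp(x) - 1.f);   // taken where x < 0
        const uint32_t mask = (x >= 0.f) ? 0xFFFFFFFFu : 0u; // isgreaterequal-style mask
        uint32_t xbits, nbits, rbits;
        std::memcpy(&xbits, &x, sizeof(x));
        std::memcpy(&nbits, &neg, sizeof(neg));
        rbits = (xbits & mask) | (nbits & ~mask);
        float r;
        std::memcpy(&r, &rbits, sizeof(r));
        return r;
    }
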
diff --git a/src/core/GLES_COMPUTE/cs_shaders/activation_layer.cs b/src/core/GLES_COMPUTE/cs_shaders/activation_layer.cs
index dd97c1501b..e5411de985 100644
--- a/src/core/GLES_COMPUTE/cs_shaders/activation_layer.cs
+++ b/src/core/GLES_COMPUTE/cs_shaders/activation_layer.cs
@@ -69,6 +69,8 @@ void main(void)
data_out = lrelu_op(data);
#elif defined(SRELU) /*SRELU*/
data_out = srelu_op(data);
+#elif defined(ELU) /*ELU*/
+ data_out = elu_op(data);
#elif defined(ABS) /*ABS*/
data_out = abs_op(data);
#elif defined(SQUARE) /*SQUARE*/
@@ -121,6 +123,9 @@ void main(void)
#elif defined(SRELU) /*SRELU*/
data_out.x = srelu_op(a);
data_out.y = srelu_op(b);
+#elif defined(ELU) /*ELU*/
+ data_out.x = elu_op(a);
+ data_out.y = elu_op(b);
#elif defined(ABS) /*ABS*/
data_out.x = abs_op(a);
data_out.y = abs_op(b);
@@ -133,7 +138,7 @@ void main(void)
#elif defined(LINEAR) /*LINEAR*/
data_out.x = linear_op(a);
data_out.y = linear_op(b);
-#elif defined(IDENTITY) /*IDENTITY*/
+#elif defined(IDENTITY) /*IDENTITY*/
data_out.x = identity_op(a);
data_out.y = identity_op(b);
#else /*LOGISTIC*/
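
The shader picks its activation at compile time: one symbol (here ELU) is defined when the compute shader is built, and the #elif chain collapses to a single op call with no runtime branching. The same pattern in C++ preprocessor terms (hypothetical flags, for illustration only):

    // Compile with exactly one of: -DRELU, -DELU, ... (hypothetical flags).
    #if defined(ELU)
    #define ACT_OP(x) elu_op(x)
    #elif defined(RELU)
    #define ACT_OP(x) relu_op(x)
    #else
    #define ACT_OP(x) (x) // identity fallback
    #endif
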
diff --git a/src/core/GLES_COMPUTE/cs_shaders/activation_layer_helpers_cs.h b/src/core/GLES_COMPUTE/cs_shaders/activation_layer_helpers_cs.h
index e5a89a830f..e0eacf8dc4 100644
--- a/src/core/GLES_COMPUTE/cs_shaders/activation_layer_helpers_cs.h
+++ b/src/core/GLES_COMPUTE/cs_shaders/activation_layer_helpers_cs.h
@@ -97,6 +97,11 @@ float srelu_op(float x)
{
return LOG_OP(ADD_OP(CONST_ONE, EXP_OP(x)));
}
+// ELU Activation
+float elu_op(float x)
+{
+ return (x >= float(0.0)) ? x : MUL_OP(float(A_VAL), SUB_OP(EXP_OP(x), CONST_ONE));
+}
// Absolute Activation
float abs_op(float x)
{
diff --git a/src/core/NEON/kernels/NEActivationLayerKernel.cpp b/src/core/NEON/kernels/NEActivationLayerKernel.cpp
index 3953305996..242382c206 100644
--- a/src/core/NEON/kernels/NEActivationLayerKernel.cpp
+++ b/src/core/NEON/kernels/NEActivationLayerKernel.cpp
@@ -139,6 +139,7 @@ void NEActivationLayerKernel::configure(ITensor *input, ITensor *output, Activat
{ ActivationFunction::LU_BOUNDED_RELU, &NEActivationLayerKernel::activation<ActivationFunction::LU_BOUNDED_RELU, float> },
{ ActivationFunction::LEAKY_RELU, &NEActivationLayerKernel::activation<ActivationFunction::LEAKY_RELU, float> },
{ ActivationFunction::SOFT_RELU, &NEActivationLayerKernel::activation<ActivationFunction::SOFT_RELU, float> },
+ { ActivationFunction::ELU, &NEActivationLayerKernel::activation<ActivationFunction::ELU, float> },
{ ActivationFunction::SQRT, &NEActivationLayerKernel::activation<ActivationFunction::SQRT, float> },
{ ActivationFunction::SQUARE, &NEActivationLayerKernel::activation<ActivationFunction::SQUARE, float> },
{ ActivationFunction::TANH, &NEActivationLayerKernel::activation<ActivationFunction::TANH, float> },
@@ -157,6 +158,7 @@ void NEActivationLayerKernel::configure(ITensor *input, ITensor *output, Activat
{ ActivationFunction::LU_BOUNDED_RELU, &NEActivationLayerKernel::activation<ActivationFunction::LU_BOUNDED_RELU, float16_t> },
{ ActivationFunction::LEAKY_RELU, &NEActivationLayerKernel::activation<ActivationFunction::LEAKY_RELU, float16_t> },
{ ActivationFunction::SOFT_RELU, &NEActivationLayerKernel::activation<ActivationFunction::SOFT_RELU, float16_t> },
+ { ActivationFunction::ELU, &NEActivationLayerKernel::activation<ActivationFunction::ELU, float16_t> },
{ ActivationFunction::SQRT, &NEActivationLayerKernel::activation<ActivationFunction::SQRT, float16_t> },
{ ActivationFunction::SQUARE, &NEActivationLayerKernel::activation<ActivationFunction::SQUARE, float16_t> },
{ ActivationFunction::TANH, &NEActivationLayerKernel::activation<ActivationFunction::TANH, float16_t> },
@@ -271,6 +273,9 @@ NEActivationLayerKernel::activation(const Window &window)
case ActivationFunction::SOFT_RELU:
tmp = wrapper::vlog(wrapper::vadd(const_1, wrapper::vexpq(vin)));
break;
+ case ActivationFunction::ELU:
+ tmp = wrapper::vbsl(wrapper::vcge(vin, const_0), vin, wrapper::vmul(va, wrapper::vsub(wrapper::vexpq(vin), const_1)));
+ break;
case ActivationFunction::SQRT:
tmp = wrapper::vinv(wrapper::vinvsqrt(vin));
break;
@@ -320,6 +325,9 @@ NEActivationLayerKernel::activation(const Window &window)
case ActivationFunction::SOFT_RELU:
tmp = std::log(static_cast<T>(1) + std::exp(in));
break;
+ case ActivationFunction::ELU:
+ tmp = (in >= 0) ? in : a * (std::exp(in) - 1);
+ break;
case ActivationFunction::SQRT:
tmp = std::sqrt(in);
break;
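
On NEON the same selection is done with a comparison mask and a bit-select: wrapper::vcge yields an all-ones lane mask where vin >= 0, and wrapper::vbsl picks vin or alpha * (exp(vin) - 1) per lane. A sketch with raw NEON intrinsics, using a lane-wise std::exp as a stand-in for the library's internal vexpq approximation (assumption: illustrative only, not the kernel's actual code):

    #include <arm_neon.h>
    #include <cmath>

    // Branch-free vector ELU for four float lanes.
    float32x4_t elu_f32(float32x4_t x, float alpha)
    {
        // exp(x) per lane; the library uses a vectorised approximation instead.
        float lanes[4];
        vst1q_f32(lanes, x);
        for(float &v : lanes)
        {
            v = std::exp(v);
        }
        const float32x4_t ex = vld1q_f32(lanes);

        const uint32x4_t  ge0 = vcgeq_f32(x, vdupq_n_f32(0.f));   // lane mask: x >= 0
        const float32x4_t neg = vmulq_f32(vdupq_n_f32(alpha),
                                          vsubq_f32(ex, vdupq_n_f32(1.f))); // alpha*(exp(x)-1)
        return vbslq_f32(ge0, x, neg);                            // per-lane select
    }
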
diff --git a/src/core/Utils.cpp b/src/core/Utils.cpp
index 0c7eea84e3..d5d9d10e6b 100644
--- a/src/core/Utils.cpp
+++ b/src/core/Utils.cpp
@@ -180,6 +180,7 @@ const std::string &arm_compute::string_from_activation_func(ActivationLayerInfo:
{ ActivationLayerInfo::ActivationFunction::LU_BOUNDED_RELU, "LU_BRELU" },
{ ActivationLayerInfo::ActivationFunction::LEAKY_RELU, "LRELU" },
{ ActivationLayerInfo::ActivationFunction::SOFT_RELU, "SRELU" },
+ { ActivationLayerInfo::ActivationFunction::ELU, "ELU" },
{ ActivationLayerInfo::ActivationFunction::SQRT, "SQRT" },
{ ActivationLayerInfo::ActivationFunction::SQUARE, "SQUARE" },
{ ActivationLayerInfo::ActivationFunction::TANH, "TANH" },
diff --git a/tests/datasets/ActivationFunctionsDataset.h b/tests/datasets/ActivationFunctionsDataset.h
index 29fb21cec0..d8218648e0 100644
--- a/tests/datasets/ActivationFunctionsDataset.h
+++ b/tests/datasets/ActivationFunctionsDataset.h
@@ -49,6 +49,7 @@ public:
ActivationLayerInfo::ActivationFunction::LU_BOUNDED_RELU,
ActivationLayerInfo::ActivationFunction::LEAKY_RELU,
ActivationLayerInfo::ActivationFunction::SOFT_RELU,
+ ActivationLayerInfo::ActivationFunction::ELU,
ActivationLayerInfo::ActivationFunction::SQRT,
ActivationLayerInfo::ActivationFunction::SQUARE,
ActivationLayerInfo::ActivationFunction::TANH,
diff --git a/tests/validation/CL/ActivationLayer.cpp b/tests/validation/CL/ActivationLayer.cpp
index fd203ccb7e..e97f12f5a3 100644
--- a/tests/validation/CL/ActivationLayer.cpp
+++ b/tests/validation/CL/ActivationLayer.cpp
@@ -67,6 +67,7 @@ AbsoluteTolerance<float> tolerance(ActivationLayerInfo::ActivationFunction activ
case ActivationLayerInfo::ActivationFunction::LEAKY_RELU:
return AbsoluteTolerance<float>(data_type == DataType::F16 ? 0.00001f : epsilon);
case ActivationLayerInfo::ActivationFunction::SOFT_RELU:
+ case ActivationLayerInfo::ActivationFunction::ELU:
case ActivationLayerInfo::ActivationFunction::SQRT:
return AbsoluteTolerance<float>(data_type == DataType::F16 ? 0.01f : 0.00001f);
case ActivationLayerInfo::ActivationFunction::TANH:
diff --git a/tests/validation/GLES_COMPUTE/ActivationLayer.cpp b/tests/validation/GLES_COMPUTE/ActivationLayer.cpp
index 7676b858f6..fdb9d1823e 100644
--- a/tests/validation/GLES_COMPUTE/ActivationLayer.cpp
+++ b/tests/validation/GLES_COMPUTE/ActivationLayer.cpp
@@ -1,5 +1,5 @@
/*
- * Copyright (c) 2017-2018 ARM Limited.
+ * Copyright (c) 2017-2019 ARM Limited.
*
* SPDX-License-Identifier: MIT
*
@@ -65,6 +65,7 @@ AbsoluteTolerance<float> tolerance(ActivationLayerInfo::ActivationFunction activ
case ActivationLayerInfo::ActivationFunction::LEAKY_RELU:
return AbsoluteTolerance<float>(data_type == DataType::F16 ? 0.00001f : epsilon);
case ActivationLayerInfo::ActivationFunction::SOFT_RELU:
+ case ActivationLayerInfo::ActivationFunction::ELU:
case ActivationLayerInfo::ActivationFunction::SQRT:
return AbsoluteTolerance<float>(data_type == DataType::F16 ? 0.01f : 0.00001f);
case ActivationLayerInfo::ActivationFunction::TANH:
diff --git a/tests/validation/NEON/ActivationLayer.cpp b/tests/validation/NEON/ActivationLayer.cpp
index 1174a055c7..eb3a37fba7 100644
--- a/tests/validation/NEON/ActivationLayer.cpp
+++ b/tests/validation/NEON/ActivationLayer.cpp
@@ -56,6 +56,7 @@ RelativeTolerance<float> relative_tolerance(DataType data_type, ActivationLayerI
{
case ActivationLayerInfo::ActivationFunction::LOGISTIC:
case ActivationLayerInfo::ActivationFunction::SOFT_RELU:
+ case ActivationLayerInfo::ActivationFunction::ELU:
case ActivationLayerInfo::ActivationFunction::SQRT:
case ActivationLayerInfo::ActivationFunction::TANH:
switch(data_type)
diff --git a/tests/validation/reference/ActivationLayer.h b/tests/validation/reference/ActivationLayer.h
index 5beca7c76d..2d5dfdf4af 100644
--- a/tests/validation/reference/ActivationLayer.h
+++ b/tests/validation/reference/ActivationLayer.h
@@ -66,6 +66,9 @@ inline T activate_float(T x, T a, T b, ActivationLayerInfo::ActivationFunction a
case ActivationLayerInfo::ActivationFunction::SOFT_RELU:
ret = std::log(static_cast<T>(1) + std::exp(x));
break;
+ case ActivationLayerInfo::ActivationFunction::ELU:
+ ret = (x > 0) ? x : a * (std::exp(x) - static_cast<T>(1));
+ break;
case ActivationLayerInfo::ActivationFunction::SQRT:
ret = std::sqrt(x);
break;
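
Note the reference guards on x > 0 while the NEON path uses x >= 0; both agree at zero because alpha * (exp(0) - 1) = 0. A quick spot-check of the formula with alpha = 1 (a hypothetical standalone test, not part of the validation suite): ELU(-1) = exp(-1) - 1, roughly -0.6321, and ELU(2) = 2.

    #include <cassert>
    #include <cmath>

    int main()
    {
        const float a = 1.f; // alpha
        auto elu = [a](float x) { return (x > 0) ? x : a * (std::exp(x) - 1.f); };
        assert(std::fabs(elu(-1.f) - (std::exp(-1.f) - 1.f)) < 1e-6f);
        assert(elu(2.f) == 2.f); // positive inputs pass through unchanged
        return 0;
    }
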
diff --git a/utils/TypePrinter.h b/utils/TypePrinter.h
index 69ffe9e4a6..904115360a 100644
--- a/utils/TypePrinter.h
+++ b/utils/TypePrinter.h
@@ -379,6 +379,9 @@ inline ::std::ostream &operator<<(::std::ostream &os, const ActivationLayerInfo:
case ActivationLayerInfo::ActivationFunction::LU_BOUNDED_RELU:
os << "LU_BOUNDED_RELU";
break;
+ case ActivationLayerInfo::ActivationFunction::ELU:
+ os << "ELU";
+ break;
case ActivationLayerInfo::ActivationFunction::SQUARE:
os << "SQUARE";
break;