From fb0fdcdaec57e6f8e1b96f924411921cc0ba6d94 Mon Sep 17 00:00:00 2001
From: Georgios Pinitas
Date: Thu, 22 Aug 2019 17:10:04 +0100
Subject: COMPMID-2460: Add ELU activation on NEON,CL,GLES

Change-Id: I641b12fcfc5dc4a18d2d489c38673b937125dfb9
Signed-off-by: Georgios Pinitas
Reviewed-on: https://review.mlplatform.org/c/1796
Tested-by: Arm Jenkins
Reviewed-by: Michele Di Giorgio
Comments-Addressed: Arm Jenkins
---
 arm_compute/core/Types.h                                        | 1 +
 src/core/CL/cl_kernels/activation_float_helpers.h               | 3 +++
 src/core/GLES_COMPUTE/cs_shaders/activation_layer.cs            | 7 ++++++-
 src/core/GLES_COMPUTE/cs_shaders/activation_layer_helpers_cs.h  | 5 +++++
 src/core/NEON/kernels/NEActivationLayerKernel.cpp               | 8 ++++++++
 src/core/Utils.cpp                                              | 1 +
 tests/datasets/ActivationFunctionsDataset.h                     | 1 +
 tests/validation/CL/ActivationLayer.cpp                         | 1 +
 tests/validation/GLES_COMPUTE/ActivationLayer.cpp               | 3 ++-
 tests/validation/NEON/ActivationLayer.cpp                       | 1 +
 tests/validation/reference/ActivationLayer.h                    | 3 +++
 utils/TypePrinter.h                                             | 3 +++
 12 files changed, 35 insertions(+), 2 deletions(-)

diff --git a/arm_compute/core/Types.h b/arm_compute/core/Types.h
index aa07067855..59e39958b6 100644
--- a/arm_compute/core/Types.h
+++ b/arm_compute/core/Types.h
@@ -1528,6 +1528,7 @@ public:
         LU_BOUNDED_RELU, /**< Lower and Upper Bounded Rectifier ( \f$ f(x) = min(a, max(b,x)) \f$ ) */
         LEAKY_RELU,      /**< Leaky Rectifier ( \f$ f(x) = \begin{cases} \alpha x & \quad \text{if } x \text{ < 0}\\ x & \quad \text{if } x \geq \text{ 0 } \end{cases} \f$ ) */
         SOFT_RELU,       /**< Soft Rectifier ( \f$ f(x)= log(1+e^x) \f$ ) */
+        ELU,             /**< Exponential Linear Unit ( \f$ f(x) = \begin{cases} \alpha (exp(x) - 1) & \quad \text{if } x \text{ < 0}\\ x & \quad \text{if } x \geq \text{ 0 } \end{cases} \f$ ) */
         ABS,             /**< Absolute ( \f$ f(x)= |x| \f$ ) */
         SQUARE,          /**< Square ( \f$ f(x)= x^2 \f$ )*/
         SQRT,            /**< Square root ( \f$ f(x) = \sqrt{x} \f$ )*/
diff --git a/src/core/CL/cl_kernels/activation_float_helpers.h b/src/core/CL/cl_kernels/activation_float_helpers.h
index 2efd2699d7..0c82f83dbc 100644
--- a/src/core/CL/cl_kernels/activation_float_helpers.h
+++ b/src/core/CL/cl_kernels/activation_float_helpers.h
@@ -50,6 +50,9 @@
 // Soft RELU Activation
 #define srelu_op(DATA_TYPE, x, A_VAL, B_VAL) (log((DATA_TYPE)1.0 + exp(x)))
 
+// ELU Activation
+#define elu_op(DATA_TYPE, x, A_VAL, B_VAL) (select(((DATA_TYPE)A_VAL * (exp(x) - (DATA_TYPE)1.0)), x, isgreaterequal(x, (DATA_TYPE)0.0)))
+
 // Absolute Activation
 #define abs_op(DATA_TYPE, x, A_VAL, B_VAL) (fabs(x))
 
diff --git a/src/core/GLES_COMPUTE/cs_shaders/activation_layer.cs b/src/core/GLES_COMPUTE/cs_shaders/activation_layer.cs
index dd97c1501b..e5411de985 100644
--- a/src/core/GLES_COMPUTE/cs_shaders/activation_layer.cs
+++ b/src/core/GLES_COMPUTE/cs_shaders/activation_layer.cs
@@ -69,6 +69,8 @@ void main(void)
     data_out = lrelu_op(data);
 #elif defined(SRELU) /*SRELU*/
     data_out = srelu_op(data);
+#elif defined(ELU) /*ELU*/
+    data_out = elu_op(data);
 #elif defined(ABS) /*ABS*/
     data_out = abs_op(data);
 #elif defined(SQUARE) /*SQUARE*/
@@ -121,6 +123,9 @@ void main(void)
 #elif defined(SRELU) /*SRELU*/
     data_out.x = srelu_op(a);
     data_out.y = srelu_op(b);
+#elif defined(ELU) /*ELU*/
+    data_out.x = elu_op(a);
+    data_out.y = elu_op(b);
 #elif defined(ABS) /*ABS*/
     data_out.x = abs_op(a);
     data_out.y = abs_op(b);
@@ -133,7 +138,7 @@ void main(void)
 #elif defined(LINEAR) /*LINEAR*/
     data_out.x = linear_op(a);
     data_out.y = linear_op(b);
-#elif defined(IDENTITY) /*IDENTITY*/
+#elif defined(IDENTITY) /*IDENTITY*/
     data_out.x = identity_op(a);
     data_out.y = identity_op(b);
 #else /*LOGISTIC*/
diff --git a/src/core/GLES_COMPUTE/cs_shaders/activation_layer_helpers_cs.h b/src/core/GLES_COMPUTE/cs_shaders/activation_layer_helpers_cs.h
index e5a89a830f..e0eacf8dc4 100644
--- a/src/core/GLES_COMPUTE/cs_shaders/activation_layer_helpers_cs.h
+++ b/src/core/GLES_COMPUTE/cs_shaders/activation_layer_helpers_cs.h
@@ -97,6 +97,11 @@ float srelu_op(float x)
 {
     return LOG_OP(ADD_OP(CONST_ONE, EXP_OP(x)));
 }
+// ELU Activation
+float elu_op(float x)
+{
+    return (x >= float(0.0)) ? x : MUL_OP(float(A_VAL), SUB_OP(EXP_OP(x), CONST_ONE));
+}
 // Absolute Activation
 float abs_op(float x)
 {
diff --git a/src/core/NEON/kernels/NEActivationLayerKernel.cpp b/src/core/NEON/kernels/NEActivationLayerKernel.cpp
index 3953305996..242382c206 100644
--- a/src/core/NEON/kernels/NEActivationLayerKernel.cpp
+++ b/src/core/NEON/kernels/NEActivationLayerKernel.cpp
@@ -139,6 +139,7 @@ void NEActivationLayerKernel::configure(ITensor *input, ITensor *output, Activat
         { ActivationFunction::LU_BOUNDED_RELU, &NEActivationLayerKernel::activation },
         { ActivationFunction::LEAKY_RELU, &NEActivationLayerKernel::activation },
         { ActivationFunction::SOFT_RELU, &NEActivationLayerKernel::activation },
+        { ActivationFunction::ELU, &NEActivationLayerKernel::activation },
         { ActivationFunction::SQRT, &NEActivationLayerKernel::activation },
         { ActivationFunction::SQUARE, &NEActivationLayerKernel::activation },
         { ActivationFunction::TANH, &NEActivationLayerKernel::activation },
@@ -157,6 +158,7 @@ void NEActivationLayerKernel::configure(ITensor *input, ITensor *output, Activat
         { ActivationFunction::LU_BOUNDED_RELU, &NEActivationLayerKernel::activation },
         { ActivationFunction::LEAKY_RELU, &NEActivationLayerKernel::activation },
         { ActivationFunction::SOFT_RELU, &NEActivationLayerKernel::activation },
+        { ActivationFunction::ELU, &NEActivationLayerKernel::activation },
         { ActivationFunction::SQRT, &NEActivationLayerKernel::activation },
         { ActivationFunction::SQUARE, &NEActivationLayerKernel::activation },
         { ActivationFunction::TANH, &NEActivationLayerKernel::activation },
@@ -271,6 +273,9 @@ NEActivationLayerKernel::activation(const Window &window)
             case ActivationFunction::SOFT_RELU:
                 tmp = wrapper::vlog(wrapper::vadd(const_1, wrapper::vexpq(vin)));
                 break;
+            case ActivationFunction::ELU:
+                tmp = wrapper::vbsl(wrapper::vcge(vin, const_0), vin, wrapper::vmul(va, wrapper::vsub(wrapper::vexpq(vin), const_1)));
+                break;
             case ActivationFunction::SQRT:
                 tmp = wrapper::vinv(wrapper::vinvsqrt(vin));
                 break;
@@ -320,6 +325,9 @@ NEActivationLayerKernel::activation(const Window &window)
             case ActivationFunction::SOFT_RELU:
                 tmp = std::log(static_cast<T>(1) + std::exp(in));
                 break;
+            case ActivationFunction::ELU:
+                tmp = (in >= 0) ? in : a * (std::exp(in) - 1);
+                break;
             case ActivationFunction::SQRT:
                 tmp = std::sqrt(in);
                 break;
diff --git a/src/core/Utils.cpp b/src/core/Utils.cpp
index 0c7eea84e3..d5d9d10e6b 100644
--- a/src/core/Utils.cpp
+++ b/src/core/Utils.cpp
@@ -180,6 +180,7 @@ const std::string &arm_compute::string_from_activation_func(ActivationLayerInfo:
         { ActivationLayerInfo::ActivationFunction::LU_BOUNDED_RELU, "LU_BRELU" },
         { ActivationLayerInfo::ActivationFunction::LEAKY_RELU, "LRELU" },
         { ActivationLayerInfo::ActivationFunction::SOFT_RELU, "SRELU" },
+        { ActivationLayerInfo::ActivationFunction::ELU, "ELU" },
         { ActivationLayerInfo::ActivationFunction::SQRT, "SQRT" },
         { ActivationLayerInfo::ActivationFunction::SQUARE, "SQUARE" },
         { ActivationLayerInfo::ActivationFunction::TANH, "TANH" },
diff --git a/tests/datasets/ActivationFunctionsDataset.h b/tests/datasets/ActivationFunctionsDataset.h
index 29fb21cec0..d8218648e0 100644
--- a/tests/datasets/ActivationFunctionsDataset.h
+++ b/tests/datasets/ActivationFunctionsDataset.h
@@ -49,6 +49,7 @@ public:
         ActivationLayerInfo::ActivationFunction::LU_BOUNDED_RELU,
         ActivationLayerInfo::ActivationFunction::LEAKY_RELU,
         ActivationLayerInfo::ActivationFunction::SOFT_RELU,
+        ActivationLayerInfo::ActivationFunction::ELU,
         ActivationLayerInfo::ActivationFunction::SQRT,
         ActivationLayerInfo::ActivationFunction::SQUARE,
         ActivationLayerInfo::ActivationFunction::TANH,
diff --git a/tests/validation/CL/ActivationLayer.cpp b/tests/validation/CL/ActivationLayer.cpp
index fd203ccb7e..e97f12f5a3 100644
--- a/tests/validation/CL/ActivationLayer.cpp
+++ b/tests/validation/CL/ActivationLayer.cpp
@@ -67,6 +67,7 @@ AbsoluteTolerance<float> tolerance(ActivationLayerInfo::ActivationFunction activ
         case ActivationLayerInfo::ActivationFunction::LEAKY_RELU:
             return AbsoluteTolerance<float>(data_type == DataType::F16 ? 0.00001f : epsilon);
         case ActivationLayerInfo::ActivationFunction::SOFT_RELU:
+        case ActivationLayerInfo::ActivationFunction::ELU:
         case ActivationLayerInfo::ActivationFunction::SQRT:
             return AbsoluteTolerance<float>(data_type == DataType::F16 ? 0.01f : 0.00001f);
         case ActivationLayerInfo::ActivationFunction::TANH:
diff --git a/tests/validation/GLES_COMPUTE/ActivationLayer.cpp b/tests/validation/GLES_COMPUTE/ActivationLayer.cpp
index 7676b858f6..fdb9d1823e 100644
--- a/tests/validation/GLES_COMPUTE/ActivationLayer.cpp
+++ b/tests/validation/GLES_COMPUTE/ActivationLayer.cpp
@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 2017-2018 ARM Limited.
+ * Copyright (c) 2017-2019 ARM Limited.
  *
  * SPDX-License-Identifier: MIT
  *
@@ -65,6 +65,7 @@ AbsoluteTolerance<float> tolerance(ActivationLayerInfo::ActivationFunction activ
         case ActivationLayerInfo::ActivationFunction::LEAKY_RELU:
             return AbsoluteTolerance<float>(data_type == DataType::F16 ? 0.00001f : epsilon);
         case ActivationLayerInfo::ActivationFunction::SOFT_RELU:
+        case ActivationLayerInfo::ActivationFunction::ELU:
         case ActivationLayerInfo::ActivationFunction::SQRT:
             return AbsoluteTolerance<float>(data_type == DataType::F16 ? 0.01f : 0.00001f);
         case ActivationLayerInfo::ActivationFunction::TANH:
diff --git a/tests/validation/NEON/ActivationLayer.cpp b/tests/validation/NEON/ActivationLayer.cpp
index 1174a055c7..eb3a37fba7 100644
--- a/tests/validation/NEON/ActivationLayer.cpp
+++ b/tests/validation/NEON/ActivationLayer.cpp
@@ -56,6 +56,7 @@ RelativeTolerance<float> relative_tolerance(DataType data_type, ActivationLayerI
     {
         case ActivationLayerInfo::ActivationFunction::LOGISTIC:
         case ActivationLayerInfo::ActivationFunction::SOFT_RELU:
+        case ActivationLayerInfo::ActivationFunction::ELU:
         case ActivationLayerInfo::ActivationFunction::SQRT:
         case ActivationLayerInfo::ActivationFunction::TANH:
             switch(data_type)
diff --git a/tests/validation/reference/ActivationLayer.h b/tests/validation/reference/ActivationLayer.h
index 5beca7c76d..2d5dfdf4af 100644
--- a/tests/validation/reference/ActivationLayer.h
+++ b/tests/validation/reference/ActivationLayer.h
@@ -66,6 +66,9 @@ inline T activate_float(T x, T a, T b, ActivationLayerInfo::ActivationFunction a
         case ActivationLayerInfo::ActivationFunction::SOFT_RELU:
             ret = std::log(static_cast<T>(1) + std::exp(x));
             break;
+        case ActivationLayerInfo::ActivationFunction::ELU:
+            ret = (x > 0) ? x : a * (std::exp(x) - static_cast<T>(1));
+            break;
         case ActivationLayerInfo::ActivationFunction::SQRT:
             ret = std::sqrt(x);
             break;
diff --git a/utils/TypePrinter.h b/utils/TypePrinter.h
index 69ffe9e4a6..904115360a 100644
--- a/utils/TypePrinter.h
+++ b/utils/TypePrinter.h
@@ -379,6 +379,9 @@ inline ::std::ostream &operator<<(::std::ostream &os, const ActivationLayerInfo:
         case ActivationLayerInfo::ActivationFunction::LU_BOUNDED_RELU:
             os << "LU_BOUNDED_RELU";
             break;
+        case ActivationLayerInfo::ActivationFunction::ELU:
+            os << "ELU";
+            break;
         case ActivationLayerInfo::ActivationFunction::SQUARE:
             os << "SQUARE";
             break;
-- 
cgit v1.2.1
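
Every backend touched by this patch implements the same ELU definition: f(x) = x for x >= 0 and f(x) = alpha * (exp(x) - 1) for x < 0, with alpha taken from the activation info's a() parameter. The standalone C++ sketch below mirrors the scalar reference path only; elu_ref, alpha, and the spot-check values are illustrative names for this note, not Compute Library symbols.

#include <cmath>
#include <cstdio>

// Scalar ELU matching the validation reference above:
// returns x for x > 0, alpha * (exp(x) - 1) otherwise.
template <typename T>
T elu_ref(T x, T alpha)
{
    return (x > T(0)) ? x : alpha * (std::exp(x) - T(1));
}

int main()
{
    const float alpha = 1.0f; // plays the role of ActivationLayerInfo's a() parameter
    const float xs[]  = { -2.0f, -0.5f, 0.0f, 3.0f };
    for(float x : xs)
    {
        std::printf("elu(% .2f) = % .6f\n", x, elu_ref(x, alpha));
    }
    return 0;
}

The kernels select on x >= 0 while the reference compares x > 0; the two conventions agree because ELU(0) is 0 either way.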