From fb0fdcdaec57e6f8e1b96f924411921cc0ba6d94 Mon Sep 17 00:00:00 2001
From: Georgios Pinitas
Date: Thu, 22 Aug 2019 17:10:04 +0100
Subject: COMPMID-2460: Add ELU activation on NEON,CL,GLES

Change-Id: I641b12fcfc5dc4a18d2d489c38673b937125dfb9
Signed-off-by: Georgios Pinitas
Reviewed-on: https://review.mlplatform.org/c/1796
Tested-by: Arm Jenkins
Reviewed-by: Michele Di Giorgio
Comments-Addressed: Arm Jenkins
---
 src/core/CL/cl_kernels/activation_float_helpers.h              | 3 +++
 src/core/GLES_COMPUTE/cs_shaders/activation_layer.cs           | 7 ++++++-
 src/core/GLES_COMPUTE/cs_shaders/activation_layer_helpers_cs.h | 5 +++++
 src/core/NEON/kernels/NEActivationLayerKernel.cpp              | 8 ++++++++
 src/core/Utils.cpp                                             | 1 +
 5 files changed, 23 insertions(+), 1 deletion(-)

diff --git a/src/core/CL/cl_kernels/activation_float_helpers.h b/src/core/CL/cl_kernels/activation_float_helpers.h
index 2efd2699d7..0c82f83dbc 100644
--- a/src/core/CL/cl_kernels/activation_float_helpers.h
+++ b/src/core/CL/cl_kernels/activation_float_helpers.h
@@ -50,6 +50,9 @@
 // Soft RELU Activation
 #define srelu_op(DATA_TYPE, x, A_VAL, B_VAL) (log((DATA_TYPE)1.0 + exp(x)))
 
+// ELU Activation
+#define elu_op(DATA_TYPE, x, A_VAL, B_VAL) (select(((DATA_TYPE)A_VAL * (exp(x) - (DATA_TYPE)1.0)), x, isgreaterequal(x, (DATA_TYPE)0.0)))
+
 // Absolute Activation
 #define abs_op(DATA_TYPE, x, A_VAL, B_VAL) (fabs(x))
 
diff --git a/src/core/GLES_COMPUTE/cs_shaders/activation_layer.cs b/src/core/GLES_COMPUTE/cs_shaders/activation_layer.cs
index dd97c1501b..e5411de985 100644
--- a/src/core/GLES_COMPUTE/cs_shaders/activation_layer.cs
+++ b/src/core/GLES_COMPUTE/cs_shaders/activation_layer.cs
@@ -69,6 +69,8 @@ void main(void)
     data_out = lrelu_op(data);
 #elif defined(SRELU) /*SRELU*/
     data_out = srelu_op(data);
+#elif defined(ELU) /*ELU*/
+    data_out = elu_op(data);
 #elif defined(ABS) /*ABS*/
     data_out = abs_op(data);
 #elif defined(SQUARE) /*SQUARE*/
@@ -121,6 +123,9 @@ void main(void)
 #elif defined(SRELU) /*SRELU*/
     data_out.x = srelu_op(a);
     data_out.y = srelu_op(b);
+#elif defined(ELU) /*ELU*/
+    data_out.x = elu_op(a);
+    data_out.y = elu_op(b);
 #elif defined(ABS) /*ABS*/
     data_out.x = abs_op(a);
     data_out.y = abs_op(b);
@@ -133,7 +138,7 @@ void main(void)
 #elif defined(LINEAR) /*LINEAR*/
     data_out.x = linear_op(a);
     data_out.y = linear_op(b);
-#elif defined(IDENTITY) /*IDENTITY*/
+#elif defined(IDENTITY) /*IDENTITY*/
     data_out.x = identity_op(a);
     data_out.y = identity_op(b);
 #else /*LOGISTIC*/
diff --git a/src/core/GLES_COMPUTE/cs_shaders/activation_layer_helpers_cs.h b/src/core/GLES_COMPUTE/cs_shaders/activation_layer_helpers_cs.h
index e5a89a830f..e0eacf8dc4 100644
--- a/src/core/GLES_COMPUTE/cs_shaders/activation_layer_helpers_cs.h
+++ b/src/core/GLES_COMPUTE/cs_shaders/activation_layer_helpers_cs.h
@@ -97,6 +97,11 @@ float srelu_op(float x)
 {
     return LOG_OP(ADD_OP(CONST_ONE, EXP_OP(x)));
 }
+// ELU Activation
+float elu_op(float x)
+{
+    return (x >= float(0.0)) ? x : MUL_OP(float(A_VAL), SUB_OP(EXP_OP(x), CONST_ONE));
+}
 // Absolute Activation
 float abs_op(float x)
 {
diff --git a/src/core/NEON/kernels/NEActivationLayerKernel.cpp b/src/core/NEON/kernels/NEActivationLayerKernel.cpp
index 3953305996..242382c206 100644
--- a/src/core/NEON/kernels/NEActivationLayerKernel.cpp
+++ b/src/core/NEON/kernels/NEActivationLayerKernel.cpp
@@ -139,6 +139,7 @@ void NEActivationLayerKernel::configure(ITensor *input, ITensor *output, Activat
         { ActivationFunction::LU_BOUNDED_RELU, &NEActivationLayerKernel::activation },
         { ActivationFunction::LEAKY_RELU, &NEActivationLayerKernel::activation },
         { ActivationFunction::SOFT_RELU, &NEActivationLayerKernel::activation },
+        { ActivationFunction::ELU, &NEActivationLayerKernel::activation },
         { ActivationFunction::SQRT, &NEActivationLayerKernel::activation },
         { ActivationFunction::SQUARE, &NEActivationLayerKernel::activation },
         { ActivationFunction::TANH, &NEActivationLayerKernel::activation },
@@ -157,6 +158,7 @@ void NEActivationLayerKernel::configure(ITensor *input, ITensor *output, Activat
         { ActivationFunction::LU_BOUNDED_RELU, &NEActivationLayerKernel::activation },
         { ActivationFunction::LEAKY_RELU, &NEActivationLayerKernel::activation },
         { ActivationFunction::SOFT_RELU, &NEActivationLayerKernel::activation },
+        { ActivationFunction::ELU, &NEActivationLayerKernel::activation },
         { ActivationFunction::SQRT, &NEActivationLayerKernel::activation },
         { ActivationFunction::SQUARE, &NEActivationLayerKernel::activation },
         { ActivationFunction::TANH, &NEActivationLayerKernel::activation },
@@ -271,6 +273,9 @@ NEActivationLayerKernel::activation(const Window &window)
             case ActivationFunction::SOFT_RELU:
                 tmp = wrapper::vlog(wrapper::vadd(const_1, wrapper::vexpq(vin)));
                 break;
+            case ActivationFunction::ELU:
+                tmp = wrapper::vbsl(wrapper::vcge(vin, const_0), vin, wrapper::vmul(va, wrapper::vsub(wrapper::vexpq(vin), const_1)));
+                break;
             case ActivationFunction::SQRT:
                 tmp = wrapper::vinv(wrapper::vinvsqrt(vin));
                 break;
@@ -320,6 +325,9 @@ NEActivationLayerKernel::activation(const Window &window)
             case ActivationFunction::SOFT_RELU:
                 tmp = std::log(static_cast(1) + std::exp(in));
                 break;
+            case ActivationFunction::ELU:
+                tmp = (in >= 0) ? in : a * (std::exp(in) - 1);
+                break;
             case ActivationFunction::SQRT:
                 tmp = std::sqrt(in);
                 break;
diff --git a/src/core/Utils.cpp b/src/core/Utils.cpp
index 0c7eea84e3..d5d9d10e6b 100644
--- a/src/core/Utils.cpp
+++ b/src/core/Utils.cpp
@@ -180,6 +180,7 @@ const std::string &arm_compute::string_from_activation_func(ActivationLayerInfo:
         { ActivationLayerInfo::ActivationFunction::LU_BOUNDED_RELU, "LU_BRELU" },
         { ActivationLayerInfo::ActivationFunction::LEAKY_RELU, "LRELU" },
         { ActivationLayerInfo::ActivationFunction::SOFT_RELU, "SRELU" },
+        { ActivationLayerInfo::ActivationFunction::ELU, "ELU" },
         { ActivationLayerInfo::ActivationFunction::SQRT, "SQRT" },
         { ActivationLayerInfo::ActivationFunction::SQUARE, "SQUARE" },
         { ActivationLayerInfo::ActivationFunction::TANH, "TANH" },
--
cgit v1.2.1
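
Note: every backend touched above computes the same piecewise ELU definition,
f(x) = x for x >= 0 and f(x) = A_VAL * (exp(x) - 1) otherwise, with A_VAL
taken from the activation info's alpha parameter. The standalone C++ sketch
below only mirrors that scalar behaviour for illustration; elu_ref, alpha and
the sample inputs are assumptions and not part of the library's API.

#include <cmath>
#include <cstdio>

// Scalar ELU reference: x for x >= 0, alpha * (exp(x) - 1) for x < 0.
// A minimal sketch of the function the kernels above compute, not the
// ComputeLibrary implementation itself.
static float elu_ref(float x, float alpha)
{
    return (x >= 0.0f) ? x : alpha * (std::exp(x) - 1.0f);
}

int main()
{
    // Assumed example value; the kernels read alpha from ActivationLayerInfo.
    const float alpha = 1.0f;
    for(float x : { -2.0f, -0.5f, 0.0f, 1.5f })
    {
        std::printf("elu(%+.2f) = %+.6f\n", x, elu_ref(x, alpha));
    }
    return 0;
}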