From fb0fdcdaec57e6f8e1b96f924411921cc0ba6d94 Mon Sep 17 00:00:00 2001
From: Georgios Pinitas
Date: Thu, 22 Aug 2019 17:10:04 +0100
Subject: COMPMID-2460: Add ELU activation on NEON,CL,GLES

Change-Id: I641b12fcfc5dc4a18d2d489c38673b937125dfb9
Signed-off-by: Georgios Pinitas
Reviewed-on: https://review.mlplatform.org/c/1796
Tested-by: Arm Jenkins
Reviewed-by: Michele Di Giorgio
Comments-Addressed: Arm Jenkins
---
 src/core/CL/cl_kernels/activation_float_helpers.h | 3 +++
 1 file changed, 3 insertions(+)

(limited to 'src/core/CL/cl_kernels/activation_float_helpers.h')

diff --git a/src/core/CL/cl_kernels/activation_float_helpers.h b/src/core/CL/cl_kernels/activation_float_helpers.h
index 2efd2699d7..0c82f83dbc 100644
--- a/src/core/CL/cl_kernels/activation_float_helpers.h
+++ b/src/core/CL/cl_kernels/activation_float_helpers.h
@@ -50,6 +50,9 @@
 // Soft RELU Activation
 #define srelu_op(DATA_TYPE, x, A_VAL, B_VAL) (log((DATA_TYPE)1.0 + exp(x)))
 
+// ELU Activation
+#define elu_op(DATA_TYPE, x, A_VAL, B_VAL) (select(((DATA_TYPE)A_VAL * (exp(x) - (DATA_TYPE)1.0)), x, isgreaterequal(x, (DATA_TYPE)0.0)))
+
 // Absolute Activation
 #define abs_op(DATA_TYPE, x, A_VAL, B_VAL) (fabs(x))
-- 
cgit v1.2.1
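
For context on the added macro: the OpenCL elu_op computes ELU, i.e. f(x) = x for x >= 0 and f(x) = A_VAL * (exp(x) - 1) for x < 0, using the OpenCL select built-in to pick x wherever isgreaterequal(x, 0) holds. A minimal scalar C sketch of the same formula is shown below; the helper name elu_ref, the alpha parameter, and the sample inputs are illustrative assumptions, not part of this patch.

    #include <math.h>
    #include <stdio.h>

    /* Scalar reference of the ELU formula used by elu_op:
     * f(x) = x                      if x >= 0
     * f(x) = alpha * (exp(x) - 1)   otherwise (alpha plays the role of A_VAL) */
    static float elu_ref(float x, float alpha)
    {
        return (x >= 0.0f) ? x : alpha * (expf(x) - 1.0f);
    }

    int main(void)
    {
        const float alpha    = 1.0f;
        const float inputs[] = { -2.0f, -0.5f, 0.0f, 1.5f };
        for (int i = 0; i < 4; ++i)
        {
            /* Print each input alongside its ELU value for a quick sanity check. */
            printf("elu(%+.1f) = %+.6f\n", inputs[i], elu_ref(inputs[i], alpha));
        }
        return 0;
    }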