author    Georgios Pinitas <georgios.pinitas@arm.com>    2019-08-22 17:10:04 +0100
committer Georgios Pinitas <georgios.pinitas@arm.com>    2019-08-23 11:03:19 +0000
commit    fb0fdcdaec57e6f8e1b96f924411921cc0ba6d94 (patch)
tree      99dc659420ea76dc4fbf6724e1140467249ad105 /src/core/NEON/kernels
parent    c8530210c17b391f27ace95523e9590e8166fcd8 (diff)
COMPMID-2460: Add ELU activation on NEON, CL, GLES
Change-Id: I641b12fcfc5dc4a18d2d489c38673b937125dfb9
Signed-off-by: Georgios Pinitas <georgios.pinitas@arm.com>
Reviewed-on: https://review.mlplatform.org/c/1796
Tested-by: Arm Jenkins <bsgcomp@arm.com>
Reviewed-by: Michele Di Giorgio <michele.digiorgio@arm.com>
Comments-Addressed: Arm Jenkins <bsgcomp@arm.com>
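
For context, ELU (Exponential Linear Unit) is f(x) = x for x >= 0 and f(x) = a * (exp(x) - 1) otherwise, which is what both new code paths in the diff below compute. A minimal scalar sketch of that definition (elu_ref is our name for illustration, not a library symbol):

    #include <cmath>

    // Reference ELU: identity for non-negative inputs, a saturating
    // exponential for negative ones. 'alpha' plays the role of the
    // activation info's 'a' parameter in the kernel below.
    float elu_ref(float x, float alpha)
    {
        return x >= 0.f ? x : alpha * (std::exp(x) - 1.f);
    }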
Diffstat (limited to 'src/core/NEON/kernels')
-rw-r--r--  src/core/NEON/kernels/NEActivationLayerKernel.cpp | 8
1 file changed, 8 insertions(+), 0 deletions(-)
diff --git a/src/core/NEON/kernels/NEActivationLayerKernel.cpp b/src/core/NEON/kernels/NEActivationLayerKernel.cpp
index 3953305996..242382c206 100644
--- a/src/core/NEON/kernels/NEActivationLayerKernel.cpp
+++ b/src/core/NEON/kernels/NEActivationLayerKernel.cpp
@@ -139,6 +139,7 @@ void NEActivationLayerKernel::configure(ITensor *input, ITensor *output, Activat
{ ActivationFunction::LU_BOUNDED_RELU, &NEActivationLayerKernel::activation<ActivationFunction::LU_BOUNDED_RELU, float> },
{ ActivationFunction::LEAKY_RELU, &NEActivationLayerKernel::activation<ActivationFunction::LEAKY_RELU, float> },
{ ActivationFunction::SOFT_RELU, &NEActivationLayerKernel::activation<ActivationFunction::SOFT_RELU, float> },
+ { ActivationFunction::ELU, &NEActivationLayerKernel::activation<ActivationFunction::ELU, float> },
{ ActivationFunction::SQRT, &NEActivationLayerKernel::activation<ActivationFunction::SQRT, float> },
{ ActivationFunction::SQUARE, &NEActivationLayerKernel::activation<ActivationFunction::SQUARE, float> },
{ ActivationFunction::TANH, &NEActivationLayerKernel::activation<ActivationFunction::TANH, float> },
@@ -157,6 +158,7 @@ void NEActivationLayerKernel::configure(ITensor *input, ITensor *output, Activat
{ ActivationFunction::LU_BOUNDED_RELU, &NEActivationLayerKernel::activation<ActivationFunction::LU_BOUNDED_RELU, float16_t> },
{ ActivationFunction::LEAKY_RELU, &NEActivationLayerKernel::activation<ActivationFunction::LEAKY_RELU, float16_t> },
{ ActivationFunction::SOFT_RELU, &NEActivationLayerKernel::activation<ActivationFunction::SOFT_RELU, float16_t> },
+ { ActivationFunction::ELU, &NEActivationLayerKernel::activation<ActivationFunction::ELU, float16_t> },
{ ActivationFunction::SQRT, &NEActivationLayerKernel::activation<ActivationFunction::SQRT, float16_t> },
{ ActivationFunction::SQUARE, &NEActivationLayerKernel::activation<ActivationFunction::SQUARE, float16_t> },
{ ActivationFunction::TANH, &NEActivationLayerKernel::activation<ActivationFunction::TANH, float16_t> },
@@ -271,6 +273,9 @@ NEActivationLayerKernel::activation(const Window &window)
case ActivationFunction::SOFT_RELU:
tmp = wrapper::vlog(wrapper::vadd(const_1, wrapper::vexpq(vin)));
break;
+ case ActivationFunction::ELU:
+ tmp = wrapper::vbsl(wrapper::vcge(vin, const_0), vin, wrapper::vmul(va, wrapper::vsub(wrapper::vexpq(vin), const_1)));
+ break;
case ActivationFunction::SQRT:
tmp = wrapper::vinv(wrapper::vinvsqrt(vin));
break;
@@ -320,6 +325,9 @@ NEActivationLayerKernel::activation(const Window &window)
case ActivationFunction::SOFT_RELU:
tmp = std::log(static_cast<T>(1) + std::exp(in));
break;
+ case ActivationFunction::ELU:
+ tmp = (in >= 0) ? in : a * (std::exp(in) - 1);
+ break;
case ActivationFunction::SQRT:
tmp = std::sqrt(in);
break;