From fb0fdcdaec57e6f8e1b96f924411921cc0ba6d94 Mon Sep 17 00:00:00 2001
From: Georgios Pinitas
Date: Thu, 22 Aug 2019 17:10:04 +0100
Subject: COMPMID-2460: Add ELU activation on NEON,CL,GLES

Change-Id: I641b12fcfc5dc4a18d2d489c38673b937125dfb9
Signed-off-by: Georgios Pinitas
Reviewed-on: https://review.mlplatform.org/c/1796
Tested-by: Arm Jenkins
Reviewed-by: Michele Di Giorgio
Comments-Addressed: Arm Jenkins
---
 tests/validation/reference/ActivationLayer.h | 3 +++
 1 file changed, 3 insertions(+)

(limited to 'tests/validation/reference/ActivationLayer.h')

diff --git a/tests/validation/reference/ActivationLayer.h b/tests/validation/reference/ActivationLayer.h
index 5beca7c76d..2d5dfdf4af 100644
--- a/tests/validation/reference/ActivationLayer.h
+++ b/tests/validation/reference/ActivationLayer.h
@@ -66,6 +66,9 @@ inline T activate_float(T x, T a, T b, ActivationLayerInfo::ActivationFunction a
         case ActivationLayerInfo::ActivationFunction::SOFT_RELU:
             ret = std::log(static_cast<T>(1) + std::exp(x));
             break;
+        case ActivationLayerInfo::ActivationFunction::ELU:
+            ret = (x > 0) ? x : a * (std::exp(x) - static_cast<T>(1));
+            break;
         case ActivationLayerInfo::ActivationFunction::SQRT:
             ret = std::sqrt(x);
             break;
--
cgit v1.2.1
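
Note (not part of the patch): a minimal standalone sketch of the ELU semantics the reference implementation above computes. The function name `elu`, the `main` harness, and the sample inputs are illustrative assumptions, not code from the commit.

    #include <cmath>
    #include <cstdio>

    // ELU: identity for positive inputs, a * (e^x - 1) otherwise,
    // mirroring the ternary added to activate_float() in this patch.
    template <typename T>
    T elu(T x, T a)
    {
        return (x > static_cast<T>(0)) ? x : a * (std::exp(x) - static_cast<T>(1));
    }

    int main()
    {
        // Spot-check a few values with alpha (a) = 1.0.
        const float xs[] = { -2.0f, -0.5f, 0.0f, 1.5f };
        for(float x : xs)
        {
            std::printf("elu(%+.2f, a=1.0) = %+.6f\n", x, elu(x, 1.0f));
        }
        return 0;
    }

The ternary keeps the positive branch exact (plain identity) and only evaluates std::exp on the negative branch, which is the same shape the patch uses for the reference path.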