From fb0fdcdaec57e6f8e1b96f924411921cc0ba6d94 Mon Sep 17 00:00:00 2001
From: Georgios Pinitas
Date: Thu, 22 Aug 2019 17:10:04 +0100
Subject: COMPMID-2460: Add ELU activation on NEON,CL,GLES

Change-Id: I641b12fcfc5dc4a18d2d489c38673b937125dfb9
Signed-off-by: Georgios Pinitas
Reviewed-on: https://review.mlplatform.org/c/1796
Tested-by: Arm Jenkins
Reviewed-by: Michele Di Giorgio
Comments-Addressed: Arm Jenkins
---
 tests/datasets/ActivationFunctionsDataset.h       | 1 +
 tests/validation/CL/ActivationLayer.cpp           | 1 +
 tests/validation/GLES_COMPUTE/ActivationLayer.cpp | 3 ++-
 tests/validation/NEON/ActivationLayer.cpp         | 1 +
 tests/validation/reference/ActivationLayer.h      | 3 +++
 5 files changed, 8 insertions(+), 1 deletion(-)

(limited to 'tests')

diff --git a/tests/datasets/ActivationFunctionsDataset.h b/tests/datasets/ActivationFunctionsDataset.h
index 29fb21cec0..d8218648e0 100644
--- a/tests/datasets/ActivationFunctionsDataset.h
+++ b/tests/datasets/ActivationFunctionsDataset.h
@@ -49,6 +49,7 @@ public:
         ActivationLayerInfo::ActivationFunction::LU_BOUNDED_RELU,
         ActivationLayerInfo::ActivationFunction::LEAKY_RELU,
         ActivationLayerInfo::ActivationFunction::SOFT_RELU,
+        ActivationLayerInfo::ActivationFunction::ELU,
         ActivationLayerInfo::ActivationFunction::SQRT,
         ActivationLayerInfo::ActivationFunction::SQUARE,
         ActivationLayerInfo::ActivationFunction::TANH,
diff --git a/tests/validation/CL/ActivationLayer.cpp b/tests/validation/CL/ActivationLayer.cpp
index fd203ccb7e..e97f12f5a3 100644
--- a/tests/validation/CL/ActivationLayer.cpp
+++ b/tests/validation/CL/ActivationLayer.cpp
@@ -67,6 +67,7 @@ AbsoluteTolerance<float> tolerance(ActivationLayerInfo::ActivationFunction activ
         case ActivationLayerInfo::ActivationFunction::LEAKY_RELU:
             return AbsoluteTolerance<float>(data_type == DataType::F16 ? 0.00001f : epsilon);
         case ActivationLayerInfo::ActivationFunction::SOFT_RELU:
+        case ActivationLayerInfo::ActivationFunction::ELU:
         case ActivationLayerInfo::ActivationFunction::SQRT:
             return AbsoluteTolerance<float>(data_type == DataType::F16 ? 0.01f : 0.00001f);
         case ActivationLayerInfo::ActivationFunction::TANH:
diff --git a/tests/validation/GLES_COMPUTE/ActivationLayer.cpp b/tests/validation/GLES_COMPUTE/ActivationLayer.cpp
index 7676b858f6..fdb9d1823e 100644
--- a/tests/validation/GLES_COMPUTE/ActivationLayer.cpp
+++ b/tests/validation/GLES_COMPUTE/ActivationLayer.cpp
@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 2017-2018 ARM Limited.
+ * Copyright (c) 2017-2019 ARM Limited.
  *
  * SPDX-License-Identifier: MIT
  *
@@ -65,6 +65,7 @@ AbsoluteTolerance<float> tolerance(ActivationLayerInfo::ActivationFunction activ
         case ActivationLayerInfo::ActivationFunction::LEAKY_RELU:
             return AbsoluteTolerance<float>(data_type == DataType::F16 ? 0.00001f : epsilon);
         case ActivationLayerInfo::ActivationFunction::SOFT_RELU:
+        case ActivationLayerInfo::ActivationFunction::ELU:
         case ActivationLayerInfo::ActivationFunction::SQRT:
             return AbsoluteTolerance<float>(data_type == DataType::F16 ? 0.01f : 0.00001f);
         case ActivationLayerInfo::ActivationFunction::TANH:
diff --git a/tests/validation/NEON/ActivationLayer.cpp b/tests/validation/NEON/ActivationLayer.cpp
index 1174a055c7..eb3a37fba7 100644
--- a/tests/validation/NEON/ActivationLayer.cpp
+++ b/tests/validation/NEON/ActivationLayer.cpp
@@ -56,6 +56,7 @@ RelativeTolerance<float> relative_tolerance(DataType data_type, ActivationLayerI
     {
         case ActivationLayerInfo::ActivationFunction::LOGISTIC:
         case ActivationLayerInfo::ActivationFunction::SOFT_RELU:
+        case ActivationLayerInfo::ActivationFunction::ELU:
         case ActivationLayerInfo::ActivationFunction::SQRT:
         case ActivationLayerInfo::ActivationFunction::TANH:
             switch(data_type)
diff --git a/tests/validation/reference/ActivationLayer.h b/tests/validation/reference/ActivationLayer.h
index 5beca7c76d..2d5dfdf4af 100644
--- a/tests/validation/reference/ActivationLayer.h
+++ b/tests/validation/reference/ActivationLayer.h
@@ -66,6 +66,9 @@ inline T activate_float(T x, T a, T b, ActivationLayerInfo::ActivationFunction a
         case ActivationLayerInfo::ActivationFunction::SOFT_RELU:
            ret = std::log(static_cast<T>(1) + std::exp(x));
            break;
+        case ActivationLayerInfo::ActivationFunction::ELU:
+            ret = (x > 0) ? x : a * (std::exp(x) - static_cast<T>(1));
+            break;
         case ActivationLayerInfo::ActivationFunction::SQRT:
            ret = std::sqrt(x);
            break;
--
cgit v1.2.1
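Note (not part of the patch): the reference hunk above implements ELU(x) = x for x > 0 and a * (exp(x) - 1) otherwise, where `a` is the alpha parameter carried by ActivationLayerInfo. A minimal standalone sketch of that formula is given below; the `elu_ref` name and `alpha` parameter are illustrative only.

    #include <cmath>
    #include <cstdio>

    // ELU reference formula, matching the case added to activate_float() above:
    //   ELU(x) = x                     for x > 0
    //          = alpha * (exp(x) - 1)  for x <= 0
    template <typename T>
    T elu_ref(T x, T alpha)
    {
        return (x > static_cast<T>(0)) ? x : alpha * (std::exp(x) - static_cast<T>(1));
    }

    int main()
    {
        // Identity for positive inputs; saturates towards -alpha for large negative inputs.
        std::printf("%f %f\n", elu_ref(2.0f, 1.0f), elu_ref(-2.0f, 1.0f)); // prints 2.000000 -0.864665
    }

In the CL and GLES tolerance functions, ELU shares the looser tolerance already used for SOFT_RELU and SQRT (0.01 for F16), presumably because the exponential dominates the error in half precision.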