author    Georgios Pinitas <georgios.pinitas@arm.com>  2019-08-22 17:10:04 +0100
committer Georgios Pinitas <georgios.pinitas@arm.com>  2019-08-23 11:03:19 +0000
commit    fb0fdcdaec57e6f8e1b96f924411921cc0ba6d94 (patch)
tree      99dc659420ea76dc4fbf6724e1140467249ad105 /tests
parent    c8530210c17b391f27ace95523e9590e8166fcd8 (diff)
COMPMID-2460: Add ELU activation on NEON, CL, GLES
Change-Id: I641b12fcfc5dc4a18d2d489c38673b937125dfb9
Signed-off-by: Georgios Pinitas <georgios.pinitas@arm.com>
Reviewed-on: https://review.mlplatform.org/c/1796
Tested-by: Arm Jenkins <bsgcomp@arm.com>
Reviewed-by: Michele Di Giorgio <michele.digiorgio@arm.com>
Comments-Addressed: Arm Jenkins <bsgcomp@arm.com>
Diffstat (limited to 'tests')
 tests/datasets/ActivationFunctionsDataset.h       | 1 +
 tests/validation/CL/ActivationLayer.cpp           | 1 +
 tests/validation/GLES_COMPUTE/ActivationLayer.cpp | 3 ++-
 tests/validation/NEON/ActivationLayer.cpp         | 1 +
 tests/validation/reference/ActivationLayer.h      | 3 +++
 5 files changed, 8 insertions(+), 1 deletion(-)
diff --git a/tests/datasets/ActivationFunctionsDataset.h b/tests/datasets/ActivationFunctionsDataset.h
index 29fb21cec0..d8218648e0 100644
--- a/tests/datasets/ActivationFunctionsDataset.h
+++ b/tests/datasets/ActivationFunctionsDataset.h
@@ -49,6 +49,7 @@ public:
ActivationLayerInfo::ActivationFunction::LU_BOUNDED_RELU,
ActivationLayerInfo::ActivationFunction::LEAKY_RELU,
ActivationLayerInfo::ActivationFunction::SOFT_RELU,
+ ActivationLayerInfo::ActivationFunction::ELU,
ActivationLayerInfo::ActivationFunction::SQRT,
ActivationLayerInfo::ActivationFunction::SQUARE,
ActivationLayerInfo::ActivationFunction::TANH,
diff --git a/tests/validation/CL/ActivationLayer.cpp b/tests/validation/CL/ActivationLayer.cpp
index fd203ccb7e..e97f12f5a3 100644
--- a/tests/validation/CL/ActivationLayer.cpp
+++ b/tests/validation/CL/ActivationLayer.cpp
@@ -67,6 +67,7 @@ AbsoluteTolerance<float> tolerance(ActivationLayerInfo::ActivationFunction activ
case ActivationLayerInfo::ActivationFunction::LEAKY_RELU:
return AbsoluteTolerance<float>(data_type == DataType::F16 ? 0.00001f : epsilon);
case ActivationLayerInfo::ActivationFunction::SOFT_RELU:
+ case ActivationLayerInfo::ActivationFunction::ELU:
case ActivationLayerInfo::ActivationFunction::SQRT:
return AbsoluteTolerance<float>(data_type == DataType::F16 ? 0.01f : 0.00001f);
case ActivationLayerInfo::ActivationFunction::TANH:
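For context, the CL (and GLES) validation compares backend output against the float reference within an absolute tolerance, and ELU joins SOFT_RELU and SQRT on the looser F16 bound (0.01f instead of 0.00001f) because exp()-based functions amplify half-precision rounding error. A minimal sketch of what an absolute-tolerance check does, assuming a hypothetical free function rather than the library's actual AbsoluteTolerance class:

```cpp
#include <cmath>

// Hedged sketch, not the library's AbsoluteTolerance implementation:
// the comparison passes when the backend result is within a fixed
// distance of the reference, regardless of the values' magnitude.
inline bool within_absolute_tolerance(float reference, float target, float tolerance)
{
    return std::fabs(reference - target) <= tolerance;
}
```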
diff --git a/tests/validation/GLES_COMPUTE/ActivationLayer.cpp b/tests/validation/GLES_COMPUTE/ActivationLayer.cpp
index 7676b858f6..fdb9d1823e 100644
--- a/tests/validation/GLES_COMPUTE/ActivationLayer.cpp
+++ b/tests/validation/GLES_COMPUTE/ActivationLayer.cpp
@@ -1,5 +1,5 @@
/*
- * Copyright (c) 2017-2018 ARM Limited.
+ * Copyright (c) 2017-2019 ARM Limited.
*
* SPDX-License-Identifier: MIT
*
@@ -65,6 +65,7 @@ AbsoluteTolerance<float> tolerance(ActivationLayerInfo::ActivationFunction activ
case ActivationLayerInfo::ActivationFunction::LEAKY_RELU:
return AbsoluteTolerance<float>(data_type == DataType::F16 ? 0.00001f : epsilon);
case ActivationLayerInfo::ActivationFunction::SOFT_RELU:
+ case ActivationLayerInfo::ActivationFunction::ELU:
case ActivationLayerInfo::ActivationFunction::SQRT:
return AbsoluteTolerance<float>(data_type == DataType::F16 ? 0.01f : 0.00001f);
case ActivationLayerInfo::ActivationFunction::TANH:
diff --git a/tests/validation/NEON/ActivationLayer.cpp b/tests/validation/NEON/ActivationLayer.cpp
index 1174a055c7..eb3a37fba7 100644
--- a/tests/validation/NEON/ActivationLayer.cpp
+++ b/tests/validation/NEON/ActivationLayer.cpp
@@ -56,6 +56,7 @@ RelativeTolerance<float> relative_tolerance(DataType data_type, ActivationLayerI
{
case ActivationLayerInfo::ActivationFunction::LOGISTIC:
case ActivationLayerInfo::ActivationFunction::SOFT_RELU:
+ case ActivationLayerInfo::ActivationFunction::ELU:
case ActivationLayerInfo::ActivationFunction::SQRT:
case ActivationLayerInfo::ActivationFunction::TANH:
switch(data_type)
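The NEON tests instead use a relative tolerance for this group of functions, which scales the allowed error with the magnitude of the reference value; that suits exp()-based activations like ELU whose outputs span several orders of magnitude. A sketch under the same caveat as above (hypothetical helper, not the library's RelativeTolerance class):

```cpp
#include <cmath>

// Hedged sketch, not the library's RelativeTolerance implementation:
// the permitted error grows with |reference|. Real implementations
// typically also allow a small absolute epsilon near zero, where a
// purely relative bound degenerates.
inline bool within_relative_tolerance(float reference, float target, float tolerance)
{
    return std::fabs(reference - target) <= tolerance * std::fabs(reference);
}
```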
diff --git a/tests/validation/reference/ActivationLayer.h b/tests/validation/reference/ActivationLayer.h
index 5beca7c76d..2d5dfdf4af 100644
--- a/tests/validation/reference/ActivationLayer.h
+++ b/tests/validation/reference/ActivationLayer.h
@@ -66,6 +66,9 @@ inline T activate_float(T x, T a, T b, ActivationLayerInfo::ActivationFunction a
case ActivationLayerInfo::ActivationFunction::SOFT_RELU:
ret = std::log(static_cast<T>(1) + std::exp(x));
break;
+ case ActivationLayerInfo::ActivationFunction::ELU:
+ ret = (x > 0) ? x : a * (std::exp(x) - static_cast<T>(1));
+ break;
case ActivationLayerInfo::ActivationFunction::SQRT:
ret = std::sqrt(x);
break;