author     Georgios Pinitas <georgios.pinitas@arm.com>  2017-09-01 17:44:24 +0100
committer  Anthony Barbier <anthony.barbier@arm.com>    2018-11-02 16:35:24 +0000
commit     64ebe5b392b8135ec939b63596ffb8567a3e3248 (patch)
tree       9291ce93dd474eee8d2d59b7b391e62b32e56cde /tests
parent     a09de0c8b2ed0f1481502d3b023375609362d9e3 (diff)
download   ComputeLibrary-64ebe5b392b8135ec939b63596ffb8567a3e3248.tar.gz
COMPMID-519: Add support for Lower and Upper Bounded RELU for CL/NEON
Change-Id: I7b16216ac59c899a33942bf17757b54535256d7a
Reviewed-on: http://mpd-gerrit.cambridge.arm.com/86172
Tested-by: Kaizen <jeremy.johnson+kaizengerrit@arm.com>
Reviewed-by: Anthony Barbier <anthony.barbier@arm.com>
Diffstat (limited to 'tests')
-rw-r--r--  tests/TypePrinter.h                          3
-rw-r--r--  tests/datasets/ActivationFunctionsDataset.h  1
-rw-r--r--  tests/validation/CPP/ActivationLayer.cpp     6
3 files changed, 10 insertions, 0 deletions
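
For reference, the new LU_BOUNDED_RELU activation clamps its input to the interval [b, a], i.e. f(x) = min(a, max(b, x)); with a = 6 and b = 0 it reduces to the familiar ReLU6. A minimal standalone sketch of that formula (plain C++, independent of the library types touched in the hunks below):

#include <algorithm>
#include <iostream>

// Lower/Upper bounded ReLU: clamp x into [b, a], matching the
// reference expression added in this patch: min(a, max(b, x)).
template <typename T>
T lu_bounded_relu(T x, T a /* upper bound */, T b /* lower bound */)
{
    return std::min<T>(a, std::max<T>(b, x));
}

int main()
{
    std::cout << lu_bounded_relu(-1.5f, 6.f, 0.f) << '\n'; // 0 (clamped from below)
    std::cout << lu_bounded_relu(3.f, 6.f, 0.f) << '\n';   // 3 (passed through)
    std::cout << lu_bounded_relu(9.f, 6.f, 0.f) << '\n';   // 6 (clamped from above)
}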
diff --git a/tests/TypePrinter.h b/tests/TypePrinter.h
index c207c1d634..bbccaadc6d 100644
--- a/tests/TypePrinter.h
+++ b/tests/TypePrinter.h
@@ -254,6 +254,9 @@ inline ::std::ostream &operator<<(::std::ostream &os, const ActivationLayerInfo:
         case ActivationLayerInfo::ActivationFunction::BOUNDED_RELU:
             os << "BOUNDED_RELU";
             break;
+        case ActivationLayerInfo::ActivationFunction::LU_BOUNDED_RELU:
+            os << "LU_BOUNDED_RELU";
+            break;
         case ActivationLayerInfo::ActivationFunction::LEAKY_RELU:
             os << "LEAKY_RELU";
             break;
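
With this case in place, streaming the new enumerator prints its name. A small sketch, assuming the overload above lives in the usual arm_compute namespace so it is found via argument-dependent lookup:

#include <iostream>
#include "tests/TypePrinter.h" // the operator<< overload shown above

int main()
{
    using AF = arm_compute::ActivationLayerInfo::ActivationFunction;
    std::cout << AF::LU_BOUNDED_RELU << '\n'; // prints "LU_BOUNDED_RELU"
}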
diff --git a/tests/datasets/ActivationFunctionsDataset.h b/tests/datasets/ActivationFunctionsDataset.h
index 3e4f408614..31323dc8be 100644
--- a/tests/datasets/ActivationFunctionsDataset.h
+++ b/tests/datasets/ActivationFunctionsDataset.h
@@ -46,6 +46,7 @@ public:
         ActivationLayerInfo::ActivationFunction::LOGISTIC,
         ActivationLayerInfo::ActivationFunction::RELU,
         ActivationLayerInfo::ActivationFunction::BOUNDED_RELU,
+        ActivationLayerInfo::ActivationFunction::LU_BOUNDED_RELU,
         ActivationLayerInfo::ActivationFunction::LEAKY_RELU,
         ActivationLayerInfo::ActivationFunction::SOFT_RELU,
         ActivationLayerInfo::ActivationFunction::SQRT,
diff --git a/tests/validation/CPP/ActivationLayer.cpp b/tests/validation/CPP/ActivationLayer.cpp
index fa393be5e1..8fcacca1e2 100644
--- a/tests/validation/CPP/ActivationLayer.cpp
+++ b/tests/validation/CPP/ActivationLayer.cpp
@@ -66,6 +66,9 @@ SimpleTensor<T> activation_layer(const SimpleTensor<T> &src, ActivationLayerInfo
             case ActivationLayerInfo::ActivationFunction::BOUNDED_RELU:
                 dst[i] = std::min<T>(a, std::max(static_cast<T>(0), x));
                 break;
+            case ActivationLayerInfo::ActivationFunction::LU_BOUNDED_RELU:
+                dst[i] = std::min<T>(a, std::max<T>(b, x));
+                break;
             case ActivationLayerInfo::ActivationFunction::LEAKY_RELU:
                 dst[i] = (x > 0) ? x : a * x;
                 break;
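
A hypothetical call site for the new case, assuming (not shown in this diff) that ActivationLayerInfo is constructed from the activation function together with the a and b constants consumed above:

#include "arm_compute/core/Types.h" // assumed location of ActivationLayerInfo

int main()
{
    using arm_compute::ActivationLayerInfo;
    // a acts as the upper bound, b as the lower bound, mirroring
    // dst[i] = std::min<T>(a, std::max<T>(b, x)) in the reference path above.
    const ActivationLayerInfo act_info(ActivationLayerInfo::ActivationFunction::LU_BOUNDED_RELU,
                                       6.0f /* a: upper bound */,
                                       0.0f /* b: lower bound -> reproduces ReLU6 */);
    (void)act_info;
}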
@@ -125,6 +128,9 @@ SimpleTensor<T> activation_layer(const SimpleTensor<T> &src, ActivationLayerInfo
             case ActivationLayerInfo::ActivationFunction::BOUNDED_RELU:
                 dst[i] = min(a, max(const_0, x)).raw();
                 break;
+            case ActivationLayerInfo::ActivationFunction::LU_BOUNDED_RELU:
+                dst[i] = min(a, max(b, x)).raw();
+                break;
             case ActivationLayerInfo::ActivationFunction::LEAKY_RELU:
                 dst[i] = (x > const_0) ? x.raw() : mul(a, x).raw();
                 break;
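
The fixed-point branch relies on Q-format numbers being ordered exactly like their raw integer bits, so the clamp can run on raw values directly. A minimal sketch with a hypothetical Q4.3 int8_t encoding (value = raw / 8):

#include <algorithm>
#include <cstdint>
#include <iostream>

// Hypothetical Q4.3 fixed point: min/max on the raw integers implements
// min(a, max(b, x)) because the raw-to-value mapping is monotonic.
int8_t lu_bounded_relu_q4_3(int8_t x_raw, int8_t a_raw, int8_t b_raw)
{
    return std::min(a_raw, std::max(b_raw, x_raw));
}

int main()
{
    const int8_t a = 6 << 3;                             // 6.0 as upper bound
    const int8_t b = 0;                                  // 0.0 as lower bound
    const int8_t y = lu_bounded_relu_q4_3(9 << 3, a, b); // input 9.0
    std::cout << y / 8.0 << '\n';                        // prints 6 (clamped)
}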