author    Georgios Pinitas <georgios.pinitas@arm.com>  2017-07-12 16:12:12 +0100
committer Anthony Barbier <anthony.barbier@arm.com>    2018-09-17 14:16:42 +0100
commit    579c0498e161215be1a36080b0b454e5198a992a (patch)
tree      1ec07b602935e7261a8a7aea900dc925e9bc35a1 /tests
parent    81f0d15d6840a0ae8ef571114555a26da74c4a43 (diff)
COMPMID-417: Add Leaky RELU support for both NEON/CL.
- Adds parametrizable leaky relu: (x > 0) ? x : a*x.

Change-Id: Ief19a435b5832a30b56f4aaaf55125787addee94
Reviewed-on: http://mpd-gerrit.cambridge.arm.com/80575
Reviewed-by: Anthony Barbier <anthony.barbier@arm.com>
Tested-by: Kaizen <jeremy.johnson+kaizengerrit@arm.com>
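The new LEAKY_RELU activation is parametrised by the activation's "a" coefficient, as the commit message describes. The standalone C++ sketch below mirrors the scalar reference semantics added to tests/validation/TensorOperations.h; it is an illustration only, not the library implementation, and the leaky_relu helper, the example slope of 0.1f and the sample inputs are assumptions made for the example.

#include <cstdio>

// Minimal sketch of the reference semantics: f(x) = x if x > 0, otherwise a*x.
template <typename T>
T leaky_relu(T x, T a)
{
    return (x > T(0)) ? x : a * x;
}

int main()
{
    const float a = 0.1f; // example slope for the negative half-plane (assumed value)
    for(float x : { -2.f, -0.5f, 0.f, 3.f })
    {
        std::printf("leaky_relu(%+.1f) = %+.2f\n", x, leaky_relu(x, a));
    }
    return 0;
}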
Diffstat (limited to 'tests')
-rw-r--r-- tests/TypePrinter.h                        | 9
-rw-r--r-- tests/dataset/ActivationFunctionDataset.h | 5
-rw-r--r-- tests/validation/TensorOperations.h       | 18
3 files changed, 21 insertions, 11 deletions
diff --git a/tests/TypePrinter.h b/tests/TypePrinter.h
index ff9863e1fb..c4f3495761 100644
--- a/tests/TypePrinter.h
+++ b/tests/TypePrinter.h
@@ -197,9 +197,6 @@ inline ::std::ostream &operator<<(::std::ostream &os, const ActivationLayerInfo:
case ActivationLayerInfo::ActivationFunction::ABS:
os << "ABS";
break;
- case ActivationLayerInfo::ActivationFunction::BOUNDED_RELU:
- os << "BOUNDED_RELU";
- break;
case ActivationLayerInfo::ActivationFunction::LINEAR:
os << "LINEAR";
break;
@@ -209,6 +206,12 @@ inline ::std::ostream &operator<<(::std::ostream &os, const ActivationLayerInfo:
case ActivationLayerInfo::ActivationFunction::RELU:
os << "RELU";
break;
+ case ActivationLayerInfo::ActivationFunction::BOUNDED_RELU:
+ os << "BOUNDED_RELU";
+ break;
+ case ActivationLayerInfo::ActivationFunction::LEAKY_RELU:
+ os << "LEAKY_RELU";
+ break;
case ActivationLayerInfo::ActivationFunction::SOFT_RELU:
os << "SOFT_RELU";
break;
diff --git a/tests/dataset/ActivationFunctionDataset.h b/tests/dataset/ActivationFunctionDataset.h
index bc0e011bde..e6c196560b 100644
--- a/tests/dataset/ActivationFunctionDataset.h
+++ b/tests/dataset/ActivationFunctionDataset.h
@@ -40,17 +40,18 @@ namespace test
* Can be used as input for Boost data test cases to automatically run a test
* case on all activation functions.
*/
-class ActivationFunctions final : public GenericDataset<ActivationLayerInfo::ActivationFunction, 9>
+class ActivationFunctions final : public GenericDataset<ActivationLayerInfo::ActivationFunction, 10>
{
public:
ActivationFunctions()
: GenericDataset
{
ActivationLayerInfo::ActivationFunction::ABS,
- ActivationLayerInfo::ActivationFunction::BOUNDED_RELU,
ActivationLayerInfo::ActivationFunction::LINEAR,
ActivationLayerInfo::ActivationFunction::LOGISTIC,
ActivationLayerInfo::ActivationFunction::RELU,
+ ActivationLayerInfo::ActivationFunction::BOUNDED_RELU,
+ ActivationLayerInfo::ActivationFunction::LEAKY_RELU,
ActivationLayerInfo::ActivationFunction::SOFT_RELU,
ActivationLayerInfo::ActivationFunction::SQRT,
ActivationLayerInfo::ActivationFunction::SQUARE,
diff --git a/tests/validation/TensorOperations.h b/tests/validation/TensorOperations.h
index e2747249b4..27c50cf6d2 100644
--- a/tests/validation/TensorOperations.h
+++ b/tests/validation/TensorOperations.h
@@ -868,9 +868,6 @@ void activation_layer(const Tensor<T> &in, Tensor<T> &out, ActivationLayerInfo a
case ActivationLayerInfo::ActivationFunction::ABS:
out[i] = std::abs(x);
break;
- case ActivationLayerInfo::ActivationFunction::BOUNDED_RELU:
- out[i] = std::min<T>(a, std::max<T>(0, x));
- break;
case ActivationLayerInfo::ActivationFunction::LINEAR:
out[i] = a * x + b;
break;
@@ -880,6 +877,12 @@ void activation_layer(const Tensor<T> &in, Tensor<T> &out, ActivationLayerInfo a
case ActivationLayerInfo::ActivationFunction::RELU:
out[i] = std::max<T>(0, x);
break;
+ case ActivationLayerInfo::ActivationFunction::BOUNDED_RELU:
+ out[i] = std::min<T>(a, std::max<T>(0, x));
+ break;
+ case ActivationLayerInfo::ActivationFunction::LEAKY_RELU:
+ out[i] = (x > 0) ? x : a * x;
+ break;
case ActivationLayerInfo::ActivationFunction::SOFT_RELU:
out[i] = std::log(static_cast<T>(1) + std::exp(x));
break;
@@ -919,9 +922,6 @@ void activation_layer(const Tensor<T> &in, Tensor<T> &out, ActivationLayerInfo a
case ActivationLayerInfo::ActivationFunction::ABS:
out[i] = abs(x).raw();
break;
- case ActivationLayerInfo::ActivationFunction::BOUNDED_RELU:
- out[i] = min(a, max(const_0, x)).raw();
- break;
case ActivationLayerInfo::ActivationFunction::LINEAR:
out[i] = add(b, mul(a, x)).raw();
break;
@@ -931,6 +931,12 @@ void activation_layer(const Tensor<T> &in, Tensor<T> &out, ActivationLayerInfo a
case ActivationLayerInfo::ActivationFunction::RELU:
out[i] = max(const_0, x).raw();
break;
+ case ActivationLayerInfo::ActivationFunction::BOUNDED_RELU:
+ out[i] = min(a, max(const_0, x)).raw();
+ break;
+ case ActivationLayerInfo::ActivationFunction::LEAKY_RELU:
+ out[i] = (x > const_0) ? x.raw() : mul(a, x).raw();
+ break;
case ActivationLayerInfo::ActivationFunction::SOFT_RELU:
out[i] = log(const_1 + exp(x)).raw();
break;