From 579c0498e161215be1a36080b0b454e5198a992a Mon Sep 17 00:00:00 2001
From: Georgios Pinitas
Date: Wed, 12 Jul 2017 16:12:12 +0100
Subject: COMPMID-417: Add Leaky RELU support for both NEON/CL.

-Adds parametrizable leaky relu (x>0) ? x : a*x.

Change-Id: Ief19a435b5832a30b56f4aaaf55125787addee94
Reviewed-on: http://mpd-gerrit.cambridge.arm.com/80575
Reviewed-by: Anthony Barbier
Tested-by: Kaizen
---
 tests/TypePrinter.h                       |  9 ++++++---
 tests/dataset/ActivationFunctionDataset.h |  5 +++--
 tests/validation/TensorOperations.h       | 18 ++++++++++++------
 3 files changed, 21 insertions(+), 11 deletions(-)

(limited to 'tests')

diff --git a/tests/TypePrinter.h b/tests/TypePrinter.h
index ff9863e1fb..c4f3495761 100644
--- a/tests/TypePrinter.h
+++ b/tests/TypePrinter.h
@@ -197,9 +197,6 @@ inline ::std::ostream &operator<<(::std::ostream &os, const ActivationLayerInfo:
         case ActivationLayerInfo::ActivationFunction::ABS:
             os << "ABS";
             break;
-        case ActivationLayerInfo::ActivationFunction::BOUNDED_RELU:
-            os << "BOUNDED_RELU";
-            break;
         case ActivationLayerInfo::ActivationFunction::LINEAR:
             os << "LINEAR";
             break;
@@ -209,6 +206,12 @@ inline ::std::ostream &operator<<(::std::ostream &os, const ActivationLayerInfo:
         case ActivationLayerInfo::ActivationFunction::RELU:
             os << "RELU";
             break;
+        case ActivationLayerInfo::ActivationFunction::BOUNDED_RELU:
+            os << "BOUNDED_RELU";
+            break;
+        case ActivationLayerInfo::ActivationFunction::LEAKY_RELU:
+            os << "LEAKY_RELU";
+            break;
         case ActivationLayerInfo::ActivationFunction::SOFT_RELU:
             os << "SOFT_RELU";
             break;
diff --git a/tests/dataset/ActivationFunctionDataset.h b/tests/dataset/ActivationFunctionDataset.h
index bc0e011bde..e6c196560b 100644
--- a/tests/dataset/ActivationFunctionDataset.h
+++ b/tests/dataset/ActivationFunctionDataset.h
@@ -40,17 +40,18 @@ namespace test
  * Can be used as input for Boost data test cases to automatically run a test
  * case on all activation functions.
  */
-class ActivationFunctions final : public GenericDataset<ActivationLayerInfo::ActivationFunction, 9>
+class ActivationFunctions final : public GenericDataset<ActivationLayerInfo::ActivationFunction, 10>
 {
 public:
     ActivationFunctions()
         : GenericDataset
     {
         ActivationLayerInfo::ActivationFunction::ABS,
-        ActivationLayerInfo::ActivationFunction::BOUNDED_RELU,
         ActivationLayerInfo::ActivationFunction::LINEAR,
         ActivationLayerInfo::ActivationFunction::LOGISTIC,
         ActivationLayerInfo::ActivationFunction::RELU,
+        ActivationLayerInfo::ActivationFunction::BOUNDED_RELU,
+        ActivationLayerInfo::ActivationFunction::LEAKY_RELU,
         ActivationLayerInfo::ActivationFunction::SOFT_RELU,
         ActivationLayerInfo::ActivationFunction::SQRT,
         ActivationLayerInfo::ActivationFunction::SQUARE,
diff --git a/tests/validation/TensorOperations.h b/tests/validation/TensorOperations.h
index e2747249b4..27c50cf6d2 100644
--- a/tests/validation/TensorOperations.h
+++ b/tests/validation/TensorOperations.h
@@ -868,9 +868,6 @@ void activation_layer(const Tensor<T> &in, Tensor<T> &out, ActivationLayerInfo a
             case ActivationLayerInfo::ActivationFunction::ABS:
                 out[i] = std::abs(x);
                 break;
-            case ActivationLayerInfo::ActivationFunction::BOUNDED_RELU:
-                out[i] = std::min(a, std::max(0, x));
-                break;
             case ActivationLayerInfo::ActivationFunction::LINEAR:
                 out[i] = a * x + b;
                 break;
@@ -880,6 +877,12 @@ void activation_layer(const Tensor<T> &in, Tensor<T> &out, ActivationLayerInfo a
             case ActivationLayerInfo::ActivationFunction::RELU:
                 out[i] = std::max(0, x);
                 break;
+            case ActivationLayerInfo::ActivationFunction::BOUNDED_RELU:
+                out[i] = std::min(a, std::max(0, x));
+                break;
+            case ActivationLayerInfo::ActivationFunction::LEAKY_RELU:
+                out[i] = (x > 0) ? x : a * x;
+                break;
             case ActivationLayerInfo::ActivationFunction::SOFT_RELU:
                 out[i] = std::log(static_cast<T>(1) + std::exp(x));
                 break;
@@ -919,9 +922,6 @@ void activation_layer(const Tensor<T> &in, Tensor<T> &out, ActivationLayerInfo a
             case ActivationLayerInfo::ActivationFunction::ABS:
                 out[i] = abs(x).raw();
                 break;
-            case ActivationLayerInfo::ActivationFunction::BOUNDED_RELU:
-                out[i] = min(a, max(const_0, x)).raw();
-                break;
             case ActivationLayerInfo::ActivationFunction::LINEAR:
                 out[i] = add(b, mul(a, x)).raw();
                 break;
@@ -931,6 +931,12 @@ void activation_layer(const Tensor<T> &in, Tensor<T> &out, ActivationLayerInfo a
             case ActivationLayerInfo::ActivationFunction::RELU:
                 out[i] = max(const_0, x).raw();
                 break;
+            case ActivationLayerInfo::ActivationFunction::BOUNDED_RELU:
+                out[i] = min(a, max(const_0, x)).raw();
+                break;
+            case ActivationLayerInfo::ActivationFunction::LEAKY_RELU:
+                out[i] = (x > const_0) ? x.raw() : mul(a, x).raw();
+                break;
             case ActivationLayerInfo::ActivationFunction::SOFT_RELU:
                 out[i] = log(const_1 + exp(x)).raw();
                 break;
--
cgit v1.2.1
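
Note (not part of the patch): the reference math the new LEAKY_RELU case adds to the test
reference in TensorOperations.h reduces to the standalone sketch below, shown next to the
existing BOUNDED_RELU case for contrast. The helper names leaky_relu/bounded_relu and the
sample slope value are illustrative only and are not Compute Library API.

#include <algorithm>
#include <cstdio>
#include <initializer_list>

// Leaky ReLU as described in the commit message: f(x) = (x > 0) ? x : a * x,
// where 'a' is the configurable slope applied to negative inputs.
template <typename T>
T leaky_relu(T x, T a)
{
    return (x > T(0)) ? x : a * x;
}

// Bounded ReLU from the same switch, for comparison: f(x) = min(a, max(0, x)),
// where 'a' acts as an upper bound instead of a negative-side slope.
template <typename T>
T bounded_relu(T x, T a)
{
    return std::min(a, std::max(T(0), x));
}

int main()
{
    const float a = 0.1f; // example value for the activation's 'a' parameter
    for(float x : { -2.0f, -0.5f, 0.0f, 0.5f, 2.0f })
    {
        std::printf("x=% .1f  leaky_relu=% .2f  bounded_relu=% .2f\n",
                    x, leaky_relu(x, a), bounded_relu(x, a));
    }
    return 0;
}

As the diff above shows, LEAKY_RELU reuses the same 'a' parameter slot that BOUNDED_RELU
already reads, only interpreting it as the negative-input slope rather than as an upper bound.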