From 64ebe5b392b8135ec939b63596ffb8567a3e3248 Mon Sep 17 00:00:00 2001
From: Georgios Pinitas
Date: Fri, 1 Sep 2017 17:44:24 +0100
Subject: COMPMID-519: Add support for Lower and Upper Bounded RELU for CL/NEON

Change-Id: I7b16216ac59c899a33942bf17757b54535256d7a
Reviewed-on: http://mpd-gerrit.cambridge.arm.com/86172
Tested-by: Kaizen
Reviewed-by: Anthony Barbier
---
 tests/validation/CPP/ActivationLayer.cpp | 6 ++++++
 1 file changed, 6 insertions(+)

(limited to 'tests/validation/CPP')

diff --git a/tests/validation/CPP/ActivationLayer.cpp b/tests/validation/CPP/ActivationLayer.cpp
index fa393be5e1..8fcacca1e2 100644
--- a/tests/validation/CPP/ActivationLayer.cpp
+++ b/tests/validation/CPP/ActivationLayer.cpp
@@ -66,6 +66,9 @@ SimpleTensor<T> activation_layer(const SimpleTensor<T> &src, ActivationLayerInfo
             case ActivationLayerInfo::ActivationFunction::BOUNDED_RELU:
                 dst[i] = std::min(a, std::max(static_cast<T>(0), x));
                 break;
+            case ActivationLayerInfo::ActivationFunction::LU_BOUNDED_RELU:
+                dst[i] = std::min(a, std::max(b, x));
+                break;
             case ActivationLayerInfo::ActivationFunction::LEAKY_RELU:
                 dst[i] = (x > 0) ? x : a * x;
                 break;
@@ -125,6 +128,9 @@ SimpleTensor<T> activation_layer(const SimpleTensor<T> &src, ActivationLayerInfo
             case ActivationLayerInfo::ActivationFunction::BOUNDED_RELU:
                 dst[i] = min(a, max(const_0, x)).raw();
                 break;
+            case ActivationLayerInfo::ActivationFunction::LU_BOUNDED_RELU:
+                dst[i] = min(a, max(b, x)).raw();
+                break;
             case ActivationLayerInfo::ActivationFunction::LEAKY_RELU:
                 dst[i] = (x > const_0) ? x.raw() : mul(a, x).raw();
                 break;
-- 
cgit v1.2.1
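
For context, the hunks above add LU_BOUNDED_RELU to the reference (CPP) activation layer: the output is the input clamped to the range [b, a], where a is the upper bound and b the lower bound carried in ActivationLayerInfo, i.e. dst[i] = std::min(a, std::max(b, x)). The snippet below is a minimal standalone sketch of that expression only; the helper name lu_bounded_relu and the sample bound values are illustrative and not part of the patch.

#include <algorithm>
#include <cassert>

// Hypothetical helper mirroring the reference expression added by the patch:
// clamp x into [lower, upper].
template <typename T>
T lu_bounded_relu(T x, T upper /* a */, T lower /* b */)
{
    return std::min(upper, std::max(lower, x));
}

int main()
{
    // Example bounds a = 6, b = -1 (chosen for illustration):
    // values below -1 clamp to -1, values above 6 clamp to 6,
    // and values in between pass through unchanged.
    assert(lu_bounded_relu(-3.0f, 6.0f, -1.0f) == -1.0f);
    assert(lu_bounded_relu(2.5f, 6.0f, -1.0f) == 2.5f);
    assert(lu_bounded_relu(9.0f, 6.0f, -1.0f) == 6.0f);
    return 0;
}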