From 199982fc01bcce10120cd6df03d11829a215c85c Mon Sep 17 00:00:00 2001
From: Milos Puzovic
Date: Fri, 28 Oct 2022 00:09:32 +0100
Subject: Add threshold for floating-point SOFT_RELU activation

Added the missing threshold for calculating SOFT_RELU when the SVE and CL
implementations are used. As a result, removed the bounds on input values
used for testing, which had been restricted to the interval [-40, 40].

Resolves: COMPMID-5658
Signed-off-by: Milos Puzovic
Change-Id: I3d14df60125e36e4eb85aeb222f4fb0cc5741521
Reviewed-on: https://review.mlplatform.org/c/ml/ComputeLibrary/+/8536
Comments-Addressed: Arm Jenkins
Reviewed-by: Viet-Hoa Do
Reviewed-by: Gunes Bayir
Tested-by: Arm Jenkins
Benchmark: Arm Jenkins
---
 tests/validation/reference/ActivationLayer.h | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

(limited to 'tests/validation/reference/ActivationLayer.h')

diff --git a/tests/validation/reference/ActivationLayer.h b/tests/validation/reference/ActivationLayer.h
index 2bf96831a6..a813ba5037 100644
--- a/tests/validation/reference/ActivationLayer.h
+++ b/tests/validation/reference/ActivationLayer.h
@@ -64,7 +64,7 @@ inline T activate_float(T x, T a, T b, ActivationLayerInfo::ActivationFunction a
             ret = (x > 0) ? x : a * x;
             break;
         case ActivationLayerInfo::ActivationFunction::SOFT_RELU:
-            ret = std::log(static_cast<T>(1) + std::exp(x));
+            ret = std::log(static_cast<T>(1) + std::exp(static_cast<double>(x)));
             break;
         case ActivationLayerInfo::ActivationFunction::ELU:
             ret = (x > 0) ? x : a * (std::exp(x) - static_cast<T>(1));
--
cgit v1.2.1
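
For context, below is a minimal sketch of the thresholding idea the commit message refers to: above some cutoff, log(1 + exp(x)) is numerically indistinguishable from x, so returning x directly avoids overflow in exp(x). This is not the library's actual SVE/CL kernel code; the function name soft_relu_thresholded and the threshold value 16 are illustrative assumptions only.

```cpp
#include <cmath>
#include <iostream>

// Sketch of a thresholded soft-relu. The cutoff value is an assumption;
// real implementations pick a type-dependent bound.
template <typename T>
T soft_relu_thresholded(T x)
{
    const T threshold = static_cast<T>(16); // assumed cutoff, not taken from the library
    if(x > threshold)
    {
        // exp(x) would overflow or lose precision, but log(1 + exp(x)) ~= x here.
        return x;
    }
    // Compute in double for accuracy, as the reference change above does.
    return static_cast<T>(std::log(1.0 + std::exp(static_cast<double>(x))));
}

int main()
{
    // Small input follows the exact formula; large input hits the threshold path.
    std::cout << soft_relu_thresholded(0.5f) << " " << soft_relu_thresholded(100.0f) << std::endl;
    return 0;
}
```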