From 01934e9953bdc0f3b931e6719241a5e415a4f2a1 Mon Sep 17 00:00:00 2001
From: Gunes Bayir
Date: Wed, 2 Nov 2022 11:50:37 +0000
Subject: Partially Revert "Add threshold for floating-point SOFT_RELU activation"

Revert the range removal in tests for soft relu and bring back the
former implementation in the CL backend.

Resolves: COMPMID-5677
Change-Id: I35d5ac03a134299041ce97aabc9fff2d4380d09f
Signed-off-by: Gunes Bayir
Reviewed-on: https://review.mlplatform.org/c/ml/ComputeLibrary/+/8551
Reviewed-by: Milos Puzovic
Reviewed-by: Viet-Hoa Do
Comments-Addressed: Arm Jenkins
Benchmark: Arm Jenkins
Tested-by: Arm Jenkins
---
 tests/validation/Helpers.h | 4 ++++
 1 file changed, 4 insertions(+)

diff --git a/tests/validation/Helpers.h b/tests/validation/Helpers.h
index cbbdfbb6c4..2e48a6b8c6 100644
--- a/tests/validation/Helpers.h
+++ b/tests/validation/Helpers.h
@@ -91,6 +91,10 @@ std::pair<T, T> get_activation_layer_test_bounds(ActivationLayerInfo::Activation
         case DataType::F32:
             switch(activation)
             {
+                case ActivationLayerInfo::ActivationFunction::SOFT_RELU:
+                    // Reduce range as exponent overflows
+                    bounds = std::make_pair(-40.f, 40.f);
+                    break;
                 case ActivationLayerInfo::ActivationFunction::SQRT:
                     // Reduce range as sqrt should take a non-negative number
                     bounds = std::make_pair(0.f, 255.f);
--
cgit v1.2.1
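
For context on the [-40, 40] bound: SOFT_RELU is commonly defined as
f(x) = log(1 + exp(x)). In IEEE-754 single precision, exp(x) overflows
to +inf once x exceeds ln(FLT_MAX), roughly 88.7, so evaluating the
formula naively on unbounded random inputs produces inf and the test
reference diverges; restricting the inputs to [-40, 40] keeps exp()
comfortably finite. Below is a minimal C++ sketch of the thresholding
idea; the helper name soft_relu_sketch, the 40.f cutoff constant, and
the standalone main() are illustrative assumptions, not Compute
Library code, and the actual threshold used by the restored CL kernel
may differ.

#include <cmath>
#include <cstdio>

// Sketch only (not Compute Library code): a thresholded soft relu.
// For x well above ~40, log(1 + exp(x)) equals x to within float
// precision, because the log(1 + exp(-x)) remainder (~4e-18 at x = 40)
// is far below one ulp of x. Returning x directly therefore avoids the
// exp() overflow without losing accuracy.
static float soft_relu_sketch(float x)
{
    constexpr float threshold = 40.f; // beyond this, the +1 term is negligible
    return (x > threshold) ? x : std::log1p(std::exp(x));
}

int main()
{
    // exp(89) > FLT_MAX (~3.4e38f), so the naive formula overflows:
    std::printf("exp(89.f)              = %f\n", std::exp(89.f));         // inf
    std::printf("soft_relu_sketch(89.f) = %f\n", soft_relu_sketch(89.f)); // ~89, finite
    return 0;
}

Compiled and run as-is (e.g. g++ -O2 sketch.cpp && ./a.out), the sketch
prints inf for the naive exp() call and a finite value of about 89 for
the thresholded soft relu.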