author    Milos Puzovic <Milos.Puzovic@arm.com>  2022-10-28 00:09:32 +0100
committer Viet-Hoa Do <viet-hoa.do@arm.com>      2022-11-01 11:08:33 +0000
commit    199982fc01bcce10120cd6df03d11829a215c85c (patch)
tree      5a8884fa7a216edf8c18b5cfeb8b1537935b5e80 /src/core/CL/cl_kernels/activation_float_helpers.h
parent    4b5f6efef15efd79727a58c520c92c9e7a084256 (diff)
Add threshold for floating-point SOFT_RELU activation
Added the missing threshold for calculating SOFT_RELU in the SVE and CL implementations. As a result, removed the testing bounds for input values, which had been restricted to the interval [-40, 40].

Resolves: COMPMID-5658
Signed-off-by: Milos Puzovic <Milos.Puzovic@arm.com>
Change-Id: I3d14df60125e36e4eb85aeb222f4fb0cc5741521
Reviewed-on: https://review.mlplatform.org/c/ml/ComputeLibrary/+/8536
Comments-Addressed: Arm Jenkins <bsgcomp@arm.com>
Reviewed-by: Viet-Hoa Do <viet-hoa.do@arm.com>
Reviewed-by: Gunes Bayir <gunes.bayir@arm.com>
Tested-by: Arm Jenkins <bsgcomp@arm.com>
Benchmark: Arm Jenkins <bsgcomp@arm.com>
Diffstat (limited to 'src/core/CL/cl_kernels/activation_float_helpers.h')
-rw-r--r-- src/core/CL/cl_kernels/activation_float_helpers.h | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/src/core/CL/cl_kernels/activation_float_helpers.h b/src/core/CL/cl_kernels/activation_float_helpers.h
index 3f93c8d6fc..fe124bc032 100644
--- a/src/core/CL/cl_kernels/activation_float_helpers.h
+++ b/src/core/CL/cl_kernels/activation_float_helpers.h
@@ -52,7 +52,7 @@
#define lrelu_op(DATA_TYPE, VEC_SIZE, x, A_VAL, B_VAL) ((min(x, (DATA_TYPE)0.0) * (DATA_TYPE)A_VAL) + max(x, (DATA_TYPE)0.0))
// Soft RELU Activation
-#define srelu_op(DATA_TYPE, VEC_SIZE, x, A_VAL, B_VAL) (log((DATA_TYPE)1.0 + exp(x)))
+#define srelu_op(DATA_TYPE, VEC_SIZE, x, A_VAL, B_VAL) (select((log((DATA_TYPE)1.0 + exp(x))), x, (SELECT_VEC_DATA_TYPE(DATA_TYPE, VEC_SIZE))isgreaterequal(x,(DATA_TYPE)16.63553047)))
// ELU Activation
#define elu_op(DATA_TYPE, VEC_SIZE, x, A_VAL, B_VAL) (select(((DATA_TYPE)A_VAL * (exp(x) - (DATA_TYPE)1.0)), x, (SELECT_VEC_DATA_TYPE(DATA_TYPE, VEC_SIZE))isgreaterequal(x, (DATA_TYPE)0.0)))
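For context, the new threshold guards the exp() call: in fp32, exp(x) overflows to +inf for x above roughly 88, and for x >= 16.63553047 (approximately 24·ln 2) the correction term log(1 + exp(-x)) is already smaller than fp32 resolution, so soft ReLU evaluates to x exactly. Below is a minimal host-side C sketch of the same thresholded softplus; the function name soft_relu_f32 and the test values are illustrative, not part of the library.

#include <math.h>
#include <stdio.h>

/* Thresholded soft ReLU, mirroring the srelu_op macro above.
 * For x >= ~24*ln(2) = 16.6355..., exp(-x) < 2^-24, so
 * log(1 + exp(x)) equals x to fp32 precision; returning x directly
 * also avoids expf(x) overflowing to +inf for large inputs. */
static float soft_relu_f32(float x)
{
    const float threshold = 16.63553047f; /* constant from the CL kernel */
    if (x >= threshold)
        return x;                    /* identity branch */
    return logf(1.0f + expf(x));     /* stable range: expf(x) < ~1.7e7 */
}

int main(void)
{
    printf("%f\n", soft_relu_f32(-5.0f));  /* ~0.006715 */
    printf("%f\n", soft_relu_f32(0.0f));   /* ln 2, ~0.693147 */
    printf("%f\n", soft_relu_f32(100.0f)); /* 100.0; unguarded expf(100) would overflow */
    return 0;
}

With the identity branch in place, inputs of any magnitude are safe, which is why the commit can drop the [-40, 40] bound that the tests previously imposed on input values.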