From a0205b987509d239b1635024fe8f334a4534f56e Mon Sep 17 00:00:00 2001
From: Sang-Hoon Park
Date: Tue, 7 Jul 2020 09:36:09 +0100
Subject: COMPMID-3574: add logarithm to LogSoftmaxLayer

The missing logarithm of the summation is added to the NEON, CL and
reference backends.

To avoid complex changes, the log softmax layer on the CL backend doesn't
support quantized data types.

Tests and doxygen comments are modified accordingly.

Change-Id: Iafd29291be8b81345cb4999b2668dbc3ae0c3345
Signed-off-by: Sang-Hoon Park
Reviewed-on: https://review.mlplatform.org/c/ml/ComputeLibrary/+/3517
Tested-by: Arm Jenkins
Reviewed-by: Michele Di Giorgio
Reviewed-by: SiCong Li
Comments-Addressed: Arm Jenkins
---
 arm_compute/runtime/NEON/functions/NESoftmaxLayer.h | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

(limited to 'arm_compute/runtime/NEON/functions/NESoftmaxLayer.h')

diff --git a/arm_compute/runtime/NEON/functions/NESoftmaxLayer.h b/arm_compute/runtime/NEON/functions/NESoftmaxLayer.h
index 51d981de44..fc1316d33c 100644
--- a/arm_compute/runtime/NEON/functions/NESoftmaxLayer.h
+++ b/arm_compute/runtime/NEON/functions/NESoftmaxLayer.h
@@ -39,10 +39,10 @@ class ITensor;
 /** Basic function to compute a SoftmaxLayer and a Log SoftmaxLayer.
  *
  * Softmax is calculated by :
- * @f[ out = \frac{e^{x - max(x)}}{\sum{e^{x - max(x)}}} @f]
+ * @f[ out = exp((x - max(x)) * beta) / sum(exp((x - max(x)) * beta)) @f]
  *
  * Log Softmax is calculated by :
- * @f[ out = (x - max(x)) - \sum{e^{x - max(x)}} @f]
+ * @f[ out = (x - max(x) * beta) - log(\sum{e^{x - max(x) * beta}}) @f]
  *
  * This function runs the following kernels:
  * -# @ref NEFillBorderKernel
--
cgit v1.2.1
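
For readers who want to check the corrected formula outside the library, below is a minimal standalone C++ sketch of the computation the updated doxygen comment documents. It is not the NEON/CL kernel code from this patch; the function name log_softmax_1d is illustrative only, and it reads beta as scaling the max-shifted input, in line with the softmax formula in the patched comment.

```cpp
#include <algorithm>
#include <cmath>
#include <cstddef>
#include <cstdio>
#include <vector>

// Illustrative reference sketch; not part of the Compute Library API.
std::vector<float> log_softmax_1d(const std::vector<float> &x, float beta = 1.0f)
{
    // Subtract the maximum for numerical stability, as in the documented formula.
    const float max_val = *std::max_element(x.begin(), x.end());

    // Accumulate sum(exp((x - max(x)) * beta)).
    float sum = 0.0f;
    for(float v : x)
    {
        sum += std::exp((v - max_val) * beta);
    }

    // The point of this commit: subtract log(sum), not the raw sum.
    std::vector<float> out(x.size());
    for(std::size_t i = 0; i < x.size(); ++i)
    {
        out[i] = (x[i] - max_val) * beta - std::log(sum);
    }
    return out;
}

int main()
{
    const std::vector<float> logits{1.0f, 2.0f, 3.0f};
    for(float v : log_softmax_1d(logits))
    {
        std::printf("%f\n", v); // roughly -2.4076, -1.4076, -0.4076
    }
    return 0;
}
```

The std::log(sum) term is exactly what was missing before this change; with it, exponentiating the output of log_softmax_1d reproduces the softmax values documented just above it in the header.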