From a0205b987509d239b1635024fe8f334a4534f56e Mon Sep 17 00:00:00 2001 From: Sang-Hoon Park Date: Tue, 7 Jul 2020 09:36:09 +0100 Subject: COMPMID-3574: add logarithm to LogSoftmaxLayer Missed logarithm for the summation is added to NEON, CL and reference backends. To avoid complex changes, log softmax layer on CL backend doesn't support quantized data types. Tests and doxygen comments are modified accordingly. Change-Id: Iafd29291be8b81345cb4999b2668dbc3ae0c3345 Signed-off-by: Sang-Hoon Park Reviewed-on: https://review.mlplatform.org/c/ml/ComputeLibrary/+/3517 Tested-by: Arm Jenkins Reviewed-by: Michele Di Giorgio Reviewed-by: SiCong Li Comments-Addressed: Arm Jenkins --- src/core/NEON/kernels/NESoftmaxLayerKernel.cpp | 8 ++++++++ 1 file changed, 8 insertions(+) (limited to 'src/core/NEON') diff --git a/src/core/NEON/kernels/NESoftmaxLayerKernel.cpp b/src/core/NEON/kernels/NESoftmaxLayerKernel.cpp index 41bf03ad1d..35e5973aff 100644 --- a/src/core/NEON/kernels/NESoftmaxLayerKernel.cpp +++ b/src/core/NEON/kernels/NESoftmaxLayerKernel.cpp @@ -368,6 +368,10 @@ void logits_1d_softmax_qasymm8(const ITensor &in, const ITensor &max, void *cons { sum_inversed = 256.f / sum; } + else + { + sum = std::log(sum); + } } /* Normalize exponentials */ @@ -516,6 +520,10 @@ void logits_1d_softmax_float(const ITensor &in, const ITensor &max, void *const { sum_inversed = T(1) / sum; } + else + { + sum = static_cast<T>(std::log(sum)); + } } /* Normalize exponentials */ -- cgit v1.2.1