From d24affe0abefe8f4a83c7d4487386920895fd2e7 Mon Sep 17 00:00:00 2001
From: Sang-Hoon Park <sang-hoon.park@arm.com>
Date: Tue, 8 Oct 2019 18:07:23 +0100
Subject: COMPMID-2265 add support for Log Softmax to NEON

Kernel (NEON/reference), validation tests, function
and fixture are updated to add support for Log Softmax

Change-Id: I641dbf1552f4128c691af8875949ebf88da71ee8
Signed-off-by: Sang-Hoon Park <sang-hoon.park@arm.com>
Reviewed-on: https://review.mlplatform.org/c/2075
Comments-Addressed: Arm Jenkins
Reviewed-by: Michele Di Giorgio <michele.digiorgio@arm.com>
Tested-by: Arm Jenkins
---
 tests/validation/reference/SoftmaxLayer.cpp | 37 +++++++++++++++++++++++++++++++------
 1 file changed, 31 insertions(+), 6 deletions(-)

(limited to 'tests/validation/reference/SoftmaxLayer.cpp')

diff --git a/tests/validation/reference/SoftmaxLayer.cpp b/tests/validation/reference/SoftmaxLayer.cpp
index fabc62bedb..ef2468df59 100644
--- a/tests/validation/reference/SoftmaxLayer.cpp
+++ b/tests/validation/reference/SoftmaxLayer.cpp
@@ -34,7 +34,7 @@ namespace validation
 namespace reference
 {
 template <typename T, typename std::enable_if<is_floating_point<T>::value, int>::type>
-SimpleTensor<T> softmax_layer(const SimpleTensor<T> &src, float beta, size_t axis)
+SimpleTensor<T> softmax_layer_generic(const SimpleTensor<T> &src, float beta, size_t axis, bool is_log)
 {
     // Create reference
     SimpleTensor<T> dst{ src.shape(), src.data_type(), 1 };
@@ -65,23 +65,48 @@ SimpleTensor<T> softmax_layer(const SimpleTensor<T> &src, float beta, size_t axi
             // Regularize
             T sum(0.f);
-            std::transform(src_row_ptr, src_row_ptr + lower_dims, dst_row_ptr, [&sum, max, beta](T val)
+            std::transform(src_row_ptr, src_row_ptr + lower_dims, dst_row_ptr, [&sum, max, beta, is_log](T val)
             {
-                const T res(std::exp((val - max) * beta));
-                sum += res;
+                T res{ (val - max) *beta };
+
+                if(is_log)
+                {
+                    sum += std::exp(res);
+                }
+                else
+                {
+                    res = std::exp(res);
+                    sum += res;
+                }
                 return res;
             });
 
             // Normalize
-            std::transform(dst_row_ptr, dst_row_ptr + lower_dims, dst_row_ptr, [sum](T val)
+            std::transform(dst_row_ptr, dst_row_ptr + lower_dims, dst_row_ptr, [sum, is_log](T val)
             {
-                return val / sum;
+                if(is_log)
+                {
+                    return val - sum;
+                }
+                else
+                {
+                    return val / sum;
+                }
             });
         }
     }
 
     return dst;
 }
 
+template SimpleTensor<float> softmax_layer_generic(const SimpleTensor<float> &src, float beta, size_t axis, bool is_log);
+template SimpleTensor<half> softmax_layer_generic(const SimpleTensor<half> &src, float beta, size_t axis, bool is_log);
+
+template <typename T, typename std::enable_if<is_floating_point<T>::value, int>::type>
+SimpleTensor<T> softmax_layer(const SimpleTensor<T> &src, float beta, size_t axis)
+{
+    return softmax_layer_generic(src, beta, axis, false);
+}
+
 template <typename T, typename std::enable_if<std::is_integral<T>::value, int>::type>
 SimpleTensor<T> softmax_layer(const SimpleTensor<T> &src, float beta, size_t axis)
 {
-- 
cgit v1.2.1