diff options
author | Sang-Hoon Park <sang-hoon.park@arm.com> | 2019-10-08 18:07:23 +0100 |
---|---|---|
committer | Sang-Hoon Park <sang-hoon.park@arm.com> | 2019-10-16 12:14:41 +0000 |
commit | d24affe0abefe8f4a83c7d4487386920895fd2e7 (patch) | |
tree | 8ccdf4891b2107f49814002817e6895f201686a7 /tests/validation/fixtures | |
parent | 7c60c990fbed62aab1369c0e4462c4081dc3cfeb (diff) | |
download | ComputeLibrary-d24affe0abefe8f4a83c7d4487386920895fd2e7.tar.gz |
COMPMID-2265 add support for Log Softmax to NEON
Kernel (NEON/reference), validation tests, function and fixture
are updated to add support for Log Softmax
Change-Id: I641dbf1552f4128c691af8875949ebf88da71ee8
Signed-off-by: Sang-Hoon Park <sang-hoon.park@arm.com>
Reviewed-on: https://review.mlplatform.org/c/2075
Comments-Addressed: Arm Jenkins <bsgcomp@arm.com>
Reviewed-by: Michele Di Giorgio <michele.digiorgio@arm.com>
Tested-by: Arm Jenkins <bsgcomp@arm.com>
Diffstat (limited to 'tests/validation/fixtures')
-rw-r--r-- | tests/validation/fixtures/SoftmaxLayerFixture.h | 42 |
1 file changed, 25 insertions, 17 deletions
diff --git a/tests/validation/fixtures/SoftmaxLayerFixture.h b/tests/validation/fixtures/SoftmaxLayerFixture.h index e39ee74800..f747ab3574 100644 --- a/tests/validation/fixtures/SoftmaxLayerFixture.h +++ b/tests/validation/fixtures/SoftmaxLayerFixture.h @@ -1,5 +1,5 @@ /* - * Copyright (c) 2017-2018 ARM Limited. + * Copyright (c) 2017-2019 ARM Limited. * * SPDX-License-Identifier: MIT * @@ -32,6 +32,7 @@ #include "tests/IAccessor.h" #include "tests/framework/Asserts.h" #include "tests/framework/Fixture.h" +#include "tests/validation/reference/LogSoftmaxLayer.h" #include "tests/validation/reference/SoftmaxLayer.h" #include <random> @@ -42,7 +43,7 @@ namespace test { namespace validation { -template <typename TensorType, typename AccessorType, typename FunctionType, typename T> +template <typename TensorType, typename AccessorType, typename FunctionType, typename T, bool IS_LOG = false> class SoftmaxValidationGenericFixture : public framework::Fixture { public: @@ -110,7 +111,14 @@ protected: // Fill reference fill(src); - return reference::softmax_layer<T>(src, beta, axis); + if(IS_LOG) + { + return reference::log_softmax_layer<T>(src, beta, axis); + } + else + { + return reference::softmax_layer<T>(src, beta, axis); + } } TensorType _target{}; @@ -118,33 +126,33 @@ protected: QuantizationInfo _quantization_info{}; }; -template <typename TensorType, typename AccessorType, typename FunctionType, typename T> -class SoftmaxValidationFixture : public SoftmaxValidationGenericFixture<TensorType, AccessorType, FunctionType, T> +template <typename TensorType, typename AccessorType, typename FunctionType, typename T, bool IS_LOG = false> +class SoftmaxValidationFixture : public SoftmaxValidationGenericFixture<TensorType, AccessorType, FunctionType, T, IS_LOG> { public: template <typename...> void setup(TensorShape shape, DataType data_type, float beta, size_t axis) { - SoftmaxValidationGenericFixture<TensorType, AccessorType, FunctionType, T>::setup(shape, - data_type, - 
QuantizationInfo(), - beta, - axis); + SoftmaxValidationGenericFixture<TensorType, AccessorType, FunctionType, T, IS_LOG>::setup(shape, + data_type, + QuantizationInfo(), + beta, + axis); } }; -template <typename TensorType, typename AccessorType, typename FunctionType, typename T> -class SoftmaxValidationQuantizedFixture : public SoftmaxValidationGenericFixture<TensorType, AccessorType, FunctionType, T> +template <typename TensorType, typename AccessorType, typename FunctionType, typename T, bool IS_LOG = false> +class SoftmaxValidationQuantizedFixture : public SoftmaxValidationGenericFixture<TensorType, AccessorType, FunctionType, T, IS_LOG> { public: template <typename...> void setup(TensorShape shape, DataType data_type, QuantizationInfo quantization_info, float beta, size_t axis) { - SoftmaxValidationGenericFixture<TensorType, AccessorType, FunctionType, T>::setup(shape, - data_type, - quantization_info, - beta, - axis); + SoftmaxValidationGenericFixture<TensorType, AccessorType, FunctionType, T, IS_LOG>::setup(shape, + data_type, + quantization_info, + beta, + axis); } }; } // namespace validation |