diff options
author | SiCong Li <sicong.li@arm.com> | 2020-08-21 12:28:30 +0100 |
---|---|---|
committer | SiCong Li <sicong.li@arm.com> | 2020-08-25 14:12:07 +0000 |
commit | 96209c73b071bb65d4919fb441076f977095a31b (patch) | |
tree | 50252f1a33992b3a6171c6b2becf6da1b6f0022d /tests/validation/fixtures | |
parent | 5111264954e2d1a4d3e91d23a0869a0d7105be4c (diff) | |
download | ComputeLibrary-96209c73b071bb65d4919fb441076f977095a31b.tar.gz |
COMPMID-3694 COMPMID-3695 COMPMID-3458: Softmax Axis
* Properly support "axis" in CL and NEON (and GC) SoftmaxLayer and
LogSoftmaxLayer in accord with mainstream frameworks. Axis now defines
the dimension on which softmax is performed, and supports the range
[-rank, rank)
* Extend validation tests to include valid and invalid axes
* Remove unnecessary LogSoftmaxLayer fixture, as it is only a
specialisation of the SoftmaxLayer fixture
* Change the validation fill value range from [-1000, 1000] to [-10,
10], as the former often results in sparse outputs with a single one and
zeros elsewhere
Change-Id: I8a0040453182b04ed88260de3ba434e98258d863
Signed-off-by: Manuel Bottini <manuel.bottini@arm.com>
Reviewed-on: https://review.mlplatform.org/c/ml/ComputeLibrary/+/3830
Tested-by: Arm Jenkins <bsgcomp@arm.com>
Comments-Addressed: Arm Jenkins <bsgcomp@arm.com>
Reviewed-by: Michele Di Giorgio <michele.digiorgio@arm.com>
Reviewed-by: Gian Marco Iodice <gianmarco.iodice@arm.com>
Diffstat (limited to 'tests/validation/fixtures')
-rw-r--r-- | tests/validation/fixtures/SoftmaxLayerFixture.h | 15 |
1 file changed, 4 insertions, 11 deletions
diff --git a/tests/validation/fixtures/SoftmaxLayerFixture.h b/tests/validation/fixtures/SoftmaxLayerFixture.h index 29a3ed2cd0..30356d648d 100644 --- a/tests/validation/fixtures/SoftmaxLayerFixture.h +++ b/tests/validation/fixtures/SoftmaxLayerFixture.h @@ -32,7 +32,6 @@ #include "tests/IAccessor.h" #include "tests/framework/Asserts.h" #include "tests/framework/Fixture.h" -#include "tests/validation/reference/LogSoftmaxLayer.h" #include "tests/validation/reference/SoftmaxLayer.h" #include <random> @@ -52,8 +51,8 @@ public: { _quantization_info = quantization_info; - _target = compute_target(shape, data_type, quantization_info, beta, axis); _reference = compute_reference(shape, data_type, quantization_info, beta, axis); + _target = compute_target(shape, data_type, quantization_info, beta, axis); } protected: @@ -62,7 +61,7 @@ protected: { if(!is_data_type_quantized(tensor.data_type())) { - std::uniform_real_distribution<> distribution(-1000.f, 1000.f); + std::uniform_real_distribution<> distribution(-10.f, 10.f); library->fill(tensor, distribution, 0); } else // data type is quantized_asymmetric (signed or unsigned) @@ -111,14 +110,7 @@ protected: // Fill reference fill(src); - if(IS_LOG) - { - return reference::log_softmax_layer<T>(src, beta, axis); - } - else - { - return reference::softmax_layer<T>(src, beta, axis); - } + return reference::softmax_layer<T>(src, beta, axis, IS_LOG); } TensorType _target{}; @@ -155,6 +147,7 @@ public: axis); } }; + } // namespace validation } // namespace test } // namespace arm_compute |