diff options
author | giuros01 <giuseppe.rossini@arm.com> | 2018-09-03 09:53:53 +0100 |
---|---|---|
committer | Anthony Barbier <anthony.barbier@arm.com> | 2018-11-02 16:54:54 +0000 |
commit | efbf6c8fd54159b26eda43eea7a12fce491ca13a (patch) | |
tree | f24f63d73703ddcb5fe0ea3ccef101660a9eb9a4 /tests/validation/fixtures/SoftmaxLayerFixture.h | |
parent | 477531c258801caf3cce44eb3e43df611b42fc6d (diff) | |
download | ComputeLibrary-efbf6c8fd54159b26eda43eea7a12fce491ca13a.tar.gz |
[COMPMID-386] Github: Support SoftmaxLayer on different numbers of dimensions
Change-Id: I7422b977538ff29930a90f078badc2edee78af93
Reviewed-on: https://eu-gerrit-1.euhpc.arm.com/146638
Tested-by: Jenkins <bsgcomp@arm.com>
Reviewed-by: Georgios Pinitas <georgios.pinitas@arm.com>
Diffstat (limited to 'tests/validation/fixtures/SoftmaxLayerFixture.h')
-rw-r--r-- | tests/validation/fixtures/SoftmaxLayerFixture.h | 24 |
1 file changed, 13 insertions(+), 11 deletions(-)
diff --git a/tests/validation/fixtures/SoftmaxLayerFixture.h b/tests/validation/fixtures/SoftmaxLayerFixture.h index 99c0710f7f..e39ee74800 100644 --- a/tests/validation/fixtures/SoftmaxLayerFixture.h +++ b/tests/validation/fixtures/SoftmaxLayerFixture.h @@ -47,12 +47,12 @@ class SoftmaxValidationGenericFixture : public framework::Fixture { public: template <typename...> - void setup(TensorShape shape, DataType data_type, QuantizationInfo quantization_info, float beta) + void setup(TensorShape shape, DataType data_type, QuantizationInfo quantization_info, float beta, size_t axis) { _quantization_info = quantization_info; - _target = compute_target(shape, data_type, quantization_info, beta); - _reference = compute_reference(shape, data_type, quantization_info, beta); + _target = compute_target(shape, data_type, quantization_info, beta, axis); + _reference = compute_reference(shape, data_type, quantization_info, beta, axis); } protected: @@ -72,7 +72,7 @@ protected: } TensorType compute_target(const TensorShape &shape, DataType data_type, - QuantizationInfo quantization_info, float beta) + QuantizationInfo quantization_info, float beta, size_t axis) { // Create tensors TensorType src = create_tensor<TensorType>(shape, data_type, 1, quantization_info); @@ -80,7 +80,7 @@ protected: // Create and configure function FunctionType smx_layer; - smx_layer.configure(&src, &dst, beta); + smx_layer.configure(&src, &dst, beta, axis); ARM_COMPUTE_EXPECT(src.info()->is_resizable(), framework::LogLevel::ERRORS); ARM_COMPUTE_EXPECT(dst.info()->is_resizable(), framework::LogLevel::ERRORS); @@ -102,7 +102,7 @@ protected: } SimpleTensor<T> compute_reference(const TensorShape &shape, DataType data_type, - QuantizationInfo quantization_info, float beta) + QuantizationInfo quantization_info, float beta, size_t axis) { // Create reference SimpleTensor<T> src{ shape, data_type, 1, quantization_info }; @@ -110,7 +110,7 @@ protected: // Fill reference fill(src); - return 
reference::softmax_layer<T>(src, beta); + return reference::softmax_layer<T>(src, beta, axis); } TensorType _target{}; @@ -123,12 +123,13 @@ class SoftmaxValidationFixture : public SoftmaxValidationGenericFixture<TensorTy { public: template <typename...> - void setup(TensorShape shape, DataType data_type, float beta) + void setup(TensorShape shape, DataType data_type, float beta, size_t axis) { SoftmaxValidationGenericFixture<TensorType, AccessorType, FunctionType, T>::setup(shape, data_type, QuantizationInfo(), - beta); + beta, + axis); } }; @@ -137,12 +138,13 @@ class SoftmaxValidationQuantizedFixture : public SoftmaxValidationGenericFixture { public: template <typename...> - void setup(TensorShape shape, DataType data_type, QuantizationInfo quantization_info, float beta) + void setup(TensorShape shape, DataType data_type, QuantizationInfo quantization_info, float beta, size_t axis) { SoftmaxValidationGenericFixture<TensorType, AccessorType, FunctionType, T>::setup(shape, data_type, quantization_info, - beta); + beta, + axis); } }; } // namespace validation |