diff options
author | Michele Di Giorgio <michele.digiorgio@arm.com> | 2018-03-01 16:56:48 +0000 |
---|---|---|
committer | Anthony Barbier <anthony.barbier@arm.com> | 2018-11-02 16:49:54 +0000 |
commit | 0cbb927ac309e332ac6e6f1ab9170f041f0138ab (patch) | |
tree | 102d50dec9f741f04b1126ae03e6e491dda2d3ba /tests/validation/fixtures | |
parent | 82b51482479951cf133c223eb81aae291cb4d590 (diff) | |
download | ComputeLibrary-0cbb927ac309e332ac6e6f1ab9170f041f0138ab.tar.gz |
COMPMID-804: Add NHWC data format support for NEON batch normalisation
Change-Id: I04892e7be3f5aa58cd95917a4f90a6b4ffcf6efc
Reviewed-on: https://eu-gerrit-1.euhpc.arm.com/122897
Reviewed-by: Giorgio Arena <giorgio.arena@arm.com>
Tested-by: Jenkins <bsgcomp@arm.com>
Reviewed-by: Anthony Barbier <anthony.barbier@arm.com>
Diffstat (limited to 'tests/validation/fixtures')
-rw-r--r-- | tests/validation/fixtures/BatchNormalizationLayerFixture.h | 26 |
1 file changed, 16 insertions, 10 deletions
diff --git a/tests/validation/fixtures/BatchNormalizationLayerFixture.h b/tests/validation/fixtures/BatchNormalizationLayerFixture.h index 4a6ac1af7f..7e072e7023 100644 --- a/tests/validation/fixtures/BatchNormalizationLayerFixture.h +++ b/tests/validation/fixtures/BatchNormalizationLayerFixture.h @@ -45,14 +45,20 @@ class BatchNormalizationLayerValidationFixedPointFixture : public framework::Fix { public: template <typename...> - void setup(TensorShape shape0, TensorShape shape1, float epsilon, bool use_beta, bool use_gamma, ActivationLayerInfo act_info, DataType dt, int fractional_bits) + void setup(TensorShape shape0, TensorShape shape1, float epsilon, bool use_beta, bool use_gamma, ActivationLayerInfo act_info, DataType dt, DataLayout data_layout, int fractional_bits) { _fractional_bits = fractional_bits; _data_type = dt; _use_beta = use_beta; _use_gamma = use_gamma; - _target = compute_target(shape0, shape1, epsilon, act_info, dt, fractional_bits); - _reference = compute_reference(shape0, shape1, epsilon, act_info, dt, fractional_bits); + + if(data_layout == DataLayout::NHWC) + { + permute(shape0, PermutationVector(2U, 0U, 1U)); + } + + _target = compute_target(shape0, shape1, epsilon, act_info, dt, data_layout, fractional_bits); + _reference = compute_reference(shape0, shape1, epsilon, act_info, dt, data_layout, fractional_bits); } protected: @@ -119,11 +125,11 @@ protected: } } - TensorType compute_target(const TensorShape &shape0, const TensorShape &shape1, float epsilon, ActivationLayerInfo act_info, DataType dt, int fixed_point_position) + TensorType compute_target(const TensorShape &shape0, const TensorShape &shape1, float epsilon, ActivationLayerInfo act_info, DataType dt, DataLayout data_layout, int fixed_point_position) { // Create tensors - TensorType src = create_tensor<TensorType>(shape0, dt, 1, fixed_point_position); - TensorType dst = create_tensor<TensorType>(shape0, dt, 1, fixed_point_position); + TensorType src = create_tensor<TensorType>(shape0, dt, 1, fixed_point_position, QuantizationInfo(), data_layout); + TensorType dst = create_tensor<TensorType>(shape0, dt, 1, fixed_point_position, QuantizationInfo(), data_layout); TensorType mean = create_tensor<TensorType>(shape1, dt, 1, fixed_point_position); TensorType var = create_tensor<TensorType>(shape1, dt, 1, fixed_point_position); TensorType beta = create_tensor<TensorType>(shape1, dt, 1, fixed_point_position); @@ -166,10 +172,10 @@ protected: return dst; } - SimpleTensor<T> compute_reference(const TensorShape &shape0, const TensorShape &shape1, float epsilon, ActivationLayerInfo act_info, DataType dt, int fixed_point_position) + SimpleTensor<T> compute_reference(const TensorShape &shape0, const TensorShape &shape1, float epsilon, ActivationLayerInfo act_info, DataType dt, DataLayout data_layout, int fixed_point_position) { // Create reference - SimpleTensor<T> ref_src{ shape0, dt, 1, fixed_point_position }; + SimpleTensor<T> ref_src{ shape0, dt, 1, fixed_point_position, QuantizationInfo(), data_layout }; SimpleTensor<T> ref_mean{ shape1, dt, 1, fixed_point_position }; SimpleTensor<T> ref_var{ shape1, dt, 1, fixed_point_position }; SimpleTensor<T> ref_beta{ shape1, dt, 1, fixed_point_position }; @@ -194,9 +200,9 @@ class BatchNormalizationLayerValidationFixture : public BatchNormalizationLayerV { public: template <typename...> - void setup(TensorShape shape0, TensorShape shape1, float epsilon, bool use_beta, bool use_gamma, ActivationLayerInfo act_info, DataType dt) + void setup(TensorShape shape0, TensorShape shape1, float epsilon, bool use_beta, bool use_gamma, ActivationLayerInfo act_info, DataType dt, DataLayout data_layout) { - BatchNormalizationLayerValidationFixedPointFixture<TensorType, AccessorType, FunctionType, T>::setup(shape0, shape1, epsilon, use_beta, use_gamma, act_info, dt, 0); + BatchNormalizationLayerValidationFixedPointFixture<TensorType, AccessorType, FunctionType, T>::setup(shape0, shape1, epsilon, use_beta, use_gamma, act_info, dt, data_layout, 0); } }; } // namespace validation |