From 980a9168b81d778f4902973b4920b54c103907e0 Mon Sep 17 00:00:00 2001
From: Georgios Pinitas
Date: Wed, 3 Jun 2020 20:16:46 +0100
Subject: COMPMID-3177: Remove padding from NEBatchNormalizationLayer

Signed-off-by: Georgios Pinitas
Change-Id: I9be23e6ef1f552eb159e39fda16c82fa20124094
Reviewed-on: https://review.mlplatform.org/c/ml/ComputeLibrary/+/3307
Tested-by: Arm Jenkins
Reviewed-by: Gian Marco Iodice
Comments-Addressed: Arm Jenkins
---
 tests/validation/NEON/BatchNormalizationLayer.cpp | 43 +++++----------------
 1 file changed, 4 insertions(+), 39 deletions(-)

(limited to 'tests')

diff --git a/tests/validation/NEON/BatchNormalizationLayer.cpp b/tests/validation/NEON/BatchNormalizationLayer.cpp
index 58b7474b41..6075e6be8d 100644
--- a/tests/validation/NEON/BatchNormalizationLayer.cpp
+++ b/tests/validation/NEON/BatchNormalizationLayer.cpp
@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 2017-2019 ARM Limited.
+ * Copyright (c) 2017-2020 ARM Limited.
  *
  * SPDX-License-Identifier: MIT
  *
@@ -71,69 +71,34 @@ TEST_SUITE(BatchNormalizationLayer)
 template <typename T>
 using NEBatchNormalizationLayerFixture = BatchNormalizationLayerValidationFixture<Tensor, Accessor, NEBatchNormalizationLayer, T>;
 
-DATA_TEST_CASE(Configuration, framework::DatasetMode::ALL, combine(combine(combine(datasets::SmallRandomBatchNormalizationLayerDataset(),
-                                                                   combine(framework::dataset::make("UseBeta", { false, true }), framework::dataset::make("UseGamma", { false, true }))),
-                                                                   framework::dataset::make("DataType", { DataType::F32 })),
-                                                                   framework::dataset::make("DataLayout", { DataLayout::NCHW, DataLayout::NHWC })),
-               shape0, shape1, epsilon, use_beta, use_gamma, dt, data_layout)
-{
-    TensorShape src_dst_shapes = shape0;
-    if(data_layout == DataLayout::NHWC)
-    {
-        permute(src_dst_shapes, PermutationVector(2U, 0U, 1U));
-    }
-
-    // Create tensors
-    Tensor src   = create_tensor<Tensor>(src_dst_shapes, dt, 1, QuantizationInfo(), data_layout);
-    Tensor dst   = create_tensor<Tensor>(src_dst_shapes, dt, 1, QuantizationInfo(), data_layout);
-    Tensor mean  = create_tensor<Tensor>(shape1, dt, 1);
-    Tensor var   = create_tensor<Tensor>(shape1, dt, 1);
-    Tensor beta  = create_tensor<Tensor>(shape1, dt, 1);
-    Tensor gamma = create_tensor<Tensor>(shape1, dt, 1);
-
-    // Create and Configure function
-    NEBatchNormalizationLayer norm;
-    Tensor *beta_ptr  = use_beta ? &beta : nullptr;
-    Tensor *gamma_ptr = use_gamma ? &gamma : nullptr;
-    norm.configure(&src, &dst, &mean, &var, beta_ptr, gamma_ptr, epsilon);
-
-    // Validate valid region
-    const ValidRegion valid_region = shape_to_valid_region(src_dst_shapes);
-    validate(dst.info()->valid_region(), valid_region);
-}
-
 // *INDENT-OFF*
 // clang-format off
 DATA_TEST_CASE(Validate, framework::DatasetMode::ALL, zip(zip(zip(zip(
-               framework::dataset::make("InputInfo", { TensorInfo(TensorShape(32U, 13U, 2U), 1, DataType::F32),
-                                                       TensorInfo(TensorShape(27U, 13U, 2U), 1, DataType::F32),    // Window shrink
+               framework::dataset::make("InputInfo", { TensorInfo(TensorShape(27U, 13U, 2U), 1, DataType::F32),
                                                        TensorInfo(TensorShape(32U, 13U, 2U), 1, DataType::F32),    // Mismatching data types
                                                        TensorInfo(TensorShape(32U, 13U, 2U), 1, DataType::F32),    // Mismatching data types
                                                        TensorInfo(TensorShape(32U, 13U, 2U), 1, DataType::F32),    // Invalid mean/var/beta/gamma shape
                                                        TensorInfo(TensorShape(32U, 13U, 2U), 1, DataType::F32),    // Fused activation's a < b
                                                      }),
-               framework::dataset::make("OutputInfo",{ TensorInfo(TensorShape(32U, 13U, 2U), 1, DataType::F32),
-                                                       TensorInfo(TensorShape(27U, 13U, 2U), 1, DataType::F32),
+               framework::dataset::make("OutputInfo",{ TensorInfo(TensorShape(27U, 13U, 2U), 1, DataType::F32),
                                                        TensorInfo(TensorShape(32U, 13U, 2U), 1, DataType::F32),
                                                        TensorInfo(TensorShape(32U, 13U, 2U), 1, DataType::F16),
                                                        TensorInfo(TensorShape(32U, 13U, 2U), 1, DataType::F32),
                                                        TensorInfo(TensorShape(32U, 13U, 2U), 1, DataType::F32),
                                                      })),
               framework::dataset::make("MVBGInfo",{ TensorInfo(TensorShape(2U), 1, DataType::F32),
-                                                     TensorInfo(TensorShape(2U), 1, DataType::F32),
                                                      TensorInfo(TensorShape(2U), 1, DataType::F16),
                                                      TensorInfo(TensorShape(2U), 1, DataType::F32),
                                                      TensorInfo(TensorShape(5U), 1, DataType::F32),
                                                      TensorInfo(TensorShape(2U), 1, DataType::F32),
                                                    })),
               framework::dataset::make("ActivationLayerInfo",{ ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU),
-                                                                ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU),
                                                                 ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::BOUNDED_RELU, 6.f),
                                                                 ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::BOUNDED_RELU, 6.f),
                                                                 ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::LU_BOUNDED_RELU, 6.f),
                                                                 ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::LU_BOUNDED_RELU, 2.f, 6.f),
                                                               })),
-               framework::dataset::make("Expected", { true, false, false, false, false, false})),
+               framework::dataset::make("Expected", { true, false, false, false, false})),
               input_info, output_info, mvbg_info, act_info, expected)
 {
     const auto &mean_info = mvbg_info;
--
cgit v1.2.1