From 2dc7e407141a1b213a31b9fa78a958d6652d4889 Mon Sep 17 00:00:00 2001
From: Michalis Spyrou
Date: Fri, 28 Feb 2020 14:41:35 +0000
Subject: COMPMID-3171: Remove padding from NESoftmaxLayerKernel

Change-Id: Ia01ad8cda34c42e681b006f570e8d150d97fb208
Signed-off-by: Michalis Spyrou
Reviewed-on: https://review.mlplatform.org/c/ml/ComputeLibrary/+/2809
Tested-by: Arm Jenkins
Reviewed-by: Michele Di Giorgio
---
 tests/validation/NEON/SoftmaxLayer.cpp | 43 ++-----------------------------------------
 1 file changed, 2 insertions(+), 41 deletions(-)

diff --git a/tests/validation/NEON/SoftmaxLayer.cpp b/tests/validation/NEON/SoftmaxLayer.cpp
index cbf7729bc3..c429782e60 100644
--- a/tests/validation/NEON/SoftmaxLayer.cpp
+++ b/tests/validation/NEON/SoftmaxLayer.cpp
@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 2017-2019 ARM Limited.
+ * Copyright (c) 2017-2020 ARM Limited.
  *
  * SPDX-License-Identifier: MIT
  *
@@ -63,37 +63,6 @@ const auto CNNDataTypes = framework::dataset::make("DataType",
 TEST_SUITE(NEON)
 TEST_SUITE(SoftmaxLayer)
 
-DATA_TEST_CASE(Configuration, framework::DatasetMode::ALL, combine(concat(datasets::Small2DShapes(), datasets::Medium2DShapes()), CNNDataTypes), shape, data_type)
-{
-    const QuantizationInfo quantization_info = is_data_type_quantized_asymmetric(data_type) ? QuantizationInfo(1.f / 255.f, 0) : QuantizationInfo();
-
-    // Create tensors
-    Tensor src = create_tensor<Tensor>(shape, data_type, 1, quantization_info);
-    Tensor dst = create_tensor<Tensor>(shape, data_type, 1, QuantizationInfo(1.f / 256.f, 0));
-
-    ARM_COMPUTE_EXPECT(src.info()->is_resizable(), framework::LogLevel::ERRORS);
-    ARM_COMPUTE_EXPECT(dst.info()->is_resizable(), framework::LogLevel::ERRORS);
-
-    // Create and configure function
-    NESoftmaxLayer smx_layer;
-    smx_layer.configure(&src, &dst);
-
-    // Validate valid region
-    const ValidRegion valid_region = shape_to_valid_region(shape);
-    validate(src.info()->valid_region(), valid_region);
-    validate(dst.info()->valid_region(), valid_region);
-
-    // NESoftmaxLayer configures the paddings only in the 2D case
-    if(shape.num_dimensions() <= 2)
-    {
-        // Validate padding
-        const int         step    = 16 / data_size_from_type(data_type);
-        const PaddingSize padding = PaddingCalculator(shape.x(), step).required_padding();
-        validate(src.info()->padding(), padding);
-        validate(dst.info()->padding(), PaddingSize());
-    }
-}
-
 // *INDENT-OFF*
 // clang-format off
 DATA_TEST_CASE(Validate, framework::DatasetMode::ALL, zip(zip(zip(zip(
@@ -101,8 +70,6 @@ DATA_TEST_CASE(Validate, framework::DatasetMode::ALL, zip(zip(zip(zip(
                                             TensorInfo(TensorShape(27U, 13U), 1, DataType::F32),    // Mismatching shapes
                                             TensorInfo(TensorShape(27U, 13U), 1, DataType::QASYMM8, // Invalid output quantization info
                                                        QuantizationInfo(1.f/256, 12)),
-                                            TensorInfo(TensorShape(27U, 13U), 1, DataType::F32),    // Window shrink
-                                            TensorInfo(TensorShape(27U, 13U, 2U), 1, DataType::F32),// Invalid input dimensionality
                                             TensorInfo(TensorShape(32U, 13U), 1, DataType::F32),
                                             TensorInfo(TensorShape(32U, 13U), 1, DataType::QASYMM8,
                                                        QuantizationInfo(1.f/256, 12)),
@@ -113,8 +80,6 @@ DATA_TEST_CASE(Validate, framework::DatasetMode::ALL, zip(zip(zip(zip(
                                             TensorInfo(TensorShape(27U, 11U), 1, DataType::F32),
                                             TensorInfo(TensorShape(27U, 13U), 1, DataType::QASYMM8,
                                                        QuantizationInfo(1.f/256, 12)),
-                                            TensorInfo(TensorShape(27U, 13U), 1, DataType::F32),
-                                            TensorInfo(TensorShape(27U, 13U, 2U), 1, DataType::F32),
                                             TensorInfo(TensorShape(32U, 13U), 1, DataType::F32),
                                             TensorInfo(TensorShape(32U, 13U), 1, DataType::QASYMM8,
                                                        QuantizationInfo(1.f/256, 0)),
@@ -128,19 +93,15 @@ DATA_TEST_CASE(Validate, framework::DatasetMode::ALL, zip(zip(zip(zip(
                                            1.0,
                                            2.0,
                                            1.0,
-                                           2.0,
-                                           1.0,
                                          })),
               framework::dataset::make("axis", { 1,
                                                  1,
                                                  1,
                                                  1,
                                                  1,
-                                                 1,
-                                                 1,
                                                  0,
                                                })),
-              framework::dataset::make("Expected", { false, false, false, false, false, true, true, false })),
+              framework::dataset::make("Expected", { false, false, false, true, true, false })),
               input_info, output_info, beta, axis, expected)
 {
     ARM_COMPUTE_EXPECT(bool(NESoftmaxLayer::validate(&input_info.clone()->set_is_resizable(false), &output_info.clone()->set_is_resizable(false), beta, axis)) == expected, framework::LogLevel::ERRORS);
--
cgit v1.2.1
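
For reference, a minimal sketch of the validate-only path that the remaining Validate rows exercise, not part of the patch: it assumes the ComputeLibrary public headers named below and the (input, output, beta, axis) overload of NESoftmaxLayer::validate() used in the test; the shapes and beta value are illustrative, not taken from the patch.

    // Minimal sketch: query NESoftmaxLayer::validate() without allocating tensors.
    // Assumes ComputeLibrary is on the include/link path; header names as in the library tree.
    #include "arm_compute/core/TensorInfo.h"
    #include "arm_compute/core/TensorShape.h"
    #include "arm_compute/core/Types.h"
    #include "arm_compute/runtime/NEON/functions/NESoftmaxLayer.h"

    #include <cstdio>

    using namespace arm_compute;

    int main()
    {
        // Matching 2D F32 input/output, mirroring the dataset rows expected to pass.
        const TensorInfo input(TensorShape(32U, 13U), 1, DataType::F32);
        const TensorInfo output(TensorShape(32U, 13U), 1, DataType::F32);

        // beta = 1.0f and axis = 1, as in the surviving "Expected: true" entries.
        const Status status = NESoftmaxLayer::validate(&input, &output, 1.0f, 1);

        std::printf("validate: %s\n", bool(status) ? "OK" : status.error_description().c_str());
        return bool(status) ? 0 : 1;
    }

In the dataset above, the one remaining row that uses axis = 0 is the case the test expects validation to reject.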