From fff9a4cb56d3d3dbfe85db555eea4bc9b3143996 Mon Sep 17 00:00:00 2001
From: Omar Al Khatib
Date: Tue, 28 Mar 2023 11:14:29 +0100
Subject: Add Cropping to CLBatchToSpace

- Deprecate dynamic block shape interface
- Iterate over output window instead of input window for simpler
  implementation and better performance.
- Add cropping support and cropping tests

Resolves [COMPMID-5865]

Signed-off-by: Omar Al Khatib
Change-Id: Ic67d44a6a39299ecdafc507f12e3dc5d517dfb62
Reviewed-on: https://review.mlplatform.org/c/ml/ComputeLibrary/+/9385
Comments-Addressed: Arm Jenkins
Reviewed-by: SiCong Li
Tested-by: Arm Jenkins
Benchmark: Arm Jenkins
---
 tests/validation/CL/BatchToSpaceLayer.cpp | 88 ++++++++++++++++---------------
 1 file changed, 45 insertions(+), 43 deletions(-)

(limited to 'tests')

diff --git a/tests/validation/CL/BatchToSpaceLayer.cpp b/tests/validation/CL/BatchToSpaceLayer.cpp
index e90ac921c5..ca12b76e8a 100644
--- a/tests/validation/CL/BatchToSpaceLayer.cpp
+++ b/tests/validation/CL/BatchToSpaceLayer.cpp
@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 2018-2020 Arm Limited.
+ * Copyright (c) 2018-2021, 2023 Arm Limited.
  *
  * SPDX-License-Identifier: MIT
  *
@@ -50,56 +50,38 @@ using CLBatchToSpaceLayerFixture = BatchToSpaceLayerValidationFixture
-                                                       TensorInfo(TensorShape(16U, 8U, 2U, 16U), 1, DataType::F32),    // blockx != blocky && blockx > blocky
-                                                       TensorInfo(TensorShape(16U, 8U, 2U, 16U), 1, DataType::F32),    // blockx != blocky && blocky > blockx
-                                                       TensorInfo(TensorShape(32U, 13U, 2U, 2U), 1, DataType::F32),    // Mismatching data types
-                                                       TensorInfo(TensorShape(32U, 13U, 2U, 2U), 1, DataType::F32),    // Wrong data type block shape
-                                                       TensorInfo(TensorShape(32U, 13U, 2U, 2U, 4U), 1, DataType::F32), // Wrong tensor shape
-                                                     }),
-               framework::dataset::make("BlockShapeInfo",{ TensorInfo(TensorShape(2U, 2U), 1, DataType::S32),
-                                                       TensorInfo(TensorShape(2U, 4U), 1, DataType::S32),
-                                                       TensorInfo(TensorShape(4U, 2U), 1, DataType::S32),
-                                                       TensorInfo(TensorShape(2U, 2U), 1, DataType::S32),
-                                                       TensorInfo(TensorShape(2U, 2U), 1, DataType::F16),
-                                                       TensorInfo(TensorShape(2U, 2U), 1, DataType::S32),
-                                                     })),
-               framework::dataset::make("OutputInfo",{ TensorInfo(TensorShape(32U, 13U, 2U, 2U), 1, DataType::F32),
-                                                       TensorInfo(TensorShape(64U, 16U, 2U, 1U), 1, DataType::F32),
-                                                       TensorInfo(TensorShape(32U, 32U, 2U, 1U), 1, DataType::F32),
-
-                                                       TensorInfo(TensorShape(32U, 13U, 2U, 2U), 1, DataType::F16),
-                                                       TensorInfo(TensorShape(32U, 13U, 2U, 2U), 1, DataType::F32),
-                                                       TensorInfo(TensorShape(32U, 13U, 2U, 2U), 1, DataType::F32),
-                                                     })),
-               framework::dataset::make("Expected", { true, true,true, false, false, false})),
-               input_info, block_shape_info, output_info, expected)
-{
-    bool has_error = bool(CLBatchToSpaceLayer::validate(&input_info.clone()->set_is_resizable(false), &block_shape_info.clone()->set_is_resizable(false), &output_info.clone()->set_is_resizable(false)));
-    ARM_COMPUTE_EXPECT(has_error == expected, framework::LogLevel::ERRORS);
-}
-DATA_TEST_CASE(ValidateStatic, framework::DatasetMode::ALL, zip(zip(zip(zip(
+DATA_TEST_CASE(ValidateStatic, framework::DatasetMode::ALL, zip(zip(zip(zip(zip(
                framework::dataset::make("InputInfo", { TensorInfo(TensorShape(16U, 8U, 2U, 4U), 1, DataType::F32),
-                                                       TensorInfo(TensorShape(16U, 8U, 2U, 16U), 1, DataType::F32),   // blockx != blocky && blockx > blocky
-                                                       TensorInfo(TensorShape(16U, 8U, 2U, 16U), 1, DataType::F32),   // blockx != blocky && blocky > blockx
-                                                       TensorInfo(TensorShape(16U, 8U, 2U, 4U), 1, DataType::F32),    // Mismatching data types
-                                                       TensorInfo(TensorShape(16U, 8U, 2U, 4U), 1, DataType::F32),    // Negative block shapes
-                                                       TensorInfo(TensorShape(32U, 16U, 2U, 4U, 4U), 1, DataType::F32), // Wrong tensor shape
+                                                       TensorInfo(TensorShape(16U, 8U, 2U, 16U), 1, DataType::F32),   // Supported: blockx != blocky && blockx > blocky
+                                                       TensorInfo(TensorShape(16U, 8U, 2U, 16U), 1, DataType::F32),   // Supported: blockx != blocky && blocky > blockx
+                                                       TensorInfo(TensorShape(16U, 8U, 2U, 4U), 1, DataType::F32),    // Invalid: Mismatching data types
+                                                       TensorInfo(TensorShape(16U, 8U, 2U, 4U), 1, DataType::F32),    // Invalid: Negative block shapes
+                                                       TensorInfo(TensorShape(32U, 16U, 2U, 4U, 4U), 1, DataType::F32),// Unsupported tensor rank
+                                                       TensorInfo(TensorShape(16U, 8U, 2U, 16U), 1, DataType::F32),   // Invalid output tensor shape (invalid batch dimension)
+                                                       TensorInfo(TensorShape(16U, 8U, 2U, 16U), 1, DataType::F32),   // Invalid output tensor shape (invalid spatial dimension)
+                                                       TensorInfo(TensorShape(16U, 8U, 2U, 16U), 1, DataType::F32),   // Supported: correct tensor shape with cropping
+                                                       TensorInfo(TensorShape(16U, 8U, 2U, 16U), 1, DataType::F32),   // Invalid tensor shape with cropping
                                                      }),
-               framework::dataset::make("BlockShapeX", { 2, 4, 2, 2, 2, 2 })),
-               framework::dataset::make("BlockShapeY", { 2, 2, 4, 2, -2, 2 })),
+               framework::dataset::make("BlockShapeX", { 2, 4, 2, 2, 2, 2, 2, 2, 2, 2 })),
+               framework::dataset::make("BlockShapeY", { 2, 2, 4, 2, -2, 2, 2, 2, 2, 2 })),
+               framework::dataset::make("CropInfo", {
+                   CropInfo{}, CropInfo{}, CropInfo{}, CropInfo{}, CropInfo{}, CropInfo{}, CropInfo{}, CropInfo{}, CropInfo{3, 2, 1, 3}, CropInfo{3, 2, 1, 3}
+               })),
                framework::dataset::make("OutputInfo",{ TensorInfo(TensorShape(32U, 16U, 2U, 1U), 1, DataType::F32),
-                                                       TensorInfo(TensorShape(64U, 16U, 2U, 1U), 1, DataType::F32),
-                                                       TensorInfo(TensorShape(32U, 32U, 2U, 1U), 1, DataType::F32),
+                                                       TensorInfo(TensorShape(64U, 16U, 2U, 2U), 1, DataType::F32),
+                                                       TensorInfo(TensorShape(32U, 32U, 2U, 2U), 1, DataType::F32),
                                                        TensorInfo(TensorShape(32U, 16U, 2U, 1U), 1, DataType::F16),
                                                        TensorInfo(TensorShape(32U, 16U, 2U, 1U), 1, DataType::F32),
                                                        TensorInfo(TensorShape(32U, 8U, 2U, 1U), 1, DataType::F32),
+                                                       TensorInfo(TensorShape(32U, 16U, 2U, 1U), 1, DataType::F32),
+                                                       TensorInfo(TensorShape(33U, 32U, 2U, 4U), 1, DataType::F32),
+                                                       TensorInfo(TensorShape(27, 12U, 2U, 4U), 1, DataType::F32),
+                                                       TensorInfo(TensorShape(32U, 16U, 2U, 4U), 1, DataType::F32),
                                                      })),
-               framework::dataset::make("Expected", { true, true,true, false, false, false})),
-               input_info, block_shape_x, block_shape_y, output_info, expected)
+               framework::dataset::make("Expected", { true, true, true, false, false, false, false, false, true, false})),
+               input_info, block_shape_x, block_shape_y, crop_info, output_info, expected)
 {
-    bool has_error = bool(CLBatchToSpaceLayer::validate(&input_info.clone()->set_is_resizable(false), block_shape_x, block_shape_y, &output_info.clone()->set_is_resizable(false)));
+    bool has_error = bool(CLBatchToSpaceLayer::validate(&input_info.clone()->set_is_resizable(false), block_shape_x, block_shape_y, &output_info.clone()->set_is_resizable(false), crop_info));
     ARM_COMPUTE_EXPECT(has_error == expected, framework::LogLevel::ERRORS);
 }
 // clang-format on
@@ -114,6 +96,16 @@ FIXTURE_DATA_TEST_CASE(RunSmall, CLBatchToSpaceLayerFixture<float>, framework::D
     // Validate output
     validate(CLAccessor(_target), _reference);
 }
+
+FIXTURE_DATA_TEST_CASE(RunSmallWithCropping, CLBatchToSpaceLayerFixture<float>, framework::DatasetMode::PRECOMMIT,
+                       combine(combine(datasets::SmallBatchToSpaceLayerWithCroppingDataset(), framework::dataset::make("DataType",
+                                       DataType::F32)),
+                               framework::dataset::make("DataLayout", { DataLayout::NCHW, DataLayout::NHWC })))
+{
+    // Validate output
+    validate(CLAccessor(_target), _reference);
+}
+
 FIXTURE_DATA_TEST_CASE(RunLarge, CLBatchToSpaceLayerFixture<float>, framework::DatasetMode::NIGHTLY, combine(combine(datasets::LargeBatchToSpaceLayerDataset(), framework::dataset::make("DataType",
                        DataType::F32)),
                        framework::dataset::make("DataLayout", { DataLayout::NCHW, DataLayout::NHWC })))
@@ -131,6 +123,16 @@ FIXTURE_DATA_TEST_CASE(RunSmall, CLBatchToSpaceLayerFixture<half>, framework::Da
     // Validate output
     validate(CLAccessor(_target), _reference);
 }
+
+FIXTURE_DATA_TEST_CASE(RunSmallWithCropping, CLBatchToSpaceLayerFixture<half>, framework::DatasetMode::PRECOMMIT,
+                       combine(combine(datasets::SmallBatchToSpaceLayerWithCroppingDataset(), framework::dataset::make("DataType",
+                                       DataType::F16)),
+                               framework::dataset::make("DataLayout", { DataLayout::NCHW, DataLayout::NHWC })))
+{
+    // Validate output
+    validate(CLAccessor(_target), _reference);
+}
+
 FIXTURE_DATA_TEST_CASE(RunLarge, CLBatchToSpaceLayerFixture<half>, framework::DatasetMode::NIGHTLY, combine(combine(datasets::LargeBatchToSpaceLayerDataset(), framework::dataset::make("DataType",
                        DataType::F16)),
                        framework::dataset::make("DataLayout", { DataLayout::NCHW, DataLayout::NHWC })))
--
cgit v1.2.1
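
For context, below is a minimal usage sketch of the static block-shape interface with cropping that the new ValidateStatic entries and the RunSmallWithCropping fixtures exercise. It reuses the shapes of the passing cropping case from the dataset above (input 16x8x2x16 in WHCN order, 2x2 block, CropInfo{3, 2, 1, 3}, output 27x12x2x4). The validate() signature is taken from the test code in this patch; the configure() overload, the header paths and the {left, right, top, bottom} ordering of CropInfo are assumptions rather than anything shown here, so treat the snippet as illustrative, not as the library's documented API.

    #include "arm_compute/core/TensorInfo.h"
    #include "arm_compute/core/Types.h"
    #include "arm_compute/runtime/CL/CLScheduler.h"
    #include "arm_compute/runtime/CL/CLTensor.h"
    #include "arm_compute/runtime/CL/functions/CLBatchToSpaceLayer.h"

    using namespace arm_compute;

    int main()
    {
        CLScheduler::get().default_init();

        // Shapes follow the passing cropping case in the ValidateStatic dataset:
        // N=16 is folded into 2x2 spatial blocks (N becomes 4, W and H double),
        // then the output is cropped by {left=3, right=2, top=1, bottom=3},
        // giving W=27, H=12, C=2, N=4.
        CLTensor input;
        CLTensor output;
        input.allocator()->init(TensorInfo(TensorShape(16U, 8U, 2U, 16U), 1, DataType::F32));
        output.allocator()->init(TensorInfo(TensorShape(27U, 12U, 2U, 4U), 1, DataType::F32));

        // Assumed {left, right, top, bottom} ordering, matching the test dataset entry.
        const CropInfo crop_info{3, 2, 1, 3};

        // Static block-shape path with cropping; the dynamic (tensor) block-shape
        // interface is the one this patch deprecates. Signature as used by the test.
        const Status status = CLBatchToSpaceLayer::validate(input.info(), 2, 2, output.info(), crop_info);
        if(!bool(status))
        {
            return 1; // configuration rejected
        }

        CLBatchToSpaceLayer b2s;
        b2s.configure(&input, 2, 2, &output, crop_info); // assumed overload mirroring validate()

        input.allocator()->allocate();
        output.allocator()->allocate();
        // ... fill the input tensor ...
        b2s.run();
        CLScheduler::get().sync();
        return 0;
    }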