From d304adbb1c6a2f66144c9cac1104f6e3f30d255a Mon Sep 17 00:00:00 2001
From: Giorgio Arena
Date: Fri, 2 Oct 2020 10:20:11 +0100
Subject: COMPMID-3703 Remove OpenCL padding: CLActivationLayerKernel + create utility macro

Change-Id: I73edadc7299247e7bc51ae37c00d3709023da44a
Signed-off-by: Giorgio Arena
Reviewed-on: https://review.mlplatform.org/c/ml/ComputeLibrary/+/4073
Tested-by: Arm Jenkins
Reviewed-by: Gian Marco Iodice
Comments-Addressed: Arm Jenkins
---
 tests/validation/CL/ActivationLayer.cpp | 48 +++++++++++++++++++++++++++++++--
 1 file changed, 46 insertions(+), 2 deletions(-)

(limited to 'tests/validation')

diff --git a/tests/validation/CL/ActivationLayer.cpp b/tests/validation/CL/ActivationLayer.cpp
index c1844674ac..f776e334a0 100644
--- a/tests/validation/CL/ActivationLayer.cpp
+++ b/tests/validation/CL/ActivationLayer.cpp
@@ -89,6 +89,29 @@ const auto CNNDataTypes = framework::dataset::make("DataType",
 /** Input data sets. */
 const auto ActivationDataset = combine(combine(framework::dataset::make("InPlace", { false, true }), datasets::ActivationFunctions()), framework::dataset::make("AlphaBeta", { 0.5f, 1.f }));
+
+/** Zero padding test */
+bool validate_zero_padding(unsigned int width, unsigned int height, unsigned int channels, unsigned int batches, const ActivationLayerInfo &act_info, DataType data_type)
+{
+    TensorShape shape(width, height, channels, batches);
+
+    // Create tensors
+    CLTensor src = create_tensor<CLTensor>(shape, data_type);
+    CLTensor dst = create_tensor<CLTensor>(shape, data_type);
+
+    src.info()->set_quantization_info(QuantizationInfo(1.f / 256.f, 0));
+    dst.info()->set_quantization_info(QuantizationInfo(1.f / 256.f, 0));
+
+    ARM_COMPUTE_EXPECT(src.info()->is_resizable(), framework::LogLevel::ERRORS);
+    ARM_COMPUTE_EXPECT(dst.info()->is_resizable(), framework::LogLevel::ERRORS);
+
+    // Create and configure function
+    CLActivationLayer act;
+    act.configure(&src, &dst, act_info);
+
+    // Check that configure() has not added padding to either tensor
+    return src.info()->padding().empty() && dst.info()->padding().empty();
+}
 } // namespace

 TEST_SUITE(CL)
@@ -97,7 +120,7 @@ TEST_SUITE(ActivationLayer)
 // clang-format off
 DATA_TEST_CASE(Validate, framework::DatasetMode::ALL, zip(zip(zip(
                framework::dataset::make("InputInfo", { TensorInfo(TensorShape(27U, 13U, 2U), 1, DataType::F32),     // Mismatching data types
-                                                       TensorInfo(TensorShape(27U, 13U, 2U), 1, DataType::F32),     // Window shrink
+                                                       TensorInfo(TensorShape(27U, 13U, 2U), 1, DataType::F32),
                                                        TensorInfo(TensorShape(32U, 13U, 2U), 1, DataType::F32),
                                                        TensorInfo(TensorShape(32U, 13U, 2U), 1, DataType::QASYMM8),
                                                        TensorInfo(TensorShape(27U, 13U, 2U), 1, DataType::QASYMM8), // Invalid quantization info
@@ -126,11 +149,32 @@ DATA_TEST_CASE(Validate, framework::DatasetMode::ALL, zip(zip(zip(
                                                          ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::LOGISTIC),
                                                          ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::SQRT),
                                                        })),
-               framework::dataset::make("Expected", { false, false, true, true, false, false, true, true, false })),
+               framework::dataset::make("Expected", { false, true, true, true, false, false, true, true, false })),
                input_info, output_info, act_info, expected)
 {
     ARM_COMPUTE_EXPECT(bool(CLActivationLayer::validate(&input_info.clone()->set_is_resizable(false), (output_info.total_size() == 0) ? nullptr : &output_info.clone()->set_is_resizable(false), act_info)) == expected, framework::LogLevel::ERRORS);
 }
+
+/** Validate zero padding tests
+ *
+ * A series of validation tests to check that no padding is added as part of configuration for 5 different scenarios.
+ *
+ * Checks performed in order:
+ *     - First dimension multiple of 16
+ *     - First dimension non-multiple of 16
+ *     - First dimension less than 16 (vec_size for qasymm8) but multiple
+ *     - First dimension less than 16 (vec_size for qasymm8) non-multiple
+ *     - Tensor with only one element
+ */
+DATA_TEST_CASE(ValidateZeroPadding, framework::DatasetMode::ALL, combine(
+framework::dataset::make("Width",    { 32U, 37U, 12U, 13U, 1U }),
+framework::dataset::make("DataType", { DataType::F32, DataType::QASYMM8 })),
+width, data_type)
+{
+    const bool one_elem = (width == 1U);
+    bool       status   = validate_zero_padding(width, one_elem ? 1U : 17U, one_elem ? 1U : 7U, one_elem ? 1U : 2U, ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::LU_BOUNDED_RELU, 1U, 6U), data_type);
+    ARM_COMPUTE_EXPECT(status, framework::LogLevel::ERRORS);
+}
 // clang-format on
 // *INDENT-ON*
--
cgit v1.2.1
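
Note on the technique these tests exercise: a kernel can drop its padding
requirement by clamping the start of the last vector it processes so that
the vector overlaps the previous one, instead of reading past the row's end.
The C++ sketch below is only an illustration of that idea; VEC_SIZE,
relu_row and the overlap handling are hypothetical stand-ins for this note,
not the utility macro this commit adds on the OpenCL side.

#include <algorithm>
#include <cstddef>

// Hypothetical sketch: apply a ReLU over one row in fixed-size chunks without
// needing padding past the row's end. When the last chunk would overrun, its
// start is shifted left so it overlaps the previous chunk; the overlapped
// elements are recomputed, which is safe for idempotent ops such as ReLU.
constexpr std::size_t VEC_SIZE = 16;

void relu_row(float *row, std::size_t width)
{
    for(std::size_t x = 0; x < width; x += VEC_SIZE)
    {
        // Clamp so that [start, end) always stays inside [0, width)
        const std::size_t start = (x + VEC_SIZE > width) ? (width > VEC_SIZE ? width - VEC_SIZE : 0) : x;
        const std::size_t end   = std::min(start + VEC_SIZE, width);
        for(std::size_t i = start; i < end; ++i)
        {
            row[i] = std::max(row[i], 0.0f);
        }
    }
}

Because no access ever falls outside the tensor, configure() no longer needs
to grow the tensors, which is what the ValidateZeroPadding cases above assert
via src.info()->padding().empty() and dst.info()->padding().empty().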