author     Giorgio Arena <giorgio.arena@arm.com>    2020-10-02 10:20:11 +0100
committer  Giorgio Arena <giorgio.arena@arm.com>    2020-10-07 14:28:17 +0000
commit     d304adbb1c6a2f66144c9cac1104f6e3f30d255a (patch)
tree       325849f9280cfb0c92900794371d1c63d70a619c /tests
parent     1e75adac392dd979bd1a838583ed196e311bc77a (diff)
download   ComputeLibrary-d304adbb1c6a2f66144c9cac1104f6e3f30d255a.tar.gz
COMPMID-3703 Remove OpenCL padding: CLActivationLayerKernel + create utility macro
Change-Id: I73edadc7299247e7bc51ae37c00d3709023da44a
Signed-off-by: Giorgio Arena <giorgio.arena@arm.com>
Reviewed-on: https://review.mlplatform.org/c/ml/ComputeLibrary/+/4073
Tested-by: Arm Jenkins <bsgcomp@arm.com>
Reviewed-by: Gian Marco Iodice <gianmarco.iodice@arm.com>
Comments-Addressed: Arm Jenkins <bsgcomp@arm.com>
Diffstat (limited to 'tests')
 tests/datasets/ShapeDatasets.h          |  1 +
 tests/validation/CL/ActivationLayer.cpp | 48 ++++++++++++++++++++++++++++++--
 2 files changed, 47 insertions(+), 2 deletions(-)
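The patch below adds a zero-padding check to the CL ActivationLayer validation suite. Stripped of the test-framework plumbing, the check reduces to the pattern sketched here; the helper name is illustrative, and the usual test headers and namespaces (CLTensor, create_tensor, CLActivationLayer) are assumed to be in scope, as they are in the file being patched.

// Minimal sketch of the zero-padding check added by this patch
// (helper name is made up; framework headers and namespaces assumed).
bool configuration_leaves_tensors_unpadded(const TensorShape &shape, DataType data_type)
{
    // Start from plain, unpadded tensors
    CLTensor src = create_tensor<CLTensor>(shape, data_type);
    CLTensor dst = create_tensor<CLTensor>(shape, data_type);

    // Configure the function; before this change, configuration could request OpenCL padding
    CLActivationLayer act;
    act.configure(&src, &dst, ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU));

    // With the padding removed, both tensors must still report empty padding
    return src.info()->padding().empty() && dst.info()->padding().empty();
}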
diff --git a/tests/datasets/ShapeDatasets.h b/tests/datasets/ShapeDatasets.h
index ccd0756c2d..565982efb6 100644
--- a/tests/datasets/ShapeDatasets.h
+++ b/tests/datasets/ShapeDatasets.h
@@ -179,6 +179,7 @@ public:
{
// Batch size 1
TensorShape{ 11U, 11U },
+ TensorShape{ 16U, 16U },
TensorShape{ 27U, 13U, 7U },
TensorShape{ 31U, 27U, 17U, 2U },
// Batch size 4
diff --git a/tests/validation/CL/ActivationLayer.cpp b/tests/validation/CL/ActivationLayer.cpp
index c1844674ac..f776e334a0 100644
--- a/tests/validation/CL/ActivationLayer.cpp
+++ b/tests/validation/CL/ActivationLayer.cpp
@@ -89,6 +89,29 @@ const auto CNNDataTypes = framework::dataset::make("DataType",
/** Input data sets. */
const auto ActivationDataset = combine(combine(framework::dataset::make("InPlace", { false, true }), datasets::ActivationFunctions()), framework::dataset::make("AlphaBeta", { 0.5f, 1.f }));
+
+/** Zero padding test */
+bool validate_zero_padding(unsigned int width, unsigned int height, unsigned int channels, unsigned int batches, const ActivationLayerInfo &act_info, DataType data_type)
+{
+ TensorShape shape(width, height, channels, batches);
+
+ // Create tensors
+ CLTensor src = create_tensor<CLTensor>(shape, data_type);
+ CLTensor dst = create_tensor<CLTensor>(shape, data_type);
+
+ src.info()->set_quantization_info(QuantizationInfo(1.f / 256.f, 0));
+ dst.info()->set_quantization_info(QuantizationInfo(1.f / 256.f, 0));
+
+ ARM_COMPUTE_EXPECT(src.info()->is_resizable(), framework::LogLevel::ERRORS);
+ ARM_COMPUTE_EXPECT(dst.info()->is_resizable(), framework::LogLevel::ERRORS);
+
+ // Create and configure function
+ CLActivationLayer act;
+ act.configure(&src, &dst, act_info);
+
+ // Check that configuration has not added padding to either tensor
+ return src.info()->padding().empty() && dst.info()->padding().empty();
+}
} // namespace
TEST_SUITE(CL)
@@ -97,7 +120,7 @@ TEST_SUITE(ActivationLayer)
// clang-format off
DATA_TEST_CASE(Validate, framework::DatasetMode::ALL, zip(zip(zip(
framework::dataset::make("InputInfo", { TensorInfo(TensorShape(27U, 13U, 2U), 1, DataType::F32), // Mismatching data types
- TensorInfo(TensorShape(27U, 13U, 2U), 1, DataType::F32), // Window shrink
+ TensorInfo(TensorShape(27U, 13U, 2U), 1, DataType::F32),
TensorInfo(TensorShape(32U, 13U, 2U), 1, DataType::F32),
TensorInfo(TensorShape(32U, 13U, 2U), 1, DataType::QASYMM8),
TensorInfo(TensorShape(27U, 13U, 2U), 1, DataType::QASYMM8), // Invalid quantization info
@@ -126,11 +149,32 @@ DATA_TEST_CASE(Validate, framework::DatasetMode::ALL, zip(zip(zip(
ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::LOGISTIC),
ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::SQRT),
})),
- framework::dataset::make("Expected", { false, false, true, true, false, false, true, true, false })),
+ framework::dataset::make("Expected", { false, true, true, true, false, false, true, true, false })),
input_info, output_info, act_info, expected)
{
ARM_COMPUTE_EXPECT(bool(CLActivationLayer::validate(&input_info.clone()->set_is_resizable(false), (output_info.total_size() == 0) ? nullptr : &output_info.clone()->set_is_resizable(false), act_info)) == expected, framework::LogLevel::ERRORS);
}
+
+/** Validate zero padding tests
+ *
+ * A series of validation tests to check that no padding is added as part of configuration for 5 different scenarios.
+ *
+ * Checks performed in order:
+ * - First dimension multiple of 16
+ * - First dimension non-multiple of 16
+ * - First dimension less than 16 (vec_size for qasymm8) but multiple
+ * - First dimension less than 16 (vec_size for qasymm8) non-multiple
+ * - Tensor with only one element
+ */
+DATA_TEST_CASE(ValidateZeroPadding, framework::DatasetMode::ALL, combine(
+framework::dataset::make("Width", { 32U, 37U, 12U, 13U, 1U }),
+framework::dataset::make("DataType", { DataType::F32, DataType::QASYMM8 })),
+width, data_type)
+{
+ const bool one_elem = (width == 1U);
+ bool status = validate_zero_padding(width, one_elem ? 1U : 17U, one_elem ? 1U : 7U, one_elem ? 1U : 2U, ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::LU_BOUNDED_RELU, 1U, 6U), data_type);
+ ARM_COMPUTE_EXPECT(status, framework::LogLevel::ERRORS);
+}
// clang-format on
// *INDENT-ON*
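This diff covers only the test side of COMPMID-3703; the padding removal itself happens in CLActivationLayerKernel, where each row is processed as a number of full vector-width steps plus an in-bounds leftover step instead of relying on right-hand buffer padding. The utility macro named in the commit title is not shown here; the snippet below is only a hedged illustration of that split, using made-up names rather than ComputeLibrary's actual helpers.

#include <cstddef>

// Illustrative only: split a row of dim0 elements into full vectors of
// vec_size elements plus a leftover count that the kernel handles without
// reading or writing past the end of the row, so no padding is required.
struct RowSplit
{
    std::size_t full_vectors; // steps that process vec_size elements each
    std::size_t leftover;     // remaining elements handled by a partial step
};

RowSplit split_row(std::size_t dim0, std::size_t vec_size)
{
    return RowSplit{ dim0 / vec_size, dim0 % vec_size };
}

// e.g. split_row(37, 16) yields { 2, 5 }: two full 16-wide steps plus 5
// leftover elements, matching the non-multiple-of-16 width exercised by the
// ValidateZeroPadding test above.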