From 63825e8259508dc7731b6de2e008c5ef8c738d79 Mon Sep 17 00:00:00 2001
From: Giorgio Arena
Date: Thu, 25 Mar 2021 14:54:50 +0000
Subject: Implicit padding testing along the X axis on high priority operators

Add artificial implicit padding testing for the following fixtures:
- Scale
- FullyConnected
- Pooling
- DepthwiseConvolution
- DirectConvolution
- Winograd
- FFT
- GEMM/GEMMLowp

Create a utility function that loops through a list of tensors and adds
random padding based on the global seed (only for the NHWC data layout).

Remove GEMMLowpAssemblyFixture since it wasn't used.
Remove some AssetsLibrary headers since they weren't used.

Resolve COMPMID-4161

Change-Id: Ib6f4f7f113ae69b993d7b2a9e04abbf3de8c99fe
Signed-off-by: Giorgio Arena
Reviewed-on: https://review.mlplatform.org/c/ml/ComputeLibrary/+/5327
Reviewed-by: Michele Di Giorgio
Reviewed-by: Georgios Pinitas
Comments-Addressed: Arm Jenkins
Tested-by: Arm Jenkins
---
 tests/validation/fixtures/GEMMFixture.h | 46 +++++++++++++++++++++++++++++++++
 1 file changed, 46 insertions(+)

(limited to 'tests/validation/fixtures/GEMMFixture.h')

diff --git a/tests/validation/fixtures/GEMMFixture.h b/tests/validation/fixtures/GEMMFixture.h
index 500e094e18..45516d4187 100644
--- a/tests/validation/fixtures/GEMMFixture.h
+++ b/tests/validation/fixtures/GEMMFixture.h
@@ -105,6 +105,8 @@ protected:
         ARM_COMPUTE_EXPECT(c.info()->is_resizable(), framework::LogLevel::ERRORS);
         ARM_COMPUTE_EXPECT(dst.info()->is_resizable(), framework::LogLevel::ERRORS);
 
+        add_padding_x({ &a, &b, &c, &dst });
+
         // Allocate tensors
         a.allocator()->allocate();
         b.allocator()->allocate();
@@ -231,6 +233,8 @@ protected:
         ARM_COMPUTE_EXPECT(rhs.info()->is_resizable(), framework::LogLevel::ERRORS);
         ARM_COMPUTE_EXPECT(bias.info()->is_resizable(), framework::LogLevel::ERRORS);
 
+        add_padding_x({ &lhs, &rhs, &bias, &dst });
+
         // Allocate tensors
         lhs.allocator()->allocate();
         rhs.allocator()->allocate();
@@ -347,6 +351,8 @@ protected:
         ARM_COMPUTE_EXPECT(rhs.info()->is_resizable(), framework::LogLevel::ERRORS);
         ARM_COMPUTE_EXPECT(bias.info()->is_resizable(), framework::LogLevel::ERRORS);
 
+        add_padding_x({ &lhs, &rhs, &bias, &dst });
+
         // Allocate tensors
         lhs.allocator()->allocate();
         rhs.allocator()->allocate();
@@ -483,6 +489,12 @@ protected:
         ARM_COMPUTE_EXPECT(rhs.info()->is_resizable(), framework::LogLevel::ERRORS);
         ARM_COMPUTE_EXPECT(bias.info()->is_resizable(), framework::LogLevel::ERRORS);
 
+        // TODO: remove if statement after COMPMID-4368
+        if(!rhs_info.export_to_cl_image)
+        {
+            add_padding_x({ &lhs, &rhs, &lhs_reshaped, &rhs_reshaped, &bias, &dst });
+        }
+
         // Allocate tensors
         lhs.allocator()->allocate();
         rhs.allocator()->allocate();
@@ -625,6 +637,12 @@ protected:
         ARM_COMPUTE_EXPECT(rhs.info()->is_resizable(), framework::LogLevel::ERRORS);
         ARM_COMPUTE_EXPECT(bias.info()->is_resizable(), framework::LogLevel::ERRORS);
 
+        // TODO: remove if statement after COMPMID-4368
+        if(!rhs_info.export_to_cl_image)
+        {
+            add_padding_x({ &lhs, &rhs, &lhs_reshaped, &rhs_reshaped, &bias, &dst });
+        }
+
         // Allocate tensors
         lhs.allocator()->allocate();
         rhs.allocator()->allocate();
@@ -787,6 +805,12 @@ protected:
         ARM_COMPUTE_EXPECT(rhs.info()->is_resizable(), framework::LogLevel::ERRORS);
         ARM_COMPUTE_EXPECT(bias.info()->is_resizable(), framework::LogLevel::ERRORS);
 
+        // TODO: remove if statement after COMPMID-4368
+        if(!rhs_info.export_to_cl_image)
+        {
+            add_padding_x({ &lhs, &rhs, &lhs_reshaped, &rhs_reshaped, &bias, &dst });
+        }
+
         // Allocate tensors
         lhs.allocator()->allocate();
         rhs.allocator()->allocate();
@@ -955,6 +979,12 @@ protected:
         ARM_COMPUTE_EXPECT(rhs.info()->is_resizable(), framework::LogLevel::ERRORS);
         ARM_COMPUTE_EXPECT(bias.info()->is_resizable(), framework::LogLevel::ERRORS);
 
+        // TODO: remove if statement after COMPMID-4368
+        if(!rhs_info.export_to_cl_image)
+        {
+            add_padding_x({ &lhs, &rhs, &lhs_reshaped, &rhs_reshaped, &bias, &dst });
+        }
+
         // Allocate tensors
         lhs.allocator()->allocate();
         rhs.allocator()->allocate();
@@ -1118,6 +1148,12 @@ protected:
         ARM_COMPUTE_EXPECT(rhs.info()->is_resizable(), framework::LogLevel::ERRORS);
         ARM_COMPUTE_EXPECT(bias.info()->is_resizable(), framework::LogLevel::ERRORS);
 
+        // TODO: remove if statement after COMPMID-4368
+        if(!rhs_info.export_to_cl_image)
+        {
+            add_padding_x({ &lhs, &rhs, &rhs_reshaped, &bias, &dst });
+        }
+
         // Allocate tensors
         lhs.allocator()->allocate();
         rhs.allocator()->allocate();
@@ -1277,6 +1313,12 @@ protected:
         ARM_COMPUTE_EXPECT(rhs.info()->is_resizable(), framework::LogLevel::ERRORS);
         ARM_COMPUTE_EXPECT(bias.info()->is_resizable(), framework::LogLevel::ERRORS);
 
+        // TODO: remove if statement after COMPMID-4368
+        if(!rhs_info.export_to_cl_image)
+        {
+            add_padding_x({ &lhs, &rhs, &rhs_reshaped, &bias, &dst });
+        }
+
         // Allocate tensors
         lhs.allocator()->allocate();
         rhs.allocator()->allocate();
@@ -1410,6 +1452,8 @@ protected:
         ARM_COMPUTE_EXPECT(rhs.info()->is_resizable(), framework::LogLevel::ERRORS);
         ARM_COMPUTE_EXPECT(bias.info()->is_resizable(), framework::LogLevel::ERRORS);
 
+        add_padding_x({ &lhs, &rhs, &bias, &dst });
+
         // Allocate tensors
         lhs.allocator()->allocate();
         rhs.allocator()->allocate();
@@ -1539,6 +1583,8 @@ protected:
         ARM_COMPUTE_EXPECT(rhs.info()->is_resizable(), framework::LogLevel::ERRORS);
         ARM_COMPUTE_EXPECT(bias.info()->is_resizable(), framework::LogLevel::ERRORS);
 
+        add_padding_x({ &lhs, &rhs, &bias, &dst });
+
         // Allocate tensors
         lhs.allocator()->allocate();
         rhs.allocator()->allocate();
--
cgit v1.2.1
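
For reference, below is a minimal, self-contained sketch of what an
add_padding_x() utility like the one described in the commit message might
look like. The real helper is not part of this diff (it lives outside
GEMMFixture.h), so everything here is an assumption for illustration:
TensorStub stands in for the library's tensor type, the signature and the
[1, 16] padding range are invented, and the library presumably derives the
seed from its global test seed rather than taking it as a parameter.

    #include <cstdio>
    #include <initializer_list>
    #include <random>

    // Stand-in for the library's tensor metadata; it tracks only X-axis padding.
    struct TensorStub
    {
        unsigned int pad_left{ 0 };  // padding elements before each row
        unsigned int pad_right{ 0 }; // padding elements after each row
    };

    enum class DataLayout
    {
        NCHW,
        NHWC
    };

    // Extend the X-axis padding of every listed tensor by a random amount in
    // [1, 16] elements, but only for NHWC, matching the commit message.
    // Seeding the generator from one global seed keeps any failing padding
    // configuration reproducible across test runs.
    void add_padding_x(std::initializer_list<TensorStub *> tensors,
                       DataLayout                          layout,
                       std::mt19937::result_type           seed)
    {
        if(layout != DataLayout::NHWC)
        {
            return;
        }

        std::mt19937                                gen(seed);
        std::uniform_int_distribution<unsigned int> dist(1U, 16U);

        for(TensorStub *tensor : tensors)
        {
            tensor->pad_left += dist(gen);
            tensor->pad_right += dist(gen);
        }
    }

    int main()
    {
        // Hypothetical usage mirroring the calls this patch adds to the fixtures.
        TensorStub lhs, rhs, bias, dst;
        add_padding_x({ &lhs, &rhs, &bias, &dst }, DataLayout::NHWC, 1234U);

        std::printf("lhs padding: left=%u right=%u\n", lhs.pad_left, lhs.pad_right);
        return 0;
    }

Restricting the padding to NHWC is taken from the commit message; a plausible
reason is that X (dimension 0) is the innermost dimension in NHWC, so implicit
X padding is what perturbs the kernels' row strides. The TODO-guarded branches
in the diff additionally skip the padding whenever the RHS is exported to an
OpenCL image, presumably because cl_image imports constrain the row pitch
alignment; COMPMID-4368 tracks lifting that restriction.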