diff options
Diffstat (limited to 'tests/validation/fixtures/GEMMFixture.h')
-rw-r--r-- | tests/validation/fixtures/GEMMFixture.h | 46 |
1 file changed, 46 insertions, 0 deletions
diff --git a/tests/validation/fixtures/GEMMFixture.h b/tests/validation/fixtures/GEMMFixture.h
index 500e094e18..45516d4187 100644
--- a/tests/validation/fixtures/GEMMFixture.h
+++ b/tests/validation/fixtures/GEMMFixture.h
@@ -105,6 +105,8 @@ protected:
         ARM_COMPUTE_EXPECT(c.info()->is_resizable(), framework::LogLevel::ERRORS);
         ARM_COMPUTE_EXPECT(dst.info()->is_resizable(), framework::LogLevel::ERRORS);
 
+        add_padding_x({ &a, &b, &c, &dst });
+
         // Allocate tensors
         a.allocator()->allocate();
         b.allocator()->allocate();
@@ -231,6 +233,8 @@ protected:
         ARM_COMPUTE_EXPECT(rhs.info()->is_resizable(), framework::LogLevel::ERRORS);
         ARM_COMPUTE_EXPECT(bias.info()->is_resizable(), framework::LogLevel::ERRORS);
 
+        add_padding_x({ &lhs, &rhs, &bias, &dst });
+
         // Allocate tensors
         lhs.allocator()->allocate();
         rhs.allocator()->allocate();
@@ -347,6 +351,8 @@ protected:
         ARM_COMPUTE_EXPECT(rhs.info()->is_resizable(), framework::LogLevel::ERRORS);
         ARM_COMPUTE_EXPECT(bias.info()->is_resizable(), framework::LogLevel::ERRORS);
 
+        add_padding_x({ &lhs, &rhs, &bias, &dst });
+
         // Allocate tensors
         lhs.allocator()->allocate();
         rhs.allocator()->allocate();
@@ -483,6 +489,12 @@ protected:
         ARM_COMPUTE_EXPECT(rhs.info()->is_resizable(), framework::LogLevel::ERRORS);
         ARM_COMPUTE_EXPECT(bias.info()->is_resizable(), framework::LogLevel::ERRORS);
 
+        // TODO: remove if statement after COMPMID-4368
+        if(!rhs_info.export_to_cl_image)
+        {
+            add_padding_x({ &lhs, &rhs, &lhs_reshaped, &rhs_reshaped, &bias, &dst });
+        }
+
         // Allocate tensors
         lhs.allocator()->allocate();
         rhs.allocator()->allocate();
@@ -625,6 +637,12 @@ protected:
         ARM_COMPUTE_EXPECT(rhs.info()->is_resizable(), framework::LogLevel::ERRORS);
         ARM_COMPUTE_EXPECT(bias.info()->is_resizable(), framework::LogLevel::ERRORS);
 
+        // TODO: remove if statement after COMPMID-4368
+        if(!rhs_info.export_to_cl_image)
+        {
+            add_padding_x({ &lhs, &rhs, &lhs_reshaped, &rhs_reshaped, &bias, &dst });
+        }
+
         // Allocate tensors
         lhs.allocator()->allocate();
         rhs.allocator()->allocate();
@@ -787,6 +805,12 @@ protected:
         ARM_COMPUTE_EXPECT(rhs.info()->is_resizable(), framework::LogLevel::ERRORS);
         ARM_COMPUTE_EXPECT(bias.info()->is_resizable(), framework::LogLevel::ERRORS);
 
+        // TODO: remove if statement after COMPMID-4368
+        if(!rhs_info.export_to_cl_image)
+        {
+            add_padding_x({ &lhs, &rhs, &lhs_reshaped, &rhs_reshaped, &bias, &dst });
+        }
+
         // Allocate tensors
         lhs.allocator()->allocate();
         rhs.allocator()->allocate();
@@ -955,6 +979,12 @@ protected:
         ARM_COMPUTE_EXPECT(rhs.info()->is_resizable(), framework::LogLevel::ERRORS);
         ARM_COMPUTE_EXPECT(bias.info()->is_resizable(), framework::LogLevel::ERRORS);
 
+        // TODO: remove if statement after COMPMID-4368
+        if(!rhs_info.export_to_cl_image)
+        {
+            add_padding_x({ &lhs, &rhs, &lhs_reshaped, &rhs_reshaped, &bias, &dst });
+        }
+
         // Allocate tensors
         lhs.allocator()->allocate();
         rhs.allocator()->allocate();
@@ -1118,6 +1148,12 @@ protected:
         ARM_COMPUTE_EXPECT(rhs.info()->is_resizable(), framework::LogLevel::ERRORS);
         ARM_COMPUTE_EXPECT(bias.info()->is_resizable(), framework::LogLevel::ERRORS);
 
+        // TODO: remove if statement after COMPMID-4368
+        if(!rhs_info.export_to_cl_image)
+        {
+            add_padding_x({ &lhs, &rhs, &rhs_reshaped, &bias, &dst });
+        }
+
         // Allocate tensors
         lhs.allocator()->allocate();
         rhs.allocator()->allocate();
@@ -1277,6 +1313,12 @@ protected:
         ARM_COMPUTE_EXPECT(rhs.info()->is_resizable(), framework::LogLevel::ERRORS);
         ARM_COMPUTE_EXPECT(bias.info()->is_resizable(), framework::LogLevel::ERRORS);
 
+        // TODO: remove if statement after COMPMID-4368
+        if(!rhs_info.export_to_cl_image)
+        {
+            add_padding_x({ &lhs, &rhs, &rhs_reshaped, &bias, &dst });
+        }
+
         // Allocate tensors
         lhs.allocator()->allocate();
         rhs.allocator()->allocate();
@@ -1410,6 +1452,8 @@ protected:
         ARM_COMPUTE_EXPECT(rhs.info()->is_resizable(), framework::LogLevel::ERRORS);
         ARM_COMPUTE_EXPECT(bias.info()->is_resizable(), framework::LogLevel::ERRORS);
 
+        add_padding_x({ &lhs, &rhs, &bias, &dst });
+
         // Allocate tensors
         lhs.allocator()->allocate();
         rhs.allocator()->allocate();
@@ -1539,6 +1583,8 @@ protected:
         ARM_COMPUTE_EXPECT(rhs.info()->is_resizable(), framework::LogLevel::ERRORS);
         ARM_COMPUTE_EXPECT(bias.info()->is_resizable(), framework::LogLevel::ERRORS);
 
+        add_padding_x({ &lhs, &rhs, &bias, &dst });
+
         // Allocate tensors
         lhs.allocator()->allocate();
         rhs.allocator()->allocate();