Diffstat (limited to 'tests/validation/fixtures/GEMMLowpFixture.h')
-rw-r--r-- tests/validation/fixtures/GEMMLowpFixture.h | 14 ++++++++++++++
1 file changed, 14 insertions(+), 0 deletions(-)
diff --git a/tests/validation/fixtures/GEMMLowpFixture.h b/tests/validation/fixtures/GEMMLowpFixture.h
index 95f49601a5..c3da2e261d 100644
--- a/tests/validation/fixtures/GEMMLowpFixture.h
+++ b/tests/validation/fixtures/GEMMLowpFixture.h
@@ -133,6 +133,8 @@ TensorType compute_gemmlowp_target(const TensorShape &shape_a, const TensorShape
ARM_COMPUTE_EXPECT(b.info()->is_resizable(), framework::LogLevel::ERRORS);
ARM_COMPUTE_EXPECT(output.info()->is_resizable(), framework::LogLevel::ERRORS);
+ add_padding_x({ &a, &b, &output });
+
// Allocate tensors
a.allocator()->allocate();
b.allocator()->allocate();
@@ -948,6 +950,8 @@ protected:
ARM_COMPUTE_EXPECT(lhs.info()->is_resizable(), framework::LogLevel::ERRORS);
ARM_COMPUTE_EXPECT(rhs.info()->is_resizable(), framework::LogLevel::ERRORS);
+ add_padding_x({ &lhs, &rhs, &lhs_reshaped, &rhs_reshaped, &dst });
+
// Allocate tensors
lhs.allocator()->allocate();
rhs.allocator()->allocate();
@@ -1098,6 +1102,8 @@ protected:
ARM_COMPUTE_EXPECT(lhs.info()->is_resizable(), framework::LogLevel::ERRORS);
ARM_COMPUTE_EXPECT(rhs.info()->is_resizable(), framework::LogLevel::ERRORS);
+ add_padding_x({ &lhs, &rhs, &lhs_reshaped, &rhs_reshaped, &dst });
+
// Allocate tensors
lhs.allocator()->allocate();
rhs.allocator()->allocate();
@@ -1247,6 +1253,8 @@ protected:
ARM_COMPUTE_EXPECT(lhs.info()->is_resizable(), framework::LogLevel::ERRORS);
ARM_COMPUTE_EXPECT(rhs.info()->is_resizable(), framework::LogLevel::ERRORS);
+ add_padding_x({ &lhs, &rhs, &rhs_reshaped, &dst });
+
// Allocate tensors
lhs.allocator()->allocate();
rhs.allocator()->allocate();
@@ -1390,6 +1398,8 @@ protected:
ARM_COMPUTE_EXPECT(lhs.info()->is_resizable(), framework::LogLevel::ERRORS);
ARM_COMPUTE_EXPECT(rhs.info()->is_resizable(), framework::LogLevel::ERRORS);
+ add_padding_x({ &lhs, &rhs, &rhs_reshaped, &dst });
+
// Allocate tensors
lhs.allocator()->allocate();
rhs.allocator()->allocate();
@@ -1502,6 +1512,8 @@ protected:
ARM_COMPUTE_EXPECT(lhs.info()->is_resizable(), framework::LogLevel::ERRORS);
ARM_COMPUTE_EXPECT(rhs.info()->is_resizable(), framework::LogLevel::ERRORS);
+ add_padding_x({ &lhs, &rhs, &dst });
+
// Allocate tensors
lhs.allocator()->allocate();
rhs.allocator()->allocate();
@@ -1597,6 +1609,8 @@ protected:
ARM_COMPUTE_EXPECT(lhs.info()->is_resizable(), framework::LogLevel::ERRORS);
ARM_COMPUTE_EXPECT(rhs.info()->is_resizable(), framework::LogLevel::ERRORS);
+ add_padding_x({ &lhs, &rhs, &dst });
+
// Allocate tensors
lhs.allocator()->allocate();
rhs.allocator()->allocate();
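
The patch applies the same one-line change to every GEMMLowp fixture: request extra padding along the x dimension for all tensors involved, after the resizable-tensor assertions but before the allocators run, so the kernels under test are exercised with row strides larger than the row width. Below is a minimal standalone sketch of that pattern; the PaddedTensor type and the add_padding_x body here are illustrative assumptions for this page only, not the Compute Library's actual validation helper or tensor API.

#include <cstddef>
#include <initializer_list>
#include <random>
#include <vector>

// Illustrative stand-in for a tensor whose rows can carry extra x padding.
// This is NOT arm_compute::Tensor; it only models the ordering the patch
// relies on: padding must be requested before allocate() is called.
struct PaddedTensor
{
    std::size_t width     = 0; // elements per row
    std::size_t height    = 0; // number of rows
    std::size_t pad_left  = 0; // extra elements before each row
    std::size_t pad_right = 0; // extra elements after each row
    std::vector<float> data;   // backing store, sized at allocate() time

    void allocate()
    {
        data.resize((pad_left + width + pad_right) * height);
    }
};

// Sketch of an add_padding_x-style helper: give every tensor a small random
// amount of x padding so the row stride no longer equals the row width.
inline void add_padding_x(std::initializer_list<PaddedTensor *> tensors)
{
    std::mt19937 gen(1234); // fixed seed keeps the fixture reproducible
    std::uniform_int_distribution<std::size_t> dist(1, 16);
    for(PaddedTensor *t : tensors)
    {
        t->pad_left  += dist(gen);
        t->pad_right += dist(gen);
    }
}

Usage mirrors the hunks above: call add_padding_x({ &lhs, &rhs, &dst }) (and any reshaped tensors) immediately before the allocate() calls, since that is the last point at which padding can still influence how the backing memory is laid out.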