Diffstat (limited to 'tests/benchmark/fixtures/BatchNormalizationLayerFixture.h')
 tests/benchmark/fixtures/BatchNormalizationLayerFixture.h | 6 ++++--
 1 file changed, 4 insertions(+), 2 deletions(-)
diff --git a/tests/benchmark/fixtures/BatchNormalizationLayerFixture.h b/tests/benchmark/fixtures/BatchNormalizationLayerFixture.h
index fbb7700710..c55bb2acc9 100644
--- a/tests/benchmark/fixtures/BatchNormalizationLayerFixture.h
+++ b/tests/benchmark/fixtures/BatchNormalizationLayerFixture.h
@@ -42,7 +42,7 @@ class BatchNormalizationLayerFixture : public framework::Fixture
 {
 public:
     template <typename...>
-    void setup(TensorShape tensor_shape, TensorShape param_shape, float epsilon, ActivationLayerInfo act_info, DataType data_type, int batches)
+    void setup(TensorShape tensor_shape, TensorShape param_shape, float epsilon, bool use_gamma, bool use_beta, ActivationLayerInfo act_info, DataType data_type, int batches)
     {
         // Set batched in source and destination shapes
         const unsigned int fixed_point_position = 4;
@@ -57,7 +57,9 @@ public:
         gamma = create_tensor<TensorType>(param_shape, data_type, 1, fixed_point_position);

         // Create and configure function
-        batch_norm_layer.configure(&src, &dst, &mean, &variance, &beta, &gamma, epsilon, act_info);
+        TensorType *beta_ptr = use_beta ? &beta : nullptr;
+        TensorType *gamma_ptr = use_gamma ? &gamma : nullptr;
+        batch_norm_layer.configure(&src, &dst, &mean, &variance, beta_ptr, gamma_ptr, epsilon, act_info);

         // Allocate tensors
         src.allocator()->allocate();
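
For reference, the sketch below shows how a caller might exercise the extended setup() signature, where use_gamma/use_beta decide whether configure() receives the gamma/beta tensors or nullptr. It is illustrative only: the template arguments, shapes, epsilon and data type are assumptions, not values taken from this patch, and framework includes/namespaces are omitted.

// Hypothetical usage sketch (not part of the patch).
template <typename TensorType, typename Function, typename Accessor>
void run_batch_norm_benchmark(BatchNormalizationLayerFixture<TensorType, Function, Accessor> &fixture)
{
    fixture.setup(TensorShape(28U, 28U, 32U), // input/output tensor shape (assumed)
                  TensorShape(32U),           // per-channel parameter shape (assumed)
                  0.001f,                     // epsilon (assumed)
                  true,                       // use_gamma: configure() is given &gamma
                  false,                      // use_beta: configure() is given nullptr
                  ActivationLayerInfo(),      // no fused activation
                  DataType::F32,              // data type (assumed)
                  1);                         // batches (assumed)
}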