From 1167487ea8e54a76d0a3625e0aa84e2ad9ffd317 Mon Sep 17 00:00:00 2001
From: Giorgio Arena
Date: Wed, 7 Feb 2018 15:38:12 +0000
Subject: COMPMID-897 Merge batch normalization with bounded relu

Change-Id: I9a607fe620f795cdea1a99fdd3f5f8c2fc76f980
Reviewed-on: https://eu-gerrit-1.euhpc.arm.com/119234
Tested-by: Jenkins
Reviewed-by: Gian Marco Iodice
Reviewed-by: Georgios Pinitas
---
 .../reference/BatchNormalizationLayer.cpp | 24 +++++++++++++++++-------
 1 file changed, 17 insertions(+), 7 deletions(-)

(limited to 'tests/validation/reference/BatchNormalizationLayer.cpp')

diff --git a/tests/validation/reference/BatchNormalizationLayer.cpp b/tests/validation/reference/BatchNormalizationLayer.cpp
index e4446d1694..a9d9f0320d 100644
--- a/tests/validation/reference/BatchNormalizationLayer.cpp
+++ b/tests/validation/reference/BatchNormalizationLayer.cpp
@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 2017 ARM Limited.
+ * Copyright (c) 2017-2018 ARM Limited.
  *
  * SPDX-License-Identifier: MIT
  *
@@ -23,6 +23,8 @@
  */
 #include "BatchNormalizationLayer.h"
 
+#include "ActivationLayer.h"
+
 #include "tests/validation/FixedPoint.h"
 #include "tests/validation/Helpers.h"
 
@@ -37,8 +39,9 @@ namespace reference
 // Batch Normalization Layer for fixed point type
 template ::value, int>::type *>
 SimpleTensor batch_normalization_layer(const SimpleTensor &src, const SimpleTensor &mean, const SimpleTensor &var, const SimpleTensor &beta, const SimpleTensor &gamma, float epsilon,
-                                       int fixed_point_position)
+                                       ActivationLayerInfo act_info, int fixed_point_position)
 {
+    ARM_COMPUTE_UNUSED(act_info);
     SimpleTensor result(src.shape(), src.data_type());
 
     const auto cols = static_cast(src.shape()[0]);
@@ -79,7 +82,7 @@ SimpleTensor batch_normalization_layer(const SimpleTensor &src, const Simp
 // Batch Normalization Layer for floating point type
 template ::value, int>::type *>
 SimpleTensor batch_normalization_layer(const SimpleTensor &src, const SimpleTensor &mean, const SimpleTensor &var, const SimpleTensor &beta, const SimpleTensor &gamma, float epsilon,
-                                       int fixed_point_position)
+                                       ActivationLayerInfo act_info, int fixed_point_position)
 {
     ARM_COMPUTE_UNUSED(fixed_point_position);
 
@@ -103,21 +106,28 @@ SimpleTensor batch_normalization_layer(const SimpleTensor &src, const Simp
                     const float numerator = src[pos] - mean[i];
                     const float x_bar = numerator / denominator;
                     result[pos] = beta[i] + x_bar * gamma[i];
+                    ;
                 }
             }
         }
     }
+
+    if(act_info.enabled())
+    {
+        result = activation_layer(result, act_info);
+    }
+
     return result;
 }
 
 template SimpleTensor batch_normalization_layer(const SimpleTensor &src, const SimpleTensor &mean, const SimpleTensor &var, const SimpleTensor &beta,
-                                                const SimpleTensor &gamma, float epsilon, int fixed_point_position);
+                                                const SimpleTensor &gamma, float epsilon, ActivationLayerInfo act_info, int fixed_point_position);
 template SimpleTensor batch_normalization_layer(const SimpleTensor &src, const SimpleTensor &mean, const SimpleTensor &var, const SimpleTensor &beta,
-                                                const SimpleTensor &gamma, float epsilon, int fixed_point_position);
+                                                const SimpleTensor &gamma, float epsilon, ActivationLayerInfo act_info, int fixed_point_position);
 template SimpleTensor batch_normalization_layer(const SimpleTensor &src, const SimpleTensor &mean, const SimpleTensor &var, const SimpleTensor &beta,
-                                                const SimpleTensor &gamma, float epsilon, int fixed_point_position);
+                                                const SimpleTensor &gamma, float epsilon, ActivationLayerInfo act_info, int fixed_point_position);
 template SimpleTensor batch_normalization_layer(const SimpleTensor &src, const SimpleTensor &mean, const SimpleTensor &var, const SimpleTensor &beta,
-                                                const SimpleTensor &gamma, float epsilon, int fixed_point_position);
+                                                const SimpleTensor &gamma, float epsilon, ActivationLayerInfo act_info, int fixed_point_position);
 
 } // namespace reference
 } // namespace validation
--
cgit v1.2.1
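
For context, the patch threads an ActivationLayerInfo parameter through the reference batch_normalization_layer() so that a bounded ReLU can be applied after the normalization, presumably to keep the reference in step with the fused backend kernels being validated. The sketch below is not part of the patch: it shows one plausible way a test could call the updated reference with a fused bounded ReLU. The wrapper name run_reference_bn_bounded_relu, the epsilon value, and the 6.f clamp bound are illustrative assumptions; ActivationLayerInfo, SimpleTensor, and the function signature come from the diff and the arm_compute test framework.

#include "arm_compute/core/Types.h"
#include "tests/SimpleTensor.h"
#include "tests/validation/reference/BatchNormalizationLayer.h"

using namespace arm_compute;
using namespace arm_compute::test;
using namespace arm_compute::test::validation;

// Hypothetical helper (not in the patch): run the reference batch normalization
// with a fused bounded ReLU that clamps the normalized output to [0, 6].
SimpleTensor<float> run_reference_bn_bounded_relu(const SimpleTensor<float> &src,
                                                  const SimpleTensor<float> &mean,
                                                  const SimpleTensor<float> &var,
                                                  const SimpleTensor<float> &beta,
                                                  const SimpleTensor<float> &gamma)
{
    const float               epsilon = 0.001f; // illustrative value
    const ActivationLayerInfo act_info(ActivationLayerInfo::ActivationFunction::BOUNDED_RELU, 6.f);

    // The trailing fixed_point_position argument is ignored for floating point types.
    return reference::batch_normalization_layer<float>(src, mean, var, beta, gamma, epsilon, act_info, 0);
}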