From 1167487ea8e54a76d0a3625e0aa84e2ad9ffd317 Mon Sep 17 00:00:00 2001
From: Giorgio Arena
Date: Wed, 7 Feb 2018 15:38:12 +0000
Subject: COMPMID-897 Merge batch normalization with bounded relu

Change-Id: I9a607fe620f795cdea1a99fdd3f5f8c2fc76f980
Reviewed-on: https://eu-gerrit-1.euhpc.arm.com/119234
Tested-by: Jenkins
Reviewed-by: Gian Marco Iodice
Reviewed-by: Georgios Pinitas
---
 arm_compute/core/Types.h | 14 ++++++++++----
 1 file changed, 10 insertions(+), 4 deletions(-)

diff --git a/arm_compute/core/Types.h b/arm_compute/core/Types.h
index 5a08ac9153..3affe7e8ec 100644
--- a/arm_compute/core/Types.h
+++ b/arm_compute/core/Types.h
@@ -713,6 +713,7 @@ public:
         LINEAR      /**< Linear ( \f$ f(x)= ax + b \f$ ) */
     };
 
+    ActivationLayerInfo() = default;
     /** Default Constructor
      *
      * @param[in] f The activation function to use.
@@ -721,7 +722,7 @@ public:
      * @param[in] b (Optional) The beta parameter used by some activation functions (@ref ActivationFunction::LINEAR, @ref ActivationFunction::LU_BOUNDED_RELU, @ref ActivationFunction::TANH).
      */
     ActivationLayerInfo(ActivationFunction f, float a = 0.0f, float b = 0.0f)
-        : _act(f), _a(a), _b(b)
+        : _act(f), _a(a), _b(b), _enabled(true)
     {
     }
     ActivationFunction activation() const
@@ -736,11 +737,16 @@ public:
     {
         return _b;
     }
+    bool enabled() const
+    {
+        return _enabled;
+    }
 
 private:
-    ActivationFunction _act;
-    float              _a;
-    float              _b;
+    ActivationFunction _act     = { ActivationLayerInfo::ActivationFunction::LOGISTIC };
+    float              _a       = {};
+    float              _b       = {};
+    bool               _enabled = { false };
 };
 
 /** Normalization Layer Information class */
--
cgit v1.2.1
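
A minimal standalone sketch of the pattern this change enables: a batch-normalization routine can now carry an optional activation, and a default-constructed ActivationLayerInfo reports enabled() == false, so the caller needs no separate flag. The helpers fused_batch_normalization and apply_activation below are hypothetical illustrations, not the library's real kernels (the actual fusion lives in the backend batch-normalization kernels); only ActivationLayerInfo and its accessors come from this patch, and the LU_BOUNDED_RELU semantics f(x) = min(a, max(b, x)) are assumed from the library's documentation.

// Sketch only: a scalar "fused batch normalization" showing how a kernel
// can consult ActivationLayerInfo::enabled() to apply a bounded relu in
// the same pass. Requires the arm_compute headers on the include path.
#include <algorithm>
#include <cmath>
#include <vector>

#include "arm_compute/core/Types.h"

using arm_compute::ActivationLayerInfo;

// Apply the activation described by 'act' to one value. Only the function
// used below is handled; LU_BOUNDED_RELU is assumed to clamp x to [b, a].
float apply_activation(float x, const ActivationLayerInfo &act)
{
    if(act.activation() == ActivationLayerInfo::ActivationFunction::LU_BOUNDED_RELU)
    {
        return std::min(act.a(), std::max(act.b(), x));
    }
    return x; // other activation functions omitted in this sketch
}

// y = gamma * (x - mean) / sqrt(var + epsilon) + beta, then the optional
// fused activation. The default argument is a default-constructed
// ActivationLayerInfo, whose enabled() is false, so plain batch
// normalization works without any extra argument at the call site.
void fused_batch_normalization(std::vector<float> &x,
                               float mean, float var, float gamma, float beta, float epsilon,
                               const ActivationLayerInfo &act = ActivationLayerInfo())
{
    const float scale = gamma / std::sqrt(var + epsilon);
    for(float &v : x)
    {
        v = scale * (v - mean) + beta;
        if(act.enabled())
        {
            v = apply_activation(v, act);
        }
    }
}

int main()
{
    std::vector<float> data{ -2.0f, 0.5f, 9.0f };
    // Batch normalization fused with a relu bounded to [0, 6].
    fused_batch_normalization(data, 0.0f, 1.0f, 1.0f, 0.0f, 1e-5f,
                              ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::LU_BOUNDED_RELU, 6.0f, 0.0f));
    return 0;
}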