From 64ebe5b392b8135ec939b63596ffb8567a3e3248 Mon Sep 17 00:00:00 2001 From: Georgios Pinitas Date: Fri, 1 Sep 2017 17:44:24 +0100 Subject: COMPMID-519: Add support for Lower and Upper Bounded RELU for CL/NEON Change-Id: I7b16216ac59c899a33942bf17757b54535256d7a Reviewed-on: http://mpd-gerrit.cambridge.arm.com/86172 Tested-by: Kaizen Reviewed-by: Anthony Barbier --- arm_compute/core/Types.h | 25 +++++++++++++------------ 1 file changed, 13 insertions(+), 12 deletions(-) (limited to 'arm_compute/core') diff --git a/arm_compute/core/Types.h b/arm_compute/core/Types.h index 5eaaee6b7b..b90798e5ff 100644 --- a/arm_compute/core/Types.h +++ b/arm_compute/core/Types.h @@ -509,24 +509,25 @@ public: /** Available activation functions */ enum class ActivationFunction { - LOGISTIC, /**< Logistic */ - TANH, /**< Hyperbolic tangent */ - RELU, /**< Rectifier */ - BOUNDED_RELU, /**< Bounded Rectifier */ - LEAKY_RELU, /**< Leaky Rectifier */ - SOFT_RELU, /**< Soft Rectifier */ - ABS, /**< Absolute */ - SQUARE, /**< Square */ - SQRT, /**< Square root */ - LINEAR /**< Linear */ + LOGISTIC, /**< Logistic ( \f$ f(x) = \frac{1}{1 + e^{-x}} \f$ ) */ + TANH, /**< Hyperbolic tangent ( \f$ f(x) = a \cdot tanh(b \cdot x) \f$ ) */ + RELU, /**< Rectifier ( \f$ f(x) = max(0,x) \f$ ) */ + BOUNDED_RELU, /**< Upper Bounded Rectifier ( \f$ f(x) = min(a, max(0,x)) \f$ ) */ + LU_BOUNDED_RELU, /**< Lower and Upper Bounded Rectifier ( \f$ f(x) = min(a, max(b,x)) \f$ ) */ + LEAKY_RELU, /**< Leaky Rectifier ( \f$ f(x) = a \cdot x \text{ if } x < 0, x \text{ otherwise} \f$ ) */ + SOFT_RELU, /**< Soft Rectifier ( \f$ f(x)= log(1+e^x) \f$ ) */ + ABS, /**< Absolute ( \f$ f(x)= |x| \f$ ) */ + SQUARE, /**< Square ( \f$ f(x)= x^2 \f$ )*/ + SQRT, /**< Square root ( \f$ f(x) = \sqrt{x} \f$ )*/ + LINEAR /**< Linear ( \f$ f(x)= ax + b \f$ ) */ }; /** Default Constructor * * @param[in] f The activation function to use. 
* @param[in] a (Optional) The alpha parameter used by some activation functions - * (@ref ActivationFunction::BOUNDED_RELU, @ref ActivationFunction::LINEAR, @ref ActivationFunction::TANH). - * @param[in] b (Optional) The beta parameter used by some activation functions (@ref ActivationFunction::LINEAR, @ref ActivationFunction::TANH). + * (@ref ActivationFunction::BOUNDED_RELU, @ref ActivationFunction::LU_BOUNDED_RELU, @ref ActivationFunction::LINEAR, @ref ActivationFunction::TANH). + * @param[in] b (Optional) The beta parameter used by some activation functions (@ref ActivationFunction::LINEAR, @ref ActivationFunction::LU_BOUNDED_RELU, @ref ActivationFunction::TANH). */ ActivationLayerInfo(ActivationFunction f, float a = 0.0f, float b = 0.0f) : _act(f), _a(a), _b(b) -- cgit v1.2.1