author    Georgios Pinitas <georgios.pinitas@arm.com>   2017-09-01 17:44:24 +0100
committer Anthony Barbier <anthony.barbier@arm.com>     2018-11-02 16:35:24 +0000
commit    64ebe5b392b8135ec939b63596ffb8567a3e3248 (patch)
tree      9291ce93dd474eee8d2d59b7b391e62b32e56cde /arm_compute/core/Types.h
parent    a09de0c8b2ed0f1481502d3b023375609362d9e3 (diff)
download  ComputeLibrary-64ebe5b392b8135ec939b63596ffb8567a3e3248.tar.gz
COMPMID-519: Add support for Lower and Upper Bounded RELU for CL/NEON
Change-Id: I7b16216ac59c899a33942bf17757b54535256d7a
Reviewed-on: http://mpd-gerrit.cambridge.arm.com/86172
Tested-by: Kaizen <jeremy.johnson+kaizengerrit@arm.com>
Reviewed-by: Anthony Barbier <anthony.barbier@arm.com>
Diffstat (limited to 'arm_compute/core/Types.h')
-rw-r--r--  arm_compute/core/Types.h | 25
1 file changed, 13 insertions(+), 12 deletions(-)
diff --git a/arm_compute/core/Types.h b/arm_compute/core/Types.h
index 5eaaee6b7b..b90798e5ff 100644
--- a/arm_compute/core/Types.h
+++ b/arm_compute/core/Types.h
@@ -509,24 +509,25 @@ public:
/** Available activation functions */
enum class ActivationFunction
{
- LOGISTIC, /**< Logistic */
- TANH, /**< Hyperbolic tangent */
- RELU, /**< Rectifier */
- BOUNDED_RELU, /**< Bounded Rectifier */
- LEAKY_RELU, /**< Leaky Rectifier */
- SOFT_RELU, /**< Soft Rectifier */
- ABS, /**< Absolute */
- SQUARE, /**< Square */
- SQRT, /**< Square root */
- LINEAR /**< Linear */
+ LOGISTIC, /**< Logistic ( \f$ f(x) = \frac{1}{1 + e^{-x}} \f$ ) */
+ TANH, /**< Hyperbolic tangent ( \f$ f(x) = a \cdot tanh(b \cdot x) \f$ ) */
+ RELU, /**< Rectifier ( \f$ f(x) = max(0,x) \f$ ) */
+ BOUNDED_RELU, /**< Upper Bounded Rectifier ( \f$ f(x) = min(a, max(0,x)) \f$ ) */
+ LU_BOUNDED_RELU, /**< Lower and Upper Bounded Rectifier ( \f$ f(x) = min(a, max(b,x)) \f$ ) */
+ LEAKY_RELU, /**< Leaky Rectifier ( \f$ f(x) = \begin{cases} a x & \text{if } x < 0 \\ x & \text{if } x \geq 0 \end{cases} \f$ ) */
+ SOFT_RELU, /**< Soft Rectifier ( \f$ f(x) = log(1 + e^x) \f$ ) */
+ ABS, /**< Absolute ( \f$ f(x) = |x| \f$ ) */
+ SQUARE, /**< Square ( \f$ f(x) = x^2 \f$ ) */
+ SQRT, /**< Square root ( \f$ f(x) = \sqrt{x} \f$ ) */
+ LINEAR /**< Linear ( \f$ f(x) = ax + b \f$ ) */
};
/** Default Constructor
*
* @param[in] f The activation function to use.
* @param[in] a (Optional) The alpha parameter used by some activation functions
- * (@ref ActivationFunction::BOUNDED_RELU, @ref ActivationFunction::LINEAR, @ref ActivationFunction::TANH).
- * @param[in] b (Optional) The beta parameter used by some activation functions (@ref ActivationFunction::LINEAR, @ref ActivationFunction::TANH).
+ * (@ref ActivationFunction::BOUNDED_RELU, @ref ActivationFunction::LU_BOUNDED_RELU, @ref ActivationFunction::LINEAR, @ref ActivationFunction::TANH).
+ * @param[in] b (Optional) The beta parameter used by some activation functions (@ref ActivationFunction::LINEAR, @ref ActivationFunction::LU_BOUNDED_RELU, @ref ActivationFunction::TANH).
*/
ActivationLayerInfo(ActivationFunction f, float a = 0.0f, float b = 0.0f)
: _act(f), _a(a), _b(b)
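
For context, the sketch below (not part of the patch) shows what the new LU_BOUNDED_RELU computes and how it would be requested through the ActivationLayerInfo constructor shown above. The lu_bounded_relu helper, the relu6_info name, and the main() wrapper are illustrative assumptions; only the enum value and the constructor signature come from the diff itself.

    #include <algorithm>

    #include "arm_compute/core/Types.h"

    using arm_compute::ActivationLayerInfo;

    // Illustrative reference implementation (not part of the library) of the
    // element-wise function selected by LU_BOUNDED_RELU:
    // f(x) = min(a, max(b, x)).
    inline float lu_bounded_relu(float x, float a, float b)
    {
        return std::min(a, std::max(b, x));
    }

    int main()
    {
        // A ReLU6-style clamp to [0, 6]: 'a' is the upper bound and 'b' the
        // lower bound, matching the constructor parameters documented above.
        const ActivationLayerInfo relu6_info(
            ActivationLayerInfo::ActivationFunction::LU_BOUNDED_RELU,
            6.0f /* a: upper bound */,
            0.0f /* b: lower bound */);

        // relu6_info would then be passed to the CL/NEON activation layers
        // (e.g. CLActivationLayer::configure() / NEActivationLayer::configure())
        // together with the input and output tensors.
        (void)relu6_info;
        return 0;
    }

With a = 6 and b = 0 this reproduces the existing BOUNDED_RELU behaviour plus a configurable lower bound; choosing b < 0 (e.g. a = 1, b = -1) clamps activations to [-1, 1], which the upper-bounded-only variant cannot express.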