From bca73e1c82438f160364a113793d0a2195c665ac Mon Sep 17 00:00:00 2001
From: Jan Eilers
Date: Wed, 11 Mar 2020 12:52:46 +0000
Subject: IVGCVSW-4444 Adding Elu end to end test

* Implemented activation layer end-to-end test
* Added support for different tolerances in layer tests
* Added tests for Elu (Ref, Cl, Neon)

Signed-off-by: Jan Eilers
Change-Id: I81e28cfb4456e815bae2fb31f5c345134ff2432f
---
 include/armnn/Descriptors.hpp | 12 ++++++++++--
 1 file changed, 10 insertions(+), 2 deletions(-)

diff --git a/include/armnn/Descriptors.hpp b/include/armnn/Descriptors.hpp
index f1b29cc6c7..57917261d4 100644
--- a/include/armnn/Descriptors.hpp
+++ b/include/armnn/Descriptors.hpp
@@ -25,15 +25,23 @@ struct ActivationDescriptor
         , m_B(0)
     {}
 
+    ActivationDescriptor(armnn::ActivationFunction activation,
+                         float a = 0,
+                         float b = 0)
+        : m_Function(activation)
+        , m_A(a)
+        , m_B(b)
+    {}
+
     bool operator ==(const ActivationDescriptor &rhs) const
     {
         return m_Function == rhs.m_Function && m_A == rhs.m_A && m_B == rhs.m_B;
     }
 
     /// @brief The activation function to use
-    /// (Sigmoid, TanH, Linear, ReLu, BoundedReLu, SoftReLu, LeakyReLu, Abs, Sqrt, Square).
+    /// (Sigmoid, TanH, Linear, ReLu, BoundedReLu, SoftReLu, LeakyReLu, Abs, Sqrt, Square, Elu).
     ActivationFunction m_Function;
-    /// Alpha upper bound value used by the activation functions. (BoundedReLu, Linear, TanH).
+    /// Alpha upper bound value used by the activation functions. (BoundedReLu, Linear, TanH, Elu).
     float m_A;
     /// Beta lower bound value used by the activation functions. (BoundedReLu, Linear, TanH).
     float m_B;
-- 
cgit v1.2.1
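
For readers new to the API, here is a minimal sketch of how the convenience constructor added above might be used to wire an Elu activation layer into a network, roughly in the spirit of the end-to-end tests this commit describes. The network topology, tensor shape, binding ids, and alpha value are illustrative assumptions, not taken from the patch; only the new ActivationDescriptor constructor and ActivationFunction::Elu come from this change.

// Illustrative sketch only: uses the new three-argument ActivationDescriptor
// constructor to configure an Elu layer. Shape, alpha, and binding ids are
// assumptions for this example.
#include <armnn/ArmNN.hpp>

int main()
{
    using namespace armnn;

    INetworkPtr network = INetwork::Create();

    // One-liner enabled by the new constructor; previously each member
    // (m_Function, m_A, m_B) had to be assigned individually.
    ActivationDescriptor eluDescriptor(ActivationFunction::Elu, 1.0f);

    IConnectableLayer* input      = network->AddInputLayer(0);
    IConnectableLayer* activation = network->AddActivationLayer(eluDescriptor, "elu");
    IConnectableLayer* output     = network->AddOutputLayer(0);

    input->GetOutputSlot(0).Connect(activation->GetInputSlot(0));
    activation->GetOutputSlot(0).Connect(output->GetInputSlot(0));

    // Elu passes x through for x >= 0 and computes m_A * (exp(x) - 1) for
    // x < 0, so alpha (m_A) is the only parameter it reads; m_B stays 0.
    TensorInfo info(TensorShape({ 1, 4 }), DataType::Float32);
    input->GetOutputSlot(0).SetTensorInfo(info);
    activation->GetOutputSlot(0).SetTensorInfo(info);

    return 0;
}

Note that the default arguments (a = 0, b = 0) let call sites that only care about the function pass a single argument, while the pre-existing default constructor keeps older code compiling unchanged.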