From fb0fdcdaec57e6f8e1b96f924411921cc0ba6d94 Mon Sep 17 00:00:00 2001
From: Georgios Pinitas
Date: Thu, 22 Aug 2019 17:10:04 +0100
Subject: COMPMID-2460: Add ELU activation on NEON,CL,GLES

Change-Id: I641b12fcfc5dc4a18d2d489c38673b937125dfb9
Signed-off-by: Georgios Pinitas
Reviewed-on: https://review.mlplatform.org/c/1796
Tested-by: Arm Jenkins
Reviewed-by: Michele Di Giorgio
Comments-Addressed: Arm Jenkins
---
 tests/validation/NEON/ActivationLayer.cpp | 1 +
 1 file changed, 1 insertion(+)

diff --git a/tests/validation/NEON/ActivationLayer.cpp b/tests/validation/NEON/ActivationLayer.cpp
index 1174a055c7..eb3a37fba7 100644
--- a/tests/validation/NEON/ActivationLayer.cpp
+++ b/tests/validation/NEON/ActivationLayer.cpp
@@ -56,6 +56,7 @@ RelativeTolerance<float> relative_tolerance(DataType data_type, ActivationLayerI
     {
         case ActivationLayerInfo::ActivationFunction::LOGISTIC:
         case ActivationLayerInfo::ActivationFunction::SOFT_RELU:
+        case ActivationLayerInfo::ActivationFunction::ELU:
         case ActivationLayerInfo::ActivationFunction::SQRT:
         case ActivationLayerInfo::ActivationFunction::TANH:
             switch(data_type)
--
cgit v1.2.1
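
Note: the hunk above only adds ELU to the list of activations that share a relaxed relative tolerance in the NEON validation suite; the exp()-based math is what makes a relaxed tolerance plausible. The following is a minimal, self-contained C++ sketch of the standard ELU formula, f(x) = x for x > 0 and alpha * (exp(x) - 1) otherwise. It is not ComputeLibrary's implementation, and the helper name elu_ref is hypothetical.

    // Hypothetical reference sketch of ELU (not the library's kernel code).
    #include <cmath>
    #include <cstdio>

    static float elu_ref(float x, float alpha = 1.0f)
    {
        // f(x) = x           , x > 0
        // f(x) = a*(e^x - 1) , x <= 0
        return x > 0.0f ? x : alpha * (std::exp(x) - 1.0f);
    }

    int main()
    {
        // The exp() term is the source of the extra rounding error that
        // motivates grouping ELU with LOGISTIC, SOFT_RELU, SQRT and TANH
        // when picking a relative tolerance for the tests.
        std::printf("elu(-1.0) = %f\n", elu_ref(-1.0f)); // ~ -0.632121
        std::printf("elu( 2.0) = %f\n", elu_ref(2.0f));  // 2.0
        return 0;
    }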