From fb0fdcdaec57e6f8e1b96f924411921cc0ba6d94 Mon Sep 17 00:00:00 2001
From: Georgios Pinitas
Date: Thu, 22 Aug 2019 17:10:04 +0100
Subject: COMPMID-2460: Add ELU activation on NEON,CL,GLES

Change-Id: I641b12fcfc5dc4a18d2d489c38673b937125dfb9
Signed-off-by: Georgios Pinitas
Reviewed-on: https://review.mlplatform.org/c/1796
Tested-by: Arm Jenkins
Reviewed-by: Michele Di Giorgio
Comments-Addressed: Arm Jenkins
---
 tests/validation/GLES_COMPUTE/ActivationLayer.cpp | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/tests/validation/GLES_COMPUTE/ActivationLayer.cpp b/tests/validation/GLES_COMPUTE/ActivationLayer.cpp
index 7676b858f6..fdb9d1823e 100644
--- a/tests/validation/GLES_COMPUTE/ActivationLayer.cpp
+++ b/tests/validation/GLES_COMPUTE/ActivationLayer.cpp
@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 2017-2018 ARM Limited.
+ * Copyright (c) 2017-2019 ARM Limited.
  *
  * SPDX-License-Identifier: MIT
  *
@@ -65,6 +65,7 @@ AbsoluteTolerance<float> tolerance(ActivationLayerInfo::ActivationFunction activ
         case ActivationLayerInfo::ActivationFunction::LEAKY_RELU:
             return AbsoluteTolerance<float>(data_type == DataType::F16 ? 0.00001f : epsilon);
         case ActivationLayerInfo::ActivationFunction::SOFT_RELU:
+        case ActivationLayerInfo::ActivationFunction::ELU:
         case ActivationLayerInfo::ActivationFunction::SQRT:
             return AbsoluteTolerance<float>(data_type == DataType::F16 ? 0.01f : 0.00001f);
         case ActivationLayerInfo::ActivationFunction::TANH:
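For context on why ELU joins SOFT_RELU and SQRT under the looser F16 tolerance: ELU(x) = x for x > 0 and alpha * (e^x - 1) otherwise, so its negative branch goes through exp, where half-precision rounding accumulates much as it does for SOFT_RELU. The following is a minimal standalone C++ sketch of that tolerance check; elu_ref, the sample values, and the comparison in main are illustrative assumptions, not code from this patch or the library.

#include <cmath>
#include <cstdio>

// Reference ELU: identity for positive inputs, alpha * (e^x - 1) otherwise.
// Illustrative helper, not part of the patch.
float elu_ref(float x, float alpha = 1.0f)
{
    return (x > 0.0f) ? x : alpha * (std::exp(x) - 1.0f);
}

int main()
{
    // The patch groups ELU with SOFT_RELU/SQRT, i.e. an absolute tolerance
    // of 0.01f for F16 and 0.00001f for F32.
    const float tolerance_f16 = 0.01f;

    const float backend_out = -0.6320f;       // hypothetical F16 backend result for x = -1
    const float reference   = elu_ref(-1.0f); // ~ -0.63212

    // A validation-style check: pass if the absolute error is within tolerance.
    std::printf("within tolerance: %d\n",
                std::fabs(backend_out - reference) <= tolerance_f16);
    return 0;
}

Run as-is this prints "within tolerance: 1", since the ~0.00012 error between the hypothetical F16 result and the reference sits well inside the 0.01f bound the patch assigns to ELU for F16.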