From add8e815ea94c8f8e6b1c9faf18527695f1332ec Mon Sep 17 00:00:00 2001
From: Sang-Hoon Park
Date: Wed, 25 Nov 2020 11:46:03 +0000
Subject: COMPMID-3862: Add support QASYMM8 LEAKY RELU activation

- LEAKY RELU activation is supported for QASYMM8 data type
- vquantize on NEON side has been modified to match with other backends
  (OpenCL and reference)

Change-Id: I194631225c8d4f3cc96027d64812ec2be2b4328a
Signed-off-by: Sang-Hoon Park
Reviewed-on: https://review.mlplatform.org/c/ml/ComputeLibrary/+/4593
Tested-by: Arm Jenkins
Reviewed-by: Georgios Pinitas
Reviewed-by: Manuel Bottini
Comments-Addressed: Arm Jenkins
---
 src/core/NEON/kernels/NEActivationLayerKernel.cpp | 5 +++--
 1 file changed, 3 insertions(+), 2 deletions(-)

diff --git a/src/core/NEON/kernels/NEActivationLayerKernel.cpp b/src/core/NEON/kernels/NEActivationLayerKernel.cpp
index 51257cb96b..d969fd8e38 100644
--- a/src/core/NEON/kernels/NEActivationLayerKernel.cpp
+++ b/src/core/NEON/kernels/NEActivationLayerKernel.cpp
@@ -110,7 +110,8 @@ Status validate_arguments(const ITensorInfo *input, const ITensorInfo *output, c
         ActivationLayerInfo::ActivationFunction::BOUNDED_RELU,
         ActivationLayerInfo::ActivationFunction::LOGISTIC,
         ActivationLayerInfo::ActivationFunction::TANH,
-        ActivationLayerInfo::ActivationFunction::HARD_SWISH
+        ActivationLayerInfo::ActivationFunction::HARD_SWISH,
+        ActivationLayerInfo::ActivationFunction::LEAKY_RELU,
     };
     const static std::set<ActivationLayerInfo::ActivationFunction> qsymm16_supported_activations =
     {
@@ -123,7 +124,7 @@ Status validate_arguments(const ITensorInfo *input, const ITensorInfo *output, c
     const ActivationLayerInfo::ActivationFunction f_act = activation_info.activation();
 
     ARM_COMPUTE_RETURN_ERROR_ON_MSG(is_data_type_quantized_asymmetric(data_type) && (qasymm8_supported_activations.count(f_act) == 0),
-                                    "For QASYMM8 only tanh, logistic, relu and lower/upper bounded relu are supported");
+                                    "For QASYMM8 only hard swish, leaky relu, tanh, logistic, relu and lower/upper bounded relu are supported");
 
     ARM_COMPUTE_RETURN_ERROR_ON_MSG(is_data_type_quantized_symmetric(data_type) && (qsymm16_supported_activations.count(f_act) == 0),
                                     "For QSYMM16 only tanh and logistic are supported");
-- 
cgit v1.2.1
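
Note: the hunks shown here (limited to NEActivationLayerKernel.cpp) only extend the validation whitelist in validate_arguments(); the vectorized QASYMM8 LEAKY RELU computation itself lives elsewhere in the kernel. For context, a minimal scalar sketch of what a quantized leaky ReLU computes is given below: dequantize with the input scale/offset, apply f(x) = x for x >= 0 and alpha * x otherwise, then requantize with the output scale/offset and saturate. This is an illustrative assumption, not the NEON implementation added by this commit, and the QuantInfo struct and helper name are hypothetical.

#include <algorithm>
#include <cmath>
#include <cstdint>

// Hypothetical quantization parameters for illustration only; the real kernel
// reads these from the input/output tensors' UniformQuantizationInfo.
struct QuantInfo
{
    float   scale;
    int32_t offset;
};

// Scalar reference for LEAKY_RELU on a QASYMM8 (uint8) value:
// dequantize -> f(x) = x >= 0 ? x : alpha * x -> requantize.
inline uint8_t leaky_relu_qasymm8(uint8_t in, float alpha, QuantInfo qi_in, QuantInfo qi_out)
{
    const float x = (static_cast<int32_t>(in) - qi_in.offset) * qi_in.scale;          // dequantize
    const float y = (x >= 0.f) ? x : alpha * x;                                       // leaky ReLU
    const int   q = static_cast<int>(std::lround(y / qi_out.scale)) + qi_out.offset;  // requantize
    return static_cast<uint8_t>(std::max(0, std::min(255, q)));                       // saturate to [0, 255]
}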