From add8e815ea94c8f8e6b1c9faf18527695f1332ec Mon Sep 17 00:00:00 2001
From: Sang-Hoon Park
Date: Wed, 25 Nov 2020 11:46:03 +0000
Subject: COMPMID-3862: Add support for QASYMM8 LEAKY RELU activation

- LEAKY RELU activation is now supported for the QASYMM8 data type
- vquantize on the NEON side has been modified to match the other
  backends (OpenCL and reference)

Change-Id: I194631225c8d4f3cc96027d64812ec2be2b4328a
Signed-off-by: Sang-Hoon Park
Reviewed-on: https://review.mlplatform.org/c/ml/ComputeLibrary/+/4593
Tested-by: Arm Jenkins
Reviewed-by: Georgios Pinitas
Reviewed-by: Manuel Bottini
Comments-Addressed: Arm Jenkins
---
 src/core/CL/kernels/CLActivationLayerKernel.cpp | 10 +++++++---
 1 file changed, 7 insertions(+), 3 deletions(-)

(limited to 'src/core/CL')

diff --git a/src/core/CL/kernels/CLActivationLayerKernel.cpp b/src/core/CL/kernels/CLActivationLayerKernel.cpp
index 9f9538cb76..00b6829e91 100644
--- a/src/core/CL/kernels/CLActivationLayerKernel.cpp
+++ b/src/core/CL/kernels/CLActivationLayerKernel.cpp
@@ -53,14 +53,15 @@ Status validate_arguments(const ITensorInfo *input, const ITensorInfo *output, c
         ActivationLayerInfo::ActivationFunction::BOUNDED_RELU,
         ActivationLayerInfo::ActivationFunction::LOGISTIC,
         ActivationLayerInfo::ActivationFunction::TANH,
-        ActivationLayerInfo::ActivationFunction::HARD_SWISH
+        ActivationLayerInfo::ActivationFunction::HARD_SWISH,
+        ActivationLayerInfo::ActivationFunction::LEAKY_RELU,
     };
     const DataType                                data_type = input->data_type();
     const QuantizationInfo                       &oq_info   = (output != nullptr) ? output->quantization_info() : input->quantization_info();
     const ActivationLayerInfo::ActivationFunction f_act     = act_info.activation();
 
     ARM_COMPUTE_RETURN_ERROR_ON_MSG(is_data_type_quantized(data_type) && (quantized_supported_activations.count(f_act) == 0),
-                                    "For Quantized data type only tanh, logistic, relu and lower/upper bounded relu are supported");
+                                    "For Quantized data type only hard swish, leaky relu, tanh, logistic, relu and lower/upper bounded relu are supported");
 
     ARM_COMPUTE_RETURN_ERROR_ON(data_type == DataType::QASYMM8 && (f_act == ActivationLayerInfo::ActivationFunction::TANH) && (oq_info != QuantizationInfo(1.f / 128.f, 128)));
     ARM_COMPUTE_RETURN_ERROR_ON(data_type == DataType::QASYMM8 && (f_act == ActivationLayerInfo::ActivationFunction::LOGISTIC) && (oq_info != QuantizationInfo(1.f / 256.f, 0)));
@@ -112,7 +113,10 @@ void CLActivationLayerKernel::configure(const CLCompileContext &compile_context,
     const ActivationLayerInfo::ActivationFunction f_act                       = act_info.activation();
     const bool                                    is_quantized                = is_data_type_quantized(dt);
     const bool                                    perform_activation_in_float =
-        (f_act == ActivationLayerInfo::ActivationFunction::LOGISTIC) || (f_act == ActivationLayerInfo::ActivationFunction::TANH) || (f_act == ActivationLayerInfo::ActivationFunction::HARD_SWISH);
+        (f_act == ActivationLayerInfo::ActivationFunction::LOGISTIC)
+        || (f_act == ActivationLayerInfo::ActivationFunction::TANH)
+        || (f_act == ActivationLayerInfo::ActivationFunction::HARD_SWISH)
+        || (f_act == ActivationLayerInfo::ActivationFunction::LEAKY_RELU);
 
     // Set build options
     CLBuildOptions build_opts;
-- 
cgit v1.2.1
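
For context beyond the diff: with this change, LEAKY RELU joins LOGISTIC, TANH and HARD_SWISH in the perform_activation_in_float path, i.e. for QASYMM8 the kernel dequantizes each value, applies the activation in float, and requantizes with the output quantization info. Below is a minimal standalone C++ sketch of that dequantize -> activate -> requantize round trip; the helper names (QuantInfo, dequantize, quantize, leaky_relu_qasymm8) are hypothetical and not ComputeLibrary API, and the rounding may differ from the library's reference implementation.

#include <algorithm>
#include <cmath>
#include <cstdint>
#include <cstdio>

// Hypothetical 8-bit asymmetric quantization parameters.
struct QuantInfo
{
    float   scale;
    int32_t offset;
};

// Map a QASYMM8 value to its real-valued equivalent.
static float dequantize(uint8_t q, QuantInfo qi)
{
    return (static_cast<int32_t>(q) - qi.offset) * qi.scale;
}

// Map a real value back to QASYMM8: round to nearest, then saturate to [0, 255].
static uint8_t quantize(float x, QuantInfo qi)
{
    const int32_t q = static_cast<int32_t>(std::lround(x / qi.scale)) + qi.offset;
    return static_cast<uint8_t>(std::min(std::max(q, 0), 255));
}

// LEAKY RELU computed in float: f(x) = x for x > 0, alpha * x otherwise.
static uint8_t leaky_relu_qasymm8(uint8_t in, float alpha, QuantInfo iq, QuantInfo oq)
{
    const float x = dequantize(in, iq);
    const float y = (x > 0.f) ? x : alpha * x;
    return quantize(y, oq);
}

int main()
{
    const QuantInfo qi{ 0.5f, 10 }; // example scale/offset
    // (4 - 10) * 0.5 = -3.0 -> 0.1 * -3.0 = -0.3 -> lround(-0.6) + 10 = 9
    std::printf("%d\n", leaky_relu_qasymm8(4, 0.1f, qi, qi)); // prints 9
}

The asymmetric offset is what lets a small negative result such as -0.3 survive requantization into the unsigned 8-bit range; treat the sketch only as an illustration of the data flow selected by perform_activation_in_float.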