| author | Sang-Hoon Park <sang-hoon.park@arm.com> | 2020-11-25 11:46:03 +0000 |
|---|---|---|
| committer | Sang-Hoon Park <sang-hoon.park@arm.com> | 2020-12-02 10:18:46 +0000 |
| commit | add8e815ea94c8f8e6b1c9faf18527695f1332ec (patch) | |
| tree | 5941064344b426d12bc76b2fba3d0c631e796088 /src/core/CL | |
| parent | 4ffc42afafc8e6eee9917ac27b4bc510973335bf (diff) | |
COMPMID-3862: Add support for QASYMM8 LEAKY RELU activation
- LEAKY RELU activation is now supported for the QASYMM8 data type
- vquantize on the NEON side has been modified to match the other backends (OpenCL and reference)
Change-Id: I194631225c8d4f3cc96027d64812ec2be2b4328a
Signed-off-by: Sang-Hoon Park <sang-hoon.park@arm.com>
Reviewed-on: https://review.mlplatform.org/c/ml/ComputeLibrary/+/4593
Tested-by: Arm Jenkins <bsgcomp@arm.com>
Reviewed-by: Georgios Pinitas <georgios.pinitas@arm.com>
Reviewed-by: Manuel Bottini <manuel.bottini@arm.com>
Comments-Addressed: Arm Jenkins <bsgcomp@arm.com>
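For context, the diff below adds LEAKY_RELU to the set of activations that the CL kernel performs in float for quantized types: the value is dequantized with the input quantization parameters, the activation is applied in float, and the result is requantized with the output parameters. The following is a minimal scalar sketch of that float path, not library code; the helper name and parameters are hypothetical, and std::lround is just one rounding choice (aligning rounding behaviour across backends is what the vquantize change in this commit addresses).

```cpp
#include <algorithm>
#include <cmath>
#include <cstdint>

// Hypothetical scalar sketch of the float path for quantized LEAKY RELU:
// dequantize, apply f(x) = x > 0 ? x : alpha * x, then requantize.
uint8_t leaky_relu_qasymm8(uint8_t in, float alpha,
                           float in_scale, int32_t in_offset,
                           float out_scale, int32_t out_offset)
{
    const float x = (static_cast<int32_t>(in) - in_offset) * in_scale; // dequantize
    const float y = x > 0.f ? x : alpha * x;                           // leaky ReLU in float
    const int32_t q = static_cast<int32_t>(std::lround(y / out_scale)) + out_offset; // requantize
    return static_cast<uint8_t>(std::min(255, std::max(0, q)));        // clamp to QASYMM8 range
}
```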
Diffstat (limited to 'src/core/CL')
-rw-r--r-- | src/core/CL/kernels/CLActivationLayerKernel.cpp | 10 |
1 file changed, 7 insertions(+), 3 deletions(-)
diff --git a/src/core/CL/kernels/CLActivationLayerKernel.cpp b/src/core/CL/kernels/CLActivationLayerKernel.cpp
index 9f9538cb76..00b6829e91 100644
--- a/src/core/CL/kernels/CLActivationLayerKernel.cpp
+++ b/src/core/CL/kernels/CLActivationLayerKernel.cpp
@@ -53,14 +53,15 @@ Status validate_arguments(const ITensorInfo *input, const ITensorInfo *output, c
         ActivationLayerInfo::ActivationFunction::BOUNDED_RELU,
         ActivationLayerInfo::ActivationFunction::LOGISTIC,
         ActivationLayerInfo::ActivationFunction::TANH,
-        ActivationLayerInfo::ActivationFunction::HARD_SWISH
+        ActivationLayerInfo::ActivationFunction::HARD_SWISH,
+        ActivationLayerInfo::ActivationFunction::LEAKY_RELU,
     };
     const DataType data_type = input->data_type();
     const QuantizationInfo &oq_info = (output != nullptr) ? output->quantization_info() : input->quantization_info();
     const ActivationLayerInfo::ActivationFunction f_act = act_info.activation();
 
     ARM_COMPUTE_RETURN_ERROR_ON_MSG(is_data_type_quantized(data_type) && (quantized_supported_activations.count(f_act) == 0),
-                                    "For Quantized data type only tanh, logistic, relu and lower/upper bounded relu are supported");
+                                    "For Quantized data type only hard swish, leaky relu, tanh, logistic, relu and lower/upper bounded relu are supported");
 
     ARM_COMPUTE_RETURN_ERROR_ON(data_type == DataType::QASYMM8 && (f_act == ActivationLayerInfo::ActivationFunction::TANH) && (oq_info != QuantizationInfo(1.f / 128.f, 128)));
     ARM_COMPUTE_RETURN_ERROR_ON(data_type == DataType::QASYMM8 && (f_act == ActivationLayerInfo::ActivationFunction::LOGISTIC) && (oq_info != QuantizationInfo(1.f / 256.f, 0)));
@@ -112,7 +113,10 @@ void CLActivationLayerKernel::configure(const CLCompileContext &compile_context,
     const ActivationLayerInfo::ActivationFunction f_act        = act_info.activation();
     const bool                                    is_quantized = is_data_type_quantized(dt);
     const bool                                    perform_activation_in_float =
-        (f_act == ActivationLayerInfo::ActivationFunction::LOGISTIC) || (f_act == ActivationLayerInfo::ActivationFunction::TANH) || (f_act == ActivationLayerInfo::ActivationFunction::HARD_SWISH);
+        (f_act == ActivationLayerInfo::ActivationFunction::LOGISTIC)
+        || (f_act == ActivationLayerInfo::ActivationFunction::TANH)
+        || (f_act == ActivationLayerInfo::ActivationFunction::HARD_SWISH)
+        || (f_act == ActivationLayerInfo::ActivationFunction::LEAKY_RELU);
 
     // Set build options
     CLBuildOptions build_opts;
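As a usage note, once this lands, leaky ReLU on a QASYMM8 tensor should be expressible through the public runtime API. A minimal sketch, assuming the CLActivationLayer runtime interface at this revision; the tensor shape, quantization parameters, and the alpha value of 0.1 are illustrative, and filling the input tensor (via map/unmap) is omitted:

```cpp
#include "arm_compute/runtime/CL/CLScheduler.h"
#include "arm_compute/runtime/CL/CLTensor.h"
#include "arm_compute/runtime/CL/functions/CLActivationLayer.h"

using namespace arm_compute;

int main()
{
    CLScheduler::get().default_init(); // create context/queue for the default device

    // QASYMM8 tensors; the shape and (scale, offset) here are illustrative.
    const TensorInfo info(TensorShape(16U, 16U), 1, DataType::QASYMM8, QuantizationInfo(0.25f, 10));
    CLTensor src, dst;
    src.allocator()->init(info);
    dst.allocator()->init(info);

    // LEAKY_RELU takes the negative slope as the 'a' parameter of ActivationLayerInfo.
    CLActivationLayer act;
    act.configure(&src, &dst, ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::LEAKY_RELU, 0.1f));

    src.allocator()->allocate();
    dst.allocator()->allocate();

    act.run();
    CLScheduler::get().sync();
    return 0;
}
```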