author    | Michele Di Giorgio <michele.digiorgio@arm.com> | 2018-01-22 17:26:36 +0000
committer | Anthony Barbier <anthony.barbier@arm.com>      | 2018-11-02 16:43:42 +0000
commit    | a1f7e33e10e14e7b44913d3dbb794bbd9588a1ad (patch)
tree      | 2f4a66faa20c2fa650dafb957f3a7c6c0c80cbe5 /src
parent    | 9efceafe007cddb52c8555f6fe847c81e7ad0cba (diff)
COMPMID-841: Add CL QASYMM8 RELU Activation
Change-Id: I8e0b7cad2f977942224d0116e8498bf9b2d6014d
Reviewed-on: https://eu-gerrit-1.euhpc.arm.com/117229
Reviewed-by: Georgios Pinitas <georgios.pinitas@arm.com>
Tested-by: Jenkins <bsgcomp@arm.com>
Diffstat (limited to 'src')
src/core/CL/cl_kernels/activation_layer_qa8.cl  | 8
src/core/CL/kernels/CLActivationLayerKernel.cpp | 9
2 files changed, 14 insertions(+), 3 deletions(-)
diff --git a/src/core/CL/cl_kernels/activation_layer_qa8.cl b/src/core/CL/cl_kernels/activation_layer_qa8.cl
index 02668f7ac1..cb31e99efb 100644
--- a/src/core/CL/cl_kernels/activation_layer_qa8.cl
+++ b/src/core/CL/cl_kernels/activation_layer_qa8.cl
@@ -25,10 +25,15 @@
 #define TYPE VEC_DATA_TYPE(DATA_TYPE, VEC_SIZE)
 
+// RELU Activation
+inline TYPE relu_op(TYPE x)
+{
+    return max((TYPE)CONST_0, x);
+}
 // Bounded RELU Activation
 inline TYPE brelu_op(TYPE x)
 {
-    return min((TYPE)A_VAL, max(0, x));
+    return min((TYPE)A_VAL, max(CONST_0, x));
 }
 // Lower Upper Bounded RELU Activation
 inline TYPE lu_brelu_op(TYPE x)
@@ -49,6 +54,7 @@ inline TYPE lu_brelu_op(TYPE x)
  * @note A, B variables required by some activation functions are set using -DA_VAL= and -DB_VAL= respectively.
  * @note Quantization scales of the input/output tensors are passed in with -DS1_VAL= and -DS2_VAL= respectively.
  * @note Quantization offsets of the input/output tensors are passed in with -DO1_VAL= and -DO2_VAL= respectively.
+ * @note Quantized value of constant zero should be given as a preprocessor argument using -DCONST_0=value. e.g. -DCONST_0=128.
  *
  * @param[in] input_ptr      Pointer to the source image. Supported data types: QASYMM8
  * @param[in] input_stride_x Stride of the source image in X dimension (in bytes)
diff --git a/src/core/CL/kernels/CLActivationLayerKernel.cpp b/src/core/CL/kernels/CLActivationLayerKernel.cpp
index d85de88ae2..8d4c0b82d2 100644
--- a/src/core/CL/kernels/CLActivationLayerKernel.cpp
+++ b/src/core/CL/kernels/CLActivationLayerKernel.cpp
@@ -47,8 +47,10 @@ namespace
 Status validate_arguments(const ITensorInfo *input, const ITensorInfo *output, const ActivationLayerInfo &act_info)
 {
     ARM_COMPUTE_RETURN_ERROR_ON_DATA_TYPE_CHANNEL_NOT_IN(input, 1, DataType::U8, DataType::QASYMM8, DataType::QS8, DataType::QS16, DataType::F16, DataType::F32);
-    ARM_COMPUTE_RETURN_ERROR_ON_MSG((input->data_type() == DataType::QASYMM8) && (act_info.activation() != ActivationLayerInfo::ActivationFunction::LU_BOUNDED_RELU),
-                                    "For QASYMM8 only lower/upper bounded relu is supported");
+    ARM_COMPUTE_RETURN_ERROR_ON_MSG((input->data_type() == DataType::QASYMM8) && (act_info.activation() != ActivationLayerInfo::ActivationFunction::LU_BOUNDED_RELU)
+                                    && (act_info.activation() != ActivationLayerInfo::ActivationFunction::BOUNDED_RELU)
+                                    && (act_info.activation() != ActivationLayerInfo::ActivationFunction::RELU),
+                                    "For QASYMM8 only relu, lower bounded relu and lower-upper bounded relu are supported");
 
     // Checks performed when output is configured
     if((output != nullptr) && (output->total_size() != 0))
@@ -160,6 +162,9 @@ void CLActivationLayerKernel::configure(ICLTensor *input, ICLTensor *output, Act
             build_opts.emplace(("-DO1_VAL=" + support::cpp11::to_string(o1)));
             build_opts.emplace(("-DO2_VAL=" + support::cpp11::to_string(o2)));
         }
+
+        // Quantized value of 0 corresponds to the offset o1
+        build_opts.emplace(("-DCONST_0=" + support::cpp11::to_string(o1)));
    }
    else
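For context, a minimal standalone sketch (not part of the patch, and not ComputeLibrary code) of why a QASYMM8 RELU clamps at the quantization offset rather than at 0: with asymmetric quantization, real 0.0 maps to the zero-point offset, which is exactly the value the kernel receives as -DCONST_0. The scale and offset below are illustrative assumptions.

```cpp
#include <algorithm>
#include <cmath>
#include <cstdint>
#include <iostream>

// Illustrative sketch: asymmetric QASYMM8 represents a real value r as
// q = round(r / scale) + offset, so real 0.0 quantizes to 'offset'.
// A RELU on quantized data therefore clamps at 'offset' (the kernel's CONST_0).
int main()
{
    const float   scale  = 0.05f; // assumed example quantization scale
    const int32_t offset = 128;   // assumed example zero-point (o1 in the patch)

    auto quantize = [&](float r) {
        const int32_t q = static_cast<int32_t>(std::lround(r / scale)) + offset;
        return static_cast<uint8_t>(std::min<int32_t>(255, std::max<int32_t>(0, q)));
    };

    // Quantized RELU: clamp at the quantized representation of 0.0, i.e. 'offset'.
    auto qrelu = [&](uint8_t x) {
        return std::max<uint8_t>(static_cast<uint8_t>(offset), x);
    };

    const uint8_t q_neg = quantize(-1.0f); // negative real value, below the zero-point
    const uint8_t q_pos = quantize(2.0f);  // positive real value, above the zero-point

    std::cout << "qrelu(q(-1.0)) = " << int(qrelu(q_neg)) << " (clamped to offset "
              << offset << ")\n";
    std::cout << "qrelu(q( 2.0)) = " << int(qrelu(q_pos)) << " (passed through)\n";
    return 0;
}
```

This mirrors the design choice visible in the patch: since real 0.0 quantizes to the input offset o1, configure() defines CONST_0 as o1, letting relu_op() and brelu_op() clamp directly on quantized values with max()/min().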