From 7485d5a62685cb745ab50e970adb722cb71557ac Mon Sep 17 00:00:00 2001
From: Vidhya Sudhan Loganathan
Date: Wed, 4 Jul 2018 09:34:00 +0100
Subject: COMPMID-970 : Remove QS8 / QS16 support

Removed fixed point related code.

Change-Id: I487acf138dace3b0450e0d72ca7071eaec254566
Reviewed-on: https://eu-gerrit-1.euhpc.arm.com/137678
Tested-by: Jenkins
Reviewed-by: Anthony Barbier
---
 src/core/NEON/kernels/NEActivationLayerKernel.cpp | 235 +---------------------
 1 file changed, 1 insertion(+), 234 deletions(-)

diff --git a/src/core/NEON/kernels/NEActivationLayerKernel.cpp b/src/core/NEON/kernels/NEActivationLayerKernel.cpp
index ec125154a4..bdc93ed1b8 100644
--- a/src/core/NEON/kernels/NEActivationLayerKernel.cpp
+++ b/src/core/NEON/kernels/NEActivationLayerKernel.cpp
@@ -23,7 +23,6 @@
  */
 #include "arm_compute/core/NEON/kernels/NEActivationLayerKernel.h"
 
-#include "arm_compute/core/FixedPoint.h"
 #include "arm_compute/core/Helpers.h"
 #include "arm_compute/core/ITensor.h"
 #include "arm_compute/core/NEON/NEAsymm.h"
@@ -46,14 +45,13 @@ namespace
 Status validate_arguments(const ITensorInfo *input, const ITensorInfo *output)
 {
     ARM_COMPUTE_RETURN_ERROR_ON_NULLPTR(input);
-    ARM_COMPUTE_RETURN_ERROR_ON_DATA_TYPE_CHANNEL_NOT_IN(input, 1, DataType::U8, DataType::QASYMM8, DataType::QS8, DataType::QS16, DataType::F16, DataType::F32);
+    ARM_COMPUTE_RETURN_ERROR_ON_DATA_TYPE_CHANNEL_NOT_IN(input, 1, DataType::U8, DataType::QASYMM8, DataType::F16, DataType::F32);
 
     // Checks performed when output is configured
     if((output != nullptr) && (output->total_size() != 0))
     {
         ARM_COMPUTE_RETURN_ERROR_ON_MISMATCHING_SHAPES(input, output);
         ARM_COMPUTE_RETURN_ERROR_ON_MISMATCHING_DATA_TYPES(input, output);
-        ARM_COMPUTE_RETURN_ERROR_ON_MISMATCHING_FIXED_POINT(input, output);
     }
 
     return Status{};
@@ -146,36 +144,6 @@ void NEActivationLayerKernel::configure(ITensor *input, ITensor *output, Activat
     };
 #endif /* __ARM_FEATURE_FP16_VECTOR_ARITHMETIC*/
 
-    // Activation functions : QS8
-    static std::map<ActivationFunction, ActivationFunctionExecutorPtr> act_map_qs8 =
-    {
-        { ActivationFunction::ABS, &NEActivationLayerKernel::activation<ActivationFunction::ABS, qint8_t> },
-        { ActivationFunction::LINEAR, &NEActivationLayerKernel::activation<ActivationFunction::LINEAR, qint8_t> },
-        { ActivationFunction::LOGISTIC, &NEActivationLayerKernel::activation<ActivationFunction::LOGISTIC, qint8_t> },
-        { ActivationFunction::RELU, &NEActivationLayerKernel::activation<ActivationFunction::RELU, qint8_t> },
-        { ActivationFunction::BOUNDED_RELU, &NEActivationLayerKernel::activation<ActivationFunction::BOUNDED_RELU, qint8_t> },
-        { ActivationFunction::LU_BOUNDED_RELU, &NEActivationLayerKernel::activation<ActivationFunction::LU_BOUNDED_RELU, qint8_t> },
-        { ActivationFunction::LEAKY_RELU, &NEActivationLayerKernel::activation<ActivationFunction::LEAKY_RELU, qint8_t> },
-        { ActivationFunction::SOFT_RELU, &NEActivationLayerKernel::activation<ActivationFunction::SOFT_RELU, qint8_t> },
-        { ActivationFunction::SQRT, &NEActivationLayerKernel::activation<ActivationFunction::SQRT, qint8_t> },
-        { ActivationFunction::SQUARE, &NEActivationLayerKernel::activation<ActivationFunction::SQUARE, qint8_t> },
-        { ActivationFunction::TANH, &NEActivationLayerKernel::activation<ActivationFunction::TANH, qint8_t> },
-    };
-    // Activation functions : QS16
-    static std::map<ActivationFunction, ActivationFunctionExecutorPtr> act_map_qs16 =
-    {
-        { ActivationFunction::ABS, &NEActivationLayerKernel::activation<ActivationFunction::ABS, qint16_t> },
-        { ActivationFunction::LINEAR, &NEActivationLayerKernel::activation<ActivationFunction::LINEAR, qint16_t> },
-        { ActivationFunction::LOGISTIC, &NEActivationLayerKernel::activation<ActivationFunction::LOGISTIC, qint16_t> },
-        { ActivationFunction::RELU, &NEActivationLayerKernel::activation<ActivationFunction::RELU, qint16_t> },
-        { ActivationFunction::BOUNDED_RELU, &NEActivationLayerKernel::activation<ActivationFunction::BOUNDED_RELU, qint16_t> },
-        { ActivationFunction::LU_BOUNDED_RELU, &NEActivationLayerKernel::activation<ActivationFunction::LU_BOUNDED_RELU, qint16_t> },
-        { ActivationFunction::LEAKY_RELU, &NEActivationLayerKernel::activation<ActivationFunction::LEAKY_RELU, qint16_t> },
-        { ActivationFunction::SOFT_RELU, &NEActivationLayerKernel::activation<ActivationFunction::SOFT_RELU, qint16_t> },
-        { ActivationFunction::SQRT, &NEActivationLayerKernel::activation<ActivationFunction::SQRT, qint16_t> },
-        { ActivationFunction::SQUARE, &NEActivationLayerKernel::activation<ActivationFunction::SQUARE, qint16_t> },
-        { ActivationFunction::TANH, &NEActivationLayerKernel::activation<ActivationFunction::TANH, qint16_t> },
-    };
     // Activation functions : QASYMM8
     static std::map<ActivationFunction, ActivationFunctionExecutorPtr> act_map_qasymm8 =
     {
@@ -188,12 +156,6 @@ void NEActivationLayerKernel::configure(ITensor *input, ITensor *output, Activat
         case DataType::QASYMM8:
             _func = act_map_qasymm8[activation_info.activation()];
             break;
-        case DataType::QS8:
-            _func = act_map_qs8[activation_info.activation()];
-            break;
-        case DataType::QS16:
-            _func = act_map_qs16[activation_info.activation()];
-            break;
         case DataType::F32:
             _func = act_map_f32[activation_info.activation()];
             break;
@@ -507,70 +469,6 @@ typename std::enable_if<std::is_same<T, float>::value, void>::type NEActivationL
     input, output);
 }
 
-template <ActivationFunction F, typename T>
-typename std::enable_if<std::is_same<T, qint8_t>::value, void>::type NEActivationLayerKernel::activation(const Window &window)
-{
-    Iterator input(_input, window);
-    Iterator output(_output, window);
-    const int fixed_point_position = _input->info()->fixed_point_position();
-
-    static const qint8x16_t CONST_0 = vdupq_n_qs8(0);
-    const qint8x16_t        CONST_1 = vdupq_n_qs8(sqcvt_qs8_f32(1.f, fixed_point_position));
-    const qint8x16_t        a       = vdupq_n_qs8(sqcvt_qs8_f32(_act_info.a(), fixed_point_position));
-    const qint8x16_t        b       = vdupq_n_qs8(sqcvt_qs8_f32(_act_info.b(), fixed_point_position));
-
-    execute_window_loop(window, [&](const Coordinates & id)
-    {
-        const auto input_ptr  = reinterpret_cast<const qint8_t *>(input.ptr());
-        const auto output_ptr = reinterpret_cast<qint8_t *>(output.ptr());
-
-        const qint8x16_t in  = vld1q_qs8(input_ptr);
-        qint8x16_t       tmp = {};
-
-        switch(F)
-        {
-            case ActivationFunction::ABS:
-                tmp = vqabsq_qs8(in);
-                break;
-            case ActivationFunction::LINEAR:
-                tmp = vqmlaq_qs8(b, a, in, fixed_point_position);
-                break;
-            case ActivationFunction::LOGISTIC:
-                tmp = vqrecipq_qs8(vqaddq_qs8(CONST_1, vqexpq_qs8(vnegq_s8(in), fixed_point_position)), fixed_point_position);
-                break;
-            case ActivationFunction::RELU:
-                tmp = vmaxq_qs8(CONST_0, in);
-                break;
-            case ActivationFunction::BOUNDED_RELU:
-                tmp = vminq_qs8(a, vmaxq_qs8(CONST_0, in));
-                break;
-            case ActivationFunction::LU_BOUNDED_RELU:
-                tmp = vminq_qs8(a, vmaxq_qs8(b, in));
-                break;
-            case ActivationFunction::LEAKY_RELU:
-                tmp = vbslq_s8(vcgtq_s8(in, CONST_0), in, vmulq_qs8(a, in, fixed_point_position));
-                break;
-            case ActivationFunction::SOFT_RELU:
-                tmp = vlogq_qs8(vqaddq_qs8(CONST_1, vqexpq_qs8(in, fixed_point_position)), fixed_point_position);
-                break;
-            case ActivationFunction::SQRT:
-                tmp = vqrecipq_qs8(vqinvsqrtq_qs8(in, fixed_point_position), fixed_point_position);
-                break;
-            case ActivationFunction::SQUARE:
-                tmp = vqmulq_qs8(in, in, fixed_point_position);
-                break;
-            case ActivationFunction::TANH:
-                tmp = vqmulq_qs8(a, vqtanhq_qs8(vqmulq_qs8(b, in, fixed_point_position), fixed_point_position), fixed_point_position);
-                break;
-            default:
-                break;
-        }
-
-        vst1q_qs8(output_ptr, tmp);
-    },
-    input, output);
-}
-
 template <ActivationFunction F, typename T>
 typename std::enable_if<std::is_same<T, qasymm8_t>::value, void>::type NEActivationLayerKernel::activation(const Window &window)
 {
@@ -620,137 +518,6 @@ typename std::enable_if<std::is_same<T, qasymm8_t>::value, void>::type NEActivat
     input, output);
 }
 
-template <ActivationFunction F, typename T>
-typename std::enable_if<std::is_same<T, qint16_t>::value, void>::type NEActivationLayerKernel::activation(const Window &window)
-{
-    Iterator input(_input, window);
-    Iterator output(_output, window);
-    const int fixed_point_position = _input->info()->fixed_point_position();
-
-    static const qint16x8_t CONST_0 = vdupq_n_qs16(0);
-    const qint16x8_t        CONST_1 = vdupq_n_qs16(sqcvt_qs16_f32(1.f, fixed_point_position));
-    const qint16x8_t        a       = vdupq_n_qs16(sqcvt_qs16_f32(_act_info.a(), fixed_point_position));
-    const qint16x8_t        b       = vdupq_n_qs16(sqcvt_qs16_f32(_act_info.b(), fixed_point_position));
-
-    execute_window_loop(window, [&](const Coordinates & id)
-    {
-        const auto input_ptr  = reinterpret_cast<const qint16_t *>(input.ptr());
-        const auto output_ptr = reinterpret_cast<qint16_t *>(output.ptr());
-
-        const qint16x8x2_t in  = vld2q_s16(input_ptr);
-        qint16x8x2_t       tmp = { {} };
-
-        switch(F)
-        {
-            case ActivationFunction::ABS:
-                tmp =
-                {
-                    {
-                        vqabsq_qs16(in.val[0]),
-                        vqabsq_qs16(in.val[1]),
-                    }
-                };
-                break;
-            case ActivationFunction::LINEAR:
-                tmp =
-                {
-                    {
-                        vqmlaq_qs16(b, a, in.val[0], fixed_point_position),
-                        vqmlaq_qs16(b, a, in.val[1], fixed_point_position),
-                    }
-                };
-                break;
-            case ActivationFunction::LOGISTIC:
-                tmp =
-                {
-                    {
-                        vqrecipq_qs16(vqaddq_qs16(CONST_1, vqexpq_qs16(vnegq_s16(in.val[0]), fixed_point_position)), fixed_point_position),
-                        vqrecipq_qs16(vqaddq_qs16(CONST_1, vqexpq_qs16(vnegq_s16(in.val[1]), fixed_point_position)), fixed_point_position),
-                    }
-                };
-                break;
-            case ActivationFunction::RELU:
-                tmp =
-                {
-                    {
-                        vmaxq_qs16(CONST_0, in.val[0]),
-                        vmaxq_qs16(CONST_0, in.val[1]),
-                    }
-                };
-                break;
-            case ActivationFunction::BOUNDED_RELU:
-                tmp =
-                {
-                    {
-                        vminq_qs16(a, vmaxq_qs16(CONST_0, in.val[0])),
-                        vminq_qs16(a, vmaxq_qs16(CONST_0, in.val[1])),
-                    }
-                };
-                break;
-            case ActivationFunction::LU_BOUNDED_RELU:
-                tmp =
-                {
-                    {
-                        vminq_qs16(a, vmaxq_qs16(b, in.val[0])),
-                        vminq_qs16(a, vmaxq_qs16(b, in.val[1])),
-                    }
-                };
-                break;
-            case ActivationFunction::LEAKY_RELU:
-                tmp =
-                {
-                    {
-                        vbslq_s16(vcgtq_s16(in.val[0], CONST_0), in.val[0], vmulq_qs16(a, in.val[0], fixed_point_position)),
-                        vbslq_s16(vcgtq_s16(in.val[1], CONST_0), in.val[1], vmulq_qs16(a, in.val[1], fixed_point_position)),
-                    }
-                };
-                break;
-            case ActivationFunction::SOFT_RELU:
-                tmp =
-                {
-                    {
-                        vlogq_qs16(vqaddq_qs16(CONST_1, vqexpq_qs16(in.val[0], fixed_point_position)), fixed_point_position),
-                        vlogq_qs16(vqaddq_qs16(CONST_1, vqexpq_qs16(in.val[1], fixed_point_position)), fixed_point_position),
-                    }
-                };
-                break;
-            case ActivationFunction::SQRT:
-                tmp =
-                {
-                    {
-                        vqrecipq_qs16(vqinvsqrtq_qs16(in.val[0], fixed_point_position), fixed_point_position),
-                        vqrecipq_qs16(vqinvsqrtq_qs16(in.val[1], fixed_point_position), fixed_point_position),
-                    }
-                };
-                break;
-            case ActivationFunction::SQUARE:
-                tmp =
-                {
-                    {
-                        vqmulq_qs16(in.val[0], in.val[0], fixed_point_position),
-                        vqmulq_qs16(in.val[1], in.val[1], fixed_point_position),
-                    }
-                };
-                break;
-            case ActivationFunction::TANH:
-                tmp =
-                {
-                    {
-                        vqmulq_qs16(a, vqtanhq_qs16(vqmulq_qs16(b, in.val[0], fixed_point_position), fixed_point_position), fixed_point_position),
-                        vqmulq_qs16(a, vqtanhq_qs16(vqmulq_qs16(b, in.val[1], fixed_point_position), fixed_point_position), fixed_point_position),
-                    }
-                };
-                break;
-            default:
-                ARM_COMPUTE_ERROR("Function not implemented");
-                break;
-        }
-
-        vst2q_qs16(output_ptr, tmp);
-    },
-    input, output);
-}
-
 Status NEActivationLayerKernel::validate(const ITensorInfo *input, const ITensorInfo *output, const ActivationLayerInfo &act_info)
 {
     ARM_COMPUTE_UNUSED(act_info);
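
Note: the configure() logic this patch trims is a dispatch-table pattern: one std::map per
data type from ActivationFunction to a pointer-to-member executor, selected once at
configure time and invoked on every run. Below is a minimal standalone sketch of that
pattern under assumed names (MiniKernel, run_relu, run_abs are illustrative only, not
ComputeLibrary API).

#include <cstdio>
#include <map>

enum class ActivationFunction { RELU, ABS };

class MiniKernel
{
public:
    // Pointer-to-member type, mirroring ActivationFunctionExecutorPtr in the kernel.
    using ExecutorPtr = void (MiniKernel::*)(float);

    void configure(ActivationFunction f)
    {
        // One lookup table per data type; here only a float table is sketched.
        static std::map<ActivationFunction, ExecutorPtr> act_map_f32 =
        {
            { ActivationFunction::RELU, &MiniKernel::run_relu },
            { ActivationFunction::ABS, &MiniKernel::run_abs },
        };
        _func = act_map_f32[f]; // resolved once, not per element
    }

    void run(float x)
    {
        (this->*_func)(x); // invoke the executor chosen at configure time
    }

private:
    void run_relu(float x) { std::printf("relu(%g) = %g\n", x, x > 0.f ? x : 0.f); }
    void run_abs(float x) { std::printf("abs(%g) = %g\n", x, x < 0.f ? -x : x); }

    ExecutorPtr _func = nullptr;
};

int main()
{
    MiniKernel k;
    k.configure(ActivationFunction::RELU);
    k.run(-2.5f); // prints: relu(-2.5) = 0
}

Removing the QS8/QS16 support therefore only means deleting two of these per-type maps
and their switch cases; the dispatch mechanism itself is unchanged.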