From c73e2b8c18f00434e1c8c37a2e122e3cd34c204c Mon Sep 17 00:00:00 2001
From: Georgios Pinitas
Date: Thu, 8 Nov 2018 13:33:16 +0000
Subject: COMPMID-1451: Fix fused activation in GEMMConvolutionLayer

-Uses output quantization information for the activation layer.
-Updates checks for BoundedRelu at CL side.

Change-Id: I0447860e90f1c89b67b9ace3c8daad713f6c64e0
---
 .../CL/functions/CLGEMMConvolutionLayer.cpp | 25 ++++++++-----------------
 1 file changed, 8 insertions(+), 17 deletions(-)

diff --git a/src/runtime/CL/functions/CLGEMMConvolutionLayer.cpp b/src/runtime/CL/functions/CLGEMMConvolutionLayer.cpp
index 4825d878f8..4694aa7f37 100644
--- a/src/runtime/CL/functions/CLGEMMConvolutionLayer.cpp
+++ b/src/runtime/CL/functions/CLGEMMConvolutionLayer.cpp
@@ -291,10 +291,10 @@ void CLGEMMConvolutionLayer::configure(const ICLTensor *input, const ICLTensor *
 
     if(_is_activationlayer_enabled && supported_acts.count(act_info.activation()) != 0)
     {
-        const int a_const_int = input->info()->quantization_info().quantize(act_info.a(), RoundingPolicy::TO_NEAREST_UP);
-        const int b_const_int = input->info()->quantization_info().quantize(act_info.b(), RoundingPolicy::TO_NEAREST_UP);
+        const int a_const_int = output_quant_info.quantize(act_info.a(), RoundingPolicy::TO_NEAREST_UP);
+        const int b_const_int = output_quant_info.quantize(act_info.b(), RoundingPolicy::TO_NEAREST_UP);
 
-        min_activation = act_info.activation() == ActivationLayerInfo::ActivationFunction::RELU ? input->info()->quantization_info().offset : b_const_int;
+        min_activation = act_info.activation() != ActivationLayerInfo::ActivationFunction::LU_BOUNDED_RELU ? output_quant_info.offset : b_const_int;
         max_activation = act_info.activation() == ActivationLayerInfo::ActivationFunction::RELU ? 255 : a_const_int;
 
         // If the activation layer is RELU, BOUNDED_RELU or LU_BOUNDED_RELU, we can use the GEMMLowp output stage to perform this operation
@@ -475,20 +475,11 @@ Status CLGEMMConvolutionLayer::validate(const ITensorInfo *input, const ITensorI
 
     if(is_activationlayer_enabled && supported_acts.count(act_info.activation()) != 0)
     {
-        const int a_const_int = input->quantization_info().quantize(act_info.a(), RoundingPolicy::TO_NEAREST_UP);
-        const int b_const_int = input->quantization_info().quantize(act_info.b(), RoundingPolicy::TO_NEAREST_UP);
-
-        min_activation = b_const_int;
-        max_activation = a_const_int;
-
-        if(act_info.activation() != ActivationLayerInfo::ActivationFunction::LU_BOUNDED_RELU)
-        {
-            min_activation = input->quantization_info().offset;
-        }
-        if(act_info.activation() == ActivationLayerInfo::ActivationFunction::RELU)
-        {
-            max_activation = 255;
-        }
+        const int a_const_int = output_quant_info.quantize(act_info.a(), RoundingPolicy::TO_NEAREST_UP);
+        const int b_const_int = output_quant_info.quantize(act_info.b(), RoundingPolicy::TO_NEAREST_UP);
+
+        min_activation = act_info.activation() != ActivationLayerInfo::ActivationFunction::LU_BOUNDED_RELU ? output_quant_info.offset : b_const_int;
+        max_activation = act_info.activation() == ActivationLayerInfo::ActivationFunction::RELU ? 255 : a_const_int;
 
         // If the activation layer is RELU, BOUNDED_RELU or LU_BOUNDED_RELU, we can use the GEMMLowp output stage to perform this operation
         is_activationlayer_enabled = false;
-- 
cgit v1.2.1
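
For readers skimming the diff, here is a minimal standalone sketch of the idea behind the fix. The GEMMLowp output stage clamps values that are already in the output tensor's quantized domain, so the activation bounds a and b must be quantized with the output quantization info rather than the input's; and since only LU_BOUNDED_RELU carries a real lower bound b, both RELU and BOUNDED_RELU clamp below at the output zero point. This is not Arm Compute Library code: QuantInfo, Act, and the sample scale/offset values are illustrative assumptions, with quantize() modelling the library's QASYMM8 affine scheme q = round(x / scale) + offset.

    // Standalone sketch (not ACL code): deriving the fused-activation clamp
    // range in the quantized domain, mirroring the patched logic above.
    #include <algorithm>
    #include <cmath>
    #include <cstdio>

    enum class Act { RELU, BOUNDED_RELU, LU_BOUNDED_RELU };

    struct QuantInfo // illustrative stand-in for the library's quantization info
    {
        float scale;
        int   offset; // zero point: the quantized code representing real 0
        // Quantize a real value to an 8-bit code, rounding to nearest (ties up).
        int quantize(float v) const
        {
            const int q = static_cast<int>(std::floor(v / scale + 0.5f)) + offset;
            return std::min(255, std::max(0, q));
        }
    };

    int main()
    {
        // Hypothetical *output* quantization and a BOUNDED_RELU: min(a, max(0, x)).
        const QuantInfo output_quant_info{0.05f, 10};
        const Act   act = Act::BOUNDED_RELU;
        const float a = 6.0f; // upper bound
        const float b = 0.0f; // lower bound (only meaningful for LU_BOUNDED_RELU)

        const int a_const_int = output_quant_info.quantize(a);
        const int b_const_int = output_quant_info.quantize(b);

        // Mirrors the patch: every variant except LU_BOUNDED_RELU clamps below
        // at the zero point (real 0), and only plain RELU is unbounded above.
        const int min_activation = (act != Act::LU_BOUNDED_RELU) ? output_quant_info.offset : b_const_int;
        const int max_activation = (act == Act::RELU) ? 255 : a_const_int;

        std::printf("clamp range in quantized domain: [%d, %d]\n", min_activation, max_activation);
        return 0;
    }

With the hypothetical scale 0.05 and offset 10, a BOUNDED_RELU with a = 6 prints a clamp range of [10, 130], i.e. real values [0, 6] expressed in the output's quantized domain. Quantizing against the input's info instead, as the pre-patch code did, would yield bounds that are wrong whenever input and output quantizations differ.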