diff options
author | Georgios Pinitas <georgios.pinitas@arm.com> | 2018-11-08 13:33:16 +0000 |
---|---|---|
committer | Georgios Pinitas <georgios.pinitas@arm.com> | 2018-11-08 16:32:41 +0000 |
commit | c73e2b8c18f00434e1c8c37a2e122e3cd34c204c (patch) | |
tree | 8fac9d6db0c7690c516da3f9e39fdc7139fa3f49 /src/runtime/CL/functions | |
parent | 5e25b12bf292c1b9301a4a54cdca5d3c657a5d3a (diff) | |
download | ComputeLibrary-c73e2b8c18f00434e1c8c37a2e122e3cd34c204c.tar.gz |
COMPMID-1451: Fix fused activation in GEMMConvolutionLayer
- Uses output quantization information for the activation layer.
- Updates checks for BoundedRelu at CL side.
Change-Id: I0447860e90f1c89b67b9ace3c8daad713f6c64e0
Diffstat (limited to 'src/runtime/CL/functions')
-rw-r--r-- | src/runtime/CL/functions/CLGEMMConvolutionLayer.cpp | 25 |
1 file changed, 8 insertions, 17 deletions
diff --git a/src/runtime/CL/functions/CLGEMMConvolutionLayer.cpp b/src/runtime/CL/functions/CLGEMMConvolutionLayer.cpp
index 4825d878f8..4694aa7f37 100644
--- a/src/runtime/CL/functions/CLGEMMConvolutionLayer.cpp
+++ b/src/runtime/CL/functions/CLGEMMConvolutionLayer.cpp
@@ -291,10 +291,10 @@ void CLGEMMConvolutionLayer::configure(const ICLTensor *input, const ICLTensor *
     if(_is_activationlayer_enabled && supported_acts.count(act_info.activation()) != 0)
     {
-        const int a_const_int = input->info()->quantization_info().quantize(act_info.a(), RoundingPolicy::TO_NEAREST_UP);
-        const int b_const_int = input->info()->quantization_info().quantize(act_info.b(), RoundingPolicy::TO_NEAREST_UP);
+        const int a_const_int = output_quant_info.quantize(act_info.a(), RoundingPolicy::TO_NEAREST_UP);
+        const int b_const_int = output_quant_info.quantize(act_info.b(), RoundingPolicy::TO_NEAREST_UP);
 
-        min_activation = act_info.activation() == ActivationLayerInfo::ActivationFunction::RELU ? input->info()->quantization_info().offset : b_const_int;
+        min_activation = act_info.activation() != ActivationLayerInfo::ActivationFunction::LU_BOUNDED_RELU ? output_quant_info.offset : b_const_int;
         max_activation = act_info.activation() == ActivationLayerInfo::ActivationFunction::RELU ? 255 : a_const_int;
 
         // If the activation layer is RELU, BOUNDED_RELU or LU_BOUNDED_RELU, we can use the GEMMLowp output stage to perform this operation
@@ -475,20 +475,11 @@ Status CLGEMMConvolutionLayer::validate(const ITensorInfo *input, const ITensorI
     if(is_activationlayer_enabled && supported_acts.count(act_info.activation()) != 0)
     {
-        const int a_const_int = input->quantization_info().quantize(act_info.a(), RoundingPolicy::TO_NEAREST_UP);
-        const int b_const_int = input->quantization_info().quantize(act_info.b(), RoundingPolicy::TO_NEAREST_UP);
-
-        min_activation = b_const_int;
-        max_activation = a_const_int;
-
-        if(act_info.activation() != ActivationLayerInfo::ActivationFunction::LU_BOUNDED_RELU)
-        {
-            min_activation = input->quantization_info().offset;
-        }
-        if(act_info.activation() == ActivationLayerInfo::ActivationFunction::RELU)
-        {
-            max_activation = 255;
-        }
+        const int a_const_int = output_quant_info.quantize(act_info.a(), RoundingPolicy::TO_NEAREST_UP);
+        const int b_const_int = output_quant_info.quantize(act_info.b(), RoundingPolicy::TO_NEAREST_UP);
+
+        min_activation = act_info.activation() != ActivationLayerInfo::ActivationFunction::LU_BOUNDED_RELU ? output_quant_info.offset : b_const_int;
+        max_activation = act_info.activation() == ActivationLayerInfo::ActivationFunction::RELU ? 255 : a_const_int;
 
         // If the activation layer is RELU, BOUNDED_RELU or LU_BOUNDED_RELU, we can use the GEMMLowp output stage to perform this operation
         is_activationlayer_enabled = false;