From 066017249985d3ade9c1788721943c85077ef984 Mon Sep 17 00:00:00 2001
From: Gunes Bayir
Date: Fri, 8 Dec 2023 17:11:48 +0000
Subject: Fix validation error in graph_ssd_mobilenet

The graph example has fixed quantization information given for certain
layers, and some of the offsets exceed the 8-bit range for the Int8
data type. This should not be the case, and the offsets should respect
the 8-bit quantization specification laid out here:
https://www.tensorflow.org/lite/performance/quantization_spec

However, the mechanism added in the helper function introduces
robustness against such irregularities at little to no cost, and is
therefore added as a fix.

Resolves: COMPMID-6748

Change-Id: If39bf323382f109fa100ee2b87ce63cc7bc89759
Signed-off-by: Gunes Bayir
Reviewed-on: https://review.mlplatform.org/c/ml/ComputeLibrary/+/10858
Reviewed-by: SiCong Li
Comments-Addressed: Arm Jenkins
Benchmark: Arm Jenkins
Tested-by: Arm Jenkins
---
 src/core/Utils.cpp | 5 +++--
 1 file changed, 3 insertions(+), 2 deletions(-)

(limited to 'src/core')

diff --git a/src/core/Utils.cpp b/src/core/Utils.cpp
index 90a7ac32c0..532d08de92 100644
--- a/src/core/Utils.cpp
+++ b/src/core/Utils.cpp
@@ -450,8 +450,9 @@ std::pair get_quantized_activation_min_max(const ActivationLay
     const int b_int = is_qasymm8_signed ? quantize_qasymm8_signed(b, oq_info) : quantize_qasymm8(b, oq_info);
     const auto type_max_value = std::get<1>(get_min_max(data_type)).get();
 
-    const int32_t min_activation =
-        act_info.activation() != ActivationLayerInfo::ActivationFunction::LU_BOUNDED_RELU ? oq_info.offset : b_int;
+    const int32_t min_activation = act_info.activation() != ActivationLayerInfo::ActivationFunction::LU_BOUNDED_RELU
+                                       ? std::min(oq_info.offset, type_max_value)
+                                       : b_int;
     const int32_t max_activation =
         act_info.activation() == ActivationLayerInfo::ActivationFunction::RELU ? type_max_value : a_int;
 
-- 
cgit v1.2.1
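
For context, below is a minimal standalone sketch of the clamping behaviour
the patch adds. The struct is a simplified stand-in for the library's
UniformQuantizationInfo, and the offset value 128 is a hypothetical
out-of-spec example of the kind the commit message describes (it is not
taken from the graph example itself); the sketch only illustrates the
std::min clamp, not the full get_quantized_activation_min_max logic.

    #include <algorithm>
    #include <cstdint>
    #include <iostream>

    // Simplified stand-in for the library's quantization info type
    // (hypothetical; for illustration only).
    struct UniformQuantizationInfo
    {
        float   scale;
        int32_t offset; // per the TFLite spec, must fit the 8-bit range
    };

    int main()
    {
        // Maximum representable value for QASYMM8_SIGNED (Int8) data.
        constexpr int32_t type_max_value = 127;

        // A hypothetical out-of-spec offset: 8-bit signed data
        // cannot actually represent 128.
        const UniformQuantizationInfo oq_info{0.05f, 128};

        // Before the fix: the offset flowed into min_activation
        // unclamped, so the minimum could exceed the type's maximum.
        const int32_t min_before = oq_info.offset;

        // After the fix: the offset is clamped to the type's maximum,
        // keeping min_activation inside the representable range.
        const int32_t min_after = std::min(oq_info.offset, type_max_value);

        std::cout << "before: " << min_before << " (exceeds " << type_max_value << ")\n"
                  << "after:  " << min_after << " (clamped)\n";
    }

Note the design choice: for well-formed models an in-range offset already
compares below type_max_value, so std::min leaves it untouched, while
out-of-spec offsets degrade gracefully instead of failing validation,
which matches the commit message's claim of little/no cost.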