author    Gunes Bayir <gunes.bayir@arm.com>        2021-08-12 14:21:45 +0100
committer SiCongLi <sicong.li@arm.com>             2021-08-13 13:56:11 +0100
commit    80e4b88d8249c1d4772de17bae83b14c0a7e63ed (patch)
tree      7ae74e344274bc75b1be3fd47e90b8c4f91b14bf
parent    bcbd7068b8b6800f0f5c34239fbd26d5f6135d28 (diff)
Increase CL/BatchNormalizationLayer FP16 absolute threshold
Resolves: COMPMID-4768
Signed-off-by: Gunes Bayir <gunes.bayir@arm.com>
Change-Id: I4f194f6fc3c991c6c034ef6617c00057ce163e17
Reviewed-on: https://review.mlplatform.org/c/ml/ComputeLibrary/+/6103
Reviewed-by: Giorgio Arena <giorgio.arena@arm.com>
Comments-Addressed: Arm Jenkins <bsgcomp@arm.com>
Tested-by: Arm Jenkins <bsgcomp@arm.com>
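For background on why a 0.01f absolute threshold is tight for half precision (this note is not part of the patch, and the output magnitude of 10 is purely an illustrative assumption): FP16 carries a 10-bit mantissa, so representable values near 10 are already about 0.008 apart, and a few accumulated rounding steps across the batch-normalization arithmetic can push the absolute error past 0.01. A minimal standalone sketch:

```cpp
#include <cmath>
#include <cstdio>

int main()
{
    // IEEE 754 binary16 (FP16) has a 10-bit mantissa, so the relative spacing of
    // representable values within a binade is 2^-10.
    // For a hypothetical output magnitude of 10 (inside the binade [8, 16)),
    // the spacing (ULP) is 8 * 2^-10.
    const float ulp_at_10 = 8.0f * std::ldexp(1.0f, -10); // 0.0078125

    // Each rounding step contributes up to half a ULP; a handful of such steps in the
    // batch-norm arithmetic can therefore accumulate past an absolute bound of 0.01.
    std::printf("FP16 ULP near 10: %f (half ULP: %f)\n", ulp_at_10, 0.5f * ulp_at_10);
    return 0;
}
```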
 tests/validation/CL/BatchNormalizationLayer.cpp | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/tests/validation/CL/BatchNormalizationLayer.cpp b/tests/validation/CL/BatchNormalizationLayer.cpp
index 8b3bdbc3ea..3b87b9d1b5 100644
--- a/tests/validation/CL/BatchNormalizationLayer.cpp
+++ b/tests/validation/CL/BatchNormalizationLayer.cpp
@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 2017-2020 Arm Limited.
+ * Copyright (c) 2017-2021 Arm Limited.
  *
  * SPDX-License-Identifier: MIT
  *
@@ -50,7 +50,7 @@ namespace
 {
 RelativeTolerance<float> rel_tolerance_f32(0.05f); /**< Tolerance value for comparing reference's output against implementation's output for DataType::F32 */
 constexpr AbsoluteTolerance<float> abs_tolerance_f32(0.0001f); /**< Tolerance value for comparing reference's output against implementation's output for DataType::F32 */
-constexpr AbsoluteTolerance<float> tolerance_f16(0.01f); /**< Tolerance value for comparing reference's output against implementation's output for DataType::F16 */
+constexpr AbsoluteTolerance<float> tolerance_f16(0.02f); /**< Tolerance value for comparing reference's output against implementation's output for DataType::F16 */
 const auto act_infos = framework::dataset::make("ActivationInfo",
 {
     ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU),
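For readers unfamiliar with the test framework, the tolerance constant above drives an output comparison that, conceptually, accepts an element when its absolute difference from the reference stays within the tolerance. The sketch below is a hypothetical stand-in for that check, not ComputeLibrary's actual validation code; the function and variable names are illustrative only:

```cpp
#include <cmath>

// Hypothetical stand-in for the absolute-tolerance comparison configured by tolerance_f16:
// an output element passes when |reference - target| <= tolerance (0.02f for F16 after
// this patch). Names here are illustrative, not the framework's API.
inline bool within_absolute_tolerance(float reference, float target, float tolerance)
{
    return std::fabs(reference - target) <= tolerance;
}

int main()
{
    // Example: an FP16 result off by 0.015 fails the old 0.01f threshold but passes 0.02f.
    const bool old_pass = within_absolute_tolerance(1.000f, 1.015f, 0.01f); // false
    const bool new_pass = within_absolute_tolerance(1.000f, 1.015f, 0.02f); // true
    return (old_pass || !new_pass) ? 1 : 0;
}
```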