From 4e22f607bfb85d31146d96c937fc0706e12a0fa7 Mon Sep 17 00:00:00 2001
From: James Ward
Date: Tue, 20 Oct 2020 15:50:33 +0100
Subject: IVGCVSW-5267 Remove boost from core android-nn-driver

* WIP
!armnn:4231
!armnn:4287

Signed-off-by: James Ward
Change-Id: I7844efc84fac018d8aad1f72dd236ede3fd8e7f2
---
 ConversionUtils.hpp | 9 ++++-----
 1 file changed, 4 insertions(+), 5 deletions(-)

(limited to 'ConversionUtils.hpp')

diff --git a/ConversionUtils.hpp b/ConversionUtils.hpp
index f139383e..c1476845 100644
--- a/ConversionUtils.hpp
+++ b/ConversionUtils.hpp
@@ -23,7 +23,7 @@
 #include
 #include

-#include
+#include

 #include
 #include
@@ -477,8 +477,7 @@ void SanitizeBiasQuantizationScale(armnn::TensorInfo& biasInfo,
     const float expectedBiasScale = weightInfo.GetQuantizationScale() * inputInfo.GetQuantizationScale();
     if (biasInfo.GetQuantizationScale() != expectedBiasScale)
     {
-        boost::math::fpc::close_at_tolerance<float> comparer(boost::math::fpc::percent_tolerance(1.0f));
-        if (comparer(biasInfo.GetQuantizationScale(), expectedBiasScale))
+        if (armnnUtils::within_percentage_tolerance(biasInfo.GetQuantizationScale(), expectedBiasScale, 1.0f))
         {
             ALOGW("Bias quantization scale has been modified to match input * weights");
             biasInfo.SetQuantizationScale(expectedBiasScale);
@@ -1209,7 +1208,7 @@ LayerInputHandle ConvertToLayerInputHandle(const HalOperation& operation,
                 return LayerInputHandle();
             }

-            BOOST_FALLTHROUGH; // intentional fallthrough
+            [[clang::fallthrough]]; // intentional fallthrough
         }
         case HalOperandLifeTime::TEMPORARY_VARIABLE: // intentional fallthrough
         case HalOperandLifeTime::MODEL_OUTPUT:
@@ -1336,7 +1335,7 @@ LayerInputHandle ConvertToLayerInputHandle(const ::android::hardware::neuralnetw
                 return LayerInputHandle();
             }

-            BOOST_FALLTHROUGH; // intentional fallthrough
+            [[clang::fallthrough]]; // intentional fallthrough
         }
         case HalOperandLifeTime::TEMPORARY_VARIABLE: // intentional fallthrough
         case HalOperandLifeTime::SUBGRAPH_OUTPUT:
--
cgit v1.2.1
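Note on the first hunk: the include targets are not visible in this rendering. Judging from the code change, the removed header presumably provided Boost's floating-point comparison utilities (boost::math::fpc::close_at_tolerance / percent_tolerance) and the added header the ArmNN utility that declares armnnUtils::within_percentage_tolerance (introduced by the referenced !armnn patches); the exact paths are not confirmed here. The sketch below is a minimal, stand-alone stand-in for that helper, written only to mirror how the call reads in the hunk at line 477. The name WithinPercentageTolerance and the "percentage of the larger magnitude" rule are illustrative assumptions, not ArmNN's implementation, and the configurable strong/weak semantics of the Boost comparer are not reproduced exactly.

#include <algorithm>
#include <cmath>

// Sketch: accept two floats as equal if they differ by no more than
// tolerancePercent per cent of the larger magnitude (assumed semantics).
bool WithinPercentageTolerance(float a, float b, float tolerancePercent = 1.0f)
{
    const float tolerance = (tolerancePercent / 100.0f) * std::max(std::fabs(a), std::fabs(b));
    return std::fabs(a - b) <= tolerance;
}

Used the way the patched SanitizeBiasQuantizationScale uses the real helper, the call would read WithinPercentageTolerance(biasInfo.GetQuantizationScale(), expectedBiasScale, 1.0f), i.e. accept a bias quantization scale within 1% of inputScale * weightScale before overwriting it.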
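The other substitution replaces the BOOST_FALLTHROUGH macro with the clang-specific [[clang::fallthrough]] attribute, which marks an intentional case fallthrough and keeps -Wimplicit-fallthrough quiet in pre-C++17 clang builds (standard C++17 code could use [[fallthrough]]). A hypothetical minimal switch, unrelated to the driver code, just to show the attribute in place:

#include <cstdio>

void Describe(int lifeTime)
{
    switch (lifeTime)
    {
        case 0:
            std::printf("constant operand\n");
            [[clang::fallthrough]]; // intentional fallthrough, as in the patch above
        case 1:
            std::printf("usable as a layer input\n");
            break;
        default:
            std::printf("unsupported lifetime\n");
            break;
    }
}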