diff options
author | James Ward <james.ward@arm.com> | 2020-10-20 15:50:33 +0100 |
---|---|---|
committer | James Ward <james.ward@arm.com> | 2020-10-29 18:53:22 +0000 |
commit | 4e22f607bfb85d31146d96c937fc0706e12a0fa7 (patch) | |
tree | ec436ba4be38c507252eb7cad4c1434c43e8c42f /ConversionUtils.hpp | |
parent | 7f492ac8aa24a1ff90e1b4e7800ef9fc30b73d36 (diff) | |
download | android-nn-driver-4e22f607bfb85d31146d96c937fc0706e12a0fa7.tar.gz |
IVGCVSW-5267 Remove boost from core android-nn-driver
* WIP
!armnn:4231
!armnn:4287
Signed-off-by: James Ward <james.ward@arm.com>
Change-Id: I7844efc84fac018d8aad1f72dd236ede3fd8e7f2
Diffstat (limited to 'ConversionUtils.hpp')
-rw-r--r-- | ConversionUtils.hpp | 9 |
1 file changed, 4 insertions(+), 5 deletions(-)
diff --git a/ConversionUtils.hpp b/ConversionUtils.hpp
index f139383e..c1476845 100644
--- a/ConversionUtils.hpp
+++ b/ConversionUtils.hpp
@@ -23,7 +23,7 @@
 #include <CpuExecutor.h>
 #include <OperationsUtils.h>

-#include <boost/test/tools/floating_point_comparison.hpp>
+#include <armnnUtils/FloatingPointComparison.hpp>

 #include <log/log.h>

 #include <vector>
@@ -477,8 +477,7 @@ void SanitizeBiasQuantizationScale(armnn::TensorInfo& biasInfo,
     const float expectedBiasScale = weightInfo.GetQuantizationScale() * inputInfo.GetQuantizationScale();
     if (biasInfo.GetQuantizationScale() != expectedBiasScale)
     {
-        boost::math::fpc::close_at_tolerance<float> comparer(boost::math::fpc::percent_tolerance(1.0f));
-        if (comparer(biasInfo.GetQuantizationScale(), expectedBiasScale))
+        if (armnnUtils::within_percentage_tolerance(biasInfo.GetQuantizationScale(), expectedBiasScale, 1.0f))
         {
             ALOGW("Bias quantization scale has been modified to match input * weights");
             biasInfo.SetQuantizationScale(expectedBiasScale);
@@ -1209,7 +1208,7 @@ LayerInputHandle ConvertToLayerInputHandle(const HalOperation& operation,
                 return LayerInputHandle();
             }

-            BOOST_FALLTHROUGH; // intentional fallthrough
+            [[clang::fallthrough]]; // intentional fallthrough
         }
         case HalOperandLifeTime::TEMPORARY_VARIABLE: // intentional fallthrough
         case HalOperandLifeTime::MODEL_OUTPUT:
@@ -1336,7 +1335,7 @@ LayerInputHandle ConvertToLayerInputHandle(const ::android::hardware::neuralnetw
                 return LayerInputHandle();
             }

-            BOOST_FALLTHROUGH; // intentional fallthrough
+            [[clang::fallthrough]]; // intentional fallthrough
         }
         case HalOperandLifeTime::TEMPORARY_VARIABLE: // intentional fallthrough
         case HalOperandLifeTime::SUBGRAPH_OUTPUT: