From bdcdc39d89b6a6556f5c0483af5379f75eae0c55 Mon Sep 17 00:00:00 2001
From: Georgios Pinitas
Date: Thu, 22 Apr 2021 16:42:03 +0100
Subject: Enable fat binary support

Changes our build system to allow building both Neon(TM) and SVE
kernels and package them in the same binary. This will allow
runtime selection of the underlying architecture.

Adds new build option, fat_binary, for enabling this feature.

Change-Id: I8e8386149773ce28e071a2fb7ddd8c8ae0f28a4a
Signed-off-by: Michalis Spyrou
Reviewed-on: https://review.mlplatform.org/c/ml/ComputeLibrary/+/5704
Tested-by: Arm Jenkins
Reviewed-by: Georgios Pinitas
Comments-Addressed: Arm Jenkins
---
 tests/validation/NEON/ActivationLayer.cpp    | 24 ++++++++++++------------
 tests/validation/NEON/ArithmeticAddition.cpp |  6 +++---
 2 files changed, 15 insertions(+), 15 deletions(-)

(limited to 'tests/validation')

diff --git a/tests/validation/NEON/ActivationLayer.cpp b/tests/validation/NEON/ActivationLayer.cpp
index 577603d07d..111e969bae 100644
--- a/tests/validation/NEON/ActivationLayer.cpp
+++ b/tests/validation/NEON/ActivationLayer.cpp
@@ -68,11 +68,11 @@ RelativeTolerance relative_tolerance(DataType data_type, ActivationLayerI
             switch(data_type)
             {
                 case DataType::F16:
-#if defined(__ARM_FEATURE_SVE)
+#if defined(ENABLE_SVE)
                     return RelativeTolerance(0.25f);
-#else  // !defined(__ARM_FEATURE_SVE)
+#else  // !defined(ENABLE_SVE)
                     return RelativeTolerance(0.1f);
-#endif // defined(__ARM_FEATURE_SVE)
+#endif // defined(ENABLE_SVE)
                 default:
                     return RelativeTolerance(0.05f);
             }
@@ -80,11 +80,11 @@ RelativeTolerance relative_tolerance(DataType data_type, ActivationLayerI
             switch(data_type)
             {
                 case DataType::F16:
-#if defined(__ARM_FEATURE_SVE)
+#if defined(ENABLE_SVE)
                     return RelativeTolerance(0.9f);
-#else  // !defined(__ARM_FEATURE_SVE)
+#else  // !defined(ENABLE_SVE)
                     return RelativeTolerance(0.01f);
-#endif // defined(__ARM_FEATURE_SVE)
+#endif // defined(ENABLE_SVE)
                 default:
                     return RelativeTolerance(0.00001f);
             }
@@ -111,11 +111,11 @@ AbsoluteTolerance absolute_tolerance(DataType data_type, ActivationLayerI
             switch(data_type)
             {
                 case DataType::F16:
-#if defined(__ARM_FEATURE_SVE)
+#if defined(ENABLE_SVE)
                     return AbsoluteTolerance(0.25f);
-#else  // !defined(__ARM_FEATURE_SVE)
+#else  // !defined(ENABLE_SVE)
                     return AbsoluteTolerance(0.01f);
-#endif // defined(__ARM_FEATURE_SVE)
+#endif // defined(ENABLE_SVE)
                 default:
                     return AbsoluteTolerance(0.00001f);
             }
@@ -123,11 +123,11 @@ AbsoluteTolerance absolute_tolerance(DataType data_type, ActivationLayerI
             switch(data_type)
            {
                 case DataType::F16:
-#if defined(__ARM_FEATURE_SVE)
+#if defined(ENABLE_SVE)
                     return AbsoluteTolerance(0.9f);
-#else  // !defined(__ARM_FEATURE_SVE)
+#else  // !defined(ENABLE_SVE)
                     return AbsoluteTolerance(0.01f);
-#endif // defined(__ARM_FEATURE_SVE)
+#endif // defined(ENABLE_SVE)
                 default:
                     return AbsoluteTolerance(0.00001f);
             }
diff --git a/tests/validation/NEON/ArithmeticAddition.cpp b/tests/validation/NEON/ArithmeticAddition.cpp
index 98341805ed..ea6656eefe 100644
--- a/tests/validation/NEON/ArithmeticAddition.cpp
+++ b/tests/validation/NEON/ArithmeticAddition.cpp
@@ -43,11 +43,11 @@ namespace validation
 {
 namespace
 {
-#if !defined(__aarch64__) || defined(__ARM_FEATURE_SVE)
+#if !defined(__aarch64__) || defined(ENABLE_SVE)
 constexpr AbsoluteTolerance tolerance_quant(1); /**< Tolerance value for comparing reference's output against implementation's output for quantized data types */
-#else  // !defined(__aarch64__) || defined(__ARM_FEATURE_SVE)
+#else  // !defined(__aarch64__) || defined(ENABLE_SVE)
 constexpr AbsoluteTolerance tolerance_quant(0);
-#endif // !defined(__aarch64__) || defined(__ARM_FEATURE_SVE)
+#endif // !defined(__aarch64__) || defined(ENABLE_SVE)

 /** Input data sets **/
 const auto ArithmeticAdditionU8Dataset = combine(combine(framework::dataset::make("DataType", DataType::U8), framework::dataset::make("DataType", DataType::U8)), framework::dataset::make("DataType",
--
cgit v1.2.1
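
The commit message only briefly describes the fat-binary mechanism: both the Neon(TM) and the SVE kernel flavours are compiled into one library, and the implementation to run is chosen at run time from the capabilities of the CPU. The sketch below illustrates that general pattern; it is not ComputeLibrary code, and the names (relu_neon, relu_sve, cpu_has_sve, select_relu_kernel) are hypothetical stand-ins. It assumes a build that defines ENABLE_SVE whenever SVE kernels are included, for example one configured with the new fat_binary option mentioned above.

    // Illustrative sketch, not ComputeLibrary code. It shows the fat-binary idea
    // from the commit message: both kernel flavours live in the same binary,
    // guarded by the build define ENABLE_SVE rather than the compiler macro
    // __ARM_FEATURE_SVE, and one of them is selected at run time.
    #include <cstddef>
    #include <cstdio>

    namespace fat_binary_example
    {
    using KernelFn = void (*)(const float *, float *, std::size_t);

    // Stand-in kernel bodies; a real library would implement these with Neon and
    // SVE intrinsics in separately compiled translation units.
    void relu_neon(const float *src, float *dst, std::size_t n)
    {
        for(std::size_t i = 0; i < n; ++i) { dst[i] = src[i] > 0.f ? src[i] : 0.f; }
    }

    #if defined(ENABLE_SVE)
    void relu_sve(const float *src, float *dst, std::size_t n)
    {
        for(std::size_t i = 0; i < n; ++i) { dst[i] = src[i] > 0.f ? src[i] : 0.f; }
    }
    #endif // defined(ENABLE_SVE)

    // Hypothetical runtime feature probe; real code would query the kernel's
    // HWCAP bits (for example via getauxval on Linux) or the library's own CPU
    // information rather than hard-coding a value.
    bool cpu_has_sve()
    {
        return false; // conservative default for this sketch
    }

    // Runtime selection: use the SVE kernel only when it was built in *and* the
    // CPU running the binary actually supports SVE; otherwise fall back to Neon.
    KernelFn select_relu_kernel()
    {
    #if defined(ENABLE_SVE)
        if(cpu_has_sve())
        {
            return &relu_sve;
        }
    #endif // defined(ENABLE_SVE)
        return &relu_neon;
    }
    } // namespace fat_binary_example

    int main()
    {
        const float in[4]  = { -1.f, 2.f, -3.f, 4.f };
        float       out[4] = {};
        fat_binary_example::select_relu_kernel()(in, out, 4);
        std::printf("%f %f %f %f\n", out[0], out[1], out[2], out[3]);
        return 0;
    }

This reading is also consistent with the test changes in the diff: the tolerances are now keyed to the ENABLE_SVE build define instead of __ARM_FEATURE_SVE, since in a fat binary the architecture macro of an individual translation unit no longer indicates whether SVE kernels are present in the library.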