diff options
author | Georgios Pinitas <georgios.pinitas@arm.com> | 2021-04-22 16:42:03 +0100 |
---|---|---|
committer | Michalis Spyrou <michalis.spyrou@arm.com> | 2021-06-07 13:21:17 +0000 |
commit | bdcdc39d89b6a6556f5c0483af5379f75eae0c55 (patch) | |
tree | 454cd50afa81da3ca3382701619fef023911e3f7 /src/core/cpu/kernels/CpuSoftmaxKernel.cpp | |
parent | 5a643320b79f15a5d09b5366c4744579cf71e303 (diff) | |
download | ComputeLibrary-bdcdc39d89b6a6556f5c0483af5379f75eae0c55.tar.gz |
Enable fat binary support
Changes our build system to allow building both Neon(TM) and SVE
kernels and packaging them in the same binary. This will allow
runtime selection of the underlying architecture.
Adds a new build option, fat_binary, for enabling this feature.
Change-Id: I8e8386149773ce28e071a2fb7ddd8c8ae0f28a4a
Signed-off-by: Michalis Spyrou <michalis.spyrou@arm.com>
Reviewed-on: https://review.mlplatform.org/c/ml/ComputeLibrary/+/5704
Tested-by: Arm Jenkins <bsgcomp@arm.com>
Reviewed-by: Georgios Pinitas <georgios.pinitas@arm.com>
Comments-Addressed: Arm Jenkins <bsgcomp@arm.com>
Diffstat (limited to 'src/core/cpu/kernels/CpuSoftmaxKernel.cpp')
-rw-r--r-- | src/core/cpu/kernels/CpuSoftmaxKernel.cpp | 19 |
1 file changed, 11 insertions, 8 deletions
diff --git a/src/core/cpu/kernels/CpuSoftmaxKernel.cpp b/src/core/cpu/kernels/CpuSoftmaxKernel.cpp index d2453ed21d..8ea186b16a 100644 --- a/src/core/cpu/kernels/CpuSoftmaxKernel.cpp +++ b/src/core/cpu/kernels/CpuSoftmaxKernel.cpp @@ -34,8 +34,8 @@ #include "src/core/helpers/WindowHelpers.h" #include "src/core/common/Registrars.h" -#include "src/core/cpu/kernels/softmax/impl/NEON/list.h" -#include "src/core/cpu/kernels/softmax/impl/SVE/list.h" +#include "src/core/cpu/kernels/softmax/impl/neon/list.h" +#include "src/core/cpu/kernels/softmax/impl/sve/list.h" namespace arm_compute { @@ -69,7 +69,7 @@ struct SoftmaxLogits1DMaxKernel static const SoftmaxLogits1DKernel available_logits_1d_kernels[] = { -#if defined(__ARM_FEATURE_SVE) +#if defined(ENABLE_SVE) { "sve_softmax_logits_1d_float", [](const SoftmaxSelectorData & data) { return (data.dt == DataType::F32); }, @@ -80,7 +80,9 @@ static const SoftmaxLogits1DKernel available_logits_1d_kernels[] = [](const SoftmaxSelectorData & data) { return (data.dt == DataType::F16); }, REGISTER_FP16_SVE(arm_compute::cpu::sve_softmax_logits_1d_float<float16_t>) }, -#else /* !defined(__ARM_FEATURE_SVE) */ +#endif /* defined(ENABLE_SVE) */ + +#if defined(ENABLE_NEON) { "neon_softmax_logits_1d_float", [](const SoftmaxSelectorData & data) { return (data.dt == DataType::F32); }, @@ -93,7 +95,7 @@ static const SoftmaxLogits1DKernel available_logits_1d_kernels[] = REGISTER_FP16_NEON(arm_compute::cpu::neon_softmax_logits_1d_float<float16_t>) }, #endif /* defined(__ARM_FEATURE_FP16_VECTOR_ARITHMETIC) */ -#endif /* defined(__ARM_FEATURE_SVE) */ +#endif /* !defined(ENABLE_NEON) */ #if defined(__ARM_FEATURE_SVE2) { @@ -123,7 +125,7 @@ static const SoftmaxLogits1DKernel available_logits_1d_kernels[] = static const SoftmaxLogits1DMaxKernel available_logits_1d_max_kernels[] = { -#if defined(__ARM_FEATURE_SVE) +#if defined(ENABLE_SVE) { "sve_logits_1d_max", [](const SoftmaxSelectorData & data) { return (data.dt == DataType::F32); }, @@ -144,7 
+146,8 @@ static const SoftmaxLogits1DMaxKernel available_logits_1d_max_kernels[] = [](const SoftmaxSelectorData & data) { return (data.dt == DataType::QASYMM8_SIGNED); }, REGISTER_QASYMM8_SIGNED_SVE(arm_compute::cpu::sve_logits_1d_max<qasymm8_signed_t>) }, -#else /* !defined(__ARM_FEATURE_SVE) */ +#endif /* defined(ENABLE_SVE) */ +#if defined(ENABLE_NEON) { "neon_logits_1d_max", [](const SoftmaxSelectorData & data) { return (data.dt == DataType::F32); }, @@ -167,7 +170,7 @@ static const SoftmaxLogits1DMaxKernel available_logits_1d_max_kernels[] = [](const SoftmaxSelectorData & data) { return (data.dt == DataType::QASYMM8_SIGNED); }, REGISTER_QASYMM8_SIGNED_NEON(arm_compute::cpu::neon_logits_1d_max<qasymm8_signed_t>) }, -#endif /* defined(__ARM_FEATURE_SVE) */ +#endif /* defined(ENABLE_NEON) */ }; const SoftmaxLogits1DKernel *get_implementation_logits(const SoftmaxSelectorData &data) |