diff options
author | Michalis Spyrou <michalis.spyrou@arm.com> | 2020-11-22 00:49:42 +0000 |
---|---|---|
committer | Michalis Spyrou <michalis.spyrou@arm.com> | 2020-12-14 16:02:26 +0000 |
commit | aa51a5ba9a3f05be08b94859b53c398edee5d2e3 (patch) | |
tree | b28829b483421b210cd7c8a256c7feafed736b36 /tests | |
parent | 3737c7934da929003bda446291489cf352e43751 (diff) | |
download | ComputeLibrary-aa51a5ba9a3f05be08b94859b53c398edee5d2e3.tar.gz |
COMPMID-3870: Create ActivationLayer SVE/SVE2
Adds support for ActivationLayer for SVE and SVE2.
Datatypes supported:
* FP32
* FP16
* QASYMM8
* QASYMM8_SIGNED
* QSYMM16
Change-Id: Ia3583891795cda4ca2f9fa27c440731a5c27710d
Signed-off-by: Michalis Spyrou <michalis.spyrou@arm.com>
Reviewed-on: https://review.mlplatform.org/c/ml/ComputeLibrary/+/4566
Tested-by: Arm Jenkins <bsgcomp@arm.com>
Reviewed-by: Georgios Pinitas <georgios.pinitas@arm.com>
Comments-Addressed: Arm Jenkins <bsgcomp@arm.com>
Diffstat (limited to 'tests')
-rw-r--r-- | tests/validation/NEON/ActivationLayer.cpp | 40 |
1 file changed, 35 insertions(+), 5 deletions(-)
diff --git a/tests/validation/NEON/ActivationLayer.cpp b/tests/validation/NEON/ActivationLayer.cpp
index 84ff288b2f..f8f35f0a8e 100644
--- a/tests/validation/NEON/ActivationLayer.cpp
+++ b/tests/validation/NEON/ActivationLayer.cpp
@@ -61,7 +61,6 @@ RelativeTolerance<float> relative_tolerance(DataType data_type, ActivationLayerI
     switch(activation)
     {
         case ActivationLayerInfo::ActivationFunction::LOGISTIC:
-        case ActivationLayerInfo::ActivationFunction::SOFT_RELU:
         case ActivationLayerInfo::ActivationFunction::ELU:
         case ActivationLayerInfo::ActivationFunction::SQRT:
         case ActivationLayerInfo::ActivationFunction::TANH:
@@ -69,10 +68,26 @@ RelativeTolerance<float> relative_tolerance(DataType data_type, ActivationLayerI
             switch(data_type)
             {
                 case DataType::F16:
+#if defined(__ARM_FEATURE_SVE)
+                    return RelativeTolerance<float>(0.25f);
+#else  // !defined(__ARM_FEATURE_SVE)
                     return RelativeTolerance<float>(0.1f);
+#endif // defined(__ARM_FEATURE_SVE)
                 default:
                     return RelativeTolerance<float>(0.05f);
             }
+        case ActivationLayerInfo::ActivationFunction::SOFT_RELU:
+            switch(data_type)
+            {
+                case DataType::F16:
+#if defined(__ARM_FEATURE_SVE)
+                    return RelativeTolerance<float>(0.9f);
+#else  // !defined(__ARM_FEATURE_SVE)
+                    return RelativeTolerance<float>(0.01f);
+#endif // defined(__ARM_FEATURE_SVE)
+                default:
+                    return RelativeTolerance<float>(0.00001f);
+            }
         default:
             return RelativeTolerance<float>(0.f);
     }
@@ -90,14 +105,29 @@ AbsoluteTolerance<float> absolute_tolerance(DataType data_type, ActivationLayerI
     switch(activation)
     {
         case ActivationLayerInfo::ActivationFunction::LOGISTIC:
-        case ActivationLayerInfo::ActivationFunction::SOFT_RELU:
         case ActivationLayerInfo::ActivationFunction::SQRT:
         case ActivationLayerInfo::ActivationFunction::TANH:
         case ActivationLayerInfo::ActivationFunction::HARD_SWISH:
             switch(data_type)
             {
                 case DataType::F16:
+#if defined(__ARM_FEATURE_SVE)
+                    return AbsoluteTolerance<float>(0.25f);
+#else  // !defined(__ARM_FEATURE_SVE)
+                    return AbsoluteTolerance<float>(0.01f);
+#endif // defined(__ARM_FEATURE_SVE)
+                default:
+                    return AbsoluteTolerance<float>(0.00001f);
+            }
+        case ActivationLayerInfo::ActivationFunction::SOFT_RELU:
+            switch(data_type)
+            {
+                case DataType::F16:
+#if defined(__ARM_FEATURE_SVE)
+                    return AbsoluteTolerance<float>(0.9f);
+#else  // !defined(__ARM_FEATURE_SVE)
                     return AbsoluteTolerance<float>(0.01f);
+#endif // defined(__ARM_FEATURE_SVE)
                 default:
                     return AbsoluteTolerance<float>(0.00001f);
             }
@@ -107,10 +137,10 @@ AbsoluteTolerance<float> absolute_tolerance(DataType data_type, ActivationLayerI
 }
 
 /** Tolerance for quantized asymmetric operations */
-#if defined(__aarch64__)
-constexpr AbsoluteTolerance<uint8_t> tolerance_qasymm8(0);
-#else // defined(__aarch64__)
+#if(!defined(__aarch64__) || defined(__ARM_FEATURE_SVE2))
 constexpr AbsoluteTolerance<uint8_t> tolerance_qasymm8(1);
+#else // !(!defined(__aarch64__) || defined(__ARM_FEATURE_SVE2))
+constexpr AbsoluteTolerance<uint8_t> tolerance_qasymm8(0);
 #endif // defined(__aarch64__)
 constexpr AbsoluteTolerance<int16_t> tolerance_qsymm16(1);