author    Sang-Hoon Park <sang-hoon.park@arm.com>    2020-11-25 11:46:03 +0000
committer Sang-Hoon Park <sang-hoon.park@arm.com>    2020-12-02 10:18:46 +0000
commit    add8e815ea94c8f8e6b1c9faf18527695f1332ec (patch)
tree      5941064344b426d12bc76b2fba3d0c631e796088 /tests/validation/NEON/ActivationLayer.cpp
parent    4ffc42afafc8e6eee9917ac27b4bc510973335bf (diff)
download  ComputeLibrary-add8e815ea94c8f8e6b1c9faf18527695f1332ec.tar.gz
COMPMID-3862: Add support for QASYMM8 LEAKY RELU activation

- LEAKY RELU activation is supported for the QASYMM8 data type
- vquantize on the NEON side has been modified to match the other backends (OpenCL and reference)

Change-Id: I194631225c8d4f3cc96027d64812ec2be2b4328a
Signed-off-by: Sang-Hoon Park <sang-hoon.park@arm.com>
Reviewed-on: https://review.mlplatform.org/c/ml/ComputeLibrary/+/4593
Tested-by: Arm Jenkins <bsgcomp@arm.com>
Reviewed-by: Georgios Pinitas <georgios.pinitas@arm.com>
Reviewed-by: Manuel Bottini <manuel.bottini@arm.com>
Comments-Addressed: Arm Jenkins <bsgcomp@arm.com>
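For context, quantized activations in the validation suite are typically checked against a float reference that dequantizes the input, applies the activation, and requantizes the result. The snippet below is a minimal sketch of that pattern for LEAKY RELU on QASYMM8; it is not the library's NEON kernel, and the function name, parameters (scale, offset, alpha) and the plain round-to-nearest requantization are assumptions made for illustration (the commit only states that vquantize was aligned with the OpenCL and reference backends).

#include <algorithm>
#include <cmath>
#include <cstdint>

// Illustrative sketch of the dequantize -> activate -> requantize pattern for
// LEAKY RELU on QASYMM8. Not the library's implementation; names, parameters
// and rounding behaviour are assumptions made for this example.
uint8_t leaky_relu_qasymm8(uint8_t in, float scale, int32_t offset, float alpha)
{
    // Dequantize: q -> (q - offset) * scale
    const float x = (static_cast<int32_t>(in) - offset) * scale;

    // LEAKY RELU in float: x for x >= 0, alpha * x otherwise
    const float y = (x >= 0.f) ? x : alpha * x;

    // Requantize with round-to-nearest and clamp to the QASYMM8 range [0, 255]
    const int32_t q = static_cast<int32_t>(std::lround(y / scale)) + offset;
    return static_cast<uint8_t>(std::min<int32_t>(std::max<int32_t>(q, 0), 255));
}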
Diffstat (limited to 'tests/validation/NEON/ActivationLayer.cpp')
-rw-r--r--  tests/validation/NEON/ActivationLayer.cpp | 18
1 file changed, 10 insertions(+), 8 deletions(-)
diff --git a/tests/validation/NEON/ActivationLayer.cpp b/tests/validation/NEON/ActivationLayer.cpp
index 0ef4590d7e..84ff288b2f 100644
--- a/tests/validation/NEON/ActivationLayer.cpp
+++ b/tests/validation/NEON/ActivationLayer.cpp
@@ -48,8 +48,7 @@ namespace validation
namespace
{
RelativeTolerance<float> tolerance_float_sqrt(0.0001f);
-
-
+
/** Define relative tolerance of the activation layer.
*
* @param[in] data_type The data type used.
@@ -234,12 +233,15 @@ template <typename T>
using NEActivationLayerQuantizedFixture = ActivationValidationQuantizedFixture<Tensor, Accessor, NEActivationLayer, T>;
/** Input data sets. */
-const auto QuantizedActivationFunctionsDataset = framework::dataset::make("ActivationFunction", { ActivationLayerInfo::ActivationFunction::LU_BOUNDED_RELU,
- ActivationLayerInfo::ActivationFunction::RELU,
- ActivationLayerInfo::ActivationFunction::BOUNDED_RELU,
- ActivationLayerInfo::ActivationFunction::LOGISTIC,
- ActivationLayerInfo::ActivationFunction::TANH
- });
+const auto QuantizedActivationFunctionsDataset = framework::dataset::make("ActivationFunction",
+{
+ ActivationLayerInfo::ActivationFunction::LU_BOUNDED_RELU,
+ ActivationLayerInfo::ActivationFunction::RELU,
+ ActivationLayerInfo::ActivationFunction::BOUNDED_RELU,
+ ActivationLayerInfo::ActivationFunction::LOGISTIC,
+ ActivationLayerInfo::ActivationFunction::TANH,
+ ActivationLayerInfo::ActivationFunction::LEAKY_RELU,
+});
const auto QuantizedActivationDataset = combine(combine(framework::dataset::make("InPlace", { false }),
concat(QuantizedActivationFunctionsDataset, framework::dataset::make("ActivationFunction", ActivationLayerInfo::ActivationFunction::HARD_SWISH))),
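Note that the dataset above lists only the activation functions; the quantized fixture is expected to pair each entry with alpha/beta values when it builds the ActivationLayerInfo passed to the layer. A hypothetical instantiation for the newly added entry might look like this (the alpha value 0.5f is illustrative, not taken from the test dataset):

#include "arm_compute/core/Types.h"

using namespace arm_compute;

// Hypothetical ActivationLayerInfo for the new quantized LEAKY RELU case;
// the alpha value is an illustrative choice for this example only.
const ActivationLayerInfo leaky_relu_info(ActivationLayerInfo::ActivationFunction::LEAKY_RELU, 0.5f);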