about summary refs log tree commit diff
path: root/tests
diff options
context:
space:
mode:
Diffstat (limited to 'tests')
-rw-r--r--tests/validation/CL/ActivationLayer.cpp6
-rw-r--r--tests/validation/NEON/ActivationLayer.cpp18
2 files changed, 14 insertions, 10 deletions
diff --git a/tests/validation/CL/ActivationLayer.cpp b/tests/validation/CL/ActivationLayer.cpp
index 9b725a44e7..fa95594157 100644
--- a/tests/validation/CL/ActivationLayer.cpp
+++ b/tests/validation/CL/ActivationLayer.cpp
@@ -168,8 +168,10 @@ template <typename T>
using CLActivationLayerQuantizedFixture = ActivationValidationQuantizedFixture<CLTensor, CLAccessor, CLActivationLayer, T>;
const auto QuantizedActivationDataset8 = combine(combine(framework::dataset::make("InPlace", { false }),
- concat(datasets::ActivationFunctionsQuantized(), framework::dataset::make("ActivationFunction", ActivationLayerInfo::ActivationFunction::HARD_SWISH))),
- framework::dataset::make("AlphaBeta", { 0.5f, 1.f }));
+ concat(datasets::ActivationFunctionsQuantized(),
+ framework::dataset::make("ActivationFunction",
+{ ActivationLayerInfo::ActivationFunction::HARD_SWISH, ActivationLayerInfo::ActivationFunction::LEAKY_RELU }))),
+framework::dataset::make("AlphaBeta", { 0.5f, 1.f }));
const auto QuantizedActivationDataset16 = combine(combine(framework::dataset::make("InPlace", { false }),
datasets::ActivationFunctionsQuantized()),
diff --git a/tests/validation/NEON/ActivationLayer.cpp b/tests/validation/NEON/ActivationLayer.cpp
index 0ef4590d7e..84ff288b2f 100644
--- a/tests/validation/NEON/ActivationLayer.cpp
+++ b/tests/validation/NEON/ActivationLayer.cpp
@@ -48,8 +48,7 @@ namespace validation
namespace
{
RelativeTolerance<float> tolerance_float_sqrt(0.0001f);
-
-
+
/** Define relative tolerance of the activation layer.
*
* @param[in] data_type The data type used.
@@ -234,12 +233,15 @@ template <typename T>
using NEActivationLayerQuantizedFixture = ActivationValidationQuantizedFixture<Tensor, Accessor, NEActivationLayer, T>;
/** Input data sets. */
-const auto QuantizedActivationFunctionsDataset = framework::dataset::make("ActivationFunction", { ActivationLayerInfo::ActivationFunction::LU_BOUNDED_RELU,
- ActivationLayerInfo::ActivationFunction::RELU,
- ActivationLayerInfo::ActivationFunction::BOUNDED_RELU,
- ActivationLayerInfo::ActivationFunction::LOGISTIC,
- ActivationLayerInfo::ActivationFunction::TANH
- });
+const auto QuantizedActivationFunctionsDataset = framework::dataset::make("ActivationFunction",
+{
+ ActivationLayerInfo::ActivationFunction::LU_BOUNDED_RELU,
+ ActivationLayerInfo::ActivationFunction::RELU,
+ ActivationLayerInfo::ActivationFunction::BOUNDED_RELU,
+ ActivationLayerInfo::ActivationFunction::LOGISTIC,
+ ActivationLayerInfo::ActivationFunction::TANH,
+ ActivationLayerInfo::ActivationFunction::LEAKY_RELU,
+});
const auto QuantizedActivationDataset = combine(combine(framework::dataset::make("InPlace", { false }),
concat(QuantizedActivationFunctionsDataset, framework::dataset::make("ActivationFunction", ActivationLayerInfo::ActivationFunction::HARD_SWISH))),