aboutsummaryrefslogtreecommitdiff
path: root/tests/validation/CL/ActivationLayer.cpp
diff options
context:
space:
mode:
author    Sang-Hoon Park <sang-hoon.park@arm.com>  2020-11-25 11:46:03 +0000
committer Sang-Hoon Park <sang-hoon.park@arm.com>  2020-12-02 10:18:46 +0000
commitadd8e815ea94c8f8e6b1c9faf18527695f1332ec (patch)
tree5941064344b426d12bc76b2fba3d0c631e796088 /tests/validation/CL/ActivationLayer.cpp
parent4ffc42afafc8e6eee9917ac27b4bc510973335bf (diff)
downloadComputeLibrary-add8e815ea94c8f8e6b1c9faf18527695f1332ec.tar.gz
COMPMID-3862: Add support QASYMM8 LEAKY RELU activation
- LEAKY RELU activation is supported for QASYMM8 data type
- vquantize on NEON side has been modified to match with other backends (OpenCL and reference)

Change-Id: I194631225c8d4f3cc96027d64812ec2be2b4328a
Signed-off-by: Sang-Hoon Park <sang-hoon.park@arm.com>
Reviewed-on: https://review.mlplatform.org/c/ml/ComputeLibrary/+/4593
Tested-by: Arm Jenkins <bsgcomp@arm.com>
Reviewed-by: Georgios Pinitas <georgios.pinitas@arm.com>
Reviewed-by: Manuel Bottini <manuel.bottini@arm.com>
Comments-Addressed: Arm Jenkins <bsgcomp@arm.com>
Diffstat (limited to 'tests/validation/CL/ActivationLayer.cpp')
-rw-r--r--  tests/validation/CL/ActivationLayer.cpp  6
1 file changed, 4 insertions(+), 2 deletions(-)
diff --git a/tests/validation/CL/ActivationLayer.cpp b/tests/validation/CL/ActivationLayer.cpp
index 9b725a44e7..fa95594157 100644
--- a/tests/validation/CL/ActivationLayer.cpp
+++ b/tests/validation/CL/ActivationLayer.cpp
@@ -168,8 +168,10 @@ template <typename T>
using CLActivationLayerQuantizedFixture = ActivationValidationQuantizedFixture<CLTensor, CLAccessor, CLActivationLayer, T>;
const auto QuantizedActivationDataset8 = combine(combine(framework::dataset::make("InPlace", { false }),
- concat(datasets::ActivationFunctionsQuantized(), framework::dataset::make("ActivationFunction", ActivationLayerInfo::ActivationFunction::HARD_SWISH))),
- framework::dataset::make("AlphaBeta", { 0.5f, 1.f }));
+ concat(datasets::ActivationFunctionsQuantized(),
+ framework::dataset::make("ActivationFunction",
+{ ActivationLayerInfo::ActivationFunction::HARD_SWISH, ActivationLayerInfo::ActivationFunction::LEAKY_RELU }))),
+framework::dataset::make("AlphaBeta", { 0.5f, 1.f }));
const auto QuantizedActivationDataset16 = combine(combine(framework::dataset::make("InPlace", { false }),
datasets::ActivationFunctionsQuantized()),