From add8e815ea94c8f8e6b1c9faf18527695f1332ec Mon Sep 17 00:00:00 2001
From: Sang-Hoon Park
Date: Wed, 25 Nov 2020 11:46:03 +0000
Subject: COMPMID-3862: Add support QASYMM8 LEAKY RELU activation

- LEAKY RELU activation is supported for QASYMM8 data type
- vquantize on NEON side has been modified to match with other
  backends (OpenCL and reference)

Change-Id: I194631225c8d4f3cc96027d64812ec2be2b4328a
Signed-off-by: Sang-Hoon Park
Reviewed-on: https://review.mlplatform.org/c/ml/ComputeLibrary/+/4593
Tested-by: Arm Jenkins
Reviewed-by: Georgios Pinitas
Reviewed-by: Manuel Bottini
Comments-Addressed: Arm Jenkins
---
 tests/validation/CL/ActivationLayer.cpp | 6 ++++--
 1 file changed, 4 insertions(+), 2 deletions(-)

(limited to 'tests/validation/CL/ActivationLayer.cpp')

diff --git a/tests/validation/CL/ActivationLayer.cpp b/tests/validation/CL/ActivationLayer.cpp
index 9b725a44e7..fa95594157 100644
--- a/tests/validation/CL/ActivationLayer.cpp
+++ b/tests/validation/CL/ActivationLayer.cpp
@@ -168,8 +168,10 @@ template using CLActivationLayerQuantizedFixture = ActivationValidationQuantizedFixture;
 const auto QuantizedActivationDataset8 = combine(combine(framework::dataset::make("InPlace", { false }),
-                                                         concat(datasets::ActivationFunctionsQuantized(), framework::dataset::make("ActivationFunction", ActivationLayerInfo::ActivationFunction::HARD_SWISH))),
-                                                 framework::dataset::make("AlphaBeta", { 0.5f, 1.f }));
+                                                         concat(datasets::ActivationFunctionsQuantized(),
+                                                                framework::dataset::make("ActivationFunction",
+{ ActivationLayerInfo::ActivationFunction::HARD_SWISH, ActivationLayerInfo::ActivationFunction::LEAKY_RELU }))),
+framework::dataset::make("AlphaBeta", { 0.5f, 1.f }));
 
 const auto QuantizedActivationDataset16 = combine(combine(framework::dataset::make("InPlace", { false }),
                                                           datasets::ActivationFunctionsQuantized()),
--
cgit v1.2.1