From c9573f35c4267aa55648df4a134ebec82c5af93b Mon Sep 17 00:00:00 2001
From: giuros01
Date: Thu, 20 Jun 2019 10:30:17 +0100
Subject: COMPMID-2407: Add (logistic and tanh) activation support for QSYMM16 for NEON

Change-Id: Ib89c9cfe12975e51d1710af736c73ce79e667363
Signed-off-by: giuros01
Reviewed-on: https://review.mlplatform.org/c/1412
Comments-Addressed: Arm Jenkins
Reviewed-by: Manuel Bottini
Tested-by: Arm Jenkins
Reviewed-by: Georgios Pinitas
---
 tests/validation/NEON/ActivationLayer.cpp | 29 ++++++++++++++++++++++++++++-
 1 file changed, 28 insertions(+), 1 deletion(-)

diff --git a/tests/validation/NEON/ActivationLayer.cpp b/tests/validation/NEON/ActivationLayer.cpp
index a5030b94b7..1174a055c7 100644
--- a/tests/validation/NEON/ActivationLayer.cpp
+++ b/tests/validation/NEON/ActivationLayer.cpp
@@ -104,6 +104,8 @@ constexpr AbsoluteTolerance<uint8_t> tolerance_qasymm8(0);
 constexpr AbsoluteTolerance<uint8_t> tolerance_qasymm8(1);
 #endif // defined(__aarch64__)
 
+constexpr AbsoluteTolerance<int16_t> tolerance_qsymm16(1);
+
 /** CNN data types */
 const auto CNNDataTypes = framework::dataset::make("DataType",
 {
@@ -233,7 +235,6 @@ const auto QuantizedActivationFunctionsDataset = framework::dataset::make("Activ
                                                                                        ActivationLayerInfo::ActivationFunction::LOGISTIC,
                                                                                        ActivationLayerInfo::ActivationFunction::TANH
                                                                                      });
-
 const auto QuantizedActivationDataset = combine(combine(framework::dataset::make("InPlace", { false }), QuantizedActivationFunctionsDataset),
                                                 framework::dataset::make("AlphaBeta", { 0.5f, 1.f }));
 
@@ -256,6 +257,32 @@ FIXTURE_DATA_TEST_CASE(RunLarge, NEActivationLayerQuantizedFixture<uint8_t>, fra
     validate(Accessor(_target), _reference, tolerance_qasymm8);
 }
 TEST_SUITE_END() // QASYMM8
+
+/** Input data sets. */
+const auto Int16QuantizedActivationFunctionsDataset = framework::dataset::make("ActivationFunction", { ActivationLayerInfo::ActivationFunction::LOGISTIC,
+                                                                                                       ActivationLayerInfo::ActivationFunction::TANH
+                                                                                                     });
+const auto Int16QuantizedActivationDataset = combine(combine(framework::dataset::make("InPlace", { false }), Int16QuantizedActivationFunctionsDataset),
+                                                     framework::dataset::make("AlphaBeta", { 0.5f, 1.f }));
+
+TEST_SUITE(QSYMM16)
+FIXTURE_DATA_TEST_CASE(RunSmall, NEActivationLayerQuantizedFixture<int16_t>, framework::DatasetMode::PRECOMMIT, combine(combine(combine(datasets::SmallShapes(), Int16QuantizedActivationDataset),
+                                                                                                                        framework::dataset::make("DataType",
+                                                                                                                                                 DataType::QSYMM16)),
+                                                                                                                        framework::dataset::make("QuantizationInfo", { QuantizationInfo(1.f / 32768.f, 0.f) })))
+{
+    // Validate output
+    validate(Accessor(_target), _reference, tolerance_qsymm16);
+}
+FIXTURE_DATA_TEST_CASE(RunLarge, NEActivationLayerQuantizedFixture<int16_t>, framework::DatasetMode::NIGHTLY, combine(combine(combine(datasets::LargeShapes(), Int16QuantizedActivationDataset),
+                                                                                                                      framework::dataset::make("DataType",
+                                                                                                                                               DataType::QSYMM16)),
+                                                                                                                      framework::dataset::make("QuantizationInfo", { QuantizationInfo(1.f / 32768.f, 0.f) })))
+{
+    // Validate output
+    validate(Accessor(_target), _reference, tolerance_qsymm16);
+}
+TEST_SUITE_END() // QSYMM16
 TEST_SUITE_END() // Quantized
 TEST_SUITE_END() // ActivationLayer
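
A note on what these tests exercise: QSYMM16 stores a real value as real = scale * q, where q is an int16_t and the zero point is fixed at 0. With the scale of 1.f / 32768.f used above, the representable range is [-1, 1), which covers the codomains of both tanh ([-1, 1]) and logistic ([0, 1]). The sketch below is standalone C++, not code from this patch; it mirrors the reference behaviour the fixture compares against (dequantize, apply the activation in float, requantize), and tolerance_qsymm16(1) then allows the NEON kernel to differ from that reference by at most one quantized step.

    #include <algorithm>
    #include <cmath>
    #include <cstdint>
    #include <iostream>

    // QSYMM16: real = scale * q, with q an int16_t and zero point fixed at 0.
    constexpr float scale = 1.f / 32768.f; // same scale the tests above use

    int16_t quantize_qsymm16(float x)
    {
        // Round to nearest and saturate to the int16_t range.
        const float q = std::round(x / scale);
        return static_cast<int16_t>(std::clamp(q, -32768.f, 32767.f));
    }

    float dequantize_qsymm16(int16_t q)
    {
        return static_cast<float>(q) * scale;
    }

    int main()
    {
        // Float reference for one element: dequantize -> tanh -> requantize.
        const int16_t in  = quantize_qsymm16(0.25f);
        const int16_t out = quantize_qsymm16(std::tanh(dequantize_qsymm16(in)));
        std::cout << "tanh(0.25) ~ " << dequantize_qsymm16(out) << '\n';
    }

The same scheme applies to LOGISTIC, whose outputs in [0, 1] also fit the representable range without clipping.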
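For readers who want to exercise the new kernels outside the test framework, here is a minimal sketch of driving NEActivationLayer on QSYMM16 tensors through the public runtime API. The tensor shape is a hypothetical placeholder and the fill step is elided; header paths and constructor signatures are quoted from memory of this era of the library and may differ between Compute Library versions, so treat this as an illustration rather than code from this change.

    #include "arm_compute/core/Types.h"
    #include "arm_compute/runtime/NEON/functions/NEActivationLayer.h"
    #include "arm_compute/runtime/Tensor.h"

    using namespace arm_compute;

    int main()
    {
        // Hypothetical shape; any shape supported by the layer works.
        const TensorShape      shape(27U, 13U, 2U);
        const QuantizationInfo qinfo(1.f / 32768.f, 0);

        Tensor src, dst;
        src.allocator()->init(TensorInfo(shape, 1, DataType::QSYMM16, qinfo));
        dst.allocator()->init(TensorInfo(shape, 1, DataType::QSYMM16, qinfo));

        // Configure before allocating, as is usual for ACL functions.
        NEActivationLayer act;
        act.configure(&src, &dst, ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::TANH));

        src.allocator()->allocate();
        dst.allocator()->allocate();

        // ... fill src with quantized int16 data here ...

        act.run(); // executes the NEON activation kernel
    }

Using an output QuantizationInfo of (1.f / 32768.f, 0) for TANH or LOGISTIC matches the setup the tests above validate.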