From 52ec3463086a12282d8b833521e9e32d1055c6a6 Mon Sep 17 00:00:00 2001
From: Francis Murtagh
Date: Tue, 19 Nov 2019 12:24:19 +0000
Subject: IVGCVSW-3697 Add check for ArgMinMax QAsymm8 to ClLayerSupport

* Enable Neon EndToEnd tests for ArgMinMax QAsymm8
* Enable Neon Layer tests for ArgMinMax QAsymm8

Signed-off-by: Francis Murtagh
Change-Id: Ifa7463ded4397cacb82fb3667006f08ecbe3cd32
---
 .../test/layerTests/ArgMinMaxTestImpl.cpp     | 30 +++++++++++--
 src/backends/cl/ClLayerSupport.cpp            |  5 +++
 src/backends/neon/test/NeonEndToEndTests.cpp  | 50 ++++++++++++++++++++++
 src/backends/neon/test/NeonLayerTests.cpp     |  7 +++
 4 files changed, 88 insertions(+), 4 deletions(-)

diff --git a/src/backends/backendsCommon/test/layerTests/ArgMinMaxTestImpl.cpp b/src/backends/backendsCommon/test/layerTests/ArgMinMaxTestImpl.cpp
index 4475fb7abf..18c9e54f25 100644
--- a/src/backends/backendsCommon/test/layerTests/ArgMinMaxTestImpl.cpp
+++ b/src/backends/backendsCommon/test/layerTests/ArgMinMaxTestImpl.cpp
@@ -70,7 +70,7 @@ LayerTestResult ArgMaxSimpleTest(
 
     armnn::TensorInfo inputTensorInfo(inputShape, ArmnnType);
 
-    if(armnn::IsQuantizedType())
+    if (armnn::IsQuantizedType())
     {
         inputTensorInfo.SetQuantizationScale(1.0f);
         inputTensorInfo.SetQuantizationOffset(0);
@@ -97,7 +97,7 @@ LayerTestResult ArgMinSimpleTest(
 
     armnn::TensorInfo inputTensorInfo(inputShape, ArmnnType);
 
-    if(armnn::IsQuantizedType())
+    if (armnn::IsQuantizedType())
     {
         inputTensorInfo.SetQuantizationScale(1.0f);
         inputTensorInfo.SetQuantizationOffset(0);
@@ -124,7 +124,7 @@ LayerTestResult ArgMinChannelTest(
 
     armnn::TensorInfo inputTensorInfo(inputShape, ArmnnType);
 
-    if(armnn::IsQuantizedType())
+    if (armnn::IsQuantizedType())
     {
         inputTensorInfo.SetQuantizationScale(1.0f);
         inputTensorInfo.SetQuantizationOffset(0);
@@ -159,7 +159,7 @@ LayerTestResult ArgMaxChannelTest(
 
     armnn::TensorInfo inputTensorInfo(inputShape, ArmnnType);
 
-    if(armnn::IsQuantizedType())
+    if (armnn::IsQuantizedType())
     {
         inputTensorInfo.SetQuantizationScale(1.0f);
         inputTensorInfo.SetQuantizationOffset(0);
@@ -195,6 +195,12 @@ LayerTestResult ArgMaxHeightTest(
     armnn::TensorInfo inputTensorInfo(inputShape, ArmnnType);
     armnn::TensorInfo outputTensorInfo(outputShape, armnn::DataType::Signed32);
 
+    if (armnn::IsQuantizedType())
+    {
+        inputTensorInfo.SetQuantizationScale(1.0f);
+        inputTensorInfo.SetQuantizationOffset(0);
+    }
+
     std::vector inputValues({ 1.0f, 2.0f, 3.0f, 4.0f,
                               5.0f, 6.0f, 7.0f, 8.0f,
@@ -224,6 +230,12 @@ LayerTestResult ArgMinWidthTest(
     armnn::TensorInfo inputTensorInfo(inputShape, ArmnnType);
     armnn::TensorInfo outputTensorInfo(outputShape, armnn::DataType::Signed32);
 
+    if (armnn::IsQuantizedType())
+    {
+        inputTensorInfo.SetQuantizationScale(1.0f);
+        inputTensorInfo.SetQuantizationOffset(0);
+    }
+
     std::vector inputValues({ 1.0f, 2.0f, 3.0f, 4.0f,
                               5.0f, 6.0f, 7.0f, 8.0f,
@@ -335,6 +347,11 @@ ArgMaxHeightTest(
     armnn::IWorkloadFactory& workloadFactory,
     const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager);
 
+template LayerTestResult
+ArgMaxHeightTest(
+    armnn::IWorkloadFactory& workloadFactory,
+    const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager);
+
 template LayerTestResult
 ArgMinWidthTest(
     armnn::IWorkloadFactory& workloadFactory,
@@ -344,3 +361,8 @@ template LayerTestResult
 ArgMinWidthTest(
     armnn::IWorkloadFactory& workloadFactory,
     const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager);
+
+template LayerTestResult
+ArgMinWidthTest(
+    armnn::IWorkloadFactory& workloadFactory,
+    const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager);
diff --git a/src/backends/cl/ClLayerSupport.cpp b/src/backends/cl/ClLayerSupport.cpp
index 8cbf9bd114..612af6be2c 100644
--- a/src/backends/cl/ClLayerSupport.cpp
+++ b/src/backends/cl/ClLayerSupport.cpp
@@ -184,6 +184,11 @@ bool ClLayerSupport::IsArgMinMaxSupported(const TensorInfo& input,
                                           const ArgMinMaxDescriptor& descriptor,
                                           Optional reasonIfUnsupported) const
 {
+    if (input.GetDataType() == DataType::QuantisedAsymm8)
+    {
+        return false;
+    }
+
     FORWARD_WORKLOAD_VALIDATE_FUNC(ClArgMinMaxWorkloadValidate,
                                    reasonIfUnsupported,
                                    input,
diff --git a/src/backends/neon/test/NeonEndToEndTests.cpp b/src/backends/neon/test/NeonEndToEndTests.cpp
index e2b25a14f3..e841821b57 100644
--- a/src/backends/neon/test/NeonEndToEndTests.cpp
+++ b/src/backends/neon/test/NeonEndToEndTests.cpp
@@ -454,4 +454,54 @@ BOOST_AUTO_TEST_CASE(NeonArgMinAxis3Test)
     ArgMinAxis3EndToEnd(defaultBackends);
 }
 
+BOOST_AUTO_TEST_CASE(NeonArgMaxSimpleTestQuantisedAsymm8)
+{
+    ArgMaxEndToEndSimple(defaultBackends);
+}
+
+BOOST_AUTO_TEST_CASE(NeonArgMinSimpleTestQuantisedAsymm8)
+{
+    ArgMinEndToEndSimple(defaultBackends);
+}
+
+BOOST_AUTO_TEST_CASE(NeonArgMaxAxis0TestQuantisedAsymm8)
+{
+    ArgMaxAxis0EndToEnd(defaultBackends);
+}
+
+BOOST_AUTO_TEST_CASE(NeonArgMinAxis0TestQuantisedAsymm8)
+{
+    ArgMinAxis0EndToEnd(defaultBackends);
+}
+
+BOOST_AUTO_TEST_CASE(NeonArgMaxAxis1TestQuantisedAsymm8)
+{
+    ArgMaxAxis1EndToEnd(defaultBackends);
+}
+
+BOOST_AUTO_TEST_CASE(NeonArgMinAxis1TestQuantisedAsymm8)
+{
+    ArgMinAxis1EndToEnd(defaultBackends);
+}
+
+BOOST_AUTO_TEST_CASE(NeonArgMaxAxis2TestQuantisedAsymm8)
+{
+    ArgMaxAxis2EndToEnd(defaultBackends);
+}
+
+BOOST_AUTO_TEST_CASE(NeonArgMinAxis2TestQuantisedAsymm8)
+{
+    ArgMinAxis2EndToEnd(defaultBackends);
+}
+
+BOOST_AUTO_TEST_CASE(NeonArgMaxAxis3TestQuantisedAsymm8)
+{
+    ArgMaxAxis3EndToEnd(defaultBackends);
+}
+
+BOOST_AUTO_TEST_CASE(NeonArgMinAxis3TestQuantisedAsymm8)
+{
+    ArgMinAxis3EndToEnd(defaultBackends);
+}
+
 BOOST_AUTO_TEST_SUITE_END()
diff --git a/src/backends/neon/test/NeonLayerTests.cpp b/src/backends/neon/test/NeonLayerTests.cpp
index cd9a55d3ee..26c55365cf 100644
--- a/src/backends/neon/test/NeonLayerTests.cpp
+++ b/src/backends/neon/test/NeonLayerTests.cpp
@@ -972,6 +972,13 @@ ARMNN_AUTO_TEST_CASE(ArgMaxChannel, ArgMaxChannelTest)
 ARMNN_AUTO_TEST_CASE(ArgMaxHeight, ArgMaxHeightTest)
 ARMNN_AUTO_TEST_CASE(ArgMinWidth, ArgMinWidthTest)
 
+ARMNN_AUTO_TEST_CASE(ArgMinQAsymm8, ArgMinSimpleTest)
+ARMNN_AUTO_TEST_CASE(ArgMaxQAsymm8, ArgMaxSimpleTest)
+ARMNN_AUTO_TEST_CASE(ArgMinChannelQAsymm8, ArgMinChannelTest)
+ARMNN_AUTO_TEST_CASE(ArgMaxChannelQAsymm8, ArgMaxChannelTest)
+ARMNN_AUTO_TEST_CASE(ArgMaxHeightQAsymm8, ArgMaxHeightTest)
+ARMNN_AUTO_TEST_CASE(ArgMinWidthQAsymm8, ArgMinWidthTest)
+
 #if defined(ARMNNREF_ENABLED)
 
 // The ARMNN_COMPARE_REF_AUTO_TEST_CASE and the ARMNN_COMPARE_REF_FIXTURE_TEST_CASE test units are not available
--
cgit v1.2.1
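
For readers skimming the patch, the functional change is the early-return guard added to ClLayerSupport::IsArgMinMaxSupported: the CL backend now reports ArgMinMax on QAsymm8 input as unsupported before any workload validation runs, so the optimizer can assign the layer to a backend that does support it. Below is a minimal, self-contained C++ sketch of that same guard pattern. The enum, struct, and function it defines are simplified stand-ins invented for illustration; they are not the real ArmNN types or API.

// Sketch of the "reject an unsupported data type early" pattern used by the
// patch. All names below are illustrative stand-ins, not ArmNN code.
#include <iostream>
#include <string>

enum class DataType
{
    Float32,
    Signed32,
    QuantisedAsymm8   // 8-bit asymmetric quantized type, as named in this patch's era
};

struct TensorInfo
{
    DataType dataType;
    DataType GetDataType() const { return dataType; }
};

// Mirrors the shape of the added check: bail out (optionally with a reason)
// before any backend-specific validation would run.
bool IsArgMinMaxSupported(const TensorInfo& input, std::string* reasonIfUnsupported)
{
    if (input.GetDataType() == DataType::QuantisedAsymm8)
    {
        if (reasonIfUnsupported)
        {
            *reasonIfUnsupported = "ArgMinMax on QuantisedAsymm8 is not supported by this backend";
        }
        return false;
    }
    // The real code forwards to a workload validation macro at this point;
    // this sketch simply reports success for every other data type.
    return true;
}

int main()
{
    std::string reason;
    TensorInfo quantised{DataType::QuantisedAsymm8};
    TensorInfo floating{DataType::Float32};

    std::cout << "QuantisedAsymm8 supported: " << IsArgMinMaxSupported(quantised, &reason)
              << " (" << reason << ")\n";
    std::cout << "Float32 supported: " << IsArgMinMaxSupported(floating, nullptr) << "\n";
    return 0;
}

Placing the guard ahead of the validation call keeps the rejection cheap and, in a multi-backend setup like the one this patch targets, lets a fallback backend pick up the layer instead.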