author    Ellen Norris-Thompson <ellen.norris-thompson@arm.com>  2019-06-17 11:32:49 +0100
committer Matteo Martincigh <matteo.martincigh@arm.com>          2019-06-17 14:08:59 +0000
commit    3cb85f329c2128c671dae1ecfdf3ab136f254d84 (patch)
tree      32df09cd663441cc98c2c63c9b571f2622b94f74
parent    a7240e7fe5734b777b7776c5869ee49a22f3f528 (diff)
download  armnn-3cb85f329c2128c671dae1ecfdf3ab136f254d84.tar.gz
IVGCVSW-3219: Add QSymm16 support to the ResizeBilinear Reference workload
* Added CreateWorkload and LayerTests for QSymm16
* Updated the ResizeBilinearQueueDescriptor::Validate function
* Reimplemented RefLayerSupport::IsResizeBilinearSupported

Signed-off-by: Ellen Norris-Thompson <ellen.norris-thompson@arm.com>
Change-Id: Id9d14d3b41d26ac3d51227ab248ce8cca9dc9969
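Note on the validation pattern (not part of the patch): both the updated Validate() and the reimplemented IsResizeBilinearSupported() reduce to two checks, namely that the input data type is in a supported set and that the output type equals the input type. The standalone C++ sketch below illustrates that idea under assumed names (DataType and CheckResizeBilinearTypes are hypothetical stand-ins, not the ArmNN ValidateDataTypes/CheckSupportRule helpers); the supported-type list mirrors the one added to Validate(), while the RefLayerSupport version in this patch omits Float16.

// Illustrative sketch only -- not the ArmNN API. DataType and
// CheckResizeBilinearTypes are hypothetical stand-ins for the real
// ValidateDataTypes/CheckSupportRule helpers used in this patch.
#include <algorithm>
#include <array>
#include <iostream>
#include <string>

enum class DataType { Float16, Float32, QuantisedAsymm8, QuantisedSymm16 };

bool CheckResizeBilinearTypes(DataType input, DataType output, std::string& reason)
{
    // Same list the patch adds to ResizeBilinearQueueDescriptor::Validate().
    const std::array<DataType, 4> supportedTypes =
    {
        DataType::Float16,
        DataType::Float32,
        DataType::QuantisedAsymm8,
        DataType::QuantisedSymm16
    };

    // Rule 1: the input type must be one of the supported types.
    if (std::find(supportedTypes.begin(), supportedTypes.end(), input) == supportedTypes.end())
    {
        reason = "ResizeBilinear: input type not supported";
        return false;
    }

    // Rule 2: the output type must match the input type.
    if (output != input)
    {
        reason = "ResizeBilinear: input and output types not matching";
        return false;
    }

    return true;
}

int main()
{
    std::string reason;

    // QSymm16 in and out is now accepted.
    std::cout << CheckResizeBilinearTypes(DataType::QuantisedSymm16,
                                          DataType::QuantisedSymm16, reason) << "\n"; // 1

    // A QSymm16 -> Float32 mismatch fails the second rule.
    std::cout << CheckResizeBilinearTypes(DataType::QuantisedSymm16,
                                          DataType::Float32, reason) << "\n";         // 0
    std::cout << reason << "\n";
    return 0;
}

Unlike this early-return sketch, the actual IsResizeBilinearSupported change accumulates the rule results with "supported &=" so every failing rule can append its message to reasonIfUnsupported.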
-rw-r--r--  src/backends/backendsCommon/WorkloadData.cpp            | 16
-rw-r--r--  src/backends/backendsCommon/test/LayerTests.hpp         | 60
-rw-r--r--  src/backends/reference/RefLayerSupport.cpp              | 23
-rw-r--r--  src/backends/reference/test/RefCreateWorkloadTests.cpp  |  5
-rw-r--r--  src/backends/reference/test/RefLayerTests.cpp           | 30
5 files changed, 69 insertions(+), 65 deletions(-)
diff --git a/src/backends/backendsCommon/WorkloadData.cpp b/src/backends/backendsCommon/WorkloadData.cpp
index 1505078b77..7c9d4ac58c 100644
--- a/src/backends/backendsCommon/WorkloadData.cpp
+++ b/src/backends/backendsCommon/WorkloadData.cpp
@@ -914,6 +914,22 @@ void ResizeBilinearQueueDescriptor::Validate(const WorkloadInfo& workloadInfo) c
ValidateTensorNumDimensions(workloadInfo.m_InputTensorInfos[0], "ResizeBilinearQueueDescriptor", 4, "input");
ValidateTensorNumDimensions(workloadInfo.m_OutputTensorInfos[0], "ResizeBilinearQueueDescriptor", 4, "output");
+ std::vector<DataType> supportedTypes =
+ {
+ DataType::Float16,
+ DataType::Float32,
+ DataType::QuantisedAsymm8,
+ DataType::QuantisedSymm16
+ };
+
+ ValidateDataTypes(workloadInfo.m_InputTensorInfos[0],
+ supportedTypes,
+ "ResizeBilinearQueueDescriptor");
+
+ ValidateDataTypes(workloadInfo.m_OutputTensorInfos[0],
+ {workloadInfo.m_InputTensorInfos[0].GetDataType()},
+ "ResizeBilinearQueueDescriptor");
+
// ResizeBilinear only changes width and height: batch and channel count must match.
{
const unsigned int inputBatchSize = workloadInfo.m_InputTensorInfos[0].GetShape()[0];
diff --git a/src/backends/backendsCommon/test/LayerTests.hpp b/src/backends/backendsCommon/test/LayerTests.hpp
index fab1ad83c1..704e88ef6b 100644
--- a/src/backends/backendsCommon/test/LayerTests.hpp
+++ b/src/backends/backendsCommon/test/LayerTests.hpp
@@ -2443,18 +2443,6 @@ LayerTestResult<T, 4> ResizeBilinearNopTest(
return result;
}
-template LayerTestResult<typename armnn::ResolveType<armnn::DataType::Float32>, 4>
-ResizeBilinearNopTest<armnn::DataType::Float32>(
- armnn::IWorkloadFactory& workloadFactory,
- const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager,
- const armnn::DataLayout dataLayout);
-
-template LayerTestResult<typename armnn::ResolveType<armnn::DataType::QuantisedAsymm8>, 4>
-ResizeBilinearNopTest<armnn::DataType::QuantisedAsymm8>(
- armnn::IWorkloadFactory& workloadFactory,
- const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager,
- const armnn::DataLayout dataLayout);
-
template<armnn::DataType ArmnnType, typename T>
LayerTestResult<T, 4> SimpleResizeBilinearTest(
armnn::IWorkloadFactory& workloadFactory,
@@ -2553,18 +2541,6 @@ LayerTestResult<T, 4> SimpleResizeBilinearTest(
return result;
}
-template LayerTestResult<typename armnn::ResolveType<armnn::DataType::Float32>, 4>
-SimpleResizeBilinearTest<armnn::DataType::Float32>(
- armnn::IWorkloadFactory& workloadFactory,
- const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager,
- const armnn::DataLayout dataLayout);
-
-template LayerTestResult<typename armnn::ResolveType<armnn::DataType::QuantisedAsymm8>, 4>
-SimpleResizeBilinearTest<armnn::DataType::QuantisedAsymm8>(
- armnn::IWorkloadFactory& workloadFactory,
- const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager,
- const armnn::DataLayout dataLayout);
-
template<armnn::DataType ArmnnType, typename T>
LayerTestResult<T, 4> ResizeBilinearSqMinTest(
armnn::IWorkloadFactory& workloadFactory,
@@ -2666,18 +2642,6 @@ LayerTestResult<T, 4> ResizeBilinearSqMinTest(
return result;
}
-template LayerTestResult<typename armnn::ResolveType<armnn::DataType::Float32>, 4>
-ResizeBilinearSqMinTest<armnn::DataType::Float32>(
- armnn::IWorkloadFactory& workloadFactory,
- const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager,
- const armnn::DataLayout dataLayout);
-
-template LayerTestResult<typename armnn::ResolveType<armnn::DataType::QuantisedAsymm8>, 4>
-ResizeBilinearSqMinTest<armnn::DataType::QuantisedAsymm8>(
- armnn::IWorkloadFactory& workloadFactory,
- const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager,
- const armnn::DataLayout dataLayout);
-
template<armnn::DataType ArmnnType, typename T>
LayerTestResult<T, 4> ResizeBilinearMinTest(
armnn::IWorkloadFactory& workloadFactory,
@@ -2774,18 +2738,6 @@ LayerTestResult<T, 4> ResizeBilinearMinTest(
return result;
}
-template LayerTestResult<typename armnn::ResolveType<armnn::DataType::Float32>, 4>
-ResizeBilinearMinTest<armnn::DataType::Float32>(
- armnn::IWorkloadFactory& workloadFactory,
- const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager,
- const armnn::DataLayout dataLayout);
-
-template LayerTestResult<typename armnn::ResolveType<armnn::DataType::QuantisedAsymm8>, 4>
-ResizeBilinearMinTest<armnn::DataType::QuantisedAsymm8>(
- armnn::IWorkloadFactory& workloadFactory,
- const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager,
- const armnn::DataLayout dataLayout);
-
template<armnn::DataType ArmnnType, typename T>
LayerTestResult<T, 4> ResizeBilinearMagTest(
armnn::IWorkloadFactory& workloadFactory,
@@ -2889,18 +2841,6 @@ LayerTestResult<T, 4> ResizeBilinearMagTest(
return result;
}
-template LayerTestResult<typename armnn::ResolveType<armnn::DataType::Float32>, 4>
-ResizeBilinearMagTest<armnn::DataType::Float32>(
- armnn::IWorkloadFactory& workloadFactory,
- const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager,
- const armnn::DataLayout dataLayout);
-
-template LayerTestResult<typename armnn::ResolveType<armnn::DataType::QuantisedAsymm8>, 4>
-ResizeBilinearMagTest<armnn::DataType::QuantisedAsymm8>(
- armnn::IWorkloadFactory& workloadFactory,
- const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager,
- const armnn::DataLayout dataLayout);
-
template<armnn::DataType ArmnnType, typename T, std::size_t InputDim, std::size_t OutputDim>
LayerTestResult<T, OutputDim> MeanTestHelper(
armnn::IWorkloadFactory& workloadFactory,
diff --git a/src/backends/reference/RefLayerSupport.cpp b/src/backends/reference/RefLayerSupport.cpp
index a25338f906..919dd5fd6c 100644
--- a/src/backends/reference/RefLayerSupport.cpp
+++ b/src/backends/reference/RefLayerSupport.cpp
@@ -1183,11 +1183,24 @@ bool RefLayerSupport::IsResizeBilinearSupported(const TensorInfo& input,
const TensorInfo& output,
Optional<std::string&> reasonIfUnsupported) const
{
- ignore_unused(output);
- return IsSupportedForDataTypeRef(reasonIfUnsupported,
- input.GetDataType(),
- &TrueFunc<>,
- &TrueFunc<>);
+ bool supported = true;
+ std::array<DataType,3> supportedTypes =
+ {
+ DataType::Float32,
+ DataType::QuantisedAsymm8,
+ DataType::QuantisedSymm16
+ };
+
+ supported &= CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
+ "Reference ResizeBilinear: input type not supported");
+
+ supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
+ "Reference ResizeBilinear: output type not supported");
+
+ supported &= CheckSupportRule(TypesAreEqual(input, output), reasonIfUnsupported,
+ "Reference ResizeBilinear: input and output types not matching");
+
+ return supported;
}
bool RefLayerSupport::IsRsqrtSupported(const TensorInfo& input,
diff --git a/src/backends/reference/test/RefCreateWorkloadTests.cpp b/src/backends/reference/test/RefCreateWorkloadTests.cpp
index 7816d5b17f..e541692654 100644
--- a/src/backends/reference/test/RefCreateWorkloadTests.cpp
+++ b/src/backends/reference/test/RefCreateWorkloadTests.cpp
@@ -651,6 +651,11 @@ BOOST_AUTO_TEST_CASE(CreateResizeBilinearUint8)
RefCreateResizeBilinearTest<RefResizeBilinearWorkload, armnn::DataType::QuantisedAsymm8>(DataLayout::NCHW);
}
+BOOST_AUTO_TEST_CASE(CreateResizeBilinearQuantisedSymm16)
+{
+ RefCreateResizeBilinearTest<RefResizeBilinearWorkload, armnn::DataType::QuantisedSymm16>(DataLayout::NCHW);
+}
+
BOOST_AUTO_TEST_CASE(CreateResizeBilinearFloat32Nhwc)
{
RefCreateResizeBilinearTest<RefResizeBilinearWorkload, armnn::DataType::Float32>(DataLayout::NHWC);
diff --git a/src/backends/reference/test/RefLayerTests.cpp b/src/backends/reference/test/RefLayerTests.cpp
index 155da246bd..7540f1d4c1 100644
--- a/src/backends/reference/test/RefLayerTests.cpp
+++ b/src/backends/reference/test/RefLayerTests.cpp
@@ -407,30 +407,45 @@ ARMNN_AUTO_TEST_CASE(SimpleResizeBilinear,
ARMNN_AUTO_TEST_CASE(SimpleResizeBilinearUint8,
SimpleResizeBilinearTest<armnn::DataType::QuantisedAsymm8>,
armnn::DataLayout::NCHW)
+ARMNN_AUTO_TEST_CASE(SimpleResizeBilinearUint16,
+ SimpleResizeBilinearTest<armnn::DataType::QuantisedSymm16>,
+ armnn::DataLayout::NCHW)
ARMNN_AUTO_TEST_CASE(ResizeBilinearNop,
ResizeBilinearNopTest<armnn::DataType::Float32>,
armnn::DataLayout::NCHW)
ARMNN_AUTO_TEST_CASE(ResizeBilinearNopUint8,
ResizeBilinearNopTest<armnn::DataType::QuantisedAsymm8>,
armnn::DataLayout::NCHW)
+ARMNN_AUTO_TEST_CASE(ResizeBilinearNopUint16,
+ ResizeBilinearNopTest<armnn::DataType::QuantisedSymm16>,
+ armnn::DataLayout::NCHW)
ARMNN_AUTO_TEST_CASE(ResizeBilinearSqMin,
ResizeBilinearSqMinTest<armnn::DataType::Float32>,
armnn::DataLayout::NCHW)
ARMNN_AUTO_TEST_CASE(ResizeBilinearSqMinUint8,
ResizeBilinearSqMinTest<armnn::DataType::QuantisedAsymm8>,
armnn::DataLayout::NCHW)
+ARMNN_AUTO_TEST_CASE(ResizeBilinearSqMinUint16,
+ ResizeBilinearSqMinTest<armnn::DataType::QuantisedSymm16>,
+ armnn::DataLayout::NCHW)
ARMNN_AUTO_TEST_CASE(ResizeBilinearMin,
ResizeBilinearMinTest<armnn::DataType::Float32>,
armnn::DataLayout::NCHW)
ARMNN_AUTO_TEST_CASE(ResizeBilinearMinUint8,
ResizeBilinearMinTest<armnn::DataType::QuantisedAsymm8>,
armnn::DataLayout::NCHW)
+ARMNN_AUTO_TEST_CASE(ResizeBilinearMinUint16,
+ ResizeBilinearMinTest<armnn::DataType::QuantisedSymm16>,
+ armnn::DataLayout::NCHW)
ARMNN_AUTO_TEST_CASE(ResizeBilinearMag,
ResizeBilinearMagTest<armnn::DataType::Float32>,
armnn::DataLayout::NCHW)
ARMNN_AUTO_TEST_CASE(ResizeBilinearMagUint8,
ResizeBilinearMagTest<armnn::DataType::QuantisedAsymm8>,
armnn::DataLayout::NCHW)
+ARMNN_AUTO_TEST_CASE(ResizeBilinearMagUint16,
+ ResizeBilinearMagTest<armnn::DataType::QuantisedSymm16>,
+ armnn::DataLayout::NCHW)
// Resize Bilinear - NHWC
ARMNN_AUTO_TEST_CASE(ResizeBilinearNopNhwc,
@@ -439,30 +454,45 @@ ARMNN_AUTO_TEST_CASE(ResizeBilinearNopNhwc,
ARMNN_AUTO_TEST_CASE(ResizeBilinearNopUint8Nhwc,
ResizeBilinearNopTest<armnn::DataType::QuantisedAsymm8>,
armnn::DataLayout::NHWC)
+ARMNN_AUTO_TEST_CASE(ResizeBilinearNopUint16Nhwc,
+ ResizeBilinearNopTest<armnn::DataType::QuantisedSymm16>,
+ armnn::DataLayout::NHWC)
ARMNN_AUTO_TEST_CASE(SimpleResizeBilinearNhwc,
SimpleResizeBilinearTest<armnn::DataType::Float32>,
armnn::DataLayout::NHWC)
ARMNN_AUTO_TEST_CASE(SimpleResizeBilinearUint8Nhwc,
SimpleResizeBilinearTest<armnn::DataType::QuantisedAsymm8>,
armnn::DataLayout::NHWC)
+ARMNN_AUTO_TEST_CASE(SimpleResizeBilinearUint16Nhwc,
+ SimpleResizeBilinearTest<armnn::DataType::QuantisedSymm16>,
+ armnn::DataLayout::NHWC)
ARMNN_AUTO_TEST_CASE(ResizeBilinearSqMinNhwc,
ResizeBilinearSqMinTest<armnn::DataType::Float32>,
armnn::DataLayout::NHWC)
ARMNN_AUTO_TEST_CASE(ResizeBilinearSqMinUint8Nhwc,
ResizeBilinearSqMinTest<armnn::DataType::QuantisedAsymm8>,
armnn::DataLayout::NHWC)
+ARMNN_AUTO_TEST_CASE(ResizeBilinearSqMinUint16Nhwc,
+ ResizeBilinearSqMinTest<armnn::DataType::QuantisedSymm16>,
+ armnn::DataLayout::NHWC)
ARMNN_AUTO_TEST_CASE(ResizeBilinearMinNhwc,
ResizeBilinearMinTest<armnn::DataType::Float32>,
armnn::DataLayout::NHWC)
ARMNN_AUTO_TEST_CASE(ResizeBilinearMinUint8Nhwc,
ResizeBilinearMinTest<armnn::DataType::QuantisedAsymm8>,
armnn::DataLayout::NHWC)
+ARMNN_AUTO_TEST_CASE(ResizeBilinearMinUint16Nhwc,
+ ResizeBilinearMinTest<armnn::DataType::QuantisedSymm16>,
+ armnn::DataLayout::NHWC)
ARMNN_AUTO_TEST_CASE(ResizeBilinearMagNhwc,
ResizeBilinearMagTest<armnn::DataType::Float32>,
armnn::DataLayout::NHWC)
ARMNN_AUTO_TEST_CASE(ResizeBilinearMagUint8Nhwc,
ResizeBilinearMagTest<armnn::DataType::QuantisedAsymm8>,
armnn::DataLayout::NHWC)
+ARMNN_AUTO_TEST_CASE(ResizeBilinearMagUint16Nhwc,
+ ResizeBilinearMagTest<armnn::DataType::QuantisedSymm16>,
+ armnn::DataLayout::NHWC)
// Fake Quantization
ARMNN_AUTO_TEST_CASE(FakeQuantization, FakeQuantizationTest)