Diffstat (limited to 'src/backends/backendsCommon/test/layerTests/QuantizeTestImpl.cpp')
-rw-r--r--  src/backends/backendsCommon/test/layerTests/QuantizeTestImpl.cpp | 42
1 file changed, 25 insertions(+), 17 deletions(-)
diff --git a/src/backends/backendsCommon/test/layerTests/QuantizeTestImpl.cpp b/src/backends/backendsCommon/test/layerTests/QuantizeTestImpl.cpp
index d16669502a..5a36856e54 100644
--- a/src/backends/backendsCommon/test/layerTests/QuantizeTestImpl.cpp
+++ b/src/backends/backendsCommon/test/layerTests/QuantizeTestImpl.cpp
@@ -23,6 +23,7 @@ template<typename T, std::size_t Dim>
LayerTestResult<T, Dim> QuantizeTestImpl(
armnn::IWorkloadFactory& workloadFactory,
const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager,
+ const armnn::ITensorHandleFactory& tensorHandleFactory,
const armnn::TensorInfo& inputTensorInfo,
const armnn::TensorInfo& outputTensorInfo,
const std::vector<float>& inputData,
@@ -35,10 +36,8 @@ LayerTestResult<T, Dim> QuantizeTestImpl(
LayerTestResult<T, Dim> ret(outputTensorInfo);
ret.outputExpected = MakeTensor<T, Dim>(outputTensorInfo, expectedOutputData);
- ARMNN_NO_DEPRECATE_WARN_BEGIN
- std::unique_ptr<armnn::ITensorHandle> inputHandle = workloadFactory.CreateTensorHandle(inputTensorInfo);
- std::unique_ptr<armnn::ITensorHandle> outputHandle = workloadFactory.CreateTensorHandle(outputTensorInfo);
- ARMNN_NO_DEPRECATE_WARN_END
+ std::unique_ptr<armnn::ITensorHandle> inputHandle = tensorHandleFactory.CreateTensorHandle(inputTensorInfo);
+ std::unique_ptr<armnn::ITensorHandle> outputHandle = tensorHandleFactory.CreateTensorHandle(outputTensorInfo);
armnn::WorkloadInfo info;
AddInputToWorkload(descriptor, info, inputTensorInfo, inputHandle.get());
@@ -61,7 +60,8 @@ LayerTestResult<T, Dim> QuantizeTestImpl(
template <armnn::DataType ArmnnOutputType, typename T = armnn::ResolveType<ArmnnOutputType>>
LayerTestResult<T, 4> QuantizeSimpleTest(
armnn::IWorkloadFactory& workloadFactory,
- const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager)
+ const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager,
+ const armnn::ITensorHandleFactory& tensorHandleFactory)
{
armnn::QuantizeQueueDescriptor desc;
@@ -86,6 +86,7 @@ LayerTestResult<T, 4> QuantizeSimpleTest(
return QuantizeTestImpl<T, 4>(workloadFactory,
memoryManager,
+ tensorHandleFactory,
inputTensorInfo,
outputTensorInfo,
inputData,
@@ -96,7 +97,8 @@ LayerTestResult<T, 4> QuantizeSimpleTest(
template <armnn::DataType ArmnnOutputType, typename T = armnn::ResolveType<ArmnnOutputType>>
LayerTestResult<T, 4> QuantizeClampTest(
armnn::IWorkloadFactory& workloadFactory,
- const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager)
+ const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager,
+ const armnn::ITensorHandleFactory& tensorHandleFactory)
{
armnn::QuantizeQueueDescriptor desc;
@@ -118,6 +120,7 @@ LayerTestResult<T, 4> QuantizeClampTest(
return QuantizeTestImpl<T, 4>(workloadFactory,
memoryManager,
+ tensorHandleFactory,
inputTensorInfo,
outputTensorInfo,
inputData,
@@ -129,35 +132,40 @@ LayerTestResult<T, 4> QuantizeClampTest(
LayerTestResult<uint8_t, 4> QuantizeSimpleUint8Test(
armnn::IWorkloadFactory& workloadFactory,
- const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager)
+ const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager,
+ const armnn::ITensorHandleFactory& tensorHandleFactory)
{
- return QuantizeSimpleTest<armnn::DataType::QAsymmU8>(workloadFactory, memoryManager);
+ return QuantizeSimpleTest<armnn::DataType::QAsymmU8>(workloadFactory, memoryManager, tensorHandleFactory);
}
LayerTestResult<uint8_t, 4> QuantizeClampUint8Test(
armnn::IWorkloadFactory& workloadFactory,
- const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager)
+ const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager,
+ const armnn::ITensorHandleFactory& tensorHandleFactory)
{
- return QuantizeClampTest<armnn::DataType::QAsymmU8>(workloadFactory, memoryManager);
+ return QuantizeClampTest<armnn::DataType::QAsymmU8>(workloadFactory, memoryManager, tensorHandleFactory);
}
LayerTestResult<int8_t, 4> QuantizeClampAsymmInt8Test(
armnn::IWorkloadFactory& workloadFactory,
- const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager)
+ const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager,
+ const armnn::ITensorHandleFactory& tensorHandleFactory)
{
- return QuantizeClampTest<armnn::DataType::QAsymmS8>(workloadFactory, memoryManager);
+ return QuantizeClampTest<armnn::DataType::QAsymmS8>(workloadFactory, memoryManager, tensorHandleFactory);
}
LayerTestResult<int8_t, 4> QuantizeClampInt8Test(
- armnn::IWorkloadFactory& workloadFactory,
- const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager)
+ armnn::IWorkloadFactory& workloadFactory,
+ const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager,
+ const armnn::ITensorHandleFactory& tensorHandleFactory)
{
- return QuantizeClampTest<armnn::DataType::QSymmS8>(workloadFactory, memoryManager);
+ return QuantizeClampTest<armnn::DataType::QSymmS8>(workloadFactory, memoryManager, tensorHandleFactory);
}
LayerTestResult<int16_t, 4> QuantizeClampInt16Test(
armnn::IWorkloadFactory& workloadFactory,
- const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager)
+ const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager,
+ const armnn::ITensorHandleFactory& tensorHandleFactory)
{
- return QuantizeClampTest<armnn::DataType::QSymmS16>(workloadFactory, memoryManager);
+ return QuantizeClampTest<armnn::DataType::QSymmS16>(workloadFactory, memoryManager, tensorHandleFactory);
}
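
Call sites of these helpers now have to pass an ITensorHandleFactory in addition to the
workload factory and memory manager, since the deprecated IWorkloadFactory::CreateTensorHandle
overloads are no longer used here. Below is a minimal sketch of an updated call site, assuming
the caller's test fixture already provides workloadFactory, memoryManager and a backend-specific
tensorHandleFactory (these object names are illustrative, not part of this patch):

    // Hypothetical caller; only the third argument is new.
    LayerTestResult<uint8_t, 4> result =
        QuantizeSimpleUint8Test(workloadFactory,       // armnn::IWorkloadFactory&
                                memoryManager,         // IBackendInternal::IMemoryManagerSharedPtr
                                tensorHandleFactory);  // armnn::ITensorHandleFactory&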