ArmNN 21.11 — source listing of DequantizeTestImpl.cpp (extracted from the generated ArmNN documentation).
//
// Copyright © 2017 Arm Ltd and Contributors. All rights reserved.
// SPDX-License-Identifier: MIT
//

#include "DequantizeTestImpl.hpp"

#include <ResolveType.hpp>

// NOTE(review): the two includes below were dropped by the documentation
// extraction; they provide AddInputToWorkload/AddOutputToWorkload,
// CopyDataToITensorHandle/CopyDataFromITensorHandle and ExecuteWorkload.
// Verify the exact paths against the ArmNN 21.11 tree.
#include <backendsCommon/test/TensorCopyUtils.hpp>
#include <backendsCommon/test/WorkloadTestUtils.hpp>

#include <test/TensorHelpers.hpp>
16 namespace
17 {
18 
19 template<typename T, std::size_t Dim, typename T1=float>
20 LayerTestResult<T1, Dim> DequantizeTestImpl(
21  armnn::IWorkloadFactory& workloadFactory,
23  const armnn::TensorInfo& inputTensorInfo,
24  const armnn::TensorInfo& outputTensorInfo,
25  const std::vector<T>& inputData,
26  const std::vector<T1>& expectedOutputData,
28 {
29  IgnoreUnused(memoryManager);
30 
31  std::vector<T1> actualOutput(outputTensorInfo.GetNumElements());
32 
34  std::unique_ptr<armnn::ITensorHandle> inputHandle = workloadFactory.CreateTensorHandle(inputTensorInfo);
35  std::unique_ptr<armnn::ITensorHandle> outputHandle = workloadFactory.CreateTensorHandle(outputTensorInfo);
37 
39  AddInputToWorkload(descriptor, info, inputTensorInfo, inputHandle.get());
40  AddOutputToWorkload(descriptor, info, outputTensorInfo, outputHandle.get());
41 
42  std::unique_ptr<armnn::IWorkload> workload = workloadFactory.CreateDequantize(descriptor, info);
43 
44  inputHandle->Allocate();
45  outputHandle->Allocate();
46 
47  CopyDataToITensorHandle(inputHandle.get(), inputData.data());
48 
49  ExecuteWorkload(*workload, memoryManager);
50 
51  CopyDataFromITensorHandle(actualOutput.data(), outputHandle.get());
52 
53  return LayerTestResult<T1, Dim>(actualOutput,
54  expectedOutputData,
55  outputHandle->GetShape(),
56  outputTensorInfo.GetShape());
57 }
58 
59 template <armnn::DataType ArmnnInputType,
61  typename OutType=armnn::ResolveType<ArmnnOutputType>>
62 LayerTestResult<OutType, 4> DequantizeSimpleTest(
63  armnn::IWorkloadFactory& workloadFactory,
65 {
67 
69 
70  const armnn::TensorInfo inputTensorInfo({1, 2, 2, 3}, ArmnnInputType, 0.5f, 0);
71  const armnn::TensorInfo outputTensorInfo({1, 2, 2, 3}, ArmnnOutputType);
72 
73  std::vector<T> inputData = std::vector<T>(
74  {
75  2, 4, 6,
76  8, 10, 12,
77  14, 16, 18,
78  20, 22, 24,
79  });
80 
81  std::vector<OutType> expectedOutputData;
82  for (OutType i = OutType(1); i <= OutType(12); ++i)
83  {
84  expectedOutputData.push_back(i);
85  }
86 
87  return DequantizeTestImpl<T, 4, OutType>(workloadFactory,
88  memoryManager,
89  inputTensorInfo,
90  outputTensorInfo,
91  inputData,
92  expectedOutputData,
93  desc);
94 }
95 
96 template <armnn::DataType ArmnnInputType>
97 LayerTestResult<float, 4> DequantizeOffsetTest(
98  armnn::IWorkloadFactory& workloadFactory,
100 {
102 
104 
105  const armnn::TensorInfo inputTensorInfo({1, 2, 2, 3}, ArmnnInputType, 0.5f, 1);
106  const armnn::TensorInfo outputTensorInfo({1, 2, 2, 3}, armnn::DataType::Float32);
107 
108  std::vector<T> inputData = std::vector<T>(
109  {
110  3, 5, 7,
111  9, 11, 13,
112  15, 17, 19,
113  21, 23, 25,
114  });
115 
116  std::vector<float> expectedOutputData = std::vector<float>(
117  {
118  1.0f, 2.0f, 3.0f,
119  4.0f, 5.0f, 6.0f,
120  7.0f, 8.0f, 9.0f,
121  10.0f, 11.0f, 12.0f,
122  });
123 
124  return DequantizeTestImpl<T, 4>(workloadFactory,
125  memoryManager,
126  inputTensorInfo,
127  outputTensorInfo,
128  inputData,
129  expectedOutputData,
130  desc);
131 }
132 
133 } // anonymous namespace
134 
136  armnn::IWorkloadFactory& workloadFactory,
138 {
139  return DequantizeSimpleTest<armnn::DataType::QAsymmU8>(workloadFactory, memoryManager);
140 }
141 
143  armnn::IWorkloadFactory& workloadFactory,
145 {
146  return DequantizeOffsetTest<armnn::DataType::QAsymmU8>(workloadFactory, memoryManager);
147 }
148 
150  armnn::IWorkloadFactory& workloadFactory,
152 {
153  return DequantizeSimpleTest<armnn::DataType::QAsymmS8>(workloadFactory, memoryManager);
154 }
155 
157  armnn::IWorkloadFactory& workloadFactory,
159 {
160  return DequantizeOffsetTest<armnn::DataType::QAsymmS8>(workloadFactory, memoryManager);
161 }
162 
164  armnn::IWorkloadFactory& workloadFactory,
166 {
167  return DequantizeSimpleTest<armnn::DataType::QSymmS8>(workloadFactory, memoryManager);
168 }
169 
171  armnn::IWorkloadFactory& workloadFactory,
173 {
174  return DequantizeSimpleTest<armnn::DataType::QSymmS16>(workloadFactory, memoryManager);
175 }
176 
178  armnn::IWorkloadFactory& workloadFactory,
180 {
181  return DequantizeSimpleTest<armnn::DataType::QAsymmU8, armnn::DataType::Float16>(workloadFactory,
182  memoryManager);
183 }
184 
186  armnn::IWorkloadFactory& workloadFactory,
188 {
189  return DequantizeSimpleTest<armnn::DataType::QSymmS8, armnn::DataType::Float16>(workloadFactory, memoryManager);
190 }
191 
193  armnn::IWorkloadFactory& workloadFactory,
195 {
196  return DequantizeSimpleTest<armnn::DataType::QSymmS16, armnn::DataType::Float16>(workloadFactory,
197  memoryManager);
198 }
const TensorShape & GetShape() const
Definition: Tensor.hpp:191
#define ARMNN_NO_DEPRECATE_WARN_BEGIN
Definition: Deprecated.hpp:33
LayerTestResult< float, 4 > DequantizeOffsetAsymmInt8Test(armnn::IWorkloadFactory &workloadFactory, const armnn::IBackendInternal::IMemoryManagerSharedPtr &memoryManager)
typename ResolveTypeImpl< DT >::Type ResolveType
Definition: ResolveType.hpp:79
void IgnoreUnused(Ts &&...)
LayerTestResult< armnn::Half, 4 > DequantizeSimpleUint8ToFp16Test(armnn::IWorkloadFactory &workloadFactory, const armnn::IBackendInternal::IMemoryManagerSharedPtr &memoryManager)
DataType
Definition: Types.hpp:35
LayerTestResult< float, 4 > DequantizeSimpleUint8Test(armnn::IWorkloadFactory &workloadFactory, const armnn::IBackendInternal::IMemoryManagerSharedPtr &memoryManager)
#define ARMNN_NO_DEPRECATE_WARN_END
Definition: Deprecated.hpp:34
std::shared_ptr< IMemoryManager > IMemoryManagerSharedPtr
LayerTestResult< float, 4 > DequantizeOffsetUint8Test(armnn::IWorkloadFactory &workloadFactory, const armnn::IBackendInternal::IMemoryManagerSharedPtr &memoryManager)
LayerTestResult< float, 4 > DequantizeSimpleAsymmInt8Test(armnn::IWorkloadFactory &workloadFactory, const armnn::IBackendInternal::IMemoryManagerSharedPtr &memoryManager)
void CopyDataFromITensorHandle(void *memory, const armnn::ITensorHandle *tensorHandle)
virtual std::unique_ptr< ITensorHandle > CreateTensorHandle(const TensorInfo &tensorInfo, const bool IsMemoryManaged=true) const =0
LayerTestResult< armnn::Half, 4 > DequantizeSimpleInt16ToFp16Test(armnn::IWorkloadFactory &workloadFactory, const armnn::IBackendInternal::IMemoryManagerSharedPtr &memoryManager)
LayerTestResult< armnn::Half, 4 > DequantizeSimpleInt8ToFp16Test(armnn::IWorkloadFactory &workloadFactory, const armnn::IBackendInternal::IMemoryManagerSharedPtr &memoryManager)
Contains information about TensorInfos of a layer.
LayerTestResult< float, 4 > DequantizeSimpleInt16Test(armnn::IWorkloadFactory &workloadFactory, const armnn::IBackendInternal::IMemoryManagerSharedPtr &memoryManager)
LayerTestResult< float, 4 > DequantizeSimpleInt8Test(armnn::IWorkloadFactory &workloadFactory, const armnn::IBackendInternal::IMemoryManagerSharedPtr &memoryManager)
unsigned int GetNumElements() const
Definition: Tensor.hpp:196
void CopyDataToITensorHandle(armnn::ITensorHandle *tensorHandle, const void *memory)
virtual std::unique_ptr< IWorkload > CreateDequantize(const DequantizeQueueDescriptor &descriptor, const WorkloadInfo &info) const