ArmNN 21.05 — source listing of DequantizeTestImpl.cpp.
Go to the documentation of this file.
1 //
2 // Copyright © 2017 Arm Ltd and Contributors. All rights reserved.
3 // SPDX-License-Identifier: MIT
4 //
5 
6 #include "DequantizeTestImpl.hpp"
7 
8 #include <ResolveType.hpp>
9 
10 
13 
14 #include <test/TensorHelpers.hpp>
15 
16 namespace
17 {
18 
19 template<typename T, std::size_t Dim, typename T1=float>
20 LayerTestResult<T1, Dim> DequantizeTestImpl(
21  armnn::IWorkloadFactory& workloadFactory,
23  const armnn::TensorInfo& inputTensorInfo,
24  const armnn::TensorInfo& outputTensorInfo,
25  const std::vector<T>& inputData,
26  const std::vector<T1>& expectedOutputData,
28 {
29  IgnoreUnused(memoryManager);
30  boost::multi_array<T, Dim> input = MakeTensor<T, Dim>(inputTensorInfo, inputData);
31 
32  LayerTestResult<T1, Dim> ret(outputTensorInfo);
33  ret.outputExpected = MakeTensor<T1, Dim>(outputTensorInfo, expectedOutputData);
34 
36  std::unique_ptr<armnn::ITensorHandle> inputHandle = workloadFactory.CreateTensorHandle(inputTensorInfo);
37  std::unique_ptr<armnn::ITensorHandle> outputHandle = workloadFactory.CreateTensorHandle(outputTensorInfo);
39 
41  AddInputToWorkload(descriptor, info, inputTensorInfo, inputHandle.get());
42  AddOutputToWorkload(descriptor, info, outputTensorInfo, outputHandle.get());
43 
44  std::unique_ptr<armnn::IWorkload> workload = workloadFactory.CreateDequantize(descriptor, info);
45 
46  inputHandle->Allocate();
47  outputHandle->Allocate();
48 
49  CopyDataToITensorHandle(inputHandle.get(), input.data());
50 
51  ExecuteWorkload(*workload, memoryManager);
52 
53  CopyDataFromITensorHandle(ret.output.data(), outputHandle.get());
54 
55  return ret;
56 }
57 
58 template <armnn::DataType ArmnnInputType,
60  typename OutType=armnn::ResolveType<ArmnnOutputType>>
61 LayerTestResult<OutType, 4> DequantizeSimpleTest(
62  armnn::IWorkloadFactory& workloadFactory,
64 {
66 
68 
69  const armnn::TensorInfo inputTensorInfo({1, 2, 2, 3}, ArmnnInputType, 0.5f, 0);
70  const armnn::TensorInfo outputTensorInfo({1, 2, 2, 3}, ArmnnOutputType);
71 
72  std::vector<T> inputData = std::vector<T>(
73  {
74  2, 4, 6,
75  8, 10, 12,
76  14, 16, 18,
77  20, 22, 24,
78  });
79 
80  std::vector<OutType> expectedOutputData;
81  for (OutType i = OutType(1); i <= OutType(12); ++i)
82  {
83  expectedOutputData.push_back(i);
84  }
85 
86  return DequantizeTestImpl<T, 4, OutType>(workloadFactory,
87  memoryManager,
88  inputTensorInfo,
89  outputTensorInfo,
90  inputData,
91  expectedOutputData,
92  desc);
93 }
94 
95 template <armnn::DataType ArmnnInputType>
96 LayerTestResult<float, 4> DequantizeOffsetTest(
97  armnn::IWorkloadFactory& workloadFactory,
99 {
101 
103 
104  const armnn::TensorInfo inputTensorInfo({1, 2, 2, 3}, ArmnnInputType, 0.5f, 1);
105  const armnn::TensorInfo outputTensorInfo({1, 2, 2, 3}, armnn::DataType::Float32);
106 
107  std::vector<T> inputData = std::vector<T>(
108  {
109  3, 5, 7,
110  9, 11, 13,
111  15, 17, 19,
112  21, 23, 25,
113  });
114 
115  std::vector<float> expectedOutputData = std::vector<float>(
116  {
117  1.0f, 2.0f, 3.0f,
118  4.0f, 5.0f, 6.0f,
119  7.0f, 8.0f, 9.0f,
120  10.0f, 11.0f, 12.0f,
121  });
122 
123  return DequantizeTestImpl<T, 4>(workloadFactory,
124  memoryManager,
125  inputTensorInfo,
126  outputTensorInfo,
127  inputData,
128  expectedOutputData,
129  desc);
130 }
131 
132 } // anonymous namespace
133 
135  armnn::IWorkloadFactory& workloadFactory,
137 {
138  return DequantizeSimpleTest<armnn::DataType::QAsymmU8>(workloadFactory, memoryManager);
139 }
140 
142  armnn::IWorkloadFactory& workloadFactory,
144 {
145  return DequantizeOffsetTest<armnn::DataType::QAsymmU8>(workloadFactory, memoryManager);
146 }
147 
149  armnn::IWorkloadFactory& workloadFactory,
151 {
152  return DequantizeSimpleTest<armnn::DataType::QAsymmS8>(workloadFactory, memoryManager);
153 }
154 
156  armnn::IWorkloadFactory& workloadFactory,
158 {
159  return DequantizeOffsetTest<armnn::DataType::QAsymmS8>(workloadFactory, memoryManager);
160 }
161 
163  armnn::IWorkloadFactory& workloadFactory,
165 {
166  return DequantizeSimpleTest<armnn::DataType::QSymmS8>(workloadFactory, memoryManager);
167 }
168 
170  armnn::IWorkloadFactory& workloadFactory,
172 {
173  return DequantizeSimpleTest<armnn::DataType::QSymmS16>(workloadFactory, memoryManager);
174 }
175 
177  armnn::IWorkloadFactory& workloadFactory,
179 {
180  return DequantizeSimpleTest<armnn::DataType::QAsymmU8, armnn::DataType::Float16>(workloadFactory,
181  memoryManager);
182 }
183 
185  armnn::IWorkloadFactory& workloadFactory,
187 {
188  return DequantizeSimpleTest<armnn::DataType::QSymmS8, armnn::DataType::Float16>(workloadFactory, memoryManager);
189 }
190 
192  armnn::IWorkloadFactory& workloadFactory,
194 {
195  return DequantizeSimpleTest<armnn::DataType::QSymmS16, armnn::DataType::Float16>(workloadFactory,
196  memoryManager);
197 }
#define ARMNN_NO_DEPRECATE_WARN_BEGIN
Definition: Deprecated.hpp:33
LayerTestResult< float, 4 > DequantizeOffsetAsymmInt8Test(armnn::IWorkloadFactory &workloadFactory, const armnn::IBackendInternal::IMemoryManagerSharedPtr &memoryManager)
typename ResolveTypeImpl< DT >::Type ResolveType
Definition: ResolveType.hpp:79
void IgnoreUnused(Ts &&...)
LayerTestResult< armnn::Half, 4 > DequantizeSimpleUint8ToFp16Test(armnn::IWorkloadFactory &workloadFactory, const armnn::IBackendInternal::IMemoryManagerSharedPtr &memoryManager)
DataType
Definition: Types.hpp:36
LayerTestResult< float, 4 > DequantizeSimpleUint8Test(armnn::IWorkloadFactory &workloadFactory, const armnn::IBackendInternal::IMemoryManagerSharedPtr &memoryManager)
#define ARMNN_NO_DEPRECATE_WARN_END
Definition: Deprecated.hpp:34
std::shared_ptr< IMemoryManager > IMemoryManagerSharedPtr
LayerTestResult< float, 4 > DequantizeOffsetUint8Test(armnn::IWorkloadFactory &workloadFactory, const armnn::IBackendInternal::IMemoryManagerSharedPtr &memoryManager)
LayerTestResult< float, 4 > DequantizeSimpleAsymmInt8Test(armnn::IWorkloadFactory &workloadFactory, const armnn::IBackendInternal::IMemoryManagerSharedPtr &memoryManager)
void CopyDataFromITensorHandle(void *memory, const armnn::ITensorHandle *tensorHandle)
virtual std::unique_ptr< ITensorHandle > CreateTensorHandle(const TensorInfo &tensorInfo, const bool IsMemoryManaged=true) const =0
LayerTestResult< armnn::Half, 4 > DequantizeSimpleInt16ToFp16Test(armnn::IWorkloadFactory &workloadFactory, const armnn::IBackendInternal::IMemoryManagerSharedPtr &memoryManager)
LayerTestResult< armnn::Half, 4 > DequantizeSimpleInt8ToFp16Test(armnn::IWorkloadFactory &workloadFactory, const armnn::IBackendInternal::IMemoryManagerSharedPtr &memoryManager)
Contains information about inputs and outputs to a layer.
LayerTestResult< float, 4 > DequantizeSimpleInt16Test(armnn::IWorkloadFactory &workloadFactory, const armnn::IBackendInternal::IMemoryManagerSharedPtr &memoryManager)
LayerTestResult< float, 4 > DequantizeSimpleInt8Test(armnn::IWorkloadFactory &workloadFactory, const armnn::IBackendInternal::IMemoryManagerSharedPtr &memoryManager)
void CopyDataToITensorHandle(armnn::ITensorHandle *tensorHandle, const void *memory)
virtual std::unique_ptr< IWorkload > CreateDequantize(const DequantizeQueueDescriptor &descriptor, const WorkloadInfo &info) const