ArmNN 20.02: ReshapeTestImpl.cpp
//
// Copyright © 2017 Arm Ltd. All rights reserved.
// SPDX-License-Identifier: MIT
//

#include "ReshapeTestImpl.hpp"

#include <backendsCommon/test/DataTypeUtils.hpp>
#include <backendsCommon/test/TensorCopyUtils.hpp>
#include <backendsCommon/test/WorkloadTestUtils.hpp>

#include <test/TensorHelpers.hpp>

namespace
{

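// Shared helper: wraps the given input/output TensorInfos and data in tensor
// handles, builds a ReshapeQueueDescriptor, asks the workload factory for a
// Reshape workload, executes it, and returns both the actual and the expected
// output for the caller to compare.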
template<typename T, size_t NumDims>
LayerTestResult<T, NumDims> SimpleReshapeTestImpl(
    armnn::IWorkloadFactory& workloadFactory,
    const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager,
    armnn::TensorInfo inputTensorInfo,
    armnn::TensorInfo outputTensorInfo,
    const std::vector<T>& inputData,
    const std::vector<T>& outputExpectedData)
{
    IgnoreUnused(memoryManager);
    auto input = MakeTensor<T, NumDims>(inputTensorInfo, inputData);

    LayerTestResult<T, NumDims> ret(outputTensorInfo);
    ret.outputExpected = MakeTensor<T, NumDims>(outputTensorInfo, outputExpectedData);

    std::unique_ptr<armnn::ITensorHandle> inputHandle = workloadFactory.CreateTensorHandle(inputTensorInfo);
    std::unique_ptr<armnn::ITensorHandle> outputHandle = workloadFactory.CreateTensorHandle(outputTensorInfo);

    armnn::ReshapeQueueDescriptor data;
    armnn::WorkloadInfo info;
    AddInputToWorkload(data, info, inputTensorInfo, inputHandle.get());
    AddOutputToWorkload(data, info, outputTensorInfo, outputHandle.get());

    std::unique_ptr<armnn::IWorkload> workload = workloadFactory.CreateReshape(data, info);

    inputHandle->Allocate();
    outputHandle->Allocate();

    CopyDataToITensorHandle(inputHandle.get(), input.origin());

    workload->Execute();

    CopyDataFromITensorHandle(ret.output.origin(), outputHandle.get());

    return ret;
}

} // anonymous namespace

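// Entry points declared in ReshapeTestImpl.hpp: each test builds matching
// input and expected-output data for a specific shape pair and delegates the
// workload plumbing to SimpleReshapeTestImpl above.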
template<armnn::DataType ArmnnType, typename T>
LayerTestResult<T, 4> SimpleReshapeTest(
    armnn::IWorkloadFactory& workloadFactory,
    const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager)
{
    armnn::TensorInfo inputTensorInfo;
    armnn::TensorInfo outputTensorInfo;

    unsigned int inputShape[] = { 2, 2, 3, 3 };
    unsigned int outputShape[] = { 2, 2, 9, 1 };

    inputTensorInfo = armnn::TensorInfo(4, inputShape, ArmnnType);
    inputTensorInfo.SetQuantizationScale(1.0f);
    outputTensorInfo = armnn::TensorInfo(4, outputShape, ArmnnType);
    outputTensorInfo.SetQuantizationScale(1.0f);

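    // Reshaping { 2, 2, 3, 3 } to { 2, 2, 9, 1 } keeps all 36 elements in
    // row-major order: the value 25.0f at input index [1][0][2][1]
    // (flat index 1*18 + 0*9 + 2*3 + 1 = 25) ends up at output index
    // [1][0][7][0] (flat index 1*18 + 0*9 + 7*1 + 0 = 25).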
    auto input = ConvertToDataType<ArmnnType>(
        {
            0.0f, 1.0f, 2.0f,
            3.0f, 4.0f, 5.0f,
            6.0f, 7.0f, 8.0f,

            9.0f, 10.0f, 11.0f,
            12.0f, 13.0f, 14.0f,
            15.0f, 16.0f, 17.0f,

            18.0f, 19.0f, 20.0f,
            21.0f, 22.0f, 23.0f,
            24.0f, 25.0f, 26.0f,

            27.0f, 28.0f, 29.0f,
            30.0f, 31.0f, 32.0f,
            33.0f, 34.0f, 35.0f,
        },
        inputTensorInfo);

    auto outputExpected = ConvertToDataType<ArmnnType>(
        {
            0.0f, 1.0f, 2.0f, 3.0f, 4.0f, 5.0f, 6.0f, 7.0f, 8.0f,

            9.0f, 10.0f, 11.0f, 12.0f, 13.0f, 14.0f, 15.0f, 16.0f, 17.0f,

            18.0f, 19.0f, 20.0f, 21.0f, 22.0f, 23.0f, 24.0f, 25.0f, 26.0f,

            27.0f, 28.0f, 29.0f, 30.0f, 31.0f, 32.0f, 33.0f, 34.0f, 35.0f,
        },
        outputTensorInfo);

    return SimpleReshapeTestImpl<T, 4>(
        workloadFactory, memoryManager, inputTensorInfo, outputTensorInfo, input, outputExpected);
}

template<armnn::DataType ArmnnType, typename T>
LayerTestResult<T, 5> Reshape5dTest(
    armnn::IWorkloadFactory& workloadFactory,
    const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager)
{
    armnn::TensorInfo inputTensorInfo;
    armnn::TensorInfo outputTensorInfo;

    unsigned int inputShape[] = { 2, 2, 8, 1, 1 };
    unsigned int outputShape[] = { 2, 2, 2, 2, 2 };

    inputTensorInfo = armnn::TensorInfo(5, inputShape, ArmnnType);
    inputTensorInfo.SetQuantizationScale(1.0f);
    outputTensorInfo = armnn::TensorInfo(5, outputShape, ArmnnType);
    outputTensorInfo.SetQuantizationScale(1.0f);

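    // Reshaping { 2, 2, 8, 1, 1 } to { 2, 2, 2, 2, 2 } keeps all 32 elements
    // in row-major order; only the shape metadata changes.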
    auto input = ConvertToDataType<ArmnnType>(
        {
            0.0f, 1.0f, 2.0f, 3.0f, 4.0f, 5.0f, 6.0f, 7.0f,
            8.0f, 9.0f, 10.0f, 11.0f, 12.0f, 13.0f, 14.0f, 15.0f,

            16.0f, 17.0f, 18.0f, 19.0f, 20.0f, 21.0f, 22.0f, 23.0f,
            24.0f, 25.0f, 26.0f, 27.0f, 28.0f, 29.0f, 30.0f, 31.0f,
        },
        inputTensorInfo);

    auto outputExpected = ConvertToDataType<ArmnnType>(
        {
            0.0f, 1.0f,
            2.0f, 3.0f,

            4.0f, 5.0f,
            6.0f, 7.0f,


            8.0f, 9.0f,
            10.0f, 11.0f,

            12.0f, 13.0f,
            14.0f, 15.0f,



            16.0f, 17.0f,
            18.0f, 19.0f,

            20.0f, 21.0f,
            22.0f, 23.0f,


            24.0f, 25.0f,
            26.0f, 27.0f,

            28.0f, 29.0f,
            30.0f, 31.0f,
        },
        outputTensorInfo);

    return SimpleReshapeTestImpl<T, 5>(
        workloadFactory, memoryManager, inputTensorInfo, outputTensorInfo, input, outputExpected);
}

//
// Explicit template specializations
//

template LayerTestResult<armnn::ResolveType<armnn::DataType::Float32>, 4>
SimpleReshapeTest<armnn::DataType::Float32>(
    armnn::IWorkloadFactory& workloadFactory,
    const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager);

template LayerTestResult<armnn::ResolveType<armnn::DataType::QAsymmU8>, 4>
SimpleReshapeTest<armnn::DataType::QAsymmU8>(
    armnn::IWorkloadFactory& workloadFactory,
    const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager);

template LayerTestResult<armnn::ResolveType<armnn::DataType::QSymmS16>, 4>
SimpleReshapeTest<armnn::DataType::QSymmS16>(
    armnn::IWorkloadFactory& workloadFactory,
    const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager);

template LayerTestResult<armnn::ResolveType<armnn::DataType::Float32>, 5>
Reshape5dTest<armnn::DataType::Float32>(
    armnn::IWorkloadFactory& workloadFactory,
    const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager);

template LayerTestResult<armnn::ResolveType<armnn::DataType::QAsymmU8>, 5>
Reshape5dTest<armnn::DataType::QAsymmU8>(
    armnn::IWorkloadFactory& workloadFactory,
    const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager);

template LayerTestResult<armnn::ResolveType<armnn::DataType::QSymmS16>, 5>
Reshape5dTest<armnn::DataType::QSymmS16>(
    armnn::IWorkloadFactory& workloadFactory,
    const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager);
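
// Illustrative only: backend unit-test suites typically register these tests
// via the ARMNN_AUTO_TEST_CASE macro. The test names below follow the pattern
// used by the reference backend and are assumptions, not part of this file:
//
//     ARMNN_AUTO_TEST_CASE(SimpleReshapeFloat32, SimpleReshapeTest<armnn::DataType::Float32>)
//     ARMNN_AUTO_TEST_CASE(SimpleReshapeQuantisedAsymm8, SimpleReshapeTest<armnn::DataType::QAsymmU8>)
//     ARMNN_AUTO_TEST_CASE(Reshape5dFloat32, Reshape5dTest<armnn::DataType::Float32>)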