ArmNN 20.02
MeanTestImpl.hpp
//
// Copyright © 2017 Arm Ltd. All rights reserved.
// SPDX-License-Identifier: MIT
//

#pragma once

#include "LayerTestResult.hpp"

#include <ResolveType.hpp>

#include <backendsCommon/test/TensorCopyUtils.hpp>
#include <backendsCommon/test/WorkloadTestUtils.hpp>

namespace
{
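// Runs a Mean workload built from the supplied workload factory over the given
// input data and returns a LayerTestResult holding both the actual output and
// the expected output, so callers can compare them. The optional scale/offset
// arguments apply the same quantization info to the input and output tensors.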
template<armnn::DataType ArmnnType, typename T, std::size_t InputDim, std::size_t OutputDim>
LayerTestResult<T, OutputDim> MeanTestHelper(
    armnn::IWorkloadFactory& workloadFactory,
    const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager,
    const unsigned int* inputShape,
    const std::vector<float>& inputData,
    const std::vector<unsigned int>& axis,
    bool keepDims,
    const unsigned int* outputShape,
    const std::vector<float>& outputData,
    float scale = 1.0f,
    int32_t offset = 0)
{
    IgnoreUnused(memoryManager);

    armnn::TensorInfo inputTensorInfo(InputDim, inputShape, ArmnnType);
    armnn::TensorInfo outputTensorInfo(OutputDim, outputShape, ArmnnType);

    inputTensorInfo.SetQuantizationScale(scale);
    inputTensorInfo.SetQuantizationOffset(offset);

    outputTensorInfo.SetQuantizationScale(scale);
    outputTensorInfo.SetQuantizationOffset(offset);

    auto input = MakeTensor<T, InputDim>(inputTensorInfo, ConvertToDataType<ArmnnType>(inputData, inputTensorInfo));

    LayerTestResult<T, OutputDim> result(outputTensorInfo);
    result.outputExpected = MakeTensor<T, OutputDim>(
        outputTensorInfo, ConvertToDataType<ArmnnType>(outputData, outputTensorInfo));

    std::unique_ptr<armnn::ITensorHandle> inputHandle = workloadFactory.CreateTensorHandle(inputTensorInfo);
    std::unique_ptr<armnn::ITensorHandle> outputHandle = workloadFactory.CreateTensorHandle(outputTensorInfo);

    armnn::MeanQueueDescriptor data;
    data.m_Parameters.m_Axis = axis;
    data.m_Parameters.m_KeepDims = keepDims;
    armnn::WorkloadInfo info;
    AddInputToWorkload(data, info, inputTensorInfo, inputHandle.get());
    AddOutputToWorkload(data, info, outputTensorInfo, outputHandle.get());

    std::unique_ptr<armnn::IWorkload> workload = workloadFactory.CreateMean(data, info);

    inputHandle->Allocate();
    outputHandle->Allocate();

    CopyDataToITensorHandle(inputHandle.get(), input.origin());

    workload->PostAllocationConfigure();
    workload->Execute();

    CopyDataFromITensorHandle(result.output.origin(), outputHandle.get());

    return result;
}

} // anonymous namespace

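// An empty axis vector reduces over every dimension: the six input values below
// average to (1.5 + 1.5 + 2.5 + 2.5 + 3.5 + 3.5) / 6 = 2.5.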
template<armnn::DataType ArmnnType, typename T = armnn::ResolveType<ArmnnType>>
LayerTestResult<T, 1> MeanSimpleTest(
    armnn::IWorkloadFactory& workloadFactory,
    const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager)
{
    const unsigned int inputShape[] = { 3, 2 };
    const unsigned int outputShape[] = { 1 };

    std::vector<float> input({ 1.5f, 1.5f, 2.5f, 2.5f, 3.5f, 3.5f });
    std::vector<float> output({ 2.5f });

    return MeanTestHelper<ArmnnType, T, 2, 1>(
        workloadFactory, memoryManager, inputShape, input, {}, false, outputShape, output);
}

template<armnn::DataType ArmnnType, typename T = armnn::ResolveType<ArmnnType>>
LayerTestResult<T, 3> MeanSimpleAxisTest(
    armnn::IWorkloadFactory& workloadFactory,
    const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager)
{
    const unsigned int inputShape[] = { 2, 3, 1, 2 };
    const unsigned int outputShape[] = { 3, 1, 2 };

    std::vector<float> input({ 1.5f, 2.5f, 3.5f, 4.5f, 5.5f, 6.5f, 1.5f, 2.5f, 3.5f, 4.5f, 5.5f, 6.5f });
    std::vector<float> output({ 1.5f, 2.5f, 3.5f, 4.5f, 5.5f, 6.5f });

    return MeanTestHelper<ArmnnType, T, 4, 3>(
        workloadFactory, memoryManager, inputShape, input, { 0 }, false, outputShape, output);
}

template<armnn::DataType ArmnnType, typename T = armnn::ResolveType<ArmnnType>>
LayerTestResult<T, 4> MeanKeepDimsTest(
    armnn::IWorkloadFactory& workloadFactory,
    const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager)
{
    const unsigned int inputShape[] = { 1, 1, 3, 2 };
    const unsigned int outputShape[] = { 1, 1, 1, 2 };

    std::vector<float> input({ 1.5f, 1.5f, 2.5f, 2.5f, 3.5f, 3.5f });
    std::vector<float> output({ 2.5f, 2.5f });

    return MeanTestHelper<ArmnnType, T, 4, 4>(
        workloadFactory, memoryManager, inputShape, input, { 2 }, true, outputShape, output);
}

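// Reduces axes 0 and 3 of a { 2, 3, 1, 2 } input with keepDims enabled, so the
// reduced axes are kept with size 1 and the output shape is { 1, 3, 1, 1 }.
// Each of the three positions on axis 1 averages four values, giving 2.0, 4.0
// and 6.0.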
template<armnn::DataType ArmnnType, typename T = armnn::ResolveType<ArmnnType>>
LayerTestResult<T, 4> MeanMultipleDimsTest(
    armnn::IWorkloadFactory& workloadFactory,
    const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager)
{
    const unsigned int inputShape[] = { 2, 3, 1, 2 };
    const unsigned int outputShape[] = { 1, 3, 1, 1 };

    std::vector<float> input({ 1.5f, 2.5f, 3.5f, 4.5f, 5.5f, 6.5f, 1.5f, 2.5f, 3.5f, 4.5f, 5.5f, 6.5f });
    std::vector<float> output({ 2.0f, 4.0f, 6.0f });

    return MeanTestHelper<ArmnnType, T, 4, 4>(
        workloadFactory, memoryManager, inputShape, input, { 0, 3 }, true, outputShape, output);
}

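// The MeanVts*Test cases below appear to mirror mean configurations exercised by
// the Android NNAPI VTS (Vendor Test Suite), hence the "Vts" naming.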
template<armnn::DataType ArmnnType, typename T = armnn::ResolveType<ArmnnType>>
LayerTestResult<T, 1> MeanVts1Test(
    armnn::IWorkloadFactory& workloadFactory,
    const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager)
{
    const unsigned int inputShape[] = { 4, 3, 2 };
    const unsigned int outputShape[] = { 2 };

    std::vector<float> input({ 1.0f, 2.0f, 3.0f, 4.0f, 5.0f, 6.0f, 7.0f, 8.0f, 9.0f, 10.0f, 11.0f, 12.0f, 13.0f, 14.0f,
                               15.0f, 16.0f, 17.0f, 18.0f, 19.0f, 20.0f, 21.0f, 22.0f, 23.0f, 24.0f });
    std::vector<float> output({ 12.0f, 13.0f });

    return MeanTestHelper<ArmnnType, T, 3, 1>(
        workloadFactory, memoryManager, inputShape, input, { 0, 1 }, false, outputShape, output);
}

template<armnn::DataType ArmnnType, typename T = armnn::ResolveType<ArmnnType>>
LayerTestResult<T, 3> MeanVts2Test(
    armnn::IWorkloadFactory& workloadFactory,
    const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager)
{
    const unsigned int inputShape[] = { 4, 3, 2 };
    const unsigned int outputShape[] = { 1, 3, 1 };

    std::vector<float> input({ 1.0f, 2.0f, 3.0f, 4.0f, 5.0f, 6.0f, 7.0f, 8.0f, 9.0f, 10.0f, 11.0f, 12.0f, 13.0f, 14.0f,
                               15.0f, 16.0f, 17.0f, 18.0f, 19.0f, 20.0f, 21.0f, 22.0f, 23.0f, 24.0f });
    std::vector<float> output({ 10.5f, 12.5f, 14.5f });

    return MeanTestHelper<ArmnnType, T, 3, 3>(
        workloadFactory, memoryManager, inputShape, input, { 0, 2 }, true, outputShape, output);
}

template<armnn::DataType ArmnnType, typename T = armnn::ResolveType<ArmnnType>>
LayerTestResult<T, 3> MeanVts3Test(
    armnn::IWorkloadFactory& workloadFactory,
    const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager)
{
    const unsigned int inputShape[] = { 1, 2, 2, 1 };
    const unsigned int outputShape[] = { 1, 2, 1 };

    std::vector<float> input({ 1.0f, 2.0f, 3.0f, 4.0f });
    std::vector<float> output({ 1.5f, 3.5f });

    return MeanTestHelper<ArmnnType, T, 4, 3>(
        workloadFactory, memoryManager, inputShape, input, { 2 }, false, outputShape, output);
}
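
These helpers are not test cases by themselves; backend test suites instantiate them per data type and register them with the unit-test framework. A minimal sketch of such a registration, assuming the ARMNN_AUTO_TEST_CASE macro from ArmNN's backend unit-test infrastructure (the test names and data-type coverage here are illustrative, not the actual backend test lists):

// Illustrative only: the real backend test files (e.g. RefLayerTests.cpp) choose
// their own test names and cover additional data types. ARMNN_AUTO_TEST_CASE
// supplies the workload factory and memory manager for the backend under test.
ARMNN_AUTO_TEST_CASE(MeanSimpleFloat32,       MeanSimpleTest<armnn::DataType::Float32>)
ARMNN_AUTO_TEST_CASE(MeanSimpleAxisFloat32,   MeanSimpleAxisTest<armnn::DataType::Float32>)
ARMNN_AUTO_TEST_CASE(MeanKeepDimsFloat32,     MeanKeepDimsTest<armnn::DataType::Float32>)
ARMNN_AUTO_TEST_CASE(MeanMultipleDimsFloat32, MeanMultipleDimsTest<armnn::DataType::Float32>)
ARMNN_AUTO_TEST_CASE(MeanVts1Float32,         MeanVts1Test<armnn::DataType::Float32>)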