ArmNN 22.05.01
ElementwiseUnaryTestImpl.hpp
//
// Copyright © 2019 Arm Ltd and Contributors. All rights reserved.
// SPDX-License-Identifier: MIT
//

#pragma once

#include "LayerTestResult.hpp"

#include <armnn/ArmNN.hpp>

#include <ResolveType.hpp>

#include <armnn/backends/IBackendInternal.hpp>
#include <armnn/backends/Workload.hpp>
#include <armnn/backends/WorkloadData.hpp>
#include <armnn/backends/WorkloadFactory.hpp>

#include <DataTypeUtils.hpp>
#include <armnnTestUtils/TensorCopyUtils.hpp>
#include <armnnTestUtils/WorkloadTestUtils.hpp>

#include <armnnTestUtils/TensorHelpers.hpp>

#include <memory>

std::unique_ptr<armnn::IWorkload> CreateWorkload(
    const armnn::IWorkloadFactory& workloadFactory,
    const armnn::WorkloadInfo& info,
    const armnn::ElementwiseUnaryQueueDescriptor& descriptor);

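// Creates and runs a single ElementwiseUnary workload (e.g. Abs, Neg, Rsqrt) through the
// supplied workload factory, returning both the actual and the expected results so the
// caller can compare them.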
template <std::size_t NumDims,
          armnn::DataType ArmnnType,
          typename T = armnn::ResolveType<ArmnnType>>
LayerTestResult<T, NumDims> ElementwiseUnaryTestHelper(
    armnn::IWorkloadFactory& workloadFactory,
    const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager,
    armnn::UnaryOperation op,
    const unsigned int shape[NumDims],
    std::vector<float> values,
    float quantScale,
    int quantOffset,
    const unsigned int outShape[NumDims],
    std::vector<float> outValues,
    const armnn::ITensorHandleFactory& tensorHandleFactory,
    float outQuantScale,
    int outQuantOffset)
{
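    // Build the input and output TensorInfos with the requested quantisation parameters.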
    armnn::TensorInfo inputTensorInfo{NumDims, shape, ArmnnType};
    armnn::TensorInfo outputTensorInfo{NumDims, outShape, ArmnnType};

    inputTensorInfo.SetQuantizationScale(quantScale);
    inputTensorInfo.SetQuantizationOffset(quantOffset);

    outputTensorInfo.SetQuantizationScale(outQuantScale);
    outputTensorInfo.SetQuantizationOffset(outQuantOffset);

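    // Convert the float reference data to the target data type and create the backend tensor
    // handles. Note that the expected output is quantised with the input tensor's parameters.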
    std::vector<T> input = ConvertToDataType<ArmnnType>(values, inputTensorInfo);
    std::vector<T> expectedOutput = ConvertToDataType<ArmnnType>(outValues, inputTensorInfo);
    std::vector<T> actualOutput(outputTensorInfo.GetNumElements());

    std::unique_ptr<armnn::ITensorHandle> inputHandle = tensorHandleFactory.CreateTensorHandle(inputTensorInfo);
    std::unique_ptr<armnn::ITensorHandle> outputHandle = tensorHandleFactory.CreateTensorHandle(outputTensorInfo);

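    // Describe the elementwise unary workload and bind the input and output handles to it.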
    armnn::ElementwiseUnaryQueueDescriptor qDesc;
    armnn::ElementwiseUnaryDescriptor desc(op);
    qDesc.m_Parameters = desc;
    armnn::WorkloadInfo info;
    AddInputToWorkload(qDesc, info, inputTensorInfo, inputHandle.get());
    AddOutputToWorkload(qDesc, info, outputTensorInfo, outputHandle.get());
    auto workload = CreateWorkload(workloadFactory, info, qDesc);

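    // Allocate backend memory, upload the input, execute the workload, and read back the result.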
    inputHandle->Allocate();
    outputHandle->Allocate();

    CopyDataToITensorHandle(inputHandle.get(), input.data());

    workload->PostAllocationConfigure();
    ExecuteWorkload(*workload, memoryManager);

    CopyDataFromITensorHandle(actualOutput.data(), outputHandle.get());

    return LayerTestResult<T, NumDims>(actualOutput,
                                       expectedOutput,
                                       outputHandle->GetShape(),
                                       outputTensorInfo.GetShape());
}

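// Convenience overload for the common case where the input and output tensors share the
// same quantisation scale and offset.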
template <std::size_t NumDims,
          armnn::DataType ArmnnType,
          typename T = armnn::ResolveType<ArmnnType>>
LayerTestResult<T, NumDims> ElementwiseUnaryTestHelper(
    armnn::IWorkloadFactory& workloadFactory,
    const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager,
    armnn::UnaryOperation op,
    const unsigned int shape[NumDims],
    std::vector<float> values,
    const unsigned int outShape[NumDims],
    std::vector<float> outValues,
    const armnn::ITensorHandleFactory& tensorHandleFactory,
    float quantScale = 1.0f,
    int quantOffset = 0)
{
    return ElementwiseUnaryTestHelper<NumDims, ArmnnType>(
        workloadFactory,
        memoryManager,
        op,
        shape,
        values,
        quantScale,
        quantOffset,
        outShape,
        outValues,
        tensorHandleFactory,
        quantScale,
        quantOffset);
}
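For reference, a backend layer test typically wraps the convenience overload as in the sketch below. The SimpleAbs2dTest name, the 2x2 shape, and the sample values are illustrative only and are not part of this header.

// Minimal usage sketch (illustrative, not part of this header): exercises the Abs
// operation through the convenience overload declared above.
template <armnn::DataType ArmnnType, typename T = armnn::ResolveType<ArmnnType>>
LayerTestResult<T, 2> SimpleAbs2dTest(
    armnn::IWorkloadFactory& workloadFactory,
    const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager,
    const armnn::ITensorHandleFactory& tensorHandleFactory)
{
    // 2x2 input and its element-wise absolute values.
    const unsigned int shape[] = { 2, 2 };

    std::vector<float> inputValues  = { -1.f, -2.f, 3.f, 4.f };
    std::vector<float> outputValues = {  1.f,  2.f, 3.f, 4.f };

    return ElementwiseUnaryTestHelper<2, ArmnnType>(
        workloadFactory,
        memoryManager,
        armnn::UnaryOperation::Abs,
        shape,
        inputValues,
        shape,
        outputValues,
        tensorHandleFactory);
}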