// NOTE(review): extraction fragment of SimpleReshapeTestImpl. The leading
// numerals on each line are residual source line numbers from the extraction,
// not code; the function signature, LayerTestResult declaration, descriptor
// setup, CopyData* calls and return statement fall in the missing lines
// (gaps 18-22, 25, 28-29, 31, 34-36, 39, 41, 44+) — do not treat this as a
// compilable unit.
17 template<
typename T,
size_t NumDims>
23 const std::vector<T>& inputData,
24 const std::vector<T>& outputExpectedData)
// memoryManager is intentionally unused here (reference-workload path).
26 boost::ignore_unused(memoryManager);
// Build the input tensor and record the expected output for later comparison.
27 auto input = MakeTensor<T, NumDims>(inputTensorInfo, inputData);
30 ret.outputExpected = MakeTensor<T, NumDims>(outputTensorInfo, outputExpectedData);
// Create backend tensor handles for input and output via the workload factory.
32 std::unique_ptr<armnn::ITensorHandle> inputHandle = workloadFactory.
CreateTensorHandle(inputTensorInfo);
33 std::unique_ptr<armnn::ITensorHandle> outputHandle = workloadFactory.
CreateTensorHandle(outputTensorInfo);
// Wire the handles into the queue descriptor / workload info
// (data and info are declared in the missing lines above).
37 AddInputToWorkload(data, info, inputTensorInfo, inputHandle.get());
38 AddOutputToWorkload(data, info, outputTensorInfo, outputHandle.get());
// Create the Reshape workload, then allocate device memory for both handles.
40 std::unique_ptr<armnn::IWorkload> workload = workloadFactory.
CreateReshape(data, info);
42 inputHandle->Allocate();
43 outputHandle->Allocate();
// NOTE(review): extraction fragment of SimpleReshapeTest<ArmnnType, T>.
// Visible intent: reshape a {2,2,3,3} tensor (36 sequential values 0..35)
// into {2,2,9,1} — element order is unchanged, only the shape differs.
// The function signature, TensorInfo construction, quantization-scale setup
// and the input data literal (original lines 57-63, 66-71, 73-91) are missing
// from this extraction.
56 template<armnn::DataType ArmnnType,
typename T>
64 unsigned int inputShape[] = { 2, 2, 3, 3 };
65 unsigned int outputShape[] = { 2, 2, 9, 1 };
// Input values converted to the target ArmnnType (data literal not captured).
72 auto input = ConvertToDataType<ArmnnType>(
// Expected output: same 36 values, now laid out as 4 rows of 9.
92 auto outputExpected = ConvertToDataType<ArmnnType>(
94 0.0f, 1.0f, 2.0f, 3.0f, 4.0f, 5.0f, 6.0f, 7.0f, 8.0f,
96 9.0f, 10.0f, 11.0f, 12.0f, 13.0f, 14.0f, 15.0f, 16.0f, 17.0f,
98 18.0f, 19.0f, 20.0f, 21.0f, 22.0f, 23.0f, 24.0f, 25.0f, 26.0f,
100 27.0f, 28.0f, 29.0f, 30.0f, 31.0f, 32.0f, 33.0f, 34.0f, 35.0f,
// Delegate to the shared 4-D implementation above.
104 return SimpleReshapeTestImpl<T, 4>(
105 workloadFactory, memoryManager, inputTensorInfo, outputTensorInfo, input, outputExpected);
// NOTE(review): extraction fragment of Reshape5dTest<ArmnnType, T>.
// Visible intent: reshape a {2,2,8,1,1} tensor (32 sequential values 0..31)
// into {2,2,2,2,2}. The signature, TensorInfo setup, and the expected-output
// data literal (original lines 109-115, 118-123, 135-165) are missing from
// this extraction — the expected values are presumably the same 0..31
// sequence regrouped, but that cannot be confirmed from what is shown here.
108 template<armnn::DataType ArmnnType,
typename T>
116 unsigned int inputShape[] = { 2, 2, 8, 1, 1 };
117 unsigned int outputShape[] = { 2, 2, 2, 2, 2 };
// Input values 0..31 converted to the target ArmnnType.
124 auto input = ConvertToDataType<ArmnnType>(
126 0.0f, 1.0f, 2.0f, 3.0f, 4.0f, 5.0f, 6.0f, 7.0f,
127 8.0f, 9.0f, 10.0f, 11.0f, 12.0f, 13.0f, 14.0f, 15.0f,
129 16.0f, 17.0f, 18.0f, 19.0f, 20.0f, 21.0f, 22.0f, 23.0f,
130 24.0f, 25.0f, 26.0f, 27.0f, 28.0f, 29.0f, 30.0f, 31.0f,
// Expected output conversion (data literal not captured by the extraction).
134 auto outputExpected = ConvertToDataType<ArmnnType>(
// Delegate to the shared implementation, instantiated for 5 dimensions.
166 return SimpleReshapeTestImpl<T, 5>(
167 workloadFactory, memoryManager, inputTensorInfo, outputTensorInfo, input, outputExpected);
// NOTE(review): extraction fragment of the explicit template instantiations.
// Both tests are instantiated for Float32, QAsymmU8 and QSymmS16; the
// `template LayerTestResult<...>` prefix lines for the 4-D SimpleReshapeTest
// instantiations (original lines 174, 179, 184) and the parameter lists
// (176-178, 181-183, 186-188, 191-193, 196-198, 201+) are missing here.
175 SimpleReshapeTest<armnn::DataType::Float32>(
180 SimpleReshapeTest<armnn::DataType::QAsymmU8>(
185 SimpleReshapeTest<armnn::DataType::QSymmS16>(
// 5-D variants: ResolveType maps each armnn::DataType to its C++ storage type.
189 template LayerTestResult<armnn::ResolveType<armnn::DataType::Float32>, 5>
190 Reshape5dTest<armnn::DataType::Float32>(
194 template LayerTestResult<armnn::ResolveType<armnn::DataType::QAsymmU8>, 5>
195 Reshape5dTest<armnn::DataType::QAsymmU8>(
199 template LayerTestResult<armnn::ResolveType<armnn::DataType::QSymmS16>, 5>
200 Reshape5dTest<armnn::DataType::QSymmS16>(
LayerTestResult< T, 4 > SimpleReshapeTest(armnn::IWorkloadFactory &workloadFactory, const armnn::IBackendInternal::IMemoryManagerSharedPtr &memoryManager)
LayerTestResult< T, 5 > Reshape5dTest(armnn::IWorkloadFactory &workloadFactory, const armnn::IBackendInternal::IMemoryManagerSharedPtr &memoryManager)
void CopyDataToITensorHandle(armnn::ITensorHandle *tensorHandle, const void *memory)
virtual std::unique_ptr< ITensorHandle > CreateTensorHandle(const TensorInfo &tensorInfo, const bool IsMemoryManaged=true) const =0
void CopyDataFromITensorHandle(void *memory, const armnn::ITensorHandle *tensorHandle)
std::shared_ptr< IMemoryManager > IMemoryManagerSharedPtr
void SetQuantizationScale(float scale)
virtual std::unique_ptr< IWorkload > CreateReshape(const ReshapeQueueDescriptor &descriptor, const WorkloadInfo &info) const