//
// Copyright © 2017 Arm Ltd and Contributors. All rights reserved.
// SPDX-License-Identifier: MIT
//

#pragma once

#include <armnnTestUtils/LayerTestResult.hpp>

#include <ResolveType.hpp>

#include <armnn/backends/IBackendInternal.hpp>
#include <armnn/backends/Workload.hpp>
#include <armnn/backends/WorkloadData.hpp>
#include <armnn/backends/WorkloadFactory.hpp>

#include <armnnTestUtils/DataTypeUtils.hpp>
#include <armnnTestUtils/TensorCopyUtils.hpp>
#include <armnnTestUtils/WorkloadTestUtils.hpp>

#include <armnnTestUtils/TensorHelpers.hpp>

#include <memory>

// Factory hook used by ElementwiseTestHelper. The per-layer test sources
// provide explicit specialisations for their queue descriptor types
// (e.g. armnn::AdditionQueueDescriptor) that forward to the corresponding
// IWorkloadFactory create function.
template<typename DescriptorType>
std::unique_ptr<armnn::IWorkload> CreateWorkload(
    const armnn::IWorkloadFactory& workloadFactory,
    const armnn::WorkloadInfo& info,
    const DescriptorType& descriptor)
{
    return CreateWorkload(workloadFactory, info, descriptor);
}

// Runs a two-input, one-output elementwise workload and returns the actual
// output alongside the expected values. Each input and the output carry
// their own quantisation scale and offset.
template<std::size_t NumDims,
         typename Descriptor,
         armnn::DataType ArmnnTypeInput,
         armnn::DataType ArmnnTypeOutput,
         typename TInput  = armnn::ResolveType<ArmnnTypeInput>,
         typename TOutput = armnn::ResolveType<ArmnnTypeOutput>>
LayerTestResult<TOutput, NumDims> ElementwiseTestHelper(
    armnn::IWorkloadFactory& workloadFactory,
    const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager,
    const unsigned int shape0[NumDims],
    std::vector<TInput> values0,
    float quantScale0,
    int quantOffset0,
    const unsigned int shape1[NumDims],
    std::vector<TInput> values1,
    float quantScale1,
    int quantOffset1,
    const unsigned int outShape[NumDims],
    std::vector<TOutput> outValues,
    const armnn::ITensorHandleFactory& tensorHandleFactory,
    float outQuantScale,
    int outQuantOffset)
{
    armnn::TensorInfo inputTensorInfo0{NumDims, shape0, ArmnnTypeInput};
    armnn::TensorInfo inputTensorInfo1{NumDims, shape1, ArmnnTypeInput};
    armnn::TensorInfo outputTensorInfo{NumDims, outShape, ArmnnTypeOutput};

    inputTensorInfo0.SetQuantizationScale(quantScale0);
    inputTensorInfo0.SetQuantizationOffset(quantOffset0);

    inputTensorInfo1.SetQuantizationScale(quantScale1);
    inputTensorInfo1.SetQuantizationOffset(quantOffset1);

    outputTensorInfo.SetQuantizationScale(outQuantScale);
    outputTensorInfo.SetQuantizationOffset(outQuantOffset);

    std::vector<TOutput> actualOutput(outputTensorInfo.GetNumElements());

    bool isBoolean = false;
    if (ArmnnTypeOutput == armnn::DataType::Boolean)
    {
        isBoolean = true;
    }

    std::unique_ptr<armnn::ITensorHandle> inputHandle0 = tensorHandleFactory.CreateTensorHandle(inputTensorInfo0);
    std::unique_ptr<armnn::ITensorHandle> inputHandle1 = tensorHandleFactory.CreateTensorHandle(inputTensorInfo1);
    std::unique_ptr<armnn::ITensorHandle> outputHandle = tensorHandleFactory.CreateTensorHandle(outputTensorInfo);

    Descriptor data;
    armnn::WorkloadInfo info;
    AddInputToWorkload(data, info, inputTensorInfo0, inputHandle0.get());
    AddInputToWorkload(data, info, inputTensorInfo1, inputHandle1.get());
    AddOutputToWorkload(data, info, outputTensorInfo, outputHandle.get());

    auto workload = CreateWorkload<Descriptor>(workloadFactory, info, data);

    inputHandle0->Allocate();
    inputHandle1->Allocate();
    outputHandle->Allocate();

    CopyDataToITensorHandle(inputHandle0.get(), values0.data());
    CopyDataToITensorHandle(inputHandle1.get(), values1.data());

    workload->PostAllocationConfigure();
    ExecuteWorkload(*workload, memoryManager);

    CopyDataFromITensorHandle(actualOutput.data(), outputHandle.get());

    return LayerTestResult<TOutput, NumDims>(actualOutput,
                                             outValues,
                                             outputHandle->GetShape(),
                                             outputTensorInfo.GetShape(),
                                             isBoolean);
}

// Convenience overload for the common case where the input and output data
// types are the same.
template<std::size_t NumDims,
         typename Descriptor,
         armnn::DataType ArmnnType,
         typename T = armnn::ResolveType<ArmnnType>>
LayerTestResult<T, NumDims> ElementwiseTestHelper(
    armnn::IWorkloadFactory& workloadFactory,
    const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager,
    const unsigned int shape0[NumDims],
    std::vector<T> values0,
    float quantScale0,
    int quantOffset0,
    const unsigned int shape1[NumDims],
    std::vector<T> values1,
    float quantScale1,
    int quantOffset1,
    const unsigned int outShape[NumDims],
    std::vector<T> outValues,
    const armnn::ITensorHandleFactory& tensorHandleFactory,
    float outQuantScale,
    int outQuantOffset)
{
    return ElementwiseTestHelper<NumDims, Descriptor, ArmnnType, ArmnnType>(
        workloadFactory,
        memoryManager,
        shape0,
        values0,
        quantScale0,
        quantOffset0,
        shape1,
        values1,
        quantScale1,
        quantOffset1,
        outShape,
        outValues,
        tensorHandleFactory,
        outQuantScale,
        outQuantOffset);
}
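// Usage sketch (illustrative, not part of the original ArmNN test suite):
// exercising the per-tensor quantisation overload above with QAsymmU8
// addition. The shapes, values and quantisation parameters are assumptions
// chosen for the example, and it relies on the explicit specialisation of
// CreateWorkload for armnn::AdditionQueueDescriptor (normally provided by
// the addition layer test source) being declared before this point.
inline LayerTestResult<uint8_t, 4> ExampleQuantizedAdditionSketch(
    armnn::IWorkloadFactory& workloadFactory,
    const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager,
    const armnn::ITensorHandleFactory& tensorHandleFactory)
{
    const unsigned int shape[] = { 1, 1, 2, 2 };

    // With scale 0.5 and offset 0 these encode the real values
    // 1, 2, 3, 4 and 5, 6, 7, 8 respectively.
    std::vector<uint8_t> input0 = { 2, 4, 6, 8 };
    std::vector<uint8_t> input1 = { 10, 12, 14, 16 };

    // Real sums 6, 8, 10, 12, re-quantised with output scale 1.0, offset 0.
    std::vector<uint8_t> expectedOutput = { 6, 8, 10, 12 };

    return ElementwiseTestHelper<4, armnn::AdditionQueueDescriptor, armnn::DataType::QAsymmU8>(
        workloadFactory,
        memoryManager,
        shape,
        input0,
        0.5f, // quantScale0
        0,    // quantOffset0
        shape,
        input1,
        0.5f, // quantScale1
        0,    // quantOffset1
        shape,
        expectedOutput,
        tensorHandleFactory,
        1.0f, // outQuantScale
        0);   // outQuantOffset
}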
// Convenience overload that applies the same quantisation scale and offset
// to both inputs and the output.
template<std::size_t NumDims,
         typename Descriptor,
         armnn::DataType ArmnnTypeInput,
         armnn::DataType ArmnnTypeOutput,
         typename TInput  = armnn::ResolveType<ArmnnTypeInput>,
         typename TOutput = armnn::ResolveType<ArmnnTypeOutput>>
LayerTestResult<TOutput, NumDims> ElementwiseTestHelper(
    armnn::IWorkloadFactory& workloadFactory,
    const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager,
    const unsigned int shape0[NumDims],
    std::vector<TInput> values0,
    const unsigned int shape1[NumDims],
    std::vector<TInput> values1,
    const unsigned int outShape[NumDims],
    std::vector<TOutput> outValues,
    const armnn::ITensorHandleFactory& tensorHandleFactory,
    float quantScale = 1.0f,
    int quantOffset = 0)
{
    return ElementwiseTestHelper<NumDims, Descriptor, ArmnnTypeInput, ArmnnTypeOutput>(
        workloadFactory,
        memoryManager,
        shape0,
        values0,
        quantScale,
        quantOffset,
        shape1,
        values1,
        quantScale,
        quantOffset,
        outShape,
        outValues,
        tensorHandleFactory,
        quantScale,
        quantOffset);
}

// Convenience overload with a single data type and shared quantisation
// parameters.
template<std::size_t NumDims,
         typename Descriptor,
         armnn::DataType ArmnnType,
         typename T = armnn::ResolveType<ArmnnType>>
LayerTestResult<T, NumDims> ElementwiseTestHelper(
    armnn::IWorkloadFactory& workloadFactory,
    const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager,
    const unsigned int shape0[NumDims],
    std::vector<T> values0,
    const unsigned int shape1[NumDims],
    std::vector<T> values1,
    const unsigned int outShape[NumDims],
    std::vector<T> outValues,
    const armnn::ITensorHandleFactory& tensorHandleFactory,
    float quantScale = 1.0f,
    int quantOffset = 0)
{
    return ElementwiseTestHelper<NumDims, Descriptor, ArmnnType, ArmnnType>(
        workloadFactory,
        memoryManager,
        shape0,
        values0,
        shape1,
        values1,
        outShape,
        outValues,
        tensorHandleFactory,
        quantScale,
        quantOffset);
}
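// Usage sketch (illustrative, not part of the original ArmNN test suite):
// the simplest entry point, with a single data type and the default
// quantisation parameters. Shapes and values are assumptions chosen for the
// example; it likewise relies on the CreateWorkload specialisation for
// armnn::AdditionQueueDescriptor being declared before this point.
inline LayerTestResult<float, 4> ExampleSimpleAdditionSketch(
    armnn::IWorkloadFactory& workloadFactory,
    const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager,
    const armnn::ITensorHandleFactory& tensorHandleFactory)
{
    const unsigned int shape[] = { 1, 1, 2, 2 };

    std::vector<float> input0         = { 1.f, 2.f,  3.f,  4.f };
    std::vector<float> input1         = { 5.f, 6.f,  7.f,  8.f };
    std::vector<float> expectedOutput = { 6.f, 8.f, 10.f, 12.f };

    return ElementwiseTestHelper<4, armnn::AdditionQueueDescriptor, armnn::DataType::Float32>(
        workloadFactory,
        memoryManager,
        shape,
        input0,
        shape,
        input1,
        shape,
        expectedOutput,
        tensorHandleFactory);
}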