From 10b4dfd8e9ccd7a03df7bb053ee1c644cb37f8ab Mon Sep 17 00:00:00 2001
From: David Beck
Date: Wed, 19 Sep 2018 12:03:20 +0100
Subject: IVGCVSW-1897 : build infrastructure for the src/backends folder

Change-Id: I7ebafb675ccc77ad54d1deb01412a8379a5356bb
---
 src/backends/test/ReshapeTestImpl.hpp | 177 ++++++++++++++++++++++++++++++++++
 1 file changed, 177 insertions(+)
 create mode 100644 src/backends/test/ReshapeTestImpl.hpp

diff --git a/src/backends/test/ReshapeTestImpl.hpp b/src/backends/test/ReshapeTestImpl.hpp
new file mode 100644
index 0000000000..5d32d9d3a6
--- /dev/null
+++ b/src/backends/test/ReshapeTestImpl.hpp
@@ -0,0 +1,177 @@
+//
+// Copyright © 2017 Arm Ltd. All rights reserved.
+// SPDX-License-Identifier: MIT
+//
+#pragma once
+
+#include
+#include
+#include
+#include
+
+#include "test/TensorHelpers.hpp"
+#include "QuantizeHelper.hpp"
+
+#include "backends/CpuTensorHandle.hpp"
+#include "backends/WorkloadFactory.hpp"
+
+template<typename T>
+LayerTestResult<T, 4> SimpleReshapeTestImpl(
+    armnn::IWorkloadFactory& workloadFactory,
+    armnn::TensorInfo inputTensorInfo,
+    armnn::TensorInfo outputTensorInfo,
+    const std::vector<T>& inputData,
+    const std::vector<T>& outputExpectedData)
+{
+    auto input = MakeTensor<T, 4>(inputTensorInfo, inputData);
+
+    LayerTestResult<T, 4> ret(outputTensorInfo);
+    ret.outputExpected = MakeTensor<T, 4>(outputTensorInfo, outputExpectedData);
+
+    std::unique_ptr<armnn::ITensorHandle> inputHandle = workloadFactory.CreateTensorHandle(inputTensorInfo);
+    std::unique_ptr<armnn::ITensorHandle> outputHandle = workloadFactory.CreateTensorHandle(outputTensorInfo);
+
+    armnn::ReshapeQueueDescriptor data;
+    armnn::WorkloadInfo info;
+    AddInputToWorkload(data, info, inputTensorInfo, inputHandle.get());
+    AddOutputToWorkload(data, info, outputTensorInfo, outputHandle.get());
+
+    std::unique_ptr<armnn::IWorkload> workload = workloadFactory.CreateReshape(data, info);
+
+    inputHandle->Allocate();
+    outputHandle->Allocate();
+
+    CopyDataToITensorHandle(inputHandle.get(), &input[0][0][0][0]);
+
+    workload->Execute();
+
+    CopyDataFromITensorHandle(&ret.output[0][0][0][0], outputHandle.get());
+
+    return ret;
+}
+
+LayerTestResult<float, 4> SimpleReshapeFloat32Test(armnn::IWorkloadFactory& workloadFactory)
+{
+    armnn::TensorInfo inputTensorInfo;
+    armnn::TensorInfo outputTensorInfo;
+
+    unsigned int inputShape[] = { 2, 2, 3, 3 };
+    unsigned int outputShape[] = { 2, 2, 9, 1 };
+
+    inputTensorInfo = armnn::TensorInfo(4, inputShape, armnn::DataType::Float32);
+    outputTensorInfo = armnn::TensorInfo(4, outputShape, armnn::DataType::Float32);
+
+    std::vector<float> input = std::vector<float>(
+    {
+        0.0f, 1.0f, 2.0f,
+        3.0f, 4.0f, 5.0f,
+        6.0f, 7.0f, 8.0f,
+
+        9.0f, 10.0f, 11.0f,
+        12.0f, 13.0f, 14.0f,
+        15.0f, 16.0f, 17.0f,
+
+        18.0f, 19.0f, 20.0f,
+        21.0f, 22.0f, 23.0f,
+        24.0f, 25.0f, 26.0f,
+
+        27.0f, 28.0f, 29.0f,
+        30.0f, 31.0f, 32.0f,
+        33.0f, 34.0f, 35.0f,
+    });
+
+    std::vector<float> outputExpected = std::vector<float>(
+    {
+        0.0f, 1.0f, 2.0f, 3.0f, 4.0f, 5.0f, 6.0f, 7.0f, 8.0f,
+
+        9.0f, 10.0f, 11.0f, 12.0f, 13.0f, 14.0f, 15.0f, 16.0f, 17.0f,
+
+        18.0f, 19.0f, 20.0f, 21.0f, 22.0f, 23.0f, 24.0f, 25.0f, 26.0f,
+
+        27.0f, 28.0f, 29.0f, 30.0f, 31.0f, 32.0f, 33.0f, 34.0f, 35.0f,
+    });
+
+    return SimpleReshapeTestImpl<float>(workloadFactory, inputTensorInfo, outputTensorInfo, input, outputExpected);
+}
+
+LayerTestResult<float, 4> SimpleFloorTest(armnn::IWorkloadFactory& workloadFactory)
+{
+    const armnn::TensorInfo inputTensorInfo({1, 3, 2, 3}, armnn::DataType::Float32);
+    const armnn::TensorInfo outputTensorInfo(inputTensorInfo);
+
+    auto input = MakeTensor<float, 4>(inputTensorInfo,
+        { -37.5f, -15.2f, -8.76f, -2.0f, -1.5f, -1.3f, -0.5f, -0.4f, 0.0f,
+          1.0f, 0.4f, 0.5f, 1.3f, 1.5f, 2.0f, 8.76f, 15.2f, 37.5f });
+
+    LayerTestResult<float, 4> ret(outputTensorInfo);
+    ret.outputExpected = MakeTensor<float, 4>(outputTensorInfo,
+        { -38.0f, -16.0f, -9.0f, -2.0f, -2.0f, -2.0f, -1.0f, -1.0f, 0.0f,
+          1.0f, 0.0f, 0.0f, 1.0f, 1.0f, 2.0f, 8.0f, 15.0f, 37.0f });
+
+    std::unique_ptr<armnn::ITensorHandle> inputHandle = workloadFactory.CreateTensorHandle(inputTensorInfo);
+    std::unique_ptr<armnn::ITensorHandle> outputHandle = workloadFactory.CreateTensorHandle(outputTensorInfo);
+
+    armnn::FloorQueueDescriptor data;
+    armnn::WorkloadInfo info;
+    AddInputToWorkload(data, info, inputTensorInfo, inputHandle.get());
+    AddOutputToWorkload(data, info, outputTensorInfo, outputHandle.get());
+
+    std::unique_ptr<armnn::IWorkload> workload = workloadFactory.CreateFloor(data, info);
+
+    inputHandle->Allocate();
+    outputHandle->Allocate();
+
+    CopyDataToITensorHandle(inputHandle.get(), &input[0][0][0][0]);
+
+    workload->Execute();
+
+    CopyDataFromITensorHandle(&ret.output[0][0][0][0], outputHandle.get());
+
+    return ret;
+}
+
+LayerTestResult<uint8_t, 4> SimpleReshapeUint8Test(armnn::IWorkloadFactory& workloadFactory)
+{
+    armnn::TensorInfo inputTensorInfo;
+    armnn::TensorInfo outputTensorInfo;
+
+    unsigned int inputShape[] = { 2, 2, 3, 3 };
+    unsigned int outputShape[] = { 2, 2, 9, 1 };
+
+    inputTensorInfo = armnn::TensorInfo(4, inputShape, armnn::DataType::QuantisedAsymm8);
+    inputTensorInfo.SetQuantizationScale(1.0f);
+    outputTensorInfo = armnn::TensorInfo(4, outputShape, armnn::DataType::QuantisedAsymm8);
+    outputTensorInfo.SetQuantizationScale(1.0f);
+
+    std::vector<uint8_t> input = std::vector<uint8_t>(
+    {
+        0, 1, 2,
+        3, 4, 5,
+        6, 7, 8,
+
+        9, 10, 11,
+        12, 13, 14,
+        15, 16, 17,
+
+        18, 19, 20,
+        21, 22, 23,
+        24, 25, 26,
+
+        27, 28, 29,
+        30, 31, 32,
+        33, 34, 35,
+    });
+
+    std::vector<uint8_t> outputExpected = std::vector<uint8_t>(
+    {
+        0, 1, 2, 3, 4, 5, 6, 7, 8,
+
+        9, 10, 11, 12, 13, 14, 15, 16, 17,
+
+        18, 19, 20, 21, 22, 23, 24, 25, 26,
+
+        27, 28, 29, 30, 31, 32, 33, 34, 35,
+    });
+
+    return SimpleReshapeTestImpl<uint8_t>(workloadFactory, inputTensorInfo, outputTensorInfo, input, outputExpected);
+}
-- 
cgit v1.2.1
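
Usage note: the helpers above are written to be shared across per-backend unit tests. A test picks a concrete armnn::IWorkloadFactory, calls one of the Simple*Test functions, and compares result.output against result.outputExpected. The sketch below shows one minimal, hypothetical way to drive the Float32 reshape test against the reference backend; the RefWorkloadFactory and LayerTests.hpp include paths, the CompareTensors helper, and the plain Boost.Test registration are assumptions about the surrounding test infrastructure (the in-tree suites register these cases through their own macros), not part of this change.

// Minimal usage sketch, not part of the patch. Assumes: the reference backend's
// RefWorkloadFactory header, LayerTestResult being defined in LayerTests.hpp, and
// the CompareTensors helper from test/TensorHelpers.hpp. Paths may differ in-tree.
#define BOOST_TEST_MODULE ReshapeTestImplExample
#include <boost/test/included/unit_test.hpp>

#include "backends/RefWorkloadFactory.hpp" // assumed header for the reference workload factory
#include "backends/test/LayerTests.hpp"    // assumed home of LayerTestResult
#include "backends/test/ReshapeTestImpl.hpp"

BOOST_AUTO_TEST_CASE(SimpleReshapeFloat32OnRef)
{
    // Any concrete IWorkloadFactory can drive the shared test; the reference
    // backend keeps this example CPU-only.
    armnn::RefWorkloadFactory workloadFactory;

    // Runs the 2x2x3x3 -> 2x2x9x1 reshape defined in the patch and captures
    // both the actual and the expected output tensors.
    LayerTestResult<float, 4> result = SimpleReshapeFloat32Test(workloadFactory);

    // Element-wise comparison (via TensorHelpers.hpp) of actual vs. expected data.
    BOOST_TEST(CompareTensors(result.output, result.outputExpected));
}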