From 9066d3c46c434406d9e08b397a14e9ef2eb2cf01 Mon Sep 17 00:00:00 2001
From: Nattapat Chaimanowong
Date: Wed, 27 Feb 2019 17:27:16 +0000
Subject: IVGCVSW-2765 Modify ParserFlatbuffersSerializeFixture to accept
 different output type

Change-Id: Ia61e25e7375f7c58db6b0cf6389af151ac004f4c
Signed-off-by: Nattapat Chaimanowong
---
 .../test/ParserFlatbuffersSerializeFixture.hpp | 79 ++++++++++++++------
 1 file changed, 59 insertions(+), 20 deletions(-)

diff --git a/src/armnnDeserializer/test/ParserFlatbuffersSerializeFixture.hpp b/src/armnnDeserializer/test/ParserFlatbuffersSerializeFixture.hpp
index 8db9d5684b..7baa010840 100644
--- a/src/armnnDeserializer/test/ParserFlatbuffersSerializeFixture.hpp
+++ b/src/armnnDeserializer/test/ParserFlatbuffersSerializeFixture.hpp
@@ -116,22 +116,40 @@ struct ParserFlatbuffersSerializeFixture
     /// Executes the network with the given input tensor and checks the result against the given output tensor.
     /// This overload assumes the network has a single input and a single output.
-    template <std::size_t NumOutputDimensions,
-              armnn::DataType ArmnnType,
-              typename DataType = armnn::ResolveType<ArmnnType>>
+    template<std::size_t NumOutputDimensions,
+             armnn::DataType ArmnnType,
+             typename DataType = armnn::ResolveType<ArmnnType>>
     void RunTest(unsigned int layersId,
                  const std::vector<DataType>& inputData,
                  const std::vector<DataType>& expectedOutputData);
 
+    template<std::size_t NumOutputDimensions,
+             armnn::DataType ArmnnInputType,
+             armnn::DataType ArmnnOutputType,
+             typename InputDataType = armnn::ResolveType<ArmnnInputType>,
+             typename OutputDataType = armnn::ResolveType<ArmnnOutputType>>
+    void RunTest(unsigned int layersId,
+                 const std::vector<InputDataType>& inputData,
+                 const std::vector<OutputDataType>& expectedOutputData);
+
     /// Executes the network with the given input tensors and checks the results against the given output tensors.
     /// This overload supports multiple inputs and multiple outputs, identified by name.
-    template <std::size_t NumOutputDimensions,
-              armnn::DataType ArmnnType,
-              typename DataType = armnn::ResolveType<ArmnnType>>
+    template<std::size_t NumOutputDimensions,
+             armnn::DataType ArmnnType,
+             typename DataType = armnn::ResolveType<ArmnnType>>
     void RunTest(unsigned int layersId,
                  const std::map<std::string, std::vector<DataType>>& inputData,
                  const std::map<std::string, std::vector<DataType>>& expectedOutputData);
 
+    template<std::size_t NumOutputDimensions,
+             armnn::DataType ArmnnInputType,
+             armnn::DataType ArmnnOutputType,
+             typename InputDataType = armnn::ResolveType<ArmnnInputType>,
+             typename OutputDataType = armnn::ResolveType<ArmnnOutputType>>
+    void RunTest(unsigned int layersId,
+                 const std::map<std::string, std::vector<InputDataType>>& inputData,
+                 const std::map<std::string, std::vector<OutputDataType>>& expectedOutputData);
+
     void CheckTensors(const TensorRawPtr& tensors, size_t shapeSize, const std::vector<uint32_t>& shape,
                       armnnSerializer::TensorInfo tensorType, const std::string& name,
                       const float scale, const int64_t zeroPoint)
@@ -145,24 +163,45 @@ struct ParserFlatbuffersSerializeFixture
     }
 };
 
-template <std::size_t NumOutputDimensions,
-          armnn::DataType ArmnnType,
-          typename DataType>
+template<std::size_t NumOutputDimensions,
+         armnn::DataType ArmnnType,
+         typename DataType>
 void ParserFlatbuffersSerializeFixture::RunTest(unsigned int layersId,
                                                 const std::vector<DataType>& inputData,
                                                 const std::vector<DataType>& expectedOutputData)
 {
-    RunTest<NumOutputDimensions, ArmnnType>(layersId,
-            { { m_SingleInputName, inputData } },
-            { { m_SingleOutputName, expectedOutputData } });
+    RunTest<NumOutputDimensions, ArmnnType, ArmnnType>(layersId, inputData, expectedOutputData);
 }
 
-template <std::size_t NumOutputDimensions,
-          armnn::DataType ArmnnType,
-          typename DataType>
+template<std::size_t NumOutputDimensions,
+         armnn::DataType ArmnnInputType, armnn::DataType ArmnnOutputType,
+         typename InputDataType, typename OutputDataType>
+void ParserFlatbuffersSerializeFixture::RunTest(unsigned int layersId,
+                                                const std::vector<InputDataType>& inputData,
+                                                const std::vector<OutputDataType>& expectedOutputData)
+{
+    RunTest<NumOutputDimensions, ArmnnInputType, ArmnnOutputType>(layersId,
+                                                                  { { m_SingleInputName, inputData } },
+                                                                  { { m_SingleOutputName, expectedOutputData } });
+}
+
+template<std::size_t NumOutputDimensions,
+         armnn::DataType ArmnnType,
+         typename DataType>
 void ParserFlatbuffersSerializeFixture::RunTest(unsigned int layersId,
     const std::map<std::string, std::vector<DataType>>& inputData,
     const std::map<std::string, std::vector<DataType>>& expectedOutputData)
+{
+    RunTest<NumOutputDimensions, ArmnnType, ArmnnType>(layersId, inputData, expectedOutputData);
+}
+
+template<std::size_t NumOutputDimensions,
+         armnn::DataType ArmnnInputType, armnn::DataType ArmnnOutputType,
+         typename InputDataType, typename OutputDataType>
+void ParserFlatbuffersSerializeFixture::RunTest(
+    unsigned int layersId,
+    const std::map<std::string, std::vector<InputDataType>>& inputData,
+    const std::map<std::string, std::vector<OutputDataType>>& expectedOutputData)
 {
     using BindingPointInfo = std::pair<armnn::LayerBindingId, armnn::TensorInfo>;
 
@@ -176,18 +215,18 @@ void ParserFlatbuffersSerializeFixture::RunTest(unsigned int layersId,
     for (auto&& it : inputData)
     {
         BindingPointInfo bindingInfo = ConvertBindingInfo(m_Parser->GetNetworkInputBindingInfo(layersId, it.first));
-        armnn::VerifyTensorInfoDataType(bindingInfo.second, ArmnnType);
+        armnn::VerifyTensorInfoDataType(bindingInfo.second, ArmnnInputType);
         inputTensors.push_back({ bindingInfo.first, armnn::ConstTensor(bindingInfo.second, it.second.data()) });
     }
 
     // Allocate storage for the output tensors to be written to and setup the armnn output tensors.
-    std::map<std::string, boost::multi_array<DataType, NumOutputDimensions>> outputStorage;
+    std::map<std::string, boost::multi_array<OutputDataType, NumOutputDimensions>> outputStorage;
     armnn::OutputTensors outputTensors;
     for (auto&& it : expectedOutputData)
     {
         BindingPointInfo bindingInfo = ConvertBindingInfo(m_Parser->GetNetworkOutputBindingInfo(layersId, it.first));
-        armnn::VerifyTensorInfoDataType(bindingInfo.second, ArmnnType);
-        outputStorage.emplace(it.first, MakeTensor<DataType, NumOutputDimensions>(bindingInfo.second));
+        armnn::VerifyTensorInfoDataType(bindingInfo.second, ArmnnOutputType);
+        outputStorage.emplace(it.first, MakeTensor<OutputDataType, NumOutputDimensions>(bindingInfo.second));
         outputTensors.push_back(
             { bindingInfo.first, armnn::Tensor(bindingInfo.second, outputStorage.at(it.first).data()) });
     }
@@ -198,7 +237,7 @@ void ParserFlatbuffersSerializeFixture::RunTest(unsigned int layersId,
     for (auto&& it : expectedOutputData)
    {
         BindingPointInfo bindingInfo = ConvertBindingInfo(m_Parser->GetNetworkOutputBindingInfo(layersId, it.first));
-        auto outputExpected = MakeTensor<DataType, NumOutputDimensions>(bindingInfo.second, it.second);
+        auto outputExpected = MakeTensor<OutputDataType, NumOutputDimensions>(bindingInfo.second, it.second);
         BOOST_TEST(CompareTensors(outputExpected, outputStorage[it.first]));
     }
 }
--
cgit v1.2.1
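
Usage note (not part of the patch): with the new overloads, a deserializer unit test can exercise a network whose output data type differs from its input data type by passing both armnn::DataType values as template arguments to RunTest. The sketch below is a minimal illustration only; it assumes the template parameter order shown in the reconstructed declarations above (dimension count, input type, output type) and a hypothetical fixture named BooleanOutputFixture whose serialized network has one Float32 input and one Boolean output. Only ParserFlatbuffersSerializeFixture::RunTest itself comes from this change.

    // Hypothetical test sketch: checks a single-input, single-output network
    // where the input is Float32 and the output is Boolean.
    #include <boost/test/unit_test.hpp>
    #include "ParserFlatbuffersSerializeFixture.hpp"

    BOOST_FIXTURE_TEST_CASE(DifferentInputOutputTypes, BooleanOutputFixture)
    {
        // Template arguments:
        //   4                          -> number of output dimensions
        //   armnn::DataType::Float32   -> data type of the input tensor
        //   armnn::DataType::Boolean   -> data type of the expected output tensor
        RunTest<4, armnn::DataType::Float32, armnn::DataType::Boolean>(
            0,                              // layersId
            { 1.0f, 2.0f, 3.0f, 4.0f },     // input, resolves to std::vector<float>
            { 1, 1, 0, 0 });                // expected output, resolves to std::vector<uint8_t>
    }

Before this change the single-type RunTest verified both bindings against the same ArmnnType, so a test like the one sketched above could not express a Boolean output for a Float32 input.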