From 483c811ea6fd0e7801aac1afd979ed02a649064b Mon Sep 17 00:00:00 2001
From: Sadik Armagan
Date: Tue, 1 Jun 2021 09:24:52 +0100
Subject: IVGCVSW-5962 Remove boost::multi_array

* Replaced all instances of boost::multi_array with flat vectors.
* Updated LayerTestResult struct with new member variables.
* Updated CompareTensor function to compare flat vectors and the shape.
* Removed MakeTensor function from TensorHelpers.hpp.
* Removed GetTensorShapeAsArray function from LayerTestResult.hpp.
* Removed boost::array usage.
* Removed boost::extents usages.
* Removed boost::random usages.

Signed-off-by: Matthew Sloyan
Signed-off-by: Sadik Armagan
Change-Id: Iccde9d6640b534940292ff048fb80c00b38c4743
---
 .../test/ParserFlatbuffersFixture.hpp | 19 +++++++++++--------
 1 file changed, 11 insertions(+), 8 deletions(-)

(limited to 'src/armnnTfLiteParser/test')
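Note on the pattern: the fixture change boils down to two things, output storage becomes a flat std::vector sized from the tensor info instead of a boost::multi_array, and the comparison is handed the expected and actual shapes explicitly. The standalone C++ sketch below illustrates that idea only; Shape, NumElements and CompareFlat are made-up stand-ins for illustration, not the real armnn::TensorShape, CompareTensors or LayerTestResult definitions.

// Minimal, self-contained sketch of the "flat vector + explicit shape" comparison
// pattern. Shape, NumElements and CompareFlat are illustrative stand-ins, not the
// real armnn APIs.
#include <cstddef>
#include <iostream>
#include <vector>

using Shape = std::vector<unsigned int>;

// Number of elements implied by a shape (product of its dimensions).
std::size_t NumElements(const Shape& shape)
{
    std::size_t count = 1;
    for (unsigned int dim : shape)
    {
        count *= dim;
    }
    return count;
}

// Compare two flat buffers together with their shapes: the shapes must match,
// the element counts must agree with the shape, and every element must match.
template <typename T>
bool CompareFlat(const std::vector<T>& expected, const Shape& expectedShape,
                 const std::vector<T>& actual,   const Shape& actualShape)
{
    if (expectedShape != actualShape)
    {
        return false;
    }
    if (expected.size() != NumElements(expectedShape) || expected.size() != actual.size())
    {
        return false;
    }
    return expected == actual;
}

int main()
{
    // Storage is a flat vector sized from the shape, replacing boost::multi_array.
    Shape outputShape{ 1, 2, 2 };
    std::vector<float> outputStorage(NumElements(outputShape), 0.0f);

    // Pretend the network wrote these values.
    outputStorage = { 1.0f, 2.0f, 3.0f, 4.0f };

    std::vector<float> expected{ 1.0f, 2.0f, 3.0f, 4.0f };
    std::cout << (CompareFlat(expected, outputShape, outputStorage, outputShape)
                      ? "match" : "mismatch")
              << std::endl;
    return 0;
}

In the fixture itself the element counts and shapes come from the output tensor info and binding info, as the diff below shows.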
diff --git a/src/armnnTfLiteParser/test/ParserFlatbuffersFixture.hpp b/src/armnnTfLiteParser/test/ParserFlatbuffersFixture.hpp
index f333ac0d40..196af190fd 100644
--- a/src/armnnTfLiteParser/test/ParserFlatbuffersFixture.hpp
+++ b/src/armnnTfLiteParser/test/ParserFlatbuffersFixture.hpp
@@ -293,7 +293,7 @@ void ParserFlatbuffersFixture::RunTest(size_t subgraphId,
     FillInputTensors(inputTensors, inputData, subgraphId);
 
     // Allocate storage for the output tensors to be written to and setup the armnn output tensors.
-    std::map<std::string, boost::multi_array<DataType2, NumOutputDimensions>> outputStorage;
+    std::map<std::string, std::vector<DataType2>> outputStorage;
     armnn::OutputTensors outputTensors;
     for (auto&& it : expectedOutputData)
     {
@@ -309,7 +309,7 @@ void ParserFlatbuffersFixture::RunTest(size_t subgraphId,
                                             it.first));
         armnn::VerifyTensorInfoDataType(outputTensorInfo, armnnType2);
 
-        outputStorage.emplace(it.first, MakeTensor<DataType2, NumOutputDimensions>(outputTensorInfo));
+        outputStorage.emplace(it.first, std::vector<DataType2>(outputTensorInfo.GetNumElements()));
         outputTensors.push_back(
                 { outputBindingId, armnn::Tensor(outputTensorInfo, outputStorage.at(it.first).data()) });
     }
@@ -320,8 +320,10 @@ void ParserFlatbuffersFixture::RunTest(size_t subgraphId,
     for (auto&& it : expectedOutputData)
     {
         armnn::BindingPointInfo bindingInfo = m_Parser->GetNetworkOutputBindingInfo(subgraphId, it.first);
-        auto outputExpected = MakeTensor<DataType2, NumOutputDimensions>(bindingInfo.second, it.second, isDynamic);
-        auto result = CompareTensors(outputExpected, outputStorage[it.first], false, isDynamic);
+        auto outputExpected = it.second;
+        auto result = CompareTensors(outputExpected, outputStorage[it.first],
+                                     bindingInfo.second.GetShape(), bindingInfo.second.GetShape(),
+                                     false, isDynamic);
         BOOST_TEST(result.m_Result, result.m_Message.str());
     }
 }
@@ -393,7 +395,7 @@ void ParserFlatbuffersFixture::RunTest(size_t subgraphId,
     FillInputTensors(inputTensors, input2Data, subgraphId);
 
     // Allocate storage for the output tensors to be written to and setup the armnn output tensors.
-    std::map<std::string, boost::multi_array<DataType2, NumOutputDimensions>> outputStorage;
+    std::map<std::string, std::vector<DataType2>> outputStorage;
     armnn::OutputTensors outputTensors;
     for (auto&& it : expectedOutputData)
     {
@@ -409,7 +411,7 @@ void ParserFlatbuffersFixture::RunTest(size_t subgraphId,
                                             it.first));
         armnn::VerifyTensorInfoDataType(outputTensorInfo, outputType);
 
-        outputStorage.emplace(it.first, MakeTensor<DataType2, NumOutputDimensions>(outputTensorInfo));
+        outputStorage.emplace(it.first, std::vector<DataType2>(outputTensorInfo.GetNumElements()));
         outputTensors.push_back(
                 { outputBindingId, armnn::Tensor(outputTensorInfo, outputStorage.at(it.first).data()) });
     }
@@ -420,8 +422,9 @@ void ParserFlatbuffersFixture::RunTest(size_t subgraphId,
     for (auto&& it : expectedOutputData)
     {
         armnn::BindingPointInfo bindingInfo = m_Parser->GetNetworkOutputBindingInfo(subgraphId, it.first);
-        auto outputExpected = MakeTensor<DataType2, NumOutputDimensions>(bindingInfo.second, it.second);
-        auto result = CompareTensors(outputExpected, outputStorage[it.first], false);
+        auto outputExpected = it.second;
+        auto result = CompareTensors(outputExpected, outputStorage[it.first],
+                                     bindingInfo.second.GetShape(), bindingInfo.second.GetShape(), false);
         BOOST_TEST(result.m_Result, result.m_Message.str());
     }
 }
\ No newline at end of file
-- 
cgit v1.2.1