diff options
author | Sadik Armagan <sadik.armagan@arm.com> | 2021-06-01 09:24:52 +0100 |
---|---|---|
committer | Sadik Armagan <sadik.armagan@arm.com> | 2021-06-02 13:00:56 +0000 |
commit | 483c811ea6fd0e7801aac1afd979ed02a649064b (patch) | |
tree | a0969c8786528334b62043b40983fa21d54d524e /src/armnnDeserializer | |
parent | 31f86bfeb311ccc0c6ed94c35a78a51551148ea4 (diff) | |
download | armnn-483c811ea6fd0e7801aac1afd979ed02a649064b.tar.gz |
IVGCVSW-5962 Remove boost::multi_array
* Replaced all instances of boost::multi_array with flat vectors.
* Updated LayerTestResult struct with new member variables.
* Updated CompareTensors function to compare flat vectors and the shape.
* Removed MakeTensor function from TensorHelpers.hpp.
* Removed GetTensorShapeAsArray function from LayerTestResult.hpp.
* Removed boost::array usage.
* Removed boost::extents usages.
* Removed boost::random usages.
Signed-off-by: Matthew Sloyan <matthew.sloyan@arm.com>
Signed-off-by: Sadik Armagan <sadik.armagan@arm.com>
Change-Id: Iccde9d6640b534940292ff048fb80c00b38c4743
Diffstat (limited to 'src/armnnDeserializer')
-rw-r--r-- | src/armnnDeserializer/test/ParserFlatbuffersSerializeFixture.hpp | 10 |
1 file changed, 6 insertions, 4 deletions
diff --git a/src/armnnDeserializer/test/ParserFlatbuffersSerializeFixture.hpp b/src/armnnDeserializer/test/ParserFlatbuffersSerializeFixture.hpp index 5f5ec1c5f4..a62cb96eb6 100644 --- a/src/armnnDeserializer/test/ParserFlatbuffersSerializeFixture.hpp +++ b/src/armnnDeserializer/test/ParserFlatbuffersSerializeFixture.hpp @@ -20,6 +20,7 @@ #include <fmt/format.h> +#include <vector> using armnnDeserializer::IDeserializer; using TensorRawPtr = armnnSerializer::TensorInfo*; @@ -218,14 +219,14 @@ void ParserFlatbuffersSerializeFixture::RunTest( } // Allocate storage for the output tensors to be written to and setup the armnn output tensors. - std::map<std::string, boost::multi_array<OutputDataType, NumOutputDimensions>> outputStorage; + std::map<std::string, std::vector<OutputDataType>> outputStorage; armnn::OutputTensors outputTensors; for (auto&& it : expectedOutputData) { armnn::BindingPointInfo bindingInfo = ConvertBindingInfo( m_Parser->GetNetworkOutputBindingInfo(layersId, it.first)); armnn::VerifyTensorInfoDataType(bindingInfo.second, ArmnnOutputType); - outputStorage.emplace(it.first, MakeTensor<OutputDataType, NumOutputDimensions>(bindingInfo.second)); + outputStorage.emplace(it.first, std::vector<OutputDataType>(bindingInfo.second.GetNumElements())); outputTensors.push_back( { bindingInfo.first, armnn::Tensor(bindingInfo.second, outputStorage.at(it.first).data()) }); } @@ -237,8 +238,9 @@ void ParserFlatbuffersSerializeFixture::RunTest( { armnn::BindingPointInfo bindingInfo = ConvertBindingInfo( m_Parser->GetNetworkOutputBindingInfo(layersId, it.first)); - auto outputExpected = MakeTensor<OutputDataType, NumOutputDimensions>(bindingInfo.second, it.second); - auto result = CompareTensors(outputExpected, outputStorage[it.first]); + auto outputExpected = it.second; + auto result = CompareTensors(outputExpected, outputStorage[it.first], + bindingInfo.second.GetShape(), bindingInfo.second.GetShape()); BOOST_TEST(result.m_Result, result.m_Message.str()); } } |