diff options
author | Aron Virginas-Tar <Aron.Virginas-Tar@arm.com> | 2018-11-19 10:58:30 +0000 |
---|---|---|
committer | Aron Virginas-Tar <aron.virginas-tar@arm.com> | 2018-11-19 12:39:33 +0000 |
commit | 1d67a6905daed13354e66f00549e12fea62170ed (patch) | |
tree | 778a2b1f9d8330af8725f7f728a0b3ec435b0511 /src/armnnUtils | |
parent | 347dfcf1b2cb058cdc31f4fa670dbe55040adc3b (diff) | |
download | armnn-1d67a6905daed13354e66f00549e12fea62170ed.tar.gz |
IVGCVSW-2167: Run parser unit tests on the reference backend only
Change-Id: Ib11c3d36c7109198da7266955414580e8fb916b5
Diffstat (limited to 'src/armnnUtils')
-rw-r--r-- | src/armnnUtils/ParserPrototxtFixture.hpp | 134 |
1 file changed, 58 insertions, 76 deletions
diff --git a/src/armnnUtils/ParserPrototxtFixture.hpp b/src/armnnUtils/ParserPrototxtFixture.hpp index b10590342e..669b1fd0ca 100644 --- a/src/armnnUtils/ParserPrototxtFixture.hpp +++ b/src/armnnUtils/ParserPrototxtFixture.hpp @@ -26,16 +26,9 @@ struct ParserPrototxtFixture { ParserPrototxtFixture() : m_Parser(TParser::Create()) + , m_Runtime(armnn::IRuntime::Create(armnn::IRuntime::CreationOptions())) , m_NetworkIdentifier(-1) { - armnn::IRuntime::CreationOptions options; - - // Create runtimes for each available backend - const armnn::BackendIdSet availableBackendIds = armnn::BackendRegistryInstance().GetBackendIds(); - for (auto& backendId : availableBackendIds) - { - m_Runtimes.push_back(std::make_pair(armnn::IRuntime::Create(options), backendId)); - } } /// Parses and loads the network defined by the m_Prototext string. @@ -62,7 +55,7 @@ struct ParserPrototxtFixture std::string m_Prototext; std::unique_ptr<TParser, void(*)(TParser* parser)> m_Parser; - std::vector<std::pair<armnn::IRuntimePtr, armnn::BackendId>> m_Runtimes; + armnn::IRuntimePtr m_Runtime; armnn::NetworkId m_NetworkIdentifier; /// If the single-input-single-output overload of Setup() is called, these will store the input and output name @@ -98,44 +91,36 @@ template<typename TParser> void ParserPrototxtFixture<TParser>::Setup(const std::map<std::string, armnn::TensorShape>& inputShapes, const std::vector<std::string>& requestedOutputs) { - for (auto&& runtime : m_Runtimes) + std::string errorMessage; + + armnn::INetworkPtr network = + m_Parser->CreateNetworkFromString(m_Prototext.c_str(), inputShapes, requestedOutputs); + auto optimized = Optimize(*network, { armnn::Compute::CpuRef }, m_Runtime->GetDeviceSpec()); + armnn::Status ret = m_Runtime->LoadNetwork(m_NetworkIdentifier, move(optimized), errorMessage); + if (ret != armnn::Status::Success) { - std::string errorMessage; - - armnn::INetworkPtr network = - m_Parser->CreateNetworkFromString(m_Prototext.c_str(), inputShapes, requestedOutputs); 
- auto optimized = Optimize(*network, - { runtime.second, armnn::Compute::CpuRef }, runtime.first->GetDeviceSpec()); - armnn::Status ret = runtime.first->LoadNetwork(m_NetworkIdentifier, move(optimized), errorMessage); - if (ret != armnn::Status::Success) - { - throw armnn::Exception(boost::str( - boost::format("LoadNetwork failed with error: '%1%' %2%") - % errorMessage - % CHECK_LOCATION().AsString())); - } + throw armnn::Exception(boost::str( + boost::format("LoadNetwork failed with error: '%1%' %2%") + % errorMessage + % CHECK_LOCATION().AsString())); } } template<typename TParser> void ParserPrototxtFixture<TParser>::Setup() { - for (auto&& runtime : m_Runtimes) + std::string errorMessage; + + armnn::INetworkPtr network = + m_Parser->CreateNetworkFromString(m_Prototext.c_str()); + auto optimized = Optimize(*network, { armnn::Compute::CpuRef }, m_Runtime->GetDeviceSpec()); + armnn::Status ret = m_Runtime->LoadNetwork(m_NetworkIdentifier, move(optimized), errorMessage); + if (ret != armnn::Status::Success) { - std::string errorMessage; - - armnn::INetworkPtr network = - m_Parser->CreateNetworkFromString(m_Prototext.c_str()); - auto optimized = Optimize(*network, - { runtime.second, armnn::Compute::CpuRef }, runtime.first->GetDeviceSpec()); - armnn::Status ret = runtime.first->LoadNetwork(m_NetworkIdentifier, move(optimized), errorMessage); - if (ret != armnn::Status::Success) - { - throw armnn::Exception(boost::str( - boost::format("LoadNetwork failed with error: '%1%' %2%") - % errorMessage - % CHECK_LOCATION().AsString())); - } + throw armnn::Exception(boost::str( + boost::format("LoadNetwork failed with error: '%1%' %2%") + % errorMessage + % CHECK_LOCATION().AsString())); } } @@ -152,49 +137,46 @@ template <std::size_t NumOutputDimensions> void ParserPrototxtFixture<TParser>::RunTest(const std::map<std::string, std::vector<float>>& inputData, const std::map<std::string, std::vector<float>>& expectedOutputData) { - for (auto&& runtime : m_Runtimes) - { - 
using BindingPointInfo = std::pair<armnn::LayerBindingId, armnn::TensorInfo>; + using BindingPointInfo = std::pair<armnn::LayerBindingId, armnn::TensorInfo>; - // Sets up the armnn input tensors from the given vectors. - armnn::InputTensors inputTensors; - for (auto&& it : inputData) - { - BindingPointInfo bindingInfo = m_Parser->GetNetworkInputBindingInfo(it.first); - inputTensors.push_back({ bindingInfo.first, armnn::ConstTensor(bindingInfo.second, it.second.data()) }); - } + // Sets up the armnn input tensors from the given vectors. + armnn::InputTensors inputTensors; + for (auto&& it : inputData) + { + BindingPointInfo bindingInfo = m_Parser->GetNetworkInputBindingInfo(it.first); + inputTensors.push_back({ bindingInfo.first, armnn::ConstTensor(bindingInfo.second, it.second.data()) }); + } - // Allocates storage for the output tensors to be written to and sets up the armnn output tensors. - std::map<std::string, boost::multi_array<float, NumOutputDimensions>> outputStorage; - armnn::OutputTensors outputTensors; - for (auto&& it : expectedOutputData) - { - BindingPointInfo bindingInfo = m_Parser->GetNetworkOutputBindingInfo(it.first); - outputStorage.emplace(it.first, MakeTensor<float, NumOutputDimensions>(bindingInfo.second)); - outputTensors.push_back( - { bindingInfo.first, armnn::Tensor(bindingInfo.second, outputStorage.at(it.first).data()) }); - } + // Allocates storage for the output tensors to be written to and sets up the armnn output tensors. 
+ std::map<std::string, boost::multi_array<float, NumOutputDimensions>> outputStorage; + armnn::OutputTensors outputTensors; + for (auto&& it : expectedOutputData) + { + BindingPointInfo bindingInfo = m_Parser->GetNetworkOutputBindingInfo(it.first); + outputStorage.emplace(it.first, MakeTensor<float, NumOutputDimensions>(bindingInfo.second)); + outputTensors.push_back( + { bindingInfo.first, armnn::Tensor(bindingInfo.second, outputStorage.at(it.first).data()) }); + } - runtime.first->EnqueueWorkload(m_NetworkIdentifier, inputTensors, outputTensors); + m_Runtime->EnqueueWorkload(m_NetworkIdentifier, inputTensors, outputTensors); - // Compares each output tensor to the expected values. - for (auto&& it : expectedOutputData) + // Compares each output tensor to the expected values. + for (auto&& it : expectedOutputData) + { + BindingPointInfo bindingInfo = m_Parser->GetNetworkOutputBindingInfo(it.first); + if (bindingInfo.second.GetNumElements() != it.second.size()) { - BindingPointInfo bindingInfo = m_Parser->GetNetworkOutputBindingInfo(it.first); - if (bindingInfo.second.GetNumElements() != it.second.size()) - { - throw armnn::Exception( - boost::str( - boost::format("Output tensor %1% is expected to have %2% elements. " - "%3% elements supplied. %4%") % - it.first % - bindingInfo.second.GetNumElements() % - it.second.size() % - CHECK_LOCATION().AsString())); - } - auto outputExpected = MakeTensor<float, NumOutputDimensions>(bindingInfo.second, it.second); - BOOST_TEST(CompareTensors(outputExpected, outputStorage[it.first])); + throw armnn::Exception( + boost::str( + boost::format("Output tensor %1% is expected to have %2% elements. " + "%3% elements supplied. %4%") % + it.first % + bindingInfo.second.GetNumElements() % + it.second.size() % + CHECK_LOCATION().AsString())); } + auto outputExpected = MakeTensor<float, NumOutputDimensions>(bindingInfo.second, it.second); + BOOST_TEST(CompareTensors(outputExpected, outputStorage[it.first])); } } |