diff options
author | Matteo Martincigh <matteo.martincigh@arm.com> | 2019-10-28 10:48:05 +0000 |
---|---|---|
committer | Matteo Martincigh <matteo.martincigh@arm.com> | 2019-10-28 14:30:16 +0000 |
commit | d6f26fc022defe1f6220bb6e2874fefc9a4c0cba (patch) | |
tree | 86a32e208eee33ae34bab977b7ad7d4964c181ee /tests | |
parent | 49e7d1b09b47fa40237c4696f6c590cbebf19a7e (diff) | |
download | armnn-d6f26fc022defe1f6220bb6e2874fefc9a4c0cba.tar.gz |
Fix ExecuteNetwork no longer printing out the inference results,
which was breaking some of the nightly builds
* The TensorPrinter must be called regardless of the value of
the output tensor files; it will automatically handle an empty
file path by only printing the results to the console
* Code refactoring
Signed-off-by: Matteo Martincigh <matteo.martincigh@arm.com>
Change-Id: I548ec7cf6d51badf78643c9a6c1c56ea9200142b
Diffstat (limited to 'tests')
-rw-r--r-- | tests/NetworkExecutionUtils/NetworkExecutionUtils.hpp | 26 |
1 file changed, 11 insertions, 15 deletions
diff --git a/tests/NetworkExecutionUtils/NetworkExecutionUtils.hpp b/tests/NetworkExecutionUtils/NetworkExecutionUtils.hpp
index 004e9fbdb2..2556a104b5 100644
--- a/tests/NetworkExecutionUtils/NetworkExecutionUtils.hpp
+++ b/tests/NetworkExecutionUtils/NetworkExecutionUtils.hpp
@@ -471,24 +471,20 @@ int MainImpl(const ExecuteNetworkParams& params,
     // model.Run returns the inference time elapsed in EnqueueWorkload (in milliseconds)
     auto inference_duration = model.Run(inputDataContainers, outputDataContainers);
 
-    // Print output tensors (if requested)
-    if (!params.m_OutputTensorFiles.empty())
+    if (params.m_GenerateTensorData)
     {
-        if (params.m_GenerateTensorData)
-        {
-            BOOST_LOG_TRIVIAL(warning) << "Requested to write output to file, although the input was generated. "
-                                       << "Note that the output will not be useful.";
-        }
+        BOOST_LOG_TRIVIAL(warning) << "The input data was generated, note that the output will not be useful";
+    }
 
-        const auto& infosOut = model.GetOutputBindingInfos();
-        for (size_t i = 0; i < numOutputs; i++)
-        {
-            const armnn::TensorInfo& infoOut = infosOut[i].second;
-            auto outputTensorFile = params.m_OutputTensorFiles[i];
+    // Print output tensors
+    const auto& infosOut = model.GetOutputBindingInfos();
+    for (size_t i = 0; i < numOutputs; i++)
+    {
+        const armnn::TensorInfo& infoOut = infosOut[i].second;
+        auto outputTensorFile = params.m_OutputTensorFiles.empty() ? "" : params.m_OutputTensorFiles[i];
 
-            TensorPrinter printer(inferenceModelParams.m_OutputBindings[i], infoOut, outputTensorFile);
-            boost::apply_visitor(printer, outputDataContainers[i]);
-        }
+        TensorPrinter printer(inferenceModelParams.m_OutputBindings[i], infoOut, outputTensorFile);
+        boost::apply_visitor(printer, outputDataContainers[i]);
     }
 
     BOOST_LOG_TRIVIAL(info) << "\nInference time: " << std::setprecision(2)