author     surmeh01 <surabhi.mehta@arm.com>    2018-05-18 16:31:43 +0100
committer  telsoa01 <telmo.soares@arm.com>     2018-05-23 13:09:07 +0100
commit     3537c2ca7ebf31c1673b9ec2bb0c17b0406bbae0 (patch)
tree       5950603ad78ec3fe56fb31ddc7f4d52a19f5bc60 /tests/InferenceModel.hpp
parent     bceff2fb3fc68bb0aa88b886900c34b77340c826 (diff)
download   armnn-3537c2ca7ebf31c1673b9ec2bb0c17b0406bbae0.tar.gz
Release 18.05
Diffstat (limited to 'tests/InferenceModel.hpp')
-rw-r--r--  tests/InferenceModel.hpp | 62
1 file changed, 50 insertions(+), 12 deletions(-)
diff --git a/tests/InferenceModel.hpp b/tests/InferenceModel.hpp
index c390ccdc2f..f5f00378ca 100644
--- a/tests/InferenceModel.hpp
+++ b/tests/InferenceModel.hpp
@@ -3,15 +3,19 @@
// See LICENSE file in the project root for full license information.
//
#pragma once
-
#include "armnn/ArmNN.hpp"
+#include "HeapProfiling.hpp"
+#include <boost/exception/exception.hpp>
+#include <boost/exception/diagnostic_information.hpp>
#include <boost/log/trivial.hpp>
#include <boost/format.hpp>
#include <boost/program_options.hpp>
+#include <boost/filesystem.hpp>
#include <map>
#include <string>
+#include <fstream>
template<typename TContainer>
inline armnn::InputTensors MakeInputTensors(const std::pair<armnn::LayerBindingId, armnn::TensorInfo>& input,
@@ -19,8 +23,16 @@ inline armnn::InputTensors MakeInputTensors(const std::pair<armnn::LayerBindingI
{
if (inputTensorData.size() != input.second.GetNumElements())
{
- throw armnn::Exception(boost::str(boost::format("Input tensor has incorrect size. Expected %1% elements "
- "but got %2%.") % input.second.GetNumElements() % inputTensorData.size()));
+ try
+ {
+ throw armnn::Exception(boost::str(boost::format("Input tensor has incorrect size. Expected %1% elements "
+ "but got %2%.") % input.second.GetNumElements() % inputTensorData.size()));
+ } catch (const boost::exception& e)
+ {
+ // Coverity fix: it should not be possible to get here but boost::str and boost::format can both
+ // throw uncaught exceptions - convert them to armnn exceptions and rethrow
+ throw armnn::Exception(diagnostic_information(e));
+ }
}
return { { input.first, armnn::ConstTensor(input.second, inputTensorData.data()) } };
}
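
The try/catch above exists because boost::str and boost::format can themselves throw; the patch converts anything they raise into an armnn::Exception so callers only ever see armnn's exception type. A minimal standalone sketch of the same conversion pattern, assuming only Boost (the ValidateSize function, the Exception stand-in, and the message text are illustrative, not part of this patch):

    #include <boost/exception/diagnostic_information.hpp>
    #include <boost/format.hpp>
    #include <stdexcept>
    #include <string>

    // Stand-in for armnn::Exception, just for this sketch.
    struct Exception : std::runtime_error
    {
        explicit Exception(const std::string& message) : std::runtime_error(message) {}
    };

    void ValidateSize(std::size_t expected, std::size_t actual)
    {
        if (actual == expected)
        {
            return;
        }
        try
        {
            // boost::format/boost::str may throw (e.g. on a malformed format string)...
            throw Exception(boost::str(
                boost::format("Expected %1% elements but got %2%.") % expected % actual));
        }
        catch (const boost::exception& e)
        {
            // ...so rethrow whatever Boost raised as the project's exception type.
            throw Exception(boost::diagnostic_information(e));
        }
    }
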
@@ -46,6 +58,7 @@ public:
{
std::string m_ModelDir;
armnn::Compute m_ComputeDevice;
+ bool m_VisualizePostOptimizationModel;
};
static void AddCommandLineOptions(boost::program_options::options_description& desc, CommandLineOptions& options)
@@ -56,7 +69,11 @@ public:
("model-dir,m", po::value<std::string>(&options.m_ModelDir)->required(),
"Path to directory containing model files (.caffemodel/.prototxt)")
("compute,c", po::value<armnn::Compute>(&options.m_ComputeDevice)->default_value(armnn::Compute::CpuAcc),
- "Which device to run layers on by default. Possible choices: CpuAcc, CpuRef, GpuAcc");
+ "Which device to run layers on by default. Possible choices: CpuAcc, CpuRef, GpuAcc")
+ ("visualize-optimized-model,v",
+ po::value<bool>(&options.m_VisualizePostOptimizationModel)->default_value(false),
+ "Produce a dot file useful for visualizing the graph post optimization."
+ "The file will have the same name as the model with the .dot extention.");
}
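
The new option binds straight onto the CommandLineOptions field via po::value, so po::notify fills it in during parsing. A minimal sketch of how such an option is parsed, assuming only Boost.Program_options (the main function and variable names here are illustrative):

    #include <boost/program_options.hpp>
    #include <iostream>

    int main(int argc, char* argv[])
    {
        namespace po = boost::program_options;

        bool visualize = false;
        po::options_description desc("Options");
        desc.add_options()
            ("visualize-optimized-model,v",
             po::value<bool>(&visualize)->default_value(false),
             "Produce a dot file useful for visualizing the graph post optimization.");

        po::variables_map vm;
        po::store(po::parse_command_line(argc, argv, desc), vm);
        po::notify(vm); // Writes parsed values back through the bound pointers.

        std::cout << std::boolalpha << "visualize = " << visualize << std::endl;
        return 0;
    }

Note that because the option is bound to a bool via po::value (not bool_switch), it takes an explicit argument, e.g. --visualize-optimized-model true or -v 1.
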
struct Params
@@ -67,11 +84,13 @@ public:
const armnn::TensorShape* m_InputTensorShape;
armnn::Compute m_ComputeDevice;
bool m_IsModelBinary;
+ bool m_VisualizePostOptimizationModel;
Params()
: m_InputTensorShape(nullptr)
, m_ComputeDevice(armnn::Compute::CpuRef)
, m_IsModelBinary(true)
+ , m_VisualizePostOptimizationModel(false)
{
}
};
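
Defaulting m_VisualizePostOptimizationModel to false in the constructor keeps every existing caller's behaviour unchanged; only callers that explicitly opt in get the extra .dot output. A hedged usage fragment, assuming the enclosing InferenceModel template has been instantiated as some alias Model (the path and binding names are placeholders, not from the patch):

    Model::Params params;                                 // defaults: CpuRef, binary model, no .dot dump
    params.m_ModelPath     = "/path/to/model.caffemodel"; // placeholder path
    params.m_InputBinding  = "data";                      // placeholder binding names
    params.m_OutputBinding = "prob";
    params.m_VisualizePostOptimizationModel = true;       // opt in to the new visualization
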
@@ -92,19 +111,38 @@ public:
}
std::vector<std::string> requestedOutputs{ params.m_OutputBinding };
- // Handle text and binary input differently by calling the corresponding parser function
- armnn::INetworkPtr network = (params.m_IsModelBinary ?
- parser->CreateNetworkFromBinaryFile(modelPath.c_str(), inputShapes, requestedOutputs) :
- parser->CreateNetworkFromTextFile(modelPath.c_str(), inputShapes, requestedOutputs));
+ armnn::INetworkPtr network{nullptr, [](armnn::INetwork *){}};
+ {
+ ARMNN_SCOPED_HEAP_PROFILING("Parsing");
+ // Handle text and binary input differently by calling the corresponding parser function
+ network = (params.m_IsModelBinary ?
+ parser->CreateNetworkFromBinaryFile(modelPath.c_str(), inputShapes, requestedOutputs) :
+ parser->CreateNetworkFromTextFile(modelPath.c_str(), inputShapes, requestedOutputs));
+ }
m_InputBindingInfo = parser->GetNetworkInputBindingInfo(params.m_InputBinding);
m_OutputBindingInfo = parser->GetNetworkOutputBindingInfo(params.m_OutputBinding);
- armnn::IOptimizedNetworkPtr optNet =
- armnn::Optimize(*network, m_Runtime->GetDeviceSpec());
+ armnn::IOptimizedNetworkPtr optNet{nullptr, [](armnn::IOptimizedNetwork *){}};
+ {
+ ARMNN_SCOPED_HEAP_PROFILING("Optimizing");
+ optNet = armnn::Optimize(*network, m_Runtime->GetDeviceSpec());
+ }
+
+ if (params.m_VisualizePostOptimizationModel)
+ {
+ boost::filesystem::path filename = params.m_ModelPath;
+ filename.replace_extension("dot");
+ std::fstream file(filename.c_str(), std::fstream::out);
+ optNet->SerializeToDot(file);
+ }
+
+ armnn::Status ret;
+ {
+ ARMNN_SCOPED_HEAP_PROFILING("LoadNetwork");
+ ret = m_Runtime->LoadNetwork(m_NetworkIdentifier, std::move(optNet));
+ }
- // Load the network into the runtime.
- armnn::Status ret = m_Runtime->LoadNetwork(m_NetworkIdentifier, std::move(optNet));
if (ret == armnn::Status::Failure)
{
throw armnn::Exception("IRuntime::LoadNetwork failed");
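
Two idioms in the last hunk are worth unpacking. Each phase (parsing, optimizing, loading) is wrapped in its own ARMNN_SCOPED_HEAP_PROFILING block so heap usage is attributed per phase, and the smart pointers are pre-declared outside those blocks with a no-op deleter so they can be assigned from inside them. A minimal sketch of both patterns, assuming only the standard library (ScopedProfiler is an illustrative wall-clock stand-in, not armnn's heap profiler):

    #include <chrono>
    #include <iostream>
    #include <memory>
    #include <string>

    // Illustrative RAII scope: armnn's ARMNN_SCOPED_HEAP_PROFILING attributes heap
    // allocations to a named scope; this stand-in just reports wall-clock time.
    class ScopedProfiler
    {
    public:
        explicit ScopedProfiler(std::string tag)
            : m_Tag(std::move(tag)), m_Start(std::chrono::steady_clock::now()) {}
        ~ScopedProfiler()
        {
            auto ms = std::chrono::duration_cast<std::chrono::milliseconds>(
                std::chrono::steady_clock::now() - m_Start).count();
            std::cout << m_Tag << ": " << ms << " ms" << std::endl;
        }
    private:
        std::string m_Tag;
        std::chrono::steady_clock::time_point m_Start;
    };

    struct Network {};                                  // stand-in for armnn::INetwork
    using NetworkPtr = std::unique_ptr<Network, void(*)(Network*)>;

    NetworkPtr ParseModel()                             // stand-in for the parser call
    {
        return NetworkPtr(new Network, [](Network* p) { delete p; });
    }

    int main()
    {
        // Pre-declared with a no-op deleter so it can be filled in inside the
        // profiled block; move-assignment replaces the deleter as well.
        NetworkPtr network{nullptr, [](Network*) {}};
        {
            ScopedProfiler profiler("Parsing");         // everything here is attributed to "Parsing"
            network = ParseModel();
        }
        return 0;
    }

The .dot file produced by SerializeToDot when the new flag is set can be rendered with Graphviz, e.g. dot -Tpng model.dot -o model.png.
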