path: root/Utils.hpp
author    Matteo Martincigh <matteo.martincigh@arm.com>    2018-09-03 13:50:50 +0100
committer Matthew Bentham <matthew.bentham@arm.com>        2018-09-18 12:40:38 +0100
commit    e48bdff741568236d3c0747ad3d18a8eba5b36dd (patch)
tree      77aabce6f75d86d3f2f3924f342292ae5a7267e7 /Utils.hpp
parent    a15dc11fd7bf3ad49e752ec75157b731287fe46d (diff)
download  android-nn-driver-e48bdff741568236d3c0747ad3d18a8eba5b36dd.tar.gz
IVGCVSW-1806 Refactored Android-NN-Driver, added common "getCapabilities",
"getSupportedOperations" and "prepareModel" implementations

* Added common base ArmnnDriverImpl class
* Added common template implementation of the driver's "getCapabilities",
  "getSupportedOperations" and "prepareModel" methods
* Refactored ArmnnPreparedModel and RequestThread to support HAL v1.1 models
* Moved "getStatus" to the common base class, as it is shared by both HAL
  implementations
* Refactored the code where necessary

Change-Id: I747334730026d63b4002662523fb93608f67c899
Diffstat (limited to 'Utils.hpp')
-rw-r--r--    Utils.hpp    49
1 file changed, 45 insertions(+), 4 deletions(-)
diff --git a/Utils.hpp b/Utils.hpp
index 5d9f7003..ac90a9ab 100644
--- a/Utils.hpp
+++ b/Utils.hpp
@@ -12,8 +12,13 @@
#include <armnn/ArmNN.hpp>
#include <CpuExecutor.h>
+#include <boost/format.hpp>
+#include <log/log.h>
+
#include <vector>
#include <string>
+#include <fstream>
+#include <iomanip>
namespace armnn_driver
{
@@ -44,8 +49,8 @@ armnn::TensorInfo GetTensorInfoForOperand(const Operand& operand);
std::string GetOperandSummary(const Operand& operand);
-template <typename Model>
-std::string GetModelSummary(const Model& model)
+template <typename HalModel>
+std::string GetModelSummary(const HalModel& model)
{
    std::stringstream result;
@@ -86,8 +91,44 @@ void DumpJsonProfilingIfRequired(bool gpuProfilingEnabled,
                                 armnn::NetworkId networkId,
                                 const armnn::IProfiler* profiler);
+template <typename HalModel>
void ExportNetworkGraphToDotFile(const armnn::IOptimizedNetwork& optimizedNetwork,
                                 const std::string& dumpDir,
-                                const ::android::hardware::neuralnetworks::V1_0::Model& model);
+                                const HalModel& model)
+{
+    // The dump directory must exist in advance.
+    if (dumpDir.empty())
+    {
+        return;
+    }
+
+    // Get the memory address of the model and convert it to a hex string (of at least a '0' character).
+    size_t modelAddress = uintptr_t(&model);
+    std::stringstream ss;
+    ss << std::uppercase << std::hex << std::setfill('0') << std::setw(1) << modelAddress;
+    std::string modelAddressHexString = ss.str();
+
+    // Set the name of the output .dot file.
+    const std::string fileName = boost::str(boost::format("%1%/networkgraph_%2%.dot")
+                                            % dumpDir
+                                            % modelAddressHexString);
+
+    ALOGV("Exporting the optimized network graph to file: %s", fileName.c_str());
+
+    // Write the network graph to a dot file.
+    std::ofstream fileStream;
+    fileStream.open(fileName, std::ofstream::out | std::ofstream::trunc);
-}
\ No newline at end of file
+    if (!fileStream.good())
+    {
+        ALOGW("Could not open file %s for writing", fileName.c_str());
+        return;
+    }
+
+    if (optimizedNetwork.SerializeToDot(fileStream) != armnn::Status::Success)
+    {
+        ALOGW("An error occurred when writing to file %s", fileName.c_str());
+    }
+}
+
+}
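As a rough standalone sketch of the naming scheme added above: the model object's address is rendered as an uppercase hex string and spliced into "<dumpDir>/networkgraph_<address>.dot" with boost::format, and the graph is then streamed into that file in truncate mode. The MakeDotFileName helper and the "/data/local/tmp" path below are illustrative assumptions, not part of the patch.

    #include <boost/format.hpp>
    #include <cstdint>
    #include <fstream>
    #include <iomanip>
    #include <iostream>
    #include <sstream>
    #include <string>

    // Build the dump file name the same way the templated function above does:
    // format the model's address as uppercase hex and append it to
    // "<dumpDir>/networkgraph_<address>.dot". (Hypothetical helper name.)
    std::string MakeDotFileName(const std::string& dumpDir, const void* modelAddress)
    {
        std::stringstream ss;
        ss << std::uppercase << std::hex << std::setfill('0') << std::setw(1)
           << reinterpret_cast<std::uintptr_t>(modelAddress);
        return boost::str(boost::format("%1%/networkgraph_%2%.dot") % dumpDir % ss.str());
    }

    int main()
    {
        int dummyModel = 0; // stands in for the HAL model object
        // Arbitrary example directory; the driver is handed dumpDir by its caller.
        const std::string fileName = MakeDotFileName("/data/local/tmp", &dummyModel);
        std::cout << "Would export the graph to: " << fileName << std::endl;

        // The driver opens the file in truncate mode and streams the .dot text into it.
        std::ofstream fileStream(fileName, std::ofstream::out | std::ofstream::trunc);
        if (!fileStream.good())
        {
            std::cerr << "Could not open " << fileName << " for writing" << std::endl;
        }
        return 0;
    }

Presumably the address-derived suffix simply keeps dumps of different models prepared by the same driver process from overwriting one another; the resulting file names are not stable across runs.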