diff options
author | Colm Donelan <Colm.Donelan@arm.com> | 2021-10-04 22:55:37 +0100 |
---|---|---|
committer | Colm Donelan <colm.donelan@arm.com> | 2021-10-11 13:55:19 +0000 |
commit | 3e32a8700bf12d3b70d2824c12cdae907bde9360 (patch) | |
tree | 9b2ff65b2ccf4f14ee526b4e8e13bbf4e6f07c1b /delegate/src/armnn_delegate.cpp | |
parent | 1112b016e7ffad979b7bd0c8d54c9c679d4043e2 (diff) | |
download | armnn-3e32a8700bf12d3b70d2824c12cdae907bde9360.tar.gz |
IVGCVSW-5752 Add missing runtime parameters to TfLite delegate.
* Add runtime parameter: dynamic-backends-path
* Add profiling parameters:
gpu-enable-profiling,
enable-internal-profiling, internal-profiling-detail,
enable-external-profiling, timeline-profiling, outgoing-capture-file,
incoming-capture-file, file-only-external-profiling,
counter-capture-period, profiling-file-format
* Add utility parameter "serialize-to-dot"
Signed-off-by: Colm Donelan <Colm.Donelan@arm.com>
Change-Id: Ibff4b9a85ff0f0da5d70e8aa0bb6cba96aaabbc3
Diffstat (limited to 'delegate/src/armnn_delegate.cpp')
-rw-r--r-- | delegate/src/armnn_delegate.cpp | 37 |
1 file changed, 24 insertions, 13 deletions
diff --git a/delegate/src/armnn_delegate.cpp b/delegate/src/armnn_delegate.cpp index f13bb5779c..2ede23c12b 100644 --- a/delegate/src/armnn_delegate.cpp +++ b/delegate/src/armnn_delegate.cpp @@ -39,6 +39,7 @@ #include "UnidirectionalSequenceLstm.hpp" #include "Unpack.hpp" +#include <armnnUtils/Filesystem.hpp> #include <flatbuffers/flatbuffers.h> #include <tensorflow/lite/context_util.h> @@ -137,6 +138,9 @@ Delegate::Delegate(armnnDelegate::DelegateOptions options) // Create ArmNN Runtime armnn::IRuntime::CreationOptions runtimeOptions; + runtimeOptions.m_DynamicBackendsPath = m_Options.GetDynamicBackendsPath(); + runtimeOptions.m_EnableGpuProfiling = m_Options.GetGpuProfilingState(); + runtimeOptions.m_ProfilingOptions = m_Options.GetExternalProfilingParams(); auto backendOptions = m_Options.GetBackendOptions(); if (!backendOptions.empty()) @@ -363,27 +367,34 @@ ArmnnSubgraph* ArmnnSubgraph::Create(TfLiteContext* tfLiteContext, throw armnn::Exception("TfLiteArmnnDelegate: Unable to optimize the network!"); } + // If set, we will serialize the optimized model into a dot file. 
+ const std::string serializeToDotFile = delegate->m_Options.GetSerializeToDot(); + if (!serializeToDotFile.empty()) + { + fs::path filename = serializeToDotFile; + std::fstream file(filename.c_str(), std::ios_base::out); + optNet->SerializeToDot(file); + } + try { // Load graph into runtime std::string errorMessage; armnn::Status loadingStatus; + armnn::MemorySource memorySource = armnn::MemorySource::Undefined; if (delegate->m_Options.GetOptimizerOptions().m_ImportEnabled) { - armnn::INetworkProperties networkProperties(false, - armnn::MemorySource::Malloc, - armnn::MemorySource::Malloc); - loadingStatus = delegate->m_Runtime->LoadNetwork(networkId, - std::move(optNet), - errorMessage, - networkProperties); - } - else - { - loadingStatus = delegate->m_Runtime->LoadNetwork(networkId, - std::move(optNet), - errorMessage); + memorySource = armnn::MemorySource::Malloc; } + armnn::INetworkProperties networkProperties(false, + memorySource, + memorySource, + delegate->m_Options.GetInternalProfilingState(), + delegate->m_Options.GetInternalProfilingDetail()); + loadingStatus = delegate->m_Runtime->LoadNetwork(networkId, + std::move(optNet), + errorMessage, + networkProperties); if (loadingStatus != armnn::Status::Success) { // Optimize failed |