From 4514228a5ced96fe79bbe1a089f4006f53fd5f29 Mon Sep 17 00:00:00 2001
From: Colm Donelan
Date: Thu, 21 Oct 2021 23:39:52 +0100
Subject: IVGCVSW-5879 Fix problems with using internal profiling from delegate.

* Pass through the value of m_EnableProfiling from ExecuteNetwork to
  DelegateOptions.
* If internal profiling is enabled, print it out from inside the delegate.
* Remove an unnecessary ProfilerImpl instance from WorkingMemHandle.hpp.
* Remove an unnecessary parameter from TfLiteDelegateMainImpl in
  ExecuteNetwork.

Signed-off-by: Colm Donelan
Change-Id: Ia1d4b1eb3a05ca5b4d80cc39e138c7fac182d948
---
 delegate/src/armnn_delegate.cpp               | 8 +++++++-
 src/armnn/WorkingMemHandle.hpp                | 1 -
 tests/ExecuteNetwork/ExecuteNetwork.cpp       | 5 ++---
 tests/ExecuteNetwork/ExecuteNetworkParams.cpp | 3 ++-
 4 files changed, 11 insertions(+), 6 deletions(-)

diff --git a/delegate/src/armnn_delegate.cpp b/delegate/src/armnn_delegate.cpp
index 5fbc920a1e..e029e2c420 100644
--- a/delegate/src/armnn_delegate.cpp
+++ b/delegate/src/armnn_delegate.cpp
@@ -397,7 +397,7 @@ ArmnnSubgraph* ArmnnSubgraph::Create(TfLiteContext* tfLiteContext,
                                                                           networkProperties);
         if (loadingStatus != armnn::Status::Success)
         {
-            // Optimize failed
+            // Network load failed.
             throw armnn::Exception("TfLiteArmnnDelegate: Network could not be loaded:" + errorMessage);
         }
     }
@@ -457,6 +457,12 @@ TfLiteStatus ArmnnSubgraph::Invoke(TfLiteContext* tfLiteContext, TfLiteNode* tfL
 
     // Run graph
     auto status = m_Runtime->EnqueueWorkload(m_NetworkId, inputTensors, outputTensors);
+    // The delegate holds its own Arm NN runtime so this is our last chance to print internal profiling data.
+    std::shared_ptr<armnn::IProfiler> profiler = m_Runtime->GetProfiler(m_NetworkId);
+    if (profiler && profiler->IsProfilingEnabled())
+    {
+        profiler->Print(std::cout);
+    }
     return (status == armnn::Status::Success) ? kTfLiteOk : kTfLiteError;
 }
 
diff --git a/src/armnn/WorkingMemHandle.hpp b/src/armnn/WorkingMemHandle.hpp
index aaa9d593ee..9078a8d54c 100644
--- a/src/armnn/WorkingMemHandle.hpp
+++ b/src/armnn/WorkingMemHandle.hpp
@@ -119,7 +119,6 @@ public:
 private:
     using DifferenceType = std::vector::difference_type;
     NetworkId m_NetworkId;
-    std::shared_ptr<ProfilerImpl> m_Profiler;
 
     std::unordered_map m_InputHandleMap;
     std::unordered_map m_OutputHandleMap;
diff --git a/tests/ExecuteNetwork/ExecuteNetwork.cpp b/tests/ExecuteNetwork/ExecuteNetwork.cpp
index 66be8fd02a..db15872ad6 100644
--- a/tests/ExecuteNetwork/ExecuteNetwork.cpp
+++ b/tests/ExecuteNetwork/ExecuteNetwork.cpp
@@ -68,8 +68,7 @@ bool CheckInferenceTimeThreshold(const std::chrono::duration
 }
 
 #if defined(ARMNN_TFLITE_DELEGATE)
-int TfLiteDelegateMainImpl(const ExecuteNetworkParams& params, const armnn::IRuntime::CreationOptions runtimeOptions,
-                           const std::shared_ptr<armnn::IRuntime>& runtime = nullptr)
+int TfLiteDelegateMainImpl(const ExecuteNetworkParams& params, const armnn::IRuntime::CreationOptions runtimeOptions)
 {
     using namespace tflite;
 
@@ -867,7 +866,7 @@ int main(int argc, const char* argv[])
             ExecuteNetworkParams::TfLiteExecutor::TfliteInterpreter)
         {
         #if defined(ARMNN_TF_LITE_DELEGATE)
-            return TfLiteDelegateMainImpl(ProgramOptions.m_ExNetParams, ProgramOptions.m_RuntimeOptions, runtime);
+            return TfLiteDelegateMainImpl(ProgramOptions.m_ExNetParams, ProgramOptions.m_RuntimeOptions);
         #else
             ARMNN_LOG(fatal) << "Not built with Arm NN Tensorflow-Lite delegate support.";
             return EXIT_FAILURE;
diff --git a/tests/ExecuteNetwork/ExecuteNetworkParams.cpp b/tests/ExecuteNetwork/ExecuteNetworkParams.cpp
index 541430c421..b3d18cdfd1 100644
--- a/tests/ExecuteNetwork/ExecuteNetworkParams.cpp
+++ b/tests/ExecuteNetwork/ExecuteNetworkParams.cpp
@@ -250,7 +250,8 @@ armnnDelegate::DelegateOptions ExecuteNetworkParams::ToDelegateOptions() const
     options.m_ReduceFp32ToFp16 = m_EnableFp16TurboMode;
     options.m_ReduceFp32ToBf16 = m_EnableBf16TurboMode;
     options.m_Debug = m_PrintIntermediate;
-
+    options.m_ProfilingEnabled = m_EnableProfiling;
+    delegateOptions.SetInternalProfilingParams(m_EnableProfiling, armnn::ProfilingDetailsMethod::DetailsWithEvents);
     options.m_shapeInferenceMethod = armnn::ShapeInferenceMethod::ValidateOnly;
     if (m_InferOutputShape)
    {
--
cgit v1.2.1
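
For reference, below is a minimal sketch of how an application using the Arm NN TfLite delegate might switch on the internal profiling that this patch prints from ArmnnSubgraph::Invoke. Only SetInternalProfilingParams, armnn::ProfilingDetailsMethod::DetailsWithEvents and the std::cout printout come from the patch itself; the model path, the CpuAcc backend choice and the interpreter setup are illustrative assumptions following the delegate's usual usage pattern, not part of this change.

// Sketch only, not part of the patch: enable the delegate's internal profiling.
#include <memory>

#include <tensorflow/lite/interpreter.h>
#include <tensorflow/lite/kernels/register.h>
#include <tensorflow/lite/model.h>

#include <armnn_delegate.hpp>

int main()
{
    // Standard TfLite setup; "model.tflite" is a placeholder path.
    auto model = tflite::FlatBufferModel::BuildFromFile("model.tflite");
    tflite::ops::builtin::BuiltinOpResolver resolver;
    std::unique_ptr<tflite::Interpreter> interpreter;
    tflite::InterpreterBuilder(*model, resolver)(&interpreter);

    // Configure the Arm NN delegate; CpuAcc is just an example backend.
    armnnDelegate::DelegateOptions delegateOptions(armnn::Compute::CpuAcc);
    // This is the switch the patch plumbs through from ExecuteNetwork:
    // it enables the runtime's internal profiler inside the delegate.
    delegateOptions.SetInternalProfilingParams(true, armnn::ProfilingDetailsMethod::DetailsWithEvents);

    // Create the delegate and hand the graph over to it.
    std::unique_ptr<TfLiteDelegate, decltype(&armnnDelegate::TfLiteArmnnDelegateDelete)>
        armnnTfLiteDelegate(armnnDelegate::TfLiteArmnnDelegateCreate(delegateOptions),
                            armnnDelegate::TfLiteArmnnDelegateDelete);
    interpreter->ModifyGraphWithDelegate(armnnTfLiteDelegate.get());
    interpreter->AllocateTensors();

    // With the patch applied, ArmnnSubgraph::Invoke prints the internal
    // profiling data to std::cout once the workload has run.
    interpreter->Invoke();
    return 0;
}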