From 4514228a5ced96fe79bbe1a089f4006f53fd5f29 Mon Sep 17 00:00:00 2001
From: Colm Donelan
Date: Thu, 21 Oct 2021 23:39:52 +0100
Subject: IVGCVSW-5879 Fix problems with using internal profiling from delegate.

* Pass through the value of m_EnableProfiling from ExecuteNetwork to
  DelegateOptions.
* If internal profiling is enabled, print the results from inside the delegate.
* Remove an unnecessary ProfilerImpl instance from WorkingMemHandle.hpp.
* Remove an unnecessary parameter from TfLiteDelegateMainImpl in
  ExecuteNetwork.

Signed-off-by: Colm Donelan
Change-Id: Ia1d4b1eb3a05ca5b4d80cc39e138c7fac182d948
---
 tests/ExecuteNetwork/ExecuteNetwork.cpp       | 5 ++---
 tests/ExecuteNetwork/ExecuteNetworkParams.cpp | 3 ++-
 2 files changed, 4 insertions(+), 4 deletions(-)

(limited to 'tests')

diff --git a/tests/ExecuteNetwork/ExecuteNetwork.cpp b/tests/ExecuteNetwork/ExecuteNetwork.cpp
index 66be8fd02a..db15872ad6 100644
--- a/tests/ExecuteNetwork/ExecuteNetwork.cpp
+++ b/tests/ExecuteNetwork/ExecuteNetwork.cpp
@@ -68,8 +68,7 @@ bool CheckInferenceTimeThreshold(const std::chrono::duration
 }
 
 #if defined(ARMNN_TFLITE_DELEGATE)
-int TfLiteDelegateMainImpl(const ExecuteNetworkParams& params, const armnn::IRuntime::CreationOptions runtimeOptions,
-                           const std::shared_ptr<armnn::IRuntime>& runtime = nullptr)
+int TfLiteDelegateMainImpl(const ExecuteNetworkParams& params, const armnn::IRuntime::CreationOptions runtimeOptions)
 {
     using namespace tflite;
 
@@ -867,7 +866,7 @@ int main(int argc, const char* argv[])
             ExecuteNetworkParams::TfLiteExecutor::TfliteInterpreter)
     {
 #if defined(ARMNN_TF_LITE_DELEGATE)
-        return TfLiteDelegateMainImpl(ProgramOptions.m_ExNetParams, ProgramOptions.m_RuntimeOptions, runtime);
+        return TfLiteDelegateMainImpl(ProgramOptions.m_ExNetParams, ProgramOptions.m_RuntimeOptions);
 #else
         ARMNN_LOG(fatal) << "Not built with Arm NN Tensorflow-Lite delegate support.";
         return EXIT_FAILURE;
diff --git a/tests/ExecuteNetwork/ExecuteNetworkParams.cpp b/tests/ExecuteNetwork/ExecuteNetworkParams.cpp
index 541430c421..b3d18cdfd1 100644
--- a/tests/ExecuteNetwork/ExecuteNetworkParams.cpp
+++ b/tests/ExecuteNetwork/ExecuteNetworkParams.cpp
@@ -250,7 +250,8 @@ armnnDelegate::DelegateOptions ExecuteNetworkParams::ToDelegateOptions() const
     options.m_ReduceFp32ToFp16 = m_EnableFp16TurboMode;
     options.m_ReduceFp32ToBf16 = m_EnableBf16TurboMode;
     options.m_Debug = m_PrintIntermediate;
-
+    options.m_ProfilingEnabled = m_EnableProfiling;
+    delegateOptions.SetInternalProfilingParams(m_EnableProfiling, armnn::ProfilingDetailsMethod::DetailsWithEvents);
     options.m_shapeInferenceMethod = armnn::ShapeInferenceMethod::ValidateOnly;
     if (m_InferOutputShape)
     {
-- 
cgit v1.2.1
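
For reference, a minimal sketch of how a delegate client might set up the internal-profiling options exercised by this patch. Only SetInternalProfilingParams and armnn::ProfilingDetailsMethod::DetailsWithEvents are taken from the diff itself; the include paths, the Compute-based constructor and the MakeDelegateOptions helper are illustrative assumptions, not part of the change.

// Sketch only: mirrors what ExecuteNetworkParams::ToDelegateOptions() now does,
// so the delegate prints its internal profiling results after inference.
// Include paths and the Compute-based constructor are assumptions and may
// differ between Arm NN releases.
#include <DelegateOptions.hpp>    // armnnDelegate::DelegateOptions
#include <armnn/BackendId.hpp>    // armnn::Compute
#include <armnn/Types.hpp>        // armnn::ProfilingDetailsMethod

armnnDelegate::DelegateOptions MakeDelegateOptions(bool enableProfiling)
{
    // Run delegated subgraphs on the Neon (CpuAcc) backend.
    armnnDelegate::DelegateOptions delegateOptions(armnn::Compute::CpuAcc);

    // Equivalent of passing m_EnableProfiling through from ExecuteNetwork:
    // enable internal profiling and request per-workload details with events.
    delegateOptions.SetInternalProfilingParams(enableProfiling,
                                               armnn::ProfilingDetailsMethod::DetailsWithEvents);
    return delegateOptions;
}

The returned options would typically be handed to armnnDelegate::TfLiteArmnnDelegateCreate() when registering the delegate with the TfLite interpreter, which is how ExecuteNetwork uses them inside TfLiteDelegateMainImpl.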