From 4f29f156599e0d1ebf5b9b5423450ba2ae3288b4 Mon Sep 17 00:00:00 2001
From: Matthew Sloyan
Date: Mon, 18 Jan 2021 16:10:20 +0000
Subject: IVGCVSW-5484 Add Network loading time to InferenceModel

* Added an output log to capture the time taken to load the network into the
  runtime.
* This time is reduced when loading a cached network.

Signed-off-by: Matthew Sloyan
Change-Id: I043c177f17d01df35fbe0752ec5d77e350749164
---
 tests/InferenceModel.hpp | 5 +++++
 1 file changed, 5 insertions(+)

diff --git a/tests/InferenceModel.hpp b/tests/InferenceModel.hpp
index 7daae26bd4..936d0bf9ea 100644
--- a/tests/InferenceModel.hpp
+++ b/tests/InferenceModel.hpp
@@ -464,7 +464,12 @@ public:
         armnn::Status ret;
         {
             ARMNN_SCOPED_HEAP_PROFILING("LoadNetwork");
+
+            const auto loading_start_time = armnn::GetTimeNow();
             ret = m_Runtime->LoadNetwork(m_NetworkIdentifier, std::move(optNet));
+
+            ARMNN_LOG(info) << "Network loading time: " << std::setprecision(2)
+                << std::fixed << armnn::GetTimeDuration(loading_start_time).count() << " ms\n";
         }
 
         if (ret == armnn::Status::Failure)
--
cgit v1.2.1
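
For reference, below is a minimal, self-contained sketch of the timing pattern used in the hunk above. It assumes that armnn::GetTimeNow() and armnn::GetTimeDuration() are thin wrappers over std::chrono (a high_resolution_clock time point and a millisecond duration, respectively); the stand-alone version reproduces the same measurement and log formatting with std::chrono and std::cout directly, and replaces the m_Runtime->LoadNetwork(...) call with a placeholder workload.

#include <chrono>
#include <iomanip>
#include <iostream>

int main()
{
    // Start the clock, mirroring "const auto loading_start_time = armnn::GetTimeNow();".
    const auto loading_start_time = std::chrono::high_resolution_clock::now();

    // Placeholder for m_Runtime->LoadNetwork(...); whatever work is to be timed goes here.
    volatile long sink = 0;
    for (long i = 0; i < 1000000; ++i)
    {
        sink = sink + i;
    }

    // Elapsed time in milliseconds, mirroring armnn::GetTimeDuration(loading_start_time).count().
    const std::chrono::duration<double, std::milli> elapsed =
        std::chrono::high_resolution_clock::now() - loading_start_time;

    // Same formatting as the ARMNN_LOG(info) statement in the patch:
    // two fixed decimal places followed by " ms".
    std::cout << "Network loading time: " << std::setprecision(2)
              << std::fixed << elapsed.count() << " ms\n";
    return 0;
}

Because the measurement brackets only the LoadNetwork call (the scoped heap-profiling block), the logged figure isolates runtime loading from graph parsing and optimisation, which is what makes the cached-network speed-up visible in the output.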