From 4422ceca976a88aac49b21808a43e465bc87a35e Mon Sep 17 00:00:00 2001
From: Finn Williams
Date: Mon, 22 Mar 2021 17:51:06 +0000
Subject: Fix graph copy memory spike

 * Change layer storage of ConstTensors to std::shared_ptr
 * Change clone to share ConstTensor rather than copy
 * Remove uses of non-const GetTensor() call
 * Reduce scope of non-optimized network in ExeNet, so memory can be released after use

Signed-off-by: Finn Williams
Change-Id: Ibb2c7309d12411d21405bd6024c76bcdf5404545
---
 tests/InferenceModel.hpp | 16 ++++++++++------
 1 file changed, 10 insertions(+), 6 deletions(-)

diff --git a/tests/InferenceModel.hpp b/tests/InferenceModel.hpp
index 6bfad067ca..dca3ab2788 100644
--- a/tests/InferenceModel.hpp
+++ b/tests/InferenceModel.hpp
@@ -419,14 +419,14 @@ public:
             throw armnn::Exception("Some backend IDs are invalid: " + invalidBackends);
         }
 
-        const auto parsing_start_time = armnn::GetTimeNow();
-        armnn::INetworkPtr network = CreateNetworkImpl<IParser>::Create(params, m_InputBindings, m_OutputBindings);
-
-        ARMNN_LOG(info) << "Network parsing time: " << std::setprecision(2)
-                        << std::fixed << armnn::GetTimeDuration(parsing_start_time).count() << " ms\n";
-
         armnn::IOptimizedNetworkPtr optNet{nullptr, [](armnn::IOptimizedNetwork*){}};
         {
+            const auto parsing_start_time = armnn::GetTimeNow();
+            armnn::INetworkPtr network = CreateNetworkImpl<IParser>::Create(params, m_InputBindings, m_OutputBindings);
+
+            ARMNN_LOG(info) << "Network parsing time: " << std::setprecision(2)
+                            << std::fixed << armnn::GetTimeDuration(parsing_start_time).count() << " ms\n";
+
             ARMNN_SCOPED_HEAP_PROFILING("Optimizing");
 
             armnn::OptimizerOptions options;
@@ -460,6 +460,8 @@ public:
             {
                 throw armnn::Exception("Optimize returned nullptr");
             }
+
+
         }
 
         if (params.m_VisualizePostOptimizationModel)
@@ -470,6 +472,8 @@ public:
             optNet->SerializeToDot(file);
         }
 
+
+
         armnn::Status ret;
         {
             ARMNN_SCOPED_HEAP_PROFILING("LoadNetwork");
-- 
cgit v1.2.1
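
Note (not part of the patch): a minimal standalone sketch of the scope-reduction pattern the first hunk applies. The parsed, non-optimized network is kept alive only long enough to call Optimize(), so its tensor storage can be released before the optimized network is loaded. The helper name BuildOptimizedNetwork and the empty network are illustrative assumptions; only the public Arm NN calls shown (INetwork::Create, Optimize, IRuntime::GetDeviceSpec) are taken from the API the patched file already relies on.

#include <armnn/ArmNN.hpp>

#include <vector>

armnn::IOptimizedNetworkPtr BuildOptimizedNetwork(const armnn::IRuntimePtr& runtime,
                                                  const std::vector<armnn::BackendId>& backends)
{
    // Same idiom as the patch: a null IOptimizedNetworkPtr with a no-op deleter,
    // declared outside the block so it outlives the parsed network.
    armnn::IOptimizedNetworkPtr optNet{nullptr, [](armnn::IOptimizedNetwork*){}};
    {
        // The non-optimized network exists only inside this block.
        // (Left empty here for illustration; the real code parses a model into it.)
        armnn::INetworkPtr network = armnn::INetwork::Create();

        optNet = armnn::Optimize(*network, backends, runtime->GetDeviceSpec());
    }   // 'network' is destroyed here, freeing its memory before LoadNetwork runs.

    if (!optNet)
    {
        throw armnn::Exception("Optimize returned nullptr");
    }
    return optNet;
}

Declaring optNet before the inner block, initialised to null with a deleter-carrying lambda, is what lets the parsed network go out of scope immediately after optimization while the optimized network remains available for the subsequent load.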