Diffstat (limited to 'tests/InferenceModel.hpp')
-rw-r--r--  tests/InferenceModel.hpp | 16
1 file changed, 10 insertions, 6 deletions
diff --git a/tests/InferenceModel.hpp b/tests/InferenceModel.hpp
index 6bfad067ca..dca3ab2788 100644
--- a/tests/InferenceModel.hpp
+++ b/tests/InferenceModel.hpp
@@ -419,14 +419,14 @@ public:
throw armnn::Exception("Some backend IDs are invalid: " + invalidBackends);
}
- const auto parsing_start_time = armnn::GetTimeNow();
- armnn::INetworkPtr network = CreateNetworkImpl<IParser>::Create(params, m_InputBindings, m_OutputBindings);
-
- ARMNN_LOG(info) << "Network parsing time: " << std::setprecision(2)
- << std::fixed << armnn::GetTimeDuration(parsing_start_time).count() << " ms\n";
-
armnn::IOptimizedNetworkPtr optNet{nullptr, [](armnn::IOptimizedNetwork*){}};
{
+ const auto parsing_start_time = armnn::GetTimeNow();
+ armnn::INetworkPtr network = CreateNetworkImpl<IParser>::Create(params, m_InputBindings, m_OutputBindings);
+
+ ARMNN_LOG(info) << "Network parsing time: " << std::setprecision(2)
+ << std::fixed << armnn::GetTimeDuration(parsing_start_time).count() << " ms\n";
+
ARMNN_SCOPED_HEAP_PROFILING("Optimizing");
armnn::OptimizerOptions options;
@@ -460,6 +460,8 @@ public:
{
throw armnn::Exception("Optimize returned nullptr");
}
+
+
}
if (params.m_VisualizePostOptimizationModel)
@@ -470,6 +472,8 @@ public:
optNet->SerializeToDot(file);
}
+
+
armnn::Status ret;
{
ARMNN_SCOPED_HEAP_PROFILING("LoadNetwork");
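Note: the first hunk above relocates the parsing-time measurement so that it runs inside the same scope as network optimization. The following is a minimal, self-contained sketch of that scoped timing idiom, using std::chrono as a stand-in for armnn::GetTimeNow()/armnn::GetTimeDuration(); the placeholder comment where the parsing call would go is illustrative only.

// Sketch of the timing pattern used in the patch: take a start timestamp,
// do the timed work, then log the elapsed wall-clock time in milliseconds
// with two fixed decimal places (as the ARMNN_LOG line in the diff does).
#include <chrono>
#include <iomanip>
#include <iostream>

int main()
{
    const auto parsing_start_time = std::chrono::high_resolution_clock::now();

    // ... timed work goes here, e.g. parsing the network ...

    const std::chrono::duration<double, std::milli> parsing_duration =
        std::chrono::high_resolution_clock::now() - parsing_start_time;

    std::cout << "Network parsing time: " << std::setprecision(2) << std::fixed
              << parsing_duration.count() << " ms\n";
    return 0;
}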