Diffstat (limited to 'src/armnn')
-rw-r--r--    src/armnn/BackendRegistry.cpp     5
-rw-r--r--    src/armnn/Layer.cpp               4
-rw-r--r--    src/armnn/LoadedNetwork.cpp      25
-rw-r--r--    src/armnn/LoadedNetwork.hpp       9
-rw-r--r--    src/armnn/Network.cpp             8
-rw-r--r--    src/armnn/Runtime.cpp            28
-rw-r--r--    src/armnn/Runtime.hpp             6
7 files changed, 44 insertions, 41 deletions
diff --git a/src/armnn/BackendRegistry.cpp b/src/armnn/BackendRegistry.cpp
index 7b1f6bcb5d..01f632fb22 100644
--- a/src/armnn/BackendRegistry.cpp
+++ b/src/armnn/BackendRegistry.cpp
@@ -5,7 +5,8 @@
#include <armnn/BackendRegistry.hpp>
#include <armnn/Exceptions.hpp>
-#include <ProfilingService.hpp>
+#include <armnn/profiling/ArmNNProfiling.hpp>
+#include <IProfilingService.hpp>
namespace armnn
{
@@ -102,7 +103,7 @@ void BackendRegistry::Swap(BackendRegistry& instance, BackendRegistry::FactorySt
std::swap(instance.m_Factories, other);
}
-void BackendRegistry::SetProfilingService(armnn::Optional<arm::pipe::ProfilingService&> profilingService)
+void BackendRegistry::SetProfilingService(armnn::Optional<arm::pipe::IProfilingService&> profilingService)
{
m_ProfilingService = profilingService;
}
diff --git a/src/armnn/Layer.cpp b/src/armnn/Layer.cpp
index c827b4b681..5818eefa39 100644
--- a/src/armnn/Layer.cpp
+++ b/src/armnn/Layer.cpp
@@ -5,7 +5,7 @@
#include "Layer.hpp"
#include "Graph.hpp"
-#include <ProfilingService.hpp>
+#include <IProfilingService.hpp>
#include <armnn/utility/NumericCast.hpp>
#include <armnn/backends/TensorHandle.hpp>
#include <armnn/backends/WorkloadData.hpp>
@@ -202,7 +202,7 @@ Layer::Layer(unsigned int numInputSlots,
, m_Type(type)
, m_BackendId()
, m_BackendHint(EmptyOptional())
-, m_Guid(arm::pipe::ProfilingService::GetNextGuid())
+, m_Guid(arm::pipe::IProfilingService::GetNextGuid())
{
IgnoreUnused(layout);
m_InputSlots.reserve(numInputSlots);
diff --git a/src/armnn/LoadedNetwork.cpp b/src/armnn/LoadedNetwork.cpp
index 1367552f98..1dbd1e3112 100644
--- a/src/armnn/LoadedNetwork.cpp
+++ b/src/armnn/LoadedNetwork.cpp
@@ -20,6 +20,7 @@
#include <armnn/backends/MemCopyWorkload.hpp>
#include <backendsCommon/MemSyncWorkload.hpp>
#include <armnn/BackendHelper.hpp>
+#include <armnn/profiling/ArmNNProfiling.hpp>
#include <fmt/format.h>
@@ -82,7 +83,7 @@ void AddWorkloadStructure(std::unique_ptr<TimelineUtilityMethods>& timelineUtils
std::unique_ptr<LoadedNetwork> LoadedNetwork::MakeLoadedNetwork(std::unique_ptr<IOptimizedNetwork> net,
std::string& errorMessage,
const INetworkProperties& networkProperties,
- ProfilingService& profilingService)
+ arm::pipe::IProfilingService* profilingService)
{
std::unique_ptr<LoadedNetwork> loadedNetwork;
@@ -116,7 +117,7 @@ std::unique_ptr<LoadedNetwork> LoadedNetwork::MakeLoadedNetwork(std::unique_ptr<
LoadedNetwork::LoadedNetwork(std::unique_ptr<IOptimizedNetwork> net,
const INetworkProperties& networkProperties,
- ProfilingService& profilingService) :
+ arm::pipe::IProfilingService* profilingService) :
m_OptimizedNetwork(std::move(net)),
m_NetworkProperties(networkProperties),
m_TensorHandleFactoryRegistry(),
@@ -254,7 +255,7 @@ LoadedNetwork::LoadedNetwork(std::unique_ptr<IOptimizedNetwork> net,
ProfilingGuid networkGuid = m_OptimizedNetwork->GetGuid();
std::unique_ptr<TimelineUtilityMethods> timelineUtils =
- TimelineUtilityMethods::GetTimelineUtils(m_ProfilingService);
+ TimelineUtilityMethods::GetTimelineUtils(*m_ProfilingService);
if (timelineUtils)
{
timelineUtils->CreateTypedEntity(networkGuid, LabelsAndEventClasses::NETWORK_GUID);
@@ -549,7 +550,7 @@ void LoadedNetwork::SendNetworkStructure()
ProfilingGuid networkGuid = m_OptimizedNetwork->GetGuid();
std::unique_ptr<TimelineUtilityMethods> timelineUtils =
- TimelineUtilityMethods::GetTimelineUtils(m_ProfilingService);
+ TimelineUtilityMethods::GetTimelineUtils(*m_ProfilingService);
timelineUtils->CreateTypedEntity(networkGuid, LabelsAndEventClasses::NETWORK_GUID);
@@ -893,8 +894,8 @@ Status LoadedNetwork::EnqueueWorkload(const InputTensors& inputTensors,
}
std::unique_ptr<TimelineUtilityMethods> timelineUtils =
- TimelineUtilityMethods::GetTimelineUtils(m_ProfilingService);
- ProfilingGuid inferenceGuid = m_ProfilingService.GetNextGuid();
+ TimelineUtilityMethods::GetTimelineUtils(*m_ProfilingService);
+ ProfilingGuid inferenceGuid = m_ProfilingService->GetNextGuid();
if (timelineUtils)
{
// Add inference timeline trace if profiling is enabled.
@@ -910,9 +911,9 @@ Status LoadedNetwork::EnqueueWorkload(const InputTensors& inputTensors,
bool executionSucceeded = true;
{
- if (m_ProfilingService.IsProfilingEnabled())
+ if (m_ProfilingService->IsProfilingEnabled())
{
- m_ProfilingService.IncrementCounterValue(INFERENCES_RUN);
+ m_ProfilingService->IncrementCounterValue(INFERENCES_RUN);
}
ARMNN_SCOPED_PROFILING_EVENT(Compute::Undefined, "Execute");
ARMNN_SCOPED_HEAP_PROFILING("Executing");
@@ -982,7 +983,7 @@ void LoadedNetwork::EnqueueInput(const BindableLayer& layer, ITensorHandle* tens
ARMNN_ASSERT_MSG(inputWorkload, "No input workload created");
std::unique_ptr<TimelineUtilityMethods> timelineUtils =
- TimelineUtilityMethods::GetTimelineUtils(m_ProfilingService);
+ TimelineUtilityMethods::GetTimelineUtils(*m_ProfilingService);
if (timelineUtils)
{
// Add Input Workload to the post-optimisation network structure
@@ -1070,7 +1071,7 @@ void LoadedNetwork::EnqueueOutput(const BindableLayer& layer, ITensorHandle* ten
ARMNN_ASSERT_MSG(outputWorkload, "No output workload created");
std::unique_ptr<TimelineUtilityMethods> timelineUtils =
- TimelineUtilityMethods::GetTimelineUtils(m_ProfilingService);
+ TimelineUtilityMethods::GetTimelineUtils(*m_ProfilingService);
if (timelineUtils)
{
// Add Output Workload to the post-optimisation network structure
@@ -1683,8 +1684,8 @@ Status LoadedNetwork::Execute(const InputTensors& inputTensors,
};
std::unique_ptr<TimelineUtilityMethods> timelineUtils =
- TimelineUtilityMethods::GetTimelineUtils(m_ProfilingService);
- ProfilingGuid inferenceGuid = m_ProfilingService.GetNextGuid();
+ TimelineUtilityMethods::GetTimelineUtils(*m_ProfilingService);
+ ProfilingGuid inferenceGuid = m_ProfilingService->GetNextGuid();
if (timelineUtils)
{
// Add inference timeline trace if profiling is enabled.
diff --git a/src/armnn/LoadedNetwork.hpp b/src/armnn/LoadedNetwork.hpp
index 09d760454e..19f2bcf907 100644
--- a/src/armnn/LoadedNetwork.hpp
+++ b/src/armnn/LoadedNetwork.hpp
@@ -19,7 +19,7 @@
#include <backendsCommon/memoryOptimizerStrategyLibrary/strategies/SingleAxisPriorityList.hpp>
-#include <ProfilingService.hpp>
+#include <IProfilingService.hpp>
#include <TimelineUtilityMethods.hpp>
#include <common/include/LabelsAndEventClasses.hpp>
@@ -78,7 +78,7 @@ public:
static std::unique_ptr<LoadedNetwork> MakeLoadedNetwork(std::unique_ptr<IOptimizedNetwork> net,
std::string& errorMessage,
const INetworkProperties& networkProperties,
- arm::pipe::ProfilingService& profilingService);
+ arm::pipe::IProfilingService* profilingService);
// NOTE we return by reference as the purpose of this method is only to provide
// access to the private m_Profiler and in theory we should not need to increment
@@ -112,7 +112,7 @@ private:
LoadedNetwork(std::unique_ptr<IOptimizedNetwork> net,
const INetworkProperties& networkProperties,
- arm::pipe::ProfilingService& profilingService);
+ arm::pipe::IProfilingService* profilingService);
void EnqueueInput(const BindableLayer& layer, ITensorHandle* tensorHandle, const TensorInfo& tensorInfo);
@@ -158,7 +158,8 @@ private:
TensorHandleFactoryRegistry m_TensorHandleFactoryRegistry;
- arm::pipe::ProfilingService& m_ProfilingService;
+ // NOTE: raw pointer because the profiling service is controlled by the Runtime
+ arm::pipe::IProfilingService* m_ProfilingService;
struct ImportedTensorHandlePin
{
diff --git a/src/armnn/Network.cpp b/src/armnn/Network.cpp
index 498c4a72ad..58e8b503ee 100644
--- a/src/armnn/Network.cpp
+++ b/src/armnn/Network.cpp
@@ -26,7 +26,7 @@
#include <armnn/utility/IgnoreUnused.hpp>
#include <armnn/utility/PolymorphicDowncast.hpp>
-#include <ProfilingService.hpp>
+#include <IProfilingService.hpp>
#include <common/include/ProfilingGuid.hpp>
@@ -2866,18 +2866,18 @@ void NetworkImpl::ExecuteStrategy(IStrategy& strategy) const
OptimizedNetworkImpl::OptimizedNetworkImpl(const OptimizedNetworkImpl& other, const ModelOptions& modelOptions)
: m_Graph(new Graph(*other.m_Graph.get()))
- , m_Guid(arm::pipe::ProfilingService::GetNextGuid())
+ , m_Guid(arm::pipe::IProfilingService::GetNextGuid())
, m_ModelOptions(modelOptions)
{
}
OptimizedNetworkImpl::OptimizedNetworkImpl(std::unique_ptr<Graph> graph)
- : m_Graph(std::move(graph)), m_Guid(arm::pipe::ProfilingService::GetNextGuid())
+ : m_Graph(std::move(graph)), m_Guid(arm::pipe::IProfilingService::GetNextGuid())
{
}
OptimizedNetworkImpl::OptimizedNetworkImpl(std::unique_ptr<Graph> graph, const ModelOptions& modelOptions)
- : m_Graph(std::move(graph)), m_Guid(arm::pipe::ProfilingService::GetNextGuid()), m_ModelOptions(modelOptions)
+ : m_Graph(std::move(graph)), m_Guid(arm::pipe::IProfilingService::GetNextGuid()), m_ModelOptions(modelOptions)
{
}
diff --git a/src/armnn/Runtime.cpp b/src/armnn/Runtime.cpp
index 640e5947e2..4cc34ff6ac 100644
--- a/src/armnn/Runtime.cpp
+++ b/src/armnn/Runtime.cpp
@@ -185,7 +185,7 @@ Status RuntimeImpl::LoadNetwork(NetworkId& networkIdOut,
std::unique_ptr<IOptimizedNetwork>(rawNetwork),
errorMessage,
networkProperties,
- m_ProfilingService);
+ m_ProfilingService.get());
if (!loadedNetwork)
{
@@ -204,9 +204,9 @@ Status RuntimeImpl::LoadNetwork(NetworkId& networkIdOut,
context.second->AfterLoadNetwork(networkIdOut);
}
- if (m_ProfilingService.IsProfilingEnabled())
+ if (m_ProfilingService->IsProfilingEnabled())
{
- m_ProfilingService.IncrementCounterValue(arm::pipe::NETWORK_LOADS);
+ m_ProfilingService->IncrementCounterValue(arm::pipe::NETWORK_LOADS);
}
return Status::Success;
@@ -228,7 +228,7 @@ Status RuntimeImpl::UnloadNetwork(NetworkId networkId)
}
std::unique_ptr<arm::pipe::TimelineUtilityMethods> timelineUtils =
- arm::pipe::TimelineUtilityMethods::GetTimelineUtils(m_ProfilingService);
+ arm::pipe::TimelineUtilityMethods::GetTimelineUtils(*m_ProfilingService.get());
{
std::lock_guard<std::mutex> lockGuard(m_Mutex);
@@ -250,9 +250,9 @@ Status RuntimeImpl::UnloadNetwork(NetworkId networkId)
return Status::Failure;
}
- if (m_ProfilingService.IsProfilingEnabled())
+ if (m_ProfilingService->IsProfilingEnabled())
{
- m_ProfilingService.IncrementCounterValue(arm::pipe::NETWORK_UNLOADS);
+ m_ProfilingService->IncrementCounterValue(arm::pipe::NETWORK_UNLOADS);
}
}
@@ -296,9 +296,9 @@ void RuntimeImpl::ReportStructure() // arm::pipe::IProfilingService& profilingSe
}
RuntimeImpl::RuntimeImpl(const IRuntime::CreationOptions& options)
- : m_NetworkIdCounter(0),
- m_ProfilingService(*this)
+ : m_NetworkIdCounter(0)
{
+ m_ProfilingService = arm::pipe::IProfilingService::CreateProfilingService(*this);
const auto start_time = armnn::GetTimeNow();
ARMNN_LOG(info) << "ArmNN v" << ARMNN_VERSION;
if ( options.m_ProfilingOptions.m_TimelineEnabled && !options.m_ProfilingOptions.m_EnableProfiling )
@@ -475,7 +475,9 @@ RuntimeImpl::RuntimeImpl(const IRuntime::CreationOptions& options)
unique_ptr<arm::pipe::IBackendProfiling> profilingIface =
std::make_unique<arm::pipe::BackendProfiling>(arm::pipe::BackendProfiling(
- arm::pipe::ConvertExternalProfilingOptions(options.m_ProfilingOptions), m_ProfilingService, id));
+ arm::pipe::ConvertExternalProfilingOptions(options.m_ProfilingOptions),
+ *m_ProfilingService.get(),
+ id));
// Backends may also provide a profiling context. Ask for it now.
auto profilingContext = backend->CreateBackendProfilingContext(options, profilingIface);
@@ -483,7 +485,7 @@ RuntimeImpl::RuntimeImpl(const IRuntime::CreationOptions& options)
if (profilingContext)
{
// Pass the context onto the profiling service.
- m_ProfilingService.AddBackendProfilingContext(id, profilingContext);
+ m_ProfilingService->AddBackendProfilingContext(id, profilingContext);
}
}
catch (const BackendUnavailableException&)
@@ -492,14 +494,14 @@ RuntimeImpl::RuntimeImpl(const IRuntime::CreationOptions& options)
}
}
- BackendRegistryInstance().SetProfilingService(m_ProfilingService);
+ BackendRegistryInstance().SetProfilingService(*m_ProfilingService.get());
// pass configuration info to the profiling service
- m_ProfilingService.ConfigureProfilingService(
+ m_ProfilingService->ConfigureProfilingService(
arm::pipe::ConvertExternalProfilingOptions(options.m_ProfilingOptions));
if (options.m_ProfilingOptions.m_EnableProfiling)
{
// try to wait for the profiling service to initialise
- m_ProfilingService.WaitForProfilingServiceActivation(3000);
+ m_ProfilingService->WaitForProfilingServiceActivation(3000);
}
m_DeviceSpec.AddSupportedBackends(supportedBackends);
diff --git a/src/armnn/Runtime.hpp b/src/armnn/Runtime.hpp
index 1ac0d21b63..a8fa5fd5c5 100644
--- a/src/armnn/Runtime.hpp
+++ b/src/armnn/Runtime.hpp
@@ -14,8 +14,6 @@
#include <armnn/backends/DynamicBackend.hpp>
-#include <ProfilingService.hpp>
-
#include <IProfilingService.hpp>
#include <IReportStructure.hpp>
@@ -115,7 +113,7 @@ public:
private:
friend void RuntimeLoadedNetworksReserve(RuntimeImpl* runtime); // See RuntimeTests.cpp
- friend arm::pipe::ProfilingService& GetProfilingService(RuntimeImpl* runtime); // See RuntimeTests.cpp
+ friend arm::pipe::IProfilingService& GetProfilingService(RuntimeImpl* runtime); // See RuntimeTests.cpp
int GenerateNetworkId();
@@ -150,7 +148,7 @@ private:
std::vector<DynamicBackendPtr> m_DynamicBackends;
/// Profiling Service Instance
- arm::pipe::ProfilingService m_ProfilingService;
+ std::unique_ptr<arm::pipe::IProfilingService> m_ProfilingService;
};
} // namespace armnn
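
The ownership pattern this patch introduces - the runtime owning the profiling service behind std::unique_ptr<IProfilingService> and handing a non-owning raw pointer to shorter-lived consumers such as LoadedNetwork - can be sketched in isolation. The sketch below is an assumption-laden illustration, not Arm NN code: OwnerRuntime, Consumer and StubProfilingService are hypothetical stand-ins, and only the ownership shape (CreateProfilingService-style construction in the owner, .get() passed to the consumer) mirrors the diff.

// Minimal sketch of the ownership pattern from this patch: the owner holds the
// profiling service via unique_ptr and consumers keep a non-owning raw pointer.
// All class names are hypothetical; only the ownership shape follows the diff.
#include <cstdint>
#include <iostream>
#include <memory>

struct IProfilingService
{
    virtual ~IProfilingService() = default;
    virtual bool IsProfilingEnabled() const = 0;
    virtual std::uint64_t NextGuid() = 0;
};

// Stand-in for the concrete service a factory function would return.
struct StubProfilingService : IProfilingService
{
    bool IsProfilingEnabled() const override { return true; }
    std::uint64_t NextGuid() override { return ++m_Guid; }
    std::uint64_t m_Guid = 0;
};

// Consumer stores a raw pointer because its lifetime is bounded by the owner,
// analogous to LoadedNetwork::m_ProfilingService after this change.
class Consumer
{
public:
    explicit Consumer(IProfilingService* service) : m_Service(service) {}

    void Run()
    {
        if (m_Service && m_Service->IsProfilingEnabled())
        {
            std::cout << "guid " << m_Service->NextGuid() << "\n";
        }
    }

private:
    IProfilingService* m_Service; // non-owning; the owner controls the lifetime
};

// Owner creates the service once and passes .get() to each consumer, analogous
// to RuntimeImpl constructing m_ProfilingService and calling m_ProfilingService.get().
class OwnerRuntime
{
public:
    OwnerRuntime() : m_Service(std::make_unique<StubProfilingService>()) {}

    Consumer MakeConsumer() { return Consumer(m_Service.get()); }

private:
    std::unique_ptr<IProfilingService> m_Service;
};

int main()
{
    OwnerRuntime runtime;
    Consumer consumer = runtime.MakeConsumer();
    consumer.Run(); // prints "guid 1"
    return 0;
}

Holding the interface by pointer rather than by reference lets the owner construct the service through a factory at runtime (and keeps headers depending only on IProfilingService), at the cost of consumers having to assume the owner outlives them.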