path: root/src/armnn
author    Cathal Corbett <cathal.corbett@arm.com>    2022-02-25 15:33:28 +0000
committer Cathal Corbett <cathal.corbett@arm.com>    2022-03-01 11:44:14 +0000
commit    5aa9fd7ac6bf8dad576fa4a0a32aa3dae98d11ab (patch)
tree      23a3a0d5ca5143924277f34c98d36cf7b99430a7 /src/armnn
parent    1fc448ad2455ad31b96a3891f847125a3295d75a (diff)
download  armnn-5aa9fd7ac6bf8dad576fa4a0a32aa3dae98d11ab.tar.gz
IVGCVSW-6704 Change the namespace from armnn::profiling to arm::pipe
* Updated ABI version to 29 due to being the first ABI break in 22.05

!android-nn-driver:7226

Signed-off-by: Cathal Corbett <cathal.corbett@arm.com>
Change-Id: I9c50007dcd5b5e792757e7bd1213606df5ffec36
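For downstream code the rename is a mechanical substitution of the namespace prefix: the symbols keep their names but their mangled names change, which is why this commit counts as an ABI break and bumps the ABI version to 29. A minimal caller-side sketch of the change (illustrative only: MakeGuid() is a hypothetical helper, the required profiling headers are assumed to be included, and only identifiers that appear in this diff are used):

    // Old spelling, before the 22.05 ABI break (ABI < 29):
    //     armnn::profiling::ProfilingGuid MakeGuid()
    //     {
    //         return armnn::profiling::ProfilingService::GetNextGuid();
    //     }

    // New spelling introduced by this commit (ABI 29):
    arm::pipe::ProfilingGuid MakeGuid()
    {
        // ProfilingGuid and ProfilingService now live under arm::pipe.
        return arm::pipe::ProfilingService::GetNextGuid();
    }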
Diffstat (limited to 'src/armnn')
-rw-r--r--  src/armnn/BackendRegistry.cpp        6
-rw-r--r--  src/armnn/JsonPrinter.cpp            2
-rw-r--r--  src/armnn/JsonPrinter.hpp            8
-rw-r--r--  src/armnn/Layer.cpp                  2
-rw-r--r--  src/armnn/LoadedNetwork.cpp         32
-rw-r--r--  src/armnn/LoadedNetwork.hpp         12
-rw-r--r--  src/armnn/Network.cpp                8
-rw-r--r--  src/armnn/OptimizedNetworkImpl.hpp   4
-rw-r--r--  src/armnn/Profiling.cpp              6
-rw-r--r--  src/armnn/Profiling.hpp             10
-rw-r--r--  src/armnn/ProfilingDetails.hpp       2
-rw-r--r--  src/armnn/ProfilingEvent.cpp         4
-rw-r--r--  src/armnn/ProfilingEvent.hpp         6
-rw-r--r--  src/armnn/RangeTracker.cpp           2
-rw-r--r--  src/armnn/Runtime.cpp               27
-rw-r--r--  src/armnn/Runtime.hpp                6
-rw-r--r--  src/armnn/test/NetworkTests.cpp      6
-rw-r--r--  src/armnn/test/RuntimeTests.cpp     12
18 files changed, 78 insertions, 77 deletions
diff --git a/src/armnn/BackendRegistry.cpp b/src/armnn/BackendRegistry.cpp
index ade844fc39..7b1f6bcb5d 100644
--- a/src/armnn/BackendRegistry.cpp
+++ b/src/armnn/BackendRegistry.cpp
@@ -30,7 +30,7 @@ void BackendRegistry::Register(const BackendId& id, BackendRegistry::FactoryFunc
{
if (m_ProfilingService.has_value() && m_ProfilingService.value().IsProfilingEnabled())
{
- m_ProfilingService.value().IncrementCounterValue(armnn::profiling::REGISTERED_BACKENDS);
+ m_ProfilingService.value().IncrementCounterValue(arm::pipe::REGISTERED_BACKENDS);
}
}
@@ -43,7 +43,7 @@ void BackendRegistry::Deregister(const BackendId& id)
if (m_ProfilingService.has_value() && m_ProfilingService.value().IsProfilingEnabled())
{
- m_ProfilingService.value().IncrementCounterValue(armnn::profiling::UNREGISTERED_BACKENDS);
+ m_ProfilingService.value().IncrementCounterValue(arm::pipe::UNREGISTERED_BACKENDS);
}
}
@@ -102,7 +102,7 @@ void BackendRegistry::Swap(BackendRegistry& instance, BackendRegistry::FactorySt
std::swap(instance.m_Factories, other);
}
-void BackendRegistry::SetProfilingService(armnn::Optional<profiling::ProfilingService&> profilingService)
+void BackendRegistry::SetProfilingService(armnn::Optional<arm::pipe::ProfilingService&> profilingService)
{
m_ProfilingService = profilingService;
}
diff --git a/src/armnn/JsonPrinter.cpp b/src/armnn/JsonPrinter.cpp
index f771cb1786..8085108c31 100644
--- a/src/armnn/JsonPrinter.cpp
+++ b/src/armnn/JsonPrinter.cpp
@@ -138,7 +138,7 @@ void JsonPrinter::PrintType(armnn::JsonObjectType type)
m_OutputStream << R"(")";
}
-void JsonPrinter::PrintGuid(armnn::profiling::ProfilingGuid guid)
+void JsonPrinter::PrintGuid(arm::pipe::ProfilingGuid guid)
{
PrintTabs();
m_OutputStream << std::quoted("GUID") << ": " << std::quoted(std::to_string(guid)) << "," << std::endl;
diff --git a/src/armnn/JsonPrinter.hpp b/src/armnn/JsonPrinter.hpp
index d9b83869d7..05b6e6a510 100644
--- a/src/armnn/JsonPrinter.hpp
+++ b/src/armnn/JsonPrinter.hpp
@@ -51,9 +51,9 @@ struct JsonChildObject
}
}
- void SetGuid(profiling::ProfilingGuid guid)
+ void SetGuid(arm::pipe::ProfilingGuid guid)
{
- m_Guid = Optional<profiling::ProfilingGuid>(guid);
+ m_Guid = Optional<arm::pipe::ProfilingGuid>(guid);
}
void AddChild(const JsonChildObject& childObject)
@@ -101,7 +101,7 @@ struct JsonChildObject
std::string m_Label;
Measurement::Unit m_Unit;
JsonObjectType m_Type;
- Optional<profiling::ProfilingGuid> m_Guid;
+ Optional<arm::pipe::ProfilingGuid> m_Guid;
std::vector<double> m_Measurements;
std::vector<std::string> m_LayerDetailsList;
std::vector<JsonChildObject> m_Children;
@@ -118,7 +118,7 @@ public:
void PrintLabel(const std::string& label, size_t id);
void PrintUnit(armnn::Measurement::Unit unit);
void PrintType(armnn::JsonObjectType type);
- void PrintGuid(armnn::profiling::ProfilingGuid guid);
+ void PrintGuid(arm::pipe::ProfilingGuid guid);
void PrintMeasurementsList(const std::vector<double>& measurementsVector);
public:
diff --git a/src/armnn/Layer.cpp b/src/armnn/Layer.cpp
index 4ed179fa22..c827b4b681 100644
--- a/src/armnn/Layer.cpp
+++ b/src/armnn/Layer.cpp
@@ -202,7 +202,7 @@ Layer::Layer(unsigned int numInputSlots,
, m_Type(type)
, m_BackendId()
, m_BackendHint(EmptyOptional())
-, m_Guid(profiling::ProfilingService::GetNextGuid())
+, m_Guid(arm::pipe::ProfilingService::GetNextGuid())
{
IgnoreUnused(layout);
m_InputSlots.reserve(numInputSlots);
diff --git a/src/armnn/LoadedNetwork.cpp b/src/armnn/LoadedNetwork.cpp
index bcceaf4a99..1367552f98 100644
--- a/src/armnn/LoadedNetwork.cpp
+++ b/src/armnn/LoadedNetwork.cpp
@@ -27,7 +27,7 @@ namespace armnn
{
using namespace std;
-using namespace armnn::profiling;
+using namespace arm::pipe;
namespace
{
@@ -82,7 +82,7 @@ void AddWorkloadStructure(std::unique_ptr<TimelineUtilityMethods>& timelineUtils
std::unique_ptr<LoadedNetwork> LoadedNetwork::MakeLoadedNetwork(std::unique_ptr<IOptimizedNetwork> net,
std::string& errorMessage,
const INetworkProperties& networkProperties,
- profiling::ProfilingService& profilingService)
+ ProfilingService& profilingService)
{
std::unique_ptr<LoadedNetwork> loadedNetwork;
@@ -116,7 +116,7 @@ std::unique_ptr<LoadedNetwork> LoadedNetwork::MakeLoadedNetwork(std::unique_ptr<
LoadedNetwork::LoadedNetwork(std::unique_ptr<IOptimizedNetwork> net,
const INetworkProperties& networkProperties,
- profiling::ProfilingService& profilingService) :
+ ProfilingService& profilingService) :
m_OptimizedNetwork(std::move(net)),
m_NetworkProperties(networkProperties),
m_TensorHandleFactoryRegistry(),
@@ -580,7 +580,7 @@ void LoadedNetwork::SendNetworkStructure()
timelineUtils->Commit();
}
-profiling::ProfilingGuid LoadedNetwork::GetNetworkGuid()
+ProfilingGuid LoadedNetwork::GetNetworkGuid()
{
return m_OptimizedNetwork->GetGuid();
}
@@ -912,7 +912,7 @@ Status LoadedNetwork::EnqueueWorkload(const InputTensors& inputTensors,
{
if (m_ProfilingService.IsProfilingEnabled())
{
- m_ProfilingService.IncrementCounterValue(armnn::profiling::INFERENCES_RUN);
+ m_ProfilingService.IncrementCounterValue(INFERENCES_RUN);
}
ARMNN_SCOPED_PROFILING_EVENT(Compute::Undefined, "Execute");
ARMNN_SCOPED_HEAP_PROFILING("Executing");
@@ -1142,7 +1142,7 @@ void LoadedNetwork::FreeWorkingMemory()
}
bool LoadedNetwork::Execute(std::unique_ptr<TimelineUtilityMethods>& timelineUtils,
- profiling::ProfilingGuid inferenceGuid)
+ ProfilingGuid inferenceGuid)
{
bool success = true;
@@ -1682,19 +1682,19 @@ Status LoadedNetwork::Execute(const InputTensors& inputTensors,
}
};
- std::unique_ptr<profiling::TimelineUtilityMethods> timelineUtils =
- profiling::TimelineUtilityMethods::GetTimelineUtils(m_ProfilingService);
- profiling::ProfilingGuid inferenceGuid = m_ProfilingService.GetNextGuid();
+ std::unique_ptr<TimelineUtilityMethods> timelineUtils =
+ TimelineUtilityMethods::GetTimelineUtils(m_ProfilingService);
+ ProfilingGuid inferenceGuid = m_ProfilingService.GetNextGuid();
if (timelineUtils)
{
// Add inference timeline trace if profiling is enabled.
- profiling::ProfilingGuid networkGuid = m_OptimizedNetwork->GetGuid();
- timelineUtils->CreateTypedEntity(inferenceGuid, profiling::LabelsAndEventClasses::INFERENCE_GUID);
- timelineUtils->CreateRelationship(profiling::ProfilingRelationshipType::RetentionLink,
+ ProfilingGuid networkGuid = m_OptimizedNetwork->GetGuid();
+ timelineUtils->CreateTypedEntity(inferenceGuid, LabelsAndEventClasses::INFERENCE_GUID);
+ timelineUtils->CreateRelationship(ProfilingRelationshipType::RetentionLink,
networkGuid,
inferenceGuid,
- profiling::LabelsAndEventClasses::EXECUTION_OF_GUID);
- timelineUtils->RecordEvent(inferenceGuid, profiling::LabelsAndEventClasses::ARMNN_PROFILING_SOL_EVENT_CLASS);
+ LabelsAndEventClasses::EXECUTION_OF_GUID);
+ timelineUtils->RecordEvent(inferenceGuid, LabelsAndEventClasses::ARMNN_PROFILING_SOL_EVENT_CLASS);
}
bool executionSucceeded = true;
@@ -1702,7 +1702,7 @@ Status LoadedNetwork::Execute(const InputTensors& inputTensors,
if (timelineUtils)
{
// Add end of life of the inference timeline if profiling is enabled.
- timelineUtils->RecordEvent(inferenceGuid, profiling::LabelsAndEventClasses::ARMNN_PROFILING_EOL_EVENT_CLASS);
+ timelineUtils->RecordEvent(inferenceGuid, LabelsAndEventClasses::ARMNN_PROFILING_EOL_EVENT_CLASS);
timelineUtils->Commit();
}
@@ -1762,7 +1762,7 @@ Status LoadedNetwork::Execute(const InputTensors& inputTensors,
ARMNN_LOG(error) << "An error occurred attempting to execute a workload: " << error.what();
executionSucceeded = false;
};
- profiling::ProfilingDynamicGuid workloadInferenceID(0);
+ ProfilingDynamicGuid workloadInferenceID(0);
try
{
diff --git a/src/armnn/LoadedNetwork.hpp b/src/armnn/LoadedNetwork.hpp
index dc2f4dc10f..09d760454e 100644
--- a/src/armnn/LoadedNetwork.hpp
+++ b/src/armnn/LoadedNetwork.hpp
@@ -78,7 +78,7 @@ public:
static std::unique_ptr<LoadedNetwork> MakeLoadedNetwork(std::unique_ptr<IOptimizedNetwork> net,
std::string& errorMessage,
const INetworkProperties& networkProperties,
- profiling::ProfilingService& profilingService);
+ arm::pipe::ProfilingService& profilingService);
// NOTE we return by reference as the purpose of this method is only to provide
// access to the private m_Profiler and in theory we should not need to increment
@@ -96,7 +96,7 @@ public:
return m_NetworkProperties.m_AsyncEnabled;
}
- profiling::ProfilingGuid GetNetworkGuid();
+ arm::pipe::ProfilingGuid GetNetworkGuid();
private:
@@ -112,7 +112,7 @@ private:
LoadedNetwork(std::unique_ptr<IOptimizedNetwork> net,
const INetworkProperties& networkProperties,
- profiling::ProfilingService& profilingService);
+ arm::pipe::ProfilingService& profilingService);
void EnqueueInput(const BindableLayer& layer, ITensorHandle* tensorHandle, const TensorInfo& tensorInfo);
@@ -122,8 +122,8 @@ private:
void ImportOutputTensor(const Tensor& outputTensor, ITensorHandle* outputTensorHandle);
- bool Execute(std::unique_ptr<profiling::TimelineUtilityMethods>& timelineUtils,
- profiling::ProfilingGuid inferenceGuid);
+ bool Execute(std::unique_ptr<arm::pipe::TimelineUtilityMethods>& timelineUtils,
+ arm::pipe::ProfilingGuid inferenceGuid);
const IWorkloadFactory& GetWorkloadFactory(const Layer& layer) const;
@@ -158,7 +158,7 @@ private:
TensorHandleFactoryRegistry m_TensorHandleFactoryRegistry;
- profiling::ProfilingService& m_ProfilingService;
+ arm::pipe::ProfilingService& m_ProfilingService;
struct ImportedTensorHandlePin
{
diff --git a/src/armnn/Network.cpp b/src/armnn/Network.cpp
index 408003ec91..498c4a72ad 100644
--- a/src/armnn/Network.cpp
+++ b/src/armnn/Network.cpp
@@ -514,7 +514,7 @@ const std::shared_ptr<IProfiler>& IOptimizedNetwork::GetProfiler() const
return pOptimizedNetworkImpl->GetGraph().GetProfiler();
}
-profiling::ProfilingGuid IOptimizedNetwork::GetGuid() const
+arm::pipe::ProfilingGuid IOptimizedNetwork::GetGuid() const
{
return pOptimizedNetworkImpl->GetGuid();
}
@@ -2866,18 +2866,18 @@ void NetworkImpl::ExecuteStrategy(IStrategy& strategy) const
OptimizedNetworkImpl::OptimizedNetworkImpl(const OptimizedNetworkImpl& other, const ModelOptions& modelOptions)
: m_Graph(new Graph(*other.m_Graph.get()))
- , m_Guid(profiling::ProfilingService::GetNextGuid())
+ , m_Guid(arm::pipe::ProfilingService::GetNextGuid())
, m_ModelOptions(modelOptions)
{
}
OptimizedNetworkImpl::OptimizedNetworkImpl(std::unique_ptr<Graph> graph)
- : m_Graph(std::move(graph)), m_Guid(profiling::ProfilingService::GetNextGuid())
+ : m_Graph(std::move(graph)), m_Guid(arm::pipe::ProfilingService::GetNextGuid())
{
}
OptimizedNetworkImpl::OptimizedNetworkImpl(std::unique_ptr<Graph> graph, const ModelOptions& modelOptions)
- : m_Graph(std::move(graph)), m_Guid(profiling::ProfilingService::GetNextGuid()), m_ModelOptions(modelOptions)
+ : m_Graph(std::move(graph)), m_Guid(arm::pipe::ProfilingService::GetNextGuid()), m_ModelOptions(modelOptions)
{
}
diff --git a/src/armnn/OptimizedNetworkImpl.hpp b/src/armnn/OptimizedNetworkImpl.hpp
index 112d585aee..cb0dc4c8cf 100644
--- a/src/armnn/OptimizedNetworkImpl.hpp
+++ b/src/armnn/OptimizedNetworkImpl.hpp
@@ -19,7 +19,7 @@ public:
virtual Status PrintGraph();
virtual Status SerializeToDot(std::ostream& stream) const;
- virtual profiling::ProfilingGuid GetGuid() const { return m_Guid; };
+ virtual arm::pipe::ProfilingGuid GetGuid() const { return m_Guid; };
virtual size_t GetNumInputs() const;
virtual size_t GetNumOutputs() const;
@@ -29,7 +29,7 @@ public:
private:
std::unique_ptr<Graph> m_Graph;
- profiling::ProfilingGuid m_Guid;
+ arm::pipe::ProfilingGuid m_Guid;
ModelOptions m_ModelOptions;
};
diff --git a/src/armnn/Profiling.cpp b/src/armnn/Profiling.cpp
index e7df459103..e18bf475d1 100644
--- a/src/armnn/Profiling.cpp
+++ b/src/armnn/Profiling.cpp
@@ -206,7 +206,7 @@ Event* ProfilerImpl::BeginEvent(armnn::IProfiler* profiler,
const BackendId& backendId,
const std::string& label,
std::vector<InstrumentPtr>&& instruments,
- const Optional<profiling::ProfilingGuid>& guid)
+ const Optional<arm::pipe::ProfilingGuid>& guid)
{
Event* parent = m_Parents.empty() ? nullptr : m_Parents.top();
m_EventSequence.push_back(std::make_unique<Event>(label,
@@ -310,7 +310,7 @@ void ExtractJsonObjects(unsigned int inferenceIndex,
// If profiling GUID is entered, process it
if (parentEvent->GetProfilingGuid().has_value())
{
- profiling::ProfilingGuid profilingGuid;
+ arm::pipe::ProfilingGuid profilingGuid;
profilingGuid = parentEvent->GetProfilingGuid().value();
parentObject.SetGuid(profilingGuid);
}
@@ -610,7 +610,7 @@ void IProfiler::Print(std::ostream& outStream) const
Event* IProfiler::BeginEvent(const BackendId& backendId,
const std::string& label,
std::vector<InstrumentPtr>&& instruments,
- const Optional<profiling::ProfilingGuid>& guid)
+ const Optional<arm::pipe::ProfilingGuid>& guid)
{
return pProfilerImpl->BeginEvent(this, backendId, label, std::move(instruments), guid);
}
diff --git a/src/armnn/Profiling.hpp b/src/armnn/Profiling.hpp
index c6571d1309..b493d228ea 100644
--- a/src/armnn/Profiling.hpp
+++ b/src/armnn/Profiling.hpp
@@ -39,13 +39,13 @@ public:
const BackendId& backendId,
const std::string& name,
std::vector<InstrumentPtr>&& instruments,
- const Optional<profiling::ProfilingGuid>& guid);
+ const Optional<arm::pipe::ProfilingGuid>& guid);
template<typename DescriptorType>
void AddLayerDetails(const std::string& label,
const DescriptorType& desc,
const WorkloadInfo& infos,
- const profiling::ProfilingGuid guid)
+ const arm::pipe::ProfilingGuid guid)
{
m_ProfilingDetails->AddDetailsToString(label, desc, infos, guid);
}
@@ -134,7 +134,7 @@ public:
template<typename... Args>
ScopedProfilingEvent(const BackendId& backendId,
- const Optional<profiling::ProfilingGuid>& guid,
+ const Optional<arm::pipe::ProfilingGuid>& guid,
const std::string& name,
Args&& ... args)
: m_Event(nullptr)
@@ -180,7 +180,7 @@ template<typename DescriptorType>
inline void ProfilingUpdateDescriptions(const std::string& name,
const DescriptorType& desc,
const WorkloadInfo& infos,
- const profiling::ProfilingGuid guid)
+ const arm::pipe::ProfilingGuid guid)
{
IProfiler* profiler(ProfilerManager::GetInstance().GetProfiler()); ///< Profiler used
if (profiler && profiler->IsProfilingEnabled())
@@ -193,7 +193,7 @@ template<typename DescriptorType>
void IProfiler::AddLayerDetails(const std::string& name,
const DescriptorType& desc,
const WorkloadInfo& infos,
- const profiling::ProfilingGuid guid)
+ const arm::pipe::ProfilingGuid guid)
{
return pProfilerImpl->AddLayerDetails(name, desc, infos, guid);
}
diff --git a/src/armnn/ProfilingDetails.hpp b/src/armnn/ProfilingDetails.hpp
index 774565fd93..68ec00e7f8 100644
--- a/src/armnn/ProfilingDetails.hpp
+++ b/src/armnn/ProfilingDetails.hpp
@@ -34,7 +34,7 @@ public:
void AddDetailsToString(const std::string& workloadName,
const DescriptorType& desc,
const WorkloadInfo& infos,
- const profiling::ProfilingGuid guid)
+ const arm::pipe::ProfilingGuid guid)
{
// Once details exist, we can assume we're on the second iteration of details
if (m_DetailsExist)
diff --git a/src/armnn/ProfilingEvent.cpp b/src/armnn/ProfilingEvent.cpp
index 31d4b91d23..e341344544 100644
--- a/src/armnn/ProfilingEvent.cpp
+++ b/src/armnn/ProfilingEvent.cpp
@@ -13,7 +13,7 @@ Event::Event(const std::string& eventName,
Event* parent,
const BackendId backendId,
std::vector<InstrumentPtr>&& instruments,
- const Optional<profiling::ProfilingGuid> guid)
+ const Optional<arm::pipe::ProfilingGuid> guid)
: m_EventName(eventName)
, m_Profiler(profiler)
, m_Parent(parent)
@@ -86,7 +86,7 @@ BackendId Event::GetBackendId() const
return m_BackendId;
}
-Optional<profiling::ProfilingGuid> Event::GetProfilingGuid() const
+Optional<arm::pipe::ProfilingGuid> Event::GetProfilingGuid() const
{
return m_ProfilingGuid;
}
diff --git a/src/armnn/ProfilingEvent.hpp b/src/armnn/ProfilingEvent.hpp
index b0d5be70ba..3b9d5261ef 100644
--- a/src/armnn/ProfilingEvent.hpp
+++ b/src/armnn/ProfilingEvent.hpp
@@ -35,7 +35,7 @@ public:
Event* parent,
const BackendId backendId,
std::vector<InstrumentPtr>&& instrument,
- const Optional<profiling::ProfilingGuid> guid);
+ const Optional<arm::pipe::ProfilingGuid> guid);
Event(const Event& other) = delete;
@@ -73,7 +73,7 @@ public:
/// Get the associated profiling GUID if the event is a workload
/// \return Optional GUID of the event
- Optional<profiling::ProfilingGuid> GetProfilingGuid() const;
+ Optional<arm::pipe::ProfilingGuid> GetProfilingGuid() const;
/// Assignment operator
Event& operator=(const Event& other) = delete;
@@ -98,7 +98,7 @@ private:
Instruments m_Instruments;
/// Workload Profiling id
- Optional<profiling::ProfilingGuid> m_ProfilingGuid;
+ Optional<arm::pipe::ProfilingGuid> m_ProfilingGuid;
};
} // namespace armnn
diff --git a/src/armnn/RangeTracker.cpp b/src/armnn/RangeTracker.cpp
index ae756fbb9c..346c8bdd2e 100644
--- a/src/armnn/RangeTracker.cpp
+++ b/src/armnn/RangeTracker.cpp
@@ -26,7 +26,7 @@ void RangeTracker::SetRange(const armnn::IConnectableLayer* layer, unsigned int
ranges[outputIdx] = std::make_pair(min, max);
}
-RangeTracker::MinMaxRange RangeTracker::GetRange(armnn::LayerGuid guid, unsigned int idx) const
+RangeTracker::MinMaxRange RangeTracker::GetRange(LayerGuid guid, unsigned int idx) const
{
auto search = m_GuidToRangesMap.find(guid);
if (search == m_GuidToRangesMap.end())
diff --git a/src/armnn/Runtime.cpp b/src/armnn/Runtime.cpp
index 4998b3597e..640e5947e2 100644
--- a/src/armnn/Runtime.cpp
+++ b/src/armnn/Runtime.cpp
@@ -206,7 +206,7 @@ Status RuntimeImpl::LoadNetwork(NetworkId& networkIdOut,
if (m_ProfilingService.IsProfilingEnabled())
{
- m_ProfilingService.IncrementCounterValue(armnn::profiling::NETWORK_LOADS);
+ m_ProfilingService.IncrementCounterValue(arm::pipe::NETWORK_LOADS);
}
return Status::Success;
@@ -227,8 +227,8 @@ Status RuntimeImpl::UnloadNetwork(NetworkId networkId)
return Status::Failure;
}
- std::unique_ptr<profiling::TimelineUtilityMethods> timelineUtils =
- profiling::TimelineUtilityMethods::GetTimelineUtils(m_ProfilingService);
+ std::unique_ptr<arm::pipe::TimelineUtilityMethods> timelineUtils =
+ arm::pipe::TimelineUtilityMethods::GetTimelineUtils(m_ProfilingService);
{
std::lock_guard<std::mutex> lockGuard(m_Mutex);
@@ -238,9 +238,9 @@ Status RuntimeImpl::UnloadNetwork(NetworkId networkId)
auto search = m_LoadedNetworks.find(networkId);
if (search != m_LoadedNetworks.end())
{
- profiling::ProfilingGuid networkGuid = search->second->GetNetworkGuid();
+ arm::pipe::ProfilingGuid networkGuid = search->second->GetNetworkGuid();
timelineUtils->RecordEvent(networkGuid,
- profiling::LabelsAndEventClasses::ARMNN_PROFILING_EOL_EVENT_CLASS);
+ arm::pipe::LabelsAndEventClasses::ARMNN_PROFILING_EOL_EVENT_CLASS);
}
}
@@ -252,7 +252,7 @@ Status RuntimeImpl::UnloadNetwork(NetworkId networkId)
if (m_ProfilingService.IsProfilingEnabled())
{
- m_ProfilingService.IncrementCounterValue(armnn::profiling::NETWORK_UNLOADS);
+ m_ProfilingService.IncrementCounterValue(arm::pipe::NETWORK_UNLOADS);
}
}
@@ -280,7 +280,7 @@ const std::shared_ptr<IProfiler> RuntimeImpl::GetProfiler(NetworkId networkId) c
return nullptr;
}
-void RuntimeImpl::ReportStructure() // armnn::profiling::IProfilingService& profilingService as param
+void RuntimeImpl::ReportStructure() // arm::pipe::IProfilingService& profilingService as param
{
// No-op for the time being, but this may be useful in future to have the profilingService available
// if (profilingService.IsProfilingEnabled()){}
@@ -311,7 +311,7 @@ RuntimeImpl::RuntimeImpl(const IRuntime::CreationOptions& options)
// goes through the backend registry
LoadDynamicBackends(options.m_DynamicBackendsPath);
- BackendIdSet supportedBackends;
+ armnn::BackendIdSet supportedBackends;
for (const auto& id : BackendRegistryInstance().GetBackendIds())
{
// Store backend contexts for the supported ones
@@ -473,9 +473,9 @@ RuntimeImpl::RuntimeImpl(const IRuntime::CreationOptions& options)
}
supportedBackends.emplace(id);
- unique_ptr<armnn::profiling::IBackendProfiling> profilingIface =
- std::make_unique<armnn::profiling::BackendProfiling>(armnn::profiling::BackendProfiling(
- ConvertExternalProfilingOptions(options.m_ProfilingOptions), m_ProfilingService, id));
+ unique_ptr<arm::pipe::IBackendProfiling> profilingIface =
+ std::make_unique<arm::pipe::BackendProfiling>(arm::pipe::BackendProfiling(
+ arm::pipe::ConvertExternalProfilingOptions(options.m_ProfilingOptions), m_ProfilingService, id));
// Backends may also provide a profiling context. Ask for it now.
auto profilingContext = backend->CreateBackendProfilingContext(options, profilingIface);
@@ -494,7 +494,8 @@ RuntimeImpl::RuntimeImpl(const IRuntime::CreationOptions& options)
BackendRegistryInstance().SetProfilingService(m_ProfilingService);
// pass configuration info to the profiling service
- m_ProfilingService.ConfigureProfilingService(ConvertExternalProfilingOptions(options.m_ProfilingOptions));
+ m_ProfilingService.ConfigureProfilingService(
+ arm::pipe::ConvertExternalProfilingOptions(options.m_ProfilingOptions));
if (options.m_ProfilingOptions.m_EnableProfiling)
{
// try to wait for the profiling service to initialise
@@ -734,7 +735,7 @@ void RuntimeImpl::LoadDynamicBackends(const std::string& overrideBackendPath)
m_DynamicBackends = DynamicBackendUtils::CreateDynamicBackends(sharedObjects);
// Register the dynamic backends in the backend registry
- BackendIdSet registeredBackendIds = DynamicBackendUtils::RegisterDynamicBackends(m_DynamicBackends);
+ armnn::BackendIdSet registeredBackendIds = DynamicBackendUtils::RegisterDynamicBackends(m_DynamicBackends);
// Add the registered dynamic backend ids to the list of supported backends
m_DeviceSpec.AddSupportedBackends(registeredBackendIds, true);
diff --git a/src/armnn/Runtime.hpp b/src/armnn/Runtime.hpp
index bd37013ad0..1ac0d21b63 100644
--- a/src/armnn/Runtime.hpp
+++ b/src/armnn/Runtime.hpp
@@ -25,7 +25,7 @@
namespace armnn
{
using LoadedNetworks = std::unordered_map<NetworkId, std::unique_ptr<LoadedNetwork>>;
-using IReportStructure = profiling::IReportStructure;
+using IReportStructure = arm::pipe::IReportStructure;
struct RuntimeImpl final : public IReportStructure
{
@@ -115,7 +115,7 @@ public:
private:
friend void RuntimeLoadedNetworksReserve(RuntimeImpl* runtime); // See RuntimeTests.cpp
- friend profiling::ProfilingService& GetProfilingService(RuntimeImpl* runtime); // See RuntimeTests.cpp
+ friend arm::pipe::ProfilingService& GetProfilingService(RuntimeImpl* runtime); // See RuntimeTests.cpp
int GenerateNetworkId();
@@ -150,7 +150,7 @@ private:
std::vector<DynamicBackendPtr> m_DynamicBackends;
/// Profiling Service Instance
- profiling::ProfilingService m_ProfilingService;
+ arm::pipe::ProfilingService m_ProfilingService;
};
} // namespace armnn
diff --git a/src/armnn/test/NetworkTests.cpp b/src/armnn/test/NetworkTests.cpp
index 66dbb4ee16..c64c0a0d40 100644
--- a/src/armnn/test/NetworkTests.cpp
+++ b/src/armnn/test/NetworkTests.cpp
@@ -32,9 +32,9 @@ TEST_SUITE("Network")
TEST_CASE("LayerGuids")
{
armnn::NetworkImpl net;
- armnn::LayerGuid inputId = net.AddInputLayer(0)->GetGuid();
- armnn::LayerGuid addId = net.AddAdditionLayer()->GetGuid();
- armnn::LayerGuid outputId = net.AddOutputLayer(0)->GetGuid();
+ LayerGuid inputId = net.AddInputLayer(0)->GetGuid();
+ LayerGuid addId = net.AddAdditionLayer()->GetGuid();
+ LayerGuid outputId = net.AddOutputLayer(0)->GetGuid();
CHECK(inputId != addId);
CHECK(addId != outputId);
diff --git a/src/armnn/test/RuntimeTests.cpp b/src/armnn/test/RuntimeTests.cpp
index e175c12b57..73e36eaf56 100644
--- a/src/armnn/test/RuntimeTests.cpp
+++ b/src/armnn/test/RuntimeTests.cpp
@@ -539,7 +539,7 @@ TEST_CASE("IVGCVSW_1929_QuantizedSoftmaxIssue")
errMessages);
FAIL("An exception should have been thrown");
}
- catch (const InvalidArgumentException&)
+ catch (const armnn::InvalidArgumentException&)
{
// Different exceptions are thrown on different backends
}
@@ -627,8 +627,8 @@ TEST_CASE("ProfilingDisable")
armnn::NetworkId netId;
CHECK(runtime.LoadNetwork(netId, std::move(optNet)) == Status::Success);
- profiling::ProfilingServiceRuntimeHelper profilingServiceHelper(GetProfilingService(&runtime));
- profiling::BufferManager& bufferManager = profilingServiceHelper.GetProfilingBufferManager();
+ ProfilingServiceRuntimeHelper profilingServiceHelper(GetProfilingService(&runtime));
+ BufferManager& bufferManager = profilingServiceHelper.GetProfilingBufferManager();
auto readableBuffer = bufferManager.GetReadableBuffer();
// Profiling is not enabled, the post-optimisation structure should not be created
@@ -638,7 +638,7 @@ TEST_CASE("ProfilingDisable")
TEST_CASE("ProfilingEnableCpuRef")
{
using namespace armnn;
- using namespace armnn::profiling;
+ using namespace arm::pipe;
// Create runtime in which the test will run
armnn::IRuntime::CreationOptions options;
@@ -649,7 +649,7 @@ TEST_CASE("ProfilingEnableCpuRef")
GetProfilingService(&runtime).ResetExternalProfilingOptions(
ConvertExternalProfilingOptions(options.m_ProfilingOptions), false);
- profiling::ProfilingServiceRuntimeHelper profilingServiceHelper(GetProfilingService(&runtime));
+ ProfilingServiceRuntimeHelper profilingServiceHelper(GetProfilingService(&runtime));
profilingServiceHelper.ForceTransitionToState(ProfilingState::NotConnected);
profilingServiceHelper.ForceTransitionToState(ProfilingState::WaitingForAck);
profilingServiceHelper.ForceTransitionToState(ProfilingState::Active);
@@ -680,7 +680,7 @@ TEST_CASE("ProfilingEnableCpuRef")
armnn::NetworkId netId;
CHECK(runtime.LoadNetwork(netId, std::move(optNet)) == Status::Success);
- profiling::BufferManager& bufferManager = profilingServiceHelper.GetProfilingBufferManager();
+ BufferManager& bufferManager = profilingServiceHelper.GetProfilingBufferManager();
auto readableBuffer = bufferManager.GetReadableBuffer();
// Profiling is enabled, the post-optimisation structure should be created