ArmNN 20.05
LoadedNetwork Class Reference

#include <LoadedNetwork.hpp>

Public Types

using WorkloadQueue = std::vector< std::unique_ptr< IWorkload > >
 

Public Member Functions

 ~LoadedNetwork ()
 
TensorInfo GetInputTensorInfo (LayerBindingId layerId) const
 
TensorInfo GetOutputTensorInfo (LayerBindingId layerId) const
 
Status EnqueueWorkload (const InputTensors &inputTensors, const OutputTensors &outputTensors)
 
const std::shared_ptr< Profiler > & GetProfiler () const
 
void FreeWorkingMemory ()
 
void RegisterDebugCallback (const DebugCallbackFunction &func)
 
void SendNetworkStructure ()
 

Static Public Member Functions

static std::unique_ptr< LoadedNetwork > MakeLoadedNetwork (std::unique_ptr< OptimizedNetwork > net, std::string &errorMessage, const INetworkProperties &networkProperties, profiling::ProfilingService &profilingService)
 

Detailed Description

Definition at line 34 of file LoadedNetwork.hpp.

Member Typedef Documentation

◆ WorkloadQueue

using WorkloadQueue = std::vector< std::unique_ptr<IWorkload> >

Definition at line 37 of file LoadedNetwork.hpp.

Constructor & Destructor Documentation

◆ ~LoadedNetwork()

~LoadedNetwork ( )
inline

Definition at line 38 of file LoadedNetwork.hpp.

References armnn::GetInputTensorInfo(), and profilingService.

Member Function Documentation

◆ EnqueueWorkload()

Status EnqueueWorkload ( const InputTensors & inputTensors,
const OutputTensors & outputTensors 
)

Definition at line 451 of file LoadedNetwork.cpp.

References ARMNN_ASSERT_MSG, ARMNN_LOG, LabelsAndEventClasses::ARMNN_PROFILING_EOL_EVENT_CLASS, LabelsAndEventClasses::ARMNN_PROFILING_SOL_EVENT_CLASS, ARMNN_SCOPED_HEAP_PROFILING, ARMNN_SCOPED_PROFILING_EVENT, armnn::CheckFlag(), armnn::Failure, ITensorHandle::GetImportFlags(), Graph::GetInputLayers(), Layer::GetInputSlots(), Graph::GetNumInputs(), Layer::GetNumInputSlots(), Graph::GetNumLayers(), Graph::GetNumOutputs(), Layer::GetNumOutputSlots(), Layer::GetOutputHandler(), Graph::GetOutputLayers(), TimelineUtilityMethods::GetTimelineUtils(), Layer::GetType(), ITensorHandle::Import(), LabelsAndEventClasses::INFERENCE_GUID, armnn::info, armnn::Input, QueueDescriptor::m_Inputs, WorkloadInfo::m_InputTensorInfos, QueueDescriptor::m_Outputs, WorkloadInfo::m_OutputTensorInfos, armnn::Malloc, ITensorHandle::Map(), armnn::Output, armnn::profiling::RetentionLink, armnn::Success, armnn::Undefined, ITensorHandle::Unmap(), and armnn::warning.

Referenced by Runtime::EnqueueWorkload().

453 {
455 
456  const Graph& graph = m_OptimizedNetwork->GetGraph();
457 
458  // Walk graph to determine the order of execution.
459  if (graph.GetNumLayers() < 2)
460  {
461  ARMNN_LOG(warning) << "IRuntime::EnqueueWorkload()::Less than two nodes in graph";
462  return Status::Failure;
463  }
464 
465  // Data that must be kept alive for the entire execution of the workload.
466  WorkloadData workloadData(inputTensors, outputTensors);
467 
468  if (graph.GetNumInputs() != inputTensors.size())
469  {
470  throw InvalidArgumentException("Number of inputs provided does not match network.");
471  }
472 
473  // For each input to the network, call EnqueueInput with the data passed by the user.
474  m_InputQueue.clear();
475  m_InputQueue.reserve(graph.GetNumInputs());
476  for (const BindableLayer* inputLayer : graph.GetInputLayers())
477  {
478  const TensorPin& pin = workloadData.GetInputTensorPin(inputLayer->GetBindingId());
479  EnqueueInput(*inputLayer, pin.GetTensorHandle(), pin.GetTensorInfo());
480  }
481 
482  // For each output to the network, call EnqueueOutput with the data passed by the user.
483  m_OutputQueue.clear();
484  m_OutputQueue.reserve(graph.GetNumOutputs());
485  for (const BindableLayer* outputLayer : graph.GetOutputLayers())
486  {
487  const TensorPin& pin = workloadData.GetOutputTensorPin(outputLayer->GetBindingId());
488  EnqueueOutput(*outputLayer, pin.GetTensorHandle(), pin.GetTensorInfo());
489  }
490 
491  std::unique_ptr<TimelineUtilityMethods> timelineUtils =
492  TimelineUtilityMethods::GetTimelineUtils(m_ProfilingService);
493  ProfilingGuid inferenceGuid = m_ProfilingService.GetNextGuid();
494  if (timelineUtils)
495  {
496  // Add inference timeline trace if profiling is enabled.
497  ProfilingGuid networkGuid = m_OptimizedNetwork->GetGuid();
498  timelineUtils->CreateTypedEntity(inferenceGuid, LabelsAndEventClasses::INFERENCE_GUID);
499  timelineUtils->CreateRelationship(ProfilingRelationshipType::RetentionLink, networkGuid, inferenceGuid);
500  timelineUtils->RecordEvent(inferenceGuid, LabelsAndEventClasses::ARMNN_PROFILING_SOL_EVENT_CLASS);
501  }
502 
503  bool executionSucceeded = true;
504 
505  {
506  if (m_ProfilingService.IsProfilingEnabled())
507  {
508  m_ProfilingService.IncrementCounterValue(armnn::profiling::INFERENCES_RUN);
509  }
511  ARMNN_SCOPED_HEAP_PROFILING("Executing");
512  executionSucceeded = Execute(timelineUtils, inferenceGuid);
513  }
514 
515  if (timelineUtils)
516  {
517  // Add end of life of the inference timeline if profiling is enabled.
518  timelineUtils->RecordEvent(inferenceGuid, LabelsAndEventClasses::ARMNN_PROFILING_EOL_EVENT_CLASS);
519  timelineUtils->Commit();
520  }
521  return executionSucceeded ? Status::Success : Status::Failure;
522 }
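A minimal usage sketch of this call, not taken from the ArmNN sources: it assumes a prepared loadedNetwork, a single input and a single output both bound to id 0, and a float32 network. InputTensors and OutputTensors are the public armnn typedefs pairing a LayerBindingId with a ConstTensor or Tensor respectively.

// Illustrative only: the binding ids, buffer names and data type are assumptions.
armnn::TensorInfo inputInfo  = loadedNetwork->GetInputTensorInfo(0);
armnn::TensorInfo outputInfo = loadedNetwork->GetOutputTensorInfo(0);

std::vector<float> inputData(inputInfo.GetNumElements());
std::vector<float> outputData(outputInfo.GetNumElements());

// Pair each binding id with read-only (ConstTensor) or writable (Tensor) user memory.
armnn::InputTensors  inputTensors  { { 0, armnn::ConstTensor(inputInfo, inputData.data()) } };
armnn::OutputTensors outputTensors { { 0, armnn::Tensor(outputInfo, outputData.data()) } };

if (loadedNetwork->EnqueueWorkload(inputTensors, outputTensors) != armnn::Status::Success)
{
    ARMNN_LOG(error) << "Inference failed";
}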

◆ FreeWorkingMemory()

void FreeWorkingMemory ( )

Definition at line 702 of file LoadedNetwork.cpp.

References ARMNN_LOG, and armnn::error.

Referenced by Runtime::EnqueueWorkload().

703 {
704  std::lock_guard<std::mutex> lockGuard(m_WorkingMemMutex);
705  if (!m_IsWorkingMemAllocated)
706  {
707  return;
708  }
709  // Informs the memory managers to release memory in their respective memory groups
710  for (auto&& workloadFactory : m_WorkloadFactories)
711  {
712  IBackendInternal::IMemoryManagerSharedPtr memoryManager = workloadFactory.second.second;
713  if (memoryManager)
714  {
715  memoryManager->Release();
716  }
717  }
718  m_TensorHandleFactoryRegistry.ReleaseMemory();
719  m_IsWorkingMemAllocated = false;
720 }
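A short sketch of the intended pattern, reusing the illustrative loadedNetwork and tensor containers from the EnqueueWorkload() example above; the m_IsWorkingMemAllocated guard suggests working memory is simply re-acquired on the next run.

// Illustrative only: release backend scratch memory between bursts of inferences.
loadedNetwork->EnqueueWorkload(inputTensors, outputTensors);
loadedNetwork->FreeWorkingMemory();   // early-outs if nothing is currently allocated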

◆ GetInputTensorInfo()

TensorInfo GetInputTensorInfo ( LayerBindingId  layerId) const

Definition at line 302 of file LoadedNetwork.cpp.

References ARMNN_ASSERT_MSG.

Referenced by Runtime::GetInputTensorInfo().

303 {
304  for (auto&& inputLayer : m_OptimizedNetwork->GetGraph().GetInputLayers())
305  {
306  ARMNN_ASSERT_MSG(inputLayer->GetNumOutputSlots() == 1, "Input layer should have exactly 1 output slot");
307  if (inputLayer->GetBindingId() == layerId)
308  {
309  return inputLayer->GetOutputSlot(0).GetTensorInfo();
310  }
311  }
312 
313  throw InvalidArgumentException(boost::str(boost::format("No input layer is associated with id %1%") % layerId));
314 }

◆ GetOutputTensorInfo()

TensorInfo GetOutputTensorInfo ( LayerBindingId  layerId) const

Definition at line 316 of file LoadedNetwork.cpp.

References ARMNN_ASSERT_MSG, CHECK_LOCATION, BackendId::Get(), Layer::GetBackendId(), Layer::GetNameStr(), armnn::IgnoreUnused(), armnn::info, and IWorkloadFactory::IsLayerSupported().

Referenced by Runtime::GetOutputTensorInfo().

317 {
318  for (auto&& outputLayer : m_OptimizedNetwork->GetGraph().GetOutputLayers())
319  {
320  ARMNN_ASSERT_MSG(outputLayer->GetNumInputSlots() == 1, "Output layer should have exactly 1 input slot");
321  ARMNN_ASSERT_MSG(outputLayer->GetInputSlot(0).GetConnection(), "Input slot on Output layer must be connected");
322  if (outputLayer->GetBindingId() == layerId)
323  {
324  return outputLayer->GetInputSlot(0).GetConnection()->GetTensorInfo();
325  }
326  }
327 
328  throw InvalidArgumentException(boost::str(boost::format("No output layer is associated with id %1%") % layerId));
329 }
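Both getters throw InvalidArgumentException when no layer is bound to the given id, so the lookup can double as validation of user-supplied ids and buffers. A sketch, with hypothetical inputBindingId and userInput names:

// Illustrative only: the binding id and buffer are placeholders.
try
{
    const armnn::TensorInfo inputInfo = loadedNetwork->GetInputTensorInfo(inputBindingId);
    if (userInput.size() * sizeof(float) != inputInfo.GetNumBytes())
    {
        ARMNN_LOG(error) << "User buffer does not match the input tensor size";
    }
}
catch (const armnn::InvalidArgumentException& e)
{
    ARMNN_LOG(error) << "No input layer bound to this id: " << e.what();
}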

◆ GetProfiler()

const std::shared_ptr<Profiler>& GetProfiler ( ) const
inline

Definition at line 53 of file LoadedNetwork.hpp.

53 { return m_Profiler; }
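A sketch of wrapping an inference with the per-network profiler; it assumes the internal Profiler exposes the EnableProfiling() and Print() methods of the public IProfiler interface it implements.

// Illustrative only: EnableProfiling()/Print() are assumed to be available on Profiler.
loadedNetwork->GetProfiler()->EnableProfiling(true);
loadedNetwork->EnqueueWorkload(inputTensors, outputTensors);
loadedNetwork->GetProfiler()->Print(std::cout);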

◆ MakeLoadedNetwork()

std::unique_ptr< LoadedNetwork > MakeLoadedNetwork ( std::unique_ptr< OptimizedNetwork > net,
std::string & errorMessage,
const INetworkProperties & networkProperties,
profiling::ProfilingService & profilingService 
)
static

Definition at line 83 of file LoadedNetwork.cpp.

References ARMNN_LOG, armnn::BackendRegistryInstance(), IBackendInternal::CreateMemoryManager(), IBackendInternal::CreateWorkloadFactory(), armnn::error, BackendRegistry::GetFactory(), ProfilerManager::GetInstance(), TimelineUtilityMethods::GetTimelineUtils(), armnn::Input, INetworkProperties::m_ExportEnabled, INetworkProperties::m_ImportEnabled, LabelsAndEventClasses::NETWORK_GUID, armnn::Output, profilingService, ProfilerManager::RegisterProfiler(), IBackendInternal::RegisterTensorHandleFactories(), IBackendInternal::SupportsTensorAllocatorAPI(), and Graph::TopologicalSort().

Referenced by Runtime::LoadNetwork().

87 {
88  std::unique_ptr<LoadedNetwork> loadedNetwork;
89 
90  auto Fail = [&](const std::exception& error) -> std::unique_ptr<LoadedNetwork>
91  {
92  errorMessage = ToErrorMessage("An error occurred when preparing the network workloads: ", error);
93  ARMNN_LOG(error) << errorMessage;
94 
95  return std::unique_ptr<LoadedNetwork>();
96  };
97 
98  try
99  {
100  loadedNetwork.reset(new LoadedNetwork(std::move(net), networkProperties, profilingService));
101  }
102  catch (const armnn::RuntimeException& error)
103  {
104  return Fail(error);
105  }
106  catch (const armnn::Exception& error)
107  {
108  return Fail(error);
109  }
110  catch (const std::runtime_error& error)
111  {
112  return Fail(error);
113  }
114 
115  return loadedNetwork;
116 }
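As the listing shows, construction failures are caught, converted into errorMessage and reported by returning a null pointer. A sketch of the calling side, assuming optNet, networkProperties and profilingService already exist (as they do in Runtime::LoadNetwork):

// Illustrative only: errors surface via the errorMessage out-parameter and a
// null return value rather than an exception.
std::string errorMessage;
std::unique_ptr<armnn::LoadedNetwork> loadedNetwork =
    armnn::LoadedNetwork::MakeLoadedNetwork(std::move(optNet), errorMessage,
                                            networkProperties, profilingService);
if (loadedNetwork == nullptr)
{
    ARMNN_LOG(error) << "Failed to load network: " << errorMessage;
}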

◆ RegisterDebugCallback()

void RegisterDebugCallback ( const DebugCallbackFunction & func)

Definition at line 792 of file LoadedNetwork.cpp.

Referenced by Runtime::RegisterDebugCallback().

793 {
794  for (auto&& workloadPtr: m_WorkloadQueue)
795  {
796  workloadPtr.get()->RegisterDebugCallback(func);
797  }
798 }
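A sketch of registering a callback that is forwarded to every queued workload; it assumes DebugCallbackFunction has the usual armnn signature of (LayerGuid, unsigned int slotIndex, ITensorHandle*).

// Illustrative only: the callback signature is an assumption; the body just
// logs how many elements each debugged output slot holds.
armnn::DebugCallbackFunction callback =
    [](armnn::LayerGuid /*layerGuid*/, unsigned int slotIndex, armnn::ITensorHandle* tensorHandle)
    {
        ARMNN_LOG(info) << "Output slot " << slotIndex << " holds "
                        << tensorHandle->GetShape().GetNumElements() << " elements";
    };
loadedNetwork->RegisterDebugCallback(callback);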

◆ SendNetworkStructure()

void SendNetworkStructure ( )

Definition at line 265 of file LoadedNetwork.cpp.

References TimelineUtilityMethods::GetTimelineUtils(), armnn::Input, LabelsAndEventClasses::NETWORK_GUID, armnn::Output, and Graph::TopologicalSort().

266 {
267  Graph& order = m_OptimizedNetwork->GetGraph().TopologicalSort();
268  ProfilingGuid networkGuid = m_OptimizedNetwork->GetGuid();
269 
270  std::unique_ptr<TimelineUtilityMethods> timelineUtils =
271  TimelineUtilityMethods::GetTimelineUtils(m_ProfilingService);
272 
273  timelineUtils->CreateTypedEntity(networkGuid, LabelsAndEventClasses::NETWORK_GUID);
274 
275  for (auto&& layer : order)
276  {
277  // Add layer to the post-optimisation network structure
278  AddLayerStructure(timelineUtils, *layer, networkGuid);
279  switch (layer->GetType())
280  {
281  case LayerType::Input:
282  case LayerType::Output:
283  {
284  // Inputs and outputs are treated in a special way - see EnqueueInput() and EnqueueOutput().
285  break;
286  }
287  default:
288  {
289  for (auto& workload : m_WorkloadQueue)
290  {
291  // Add workload to the post-optimisation network structure
292  AddWorkloadStructure(timelineUtils, workload, *layer);
293  }
294  break;
295  }
296  }
297  }
298  // Commit to send the post-optimisation network structure
299  timelineUtils->Commit();
300 }

The documentation for this class was generated from the following files:
LoadedNetwork.hpp
LoadedNetwork.cpp