ArmNN 22.02
RuntimeImpl Struct Reference (final)

#include <Runtime.hpp>

Inheritance diagram for RuntimeImpl: RuntimeImpl inherits IReportStructure.

Public Member Functions

Status LoadNetwork (NetworkId &networkIdOut, IOptimizedNetworkPtr network)
 Loads a complete network into the Runtime. More...
 
Status LoadNetwork (NetworkId &networkIdOut, IOptimizedNetworkPtr network, std::string &errorMessage)
 Load a complete network into the IRuntime. More...
 
Status LoadNetwork (NetworkId &networkIdOut, IOptimizedNetworkPtr network, std::string &errorMessage, const INetworkProperties &networkProperties)
 
armnn::TensorInfo GetInputTensorInfo (NetworkId networkId, LayerBindingId layerId) const
 
armnn::TensorInfo GetOutputTensorInfo (NetworkId networkId, LayerBindingId layerId) const
 
std::vector< ImportedInputId > ImportInputs (NetworkId networkId, const InputTensors &inputTensors, MemorySource forceImportMemorySource=MemorySource::Undefined)
 
std::vector< ImportedOutputId > ImportOutputs (NetworkId networkId, const OutputTensors &outputTensors, MemorySource forceImportMemorySource=MemorySource::Undefined)
 
void ClearImportedInputs (NetworkId networkId, const std::vector< ImportedInputId > inputIds)
 
void ClearImportedOutputs (NetworkId networkId, const std::vector< ImportedOutputId > outputIds)
 
Status EnqueueWorkload (NetworkId networkId, const InputTensors &inputTensors, const OutputTensors &outputTensors, std::vector< ImportedInputId > preImportedInputIds={}, std::vector< ImportedOutputId > preImportedOutputIds={})
 
Status Execute (IWorkingMemHandle &workingMemHandle, const InputTensors &inputTensors, const OutputTensors &outputTensors, std::vector< ImportedInputId > preImportedInputs, std::vector< ImportedOutputId > preImportedOutputs)
 This is an experimental function. More...
 
Status UnloadNetwork (NetworkId networkId)
 Unloads a network from the Runtime. More...
 
const IDeviceSpec & GetDeviceSpec () const
 
const std::shared_ptr< IProfiler > GetProfiler (NetworkId networkId) const
 Gets the profiler corresponding to the given network id. More...
 
std::unique_ptr< IWorkingMemHandle > CreateWorkingMemHandle (NetworkId networkId)
 Create a new unique WorkingMemHandle object. More...
 
void RegisterDebugCallback (NetworkId networkId, const DebugCallbackFunction &func)
 Registers a callback function to debug layers performing custom computations on intermediate tensors. More...
 
 RuntimeImpl (const IRuntime::CreationOptions &options)
 Creates a runtime for workload execution. More...
 
 ~RuntimeImpl ()
 
void ReportStructure ()
 
- Public Member Functions inherited from IReportStructure
virtual ~IReportStructure ()
 

Friends

void RuntimeLoadedNetworksReserve (RuntimeImpl *runtime)
 
profiling::ProfilingService & GetProfilingService (RuntimeImpl *runtime)
 

Detailed Description

Definition at line 30 of file Runtime.hpp.

Constructor & Destructor Documentation

◆ RuntimeImpl()

RuntimeImpl ( const IRuntime::CreationOptions &  options)

Creates a runtime for workload execution.
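In the public API this constructor is reached through armnn::IRuntime::Create, which wraps a RuntimeImpl. Below is a minimal sketch of configuring CreationOptions; the specific option values and the commented-out path are illustrative only, not requirements stated on this page.

    #include <armnn/IRuntime.hpp>

    int main()
    {
        // Options mirrored by IRuntime::CreationOptions (see the members referenced above).
        armnn::IRuntime::CreationOptions options;
        options.m_ProfilingOptions.m_EnableProfiling = true;  // timeline reporting below requires this
        options.m_ProfilingOptions.m_TimelineEnabled = true;  // otherwise the constructor throws
        // options.m_DynamicBackendsPath = "/path/to/dynamic/backends";  // illustrative path only

        // IRuntime::Create forwards the options to the RuntimeImpl constructor.
        armnn::IRuntimePtr runtime = armnn::IRuntime::Create(options);
        return runtime ? 0 : 1;
    }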

Definition at line 297 of file Runtime.cpp.

References ProfilingService::AddBackendProfilingContext(), DeviceSpec::AddSupportedBackends(), ARMNN_ASSERT, ARMNN_LOG, ARMNN_VERSION, armnn::BackendRegistryInstance(), DeviceSpec::ClearDynamicBackends(), ProfilingService::ConfigureProfilingService(), DynamicBackendUtils::DeregisterDynamicBackends(), armnn::DmaBufProtected, armnn::error, DeviceSpec::GetDynamicBackends(), BackendRegistry::GetFactory(), armnn::GetMemBlockStrategyTypeName(), armnn::GetMemoryOptimizerStrategy(), armnn::GetTimeDuration(), armnn::GetTimeNow(), armnn::HasCapability(), armnn::info, IRuntime::CreationOptions::m_BackendOptions, IRuntime::CreationOptions::m_CustomAllocatorMap, IRuntime::CreationOptions::m_DynamicBackendsPath, IRuntime::CreationOptions::ExternalProfilingOptions::m_EnableProfiling, IRuntime::CreationOptions::m_MemoryOptimizerStrategyMap, IRuntime::CreationOptions::m_ProfilingOptions, IRuntime::CreationOptions::m_ProtectedMode, IRuntime::CreationOptions::ExternalProfilingOptions::m_TimelineEnabled, armnn::ParseOptions(), BackendRegistry::RegisterAllocator(), BackendRegistry::RegisterMemoryOptimizerStrategy(), BackendRegistry::SetProfilingService(), ProfilingService::WaitForProfilingServiceActivation(), and armnn::warning.

Referenced by RuntimeImpl::GetDeviceSpec().

298  : m_NetworkIdCounter(0),
299  m_ProfilingService(*this)
300 {
301  const auto start_time = armnn::GetTimeNow();
302  ARMNN_LOG(info) << "ArmNN v" << ARMNN_VERSION;
303  if (options.m_ProfilingOptions.m_TimelineEnabled && !options.m_ProfilingOptions.m_EnableProfiling)
304  {
305  throw RuntimeException(
306  "It is not possible to enable timeline reporting without profiling being enabled");
307  }
308 
309  // Load any available/compatible dynamic backend before the runtime
310  // goes through the backend registry
311  LoadDynamicBackends(options.m_DynamicBackendsPath);
312 
313  BackendIdSet supportedBackends;
314  for (const auto& id : BackendRegistryInstance().GetBackendIds())
315  {
316  // Store backend contexts for the supported ones
317  try {
318  auto factoryFun = BackendRegistryInstance().GetFactory(id);
319  ARMNN_ASSERT(factoryFun != nullptr);
320  auto backend = factoryFun();
321  ARMNN_ASSERT(backend != nullptr);
322  ARMNN_ASSERT(backend.get() != nullptr);
323 
324  auto customAllocatorMapIterator = options.m_CustomAllocatorMap.find(id);
325  if (customAllocatorMapIterator != options.m_CustomAllocatorMap.end() &&
326  customAllocatorMapIterator->second == nullptr)
327  {
328  // We need to manually clean up the dynamic backends before throwing an exception.
329  DynamicBackendUtils::DeregisterDynamicBackends(m_DeviceSpec.GetDynamicBackends());
330  m_DeviceSpec.ClearDynamicBackends();
331  throw armnn::Exception("Allocator associated with id " + id.Get() + " is null");
332  }
333 
334  // If the runtime is created in protected mode only add backends that support this mode
335  if (options.m_ProtectedMode)
336  {
337  // check if backend supports ProtectedMode
339  BackendCapability protectedContentCapability {"ProtectedContentAllocation", true};
340  if (!HasCapability(protectedContentCapability, id))
341  {
342  // Protected Content Allocation is not supported by the backend
343  // backend should not be registered
344  ARMNN_LOG(warning) << "Backend "
345  << id
346  << " is not registered as does not support protected content allocation.";
347  continue;
348  }
349  // The user is responsible to provide a custom memory allocator which allows to allocate
350  // protected memory
351  if (customAllocatorMapIterator != options.m_CustomAllocatorMap.end())
352  {
353  std::string err;
354  if (customAllocatorMapIterator->second->GetMemorySourceType()
355  == armnn::MemorySource::DmaBufProtected)
356  {
357  if (!backend->UseCustomMemoryAllocator(customAllocatorMapIterator->second, err))
358  {
359  ARMNN_LOG(error) << "The backend "
360  << id
361  << " reported an error when entering protected mode. Backend won't be"
362  << " used. ErrorMsg: " << err;
363  continue;
364  }
365  // No errors so register the Custom Allocator with the BackendRegistry
366  BackendRegistryInstance().RegisterAllocator(id, customAllocatorMapIterator->second);
367  }
368  else
369  {
370  ARMNN_LOG(error) << "The CustomAllocator provided with the runtime options doesn't support "
371  "protected memory. Protected mode can't be activated. The backend "
372  << id
373  << " is not going to be used. MemorySource must be MemorySource::DmaBufProtected";
374  continue;
375  }
376  }
377  else
378  {
379  ARMNN_LOG(error) << "Protected mode can't be activated for backend: "
380  << id
381  << " no custom allocator was provided to the runtime options.";
382  continue;
383  }
384  }
385  else
386  {
387  // If a custom memory allocator is provided make the backend use that instead of the default
388  if (customAllocatorMapIterator != options.m_CustomAllocatorMap.end())
389  {
390  std::string err;
391  if (!backend->UseCustomMemoryAllocator(customAllocatorMapIterator->second, err))
392  {
393  ARMNN_LOG(error) << "The backend "
394  << id
395  << " reported an error when trying to use the provided custom allocator."
396  " Backend won't be used."
397  << " ErrorMsg: " << err;
398  continue;
399  }
400  // No errors so register the Custom Allocator with the BackendRegistry
401  BackendRegistryInstance().RegisterAllocator(id, customAllocatorMapIterator->second);
402  }
403  }
404 
405  // check if custom memory optimizer strategy map is set
406  if (!options.m_MemoryOptimizerStrategyMap.empty())
407  {
408  auto customMemoryOptimizerStrategyMapIterator = options.m_MemoryOptimizerStrategyMap.find(id);
409  // if a memory optimizer strategy is provided make the backend use that instead of the default
410  if (customMemoryOptimizerStrategyMapIterator != options.m_MemoryOptimizerStrategyMap.end())
411  {
412  // no errors.. register the memory optimizer strategy with the BackendRegistry
413  BackendRegistryInstance().RegisterMemoryOptimizerStrategy(
414  id, customMemoryOptimizerStrategyMapIterator->second);
415 
416  ARMNN_LOG(info) << "MemoryOptimizerStrategy "
417  << customMemoryOptimizerStrategyMapIterator->second->GetName()
418  << " set for the backend " << id << ".";
419  }
420  }
421  else
422  {
423  // check if to use one of the existing memory optimizer strategies is set
424  std::string memoryOptimizerStrategyName = "";
425  ParseOptions(options.m_BackendOptions, id, [&](std::string name, const BackendOptions::Var& value)
426  {
427  if (name == "MemoryOptimizerStrategy")
428  {
429  memoryOptimizerStrategyName = ParseStringBackendOption(value, "");
430  }
431  });
432  if (memoryOptimizerStrategyName != "")
433  {
434  std::shared_ptr<IMemoryOptimizerStrategy> strategy =
435  GetMemoryOptimizerStrategy(memoryOptimizerStrategyName);
436 
437  if (!strategy)
438  {
439  ARMNN_LOG(warning) << "MemoryOptimizerStrategy: " << memoryOptimizerStrategyName
440  << " was not found.";
441  }
442  else
443  {
443  {
445  auto strategyType = GetMemBlockStrategyTypeName(strategy->GetMemBlockStrategyType());
446  BackendCapability memOptimizeStrategyCapability {strategyType, true};
447  if (HasCapability(memOptimizeStrategyCapability, id))
448  {
449  BackendRegistryInstance().RegisterMemoryOptimizerStrategy(id, strategy);
450 
451  ARMNN_LOG(info) << "MemoryOptimizerStrategy: "
452  << memoryOptimizerStrategyName << " set for the backend " << id << ".";
453  }
454  else
455  {
456  ARMNN_LOG(warning) << "Backend "
457  << id
458  << " does not have multi-axis packing capability and cannot support"
459  << "MemoryOptimizerStrategy: " << memoryOptimizerStrategyName << ".";
460  }
461  }
462  }
463  }
464 
465  auto context = backend->CreateBackendContext(options);
466 
467  // backends are allowed to return nullptrs if they
468  // don't wish to create a backend specific context
469  if (context)
470  {
471  m_BackendContexts.emplace(std::make_pair(id, std::move(context)));
472  }
473  supportedBackends.emplace(id);
474 
475  unique_ptr<armnn::profiling::IBackendProfiling> profilingIface =
476  std::make_unique<armnn::profiling::BackendProfiling>(armnn::profiling::BackendProfiling(
477  options, m_ProfilingService, id));
478 
479  // Backends may also provide a profiling context. Ask for it now.
480  auto profilingContext = backend->CreateBackendProfilingContext(options, profilingIface);
481  // Backends that don't support profiling will return a null profiling context.
482  if (profilingContext)
483  {
484  // Pass the context onto the profiling service.
485  m_ProfilingService.AddBackendProfilingContext(id, profilingContext);
486  }
487  }
488  catch (const BackendUnavailableException&)
489  {
490  // Ignore backends which are unavailable
491  }
492  }
493 
494  BackendRegistryInstance().SetProfilingService(m_ProfilingService);
495  // pass configuration info to the profiling service
496  m_ProfilingService.ConfigureProfilingService(options.m_ProfilingOptions);
497  if (options.m_ProfilingOptions.m_EnableProfiling)
498  {
499  // try to wait for the profiling service to initialise
500  m_ProfilingService.WaitForProfilingServiceActivation(3000);
501  }
502 
503  m_DeviceSpec.AddSupportedBackends(supportedBackends);
504 
505  ARMNN_LOG(info) << "Initialization time: " << std::setprecision(2)
506  << std::fixed << armnn::GetTimeDuration(start_time).count() << " ms.";
507 }

◆ ~RuntimeImpl()

~RuntimeImpl ( )

Definition at line 509 of file Runtime.cpp.

References ARMNN_LOG, armnn::BackendRegistryInstance(), DeviceSpec::ClearDynamicBackends(), DynamicBackendUtils::DeregisterDynamicBackends(), DeviceSpec::GetDynamicBackends(), armnn::GetTimeDuration(), armnn::GetTimeNow(), armnn::info, BackendRegistry::SetProfilingService(), and RuntimeImpl::UnloadNetwork().

Referenced by RuntimeImpl::GetDeviceSpec().

510 {
511  const auto startTime = armnn::GetTimeNow();
512  std::vector<int> networkIDs;
513  try
514  {
515  // Coverity fix: The following code may throw an exception of type std::length_error.
516  std::transform(m_LoadedNetworks.begin(), m_LoadedNetworks.end(),
517  std::back_inserter(networkIDs),
518  [](const auto &pair) { return pair.first; });
519  }
520  catch (const std::exception& e)
521  {
522  // Coverity fix: BOOST_LOG_TRIVIAL (typically used to report errors) may throw an
523  // exception of type std::length_error.
524  // Using stderr instead in this context as there is no point in nesting try-catch blocks here.
525  std::cerr << "WARNING: An error has occurred when getting the IDs of the networks to unload: " << e.what()
526  << "\nSome of the loaded networks may not be unloaded" << std::endl;
527  }
528  // We then proceed to unload all the networks which IDs have been appended to the list
529  // up to the point the exception was thrown (if any).
530 
531  for (auto networkID : networkIDs)
532  {
533  try
534  {
535  // Coverity fix: UnloadNetwork() may throw an exception of type std::length_error,
536  // boost::log::v2s_mt_posix::odr_violation or boost::log::v2s_mt_posix::system_error
537  UnloadNetwork(networkID);
538  }
539  catch (const std::exception& e)
540  {
541  // Coverity fix: BOOST_LOG_TRIVIAL (typically used to report errors) may throw an
542  // exception of type std::length_error.
543  // Using stderr instead in this context as there is no point in nesting try-catch blocks here.
544  std::cerr << "WARNING: An error has occurred when unloading network " << networkID << ": " << e.what()
545  << std::endl;
546  }
547  }
548 
549  // Clear all dynamic backends.
550  DynamicBackendUtils::DeregisterDynamicBackends(m_DeviceSpec.GetDynamicBackends());
551  m_DeviceSpec.ClearDynamicBackends();
552  m_BackendContexts.clear();
553 
554  BackendRegistryInstance().SetProfilingService(armnn::EmptyOptional());
555  ARMNN_LOG(info) << "Shutdown time: " << std::setprecision(2)
556  << std::fixed << armnn::GetTimeDuration(startTime).count() << " ms.";
557 }

Member Function Documentation

◆ ClearImportedInputs()

void ClearImportedInputs ( NetworkId  networkId,
const std::vector< ImportedInputId >  inputIds 
)

Definition at line 587 of file Runtime.cpp.

References LoadedNetwork::ClearImportedInputs().

588 {
589  return GetLoadedNetworkPtr(networkId)->ClearImportedInputs(inputIds);
590 }

◆ ClearImportedOutputs()

void ClearImportedOutputs ( NetworkId  networkId,
const std::vector< ImportedOutputId >  outputIds 
)

Definition at line 591 of file Runtime.cpp.

References LoadedNetwork::ClearImportedOutputs().

592 {
593  return GetLoadedNetworkPtr(networkId)->ClearImportedOutputs(outputIds);
594 }

◆ CreateWorkingMemHandle()

std::unique_ptr< IWorkingMemHandle > CreateWorkingMemHandle ( NetworkId  networkId)

Create a new unique WorkingMemHandle object.

Create multiple handles if you wish to have overlapped Execution by calling this function from different threads.
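As an illustration, the sketch below creates one handle per worker thread. It assumes the network was loaded with asynchronous execution enabled via INetworkProperties, and that the 22.02 public headers armnn/IRuntime.hpp and armnn/IWorkingMemHandle.hpp are available.

    #include <armnn/IRuntime.hpp>
    #include <armnn/IWorkingMemHandle.hpp>
    #include <memory>
    #include <vector>

    // Sketch: one IWorkingMemHandle per worker thread enables overlapped Execute() calls.
    std::vector<std::unique_ptr<armnn::IWorkingMemHandle>>
    MakeWorkingMemHandles(armnn::IRuntime& runtime, armnn::NetworkId netId, size_t numThreads)
    {
        std::vector<std::unique_ptr<armnn::IWorkingMemHandle>> handles;
        for (size_t i = 0; i < numThreads; ++i)
        {
            // Returns nullptr if the network is unknown or async execution is not enabled.
            handles.push_back(runtime.CreateWorkingMemHandle(netId));
        }
        return handles;
    }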

Definition at line 687 of file Runtime.cpp.

References ARMNN_LOG, ARMNN_SCOPED_PROFILING_EVENT, LoadedNetwork::CreateWorkingMemHandle(), armnn::error, LoadedNetwork::FreeWorkingMemory(), ProfilerManager::GetInstance(), LoadedNetwork::GetProfiler(), LoadedNetwork::IsAsyncEnabled(), ProfilerManager::RegisterProfiler(), and armnn::Undefined.

Referenced by RuntimeImpl::GetDeviceSpec().

688 {
689  LoadedNetwork* loadedNetwork = GetLoadedNetworkPtr(networkId);
690 
691  if (!loadedNetwork)
692  {
693  ARMNN_LOG(error) << "A Network with an id of " << networkId << " does not exist.";
694  return nullptr;
695  }
696  if (!loadedNetwork->IsAsyncEnabled())
697  {
698  ARMNN_LOG(error) << "Network " << networkId << " is not async enabled.";
699  return nullptr;
700  }
701  ProfilerManager::GetInstance().RegisterProfiler(loadedNetwork->GetProfiler().get());
702 
703  ARMNN_SCOPED_PROFILING_EVENT(Compute::Undefined, "CreateWorkingMemHandle");
704 
705  static thread_local NetworkId lastId = networkId;
706  if (lastId != networkId)
707  {
708  LoadedNetworkFuncSafe(lastId, [](LoadedNetwork* network)
709  {
710  network->FreeWorkingMemory();
711  });
712  }
713  lastId=networkId;
714 
715  return loadedNetwork->CreateWorkingMemHandle(networkId);
716 }

◆ EnqueueWorkload()

Status EnqueueWorkload ( NetworkId  networkId,
const InputTensors &  inputTensors,
const OutputTensors &  outputTensors,
std::vector< ImportedInputId >  preImportedInputIds = {},
std::vector< ImportedOutputId >  preImportedOutputIds = {} 
)
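InputTensors and OutputTensors are vectors of (LayerBindingId, tensor) pairs bound to host buffers. A sketch of one synchronous run through the public runtime interface follows; binding id 0 and float data are assumptions for illustration.

    #include <armnn/ArmNN.hpp>
    #include <vector>

    // Sketch: run one synchronous inference on an already loaded network.
    armnn::Status RunOnce(armnn::IRuntime& runtime, armnn::NetworkId netId,
                          const std::vector<float>& inputData, std::vector<float>& outputData)
    {
        using namespace armnn;
        InputTensors inputs
        {
            { 0, ConstTensor(runtime.GetInputTensorInfo(netId, 0), inputData.data()) }
        };
        OutputTensors outputs
        {
            { 0, Tensor(runtime.GetOutputTensorInfo(netId, 0), outputData.data()) }
        };
        // Blocks until the workload has finished on the chosen backend.
        return runtime.EnqueueWorkload(netId, inputs, outputs);
    }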

Definition at line 596 of file Runtime.cpp.

References ARMNN_LOG, ARMNN_SCOPED_PROFILING_EVENT, LoadedNetwork::EnqueueWorkload(), armnn::error, armnn::Failure, LoadedNetwork::FreeWorkingMemory(), ProfilerManager::GetInstance(), LoadedNetwork::GetProfiler(), armnn::GetTimeDuration(), armnn::GetTimeNow(), armnn::info, LoadedNetwork::IsAsyncEnabled(), ProfilerManager::RegisterProfiler(), and armnn::Undefined.

Referenced by TEST_SUITE(), and VerifyPostOptimisationStructureTestImpl().

601 {
602  const auto startTime = armnn::GetTimeNow();
603 
604  LoadedNetwork* loadedNetwork = GetLoadedNetworkPtr(networkId);
605 
606  if (!loadedNetwork)
607  {
608  ARMNN_LOG(error) << "A Network with an id of " << networkId << " does not exist.";
609  return Status::Failure;
610  }
611  if (loadedNetwork->IsAsyncEnabled())
612  {
613  ARMNN_LOG(error) << "Network " << networkId << " is async enabled.";
614  return Status::Failure;
615  }
617 
619 
620  static thread_local NetworkId lastId = networkId;
621  if (lastId != networkId)
622  {
623  LoadedNetworkFuncSafe(lastId, [](LoadedNetwork* network)
624  {
625  network->FreeWorkingMemory();
626  });
627  }
628  lastId=networkId;
629 
630  auto status = loadedNetwork->EnqueueWorkload(inputTensors, outputTensors,
631  preImportedInputIds, preImportedOutputIds);
632 
633 
634  // Check if we imported, if not there's no need to call the After EnqueueWorkload events
635  if (!preImportedInputIds.empty() || !preImportedOutputIds.empty())
636  {
637  // Call After EnqueueWorkload events
638  for (auto&& context : m_BackendContexts)
639  {
640  context.second->AfterEnqueueWorkload(networkId);
641  }
642  }
643  ARMNN_LOG(info) << "Execution time: " << std::setprecision(2)
644  << std::fixed << armnn::GetTimeDuration(startTime).count() << " ms.";
645  return status;
646 }

◆ Execute()

Status Execute ( IWorkingMemHandle &  workingMemHandle,
const InputTensors &  inputTensors,
const OutputTensors &  outputTensors,
std::vector< ImportedInputId >  preImportedInputs,
std::vector< ImportedOutputId >  preImportedOutputs 
)

This is an experimental function.

Evaluates a network using the input supplied in inputTensors and fills the results into outputTensors. This function performs a thread-safe execution of the network and returns once execution is complete. It will block until this and any other thread using the same IWorkingMemHandle object have completed.
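A sketch of overlapped execution from two threads, each with its own working memory handle; it assumes the network was loaded with asynchronous execution enabled and that input/output tensors have been prepared per thread.

    #include <armnn/IRuntime.hpp>
    #include <armnn/IWorkingMemHandle.hpp>
    #include <thread>

    // Sketch: two threads execute the same loaded network concurrently.
    void ExecuteOnTwoThreads(armnn::IRuntime& runtime, armnn::NetworkId netId,
                             const armnn::InputTensors& in0, const armnn::OutputTensors& out0,
                             const armnn::InputTensors& in1, const armnn::OutputTensors& out1)
    {
        auto handle0 = runtime.CreateWorkingMemHandle(netId);  // one handle per thread
        auto handle1 = runtime.CreateWorkingMemHandle(netId);

        std::thread t0([&] { runtime.Execute(*handle0, in0, out0); });
        std::thread t1([&] { runtime.Execute(*handle1, in1, out1); });
        t0.join();
        t1.join();
    }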

Definition at line 648 of file Runtime.cpp.

References ARMNN_LOG, ARMNN_SCOPED_PROFILING_EVENT, armnn::error, LoadedNetwork::Execute(), armnn::Failure, ProfilerManager::GetInstance(), IWorkingMemHandle::GetNetworkId(), LoadedNetwork::GetProfiler(), armnn::GetTimeDuration(), armnn::GetTimeNow(), armnn::info, LoadedNetwork::IsAsyncEnabled(), ProfilerManager::RegisterProfiler(), and armnn::Undefined.

653 {
654  const auto startTime = armnn::GetTimeNow();
655 
656  NetworkId networkId = iWorkingMemHandle.GetNetworkId();
657  LoadedNetwork* loadedNetwork = GetLoadedNetworkPtr(networkId);
658 
659  if (!loadedNetwork)
660  {
661  ARMNN_LOG(error) << "A Network with an id of " << networkId << " does not exist.";
662  return Status::Failure;
663  }
664  if (!loadedNetwork->IsAsyncEnabled())
665  {
666  ARMNN_LOG(error) << "Attempting execute " << networkId << " when it is not async enabled.";
667  return Status::Failure;
668  }
669  ProfilerManager::GetInstance().RegisterProfiler(loadedNetwork->GetProfiler().get());
670 
671  ARMNN_SCOPED_PROFILING_EVENT(Compute::Undefined, "Execute");
672 
673  auto status = loadedNetwork->Execute(inputTensors,
674  outputTensors,
675  iWorkingMemHandle,
676  preImportedInputs,
677  preImportedOutputs);
678 
679  ARMNN_LOG(info) << "Execution time: " << std::setprecision(2)
680  << std::fixed << armnn::GetTimeDuration(startTime).count() << " ms.";
681 
682  return status;
683 }

◆ GetDeviceSpec()

const IDeviceSpec & GetDeviceSpec ( ) const

Returns the device specification, listing the backends registered with this runtime.

◆ GetInputTensorInfo()

TensorInfo GetInputTensorInfo ( NetworkId  networkId,
LayerBindingId  layerId 
) const

Definition at line 565 of file Runtime.cpp.

References LoadedNetwork::GetInputTensorInfo().

Referenced by TEST_SUITE(), and VerifyPostOptimisationStructureTestImpl().

566 {
567  return GetLoadedNetworkPtr(networkId)->GetInputTensorInfo(layerId);
568 }

◆ GetOutputTensorInfo()

TensorInfo GetOutputTensorInfo ( NetworkId  networkId,
LayerBindingId  layerId 
) const

Definition at line 570 of file Runtime.cpp.

References LoadedNetwork::GetOutputTensorInfo().

Referenced by TEST_SUITE(), and VerifyPostOptimisationStructureTestImpl().

571 {
572  return GetLoadedNetworkPtr(networkId)->GetOutputTensorInfo(layerId);
573 }
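These getters are typically used to size host buffers before binding them. A short sketch, with binding id 0 assumed for both ends:

    #include <armnn/ArmNN.hpp>
    #include <vector>

    // Sketch: allocate correctly sized host buffers for binding id 0.
    void AllocateBuffers(armnn::IRuntime& runtime, armnn::NetworkId netId,
                         std::vector<float>& inputStorage, std::vector<float>& outputStorage)
    {
        const armnn::TensorInfo inputInfo  = runtime.GetInputTensorInfo(netId, 0);
        const armnn::TensorInfo outputInfo = runtime.GetOutputTensorInfo(netId, 0);
        inputStorage.resize(inputInfo.GetNumElements());   // element count of the bound input layer
        outputStorage.resize(outputInfo.GetNumElements());
    }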

◆ GetProfiler()

const std::shared_ptr< IProfiler > GetProfiler ( NetworkId  networkId) const

Gets the profiler corresponding to the given network id.

Parameters
networkId  The id of the network for which to get the profiler.
Returns
A pointer to the requested profiler, or nullptr if not found.
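A sketch of retrieving and printing the per-network profiler after an inference; it assumes profiling was enabled and uses the public IProfiler interface (armnn/IProfiler.hpp).

    #include <armnn/IRuntime.hpp>
    #include <armnn/IProfiler.hpp>
    #include <iostream>

    // Sketch: dump the per-network profiler after running a workload.
    void PrintProfile(armnn::IRuntime& runtime, armnn::NetworkId netId)
    {
        std::shared_ptr<armnn::IProfiler> profiler = runtime.GetProfiler(netId);
        if (profiler)                      // nullptr if the network id is unknown
        {
            profiler->Print(std::cout);    // writes the collected profiling events to the stream
        }
    }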

Definition at line 270 of file Runtime.cpp.

Referenced by RuntimeImpl::GetDeviceSpec().

271 {
272  auto it = m_LoadedNetworks.find(networkId);
273  if (it != m_LoadedNetworks.end())
274  {
275  auto& loadedNetwork = it->second;
276  return loadedNetwork->GetProfiler();
277  }
278 
279  return nullptr;
280 }

◆ ImportInputs()

std::vector< ImportedInputId > ImportInputs ( NetworkId  networkId,
const InputTensors &  inputTensors,
MemorySource  forceImportMemorySource = MemorySource::Undefined 
)

Definition at line 575 of file Runtime.cpp.

References LoadedNetwork::ImportInputs().

577 {
578  return GetLoadedNetworkPtr(networkId)->ImportInputs(inputTensors, forceImportMemorySource);
579 }
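A sketch of pre-importing input buffers so repeated runs can reuse them. This assumes the corresponding IRuntime wrappers forward the overloads documented here for RuntimeImpl; the backend's import requirements (memory source, alignment, network properties) are not covered on this page.

    #include <armnn/ArmNN.hpp>
    #include <vector>

    // Sketch: import input buffers once, then reuse the imported ids across runs.
    void RunWithPreImportedInputs(armnn::IRuntime& runtime, armnn::NetworkId netId,
                                  const armnn::InputTensors& inputs,
                                  const armnn::OutputTensors& outputs)
    {
        using namespace armnn;
        std::vector<ImportedInputId> importedIds = runtime.ImportInputs(netId, inputs);

        // Imported tensors are passed by id rather than being re-bound on each call.
        runtime.EnqueueWorkload(netId, InputTensors{}, outputs, importedIds);

        // Release the imported buffers when they are no longer needed.
        runtime.ClearImportedInputs(netId, importedIds);
    }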

◆ ImportOutputs()

std::vector< ImportedOutputId > ImportOutputs ( NetworkId  networkId,
const OutputTensors &  outputTensors,
MemorySource  forceImportMemorySource = MemorySource::Undefined 
)

Definition at line 581 of file Runtime.cpp.

References LoadedNetwork::ImportOutputs().

583 {
584  return GetLoadedNetworkPtr(networkId)->ImportOutputs(outputTensors, forceImportMemorySource);
585 }

◆ LoadNetwork() [1/3]

Status LoadNetwork ( NetworkId &  networkIdOut,
IOptimizedNetworkPtr  network 
)

Loads a complete network into the Runtime.

Parameters
[out]  networkIdOut  Unique identifier for the network is returned in this reference.
[in]   network       Complete network to load into the Runtime. The runtime takes ownership of the network once passed in.
Returns
armnn::Status
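A sketch of the usual flow through the public API that wraps this struct: build a small network, optimize it for a backend, then load it. The softmax layer, shapes, and CpuRef backend are illustrative choices, not requirements.

    #include <armnn/ArmNN.hpp>
    #include <vector>

    int main()
    {
        using namespace armnn;

        IRuntimePtr runtime = IRuntime::Create(IRuntime::CreationOptions());

        // Build a trivial network: input -> softmax -> output (illustrative).
        INetworkPtr network = INetwork::Create();
        IConnectableLayer* input   = network->AddInputLayer(0);
        IConnectableLayer* softmax = network->AddSoftmaxLayer(SoftmaxDescriptor(), "softmax");
        IConnectableLayer* output  = network->AddOutputLayer(0);
        input->GetOutputSlot(0).Connect(softmax->GetInputSlot(0));
        softmax->GetOutputSlot(0).Connect(output->GetInputSlot(0));

        TensorInfo info({1, 4}, DataType::Float32);
        input->GetOutputSlot(0).SetTensorInfo(info);
        softmax->GetOutputSlot(0).SetTensorInfo(info);

        // Optimize for a backend, then hand ownership of the optimized network to the runtime.
        std::vector<BackendId> backends = { Compute::CpuRef };
        IOptimizedNetworkPtr optNet = Optimize(*network, backends, runtime->GetDeviceSpec());
        NetworkId netId = 0;
        Status status = runtime->LoadNetwork(netId, std::move(optNet));
        return status == Status::Success ? 0 : 1;
    }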

Definition at line 150 of file Runtime.cpp.

References IRuntime::LoadNetwork().

Referenced by TEST_SUITE(), and VerifyPostOptimisationStructureTestImpl().

151 {
152  std::string ignoredErrorMessage;
153  return LoadNetwork(networkIdOut, std::move(inNetwork), ignoredErrorMessage);
154 }

◆ LoadNetwork() [2/3]

Status LoadNetwork ( NetworkId &  networkIdOut,
IOptimizedNetworkPtr  network,
std::string &  errorMessage 
)

Load a complete network into the IRuntime.

Parameters
[out]  networkIdOut  Unique identifier for the network is returned in this reference.
[in]   network       Complete network to load into the IRuntime.
[out]  errorMessage  Error message if there were any errors. The runtime takes ownership of the network once passed in.
Returns
armnn::Status

Definition at line 156 of file Runtime.cpp.

References IRuntime::LoadNetwork(), and armnn::Undefined.

159 {
160  INetworkProperties networkProperties(
161  false, MemorySource::Undefined, MemorySource::Undefined);
162  return LoadNetwork(networkIdOut, std::move(inNetwork), errorMessage, networkProperties);
163 }

◆ LoadNetwork() [3/3]

Status LoadNetwork ( NetworkId &  networkIdOut,
IOptimizedNetworkPtr  network,
std::string &  errorMessage,
const INetworkProperties &  networkProperties 
)
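This overload takes INetworkProperties, for example to enable asynchronous execution so that CreateWorkingMemHandle/Execute can be used later. A sketch follows; the three leading INetworkProperties constructor arguments (asyncEnabled, input MemorySource, output MemorySource) are assumed from the call made in the two-argument overload above.

    #include <armnn/ArmNN.hpp>
    #include <string>
    #include <utility>

    // Sketch: load an optimized network with asynchronous execution enabled.
    armnn::Status LoadAsync(armnn::IRuntime& runtime,
                            armnn::IOptimizedNetworkPtr optNet,
                            armnn::NetworkId& netIdOut,
                            std::string& errorMessage)
    {
        using namespace armnn;
        INetworkProperties properties(true,                       // asyncEnabled
                                      MemorySource::Undefined,    // input memory source
                                      MemorySource::Undefined);   // output memory source
        return runtime.LoadNetwork(netIdOut, std::move(optNet), errorMessage, properties);
    }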

Definition at line 165 of file Runtime.cpp.

References armnn::Failure, ProfilerManager::GetInstance(), LoadedNetwork::MakeLoadedNetwork(), ProfilerManager::RegisterProfiler(), and armnn::Success.

169 {
170  // Register the profiler
171  auto profiler = inNetwork->GetProfiler();
172  ProfilerManager::GetInstance().RegisterProfiler(profiler.get());
173 
174  IOptimizedNetwork* rawNetwork = inNetwork.release();
175 
176  networkIdOut = GenerateNetworkId();
177 
178  for (auto&& context : m_BackendContexts)
179  {
180  context.second->BeforeLoadNetwork(networkIdOut);
181  }
182 
183  unique_ptr<LoadedNetwork> loadedNetwork = LoadedNetwork::MakeLoadedNetwork(
184  std::unique_ptr<IOptimizedNetwork>(rawNetwork),
185  errorMessage,
186  networkProperties,
187  m_ProfilingService);
188 
189  if (!loadedNetwork)
190  {
191  return Status::Failure;
192  }
193 
194  {
195  std::lock_guard<std::mutex> lockGuard(m_Mutex);
196 
197  // Stores the network
198  m_LoadedNetworks[networkIdOut] = std::move(loadedNetwork);
199  }
200 
201  for (auto&& context : m_BackendContexts)
202  {
203  context.second->AfterLoadNetwork(networkIdOut);
204  }
205 
206  if (m_ProfilingService.IsProfilingEnabled())
207  {
208  m_ProfilingService.IncrementCounterValue(armnn::profiling::NETWORK_LOADS);
209  }
210 
211  return Status::Success;
212 }

◆ RegisterDebugCallback()

void RegisterDebugCallback ( NetworkId  networkId,
const DebugCallbackFunction &  func 
)

Registers a callback function to debug layers performing custom computations on intermediate tensors.

Parameters
networkId  The id of the network to register the callback with.
func       Callback function to pass to the debug layer.
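A sketch of attaching a debug callback; DebugCallbackFunction receives the layer guid, the output slot index, and the intermediate tensor handle. The logging body here is illustrative only.

    #include <armnn/ArmNN.hpp>
    #include <iostream>

    // Sketch: log each intermediate tensor produced by debug-instrumented layers.
    void AttachDebugCallback(armnn::IRuntime& runtime, armnn::NetworkId netId)
    {
        // The callback fires once per instrumented layer output during execution.
        runtime.RegisterDebugCallback(netId,
            [](armnn::LayerGuid /*guid*/, unsigned int slotIndex, armnn::ITensorHandle* /*tensorHandle*/)
            {
                std::cout << "intermediate tensor produced on output slot " << slotIndex << std::endl;
            });
    }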

Definition at line 718 of file Runtime.cpp.

References DeviceSpec::AddSupportedBackends(), DynamicBackendUtils::CreateDynamicBackends(), DynamicBackendUtils::GetBackendPaths(), DynamicBackendUtils::GetSharedObjects(), LoadedNetwork::RegisterDebugCallback(), and DynamicBackendUtils::RegisterDynamicBackends().

Referenced by RuntimeImpl::GetDeviceSpec().

719 {
720  LoadedNetwork* loadedNetwork = GetLoadedNetworkPtr(networkId);
721  loadedNetwork->RegisterDebugCallback(func);
722 }

◆ ReportStructure()

void ReportStructure ( )
virtual

Implements IReportStructure.

Definition at line 282 of file Runtime.cpp.

Referenced by RuntimeImpl::GetDeviceSpec().

283 {
284  // No-op for the time being, but this may be useful in future to have the profilingService available
285  // if (profilingService.IsProfilingEnabled()){}
286 
287  LoadedNetworks::iterator it = m_LoadedNetworks.begin();
288  while (it != m_LoadedNetworks.end())
289  {
290  auto& loadedNetwork = it->second;
291  loadedNetwork->SendNetworkStructure();
292  // Increment the Iterator to point to next entry
293  it++;
294  }
295 }

◆ UnloadNetwork()

Status UnloadNetwork ( NetworkId  networkId)

Unloads a network from the Runtime.

At the moment this only removes the network from the m_Impl->m_Network. This might need more work in the future to be AndroidNN compliant.

Parameters
[in]  networkId  Unique identifier for the network to be unloaded. Generated in LoadNetwork().
Returns
armnn::Status
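A short sketch of the corresponding call through the public runtime interface:

    #include <armnn/IRuntime.hpp>
    #include <iostream>

    // Sketch: unload a previously loaded network and report failure.
    void Unload(armnn::IRuntime& runtime, armnn::NetworkId netId)
    {
        if (runtime.UnloadNetwork(netId) != armnn::Status::Success)
        {
            std::cerr << "Failed to unload network " << netId << std::endl;
        }
    }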

Definition at line 214 of file Runtime.cpp.

References ARMNN_LOG, armnn::debug, armnn::Failure, ProfilerManager::GetInstance(), TimelineUtilityMethods::GetTimelineUtils(), ProfilerManager::RegisterProfiler(), armnn::Success, and armnn::warning.

Referenced by TEST_SUITE(), and RuntimeImpl::~RuntimeImpl().

215 {
216  bool unloadOk = true;
217  for (auto&& context : m_BackendContexts)
218  {
219  unloadOk &= context.second->BeforeUnloadNetwork(networkId);
220  }
221 
222  if (!unloadOk)
223  {
224  ARMNN_LOG(warning) << "RuntimeImpl::UnloadNetwork(): failed to unload "
225  "network with ID:" << networkId << " because BeforeUnloadNetwork failed";
226  return Status::Failure;
227  }
228 
229  std::unique_ptr<profiling::TimelineUtilityMethods> timelineUtils =
230  TimelineUtilityMethods::GetTimelineUtils(m_ProfilingService);
231  {
232  std::lock_guard<std::mutex> lockGuard(m_Mutex);
233 
234  // If timeline recording is on mark the Network end of life
235  if (timelineUtils)
236  {
237  auto search = m_LoadedNetworks.find(networkId);
238  if (search != m_LoadedNetworks.end())
239  {
240  profiling::ProfilingGuid networkGuid = search->second->GetNetworkGuid();
241  timelineUtils->RecordEvent(networkGuid,
242  profiling::LabelsAndEventClasses::ARMNN_PROFILING_EOL_EVENT_CLASS);
243  }
244  }
245 
246  if (m_LoadedNetworks.erase(networkId) == 0)
247  {
248  ARMNN_LOG(warning) << "WARNING: RuntimeImpl::UnloadNetwork(): " << networkId << " not found!";
249  return Status::Failure;
250  }
251 
252  if (m_ProfilingService.IsProfilingEnabled())
253  {
254  m_ProfilingService.IncrementCounterValue(armnn::profiling::NETWORK_UNLOADS);
255  }
256  }
257 
258  for (auto&& context : m_BackendContexts)
259  {
260  context.second->AfterUnloadNetwork(networkId);
261  }
262 
263  // Unregister the profiler
264  ProfilerManager::GetInstance().RegisterProfiler(nullptr);
265 
266  ARMNN_LOG(debug) << "RuntimeImpl::UnloadNetwork(): Unloaded network with ID: " << networkId;
267  return Status::Success;
268 }

Friends And Related Function Documentation

◆ GetProfilingService

profiling::ProfilingService & GetProfilingService ( RuntimeImpl *  runtime)
friend

Definition at line 57 of file TestUtils.cpp.

Referenced by RuntimeImpl::GetDeviceSpec().

58 {
59  return runtime->m_ProfilingService;
60 }

◆ RuntimeLoadedNetworksReserve

void RuntimeLoadedNetworksReserve ( RuntimeImpl *  runtime)
friend

Definition at line 30 of file RuntimeTests.cpp.

Referenced by RuntimeImpl::GetDeviceSpec().

31 {
32  runtime->m_LoadedNetworks.reserve(1);
33 }

The documentation for this struct was generated from the following files:
Runtime.hpp
Runtime.cpp