ArmNN
 23.08
RuntimeImpl Struct Reference [final]

#include <Runtime.hpp>

Inheritance diagram for RuntimeImpl:
[legend]
Collaboration diagram for RuntimeImpl:
[legend]

Public Member Functions

Status LoadNetwork (NetworkId &networkIdOut, IOptimizedNetworkPtr network)
 Loads a complete network into the Runtime. More...
 
Status LoadNetwork (NetworkId &networkIdOut, IOptimizedNetworkPtr network, std::string &errorMessage)
 Load a complete network into the IRuntime. More...
 
Status LoadNetwork (NetworkId &networkIdOut, IOptimizedNetworkPtr network, std::string &errorMessage, const INetworkProperties &networkProperties)
 
armnn::TensorInfo GetInputTensorInfo (NetworkId networkId, LayerBindingId layerId) const
 
armnn::TensorInfo GetOutputTensorInfo (NetworkId networkId, LayerBindingId layerId) const
 
std::vector< ImportedInputId > ImportInputs (NetworkId networkId, const InputTensors &inputTensors, MemorySource forceImportMemorySource)
 
std::vector< ImportedOutputId > ImportOutputs (NetworkId networkId, const OutputTensors &outputTensors, MemorySource forceImportMemorySource)
 
void ClearImportedInputs (NetworkId networkId, const std::vector< ImportedInputId > inputIds)
 
void ClearImportedOutputs (NetworkId networkId, const std::vector< ImportedOutputId > outputIds)
 
Status EnqueueWorkload (NetworkId networkId, const InputTensors &inputTensors, const OutputTensors &outputTensors, std::vector< ImportedInputId > preImportedInputIds={}, std::vector< ImportedOutputId > preImportedOutputIds={})
 
Status Execute (IWorkingMemHandle &workingMemHandle, const InputTensors &inputTensors, const OutputTensors &outputTensors, std::vector< ImportedInputId > preImportedInputs, std::vector< ImportedOutputId > preImportedOutputs)
 This is an experimental function. More...
 
Status UnloadNetwork (NetworkId networkId)
 Unloads a network from the Runtime. More...
 
const IDeviceSpec & GetDeviceSpec () const
 
const std::shared_ptr< IProfiler > GetProfiler (NetworkId networkId) const
 Gets the profiler corresponding to the given network id. More...
 
std::unique_ptr< IWorkingMemHandle > CreateWorkingMemHandle (NetworkId networkId)
 Create a new unique WorkingMemHandle object. More...
 
void RegisterDebugCallback (NetworkId networkId, const DebugCallbackFunction &func)
 Registers a callback function to debug layers performing custom computations on intermediate tensors. More...
 
 RuntimeImpl (const IRuntime::CreationOptions &options)
 Creates a runtime for workload execution. More...
 
 ~RuntimeImpl ()
 
void ReportStructure (arm::pipe::IProfilingService &profilingService) override
 
void InitialiseProfilingService (arm::pipe::IProfilingService &profilingService) override
 

Friends

void RuntimeLoadedNetworksReserve (RuntimeImpl *runtime)
 
arm::pipe::IProfilingService & GetProfilingService (RuntimeImpl *runtime)
 

Detailed Description

Definition at line 30 of file Runtime.hpp.

Constructor & Destructor Documentation

◆ RuntimeImpl()

RuntimeImpl ( const IRuntime::CreationOptions & options)

Creates a runtime for workload execution.

Definition at line 323 of file Runtime.cpp.

324  : m_NetworkIdCounter(0)
325 {
326  m_ProfilingService = arm::pipe::IProfilingService::CreateProfilingService(
327  arm::pipe::MAX_ARMNN_COUNTER,
328  *this,
329  arm::pipe::ARMNN_SOFTWARE_INFO,
330  arm::pipe::ARMNN_SOFTWARE_VERSION,
331  arm::pipe::ARMNN_HARDWARE_VERSION,
332  *this);
333  const auto start_time = armnn::GetTimeNow();
334  ARMNN_LOG(info) << "ArmNN v" << ARMNN_VERSION;
336  {
337  throw RuntimeException(
338  "It is not possible to enable timeline reporting without profiling being enabled");
339  }
340 #if !defined(ARMNN_DISABLE_DYNAMIC_BACKENDS)
341  // Load any available/compatible dynamic backend before the runtime
342  // goes through the backend registry
343  LoadDynamicBackends(options.m_DynamicBackendsPath);
344 #endif
345  armnn::BackendIdSet supportedBackends;
346  for (const auto& id : BackendRegistryInstance().GetBackendIds())
347  {
348  // Store backend contexts for the supported ones
349  try {
350  auto factoryFun = BackendRegistryInstance().GetFactory(id);
351  ARMNN_ASSERT(factoryFun != nullptr);
352  auto backend = factoryFun();
353  ARMNN_ASSERT(backend != nullptr);
354  ARMNN_ASSERT(backend.get() != nullptr);
355 
356  auto customAllocatorMapIterator = options.m_CustomAllocatorMap.find(id);
357  if (customAllocatorMapIterator != options.m_CustomAllocatorMap.end() &&
358  customAllocatorMapIterator->second == nullptr)
359  {
360 #if !defined(ARMNN_DISABLE_DYNAMIC_BACKENDS)
361  // We need to manually clean up the dynamic backends before throwing an exception.
363  m_DeviceSpec.ClearDynamicBackends();
364 #endif
365  throw armnn::Exception("Allocator associated with id " + id.Get() + " is null");
366  }
367 
368  // If the runtime is created in protected mode only add backends that support this mode
369  if (options.m_ProtectedMode)
370  {
371  // check if backend supports ProtectedMode
373  BackendCapability protectedContentCapability {"ProtectedContentAllocation", true};
374  if (!HasMatchingCapability(protectedContentCapability, id))
375  {
376  // Protected Content Allocation is not supported by the backend
377  // backend should not be registered
378  ARMNN_LOG(warning) << "Backend "
379  << id
380  << " is not registered as does not support protected content allocation.";
381  continue;
382  }
383  // The user is responsible to provide a custom memory allocator which allows to allocate
384  // protected memory
385  if (customAllocatorMapIterator != options.m_CustomAllocatorMap.end())
386  {
387  std::string err;
388  if (customAllocatorMapIterator->second->GetMemorySourceType()
390  {
391  if (!backend->UseCustomMemoryAllocator(customAllocatorMapIterator->second, err))
392  {
393  ARMNN_LOG(error) << "The backend "
394  << id
395  << " reported an error when entering protected mode. Backend won't be"
396  << " used. ErrorMsg: " << err;
397  continue;
398  }
399  // No errors so register the Custom Allocator with the BackendRegistry
400  BackendRegistryInstance().RegisterAllocator(id, customAllocatorMapIterator->second);
401  m_AllocatorsAddedByThisRuntime.emplace(id);
402  }
403  else
404  {
405  ARMNN_LOG(error) << "The CustomAllocator provided with the runtime options doesn't support "
406  "protected memory. Protected mode can't be activated. The backend "
407  << id
408  << " is not going to be used. MemorySource must be MemorySource::DmaBufProtected";
409  continue;
410  }
411  }
412  else
413  {
414  ARMNN_LOG(error) << "Protected mode can't be activated for backend: "
415  << id
416  << " no custom allocator was provided to the runtime options.";
417  continue;
418  }
419  }
420  else
421  {
422  // If a custom memory allocator is provided make the backend use that instead of the default
423  if (customAllocatorMapIterator != options.m_CustomAllocatorMap.end())
424  {
425  std::string err;
426  if (!backend->UseCustomMemoryAllocator(customAllocatorMapIterator->second, err))
427  {
428  ARMNN_LOG(error) << "The backend "
429  << id
430  << " reported an error when trying to use the provided custom allocator."
431  " Backend won't be used."
432  << " ErrorMsg: " << err;
433  continue;
434  }
435  // No errors so register the Custom Allocator with the BackendRegistry
436  BackendRegistryInstance().RegisterAllocator(id, customAllocatorMapIterator->second);
437  m_AllocatorsAddedByThisRuntime.emplace(id);
438  }
439  }
440 
441  // check if custom memory optimizer strategy map is set
442  if (!options.m_MemoryOptimizerStrategyMap.empty())
443  {
444  auto customMemoryOptimizerStrategyMapIterator = options.m_MemoryOptimizerStrategyMap.find(id);
445  // if a memory optimizer strategy is provided make the backend use that instead of the default
446  if (customMemoryOptimizerStrategyMapIterator != options.m_MemoryOptimizerStrategyMap.end())
447  {
448  // no errors.. register the memory optimizer strategy with the BackendRegistry
450  id, customMemoryOptimizerStrategyMapIterator->second);
451 
452  ARMNN_LOG(info) << "MemoryOptimizerStrategy "
453  << customMemoryOptimizerStrategyMapIterator->second->GetName()
454  << " set for the backend " << id << ".";
455  }
456  }
457  else
458  {
459  // check if to use one of the existing memory optimizer strategies is set
460  std::string memoryOptimizerStrategyName = "";
461  ParseOptions(options.m_BackendOptions, id, [&](std::string name, const BackendOptions::Var& value)
462  {
463  if (name == "MemoryOptimizerStrategy")
464  {
465  memoryOptimizerStrategyName = ParseStringBackendOption(value, "");
466  }
467  });
468  if (memoryOptimizerStrategyName != "")
469  {
470  std::shared_ptr<IMemoryOptimizerStrategy> strategy =
471  GetMemoryOptimizerStrategy(memoryOptimizerStrategyName);
472 
473  if (!strategy)
474  {
475  ARMNN_LOG(warning) << "MemoryOptimizerStrategy: " << memoryOptimizerStrategyName
476  << " was not found.";
477  }
478  else
479  {
481  auto strategyType = GetMemBlockStrategyTypeName(strategy->GetMemBlockStrategyType());
482  BackendCapability memOptimizeStrategyCapability {strategyType, true};
483  if (HasMatchingCapability(memOptimizeStrategyCapability, id))
484  {
486 
487  ARMNN_LOG(info) << "MemoryOptimizerStrategy: "
488  << memoryOptimizerStrategyName << " set for the backend " << id << ".";
489  }
490  else
491  {
492  ARMNN_LOG(warning) << "Backend "
493  << id
494  << " does not have multi-axis packing capability and cannot support"
495  << "MemoryOptimizerStrategy: " << memoryOptimizerStrategyName << ".";
496  }
497  }
498  }
499  }
500 
501  auto context = backend->CreateBackendContext(options);
502 
503  // backends are allowed to return nullptrs if they
504  // don't wish to create a backend specific context
505  if (context)
506  {
507  m_BackendContexts.emplace(std::make_pair(id, std::move(context)));
508  }
509  supportedBackends.emplace(id);
510 
511  unique_ptr<arm::pipe::IBackendProfiling> profilingIface =
512  arm::pipe::IBackendProfiling::CreateBackendProfiling(
514  *m_ProfilingService.get(),
515  id.Get());
516 
517  // Backends may also provide a profiling context. Ask for it now.
518  auto profilingContext = backend->CreateBackendProfilingContext(options, profilingIface);
519  // Backends that don't support profiling will return a null profiling context.
520  if (profilingContext)
521  {
522  // Pass the context onto the profiling service.
523  m_ProfilingService->AddBackendProfilingContext(id, profilingContext);
524  }
525  }
526  catch (const BackendUnavailableException&)
527  {
528  // Ignore backends which are unavailable
529  }
530  }
531 
532  BackendRegistryInstance().SetProfilingService(*m_ProfilingService.get());
533  // pass configuration info to the profiling service
534  m_ProfilingService->ConfigureProfilingService(
537  {
538  // try to wait for the profiling service to initialise
539  m_ProfilingService->WaitForProfilingServiceActivation(3000);
540  }
541 
542  m_DeviceSpec.AddSupportedBackends(supportedBackends);
543 
544  ARMNN_LOG(info) << "Initialization time: " << std::setprecision(2)
545  << std::fixed << armnn::GetTimeDuration(start_time).count() << " ms.";
546 }

References ARMNN_ASSERT, ARMNN_LOG, ARMNN_VERSION, armnn::BackendRegistryInstance(), DeviceSpec::ClearDynamicBackends(), arm::pipe::ConvertExternalProfilingOptions(), DynamicBackendUtils::DeregisterDynamicBackends(), armnn::DmaBufProtected, armnn::error, DeviceSpec::GetDynamicBackends(), BackendRegistry::GetFactory(), armnn::GetMemBlockStrategyTypeName(), armnn::GetMemoryOptimizerStrategy(), armnn::GetTimeNow(), armnn::HasMatchingCapability(), armnn::info, IRuntime::CreationOptions::m_BackendOptions, IRuntime::CreationOptions::m_CustomAllocatorMap, IRuntime::CreationOptions::m_DynamicBackendsPath, IRuntime::CreationOptions::ExternalProfilingOptions::m_EnableProfiling, IRuntime::CreationOptions::m_MemoryOptimizerStrategyMap, IRuntime::CreationOptions::m_ProfilingOptions, IRuntime::CreationOptions::m_ProtectedMode, IRuntime::CreationOptions::ExternalProfilingOptions::m_TimelineEnabled, armnn::ParseOptions(), BackendRegistry::RegisterAllocator(), BackendRegistry::RegisterMemoryOptimizerStrategy(), and armnn::warning.

◆ ~RuntimeImpl()

Definition at line 548 of file Runtime.cpp.

549 {
550  const auto startTime = armnn::GetTimeNow();
551  std::vector<int> networkIDs;
552  try
553  {
554  // Coverity fix: The following code may throw an exception of type std::length_error.
555  std::transform(m_LoadedNetworks.begin(), m_LoadedNetworks.end(),
556  std::back_inserter(networkIDs),
557  [](const auto &pair) { return pair.first; });
558  }
559  catch (const std::exception& e)
560  {
561  // Coverity fix: BOOST_LOG_TRIVIAL (typically used to report errors) may throw an
562  // exception of type std::length_error.
563  // Using stderr instead in this context as there is no point in nesting try-catch blocks here.
564  std::cerr << "WARNING: An error has occurred when getting the IDs of the networks to unload: " << e.what()
565  << "\nSome of the loaded networks may not be unloaded" << std::endl;
566  }
567  // We then proceed to unload all the networks which IDs have been appended to the list
568  // up to the point the exception was thrown (if any).
569 
570  for (auto networkID : networkIDs)
571  {
572  try
573  {
574  // Coverity fix: UnloadNetwork() may throw an exception of type std::length_error,
575  // boost::log::v2s_mt_posix::odr_violation or boost::log::v2s_mt_posix::system_error
576  UnloadNetwork(networkID);
577  }
578  catch (const std::exception& e)
579  {
580  // Coverity fix: BOOST_LOG_TRIVIAL (typically used to report errors) may throw an
581  // exception of type std::length_error.
582  // Using stderr instead in this context as there is no point in nesting try-catch blocks here.
583  std::cerr << "WARNING: An error has occurred when unloading network " << networkID << ": " << e.what()
584  << std::endl;
585  }
586  }
587 #if !defined(ARMNN_DISABLE_DYNAMIC_BACKENDS)
588  // Clear all dynamic backends.
590  m_DeviceSpec.ClearDynamicBackends();
591 #endif
592  m_BackendContexts.clear();
593 
595  // Remove custom allocators that this runtime has added.
596  // Note: that as backends can be per process and there can be many instances of a runtime in a process an allocator
597  // may have been overwritten by another runtime.
598  for_each(m_AllocatorsAddedByThisRuntime.begin(), m_AllocatorsAddedByThisRuntime.end(),
599  [](BackendId id) {BackendRegistryInstance().DeregisterAllocator(id);});
600 
601  ARMNN_LOG(info) << "Shutdown time: " << std::setprecision(2)
602  << std::fixed << armnn::GetTimeDuration(startTime).count() << " ms.";
603 }

References ARMNN_LOG, armnn::BackendRegistryInstance(), DeviceSpec::ClearDynamicBackends(), DynamicBackendUtils::DeregisterDynamicBackends(), DeviceSpec::GetDynamicBackends(), armnn::GetTimeDuration(), armnn::GetTimeNow(), armnn::info, BackendRegistry::SetProfilingService(), and RuntimeImpl::UnloadNetwork().

Member Function Documentation

◆ ClearImportedInputs()

void ClearImportedInputs ( NetworkId  networkId,
const std::vector< ImportedInputId > inputIds 
)

Definition at line 635 of file Runtime.cpp.

636 {
637  return GetLoadedNetworkPtr(networkId)->ClearImportedInputs(inputIds);
638 }

References LoadedNetwork::ClearImportedInputs().

◆ ClearImportedOutputs()

void ClearImportedOutputs ( NetworkId  networkId,
const std::vector< ImportedOutputId > outputIds 
)

Definition at line 639 of file Runtime.cpp.

640 {
641  return GetLoadedNetworkPtr(networkId)->ClearImportedOutputs(outputIds);
642 }

References LoadedNetwork::ClearImportedOutputs().

◆ CreateWorkingMemHandle()

std::unique_ptr< IWorkingMemHandle > CreateWorkingMemHandle ( NetworkId  networkId)

Create a new unique WorkingMemHandle object.

Create multiple handles if you wish to have overlapped Execution by calling this function from different threads.

Definition at line 735 of file Runtime.cpp.

736 {
737  LoadedNetwork* loadedNetwork = GetLoadedNetworkPtr(networkId);
738 
739  if (!loadedNetwork)
740  {
741  ARMNN_LOG(error) << "A Network with an id of " << networkId << " does not exist.";
742  return nullptr;
743  }
744  if (!loadedNetwork->IsAsyncEnabled())
745  {
746  ARMNN_LOG(error) << "Network " << networkId << " is not async enabled.";
747  return nullptr;
748  }
750 
751  ARMNN_SCOPED_PROFILING_EVENT(Compute::Undefined, "CreateWorkingMemHandle");
752 
753  static thread_local NetworkId lastId = networkId;
754  if (lastId != networkId)
755  {
756  LoadedNetworkFuncSafe(lastId, [](LoadedNetwork* network)
757  {
758  network->FreeWorkingMemory();
759  });
760  }
761  lastId=networkId;
762 
763  return loadedNetwork->CreateWorkingMemHandle(networkId);
764 }

References ARMNN_LOG, ARMNN_SCOPED_PROFILING_EVENT, LoadedNetwork::CreateWorkingMemHandle(), armnn::error, LoadedNetwork::FreeWorkingMemory(), ProfilerManager::GetInstance(), LoadedNetwork::GetProfiler(), LoadedNetwork::IsAsyncEnabled(), ProfilerManager::RegisterProfiler(), and armnn::Undefined.

◆ EnqueueWorkload()

Status EnqueueWorkload ( NetworkId  networkId,
const InputTensors & inputTensors,
const OutputTensors & outputTensors,
std::vector< ImportedInputId > preImportedInputIds = {},
std::vector< ImportedOutputId > preImportedOutputIds = {} 
)

Definition at line 644 of file Runtime.cpp.

649 {
650  const auto startTime = armnn::GetTimeNow();
651 
652  LoadedNetwork* loadedNetwork = GetLoadedNetworkPtr(networkId);
653 
654  if (!loadedNetwork)
655  {
656  ARMNN_LOG(error) << "A Network with an id of " << networkId << " does not exist.";
657  return Status::Failure;
658  }
659  if (loadedNetwork->IsAsyncEnabled())
660  {
661  ARMNN_LOG(error) << "Network " << networkId << " is async enabled.";
662  return Status::Failure;
663  }
665 
667 
668  static thread_local NetworkId lastId = networkId;
669  if (lastId != networkId)
670  {
671  LoadedNetworkFuncSafe(lastId, [](LoadedNetwork* network)
672  {
673  network->FreeWorkingMemory();
674  });
675  }
676  lastId=networkId;
677 
678  auto status = loadedNetwork->EnqueueWorkload(inputTensors, outputTensors,
679  preImportedInputIds, preImportedOutputIds);
680 
681 
682  // Check if we imported, if not there's no need to call the After EnqueueWorkload events
683  if (!preImportedInputIds.empty() || !preImportedOutputIds.empty())
684  {
685  // Call After EnqueueWorkload events
686  for (auto&& context : m_BackendContexts)
687  {
688  context.second->AfterEnqueueWorkload(networkId);
689  }
690  }
691  ARMNN_LOG(info) << "Execution time: " << std::setprecision(2)
692  << std::fixed << armnn::GetTimeDuration(startTime).count() << " ms.";
693  return status;
694 }

References ARMNN_LOG, ARMNN_SCOPED_PROFILING_EVENT, LoadedNetwork::EnqueueWorkload(), armnn::error, armnn::Failure, LoadedNetwork::FreeWorkingMemory(), ProfilerManager::GetInstance(), LoadedNetwork::GetProfiler(), armnn::GetTimeDuration(), armnn::GetTimeNow(), armnn::info, LoadedNetwork::IsAsyncEnabled(), ProfilerManager::RegisterProfiler(), and armnn::Undefined.

◆ Execute()

Status Execute ( IWorkingMemHandle & workingMemHandle,
const InputTensors & inputTensors,
const OutputTensors & outputTensors,
std::vector< ImportedInputId > preImportedInputs,
std::vector< ImportedOutputId > preImportedOutputs 
)

This is an experimental function.

Evaluates a network using input in inputTensors and outputs filled into outputTensors. This function performs a thread safe execution of the network. Returns once execution is complete. Will block until this and any other thread using the same workingMem object completes.

Definition at line 696 of file Runtime.cpp.

701 {
702  const auto startTime = armnn::GetTimeNow();
703 
704  NetworkId networkId = iWorkingMemHandle.GetNetworkId();
705  LoadedNetwork* loadedNetwork = GetLoadedNetworkPtr(networkId);
706 
707  if (!loadedNetwork)
708  {
709  ARMNN_LOG(error) << "A Network with an id of " << networkId << " does not exist.";
710  return Status::Failure;
711  }
712  if (!loadedNetwork->IsAsyncEnabled())
713  {
714  ARMNN_LOG(error) << "Attempting execute " << networkId << " when it is not async enabled.";
715  return Status::Failure;
716  }
718 
720 
721  auto status = loadedNetwork->Execute(inputTensors,
722  outputTensors,
723  iWorkingMemHandle,
724  preImportedInputs,
725  preImportedOutputs);
726 
727  ARMNN_LOG(info) << "Execution time: " << std::setprecision(2)
728  << std::fixed << armnn::GetTimeDuration(startTime).count() << " ms.";
729 
730  return status;
731 }

References ARMNN_LOG, ARMNN_SCOPED_PROFILING_EVENT, armnn::error, LoadedNetwork::Execute(), armnn::Failure, ProfilerManager::GetInstance(), IWorkingMemHandle::GetNetworkId(), LoadedNetwork::GetProfiler(), armnn::GetTimeDuration(), armnn::GetTimeNow(), armnn::info, LoadedNetwork::IsAsyncEnabled(), ProfilerManager::RegisterProfiler(), and armnn::Undefined.

◆ GetDeviceSpec()

const IDeviceSpec& GetDeviceSpec ( ) const
inline

Definition at line 90 of file Runtime.hpp.

90 { return m_DeviceSpec; }

◆ GetInputTensorInfo()

TensorInfo GetInputTensorInfo ( NetworkId  networkId,
LayerBindingId  layerId 
) const

Definition at line 613 of file Runtime.cpp.

614 {
615  return GetLoadedNetworkPtr(networkId)->GetInputTensorInfo(layerId);
616 }

References LoadedNetwork::GetInputTensorInfo().

◆ GetOutputTensorInfo()

TensorInfo GetOutputTensorInfo ( NetworkId  networkId,
LayerBindingId  layerId 
) const

Definition at line 618 of file Runtime.cpp.

619 {
620  return GetLoadedNetworkPtr(networkId)->GetOutputTensorInfo(layerId);
621 }

References LoadedNetwork::GetOutputTensorInfo().

◆ GetProfiler()

const std::shared_ptr< IProfiler > GetProfiler ( NetworkId  networkId) const

Gets the profiler corresponding to the given network id.

Parameters
networkId — The id of the network for which to get the profile.
Returns
A pointer to the requested profiler, or nullptr if not found.

Definition at line 290 of file Runtime.cpp.

291 {
292  auto it = m_LoadedNetworks.find(networkId);
293  if (it != m_LoadedNetworks.end())
294  {
295  auto& loadedNetwork = it->second;
296  return loadedNetwork->GetProfiler();
297  }
298 
299  return nullptr;
300 }

◆ ImportInputs()

std::vector< ImportedInputId > ImportInputs ( NetworkId  networkId,
const InputTensors & inputTensors,
MemorySource  forceImportMemorySource 
)

Definition at line 623 of file Runtime.cpp.

625 {
626  return GetLoadedNetworkPtr(networkId)->ImportInputs(inputTensors, forceImportMemorySource);
627 }

References LoadedNetwork::ImportInputs().

◆ ImportOutputs()

std::vector< ImportedOutputId > ImportOutputs ( NetworkId  networkId,
const OutputTensors & outputTensors,
MemorySource  forceImportMemorySource 
)

Definition at line 629 of file Runtime.cpp.

631 {
632  return GetLoadedNetworkPtr(networkId)->ImportOutputs(outputTensors, forceImportMemorySource);
633 }

References LoadedNetwork::ImportOutputs().

◆ InitialiseProfilingService()

void InitialiseProfilingService ( arm::pipe::IProfilingService &  profilingService)
override

Definition at line 317 of file Runtime.cpp.

318 {
320  initialiser.InitialiseProfilingService(profilingService);
321 }

References ArmNNProfilingServiceInitialiser::InitialiseProfilingService().

◆ LoadNetwork() [1/3]

Status LoadNetwork ( NetworkId & networkIdOut,
IOptimizedNetworkPtr  network 
)

Loads a complete network into the Runtime.

Parameters
[out] networkIdOut — Unique identifier for the network is returned in this reference.
[in] network — Complete network to load into the Runtime. The runtime takes ownership of the network once passed in.
Returns
armnn::Status

Definition at line 166 of file Runtime.cpp.

167 {
168  std::string ignoredErrorMessage;
169  return LoadNetwork(networkIdOut, std::move(inNetwork), ignoredErrorMessage);
170 }

Referenced by RuntimeImpl::LoadNetwork().

◆ LoadNetwork() [2/3]

Status LoadNetwork ( NetworkId & networkIdOut,
IOptimizedNetworkPtr  network,
std::string &  errorMessage 
)

Load a complete network into the IRuntime.

Parameters
[out] networkIdOut — Unique identifier for the network is returned in this reference.
[in] network — Complete network to load into the IRuntime.
[out] errorMessage — Error message if there were any errors. The runtime takes ownership of the network once passed in.
Returns
armnn::Status

Definition at line 172 of file Runtime.cpp.

175 {
176  INetworkProperties networkProperties(
178  return LoadNetwork(networkIdOut, std::move(inNetwork), errorMessage, networkProperties);
179 }

References RuntimeImpl::LoadNetwork(), and armnn::Undefined.

◆ LoadNetwork() [3/3]

Status LoadNetwork ( NetworkId & networkIdOut,
IOptimizedNetworkPtr  network,
std::string &  errorMessage,
const INetworkProperties & networkProperties 
)

Definition at line 181 of file Runtime.cpp.

185 {
186  // Register the profiler
187  auto profiler = inNetwork->GetProfiler();
189 
190  IOptimizedNetwork* rawNetwork = inNetwork.release();
191 
192  networkIdOut = GenerateNetworkId();
193 
194  for (auto&& context : m_BackendContexts)
195  {
196  context.second->BeforeLoadNetwork(networkIdOut);
197  }
198 
199  unique_ptr<LoadedNetwork> loadedNetwork = LoadedNetwork::MakeLoadedNetwork(
200  std::unique_ptr<IOptimizedNetwork>(rawNetwork),
201  errorMessage,
202  networkProperties,
203  m_ProfilingService.get());
204 
205  if (!loadedNetwork)
206  {
207  return Status::Failure;
208  }
209 
210  {
211 #if !defined(ARMNN_DISABLE_THREADS)
212  std::lock_guard<std::mutex> lockGuard(m_Mutex);
213 #endif
214 
215  // Stores the network
216  m_LoadedNetworks[networkIdOut] = std::move(loadedNetwork);
217  }
218 
219  for (auto&& context : m_BackendContexts)
220  {
221  context.second->AfterLoadNetwork(networkIdOut);
222  }
223 
224  if (m_ProfilingService->IsProfilingEnabled())
225  {
226  m_ProfilingService->IncrementCounterValue(arm::pipe::NETWORK_LOADS);
227  }
228 
229  return Status::Success;
230 }

References armnn::Failure, ProfilerManager::GetInstance(), LoadedNetwork::MakeLoadedNetwork(), ProfilerManager::RegisterProfiler(), and armnn::Success.

◆ RegisterDebugCallback()

void RegisterDebugCallback ( NetworkId  networkId,
const DebugCallbackFunction & func 
)

Registers a callback function to debug layers performing custom computations on intermediate tensors.

Parameters
networkId — The id of the network to register the callback.
func — Callback function to pass to the debug layer.

Definition at line 766 of file Runtime.cpp.

767 {
768  LoadedNetwork* loadedNetwork = GetLoadedNetworkPtr(networkId);
769  loadedNetwork->RegisterDebugCallback(func);
770 }

References LoadedNetwork::RegisterDebugCallback().

◆ ReportStructure()

void ReportStructure ( arm::pipe::IProfilingService &  profilingService)
override

Definition at line 302 of file Runtime.cpp.

303 {
304  if (profilingService.IsProfilingEnabled())
305  {
306  LoadedNetworks::iterator it = m_LoadedNetworks.begin();
307  while (it != m_LoadedNetworks.end())
308  {
309  auto& loadedNetwork = it->second;
310  loadedNetwork->SendNetworkStructure(profilingService);
311  // Increment the Iterator to point to next entry
312  it++;
313  }
314  }
315 }

◆ UnloadNetwork()

Status UnloadNetwork ( NetworkId  networkId)

Unloads a network from the Runtime.

At the moment this only removes the network from the m_Impl->m_Network. This might need more work in the future to be AndroidNN compliant.

Parameters
[in] networkId — Unique identifier for the network to be unloaded. Generated in LoadNetwork().
Returns
armnn::Status

Definition at line 232 of file Runtime.cpp.

233 {
234  bool unloadOk = true;
235  for (auto&& context : m_BackendContexts)
236  {
237  unloadOk &= context.second->BeforeUnloadNetwork(networkId);
238  }
239 
240  if (!unloadOk)
241  {
242  ARMNN_LOG(warning) << "RuntimeImpl::UnloadNetwork(): failed to unload "
243  "network with ID:" << networkId << " because BeforeUnloadNetwork failed";
244  return Status::Failure;
245  }
246 
247  std::unique_ptr<arm::pipe::TimelineUtilityMethods> timelineUtils =
248  arm::pipe::TimelineUtilityMethods::GetTimelineUtils(*m_ProfilingService.get());
249  {
250 #if !defined(ARMNN_DISABLE_THREADS)
251  std::lock_guard<std::mutex> lockGuard(m_Mutex);
252 #endif
253 
254  // If timeline recording is on mark the Network end of life
255  if (timelineUtils)
256  {
257  auto search = m_LoadedNetworks.find(networkId);
258  if (search != m_LoadedNetworks.end())
259  {
260  arm::pipe::ProfilingGuid networkGuid = search->second->GetNetworkGuid();
261  timelineUtils->RecordEvent(networkGuid,
262  arm::pipe::LabelsAndEventClasses::ARMNN_PROFILING_EOL_EVENT_CLASS);
263  }
264  }
265 
266  if (m_LoadedNetworks.erase(networkId) == 0)
267  {
268  ARMNN_LOG(warning) << "WARNING: RuntimeImpl::UnloadNetwork(): " << networkId << " not found!";
269  return Status::Failure;
270  }
271 
272  if (m_ProfilingService->IsProfilingEnabled())
273  {
274  m_ProfilingService->IncrementCounterValue(arm::pipe::NETWORK_UNLOADS);
275  }
276  }
277 
278  for (auto&& context : m_BackendContexts)
279  {
280  context.second->AfterUnloadNetwork(networkId);
281  }
282 
283  // Unregister the profiler
285 
286  ARMNN_LOG(debug) << "RuntimeImpl::UnloadNetwork(): Unloaded network with ID: " << networkId;
287  return Status::Success;
288 }

References ARMNN_LOG, armnn::debug, armnn::Failure, ProfilerManager::GetInstance(), ProfilerManager::RegisterProfiler(), armnn::Success, and armnn::warning.

Referenced by RuntimeImpl::~RuntimeImpl().

Friends And Related Function Documentation

◆ GetProfilingService

arm::pipe::IProfilingService& GetProfilingService ( RuntimeImpl * runtime)
friend

Definition at line 59 of file TestUtils.cpp.

60 {
61  return *(runtime->m_ProfilingService.get());
62 }

◆ RuntimeLoadedNetworksReserve

void RuntimeLoadedNetworksReserve ( RuntimeImpl * runtime)
friend

The documentation for this struct was generated from the following files:
ARMNN_ASSERT
#define ARMNN_ASSERT(COND)
Definition: Assert.hpp:14
armnn::Compute::Undefined
@ Undefined
armnn::DeviceSpec::AddSupportedBackends
void AddSupportedBackends(const BackendIdSet &backendIds, bool isDynamic=false)
Definition: DeviceSpec.hpp:30
armnn::ArmNNProfilingServiceInitialiser
Definition: ArmNNProfilingServiceInitialiser.hpp:14
armnn::LoadedNetwork::GetOutputTensorInfo
TensorInfo GetOutputTensorInfo(LayerBindingId layerId) const
Definition: LoadedNetwork.cpp:723
armnn::ProfilerManager::RegisterProfiler
void RegisterProfiler(IProfiler *profiler)
Definition: Profiling.cpp:600
armnn::LoadedNetwork::EnqueueWorkload
Status EnqueueWorkload(const InputTensors &inputTensors, const OutputTensors &outputTensors, std::vector< ImportedInputId > preImportedInputIds={}, std::vector< ImportedOutputId > preImportedOutputIds={})
Single thread execution of the loaded network.
Definition: LoadedNetwork.cpp:851
armnn::LoadedNetwork::RegisterDebugCallback
void RegisterDebugCallback(const DebugCallbackFunction &func)
Definition: LoadedNetwork.cpp:2227
armnn::DeviceSpec::GetDynamicBackends
const BackendIdSet & GetDynamicBackends() const
Definition: DeviceSpec.hpp:48
armnn::BackendIdSet
std::unordered_set< BackendId > BackendIdSet
Definition: BackendId.hpp:193
arm::pipe::ConvertExternalProfilingOptions
ProfilingOptions ConvertExternalProfilingOptions(const armnn::IRuntime::CreationOptions::ExternalProfilingOptions &options)
Definition: ProfilingOptionsConverter.cpp:17
armnn::IRuntime::CreationOptions::m_DynamicBackendsPath
std::string m_DynamicBackendsPath
Setting this value will override the paths set by the DYNAMIC_BACKEND_PATHS compiler directive Only a...
Definition: IRuntime.hpp:99
armnn::GetTimeNow
std::chrono::high_resolution_clock::time_point GetTimeNow()
Definition: Timer.hpp:14
armnn::MemorySource::DmaBufProtected
@ DmaBufProtected
armnn::BackendCapability
BackendCapability
BackendCapability class.
Definition: Types.hpp:280
armnn::IOptimizedNetwork
Definition: INetwork.hpp:886
armnn::LoadedNetwork
Definition: LoadedNetwork.hpp:42
armnn::BackendOptions::BackendOption
Definition: BackendOptions.hpp:215
armnn::BackendRegistry::RegisterMemoryOptimizerStrategy
void RegisterMemoryOptimizerStrategy(const BackendId &id, std::shared_ptr< IMemoryOptimizerStrategy > strategy)
Definition: BackendRegistry.cpp:133
armnn::RuntimeImpl::LoadNetwork
Status LoadNetwork(NetworkId &networkIdOut, IOptimizedNetworkPtr network)
Loads a complete network into the Runtime.
Definition: Runtime.cpp:166
armnn::LoadedNetwork::ImportInputs
std::vector< ImportedInputId > ImportInputs(const InputTensors &inputTensors, MemorySource forceImportMemorySource=MemorySource::Undefined)
Definition: LoadedNetwork.cpp:1430
armnn::IRuntime::CreationOptions::ExternalProfilingOptions::m_TimelineEnabled
bool m_TimelineEnabled
Indicates whether external timeline profiling is enabled or not.
Definition: IRuntime.hpp:141
ARMNN_LOG
#define ARMNN_LOG(severity)
Definition: Logging.hpp:212
armnn::RuntimeImpl::UnloadNetwork
Status UnloadNetwork(NetworkId networkId)
Unloads a network from the Runtime.
Definition: Runtime.cpp:232
armnn::DeviceSpec::ClearDynamicBackends
void ClearDynamicBackends()
Definition: DeviceSpec.hpp:39
armnn::BackendRegistry::SetProfilingService
void SetProfilingService(armnn::Optional< arm::pipe::IProfilingService & > profilingService)
Definition: BackendRegistry.cpp:107
armnn::NetworkId
int NetworkId
Definition: IRuntime.hpp:35
armnn::IRuntime::CreationOptions::m_BackendOptions
std::vector< BackendOptions > m_BackendOptions
Pass backend specific options.
Definition: IRuntime.hpp:190
ARMNN_SCOPED_PROFILING_EVENT
#define ARMNN_SCOPED_PROFILING_EVENT(backendId, name)
Definition: Profiling.hpp:220
armnn::MemorySource::Undefined
@ Undefined
armnn::EmptyOptional
EmptyOptional is used to initialize the Optional class in case we want to have default value for an O...
Definition: Optional.hpp:32
armnn::GetMemoryOptimizerStrategy
std::unique_ptr< IMemoryOptimizerStrategy > GetMemoryOptimizerStrategy(const std::string &strategyName)
Definition: MemoryOptimizerStrategyLibrary.hpp:36
armnn::LoadedNetwork::ImportOutputs
std::vector< ImportedOutputId > ImportOutputs(const OutputTensors &outputTensors, MemorySource forceImportMemorySource=MemorySource::Undefined)
Definition: LoadedNetwork.cpp:1573
armnn::BackendRegistryInstance
BackendRegistry & BackendRegistryInstance()
Definition: BackendRegistry.cpp:15
armnn::Status::Success
@ Success
armnn::INetworkProperties
Definition: IRuntime.hpp:43
armnn::Exception
Base class for all ArmNN exceptions so that users can filter to just those.
Definition: Exceptions.hpp:46
armnn::RuntimeException
Definition: Exceptions.hpp:120
armnn::LoadedNetwork::GetInputTensorInfo
TensorInfo GetInputTensorInfo(LayerBindingId layerId) const
Definition: LoadedNetwork.cpp:709
armnn::BackendRegistry::GetFactory
FactoryFunction GetFactory(const BackendId &id) const
Definition: BackendRegistry.cpp:57
armnn::ParseOptions
void ParseOptions(const std::vector< BackendOptions > &options, BackendId backend, F f)
Definition: BackendOptions.hpp:297
armnn::IRuntime::CreationOptions::m_ProtectedMode
bool m_ProtectedMode
Setting this flag will allow the user to create the Runtime in protected mode.
Definition: IRuntime.hpp:106
armnn::DynamicBackendUtils::DeregisterDynamicBackends
static void DeregisterDynamicBackends(const BackendIdSet &dynamicBackends)
Definition: DynamicBackendUtils.cpp:320
ARMNN_VERSION
#define ARMNN_VERSION
ARMNN_VERSION: "X.Y.Z" where: X = Major version number Y = Minor version number Z = Patch version num...
Definition: Version.hpp:22
armnn::BackendRegistry::RegisterAllocator
void RegisterAllocator(const BackendId &id, std::shared_ptr< ICustomAllocator > alloc)
Definition: BackendRegistry.cpp:112
armnn::IRuntime::CreationOptions::m_ProfilingOptions
ExternalProfilingOptions m_ProfilingOptions
Definition: IRuntime.hpp:154
armnn::LoadedNetwork::MakeLoadedNetwork
static std::unique_ptr< LoadedNetwork > MakeLoadedNetwork(std::unique_ptr< IOptimizedNetwork > net, std::string &errorMessage, const INetworkProperties &networkProperties, arm::pipe::IProfilingService *profilingService)
Definition: LoadedNetwork.cpp:170
armnn::LoadedNetwork::CreateWorkingMemHandle
std::unique_ptr< IWorkingMemHandle > CreateWorkingMemHandle(NetworkId networkId)
Create a new unique WorkingMemHandle object.
Definition: LoadedNetwork.cpp:1963
armnn::ProfilerManager::GetInstance
static ProfilerManager & GetInstance()
Definition: Profiling.cpp:593
armnn::LoadedNetwork::Execute
Status Execute(const InputTensors &inputTensors, const OutputTensors &outputTensors, IWorkingMemHandle &workingMemHandle, std::vector< ImportedInputId > preImportedInputs={}, std::vector< ImportedOutputId > preImportedOutputs={})
Thread safe execution of the loaded network.
Definition: LoadedNetwork.cpp:1741
armnn::BackendOptions::Var
Very basic type safe variant.
Definition: BackendOptions.hpp:38
armnn::BackendId
Definition: BackendId.hpp:75
armnn::LoadedNetwork::GetProfiler
const std::shared_ptr< IProfiler > & GetProfiler() const
Definition: LoadedNetwork.hpp:87
armnn::LoadedNetwork::ClearImportedOutputs
void ClearImportedOutputs(const std::vector< ImportedOutputId > outputIds)
Definition: LoadedNetwork.cpp:1720
armnn::IRuntime::CreationOptions::m_MemoryOptimizerStrategyMap
std::map< BackendId, std::shared_ptr< IMemoryOptimizerStrategy > > m_MemoryOptimizerStrategyMap
A map to define a custom memory optimizer strategy for specific backend Ids.
Definition: IRuntime.hpp:123
armnn::LoadedNetwork::FreeWorkingMemory
void FreeWorkingMemory()
Definition: LoadedNetwork.cpp:1234
armnn::LoadedNetwork::IsAsyncEnabled
bool IsAsyncEnabled()
Definition: LoadedNetwork.hpp:95
armnn::IRuntime::CreationOptions::m_CustomAllocatorMap
std::map< BackendId, std::shared_ptr< ICustomAllocator > > m_CustomAllocatorMap
A map to define a custom memory allocator for specific backend Ids.
Definition: IRuntime.hpp:115
armnn::LoadedNetwork::ClearImportedInputs
void ClearImportedInputs(const std::vector< ImportedInputId > inputIds)
Definition: LoadedNetwork.cpp:1699
armnn::ArmNNProfilingServiceInitialiser::InitialiseProfilingService
void InitialiseProfilingService(arm::pipe::IProfilingService &profilingService) override
Definition: ArmNNProfilingServiceInitialiser.cpp:17
armnn::BackendUnavailableException
Class for non-fatal exceptions raised while initialising a backend.
Definition: Exceptions.hpp:68
armnn::IRuntime::CreationOptions::ExternalProfilingOptions::m_EnableProfiling
bool m_EnableProfiling
Indicates whether external profiling is enabled or not.
Definition: IRuntime.hpp:139
armnn::GetMemBlockStrategyTypeName
constexpr const char * GetMemBlockStrategyTypeName(MemBlockStrategyType memBlockStrategyType)
Definition: TypesUtils.hpp:285
armnn::Status::Failure
@ Failure
armnn::GetTimeDuration
std::chrono::duration< double, std::milli > GetTimeDuration(std::chrono::high_resolution_clock::time_point start_time)
Definition: Timer.hpp:19
armnn::HasMatchingCapability
bool HasMatchingCapability(const BackendOptions::BackendOption &capability, const BackendCapabilities &capabilities)
Convenience function to check if a given capability matches a capability in a BackendCapabilities str...
Definition: BackendHelper.cpp:85