ArmNN 23.05
Runtime.cpp
//
// Copyright © 2017, 2022-2023 Arm Ltd and Contributors. All rights reserved.
// SPDX-License-Identifier: MIT
//

#include "ArmNNProfilingServiceInitialiser.hpp"
#include "Runtime.hpp"

#include <client/include/ProfilingOptionsConverter.hpp>

#include <armnn/Version.hpp>
#include <armnn/BackendRegistry.hpp>
#include <armnn/BackendHelper.hpp>
#include <armnn/Logging.hpp>

#include <armnn/backends/IBackendContext.hpp>

#include <armnn/profiling/ArmNNProfiling.hpp>

#include <armnn/utility/PolymorphicDowncast.hpp>
#include <armnn/utility/Timer.hpp>

#if !defined(ARMNN_DISABLE_DYNAMIC_BACKENDS)
#include <backendsCommon/DynamicBackendUtils.hpp>
#endif

#include <backendsCommon/memoryOptimizerStrategyLibrary/MemoryOptimizerStrategyLibrary.hpp>

#include <client/include/backends/IBackendProfiling.hpp>

#include <common/include/LabelsAndEventClasses.hpp>

#include <iostream>

using namespace armnn;
using namespace std;

namespace armnn
{

IRuntime::IRuntime() : pRuntimeImpl(nullptr) {}

IRuntime::IRuntime(const IRuntime::CreationOptions& options) : pRuntimeImpl(new RuntimeImpl(options)) {}

IRuntime::~IRuntime() = default;

IRuntime* IRuntime::CreateRaw(const CreationOptions& options)
{
    return new IRuntime(options);
}

IRuntimePtr IRuntime::Create(const CreationOptions& options)
{
    return IRuntimePtr(CreateRaw(options), &IRuntime::Destroy);
}

void IRuntime::Destroy(IRuntime* runtime)
{
    delete runtime;
}
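
// Usage sketch (illustrative, not part of the original file): clients normally go
// through Create(), which wraps CreateRaw() in an IRuntimePtr whose custom deleter
// is Destroy(), so teardown is automatic:
//
//     armnn::IRuntime::CreationOptions options;
//     armnn::IRuntimePtr runtime = armnn::IRuntime::Create(options);
//     // the runtime is destroyed via IRuntime::Destroy when the pointer goes out of scope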

Status IRuntime::LoadNetwork(NetworkId& networkIdOut, IOptimizedNetworkPtr network)
{
    return pRuntimeImpl->LoadNetwork(networkIdOut, std::move(network));
}

Status IRuntime::LoadNetwork(NetworkId& networkIdOut,
                             IOptimizedNetworkPtr network,
                             std::string& errorMessage)
{
    return pRuntimeImpl->LoadNetwork(networkIdOut, std::move(network), errorMessage);
}

Status IRuntime::LoadNetwork(NetworkId& networkIdOut,
                             IOptimizedNetworkPtr network,
                             std::string& errorMessage,
                             const INetworkProperties& networkProperties)
{
    return pRuntimeImpl->LoadNetwork(networkIdOut, std::move(network), errorMessage, networkProperties);
}

TensorInfo IRuntime::GetInputTensorInfo(NetworkId networkId, LayerBindingId layerId) const
{
    return pRuntimeImpl->GetInputTensorInfo(networkId, layerId);
}

TensorInfo IRuntime::GetOutputTensorInfo(NetworkId networkId, LayerBindingId layerId) const
{
    return pRuntimeImpl->GetOutputTensorInfo(networkId, layerId);
}

std::vector<ImportedInputId> IRuntime::ImportInputs(NetworkId networkId, const InputTensors& inputTensors,
                                                    MemorySource forceImportMemorySource)
{
    return pRuntimeImpl->ImportInputs(networkId, inputTensors, forceImportMemorySource);
}

std::vector<ImportedOutputId> IRuntime::ImportOutputs(NetworkId networkId, const OutputTensors& outputTensors,
                                                      MemorySource forceImportMemorySource)
{
    return pRuntimeImpl->ImportOutputs(networkId, outputTensors, forceImportMemorySource);
}

void IRuntime::ClearImportedInputs(NetworkId networkId, const std::vector<ImportedInputId> inputIds)
{
    return pRuntimeImpl->ClearImportedInputs(networkId, inputIds);
}
void IRuntime::ClearImportedOutputs(NetworkId networkId, const std::vector<ImportedOutputId> outputIds)
{
    return pRuntimeImpl->ClearImportedOutputs(networkId, outputIds);
}
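
// Usage sketch (illustrative, not part of the original file): pre-importing tensors
// once and reusing the returned ids across repeated EnqueueWorkload() calls can avoid
// per-inference copies, provided the backend accepts the given MemorySource
// (MemorySource::Malloc is an assumption here):
//
//     std::vector<armnn::ImportedInputId> inIds =
//         runtime->ImportInputs(netId, inputTensors, armnn::MemorySource::Malloc);
//     std::vector<armnn::ImportedOutputId> outIds =
//         runtime->ImportOutputs(netId, outputTensors, armnn::MemorySource::Malloc);
//     // ... run inferences ...
//     runtime->ClearImportedInputs(netId, inIds);
//     runtime->ClearImportedOutputs(netId, outIds);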

Status IRuntime::EnqueueWorkload(NetworkId networkId,
                                 const InputTensors& inputTensors,
                                 const OutputTensors& outputTensors,
                                 std::vector<ImportedInputId> preImportedInputIds,
                                 std::vector<ImportedOutputId> preImportedOutputIds)
{
    return pRuntimeImpl->EnqueueWorkload(networkId, inputTensors, outputTensors,
                                         preImportedInputIds, preImportedOutputIds);
}
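
// Usage sketch (illustrative, not part of the original file): a synchronous inference.
// Ids returned by ImportInputs()/ImportOutputs() may be passed for bindings that are
// not supplied via inputTensors/outputTensors; with no imports, the empty defaults
// select the plain copy path:
//
//     armnn::Status status = runtime->EnqueueWorkload(netId, inputTensors, outputTensors);
//     if (status != armnn::Status::Success) { /* handle failure */ }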

Status IRuntime::Execute(IWorkingMemHandle& workingMemHandle,
                         const InputTensors& inputTensors,
                         const OutputTensors& outputTensors,
                         std::vector<ImportedInputId> preImportedInputs,
                         std::vector<ImportedOutputId> preImportedOutputs)
{
    return pRuntimeImpl->Execute(workingMemHandle,
                                 inputTensors,
                                 outputTensors,
                                 preImportedInputs,
                                 preImportedOutputs);
}

Status IRuntime::UnloadNetwork(NetworkId networkId)
{
    return pRuntimeImpl->UnloadNetwork(networkId);
}

const IDeviceSpec& IRuntime::GetDeviceSpec() const
{
    return pRuntimeImpl->GetDeviceSpec();
}

std::unique_ptr<IWorkingMemHandle> IRuntime::CreateWorkingMemHandle(NetworkId networkId)
{
    return pRuntimeImpl->CreateWorkingMemHandle(networkId);
}

const std::shared_ptr<IProfiler> IRuntime::GetProfiler(NetworkId networkId) const
{
    return pRuntimeImpl->GetProfiler(networkId);
}

void IRuntime::RegisterDebugCallback(NetworkId networkId, const DebugCallbackFunction& func)
{
    return pRuntimeImpl->RegisterDebugCallback(networkId, func);
}
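
// Usage sketch (illustrative, not part of the original file): a DebugCallbackFunction
// receives the layer guid, the output slot index, and a handle to the intermediate
// tensor; this assumes the network was optimized with debugging enabled:
//
//     runtime->RegisterDebugCallback(netId,
//         [](armnn::LayerGuid guid, unsigned int slotIndex, armnn::ITensorHandle* tensorHandle)
//         {
//             std::cout << "layer " << guid << " slot " << slotIndex << std::endl;
//         });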

int RuntimeImpl::GenerateNetworkId()
{
    return m_NetworkIdCounter++;
}

Status RuntimeImpl::LoadNetwork(NetworkId& networkIdOut, IOptimizedNetworkPtr inNetwork)
{
    std::string ignoredErrorMessage;
    return LoadNetwork(networkIdOut, std::move(inNetwork), ignoredErrorMessage);
}

Status RuntimeImpl::LoadNetwork(NetworkId& networkIdOut,
                                IOptimizedNetworkPtr inNetwork,
                                std::string& errorMessage)
{
    INetworkProperties networkProperties(
            false, MemorySource::Undefined, MemorySource::Undefined);
    return LoadNetwork(networkIdOut, std::move(inNetwork), errorMessage, networkProperties);
}

Status RuntimeImpl::LoadNetwork(NetworkId& networkIdOut,
                                IOptimizedNetworkPtr inNetwork,
                                std::string& errorMessage,
                                const INetworkProperties& networkProperties)
{
    // Register the profiler
    auto profiler = inNetwork->GetProfiler();
    ProfilerManager::GetInstance().RegisterProfiler(profiler.get());

    IOptimizedNetwork* rawNetwork = inNetwork.release();

    networkIdOut = GenerateNetworkId();

    for (auto&& context : m_BackendContexts)
    {
        context.second->BeforeLoadNetwork(networkIdOut);
    }

    unique_ptr<LoadedNetwork> loadedNetwork = LoadedNetwork::MakeLoadedNetwork(
        std::unique_ptr<IOptimizedNetwork>(rawNetwork),
        errorMessage,
        networkProperties,
        m_ProfilingService.get());

    if (!loadedNetwork)
    {
        return Status::Failure;
    }

    {
#if !defined(ARMNN_DISABLE_THREADS)
        std::lock_guard<std::mutex> lockGuard(m_Mutex);
#endif

        // Store the network
        m_LoadedNetworks[networkIdOut] = std::move(loadedNetwork);
    }

    for (auto&& context : m_BackendContexts)
    {
        context.second->AfterLoadNetwork(networkIdOut);
    }

    if (m_ProfilingService->IsProfilingEnabled())
    {
        m_ProfilingService->IncrementCounterValue(arm::pipe::NETWORK_LOADS);
    }

    return Status::Success;
}

Status RuntimeImpl::UnloadNetwork(NetworkId networkId)
{
    bool unloadOk = true;
    for (auto&& context : m_BackendContexts)
    {
        unloadOk &= context.second->BeforeUnloadNetwork(networkId);
    }

    if (!unloadOk)
    {
        ARMNN_LOG(warning) << "RuntimeImpl::UnloadNetwork(): failed to unload "
                              "network with ID: " << networkId << " because BeforeUnloadNetwork failed";
        return Status::Failure;
    }

    std::unique_ptr<arm::pipe::TimelineUtilityMethods> timelineUtils =
        arm::pipe::TimelineUtilityMethods::GetTimelineUtils(*m_ProfilingService.get());
    {
#if !defined(ARMNN_DISABLE_THREADS)
        std::lock_guard<std::mutex> lockGuard(m_Mutex);
#endif

        // If timeline recording is on, mark the network end of life
        if (timelineUtils)
        {
            auto search = m_LoadedNetworks.find(networkId);
            if (search != m_LoadedNetworks.end())
            {
                arm::pipe::ProfilingGuid networkGuid = search->second->GetNetworkGuid();
                timelineUtils->RecordEvent(networkGuid,
                                           arm::pipe::LabelsAndEventClasses::ARMNN_PROFILING_EOL_EVENT_CLASS);
            }
        }

        if (m_LoadedNetworks.erase(networkId) == 0)
        {
            ARMNN_LOG(warning) << "WARNING: RuntimeImpl::UnloadNetwork(): " << networkId << " not found!";
            return Status::Failure;
        }

        if (m_ProfilingService->IsProfilingEnabled())
        {
            m_ProfilingService->IncrementCounterValue(arm::pipe::NETWORK_UNLOADS);
        }
    }

    for (auto&& context : m_BackendContexts)
    {
        context.second->AfterUnloadNetwork(networkId);
    }

    // Unregister the profiler
    ProfilerManager::GetInstance().RegisterProfiler(nullptr);

    ARMNN_LOG(debug) << "RuntimeImpl::UnloadNetwork(): Unloaded network with ID: " << networkId;
    return Status::Success;
}

const std::shared_ptr<IProfiler> RuntimeImpl::GetProfiler(NetworkId networkId) const
{
    auto it = m_LoadedNetworks.find(networkId);
    if (it != m_LoadedNetworks.end())
    {
        auto& loadedNetwork = it->second;
        return loadedNetwork->GetProfiler();
    }

    return nullptr;
}

void RuntimeImpl::ReportStructure(arm::pipe::IProfilingService& profilingService)
{
    if (profilingService.IsProfilingEnabled())
    {
        LoadedNetworks::iterator it = m_LoadedNetworks.begin();
        while (it != m_LoadedNetworks.end())
        {
            auto& loadedNetwork = it->second;
            loadedNetwork->SendNetworkStructure(profilingService);
            // Increment the iterator to point to the next entry
            it++;
        }
    }
}

void RuntimeImpl::InitialiseProfilingService(arm::pipe::IProfilingService& profilingService)
{
    ArmNNProfilingServiceInitialiser initialiser;
    initialiser.InitialiseProfilingService(profilingService);
}

RuntimeImpl::RuntimeImpl(const IRuntime::CreationOptions& options)
    : m_NetworkIdCounter(0)
{
    m_ProfilingService = arm::pipe::IProfilingService::CreateProfilingService(
        arm::pipe::MAX_ARMNN_COUNTER,
        *this,
        arm::pipe::ARMNN_SOFTWARE_INFO,
        arm::pipe::ARMNN_SOFTWARE_VERSION,
        arm::pipe::ARMNN_HARDWARE_VERSION,
        *this);
    const auto start_time = armnn::GetTimeNow();
    ARMNN_LOG(info) << "ArmNN v" << ARMNN_VERSION;
    if (options.m_ProfilingOptions.m_TimelineEnabled && !options.m_ProfilingOptions.m_EnableProfiling)
    {
        throw RuntimeException(
            "It is not possible to enable timeline reporting without profiling being enabled");
    }
#if !defined(ARMNN_DISABLE_DYNAMIC_BACKENDS)
    // Load any available/compatible dynamic backends before the runtime
    // goes through the backend registry
    LoadDynamicBackends(options.m_DynamicBackendsPath);
#endif
    armnn::BackendIdSet supportedBackends;
    for (const auto& id : BackendRegistryInstance().GetBackendIds())
    {
        // Store backend contexts for the supported ones
        try {
            auto factoryFun = BackendRegistryInstance().GetFactory(id);
            ARMNN_ASSERT(factoryFun != nullptr);
            auto backend = factoryFun();
            ARMNN_ASSERT(backend != nullptr);
            ARMNN_ASSERT(backend.get() != nullptr);

            auto customAllocatorMapIterator = options.m_CustomAllocatorMap.find(id);
            if (customAllocatorMapIterator != options.m_CustomAllocatorMap.end() &&
                customAllocatorMapIterator->second == nullptr)
            {
#if !defined(ARMNN_DISABLE_DYNAMIC_BACKENDS)
                // We need to manually clean up the dynamic backends before throwing an exception.
                DynamicBackendUtils::DeregisterDynamicBackends(m_DeviceSpec.GetDynamicBackends());
                m_DeviceSpec.ClearDynamicBackends();
#endif
                throw armnn::Exception("Allocator associated with id " + id.Get() + " is null");
            }

            // If the runtime is created in protected mode, only add backends that support this mode
            if (options.m_ProtectedMode)
            {
                // Check whether the backend supports ProtectedMode
                using BackendCapability = BackendOptions::BackendOption;
                BackendCapability protectedContentCapability {"ProtectedContentAllocation", true};
                if (!HasCapability(protectedContentCapability, id))
                {
                    // Protected content allocation is not supported by the backend,
                    // so the backend should not be registered
                    ARMNN_LOG(warning) << "Backend "
                                       << id
                                       << " is not registered as it does not support protected content allocation.";
                    continue;
                }
                // The user is responsible for providing a custom memory allocator which can
                // allocate protected memory
                if (customAllocatorMapIterator != options.m_CustomAllocatorMap.end())
                {
                    std::string err;
                    if (customAllocatorMapIterator->second->GetMemorySourceType()
                        == armnn::MemorySource::DmaBufProtected)
                    {
                        if (!backend->UseCustomMemoryAllocator(customAllocatorMapIterator->second, err))
                        {
                            ARMNN_LOG(error) << "The backend "
                                             << id
                                             << " reported an error when entering protected mode. Backend won't be"
                                             << " used. ErrorMsg: " << err;
                            continue;
                        }
                        // No errors, so register the custom allocator with the BackendRegistry
                        BackendRegistryInstance().RegisterAllocator(id, customAllocatorMapIterator->second);
                        m_AllocatorsAddedByThisRuntime.emplace(id);
                    }
                    else
                    {
                        ARMNN_LOG(error) << "The CustomAllocator provided with the runtime options doesn't support "
                                            "protected memory. Protected mode can't be activated. The backend "
                                         << id
                                         << " is not going to be used. MemorySource must be MemorySource::DmaBufProtected";
                        continue;
                    }
                }
                else
                {
                    ARMNN_LOG(error) << "Protected mode can't be activated for backend: "
                                     << id
                                     << " no custom allocator was provided to the runtime options.";
                    continue;
                }
            }
            else
            {
                // If a custom memory allocator is provided, make the backend use that instead of the default
                if (customAllocatorMapIterator != options.m_CustomAllocatorMap.end())
                {
                    std::string err;
                    if (!backend->UseCustomMemoryAllocator(customAllocatorMapIterator->second, err))
                    {
                        ARMNN_LOG(error) << "The backend "
                                         << id
                                         << " reported an error when trying to use the provided custom allocator."
                                            " Backend won't be used."
                                         << " ErrorMsg: " << err;
                        continue;
                    }
                    // No errors, so register the custom allocator with the BackendRegistry
                    BackendRegistryInstance().RegisterAllocator(id, customAllocatorMapIterator->second);
                    m_AllocatorsAddedByThisRuntime.emplace(id);
                }
            }

            // Check if a custom memory optimizer strategy map is set
            if (!options.m_MemoryOptimizerStrategyMap.empty())
            {
                auto customMemoryOptimizerStrategyMapIterator = options.m_MemoryOptimizerStrategyMap.find(id);
                // If a memory optimizer strategy is provided, make the backend use that instead of the default
                if (customMemoryOptimizerStrategyMapIterator != options.m_MemoryOptimizerStrategyMap.end())
                {
                    // No errors, so register the memory optimizer strategy with the BackendRegistry
                    BackendRegistryInstance().RegisterMemoryOptimizerStrategy(
                        id, customMemoryOptimizerStrategyMapIterator->second);

                    ARMNN_LOG(info) << "MemoryOptimizerStrategy "
                                    << customMemoryOptimizerStrategyMapIterator->second->GetName()
                                    << " set for the backend " << id << ".";
                }
            }
            else
            {
                // Check whether use of one of the existing memory optimizer strategies is requested
                std::string memoryOptimizerStrategyName = "";
                ParseOptions(options.m_BackendOptions, id, [&](std::string name, const BackendOptions::Var& value)
                {
                    if (name == "MemoryOptimizerStrategy")
                    {
                        memoryOptimizerStrategyName = ParseStringBackendOption(value, "");
                    }
                });
                if (memoryOptimizerStrategyName != "")
                {
                    std::shared_ptr<IMemoryOptimizerStrategy> strategy =
                        GetMemoryOptimizerStrategy(memoryOptimizerStrategyName);

                    if (!strategy)
                    {
                        ARMNN_LOG(warning) << "MemoryOptimizerStrategy: " << memoryOptimizerStrategyName
                                           << " was not found.";
                    }
                    else
                    {
                        using BackendCapability = BackendOptions::BackendOption;
                        auto strategyType = GetMemBlockStrategyTypeName(strategy->GetMemBlockStrategyType());
                        BackendCapability memOptimizeStrategyCapability {strategyType, true};
                        if (HasCapability(memOptimizeStrategyCapability, id))
                        {
                            BackendRegistryInstance().RegisterMemoryOptimizerStrategy(id, strategy);

                            ARMNN_LOG(info) << "MemoryOptimizerStrategy: "
                                            << memoryOptimizerStrategyName << " set for the backend " << id << ".";
                        }
                        else
                        {
                            ARMNN_LOG(warning) << "Backend "
                                               << id
                                               << " does not have multi-axis packing capability and cannot support"
                                               << " MemoryOptimizerStrategy: " << memoryOptimizerStrategyName << ".";
                        }
                    }
                }
            }

            auto context = backend->CreateBackendContext(options);

            // Backends are allowed to return nullptrs if they
            // don't wish to create a backend-specific context
            if (context)
            {
                m_BackendContexts.emplace(std::make_pair(id, std::move(context)));
            }
            supportedBackends.emplace(id);

            unique_ptr<arm::pipe::IBackendProfiling> profilingIface =
                arm::pipe::IBackendProfiling::CreateBackendProfiling(
                    arm::pipe::ConvertExternalProfilingOptions(options.m_ProfilingOptions),
                    *m_ProfilingService.get(),
                    id.Get());

            // Backends may also provide a profiling context. Ask for it now.
            auto profilingContext = backend->CreateBackendProfilingContext(options, profilingIface);
            // Backends that don't support profiling will return a null profiling context.
            if (profilingContext)
            {
                // Pass the context onto the profiling service.
                m_ProfilingService->AddBackendProfilingContext(id, profilingContext);
            }
        }
        catch (const BackendUnavailableException&)
        {
            // Ignore backends which are unavailable
        }
    }

    BackendRegistryInstance().SetProfilingService(*m_ProfilingService.get());
    // Pass configuration info to the profiling service
    m_ProfilingService->ConfigureProfilingService(
        arm::pipe::ConvertExternalProfilingOptions(options.m_ProfilingOptions));
    if (options.m_ProfilingOptions.m_EnableProfiling)
    {
        // Try to wait for the profiling service to initialise
        m_ProfilingService->WaitForProfilingServiceActivation(3000);
    }

    m_DeviceSpec.AddSupportedBackends(supportedBackends);

    ARMNN_LOG(info) << "Initialization time: " << std::setprecision(2)
                    << std::fixed << armnn::GetTimeDuration(start_time).count() << " ms.";
}

RuntimeImpl::~RuntimeImpl()
{
    const auto startTime = armnn::GetTimeNow();
    std::vector<int> networkIDs;
    try
    {
        // Coverity fix: The following code may throw an exception of type std::length_error.
        std::transform(m_LoadedNetworks.begin(), m_LoadedNetworks.end(),
                       std::back_inserter(networkIDs),
                       [](const auto& pair) { return pair.first; });
    }
    catch (const std::exception& e)
    {
        // Coverity fix: BOOST_LOG_TRIVIAL (typically used to report errors) may throw an
        // exception of type std::length_error.
        // Using stderr instead in this context as there is no point in nesting try-catch blocks here.
        std::cerr << "WARNING: An error has occurred when getting the IDs of the networks to unload: " << e.what()
                  << "\nSome of the loaded networks may not be unloaded" << std::endl;
    }
    // We then proceed to unload all the networks whose IDs have been appended to the list
    // up to the point the exception was thrown (if any).

    for (auto networkID : networkIDs)
    {
        try
        {
            // Coverity fix: UnloadNetwork() may throw an exception of type std::length_error,
            // boost::log::v2s_mt_posix::odr_violation or boost::log::v2s_mt_posix::system_error
            UnloadNetwork(networkID);
        }
        catch (const std::exception& e)
        {
            // Coverity fix: BOOST_LOG_TRIVIAL (typically used to report errors) may throw an
            // exception of type std::length_error.
            // Using stderr instead in this context as there is no point in nesting try-catch blocks here.
            std::cerr << "WARNING: An error has occurred when unloading network " << networkID << ": " << e.what()
                      << std::endl;
        }
    }
#if !defined(ARMNN_DISABLE_DYNAMIC_BACKENDS)
    // Clear all dynamic backends.
    DynamicBackendUtils::DeregisterDynamicBackends(m_DeviceSpec.GetDynamicBackends());
    m_DeviceSpec.ClearDynamicBackends();
#endif
    m_BackendContexts.clear();

    BackendRegistryInstance().SetProfilingService(armnn::EmptyOptional());
    // Remove custom allocators that this runtime has added.
    // Note that, as backends can be per-process and there can be many instances of a runtime
    // in a process, an allocator may have been overwritten by another runtime.
    for_each(m_AllocatorsAddedByThisRuntime.begin(), m_AllocatorsAddedByThisRuntime.end(),
             [](BackendId id) { BackendRegistryInstance().DeregisterAllocator(id); });

    ARMNN_LOG(info) << "Shutdown time: " << std::setprecision(2)
                    << std::fixed << armnn::GetTimeDuration(startTime).count() << " ms.";
}

LoadedNetwork* RuntimeImpl::GetLoadedNetworkPtr(NetworkId networkId) const
{
#if !defined(ARMNN_DISABLE_THREADS)
    std::lock_guard<std::mutex> lockGuard(m_Mutex);
#endif
    return m_LoadedNetworks.at(networkId).get();
}

armnn::TensorInfo RuntimeImpl::GetInputTensorInfo(NetworkId networkId, LayerBindingId layerId) const
{
    return GetLoadedNetworkPtr(networkId)->GetInputTensorInfo(layerId);
}

armnn::TensorInfo RuntimeImpl::GetOutputTensorInfo(NetworkId networkId, LayerBindingId layerId) const
{
    return GetLoadedNetworkPtr(networkId)->GetOutputTensorInfo(layerId);
}

std::vector<ImportedInputId> RuntimeImpl::ImportInputs(NetworkId networkId, const InputTensors& inputTensors,
                                                       MemorySource forceImportMemorySource)
{
    return GetLoadedNetworkPtr(networkId)->ImportInputs(inputTensors, forceImportMemorySource);
}

std::vector<ImportedOutputId> RuntimeImpl::ImportOutputs(NetworkId networkId, const OutputTensors& outputTensors,
                                                         MemorySource forceImportMemorySource)
{
    return GetLoadedNetworkPtr(networkId)->ImportOutputs(outputTensors, forceImportMemorySource);
}

void RuntimeImpl::ClearImportedInputs(NetworkId networkId, const std::vector<ImportedInputId> inputIds)
{
    return GetLoadedNetworkPtr(networkId)->ClearImportedInputs(inputIds);
}
void RuntimeImpl::ClearImportedOutputs(NetworkId networkId, const std::vector<ImportedOutputId> outputIds)
{
    return GetLoadedNetworkPtr(networkId)->ClearImportedOutputs(outputIds);
}

Status RuntimeImpl::EnqueueWorkload(NetworkId networkId,
                                    const InputTensors& inputTensors,
                                    const OutputTensors& outputTensors,
                                    std::vector<ImportedInputId> preImportedInputIds,
                                    std::vector<ImportedOutputId> preImportedOutputIds)
{
    const auto startTime = armnn::GetTimeNow();

    LoadedNetwork* loadedNetwork = GetLoadedNetworkPtr(networkId);

    if (!loadedNetwork)
    {
        ARMNN_LOG(error) << "A Network with an id of " << networkId << " does not exist.";
        return Status::Failure;
    }
    if (loadedNetwork->IsAsyncEnabled())
    {
        ARMNN_LOG(error) << "Network " << networkId << " is async enabled.";
        return Status::Failure;
    }
    ProfilerManager::GetInstance().RegisterProfiler(loadedNetwork->GetProfiler().get());

    ARMNN_SCOPED_PROFILING_EVENT(Compute::Undefined, "EnqueueWorkload");

    static thread_local NetworkId lastId = networkId;
    if (lastId != networkId)
    {
        LoadedNetworkFuncSafe(lastId, [](LoadedNetwork* network)
        {
            network->FreeWorkingMemory();
        });
    }
    lastId = networkId;

    auto status = loadedNetwork->EnqueueWorkload(inputTensors, outputTensors,
                                                 preImportedInputIds, preImportedOutputIds);

    // Check if we imported; if not, there's no need to call the After EnqueueWorkload events
    if (!preImportedInputIds.empty() || !preImportedOutputIds.empty())
    {
        // Call After EnqueueWorkload events
        for (auto&& context : m_BackendContexts)
        {
            context.second->AfterEnqueueWorkload(networkId);
        }
    }
    ARMNN_LOG(info) << "Execution time: " << std::setprecision(2)
                    << std::fixed << armnn::GetTimeDuration(startTime).count() << " ms.";
    return status;
}

Status RuntimeImpl::Execute(IWorkingMemHandle& iWorkingMemHandle,
                            const InputTensors& inputTensors,
                            const OutputTensors& outputTensors,
                            std::vector<ImportedInputId> preImportedInputs,
                            std::vector<ImportedOutputId> preImportedOutputs)
{
    const auto startTime = armnn::GetTimeNow();

    NetworkId networkId = iWorkingMemHandle.GetNetworkId();
    LoadedNetwork* loadedNetwork = GetLoadedNetworkPtr(networkId);

    if (!loadedNetwork)
    {
        ARMNN_LOG(error) << "A Network with an id of " << networkId << " does not exist.";
        return Status::Failure;
    }
    if (!loadedNetwork->IsAsyncEnabled())
    {
        ARMNN_LOG(error) << "Attempting to execute " << networkId << " when it is not async enabled.";
        return Status::Failure;
    }
    ProfilerManager::GetInstance().RegisterProfiler(loadedNetwork->GetProfiler().get());

    ARMNN_SCOPED_PROFILING_EVENT(Compute::Undefined, "Execute");

    auto status = loadedNetwork->Execute(inputTensors,
                                         outputTensors,
                                         iWorkingMemHandle,
                                         preImportedInputs,
                                         preImportedOutputs);

    ARMNN_LOG(info) << "Execution time: " << std::setprecision(2)
                    << std::fixed << armnn::GetTimeDuration(startTime).count() << " ms.";

    return status;
}

/// Create a new unique WorkingMemHandle object. Create multiple handles if you wish to have
/// overlapped Execution by calling this function from different threads.
std::unique_ptr<IWorkingMemHandle> RuntimeImpl::CreateWorkingMemHandle(NetworkId networkId)
{
    LoadedNetwork* loadedNetwork = GetLoadedNetworkPtr(networkId);

    if (!loadedNetwork)
    {
        ARMNN_LOG(error) << "A Network with an id of " << networkId << " does not exist.";
        return nullptr;
    }
    if (!loadedNetwork->IsAsyncEnabled())
    {
        ARMNN_LOG(error) << "Network " << networkId << " is not async enabled.";
        return nullptr;
    }
    ProfilerManager::GetInstance().RegisterProfiler(loadedNetwork->GetProfiler().get());

    ARMNN_SCOPED_PROFILING_EVENT(Compute::Undefined, "CreateWorkingMemHandle");

    static thread_local NetworkId lastId = networkId;
    if (lastId != networkId)
    {
        LoadedNetworkFuncSafe(lastId, [](LoadedNetwork* network)
        {
            network->FreeWorkingMemory();
        });
    }
    lastId = networkId;

    return loadedNetwork->CreateWorkingMemHandle(networkId);
}
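
// Usage sketch (illustrative, not part of the original file): overlapped execution of
// one async-enabled network from two threads, each with its own working-memory handle
// as the comment above prescribes; this assumes the network was loaded with
// INetworkProperties requesting asynchronous execution:
//
//     auto handle0 = runtime->CreateWorkingMemHandle(netId);
//     auto handle1 = runtime->CreateWorkingMemHandle(netId);
//     std::thread t0([&] { runtime->Execute(*handle0, inputs0, outputs0); });
//     std::thread t1([&] { runtime->Execute(*handle1, inputs1, outputs1); });
//     t0.join();
//     t1.join();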

void RuntimeImpl::RegisterDebugCallback(NetworkId networkId, const DebugCallbackFunction& func)
{
    LoadedNetwork* loadedNetwork = GetLoadedNetworkPtr(networkId);
    loadedNetwork->RegisterDebugCallback(func);
}

#if !defined(ARMNN_DISABLE_DYNAMIC_BACKENDS)
void RuntimeImpl::LoadDynamicBackends(const std::string& overrideBackendPath)
{
    // Get the paths where to load the dynamic backends from
    std::vector<std::string> backendPaths = DynamicBackendUtils::GetBackendPaths(overrideBackendPath);

    // Get the shared objects to try to load as dynamic backends
    std::vector<std::string> sharedObjects = DynamicBackendUtils::GetSharedObjects(backendPaths);

    // Create a list of dynamic backends
    m_DynamicBackends = DynamicBackendUtils::CreateDynamicBackends(sharedObjects);

    // Register the dynamic backends in the backend registry
    armnn::BackendIdSet registeredBackendIds = DynamicBackendUtils::RegisterDynamicBackends(m_DynamicBackends);

    // Add the registered dynamic backend ids to the list of supported backends
    m_DeviceSpec.AddSupportedBackends(registeredBackendIds, true);
}
#endif
} // namespace armnn
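
A minimal end-to-end sketch of the public API implemented above (illustrative only, not part of Runtime.cpp: it builds a trivial identity network, targets the reference backend CpuRef, and elides error handling):

#include <armnn/ArmNN.hpp>

#include <iostream>
#include <vector>

int main()
{
    using namespace armnn;

    // Build a trivial network: input -> output.
    INetworkPtr network = INetwork::Create();
    IConnectableLayer* input  = network->AddInputLayer(0);
    IConnectableLayer* output = network->AddOutputLayer(0);
    input->GetOutputSlot(0).Connect(output->GetInputSlot(0));
    input->GetOutputSlot(0).SetTensorInfo(TensorInfo({1, 4}, DataType::Float32));

    // Create the runtime (IRuntime::Create above) and optimize for the reference backend.
    IRuntime::CreationOptions options;
    IRuntimePtr runtime = IRuntime::Create(options);
    IOptimizedNetworkPtr optNet = Optimize(*network, {Compute::CpuRef}, runtime->GetDeviceSpec());

    // Load the optimized network (RuntimeImpl::LoadNetwork above).
    NetworkId netId;
    runtime->LoadNetwork(netId, std::move(optNet));

    // Run one synchronous inference (RuntimeImpl::EnqueueWorkload above).
    std::vector<float> inData{1.f, 2.f, 3.f, 4.f};
    std::vector<float> outData(4);
    TensorInfo inputInfo = runtime->GetInputTensorInfo(netId, 0);
    inputInfo.SetConstant(true); // ConstTensor requires the info to be marked constant
    InputTensors  inputs{{0, ConstTensor(inputInfo, inData.data())}};
    OutputTensors outputs{{0, Tensor(runtime->GetOutputTensorInfo(netId, 0), outData.data())}};
    runtime->EnqueueWorkload(netId, inputs, outputs);

    std::cout << "out[0] = " << outData[0] << std::endl;
    runtime->UnloadNetwork(netId);
    return 0;
}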