ArmNN 24.02 — Runtime.hpp (source listing).
Go to the documentation of this file.
1 //
2 // Copyright © 2017, 2023 Arm Ltd and Contributors. All rights reserved.
3 // SPDX-License-Identifier: MIT
4 //
5 #pragma once
6 
7 #include "LoadedNetwork.hpp"
8 #include "DeviceSpec.hpp"
9 
10 #include <armnn/INetwork.hpp>
11 #include <armnn/IRuntime.hpp>
12 #include <armnn/Tensor.hpp>
13 #include <armnn/BackendId.hpp>
14 
16 
17 #include <client/include/IInitialiseProfilingService.hpp>
18 #include <client/include/IProfilingService.hpp>
19 #include <client/include/IReportStructure.hpp>
20 
21 #include <mutex>
22 #include <unordered_map>
23 
24 namespace armnn
25 {
26 using LoadedNetworks = std::unordered_map<NetworkId, std::unique_ptr<LoadedNetwork>>;
29 
31 {
32 public:
33  /// Loads a complete network into the Runtime.
34  /// @param [out] networkIdOut - Unique identifier for the network is returned in this reference.
35  /// @param [in] network - Complete network to load into the Runtime.
36  /// The runtime takes ownership of the network once passed in.
37  /// @return armnn::Status
38  Status LoadNetwork(NetworkId& networkIdOut, IOptimizedNetworkPtr network);
39 
40  /// Load a complete network into the IRuntime.
41  /// @param [out] networkIdOut Unique identifier for the network is returned in this reference.
42  /// @param [in] network Complete network to load into the IRuntime.
43  /// @param [out] errorMessage Error message if there were any errors.
44  /// The runtime takes ownership of the network once passed in.
45  /// @return armnn::Status
46  Status LoadNetwork(NetworkId& networkIdOut,
47  IOptimizedNetworkPtr network,
48  std::string& errorMessage);
49 
 /// Loads a complete network into the IRuntime with additional network properties.
 /// @param [out] networkIdOut Unique identifier for the network is returned in this reference.
 /// @param [in] network Complete network to load into the IRuntime.
 /// The runtime takes ownership of the network once passed in.
 /// @param [out] errorMessage Error message if there were any errors.
 /// @param [in] networkProperties Additional properties of the network (see INetworkProperties
 /// in IRuntime.hpp).
 /// @return armnn::Status
 Status LoadNetwork(NetworkId& networkIdOut,
                    IOptimizedNetworkPtr network,
                    std::string& errorMessage,
                    const INetworkProperties& networkProperties);
54 
57 
 /// Imports the given input tensors for the named network, using @p forceImportMemorySource
 /// as the memory source of the import.
 /// @return Ids of the imported inputs, usable as preImportedInputIds in EnqueueWorkload().
 std::vector<ImportedInputId> ImportInputs(NetworkId networkId, const InputTensors& inputTensors,
                                           MemorySource forceImportMemorySource);

 /// Imports the given output tensors for the named network, using @p forceImportMemorySource
 /// as the memory source of the import.
 /// @return Ids of the imported outputs, usable as preImportedOutputIds in EnqueueWorkload().
 std::vector<ImportedOutputId> ImportOutputs(NetworkId networkId, const OutputTensors& outputTensors,
                                             MemorySource forceImportMemorySource);

 /// Releases the previously imported inputs identified by @p inputIds for the given network.
 void ClearImportedInputs(NetworkId networkId, const std::vector<ImportedInputId> inputIds);

 /// Releases the previously imported outputs identified by @p outputIds for the given network.
 void ClearImportedOutputs(NetworkId networkId, const std::vector<ImportedOutputId> outputIds);
65 
66  // Evaluates network using input in inputTensors, outputs filled into outputTensors.
68  const InputTensors& inputTensors,
69  const OutputTensors& outputTensors,
70  std::vector<ImportedInputId> preImportedInputIds = {},
71  std::vector<ImportedOutputId> preImportedOutputIds = {});
72 
 /// This is an experimental function.
 /// Evaluates a network using input in inputTensors and outputs filled into outputTensors.
 /// This function performs a thread safe execution of the network. Returns once execution is complete.
 /// Will block until this and any other thread using the same workingMem object completes.
 /// @param workingMemHandle Working memory handle obtained from CreateWorkingMemHandle();
 /// use a distinct handle per thread for overlapped execution.
 /// @param inputTensors Inputs for the run.
 /// @param outputTensors Storage the outputs are written into.
 /// @param preImportedInputs Ids previously returned by ImportInputs().
 /// @param preImportedOutputs Ids previously returned by ImportOutputs().
 /// @return armnn::Status
 Status Execute(IWorkingMemHandle& workingMemHandle,
                const InputTensors& inputTensors,
                const OutputTensors& outputTensors,
                std::vector<ImportedInputId> preImportedInputs,
                std::vector<ImportedOutputId> preImportedOutputs);
82 
83  /// Unloads a network from the Runtime.
84  /// At the moment this only removes the network from the m_Impl->m_Network.
85  /// This might need more work in the future to be AndroidNN compliant.
86  /// @param [in] networkId Unique identifier for the network to be unloaded. Generated in LoadNetwork().
87  /// @return armnn::Status
88  Status UnloadNetwork(NetworkId networkId);
89 
 /// Returns the device specification (device specific knowledge to be passed to the optimizer).
 const IDeviceSpec& GetDeviceSpec() const { return m_DeviceSpec; }
91 
92  /// Gets the profiler corresponding to the given network id.
93  /// @param networkId The id of the network for which to get the profile.
94  /// @return A pointer to the requested profiler, or nullptr if not found.
95  const std::shared_ptr<IProfiler> GetProfiler(NetworkId networkId) const;
96 
97  /// Create a new unique WorkingMemHandle object. Create multiple handles if you wish to have
98  /// overlapped Execution by calling this function from different threads.
99  std::unique_ptr<IWorkingMemHandle> CreateWorkingMemHandle(NetworkId networkId);
100 
101  /// Registers a callback function to debug layers performing custom computations on intermediate tensors.
102  /// @param networkId The id of the network to register the callback.
103  /// @param func callback function to pass to the debug layer.
104  void RegisterDebugCallback(NetworkId networkId, const DebugCallbackFunction& func);
105 
106  /// Creates a runtime for workload execution.
107  RuntimeImpl(const IRuntime::CreationOptions& options);
108 
109  ~RuntimeImpl();
110 
 /// IReportStructure override, reporting to the given profiling service.
 /// NOTE: we won't need the profiling service reference but it is good to pass the service
 /// in this way to facilitate other implementations down the road.
 void ReportStructure(arm::pipe::IProfilingService& profilingService) override;

 /// IInitialiseProfilingService override, initialising the given profiling service.
 void InitialiseProfilingService(arm::pipe::IProfilingService& profilingService) override;
116 
117 private:
118  friend void RuntimeLoadedNetworksReserve(RuntimeImpl* runtime); // See RuntimeTests.cpp
119 
120  friend arm::pipe::IProfilingService& GetProfilingService(RuntimeImpl* runtime); // See RuntimeTests.cpp
121 
122  int GenerateNetworkId();
123 
124  LoadedNetwork* GetLoadedNetworkPtr(NetworkId networkId) const;
125 
 /// Looks up the LoadedNetwork registered under @p networkId and, if found, invokes
 /// @p f with a raw (non-owning) pointer to it. Unknown ids are silently a no-op.
 /// When threads are enabled the lookup and the call both run under m_Mutex, so @p f
 /// must not re-enter this runtime in a way that re-acquires the mutex.
 template<typename Func>
 void LoadedNetworkFuncSafe(NetworkId networkId, Func f)
 {
#if !defined(ARMNN_DISABLE_THREADS)
     // Serialise access to m_LoadedNetworks for the whole lookup + callback.
     std::lock_guard<std::mutex> lockGuard(m_Mutex);
#endif
     auto iter = m_LoadedNetworks.find(networkId);
     if (iter != m_LoadedNetworks.end())
     {
         f(iter->second.get());
     }
 }
138 
139  /// Loads any available/compatible dynamic backend in the runtime.
140  void LoadDynamicBackends(const std::string& overrideBackendPath);
141 
142 #if !defined(ARMNN_DISABLE_THREADS)
143  mutable std::mutex m_Mutex;
144 #endif
145 
146  /// Map of Loaded Networks with associated GUID as key
147  LoadedNetworks m_LoadedNetworks;
148 
 /// Per-backend context objects, keyed by BackendId.
 std::unordered_map<BackendId, IBackendInternal::IBackendContextPtr> m_BackendContexts;

 /// Counter used to mint unique network ids (see GenerateNetworkId()).
 int m_NetworkIdCounter;

 /// Device specification exposed via GetDeviceSpec().
 DeviceSpec m_DeviceSpec;
154 
155  /// List of dynamic backends loaded in the runtime
156  std::vector<DynamicBackendPtr> m_DynamicBackends;
157 
158  /// Profiling Service Instance
159  std::unique_ptr<arm::pipe::IProfilingService> m_ProfilingService;
160 
161  /// Keep track of backend ids of the custom allocators that this instance of the runtime added. The
162  /// destructor can then clean up for this runtime.
163  std::set<BackendId> m_AllocatorsAddedByThisRuntime;
164 };
165 
166 } // namespace armnn
armnn::RuntimeImpl::ReportStructure
void ReportStructure(arm::pipe::IProfilingService &profilingService) override
Definition: Runtime.cpp:302
armnn::IOptimizedNetworkPtr
std::unique_ptr< IOptimizedNetwork, void(*)(IOptimizedNetwork *network)> IOptimizedNetworkPtr
Definition: INetwork.hpp:340
IRuntime.hpp
armnn::RuntimeImpl::ClearImportedOutputs
void ClearImportedOutputs(NetworkId networkId, const std::vector< ImportedOutputId > outputIds)
Definition: Runtime.cpp:639
armnn::InputTensors
std::vector< std::pair< LayerBindingId, class ConstTensor > > InputTensors
Definition: Tensor.hpp:394
DeviceSpec.hpp
LoadedNetwork.hpp
armnn::TensorInfo
Definition: Tensor.hpp:152
BackendId.hpp
armnn::LoadedNetwork
Definition: LoadedNetwork.hpp:42
armnn::RuntimeImpl::LoadNetwork
Status LoadNetwork(NetworkId &networkIdOut, IOptimizedNetworkPtr network)
Loads a complete network into the Runtime.
Definition: Runtime.cpp:166
armnn::RuntimeImpl::GetProfiler
const std::shared_ptr< IProfiler > GetProfiler(NetworkId networkId) const
Gets the profiler corresponding to the given network id.
Definition: Runtime.cpp:290
armnn::RuntimeImpl::ClearImportedInputs
void ClearImportedInputs(NetworkId networkId, const std::vector< ImportedInputId > inputIds)
Definition: Runtime.cpp:635
armnn::RuntimeImpl::~RuntimeImpl
~RuntimeImpl()
Definition: Runtime.cpp:548
armnn::RuntimeImpl::GetDeviceSpec
const IDeviceSpec & GetDeviceSpec() const
Definition: Runtime.hpp:90
armnn::OutputTensors
std::vector< std::pair< LayerBindingId, class Tensor > > OutputTensors
Definition: Tensor.hpp:395
armnn::experimental::IWorkingMemHandle
Definition: IWorkingMemHandle.hpp:20
armnn::RuntimeImpl::ImportInputs
std::vector< ImportedInputId > ImportInputs(NetworkId networkId, const InputTensors &inputTensors, MemorySource forceImportMemorySource)
Definition: Runtime.cpp:623
armnn::RuntimeImpl::UnloadNetwork
Status UnloadNetwork(NetworkId networkId)
Unloads a network from the Runtime.
Definition: Runtime.cpp:232
armnn::IReportStructure
arm::pipe::IReportStructure IReportStructure
Definition: Runtime.hpp:27
armnn::RuntimeImpl::EnqueueWorkload
Status EnqueueWorkload(NetworkId networkId, const InputTensors &inputTensors, const OutputTensors &outputTensors, std::vector< ImportedInputId > preImportedInputIds={}, std::vector< ImportedOutputId > preImportedOutputIds={})
Definition: Runtime.cpp:644
armnn::RuntimeImpl::ImportOutputs
std::vector< ImportedOutputId > ImportOutputs(NetworkId networkId, const OutputTensors &outputTensors, MemorySource forceImportMemorySource)
Definition: Runtime.cpp:629
armnn::RuntimeImpl::GetProfilingService
friend arm::pipe::IProfilingService & GetProfilingService(RuntimeImpl *runtime)
Definition: TestUtils.cpp:59
armnn::NetworkId
int NetworkId
Definition: IRuntime.hpp:35
INetwork.hpp
armnn::IInitialiseProfilingService
arm::pipe::IInitialiseProfilingService IInitialiseProfilingService
Definition: Runtime.hpp:28
armnn::LoadedNetworks
std::unordered_map< NetworkId, std::unique_ptr< LoadedNetwork > > LoadedNetworks
Definition: Runtime.hpp:26
armnn::LayerBindingId
int LayerBindingId
Type of identifiers for bindable layers (inputs, outputs).
Definition: Types.hpp:309
armnn::RuntimeImpl::RuntimeImpl
RuntimeImpl(const IRuntime::CreationOptions &options)
Creates a runtime for workload execution.
Definition: Runtime.cpp:323
DynamicBackend.hpp
armnn::INetworkProperties
Definition: IRuntime.hpp:43
armnn::RuntimeImpl::RuntimeLoadedNetworksReserve
friend void RuntimeLoadedNetworksReserve(RuntimeImpl *runtime)
armnn::RuntimeImpl::RegisterDebugCallback
void RegisterDebugCallback(NetworkId networkId, const DebugCallbackFunction &func)
Registers a callback function to debug layers performing custom computations on intermediate tensors.
Definition: Runtime.cpp:766
Tensor.hpp
armnn::Status
Status
Definition: Types.hpp:42
armnn::IRuntime::CreationOptions
Definition: IRuntime.hpp:78
armnn::DebugCallbackFunction
std::function< void(LayerGuid guid, unsigned int slotIndex, ITensorHandle *tensorHandle)> DebugCallbackFunction
Define the type of callback for the Debug layer to call.
Definition: Types.hpp:398
armnn::MemorySource
MemorySource
Define the Memory Source to reduce copies.
Definition: Types.hpp:244
armnn::RuntimeImpl::Execute
Status Execute(IWorkingMemHandle &workingMemHandle, const InputTensors &inputTensors, const OutputTensors &outputTensors, std::vector< ImportedInputId > preImportedInputs, std::vector< ImportedOutputId > preImportedOutputs)
This is an experimental function.
Definition: Runtime.cpp:696
armnn::RuntimeImpl
Definition: Runtime.hpp:30
armnn
Copyright (c) 2021 ARM Limited and Contributors.
Definition: 01_00_quick_start.dox:6
armnn::RuntimeImpl::InitialiseProfilingService
void InitialiseProfilingService(arm::pipe::IProfilingService &profilingService) override
Definition: Runtime.cpp:317
armnn::IDeviceSpec
Device specific knowledge to be passed to the optimizer.
Definition: Types.hpp:299
armnn::RuntimeImpl::CreateWorkingMemHandle
std::unique_ptr< IWorkingMemHandle > CreateWorkingMemHandle(NetworkId networkId)
Create a new unique WorkingMemHandle object.
Definition: Runtime.cpp:735
armnn::RuntimeImpl::GetInputTensorInfo
armnn::TensorInfo GetInputTensorInfo(NetworkId networkId, LayerBindingId layerId) const
Definition: Runtime.cpp:613
armnn::RuntimeImpl::GetOutputTensorInfo
armnn::TensorInfo GetOutputTensorInfo(NetworkId networkId, LayerBindingId layerId) const
Definition: Runtime.cpp:618