ArmNN 24.02
OutputHandler.hpp
//
// Copyright © 2017 Arm Ltd. All rights reserved.
// SPDX-License-Identifier: MIT
//
#pragma once

#include <armnn/backends/ITensorHandleFactory.hpp>
#include <armnn/backends/ITensorHandle.hpp>

#include <armnn/Descriptors.hpp>
#include <armnn/INetwork.hpp>
#include <armnn/Tensor.hpp>
#include <armnn/Types.hpp>

#include <memory>
#include <set>
#include <string>
#include <vector>

namespace armnn
{

class ITensorHandle;
class IWorkloadFactory;
class OutputSlot;
class WorkloadDataCollector;

class OutputHandler
{
public:
    /// @brief - Sets the TensorInfo used by this output handler.
    /// @param tensorInfo - TensorInfo for the output.
    void SetTensorInfo(const TensorInfo& tensorInfo);

    /// @brief - Creates tensor handles used by the intermediate tensors. Does not allocate memory.
    /// @param factory - Factory to be used for handler creation.
    void CreateTensorHandles(const IWorkloadFactory& factory, const bool IsMemoryManaged = true);
    void CreateTensorHandles(const ITensorHandleFactory& factory, const bool IsMemoryManaged = true);

    /// @brief - Gets the matching TensorInfo for the output.
    /// @return - Reference to the output TensorInfo.
    const TensorInfo& GetTensorInfo() const { return m_TensorInfo; }

    /// @brief - Gets the allocated tensor memory.
    /// @return - Pointer to the tensor memory.
    ITensorHandle* GetData() const { return m_TensorHandle.get(); }

    /// Fill the outputs for a given queue descriptor.
    void CollectWorkloadOutputs(WorkloadDataCollector& dataCollector) const;

    /// Sets the tensor handle to be used by this output handler.
    void SetData(std::unique_ptr<ITensorHandle> data) { m_TensorHandle = std::move(data); }

    void SetAllocatedData();

    /// Makes the stored allocated tensor handle the active tensor handle.
    void UseAllocatedData() { m_TensorHandle = m_AllocatedTensorHandle; }

    /// @brief Returns true if SetTensorInfo() has been called at least once on this handler.
    bool IsTensorInfoSet() const { return m_bTensorInfoSet; }
private:
    std::shared_ptr<ITensorHandle> m_TensorHandle;
    std::shared_ptr<ITensorHandle> m_AllocatedTensorHandle;
    TensorInfo m_TensorInfo;
    bool m_bTensorInfoSet = false;
};

} //namespace armnn
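
For context, the sketch below shows how the handler is typically driven: describe the output with SetTensorInfo(), create the (still unallocated) handle with CreateTensorHandles(), then query it with GetData(). This is a minimal illustration only, assuming an ArmNN build tree and a backend-provided IWorkloadFactory; the function PrepareOutput is a hypothetical helper and not part of ArmNN.

// Illustrative usage sketch (not part of OutputHandler.hpp). Assumes an ArmNN
// build tree and a backend-provided IWorkloadFactory; PrepareOutput is a
// hypothetical helper used only for this example.
#include "OutputHandler.hpp"

#include <armnn/Tensor.hpp>
#include <armnn/Types.hpp>

void PrepareOutput(armnn::OutputHandler& handler, const armnn::IWorkloadFactory& factory)
{
    // Describe the tensor this output will produce.
    const armnn::TensorInfo info(armnn::TensorShape({1, 16}), armnn::DataType::Float32);
    handler.SetTensorInfo(info);

    // Create the handle for the intermediate tensor; no memory is allocated yet.
    if (handler.IsTensorInfoSet())
    {
        handler.CreateTensorHandles(factory, /*IsMemoryManaged=*/true);
    }

    // The (still unallocated) handle can now be handed to downstream code.
    armnn::ITensorHandle* data = handler.GetData();
    (void)data;
}

The ITensorHandleFactory overload of CreateTensorHandles can be called the same way when a specific tensor handle factory, rather than a workload factory, is available.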