From 6940dd720ebb6b3d1df8ca203ab696daefe58189 Mon Sep 17 00:00:00 2001
From: Jim Flynn
Date: Fri, 20 Mar 2020 12:25:56 +0000
Subject: renamed Documentation folder 20.02 and added .nojekyll file

Signed-off-by: Jim Flynn
---
 20.02/_output_handler_8hpp_source.xhtml | 133 ++++++++++++++++++++++++++++++++
 1 file changed, 133 insertions(+)
 create mode 100644 20.02/_output_handler_8hpp_source.xhtml

(limited to '20.02/_output_handler_8hpp_source.xhtml')

diff --git a/20.02/_output_handler_8hpp_source.xhtml b/20.02/_output_handler_8hpp_source.xhtml
new file mode 100644
index 0000000000..684cca9fb9
--- /dev/null
+++ b/20.02/_output_handler_8hpp_source.xhtml
@@ -0,0 +1,133 @@

ArmNN: src/armnn/OutputHandler.hpp Source File
OutputHandler.hpp
//
// Copyright © 2017 Arm Ltd. All rights reserved.
// SPDX-License-Identifier: MIT
//
#pragma once

#include <armnn/Descriptors.hpp>
#include <armnn/INetwork.hpp>
#include <armnn/Tensor.hpp>
#include <armnn/Types.hpp>

#include <memory>
#include <set>
#include <string>
#include <vector>

#include <boost/assert.hpp>

namespace armnn
{

// Forward declarations of the ArmNN types used below.
class ITensorHandle;
class ITensorHandleFactory;
class IWorkloadFactory;
class OutputSlot;
class WorkloadDataCollector;

class OutputHandler
{
public:
    /// @brief - Sets the TensorInfo used by this output handler.
    /// @param tensorInfo - TensorInfo for the output.
    void SetTensorInfo(const TensorInfo& tensorInfo);

    /// @brief - Creates tensor handles used by the intermediate tensors. Does not allocate memory.
    /// @param factory - Factory to be used for handler creation.
    void CreateTensorHandles(const IWorkloadFactory& factory, const bool IsMemoryManaged = true);
    void CreateTensorHandles(const ITensorHandleFactory& factory, const bool IsMemoryManaged = true);

    /// @brief - Gets the matching TensorInfo for the output.
    /// @return - Reference to the output TensorInfo.
    const TensorInfo& GetTensorInfo() const { return m_TensorInfo; }

    /// @brief - Gets the allocated tensor memory.
    /// @return - Pointer to the tensor memory.
    ITensorHandle* GetData() const { return m_TensorHandle.get(); }

    /// Fill the outputs for a given queue descriptor.
    void CollectWorkloadOutputs(WorkloadDataCollector& dataCollector) const;

    /// Takes ownership of the given tensor handle and stores it as this output's data.
    void SetData(std::unique_ptr<ITensorHandle> data) { m_TensorHandle = std::move(data); }

    /// @brief Returns true if SetTensorInfo() has been called at least once on this.
    bool IsTensorInfoSet() const { return m_bTensorInfoSet; }

private:
    std::unique_ptr<ITensorHandle> m_TensorHandle; // backing tensor handle, owned by this handler
    TensorInfo m_TensorInfo;                       // shape/type information for the output
    bool m_bTensorInfoSet = false;                 // set once SetTensorInfo() has been called
};

} // namespace armnn
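
For context, the sketch below is not part of this patch; it illustrates how the class above is typically driven while a network is being loaded: the output's TensorInfo is set first, a backend workload factory then creates the (still unallocated) tensor handle, and the handle is finally exposed to the workloads that consume the output. ConfigureOutput is a hypothetical helper written for illustration and assumes an ArmNN 20.02 source tree where this internal header is reachable.

// Minimal usage sketch, assuming the ArmNN internal header above is on the include path.
// ConfigureOutput is a hypothetical helper, not part of the ArmNN API.
#include "OutputHandler.hpp"

#include <armnn/Tensor.hpp>
#include <armnn/Types.hpp>

namespace
{

void ConfigureOutput(armnn::OutputHandler& handler, const armnn::IWorkloadFactory& factory)
{
    // Describe the tensor this output will produce: a 1x3x224x224 float tensor.
    armnn::TensorShape shape({ 1u, 3u, 224u, 224u });
    armnn::TensorInfo  info(shape, armnn::DataType::Float32);
    handler.SetTensorInfo(info);

    // Create the tensor handle via the backend's factory. No memory is
    // allocated at this point; that happens later under the backend's control.
    handler.CreateTensorHandles(factory, /*IsMemoryManaged=*/ true);

    // The handle is now available to workloads that consume this output
    // (normally gathered through CollectWorkloadOutputs).
    armnn::ITensorHandle* outputData = handler.GetData();
    (void) outputData;
}

} // anonymous namespace

Keeping handle creation separate from memory allocation is what lets the runtime plan memory for intermediate tensors before any of it is committed.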
-- cgit v1.2.1