ArmNN 24.02
ClWorkloadFactory.hpp
//
// Copyright © 2017-2023 Arm Ltd and Contributors. All rights reserved.
// SPDX-License-Identifier: MIT
//
#pragma once

#include <armnn/IRuntime.hpp>
#include <armnn/Optional.hpp>

#include <armnn/backends/IBackendInternal.hpp>
#include <armnn/backends/WorkloadFactoryBase.hpp>

#include <aclCommon/BaseMemoryManager.hpp>

#include <arm_compute/core/CL/CLCompileContext.h>

namespace armnn
{

// ARM Compute OpenCL workload factory.
class ClWorkloadFactory : public WorkloadFactoryBase
{
public:
    ClWorkloadFactory(const std::shared_ptr<ClMemoryManager>& memoryManager);

    ClWorkloadFactory(const std::shared_ptr<ClMemoryManager>& memoryManager,
                      const IBackendInternal::IBackendSpecificModelContextPtr& modelContextPtr);

    void AfterWorkloadsCreated() override;

    const BackendId& GetBackendId() const override;

    static bool IsLayerSupported(const Layer& layer,
                                 Optional<DataType> dataType,
                                 std::string& outReasonIfUnsupported);

    static bool IsLayerSupported(const IConnectableLayer& layer,
                                 Optional<DataType> dataType,
                                 std::string& outReasonIfUnsupported,
                                 const ModelOptions& modelOptions);

    bool SupportsSubTensors() const override { return true; }

    ARMNN_DEPRECATED_MSG("Use ITensorHandleFactory::CreateSubTensorHandle instead")
    std::unique_ptr<ITensorHandle> CreateSubTensorHandle(ITensorHandle& parent,
                                                         TensorShape const& subTensorShape,
                                                         unsigned int const* subTensorOrigin) const override;

    ARMNN_DEPRECATED_MSG("Use ITensorHandleFactory::CreateTensorHandle instead")
    std::unique_ptr<ITensorHandle> CreateTensorHandle(const TensorInfo& tensorInfo,
                                                      const bool IsMemoryManaged = true) const override;

    ARMNN_DEPRECATED_MSG("Use ITensorHandleFactory::CreateTensorHandle instead")
    std::unique_ptr<ITensorHandle> CreateTensorHandle(const TensorInfo& tensorInfo,
                                                      DataLayout dataLayout,
                                                      const bool IsMemoryManaged = true) const override;

    std::unique_ptr<IWorkload> CreateWorkload(LayerType type,
                                              const QueueDescriptor& descriptor,
                                              const WorkloadInfo& info) const override;

private:
    template<typename FloatWorkload, typename Uint8Workload, typename QueueDescriptorType, typename... Args>
    static std::unique_ptr<IWorkload> MakeWorkload(const QueueDescriptorType& descriptor,
                                                   const WorkloadInfo& info,
                                                   Args&&... args);

    template <typename Workload, typename QueueDescriptorType, typename... Args>
    static std::unique_ptr<IWorkload> MakeWorkload(const QueueDescriptorType& descriptor,
                                                   const WorkloadInfo& info,
                                                   Args&&... args);

    void InitializeCLCompileContext();

    mutable std::shared_ptr<ClMemoryManager> m_MemoryManager;
    const IBackendInternal::IBackendSpecificModelContextPtr m_ModelContextPtr;
    arm_compute::CLCompileContext m_CLCompileContext;
};

} // namespace armnn
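For orientation, the sketch below shows one way this factory can be driven directly. It mirrors, as an assumption rather than code taken from this file, the wiring ClBackend uses when it creates the factory: a ClMemoryManager over an arm_compute::CLBufferAllocator, tensor handles from the (deprecated) CreateTensorHandle overload, and a workload created through the LayerType-dispatching CreateWorkload. The in-tree include paths, the explicit CLScheduler initialisation, and the ReLU activation example are illustrative assumptions; in current code the tensor handles would normally come from the backend's ITensorHandleFactory, which is what the ARMNN_DEPRECATED_MSG annotations above point to.

// Minimal usage sketch (illustrative, not part of ClWorkloadFactory.hpp).
// Assumes an OpenCL device is available and that this is built inside the
// ArmNN source tree; ArmNN normally performs this setup through ClBackend
// and ClContextControl rather than in user code.
#include <armnn/Descriptors.hpp>
#include <armnn/backends/WorkloadData.hpp>
#include <aclCommon/BaseMemoryManager.hpp>
#include <cl/ClWorkloadFactory.hpp>
#include <arm_compute/runtime/CL/CLBufferAllocator.h>
#include <arm_compute/runtime/CL/CLScheduler.h>
#include <memory>

void CreateReluWorkloadExample()
{
    using namespace armnn;

    // Initialise the ACL OpenCL scheduler (ClContextControl does this inside ArmNN).
    arm_compute::CLScheduler::get().default_init();

    // Assumption: pair the factory with a ClMemoryManager over an ACL buffer
    // allocator, the same pattern ClBackend follows when creating this factory.
    auto memoryManager = std::make_shared<ClMemoryManager>(
        std::make_unique<arm_compute::CLBufferAllocator>());
    ClWorkloadFactory factory(memoryManager);

    // Describe a 1x16 float tensor and create memory-managed CL tensor handles.
    // (CreateTensorHandle here is deprecated in favour of ITensorHandleFactory.)
    TensorInfo tensorInfo({ 1, 16 }, DataType::Float32);
    std::unique_ptr<ITensorHandle> input  = factory.CreateTensorHandle(tensorInfo);
    std::unique_ptr<ITensorHandle> output = factory.CreateTensorHandle(tensorInfo);

    // Fill a queue descriptor for a ReLU activation; CreateWorkload dispatches
    // on the LayerType to the matching CL workload.
    ActivationQueueDescriptor descriptor;
    descriptor.m_Parameters.m_Function = ActivationFunction::ReLu;
    descriptor.m_Inputs.push_back(input.get());
    descriptor.m_Outputs.push_back(output.get());

    WorkloadInfo info;
    info.m_InputTensorInfos  = { tensorInfo };
    info.m_OutputTensorInfos = { tensorInfo };

    std::unique_ptr<IWorkload> relu =
        factory.CreateWorkload(LayerType::Activation, descriptor, info);

    // relu->Execute() would run the kernel once the handles' memory has been
    // committed (e.g. after the memory manager's Acquire()); omitted here.
}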
Cross-references (definition locations for the symbols used above)
IRuntime.hpp
armnn::Optional (Optional.hpp:270)
armnn::ClWorkloadFactory::GetBackendId (ClWorkloadFactory.cpp:61)
WorkloadFactoryBase.hpp
armnn::WorkloadFactoryBase (WorkloadFactoryBase.hpp:13)
armnn::DataLayout (Types.hpp:62)
armnn::TensorInfo (Tensor.hpp:152)
armnn::IWorkload (IWorkload.hpp:23) - Workload interface to enqueue a layer computation.
armnn::ClWorkloadFactory (ClWorkloadFactory.hpp:21)
armnn::ClWorkloadFactory::CreateWorkload (ClWorkloadFactory.cpp:247) - Backends should implement their own CreateWorkload function with a switch statement.
BaseMemoryManager.hpp
armnn::ITensorHandle (ITensorHandle.hpp:16)
armnn::IBackendInternal (IBackendInternal.hpp:77)
armnn::TypedWorkload (Workload.hpp:101)
armnn::Layer (Layer.hpp:230)
armnn::TensorShape (Tensor.hpp:20)
Optional.hpp
armnn::WorkloadInfo (WorkloadInfo.hpp:16) - Contains information about TensorInfos of a layer.
IBackendInternal.hpp
armnn::QueueDescriptor (WorkloadData.hpp:24)
armnn::ClMemoryManager (BaseMemoryManager.hpp:79)
armnn::ITensorHandleFactory (ITensorHandleFactory.hpp:46)
armnn::ClWorkloadFactory::CreateTensorHandle (ClWorkloadFactory.cpp:201)
armnn::ClWorkloadFactory::IsLayerSupported (ClWorkloadFactory.cpp:46)
armnn::ClWorkloadFactory::AfterWorkloadsCreated (ClWorkloadFactory.cpp:66)
ARMNN_DEPRECATED_MSG (Deprecated.hpp:43)
armnn::ClWorkloadFactory::SupportsSubTensors (ClWorkloadFactory.hpp:42)
armnn::BackendId (BackendId.hpp:75)
armnn::ClWorkloadFactory::CreateSubTensorHandle (ClWorkloadFactory.cpp:222)
armnn (namespace; 01_00_quick_start.dox:6)
armnn::IConnectableLayer (INetwork.hpp:80) - Interface for a layer that is connectable to other layers via InputSlots and OutputSlots.
armnn::ModelOptions (BackendOptions.hpp:18) - alias for std::vector<BackendOptions>.
armnn::ClWorkloadFactory::ClWorkloadFactory (ClWorkloadFactory.cpp:188)
armnn::LayerType (Types.hpp:491) - When adding a new layer, adapt also the LastLayer enum value in the enum class LayerType.
armnn::IBackendInternal::IBackendSpecificModelContextPtr (IBackendInternal.hpp:96) - alias for std::shared_ptr<IBackendModelContext>.