ArmNN 21.08
ClSoftmaxWorkload.hpp
//
// Copyright © 2020 Arm Ltd. All rights reserved.
// SPDX-License-Identifier: MIT
//

#pragma once

#include <armnn/Descriptors.hpp>

#include <arm_compute/core/Error.h>
#include <arm_compute/runtime/MemoryManagerOnDemand.h>
#include <arm_compute/runtime/CL/functions/CLSoftmaxLayer.h>

#include <backendsCommon/Workload.hpp>

namespace armnn
{

/// Checks whether the Compute Library CL backend can execute a softmax with the given
/// input/output tensor infos and descriptor.
arm_compute::Status ClSoftmaxWorkloadValidate(const TensorInfo& input,
                                              const TensorInfo& output,
                                              const SoftmaxDescriptor& descriptor);

/// OpenCL workload that runs softmax through arm_compute::CLSoftmaxLayer.
class ClSoftmaxWorkload : public BaseWorkload<SoftmaxQueueDescriptor>
{
public:
    ClSoftmaxWorkload(const SoftmaxQueueDescriptor& descriptor,
                      const WorkloadInfo& info,
                      std::shared_ptr<arm_compute::MemoryManagerOnDemand>& memoryManager,
                      const arm_compute::CLCompileContext& clCompileContext);
    void Execute() const override;

private:
    mutable arm_compute::CLSoftmaxLayer m_SoftmaxLayer;
};

} // namespace armnn
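
A minimal usage sketch (not part of the header above): a caller can run ClSoftmaxWorkloadValidate before creating the workload and inspect the returned arm_compute::Status, which is the pattern ArmNN's CL layer-support checks follow. The include path, tensor shapes and descriptor values below are illustrative assumptions, not taken from this file.

// Sketch: query CL backend support for a softmax configuration.
// Shapes, beta and axis are illustrative; the include path depends on the build setup.
#include "ClSoftmaxWorkload.hpp"

#include <armnn/Descriptors.hpp>
#include <armnn/Tensor.hpp>

#include <arm_compute/core/Error.h>

#include <iostream>

int main()
{
    using namespace armnn;

    TensorInfo input({ 1, 10 }, DataType::Float32);
    TensorInfo output({ 1, 10 }, DataType::Float32);

    SoftmaxDescriptor descriptor;
    descriptor.m_Beta = 1.0f;   // default scaling
    descriptor.m_Axis = -1;     // softmax over the last dimension

    arm_compute::Status status = ClSoftmaxWorkloadValidate(input, output, descriptor);
    if (status.error_code() != arm_compute::ErrorCode::OK)
    {
        std::cout << "CL softmax not supported: " << status.error_description() << std::endl;
        return 1;
    }
    std::cout << "CL softmax supported" << std::endl;
    return 0;
}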
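
For context on what the workload wraps, the following standalone Compute Library sketch shows the configure-once / run-per-inference pattern behind m_SoftmaxLayer: the workload's constructor configures the wrapped arm_compute::CLSoftmaxLayer and Execute() runs it. This is plain ACL API usage, not ArmNN code; the shape, beta and axis values are assumptions.

// Standalone Compute Library sketch of the configure/run pattern wrapped by ClSoftmaxWorkload.
#include <arm_compute/core/TensorInfo.h>
#include <arm_compute/core/TensorShape.h>
#include <arm_compute/core/Types.h>
#include <arm_compute/runtime/CL/CLScheduler.h>
#include <arm_compute/runtime/CL/CLTensor.h>
#include <arm_compute/runtime/CL/functions/CLSoftmaxLayer.h>

int main()
{
    using namespace arm_compute;

    CLScheduler::get().default_init();   // set up the OpenCL context and command queue

    CLTensor input;
    CLTensor output;
    input.allocator()->init(TensorInfo(TensorShape(10U, 1U), 1, DataType::F32));
    output.allocator()->init(TensorInfo(TensorShape(10U, 1U), 1, DataType::F32));

    CLSoftmaxLayer softmax;
    softmax.configure(&input, &output, 1.0f /*beta*/, 0 /*axis*/);   // done once, as in the workload constructor

    input.allocator()->allocate();
    output.allocator()->allocate();
    // Filling 'input' (via map()/unmap()) is omitted in this sketch.

    softmax.run();                        // what Execute() triggers per inference
    CLScheduler::get().sync();
    return 0;
}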