From 6f92c8e9f8bb38dcf5dccf8deeff5112ecd8e37c Mon Sep 17 00:00:00 2001
From: Nikhil Raj
Date: Wed, 22 Nov 2023 11:41:15 +0000
Subject: Update Doxygen for 23.11

Signed-off-by: Nikhil Raj
Change-Id: I47cd933f5002cb94a73aa97689d7b3d9c93cb849
---
 23.11/_log_softmax_layer_8cpp_source.html | 205 ++++++++++++++++++++++++++++++
 1 file changed, 205 insertions(+)
 create mode 100644 23.11/_log_softmax_layer_8cpp_source.html
(limited to '23.11/_log_softmax_layer_8cpp_source.html')

diff --git a/23.11/_log_softmax_layer_8cpp_source.html b/23.11/_log_softmax_layer_8cpp_source.html
new file mode 100644
index 0000000000..eff5299b10
--- /dev/null
+++ b/23.11/_log_softmax_layer_8cpp_source.html
@@ -0,0 +1,205 @@

Arm NN: src/armnn/layers/LogSoftmaxLayer.cpp Source File
LogSoftmaxLayer.cpp
Go to the documentation of this file.

//
// Copyright © 2019-2023 Arm Ltd and Contributors. All rights reserved.
// SPDX-License-Identifier: MIT
//

#include "LogSoftmaxLayer.hpp"

#include "LayerCloneBase.hpp"

#include <armnn/TypesUtils.hpp>

#include <armnn/backends/WorkloadData.hpp>
#include <armnn/backends/WorkloadFactory.hpp>

namespace armnn
{

LogSoftmaxLayer::LogSoftmaxLayer(const LogSoftmaxDescriptor& param, const char* name)
    : LayerWithParameters(1, 1, LayerType::LogSoftmax, param, name) {}

std::unique_ptr<IWorkload> LogSoftmaxLayer::CreateWorkload(const IWorkloadFactory& factory) const
{
    LogSoftmaxQueueDescriptor descriptor;
    SetAdditionalInfo(descriptor);

    return factory.CreateWorkload(LayerType::LogSoftmax, descriptor, PrepInfoAndDesc(descriptor));
}

LogSoftmaxLayer* LogSoftmaxLayer::Clone(Graph& graph) const
{
    return CloneBase<LogSoftmaxLayer>(graph, m_Param, GetName());
}

void LogSoftmaxLayer::ValidateTensorShapesFromInputs()
{
    VerifyLayerConnections(1, CHECK_LOCATION());

    const TensorShape& outputShape = GetOutputSlot(0).GetTensorInfo().GetShape();

    VerifyShapeInferenceType(outputShape, m_ShapeInferenceMethod);

    auto inferredShapes = InferOutputShapes({ GetInputSlot(0).GetTensorInfo().GetShape() });
    ARMNN_ASSERT(inferredShapes.size() == 1);

    ValidateAndCopyShape(outputShape, inferredShapes[0], m_ShapeInferenceMethod, "LogSoftmaxLayer");
}

void LogSoftmaxLayer::ExecuteStrategy(IStrategy& strategy) const
{
    strategy.ExecuteStrategy(this, GetParameters(), {}, GetName());
}

} // namespace armnn
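The listing above is the internal layer class; applications normally create it indirectly through the graph-building API. Below is a minimal sketch, not part of this file, of how a LogSoftmax layer might be added via INetwork. The binding ids, layer names, tensor shape { 1, 10 } and Float32 data type are illustrative assumptions, and the network is neither optimized nor executed here.

#include <armnn/Descriptors.hpp>
#include <armnn/INetwork.hpp>
#include <armnn/Tensor.hpp>
#include <armnn/Types.hpp>

int main()
{
    using namespace armnn;

    INetworkPtr network = INetwork::Create();

    // LogSoftmaxDescriptor is a typedef of SoftmaxDescriptor: m_Beta scales the
    // input values, m_Axis selects the dimension the log softmax runs over.
    LogSoftmaxDescriptor descriptor;
    descriptor.m_Beta = 1.0f;
    descriptor.m_Axis = -1;

    IConnectableLayer* input      = network->AddInputLayer(0, "input");
    IConnectableLayer* logSoftmax = network->AddLogSoftmaxLayer(descriptor, "log_softmax");
    IConnectableLayer* output     = network->AddOutputLayer(0, "output");

    // Connect input -> log softmax -> output and set the tensor metadata.
    input->GetOutputSlot(0).Connect(logSoftmax->GetInputSlot(0));
    logSoftmax->GetOutputSlot(0).Connect(output->GetInputSlot(0));

    TensorInfo info({ 1, 10 }, DataType::Float32);
    input->GetOutputSlot(0).SetTensorInfo(info);
    logSoftmax->GetOutputSlot(0).SetTensorInfo(info);

    return 0;
}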
Cross-references for the symbols used above:

#define ARMNN_ASSERT(COND)
    Definition: Assert.hpp:14

void ExecuteStrategy(IStrategy &strategy) const override
    Apply a visitor to this layer.

LogSoftmaxLayer *Clone(Graph &graph) const override
    Creates a dynamically-allocated copy of this layer.

const TensorInfo &GetTensorInfo() const override
    Definition: Layer.cpp:92

LogSoftmaxLayer
    This layer represents a log softmax operation.

#define CHECK_LOCATION()
    Definition: Exceptions.hpp:203

void ValidateAndCopyShape(const TensorShape &outputShape, const TensorShape &inferredShape, const ShapeInferenceMethod shapeInferenceMethod, const std::string &layerName, const unsigned int outputSlotIndex=0)
    Definition: Layer.cpp:435

LogSoftmaxLayer(const LogSoftmaxDescriptor &param, const char *name)
    Constructor to create a LogSoftmaxLayer.

const OutputSlot &GetOutputSlot(unsigned int index=0) const override
    Get the const output slot handle by slot index.
    Definition: Layer.hpp:339

void LogSoftmax(Decoder<float> &input, Encoder<float> &output, const TensorInfo &inputInfo, const LogSoftmaxDescriptor &descriptor)
    Definition: LogSoftmax.cpp:29
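Numerically, log softmax computes y_i = beta * x_i - log(sum_j exp(beta * x_j)) along the chosen axis. The sketch below is a standalone, numerically stable 1-D illustration of that formula; it is not the Decoder/Encoder-based reference implementation listed above, and the helper name LogSoftmax1D is made up for this example.

#include <algorithm>
#include <cmath>
#include <cstdio>
#include <vector>

// Numerically stable log softmax over a non-empty 1-D vector. Subtracting the
// maximum before exponentiating avoids overflow in std::exp; beta is typically 1.
std::vector<float> LogSoftmax1D(const std::vector<float>& input, float beta = 1.0f)
{
    const float maxValue = *std::max_element(input.begin(), input.end());

    float sum = 0.0f;
    for (float value : input)
    {
        sum += std::exp(beta * (value - maxValue));
    }
    const float logSum = std::log(sum);

    std::vector<float> output(input.size());
    for (size_t i = 0; i < input.size(); ++i)
    {
        output[i] = beta * (input[i] - maxValue) - logSum;
    }
    return output;
}

int main()
{
    for (float value : LogSoftmax1D({ 1.0f, 2.0f, 3.0f }))
    {
        std::printf("%f\n", value);
    }
    return 0;
}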
const InputSlot &GetInputSlot(unsigned int index) const override
    Get a const input slot handle by slot index.
    Definition: Layer.hpp:337

const LogSoftmaxDescriptor &GetParameters() const override

const char *GetName() const override
    Returns the name of the layer.
    Definition: Layer.hpp:332

const TensorInfo &GetTensorInfo() const override
    Gets the TensorInfo for this InputSlot.
    Definition: Layer.cpp:592

LogSoftmaxDescriptor m_Param
    The parameters for the layer (not including tensor-valued weights etc.).

WorkloadInfo PrepInfoAndDesc(QueueDescriptor &descriptor) const
    Helper function to reduce duplication in *Layer::CreateWorkload.

void ValidateTensorShapesFromInputs() override
    Check if the input tensor shape(s) will lead to a valid configuration of LogSoftmaxLayer.

void VerifyShapeInferenceType(const TensorShape &outputShape, ShapeInferenceMethod shapeInferenceMethod)
    Definition: Layer.cpp:504

void SetAdditionalInfo(QueueDescriptor &descriptor) const
    Definition: Layer.cpp:287

virtual std::unique_ptr<IWorkload> CreateWorkload(const IWorkloadFactory &factory) const override
    Makes a workload for the LogSoftmax type.

const TensorShape &GetShape() const
    Definition: Tensor.hpp:191

namespace armnn
    Copyright (c) 2021 ARM Limited and Contributors.

std::vector<TensorShape> InferOutputShapes(const std::vector<TensorShape> &inputShapes) const override
    Infer the shape of the output(s) based on the provided input shape(s).
    Definition: Layer.cpp:410

void VerifyLayerConnections(unsigned int expectedConnections, const CheckLocation &location) const
    Definition: Layer.cpp:391

ShapeInferenceMethod m_ShapeInferenceMethod
    Definition: Layer.hpp:441

LayerType
    When adding a new layer, adapt also the LastLayer enum value in the enum class LayerType below.
    Definition: Types.hpp:491

virtual std::unique_ptr<IWorkload> CreateWorkload(LayerType type, const QueueDescriptor &descriptor, const WorkloadInfo &info) const = 0
    Backends should implement their own CreateWorkload function with a switch statement.
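As a rough illustration of that switch-based dispatch, here is a hypothetical free function; it is not an actual Arm NN backend, and a real override would construct and return the backend's LogSoftmax workload where the comment indicates.

#include <armnn/Types.hpp>
#include <armnn/backends/IWorkload.hpp>
#include <armnn/backends/WorkloadData.hpp>
#include <armnn/backends/WorkloadInfo.hpp>

#include <memory>

// Hypothetical sketch only: illustrates the switch-on-LayerType pattern that a
// backend's CreateWorkload override typically follows.
std::unique_ptr<armnn::IWorkload> CreateWorkloadSketch(armnn::LayerType type,
                                                       const armnn::QueueDescriptor& /*descriptor*/,
                                                       const armnn::WorkloadInfo& /*info*/)
{
    using namespace armnn;

    switch (type)
    {
        case LayerType::LogSoftmax:
            // A real backend would downcast the descriptor to LogSoftmaxQueueDescriptor
            // and construct its LogSoftmax workload here.
            return nullptr;
        default:
            // Layer type not supported by this sketch.
            return nullptr;
    }
}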
virtual void ExecuteStrategy(const IConnectableLayer *layer, const armnn::BaseDescriptor &descriptor, const std::vector<armnn::ConstTensor> &constants, const char *name, const armnn::LayerBindingId id=0) = 0

LogSoftmaxDescriptor
    A SoftmaxDescriptor for the SoftmaxLayer.

--
cgit v1.2.1