diff options
Diffstat (limited to 'src/armnn/layers')
-rw-r--r--  src/armnn/layers/LogSoftmaxLayer.cpp | 50
-rw-r--r--  src/armnn/layers/LogSoftmaxLayer.hpp | 44
2 files changed, 94 insertions, 0 deletions
diff --git a/src/armnn/layers/LogSoftmaxLayer.cpp b/src/armnn/layers/LogSoftmaxLayer.cpp new file mode 100644 index 0000000000..6ca15b2d6f --- /dev/null +++ b/src/armnn/layers/LogSoftmaxLayer.cpp @@ -0,0 +1,50 @@ +// +// Copyright © 2019 Arm Ltd. All rights reserved. +// SPDX-License-Identifier: MIT +// + +#include "LogSoftmaxLayer.hpp" + +#include "LayerCloneBase.hpp" + +#include <armnn/TypesUtils.hpp> + +#include <backendsCommon/WorkloadData.hpp> +#include <backendsCommon/WorkloadFactory.hpp> + +namespace armnn +{ + +LogSoftmaxLayer::LogSoftmaxLayer(const LogSoftmaxDescriptor ¶m, const char* name) + : LayerWithParameters(1, 1, LayerType::LogSoftmax, param, name) {} + +std::unique_ptr<IWorkload> LogSoftmaxLayer::CreateWorkload(const Graph& graph, const IWorkloadFactory& factory) const +{ + LogSoftmaxQueueDescriptor descriptor; + return factory.CreateLogSoftmax(descriptor, PrepInfoAndDesc(descriptor, graph)); +} + +LogSoftmaxLayer* LogSoftmaxLayer::Clone(Graph& graph) const +{ + return CloneBase<LogSoftmaxLayer>(graph, m_Param, GetName()); +} + +void LogSoftmaxLayer::ValidateTensorShapesFromInputs() +{ + VerifyLayerConnections(1, CHECK_LOCATION()); + + auto inferredShapes = InferOutputShapes({ GetInputSlot(0).GetConnection()->GetTensorInfo().GetShape() }); + BOOST_ASSERT(inferredShapes.size() == 1); + + ConditionalThrowIfNotEqual<LayerValidationException>( + "LogSoftmaxLayer: TensorShape set on OutputSlot[0] does not match the inferred shape.", + GetOutputSlot(0).GetTensorInfo().GetShape(), + inferredShapes[0]); +} + +void LogSoftmaxLayer::Accept(ILayerVisitor& visitor) const +{ + visitor.VisitLogSoftmaxLayer(this, GetParameters(), GetName()); +} + +} // namespace armnn diff --git a/src/armnn/layers/LogSoftmaxLayer.hpp b/src/armnn/layers/LogSoftmaxLayer.hpp new file mode 100644 index 0000000000..13da542139 --- /dev/null +++ b/src/armnn/layers/LogSoftmaxLayer.hpp @@ -0,0 +1,44 @@ +// +// Copyright © 2019 Arm Ltd. All rights reserved. 
// SPDX-License-Identifier: MIT
//

#pragma once

#include "LayerWithParameters.hpp"

namespace armnn
{

/// This layer represents a log softmax operation, parameterised by a
/// @ref LogSoftmaxDescriptor. It takes exactly one input and produces
/// exactly one output of the same shape.
class LogSoftmaxLayer : public LayerWithParameters<LogSoftmaxDescriptor>
{
public:
    /// Makes a workload for the LogSoftmax type.
    /// @param [in] graph The graph where this layer can be found.
    /// @param [in] factory The workload factory which will create the workload.
    /// @return A pointer to the created workload, or nullptr if not created.
    virtual std::unique_ptr<IWorkload> CreateWorkload(const Graph& graph,
                                                      const IWorkloadFactory& factory) const override;

    /// Creates a dynamically-allocated copy of this layer.
    /// @param [in] graph The graph into which this layer is being cloned.
    /// @return A raw pointer to the clone; ownership lives with the graph.
    LogSoftmaxLayer* Clone(Graph& graph) const override;

    /// Check if the input tensor shape(s)
    /// will lead to a valid configuration of @ref LogSoftmaxLayer.
    /// Throws LayerValidationException if the output slot's shape does not
    /// match the shape inferred from the input.
    void ValidateTensorShapesFromInputs() override;

    /// Apply the visitor pattern: calls VisitLogSoftmaxLayer on the visitor
    /// with this layer's parameters and name.
    void Accept(ILayerVisitor& visitor) const override;

protected:
    /// Constructor to create a LogSoftmaxLayer.
    /// @param [in] param LogSoftmaxDescriptor to configure the log softmax operation.
    /// @param [in] name Optional name for the layer.
    LogSoftmaxLayer(const LogSoftmaxDescriptor& param, const char* name);

    /// Default destructor
    ~LogSoftmaxLayer() = default;
};

} // namespace