From f982deaefbe5fe5814487b27f7099829839b8666 Mon Sep 17 00:00:00 2001 From: Aron Virginas-Tar Date: Fri, 11 Oct 2019 14:07:53 +0100 Subject: IVGCVSW-3973 Add frontend for LOG_SOFTMAX Signed-off-by: Aron Virginas-Tar Change-Id: Ic6acc7176deea3753b32ce6340f642d19dce0e9f --- src/armnn/layers/LogSoftmaxLayer.cpp | 50 ++++++++++++++++++++++++++++++++++++ src/armnn/layers/LogSoftmaxLayer.hpp | 44 +++++++++++++++++++++++++++++++ 2 files changed, 94 insertions(+) create mode 100644 src/armnn/layers/LogSoftmaxLayer.cpp create mode 100644 src/armnn/layers/LogSoftmaxLayer.hpp (limited to 'src/armnn/layers') diff --git a/src/armnn/layers/LogSoftmaxLayer.cpp b/src/armnn/layers/LogSoftmaxLayer.cpp new file mode 100644 index 0000000000..6ca15b2d6f --- /dev/null +++ b/src/armnn/layers/LogSoftmaxLayer.cpp @@ -0,0 +1,50 @@ +// +// Copyright © 2019 Arm Ltd. All rights reserved. +// SPDX-License-Identifier: MIT +// + +#include "LogSoftmaxLayer.hpp" + +#include "LayerCloneBase.hpp" + +#include <armnn/TypesUtils.hpp> + +#include <backendsCommon/WorkloadData.hpp> +#include <backendsCommon/WorkloadFactory.hpp> + +namespace armnn +{ + +LogSoftmaxLayer::LogSoftmaxLayer(const LogSoftmaxDescriptor& param, const char* name) + : LayerWithParameters(1, 1, LayerType::LogSoftmax, param, name) {} + +std::unique_ptr<IWorkload> LogSoftmaxLayer::CreateWorkload(const Graph& graph, const IWorkloadFactory& factory) const +{ + LogSoftmaxQueueDescriptor descriptor; + return factory.CreateLogSoftmax(descriptor, PrepInfoAndDesc(descriptor, graph)); +} + +LogSoftmaxLayer* LogSoftmaxLayer::Clone(Graph& graph) const +{ + return CloneBase<LogSoftmaxLayer>(graph, m_Param, GetName()); +} + +void LogSoftmaxLayer::ValidateTensorShapesFromInputs() +{ + VerifyLayerConnections(1, CHECK_LOCATION()); + + auto inferredShapes = InferOutputShapes({ GetInputSlot(0).GetConnection()->GetTensorInfo().GetShape() }); + BOOST_ASSERT(inferredShapes.size() == 1); + + ConditionalThrowIfNotEqual<LayerValidationException>( + "LogSoftmaxLayer: TensorShape set on OutputSlot[0] does not match the inferred shape.", + GetOutputSlot(0).GetTensorInfo().GetShape(), + inferredShapes[0]); +} + 
+void LogSoftmaxLayer::Accept(ILayerVisitor& visitor) const +{ + visitor.VisitLogSoftmaxLayer(this, GetParameters(), GetName()); +} + +} // namespace armnn diff --git a/src/armnn/layers/LogSoftmaxLayer.hpp b/src/armnn/layers/LogSoftmaxLayer.hpp new file mode 100644 index 0000000000..13da542139 --- /dev/null +++ b/src/armnn/layers/LogSoftmaxLayer.hpp @@ -0,0 +1,44 @@ +// +// Copyright © 2019 Arm Ltd. All rights reserved. +// SPDX-License-Identifier: MIT +// + +#pragma once + +#include "LayerWithParameters.hpp" + +namespace armnn +{ + +/// This layer represents a log softmax operation. +class LogSoftmaxLayer : public LayerWithParameters<LogSoftmaxDescriptor> +{ +public: + /// Makes a workload for the LogSoftmax type. + /// @param [in] graph The graph where this layer can be found. + /// @param [in] factory The workload factory which will create the workload. + /// @return A pointer to the created workload, or nullptr if not created. + virtual std::unique_ptr<IWorkload> CreateWorkload(const Graph& graph, + const IWorkloadFactory& factory) const override; + + /// Creates a dynamically-allocated copy of this layer. + /// @param [in] graph The graph into which this layer is being cloned. + LogSoftmaxLayer* Clone(Graph& graph) const override; + + /// Check if the input tensor shape(s) + /// will lead to a valid configuration of @ref LogSoftmaxLayer. + void ValidateTensorShapesFromInputs() override; + + void Accept(ILayerVisitor& visitor) const override; + +protected: + /// Constructor to create a LogSoftmaxLayer. + /// @param [in] param LogSoftmaxDescriptor to configure the log softmax operation. + /// @param [in] name Optional name for the layer. + LogSoftmaxLayer(const LogSoftmaxDescriptor& param, const char* name); + + /// Default destructor + ~LogSoftmaxLayer() = default; +}; + +} // namespace armnn -- cgit v1.2.1