diff options
author    Nattapat Chaimanowong <nattapat.chaimanowong@arm.com>  2018-11-28 10:44:37 +0000
committer Matthew Bentham <matthew.bentham@arm.com>              2018-11-28 13:57:58 +0000
commit    5a4304a09fcbfd5fab4c73e5fd0d4cc9f3170395 (patch)
tree      c48ca6e33adec68612a90d19c734c2c085807493 /src/armnn
parent    831faedd5d2f6306b23202fa4e450c6d241a44a0 (diff)
download  armnn-5a4304a09fcbfd5fab4c73e5fd0d4cc9f3170395.tar.gz
IVGCVSW-2253 Add maximum layer and corresponding no-op factory implementation
Change-Id: I8964f5e8978c2d2a07734a381e3f7c656c22456a
Diffstat (limited to 'src/armnn')
-rw-r--r--  src/armnn/InternalTypes.cpp        |  1
-rw-r--r--  src/armnn/InternalTypes.hpp        |  1
-rw-r--r--  src/armnn/LayerSupport.cpp         | 11
-rw-r--r--  src/armnn/LayersFwd.hpp            |  2
-rw-r--r--  src/armnn/Network.cpp              |  5
-rw-r--r--  src/armnn/Network.hpp              |  2
-rw-r--r--  src/armnn/layers/MaximumLayer.cpp  | 32
-rw-r--r--  src/armnn/layers/MaximumLayer.hpp  | 26
8 files changed, 80 insertions, 0 deletions
diff --git a/src/armnn/InternalTypes.cpp b/src/armnn/InternalTypes.cpp
index f37b1a0c66..37c63fd92a 100644
--- a/src/armnn/InternalTypes.cpp
+++ b/src/armnn/InternalTypes.cpp
@@ -30,6 +30,7 @@ char const* GetLayerTypeAsCString(LayerType type)
 case LayerType::Input: return "Input";
 case LayerType::L2Normalization: return "L2Normalization";
 case LayerType::Lstm: return "Lstm";
+case LayerType::Maximum: return "Maximum";
 case LayerType::Mean: return "Mean";
 case LayerType::MemCopy: return "MemCopy";
 case LayerType::Merger: return "Merger";
diff --git a/src/armnn/InternalTypes.hpp b/src/armnn/InternalTypes.hpp
index 3e2f298119..989718b0f7 100644
--- a/src/armnn/InternalTypes.hpp
+++ b/src/armnn/InternalTypes.hpp
@@ -30,6 +30,7 @@ enum class LayerType
 Input,
 L2Normalization,
 Lstm,
+Maximum,
 Mean,
 MemCopy,
 Merger,
diff --git a/src/armnn/LayerSupport.cpp b/src/armnn/LayerSupport.cpp
index 6489fe4f66..5834b81ccc 100644
--- a/src/armnn/LayerSupport.cpp
+++ b/src/armnn/LayerSupport.cpp
@@ -255,6 +255,17 @@ bool IsLstmSupported(const BackendId& backend, const TensorInfo& input, const Te
 cellToInputWeights, inputGateBias, projectionWeights, projectionBias,
 cellToForgetWeights, cellToOutputWeights);
 }
+
+bool IsMaximumSupported(const BackendId& backend,
+                        const TensorInfo& input0,
+                        const TensorInfo& input1,
+                        const TensorInfo& output,
+                        char* reasonIfUnsupported,
+                        size_t reasonIfUnsupportedMaxLength)
+{
+    FORWARD_LAYER_SUPPORT_FUNC(backend, IsMaximumSupported, input0, input1, output);
+}
+
 bool IsMergerSupported(const BackendId& backend,
 std::vector<const TensorInfo*> inputs,
 const TensorInfo& output,
diff --git a/src/armnn/LayersFwd.hpp b/src/armnn/LayersFwd.hpp
index 0e873d7c70..bab40da372 100644
--- a/src/armnn/LayersFwd.hpp
+++ b/src/armnn/LayersFwd.hpp
@@ -22,6 +22,7 @@
 #include "layers/InputLayer.hpp"
 #include "layers/L2NormalizationLayer.hpp"
 #include "layers/LstmLayer.hpp"
+#include "layers/MaximumLayer.hpp"
 #include "layers/MeanLayer.hpp"
 #include "layers/MemCopyLayer.hpp"
 #include "layers/MergerLayer.hpp"
@@ -82,6 +83,7 @@ DECLARE_LAYER(FullyConnected)
 DECLARE_LAYER(Input)
 DECLARE_LAYER(L2Normalization)
 DECLARE_LAYER(Lstm)
+DECLARE_LAYER(Maximum)
 DECLARE_LAYER(Mean)
 DECLARE_LAYER(MemCopy)
 DECLARE_LAYER(Merger)
diff --git a/src/armnn/Network.cpp b/src/armnn/Network.cpp
index 32464f7757..57949fb430 100644
--- a/src/armnn/Network.cpp
+++ b/src/armnn/Network.cpp
@@ -507,6 +507,11 @@ IConnectableLayer* Network::AddSplitterLayer(const ViewsDescriptor& splitterDesc
 return m_Graph->AddLayer<SplitterLayer>(splitterDescriptor, name);
 }
+
+IConnectableLayer* Network::AddMaximumLayer(const char* name)
+{
+    return m_Graph->AddLayer<MaximumLayer>(name);
+}
+
 IConnectableLayer* Network::AddMergerLayer(const OriginsDescriptor& mergerDescriptor,
 const char* name)
 {
diff --git a/src/armnn/Network.hpp b/src/armnn/Network.hpp
index 471ce3ec68..84d1f58ab9 100644
--- a/src/armnn/Network.hpp
+++ b/src/armnn/Network.hpp
@@ -124,6 +124,8 @@ public:
 IConnectableLayer* AddSubtractionLayer(const char* name = nullptr) override;
+
+IConnectableLayer* AddMaximumLayer(const char* name = nullptr) override;
+
 IConnectableLayer* AddMeanLayer(const MeanDescriptor& meanDescriptor, const char* name = nullptr) override;
 IConnectableLayer* AddPadLayer(const PadDescriptor& padDescriptor, const char* name = nullptr) override;
diff --git a/src/armnn/layers/MaximumLayer.cpp b/src/armnn/layers/MaximumLayer.cpp
new file mode 100644
index 0000000000..67a234216c
--- /dev/null
+++ b/src/armnn/layers/MaximumLayer.cpp
@@ -0,0 +1,32 @@
+//
+// Copyright © 2017 Arm Ltd. All rights reserved.
+// SPDX-License-Identifier: MIT
+//
+
+#include "MaximumLayer.hpp"
+
+#include "LayerCloneBase.hpp"
+
+#include <backendsCommon/WorkloadData.hpp>
+#include <backendsCommon/WorkloadFactory.hpp>
+
+namespace armnn
+{
+
+MaximumLayer::MaximumLayer(const char* name)
+: ArithmeticBaseLayer(2, 1, LayerType::Maximum, name)
+{}
+
+std::unique_ptr<IWorkload> MaximumLayer::CreateWorkload(const Graph& graph,
+                                                        const IWorkloadFactory& factory) const
+{
+    MaximumQueueDescriptor descriptor;
+    return factory.CreateMaximum(descriptor, PrepInfoAndDesc(descriptor, graph));
+}
+
+MaximumLayer* MaximumLayer::Clone(Graph& graph) const
+{
+    return CloneBase<MaximumLayer>(graph, GetName());
+}
+
+} // namespace armnn
diff --git a/src/armnn/layers/MaximumLayer.hpp b/src/armnn/layers/MaximumLayer.hpp
new file mode 100644
index 0000000000..da4c3ed18c
--- /dev/null
+++ b/src/armnn/layers/MaximumLayer.hpp
@@ -0,0 +1,26 @@
+//
+// Copyright © 2017 Arm Ltd. All rights reserved.
+// SPDX-License-Identifier: MIT
+//
+#pragma once
+
+#include "ArithmeticBaseLayer.hpp"
+
+namespace armnn
+{
+
+class MaximumLayer : public ArithmeticBaseLayer
+{
+public:
+    virtual std::unique_ptr<IWorkload> CreateWorkload(const Graph& graph,
+                                                      const IWorkloadFactory& factory) const override;
+
+    MaximumLayer* Clone(Graph& graph) const override;
+
+protected:
+    MaximumLayer(const char* name);
+
+    ~MaximumLayer() = default;
+};
+
+} // namespace