From 636ab40d3741e12eaad11d5b50e4b34bfbb258b5 Mon Sep 17 00:00:00 2001 From: Aron Virginas-Tar Date: Mon, 16 Sep 2019 14:27:45 +0100 Subject: IVGCVSW-3875 Add frontend for SLICE layer Signed-off-by: Aron Virginas-Tar Change-Id: Iebe675a0cee02db6f133d48ce58cbc1e233061db --- Android.mk | 3 +- CMakeLists.txt | 3 +- include/armnn/Descriptors.hpp | 20 +++- include/armnn/DescriptorsFwd.hpp | 1 + include/armnn/ILayerSupport.hpp | 5 + include/armnn/ILayerVisitor.hpp | 8 ++ include/armnn/INetwork.hpp | 10 +- include/armnn/LayerVisitorBase.hpp | 4 + src/armnn/InternalTypes.hpp | 1 + src/armnn/LayersFwd.hpp | 2 + src/armnn/Network.cpp | 5 + src/armnn/Network.hpp | 2 + src/armnn/layers/SliceLayer.cpp | 66 ++++++++++++ src/armnn/layers/SliceLayer.hpp | 49 +++++++++ src/armnn/test/TestNameOnlyLayerVisitor.cpp | 116 +++++++++++++-------- src/armnn/test/TestNameOnlyLayerVisitor.hpp | 82 +++++++++------ src/armnnSerializer/Serializer.cpp | 7 ++ src/armnnSerializer/Serializer.hpp | 4 + src/backends/backendsCommon/LayerSupportBase.cpp | 8 ++ src/backends/backendsCommon/LayerSupportBase.hpp | 5 + src/backends/backendsCommon/WorkloadData.cpp | 57 +++++++++- src/backends/backendsCommon/WorkloadData.hpp | 7 +- src/backends/backendsCommon/WorkloadFactory.cpp | 21 +++- src/backends/backendsCommon/WorkloadFactory.hpp | 3 + .../test/IsLayerSupportedTestImpl.hpp | 2 + 25 files changed, 406 insertions(+), 85 deletions(-) create mode 100644 src/armnn/layers/SliceLayer.cpp create mode 100644 src/armnn/layers/SliceLayer.hpp diff --git a/Android.mk b/Android.mk index 3640e0c43b..89956ddad1 100644 --- a/Android.mk +++ b/Android.mk @@ -157,9 +157,10 @@ LOCAL_SRC_FILES := \ src/armnn/layers/ReshapeLayer.cpp \ src/armnn/layers/ResizeLayer.cpp \ src/armnn/layers/RsqrtLayer.cpp \ + src/armnn/layers/SliceLayer.cpp \ + src/armnn/layers/SoftmaxLayer.cpp \ src/armnn/layers/SpaceToBatchNdLayer.cpp \ src/armnn/layers/SpaceToDepthLayer.cpp \ - src/armnn/layers/SoftmaxLayer.cpp \ 
src/armnn/layers/SplitterLayer.cpp \ src/armnn/layers/StackLayer.cpp \ src/armnn/layers/StridedSliceLayer.cpp \ diff --git a/CMakeLists.txt b/CMakeLists.txt index ef79ee1afe..a04f30baf2 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -319,6 +319,8 @@ list(APPEND armnn_sources src/armnn/layers/ResizeLayer.cpp src/armnn/layers/RsqrtLayer.cpp src/armnn/layers/RsqrtLayer.hpp + src/armnn/layers/SliceLayer.cpp + src/armnn/layers/SliceLayer.hpp src/armnn/layers/SoftmaxLayer.hpp src/armnn/layers/SoftmaxLayer.cpp src/armnn/layers/SpaceToBatchNdLayer.hpp @@ -846,4 +848,3 @@ if(BUILD_GATORD_MOCK) ) endif() - diff --git a/include/armnn/Descriptors.hpp b/include/armnn/Descriptors.hpp index e871e89ebb..8d382f700e 100644 --- a/include/armnn/Descriptors.hpp +++ b/include/armnn/Descriptors.hpp @@ -667,6 +667,24 @@ struct PadDescriptor float m_PadValue; }; +/// A SliceDescriptor for the SliceLayer. +struct SliceDescriptor +{ + SliceDescriptor(const std::vector& begin, const std::vector& size) + : m_Begin(begin) + , m_Size(size) + {} + + SliceDescriptor() : SliceDescriptor({}, {}) + {} + + /// Beginning indices of the slice in each dimension. + std::vector m_Begin; + + /// Size of the slice in each dimension. + std::vector m_Size; +}; + /// A StackDescriptor for the StackLayer. 
struct StackDescriptor { @@ -786,4 +804,4 @@ struct TransposeConvolution2dDescriptor DataLayout m_DataLayout; }; -} // namespace armnn \ No newline at end of file +} // namespace armnn diff --git a/include/armnn/DescriptorsFwd.hpp b/include/armnn/DescriptorsFwd.hpp index 8f81b4fe3e..bddb0cad59 100644 --- a/include/armnn/DescriptorsFwd.hpp +++ b/include/armnn/DescriptorsFwd.hpp @@ -31,6 +31,7 @@ struct ResizeDescriptor; struct SoftmaxDescriptor; struct SpaceToBatchNdDescriptor; struct SpaceToDepthDescriptor; +struct SliceDescriptor; struct StackDescriptor; struct StridedSliceDescriptor; struct TransposeConvolution2dDescriptor; diff --git a/include/armnn/ILayerSupport.hpp b/include/armnn/ILayerSupport.hpp index d168226402..cab2df19af 100644 --- a/include/armnn/ILayerSupport.hpp +++ b/include/armnn/ILayerSupport.hpp @@ -269,6 +269,11 @@ public: const TensorInfo& output, Optional reasonIfUnsupported = EmptyOptional()) const = 0; + virtual bool IsSliceSupported(const TensorInfo& input, + const TensorInfo& output, + const SliceDescriptor& descriptor, + Optional reasonIfUnsupported = EmptyOptional()) const = 0; + virtual bool IsSoftmaxSupported(const TensorInfo& input, const TensorInfo& output, const SoftmaxDescriptor& descriptor, diff --git a/include/armnn/ILayerVisitor.hpp b/include/armnn/ILayerVisitor.hpp index a504a4190d..6c0977303c 100644 --- a/include/armnn/ILayerVisitor.hpp +++ b/include/armnn/ILayerVisitor.hpp @@ -357,6 +357,14 @@ public: virtual void VisitRsqrtLayer(const IConnectableLayer* layer, const char* name = nullptr) = 0; + /// Function that a slice layer should call back to when its Accept(ILayerVisitor&) function is invoked. + /// @param layer - pointer to the layer which is calling back to this visit function. + /// @param sliceDescriptor - SliceDescriptor to configure the slice operation. + /// @param name - Optional name for the layer. 
+ virtual void VisitSliceLayer(const IConnectableLayer* layer, + const SliceDescriptor& sliceDescriptor, + const char* name = nullptr) = 0; + /// Function that a softmax layer should call back to when its Accept(ILayerVisitor&) function is invoked. /// @param layer - pointer to the layer which is calling back to this visit function. diff --git a/include/armnn/INetwork.hpp b/include/armnn/INetwork.hpp index cd1b7a6319..09026ad9e6 100644 --- a/include/armnn/INetwork.hpp +++ b/include/armnn/INetwork.hpp @@ -237,6 +237,12 @@ public: virtual IConnectableLayer* AddNormalizationLayer(const NormalizationDescriptor& normalizationDescriptor, const char* name = nullptr) = 0; + /// Adds a slice layer to the network. + /// @param sliceDescriptor - SliceDescriptor to configure the slice operation. + /// @param name - Optional name for the layer. + /// @return - Interface for configuring the layer. + virtual IConnectableLayer* AddSliceLayer(const SliceDescriptor& sliceDescriptor, const char* name = nullptr) = 0; + /// Adds a softmax layer to the network. /// If the data type is QAsymm8, then the output quantization parameters /// must have a scale of 1/256 and an offset of 0 @@ -253,8 +259,8 @@ public: /// the first output, second view to the second output, etc.... /// @param name - Optional name for the layer. /// @return - Interface for configuring the layer. - virtual IConnectableLayer* AddSplitterLayer(const ViewsDescriptor& splitterDescriptor - , const char* name = nullptr) = 0; + virtual IConnectableLayer* AddSplitterLayer(const ViewsDescriptor& splitterDescriptor, + const char* name = nullptr) = 0; /// Adds a merge layer to the network. /// @param name - Optional name for the layer. 
diff --git a/include/armnn/LayerVisitorBase.hpp b/include/armnn/LayerVisitorBase.hpp index 0739b43736..d626c712ba 100644 --- a/include/armnn/LayerVisitorBase.hpp +++ b/include/armnn/LayerVisitorBase.hpp @@ -182,6 +182,10 @@ public: void VisitRsqrtLayer(const IConnectableLayer*, const char*) override { DefaultPolicy::Apply(__func__); } + void VisitSliceLayer(const IConnectableLayer*, + const SliceDescriptor&, + const char*) override { DefaultPolicy::Apply(__func__); } + void VisitSoftmaxLayer(const IConnectableLayer*, const SoftmaxDescriptor&, const char*) override { DefaultPolicy::Apply(__func__); } diff --git a/src/armnn/InternalTypes.hpp b/src/armnn/InternalTypes.hpp index 98308f92a1..1e05fff769 100644 --- a/src/armnn/InternalTypes.hpp +++ b/src/armnn/InternalTypes.hpp @@ -58,6 +58,7 @@ enum class LayerType Reshape, Resize, Rsqrt, + Slice, Softmax, SpaceToBatchNd, SpaceToDepth, diff --git a/src/armnn/LayersFwd.hpp b/src/armnn/LayersFwd.hpp index 6e4cf6ab04..a98c104f85 100644 --- a/src/armnn/LayersFwd.hpp +++ b/src/armnn/LayersFwd.hpp @@ -50,6 +50,7 @@ #include "layers/ReshapeLayer.hpp" #include "layers/ResizeLayer.hpp" #include "layers/RsqrtLayer.hpp" +#include "layers/SliceLayer.hpp" #include "layers/SoftmaxLayer.hpp" #include "layers/SpaceToBatchNdLayer.hpp" #include "layers/SpaceToDepthLayer.hpp" @@ -131,6 +132,7 @@ DECLARE_LAYER(QuantizedLstm) DECLARE_LAYER(Reshape) DECLARE_LAYER(Resize) DECLARE_LAYER(Rsqrt) +DECLARE_LAYER(Slice) DECLARE_LAYER(Softmax) DECLARE_LAYER(SpaceToBatchNd) DECLARE_LAYER(SpaceToDepth) diff --git a/src/armnn/Network.cpp b/src/armnn/Network.cpp index 6971cb89ba..c055407b3a 100644 --- a/src/armnn/Network.cpp +++ b/src/armnn/Network.cpp @@ -1129,6 +1129,11 @@ normalizationDescriptor, return m_Graph->AddLayer(normalizationDescriptor, name); } +IConnectableLayer* Network::AddSliceLayer(const SliceDescriptor& sliceDescriptor, const char* name) +{ + return m_Graph->AddLayer(sliceDescriptor, name); +} + IConnectableLayer* 
Network::AddSoftmaxLayer(const SoftmaxDescriptor& softmaxDescriptor, const char* name) { diff --git a/src/armnn/Network.hpp b/src/armnn/Network.hpp index aac875aac7..274cc1ab7c 100644 --- a/src/armnn/Network.hpp +++ b/src/armnn/Network.hpp @@ -117,6 +117,8 @@ public: IConnectableLayer* AddNormalizationLayer(const NormalizationDescriptor& normalizationDescriptor, const char* name = nullptr) override; + IConnectableLayer* AddSliceLayer(const SliceDescriptor& sliceDescriptor, const char* name = nullptr) override; + IConnectableLayer* AddSoftmaxLayer(const SoftmaxDescriptor& softmaxDescriptor, const char* name = nullptr) override; diff --git a/src/armnn/layers/SliceLayer.cpp b/src/armnn/layers/SliceLayer.cpp new file mode 100644 index 0000000000..8ea5fd8f25 --- /dev/null +++ b/src/armnn/layers/SliceLayer.cpp @@ -0,0 +1,66 @@ +// +// Copyright © 2019 Arm Ltd. All rights reserved. +// SPDX-License-Identifier: MIT +// + +#include "SliceLayer.hpp" + +#include "LayerCloneBase.hpp" + +#include + +#include +#include + +#include +#include + +namespace armnn +{ + +SliceLayer::SliceLayer(const SliceDescriptor& param, const char* name) + : LayerWithParameters(1, 1, LayerType::Slice, param, name) +{ +} + +std::unique_ptr SliceLayer::CreateWorkload(const Graph& graph, + const IWorkloadFactory& factory) const +{ + SliceQueueDescriptor descriptor; + return factory.CreateSlice(descriptor, PrepInfoAndDesc(descriptor, graph)); +} + +SliceLayer* SliceLayer::Clone(Graph& graph) const +{ + return CloneBase(graph, m_Param, GetName()); +} + +void SliceLayer::ValidateTensorShapesFromInputs() +{ + VerifyLayerConnections(1, CHECK_LOCATION()); + + auto inferredShapes = InferOutputShapes({ GetInputSlot(0).GetConnection()->GetTensorInfo().GetShape() }); + + BOOST_ASSERT(inferredShapes.size() == 1); + + ConditionalThrowIfNotEqual( + "SliceLayer: TensorShape set on OutputSlot[0] does not match the inferred shape.", + GetOutputSlot(0).GetTensorInfo().GetShape(), + inferredShapes[0]); +} + 
+std::vector SliceLayer::InferOutputShapes(const std::vector& inputShapes) const +{ + BOOST_ASSERT(inputShapes.size() == 1); + + TensorShape outputShape(boost::numeric_cast(m_Param.m_Size.size()), m_Param.m_Size.data()); + + return std::vector({ outputShape }); +} + +void SliceLayer::Accept(ILayerVisitor& visitor) const +{ + visitor.VisitSliceLayer(this, GetParameters(), GetName()); +} + +} // namespace armnn diff --git a/src/armnn/layers/SliceLayer.hpp b/src/armnn/layers/SliceLayer.hpp new file mode 100644 index 0000000000..38f0747f05 --- /dev/null +++ b/src/armnn/layers/SliceLayer.hpp @@ -0,0 +1,49 @@ +// +// Copyright © 2019 Arm Ltd. All rights reserved. +// SPDX-License-Identifier: MIT +// + +#pragma once + +#include "LayerWithParameters.hpp" + +namespace armnn +{ + +class SliceLayer : public LayerWithParameters +{ +public: + /// Makes a workload for the Slice type. + /// @param [in] graph The graph where this layer can be found. + /// @param [in] factory The workload factory which will create the workload. + /// @return A pointer to the created workload, or nullptr if not created. + virtual std::unique_ptr CreateWorkload(const Graph& graph, + const IWorkloadFactory& factory) const override; + + /// Creates a dynamically-allocated copy of this layer. + /// @param [in] graph The graph into which this layer is being cloned. + SliceLayer* Clone(Graph& graph) const override; + + /// Check if the input tensor shape(s) + /// will lead to a valid configuration of @ref SliceLayer. + void ValidateTensorShapesFromInputs() override; + + /// By default returns inputShapes if the number of inputs are equal to number of outputs, + /// otherwise infers the output shapes from given input shapes and layer properties. + /// @param [in] inputShapes The input shapes layer has. + /// @return A vector to the inferred output shape. 
+ std::vector InferOutputShapes(const std::vector& inputShapes) const override; + + void Accept(ILayerVisitor& visitor) const override; + +protected: + /// Constructor to create a SliceLayer. + /// @param [in] param SliceDescriptor to configure the slice operation. + /// @param [in] name Optional name for the layer. + SliceLayer(const SliceDescriptor& param, const char* name); + + /// Default destructor. + ~SliceLayer() = default; +}; + +} // namespace armnn diff --git a/src/armnn/test/TestNameOnlyLayerVisitor.cpp b/src/armnn/test/TestNameOnlyLayerVisitor.cpp index 4bb9614385..c4c4a479eb 100644 --- a/src/armnn/test/TestNameOnlyLayerVisitor.cpp +++ b/src/armnn/test/TestNameOnlyLayerVisitor.cpp @@ -10,6 +10,7 @@ namespace armnn { BOOST_AUTO_TEST_SUITE(TestNameOnlyLayerVisitor) +// Addition BOOST_AUTO_TEST_CASE(CheckAdditionLayerVisitorName) { TestAdditionLayerVisitor visitor("AdditionLayer"); @@ -28,24 +29,45 @@ BOOST_AUTO_TEST_CASE(CheckAdditionLayerVisitorNameNullptr) layer->Accept(visitor); } -BOOST_AUTO_TEST_CASE(CheckMultiplicationLayerVisitorName) +// Division +BOOST_AUTO_TEST_CASE(CheckDivisionLayerVisitorName) { - TestMultiplicationLayerVisitor visitor("MultiplicationLayer"); + TestDivisionLayerVisitor visitor("DivisionLayer"); Network net; - IConnectableLayer *const layer = net.AddMultiplicationLayer("MultiplicationLayer"); + IConnectableLayer *const layer = net.AddDivisionLayer("DivisionLayer"); layer->Accept(visitor); } -BOOST_AUTO_TEST_CASE(CheckMultiplicationLayerVisitorNameNullptr) +BOOST_AUTO_TEST_CASE(CheckDivisionLayerVisitorNameNullptr) { - TestMultiplicationLayerVisitor visitor; + TestDivisionLayerVisitor visitor; Network net; - IConnectableLayer *const layer = net.AddMultiplicationLayer(); + IConnectableLayer *const layer = net.AddDivisionLayer(); + layer->Accept(visitor); +} + +// Equal +BOOST_AUTO_TEST_CASE(CheckEqualLayerVisitorName) +{ + TestEqualLayerVisitor visitor("EqualLayer"); + Network net; + + IConnectableLayer *const layer = 
net.AddEqualLayer("EqualLayer"); + layer->Accept(visitor); +} + +BOOST_AUTO_TEST_CASE(CheckEqualLayerVisitorNameNullptr) +{ + TestEqualLayerVisitor visitor; + Network net; + + IConnectableLayer *const layer = net.AddEqualLayer(); layer->Accept(visitor); } +// Floor BOOST_AUTO_TEST_CASE(CheckFloorLayerVisitorName) { TestFloorLayerVisitor visitor("FloorLayer"); @@ -64,42 +86,45 @@ BOOST_AUTO_TEST_CASE(CheckFloorLayerVisitorNameNullptr) layer->Accept(visitor); } -BOOST_AUTO_TEST_CASE(CheckDivisionLayerVisitorName) +// Gather +BOOST_AUTO_TEST_CASE(CheckGatherLayerVisitorName) { - TestDivisionLayerVisitor visitor("DivisionLayer"); + TestGatherLayerVisitor visitor("GatherLayer"); Network net; - IConnectableLayer *const layer = net.AddAdditionLayer("DivisionLayer"); + IConnectableLayer *const layer = net.AddGatherLayer("GatherLayer"); layer->Accept(visitor); } -BOOST_AUTO_TEST_CASE(CheckDivisionLayerVisitorNameNullptr) +BOOST_AUTO_TEST_CASE(CheckGatherLayerVisitorNameNullptr) { - TestDivisionLayerVisitor visitor; + TestGatherLayerVisitor visitor; Network net; - IConnectableLayer *const layer = net.AddDivisionLayer(); + IConnectableLayer *const layer = net.AddGatherLayer(); layer->Accept(visitor); } -BOOST_AUTO_TEST_CASE(CheckSubtractionLayerVisitorName) +// Greater +BOOST_AUTO_TEST_CASE(CheckGreaterLayerVisitorName) { - TestSubtractionLayerVisitor visitor("SubtractionLayer"); + TestGreaterLayerVisitor visitor("GreaterLayer"); Network net; - IConnectableLayer *const layer = net.AddSubtractionLayer("SubtractionLayer"); + IConnectableLayer *const layer = net.AddGreaterLayer("GreaterLayer"); layer->Accept(visitor); } -BOOST_AUTO_TEST_CASE(CheckSubtractionLayerVisitorNameNullptr) +BOOST_AUTO_TEST_CASE(CheckGreaterLayerVisitorNameNullptr) { - TestSubtractionLayerVisitor visitor; + TestGreaterLayerVisitor visitor; Network net; - IConnectableLayer *const layer = net.AddSubtractionLayer(); + IConnectableLayer *const layer = net.AddGreaterLayer(); layer->Accept(visitor); } +// 
Maximum BOOST_AUTO_TEST_CASE(CheckMaximumLayerVisitorName) { TestMaximumLayerVisitor visitor("MaximumLayer"); @@ -118,6 +143,7 @@ BOOST_AUTO_TEST_CASE(CheckMaximumLayerVisitorNameNullptr) layer->Accept(visitor); } +// Minimum BOOST_AUTO_TEST_CASE(CheckMinimumLayerVisitorName) { TestMinimumLayerVisitor visitor("MinimumLayer"); @@ -136,78 +162,82 @@ BOOST_AUTO_TEST_CASE(CheckMinimumLayerVisitorNameNullptr) layer->Accept(visitor); } -BOOST_AUTO_TEST_CASE(CheckGreaterLayerVisitorName) +// Multiplication +BOOST_AUTO_TEST_CASE(CheckMultiplicationLayerVisitorName) { - TestGreaterLayerVisitor visitor("GreaterLayer"); + TestMultiplicationLayerVisitor visitor("MultiplicationLayer"); Network net; - IConnectableLayer *const layer = net.AddGreaterLayer("GreaterLayer"); + IConnectableLayer *const layer = net.AddMultiplicationLayer("MultiplicationLayer"); layer->Accept(visitor); } -BOOST_AUTO_TEST_CASE(CheckGreaterLayerVisitorNameNullptr) +BOOST_AUTO_TEST_CASE(CheckMultiplicationLayerVisitorNameNullptr) { - TestGreaterLayerVisitor visitor; + TestMultiplicationLayerVisitor visitor; Network net; - IConnectableLayer *const layer = net.AddGreaterLayer(); + IConnectableLayer *const layer = net.AddMultiplicationLayer(); layer->Accept(visitor); } -BOOST_AUTO_TEST_CASE(CheckEqualLayerVisitorName) +// Rsqrt +BOOST_AUTO_TEST_CASE(CheckRsqrtLayerVisitorName) { - TestEqualLayerVisitor visitor("EqualLayer"); + TestRsqrtLayerVisitor visitor("RsqrtLayer"); Network net; - IConnectableLayer *const layer = net.AddEqualLayer("EqualLayer"); + IConnectableLayer *const layer = net.AddRsqrtLayer("RsqrtLayer"); layer->Accept(visitor); } -BOOST_AUTO_TEST_CASE(CheckEqualLayerVisitorNameNullptr) +BOOST_AUTO_TEST_CASE(CheckRsqrtLayerVisitorNameNullptr) { - TestEqualLayerVisitor visitor; + TestRsqrtLayerVisitor visitor; Network net; - IConnectableLayer *const layer = net.AddEqualLayer(); + IConnectableLayer *const layer = net.AddRsqrtLayer(); layer->Accept(visitor); } 
-BOOST_AUTO_TEST_CASE(CheckRsqrtLayerVisitorName) +// Slice +BOOST_AUTO_TEST_CASE(CheckSliceLayerVisitorName) { - TestRsqrtLayerVisitor visitor("RsqrtLayer"); + TestSliceLayerVisitor visitor("SliceLayer"); Network net; - IConnectableLayer *const layer = net.AddRsqrtLayer("RsqrtLayer"); + IConnectableLayer *const layer = net.AddSliceLayer(SliceDescriptor(), "SliceLayer"); layer->Accept(visitor); } -BOOST_AUTO_TEST_CASE(CheckRsqrtLayerVisitorNameNullptr) +BOOST_AUTO_TEST_CASE(CheckSliceLayerVisitorNameNullptr) { - TestRsqrtLayerVisitor visitor; + TestSliceLayerVisitor visitor; Network net; - IConnectableLayer *const layer = net.AddRsqrtLayer(); + IConnectableLayer *const layer = net.AddSliceLayer(SliceDescriptor()); layer->Accept(visitor); } -BOOST_AUTO_TEST_CASE(CheckGatherLayerVisitorName) +// Subtraction +BOOST_AUTO_TEST_CASE(CheckSubtractionLayerVisitorName) { - TestGatherLayerVisitor visitor("GatherLayer"); + TestSubtractionLayerVisitor visitor("SubtractionLayer"); Network net; - IConnectableLayer *const layer = net.AddGatherLayer("GatherLayer"); + IConnectableLayer *const layer = net.AddSubtractionLayer("SubtractionLayer"); layer->Accept(visitor); } -BOOST_AUTO_TEST_CASE(CheckGatherLayerVisitorNameNullptr) +BOOST_AUTO_TEST_CASE(CheckSubtractionLayerVisitorNameNullptr) { - TestGatherLayerVisitor visitor; + TestSubtractionLayerVisitor visitor; Network net; - IConnectableLayer *const layer = net.AddGatherLayer(); + IConnectableLayer *const layer = net.AddSubtractionLayer(); layer->Accept(visitor); } BOOST_AUTO_TEST_SUITE_END() -} //namespace armnn \ No newline at end of file +} // namespace armnn diff --git a/src/armnn/test/TestNameOnlyLayerVisitor.hpp b/src/armnn/test/TestNameOnlyLayerVisitor.hpp index c0037ae28f..dec0d15a96 100644 --- a/src/armnn/test/TestNameOnlyLayerVisitor.hpp +++ b/src/armnn/test/TestNameOnlyLayerVisitor.hpp @@ -22,97 +22,97 @@ public: }; }; -class TestMultiplicationLayerVisitor : public TestLayerVisitor +class TestDivisionLayerVisitor : 
public TestLayerVisitor { public: - explicit TestMultiplicationLayerVisitor(const char* name = nullptr) : TestLayerVisitor(name) {}; + explicit TestDivisionLayerVisitor(const char* name = nullptr) : TestLayerVisitor(name) {}; - void VisitMultiplicationLayer(const IConnectableLayer* layer, - const char* name = nullptr) override { + void VisitDivisionLayer(const IConnectableLayer* layer, + const char* name = nullptr) override { CheckLayerPointer(layer); CheckLayerName(name); }; }; -class TestFloorLayerVisitor : public TestLayerVisitor +class TestEqualLayerVisitor : public TestLayerVisitor { public: - explicit TestFloorLayerVisitor(const char* name = nullptr) : TestLayerVisitor(name) {}; + explicit TestEqualLayerVisitor(const char* name = nullptr) : TestLayerVisitor(name) {}; - void VisitFloorLayer(const IConnectableLayer* layer, + void VisitEqualLayer(const IConnectableLayer* layer, const char* name = nullptr) override { CheckLayerPointer(layer); CheckLayerName(name); }; }; -class TestDivisionLayerVisitor : public TestLayerVisitor +class TestFloorLayerVisitor : public TestLayerVisitor { public: - explicit TestDivisionLayerVisitor(const char* name = nullptr) : TestLayerVisitor(name) {}; + explicit TestFloorLayerVisitor(const char* name = nullptr) : TestLayerVisitor(name) {}; - void VisitDivisionLayer(const IConnectableLayer* layer, - const char* name = nullptr) override { + void VisitFloorLayer(const IConnectableLayer* layer, + const char* name = nullptr) override { CheckLayerPointer(layer); CheckLayerName(name); }; }; -class TestSubtractionLayerVisitor : public TestLayerVisitor +class TestGatherLayerVisitor : public TestLayerVisitor { public: - explicit TestSubtractionLayerVisitor(const char* name = nullptr) : TestLayerVisitor(name) {}; + explicit TestGatherLayerVisitor(const char* name = nullptr) : TestLayerVisitor(name) {}; - void VisitSubtractionLayer(const IConnectableLayer* layer, - const char* name = nullptr) override { + void VisitGatherLayer(const 
IConnectableLayer* layer, + const char* name = nullptr) override { CheckLayerPointer(layer); CheckLayerName(name); }; }; -class TestMaximumLayerVisitor : public TestLayerVisitor +class TestGreaterLayerVisitor : public TestLayerVisitor { public: - explicit TestMaximumLayerVisitor(const char* name = nullptr) : TestLayerVisitor(name) {}; + explicit TestGreaterLayerVisitor(const char* name = nullptr) : TestLayerVisitor(name) {}; - void VisitMaximumLayer(const IConnectableLayer* layer, + void VisitGreaterLayer(const IConnectableLayer* layer, const char* name = nullptr) override { CheckLayerPointer(layer); CheckLayerName(name); }; }; -class TestMinimumLayerVisitor : public TestLayerVisitor +class TestMultiplicationLayerVisitor : public TestLayerVisitor { public: - explicit TestMinimumLayerVisitor(const char* name = nullptr) : TestLayerVisitor(name) {}; + explicit TestMultiplicationLayerVisitor(const char* name = nullptr) : TestLayerVisitor(name) {}; - void VisitMinimumLayer(const IConnectableLayer* layer, - const char* name = nullptr) override { + void VisitMultiplicationLayer(const IConnectableLayer* layer, + const char* name = nullptr) override { CheckLayerPointer(layer); CheckLayerName(name); }; }; -class TestGreaterLayerVisitor : public TestLayerVisitor +class TestMaximumLayerVisitor : public TestLayerVisitor { public: - explicit TestGreaterLayerVisitor(const char* name = nullptr) : TestLayerVisitor(name) {}; + explicit TestMaximumLayerVisitor(const char* name = nullptr) : TestLayerVisitor(name) {}; - void VisitGreaterLayer(const IConnectableLayer* layer, + void VisitMaximumLayer(const IConnectableLayer* layer, const char* name = nullptr) override { CheckLayerPointer(layer); CheckLayerName(name); }; }; -class TestEqualLayerVisitor : public TestLayerVisitor +class TestMinimumLayerVisitor : public TestLayerVisitor { public: - explicit TestEqualLayerVisitor(const char* name = nullptr) : TestLayerVisitor(name) {}; + explicit TestMinimumLayerVisitor(const char* name = 
nullptr) : TestLayerVisitor(name) {}; - void VisitEqualLayer(const IConnectableLayer* layer, - const char* name = nullptr) override { + void VisitMinimumLayer(const IConnectableLayer* layer, + const char* name = nullptr) override { CheckLayerPointer(layer); CheckLayerName(name); }; @@ -130,16 +130,30 @@ public: }; }; -class TestGatherLayerVisitor : public TestLayerVisitor +class TestSliceLayerVisitor : public TestLayerVisitor { public: - explicit TestGatherLayerVisitor(const char* name = nullptr) : TestLayerVisitor(name) {}; + explicit TestSliceLayerVisitor(const char* name = nullptr) : TestLayerVisitor(name) {}; - void VisitGatherLayer(const IConnectableLayer* layer, - const char* name = nullptr) override { + void VisitSliceLayer(const IConnectableLayer* layer, + const SliceDescriptor& sliceDescriptor, + const char* name = nullptr) override + { + CheckLayerPointer(layer); + CheckLayerName(name); + }; +}; + +class TestSubtractionLayerVisitor : public TestLayerVisitor +{ +public: + explicit TestSubtractionLayerVisitor(const char* name = nullptr) : TestLayerVisitor(name) {}; + + void VisitSubtractionLayer(const IConnectableLayer* layer, + const char* name = nullptr) override { CheckLayerPointer(layer); CheckLayerName(name); }; }; -} //namespace armnn \ No newline at end of file +} // namespace armnn diff --git a/src/armnnSerializer/Serializer.cpp b/src/armnnSerializer/Serializer.cpp index 602c4ab99f..06bfb91e83 100644 --- a/src/armnnSerializer/Serializer.cpp +++ b/src/armnnSerializer/Serializer.cpp @@ -745,6 +745,13 @@ void SerializerVisitor::VisitRsqrtLayer(const armnn::IConnectableLayer* layer, c CreateAnyLayer(fbRsqrtLayer.o, serializer::Layer::Layer_RsqrtLayer); } +void SerializerVisitor::VisitSliceLayer(const armnn::IConnectableLayer* layer, + const armnn::SliceDescriptor& sliceDescriptor, + const char* name) +{ + throw UnimplementedException("SerializerVisitor::VisitSliceLayer is not implemented"); +} + // Build FlatBuffer for Softmax Layer void 
SerializerVisitor::VisitSoftmaxLayer(const armnn::IConnectableLayer* layer, const armnn::SoftmaxDescriptor& softmaxDescriptor, diff --git a/src/armnnSerializer/Serializer.hpp b/src/armnnSerializer/Serializer.hpp index 190ed231e3..8e65902002 100644 --- a/src/armnnSerializer/Serializer.hpp +++ b/src/armnnSerializer/Serializer.hpp @@ -193,6 +193,10 @@ public: void VisitRsqrtLayer(const armnn::IConnectableLayer* layer, const char* name = nullptr) override; + void VisitSliceLayer(const armnn::IConnectableLayer* layer, + const armnn::SliceDescriptor& sliceDescriptor, + const char* name = nullptr) override; + void VisitSoftmaxLayer(const armnn::IConnectableLayer* layer, const armnn::SoftmaxDescriptor& softmaxDescriptor, const char* name = nullptr) override; diff --git a/src/backends/backendsCommon/LayerSupportBase.cpp b/src/backends/backendsCommon/LayerSupportBase.cpp index a8d1eaddc3..7f1fd1097a 100644 --- a/src/backends/backendsCommon/LayerSupportBase.cpp +++ b/src/backends/backendsCommon/LayerSupportBase.cpp @@ -414,6 +414,14 @@ bool LayerSupportBase::IsRsqrtSupported(const TensorInfo &input, return DefaultLayerSupport(__func__, __FILE__, __LINE__, reasonIfUnsupported); } +bool LayerSupportBase::IsSliceSupported(const TensorInfo& input, + const TensorInfo& output, + const SliceDescriptor& descriptor, + Optional reasonIfUnsupported) const +{ + return DefaultLayerSupport(__func__, __FILE__, __LINE__, reasonIfUnsupported); +} + bool LayerSupportBase::IsSoftmaxSupported(const TensorInfo& input, const TensorInfo& output, const SoftmaxDescriptor& descriptor, diff --git a/src/backends/backendsCommon/LayerSupportBase.hpp b/src/backends/backendsCommon/LayerSupportBase.hpp index 25dbdf2906..8df1f8d54f 100644 --- a/src/backends/backendsCommon/LayerSupportBase.hpp +++ b/src/backends/backendsCommon/LayerSupportBase.hpp @@ -253,6 +253,11 @@ public: const TensorInfo& output, Optional reasonIfUnsupported = EmptyOptional()) const override; + bool IsSliceSupported(const TensorInfo& 
input, + const TensorInfo& output, + const SliceDescriptor& descriptor, + Optional reasonIfUnsupported = EmptyOptional()) const override; + bool IsSoftmaxSupported(const TensorInfo& input, const TensorInfo& output, const SoftmaxDescriptor& descriptor, diff --git a/src/backends/backendsCommon/WorkloadData.cpp b/src/backends/backendsCommon/WorkloadData.cpp index f290cbd9cf..2fa0c92daf 100644 --- a/src/backends/backendsCommon/WorkloadData.cpp +++ b/src/backends/backendsCommon/WorkloadData.cpp @@ -2443,7 +2443,7 @@ void QuantizedLstmQueueDescriptor::Validate(const WorkloadInfo& workloadInfo) co ValidateTensorQuantizationSpace(inputInfo, outputStateInInfo, descriptorName, "input", "outputStateIn"); ValidateTensorQuantizationSpace(inputInfo, outputStateOutInfo, descriptorName, "input", "outputStateOut"); ValidateTensorQuantizationSpace(cellStateInInfo, cellStateOutInfo, descriptorName, "cellStateIn", "cellStateOut"); - + // Infer number of batches, input size and output size from tensor dimensions const uint32_t numBatches = inputInfo.GetShape()[0]; const uint32_t inputSize = inputInfo.GetShape()[1]; @@ -2584,4 +2584,59 @@ void AbsQueueDescriptor::Validate(const WorkloadInfo& workloadInfo) const ValidateTensorDataTypesMatch(inputTensorInfo, outputTensorInfo, descriptorName, "input", "output"); } +void SliceQueueDescriptor::Validate(const WorkloadInfo& workloadInfo) const +{ + const std::string descriptorName{"SliceQueueDescriptor"}; + + ValidateNumInputs(workloadInfo, descriptorName, 1); + ValidateNumOutputs(workloadInfo, descriptorName, 1); + + const TensorInfo& inputTensorInfo = workloadInfo.m_InputTensorInfos[0]; + const TensorInfo& outputTensorInfo = workloadInfo.m_OutputTensorInfos[0]; + + ValidateTensorDataTypesMatch(inputTensorInfo, outputTensorInfo, descriptorName, "input", "output"); + + const unsigned int rank = inputTensorInfo.GetNumDimensions(); + if (rank > 4) + { + throw InvalidArgumentException(descriptorName + ": Input tensors with rank greater than 4 are 
not supported."); + } + + ValidateTensorNumDimensions(outputTensorInfo, descriptorName, rank, "output"); + + // Check if m_Begin and m_Size have the expected length + if (m_Parameters.m_Begin.size() != rank) + { + throw InvalidArgumentException(descriptorName + + ": Length of begin offset descriptor must equal rank " + std::to_string(rank)); + } + if (m_Parameters.m_Size.size() != rank) + { + throw InvalidArgumentException(descriptorName + + ": Length of size descriptor must equal rank " + std::to_string(rank)); + } + + // Check if the shape of the output tensor matches m_Size + const TensorShape& outputShape = outputTensorInfo.GetShape(); + for (unsigned int i = 0u; i < rank; ++i) + { + if (m_Parameters.m_Size[i] != outputShape[i]) + { + throw InvalidArgumentException(descriptorName + ": Size descriptor does not match output tensor."); + } + } + + // Check if the sum of begin offset and size in a given dimension + // does not exceed the size of corresponding input + const TensorShape& inputShape = inputTensorInfo.GetShape(); + for(unsigned int i = 0u; i < rank; ++i) + { + if (m_Parameters.m_Begin[i] + m_Parameters.m_Size[i] > inputShape[i]) + { + throw InvalidArgumentException(descriptorName + ": Sum of begin offset and size for dimension " + + std::to_string(i) + " exceeds input size."); + } + } +} + } // namespace armnn diff --git a/src/backends/backendsCommon/WorkloadData.hpp b/src/backends/backendsCommon/WorkloadData.hpp index 35130ad160..1e49243b34 100644 --- a/src/backends/backendsCommon/WorkloadData.hpp +++ b/src/backends/backendsCommon/WorkloadData.hpp @@ -533,4 +533,9 @@ struct AbsQueueDescriptor : QueueDescriptor void Validate(const WorkloadInfo& workloadInfo) const; }; -} //namespace armnn +struct SliceQueueDescriptor : QueueDescriptorWithParameters +{ + void Validate(const WorkloadInfo& workloadInfo) const; +}; + +} // namespace armnn diff --git a/src/backends/backendsCommon/WorkloadFactory.cpp b/src/backends/backendsCommon/WorkloadFactory.cpp index 
17bd98b349..9d6b2bd6a9 100644 --- a/src/backends/backendsCommon/WorkloadFactory.cpp +++ b/src/backends/backendsCommon/WorkloadFactory.cpp @@ -760,6 +760,19 @@ bool IWorkloadFactory::IsLayerSupported(const BackendId& backendId, reason); break; } + case LayerType::Slice: + { + auto cLayer = boost::polymorphic_downcast(&layer); + + const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo(); + const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo(); + + result = layerSupportObject->IsSliceSupported(OverrideDataType(input, dataType), + OverrideDataType(output, dataType), + cLayer->GetParameters(), + reason); + break; + } case LayerType::Softmax: { auto cLayer = boost::polymorphic_downcast(&layer); @@ -1245,6 +1258,12 @@ std::unique_ptr IWorkloadFactory::CreateRsqrt(const RsqrtQueueDescrip return std::unique_ptr(); } +std::unique_ptr IWorkloadFactory::CreateSlice(const SliceQueueDescriptor& descriptor, + const WorkloadInfo& info) const +{ + return std::unique_ptr(); +} + std::unique_ptr IWorkloadFactory::CreateSoftmax(const SoftmaxQueueDescriptor& descriptor, const WorkloadInfo& info) const { @@ -1300,4 +1319,4 @@ std::unique_ptr IWorkloadFactory::CreateTransposeConvolution2d( return std::unique_ptr(); } -} // namepsace armnn \ No newline at end of file +} // namespace armnn diff --git a/src/backends/backendsCommon/WorkloadFactory.hpp b/src/backends/backendsCommon/WorkloadFactory.hpp index 6fd334b49c..91cf2c742c 100644 --- a/src/backends/backendsCommon/WorkloadFactory.hpp +++ b/src/backends/backendsCommon/WorkloadFactory.hpp @@ -186,6 +186,9 @@ public: virtual std::unique_ptr CreateRsqrt(const RsqrtQueueDescriptor& descriptor, const WorkloadInfo& info) const; + virtual std::unique_ptr CreateSlice(const SliceQueueDescriptor& descriptor, + const WorkloadInfo& info) const; + virtual std::unique_ptr CreateSoftmax(const SoftmaxQueueDescriptor& descriptor, const WorkloadInfo& info) const; diff --git 
a/src/backends/backendsCommon/test/IsLayerSupportedTestImpl.hpp b/src/backends/backendsCommon/test/IsLayerSupportedTestImpl.hpp index 1dc9e9700f..17b7934e9f 100644 --- a/src/backends/backendsCommon/test/IsLayerSupportedTestImpl.hpp +++ b/src/backends/backendsCommon/test/IsLayerSupportedTestImpl.hpp @@ -473,6 +473,8 @@ DECLARE_LAYER_POLICY_2_PARAM(Reshape) DECLARE_LAYER_POLICY_1_PARAM(Rsqrt) +DECLARE_LAYER_POLICY_2_PARAM(Slice) + DECLARE_LAYER_POLICY_2_PARAM(Softmax) DECLARE_LAYER_POLICY_2_PARAM(SpaceToBatchNd) -- cgit v1.2.1