diff options
Diffstat (limited to 'src/armnn')
-rw-r--r--  src/armnn/Graph.cpp                       |  2
-rw-r--r--  src/armnn/Graph.hpp                       | 10
-rw-r--r--  src/armnn/Layer.cpp                       | 41
-rw-r--r--  src/armnn/Layer.hpp                       |  8
-rw-r--r--  src/armnn/Network.cpp                     | 29
-rw-r--r--  src/armnn/Network.hpp                     |  1
-rw-r--r--  src/armnn/layers/LayerCloneBase.hpp       |  1
-rw-r--r--  src/armnn/layers/LayerWithParameters.hpp  |  1
8 files changed, 81 insertions(+), 12 deletions(-)
diff --git a/src/armnn/Graph.cpp b/src/armnn/Graph.cpp index 8500e529b0..ae773cc876 100644 --- a/src/armnn/Graph.cpp +++ b/src/armnn/Graph.cpp @@ -26,6 +26,8 @@ namespace armnn Graph::Graph(const Graph& other) : m_LayersInOrder(other.m_LayersInOrder) +, m_AllowExpandedDims(other.m_AllowExpandedDims) +, m_ShapeInferenceMethod(other.m_ShapeInferenceMethod) , m_Profiler(other.m_Profiler) { std::unordered_map<const Layer*, Layer*> otherToClonedMap; diff --git a/src/armnn/Graph.hpp b/src/armnn/Graph.hpp index 0c34d35685..5edf34c179 100644 --- a/src/armnn/Graph.hpp +++ b/src/armnn/Graph.hpp @@ -95,8 +95,9 @@ public: const Graph& m_Graph; }; - Graph(bool shapeInferenceMethod = false) + Graph(bool shapeInferenceMethod = false, bool allowExpandedDims = false) : m_LayersInOrder(true) + , m_AllowExpandedDims(allowExpandedDims) , m_ShapeInferenceMethod(shapeInferenceMethod ? ShapeInferenceMethod::InferAndValidate : ShapeInferenceMethod::ValidateOnly) , m_Profiler(std::make_shared<IProfiler>()) @@ -118,11 +119,12 @@ public: m_LayersInOrder = std::move(other.m_LayersInOrder); m_Views = std::move(other.m_Views); m_Profiler = std::move(other.m_Profiler); - other.ForEachLayer([this](Layer* otherLayer) { otherLayer->Reparent(*this, m_Layers.end()); }); + m_AllowExpandedDims = other.m_AllowExpandedDims; + m_ShapeInferenceMethod = other.m_ShapeInferenceMethod; ARMNN_ASSERT(other.m_PosInGraphMap.empty()); ARMNN_ASSERT(other.m_Layers.empty()); @@ -272,8 +274,11 @@ private: mutable LayerList m_Layers; mutable bool m_LayersInOrder; + bool m_AllowExpandedDims; + std::map<const GraphEvent, std::list<IGraphObservable*>> m_Views; ShapeInferenceMethod m_ShapeInferenceMethod; + std::shared_ptr<IProfiler> m_Profiler; // Throws exception due to a layer input not being connected to an output slot. @@ -424,6 +429,7 @@ inline LayerT* Graph::AddLayer(Args&&... 
args) LayerT* const layer = new LayerInGraph<LayerT>(*this, std::forward<Args>(args)...); layer->SetShapeInferenceMethod(m_ShapeInferenceMethod); + layer->SetAllowExpandedDims(m_AllowExpandedDims); NotifyObservables(GraphEvent::LayerAdded, layer); diff --git a/src/armnn/Layer.cpp b/src/armnn/Layer.cpp index 3241b5024e..b1d495244d 100644 --- a/src/armnn/Layer.cpp +++ b/src/armnn/Layer.cpp @@ -11,6 +11,8 @@ #include <armnn/utility/NumericCast.hpp> +#include <armnnUtils/TensorUtils.hpp> + #include <client/include/IProfilingService.hpp> #include <fmt/format.h> @@ -425,11 +427,40 @@ void Layer::ValidateAndCopyShape(const TensorShape& outputShape, { if (shapeInferenceMethod == ShapeInferenceMethod::ValidateOnly) { - ConditionalThrowIfNotEqual<LayerValidationException>( - layerName + ": TensorShape set on OutputSlot[0] does not match the inferred shape.", - outputShape, - inferredShape); - return; + if (m_AllowExpandedDims) + { + std::vector<unsigned int> outputDims = armnnUtils::SqueezeDims(outputShape); + std::vector<unsigned int> inferredDims = armnnUtils::SqueezeDims(inferredShape); + + if (outputDims.size() != inferredDims.size()) + { + std::stringstream ss; + ss << layerName << ": TensorShape set on OutputSlot[" << outputSlotIndex << + "] does not match the inferred shape. 
"; + ss << outputShape << " != " << inferredShape; + throw LayerValidationException(ss.str()); + } + for (unsigned int i = 0; i < outputDims.size(); ++i) + { + if (outputDims[i] != inferredDims[i]) + { + std::stringstream ss; + ss << layerName << ": TensorShape set on OutputSlot[" << outputSlotIndex << + "] does not match the inferred shape at dimension index ["; + ss << i << "] " << outputShape << " != " << inferredShape; + throw LayerValidationException(ss.str()); + } + } + return; + } + else + { + ConditionalThrowIfNotEqual<LayerValidationException>( + layerName + ": TensorShape set on OutputSlot[0] does not match the inferred shape.", + outputShape, + inferredShape); + return; + } } if (outputShape.GetDimensionality() == Dimensionality::Specified) diff --git a/src/armnn/Layer.hpp b/src/armnn/Layer.hpp index 114d69c652..767cf97eb3 100644 --- a/src/armnn/Layer.hpp +++ b/src/armnn/Layer.hpp @@ -238,6 +238,7 @@ public: } ShapeInferenceMethod GetShapeInferenceMethod() const { return m_ShapeInferenceMethod; }; + bool GetAllowExpandedDims() const { return m_AllowExpandedDims; }; const std::vector<InputSlot>& GetInputSlots() const { return m_InputSlots; } const std::vector<OutputSlot>& GetOutputSlots() const { return m_OutputSlots; } @@ -343,6 +344,11 @@ public: m_ShapeInferenceMethod = shapeInferenceMethod; } + void SetAllowExpandedDims(bool allowExpandedDims) + { + m_AllowExpandedDims = allowExpandedDims; + } + template<typename T> std::shared_ptr<T> GetAdditionalInformation() const { @@ -428,6 +434,8 @@ private: mutable LayerPriority m_Priority = 0; mutable bool m_Visiting = false; + bool m_AllowExpandedDims = false; + LayerGuid m_Guid; std::list<std::string> m_RelatedLayerNames; diff --git a/src/armnn/Network.cpp b/src/armnn/Network.cpp index 77ad5c4dc2..6a646d3cc8 100644 --- a/src/armnn/Network.cpp +++ b/src/armnn/Network.cpp @@ -1854,16 +1854,35 @@ IOptimizedNetworkPtr Optimize(const INetwork& inNetwork, bool NetworkImpl::GetShapeInferenceMethod() { - if 
(m_NetworkOptions.size() > 0 && m_NetworkOptions[0].GetBackendId().Get() == "ShapeInferenceMethod") + bool shapeInferenceMethod = false; + + ParseOptions(m_NetworkOptions, "ShapeInferenceMethod", [&](std::string name, const BackendOptions::Var& value) { - return m_NetworkOptions[0].GetOption(0).GetValue().AsBool(); - } + if (name == "InferAndValidate") + { + shapeInferenceMethod |= value.AsBool(); + } + }); + return shapeInferenceMethod; +} - return false; +bool NetworkImpl::GetAllowExpandedDims() +{ + bool allowExpandedDims = false; + + ParseOptions(m_NetworkOptions, "AllowExpandedDims", [&](std::string name, const BackendOptions::Var& value) + { + if (name == "AllowExpandedDims") + { + allowExpandedDims |= value.AsBool(); + } + }); + return allowExpandedDims; } + NetworkImpl::NetworkImpl(NetworkOptions networkOptions) : m_NetworkOptions(networkOptions), - m_Graph(std::make_unique<Graph>(GetShapeInferenceMethod())) + m_Graph(std::make_unique<Graph>(GetShapeInferenceMethod(), GetAllowExpandedDims())) {} NetworkImpl::~NetworkImpl() diff --git a/src/armnn/Network.hpp b/src/armnn/Network.hpp index c2be600d05..6c7c2f5c7e 100644 --- a/src/armnn/Network.hpp +++ b/src/armnn/Network.hpp @@ -262,6 +262,7 @@ public: private: bool GetShapeInferenceMethod(); + bool GetAllowExpandedDims(); NetworkOptions m_NetworkOptions; std::unique_ptr<Graph> m_Graph; diff --git a/src/armnn/layers/LayerCloneBase.hpp b/src/armnn/layers/LayerCloneBase.hpp index 348b1f3bf6..54b64c5852 100644 --- a/src/armnn/layers/LayerCloneBase.hpp +++ b/src/armnn/layers/LayerCloneBase.hpp @@ -19,6 +19,7 @@ LayerType* Layer::CloneBase(Graph& graph, Params&& ... 
params) const layer->SetBackendId(GetBackendId()); layer->SetGuid(GetGuid()); layer->SetShapeInferenceMethod(m_ShapeInferenceMethod); + layer->SetAllowExpandedDims(m_AllowExpandedDims); return layer; } diff --git a/src/armnn/layers/LayerWithParameters.hpp b/src/armnn/layers/LayerWithParameters.hpp index 2ac16c5f5f..8d9ddffc24 100644 --- a/src/armnn/layers/LayerWithParameters.hpp +++ b/src/armnn/layers/LayerWithParameters.hpp @@ -43,6 +43,7 @@ protected: WorkloadInfo PrepInfoAndDesc(QueueDescriptor& descriptor) const { descriptor.m_Parameters = m_Param; + descriptor.m_AllowExpandedDims = GetAllowExpandedDims(); return Layer::PrepInfoAndDesc(descriptor); } |