author     Teresa Charlin <teresa.charlinreyes@arm.com>    2020-08-11 23:00:18 +0100
committer  TeresaARM <teresa.charlinreyes@arm.com>         2020-08-11 22:43:11 +0000
commit     ca44ffc43cbe6972f8cfc2a803626418554195c2 (patch)
tree       588a9067eb19341de0bfccc89c1b6175617f2aea
parent     4d682747d0f04804cd5f07bf46032f0cdb65e388 (diff)
download   armnn-ca44ffc43cbe6972f8cfc2a803626418554195c2.tar.gz
IVGCVSW-5207 Add to PadLayer the functions to validate and infer shapes
Signed-off-by: Teresa Charlin <teresa.charlinreyes@arm.com>
Change-Id: I63a6da418862b0a843637d73cbe178086f23bc84
-rw-r--r--  src/armnn/layers/PadLayer.cpp  31
-rw-r--r--  src/armnn/layers/PadLayer.hpp   6
2 files changed, 36 insertions, 1 deletion
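The arithmetic behind the new PadLayer::InferOutputShapes below is simple: each output dimension is the corresponding input dimension plus the padding inserted before and after it, taken from m_Param.m_PadList. A minimal standalone sketch of that calculation (plain C++ with no Arm NN dependency; the names inputShape and padList are illustrative only, not part of the Arm NN API):

    #include <cstddef>
    #include <iostream>
    #include <utility>
    #include <vector>

    int main()
    {
        // Illustrative 4-D input shape and one (before, after) pad pair per dimension,
        // mirroring m_Param.m_PadList in the patch below.
        std::vector<unsigned int> inputShape = { 1, 16, 16, 3 };
        std::vector<std::pair<unsigned int, unsigned int>> padList = { { 0, 0 }, { 1, 1 }, { 2, 2 }, { 0, 0 } };

        std::vector<unsigned int> outputShape;
        outputShape.reserve(inputShape.size());
        for (std::size_t i = 0; i < inputShape.size(); ++i)
        {
            // output[i] = input[i] + pad_before[i] + pad_after[i]
            outputShape.push_back(inputShape[i] + padList[i].first + padList[i].second);
        }

        // Prints: 1 18 20 3
        for (unsigned int dim : outputShape)
        {
            std::cout << dim << " ";
        }
        std::cout << std::endl;
    }

For a 1x16x16x3 input padded by (1, 1) on dimension 1 and (2, 2) on dimension 2, this prints 1 18 20 3, which is the shape the patched layer would infer.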
diff --git a/src/armnn/layers/PadLayer.cpp b/src/armnn/layers/PadLayer.cpp
index 4fcbc77c7b..a8c749c570 100644
--- a/src/armnn/layers/PadLayer.cpp
+++ b/src/armnn/layers/PadLayer.cpp
@@ -36,10 +36,39 @@ PadLayer* PadLayer::Clone(Graph& graph) const
     return std::move(layer);
 }
+std::vector<TensorShape> PadLayer::InferOutputShapes(const std::vector<TensorShape>& inputShapes) const
+{
+    ARMNN_ASSERT(inputShapes.size() == 1);
+    const TensorShape& inputShape = inputShapes[0];
+
+    unsigned int rank = inputShape.GetNumDimensions();
+    ARMNN_ASSERT(m_Param.m_PadList.size() == rank);
+    ARMNN_ASSERT(rank != 0);
+
+    std::vector<unsigned int> outputDimensionSizes;
+    outputDimensionSizes.reserve(rank);
+    for (unsigned int i = 0; i < rank; ++i)
+    {
+        outputDimensionSizes.push_back(inputShape[i] + m_Param.m_PadList[i].first + m_Param.m_PadList[i].second);
+    }
+
+    TensorShape tensorShape(rank, outputDimensionSizes.data());
+    return std::vector<TensorShape>({ tensorShape });
+}
+
 void PadLayer::ValidateTensorShapesFromInputs()
 {
-    return;
+    VerifyLayerConnections(1, CHECK_LOCATION());
+
+    const TensorShape& outputShape = GetOutputSlot(0).GetTensorInfo().GetShape();
+
+    VerifyShapeInferenceType(outputShape, m_ShapeInferenceMethod);
+
+    auto inferredShapes = InferOutputShapes({ GetInputSlot(0).GetConnection()->GetTensorInfo().GetShape() });
+
+    ARMNN_ASSERT(inferredShapes.size() == 1);
+    ValidateAndCopyShape(outputShape, inferredShapes[0], m_ShapeInferenceMethod, "PadLayer");
 }

 void PadLayer::Accept(ILayerVisitor& visitor) const
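VerifyShapeInferenceType and ValidateAndCopyShape are helpers on the Layer base class and are not part of this patch; as used here, ValidateAndCopyShape either checks the shape already set on the output slot against the inferred one or overwrites it with the inferred one, depending on m_ShapeInferenceMethod. A rough, hedged illustration of that decision in plain C++ (validateAndCopy and everything around it are stand-ins, not the Arm NN implementation):

    #include <stdexcept>
    #include <string>
    #include <vector>

    // Illustrative stand-in for armnn::ShapeInferenceMethod, which has these two values.
    enum class ShapeInferenceMethod { ValidateOnly, InferAndValidate };

    // Sketch of the validate-or-overwrite behaviour described above.
    void validateAndCopy(std::vector<unsigned int>& outputShape,
                         const std::vector<unsigned int>& inferredShape,
                         ShapeInferenceMethod method,
                         const std::string& layerName)
    {
        if (method == ShapeInferenceMethod::ValidateOnly)
        {
            // The shape set on the output slot must already match what the layer infers.
            if (outputShape != inferredShape)
            {
                throw std::runtime_error(layerName + ": set output shape does not match inferred shape");
            }
        }
        else
        {
            // InferAndValidate: adopt the inferred shape.
            outputShape = inferredShape;
        }
    }

    int main()
    {
        std::vector<unsigned int> output = { 1, 18, 20, 3 };
        const std::vector<unsigned int> inferred = { 1, 18, 20, 3 };
        validateAndCopy(output, inferred, ShapeInferenceMethod::ValidateOnly, "pad");  // passes: shapes match
    }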
diff --git a/src/armnn/layers/PadLayer.hpp b/src/armnn/layers/PadLayer.hpp
index 16cdbf57d4..5664997597 100644
--- a/src/armnn/layers/PadLayer.hpp
+++ b/src/armnn/layers/PadLayer.hpp
@@ -29,6 +29,12 @@ public:
     /// @param [in] shapeInferenceMethod Indicates if output shape shall be overwritten or just validated.
     void ValidateTensorShapesFromInputs() override;

+    /// By default returns inputShapes if the number of inputs is equal to the number of outputs,
+    /// otherwise infers the output shapes from the given input shapes and layer properties.
+    /// @param [in] inputShapes The input shapes the layer has.
+    /// @return A vector containing the inferred output shape.
+    std::vector<TensorShape> InferOutputShapes(const std::vector<TensorShape>& inputShapes) const override;
+
     void Accept(ILayerVisitor& visitor) const override;

 protected:
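For completeness, a sketch of how a pad layer that exercises this code might be built through the public API (assuming the armnn/ArmNN.hpp umbrella header and the INetwork/PadDescriptor interface of this Arm NN version; shape validation and inference run later, when the graph is validated or optimised, not at construction time):

    #include <armnn/ArmNN.hpp>

    int main()
    {
        using namespace armnn;

        INetworkPtr network = INetwork::Create();

        // 1x16x16x3 input tensor.
        IConnectableLayer* input = network->AddInputLayer(0, "input");
        input->GetOutputSlot(0).SetTensorInfo(TensorInfo(TensorShape({ 1, 16, 16, 3 }), DataType::Float32));

        // One (before, after) pad pair per dimension; the pad list size must match the input rank,
        // as the new ARMNN_ASSERT in InferOutputShapes requires.
        PadDescriptor padDescriptor({ { 0, 0 }, { 1, 1 }, { 2, 2 }, { 0, 0 } });
        IConnectableLayer* pad = network->AddPadLayer(padDescriptor, "pad");
        input->GetOutputSlot(0).Connect(pad->GetInputSlot(0));

        // The pad layer's output TensorInfo would normally be set here too, or left to be
        // filled in by shape inference depending on how the network is optimised.
        IConnectableLayer* output = network->AddOutputLayer(0, "output");
        pad->GetOutputSlot(0).Connect(output->GetInputSlot(0));

        return 0;
    }

With this pad list, the inferred output shape for the 1x16x16x3 input would be 1x18x20x3, matching the standalone arithmetic shown earlier.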