Arm NN 22.08 — ReshapeLayer.cpp (source file listing; see the documentation page for this file).
1 //
2 // Copyright © 2017 Arm Ltd and Contributors. All rights reserved.
3 // SPDX-License-Identifier: MIT
4 //
5 #include "ReshapeLayer.hpp"
6 
7 #include "LayerCloneBase.hpp"
8 
10 #include <armnn/TypesUtils.hpp>
13 
14 namespace armnn
15 {
16 
17 ReshapeLayer::ReshapeLayer(const ReshapeDescriptor& param, const char* name)
18  : LayerWithParameters(1, 1, LayerType::Reshape, param, name)
19 {
20 }
21 
22 std::unique_ptr<IWorkload> ReshapeLayer::CreateWorkload(const IWorkloadFactory& factory) const
23 {
24  ReshapeQueueDescriptor descriptor;
25  SetAdditionalInfo(descriptor);
26 
27  return factory.CreateWorkload(LayerType::Reshape, descriptor, PrepInfoAndDesc(descriptor));
28 }
29 
31 {
32  return CloneBase<ReshapeLayer>(graph, m_Param, GetName());
33 }
34 
35 std::vector<TensorShape> ReshapeLayer::InferOutputShapes(const std::vector<TensorShape>& inputShapes) const
36 {
37  IgnoreUnused(inputShapes);
38  return std::vector<TensorShape>({ m_Param.m_TargetShape });
39 }
40 
42 {
44 
45  const TensorShape& outputShape = GetOutputSlot(0).GetTensorInfo().GetShape();
46 
48 
49  auto inferredShapes = InferOutputShapes({ GetInputSlot(0).GetConnection()->GetTensorInfo().GetShape() });
50 
51  ARMNN_ASSERT(inferredShapes.size() == 1);
52 
53  ValidateAndCopyShape(outputShape, inferredShapes[0], m_ShapeInferenceMethod, "ReshapeLayer");
54 }
55 
57 {
58  strategy.ExecuteStrategy(this, GetParameters(), {}, GetName());
59 }
60 
61 } // namespace armnn
ReshapeDescriptor m_Param
The parameters for the layer (not including tensor-valued weights etc.).
const TensorShape & GetShape() const
Definition: Tensor.hpp:191
virtual std::unique_ptr< IWorkload > CreateWorkload(const IWorkloadFactory &factory) const override
Makes a workload for the Reshape type.
A ReshapeDescriptor for the ReshapeLayer.
This layer represents a reshape operation.
void VerifyShapeInferenceType(const TensorShape &outputShape, ShapeInferenceMethod shapeInferenceMethod)
Definition: Layer.cpp:491
ReshapeLayer * Clone(Graph &graph) const override
Creates a dynamically-allocated copy of this layer.
Copyright (c) 2021 ARM Limited and Contributors.
const ReshapeDescriptor & GetParameters() const override
void IgnoreUnused(Ts &&...)
const IOutputSlot * GetConnection() const override
Definition: Layer.hpp:206
void ValidateAndCopyShape(const TensorShape &outputShape, const TensorShape &inferredShape, const ShapeInferenceMethod shapeInferenceMethod, const std::string &layerName, const unsigned int outputSlotIndex=0)
Definition: Layer.cpp:422
TensorShape m_TargetShape
Target shape value.
void VerifyLayerConnections(unsigned int expectedConnections, const CheckLocation &location) const
Definition: Layer.cpp:378
const InputSlot & GetInputSlot(unsigned int index) const override
Get a const input slot handle by slot index.
Definition: Layer.hpp:324
#define ARMNN_ASSERT(COND)
Definition: Assert.hpp:14
virtual void ExecuteStrategy(const IConnectableLayer *layer, const armnn::BaseDescriptor &descriptor, const std::vector< armnn::ConstTensor > &constants, const char *name, const armnn::LayerBindingId id=0)=0
#define CHECK_LOCATION()
Definition: Exceptions.hpp:203
void SetAdditionalInfo(QueueDescriptor &descriptor) const
Definition: Layer.cpp:274
ReshapeLayer(const ReshapeDescriptor &desc, const char *name)
Constructor to create a ReshapeLayer.
void ValidateTensorShapesFromInputs() override
Check if the input tensor shape(s) will lead to a valid configuration of ReshapeLayer.
WorkloadInfo PrepInfoAndDesc(QueueDescriptor &descriptor) const
Helper function to reduce duplication in *LayerCreateWorkload.
const OutputSlot & GetOutputSlot(unsigned int index=0) const override
Get the const output slot handle by slot index.
Definition: Layer.hpp:326
virtual const TensorInfo & GetTensorInfo() const =0
const char * GetName() const override
Returns the name of the layer.
Definition: Layer.hpp:319
std::vector< TensorShape > InferOutputShapes(const std::vector< TensorShape > &inputShapes) const override
By default returns inputShapes if the number of inputs are equal to number of outputs, otherwise infers the output shapes from given input shapes and layer properties.
virtual std::unique_ptr< IWorkload > CreateWorkload(LayerType type, const QueueDescriptor &descriptor, const WorkloadInfo &info) const
const TensorInfo & GetTensorInfo() const override
Definition: Layer.cpp:92
ShapeInferenceMethod m_ShapeInferenceMethod
Definition: Layer.hpp:423
void ExecuteStrategy(IStrategy &strategy) const override
Apply a visitor to this layer.
LayerType
When adding a new layer, adapt also the LastLayer enum value in the enum class LayerType below...
Definition: Types.hpp:468