ArmNN 21.02
ReshapeLayer.cpp
//
// Copyright © 2017 Arm Ltd and Contributors. All rights reserved.
// SPDX-License-Identifier: MIT
//
#include "ReshapeLayer.hpp"

#include "LayerCloneBase.hpp"

#include <armnn/utility/IgnoreUnused.hpp>
#include <armnn/TypesUtils.hpp>
#include <backendsCommon/WorkloadData.hpp>
#include <backendsCommon/WorkloadFactory.hpp>

namespace armnn
{

ReshapeLayer::ReshapeLayer(const ReshapeDescriptor& param, const char* name)
    : LayerWithParameters(1, 1, LayerType::Reshape, param, name)
{
}

std::unique_ptr<IWorkload> ReshapeLayer::CreateWorkload(const IWorkloadFactory& factory) const
{
    ReshapeQueueDescriptor descriptor;
    SetAdditionalInfo(descriptor);

    return factory.CreateReshape(descriptor, PrepInfoAndDesc(descriptor));
}

ReshapeLayer* ReshapeLayer::Clone(Graph& graph) const
{
    return CloneBase<ReshapeLayer>(graph, m_Param, GetName());
}

std::vector<TensorShape> ReshapeLayer::InferOutputShapes(const std::vector<TensorShape>& inputShapes) const
{
    IgnoreUnused(inputShapes);
    return std::vector<TensorShape>({ m_Param.m_TargetShape });
}

void ReshapeLayer::ValidateTensorShapesFromInputs()
{
    VerifyLayerConnections(1, CHECK_LOCATION());

    const TensorShape& outputShape = GetOutputSlot(0).GetTensorInfo().GetShape();

    VerifyShapeInferenceType(outputShape, m_ShapeInferenceMethod);

    auto inferredShapes = InferOutputShapes({ GetInputSlot(0).GetConnection()->GetTensorInfo().GetShape() });

    ARMNN_ASSERT(inferredShapes.size() == 1);

    ValidateAndCopyShape(outputShape, inferredShapes[0], m_ShapeInferenceMethod, "ReshapeLayer");
}

void ReshapeLayer::Accept(ILayerVisitor& visitor) const
{
    visitor.VisitReshapeLayer(this, GetParameters(), GetName());
}

} // namespace armnn
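
For orientation, here is a minimal sketch of how a reshape typically reaches this layer through ArmNN's public INetwork interface. The shapes, layer names, and binding ids are illustrative assumptions, not taken from this file.

// Minimal sketch (assumed example): flatten a 1x2x3x4 tensor to 1x24 via a reshape layer.
#include <armnn/ArmNN.hpp>

int main()
{
    using namespace armnn;

    INetworkPtr network = INetwork::Create();

    // The target shape is carried by ReshapeDescriptor::m_TargetShape,
    // which ReshapeLayer::InferOutputShapes returns verbatim.
    ReshapeDescriptor reshapeDesc;
    reshapeDesc.m_TargetShape = TensorShape({1, 24});

    IConnectableLayer* input   = network->AddInputLayer(0, "input");
    IConnectableLayer* reshape = network->AddReshapeLayer(reshapeDesc, "reshape");
    IConnectableLayer* output  = network->AddOutputLayer(0, "output");

    input->GetOutputSlot(0).Connect(reshape->GetInputSlot(0));
    reshape->GetOutputSlot(0).Connect(output->GetInputSlot(0));

    input->GetOutputSlot(0).SetTensorInfo(TensorInfo(TensorShape({1, 2, 3, 4}), DataType::Float32));
    reshape->GetOutputSlot(0).SetTensorInfo(TensorInfo(TensorShape({1, 24}), DataType::Float32));

    return 0;
}

During graph validation, ValidateTensorShapesFromInputs above then checks that the output slot's shape agrees with the inferred target shape.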
ReshapeDescriptor m_Param
The parameters for the layer (not including tensor-valued weights etc.).
const ReshapeDescriptor & GetParameters() const
const TensorShape & GetShape() const
Definition: Tensor.hpp:187
virtual std::unique_ptr< IWorkload > CreateWorkload(const IWorkloadFactory &factory) const override
Makes a workload for the Reshape type.
A ReshapeDescriptor for the ReshapeLayer.
This layer represents a reshape operation.
void VerifyShapeInferenceType(const TensorShape &outputShape, ShapeInferenceMethod shapeInferenceMethod)
Definition: Layer.cpp:432
ReshapeLayer * Clone(Graph &graph) const override
Creates a dynamically-allocated copy of this layer.
namespace armnn
Copyright (c) 2021 ARM Limited and Contributors.
void IgnoreUnused(Ts &&...)
const IOutputSlot * GetConnection() const override
Definition: Layer.hpp:199
void ValidateAndCopyShape(const TensorShape &outputShape, const TensorShape &inferredShape, const ShapeInferenceMethod shapeInferenceMethod, const std::string &layerName, const unsigned int outputSlotIndex=0)
Definition: Layer.cpp:392
TensorShape m_TargetShape
Target shape value.
void VerifyLayerConnections(unsigned int expectedConnections, const CheckLocation &location) const
Definition: Layer.cpp:348
const InputSlot & GetInputSlot(unsigned int index) const override
Get a const input slot handle by slot index.
Definition: Layer.hpp:316
void Accept(ILayerVisitor &visitor) const override
Apply a visitor to this layer.
#define ARMNN_ASSERT(COND)
Definition: Assert.hpp:14
#define CHECK_LOCATION()
Definition: Exceptions.hpp:197
virtual std::unique_ptr< IWorkload > CreateReshape(const ReshapeQueueDescriptor &descriptor, const WorkloadInfo &info) const
void SetAdditionalInfo(QueueDescriptor &descriptor) const
Definition: Layer.cpp:245
ReshapeLayer(const ReshapeDescriptor &desc, const char *name)
Constructor to create a ReshapeLayer.
void ValidateTensorShapesFromInputs() override
Check if the input tensor shape(s) will lead to a valid configuration of ReshapeLayer.
virtual void VisitReshapeLayer(const IConnectableLayer *layer, const ReshapeDescriptor &reshapeDescriptor, const char *name=nullptr)=0
Function a reshape layer should call back to when its Accept(ILayerVisitor&) function is invoked.
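
To illustrate that callback path, the following is a rough sketch of a visitor built on armnn::LayerVisitorBase; the VisitorNoThrowPolicy base and the reporting logic are assumptions for illustration, not part of this file.

#include <armnn/LayerVisitorBase.hpp>
#include <armnn/utility/IgnoreUnused.hpp>
#include <iostream>

// Sketch only: reports the target shape of every reshape layer it is applied to.
class ReshapeReporter : public armnn::LayerVisitorBase<armnn::VisitorNoThrowPolicy>
{
public:
    void VisitReshapeLayer(const armnn::IConnectableLayer* layer,
                           const armnn::ReshapeDescriptor& reshapeDescriptor,
                           const char* name = nullptr) override
    {
        armnn::IgnoreUnused(layer);
        std::cout << (name ? name : "<unnamed>") << " reshapes to "
                  << reshapeDescriptor.m_TargetShape.GetNumDimensions() << " dimensions\n";
    }
};

// Usage: ReshapeReporter reporter; layer->Accept(reporter);
// ReshapeLayer::Accept then calls back into VisitReshapeLayer with its parameters and name.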
WorkloadInfo PrepInfoAndDesc(QueueDescriptor &descriptor) const
Helper function to reduce duplication in *LayerCreateWorkload.
const OutputSlot & GetOutputSlot(unsigned int index=0) const override
Get the const output slot handle by slot index.
Definition: Layer.hpp:318
virtual const TensorInfo & GetTensorInfo() const =0
const char * GetName() const override
Returns the name of the layer.
Definition: Layer.hpp:311
std::vector< TensorShape > InferOutputShapes(const std::vector< TensorShape > &inputShapes) const override
By default, returns inputShapes if the number of inputs is equal to the number of outputs; otherwise infers the output shapes from the given input shapes and layer properties.
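
For ReshapeLayer specifically, the override above ignores the input shapes and returns m_TargetShape. A test-style sketch against the internal Graph API (assumed internal headers, in the spirit of ArmNN's own unit tests) behaves like this:

#include <Graph.hpp>                 // internal header, requires src/armnn on the include path
#include <layers/ReshapeLayer.hpp>   // internal header

#include <armnn/Descriptors.hpp>
#include <armnn/Tensor.hpp>
#include <vector>

void Sketch()
{
    armnn::Graph graph;

    armnn::ReshapeDescriptor desc;
    desc.m_TargetShape = armnn::TensorShape({1, 24});

    // Graph::AddLayer forwards its arguments to the ReshapeLayer constructor above.
    armnn::ReshapeLayer* layer = graph.AddLayer<armnn::ReshapeLayer>(desc, "reshape");

    // Whatever input shape is passed in, the inferred shape is m_TargetShape.
    std::vector<armnn::TensorShape> inferred =
        layer->InferOutputShapes({ armnn::TensorShape({1, 2, 3, 4}) });
    // inferred.size() == 1 && inferred[0] == armnn::TensorShape({1, 24})
}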
const TensorInfo & GetTensorInfo() const override
Definition: Layer.cpp:63
ShapeInferenceMethod m_ShapeInferenceMethod
Definition: Layer.hpp:408
LayerType
When adding a new layer, adapt also the LastLayer enum value in the enum class LayerType below.
Definition: Types.hpp:419