37 return std::move(layer);
49 std::vector<unsigned int> outputDimensionSizes(rank);
50 for (unsigned int i = 0; i < rank; ++i)
56 return std::vector<TensorShape>({ tensorShape });
PadDescriptor m_Param
The parameters for the layer (not including tensor-valued weights etc.).
const PadDescriptor & GetParameters() const
std::vector< TensorShape > InferOutputShapes(const std::vector< TensorShape > &inputShapes) const override
By default returns inputShapes if the number of inputs are equal to number of outputs, otherwise infers the output shapes from given input shapes and layer properties.
const TensorShape & GetShape() const
virtual std::unique_ptr< IWorkload > CreateWorkload(const IWorkloadFactory &factory) const override
Makes a workload for the Pad type.
std::vector< std::pair< unsigned int, unsigned int > > m_PadList
Specifies the padding for each input dimension.
void VerifyShapeInferenceType(const TensorShape &outputShape, ShapeInferenceMethod shapeInferenceMethod)
Copyright (c) 2021 ARM Limited and Contributors.
This layer represents a pad operation.
LayerDescriptor m_Parameters
void ValidateAndCopyShape(const TensorShape &outputShape, const TensorShape &inferredShape, const ShapeInferenceMethod shapeInferenceMethod, const std::string &layerName, const unsigned int outputSlotIndex=0)
void VerifyLayerConnections(unsigned int expectedConnections, const CheckLocation &location) const
A PadDescriptor for the PadLayer.
const InputSlot & GetInputSlot(unsigned int index) const override
Get a const input slot handle by slot index.
virtual std::unique_ptr< IWorkload > CreatePad(const PadQueueDescriptor &descriptor, const WorkloadInfo &Info) const
#define ARMNN_ASSERT(COND)
void Pad(const TensorInfo &inputInfo, const TensorInfo &outputInfo, const ITensorHandle *inputHandle, ITensorHandle *outputHandle, const PadQueueDescriptor &data)
void SetAdditionalInfo(QueueDescriptor &descriptor) const
PadLayer(const PadDescriptor &param, const char *name)
Constructor to create a PadLayer.
unsigned int GetNumDimensions() const
Function that returns the tensor rank.
WorkloadInfo PrepInfoAndDesc(QueueDescriptor &descriptor) const
Helper function to reduce duplication in *LayerCreateWorkload.
const OutputSlot & GetOutputSlot(unsigned int index=0) const override
Get the const output slot handle by slot index.
virtual const TensorInfo & GetTensorInfo() const =0
void Accept(ILayerVisitor &visitor) const override
Apply a visitor to this layer.
const char * GetName() const override
Returns the name of the layer.
void ValidateTensorShapesFromInputs() override
Check if the input tensor shape(s) will lead to a valid configuration of PadLayer.
PadLayer * Clone(Graph &graph) const override
Creates a dynamically-allocated copy of this layer.
const TensorInfo & GetTensorInfo() const override
ShapeInferenceMethod m_ShapeInferenceMethod
virtual void VisitPadLayer(const IConnectableLayer *layer, const PadDescriptor &padDescriptor, const char *name=nullptr)=0
Function a pad layer should call back to when its Accept(ILayerVisitor&) function is invoked...
LayerType
When adding a new layer, adapt also the LastLayer enum value in the enum class LayerType below...