23.02
|
Go to the documentation of this file.
24 bool checkDataTypeInputandOutput(
const Layer& layer)
26 auto inputInfo = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
27 auto outputInfo = layer.GetOutputSlot(0).GetTensorInfo();
28 bool sameDataType = (inputInfo.GetDataType() == outputInfo.GetDataType());
35 bool sameScale = (inputInfo.GetQuantizationScale() == outputInfo.GetQuantizationScale());
36 bool sameOffset = (inputInfo.GetQuantizationOffset() == outputInfo.GetQuantizationOffset());
38 return (sameScale && sameOffset);
53 template<
typename LayerType>
60 replacementLayer->SetAdditionalInfoForObject(
61 std::make_shared<ActivationDescriptor>(activationDesc));
63 SubgraphView substitutionSubgraph({baseLayer, activationLayer},
64 CreateIInputsFrom({baseLayer}),
65 CreateIOutputsFrom({activationLayer}));
68 optimizationViews.
AddSubstitution({substitutionSubgraph, replacementSubgraph});
70 return replacementLayer;
73 template<
typename LayerType>
81 LayerType* replacementLayer = PolymorphicDowncast<LayerType*>(replacement);
89 return replacementLayer;
92 template<
typename LayerType>
100 LayerType* replacementLayer = PolymorphicDowncast<LayerType*>(replacement);
108 return replacementLayer;
111 template<
typename LayerType>
119 LayerType* replacementLayer = PolymorphicDowncast<LayerType*>(replacement);
127 return replacementLayer;
130 template<
typename LayerType>
138 LayerType* replacementLayer = PolymorphicDowncast<LayerType*>(replacement);
146 return replacementLayer;
149 template<
typename LayerType>
163 LayerType* replacementLayer = PolymorphicDowncast<LayerType*>(replacement);
171 SubgraphView substitutionSubgraph({baseLayer, activationLayer},
172 CreateIInputsFrom({baseLayer}),
173 CreateIOutputsFrom({activationLayer}));
176 return replacementLayer;
179 template<
typename LayerType>
189 LayerType* replacementLayer = PolymorphicDowncast<LayerType*>(replacement);
198 return replacementLayer;
201 template<
typename LayerType>
211 LayerType* replacementLayer = PolymorphicDowncast<LayerType*>(replacement);
220 return replacementLayer;
223 template<
typename LayerType>
233 LayerType* replacementLayer = PolymorphicDowncast<LayerType*>(replacement);
242 return replacementLayer;
249 template<
typename LayerType>
255 std::vector<IConnectableLayer*> layers;
258 std::vector<uint32_t> axes;
259 unsigned int recalulatedAxis = 0;
261 for (
unsigned int i = 0; i != desc.
m_vAxis.size(); ++i)
264 TensorInfo layerInfo = baseLayer->GetInputSlot(0).GetConnectedOutputSlot()->GetTensorInfo();
266 axes.emplace_back(desc.
m_vAxis[i]);
274 std::vector<uint32_t> singleAxis(1, desc.
m_vAxis[i] - recalulatedAxis);
278 newReduceDescriptor.
m_vAxis.assign(singleAxis.begin(), singleAxis.end());
281 std::string layerName =
"reduce_layer_" + std::to_string(i);
283 Layer* replacementLayer = PolymorphicDowncast<Layer*>(
291 layers[i - 1]->GetOutputSlot(0).Connect(replacementLayer->
GetInputSlot(0));
302 layers.emplace_back(replacementLayer);
306 ARMNN_ASSERT(baseLayer->GetOutputSlot(0).GetTensorInfo() ==
307 PolymorphicDowncast<Layer*>(layers.back())->GetOutputSlot().GetTensorInfo());
315 template<
typename LayerType>
318 std::vector<IConnectableLayer*>& layers)
320 std::list<IConnectableLayer*> replacementLayers(layers.begin(), layers.end());
323 SubgraphView replacementSubgraph(std::move(replacementLayers),
324 CreateIInputsFrom({replacementLayers.front()}),
325 CreateIOutputsFrom({replacementLayers.back()}));
327 optimizationViews.
AddSubstitution({substitutionSubgraph, replacementSubgraph});
IConnectableLayer * AddConvolution2dLayer(const Convolution2dDescriptor &convolution2dDescriptor, const char *name=nullptr)
Adds a 2D convolution layer to the network.
IConnectableLayer * AddDepthwiseConvolution2dLayer(const DepthwiseConvolution2dDescriptor &convolution2dDescriptor, const char *name=nullptr)
Adds a 2D depthwise convolution layer to the network.
LayerType * FuseLayer(OptimizationViews &optimizationViews, LayerType *baseLayer, LayerType *replacementLayer, ActivationLayer *activationLayer, ActivationDescriptor &activationDesc)
LayerType * FuseMultiplicationLayer(OptimizationViews &optimizationViews, LayerType *baseLayer, ActivationLayer *activationLayer, ActivationDescriptor &activationDesc, std::string name)
IConnectableLayer * AddDivisionLayer(const char *name=nullptr)
Adds a division layer to the network.
LayerType * FuseSubtractionLayer(OptimizationViews &optimizationViews, LayerType *baseLayer, ActivationLayer *activationLayer, ActivationDescriptor &activationDesc, std::string name)
An ActivationDescriptor for the ActivationLayer.
Interface for a layer that is connectable to other layers via InputSlots and OutputSlots.
void SetTensorInfo(const TensorInfo &tensorInfo) override
constexpr bool IsQuantizedType()
IConnectableLayer * AddSubtractionLayer(const char *name=nullptr)
Adds a subtraction layer to the network.
A tensor defined by a TensorInfo (shape and data type) and an immutable backing store.
std::vector< uint32_t > m_vAxis
The indices of the dimensions to reduce.
A ReduceDescriptor for the REDUCE operators.
LayerType * FuseDivisionLayer(OptimizationViews &optimizationViews, LayerType *baseLayer, ActivationLayer *activationLayer, ActivationDescriptor &activationDesc, std::string name)
void AddSubstitution(SubstitutionPair &&substitution)
const TensorInfo ComputeReductionTensorShape(const armnn::TensorInfo &input, const std::vector< uint32_t > &vAxis, const bool keepDims)
Function to compute the output tensor shape based on the axes and on whether keepDims is set.
This layer represents an activation operation with the specified activation function.
LayerType * FuseFullyConnectedLayer(OptimizationViews &optimizationViews, LayerType *baseLayer, ActivationLayer *activationLayer, ActivationDescriptor &activationDesc, std::string name)
IConnectableLayer * AddReduceLayer(const ReduceDescriptor &reduceDescriptor, const char *name=nullptr)
Adds a reduce layer to the network.
Copyright (c) 2021 ARM Limited and Contributors.
IConnectableLayer * AddMultiplicationLayer(const char *name=nullptr)
Adds a multiplication layer to the network.
LayerType
When adding a new layer, also adapt the LastLayer enum value in the enum class LayerType below.
const OutputSlot & GetOutputSlot(unsigned int index=0) const override
Get the const output slot handle by slot index.
void ReplaceLayers(OptimizationViews &optimizationViews, LayerType *baseLayer, std::vector< IConnectableLayer * > &layers)
IConnectableLayer * AddAdditionLayer(const char *name=nullptr)
Adds an addition layer to the network.
LayerType * FuseConvolution2dLayer(OptimizationViews &optimizationViews, LayerType *baseLayer, ActivationLayer *activationLayer, ActivationDescriptor &activationDesc, std::string name)
LayerType * FuseBatchNormalizationLayer(OptimizationViews &optimizationViews, LayerType *baseLayer, ActivationLayer *activationLayer, ActivationDescriptor &activationDesc, std::string name)
The SubgraphView class represents a subgraph of a Graph.
const InputSlot & GetInputSlot(unsigned int index) const override
Get a const input slot handle by slot index.
LayerType * FuseDepthwiseConvolution2dLayer(OptimizationViews &optimizationViews, LayerType *baseLayer, ActivationLayer *activationLayer, ActivationDescriptor &activationDesc, std::string name)
#define ARMNN_ASSERT(COND)
LayerType * FuseAdditionLayer(OptimizationViews &optimizationViews, LayerType *baseLayer, ActivationLayer *activationLayer, ActivationDescriptor &activationDesc, std::string name)
bool m_KeepDims
If true, the output shape does not change.
IConnectableLayer * AddBatchNormalizationLayer(const BatchNormalizationDescriptor &desc, const ConstTensor &mean, const ConstTensor &variance, const ConstTensor &beta, const ConstTensor &gamma, const char *name=nullptr)
Adds a batch normalization layer to the network.
std::vector< IConnectableLayer * > ChainReduceLayers(OptimizationViews &optimizationViews, LayerType *baseLayer, ReduceDescriptor &desc)
IConnectableLayer * AddFullyConnectedLayer(const FullyConnectedDescriptor &fullyConnectedDescriptor, const char *name=nullptr)
Adds a fully connected layer to the network.