// Excerpt (original lines 26-30): collects a pointer to every input slot of
// every layer in 'layers' into 'result'. Presumably the body of
// CreateIInputsFrom (see the IInputSlots typedef listed in this file) —
// the enclosing function header and braces are missing from this excerpt.
// NOTE(review): the leading numbers ("26", "28", "30") are original source
// line numbers fused into the text by extraction.
26 for (
auto&& layer : layers)
28 for (
unsigned int i = 0 ; i < layer->GetNumInputSlots(); ++i)
30 result.push_back(&(layer->GetInputSlot(i)));
// Excerpt (original lines 43-47): mirror of the input-slot loop above —
// collects a pointer to every output slot of every layer in 'layers' into
// 'result'. Presumably the body of CreateIOutputsFrom (see the IOutputSlots
// typedef listed in this file); function header/braces missing from excerpt.
// NOTE(review): leading numbers are fused original source line numbers.
43 for (
auto &&layer: layers)
45 for (
unsigned int i = 0; i < layer->GetNumOutputSlots(); ++i)
47 result.push_back(&(layer->GetOutputSlot(i)));
// Excerpt (original lines 53-67): returns true when the layer's first input
// and first output tensors are compatible for fusion — same data type and,
// for quantized tensors, the same quantization scale and offset.
// NOTE(review): original lines 58-63 are missing from this excerpt; they
// presumably contain the sameDataType / IsQuantizedType guard (IsQuantizedType
// is listed among this file's symbols) — confirm against the full source.
// NOTE(review): GetConnection() is dereferenced unchecked — assumes the input
// slot is connected; verify callers guarantee this.
53 bool checkDataTypeInputandOutput(
const Layer& layer)
55 auto inputInfo = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
56 auto outputInfo = layer.GetOutputSlot(0).GetTensorInfo();
57 bool sameDataType = (inputInfo.GetDataType() == outputInfo.GetDataType());
64 bool sameScale = (inputInfo.GetQuantizationScale() == outputInfo.GetQuantizationScale());
65 bool sameOffset = (inputInfo.GetQuantizationOffset() == outputInfo.GetQuantizationOffset());
67 return (sameScale && sameOffset);
// Excerpt (original lines 84-90): body of ReportUntouchedLayers (signature is
// listed in this file: takes OptimizationViews& and a map<LayerGuid, Layer*>).
// For each untouched layer it builds a single-layer subgraph from that layer's
// own input and output slots. The call whose arguments appear at original
// lines 89-90 (its opening line, orig. 88, is missing) presumably constructs a
// SubgraphView passed to AddUntouchedSubgraph — confirm against full source.
84 std::vector<Layer*> untouchedVector;
85 for (
const auto& pair : untouched)
87 Layer* layer = pair.second;
89 CreateIInputsFrom({layer}),
90 CreateIOutputsFrom({layer}));
// Excerpt (original lines 95-112): the generic FuseLayer helper (signature is
// listed in this file). Visible behavior:
//  - stores a copy of the activation descriptor on the replacement layer as
//    "additional info" so the backend can apply the fused activation;
//  - builds the substitution subgraph spanning baseLayer -> activationLayer,
//    taking inputs from baseLayer and outputs from activationLayer;
//  - registers the {substitution, replacement} pair on optimizationViews and
//    returns the replacement layer.
// NOTE(review): original lines 96-101 (function header / replacementSubgraph
// construction at 108-109) are missing from this excerpt.
95 template<
typename LayerType>
102 replacementLayer->SetAdditionalInfoForObject(
103 std::make_shared<ActivationDescriptor>(activationDesc));
105 SubgraphView substitutionSubgraph({baseLayer, activationLayer},
106 CreateIInputsFrom({baseLayer}),
107 CreateIOutputsFrom({activationLayer}));
110 optimizationViews.
AddSubstitution({substitutionSubgraph, replacementSubgraph});
112 return replacementLayer;
// Excerpt (original lines 115-131): one of the per-layer fuse wrappers —
// FuseAdditionLayer, going by the embedded original line number and the
// signatures listed in this file (TODO confirm). Only the downcast of the
// newly added network layer and the final return survive in this excerpt;
// the layer creation and the FuseLayer call (orig. 116-130) are missing.
115 template<
typename LayerType>
123 LayerType* replacementLayer = PolymorphicDowncast<LayerType*>(replacement);
131 return replacementLayer;
// Excerpt (original lines 134-150): per-layer fuse wrapper — presumably
// FuseSubtractionLayer (embedded line number matches the listed signature;
// TODO confirm). Same shape as the other wrappers: downcast the added layer
// to LayerType and return it; creation/fusion lines are missing here.
134 template<
typename LayerType>
142 LayerType* replacementLayer = PolymorphicDowncast<LayerType*>(replacement);
150 return replacementLayer;
// Excerpt (original lines 153-169): per-layer fuse wrapper — presumably
// FuseDivisionLayer (embedded line number matches the listed signature;
// TODO confirm). Downcasts the added layer to LayerType and returns it;
// the intervening creation/fusion lines are missing from this excerpt.
153 template<
typename LayerType>
161 LayerType* replacementLayer = PolymorphicDowncast<LayerType*>(replacement);
169 return replacementLayer;
// Excerpt (original lines 172-188): per-layer fuse wrapper — presumably
// FuseMultiplicationLayer (embedded line number matches the listed signature;
// TODO confirm). Downcasts the added layer to LayerType and returns it;
// the intervening creation/fusion lines are missing from this excerpt.
172 template<
typename LayerType>
180 LayerType* replacementLayer = PolymorphicDowncast<LayerType*>(replacement);
188 return replacementLayer;
// Excerpt (original lines 191-218): FuseBatchNormalizationLayer (signature is
// listed in this file). Visible behavior:
//  - adds a new BatchNormalization layer to the INetwork, reusing the base
//    layer's parameters (further ctor arguments, orig. 200-204, are missing);
//  - downcasts the new layer to the concrete LayerType;
//  - builds the baseLayer -> activationLayer substitution subgraph itself
//    (unlike the wrappers above it does not appear to route everything
//    through FuseLayer — confirm against full source) and returns the
//    replacement layer.
191 template<
typename LayerType>
199 optimizationViews.
GetINetwork()->AddBatchNormalizationLayer(baseLayer->GetParameters(),
205 LayerType* replacementLayer = PolymorphicDowncast<LayerType*>(replacement);
213 SubgraphView substitutionSubgraph({baseLayer, activationLayer},
214 CreateIInputsFrom({baseLayer}),
215 CreateIOutputsFrom({activationLayer}));
218 return replacementLayer;
// Excerpt (original lines 221-256): FuseConvolution2dLayer (signature is
// listed in this file). Visible behavior:
//  - pulls the constant weight and bias handles off the base layer
//    (m_Weight / m_Bias are shared_ptr<ConstTensorHandle>);
//  - wraps the bias handle in a ConstTensor via Map(true) — presumably only
//    when the descriptor enables bias; the guarding condition (orig. 232-238)
//    is missing from this excerpt, so confirm before relying on it;
//  - adds a new Convolution2d layer with the base layer's parameters and
//    downcasts it to LayerType before returning.
221 template<
typename LayerType>
228 std::shared_ptr<ConstTensorHandle> weightHandle = baseLayer->m_Weight;
229 TensorInfo weightInfo = weightHandle->GetTensorInfo();
231 std::shared_ptr<ConstTensorHandle> biasHandle = baseLayer->m_Bias;
239 biasTensor =
ConstTensor(biasHandle->GetTensorInfo(), biasHandle->Map(
true));
244 AddConvolution2dLayer(baseLayer->GetParameters(),
248 LayerType* replacementLayer = PolymorphicDowncast<LayerType*>(replacement);
256 return replacementLayer;
// Excerpt (original lines 259-294): FuseDepthwiseConvolution2dLayer
// (signature is listed in this file). Structurally identical to the
// Convolution2d variant above: extract constant weight/bias handles, build a
// bias ConstTensor via Map(true) (guarding condition at orig. 270-276 is
// missing from this excerpt — confirm), add a DepthwiseConvolution2d layer
// with the base layer's parameters, downcast to LayerType and return.
259 template<
typename LayerType>
266 std::shared_ptr<ConstTensorHandle> weightHandle = baseLayer->m_Weight;
267 TensorInfo weightInfo = weightHandle->GetTensorInfo();
269 std::shared_ptr<ConstTensorHandle> biasHandle = baseLayer->m_Bias;
277 biasTensor =
ConstTensor(biasHandle->GetTensorInfo(), biasHandle->Map(
true));
282 AddDepthwiseConvolution2dLayer(baseLayer->GetParameters(),
286 LayerType* replacementLayer = PolymorphicDowncast<LayerType*>(replacement);
294 return replacementLayer;
// Excerpt (original lines 297-318): FuseFullyConnectedLayer (signature is
// listed in this file). Visible behavior:
//  - adds a new FullyConnected layer to the INetwork with the base layer's
//    parameters (remaining ctor arguments, orig. 306, are missing here);
//  - downcasts to LayerType and moves ownership of the base layer's constant
//    weight and bias handles onto the replacement layer (baseLayer's handles
//    are left moved-from — the base layer is about to be substituted away);
//  - returns the replacement layer.
297 template<
typename LayerType>
305 optimizationViews.
GetINetwork()->AddFullyConnectedLayer(baseLayer->GetParameters(),
307 LayerType* replacementLayer = PolymorphicDowncast<LayerType*>(replacement);
315 replacementLayer->m_Weight = std::move(baseLayer->m_Weight);
316 replacementLayer->m_Bias = std::move(baseLayer->m_Bias);
318 return replacementLayer;
// Excerpt (original lines 325-383): ChainReduceLayers (signature is listed in
// this file: returns vector<IConnectableLayer*>). Splits a multi-axis REDUCE
// into a chain of single-axis Reduce layers:
//  - for each axis in desc.m_vAxis, builds a one-element axis vector
//    (rebased by 'recalulatedAxis' to account for dimensions already reduced
//    — presumably only when keepDims is false; the guard is in the missing
//    lines, confirm against full source);
//  - assigns that single axis into a fresh ReduceDescriptor and adds a new
//    Reduce layer named "reduce_layer_<i>" to the INetwork;
//  - from the second layer on, connects the previous layer's output slot 0
//    to the new layer's input slot 0, accumulating the chain in 'layers';
//  - finally asserts the chain's last output TensorInfo matches the original
//    layer's output TensorInfo.
// NOTE(review): 'recalulatedAxis' is misspelled ("recalculated") — rename
// candidate when editing the real source.
// NOTE(review): original lines 341-349, 351-353, 361-366, 368-377 are missing
// from this excerpt (loop braces, tensor-shape recomputation, first-iteration
// handling), so the per-iteration control flow above is partly inferred.
325 template<
typename LayerType>
331 std::vector<IConnectableLayer*> layers;
334 std::vector<uint32_t> axes;
335 unsigned int recalulatedAxis = 0;
337 for (
unsigned int i = 0; i != desc.
m_vAxis.size(); ++i)
340 TensorInfo layerInfo = baseLayer->GetInputSlot(0).GetConnectedOutputSlot()->GetTensorInfo();
342 axes.emplace_back(desc.
m_vAxis[i]);
350 std::vector<uint32_t> singleAxis(1, desc.
m_vAxis[i] - recalulatedAxis);
354 newReduceDescriptor.
m_vAxis.assign(singleAxis.begin(), singleAxis.end());
357 std::string layerName =
"reduce_layer_" + std::to_string(i);
359 Layer* replacementLayer = PolymorphicDowncast<Layer*>(
360 optimizationViews.
GetINetwork()->AddReduceLayer(newReduceDescriptor,
367 layers[i - 1]->GetOutputSlot(0).Connect(replacementLayer->
GetInputSlot(0));
378 layers.emplace_back(replacementLayer);
382 ARMNN_ASSERT(baseLayer->GetOutputSlot(0).GetTensorInfo() ==
383 PolymorphicDowncast<Layer*>(layers.back())->GetOutputSlot().GetTensorInfo());
// Excerpt (original lines 391-403): ReplaceLayers (signature is listed in
// this file). Substitutes the single 'baseLayer' with the chain built by
// ChainReduceLayers: wraps the chained layers in a SubgraphView whose inputs
// come from the first chained layer and whose outputs come from the last,
// then registers the {substitution, replacement} pair on optimizationViews.
// NOTE(review): 'replacementLayers' is moved into the SubgraphView ctor while
// front()/back() also appear as arguments of the same call — evaluation-order
// sensitive as excerpted; confirm argument evaluation in the full source.
// (The substitutionSubgraph construction, orig. 397-398, is missing here.)
391 template<
typename LayerType>
394 std::vector<IConnectableLayer*>& layers)
396 std::list<IConnectableLayer*> replacementLayers(layers.begin(), layers.end());
399 SubgraphView replacementSubgraph(std::move(replacementLayers),
400 CreateIInputsFrom({replacementLayers.front()}),
401 CreateIOutputsFrom({replacementLayers.back()}));
403 optimizationViews.
AddSubstitution({substitutionSubgraph, replacementSubgraph});
Interface for a layer that is connectable to other layers via InputSlots and OutputSlots.
constexpr bool IsQuantizedType()
LayerType * FuseConvolution2dLayer(OptimizationViews &optimizationViews, LayerType *baseLayer, ActivationLayer *activationLayer, ActivationDescriptor &activationDesc, std::string name)
void ReportUntouchedLayers(OptimizationViews &optimizationViews, std::map< LayerGuid, Layer *> untouched)
bool m_KeepDims
If true, the reduced dimensions are retained (with size 1), so the output keeps the same rank as the input.
void AddSubstitution(SubstitutionPair &&substitution)
This layer represents an activation operation with the specified activation function.
Copyright (c) 2021 ARM Limited and Contributors.
The SubgraphView class represents a subgraph of a Graph.
const InputSlot & GetInputSlot(unsigned int index) const override
Get a const input slot handle by slot index.
LayerType * FuseDivisionLayer(OptimizationViews &optimizationViews, LayerType *baseLayer, ActivationLayer *activationLayer, ActivationDescriptor &activationDesc, std::string name)
A ReduceDescriptor for the REDUCE operators.
std::vector< IOutputSlot * > IOutputSlots
A tensor defined by a TensorInfo (shape and data type) and an immutable backing store.
INetworkPtr & GetINetwork()
std::vector< IInputSlot * > IInputSlots
#define ARMNN_ASSERT(COND)
LayerType * FuseLayer(OptimizationViews &optimizationViews, LayerType *baseLayer, LayerType *replacementLayer, ActivationLayer *activationLayer, ActivationDescriptor &activationDesc)
LayerType * FuseBatchNormalizationLayer(OptimizationViews &optimizationViews, LayerType *baseLayer, ActivationLayer *activationLayer, ActivationDescriptor &activationDesc, std::string name)
An ActivationDescriptor for the ActivationLayer.
void AddUntouchedSubgraph(SubgraphView &&subgraph)
std::vector< uint32_t > m_vAxis
The indices of the dimensions to reduce.
std::vector< IConnectableLayer * > ChainReduceLayers(OptimizationViews &optimizationViews, LayerType *baseLayer, ReduceDescriptor &desc)
LayerType * FuseSubtractionLayer(OptimizationViews &optimizationViews, LayerType *baseLayer, ActivationLayer *activationLayer, ActivationDescriptor &activationDesc, std::string name)
void SetTensorInfo(const TensorInfo &tensorInfo) override
const OutputSlot & GetOutputSlot(unsigned int index=0) const override
Get the const output slot handle by slot index.
LayerType * FuseAdditionLayer(OptimizationViews &optimizationViews, LayerType *baseLayer, ActivationLayer *activationLayer, ActivationDescriptor &activationDesc, std::string name)
LayerType * FuseDepthwiseConvolution2dLayer(OptimizationViews &optimizationViews, LayerType *baseLayer, ActivationLayer *activationLayer, ActivationDescriptor &activationDesc, std::string name)
void ReplaceLayers(OptimizationViews &optimizationViews, LayerType *baseLayer, std::vector< IConnectableLayer *> &layers)
const TensorInfo ComputeReductionTensorShape(const armnn::TensorInfo &input, const std::vector< uint32_t > &vAxis, const bool keepDims)
Function to compute the output tensor shape based on the axes and if keepDims is set.
LayerType * FuseMultiplicationLayer(OptimizationViews &optimizationViews, LayerType *baseLayer, ActivationLayer *activationLayer, ActivationDescriptor &activationDesc, std::string name)
LayerType * FuseFullyConnectedLayer(OptimizationViews &optimizationViews, LayerType *baseLayer, ActivationLayer *activationLayer, ActivationDescriptor &activationDesc, std::string name)
LayerType
When adding a new layer, also update the LastLayer enum value in the enum class LayerType below.