From ae050524109f1ce827962665436ef7430f2ac479 Mon Sep 17 00:00:00 2001
From: David Monahan
Date: Wed, 22 Mar 2023 16:48:58 +0000
Subject: IVGCVSW-7255 Update Doxygen Documentation and publish on GitHub.

* Updating Doxygen documentation for the 23.02 release.

Signed-off-by: David Monahan
Change-Id: I545574ff7664b4595d2fe6a91a3c35d2ad55df82
---
 .../_arm_compute_subgraph_utils_8hpp_source.xhtml | 443 ++++++++++++++++++---
 1 file changed, 388 insertions(+), 55 deletions(-)

(limited to '23.02/_arm_compute_subgraph_utils_8hpp_source.xhtml')

diff --git a/23.02/_arm_compute_subgraph_utils_8hpp_source.xhtml b/23.02/_arm_compute_subgraph_utils_8hpp_source.xhtml
index 0b2aef05e6..511c20af1f 100644
--- a/23.02/_arm_compute_subgraph_utils_8hpp_source.xhtml
+++ b/23.02/_arm_compute_subgraph_utils_8hpp_source.xhtml

ArmNN: src/backends/aclCommon/ArmComputeSubgraphUtils.hpp Source File
ArmComputeSubgraphUtils.hpp
Go to the documentation of this file.
//
// Copyright © 2020,2022 Arm Ltd and Contributors. All rights reserved.
// SPDX-License-Identifier: MIT
//

#pragma once

// (include directives elided in this listing)

namespace armnn
{

namespace
{

//
// This helper only works if none of the layers that the inputs connect to are selected.
//

bool checkDataTypeInputandOutput(const Layer& layer)
{
    auto inputInfo  = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
    auto outputInfo = layer.GetOutputSlot(0).GetTensorInfo();
    bool sameDataType = (inputInfo.GetDataType() == outputInfo.GetDataType());

    // Check for the same quantization info (same scale and offset).
    if (sameDataType)
    {
        if (IsQuantizedType(inputInfo.GetDataType()))
        {
            bool sameScale  = (inputInfo.GetQuantizationScale()  == outputInfo.GetQuantizationScale());
            bool sameOffset = (inputInfo.GetQuantizationOffset() == outputInfo.GetQuantizationOffset());

            return (sameScale && sameOffset);
        }
        else
        {
            return true;
        }
    }
    else
    {
        return false;
    }
}

} // namespace

template<typename LayerType>
LayerType* FuseLayer(OptimizationViews& optimizationViews,
                     LayerType* baseLayer,
                     LayerType* replacementLayer,
                     ActivationLayer* activationLayer,
                     ActivationDescriptor& activationDesc)
{
    replacementLayer->SetAdditionalInfoForObject(
        std::make_shared<ActivationDescriptor>(activationDesc));

    SubgraphView substitutionSubgraph({baseLayer, activationLayer},
                                      CreateIInputsFrom({baseLayer}),
                                      CreateIOutputsFrom({activationLayer}));
    SubgraphView replacementSubgraph(replacementLayer);

    optimizationViews.AddSubstitution({substitutionSubgraph, replacementSubgraph});

    return replacementLayer;
}

template<typename LayerType>
LayerType* FuseAdditionLayer(OptimizationViews& optimizationViews,
                             LayerType* baseLayer,
                             ActivationLayer* activationLayer,
                             ActivationDescriptor& activationDesc,
                             std::string name)
{
    IConnectableLayer* replacement = optimizationViews.GetINetwork()->AddAdditionLayer(name.c_str());
    LayerType* replacementLayer = PolymorphicDowncast<LayerType*>(replacement);

    FuseLayer(optimizationViews,
              baseLayer,
              replacementLayer,
              activationLayer,
              activationDesc);

    return replacementLayer;
}

template<typename LayerType>
LayerType* FuseSubtractionLayer(OptimizationViews& optimizationViews,
                                LayerType* baseLayer,
                                ActivationLayer* activationLayer,
                                ActivationDescriptor& activationDesc,
                                std::string name)
{
    IConnectableLayer* replacement = optimizationViews.GetINetwork()->AddSubtractionLayer(name.c_str());
    LayerType* replacementLayer = PolymorphicDowncast<LayerType*>(replacement);

    FuseLayer(optimizationViews,
              baseLayer,
              replacementLayer,
              activationLayer,
              activationDesc);

    return replacementLayer;
}

template<typename LayerType>
LayerType* FuseDivisionLayer(OptimizationViews& optimizationViews,
                             LayerType* baseLayer,
                             ActivationLayer* activationLayer,
                             ActivationDescriptor& activationDesc,
                             std::string name)
{
    IConnectableLayer* replacement = optimizationViews.GetINetwork()->AddDivisionLayer(name.c_str());
    LayerType* replacementLayer = PolymorphicDowncast<LayerType*>(replacement);

    FuseLayer(optimizationViews,
              baseLayer,
              replacementLayer,
              activationLayer,
              activationDesc);

    return replacementLayer;
}

template<typename LayerType>
LayerType* FuseMultiplicationLayer(OptimizationViews& optimizationViews,
                                   LayerType* baseLayer,
                                   ActivationLayer* activationLayer,
                                   ActivationDescriptor& activationDesc,
                                   std::string name)
{
    IConnectableLayer* replacement = optimizationViews.GetINetwork()->AddMultiplicationLayer(name.c_str());
    LayerType* replacementLayer = PolymorphicDowncast<LayerType*>(replacement);

    FuseLayer(optimizationViews,
              baseLayer,
              replacementLayer,
              activationLayer,
              activationDesc);

    return replacementLayer;
}

template<typename LayerType>
LayerType* FuseBatchNormalizationLayer(OptimizationViews& optimizationViews,
                                       LayerType* baseLayer,
                                       ActivationLayer* activationLayer,
                                       ActivationDescriptor& activationDesc,
                                       std::string name)
{
    IConnectableLayer* replacement =
        optimizationViews.GetINetwork()->AddBatchNormalizationLayer(baseLayer->GetParameters(),
                                                                    ConstTensor(),
                                                                    ConstTensor(),
                                                                    ConstTensor(),
                                                                    ConstTensor(),
                                                                    name.c_str());
    LayerType* replacementLayer = PolymorphicDowncast<LayerType*>(replacement);

    FuseLayer(optimizationViews,
              baseLayer,
              replacementLayer,
              activationLayer,
              activationDesc);

    SubgraphView substitutionSubgraph({baseLayer, activationLayer},
                                      CreateIInputsFrom({baseLayer}),
                                      CreateIOutputsFrom({activationLayer}));
    SubgraphView replacementSubgraph(replacementLayer);

    return replacementLayer;
}

template<typename LayerType>
LayerType* FuseConvolution2dLayer(OptimizationViews& optimizationViews,
                                  LayerType* baseLayer,
                                  ActivationLayer* activationLayer,
                                  ActivationDescriptor& activationDesc,
                                  std::string name)
{
    IConnectableLayer* replacement = optimizationViews.GetINetwork()
                                     ->AddConvolution2dLayer(baseLayer->GetParameters(), name.c_str());

    LayerType* replacementLayer = PolymorphicDowncast<LayerType*>(replacement);

    FuseLayer(optimizationViews,
              baseLayer,
              replacementLayer,
              activationLayer,
              activationDesc);

    return replacementLayer;
}

template<typename LayerType>
LayerType* FuseDepthwiseConvolution2dLayer(OptimizationViews& optimizationViews,
                                           LayerType* baseLayer,
                                           ActivationLayer* activationLayer,
                                           ActivationDescriptor& activationDesc,
                                           std::string name)
{
    IConnectableLayer* replacement =
        optimizationViews.GetINetwork()->AddDepthwiseConvolution2dLayer(baseLayer->GetParameters(), name.c_str());

    LayerType* replacementLayer = PolymorphicDowncast<LayerType*>(replacement);

    FuseLayer(optimizationViews,
              baseLayer,
              replacementLayer,
              activationLayer,
              activationDesc);

    return replacementLayer;
}

template<typename LayerType>
LayerType* FuseFullyConnectedLayer(OptimizationViews& optimizationViews,
                                   LayerType* baseLayer,
                                   ActivationLayer* activationLayer,
                                   ActivationDescriptor& activationDesc,
                                   std::string name)
{
    IConnectableLayer* replacement =
        optimizationViews.GetINetwork()->AddFullyConnectedLayer(baseLayer->GetParameters(),
                                                                name.c_str());
    LayerType* replacementLayer = PolymorphicDowncast<LayerType*>(replacement);

    FuseLayer(optimizationViews,
              baseLayer,
              replacementLayer,
              activationLayer,
              activationDesc);

    return replacementLayer;
}

//
// If the reduce layer has multiple axes, add a new layer for each axis to simulate the same behaviour,
// as currently only one axis is supported.
//
template<typename LayerType>
std::vector<IConnectableLayer*> ChainReduceLayers(OptimizationViews& optimizationViews,
                                                  LayerType* baseLayer,
                                                  ReduceDescriptor& desc)
{
    // Vector of new chained layers, used for substitution.
    std::vector<IConnectableLayer*> layers;

    // Vector of axes so each layer is reshaped correctly.
    std::vector<uint32_t> axes;
    unsigned int recalulatedAxis = 0;

    for (unsigned int i = 0; i != desc.m_vAxis.size(); ++i)
    {
        // Get the TensorInfo from the base layer and reduce the shape using the axis.
        TensorInfo layerInfo = baseLayer->GetInputSlot(0).GetConnectedOutputSlot()->GetTensorInfo();

        axes.emplace_back(desc.m_vAxis[i]);

        const TensorInfo& reducedTensorInfo = ComputeReductionTensorShape(layerInfo,
                                                                          axes,
                                                                          desc.m_KeepDims);

        // Create a vector for the single axis to be assigned to the descriptor.
        // Update the axis if keepDims is set, so the reduce layers are created correctly.
        std::vector<uint32_t> singleAxis(1, desc.m_vAxis[i] - recalulatedAxis);

        // Create a descriptor and assign the single axis.
        ReduceDescriptor newReduceDescriptor = baseLayer->GetParameters();
        newReduceDescriptor.m_vAxis.assign(singleAxis.begin(), singleAxis.end());

        // Add the new layer to the graph.
        std::string layerName = "reduce_layer_" + std::to_string(i);

        Layer* replacementLayer = PolymorphicDowncast<Layer*>(
            optimizationViews.GetINetwork()->AddReduceLayer(newReduceDescriptor,
                                                            layerName.c_str()));

        // Connect the previous layer with the new layer.
        // The first and last layers will be connected when the subgraph is replaced.
        if (!layers.empty())
        {
            layers[i - 1]->GetOutputSlot(0).Connect(replacementLayer->GetInputSlot(0));
        }

        // Set the updated tensorInfo for the new layer.
        replacementLayer->GetOutputSlot(0).SetTensorInfo(reducedTensorInfo);

        if (!desc.m_KeepDims)
        {
            recalulatedAxis++;
        }

        layers.emplace_back(replacementLayer);
    }

    // Check that the TensorInfo from the last layer equals the inferred output from the original layer.
    ARMNN_ASSERT(baseLayer->GetOutputSlot(0).GetTensorInfo() ==
                 PolymorphicDowncast<Layer*>(layers.back())->GetOutputSlot().GetTensorInfo());

    return layers;
}

//
// Substitute baseLayer with the new subgraph.
//
template<typename LayerType>
void ReplaceLayers(OptimizationViews& optimizationViews,
                   LayerType* baseLayer,
                   std::vector<IConnectableLayer*>& layers)
{
    std::list<IConnectableLayer*> replacementLayers(layers.begin(), layers.end());

    SubgraphView substitutionSubgraph(baseLayer);
    SubgraphView replacementSubgraph(std::move(replacementLayers),
                                     CreateIInputsFrom({replacementLayers.front()}),
                                     CreateIOutputsFrom({replacementLayers.back()}));

    optimizationViews.AddSubstitution({substitutionSubgraph, replacementSubgraph});
}

} // namespace armnn
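The Fuse*Layer helpers above all follow the same pattern: create a replacement layer of the same kind through INetwork, attach the ActivationDescriptor as additional info via FuseLayer, and register a substitution that collapses the {base layer, activation} pair into that single replacement layer. The sketch below outlines how a backend's OptimizeSubgraphView() might drive FuseAdditionLayer; it is illustrative only: the layer matching, the fused-name scheme, and the omitted workload-support check are assumptions, not code from this file.

// Illustrative sketch only; assumes the ArmNN backend headers used by this
// file are available and that we are inside (or using) namespace armnn.
OptimizationViews OptimizeAdditionActivationPairs(const SubgraphView& subgraph)
{
    OptimizationViews optimizationViews;

    for (IConnectableLayer* iLayer : subgraph.GetIConnectableLayers())
    {
        Layer& base = *PolymorphicDowncast<Layer*>(iLayer);

        // Look for an Addition layer whose single output feeds an Activation layer.
        if (base.GetType() == LayerType::Addition &&
            base.GetOutputSlot(0).GetNumConnections() == 1)
        {
            Layer& child = base.GetOutputSlot(0).GetConnection(0)->GetOwningLayer();
            if (child.GetType() == LayerType::Activation)
            {
                ActivationLayer* activationLayer = PolymorphicDowncast<ActivationLayer*>(&child);
                ActivationDescriptor activationDesc = activationLayer->GetParameters();

                // Hypothetical name for the fused layer; a real backend would also
                // validate that the fused workload is supported before substituting.
                std::string fusedName =
                    std::string("fused-") + child.GetName() + "-into-" + base.GetName();

                // Registers the substitution {Addition, Activation} -> single Addition
                // carrying the activation descriptor as additional info.
                FuseAdditionLayer<AdditionLayer>(optimizationViews,
                                                 PolymorphicDowncast<AdditionLayer*>(&base),
                                                 activationLayer,
                                                 activationDesc,
                                                 fusedName);
            }
        }
    }

    // Anything not substituted is reported back untouched.
    if (optimizationViews.GetSubstitutions().empty())
    {
        optimizationViews.AddUntouchedSubgraph(SubgraphView(subgraph));
    }
    return optimizationViews;
}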
Cross-references from the generated documentation for symbols used in this file:

IConnectableLayer* AddSubtractionLayer(const char* name = nullptr)
    Adds a subtraction layer to the network. Definition: Network.cpp:314

IConnectableLayer
    Interface for a layer that is connectable to other layers via InputSlots and OutputSlots. Definition: INetwork.hpp:68

constexpr bool IsQuantizedType()
    Definition: TypesUtils.hpp:284

IConnectableLayer* AddDepthwiseConvolution2dLayer(const DepthwiseConvolution2dDescriptor& convolution2dDescriptor, const char* name = nullptr)
    Adds a 2D depthwise convolution layer to the network. Definition: Network.cpp:107

LayerType* FuseConvolution2dLayer(OptimizationViews& optimizationViews, LayerType* baseLayer, ActivationLayer* activationLayer, ActivationDescriptor& activationDesc, std::string name)

bool m_KeepDims
    If true then the output shape does not change.

void AddSubstitution(SubstitutionPair&& substitution)

ActivationLayer
    This layer represents an activation operation with the specified activation function.

namespace armnn
    Copyright (c) 2021 ARM Limited and Contributors.

IConnectableLayer* AddDivisionLayer(const char* name = nullptr)
    Adds a division layer to the network. Definition: Network.cpp:309

IConnectableLayer* AddFullyConnectedLayer(const FullyConnectedDescriptor& fullyConnectedDescriptor, const char* name = nullptr)
    Adds a fully connected layer to the network. Definition: Network.cpp:143

SubgraphView
    The SubgraphView class represents a subgraph of a Graph.

const InputSlot& GetInputSlot(unsigned int index) const override
    Get a const input slot handle by slot index. Definition: Layer.hpp:324

LayerType* FuseDivisionLayer(OptimizationViews& optimizationViews, LayerType* baseLayer, ActivationLayer* activationLayer, ActivationDescriptor& activationDesc, std::string name)

ReduceDescriptor
    A ReduceDescriptor for the REDUCE operators.

ConstTensor
    A tensor defined by a TensorInfo (shape and data type) and an immutable backing store. Definition: Tensor.hpp:327

#define ARMNN_ASSERT(COND)
    Definition: Assert.hpp:14

LayerType* FuseLayer(OptimizationViews& optimizationViews, LayerType* baseLayer, LayerType* replacementLayer, ActivationLayer* activationLayer, ActivationDescriptor& activationDesc)

LayerType* FuseBatchNormalizationLayer(OptimizationViews& optimizationViews, LayerType* baseLayer, ActivationLayer* activationLayer, ActivationDescriptor& activationDesc, std::string name)

ActivationDescriptor
    An ActivationDescriptor for the ActivationLayer. Definition: Descriptors.hpp:36

IConnectableLayer* AddBatchNormalizationLayer(const BatchNormalizationDescriptor& desc, const ConstTensor& mean, const ConstTensor& variance, const ConstTensor& beta, const ConstTensor& gamma, const char* name = nullptr)
    Adds a batch normalization layer to the network. Definition: Network.cpp:224

std::vector<uint32_t> m_vAxis
    The indices of the dimensions to reduce.

IConnectableLayer* AddAdditionLayer(const char* name = nullptr)
    Adds an addition layer to the network. Definition: Network.cpp:214

std::vector<IConnectableLayer*> ChainReduceLayers(OptimizationViews& optimizationViews, LayerType* baseLayer, ReduceDescriptor& desc)

LayerType* FuseSubtractionLayer(OptimizationViews& optimizationViews, LayerType* baseLayer, ActivationLayer* activationLayer, ActivationDescriptor& activationDesc, std::string name)

IConnectableLayer* AddConvolution2dLayer(const Convolution2dDescriptor& convolution2dDescriptor, const char* name = nullptr)
    Adds a 2D convolution layer to the network. Definition: Network.cpp:87

void SetTensorInfo(const TensorInfo& tensorInfo) override
    Definition: Layer.cpp:87

const OutputSlot& GetOutputSlot(unsigned int index = 0) const override
    Get the const output slot handle by slot index. Definition: Layer.hpp:326

IConnectableLayer* AddMultiplicationLayer(const char* name = nullptr)
    Adds a multiplication layer to the network. Definition: Network.cpp:219

IConnectableLayer* AddReduceLayer(const ReduceDescriptor& reduceDescriptor, const char* name = nullptr)
    Adds a reduce layer to the network. Definition: Network.cpp:245

LayerType* FuseAdditionLayer(OptimizationViews& optimizationViews, LayerType* baseLayer, ActivationLayer* activationLayer, ActivationDescriptor& activationDesc, std::string name)

LayerType* FuseDepthwiseConvolution2dLayer(OptimizationViews& optimizationViews, LayerType* baseLayer, ActivationLayer* activationLayer, ActivationDescriptor& activationDesc, std::string name)

void ReplaceLayers(OptimizationViews& optimizationViews, LayerType* baseLayer, std::vector<IConnectableLayer*>& layers)

const TensorInfo ComputeReductionTensorShape(const armnn::TensorInfo& input, const std::vector<uint32_t>& vAxis, const bool keepDims)
    Function to compute the output tensor shape based on the axes and whether keepDims is set.

LayerType* FuseMultiplicationLayer(OptimizationViews& optimizationViews, LayerType* baseLayer, ActivationLayer* activationLayer, ActivationDescriptor& activationDesc, std::string name)

LayerType* FuseFullyConnectedLayer(OptimizationViews& optimizationViews, LayerType* baseLayer, ActivationLayer* activationLayer, ActivationDescriptor& activationDesc, std::string name)

LayerType
    When adding a new layer, adapt also the LastLayer enum value in the enum class LayerType below. Definition: Types.hpp:466
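ChainReduceLayers and ReplaceLayers in the listing above exist because only a single reduction axis is supported at a time: a Reduce layer with several axes is rewritten as a chain of single-axis Reduce layers, which is then substituted for the original layer. A minimal, hypothetical sketch of that call pattern (the wrapper function and its name are not part of this file) could look like:

// Illustrative sketch only; baseLayer is a Reduce layer picked out by the
// backend while walking the subgraph in its OptimizeSubgraphView().
void SplitMultiAxisReduce(OptimizationViews& optimizationViews, ReduceLayer* baseLayer)
{
    ReduceDescriptor reduceDescriptor = baseLayer->GetParameters();

    // Only worth rewriting when more than one axis is reduced; a single-axis
    // Reduce is handled directly.
    if (!reduceDescriptor.m_vAxis.empty() && reduceDescriptor.m_vAxis.size() > 1)
    {
        // Build one single-axis Reduce layer per axis, chained output-to-input.
        std::vector<IConnectableLayer*> newLayers =
            ChainReduceLayers<ReduceLayer>(optimizationViews, baseLayer, reduceDescriptor);

        // Register the substitution of the original multi-axis layer with the new chain.
        ReplaceLayers<ReduceLayer>(optimizationViews, baseLayer, newLayers);
    }
}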