ArmNN
 22.02
OptimizeSubgraphViewTests.cpp
Go to the documentation of this file.
1 //
2 // Copyright © 2017 Arm Ltd. All rights reserved.
3 // SPDX-License-Identifier: MIT
4 //
5 
#include <CommonTestUtils.hpp>
#include "MockBackend.hpp"
#include "MockBackendId.hpp"

#include <Graph.hpp>
#include <Network.hpp>

#include <doctest/doctest.h>

#include <algorithm>
#include <list>
#include <unordered_map>
#include <vector>
17 
18 using namespace armnn;
19 
20 namespace
21 {
22 
// The expected number of layers, input and output slots in a subgraph after a test
struct ExpectedSubgraphSize
{
    size_t m_NumInputSlots = 0;  // expected count of (I)InputSlots exposed by the subgraph view
    size_t m_NumOutputSlots = 0; // expected count of (I)OutputSlots exposed by the subgraph view
    size_t m_NumLayers = 0;      // expected count of layers contained in the subgraph view
};
30 
31 // Keep the layers organized by layer name
32 using LayerNameToLayerMap = std::unordered_map<std::string, Layer*>;
33 
// Converts a slot reference (the representation stored in graphs) into a slot
// pointer (the representation stored in subgraph views)
template <typename SlotType>
SlotType* ConvertReferenceTypeToPointerType(const SlotType& input)
{
    SlotType* const asPointer = const_cast<SlotType*>(&input);
    return asPointer;
}
41 
// Converts a vector of slot references (the representation stored in graphs) into
// a vector of slot pointers (the representation stored in subgraph views)
template <typename SlotType>
std::vector<SlotType*> ConvertReferenceTypeToPointerType(const std::vector<SlotType>& input)
{
    std::vector<SlotType*> output;
    output.reserve(input.size());
    for (const SlotType& item : input)
    {
        output.push_back(const_cast<SlotType*>(&item));
    }
    return output;
}
58 
59 // Convert from vector of Slots* (Input/Output) to vector of ISlots* (IInput/IOutput)
60 template <typename SlotType, typename ResultSlotType>
61 std::vector<ResultSlotType*> ConvertSlotsToISlots(const std::vector<SlotType*> input)
62 {
63  std::vector<ResultSlotType*> output;
64  for (auto slot : input)
65  {
66  output.push_back(PolymorphicDowncast<ResultSlotType*>(slot));
67  }
68  return output;
69 }
70 
71 // Convenience function to add an input layer to a graph
72 Layer* AddInputLayer(Graph& graph,
73  const std::string& layerName,
74  const TensorInfo& inputInfo,
75  LayerBindingId inputId = 0)
76 {
77  Layer* const inputLayer = graph.AddLayer<InputLayer>(inputId, layerName.c_str());
78  CHECK(inputLayer);
79  inputLayer->GetOutputSlot(0).SetTensorInfo(inputInfo);
80  return inputLayer;
81 }
82 
83 // Convenience function to add an output layer to a graph
84 Layer* AddOutputLayer(Graph& graph,
85  const std::string& layerName)
86 {
87  Layer* const outputLayer = graph.AddLayer<OutputLayer>(0, layerName.c_str());
88  CHECK(outputLayer);
89  return outputLayer;
90 }
91 
92 // Convenience function to add a convolution layer to a graph
93 Convolution2dLayer* AddConvolutionLayer(Graph& graph,
94  LayerNameToLayerMap& layersInGraph,
95  const Convolution2dDescriptor& convolutionDescriptor,
96  const std::string& layerName,
97  const TensorInfo& weightInfo,
98  const TensorInfo& biasInfo,
99  const TensorInfo& outputInfo)
100 {
101  Convolution2dLayer* const convLayer = graph.AddLayer<Convolution2dLayer>(convolutionDescriptor, layerName.c_str());
102  CHECK(convLayer);
103  SetWeightAndBias(convLayer, weightInfo, biasInfo);
104  convLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);
105  layersInGraph.insert(std::make_pair(convLayer->GetName(), convLayer));
106  return convLayer;
107 }
108 
109 // Convenience function to add a pooling layer to a graph
110 Pooling2dLayer* AddPoolingLayer(Graph& graph,
111  LayerNameToLayerMap& layersInGraph,
112  const Pooling2dDescriptor& poolingDescriptor,
113  const std::string& layerName,
114  const TensorInfo& outputInfo)
115 {
116  Pooling2dLayer* const poolingLayer = graph.AddLayer<Pooling2dLayer>(poolingDescriptor, layerName.c_str());
117  CHECK(poolingLayer);
118  poolingLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);
119  layersInGraph.insert(std::make_pair(poolingLayer->GetName(), poolingLayer));
120  return poolingLayer;
121 }
122 
123 // Convenience function to add an addition layer to a graph
124 AdditionLayer* AddAdditionaLayer(Graph& graph,
125  LayerNameToLayerMap& layersInGraph,
126  const std::string& layerName,
127  const TensorInfo& outputInfo)
128 {
129  AdditionLayer* const additionLayer = graph.AddLayer<AdditionLayer>(layerName.c_str());
130  CHECK(additionLayer);
131  additionLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);
132  layersInGraph.insert(std::make_pair(additionLayer->GetName(), additionLayer));
133  return additionLayer;
134 }
135 
136 // Convenience function to check that the given substitution matches the specified expected values
137 void CheckSubstitution(const OptimizationViews::SubstitutionPair& substitution,
138  const ExpectedSubgraphSize& expectedSubstitutableSubgraphSize,
139  const ExpectedSubgraphSize& expectedReplacementSubgraphSize,
140  const SubgraphView::IInputSlots& expectedSubstitutableInputSlots,
141  const SubgraphView::IOutputSlots& expectedSubstitutableOutputSlots,
142  const SubgraphView::IConnectableLayers& expectedSubstitutableLayers)
143 {
144  const SubgraphView& substitutableSubgraph = substitution.m_SubstitutableSubgraph;
145  const SubgraphView::IInputSlots& substitutableSubgraphInputSlots = substitutableSubgraph.GetIInputSlots();
146  const SubgraphView::IOutputSlots& substitutableSubgraphOutputSlots = substitutableSubgraph.GetIOutputSlots();
147  const SubgraphView::IConnectableLayers& substitutableSubgraphLayers =
148  substitutableSubgraph.GetIConnectableLayers();
149 
150  const SubgraphView& replacementSubgraph = substitution.m_ReplacementSubgraph;
151  const SubgraphView::IInputSlots& replacementSubgraphInputSlots = replacementSubgraph.GetIInputSlots();
152  const SubgraphView::IOutputSlots& replacementSubgraphOutputSlots = replacementSubgraph.GetIOutputSlots();
153  const SubgraphView::IConnectableLayers& replacementSubgraphLayers = replacementSubgraph.GetIConnectableLayers();
154 
155  CHECK(substitutableSubgraphInputSlots.size() == expectedSubstitutableSubgraphSize.m_NumInputSlots);
156  CHECK(substitutableSubgraphOutputSlots.size() == expectedSubstitutableSubgraphSize.m_NumOutputSlots);
157  CHECK(substitutableSubgraphLayers.size() == expectedSubstitutableSubgraphSize.m_NumLayers);
158 
159  CHECK(AreEqual(substitutableSubgraphInputSlots, expectedSubstitutableInputSlots));
160  CHECK(AreEqual(substitutableSubgraphOutputSlots, expectedSubstitutableOutputSlots));
161  CHECK(AreEqual(substitutableSubgraphLayers, expectedSubstitutableLayers));
162 
163  CHECK(replacementSubgraphInputSlots.size() == expectedReplacementSubgraphSize.m_NumInputSlots);
164  CHECK(replacementSubgraphOutputSlots.size() == expectedReplacementSubgraphSize.m_NumOutputSlots);
165  CHECK(replacementSubgraphLayers.size() == expectedReplacementSubgraphSize.m_NumLayers);
166 
167  CHECK(!AreEqual(replacementSubgraphInputSlots, expectedSubstitutableInputSlots));
168  CHECK(!AreEqual(replacementSubgraphOutputSlots, expectedSubstitutableOutputSlots));
169  CHECK(!AreEqual(replacementSubgraphLayers, expectedSubstitutableLayers));
170 
171  CHECK(std::all_of(replacementSubgraphLayers.begin(),
172  replacementSubgraphLayers.end(),
173  [](const IConnectableLayer* layer)
174  {
175  return layer->GetType() == LayerType::PreCompiled;
176  }));
177 }
178 
179 // Convenience function to check that the given failed subgraph matches the specified expected values
180 void CheckFailedSubgraph(const SubgraphView& failedSubgraph,
181  const ExpectedSubgraphSize& expectedFailedSubgraphSize,
182  const SubgraphView::IInputSlots& expectedFailedInputSlots,
183  const SubgraphView::IOutputSlots& expectedFailedOutputSlots,
184  const SubgraphView::IConnectableLayers& expectedFailedLayers)
185 {
186  const SubgraphView::IInputSlots& failedSubgraphInputSlots = failedSubgraph.GetIInputSlots();
187  const SubgraphView::IOutputSlots& failedSubgraphOutputSlots = failedSubgraph.GetIOutputSlots();
188  const SubgraphView::IConnectableLayers& failedSubgraphLayers = failedSubgraph.GetIConnectableLayers();
189 
190  CHECK(failedSubgraphInputSlots.size() == expectedFailedSubgraphSize.m_NumInputSlots);
191  CHECK(failedSubgraphOutputSlots.size() == expectedFailedSubgraphSize.m_NumOutputSlots);
192  CHECK(failedSubgraphLayers.size() == expectedFailedSubgraphSize.m_NumLayers);
193 
194  CHECK(AreEqual(failedSubgraphInputSlots, expectedFailedInputSlots));
195  CHECK(AreEqual(failedSubgraphOutputSlots, expectedFailedOutputSlots));
196  CHECK(AreEqual(failedSubgraphLayers, expectedFailedLayers));
197 }
198 
199 // Convenience function to check that the given untouched subgraph matches the specified expected values
200 void CheckUntouchedSubgraph(const SubgraphView& untouchedSubgraph,
201  const ExpectedSubgraphSize& expectedUntouchedSubgraphSize,
202  const SubgraphView::IInputSlots& expectedUntouchedInputSlots,
203  const SubgraphView::IOutputSlots& expectedUntouchedOutputSlots,
204  const SubgraphView::IConnectableLayers& expectedUntouchedLayers)
205 {
206  const SubgraphView::IInputSlots& untouchedSubgraphInputSlots = untouchedSubgraph.GetIInputSlots();
207  const SubgraphView::IOutputSlots& untouchedSubgraphOutputSlots = untouchedSubgraph.GetIOutputSlots();
208  const SubgraphView::IConnectableLayers& untouchedSubgraphLayers = untouchedSubgraph.GetIConnectableLayers();
209 
210  CHECK(untouchedSubgraphInputSlots.size() == expectedUntouchedSubgraphSize.m_NumInputSlots);
211  CHECK(untouchedSubgraphOutputSlots.size() == expectedUntouchedSubgraphSize.m_NumOutputSlots);
212  CHECK(untouchedSubgraphLayers.size() == expectedUntouchedSubgraphSize.m_NumLayers);
213 
214  CHECK(AreEqual(untouchedSubgraphInputSlots, expectedUntouchedInputSlots));
215  CHECK(AreEqual(untouchedSubgraphOutputSlots, expectedUntouchedOutputSlots));
216  CHECK(AreEqual(untouchedSubgraphLayers, expectedUntouchedLayers));
217 }
218 
219 // Creates a subgraph containing only a single unsupported layer (only convolutions are unsupported by the mock backend)
220 SubgraphView::SubgraphViewPtr BuildFullyUnsupportedSubgraph1(Graph& graph, LayerNameToLayerMap& layersInGraph)
221 {
222  const TensorInfo inputInfo ({ 1, 16, 16, 16 }, DataType::QAsymmU8, 1.0f, 0);
223  const TensorInfo outputInfo({ 1, 16, 16, 16 }, DataType::QAsymmU8, 1.0f, 0);
224 
225  Pooling2dDescriptor poolingDescriptor;
226  poolingDescriptor.m_PoolType = armnn::PoolingAlgorithm::Average;
227  poolingDescriptor.m_PoolWidth = 2;
228  poolingDescriptor.m_PoolHeight = 2;
229  poolingDescriptor.m_StrideX = 2;
230  poolingDescriptor.m_StrideY = 2;
231  poolingDescriptor.m_PadLeft = 1;
232  poolingDescriptor.m_PadRight = 1;
233  poolingDescriptor.m_PadTop = 1;
234  poolingDescriptor.m_PadBottom = 1;
235  poolingDescriptor.m_PaddingMethod = armnn::PaddingMethod::Exclude;
236  poolingDescriptor.m_DataLayout = DataLayout::NHWC;
237 
238  // Construct the graph
239  Layer* const inputLayer = AddInputLayer(graph, "input layer", inputInfo);
240  Pooling2dLayer* const poolingLayer = AddPoolingLayer(graph, layersInGraph, poolingDescriptor,
241  "pooling layer", outputInfo);
242  Layer* const outputLayer = AddOutputLayer(graph, "output layer");
243 
244  // Connect the network
245  inputLayer->GetOutputSlot(0).Connect(poolingLayer->GetInputSlot(0));
246  poolingLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
247 
248  // Create the subgraph view for the whole network
249  return CreateSubgraphViewFrom(CreateInputsFrom({poolingLayer}),
250  CreateOutputsFrom({poolingLayer}),
251  {poolingLayer});
252 }
253 
254 // Creates a subgraph containing only unsupported layers (only convolutions are unsupported by the mock backend)
255 SubgraphView::SubgraphViewPtr BuildFullyUnsupportedSubgraph2(Graph& graph, LayerNameToLayerMap& layersInGraph)
256 {
257  const TensorInfo inputInfo ({ 1, 16, 16, 16 }, DataType::QAsymmU8, 1.0f, 0);
258  const TensorInfo outputInfo({ 1, 16, 16, 16 }, DataType::QAsymmU8, 1.0f, 0);
259 
260  Pooling2dDescriptor poolingDescriptor;
261  poolingDescriptor.m_PoolType = armnn::PoolingAlgorithm::Average;
262  poolingDescriptor.m_PoolWidth = 2;
263  poolingDescriptor.m_PoolHeight = 2;
264  poolingDescriptor.m_StrideX = 2;
265  poolingDescriptor.m_StrideY = 2;
266  poolingDescriptor.m_PadLeft = 1;
267  poolingDescriptor.m_PadRight = 1;
268  poolingDescriptor.m_PadTop = 1;
269  poolingDescriptor.m_PadBottom = 1;
270  poolingDescriptor.m_PaddingMethod = armnn::PaddingMethod::Exclude;
271  poolingDescriptor.m_DataLayout = DataLayout::NHWC;
272 
273  // Construct the graph
274  Layer* const inputLayer = AddInputLayer(graph, "input layer", inputInfo);
275  Pooling2dLayer* const pooling1Layer = AddPoolingLayer(graph, layersInGraph, poolingDescriptor,
276  "pooling1 layer", outputInfo);
277  Pooling2dLayer* const pooling2Layer = AddPoolingLayer(graph, layersInGraph, poolingDescriptor,
278  "pooling2 layer", outputInfo);
279  Pooling2dLayer* const pooling3Layer = AddPoolingLayer(graph, layersInGraph, poolingDescriptor,
280  "pooling3 layer", outputInfo);
281  Layer* const outputLayer = AddOutputLayer(graph, "output layer");
282 
283  // Connect the network
284  inputLayer->GetOutputSlot(0).Connect(pooling1Layer->GetInputSlot(0));
285  pooling1Layer->GetOutputSlot(0).Connect(pooling2Layer->GetInputSlot(0));
286  pooling2Layer->GetOutputSlot(0).Connect(pooling3Layer->GetInputSlot(0));
287  pooling3Layer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
288 
289  // Create the subgraph view for the whole network
290  return CreateSubgraphViewFrom(CreateInputsFrom({pooling1Layer}),
291  CreateOutputsFrom({pooling3Layer}),
292  {pooling1Layer,
293  pooling2Layer,
294  pooling3Layer});
295 }
296 
297 // Creates a simple subgraph with only one convolution layer, supported by the mock backend
298 SubgraphView::SubgraphViewPtr BuildFullyOptimizableSubgraph1(Graph& graph, LayerNameToLayerMap& layersInGraph)
299 {
300  const TensorInfo inputInfo ({ 1, 16, 16, 16 }, DataType::QAsymmU8, 1.0f, 0);
301  const TensorInfo outputInfo({ 1, 16, 16, 16 }, DataType::QAsymmU8, 1.0f, 0);
302  const TensorInfo weightInfo({ 16, 1, 1, 16 }, DataType::QAsymmU8, 0.9f, 0);
303  const TensorInfo biasInfo ({ 1, 1, 1, 16 }, DataType::Signed32, 0.9f, 0);
304 
305  Convolution2dDescriptor convolutionDescriptor;
306  convolutionDescriptor.m_StrideX = 1;
307  convolutionDescriptor.m_StrideY = 1;
308  convolutionDescriptor.m_BiasEnabled = true;
309  convolutionDescriptor.m_DataLayout = DataLayout::NHWC;
310 
311  // Construct the graph
312  Layer* const inputLayer = AddInputLayer(graph, "input layer", inputInfo);
313  Convolution2dLayer* const convLayer = AddConvolutionLayer(graph, layersInGraph, convolutionDescriptor,
314  "conv layer", weightInfo, biasInfo, outputInfo);
315  Layer* const outputLayer = AddOutputLayer(graph, "output layer");
316 
317  // Connect the network
318  inputLayer->GetOutputSlot(0).Connect(convLayer->GetInputSlot(0));
319  convLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
320 
321  // Create the subgraph view for the whole network
322  return CreateSubgraphViewFrom(CreateInputsFrom({convLayer}),
323  CreateOutputsFrom({convLayer}),
324  {convLayer});
325 }
326 
327 // Creates a subgraph with five convolutions layers, all supported by the mock backend
328 SubgraphView::SubgraphViewPtr BuildFullyOptimizableSubgraph2(Graph& graph, LayerNameToLayerMap& layersInGraph)
329 {
330  const TensorInfo inputInfo ({ 1, 16, 16, 16 }, DataType::QAsymmU8, 1.0f, 0);
331  const TensorInfo outputInfo({ 1, 16, 16, 16 }, DataType::QAsymmU8, 1.0f, 0);
332  const TensorInfo weightInfo({ 16, 1, 1, 16 }, DataType::QAsymmU8, 0.9f, 0);
333  const TensorInfo biasInfo ({ 1, 1, 1, 16 }, DataType::Signed32, 0.9f, 0);
334 
335  Convolution2dDescriptor convolutionDescriptor;
336  convolutionDescriptor.m_StrideX = 1;
337  convolutionDescriptor.m_StrideY = 1;
338  convolutionDescriptor.m_BiasEnabled = true;
339  convolutionDescriptor.m_DataLayout = DataLayout::NHWC;
340 
341  // Construct the graph
342  Layer* const inputLayer = AddInputLayer(graph, "input layer", inputInfo);
343  Convolution2dLayer* const conv1Layer = AddConvolutionLayer(graph, layersInGraph, convolutionDescriptor,
344  "conv1 layer", weightInfo, biasInfo, outputInfo);
345  Convolution2dLayer* const conv2Layer = AddConvolutionLayer(graph, layersInGraph, convolutionDescriptor,
346  "conv2 layer", weightInfo, biasInfo, outputInfo);
347  Convolution2dLayer* const conv3Layer = AddConvolutionLayer(graph, layersInGraph, convolutionDescriptor,
348  "conv3 layer", weightInfo, biasInfo, outputInfo);
349  Convolution2dLayer* const conv4Layer = AddConvolutionLayer(graph, layersInGraph, convolutionDescriptor,
350  "conv4 layer", weightInfo, biasInfo, outputInfo);
351  Convolution2dLayer* const conv5Layer = AddConvolutionLayer(graph, layersInGraph, convolutionDescriptor,
352  "conv5 layer", weightInfo, biasInfo, outputInfo);
353  Layer* const outputLayer = AddOutputLayer(graph, "output layer");
354 
355  // Connect the network
356  inputLayer->GetOutputSlot(0).Connect(conv1Layer->GetInputSlot(0));
357  conv1Layer->GetOutputSlot(0).Connect(conv2Layer->GetInputSlot(0));
358  conv2Layer->GetOutputSlot(0).Connect(conv3Layer->GetInputSlot(0));
359  conv3Layer->GetOutputSlot(0).Connect(conv4Layer->GetInputSlot(0));
360  conv4Layer->GetOutputSlot(0).Connect(conv5Layer->GetInputSlot(0));
361  conv5Layer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
362 
363  // Create the subgraph view for the whole network
364  return CreateSubgraphViewFrom(CreateInputsFrom({conv1Layer}),
365  CreateOutputsFrom({conv5Layer}),
366  {conv1Layer,
367  conv2Layer,
368  conv3Layer,
369  conv4Layer,
370  conv5Layer});
371 }
372 
373 // Creates a subgraph with both supported and unsupported layers
374 // (only convolutions are unsupported by the mock backend)
375 SubgraphView::SubgraphViewPtr BuildPartiallySupportedSubgraph(Graph& graph, LayerNameToLayerMap& layersInGraph)
376 {
377  const TensorInfo inputInfo ({ 1, 16, 16, 16 }, DataType::QAsymmU8, 1.0f, 0);
378  const TensorInfo outputInfo({ 1, 16, 16, 16 }, DataType::QAsymmU8, 1.0f, 0);
379  const TensorInfo weightInfo({ 16, 1, 1, 16 }, DataType::QAsymmU8, 0.9f, 0);
380  const TensorInfo biasInfo ({ 1, 1, 1, 16 }, DataType::Signed32, 0.9f, 0);
381 
382  Convolution2dDescriptor convolutionDescriptor;
383  convolutionDescriptor.m_StrideX = 1;
384  convolutionDescriptor.m_StrideY = 1;
385  convolutionDescriptor.m_BiasEnabled = true;
386  convolutionDescriptor.m_DataLayout = DataLayout::NHWC;
387 
388  Pooling2dDescriptor poolingDescriptor;
389  poolingDescriptor.m_PoolType = armnn::PoolingAlgorithm::Average;
390  poolingDescriptor.m_PoolWidth = 2;
391  poolingDescriptor.m_PoolHeight = 2;
392  poolingDescriptor.m_StrideX = 2;
393  poolingDescriptor.m_StrideY = 2;
394  poolingDescriptor.m_PadLeft = 1;
395  poolingDescriptor.m_PadRight = 1;
396  poolingDescriptor.m_PadTop = 1;
397  poolingDescriptor.m_PadBottom = 1;
399  poolingDescriptor.m_DataLayout = DataLayout::NHWC;
400 
401  // Construct the graph
402  Layer* const inputLayer = AddInputLayer(graph, "input layer", inputInfo);
403  Convolution2dLayer* const conv1Layer = AddConvolutionLayer(graph, layersInGraph, convolutionDescriptor,
404  "conv1 layer", weightInfo, biasInfo, outputInfo);
405  Pooling2dLayer* const pooling1Layer = AddPoolingLayer(graph, layersInGraph, poolingDescriptor,
406  "pooling1 layer", outputInfo);
407  Pooling2dLayer* const pooling2Layer = AddPoolingLayer(graph, layersInGraph, poolingDescriptor,
408  "pooling2 layer", outputInfo);
409  Convolution2dLayer* const conv2Layer = AddConvolutionLayer(graph, layersInGraph, convolutionDescriptor,
410  "conv2 layer", weightInfo, biasInfo, outputInfo);
411  Pooling2dLayer* const pooling3Layer = AddPoolingLayer(graph, layersInGraph, poolingDescriptor,
412  "pooling3 layer", outputInfo);
413  Layer* const outputLayer = AddOutputLayer(graph, "output layer");
414 
415  // Connect the network
416  inputLayer->GetOutputSlot(0).Connect(conv1Layer->GetInputSlot(0));
417  conv1Layer->GetOutputSlot(0).Connect(pooling1Layer->GetInputSlot(0));
418  pooling1Layer->GetOutputSlot(0).Connect(pooling2Layer->GetInputSlot(0));
419  pooling2Layer->GetOutputSlot(0).Connect(conv2Layer->GetInputSlot(0));
420  conv2Layer->GetOutputSlot(0).Connect(pooling3Layer->GetInputSlot(0));
421  pooling3Layer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
422 
423  // Create the subgraph view for the whole network
424  return CreateSubgraphViewFrom(CreateInputsFrom({conv1Layer}),
425  CreateOutputsFrom({pooling3Layer}),
426  {conv1Layer,
427  pooling1Layer,
428  pooling2Layer,
429  conv2Layer,
430  pooling3Layer});
431 }
432 
433 // Creates a subgraph with only unoptimizable layers ("unoptimizable" is added to the layer's name)
434 SubgraphView::SubgraphViewPtr BuildFullyUnoptimizableSubgraph1(Graph& graph, LayerNameToLayerMap& layersInGraph)
435 {
436  const TensorInfo inputInfo ({ 1, 16, 16, 16 }, DataType::QAsymmU8, 1.0f, 0);
437  const TensorInfo outputInfo({ 1, 16, 16, 16 }, DataType::QAsymmU8, 1.0f, 0);
438  const TensorInfo weightInfo({ 16, 1, 1, 16 }, DataType::QAsymmU8, 0.9f, 0);
439  const TensorInfo biasInfo ({ 1, 1, 1, 16 }, DataType::Signed32, 0.9f, 0);
440 
441  Convolution2dDescriptor convolutionDescriptor;
442  convolutionDescriptor.m_StrideX = 1;
443  convolutionDescriptor.m_StrideY = 1;
444  convolutionDescriptor.m_BiasEnabled = true;
445  convolutionDescriptor.m_DataLayout = DataLayout::NHWC;
446 
447  // Construct the graph
448  Layer* const inputLayer = AddInputLayer(graph, "input layer", inputInfo);
449  Convolution2dLayer* const convLayer = AddConvolutionLayer(graph, layersInGraph, convolutionDescriptor,
450  "conv layer unoptimizable", weightInfo, biasInfo,
451  outputInfo);
452  Layer* const outputLayer = AddOutputLayer(graph, "output layer");
453 
454  // Connect the network
455  inputLayer->GetOutputSlot(0).Connect(convLayer->GetInputSlot(0));
456  convLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
457 
458  // Create the subgraph view for the whole network
459  return CreateSubgraphViewFrom(CreateInputsFrom({convLayer}),
460  CreateOutputsFrom({convLayer}),
461  {convLayer});
462 }
463 
464 // Creates a subgraph with some unoptimizable layers ("unoptimizable" is added to the layer's name)
465 SubgraphView::SubgraphViewPtr BuildPartiallyOptimizableSubgraph1(Graph& graph, LayerNameToLayerMap& layersInGraph)
466 {
467  const TensorInfo inputInfo ({ 1, 16, 16, 16 }, DataType::QAsymmU8, 1.0f, 0);
468  const TensorInfo outputInfo({ 1, 16, 16, 16 }, DataType::QAsymmU8, 1.0f, 0);
469  const TensorInfo weightInfo({ 16, 1, 1, 16 }, DataType::QAsymmU8, 0.9f, 0);
470  const TensorInfo biasInfo ({ 1, 1, 1, 16 }, DataType::Signed32, 0.9f, 0);
471 
472  Convolution2dDescriptor convolutionDescriptor;
473  convolutionDescriptor.m_StrideX = 1;
474  convolutionDescriptor.m_StrideY = 1;
475  convolutionDescriptor.m_BiasEnabled = true;
476  convolutionDescriptor.m_DataLayout = DataLayout::NHWC;
477 
478  // Construct the graph
479  Layer* const inputLayer = AddInputLayer(graph, "input layer", inputInfo);
480  Convolution2dLayer* const conv1Layer = AddConvolutionLayer(graph, layersInGraph, convolutionDescriptor,
481  "conv1 layer", weightInfo, biasInfo, outputInfo);
482  Convolution2dLayer* const conv2Layer = AddConvolutionLayer(graph, layersInGraph, convolutionDescriptor,
483  "conv2 layer unoptimizable", weightInfo, biasInfo,
484  outputInfo);
485  Convolution2dLayer* const conv3Layer = AddConvolutionLayer(graph, layersInGraph, convolutionDescriptor,
486  "conv3 layer", weightInfo, biasInfo, outputInfo);
487  Convolution2dLayer* const conv4Layer = AddConvolutionLayer(graph, layersInGraph, convolutionDescriptor,
488  "conv4 layer unoptimizable", weightInfo, biasInfo,
489  outputInfo);
490  Convolution2dLayer* const conv5Layer = AddConvolutionLayer(graph, layersInGraph, convolutionDescriptor,
491  "conv5 layer", weightInfo, biasInfo, outputInfo);
492  Layer* const outputLayer = AddOutputLayer(graph, "output layer");
493 
494  // Connect the network
495  inputLayer->GetOutputSlot(0).Connect(conv1Layer->GetInputSlot(0));
496  conv1Layer->GetOutputSlot(0).Connect(conv2Layer->GetInputSlot(0));
497  conv2Layer->GetOutputSlot(0).Connect(conv3Layer->GetInputSlot(0));
498  conv3Layer->GetOutputSlot(0).Connect(conv4Layer->GetInputSlot(0));
499  conv4Layer->GetOutputSlot(0).Connect(conv5Layer->GetInputSlot(0));
500  conv5Layer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
501 
502  // Create the subgraph view for the whole network
503  return CreateSubgraphViewFrom(CreateInputsFrom({conv1Layer}),
504  CreateOutputsFrom({conv5Layer}),
505  {conv1Layer,
506  conv2Layer,
507  conv3Layer,
508  conv4Layer,
509  conv5Layer});
510 }
511 
512 // Creates a subgraph with some input unoptimizable layers ("unoptimizable" is added to the layer's name),
513 // this is meant to test input slots coming from different layers
514 SubgraphView::SubgraphViewPtr BuildPartiallyOptimizableSubgraph2(Graph& graph, LayerNameToLayerMap& layersInGraph)
515 {
516  const TensorInfo inputInfo ({ 1, 16, 16, 16 }, DataType::QAsymmU8, 1.0f, 0);
517  const TensorInfo outputInfo({ 1, 16, 16, 16 }, DataType::QAsymmU8, 1.0f, 0);
518  const TensorInfo weightInfo({ 16, 1, 1, 16 }, DataType::QAsymmU8, 0.9f, 0);
519  const TensorInfo biasInfo ({ 1, 1, 1, 16 }, DataType::Signed32, 0.9f, 0);
520 
521  Convolution2dDescriptor convolutionDescriptor;
522  convolutionDescriptor.m_StrideX = 1;
523  convolutionDescriptor.m_StrideY = 1;
524  convolutionDescriptor.m_BiasEnabled = true;
525  convolutionDescriptor.m_DataLayout = DataLayout::NHWC;
526 
527  // Construct the graph
528  Layer* const input1Layer = AddInputLayer(graph, "input1 layer", inputInfo, 0);
529  Layer* const input2Layer = AddInputLayer(graph, "input2 layer", inputInfo, 1);
530  Convolution2dLayer* const conv1Layer = AddConvolutionLayer(graph, layersInGraph, convolutionDescriptor,
531  "conv1 layer", weightInfo, biasInfo, outputInfo);
532  Convolution2dLayer* const conv2Layer = AddConvolutionLayer(graph, layersInGraph, convolutionDescriptor,
533  "conv2 layer unoptimizable", weightInfo, biasInfo,
534  outputInfo);
535  Convolution2dLayer* const conv3Layer = AddConvolutionLayer(graph, layersInGraph, convolutionDescriptor,
536  "conv3 layer", weightInfo, biasInfo, outputInfo);
537  AdditionLayer* const addLayer = AddAdditionaLayer(graph, layersInGraph, "add layer", outputInfo);
538  Layer* const outputLayer = AddOutputLayer(graph, "output layer");
539 
540  // Connect the network
541  input1Layer->GetOutputSlot(0).Connect(conv1Layer->GetInputSlot(0));
542  input2Layer->GetOutputSlot(0).Connect(conv2Layer->GetInputSlot(0));
543  conv1Layer->GetOutputSlot(0).Connect(addLayer->GetInputSlot(0));
544  conv2Layer->GetOutputSlot(0).Connect(conv3Layer->GetInputSlot(0));
545  conv3Layer->GetOutputSlot(0).Connect(addLayer->GetInputSlot(1));
546  addLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
547 
548  // Create the subgraph view for the whole network
549  return CreateSubgraphViewFrom(CreateInputsFrom({conv1Layer,
550  conv2Layer}),
551  CreateOutputsFrom({addLayer}),
552  {conv1Layer,
553  conv2Layer,
554  conv3Layer,
555  addLayer});
556 }
557 
558 // The input subgraph contains only a single unsupported layer (only convolutions are unsupported by the mock backend)
559 void FullyUnsupporteSubgraphTestImpl1()
560 {
561  Graph graph;
562  LayerNameToLayerMap layersInGraph;
563 
564  // Create an unsupported subgraph
565  SubgraphView::SubgraphViewPtr subgraphPtr = BuildFullyUnsupportedSubgraph1(graph, layersInGraph);
566  CHECK((subgraphPtr != nullptr));
567 
568  const SubgraphView::IInputSlots& subgraphInputSlots = subgraphPtr->GetIInputSlots();
569  const SubgraphView::IOutputSlots& subgraphOutputSlots = subgraphPtr->GetIOutputSlots();
570  const SubgraphView::IConnectableLayers& subgraphLayers = subgraphPtr->GetIConnectableLayers();
571 
572  CHECK(subgraphInputSlots.size() == 1);
573  CHECK(subgraphOutputSlots.size() == 1);
574  CHECK(subgraphLayers.size() == 1);
575 
576  CHECK(Contains(layersInGraph, "pooling layer"));
577 
578  // Create a mock backend object
579  MockBackendInitialiser initialiser; // Register the Mock Backend
580  auto backendObjPtr = CreateBackendObject(MockBackendId());
581  CHECK((backendObjPtr != nullptr));
582 
583  // Optimize the subgraph
584  OptimizationViews optimizationViews;
585 
586  // Check that the optimization is carried out correctly, but no optimization is performed
587  CHECK_NOTHROW(optimizationViews = backendObjPtr->OptimizeSubgraphView(*subgraphPtr));
588 
589  // =======================================================================
590  // The expected results are:
591  // - No substitutions
592  // - Exactly one failed subgraph, corresponding to the whole original one
593  // - No untouched subgraphs
594  // =======================================================================
595 
596  // -----------------------
597  // Check the substitutions
598  // -----------------------
599 
600  CHECK(optimizationViews.GetSubstitutions().empty());
601 
602  // --------------------------
603  // Check the failed subgraphs
604  // --------------------------
605 
606  const OptimizationViews::Subgraphs& failedSubgraphs = optimizationViews.GetFailedSubgraphs();
607  CHECK(failedSubgraphs.size() == 1);
608 
609  CheckFailedSubgraph(failedSubgraphs.at(0),
610  { subgraphInputSlots.size(), subgraphOutputSlots.size(), subgraphLayers.size() },
611  subgraphInputSlots,
612  subgraphOutputSlots,
613  subgraphLayers);
614 
615  // -----------------------------
616  // Check the untouched subgraphs
617  // -----------------------------
618 
619  CHECK(optimizationViews.GetUntouchedSubgraphs().empty());
620 }
621 
622 // The input subgraph contains only unsupported layers (only convolutions are unsupported by the mock backend)
623 void FullyUnsupporteSubgraphTestImpl2()
624 {
625  Graph graph;
626  LayerNameToLayerMap layersInGraph;
627 
628  // Create an unsupported subgraph
629  SubgraphView::SubgraphViewPtr subgraphPtr = BuildFullyUnsupportedSubgraph2(graph, layersInGraph);
630  CHECK((subgraphPtr != nullptr));
631 
632  const SubgraphView::IInputSlots& subgraphInputSlots = subgraphPtr->GetIInputSlots();
633  const SubgraphView::IOutputSlots& subgraphOutputSlots = subgraphPtr->GetIOutputSlots();
634  const SubgraphView::IConnectableLayers& subgraphLayers = subgraphPtr->GetIConnectableLayers();
635 
636  CHECK(subgraphInputSlots.size() == 1);
637  CHECK(subgraphOutputSlots.size() == 1);
638  CHECK(subgraphLayers.size() == 3);
639 
640  CHECK(Contains(layersInGraph, "pooling1 layer"));
641  CHECK(Contains(layersInGraph, "pooling2 layer"));
642  CHECK(Contains(layersInGraph, "pooling3 layer"));
643 
644  // Create a mock backend object
645  MockBackendInitialiser initialiser; // Register the Mock Backend
646  auto backendObjPtr = CreateBackendObject(MockBackendId());
647  CHECK((backendObjPtr != nullptr));
648 
649  // Optimize the subgraph
650  OptimizationViews optimizationViews;
651 
652  // Check that the optimization is carried out correctly, but no optimization is performed
653  CHECK_NOTHROW(optimizationViews = backendObjPtr->OptimizeSubgraphView(*subgraphPtr));
654 
655  // =======================================================================
656  // The expected results are:
657  // - No substitutions
658  // - Exactly one failed subgraph, corresponding to the whole original one
659  // - No untouched subgraphs
660  // =======================================================================
661 
662  // -----------------------
663  // Check the substitutions
664  // -----------------------
665 
666  CHECK(optimizationViews.GetSubstitutions().empty());
667 
668  // --------------------------
669  // Check the failed subgraphs
670  // --------------------------
671 
672  const OptimizationViews::Subgraphs& failedSubgraphs = optimizationViews.GetFailedSubgraphs();
673  CHECK(failedSubgraphs.size() == 1);
674 
675  std::list<IConnectableLayer*> expectedFailedLayers{ layersInGraph.at("pooling1 layer"),
676  layersInGraph.at("pooling2 layer"),
677  layersInGraph.at("pooling3 layer") };
678 
679  const SubgraphView& failedSubgraph = failedSubgraphs.at(0);
680 
681  CheckFailedSubgraph(failedSubgraph,
682  { subgraphInputSlots.size(), subgraphOutputSlots.size(), subgraphLayers.size() },
683  subgraphInputSlots,
684  subgraphOutputSlots,
685  subgraphLayers);
686 
687  const SubgraphView::IConnectableLayers& failedSubgraphLayers = failedSubgraph.GetIConnectableLayers();
688 
689  CHECK_EQ(failedSubgraphLayers.front() + 0, expectedFailedLayers.front() + 0);
690  CHECK_EQ(failedSubgraphLayers.front() + 1, expectedFailedLayers.front() + 1);
691  CHECK_EQ(failedSubgraphLayers.front() + 2, expectedFailedLayers.front() + 2);
692 
693  // -----------------------------
694  // Check the untouched subgraphs
695  // -----------------------------
696 
697  CHECK(optimizationViews.GetUntouchedSubgraphs().empty());
698 }
699 
700 // A simple case with only one layer (convolution) to optimize, supported by the mock backend
701 void FullyOptimizableSubgraphTestImpl1()
702 {
703  Graph graph;
704  LayerNameToLayerMap layersInGraph;
705 
706  // Create a fully optimizable subgraph
707  SubgraphViewSelector::SubgraphViewPtr subgraphPtr = BuildFullyOptimizableSubgraph1(graph, layersInGraph);
708  CHECK((subgraphPtr != nullptr));
709 
710  const SubgraphView::IInputSlots& subgraphInputSlots = subgraphPtr->GetIInputSlots();
711  const SubgraphView::IOutputSlots& subgraphOutputSlots = subgraphPtr->GetIOutputSlots();
712  const SubgraphView::IConnectableLayers& subgraphLayers = subgraphPtr->GetIConnectableLayers();
713 
714  CHECK(subgraphInputSlots.size() == 1);
715  CHECK(subgraphOutputSlots.size() == 1);
716  CHECK(subgraphLayers.size() == 1);
717 
718  CHECK(Contains(layersInGraph, "conv layer"));
719 
720  // Create a mock backend object
721  MockBackendInitialiser initialiser; // Register the Mock Backend
722  auto backendObjPtr = CreateBackendObject(MockBackendId());
723  CHECK((backendObjPtr != nullptr));
724 
725  // Optimize the subgraph
726  OptimizationViews optimizationViews;
727 
728  // Check that the optimization is carried out correctly
729  CHECK_NOTHROW(optimizationViews = backendObjPtr->OptimizeSubgraphView(*subgraphPtr));
730 
731  // ===========================================================================================
732  // The expected results are:
733  // - Exactly one substitution, mapping the whole input subgraph to a new replacement subgraph
734  // - No failed subgraphs
735  // - No untouched subgraphs
736  // ===========================================================================================
737 
738  // -----------------------
739  // Check the substitutions
740  // -----------------------
741 
742  const OptimizationViews::Substitutions& substitutions = optimizationViews.GetSubstitutions();
743  CHECK(substitutions.size() == 1);
744 
745  CheckSubstitution(substitutions.at(0),
746  { subgraphInputSlots.size(), subgraphOutputSlots.size(), subgraphLayers.size() },
747  { subgraphInputSlots.size(), subgraphOutputSlots.size(), 1 },
748  subgraphInputSlots,
749  subgraphOutputSlots,
750  subgraphLayers);
751 
752  // --------------------------
753  // Check the failed subgraphs
754  // --------------------------
755 
756  CHECK(optimizationViews.GetFailedSubgraphs().empty());
757 
758  // -----------------------------
759  // Check the untouched subgraphs
760  // -----------------------------
761 
762  CHECK(optimizationViews.GetUntouchedSubgraphs().empty());
763 }
764 
765 // A case with five layers (all convolutions) to optimize, all supported by the mock backend
766 void FullyOptimizableSubgraphTestImpl2()
767 {
768  Graph graph;
769  LayerNameToLayerMap layersInGraph;
770 
771  // Create a fully optimizable subgraph
772  SubgraphViewSelector::SubgraphViewPtr subgraphPtr = BuildFullyOptimizableSubgraph2(graph, layersInGraph);
773  CHECK((subgraphPtr != nullptr));
774 
775  const SubgraphView::IInputSlots& subgraphInputSlots = subgraphPtr->GetIInputSlots();
776  const SubgraphView::IOutputSlots& subgraphOutputSlots = subgraphPtr->GetIOutputSlots();
777  const SubgraphView::IConnectableLayers& subgraphLayers = subgraphPtr->GetIConnectableLayers();
778 
779  CHECK(subgraphPtr->GetIInputSlots().size() == 1);
780  CHECK(subgraphPtr->GetIOutputSlots().size() == 1);
781  CHECK(subgraphPtr->GetIConnectableLayers().size() == 5);
782 
783  CHECK(Contains(layersInGraph, "conv1 layer"));
784  CHECK(Contains(layersInGraph, "conv2 layer"));
785  CHECK(Contains(layersInGraph, "conv3 layer"));
786  CHECK(Contains(layersInGraph, "conv4 layer"));
787  CHECK(Contains(layersInGraph, "conv5 layer"));
788 
789  // Create a mock backend object
790  MockBackendInitialiser initialiser; // Register the Mock Backend
791  auto backendObjPtr = CreateBackendObject(MockBackendId());
792  CHECK((backendObjPtr != nullptr));
793 
794  // Optimize the subgraph
795  OptimizationViews optimizationViews;
796 
797  // Check that the optimization is carried out correctly
798  CHECK_NOTHROW(optimizationViews = backendObjPtr->OptimizeSubgraphView(*subgraphPtr));
799 
800  // ===========================================================================================
801  // The expected results are:
802  // - Exactly one substitution, mapping the whole input subgraph to a new replacement subgraph
803  // - No failed subgraphs
804  // - No untouched subgraphs
805  // ===========================================================================================
806 
807  // -----------------------
808  // Check the substitutions
809  // -----------------------
810 
811  const OptimizationViews::Substitutions& substitutions = optimizationViews.GetSubstitutions();
812  CHECK(substitutions.size() == 1);
813 
814  std::list<IConnectableLayer*> expectedSubstitutableLayers{ layersInGraph.at("conv1 layer"),
815  layersInGraph.at("conv2 layer"),
816  layersInGraph.at("conv3 layer"),
817  layersInGraph.at("conv4 layer"),
818  layersInGraph.at("conv5 layer") };
819 
820  const OptimizationViews::SubstitutionPair& substitution = substitutions.at(0);
821 
822  CheckSubstitution(substitution,
823  { subgraphInputSlots.size(), subgraphOutputSlots.size(), subgraphLayers.size() },
824  { subgraphInputSlots.size(), subgraphOutputSlots.size(), 1 },
825  subgraphInputSlots,
826  subgraphOutputSlots,
827  expectedSubstitutableLayers);
828 
829  const SubgraphView::IConnectableLayers& substitutableSubgraphLayers =
831 
832  CHECK_EQ(substitutableSubgraphLayers.front() + 0, expectedSubstitutableLayers.front() + 0);
833  CHECK_EQ(substitutableSubgraphLayers.front() + 1, expectedSubstitutableLayers.front() + 1);
834  CHECK_EQ(substitutableSubgraphLayers.front() + 2, expectedSubstitutableLayers.front() + 2);
835  CHECK_EQ(substitutableSubgraphLayers.front() + 3, expectedSubstitutableLayers.front() + 3);
836  CHECK_EQ(substitutableSubgraphLayers.front() + 4, expectedSubstitutableLayers.front() + 4);
837 
838  // --------------------------
839  // Check the failed subgraphs
840  // --------------------------
841 
842  CHECK(optimizationViews.GetFailedSubgraphs().empty());
843 
844  // -----------------------------
845  // Check the untouched subgraphs
846  // -----------------------------
847 
848  CHECK(optimizationViews.GetUntouchedSubgraphs().empty());
849 }
850 
851 // The input subgraph contaions both supported and unsupported layers
852 // (but only convolutions are unsupported by the mock backend)
853 void PartiallySupportedSubgraphTestImpl()
854 {
855  Graph graph;
856  LayerNameToLayerMap layersInGraph;
857 
858  // Create a fully optimizable subgraph
859  SubgraphViewSelector::SubgraphViewPtr subgraphPtr = BuildPartiallySupportedSubgraph(graph, layersInGraph);
860  CHECK((subgraphPtr != nullptr));
861 
862  const SubgraphView::IInputSlots& subgraphInputSlots = subgraphPtr->GetIInputSlots();
863  const SubgraphView::IOutputSlots& subgraphOutputSlots = subgraphPtr->GetIOutputSlots();
864  const SubgraphView::IConnectableLayers& subgraphLayers = subgraphPtr->GetIConnectableLayers();
865 
866  CHECK(subgraphInputSlots.size() == 1);
867  CHECK(subgraphOutputSlots.size() == 1);
868  CHECK(subgraphLayers.size() == 5);
869 
870  CHECK(Contains(layersInGraph, "conv1 layer"));
871  CHECK(Contains(layersInGraph, "pooling1 layer"));
872  CHECK(Contains(layersInGraph, "pooling2 layer"));
873  CHECK(Contains(layersInGraph, "conv2 layer"));
874  CHECK(Contains(layersInGraph, "pooling3 layer"));
875 
876  // Create a mock backend object
877  MockBackendInitialiser initialiser; // Register the Mock Backend
878  auto backendObjPtr = CreateBackendObject(MockBackendId());
879  CHECK((backendObjPtr != nullptr));
880 
881  // Optimize the subgraph
882  OptimizationViews optimizationViews;
883 
884  // Check that the optimization is carried out correctly
885  CHECK_NOTHROW(optimizationViews = backendObjPtr->OptimizeSubgraphView(*subgraphPtr));
886 
887  // ========================================================================
888  // The expected results are:
889  // - Exactly two substitution, corresponding to the supported layers
890  // - Exactly two failed subgraphs, corresponding to the unsupported layers
891  // - No untouched subgraphs
892  // ========================================================================
893 
894  // -----------------------
895  // Check the substitutions
896  // -----------------------
897 
898  OptimizationViews::Substitutions substitutions = optimizationViews.GetSubstitutions();
899  CHECK(substitutions.size() == 2);
900  // Sort into a consistent order
901  std::sort(substitutions.begin(), substitutions.end(), [](auto s1, auto s2) {
902  return strcmp(s1.m_SubstitutableSubgraph.GetIConnectableLayers().front()->GetName(),
903  s2.m_SubstitutableSubgraph.GetIConnectableLayers().front()->GetName()) < 0;
904  });
905 
906  std::vector<ExpectedSubgraphSize> expectedSubstitutableSubgraphSizes{ { 1, 1, 1 },
907  { 1, 1, 1 } };
908  std::vector<ExpectedSubgraphSize> expectedReplacementSubgraphSizes{ { 1, 1, 1 },
909  { 1, 1, 1 } };
910  std::vector<SubgraphView::IInputSlots> expectedSubstitutableInputSlots
911  {
912  ConvertSlotsToISlots<InputSlot, IInputSlot>(
913  ConvertReferenceTypeToPointerType(layersInGraph.at("conv1 layer")->GetInputSlots())),
914  ConvertSlotsToISlots<InputSlot, IInputSlot>(
915  ConvertReferenceTypeToPointerType(layersInGraph.at("conv2 layer")->GetInputSlots()))
916  };
917 
918  std::vector<SubgraphView::IOutputSlots> expectedSubstitutableOutputSlots
919  {
920  ConvertSlotsToISlots<OutputSlot, IOutputSlot>(
921  ConvertReferenceTypeToPointerType(layersInGraph.at("conv1 layer")->GetOutputSlots())),
922  ConvertSlotsToISlots<OutputSlot, IOutputSlot>(
923  ConvertReferenceTypeToPointerType(layersInGraph.at("conv2 layer")->GetOutputSlots()))
924  };
925  std::vector<SubgraphView::IConnectableLayers> expectedSubstitutableLayers
926  {
927  { layersInGraph.at("conv1 layer") },
928  { layersInGraph.at("conv2 layer") }
929  };
930 
931  for (size_t substitutionIndex = 0; substitutionIndex < substitutions.size(); substitutionIndex++)
932  {
933  CheckSubstitution(substitutions.at(substitutionIndex),
934  expectedSubstitutableSubgraphSizes.at(substitutionIndex),
935  expectedReplacementSubgraphSizes.at(substitutionIndex),
936  expectedSubstitutableInputSlots.at(substitutionIndex),
937  expectedSubstitutableOutputSlots.at(substitutionIndex),
938  expectedSubstitutableLayers.at(substitutionIndex));
939  }
940 
941  // --------------------------
942  // Check the failed subgraphs
943  // --------------------------
944 
945  OptimizationViews::Subgraphs failedSubgraphs = optimizationViews.GetFailedSubgraphs();
946  CHECK(failedSubgraphs.size() == 2);
947  // Sort into a consistent order
948  std::sort(failedSubgraphs.begin(), failedSubgraphs.end(), [](auto s1, auto s2) {
949  return strcmp(s1.GetIConnectableLayers().front()->GetName(),
950  s2.GetIConnectableLayers().front()->GetName()) < 0;
951  });
952 
953  std::vector<ExpectedSubgraphSize> expectedFailedSubgraphSizes{ { 1, 1, 2 },
954  { 1, 1, 1 } };
955  std::vector<SubgraphView::IInputSlots> expectedFailedInputSlots
956  {
957  ConvertSlotsToISlots<InputSlot, IInputSlot>(
958  ConvertReferenceTypeToPointerType(layersInGraph.at("pooling1 layer")->GetInputSlots())),
959  ConvertSlotsToISlots<InputSlot, IInputSlot>(
960  ConvertReferenceTypeToPointerType(layersInGraph.at("pooling3 layer")->GetInputSlots()))
961  };
962  std::vector<SubgraphView::IOutputSlots> expectedFailedOutputSlots
963  {
964  ConvertSlotsToISlots<OutputSlot, IOutputSlot>(
965  ConvertReferenceTypeToPointerType(layersInGraph.at("pooling2 layer")->GetOutputSlots())),
966  ConvertSlotsToISlots<OutputSlot, IOutputSlot>(
967  ConvertReferenceTypeToPointerType(layersInGraph.at("pooling3 layer")->GetOutputSlots()))
968  };
969  std::vector<SubgraphView::IConnectableLayers> expectedFailedLayers
970  {
971  { layersInGraph.at("pooling1 layer"),
972  layersInGraph.at("pooling2 layer") },
973  { layersInGraph.at("pooling3 layer") }
974  };
975 
976  for (size_t failedIndex = 0; failedIndex < failedSubgraphs.size(); failedIndex++)
977  {
978  CheckFailedSubgraph(failedSubgraphs.at(failedIndex),
979  expectedFailedSubgraphSizes.at(failedIndex),
980  expectedFailedInputSlots.at(failedIndex),
981  expectedFailedOutputSlots.at(failedIndex),
982  expectedFailedLayers.at(failedIndex));
983  }
984 
985  // -----------------------------
986  // Check the untouched subgraphs
987  // -----------------------------
988 
989  CHECK(optimizationViews.GetUntouchedSubgraphs().empty());
990 }
991 
992 // The input subgraph contains only unoptimizable layers ("unoptimizable" is added to the layer's name)
993 void FullyUnoptimizableSubgraphTestImpl1()
994 {
995  Graph graph;
996  LayerNameToLayerMap layersInGraph;
997 
998  // Create a fully optimizable subgraph
999  SubgraphViewSelector::SubgraphViewPtr subgraphPtr = BuildFullyUnoptimizableSubgraph1(graph, layersInGraph);
1000  CHECK((subgraphPtr != nullptr));
1001 
1002  const SubgraphView::IInputSlots& subgraphInputSlots = subgraphPtr->GetIInputSlots();
1003  const SubgraphView::IOutputSlots& subgraphOutputSlots = subgraphPtr->GetIOutputSlots();
1004  const SubgraphView::IConnectableLayers& subgraphLayers = subgraphPtr->GetIConnectableLayers();
1005 
1006  CHECK(subgraphInputSlots.size() == 1);
1007  CHECK(subgraphOutputSlots.size() == 1);
1008  CHECK(subgraphLayers.size() == 1);
1009 
1010  CHECK(Contains(layersInGraph, "conv layer unoptimizable"));
1011 
1012  // Create a mock backend object
1013  MockBackendInitialiser initialiser; // Register the Mock Backend
1014  auto backendObjPtr = CreateBackendObject(MockBackendId());
1015  CHECK((backendObjPtr != nullptr));
1016 
1017  // Optimize the subgraph
1018  OptimizationViews optimizationViews;
1019 
1020  // Check that the optimization is carried out correctly
1021  CHECK_NOTHROW(optimizationViews = backendObjPtr->OptimizeSubgraphView(*subgraphPtr));
1022 
1023  // ============================================================================
1024  // The expected results are:
1025  // - No substitutions
1026  // - No failed subgraphs
1027  // - Exactly one untouched subgraph, corresponding to the whole input subgraph
1028  // ============================================================================
1029 
1030  // -----------------------
1031  // Check the substitutions
1032  // -----------------------
1033 
1034  CHECK(optimizationViews.GetSubstitutions().empty());
1035 
1036  // --------------------------
1037  // Check the failed subgraphs
1038  // --------------------------
1039 
1040  CHECK(optimizationViews.GetFailedSubgraphs().empty());
1041 
1042  // -----------------------------
1043  // Check the untouched subgraphs
1044  // -----------------------------
1045 
1046  const OptimizationViews::Subgraphs& untouchedSubgraphs = optimizationViews.GetUntouchedSubgraphs();
1047  CHECK(untouchedSubgraphs.size() == 1);
1048 
1049  CheckUntouchedSubgraph(untouchedSubgraphs.at(0),
1050  { subgraphInputSlots.size(), subgraphOutputSlots.size(), subgraphLayers.size() },
1051  subgraphInputSlots,
1052  subgraphOutputSlots,
1053  subgraphLayers);
1054 }
1055 
1056 // The input subgraph contains some unoptimizable layers ("unoptimizable" is added to the layer's name)
1057 void PartiallyOptimizableSubgraphTestImpl1()
1058 {
1059  Graph graph;
1060  LayerNameToLayerMap layersInGraph;
1061 
1062  // Create a fully optimizable subgraph
1063  SubgraphViewSelector::SubgraphViewPtr subgraphPtr = BuildPartiallyOptimizableSubgraph1(graph, layersInGraph);
1064  CHECK((subgraphPtr != nullptr));
1065 
1066  const SubgraphView::IInputSlots& subgraphInputSlots = subgraphPtr->GetIInputSlots();
1067  const SubgraphView::IOutputSlots& subgraphOutputSlots = subgraphPtr->GetIOutputSlots();
1068  const SubgraphView::IConnectableLayers& subgraphLayers = subgraphPtr->GetIConnectableLayers();
1069 
1070  CHECK(subgraphInputSlots.size() == 1);
1071  CHECK(subgraphOutputSlots.size() == 1);
1072  CHECK(subgraphLayers.size() == 5);
1073 
1074  CHECK(Contains(layersInGraph, "conv1 layer"));
1075  CHECK(Contains(layersInGraph, "conv2 layer unoptimizable"));
1076  CHECK(Contains(layersInGraph, "conv3 layer"));
1077  CHECK(Contains(layersInGraph, "conv4 layer unoptimizable"));
1078  CHECK(Contains(layersInGraph, "conv5 layer"));
1079 
1080  // Create a mock backend object
1081  MockBackendInitialiser initialiser; // Register the Mock Backend
1082  auto backendObjPtr = CreateBackendObject(MockBackendId());
1083  CHECK((backendObjPtr != nullptr));
1084 
1085  // Optimize the subgraph
1086  OptimizationViews optimizationViews;
1087 
1088  // Check that the optimization is carried out correctly
1089  CHECK_NOTHROW(optimizationViews = backendObjPtr->OptimizeSubgraphView(*subgraphPtr));
1090 
1091  // ===============================================================================
1092  // The expected results are:
1093  // - Exactly three substitutions, corresponding to the optimizable layers
1094  // - No failed subgraphs
1095  // - Exactly two untouched subgraphs, corresponding to the non-optimizable layers
1096  // ===============================================================================
1097 
1098  // -----------------------
1099  // Check the substitutions
1100  // -----------------------
1101 
1102  OptimizationViews::Substitutions substitutions = optimizationViews.GetSubstitutions();
1103  CHECK(substitutions.size() == 3);
1104  // Sort into a consistent order
1105  std::sort(substitutions.begin(), substitutions.end(),
1106  [](auto s1, auto s2)
1107  { return strcmp(s1.m_SubstitutableSubgraph.GetIConnectableLayers().front()->GetName(),
1108  s2.m_SubstitutableSubgraph.GetIConnectableLayers().front()->GetName()) < 0; });
1109 
1110  std::vector<ExpectedSubgraphSize> expectedSubstitutableSubgraphSizes{ { 1, 1, 1 },
1111  { 1, 1, 1 },
1112  { 1, 1, 1 } };
1113  std::vector<ExpectedSubgraphSize> expectedReplacementSubgraphSizes{ { 1, 1, 1 },
1114  { 1, 1, 1 },
1115  { 1, 1, 1 } };
1116  std::vector<SubgraphView::IInputSlots> expectedSubstitutableInputSlots
1117  {
1118  ConvertSlotsToISlots<InputSlot, IInputSlot>(
1119  ConvertReferenceTypeToPointerType(layersInGraph.at("conv1 layer")->GetInputSlots())),
1120  ConvertSlotsToISlots<InputSlot, IInputSlot>(
1121  ConvertReferenceTypeToPointerType(layersInGraph.at("conv3 layer")->GetInputSlots())),
1122  ConvertSlotsToISlots<InputSlot, IInputSlot>(
1123  ConvertReferenceTypeToPointerType(layersInGraph.at("conv5 layer")->GetInputSlots()))
1124  };
1125  std::vector<SubgraphView::IOutputSlots> expectedSubstitutableOutputSlots
1126  {
1127  ConvertSlotsToISlots<OutputSlot, IOutputSlot>(
1128  ConvertReferenceTypeToPointerType(layersInGraph.at("conv1 layer")->GetOutputSlots())),
1129  ConvertSlotsToISlots<OutputSlot, IOutputSlot>(
1130  ConvertReferenceTypeToPointerType(layersInGraph.at("conv3 layer")->GetOutputSlots())),
1131  ConvertSlotsToISlots<OutputSlot, IOutputSlot>(
1132  ConvertReferenceTypeToPointerType(layersInGraph.at("conv5 layer")->GetOutputSlots()))
1133  };
1134  std::vector<SubgraphView::IConnectableLayers> expectedSubstitutableLayers
1135  {
1136  { layersInGraph.at("conv1 layer") },
1137  { layersInGraph.at("conv3 layer") },
1138  { layersInGraph.at("conv5 layer") }
1139  };
1140 
1141  for (size_t substitutionIndex = 0; substitutionIndex < substitutions.size(); substitutionIndex++)
1142  {
1143  CheckSubstitution(substitutions.at(substitutionIndex),
1144  expectedSubstitutableSubgraphSizes.at(substitutionIndex),
1145  expectedReplacementSubgraphSizes.at(substitutionIndex),
1146  expectedSubstitutableInputSlots.at(substitutionIndex),
1147  expectedSubstitutableOutputSlots.at(substitutionIndex),
1148  expectedSubstitutableLayers.at(substitutionIndex));
1149  }
1150 
1151  // --------------------------
1152  // Check the failed subgraphs
1153  // --------------------------
1154 
1155  CHECK(optimizationViews.GetFailedSubgraphs().empty());
1156 
1157  // -----------------------------
1158  // Check the untouched subgraphs
1159  // -----------------------------
1160 
1161  OptimizationViews::Subgraphs untouchedSubgraphs = optimizationViews.GetUntouchedSubgraphs();
1162  CHECK(untouchedSubgraphs.size() == 2);
1163  // Sort into a consistent order
1164  std::sort(untouchedSubgraphs.begin(), untouchedSubgraphs.end(), [](auto s1, auto s2) {
1165  return strcmp(s1.GetIConnectableLayers().front()->GetName(),
1166  s2.GetIConnectableLayers().front()->GetName()) < 0;
1167  });
1168 
1169  std::vector<ExpectedSubgraphSize> expectedUntouchedSubgraphSizes{ { 1, 1, 1 },
1170  { 1, 1, 1 } };
1171  std::vector<SubgraphView::IInputSlots> expectedUntouchedInputSlots
1172  {
1173  ConvertSlotsToISlots<InputSlot, IInputSlot>(
1174  ConvertReferenceTypeToPointerType(layersInGraph.at("conv2 layer unoptimizable")->GetInputSlots())),
1175  ConvertSlotsToISlots<InputSlot, IInputSlot>(
1176  ConvertReferenceTypeToPointerType(layersInGraph.at("conv4 layer unoptimizable")->GetInputSlots()))
1177  };
1178  std::vector<SubgraphView::IOutputSlots> expectedUntouchedOutputSlots
1179  {
1180  ConvertSlotsToISlots<OutputSlot, IOutputSlot>(
1181  ConvertReferenceTypeToPointerType(layersInGraph.at("conv2 layer unoptimizable")->GetOutputSlots())),
1182  ConvertSlotsToISlots<OutputSlot, IOutputSlot>(
1183  ConvertReferenceTypeToPointerType(layersInGraph.at("conv4 layer unoptimizable")->GetOutputSlots()))
1184  };
1185  std::vector<SubgraphView::IConnectableLayers> expectedUntouchedLayers
1186  {
1187  { layersInGraph.at("conv2 layer unoptimizable") },
1188  { layersInGraph.at("conv4 layer unoptimizable") }
1189  };
1190 
1191  for (size_t untouchedIndex = 0; untouchedIndex < untouchedSubgraphs.size(); untouchedIndex++)
1192  {
1193  CheckUntouchedSubgraph(untouchedSubgraphs.at(untouchedIndex),
1194  expectedUntouchedSubgraphSizes.at(untouchedIndex),
1195  expectedUntouchedInputSlots.at(untouchedIndex),
1196  expectedUntouchedOutputSlots.at(untouchedIndex),
1197  expectedUntouchedLayers.at(untouchedIndex));
1198  }
1199 }
1200 
// The input subgraph contains some unoptimizable layers ("unoptimizable" is added to the layer's name),
// this is meant to test input slots coming from different layers
void PartiallyOptimizableSubgraphTestImpl2()
{
    Graph graph;
    LayerNameToLayerMap layersInGraph;

    // Create a partially optimizable subgraph
    SubgraphViewSelector::SubgraphViewPtr subgraphPtr = BuildPartiallyOptimizableSubgraph2(graph, layersInGraph);
    CHECK((subgraphPtr != nullptr));

    const SubgraphView::IInputSlots& subgraphInputSlots = subgraphPtr->GetIInputSlots();
    const SubgraphView::IOutputSlots& subgraphOutputSlots = subgraphPtr->GetIOutputSlots();
    const SubgraphView::IConnectableLayers& subgraphLayers = subgraphPtr->GetIConnectableLayers();

    // Two input slots: this subgraph is fed through two distinct entry points
    CHECK(subgraphInputSlots.size() == 2);
    CHECK(subgraphOutputSlots.size() == 1);
    CHECK(subgraphLayers.size() == 4);

    CHECK(Contains(layersInGraph, "conv1 layer"));
    CHECK(Contains(layersInGraph, "conv2 layer unoptimizable"));
    CHECK(Contains(layersInGraph, "conv3 layer"));
    CHECK(Contains(layersInGraph, "add layer"));

    // Create a mock backend object
    MockBackendInitialiser initialiser; // Register the Mock Backend
    auto backendObjPtr = CreateBackendObject(MockBackendId());
    CHECK((backendObjPtr != nullptr));

    // Optimize the subgraph
    OptimizationViews optimizationViews;

    // Check that the optimization is carried out correctly
    CHECK_NOTHROW(optimizationViews = backendObjPtr->OptimizeSubgraphView(*subgraphPtr));

    // ==============================================================================
    // The expected results are:
    // - Exactly one substitution, corresponding to the optimizable layers
    // - No failed subgraphs
    // - Exactly one untouched subgraph, corresponding to the non-optimizable layer
    //   (the checks below expect a single untouched subgraph)
    // ==============================================================================

    // -----------------------
    // Check the substitutions
    // -----------------------

    const OptimizationViews::Substitutions& substitutions = optimizationViews.GetSubstitutions();
    CHECK(substitutions.size() == 1);

    // The single substitution covers conv1 + conv3 + add (2 inputs, 1 output, 3 layers),
    // replaced by a subgraph with the same boundary slots but only 1 layer
    ExpectedSubgraphSize expectedSubstitutableSubgraphSizes{ 2, 1, 3 };
    ExpectedSubgraphSize expectedReplacementSubgraphSizes{ 2, 1, 1 };

    SubgraphView::IInputSlots expectedSubstitutableInputSlots
    {
        // The helpers operate on vectors, so wrap each single slot in a one-element
        // vector and immediately unwrap the converted result with [0]
        ConvertSlotsToISlots<InputSlot, IInputSlot>({
            ConvertReferenceTypeToPointerType(layersInGraph.at("conv1 layer")->GetInputSlots()[0])})[0],
        ConvertSlotsToISlots<InputSlot, IInputSlot>({
            ConvertReferenceTypeToPointerType(layersInGraph.at("conv3 layer")->GetInputSlots()[0])})[0]
    };

    SubgraphView::IOutputSlots expectedSubstitutableOutputSlots
    {
        ConvertSlotsToISlots<OutputSlot, IOutputSlot>(
            ConvertReferenceTypeToPointerType(layersInGraph.at("add layer")->GetOutputSlots()))
    };

    SubgraphView::IConnectableLayers expectedSubstitutableLayers
    {
        layersInGraph.at("conv1 layer"),
        layersInGraph.at("conv3 layer"),
        layersInGraph.at("add layer")
    };

    CheckSubstitution(substitutions[0],
                      expectedSubstitutableSubgraphSizes,
                      expectedReplacementSubgraphSizes,
                      expectedSubstitutableInputSlots,
                      expectedSubstitutableOutputSlots,
                      expectedSubstitutableLayers);

    // --------------------------
    // Check the failed subgraphs
    // --------------------------

    CHECK(optimizationViews.GetFailedSubgraphs().empty());

    // -----------------------------
    // Check the untouched subgraphs
    // -----------------------------

    const OptimizationViews::Subgraphs& untouchedSubgraphs = optimizationViews.GetUntouchedSubgraphs();
    CHECK(untouchedSubgraphs.size() == 1);

    // Only the "unoptimizable" convolution is left untouched
    std::vector<ExpectedSubgraphSize> expectedUntouchedSubgraphSizes{ { 1, 1, 1 } };
    std::vector<SubgraphView::IInputSlots> expectedUntouchedInputSlots
    {
        ConvertSlotsToISlots<InputSlot, IInputSlot>(
            ConvertReferenceTypeToPointerType(layersInGraph.at("conv2 layer unoptimizable")->GetInputSlots()))
    };
    std::vector<SubgraphView::IOutputSlots> expectedUntouchedOutputSlots
    {
        ConvertSlotsToISlots<OutputSlot, IOutputSlot>(
            ConvertReferenceTypeToPointerType(layersInGraph.at("conv2 layer unoptimizable")->GetOutputSlots()))
    };
    std::vector<SubgraphView::IConnectableLayers> expectedUntouchedLayers
    {
        { layersInGraph.at("conv2 layer unoptimizable") }
    };

    for (size_t untouchedIndex = 0; untouchedIndex < untouchedSubgraphs.size(); untouchedIndex++)
    {
        CheckUntouchedSubgraph(untouchedSubgraphs.at(untouchedIndex),
                               expectedUntouchedSubgraphSizes.at(untouchedIndex),
                               expectedUntouchedInputSlots.at(untouchedIndex),
                               expectedUntouchedOutputSlots.at(untouchedIndex),
                               expectedUntouchedLayers.at(untouchedIndex));
    }
}
1319 
1320 } // Anonymous namespace
1321 
// Each test case simply forwards to the corresponding implementation function
// defined in the anonymous namespace above
TEST_SUITE("OptimizeSubGraph")
{
TEST_CASE("FullyUnsupportedSubgraph1") { FullyUnsupporteSubgraphTestImpl1(); }
TEST_CASE("FullyUnsupportedSubgraph2") { FullyUnsupporteSubgraphTestImpl2(); }
TEST_CASE("FullyOptimizableSubgraph1") { FullyOptimizableSubgraphTestImpl1(); }
TEST_CASE("FullyOptimizableSubgraph2") { FullyOptimizableSubgraphTestImpl2(); }
TEST_CASE("PartiallySupportedSubgraph") { PartiallySupportedSubgraphTestImpl(); }
TEST_CASE("FullyUnoptimizableSubgraph") { FullyUnoptimizableSubgraphTestImpl1(); }
TEST_CASE("PartiallyOptimizableSubgraph1") { PartiallyOptimizableSubgraphTestImpl1(); }
TEST_CASE("PartiallyOptimizableSubgraph2") { PartiallyOptimizableSubgraphTestImpl2(); }

}
TEST_SUITE("TestConstTensorLayerVisitor")
bool Contains(const CollectionType &collection, const typename CollectionType::value_type &item)
Interface for a layer that is connectable to other layers via InputSlots and OutputSlots.
Definition: INetwork.hpp:66
uint32_t m_PadBottom
Padding bottom value in the height dimension.
const IOutputSlots & GetIOutputSlots() const
constexpr const char * MockBackendId()
Definition: MockBackend.cpp:13
uint32_t m_PadLeft
Padding left value in the width dimension.
const IConnectableLayers & GetIConnectableLayers() const
const IInputSlots & GetIInputSlots() const
LayerT * AddLayer(Args &&... args)
Adds a new layer, of type LayerType, to the graph constructed with the arguments passed.
Definition: Graph.hpp:420
uint32_t m_PoolWidth
Pooling width value.
A Convolution2dDescriptor for the Convolution2dLayer.
int Connect(InputSlot &destination)
Definition: Layer.cpp:86
The padding fields don&#39;t count and are ignored.
PaddingMethod m_PaddingMethod
The padding method to be used. (Exclude, IgnoreValue).
uint32_t m_PadTop
Padding top value in the height dimension.
std::vector< SubgraphView > Subgraphs
std::vector< SubstitutionPair > Substitutions
Copyright (c) 2021 ARM Limited and Contributors.
SubgraphView m_SubstitutableSubgraph
Subgraph of Layers from the original graph which should be replaced.
uint32_t m_StrideX
Stride value when proceeding through input for the width dimension.
int LayerBindingId
Type of identifiers for bindable layers (inputs, outputs).
Definition: Types.hpp:277
The SubgraphView class represents a subgraph of a Graph.
uint32_t m_PoolHeight
Pooling height value.
const InputSlot & GetInputSlot(unsigned int index) const override
Get a const input slot handle by slot index.
Definition: Layer.hpp:321
uint32_t m_StrideX
Stride value when proceeding through input for the width dimension.
A layer user-provided data can be bound to (e.g. inputs, outputs).
Definition: OutputLayer.hpp:13
std::unique_ptr< SubgraphView > SubgraphViewPtr
std::list< IConnectableLayer * > IConnectableLayers
uint32_t m_PadRight
Padding right value in the width dimension.
SubgraphView::InputSlots CreateInputsFrom(const std::vector< Layer *> &layers)
std::vector< IOutputSlot * > IOutputSlots
const Subgraphs & GetUntouchedSubgraphs() const
const Subgraphs & GetFailedSubgraphs() const
SubgraphView m_ReplacementSubgraph
A subgraph of new layers which will replace layers in m_SubstitutableSubgraph.
std::vector< IInputSlot * > IInputSlots
void SetWeightAndBias(ConvolutionLayer *layer, const armnn::TensorInfo &weightInfo, const armnn::TensorInfo &biasInfo)
This layer represents a pooling 2d operation.
DataLayout m_DataLayout
The data layout to be used (NCHW, NHWC).
This layer represents an addition operation.
const Substitutions & GetSubstitutions() const
std::unique_ptr< SubgraphView > SubgraphViewPtr
bool AreEqual(const CollectionType &lhs, const CollectionType &rhs)
SubgraphView::SubgraphViewPtr CreateSubgraphViewFrom(SubgraphView::InputSlots &&inputs, SubgraphView::OutputSlots &&outputs, SubgraphView::Layers &&layers)
SubgraphView::OutputSlots CreateOutputsFrom(const std::vector< Layer *> &layers)
PoolingAlgorithm m_PoolType
The pooling algorithm to use (Max. Average, L2).
A layer user-provided data can be bound to (e.g. inputs, outputs).
Definition: InputLayer.hpp:13
void SetTensorInfo(const TensorInfo &tensorInfo) override
Definition: Layer.cpp:61
armnn::IBackendInternalUniquePtr CreateBackendObject(const armnn::BackendId &backendId)
const OutputSlot & GetOutputSlot(unsigned int index=0) const override
Get the const output slot handle by slot index.
Definition: Layer.hpp:323
const char * GetName() const override
Returns the name of the layer.
Definition: Layer.hpp:316
This layer represents a convolution 2d operation.
A Pooling2dDescriptor for the Pooling2dLayer.
uint32_t m_StrideY
Stride value when proceeding through input for the height dimension.