ArmNN
 22.05
OptimizeSubgraphViewTests.cpp
Go to the documentation of this file.
1 //
2 // Copyright © 2017 Arm Ltd. All rights reserved.
3 // SPDX-License-Identifier: MIT
4 //
5 
6 #include <CommonTestUtils.hpp>
7 #include "MockBackendId.hpp"
8 
9 #include <Graph.hpp>
10 #include <Network.hpp>
11 
14 
15 #include <doctest/doctest.h>
16 #include <unordered_map>
17 
18 using namespace armnn;
19 
20 namespace
21 {
22 
// The expected number of layers, input and output slots in a subgraph after a test
struct ExpectedSubgraphSize
{
    size_t m_NumInputSlots  = 0; // Expected result of GetIInputSlots().size()
    size_t m_NumOutputSlots = 0; // Expected result of GetIOutputSlots().size()
    size_t m_NumLayers      = 0; // Expected result of GetIConnectableLayers().size()
};

// Keep the layers organized by layer name, so tests can look a layer up after construction
using LayerNameToLayerMap = std::unordered_map<std::string, Layer*>;
33 
// Used to convert input and output slots from reference type (as stored in graphs) to
// pointer type (as stored in subgraphs)
template <typename SlotType>
SlotType* ConvertReferenceTypeToPointerType(const SlotType& input)
{
    // Graphs hand out const references; subgraphs store mutable pointers,
    // hence the const_cast.
    SlotType& mutableSlot = const_cast<SlotType&>(input);
    return &mutableSlot;
}
41 
// Used to convert input and output slots from reference type (as stored in graphs) to
// pointer type (as stored in subgraphs), array version
template <typename SlotType>
std::vector<SlotType*> ConvertReferenceTypeToPointerType(const std::vector<SlotType>& input)
{
    std::vector<SlotType*> output;
    output.reserve(input.size());
    for (const SlotType& item : input)
    {
        // Same conversion as the scalar overload: strip constness and take the address
        output.push_back(const_cast<SlotType*>(&item));
    }
    return output;
}
58 
59 // Convert from vector of Slots* (Input/Output) to vector of ISlots* (IInput/IOutput)
60 template <typename SlotType, typename ResultSlotType>
61 std::vector<ResultSlotType*> ConvertSlotsToISlots(const std::vector<SlotType*> input)
62 {
63  std::vector<ResultSlotType*> output;
64  for (auto slot : input)
65  {
66  output.push_back(PolymorphicDowncast<ResultSlotType*>(slot));
67  }
68  return output;
69 }
70 
71 // Convenience function to add an input layer to a graph
72 Layer* AddInputLayer(Graph& graph,
73  const std::string& layerName,
74  const TensorInfo& inputInfo,
75  LayerBindingId inputId = 0)
76 {
77  Layer* const inputLayer = graph.AddLayer<InputLayer>(inputId, layerName.c_str());
78  CHECK(inputLayer);
79  inputLayer->GetOutputSlot(0).SetTensorInfo(inputInfo);
80  return inputLayer;
81 }
82 
83 // Convenience function to add an output layer to a graph
84 Layer* AddOutputLayer(Graph& graph,
85  const std::string& layerName)
86 {
87  Layer* const outputLayer = graph.AddLayer<OutputLayer>(0, layerName.c_str());
88  CHECK(outputLayer);
89  return outputLayer;
90 }
91 
92 // Convenience function to add a convolution layer to a graph
93 Convolution2dLayer* AddConvolutionLayer(Graph& graph,
94  LayerNameToLayerMap& layersInGraph,
95  const Convolution2dDescriptor& convolutionDescriptor,
96  const std::string& layerName,
97  const TensorInfo& weightInfo,
98  const TensorInfo& biasInfo,
99  const TensorInfo& outputInfo)
100 {
101  Convolution2dLayer* const convLayer = graph.AddLayer<Convolution2dLayer>(convolutionDescriptor, layerName.c_str());
102  CHECK(convLayer);
103  SetWeightAndBias(convLayer, weightInfo, biasInfo);
104  convLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);
105  layersInGraph.insert(std::make_pair(convLayer->GetName(), convLayer));
106  return convLayer;
107 }
108 
109 // Convenience function to add a constant layer to a graph
110 ConstantLayer* AddConstantLayer(Graph& graph,
111  LayerNameToLayerMap& layersInGraph,
112  const std::string& layerName,
113  const ConstTensor& constTensor,
114  const TensorInfo& outputInfo)
115 {
116  ConstantLayer* const constantLayer = graph.AddLayer<ConstantLayer>(layerName.c_str());
117  CHECK(constantLayer);
118  constantLayer->m_LayerOutput = std::make_shared<ScopedTensorHandle>(constTensor);
119  constantLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);
120  layersInGraph.insert(std::make_pair(constantLayer->GetName(), constantLayer));
121  return constantLayer;
122 }
123 
124 // Convenience function to add a pooling layer to a graph
125 Pooling2dLayer* AddPoolingLayer(Graph& graph,
126  LayerNameToLayerMap& layersInGraph,
127  const Pooling2dDescriptor& poolingDescriptor,
128  const std::string& layerName,
129  const TensorInfo& outputInfo)
130 {
131  Pooling2dLayer* const poolingLayer = graph.AddLayer<Pooling2dLayer>(poolingDescriptor, layerName.c_str());
132  CHECK(poolingLayer);
133  poolingLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);
134  layersInGraph.insert(std::make_pair(poolingLayer->GetName(), poolingLayer));
135  return poolingLayer;
136 }
137 
138 // Convenience function to add an addition layer to a graph
139 AdditionLayer* AddAdditionaLayer(Graph& graph,
140  LayerNameToLayerMap& layersInGraph,
141  const std::string& layerName,
142  const TensorInfo& outputInfo)
143 {
144  AdditionLayer* const additionLayer = graph.AddLayer<AdditionLayer>(layerName.c_str());
145  CHECK(additionLayer);
146  additionLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);
147  layersInGraph.insert(std::make_pair(additionLayer->GetName(), additionLayer));
148  return additionLayer;
149 }
150 
151 // Convenience function to check that the given substitution matches the specified expected values
152 void CheckSubstitution(const OptimizationViews::SubstitutionPair& substitution,
153  const ExpectedSubgraphSize& expectedSubstitutableSubgraphSize,
154  const ExpectedSubgraphSize& expectedReplacementSubgraphSize,
155  const SubgraphView::IInputSlots& expectedSubstitutableInputSlots,
156  const SubgraphView::IOutputSlots& expectedSubstitutableOutputSlots,
157  const SubgraphView::IConnectableLayers& expectedSubstitutableLayers)
158 {
159  const SubgraphView& substitutableSubgraph = substitution.m_SubstitutableSubgraph;
160  const SubgraphView::IInputSlots& substitutableSubgraphInputSlots = substitutableSubgraph.GetIInputSlots();
161  const SubgraphView::IOutputSlots& substitutableSubgraphOutputSlots = substitutableSubgraph.GetIOutputSlots();
162  const SubgraphView::IConnectableLayers& substitutableSubgraphLayers =
163  substitutableSubgraph.GetIConnectableLayers();
164 
165  const SubgraphView& replacementSubgraph = substitution.m_ReplacementSubgraph;
166  const SubgraphView::IInputSlots& replacementSubgraphInputSlots = replacementSubgraph.GetIInputSlots();
167  const SubgraphView::IOutputSlots& replacementSubgraphOutputSlots = replacementSubgraph.GetIOutputSlots();
168  const SubgraphView::IConnectableLayers& replacementSubgraphLayers = replacementSubgraph.GetIConnectableLayers();
169 
170  CHECK(substitutableSubgraphInputSlots.size() == expectedSubstitutableSubgraphSize.m_NumInputSlots);
171  CHECK(substitutableSubgraphOutputSlots.size() == expectedSubstitutableSubgraphSize.m_NumOutputSlots);
172  CHECK(substitutableSubgraphLayers.size() == expectedSubstitutableSubgraphSize.m_NumLayers);
173 
174  CHECK(AreEqual(substitutableSubgraphInputSlots, expectedSubstitutableInputSlots));
175  CHECK(AreEqual(substitutableSubgraphOutputSlots, expectedSubstitutableOutputSlots));
176  CHECK(AreEqual(substitutableSubgraphLayers, expectedSubstitutableLayers));
177 
178  CHECK(replacementSubgraphInputSlots.size() == expectedReplacementSubgraphSize.m_NumInputSlots);
179  CHECK(replacementSubgraphOutputSlots.size() == expectedReplacementSubgraphSize.m_NumOutputSlots);
180  CHECK(replacementSubgraphLayers.size() == expectedReplacementSubgraphSize.m_NumLayers);
181 
182  CHECK(!AreEqual(replacementSubgraphInputSlots, expectedSubstitutableInputSlots));
183  CHECK(!AreEqual(replacementSubgraphOutputSlots, expectedSubstitutableOutputSlots));
184  CHECK(!AreEqual(replacementSubgraphLayers, expectedSubstitutableLayers));
185 
186  CHECK(std::all_of(replacementSubgraphLayers.begin(),
187  replacementSubgraphLayers.end(),
188  [](const IConnectableLayer* layer)
189  {
190  return layer->GetType() == LayerType::PreCompiled;
191  }));
192 }
193 
194 // Convenience function to check that the given failed subgraph matches the specified expected values
195 void CheckFailedSubgraph(const SubgraphView& failedSubgraph,
196  const ExpectedSubgraphSize& expectedFailedSubgraphSize,
197  const SubgraphView::IInputSlots& expectedFailedInputSlots,
198  const SubgraphView::IOutputSlots& expectedFailedOutputSlots,
199  const SubgraphView::IConnectableLayers& expectedFailedLayers)
200 {
201  const SubgraphView::IInputSlots& failedSubgraphInputSlots = failedSubgraph.GetIInputSlots();
202  const SubgraphView::IOutputSlots& failedSubgraphOutputSlots = failedSubgraph.GetIOutputSlots();
203  const SubgraphView::IConnectableLayers& failedSubgraphLayers = failedSubgraph.GetIConnectableLayers();
204 
205  CHECK(failedSubgraphInputSlots.size() == expectedFailedSubgraphSize.m_NumInputSlots);
206  CHECK(failedSubgraphOutputSlots.size() == expectedFailedSubgraphSize.m_NumOutputSlots);
207  CHECK(failedSubgraphLayers.size() == expectedFailedSubgraphSize.m_NumLayers);
208 
209  CHECK(AreEqual(failedSubgraphInputSlots, expectedFailedInputSlots));
210  CHECK(AreEqual(failedSubgraphOutputSlots, expectedFailedOutputSlots));
211  CHECK(AreEqual(failedSubgraphLayers, expectedFailedLayers));
212 }
213 
214 // Convenience function to check that the given untouched subgraph matches the specified expected values
215 void CheckUntouchedSubgraph(const SubgraphView& untouchedSubgraph,
216  const ExpectedSubgraphSize& expectedUntouchedSubgraphSize,
217  const SubgraphView::IInputSlots& expectedUntouchedInputSlots,
218  const SubgraphView::IOutputSlots& expectedUntouchedOutputSlots,
219  const SubgraphView::IConnectableLayers& expectedUntouchedLayers)
220 {
221  const SubgraphView::IInputSlots& untouchedSubgraphInputSlots = untouchedSubgraph.GetIInputSlots();
222  const SubgraphView::IOutputSlots& untouchedSubgraphOutputSlots = untouchedSubgraph.GetIOutputSlots();
223  const SubgraphView::IConnectableLayers& untouchedSubgraphLayers = untouchedSubgraph.GetIConnectableLayers();
224 
225  CHECK(untouchedSubgraphInputSlots.size() == expectedUntouchedSubgraphSize.m_NumInputSlots);
226  CHECK(untouchedSubgraphOutputSlots.size() == expectedUntouchedSubgraphSize.m_NumOutputSlots);
227  CHECK(untouchedSubgraphLayers.size() == expectedUntouchedSubgraphSize.m_NumLayers);
228 
229  CHECK(AreEqual(untouchedSubgraphInputSlots, expectedUntouchedInputSlots));
230  CHECK(AreEqual(untouchedSubgraphOutputSlots, expectedUntouchedOutputSlots));
231  CHECK(AreEqual(untouchedSubgraphLayers, expectedUntouchedLayers));
232 }
233 
// Creates a subgraph containing only a single unsupported layer (only convolutions are unsupported by the mock backend)
// NOTE(review): the parenthetical looks inverted — pooling is the layer treated as
// unsupported here, which suggests convolutions are the *supported* type; confirm
// against the MockBackend implementation.
SubgraphView::SubgraphViewPtr BuildFullyUnsupportedSubgraph1(Graph& graph, LayerNameToLayerMap& layersInGraph)
{
    // Quantized 8-bit tensors; identical info reused for input and output
    const TensorInfo inputInfo ({ 1, 16, 16, 16 }, DataType::QAsymmU8, 1.0f, 0);
    const TensorInfo outputInfo({ 1, 16, 16, 16 }, DataType::QAsymmU8, 1.0f, 0);

    // 2x2 average pooling, stride 2, 1-pixel padding all round, NHWC
    Pooling2dDescriptor poolingDescriptor;
    poolingDescriptor.m_PoolType      = armnn::PoolingAlgorithm::Average;
    poolingDescriptor.m_PoolWidth     = 2;
    poolingDescriptor.m_PoolHeight    = 2;
    poolingDescriptor.m_StrideX       = 2;
    poolingDescriptor.m_StrideY       = 2;
    poolingDescriptor.m_PadLeft       = 1;
    poolingDescriptor.m_PadRight      = 1;
    poolingDescriptor.m_PadTop        = 1;
    poolingDescriptor.m_PadBottom     = 1;
    poolingDescriptor.m_PaddingMethod = armnn::PaddingMethod::Exclude;
    poolingDescriptor.m_DataLayout    = DataLayout::NHWC;

    // Construct the graph: input -> pooling -> output
    Layer* const inputLayer = AddInputLayer(graph, "input layer", inputInfo);
    Pooling2dLayer* const poolingLayer = AddPoolingLayer(graph, layersInGraph, poolingDescriptor,
                                                         "pooling layer", outputInfo);
    Layer* const outputLayer = AddOutputLayer(graph, "output layer");

    // Connect the network
    inputLayer->GetOutputSlot(0).Connect(poolingLayer->GetInputSlot(0));
    poolingLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));

    // Create the subgraph view for the whole network (just the pooling layer)
    return CreateSubgraphViewFrom(CreateInputsFrom(poolingLayer),
                                  CreateOutputsFrom({poolingLayer}),
                                  {poolingLayer});
}
268 
// Creates a subgraph containing only unsupported layers (only convolutions are unsupported by the mock backend)
SubgraphView::SubgraphViewPtr BuildFullyUnsupportedSubgraph2(Graph& graph, LayerNameToLayerMap& layersInGraph)
{
    // All three pooling layers reuse the same tensor info and descriptor
    const TensorInfo inputInfo ({ 1, 16, 16, 16 }, DataType::QAsymmU8, 1.0f, 0);
    const TensorInfo outputInfo({ 1, 16, 16, 16 }, DataType::QAsymmU8, 1.0f, 0);

    // 2x2 average pooling, stride 2, 1-pixel padding all round, NHWC
    Pooling2dDescriptor poolingDescriptor;
    poolingDescriptor.m_PoolType      = armnn::PoolingAlgorithm::Average;
    poolingDescriptor.m_PoolWidth     = 2;
    poolingDescriptor.m_PoolHeight    = 2;
    poolingDescriptor.m_StrideX       = 2;
    poolingDescriptor.m_StrideY       = 2;
    poolingDescriptor.m_PadLeft       = 1;
    poolingDescriptor.m_PadRight      = 1;
    poolingDescriptor.m_PadTop        = 1;
    poolingDescriptor.m_PadBottom     = 1;
    poolingDescriptor.m_PaddingMethod = armnn::PaddingMethod::Exclude;
    poolingDescriptor.m_DataLayout    = DataLayout::NHWC;

    // Construct the graph: input -> pooling1 -> pooling2 -> pooling3 -> output
    Layer* const inputLayer = AddInputLayer(graph, "input layer", inputInfo);
    Pooling2dLayer* const pooling1Layer = AddPoolingLayer(graph, layersInGraph, poolingDescriptor,
                                                          "pooling1 layer", outputInfo);
    Pooling2dLayer* const pooling2Layer = AddPoolingLayer(graph, layersInGraph, poolingDescriptor,
                                                          "pooling2 layer", outputInfo);
    Pooling2dLayer* const pooling3Layer = AddPoolingLayer(graph, layersInGraph, poolingDescriptor,
                                                          "pooling3 layer", outputInfo);
    Layer* const outputLayer = AddOutputLayer(graph, "output layer");

    // Connect the network
    inputLayer->GetOutputSlot(0).Connect(pooling1Layer->GetInputSlot(0));
    pooling1Layer->GetOutputSlot(0).Connect(pooling2Layer->GetInputSlot(0));
    pooling2Layer->GetOutputSlot(0).Connect(pooling3Layer->GetInputSlot(0));
    pooling3Layer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));

    // Create the subgraph view for the whole network (the pooling chain)
    return CreateSubgraphViewFrom(CreateInputsFrom(pooling1Layer),
                                  CreateOutputsFrom({pooling3Layer}),
                                  {pooling1Layer,
                                   pooling2Layer,
                                   pooling3Layer});
}
311 
// Creates a simple subgraph with only one convolution layer, supported by the mock backend
SubgraphView::SubgraphViewPtr BuildFullyOptimizableSubgraph1(Graph& graph, LayerNameToLayerMap& layersInGraph)
{
    // Quantized activations; 1x1 convolution with 16 input and 16 output channels (NHWC)
    const TensorInfo inputInfo ({ 1, 16, 16, 16 }, DataType::QAsymmU8, 1.0f, 0);
    const TensorInfo outputInfo({ 1, 16, 16, 16 }, DataType::QAsymmU8, 1.0f, 0);
    TensorInfo weightInfo({ 16, 1, 1, 16 }, DataType::QAsymmU8, 0.9f, 0);
    TensorInfo biasInfo ({ 1, 1, 1, 16 }, DataType::Signed32, 0.9f, 0);

    // Weights and bias are graph constants (fed in via ConstantLayers below)
    weightInfo.SetConstant(true);
    biasInfo.SetConstant(true);

    Convolution2dDescriptor convolutionDescriptor;
    convolutionDescriptor.m_StrideX = 1;
    convolutionDescriptor.m_StrideY = 1;
    convolutionDescriptor.m_BiasEnabled = true;
    convolutionDescriptor.m_DataLayout = DataLayout::NHWC;

    // Backing memory for the constant tensors (sized in floats; presumably chosen so
    // the byte count covers the quantized tensor infos — confirm against ConstTensor)
    std::vector<float> weightsVector(64);
    ConstTensor constWeightsTensor(weightInfo, weightsVector);

    std::vector<float> biasVector(16);
    ConstTensor constBiasTensor(biasInfo, biasVector);

    // Construct the graph
    Layer* const inputLayer = AddInputLayer(graph, "input layer", inputInfo);
    Convolution2dLayer* const convLayer = AddConvolutionLayer(graph, layersInGraph, convolutionDescriptor,
                                                              "conv layer", weightInfo, biasInfo, outputInfo);

    // NOTE(review): the constant layers' output slots are given outputInfo rather than
    // weightInfo/biasInfo — presumably irrelevant to these subgraph-structure tests; confirm.
    ConstantLayer* const weightsLayer =
        AddConstantLayer(graph, layersInGraph, "Weights Layer", constWeightsTensor, outputInfo);
    ConstantLayer* const biasLayer = AddConstantLayer(graph, layersInGraph, "Bias Layer", constBiasTensor, outputInfo);

    Layer* const outputLayer = AddOutputLayer(graph, "output layer");

    // Connect the network: input feeds slot 0, constants feed slots 1 (weights) and 2 (bias)
    inputLayer->GetOutputSlot(0).Connect(convLayer->GetInputSlot(0));
    weightsLayer->GetOutputSlot(0).Connect(convLayer->GetInputSlot(1));
    biasLayer->GetOutputSlot(0).Connect(convLayer->GetInputSlot(2));
    convLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));

    // Slots 1/2 are fed by constants inside the subgraph, so they are not subgraph inputs
    std::vector<unsigned int> ignoreSlots = {1, 2};
    // Create the subgraph view for the whole network
    return CreateSubgraphViewFrom(CreateInputsFrom(convLayer, ignoreSlots),
                                  CreateOutputsFrom({convLayer}),
                                  {convLayer, weightsLayer, biasLayer});
}
358 
// Creates a subgraph with five convolutions layers, all supported by the mock backend
SubgraphView::SubgraphViewPtr BuildFullyOptimizableSubgraph2(Graph& graph, LayerNameToLayerMap& layersInGraph)
{
    // All five convolutions share the same tensor infos and descriptor
    const TensorInfo inputInfo ({ 1, 16, 16, 16 }, DataType::QAsymmU8, 1.0f, 0);
    const TensorInfo outputInfo({ 1, 16, 16, 16 }, DataType::QAsymmU8, 1.0f, 0);
    TensorInfo weightInfo({ 16, 1, 1, 16 }, DataType::QAsymmU8, 0.9f, 0);
    TensorInfo biasInfo ({ 1, 1, 1, 16 }, DataType::Signed32, 0.9f, 0);

    weightInfo.SetConstant(true);
    biasInfo.SetConstant(true);

    // Backing memory for the constant tensors, shared by every conv's constants
    std::vector<float> weightsVector(64);
    ConstTensor constWeightsTensor(weightInfo, weightsVector);

    std::vector<float> biasVector(16);
    ConstTensor constBiasTensor(biasInfo, biasVector);

    Convolution2dDescriptor convolutionDescriptor;
    convolutionDescriptor.m_StrideX = 1;
    convolutionDescriptor.m_StrideY = 1;
    convolutionDescriptor.m_BiasEnabled = true;
    convolutionDescriptor.m_DataLayout = DataLayout::NHWC;

    // Construct the graph: a chain of five convolutions, each with its own
    // weight/bias constant layers
    Layer* const inputLayer = AddInputLayer(graph, "input layer", inputInfo);
    Convolution2dLayer* const conv1Layer = AddConvolutionLayer(graph, layersInGraph, convolutionDescriptor,
                                                               "conv1 layer", weightInfo, biasInfo, outputInfo);
    ConstantLayer* const weightsLayer1 =
        AddConstantLayer(graph, layersInGraph, "Weights Layer 1", constWeightsTensor, outputInfo);
    ConstantLayer* const biasLayer1 =
        AddConstantLayer(graph, layersInGraph, "Bias Layer 1", constBiasTensor, outputInfo);

    Convolution2dLayer* const conv2Layer = AddConvolutionLayer(graph, layersInGraph, convolutionDescriptor,
                                                               "conv2 layer", weightInfo, biasInfo, outputInfo);
    ConstantLayer* const weightsLayer2 =
        AddConstantLayer(graph, layersInGraph, "Weights Layer 2", constWeightsTensor, outputInfo);
    ConstantLayer* const biasLayer2 =
        AddConstantLayer(graph, layersInGraph, "Bias Layer 2", constBiasTensor, outputInfo);

    Convolution2dLayer* const conv3Layer = AddConvolutionLayer(graph, layersInGraph, convolutionDescriptor,
                                                               "conv3 layer", weightInfo, biasInfo, outputInfo);
    ConstantLayer* const weightsLayer3 =
        AddConstantLayer(graph, layersInGraph, "Weights Layer 3", constWeightsTensor, outputInfo);
    ConstantLayer* const biasLayer3 =
        AddConstantLayer(graph, layersInGraph, "Bias Layer 3", constBiasTensor, outputInfo);

    Convolution2dLayer* const conv4Layer = AddConvolutionLayer(graph, layersInGraph, convolutionDescriptor,
                                                               "conv4 layer", weightInfo, biasInfo, outputInfo);
    ConstantLayer* const weightsLayer4 =
        AddConstantLayer(graph, layersInGraph, "Weights Layer 4", constWeightsTensor, outputInfo);
    ConstantLayer* const biasLayer4 =
        AddConstantLayer(graph, layersInGraph, "Bias Layer 4", constBiasTensor, outputInfo);

    Convolution2dLayer* const conv5Layer = AddConvolutionLayer(graph, layersInGraph, convolutionDescriptor,
                                                               "conv5 layer", weightInfo, biasInfo, outputInfo);
    ConstantLayer* const weightsLayer5 =
        AddConstantLayer(graph, layersInGraph, "Weights Layer 5", constWeightsTensor, outputInfo);
    ConstantLayer* const biasLayer5 =
        AddConstantLayer(graph, layersInGraph, "Bias Layer 5", constBiasTensor, outputInfo);

    Layer* const outputLayer = AddOutputLayer(graph, "output layer");

    // Connect the network: input -> conv1 -> conv2 -> conv3 -> conv4 -> conv5 -> output,
    // with each conv's slots 1/2 fed by its weight/bias constants
    inputLayer->GetOutputSlot(0).Connect(conv1Layer->GetInputSlot(0));
    weightsLayer1->GetOutputSlot(0).Connect(conv1Layer->GetInputSlot(1));
    biasLayer1->GetOutputSlot(0).Connect(conv1Layer->GetInputSlot(2));

    conv1Layer->GetOutputSlot(0).Connect(conv2Layer->GetInputSlot(0));
    weightsLayer2->GetOutputSlot(0).Connect(conv2Layer->GetInputSlot(1));
    biasLayer2->GetOutputSlot(0).Connect(conv2Layer->GetInputSlot(2));

    conv2Layer->GetOutputSlot(0).Connect(conv3Layer->GetInputSlot(0));
    weightsLayer3->GetOutputSlot(0).Connect(conv3Layer->GetInputSlot(1));
    biasLayer3->GetOutputSlot(0).Connect(conv3Layer->GetInputSlot(2));

    conv3Layer->GetOutputSlot(0).Connect(conv4Layer->GetInputSlot(0));
    weightsLayer4->GetOutputSlot(0).Connect(conv4Layer->GetInputSlot(1));
    biasLayer4->GetOutputSlot(0).Connect(conv4Layer->GetInputSlot(2));

    conv4Layer->GetOutputSlot(0).Connect(conv5Layer->GetInputSlot(0));
    weightsLayer5->GetOutputSlot(0).Connect(conv5Layer->GetInputSlot(1));
    biasLayer5->GetOutputSlot(0).Connect(conv5Layer->GetInputSlot(2));

    conv5Layer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));

    // Weight/bias slots are internal to the subgraph, so they are not subgraph inputs
    std::vector<unsigned int> ignoreSlots = {1, 2};
    // Create the subgraph view for the whole network
    return CreateSubgraphViewFrom(CreateInputsFrom(conv1Layer, ignoreSlots),
                                  CreateOutputsFrom({ conv5Layer }),
                                  { weightsLayer1,
                                    biasLayer1,
                                    conv1Layer,
                                    weightsLayer2,
                                    biasLayer2,
                                    conv2Layer,
                                    weightsLayer3,
                                    biasLayer3,
                                    conv3Layer,
                                    weightsLayer4,
                                    biasLayer4,
                                    conv4Layer,
                                    weightsLayer5,
                                    biasLayer5,
                                    conv5Layer });
}
466 
467 // Creates a subgraph with both supported and unsupported layers
468 // (only convolutions are unsupported by the mock backend)
469 SubgraphView::SubgraphViewPtr BuildPartiallySupportedSubgraph(Graph& graph, LayerNameToLayerMap& layersInGraph)
470 {
471  const TensorInfo inputInfo ({ 1, 16, 16, 16 }, DataType::QAsymmU8, 1.0f, 0);
472  const TensorInfo outputInfo({ 1, 16, 16, 16 }, DataType::QAsymmU8, 1.0f, 0);
473  TensorInfo weightInfo({ 16, 1, 1, 16 }, DataType::QAsymmU8, 0.9f, 0);
474  TensorInfo biasInfo ({ 1, 1, 1, 16 }, DataType::Signed32, 0.9f, 0);
475 
476  weightInfo.SetConstant(true);
477  biasInfo.SetConstant(true);
478 
479  std::vector<float> weightsVector(64);
480  ConstTensor constWeightsTensor(weightInfo, weightsVector);
481 
482  std::vector<float> biasVector(16);
483  ConstTensor constBiasTensor(biasInfo, biasVector);
484 
485  Convolution2dDescriptor convolutionDescriptor;
486  convolutionDescriptor.m_StrideX = 1;
487  convolutionDescriptor.m_StrideY = 1;
488  convolutionDescriptor.m_BiasEnabled = true;
489  convolutionDescriptor.m_DataLayout = DataLayout::NHWC;
490 
491  Pooling2dDescriptor poolingDescriptor;
492  poolingDescriptor.m_PoolType = armnn::PoolingAlgorithm::Average;
493  poolingDescriptor.m_PoolWidth = 2;
494  poolingDescriptor.m_PoolHeight = 2;
495  poolingDescriptor.m_StrideX = 2;
496  poolingDescriptor.m_StrideY = 2;
497  poolingDescriptor.m_PadLeft = 1;
498  poolingDescriptor.m_PadRight = 1;
499  poolingDescriptor.m_PadTop = 1;
500  poolingDescriptor.m_PadBottom = 1;
502  poolingDescriptor.m_DataLayout = DataLayout::NHWC;
503 
504  // Construct the graph
505  Layer* const inputLayer = AddInputLayer(graph, "input layer", inputInfo);
506  ConstantLayer* const weightsLayer1 =
507  AddConstantLayer(graph, layersInGraph, "Weights Layer 1", constWeightsTensor, outputInfo);
508 
509  ConstantLayer* const biasLayer1 =
510  AddConstantLayer(graph, layersInGraph, "Bias Layer 1", constBiasTensor, outputInfo);
511 
512  Convolution2dLayer* const conv1Layer = AddConvolutionLayer(graph, layersInGraph, convolutionDescriptor,
513  "conv1 layer", weightInfo, biasInfo, outputInfo);
514  Pooling2dLayer* const pooling1Layer = AddPoolingLayer(graph, layersInGraph, poolingDescriptor,
515  "pooling1 layer", outputInfo);
516  Pooling2dLayer* const pooling2Layer = AddPoolingLayer(graph, layersInGraph, poolingDescriptor,
517  "pooling2 layer", outputInfo);
518 
519  ConstantLayer* const weightsLayer2 =
520  AddConstantLayer(graph, layersInGraph, "Weights Layer 2", constWeightsTensor, outputInfo);
521 
522  ConstantLayer* const biasLayer2 =
523  AddConstantLayer(graph, layersInGraph, "Bias Layer 2", constBiasTensor, outputInfo);
524 
525  Convolution2dLayer* const conv2Layer = AddConvolutionLayer(graph, layersInGraph, convolutionDescriptor,
526  "conv2 layer", weightInfo, biasInfo, outputInfo);
527  Pooling2dLayer* const pooling3Layer = AddPoolingLayer(graph, layersInGraph, poolingDescriptor,
528  "pooling3 layer", outputInfo);
529  Layer* const outputLayer = AddOutputLayer(graph, "output layer");
530 
531  // Connect the network
532  inputLayer->GetOutputSlot(0).Connect(conv1Layer->GetInputSlot(0));
533  weightsLayer1->GetOutputSlot(0).Connect(conv1Layer->GetInputSlot(1));
534  biasLayer1->GetOutputSlot(0).Connect(conv1Layer->GetInputSlot(2));
535  conv1Layer->GetOutputSlot(0).Connect(pooling1Layer->GetInputSlot(0));
536  pooling1Layer->GetOutputSlot(0).Connect(pooling2Layer->GetInputSlot(0));
537  pooling2Layer->GetOutputSlot(0).Connect(conv2Layer->GetInputSlot(0));
538  weightsLayer2->GetOutputSlot(0).Connect(conv2Layer->GetInputSlot(1));
539  biasLayer2->GetOutputSlot(0).Connect(conv2Layer->GetInputSlot(2));
540  conv2Layer->GetOutputSlot(0).Connect(pooling3Layer->GetInputSlot(0));
541  pooling3Layer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
542 
543  std::vector<unsigned int> ignoreSlots = {1, 2};
544  // Create the subgraph view for the whole network
545  return CreateSubgraphViewFrom(CreateInputsFrom(conv1Layer, ignoreSlots),
546  CreateOutputsFrom({pooling3Layer}),
547  {weightsLayer1,
548  biasLayer1,
549  conv1Layer,
550  pooling1Layer,
551  pooling2Layer,
552  weightsLayer2,
553  biasLayer2,
554  conv2Layer,
555  pooling3Layer});
556 }
557 
// Creates a subgraph with only unoptimizable layers ("unoptimizable" is added to the layer's name)
// NOTE(review): the "unoptimizable" substring in the layer names appears to be what the
// mock backend keys off — the behavior is defined in MockBackend, not visible here; confirm.
SubgraphView::SubgraphViewPtr BuildFullyUnoptimizableSubgraph1(Graph& graph, LayerNameToLayerMap& layersInGraph)
{
    const TensorInfo inputInfo ({ 1, 16, 16, 16 }, DataType::QAsymmU8, 1.0f, 0);
    const TensorInfo outputInfo({ 1, 16, 16, 16 }, DataType::QAsymmU8, 1.0f, 0);
    TensorInfo weightInfo({ 16, 1, 1, 16 }, DataType::QAsymmU8, 0.9f, 0);
    TensorInfo biasInfo ({ 1, 1, 1, 16 }, DataType::Signed32, 0.9f, 0);

    weightInfo.SetConstant(true);
    biasInfo.SetConstant(true);

    // Backing memory for the constant tensors
    std::vector<float> weightsVector(64);
    ConstTensor constWeightsTensor(weightInfo, weightsVector);

    std::vector<float> biasVector(16);
    ConstTensor constBiasTensor(biasInfo, biasVector);
    Convolution2dDescriptor convolutionDescriptor;
    convolutionDescriptor.m_StrideX = 1;
    convolutionDescriptor.m_StrideY = 1;
    convolutionDescriptor.m_BiasEnabled = true;
    convolutionDescriptor.m_DataLayout = DataLayout::NHWC;

    // Construct the graph: a single convolution, flagged unoptimizable via its name
    Layer* const inputLayer = AddInputLayer(graph, "input layer", inputInfo);

    ConstantLayer* const weightsLayer =
        AddConstantLayer(graph, layersInGraph, "Weights Layer unoptimizable", constWeightsTensor, outputInfo);

    ConstantLayer* const biasLayer =
        AddConstantLayer(graph, layersInGraph, "Bias Layer unoptimizable", constBiasTensor, outputInfo);

    Convolution2dLayer* const convLayer = AddConvolutionLayer(graph, layersInGraph, convolutionDescriptor,
                                                              "conv layer unoptimizable", weightInfo, biasInfo,
                                                              outputInfo);
    Layer* const outputLayer = AddOutputLayer(graph, "output layer");

    // Connect the network (slots 1/2 fed by the weight/bias constants)
    inputLayer->GetOutputSlot(0).Connect(convLayer->GetInputSlot(0));
    weightsLayer->GetOutputSlot(0).Connect(convLayer->GetInputSlot(1));
    biasLayer->GetOutputSlot(0).Connect(convLayer->GetInputSlot(2));
    convLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));

    // Weight/bias slots are internal to the subgraph, so they are not subgraph inputs
    std::vector<unsigned int> ignoreSlots = {1, 2};
    // Create the subgraph view for the whole network
    return CreateSubgraphViewFrom(CreateInputsFrom(convLayer, ignoreSlots),
                                  CreateOutputsFrom({convLayer}),
                                  {convLayer, weightsLayer, biasLayer});
}
606 
// Creates a subgraph with some unoptimizable layers ("unoptimizable" is added to the layer's name)
SubgraphView::SubgraphViewPtr BuildPartiallyOptimizableSubgraph1(Graph& graph, LayerNameToLayerMap& layersInGraph)
{
    const TensorInfo inputInfo ({ 1, 16, 16, 16 }, DataType::QAsymmU8, 1.0f, 0);
    const TensorInfo outputInfo({ 1, 16, 16, 16 }, DataType::QAsymmU8, 1.0f, 0);
    TensorInfo weightInfo({ 16, 1, 1, 16 }, DataType::QAsymmU8, 0.9f, 0);
    TensorInfo biasInfo ({ 1, 1, 1, 16 }, DataType::Signed32, 0.9f, 0);

    weightInfo.SetConstant(true);
    biasInfo.SetConstant(true);

    // Backing memory for the constant tensors, shared by all five convolutions' constants
    std::vector<float> weightsVector(64);
    ConstTensor constWeightsTensor(weightInfo, weightsVector);

    std::vector<float> biasVector(16);
    ConstTensor constBiasTensor(biasInfo, biasVector);

    Convolution2dDescriptor convolutionDescriptor;
    convolutionDescriptor.m_StrideX = 1;
    convolutionDescriptor.m_StrideY = 1;
    convolutionDescriptor.m_BiasEnabled = true;
    convolutionDescriptor.m_DataLayout = DataLayout::NHWC;

    // Construct the graph: five chained convolutions; convs 2 and 4 (and their
    // constants) carry "unoptimizable" in their names
    Layer* const inputLayer = AddInputLayer(graph, "input layer", inputInfo);

    ConstantLayer* const weightsLayer1 =
        AddConstantLayer(graph, layersInGraph, "Weights Layer 1", constWeightsTensor, outputInfo);
    ConstantLayer* const biasLayer1 =
        AddConstantLayer(graph, layersInGraph, "Bias Layer 1", constBiasTensor, outputInfo);
    ConstantLayer* const weightsLayer2 =
        AddConstantLayer(graph, layersInGraph, "Weights Layer 2 unoptimizable", constWeightsTensor, outputInfo);
    ConstantLayer* const biasLayer2 =
        AddConstantLayer(graph, layersInGraph, "Bias Layer 2 unoptimizable", constBiasTensor, outputInfo);
    ConstantLayer* const weightsLayer3 =
        AddConstantLayer(graph, layersInGraph, "Weights Layer 3", constWeightsTensor, outputInfo);
    ConstantLayer* const biasLayer3 =
        AddConstantLayer(graph, layersInGraph, "Bias Layer 3", constBiasTensor, outputInfo);
    ConstantLayer* const weightsLayer4 =
        AddConstantLayer(graph, layersInGraph, "Weights Layer 4 unoptimizable", constWeightsTensor, outputInfo);
    ConstantLayer* const biasLayer4 =
        AddConstantLayer(graph, layersInGraph, "Bias Layer 4 unoptimizable", constBiasTensor, outputInfo);
    ConstantLayer* const weightsLayer5 =
        AddConstantLayer(graph, layersInGraph, "Weights Layer 5", constWeightsTensor, outputInfo);
    ConstantLayer* const biasLayer5 =
        AddConstantLayer(graph, layersInGraph, "Bias Layer 5", constBiasTensor, outputInfo);

    Convolution2dLayer* const conv1Layer = AddConvolutionLayer(graph, layersInGraph, convolutionDescriptor,
                                                               "conv1 layer", weightInfo, biasInfo, outputInfo);
    Convolution2dLayer* const conv2Layer = AddConvolutionLayer(graph,
                                                               layersInGraph,
                                                               convolutionDescriptor,
                                                               "conv2 layer unoptimizable",
                                                               weightInfo,
                                                               biasInfo,
                                                               outputInfo);
    Convolution2dLayer* const conv3Layer = AddConvolutionLayer(graph, layersInGraph, convolutionDescriptor,
                                                               "conv3 layer", weightInfo, biasInfo, outputInfo);
    Convolution2dLayer* const conv4Layer = AddConvolutionLayer(graph,
                                                               layersInGraph,
                                                               convolutionDescriptor,
                                                               "conv4 layer unoptimizable",
                                                               weightInfo,
                                                               biasInfo,
                                                               outputInfo);
    Convolution2dLayer* const conv5Layer = AddConvolutionLayer(graph, layersInGraph, convolutionDescriptor,
                                                               "conv5 layer", weightInfo, biasInfo, outputInfo);

    Layer* const outputLayer = AddOutputLayer(graph, "output layer");

    // Connect the network: input -> conv1 -> ... -> conv5 -> output, with each conv's
    // slots 1/2 fed by its weight/bias constants
    inputLayer->GetOutputSlot(0).Connect(conv1Layer->GetInputSlot(0));
    weightsLayer1->GetOutputSlot(0).Connect(conv1Layer->GetInputSlot(1));
    biasLayer1->GetOutputSlot(0).Connect(conv1Layer->GetInputSlot(2));

    conv1Layer->GetOutputSlot(0).Connect(conv2Layer->GetInputSlot(0));
    weightsLayer2->GetOutputSlot(0).Connect(conv2Layer->GetInputSlot(1));
    biasLayer2->GetOutputSlot(0).Connect(conv2Layer->GetInputSlot(2));

    conv2Layer->GetOutputSlot(0).Connect(conv3Layer->GetInputSlot(0));
    weightsLayer3->GetOutputSlot(0).Connect(conv3Layer->GetInputSlot(1));
    biasLayer3->GetOutputSlot(0).Connect(conv3Layer->GetInputSlot(2));

    conv3Layer->GetOutputSlot(0).Connect(conv4Layer->GetInputSlot(0));
    weightsLayer4->GetOutputSlot(0).Connect(conv4Layer->GetInputSlot(1));
    biasLayer4->GetOutputSlot(0).Connect(conv4Layer->GetInputSlot(2));

    conv4Layer->GetOutputSlot(0).Connect(conv5Layer->GetInputSlot(0));
    weightsLayer5->GetOutputSlot(0).Connect(conv5Layer->GetInputSlot(1));
    biasLayer5->GetOutputSlot(0).Connect(conv5Layer->GetInputSlot(2));

    conv5Layer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));

    // Weight/bias slots are internal to the subgraph, so they are not subgraph inputs
    std::vector<unsigned int> ignoreSlots = {1, 2};
    // Create the subgraph view for the whole network
    return CreateSubgraphViewFrom(CreateInputsFrom(conv1Layer, ignoreSlots),
                                  CreateOutputsFrom({conv5Layer}),
                                  {weightsLayer1,
                                   biasLayer1,
                                   conv1Layer,
                                   weightsLayer2,
                                   biasLayer2,
                                   conv2Layer,
                                   weightsLayer3,
                                   biasLayer3,
                                   conv3Layer,
                                   weightsLayer4,
                                   biasLayer4,
                                   conv4Layer,
                                   weightsLayer5,
                                   biasLayer5,
                                   conv5Layer});
}
720 
721 // Creates a subgraph with some input unoptimizable layers ("unoptimizable" is added to the layer's name),
722 // this is meant to test input slots coming from different layers
// Builds a two-branch graph in which one branch is tagged "unoptimizable":
//
//   input1 -> conv1 -------------------------\
//                                             add -> output
//   input2 -> conv2 (unoptimizable) -> conv3 /
//
// Each convolution reads its weights (slot 1) and bias (slot 2) from dedicated
// ConstantLayers. The returned subgraph view covers all constant, convolution
// and addition layers; its input slots come from two different layers (conv1
// and conv2), which is the property this builder exists to exercise.
SubgraphView::SubgraphViewPtr BuildPartiallyOptimizableSubgraph2(Graph& graph, LayerNameToLayerMap& layersInGraph)
{
    // Common tensor metadata shared by all layers in the subgraph.
    const TensorInfo inputInfo ({ 1, 16, 16, 16 }, DataType::QAsymmU8, 1.0f, 0);
    const TensorInfo outputInfo({ 1, 16, 16, 16 }, DataType::QAsymmU8, 1.0f, 0);
    TensorInfo weightInfo({ 16, 1, 1, 16 }, DataType::QAsymmU8, 0.9f, 0);
    TensorInfo biasInfo ({ 1, 1, 1, 16 }, DataType::Signed32, 0.9f, 0);

    // Weights/bias infos must be flagged constant before wrapping in ConstTensor.
    weightInfo.SetConstant(true);
    biasInfo.SetConstant(true);

    // Zero-initialised payloads; actual values are irrelevant to these tests.
    std::vector<float> weightsVector(64);
    ConstTensor constWeightsTensor(weightInfo, weightsVector);

    std::vector<float> biasVector(16);
    ConstTensor constBiasTensor(biasInfo, biasVector);

    Convolution2dDescriptor convolutionDescriptor;
    convolutionDescriptor.m_StrideX = 1;
    convolutionDescriptor.m_StrideY = 1;
    convolutionDescriptor.m_BiasEnabled = true;
    convolutionDescriptor.m_DataLayout = DataLayout::NHWC;

    // Construct the graph. Note: "unoptimizable" in a layer name is the marker
    // the mock backend keys on to refuse optimizing that layer.
    Layer* const input1Layer = AddInputLayer(graph, "input1 layer", inputInfo, 0);
    Layer* const input2Layer = AddInputLayer(graph, "input2 layer", inputInfo, 1);

    ConstantLayer* const weightsLayer1 =
        AddConstantLayer(graph, layersInGraph, "Weights Layer 1", constWeightsTensor, outputInfo);
    ConstantLayer* const biasLayer1 =
        AddConstantLayer(graph, layersInGraph, "Bias Layer 1", constBiasTensor, outputInfo);
    ConstantLayer* const weightsLayer2 =
        AddConstantLayer(graph, layersInGraph, "Weights Layer 2 unoptimizable", constWeightsTensor, outputInfo);
    ConstantLayer* const biasLayer2 =
        AddConstantLayer(graph, layersInGraph, "Bias Layer 2 unoptimizable", constBiasTensor, outputInfo);
    ConstantLayer* const weightsLayer3 =
        AddConstantLayer(graph, layersInGraph, "Weights Layer 3", constWeightsTensor, outputInfo);
    ConstantLayer* const biasLayer3 =
        AddConstantLayer(graph, layersInGraph, "Bias Layer 3", constBiasTensor, outputInfo);

    Convolution2dLayer* const conv1Layer = AddConvolutionLayer(graph, layersInGraph, convolutionDescriptor,
                                                               "conv1 layer", weightInfo, biasInfo, outputInfo);
    Convolution2dLayer* const conv2Layer = AddConvolutionLayer(graph, layersInGraph, convolutionDescriptor,
                                                               "conv2 layer unoptimizable", weightInfo, biasInfo,
                                                               outputInfo);
    Convolution2dLayer* const conv3Layer = AddConvolutionLayer(graph, layersInGraph, convolutionDescriptor,
                                                               "conv3 layer", weightInfo, biasInfo, outputInfo);
    AdditionLayer* const addLayer = AddAdditionaLayer(graph, layersInGraph, "add layer", outputInfo);
    Layer* const outputLayer = AddOutputLayer(graph, "output layer");

    // Connect the network: branch 1 feeds add slot 0, branch 2 feeds add slot 1.
    input1Layer->GetOutputSlot(0).Connect(conv1Layer->GetInputSlot(0));
    weightsLayer1->GetOutputSlot(0).Connect(conv1Layer->GetInputSlot(1));
    biasLayer1->GetOutputSlot(0).Connect(conv1Layer->GetInputSlot(2));
    conv1Layer->GetOutputSlot(0).Connect(addLayer->GetInputSlot(0));

    input2Layer->GetOutputSlot(0).Connect(conv2Layer->GetInputSlot(0));
    weightsLayer2->GetOutputSlot(0).Connect(conv2Layer->GetInputSlot(1));
    biasLayer2->GetOutputSlot(0).Connect(conv2Layer->GetInputSlot(2));
    conv2Layer->GetOutputSlot(0).Connect(conv3Layer->GetInputSlot(0));
    weightsLayer3->GetOutputSlot(0).Connect(conv3Layer->GetInputSlot(1));
    biasLayer3->GetOutputSlot(0).Connect(conv3Layer->GetInputSlot(2));
    conv3Layer->GetOutputSlot(0).Connect(addLayer->GetInputSlot(1));

    addLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));

    // Create the subgraph view for the whole network. Slots 1 and 2 of each
    // convolution are fed by constant layers that live inside the view, so
    // they are excluded from the view's input slots via ignoreSlots.
    std::vector<unsigned int> ignoreSlots = {1, 2};
    return CreateSubgraphViewFrom(CreateInputsFrom({conv1Layer,
                                                    conv2Layer}, ignoreSlots),
                                  CreateOutputsFrom({addLayer}),
                                  { weightsLayer1,
                                    biasLayer1,
                                    weightsLayer2,
                                    biasLayer2,
                                    weightsLayer3,
                                    biasLayer3,
                                    conv1Layer,
                                    conv2Layer,
                                    conv3Layer,
                                    addLayer });
}
804 
// The input subgraph contains only a single unsupported layer (only convolutions are supported by the mock backend)
806 void FullyUnsupporteSubgraphTestImpl1()
807 {
808  Graph graph;
809  LayerNameToLayerMap layersInGraph;
810 
811  // Create an unsupported subgraph
812  SubgraphView::SubgraphViewPtr subgraphPtr = BuildFullyUnsupportedSubgraph1(graph, layersInGraph);
813  CHECK((subgraphPtr != nullptr));
814 
815  const SubgraphView::IInputSlots& subgraphInputSlots = subgraphPtr->GetIInputSlots();
816  const SubgraphView::IOutputSlots& subgraphOutputSlots = subgraphPtr->GetIOutputSlots();
817  const SubgraphView::IConnectableLayers& subgraphLayers = subgraphPtr->GetIConnectableLayers();
818 
819  CHECK(subgraphInputSlots.size() == 1);
820  CHECK(subgraphOutputSlots.size() == 1);
821  CHECK(subgraphLayers.size() == 1);
822 
823  CHECK(Contains(layersInGraph, "pooling layer"));
824 
825  // Create a mock backend object
826  MockBackendInitialiser initialiser; // Register the Mock Backend
827  auto backendObjPtr = CreateBackendObject(MockBackendId());
828  CHECK((backendObjPtr != nullptr));
829 
830  // Optimize the subgraph
831  OptimizationViews optimizationViews;
832 
833  // Check that the optimization is carried out correctly, but no optimization is performed
834  CHECK_NOTHROW(optimizationViews = backendObjPtr->OptimizeSubgraphView(*subgraphPtr));
835 
836  // =======================================================================
837  // The expected results are:
838  // - No substitutions
839  // - Exactly one failed subgraph, corresponding to the whole original one
840  // - No untouched subgraphs
841  // =======================================================================
842 
843  // -----------------------
844  // Check the substitutions
845  // -----------------------
846 
847  CHECK(optimizationViews.GetSubstitutions().empty());
848 
849  // --------------------------
850  // Check the failed subgraphs
851  // --------------------------
852 
853  const OptimizationViews::Subgraphs& failedSubgraphs = optimizationViews.GetFailedSubgraphs();
854  CHECK(failedSubgraphs.size() == 1);
855 
856  CheckFailedSubgraph(failedSubgraphs.at(0),
857  { subgraphInputSlots.size(), subgraphOutputSlots.size(), subgraphLayers.size() },
858  subgraphInputSlots,
859  subgraphOutputSlots,
860  subgraphLayers);
861 
862  // -----------------------------
863  // Check the untouched subgraphs
864  // -----------------------------
865 
866  CHECK(optimizationViews.GetUntouchedSubgraphs().empty());
867 }
868 
// The input subgraph contains only unsupported layers (only convolutions are supported by the mock backend)
// Runs the mock backend over a three-layer subgraph (pooling1..3) in which
// every layer is unsupported: expects exactly one failed subgraph covering the
// whole original view, no substitutions and no untouched subgraphs.
void FullyUnsupporteSubgraphTestImpl2()
{
    Graph graph;
    LayerNameToLayerMap layersInGraph;

    // Create an unsupported subgraph
    SubgraphView::SubgraphViewPtr subgraphPtr = BuildFullyUnsupportedSubgraph2(graph, layersInGraph);
    CHECK((subgraphPtr != nullptr));

    const SubgraphView::IInputSlots& subgraphInputSlots = subgraphPtr->GetIInputSlots();
    const SubgraphView::IOutputSlots& subgraphOutputSlots = subgraphPtr->GetIOutputSlots();
    const SubgraphView::IConnectableLayers& subgraphLayers = subgraphPtr->GetIConnectableLayers();

    // Sanity-check the shape of the view produced by the builder.
    CHECK(subgraphInputSlots.size() == 1);
    CHECK(subgraphOutputSlots.size() == 1);
    CHECK(subgraphLayers.size() == 3);

    CHECK(Contains(layersInGraph, "pooling1 layer"));
    CHECK(Contains(layersInGraph, "pooling2 layer"));
    CHECK(Contains(layersInGraph, "pooling3 layer"));

    // Create a mock backend object
    MockBackendInitialiser initialiser; // Register the Mock Backend
    auto backendObjPtr = CreateBackendObject(MockBackendId());
    CHECK((backendObjPtr != nullptr));

    // Optimize the subgraph
    OptimizationViews optimizationViews;

    // Check that the optimization is carried out correctly, but no optimization is performed
    CHECK_NOTHROW(optimizationViews = backendObjPtr->OptimizeSubgraphView(*subgraphPtr));

    // =======================================================================
    // The expected results are:
    //  - No substitutions
    //  - Exactly one failed subgraph, corresponding to the whole original one
    //  - No untouched subgraphs
    // =======================================================================

    // -----------------------
    // Check the substitutions
    // -----------------------

    CHECK(optimizationViews.GetSubstitutions().empty());

    // --------------------------
    // Check the failed subgraphs
    // --------------------------

    const OptimizationViews::Subgraphs& failedSubgraphs = optimizationViews.GetFailedSubgraphs();
    CHECK(failedSubgraphs.size() == 1);

    // The failed subgraph must contain exactly the three pooling layers.
    std::list<IConnectableLayer*> expectedFailedLayers{ layersInGraph.at("pooling1 layer"),
                                                        layersInGraph.at("pooling2 layer"),
                                                        layersInGraph.at("pooling3 layer") };

    const SubgraphView& failedSubgraph = failedSubgraphs.at(0);

    CheckFailedSubgraph(failedSubgraph,
                        { subgraphInputSlots.size(), subgraphOutputSlots.size(), subgraphLayers.size() },
                        subgraphInputSlots,
                        subgraphOutputSlots,
                        subgraphLayers);

    const SubgraphView::IConnectableLayers& failedSubgraphLayers = failedSubgraph.GetIConnectableLayers();

    // NOTE(review): these three checks add equal offsets to the same front()
    // pointers, so each is equivalent to comparing front() alone; presumably
    // an element-by-element comparison of the two lists was intended — confirm.
    CHECK_EQ(failedSubgraphLayers.front() + 0, expectedFailedLayers.front() + 0);
    CHECK_EQ(failedSubgraphLayers.front() + 1, expectedFailedLayers.front() + 1);
    CHECK_EQ(failedSubgraphLayers.front() + 2, expectedFailedLayers.front() + 2);

    // -----------------------------
    // Check the untouched subgraphs
    // -----------------------------

    CHECK(optimizationViews.GetUntouchedSubgraphs().empty());
}
946 
947 // A simple case with only one layer (convolution) to optimize, supported by the mock backend
948 void FullyOptimizableSubgraphTestImpl1()
949 {
950  Graph graph;
951  LayerNameToLayerMap layersInGraph;
952 
953  // Create a fully optimizable subgraph
954  SubgraphViewSelector::SubgraphViewPtr subgraphPtr = BuildFullyOptimizableSubgraph1(graph, layersInGraph);
955  CHECK((subgraphPtr != nullptr));
956 
957  const SubgraphView::IInputSlots& subgraphInputSlots = subgraphPtr->GetIInputSlots();
958  const SubgraphView::IOutputSlots& subgraphOutputSlots = subgraphPtr->GetIOutputSlots();
959  const SubgraphView::IConnectableLayers& subgraphLayers = subgraphPtr->GetIConnectableLayers();
960 
961  CHECK(subgraphInputSlots.size() == 1);
962  CHECK(subgraphOutputSlots.size() == 1);
963  CHECK(subgraphLayers.size() == 3);
964 
965  CHECK(Contains(layersInGraph, "conv layer"));
966  CHECK(Contains(layersInGraph, "Weights Layer"));
967  CHECK(Contains(layersInGraph, "Bias Layer"));
968 
969  // Create a mock backend object
970  MockBackendInitialiser initialiser; // Register the Mock Backend
971  auto backendObjPtr = CreateBackendObject(MockBackendId());
972  CHECK((backendObjPtr != nullptr));
973 
974  // Optimize the subgraph
975  OptimizationViews optimizationViews;
976 
977  // Check that the optimization is carried out correctly
978  CHECK_NOTHROW(optimizationViews = backendObjPtr->OptimizeSubgraphView(*subgraphPtr));
979 
980  // ===========================================================================================
981  // The expected results are:
982  // - Exactly one substitution, mapping the whole input subgraph to a new replacement subgraph
983  // - No failed subgraphs
984  // - No untouched subgraphs
985  // ===========================================================================================
986 
987  // -----------------------
988  // Check the substitutions
989  // -----------------------
990 
991  const OptimizationViews::Substitutions& substitutions = optimizationViews.GetSubstitutions();
992  CHECK(substitutions.size() == 1);
993 
994  CheckSubstitution(substitutions.at(0),
995  { subgraphInputSlots.size(), subgraphOutputSlots.size(), subgraphLayers.size() },
996  { subgraphInputSlots.size(), subgraphOutputSlots.size(), 1 },
997  subgraphInputSlots,
998  subgraphOutputSlots,
999  subgraphLayers);
1000 
1001  // --------------------------
1002  // Check the failed subgraphs
1003  // --------------------------
1004 
1005  CHECK(optimizationViews.GetFailedSubgraphs().empty());
1006 
1007  // -----------------------------
1008  // Check the untouched subgraphs
1009  // -----------------------------
1010 
1011  CHECK(optimizationViews.GetUntouchedSubgraphs().empty());
1012 }
1013 
1014 // A case with five layers (all convolutions) to optimize, all supported by the mock backend
1015 void FullyOptimizableSubgraphTestImpl2()
1016 {
1017  Graph graph;
1018  LayerNameToLayerMap layersInGraph;
1019 
1020  // Create a fully optimizable subgraph
1021  SubgraphViewSelector::SubgraphViewPtr subgraphPtr = BuildFullyOptimizableSubgraph2(graph, layersInGraph);
1022  CHECK((subgraphPtr != nullptr));
1023 
1024  const SubgraphView::IInputSlots& subgraphInputSlots = subgraphPtr->GetIInputSlots();
1025  const SubgraphView::IOutputSlots& subgraphOutputSlots = subgraphPtr->GetIOutputSlots();
1026  const SubgraphView::IConnectableLayers& subgraphLayers = subgraphPtr->GetIConnectableLayers();
1027 
1028  CHECK(subgraphInputSlots.size() == 1);
1029  CHECK(subgraphOutputSlots.size() == 1);
1030  CHECK(subgraphPtr->GetIConnectableLayers().size() == 15);
1031 
1032  CHECK(Contains(layersInGraph, "conv1 layer"));
1033  CHECK(Contains(layersInGraph, "conv2 layer"));
1034  CHECK(Contains(layersInGraph, "conv3 layer"));
1035  CHECK(Contains(layersInGraph, "conv4 layer"));
1036  CHECK(Contains(layersInGraph, "conv5 layer"));
1037  CHECK(Contains(layersInGraph, "Weights Layer 1"));
1038  CHECK(Contains(layersInGraph, "Weights Layer 2"));
1039  CHECK(Contains(layersInGraph, "Weights Layer 3"));
1040  CHECK(Contains(layersInGraph, "Weights Layer 4"));
1041  CHECK(Contains(layersInGraph, "Weights Layer 5"));
1042  CHECK(Contains(layersInGraph, "Bias Layer 1"));
1043  CHECK(Contains(layersInGraph, "Bias Layer 2"));
1044  CHECK(Contains(layersInGraph, "Bias Layer 3"));
1045  CHECK(Contains(layersInGraph, "Bias Layer 4"));
1046  CHECK(Contains(layersInGraph, "Bias Layer 5"));
1047 
1048  // Create a mock backend object
1049  MockBackendInitialiser initialiser; // Register the Mock Backend
1050  auto backendObjPtr = CreateBackendObject(MockBackendId());
1051  CHECK((backendObjPtr != nullptr));
1052 
1053  // Optimize the subgraph
1054  OptimizationViews optimizationViews;
1055 
1056  // Check that the optimization is carried out correctly
1057  CHECK_NOTHROW(optimizationViews = backendObjPtr->OptimizeSubgraphView(*subgraphPtr));
1058 
1059  // ===========================================================================================
1060  // The expected results are:
1061  // - Exactly one substitution, mapping the whole input subgraph to a new replacement subgraph
1062  // - No failed subgraphs
1063  // - No untouched subgraphs
1064  // ===========================================================================================
1065 
1066  // -----------------------
1067  // Check the substitutions
1068  // -----------------------
1069 
1070  const OptimizationViews::Substitutions& substitutions = optimizationViews.GetSubstitutions();
1071  CHECK(substitutions.size() == 1);
1072 
1073  std::list<IConnectableLayer*> expectedSubstitutableLayers{
1074  layersInGraph.at("Weights Layer 1"),
1075  layersInGraph.at("Weights Layer 2"),
1076  layersInGraph.at("Weights Layer 3"),
1077  layersInGraph.at("Weights Layer 4"),
1078  layersInGraph.at("Weights Layer 5"),
1079  layersInGraph.at("Bias Layer 1"),
1080  layersInGraph.at("Bias Layer 2"),
1081  layersInGraph.at("Bias Layer 3"),
1082  layersInGraph.at("Bias Layer 4"),
1083  layersInGraph.at("Bias Layer 5"),
1084  layersInGraph.at("conv1 layer"),
1085  layersInGraph.at("conv2 layer"),
1086  layersInGraph.at("conv3 layer"),
1087  layersInGraph.at("conv4 layer"),
1088  layersInGraph.at("conv5 layer")};
1089 
1090  const OptimizationViews::SubstitutionPair& substitution = substitutions.at(0);
1091 
1092  CheckSubstitution(
1093  substitution,
1094  {subgraphInputSlots.size(), subgraphOutputSlots.size(),
1095  subgraphLayers.size()},
1096  {subgraphInputSlots.size(), subgraphOutputSlots.size(), 1},
1097  subgraphInputSlots, subgraphOutputSlots, expectedSubstitutableLayers);
1098 
1099  const SubgraphView::IConnectableLayers& substitutableSubgraphLayers =
1101 
1102  CHECK_EQ(substitutableSubgraphLayers.front() + 0, expectedSubstitutableLayers.front() + 0);
1103  CHECK_EQ(substitutableSubgraphLayers.front() + 1, expectedSubstitutableLayers.front() + 1);
1104  CHECK_EQ(substitutableSubgraphLayers.front() + 2, expectedSubstitutableLayers.front() + 2);
1105  CHECK_EQ(substitutableSubgraphLayers.front() + 3, expectedSubstitutableLayers.front() + 3);
1106  CHECK_EQ(substitutableSubgraphLayers.front() + 4, expectedSubstitutableLayers.front() + 4);
1107 
1108  // --------------------------
1109  // Check the failed subgraphs
1110  // --------------------------
1111 
1112  CHECK(optimizationViews.GetFailedSubgraphs().empty());
1113 
1114  // -----------------------------
1115  // Check the untouched subgraphs
1116  // -----------------------------
1117 
1118  CHECK(optimizationViews.GetUntouchedSubgraphs().empty());
1119 }
1120 
1121 // The input subgraph contaions both supported and unsupported layers
1122 // (but only convolutions are unsupported by the mock backend)
// Runs the mock backend over a mixed subgraph (two supported convolution
// clusters interleaved with unsupported pooling layers): expects exactly two
// substitutions (the conv clusters) and two failed subgraphs (the pooling
// runs), with nothing left untouched.
void PartiallySupportedSubgraphTestImpl()
{
    Graph graph;
    LayerNameToLayerMap layersInGraph;

    // Create a partially supported subgraph
    SubgraphViewSelector::SubgraphViewPtr subgraphPtr = BuildPartiallySupportedSubgraph(graph, layersInGraph);
    CHECK((subgraphPtr != nullptr));

    const SubgraphView::IInputSlots& subgraphInputSlots = subgraphPtr->GetIInputSlots();
    const SubgraphView::IOutputSlots& subgraphOutputSlots = subgraphPtr->GetIOutputSlots();
    const SubgraphView::IConnectableLayers& subgraphLayers = subgraphPtr->GetIConnectableLayers();

    // Sanity-check the shape of the view produced by the builder.
    CHECK(subgraphInputSlots.size() == 1);
    CHECK(subgraphOutputSlots.size() == 1);
    CHECK(subgraphLayers.size() == 9);

    CHECK(Contains(layersInGraph, "Weights Layer 1"));
    CHECK(Contains(layersInGraph, "Bias Layer 1"));
    CHECK(Contains(layersInGraph, "conv1 layer"));
    CHECK(Contains(layersInGraph, "pooling1 layer"));
    CHECK(Contains(layersInGraph, "pooling2 layer"));
    CHECK(Contains(layersInGraph, "Weights Layer 2"));
    CHECK(Contains(layersInGraph, "Bias Layer 2"));
    CHECK(Contains(layersInGraph, "conv2 layer"));
    CHECK(Contains(layersInGraph, "pooling3 layer"));

    // Create a mock backend object
    MockBackendInitialiser initialiser; // Register the Mock Backend
    auto backendObjPtr = CreateBackendObject(MockBackendId());
    CHECK((backendObjPtr != nullptr));

    // Optimize the subgraph
    OptimizationViews optimizationViews;

    // Check that the optimization is carried out correctly
    CHECK_NOTHROW(optimizationViews = backendObjPtr->OptimizeSubgraphView(*subgraphPtr));

    // ========================================================================
    // The expected results are:
    //  - Exactly two substitution, corresponding to the supported layers
    //  - Exactly two failed subgraphs, corresponding to the unsupported layers
    //  - No untouched subgraphs
    // ========================================================================

    // -----------------------
    // Check the substitutions
    // -----------------------

    OptimizationViews::Substitutions substitutions = optimizationViews.GetSubstitutions();
    CHECK(substitutions.size() == 2);
    // Sort into a consistent order (by first layer name) so the expected
    // vectors below can be compared positionally.
    std::sort(substitutions.begin(), substitutions.end(), [](auto s1, auto s2) {
        return strcmp(s1.m_SubstitutableSubgraph.GetIConnectableLayers().front()->GetName(),
                      s2.m_SubstitutableSubgraph.GetIConnectableLayers().front()->GetName()) < 0;
    });

    // Each conv cluster (weights + bias + conv) maps to a one-layer replacement.
    std::vector<ExpectedSubgraphSize> expectedSubstitutableSubgraphSizes{ { 1, 1, 3 },
                                                                          { 1, 1, 3 } };
    std::vector<ExpectedSubgraphSize> expectedReplacementSubgraphSizes{ { 1, 1, 1 },
                                                                        { 1, 1, 1 } };
    // Only data-input slot 0 of each conv is a subgraph input (weights/bias
    // come from constant layers inside the view).
    std::vector<SubgraphView::IInputSlots> expectedSubstitutableInputSlots
    {
        ConvertSlotsToISlots<InputSlot, IInputSlot>(
            {ConvertReferenceTypeToPointerType(layersInGraph.at("conv1 layer")->GetInputSlot(0))}),
        ConvertSlotsToISlots<InputSlot, IInputSlot>(
            {ConvertReferenceTypeToPointerType(layersInGraph.at("conv2 layer")->GetInputSlot(0))})
    };

    std::vector<SubgraphView::IOutputSlots> expectedSubstitutableOutputSlots
    {
        ConvertSlotsToISlots<OutputSlot, IOutputSlot>(
            ConvertReferenceTypeToPointerType(layersInGraph.at("conv1 layer")->GetOutputSlots())),
        ConvertSlotsToISlots<OutputSlot, IOutputSlot>(
            ConvertReferenceTypeToPointerType(layersInGraph.at("conv2 layer")->GetOutputSlots()))
    };
    std::vector<SubgraphView::IConnectableLayers> expectedSubstitutableLayers
    {
        { layersInGraph.at("Weights Layer 1"), layersInGraph.at("Bias Layer 1"), layersInGraph.at("conv1 layer") },
        { layersInGraph.at("Weights Layer 2"), layersInGraph.at("Bias Layer 2"), layersInGraph.at("conv2 layer") }
    };

    for (size_t substitutionIndex = 0; substitutionIndex < substitutions.size(); substitutionIndex++)
    {
        CheckSubstitution(substitutions.at(substitutionIndex),
                          expectedSubstitutableSubgraphSizes.at(substitutionIndex),
                          expectedReplacementSubgraphSizes.at(substitutionIndex),
                          expectedSubstitutableInputSlots.at(substitutionIndex),
                          expectedSubstitutableOutputSlots.at(substitutionIndex),
                          expectedSubstitutableLayers.at(substitutionIndex));
    }

    // --------------------------
    // Check the failed subgraphs
    // --------------------------

    OptimizationViews::Subgraphs failedSubgraphs = optimizationViews.GetFailedSubgraphs();
    CHECK(failedSubgraphs.size() == 2);
    // Sort into a consistent order (by first layer name), matching the
    // positional expected vectors below.
    std::sort(failedSubgraphs.begin(), failedSubgraphs.end(), [](auto s1, auto s2) {
        return strcmp(s1.GetIConnectableLayers().front()->GetName(),
                      s2.GetIConnectableLayers().front()->GetName()) < 0;
    });

    // First failed subgraph: pooling1 + pooling2 (adjacent unsupported run);
    // second: the lone pooling3.
    std::vector<ExpectedSubgraphSize> expectedFailedSubgraphSizes{ { 1, 1, 2 },
                                                                   { 1, 1, 1 } };
    std::vector<SubgraphView::IInputSlots> expectedFailedInputSlots
    {
        ConvertSlotsToISlots<InputSlot, IInputSlot>(
            ConvertReferenceTypeToPointerType(layersInGraph.at("pooling1 layer")->GetInputSlots())),
        ConvertSlotsToISlots<InputSlot, IInputSlot>(
            ConvertReferenceTypeToPointerType(layersInGraph.at("pooling3 layer")->GetInputSlots()))
    };
    std::vector<SubgraphView::IOutputSlots> expectedFailedOutputSlots
    {
        ConvertSlotsToISlots<OutputSlot, IOutputSlot>(
            ConvertReferenceTypeToPointerType(layersInGraph.at("pooling2 layer")->GetOutputSlots())),
        ConvertSlotsToISlots<OutputSlot, IOutputSlot>(
            ConvertReferenceTypeToPointerType(layersInGraph.at("pooling3 layer")->GetOutputSlots()))
    };
    std::vector<SubgraphView::IConnectableLayers> expectedFailedLayers
    {
        { layersInGraph.at("pooling1 layer"),
          layersInGraph.at("pooling2 layer") },
        { layersInGraph.at("pooling3 layer") }
    };

    for (size_t failedIndex = 0; failedIndex < failedSubgraphs.size(); failedIndex++)
    {
        CheckFailedSubgraph(failedSubgraphs.at(failedIndex),
                            expectedFailedSubgraphSizes.at(failedIndex),
                            expectedFailedInputSlots.at(failedIndex),
                            expectedFailedOutputSlots.at(failedIndex),
                            expectedFailedLayers.at(failedIndex));
    }

    // -----------------------------
    // Check the untouched subgraphs
    // -----------------------------

    CHECK(optimizationViews.GetUntouchedSubgraphs().empty());
}
1265 
1266 // The input subgraph contains only unoptimizable layers ("unoptimizable" is added to the layer's name)
1267 void FullyUnoptimizableSubgraphTestImpl1()
1268 {
1269  Graph graph;
1270  LayerNameToLayerMap layersInGraph;
1271 
1272  // Create a fully optimizable subgraph
1273  SubgraphViewSelector::SubgraphViewPtr subgraphPtr = BuildFullyUnoptimizableSubgraph1(graph, layersInGraph);
1274  CHECK((subgraphPtr != nullptr));
1275 
1276  const SubgraphView::IInputSlots& subgraphInputSlots = subgraphPtr->GetIInputSlots();
1277  const SubgraphView::IOutputSlots& subgraphOutputSlots = subgraphPtr->GetIOutputSlots();
1278  const SubgraphView::IConnectableLayers& subgraphLayers = subgraphPtr->GetIConnectableLayers();
1279 
1280  CHECK(subgraphInputSlots.size() == 1);
1281  CHECK(subgraphOutputSlots.size() == 1);
1282  CHECK(subgraphLayers.size() == 3);
1283 
1284  CHECK(Contains(layersInGraph, "conv layer unoptimizable"));
1285 
1286  // Create a mock backend object
1287  MockBackendInitialiser initialiser; // Register the Mock Backend
1288  auto backendObjPtr = CreateBackendObject(MockBackendId());
1289  CHECK((backendObjPtr != nullptr));
1290 
1291  // Optimize the subgraph
1292  OptimizationViews optimizationViews;
1293 
1294  // Check that the optimization is carried out correctly
1295  CHECK_NOTHROW(optimizationViews = backendObjPtr->OptimizeSubgraphView(*subgraphPtr));
1296 
1297  // ============================================================================
1298  // The expected results are:
1299  // - No substitutions
1300  // - No failed subgraphs
1301  // - Exactly one untouched subgraph, corresponding to the whole input subgraph
1302  // ============================================================================
1303 
1304  // -----------------------
1305  // Check the substitutions
1306  // -----------------------
1307 
1308  CHECK(optimizationViews.GetSubstitutions().empty());
1309 
1310  // --------------------------
1311  // Check the failed subgraphs
1312  // --------------------------
1313 
1314  CHECK(optimizationViews.GetFailedSubgraphs().empty());
1315 
1316  // -----------------------------
1317  // Check the untouched subgraphs
1318  // -----------------------------
1319 
1320  const OptimizationViews::Subgraphs& untouchedSubgraphs = optimizationViews.GetUntouchedSubgraphs();
1321  CHECK(untouchedSubgraphs.size() == 1);
1322 
1323  CheckUntouchedSubgraph(untouchedSubgraphs.at(0),
1324  {subgraphInputSlots.size(),
1325  subgraphOutputSlots.size(), subgraphLayers.size()},
1326  subgraphInputSlots, subgraphOutputSlots,
1327  subgraphLayers);
1328 }
1329 
1330 // The input subgraph contains some unoptimizable layers ("unoptimizable" is added to the layer's name)
1331 void PartiallyOptimizableSubgraphTestImpl1()
1332 {
1333  Graph graph;
1334  LayerNameToLayerMap layersInGraph;
1335 
1336  // Create a fully optimizable subgraph
1337  SubgraphViewSelector::SubgraphViewPtr subgraphPtr = BuildPartiallyOptimizableSubgraph1(graph, layersInGraph);
1338  CHECK((subgraphPtr != nullptr));
1339 
1340  const SubgraphView::IInputSlots& subgraphInputSlots = subgraphPtr->GetIInputSlots();
1341  const SubgraphView::IOutputSlots& subgraphOutputSlots = subgraphPtr->GetIOutputSlots();
1342  const SubgraphView::IConnectableLayers& subgraphLayers = subgraphPtr->GetIConnectableLayers();
1343 
1344  CHECK(subgraphInputSlots.size() == 1);
1345  CHECK(subgraphOutputSlots.size() == 1);
1346  CHECK(subgraphLayers.size() == 15);
1347 
1348  CHECK(Contains(layersInGraph, "conv1 layer"));
1349  CHECK(Contains(layersInGraph, "conv2 layer unoptimizable"));
1350  CHECK(Contains(layersInGraph, "conv3 layer"));
1351  CHECK(Contains(layersInGraph, "conv4 layer unoptimizable"));
1352  CHECK(Contains(layersInGraph, "conv5 layer"));
1353 
1354  // Create a mock backend object
1355  MockBackendInitialiser initialiser; // Register the Mock Backend
1356  auto backendObjPtr = CreateBackendObject(MockBackendId());
1357  CHECK((backendObjPtr != nullptr));
1358 
1359  // Optimize the subgraph
1360  OptimizationViews optimizationViews;
1361 
1362  // Check that the optimization is carried out correctly
1363  CHECK_NOTHROW(optimizationViews = backendObjPtr->OptimizeSubgraphView(*subgraphPtr));
1364 
1365  // ===============================================================================
1366  // The expected results are:
1367  // - Exactly three substitutions, corresponding to the optimizable layers
1368  // - No failed subgraphs
1369  // - Exactly two untouched subgraphs, corresponding to the non-optimizable layers
1370  // ===============================================================================
1371 
1372  // -----------------------
1373  // Check the substitutions
1374  // -----------------------
1375 
1376  OptimizationViews::Substitutions substitutions = optimizationViews.GetSubstitutions();
1377  CHECK(substitutions.size() == 3);
1378  // Sort into a consistent order
1379  std::sort(substitutions.begin(), substitutions.end(),
1380  [](auto s1, auto s2)
1381  { return strcmp(s1.m_SubstitutableSubgraph.GetIConnectableLayers().front()->GetName(),
1382  s2.m_SubstitutableSubgraph.GetIConnectableLayers().front()->GetName()) < 0; });
1383 
1384  std::vector<ExpectedSubgraphSize> expectedSubstitutableSubgraphSizes{ { 1, 1, 3 },
1385  { 1, 1, 3 },
1386  { 1, 1, 3 } };
1387  std::vector<ExpectedSubgraphSize> expectedReplacementSubgraphSizes{ { 1, 1, 1 },
1388  { 1, 1, 1 },
1389  { 1, 1, 1 } };
1390  std::vector<SubgraphView::IInputSlots> expectedSubstitutableInputSlots
1391  {
1392  ConvertSlotsToISlots<InputSlot, IInputSlot>(
1393  {ConvertReferenceTypeToPointerType(layersInGraph.at("conv1 layer")->GetInputSlot(0))}),
1394  ConvertSlotsToISlots<InputSlot, IInputSlot>(
1395  {ConvertReferenceTypeToPointerType(layersInGraph.at("conv3 layer")->GetInputSlot(0))}),
1396  ConvertSlotsToISlots<InputSlot, IInputSlot>(
1397  {ConvertReferenceTypeToPointerType(layersInGraph.at("conv5 layer")->GetInputSlot(0))})
1398  };
1399  std::vector<SubgraphView::IOutputSlots> expectedSubstitutableOutputSlots
1400  {
1401  ConvertSlotsToISlots<OutputSlot, IOutputSlot>(
1402  ConvertReferenceTypeToPointerType(layersInGraph.at("conv1 layer")->GetOutputSlots())),
1403  ConvertSlotsToISlots<OutputSlot, IOutputSlot>(
1404  ConvertReferenceTypeToPointerType(layersInGraph.at("conv3 layer")->GetOutputSlots())),
1405  ConvertSlotsToISlots<OutputSlot, IOutputSlot>(
1406  ConvertReferenceTypeToPointerType(layersInGraph.at("conv5 layer")->GetOutputSlots()))
1407  };
1408  std::vector<SubgraphView::IConnectableLayers> expectedSubstitutableLayers
1409  {
1410  { layersInGraph.at("Weights Layer 1"), layersInGraph.at("Bias Layer 1"), layersInGraph.at("conv1 layer") },
1411  { layersInGraph.at("Weights Layer 3"), layersInGraph.at("Bias Layer 3"), layersInGraph.at("conv3 layer") },
1412  { layersInGraph.at("Weights Layer 5"), layersInGraph.at("Bias Layer 5"), layersInGraph.at("conv5 layer") }
1413  };
1414 
1415  for (size_t substitutionIndex = 0; substitutionIndex < substitutions.size(); substitutionIndex++)
1416  {
1417  CheckSubstitution(substitutions.at(substitutionIndex),
1418  expectedSubstitutableSubgraphSizes.at(substitutionIndex),
1419  expectedReplacementSubgraphSizes.at(substitutionIndex),
1420  expectedSubstitutableInputSlots.at(substitutionIndex),
1421  expectedSubstitutableOutputSlots.at(substitutionIndex),
1422  expectedSubstitutableLayers.at(substitutionIndex));
1423  }
1424 
1425  // --------------------------
1426  // Check the failed subgraphs
1427  // --------------------------
1428 
1429  CHECK(optimizationViews.GetFailedSubgraphs().empty());
1430 
1431  // -----------------------------
1432  // Check the untouched subgraphs
1433  // -----------------------------
1434 
1435  OptimizationViews::Subgraphs untouchedSubgraphs = optimizationViews.GetUntouchedSubgraphs();
1436  CHECK(untouchedSubgraphs.size() == 2);
1437  // Sort into a consistent order
1438  std::sort(untouchedSubgraphs.begin(), untouchedSubgraphs.end(), [](auto s1, auto s2) {
1439  return strcmp(s1.GetIConnectableLayers().front()->GetName(),
1440  s2.GetIConnectableLayers().front()->GetName()) < 0;
1441  });
1442 
1443  std::vector<ExpectedSubgraphSize> expectedUntouchedSubgraphSizes{ { 1, 1, 3 },
1444  { 1, 1, 3 } };
1445  std::vector<SubgraphView::IInputSlots> expectedUntouchedInputSlots{
1446  ConvertSlotsToISlots<InputSlot,
1447  IInputSlot>({ConvertReferenceTypeToPointerType(
1448  layersInGraph.at("conv2 layer unoptimizable")->GetInputSlot(0))}),
1449  ConvertSlotsToISlots<InputSlot,
1450  IInputSlot>({ConvertReferenceTypeToPointerType(
1451  layersInGraph.at("conv4 layer unoptimizable")->GetInputSlot(0))})};
1452 
1453  std::vector<SubgraphView::IOutputSlots> expectedUntouchedOutputSlots
1454  {
1455  ConvertSlotsToISlots<OutputSlot, IOutputSlot>(
1456  ConvertReferenceTypeToPointerType(layersInGraph.at("conv2 layer unoptimizable")->GetOutputSlots())),
1457  ConvertSlotsToISlots<OutputSlot, IOutputSlot>(
1458  ConvertReferenceTypeToPointerType(layersInGraph.at("conv4 layer unoptimizable")->GetOutputSlots()))
1459  };
1460 
1461  std::vector<SubgraphView::IConnectableLayers> expectedUntouchedLayers
1462  {
1463  { layersInGraph.at("Weights Layer 2 unoptimizable"),
1464  layersInGraph.at("Bias Layer 2 unoptimizable"),
1465  layersInGraph.at("conv2 layer unoptimizable") },
1466  { layersInGraph.at("Weights Layer 4 unoptimizable"),
1467  layersInGraph.at("Bias Layer 4 unoptimizable"),
1468  layersInGraph.at("conv4 layer unoptimizable") }
1469  };
1470 
1471  for (size_t untouchedIndex = 0; untouchedIndex < untouchedSubgraphs.size(); untouchedIndex++)
1472  {
1473  CheckUntouchedSubgraph(untouchedSubgraphs.at(untouchedIndex),
1474  expectedUntouchedSubgraphSizes.at(untouchedIndex),
1475  expectedUntouchedInputSlots.at(untouchedIndex),
1476  expectedUntouchedOutputSlots.at(untouchedIndex),
1477  expectedUntouchedLayers.at(untouchedIndex));
1478  }
1479 }
1480 
1481 // The input subgraph contains some unoptimizable layers ("unoptimizable" is added to the layer's name),
1482 // this is meant to test input slots coming from different layers
1483 void PartiallyOptimizableSubgraphTestImpl2()
1484 {
1485  Graph graph;
1486  LayerNameToLayerMap layersInGraph;
1487 
1488  // Create a partially optimizable subgraph
1489  SubgraphViewSelector::SubgraphViewPtr subgraphPtr = BuildPartiallyOptimizableSubgraph2(graph, layersInGraph);
1490  CHECK((subgraphPtr != nullptr));
1491 
1492  const SubgraphView::IInputSlots& subgraphInputSlots = subgraphPtr->GetIInputSlots();
1493  const SubgraphView::IOutputSlots& subgraphOutputSlots = subgraphPtr->GetIOutputSlots();
1494  const SubgraphView::IConnectableLayers& subgraphLayers = subgraphPtr->GetIConnectableLayers();
1495 
1496  CHECK(subgraphInputSlots.size() == 2);
1497  CHECK(subgraphOutputSlots.size() == 1);
1498  CHECK(subgraphLayers.size() == 10);
1499 
1500  CHECK(Contains(layersInGraph, "conv1 layer"));
1501  CHECK(Contains(layersInGraph, "conv2 layer unoptimizable"));
1502  CHECK(Contains(layersInGraph, "conv3 layer"));
1503  CHECK(Contains(layersInGraph, "add layer"));
1504 
1505  // Create a mock backend object
1506  MockBackendInitialiser initialiser; // Register the Mock Backend
1507  auto backendObjPtr = CreateBackendObject(MockBackendId());
1508  CHECK((backendObjPtr != nullptr));
1509 
1510  // Optimize the subgraph
1511  OptimizationViews optimizationViews;
1512 
1513  // Check that the optimization is carried out correctly
1514  CHECK_NOTHROW(optimizationViews = backendObjPtr->OptimizeSubgraphView(*subgraphPtr));
1515 
1516  // ==============================================================================
1517  // The expected results are:
1518  // - Exactly one substitution, corresponding to the optimizable layers
1519  // - No failed subgraphs
1520  // - Exactly two untouched subgraphs, corresponding to the non-optimizable layer
1521  // ==============================================================================
1522 
1523  // -----------------------
1524  // Check the substitutions
1525  // -----------------------
1526 
1527  const OptimizationViews::Substitutions& substitutions = optimizationViews.GetSubstitutions();
1528  CHECK(substitutions.size() == 1);
1529 
1530  ExpectedSubgraphSize expectedSubstitutableSubgraphSizes{ 2, 1, 7 };
1531  ExpectedSubgraphSize expectedReplacementSubgraphSizes{ 2, 1, 1 };
1532 
1533  SubgraphView::IInputSlots expectedSubstitutableInputSlots
1534  {
1535  ConvertSlotsToISlots<InputSlot, IInputSlot>({
1536  ConvertReferenceTypeToPointerType(layersInGraph.at("conv1 layer")->GetInputSlots()[0])})[0],
1537  ConvertSlotsToISlots<InputSlot, IInputSlot>({
1538  ConvertReferenceTypeToPointerType(layersInGraph.at("conv3 layer")->GetInputSlots()[0])})[0]
1539  };
1540 
1541  SubgraphView::IOutputSlots expectedSubstitutableOutputSlots
1542  {
1543  ConvertSlotsToISlots<OutputSlot, IOutputSlot>(
1544  ConvertReferenceTypeToPointerType(layersInGraph.at("add layer")->GetOutputSlots()))
1545  };
1546 
1547  SubgraphView::IConnectableLayers expectedSubstitutableLayers
1548  {
1549  layersInGraph.at("Weights Layer 1"),
1550  layersInGraph.at("Weights Layer 3"),
1551  layersInGraph.at("Bias Layer 1"),
1552  layersInGraph.at("Bias Layer 3"),
1553  layersInGraph.at("conv1 layer"),
1554  layersInGraph.at("conv3 layer"),
1555  layersInGraph.at("add layer")
1556  };
1557 
1558  CheckSubstitution(substitutions[0],
1559  expectedSubstitutableSubgraphSizes,
1560  expectedReplacementSubgraphSizes,
1561  expectedSubstitutableInputSlots,
1562  expectedSubstitutableOutputSlots,
1563  expectedSubstitutableLayers);
1564 
1565  // --------------------------
1566  // Check the failed subgraphs
1567  // --------------------------
1568 
1569  CHECK(optimizationViews.GetFailedSubgraphs().empty());
1570 
1571  // -----------------------------
1572  // Check the untouched subgraphs
1573  // -----------------------------
1574 
1575  const OptimizationViews::Subgraphs& untouchedSubgraphs = optimizationViews.GetUntouchedSubgraphs();
1576  CHECK(untouchedSubgraphs.size() == 1);
1577 
1578  std::vector<ExpectedSubgraphSize> expectedUntouchedSubgraphSizes{ { 1, 1, 3 } };
1579  std::vector<SubgraphView::IInputSlots> expectedUntouchedInputSlots
1580  {
1581  ConvertSlotsToISlots<InputSlot,
1582  IInputSlot>({ConvertReferenceTypeToPointerType(
1583  layersInGraph.at("conv2 layer unoptimizable")->GetInputSlot(0))})};
1584  std::vector<SubgraphView::IOutputSlots> expectedUntouchedOutputSlots
1585  {
1586  ConvertSlotsToISlots<OutputSlot, IOutputSlot>(
1587  ConvertReferenceTypeToPointerType(layersInGraph.at("conv2 layer unoptimizable")->GetOutputSlots()))
1588  };
1589  std::vector<SubgraphView::IConnectableLayers> expectedUntouchedLayers
1590  {
1591  { layersInGraph.at("conv2 layer unoptimizable"), layersInGraph.at("Weights Layer 2 unoptimizable"),
1592  layersInGraph.at("Bias Layer 2 unoptimizable") }
1593  };
1594 
1595  for (size_t untouchedIndex = 0; untouchedIndex < untouchedSubgraphs.size(); untouchedIndex++)
1596  {
1597  CheckUntouchedSubgraph(untouchedSubgraphs.at(untouchedIndex),
1598  expectedUntouchedSubgraphSizes.at(untouchedIndex),
1599  expectedUntouchedInputSlots.at(untouchedIndex),
1600  expectedUntouchedOutputSlots.at(untouchedIndex),
1601  expectedUntouchedLayers.at(untouchedIndex));
1602  }
1603 }
1604 
1605 } // Anonymous namespace
1606 
// Doctest registrations for the subgraph-optimization test implementations
// defined in the anonymous namespace above.
TEST_SUITE("OptimizeSubGraph")
{
TEST_CASE("FullyUnsupportedSubgraph1") { FullyUnsupporteSubgraphTestImpl1(); }
TEST_CASE("FullyUnsupportedSubgraph2") { FullyUnsupporteSubgraphTestImpl2(); }
TEST_CASE("FullyOptimizableSubgraph1") { FullyOptimizableSubgraphTestImpl1(); }
TEST_CASE("FullyOptimizableSubgraph2") { FullyOptimizableSubgraphTestImpl2(); }
TEST_CASE("PartiallySupportedSubgraph") { PartiallySupportedSubgraphTestImpl(); }
TEST_CASE("FullyUnoptimizableSubgraph") { FullyUnoptimizableSubgraphTestImpl1(); }
TEST_CASE("PartiallyOptimizableSubgraph1") { PartiallyOptimizableSubgraphTestImpl1(); }
TEST_CASE("PartiallyOptimizableSubgraph2") { PartiallyOptimizableSubgraphTestImpl2(); }

}
TEST_SUITE("TestConstTensorLayerVisitor")
A layer that the constant data can be bound to.
bool m_BiasEnabled
Enable/disable bias.
bool Contains(const CollectionType &collection, const typename CollectionType::value_type &item)
DataLayout m_DataLayout
The data layout to be used (NCHW, NHWC).
Interface for a layer that is connectable to other layers via InputSlots and OutputSlots.
Definition: INetwork.hpp:66
uint32_t m_PadBottom
Padding bottom value in the height dimension.
const IOutputSlots & GetIOutputSlots() const
uint32_t m_PadLeft
Padding left value in the width dimension.
const IConnectableLayers & GetIConnectableLayers() const
const IInputSlots & GetIInputSlots() const
std::shared_ptr< ConstTensorHandle > m_LayerOutput
LayerT * AddLayer(Args &&... args)
Adds a new layer, of type LayerType, to the graph constructed with the arguments passed.
Definition: Graph.hpp:425
uint32_t m_PoolWidth
Pooling width value.
A Convolution2dDescriptor for the Convolution2dLayer.
int Connect(InputSlot &destination)
Definition: Layer.cpp:112
The padding fields don't count and are ignored.
PaddingMethod m_PaddingMethod
The padding method to be used. (Exclude, IgnoreValue).
constexpr const char * MockBackendId()
uint32_t m_PadTop
Padding top value in the height dimension.
std::vector< SubgraphView > Subgraphs
std::vector< SubstitutionPair > Substitutions
Copyright (c) 2021 ARM Limited and Contributors.
SubgraphView m_SubstitutableSubgraph
Subgraph of Layers from the original graph which should be replaced.
uint32_t m_StrideX
Stride value when proceeding through input for the width dimension.
int LayerBindingId
Type of identifiers for bindable layers (inputs, outputs).
Definition: Types.hpp:290
The SubgraphView class represents a subgraph of a Graph.
uint32_t m_PoolHeight
Pooling height value.
const InputSlot & GetInputSlot(unsigned int index) const override
Get a const input slot handle by slot index.
Definition: Layer.hpp:322
uint32_t m_StrideX
Stride value when proceeding through input for the width dimension.
A layer user-provided data can be bound to (e.g. inputs, outputs).
Definition: OutputLayer.hpp:13
std::unique_ptr< SubgraphView > SubgraphViewPtr
std::list< IConnectableLayer * > IConnectableLayers
uint32_t m_PadRight
Padding right value in the width dimension.
std::vector< IOutputSlot * > IOutputSlots
const Subgraphs & GetUntouchedSubgraphs() const
A tensor defined by a TensorInfo (shape and data type) and an immutable backing store.
Definition: Tensor.hpp:327
const Subgraphs & GetFailedSubgraphs() const
SubgraphView m_ReplacementSubgraph
A subgraph of new layers which will replace layers in m_SubstitutableSubgraph.
std::vector< IInputSlot * > IInputSlots
void SetWeightAndBias(ConvolutionLayer *layer, const armnn::TensorInfo &weightInfo, const armnn::TensorInfo &biasInfo)
uint32_t m_StrideY
Stride value when proceeding through input for the height dimension.
This layer represents a pooling 2d operation.
DataLayout m_DataLayout
The data layout to be used (NCHW, NHWC).
This layer represents an addition operation.
const Substitutions & GetSubstitutions() const
std::unique_ptr< SubgraphView > SubgraphViewPtr
bool AreEqual(const CollectionType &lhs, const CollectionType &rhs)
SubgraphView::SubgraphViewPtr CreateSubgraphViewFrom(SubgraphView::InputSlots &&inputs, SubgraphView::OutputSlots &&outputs, SubgraphView::Layers &&layers)
SubgraphView::OutputSlots CreateOutputsFrom(const std::vector< Layer *> &layers)
PoolingAlgorithm m_PoolType
The pooling algorithm to use (Max, Average, L2).
A layer user-provided data can be bound to (e.g. inputs, outputs).
Definition: InputLayer.hpp:13
void SetTensorInfo(const TensorInfo &tensorInfo) override
Definition: Layer.cpp:87
armnn::IBackendInternalUniquePtr CreateBackendObject(const armnn::BackendId &backendId)
void SetConstant(const bool IsConstant=true)
Marks the data corresponding to this tensor info as constant.
Definition: Tensor.cpp:514
const OutputSlot & GetOutputSlot(unsigned int index=0) const override
Get the const output slot handle by slot index.
Definition: Layer.hpp:324
const char * GetName() const override
Returns the name of the layer.
Definition: Layer.hpp:317
This layer represents a convolution 2d operation.
A Pooling2dDescriptor for the Pooling2dLayer.
SubgraphView::InputSlots CreateInputsFrom(Layer *layer, std::vector< unsigned int > ignoreSlots)
An input connection slot for a layer.
Definition: INetwork.hpp:26
uint32_t m_StrideY
Stride value when proceeding through input for the height dimension.