ArmNN
 21.11
OptimizeSubgraphViewTests.cpp
Go to the documentation of this file.
1 //
2 // Copyright © 2017 Arm Ltd. All rights reserved.
3 // SPDX-License-Identifier: MIT
4 //
5 
6 #include "CommonTestUtils.hpp"
7 #include "MockBackend.hpp"
8 #include "MockBackendId.hpp"
9 
10 #include <Graph.hpp>
11 #include <Network.hpp>
12 
14 
15 #include <doctest/doctest.h>
16 #include <unordered_map>
17 
18 using namespace armnn;
19 
20 namespace
21 {
22 
23 // The expected number of layers, input and output slots in a subgraph after a test
struct ExpectedSubgraphSize
{
    size_t m_NumInputSlots = 0;   // Number of input slots the subgraph is expected to expose
    size_t m_NumOutputSlots = 0;  // Number of output slots the subgraph is expected to expose
    size_t m_NumLayers = 0;       // Number of layers the subgraph is expected to contain
};
30 
31 // Keep the layers organized by layer name
32 using LayerNameToLayerMap = std::unordered_map<std::string, Layer*>;
33 
34 // Used to convert input and output slots from reference type (as stored in graphs) to
35 // pointer type (as stored in subgraphs)
template <typename SlotType>
SlotType* ConvertReferenceTypeToPointerType(const SlotType& input)
{
    // Graphs hand out const references; subgraphs store mutable pointers,
    // so drop the constness and take the address.
    SlotType* slotPtr = const_cast<SlotType*>(&input);
    return slotPtr;
}
41 
42 // Used to convert input and output slots from reference type (as stored in graphs) to
43 // pointer type (as stored in subgraphs), array version
template <typename SlotType>
std::vector<SlotType*> ConvertReferenceTypeToPointerType(const std::vector<SlotType>& input)
{
    // Convert each element with the scalar overload and collect the pointers.
    std::vector<SlotType*> output;
    output.reserve(input.size());
    for (const SlotType& item : input)
    {
        output.push_back(ConvertReferenceTypeToPointerType(item));
    }
    return output;
}
58 
59 // Convenience function to add an input layer to a graph
60 Layer* AddInputLayer(Graph& graph,
61  const std::string& layerName,
62  const TensorInfo& inputInfo,
63  LayerBindingId inputId = 0)
64 {
65  Layer* const inputLayer = graph.AddLayer<InputLayer>(inputId, layerName.c_str());
66  CHECK(inputLayer);
67  inputLayer->GetOutputSlot(0).SetTensorInfo(inputInfo);
68  return inputLayer;
69 }
70 
71 // Convenience function to add an output layer to a graph
72 Layer* AddOutputLayer(Graph& graph,
73  const std::string& layerName)
74 {
75  Layer* const outputLayer = graph.AddLayer<OutputLayer>(0, layerName.c_str());
76  CHECK(outputLayer);
77  return outputLayer;
78 }
79 
80 // Convenience function to add a convolution layer to a graph
81 Convolution2dLayer* AddConvolutionLayer(Graph& graph,
82  LayerNameToLayerMap& layersInGraph,
83  const Convolution2dDescriptor& convolutionDescriptor,
84  const std::string& layerName,
85  const TensorInfo& weightInfo,
86  const TensorInfo& biasInfo,
87  const TensorInfo& outputInfo)
88 {
89  Convolution2dLayer* const convLayer = graph.AddLayer<Convolution2dLayer>(convolutionDescriptor, layerName.c_str());
90  CHECK(convLayer);
91  SetWeightAndBias(convLayer, weightInfo, biasInfo);
92  convLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);
93  layersInGraph.insert(std::make_pair(convLayer->GetName(), convLayer));
94  return convLayer;
95 }
96 
97 // Convenience function to add a pooling layer to a graph
98 Pooling2dLayer* AddPoolingLayer(Graph& graph,
99  LayerNameToLayerMap& layersInGraph,
100  const Pooling2dDescriptor& poolingDescriptor,
101  const std::string& layerName,
102  const TensorInfo& outputInfo)
103 {
104  Pooling2dLayer* const poolingLayer = graph.AddLayer<Pooling2dLayer>(poolingDescriptor, layerName.c_str());
105  CHECK(poolingLayer);
106  poolingLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);
107  layersInGraph.insert(std::make_pair(poolingLayer->GetName(), poolingLayer));
108  return poolingLayer;
109 }
110 
111 // Convenience function to add an addition layer to a graph
112 AdditionLayer* AddAdditionaLayer(Graph& graph,
113  LayerNameToLayerMap& layersInGraph,
114  const std::string& layerName,
115  const TensorInfo& outputInfo)
116 {
117  AdditionLayer* const additionLayer = graph.AddLayer<AdditionLayer>(layerName.c_str());
118  CHECK(additionLayer);
119  additionLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);
120  layersInGraph.insert(std::make_pair(additionLayer->GetName(), additionLayer));
121  return additionLayer;
122 }
123 
124 // Convenience function to check that the given substitution matches the specified expected values
125 void CheckSubstitution(const OptimizationViews::SubstitutionPair& substitution,
126  const ExpectedSubgraphSize& expectedSubstitutableSubgraphSize,
127  const ExpectedSubgraphSize& expectedReplacementSubgraphSize,
128  const SubgraphView::InputSlots& expectedSubstitutableInputSlots,
129  const SubgraphView::OutputSlots& expectedSubstitutableOutputSlots,
130  const SubgraphView::Layers& expectedSubstitutableLayers)
131 {
132  const SubgraphView& substitutableSubgraph = substitution.m_SubstitutableSubgraph;
133  const SubgraphView::InputSlots& substitutableSubgraphInputSlots = substitutableSubgraph.GetInputSlots();
134  const SubgraphView::OutputSlots& substitutableSubgraphOutputSlots = substitutableSubgraph.GetOutputSlots();
135  const SubgraphView::Layers& substitutableSubgraphLayers = substitutableSubgraph.GetLayers();
136 
137  const SubgraphView& replacementSubgraph = substitution.m_ReplacementSubgraph;
138  const SubgraphView::InputSlots& replacementSubgraphInputSlots = replacementSubgraph.GetInputSlots();
139  const SubgraphView::OutputSlots& replacementSubgraphOutputSlots = replacementSubgraph.GetOutputSlots();
140  const SubgraphView::Layers& replacementSubgraphLayers = replacementSubgraph.GetLayers();
141 
142  CHECK(substitutableSubgraphInputSlots.size() == expectedSubstitutableSubgraphSize.m_NumInputSlots);
143  CHECK(substitutableSubgraphOutputSlots.size() == expectedSubstitutableSubgraphSize.m_NumOutputSlots);
144  CHECK(substitutableSubgraphLayers.size() == expectedSubstitutableSubgraphSize.m_NumLayers);
145 
146  CHECK(AreEqual(substitutableSubgraphInputSlots, expectedSubstitutableInputSlots));
147  CHECK(AreEqual(substitutableSubgraphOutputSlots, expectedSubstitutableOutputSlots));
148  CHECK(AreEqual(substitutableSubgraphLayers, expectedSubstitutableLayers));
149 
150  CHECK(replacementSubgraphInputSlots.size() == expectedReplacementSubgraphSize.m_NumInputSlots);
151  CHECK(replacementSubgraphOutputSlots.size() == expectedReplacementSubgraphSize.m_NumOutputSlots);
152  CHECK(replacementSubgraphLayers.size() == expectedReplacementSubgraphSize.m_NumLayers);
153 
154  CHECK(!AreEqual(replacementSubgraphInputSlots, expectedSubstitutableInputSlots));
155  CHECK(!AreEqual(replacementSubgraphOutputSlots, expectedSubstitutableOutputSlots));
156  CHECK(!AreEqual(replacementSubgraphLayers, expectedSubstitutableLayers));
157 
158  CHECK(std::all_of(replacementSubgraphLayers.begin(),
159  replacementSubgraphLayers.end(),
160  [](const Layer* layer)
161  {
162  return layer->GetType() == LayerType::PreCompiled;
163  }));
164 }
165 
166 // Convenience function to check that the given failed subgraph matches the specified expected values
167 void CheckFailedSubgraph(const SubgraphView& failedSubgraph,
168  const ExpectedSubgraphSize& expectedFailedSubgraphSize,
169  const SubgraphView::InputSlots& expectedFailedInputSlots,
170  const SubgraphView::OutputSlots& expectedFailedOutputSlots,
171  const SubgraphView::Layers& expectedFailedLayers)
172 {
173  const SubgraphView::InputSlots& failedSubgraphInputSlots = failedSubgraph.GetInputSlots();
174  const SubgraphView::OutputSlots& failedSubgraphOutputSlots = failedSubgraph.GetOutputSlots();
175  const SubgraphView::Layers& failedSubgraphLayers = failedSubgraph.GetLayers();
176 
177  CHECK(failedSubgraphInputSlots.size() == expectedFailedSubgraphSize.m_NumInputSlots);
178  CHECK(failedSubgraphOutputSlots.size() == expectedFailedSubgraphSize.m_NumOutputSlots);
179  CHECK(failedSubgraphLayers.size() == expectedFailedSubgraphSize.m_NumLayers);
180 
181  CHECK(AreEqual(failedSubgraphInputSlots, expectedFailedInputSlots));
182  CHECK(AreEqual(failedSubgraphOutputSlots, expectedFailedOutputSlots));
183  CHECK(AreEqual(failedSubgraphLayers, expectedFailedLayers));
184 }
185 
186 // Convenience function to check that the given untouched subgraph matches the specified expected values
187 void CheckUntouchedSubgraph(const SubgraphView& untouchedSubgraph,
188  const ExpectedSubgraphSize& expectedUntouchedSubgraphSize,
189  const SubgraphView::InputSlots& expectedUntouchedInputSlots,
190  const SubgraphView::OutputSlots& expectedUntouchedOutputSlots,
191  const SubgraphView::Layers& expectedUntouchedLayers)
192 {
193  const SubgraphView::InputSlots& untouchedSubgraphInputSlots = untouchedSubgraph.GetInputSlots();
194  const SubgraphView::OutputSlots& untouchedSubgraphOutputSlots = untouchedSubgraph.GetOutputSlots();
195  const SubgraphView::Layers& untouchedSubgraphLayers = untouchedSubgraph.GetLayers();
196 
197  CHECK(untouchedSubgraphInputSlots.size() == expectedUntouchedSubgraphSize.m_NumInputSlots);
198  CHECK(untouchedSubgraphOutputSlots.size() == expectedUntouchedSubgraphSize.m_NumOutputSlots);
199  CHECK(untouchedSubgraphLayers.size() == expectedUntouchedSubgraphSize.m_NumLayers);
200 
201  CHECK(AreEqual(untouchedSubgraphInputSlots, expectedUntouchedInputSlots));
202  CHECK(AreEqual(untouchedSubgraphOutputSlots, expectedUntouchedOutputSlots));
203  CHECK(AreEqual(untouchedSubgraphLayers, expectedUntouchedLayers));
204 }
205 
206 // Creates a subgraph containing only a single unsupported layer (only convolutions are unsupported by the mock backend)
207 SubgraphView::SubgraphViewPtr BuildFullyUnsupportedSubgraph1(Graph& graph, LayerNameToLayerMap& layersInGraph)
208 {
209  const TensorInfo inputInfo ({ 1, 16, 16, 16 }, DataType::QAsymmU8, 1.0f, 0);
210  const TensorInfo outputInfo({ 1, 16, 16, 16 }, DataType::QAsymmU8, 1.0f, 0);
211 
212  Pooling2dDescriptor poolingDescriptor;
213  poolingDescriptor.m_PoolType = armnn::PoolingAlgorithm::Average;
214  poolingDescriptor.m_PoolWidth = 2;
215  poolingDescriptor.m_PoolHeight = 2;
216  poolingDescriptor.m_StrideX = 2;
217  poolingDescriptor.m_StrideY = 2;
218  poolingDescriptor.m_PadLeft = 1;
219  poolingDescriptor.m_PadRight = 1;
220  poolingDescriptor.m_PadTop = 1;
221  poolingDescriptor.m_PadBottom = 1;
222  poolingDescriptor.m_PaddingMethod = armnn::PaddingMethod::Exclude;
223  poolingDescriptor.m_DataLayout = DataLayout::NHWC;
224 
225  // Construct the graph
226  Layer* const inputLayer = AddInputLayer(graph, "input layer", inputInfo);
227  Pooling2dLayer* const poolingLayer = AddPoolingLayer(graph, layersInGraph, poolingDescriptor,
228  "pooling layer", outputInfo);
229  Layer* const outputLayer = AddOutputLayer(graph, "output layer");
230 
231  // Connect the network
232  inputLayer->GetOutputSlot(0).Connect(poolingLayer->GetInputSlot(0));
233  poolingLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
234 
235  // Create the subgraph view for the whole network
236  return CreateSubgraphViewFrom(CreateInputsFrom({poolingLayer}),
237  CreateOutputsFrom({poolingLayer}),
238  {poolingLayer});
239 }
240 
241 // Creates a subgraph containing only unsupported layers (only convolutions are unsupported by the mock backend)
242 SubgraphView::SubgraphViewPtr BuildFullyUnsupportedSubgraph2(Graph& graph, LayerNameToLayerMap& layersInGraph)
243 {
244  const TensorInfo inputInfo ({ 1, 16, 16, 16 }, DataType::QAsymmU8, 1.0f, 0);
245  const TensorInfo outputInfo({ 1, 16, 16, 16 }, DataType::QAsymmU8, 1.0f, 0);
246 
247  Pooling2dDescriptor poolingDescriptor;
248  poolingDescriptor.m_PoolType = armnn::PoolingAlgorithm::Average;
249  poolingDescriptor.m_PoolWidth = 2;
250  poolingDescriptor.m_PoolHeight = 2;
251  poolingDescriptor.m_StrideX = 2;
252  poolingDescriptor.m_StrideY = 2;
253  poolingDescriptor.m_PadLeft = 1;
254  poolingDescriptor.m_PadRight = 1;
255  poolingDescriptor.m_PadTop = 1;
256  poolingDescriptor.m_PadBottom = 1;
257  poolingDescriptor.m_PaddingMethod = armnn::PaddingMethod::Exclude;
258  poolingDescriptor.m_DataLayout = DataLayout::NHWC;
259 
260  // Construct the graph
261  Layer* const inputLayer = AddInputLayer(graph, "input layer", inputInfo);
262  Pooling2dLayer* const pooling1Layer = AddPoolingLayer(graph, layersInGraph, poolingDescriptor,
263  "pooling1 layer", outputInfo);
264  Pooling2dLayer* const pooling2Layer = AddPoolingLayer(graph, layersInGraph, poolingDescriptor,
265  "pooling2 layer", outputInfo);
266  Pooling2dLayer* const pooling3Layer = AddPoolingLayer(graph, layersInGraph, poolingDescriptor,
267  "pooling3 layer", outputInfo);
268  Layer* const outputLayer = AddOutputLayer(graph, "output layer");
269 
270  // Connect the network
271  inputLayer->GetOutputSlot(0).Connect(pooling1Layer->GetInputSlot(0));
272  pooling1Layer->GetOutputSlot(0).Connect(pooling2Layer->GetInputSlot(0));
273  pooling2Layer->GetOutputSlot(0).Connect(pooling3Layer->GetInputSlot(0));
274  pooling3Layer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
275 
276  // Create the subgraph view for the whole network
277  return CreateSubgraphViewFrom(CreateInputsFrom({pooling1Layer}),
278  CreateOutputsFrom({pooling3Layer}),
279  {pooling1Layer,
280  pooling2Layer,
281  pooling3Layer});
282 }
283 
284 // Creates a simple subgraph with only one convolution layer, supported by the mock backend
285 SubgraphView::SubgraphViewPtr BuildFullyOptimizableSubgraph1(Graph& graph, LayerNameToLayerMap& layersInGraph)
286 {
287  const TensorInfo inputInfo ({ 1, 16, 16, 16 }, DataType::QAsymmU8, 1.0f, 0);
288  const TensorInfo outputInfo({ 1, 16, 16, 16 }, DataType::QAsymmU8, 1.0f, 0);
289  const TensorInfo weightInfo({ 16, 1, 1, 16 }, DataType::QAsymmU8, 0.9f, 0);
290  const TensorInfo biasInfo ({ 1, 1, 1, 16 }, DataType::Signed32, 0.9f, 0);
291 
292  Convolution2dDescriptor convolutionDescriptor;
293  convolutionDescriptor.m_StrideX = 1;
294  convolutionDescriptor.m_StrideY = 1;
295  convolutionDescriptor.m_BiasEnabled = true;
296  convolutionDescriptor.m_DataLayout = DataLayout::NHWC;
297 
298  // Construct the graph
299  Layer* const inputLayer = AddInputLayer(graph, "input layer", inputInfo);
300  Convolution2dLayer* const convLayer = AddConvolutionLayer(graph, layersInGraph, convolutionDescriptor,
301  "conv layer", weightInfo, biasInfo, outputInfo);
302  Layer* const outputLayer = AddOutputLayer(graph, "output layer");
303 
304  // Connect the network
305  inputLayer->GetOutputSlot(0).Connect(convLayer->GetInputSlot(0));
306  convLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
307 
308  // Create the subgraph view for the whole network
309  return CreateSubgraphViewFrom(CreateInputsFrom({convLayer}),
310  CreateOutputsFrom({convLayer}),
311  {convLayer});
312 }
313 
314 // Creates a subgraph with five convolutions layers, all supported by the mock backend
315 SubgraphView::SubgraphViewPtr BuildFullyOptimizableSubgraph2(Graph& graph, LayerNameToLayerMap& layersInGraph)
316 {
317  const TensorInfo inputInfo ({ 1, 16, 16, 16 }, DataType::QAsymmU8, 1.0f, 0);
318  const TensorInfo outputInfo({ 1, 16, 16, 16 }, DataType::QAsymmU8, 1.0f, 0);
319  const TensorInfo weightInfo({ 16, 1, 1, 16 }, DataType::QAsymmU8, 0.9f, 0);
320  const TensorInfo biasInfo ({ 1, 1, 1, 16 }, DataType::Signed32, 0.9f, 0);
321 
322  Convolution2dDescriptor convolutionDescriptor;
323  convolutionDescriptor.m_StrideX = 1;
324  convolutionDescriptor.m_StrideY = 1;
325  convolutionDescriptor.m_BiasEnabled = true;
326  convolutionDescriptor.m_DataLayout = DataLayout::NHWC;
327 
328  // Construct the graph
329  Layer* const inputLayer = AddInputLayer(graph, "input layer", inputInfo);
330  Convolution2dLayer* const conv1Layer = AddConvolutionLayer(graph, layersInGraph, convolutionDescriptor,
331  "conv1 layer", weightInfo, biasInfo, outputInfo);
332  Convolution2dLayer* const conv2Layer = AddConvolutionLayer(graph, layersInGraph, convolutionDescriptor,
333  "conv2 layer", weightInfo, biasInfo, outputInfo);
334  Convolution2dLayer* const conv3Layer = AddConvolutionLayer(graph, layersInGraph, convolutionDescriptor,
335  "conv3 layer", weightInfo, biasInfo, outputInfo);
336  Convolution2dLayer* const conv4Layer = AddConvolutionLayer(graph, layersInGraph, convolutionDescriptor,
337  "conv4 layer", weightInfo, biasInfo, outputInfo);
338  Convolution2dLayer* const conv5Layer = AddConvolutionLayer(graph, layersInGraph, convolutionDescriptor,
339  "conv5 layer", weightInfo, biasInfo, outputInfo);
340  Layer* const outputLayer = AddOutputLayer(graph, "output layer");
341 
342  // Connect the network
343  inputLayer->GetOutputSlot(0).Connect(conv1Layer->GetInputSlot(0));
344  conv1Layer->GetOutputSlot(0).Connect(conv2Layer->GetInputSlot(0));
345  conv2Layer->GetOutputSlot(0).Connect(conv3Layer->GetInputSlot(0));
346  conv3Layer->GetOutputSlot(0).Connect(conv4Layer->GetInputSlot(0));
347  conv4Layer->GetOutputSlot(0).Connect(conv5Layer->GetInputSlot(0));
348  conv5Layer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
349 
350  // Create the subgraph view for the whole network
351  return CreateSubgraphViewFrom(CreateInputsFrom({conv1Layer}),
352  CreateOutputsFrom({conv5Layer}),
353  {conv1Layer,
354  conv2Layer,
355  conv3Layer,
356  conv4Layer,
357  conv5Layer});
358 }
359 
360 // Creates a subgraph with both supported and unsupported layers
361 // (only convolutions are unsupported by the mock backend)
362 SubgraphView::SubgraphViewPtr BuildPartiallySupportedSubgraph(Graph& graph, LayerNameToLayerMap& layersInGraph)
363 {
364  const TensorInfo inputInfo ({ 1, 16, 16, 16 }, DataType::QAsymmU8, 1.0f, 0);
365  const TensorInfo outputInfo({ 1, 16, 16, 16 }, DataType::QAsymmU8, 1.0f, 0);
366  const TensorInfo weightInfo({ 16, 1, 1, 16 }, DataType::QAsymmU8, 0.9f, 0);
367  const TensorInfo biasInfo ({ 1, 1, 1, 16 }, DataType::Signed32, 0.9f, 0);
368 
369  Convolution2dDescriptor convolutionDescriptor;
370  convolutionDescriptor.m_StrideX = 1;
371  convolutionDescriptor.m_StrideY = 1;
372  convolutionDescriptor.m_BiasEnabled = true;
373  convolutionDescriptor.m_DataLayout = DataLayout::NHWC;
374 
375  Pooling2dDescriptor poolingDescriptor;
376  poolingDescriptor.m_PoolType = armnn::PoolingAlgorithm::Average;
377  poolingDescriptor.m_PoolWidth = 2;
378  poolingDescriptor.m_PoolHeight = 2;
379  poolingDescriptor.m_StrideX = 2;
380  poolingDescriptor.m_StrideY = 2;
381  poolingDescriptor.m_PadLeft = 1;
382  poolingDescriptor.m_PadRight = 1;
383  poolingDescriptor.m_PadTop = 1;
384  poolingDescriptor.m_PadBottom = 1;
386  poolingDescriptor.m_DataLayout = DataLayout::NHWC;
387 
388  // Construct the graph
389  Layer* const inputLayer = AddInputLayer(graph, "input layer", inputInfo);
390  Convolution2dLayer* const conv1Layer = AddConvolutionLayer(graph, layersInGraph, convolutionDescriptor,
391  "conv1 layer", weightInfo, biasInfo, outputInfo);
392  Pooling2dLayer* const pooling1Layer = AddPoolingLayer(graph, layersInGraph, poolingDescriptor,
393  "pooling1 layer", outputInfo);
394  Pooling2dLayer* const pooling2Layer = AddPoolingLayer(graph, layersInGraph, poolingDescriptor,
395  "pooling2 layer", outputInfo);
396  Convolution2dLayer* const conv2Layer = AddConvolutionLayer(graph, layersInGraph, convolutionDescriptor,
397  "conv2 layer", weightInfo, biasInfo, outputInfo);
398  Pooling2dLayer* const pooling3Layer = AddPoolingLayer(graph, layersInGraph, poolingDescriptor,
399  "pooling3 layer", outputInfo);
400  Layer* const outputLayer = AddOutputLayer(graph, "output layer");
401 
402  // Connect the network
403  inputLayer->GetOutputSlot(0).Connect(conv1Layer->GetInputSlot(0));
404  conv1Layer->GetOutputSlot(0).Connect(pooling1Layer->GetInputSlot(0));
405  pooling1Layer->GetOutputSlot(0).Connect(pooling2Layer->GetInputSlot(0));
406  pooling2Layer->GetOutputSlot(0).Connect(conv2Layer->GetInputSlot(0));
407  conv2Layer->GetOutputSlot(0).Connect(pooling3Layer->GetInputSlot(0));
408  pooling3Layer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
409 
410  // Create the subgraph view for the whole network
411  return CreateSubgraphViewFrom(CreateInputsFrom({conv1Layer}),
412  CreateOutputsFrom({pooling3Layer}),
413  {conv1Layer,
414  pooling1Layer,
415  pooling2Layer,
416  conv2Layer,
417  pooling3Layer});
418 }
419 
420 // Creates a subgraph with only unoptimizable layers ("unoptimizable" is added to the layer's name)
421 SubgraphView::SubgraphViewPtr BuildFullyUnoptimizableSubgraph1(Graph& graph, LayerNameToLayerMap& layersInGraph)
422 {
423  const TensorInfo inputInfo ({ 1, 16, 16, 16 }, DataType::QAsymmU8, 1.0f, 0);
424  const TensorInfo outputInfo({ 1, 16, 16, 16 }, DataType::QAsymmU8, 1.0f, 0);
425  const TensorInfo weightInfo({ 16, 1, 1, 16 }, DataType::QAsymmU8, 0.9f, 0);
426  const TensorInfo biasInfo ({ 1, 1, 1, 16 }, DataType::Signed32, 0.9f, 0);
427 
428  Convolution2dDescriptor convolutionDescriptor;
429  convolutionDescriptor.m_StrideX = 1;
430  convolutionDescriptor.m_StrideY = 1;
431  convolutionDescriptor.m_BiasEnabled = true;
432  convolutionDescriptor.m_DataLayout = DataLayout::NHWC;
433 
434  // Construct the graph
435  Layer* const inputLayer = AddInputLayer(graph, "input layer", inputInfo);
436  Convolution2dLayer* const convLayer = AddConvolutionLayer(graph, layersInGraph, convolutionDescriptor,
437  "conv layer unoptimizable", weightInfo, biasInfo,
438  outputInfo);
439  Layer* const outputLayer = AddOutputLayer(graph, "output layer");
440 
441  // Connect the network
442  inputLayer->GetOutputSlot(0).Connect(convLayer->GetInputSlot(0));
443  convLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
444 
445  // Create the subgraph view for the whole network
446  return CreateSubgraphViewFrom(CreateInputsFrom({convLayer}),
447  CreateOutputsFrom({convLayer}),
448  {convLayer});
449 }
450 
451 // Creates a subgraph with some unoptimizable layers ("unoptimizable" is added to the layer's name)
452 SubgraphView::SubgraphViewPtr BuildPartiallyOptimizableSubgraph1(Graph& graph, LayerNameToLayerMap& layersInGraph)
453 {
454  const TensorInfo inputInfo ({ 1, 16, 16, 16 }, DataType::QAsymmU8, 1.0f, 0);
455  const TensorInfo outputInfo({ 1, 16, 16, 16 }, DataType::QAsymmU8, 1.0f, 0);
456  const TensorInfo weightInfo({ 16, 1, 1, 16 }, DataType::QAsymmU8, 0.9f, 0);
457  const TensorInfo biasInfo ({ 1, 1, 1, 16 }, DataType::Signed32, 0.9f, 0);
458 
459  Convolution2dDescriptor convolutionDescriptor;
460  convolutionDescriptor.m_StrideX = 1;
461  convolutionDescriptor.m_StrideY = 1;
462  convolutionDescriptor.m_BiasEnabled = true;
463  convolutionDescriptor.m_DataLayout = DataLayout::NHWC;
464 
465  // Construct the graph
466  Layer* const inputLayer = AddInputLayer(graph, "input layer", inputInfo);
467  Convolution2dLayer* const conv1Layer = AddConvolutionLayer(graph, layersInGraph, convolutionDescriptor,
468  "conv1 layer", weightInfo, biasInfo, outputInfo);
469  Convolution2dLayer* const conv2Layer = AddConvolutionLayer(graph, layersInGraph, convolutionDescriptor,
470  "conv2 layer unoptimizable", weightInfo, biasInfo,
471  outputInfo);
472  Convolution2dLayer* const conv3Layer = AddConvolutionLayer(graph, layersInGraph, convolutionDescriptor,
473  "conv3 layer", weightInfo, biasInfo, outputInfo);
474  Convolution2dLayer* const conv4Layer = AddConvolutionLayer(graph, layersInGraph, convolutionDescriptor,
475  "conv4 layer unoptimizable", weightInfo, biasInfo,
476  outputInfo);
477  Convolution2dLayer* const conv5Layer = AddConvolutionLayer(graph, layersInGraph, convolutionDescriptor,
478  "conv5 layer", weightInfo, biasInfo, outputInfo);
479  Layer* const outputLayer = AddOutputLayer(graph, "output layer");
480 
481  // Connect the network
482  inputLayer->GetOutputSlot(0).Connect(conv1Layer->GetInputSlot(0));
483  conv1Layer->GetOutputSlot(0).Connect(conv2Layer->GetInputSlot(0));
484  conv2Layer->GetOutputSlot(0).Connect(conv3Layer->GetInputSlot(0));
485  conv3Layer->GetOutputSlot(0).Connect(conv4Layer->GetInputSlot(0));
486  conv4Layer->GetOutputSlot(0).Connect(conv5Layer->GetInputSlot(0));
487  conv5Layer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
488 
489  // Create the subgraph view for the whole network
490  return CreateSubgraphViewFrom(CreateInputsFrom({conv1Layer}),
491  CreateOutputsFrom({conv5Layer}),
492  {conv1Layer,
493  conv2Layer,
494  conv3Layer,
495  conv4Layer,
496  conv5Layer});
497 }
498 
499 // Creates a subgraph with some input unoptimizable layers ("unoptimizable" is added to the layer's name),
500 // this is meant to test input slots coming from different layers
501 SubgraphView::SubgraphViewPtr BuildPartiallyOptimizableSubgraph2(Graph& graph, LayerNameToLayerMap& layersInGraph)
502 {
503  const TensorInfo inputInfo ({ 1, 16, 16, 16 }, DataType::QAsymmU8, 1.0f, 0);
504  const TensorInfo outputInfo({ 1, 16, 16, 16 }, DataType::QAsymmU8, 1.0f, 0);
505  const TensorInfo weightInfo({ 16, 1, 1, 16 }, DataType::QAsymmU8, 0.9f, 0);
506  const TensorInfo biasInfo ({ 1, 1, 1, 16 }, DataType::Signed32, 0.9f, 0);
507 
508  Convolution2dDescriptor convolutionDescriptor;
509  convolutionDescriptor.m_StrideX = 1;
510  convolutionDescriptor.m_StrideY = 1;
511  convolutionDescriptor.m_BiasEnabled = true;
512  convolutionDescriptor.m_DataLayout = DataLayout::NHWC;
513 
514  // Construct the graph
515  Layer* const input1Layer = AddInputLayer(graph, "input1 layer", inputInfo, 0);
516  Layer* const input2Layer = AddInputLayer(graph, "input2 layer", inputInfo, 1);
517  Convolution2dLayer* const conv1Layer = AddConvolutionLayer(graph, layersInGraph, convolutionDescriptor,
518  "conv1 layer", weightInfo, biasInfo, outputInfo);
519  Convolution2dLayer* const conv2Layer = AddConvolutionLayer(graph, layersInGraph, convolutionDescriptor,
520  "conv2 layer unoptimizable", weightInfo, biasInfo,
521  outputInfo);
522  Convolution2dLayer* const conv3Layer = AddConvolutionLayer(graph, layersInGraph, convolutionDescriptor,
523  "conv3 layer", weightInfo, biasInfo, outputInfo);
524  AdditionLayer* const addLayer = AddAdditionaLayer(graph, layersInGraph, "add layer", outputInfo);
525  Layer* const outputLayer = AddOutputLayer(graph, "output layer");
526 
527  // Connect the network
528  input1Layer->GetOutputSlot(0).Connect(conv1Layer->GetInputSlot(0));
529  input2Layer->GetOutputSlot(0).Connect(conv2Layer->GetInputSlot(0));
530  conv1Layer->GetOutputSlot(0).Connect(addLayer->GetInputSlot(0));
531  conv2Layer->GetOutputSlot(0).Connect(conv3Layer->GetInputSlot(0));
532  conv3Layer->GetOutputSlot(0).Connect(addLayer->GetInputSlot(1));
533  addLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
534 
535  // Create the subgraph view for the whole network
536  return CreateSubgraphViewFrom(CreateInputsFrom({conv1Layer,
537  conv2Layer}),
538  CreateOutputsFrom({addLayer}),
539  {conv1Layer,
540  conv2Layer,
541  conv3Layer,
542  addLayer});
543 }
544 
545 // The input subgraph contains only a single unsupported layer (only convolutions are unsupported by the mock backend)
546 void FullyUnsupporteSubgraphTestImpl1()
547 {
548  Graph graph;
549  LayerNameToLayerMap layersInGraph;
550 
551  // Create an unsupported subgraph
552  SubgraphView::SubgraphViewPtr subgraphPtr = BuildFullyUnsupportedSubgraph1(graph, layersInGraph);
553  CHECK((subgraphPtr != nullptr));
554 
555  const SubgraphView::InputSlots& subgraphInputSlots = subgraphPtr->GetInputSlots();
556  const SubgraphView::OutputSlots& subgraphOutputSlots = subgraphPtr->GetOutputSlots();
557  const SubgraphView::Layers& subgraphLayers = subgraphPtr->GetLayers();
558 
559  CHECK(subgraphInputSlots.size() == 1);
560  CHECK(subgraphOutputSlots.size() == 1);
561  CHECK(subgraphLayers.size() == 1);
562 
563  CHECK(Contains(layersInGraph, "pooling layer"));
564 
565  // Create a mock backend object
566  MockBackendInitialiser initialiser; // Register the Mock Backend
567  auto backendObjPtr = CreateBackendObject(MockBackendId());
568  CHECK((backendObjPtr != nullptr));
569 
570  // Optimize the subgraph
571  OptimizationViews optimizationViews;
572 
573  // Check that the optimization is carried out correctly, but no optimization is performed
574  CHECK_NOTHROW(optimizationViews = backendObjPtr->OptimizeSubgraphView(*subgraphPtr));
575 
576  // =======================================================================
577  // The expected results are:
578  // - No substitutions
579  // - Exactly one failed subgraph, corresponding to the whole original one
580  // - No untouched subgraphs
581  // =======================================================================
582 
583  // -----------------------
584  // Check the substitutions
585  // -----------------------
586 
587  CHECK(optimizationViews.GetSubstitutions().empty());
588 
589  // --------------------------
590  // Check the failed subgraphs
591  // --------------------------
592 
593  const OptimizationViews::Subgraphs& failedSubgraphs = optimizationViews.GetFailedSubgraphs();
594  CHECK(failedSubgraphs.size() == 1);
595 
596  CheckFailedSubgraph(failedSubgraphs.at(0),
597  { subgraphInputSlots.size(), subgraphOutputSlots.size(), subgraphLayers.size() },
598  subgraphInputSlots,
599  subgraphOutputSlots,
600  subgraphLayers);
601 
602  // -----------------------------
603  // Check the untouched subgraphs
604  // -----------------------------
605 
606  CHECK(optimizationViews.GetUntouchedSubgraphs().empty());
607 }
608 
609 // The input subgraph contains only unsupported layers (only convolutions are unsupported by the mock backend)
610 void FullyUnsupporteSubgraphTestImpl2()
611 {
612  Graph graph;
613  LayerNameToLayerMap layersInGraph;
614 
615  // Create an unsupported subgraph
616  SubgraphView::SubgraphViewPtr subgraphPtr = BuildFullyUnsupportedSubgraph2(graph, layersInGraph);
617  CHECK((subgraphPtr != nullptr));
618 
619  const SubgraphView::InputSlots& subgraphInputSlots = subgraphPtr->GetInputSlots();
620  const SubgraphView::OutputSlots& subgraphOutputSlots = subgraphPtr->GetOutputSlots();
621  const SubgraphView::Layers& subgraphLayers = subgraphPtr->GetLayers();
622 
623  CHECK(subgraphInputSlots.size() == 1);
624  CHECK(subgraphOutputSlots.size() == 1);
625  CHECK(subgraphLayers.size() == 3);
626 
627  CHECK(Contains(layersInGraph, "pooling1 layer"));
628  CHECK(Contains(layersInGraph, "pooling2 layer"));
629  CHECK(Contains(layersInGraph, "pooling3 layer"));
630 
631  // Create a mock backend object
632  MockBackendInitialiser initialiser; // Register the Mock Backend
633  auto backendObjPtr = CreateBackendObject(MockBackendId());
634  CHECK((backendObjPtr != nullptr));
635 
636  // Optimize the subgraph
637  OptimizationViews optimizationViews;
638 
639  // Check that the optimization is carried out correctly, but no optimization is performed
640  CHECK_NOTHROW(optimizationViews = backendObjPtr->OptimizeSubgraphView(*subgraphPtr));
641 
642  // =======================================================================
643  // The expected results are:
644  // - No substitutions
645  // - Exactly one failed subgraph, corresponding to the whole original one
646  // - No untouched subgraphs
647  // =======================================================================
648 
649  // -----------------------
650  // Check the substitutions
651  // -----------------------
652 
653  CHECK(optimizationViews.GetSubstitutions().empty());
654 
655  // --------------------------
656  // Check the failed subgraphs
657  // --------------------------
658 
659  const OptimizationViews::Subgraphs& failedSubgraphs = optimizationViews.GetFailedSubgraphs();
660  CHECK(failedSubgraphs.size() == 1);
661 
662  std::list<Layer*> expectedFailedLayers{ layersInGraph.at("pooling1 layer"),
663  layersInGraph.at("pooling2 layer"),
664  layersInGraph.at("pooling3 layer") };
665 
666  const SubgraphView& failedSubgraph = failedSubgraphs.at(0);
667 
668  CheckFailedSubgraph(failedSubgraph,
669  { subgraphInputSlots.size(), subgraphOutputSlots.size(), subgraphLayers.size() },
670  subgraphInputSlots,
671  subgraphOutputSlots,
672  subgraphLayers);
673 
674  const SubgraphView::Layers& failedSubgraphLayers = failedSubgraph.GetLayers();
675 
676  CHECK_EQ(failedSubgraphLayers.front() + 0, expectedFailedLayers.front() + 0);
677  CHECK_EQ(failedSubgraphLayers.front() + 1, expectedFailedLayers.front() + 1);
678  CHECK_EQ(failedSubgraphLayers.front() + 2, expectedFailedLayers.front() + 2);
679 
680  // -----------------------------
681  // Check the untouched subgraphs
682  // -----------------------------
683 
684  CHECK(optimizationViews.GetUntouchedSubgraphs().empty());
685 }
686 
687 // A simple case with only one layer (convolution) to optimize, supported by the mock backend
688 void FullyOptimizableSubgraphTestImpl1()
689 {
690  Graph graph;
691  LayerNameToLayerMap layersInGraph;
692 
693  // Create a fully optimizable subgraph
694  SubgraphViewSelector::SubgraphViewPtr subgraphPtr = BuildFullyOptimizableSubgraph1(graph, layersInGraph);
695  CHECK((subgraphPtr != nullptr));
696 
697  const SubgraphView::InputSlots& subgraphInputSlots = subgraphPtr->GetInputSlots();
698  const SubgraphView::OutputSlots& subgraphOutputSlots = subgraphPtr->GetOutputSlots();
699  const SubgraphView::Layers& subgraphLayers = subgraphPtr->GetLayers();
700 
701  CHECK(subgraphInputSlots.size() == 1);
702  CHECK(subgraphOutputSlots.size() == 1);
703  CHECK(subgraphLayers.size() == 1);
704 
705  CHECK(Contains(layersInGraph, "conv layer"));
706 
707  // Create a mock backend object
708  MockBackendInitialiser initialiser; // Register the Mock Backend
709  auto backendObjPtr = CreateBackendObject(MockBackendId());
710  CHECK((backendObjPtr != nullptr));
711 
712  // Optimize the subgraph
713  OptimizationViews optimizationViews;
714 
715  // Check that the optimization is carried out correctly
716  CHECK_NOTHROW(optimizationViews = backendObjPtr->OptimizeSubgraphView(*subgraphPtr));
717 
718  // ===========================================================================================
719  // The expected results are:
720  // - Exactly one substitution, mapping the whole input subgraph to a new replacement subgraph
721  // - No failed subgraphs
722  // - No untouched subgraphs
723  // ===========================================================================================
724 
725  // -----------------------
726  // Check the substitutions
727  // -----------------------
728 
729  const OptimizationViews::Substitutions& substitutions = optimizationViews.GetSubstitutions();
730  CHECK(substitutions.size() == 1);
731 
732  CheckSubstitution(substitutions.at(0),
733  { subgraphInputSlots.size(), subgraphOutputSlots.size(), subgraphLayers.size() },
734  { subgraphInputSlots.size(), subgraphOutputSlots.size(), 1 },
735  subgraphInputSlots,
736  subgraphOutputSlots,
737  subgraphLayers);
738 
739  // --------------------------
740  // Check the failed subgraphs
741  // --------------------------
742 
743  CHECK(optimizationViews.GetFailedSubgraphs().empty());
744 
745  // -----------------------------
746  // Check the untouched subgraphs
747  // -----------------------------
748 
749  CHECK(optimizationViews.GetUntouchedSubgraphs().empty());
750 }
751 
752 // A case with five layers (all convolutions) to optimize, all supported by the mock backend
753 void FullyOptimizableSubgraphTestImpl2()
754 {
755  Graph graph;
756  LayerNameToLayerMap layersInGraph;
757 
758  // Create a fully optimizable subgraph
759  SubgraphViewSelector::SubgraphViewPtr subgraphPtr = BuildFullyOptimizableSubgraph2(graph, layersInGraph);
760  CHECK((subgraphPtr != nullptr));
761 
762  const SubgraphView::InputSlots& subgraphInputSlots = subgraphPtr->GetInputSlots();
763  const SubgraphView::OutputSlots& subgraphOutputSlots = subgraphPtr->GetOutputSlots();
764  const SubgraphView::Layers& subgraphLayers = subgraphPtr->GetLayers();
765 
766  CHECK(subgraphPtr->GetInputSlots().size() == 1);
767  CHECK(subgraphPtr->GetOutputSlots().size() == 1);
768  CHECK(subgraphPtr->GetLayers().size() == 5);
769 
770  CHECK(Contains(layersInGraph, "conv1 layer"));
771  CHECK(Contains(layersInGraph, "conv2 layer"));
772  CHECK(Contains(layersInGraph, "conv3 layer"));
773  CHECK(Contains(layersInGraph, "conv4 layer"));
774  CHECK(Contains(layersInGraph, "conv5 layer"));
775 
776  // Create a mock backend object
777  MockBackendInitialiser initialiser; // Register the Mock Backend
778  auto backendObjPtr = CreateBackendObject(MockBackendId());
779  CHECK((backendObjPtr != nullptr));
780 
781  // Optimize the subgraph
782  OptimizationViews optimizationViews;
783 
784  // Check that the optimization is carried out correctly
785  CHECK_NOTHROW(optimizationViews = backendObjPtr->OptimizeSubgraphView(*subgraphPtr));
786 
787  // ===========================================================================================
788  // The expected results are:
789  // - Exactly one substitution, mapping the whole input subgraph to a new replacement subgraph
790  // - No failed subgraphs
791  // - No untouched subgraphs
792  // ===========================================================================================
793 
794  // -----------------------
795  // Check the substitutions
796  // -----------------------
797 
798  const OptimizationViews::Substitutions& substitutions = optimizationViews.GetSubstitutions();
799  CHECK(substitutions.size() == 1);
800 
801  std::list<Layer*> expectedSubstitutableLayers{ layersInGraph.at("conv1 layer"),
802  layersInGraph.at("conv2 layer"),
803  layersInGraph.at("conv3 layer"),
804  layersInGraph.at("conv4 layer"),
805  layersInGraph.at("conv5 layer") };
806 
807  const OptimizationViews::SubstitutionPair& substitution = substitutions.at(0);
808 
809  CheckSubstitution(substitution,
810  { subgraphInputSlots.size(), subgraphOutputSlots.size(), subgraphLayers.size() },
811  { subgraphInputSlots.size(), subgraphOutputSlots.size(), 1 },
812  subgraphInputSlots,
813  subgraphOutputSlots,
814  expectedSubstitutableLayers);
815 
816  const SubgraphView::Layers& substitutableSubgraphLayers = substitution.m_SubstitutableSubgraph.GetLayers();
817 
818  CHECK_EQ(substitutableSubgraphLayers.front() + 0, expectedSubstitutableLayers.front() + 0);
819  CHECK_EQ(substitutableSubgraphLayers.front() + 1, expectedSubstitutableLayers.front() + 1);
820  CHECK_EQ(substitutableSubgraphLayers.front() + 2, expectedSubstitutableLayers.front() + 2);
821  CHECK_EQ(substitutableSubgraphLayers.front() + 3, expectedSubstitutableLayers.front() + 3);
822  CHECK_EQ(substitutableSubgraphLayers.front() + 4, expectedSubstitutableLayers.front() + 4);
823 
824  // --------------------------
825  // Check the failed subgraphs
826  // --------------------------
827 
828  CHECK(optimizationViews.GetFailedSubgraphs().empty());
829 
830  // -----------------------------
831  // Check the untouched subgraphs
832  // -----------------------------
833 
834  CHECK(optimizationViews.GetUntouchedSubgraphs().empty());
835 }
836 
837 // The input subgraph contaions both supported and unsupported layers
838 // (but only convolutions are unsupported by the mock backend)
839 void PartiallySupportedSubgraphTestImpl()
840 {
841  Graph graph;
842  LayerNameToLayerMap layersInGraph;
843 
844  // Create a fully optimizable subgraph
845  SubgraphViewSelector::SubgraphViewPtr subgraphPtr = BuildPartiallySupportedSubgraph(graph, layersInGraph);
846  CHECK((subgraphPtr != nullptr));
847 
848  const SubgraphView::InputSlots& subgraphInputSlots = subgraphPtr->GetInputSlots();
849  const SubgraphView::OutputSlots& subgraphOutputSlots = subgraphPtr->GetOutputSlots();
850  const SubgraphView::Layers& subgraphLayers = subgraphPtr->GetLayers();
851 
852  CHECK(subgraphInputSlots.size() == 1);
853  CHECK(subgraphOutputSlots.size() == 1);
854  CHECK(subgraphLayers.size() == 5);
855 
856  CHECK(Contains(layersInGraph, "conv1 layer"));
857  CHECK(Contains(layersInGraph, "pooling1 layer"));
858  CHECK(Contains(layersInGraph, "pooling2 layer"));
859  CHECK(Contains(layersInGraph, "conv2 layer"));
860  CHECK(Contains(layersInGraph, "pooling3 layer"));
861 
862  // Create a mock backend object
863  MockBackendInitialiser initialiser; // Register the Mock Backend
864  auto backendObjPtr = CreateBackendObject(MockBackendId());
865  CHECK((backendObjPtr != nullptr));
866 
867  // Optimize the subgraph
868  OptimizationViews optimizationViews;
869 
870  // Check that the optimization is carried out correctly
871  CHECK_NOTHROW(optimizationViews = backendObjPtr->OptimizeSubgraphView(*subgraphPtr));
872 
873  // ========================================================================
874  // The expected results are:
875  // - Exactly two substitution, corresponding to the supported layers
876  // - Exactly two failed subgraphs, corresponding to the unsupported layers
877  // - No untouched subgraphs
878  // ========================================================================
879 
880  // -----------------------
881  // Check the substitutions
882  // -----------------------
883 
884  OptimizationViews::Substitutions substitutions = optimizationViews.GetSubstitutions();
885  CHECK(substitutions.size() == 2);
886  // Sort into a consistent order
887  std::sort(substitutions.begin(), substitutions.end(), [](auto s1, auto s2) {
888  return strcmp(s1.m_SubstitutableSubgraph.GetLayers().front()->GetName(),
889  s2.m_SubstitutableSubgraph.GetLayers().front()->GetName()) < 0;
890  });
891 
892  std::vector<ExpectedSubgraphSize> expectedSubstitutableSubgraphSizes{ { 1, 1, 1 },
893  { 1, 1, 1 } };
894  std::vector<ExpectedSubgraphSize> expectedReplacementSubgraphSizes{ { 1, 1, 1 },
895  { 1, 1, 1 } };
896  std::vector<SubgraphView::InputSlots> expectedSubstitutableInputSlots
897  {
898  ConvertReferenceTypeToPointerType(layersInGraph.at("conv1 layer")->GetInputSlots()),
899  ConvertReferenceTypeToPointerType(layersInGraph.at("conv2 layer")->GetInputSlots())
900  };
901  std::vector<SubgraphView::OutputSlots> expectedSubstitutableOutputSlots
902  {
903  ConvertReferenceTypeToPointerType(layersInGraph.at("conv1 layer")->GetOutputSlots()),
904  ConvertReferenceTypeToPointerType(layersInGraph.at("conv2 layer")->GetOutputSlots())
905  };
906  std::vector<SubgraphView::Layers> expectedSubstitutableLayers
907  {
908  { layersInGraph.at("conv1 layer") },
909  { layersInGraph.at("conv2 layer") }
910  };
911 
912  for (size_t substitutionIndex = 0; substitutionIndex < substitutions.size(); substitutionIndex++)
913  {
914  CheckSubstitution(substitutions.at(substitutionIndex),
915  expectedSubstitutableSubgraphSizes.at(substitutionIndex),
916  expectedReplacementSubgraphSizes.at(substitutionIndex),
917  expectedSubstitutableInputSlots.at(substitutionIndex),
918  expectedSubstitutableOutputSlots.at(substitutionIndex),
919  expectedSubstitutableLayers.at(substitutionIndex));
920  }
921 
922  // --------------------------
923  // Check the failed subgraphs
924  // --------------------------
925 
926  OptimizationViews::Subgraphs failedSubgraphs = optimizationViews.GetFailedSubgraphs();
927  CHECK(failedSubgraphs.size() == 2);
928  // Sort into a consistent order
929  std::sort(failedSubgraphs.begin(), failedSubgraphs.end(), [](auto s1, auto s2) {
930  return strcmp(s1.GetLayers().front()->GetName(), s2.GetLayers().front()->GetName()) < 0;
931  });
932 
933  std::vector<ExpectedSubgraphSize> expectedFailedSubgraphSizes{ { 1, 1, 2 },
934  { 1, 1, 1 } };
935  std::vector<SubgraphView::InputSlots> expectedFailedInputSlots
936  {
937  ConvertReferenceTypeToPointerType(layersInGraph.at("pooling1 layer")->GetInputSlots()),
938  ConvertReferenceTypeToPointerType(layersInGraph.at("pooling3 layer")->GetInputSlots())
939  };
940  std::vector<SubgraphView::OutputSlots> expectedFailedOutputSlots
941  {
942  ConvertReferenceTypeToPointerType(layersInGraph.at("pooling2 layer")->GetOutputSlots()),
943  ConvertReferenceTypeToPointerType(layersInGraph.at("pooling3 layer")->GetOutputSlots())
944  };
945  std::vector<SubgraphView::Layers> expectedFailedLayers
946  {
947  { layersInGraph.at("pooling1 layer"),
948  layersInGraph.at("pooling2 layer") },
949  { layersInGraph.at("pooling3 layer") }
950  };
951 
952  for (size_t failedIndex = 0; failedIndex < failedSubgraphs.size(); failedIndex++)
953  {
954  CheckFailedSubgraph(failedSubgraphs.at(failedIndex),
955  expectedFailedSubgraphSizes.at(failedIndex),
956  expectedFailedInputSlots.at(failedIndex),
957  expectedFailedOutputSlots.at(failedIndex),
958  expectedFailedLayers.at(failedIndex));
959  }
960 
961  // -----------------------------
962  // Check the untouched subgraphs
963  // -----------------------------
964 
965  CHECK(optimizationViews.GetUntouchedSubgraphs().empty());
966 }
967 
// The input subgraph contains only unoptimizable layers ("unoptimizable" is added to the layer's name)
void FullyUnoptimizableSubgraphTestImpl1()
{
    Graph graph;
    LayerNameToLayerMap layersInGraph;

    // Create a fully UNoptimizable subgraph (previous comment said "optimizable" - stale copy/paste)
    SubgraphViewSelector::SubgraphViewPtr subgraphPtr = BuildFullyUnoptimizableSubgraph1(graph, layersInGraph);
    CHECK((subgraphPtr != nullptr));

    const SubgraphView::InputSlots& subgraphInputSlots = subgraphPtr->GetInputSlots();
    const SubgraphView::OutputSlots& subgraphOutputSlots = subgraphPtr->GetOutputSlots();
    const SubgraphView::Layers& subgraphLayers = subgraphPtr->GetLayers();

    // Sanity-check the shape of the subgraph under test
    CHECK(subgraphInputSlots.size() == 1);
    CHECK(subgraphOutputSlots.size() == 1);
    CHECK(subgraphLayers.size() == 1);

    CHECK(Contains(layersInGraph, "conv layer unoptimizable"));

    // Create a mock backend object
    MockBackendInitialiser initialiser; // Register the Mock Backend
    auto backendObjPtr = CreateBackendObject(MockBackendId());
    CHECK((backendObjPtr != nullptr));

    // Optimize the subgraph
    OptimizationViews optimizationViews;

    // Check that the optimization is carried out correctly
    CHECK_NOTHROW(optimizationViews = backendObjPtr->OptimizeSubgraphView(*subgraphPtr));

    // ============================================================================
    // The expected results are:
    //  - No substitutions
    //  - No failed subgraphs
    //  - Exactly one untouched subgraph, corresponding to the whole input subgraph
    // ============================================================================

    // -----------------------
    // Check the substitutions
    // -----------------------

    CHECK(optimizationViews.GetSubstitutions().empty());

    // --------------------------
    // Check the failed subgraphs
    // --------------------------

    CHECK(optimizationViews.GetFailedSubgraphs().empty());

    // -----------------------------
    // Check the untouched subgraphs
    // -----------------------------

    const OptimizationViews::Subgraphs& untouchedSubgraphs = optimizationViews.GetUntouchedSubgraphs();
    CHECK(untouchedSubgraphs.size() == 1);

    // The single untouched subgraph must be identical to the whole input subgraph
    CheckUntouchedSubgraph(untouchedSubgraphs.at(0),
                           { subgraphInputSlots.size(), subgraphOutputSlots.size(), subgraphLayers.size() },
                           subgraphInputSlots,
                           subgraphOutputSlots,
                           subgraphLayers);
}
1031 
1032 // The input subgraph contains some unoptimizable layers ("unoptimizable" is added to the layer's name)
1033 void PartiallyOptimizableSubgraphTestImpl1()
1034 {
1035  Graph graph;
1036  LayerNameToLayerMap layersInGraph;
1037 
1038  // Create a fully optimizable subgraph
1039  SubgraphViewSelector::SubgraphViewPtr subgraphPtr = BuildPartiallyOptimizableSubgraph1(graph, layersInGraph);
1040  CHECK((subgraphPtr != nullptr));
1041 
1042  const SubgraphView::InputSlots& subgraphInputSlots = subgraphPtr->GetInputSlots();
1043  const SubgraphView::OutputSlots& subgraphOutputSlots = subgraphPtr->GetOutputSlots();
1044  const SubgraphView::Layers& subgraphLayers = subgraphPtr->GetLayers();
1045 
1046  CHECK(subgraphInputSlots.size() == 1);
1047  CHECK(subgraphOutputSlots.size() == 1);
1048  CHECK(subgraphLayers.size() == 5);
1049 
1050  CHECK(Contains(layersInGraph, "conv1 layer"));
1051  CHECK(Contains(layersInGraph, "conv2 layer unoptimizable"));
1052  CHECK(Contains(layersInGraph, "conv3 layer"));
1053  CHECK(Contains(layersInGraph, "conv4 layer unoptimizable"));
1054  CHECK(Contains(layersInGraph, "conv5 layer"));
1055 
1056  // Create a mock backend object
1057  MockBackendInitialiser initialiser; // Register the Mock Backend
1058  auto backendObjPtr = CreateBackendObject(MockBackendId());
1059  CHECK((backendObjPtr != nullptr));
1060 
1061  // Optimize the subgraph
1062  OptimizationViews optimizationViews;
1063 
1064  // Check that the optimization is carried out correctly
1065  CHECK_NOTHROW(optimizationViews = backendObjPtr->OptimizeSubgraphView(*subgraphPtr));
1066 
1067  // ===============================================================================
1068  // The expected results are:
1069  // - Exactly three substitutions, corresponding to the optimizable layers
1070  // - No failed subgraphs
1071  // - Exactly two untouched subgraphs, corresponding to the non-optimizable layers
1072  // ===============================================================================
1073 
1074  // -----------------------
1075  // Check the substitutions
1076  // -----------------------
1077 
1078  OptimizationViews::Substitutions substitutions = optimizationViews.GetSubstitutions();
1079  CHECK(substitutions.size() == 3);
1080  // Sort into a consistent order
1081  std::sort(substitutions.begin(), substitutions.end(),
1082  [](auto s1, auto s2) { return strcmp(s1.m_SubstitutableSubgraph.GetLayers().front()->GetName(),
1083  s2.m_SubstitutableSubgraph.GetLayers().front()->GetName()) < 0; });
1084 
1085  std::vector<ExpectedSubgraphSize> expectedSubstitutableSubgraphSizes{ { 1, 1, 1 },
1086  { 1, 1, 1 },
1087  { 1, 1, 1 } };
1088  std::vector<ExpectedSubgraphSize> expectedReplacementSubgraphSizes{ { 1, 1, 1 },
1089  { 1, 1, 1 },
1090  { 1, 1, 1 } };
1091  std::vector<SubgraphView::InputSlots> expectedSubstitutableInputSlots
1092  {
1093  ConvertReferenceTypeToPointerType(layersInGraph.at("conv1 layer")->GetInputSlots()),
1094  ConvertReferenceTypeToPointerType(layersInGraph.at("conv3 layer")->GetInputSlots()),
1095  ConvertReferenceTypeToPointerType(layersInGraph.at("conv5 layer")->GetInputSlots())
1096  };
1097  std::vector<SubgraphView::OutputSlots> expectedSubstitutableOutputSlots
1098  {
1099  ConvertReferenceTypeToPointerType(layersInGraph.at("conv1 layer")->GetOutputSlots()),
1100  ConvertReferenceTypeToPointerType(layersInGraph.at("conv3 layer")->GetOutputSlots()),
1101  ConvertReferenceTypeToPointerType(layersInGraph.at("conv5 layer")->GetOutputSlots())
1102  };
1103  std::vector<SubgraphView::Layers> expectedSubstitutableLayers
1104  {
1105  { layersInGraph.at("conv1 layer") },
1106  { layersInGraph.at("conv3 layer") },
1107  { layersInGraph.at("conv5 layer") }
1108  };
1109 
1110  for (size_t substitutionIndex = 0; substitutionIndex < substitutions.size(); substitutionIndex++)
1111  {
1112  CheckSubstitution(substitutions.at(substitutionIndex),
1113  expectedSubstitutableSubgraphSizes.at(substitutionIndex),
1114  expectedReplacementSubgraphSizes.at(substitutionIndex),
1115  expectedSubstitutableInputSlots.at(substitutionIndex),
1116  expectedSubstitutableOutputSlots.at(substitutionIndex),
1117  expectedSubstitutableLayers.at(substitutionIndex));
1118  }
1119 
1120  // --------------------------
1121  // Check the failed subgraphs
1122  // --------------------------
1123 
1124  CHECK(optimizationViews.GetFailedSubgraphs().empty());
1125 
1126  // -----------------------------
1127  // Check the untouched subgraphs
1128  // -----------------------------
1129 
1130  OptimizationViews::Subgraphs untouchedSubgraphs = optimizationViews.GetUntouchedSubgraphs();
1131  CHECK(untouchedSubgraphs.size() == 2);
1132  // Sort into a consistent order
1133  std::sort(untouchedSubgraphs.begin(), untouchedSubgraphs.end(), [](auto s1, auto s2) {
1134  return strcmp(s1.GetLayers().front()->GetName(), s2.GetLayers().front()->GetName()) < 0;
1135  });
1136 
1137  std::vector<ExpectedSubgraphSize> expectedUntouchedSubgraphSizes{ { 1, 1, 1 },
1138  { 1, 1, 1 } };
1139  std::vector<SubgraphView::InputSlots> expectedUntouchedInputSlots
1140  {
1141  ConvertReferenceTypeToPointerType(layersInGraph.at("conv2 layer unoptimizable")->GetInputSlots()),
1142  ConvertReferenceTypeToPointerType(layersInGraph.at("conv4 layer unoptimizable")->GetInputSlots())
1143  };
1144  std::vector<SubgraphView::OutputSlots> expectedUntouchedOutputSlots
1145  {
1146  ConvertReferenceTypeToPointerType(layersInGraph.at("conv2 layer unoptimizable")->GetOutputSlots()),
1147  ConvertReferenceTypeToPointerType(layersInGraph.at("conv4 layer unoptimizable")->GetOutputSlots())
1148  };
1149  std::vector<SubgraphView::Layers> expectedUntouchedLayers
1150  {
1151  { layersInGraph.at("conv2 layer unoptimizable") },
1152  { layersInGraph.at("conv4 layer unoptimizable") }
1153  };
1154 
1155  for (size_t untouchedIndex = 0; untouchedIndex < untouchedSubgraphs.size(); untouchedIndex++)
1156  {
1157  CheckUntouchedSubgraph(untouchedSubgraphs.at(untouchedIndex),
1158  expectedUntouchedSubgraphSizes.at(untouchedIndex),
1159  expectedUntouchedInputSlots.at(untouchedIndex),
1160  expectedUntouchedOutputSlots.at(untouchedIndex),
1161  expectedUntouchedLayers.at(untouchedIndex));
1162  }
1163 }
1164 
1165 // The input subgraph contains some unoptimizable layers ("unoptimizable" is added to the layer's name),
1166 // this is meant to test input slots coming from different layers
1167 void PartiallyOptimizableSubgraphTestImpl2()
1168 {
1169  Graph graph;
1170  LayerNameToLayerMap layersInGraph;
1171 
1172  // Create a partially optimizable subgraph
1173  SubgraphViewSelector::SubgraphViewPtr subgraphPtr = BuildPartiallyOptimizableSubgraph2(graph, layersInGraph);
1174  CHECK((subgraphPtr != nullptr));
1175 
1176  const SubgraphView::InputSlots& subgraphInputSlots = subgraphPtr->GetInputSlots();
1177  const SubgraphView::OutputSlots& subgraphOutputSlots = subgraphPtr->GetOutputSlots();
1178  const SubgraphView::Layers& subgraphLayers = subgraphPtr->GetLayers();
1179 
1180  CHECK(subgraphInputSlots.size() == 2);
1181  CHECK(subgraphOutputSlots.size() == 1);
1182  CHECK(subgraphLayers.size() == 4);
1183 
1184  CHECK(Contains(layersInGraph, "conv1 layer"));
1185  CHECK(Contains(layersInGraph, "conv2 layer unoptimizable"));
1186  CHECK(Contains(layersInGraph, "conv3 layer"));
1187  CHECK(Contains(layersInGraph, "add layer"));
1188 
1189  // Create a mock backend object
1190  MockBackendInitialiser initialiser; // Register the Mock Backend
1191  auto backendObjPtr = CreateBackendObject(MockBackendId());
1192  CHECK((backendObjPtr != nullptr));
1193 
1194  // Optimize the subgraph
1195  OptimizationViews optimizationViews;
1196 
1197  // Check that the optimization is carried out correctly
1198  CHECK_NOTHROW(optimizationViews = backendObjPtr->OptimizeSubgraphView(*subgraphPtr));
1199 
1200  // ==============================================================================
1201  // The expected results are:
1202  // - Exactly one substitution, corresponding to the optimizable layers
1203  // - No failed subgraphs
1204  // - Exactly two untouched subgraphs, corresponding to the non-optimizable layer
1205  // ==============================================================================
1206 
1207  // -----------------------
1208  // Check the substitutions
1209  // -----------------------
1210 
1211  const OptimizationViews::Substitutions& substitutions = optimizationViews.GetSubstitutions();
1212  CHECK(substitutions.size() == 1);
1213 
1214  ExpectedSubgraphSize expectedSubstitutableSubgraphSizes{ 2, 1, 3 };
1215  ExpectedSubgraphSize expectedReplacementSubgraphSizes{ 2, 1, 1 };
1216 
1217  SubgraphView::InputSlots expectedSubstitutableInputSlots = {
1218  ConvertReferenceTypeToPointerType(layersInGraph.at("conv1 layer")->GetInputSlots()[0]),
1219  ConvertReferenceTypeToPointerType(layersInGraph.at("conv3 layer")->GetInputSlots()[0])
1220  };
1221  SubgraphView::OutputSlots expectedSubstitutableOutputSlots =
1222  {
1223  ConvertReferenceTypeToPointerType(layersInGraph.at("add layer")->GetOutputSlots()[0])
1224  };
1225  SubgraphView::Layers expectedSubstitutableLayers
1226  {
1227  layersInGraph.at("conv1 layer"),
1228  layersInGraph.at("conv3 layer"),
1229  layersInGraph.at("add layer")
1230  };
1231 
1232  CheckSubstitution(substitutions[0],
1233  expectedSubstitutableSubgraphSizes,
1234  expectedReplacementSubgraphSizes,
1235  expectedSubstitutableInputSlots,
1236  expectedSubstitutableOutputSlots,
1237  expectedSubstitutableLayers);
1238 
1239  // --------------------------
1240  // Check the failed subgraphs
1241  // --------------------------
1242 
1243  CHECK(optimizationViews.GetFailedSubgraphs().empty());
1244 
1245  // -----------------------------
1246  // Check the untouched subgraphs
1247  // -----------------------------
1248 
1249  const OptimizationViews::Subgraphs& untouchedSubgraphs = optimizationViews.GetUntouchedSubgraphs();
1250  CHECK(untouchedSubgraphs.size() == 1);
1251 
1252  std::vector<ExpectedSubgraphSize> expectedUntouchedSubgraphSizes{ { 1, 1, 1 } };
1253  std::vector<SubgraphView::InputSlots> expectedUntouchedInputSlots
1254  {
1255  ConvertReferenceTypeToPointerType(layersInGraph.at("conv2 layer unoptimizable")->GetInputSlots())
1256  };
1257  std::vector<SubgraphView::OutputSlots> expectedUntouchedOutputSlots
1258  {
1259  ConvertReferenceTypeToPointerType(layersInGraph.at("conv2 layer unoptimizable")->GetOutputSlots())
1260  };
1261  std::vector<SubgraphView::Layers> expectedUntouchedLayers
1262  {
1263  { layersInGraph.at("conv2 layer unoptimizable") }
1264  };
1265 
1266  for (size_t untouchedIndex = 0; untouchedIndex < untouchedSubgraphs.size(); untouchedIndex++)
1267  {
1268  CheckUntouchedSubgraph(untouchedSubgraphs.at(untouchedIndex),
1269  expectedUntouchedSubgraphSizes.at(untouchedIndex),
1270  expectedUntouchedInputSlots.at(untouchedIndex),
1271  expectedUntouchedOutputSlots.at(untouchedIndex),
1272  expectedUntouchedLayers.at(untouchedIndex));
1273  }
1274 }
1275 
1276 } // Anonymous namespace
1277 
// doctest suite: each test case simply forwards to the matching implementation
// in the anonymous namespace above.
TEST_SUITE("OptimizeSubGraph")
{
// NOTE(review): "FullyUnsupporte..." (missing 'd') matches the spelling of the helper
// functions defined earlier in this file; renaming would have to change both places.
TEST_CASE("FullyUnsupportedSubgraph1") { FullyUnsupporteSubgraphTestImpl1(); }
TEST_CASE("FullyUnsupportedSubgraph2") { FullyUnsupporteSubgraphTestImpl2(); }
TEST_CASE("FullyOptimizableSubgraph1") { FullyOptimizableSubgraphTestImpl1(); }
TEST_CASE("FullyOptimizableSubgraph2") { FullyOptimizableSubgraphTestImpl2(); }
TEST_CASE("PartiallySupportedSubgraph") { PartiallySupportedSubgraphTestImpl(); }
TEST_CASE("FullyUnoptimizableSubgraph") { FullyUnoptimizableSubgraphTestImpl1(); }
TEST_CASE("PartiallyOptimizableSubgraph1") { PartiallyOptimizableSubgraphTestImpl1(); }
TEST_CASE("PartiallyOptimizableSubgraph2") { PartiallyOptimizableSubgraphTestImpl2(); }

}
TEST_SUITE("TestConstTensorLayerVisitor")
uint32_t m_PadBottom
Padding bottom value in the height dimension.
uint32_t m_PadLeft
Padding left value in the width dimension.
LayerT * AddLayer(Args &&... args)
Adds a new layer, of type LayerType, to the graph constructed with the arguments passed.
Definition: Graph.hpp:417
bool AreEqual(const CollectionType &lhs, const CollectionType &rhs)
uint32_t m_PoolWidth
Pooling width value.
A Convolution2dDescriptor for the Convolution2dLayer.
int Connect(InputSlot &destination)
Definition: Layer.cpp:83
std::vector< OutputSlot * > OutputSlots
The padding fields don't count and are ignored.
PaddingMethod m_PaddingMethod
The padding method to be used. (Exclude, IgnoreValue).
constexpr const char * MockBackendId()
uint32_t m_PadTop
Padding top value in the height dimension.
std::vector< SubgraphView > Subgraphs
std::vector< SubstitutionPair > Substitutions
Copyright (c) 2021 ARM Limited and Contributors.
SubgraphView m_SubstitutableSubgraph
Subgraph of Layers from the original graph which should be replaced.
uint32_t m_StrideX
Stride value when proceeding through input for the width dimension.
int LayerBindingId
Type of identifiers for bindable layers (inputs, outputs).
Definition: Types.hpp:277
The SubgraphView class represents a subgraph of a Graph.
uint32_t m_PoolHeight
Pooling height value.
const InputSlot & GetInputSlot(unsigned int index) const override
Get a const input slot handle by slot index.
Definition: Layer.hpp:316
uint32_t m_StrideX
Stride value when proceeding through input for the width dimension.
A layer user-provided data can be bound to (e.g. inputs, outputs).
Definition: OutputLayer.hpp:13
std::unique_ptr< SubgraphView > SubgraphViewPtr
uint32_t m_PadRight
Padding right value in the width dimension.
SubgraphView::InputSlots CreateInputsFrom(const std::vector< Layer *> &layers)
bool Contains(const CollectionType &collection, const typename CollectionType::value_type &item)
const Subgraphs & GetUntouchedSubgraphs() const
const Subgraphs & GetFailedSubgraphs() const
SubgraphView m_ReplacementSubgraph
A subgraph of new layers which will replace layers in m_SubstitutableSubgraph.
This layer represents a pooling 2d operation.
std::vector< InputSlot * > InputSlots
DataLayout m_DataLayout
The data layout to be used (NCHW, NHWC).
This layer represents an addition operation.
const InputSlots & GetInputSlots() const
const Substitutions & GetSubstitutions() const
std::unique_ptr< SubgraphView > SubgraphViewPtr
SubgraphView::SubgraphViewPtr CreateSubgraphViewFrom(SubgraphView::InputSlots &&inputs, SubgraphView::OutputSlots &&outputs, SubgraphView::Layers &&layers)
SubgraphView::OutputSlots CreateOutputsFrom(const std::vector< Layer *> &layers)
PoolingAlgorithm m_PoolType
The pooling algorithm to use (Max, Average, L2).
const OutputSlots & GetOutputSlots() const
void SetWeightAndBias(ConvolutionLayer *layer, const armnn::TensorInfo &weightInfo, const armnn::TensorInfo &biasInfo)
A layer user-provided data can be bound to (e.g. inputs, outputs).
Definition: InputLayer.hpp:13
void SetTensorInfo(const TensorInfo &tensorInfo) override
Definition: Layer.cpp:58
armnn::IBackendInternalUniquePtr CreateBackendObject(const armnn::BackendId &backendId)
const Layers & GetLayers() const
const OutputSlot & GetOutputSlot(unsigned int index=0) const override
Get the const output slot handle by slot index.
Definition: Layer.hpp:318
const char * GetName() const override
Returns the name of the layer.
Definition: Layer.hpp:311
This layer represents a convolution 2d operation.
A Pooling2dDescriptor for the Pooling2dLayer.
std::list< Layer * > Layers
uint32_t m_StrideY
Stride value when proceeding through input for the height dimension.