ArmNN 21.08 - SubgraphViewTests.cpp
1 //
2 // Copyright © 2017 Arm Ltd. All rights reserved.
3 // SPDX-License-Identifier: MIT
4 //
5 
6 #include <doctest/doctest.h>
7 
8 #include <Graph.hpp>
9 #include <SubgraphView.hpp>
10 #include <SubgraphViewSelector.hpp>
11 
12 #include <armnn/utility/NumericCast.hpp>
13 
15 #include <fstream>
16 #include <map>
17 #include <queue>
18 #include <random>
19 #include <chrono>
20 using namespace armnn;
21 
22 namespace
23 {
24 
25 bool AreAnySubgraphLayersPresentInGraph(const SubgraphView::Layers &subgraphLayers, const Graph &graph)
26 {
27  for(auto&& layer : subgraphLayers)
28  {
29  auto posInGraph = std::find(graph.begin(), graph.end(), layer);
30  if(posInGraph != graph.end())
31  {
32  return true;
33  }
34  }
35 
36  return false;
37 }
38 
39 //
40 // this helper only works if none of the layers that the inputs are connected to are selected
41 //
42 SubgraphView::InputSlots CreateInputsFrom(const std::vector<Layer*>& layers)
43 {
44  SubgraphView::InputSlots result;
45  for (auto&& layer : layers)
46  {
47  for (auto&& it = layer->BeginInputSlots(); it != layer->EndInputSlots(); ++it)
48  {
49  result.push_back(&(*it));
50  }
51  }
52  return result;
53 }
54 
55 //
56 // this helper only works if none of the layers that the outputs are connected to are selected
57 //
58 SubgraphView::OutputSlots CreateOutputsFrom(const std::vector<Layer*>& layers)
59 {
60  SubgraphView::OutputSlots result;
61  for (auto && layer : layers)
62  {
63  for (auto&& it = layer->BeginOutputSlots(); it != layer->EndOutputSlots(); ++it)
64  {
65  result.push_back(&(*it));
66  }
67  }
68  return result;
69 }
70 
71 //
72 // this takes the inputs, outputs and layers as a copy and then moves these copies into the
73 // resulting subgraph, so the pass by value is intentional
74 //
75 SubgraphView::SubgraphViewPtr CreateSubgraphViewFrom(SubgraphView::InputSlots&& inputs,
76  SubgraphView::OutputSlots&& outputs,
77  SubgraphView::Layers&& layers)
78 {
79  return std::make_unique<SubgraphView>(std::move(inputs), std::move(outputs), std::move(layers));
80 }
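
//
// A minimal usage sketch of the three helpers above (layerA and layerB are placeholders for
// layers already added to a Graph, as in the test cases below):
//
//     SubgraphViewSelector::SubgraphViewPtr view =
//         CreateSubgraphViewFrom(CreateInputsFrom({layerA}),
//                                CreateOutputsFrom({layerB}),
//                                {layerA, layerB});
//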
81 
82 template <typename T, typename Iterator>
83 std::vector<T> ToSortedArray(Iterator begin, Iterator end)
84 {
85  std::vector<T> result(begin, end);
86  std::sort(result.begin(), result.end());
87  return result;
88 }
89 
90 template <typename T>
91 void CompareVectors(const std::vector<T>& result, const std::vector<T>& expected)
92 {
93  CHECK(std::equal(result.begin(), result.end(), expected.begin(), expected.end()));
94 }
95 
96 void CompareSubgraphViews(SubgraphViewSelector::SubgraphViewPtr& result,
97  SubgraphViewSelector::SubgraphViewPtr& expected)
98 {
99  // expect both to be valid subgraphs
100  CHECK((result.get() != nullptr));
101  CHECK((expected.get() != nullptr));
102 
103  if (result.get() != nullptr && expected.get() != nullptr)
104  {
105  CHECK(result->GetInputSlots().size() == expected->GetInputSlots().size());
106  CHECK(result->GetOutputSlots().size() == expected->GetOutputSlots().size());
107  CHECK(result->GetLayers().size() == expected->GetLayers().size());
108 
109  auto resultLayers = ToSortedArray<Layer *>(result->GetLayers().begin(),
110  result->GetLayers().end());
111  auto expectedLayers = ToSortedArray<Layer *>(expected->GetLayers().begin(),
112  expected->GetLayers().end());
113  CompareVectors(resultLayers, expectedLayers);
114 
115  auto resultInputs = ToSortedArray<InputSlot *>(result->GetInputSlots().begin(),
116  result->GetInputSlots().end());
117  auto expectedInputs = ToSortedArray<InputSlot *>(expected->GetInputSlots().begin(),
118  expected->GetInputSlots().end());
119  CompareVectors(resultInputs, expectedInputs);
120 
121  auto resultOutputs = ToSortedArray<OutputSlot *>(result->GetOutputSlots().begin(),
122  result->GetOutputSlots().end());
123  auto expectedOutputs = ToSortedArray<OutputSlot *>(expected->GetOutputSlots().begin(),
124  expected->GetOutputSlots().end());
125  CompareVectors(resultOutputs, expectedOutputs);
126  }
127 }
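
//
// Note: CompareSubgraphViews compares the two views by sorting their raw Layer*, InputSlot* and
// OutputSlot* pointers first, so two views are considered equal when they contain the same
// elements regardless of the order in which they were collected.
//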
128 
129 } // namespace <anonymous>
130 
131 TEST_SUITE("SubgraphSubstitution")
132 {
133 TEST_CASE("SingleInputSingleOutput")
134 {
135  // Construct graph
136  Graph graph;
137 
138  Layer* const inputLayer = graph.AddLayer<InputLayer>(0, "input");
139 
140  Convolution2dDescriptor convDescriptor;
141  Layer* const convLayer1 = graph.AddLayer<Convolution2dLayer>(convDescriptor, "conv1");
142  Layer* const convLayer2 = graph.AddLayer<Convolution2dLayer>(convDescriptor, "conv2");
143 
144  Layer* const outputLayer = graph.AddLayer<OutputLayer>(0, "output");
145 
146  inputLayer->GetOutputSlot(0).Connect(convLayer1->GetInputSlot(0));
147  convLayer1->GetOutputSlot(0).Connect(convLayer2->GetInputSlot(0));
148  convLayer2->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
149 
150  // Construct sub-graph
151  SubgraphViewSelector::SubgraphViewPtr subgraph = CreateSubgraphViewFrom(CreateInputsFrom({convLayer1}),
152  CreateOutputsFrom({convLayer2}),
153  {});
154 
155  // Save sub-graph connections for comparison after substitution
156  IOutputSlot* subgraphInputConn = subgraph->GetInputSlot(0)->GetConnection();
157  IInputSlot* subgraphOutputConn = subgraph->GetOutputSlot(0)->GetConnection(0);
158 
159  // Construct dummy pre-compiled layer
160  PreCompiledDescriptor preCompiledDescriptor(1, 1);
161  Layer* const preCompiledLayer = graph.AddLayer<PreCompiledLayer>(preCompiledDescriptor, "pre-compiled");
162 
163  // Substitute sub-graph with pre-compiled layer
164  graph.SubstituteSubgraph(*subgraph, preCompiledLayer);
165 
166  // Check that connections are correct after substitution
167  CHECK_EQ(preCompiledLayer->GetInputSlot(0).GetConnection(), subgraphInputConn);
168  CHECK_EQ(preCompiledLayer->GetOutputSlot(0).GetConnection(0), subgraphOutputConn);
169 }
170 
171 TEST_CASE("SingleInputSingleOutputSubstituteGraph")
172 {
173  // Construct graph
174  Graph graph;
175 
176  Layer* const inputLayer = graph.AddLayer<InputLayer>(0, "input");
177 
178  Convolution2dDescriptor convDescriptor;
179  Layer* const convLayer1 = graph.AddLayer<Convolution2dLayer>(convDescriptor, "conv1");
180  Layer* const convLayer2 = graph.AddLayer<Convolution2dLayer>(convDescriptor, "conv2");
181 
182  Layer* const outputLayer = graph.AddLayer<OutputLayer>(0, "output");
183 
184  inputLayer->GetOutputSlot(0).Connect(convLayer1->GetInputSlot(0));
185  convLayer1->GetOutputSlot(0).Connect(convLayer2->GetInputSlot(0));
186  convLayer2->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
187 
188  // Construct sub-graph
189  SubgraphViewSelector::SubgraphViewPtr subgraph = CreateSubgraphViewFrom(CreateInputsFrom({convLayer1}),
190  CreateOutputsFrom({convLayer2}),
191  {});
192 
193  // Save sub-graph connections for comparison after substitution
194  IOutputSlot* subgraphInputConn = subgraph->GetInputSlot(0)->GetConnection();
195  IInputSlot* subgraphOutputConn = subgraph->GetOutputSlot(0)->GetConnection(0);
196 
197  // Construct second graph with a single pre-compiled layer
198  Graph substituteGraph;
199  PreCompiledDescriptor preCompiledDescriptor(1, 1);
200  Layer* const preCompiledLayer = substituteGraph.AddLayer<PreCompiledLayer>(preCompiledDescriptor, "pre-compiled");
201 
202  SubgraphViewSelector::SubgraphViewPtr substituteSubgraph =
203  CreateSubgraphViewFrom(CreateInputsFrom({preCompiledLayer}),
204  CreateOutputsFrom({preCompiledLayer}),
205  {preCompiledLayer});
206  // Substitute subgraph with pre-compiled layer
207  graph.SubstituteSubgraph(*subgraph, *substituteSubgraph);
208 
209  // Check that connections are correct after substitution
210  CHECK_EQ(preCompiledLayer->GetInputSlot(0).GetConnection(), subgraphInputConn);
211  CHECK_EQ(preCompiledLayer->GetOutputSlot(0).GetConnection(0), subgraphOutputConn);
212 }
213 
214 TEST_CASE("MultiInputSingleOutput")
215 {
216  // Construct graph
217  Graph graph;
218 
219  Layer* const inputLayer = graph.AddLayer<InputLayer>(0, "input");
220 
221  ViewsDescriptor splitterDescriptor(2);
222  Layer* const splitterLayer = graph.AddLayer<SplitterLayer>(splitterDescriptor, "splitter");
223 
224  Convolution2dDescriptor convDescriptor;
225  Layer* const convLayer1 = graph.AddLayer<Convolution2dLayer>(convDescriptor, "conv1");
226  Layer* const convLayer2 = graph.AddLayer<Convolution2dLayer>(convDescriptor, "conv2");
227 
228  OriginsDescriptor concatDescriptor(2);
229  Layer* const concatLayer = graph.AddLayer<ConcatLayer>(concatDescriptor, "concat");
230 
231  Layer* const outputLayer = graph.AddLayer<OutputLayer>(0, "output");
232 
233  inputLayer->GetOutputSlot(0).Connect(splitterLayer->GetInputSlot(0));
234  splitterLayer->GetOutputSlot(0).Connect(convLayer1->GetInputSlot(0));
235  splitterLayer->GetOutputSlot(1).Connect(convLayer2->GetInputSlot(0));
236  convLayer1->GetOutputSlot(0).Connect(concatLayer->GetInputSlot(0));
237  convLayer2->GetOutputSlot(0).Connect(concatLayer->GetInputSlot(1));
238  concatLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
239 
240  // Construct sub-graph
241  SubgraphViewSelector::SubgraphViewPtr subgraph = CreateSubgraphViewFrom(CreateInputsFrom({convLayer1, convLayer2}),
242  CreateOutputsFrom({concatLayer}),
243  {});
244 
245  // Save sub-graph connections for comparison after substitution
246  IOutputSlot* subgraphInputConn1 = subgraph->GetInputSlot(0)->GetConnection();
247  IOutputSlot* subgraphInputConn2 = subgraph->GetInputSlot(1)->GetConnection();
248 
249  IInputSlot* subgraphOutputConn = subgraph->GetOutputSlot(0)->GetConnection(0);
250 
251  // Construct dummy pre-compiled layer
252  PreCompiledDescriptor preCompiledDescriptor(2, 1);
253  Layer* const preCompiledLayer = graph.AddLayer<PreCompiledLayer>(preCompiledDescriptor, "pre-compiled");
254 
255  // Substitute sub-graph with pre-compiled layer
256  graph.SubstituteSubgraph(*subgraph, preCompiledLayer);
257 
258  // Check that connections are correct after substitution
259  CHECK_EQ(preCompiledLayer->GetInputSlot(0).GetConnection(), subgraphInputConn1);
260  CHECK_EQ(preCompiledLayer->GetInputSlot(1).GetConnection(), subgraphInputConn2);
261 
262  CHECK_EQ(preCompiledLayer->GetOutputSlot(0).GetConnection(0), subgraphOutputConn);
263 }
264 
265 TEST_CASE("SingleInputMultiOutput")
266 {
267  // Construct graph
268  Graph graph;
269 
270  Layer* const inputLayer = graph.AddLayer<InputLayer>(0, "input");
271 
272  Convolution2dDescriptor convDescriptor;
273  Layer* const convLayer1 = graph.AddLayer<Convolution2dLayer>(convDescriptor, "conv1");
274  Layer* const convLayer2 = graph.AddLayer<Convolution2dLayer>(convDescriptor, "conv2");
275  OriginsDescriptor concatDescriptor(2);
276  Layer* const concatLayer = graph.AddLayer<ConcatLayer>(concatDescriptor, "concat");
277  Layer* const outputLayer = graph.AddLayer<OutputLayer>(0, "output");
278 
279  ViewsDescriptor splitterDescriptor(2);
280  Layer* const splitterLayer = graph.AddLayer<SplitterLayer>(splitterDescriptor, "splitter");
281 
282  inputLayer->GetOutputSlot(0).Connect(splitterLayer->GetInputSlot(0));
283  splitterLayer->GetOutputSlot(0).Connect(convLayer1->GetInputSlot(0));
284  splitterLayer->GetOutputSlot(1).Connect(convLayer2->GetInputSlot(0));
285  convLayer1->GetOutputSlot(0).Connect(concatLayer->GetInputSlot(0));
286  convLayer2->GetOutputSlot(0).Connect(concatLayer->GetInputSlot(1));
287  concatLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
288 
289  // Construct sub-graph
290  SubgraphViewSelector::SubgraphViewPtr subgraph = CreateSubgraphViewFrom(CreateInputsFrom({splitterLayer}),
291  CreateOutputsFrom({convLayer1, convLayer2}),
292  {});
293 
294  // Save sub-graph connections for comparison after substitution
295  IOutputSlot* subgraphInputConn1 = subgraph->GetInputSlot(0)->GetConnection();
296 
297  IInputSlot* subgraphOutputConn1 = subgraph->GetOutputSlot(0)->GetConnection(0);
298  IInputSlot* subgraphOutputConn2 = subgraph->GetOutputSlot(1)->GetConnection(0);
299 
300  // Construct dummy pre-compiled layer
301  PreCompiledDescriptor preCompiledDescriptor(1, 2);
302  Layer* const preCompiledLayer = graph.AddLayer<PreCompiledLayer>(preCompiledDescriptor, "pre-compiled");
303 
304  // Substitute sub-graph with pre-compiled layer
305  graph.SubstituteSubgraph(*subgraph, preCompiledLayer);
306 
307  // Check that connections are correct after substitution
308  CHECK_EQ(preCompiledLayer->GetInputSlot(0).GetConnection(), subgraphInputConn1);
309 
310  CHECK_EQ(preCompiledLayer->GetOutputSlot(0).GetConnection(0), subgraphOutputConn1);
311  CHECK_EQ(preCompiledLayer->GetOutputSlot(1).GetConnection(0), subgraphOutputConn2);
312 }
313 
314 TEST_CASE("MultiInputMultiOutput")
315 {
316  // Construct graph
317  Graph graph;
318 
319  Layer* const inputLayer = graph.AddLayer<InputLayer>(0, "input");
320 
321  ViewsDescriptor splitterDescriptor(2);
322  Layer* const splitterLayer = graph.AddLayer<SplitterLayer>(splitterDescriptor, "splitter");
323 
324  Convolution2dDescriptor convDescriptor;
325  Layer* const convLayer1 = graph.AddLayer<Convolution2dLayer>(convDescriptor, "conv1");
326  Layer* const convLayer2 = graph.AddLayer<Convolution2dLayer>(convDescriptor, "conv2");
327 
328  OriginsDescriptor concatDescriptor(2);
329  Layer* const concatLayer = graph.AddLayer<ConcatLayer>(concatDescriptor, "concat");
330 
331  Layer* const outputLayer = graph.AddLayer<OutputLayer>(0, "output");
332 
333  inputLayer->GetOutputSlot(0).Connect(splitterLayer->GetInputSlot(0));
334  splitterLayer->GetOutputSlot(0).Connect(convLayer1->GetInputSlot(0));
335  splitterLayer->GetOutputSlot(1).Connect(convLayer2->GetInputSlot(0));
336  convLayer1->GetOutputSlot(0).Connect(concatLayer->GetInputSlot(0));
337  convLayer2->GetOutputSlot(0).Connect(concatLayer->GetInputSlot(1));
338  concatLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
339 
340  // Construct sub-graph
341  SubgraphViewSelector::SubgraphViewPtr subgraph = CreateSubgraphViewFrom(CreateInputsFrom({convLayer1, convLayer2}),
342  CreateOutputsFrom({convLayer1, convLayer2}),
343  {});
344 
345  // Save sub-graph connections for comparison after substitution
346  IOutputSlot* subgraphInputConn1 = subgraph->GetInputSlot(0)->GetConnection();
347  IOutputSlot* subgraphInputConn2 = subgraph->GetInputSlot(1)->GetConnection();
348 
349  IInputSlot* subgraphOutputConn1 = subgraph->GetOutputSlot(0)->GetConnection(0);
350  IInputSlot* subgraphOutputConn2 = subgraph->GetOutputSlot(1)->GetConnection(0);
351 
352  // Construct dummy pre-compiled layer
353  PreCompiledDescriptor preCompiledDescriptor(2, 2);
354  Layer* const preCompiledLayer = graph.AddLayer<PreCompiledLayer>(preCompiledDescriptor, "pre-compiled");
355 
356  // Substitute sub-graph with pre-compiled layer
357  graph.SubstituteSubgraph(*subgraph, preCompiledLayer);
358 
359  // Check that connections are correct after substitution
360  CHECK_EQ(preCompiledLayer->GetInputSlot(0).GetConnection(), subgraphInputConn1);
361  CHECK_EQ(preCompiledLayer->GetInputSlot(1).GetConnection(), subgraphInputConn2);
362 
363  CHECK_EQ(preCompiledLayer->GetOutputSlot(0).GetConnection(0), subgraphOutputConn1);
364  CHECK_EQ(preCompiledLayer->GetOutputSlot(1).GetConnection(0), subgraphOutputConn2);
365 }
366 
367 TEST_CASE("EraseReplacedLayers")
368 {
369  // Construct graph
370  Graph graph;
371 
372  graph.AddLayer<InputLayer>(0, "input");
373 
374  ViewsDescriptor splitterDescriptor(2);
375  Layer* const splitterLayer = graph.AddLayer<SplitterLayer>(splitterDescriptor, "splitter");
376 
377  Convolution2dDescriptor convDescriptor;
378  Layer* const convLayer1 = graph.AddLayer<Convolution2dLayer>(convDescriptor, "conv1");
379  Layer* const convLayer2 = graph.AddLayer<Convolution2dLayer>(convDescriptor, "conv2");
380 
381  OriginsDescriptor concatDescriptor(2);
382  Layer* const concatLayer = graph.AddLayer<ConcatLayer>(concatDescriptor, "concat");
383 
384  graph.AddLayer<OutputLayer>(0, "output");
385 
386  // Construct sub-graph
387  SubgraphViewSelector::SubgraphViewPtr subgraph = CreateSubgraphViewFrom({},
388  {},
389  {splitterLayer,
390  convLayer1,
391  convLayer2,
392  concatLayer});
393 
394  // Construct dummy pre-compiled layer
395  PreCompiledDescriptor preCompiledDescriptor(0, 0);
396  Layer* const preCompiledLayer = graph.AddLayer<PreCompiledLayer>(preCompiledDescriptor, "pre-compiled");
397 
398  // Save sub-graph layers for later verification
399  const SubgraphView::Layers subgraphLayers = subgraph->GetLayers();
400 
401  // Substitute sub-graph with pre-compiled layer
402  graph.SubstituteSubgraph(*subgraph, preCompiledLayer);
403 
404  // Check that the layers belonging to the sub-graph have been erased from the graph after substitution
405  CHECK(!AreAnySubgraphLayersPresentInGraph(subgraphLayers, graph));
406 }
407 
408 }
409 
410 TEST_SUITE("SubgraphSelection")
411 {
412 TEST_CASE("SubgraphForEmptyGraph")
413 {
414  Graph graph;
415  SubgraphView subgraph(graph);
416 
417  CHECK(subgraph.GetInputSlots().empty());
418  CHECK(subgraph.GetOutputSlots().empty());
419  CHECK(subgraph.GetLayers().empty());
420 }
421 
422 TEST_CASE("SubgraphForEntireGraph")
423 {
424  Graph graph;
425 
426  auto output = graph.AddLayer<OutputLayer>(0, "output");
427  auto mid0 = graph.InsertNewLayer<ActivationLayer>(output->GetInputSlot(0),
428  ActivationDescriptor{},
429  "mid0");
430  auto mid1 = graph.InsertNewLayer<ActivationLayer>(mid0->GetInputSlot(0),
431  ActivationDescriptor{},
432  "mid1");
433  graph.InsertNewLayer<InputLayer>(mid1->GetInputSlot(0), 0, "input");
434 
435  SubgraphView subgraph(graph);
436 
437  CHECK(subgraph.GetInputSlots().empty());
438  CHECK(subgraph.GetOutputSlots().empty());
439  CHECK(subgraph.GetLayers().size() == graph.GetNumLayers());
440 }
441 
442 TEST_CASE("NoSubgraphsForNoMatch")
443 {
444  Graph graph;
445 
446  auto output = graph.AddLayer<OutputLayer>(0, "output");
447  graph.InsertNewLayer<InputLayer>(output->GetInputSlot(0), 0, "input");
448 
449  SubgraphViewSelector::Subgraphs subgraphs =
450  SubgraphViewSelector::SelectSubgraphs(graph, [](const Layer &) { return false; });
451 
452  CHECK(subgraphs.empty());
453 }
454 
455 TEST_CASE("OneSubgraphsSelectedASingleMatch")
456 {
457  Graph graph;
458 
459  auto output = graph.AddLayer<OutputLayer>(0, "output");
460  graph.InsertNewLayer<InputLayer>(output->GetInputSlot(0), 0, "input");
461 
462  SubgraphViewSelector::Subgraphs subgraphs =
463  SubgraphViewSelector::SelectSubgraphs(
464  graph,
465  // select the output layer only
466  [](const Layer & l)
467  {
468  bool isOutput = l.GetNameStr().compare("output") == 0;
469  return isOutput;
470  });
471 
472  CHECK(subgraphs.size() == 1);
473  if (subgraphs.size() == 1)
474  {
475  auto expected = CreateSubgraphViewFrom(CreateInputsFrom({output}),
476  // outputs of 'output' will be empty
477  CreateOutputsFrom({output}),
478  {output});
479 
480  CompareSubgraphViews(subgraphs[0], expected);
481  }
482 }
483 
484 TEST_CASE("MultipleLayersSelectedInTheMiddle")
485 {
486  Graph graph;
487 
488  auto output = graph.AddLayer<OutputLayer>(0, "output");
489  auto mid0 = graph.InsertNewLayer<ActivationLayer>(output->GetInputSlot(0),
490  ActivationDescriptor{},
491  "mid0");
492  auto mid1 = graph.InsertNewLayer<ActivationLayer>(mid0->GetInputSlot(0),
493  ActivationDescriptor{},
494  "mid1");
495  graph.InsertNewLayer<InputLayer>(mid1->GetInputSlot(0), 0, "input");
496 
497  SubgraphViewSelector::Subgraphs subgraphs =
498  SubgraphViewSelector::SelectSubgraphs(
499  graph,
500  // select the middle layers only
501  [](const Layer & l)
502  {
503  bool toSelect = (l.GetType() == LayerType::Activation);
504  return toSelect;
505  });
506 
507  CHECK(subgraphs.size() == 1);
508  if (subgraphs.size() == 1)
509  {
510  auto expected = CreateSubgraphViewFrom(CreateInputsFrom({mid1}),
511  CreateOutputsFrom({mid0}),
512  {mid1, mid0});
513 
514  CompareSubgraphViews(subgraphs[0], expected);
515  }
516 }
517 
518 TEST_CASE("DisjointGraphs")
519 {
520  // The input graph has two disjoint sections and all layers are selected.
521  // This should result in two subgraphs being produced.
522  Graph graph;
523 
524  // the graph is constructed in reverse order
525  auto o0 = graph.AddLayer<OutputLayer>(0, "output0");
526  auto n0 = graph.InsertNewLayer<ActivationLayer>(o0->GetInputSlot(0), ActivationDescriptor{}, "intermediate0");
527  auto i0 = graph.InsertNewLayer<InputLayer>(n0->GetInputSlot(0), 0, "input0");
528 
529  auto o1 = graph.AddLayer<OutputLayer>(1, "output1");
530  auto n1 = graph.InsertNewLayer<ActivationLayer>(o1->GetInputSlot(0), ActivationDescriptor{}, "intermediate1");
531  auto i1 = graph.InsertNewLayer<InputLayer>(n1->GetInputSlot(0), 1, "input1");
532 
533  SubgraphViewSelector::Subgraphs subgraphs =
534  SubgraphViewSelector::SelectSubgraphs(graph,
535  // select all layers
536  [](const Layer&) {
537  return true;
538  });
539 
540  // expected results to test against
541  auto expected1 = CreateSubgraphViewFrom({}, {}, { o0, n0, i0 });
542  auto expected2 = CreateSubgraphViewFrom({}, {}, { o1, n1, i1 });
543  CHECK(subgraphs.size() == 2);
544  if (subgraphs.size() == 2)
545  {
546  CHECK((subgraphs[0] != nullptr));
547  CHECK((subgraphs[1] != nullptr));
548  if (subgraphs[0].get() != nullptr && subgraphs[1].get() != nullptr)
549  {
550  if (std::find(subgraphs[0]->GetLayers().begin(), subgraphs[0]->GetLayers().end(), i0) !=
551  subgraphs[0]->GetLayers().end())
552  {
553  CompareSubgraphViews(subgraphs[0], expected1);
554  CompareSubgraphViews(subgraphs[1], expected2);
555  }
556  else
557  {
558  CompareSubgraphViews(subgraphs[0], expected2);
559  CompareSubgraphViews(subgraphs[1], expected1);
560  }
561  }
562  }
563 }
564 
565 TEST_CASE("IslandInTheMiddle")
566 {
567  // This case represents the scenario where a non-selected X1 node is placed in the middle
568  // of the selected M* nodes.
569  // This checks that we don't merge M6 and M3 and create a dependency loop.
570  /*
571        M0
572       / \
573      M1  M4
574      |   |
575      M2  X1 < the island in the middle !
576      |   |
577      M3  M5
578       \ /
579        M6
580  */
581  Graph graph;
582 
583  OriginsDescriptor concatDescriptor(2);
584  auto m6 = graph.AddLayer<ConcatLayer>(concatDescriptor, "m6");
585  auto m3 = graph.InsertNewLayer<ActivationLayer>(m6->GetInputSlot(0),
586  ActivationDescriptor{},
587  "m3");
588  auto m2 = graph.InsertNewLayer<ActivationLayer>(m3->GetInputSlot(0),
589  ActivationDescriptor{},
590  "m2");
591  auto m1 = graph.InsertNewLayer<ActivationLayer>(m2->GetInputSlot(0),
592  ActivationDescriptor{},
593  "m1");
594  auto m0 = graph.InsertNewLayer<InputLayer>(m1->GetInputSlot(0), 0, "m0");
595 
596  auto m5 = graph.InsertNewLayer<ActivationLayer>(m6->GetInputSlot(1),
597  ActivationDescriptor{},
598  "m5");
599  auto x1 = graph.InsertNewLayer<ActivationLayer>(m5->GetInputSlot(0),
600  ActivationDescriptor{},
601  "x1");
602  auto m4 = graph.InsertNewLayer<ActivationLayer>(x1->GetInputSlot(0),
603  ActivationDescriptor{},
604  "m4");
605 
606  // Connect the other branch to the input layer
607  m0->GetOutputSlot(0).Connect(m4->GetInputSlot(0));
608 
609  // Select all layers whose name starts with 'm'
610  SubgraphViewSelector::Subgraphs subgraphs =
611  SubgraphViewSelector::SelectSubgraphs(
612  graph,
613  // select the middle layers only
614  [](const Layer& l)
615  {
616  bool toSelect = std::string(l.GetName())[0] == 'm';
617  return toSelect;
618  });
619 
620  // expected results to test against
621  auto largerSubgraph = CreateSubgraphViewFrom(CreateInputsFrom({ m0 }),
622  CreateOutputsFrom({ m3, m4 }),
623  { m0, m1, m2, m3, m4 });
624 
625  auto smallerSubgraph =
626  CreateSubgraphViewFrom(std::vector<InputSlot*>{ &m5->GetInputSlot(0), & m6->GetInputSlot(0) },
627  std::vector<OutputSlot*>{},
628  { m5, m6 });
629 
630  CHECK(subgraphs.size() == 2);
631  if (subgraphs.size() == 2)
632  {
633  // we need to have valid subgraph pointers here
634  CHECK((subgraphs[0] != nullptr));
635  CHECK((subgraphs[1] != nullptr));
636 
637  if (subgraphs[0].get() != nullptr && subgraphs[1].get() != nullptr)
638  {
639  // sort the subgraphs by layer size, so it is simpler to test
640  std::sort(subgraphs.begin(), subgraphs.end(),
641  [](SubgraphViewSelector::SubgraphViewPtr& lhs, SubgraphViewSelector::SubgraphViewPtr& rhs)
642  {
643  return (lhs->GetLayers().size() < rhs->GetLayers().size());
644  }
645  );
646 
647  CHECK(subgraphs[0]->GetLayers().size() == 2);
648  CHECK(subgraphs[1]->GetLayers().size() == 5);
649 
650  CompareSubgraphViews(subgraphs[0], smallerSubgraph);
651  CompareSubgraphViews(subgraphs[1], largerSubgraph);
652  }
653  }
654 }
655 
656 TEST_CASE("MultipleSimpleSubgraphs")
657 {
658  // This test case represents the scenario when we have two distinct subgraphs
659  // in a simple linear network. The selected nodes are the M* and the
660  // non-selected ones are the X*
661  //
662  // X1 -> M1 -> M2 -> X2 -> M3 -> X3
663  //
664  // The expected result is two subgraphs, one with {M1, M2} and another one
665  // with {M3}
666  //
667  Graph graph;
668 
669  // the graph is constructed in reverse order
670  auto x3 = graph.AddLayer<OutputLayer>(0, "output");
671  auto m3 = graph.InsertNewLayer<ActivationLayer>(x3->GetInputSlot(0),
672  ActivationDescriptor{},
673  "m3");
674  auto x2 = graph.InsertNewLayer<Convolution2dLayer>(m3->GetInputSlot(0),
675  Convolution2dDescriptor{},
676  "x2");
677  auto m2 = graph.InsertNewLayer<ActivationLayer>(x2->GetInputSlot(0),
678  ActivationDescriptor{},
679  "m2");
680  auto m1 = graph.InsertNewLayer<ActivationLayer>(m2->GetInputSlot(0),
681  ActivationDescriptor{},
682  "m1");
683  graph.InsertNewLayer<InputLayer>(m1->GetInputSlot(0), 0, "x1");
684 
685  // All selected 'M*' layers will be of Activation type
686  SubgraphViewSelector::Subgraphs subgraphs =
687  SubgraphViewSelector::SelectSubgraphs(
688  graph,
689  // select the middle layers only
690  [](const Layer & l)
691  {
692  bool toSelect = (l.GetType() == LayerType::Activation);
693  return toSelect;
694  });
695 
696  // expected results to test against
697  auto largerSubgraph = CreateSubgraphViewFrom(CreateInputsFrom({m1}),
698  CreateOutputsFrom({m2}),
699  {m1, m2});
700 
701  auto smallerSubgraph = CreateSubgraphViewFrom(CreateInputsFrom({m3}),
702  CreateOutputsFrom({m3}),
703  {m3});
704 
705  CHECK(subgraphs.size() == 2);
706  if (subgraphs.size() == 2)
707  {
708  // we need to have valid subgraph pointers here
709  CHECK((subgraphs[0] != nullptr));
710  CHECK((subgraphs[1] != nullptr));
711 
712  if (subgraphs[0].get() != nullptr && subgraphs[1].get() != nullptr)
713  {
714  // sort the subgraphs by layer size, so it is simpler to test
715  std::sort(subgraphs.begin(), subgraphs.end(),
716  [](SubgraphViewSelector::SubgraphViewPtr& lhs, SubgraphViewSelector::SubgraphViewPtr& rhs)
717  {
718  return (lhs->GetLayers().size() < rhs->GetLayers().size());
719  }
720  );
721 
722  CHECK(subgraphs[0]->GetLayers().size() == 1);
723  CHECK(subgraphs[1]->GetLayers().size() == 2);
724 
725  CompareSubgraphViews(subgraphs[0], smallerSubgraph);
726  CompareSubgraphViews(subgraphs[1], largerSubgraph);
727  }
728  }
729 }
730 
731 TEST_CASE("SimpleLinearTest")
732 {
733  //X1 -> M1 -> M2 -> X2
734  //Where the input slots of M1 and the output slots of M2 are to be the subgraph boundaries.
735  Graph graph;
736 
737  ActivationDescriptor activationDefaults;
738 
739  auto layerX1 = graph.AddLayer<InputLayer>(0, "layerX1");
740  auto layerX2 = graph.AddLayer<OutputLayer>(0, "layerX2");
741  auto layerM1 = graph.AddLayer<ActivationLayer>(activationDefaults, "layerM1");
742  auto layerM2 = graph.AddLayer<ActivationLayer>(activationDefaults, "layerM2");
743 
744  // X1
745  // |
746  // M1
747  // |
748  // M2
749  // |
750  // X2
751 
752  layerX1->GetOutputSlot(0).Connect(layerM1->GetInputSlot(0));
753  layerM1->GetOutputSlot(0).Connect(layerM2->GetInputSlot(0));
754  layerM2->GetOutputSlot(0).Connect(layerX2->GetInputSlot(0));
755 
756  SubgraphViewSelector::Subgraphs subgraphs =
757  SubgraphViewSelector::SelectSubgraphs(
758  graph,
759  // select the activation layers M1 and M2
760  [](const Layer & l)
761  {
762  bool toSelect = (l.GetType() == LayerType::Activation);
763  return toSelect;
764  });
765 
766  CHECK(subgraphs.size() == 1);
767  if(subgraphs.size() == 1)
768  {
769  auto expected = CreateSubgraphViewFrom(CreateInputsFrom({layerM1}),
770  CreateOutputsFrom({layerM2}),
771  {layerM1, layerM2});
772 
773  CompareSubgraphViews(subgraphs[0], expected);
774  }
775 }
776 
777 TEST_CASE("MultiInputSingleOutput")
778 {
779  //X1 -> M1 -> M3 -> X3
780  //X2 -> M2 -> M3 -> X3
781  //Where the input slots of {M1, M2} and the output slots of M3 are to be the subgraph boundaries.
782  Graph graph;
783 
784  ActivationDescriptor activationDefaults;
785 
786  auto layerX1 = graph.AddLayer<InputLayer>(0, "layerX1");
787  auto layerX2 = graph.AddLayer<InputLayer>(1, "layerX2");
788  auto layerM1 = graph.AddLayer<ActivationLayer>(activationDefaults, "layerM1");
789  auto layerM2 = graph.AddLayer<ActivationLayer>(activationDefaults, "layerM2");
790  auto layerM3 = graph.AddLayer<AdditionLayer>("layerM3");
791  auto layerX3 = graph.AddLayer<OutputLayer>(0, "layerX3");
792 
793  //  X1  X2
794  //  |   |
795  //  M1  M2
796  //   \  |
797  //    \ |
798  //     \|
799  //      M3
800  //      |
801  //      |
802  //      X3
803 
804  layerX1->GetOutputSlot(0).Connect(layerM1->GetInputSlot(0));
805  layerX2->GetOutputSlot(0).Connect(layerM2->GetInputSlot(0));
806  layerM1->GetOutputSlot(0).Connect(layerM3->GetInputSlot(0));
807  layerM2->GetOutputSlot(0).Connect(layerM3->GetInputSlot(1));
808  layerM3->GetOutputSlot(0).Connect(layerX3->GetInputSlot(0));
809 
810  SubgraphViewSelector::Subgraphs subgraphs =
811  SubgraphViewSelector::SelectSubgraphs(
812  graph,
813  // select Activation and Addition Layers M1, M2 and M3
814  [](const Layer & l)
815  {
816  bool toSelect = (l.GetType() == LayerType::Activation
817  || l.GetType() == LayerType::Addition);
818  return toSelect;
819  });
820 
821  CHECK(subgraphs.size() == 1);
822  if (subgraphs.size() == 1)
823  {
824  auto expected = CreateSubgraphViewFrom(CreateInputsFrom({layerM1, layerM2}),
825  CreateOutputsFrom({layerM3}),
826  {layerM1, layerM2, layerM3});
827 
828  CompareSubgraphViews(subgraphs[0], expected);
829  }
830 }
831 
832 TEST_CASE("SingleInputMultiOutput")
833 {
834  //X1 -> M1 -> M2 -> X2
835  //X1 -> M1 -> M3 -> X3
836  //Where the input slots of M1 and the output slots of {M2, M3} are to be the subgraph boundaries.
837  Graph graph;
838 
839  ActivationDescriptor activationDefaults;
840  ViewsDescriptor viewDefaults(2,4);
841 
842  Layer* layerX1 = graph.AddLayer<InputLayer>(0, "layerX1");
843  Layer* layerM1 = graph.AddLayer<SplitterLayer>(viewDefaults, "layerM1");
844  Layer* layerM2 = graph.AddLayer<ActivationLayer>(activationDefaults, "layerM2");
845  Layer* layerM3 = graph.AddLayer<ActivationLayer>(activationDefaults, "layerM3");
846  Layer* layerX2 = graph.AddLayer<OutputLayer>(0, "layerX2");
847  Layer* layerX3 = graph.AddLayer<OutputLayer>(1, "layerX3");
848 
849  //    X1
850  //    |
851  //    M1
852  //   /|
853  //  / |
854  // /  |
855  // M2 M3
856  // |  |
857  // |  |
858  // X2 X3
859 
860  layerX1->GetOutputSlot(0).Connect(layerM1->GetInputSlot(0));
861  layerM1->GetOutputSlot(0).Connect(layerM2->GetInputSlot(0));
862  layerM1->GetOutputSlot(1).Connect(layerM3->GetInputSlot(0));
863  layerM2->GetOutputSlot(0).Connect(layerX2->GetInputSlot(0));
864  layerM3->GetOutputSlot(0).Connect(layerX3->GetInputSlot(0));
865 
866  SubgraphViewSelector::Subgraphs subgraphs =
867  SubgraphViewSelector::SelectSubgraphs(
868  graph,
869  // select Activation and Splitter Layers M1, M2 and M3
870  [](const Layer & l)
871  {
872  bool toSelect = (l.GetType() == LayerType::Activation
873  || l.GetType() == LayerType::Splitter);
874  return toSelect;
875  });
876 
877  CHECK(subgraphs.size() == 1);
878  if(subgraphs.size() == 1)
879  {
880  auto expected = CreateSubgraphViewFrom(CreateInputsFrom({layerM1}),
881  CreateOutputsFrom({layerM2, layerM3}),
882  {layerM1, layerM2, layerM3});
883 
884  CompareSubgraphViews(subgraphs[0], expected);
885  }
886 }
887 
888 TEST_CASE("MultiInputMultiOutput")
889 {
890  // This case represents the scenario with multiple inputs and multiple outputs
891  //
892  // X1 -> M1 -> M3 -> M4 -> X3
893  // X2 -> M2 -> M3 -> M5 -> X4
894  //
895  // Where the input slots of {M1, M2} and the output slots of {M4, M5} are to be the subgraph
896  // boundaries.
897 
898  Graph graph;
899 
900  ActivationDescriptor activationDefaults;
901  OriginsDescriptor concatDescriptor(2);
902 
903  auto x1 = graph.AddLayer<InputLayer>(0, "x1");
904  auto x2 = graph.AddLayer<InputLayer>(1, "x2");
905 
906  auto m1 = graph.AddLayer<ActivationLayer>(activationDefaults, "m1");
907  auto m2 = graph.AddLayer<ActivationLayer>(activationDefaults, "m2");
908  auto m3 = graph.AddLayer<ConcatLayer>(concatDescriptor, "m3");
909 
910  auto m4 = graph.AddLayer<ActivationLayer>(activationDefaults, "m4");
911  auto m5 = graph.AddLayer<ActivationLayer>(activationDefaults, "m5");
912 
913  auto x3 = graph.AddLayer<OutputLayer>(0, "x3");
914  auto x4 = graph.AddLayer<OutputLayer>(1, "x4");
915 
916  x1->GetOutputSlot(0).Connect(m1->GetInputSlot(0));
917  x2->GetOutputSlot(0).Connect(m2->GetInputSlot(0));
918 
919  m1->GetOutputSlot(0).Connect(m3->GetInputSlot(0));
920  m2->GetOutputSlot(0).Connect(m3->GetInputSlot(1));
921 
922  m3->GetOutputSlot(0).Connect(m4->GetInputSlot(0));
923  m3->GetOutputSlot(0).Connect(m5->GetInputSlot(0));
924 
925  m4->GetOutputSlot(0).Connect(x3->GetInputSlot(0));
926  m5->GetOutputSlot(0).Connect(x4->GetInputSlot(0));
927 
928 
929  SubgraphViewSelector::Subgraphs subgraphs =
930  SubgraphViewSelector::SelectSubgraphs(
931  graph,
932  // select Activation and Concat Layers M1, M2, M3, M4, M5
933  [](const Layer & l)
934  {
935  bool toSelect = (l.GetType() == LayerType::Activation
936  || l.GetType() == LayerType::Concat);
937  return toSelect;
938  });
939 
940 
941  CHECK(subgraphs.size() == 1);
942  if (subgraphs.size() == 1)
943  {
944  auto expected = CreateSubgraphViewFrom(CreateInputsFrom({m1, m2}),
945  CreateOutputsFrom({m4, m5}),
946  {m1, m2, m3, m4, m5});
947 
948  CompareSubgraphViews(subgraphs[0], expected);
949  }
950 }
951 
952 TEST_CASE("ValidMerge")
953 {
954  // Checks that a node that has multiple choices for merge candidates (M3 in this case) correctly merges with the
955  // one that it can (M0), and doesn't merge with the ones it can't (X2 and M2).
956  //
957  //     X1
958  //     |
959  //     M1
960  //    / \'
961  //  X2  M2  M0
962  //    \  |  /
963  //      M3
964  //
965  Graph graph;
966 
967  ActivationDescriptor activationDefaults;
968  OriginsDescriptor concatDescriptor(3);
969 
970  auto x1 = graph.AddLayer<InputLayer>(0, "x1");
971  auto x2 = graph.AddLayer<ActivationLayer>(activationDefaults, "x2");
972  auto m0 = graph.AddLayer<InputLayer>(1, "m0");
973  auto m1 = graph.AddLayer<ActivationLayer>(activationDefaults, "m1");
974  auto m2 = graph.AddLayer<ActivationLayer>(activationDefaults, "m2");
975  auto m3 = graph.AddLayer<ConcatLayer>(concatDescriptor, "m3");
976 
977  x1->GetOutputSlot(0).Connect(m1->GetInputSlot(0));
978  m1->GetOutputSlot(0).Connect(x2->GetInputSlot(0));
979  m1->GetOutputSlot(0).Connect(m2->GetInputSlot(0));
980  x2->GetOutputSlot(0).Connect(m3->GetInputSlot(0));
981  m2->GetOutputSlot(0).Connect(m3->GetInputSlot(1));
982  m0->GetOutputSlot(0).Connect(m3->GetInputSlot(2));
983 
984  SubgraphViewSelector::Subgraphs subgraphs = SubgraphViewSelector::SelectSubgraphs(
985  graph,
986  [](const Layer& l) {
987  return std::string(l.GetName())[0] == 'm';
988  });
989 
990  // expected results to test against
991  auto expectedSubgraph0 =
992  CreateSubgraphViewFrom(
993  CreateInputsFrom({ m1 }),
994  std::vector<OutputSlot*>{ &m1->GetOutputSlot(0), &m2->GetOutputSlot(0) },
995  { m1, m2 });
996 
997  auto expectedSubgraph1 = CreateSubgraphViewFrom(
998  std::vector<InputSlot*>{ &m3->GetInputSlot(0), & m3->GetInputSlot(1) },
999  CreateOutputsFrom({ }),
1000  { m0, m3 });
1001 
1002  CHECK(subgraphs.size() == 2);
1003  if (subgraphs.size() == 2)
1004  {
1005  // we need to have valid subgraph pointers here
1006  CHECK((subgraphs[0] != nullptr));
1007  CHECK((subgraphs[1] != nullptr));
1008 
1009  if (subgraphs[0].get() != nullptr && subgraphs[1].get() != nullptr)
1010  {
1011  if (subgraphs[0]->GetInputSlots().size() == 1)
1012  {
1013  CompareSubgraphViews(subgraphs[0], expectedSubgraph0);
1014  CompareSubgraphViews(subgraphs[1], expectedSubgraph1);
1015  }
1016  else
1017  {
1018  CompareSubgraphViews(subgraphs[0], expectedSubgraph1);
1019  CompareSubgraphViews(subgraphs[1], expectedSubgraph0);
1020  }
1021  }
1022  }
1023 }
1024 
1025 TEST_CASE("PropagatedDependencies")
1026 {
1027  // Version of IslandInTheMiddle with longer chain
1028  // to make sure antecedents are propagated.
1029  /*
1030        M0
1031       / \
1032      M1  M4
1033      |   |
1034      M2  X1 < the island in the middle !
1035      |   |
1036      |   M10
1037      |   |
1038      |   X2 < another island in the middle !
1039      |   |
1040      M3  M5
1041       \ /
1042        M6
1043  */
1044  Graph graph;
1045 
1046  OriginsDescriptor concatDescriptor(2);
1047  auto m6 = graph.AddLayer<ConcatLayer>(concatDescriptor, "m6");
1048  auto m3 = graph.InsertNewLayer<ActivationLayer>(m6->GetInputSlot(0),
1049  ActivationDescriptor{},
1050  "m3");
1051  auto m2 = graph.InsertNewLayer<ActivationLayer>(m3->GetInputSlot(0),
1052  ActivationDescriptor{},
1053  "m2");
1054  auto m1 = graph.InsertNewLayer<ActivationLayer>(m2->GetInputSlot(0),
1055  ActivationDescriptor{},
1056  "m1");
1057  auto m0 = graph.InsertNewLayer<InputLayer>(m1->GetInputSlot(0), 0, "m0");
1058 
1059  auto m5 = graph.InsertNewLayer<ActivationLayer>(m6->GetInputSlot(1),
1060  ActivationDescriptor{},
1061  "m5");
1062  auto x2 = graph.InsertNewLayer<ActivationLayer>(m5->GetInputSlot(0), ActivationDescriptor{}, "x2");
1063  auto m10 = graph.InsertNewLayer<ActivationLayer>(x2->GetInputSlot(0), ActivationDescriptor{}, "m10");
1064  auto x1 = graph.InsertNewLayer<ActivationLayer>(m10->GetInputSlot(0),
1065  ActivationDescriptor{},
1066  "x1");
1067  auto m4 = graph.InsertNewLayer<ActivationLayer>(x1->GetInputSlot(0),
1068  ActivationDescriptor{},
1069  "m4");
1070 
1071  // Connect the other branch to the input layer
1072  m0->GetOutputSlot(0).Connect(m4->GetInputSlot(0));
1073 
1074  // Select all layers whose name starts with 'm'
1075  SubgraphViewSelector::Subgraphs subgraphs =
1076  SubgraphViewSelector::SelectSubgraphs(
1077  graph,
1078  // select the middle layers only
1079  [](const Layer& l)
1080  {
1081  bool toSelect = std::string(l.GetName())[0] == 'm';
1082  return toSelect;
1083  });
1084 
1085  // expected results to test against
1086  auto largerSubgraph = CreateSubgraphViewFrom(CreateInputsFrom({ m0 }),
1087  CreateOutputsFrom({ m3, m4 }),
1088  { m0, m1, m2, m3, m4 });
1089 
1090  auto mediumSubgraph = CreateSubgraphViewFrom(std::vector<InputSlot*>{ &m5->GetInputSlot(0), &m6->GetInputSlot(0) },
1091  std::vector<OutputSlot*>{}, { m5, m6 });
1092 
1093  auto smallerSubgraph =
1094  CreateSubgraphViewFrom(CreateInputsFrom({ m10 }), CreateOutputsFrom({ m10 }), { m10 });
1095 
1096  CHECK(subgraphs.size() == 3);
1097  if (subgraphs.size() == 3)
1098  {
1099  // we need to have valid subgraph pointers here
1100  CHECK((subgraphs[0] != nullptr));
1101  CHECK((subgraphs[1] != nullptr));
1102  CHECK((subgraphs[2] != nullptr));
1103 
1104  if (subgraphs[0].get() != nullptr && subgraphs[1].get() != nullptr && subgraphs[2].get() != nullptr)
1105  {
1106  // sort the subgraphs by layer size, so it is simpler to test
1107  std::sort(subgraphs.begin(), subgraphs.end(),
1108  [](SubgraphViewSelector::SubgraphViewPtr& lhs, SubgraphViewSelector::SubgraphViewPtr& rhs)
1109  {
1110  return (lhs->GetLayers().size() < rhs->GetLayers().size());
1111  }
1112  );
1113 
1114  CompareSubgraphViews(subgraphs[0], smallerSubgraph);
1115  CompareSubgraphViews(subgraphs[1], mediumSubgraph);
1116  CompareSubgraphViews(subgraphs[2], largerSubgraph);
1117  }
1118  }
1119 }
1120 
1121 TEST_CASE("Random")
1122 {
1123  // Creates random networks, splits them into subgraphs and checks the resulting subgraphs obey the required
1124  // dependency rules. We can easily generate very large networks which helps cover corner cases the other
1125  // small, manually crafted tests have missed. We can also use this to measure performance on large networks.
1126  constexpr bool debug = false; // Enable this to dump dot files and performance timings.
1127 
1128  std::mt19937 randomGenerator;
1129 
1130  // Helper function to get a random number in [0, maxExclusive)
1131  auto GetRandom = [&randomGenerator](auto maxExclusive) {
1132  // Note we could use uniform_int_distribution here, but that gives inconsistent results across platforms
1133  // which makes it harder to reproduce results.
1134  // It appears that uniform_real_distribution is consistent across MSVC and gcc so we use that and round it.
1135  std::uniform_real_distribution<float> uniform(0.0f, 1.0f);
1136  return static_cast<decltype(maxExclusive)>(uniform(randomGenerator) * static_cast<float>(maxExclusive));
1137  };
1138  // Helper function to get a bool that has probability 'trueProb' of being true.
1139  auto GetRandomFlag = [&randomGenerator](float trueProb) {
1140  std::uniform_real_distribution<float> uniform(0.0f, 1.0f);
1141  return uniform(randomGenerator) < trueProb;
1142  };
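 // For example, GetRandom(4u) scales a uniform float in [0, 1) by 4 and truncates, giving a
 // value in {0, 1, 2, 3}, while GetRandomFlag(0.7f) returns true roughly 70% of the time.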
1143 
1144  constexpr uint32_t numTests = 100;
1145  for (uint32_t testIdx = 0; testIdx < numTests; ++testIdx)
1146  {
1147  randomGenerator.seed(testIdx); // Set a deterministic seed for reproducibility.
1148 
1149  // Create random graph
1150  Graph graph;
1151  {
1152  // First add the layers, without any connections. The following random constants determine the number of
1153  // each layer to add, along with the chance that each layer will be 'supported' (i.e. selected for
1154  // inclusion in the resulting subgraphs).
1155  uint32_t numInputs = 1 + GetRandom(4u);
1156  uint32_t numConstants = 1 + GetRandom(4u);
1157  uint32_t numOutputs = 1 + GetRandom(4u);
1158  uint32_t numConcats = 0 + GetRandom(500u);
1159  uint32_t numSplits = 0 + GetRandom(500u);
1160  float supportedProb = 0.7f;
1161 
1162  for (uint32_t i = 0; i < numInputs; ++i)
1163  {
1164  std::string name = "input" + std::to_string(i) + (GetRandomFlag(supportedProb) ? "S" : "N");
1165  graph.AddLayer<InputLayer>(static_cast<LayerBindingId>(i), name.c_str());
1166  }
1167  for (uint32_t i = 0; i < numConstants; ++i)
1168  {
1169  std::string name = "constant" + std::to_string(i) + (GetRandomFlag(supportedProb) ? "S" : "N");
1170  graph.AddLayer<ConstantLayer>(name.c_str());
1171  }
1172  for (uint32_t i = 0; i < numOutputs; ++i)
1173  {
1174  std::string name = "output" + std::to_string(i) + (GetRandomFlag(supportedProb) ? "S" : "N");
1175  graph.AddLayer<OutputLayer>(static_cast<LayerBindingId>(i), name.c_str());
1176  }
1177  for (uint32_t i = 0; i < numConcats; ++i)
1178  {
1179  std::string name = "concat" + std::to_string(i) + (GetRandomFlag(supportedProb) ? "S" : "N");
1180  uint32_t numInputs = 1 + GetRandom(3u);
1181  OriginsDescriptor concatDesc(numInputs);
1182  graph.AddLayer<ConcatLayer>(concatDesc, name.c_str());
1183  }
1184  for (uint32_t i = 0; i < numSplits; ++i)
1185  {
1186  std::string name = "split" + std::to_string(i) + (GetRandomFlag(supportedProb) ? "S" : "N");
1187  uint32_t numOutputs = 1 + GetRandom(3u);
1188  ViewsDescriptor splitDesc(numOutputs);
1189  graph.AddLayer<SplitterLayer>(splitDesc, name.c_str());
1190  }
1191 
1192  // Associate each layer with a "depth" parameter. This is used when creating connections to ensure
1193  // that we don't have any loops, by only connecting to layers with a lower "depth".
1194  // This can be thought of as distance from the "top" of the graph (assuming the graph flows top-to-bottom).
1195  // Unfortunately this approach ends up producing very "wide" graphs,
1196  // which probably isn't very representative of 'real' networks.
1197  uint32_t maxLayerDepth = 5 + GetRandom(2000u);
1198  std::map<Layer*, uint32_t> layerDepths;
1199  std::map<uint32_t, std::vector<Layer*>> layersAtDepth;
1200  for (Layer* layer : graph)
1201  {
1202  uint32_t depth;
1203  if (layer->GetType() == LayerType::Input || layer->GetType() == LayerType::Constant)
1204  {
1205  // There needs to be at least one input-like layer above everything else, otherwise there would be
1206  // nothing for them to connect to!
1207  depth = 0;
1208  }
1209  else
1210  {
1211  // Other layers are randomly assigned to later depths.
1212  depth = 1 + GetRandom(maxLayerDepth);
1213  }
1214  layerDepths[layer] = depth;
1215  layersAtDepth[depth].push_back(layer);
1216  }
1217 
1218  // Connect layers to each other. Every input slot of every layer must be connected, but it doesn't
1219  // matter if an output slot goes unused.
1220  for (Layer* layer : graph)
1221  {
1222  for (uint32_t inputSlotIdx = 0; inputSlotIdx < layer->GetNumInputSlots(); ++inputSlotIdx)
1223  {
1224  InputSlot& inputSlot = layer->GetInputSlot(inputSlotIdx);
1225  uint32_t maxLayerDepthToConnectTo = layerDepths[layer]; // This prevents a connection causing a loop
1226  // Finding a layer to connect to may take multiple attempts, so keep trying until it works.
1227  while (inputSlot.GetConnectedOutputSlot() == nullptr)
1228  {
1229  uint32_t layerDepth = GetRandom(maxLayerDepthToConnectTo);
1230  const std::vector<Layer*>& layersToChooseFrom = layersAtDepth[layerDepth];
1231  if (layersToChooseFrom.size() == 0)
1232  {
1233  continue;
1234  }
1235  Layer* layerToConnectWith = layersToChooseFrom[GetRandom(layersToChooseFrom.size())];
1236  if (layerToConnectWith->GetNumOutputSlots() == 0)
1237  {
1238  continue;
1239  }
1240  uint32_t outputSlotIdx = GetRandom(layerToConnectWith->GetNumOutputSlots());
1241  layerToConnectWith->GetOutputSlot(outputSlotIdx).Connect(inputSlot);
1242  }
1243  }
1244  }
1245  }
1246 
1247  if (debug)
1248  {
1249  std::ofstream f("INPUT_" + std::to_string(testIdx) + ".dot");
1250  graph.SerializeToDot(f);
1251  }
1252 
1253  // Run the splitting algorithm, selecting all nodes ending in an 'S' (as randomly assigned above).
1254  auto startTime = std::chrono::high_resolution_clock::now();
1255 
1256  SubgraphViewSelector::Subgraphs subgraphs =
1257  SubgraphViewSelector::SelectSubgraphs(graph,
1258  [](const Layer& l) { return std::string(l.GetName()).back() == 'S'; });
1259 
1260  auto endTime = std::chrono::high_resolution_clock::now();
1261  auto duration = std::chrono::duration_cast<std::chrono::microseconds>(endTime - startTime);
1262  if (debug)
1263  {
1264  std::cout << "Test " << testIdx << ": " << duration.count() << " microseconds" << std::endl;
1265  }
1266 
1267  // Build a map of which subgraph is assigned to each layer.
1268  // This helps some of the following code.
1269  std::map<Layer*, SubgraphView*> layerToSubgraph;
1270  for (Layer* layer : graph)
1271  {
1272  size_t i = 0;
1273  for (std::unique_ptr<SubgraphView>& subgraph : subgraphs)
1274  {
1275  std::string name = std::to_string(i++);
1276  if (std::find(subgraph->begin(), subgraph->end(), layer) != subgraph->end())
1277  {
1278  layerToSubgraph[layer] = subgraph.get();
1279  break;
1280  }
1281  }
1282  }
1283 
1284  if (debug)
1285  {
1286  // Before dumping the dot file, set each Layer's BackendId property so that the dot file
1287  // shows the resulting subgraph assignments.
1288  for (Layer* layer : graph)
1289  {
1290  std::string name = "NotAssigned";
1291  auto subgraphIt = layerToSubgraph.find(layer);
1292  if (subgraphIt != layerToSubgraph.end())
1293  {
1294  auto subgraphIdx = std::distance(subgraphs.begin(),
1295  std::find_if(subgraphs.begin(), subgraphs.end(),
1296  [&](auto& s) { return s.get() == subgraphIt->second; }));
1297  name = std::to_string(subgraphIdx);
1298  }
1299  layer->SetBackendId(armnn::BackendId(name));
1300  }
1301 
1302  std::ofstream f("GRAPH_" + std::to_string(testIdx) + ".dot");
1303  graph.SerializeToDot(f);
1304  }
1305 
1306  // Check the dependencies between subgraphs to make sure that the algorithm has produced a valid result.
1307  // Starting from each of the input slots of each subgraph, recurse up the graph and ensure that we never
1308  // encounter a layer that belongs to the subgraph that we started from.
1309  for (std::unique_ptr<SubgraphView>& subgraph : subgraphs)
1310  {
1311  for (InputSlot* inputSlot : subgraph->GetInputSlots())
1312  {
1313  std::queue<Layer*> toProcess;
1314  toProcess.push(&inputSlot->GetConnectedOutputSlot()->GetOwningLayer());
1315  while (toProcess.size() > 0)
1316  {
1317  Layer* l = toProcess.front();
1318  toProcess.pop();
1319 
1320  CHECK(layerToSubgraph[l] != subgraph.get());
1321 
1322  for (const InputSlot& is : l->GetInputSlots())
1323  {
1324  toProcess.push(&is.GetConnectedOutputSlot()->GetOwningLayer());
1325  }
1326  }
1327  }
1328  }
1329  }
1330 }
1331 
1332 }
1333 
1334 TEST_SUITE("IntegrationTests")
1335 {
1336 TEST_CASE("SingleSubgraph")
1337 {
1338  // This test case represents the scenario when we have one subgraph
1339  // in which two layers have GpuAcc backend assigned
1340 
1341  //Construct graph
1342  Graph graph;
1343 
1344  Layer* const inputLayer = graph.AddLayer<InputLayer>(0, "input");
1345 
1346  Convolution2dDescriptor convDescriptor;
1347  Layer* const convLayer1 = graph.AddLayer<Convolution2dLayer>(convDescriptor, "conv1");
1348  convLayer1->SetBackendId(Compute::GpuAcc);
1349 
1350  Layer* const convLayer2 = graph.AddLayer<Convolution2dLayer>(convDescriptor, "conv2");
1351  convLayer2->SetBackendId(Compute::GpuAcc);
1352 
1353  Layer* const outputLayer = graph.AddLayer<OutputLayer>(0, "output");
1354 
1355  inputLayer->GetOutputSlot(0).Connect(convLayer1->GetInputSlot(0));
1356  convLayer1->GetOutputSlot(0).Connect(convLayer2->GetInputSlot(0));
1357  convLayer2->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
1358 
1359  // GpuAcc sub-graph selector
1360  SubgraphViewSelector::Subgraphs subgraphs =
1361  SubgraphViewSelector::SelectSubgraphs(
1362  graph,
1363  // select the GpuAcc layers only
1364  [](const Layer & l){
1365  bool toSelect = (l.GetBackendId() == Compute::GpuAcc);
1366  return toSelect;
1367  });
1368 
1369  CHECK(subgraphs.size() == 1);
1370  if(subgraphs.size() == 1)
1371  {
1372  CHECK((subgraphs[0] != nullptr));
1373 
1374  if (subgraphs[0].get() != nullptr)
1375  {
1376  unsigned int numInputSlots = armnn::numeric_cast<unsigned int>(subgraphs[0]->GetInputSlots().size());
1377  unsigned int numOutputSlots = armnn::numeric_cast<unsigned int>(subgraphs[0]->GetOutputSlots().size());
1378 
1379  CHECK((numInputSlots == 1));
1380  CHECK((numOutputSlots == 1));
1381 
1382  // Save sub-graph connections for comparison after substitution
1383  IOutputSlot* subgraphInputConn1 = subgraphs[0]->GetInputSlot(0)->GetConnection();
1384  IInputSlot* subgraphOutputConn1 = subgraphs[0]->GetOutputSlot(0)->GetConnection(0);
1385 
1386  // Construct dummy pre-compiled layer
1387  PreCompiledDescriptor preCompiledDescriptor(numInputSlots, numOutputSlots);
1388  Layer* const preCompiledLayer = graph.AddLayer<PreCompiledLayer>(preCompiledDescriptor, "pre-compiled");
1389 
1390  // Substitute sub-graph with pre-compiled layer
1391  graph.SubstituteSubgraph(*subgraphs[0], preCompiledLayer);
1392 
1393  // Check that connections are correct after substitution
1394  CHECK_EQ(preCompiledLayer->GetInputSlot(0).GetConnection(), subgraphInputConn1);
1395 
1396  CHECK_EQ(preCompiledLayer->GetOutputSlot(0).GetConnection(0), subgraphOutputConn1);
1397  }
1398  }
1399 }
1400 
1401 TEST_CASE("MultipleSubgraphs")
1402 {
1403  // This test case represents the scenario when we have two subgraphs
1404  // in which two layers have CpuAcc backend assigned
1405 
1406  //Construct graph
1407  Graph graph;
1408 
1409  Layer* const inputLayer = graph.AddLayer<InputLayer>(0, "input");
1410 
1411  ViewsDescriptor splitterDescriptor(2);
1412  Layer* const splitterLayer = graph.AddLayer<SplitterLayer>(splitterDescriptor, "splitter");
1413  splitterLayer->SetBackendId(Compute::CpuAcc);
1414 
1415  Convolution2dDescriptor convDescriptor;
1416  Layer* const convLayer1 = graph.AddLayer<Convolution2dLayer>(convDescriptor, "conv1");
1417  Layer* const convLayer2 = graph.AddLayer<Convolution2dLayer>(convDescriptor, "conv2");
1418 
1419  OriginsDescriptor concatDescriptor(2);
1420  Layer* const pConcatLayer = graph.AddLayer<ConcatLayer>(concatDescriptor, "concat");
1421  pConcatLayer->SetBackendId(Compute::CpuAcc);
1422 
1423  Layer* const outputLayer = graph.AddLayer<OutputLayer>(0, "output");
1424 
1425  inputLayer->GetOutputSlot(0).Connect(splitterLayer->GetInputSlot(0));
1426  splitterLayer->GetOutputSlot(0).Connect(convLayer1->GetInputSlot(0));
1427  splitterLayer->GetOutputSlot(1).Connect(convLayer2->GetInputSlot(0));
1428  convLayer1->GetOutputSlot(0).Connect(pConcatLayer->GetInputSlot(0));
1429  convLayer2->GetOutputSlot(0).Connect(pConcatLayer->GetInputSlot(1));
1430  pConcatLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
1431 
1432  // CpuAcc sub-graph selector
1433  SubgraphViewSelector::Subgraphs subgraphs =
1434  SubgraphViewSelector::SelectSubgraphs(
1435  graph,
1436  // select the CpuAcc layers only
1437  [](const Layer & l){
1438  bool toSelect = (l.GetBackendId() == Compute::CpuAcc);
1439  return toSelect;
1440  });
1441 
1442  CHECK(subgraphs.size() == 2);
1443  if(subgraphs.size() == 2)
1444  {
1445  CHECK((subgraphs[0] != nullptr));
1446  CHECK((subgraphs[1] != nullptr));
1447 
1448  if (subgraphs[0].get() != nullptr && subgraphs[1].get() != nullptr)
1449  {
1450  //Sort subgraphs by their inputSlot size.
1451  std::sort(subgraphs.begin(), subgraphs.end(),
1452  [](SubgraphViewSelector::SubgraphViewPtr& lhs, SubgraphViewSelector::SubgraphViewPtr& rhs)
1453  {
1454  return (lhs->GetInputSlots().size() < rhs->GetInputSlots().size());
1455  }
1456  );
1457 
1458  unsigned int numInputSlots1 = armnn::numeric_cast<unsigned int>(subgraphs[0]->GetInputSlots().size());
1459  unsigned int numOutputSlots1 = armnn::numeric_cast<unsigned int>(subgraphs[0]->GetOutputSlots().size());
1460 
1461  unsigned int numInputSlots2 = armnn::numeric_cast<unsigned int>(subgraphs[1]->GetInputSlots().size());
1462  unsigned int numOutputSlots2 = armnn::numeric_cast<unsigned int>(subgraphs[1]->GetOutputSlots().size());
1463 
1464  // Save sub-graph connections for comparison after substitution
1465  IOutputSlot* subgraph1InputConn = subgraphs[0]->GetInputSlot(0)->GetConnection();
1466  IInputSlot* subgraph1OutputConn1 = subgraphs[0]->GetOutputSlot(0)->GetConnection(0);
1467  IInputSlot* subgraph1OutputConn2 = subgraphs[0]->GetOutputSlot(1)->GetConnection(0);
1468 
1469  // Save sub-graph connections for comparison after substitution
1470  IOutputSlot* subgraph2InputConn1 = subgraphs[1]->GetInputSlot(0)->GetConnection();
1471  IOutputSlot* subgraph2InputConn2 = subgraphs[1]->GetInputSlot(1)->GetConnection();
1472  IInputSlot* subgraph2OutputConn = subgraphs[1]->GetOutputSlot(0)->GetConnection(0);
1473 
1474  PreCompiledDescriptor preCompiledDescriptor1(numInputSlots1, numOutputSlots1);
1475  Layer* const preCompiledLayer1 = graph.AddLayer<PreCompiledLayer>(preCompiledDescriptor1, "pre-compiled1");
1476 
1477  PreCompiledDescriptor preCompiledDescriptor2(numInputSlots2, numOutputSlots2);
1478  Layer* const preCompiledLayer2 = graph.AddLayer<PreCompiledLayer>(preCompiledDescriptor2, "pre-compiled2");
1479 
1480  // Substitute sub-graph with pre-compiled layer
1481  graph.SubstituteSubgraph(*subgraphs[0], preCompiledLayer1);
1482  graph.SubstituteSubgraph(*subgraphs[1], preCompiledLayer2);
1483 
1484  // Check that connections are correct after substitution
1485  CHECK_EQ(preCompiledLayer1->GetInputSlot(0).GetConnection(), subgraph1InputConn);
1486  CHECK_EQ(preCompiledLayer1->GetOutputSlot(0).GetConnection(0), subgraph1OutputConn1);
1487  CHECK_EQ(preCompiledLayer1->GetOutputSlot(1).GetConnection(0), subgraph1OutputConn2);
1488 
1489  CHECK_EQ(preCompiledLayer2->GetInputSlot(0).GetConnection(), subgraph2InputConn1);
1490  CHECK_EQ(preCompiledLayer2->GetInputSlot(1).GetConnection(), subgraph2InputConn2);
1491  CHECK_EQ(preCompiledLayer2->GetOutputSlot(0).GetConnection(0), subgraph2OutputConn);
1492  }
1493  }
1494 }
1495 
1496 TEST_CASE("SubgraphCycles")
1497 {
1498  // This case represents the scenario where a naive split could lead to a cyclic dependency between two subgraphs
1499  //
1500  // X0 -> M0 -> X1 -> M2 -> X2
1501  // X0 -> M0 -> M1 -> M2 -> X2
1502  //
1503  /*
1504        X0
1505        |
1506        |
1507        M0
1508       / |
1509      /  |
1510    X1   M1
1511      \  /
1512       M2
1513       |
1514       X2
1515  */
1516  // The expected result for this is that M0,M1 will be part of one subgraph and M2 in another and the
1517  // input and output slots in the subgraphs will be set accordingly.
1518  //
1519  Graph graph;
1520 
1521  OriginsDescriptor originsDescriptor(2);
1522  auto x0 = graph.AddLayer<InputLayer>(0, "x0");
1523  auto m0 = graph.AddLayer<ActivationLayer>(ActivationDescriptor{}, "m0");
1524  auto x1 = graph.AddLayer<ActivationLayer>(ActivationDescriptor{}, "x1");
1525  auto m1 = graph.AddLayer<ActivationLayer>(ActivationDescriptor{}, "m1");
1526  auto m2 = graph.AddLayer<AdditionLayer>("m2");
1527  auto x2 = graph.AddLayer<ActivationLayer>(ActivationDescriptor{}, "x2");
1528 
1529  x0->GetOutputSlot(0).Connect(m0->GetInputSlot(0));
1530  m0->GetOutputSlot(0).Connect(x1->GetInputSlot(0));
1531  m0->GetOutputSlot(0).Connect(m1->GetInputSlot(0));
1532  x1->GetOutputSlot(0).Connect(m2->GetInputSlot(0));
1533  m1->GetOutputSlot(0).Connect(m2->GetInputSlot(1));
1534  m2->GetOutputSlot(0).Connect(x2->GetInputSlot(0));
1535 
1536  // All selected 'M*' layers will have 'm' in the name
1537  SubgraphViewSelector::Subgraphs subgraphs =
1538  SubgraphViewSelector::SelectSubgraphs(
1539  graph,
1540  // select the middle layers only
1541  [](const Layer & l)
1542  {
1543  bool toSelect = (l.GetNameStr().find('m') != std::string::npos);
1544  return toSelect;
1545  });
1546 
1547  // expected results to test against
1548  auto inputSubgraph = CreateSubgraphViewFrom(CreateInputsFrom({m0}),
1549  CreateOutputsFrom({m0, m1}),
1550  {m0, m1});
1551 
1552  auto outputSubgraph = CreateSubgraphViewFrom(CreateInputsFrom({m2}),
1553  CreateOutputsFrom({m2}),
1554  {m2});
1555 
1556  CHECK(subgraphs.size() == 2);
1557  if (subgraphs.size() == 2)
1558  {
1559  // we need to have valid subgraph pointers here
1560  CHECK((subgraphs[0] != nullptr));
1561  CHECK((subgraphs[1] != nullptr));
1562 
1563  if (subgraphs[0].get() != nullptr && subgraphs[1].get() != nullptr)
1564  {
1565  // sort the subgraphs by layer size, so it is simpler to test
1566  std::sort(subgraphs.begin(), subgraphs.end(),
1567  [](SubgraphViewSelector::SubgraphViewPtr& lhs, SubgraphViewSelector::SubgraphViewPtr& rhs)
1568  {
1569  return (lhs->GetLayers().size() < rhs->GetLayers().size());
1570  }
1571  );
1572 
1573  // one subgraph needs to be size=1 and the other one size=2
1574  CHECK(subgraphs[0]->GetLayers().size() == 1);
1575  CHECK(subgraphs[1]->GetLayers().size() == 2);
1576 
1577  CompareSubgraphViews(subgraphs[0], outputSubgraph);
1578  CompareSubgraphViews(subgraphs[1], inputSubgraph);
1579  }
1580  }
1581 }
1582 
1583 TEST_CASE("SubgraphOrder")
1584 {
1585  Graph graph;
1586 
1587  auto input = graph.AddLayer<InputLayer>(0, "Input");
1588  auto activation = graph.AddLayer<ActivationLayer>(ActivationDescriptor{}, "Activation");
1589  auto output = graph.AddLayer<OutputLayer>(1, "Output");
1590 
1591  input->GetOutputSlot(0).Connect(activation->GetInputSlot(0));
1592  activation->GetOutputSlot(0).Connect(output->GetInputSlot(0));
1593 
1594  //Add in out of order
1595  auto view = CreateSubgraphViewFrom({},
1596  {},
1597  {output, input, activation});
1598 
1599  // Check the layers are sorted topologically in the view
1600  int idx=0;
1601  LayerType expectedSorted[] = {LayerType::Input, LayerType::Activation, LayerType::Output};
1602  view->ForEachLayer([&idx, &expectedSorted](const Layer* l)
1603  {
1604  CHECK((expectedSorted[idx] == l->GetType()));
1605  idx++;
1606  }
1607  );
1608 }
1609 
1610 }