ArmNN 22.02 — SubgraphViewTests.cpp: unit tests for SubgraphView construction, selection and
substitution. (This text is residue of a Doxygen-generated source listing page.)
1 //
2 // Copyright © 2017 Arm Ltd. All rights reserved.
3 // SPDX-License-Identifier: MIT
4 //
5 
6 #include <Graph.hpp>
8 
13 
14 #include <doctest/doctest.h>
15 
16 #include <fstream>
17 #include <map>
18 #include <queue>
19 #include <random>
20 #include <chrono>
21 #include <numeric>
22 
23 using namespace armnn;
24 
25 namespace
26 {
27 
28 bool AreAnySubgraphLayersPresentInGraph(const SubgraphView::IConnectableLayers &subgraphLayers, const Graph &graph)
29 {
30  for(auto&& layer : subgraphLayers)
31  {
32  auto posInGraph = std::find(graph.begin(), graph.end(), layer);
33  if(posInGraph != graph.end())
34  {
35  return true;
36  }
37  }
38 
39  return false;
40 }
41 
42 //
43 // this helper only works if all layers where the inputs connect to are not selected
44 //
45 SubgraphView::InputSlots CreateInputsFrom(const std::vector<Layer*>& layers)
46 {
48  for (auto&& layer : layers)
49  {
50  for (auto&& it = layer->BeginInputSlots(); it != layer->EndInputSlots(); ++it)
51  {
52  result.push_back(&(*it));
53  }
54  }
55  return result;
56 }
57 
58 /// Duplication for IConnectableLayer
59 SubgraphView::IInputSlots CreateIInputsFrom(const std::vector<armnn::IConnectableLayer*>& layers)
60 {
62  for (auto&& layer : layers)
63  {
64  for (unsigned int i = 0 ; i < layer->GetNumInputSlots(); ++i)
65  {
66  result.push_back(&(layer->GetInputSlot(i)));
67  }
68  }
69  return result;
70 }
71 
72 //
73 // this helper only works if all layers where the outputs connect to are not selected
74 //
75 SubgraphView::OutputSlots CreateOutputsFrom(const std::vector<Layer*>& layers)
76 {
78  for (auto && layer : layers)
79  {
80  for (auto&& it = layer->BeginOutputSlots(); it != layer->EndOutputSlots(); ++it)
81  {
82  result.push_back(&(*it));
83  }
84  }
85  return result;
86 }
87 
88 /// Duplication for IConnectableLayer
89 SubgraphView::IOutputSlots CreateIOutputsFrom(const std::vector<armnn::IConnectableLayer*>& layers)
90 {
92  for (auto&& layer: layers)
93  {
94  for (unsigned int i = 0; i < layer->GetNumOutputSlots(); ++i)
95  {
96  result.push_back(&(layer->GetOutputSlot(i)));
97  }
98  }
99  return result;
100 }
101 
102 //
103 // this takes the inputs, outputs and layers as a copy and the move these copies into the
104 // resulting subgraph, so the pass by value is intentional
105 //
107  SubgraphView::OutputSlots&& outputs,
108  SubgraphView::Layers&& layers)
109 {
110  return std::make_unique<SubgraphView>(std::move(inputs), std::move(outputs), std::move(layers));
111 }
112 
114  SubgraphView::IInputSlots&& inputs,
115  SubgraphView::IOutputSlots&& outputs)
116 {
117  return std::make_unique<SubgraphView>(std::move(layers), std::move(inputs), std::move(outputs));
118 }
119 
/// Copies the range [begin, end) into a vector of T and sorts it,
/// so that order-insensitive comparisons between containers are possible.
template <typename T, typename Iterator>
std::vector<T> ToSortedArray(Iterator begin, Iterator end)
{
    std::vector<T> sorted(begin, end);
    std::sort(sorted.begin(), sorted.end());
    return sorted;
}
127 
128 template <typename T>
129 void CompareVectors(const std::vector<T>& result, const std::vector<T>& expected)
130 {
131  CHECK(std::equal(result.begin(), result.end(), expected.begin(), expected.end()));
132 }
133 
134 void CompareSubgraphViews(SubgraphViewSelector::SubgraphViewPtr& result,
136 {
137  // expect both to be valid subgraphs
138  CHECK((result.get() != nullptr));
139  CHECK((expected.get() != nullptr));
140 
141  if (result.get() != nullptr && expected.get() != nullptr)
142  {
143  CHECK(result->GetIInputSlots().size() == expected->GetIInputSlots().size());
144  CHECK(result->GetIOutputSlots().size() == expected->GetIOutputSlots().size());
145  CHECK(result->GetIConnectableLayers().size() == expected->GetIConnectableLayers().size());
146 
147  auto resultLayers = ToSortedArray<IConnectableLayer*>(result->GetIConnectableLayers().begin(),
148  result->GetIConnectableLayers().end());
149  auto expectedLayers = ToSortedArray<IConnectableLayer*>(expected->GetIConnectableLayers().begin(),
150  expected->GetIConnectableLayers().end());
151  CompareVectors(resultLayers, expectedLayers);
152 
153  auto resultInputs = ToSortedArray<IInputSlot *>(result->GetIInputSlots().begin(),
154  result->GetIInputSlots().end());
155  auto expectedInputs = ToSortedArray<IInputSlot *>(expected->GetIInputSlots().begin(),
156  expected->GetIInputSlots().end());
157  CompareVectors(resultInputs, expectedInputs);
158 
159  auto resultOutputs = ToSortedArray<IOutputSlot *>(result->GetIOutputSlots().begin(),
160  result->GetIOutputSlots().end());
161  auto expectedOutputs = ToSortedArray<IOutputSlot *>(expected->GetIOutputSlots().begin(),
162  expected->GetIOutputSlots().end());
163  CompareVectors(resultOutputs, expectedOutputs);
164  }
165 }
166 
167 } // namespace <anonymous>
168 
169 TEST_SUITE("SubgraphViewBackwardCompatibilityTests")
170 {
171 // Test that SubraphView has been converted to using IConnectableLayer/IInputSlot/IOutputSlot
172 // in a backward compatible manner from ILayer/InputSlot/OutputSlot
173 TEST_CASE("SubgraphViewIterators")
174 {
176  IConnectableLayer* layer = net->AddInputLayer(1, "input");
177 
178  SubgraphView subgraph{layer};
179 
180  // cbeginIConnectable() and cendIConnectable()
181  bool found = false;
182  if (std::find(subgraph.cbeginIConnectable(), subgraph.cendIConnectable(), layer)
183  != subgraph.cendIConnectable())
184  {
185  found = true;
186  }
187  CHECK(found);
188  found = false;
189 
190  // beginIConnectable() and endIConnectable()
191  if (std::find(subgraph.beginIConnectable(), subgraph.endIConnectable(), layer)
192  != subgraph.endIConnectable())
193  {
194  found = true;
195  }
196  CHECK(found);
197  found = false;
198 
199  // GetIConnectableLayers returns IConnectableLayers initialized when calling constructor given IConnectableLayers
200  const SubgraphView::IConnectableLayers& subgraphLayers = subgraph.GetIConnectableLayers();
201  for (auto& iConnectableLayer : subgraphLayers)
202  {
203  if (std::string(iConnectableLayer->GetName()) == "input")
204  {
205  found = true;
206  }
207  }
208  CHECK(found);
209  found = false;
210 
211  // Test GetLayers returns layers initialized when calling constructor given IConnectableLayers
213  const SubgraphView::Layers& subgraphLayersOld = subgraph.GetLayers();
215  for (auto& layerOld : subgraphLayersOld)
216  {
217  if (std::string(layerOld->GetName()) == "input")
218  {
219  found = true;
220  }
221  }
222  CHECK(found);
223 }
224 
225 TEST_CASE("SubgraphViewSlots")
226 {
227  // Construct graph
228  Graph graph;
229 
230  Layer* const inputLayer = graph.AddLayer<InputLayer>(0, "input");
231 
232  Convolution2dDescriptor convDescriptor;
233  Layer* const convLayer1 = graph.AddLayer<Convolution2dLayer>(convDescriptor, "conv1");
234  Layer* const convLayer2 = graph.AddLayer<Convolution2dLayer>(convDescriptor, "conv2");
235 
236  Layer* const outputLayer = graph.AddLayer<OutputLayer>(0, "output");
237 
238  inputLayer->GetOutputSlot(0).Connect(convLayer1->GetInputSlot(0));
239  convLayer1->GetOutputSlot(0).Connect(convLayer2->GetInputSlot(0));
240  convLayer2->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
241 
242  // Construct sub-graph
244  CreateIInputsFrom({convLayer1}),
245  CreateIOutputsFrom({convLayer2}));
246 
247  // Test that both old and new are initialized
248  CHECK(subgraph->GetIInputSlots().size() == 1);
249  CHECK(subgraph->GetIOutputSlots().size() == 1);
250 
252  CHECK(subgraph->GetInputSlots().size() == 1);
253  CHECK(subgraph->GetOutputSlots().size() == 1);
254 
255  // Check old and new pointing to same address
256  CHECK(subgraph->GetOutputSlot(0) == subgraph->GetIOutputSlot(0));
257  CHECK(subgraph->GetInputSlot(0) == subgraph->GetIInputSlot(0));
259 
260 }
261 
262 TEST_CASE("SubgraphViewConstructors")
263 {
264  // Construct graph
265  Graph graph;
266 
267  Layer* const inputLayer = graph.AddLayer<InputLayer>(0, "input");
268 
269  Convolution2dDescriptor convDescriptor;
270  Layer* const convLayer1 = graph.AddLayer<Convolution2dLayer>(convDescriptor, "conv1");
271  Layer* const convLayer2 = graph.AddLayer<Convolution2dLayer>(convDescriptor, "conv2");
272 
273  Layer* const outputLayer = graph.AddLayer<OutputLayer>(0, "output");
274 
275  inputLayer->GetOutputSlot(0).Connect(convLayer1->GetInputSlot(0));
276  convLayer1->GetOutputSlot(0).Connect(convLayer2->GetInputSlot(0));
277  convLayer2->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
278 
279  // Construct sub-graph
281  CreateSubgraphViewFrom({inputLayer, convLayer1, convLayer2, outputLayer},
282  CreateIInputsFrom({convLayer1}),
283  CreateIOutputsFrom({convLayer2}));
284 
285  // Copy Constructor
286  SubgraphView subgraph2(*subgraph.get());
287  CHECK(subgraph->GetIConnectableLayers() == subgraph2.GetIConnectableLayers());
288  CHECK(subgraph->GetIInputSlots() == subgraph2.GetIInputSlots());
289  CHECK(subgraph->GetIOutputSlots() == subgraph2.GetIOutputSlots());
290 
292  CHECK(subgraph->GetLayers() == subgraph2.GetLayers());
293  CHECK(subgraph->GetInputSlots() == subgraph2.GetInputSlots());
294  CHECK(subgraph->GetOutputSlots() == subgraph2.GetOutputSlots());
296 
297  // Move Constructor
298  SubgraphView subgraph3(std::move(subgraph2));
299  CHECK(subgraph->GetIConnectableLayers() == subgraph3.GetIConnectableLayers());
300  CHECK(subgraph->GetIInputSlots() == subgraph3.GetIInputSlots());
301  CHECK(subgraph->GetIOutputSlots() == subgraph3.GetIOutputSlots());
302 
304  CHECK(subgraph->GetLayers() == subgraph3.GetLayers());
305  CHECK(subgraph->GetInputSlots() == subgraph3.GetInputSlots());
306  CHECK(subgraph->GetOutputSlots() == subgraph3.GetOutputSlots());
308 
309  // Clear
310  subgraph.get()->Clear();
311  CHECK(subgraph->GetIConnectableLayers().size() == 0);
312  CHECK(subgraph->GetIInputSlots().size() == 0);
313  CHECK(subgraph->GetIOutputSlots().size() == 0);
314 }
315 
316 } // SubgraphViewBackwardCompatibilityTests Test Suite end
317 
318 TEST_SUITE("SubgraphSubstitution")
319 {
320 TEST_CASE("SingleInputSingleOutput")
321 {
322  // Construct graph
323  Graph graph;
324 
325  Layer* const inputLayer = graph.AddLayer<InputLayer>(0, "input");
326 
327  Convolution2dDescriptor convDescriptor;
328  Layer* const convLayer1 = graph.AddLayer<Convolution2dLayer>(convDescriptor, "conv1");
329  Layer* const convLayer2 = graph.AddLayer<Convolution2dLayer>(convDescriptor, "conv2");
330 
331  Layer* const outputLayer = graph.AddLayer<OutputLayer>(0, "output");
332 
333  inputLayer->GetOutputSlot(0).Connect(convLayer1->GetInputSlot(0));
334  convLayer1->GetOutputSlot(0).Connect(convLayer2->GetInputSlot(0));
335  convLayer2->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
336 
337  // Construct sub-graph
340  CreateIInputsFrom({convLayer1}),
341  CreateIOutputsFrom({convLayer2}));
342 
343  // Save sub-graph connections for comparison after substitution
344  // Using GetIInputSlot/GetIIOutputSlot functions
345  IOutputSlot* subgraphInputConn = subgraph->GetIInputSlot(0)->GetConnection();
346  IInputSlot* subgraphOutputConn = subgraph->GetIOutputSlot(0)->GetConnection(0);
347 
348  // Construct dummy pre-compiled layer
349  PreCompiledDescriptor preCompiledDescriptor(1, 1);
350 
351  IConnectableLayer* const preCompiledLayer =
352  graph.AddLayer<PreCompiledLayer>(preCompiledDescriptor, "pre-compiled");
353 
354  // Substitute sub-graph with pre-compiled layer
355  graph.SubstituteSubgraph(*subgraph, preCompiledLayer);
356 
357  // Check that connections are correct after substitution
358  CHECK_EQ(preCompiledLayer->GetInputSlot(0).GetConnection(), subgraphInputConn);
359  CHECK_EQ(preCompiledLayer->GetOutputSlot(0).GetConnection(0), subgraphOutputConn);
360 }
361 
362 TEST_CASE("SingleInputSingleOutputAddPrecompiledLayerSubstituteSubgraph1")
363 {
364  // Construct graph.
365  Graph graph;
366 
367  Layer* const inputLayer = graph.AddLayer<InputLayer>(0, "input");
368 
369  Convolution2dDescriptor convDescriptor;
370  Layer* const convLayer1 = graph.AddLayer<Convolution2dLayer>(convDescriptor, "conv1");
371  Layer* const convLayer2 = graph.AddLayer<Convolution2dLayer>(convDescriptor, "conv2");
372 
373  Layer* const outputLayer = graph.AddLayer<OutputLayer>(0, "output");
374 
375  inputLayer->GetOutputSlot(0).Connect(convLayer1->GetInputSlot(0));
376  convLayer1->GetOutputSlot(0).Connect(convLayer2->GetInputSlot(0));
377  convLayer2->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
378 
379  // Construct sub-graph
381  CreateOutputsFrom({convLayer2}),
382  {});
383 
384  // Save sub-graph connections for comparison after substitution
385  IOutputSlot* subgraphInputConn = subgraph->GetIInputSlot(0)->GetConnection();
386  IInputSlot* subgraphOutputConn = subgraph->GetIOutputSlot(0)->GetConnection(0);
387 
388  PreCompiledDescriptor preCompiledDescriptor(1, 1);
389  CompiledBlobPtr compiledBlobPtr;
390  BackendId backend = Compute::CpuRef;
391 
392  // Construct dummy pre-compiled layer
393  INetworkPtr network = INetwork::Create();
394  IConnectableLayer* preCompiledLayer = network->AddPrecompiledLayer(preCompiledDescriptor,
395  std::move(compiledBlobPtr),
396  backend);
397 
398  // Substitute sub-graph with pre-compiled layer
399  graph.SubstituteSubgraph(*subgraph, preCompiledLayer);
400 
401  // Check that connections are correct after substitution
402  CHECK_EQ(preCompiledLayer->GetInputSlot(0).GetConnection(), subgraphInputConn);
403  CHECK_EQ(preCompiledLayer->GetOutputSlot(0).GetConnection(0), subgraphOutputConn);
404 }
405 
406 TEST_CASE("SingleInputSingleOutputAddPrecompiledLayerSubstituteSubgraph2")
407 {
408  // Construct graph.
409  Graph graph;
410 
411  Layer* const inputLayer = graph.AddLayer<InputLayer>(0, "input");
412 
413  Convolution2dDescriptor convDescriptor;
414  Layer* const convLayer1 = graph.AddLayer<Convolution2dLayer>(convDescriptor, "conv1");
415  Layer* const convLayer2 = graph.AddLayer<Convolution2dLayer>(convDescriptor, "conv2");
416 
417  Layer* const outputLayer = graph.AddLayer<OutputLayer>(0, "output");
418 
419  inputLayer->GetOutputSlot(0).Connect(convLayer1->GetInputSlot(0));
420  convLayer1->GetOutputSlot(0).Connect(convLayer2->GetInputSlot(0));
421  convLayer2->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
422 
423  // Construct sub-graph
425  CreateOutputsFrom({convLayer2}),
426  {});
427 
428  // Save sub-graph connections for comparison after substitution
429  IOutputSlot* subgraphInputConn = subgraph->GetIInputSlot(0)->GetConnection();
430  IInputSlot* subgraphOutputConn = subgraph->GetIOutputSlot(0)->GetConnection(0);
431 
432  PreCompiledDescriptor preCompiledDescriptor(1, 1);
433  CompiledBlobPtr compiledBlobPtr;
434  BackendId backend = Compute::CpuRef;
435 
436  // Construct dummy pre-compiled layer
437  INetworkPtr network = INetwork::Create();
438  IConnectableLayer* preCompiledLayer = network->AddPrecompiledLayer(preCompiledDescriptor,
439  std::move(compiledBlobPtr),
440  backend);
441  SubgraphView substituteSubgraph(preCompiledLayer);
442 
443  // Substitute sub-graph with pre-compiled layer
444  graph.SubstituteSubgraph(*subgraph, substituteSubgraph);
445 
446  // Check that connections are correct after substitution
447  CHECK_EQ(preCompiledLayer->GetInputSlot(0).GetConnection(), subgraphInputConn);
448  CHECK_EQ(preCompiledLayer->GetOutputSlot(0).GetConnection(0), subgraphOutputConn);
449 }
450 
451 TEST_CASE("SingleInputSingleOutputSubstituteGraph")
452 {
453  // Construct graph
454  Graph graph;
455 
456  Layer* const inputLayer = graph.AddLayer<InputLayer>(0, "input");
457 
458  Convolution2dDescriptor convDescriptor;
459  Layer* const convLayer1 = graph.AddLayer<Convolution2dLayer>(convDescriptor, "conv1");
460  Layer* const convLayer2 = graph.AddLayer<Convolution2dLayer>(convDescriptor, "conv2");
461 
462  Layer* const outputLayer = graph.AddLayer<OutputLayer>(0, "output");
463 
464  inputLayer->GetOutputSlot(0).Connect(convLayer1->GetInputSlot(0));
465  convLayer1->GetOutputSlot(0).Connect(convLayer2->GetInputSlot(0));
466  convLayer2->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
467 
468  // Construct sub-graph
471  CreateOutputsFrom({convLayer2}),
472  {});
473 
474  // Save sub-graph connections for comparison after substitution
475  IOutputSlot* subgraphInputConn = subgraph->GetIInputSlot(0)->GetConnection();
476  IInputSlot* subgraphOutputConn = subgraph->GetIOutputSlot(0)->GetConnection(0);
477 
478  // Construct second graph with a single pre-compiled layer
479  Graph substituteGraph;
480  PreCompiledDescriptor preCompiledDescriptor(1, 1);
481  Layer* const preCompiledLayer = substituteGraph.AddLayer<PreCompiledLayer>(preCompiledDescriptor, "pre-compiled");
482 
483  SubgraphViewSelector::SubgraphViewPtr substituteSubgraph =
484  CreateSubgraphViewFrom(CreateInputsFrom({preCompiledLayer}),
485  CreateOutputsFrom({preCompiledLayer}),
486  {preCompiledLayer});
487  // Substitute subgraph with pre-compiled layer
488  graph.SubstituteSubgraph(*subgraph, *substituteSubgraph);
489 
490  // Check that connections are correct after substitution
491  CHECK_EQ(preCompiledLayer->GetInputSlot(0).GetConnection(), subgraphInputConn);
492  CHECK_EQ(preCompiledLayer->GetOutputSlot(0).GetConnection(0), subgraphOutputConn);
493 }
494 
495 TEST_CASE("MultiInputSingleOutput")
496 {
497  // Construct graph
498  Graph graph;
499 
500  Layer* const inputLayer = graph.AddLayer<InputLayer>(0, "input");
501 
502  ViewsDescriptor splitterDescriptor(2);
503  Layer* const splitterLayer = graph.AddLayer<SplitterLayer>(splitterDescriptor, "splitter");
504 
505  Convolution2dDescriptor convDescriptor;
506  Layer* const convLayer1 = graph.AddLayer<Convolution2dLayer>(convDescriptor, "conv1");
507  Layer* const convLayer2 = graph.AddLayer<Convolution2dLayer>(convDescriptor, "conv2");
508 
509  OriginsDescriptor concatDescriptor(2);
510  Layer* const concatLayer = graph.AddLayer<ConcatLayer>(concatDescriptor, "concat");
511 
512  Layer* const outputLayer = graph.AddLayer<OutputLayer>(0, "output");
513 
514  inputLayer->GetOutputSlot(0).Connect(splitterLayer->GetInputSlot(0));
515  splitterLayer->GetOutputSlot(0).Connect(convLayer1->GetInputSlot(0));
516  splitterLayer->GetOutputSlot(1).Connect(convLayer2->GetInputSlot(0));
517  convLayer1->GetOutputSlot(0).Connect(concatLayer->GetInputSlot(0));
518  convLayer2->GetOutputSlot(0).Connect(concatLayer->GetInputSlot(1));
519  concatLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
520 
521  // Construct sub-graph
522  auto subgraph = CreateSubgraphViewFrom(CreateInputsFrom({convLayer1, convLayer2}),
523  CreateOutputsFrom({concatLayer}),
524  {});
525 
526  // Save sub-graph connections for comparison after substitution
527  IOutputSlot* subgraphInputConn1 = subgraph->GetIInputSlot(0)->GetConnection();
528  IOutputSlot* subgraphInputConn2 = subgraph->GetIInputSlot(1)->GetConnection();
529 
530  IInputSlot* subgraphOutputConn = subgraph->GetIOutputSlot(0)->GetConnection(0);
531 
532  // Construct dummy pre-compiled layer
533  PreCompiledDescriptor preCompiledDescriptor(2, 1);
534  Layer* const preCompiledLayer = graph.AddLayer<PreCompiledLayer>(preCompiledDescriptor, "pre-compiled");
535 
536  // Substitute sub-graph with pre-compiled layer
537  graph.SubstituteSubgraph(*subgraph, preCompiledLayer);
538 
539  // Check that connections are correct after substitution
540  CHECK_EQ(preCompiledLayer->GetInputSlot(0).GetConnection(), subgraphInputConn1);
541  CHECK_EQ(preCompiledLayer->GetInputSlot(1).GetConnection(), subgraphInputConn2);
542 
543  CHECK_EQ(preCompiledLayer->GetOutputSlot(0).GetConnection(0), subgraphOutputConn);
544 }
545 
546 TEST_CASE("SingleInputMultiOutput")
547 {
548  // Construct graph
549  Graph graph;
550 
551  Layer* const inputLayer = graph.AddLayer<InputLayer>(0, "input");
552 
553  Convolution2dDescriptor convDescriptor;
554  Layer* const convLayer1 = graph.AddLayer<Convolution2dLayer>(convDescriptor, "conv1");
555  Layer* const convLayer2 = graph.AddLayer<Convolution2dLayer>(convDescriptor, "conv2");
556  OriginsDescriptor concatDescriptor(2);
557  Layer* const concatLayer = graph.AddLayer<ConcatLayer>(concatDescriptor, "concat");
558  Layer* const outputLayer = graph.AddLayer<OutputLayer>(0, "output");
559 
560  ViewsDescriptor splitterDescriptor(2);
561  Layer* const splitterLayer = graph.AddLayer<SplitterLayer>(splitterDescriptor, "splitter");
562 
563  inputLayer->GetOutputSlot(0).Connect(splitterLayer->GetInputSlot(0));
564  splitterLayer->GetOutputSlot(0).Connect(convLayer1->GetInputSlot(0));
565  splitterLayer->GetOutputSlot(1).Connect(convLayer2->GetInputSlot(0));
566  convLayer1->GetOutputSlot(0).Connect(concatLayer->GetInputSlot(0));
567  convLayer2->GetOutputSlot(0).Connect(concatLayer->GetInputSlot(1));
568  concatLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
569 
570  // Construct sub-graph
572  CreateSubgraphViewFrom(CreateInputsFrom({splitterLayer}),
573  CreateOutputsFrom({convLayer1, convLayer2}),
574  {});
575 
576  // Save sub-graph connections for comparison after substitution
577  IOutputSlot* subgraphInputConn1 = subgraph->GetIInputSlot(0)->GetConnection();
578 
579  IInputSlot* subgraphOutputConn1 = subgraph->GetIOutputSlot(0)->GetConnection(0);
580  IInputSlot* subgraphOutputConn2 = subgraph->GetIOutputSlot(1)->GetConnection(0);
581 
582  // Construct dummy pre-compiled layer
583  PreCompiledDescriptor preCompiledDescriptor(1, 2);
584  Layer* const preCompiledLayer = graph.AddLayer<PreCompiledLayer>(preCompiledDescriptor, "pre-compiled");
585 
586  // Substitute sub-graph with pre-compiled layer
587  graph.SubstituteSubgraph(*subgraph, preCompiledLayer);
588 
589  // Check that connections are correct after substitution
590  CHECK_EQ(preCompiledLayer->GetInputSlot(0).GetConnection(), subgraphInputConn1);
591 
592  CHECK_EQ(preCompiledLayer->GetOutputSlot(0).GetConnection(0), subgraphOutputConn1);
593  CHECK_EQ(preCompiledLayer->GetOutputSlot(1).GetConnection(0), subgraphOutputConn2);
594 }
595 
596 TEST_CASE("MultiInputMultiOutput")
597 {
598  // Construct graph
599  Graph graph;
600 
601  Layer* const inputLayer = graph.AddLayer<InputLayer>(0, "input");
602 
603  ViewsDescriptor splitterDescriptor(2);
604  Layer* const splitterLayer = graph.AddLayer<SplitterLayer>(splitterDescriptor, "splitter");
605 
606  Convolution2dDescriptor convDescriptor;
607  Layer* const convLayer1 = graph.AddLayer<Convolution2dLayer>(convDescriptor, "conv1");
608  Layer* const convLayer2 = graph.AddLayer<Convolution2dLayer>(convDescriptor, "conv2");
609 
610  OriginsDescriptor concatDescriptor(2);
611  Layer* const concatLayer = graph.AddLayer<ConcatLayer>(concatDescriptor, "concat");
612 
613  Layer* const outputLayer = graph.AddLayer<OutputLayer>(0, "output");
614 
615  inputLayer->GetOutputSlot(0).Connect(splitterLayer->GetInputSlot(0));
616  splitterLayer->GetOutputSlot(0).Connect(convLayer1->GetInputSlot(0));
617  splitterLayer->GetOutputSlot(1).Connect(convLayer2->GetInputSlot(0));
618  convLayer1->GetOutputSlot(0).Connect(concatLayer->GetInputSlot(0));
619  convLayer2->GetOutputSlot(0).Connect(concatLayer->GetInputSlot(1));
620  concatLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
621 
622  // Construct sub-graph
624  CreateSubgraphViewFrom(CreateInputsFrom({convLayer1, convLayer2}),
625  CreateOutputsFrom({convLayer1, convLayer2}),
626  {});
627 
628  // Save sub-graph connections for comparison after substitution
629  IOutputSlot* subgraphInputConn1 = subgraph->GetIInputSlot(0)->GetConnection();
630  IOutputSlot* subgraphInputConn2 = subgraph->GetIInputSlot(1)->GetConnection();
631 
632  IInputSlot* subgraphOutputConn1 = subgraph->GetIOutputSlot(0)->GetConnection(0);
633  IInputSlot* subgraphOutputConn2 = subgraph->GetIOutputSlot(1)->GetConnection(0);
634 
635  // Construct dummy pre-compiled layer
636  PreCompiledDescriptor preCompiledDescriptor(2, 2);
637  Layer* const preCompiledLayer = graph.AddLayer<PreCompiledLayer>(preCompiledDescriptor, "pre-compiled");
638 
639  // Substitute sub-graph with pre-compiled layer
640  graph.SubstituteSubgraph(*subgraph, preCompiledLayer);
641 
642  // Check that connections are correct after substitution
643  CHECK_EQ(preCompiledLayer->GetInputSlot(0).GetConnection(), subgraphInputConn1);
644  CHECK_EQ(preCompiledLayer->GetInputSlot(1).GetConnection(), subgraphInputConn2);
645 
646  CHECK_EQ(preCompiledLayer->GetOutputSlot(0).GetConnection(0), subgraphOutputConn1);
647  CHECK_EQ(preCompiledLayer->GetOutputSlot(1).GetConnection(0), subgraphOutputConn2);
648 }
649 
650 TEST_CASE("EraseReplacedIConnectableLayers")
651 {
652  // Construct graph
653  Graph graph;
654 
655  graph.AddLayer<InputLayer>(0, "input");
656 
657  ViewsDescriptor splitterDescriptor(2);
658  IConnectableLayer* const splitterLayer = graph.AddLayer<SplitterLayer>(splitterDescriptor, "splitter");
659 
660  Convolution2dDescriptor convDescriptor;
661  IConnectableLayer* const convLayer1 = graph.AddLayer<Convolution2dLayer>(convDescriptor, "conv1");
662  IConnectableLayer* const convLayer2 = graph.AddLayer<Convolution2dLayer>(convDescriptor, "conv2");
663 
664  OriginsDescriptor concatDescriptor(2);
665  IConnectableLayer* const concatLayer = graph.AddLayer<ConcatLayer>(concatDescriptor, "concat");
666 
667  graph.AddLayer<OutputLayer>(0, "output");
668 
669  // Construct sub-graph
671  convLayer1,
672  convLayer2,
673  concatLayer},
674  {},
675  {});
676 
677  // Construct dummy pre-compiled layer
678  PreCompiledDescriptor preCompiledDescriptor(0, 0);
679  Layer* const preCompiledLayer = graph.AddLayer<PreCompiledLayer>(preCompiledDescriptor, "pre-compiled");
680 
681  // Save sub-graph layers for later verification
682  const SubgraphView::IConnectableLayers subgraphLayers = subgraph->GetIConnectableLayers();
683 
684  // Substitute sub-graph with pre-compiled layer
685  graph.SubstituteSubgraph(*subgraph, preCompiledLayer);
686 
687  // Check that the layers belonging to the sub-graph have been erased from the graph after substitution
688  CHECK(!AreAnySubgraphLayersPresentInGraph(subgraphLayers, graph));
689 }
690 
691 }
692 
693 TEST_SUITE("SubgraphSelection")
694 {
695 TEST_CASE("SubgraphForEmptyGraph")
696 {
697  Graph graph;
698  SubgraphView subgraph(graph);
699 
700  CHECK(subgraph.GetIInputSlots().empty());
701  CHECK(subgraph.GetIOutputSlots().empty());
702  CHECK(subgraph.GetIConnectableLayers().empty());
703 }
704 
705 TEST_CASE("SubgraphForEntireGraph")
706 {
707  Graph graph;
708 
709  auto output = graph.AddLayer<OutputLayer>(0, "output");
710  auto mid0 = graph.InsertNewLayer<ActivationLayer>(output->GetInputSlot(0),
712  "mid0");
713  auto mid1 = graph.InsertNewLayer<ActivationLayer>(mid0->GetInputSlot(0),
715  "mid1");
716  graph.InsertNewLayer<InputLayer>(mid1->GetInputSlot(0), 0, "input");
717 
718  SubgraphView subgraph(graph);
719 
720  CHECK(subgraph.GetIInputSlots().empty());
721  CHECK(subgraph.GetIOutputSlots().empty());
722  CHECK(subgraph.GetIConnectableLayers().size() == graph.GetNumLayers());
723 }
724 
725 TEST_CASE("NoSubgraphsForNoMatch")
726 {
727  Graph graph;
728 
729  auto output = graph.AddLayer<OutputLayer>(0, "output");
730  graph.InsertNewLayer<InputLayer>(output->GetInputSlot(0), 0, "input");
731 
733  SubgraphViewSelector::SelectSubgraphs(graph, [](const Layer &) { return false; });
734 
735  CHECK(subgraphs.empty());
736 }
737 
738 TEST_CASE("OneSubgraphsSelectedASingleMatch")
739 {
740  Graph graph;
741 
742  auto output = graph.AddLayer<OutputLayer>(0, "output");
743  graph.InsertNewLayer<InputLayer>(output->GetInputSlot(0), 0, "input");
744 
747  graph,
748  // select the output layer only
749  [](const Layer & l)
750  {
751  bool isOutput = l.GetNameStr().compare("output") == 0;
752  return isOutput;
753  });
754 
755  CHECK(subgraphs.size() == 1);
756  if (subgraphs.size() == 1)
757  {
758  auto expected = CreateSubgraphViewFrom(CreateInputsFrom({output}),
759  // outputs of 'output' will be empty
760  CreateOutputsFrom({output}),
761  {output});
762 
763  CompareSubgraphViews(subgraphs[0], expected);
764  }
765 }
766 
767 TEST_CASE("MultipleLayersSelectedInTheMiddle")
768 {
769  Graph graph;
770 
771  auto output = graph.AddLayer<OutputLayer>(0, "output");
772  auto mid0 = graph.InsertNewLayer<ActivationLayer>(output->GetInputSlot(0),
774  "mid0");
775  auto mid1 = graph.InsertNewLayer<ActivationLayer>(mid0->GetInputSlot(0),
777  "mid1");
778  graph.InsertNewLayer<InputLayer>(mid1->GetInputSlot(0), 0, "input");
779 
782  graph,
783  // select the middle layers only
784  [](const Layer & l)
785  {
786  bool toSelect = (l.GetType() == LayerType::Activation);
787  return toSelect;
788  });
789 
790  CHECK(subgraphs.size() == 1);
791  if (subgraphs.size() == 1)
792  {
793  auto expected = CreateSubgraphViewFrom(CreateInputsFrom({mid1}),
794  CreateOutputsFrom({mid0}),
795  {mid1, mid0});
796 
797  CompareSubgraphViews(subgraphs[0], expected);
798  }
799 }
800 
801 TEST_CASE("DisjointGraphs")
802 {
803  // The input graph has two disjoint sections and all layers are selected.
804  // This should result in two subgraphs being produced.
805  Graph graph;
806 
807  // the graph is constructed in reverse order
808  auto o0 = graph.AddLayer<OutputLayer>(0, "output0");
809  auto n0 = graph.InsertNewLayer<ActivationLayer>(o0->GetInputSlot(0), ActivationDescriptor{}, "intermediate0");
810  auto i0 = graph.InsertNewLayer<InputLayer>(n0->GetInputSlot(0), 0, "input0");
811 
812  auto o1 = graph.AddLayer<OutputLayer>(1, "output1");
813  auto n1 = graph.InsertNewLayer<ActivationLayer>(o1->GetInputSlot(0), ActivationDescriptor{}, "intermediate1");
814  auto i1 = graph.InsertNewLayer<InputLayer>(n1->GetInputSlot(0), 1, "input1");
815 
818  // select the middle layers only
819  [](const Layer&) {
820  return true;
821  });
822 
823  // expected results to test against
824  auto expected1 = CreateSubgraphViewFrom({}, {}, { o0, n0, i0 });
825  auto expected2 = CreateSubgraphViewFrom({}, {}, { o1, n1, i1 });
826  CHECK(subgraphs.size() == 2);
827  if (subgraphs.size() == 2)
828  {
829  CHECK((subgraphs[0] != nullptr));
830  CHECK((subgraphs[1] != nullptr));
831  if (subgraphs[0].get() != nullptr && subgraphs[1].get() != nullptr)
832  {
833  if (std::find(subgraphs[0]->GetIConnectableLayers().begin(),
834  subgraphs[0]->GetIConnectableLayers().end(), i0) !=
835  subgraphs[0]->GetIConnectableLayers().end())
836  {
837  CompareSubgraphViews(subgraphs[0], expected1);
838  CompareSubgraphViews(subgraphs[1], expected2);
839  }
840  else
841  {
842  CompareSubgraphViews(subgraphs[0], expected2);
843  CompareSubgraphViews(subgraphs[1], expected1);
844  }
845  }
846  }
847 }
848 
849 TEST_CASE("IslandInTheMiddle")
850 {
851  // This case represents the scenario when a non-selected X1 node is placed in the middle
852  // of the selected M* nodes.
853  // This checks that we don't merge M6 and M3 and create a dependency loop.
854  /*
855  M0
856  / \
857  M1 M4
858  | |
859  M2 X1 < the island in the middle !
860  | |
861  M3 M5
862  \ /
863  M6
864  */
 // NOTE(review): this listing is doxygen-rendered; hyperlinked lines (descriptor
 // arguments, the SubgraphViewSelector::SelectSubgraphs(...) call that produces
 // 'subgraphs', the sort-comparator signature) are elided here — confirm against
 // the full source file.
865  Graph graph;
866 
867  OriginsDescriptor concatDescriptor(2);
868  auto m6 = graph.AddLayer<ConcatLayer>(concatDescriptor, "m6");
 // Build the left branch bottom-up: InsertNewLayer splices each new layer
 // above the given input slot.
869  auto m3 = graph.InsertNewLayer<ActivationLayer>(m6->GetInputSlot(0),
871  "m3");
872  auto m2 = graph.InsertNewLayer<ActivationLayer>(m3->GetInputSlot(0),
874  "m2");
875  auto m1 = graph.InsertNewLayer<ActivationLayer>(m2->GetInputSlot(0),
877  "m1");
878  auto m0 = graph.InsertNewLayer<InputLayer>(m1->GetInputSlot(0), 0, "m0");
879 
 // Build the right branch (with the non-selected x1 island) bottom-up.
880  auto m5 = graph.InsertNewLayer<ActivationLayer>(m6->GetInputSlot(1),
882  "m5");
883  auto x1 = graph.InsertNewLayer<ActivationLayer>(m5->GetInputSlot(0),
885  "x1");
886  auto m4 = graph.InsertNewLayer<ActivationLayer>(x1->GetInputSlot(0),
888  "m4");
889 
890  // Connect the other branch to the input layer
891  m0->GetOutputSlot(0).Connect(m4->GetInputSlot(0));
892 
893  // All selected 'M*' layers will be of Activation type
896  graph,
897  // select the middle layers only
898  [](const Layer& l)
899  {
900  bool toSelect = std::string(l.GetName())[0] == 'm';
901  return toSelect;
902  });
903 
904  // expected results to test against
905  auto largerSubgraph = CreateSubgraphViewFrom(CreateInputsFrom({ m0 }),
906  CreateOutputsFrom({ m3, m4 }),
907  { m0, m1, m2, m3, m4 });
908 
909  auto smallerSubgraph =
910  CreateSubgraphViewFrom(std::vector<InputSlot*>{ &m5->GetInputSlot(0), & m6->GetInputSlot(0) },
911  std::vector<OutputSlot*>{},
912  { m5, m6 });
913 
 // The selection must be split in two — merging {m5, m6} with {m0..m4} would
 // create a dependency cycle through the non-selected x1.
914  CHECK(subgraphs.size() == 2);
915  if (subgraphs.size() == 2)
916  {
917  // we need to have valid subgraph pointers here
918  CHECK((subgraphs[0] != nullptr));
919  CHECK((subgraphs[1] != nullptr));
920 
921  if (subgraphs[0].get() != nullptr && subgraphs[1].get() != nullptr)
922  {
923  // sort the subgraphs by layer size, so it is simpler to test
924  std::sort(subgraphs.begin(), subgraphs.end(),
926  {
927  return (lhs->GetIConnectableLayers().size() < rhs->GetIConnectableLayers().size());
928  }
929  );
930 
931  CHECK(subgraphs[0]->GetIConnectableLayers().size() == 2);
932  CHECK(subgraphs[1]->GetIConnectableLayers().size() == 5);
933 
934  CompareSubgraphViews(subgraphs[0], smallerSubgraph);
935  CompareSubgraphViews(subgraphs[1], largerSubgraph);
936  }
937  }
938 }
939 
940 TEST_CASE("MultipleSimpleSubgraphs")
941 {
942  // This test case represents the scenario when we have two distinct subgraphs
943  // in a simple linear network. The selected nodes are the M* and the
944  // non-selected ones are the X*
945  //
946  // X1 -> M1 -> M2 -> X2 -> M3 -> X3
947  //
948  // The expected results is two subgraphs, one with {M1, M2} and another one
949  // with {M3}
950  //
 // NOTE(review): this listing is doxygen-rendered; hyperlinked lines (descriptor
 // arguments, the SelectSubgraphs(...) call producing 'subgraphs', the
 // sort-comparator signature) are elided here — confirm against the full source.
951  Graph graph;
952 
953  // the graph is constructed in reverse order
954  auto x3 = graph.AddLayer<OutputLayer>(0, "output");
955  auto m3 = graph.InsertNewLayer<ActivationLayer>(x3->GetInputSlot(0),
957  "m3");
958  auto x2 = graph.InsertNewLayer<Convolution2dLayer>(m3->GetInputSlot(0),
960  "x2");
961  auto m2 = graph.InsertNewLayer<ActivationLayer>(x2->GetInputSlot(0),
963  "m2");
964  auto m1 = graph.InsertNewLayer<ActivationLayer>(m2->GetInputSlot(0),
966  "m1");
967  graph.InsertNewLayer<InputLayer>(m1->GetInputSlot(0), 0, "x1");
968 
969  // All selected 'M*' layers will be of Activation type
972  graph,
973  // select the middle layers only
974  [](const Layer & l)
975  {
976  bool toSelect = (l.GetType() == LayerType::Activation);
977  return toSelect;
978  });
979 
980  // expected results to test against
981  auto largerSubgraph = CreateSubgraphViewFrom(CreateInputsFrom({m1}),
982  CreateOutputsFrom({m2}),
983  {m1, m2});
984 
985  auto smallerSubgraph = CreateSubgraphViewFrom(CreateInputsFrom({m3}),
986  CreateOutputsFrom({m3}),
987  {m3});
988 
989  CHECK(subgraphs.size() == 2);
990  if (subgraphs.size() == 2)
991  {
992  // we need to have valid subgraph pointers here
993  CHECK((subgraphs[0] != nullptr));
994  CHECK((subgraphs[1] != nullptr));
995 
996  if (subgraphs[0].get() != nullptr && subgraphs[1].get() != nullptr)
997  {
998  // sort the subgraphs by layer size, so it is simpler to test
999  std::sort(subgraphs.begin(), subgraphs.end(),
1001  {
1002  return (lhs->GetIConnectableLayers().size() < rhs->GetIConnectableLayers().size());
1003  }
1004  );
1005 
 // After sorting: subgraphs[0] == {m3}, subgraphs[1] == {m1, m2}.
1006  CHECK(subgraphs[0]->GetIConnectableLayers().size() == 1);
1007  CHECK(subgraphs[1]->GetIConnectableLayers().size() == 2);
1008 
1009  CompareSubgraphViews(subgraphs[0], smallerSubgraph);
1010  CompareSubgraphViews(subgraphs[1], largerSubgraph);
1011  }
1012  }
1013 }
1014 
1015 TEST_CASE("SimpleLinearTest")
1016 {
1017  //X1 -> M1 -> M2 -> X2
1018  //Where the input slots of M1 and the output slots of M2 are to be the sub graph boundaries.
 // Simplest possible case: a single contiguous run of selected layers should
 // yield exactly one subgraph covering {M1, M2}.
 // NOTE(review): the SelectSubgraphs(...) call line producing 'subgraphs' is
 // elided in this doxygen-rendered view — confirm against the full source.
1019  Graph graph;
1020 
1021  ActivationDescriptor activationDefaults;
1022 
1023  auto layerX1 = graph.AddLayer<InputLayer>(0, "layerX1");
1024  auto layerX2 = graph.AddLayer<OutputLayer>(0, "layerX2");
1025  auto layerM1 = graph.AddLayer<ActivationLayer>(activationDefaults, "layerM1");
1026  auto layerM2 = graph.AddLayer<ActivationLayer>(activationDefaults, "layerM2");
1027 
1028  // X1
1029  // |
1030  // M1
1031  // |
1032  // M2
1033  // |
1034  // X2
1035 
1036  layerX1->GetOutputSlot(0).Connect(layerM1->GetInputSlot(0));
1037  layerM1->GetOutputSlot(0).Connect(layerM2->GetInputSlot(0));
1038  layerM2->GetOutputSlot(0).Connect(layerX2->GetInputSlot(0));
1039 
1042  graph,
1043  // select the activation layers M1 and M2
1044  [](const Layer & l)
1045  {
1046  bool toSelect = (l.GetType() == LayerType::Activation);
1047  return toSelect;
1048  });
1049 
1050  CHECK(subgraphs.size() == 1);
1051  if(subgraphs.size() == 1)
1052  {
1053  auto expected = CreateSubgraphViewFrom(CreateInputsFrom({layerM1}),
1054  CreateOutputsFrom({layerM2}),
1055  {layerM1, layerM2});
1056 
1057  CompareSubgraphViews(subgraphs[0], expected);
1058  }
1059 }
1060 
1061 TEST_CASE("MultiInputSingleOutput")
1062 {
1063  //X1 -> M1 -> M3 -> X3
1064  //X2 -> M2 -> M3 -> X3
1065  //Where the input slots of {M1, M2} and the output slots of M3 are to be the subgraph boundaries.
 // Checks that a selected region converging from two inputs into one output
 // is returned as a single subgraph with two boundary input slots.
 // NOTE(review): the SelectSubgraphs(...) call line producing 'subgraphs' is
 // elided in this doxygen-rendered view — confirm against the full source.
1066  Graph graph;
1067 
1068  ActivationDescriptor activationDefaults;
1069 
1070  auto layerX1 = graph.AddLayer<InputLayer>(0, "layerX1");
1071  auto layerX2 = graph.AddLayer<InputLayer>(1, "layerX2");
1072  auto layerM1 = graph.AddLayer<ActivationLayer>(activationDefaults, "layerM1");
1073  auto layerM2 = graph.AddLayer<ActivationLayer>(activationDefaults, "layerM2");
1074  auto layerM3 = graph.AddLayer<AdditionLayer>("layerM3");
1075  auto layerX3 = graph.AddLayer<OutputLayer>(0, "layerX3");
1076 
1077  // X1 X2
1078  // | |
1079  // M1 M2
1080  // \ |
1081  // \ |
1082  // \|
1083  // M3
1084  // |
1085  // |
1086  // X3
1087 
1088  layerX1->GetOutputSlot(0).Connect(layerM1->GetInputSlot(0));
1089  layerX2->GetOutputSlot(0).Connect(layerM2->GetInputSlot(0));
1090  layerM1->GetOutputSlot(0).Connect(layerM3->GetInputSlot(0));
1091  layerM2->GetOutputSlot(0).Connect(layerM3->GetInputSlot(1));
1092  layerM3->GetOutputSlot(0).Connect(layerX3->GetInputSlot(0));
1093 
1096  graph,
1097  // select Activation and Addition Layers M1, M2 and M3
1098  [](const Layer & l)
1099  {
1100  bool toSelect = (l.GetType() == LayerType::Activation
1101  || l.GetType() == LayerType::Addition);
1102  return toSelect;
1103  });
1104 
1105  CHECK(subgraphs.size() == 1);
1106  if (subgraphs.size() == 1)
1107  {
1108  auto expected = CreateSubgraphViewFrom(CreateInputsFrom({layerM1, layerM2}),
1109  CreateOutputsFrom({layerM3}),
1110  {layerM1, layerM2, layerM3});
1111 
1112  CompareSubgraphViews(subgraphs[0], expected);
1113  }
1114 }
1115 
1116 TEST_CASE("SingleInputMultiOutput")
1117 {
1118  //X1 -> M1 -> M2 -> X2
1119  //X1 -> M1 -> M3 -> X3
1120  //Where the input slots of M1 and the output slots of {M2, M3} are to be the subgraph boundaries.
 // Mirror of MultiInputSingleOutput: a selected region fanning out from one
 // input to two outputs must come back as a single subgraph with two boundary
 // output slots.
 // NOTE(review): the SelectSubgraphs(...) call line producing 'subgraphs' is
 // elided in this doxygen-rendered view — confirm against the full source.
1121  Graph graph;
1122 
1123  ActivationDescriptor activationDefaults;
1124  ViewsDescriptor viewDefaults(2,4);
1125 
1126  Layer* layerX1 = graph.AddLayer<InputLayer>(0, "layerX1");
1127  Layer* layerM1 = graph.AddLayer<SplitterLayer>(viewDefaults, "layerM1");
1128  Layer* layerM2 = graph.AddLayer<ActivationLayer>(activationDefaults, "layerM2");
1129  Layer* layerM3 = graph.AddLayer<ActivationLayer>(activationDefaults, "layerM3");
1130  Layer* layerX2 = graph.AddLayer<OutputLayer>(0, "layerX2");
1131  Layer* layerX3 = graph.AddLayer<OutputLayer>(1, "layerX3");
1132 
1133  // X1
1134  // |
1135  // M1
1136  // /|
1137  // / |
1138  // / |
1139  // M2 M3
1140  // | |
1141  // | |
1142  // X2 X3
1143 
1144  layerX1->GetOutputSlot(0).Connect(layerM1->GetInputSlot(0));
1145  layerM1->GetOutputSlot(0).Connect(layerM2->GetInputSlot(0));
1146  layerM1->GetOutputSlot(1).Connect(layerM3->GetInputSlot(0));
1147  layerM2->GetOutputSlot(0).Connect(layerX2->GetInputSlot(0));
1148  layerM3->GetOutputSlot(0).Connect(layerX3->GetInputSlot(0));
1149 
1152  graph,
1153  // select Activation and Splitter Layers M1, M2 and M3
1154  [](const Layer & l)
1155  {
1156  bool toSelect = (l.GetType() == LayerType::Activation
1157  || l.GetType() == LayerType::Splitter);
1158  return toSelect;
1159  });
1160 
1161  CHECK(subgraphs.size() == 1);
1162  if(subgraphs.size() == 1)
1163  {
1164  auto expected = CreateSubgraphViewFrom(CreateInputsFrom({layerM1}),
1165  CreateOutputsFrom({layerM2, layerM3}),
1166  {layerM1, layerM2, layerM3});
1167 
1168  CompareSubgraphViews(subgraphs[0], expected);
1169  }
1170 }
1171 
1172 TEST_CASE("MultiInputMultiOutput")
1173 {
1174  // This case represents the scenario with multiple inputs and multiple outputs
1175  //
1176  // X1 -> M1 -> M3 -> M4 -> X3
1177  // X2 -> M2 -> M3 -> M5 -> X4
1178  //
1179  // Where the input slots of {M1, M2} and the output slots of {M4, M5} are to be the subgraph
1180  // boundaries.
 // NOTE(review): the SelectSubgraphs(...) call line producing 'subgraphs' is
 // elided in this doxygen-rendered view — confirm against the full source.
1181 
1182  Graph graph;
1183 
1184  ActivationDescriptor activationDefaults;
1185  OriginsDescriptor concatDescriptor(2);
1186 
1187  auto x1 = graph.AddLayer<InputLayer>(0, "x1");
1188  auto x2 = graph.AddLayer<InputLayer>(1, "x2");
1189 
1190  auto m1 = graph.AddLayer<ActivationLayer>(activationDefaults, "m1");
1191  auto m2 = graph.AddLayer<ActivationLayer>(activationDefaults, "m2");
1192  auto m3 = graph.AddLayer<ConcatLayer>(concatDescriptor, "m3");
1193 
1194  auto m4 = graph.AddLayer<ActivationLayer>(activationDefaults, "m4");
1195  auto m5 = graph.AddLayer<ActivationLayer>(activationDefaults, "m5");
1196 
1197  auto x3 = graph.AddLayer<OutputLayer>(0, "x3");
1198  auto x4 = graph.AddLayer<OutputLayer>(1, "x4");
1199 
1200  x1->GetOutputSlot(0).Connect(m1->GetInputSlot(0));
1201  x2->GetOutputSlot(0).Connect(m2->GetInputSlot(0));
1202 
1203  m1->GetOutputSlot(0).Connect(m3->GetInputSlot(0));
1204  m2->GetOutputSlot(0).Connect(m3->GetInputSlot(1));
1205 
 // m3's single output feeds both m4 and m5.
1206  m3->GetOutputSlot(0).Connect(m4->GetInputSlot(0));
1207  m3->GetOutputSlot(0).Connect(m5->GetInputSlot(0));
1208 
1209  m4->GetOutputSlot(0).Connect(x3->GetInputSlot(0));
1210  m5->GetOutputSlot(0).Connect(x4->GetInputSlot(0));
1211 
1212 
1215  graph,
1216  // select Activation and Concat Layers M1, M2, M3, M4, M5
1217  [](const Layer & l)
1218  {
1219  bool toSelect = (l.GetType() == LayerType::Activation
1220  || l.GetType() == LayerType::Concat);
1221  return toSelect;
1222  });
1223 
1224 
 // The whole diamond-shaped selected region must come back as one subgraph.
1225  CHECK(subgraphs.size() == 1);
1226  if (subgraphs.size() == 1)
1227  {
1228  auto expected = CreateSubgraphViewFrom(CreateInputsFrom({m1, m2}),
1229  CreateOutputsFrom({m4, m5}),
1230  {m1, m2, m3, m4, m5});
1231 
1232  CompareSubgraphViews(subgraphs[0], expected);
1233  }
1234 }
1235 
1236 TEST_CASE("ValidMerge")
1237 {
1238  // Checks that a node that has multiple choices for merge candidates (M3 in this case) correctly merges with the
1239  // one that it can (M0), and doesn't merge with the ones it can't (X2 and M2).
1240  //
1241  // X1
1242  // |
1243  // M1
1244  // / \'
1245  // X2 M2 M0
1246  // \ | /
1247  // M3
1248  //
 // NOTE(review): the SelectSubgraphs(...) call line producing 'subgraphs' and
 // the CreateSubgraphViewFrom( line for expectedSubgraph0 are elided in this
 // doxygen-rendered view — confirm against the full source.
1249  Graph graph;
1250 
1251  ActivationDescriptor activationDefaults;
1252  OriginsDescriptor concatDescriptor(3);
1253 
1254  auto x1 = graph.AddLayer<InputLayer>(0, "x1");
1255  auto x2 = graph.AddLayer<ActivationLayer>(activationDefaults, "x2");
1256  auto m0 = graph.AddLayer<InputLayer>(1, "m0");
1257  auto m1 = graph.AddLayer<ActivationLayer>(activationDefaults, "m1");
1258  auto m2 = graph.AddLayer<ActivationLayer>(activationDefaults, "m2");
1259  auto m3 = graph.AddLayer<ConcatLayer>(concatDescriptor, "m3");
1260 
1261  x1->GetOutputSlot(0).Connect(m1->GetInputSlot(0));
1262  m1->GetOutputSlot(0).Connect(x2->GetInputSlot(0));
1263  m1->GetOutputSlot(0).Connect(m2->GetInputSlot(0));
1264  x2->GetOutputSlot(0).Connect(m3->GetInputSlot(0));
1265  m2->GetOutputSlot(0).Connect(m3->GetInputSlot(1));
1266  m0->GetOutputSlot(0).Connect(m3->GetInputSlot(2));
1267 
1269  graph,
1270  [](const Layer& l) {
1271  return std::string(l.GetName())[0] == 'm';
1272  });
1273 
1274  // expected results to test against
 // Subgraph {m1, m2}: merging m3 into it would create a cycle through the
 // non-selected x2, so m3 instead merges with m0 into {m0, m3}.
1275  auto expectedSubgraph0 =
1277  CreateInputsFrom({ m1 }),
1278  std::vector<OutputSlot*>{ &m1->GetOutputSlot(0), &m2->GetOutputSlot(0) },
1279  { m1, m2 });
1280 
1281  auto expectedSubgraph1 = CreateSubgraphViewFrom(
1282  std::vector<InputSlot*>{ &m3->GetInputSlot(0), & m3->GetInputSlot(1) },
1283  CreateOutputsFrom({ }),
1284  { m0, m3 });
1285 
1286  CHECK(subgraphs.size() == 2);
1287  if (subgraphs.size() == 2)
1288  {
1289  // we need to have valid subgraph pointers here
1290  CHECK((subgraphs[0] != nullptr));
1291  CHECK((subgraphs[1] != nullptr));
1292 
1293  if (subgraphs[0].get() != nullptr && subgraphs[1].get() != nullptr)
1294  {
 // The two subgraphs can come back in either order; distinguish them by
 // input-slot count ({m1, m2} has exactly one boundary input slot).
1295  if (subgraphs[0]->GetIInputSlots().size() == 1)
1296  {
1297  CompareSubgraphViews(subgraphs[0], expectedSubgraph0);
1298  CompareSubgraphViews(subgraphs[1], expectedSubgraph1);
1299  }
1300  else
1301  {
1302  CompareSubgraphViews(subgraphs[0], expectedSubgraph1);
1303  CompareSubgraphViews(subgraphs[1], expectedSubgraph0);
1304  }
1305  }
1306  }
1307 }
1308 
1309 TEST_CASE("PropagatedDependencies")
1310 {
1311  // Version of IslandInTheMiddle with longer chain
1312  // to make sure antecedents are propagated.
1313  /*
1314  M0
1315  / \
1316  M1 M4
1317  | |
1318  M2 X1 < the island in the middle !
1319  | |
1320  | M10
1321  | |
1322  | X2 < another island in the middle !
1323  | |
1324  M3 M5
1325  \ /
1326  M6
1327  */
 // NOTE(review): this listing is doxygen-rendered; hyperlinked lines (descriptor
 // arguments, the SelectSubgraphs(...) call producing 'subgraphs', the
 // sort-comparator signature) are elided here — confirm against the full source.
1328  Graph graph;
1329 
1330  OriginsDescriptor concatDescriptor(2);
1331  auto m6 = graph.AddLayer<ConcatLayer>(concatDescriptor, "m6");
1332  auto m3 = graph.InsertNewLayer<ActivationLayer>(m6->GetInputSlot(0),
1334  "m3");
1335  auto m2 = graph.InsertNewLayer<ActivationLayer>(m3->GetInputSlot(0),
1337  "m2");
1338  auto m1 = graph.InsertNewLayer<ActivationLayer>(m2->GetInputSlot(0),
1340  "m1");
1341  auto m0 = graph.InsertNewLayer<InputLayer>(m1->GetInputSlot(0), 0, "m0");
1342 
 // Right branch: two separate non-selected islands (x1, x2) sandwich the
 // selected m10, so m10 must end up in its own subgraph.
1343  auto m5 = graph.InsertNewLayer<ActivationLayer>(m6->GetInputSlot(1),
1345  "m5");
1346  auto x2 = graph.InsertNewLayer<ActivationLayer>(m5->GetInputSlot(0), ActivationDescriptor{}, "x2");
1347  auto m10 = graph.InsertNewLayer<ActivationLayer>(x2->GetInputSlot(0), ActivationDescriptor{}, "m10");
1348  auto x1 = graph.InsertNewLayer<ActivationLayer>(m10->GetInputSlot(0),
1350  "x1");
1351  auto m4 = graph.InsertNewLayer<ActivationLayer>(x1->GetInputSlot(0),
1353  "m4");
1354 
1355  // Connect the other branch to the input layer
1356  m0->GetOutputSlot(0).Connect(m4->GetInputSlot(0));
1357 
1358  // All selected 'M*' layers will be of Activation type
1361  graph,
1362  // select the middle layers only
1363  [](const Layer& l)
1364  {
1365  bool toSelect = std::string(l.GetName())[0] == 'm';
1366  return toSelect;
1367  });
1368 
1369  // expected results to test against
1370  auto largerSubgraph = CreateSubgraphViewFrom(CreateInputsFrom({ m0 }),
1371  CreateOutputsFrom({ m3, m4 }),
1372  { m0, m1, m2, m3, m4 });
1373 
1374  auto mediumSubgraph = CreateSubgraphViewFrom(std::vector<InputSlot*>{ &m5->GetInputSlot(0), &m6->GetInputSlot(0) },
1375  std::vector<OutputSlot*>{}, { m5, m6 });
1376 
1377  auto smallerSubgraph =
1378  CreateSubgraphViewFrom(CreateInputsFrom({ m10 }), CreateOutputsFrom({ m10 }), { m10 });
1379 
 // Three subgraphs expected: {m10}, {m5, m6} and {m0..m4} — the propagated
 // antecedent information keeps them from being merged across the islands.
1380  CHECK(subgraphs.size() == 3);
1381  if (subgraphs.size() == 3)
1382  {
1383  // we need to have valid subgraph pointers here
1384  CHECK((subgraphs[0] != nullptr));
1385  CHECK((subgraphs[1] != nullptr));
1386  CHECK((subgraphs[2] != nullptr));
1387 
1388  if (subgraphs[0].get() != nullptr && subgraphs[1].get() != nullptr && subgraphs[2].get() != nullptr)
1389  {
1390  // sort the subgraphs by layer size, so it is simpler to test
1391  std::sort(subgraphs.begin(), subgraphs.end(),
1393  {
1394  return (lhs->GetIConnectableLayers().size() < rhs->GetIConnectableLayers().size());
1395  }
1396  );
1397 
1398  CompareSubgraphViews(subgraphs[0], smallerSubgraph);
1399  CompareSubgraphViews(subgraphs[1], mediumSubgraph);
1400  CompareSubgraphViews(subgraphs[2], largerSubgraph);
1401  }
1402  }
1403 }
1404 
1405 TEST_CASE("Random")
1406 {
1407  // Creates random networks, splits them into subgraphs and checks the resulting subgraphs obey the required
1408  // dependency rules. We can easily generate very large networks which helps cover corner cases the other
1409  // small, manually crafted tests have missed. We can also use this to measure performance on large networks.
 // NOTE(review): this listing is doxygen-rendered; the SelectSubgraphs(...) call
 // line that produces 'subgraphs' is elided below — confirm against the full source.
1410  constexpr bool debug = false; // Enable this to dump dot files and performance timings.
1411 
1412  std::mt19937 randomGenerator;
1413 
1414  // Helper function to get a random number in [0, maxExclusive)
1415  auto GetRandom = [&randomGenerator](auto maxExclusive) {
1416  // Note we could use uniform_int_distribution here, but that gives inconsistent results across platforms
1417  // which makes it harder to reproduce results.
1418  // It appears that uniform_real_distribution is consistent across MSVC and gcc so we use that and round it.
1419  std::uniform_real_distribution<float> uniform(0.0f, 1.0f);
1420  return static_cast<decltype(maxExclusive)>(uniform(randomGenerator) * static_cast<float>(maxExclusive));
1421  };
1422  // Helper function to get a bool that has probability 'trueProb' of being true.
1423  auto GetRandomFlag = [&randomGenerator](float trueProb) {
1424  std::uniform_real_distribution<float> uniform(0.0f, 1.0f);
1425  return uniform(randomGenerator) < trueProb;
1426  };
1427 
1428  constexpr uint32_t numTests = 100;
1429  for (uint32_t testIdx = 0; testIdx < numTests; ++testIdx)
1430  {
1431  randomGenerator.seed(testIdx); // Set a deterministic seed for reproducibility.
1432 
1433  // Create random graph
1434  Graph graph;
1435  {
1436  // First add the layers, without any connections. The following random constants determine the number of
1437  // each layer to add, along with the chance that each layer will be 'supported' (i.e. selected for
1438  // inclusion in the resulting subgraphs).
1439  uint32_t numInputs = 1 + GetRandom(4u);
1440  uint32_t numConstants = 1 + GetRandom(4u);
1441  uint32_t numOutputs = 1 + GetRandom(4u);
1442  uint32_t numConcats = 0 + GetRandom(500u);
1443  uint32_t numSplits = 0 + GetRandom(500u);
1444  float supportedProb = 0.7f;
1445 
 // Layer names encode "supported" status via their final character:
 // 'S' = selected by the predicate below, 'N' = not selected.
1446  for (uint32_t i = 0; i < numInputs; ++i)
1447  {
1448  std::string name = "input" + std::to_string(i) + (GetRandomFlag(supportedProb) ? "S" : "N");
1449  graph.AddLayer<InputLayer>(static_cast<LayerBindingId>(i), name.c_str());
1450  }
1451  for (uint32_t i = 0; i < numConstants; ++i)
1452  {
1453  std::string name = "constant" + std::to_string(i) + (GetRandomFlag(supportedProb) ? "S" : "N");
1454  graph.AddLayer<ConstantLayer>(name.c_str());
1455  }
1456  for (uint32_t i = 0; i < numOutputs; ++i)
1457  {
1458  std::string name = "output" + std::to_string(i) + (GetRandomFlag(supportedProb) ? "S" : "N");
1459  graph.AddLayer<OutputLayer>(static_cast<LayerBindingId>(i), name.c_str());
1460  }
1461  for (uint32_t i = 0; i < numConcats; ++i)
1462  {
1463  std::string name = "concat" + std::to_string(i) + (GetRandomFlag(supportedProb) ? "S" : "N");
1464  numInputs = 1 + GetRandom(3u);
1465  OriginsDescriptor concatDesc(numInputs);
1466  graph.AddLayer<ConcatLayer>(concatDesc, name.c_str());
1467  }
1468  for (uint32_t i = 0; i < numSplits; ++i)
1469  {
1470  std::string name = "split" + std::to_string(i) + (GetRandomFlag(supportedProb) ? "S" : "N");
1471  numOutputs = 1 + GetRandom(3u);
1472  ViewsDescriptor splitDesc(numOutputs);
1473  graph.AddLayer<SplitterLayer>(splitDesc, name.c_str());
1474  }
1475 
1476  // Associate each layer with a "depth" parameter. This is used when creating connections to ensure
1477  // that we don't have any loops, by only connecting to layers with a lower "depth".
1478  // This can be thought of as distance from the "top" of the graph (assuming the graph flows top-to-bottom).
1479  // Unfortunately this approach ends up producing very "wide" graphs,
1480  // which probably isn't very representative of 'real' networks.
1481  uint32_t maxLayerDepth = 5 + GetRandom(2000u);
1482  std::map<Layer*, uint32_t> layerDepths;
1483  std::map<uint32_t, std::vector<Layer*>> layersAtDepth;
1484  for (Layer* layer : graph)
1485  {
1486  uint32_t depth;
1487  if (layer->GetType() == LayerType::Input || layer->GetType() == LayerType::Constant)
1488  {
1489  // There needs to be at least one input-like layer above everything else, otherwise would be
1490  // nothing for them to connect to!
1491  depth = 0;
1492  }
1493  else
1494  {
1495  // Other layers are randomly assigned to later depths.
1496  depth = 1 + GetRandom(maxLayerDepth);
1497  }
1498  layerDepths[layer] = depth;
1499  layersAtDepth[depth].push_back(layer);
1500  }
1501 
1502  // Connect layers to each other. Every input slot of every layer must be connected, but it doesn't
1503  // matter if an output slot goes unused.
1504  for (Layer* layer : graph)
1505  {
1506  for (uint32_t inputSlotIdx = 0; inputSlotIdx < layer->GetNumInputSlots(); ++inputSlotIdx)
1507  {
1508  InputSlot& inputSlot = layer->GetInputSlot(inputSlotIdx);
1509  uint32_t maxLayerDepthToConnectTo = layerDepths[layer];
1510  // This prevents a connection causing a loop
1511  // Finding a layer to connect to may take multiple attempts, so keep trying until it works.
1512  while (inputSlot.GetConnectedOutputSlot() == nullptr)
1513  {
1514  uint32_t layerDepth = GetRandom(maxLayerDepthToConnectTo);
1515  const std::vector<Layer*>& layersToChooseFrom = layersAtDepth[layerDepth];
1516  if (layersToChooseFrom.size() == 0)
1517  {
 // Nothing at this depth — pick another random depth.
1518  continue;
1519  }
1520  Layer* layerToConnectWith = layersToChooseFrom[GetRandom(layersToChooseFrom.size())];
1521  if (layerToConnectWith->GetNumOutputSlots() == 0)
1522  {
 // Output layers have no output slots — can't be a source; retry.
1523  continue;
1524  }
1525  uint32_t outputSlotIdx = GetRandom(layerToConnectWith->GetNumOutputSlots());
1526  layerToConnectWith->GetOutputSlot(outputSlotIdx).Connect(inputSlot);
1527  }
1528  }
1529  }
1530  }
1531 
1532  if (debug)
1533  {
1534  std::ofstream f("INPUT_" + std::to_string(testIdx) + ".dot");
1535  graph.SerializeToDot(f);
1536  }
1537 
1538  // Run the splitting algorithm, selecting all nodes ending in an 'S' (as randomly assigned above).
1539  auto startTime = std::chrono::high_resolution_clock::now();
1540 
1543  [](const Layer& l) { return std::string(l.GetName()).back() == 'S'; });
1544 
1545  auto endTime = std::chrono::high_resolution_clock::now();
1546  auto duration = std::chrono::duration_cast<std::chrono::microseconds>(endTime - startTime);
1547  if (debug)
1548  {
1549  std::cout << "Test " << testIdx << ": " << duration.count() << " microseconds" << std::endl;
1550  }
1551 
1552  // Build a map of which subgraph is assigned to each layer.
1553  // This helps some of the following code.
1554  std::map<Layer*, SubgraphView*> layerToSubgraph;
1555  for (Layer* layer : graph)
1556  {
1557  size_t i = 0;
1558  for (std::unique_ptr<SubgraphView>& subgraph : subgraphs)
1559  {
1560  std::string name = std::to_string(i++);
1561  if (std::find(subgraph->cbeginIConnectable(), subgraph->cendIConnectable(), layer)
1562  != subgraph->cendIConnectable())
1563  {
1564  layerToSubgraph[layer] = subgraph.get();
1565  break;
1566  }
1567  }
1568  }
1569 
1570  if (debug)
1571  {
1572  // Before dumping the dot file, set each Layer's BackendId property so that the dot file
1573  // shows the resulting subgraph assignments.
1574  for (Layer* layer : graph)
1575  {
1576  std::string name = "NotAssigned";
1577  auto subgraphIt = layerToSubgraph.find(layer);
1578  if (subgraphIt != layerToSubgraph.end())
1579  {
1580  auto subgraphIdx = std::distance(subgraphs.begin(),
1581  std::find_if(subgraphs.begin(), subgraphs.end(),
1582  [&](auto& s) { return s.get() == subgraphIt->second; }));
1583  name = std::to_string(subgraphIdx);
1584  }
1585  layer->SetBackendId(armnn::BackendId(name));
1586  }
1587 
1588  std::ofstream f("GRAPH_" + std::to_string(testIdx) + ".dot");
1589  graph.SerializeToDot(f);
1590  }
1591 
1592  // Check the dependencies between subgraphs to make sure that the algorithm has produced a valid result.
1593  // Starting from each of the input slots of each subgraph, recurse up the graph and ensure that we never
1594  // encounter a layer that belongs to the subgraph that we started from.
1595  for (std::unique_ptr<SubgraphView>& subgraph : subgraphs)
1596  {
1597  for (IInputSlot* inSlot : subgraph->GetIInputSlots())
1598  {
 // Breadth-first walk upwards (towards the graph inputs) from this
 // boundary slot's producer.
1599  std::queue<Layer*> toProcess;
1600  toProcess.push(&PolymorphicDowncast<InputSlot*>(inSlot)->GetConnectedOutputSlot()->GetOwningLayer());
1601  while (toProcess.size() > 0)
1602  {
1603  Layer* l = toProcess.front();
1604  toProcess.pop();
1605 
 // Re-entering the starting subgraph would mean a cyclic dependency.
1606  CHECK(layerToSubgraph[l] != subgraph.get());
1607 
1608  for (const InputSlot& is : l->GetInputSlots())
1609  {
1610  toProcess.push(&is.GetConnectedOutputSlot()->GetOwningLayer());
1611  }
1612  }
1613  }
1614  }
1615  }
1616 }
1617 
1618 }
1619 
1620 TEST_SUITE("IntegrationTests")
1621 {
1622 TEST_CASE("SingleSubgraph")
1623 {
1624  // This test case represents the scenario when we have one subgraph
1625  // in which two layers have GpuAcc backend assigned
 // End-to-end check: select by backend, then substitute the resulting subgraph
 // with a PreCompiledLayer and verify the boundary connections are rewired.
 // NOTE(review): the SelectSubgraphs(...) call line producing 'subgraphs' is
 // elided in this doxygen-rendered view — confirm against the full source.
1626 
1627  //Construct graph
1628  Graph graph;
1629 
1630  Layer* const inputLayer = graph.AddLayer<InputLayer>(0, "input");
1631 
1632  Convolution2dDescriptor convDescriptor;
1633  Layer* const convLayer1 = graph.AddLayer<Convolution2dLayer>(convDescriptor, "conv1");
1634  convLayer1->SetBackendId(Compute::GpuAcc);
1635 
1636  Layer* const convLayer2 = graph.AddLayer<Convolution2dLayer>(convDescriptor, "conv2");
1637  convLayer2->SetBackendId(Compute::GpuAcc);
1638 
1639  Layer* const outputLayer = graph.AddLayer<OutputLayer>(0, "output");
1640 
1641  inputLayer->GetOutputSlot(0).Connect(convLayer1->GetInputSlot(0));
1642  convLayer1->GetOutputSlot(0).Connect(convLayer2->GetInputSlot(0));
1643  convLayer2->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
1644 
1645  // GpuAcc sub graph selector
1648  graph,
1649  // select the GpuAcc layers only
1650  [](const Layer & l){
1651  bool toSelect = (l.GetBackendId() == Compute::GpuAcc);
1652  return toSelect;
1653  });
1654 
1655  CHECK(subgraphs.size() == 1);
1656  if(subgraphs.size() == 1)
1657  {
1658  CHECK((subgraphs[0] != nullptr));
1659 
1660  if (subgraphs[0].get() != nullptr)
1661  {
1662  unsigned int numInputSlots = armnn::numeric_cast<unsigned int>(subgraphs[0]->GetIInputSlots().size());
1663  unsigned int numOutputSlots = armnn::numeric_cast<unsigned int>(subgraphs[0]->GetIOutputSlots().size());
1664 
1665  CHECK((numInputSlots == 1));
1666  CHECK((numOutputSlots == 1));
1667 
1668  // Save sub-graph connections for comparison after substitution
1669  IOutputSlot* subgraphInputConn1 = subgraphs[0]->GetIInputSlot(0)->GetConnection();
1670  IInputSlot* subgraphOutputConn1 = subgraphs[0]->GetIOutputSlot(0)->GetConnection(0);
1671 
1672  // Construct dummy pre-compiled layer
1673  PreCompiledDescriptor preCompiledDescriptor(numInputSlots, numOutputSlots);
1674  Layer* const preCompiledLayer = graph.AddLayer<PreCompiledLayer>(preCompiledDescriptor, "pre-compiled");
1675 
1676  // Substitute sub-graph with pre-compiled layer
1677  graph.SubstituteSubgraph(*subgraphs[0], preCompiledLayer);
1678 
1679  // Check that connections are correct after substitution
 // The pre-compiled layer must have inherited the subgraph's exact
 // boundary connections.
1680  CHECK_EQ(preCompiledLayer->GetInputSlot(0).GetConnection(), subgraphInputConn1);
1681 
1682  CHECK_EQ(preCompiledLayer->GetOutputSlot(0).GetConnection(0), subgraphOutputConn1);
1683  }
1684  }
1685 }
1686 
1687 TEST_CASE("MultipleSubgraphs")
1688 {
1689  // This test case represents the scenario when we have two subgraphs
1690  // in which two layers have CpuAcc backend assigned
 // Layout: input -> splitter(CpuAcc) -> conv1/conv2 -> concat(CpuAcc) -> output.
 // The two CpuAcc layers are separated by unselected convolutions, so two
 // subgraphs result; each is then substituted with a PreCompiledLayer and the
 // boundary connections are verified.
 // NOTE(review): the SelectSubgraphs(...) call line producing 'subgraphs' and
 // the sort-comparator signature are elided in this doxygen-rendered view —
 // confirm against the full source.
1691 
1692  //Construct graph
1693  Graph graph;
1694 
1695  Layer* const inputLayer = graph.AddLayer<InputLayer>(0, "input");
1696 
1697  ViewsDescriptor splitterDescriptor(2);
1698  Layer* const splitterLayer = graph.AddLayer<SplitterLayer>(splitterDescriptor, "splitter");
1699  splitterLayer->SetBackendId(Compute::CpuAcc);
1700 
1701  Convolution2dDescriptor convDescriptor;
1702  Layer* const convLayer1 = graph.AddLayer<Convolution2dLayer>(convDescriptor, "conv1");
1703  Layer* const convLayer2 = graph.AddLayer<Convolution2dLayer>(convDescriptor, "conv2");
1704 
1705  OriginsDescriptor concatDescriptor(2);
1706  Layer* const pConcatLayer = graph.AddLayer<ConcatLayer>(concatDescriptor, "concat");
1707  pConcatLayer->SetBackendId(Compute::CpuAcc);
1708 
1709  Layer* const outputLayer = graph.AddLayer<OutputLayer>(0, "output");
1710 
1711  inputLayer->GetOutputSlot(0).Connect(splitterLayer->GetInputSlot(0));
1712  splitterLayer->GetOutputSlot(0).Connect(convLayer1->GetInputSlot(0));
1713  splitterLayer->GetOutputSlot(1).Connect(convLayer2->GetInputSlot(0));
1714  convLayer1->GetOutputSlot(0).Connect(pConcatLayer->GetInputSlot(0));
1715  convLayer2->GetOutputSlot(0).Connect(pConcatLayer->GetInputSlot(1));
1716  pConcatLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
1717 
1718  // CpuAcc sub graph selector
1721  graph,
1722  // select the CpuAcc layers only
1723  [](const Layer & l){
1724  bool toSelect = (l.GetBackendId() == Compute::CpuAcc);
1725  return toSelect;
1726  });
1727 
1728  CHECK(subgraphs.size() == 2);
1729  if(subgraphs.size() == 2)
1730  {
1731  CHECK((subgraphs[0] != nullptr));
1732  CHECK((subgraphs[1] != nullptr));
1733 
1734  if (subgraphs[0].get() != nullptr && subgraphs[1].get() != nullptr)
1735  {
1736  //Sort subgraphs by their inputSlot size.
 // After sorting: subgraphs[0] == {splitter} (1 input, 2 outputs),
 // subgraphs[1] == {concat} (2 inputs, 1 output).
1737  std::sort(subgraphs.begin(), subgraphs.end(),
1739  {
1740  return (lhs->GetIInputSlots().size() < rhs->GetIInputSlots().size());
1741  }
1742  );
1743 
1744  unsigned int numInputSlots1 = armnn::numeric_cast<unsigned int>(subgraphs[0]->GetIInputSlots().size());
1745  unsigned int numOutputSlots1 = armnn::numeric_cast<unsigned int>(subgraphs[0]->GetIOutputSlots().size());
1746 
1747  unsigned int numInputSlots2 = armnn::numeric_cast<unsigned int>(subgraphs[1]->GetIInputSlots().size());
1748  unsigned int numOutputSlots2 = armnn::numeric_cast<unsigned int>(subgraphs[1]->GetIOutputSlots().size());
1749 
1750  // Save sub-graph connections for comparison after substitution
1751  IOutputSlot* subgraph1InputConn = subgraphs[0]->GetIInputSlot(0)->GetConnection();
1752  IInputSlot* subgraph1OutputConn1 = subgraphs[0]->GetIOutputSlot(0)->GetConnection(0);
1753  IInputSlot* subgraph1OutputConn2 = subgraphs[0]->GetIOutputSlot(1)->GetConnection(0);
1754 
1755  // Save sub-graph connections for comparison after substitution
1756  IOutputSlot* subgraph2InputConn1 = subgraphs[1]->GetIInputSlot(0)->GetConnection();
1757  IOutputSlot* subgraph2InputConn2 = subgraphs[1]->GetIInputSlot(1)->GetConnection();
1758  IInputSlot* subgraph2OutputConn = subgraphs[1]->GetIOutputSlot(0)->GetConnection(0);
1759 
1760  PreCompiledDescriptor preCompiledDescriptor1(numInputSlots1, numOutputSlots1);
1761  Layer* const preCompiledLayer1 = graph.AddLayer<PreCompiledLayer>(preCompiledDescriptor1, "pre-compiled1");
1762 
1763  PreCompiledDescriptor preCompiledDescriptor2(numInputSlots2, numOutputSlots2);
1764  Layer* const preCompiledLayer2 = graph.AddLayer<PreCompiledLayer>(preCompiledDescriptor2, "pre-compiled2");
1765 
1766  // Substitute sub-graph with pre-compiled layer
1767  graph.SubstituteSubgraph(*subgraphs[0], preCompiledLayer1);
1768  graph.SubstituteSubgraph(*subgraphs[1], preCompiledLayer2);
1769 
1770  // Check that connections are correct after substitution
1771  CHECK_EQ(preCompiledLayer1->GetInputSlot(0).GetConnection(), subgraph1InputConn);
1772  CHECK_EQ(preCompiledLayer1->GetOutputSlot(0).GetConnection(0), subgraph1OutputConn1);
1773  CHECK_EQ(preCompiledLayer1->GetOutputSlot(1).GetConnection(0), subgraph1OutputConn2);
1774 
1775  CHECK_EQ(preCompiledLayer2->GetInputSlot(0).GetConnection(), subgraph2InputConn1);
1776  CHECK_EQ(preCompiledLayer2->GetInputSlot(1).GetConnection(), subgraph2InputConn2);
1777  CHECK_EQ(preCompiledLayer2->GetOutputSlot(0).GetConnection(0), subgraph2OutputConn);
1778  }
1779  }
1780 }
1781 
1782 TEST_CASE("SubgraphCycles")
1783 {
1784  // This case represent the scenario when a naive split could lead to a cyclic dependency between two subgraphs
1785  //
1786  // X0 -> M0 -> X1 -> M2 -> X2
1787  // X0 -> M0 -> M1 -> M2 -> X2
1788  //
1789  /*
1790  X0
1791  |
1792  |
1793  M0
1794  / |
1795  / |
1796  X1 M1
1797  \ /
1798  M2
1799  |
1800  X2
1801  */
1802  // The expected result for this is that M0,M1 will be part of one subgraph and M2 in another and the
1803  // input and output slots in the subgraphs will be set accordingly.
1804  //
1805  Graph graph;
1806 
1807  OriginsDescriptor originsDescriptor(2);
1808  auto x0 = graph.AddLayer<InputLayer>(0, "x0");
1809  auto m0 = graph.AddLayer<ActivationLayer>(ActivationDescriptor{}, "m0");
1810  auto x1 = graph.AddLayer<ActivationLayer>(ActivationDescriptor{}, "x1");
1811  auto m1 = graph.AddLayer<ActivationLayer>(ActivationDescriptor{}, "m1");
1812  auto m2 = graph.AddLayer<AdditionLayer>("m2");
1813  auto x2 = graph.AddLayer<ActivationLayer>(ActivationDescriptor{}, "x2");
1814 
1815  x0->GetOutputSlot(0).Connect(m0->GetInputSlot(0));
1816  m0->GetOutputSlot(0).Connect(x1->GetInputSlot(0));
1817  m0->GetOutputSlot(0).Connect(m1->GetInputSlot(0));
1818  x1->GetOutputSlot(0).Connect(m2->GetInputSlot(0));
1819  m1->GetOutputSlot(0).Connect(m2->GetInputSlot(1));
1820  m2->GetOutputSlot(0).Connect(x2->GetInputSlot(0));
1821 
1822  // All selected 'M*' layers will be have 'm' in the name
1825  graph,
1826  // select the middle layers only
1827  [](const Layer & l)
1828  {
1829  bool toSelect = (l.GetNameStr().find('m') != std::string::npos);
1830  return toSelect;
1831  });
1832 
1833  // expected results to test against
1834  auto inputSubgraph = CreateSubgraphViewFrom(CreateInputsFrom({m0}),
1835  CreateOutputsFrom({m0, m1}),
1836  {m0, m1});
1837 
1838  auto outputSubgraph = CreateSubgraphViewFrom(CreateInputsFrom({m2}),
1839  CreateOutputsFrom({m2}),
1840  {m2});
1841 
1842  CHECK(subgraphs.size() == 2);
1843  if (subgraphs.size() == 2)
1844  {
1845  // we need to have valid subgraph pointers here
1846  CHECK((subgraphs[0] != nullptr));
1847  CHECK((subgraphs[1] != nullptr));
1848 
1849  if (subgraphs[0].get() != nullptr && subgraphs[1].get() != nullptr)
1850  {
1851  // sort the subgraphs by layer size, so it is simpler to test
1852  std::sort(subgraphs.begin(), subgraphs.end(),
1854  {
1855  return (lhs->GetIConnectableLayers().size() < rhs->GetIConnectableLayers().size());
1856  }
1857  );
1858 
1859  // one subgraph needs to be size=1 and the other one is 4
1860  CHECK(subgraphs[0]->GetIConnectableLayers().size() == 1);
1861  CHECK(subgraphs[1]->GetIConnectableLayers().size() == 2);
1862 
1863  CompareSubgraphViews(subgraphs[0], outputSubgraph);
1864  CompareSubgraphViews(subgraphs[1], inputSubgraph);
1865  }
1866  }
1867 }
1868 
1869 TEST_CASE("SubgraphOrder")
1870 {
1871  Graph graph;
1872 
1873  auto input = graph.AddLayer<InputLayer>(0, "Input");
1874  auto activation = graph.AddLayer<ActivationLayer>(ActivationDescriptor{}, "Activation");
1875  auto output = graph.AddLayer<OutputLayer>(1, "Output");
1876 
1877  input->GetOutputSlot(0).Connect(activation->GetInputSlot(0));
1878  activation->GetOutputSlot(0).Connect(output->GetInputSlot(0));
1879 
1880  //Add in out of order
1881  auto view = CreateSubgraphViewFrom({},
1882  {},
1883  {output, input, activation});
1884 
1885  // Check the layers are sorted topologically in the view
1886  int idx=0;
1888  view->ForEachLayer([&idx, &expectedSorted](const Layer* l)
1889  {
1890  CHECK((expectedSorted[idx] == l->GetType()));
1891  idx++;
1892  }
1893  );
1894 }
1895 
1896 TEST_CASE("SubgraphViewWorkingCopy")
1897 {
1898  Graph graph;
1899 
1900  auto input = graph.AddLayer<InputLayer>(0, "Input");
1901  auto activation = graph.AddLayer<ActivationLayer>(ActivationDescriptor{}, "Activation");
1902  auto output = graph.AddLayer<OutputLayer>(1, "Output");
1903 
1904  input->GetOutputSlot(0).Connect(activation->GetInputSlot(0));
1905  activation->GetOutputSlot(0).Connect(output->GetInputSlot(0));
1906 
1907  //Add in out of order
1908  auto view = CreateSubgraphViewFrom({output, input, activation},
1909  {},
1910  {});
1911 
1912  SubgraphView workingCopy = view->GetWorkingCopy();
1913 
1914  // Check the layers are sorted topologically in the view
1915  int idx=0;
1917  workingCopy.ForEachIConnectableLayer([&idx, &expectedSorted](const IConnectableLayer* l)
1918  {
1919  CHECK((expectedSorted[idx] == l->GetType()));
1920  idx++;
1921  }
1922  );
1923 }
1924 
1925 bool ReplaceConstantMultiplicationWithDepthwise(SubgraphView& subgraph,
1926  IConnectableLayer* layer)
1927 {
1928  if (layer->GetType() == LayerType::Multiplication)
1929  {
1930  IInputSlot* patternSubgraphInput = &layer->GetInputSlot(0);
1931 
1932  const IConnectableLayer* inputLayer = &patternSubgraphInput->GetConnection()->GetOwningIConnectableLayer();
1933  const IConnectableLayer* constantLayer = &layer->GetInputSlot(1).GetConnection()->GetOwningIConnectableLayer();
1934 
1935  // Figure out which of the two inputs is the constant
1936  if (constantLayer->GetType() != LayerType::Constant)
1937  {
1938  patternSubgraphInput = &layer->GetInputSlot(1);
1939  std::swap(inputLayer, constantLayer);
1940  }
1941 
1942  if (constantLayer->GetType() == LayerType::Constant)
1943  {
1944  const TensorInfo& inputInfo = inputLayer->GetOutputSlot(0).GetTensorInfo();
1945  const TensorInfo& constInfo = constantLayer->GetOutputSlot(0).GetTensorInfo();
1946 
1947  // Add a Depthwise only where the constant input is a scalar that takes the form { 1, 1, 1, C }.
1948  // The scalar is used as weights for the convolution.
1949  if (constInfo.GetShape() == TensorShape({ 1, 1, 1, inputInfo.GetShape()[3] }))
1950  {
1951  auto replacementGraph = INetwork::Create();
1952 
1955 
1956  TensorInfo weightInfo = constInfo;
1957  const TensorInfo& outputInfo = layer->GetOutputSlot(0).GetTensorInfo();
1958  unsigned int M = outputInfo.GetShape()[3] / inputInfo.GetShape()[3];
1959  ARMNN_ASSERT_MSG(M == 1, "Constant multiplication only support 1x1x1xC, so M should always be 1 here");
1960  weightInfo.SetShape({ 1, 1, 1, constInfo.GetShape()[3] * M }); //1HW(I*M)
1961 
1962  const void* weightData = PolymorphicPointerDowncast<const ConstantLayer>(constantLayer)
1963  ->m_LayerOutput->GetConstTensor<void>();
1964  TensorInfo weightsInfo = constInfo;
1965  ConstTensor weights(weightsInfo, weightData);
1966 
1967  const auto depthwiseLayer = replacementGraph->AddDepthwiseConvolution2dLayer(
1968  desc, weights, armnn::EmptyOptional(), "Replacement for Constant-Multiplication");
1969 
1970  auto& outslot = layer->GetOutputSlot(0);
1971  SubgraphView::IOutputSlots outputs{ &outslot };
1973  layers.push_back(layer);
1974  layers.push_back(const_cast<IConnectableLayer*>(constantLayer));
1975 
1976  SubgraphView patternSubgraph(std::move(layers), {patternSubgraphInput}, {&layer->GetOutputSlot(0)});
1977 
1978  subgraph.SubstituteSubgraph(patternSubgraph, depthwiseLayer );
1979 
1980  return true;
1981  }
1982  }
1983  }
1984  return false;
1985 }
1986 
1987 bool ReplaceTestMultiplication(SubgraphView& subgraph,
1988  IConnectableLayer* layer)
1989 {
1990  if (layer->GetType() == LayerType::Multiplication)
1991  {
1992 
1993  switch (layer->GetType())
1994  {
1996  return ReplaceConstantMultiplicationWithDepthwise(subgraph, layer);
1997  break;
1998  default:
1999  throw Exception("Found unknown MultiplicationSupportedMode value");
2000  break;
2001  }
2002  }
2003  return false;
2004 }
2005 
2006 void ReplaceUnsupportedLayers(SubgraphView& subgraph)
2007 {
2008  using ReplacementFunc = bool (*)(SubgraphView&, IConnectableLayer*);
2009  const ReplacementFunc replacementFuncs[] = {
2010  &ReplaceTestMultiplication,
2011  };
2012 
2013  subgraph.ForEachLayer([replacementFuncs, &subgraph](IConnectableLayer* layer)
2014  {
2015  auto madeChange = false;
2016  for (const ReplacementFunc f : replacementFuncs)
2017  {
2018  madeChange = f(subgraph, layer);
2019  if (madeChange)
2020  {
2021  goto nextIteration;
2022  }
2023  }
2024  nextIteration:;
2025  }
2026  );
2027 }
2028 
2029 TEST_CASE("SubgraphViewWorkingCopyReplacementFunc")
2030 {
2031  Graph graph;
2032 
2033  const TensorInfo inputInfo({ 1, 8, 8, 16 }, DataType::QAsymmU8, 1.0f, 0);
2034  const TensorInfo constInfo({ 1, 1, 1, 16 }, DataType::QAsymmU8, 0.9f, 0, true);
2035  const TensorInfo outputInfo({ 1, 8, 8, 16 }, DataType::QAsymmU8, 1.0f, 0);
2036 
2037  std::vector<uint8_t> constData(constInfo.GetNumElements(), 0);
2038  std::iota(constData.begin(), constData.end(), 0);
2039  ConstTensor constTensor(constInfo, constData);
2040 
2041  // Add the original pattern
2042  IConnectableLayer* input = graph.AddLayer<InputLayer>(0, "input");
2043  auto constant = graph.AddLayer<ConstantLayer>("const");
2044 
2045  constant->m_LayerOutput = std::make_shared<ScopedTensorHandle>(constTensor);
2046  IConnectableLayer* mul = graph.AddLayer<MultiplicationLayer>("mul");
2047  IConnectableLayer* output = graph.AddLayer<OutputLayer>(0, "output");
2048 
2049  // Create connections between layers
2050  input->GetOutputSlot(0).SetTensorInfo(inputInfo);
2051  constant->GetOutputSlot(0).SetTensorInfo(constInfo);
2052  mul->GetOutputSlot(0).SetTensorInfo(outputInfo);
2053 
2054  input->GetOutputSlot(0).Connect(mul->GetInputSlot(0));
2055  constant->GetOutputSlot(0).Connect(mul->GetInputSlot(1));
2056  mul->GetOutputSlot(0).Connect(output->GetInputSlot(0));
2057 
2058  //Add in out of order
2059  auto view = CreateSubgraphViewFrom({output, input, mul, constant},
2060  {},
2061  {});
2062 
2063  SubgraphView workingCopy = view->GetWorkingCopy();
2064 
2065  // Check the WorkingCopy is as expected before replacement
2066  CHECK(workingCopy.GetIConnectableLayers().size() == 4);
2067  int idx=0;
2069  workingCopy.ForEachIConnectableLayer([&idx, &expectedSorted](const IConnectableLayer* l)
2070  {
2071  CHECK((expectedSorted[idx] == l->GetType()));
2072  idx++;
2073  }
2074  );
2075 
2076  // Replace Multiplication and Constant with Depthwise
2077  ReplaceUnsupportedLayers(workingCopy);
2078 
2079  // Check the layers are as expected
2080  CHECK(workingCopy.GetIConnectableLayers().size() == 3);
2081  idx=0;
2082  LayerType expectedSortedReplaced[] = {LayerType::Input, LayerType::DepthwiseConvolution2d, LayerType::Output};
2083  workingCopy.ForEachIConnectableLayer([&idx, &expectedSortedReplaced](const IConnectableLayer* l)
2084  {
2085  CHECK((expectedSortedReplaced[idx] == l->GetType()));
2086  idx++;
2087  }
2088  );
2089 }
2090 
2091 TEST_CASE("SubgraphViewWorkingCopySubstituteSubgraph")
2092 {
2093  Graph graph;
2094 
2095  auto input = graph.AddLayer<InputLayer>(0, "Input");
2096  auto activation = graph.AddLayer<ActivationLayer>(ActivationDescriptor{}, "Activation");
2097  auto output = graph.AddLayer<OutputLayer>(1, "Output");
2098 
2099  input->GetOutputSlot(0).Connect(activation->GetInputSlot(0));
2100  activation->GetOutputSlot(0).Connect(output->GetInputSlot(0));
2101 
2102  //Add in out of order
2103  auto view = CreateSubgraphViewFrom({output, input, activation},
2104  {},
2105  {});
2106 
2107  // Check SubstituteSubgraphView throws when called on original SubgraphView
2108  SubgraphView temp(input);
2109  CHECK_THROWS_AS(view->SubstituteSubgraph(temp, input), NullPointerException);
2110 
2111  // Check that GetWorkingCopy() being called on a working copy throws an exception
2112  auto workingCopy = view->GetWorkingCopy();
2113  CHECK_THROWS_AS(workingCopy.GetWorkingCopy(), Exception);
2114 }
2115 
2116 TEST_CASE("SubgraphViewWorkingCopyOptimizationViews")
2117 {
2118  Graph graph;
2119 
2120  const TensorInfo inputInfo({ 1, 8, 8, 16 }, DataType::QAsymmU8, 1.0f, 0);
2121  const TensorInfo constInfo({ 1, 1, 1, 16 }, DataType::QAsymmU8, 0.9f, 0, true);
2122  const TensorInfo outputInfo({ 1, 8, 8, 16 }, DataType::QAsymmU8, 1.0f, 0);
2123 
2124  std::vector<uint8_t> constData(constInfo.GetNumElements(), 0);
2125  std::iota(constData.begin(), constData.end(), 0);
2126  ConstTensor constTensor(constInfo, constData);
2127 
2128  // Add the original pattern
2129  IConnectableLayer* input = graph.AddLayer<InputLayer>(0, "input");
2130  auto constant = graph.AddLayer<ConstantLayer>("const");
2131 
2132  constant->m_LayerOutput = std::make_shared<ScopedTensorHandle>(constTensor);
2133  IConnectableLayer* mul = graph.AddLayer<MultiplicationLayer>("mul");
2134  IConnectableLayer* output = graph.AddLayer<OutputLayer>(0, "output");
2135 
2136  // Create connections between layers
2137  input->GetOutputSlot(0).SetTensorInfo(inputInfo);
2138  constant->GetOutputSlot(0).SetTensorInfo(constInfo);
2139  mul->GetOutputSlot(0).SetTensorInfo(outputInfo);
2140 
2141  input->GetOutputSlot(0).Connect(mul->GetInputSlot(0));
2142  constant->GetOutputSlot(0).Connect(mul->GetInputSlot(1));
2143  mul->GetOutputSlot(0).Connect(output->GetInputSlot(0));
2144 
2145  //Add in out of order
2146  auto view = CreateSubgraphViewFrom({output, input, mul, constant},
2147  {},
2148  {});
2149 
2150  SubgraphView workingCopy = view->GetWorkingCopy();
2151 
2152  // Check the WorkingCopy is as expected before replacement
2153  int idx=0;
2155  workingCopy.ForEachIConnectableLayer([&idx, &expectedSorted](const IConnectableLayer* l)
2156  {
2157  CHECK((expectedSorted[idx] == l->GetType()));
2158  idx++;
2159  }
2160  );
2161 
2162  // Replace Multiplication and Constant with Depthwise
2163  ReplaceUnsupportedLayers(workingCopy);
2164 
2165  // Check the layers are as expected
2166  idx=0;
2167  LayerType expectedSortedReplaced[] = {LayerType::Input, LayerType::DepthwiseConvolution2d, LayerType::Output};
2168  workingCopy.ForEachIConnectableLayer([&idx, &expectedSortedReplaced](const IConnectableLayer* l)
2169  {
2170  CHECK((expectedSortedReplaced[idx] == l->GetType()));
2171  idx++;
2172  }
2173  );
2174 
2175 
2176  // At this stage NPU would take the working copy and create CompiledBlocPtr with it.
2177 
2178  // We will just check that the procompiledLayer can still be added to the optimizationViews via a SubgraphView.
2179  OptimizationViews optimizationViews;
2180 
2181  CompiledBlobPtr ptr;
2182  IConnectableLayer* preCompiledLayer = optimizationViews.GetINetwork()->AddPrecompiledLayer(
2183  PreCompiledDescriptor(view->GetNumInputSlots(), view->GetNumOutputSlots()),
2184  std::move(ptr),
2185  EmptyOptional(),
2186  "pre-compiled");
2187 
2188 
2189  optimizationViews.AddSubstitution({ *view, SubgraphView(preCompiledLayer) });
2190  CHECK(optimizationViews.Validate(*view));
2191 }
2192 
2193 TEST_CASE("SubgraphViewWorkingCopyReplaceSlots")
2194 {
2195  Graph graph;
2196  const TensorInfo inputInfo({ 1, 8, 8, 16 }, DataType::QAsymmU8, 1.0f, 0);
2197  const TensorInfo constInfo({ 1, 1, 1, 16 }, DataType::QAsymmU8, 0.9f, 0, true);
2198  const TensorInfo outputInfo({ 1, 8, 8, 16 }, DataType::QAsymmU8, 1.0f, 0);
2199 
2200  std::vector<uint8_t> constData(constInfo.GetNumElements(), 0);
2201  std::iota(constData.begin(), constData.end(), 0);
2202  ConstTensor constTensor(constInfo, constData);
2203 
2204  // Add the original pattern
2205  IConnectableLayer* input = graph.AddLayer<InputLayer>(0, "input");
2206  auto constant = graph.AddLayer<ConstantLayer>("const");
2207 
2208  constant->m_LayerOutput = std::make_shared<ScopedTensorHandle>(constTensor);
2209  IConnectableLayer* mul = graph.AddLayer<MultiplicationLayer>("mul");
2210  IConnectableLayer* output = graph.AddLayer<OutputLayer>(0, "output");
2211 
2212  // Create connections between layers
2213  input->GetOutputSlot(0).SetTensorInfo(inputInfo);
2214  constant->GetOutputSlot(0).SetTensorInfo(constInfo);
2215  mul->GetOutputSlot(0).SetTensorInfo(outputInfo);
2216 
2217  input->GetOutputSlot(0).Connect(mul->GetInputSlot(0));
2218  constant->GetOutputSlot(0).Connect(mul->GetInputSlot(1));
2219  mul->GetOutputSlot(0).Connect(output->GetInputSlot(0));
2220 
2221  auto view = CreateSubgraphViewFrom({output, input, mul, constant},
2222  CreateIInputsFrom({mul}),
2223  CreateIOutputsFrom({mul}));
2224 
2225  SubgraphView workingCopy = view->GetWorkingCopy();
2226 
2227  // Check the WorkingCopy is as expected before replacement
2228  CHECK(workingCopy.GetIConnectableLayers().size() == 4);
2229  int idx=0;
2231  workingCopy.ForEachIConnectableLayer([&idx, &expectedSorted](const IConnectableLayer* l)
2232  {
2233  CHECK((expectedSorted[idx] == l->GetType()));
2234  idx++;
2235  }
2236  );
2237 
2238  // Replace Multiplication and Constant with Depthwise
2239  ReplaceUnsupportedLayers(workingCopy);
2240 
2241  // Check the layers are as expected
2242  idx=0;
2243  LayerType expectedSortedReplaced[] = {LayerType::Input, LayerType::DepthwiseConvolution2d, LayerType::Output};
2244  CHECK(workingCopy.GetIConnectableLayers().size() == 3);
2245  workingCopy.ForEachIConnectableLayer([&idx, &expectedSortedReplaced](const IConnectableLayer* l)
2246  {
2247  CHECK((expectedSortedReplaced[idx] == l->GetType()));
2248  idx++;
2249  }
2250  );
2251 }
2252 
2253 }
TEST_SUITE("TestConstTensorLayerVisitor")
A layer that the constant data can be bound to.
Iterator begin()
Returns iterator pointing to the beginning of the list. Lowercase for range-based for loops...
Definition: Graph.hpp:167
This layer represents a split operation.
A ViewsDescriptor for the SplitterLayer.
Interface for a layer that is connectable to other layers via InputSlots and OutputSlots.
Definition: INetwork.hpp:66
const IOutputSlots & GetIOutputSlots() const
const TensorShape & GetShape() const
Definition: Tensor.hpp:191
CPU Execution: Reference C++ kernels.
Status SerializeToDot(std::ostream &stream)
Definition: Graph.cpp:118
void swap(OriginsDescriptor &first, OriginsDescriptor &second)
const IConnectableLayers & GetIConnectableLayers() const
const IInputSlots & GetIInputSlots() const
#define ARMNN_NO_DEPRECATE_WARN_BEGIN
Definition: Deprecated.hpp:33
DataLayout m_DataLayout
The data layout to be used (NCHW, NHWC).
LayerT * AddLayer(Args &&... args)
Adds a new layer, of type LayerType, to the graph constructed with the arguments passed.
Definition: Graph.hpp:420
A Convolution2dDescriptor for the Convolution2dLayer.
int Connect(InputSlot &destination)
Definition: Layer.cpp:86
std::vector< OutputSlot * > OutputSlots
void AddSubstitution(SubstitutionPair &&substitution)
This layer represents an activation operation with the specified activation function.
Copyright (c) 2021 ARM Limited and Contributors.
void SetBackendId(const BackendId &id)
Definition: Layer.hpp:275
const std::vector< InputSlot > & GetInputSlots() const
Definition: Layer.hpp:242
virtual const IInputSlot * GetConnection(unsigned int index) const =0
const IOutputSlot * GetConnection() const override
Definition: Layer.hpp:204
unsigned int GetNumOutputSlots() const override
Returns the number of connectable output slots.
Definition: Layer.hpp:319
void SubstituteSubgraph(SubgraphView &, IConnectableLayer *)
These methods should be called on a working copy subgraph created from GetWorkingCopy.
int LayerBindingId
Type of identifiers for bindable layers (inputs, outputs).
Definition: Types.hpp:277
virtual void SetTensorInfo(const TensorInfo &tensorInfo)=0
void SetShape(const TensorShape &newShape)
Definition: Tensor.hpp:193
std::unique_ptr< void, CompiledBlobDeleter > CompiledBlobPtr
Definition: INetwork.hpp:245
The SubgraphView class represents a subgraph of a Graph.
const InputSlot & GetInputSlot(unsigned int index) const override
Get a const input slot handle by slot index.
Definition: Layer.hpp:321
A layer user-provided data can be bound to (e.g. inputs, outputs).
Definition: OutputLayer.hpp:13
std::list< IConnectableLayer * > IConnectableLayers
#define ARMNN_NO_DEPRECATE_WARN_END
Definition: Deprecated.hpp:34
#define ARMNN_ASSERT_MSG(COND, MSG)
Definition: Assert.hpp:15
An output connection slot for a layer.
Definition: INetwork.hpp:40
SubgraphView::InputSlots CreateInputsFrom(const std::vector< Layer *> &layers)
An OriginsDescriptor for the ConcatLayer.
std::vector< IOutputSlot * > IOutputSlots
A tensor defined by a TensorInfo (shape and data type) and an immutable backing store.
Definition: Tensor.hpp:327
This layer represents a merge operation.
Definition: ConcatLayer.hpp:13
void ForEachIConnectableLayer(Func func) const
const std::string & GetNameStr() const
Definition: Layer.hpp:225
LayerType GetType() const override
Returns the armnn::LayerType of this layer.
Definition: Layer.hpp:270
std::vector< SubgraphViewPtr > Subgraphs
virtual const IConnectableLayer & GetOwningIConnectableLayer() const =0
std::vector< IInputSlot * > IInputSlots
const OutputSlot * GetConnectedOutputSlot() const
Definition: Layer.hpp:56
GPU Execution: OpenCL: ArmCompute.
bool Validate(const SubgraphView &originalSubgraph) const
An ActivationDescriptor for the ActivationLayer.
Definition: Descriptors.hpp:36
const BackendId & GetBackendId() const
Definition: Layer.hpp:274
virtual LayerType GetType() const =0
Returns the armnn::LayerType of this layer.
std::vector< InputSlot * > InputSlots
This layer represents an addition operation.
void SubstituteSubgraph(SubgraphView &subgraph, IConnectableLayer *substituteLayer)
Substitutes the given sub-graph with either a new layer or a new sub-graph.
Definition: Graph.cpp:433
std::unique_ptr< SubgraphView > SubgraphViewPtr
EmptyOptional is used to initialize the Optional class in case we want to have default value for an O...
Definition: Optional.hpp:32
SubgraphView::SubgraphViewPtr CreateSubgraphViewFrom(SubgraphView::InputSlots &&inputs, SubgraphView::OutputSlots &&outputs, SubgraphView::Layers &&layers)
SubgraphView::OutputSlots CreateOutputsFrom(const std::vector< Layer *> &layers)
static Subgraphs SelectSubgraphs(Graph &graph, const LayerSelectorFunction &selector)
Selects subgraphs from a graph based on the selector function and the algorithm.
Base class for all ArmNN exceptions so that users can filter to just those.
Definition: Exceptions.hpp:46
CPU Execution: NEON: ArmCompute.
A layer user-provided data can be bound to (e.g. inputs, outputs).
Definition: InputLayer.hpp:13
Iterator end()
Returns iterator pointing to the end of the list. Lowercase for range-based for loops.
Definition: Graph.hpp:169
void SetTensorInfo(const TensorInfo &tensorInfo) override
Definition: Layer.cpp:61
virtual const IInputSlot & GetInputSlot(unsigned int index) const =0
Get a const input slot handle by slot index.
virtual const IOutputSlot * GetConnection() const =0
std::enable_if_t< std::is_unsigned< Source >::value &&std::is_unsigned< Dest >::value, Dest > numeric_cast(Source source)
Definition: NumericCast.hpp:35
const OutputSlot & GetOutputSlot(unsigned int index=0) const override
Get the const output slot handle by slot index.
Definition: Layer.hpp:323
virtual const TensorInfo & GetTensorInfo() const =0
virtual const IOutputSlot & GetOutputSlot(unsigned int index) const =0
Get the const output slot handle by slot index.
const char * GetName() const override
Returns the name of the layer.
Definition: Layer.hpp:316
This layer represents a convolution 2d operation.
std::unique_ptr< INetwork, void(*)(INetwork *network)> INetworkPtr
Definition: INetwork.hpp:241
virtual int Connect(IInputSlot &destination)=0
void ForEachLayer(Func func) const
A PreCompiledDescriptor for the PreCompiledLayer.
std::list< Layer * > Layers
size_t GetNumLayers() const
Definition: Graph.hpp:196
LayerT * InsertNewLayer(InputSlot &insertBefore, Args &&... args)
Inserts a new layer between the output slot currently connected to insertBefore and insertBefore itse...
Definition: Graph.hpp:434
This layer represents a multiplication operation.
SubgraphView GetWorkingCopy()
This method returns a copy of the original SubgraphView provided by OptimizeSubgraphView with a separ...
static INetworkPtr Create(NetworkOptions networkOptions={})
Definition: Network.cpp:492
An input connection slot for a layer.
Definition: INetwork.hpp:26
A DepthwiseConvolution2dDescriptor for the DepthwiseConvolution2dLayer.
const InputSlot * GetConnection(unsigned int index) const override
Definition: Layer.cpp:49
LayerType
When adding a new layer, adapt also the LastLayer enum value in the enum class LayerType below...
Definition: Types.hpp:458