ArmNN
 22.05
SubgraphViewTests.cpp File Reference
#include <Graph.hpp>
#include <SubgraphViewSelector.hpp>
#include <armnn/backends/OptimizationViews.hpp>
#include <armnn/backends/SubgraphView.hpp>
#include <armnn/backends/TensorHandle.hpp>
#include <armnn/utility/NumericCast.hpp>
#include <doctest/doctest.h>
#include <fstream>
#include <map>
#include <queue>
#include <random>
#include <chrono>
#include <numeric>

Go to the source code of this file.

Functions

 TEST_SUITE ("SubgraphViewBackwardCompatibilityTests")
 
 TEST_SUITE ("SubgraphSubstitution")
 
 TEST_SUITE ("SubgraphSelection")
 
 TEST_SUITE ("IntegrationTests")
 

Function Documentation

◆ TEST_SUITE() [1/4]

TEST_SUITE ( "SubgraphViewBackwardCompatibilityTests"  )

Definition at line 185 of file SubgraphViewTests.cpp.

References Graph::AddLayer(), ARMNN_NO_DEPRECATE_WARN_BEGIN, ARMNN_NO_DEPRECATE_WARN_END, OutputSlot::Connect(), INetwork::Create(), CreateSubgraphViewFrom(), Layer::GetInputSlot(), and Layer::GetOutputSlot().

186 {
187 // Test that SubgraphView has been converted to using IConnectableLayer/IInputSlot/IOutputSlot
188 // in a backward compatible manner from ILayer/InputSlot/OutputSlot
189 TEST_CASE("SubgraphViewIterators")
190 {
191  INetworkPtr net(INetwork::Create());
192  IConnectableLayer* layer = net->AddInputLayer(1, "input");
193 
194  SubgraphView subgraph{layer};
195 
196  // cbeginIConnectable() and cendIConnectable()
197  bool found = false;
198  if (std::find(subgraph.cbeginIConnectable(), subgraph.cendIConnectable(), layer)
199  != subgraph.cendIConnectable())
200  {
201  found = true;
202  }
203  CHECK(found);
204  found = false;
205 
206  // beginIConnectable() and endIConnectable()
207  if (std::find(subgraph.beginIConnectable(), subgraph.endIConnectable(), layer)
208  != subgraph.endIConnectable())
209  {
210  found = true;
211  }
212  CHECK(found);
213  found = false;
214 
215  // GetIConnectableLayers returns IConnectableLayers initialized when calling constructor given IConnectableLayers
216  const SubgraphView::IConnectableLayers& subgraphLayers = subgraph.GetIConnectableLayers();
217  for (auto& iConnectableLayer : subgraphLayers)
218  {
219  if (std::string(iConnectableLayer->GetName()) == "input")
220  {
221  found = true;
222  }
223  }
224  CHECK(found);
225  found = false;
226 
227  // Test GetLayers returns layers initialized when calling constructor given IConnectableLayers
229  const SubgraphView::Layers& subgraphLayersOld = subgraph.GetLayers();
231  for (auto& layerOld : subgraphLayersOld)
232  {
233  if (std::string(layerOld->GetName()) == "input")
234  {
235  found = true;
236  }
237  }
238  CHECK(found);
239 }
240 
241 TEST_CASE("SubgraphViewSlots")
242 {
243  // Construct graph
244  Graph graph;
245 
246  Layer* const inputLayer = graph.AddLayer<InputLayer>(0, "input");
247 
248  Convolution2dDescriptor convDescriptor;
249  Layer* const convLayer1 = graph.AddLayer<Convolution2dLayer>(convDescriptor, "conv1");
250  Layer* const convLayer2 = graph.AddLayer<Convolution2dLayer>(convDescriptor, "conv2");
251 
252  Layer* const outputLayer = graph.AddLayer<OutputLayer>(0, "output");
253 
254  inputLayer->GetOutputSlot(0).Connect(convLayer1->GetInputSlot(0));
255  convLayer1->GetOutputSlot(0).Connect(convLayer2->GetInputSlot(0));
256  convLayer2->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
257 
258  // Construct sub-graph
260  CreateIInputsFrom({convLayer1}, {1, 2}),
261  CreateIOutputsFrom({convLayer2}));
262 
263  // Test that both old and new are initialized
264  CHECK(subgraph->GetIInputSlots().size() == 1);
265  CHECK(subgraph->GetIOutputSlots().size() == 1);
266 
268  CHECK(subgraph->GetInputSlots().size() == 1);
269  CHECK(subgraph->GetOutputSlots().size() == 1);
270 
271  // Check that the old and new accessors point to the same address
272  CHECK(subgraph->GetOutputSlot(0) == subgraph->GetIOutputSlot(0));
273  CHECK(subgraph->GetInputSlot(0) == subgraph->GetIInputSlot(0));
275 
276 }
277 
278 TEST_CASE("SubgraphViewConstructors")
279 {
280  // Construct graph
281  Graph graph;
282 
283  Layer* const inputLayer = graph.AddLayer<InputLayer>(0, "input");
284 
285  Convolution2dDescriptor convDescriptor;
286  Layer* const convLayer1 = graph.AddLayer<Convolution2dLayer>(convDescriptor, "conv1");
287  Layer* const convLayer2 = graph.AddLayer<Convolution2dLayer>(convDescriptor, "conv2");
288 
289  Layer* const outputLayer = graph.AddLayer<OutputLayer>(0, "output");
290 
291  inputLayer->GetOutputSlot(0).Connect(convLayer1->GetInputSlot(0));
292  convLayer1->GetOutputSlot(0).Connect(convLayer2->GetInputSlot(0));
293  convLayer2->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
294 
295  // Construct sub-graph
297  CreateSubgraphViewFrom({inputLayer, convLayer1, convLayer2, outputLayer},
298  CreateIInputsFrom({convLayer1}),
299  CreateIOutputsFrom({convLayer2}));
300 
301  // Copy Constructor
302  SubgraphView subgraph2(*subgraph.get());
303  CHECK(subgraph->GetIConnectableLayers() == subgraph2.GetIConnectableLayers());
304  CHECK(subgraph->GetIInputSlots() == subgraph2.GetIInputSlots());
305  CHECK(subgraph->GetIOutputSlots() == subgraph2.GetIOutputSlots());
306 
308  CHECK(subgraph->GetLayers() == subgraph2.GetLayers());
309  CHECK(subgraph->GetInputSlots() == subgraph2.GetInputSlots());
310  CHECK(subgraph->GetOutputSlots() == subgraph2.GetOutputSlots());
312 
313  // Move Constructor
314  SubgraphView subgraph3(std::move(subgraph2));
315  CHECK(subgraph->GetIConnectableLayers() == subgraph3.GetIConnectableLayers());
316  CHECK(subgraph->GetIInputSlots() == subgraph3.GetIInputSlots());
317  CHECK(subgraph->GetIOutputSlots() == subgraph3.GetIOutputSlots());
318 
320  CHECK(subgraph->GetLayers() == subgraph3.GetLayers());
321  CHECK(subgraph->GetInputSlots() == subgraph3.GetInputSlots());
322  CHECK(subgraph->GetOutputSlots() == subgraph3.GetOutputSlots());
324 
325  // Clear
326  subgraph.get()->Clear();
327  CHECK(subgraph->GetIConnectableLayers().size() == 0);
328  CHECK(subgraph->GetIInputSlots().size() == 0);
329  CHECK(subgraph->GetIOutputSlots().size() == 0);
330 }
331 
332 } // SubgraphViewBackwardCompatibilityTests Test Suite end
Interface for a layer that is connectable to other layers via InputSlots and OutputSlots.
Definition: INetwork.hpp:66
#define ARMNN_NO_DEPRECATE_WARN_BEGIN
Definition: Deprecated.hpp:33
LayerT * AddLayer(Args &&... args)
Adds a new layer, of type LayerType, to the graph constructed with the arguments passed.
Definition: Graph.hpp:425
A Convolution2dDescriptor for the Convolution2dLayer.
int Connect(InputSlot &destination)
Definition: Layer.cpp:112
The SubgraphView class represents a subgraph of a Graph.
const InputSlot & GetInputSlot(unsigned int index) const override
Get a const input slot handle by slot index.
Definition: Layer.hpp:322
A layer user-provided data can be bound to (e.g. inputs, outputs).
Definition: OutputLayer.hpp:13
std::list< IConnectableLayer * > IConnectableLayers
#define ARMNN_NO_DEPRECATE_WARN_END
Definition: Deprecated.hpp:34
std::unique_ptr< SubgraphView > SubgraphViewPtr
SubgraphView::SubgraphViewPtr CreateSubgraphViewFrom(SubgraphView::InputSlots &&inputs, SubgraphView::OutputSlots &&outputs, SubgraphView::Layers &&layers)
A layer user-provided data can be bound to (e.g. inputs, outputs).
Definition: InputLayer.hpp:13
const OutputSlot & GetOutputSlot(unsigned int index=0) const override
Get the const output slot handle by slot index.
Definition: Layer.hpp:324
This layer represents a convolution 2d operation.
std::unique_ptr< INetwork, void(*)(INetwork *network)> INetworkPtr
Definition: INetwork.hpp:241
std::list< Layer * > Layers

◆ TEST_SUITE() [2/4]

TEST_SUITE ( "SubgraphSubstitution"  )

Definition at line 334 of file SubgraphViewTests.cpp.

References Graph::AddLayer(), OutputSlot::Connect(), armnn::CpuRef, INetwork::Create(), CreateInputsFrom(), CreateOutputsFrom(), CreateSubgraphViewFrom(), IInputSlot::GetConnection(), IOutputSlot::GetConnection(), InputSlot::GetConnection(), OutputSlot::GetConnection(), IConnectableLayer::GetInputSlot(), Layer::GetInputSlot(), IConnectableLayer::GetOutputSlot(), Layer::GetOutputSlot(), and Graph::SubstituteSubgraph().

335 {
336 TEST_CASE("SingleInputSingleOutput")
337 {
338  // Construct graph
339  Graph graph;
340 
341  Layer* const inputLayer = graph.AddLayer<InputLayer>(0, "input");
342 
343  Convolution2dDescriptor convDescriptor;
344  Layer* const convLayer1 = graph.AddLayer<Convolution2dLayer>(convDescriptor, "conv1");
345  Layer* const convLayer2 = graph.AddLayer<Convolution2dLayer>(convDescriptor, "conv2");
346  Layer* const weightsLayer1 = graph.AddLayer<ConstantLayer>("weights1");
347  Layer* const weightsLayer2 = graph.AddLayer<ConstantLayer>("weights2");
348  Layer* const outputLayer = graph.AddLayer<OutputLayer>(0, "output");
349 
350  inputLayer->GetOutputSlot(0).Connect(convLayer1->GetInputSlot(0));
351  weightsLayer1->GetOutputSlot(0).Connect(convLayer1->GetInputSlot(1));
352  convLayer1->GetOutputSlot(0).Connect(convLayer2->GetInputSlot(0));
353  weightsLayer2->GetOutputSlot(0).Connect(convLayer2->GetInputSlot(1));
354  convLayer2->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
355 
356  // Construct sub-graph
359  CreateIInputsFrom({convLayer1}, {1}),
360  CreateIOutputsFrom({convLayer2}));
361 
362  // Save sub-graph connections for comparison after substitution
363  // Using GetIInputSlot/GetIOutputSlot functions
364  IOutputSlot* subgraphInputConn = subgraph->GetIInputSlot(0)->GetConnection();
365  IInputSlot* subgraphOutputConn = subgraph->GetIOutputSlot(0)->GetConnection(0);
366 
367  // Construct dummy pre-compiled layer
368  PreCompiledDescriptor preCompiledDescriptor(1, 1);
369 
370  IConnectableLayer* const preCompiledLayer =
371  graph.AddLayer<PreCompiledLayer>(preCompiledDescriptor, "pre-compiled");
372 
373  // Substitute sub-graph with pre-compiled layer
374  graph.SubstituteSubgraph(*subgraph, preCompiledLayer);
375 
376  // Check that connections are correct after substitution
377  CHECK_EQ(preCompiledLayer->GetInputSlot(0).GetConnection(), subgraphInputConn);
378  CHECK_EQ(preCompiledLayer->GetOutputSlot(0).GetConnection(0), subgraphOutputConn);
379 }
380 
381 TEST_CASE("SingleInputSingleOutputAddPrecompiledLayerSubstituteSubgraph1")
382 {
383  // Construct graph.
384  Graph graph;
385 
386  Layer* const inputLayer = graph.AddLayer<InputLayer>(0, "input");
387 
388  Convolution2dDescriptor convDescriptor;
389  Layer* const convLayer1 = graph.AddLayer<Convolution2dLayer>(convDescriptor, "conv1");
390  Layer* const convLayer2 = graph.AddLayer<Convolution2dLayer>(convDescriptor, "conv2");
391 
392  Layer* const outputLayer = graph.AddLayer<OutputLayer>(0, "output");
393 
394  inputLayer->GetOutputSlot(0).Connect(convLayer1->GetInputSlot(0));
395  convLayer1->GetOutputSlot(0).Connect(convLayer2->GetInputSlot(0));
396  convLayer2->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
397 
398  // Construct sub-graph
400  CreateOutputsFrom({convLayer2}),
401  {});
402 
403  // Save sub-graph connections for comparison after substitution
404  IOutputSlot* subgraphInputConn = subgraph->GetIInputSlot(0)->GetConnection();
405  IInputSlot* subgraphOutputConn = subgraph->GetIOutputSlot(0)->GetConnection(0);
406 
407  PreCompiledDescriptor preCompiledDescriptor(1, 1);
408  CompiledBlobPtr compiledBlobPtr;
409  BackendId backend = Compute::CpuRef;
410 
411  // Construct dummy pre-compiled layer
412  INetworkPtr network = INetwork::Create();
413  IConnectableLayer* preCompiledLayer = network->AddPrecompiledLayer(preCompiledDescriptor,
414  std::move(compiledBlobPtr),
415  backend);
416 
417  // Substitute sub-graph with pre-compiled layer
418  graph.SubstituteSubgraph(*subgraph, preCompiledLayer);
419 
420  // Check that connections are correct after substitution
421  CHECK_EQ(preCompiledLayer->GetInputSlot(0).GetConnection(), subgraphInputConn);
422  CHECK_EQ(preCompiledLayer->GetOutputSlot(0).GetConnection(0), subgraphOutputConn);
423 }
424 
425 TEST_CASE("SingleInputSingleOutputAddPrecompiledLayerSubstituteSubgraph2")
426 {
427  // Construct graph.
428  Graph graph;
429 
430  Layer* const inputLayer = graph.AddLayer<InputLayer>(0, "input");
431 
432  Convolution2dDescriptor convDescriptor;
433  Layer* const convLayer1 = graph.AddLayer<Convolution2dLayer>(convDescriptor, "conv1");
434  Layer* const convLayer2 = graph.AddLayer<Convolution2dLayer>(convDescriptor, "conv2");
435 
436  Layer* const outputLayer = graph.AddLayer<OutputLayer>(0, "output");
437 
438  inputLayer->GetOutputSlot(0).Connect(convLayer1->GetInputSlot(0));
439  convLayer1->GetOutputSlot(0).Connect(convLayer2->GetInputSlot(0));
440  convLayer2->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
441 
442  // Construct sub-graph
444  CreateOutputsFrom({convLayer2}),
445  {});
446 
447  // Save sub-graph connections for comparison after substitution
448  IOutputSlot* subgraphInputConn = subgraph->GetIInputSlot(0)->GetConnection();
449  IInputSlot* subgraphOutputConn = subgraph->GetIOutputSlot(0)->GetConnection(0);
450 
451  PreCompiledDescriptor preCompiledDescriptor(1, 1);
452  CompiledBlobPtr compiledBlobPtr;
453  BackendId backend = Compute::CpuRef;
454 
455  // Construct dummy pre-compiled layer
456  INetworkPtr network = INetwork::Create();
457  IConnectableLayer* preCompiledLayer = network->AddPrecompiledLayer(preCompiledDescriptor,
458  std::move(compiledBlobPtr),
459  backend);
460  SubgraphView substituteSubgraph(preCompiledLayer);
461 
462  // Substitute sub-graph with pre-compiled layer
463  graph.SubstituteSubgraph(*subgraph, substituteSubgraph);
464 
465  // Check that connections are correct after substitution
466  CHECK_EQ(preCompiledLayer->GetInputSlot(0).GetConnection(), subgraphInputConn);
467  CHECK_EQ(preCompiledLayer->GetOutputSlot(0).GetConnection(0), subgraphOutputConn);
468 }
469 
470 TEST_CASE("SingleInputSingleOutputSubstituteGraph")
471 {
472  // Construct graph
473  Graph graph;
474 
475  Layer* const inputLayer = graph.AddLayer<InputLayer>(0, "input");
476 
477  Convolution2dDescriptor convDescriptor;
478  Layer* const convLayer1 = graph.AddLayer<Convolution2dLayer>(convDescriptor, "conv1");
479  Layer* const convLayer2 = graph.AddLayer<Convolution2dLayer>(convDescriptor, "conv2");
480 
481  Layer* const outputLayer = graph.AddLayer<OutputLayer>(0, "output");
482 
483  inputLayer->GetOutputSlot(0).Connect(convLayer1->GetInputSlot(0));
484  convLayer1->GetOutputSlot(0).Connect(convLayer2->GetInputSlot(0));
485  convLayer2->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
486 
487  // Construct sub-graph
489  CreateSubgraphViewFrom(CreateInputsFrom({convLayer1}, {1}),
490  CreateOutputsFrom({convLayer2}),
491  {});
492 
493  // Save sub-graph connections for comparison after substitution
494  IOutputSlot* subgraphInputConn = subgraph->GetIInputSlot(0)->GetConnection();
495  IInputSlot* subgraphOutputConn = subgraph->GetIOutputSlot(0)->GetConnection(0);
496 
497  // Construct second graph with a single pre-compiled layer
498  Graph substituteGraph;
499  PreCompiledDescriptor preCompiledDescriptor(1, 1);
500  Layer* const preCompiledLayer = substituteGraph.AddLayer<PreCompiledLayer>(preCompiledDescriptor, "pre-compiled");
501 
502  SubgraphViewSelector::SubgraphViewPtr substituteSubgraph =
503  CreateSubgraphViewFrom(CreateInputsFrom({preCompiledLayer}),
504  CreateOutputsFrom({preCompiledLayer}),
505  {preCompiledLayer});
506  // Substitute subgraph with pre-compiled layer
507  graph.SubstituteSubgraph(*subgraph, *substituteSubgraph);
508 
509  // Check that connections are correct after substitution
510  CHECK_EQ(preCompiledLayer->GetInputSlot(0).GetConnection(), subgraphInputConn);
511  CHECK_EQ(preCompiledLayer->GetOutputSlot(0).GetConnection(0), subgraphOutputConn);
512 }
513 
514 TEST_CASE("MultiInputSingleOutput")
515 {
516  // Construct graph
517  Graph graph;
518 
519  Layer* const inputLayer = graph.AddLayer<InputLayer>(0, "input");
520 
521  ViewsDescriptor splitterDescriptor(2);
522  Layer* const splitterLayer = graph.AddLayer<SplitterLayer>(splitterDescriptor, "splitter");
523 
524  Convolution2dDescriptor convDescriptor;
525  Layer* const convLayer1 = graph.AddLayer<Convolution2dLayer>(convDescriptor, "conv1");
526  Layer* const convLayer2 = graph.AddLayer<Convolution2dLayer>(convDescriptor, "conv2");
527 
528  OriginsDescriptor concatDescriptor(2);
529  Layer* const concatLayer = graph.AddLayer<ConcatLayer>(concatDescriptor, "concat");
530 
531  Layer* const outputLayer = graph.AddLayer<OutputLayer>(0, "output");
532 
533  inputLayer->GetOutputSlot(0).Connect(splitterLayer->GetInputSlot(0));
534  splitterLayer->GetOutputSlot(0).Connect(convLayer1->GetInputSlot(0));
535  splitterLayer->GetOutputSlot(1).Connect(convLayer2->GetInputSlot(0));
536  convLayer1->GetOutputSlot(0).Connect(concatLayer->GetInputSlot(0));
537  convLayer2->GetOutputSlot(0).Connect(concatLayer->GetInputSlot(1));
538  concatLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
539 
540  // Construct sub-graph
541  auto subgraph = CreateSubgraphViewFrom(CreateInputsFrom({convLayer1, convLayer2}, {1}),
542  CreateOutputsFrom({concatLayer}),
543  {});
544 
545  // Save sub-graph connections for comparison after substitution
546  IOutputSlot* subgraphInputConn1 = subgraph->GetIInputSlot(0)->GetConnection();
547  IOutputSlot* subgraphInputConn2 = subgraph->GetIInputSlot(1)->GetConnection();
548 
549  IInputSlot* subgraphOutputConn = subgraph->GetIOutputSlot(0)->GetConnection(0);
550 
551  // Construct dummy pre-compiled layer
552  PreCompiledDescriptor preCompiledDescriptor(2, 1);
553  Layer* const preCompiledLayer = graph.AddLayer<PreCompiledLayer>(preCompiledDescriptor, "pre-compiled");
554 
555  // Substitute sub-graph with pre-compiled layer
556  graph.SubstituteSubgraph(*subgraph, preCompiledLayer);
557 
558  // Check that connections are correct after substitution
559  CHECK_EQ(preCompiledLayer->GetInputSlot(0).GetConnection(), subgraphInputConn1);
560  CHECK_EQ(preCompiledLayer->GetInputSlot(1).GetConnection(), subgraphInputConn2);
561 
562  CHECK_EQ(preCompiledLayer->GetOutputSlot(0).GetConnection(0), subgraphOutputConn);
563 }
564 
565 TEST_CASE("SingleInputMultiOutput")
566 {
567  // Construct graph
568  Graph graph;
569 
570  Layer* const inputLayer = graph.AddLayer<InputLayer>(0, "input");
571 
572  Convolution2dDescriptor convDescriptor;
573  Layer* const convLayer1 = graph.AddLayer<Convolution2dLayer>(convDescriptor, "conv1");
574  Layer* const convLayer2 = graph.AddLayer<Convolution2dLayer>(convDescriptor, "conv2");
575  OriginsDescriptor concatDescriptor(2);
576  Layer* const concatLayer = graph.AddLayer<ConcatLayer>(concatDescriptor, "concat");
577  Layer* const outputLayer = graph.AddLayer<OutputLayer>(0, "output");
578 
579  ViewsDescriptor splitterDescriptor(2);
580  Layer* const splitterLayer = graph.AddLayer<SplitterLayer>(splitterDescriptor, "splitter");
581 
582  inputLayer->GetOutputSlot(0).Connect(splitterLayer->GetInputSlot(0));
583  splitterLayer->GetOutputSlot(0).Connect(convLayer1->GetInputSlot(0));
584  splitterLayer->GetOutputSlot(1).Connect(convLayer2->GetInputSlot(0));
585  convLayer1->GetOutputSlot(0).Connect(concatLayer->GetInputSlot(0));
586  convLayer2->GetOutputSlot(0).Connect(concatLayer->GetInputSlot(1));
587  concatLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
588 
589  // Construct sub-graph
591  CreateSubgraphViewFrom(CreateInputsFrom({splitterLayer}),
592  CreateOutputsFrom({convLayer1, convLayer2}),
593  {});
594 
595  // Save sub-graph connections for comparison after substitution
596  IOutputSlot* subgraphInputConn1 = subgraph->GetIInputSlot(0)->GetConnection();
597 
598  IInputSlot* subgraphOutputConn1 = subgraph->GetIOutputSlot(0)->GetConnection(0);
599  IInputSlot* subgraphOutputConn2 = subgraph->GetIOutputSlot(1)->GetConnection(0);
600 
601  // Construct dummy pre-compiled layer
602  PreCompiledDescriptor preCompiledDescriptor(1, 2);
603  Layer* const preCompiledLayer = graph.AddLayer<PreCompiledLayer>(preCompiledDescriptor, "pre-compiled");
604 
605  // Substitute sub-graph with pre-compiled layer
606  graph.SubstituteSubgraph(*subgraph, preCompiledLayer);
607 
608  // Check that connections are correct after substitution
609  CHECK_EQ(preCompiledLayer->GetInputSlot(0).GetConnection(), subgraphInputConn1);
610 
611  CHECK_EQ(preCompiledLayer->GetOutputSlot(0).GetConnection(0), subgraphOutputConn1);
612  CHECK_EQ(preCompiledLayer->GetOutputSlot(1).GetConnection(0), subgraphOutputConn2);
613 }
614 
615 TEST_CASE("MultiInputMultiOutput")
616 {
617  // Construct graph
618  Graph graph;
619 
620  Layer* const inputLayer = graph.AddLayer<InputLayer>(0, "input");
621 
622  ViewsDescriptor splitterDescriptor(2);
623  Layer* const splitterLayer = graph.AddLayer<SplitterLayer>(splitterDescriptor, "splitter");
624 
625  Convolution2dDescriptor convDescriptor;
626  Layer* const convLayer1 = graph.AddLayer<Convolution2dLayer>(convDescriptor, "conv1");
627  Layer* const convLayer2 = graph.AddLayer<Convolution2dLayer>(convDescriptor, "conv2");
628 
629  OriginsDescriptor concatDescriptor(2);
630  Layer* const concatLayer = graph.AddLayer<ConcatLayer>(concatDescriptor, "concat");
631 
632  Layer* const outputLayer = graph.AddLayer<OutputLayer>(0, "output");
633 
634  inputLayer->GetOutputSlot(0).Connect(splitterLayer->GetInputSlot(0));
635  splitterLayer->GetOutputSlot(0).Connect(convLayer1->GetInputSlot(0));
636  splitterLayer->GetOutputSlot(1).Connect(convLayer2->GetInputSlot(0));
637  convLayer1->GetOutputSlot(0).Connect(concatLayer->GetInputSlot(0));
638  convLayer2->GetOutputSlot(0).Connect(concatLayer->GetInputSlot(1));
639  concatLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
640 
641  // Construct sub-graph
643  CreateSubgraphViewFrom(CreateInputsFrom({convLayer1, convLayer2}, {1}),
644  CreateOutputsFrom({convLayer1, convLayer2}),
645  {});
646 
647  // Save sub-graph connections for comparison after substitution
648  IOutputSlot* subgraphInputConn1 = subgraph->GetIInputSlot(0)->GetConnection();
649  IOutputSlot* subgraphInputConn2 = subgraph->GetIInputSlot(1)->GetConnection();
650 
651  IInputSlot* subgraphOutputConn1 = subgraph->GetIOutputSlot(0)->GetConnection(0);
652  IInputSlot* subgraphOutputConn2 = subgraph->GetIOutputSlot(1)->GetConnection(0);
653 
654  // Construct dummy pre-compiled layer
655  PreCompiledDescriptor preCompiledDescriptor(2, 2);
656  Layer* const preCompiledLayer = graph.AddLayer<PreCompiledLayer>(preCompiledDescriptor, "pre-compiled");
657 
658  // Substitute sub-graph with pre-compiled layer
659  graph.SubstituteSubgraph(*subgraph, preCompiledLayer);
660 
661  // Check that connections are correct after substitution
662  CHECK_EQ(preCompiledLayer->GetInputSlot(0).GetConnection(), subgraphInputConn1);
663  CHECK_EQ(preCompiledLayer->GetInputSlot(1).GetConnection(), subgraphInputConn2);
664 
665  CHECK_EQ(preCompiledLayer->GetOutputSlot(0).GetConnection(0), subgraphOutputConn1);
666  CHECK_EQ(preCompiledLayer->GetOutputSlot(1).GetConnection(0), subgraphOutputConn2);
667 }
668 
669 TEST_CASE("EraseReplacedIConnectableLayers")
670 {
671  // Construct graph
672  Graph graph;
673 
674  graph.AddLayer<InputLayer>(0, "input");
675 
676  ViewsDescriptor splitterDescriptor(2);
677  IConnectableLayer* const splitterLayer = graph.AddLayer<SplitterLayer>(splitterDescriptor, "splitter");
678 
679  Convolution2dDescriptor convDescriptor;
680  IConnectableLayer* const convLayer1 = graph.AddLayer<Convolution2dLayer>(convDescriptor, "conv1");
681  IConnectableLayer* const convLayer2 = graph.AddLayer<Convolution2dLayer>(convDescriptor, "conv2");
682 
683  OriginsDescriptor concatDescriptor(2);
684  IConnectableLayer* const concatLayer = graph.AddLayer<ConcatLayer>(concatDescriptor, "concat");
685 
686  graph.AddLayer<OutputLayer>(0, "output");
687 
688  // Construct sub-graph
690  convLayer1,
691  convLayer2,
692  concatLayer},
693  {},
694  {});
695 
696  // Construct dummy pre-compiled layer
697  PreCompiledDescriptor preCompiledDescriptor(0, 0);
698  Layer* const preCompiledLayer = graph.AddLayer<PreCompiledLayer>(preCompiledDescriptor, "pre-compiled");
699 
700  // Save sub-graph layers for later verification
701  const SubgraphView::IConnectableLayers subgraphLayers = subgraph->GetIConnectableLayers();
702 
703  // Substitute sub-graph with pre-compiled layer
704  graph.SubstituteSubgraph(*subgraph, preCompiledLayer);
705 
706  // Check that the layers belonging to the sub-graph have been erased from the graph after substitution
707  CHECK(!AreAnySubgraphLayersPresentInGraph(subgraphLayers, graph));
708 }
709 
710 }
A layer that the constant data can be bound to.
This layer represents a split operation.
A ViewsDescriptor for the SplitterLayer.
Interface for a layer that is connectable to other layers via InputSlots and OutputSlots.
Definition: INetwork.hpp:66
LayerT * AddLayer(Args &&... args)
Adds a new layer, of type LayerType, to the graph constructed with the arguments passed.
Definition: Graph.hpp:425
A Convolution2dDescriptor for the Convolution2dLayer.
int Connect(InputSlot &destination)
Definition: Layer.cpp:112
virtual const IInputSlot * GetConnection(unsigned int index) const =0
const IOutputSlot * GetConnection() const override
Definition: Layer.hpp:204
std::unique_ptr< void, CompiledBlobDeleter > CompiledBlobPtr
Definition: INetwork.hpp:245
The SubgraphView class represents a subgraph of a Graph.
const InputSlot & GetInputSlot(unsigned int index) const override
Get a const input slot handle by slot index.
Definition: Layer.hpp:322
A layer user-provided data can be bound to (e.g. inputs, outputs).
Definition: OutputLayer.hpp:13
std::list< IConnectableLayer * > IConnectableLayers
An output connection slot for a layer.
Definition: INetwork.hpp:40
An OriginsDescriptor for the ConcatLayer.
This layer represents a merge operation.
Definition: ConcatLayer.hpp:13
void SubstituteSubgraph(SubgraphView &subgraph, IConnectableLayer *substituteLayer)
Substitutes the given sub-graph with either a new layer or a new sub-graph.
Definition: Graph.cpp:435
std::unique_ptr< SubgraphView > SubgraphViewPtr
SubgraphView::SubgraphViewPtr CreateSubgraphViewFrom(SubgraphView::InputSlots &&inputs, SubgraphView::OutputSlots &&outputs, SubgraphView::Layers &&layers)
SubgraphView::OutputSlots CreateOutputsFrom(const std::vector< Layer *> &layers)
A layer user-provided data can be bound to (e.g. inputs, outputs).
Definition: InputLayer.hpp:13
virtual const IInputSlot & GetInputSlot(unsigned int index) const =0
Get a const input slot handle by slot index.
virtual const IOutputSlot * GetConnection() const =0
const OutputSlot & GetOutputSlot(unsigned int index=0) const override
Get the const output slot handle by slot index.
Definition: Layer.hpp:324
virtual const IOutputSlot & GetOutputSlot(unsigned int index) const =0
Get the const output slot handle by slot index.
This layer represents a convolution 2d operation.
std::unique_ptr< INetwork, void(*)(INetwork *network)> INetworkPtr
Definition: INetwork.hpp:241
A PreCompiledDescriptor for the PreCompiledLayer.
SubgraphView::InputSlots CreateInputsFrom(Layer *layer, std::vector< unsigned int > ignoreSlots)
An input connection slot for a layer.
Definition: INetwork.hpp:26
const InputSlot * GetConnection(unsigned int index) const override
Definition: Layer.cpp:75

◆ TEST_SUITE() [3/4]

TEST_SUITE ( "SubgraphSelection"  )

Definition at line 712 of file SubgraphViewTests.cpp.

References armnn::Activation, armnn::Addition, Graph::AddLayer(), armnn::Concat, OutputSlot::Connect(), armnn::Constant, CreateInputsFrom(), CreateOutputsFrom(), CreateSubgraphViewFrom(), armnn::debug, InputSlot::GetConnectedOutputSlot(), SubgraphView::GetIConnectableLayers(), SubgraphView::GetIInputSlots(), Layer::GetInputSlot(), Layer::GetInputSlots(), SubgraphView::GetIOutputSlots(), Layer::GetName(), Layer::GetNameStr(), Graph::GetNumLayers(), Layer::GetNumOutputSlots(), Layer::GetOutputSlot(), Layer::GetType(), armnn::IgnoreUnused(), armnn::Input, Graph::InsertNewLayer(), SubgraphViewSelector::SelectSubgraphs(), Graph::SerializeToDot(), and armnn::Splitter.

713 {
714 TEST_CASE("SubgraphForEmptyGraph")
715 {
716  Graph graph;
717  SubgraphView subgraph(graph);
718 
719  CHECK(subgraph.GetIInputSlots().empty());
720  CHECK(subgraph.GetIOutputSlots().empty());
721  CHECK(subgraph.GetIConnectableLayers().empty());
722 }
723 
724 TEST_CASE("SubgraphForEntireGraph")
725 {
726  Graph graph;
727 
728  auto output = graph.AddLayer<OutputLayer>(0, "output");
729  auto mid0 = graph.InsertNewLayer<ActivationLayer>(output->GetInputSlot(0),
731  "mid0");
732  auto mid1 = graph.InsertNewLayer<ActivationLayer>(mid0->GetInputSlot(0),
734  "mid1");
735  graph.InsertNewLayer<InputLayer>(mid1->GetInputSlot(0), 0, "input");
736 
737  SubgraphView subgraph(graph);
738 
739  CHECK(subgraph.GetIInputSlots().empty());
740  CHECK(subgraph.GetIOutputSlots().empty());
741  CHECK(subgraph.GetIConnectableLayers().size() == graph.GetNumLayers());
742 }
743 
744 TEST_CASE("NoSubgraphsForNoMatch")
745 {
746  Graph graph;
747 
748  auto output = graph.AddLayer<OutputLayer>(0, "output");
749  graph.InsertNewLayer<InputLayer>(output->GetInputSlot(0), 0, "input");
750 
752  SubgraphViewSelector::SelectSubgraphs(graph, [](const Layer &) { return false; });
753 
754  CHECK(subgraphs.empty());
755 }
756 
757 TEST_CASE("OneSubgraphsSelectedASingleMatch")
758 {
759  Graph graph;
760 
761  auto output = graph.AddLayer<OutputLayer>(0, "output");
762  graph.InsertNewLayer<InputLayer>(output->GetInputSlot(0), 0, "input");
763 
765  SubgraphViewSelector::SelectSubgraphs(
766  graph,
767  // select the output layer only
768  [](const Layer & l)
769  {
770  bool isOutput = l.GetNameStr().compare("output") == 0;
771  return isOutput;
772  });
773 
774  CHECK(subgraphs.size() == 1);
775  if (subgraphs.size() == 1)
776  {
777  auto expected = CreateSubgraphViewFrom(CreateInputsFrom({output}),
778  // outputs of 'output' will be empty
779  CreateOutputsFrom({output}),
780  {output});
781 
782  CompareSubgraphViews(subgraphs[0], expected);
783  }
784 }
785 
786 TEST_CASE("MultipleLayersSelectedInTheMiddle")
787 {
788  Graph graph;
789 
790  auto output = graph.AddLayer<OutputLayer>(0, "output");
791  auto mid0 = graph.InsertNewLayer<ActivationLayer>(output->GetInputSlot(0),
793  "mid0");
794  auto mid1 = graph.InsertNewLayer<ActivationLayer>(mid0->GetInputSlot(0),
796  "mid1");
797  graph.InsertNewLayer<InputLayer>(mid1->GetInputSlot(0), 0, "input");
798 
800  SubgraphViewSelector::SelectSubgraphs(
801  graph,
802  // select the middle layers only
803  [](const Layer & l)
804  {
805  bool toSelect = (l.GetType() == LayerType::Activation);
806  return toSelect;
807  });
808 
809  CHECK(subgraphs.size() == 1);
810  if (subgraphs.size() == 1)
811  {
812  auto expected = CreateSubgraphViewFrom(CreateInputsFrom({mid1}),
813  CreateOutputsFrom({mid0}),
814  {mid1, mid0});
815 
816  CompareSubgraphViews(subgraphs[0], expected);
817  }
818 }
819 
820 TEST_CASE("DisjointGraphs")
821 {
822  // The input graph has two disjoint sections and all layers are selected.
823  // This should result in two subgraphs being produced.
824  Graph graph;
825 
826  // the graph is constructed in reverse order
827  auto o0 = graph.AddLayer<OutputLayer>(0, "output0");
828  auto n0 = graph.InsertNewLayer<ActivationLayer>(o0->GetInputSlot(0), ActivationDescriptor{}, "intermediate0");
829  auto i0 = graph.InsertNewLayer<InputLayer>(n0->GetInputSlot(0), 0, "input0");
830 
831  auto o1 = graph.AddLayer<OutputLayer>(1, "output1");
832  auto n1 = graph.InsertNewLayer<ActivationLayer>(o1->GetInputSlot(0), ActivationDescriptor{}, "intermediate1");
833  auto i1 = graph.InsertNewLayer<InputLayer>(n1->GetInputSlot(0), 1, "input1");
834 
836  SubgraphViewSelector::SelectSubgraphs(graph,
 837  // select all layers (both disjoint sections)
838  [](const Layer&) {
839  return true;
840  });
841 
842  // expected results to test against
843  auto expected1 = CreateSubgraphViewFrom({}, {}, { o0, n0, i0 });
844  auto expected2 = CreateSubgraphViewFrom({}, {}, { o1, n1, i1 });
845  CHECK(subgraphs.size() == 2);
846  if (subgraphs.size() == 2)
847  {
848  CHECK((subgraphs[0] != nullptr));
849  CHECK((subgraphs[1] != nullptr));
850  if (subgraphs[0].get() != nullptr && subgraphs[1].get() != nullptr)
851  {
852  if (std::find(subgraphs[0]->GetIConnectableLayers().begin(),
853  subgraphs[0]->GetIConnectableLayers().end(), i0) !=
854  subgraphs[0]->GetIConnectableLayers().end())
855  {
856  CompareSubgraphViews(subgraphs[0], expected1);
857  CompareSubgraphViews(subgraphs[1], expected2);
858  }
859  else
860  {
861  CompareSubgraphViews(subgraphs[0], expected2);
862  CompareSubgraphViews(subgraphs[1], expected1);
863  }
864  }
865  }
866 }
867 
868 TEST_CASE("IslandInTheMiddle")
869 {
 870  // This case represents the scenario when a non-selected X1 node is placed in the middle
871  // of the selected M* nodes.
872  // This checks that we don't merge M6 and M3 and create a dependency loop.
873  /*
874  M0
875  / \
876  M1 M4
877  | |
878  M2 X1 < the island in the middle !
879  | |
880  M3 M5
881  \ /
882  M6
883  */
884  Graph graph;
885 
886  OriginsDescriptor concatDescriptor(2);
887  auto m6 = graph.AddLayer<ConcatLayer>(concatDescriptor, "m6");
888  auto m3 = graph.InsertNewLayer<ActivationLayer>(m6->GetInputSlot(0),
890  "m3");
891  auto m2 = graph.InsertNewLayer<ActivationLayer>(m3->GetInputSlot(0),
893  "m2");
894  auto m1 = graph.InsertNewLayer<ActivationLayer>(m2->GetInputSlot(0),
896  "m1");
897  auto m0 = graph.InsertNewLayer<InputLayer>(m1->GetInputSlot(0), 0, "m0");
898 
899  auto m5 = graph.InsertNewLayer<ActivationLayer>(m6->GetInputSlot(1),
901  "m5");
902  auto x1 = graph.InsertNewLayer<ActivationLayer>(m5->GetInputSlot(0),
904  "x1");
905  auto m4 = graph.InsertNewLayer<ActivationLayer>(x1->GetInputSlot(0),
907  "m4");
908 
909  // Connect the other branch to the input layer
910  m0->GetOutputSlot(0).Connect(m4->GetInputSlot(0));
911 
912  // All selected 'M*' layers will be of Activation type
914  SubgraphViewSelector::SelectSubgraphs(
915  graph,
 916  // select all layers whose name starts with 'm'
917  [](const Layer& l)
918  {
919  bool toSelect = std::string(l.GetName())[0] == 'm';
920  return toSelect;
921  });
922 
923  // expected results to test against
924  auto largerSubgraph = CreateSubgraphViewFrom(CreateInputsFrom({ m0 }),
925  CreateOutputsFrom({ m3, m4 }),
926  { m0, m1, m2, m3, m4 });
927 
928  auto smallerSubgraph =
929  CreateSubgraphViewFrom(std::vector<InputSlot*>{ &m5->GetInputSlot(0), & m6->GetInputSlot(0) },
930  std::vector<OutputSlot*>{},
931  { m5, m6 });
932 
933  CHECK(subgraphs.size() == 2);
934  if (subgraphs.size() == 2)
935  {
936  // we need to have valid subgraph pointers here
937  CHECK((subgraphs[0] != nullptr));
938  CHECK((subgraphs[1] != nullptr));
939 
940  if (subgraphs[0].get() != nullptr && subgraphs[1].get() != nullptr)
941  {
942  // sort the subgraphs by layer size, so it is simpler to test
943  std::sort(subgraphs.begin(), subgraphs.end(),
945  {
946  return (lhs->GetIConnectableLayers().size() < rhs->GetIConnectableLayers().size());
947  }
948  );
949 
950  CHECK(subgraphs[0]->GetIConnectableLayers().size() == 2);
951  CHECK(subgraphs[1]->GetIConnectableLayers().size() == 5);
952 
953  CompareSubgraphViews(subgraphs[0], smallerSubgraph);
954  CompareSubgraphViews(subgraphs[1], largerSubgraph);
955  }
956  }
957 }
958 
959 TEST_CASE("MultipleSimpleSubgraphs")
960 {
961  // This test case represents the scenario when we have two distinct subgraphs
962  // in a simple linear network. The selected nodes are the M* and the
963  // non-selected ones are the X*
964  // W2 ->->
965  // |
966  // X1 -> M1 -> M2 -> X2 -> M3 -> X3
967  //
968  // The expected results is two subgraphs, one with {M1, M2} and another one
969  // with {M3}
970  //
971  Graph graph;
972 
973  // the graph is constructed in reverse order
974  auto x3 = graph.AddLayer<OutputLayer>(0, "output");
975 
976  auto m3 = graph.InsertNewLayer<ActivationLayer>(x3->GetInputSlot(0),
978  "m3");
979 
980  auto x2 = graph.InsertNewLayer<Convolution2dLayer>(m3->GetInputSlot(0),
982  "x2");
983 
984  auto w2 = graph.InsertNewLayer<ConstantLayer>(x2->GetInputSlot(1), "w2");
985 
986  auto m2 = graph.InsertNewLayer<ActivationLayer>(x2->GetInputSlot(0),
988  "m2");
989  auto m1 = graph.InsertNewLayer<ActivationLayer>(m2->GetInputSlot(0),
991  "m1");
992  graph.InsertNewLayer<InputLayer>(m1->GetInputSlot(0), 0, "x1");
993 
994  IgnoreUnused(w2);
995  // All selected 'M*' layers will be of Activation type
997  SubgraphViewSelector::SelectSubgraphs(
998  graph,
999  // select the middle layers only
1000  [](const Layer & l)
1001  {
1002  bool toSelect = (l.GetType() == LayerType::Activation);
1003  return toSelect;
1004  });
1005 
1006  // expected results to test against
1007  auto largerSubgraph = CreateSubgraphViewFrom(CreateInputsFrom({m1}),
1008  CreateOutputsFrom({m2}),
1009  {m1, m2});
1010 
1011  auto smallerSubgraph = CreateSubgraphViewFrom(CreateInputsFrom({m3}),
1012  CreateOutputsFrom({m3}),
1013  {m3});
1014 
1015  CHECK(subgraphs.size() == 2);
1016  if (subgraphs.size() == 2)
1017  {
1018  // we need to have valid subgraph pointers here
1019  CHECK((subgraphs[0] != nullptr));
1020  CHECK((subgraphs[1] != nullptr));
1021 
1022  if (subgraphs[0].get() != nullptr && subgraphs[1].get() != nullptr)
1023  {
1024  // sort the subgraphs by layer size, so it is simpler to test
1025  std::sort(subgraphs.begin(), subgraphs.end(),
1027  {
1028  return (lhs->GetIConnectableLayers().size() < rhs->GetIConnectableLayers().size());
1029  }
1030  );
1031 
1032  CHECK(subgraphs[0]->GetIConnectableLayers().size() == 1);
1033  CHECK(subgraphs[1]->GetIConnectableLayers().size() == 2);
1034 
1035  CompareSubgraphViews(subgraphs[0], smallerSubgraph);
1036  CompareSubgraphViews(subgraphs[1], largerSubgraph);
1037  }
1038  }
1039 }
1040 
1041 TEST_CASE("SimpleLinearTest")
1042 {
1043  //X1 -> M1 -> M2 -> X2
 1044  //Where the input slots of M1 and the output slots of M2 are to be the subgraph boundaries.
1045  Graph graph;
1046 
1047  ActivationDescriptor activationDefaults;
1048 
1049  auto layerX1 = graph.AddLayer<InputLayer>(0, "layerX1");
1050  auto layerX2 = graph.AddLayer<OutputLayer>(0, "layerX2");
1051  auto layerM1 = graph.AddLayer<ActivationLayer>(activationDefaults, "layerM1");
1052  auto layerM2 = graph.AddLayer<ActivationLayer>(activationDefaults, "layerM2");
1053 
1054  // X1
1055  // |
1056  // M1
1057  // |
1058  // M2
1059  // |
1060  // X2
1061 
1062  layerX1->GetOutputSlot(0).Connect(layerM1->GetInputSlot(0));
1063  layerM1->GetOutputSlot(0).Connect(layerM2->GetInputSlot(0));
1064  layerM2->GetOutputSlot(0).Connect(layerX2->GetInputSlot(0));
1065 
1067  SubgraphViewSelector::SelectSubgraphs(
1068  graph,
1069  // select the activation layers M1 and M2
1070  [](const Layer & l)
1071  {
1072  bool toSelect = (l.GetType() == LayerType::Activation);
1073  return toSelect;
1074  });
1075 
1076  CHECK(subgraphs.size() == 1);
1077  if(subgraphs.size() == 1)
1078  {
1079  auto expected = CreateSubgraphViewFrom(CreateInputsFrom({layerM1}),
1080  CreateOutputsFrom({layerM2}),
1081  {layerM1, layerM2});
1082 
1083  CompareSubgraphViews(subgraphs[0], expected);
1084  }
1085 }
1086 
1087 TEST_CASE("MultiInputSingleOutput")
1088 {
1089  //X1 -> M1 -> M3 -> X3
1090  //X2 -> M2 -> M3 -> X3
1091  //Where the input slots of {M1, M2} and the output slots of M3 are to be the subgraph boundaries.
1092  Graph graph;
1093 
1094  ActivationDescriptor activationDefaults;
1095 
1096  auto layerX1 = graph.AddLayer<InputLayer>(0, "layerX1");
1097  auto layerX2 = graph.AddLayer<InputLayer>(1, "layerX2");
1098  auto layerM1 = graph.AddLayer<ActivationLayer>(activationDefaults, "layerM1");
1099  auto layerM2 = graph.AddLayer<ActivationLayer>(activationDefaults, "layerM2");
1100  auto layerM3 = graph.AddLayer<AdditionLayer>("layerM3");
1101  auto layerX3 = graph.AddLayer<OutputLayer>(0, "layerX3");
1102 
1103  // X1 X2
1104  // | |
1105  // M1 M2
1106  // \ |
1107  // \ |
1108  // \|
1109  // M3
1110  // |
1111  // |
1112  // X3
1113 
1114  layerX1->GetOutputSlot(0).Connect(layerM1->GetInputSlot(0));
1115  layerX2->GetOutputSlot(0).Connect(layerM2->GetInputSlot(0));
1116  layerM1->GetOutputSlot(0).Connect(layerM3->GetInputSlot(0));
1117  layerM2->GetOutputSlot(0).Connect(layerM3->GetInputSlot(1));
1118  layerM3->GetOutputSlot(0).Connect(layerX3->GetInputSlot(0));
1119 
1121  SubgraphViewSelector::SelectSubgraphs(
1122  graph,
1123  // select Activation and Addition Layers M1, M2 and M3
1124  [](const Layer & l)
1125  {
1126  bool toSelect = (l.GetType() == LayerType::Activation
1127  || l.GetType() == LayerType::Addition);
1128  return toSelect;
1129  });
1130 
1131  CHECK(subgraphs.size() == 1);
1132  if (subgraphs.size() == 1)
1133  {
1134  auto expected = CreateSubgraphViewFrom(CreateInputsFrom({layerM1, layerM2}),
1135  CreateOutputsFrom({layerM3}),
1136  {layerM1, layerM2, layerM3});
1137 
1138  CompareSubgraphViews(subgraphs[0], expected);
1139  }
1140 }
1141 
1142 TEST_CASE("SingleInputMultiOutput")
1143 {
1144  //X1 -> M1 -> M2 -> X2
1145  //X1 -> M1 -> M3 -> X3
1146  //Where the input slots of M1 and the output slots of {M2, M3} are to be the subgraph boundaries.
1147  Graph graph;
1148 
1149  ActivationDescriptor activationDefaults;
1150  ViewsDescriptor viewDefaults(2,4);
1151 
1152  Layer* layerX1 = graph.AddLayer<InputLayer>(0, "layerX1");
1153  Layer* layerM1 = graph.AddLayer<SplitterLayer>(viewDefaults, "layerM1");
1154  Layer* layerM2 = graph.AddLayer<ActivationLayer>(activationDefaults, "layerM2");
1155  Layer* layerM3 = graph.AddLayer<ActivationLayer>(activationDefaults, "layerM3");
1156  Layer* layerX2 = graph.AddLayer<OutputLayer>(0, "layerX2");
1157  Layer* layerX3 = graph.AddLayer<OutputLayer>(1, "layerX3");
1158 
1159  // X1
1160  // |
1161  // M1
1162  // /|
1163  // / |
1164  // / |
1165  // M2 M3
1166  // | |
1167  // | |
1168  // X2 X3
1169 
1170  layerX1->GetOutputSlot(0).Connect(layerM1->GetInputSlot(0));
1171  layerM1->GetOutputSlot(0).Connect(layerM2->GetInputSlot(0));
1172  layerM1->GetOutputSlot(1).Connect(layerM3->GetInputSlot(0));
1173  layerM2->GetOutputSlot(0).Connect(layerX2->GetInputSlot(0));
1174  layerM3->GetOutputSlot(0).Connect(layerX3->GetInputSlot(0));
1175 
1177  SubgraphViewSelector::SelectSubgraphs(
1178  graph,
1179  // select Activation and Splitter Layers M1, M2 and M3
1180  [](const Layer & l)
1181  {
1182  bool toSelect = (l.GetType() == LayerType::Activation
1183  || l.GetType() == LayerType::Splitter);
1184  return toSelect;
1185  });
1186 
1187  CHECK(subgraphs.size() == 1);
1188  if(subgraphs.size() == 1)
1189  {
1190  auto expected = CreateSubgraphViewFrom(CreateInputsFrom({layerM1}),
1191  CreateOutputsFrom({layerM2, layerM3}),
1192  {layerM1, layerM2, layerM3});
1193 
1194  CompareSubgraphViews(subgraphs[0], expected);
1195  }
1196 }
1197 
1198 TEST_CASE("MultiInputMultiOutput")
1199 {
1200  // This case represents the scenario with multiple inputs and multiple outputs
1201  //
1202  // X1 -> M1 -> M3 -> M4 -> X3
1203  // X2 -> M2 -> M3 -> M5 -> X4
1204  //
1205  // Where the input slots of {M1, M2} and the output slots of {M4, M5} are to be the subgraph
1206  // boundaries.
1207 
1208  Graph graph;
1209 
1210  ActivationDescriptor activationDefaults;
1211  OriginsDescriptor concatDescriptor(2);
1212 
1213  auto x1 = graph.AddLayer<InputLayer>(0, "x1");
1214  auto x2 = graph.AddLayer<InputLayer>(1, "x2");
1215 
1216  auto m1 = graph.AddLayer<ActivationLayer>(activationDefaults, "m1");
1217  auto m2 = graph.AddLayer<ActivationLayer>(activationDefaults, "m2");
1218  auto m3 = graph.AddLayer<ConcatLayer>(concatDescriptor, "m3");
1219 
1220  auto m4 = graph.AddLayer<ActivationLayer>(activationDefaults, "m4");
1221  auto m5 = graph.AddLayer<ActivationLayer>(activationDefaults, "m5");
1222 
1223  auto x3 = graph.AddLayer<OutputLayer>(0, "x3");
1224  auto x4 = graph.AddLayer<OutputLayer>(1, "x4");
1225 
1226  x1->GetOutputSlot(0).Connect(m1->GetInputSlot(0));
1227  x2->GetOutputSlot(0).Connect(m2->GetInputSlot(0));
1228 
1229  m1->GetOutputSlot(0).Connect(m3->GetInputSlot(0));
1230  m2->GetOutputSlot(0).Connect(m3->GetInputSlot(1));
1231 
1232  m3->GetOutputSlot(0).Connect(m4->GetInputSlot(0));
1233  m3->GetOutputSlot(0).Connect(m5->GetInputSlot(0));
1234 
1235  m4->GetOutputSlot(0).Connect(x3->GetInputSlot(0));
1236  m5->GetOutputSlot(0).Connect(x4->GetInputSlot(0));
1237 
1238 
1240  SubgraphViewSelector::SelectSubgraphs(
1241  graph,
1242  // select Activation and Concat Layers M1, M2, M3, M4, M5
1243  [](const Layer & l)
1244  {
1245  bool toSelect = (l.GetType() == LayerType::Activation
1246  || l.GetType() == LayerType::Concat);
1247  return toSelect;
1248  });
1249 
1250 
1251  CHECK(subgraphs.size() == 1);
1252  if (subgraphs.size() == 1)
1253  {
1254  auto expected = CreateSubgraphViewFrom(CreateInputsFrom({m1, m2}),
1255  CreateOutputsFrom({m4, m5}),
1256  {m1, m2, m3, m4, m5});
1257 
1258  CompareSubgraphViews(subgraphs[0], expected);
1259  }
1260 }
1261 
1262 TEST_CASE("ValidMerge")
1263 {
1264  // Checks that a node that has multiple choices for merge candidates (M3 in this case) correctly merges with the
1265  // one that it can (M0), and doesn't merge with the ones it can't (X2 and M2).
1266  //
1267  // X1
1268  // |
1269  // M1
1270  // / \'
1271  // X2 M2 M0
1272  // \ | /
1273  // M3
1274  //
1275  Graph graph;
1276 
1277  ActivationDescriptor activationDefaults;
1278  OriginsDescriptor concatDescriptor(3);
1279 
1280  auto x1 = graph.AddLayer<InputLayer>(0, "x1");
1281  auto x2 = graph.AddLayer<ActivationLayer>(activationDefaults, "x2");
1282  auto m0 = graph.AddLayer<InputLayer>(1, "m0");
1283  auto m1 = graph.AddLayer<ActivationLayer>(activationDefaults, "m1");
1284  auto m2 = graph.AddLayer<ActivationLayer>(activationDefaults, "m2");
1285  auto m3 = graph.AddLayer<ConcatLayer>(concatDescriptor, "m3");
1286 
1287  x1->GetOutputSlot(0).Connect(m1->GetInputSlot(0));
1288  m1->GetOutputSlot(0).Connect(x2->GetInputSlot(0));
1289  m1->GetOutputSlot(0).Connect(m2->GetInputSlot(0));
1290  x2->GetOutputSlot(0).Connect(m3->GetInputSlot(0));
1291  m2->GetOutputSlot(0).Connect(m3->GetInputSlot(1));
1292  m0->GetOutputSlot(0).Connect(m3->GetInputSlot(2));
1293 
1294  SubgraphViewSelector::Subgraphs subgraphs = SubgraphViewSelector::SelectSubgraphs(
1295  graph,
1296  [](const Layer& l) {
1297  return std::string(l.GetName())[0] == 'm';
1298  });
1299 
1300  // expected results to test against
1301  auto expectedSubgraph0 =
1303  CreateInputsFrom({ m1 }),
1304  std::vector<OutputSlot*>{ &m1->GetOutputSlot(0), &m2->GetOutputSlot(0) },
1305  { m1, m2 });
1306 
1307  auto expectedSubgraph1 = CreateSubgraphViewFrom(
1308  std::vector<InputSlot*>{ &m3->GetInputSlot(0), & m3->GetInputSlot(1) },
1309  CreateOutputsFrom({ }),
1310  { m0, m3 });
1311 
1312  CHECK(subgraphs.size() == 2);
1313  if (subgraphs.size() == 2)
1314  {
1315  // we need to have valid subgraph pointers here
1316  CHECK((subgraphs[0] != nullptr));
1317  CHECK((subgraphs[1] != nullptr));
1318 
1319  if (subgraphs[0].get() != nullptr && subgraphs[1].get() != nullptr)
1320  {
1321  if (subgraphs[0]->GetIInputSlots().size() == 1)
1322  {
1323  CompareSubgraphViews(subgraphs[0], expectedSubgraph0);
1324  CompareSubgraphViews(subgraphs[1], expectedSubgraph1);
1325  }
1326  else
1327  {
1328  CompareSubgraphViews(subgraphs[0], expectedSubgraph1);
1329  CompareSubgraphViews(subgraphs[1], expectedSubgraph0);
1330  }
1331  }
1332  }
1333 }
1334 
1335 TEST_CASE("PropagatedDependencies")
1336 {
1337  // Version of IslandInTheMiddle with longer chain
1338  // to make sure antecedents are propagated.
1339  /*
1340  M0
1341  / \
1342  M1 M4
1343  | |
1344  M2 X1 < the island in the middle !
1345  | |
1346  | M10
1347  | |
1348  | X2 < another island in the middle !
1349  | |
1350  M3 M5
1351  \ /
1352  M6
1353  */
1354  Graph graph;
1355 
1356  OriginsDescriptor concatDescriptor(2);
1357  auto m6 = graph.AddLayer<ConcatLayer>(concatDescriptor, "m6");
1358  auto m3 = graph.InsertNewLayer<ActivationLayer>(m6->GetInputSlot(0),
1360  "m3");
1361  auto m2 = graph.InsertNewLayer<ActivationLayer>(m3->GetInputSlot(0),
1363  "m2");
1364  auto m1 = graph.InsertNewLayer<ActivationLayer>(m2->GetInputSlot(0),
1366  "m1");
1367  auto m0 = graph.InsertNewLayer<InputLayer>(m1->GetInputSlot(0), 0, "m0");
1368 
1369  auto m5 = graph.InsertNewLayer<ActivationLayer>(m6->GetInputSlot(1),
1371  "m5");
1372  auto x2 = graph.InsertNewLayer<ActivationLayer>(m5->GetInputSlot(0), ActivationDescriptor{}, "x2");
1373  auto m10 = graph.InsertNewLayer<ActivationLayer>(x2->GetInputSlot(0), ActivationDescriptor{}, "m10");
1374  auto x1 = graph.InsertNewLayer<ActivationLayer>(m10->GetInputSlot(0),
1376  "x1");
1377  auto m4 = graph.InsertNewLayer<ActivationLayer>(x1->GetInputSlot(0),
1379  "m4");
1380 
1381  // Connect the other branch to the input layer
1382  m0->GetOutputSlot(0).Connect(m4->GetInputSlot(0));
1383 
1384  // All selected 'M*' layers will be of Activation type
1386  SubgraphViewSelector::SelectSubgraphs(
1387  graph,
 1388  // select all layers whose name starts with 'm'
1389  [](const Layer& l)
1390  {
1391  bool toSelect = std::string(l.GetName())[0] == 'm';
1392  return toSelect;
1393  });
1394 
1395  // expected results to test against
1396  auto largerSubgraph = CreateSubgraphViewFrom(CreateInputsFrom({ m0 }),
1397  CreateOutputsFrom({ m3, m4 }),
1398  { m0, m1, m2, m3, m4 });
1399 
1400  auto mediumSubgraph = CreateSubgraphViewFrom(std::vector<InputSlot*>{ &m5->GetInputSlot(0), &m6->GetInputSlot(0) },
1401  std::vector<OutputSlot*>{}, { m5, m6 });
1402 
1403  auto smallerSubgraph =
1404  CreateSubgraphViewFrom(CreateInputsFrom({ m10 }), CreateOutputsFrom({ m10 }), { m10 });
1405 
1406  CHECK(subgraphs.size() == 3);
1407  if (subgraphs.size() == 3)
1408  {
1409  // we need to have valid subgraph pointers here
1410  CHECK((subgraphs[0] != nullptr));
1411  CHECK((subgraphs[1] != nullptr));
1412  CHECK((subgraphs[2] != nullptr));
1413 
1414  if (subgraphs[0].get() != nullptr && subgraphs[1].get() != nullptr && subgraphs[2].get() != nullptr)
1415  {
1416  // sort the subgraphs by layer size, so it is simpler to test
1417  std::sort(subgraphs.begin(), subgraphs.end(),
1419  {
1420  return (lhs->GetIConnectableLayers().size() < rhs->GetIConnectableLayers().size());
1421  }
1422  );
1423 
1424  CompareSubgraphViews(subgraphs[0], smallerSubgraph);
1425  CompareSubgraphViews(subgraphs[1], mediumSubgraph);
1426  CompareSubgraphViews(subgraphs[2], largerSubgraph);
1427  }
1428  }
1429 }
1430 
1431 TEST_CASE("Random")
1432 {
1433  // Creates random networks, splits them into subgraphs and checks the resulting subgraphs obey the required
1434  // dependency rules. We can easily generate very large networks which helps cover corner cases the other
1435  // small, manually crafted tests have missed. We can also use this to measure performance on large networks.
1436  constexpr bool debug = false; // Enable this to dump dot files and performance timings.
1437 
1438  std::mt19937 randomGenerator;
1439 
1440  // Helper function to get a random number in [0, maxExclusive)
1441  auto GetRandom = [&randomGenerator](auto maxExclusive) {
1442  // Note we could use uniform_int_distribution here, but that gives inconsistent results across platforms
1443  // which makes it harder to reproduce results.
1444  // It appears that uniform_real_distribution is consistent across MSVC and gcc so we use that and round it.
1445  std::uniform_real_distribution<float> uniform(0.0f, 1.0f);
1446  return static_cast<decltype(maxExclusive)>(uniform(randomGenerator) * static_cast<float>(maxExclusive));
1447  };
1448  // Helper function to get a bool that has probability 'trueProb' of being true.
1449  auto GetRandomFlag = [&randomGenerator](float trueProb) {
1450  std::uniform_real_distribution<float> uniform(0.0f, 1.0f);
1451  return uniform(randomGenerator) < trueProb;
1452  };
1453 
1454  constexpr uint32_t numTests = 100;
1455  for (uint32_t testIdx = 0; testIdx < numTests; ++testIdx)
1456  {
1457  randomGenerator.seed(testIdx); // Set a deterministic seed for reproducibility.
1458 
1459  // Create random graph
1460  Graph graph;
1461  {
1462  // First add the layers, without any connections. The following random constants determine the number of
1463  // each layer to add, along with the chance that each layer will be 'supported' (i.e. selected for
1464  // inclusion in the resulting subgraphs).
1465  uint32_t numInputs = 1 + GetRandom(4u);
1466  uint32_t numConstants = 1 + GetRandom(4u);
1467  uint32_t numOutputs = 1 + GetRandom(4u);
1468  uint32_t numConcats = 0 + GetRandom(500u);
1469  uint32_t numSplits = 0 + GetRandom(500u);
1470  float supportedProb = 0.7f;
1471 
1472  for (uint32_t i = 0; i < numInputs; ++i)
1473  {
1474  std::string name = "input" + std::to_string(i) + (GetRandomFlag(supportedProb) ? "S" : "N");
1475  graph.AddLayer<InputLayer>(static_cast<LayerBindingId>(i), name.c_str());
1476  }
1477  for (uint32_t i = 0; i < numConstants; ++i)
1478  {
1479  std::string name = "constant" + std::to_string(i) + (GetRandomFlag(supportedProb) ? "S" : "N");
1480  graph.AddLayer<ConstantLayer>(name.c_str());
1481  }
1482  for (uint32_t i = 0; i < numOutputs; ++i)
1483  {
1484  std::string name = "output" + std::to_string(i) + (GetRandomFlag(supportedProb) ? "S" : "N");
1485  graph.AddLayer<OutputLayer>(static_cast<LayerBindingId>(i), name.c_str());
1486  }
1487  for (uint32_t i = 0; i < numConcats; ++i)
1488  {
1489  std::string name = "concat" + std::to_string(i) + (GetRandomFlag(supportedProb) ? "S" : "N");
1490  numInputs = 1 + GetRandom(3u);
1491  OriginsDescriptor concatDesc(numInputs);
1492  graph.AddLayer<ConcatLayer>(concatDesc, name.c_str());
1493  }
1494  for (uint32_t i = 0; i < numSplits; ++i)
1495  {
1496  std::string name = "split" + std::to_string(i) + (GetRandomFlag(supportedProb) ? "S" : "N");
1497  numOutputs = 1 + GetRandom(3u);
1498  ViewsDescriptor splitDesc(numOutputs);
1499  graph.AddLayer<SplitterLayer>(splitDesc, name.c_str());
1500  }
1501 
1502  // Associate each layer with a "depth" parameter. This is used when creating connections to ensure
1503  // that we don't have any loops, by only connecting to layers with a lower "depth".
1504  // This can be thought of as distance from the "top" of the graph (assuming the graph flows top-to-bottom).
1505  // Unfortunately this approach ends up producing very "wide" graphs,
1506  // which probably isn't very representative of 'real' networks.
1507  uint32_t maxLayerDepth = 5 + GetRandom(2000u);
1508  std::map<Layer*, uint32_t> layerDepths;
1509  std::map<uint32_t, std::vector<Layer*>> layersAtDepth;
1510  for (Layer* layer : graph)
1511  {
1512  uint32_t depth;
1513  if (layer->GetType() == LayerType::Input || layer->GetType() == LayerType::Constant)
1514  {
 1515  // There needs to be at least one input-like layer above everything else, otherwise there would
 1516  // be nothing for the other layers to connect to!
1517  depth = 0;
1518  }
1519  else
1520  {
1521  // Other layers are randomly assigned to later depths.
1522  depth = 1 + GetRandom(maxLayerDepth);
1523  }
1524  layerDepths[layer] = depth;
1525  layersAtDepth[depth].push_back(layer);
1526  }
1527 
1528  // Connect layers to each other. Every input slot of every layer must be connected, but it doesn't
1529  // matter if an output slot goes unused.
1530  for (Layer* layer : graph)
1531  {
1532  for (uint32_t inputSlotIdx = 0; inputSlotIdx < layer->GetNumInputSlots(); ++inputSlotIdx)
1533  {
1534  InputSlot& inputSlot = layer->GetInputSlot(inputSlotIdx);
1535  uint32_t maxLayerDepthToConnectTo = layerDepths[layer];
1536  // This prevents a connection causing a loop
1537  // Finding a layer to connect to may take multiple attempts, so keep trying until it works.
1538  while (inputSlot.GetConnectedOutputSlot() == nullptr)
1539  {
1540  uint32_t layerDepth = GetRandom(maxLayerDepthToConnectTo);
1541  const std::vector<Layer*>& layersToChooseFrom = layersAtDepth[layerDepth];
1542  if (layersToChooseFrom.size() == 0)
1543  {
1544  continue;
1545  }
1546  Layer* layerToConnectWith = layersToChooseFrom[GetRandom(layersToChooseFrom.size())];
1547  if (layerToConnectWith->GetNumOutputSlots() == 0)
1548  {
1549  continue;
1550  }
1551  uint32_t outputSlotIdx = GetRandom(layerToConnectWith->GetNumOutputSlots());
1552  layerToConnectWith->GetOutputSlot(outputSlotIdx).Connect(inputSlot);
1553  }
1554  }
1555  }
1556  }
1557 
1558  if (debug)
1559  {
1560  std::ofstream f("INPUT_" + std::to_string(testIdx) + ".dot");
1561  graph.SerializeToDot(f);
1562  }
1563 
1564  // Run the splitting algorithm, selecting all nodes ending in an 'S' (as randomly assigned above).
1565  auto startTime = std::chrono::high_resolution_clock::now();
1566 
1568  SubgraphViewSelector::SelectSubgraphs(graph,
1569  [](const Layer& l) { return std::string(l.GetName()).back() == 'S'; });
1570 
1571  auto endTime = std::chrono::high_resolution_clock::now();
1572  auto duration = std::chrono::duration_cast<std::chrono::microseconds>(endTime - startTime);
1573  if (debug)
1574  {
1575  std::cout << "Test " << testIdx << ": " << duration.count() << " microseconds" << std::endl;
1576  }
1577 
1578  // Build a map of which subgraph is assigned to each layer.
1579  // This helps some of the following code.
1580  std::map<Layer*, SubgraphView*> layerToSubgraph;
1581  for (Layer* layer : graph)
1582  {
1583  size_t i = 0;
1584  for (std::unique_ptr<SubgraphView>& subgraph : subgraphs)
1585  {
1586  std::string name = std::to_string(i++);
1587  if (std::find(subgraph->cbeginIConnectable(), subgraph->cendIConnectable(), layer)
1588  != subgraph->cendIConnectable())
1589  {
1590  layerToSubgraph[layer] = subgraph.get();
1591  break;
1592  }
1593  }
1594  }
1595 
1596  if (debug)
1597  {
1598  // Before dumping the dot file, set each Layer's BackendId property so that the dot file
1599  // shows the resulting subgraph assignments.
1600  for (Layer* layer : graph)
1601  {
1602  std::string name = "NotAssigned";
1603  auto subgraphIt = layerToSubgraph.find(layer);
1604  if (subgraphIt != layerToSubgraph.end())
1605  {
1606  auto subgraphIdx = std::distance(subgraphs.begin(),
1607  std::find_if(subgraphs.begin(), subgraphs.end(),
1608  [&](auto& s) { return s.get() == subgraphIt->second; }));
1609  name = std::to_string(subgraphIdx);
1610  }
1611  layer->SetBackendId(armnn::BackendId(name));
1612  }
1613 
1614  std::ofstream f("GRAPH_" + std::to_string(testIdx) + ".dot");
1615  graph.SerializeToDot(f);
1616  }
1617 
1618  // Check the dependencies between subgraphs to make sure that the algorithm has produced a valid result.
1619  // Starting from each of the input slots of each subgraph, recurse up the graph and ensure that we never
1620  // encounter a layer that belongs to the subgraph that we started from.
1621  for (std::unique_ptr<SubgraphView>& subgraph : subgraphs)
1622  {
1623  for (IInputSlot* inSlot : subgraph->GetIInputSlots())
1624  {
1625  std::queue<Layer*> toProcess;
1626  toProcess.push(&PolymorphicDowncast<InputSlot*>(inSlot)->GetConnectedOutputSlot()->GetOwningLayer());
1627  while (toProcess.size() > 0)
1628  {
1629  Layer* l = toProcess.front();
1630  toProcess.pop();
1631 
1632  CHECK(layerToSubgraph[l] != subgraph.get());
1633 
1634  for (const InputSlot& is : l->GetInputSlots())
1635  {
1636  toProcess.push(&is.GetConnectedOutputSlot()->GetOwningLayer());
1637  }
1638  }
1639  }
1640  }
1641  }
1642 }
1643 
1644 }
A layer that the constant data can be bound to.
This layer represents a split operation.
A ViewsDescriptor for the SplitterLayer.
void Splitter(const SplitterQueueDescriptor &data, std::vector< ITensorHandle *> inputs, std::vector< ITensorHandle *> outputs)
Definition: Splitter.hpp:17
LayerT * AddLayer(Args &&... args)
Adds a new layer, of type LayerType, to the graph constructed with the arguments passed.
Definition: Graph.hpp:425
A Convolution2dDescriptor for the Convolution2dLayer.
int Connect(InputSlot &destination)
Definition: Layer.cpp:112
This layer represents an activation operation with the specified activation function.
void IgnoreUnused(Ts &&...)
const std::vector< InputSlot > & GetInputSlots() const
Definition: Layer.hpp:243
unsigned int GetNumOutputSlots() const override
Returns the number of connectable output slots.
Definition: Layer.hpp:320
int LayerBindingId
Type of identifiers for bindable layers (inputs, outputs).
Definition: Types.hpp:290
The SubgraphView class represents a subgraph of a Graph.
const InputSlot & GetInputSlot(unsigned int index) const override
Get a const input slot handle by slot index.
Definition: Layer.hpp:322
A layer user-provided data can be bound to (e.g. inputs, outputs).
Definition: OutputLayer.hpp:13
An OriginsDescriptor for the ConcatLayer.
This layer represents a merge operation.
Definition: ConcatLayer.hpp:13
const std::string & GetNameStr() const
Definition: Layer.hpp:225
LayerType GetType() const override
Returns the armnn::LayerType of this layer.
Definition: Layer.hpp:271
float Activation(float in, ActivationFunction function, float a, float b)
Definition: Activation.cpp:13
std::vector< SubgraphViewPtr > Subgraphs
const OutputSlot * GetConnectedOutputSlot() const
Definition: Layer.hpp:56
An ActivationDescriptor for the ActivationLayer.
Definition: Descriptors.hpp:36
This layer represents an addition operation.
std::unique_ptr< SubgraphView > SubgraphViewPtr
SubgraphView::SubgraphViewPtr CreateSubgraphViewFrom(SubgraphView::InputSlots &&inputs, SubgraphView::OutputSlots &&outputs, SubgraphView::Layers &&layers)
SubgraphView::OutputSlots CreateOutputsFrom(const std::vector< Layer *> &layers)
A layer user-provided data can be bound to (e.g. inputs, outputs).
Definition: InputLayer.hpp:13
const OutputSlot & GetOutputSlot(unsigned int index=0) const override
Get the const output slot handle by slot index.
Definition: Layer.hpp:324
const char * GetName() const override
Returns the name of the layer.
Definition: Layer.hpp:317
This layer represents a convolution 2d operation.
size_t GetNumLayers() const
Definition: Graph.hpp:198
LayerT * InsertNewLayer(InputSlot &insertBefore, Args &&... args)
Inserts a new layer between the output slot currently connected to insertBefore and insertBefore itself.
Definition: Graph.hpp:440
SubgraphView::InputSlots CreateInputsFrom(Layer *layer, std::vector< unsigned int > ignoreSlots)
An input connection slot for a layer.
Definition: INetwork.hpp:26

◆ TEST_SUITE() [4/4]

TEST_SUITE ( "IntegrationTests"  )

Definition at line 1646 of file SubgraphViewTests.cpp.

References armnn::Activation, Graph::AddLayer(), OptimizationViews::AddSubstitution(), ARMNN_ASSERT_MSG, IOutputSlot::Connect(), OutputSlot::Connect(), armnn::Constant, armnn::CpuAcc, INetwork::Create(), CreateInputsFrom(), CreateOutputsFrom(), CreateSubgraphViewFrom(), armnn::DepthwiseConvolution2d, SubgraphView::ForEachIConnectableLayer(), SubgraphView::ForEachLayer(), Layer::GetBackendId(), IInputSlot::GetConnection(), IOutputSlot::GetConnection(), InputSlot::GetConnection(), OutputSlot::GetConnection(), SubgraphView::GetIConnectableLayers(), OptimizationViews::GetINetwork(), IConnectableLayer::GetInputSlot(), Layer::GetInputSlot(), Layer::GetNameStr(), IConnectableLayer::GetOutputSlot(), Layer::GetOutputSlot(), IOutputSlot::GetOwningIConnectableLayer(), TensorInfo::GetShape(), IOutputSlot::GetTensorInfo(), IConnectableLayer::GetType(), Layer::GetType(), SubgraphView::GetWorkingCopy(), armnn::GpuAcc, armnn::Input, DepthwiseConvolution2dDescriptor::m_DataLayout, armnn::Multiplication, armnn::NHWC, armnn::numeric_cast(), armnn::Output, armnn::QAsymmU8, SubgraphViewSelector::SelectSubgraphs(), Layer::SetBackendId(), TensorInfo::SetShape(), IOutputSlot::SetTensorInfo(), OutputSlot::SetTensorInfo(), SubgraphView::SubstituteSubgraph(), Graph::SubstituteSubgraph(), armnn::swap(), and OptimizationViews::Validate().

1647 {
1648 TEST_CASE("SingleSubgraph")
1649 {
1650  // This test case represents the scenario when we have one subgraph
1651  // in which two layers have GpuAcc backend assigned
1652 
1653  //Construct graph
1654  Graph graph;
1655 
1656  Layer* const inputLayer = graph.AddLayer<InputLayer>(0, "input");
1657 
1658  Convolution2dDescriptor convDescriptor;
1659  Layer* const convLayer1 = graph.AddLayer<Convolution2dLayer>(convDescriptor, "conv1");
1660  convLayer1->SetBackendId(Compute::GpuAcc);
1661 
1662  Layer* const convLayer2 = graph.AddLayer<Convolution2dLayer>(convDescriptor, "conv2");
1663  convLayer2->SetBackendId(Compute::GpuAcc);
1664 
1665  Layer* const weights1 = graph.AddLayer<ConstantLayer>("weights1");
1666  weights1->SetBackendId(Compute::GpuAcc);
1667  Layer* const weights2 = graph.AddLayer<ConstantLayer>("weights2");
1668  weights2->SetBackendId(Compute::GpuAcc);
1669 
1670  Layer* const outputLayer = graph.AddLayer<OutputLayer>(0, "output");
1671 
1672  inputLayer->GetOutputSlot(0).Connect(convLayer1->GetInputSlot(0));
1673  weights1->GetOutputSlot(0).Connect(convLayer1->GetInputSlot(1));
1674  convLayer1->GetOutputSlot(0).Connect(convLayer2->GetInputSlot(0));
1675  weights2->GetOutputSlot(0).Connect(convLayer2->GetInputSlot(1));
1676  convLayer2->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
1677 
1678  // GpuAcc sub graph selector
      // NOTE(review): listing line 1679 (the declaration of 'subgraphs' that receives the
      // result of SelectSubgraphs below) was dropped by the documentation generator;
      // 'subgraphs' is used below without a visible declaration. See the source file.
1680  SubgraphViewSelector::SelectSubgraphs(
1681  graph,
1682  // select the GpuAcc layers only
1683  [](const Layer & l){
1684  bool toSelect = (l.GetBackendId() == Compute::GpuAcc);
1685  return toSelect;
1686  });
1687 
1688  CHECK(subgraphs.size() == 1);
1689  if(subgraphs.size() == 1)
1690  {
1691  CHECK((subgraphs[0] != nullptr));
1692 
1693  if (subgraphs[0].get() != nullptr)
1694  {
1695  unsigned int numInputSlots = armnn::numeric_cast<unsigned int>(subgraphs[0]->GetIInputSlots().size());
1696  unsigned int numOutputSlots = armnn::numeric_cast<unsigned int>(subgraphs[0]->GetIOutputSlots().size());
1697 
1698  CHECK((numInputSlots == 1));
1699  CHECK((numOutputSlots == 1));
1700 
1701  // Save sub-graph connections for comparison after substitution
1702  IOutputSlot* subgraphInputConn1 = subgraphs[0]->GetIInputSlot(0)->GetConnection();
1703  IInputSlot* subgraphOutputConn1 = subgraphs[0]->GetIOutputSlot(0)->GetConnection(0);
1704 
1705  // Construct dummy pre-compiled layer
1706  PreCompiledDescriptor preCompiledDescriptor(numInputSlots, numOutputSlots);
1707  Layer* const preCompiledLayer = graph.AddLayer<PreCompiledLayer>(preCompiledDescriptor, "pre-compiled");
1708 
1709  // Substitute sub-graph with pre-compiled layer
1710  graph.SubstituteSubgraph(*subgraphs[0], preCompiledLayer);
1711 
1712  // Check that connections are correct after substitution
1713  CHECK_EQ(preCompiledLayer->GetInputSlot(0).GetConnection(), subgraphInputConn1);
1714 
1715  CHECK_EQ(preCompiledLayer->GetOutputSlot(0).GetConnection(0), subgraphOutputConn1);
1716  }
1717  }
1718 }
1719 
1720 TEST_CASE("MultipleSubgraphs")
1721 {
1722  // This test case represents the scenario when we have two subgraphs
1723  // in which two layers have CpuAcc backend assigned
1724 
1725  //Construct graph
1726  Graph graph;
1727 
1728  Layer* const inputLayer = graph.AddLayer<InputLayer>(0, "input");
1729 
1730  ViewsDescriptor splitterDescriptor(2);
1731  Layer* const splitterLayer = graph.AddLayer<SplitterLayer>(splitterDescriptor, "splitter");
1732  splitterLayer->SetBackendId(Compute::CpuAcc);
1733 
1734  Convolution2dDescriptor convDescriptor;
1735  Layer* const convLayer1 = graph.AddLayer<Convolution2dLayer>(convDescriptor, "conv1");
1736  Layer* const convLayer2 = graph.AddLayer<Convolution2dLayer>(convDescriptor, "conv2");
1737 
1738  Layer* const weights1 = graph.AddLayer<ConstantLayer>("weights1");
1739  Layer* const weights2 = graph.AddLayer<ConstantLayer>("weights2");
1740 
1741  OriginsDescriptor concatDescriptor(2);
1742  Layer* const pConcatLayer = graph.AddLayer<ConcatLayer>(concatDescriptor, "concat");
1743  pConcatLayer->SetBackendId(Compute::CpuAcc);
1744 
1745  Layer* const outputLayer = graph.AddLayer<OutputLayer>(0, "output");
1746 
1747  inputLayer->GetOutputSlot(0).Connect(splitterLayer->GetInputSlot(0));
1748  splitterLayer->GetOutputSlot(0).Connect(convLayer1->GetInputSlot(0));
1749  splitterLayer->GetOutputSlot(1).Connect(convLayer2->GetInputSlot(0));
1750  weights1->GetOutputSlot(0).Connect(convLayer1->GetInputSlot(1));
1751  convLayer1->GetOutputSlot(0).Connect(pConcatLayer->GetInputSlot(0));
1752  weights2->GetOutputSlot(0).Connect(convLayer2->GetInputSlot(1));
1753  convLayer2->GetOutputSlot(0).Connect(pConcatLayer->GetInputSlot(1));
1754  pConcatLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
1755 
1756  // CpuAcc sub graph selector
      // NOTE(review): listing line 1757 (the declaration of 'subgraphs' that receives the
      // result of SelectSubgraphs below) was dropped by the documentation generator.
1758  SubgraphViewSelector::SelectSubgraphs(
1759  graph,
1760  // select the CpuAcc layers only
1761  [](const Layer & l){
1762  bool toSelect = (l.GetBackendId() == Compute::CpuAcc);
1763  return toSelect;
1764  });
1765 
1766  CHECK(subgraphs.size() == 2);
1767  if(subgraphs.size() == 2)
1768  {
1769  CHECK((subgraphs[0] != nullptr));
1770  CHECK((subgraphs[1] != nullptr));
1771 
1772  if (subgraphs[0].get() != nullptr && subgraphs[1].get() != nullptr)
1773  {
1774  //Sort subgraphs by their inputSlot size.
1775  std::sort(subgraphs.begin(), subgraphs.end(),
      // NOTE(review): listing line 1776 (the sort comparator's lambda parameter list,
      // introducing 'lhs' and 'rhs') was dropped by the documentation generator.
1777  {
1778  return (lhs->GetIInputSlots().size() < rhs->GetIInputSlots().size());
1779  }
1780  );
1781 
1782  unsigned int numInputSlots1 = armnn::numeric_cast<unsigned int>(subgraphs[0]->GetIInputSlots().size());
1783  unsigned int numOutputSlots1 = armnn::numeric_cast<unsigned int>(subgraphs[0]->GetIOutputSlots().size());
1784 
1785  unsigned int numInputSlots2 = armnn::numeric_cast<unsigned int>(subgraphs[1]->GetIInputSlots().size());
1786  unsigned int numOutputSlots2 = armnn::numeric_cast<unsigned int>(subgraphs[1]->GetIOutputSlots().size());
1787 
1788  // Save sub-graph connections for comparison after substitution
1789  IOutputSlot* subgraph1InputConn = subgraphs[0]->GetIInputSlot(0)->GetConnection();
1790  IInputSlot* subgraph1OutputConn1 = subgraphs[0]->GetIOutputSlot(0)->GetConnection(0);
1791  IInputSlot* subgraph1OutputConn2 = subgraphs[0]->GetIOutputSlot(1)->GetConnection(0);
1792 
1793  // Save sub-graph connections for comparison after substitution
1794  IOutputSlot* subgraph2InputConn1 = subgraphs[1]->GetIInputSlot(0)->GetConnection();
1795  IOutputSlot* subgraph2InputConn2 = subgraphs[1]->GetIInputSlot(1)->GetConnection();
1796  IInputSlot* subgraph2OutputConn = subgraphs[1]->GetIOutputSlot(0)->GetConnection(0);
1797 
1798  PreCompiledDescriptor preCompiledDescriptor1(numInputSlots1, numOutputSlots1);
1799  Layer* const preCompiledLayer1 = graph.AddLayer<PreCompiledLayer>(preCompiledDescriptor1, "pre-compiled1");
1800 
1801  PreCompiledDescriptor preCompiledDescriptor2(numInputSlots2, numOutputSlots2);
1802  Layer* const preCompiledLayer2 = graph.AddLayer<PreCompiledLayer>(preCompiledDescriptor2, "pre-compiled2");
1803 
1804  // Substitute sub-graph with pre-compiled layer
1805  graph.SubstituteSubgraph(*subgraphs[0], preCompiledLayer1);
1806  graph.SubstituteSubgraph(*subgraphs[1], preCompiledLayer2);
1807 
1808  // Check that connections are correct after substitution
1809  CHECK_EQ(preCompiledLayer1->GetInputSlot(0).GetConnection(), subgraph1InputConn);
1810  CHECK_EQ(preCompiledLayer1->GetOutputSlot(0).GetConnection(0), subgraph1OutputConn1);
1811  CHECK_EQ(preCompiledLayer1->GetOutputSlot(1).GetConnection(0), subgraph1OutputConn2);
1812 
1813  CHECK_EQ(preCompiledLayer2->GetInputSlot(0).GetConnection(), subgraph2InputConn1);
1814  CHECK_EQ(preCompiledLayer2->GetInputSlot(1).GetConnection(), subgraph2InputConn2);
1815  CHECK_EQ(preCompiledLayer2->GetOutputSlot(0).GetConnection(0), subgraph2OutputConn);
1816  }
1817  }
1818 }
1819 
1820 TEST_CASE("SubgraphCycles")
1821 {
1822  // This case represent the scenario when a naive split could lead to a cyclic dependency between two subgraphs
1823  //
1824  // X0 -> M0 -> X1 -> M2 -> X2
1825  // X0 -> M0 -> M1 -> M2 -> X2
1826  //
1827  /*
1828  X0
1829  |
1830  |
1831  M0
1832  / |
1833  / |
1834  X1 M1
1835  \ /
1836  M2
1837  |
1838  X2
1839  */
1840  // The expected result for this is that M0,M1 will be part of one subgraph and M2 in another and the
1841  // input and output slots in the subgraphs will be set accordingly.
1842  //
1843  Graph graph;
1844 
1845  OriginsDescriptor originsDescriptor(2);
1846  auto x0 = graph.AddLayer<InputLayer>(0, "x0");
1847  auto m0 = graph.AddLayer<ActivationLayer>(ActivationDescriptor{}, "m0");
1848  auto x1 = graph.AddLayer<ActivationLayer>(ActivationDescriptor{}, "x1");
1849  auto m1 = graph.AddLayer<ActivationLayer>(ActivationDescriptor{}, "m1");
1850  auto m2 = graph.AddLayer<AdditionLayer>("m2");
1851  auto x2 = graph.AddLayer<ActivationLayer>(ActivationDescriptor{}, "x2");
1852 
1853  x0->GetOutputSlot(0).Connect(m0->GetInputSlot(0));
1854  m0->GetOutputSlot(0).Connect(x1->GetInputSlot(0));
1855  m0->GetOutputSlot(0).Connect(m1->GetInputSlot(0));
1856  x1->GetOutputSlot(0).Connect(m2->GetInputSlot(0));
1857  m1->GetOutputSlot(0).Connect(m2->GetInputSlot(1));
1858  m2->GetOutputSlot(0).Connect(x2->GetInputSlot(0));
1859 
1860  // All selected 'M*' layers will be have 'm' in the name
      // NOTE(review): listing line 1861 (the declaration of 'subgraphs' that receives the
      // result of SelectSubgraphs below) was dropped by the documentation generator.
1862  SubgraphViewSelector::SelectSubgraphs(
1863  graph,
1864  // select the middle layers only
1865  [](const Layer & l)
1866  {
1867  bool toSelect = (l.GetNameStr().find('m') != std::string::npos);
1868  return toSelect;
1869  });
1870 
1871  // expected results to test against
1872  auto inputSubgraph = CreateSubgraphViewFrom(CreateInputsFrom({m0}),
1873  CreateOutputsFrom({m0, m1}),
1874  {m0, m1});
1875 
1876  auto outputSubgraph = CreateSubgraphViewFrom(CreateInputsFrom({m2}),
1877  CreateOutputsFrom({m2}),
1878  {m2});
1879 
1880  CHECK(subgraphs.size() == 2);
1881  if (subgraphs.size() == 2)
1882  {
1883  // we need to have valid subgraph pointers here
1884  CHECK((subgraphs[0] != nullptr));
1885  CHECK((subgraphs[1] != nullptr));
1886 
1887  if (subgraphs[0].get() != nullptr && subgraphs[1].get() != nullptr)
1888  {
1889  // sort the subgraphs by layer size, so it is simpler to test
1890  std::sort(subgraphs.begin(), subgraphs.end(),
      // NOTE(review): listing line 1891 (the sort comparator's lambda parameter list,
      // introducing 'lhs' and 'rhs') was dropped by the documentation generator.
1892  {
1893  return (lhs->GetIConnectableLayers().size() < rhs->GetIConnectableLayers().size());
1894  }
1895  );
1896 
1897  // one subgraph needs to be size=1 and the other one is 2 (the {m2} and {m0, m1} subgraphs)
1898  CHECK(subgraphs[0]->GetIConnectableLayers().size() == 1);
1899  CHECK(subgraphs[1]->GetIConnectableLayers().size() == 2);
1900 
1901  CompareSubgraphViews(subgraphs[0], outputSubgraph);
1902  CompareSubgraphViews(subgraphs[1], inputSubgraph);
1903  }
1904  }
1905 }
1906 
1907 TEST_CASE("SubgraphOrder")
1908 {
      // Verifies that CreateSubgraphViewFrom() topologically sorts layers that
      // are supplied out of order (Output, Input, Activation -> Input, Activation, Output).
1909  Graph graph;
1910 
1911  auto input = graph.AddLayer<InputLayer>(0, "Input");
1912  auto activation = graph.AddLayer<ActivationLayer>(ActivationDescriptor{}, "Activation");
1913  auto output = graph.AddLayer<OutputLayer>(1, "Output");
1914 
1915  input->GetOutputSlot(0).Connect(activation->GetInputSlot(0));
1916  activation->GetOutputSlot(0).Connect(output->GetInputSlot(0));
1917 
1918  //Add in out of order
1919  auto view = CreateSubgraphViewFrom({},
1920  {},
1921  {output, input, activation});
1922 
1923  // Check the layers are sorted topologically in the view
1924  int idx=0;
1925  LayerType expectedSorted[] = {LayerType::Input, LayerType::Activation, LayerType::Output};
1926  view->ForEachLayer([&idx, &expectedSorted](const Layer* l)
1927  {
1928  CHECK((expectedSorted[idx] == l->GetType()));
1929  idx++;
1930  }
1931  );
1932 }
1933 
1934 TEST_CASE("SubgraphViewWorkingCopy")
1935 {
      // Verifies that a working copy obtained via GetWorkingCopy() keeps the
      // layers topologically sorted even when the view was built out of order.
1936  Graph graph;
1937 
1938  auto input = graph.AddLayer<InputLayer>(0, "Input");
1939  auto activation = graph.AddLayer<ActivationLayer>(ActivationDescriptor{}, "Activation");
1940  auto output = graph.AddLayer<OutputLayer>(1, "Output");
1941 
1942  input->GetOutputSlot(0).Connect(activation->GetInputSlot(0));
1943  activation->GetOutputSlot(0).Connect(output->GetInputSlot(0));
1944 
1945  //Add in out of order
1946  auto view = CreateSubgraphViewFrom({output, input, activation},
1947  {},
1948  {});
1949 
1950  SubgraphView workingCopy = view->GetWorkingCopy();
1951 
1952  // Check the layers are sorted topologically in the view
1953  int idx=0;
1954  LayerType expectedSorted[] = {LayerType::Input, LayerType::Activation, LayerType::Output};
1955  workingCopy.ForEachIConnectableLayer([&idx, &expectedSorted](const IConnectableLayer* l)
1956  {
1957  CHECK((expectedSorted[idx] == l->GetType()));
1958  idx++;
1959  }
1960  );
1961 }
1962 
1963 bool ReplaceConstantMultiplicationWithDepthwise(SubgraphView& subgraph,
1964  IConnectableLayer* layer)
1965 {
     // Replaces a (input * constant) Multiplication pattern with a DepthwiseConvolution2d
     // layer, but only when the constant has shape { 1, 1, 1, C } matching the input's
     // channel count. Returns true if a substitution was performed.
1966  if (layer->GetType() == LayerType::Multiplication)
1967  {
1968  IInputSlot* patternSubgraphInput = &layer->GetInputSlot(0);
1969  IInputSlot* patternSubgraphConstant = &layer->GetInputSlot(1);
1970 
1971  const IConnectableLayer* inputLayer = &patternSubgraphInput->GetConnection()->GetOwningIConnectableLayer();
1972  const IConnectableLayer* constantLayer = &layer->GetInputSlot(1).GetConnection()->GetOwningIConnectableLayer();
1973 
1974  // Figure out which of the two inputs is the constant
1975  if (constantLayer->GetType() != LayerType::Constant)
1976  {
1977  std::swap(patternSubgraphInput, patternSubgraphConstant);
1978  std::swap(inputLayer, constantLayer);
1979  }
1980 
1981  if (constantLayer->GetType() == LayerType::Constant)
1982  {
1983  const TensorInfo& inputInfo = inputLayer->GetOutputSlot(0).GetTensorInfo();
1984  const TensorInfo& constInfo = constantLayer->GetOutputSlot(0).GetTensorInfo();
1985 
1986  // Add a Depthwise only where the constant input is a scalar that takes the form { 1, 1, 1, C }.
1987  // The scalar is used as weights for the convolution.
1988  if (constInfo.GetShape() == TensorShape({ 1, 1, 1, inputInfo.GetShape()[3] }))
1989  {
1990  auto replacementGraph = INetwork::Create();
1991 
      // NOTE(review): listing line 1992 (the declaration of 'desc', presumably a
      // DepthwiseConvolution2dDescriptor used below) was dropped by the documentation generator.
1993  desc.m_DataLayout = DataLayout::NHWC;
1994 
1995  TensorInfo weightInfo = constInfo;
1996  const TensorInfo& outputInfo = layer->GetOutputSlot(0).GetTensorInfo();
1997  unsigned int M = outputInfo.GetShape()[3] / inputInfo.GetShape()[3];
1998  ARMNN_ASSERT_MSG(M == 1, "Constant multiplication only support 1x1x1xC, so M should always be 1 here");
1999  weightInfo.SetShape({ 1, 1, 1, constInfo.GetShape()[3] * M }); //1HW(I*M)
2000 
2001  const void* weightData = PolymorphicPointerDowncast<const ConstantLayer>(constantLayer)
2002  ->m_LayerOutput->GetConstTensor<void>();
      // NOTE(review): 'weightsInfo' below is a fresh copy of constInfo, so the reshaped
      // 'weightInfo' from line 1999 is never used to build 'weights' — with M == 1 the two
      // shapes coincide, but confirm this is intentional in the source file.
2003  TensorInfo weightsInfo = constInfo;
2004  ConstTensor weights(weightsInfo, weightData);
2005 
2006  const auto depthwiseLayer = replacementGraph->AddDepthwiseConvolution2dLayer(
2007  desc, "Replacement for Constant-Multiplication");
2008 
2009  auto& outslot = layer->GetOutputSlot(0);
2010  SubgraphView::IOutputSlots outputs{ &outslot };
      // NOTE(review): listing line 2011 (the declaration of 'layers', presumably a
      // SubgraphView::IConnectableLayers list) was dropped by the documentation generator.
2012  layers.push_back(layer);
2013  layers.push_back(const_cast<IConnectableLayer*>(constantLayer));
2014 
2015  SubgraphView patternSubgraph(std::move(layers),
2016  {patternSubgraphInput, patternSubgraphConstant},
2017  {&layer->GetOutputSlot(0)});
2018 
2019  subgraph.SubstituteSubgraph(patternSubgraph, depthwiseLayer );
2020 
2021  return true;
2022  }
2023  }
2024  }
2025  return false;
2026 }
2027 
2028 bool ReplaceTestMultiplication(SubgraphView& subgraph,
2029  IConnectableLayer* layer)
2030 {
     // Dispatches a Multiplication layer to the appropriate replacement routine.
     // NOTE(review): the switch is redundant — the enclosing 'if' already guarantees
     // LayerType::Multiplication, so the 'default' branch (and its throw) is unreachable.
2031  if (layer->GetType() == LayerType::Multiplication)
2032  {
2033 
2034  switch (layer->GetType())
2035  {
2036  case LayerType::Multiplication:
2037  return ReplaceConstantMultiplicationWithDepthwise(subgraph, layer);
2038  break;
2039  default:
2040  throw Exception("Found unknown MultiplicationSupportedMode value");
2041  break;
2042  }
2043  }
2044  return false;
2045 }
2046 
2047 void ReplaceUnsupportedLayers(SubgraphView& subgraph)
2048 {
     // Runs every registered replacement function over each layer of the subgraph,
     // stopping at the first function that reports it modified the layer.
2049  using ReplacementFunc = bool (*)(SubgraphView&, IConnectableLayer*);
2050  const ReplacementFunc replacementFuncs[] = {
2051  &ReplaceTestMultiplication,
2052  };
2053 
2054  subgraph.ForEachLayer([replacementFuncs, &subgraph](IConnectableLayer* layer)
2055  {
2056  auto madeChange = false;
2057  for (const ReplacementFunc f : replacementFuncs)
2058  {
2059  madeChange = f(subgraph, layer);
2060  if (madeChange)
2061  {
     // 'goto' acts as a labelled continue: skip the remaining replacement
     // functions once one of them has already rewritten this layer.
2062  goto nextIteration;
2063  }
2064  }
2065  nextIteration:;
2066  }
2067  );
2068 }
2069 
2070 TEST_CASE("SubgraphViewWorkingCopyReplacementFunc")
2071 {
      // Builds an Input -> (Constant, Multiplication) -> Output pattern and checks that
      // ReplaceUnsupportedLayers() collapses Constant+Multiplication into a single
      // DepthwiseConvolution2d inside the working copy (4 layers -> 3 layers).
2072  Graph graph;
2073 
2074  const TensorInfo inputInfo({ 1, 8, 8, 16 }, DataType::QAsymmU8, 1.0f, 0);
2075  const TensorInfo constInfo({ 1, 1, 1, 16 }, DataType::QAsymmU8, 0.9f, 0, true);
2076  const TensorInfo outputInfo({ 1, 8, 8, 16 }, DataType::QAsymmU8, 1.0f, 0);
2077 
2078  std::vector<uint8_t> constData(constInfo.GetNumElements(), 0);
2079  std::iota(constData.begin(), constData.end(), 0);
2080  ConstTensor constTensor(constInfo, constData);
2081 
2082  // Add the original pattern
2083  IConnectableLayer* input = graph.AddLayer<InputLayer>(0, "input");
2084  auto constant = graph.AddLayer<ConstantLayer>("const");
2085 
2086  constant->m_LayerOutput = std::make_shared<ScopedTensorHandle>(constTensor);
2087  IConnectableLayer* mul = graph.AddLayer<MultiplicationLayer>("mul");
2088  IConnectableLayer* output = graph.AddLayer<OutputLayer>(0, "output");
2089 
2090  // Create connections between layers
2091  input->GetOutputSlot(0).SetTensorInfo(inputInfo);
2092  constant->GetOutputSlot(0).SetTensorInfo(constInfo);
2093  mul->GetOutputSlot(0).SetTensorInfo(outputInfo);
2094 
2095  input->GetOutputSlot(0).Connect(mul->GetInputSlot(0));
2096  constant->GetOutputSlot(0).Connect(mul->GetInputSlot(1));
2097  mul->GetOutputSlot(0).Connect(output->GetInputSlot(0));
2098 
2099  //Add in out of order
2100  auto view = CreateSubgraphViewFrom({output, input, mul, constant},
2101  {},
2102  {});
2103 
2104  SubgraphView workingCopy = view->GetWorkingCopy();
2105 
2106  // Check the WorkingCopy is as expected before replacement
2107  CHECK(workingCopy.GetIConnectableLayers().size() == 4);
2108  int idx=0;
2109  LayerType expectedSorted[] = {LayerType::Input, LayerType::Constant, LayerType::Multiplication, LayerType::Output};
2110  workingCopy.ForEachIConnectableLayer([&idx, &expectedSorted](const IConnectableLayer* l)
2111  {
2112  CHECK((expectedSorted[idx] == l->GetType()));
2113  idx++;
2114  }
2115  );
2116 
2117  // Replace Multiplication and Constant with Depthwise
2118  ReplaceUnsupportedLayers(workingCopy);
2119 
2120  // Check the layers are as expected
2121  CHECK(workingCopy.GetIConnectableLayers().size() == 3);
2122  idx=0;
2123  LayerType expectedSortedReplaced[] = {LayerType::Input, LayerType::DepthwiseConvolution2d, LayerType::Output};
2124  workingCopy.ForEachIConnectableLayer([&idx, &expectedSortedReplaced](const IConnectableLayer* l)
2125  {
2126  CHECK((expectedSortedReplaced[idx] == l->GetType()));
2127  idx++;
2128  }
2129  );
2130 }
2131 
2132 TEST_CASE("SubgraphViewWorkingCopySubstituteSubgraph")
2133 {
      // Verifies the working-copy contract: SubstituteSubgraph() throws when called on an
      // original (non-working-copy) view, and GetWorkingCopy() throws on a working copy.
2134  Graph graph;
2135 
2136  auto input = graph.AddLayer<InputLayer>(0, "Input");
2137  auto activation = graph.AddLayer<ActivationLayer>(ActivationDescriptor{}, "Activation");
2138  auto output = graph.AddLayer<OutputLayer>(1, "Output");
2139 
2140  input->GetOutputSlot(0).Connect(activation->GetInputSlot(0));
2141  activation->GetOutputSlot(0).Connect(output->GetInputSlot(0));
2142 
2143  //Add in out of order
2144  auto view = CreateSubgraphViewFrom({output, input, activation},
2145  {},
2146  {});
2147 
2148  // Check SubstituteSubgraphView throws when called on original SubgraphView
2149  SubgraphView temp(input);
2150  CHECK_THROWS_AS(view->SubstituteSubgraph(temp, input), NullPointerException);
2151 
2152  // Check that GetWorkingCopy() being called on a working copy throws an exception
2153  auto workingCopy = view->GetWorkingCopy();
2154  CHECK_THROWS_AS(workingCopy.GetWorkingCopy(), Exception);
2155 }
2156 
2157 TEST_CASE("SubgraphViewWorkingCopyOptimizationViews")
2158 {
      // End-to-end: mutate a working copy via ReplaceUnsupportedLayers(), then check a
      // substitution against the ORIGINAL view can still be registered and validated
      // through OptimizationViews.
2159  Graph graph;
2160 
2161  const TensorInfo inputInfo({ 1, 8, 8, 16 }, DataType::QAsymmU8, 1.0f, 0);
2162  const TensorInfo constInfo({ 1, 1, 1, 16 }, DataType::QAsymmU8, 0.9f, 0, true);
2163  const TensorInfo outputInfo({ 1, 8, 8, 16 }, DataType::QAsymmU8, 1.0f, 0);
2164 
2165  std::vector<uint8_t> constData(constInfo.GetNumElements(), 0);
2166  std::iota(constData.begin(), constData.end(), 0);
2167  ConstTensor constTensor(constInfo, constData);
2168 
2169  // Add the original pattern
2170  IConnectableLayer* input = graph.AddLayer<InputLayer>(0, "input");
2171  auto constant = graph.AddLayer<ConstantLayer>("const");
2172 
2173  constant->m_LayerOutput = std::make_shared<ScopedTensorHandle>(constTensor);
2174  IConnectableLayer* mul = graph.AddLayer<MultiplicationLayer>("mul");
2175  IConnectableLayer* output = graph.AddLayer<OutputLayer>(0, "output");
2176 
2177  // Create connections between layers
2178  input->GetOutputSlot(0).SetTensorInfo(inputInfo);
2179  constant->GetOutputSlot(0).SetTensorInfo(constInfo);
2180  mul->GetOutputSlot(0).SetTensorInfo(outputInfo);
2181 
2182  input->GetOutputSlot(0).Connect(mul->GetInputSlot(0));
2183  constant->GetOutputSlot(0).Connect(mul->GetInputSlot(1));
2184  mul->GetOutputSlot(0).Connect(output->GetInputSlot(0));
2185 
2186  //Add in out of order
2187  auto view = CreateSubgraphViewFrom({output, input, mul, constant},
2188  {},
2189  {});
2190 
2191  SubgraphView workingCopy = view->GetWorkingCopy();
2192 
2193  // Check the WorkingCopy is as expected before replacement
2194  int idx=0;
2195  LayerType expectedSorted[] = {LayerType::Input, LayerType::Constant, LayerType::Multiplication, LayerType::Output};
2196  workingCopy.ForEachIConnectableLayer([&idx, &expectedSorted](const IConnectableLayer* l)
2197  {
2198  CHECK((expectedSorted[idx] == l->GetType()));
2199  idx++;
2200  }
2201  );
2202 
2203  // Replace Multiplication and Constant with Depthwise
2204  ReplaceUnsupportedLayers(workingCopy);
2205 
2206  // Check the layers are as expected
2207  idx=0;
2208  LayerType expectedSortedReplaced[] = {LayerType::Input, LayerType::DepthwiseConvolution2d, LayerType::Output};
2209  workingCopy.ForEachIConnectableLayer([&idx, &expectedSortedReplaced](const IConnectableLayer* l)
2210  {
2211  CHECK((expectedSortedReplaced[idx] == l->GetType()));
2212  idx++;
2213  }
2214  );
2215 
2216 
2217  // At this stage NPU would take the working copy and create CompiledBlobPtr with it.
2218 
2219  // We will just check that the preCompiledLayer can still be added to the optimizationViews via a SubgraphView.
2220  OptimizationViews optimizationViews;
2221 
2222  CompiledBlobPtr ptr;
2223  IConnectableLayer* preCompiledLayer = optimizationViews.GetINetwork()->AddPrecompiledLayer(
2224  PreCompiledDescriptor(view->GetNumInputSlots(), view->GetNumOutputSlots()),
2225  std::move(ptr),
2226  EmptyOptional(),
2227  "pre-compiled");
2228 
2229 
2230  optimizationViews.AddSubstitution({ *view, SubgraphView(preCompiledLayer) });
2231  CHECK(optimizationViews.Validate(*view));
2232 }
2233 
2234 TEST_CASE("SubgraphViewWorkingCopyReplaceSlots")
2235 {
      // Same replacement scenario as SubgraphViewWorkingCopyReplacementFunc, but the view is
      // built with explicit input/output slots (CreateIInputsFrom/CreateIOutputsFrom on the
      // Multiplication layer), exercising slot handling during substitution.
2236  Graph graph;
2237  const TensorInfo inputInfo({ 1, 8, 8, 16 }, DataType::QAsymmU8, 1.0f, 0);
2238  const TensorInfo constInfo({ 1, 1, 1, 16 }, DataType::QAsymmU8, 0.9f, 0, true);
2239  const TensorInfo outputInfo({ 1, 8, 8, 16 }, DataType::QAsymmU8, 1.0f, 0);
2240 
2241  std::vector<uint8_t> constData(constInfo.GetNumElements(), 0);
2242  std::iota(constData.begin(), constData.end(), 0);
2243  ConstTensor constTensor(constInfo, constData);
2244 
2245  // Add the original pattern
2246  IConnectableLayer* input = graph.AddLayer<InputLayer>(0, "input");
2247  auto constant = graph.AddLayer<ConstantLayer>("const");
2248 
2249  constant->m_LayerOutput = std::make_shared<ScopedTensorHandle>(constTensor);
2250  IConnectableLayer* mul = graph.AddLayer<MultiplicationLayer>("mul");
2251  IConnectableLayer* output = graph.AddLayer<OutputLayer>(0, "output");
2252 
2253  // Create connections between layers
2254  input->GetOutputSlot(0).SetTensorInfo(inputInfo);
2255  constant->GetOutputSlot(0).SetTensorInfo(constInfo);
2256  mul->GetOutputSlot(0).SetTensorInfo(outputInfo);
2257 
2258  input->GetOutputSlot(0).Connect(mul->GetInputSlot(0));
2259  constant->GetOutputSlot(0).Connect(mul->GetInputSlot(1));
2260  mul->GetOutputSlot(0).Connect(output->GetInputSlot(0));
2261 
2262  auto view = CreateSubgraphViewFrom({output, input, mul, constant},
2263  CreateIInputsFrom({mul}),
2264  CreateIOutputsFrom({mul}));
2265 
2266  SubgraphView workingCopy = view->GetWorkingCopy();
2267 
2268  // Check the WorkingCopy is as expected before replacement
2269  CHECK(workingCopy.GetIConnectableLayers().size() == 4);
2270  int idx=0;
2271  LayerType expectedSorted[] = {LayerType::Input, LayerType::Constant, LayerType::Multiplication, LayerType::Output};
2272  workingCopy.ForEachIConnectableLayer([&idx, &expectedSorted](const IConnectableLayer* l)
2273  {
2274  CHECK((expectedSorted[idx] == l->GetType()));
2275  idx++;
2276  }
2277  );
2278 
2279  // Replace Multiplication and Constant with Depthwise
2280  ReplaceUnsupportedLayers(workingCopy);
2281 
2282  // Check the layers are as expected
2283  idx=0;
2284  LayerType expectedSortedReplaced[] = {LayerType::Input, LayerType::DepthwiseConvolution2d, LayerType::Output};
2285  CHECK(workingCopy.GetIConnectableLayers().size() == 3);
2286  workingCopy.ForEachIConnectableLayer([&idx, &expectedSortedReplaced](const IConnectableLayer* l)
2287  {
2288  CHECK((expectedSortedReplaced[idx] == l->GetType()));
2289  idx++;
2290  }
2291  );
2292 }
2293 
2294 }
A layer that the constant data can be bound to.
This layer represents a split operation.
A ViewsDescriptor for the SplitterLayer.
Interface for a layer that is connectable to other layers via InputSlots and OutputSlots.
Definition: INetwork.hpp:66
const TensorShape & GetShape() const
Definition: Tensor.hpp:191
void swap(OriginsDescriptor &first, OriginsDescriptor &second)
const IConnectableLayers & GetIConnectableLayers() const
DataLayout m_DataLayout
The data layout to be used (NCHW, NHWC).
LayerT * AddLayer(Args &&... args)
Adds a new layer, of type LayerType, to the graph constructed with the arguments passed.
Definition: Graph.hpp:425
A Convolution2dDescriptor for the Convolution2dLayer.
int Connect(InputSlot &destination)
Definition: Layer.cpp:112
void AddSubstitution(SubstitutionPair &&substitution)
This layer represents an activation operation with the specified activation function.
void SetBackendId(const BackendId &id)
Definition: Layer.hpp:276
virtual const IInputSlot * GetConnection(unsigned int index) const =0
const IOutputSlot * GetConnection() const override
Definition: Layer.hpp:204
void SubstituteSubgraph(SubgraphView &, IConnectableLayer *)
These methods should be called on a working copy subgraph created from GetWorkingCopy.
virtual void SetTensorInfo(const TensorInfo &tensorInfo)=0
void SetShape(const TensorShape &newShape)
Definition: Tensor.hpp:193
std::unique_ptr< void, CompiledBlobDeleter > CompiledBlobPtr
Definition: INetwork.hpp:245
The SubgraphView class represents a subgraph of a Graph.
const InputSlot & GetInputSlot(unsigned int index) const override
Get a const input slot handle by slot index.
Definition: Layer.hpp:322
A layer user-provided data can be bound to (e.g. inputs, outputs).
Definition: OutputLayer.hpp:13
std::list< IConnectableLayer * > IConnectableLayers
#define ARMNN_ASSERT_MSG(COND, MSG)
Definition: Assert.hpp:15
An output connection slot for a layer.
Definition: INetwork.hpp:40
An OriginsDescriptor for the ConcatLayer.
std::vector< IOutputSlot * > IOutputSlots
A tensor defined by a TensorInfo (shape and data type) and an immutable backing store.
Definition: Tensor.hpp:327
This layer represents a merge operation.
Definition: ConcatLayer.hpp:13
void ForEachIConnectableLayer(Func func) const
const std::string & GetNameStr() const
Definition: Layer.hpp:225
LayerType GetType() const override
Returns the armnn::LayerType of this layer.
Definition: Layer.hpp:271
float Activation(float in, ActivationFunction function, float a, float b)
Definition: Activation.cpp:13
std::vector< SubgraphViewPtr > Subgraphs
virtual const IConnectableLayer & GetOwningIConnectableLayer() const =0
bool Validate(const SubgraphView &originalSubgraph) const
An ActivationDescriptor for the ActivationLayer.
Definition: Descriptors.hpp:36
const BackendId & GetBackendId() const
Definition: Layer.hpp:275
virtual LayerType GetType() const =0
Returns the armnn::LayerType of this layer.
This layer represents an addition operation.
void SubstituteSubgraph(SubgraphView &subgraph, IConnectableLayer *substituteLayer)
Substitutes the given sub-graph with either a new layer or a new sub-graph.
Definition: Graph.cpp:435
std::unique_ptr< SubgraphView > SubgraphViewPtr
EmptyOptional is used to initialize the Optional class in case we want to have default value for an O...
Definition: Optional.hpp:32
SubgraphView::SubgraphViewPtr CreateSubgraphViewFrom(SubgraphView::InputSlots &&inputs, SubgraphView::OutputSlots &&outputs, SubgraphView::Layers &&layers)
SubgraphView::OutputSlots CreateOutputsFrom(const std::vector< Layer *> &layers)
Base class for all ArmNN exceptions so that users can filter to just those.
Definition: Exceptions.hpp:46
A layer user-provided data can be bound to (e.g. inputs, outputs).
Definition: InputLayer.hpp:13
void SetTensorInfo(const TensorInfo &tensorInfo) override
Definition: Layer.cpp:87
virtual const IInputSlot & GetInputSlot(unsigned int index) const =0
Get a const input slot handle by slot index.
virtual const IOutputSlot * GetConnection() const =0
std::enable_if_t< std::is_unsigned< Source >::value &&std::is_unsigned< Dest >::value, Dest > numeric_cast(Source source)
Definition: NumericCast.hpp:35
const OutputSlot & GetOutputSlot(unsigned int index=0) const override
Get the const output slot handle by slot index.
Definition: Layer.hpp:324
virtual const TensorInfo & GetTensorInfo() const =0
virtual const IOutputSlot & GetOutputSlot(unsigned int index) const =0
Get the const output slot handle by slot index.
This layer represents a convolution 2d operation.
virtual int Connect(IInputSlot &destination)=0
void ForEachLayer(Func func) const
A PreCompiledDescriptor for the PreCompiledLayer.
This layer represents a multiplication operation.
SubgraphView GetWorkingCopy()
This method returns a copy of the original SubgraphView provided by OptimizeSubgraphView with a separ...
SubgraphView::InputSlots CreateInputsFrom(Layer *layer, std::vector< unsigned int > ignoreSlots)
An input connection slot for a layer.
Definition: INetwork.hpp:26
A DepthwiseConvolution2dDescriptor for the DepthwiseConvolution2dLayer.
const InputSlot * GetConnection(unsigned int index) const override
Definition: Layer.cpp:75
unsigned int GetNumElements() const
Definition: Tensor.hpp:196
LayerType
When adding a new layer, adapt also the LastLayer enum value in the enum class LayerType below...
Definition: Types.hpp:467